Diffstat (limited to 'users')
630 files changed, 48468 insertions, 0 deletions
diff --git a/users/Profpatsch/OWNERS b/users/Profpatsch/OWNERS new file mode 100644 index 000000000000..5a73d4c3a1fd --- /dev/null +++ b/users/Profpatsch/OWNERS @@ -0,0 +1,4 @@ +inherited: false +owners: + - Profpatsch + - sterni diff --git a/users/Profpatsch/advent-of-code/2020/01/main.py b/users/Profpatsch/advent-of-code/2020/01/main.py new file mode 100644 index 000000000000..e636017a54d5 --- /dev/null +++ b/users/Profpatsch/advent-of-code/2020/01/main.py @@ -0,0 +1,22 @@ +import sys + +l = [] +with open('./input', 'r') as f: + for line in f: + l.append(int(line)) + +s = set(l) + +res=None +for el in s: + for el2 in s: + if (2020-(el+el2)) in s: + res=(el, el2, 2020-(el+el2)) + break + +if res == None: + sys.exit("could not find a number that adds to 2020") + +print(res) + +print(res[0] * res[1] * res[2]) diff --git a/users/Profpatsch/advent-of-code/2020/02/main.py b/users/Profpatsch/advent-of-code/2020/02/main.py new file mode 100644 index 000000000000..e3b27c382a21 --- /dev/null +++ b/users/Profpatsch/advent-of-code/2020/02/main.py @@ -0,0 +1,77 @@ +import sys + +def parse(line): + a = line.split(sep=" ", maxsplit=1) + assert len(a) == 2 + fromto = a[0].split(sep="-") + assert len(fromto) == 2 + (from_, to) = (int(fromto[0]), int(fromto[1])) + charpass = a[1].split(sep=": ") + assert len(charpass) == 2 + char = charpass[0] + assert len(char) == 1 + pass_ = charpass[1] + assert pass_.endswith("\n") + pass_ = pass_[:-1] + return { + "from": from_, + "to": to, + "char": char, + "pass": pass_ + } + +def char_in_pass(char, pass_): + return pass_.count(char) + +def validate_01(entry): + no = char_in_pass(entry["char"], entry["pass"]) + if no < entry["from"]: + return { "too-small": entry } + elif no > entry["to"]: + return { "too-big": entry } + else: + return { "ok": entry } + +def char_at_pos(char, pos, pass_): + assert pos <= len(pass_) + return pass_[pos-1] == char + +def validate_02(entry): + one = char_at_pos(entry["char"], entry["from"], entry["pass"]) + two = char_at_pos(entry["char"], entry["to"], entry["pass"]) + if one and two: + return { "both": entry } + elif one: + return { "one": entry } + elif two: + return { "two": entry } + else: + return { "none": entry } + + +res01 = [] +res02 = [] +with open("./input", 'r') as f: + for line in f: + p = parse(line) + res01.append(validate_01(p)) + res02.append(validate_02(p)) + +count01=0 +for r in res01: + print(r) + if r.get("ok", False): + count01=count01+1 + +count02=0 +for r in res02: + print(r) + if r.get("one", False): + count02=count02+1 + elif r.get("two", False): + count02=count02+1 + else: + pass + +print("count 1: {}".format(count01)) +print("count 2: {}".format(count02)) diff --git a/users/Profpatsch/advent-of-code/2020/03/main.py b/users/Profpatsch/advent-of-code/2020/03/main.py new file mode 100644 index 000000000000..4d6baf946c3e --- /dev/null +++ b/users/Profpatsch/advent-of-code/2020/03/main.py @@ -0,0 +1,66 @@ +import itertools +import math + +def tree_line(init): + return { + "init-len": len(init), + "known": '', + "rest": itertools.repeat(init) + } + +def tree_line_at(pos, tree_line): + needed = (pos + 1) - len(tree_line["known"]) + # internally advance the tree line to the position requested + if needed > 0: + tree_line["known"] = tree_line["known"] \ + + ''.join( + itertools.islice( + tree_line["rest"], + 1+math.floor(needed / tree_line["init-len"]))) + # print(tree_line) + return tree_line["known"][pos] == '#' + +def tree_at(linepos, pos, trees): + return tree_line_at(pos, trees[linepos]) + +def 
slope_positions(trees, right, down): + line = 0 + pos = 0 + while line < len(trees): + yield (line, pos) + line = line + down + pos = pos + right + +trees = [] +with open("./input", 'r') as f: + for line in f: + line = line.rstrip() + trees.append(tree_line(line)) + +# print(list(itertools.islice(trees[0], 5))) +# print(list(map( +# lambda x: tree_at(0, x, trees), +# range(100) +# ))) +# print(list(slope_positions(trees, right=3, down=1))) + +def count_slope_positions(trees, slope): + count = 0 + for (line, pos) in slope: + if tree_at(line, pos, trees): + count = count + 1 + return count + +print( + count_slope_positions(trees, slope_positions(trees, right=1, down=1)) + * + count_slope_positions(trees, slope_positions(trees, right=3, down=1)) + * + count_slope_positions(trees, slope_positions(trees, right=5, down=1)) + * + count_slope_positions(trees, slope_positions(trees, right=7, down=1)) + * + count_slope_positions(trees, slope_positions(trees, right=1, down=2)) +) + +# I realized I could have just used a modulo instead … diff --git a/users/Profpatsch/advent-of-code/2020/04/main.py b/users/Profpatsch/advent-of-code/2020/04/main.py new file mode 100644 index 000000000000..36bbed7146d6 --- /dev/null +++ b/users/Profpatsch/advent-of-code/2020/04/main.py @@ -0,0 +1,104 @@ +import sys +import itertools +import re +import pprint + +def get_entry(fd): + def to_dict(keyval): + res = {} + for (k, v) in keyval: + assert k not in res + res[k] = v + return res + + res = [] + for line in fd: + if line == "\n": + yield to_dict(res) + res = [] + else: + line = line.rstrip() + items = line.split(" ") + for i in items: + res.append(i.split(":", maxsplit=2)) + +def val_hgt(hgt): + m = re.fullmatch(r'([0-9]+)(cm|in)', hgt) + if m: + (i, what) = m.group(1,2) + i = int(i) + if what == "cm": + return i >= 150 and i <= 193 + elif what == "in": + return i >= 59 and i <= 76 + else: + return False + +required_fields = [ + { "name": "byr", + "check": lambda s: int(s) >= 1920 and int(s) <= 2002 + }, + { "name": "iyr", + "check": lambda s: int(s) >= 2010 and int(s) <= 2020 + }, + { "name": "eyr", + "check": lambda s: int(s) >= 2020 and int(s) <= 2030, + }, + { "name": "hgt", + "check": lambda s: val_hgt(s) + }, + { "name": "hcl", + "check": lambda s: re.fullmatch(r'#[0-9a-f]{6}', s) + }, + { "name": "ecl", + "check": lambda s: re.fullmatch(r'amb|blu|brn|gry|grn|hzl|oth', s) + }, + { "name": "pid", + "check": lambda s: re.fullmatch(r'[0-9]{9}', s) + }, + # we should treat it as not required + # "cid" +] + +required_dict = {} +for f in required_fields: + required_dict[f["name"]] = f + +def validate(keyval): + if keyval[0] not in required_dict: + return { "ok": keyval } + if required_dict[keyval[0]]["check"](keyval[1]): + return { "ok": keyval } + else: + return { "validation": keyval } + +def all_fields(entry): + missing = [] + for r in required_dict: + if r not in e: + missing.append(r) + if missing == []: + return { "ok": entry } + else: + return { "missing": missing } + +count=0 +for e in get_entry(sys.stdin): + a = all_fields(e) + if a.get("ok", False): + res = {} + bad = False + for keyval in e.items(): + r = validate(keyval) + if r.get("validation", False): + bad = True + res[keyval[0]] = r + if bad: + pprint.pprint({ "validation": res }) + else: + pprint.pprint({ "ok": e }) + count = count+1 + else: + pprint.pprint(a) + +print(count) diff --git a/users/Profpatsch/arglib/default.nix b/users/Profpatsch/arglib/default.nix new file mode 100644 index 000000000000..b263654ac3e0 --- /dev/null +++ 
b/users/Profpatsch/arglib/default.nix @@ -0,0 +1,44 @@ +{ depot, pkgs, lib, ... }: + +let + netencode = { + rust = depot.users.Profpatsch.writers.rustSimpleLib { + name = "arglib-netencode"; + dependencies = [ + depot.users.Profpatsch.execline.exec-helpers + depot.users.Profpatsch.netencode.netencode-rs + ]; + } '' + extern crate netencode; + extern crate exec_helpers; + + use netencode::{T}; + use std::os::unix::ffi::OsStrExt; + + pub fn arglib_netencode(prog_name: &str, env: Option<&std::ffi::OsStr>) -> T { + let env = match env { + None => std::ffi::OsStr::from_bytes("ARGLIB_NETENCODE".as_bytes()), + Some(a) => a + }; + let t = match std::env::var_os(env) { + None => exec_helpers::die_user_error(prog_name, format!("could not read args, envvar {} not set", env.to_string_lossy())), + // TODO: good error handling for the different parser errors + Some(soup) => match netencode::parse::t_t(soup.as_bytes()) { + Ok((remainder, t)) => match remainder.is_empty() { + true => t, + false => exec_helpers::die_environment_problem(prog_name, format!("arglib: there was some unparsed bytes remaining: {:?}", remainder)) + }, + Err(err) => exec_helpers::die_environment_problem(prog_name, format!("arglib parsing error: {:?}", err)) + } + }; + std::env::remove_var(env); + t + } + ''; + }; + +in { + inherit + netencode + ; +} diff --git a/users/Profpatsch/emacs-tree-sitter-move/default.nix b/users/Profpatsch/emacs-tree-sitter-move/default.nix new file mode 100644 index 000000000000..fdc059c089b6 --- /dev/null +++ b/users/Profpatsch/emacs-tree-sitter-move/default.nix @@ -0,0 +1,3 @@ +# nothing yet (TODO: expose shell & tool) +{...}: +{} diff --git a/users/Profpatsch/emacs-tree-sitter-move/shell.nix b/users/Profpatsch/emacs-tree-sitter-move/shell.nix new file mode 100644 index 000000000000..81d622ac73e5 --- /dev/null +++ b/users/Profpatsch/emacs-tree-sitter-move/shell.nix @@ -0,0 +1,16 @@ +{ pkgs ? import ../../../third_party {}, ... 
}: +let + inherit (pkgs) lib; + + treeSitterGrammars = pkgs.runCommandLocal "grammars" {} '' + mkdir -p $out/bin + ${lib.concatStringsSep "\n" + (lib.mapAttrsToList (name: src: "ln -s ${src}/parser $out/bin/${name}.so") pkgs.tree-sitter.builtGrammars)}; + ''; + +in pkgs.mkShell { + buildInputs = [ + pkgs.tree-sitter.builtGrammars.python + ]; + TREE_SITTER_GRAMMAR_DIR = treeSitterGrammars; +} diff --git a/users/Profpatsch/emacs-tree-sitter-move/test.json b/users/Profpatsch/emacs-tree-sitter-move/test.json new file mode 100644 index 000000000000..d9f8075976d6 --- /dev/null +++ b/users/Profpatsch/emacs-tree-sitter-move/test.json @@ -0,0 +1,14 @@ +{ + "foo": { + "x": [ 1, 2, 3, 4 ], + "bar": "test" + }, + "foo": { + "x": [ 1, 2, 3, 4 ], + "bar": "test" + }, + "foo": { + "x": [ 1, 2, 3, 4 ], + "bar": "test" + } +} diff --git a/users/Profpatsch/emacs-tree-sitter-move/test.py b/users/Profpatsch/emacs-tree-sitter-move/test.py new file mode 100644 index 000000000000..0f57bae035da --- /dev/null +++ b/users/Profpatsch/emacs-tree-sitter-move/test.py @@ -0,0 +1,13 @@ +(4 + 5 + 5) + +def foo(a, b, c) + +def bar(a, b): + 4 + 4 + 4 + +[1, 4, 5, 10] + +def foo(): + pass diff --git a/users/Profpatsch/emacs-tree-sitter-move/test.sh b/users/Profpatsch/emacs-tree-sitter-move/test.sh new file mode 100644 index 000000000000..681081f5909d --- /dev/null +++ b/users/Profpatsch/emacs-tree-sitter-move/test.sh @@ -0,0 +1,14 @@ +function foo () { + local x=123 +} + +function bar () { + local x=123 +} + +echo abc def \ + gef gef + +printf \ + "%s\n" \ + haha diff --git a/users/Profpatsch/emacs-tree-sitter-move/tmp.el b/users/Profpatsch/emacs-tree-sitter-move/tmp.el new file mode 100644 index 000000000000..88d13fa45b81 --- /dev/null +++ b/users/Profpatsch/emacs-tree-sitter-move/tmp.el @@ -0,0 +1,28 @@ +(defun tree-sitter-load-from-grammar-dir (grammar-dir sym lang-name) + (tree-sitter-load + sym + (format "%s/bin/%s" + (getenv grammar-dir) + lang-name))) + +(defun tree-sitter-init-tmp-langs (alist) + (mapcar + (lambda (lang) + (pcase-let ((`(,name ,sym ,mode) lang)) + (tree-sitter-load-from-grammar-dir "TREE_SITTER_GRAMMAR_DIR" sym name) + (cons mode sym))) + alist)) + + +(setq tree-sitter-major-mode-language-alist + (tree-sitter-init-tmp-langs + '(("python" python python-mode) + ("json" json js-mode) + ("bash" bash sh-mode) + ))) + +(define-key evil-normal-state-map (kbd "C-.") #'tree-sitter-move-reset) +(define-key evil-normal-state-map (kbd "C-<right>") #'tree-sitter-move-right) +(define-key evil-normal-state-map (kbd "C-<left>") #'tree-sitter-move-left) +(define-key evil-normal-state-map (kbd "C-<up>") #'tree-sitter-move-up) +(define-key evil-normal-state-map (kbd "C-<down>") #'tree-sitter-move-down) diff --git a/users/Profpatsch/emacs-tree-sitter-move/tree-sitter-move.el b/users/Profpatsch/emacs-tree-sitter-move/tree-sitter-move.el new file mode 100644 index 000000000000..907e1e4081bc --- /dev/null +++ b/users/Profpatsch/emacs-tree-sitter-move/tree-sitter-move.el @@ -0,0 +1,139 @@ +;; this is not an actual cursor, just a node. +;; It’s not super efficient, but cursors can’t be *set* to an arbitrary +;; subnode, because they can’t access the parent otherwise. +;; We’d need a way to reset the cursor and walk down to the node?! +(defvar-local tree-sitter-move--cursor nil + "the buffer-local cursor used for movement") + +(defvar-local tree-sitter-move--debug-overlay nil + "an overlay used to visually display the region currently marked by the cursor") + +;;;;; TODO: should everything use named nodes? Only some things? 
+;;;;; maybe there should be a pair of functions for everything? +;;;;; For now restrict to named nodes. + +(defun tree-sitter-move--setup () + ;; TODO + (progn + ;; TODO: if tree-sitter-mode fails to load, display a better error + (tree-sitter-mode t) + (setq tree-sitter-move--cursor (tsc-root-node tree-sitter-tree)) + (add-variable-watcher + 'tree-sitter-move--cursor + #'tree-sitter-move--debug-overlay-update))) + +(defun tree-sitter-move--debug-overlay-update (sym newval &rest _args) + "variable-watcher to update the debug overlay when the cursor changes" + (let ((start (tsc-node-start-position newval)) + (end (tsc-node-end-position newval))) + (symbol-macrolet ((o tree-sitter-move--debug-overlay)) + (if o + (move-overlay o start end) + (setq o (make-overlay start end)) + (overlay-put o 'face 'highlight) + )))) + +(defun tree-sitter-move--debug-overlay-teardown () + "Turn of the overlay visibility and delete the overlay object" + (when tree-sitter-move--debug-overlay + (delete-overlay tree-sitter-move--debug-overlay) + (setq tree-sitter-move--debug-overlay nil))) + +(defun tree-sitter-move--teardown () + (setq tree-sitter-move--cursor nil) + (tree-sitter-move--debug-overlay-teardown) + (tree-sitter-mode nil)) + +;; Get the syntax node the cursor is on. +(defun tsc-get-named-node-at-point () + (let ((p (point))) + (tsc-get-named-descendant-for-position-range + (tsc-root-node tree-sitter-tree) p p))) + +;; TODO: is this function necessary? +;; Maybe tree-sitter always guarantees that parents are named? +(defun tsc-get-named-parent (node) + (when-let ((parent (tsc-get-parent node))) + (while (and parent (not (tsc-node-named-p parent))) + (setq parent (tsc-get-parent parent))) + parent)) + +(defun tsc-get-first-named-node-with-siblings-up (node) + "Returns the first 'upwards' node that has siblings. That includes the current + node, so if the given node has siblings, it is returned. Returns nil if there + is no such node until the root" + (when-let ((has-siblings-p + (lambda (parent-node) + (> (tsc-count-named-children parent-node) + 1))) + (cur node) + (parent (tsc-get-named-parent node))) + (while (and parent (not (funcall has-siblings-p parent))) + (setq cur parent) + (setq parent (tsc-get-named-parent cur))) + cur)) + +(defun tree-sitter-move--set-cursor-to-node (node) + (setq tree-sitter-move--cursor node)) + +(defun tree-sitter-move--set-cursor-to-node-at-point () + (tree-sitter-move--set-cursor-to-node (tsc-get-named-node-at-point))) + +(defun tree-sitter-move--move-point-to-node (node) + (set-window-point + (selected-window) + (tsc-node-start-position node))) + + +;; interactive commands (“do what I expect” section) + +(defun tree-sitter-move-reset () + (interactive) + (tree-sitter-move--set-cursor-to-node-at-point)) + +(defun tree-sitter-move-right () + (interactive) + (tree-sitter-move--move-skip-non-sibling-nodes 'tsc-get-next-named-sibling)) + +(defun tree-sitter-move-left () + (interactive) + (tree-sitter-move--move-skip-non-sibling-nodes 'tsc-get-prev-named-sibling)) + +(defun tree-sitter-move-up () + (interactive) + (tree-sitter-move--move-skip-non-sibling-nodes 'tsc-get-parent)) + +;; TODO: does not skip siblings yet, because the skip function only goes up (not down) +(defun tree-sitter-move-down () + (interactive) + (tree-sitter-move--move-if-possible (lambda (n) (tsc-get-nth-named-child n 0)))) + +(defun tree-sitter-move--move-skip-non-sibling-nodes (move-fn) + "Moves to the sidewards next sibling. 
If the current node does not have siblings, go + upwards until something has siblings and then move to the side (right or left)." + (tree-sitter-move--move-if-possible + (lambda (cur) + (when-let ((with-siblings + (tsc-get-first-named-node-with-siblings-up cur))) + (funcall move-fn with-siblings))))) + +(defun tree-sitter-move--move-if-possible (dir-fn) + (let ((next (funcall dir-fn tree-sitter-move--cursor))) + (when next + (tree-sitter-move--set-cursor-to-node next) + (tree-sitter-move--move-point-to-node next)))) + +; mostly stolen from tree-sitter-mode +;;;###autoload +(define-minor-mode tree-sitter-move-mode + "Minor mode to do cursor movements via tree-sitter" + :init-value nil + :lighter " tree-sitter-move" + (if tree-sitter-move-mode + (tree-sitter--error-protect + (progn + (tree-sitter-move--setup)) + (setq tree-sitter-move-mode nil) + (tree-sitter-move--teardown)) + (lambda ()) + (tree-sitter-move--teardown))) diff --git a/users/Profpatsch/execline/default.nix b/users/Profpatsch/execline/default.nix new file mode 100644 index 000000000000..852fcfcfa005 --- /dev/null +++ b/users/Profpatsch/execline/default.nix @@ -0,0 +1,12 @@ +{ depot, pkgs, lib, ... }: + +let + exec-helpers = depot.users.Profpatsch.writers.rustSimpleLib { + name = "exec-helpers"; + } (builtins.readFile ./exec_helpers.rs); + +in { + inherit + exec-helpers + ; +} diff --git a/users/Profpatsch/execline/exec_helpers.rs b/users/Profpatsch/execline/exec_helpers.rs new file mode 100644 index 000000000000..b9e1f5797386 --- /dev/null +++ b/users/Profpatsch/execline/exec_helpers.rs @@ -0,0 +1,113 @@ +use std::os::unix::process::CommandExt; +use std::ffi::OsStr; +use std::os::unix::ffi::{OsStringExt, OsStrExt}; + +pub fn no_args(current_prog_name: &str) -> () { + let mut args = std::env::args_os(); + // remove argv[0] + let _ = args.nth(0); + if args.len() > 0 { + die_user_error(current_prog_name, format!("Expected no arguments, got {:?}", args.collect::<Vec<_>>())) + } +} + +pub fn args(current_prog_name: &str, no_of_positional_args: usize) -> Vec<Vec<u8>> { + let mut args = std::env::args_os(); + // remove argv[0] + let _ = args.nth(0); + if args.len() != no_of_positional_args { + die_user_error(current_prog_name, format!("Expected {} arguments, got {}, namely {:?}", no_of_positional_args, args.len(), args.collect::<Vec<_>>())) + } + args.map(|arg| arg.into_vec()).collect() +} + +pub fn args_for_exec(current_prog_name: &str, no_of_positional_args: usize) -> (Vec<Vec<u8>>, Vec<Vec<u8>>) { + let mut args = std::env::args_os(); + // remove argv[0] + let _ = args.nth(0); + let mut args = args.map(|arg| arg.into_vec()); + let mut pos_args = vec![]; + // get positional args + for i in 1..no_of_positional_args+1 { + pos_args.push( + args.nth(0).expect( + &format!("{}: expects {} positional args, only got {}", current_prog_name, no_of_positional_args, i)) + ); + } + // prog... is the rest of the iterator + let prog : Vec<Vec<u8>> = args.collect(); + (pos_args, prog) +} + +pub fn exec_into_args<'a, 'b, Args, Arg, Env, Key, Val>(current_prog_name: &str, args: Args, env_additions: Env) -> ! + where + Args: IntoIterator<Item = Arg>, + Arg: AsRef<[u8]>, + Env: IntoIterator<Item = (Key, Val)>, + Key: AsRef<[u8]>, + Val: AsRef<[u8]>, +{ + // TODO: is this possible without collecting into a Vec first, just leaving it an IntoIterator? 
+ let args = args.into_iter().collect::<Vec<Arg>>(); + let mut args = args.iter().map(|v| OsStr::from_bytes(v.as_ref())); + let prog = args.nth(0).expect(&format!("{}: first argument must be an executable", current_prog_name)); + // TODO: same here + let env = env_additions.into_iter().collect::<Vec<(Key, Val)>>(); + let env = env.iter().map(|(k,v)| (OsStr::from_bytes(k.as_ref()), OsStr::from_bytes(v.as_ref()))); + let err = std::process::Command::new(prog).args(args).envs(env).exec(); + die_missing_executable(current_prog_name, format!("exec failed: {}, while trying to execing into {:?}", err, prog)); +} + +/// Exit 1 to signify a generic expected error +/// (e.g. something that sometimes just goes wrong, like a nix build). +pub fn die_expected_error<S>(current_prog_name: &str, msg: S) -> ! +where S: AsRef<str> +{ + die_with(1, current_prog_name, msg) +} + +/// Exit 100 to signify a user error (“the user is holding it wrong”). +/// This is a permanent error, if the program is executed the same way +/// it should crash with 100 again. +pub fn die_user_error<S>(current_prog_name: &str, msg: S) -> ! +where S: AsRef<str> +{ + die_with(100, current_prog_name, msg) +} + +/// Exit 101 to signify an unexpected crash (failing assertion or panic). +/// This is the same exit code that `panic!()` emits. +pub fn die_panic<S>(current_prog_name: &str, msg: S) -> ! +where S: AsRef<str> +{ + die_with(101, current_prog_name, msg) +} + +/// Exit 111 to signify a temporary error (such as resource exhaustion) +pub fn die_temporary<S>(current_prog_name: &str, msg: S) -> ! +where S: AsRef<str> +{ + die_with(111, current_prog_name, msg) +} + +/// Exit 126 to signify an environment problem +/// (the user has set up stuff incorrectly so the program cannot work) +pub fn die_environment_problem<S>(current_prog_name: &str, msg: S) -> ! +where S: AsRef<str> +{ + die_with(126, current_prog_name, msg) +} + +/// Exit 127 to signify a missing executable. +pub fn die_missing_executable<S>(current_prog_name: &str, msg: S) -> ! +where S: AsRef<str> +{ + die_with(127, current_prog_name, msg) +} + +fn die_with<S>(status: i32, current_prog_name: &str, msg: S) -> ! + where S: AsRef<str> +{ + eprintln!("{}: {}", current_prog_name, msg.as_ref()); + std::process::exit(status) +} diff --git a/users/Profpatsch/lib.nix b/users/Profpatsch/lib.nix new file mode 100644 index 000000000000..db81c2dfe321 --- /dev/null +++ b/users/Profpatsch/lib.nix @@ -0,0 +1,49 @@ +{ depot, pkgs, ... 
}: +let + bins = depot.nix.getBins pkgs.coreutils [ "printf" "echo" "cat" "printenv" ] + // depot.nix.getBins pkgs.fdtools [ "multitee" ] + ; + + debugExec = msg: depot.nix.writeExecline "debug-exec" {} [ + "if" [ + "fdmove" "-c" "1" "2" + "if" [ bins.printf "%s: " msg ] + "if" [ bins.echo "$@" ] + ] + "$@" + ]; + + eprintf = depot.nix.writeExecline "eprintf" {} [ + "fdmove" "-c" "1" "2" bins.printf "$@" + ]; + + eprint-stdin = depot.nix.writeExecline "eprint-stdin" {} [ + "pipeline" [ bins.multitee "0-1,2" ] "$@" + ]; + + eprintenv = depot.nix.writeExecline "eprintenv" { readNArgs = 1; } [ + "ifelse" [ "fdmove" "-c" "1" "2" bins.printenv "$1" ] + [ "$@" ] + "if" [ eprintf "eprintenv: could not find \"\${1}\" in the environment\n" ] + "$@" + ]; + + # remove everything but a few selected environment variables + runInEmptyEnv = keepVars: + let + importas = pkgs.lib.concatMap (var: [ "importas" "-i" var var ]) keepVars; + # we have to explicitely call export here, because PATH is probably empty + export = pkgs.lib.concatMap (var: [ "${pkgs.execline}/bin/export" var ''''${${var}}'' ]) keepVars; + in depot.nix.writeExecline "empty-env" {} + (importas ++ [ "emptyenv" ] ++ export ++ [ "${pkgs.execline}/bin/exec" "$@" ]); + + +in { + inherit + debugExec + eprintf + eprint-stdin + eprintenv + runInEmptyEnv + ; +} diff --git a/users/Profpatsch/netencode/README.md b/users/Profpatsch/netencode/README.md new file mode 100644 index 000000000000..3058e36eaf5c --- /dev/null +++ b/users/Profpatsch/netencode/README.md @@ -0,0 +1,111 @@ +# netencode 0.1-unreleased + +[bencode][] and [netstring][]-inspired pipe format that should be trivial go generate correctly in every context (only requires a `byte_length()` and a `printf()`), easy to parse (100 lines of code or less), mostly human-decipherable for easy debugging, and support nested record and sum types. + + +## scalars + +Scalars have the format `[type prefix][size]:[value],`. + +where size is a natural number without leading zeroes. + +### unit + +The unit (`u`) has only one value. + +* The unit is: `u,` + +### numbers + +Naturals (`n`) and Integers (`i`), with a maximum size in bits. + +Bit sizes are specified in 2^n increments, 1 to 9 (`n1`..`n9`, `i1`..`n9`). + +* Natural `1234` that fits in 32 bits (2^5): `n5:1234,` +* Integer `-42` that fits in 8 bits (2^3): `i3:-42,` +* Integer `23` that fits in 64 bits (2^6): `i6:23,` +* Integer `-1` that fits in 512 bits (2^9): `i9:-1,` +* Natural `0` that fits in 1 bit (2^1): `n1:0,` + +An implementation can define the biggest numbers it supports, and has to throw an error for anything bigger. It has to support everything smaller, so for example if you support up to i6/n6, you have to support 1–6 as well. An implementation could support up to the current architecture’s wordsize for example. + +Floats are not supported, you can implement fixed-size decimals or ratios using integers. + +### booleans + +A boolean is represented as `n1`. + +* `n1:0,`: false +* `n1:1,`: true + +TODO: should we add `f,` and `t,`? + +### text + +Text (`t`) that *must* be encoded as UTF-8, starting with its length in bytes: + +* The string `hello world` (11 bytes): `t11:hello world,` +* The string `今日は` (9 bytes): `t9:今日は,` +* The string `:,` (2 bytes): `t2::,,` +* The empty sting `` (0 bytes): `t0:,` + +### binary + +Arbitrary binary strings (`b`) that can contain any data, starting with its length in bytes. 
+ +* The ASCII string `hello world` as binary data (11 bytes): `b11:hello world,` +* The empty binary string (0 bytes): `b0:,` +* The bytestring with `^D` (1 byte): `b1:,` + +Since the binary strings are length-prefixd, they can contain `\0` and no escaping is required. Care has to be taken in languages with `\0`-terminated bytestrings. + +Use text (`t`) if you have utf-8 encoded data. + +## tagged values + +### tags + +A tag (`<`) gives a value a name. The tag is UTF-8 encoded, starting with its length in bytes and proceeding with the value. + +* The tag `foo` (3 bytes) tagging the text `hello` (5 bytes): `<3:foo|t5:hello,` +* The tag `` (0 bytes) tagging the 8-bit integer 0: `<0:|i3:0,` + +### records (products/records), also maps + +A record (`{`) is a concatenation of tags (`<`). It needs to be closed with `}`. +If tag names repeat the later ones should be ignored. Ordering does not matter. + +Similar to text, records start with the length of their *whole encoded content*, in bytes. This makes it possible to treat their contents as opaque bytestrings. + +* There is no empty record. (TODO: make the empty record the unit type, remove `u,`?) +* A record with one empty field, `foo`: `{9:<3:foo|u,}` +* A record with two fields, `foo` and `x`: `{21:<3:foo|u,<1:x|t3:baz,}` +* The same record: `{21:<1:x|t3:baz,<3:foo|u,}` +* The same record (later occurences of fields are ignored): `{28:<1:x|t3:baz,<3:foo|u,<1:x|u,}` + +### sums (tagged unions) + +Simply a tagged value. The tag marker `<` indicates it is a sum if it appears outside of a record. + +## lists + +A list (`[`) imposes an ordering on a sequence of values. It needs to be closed with `]`. Values in it are simply concatenated. + +Similar to records, lists start with the length of their whole encoded content. + +* The empty list: `[0:]` +* The list with one element, the string `foo`: `[7:t3:foo,]` +* The list with text `foo` followed by i3 `-42`: `[14:t3:foo,i3:-42,]` +* The list with `Some` and `None` tags: `[33:<4:Some|t3:foo,<4None|u,<4None|u,]` + +## motivation + +TODO + +## guarantees + +TODO: do I want unique representation (bijection like bencode?) This would put more restrictions on the generator, like sorting records in lexicographic order, but would make it possible to compare without decoding + + +[bencode]: https://en.wikipedia.org/wiki/Bencode +[netstring]: https://en.wikipedia.org/wiki/Netstring diff --git a/users/Profpatsch/netencode/default.nix b/users/Profpatsch/netencode/default.nix new file mode 100644 index 000000000000..294e3b4395da --- /dev/null +++ b/users/Profpatsch/netencode/default.nix @@ -0,0 +1,136 @@ +{ depot, pkgs, lib, ... 
}: + +let + imports = { + inherit (depot.users.Profpatsch) + writers; + }; + + netencode-rs = imports.writers.testRustSimple + (imports.writers.rustSimpleLib { + name = "netencode"; + dependencies = [ + depot.users.Profpatsch.rust-crates.nom + depot.users.Profpatsch.execline.exec-helpers + ]; + release = false; + verbose = true; + } (builtins.readFile ./netencode.rs)); + + gen = import ./gen.nix { inherit lib; }; + + cfg-if = pkgs.buildRustCrate { + pname = "cfg-if"; + version = "1.0.0"; + crateName = "cfg-if"; + sha256 = "1fzidq152hnxhg4lj6r2gv4jpnn8yivp27z6q6xy7w6v0dp6bai9"; + }; + + log = pkgs.buildRustCrate { + pname = "log"; + version = "0.4.11"; + crateName = "log"; + sha256 = "0m6xhqxsps5mgd7r91g5mqkndbh8zbjd58p7w75r330zl4n40l07"; + dependencies = [ cfg-if ]; + }; + + serde_derive = pkgs.buildRustCrate { + pname = "serde"; + crateName = "serde"; + version = "1.0.123"; + sha256 = "05xl2s1vpf3p7fi2yc9qlzw88d5ap0z3qmhmd7axa6pp9pn1s5xc"; + }; + + serde = pkgs.buildRustCrate { + pname = "serde"; + crateName = "serde"; + version = "1.0.123"; + sha256 = "05xl2s1vpf3p7fi2yc9qlzw88d5ap0z3qmhmd7axa6pp9pn1s5xc"; + features = [ "std" ]; + }; + + mustache = pkgs.buildRustCrate { + pname = "mustache"; + version = "0.9.0"; + crateName = "mustache"; + sha256 = "1zgl8l15i19lzp90icgwyi6zqdd31b9vm8w129f41d1zd0hs7ayq"; + dependencies = [ log serde ]; + }; + + netencode-mustache = imports.writers.rustSimple { + name = "netencode_mustache"; + dependencies = [ + depot.users.Profpatsch.arglib.netencode.rust + netencode-rs + mustache + ]; + } (builtins.readFile ./netencode-mustache.rs); + + + record-get = imports.writers.rustSimple { + name = "record-get"; + dependencies = [ + netencode-rs + depot.users.Profpatsch.execline.exec-helpers + depot.users.Profpatsch.arglib.netencode.rust + ]; + } '' + extern crate netencode; + extern crate arglib_netencode; + extern crate exec_helpers; + use netencode::{encode, dec}; + use netencode::dec::{Decoder, DecodeError}; + + fn main() { + let mut buf = vec![]; + let args = exec_helpers::args("record-get", 1); + let field = match std::str::from_utf8(&args[0]) { + Ok(f) => f, + Err(_e) => exec_helpers::die_user_error("record-get", format!("The field name needs to be valid unicode")) + }; + let u = netencode::u_from_stdin_or_die_user_error("record-get", &mut buf); + match (dec::RecordDot {field, inner: dec::AnyU }).dec(u) { + Ok(u) => encode(&mut std::io::stdout(), &u).expect("encoding to stdout failed"), + Err(DecodeError(err)) => exec_helpers::die_user_error("record-get", err) + } + } + ''; + + record-splice-env = imports.writers.rustSimple { + name = "record-splice-env"; + dependencies = [ + netencode-rs + depot.users.Profpatsch.execline.exec-helpers + ]; + } '' + extern crate netencode; + extern crate exec_helpers; + use netencode::dec::{Record, Try, ScalarAsBytes, Decoder, DecodeError}; + + fn main() { + let mut buf = vec![]; + let u = netencode::u_from_stdin_or_die_user_error("record-splice-env", &mut buf); + let (_, prog) = exec_helpers::args_for_exec("record-splice-env", 0); + match Record(Try(ScalarAsBytes)).dec(u) { + Ok(map) => { + exec_helpers::exec_into_args( + "record-splice-env", + prog, + // some elements can’t be decoded as scalars, so just ignore them + map.into_iter().filter_map(|(k, v)| v.map(|v2| (k, v2))) + ); + }, + Err(DecodeError(err)) => exec_helpers::die_user_error("record-splice-env", err), + } + } + ''; + +in { + inherit + netencode-rs + netencode-mustache + record-get + record-splice-env + gen + ; +} diff --git 
a/users/Profpatsch/netencode/gen.nix b/users/Profpatsch/netencode/gen.nix new file mode 100644 index 000000000000..305ff7b08dd6 --- /dev/null +++ b/users/Profpatsch/netencode/gen.nix @@ -0,0 +1,69 @@ +{ lib }: +let + + netstring = tag: suffix: s: + "${tag}${toString (builtins.stringLength s)}:${s}${suffix}"; + + unit = "u,"; + + n1 = b: if b then "n1:1," else "n1:0,"; + + n = i: n: "n${toString i}:${toString n},"; + i = i: n: "i${toString i}:${toString n},"; + + n3 = n 3; + n6 = n 6; + n7 = n 7; + + i3 = i 3; + i6 = i 6; + i7 = i 7; + + text = netstring "t" ","; + binary = netstring "b" ","; + + tag = key: val: netstring "<" "|" key + val; + + concatStrings = builtins.concatStringsSep ""; + + record = lokv: netstring "{" "}" + (concatStrings (map ({key, val}: tag key val) lokv)); + + list = l: netstring "[" "]" (concatStrings l); + + dwim = val: + let match = { + "bool" = n1; + "int" = i6; + "string" = text; + "set" = attrs: + # it could be a derivation, then just return the path + if attrs.type or "" == "derivation" then text "${attrs}" + else + record (lib.mapAttrsToList + (k: v: { + key = k; + val = dwim v; + }) attrs); + "list" = l: list (map dwim l); + }; + in match.${builtins.typeOf val} val; + +in { + inherit + unit + n1 + n3 + n6 + n7 + i3 + i6 + i7 + text + binary + tag + record + list + dwim + ; +} diff --git a/users/Profpatsch/netencode/netencode-mustache.rs b/users/Profpatsch/netencode/netencode-mustache.rs new file mode 100644 index 000000000000..ee7bafed2250 --- /dev/null +++ b/users/Profpatsch/netencode/netencode-mustache.rs @@ -0,0 +1,53 @@ +extern crate netencode; +extern crate mustache; +extern crate arglib_netencode; + +use mustache::{Data}; +use netencode::{T}; +use std::collections::HashMap; +use std::os::unix::ffi::{OsStrExt}; +use std::io::{Read}; + +fn netencode_to_mustache_data_dwim(t: T) -> Data { + match t { + // TODO: good idea? 
+ T::Unit => Data::Null, + T::N1(b) => Data::Bool(b), + T::N3(u) => Data::String(u.to_string()), + T::N6(u) => Data::String(u.to_string()), + T::N7(u) => Data::String(u.to_string()), + T::I3(i) => Data::String(i.to_string()), + T::I6(i) => Data::String(i.to_string()), + T::I7(i) => Data::String(i.to_string()), + T::Text(s) => Data::String(s), + T::Binary(b) => unimplemented!(), + T::Sum(tag) => unimplemented!(), + T::Record(xs) => Data::Map( + xs.into_iter() + .map(|(key, val)| (key, netencode_to_mustache_data_dwim(val))) + .collect::<HashMap<_,_>>() + ), + T::List(xs) => Data::Vec( + xs.into_iter() + .map(|x| netencode_to_mustache_data_dwim(x)) + .collect::<Vec<_>>() + ), + } +} + +pub fn from_stdin() -> () { + let data = netencode_to_mustache_data_dwim( + arglib_netencode::arglib_netencode("netencode-mustache", Some(std::ffi::OsStr::new("TEMPLATE_DATA"))) + ); + let mut stdin = String::new(); + std::io::stdin().read_to_string(&mut stdin).unwrap(); + mustache::compile_str(&stdin) + .and_then(|templ| templ.render_data( + &mut std::io::stdout(), + &data + )).unwrap() +} + +pub fn main() { + from_stdin() +} diff --git a/users/Profpatsch/netencode/netencode.rs b/users/Profpatsch/netencode/netencode.rs new file mode 100644 index 000000000000..28003260925c --- /dev/null +++ b/users/Profpatsch/netencode/netencode.rs @@ -0,0 +1,776 @@ +extern crate nom; +extern crate exec_helpers; + +use std::collections::HashMap; +use std::io::{Write, Read}; +use std::fmt::{Display, Debug}; + +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum T { + // Unit + Unit, + // Boolean + N1(bool), + // Naturals + N3(u8), + N6(u64), + N7(u128), + // Integers + I3(i8), + I6(i64), + I7(i128), + // Text + // TODO: make into &str + Text(String), + // TODO: rename to Bytes + Binary(Vec<u8>), + // Tags + // TODO: make into &str + Sum(Tag<String, T>), + // TODO: make into &str + Record(HashMap<String, T>), + List(Vec<T>), +} + +impl T { + pub fn to_u<'a>(&'a self) -> U<'a> { + match self { + T::Unit => U::Unit, + T::N1(b) => U::N1(*b), + T::N3(u) => U::N3(*u), + T::N6(u) => U::N6(*u), + T::N7(u) => U::N7(*u), + T::I3(i) => U::I3(*i), + T::I6(i) => U::I6(*i), + T::I7(i) => U::I7(*i), + T::Text(t) => U::Text(t.as_str()), + T::Binary(v) => U::Binary(v), + T::Sum(Tag { tag, val }) => U::Sum( + Tag { tag: tag.as_str(), val: Box::new(val.to_u()) } + ), + T::Record(map) => U::Record( + map.iter().map(|(k, v)| (k.as_str(), v.to_u())).collect() + ), + T::List(l) => U::List( + l.iter().map(|v| v.to_u()).collect::<Vec<U<'a>>>() + ), + } + } + + pub fn encode<'a>(&'a self) -> Vec<u8> { + match self { + // TODO: don’t go via U, inefficient + o => o.to_u().encode() + } + } +} + +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum U<'a> { + Unit, + // Boolean + N1(bool), + // Naturals + N3(u8), + N6(u64), + N7(u128), + // Integers + I3(i8), + I6(i64), + I7(i128), + // Text + Text(&'a str), + Binary(&'a [u8]), + // Tags + // TODO: the U-recursion we do here means we can’t be breadth-lazy anymore + // like we originally planned; maybe we want to go `U<'a>` → `&'a [u8]` again? 
+ Sum(Tag<&'a str, U<'a>>), + Record(HashMap<&'a str, U<'a>>), + List(Vec<U<'a>>), +} + +impl<'a> U<'a> { + pub fn encode(&self) -> Vec<u8> { + let mut c = std::io::Cursor::new(vec![]); + encode(&mut c, self); + c.into_inner() + } + + pub fn to_t(&self) -> T { + match self { + U::Unit => T::Unit, + U::N1(b) => T::N1(*b), + U::N3(u) => T::N3(*u), + U::N6(u) => T::N6(*u), + U::N7(u) => T::N7(*u), + U::I3(i) => T::I3(*i), + U::I6(i) => T::I6(*i), + U::I7(i) => T::I7(*i), + U::Text(t) => T::Text((*t).to_owned()), + U::Binary(v) => T::Binary((*v).to_owned()), + U::Sum(Tag { tag, val }) => T::Sum( + Tag { tag: (*tag).to_owned(), val: Box::new(val.to_t()) } + ), + U::Record(map) => T::Record( + map.iter().map(|(k, v)| ((*k).to_owned(), v.to_t())).collect::<HashMap<String, T>>() + ), + U::List(l) => T::List( + l.iter().map(|v| v.to_t()).collect::<Vec<T>>() + ), + } + } +} + +#[derive(Debug, PartialEq, Eq, Clone)] +pub struct Tag<S, A> { + // TODO: make into &str + pub tag: S, + pub val: Box<A> +} + +impl<S, A> Tag<S, A> { + fn map<F, B>(self, f: F) -> Tag<S, B> + where F: Fn(A) -> B { + Tag { + tag: self.tag, + val: Box::new(f(*self.val)) + } + } +} + +fn encode_tag<W: Write>(w: &mut W, tag: &str, val: &U) -> std::io::Result<()> { + write!(w, "<{}:{}|", tag.len(), tag)?; + encode(w, val)?; + Ok(()) +} + +pub fn encode<W: Write>(w: &mut W, u: &U) -> std::io::Result<()> { + match u { + U::Unit => write!(w, "u,"), + U::N1(b) => if *b { write!(w, "n1:1,") } else { write!(w, "n1:0,") }, + U::N3(n) => write!(w, "n3:{},", n), + U::N6(n) => write!(w, "n6:{},", n), + U::N7(n) => write!(w, "n7:{},", n), + U::I3(i) => write!(w, "i3:{},", i), + U::I6(i) => write!(w, "i6:{},", i), + U::I7(i) => write!(w, "i7:{},", i), + U::Text(s) => { + write!(w, "t{}:", s.len()); + w.write(s.as_bytes()); + write!(w, ",") + } + U::Binary(s) => { + write!(w, "b{}:", s.len()); + w.write(&s); + write!(w, ",") + }, + U::Sum(Tag{tag, val}) => encode_tag(w, tag, val), + U::Record(m) => { + let mut c = std::io::Cursor::new(vec![]); + for (k, v) in m { + encode_tag(&mut c, k, v)?; + } + write!(w, "{{{}:", c.get_ref().len())?; + w.write(c.get_ref())?; + write!(w, "}}") + }, + U::List(l) => { + let mut c = std::io::Cursor::new(vec![]); + for u in l { + encode(&mut c, u)?; + } + write!(w, "[{}:", c.get_ref().len())?; + w.write(c.get_ref())?; + write!(w, "]") + } + } +} + +pub fn text(s: String) -> T { + T::Text(s) +} + +pub fn u_from_stdin_or_die_user_error<'a>(prog_name: &'_ str, stdin_buf: &'a mut Vec<u8>) -> U<'a> { + std::io::stdin().lock().read_to_end(stdin_buf); + let u = match parse::u_u(stdin_buf) { + Ok((rest, u)) => match rest { + b"" => u, + _ => exec_helpers::die_user_error(prog_name, format!("stdin contained some soup after netencode value: {:?}", rest)) + }, + Err(err) => exec_helpers::die_user_error(prog_name, format!("unable to parse netencode from stdin: {:?}", err)) + }; + u +} + +pub mod parse { + use super::{T, Tag, U}; + + use std::str::FromStr; + use std::ops::Neg; + use std::collections::HashMap; + + use nom::{IResult}; + use nom::bytes::complete::{tag, take}; + use nom::branch::{alt}; + use nom::character::complete::{digit1, char}; + use nom::sequence::{tuple}; + use nom::combinator::{map, map_res, flat_map, map_parser, opt}; + use nom::error::{context, ErrorKind, ParseError}; + + fn unit_t(s: &[u8]) -> IResult<&[u8], ()> { + let (s, _) = context("unit", tag("u,"))(s)?; + Ok((s, ())) + } + + fn usize_t(s: &[u8]) -> IResult<&[u8], usize> { + context( + "usize", + map_res( + map_res(digit1, |n| 
std::str::from_utf8(n)), + |s| s.parse::<usize>()) + )(s) + } + + fn sized(begin: char, end: char) -> impl Fn(&[u8]) -> IResult<&[u8], &[u8]> { + move |s: &[u8]| { + let (s, (_, len, _)) = tuple(( + char(begin), + usize_t, + char(':') + ))(s)?; + let (s, (res, _)) = tuple(( + take(len), + char(end) + ))(s)?; + Ok((s, res)) + } + } + + + fn uint_t<'a, I: FromStr + 'a>(t: &'static str) -> impl Fn(&'a [u8]) -> IResult<&'a [u8], I> { + move |s: &'a [u8]| { + let (s, (_, _, int, _)) = tuple(( + tag(t.as_bytes()), + char(':'), + map_res( + map_res(digit1, |n: &[u8]| std::str::from_utf8(n)), + |s| s.parse::<I>() + ), + char(',') + ))(s)?; + Ok((s, int)) + } + } + + fn bool_t<'a>() -> impl Fn(&'a [u8]) -> IResult<&'a [u8], bool> { + context("bool", alt(( + map(tag("n1:0,"), |_| false), + map(tag("n1:1,"), |_| true), + ))) + } + + fn int_t<'a, I: FromStr + Neg<Output=I>>(t: &'static str) -> impl Fn(&'a [u8]) -> IResult<&[u8], I> { + context( + t, + move |s: &'a [u8]| { + let (s, (_, _, neg, int, _)) = tuple(( + tag(t.as_bytes()), + char(':'), + opt(char('-')), + map_res( + map_res(digit1, |n: &[u8]| std::str::from_utf8(n)), + |s| s.parse::<I>() + ), + char(',') + ))(s)?; + let res = match neg { + Some(_) => -int, + None => int, + }; + Ok((s, res)) + } + ) + } + + fn tag_t(s: &[u8]) -> IResult<&[u8], Tag<String, T>> { + // recurses into the main parser + map(tag_g(t_t), + |Tag {tag, val}| + Tag { + tag: tag.to_string(), + val + })(s) + } + + fn tag_g<'a, P, O>(inner: P) -> impl Fn(&'a [u8]) -> IResult<&'a [u8], Tag<&'a str, O>> + where + P: Fn(&'a [u8]) -> IResult<&'a [u8], O> + { + move |s: &[u8]| { + let (s, tag) = sized('<', '|')(s)?; + let (s, val) = inner(s)?; + Ok((s, Tag { + tag: std::str::from_utf8(tag) + .map_err(|_| nom::Err::Failure((s, ErrorKind::Char)))?, + val: Box::new(val) + })) + + } + } + + /// parse text scalar (`t5:hello,`) + fn text(s: &[u8]) -> IResult<&[u8], T> { + let (s, res) = text_g(s)?; + Ok((s, T::Text(res.to_string()))) + } + + fn text_g(s: &[u8]) -> IResult<&[u8], &str> { + let (s, res) = sized('t', ',')(s)?; + Ok((s, + std::str::from_utf8(res) + .map_err(|_| nom::Err::Failure((s, ErrorKind::Char)))?, + )) + } + + fn binary<'a>() -> impl Fn(&'a [u8]) -> IResult<&'a [u8], T> { + map(binary_g(), |b| T::Binary(b.to_owned())) + } + + fn binary_g() -> impl Fn(&[u8]) -> IResult<&[u8], &[u8]> { + sized('b', ',') + } + + fn list_t(s: &[u8]) -> IResult<&[u8], Vec<T>> { + list_g(t_t)(s) + } + + fn list_g<'a, P, O>(inner: P) -> impl Fn(&'a [u8]) -> IResult<&'a [u8], Vec<O>> + where + O: Clone, + P: Fn(&'a [u8]) -> IResult<&'a [u8], O> + { + map_parser( + sized('[', ']'), + nom::multi::many0(inner) + ) + } + + fn record_t<'a>(s: &'a [u8]) -> IResult<&'a [u8], HashMap<String, T>> { + let (s, r) = record_g(t_t)(s)?; + Ok((s, + r.into_iter() + .map(|(k, v)| (k.to_string(), v)) + .collect::<HashMap<_,_>>())) + } + + fn record_g<'a, P, O>(inner: P) -> impl Fn(&'a [u8]) -> IResult<&'a [u8], HashMap<&'a str, O>> + where + O: Clone, + P: Fn(&'a [u8]) -> IResult<&'a [u8], O> + { + map_parser( + sized('{', '}'), + nom::multi::fold_many1( + tag_g(inner), + HashMap::new(), + |mut acc: HashMap<_,_>, Tag { tag, mut val }| { + // ignore duplicated tag names that appear later + // according to netencode spec + if ! 
acc.contains_key(tag) { + acc.insert(tag, *val); + } + acc + } + ) + ) + } + + pub fn u_u(s: &[u8]) -> IResult<&[u8], U> { + alt(( + map(text_g, U::Text), + map(binary_g(), U::Binary), + map(unit_t, |()| U::Unit), + map(tag_g(u_u), |t| U::Sum(t)), + map(list_g(u_u), U::List), + map(record_g(u_u), U::Record), + + map(bool_t(), |u| U::N1(u)), + map(uint_t("n3"), |u| U::N3(u)), + map(uint_t("n6"), |u| U::N6(u)), + map(uint_t("n7"), |u| U::N7(u)), + map(int_t("i3"), |u| U::I3(u)), + map(int_t("i6"), |u| U::I6(u)), + map(int_t("i7"), |u| U::I7(u)), + + // less common + map(uint_t("n2"), |u| U::N3(u)), + map(uint_t("n4"), |u| U::N6(u)), + map(uint_t("n5"), |u| U::N6(u)), + map(int_t("i1"), |u| U::I3(u)), + map(int_t("i2"), |u| U::I3(u)), + map(int_t("i4"), |u| U::I6(u)), + map(int_t("i5"), |u| U::I6(u)), + // TODO: 8, 9 not supported + ))(s) + } + + pub fn t_t(s: &[u8]) -> IResult<&[u8], T> { + alt(( + text, + binary(), + map(unit_t, |_| T::Unit), + map(tag_t, |t| T::Sum(t)), + map(list_t, |l| T::List(l)), + map(record_t, |p| T::Record(p)), + + map(bool_t(), |u| T::N1(u)), + // 8, 64 and 128 bit + map(uint_t("n3"), |u| T::N3(u)), + map(uint_t("n6"), |u| T::N6(u)), + map(uint_t("n7"), |u| T::N7(u)), + map(int_t("i3"), |u| T::I3(u)), + map(int_t("i6"), |u| T::I6(u)), + map(int_t("i7"), |u| T::I7(u)), + + // less common + map(uint_t("n2"), |u| T::N3(u)), + map(uint_t("n4"), |u| T::N6(u)), + map(uint_t("n5"), |u| T::N6(u)), + map(int_t("i1"), |u| T::I3(u)), + map(int_t("i2"), |u| T::I3(u)), + map(int_t("i4"), |u| T::I6(u)), + map(int_t("i5"), |u| T::I6(u)), + // TODO: 8, 9 not supported + ))(s) + } + + #[cfg(test)] + mod tests { + use super::*; + + #[test] + fn test_parse_unit_t() { + assert_eq!( + unit_t("u,".as_bytes()), + Ok(("".as_bytes(), ())) + ); + } + + #[test] + fn test_parse_bool_t() { + assert_eq!( + bool_t()("n1:0,".as_bytes()), + Ok(("".as_bytes(), false)) + ); + assert_eq!( + bool_t()("n1:1,".as_bytes()), + Ok(("".as_bytes(), true)) + ); + } + + #[test] + fn test_parse_usize_t() { + assert_eq!( + usize_t("32foo".as_bytes()), + Ok(("foo".as_bytes(), 32)) + ); + } + + #[test] + fn test_parse_int_t() { + assert_eq!( + uint_t::<u8>("n3")("n3:42,abc".as_bytes()), + Ok(("abc".as_bytes(), 42)) + ); + assert_eq!( + uint_t::<u8>("n3")("n3:1024,abc".as_bytes()), + Err(nom::Err::Error(("1024,abc".as_bytes(), nom::error::ErrorKind::MapRes))) + ); + assert_eq!( + int_t::<i64>("i6")("i6:-23,abc".as_bytes()), + Ok(("abc".as_bytes(), -23)) + ); + assert_eq!( + int_t::<i128>("i3")("i3:0,:abc".as_bytes()), + Ok((":abc".as_bytes(), 0)) + ); + assert_eq!( + uint_t::<u8>("n7")("n7:09,".as_bytes()), + Ok(("".as_bytes(), 9)) + ); + // assert_eq!( + // length("c"), + // Err(nom::Err::Error(("c", nom::error::ErrorKind::Digit))) + // ); + // assert_eq!( + // length(":"), + // Err(nom::Err::Error((":", nom::error::ErrorKind::Digit))) + // ); + } + + #[test] + fn test_parse_text() { + assert_eq!( + text("t5:hello,".as_bytes()), + Ok(("".as_bytes(), T::Text("hello".to_owned()))) + ); + assert_eq!( + text("t4:fo,".as_bytes()), + // TODO: way better parse error messages + Err(nom::Err::Error(("fo,".as_bytes(), nom::error::ErrorKind::Eof))) + ); + assert_eq!( + text("t9:今日は,".as_bytes()), + Ok(("".as_bytes(), T::Text("今日は".to_owned()))) + ); + } + + #[test] + fn test_parse_binary() { + assert_eq!( + binary()("b5:hello,".as_bytes()), + Ok(("".as_bytes(), T::Binary(Vec::from("hello".to_owned())))) + ); + assert_eq!( + binary()("b4:fo,".as_bytes()), + // TODO: way better parse error messages + 
Err(nom::Err::Error(("fo,".as_bytes(), nom::error::ErrorKind::Eof))) + ); + assert_eq!( + binary()("b9:今日は,".as_bytes()), + Ok(("".as_bytes(), T::Binary(Vec::from("今日は".as_bytes())))) + ); + } + + #[test] + fn test_list() { + assert_eq!( + list_t("[0:]".as_bytes()), + Ok(("".as_bytes(), vec![])) + ); + assert_eq!( + list_t("[6:u,u,u,]".as_bytes()), + Ok(("".as_bytes(), vec![ + T::Unit, + T::Unit, + T::Unit, + ])) + ); + assert_eq!( + list_t("[15:u,[7:t3:foo,]u,]".as_bytes()), + Ok(("".as_bytes(), vec![ + T::Unit, + T::List(vec![T::Text("foo".to_owned())]), + T::Unit, + ])) + ); + } + + #[test] + fn test_record() { + assert_eq!( + record_t("{21:<1:a|u,<1:b|u,<1:c|u,}".as_bytes()), + Ok(("".as_bytes(), vec![ + ("a".to_owned(), T::Unit), + ("b".to_owned(), T::Unit), + ("c".to_owned(), T::Unit), + ].into_iter().collect::<HashMap<String, T>>())) + ); + // duplicated keys are ignored (first is taken) + assert_eq!( + record_t("{25:<1:a|u,<1:b|u,<1:a|i1:-1,}".as_bytes()), + Ok(("".as_bytes(), vec![ + ("a".to_owned(), T::Unit), + ("b".to_owned(), T::Unit), + ].into_iter().collect::<HashMap<_,_>>())) + ); + } + + #[test] + fn test_parse() { + assert_eq!( + t_t("n3:255,".as_bytes()), + Ok(("".as_bytes(), T::N3(255))) + ); + assert_eq!( + t_t("t6:halloo,".as_bytes()), + Ok(("".as_bytes(), T::Text("halloo".to_owned()))) + ); + assert_eq!( + t_t("<3:foo|t6:halloo,".as_bytes()), + Ok(("".as_bytes(), T::Sum (Tag { + tag: "foo".to_owned(), + val: Box::new(T::Text("halloo".to_owned())) + }))) + ); + // { a: Unit + // , foo: List <A: Unit | B: List i3> } + assert_eq!( + t_t("{52:<1:a|u,<3:foo|[33:<1:A|u,<1:A|n1:1,<1:B|[7:i3:127,]]}".as_bytes()), + Ok(("".as_bytes(), T::Record(vec![ + ("a".to_owned(), T::Unit), + ("foo".to_owned(), T::List(vec![ + T::Sum(Tag { tag: "A".to_owned(), val: Box::new(T::Unit) }), + T::Sum(Tag { tag: "A".to_owned(), val: Box::new(T::N1(true)) }), + T::Sum(Tag { tag: "B".to_owned(), val: Box::new(T::List(vec![T::I3(127)])) }), + ])) + ].into_iter().collect::<HashMap<String, T>>()))) + ); + } + + } +} + +pub mod dec { + use super::*; + use std::collections::HashMap; + + pub struct DecodeError(pub String); + + pub trait Decoder<'a> { + type A; + fn dec(&self, u: U<'a>) -> Result<Self::A, DecodeError>; + } + + #[derive(Clone, Copy)] + pub struct AnyT; + #[derive(Clone, Copy)] + pub struct AnyU; + + impl<'a> Decoder<'a> for AnyT { + type A = T; + fn dec(&self, u: U<'a>) -> Result<Self::A, DecodeError> { + Ok(u.to_t()) + } + } + + impl<'a> Decoder<'a> for AnyU { + type A = U<'a>; + fn dec(&self, u: U<'a>) -> Result<Self::A, DecodeError> { + Ok(u) + } + } + + #[derive(Clone, Copy)] + pub struct Text; + // TODO: rename to Bytes + #[derive(Clone, Copy)] + pub struct Binary; + + impl<'a> Decoder<'a> for Text { + type A = &'a str; + fn dec(&self, u: U<'a>) -> Result<Self::A, DecodeError> { + match u { + U::Text(t) => Ok(t), + other => Err(DecodeError(format!("Cannot decode {:?} into Text", other))), + } + } + } + + impl<'a> Decoder<'a> for Binary { + type A = &'a [u8]; + fn dec(&self, u: U<'a>) -> Result<Self::A, DecodeError> { + match u { + U::Binary(b) => Ok(b), + other => Err(DecodeError(format!("Cannot decode {:?} into Binary", other))), + } + } + } + + #[derive(Clone, Copy)] + pub struct ScalarAsBytes; + + impl<'a> Decoder<'a> for ScalarAsBytes { + type A = Vec<u8>; + fn dec(&self, u: U<'a>) -> Result<Self::A, DecodeError> { + match u { + U::N3(u) => Ok(format!("{}", u).into_bytes()), + U::N6(u) => Ok(format!("{}", u).into_bytes()), + U::N7(u) => Ok(format!("{}", u).into_bytes()), + 
U::I3(i) => Ok(format!("{}", i).into_bytes()), + U::I6(i) => Ok(format!("{}", i).into_bytes()), + U::I7(i) => Ok(format!("{}", i).into_bytes()), + U::Text(t) => Ok(t.as_bytes().to_owned()), + U::Binary(b) => Ok(b.to_owned()), + o => Err(DecodeError(format!("Cannot decode {:?} into scalar", o))), + } + } + } + + #[derive(Clone, Copy)] + pub struct Record<T>(pub T); + + impl<'a, Inner> Decoder<'a> for Record<Inner> + where Inner: Decoder<'a> + { + type A = HashMap<&'a str, Inner::A>; + fn dec(&self, u: U<'a>) -> Result<Self::A, DecodeError> { + match u { + U::Record(map) => + map.into_iter() + .map(|(k, v)| self.0.dec(v).map(|v2| (k, v2))) + .collect::<Result<Self::A, _>>(), + o => Err(DecodeError(format!("Cannot decode {:?} into record", o))) + } + } + } + + #[derive(Clone, Copy)] + pub struct RecordDot<'a, T> { + pub field: &'a str, + pub inner: T + } + + impl <'a, Inner> Decoder<'a> for RecordDot<'_, Inner> + where Inner: Decoder<'a> + Clone + { + type A = Inner::A; + fn dec(&self, u: U<'a>) -> Result<Self::A, DecodeError> { + match Record(self.inner.clone()).dec(u) { + Ok(mut map) => match map.remove(self.field) { + Some(inner) => Ok(inner), + None => Err(DecodeError(format!("Cannot find `{}` in record map", self.field))), + }, + Err(err) => Err(err), + } + } + } + + #[derive(Clone)] + pub struct OneOf<T, A>{ + pub inner: T, + pub list: Vec<A>, + } + + impl <'a, Inner> Decoder<'a> for OneOf<Inner, Inner::A> + where Inner: Decoder<'a>, + Inner::A: Display + Debug + PartialEq + { + type A = Inner::A; + fn dec(&self, u: U<'a>) -> Result<Self::A, DecodeError> { + match self.inner.dec(u) { + Ok(inner) => match self.list.iter().any(|x| x.eq(&inner)) { + true => Ok(inner), + false => Err(DecodeError(format!("{} is not one of {:?}", inner, self.list))) + }, + Err(err) => Err(err) + } + } + } + + #[derive(Clone)] + pub struct Try<T>(pub T); + + impl <'a, Inner> Decoder<'a> for Try<Inner> + where Inner: Decoder<'a> + { + type A = Option<Inner::A>; + fn dec(&self, u: U<'a>) -> Result<Self::A, DecodeError> { + match self.0.dec(u) { + Ok(inner) => Ok(Some(inner)), + Err(err) => Ok(None) + } + } + } + +} diff --git a/users/Profpatsch/netstring/README.md b/users/Profpatsch/netstring/README.md new file mode 100644 index 000000000000..b8daea11d158 --- /dev/null +++ b/users/Profpatsch/netstring/README.md @@ -0,0 +1,18 @@ +# Netstring + +Netstrings are a djb invention. They are intended as a serialization format. Instead of inline control characters like `\n` or `\0` to signal the end of a string, they use a run-length encoding given as the number of bytes, encoded in ASCII, at the beginning of the string. + +``` +hello -> 5:hello, +foo! -> 4:foo!, +こんにちは -> 15:こんにちは, +``` + +They can be used to encode e.g. lists by simply concatenating and reading them in one-by-one. + +If you need a more complex encoding, you could start encoding e.g. tuples as netstrings-in-netstrings, or you could use [`netencode`](../netencode/README.md) instead, which is what-if-json-but-netstrings, and takes the idea of netstrings to their logical conclusion. + +Resources: + +Spec: http://cr.yp.to/proto/netstrings.txt +Wiki: https://en.wikipedia.org/wiki/Netstring diff --git a/users/Profpatsch/netstring/default.nix b/users/Profpatsch/netstring/default.nix new file mode 100644 index 000000000000..3cf882d5a279 --- /dev/null +++ b/users/Profpatsch/netstring/default.nix @@ -0,0 +1,76 @@ +{ lib, pkgs, depot, ... 
}: +let + toNetstring = s: + "${toString (builtins.stringLength s)}:${s},"; + + toNetstringKeyVal = attrs: + lib.concatStrings + (lib.mapAttrsToList + (k: v: toNetstring (toNetstring k + toNetstring v)) + attrs); + + python-netstring = depot.users.Profpatsch.writers.python3Lib { + name = "netstring"; + } '' + def read_netstring(bytes): + (int_length, rest) = bytes.split(sep=b':', maxsplit=1) + val = rest[:int(int_length)] + # has to end on a , + assert(rest[len(val)] == ord(',')) + return (val, rest[len(val) + 1:]) + + def read_netstring_key_val(bytes): + (keyvalnet, rest) = read_netstring(bytes) + (key, valnet) = read_netstring(keyvalnet) + (val, nothing) = read_netstring(valnet) + assert(nothing == b"") + return (key, val, rest) + + def read_netstring_key_val_list(bytes): + rest = bytes + res = {} + while rest != b"": + (key, val, r) = read_netstring_key_val(rest) + rest = r + res[key] = val + return res + ''; + + rust-netstring = depot.users.Profpatsch.writers.rustSimpleLib { + name = "netstring"; + } '' + pub fn to_netstring(s: &[u8]) -> Vec<u8> { + let len = s.len(); + // length of the integer as ascii + let i_len = ((len as f64).log10() as usize) + 1; + let ns_len = i_len + 1 + len + 1; + let mut res = Vec::with_capacity(ns_len); + res.extend_from_slice(format!("{}:", len).as_bytes()); + res.extend_from_slice(s); + res.push(b','); + res + } + ''; + + tests = import ./tests.nix { + inherit + depot + pkgs + lib + python-netstring + rust-netstring + toNetstring + toNetstringKeyVal + ; + }; + +in { + inherit + toNetstring + toNetstringKeyVal + python-netstring + rust-netstring + tests + ; + +} diff --git a/users/Profpatsch/netstring/tests.nix b/users/Profpatsch/netstring/tests.nix new file mode 100644 index 000000000000..23141472d6a8 --- /dev/null +++ b/users/Profpatsch/netstring/tests.nix @@ -0,0 +1,61 @@ +{ depot, lib, pkgs, python-netstring, rust-netstring, toNetstring, toNetstringKeyVal }: + +let + + python-netstring-test = depot.users.Profpatsch.writers.python3 { + name = "python-netstring-test"; + libraries = p: [ + python-netstring + ]; + } '' + import netstring + + def assEq(left, right): + assert left == right, "{} /= {}".format(str(left), str(right)) + + assEq( + netstring.read_netstring(b"""${toNetstring "hi!"}"""), + (b"hi!", b"") + ) + + assEq( + netstring.read_netstring_key_val( + b"""${toNetstringKeyVal { foo = "42"; }}""" + ), + (b'foo', b'42', b"") + ) + + assEq( + netstring.read_netstring_key_val_list( + b"""${toNetstringKeyVal { foo = "42"; bar = "hi"; }}""" + ), + { b'foo': b'42', b'bar': b'hi' } + ) + ''; + + rust-netstring-test = depot.users.Profpatsch.writers.rustSimple { + name = "rust-netstring-test"; + dependencies = [ + rust-netstring + ]; + } '' + extern crate netstring; + + fn main() { + assert_eq!( + std::str::from_utf8(&netstring::to_netstring(b"hello")).unwrap(), + r##"${toNetstring "hello"}"## + ); + assert_eq!( + std::str::from_utf8(&netstring::to_netstring("こんにちは".as_bytes())).unwrap(), + r##"${toNetstring "こんにちは"}"## + ); + } + ''; + +in { + inherit + python-netstring-test + rust-netstring-test + ; +} diff --git a/users/Profpatsch/nixpkgs-rewriter/MetaStdenvLib.hs b/users/Profpatsch/nixpkgs-rewriter/MetaStdenvLib.hs new file mode 100644 index 000000000000..3ed96a7b6eac --- /dev/null +++ b/users/Profpatsch/nixpkgs-rewriter/MetaStdenvLib.hs @@ -0,0 +1,80 @@ +{-# LANGUAGE PartialTypeSignatures #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE NamedFieldPuns #-} +import Nix.Parser +import Nix.Expr.Types +import 
Nix.Expr.Types.Annotated +import System.Environment (getArgs) +import System.Exit (die) +import Data.Fix (Fix(..)) +import qualified Data.Text as Text +import qualified Data.ByteString.Lazy.Char8 as BL +import qualified Data.Aeson as A +import qualified Data.Aeson.Encoding as A +import Data.Function ((&)) +import qualified System.IO as IO +import qualified Text.Megaparsec.Pos as MP + +main = do + (nixFile:_) <- getArgs + (parseNixFileLoc nixFile :: IO _) >>= \case + Failure err -> do + ePutStrLn $ show err + die "oh no" + Success expr -> do + case snd $ match expr of + NoArguments -> do + ePutStrLn $ "NoArguments in " <> nixFile + printPairs mempty + YesLib vars -> do + ePutStrLn $ "lib in " <> show vars <> " in " <> nixFile + printPairs mempty + NoLib vars srcSpan -> do + ePutStrLn $ nixFile <> " needs lib added" + printPairs + $ "fileName" A..= nixFile + <> "fromLine" A..= (srcSpan & spanBegin & sourceLine) + <> "fromColumn" A..= (srcSpan & spanBegin & sourceColumn) + <> "toLine" A..= (srcSpan & spanEnd & sourceLine) + <> "toColumn" A..= (srcSpan & spanEnd & sourceColumn) + +printPairs pairs = BL.putStrLn $ A.encodingToLazyByteString $ A.pairs pairs + +ePutStrLn = IO.hPutStrLn IO.stderr + +data Descend = YesDesc | NoDesc + deriving Show +data Matched = NoArguments | NoLib [VarName] SrcSpan | YesLib [VarName] + deriving Show + +match :: Fix (Compose (Ann SrcSpan) NExprF) -> (Descend, Matched) +match = \case + (AnnE outerSpan (NAbs (ParamSet params _ _) (AnnE innerSpan _))) -> (NoDesc, + let vars = map fst params in + case (any (== "lib") vars) of + True -> YesLib vars + False -> + -- The span of the arglist is from the beginning of the match + -- to the beginning of the inner expression + let varSpan = SrcSpan + { spanBegin = outerSpan & spanBegin + -- -1 to prevent the spans from overlapping + , spanEnd = sourcePosMinus1 (innerSpan & spanBegin) } + in NoLib vars varSpan) + _ -> (NoDesc, NoArguments) + +-- | Remove one from a source positon. +-- +-- That means if the current position is at the very beginning of a line, +-- jump to the previous line. +sourcePosMinus1 :: SourcePos -> SourcePos +sourcePosMinus1 src@(SourcePos { sourceLine, sourceColumn }) = + let + col = MP.mkPos $ max (MP.unPos sourceColumn - 1) 1 + line = MP.mkPos $ case MP.unPos sourceColumn of + 1 -> max (MP.unPos sourceLine - 1) 1 + _ -> MP.unPos sourceLine + in src + { sourceLine = line + , sourceColumn = col } diff --git a/users/Profpatsch/nixpkgs-rewriter/default.nix b/users/Profpatsch/nixpkgs-rewriter/default.nix new file mode 100644 index 000000000000..286530b03f94 --- /dev/null +++ b/users/Profpatsch/nixpkgs-rewriter/default.nix @@ -0,0 +1,113 @@ +{ depot, pkgs, ... 
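The span arithmetic in `sourcePosMinus1` above is the easy part to get wrong: stepping back from the first column has to wrap to the previous line without letting either component drop below 1. As a hedged re-statement (purely illustrative, not part of this tool; `source_pos_minus_1` is a made-up Rust mirror of the Haskell helper), the same rule reads:

```rust
// Illustrative only: step a 1-based (line, column) position back by one
// column, wrapping to the previous line when the column is already 1.
// Neither component ever drops below 1, mirroring sourcePosMinus1 above.
fn source_pos_minus_1(line: u32, column: u32) -> (u32, u32) {
    if column == 1 {
        (line.saturating_sub(1).max(1), 1)
    } else {
        (line, column - 1)
    }
}

fn main() {
    assert_eq!(source_pos_minus_1(3, 5), (3, 4)); // normal case: one column left
    assert_eq!(source_pos_minus_1(3, 1), (2, 1)); // start of line: jump to previous line
    assert_eq!(source_pos_minus_1(1, 1), (1, 1)); // never go below line/column 1
}
```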
}: +let + inherit (depot.nix) + writeExecline + ; + inherit (depot.users.Profpatsch.lib) + debugExec + eprintf + ; + + bins = depot.nix.getBins pkgs.coreutils [ "head" "shuf" ] + // depot.nix.getBins pkgs.jq [ "jq" ] + // depot.nix.getBins pkgs.findutils [ "xargs" ] + // depot.nix.getBins pkgs.gnused [ "sed" ] + ; + + export-json-object = pkgs.writers.writePython3 "export-json-object" {} '' + import json + import sys + import os + + d = json.load(sys.stdin) + + if d == {}: + sys.exit(0) + + for k, v in d.items(): + os.environ[k] = str(v) + + os.execvp(sys.argv[1], sys.argv[1:]) + ''; + + meta-stdenv-lib = pkgs.writers.writeHaskell "meta-stdenv-lib" { + libraries = [ + pkgs.haskellPackages.hnix + pkgs.haskellPackages.aeson + ]; + } ./MetaStdenvLib.hs; + + replace-between-lines = writeExecline "replace-between-lines" { readNArgs = 1; } [ + "importas" "-ui" "file" "fileName" + "importas" "-ui" "from" "fromLine" + "importas" "-ui" "to" "toLine" + "if" [ eprintf "%s-%s\n" "$from" "$to" ] + (debugExec "adding lib") + bins.sed + "-e" "\${from},\${to} \${1}" + "-i" "$file" + ]; + + add-lib-if-necessary = writeExecline "add-lib-if-necessary" { readNArgs = 1; } [ + "pipeline" [ meta-stdenv-lib "$1" ] + export-json-object + # first replace any stdenv.lib mentions in the arg header + # if this is not done, the replace below kills these. + # Since we want it anyway ultimately, let’s do it here. + "if" [ replace-between-lines "s/stdenv\.lib/lib/" ] + # then add the lib argument + # (has to be before stdenv, otherwise default arguments might be in the way) + replace-between-lines "s/stdenv/lib, stdenv/" + ]; + + metaString = ''meta = with stdenv.lib; {''; + + replace-stdenv-lib = pkgs.writers.writeBash "replace-stdenv-lib" '' + set -euo pipefail + sourceDir="$1" + for file in $( + ${pkgs.ripgrep}/bin/rg \ + --files-with-matches \ + --fixed-strings \ + -e '${metaString}' \ + "$sourceDir" + ) + do + echo "replacing stdenv.lib meta in $file" >&2 + ${bins.sed} -e '/${metaString}/ s/stdenv.lib/lib/' \ + -i "$file" + ${add-lib-if-necessary} "$file" + done + ''; + + instantiate-nixpkgs-randomly = writeExecline "instantiate-nixpkgs-randomly" { readNArgs = 1; } [ + "export" "NIXPKGS_ALLOW_BROKEN" "1" + "export" "NIXPKGS_ALLOW_UNFREE" "1" + "export" "NIXPKGS_ALLOW_INSECURE" "1" + "export" "NIXPKGS_ALLOW_UNSUPPORTED_SYSTEM" "1" + "pipeline" [ + "nix" + "eval" + "--raw" + ''( + let pkgs = import ''${1} {}; + in builtins.toJSON (builtins.attrNames pkgs) + )'' + ] + "pipeline" [ bins.jq "-r" ".[]" ] + "pipeline" [ bins.shuf ] + "pipeline" [ bins.head "-n" "1000" ] + bins.xargs "-I" "{}" "-n1" + "if" [ eprintf "instantiating %s\n" "{}" ] + "nix-instantiate" "$1" "-A" "{}" + ]; + +in { + inherit + instantiate-nixpkgs-randomly + # requires hnix, which we don’t want in tvl for now + # uncomment manually if you want to use it. + # meta-stdenv-lib + # replace-stdenv-lib + ; +} diff --git a/users/Profpatsch/read-http.nix b/users/Profpatsch/read-http.nix new file mode 100644 index 000000000000..614993c457e1 --- /dev/null +++ b/users/Profpatsch/read-http.nix @@ -0,0 +1,16 @@ +{ depot, pkgs, ... 
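In the rewriter above, `meta-stdenv-lib` emits the `fromLine`/`toLine` span as a JSON object, `export-json-object` turns that object into environment variables, and `replace-between-lines` hands sed a substitution restricted to exactly that line range. A rough Rust sketch of such a range-restricted substitution (illustrative only; the actual tool shells out to sed, and `replace_between_lines` here is a made-up helper name):

```rust
// Illustrative stand-in for `sed -e '${from},${to} s/pat/rep/' -i $file`:
// apply a first-occurrence-per-line substitution, but only on the 1-based
// lines from..=to. Not the depot's implementation, which uses sed.
fn replace_between_lines(text: &str, from: usize, to: usize, pat: &str, rep: &str) -> String {
    text.lines()
        .enumerate()
        .map(|(i, line)| {
            if (from..=to).contains(&(i + 1)) {
                // sed's `s///` without the `g` flag replaces only the first match
                line.replacen(pat, rep, 1)
            } else {
                line.to_string()
            }
        })
        .collect::<Vec<_>>()
        .join("\n")
}

fn main() {
    let header = "{ stdenv\n, fetchurl\n}:\nstdenv.mkDerivation { }";
    // only the argument header (lines 1-3) gets `lib, ` added before stdenv
    let rewritten = replace_between_lines(header, 1, 3, "stdenv", "lib, stdenv");
    assert!(rewritten.starts_with("{ lib, stdenv"));
    assert!(rewritten.ends_with("stdenv.mkDerivation { }"));
}
```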
}: + +let + + read-http = depot.users.Profpatsch.writers.rustSimple { + name = "read-http"; + dependencies = [ + depot.users.Profpatsch.rust-crates.ascii + depot.users.Profpatsch.rust-crates.httparse + depot.users.Profpatsch.netencode.netencode-rs + depot.users.Profpatsch.arglib.netencode.rust + depot.users.Profpatsch.execline.exec-helpers + ]; + } (builtins.readFile ./read-http.rs); + +in read-http diff --git a/users/Profpatsch/read-http.rs b/users/Profpatsch/read-http.rs new file mode 100644 index 000000000000..50ff663b994a --- /dev/null +++ b/users/Profpatsch/read-http.rs @@ -0,0 +1,211 @@ +extern crate httparse; +extern crate netencode; +extern crate arglib_netencode; +extern crate ascii; +extern crate exec_helpers; + +use std::os::unix::io::FromRawFd; +use std::io::Read; +use std::io::Write; +use std::collections::HashMap; +use exec_helpers::{die_user_error, die_expected_error, die_temporary}; + +use netencode::{U, T, dec}; +use netencode::dec::Decoder; + +enum What { + Request, + Response +} + +// reads a http request (stdin), and writes all headers to stdout, as netencoded record. +// The keys are text, but can be lists of text iff headers appear multiple times, so beware. +fn main() -> std::io::Result<()> { + + exec_helpers::no_args("read-http"); + + let args = dec::RecordDot { + field: "what", + inner: dec::OneOf { + list: vec!["request", "response"], + inner: dec::Text + } + }; + let what : What = match args.dec(arglib_netencode::arglib_netencode("read-http", None).to_u()) { + Ok("request") => What::Request, + Ok("response") => What::Response, + Ok(v) => panic!("shouldn’t happen!, value was: {}", v), + Err(dec::DecodeError(err)) => die_user_error("read-http", err), + }; + + fn read_stdin_to_complete<F>(mut parse: F) -> () + where F: FnMut(&[u8]) -> httparse::Result<usize> + { + let mut res = httparse::Status::Partial; + loop { + if let httparse::Status::Complete(_) = res { + return; + } + let mut buf = [0; 2048]; + match std::io::stdin().read(&mut buf[..]) { + Ok(size) => if size == 0 { + break; + }, + Err(err) => die_temporary("read-http", format!("could not read from stdin, {:?}", err)) + } + match parse(&buf) { + Ok(status) => { + res = status; + } + Err(err) => die_temporary("read-http", format!("httparse parsing failed: {:#?}", err)) + } + } + } + + fn normalize_headers<'a>(headers: &'a [httparse::Header]) -> HashMap<String, U<'a>> { + let mut res = HashMap::new(); + for httparse::Header { name, value } in headers { + let val = ascii::AsciiStr::from_ascii(*value) + .expect(&format!("read-http: we require header values to be ASCII, but the header {} was {:?}", name, value)) + .as_str(); + // lowercase the header names, since the standard doesn’t care + // and we want unique strings to match against + let name_lower = name.to_lowercase(); + match res.insert(name_lower, U::Text(val)) { + None => (), + Some(U::Text(t)) => { + let name_lower = name.to_lowercase(); + let _ = res.insert(name_lower, U::List(vec![U::Text(t), U::Text(val)])); + () + }, + Some(U::List(mut l)) => { + let name_lower = name.to_lowercase(); + l.push(U::Text(val)); + let _ = res.insert(name_lower, U::List(l)); + () + }, + Some(o) => panic!("read-http: header not text nor list: {:?}", o), + } + } + res + } + + // tries to read until the end of the http header (deliniated by two newlines "\r\n\r\n") + fn read_till_end_of_header<R: Read>(buf: &mut Vec<u8>, reader: R) -> Option<()> { + let mut chonker = Chunkyboi::new(reader, 4096); + loop { + // TODO: attacker can send looooong input, set upper maximum + match 
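// The loop below pulls stdin in fixed-size (4096-byte) chunks via the
// Chunkyboi iterator defined at the bottom of this file, appends each chunk
// to `buf`, and stops as soon as the chunk just read contains the blank
// line ("\r\n\r\n") that terminates the HTTP header block.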
chonker.next() { + Some(Ok(chunk)) => { + buf.extend_from_slice(&chunk); + if chunk.windows(4).any(|c| c == b"\r\n\r\n" ) { + return Some(()); + } + }, + Some(Err(err)) => die_temporary("read-http", format!("error reading from stdin: {:?}", err)), + None => return None + } + } + } + + // max header size chosen arbitrarily + let mut headers = [httparse::EMPTY_HEADER; 128]; + let stdin = std::io::stdin(); + + match what { + Request => { + let mut req = httparse::Request::new(&mut headers); + let mut buf: Vec<u8> = vec![]; + match read_till_end_of_header(&mut buf, stdin.lock()) { + Some(()) => match req.parse(&buf) { + Ok(httparse::Status::Complete(_body_start)) => {}, + Ok(httparse::Status::Partial) => die_expected_error("read-http", "httparse should have gotten a full header"), + Err(err) => die_expected_error("read-http", format!("httparse response parsing failed: {:#?}", err)) + }, + None => die_expected_error("read-http", format!("httparse end of stdin reached before able to parse request headers")) + } + let method = req.method.expect("method must be filled on complete parse"); + let path = req.path.expect("path must be filled on complete parse"); + write_dict_req(method, path, &normalize_headers(req.headers)) + }, + Response => { + let mut resp = httparse::Response::new(&mut headers); + let mut buf: Vec<u8> = vec![]; + match read_till_end_of_header(&mut buf, stdin.lock()) { + Some(()) => match resp.parse(&buf) { + Ok(httparse::Status::Complete(_body_start)) => {}, + Ok(httparse::Status::Partial) => die_expected_error("read-http", "httparse should have gotten a full header"), + Err(err) => die_expected_error("read-http", format!("httparse response parsing failed: {:#?}", err)) + }, + None => die_expected_error("read-http", format!("httparse end of stdin reached before able to parse response headers")) + } + let code = resp.code.expect("code must be filled on complete parse"); + let reason = resp.reason.expect("reason must be filled on complete parse"); + write_dict_resp(code, reason, &normalize_headers(resp.headers)) + } + } +} + +fn write_dict_req<'a, 'buf>(method: &'buf str, path: &'buf str, headers: &'a HashMap<String, U<'a>>) -> std::io::Result<()> { + let mut http = vec![ + ("method", U::Text(method)), + ("path", U::Text(path)), + ].into_iter().collect(); + write_dict(http, headers) +} + +fn write_dict_resp<'a, 'buf>(code: u16, reason: &'buf str, headers: &'a HashMap<String, U<'a>>) -> std::io::Result<()> { + let mut http = vec![ + ("status", U::N6(code as u64)), + ("status-text", U::Text(reason)), + ].into_iter().collect(); + write_dict(http, headers) +} + + +fn write_dict<'buf, 'a>(mut http: HashMap<&str, U<'a>>, headers: &'a HashMap<String, U<'a>>) -> std::io::Result<()> { + match http.insert("headers", U::Record( + headers.iter().map(|(k,v)| (k.as_str(), v.clone())).collect() + )) { + None => (), + Some(_) => panic!("read-http: headers already in dict"), + }; + netencode::encode( + &mut std::io::stdout(), + &U::Record(http) + )?; + Ok(()) +} + + +// iter helper + +struct Chunkyboi<T> { + inner: T, + buf: Vec<u8>, +} + +impl<R: Read> Chunkyboi<R> { + fn new(inner: R, chunksize: usize) -> Self { + let buf = vec![0; chunksize]; + Chunkyboi { + inner, + buf + } + } +} + +impl<R: Read> Iterator for Chunkyboi<R> { + type Item = std::io::Result<Vec<u8>>; + + fn next(&mut self) -> Option<std::io::Result<Vec<u8>>> { + match self.inner.read(&mut self.buf) { + Ok(0) => None, + Ok(read) => { + // clone a new buffer so we can reuse the internal one + Some(Ok(self.buf[..read].to_owned())) + 
} + Err(err) => Some(Err(err)) + } + } +} diff --git a/users/Profpatsch/rust-crates.nix b/users/Profpatsch/rust-crates.nix new file mode 100644 index 000000000000..e6a191f70f4b --- /dev/null +++ b/users/Profpatsch/rust-crates.nix @@ -0,0 +1,122 @@ +{ depot, pkgs, ... }: +rec { + cfg-if = pkgs.buildRustCrate { + pname = "cfg-if"; + crateName = "cfg-if"; + version = "1.0.0"; + sha256 = "1fzidq152hnxhg4lj6r2gv4jpnn8yivp27z6q6xy7w6v0dp6bai9"; + }; + + cc = pkgs.buildRustCrate { + pname = "cc"; + crateName = "cc"; + version = "1.0.66"; + sha256 = "12q71z6ck8wlqrwgi25x3lrryyks9djymswn9b1c6qq0i01jpc1p"; + }; + + ascii = pkgs.buildRustCrate { + pname = "ascii"; + crateName = "ascii"; + version = "1.0.0"; + sha256 = "0gam8xsn981wfa40srsniivffjsfz1pg0xnigmczk9k7azb1ks1m"; + }; + + regex-syntax = pkgs.buildRustCrate { + pname = "regex-syntax"; + crateName = "regex-syntax"; + version = "0.6.22"; + sha256 = "0r00n2dgyixacl1sczqp18gxf0xh7x272hcdp62412lypba2gqyg"; + }; + + regex = pkgs.buildRustCrate { + pname = "regex"; + crateName = "regex"; + version = "1.4.3"; + features = [ "std" ]; + dependencies = [ regex-syntax ]; + sha256 = "0w0b4bh0ng20lf5y8raaxmxj46ikjqpgwy1iggzpby9lhv9vydkp"; + }; + + libloading = pkgs.buildRustCrate { + pname = "libloading"; + crateName = "libloading"; + version = "0.6.7"; + dependencies = [ cfg-if ]; + sha256 = "111d8zsizswnxiqn43vcgnc2ym9spsx1i6pcfp35ca3yw2ixq95j"; + }; + + tree-sitter = pkgs.buildRustCrate { + pname = "tree_sitter"; + crateName = "tree-sitter"; + # buildRustCrate isn’t really smart enough to detect the subdir + libPath = "binding_rust/lib.rs"; + # and the build.rs is also not where buildRustCrate would find it + build = "binding_rust/build.rs"; + version = "0.17.1"; + dependencies = [ regex ]; + buildDependencies = [ cc ]; + sha256 = "0jwwbvs4icpra7m1ycvnyri5h3sbw4qrfvgnnvnk72h4w93qhzhr"; + }; + + libc = pkgs.buildRustCrate { + pname = "libc"; + crateName = "libc"; + version = "0.2.82"; + sha256 = "02zgn6c0xwh331hky417lbr29kmvrw3ylxs8822syyhjfjqszvsx"; + }; + + bitflags = pkgs.buildRustCrate { + pname = "bitflags"; + crateName = "bitflags"; + version = "1.2.1"; + sha256 = "0b77awhpn7yaqjjibm69ginfn996azx5vkzfjj39g3wbsqs7mkxg"; + }; + + inotify-sys = pkgs.buildRustCrate { + pname = "inotify-sys"; + crateName = "inotify-sys"; + version = "0.1.5"; + dependencies = [ libc ]; + sha256 = "1yiy577xxhi0j90nbg9nkd8cqwc1xix62rz55jjngvxa5jl5613v"; + }; + + inotify = pkgs.buildRustCrate { + pname = "inotify"; + crateName = "inotify"; + version = "0.9.2"; + edition = "2018"; + dependencies = [ bitflags libc inotify-sys ]; + sha256 = "0fcknyvknglwwk1pdzdlb4m0ry2dym1yx8r5prf2v00pxnjk0hv2"; + }; + + httparse = pkgs.buildRustCrate { + pname = "httparse"; + version = "1.3.4"; + crateName = "httparse"; + sha256 = "0dggj4s0cq69bn63q9nqzzay5acmwl33nrbhjjsh5xys8sk2x4jw"; + }; + + version-check = pkgs.buildRustCrate { + pname = "version-check"; + version = "0.9.2"; + crateName = "version-check"; + sha256 = "1vwvc1mzwv8ana9jv8z933p2xzgj1533qwwl5zr8mi89azyhq21v"; + }; + + memchr = pkgs.buildRustCrate { + pname = "memchr"; + version = "2.3.3"; + crateName = "memchr"; + sha256 = "1ivxvlswglk6wd46gadkbbsknr94gwryk6y21v64ja7x4icrpihw"; + }; + nom = pkgs.buildRustCrate { + pname = "nom"; + version = "5.1.1"; + crateName = "nom"; + sha256 = "1gb4r6mjwd645jqh02nhn60i7qkw8cgy3xq1r4clnmvz3cmkv1l0"; + dependencies = [ memchr ]; + buildDependencies = [ version-check ]; + features = [ "std" "alloc" ]; + }; + +} diff --git a/users/Profpatsch/tree-sitter.nix b/users/Profpatsch/tree-sitter.nix 
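The `normalize_headers` function in read-http.rs above folds header names to lowercase and collects repeated headers into a list before netencoding them. A stripped-down sketch of that same idea, using plain `Vec<String>` values instead of netencode's `U<'a>` (illustrative only; the simplified signature is not the tool's actual one):

```rust
use std::collections::HashMap;

// Illustrative only: the lowercase-and-collect idea from read-http.rs,
// with Vec<String> values instead of netencode's U<'a>.
fn normalize_headers(headers: &[(&str, &str)]) -> HashMap<String, Vec<String>> {
    let mut res: HashMap<String, Vec<String>> = HashMap::new();
    for &(name, value) in headers {
        // header names are case-insensitive, so fold them to lowercase;
        // repeated headers accumulate into one list under the same key
        res.entry(name.to_lowercase())
            .or_insert_with(Vec::new)
            .push(value.to_string());
    }
    res
}

fn main() {
    let raw = [("Accept", "text/html"), ("Set-Cookie", "a=1"), ("set-cookie", "b=2")];
    let norm = normalize_headers(&raw);
    assert_eq!(norm["accept"], vec!["text/html".to_string()]);
    assert_eq!(norm["set-cookie"], vec!["a=1".to_string(), "b=2".to_string()]);
}
```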
new file mode 100644 index 000000000000..099fa2d5b7b9 --- /dev/null +++ b/users/Profpatsch/tree-sitter.nix @@ -0,0 +1,179 @@ +{ depot, pkgs, lib, ... }: + +let + bins = depot.nix.getBins pkgs.coreutils [ "head" "printf" "cat" ] + // depot.nix.getBins pkgs.ncurses [ "tput" ] + // depot.nix.getBins pkgs.bc [ "bc" ] + // depot.nix.getBins pkgs.ocamlPackages.sexp [ "sexp" ]; + + print-ast = depot.users.Profpatsch.writers.rustSimple { + name = "print-ast"; + dependencies = with depot.users.Profpatsch.rust-crates; [ + libloading + tree-sitter + ]; + } '' + extern crate libloading; + extern crate tree_sitter; + use std::mem; + use std::io::{Read}; + use libloading::{Library, Symbol}; + use tree_sitter::{Language, Parser}; + + /// Load the shared lib FILE and return the language under SYMBOL-NAME. + /// Inspired by the rust source of emacs-tree-sitter. + fn _load_language(file: String, symbol_name: String) -> Result<Language, libloading::Error> { + let lib = Library::new(file)?; + let tree_sitter_lang: Symbol<'_, unsafe extern "C" fn() -> _> = + unsafe { lib.get(symbol_name.as_bytes())? }; + let language: Language = unsafe { tree_sitter_lang() }; + // Avoid segmentation fault by not unloading the lib, as language is a static piece of data. + // TODO: Attach an Rc<Library> to Language instead. + mem::forget(lib); + Ok(language) + } + + fn main() { + let mut args = std::env::args(); + let so = args.nth(1).unwrap(); + let symbol_name = args.nth(0).unwrap(); + let file = args.nth(0).unwrap(); + let mut parser = Parser::new(); + let lang = _load_language(so, symbol_name).unwrap(); + parser.set_language(lang).unwrap(); + let bytes = std::fs::read(&file).unwrap(); + print!("{}", parser.parse(&bytes, None).unwrap().root_node().to_sexp()); + } + + + ''; + + tree-sitter-nix = buildTreeSitterGrammar { + language = "tree-sitter-nix"; + source = pkgs.fetchFromGitHub { + owner = "cstrahan"; + repo = "tree-sitter-nix"; + rev = "791b5ff0e4f0da358cbb941788b78d436a2ca621"; + sha256 = "1y5b3wh3fcmbgq8r2i97likzfp1zp02m58zacw5a1cjqs5raqz66"; + }; + }; + + watch-file-modified = depot.users.Profpatsch.writers.rustSimple { + name = "watch-file-modified"; + dependencies = [ + depot.users.Profpatsch.rust-crates.inotify + depot.users.Profpatsch.netstring.rust-netstring + ]; + } '' + extern crate inotify; + extern crate netstring; + use inotify::{EventMask, WatchMask, Inotify}; + use std::io::Write; + + fn main() { + let mut inotify = Inotify::init() + .expect("Failed to initialize inotify"); + + let file = std::env::args().nth(1).unwrap(); + + let file_watch = inotify + .add_watch( + &file, + WatchMask::MODIFY + ) + .expect("Failed to add inotify watch"); + + let mut buffer = [0u8; 4096]; + loop { + let events = inotify + .read_events_blocking(&mut buffer) + .expect("Failed to read inotify events"); + + for event in events { + if event.wd == file_watch { + std::io::stdout().write(&netstring::to_netstring(file.as_bytes())); + std::io::stdout().flush(); + } + } + } + } + + ''; + + # clear screen and set LINES and COLUMNS to terminal height & width + clear-screen = depot.nix.writeExecline "clear-screen" {} [ + "if" [ bins.tput "clear" ] + "backtick" "-in" "LINES" [ bins.tput "lines" ] + "backtick" "-in" "COLUMNS" [ bins.tput "cols" ] + "$@" + ]; + + print-nix-file = depot.nix.writeExecline "print-nix-file" { readNArgs = 1; } [ + "pipeline" [ print-ast "${tree-sitter-nix}/parser" "tree_sitter_nix" "$1" ] + "pipeline" [ bins.sexp "print" ] + clear-screen + "importas" "-ui" "lines" "LINES" + "backtick" "-in" "ls" [ + "pipeline" + 
# when you pull out bc to decrement an integer it’s time to switch to python lol + [ bins.printf "x=%s; --x\n" "$lines" ] + bins.bc + ] + "importas" "-ui" "l" "ls" + bins.head "-n\${l}" + ]; + + print-nix-file-on-update = depot.nix.writeExecline "print-nix-file-on-update" { readNArgs = 1; } [ + "if" [ print-nix-file "$1" ] + "pipeline" [ watch-file-modified "$1" ] + "forstdin" "-d" "" "file" + "importas" "file" "file" + print-nix-file "$file" + ]; + + # copied from nixpkgs + buildTreeSitterGrammar = + { + # language name + language + # source for the language grammar + , source + }: + + pkgs.stdenv.mkDerivation { + + pname = "${language}-grammar"; + inherit (pkgs.tree-sitter) version; + + src = source; + + buildInputs = [ pkgs.tree-sitter ]; + + dontUnpack = true; + configurePhase= ":"; + buildPhase = '' + runHook preBuild + scanner_cc="$src/src/scanner.cc" + if [ ! -f "$scanner_cc" ]; then + scanner_cc="" + fi + $CXX -I$src/src/ -c $scanner_cc + $CC -I$src/src/ -shared -o parser -Os scanner.o $src/src/parser.c -lstdc++ + runHook postBuild + ''; + installPhase = '' + runHook preInstall + mkdir $out + mv parser $out/ + runHook postInstall + ''; + }; + +in { + inherit + print-ast + tree-sitter-nix + print-nix-file-on-update + watch-file-modified + ; +} diff --git a/users/Profpatsch/writers/default.nix b/users/Profpatsch/writers/default.nix new file mode 100644 index 000000000000..3888579a64d8 --- /dev/null +++ b/users/Profpatsch/writers/default.nix @@ -0,0 +1,164 @@ +{ depot, pkgs, lib, ... }: +let + bins = depot.nix.getBins pkgs.coreutils ["printf" "mkdir" "cat" "ln" "ls" "touch" ]; + + inherit (depot.nix.yants) defun struct restrict attrs list string drv any; + + inherit (depot.nix) drvSeqL; + + FlakeError = + restrict + "flake error" + (s: lib.any (prefix: (builtins.substring 0 1 s) == prefix) + [ "E" "W" ]) + string; + Libraries = defun [ (attrs any) (list drv) ]; + + python3 = { + name, + libraries ? (_: []), + flakeIgnore ? [] + }: pkgs.writers.writePython3 name { + libraries = Libraries libraries pkgs.python3Packages; + flakeIgnore = + let ignoreTheseErrors = [ + # whitespace after { + "E201" + # whitespace before } + "E202" + # fuck 4-space indentation + "E121" "E111" + # who cares about blank lines … + # … at end of files + "W391" + # … between functions + "E302" "E305" + ]; + in list FlakeError (ignoreTheseErrors ++ flakeIgnore); + }; + + # TODO: add the same flake check as the pyhon3 writer + python3Lib = { name, libraries ? (_: []) }: moduleString: + let srcTree = depot.nix.runExecline.local name { stdin = moduleString; } [ + "importas" "out" "out" + "if" [ bins.mkdir "-p" "\${out}/${name}" ] + "if" [ + "redirfd" "-w" "1" "\${out}/setup.py" + bins.printf '' + from distutils.core import setup + + setup( + name='%s', + packages=['%s'] + ) + '' name name + ] + "if" [ + # redirect stdin to the init py + "redirfd" "-w" "1" "\${out}/${name}/__init__.py" + bins.cat + ] + ]; + in pkgs.python3Packages.buildPythonPackage { + inherit name; + src = srcTree; + propagatedBuildInputs = libraries pkgs.python3Packages; + doCheck = false; + }; + + rustSimple = args@{name, ...}: src: + linkTo name "${rustSimpleBin args src}/bin/${name}"; + + linkTo = name: path: depot.nix.runExecline.local name {} [ + "importas" "out" "out" + bins.ln "-sT" path "$out" + ]; + + rustSimpleBin = { + name, + dependencies ? [], + ... 
+ }@args: src: pkgs.buildRustCrate ({ + pname = name; + version = "1.0.0"; + crateName = name; + crateBin = [ name ]; + dependencies = dependencies; + src = pkgs.runCommandLocal "write-main.rs" { + src = src; + passAsFile = [ "src" ]; + } '' + mkdir -p $out/src/bin + cp "$srcPath" $out/src/bin/${name}.rs + find $out + ''; + } // args); + + rustSimpleLib = { + name, + dependencies ? [], + ... + }@args: src: pkgs.buildRustCrate ({ + pname = name; + version = "1.0.0"; + crateName = name; + dependencies = dependencies; + src = pkgs.runCommandLocal "write-lib.rs" { + src = src; + passAsFile = [ "src" ]; + } '' + mkdir -p $out/src + cp "$srcPath" $out/src/lib.rs + find $out + ''; + } // args); + + /* Takes a `buildRustCrate` derivation as an input, + * builds it with `{ buildTests = true; }` and runs + * all tests found in its `tests` dir. If they are + * all successful, `$out` will point to the crate + * built with `{ buildTests = false; }`, otherwise + * it will fail to build. + * + * See also `nix.drvSeqL` which is used to implement + * this behavior. + */ + testRustSimple = rustDrv: + let + crate = buildTests: rustDrv.override { inherit buildTests; }; + tests = depot.nix.runExecline.local "${rustDrv.name}-tests-run" {} [ + "importas" "out" "out" + "if" [ + "pipeline" [ bins.ls "${crate true}/tests" ] + "forstdin" "test" + "importas" "test" "test" + "${crate true}/tests/$test" + ] + bins.touch "$out" + ]; + in drvSeqL [ tests ] (crate false); + + + tests = import ./tests.nix { + inherit + depot + pkgs + python3 + python3Lib + rustSimpleLib + rustSimple + testRustSimple + ; + }; + +in { + inherit + python3 + python3Lib + rustSimple + rustSimpleBin + rustSimpleLib + testRustSimple + tests + ; +} diff --git a/users/Profpatsch/writers/tests.nix b/users/Profpatsch/writers/tests.nix new file mode 100644 index 000000000000..680c37a2ec02 --- /dev/null +++ b/users/Profpatsch/writers/tests.nix @@ -0,0 +1,85 @@ +{ depot, pkgs, python3, python3Lib, rustSimpleLib, rustSimple, testRustSimple }: + +let + run = drv: depot.nix.runExecline.local "run-${drv.name}" {} [ + "if" [ drv ] + "importas" "out" "out" + "${pkgs.coreutils}/bin/touch" "$out" + ]; + + pythonTransitiveLib = python3Lib { + name = "transitive"; + } '' + def transitive(s): + return s + " 1 2 3" + ''; + + pythonTestLib = python3Lib { + name = "test_lib"; + libraries = _: [ pythonTransitiveLib ]; + } '' + import transitive + def test(): + return transitive.transitive("test") + ''; + + pythonWithLib = run (python3 { + name = "python-with-lib"; + libraries = _: [ pythonTestLib ]; + } '' + import test_lib + + assert(test_lib.test() == "test 1 2 3") + ''); + + + rustTransitiveLib = testRustSimple (rustSimpleLib { + name = "transitive"; + } '' + pub fn transitive(s: &str) -> String { + let mut new = s.to_string(); + new.push_str(" 1 2 3"); + new + } + + #[cfg(test)] + mod tests { + use super::*; + + #[test] + fn test_transitive() { + assert_eq!(transitive("foo").as_str(), "foo 1 2 3") + } + } + ''); + + rustTestLib = rustSimpleLib { + name = "test_lib"; + dependencies = [ rustTransitiveLib ]; + } '' + extern crate transitive; + use transitive::{transitive}; + pub fn test() -> String { + transitive("test") + } + ''; + + rustWithLib = run (rustSimple { + name = "rust-with-lib"; + dependencies = [ rustTestLib ]; + } '' + extern crate test_lib; + + fn main() { + assert_eq!(test_lib::test(), String::from("test 1 2 3")); + } + ''); + + +in { + inherit + pythonWithLib + rustTransitiveLib + rustWithLib + ; +} diff --git a/users/cynthia/OWNERS 
b/users/cynthia/OWNERS new file mode 100644 index 000000000000..da62f3777af0 --- /dev/null +++ b/users/cynthia/OWNERS @@ -0,0 +1,3 @@ +inherited: false +owners: + - cynthia diff --git a/users/cynthia/keys.nix b/users/cynthia/keys.nix new file mode 100644 index 000000000000..bac8dc1c57ae --- /dev/null +++ b/users/cynthia/keys.nix @@ -0,0 +1,7 @@ +{ ... }: + +{ + all = [ + "cert-authority ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAICsj3W6QczgxE3s5GGT8qg0aLrCM+QeRnSq9RkiZtKvz meow" + ]; +} \ No newline at end of file diff --git a/users/edef/OWNERS b/users/edef/OWNERS new file mode 100644 index 000000000000..05f7639c8947 --- /dev/null +++ b/users/edef/OWNERS @@ -0,0 +1,3 @@ +inherited: false +owners: + - edef diff --git a/users/edef/depot-scan/default.nix b/users/edef/depot-scan/default.nix new file mode 100644 index 000000000000..a9c0f382ff45 --- /dev/null +++ b/users/edef/depot-scan/default.nix @@ -0,0 +1,12 @@ +{ pkgs, ... }: + +pkgs.writeShellScriptBin "depot-scan" '' + set -euo pipefail + + path="''${1:-$(git rev-parse --show-prefix)}" + path="''${path%%/}" + attr="''${path//\//.}" + root="$(git rev-parse --show-toplevel)" + echo "scanning //$path" >&2 + nix-instantiate -E "import ${./wrap.nix} $root" -A "$attr" -vv 2> >(${pkgs.perl}/bin/perl ${./depot-scan.pl}) >&2 +'' diff --git a/users/edef/depot-scan/depot-scan.pl b/users/edef/depot-scan/depot-scan.pl new file mode 100755 index 000000000000..8808e2eb0023 --- /dev/null +++ b/users/edef/depot-scan/depot-scan.pl @@ -0,0 +1,11 @@ +#! /usr/bin/env -S perl -ln +use strict; + +if (/^evaluating file '(.*)'$/ or + /^copied source '(.*)' -> '.*'$/ or + /^trace: depot-scan '(.*)'$/) { + print $1; + next; +} + +print STDERR unless /^instantiated '.*' -> '.*'$/; diff --git a/users/edef/depot-scan/wrap.nix b/users/edef/depot-scan/wrap.nix new file mode 100644 index 000000000000..dcb557a24b1e --- /dev/null +++ b/users/edef/depot-scan/wrap.nix @@ -0,0 +1,15 @@ +# this wraps import to override readFile and readDir to trace the files it touches +# technique inspired by lorri +let + + global = { + import = global.scopedImport {}; + scopedImport = x: builtins.scopedImport (global // x); + builtins = builtins // { + inherit (global) import scopedImport; + readFile = path: builtins.trace "depot-scan '${toString path}'" (builtins.readFile path); + readDir = path: builtins.trace "depot-scan '${toString path}'" (builtins.readDir path); + }; + }; + +in global.import diff --git a/users/edef/keys.nix b/users/edef/keys.nix new file mode 100644 index 000000000000..53e88c9e7345 --- /dev/null +++ b/users/edef/keys.nix @@ -0,0 +1,7 @@ +{ ... 
}: + +{ + all = [ + "cert-authority ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCvb/7ojfcbKvHIyjnrNUOOgzy44tCkgXY9HLuyFta1jQOE9pFIK19B4dR9bOglPKf145CCL0mSFJNNqmNwwavU2uRn+TQrW+U1dQAk8Gt+gh3O49YE854hwwyMU+xD6bIuUdfxPr+r5al/Ov5Km28ZMlHOs3FoAP0hInK+eAibioxL5rVJOtgicrOVCkGoXEgnuG+LRbOYTwzdClhRUxiPjK8alCbcJQ53AeZHO4G6w9wTr+W5ILCfvW4OmUXCX01sKzaBiQuuFCF6M/H4LlnsPWLMra2twXxkOIhZblwC+lncps9lQaUgiD4koZeOCORvHW00G0L39ilFbbnVcL6Itp/m8RRWm/xRxS4RMnsdV/AhvpRLrhL3lfQ7E2oCeSM36v1S9rdg6a47zcnpL+ahG76Gz39Y7KmVRQciNx7ezbwxj3Q5lZtFykgdfGIAN+bT8ijXMO6m68g60i9Bz4IoMZGkiJGqMYLTxMQ+oRgR3Ro5lbj7E11YBHyeimoBYXYGHMkiuxopQZ7lIj3plxIzhmUlXJBA4jMw9KGHdYaLhaicIYhvQmCTAjrkt2HvxEe6lU8iws2Qv+pB6tAGundN36RVVWAckeQPZ4ZsgDP8V2FfibZ1nsrQ+zBKqaslYMAHs01Cf0Hm0PnCqagf230xaobu0iooNuXx44QKoDnB+w== openpgp:0x803010E7" + ]; +} diff --git a/users/ericvolp12/OWNERS b/users/ericvolp12/OWNERS new file mode 100644 index 000000000000..5a012a695bf3 --- /dev/null +++ b/users/ericvolp12/OWNERS @@ -0,0 +1,3 @@ +inherited: false +owners: + - ericvolp12 diff --git a/users/eta/OWNERS b/users/eta/OWNERS new file mode 100644 index 000000000000..f212e89e2ae9 --- /dev/null +++ b/users/eta/OWNERS @@ -0,0 +1,3 @@ +inherited: false +owners: + - eta diff --git a/users/eta/keys.nix b/users/eta/keys.nix new file mode 100644 index 000000000000..8bda50f89261 --- /dev/null +++ b/users/eta/keys.nix @@ -0,0 +1,11 @@ +{ ... }: + +let + keys = { + whitby = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQD6E1wuWaXQARNoLnmlOJndwI7/ms3Ga7MJxsUvFtaSiy3g8h/hz4WgyR7YT+hUYjFihh/YkGS9Zy9aEqAa5zBGLcZtgj1O0qOl2joynm679zdlcwAart74fXSJYYupT9tFeXXeWLO1g054lVJ5xZ9KLpBBk+6yzlmmm5KuoitKBqBbadzsqAeKhNn1Nq9ITPU4vxTFk+sXp/nxk/JoUOM8S2N4YuoX9OVenDHKh9DtOcvDZhlosGmunO33/YaU2XB95ZE6cNhEtVlkbyR3a2SsAYz1qGgfH0HSyoK3LJoAM4Aiz99ktuKiI/zMy4k4TV00OCi1sCPEjzUoijZRZt5FMH/TVr9dJROVjHcL9g9//fW3jwqojf7uuJFlTJb47RxjTk4Jb4F6K7HhOs7bgh3WuOjvhyRYbCYcg+RfnwjJk+hfM5GcjZ8J4UZdNc5LyIcfH8W1v9DADBCgz7QcmfrfMloYtEgjK/5XVrtBtiMtUOgpfKujawF55d1Vj26+CxeID8NHMXzZYEMeyRpi/WXlC+lq1Wx4Fj8gvideOw/3gAdj2G3SJWdSPk8XpIFQ1fm3tXB0ltyV5TszIJhfMnmsKJeEm3YlTCR1sMW7nr3wEdMqa6mpcWZTWU+dppmAGr2c+OGSnXkCi7Z2h/YJE6X+izrOrqRspG2fCM8GlfRFWw== cardno:000607469311"; + }; + configs = { + whitby = [ keys.whitby ]; + }; +in + configs diff --git a/users/firefly/OWNERS b/users/firefly/OWNERS new file mode 100644 index 000000000000..55d62a5723a5 --- /dev/null +++ b/users/firefly/OWNERS @@ -0,0 +1,3 @@ +inherited: false +owners: + - firefly diff --git a/users/firefly/keys.nix b/users/firefly/keys.nix new file mode 100644 index 000000000000..1d7467a0747c --- /dev/null +++ b/users/firefly/keys.nix @@ -0,0 +1,7 @@ +{ ... 
}: + +rec { + as = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIN9i8fs10/BjNEqFXD+3fQeQ0SuHnQx4WpuqUg4caeed firefly@as"; + + whitby = [ as ]; +} diff --git a/users/glittershark/OWNERS b/users/glittershark/OWNERS new file mode 100644 index 000000000000..67e9015c8bd5 --- /dev/null +++ b/users/glittershark/OWNERS @@ -0,0 +1,3 @@ +inherited: false +owners: + - glittershark diff --git a/users/glittershark/achilles/.envrc b/users/glittershark/achilles/.envrc new file mode 100644 index 000000000000..051d09d292a8 --- /dev/null +++ b/users/glittershark/achilles/.envrc @@ -0,0 +1 @@ +eval "$(lorri direnv)" diff --git a/users/glittershark/achilles/.gitignore b/users/glittershark/achilles/.gitignore new file mode 100644 index 000000000000..ea8c4bf7f35f --- /dev/null +++ b/users/glittershark/achilles/.gitignore @@ -0,0 +1 @@ +/target diff --git a/users/glittershark/achilles/Cargo.lock b/users/glittershark/achilles/Cargo.lock new file mode 100644 index 000000000000..0c5779135a5f --- /dev/null +++ b/users/glittershark/achilles/Cargo.lock @@ -0,0 +1,814 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +[[package]] +name = "achilles" +version = "0.1.0" +dependencies = [ + "anyhow", + "bimap", + "clap", + "crate-root", + "derive_more", + "inkwell", + "itertools", + "lazy_static", + "llvm-sys", + "nom", + "nom-trace", + "pratt", + "proptest", + "test-strategy", + "thiserror", +] + +[[package]] +name = "aho-corasick" +version = "0.7.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7404febffaa47dac81aa44dba71523c9d069b1bdc50a77db41195149e17f68e5" +dependencies = [ + "memchr", +] + +[[package]] +name = "anyhow" +version = "1.0.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "afddf7f520a80dbf76e6f50a35bca42a2331ef227a28b3b6dc5c2e2338d114b1" + +[[package]] +name = "arrayvec" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b" + +[[package]] +name = "atty" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" +dependencies = [ + "hermit-abi", + "libc", + "winapi", +] + +[[package]] +name = "autocfg" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" + +[[package]] +name = "bimap" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f92b72b8f03128773278bf74418b9205f3d2a12c39a61f92395f47af390c32bf" + +[[package]] +name = "bit-set" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e11e16035ea35e4e5997b393eacbf6f63983188f7a2ad25bfb13465f5ad59de" +dependencies = [ + "bit-vec", +] + +[[package]] +name = "bit-vec" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" + +[[package]] +name = "bitflags" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693" + +[[package]] +name = "bitvec" +version = "0.19.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8942c8d352ae1838c9dda0b0ca2ab657696ef2232a20147cf1b30ae1a9cb4321" +dependencies = [ + "funty", + 
"radium", + "tap", + "wyz", +] + +[[package]] +name = "byteorder" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae44d1a3d5a19df61dd0c8beb138458ac2a53a7ac09eba97d55592540004306b" + +[[package]] +name = "cc" +version = "1.0.67" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3c69b077ad434294d3ce9f1f6143a2a4b89a8a2d54ef813d85003a4fd1137fd" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "clap" +version = "3.0.0-beta.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bd1061998a501ee7d4b6d449020df3266ca3124b941ec56cf2005c3779ca142" +dependencies = [ + "atty", + "bitflags", + "clap_derive", + "indexmap", + "lazy_static", + "os_str_bytes", + "strsim", + "termcolor", + "textwrap", + "unicode-width", + "vec_map", +] + +[[package]] +name = "clap_derive" +version = "3.0.0-beta.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "370f715b81112975b1b69db93e0b56ea4cd4e5002ac43b2da8474106a54096a1" +dependencies = [ + "heck", + "proc-macro-error", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "crate-root" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59c6fe4622b269032d2c5140a592d67a9c409031d286174fcde172fbed86f0d3" + +[[package]] +name = "derive_more" +version = "0.99.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41cb0e6161ad61ed084a36ba71fbba9e3ac5aee3606fb607fe08da6acbcf3d8c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "either" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "funty" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fed34cd105917e91daa4da6b3728c47b068749d6a62c59811f06ed2ac71d9da7" + +[[package]] +name = "getrandom" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9495705279e7140bf035dde1f6e750c162df8b625267cd52cc44e0b156732c8" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "hashbrown" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7afe4a420e3fe79967a00898cc1f4db7c8a49a9333a29f8a4bd76a253d5cd04" + +[[package]] +name = "heck" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87cbf45460356b7deeb5e3415b5563308c0a9b057c85e12b06ad551f98d0a6ac" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "hermit-abi" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "322f4de77956e22ed0e5032c359a0f1273f1f7f0d79bfa3b8ffbc730d7fbcc5c" +dependencies = [ + "libc", +] + +[[package]] +name = "indexmap" +version = "1.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "824845a0bf897a9042383849b02c1bc219c2383772efcd5c6f9766fa4b81aef3" +dependencies = [ + "autocfg", + "hashbrown", +] + +[[package]] +name = "inkwell" 
+version = "0.1.0" +source = "git+https://github.com/TheDan64/inkwell?branch=master#a2db15b0bd1c06d71763585ae10d9ea4e775da0c" +dependencies = [ + "either", + "inkwell_internals", + "libc", + "llvm-sys", + "once_cell", + "parking_lot", + "regex", +] + +[[package]] +name = "inkwell_internals" +version = "0.3.0" +source = "git+https://github.com/TheDan64/inkwell?branch=master#a2db15b0bd1c06d71763585ae10d9ea4e775da0c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "instant" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61124eeebbd69b8190558df225adf7e4caafce0d743919e5d6b19652314ec5ec" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "itertools" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37d572918e350e82412fe766d24b15e6682fb2ed2bbe018280caa810397cb319" +dependencies = [ + "either", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "lexical-core" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21f866863575d0e1d654fbeeabdc927292fdf862873dc3c96c6f753357e13374" +dependencies = [ + "arrayvec", + "bitflags", + "cfg-if", + "ryu", + "static_assertions", +] + +[[package]] +name = "libc" +version = "0.2.88" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03b07a082330a35e43f63177cc01689da34fbffa0105e1246cf0311472cac73a" + +[[package]] +name = "llvm-sys" +version = "110.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21ede189444b8c78907e5d36da5dabcf153170fcff9c1dba48afc4b33c7e19f0" +dependencies = [ + "cc", + "lazy_static", + "libc", + "regex", + "semver", +] + +[[package]] +name = "lock_api" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd96ffd135b2fd7b973ac026d28085defbe8983df057ced3eb4f2130b0831312" +dependencies = [ + "scopeguard", +] + +[[package]] +name = "memchr" +version = "2.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ee1c47aaa256ecabcaea351eae4a9b01ef39ed810004e298d2511ed284b1525" + +[[package]] +name = "nom" +version = "6.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7413f999671bd4745a7b624bd370a569fb6bc574b23c83a3c5ed2e453f3d5e2" +dependencies = [ + "bitvec", + "funty", + "lexical-core", + "memchr", + "version_check", +] + +[[package]] +name = "nom-trace" +version = "0.2.1" +source = "git+https://github.com/glittershark/nom-trace?branch=nom-6#6168d2e15cc51efd12d80260159b76a764dba138" +dependencies = [ + "nom", +] + +[[package]] +name = "num-traits" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290" +dependencies = [ + "autocfg", +] + +[[package]] +name = "once_cell" +version = "1.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af8b08b04175473088b46763e51ee54da5f9a164bc162f615b91bc179dbf15a3" + +[[package]] +name = "os_str_bytes" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "afb2e1c3ee07430c2cf76151675e583e0f19985fa6efae47d6848a3e2c824f85" + +[[package]] +name = "parking_lot" +version = "0.11.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d7744ac029df22dca6284efe4e898991d28e3085c706c972bcd7da4a27a15eb" +dependencies = [ + "instant", + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa7a782938e745763fe6907fc6ba86946d72f49fe7e21de074e08128a99fb018" +dependencies = [ + "cfg-if", + "instant", + "libc", + "redox_syscall", + "smallvec", + "winapi", +] + +[[package]] +name = "pest" +version = "2.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10f4872ae94d7b90ae48754df22fd42ad52ce740b8f370b03da4835417403e53" +dependencies = [ + "ucd-trie", +] + +[[package]] +name = "ppv-lite86" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857" + +[[package]] +name = "pratt" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e31bbc12f7936a7b195790dd6d9b982b66c54f45ff6766decf25c44cac302dce" + +[[package]] +name = "proc-macro-error" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" +dependencies = [ + "proc-macro-error-attr", + "proc-macro2", + "quote", + "syn", + "version_check", +] + +[[package]] +name = "proc-macro-error-attr" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" +dependencies = [ + "proc-macro2", + "quote", + "version_check", +] + +[[package]] +name = "proc-macro2" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71" +dependencies = [ + "unicode-xid", +] + +[[package]] +name = "proptest" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e0d9cc07f18492d879586c92b485def06bc850da3118075cd45d50e9c95b0e5" +dependencies = [ + "bit-set", + "bitflags", + "byteorder", + "lazy_static", + "num-traits", + "quick-error 2.0.0", + "rand", + "rand_chacha", + "rand_xorshift", + "regex-syntax", + "rusty-fork", + "tempfile", +] + +[[package]] +name = "quick-error" +version = "1.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" + +[[package]] +name = "quick-error" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ac73b1112776fc109b2e61909bc46c7e1bf0d7f690ffb1676553acce16d5cda" + +[[package]] +name = "quote" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3d0b9745dc2debf507c8422de05d7226cc1f0644216dfdfead988f9b1ab32a7" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "radium" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "941ba9d78d8e2f7ce474c015eea4d9c6d25b6a3327f9832ee29a4de27f91bbb8" + +[[package]] +name = "rand" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ef9e7e66b4468674bfcb0c81af8b7fa0bb154fa9f28eb840da5c447baeb8d7e" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", + "rand_hc", +] + +[[package]] +name = "rand_chacha" +version = "0.3.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "e12735cf05c9e10bf21534da50a147b924d555dc7a547c42e6bb2d5b6017ae0d" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34cf66eb183df1c5876e2dcf6b13d57340741e8dc255b48e40a26de954d06ae7" +dependencies = [ + "getrandom", +] + +[[package]] +name = "rand_hc" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3190ef7066a446f2e7f42e239d161e905420ccab01eb967c9eb27d21b2322a73" +dependencies = [ + "rand_core", +] + +[[package]] +name = "rand_xorshift" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f" +dependencies = [ + "rand_core", +] + +[[package]] +name = "redox_syscall" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94341e4e44e24f6b591b59e47a8a027df12e008d73fd5672dbea9cc22f4507d9" +dependencies = [ + "bitflags", +] + +[[package]] +name = "regex" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9251239e129e16308e70d853559389de218ac275b515068abc96829d05b948a" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", + "thread_local", +] + +[[package]] +name = "regex-syntax" +version = "0.6.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5eb417147ba9860a96cfe72a0b93bf88fee1744b5636ec99ab20c1aa9376581" + +[[package]] +name = "remove_dir_all" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" +dependencies = [ + "winapi", +] + +[[package]] +name = "rusty-fork" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb3dcc6e454c328bb824492db107ab7c0ae8fcffe4ad210136ef014458c1bc4f" +dependencies = [ + "fnv", + "quick-error 1.2.3", + "tempfile", + "wait-timeout", +] + +[[package]] +name = "ryu" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e" + +[[package]] +name = "scopeguard" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" + +[[package]] +name = "semver" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f301af10236f6df4160f7c3f04eec6dbc70ace82d23326abad5edee88801c6b6" +dependencies = [ + "semver-parser", +] + +[[package]] +name = "semver-parser" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0bef5b7f9e0df16536d3961cfb6e84331c065b4066afb39768d0e319411f7" +dependencies = [ + "pest", +] + +[[package]] +name = "smallvec" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe0f37c9e8f3c5a4a66ad655a93c74daac4ad00c441533bf5c6e7990bb42604e" + +[[package]] +name = "static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + +[[package]] +name = "strsim" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" + +[[package]] +name = "syn" +version = "1.0.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed22b90a0e734a23a7610f4283ac9e5acfb96cbb30dfefa540d66f866f1c09c5" +dependencies = [ + "proc-macro2", + "quote", + "unicode-xid", +] + +[[package]] +name = "tap" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" + +[[package]] +name = "tempfile" +version = "3.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dac1c663cfc93810f88aed9b8941d48cabf856a1b111c29a40439018d870eb22" +dependencies = [ + "cfg-if", + "libc", + "rand", + "redox_syscall", + "remove_dir_all", + "winapi", +] + +[[package]] +name = "termcolor" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dfed899f0eb03f32ee8c6a0aabdb8a7949659e3466561fc0adf54e26d88c5f4" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "test-strategy" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2328963c69243416e811c88066d18f670792b2e36e17fa57f4b1a124f85d18a8" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "textwrap" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "203008d98caf094106cfaba70acfed15e18ed3ddb7d94e49baec153a2b462789" +dependencies = [ + "unicode-width", +] + +[[package]] +name = "thiserror" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0f4a65597094d4483ddaed134f409b2cb7c1beccf25201a9f73c719254fa98e" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7765189610d8241a44529806d6fd1f2e0a08734313a35d5b3a556f92b381f3c0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "thread_local" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8018d24e04c95ac8790716a5987d0fec4f8b27249ffa0f7d33f1369bdfb88cbd" +dependencies = [ + "once_cell", +] + +[[package]] +name = "ucd-trie" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56dee185309b50d1f11bfedef0fe6d036842e3fb77413abef29f8f8d1c5d4c1c" + +[[package]] +name = "unicode-segmentation" +version = "1.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb0d2e7be6ae3a5fa87eed5fb451aff96f2573d2694942e40543ae0bbe19c796" + +[[package]] +name = "unicode-width" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9337591893a19b88d8d87f2cec1e73fad5cdfd10e5a6f349f498ad6ea2ffb1e3" + +[[package]] +name = "unicode-xid" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564" + +[[package]] +name = "vec_map" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" + +[[package]] +name = "version_check" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5a972e5669d67ba988ce3dc826706fb0a8b01471c088cb0b6110b805cc36aed" + +[[package]] +name = "wait-timeout" 
+version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6" +dependencies = [ + "libc", +] + +[[package]] +name = "wasi" +version = "0.10.2+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +dependencies = [ + "winapi", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "wyz" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85e60b0d1b5f99db2556934e21937020776a5d31520bf169e851ac44e6420214" diff --git a/users/glittershark/achilles/Cargo.toml b/users/glittershark/achilles/Cargo.toml new file mode 100644 index 000000000000..c0ba4d137a9f --- /dev/null +++ b/users/glittershark/achilles/Cargo.toml @@ -0,0 +1,24 @@ +[package] +name = "achilles" +version = "0.1.0" +authors = ["Griffin Smith <root@gws.fyi>"] +edition = "2018" + +[dependencies] +anyhow = "1.0.38" +bimap = "0.6.0" +clap = "3.0.0-beta.2" +derive_more = "0.99.11" +inkwell = { git = "https://github.com/TheDan64/inkwell", branch = "master", features = ["llvm11-0"] } +itertools = "0.10.0" +lazy_static = "1.4.0" +llvm-sys = "110.0.1" +nom = "6.1.2" +nom-trace = { git = "https://github.com/glittershark/nom-trace", branch = "nom-6" } +pratt = "0.3.0" +proptest = "1.0.0" +test-strategy = "0.1.1" +thiserror = "1.0.24" + +[dev-dependencies] +crate-root = "0.1.3" diff --git a/users/glittershark/achilles/ach/.gitignore b/users/glittershark/achilles/ach/.gitignore new file mode 100644 index 000000000000..683a53a01f6c --- /dev/null +++ b/users/glittershark/achilles/ach/.gitignore @@ -0,0 +1,5 @@ +*.ll +*.o + +functions +simple diff --git a/users/glittershark/achilles/ach/Makefile b/users/glittershark/achilles/ach/Makefile new file mode 100644 index 000000000000..3a8cd2865e87 --- /dev/null +++ b/users/glittershark/achilles/ach/Makefile @@ -0,0 +1,15 @@ +default: simple + +%.ll: %.ach + cargo run -- compile $< -o $@ -f llvm + +%.o: %.ll + llc $< -o $@ -filetype=obj + +%: %.o + clang $< -o $@ + +.PHONY: clean + +clean: + @rm -f *.ll *.o simple functions diff --git a/users/glittershark/achilles/ach/functions.ach b/users/glittershark/achilles/ach/functions.ach new file mode 100644 index 000000000000..0d2f07eff574 --- /dev/null +++ b/users/glittershark/achilles/ach/functions.ach @@ -0,0 +1,3 @@ +fn id x = x +fn plus (x: int) (y: int) = x + y +fn main = plus (id 2) 7 diff --git a/users/glittershark/achilles/ach/simple.ach b/users/glittershark/achilles/ach/simple.ach new file mode 100644 index 
000000000000..20f1677235c0 --- /dev/null +++ b/users/glittershark/achilles/ach/simple.ach @@ -0,0 +1 @@ +fn main = let x = 2; y = 3 in x + y diff --git a/users/glittershark/achilles/default.nix b/users/glittershark/achilles/default.nix new file mode 100644 index 000000000000..4a72bac2bac9 --- /dev/null +++ b/users/glittershark/achilles/default.nix @@ -0,0 +1,3 @@ +# TODO(glittershark): Write the actual default.nix + +_: "nothing to see yet" diff --git a/users/glittershark/achilles/shell.nix b/users/glittershark/achilles/shell.nix new file mode 100644 index 000000000000..cdf74db415ca --- /dev/null +++ b/users/glittershark/achilles/shell.nix @@ -0,0 +1,20 @@ +with import (builtins.fetchTarball { + url = "https://github.com/nixos/nixpkgs/archive/93a812bb9f9c398bd5b9636ab3674dcfe8cfb884.tar.gz"; + sha256 = "14zzsgnigd7vjbrpzm1s4qsknm73sci38ss00x96wamz6psaxyah"; +}) {}; + +mkShell { + buildInputs = [ + clang_11 + llvm_11.lib + llvmPackages_11.bintools + llvmPackages_11.clang + llvmPackages_11.libclang.lib + zlib + ncurses + libxml2 + libffi + pkg-config + ]; + +} diff --git a/users/glittershark/achilles/src/ast/hir.rs b/users/glittershark/achilles/src/ast/hir.rs new file mode 100644 index 000000000000..6859174a2dd0 --- /dev/null +++ b/users/glittershark/achilles/src/ast/hir.rs @@ -0,0 +1,252 @@ +use itertools::Itertools; + +use super::{BinaryOperator, Ident, Literal, UnaryOperator}; + +#[derive(Debug, PartialEq, Eq, Clone)] +pub struct Binding<'a, T> { + pub ident: Ident<'a>, + pub type_: T, + pub body: Expr<'a, T>, +} + +impl<'a, T> Binding<'a, T> { + fn to_owned(&self) -> Binding<'static, T> + where + T: Clone, + { + Binding { + ident: self.ident.to_owned(), + type_: self.type_.clone(), + body: self.body.to_owned(), + } + } +} + +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum Expr<'a, T> { + Ident(Ident<'a>, T), + + Literal(Literal<'a>, T), + + UnaryOp { + op: UnaryOperator, + rhs: Box<Expr<'a, T>>, + type_: T, + }, + + BinaryOp { + lhs: Box<Expr<'a, T>>, + op: BinaryOperator, + rhs: Box<Expr<'a, T>>, + type_: T, + }, + + Let { + bindings: Vec<Binding<'a, T>>, + body: Box<Expr<'a, T>>, + type_: T, + }, + + If { + condition: Box<Expr<'a, T>>, + then: Box<Expr<'a, T>>, + else_: Box<Expr<'a, T>>, + type_: T, + }, + + Fun { + args: Vec<(Ident<'a>, T)>, + body: Box<Expr<'a, T>>, + type_: T, + }, + + Call { + fun: Box<Expr<'a, T>>, + args: Vec<Expr<'a, T>>, + type_: T, + }, +} + +impl<'a, T> Expr<'a, T> { + pub fn type_(&self) -> &T { + match self { + Expr::Ident(_, t) => t, + Expr::Literal(_, t) => t, + Expr::UnaryOp { type_, .. } => type_, + Expr::BinaryOp { type_, .. } => type_, + Expr::Let { type_, .. } => type_, + Expr::If { type_, .. } => type_, + Expr::Fun { type_, .. } => type_, + Expr::Call { type_, .. 
} => type_, + } + } + + pub fn traverse_type<F, U, E>(self, f: F) -> Result<Expr<'a, U>, E> + where + F: Fn(T) -> Result<U, E> + Clone, + { + match self { + Expr::Ident(id, t) => Ok(Expr::Ident(id, f(t)?)), + Expr::Literal(lit, t) => Ok(Expr::Literal(lit, f(t)?)), + Expr::UnaryOp { op, rhs, type_ } => Ok(Expr::UnaryOp { + op, + rhs: Box::new(rhs.traverse_type(f.clone())?), + type_: f(type_)?, + }), + Expr::BinaryOp { + lhs, + op, + rhs, + type_, + } => Ok(Expr::BinaryOp { + lhs: Box::new(lhs.traverse_type(f.clone())?), + op, + rhs: Box::new(rhs.traverse_type(f.clone())?), + type_: f(type_)?, + }), + Expr::Let { + bindings, + body, + type_, + } => Ok(Expr::Let { + bindings: bindings + .into_iter() + .map(|Binding { ident, type_, body }| { + Ok(Binding { + ident, + type_: f(type_)?, + body: body.traverse_type(f.clone())?, + }) + }) + .collect::<Result<Vec<_>, E>>()?, + body: Box::new(body.traverse_type(f.clone())?), + type_: f(type_)?, + }), + Expr::If { + condition, + then, + else_, + type_, + } => Ok(Expr::If { + condition: Box::new(condition.traverse_type(f.clone())?), + then: Box::new(then.traverse_type(f.clone())?), + else_: Box::new(else_.traverse_type(f.clone())?), + type_: f(type_)?, + }), + Expr::Fun { args, body, type_ } => Ok(Expr::Fun { + args: args + .into_iter() + .map(|(id, t)| Ok((id, f.clone()(t)?))) + .collect::<Result<Vec<_>, E>>()?, + body: Box::new(body.traverse_type(f.clone())?), + type_: f(type_)?, + }), + Expr::Call { fun, args, type_ } => Ok(Expr::Call { + fun: Box::new(fun.traverse_type(f.clone())?), + args: args + .into_iter() + .map(|e| e.traverse_type(f.clone())) + .collect::<Result<Vec<_>, E>>()?, + type_: f(type_)?, + }), + } + } + + pub fn to_owned(&self) -> Expr<'static, T> + where + T: Clone, + { + match self { + Expr::Ident(id, t) => Expr::Ident(id.to_owned(), t.clone()), + Expr::Literal(lit, t) => Expr::Literal(lit.to_owned(), t.clone()), + Expr::UnaryOp { op, rhs, type_ } => Expr::UnaryOp { + op: *op, + rhs: Box::new((**rhs).to_owned()), + type_: type_.clone(), + }, + Expr::BinaryOp { + lhs, + op, + rhs, + type_, + } => Expr::BinaryOp { + lhs: Box::new((**lhs).to_owned()), + op: *op, + rhs: Box::new((**rhs).to_owned()), + type_: type_.clone(), + }, + Expr::Let { + bindings, + body, + type_, + } => Expr::Let { + bindings: bindings.into_iter().map(|b| b.to_owned()).collect(), + body: Box::new((**body).to_owned()), + type_: type_.clone(), + }, + Expr::If { + condition, + then, + else_, + type_, + } => Expr::If { + condition: Box::new((**condition).to_owned()), + then: Box::new((**then).to_owned()), + else_: Box::new((**else_).to_owned()), + type_: type_.clone(), + }, + Expr::Fun { args, body, type_ } => Expr::Fun { + args: args + .into_iter() + .map(|(id, t)| (id.to_owned(), t.clone())) + .collect(), + body: Box::new((**body).to_owned()), + type_: type_.clone(), + }, + Expr::Call { fun, args, type_ } => Expr::Call { + fun: Box::new((**fun).to_owned()), + args: args.into_iter().map(|e| e.to_owned()).collect(), + type_: type_.clone(), + }, + } + } +} + +pub enum Decl<'a, T> { + Fun { + name: Ident<'a>, + args: Vec<(Ident<'a>, T)>, + body: Box<Expr<'a, T>>, + type_: T, + }, +} + +impl<'a, T> Decl<'a, T> { + pub fn type_(&self) -> &T { + match self { + Decl::Fun { type_, .. 
} => type_, + } + } + + pub fn traverse_type<F, U, E>(self, f: F) -> Result<Decl<'a, U>, E> + where + F: Fn(T) -> Result<U, E> + Clone, + { + match self { + Decl::Fun { + name, + args, + body, + type_, + } => Ok(Decl::Fun { + name, + args: args + .into_iter() + .map(|(id, t)| Ok((id, f(t)?))) + .try_collect()?, + body: Box::new(body.traverse_type(f.clone())?), + type_: f(type_)?, + }), + } + } +} diff --git a/users/glittershark/achilles/src/ast/mod.rs b/users/glittershark/achilles/src/ast/mod.rs new file mode 100644 index 000000000000..3a2261aeda23 --- /dev/null +++ b/users/glittershark/achilles/src/ast/mod.rs @@ -0,0 +1,410 @@ +pub(crate) mod hir; + +use std::borrow::Cow; +use std::collections::HashMap; +use std::convert::TryFrom; +use std::fmt::{self, Display, Formatter}; + +use itertools::Itertools; + +#[derive(Debug, PartialEq, Eq)] +pub struct InvalidIdentifier<'a>(Cow<'a, str>); + +#[derive(Debug, PartialEq, Eq, Hash, Clone)] +pub struct Ident<'a>(pub Cow<'a, str>); + +impl<'a> From<&'a Ident<'a>> for &'a str { + fn from(id: &'a Ident<'a>) -> Self { + id.0.as_ref() + } +} + +impl<'a> Display for Ident<'a> { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} + +impl<'a> Ident<'a> { + pub fn to_owned(&self) -> Ident<'static> { + Ident(Cow::Owned(self.0.clone().into_owned())) + } + + pub fn from_str_unchecked(s: &'a str) -> Self { + debug_assert!(is_valid_identifier(s)); + Self(Cow::Borrowed(s)) + } + + pub fn from_string_unchecked(s: String) -> Self { + debug_assert!(is_valid_identifier(&s)); + Self(Cow::Owned(s)) + } +} + +pub fn is_valid_identifier<S>(s: &S) -> bool +where + S: AsRef<str> + ?Sized, +{ + s.as_ref() + .chars() + .any(|c| !c.is_alphanumeric() || !"_".contains(c)) +} + +impl<'a> TryFrom<&'a str> for Ident<'a> { + type Error = InvalidIdentifier<'a>; + + fn try_from(s: &'a str) -> Result<Self, Self::Error> { + if is_valid_identifier(s) { + Ok(Ident(Cow::Borrowed(s))) + } else { + Err(InvalidIdentifier(Cow::Borrowed(s))) + } + } +} + +impl<'a> TryFrom<String> for Ident<'a> { + type Error = InvalidIdentifier<'static>; + + fn try_from(s: String) -> Result<Self, Self::Error> { + if is_valid_identifier(&s) { + Ok(Ident(Cow::Owned(s))) + } else { + Err(InvalidIdentifier(Cow::Owned(s))) + } + } +} + +#[derive(Debug, PartialEq, Eq, Copy, Clone)] +pub enum BinaryOperator { + /// `+` + Add, + + /// `-` + Sub, + + /// `*` + Mul, + + /// `/` + Div, + + /// `^` + Pow, + + /// `==` + Equ, + + /// `!=` + Neq, +} + +#[derive(Debug, PartialEq, Eq, Copy, Clone)] +pub enum UnaryOperator { + /// ! 
+ Not, + + /// - + Neg, +} + +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum Literal<'a> { + Int(u64), + Bool(bool), + String(Cow<'a, str>), +} + +impl<'a> Literal<'a> { + pub fn to_owned(&self) -> Literal<'static> { + match self { + Literal::Int(i) => Literal::Int(*i), + Literal::Bool(b) => Literal::Bool(*b), + Literal::String(s) => Literal::String(Cow::Owned(s.clone().into_owned())), + } + } +} + +#[derive(Debug, PartialEq, Eq, Clone)] +pub struct Binding<'a> { + pub ident: Ident<'a>, + pub type_: Option<Type<'a>>, + pub body: Expr<'a>, +} + +impl<'a> Binding<'a> { + fn to_owned(&self) -> Binding<'static> { + Binding { + ident: self.ident.to_owned(), + type_: self.type_.as_ref().map(|t| t.to_owned()), + body: self.body.to_owned(), + } + } +} + +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum Expr<'a> { + Ident(Ident<'a>), + + Literal(Literal<'a>), + + UnaryOp { + op: UnaryOperator, + rhs: Box<Expr<'a>>, + }, + + BinaryOp { + lhs: Box<Expr<'a>>, + op: BinaryOperator, + rhs: Box<Expr<'a>>, + }, + + Let { + bindings: Vec<Binding<'a>>, + body: Box<Expr<'a>>, + }, + + If { + condition: Box<Expr<'a>>, + then: Box<Expr<'a>>, + else_: Box<Expr<'a>>, + }, + + Fun(Box<Fun<'a>>), + + Call { + fun: Box<Expr<'a>>, + args: Vec<Expr<'a>>, + }, + + Ascription { + expr: Box<Expr<'a>>, + type_: Type<'a>, + }, +} + +impl<'a> Expr<'a> { + pub fn to_owned(&self) -> Expr<'static> { + match self { + Expr::Ident(ref id) => Expr::Ident(id.to_owned()), + Expr::Literal(ref lit) => Expr::Literal(lit.to_owned()), + Expr::UnaryOp { op, rhs } => Expr::UnaryOp { + op: *op, + rhs: Box::new((**rhs).to_owned()), + }, + Expr::BinaryOp { lhs, op, rhs } => Expr::BinaryOp { + lhs: Box::new((**lhs).to_owned()), + op: *op, + rhs: Box::new((**rhs).to_owned()), + }, + Expr::Let { bindings, body } => Expr::Let { + bindings: bindings.iter().map(|binding| binding.to_owned()).collect(), + body: Box::new((**body).to_owned()), + }, + Expr::If { + condition, + then, + else_, + } => Expr::If { + condition: Box::new((**condition).to_owned()), + then: Box::new((**then).to_owned()), + else_: Box::new((**else_).to_owned()), + }, + Expr::Fun(fun) => Expr::Fun(Box::new((**fun).to_owned())), + Expr::Call { fun, args } => Expr::Call { + fun: Box::new((**fun).to_owned()), + args: args.iter().map(|arg| arg.to_owned()).collect(), + }, + Expr::Ascription { expr, type_ } => Expr::Ascription { + expr: Box::new((**expr).to_owned()), + type_: type_.to_owned(), + }, + } + } +} + +#[derive(Debug, PartialEq, Eq, Clone)] +pub struct Arg<'a> { + pub ident: Ident<'a>, + pub type_: Option<Type<'a>>, +} + +impl<'a> Arg<'a> { + pub fn to_owned(&self) -> Arg<'static> { + Arg { + ident: self.ident.to_owned(), + type_: self.type_.as_ref().map(Type::to_owned), + } + } +} + +impl<'a> TryFrom<&'a str> for Arg<'a> { + type Error = <Ident<'a> as TryFrom<&'a str>>::Error; + + fn try_from(value: &'a str) -> Result<Self, Self::Error> { + Ok(Arg { + ident: Ident::try_from(value)?, + type_: None, + }) + } +} + +#[derive(Debug, PartialEq, Eq, Clone)] +pub struct Fun<'a> { + pub args: Vec<Arg<'a>>, + pub body: Expr<'a>, +} + +impl<'a> Fun<'a> { + pub fn to_owned(&self) -> Fun<'static> { + Fun { + args: self.args.iter().map(|arg| arg.to_owned()).collect(), + body: self.body.to_owned(), + } + } +} + +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum Decl<'a> { + Fun { name: Ident<'a>, body: Fun<'a> }, + Ascription { name: Ident<'a>, type_: Type<'a> }, +} + +//// + +#[derive(Debug, PartialEq, Eq, Clone)] +pub struct FunctionType<'a> { + pub args: Vec<Type<'a>>, + pub ret: 
Box<Type<'a>>, +} + +impl<'a> FunctionType<'a> { + pub fn to_owned(&self) -> FunctionType<'static> { + FunctionType { + args: self.args.iter().map(|a| a.to_owned()).collect(), + ret: Box::new((*self.ret).to_owned()), + } + } +} + +impl<'a> Display for FunctionType<'a> { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + write!(f, "fn {} -> {}", self.args.iter().join(", "), self.ret) + } +} + +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum Type<'a> { + Int, + Float, + Bool, + CString, + Var(Ident<'a>), + Function(FunctionType<'a>), +} + +impl<'a> Type<'a> { + pub fn to_owned(&self) -> Type<'static> { + match self { + Type::Int => Type::Int, + Type::Float => Type::Float, + Type::Bool => Type::Bool, + Type::CString => Type::CString, + Type::Var(v) => Type::Var(v.to_owned()), + Type::Function(f) => Type::Function(f.to_owned()), + } + } + + pub fn alpha_equiv(&self, other: &Self) -> bool { + fn do_alpha_equiv<'a>( + substs: &mut HashMap<&'a Ident<'a>, &'a Ident<'a>>, + lhs: &'a Type, + rhs: &'a Type, + ) -> bool { + match (lhs, rhs) { + (Type::Var(v1), Type::Var(v2)) => substs.entry(v1).or_insert(v2) == &v2, + ( + Type::Function(FunctionType { + args: args1, + ret: ret1, + }), + Type::Function(FunctionType { + args: args2, + ret: ret2, + }), + ) => { + args1.len() == args2.len() + && args1 + .iter() + .zip(args2) + .all(|(a1, a2)| do_alpha_equiv(substs, a1, a2)) + && do_alpha_equiv(substs, ret1, ret2) + } + _ => lhs == rhs, + } + } + + let mut substs = HashMap::new(); + do_alpha_equiv(&mut substs, self, other) + } +} + +impl<'a> Display for Type<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Type::Int => f.write_str("int"), + Type::Float => f.write_str("float"), + Type::Bool => f.write_str("bool"), + Type::CString => f.write_str("cstring"), + Type::Var(v) => v.fmt(f), + Type::Function(ft) => ft.fmt(f), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + fn type_var(n: &str) -> Type<'static> { + Type::Var(Ident::try_from(n.to_owned()).unwrap()) + } + + mod alpha_equiv { + use super::*; + + #[test] + fn trivial() { + assert!(Type::Int.alpha_equiv(&Type::Int)); + assert!(!Type::Int.alpha_equiv(&Type::Bool)); + } + + #[test] + fn simple_type_var() { + assert!(type_var("a").alpha_equiv(&type_var("b"))); + } + + #[test] + fn function_with_type_vars_equiv() { + assert!(Type::Function(FunctionType { + args: vec![type_var("a")], + ret: Box::new(type_var("b")), + }) + .alpha_equiv(&Type::Function(FunctionType { + args: vec![type_var("b")], + ret: Box::new(type_var("a")), + }))) + } + + #[test] + fn function_with_type_vars_non_equiv() { + assert!(!Type::Function(FunctionType { + args: vec![type_var("a")], + ret: Box::new(type_var("a")), + }) + .alpha_equiv(&Type::Function(FunctionType { + args: vec![type_var("b")], + ret: Box::new(type_var("a")), + }))) + } + } +} diff --git a/users/glittershark/achilles/src/codegen/llvm.rs b/users/glittershark/achilles/src/codegen/llvm.rs new file mode 100644 index 000000000000..ee087845b640 --- /dev/null +++ b/users/glittershark/achilles/src/codegen/llvm.rs @@ -0,0 +1,365 @@ +use std::convert::{TryFrom, TryInto}; +use std::path::Path; +use std::result; + +use inkwell::basic_block::BasicBlock; +use inkwell::builder::Builder; +pub use inkwell::context::Context; +use inkwell::module::Module; +use inkwell::support::LLVMString; +use inkwell::types::{BasicType, BasicTypeEnum, FunctionType, IntType}; +use inkwell::values::{AnyValueEnum, BasicValueEnum, FunctionValue}; +use inkwell::IntPredicate; +use thiserror::Error; + 
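+// Code generation overview (descriptive comment): `Codegen` below walks typed
+// HIR expressions and emits LLVM IR through inkwell. A `function_stack` of
+// FunctionValues tracks the function currently being built, and an `Env`
+// mapping identifiers to AnyValueEnum values provides lexical scoping for
+// `let` bindings and function arguments. Type lowering is still a stub:
+// `codegen_type` and `codegen_int_type` currently map every type to i64
+// (see the TODOs further down).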
+use crate::ast::hir::{Binding, Decl, Expr}; +use crate::ast::{BinaryOperator, Ident, Literal, Type, UnaryOperator}; +use crate::common::env::Env; + +#[derive(Debug, PartialEq, Eq, Error)] +pub enum Error { + #[error("Undefined variable {0}")] + UndefinedVariable(Ident<'static>), + + #[error("LLVM Error: {0}")] + LLVMError(String), +} + +impl From<LLVMString> for Error { + fn from(s: LLVMString) -> Self { + Self::LLVMError(s.to_string()) + } +} + +pub type Result<T> = result::Result<T, Error>; + +pub struct Codegen<'ctx, 'ast> { + context: &'ctx Context, + pub module: Module<'ctx>, + builder: Builder<'ctx>, + env: Env<&'ast Ident<'ast>, AnyValueEnum<'ctx>>, + function_stack: Vec<FunctionValue<'ctx>>, + identifier_counter: u32, +} + +impl<'ctx, 'ast> Codegen<'ctx, 'ast> { + pub fn new(context: &'ctx Context, module_name: &str) -> Self { + let module = context.create_module(module_name); + let builder = context.create_builder(); + Self { + context, + module, + builder, + env: Default::default(), + function_stack: Default::default(), + identifier_counter: 0, + } + } + + pub fn new_function<'a>( + &'a mut self, + name: &str, + ty: FunctionType<'ctx>, + ) -> &'a FunctionValue<'ctx> { + self.function_stack + .push(self.module.add_function(name, ty, None)); + let basic_block = self.append_basic_block("entry"); + self.builder.position_at_end(basic_block); + self.function_stack.last().unwrap() + } + + pub fn finish_function(&mut self, res: &BasicValueEnum<'ctx>) -> FunctionValue<'ctx> { + self.builder.build_return(Some(res)); + self.function_stack.pop().unwrap() + } + + pub fn append_basic_block(&self, name: &str) -> BasicBlock<'ctx> { + self.context + .append_basic_block(*self.function_stack.last().unwrap(), name) + } + + pub fn codegen_expr(&mut self, expr: &'ast Expr<'ast, Type>) -> Result<AnyValueEnum<'ctx>> { + match expr { + Expr::Ident(id, _) => self + .env + .resolve(id) + .cloned() + .ok_or_else(|| Error::UndefinedVariable(id.to_owned())), + Expr::Literal(lit, ty) => { + let ty = self.codegen_int_type(ty); + match lit { + Literal::Int(i) => Ok(AnyValueEnum::IntValue(ty.const_int(*i, false))), + Literal::Bool(b) => Ok(AnyValueEnum::IntValue( + ty.const_int(if *b { 1 } else { 0 }, false), + )), + Literal::String(_) => todo!(), + } + } + Expr::UnaryOp { op, rhs, .. } => { + let rhs = self.codegen_expr(rhs)?; + match op { + UnaryOperator::Not => unimplemented!(), + UnaryOperator::Neg => Ok(AnyValueEnum::IntValue( + self.builder.build_int_neg(rhs.into_int_value(), "neg"), + )), + } + } + Expr::BinaryOp { lhs, op, rhs, .. } => { + let lhs = self.codegen_expr(lhs)?; + let rhs = self.codegen_expr(rhs)?; + match op { + BinaryOperator::Add => Ok(AnyValueEnum::IntValue(self.builder.build_int_add( + lhs.into_int_value(), + rhs.into_int_value(), + "add", + ))), + BinaryOperator::Sub => Ok(AnyValueEnum::IntValue(self.builder.build_int_sub( + lhs.into_int_value(), + rhs.into_int_value(), + "add", + ))), + BinaryOperator::Mul => Ok(AnyValueEnum::IntValue(self.builder.build_int_sub( + lhs.into_int_value(), + rhs.into_int_value(), + "add", + ))), + BinaryOperator::Div => { + Ok(AnyValueEnum::IntValue(self.builder.build_int_signed_div( + lhs.into_int_value(), + rhs.into_int_value(), + "add", + ))) + } + BinaryOperator::Pow => unimplemented!(), + BinaryOperator::Equ => { + Ok(AnyValueEnum::IntValue(self.builder.build_int_compare( + IntPredicate::EQ, + lhs.into_int_value(), + rhs.into_int_value(), + "eq", + ))) + } + BinaryOperator::Neq => todo!(), + } + } + Expr::Let { bindings, body, .. 
} => { + self.env.push(); + for Binding { ident, body, .. } in bindings { + let val = self.codegen_expr(body)?; + self.env.set(ident, val); + } + let res = self.codegen_expr(body); + self.env.pop(); + res + } + Expr::If { + condition, + then, + else_, + type_, + } => { + let then_block = self.append_basic_block("then"); + let else_block = self.append_basic_block("else"); + let join_block = self.append_basic_block("join"); + let condition = self.codegen_expr(condition)?; + self.builder.build_conditional_branch( + condition.into_int_value(), + then_block, + else_block, + ); + self.builder.position_at_end(then_block); + let then_res = self.codegen_expr(then)?; + self.builder.build_unconditional_branch(join_block); + + self.builder.position_at_end(else_block); + let else_res = self.codegen_expr(else_)?; + self.builder.build_unconditional_branch(join_block); + + self.builder.position_at_end(join_block); + let phi = self.builder.build_phi(self.codegen_type(type_), "join"); + phi.add_incoming(&[ + (&BasicValueEnum::try_from(then_res).unwrap(), then_block), + (&BasicValueEnum::try_from(else_res).unwrap(), else_block), + ]); + Ok(phi.as_basic_value().into()) + } + Expr::Call { fun, args, .. } => { + if let Expr::Ident(id, _) = &**fun { + let function = self + .module + .get_function(id.into()) + .or_else(|| self.env.resolve(id)?.clone().try_into().ok()) + .ok_or_else(|| Error::UndefinedVariable(id.to_owned()))?; + let args = args + .iter() + .map(|arg| Ok(self.codegen_expr(arg)?.try_into().unwrap())) + .collect::<Result<Vec<_>>>()?; + Ok(self + .builder + .build_call(function, &args, "call") + .try_as_basic_value() + .left() + .unwrap() + .into()) + } else { + todo!() + } + } + Expr::Fun { args, body, .. } => { + let fname = self.fresh_ident("f"); + let cur_block = self.builder.get_insert_block().unwrap(); + let env = self.env.save(); // TODO: closures + let function = self.codegen_function(&fname, args, body)?; + self.builder.position_at_end(cur_block); + self.env.restore(env); + Ok(function.into()) + } + } + } + + pub fn codegen_function( + &mut self, + name: &str, + args: &'ast [(Ident<'ast>, Type)], + body: &'ast Expr<'ast, Type>, + ) -> Result<FunctionValue<'ctx>> { + self.new_function( + name, + self.codegen_type(body.type_()).fn_type( + args.iter() + .map(|(_, at)| self.codegen_type(at)) + .collect::<Vec<_>>() + .as_slice(), + false, + ), + ); + self.env.push(); + for (i, (arg, _)) in args.iter().enumerate() { + self.env.set( + arg, + self.cur_function().get_nth_param(i as u32).unwrap().into(), + ); + } + let res = self.codegen_expr(body)?.try_into().unwrap(); + self.env.pop(); + Ok(self.finish_function(&res)) + } + + pub fn codegen_decl(&mut self, decl: &'ast Decl<'ast, Type>) -> Result<()> { + match decl { + Decl::Fun { + name, args, body, .. 
+ } => { + self.codegen_function(name.into(), args, body)?; + Ok(()) + } + } + } + + pub fn codegen_main(&mut self, expr: &'ast Expr<'ast, Type>) -> Result<()> { + self.new_function("main", self.context.i64_type().fn_type(&[], false)); + let res = self.codegen_expr(expr)?.try_into().unwrap(); + if *expr.type_() != Type::Int { + self.builder + .build_return(Some(&self.context.i64_type().const_int(0, false))); + } else { + self.finish_function(&res); + } + Ok(()) + } + + fn codegen_type(&self, type_: &'ast Type) -> BasicTypeEnum<'ctx> { + // TODO + self.context.i64_type().into() + } + + fn codegen_int_type(&self, type_: &'ast Type) -> IntType<'ctx> { + // TODO + self.context.i64_type() + } + + pub fn print_to_file<P>(&self, path: P) -> Result<()> + where + P: AsRef<Path>, + { + Ok(self.module.print_to_file(path)?) + } + + pub fn binary_to_file<P>(&self, path: P) -> Result<()> + where + P: AsRef<Path>, + { + if self.module.write_bitcode_to_path(path.as_ref()) { + Ok(()) + } else { + Err(Error::LLVMError( + "Error writing bitcode to output path".to_owned(), + )) + } + } + + fn fresh_ident(&mut self, prefix: &str) -> String { + self.identifier_counter += 1; + format!("{}{}", prefix, self.identifier_counter) + } + + fn cur_function(&self) -> &FunctionValue<'ctx> { + self.function_stack.last().unwrap() + } +} + +#[cfg(test)] +mod tests { + use inkwell::execution_engine::JitFunction; + use inkwell::OptimizationLevel; + + use super::*; + + fn jit_eval<T>(expr: &str) -> anyhow::Result<T> { + let expr = crate::parser::expr(expr).unwrap().1; + + let expr = crate::tc::typecheck_expr(expr).unwrap(); + + let context = Context::create(); + let mut codegen = Codegen::new(&context, "test"); + let execution_engine = codegen + .module + .create_jit_execution_engine(OptimizationLevel::None) + .unwrap(); + + codegen.codegen_function("test", &[], &expr)?; + + unsafe { + let fun: JitFunction<unsafe extern "C" fn() -> T> = + execution_engine.get_function("test")?; + Ok(fun.call()) + } + } + + #[test] + fn add_literals() { + assert_eq!(jit_eval::<i64>("1 + 2").unwrap(), 3); + } + + #[test] + fn variable_shadowing() { + assert_eq!( + jit_eval::<i64>("let x = 1 in (let x = 2 in x) + x").unwrap(), + 3 + ); + } + + #[test] + fn eq() { + assert_eq!( + jit_eval::<i64>("let x = 1 in if x == 1 then 2 else 4").unwrap(), + 2 + ); + } + + #[test] + fn function_call() { + let res = jit_eval::<i64>("let id = fn x = x in id 1").unwrap(); + assert_eq!(res, 1); + } +} diff --git a/users/glittershark/achilles/src/codegen/mod.rs b/users/glittershark/achilles/src/codegen/mod.rs new file mode 100644 index 000000000000..8ef057dba04f --- /dev/null +++ b/users/glittershark/achilles/src/codegen/mod.rs @@ -0,0 +1,25 @@ +pub mod llvm; + +use inkwell::execution_engine::JitFunction; +use inkwell::OptimizationLevel; +pub use llvm::*; + +use crate::ast::hir::Expr; +use crate::ast::Type; +use crate::common::Result; + +pub fn jit_eval<T>(expr: &Expr<Type>) -> Result<T> { + let context = Context::create(); + let mut codegen = Codegen::new(&context, "eval"); + let execution_engine = codegen + .module + .create_jit_execution_engine(OptimizationLevel::None) + .map_err(Error::from)?; + codegen.codegen_function("test", &[], &expr)?; + + unsafe { + let fun: JitFunction<unsafe extern "C" fn() -> T> = + execution_engine.get_function("eval").unwrap(); + Ok(fun.call()) + } +} diff --git a/users/glittershark/achilles/src/commands/check.rs b/users/glittershark/achilles/src/commands/check.rs new file mode 100644 index 000000000000..0bea482c1478 --- /dev/null 
+++ b/users/glittershark/achilles/src/commands/check.rs @@ -0,0 +1,39 @@ +use clap::Clap; +use std::path::PathBuf; + +use crate::ast::Type; +use crate::{parser, tc, Result}; + +/// Typecheck a file or expression +#[derive(Clap)] +pub struct Check { + /// File to check + path: Option<PathBuf>, + + /// Expression to check + #[clap(long, short = 'e')] + expr: Option<String>, +} + +fn run_expr(expr: String) -> Result<Type<'static>> { + let (_, parsed) = parser::expr(&expr)?; + let hir_expr = tc::typecheck_expr(parsed)?; + Ok(hir_expr.type_().to_owned()) +} + +fn run_path(path: PathBuf) -> Result<Type<'static>> { + todo!() +} + +impl Check { + pub fn run(self) -> Result<()> { + let type_ = match (self.path, self.expr) { + (None, None) => Err("Must specify either a file or expression to check".into()), + (Some(_), Some(_)) => Err("Cannot specify both a file and expression to check".into()), + (None, Some(expr)) => run_expr(expr), + (Some(path), None) => run_path(path), + }?; + println!("type: {}", type_); + Ok(()) + } +} diff --git a/users/glittershark/achilles/src/commands/compile.rs b/users/glittershark/achilles/src/commands/compile.rs new file mode 100644 index 000000000000..be8767575ab5 --- /dev/null +++ b/users/glittershark/achilles/src/commands/compile.rs @@ -0,0 +1,31 @@ +use std::path::PathBuf; + +use clap::Clap; + +use crate::common::Result; +use crate::compiler::{self, CompilerOptions}; + +/// Compile a source file +#[derive(Clap)] +pub struct Compile { + /// File to compile + file: PathBuf, + + /// Output file + #[clap(short = 'o')] + out_file: PathBuf, + + #[clap(flatten)] + options: CompilerOptions, +} + +impl Compile { + pub fn run(self) -> Result<()> { + eprintln!( + ">>> {} -> {}", + &self.file.to_string_lossy(), + self.out_file.to_string_lossy() + ); + compiler::compile_file(&self.file, &self.out_file, &self.options) + } +} diff --git a/users/glittershark/achilles/src/commands/eval.rs b/users/glittershark/achilles/src/commands/eval.rs new file mode 100644 index 000000000000..61a712c08a8e --- /dev/null +++ b/users/glittershark/achilles/src/commands/eval.rs @@ -0,0 +1,32 @@ +use clap::Clap; + +use crate::codegen; +use crate::interpreter; +use crate::parser; +use crate::tc; +use crate::Result; + +/// Evaluate an expression and print its result +#[derive(Clap)] +pub struct Eval { + /// JIT-compile with LLVM instead of interpreting + #[clap(long)] + jit: bool, + + /// Expression to evaluate + expr: String, +} + +impl Eval { + pub fn run(self) -> Result<()> { + let (_, parsed) = parser::expr(&self.expr)?; + let hir = tc::typecheck_expr(parsed)?; + let result = if self.jit { + codegen::jit_eval::<i64>(&hir)?.into() + } else { + interpreter::eval(&hir)? 
+ }; + println!("{}", result); + Ok(()) + } +} diff --git a/users/glittershark/achilles/src/commands/mod.rs b/users/glittershark/achilles/src/commands/mod.rs new file mode 100644 index 000000000000..fd0a822708c2 --- /dev/null +++ b/users/glittershark/achilles/src/commands/mod.rs @@ -0,0 +1,7 @@ +pub mod check; +pub mod compile; +pub mod eval; + +pub use check::Check; +pub use compile::Compile; +pub use eval::Eval; diff --git a/users/glittershark/achilles/src/common/env.rs b/users/glittershark/achilles/src/common/env.rs new file mode 100644 index 000000000000..59a5e46c466f --- /dev/null +++ b/users/glittershark/achilles/src/common/env.rs @@ -0,0 +1,59 @@ +use std::borrow::Borrow; +use std::collections::HashMap; +use std::hash::Hash; +use std::mem; + +/// A lexical environment +#[derive(Debug, PartialEq, Eq)] +pub struct Env<K: Eq + Hash, V>(Vec<HashMap<K, V>>); + +impl<K, V> Default for Env<K, V> +where + K: Eq + Hash, +{ + fn default() -> Self { + Self::new() + } +} + +impl<K, V> Env<K, V> +where + K: Eq + Hash, +{ + pub fn new() -> Self { + Self(vec![Default::default()]) + } + + pub fn push(&mut self) { + self.0.push(Default::default()); + } + + pub fn pop(&mut self) { + self.0.pop(); + } + + pub fn save(&mut self) -> Self { + mem::take(self) + } + + pub fn restore(&mut self, saved: Self) { + *self = saved; + } + + pub fn set(&mut self, k: K, v: V) { + self.0.last_mut().unwrap().insert(k, v); + } + + pub fn resolve<'a, Q>(&'a self, k: &Q) -> Option<&'a V> + where + K: Borrow<Q>, + Q: Hash + Eq + ?Sized, + { + for ctx in self.0.iter().rev() { + if let Some(res) = ctx.get(k) { + return Some(res); + } + } + None + } +} diff --git a/users/glittershark/achilles/src/common/error.rs b/users/glittershark/achilles/src/common/error.rs new file mode 100644 index 000000000000..51575a895e91 --- /dev/null +++ b/users/glittershark/achilles/src/common/error.rs @@ -0,0 +1,59 @@ +use std::{io, result}; + +use thiserror::Error; + +use crate::{codegen, interpreter, parser, tc}; + +#[derive(Error, Debug)] +pub enum Error { + #[error(transparent)] + IOError(#[from] io::Error), + + #[error("Error parsing input: {0}")] + ParseError(#[from] parser::Error), + + #[error("Error evaluating expression: {0}")] + EvalError(#[from] interpreter::Error), + + #[error("Compile error: {0}")] + CodegenError(#[from] codegen::Error), + + #[error("Type error: {0}")] + TypeError(#[from] tc::Error), + + #[error("{0}")] + Message(String), +} + +impl From<String> for Error { + fn from(s: String) -> Self { + Self::Message(s) + } +} + +impl<'a> From<&'a str> for Error { + fn from(s: &'a str) -> Self { + Self::Message(s.to_owned()) + } +} + +impl<'a> From<nom::Err<nom::error::Error<&'a str>>> for Error { + fn from(e: nom::Err<nom::error::Error<&'a str>>) -> Self { + use nom::error::Error as NomError; + use nom::Err::*; + + Self::ParseError(match e { + Incomplete(i) => Incomplete(i), + Error(NomError { input, code }) => Error(NomError { + input: input.to_owned(), + code, + }), + Failure(NomError { input, code }) => Failure(NomError { + input: input.to_owned(), + code, + }), + }) + } +} + +pub type Result<T> = result::Result<T, Error>; diff --git a/users/glittershark/achilles/src/common/mod.rs b/users/glittershark/achilles/src/common/mod.rs new file mode 100644 index 000000000000..8368a6dd180f --- /dev/null +++ b/users/glittershark/achilles/src/common/mod.rs @@ -0,0 +1,6 @@ +pub(crate) mod env; +pub(crate) mod error; +pub(crate) mod namer; + +pub use error::{Error, Result}; +pub use namer::{Namer, NamerOf}; diff --git 
a/users/glittershark/achilles/src/common/namer.rs b/users/glittershark/achilles/src/common/namer.rs new file mode 100644 index 000000000000..016e9f6ed99a --- /dev/null +++ b/users/glittershark/achilles/src/common/namer.rs @@ -0,0 +1,122 @@ +use std::fmt::Display; +use std::marker::PhantomData; + +pub struct Namer<T, F> { + make_name: F, + counter: u64, + _phantom: PhantomData<T>, +} + +impl<T, F> Namer<T, F> { + pub fn new(make_name: F) -> Self { + Namer { + make_name, + counter: 0, + _phantom: PhantomData, + } + } +} + +impl Namer<String, Box<dyn Fn(u64) -> String>> { + pub fn with_prefix<T>(prefix: T) -> Self + where + T: Display + 'static, + { + Namer::new(move |i| format!("{}{}", prefix, i)).boxed() + } + + pub fn with_suffix<T>(suffix: T) -> Self + where + T: Display + 'static, + { + Namer::new(move |i| format!("{}{}", i, suffix)).boxed() + } + + pub fn alphabetic() -> Self { + Namer::new(|i| { + if i <= 26 { + std::char::from_u32((i + 96) as u32).unwrap().to_string() + } else { + format!( + "{}{}", + std::char::from_u32(((i % 26) + 96) as u32).unwrap(), + i - 26 + ) + } + }) + .boxed() + } +} + +impl<T, F> Namer<T, F> +where + F: Fn(u64) -> T, +{ + pub fn make_name(&mut self) -> T { + self.counter += 1; + (self.make_name)(self.counter) + } + + pub fn boxed(self) -> NamerOf<T> + where + F: 'static, + { + Namer { + make_name: Box::new(self.make_name), + counter: self.counter, + _phantom: self._phantom, + } + } + + pub fn map<G, U>(self, f: G) -> NamerOf<U> + where + G: Fn(T) -> U + 'static, + T: 'static, + F: 'static, + { + Namer { + counter: self.counter, + make_name: Box::new(move |x| f((self.make_name)(x))), + _phantom: PhantomData, + } + } +} + +pub type NamerOf<T> = Namer<T, Box<dyn Fn(u64) -> T>>; + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn prefix() { + let mut namer = Namer::with_prefix("t"); + assert_eq!(namer.make_name(), "t1"); + assert_eq!(namer.make_name(), "t2"); + } + + #[test] + fn suffix() { + let mut namer = Namer::with_suffix("t"); + assert_eq!(namer.make_name(), "1t"); + assert_eq!(namer.make_name(), "2t"); + } + + #[test] + fn alphabetic() { + let mut namer = Namer::alphabetic(); + assert_eq!(namer.make_name(), "a"); + assert_eq!(namer.make_name(), "b"); + (0..25).for_each(|_| { + namer.make_name(); + }); + assert_eq!(namer.make_name(), "b2"); + } + + #[test] + fn custom_callback() { + let mut namer = Namer::new(|n| n + 1); + assert_eq!(namer.make_name(), 2); + assert_eq!(namer.make_name(), 3); + } +} diff --git a/users/glittershark/achilles/src/compiler.rs b/users/glittershark/achilles/src/compiler.rs new file mode 100644 index 000000000000..f925b267df57 --- /dev/null +++ b/users/glittershark/achilles/src/compiler.rs @@ -0,0 +1,86 @@ +use std::fmt::{self, Display}; +use std::path::Path; +use std::str::FromStr; +use std::{fs, result}; + +use clap::Clap; +use test_strategy::Arbitrary; + +use crate::codegen::{self, Codegen}; +use crate::common::Result; +use crate::{parser, tc}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Arbitrary)] +pub enum OutputFormat { + LLVM, + Bitcode, +} + +impl Default for OutputFormat { + fn default() -> Self { + Self::Bitcode + } +} + +impl FromStr for OutputFormat { + type Err = String; + + fn from_str(s: &str) -> result::Result<Self, Self::Err> { + match s { + "llvm" => Ok(Self::LLVM), + "binary" => Ok(Self::Bitcode), + _ => Err(format!( + "Invalid output format {}, expected one of {{llvm, binary}}", + s + )), + } + } +} + +impl Display for OutputFormat { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> 
fmt::Result { + match self { + OutputFormat::LLVM => f.write_str("llvm"), + OutputFormat::Bitcode => f.write_str("binary"), + } + } +} + +#[derive(Clap, Debug, PartialEq, Eq, Default)] +pub struct CompilerOptions { + #[clap(long, short = 'f', default_value)] + format: OutputFormat, +} + +pub fn compile_file(input: &Path, output: &Path, options: &CompilerOptions) -> Result<()> { + let src = fs::read_to_string(input)?; + let (_, decls) = parser::toplevel(&src)?; // TODO: statements + let decls = tc::typecheck_toplevel(decls)?; + + let context = codegen::Context::create(); + let mut codegen = Codegen::new( + &context, + &input + .file_stem() + .map_or("UNKNOWN".to_owned(), |s| s.to_string_lossy().into_owned()), + ); + for decl in &decls { + codegen.codegen_decl(decl)?; + } + match options.format { + OutputFormat::LLVM => codegen.print_to_file(output)?, + OutputFormat::Bitcode => codegen.binary_to_file(output)?, + } + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + use test_strategy::proptest; + + #[proptest] + fn output_format_display_from_str_round_trip(of: OutputFormat) { + assert_eq!(OutputFormat::from_str(&of.to_string()), Ok(of)); + } +} diff --git a/users/glittershark/achilles/src/interpreter/error.rs b/users/glittershark/achilles/src/interpreter/error.rs new file mode 100644 index 000000000000..268d6f479a1e --- /dev/null +++ b/users/glittershark/achilles/src/interpreter/error.rs @@ -0,0 +1,19 @@ +use std::result; + +use thiserror::Error; + +use crate::ast::{Ident, Type}; + +#[derive(Debug, PartialEq, Eq, Error)] +pub enum Error { + #[error("Undefined variable {0}")] + UndefinedVariable(Ident<'static>), + + #[error("Unexpected type {actual}, expected type {expected}")] + InvalidType { + actual: Type<'static>, + expected: Type<'static>, + }, +} + +pub type Result<T> = result::Result<T, Error>; diff --git a/users/glittershark/achilles/src/interpreter/mod.rs b/users/glittershark/achilles/src/interpreter/mod.rs new file mode 100644 index 000000000000..3bfeeb52e85c --- /dev/null +++ b/users/glittershark/achilles/src/interpreter/mod.rs @@ -0,0 +1,176 @@ +mod error; +mod value; + +pub use self::error::{Error, Result}; +pub use self::value::{Function, Value}; +use crate::ast::hir::{Binding, Expr}; +use crate::ast::{BinaryOperator, FunctionType, Ident, Literal, Type, UnaryOperator}; +use crate::common::env::Env; + +#[derive(Debug, Default)] +pub struct Interpreter<'a> { + env: Env<&'a Ident<'a>, Value<'a>>, +} + +impl<'a> Interpreter<'a> { + pub fn new() -> Self { + Self::default() + } + + fn resolve(&self, var: &'a Ident<'a>) -> Result<Value<'a>> { + self.env + .resolve(var) + .cloned() + .ok_or_else(|| Error::UndefinedVariable(var.to_owned())) + } + + pub fn eval(&mut self, expr: &'a Expr<'a, Type>) -> Result<Value<'a>> { + let res = match expr { + Expr::Ident(id, _) => self.resolve(id), + Expr::Literal(Literal::Int(i), _) => Ok((*i).into()), + Expr::Literal(Literal::Bool(b), _) => Ok((*b).into()), + Expr::Literal(Literal::String(s), _) => Ok(s.clone().into()), + Expr::UnaryOp { op, rhs, .. } => { + let rhs = self.eval(rhs)?; + match op { + UnaryOperator::Neg => -rhs, + _ => unimplemented!(), + } + } + Expr::BinaryOp { lhs, op, rhs, .. 
} => { + let lhs = self.eval(lhs)?; + let rhs = self.eval(rhs)?; + match op { + BinaryOperator::Add => lhs + rhs, + BinaryOperator::Sub => lhs - rhs, + BinaryOperator::Mul => lhs * rhs, + BinaryOperator::Div => lhs / rhs, + BinaryOperator::Pow => todo!(), + BinaryOperator::Equ => Ok(lhs.eq(&rhs).into()), + BinaryOperator::Neq => todo!(), + } + } + Expr::Let { bindings, body, .. } => { + self.env.push(); + for Binding { ident, body, .. } in bindings { + let val = self.eval(body)?; + self.env.set(ident, val); + } + let res = self.eval(body)?; + self.env.pop(); + Ok(res) + } + Expr::If { + condition, + then, + else_, + .. + } => { + let condition = self.eval(condition)?; + if *(condition.as_type::<bool>()?) { + self.eval(then) + } else { + self.eval(else_) + } + } + Expr::Call { ref fun, args, .. } => { + let fun = self.eval(fun)?; + let expected_type = FunctionType { + args: args.iter().map(|_| Type::Int).collect(), + ret: Box::new(Type::Int), + }; + + let Function { + args: function_args, + body, + .. + } = fun.as_function(expected_type)?; + let arg_values = function_args.iter().zip( + args.iter() + .map(|v| self.eval(v)) + .collect::<Result<Vec<_>>>()?, + ); + let mut interpreter = Interpreter::new(); + for (arg_name, arg_value) in arg_values { + interpreter.env.set(arg_name, arg_value); + } + Ok(Value::from(*interpreter.eval(body)?.as_type::<i64>()?)) + } + Expr::Fun { args, body, type_ } => { + let type_ = match type_ { + Type::Function(ft) => ft.clone(), + _ => unreachable!("Function expression without function type"), + }; + + Ok(Value::from(value::Function { + // TODO + type_, + args: args.iter().map(|(arg, _)| arg.to_owned()).collect(), + body: (**body).to_owned(), + })) + } + }?; + debug_assert_eq!(&res.type_(), expr.type_()); + Ok(res) + } +} + +pub fn eval<'a>(expr: &'a Expr<'a, Type>) -> Result<Value<'a>> { + let mut interpreter = Interpreter::new(); + interpreter.eval(expr) +} + +#[cfg(test)] +mod tests { + use std::convert::TryFrom; + + use super::value::{TypeOf, Val}; + use super::*; + use BinaryOperator::*; + + fn int_lit(i: u64) -> Box<Expr<'static, Type<'static>>> { + Box::new(Expr::Literal(Literal::Int(i), Type::Int)) + } + + fn do_eval<T>(src: &str) -> T + where + for<'a> &'a T: TryFrom<&'a Val<'a>>, + T: Clone + TypeOf, + { + let expr = crate::parser::expr(src).unwrap().1; + let hir = crate::tc::typecheck_expr(expr).unwrap(); + let res = eval(&hir).unwrap(); + res.as_type::<T>().unwrap().clone() + } + + #[test] + fn simple_addition() { + let expr = Expr::BinaryOp { + lhs: int_lit(1), + op: Mul, + rhs: int_lit(2), + type_: Type::Int, + }; + let res = eval(&expr).unwrap(); + assert_eq!(*res.as_type::<i64>().unwrap(), 2); + } + + #[test] + fn variable_shadowing() { + let res = do_eval::<i64>("let x = 1 in (let x = 2 in x) + x"); + assert_eq!(res, 3); + } + + #[test] + fn conditional_with_equals() { + let res = do_eval::<i64>("let x = 1 in if x == 1 then 2 else 4"); + assert_eq!(res, 2); + } + + #[test] + #[ignore] + fn function_call() { + let res = do_eval::<i64>("let id = fn x = x in id 1"); + assert_eq!(res, 1); + } +} diff --git a/users/glittershark/achilles/src/interpreter/value.rs b/users/glittershark/achilles/src/interpreter/value.rs new file mode 100644 index 000000000000..55ba42f9de58 --- /dev/null +++ b/users/glittershark/achilles/src/interpreter/value.rs @@ -0,0 +1,203 @@ +use std::borrow::Cow; +use std::convert::TryFrom; +use std::fmt::{self, Display}; +use std::ops::{Add, Div, Mul, Neg, Sub}; +use std::rc::Rc; +use std::result; + +use derive_more::{Deref, From, 
TryInto}; + +use super::{Error, Result}; +use crate::ast::hir::Expr; +use crate::ast::{FunctionType, Ident, Type}; + +#[derive(Debug, Clone)] +pub struct Function<'a> { + pub type_: FunctionType<'a>, + pub args: Vec<Ident<'a>>, + pub body: Expr<'a, Type<'a>>, +} + +#[derive(From, TryInto)] +#[try_into(owned, ref)] +pub enum Val<'a> { + Int(i64), + Float(f64), + Bool(bool), + String(Cow<'a, str>), + Function(Function<'a>), +} + +impl<'a> TryFrom<Val<'a>> for String { + type Error = (); + + fn try_from(value: Val<'a>) -> result::Result<Self, Self::Error> { + match value { + Val::String(s) => Ok(s.into_owned()), + _ => Err(()), + } + } +} + +impl<'a> fmt::Debug for Val<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Val::Int(x) => f.debug_tuple("Int").field(x).finish(), + Val::Float(x) => f.debug_tuple("Float").field(x).finish(), + Val::Bool(x) => f.debug_tuple("Bool").field(x).finish(), + Val::String(s) => f.debug_tuple("String").field(s).finish(), + Val::Function(Function { type_, .. }) => { + f.debug_struct("Function").field("type_", type_).finish() + } + } + } +} + +impl<'a> PartialEq for Val<'a> { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Val::Int(x), Val::Int(y)) => x == y, + (Val::Float(x), Val::Float(y)) => x == y, + (Val::Bool(x), Val::Bool(y)) => x == y, + (Val::Function(_), Val::Function(_)) => false, + (_, _) => false, + } + } +} + +impl<'a> From<u64> for Val<'a> { + fn from(i: u64) -> Self { + Self::from(i as i64) + } +} + +impl<'a> Display for Val<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Val::Int(x) => x.fmt(f), + Val::Float(x) => x.fmt(f), + Val::Bool(x) => x.fmt(f), + Val::String(s) => write!(f, "{:?}", s), + Val::Function(Function { type_, .. }) => write!(f, "<{}>", type_), + } + } +} + +impl<'a> Val<'a> { + pub fn type_(&self) -> Type { + match self { + Val::Int(_) => Type::Int, + Val::Float(_) => Type::Float, + Val::Bool(_) => Type::Bool, + Val::String(_) => Type::CString, + Val::Function(Function { type_, .. }) => Type::Function(type_.clone()), + } + } + + pub fn as_type<'b, T>(&'b self) -> Result<&'b T> + where + T: TypeOf + 'b + Clone, + &'b T: TryFrom<&'b Self>, + { + <&T>::try_from(self).map_err(|_| Error::InvalidType { + actual: self.type_().to_owned(), + expected: <T as TypeOf>::type_of(), + }) + } + + pub fn as_function<'b>(&'b self, function_type: FunctionType) -> Result<&'b Function<'a>> { + match self { + Val::Function(f) if f.type_ == function_type => Ok(&f), + _ => Err(Error::InvalidType { + actual: self.type_().to_owned(), + expected: Type::Function(function_type.to_owned()), + }), + } + } +} + +#[derive(Debug, PartialEq, Clone, Deref)] +pub struct Value<'a>(Rc<Val<'a>>); + +impl<'a> Display for Value<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt(f) + } +} + +impl<'a, T> From<T> for Value<'a> +where + Val<'a>: From<T>, +{ + fn from(x: T) -> Self { + Self(Rc::new(x.into())) + } +} + +impl<'a> Neg for Value<'a> { + type Output = Result<Value<'a>>; + + fn neg(self) -> Self::Output { + Ok((-self.as_type::<i64>()?).into()) + } +} + +impl<'a> Add for Value<'a> { + type Output = Result<Value<'a>>; + + fn add(self, rhs: Self) -> Self::Output { + Ok((self.as_type::<i64>()? + rhs.as_type::<i64>()?).into()) + } +} + +impl<'a> Sub for Value<'a> { + type Output = Result<Value<'a>>; + + fn sub(self, rhs: Self) -> Self::Output { + Ok((self.as_type::<i64>()? 
- rhs.as_type::<i64>()?).into()) + } +} + +impl<'a> Mul for Value<'a> { + type Output = Result<Value<'a>>; + + fn mul(self, rhs: Self) -> Self::Output { + Ok((self.as_type::<i64>()? * rhs.as_type::<i64>()?).into()) + } +} + +impl<'a> Div for Value<'a> { + type Output = Result<Value<'a>>; + + fn div(self, rhs: Self) -> Self::Output { + Ok((self.as_type::<f64>()? / rhs.as_type::<f64>()?).into()) + } +} + +pub trait TypeOf { + fn type_of() -> Type<'static>; +} + +impl TypeOf for i64 { + fn type_of() -> Type<'static> { + Type::Int + } +} + +impl TypeOf for bool { + fn type_of() -> Type<'static> { + Type::Bool + } +} + +impl TypeOf for f64 { + fn type_of() -> Type<'static> { + Type::Float + } +} + +impl TypeOf for String { + fn type_of() -> Type<'static> { + Type::CString + } +} diff --git a/users/glittershark/achilles/src/main.rs b/users/glittershark/achilles/src/main.rs new file mode 100644 index 000000000000..4ba0aaf33e91 --- /dev/null +++ b/users/glittershark/achilles/src/main.rs @@ -0,0 +1,38 @@ +#![feature(str_split_once)] +#![feature(or_insert_with_key)] + +use clap::Clap; + +pub mod ast; +pub mod codegen; +pub(crate) mod commands; +pub(crate) mod common; +pub mod compiler; +pub mod interpreter; +#[macro_use] +pub mod parser; +pub mod tc; + +pub use common::{Error, Result}; + +#[derive(Clap)] +struct Opts { + #[clap(subcommand)] + subcommand: Command, +} + +#[derive(Clap)] +enum Command { + Eval(commands::Eval), + Compile(commands::Compile), + Check(commands::Check), +} + +fn main() -> anyhow::Result<()> { + let opts = Opts::parse(); + match opts.subcommand { + Command::Eval(eval) => Ok(eval.run()?), + Command::Compile(compile) => Ok(compile.run()?), + Command::Check(check) => Ok(check.run()?), + } +} diff --git a/users/glittershark/achilles/src/parser/expr.rs b/users/glittershark/achilles/src/parser/expr.rs new file mode 100644 index 000000000000..12c55df02b80 --- /dev/null +++ b/users/glittershark/achilles/src/parser/expr.rs @@ -0,0 +1,620 @@ +use std::borrow::Cow; + +use nom::alt; +use nom::character::complete::{digit1, multispace0, multispace1}; +use nom::{ + call, char, complete, delimited, do_parse, flat_map, many0, map, named, opt, parse_to, + preceded, separated_list0, separated_list1, tag, tuple, +}; +use pratt::{Affix, Associativity, PrattParser, Precedence}; + +use crate::ast::{BinaryOperator, Binding, Expr, Fun, Literal, UnaryOperator}; +use crate::parser::{arg, ident, type_}; + +#[derive(Debug)] +enum TokenTree<'a> { + Prefix(UnaryOperator), + // Postfix(char), + Infix(BinaryOperator), + Primary(Expr<'a>), + Group(Vec<TokenTree<'a>>), +} + +named!(prefix(&str) -> TokenTree, map!(alt!( + complete!(char!('-')) => { |_| UnaryOperator::Neg } | + complete!(char!('!')) => { |_| UnaryOperator::Not } +), TokenTree::Prefix)); + +named!(infix(&str) -> TokenTree, map!(alt!( + complete!(tag!("==")) => { |_| BinaryOperator::Equ } | + complete!(tag!("!=")) => { |_| BinaryOperator::Neq } | + complete!(char!('+')) => { |_| BinaryOperator::Add } | + complete!(char!('-')) => { |_| BinaryOperator::Sub } | + complete!(char!('*')) => { |_| BinaryOperator::Mul } | + complete!(char!('/')) => { |_| BinaryOperator::Div } | + complete!(char!('^')) => { |_| BinaryOperator::Pow } +), TokenTree::Infix)); + +named!(primary(&str) -> TokenTree, alt!( + do_parse!( + multispace0 >> + char!('(') >> + multispace0 >> + group: group >> + multispace0 >> + char!(')') >> + multispace0 >> + (TokenTree::Group(group)) + ) | + delimited!(multispace0, simple_expr, multispace0) => { |s| TokenTree::Primary(s) } +)); + 
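+// The prefix/infix/primary parsers above flatten an expression into a
+// `TokenTree` stream; `rest` and `group` below assemble that stream, and
+// `ExprParser` (a pratt::PrattParser) rebuilds the tree using the
+// precedences declared in `query`. For example, "x * y + z" flattens to
+// [Primary(x), Infix(Mul), Primary(y), Infix(Add), Primary(z)] and is
+// reassembled as (x * y) + z, since Mul (Precedence 7) binds tighter than
+// Add (Precedence 6).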
+named!( + rest(&str) -> Vec<(TokenTree, Vec<TokenTree>, TokenTree)>, + many0!(tuple!( + infix, + delimited!(multispace0, many0!(prefix), multispace0), + primary + // many0!(postfix) + )) +); + +named!(group(&str) -> Vec<TokenTree>, do_parse!( + prefix: many0!(prefix) + >> primary: primary + // >> postfix: many0!(postfix) + >> rest: rest + >> ({ + let mut res = prefix; + res.push(primary); + // res.append(&mut postfix); + for (infix, mut prefix, primary/*, mut postfix*/) in rest { + res.push(infix); + res.append(&mut prefix); + res.push(primary); + // res.append(&mut postfix); + } + res + }) +)); + +fn token_tree(i: &str) -> nom::IResult<&str, Vec<TokenTree>> { + group(i) +} + +struct ExprParser; + +impl<'a, I> PrattParser<I> for ExprParser +where + I: Iterator<Item = TokenTree<'a>>, +{ + type Error = pratt::NoError; + type Input = TokenTree<'a>; + type Output = Expr<'a>; + + fn query(&mut self, input: &Self::Input) -> Result<Affix, Self::Error> { + use BinaryOperator::*; + use UnaryOperator::*; + + Ok(match input { + TokenTree::Infix(Add) => Affix::Infix(Precedence(6), Associativity::Left), + TokenTree::Infix(Sub) => Affix::Infix(Precedence(6), Associativity::Left), + TokenTree::Infix(Mul) => Affix::Infix(Precedence(7), Associativity::Left), + TokenTree::Infix(Div) => Affix::Infix(Precedence(7), Associativity::Left), + TokenTree::Infix(Pow) => Affix::Infix(Precedence(8), Associativity::Right), + TokenTree::Infix(Equ) => Affix::Infix(Precedence(4), Associativity::Right), + TokenTree::Infix(Neq) => Affix::Infix(Precedence(4), Associativity::Right), + TokenTree::Prefix(Neg) => Affix::Prefix(Precedence(6)), + TokenTree::Prefix(Not) => Affix::Prefix(Precedence(6)), + TokenTree::Primary(_) => Affix::Nilfix, + TokenTree::Group(_) => Affix::Nilfix, + }) + } + + fn primary(&mut self, input: Self::Input) -> Result<Self::Output, Self::Error> { + Ok(match input { + TokenTree::Primary(expr) => expr, + TokenTree::Group(group) => self.parse(&mut group.into_iter()).unwrap(), + _ => unreachable!(), + }) + } + + fn infix( + &mut self, + lhs: Self::Output, + op: Self::Input, + rhs: Self::Output, + ) -> Result<Self::Output, Self::Error> { + let op = match op { + TokenTree::Infix(op) => op, + _ => unreachable!(), + }; + Ok(Expr::BinaryOp { + lhs: Box::new(lhs), + op, + rhs: Box::new(rhs), + }) + } + + fn prefix(&mut self, op: Self::Input, rhs: Self::Output) -> Result<Self::Output, Self::Error> { + let op = match op { + TokenTree::Prefix(op) => op, + _ => unreachable!(), + }; + + Ok(Expr::UnaryOp { + op, + rhs: Box::new(rhs), + }) + } + + fn postfix( + &mut self, + _lhs: Self::Output, + _op: Self::Input, + ) -> Result<Self::Output, Self::Error> { + unreachable!() + } +} + +named!(int(&str) -> Literal, map!(flat_map!(digit1, parse_to!(u64)), Literal::Int)); + +named!(bool_(&str) -> Literal, alt!( + complete!(tag!("true")) => { |_| Literal::Bool(true) } | + complete!(tag!("false")) => { |_| Literal::Bool(false) } +)); + +fn string_internal(i: &str) -> nom::IResult<&str, Cow<'_, str>, nom::error::Error<&str>> { + let (s, rem) = i + .split_once('"') + .ok_or_else(|| nom::Err::Error(nom::error::Error::new(i, nom::error::ErrorKind::Tag)))?; + Ok((rem, Cow::Borrowed(s))) +} + +named!(string(&str) -> Literal, preceded!( + complete!(char!('"')), + map!( + string_internal, + |s| Literal::String(s) + ) +)); + +named!(literal(&str) -> Literal, alt!(int | bool_ | string)); + +named!(literal_expr(&str) -> Expr, map!(literal, Expr::Literal)); + +named!(binding(&str) -> Binding, do_parse!( + multispace0 + >> ident: ident + >> 
multispace0 + >> type_: opt!(preceded!(tuple!(tag!(":"), multispace0), type_)) + >> multispace0 + >> char!('=') + >> multispace0 + >> body: expr + >> (Binding { + ident, + type_, + body + }) +)); + +named!(let_(&str) -> Expr, do_parse!( + tag!("let") + >> multispace0 + >> bindings: separated_list1!(alt!(char!(';') | char!('\n')), binding) + >> multispace0 + >> tag!("in") + >> multispace0 + >> body: expr + >> (Expr::Let { + bindings, + body: Box::new(body) + }) +)); + +named!(if_(&str) -> Expr, do_parse! ( + tag!("if") + >> multispace0 + >> condition: expr + >> multispace0 + >> tag!("then") + >> multispace0 + >> then: expr + >> multispace0 + >> tag!("else") + >> multispace0 + >> else_: expr + >> (Expr::If { + condition: Box::new(condition), + then: Box::new(then), + else_: Box::new(else_) + }) +)); + +named!(ident_expr(&str) -> Expr, map!(ident, Expr::Ident)); + +fn ascripted<'a>( + p: impl Fn(&'a str) -> nom::IResult<&'a str, Expr, nom::error::Error<&'a str>> + 'a, +) -> impl Fn(&'a str) -> nom::IResult<&str, Expr, nom::error::Error<&'a str>> { + move |i| { + do_parse!( + i, + expr: p + >> multispace0 + >> complete!(tag!(":")) + >> multispace0 + >> type_: type_ + >> (Expr::Ascription { + expr: Box::new(expr), + type_ + }) + ) + } +} + +named!(paren_expr(&str) -> Expr, + delimited!(complete!(tag!("(")), expr, complete!(tag!(")")))); + +named!(funcref(&str) -> Expr, alt!( + ident_expr | + paren_expr +)); + +named!(no_arg_call(&str) -> Expr, do_parse!( + fun: funcref + >> multispace0 + >> complete!(tag!("()")) + >> (Expr::Call { + fun: Box::new(fun), + args: vec![], + }) +)); + +named!(fun_expr(&str) -> Expr, do_parse!( + tag!("fn") + >> multispace1 + >> args: separated_list0!(multispace1, arg) + >> multispace0 + >> char!('=') + >> multispace0 + >> body: expr + >> (Expr::Fun(Box::new(Fun { + args, + body + }))) +)); + +named!(fn_arg(&str) -> Expr, alt!( + ident_expr | + literal_expr | + paren_expr +)); + +named!(call_with_args(&str) -> Expr, do_parse!( + fun: funcref + >> multispace1 + >> args: separated_list1!(multispace1, fn_arg) + >> (Expr::Call { + fun: Box::new(fun), + args + }) +)); + +named!(simple_expr_unascripted(&str) -> Expr, alt!( + let_ | + if_ | + fun_expr | + literal_expr | + ident_expr +)); + +named!(simple_expr(&str) -> Expr, alt!( + call!(ascripted(simple_expr_unascripted)) | + simple_expr_unascripted +)); + +named!(pub expr(&str) -> Expr, alt!( + no_arg_call | + call_with_args | + map!(token_tree, |tt| { + ExprParser.parse(&mut tt.into_iter()).unwrap() + }) | + simple_expr +)); + +#[cfg(test)] +pub(crate) mod tests { + use super::*; + use crate::ast::{Arg, Ident, Type}; + use std::convert::TryFrom; + use BinaryOperator::*; + use Expr::{BinaryOp, If, Let, UnaryOp}; + use UnaryOperator::*; + + pub(crate) fn ident_expr(s: &str) -> Box<Expr> { + Box::new(Expr::Ident(Ident::try_from(s).unwrap())) + } + + mod operators { + use super::*; + + #[test] + fn mul_plus() { + let (rem, res) = expr("x*y+z").unwrap(); + assert!(rem.is_empty()); + assert_eq!( + res, + BinaryOp { + lhs: Box::new(BinaryOp { + lhs: ident_expr("x"), + op: Mul, + rhs: ident_expr("y") + }), + op: Add, + rhs: ident_expr("z") + } + ) + } + + #[test] + fn mul_plus_ws() { + let (rem, res) = expr("x * y + z").unwrap(); + assert!(rem.is_empty(), "non-empty remainder: \"{}\"", rem); + assert_eq!( + res, + BinaryOp { + lhs: Box::new(BinaryOp { + lhs: ident_expr("x"), + op: Mul, + rhs: ident_expr("y") + }), + op: Add, + rhs: ident_expr("z") + } + ) + } + + #[test] + fn unary() { + let (rem, res) = expr("x * -z").unwrap(); 
+ assert!(rem.is_empty(), "non-empty remainder: \"{}\"", rem); + assert_eq!( + res, + BinaryOp { + lhs: ident_expr("x"), + op: Mul, + rhs: Box::new(UnaryOp { + op: Neg, + rhs: ident_expr("z"), + }) + } + ) + } + + #[test] + fn mul_literal() { + let (rem, res) = expr("x * 3").unwrap(); + assert!(rem.is_empty()); + assert_eq!( + res, + BinaryOp { + lhs: ident_expr("x"), + op: Mul, + rhs: Box::new(Expr::Literal(Literal::Int(3))), + } + ) + } + + #[test] + fn equ() { + let res = test_parse!(expr, "x * 7 == 7"); + assert_eq!( + res, + BinaryOp { + lhs: Box::new(BinaryOp { + lhs: ident_expr("x"), + op: Mul, + rhs: Box::new(Expr::Literal(Literal::Int(7))) + }), + op: Equ, + rhs: Box::new(Expr::Literal(Literal::Int(7))) + } + ) + } + } + + #[test] + fn bools() { + assert_eq!( + test_parse!(expr, "true"), + Expr::Literal(Literal::Bool(true)) + ); + assert_eq!( + test_parse!(expr, "false"), + Expr::Literal(Literal::Bool(false)) + ); + } + + #[test] + fn simple_string_lit() { + assert_eq!( + test_parse!(expr, "\"foobar\""), + Expr::Literal(Literal::String(Cow::Borrowed("foobar"))) + ) + } + + #[test] + fn let_complex() { + let res = test_parse!(expr, "let x = 1; y = x * 7 in (x + y) * 4"); + assert_eq!( + res, + Let { + bindings: vec![ + Binding { + ident: Ident::try_from("x").unwrap(), + type_: None, + body: Expr::Literal(Literal::Int(1)) + }, + Binding { + ident: Ident::try_from("y").unwrap(), + type_: None, + body: Expr::BinaryOp { + lhs: ident_expr("x"), + op: Mul, + rhs: Box::new(Expr::Literal(Literal::Int(7))) + } + } + ], + body: Box::new(Expr::BinaryOp { + lhs: Box::new(Expr::BinaryOp { + lhs: ident_expr("x"), + op: Add, + rhs: ident_expr("y"), + }), + op: Mul, + rhs: Box::new(Expr::Literal(Literal::Int(4))), + }) + } + ) + } + + #[test] + fn if_simple() { + let res = test_parse!(expr, "if x == 8 then 9 else 20"); + assert_eq!( + res, + If { + condition: Box::new(BinaryOp { + lhs: ident_expr("x"), + op: Equ, + rhs: Box::new(Expr::Literal(Literal::Int(8))), + }), + then: Box::new(Expr::Literal(Literal::Int(9))), + else_: Box::new(Expr::Literal(Literal::Int(20))) + } + ) + } + + #[test] + fn no_arg_call() { + let res = test_parse!(expr, "f()"); + assert_eq!( + res, + Expr::Call { + fun: ident_expr("f"), + args: vec![] + } + ); + } + + #[test] + fn call_with_args() { + let res = test_parse!(expr, "f x 1"); + assert_eq!( + res, + Expr::Call { + fun: ident_expr("f"), + args: vec![*ident_expr("x"), Expr::Literal(Literal::Int(1))] + } + ) + } + + #[test] + fn call_funcref() { + let res = test_parse!(expr, "(let x = 1 in x) 2"); + assert_eq!( + res, + Expr::Call { + fun: Box::new(Expr::Let { + bindings: vec![Binding { + ident: Ident::try_from("x").unwrap(), + type_: None, + body: Expr::Literal(Literal::Int(1)) + }], + body: ident_expr("x") + }), + args: vec![Expr::Literal(Literal::Int(2))] + } + ) + } + + #[test] + fn anon_function() { + let res = test_parse!(expr, "let id = fn x = x in id 1"); + assert_eq!( + res, + Expr::Let { + bindings: vec![Binding { + ident: Ident::try_from("id").unwrap(), + type_: None, + body: Expr::Fun(Box::new(Fun { + args: vec![Arg::try_from("x").unwrap()], + body: *ident_expr("x") + })) + }], + body: Box::new(Expr::Call { + fun: ident_expr("id"), + args: vec![Expr::Literal(Literal::Int(1))], + }) + } + ); + } + + mod ascriptions { + use super::*; + + #[test] + fn bare_ascription() { + let res = test_parse!(expr, "1: float"); + assert_eq!( + res, + Expr::Ascription { + expr: Box::new(Expr::Literal(Literal::Int(1))), + type_: Type::Float + } + ) + } + + #[test] + fn 
fn_body_ascription() { + let res = test_parse!(expr, "let const_1 = fn x = 1: int in const_1 2"); + assert_eq!( + res, + Expr::Let { + bindings: vec![Binding { + ident: Ident::try_from("const_1").unwrap(), + type_: None, + body: Expr::Fun(Box::new(Fun { + args: vec![Arg::try_from("x").unwrap()], + body: Expr::Ascription { + expr: Box::new(Expr::Literal(Literal::Int(1))), + type_: Type::Int, + } + })) + }], + body: Box::new(Expr::Call { + fun: ident_expr("const_1"), + args: vec![Expr::Literal(Literal::Int(2))] + }) + } + ) + } + + #[test] + fn let_binding_ascripted() { + let res = test_parse!(expr, "let x: int = 1 in x"); + assert_eq!( + res, + Expr::Let { + bindings: vec![Binding { + ident: Ident::try_from("x").unwrap(), + type_: Some(Type::Int), + body: Expr::Literal(Literal::Int(1)) + }], + body: ident_expr("x") + } + ) + } + } +} diff --git a/users/glittershark/achilles/src/parser/macros.rs b/users/glittershark/achilles/src/parser/macros.rs new file mode 100644 index 000000000000..406e5c0e699e --- /dev/null +++ b/users/glittershark/achilles/src/parser/macros.rs @@ -0,0 +1,16 @@ +#[cfg(test)] +#[macro_use] +macro_rules! test_parse { + ($parser: ident, $src: expr) => {{ + let res = $parser($src); + nom_trace::print_trace!(); + let (rem, res) = res.unwrap(); + assert!( + rem.is_empty(), + "non-empty remainder: \"{}\", parsed: {:?}", + rem, + res + ); + res + }}; +} diff --git a/users/glittershark/achilles/src/parser/mod.rs b/users/glittershark/achilles/src/parser/mod.rs new file mode 100644 index 000000000000..dd7874aff853 --- /dev/null +++ b/users/glittershark/achilles/src/parser/mod.rs @@ -0,0 +1,197 @@ +use nom::character::complete::{multispace0, multispace1}; +use nom::error::{ErrorKind, ParseError}; +use nom::{alt, char, complete, do_parse, many0, named, separated_list0, tag, terminated}; + +#[macro_use] +mod macros; +mod expr; +mod type_; + +use crate::ast::{Arg, Decl, Fun, Ident}; +pub use expr::expr; +pub use type_::type_; + +pub type Error = nom::Err<nom::error::Error<String>>; + +pub(crate) fn is_reserved(s: &str) -> bool { + matches!( + s, + "if" | "then" + | "else" + | "let" + | "in" + | "fn" + | "int" + | "float" + | "bool" + | "true" + | "false" + | "cstring" + ) +} + +pub(crate) fn ident<'a, E>(i: &'a str) -> nom::IResult<&'a str, Ident, E> +where + E: ParseError<&'a str>, +{ + let mut chars = i.chars(); + if let Some(f) = chars.next() { + if f.is_alphabetic() || f == '_' { + let mut idx = 1; + for c in chars { + if !(c.is_alphanumeric() || c == '_') { + break; + } + idx += 1; + } + let id = &i[..idx]; + if is_reserved(id) { + Err(nom::Err::Error(E::from_error_kind(i, ErrorKind::Satisfy))) + } else { + Ok((&i[idx..], Ident::from_str_unchecked(id))) + } + } else { + Err(nom::Err::Error(E::from_error_kind(i, ErrorKind::Satisfy))) + } + } else { + Err(nom::Err::Error(E::from_error_kind(i, ErrorKind::Eof))) + } +} + +named!(ascripted_arg(&str) -> Arg, do_parse!( + complete!(char!('(')) >> + multispace0 >> + ident: ident >> + multispace0 >> + complete!(char!(':')) >> + multispace0 >> + type_: type_ >> + multispace0 >> + complete!(char!(')')) >> + (Arg { + ident, + type_: Some(type_) + }) +)); + +named!(arg(&str) -> Arg, alt!( + ident => { |ident| Arg {ident, type_: None}} | + ascripted_arg +)); + +named!(fun_decl(&str) -> Decl, do_parse!( + complete!(tag!("fn")) + >> multispace0 + >> name: ident + >> multispace1 + >> args: separated_list0!(multispace1, arg) + >> multispace0 + >> char!('=') + >> multispace0 + >> body: expr + >> (Decl::Fun { + name, + body: Fun { + args, + body + } 
+ }) +)); + +named!(ascription_decl(&str) -> Decl, do_parse!( + name: ident + >> multispace0 + >> complete!(char!(':')) + >> multispace0 + >> type_: type_ + >> (Decl::Ascription { + name, + type_ + }) +)); + +named!(pub decl(&str) -> Decl, alt!( + ascription_decl | + fun_decl +)); + +named!(pub toplevel(&str) -> Vec<Decl>, terminated!(many0!(decl), multispace0)); + +#[cfg(test)] +mod tests { + use std::convert::TryInto; + + use crate::ast::{BinaryOperator, Expr, FunctionType, Literal, Type}; + + use super::*; + use expr::tests::ident_expr; + + #[test] + fn fn_decl() { + let res = test_parse!(decl, "fn id x = x"); + assert_eq!( + res, + Decl::Fun { + name: "id".try_into().unwrap(), + body: Fun { + args: vec!["x".try_into().unwrap()], + body: *ident_expr("x"), + } + } + ) + } + + #[test] + fn ascripted_fn_args() { + test_parse!(ascripted_arg, "(x : int)"); + let res = test_parse!(decl, "fn plus1 (x : int) = x + 1"); + assert_eq!( + res, + Decl::Fun { + name: "plus1".try_into().unwrap(), + body: Fun { + args: vec![Arg { + ident: "x".try_into().unwrap(), + type_: Some(Type::Int), + }], + body: Expr::BinaryOp { + lhs: ident_expr("x"), + op: BinaryOperator::Add, + rhs: Box::new(Expr::Literal(Literal::Int(1))), + } + } + } + ); + } + + #[test] + fn multiple_decls() { + let res = test_parse!( + toplevel, + "fn id x = x + fn plus x y = x + y + fn main = plus (id 2) 7" + ); + assert_eq!(res.len(), 3); + let res = test_parse!( + toplevel, + "fn id x = x\nfn plus x y = x + y\nfn main = plus (id 2) 7\n" + ); + assert_eq!(res.len(), 3); + } + + #[test] + fn top_level_ascription() { + let res = test_parse!(toplevel, "id : fn a -> a"); + assert_eq!( + res, + vec![Decl::Ascription { + name: "id".try_into().unwrap(), + type_: Type::Function(FunctionType { + args: vec![Type::Var("a".try_into().unwrap())], + ret: Box::new(Type::Var("a".try_into().unwrap())) + }) + }] + ) + } +} diff --git a/users/glittershark/achilles/src/parser/type_.rs b/users/glittershark/achilles/src/parser/type_.rs new file mode 100644 index 000000000000..c90ceda4d72e --- /dev/null +++ b/users/glittershark/achilles/src/parser/type_.rs @@ -0,0 +1,125 @@ +use nom::character::complete::{multispace0, multispace1}; +use nom::{alt, delimited, do_parse, map, named, opt, separated_list0, tag, terminated, tuple}; + +use super::ident; +use crate::ast::{FunctionType, Type}; + +named!(function_type(&str) -> Type, do_parse!( + tag!("fn") + >> multispace1 + >> args: map!(opt!(terminated!(separated_list0!( + tuple!( + multispace0, + tag!(","), + multispace0 + ), + type_ + ), multispace1)), |args| args.unwrap_or_default()) + >> tag!("->") + >> multispace1 + >> ret: type_ + >> (Type::Function(FunctionType { + args, + ret: Box::new(ret) + })) +)); + +named!(pub type_(&str) -> Type, alt!( + tag!("int") => { |_| Type::Int } | + tag!("float") => { |_| Type::Float } | + tag!("bool") => { |_| Type::Bool } | + tag!("cstring") => { |_| Type::CString } | + function_type | + ident => { |id| Type::Var(id) } | + delimited!( + tuple!(tag!("("), multispace0), + type_, + tuple!(tag!(")"), multispace0) + ) +)); + +#[cfg(test)] +mod tests { + use std::convert::TryFrom; + + use super::*; + use crate::ast::Ident; + + #[test] + fn simple_types() { + assert_eq!(test_parse!(type_, "int"), Type::Int); + assert_eq!(test_parse!(type_, "float"), Type::Float); + assert_eq!(test_parse!(type_, "bool"), Type::Bool); + assert_eq!(test_parse!(type_, "cstring"), Type::CString); + } + + #[test] + fn no_arg_fn_type() { + assert_eq!( + test_parse!(type_, "fn -> int"), + 
Type::Function(FunctionType { + args: vec![], + ret: Box::new(Type::Int) + }) + ); + } + + #[test] + fn fn_type_with_args() { + assert_eq!( + test_parse!(type_, "fn int, bool -> int"), + Type::Function(FunctionType { + args: vec![Type::Int, Type::Bool], + ret: Box::new(Type::Int) + }) + ); + } + + #[test] + fn fn_taking_fn() { + assert_eq!( + test_parse!(type_, "fn fn int, bool -> bool, float -> float"), + Type::Function(FunctionType { + args: vec![ + Type::Function(FunctionType { + args: vec![Type::Int, Type::Bool], + ret: Box::new(Type::Bool) + }), + Type::Float + ], + ret: Box::new(Type::Float) + }) + ) + } + + #[test] + fn parenthesized() { + assert_eq!( + test_parse!(type_, "fn (fn int, bool -> bool), float -> float"), + Type::Function(FunctionType { + args: vec![ + Type::Function(FunctionType { + args: vec![Type::Int, Type::Bool], + ret: Box::new(Type::Bool) + }), + Type::Float + ], + ret: Box::new(Type::Float) + }) + ) + } + + #[test] + fn type_vars() { + assert_eq!( + test_parse!(type_, "fn x, y -> x"), + Type::Function(FunctionType { + args: vec![ + Type::Var(Ident::try_from("x").unwrap()), + Type::Var(Ident::try_from("y").unwrap()), + ], + ret: Box::new(Type::Var(Ident::try_from("x").unwrap())), + }) + ) + } +} diff --git a/users/glittershark/achilles/src/tc/mod.rs b/users/glittershark/achilles/src/tc/mod.rs new file mode 100644 index 000000000000..559ac993cc9b --- /dev/null +++ b/users/glittershark/achilles/src/tc/mod.rs @@ -0,0 +1,680 @@ +use bimap::BiMap; +use derive_more::From; +use itertools::Itertools; +use std::cell::RefCell; +use std::collections::HashMap; +use std::convert::{TryFrom, TryInto}; +use std::fmt::{self, Display}; +use std::{mem, result}; +use thiserror::Error; + +use crate::ast::{self, hir, Arg, BinaryOperator, Ident, Literal}; +use crate::common::env::Env; +use crate::common::{Namer, NamerOf}; + +#[derive(Debug, Error)] +pub enum Error { + #[error("Undefined variable {0}")] + UndefinedVariable(Ident<'static>), + + #[error("Mismatched types: expected {expected}, but got {actual}")] + TypeMismatch { expected: Type, actual: Type }, + + #[error("Mismatched types, expected numeric type, but got {0}")] + NonNumeric(Type), + + #[error("Ambiguous type {0}")] + AmbiguousType(TyVar), +} + +pub type Result<T> = result::Result<T, Error>; + +#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] +pub struct TyVar(u64); + +impl Display for TyVar { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "t{}", self.0) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, Hash)] +pub struct NullaryType(String); + +impl Display for NullaryType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(&self.0) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, Copy)] +pub enum PrimType { + Int, + Float, + Bool, + CString, +} + +impl<'a> From<PrimType> for ast::Type<'a> { + fn from(pr: PrimType) -> Self { + match pr { + PrimType::Int => ast::Type::Int, + PrimType::Float => ast::Type::Float, + PrimType::Bool => ast::Type::Bool, + PrimType::CString => ast::Type::CString, + } + } +} + +impl Display for PrimType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + PrimType::Int => f.write_str("int"), + PrimType::Float => f.write_str("float"), + PrimType::Bool => f.write_str("bool"), + PrimType::CString => f.write_str("cstring"), + } + } +} + +#[derive(Debug, PartialEq, Eq, Clone, From)] +pub enum Type { + #[from(ignore)] + Univ(TyVar), + #[from(ignore)] + Exist(TyVar), + Nullary(NullaryType), + Prim(PrimType), + Fun { + args: 
Vec<Type>, + ret: Box<Type>, + }, +} + +impl<'a> TryFrom<Type> for ast::Type<'a> { + type Error = Type; + + fn try_from(value: Type) -> result::Result<Self, Self::Error> { + match value { + Type::Univ(_) => todo!(), + Type::Exist(_) => Err(value), + Type::Nullary(_) => todo!(), + Type::Prim(p) => Ok(p.into()), + Type::Fun { ref args, ref ret } => Ok(ast::Type::Function(ast::FunctionType { + args: args + .clone() + .into_iter() + .map(Self::try_from) + .try_collect() + .map_err(|_| value.clone())?, + ret: Box::new((*ret.clone()).try_into().map_err(|_| value.clone())?), + })), + } + } +} + +const INT: Type = Type::Prim(PrimType::Int); +const FLOAT: Type = Type::Prim(PrimType::Float); +const BOOL: Type = Type::Prim(PrimType::Bool); +const CSTRING: Type = Type::Prim(PrimType::CString); + +impl Display for Type { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Type::Nullary(nt) => nt.fmt(f), + Type::Prim(p) => p.fmt(f), + Type::Univ(TyVar(n)) => write!(f, "∀{}", n), + Type::Exist(TyVar(n)) => write!(f, "∃{}", n), + Type::Fun { args, ret } => write!(f, "fn {} -> {}", args.iter().join(", "), ret), + } + } +} + +struct Typechecker<'ast> { + ty_var_namer: NamerOf<TyVar>, + ctx: HashMap<TyVar, Type>, + env: Env<Ident<'ast>, Type>, + + /// AST type var -> type + instantiations: Env<Ident<'ast>, Type>, + + /// AST type-var -> universal TyVar + type_vars: RefCell<(BiMap<Ident<'ast>, TyVar>, NamerOf<Ident<'static>>)>, +} + +impl<'ast> Typechecker<'ast> { + fn new() -> Self { + Self { + ty_var_namer: Namer::new(TyVar).boxed(), + type_vars: RefCell::new(( + Default::default(), + Namer::alphabetic().map(|n| Ident::try_from(n).unwrap()), + )), + ctx: Default::default(), + env: Default::default(), + instantiations: Default::default(), + } + } + + pub(crate) fn tc_expr(&mut self, expr: ast::Expr<'ast>) -> Result<hir::Expr<'ast, Type>> { + match expr { + ast::Expr::Ident(ident) => { + let type_ = self + .env + .resolve(&ident) + .ok_or_else(|| Error::UndefinedVariable(ident.to_owned()))? 
+ .clone(); + Ok(hir::Expr::Ident(ident, type_)) + } + ast::Expr::Literal(lit) => { + let type_ = match lit { + Literal::Int(_) => Type::Prim(PrimType::Int), + Literal::Bool(_) => Type::Prim(PrimType::Bool), + Literal::String(_) => Type::Prim(PrimType::CString), + }; + Ok(hir::Expr::Literal(lit.to_owned(), type_)) + } + ast::Expr::UnaryOp { op, rhs } => todo!(), + ast::Expr::BinaryOp { lhs, op, rhs } => { + let lhs = self.tc_expr(*lhs)?; + let rhs = self.tc_expr(*rhs)?; + let type_ = match op { + BinaryOperator::Equ | BinaryOperator::Neq => { + self.unify(lhs.type_(), rhs.type_())?; + Type::Prim(PrimType::Bool) + } + BinaryOperator::Add | BinaryOperator::Sub | BinaryOperator::Mul => { + let ty = self.unify(lhs.type_(), rhs.type_())?; + // if !matches!(ty, Type::Int | Type::Float) { + // return Err(Error::NonNumeric(ty)); + // } + ty + } + BinaryOperator::Div => todo!(), + BinaryOperator::Pow => todo!(), + }; + Ok(hir::Expr::BinaryOp { + lhs: Box::new(lhs), + op, + rhs: Box::new(rhs), + type_, + }) + } + ast::Expr::Let { bindings, body } => { + self.env.push(); + let bindings = bindings + .into_iter() + .map( + |ast::Binding { ident, type_, body }| -> Result<hir::Binding<Type>> { + let body = self.tc_expr(body)?; + if let Some(type_) = type_ { + let type_ = self.type_from_ast_type(type_); + self.unify(body.type_(), &type_)?; + } + self.env.set(ident.clone(), body.type_().clone()); + Ok(hir::Binding { + ident, + type_: body.type_().clone(), + body, + }) + }, + ) + .collect::<Result<Vec<hir::Binding<Type>>>>()?; + let body = self.tc_expr(*body)?; + self.env.pop(); + Ok(hir::Expr::Let { + bindings, + type_: body.type_().clone(), + body: Box::new(body), + }) + } + ast::Expr::If { + condition, + then, + else_, + } => { + let condition = self.tc_expr(*condition)?; + self.unify(&Type::Prim(PrimType::Bool), condition.type_())?; + let then = self.tc_expr(*then)?; + let else_ = self.tc_expr(*else_)?; + let type_ = self.unify(then.type_(), else_.type_())?; + Ok(hir::Expr::If { + condition: Box::new(condition), + then: Box::new(then), + else_: Box::new(else_), + type_, + }) + } + ast::Expr::Fun(f) => { + let ast::Fun { args, body } = *f; + self.env.push(); + let args: Vec<_> = args + .into_iter() + .map(|Arg { ident, type_ }| { + let ty = match type_ { + Some(t) => self.type_from_ast_type(t), + None => self.fresh_ex(), + }; + self.env.set(ident.clone(), ty.clone()); + (ident, ty) + }) + .collect(); + let body = self.tc_expr(body)?; + self.env.pop(); + Ok(hir::Expr::Fun { + type_: self.universalize( + args.iter().map(|(_, ty)| ty.clone()).collect(), + body.type_().clone(), + ), + args, + body: Box::new(body), + }) + } + ast::Expr::Call { fun, args } => { + let ret_ty = self.fresh_ex(); + let arg_tys = args.iter().map(|_| self.fresh_ex()).collect::<Vec<_>>(); + let ft = Type::Fun { + args: arg_tys.clone(), + ret: Box::new(ret_ty.clone()), + }; + let fun = self.tc_expr(*fun)?; + self.instantiations.push(); + self.unify(&ft, fun.type_())?; + let args = args + .into_iter() + .zip(arg_tys) + .map(|(arg, ty)| { + let arg = self.tc_expr(arg)?; + self.unify(&ty, arg.type_())?; + Ok(arg) + }) + .try_collect()?; + self.commit_instantiations(); + Ok(hir::Expr::Call { + fun: Box::new(fun), + args, + type_: ret_ty, + }) + } + ast::Expr::Ascription { expr, type_ } => { + let expr = self.tc_expr(*expr)?; + let type_ = self.type_from_ast_type(type_); + self.unify(expr.type_(), &type_)?; + Ok(expr) + } + } + } + + pub(crate) fn tc_decl( + &mut self, + decl: ast::Decl<'ast>, + ) -> Result<Option<hir::Decl<'ast, Type>>> { 
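+        // A `Fun` decl is typechecked by re-wrapping its body as an
+        // `ast::Expr::Fun` (ascribed with any previously declared top-level
+        // type) and running it through `tc_expr`; an `Ascription` decl only
+        // records the declared type in the environment and yields no HIR
+        // decl, hence the `Option` in the return type.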
+ match decl { + ast::Decl::Fun { name, body } => { + let mut expr = ast::Expr::Fun(Box::new(body)); + if let Some(type_) = self.env.resolve(&name) { + expr = ast::Expr::Ascription { + expr: Box::new(expr), + type_: self.finalize_type(type_.clone())?, + }; + } + + let body = self.tc_expr(expr)?; + let type_ = body.type_().clone(); + self.env.set(name.clone(), type_); + match body { + hir::Expr::Fun { args, body, type_ } => Ok(Some(hir::Decl::Fun { + name, + args, + body, + type_, + })), + _ => unreachable!(), + } + } + ast::Decl::Ascription { name, type_ } => { + let type_ = self.type_from_ast_type(type_); + self.env.set(name.clone(), type_); + Ok(None) + } + } + } + + fn fresh_tv(&mut self) -> TyVar { + self.ty_var_namer.make_name() + } + + fn fresh_ex(&mut self) -> Type { + Type::Exist(self.fresh_tv()) + } + + fn fresh_univ(&mut self) -> Type { + Type::Univ(self.fresh_tv()) + } + + #[allow(clippy::redundant_closure)] // https://github.com/rust-lang/rust-clippy/issues/6903 + fn universalize(&mut self, args: Vec<Type>, ret: Type) -> Type { + let mut vars = HashMap::new(); + let mut universalize_type = move |ty| match ty { + Type::Exist(tv) if self.resolve_tv(tv).is_none() => vars + .entry(tv) + .or_insert_with_key(|tv| { + let ty = self.fresh_univ(); + self.ctx.insert(*tv, ty.clone()); + ty + }) + .clone(), + _ => ty, + }; + + Type::Fun { + args: args.into_iter().map(|t| universalize_type(t)).collect(), + ret: Box::new(universalize_type(ret)), + } + } + + fn unify(&mut self, ty1: &Type, ty2: &Type) -> Result<Type> { + match (ty1, ty2) { + (Type::Exist(tv), ty) | (ty, Type::Exist(tv)) => match self.resolve_tv(*tv) { + Some(existing_ty) if self.types_match(ty, &existing_ty) => Ok(ty.clone()), + Some(var @ ast::Type::Var(_)) => { + let var = self.type_from_ast_type(var); + self.unify(&var, ty) + } + Some(existing_ty) => match ty { + Type::Exist(_) => { + let rhs = self.type_from_ast_type(existing_ty); + self.unify(ty, &rhs) + } + _ => Err(Error::TypeMismatch { + expected: ty.clone(), + actual: self.type_from_ast_type(existing_ty), + }), + }, + None => match self.ctx.insert(*tv, ty.clone()) { + Some(existing) => self.unify(&existing, ty), + None => Ok(ty.clone()), + }, + }, + (Type::Univ(u1), Type::Univ(u2)) if u1 == u2 => Ok(ty2.clone()), + (Type::Univ(u), ty) | (ty, Type::Univ(u)) => { + let ident = self.name_univ(*u); + match self.instantiations.resolve(&ident) { + Some(existing_ty) if ty == existing_ty => Ok(ty.clone()), + Some(existing_ty) => Err(Error::TypeMismatch { + expected: ty.clone(), + actual: existing_ty.clone(), + }), + None => { + self.instantiations.set(ident, ty.clone()); + Ok(ty.clone()) + } + } + } + (Type::Prim(p1), Type::Prim(p2)) if p1 == p2 => Ok(ty2.clone()), + ( + Type::Fun { + args: args1, + ret: ret1, + }, + Type::Fun { + args: args2, + ret: ret2, + }, + ) => { + let args = args1 + .iter() + .zip(args2) + .map(|(t1, t2)| self.unify(t1, t2)) + .try_collect()?; + let ret = self.unify(ret1, ret2)?; + Ok(Type::Fun { + args, + ret: Box::new(ret), + }) + } + (Type::Nullary(_), _) | (_, Type::Nullary(_)) => todo!(), + _ => Err(Error::TypeMismatch { + expected: ty1.clone(), + actual: ty2.clone(), + }), + } + } + + fn finalize_expr( + &self, + expr: hir::Expr<'ast, Type>, + ) -> Result<hir::Expr<'ast, ast::Type<'ast>>> { + expr.traverse_type(|ty| self.finalize_type(ty)) + } + + fn finalize_decl( + &self, + decl: hir::Decl<'ast, Type>, + ) -> Result<hir::Decl<'ast, ast::Type<'ast>>> { + decl.traverse_type(|ty| self.finalize_type(ty)) + } + + fn finalize_type(&self, ty: Type) 
-> Result<ast::Type<'static>> { + let ret = match ty { + Type::Exist(tv) => self.resolve_tv(tv).ok_or(Error::AmbiguousType(tv)), + Type::Univ(tv) => Ok(ast::Type::Var(self.name_univ(tv))), + Type::Nullary(_) => todo!(), + Type::Prim(pr) => Ok(pr.into()), + Type::Fun { args, ret } => Ok(ast::Type::Function(ast::FunctionType { + args: args + .into_iter() + .map(|ty| self.finalize_type(ty)) + .try_collect()?, + ret: Box::new(self.finalize_type(*ret)?), + })), + }; + ret + } + + fn resolve_tv(&self, tv: TyVar) -> Option<ast::Type<'static>> { + let mut res = &Type::Exist(tv); + loop { + match res { + Type::Exist(tv) => { + res = self.ctx.get(tv)?; + } + Type::Univ(tv) => { + let ident = self.name_univ(*tv); + if let Some(r) = self.instantiations.resolve(&ident) { + res = r; + } else { + break Some(ast::Type::Var(ident)); + } + } + Type::Nullary(_) => todo!(), + Type::Prim(pr) => break Some((*pr).into()), + Type::Fun { args, ret } => todo!(), + } + } + } + + fn type_from_ast_type(&mut self, ast_type: ast::Type<'ast>) -> Type { + match ast_type { + ast::Type::Int => INT, + ast::Type::Float => FLOAT, + ast::Type::Bool => BOOL, + ast::Type::CString => CSTRING, + ast::Type::Function(ast::FunctionType { args, ret }) => Type::Fun { + args: args + .into_iter() + .map(|t| self.type_from_ast_type(t)) + .collect(), + ret: Box::new(self.type_from_ast_type(*ret)), + }, + ast::Type::Var(id) => Type::Univ({ + let opt_tv = { self.type_vars.borrow_mut().0.get_by_left(&id).copied() }; + opt_tv.unwrap_or_else(|| { + let tv = self.fresh_tv(); + self.type_vars + .borrow_mut() + .0 + .insert_no_overwrite(id, tv) + .unwrap(); + tv + }) + }), + } + } + + fn name_univ(&self, tv: TyVar) -> Ident<'static> { + let mut vars = self.type_vars.borrow_mut(); + vars.0 + .get_by_right(&tv) + .map(Ident::to_owned) + .unwrap_or_else(|| { + let name = vars.1.make_name(); + vars.0.insert_no_overwrite(name.clone(), tv).unwrap(); + name + }) + } + + fn commit_instantiations(&mut self) { + let mut ctx = mem::take(&mut self.ctx); + for (_, v) in ctx.iter_mut() { + if let Type::Univ(tv) = v { + if let Some(concrete) = self.instantiations.resolve(&self.name_univ(*tv)) { + *v = concrete.clone(); + } + } + } + self.ctx = ctx; + self.instantiations.pop(); + } + + fn types_match(&self, type_: &Type, ast_type: &ast::Type<'ast>) -> bool { + match (type_, ast_type) { + (Type::Univ(u), ast::Type::Var(v)) => { + Some(u) == self.type_vars.borrow().0.get_by_left(v) + } + (Type::Univ(_), _) => false, + (Type::Exist(_), _) => false, + (Type::Nullary(_), _) => todo!(), + (Type::Prim(pr), ty) => ast::Type::from(*pr) == *ty, + (Type::Fun { args, ret }, ast::Type::Function(ft)) => { + args.len() == ft.args.len() + && args + .iter() + .zip(&ft.args) + .all(|(a1, a2)| self.types_match(a1, &a2)) + && self.types_match(&*ret, &*ft.ret) + } + (Type::Fun { .. }, _) => false, + } + } +} + +pub fn typecheck_expr(expr: ast::Expr) -> Result<hir::Expr<ast::Type>> { + let mut typechecker = Typechecker::new(); + let typechecked = typechecker.tc_expr(expr)?; + typechecker.finalize_expr(typechecked) +} + +pub fn typecheck_toplevel(decls: Vec<ast::Decl>) -> Result<Vec<hir::Decl<ast::Type>>> { + let mut typechecker = Typechecker::new(); + let mut res = Vec::with_capacity(decls.len()); + for decl in decls { + if let Some(hir_decl) = typechecker.tc_decl(decl)? { + let hir_decl = typechecker.finalize_decl(hir_decl)?; + res.push(hir_decl); + } + typechecker.ctx.clear(); + } + Ok(res) +} + +#[cfg(test)] +mod tests { + use super::*; + + macro_rules! 
assert_type { + ($expr: expr, $type: expr) => { + use crate::parser::{expr, type_}; + let parsed_expr = test_parse!(expr, $expr); + let parsed_type = test_parse!(type_, $type); + let res = typecheck_expr(parsed_expr).unwrap_or_else(|e| panic!("{}", e)); + assert!( + res.type_().alpha_equiv(&parsed_type), + "{} inferred type {}, but expected {}", + $expr, + res.type_(), + $type + ); + }; + } + + macro_rules! assert_type_error { + ($expr: expr) => { + use crate::parser::expr; + let parsed_expr = test_parse!(expr, $expr); + let res = typecheck_expr(parsed_expr); + assert!( + res.is_err(), + "Expected type error, but got type: {}", + res.unwrap().type_() + ); + }; + } + + #[test] + fn literal_int() { + assert_type!("1", "int"); + } + + #[test] + fn conditional() { + assert_type!("if 1 == 2 then 3 else 4", "int"); + } + + #[test] + #[ignore] + fn add_bools() { + assert_type_error!("true + false"); + } + + #[test] + fn call_generic_function() { + assert_type!("(fn x = x) 1", "int"); + } + + #[test] + fn generic_function() { + assert_type!("fn x = x", "fn x -> x"); + } + + #[test] + #[ignore] + fn let_generalization() { + assert_type!("let id = fn x = x in if id true then id 1 else 2", "int"); + } + + #[test] + fn concrete_function() { + assert_type!("fn x = x + 1", "fn int -> int"); + } + + #[test] + fn arg_ascriptions() { + assert_type!("fn (x: int) = x", "fn int -> int"); + } + + #[test] + fn call_concrete_function() { + assert_type!("(fn x = x + 1) 2", "int"); + } + + #[test] + fn conditional_non_bool() { + assert_type_error!("if 3 then true else false"); + } + + #[test] + fn let_int() { + assert_type!("let x = 1 in x", "int"); + } +} diff --git a/users/glittershark/achilles/tests/compile.rs b/users/glittershark/achilles/tests/compile.rs new file mode 100644 index 000000000000..7fa15ad9653e --- /dev/null +++ b/users/glittershark/achilles/tests/compile.rs @@ -0,0 +1,54 @@ +use std::process::Command; + +use crate_root::root; + +const FIXTURES: &[(&str, i32)] = &[("simple", 5), ("functions", 9)]; + +#[test] +fn compile_and_run_files() { + let ach = root().unwrap().join("ach"); + + println!("Running: `make clean`"); + assert!( + Command::new("make") + .arg("clean") + .current_dir(&ach) + .spawn() + .unwrap() + .wait() + .unwrap() + .success(), + "make clean failed" + ); + + for (fixture, exit_code) in FIXTURES { + println!(">>> Testing: {}", fixture); + + println!(" Running: `make {}`", fixture); + assert!( + Command::new("make") + .arg(fixture) + .current_dir(&ach) + .spawn() + .unwrap() + .wait() + .unwrap() + .success(), + "make failed" + ); + + let out_path = ach.join(fixture); + println!(" Running: `{}`", out_path.to_str().unwrap()); + assert_eq!( + Command::new(out_path) + .spawn() + .unwrap() + .wait() + .unwrap() + .code() + .unwrap(), + *exit_code, + ); + println!(" OK"); + } +} diff --git a/users/glittershark/emacs.d/+bindings.el b/users/glittershark/emacs.d/+bindings.el new file mode 100644 index 000000000000..81ffde367915 --- /dev/null +++ b/users/glittershark/emacs.d/+bindings.el @@ -0,0 +1,1420 @@ +;; /+bindings.el -*- lexical-binding: t; -*- + +(load! "utils") +(require 'f) +(require 'predd) + +(undefine-key! :keymaps 'doom-leader-map "/") + +(defmacro find-file-in! (path &optional project-p) + "Returns an interactive function for searching files." 
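+  ;; Expands to a command that calls whatever `find-file' (or
+  ;; `projectile-find-file', when PROJECT-P is non-nil) is remapped to, with
+  ;; `default-directory' bound to PATH; used below as e.g.
+  ;; (find-file-in! (doom-project-root)).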
+ `(lambda () (interactive) + (let ((default-directory ,path)) + (call-interactively + ',(command-remapping + (if project-p + #'projectile-find-file + #'find-file)))))) + +(defun dired-mode-p () (eq 'dired-mode major-mode)) + +(defun grfn/dired-minus () + (interactive) + (if (dired-mode-p) + (dired-up-directory) + (when buffer-file-name + (-> (buffer-file-name) + (f-dirname) + (dired))))) + +(defmacro define-move-and-insert + (name &rest body) + `(defun ,name (count &optional vcount skip-empty-lines) + ;; Following interactive form taken from the source for `evil-insert' + (interactive + (list (prefix-numeric-value current-prefix-arg) + (and (evil-visual-state-p) + (memq (evil-visual-type) '(line block)) + (save-excursion + (let ((m (mark))) + ;; go to upper-left corner temporarily so + ;; `count-lines' yields accurate results + (evil-visual-rotate 'upper-left) + (prog1 (count-lines evil-visual-beginning evil-visual-end) + (set-mark m))))) + (evil-visual-state-p))) + (atomic-change-group + ,@body + (evil-insert count vcount skip-empty-lines)))) + +(define-move-and-insert grfn/insert-at-sexp-end + (when (not (equal (get-char) "(")) + (backward-up-list)) + (forward-sexp) + (backward-char)) + +(define-move-and-insert grfn/insert-at-sexp-start + (backward-up-list) + (forward-char)) + +(define-move-and-insert grfn/insert-at-form-start + (backward-sexp) + (backward-char) + (insert " ")) + +(define-move-and-insert grfn/insert-at-form-end + (forward-sexp) + (insert " ")) + +(load! "splitjoin") + +(defun +hlissner/install-snippets () + "Install my snippets from https://github.com/hlissner/emacs-snippets into +private/hlissner/snippets." + (interactive) + (doom-fetch :github "hlissner/emacs-snippets" + (expand-file-name "snippets" (doom-module-path :private 'hlissner)))) + +(defun +hlissner/yank-buffer-filename () + "Copy the current buffer's path to the kill ring." + (interactive) + (if-let* ((filename (or buffer-file-name (bound-and-true-p list-buffers-directory)))) + (message (kill-new (abbreviate-file-name filename))) + (error "Couldn't find filename in current buffer"))) + +(defmacro +def-finder! (name dir) + "Define a pair of find-file and browse functions." + `(progn + (defun ,(intern (format "+find-in-%s" name)) () + (interactive) + (let ((default-directory ,dir) + projectile-project-name + projectile-require-project-root + projectile-cached-buffer-file-name + projectile-cached-project-root) + (call-interactively #'projectile-find-file))) + (defun ,(intern (format "+hlissner/browse-%s" name)) () + (interactive) + (let ((default-directory ,dir)) + (call-interactively (command-remapping #'find-file)))))) + +(+def-finder! templates +file-templates-dir) +(+def-finder! snippets +grfn-snippets-dir) +(+def-finder! dotfiles (expand-file-name ".dotfiles" "~")) +(+def-finder! doomd (expand-file-name ".doom.d" "~")) +(+def-finder! notes +org-dir) +(+def-finder! home-config (expand-file-name "code/system/home" "~")) +(+def-finder! system-config (expand-file-name "code/system/system" "~")) + +(defun +grfn/paxedit-kill (&optional n) + (interactive "p") + (or (paxedit-comment-kill) + (when (paxedit-symbol-cursor-within?) + (paxedit-symbol-kill)) + (paxedit-implicit-sexp-kill n) + (paxedit-sexp-kill n) + (message paxedit-message-kill))) +;;; + +(map! + [remap evil-jump-to-tag] #'projectile-find-tag + [remap find-tag] #'projectile-find-tag + ;; ensure there are no conflicts + :nmvo doom-leader-key nil + :nmvo doom-localleader-key nil) + +(undefine-key! :keymaps 'doom-leader-map "/") + +(map! 
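+ ;; map! state prefixes used throughout: :n normal, :v visual, :i insert,
+ ;; :e emacs, :m motion, :o operator-pending, :r replace, :g global; they
+ ;; compose, e.g. :gnvime binds in all of those states at once.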
+ ;; --- Global keybindings --------------------------- + ;; Make M-x available everywhere + :gnvime "M-x" #'execute-extended-command + :gnvime "A-x" #'execute-extended-command + ;; Emacs debug utilities + :gnvime "M-;" #'eval-expression + :gnvime "M-:" #'doom/open-scratch-buffer + ;; Text-scaling + "M-+" (λ! (text-scale-set 0)) + "M-=" #'text-scale-increase + "M--" #'text-scale-decrease + ;; Simple window navigation/manipulation + "C-`" #'doom/popup-toggle + "C-~" #'doom/popup-raise + "M-t" #'+workspace/new + "M-T" #'+workspace/display + "M-w" #'delete-window + "M-W" #'+workspace/close-workspace-or-frame + "M-n" #'evil-buffer-new + "M-N" #'make-frame + "M-1" (λ! (+workspace/switch-to 0)) + "M-2" (λ! (+workspace/switch-to 1)) + "M-3" (λ! (+workspace/switch-to 2)) + "M-4" (λ! (+workspace/switch-to 3)) + "M-5" (λ! (+workspace/switch-to 4)) + "M-6" (λ! (+workspace/switch-to 5)) + "M-7" (λ! (+workspace/switch-to 6)) + "M-8" (λ! (+workspace/switch-to 7)) + "M-9" (λ! (+workspace/switch-to 8)) + "M-0" #'+workspace/switch-to-last + ;; Other sensible, textmate-esque global bindings + :ne "M-r" #'+eval/buffer + :ne "M-R" #'+eval/region-and-replace + :ne "M-b" #'+eval/build + :ne "M-a" #'mark-whole-buffer + :ne "M-c" #'evil-yank + :ne "M-q" (if (daemonp) #'delete-frame #'save-buffers-kill-emacs) + :ne "M-f" #'swiper + :ne "C-M-f" #'doom/toggle-fullscreen + :n "M-s" #'save-buffer + :m "A-j" #'+hlissner:multi-next-line + :m "A-k" #'+hlissner:multi-previous-line + :nv "C-SPC" #'+evil:fold-toggle + :gnvimer "M-v" #'clipboard-yank + ;; Easier window navigation + :en "C-h" #'evil-window-left + :en "C-j" #'evil-window-down + :en "C-k" #'evil-window-up + :en "C-l" #'evil-window-right + :n "U" #'undo-tree-visualize + + "C-x p" #'doom/other-popup + + :n "K" #'+lookup/documentation + :n "g d" #'+lookup/definition + + + ;; --- <leader> ------------------------------------- + (:leader + :desc "Ex command" :nv ";" #'evil-ex + :desc "M-x" :nv ":" #'execute-extended-command + :desc "Pop up scratch buffer" :nv "x" #'doom/open-scratch-buffer + :desc "Org Capture" :nv "X" #'org-capture + :desc "Org Capture" :nv "a" #'org-capture + + ;; Most commonly used + :desc "Find file in project" :n "SPC" #'projectile-find-file + :desc "Switch workspace buffer" :n "," #'persp-switch-to-buffer + :desc "Switch buffer" :n "<" #'switch-to-buffer + :desc "Browse files" :n "." #'find-file + :desc "Toggle last popup" :n "~" #'doom/popup-toggle + :desc "Eval expression" :n "`" #'eval-expression + :desc "Blink cursor line" :n "DEL" #'+doom/blink-cursor + :desc "Jump to bookmark" :n "RET" #'bookmark-jump + + ;; C-u is used by evil + :desc "Universal argument" :n "u" #'universal-argument + :desc "window" :n "w" evil-window-map + + (:desc "previous..." :prefix "[" + :desc "Text size" :nv "[" #'text-scale-decrease + :desc "Buffer" :nv "b" #'doom/previous-buffer + :desc "Diff Hunk" :nv "d" #'git-gutter:previous-hunk + :desc "Todo" :nv "t" #'hl-todo-previous + :desc "Error" :nv "e" #'previous-error + :desc "Workspace" :nv "w" #'+workspace/switch-left + :desc "Smart jump" :nv "h" #'smart-backward + :desc "Spelling error" :nv "s" #'evil-prev-flyspell-error + :desc "Spelling correction" :n "S" #'flyspell-correct-previous-word-generic + :desc "Git conflict" :n "n" #'smerge-prev) + + (:desc "next..." 
:prefix "]" + :desc "Text size" :nv "]" #'text-scale-increase + :desc "Buffer" :nv "b" #'doom/next-buffer + :desc "Diff Hunk" :nv "d" #'git-gutter:next-hunk + :desc "Todo" :nv "t" #'hl-todo-next + :desc "Error" :nv "e" #'next-error + :desc "Workspace" :nv "w" #'+workspace/switch-right + :desc "Smart jump" :nv "l" #'smart-forward + :desc "Spelling error" :nv "s" #'evil-next-flyspell-error + :desc "Spelling correction" :n "S" #'flyspell-correct-word-generic + :desc "Git conflict" :n "n" #'smerge-next) + + (:desc "search" :prefix "/" + :desc "Swiper" :nv "/" #'swiper + :desc "Imenu" :nv "i" #'imenu + :desc "Imenu across buffers" :nv "I" #'imenu-anywhere + :desc "Online providers" :nv "o" #'+lookup/online-select) + + (:desc "workspace" :prefix "TAB" + :desc "Display tab bar" :n "TAB" #'+workspace/display + :desc "New workspace" :n "n" #'+workspace/new + :desc "Load workspace from file" :n "l" #'+workspace/load + :desc "Load last session" :n "L" (λ! (+workspace/load-session)) + :desc "Save workspace to file" :n "s" #'+workspace/save + :desc "Autosave current session" :n "S" #'+workspace/save-session + :desc "Switch workspace" :n "." #'+workspace/switch-to + :desc "Kill all buffers" :n "x" #'doom/kill-all-buffers + :desc "Delete session" :n "X" #'+workspace/kill-session + :desc "Delete this workspace" :n "d" #'+workspace/delete + :desc "Load session" :n "L" #'+workspace/load-session + :desc "Next workspace" :n "]" #'+workspace/switch-right + :desc "Previous workspace" :n "[" #'+workspace/switch-left + :desc "Switch to 1st workspace" :n "1" (λ! (+workspace/switch-to 0)) + :desc "Switch to 2nd workspace" :n "2" (λ! (+workspace/switch-to 1)) + :desc "Switch to 3rd workspace" :n "3" (λ! (+workspace/switch-to 2)) + :desc "Switch to 4th workspace" :n "4" (λ! (+workspace/switch-to 3)) + :desc "Switch to 5th workspace" :n "5" (λ! (+workspace/switch-to 4)) + :desc "Switch to 6th workspace" :n "6" (λ! (+workspace/switch-to 5)) + :desc "Switch to 7th workspace" :n "7" (λ! (+workspace/switch-to 6)) + :desc "Switch to 8th workspace" :n "8" (λ! (+workspace/switch-to 7)) + :desc "Switch to 9th workspace" :n "9" (λ! (+workspace/switch-to 8)) + :desc "Switch to last workspace" :n "0" #'+workspace/switch-to-last) + + (:desc "buffer" :prefix "b" + :desc "New empty buffer" :n "n" #'evil-buffer-new + :desc "Switch workspace buffer" :n "b" #'persp-switch-to-buffer + :desc "Switch buffer" :n "B" #'switch-to-buffer + :desc "Kill buffer" :n "k" #'doom/kill-this-buffer + :desc "Kill other buffers" :n "o" #'doom/kill-other-buffers + :desc "Save buffer" :n "s" #'save-buffer + :desc "Pop scratch buffer" :n "x" #'doom/open-scratch-buffer + :desc "Bury buffer" :n "z" #'bury-buffer + :desc "Next buffer" :n "]" #'doom/next-buffer + :desc "Previous buffer" :n "[" #'doom/previous-buffer + :desc "Sudo edit this file" :n "S" #'doom/sudo-this-file) + + (:desc "code" :prefix "c" + :desc "List errors" :n "x" #'flycheck-list-errors + :desc "Evaluate buffer/region" :n "e" #'+eval/buffer + :v "e" #'+eval/region + :desc "Evaluate & replace region" :nv "E" #'+eval:replace-region + :desc "Build tasks" :nv "b" #'+eval/build + :desc "Jump to definition" :n "d" #'+lookup/definition + :desc "Jump to references" :n "D" #'+lookup/references + :desc "Open REPL" :n "r" #'+eval/open-repl + :v "r" #'+eval:repl) + + (:desc "file" :prefix "f" + :desc "Find file" :n "." #'find-file + :desc "Sudo find file" :n ">" #'doom/sudo-find-file + :desc "Find file in project" :n "/" #'projectile-find-file + :desc "Find file from here" :n "?" 
#'counsel-file-jump + :desc "Find other file" :n "a" #'projectile-find-other-file + :desc "Open project editorconfig" :n "c" #'editorconfig-find-current-editorconfig + :desc "Find file in dotfiles" :n "d" #'+find-in-dotfiles + :desc "Find file in system config" :n "s" #'+find-in-system-config + :desc "Find file in home config" :n "h" #'+find-in-home-config + :desc "Browse dotfiles" :n "D" #'+hlissner/browse-dotfiles + :desc "Find file in emacs.d" :n "e" #'+find-in-doomd + :desc "Browse emacs.d" :n "E" #'+hlissner/browse-doomd + :desc "Recent files" :n "r" #'recentf-open-files + :desc "Recent project files" :n "R" #'projectile-recentf + :desc "Yank filename" :n "y" #'+hlissner/yank-buffer-filename) + + (:desc "git" :prefix "g" + :desc "Git status" :n "S" #'magit-status + :desc "Git blame" :n "b" #'magit-blame + :desc "Git time machine" :n "t" #'git-timemachine-toggle + :desc "Git stage hunk" :n "s" #'git-gutter:stage-hunk + :desc "Git revert hunk" :n "r" #'git-gutter:revert-hunk + :desc "Git revert buffer" :n "R" #'vc-revert + ;; :desc "List gists" :n "g" #'+gist:list + :desc "Git grep" :n "g" #'counsel-projectile-rg + :desc "Checkout Branch" :n "c" #'counsel-git-checkout + :desc "Next hunk" :nv "]" #'git-gutter:next-hunk + :desc "Previous hunk" :nv "[" #'git-gutter:previous-hunk + + (:desc "smerge" :prefix "m" + :desc "Keep Current" :n "SPC" #'smerge-keep-current + :desc "Keep All" :n "a" #'smerge-keep-all + :desc "Keep Upper" :n "u" #'smerge-keep-upper + :desc "Keep Lower" :n "l" #'smerge-keep-lower)) + + (:desc "help" :prefix "h" + :n "h" help-map + :desc "Apropos" :n "a" #'apropos + :desc "Reload theme" :n "R" #'doom//reload-theme + :desc "Find library" :n "l" #'find-library + :desc "Toggle Emacs log" :n "m" #'doom/popup-toggle-messages + :desc "Command log" :n "L" #'global-command-log-mode + :desc "Describe function" :n "f" #'describe-function + :desc "Describe key" :n "k" #'describe-key + :desc "Describe char" :n "c" #'describe-char + :desc "Describe mode" :n "M" #'describe-mode + :desc "Describe variable" :n "v" #'describe-variable + :desc "Describe face" :n "F" #'describe-face + :desc "Describe DOOM setting" :n "s" #'doom/describe-setting + :desc "Describe DOOM module" :n "d" #'doom/describe-module + :desc "Find definition" :n "." 
#'+lookup/definition + :desc "Find references" :n "/" #'+lookup/references + :desc "Find documentation" :n "h" #'+lookup/documentation + :desc "What face" :n "'" #'doom/what-face + :desc "What minor modes" :n ";" #'doom/what-minor-mode + :desc "Info" :n "i" #'info + :desc "Toggle profiler" :n "p" #'doom/toggle-profiler) + + (:desc "insert" :prefix "i" + :desc "From kill-ring" :nv "y" #'counsel-yank-pop + :desc "From snippet" :nv "s" #'yas-insert-snippet) + + (:desc "notes" :prefix "n" + :desc "Agenda" :n "a" #'org-agenda + :desc "Find file in notes" :n "n" #'+find-in-notes + :desc "Store link" :n "l" #'org-store-link + :desc "Browse notes" :n "N" #'+hlissner/browse-notes + :desc "Org capture" :n "x" #'+org-capture/open + :desc "Create clubhouse story" :n "c" #'org-clubhouse-create-story + :desc "Archive subtree" :n "k" #'org-archive-subtree + :desc "Goto clocked-in note" :n "g" #'org-clock-goto + :desc "Clock Out" :n "o" #'org-clock-out) + + + (:desc "open" :prefix "o" + :desc "Default browser" :n "b" #'browse-url-of-file + :desc "Debugger" :n "d" #'+debug/open + :desc "Terminal in project" :n "T" #'+term/open-popup-in-project + + :desc "Slack IM" :n "i" #'slack-im-select + :desc "Slack Channel" :n "c" #'slack-channel-select + :desc "Slack Group" :n "g" #'slack-group-select + :desc "Slack Unreads" :n "u" #'slack-select-unread-rooms + :desc "Slack Threads" :n "r" #'slack-all-threads + + :desc "Email" :n "m" #'notmuch-jump-search + + (:desc "ERC" :prefix "e" + :desc "Channel" :n "c" #'erc-switch-to-buffer) + + ;; applications + :desc "APP: elfeed" :n "E" #'=rss + :desc "APP: twitter" :n "T" #'=twitter + + (:desc "spotify" :prefix "s" + :desc "Search track" :n "t" #'counsel-spotify-search-track + :desc "Search album" :n "a" #'counsel-spotify-search-album + :desc "Search artist" :n "A" #'counsel-spotify-search-artist) + + ;; macos + (:when IS-MAC + :desc "Reveal in Finder" :n "o" #'+macos/reveal-in-finder + :desc "Reveal project in Finder" :n "O" #'+macos/reveal-project-in-finder + :desc "Send to Transmit" :n "u" #'+macos/send-to-transmit + :desc "Send project to Transmit" :n "U" #'+macos/send-project-to-transmit + :desc "Send to Launchbar" :n "l" #'+macos/send-to-launchbar + :desc "Send project to Launchbar" :n "L" #'+macos/send-project-to-launchbar)) + + (:desc "Email" :prefix "M" + :desc "Compose" :n "m" #'+notmuch/compose) + + (:desc "project" :prefix "p" + :desc "Browse project" :n "." (find-file-in! (doom-project-root)) + :desc "Find file in project" :n "/" #'projectile-find-file + :desc "Run cmd in project root" :nv "!" #'projectile-run-shell-command-in-root + :desc "Switch project" :n "p" #'projectile-switch-project + :desc "Recent project files" :n "r" #'projectile-recentf + :desc "List project tasks" :n "t" #'+ivy/tasks + :desc "Pop term in project" :n "o" #'+term/open-popup-in-project + :desc "Invalidate cache" :n "x" #'projectile-invalidate-cache) + + (:desc "quit" :prefix "q" + :desc "Quit" :n "q" #'evil-save-and-quit + :desc "Quit (forget session)" :n "Q" #'+workspace/kill-session-and-quit) + + (:desc "remote" :prefix "r" + :desc "Upload local" :n "u" #'+upload/local + :desc "Upload local (force)" :n "U" (λ! (+upload/local t)) + :desc "Download remote" :n "d" #'+upload/remote-download + :desc "Diff local & remote" :n "D" #'+upload/diff + :desc "Browse remote files" :n "." 
#'+upload/browse + :desc "Detect remote changes" :n ">" #'+upload/check-remote) + + (:desc "snippets" :prefix "s" + :desc "New snippet" :n "n" #'yas-new-snippet + :desc "Insert snippet" :nv "i" #'yas-insert-snippet + :desc "Find snippet for mode" :n "s" #'yas-visit-snippet-file + :desc "Find snippet" :n "S" #'+find-in-snippets) + + (:desc "toggle" :prefix "t" + :desc "Flyspell" :n "s" #'flyspell-mode + :desc "Flycheck" :n "f" #'flycheck-mode + :desc "Line numbers" :n "l" #'doom/toggle-line-numbers + :desc "Fullscreen" :n "f" #'doom/toggle-fullscreen + :desc "Indent guides" :n "i" #'highlight-indentation-mode + :desc "Indent guides (column)" :n "I" #'highlight-indentation-current-column-mode + :desc "Impatient mode" :n "h" #'+impatient-mode/toggle + :desc "Big mode" :n "b" #'doom-big-font-mode + :desc "Evil goggles" :n "g" #'+evil-goggles/toggle)) + + + ;; --- vim-vinegar + :n "-" #'grfn/dired-minus + (:after dired-mode + (:map dired-mode-map + "-" #'grfn/dired-minus)) + + (:map smartparens-mode-map + :n "g o" #'sp-raise-sexp) + + ;; --- vim-sexp-mappings-for-regular-people + (:after paxedit + (:map paxedit-mode-map + :i ";" #'paxedit-insert-semicolon + :i "(" #'paxedit-open-round + :i "[" #'paxedit-open-bracket + :i "{" #'paxedit-open-curly + :n [remap evil-yank-line] #'paxedit-copy + :n [remap evil-delete-line] #'+grfn/paxedit-kill + :n "g o" #'paxedit-sexp-raise + :n [remap evil-join-whitespace] #'paxedit-compress + :n "g S" #'paxedit-format-1 + :n "g k" #'paxedit-backward-up + :n "g j" #'paxedit-backward-end)) + + ;; --- vim-splitjoin + :n [remap evil-join-whitespace] #'+splitjoin/join + :n "gS" #'+splitjoin/split + + ;; --- Personal vim-esque bindings ------------------ + :n "zx" #'doom/kill-this-buffer + :n "ZX" #'bury-buffer + :n "]b" #'doom/next-buffer + :n "[b" #'doom/previous-buffer + :n "]w" #'+workspace/switch-right + :n "[w" #'+workspace/switch-left + :m "gt" #'+workspace/switch-right + :m "gT" #'+workspace/switch-left + :m "gd" #'+lookup/definition + :m "gD" #'+lookup/references + :m "K" #'+lookup/documentation + :n "gp" #'+evil/reselect-paste + :n "gr" #'+eval:region + :n "gR" #'+eval/buffer + :v "gR" #'+eval:replace-region + :v "@" #'+evil:macro-on-all-lines + :n "g@" #'+evil:macro-on-all-lines + ;; repeat in visual mode (FIXME buggy) + :v "." 
#'evil-repeat + ;; don't leave visual mode after shifting + ;; :v "<" #'+evil/visual-dedent ; vnoremap < <gv + ;; :v ">" #'+evil/visual-indent ; vnoremap > >gv + ;; paste from recent yank register (which isn't overwritten) + :v "C-p" "\"0p" + + (:map evil-window-map ; prefix "C-w" + ;; Navigation + "C-h" #'evil-window-left + "C-j" #'evil-window-down + "C-k" #'evil-window-up + "C-l" #'evil-window-right + "C-w" #'ace-window + ;; Swapping windows + "H" #'+evil/window-move-left + "J" #'+evil/window-move-down + "K" #'+evil/window-move-up + "L" #'+evil/window-move-right + "C-S-w" #'ace-swap-window + ;; Window undo/redo + "u" #'winner-undo + "C-u" #'winner-undo + "C-r" #'winner-redo + "o" #'doom/window-enlargen + ;; Delete window + "c" #'+workspace/close-window-or-workspace + "C-C" #'ace-delete-window + ;; Popups + "p" #'doom/popup-toggle + "m" #'doom/popup-toggle-messages + "P" #'doom/popup-close-all) + + + ;; --- Plugin bindings ------------------------------ + ;; auto-yasnippet + :i [C-tab] #'aya-expand + :nv [C-tab] #'aya-create + + ;; company-mode (vim-like omnicompletion) + :i "C-SPC" #'+company/complete + (:prefix "C-x" + :i "C-l" #'+company/whole-lines + :i "C-k" #'+company/dict-or-keywords + :i "C-f" #'company-files + :i "C-]" #'company-etags + :i "s" #'company-ispell + :i "C-s" #'company-yasnippet + :i "C-o" #'company-capf + :i "C-n" #'company-dabbrev-code + :i "C-p" #'+company/dabbrev-code-previous) + (:after company + (:map company-active-map + ;; Don't interfere with `evil-delete-backward-word' in insert mode + "C-w" nil + "C-o" #'company-search-kill-others + "C-n" #'company-select-next + "C-p" #'company-select-previous + "C-h" #'company-quickhelp-manual-begin + "C-S-h" #'company-show-doc-buffer + "C-S-s" #'company-search-candidates + "C-s" #'company-filter-candidates + "C-SPC" #'company-complete-common + "C-h" #'company-quickhelp-manual-begin + [tab] #'company-complete-common-or-cycle + [backtab] #'company-select-previous + [escape] (λ! (company-abort) (evil-normal-state 1))) + ;; Automatically applies to `company-filter-map' + (:map company-search-map + "C-n" #'company-search-repeat-forward + "C-p" #'company-search-repeat-backward + "C-s" (λ! 
(company-search-abort) (company-filter-candidates)) + [escape] #'company-search-abort)) + + ;; counsel +; (:after counsel +; (:map counsel-ag-map +; [backtab] #'+ivy/wgrep-occur ; search/replace on results +; "C-SPC" #'ivy-call-and-recenter ; preview)) + + ;; evil-commentary + ;; :n "gc" #'evil-commentary + + ;; evil-exchange + :n "gx" #'evil-exchange + + ;; evil-magit + (:after evil-magit + :map (magit-status-mode-map magit-revision-mode-map) + :n "C-j" nil + :n "C-k" nil) + + ;; Smerge + :n "]n" #'smerge-next + :n "[n" #'smerge-prev + + ;; evil-mc + (:prefix "gz" + :nv "m" #'evil-mc-make-all-cursors + :nv "u" #'evil-mc-undo-all-cursors + :nv "z" #'+evil/mc-make-cursor-here + :nv "t" #'+evil/mc-toggle-cursors + :nv "n" #'evil-mc-make-and-goto-next-cursor + :nv "p" #'evil-mc-make-and-goto-prev-cursor + :nv "N" #'evil-mc-make-and-goto-last-cursor + :nv "P" #'evil-mc-make-and-goto-first-cursor + :nv "d" #'evil-mc-make-and-goto-next-match + :nv "D" #'evil-mc-make-and-goto-prev-match) + (:after evil-mc + :map evil-mc-key-map + :nv "C-n" #'evil-mc-make-and-goto-next-cursor + :nv "C-N" #'evil-mc-make-and-goto-last-cursor + :nv "C-p" #'evil-mc-make-and-goto-prev-cursor + :nv "C-P" #'evil-mc-make-and-goto-first-cursor) + + ;; evil-multiedit + :v "R" #'evil-multiedit-match-all + :n "M-d" #'evil-multiedit-match-symbol-and-next + :n "M-D" #'evil-multiedit-match-symbol-and-prev + :v "M-d" #'evil-multiedit-match-and-next + :v "M-D" #'evil-multiedit-match-and-prev + :nv "C-M-d" #'evil-multiedit-restore + (:after evil-multiedit + (:map evil-multiedit-state-map + "M-d" #'evil-multiedit-match-and-next + "M-D" #'evil-multiedit-match-and-prev + "RET" #'evil-multiedit-toggle-or-restrict-region) + (:map (evil-multiedit-state-map evil-multiedit-insert-state-map) + "C-n" #'evil-multiedit-next + "C-p" #'evil-multiedit-prev)) + + ;; evil-snipe + (:after evil-snipe + ;; Binding to switch to evil-easymotion/avy after a snipe + :map evil-snipe-parent-transient-map + "C-;" (λ! 
(require 'evil-easymotion) + (call-interactively + (evilem-create #'evil-snipe-repeat + :bind ((evil-snipe-scope 'whole-buffer) + (evil-snipe-enable-highlight) + (evil-snipe-enable-incremental-highlight)))))) + + ;; evil-surround + :v "S" #'evil-surround-region + :o "s" #'evil-surround-edit + :o "S" #'evil-Surround-edit + + ;; expand-region + :v "v" #'er/expand-region + :v "V" #'er/contract-region + + ;; flycheck + :m "]e" #'next-error + :m "[e" #'previous-error + (:after flycheck + :map flycheck-error-list-mode-map + :n "C-n" #'flycheck-error-list-next-error + :n "C-p" #'flycheck-error-list-previous-error + :n "j" #'flycheck-error-list-next-error + :n "k" #'flycheck-error-list-previous-error + :n "RET" #'flycheck-error-list-goto-error) + + ;; flyspell + :m "]S" #'flyspell-correct-word-generic + :m "[S" #'flyspell-correct-previous-word-generic + + ;; git-gutter + :m "]d" #'git-gutter:next-hunk + :m "[d" #'git-gutter:previous-hunk + + ;; git-timemachine + (:after git-timemachine + (:map git-timemachine-mode-map + :n "C-p" #'git-timemachine-show-previous-revision + :n "C-n" #'git-timemachine-show-next-revision + :n "[[" #'git-timemachine-show-previous-revision + :n "]]" #'git-timemachine-show-next-revision + :n "q" #'git-timemachine-quit + :n "gb" #'git-timemachine-blame)) + + ;; gist + (:after gist + :map gist-list-menu-mode-map + :n "RET" #'+gist/open-current + :n "b" #'gist-browse-current-url + :n "c" #'gist-add-buffer + :n "d" #'gist-kill-current + :n "f" #'gist-fork + :n "q" #'quit-window + :n "r" #'gist-list-reload + :n "s" #'gist-star + :n "S" #'gist-unstar + :n "y" #'gist-print-current-url) + + ;; helm + (:after helm + (:map helm-map + "ESC" nil + "C-S-n" #'helm-next-source + "C-S-p" #'helm-previous-source + "C-u" #'helm-delete-minibuffer-contents + "C-w" #'backward-kill-word + "C-r" #'evil-paste-from-register ; Evil registers in helm! Glorious! 
+ "C-b" #'backward-word + [left] #'backward-char + [right] #'forward-char + [escape] #'helm-keyboard-quit + [tab] #'helm-execute-persistent-action) + + (:after helm-files + (:map helm-generic-files-map + :e "ESC" #'helm-keyboard-quit) + (:map helm-find-files-map + "C-w" #'helm-find-files-up-one-level + "TAB" #'helm-execute-persistent-action)) + + (:after helm-ag + (:map helm-ag-map + "<backtab>" #'helm-ag-edit))) + + ;; hl-todo + :m "]t" #'hl-todo-next + :m "[t" #'hl-todo-previous + + ;; ivy + (:after ivy + :map ivy-minibuffer-map + [escape] #'keyboard-escape-quit + "C-SPC" #'ivy-call-and-recenter + "TAB" #'ivy-partial + "M-v" #'yank + "M-z" #'undo + "C-r" #'evil-paste-from-register + "C-k" #'ivy-previous-line + "C-j" #'ivy-next-line + "C-l" #'ivy-alt-done + "C-w" #'ivy-backward-kill-word + "C-u" #'ivy-kill-line + "C-b" #'backward-word + "C-f" #'forward-word) + + ;; neotree + (:after neotree + :map neotree-mode-map + :n "g" nil + :n [tab] #'neotree-quick-look + :n "RET" #'neotree-enter + :n [backspace] #'evil-window-prev + :n "c" #'neotree-create-node + :n "r" #'neotree-rename-node + :n "d" #'neotree-delete-node + :n "j" #'neotree-next-line + :n "k" #'neotree-previous-line + :n "n" #'neotree-next-line + :n "p" #'neotree-previous-line + :n "h" #'+neotree/collapse-or-up + :n "l" #'+neotree/expand-or-open + :n "J" #'neotree-select-next-sibling-node + :n "K" #'neotree-select-previous-sibling-node + :n "H" #'neotree-select-up-node + :n "L" #'neotree-select-down-node + :n "G" #'evil-goto-line + :n "gg" #'evil-goto-first-line + :n "v" #'neotree-enter-vertical-split + :n "s" #'neotree-enter-horizontal-split + :n "q" #'neotree-hide + :n "R" #'neotree-refresh) + + ;; realgud + (:after realgud + :map realgud:shortkey-mode-map + :n "j" #'evil-next-line + :n "k" #'evil-previous-line + :n "h" #'evil-backward-char + :n "l" #'evil-forward-char + :m "n" #'realgud:cmd-next + :m "b" #'realgud:cmd-break + :m "B" #'realgud:cmd-clear + :n "c" #'realgud:cmd-continue) + + ;; rotate-text + :n "gs" #'rotate-text + + ;; smart-forward + :m "g]" #'smart-forward + :m "g[" #'smart-backward + + ;; undo-tree -- undo/redo for visual regions + :v "C-u" #'undo-tree-undo + :v "C-r" #'undo-tree-redo + + ;; yasnippet + (:after yasnippet + (:map yas-keymap + "C-e" #'+snippets/goto-end-of-field + "C-a" #'+snippets/goto-start-of-field + "<M-right>" #'+snippets/goto-end-of-field + "<M-left>" #'+snippets/goto-start-of-field + "<M-backspace>" #'+snippets/delete-to-start-of-field + [escape] #'evil-normal-state + [backspace] #'+snippets/delete-backward-char + [delete] #'+snippets/delete-forward-char-or-field) + (:map yas-minor-mode-map + :i "<tab>" yas-maybe-expand + :v "<tab>" #'+snippets/expand-on-region)) + + + ;; --- Major mode bindings -------------------------- + + ;; Markdown + (:after markdown-mode + (:map markdown-mode-map + ;; fix conflicts with private bindings + "<backspace>" nil + "<M-left>" nil + "<M-right>" nil)) + + ;; Rust + (:after rust + (:map rust-mode-map + "K" #'racer-describe + "g RET" #'cargo-process-test)) + + ;; Elixir + (:after alchemist + (:map elixir-mode-map + :n "K" #'alchemist-help-search-at-point + :n "g RET" #'alchemist-project-run-tests-for-current-file + :n "g \\" #'alchemist-mix-test-at-point + :n "g SPC" #'alchemist-mix-compile)) + + ;; Haskell + (:after haskell-mode + (:map haskell-mode-map + ;; :n "K" #'intero-info + :n "K" #'lsp-describe-thing-at-point + ;; :n "g d" #'lsp-ui-peek-find-definitions + :n "g d" #'lsp-ui-peek-find-definitions + ;; :n "g SPC" #'intero-repl-load + ;; :n "g y" #'lsp-ui- 
+ )) + + ;; Javascript + ;; (:after rjsx-mode + ;; (:map rjsx-mode-map + ;; :n "g d" #'flow-minor-jump-to-definition + ;; :n "K" #'flow-minor-type-at-pos)) + + (:after js2-mode + (:map js2-mode-map + :n "g d" #'flow-minor-jump-to-definition + :n "K" #'flow-minor-type-at-pos)) + + ;; Elisp + (:map emacs-lisp-mode-map + :n "g SPC" #'eval-buffer + :n "g RET" (λ! () (ert t))) + + + ;; --- Custom evil text-objects --------------------- + :textobj "a" #'evil-inner-arg #'evil-outer-arg + :textobj "B" #'evil-textobj-anyblock-inner-block #'evil-textobj-anyblock-a-block + :textobj "i" #'evil-indent-plus-i-indent #'evil-indent-plus-a-indent + :textobj "I" #'evil-indent-plus-i-indent-up #'evil-indent-plus-a-indent-up + :textobj "J" #'evil-indent-plus-i-indent-up-down #'evil-indent-plus-a-indent-up-down + + + ;; --- Built-in plugins ----------------------------- + (:after comint + ;; TAB auto-completion in term buffers + :map comint-mode-map [tab] #'company-complete) + + (:after debug + ;; For elisp debugging + :map debugger-mode-map + :n "RET" #'debug-help-follow + :n "e" #'debugger-eval-expression + :n "n" #'debugger-step-through + :n "c" #'debugger-continue) + + (:map help-mode-map + :n "[[" #'help-go-back + :n "]]" #'help-go-forward + :n "o" #'ace-link-help + :n "q" #'quit-window + :n "Q" #'+ivy-quit-and-resume) + + (:after vc-annotate + :map vc-annotate-mode-map + :n "q" #'kill-this-buffer + :n "d" #'vc-annotate-show-diff-revision-at-line + :n "D" #'vc-annotate-show-changeset-diff-revision-at-line + :n "SPC" #'vc-annotate-show-log-revision-at-line + :n "]]" #'vc-annotate-next-revision + :n "[[" #'vc-annotate-prev-revision + :n "TAB" #'vc-annotate-toggle-annotation-visibility + :n "RET" #'vc-annotate-find-revision-at-line)) + +;; evil-easymotion +(after! evil-easymotion + (let ((prefix (concat doom-leader-key " /"))) + ;; NOTE `evilem-default-keybinds' unsets all other keys on the prefix (in + ;; motion state) + (evilem-default-keybindings prefix) + (evilem-define (kbd (concat prefix " n")) #'evil-ex-search-next) + (evilem-define (kbd (concat prefix " N")) #'evil-ex-search-previous) + (evilem-define (kbd (concat prefix " s")) #'evil-snipe-repeat + :pre-hook (save-excursion (call-interactively #'evil-snipe-s)) + :bind ((evil-snipe-scope 'buffer) + (evil-snipe-enable-highlight) + (evil-snipe-enable-incremental-highlight))) + (evilem-define (kbd (concat prefix " S")) #'evil-snipe-repeat-reverse + :pre-hook (save-excursion (call-interactively #'evil-snipe-s)) + :bind ((evil-snipe-scope 'buffer) + (evil-snipe-enable-highlight) + (evil-snipe-enable-incremental-highlight))))) + + +;; +;; Keybinding fixes +;; + +;; This section is dedicated to "fixing" certain keys so that they behave +;; properly, more like vim, or how I like it. + +(map! (:map input-decode-map + [S-iso-lefttab] [backtab] + (:unless window-system "TAB" [tab])) ; Fix TAB in terminal + + ;; I want C-a and C-e to be a little smarter. C-a will jump to + ;; indentation. Pressing it again will send you to the true bol. Same goes + ;; for C-e, except it will ignore comments and trailing whitespace before + ;; jumping to eol. 
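+  ;; A rough sketch of that behaviour (illustration only -- the real commands
+  ;; are `doom/backward-to-bol-or-indent' and
+  ;; `doom/forward-to-last-non-comment-or-eol'):
+  ;;   (defun sketch/backward-to-bol-or-indent ()
+  ;;     "Jump to indentation; if already there, jump to the true bol."
+  ;;     (interactive)
+  ;;     (let ((start (point)))
+  ;;       (back-to-indentation)
+  ;;       (when (= start (point)) (beginning-of-line))))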
+ :i "C-a" #'doom/backward-to-bol-or-indent + :i "C-e" #'doom/forward-to-last-non-comment-or-eol + :i "C-u" #'doom/backward-kill-to-bol-and-indent + + ;; Emacsien motions for insert mode + :i "C-b" #'backward-word + :i "C-f" #'forward-word + + ;; Highjacks space/backspace to: + ;; a) balance spaces inside brackets/parentheses ( | ) -> (|) + ;; b) delete space-indented blocks intelligently + ;; c) do none of this when inside a string + ;; :i "SPC" #'doom/inflate-space-maybe + ;; :i [remap delete-backward-char] #'doom/deflate-space-maybe + ;; :i [remap newline] #'doom/newline-and-indent + + (:map org-mode-map + :i [remap doom/inflate-space-maybe] #'org-self-insert-command + "C-c C-x C-i" #'org-clock-in + "C-c C-x <C-i>" #'org-clock-in) + + (:map org-agenda-mode-map + "C-c C-x C-i" #'org-agenda-clock-in + "C-c C-x <C-i>" #'org-agenda-clock-in) + + ;; Restore common editing keys (and ESC) in minibuffer + (:map (minibuffer-local-map + minibuffer-local-ns-map + minibuffer-local-completion-map + minibuffer-local-must-match-map + minibuffer-local-isearch-map + evil-ex-completion-map + evil-ex-search-keymap + read-expression-map) + ;; [escape] #'abort-recursive-edit + "C-r" #'evil-paste-from-register + "C-a" #'move-beginning-of-line + "C-w" #'doom/minibuffer-kill-word + "C-u" #'doom/minibuffer-kill-line + "C-b" #'backward-word + "C-f" #'forward-word + "M-z" #'doom/minibuffer-undo) + + (:map messages-buffer-mode-map + "M-;" #'eval-expression + "A-;" #'eval-expression) + + (:map tabulated-list-mode-map + [remap evil-record-macro] #'doom/popup-close-maybe) + + (:after view + (:map view-mode-map "<escape>" #'View-quit-all))) + +(defun +sexp-transpose () + (interactive) + (case evil-this-operator + ('evil-shift-right (paxedit-transpose-forward)) + ('evil-shift-left (paxedit-transpose-backward)))) + +;; (defun nmap (&rest keys-and-ops) +;; (->> +;; (seq-partition keys-and-ops 2) +;; (seq-map +;; (lambda (k-op) +;; (let* ((k (car k-op)) +;; (op (cadr k-op)) +;; (prefix (substring k 0 1)) +;; (prefix-sym (lookup-key evil-normal-state-map prefix)) +;; (keyseq (substring k 1))) +;; (list keyseq prefix-sym op)))) +;; (seq-group-by #'car) +;; (seq-map +;; (lambda (k-ops) +;; (let* ((keyseq (car k-ops)) +;; (ops (cdr k-ops)) +;; (existing-binding (lookup-key evil-operator-state-map keyseq)) +;; (handler (λ! () +;; (if-let +;; ((oplist +;; (seq-find (lambda (op) +;; (equal (nth 1 op) +;; evil-this-operator)) +;; ops))) +;; (message "calling oplist") +;; (->> oplist (nth 2) funcall) +;; (when existing-binding +;; (funcall existing-binding)))))) +;; (if existing-binding +;; (progn +;; (define-key evil-operator-state-map +;; (vector 'remap existing-binding) +;; handler) +;; (define-key evil-motion-state-map +;; (vector 'remap existing-binding) +;; handler)) +;; (define-key evil-operator-state-map keyseq handler))))))) + +;; (nmap +;; ">e" #'paxedit-transpose-forward +;; "<e" #'paxedit-transpose-backward) + +(require 'paxedit) +(require 'general) +(general-evil-setup t) + +(nmap + ">" (general-key-dispatch 'evil-shift-right + "e" 'paxedit-transpose-forward + ")" 'sp-forward-slurp-sexp + "(" 'sp-backward-barf-sexp + "I" 'grfn/insert-at-sexp-end + ;; "a" 'grfn/insert-at-form-end + )) + +(nmap + "<" (general-key-dispatch 'evil-shift-left + "e" 'paxedit-transpose-backward + ")" 'sp-forward-barf-sexp + "(" 'sp-backward-slurp-sexp + "I" 'grfn/insert-at-sexp-start + ;; "a" 'grfn/insert-at-form-start + )) + + +(defmacro saving-excursion (&rest body) + `(λ! 
() (save-excursion ,@body))) + +(nmap "c" (general-key-dispatch 'evil-change + "r c" (saving-excursion (string-inflection-lower-camelcase)) + "r C" (saving-excursion (string-inflection-camelcase)) + "r m" (saving-excursion (string-inflection-camelcase)) + "r s" (saving-excursion (string-inflection-underscore)) + "r u" (saving-excursion (string-inflection-upcase)) + "r -" (saving-excursion (string-inflection-kebab-case)) + "r k" (saving-excursion (string-inflection-kebab-case)) + ;; "r ." (saving-excursion (string-inflection-dot-case)) + ;; "r ." (saving-excursion (string-inflection-space-case)) + ;; "r ." (saving-excursion (string-inflection-title-case)) + )) + + +(predd-defmulti eval-sexp (lambda (form) major-mode)) + +(predd-defmethod eval-sexp 'clojure-mode (form) + (cider-interactive-eval form)) + +(predd-defmethod eval-sexp 'emacs-lisp-mode (form) + (pp-eval-expression form)) + +(predd-defmulti eval-sexp-region (lambda (_beg _end) major-mode)) + +(predd-defmethod eval-sexp-region 'clojure-mode (beg end) + (cider-interactive-eval nil nil (list beg end))) + +(predd-defmethod eval-sexp-region 'emacs-lisp-mode (beg end) + (pp-eval-expression (read (buffer-substring beg end)))) + +(predd-defmulti eval-sexp-region-context (lambda (_beg _end _context) major-mode)) + +(predd-defmethod eval-sexp-region-context 'clojure-mode (beg end context) + (cider--eval-in-context (buffer-substring beg end))) + +(defun pp-eval-context-region (beg end context) + (interactive "r\nxContext: ") + (let* ((inner-expr (read (buffer-substring beg end))) + (full-expr (list 'let* context inner-expr))) + (pp-eval-expression full-expr))) + +(predd-defmethod eval-sexp-region-context 'emacs-lisp-mode (beg end context) + (pp-eval-context-region beg end context)) + +(predd-defmulti preceding-sexp (lambda () major-mode)) + +(predd-defmethod preceding-sexp 'clojure-mode () + (cider-last-sexp)) + +(predd-defmethod preceding-sexp 'emacs-lisp-mode () + (elisp--preceding-sexp)) + +(defun eval-sexp-at-point () + (interactive) + (let ((bounds (bounds-of-thing-at-point 'sexp))) + (eval-sexp-region (car bounds) + (cdr bounds)))) + +(defun eval-last-sexp (_) + (interactive) + (eval-sexp (preceding-sexp))) + +;;; + +(defun cider-insert-current-sexp-in-repl (&optional arg) + "Insert the expression at point in the REPL buffer. +If invoked with a prefix ARG eval the expression after inserting it" + (interactive "P") + (cider-insert-in-repl (cider-sexp-at-point) arg)) + +(evil-define-operator fireplace-send (beg end) + (cider-insert-current-sexp-in-repl nil nil (list beg end))) + +(defun +clojure-pprint-expr (form) + (format "(with-out-str (clojure.pprint/pprint %s))" + form)) + +(defun cider-eval-read-and-print-handler (&optional buffer) + "Make a handler for evaluating and reading then printing result in BUFFER." 
+ (nrepl-make-response-handler + (or buffer (current-buffer)) + (lambda (buffer value) + (let ((value* (read value))) + (with-current-buffer buffer + (insert + (if (derived-mode-p 'cider-clojure-interaction-mode) + (format "\n%s\n" value*) + value*))))) + (lambda (_buffer out) (cider-emit-interactive-eval-output out)) + (lambda (_buffer err) (cider-emit-interactive-eval-err-output err)) + '())) + +(defun cider-eval-and-replace (beg end) + "Evaluate the expression in region and replace it with its result" + (interactive "r") + (let ((form (buffer-substring beg end))) + (cider-nrepl-sync-request:eval form) + (kill-region beg end) + (cider-interactive-eval + (+clojure-pprint-expr form) + (cider-eval-read-and-print-handler)))) + +(defun cider-eval-current-sexp-and-replace () + "Evaluate the expression at point and replace it with its result" + (interactive) + (apply #'cider-eval-and-replace (cider-sexp-at-point 'bounds))) + +;;; + +(evil-define-operator fireplace-eval (beg end) + (eval-sexp-region beg end)) + +(evil-define-operator fireplace-replace (beg end) + (cider-eval-and-replace beg end)) + +(evil-define-operator fireplace-eval-context (beg end) + (eval-sexp-region-context beg end)) + +;;; fireplace-esque eval binding +(nmap :keymaps 'cider-mode-map + "c" (general-key-dispatch 'evil-change + "p" (general-key-dispatch 'fireplace-eval + "p" 'cider-eval-sexp-at-point + "c" 'cider-eval-last-sexp + "d" 'cider-eval-defun-at-point + "r" 'cider-test-run-test) + "q" (general-key-dispatch 'fireplace-send + "q" 'cider-insert-current-sexp-in-repl + "c" 'cider-insert-last-sexp-in-repl) + "x" (general-key-dispatch 'fireplace-eval-context + "x" 'cider-eval-sexp-at-point-in-context + "c" 'cider-eval-last-sexp-in-context) + "!" (general-key-dispatch 'fireplace-replace + "!" 'cider-eval-current-sexp-and-replace + "c" 'cider-eval-last-sexp-and-replace) + "y" 'cider-copy-last-result)) + +;;; + +(nmap :keymaps 'emacs-lisp-mode-map + "c" (general-key-dispatch 'evil-change + "p" (general-key-dispatch 'fireplace-eval + "p" 'eval-sexp-at-point + "c" 'eval-last-sexp + "d" 'eval-defun + "r" 'cider-test-run-test) + "x" (general-key-dispatch 'fireplace-eval-context + "x" 'cider-eval-sexp-at-point-in-context + "c" 'cider-eval-last-sexp-in-context) + "!" (general-key-dispatch 'fireplace-replace + "!" 'cider-eval-current-sexp-and-replace + "c" 'cider-eval-last-sexp-and-replace) + "y" 'cider-copy-last-result)) + +(nmap :keymaps 'sly-mode-map + "c" (general-key-dispatch 'evil-change + "p" (general-key-dispatch 'sly-eval + ;; "p" 'eval-sexp-at-point + "c" 'sly-eval-last-expression + "d" 'sly-eval-defun + ;; "r" 'cider-test-run-test + ) + ;; "x" (general-key-dispatch 'fireplace-eval-context + ;; "x" 'cider-eval-sexp-at-point-in-context + ;; "c" 'cider-eval-last-sexp-in-context + ;; ) + ;; "!" (general-key-dispatch 'fireplace-replace + ;; "!" 
'cider-eval-current-sexp-and-replace + ;; "c" 'cider-eval-last-sexp-and-replace) + ;; "y" 'cider-copy-last-result + )) + + +;; >) ; slurp forward +;; <) ; barf forward +;; <( ; slurp backward +;; >( ; slurp backward + +;; (require 'doom-themes) +(defun grfn/haskell-test-file-p () + (string-match-p (rx (and "Spec.hs" eol)) + (buffer-file-name))) + +(require 'haskell) + +(defun grfn/intero-run-main () + (interactive) + (intero-repl-load) + (intero-with-repl-buffer nil + (comint-simple-send + (get-buffer-process (current-buffer)) + "main"))) + +(defun grfn/run-clj-or-cljs-test () + (interactive) + (message "Running tests...") + (cl-case (cider-repl-type-for-buffer) + ('cljs + (cider-interactive-eval + "(with-out-str (cljs.test/run-tests))" + (nrepl-make-response-handler + (current-buffer) + (lambda (_ value) + (with-output-to-temp-buffer "*cljs-test-results*" + (print + (->> value + (s-replace "\"" "") + (s-replace "\\n" "\n"))))) + nil nil nil))) + (('clj 'multi) + (funcall-interactively + #'cider-test-run-ns-tests + nil)))) + +(defun cider-copy-last-result () + (interactive) + (cider-interactive-eval + "*1" + (nrepl-make-response-handler + (current-buffer) + (lambda (_ value) + (kill-new value) + (message "Copied last result (%s) to clipboard" + (if (= (length value) 1) "1 char" + (format "%d chars" (length value))))) + nil nil nil))) + + +(defun grfn/insert-new-src-block () + (interactive) + (let* ((current-src-block (org-element-at-point)) + (src-block-head (save-excursion + (goto-char (org-element-property + :begin current-src-block)) + (let ((line (thing-at-point 'line t))) + (if (not (s-starts-with? "#+NAME:" (s-trim line))) + line + (forward-line) + (thing-at-point 'line t))))) + (point-to-insert + (if-let (results-loc (org-babel-where-is-src-block-result)) + (save-excursion + (goto-char results-loc) + (org-element-property + :end + (org-element-at-point))) + (org-element-property :end (org-element-at-point))))) + (goto-char point-to-insert) + (insert "\n") + (insert src-block-head) + (let ((contents (point-marker))) + (insert "\n#+END_SRC\n") + (goto-char contents)))) + +(defun grfn/+org-insert-item (orig direction) + (interactive) + (if (and (org-in-src-block-p) + (equal direction 'below)) + (grfn/insert-new-src-block) + (funcall orig direction))) + +(advice-add #'+org--insert-item :around #'grfn/+org-insert-item) +;; (advice-add #'+org/insert-item-below :around +;; (lambda (orig) (grfn/+org-insert-item orig 'below))) + +(defun set-pdb-trace () + (interactive) + (end-of-line) + (insert (format "\n%simport pdb;pdb.set_trace()" + (make-string (python-indent-calculate-indentation) + ?\s))) + (evil-indent (line-beginning-position) + (line-end-position))) + +(map! 
+
+
+  (:map magit-mode-map
+    :n "#" 'forge-dispatch)
+
+  (:map haskell-mode-map
+    :n "K" 'lsp-info-under-point
+    :n "g d" 'lsp-ui-peek-find-definitions
+    :n "g r" 'lsp-ui-peek-find-references
+    :n "g \\" '+haskell/repl
+    ;; :n "K" 'intero-info
+    ;; :n "g d" 'intero-goto-definition
+    ;; :n "g SPC" 'intero-repl-load
+    ;; :n "g \\" 'intero-repl
+    ;; :n "g y" 'intero-type-at
+    ;; :n "g RET" 'grfn/run-sputnik-test-for-file
+
+    (:localleader
+      :desc "Apply action" :n "e" 'intero-repl-eval-region
+      :desc "Rename symbol" :n "r" 'intero-apply-suggestions))
+
+  (:map python-mode-map
+    :n "K" #'anaconda-mode-show-doc
+    :n "g SPC" #'+eval/buffer
+    :n "g RET" #'python-pytest-file
+    :n "g \\" #'+python/open-ipython-repl
+    [remap evil-commentary-yank] #'set-pdb-trace)
+
+  (:after agda2-mode
+    (:map agda2-mode-map
+      :n "g SPC" 'agda2-load
+      :n "g d" 'agda2-goto-definition-keyboard
+      :n "] g" 'agda2-next-goal
+      :n "[ g" 'agda2-previous-goal
+
+      (:localleader
+        :desc "Give" :n "SPC" 'agda2-give
+        :desc "Case Split" :n "c" 'agda2-make-case
+        :desc "Make Helper" :n "h" 'agda2-helper-function-type
+        :desc "Refine" :n "r" 'agda2-refine
+        :desc "Auto" :n "a" 'agda2-auto-maybe-all
+        :desc "Goal type and context" :n "t" 'agda2-goal-and-context
+        :desc "Goal type and context and inferred" :n ";" 'agda2-goal-and-context-and-inferred)))
+
+  (:after clojure-mode
+    (:map clojure-mode-map
+      :n "] f" 'forward-sexp
+      :n "[ f" 'backward-sexp))
+
+  (:after cider-mode
+    (:map cider-mode-map
+      :n "g SPC" 'cider-eval-buffer
+      :n "g \\" 'cider-switch-to-repl-buffer
+      :n "K" 'cider-doc
+      :n "g K" 'cider-grimoire
+      :n "g d" 'cider-find-dwim
+      :n "C-w ]" 'cider-find-dwim-other-window
+      ;; :n "g RET" 'cider-test-run-ns-tests
+      :n "g RET" 'grfn/run-clj-or-cljs-test
+
+      "C-c C-r r" 'cljr-add-require-to-ns
+      "C-c C-r i" 'cljr-add-import-to-ns
+
+      (:localleader
+        ;; :desc "Inspect last result" :n "i" 'cider-inspect-last-result
+        ;; :desc "Search for documentation" :n "h s" 'cider-apropos-doc
+        :desc "Add require to ns" :n "n r" 'cljr-add-require-to-ns
+        :desc "Add import to ns" :n "n i" 'cljr-add-import-to-ns))
+    (:map cider-repl-mode-map
+      :n "g \\" 'cider-switch-to-last-clojure-buffer))
+
+  (:after w3m
+    (:map w3m-mode-map
+      "/" #'evil-search-forward
+      "?" #'evil-search-backward
+      "r" #'w3m-reload-this-page))
+
+  (:after slack
+    (:map slack-message-buffer-mode-map
+      :i "<up>" #'slack-message-edit))
+
+  (:after org
+    :n "C-c C-x C-o" #'org-clock-out
+    (:map org-mode-map
+      [remap counsel-imenu] #'counsel-org-goto
+      "M-k" #'org-move-subtree-up
+      "M-j" #'org-move-subtree-down
+      (:localleader
+        :n "g" #'counsel-org-goto))
+
+    (:map org-capture-mode-map
+      :n "g RET" #'org-capture-finalize
+      :n "g \\" #'org-capture-refile))
+
+  (:map lsp-mode-map
+    :n "K" #'lsp-describe-thing-at-point
+    :n "g r" #'lsp-rename
+    (:localleader
+      :n "a" #'lsp-execute-code-action))
+
+  ) diff --git a/users/glittershark/emacs.d/+commands.el b/users/glittershark/emacs.d/+commands.el new file mode 100644 index 000000000000..a5753c8e995b --- /dev/null +++ b/users/glittershark/emacs.d/+commands.el @@ -0,0 +1,149 @@ +
+
+(defalias 'ex! 'evil-ex-define-cmd)
+
+(defun delete-file-and-buffer ()
+  "Kill the current buffer and delete the file it is visiting."
+  (interactive)
+  (let ((filename (buffer-file-name)))
+    (when filename
+      (if (vc-backend filename)
+          (vc-delete-file filename)
+        (progn
+          (delete-file filename)
+          (message "Deleted file %s" filename)
+          (kill-buffer))))))
+
+;;; Commands defined elsewhere
+;;(ex! "al[ign]" #'+evil:align)
+;;(ex! "g[lobal]" #'+evil:global)
+
+;;; Custom commands
+;; Editing
+(ex! "@" #'+evil:macro-on-all-lines)   ; TODO Test me
+(ex! "al[ign]" #'+evil:align)
+(ex! "enhtml" #'+web:encode-html-entities)
+(ex! "dehtml" #'+web:decode-html-entities)
+(ex! "mc" #'+evil:mc)
+(ex! "iedit" #'evil-multiedit-ex-match)
+(ex! "na[rrow]" #'+evil:narrow-buffer)
+(ex! "retab" #'+evil:retab)
+
+(ex! "glog" #'magit-log-buffer-file)
+
+;; External resources
+;; TODO (ex! "db" #'doom:db)
+;; TODO (ex! "dbu[se]" #'doom:db-select)
+;; TODO (ex! "go[ogle]" #'doom:google-search)
+(ex! "lo[okup]" #'+jump:online)
+(ex! "dash" #'+lookup:dash)
+(ex! "dd" #'+lookup:devdocs)
+(ex! "http" #'httpd-start)            ; start http server
+(ex! "repl" #'+eval:repl)             ; invoke or send to repl
+;; TODO (ex! "rx" 'doom:regex)        ; open re-builder
+(ex! "sh[ell]" #'+eshell:run)
+(ex! "t[mux]" #'+tmux:run)            ; send to tmux
+(ex! "tcd" #'+tmux:cd-here)           ; cd to default-directory in tmux
+(ex! "x" #'doom/open-project-scratch-buffer)
+
+;; GIT
+(ex! "gist" #'+gist:send)             ; send current buffer/region to gist
+(ex! "gistl" #'+gist:list)            ; list gists by user
+(ex! "gbrowse" #'+vcs/git-browse)     ; show file in github/gitlab
+(ex! "gissues" #'+vcs/git-browse-issues) ; show github issues
+(ex! "git" #'magit-status)            ; open magit status window
+(ex! "gstage" #'magit-stage)
+(ex! "gunstage" #'magit-unstage)
+(ex! "gblame" #'magit-blame)
+(ex! "grevert" #'git-gutter:revert-hunk)
+
+;; Dealing with buffers
+(ex! "clean[up]" #'doom/cleanup-buffers)
+(ex! "k[ill]" #'doom/kill-this-buffer)
+(ex! "k[ill]all" #'+hlissner:kill-all-buffers)
+(ex! "k[ill]m" #'+hlissner:kill-matching-buffers)
+(ex! "k[ill]o" #'doom/kill-other-buffers)
+(ex! "l[ast]" #'doom/popup-restore)
+(ex! "m[sg]" #'view-echo-area-messages)
+(ex! "pop[up]" #'doom/popup-this-buffer)
+
+;; Project navigation
+(ex! "a" #'projectile-toggle-between-implementation-and-test)
+(ex! "as" #'projectile-find-implementation-or-test-other-window)
+(ex! "av" #'projectile-find-implementation-or-test-other-window)
+(ex! "cd" #'+hlissner:cd)
+(cond ((featurep! :completion ivy)
+       (ex! "ag" #'+ivy:ag)
+       (ex! "agc[wd]" #'+ivy:ag-cwd)
+       (ex! "rg" #'+ivy:rg)
+       (ex! "rgc[wd]" #'+ivy:rg-cwd)
+       (ex! "sw[iper]" #'+ivy:swiper)
+       (ex! "todo" #'+ivy:todo))
+      ((featurep! :completion helm)
+       (ex! "ag" #'+helm:ag)
+       (ex! "agc[wd]" #'+helm:ag-cwd)
+       (ex! "rg" #'+helm:rg)
+       (ex! "rgc[wd]" #'+helm:rg-cwd)
+       (ex! "sw[oop]" #'+helm:swoop)
+       (ex! "todo" #'+helm:todo)))
+
+;; Project tools
+(ex! "build" #'+eval/build)
+(ex! "debug" #'+debug/run)
+(ex! "er[rors]" #'flycheck-list-errors)
+
+;; File operations
+(ex! "cp" #'+evil:copy-this-file)
+(ex! "mv" #'+evil:move-this-file)
+(ex! "rm" #'+evil:delete-this-file)
+
+;; Sessions/tabs
+(ex! "sclear" #'+workspace/kill-session)
+(ex! "sl[oad]" #'+workspace:load-session)
+(ex! "ss[ave]" #'+workspace:save-session)
+(ex! "tabcl[ose]" #'+workspace:delete)
+(ex! "tabclear" #'doom/kill-all-buffers)
+(ex! "tabl[ast]" #'+workspace/switch-to-last)
+(ex! "tabload" #'+workspace:load)
+(ex! "tabn[ew]" #'+workspace:new)
+(ex! "tabn[ext]" #'+workspace:switch-next)
+(ex! "tabp[rev]" #'+workspace:switch-previous)
+(ex! "tabr[ename]" #'+workspace:rename)
+(ex! "tabs" #'+workspace/display)
+(ex! "tabsave" #'+workspace:save)
+
+(ex! "scr[atch]" #'cider-scratch)
+
+;; Org-mode
+(ex! "cap" #'+org-capture/dwim)
+
+(evil-define-command evil-alembic-revision (args)
+  (interactive "<a>")
+  (apply
+   #'generate-alembic-migration
+   (read-string "Message: ")
+   (s-split "\\s-+" (or args ""))))
+(ex! 
"arev[ision]" #'evil-alembic-revision) + +(evil-define-command evil-alembic-upgrade (&optional revision) + (interactive "<a>") + (alembic-upgrade (or revision "head"))) + +(ex! "aup[grade]" #'evil-alembic-upgrade) + +(evil-define-command evil-alembic-downgrade (&optional revision) + (interactive "<a>") + (alembic-downgrade revision)) + +(ex! "adown[grade]" #'evil-alembic-downgrade) + +(evil-define-command evil-alembic (args) + (interactive "<a>") + (run-alembic args)) + +(ex! "alemb[ic]" #'evil-alembic) + +;; Elixir +(add-hook! elixir-mode + (ex! "AV" #'alchemist-project-toggle-file-and-tests-other-window) + (ex! "A" #'alchemist-project-toggle-file-and-tests)) diff --git a/users/glittershark/emacs.d/+private.el.gpg b/users/glittershark/emacs.d/+private.el.gpg new file mode 100644 index 000000000000..3179dc424980 --- /dev/null +++ b/users/glittershark/emacs.d/+private.el.gpg Binary files differdiff --git a/users/glittershark/emacs.d/.gitignore b/users/glittershark/emacs.d/.gitignore new file mode 100644 index 000000000000..1fd0e3988771 --- /dev/null +++ b/users/glittershark/emacs.d/.gitignore @@ -0,0 +1,2 @@ +.authinfo.gpg ++private.el diff --git a/users/glittershark/emacs.d/autoload/evil.el b/users/glittershark/emacs.d/autoload/evil.el new file mode 100644 index 000000000000..319c93c05e47 --- /dev/null +++ b/users/glittershark/emacs.d/autoload/evil.el @@ -0,0 +1,37 @@ +;;; /autoload/evil.el -*- lexical-binding: t; -*- +;;;###if (featurep! :feature evil) + +;;;###autoload (autoload '+hlissner:multi-next-line "/autoload/evil" nil t) +(evil-define-motion +hlissner:multi-next-line (count) + "Move down 6 lines." + :type line + (let ((line-move-visual (or visual-line-mode (derived-mode-p 'text-mode)))) + (evil-line-move (* 6 (or count 1))))) + +;;;###autoload (autoload '+hlissner:multi-previous-line "/autoload/evil" nil t) +(evil-define-motion +hlissner:multi-previous-line (count) + "Move up 6 lines." + :type line + (let ((line-move-visual (or visual-line-mode (derived-mode-p 'text-mode)))) + (evil-line-move (- (* 6 (or count 1)))))) + +;;;###autoload (autoload '+hlissner:cd "/autoload/evil" nil t) +(evil-define-command +hlissner:cd () + "Change `default-directory' with `cd'." + (interactive "<f>") + (cd input)) + +;;;###autoload (autoload '+hlissner:kill-all-buffers "/autoload/evil" nil t) +(evil-define-command +hlissner:kill-all-buffers (&optional bang) + "Kill all buffers. If BANG, kill current session too." + (interactive "<!>") + (if bang + (+workspace/kill-session) + (doom/kill-all-buffers))) + +;;;###autoload (autoload '+hlissner:kill-matching-buffers "/autoload/evil" nil t) +(evil-define-command +hlissner:kill-matching-buffers (&optional bang pattern) + "Kill all buffers matching PATTERN regexp. If BANG, only match project +buffers." + (interactive "<a>") + (doom/kill-matching-buffers pattern bang)) diff --git a/users/glittershark/emacs.d/autoload/hlissner.el b/users/glittershark/emacs.d/autoload/hlissner.el new file mode 100644 index 000000000000..87b2236d12c7 --- /dev/null +++ b/users/glittershark/emacs.d/autoload/hlissner.el @@ -0,0 +1,53 @@ +;;; autoload/hlissner.el -*- lexical-binding: t; -*- + +;;;###autoload +(defun +hlissner/install-snippets () + "Install my snippets from https://github.com/hlissner/emacs-snippets into +private/hlissner/snippets." 
+ (interactive) + (doom-fetch :github "hlissner/emacs-snippets" + (expand-file-name "snippets" (doom-module-path :private 'hlissner)))) + +;;;###autoload +(defun +hlissner/yank-buffer-filename () + "Copy the current buffer's path to the kill ring." + (interactive) + (if-let* ((filename (or buffer-file-name (bound-and-true-p list-buffers-directory)))) + (message (kill-new (abbreviate-file-name filename))) + (error "Couldn't find filename in current buffer"))) + +(defmacro +hlissner-def-finder! (name dir) + "Define a pair of find-file and browse functions." + `(progn + (defun ,(intern (format "+hlissner/find-in-%s" name)) () + (interactive) + (let ((default-directory ,dir) + projectile-project-name + projectile-require-project-root + projectile-cached-buffer-file-name + projectile-cached-project-root) + (call-interactively (command-remapping #'projectile-find-file)))) + (defun ,(intern (format "+hlissner/browse-%s" name)) () + (interactive) + (let ((default-directory ,dir)) + (call-interactively (command-remapping #'find-file)))))) + +;;;###autoload (autoload '+hlissner/find-in-templates "autoload/hlissner" nil t) +;;;###autoload (autoload '+hlissner/browse-templates "autoload/hlissner" nil t) +(+hlissner-def-finder! templates +file-templates-dir) + +;;;###autoload (autoload '+hlissner/find-in-snippets "autoload/hlissner" nil t) +;;;###autoload (autoload '+hlissner/browse-snippets "autoload/hlissner" nil t) +(+hlissner-def-finder! snippets +hlissner-snippets-dir) + +;;;###autoload (autoload '+hlissner/find-in-dotfiles "autoload/hlissner" nil t) +;;;###autoload (autoload '+hlissner/browse-dotfiles "autoload/hlissner" nil t) +(+hlissner-def-finder! dotfiles (expand-file-name ".dotfiles" "~")) + +;;;###autoload (autoload '+hlissner/find-in-emacsd "autoload/hlissner" nil t) +;;;###autoload (autoload '+hlissner/browse-emacsd "autoload/hlissner" nil t) +(+hlissner-def-finder! emacsd doom-emacs-dir) + +;;;###autoload (autoload '+hlissner/find-in-notes "autoload/hlissner" nil t) +;;;###autoload (autoload '+hlissner/browse-notes "autoload/hlissner" nil t) +(+hlissner-def-finder! 
notes +org-dir) diff --git a/users/glittershark/emacs.d/clocked-in-elt.el b/users/glittershark/emacs.d/clocked-in-elt.el new file mode 100644 index 000000000000..00fda047e4a9 --- /dev/null +++ b/users/glittershark/emacs.d/clocked-in-elt.el @@ -0,0 +1,18 @@ +;;; ~/.doom.d/clocked-in-elt.el -*- lexical-binding: t; -*- +;;; +(load (expand-file-name "init" (or (getenv "EMACSDIR") + (expand-file-name + "../.emacs.d" + (file-name-directory (file-truename load-file-name)))))) + +(require 'org-clock) +(require 'org-element) + +(let ((item (or org-clock-marker + (car org-clock-history)))) + (when item + (with-current-buffer (marker-buffer item) + (goto-char (marker-position item)) + (let ((element (org-element-at-point))) + (when (eq 'headline (car element)) + (message "%s" (plist-get (cadr element) :raw-value))))))) diff --git a/users/glittershark/emacs.d/clojure.el b/users/glittershark/emacs.d/clojure.el new file mode 100644 index 000000000000..07f1a4e21373 --- /dev/null +++ b/users/glittershark/emacs.d/clojure.el @@ -0,0 +1,52 @@ +;;; ~/code/depot/users/glittershark/emacs.d/clojure.el -*- lexical-binding: t; -*- + +(defun clojure-thing-at-point-setup () + (interactive) + ;; Used by cider-find-dwim to parse the symbol at point + (setq-local + thing-at-point-file-name-chars + (concat thing-at-point-file-name-chars + "><!?"))) + +(defun +grfn/clojure-setup () + ;; (flycheck-select-checker 'clj-kondo) + (push 'clojure-cider-kibit flycheck-disabled-checkers) + (push 'clojure-cider-eastwood flycheck-disabled-checkers) + (push 'clojure-cider-typed flycheck-disabled-checkers) + ) + +(after! clojure-mode + (define-clojure-indent + (PUT 2) + (POST 2) + (GET 2) + (PATCH 2) + (DELETE 2) + (context 2) + (checking 3) + (match 1) + (domonad 0) + (describe 1) + (before 1) + (it 2)) + + (add-hook 'clojure-mode-hook #'clojure-thing-at-point-setup) + (add-hook 'clojure-mode-hook #'+grfn/clojure-setup)) + +(use-package! flycheck-clojure + ;; :disabled t + :after (flycheck cider) + :config + (flycheck-clojure-setup)) + +(after! clj-refactor + (setq cljr-magic-requires :prompt + cljr-clojure-test-declaration "[clojure.test :refer :all]" + cljr-cljc-clojure-test-declaration"#?(:clj [clojure.test :refer :all] +:cljs [cljs.test :refer-macros [deftest is testing]])" + ) + (add-to-list + 'cljr-magic-require-namespaces + '("s" . "clojure.spec.alpha"))) + +(set-popup-rule! 
"^\\*cider-test-report" :size 0.4) diff --git a/users/glittershark/emacs.d/company-sql.el b/users/glittershark/emacs.d/company-sql.el new file mode 100644 index 000000000000..2408347ceffc --- /dev/null +++ b/users/glittershark/emacs.d/company-sql.el @@ -0,0 +1,301 @@ +;;; ~/.doom.d/company-sql.el + +;;; Commentary: +;;; TODO + +;;; Code: + +(require 'emacsql) +(require 'emacsql-psql) +(require 'dash) +(require 's) +(require 'cl-lib) + +;;; Config + +(defvar-local company-sql-db-host "localhost" + "Host of the postgresql database to query for autocomplete information") + +(defvar-local company-sql-db-port 5432 + "Port of the postgresql database to query for autocomplete information") + +(defvar-local company-sql-db-user "postgres" + "Username of the postgresql database to query for autocomplete information") + +(defvar-local company-sql-db-name nil + "PostgreSQL database name to query for autocomplete information") + +;;; DB Connection + +(defvar-local company-sql/connection nil) + +(defun company-sql/connect () + (unless company-sql/connection + (setq-local company-sql/connection + (emacsql-psql company-sql-db-name + :hostname company-sql-db-host + :username company-sql-db-user + :port (number-to-string company-sql-db-port)))) + company-sql/connection) + +;;; Utils + +(defmacro comment (&rest _)) + +(defun ->string (x) + (cond + ((stringp x) x) + ((symbolp x) (symbol-name x)))) + +(defun alist-get-equal (key alist) + "Like `alist-get', but uses `equal' instead of `eq' for comparing keys" + (->> alist + (-find (lambda (pair) (equal key (car pair)))) + (cdr))) + +;;; Listing relations + +(cl-defun company-sql/list-tables (conn) + (with-timeout (3) + (-map (-compose 'symbol-name 'car) + (emacsql conn + [:select [tablename] + :from pg_catalog:pg_tables + :where (and (!= schemaname '"information_schema") + (!= schemaname '"pg_catalog"))])))) + +(cl-defun company-sql/list-columns (conn) + (with-timeout (3) + (-map + (lambda (row) + (propertize (symbol-name (nth 0 row)) + 'table-name (nth 1 row) + 'data-type (nth 2 row))) + (emacsql conn + [:select [column_name + table_name + data_type] + :from information_schema:columns])))) + +;;; Keywords + +(defvar company-postgresql/keywords + (list +"a" "abort" "abs" "absent" "absolute" "access" "according" "action" "ada" "add" +"admin" "after" "aggregate" "all" "allocate" "also" "alter" "always" "analyse" +"analyze" "and" "any" "are" "array" "array_agg" "array_max_cardinality" "as" +"asc" "asensitive" "assertion" "assignment" "asymmetric" "at" "atomic" "attach" +"attribute" "attributes" "authorization" "avg" "backward" "base64" "before" +"begin" "begin_frame" "begin_partition" "bernoulli" "between" "bigint" "binary" +"bit" "bit_length" "blob" "blocked" "bom" "boolean" "both" "breadth" "by" "c" +"cache" "call" "called" "cardinality" "cascade" "cascaded" "case" "cast" +"catalog" "catalog_name" "ceil" "ceiling" "chain" "char" "character" +"characteristics" "characters" "character_length" "character_set_catalog" +"character_set_name" "character_set_schema" "char_length" "check" "checkpoint" +"class" "class_origin" "clob" "close" "cluster" "coalesce" "cobol" "collate" +"collation" "collation_catalog" "collation_name" "collation_schema" "collect" +"column" "columns" "column_name" "command_function" "command_function_code" +"comment" "comments" "commit" "committed" "concurrently" "condition" +"condition_number" "configuration" "conflict" "connect" "connection" +"connection_name" "constraint" "constraints" "constraint_catalog" +"constraint_name" 
"constraint_schema" "constructor" "contains" "content" +"continue" "control" "conversion" "convert" "copy" "corr" "corresponding" "cost" +"count" "covar_pop" "covar_samp" "create" "cross" "csv" "cube" "cume_dist" +"current" "current_catalog" "current_date" "current_default_transform_group" +"current_path" "current_role" "current_row" "current_schema" "current_time" +"current_timestamp" "current_transform_group_for_type" "current_user" "cursor" +"cursor_name" "cycle" "data" "database" "datalink" "date" +"datetime_interval_code" "datetime_interval_precision" "day" "db" "deallocate" +"dec" "decimal" "declare" "default" "defaults" "deferrable" "deferred" "defined" +"definer" "degree" "delete" "delimiter" "delimiters" "dense_rank" "depends" +"depth" "deref" "derived" "desc" "describe" "descriptor" "detach" +"deterministic" "diagnostics" "dictionary" "disable" "discard" "disconnect" +"dispatch" "distinct" "dlnewcopy" "dlpreviouscopy" "dlurlcomplete" +"dlurlcompleteonly" "dlurlcompletewrite" "dlurlpath" "dlurlpathonly" +"dlurlpathwrite" "dlurlscheme" "dlurlserver" "dlvalue" "do" "document" "domain" +"double" "drop" "dynamic" "dynamic_function" "dynamic_function_code" "each" +"element" "else" "empty" "enable" "encoding" "encrypted" "end" "end-exec" +"end_frame" "end_partition" "enforced" "enum" "equals" "escape" "event" "every" +"except" "exception" "exclude" "excluding" "exclusive" "exec" "execute" "exists" +"exp" "explain" "expression" "extension" "external" "extract" "false" "family" +"fetch" "file" "filter" "final" "first" "first_value" "flag" "float" "floor" +"following" "for" "force" "foreign" "fortran" "forward" "found" "frame_row" +"free" "freeze" "from" "fs" "full" "function" "functions" "fusion" "g" "general" +"generated" "get" "global" "go" "goto" "grant" "granted" "greatest" "group" +"grouping" "groups" "handler" "having" "header" "hex" "hierarchy" "hold" "hour" +"id" "identity" "if" "ignore" "ilike" "immediate" "immediately" "immutable" +"implementation" "implicit" "import" "in" "include" "including" "increment" +"indent" "index" "indexes" "indicator" "inherit" "inherits" "initially" "inline" +"inner" "inout" "input" "insensitive" "insert" "instance" "instantiable" +"instead" "int" "integer" "integrity" "intersect" "intersection" "interval" +"into" "invoker" "is" "isnull" "isolation" "join" "k" "key" "key_member" +"key_type" "label" "lag" "language" "large" "last" "last_value" "lateral" "lead" +"leading" "leakproof" "least" "left" "length" "level" "library" "like" +"like_regex" "limit" "link" "listen" "ln" "load" "local" "localtime" +"localtimestamp" "location" "locator" "lock" "locked" "logged" "lower" "m" "map" +"mapping" "match" "matched" "materialized" "max" "maxvalue" "max_cardinality" +"member" "merge" "message_length" "message_octet_length" "message_text" "method" +"min" "minute" "minvalue" "mod" "mode" "modifies" "module" "month" "more" "move" +"multiset" "mumps" "name" "names" "namespace" "national" "natural" "nchar" +"nclob" "nesting" "new" "next" "nfc" "nfd" "nfkc" "nfkd" "nil" "no" "none" +"normalize" "normalized" "not" "nothing" "notify" "notnull" "nowait" "nth_value" +"ntile" "null" "nullable" "nullif" "nulls" "number" "numeric" "object" +"occurrences_regex" "octets" "octet_length" "of" "off" "offset" "oids" "old" +"on" "only" "open" "operator" "option" "options" "or" "order" "ordering" +"ordinality" "others" "out" "outer" "output" "over" "overlaps" "overlay" +"overriding" "owned" "owner" "p" "pad" "parallel" "parameter" "parameter_mode" +"parameter_name" 
"parameter_ordinal_position" "parameter_specific_catalog" +"parameter_specific_name" "parameter_specific_schema" "parser" "partial" +"partition" "pascal" "passing" "passthrough" "password" "path" "percent" +"percentile_cont" "percentile_disc" "percent_rank" "period" "permission" +"placing" "plans" "pli" "policy" "portion" "position" "position_regex" "power" +"precedes" "preceding" "precision" "prepare" "prepared" "preserve" "primary" +"prior" "privileges" "procedural" "procedure" "procedures" "program" "public" +"publication" "quote" "range" "rank" "read" "reads" "real" "reassign" "recheck" +"recovery" "recursive" "ref" "references" "referencing" "refresh" "regr_avgx" +"regr_avgy" "regr_count" "regr_intercept" "regr_r2" "regr_slope" "regr_sxx" +"regr_sxy" "regr_syy" "reindex" "relative" "release" "rename" "repeatable" +"replace" "replica" "requiring" "reset" "respect" "restart" "restore" "restrict" +"result" "return" "returned_cardinality" "returned_length" +"returned_octet_length" "returned_sqlstate" "returning" "returns" "revoke" +"right" "role" "rollback" "rollup" "routine" "routines" "routine_catalog" +"routine_name" "routine_schema" "row" "rows" "row_count" "row_number" "rule" +"savepoint" "scale" "schema" "schemas" "schema_name" "scope" "scope_catalog" +"scope_name" "scope_schema" "scroll" "search" "second" "section" "security" +"select" "selective" "self" "sensitive" "sequence" "sequences" "serializable" +"server" "server_name" "session" "session_user" "set" "setof" "sets" "share" +"show" "similar" "simple" "size" "skip" "smallint" "snapshot" "some" "source" +"space" "specific" "specifictype" "specific_name" "sql" "sqlcode" "sqlerror" +"sqlexception" "sqlstate" "sqlwarning" "sqrt" "stable" "standalone" "start" +"state" "statement" "static" "statistics" "stddev_pop" "stddev_samp" "stdin" +"stdout" "storage" "strict" "strip" "structure" "style" "subclass_origin" +"submultiset" "subscription" "substring" "substring_regex" "succeeds" "sum" +"symmetric" "sysid" "system" "system_time" "system_user" "t" "table" "tables" +"tablesample" "tablespace" "table_name" "temp" "template" "temporary" "text" +"then" "ties" "time" "timestamp" "timezone_hour" "timezone_minute" "to" "token" +"top_level_count" "trailing" "transaction" "transactions_committed" +"transactions_rolled_back" "transaction_active" "transform" "transforms" +"translate" "translate_regex" "translation" "treat" "trigger" "trigger_catalog" +"trigger_name" "trigger_schema" "trim" "trim_array" "true" "truncate" "trusted" +"type" "types" "uescape" "unbounded" "uncommitted" "under" "unencrypted" "union" +"unique" "unknown" "unlink" "unlisten" "unlogged" "unnamed" "unnest" "until" +"untyped" "update" "upper" "uri" "usage" "user" "user_defined_type_catalog" +"user_defined_type_code" "user_defined_type_name" "user_defined_type_schema" +"using" "vacuum" "valid" "validate" "validator" "value" "values" "value_of" +"varbinary" "varchar" "variadic" "varying" "var_pop" "var_samp" "verbose" +"version" "versioning" "view" "views" "volatile" "when" "whenever" "where" +"whitespace" "width_bucket" "window" "with" "within" "without" "work" "wrapper" +"write" "xml" "xmlagg" "xmlattributes" "xmlbinary" "xmlcast" "xmlcomment" +"xmlconcat" "xmldeclaration" "xmldocument" "xmlelement" "xmlexists" "xmlforest" +"xmliterate" "xmlnamespaces" "xmlparse" "xmlpi" "xmlquery" "xmlroot" "xmlschema" +"xmlserialize" "xmltable" "xmltext" "xmlvalidate" "year" "yes" "zone")) + +;;; Company backend + +(cl-defun company-postgresql/candidates (prefix conn) + (-filter + 
(apply-partially #'s-starts-with? prefix) + (append (-map (lambda (s) + (propertize s 'company-postgresql-annotation "table")) + + (-map (lambda (s) + (propertize s 'company-postgresql-annotation + (format "%s.%s %s" + (get-text-property 0 'table-name s) + s + (-> + (get-text-property 0 'data-type s) + (->string) + (upcase))))) + (company-sql/list-columns conn)) + (-map (lambda (s) + (propertize s 'company-postgresql-annotation "keyword")) + company-postgresql/keywords))))) + +(defun company-postgresql (command &optional arg &rest _) + (interactive (list 'interactive)) + (cl-case command + (interactive (company-begin-backend 'company-postgresql)) + (init (company-sql/connect)) + (prefix (company-grab-symbol)) + (annotation + (get-text-property 0 'company-postgresql-annotation arg)) + (candidates (company-postgresql/candidates + arg + (company-sql/connect))) + (duplicates t) + (ignore-case t))) + +;;; org-babel company sql + +(defvar-local org-company-sql/connections + ()) + +(defun org-company-sql/connect (conn-params) + (or (alist-get-equal conn-params org-company-sql/connections) + (let ((conn (apply 'emacsql-psql conn-params))) + (add-to-list 'org-company-sql/connections (cons conn-params conn)) + conn))) + +(defun org-company-sql/in-sql-source-block-p () + (let ((org-elt (org-element-at-point))) + (and (eq 'src-block (car org-elt)) + (equal "sql" (plist-get (cadr org-elt) + :language))))) + +(defun org-company-sql/parse-cmdline (cmdline) + (let* ((lexed (s-split (rx (one-or-more blank)) cmdline)) + (go (lambda (state tokens) + (if (null tokens) () + (let ((token (car tokens)) + (tokens (cdr tokens))) + (if (null state) + (if (s-starts-with? "-" token) + (funcall go token tokens) + (cons token (funcall go state tokens))) + (cons (cons state token) ; ("-h" . 
"localhost") + (funcall go nil tokens))))))) + (opts (funcall go nil lexed))) + opts)) + +(defun org-company-sql/source-block-conn-params () + (let* ((block-info (org-babel-get-src-block-info)) + (params (caddr block-info)) + (cmdline (alist-get :cmdline params)) + (parsed (org-company-sql/parse-cmdline cmdline)) + (opts (-filter #'listp parsed)) + (positional (-filter #'stringp parsed)) + (host (alist-get-equal "-h" opts)) + (port (or (alist-get-equal "-p" opts) + "5432")) + (dbname (or (alist-get-equal "-d" opts) + (car positional))) + (username (or (alist-get-equal "-U" opts) + (cadr positional)))) + (list dbname + :hostname host + :username username + :port port))) + +(defun org-company-sql/connection-for-source-block () + (org-company-sql/connect + (org-company-sql/source-block-conn-params))) + + +(defun company-ob-postgresql (command &optional arg &rest _) + (interactive (list 'interactive)) + (cl-case command + (interactive (company-begin-backend 'company-ob-postgresql)) + (prefix (and (org-company-sql/in-sql-source-block-p) + (company-grab-symbol))) + (annotation (get-text-property 0 'company-postgresql-annotation arg)) + (candidates + (company-postgresql/candidates + arg + (org-company-sql/connection-for-source-block))) + (duplicates t) + (ignore-case t))) + +;;; + +(provide 'company-sql) diff --git a/users/glittershark/emacs.d/config.el b/users/glittershark/emacs.d/config.el new file mode 100644 index 000000000000..f43054c71772 --- /dev/null +++ b/users/glittershark/emacs.d/config.el @@ -0,0 +1,1110 @@ +;;; private/grfn/config.el -*- lexical-binding: t; -*- + +;; I've swapped these keys on my keyboard +(setq x-super-keysym 'alt + x-alt-keysym 'meta) + +(setq user-mail-address "root@gws.fyi" + user-full-name "Griffin Smith") + +(let ((font-family (pcase system-type + ('darwin "MesloLGSDZ NF") + ('gnu/linux "Meslo LGSDZ Nerd Font")))) + (setq doom-font (font-spec :family font-family :size 14) + doom-big-font (font-spec :family font-family :size 24) + doom-big-font-increment 5 + doom-variable-pitch-font (font-spec :family "DejaVu Sans") + doom-unicode-font (font-spec :family font-family))) + +(require 's) + +(undefine-key! :keymaps 'doom-leader-map "/") + +(load! "utils") +(load! "company-sql") +(load! "org-query") +(load! "show-matching-paren") +(load! "irc") +(load! "github-org") +(load! "org-gcal") +(load! "grid") +(load! "nix") +(load! "email") +(load! "cpp") +(load! "lisp") +(load! "clojure") +(load! "rust") + +(require 'tvl) + +(add-hook! elixir-mode + (require 'flycheck-credo) + (setq flycheck-elixir-credo-strict t) + (flycheck-credo-setup) + + (require 'flycheck-mix) (flycheck-mix-setup) + + (require 'flycheck-dialyxir) (flycheck-dialyxir-setup) + + (flycheck-mode)) + +(setq +solarized-s-base03 "#002b36" + +solarized-s-base02 "#073642" + ;; emphasized content + +solarized-s-base01 "#586e75" + ;; primary content + +solarized-s-base00 "#657b83" + +solarized-s-base0 "#839496" + ;; comments + +solarized-s-base1 "#93a1a1" + ;; background highlight light + +solarized-s-base2 "#eee8d5" + ;; background light + +solarized-s-base3 "#fdf6e3" + + ;; Solarized accented colors + +solarized-yellow "#b58900" + +solarized-orange "#cb4b16" + +solarized-red "#dc322f" + +solarized-magenta "#d33682" + +solarized-violet "#6c71c4" + +solarized-blue "#268bd2" + +solarized-cyan "#2aa198" + +solarized-green "#859900" + + ;; Darker and lighter accented colors + ;; Only use these in exceptional circumstances! 
+ +solarized-yellow-d "#7B6000" + +solarized-yellow-l "#DEB542" + +solarized-orange-d "#8B2C02" + +solarized-orange-l "#F2804F" + +solarized-red-d "#990A1B" + +solarized-red-l "#FF6E64" + +solarized-magenta-d "#93115C" + +solarized-magenta-l "#F771AC" + +solarized-violet-d "#3F4D91" + +solarized-violet-l "#9EA0E5" + +solarized-blue-d "#00629D" + +solarized-blue-l "#69B7F0" + +solarized-cyan-d "#00736F" + +solarized-cyan-l "#69CABF" + +solarized-green-d "#546E00" + +solarized-green-l "#B4C342") + +(defcustom theme-overrides nil + "Association list of override faces to set for different custom themes.") + +(defadvice load-theme (after theme-set-overrides activate) + (dolist (theme-settings theme-overrides) + (let ((theme (car theme-settings)) + (faces (cadr theme-settings))) + (if (member theme custom-enabled-themes) + (progn + (dolist (face faces) + (custom-theme-set-faces theme face))))))) + +(defun alist-set (alist-symbol key value) + "Set VALUE of a KEY in ALIST-SYMBOL." + (set alist-symbol (cons (list key value) (assq-delete-all key (eval alist-symbol))))) + +(comment + (custom-theme-set-faces 'grfn-solarized-light + `(font-lock-doc-face + ((t (:foreground ,+solarized-s-base1))))) + ++solarized-s-base1 +(custom-theme-) + (custom-face-get-current-spec 'font-lock-doc-face) + + ) + +(alist-set 'theme-overrides 'grfn-solarized-light + `((font-lock-doc-face ((t (:foreground ,+solarized-s-base1)))) + (font-lock-preprocessor-face ((t (:foreground ,+solarized-red)))) + (font-lock-keyword-face ((t (:foreground ,+solarized-green :bold nil)))) + (font-lock-builtin-face ((t (:foreground ,+solarized-s-base01 + :bold t)))) + + (elixir-attribute-face ((t (:foreground ,+solarized-blue)))) + (elixir-atom-face ((t (:foreground ,+solarized-cyan)))) + (linum ((t (:background ,+solarized-s-base2 :foreground ,+solarized-s-base1)))) + (line-number ((t (:background ,+solarized-s-base2 :foreground ,+solarized-s-base1)))) + (line-number-current-line ((t (:background ,+solarized-s-base2 :foreground ,+solarized-s-base1)))) + + (haskell-operator-face ((t (:foreground ,+solarized-green)))) + (haskell-keyword-face ((t (:foreground ,+solarized-cyan)))) + + (org-drawer ((t (:foreground ,+solarized-s-base1 + :bold t)))))) + +(setq solarized-use-variable-pitch nil + solarized-scale-org-headlines nil + solarized-use-less-bold t) + +(add-to-list 'custom-theme-load-path "~/.doom.d/themes") +(load-theme 'grfn-solarized-light t) + +(load! "slack") + +(defface haskell-import-face `((t (:foreground ,+solarized-magenta))) "") + +(setq doom-theme 'grfn-solarized-light) +; (setq doom-theme 'doom-solarized-light) + +(add-hook! doom-post-init + (set-face-attribute 'bold nil :weight 'ultra-light) + (set-face-bold 'bold nil) + (enable-theme 'grfn-solarized-light)) + +(defun rx-words (&rest words) + (rx-to-string + `(and symbol-start (group (or ,@words)) symbol-end))) + +(font-lock-add-keywords + 'elixir-mode + `((,(rx-words "def" + "defp" + "test" + "describe" + "property" + "defrecord" + "defmodule" + "defstruct" + "defdelegate" + "defprotocol" + "defimpl" + "use" + "import" + "alias" + "require" + "assert" + "refute" + "assert_raise") + . + 'font-lock-preprocessor-face))) + +(font-lock-add-keywords + 'elixir-mode + `((,(rx-words "def" + "defp" + "test" + "describe" + "property" + "defrecord" + "defmodule" + "defstruct" + "defdelegate" + "use" + "import" + "alias" + "require" + "assert" + "refute" + "assert_raise") + . + 'font-lock-preprocessor-face))) + +(font-lock-add-keywords + 'haskell-mode + `((,(rx-words "import") . 
'haskell-import-face))) + +;; (font-lock-add-keywords +;; 'haskell-mode +;; `((,(rx "-- |") . 'haskell-keyword-face))) + + +;; (load-file (let ((coding-system-for-read 'utf-8)) +;; (shell-command-to-string "agda-mode locate"))) + +(defvar +grfn-dir (file-name-directory load-file-name)) +(defvar +grfn-snippets-dir (expand-file-name "snippets/" +grfn-dir)) + +;; +(load! "+bindings") +(load! "+commands") +(load! "cpp") + +(load! "+private") + +(require 'dash) + +(use-package! predd) + + +;; +;; Global config +;; + +(setq doom-modeline-buffer-file-name-style 'relative-to-project + doom-modeline-modal-icon nil + doom-modeline-github t) + +;; +;; Modules +;; + +(after! smartparens + ;; Auto-close more conservatively and expand braces on RET + (let ((unless-list '(sp-point-before-word-p + sp-point-after-word-p + sp-point-before-same-p))) + (sp-pair "'" nil :unless unless-list) + (sp-pair "\"" nil :unless unless-list)) + (sp-pair "{" nil :post-handlers '(("||\n[i]" "RET") ("| " " ")) + :unless '(sp-point-before-word-p sp-point-before-same-p)) + (sp-pair "(" nil :post-handlers '(("||\n[i]" "RET") ("| " " ")) + :unless '(sp-point-before-word-p sp-point-before-same-p)) + (sp-pair "[" nil :post-handlers '(("| " " ")) + :unless '(sp-point-before-word-p sp-point-before-same-p))) + +;; feature/snippets +(after! yasnippet + ;; Don't use default snippets, use mine. + (setq yas-snippet-dirs + (append (list '+grfn-snippets-dir) + (delq 'yas-installed-snippets-dir yas-snippet-dirs)))) + +(after! company + (setq company-idle-delay 0.2 + company-minimum-prefix-length 1)) + +(setq doom-modeline-height 12) + +(load "/home/grfn/code/org-clubhouse/org-clubhouse.el") +(use-package! org-clubhouse + :hook (org-mode . org-clubhouse-mode) + :config + (setq org-clubhouse-state-alist '(("BACKLOG" . "Unscheduled") + ("TODO" . "Ready for Development") + ("ACTIVE" . "In Development") + ("PR" . "Ready for Review") + ("DONE" . "Completed")) + org-clubhouse-username "griffinsmith" + org-clubhouse-claim-story-on-status-update '("ACTIVE" "PR" "DONE") + org-clubhouse-create-stories-with-labels 'existing + org-clubhouse-workflow-name "Engineering")) + + + +;; Should really figure out which of these is correct, eventually + +(setq +solarized-s-base03 "#002b36" + +solarized-s-base02 "#073642" + ;; emphasized content + +solarized-s-base01 "#586e75" + ;; primary content + +solarized-s-base00 "#657b83" + +solarized-s-base0 "#839496" + ;; comments + +solarized-s-base1 "#93a1a1" + ;; background highlight light + +solarized-s-base2 "#eee8d5" + ;; background light + +solarized-s-base3 "#fdf6e3" + + ;; Solarized accented colors + +solarized-yellow "#b58900" + +solarized-orange "#cb4b16" + +solarized-red "#dc322f" + +solarized-magenta "#d33682" + +solarized-violet "#6c71c4" + +solarized-blue "#268bd2" + +solarized-cyan "#2aa198" + +solarized-green "#859900" + + ;; Darker and lighter accented colors + ;; Only use these in exceptional circumstances! + +solarized-yellow-d "#7B6000" + +solarized-yellow-l "#DEB542" + +solarized-orange-d "#8B2C02" + +solarized-orange-l "#F2804F" + +solarized-red-d "#990A1B" + +solarized-red-l "#FF6E64" + +solarized-magenta-d "#93115C" + +solarized-magenta-l "#F771AC" + +solarized-violet-d "#3F4D91" + +solarized-violet-l "#9EA0E5" + +solarized-blue-d "#00629D" + +solarized-blue-l "#69B7F0" + +solarized-cyan-d "#00736F" + +solarized-cyan-l "#69CABF" + +solarized-green-d "#546E00" + +solarized-green-l "#B4C342") + +(set-cursor-color +solarized-s-base02) + +(after! 
doom-theme + (set-face-foreground 'font-lock-doc-face +solarized-s-base1) + (set-face-foreground 'org-block +solarized-s-base00) + (set-face-foreground 'slack-message-output-header +solarized-s-base01) + (set-face-attribute 'slack-message-output-header nil :underline nil) + (set-face-attribute 'slack-message-output-text nil :height 1.0) + ) + +(after! solarized-theme + (set-face-foreground 'font-lock-doc-face +solarized-s-base1) + (set-face-foreground 'org-block +solarized-s-base00) + + (set-face-foreground 'slack-message-output-header +solarized-s-base01) + (set-face-attribute 'slack-message-output-header nil :underline nil) + (set-face-attribute 'slack-message-output-text nil :height 1.0) + ) + +(after! evil + (setq evil-shift-width 2)) + +(after! org + (load! "org-config") + + (set-face-foreground 'org-block +solarized-s-base00) + (add-hook! org-mode + (add-hook! evil-normal-state-entry-hook + #'org-align-all-tags)) + (add-hook 'org-mode-hook (lambda () (display-line-numbers-mode -1))) + (setq whitespace-global-modes '(not org-mode magit-mode vterm-mode)) + (setf (alist-get 'file org-link-frame-setup) 'find-file-other-window) + (set-face-foreground 'org-block +solarized-s-base00) + + ;; (add-hook! org-mode + ;; (set-company-backend! 'org-mode + ;; '(:separate company-ob-postgresql + ;; company-dabbrev + ;; company-yasnippet + ;; company-ispell))) + ) + +(after! magit + (setq git-commit-summary-max-length 50)) + +(after! ivy + (setq ivy-re-builders-alist + '((t . ivy--regex-fuzzy)))) + +(add-hook 'before-save-hook 'delete-trailing-whitespace) + +(after! paxedit + (add-hook! emacs-lisp-mode #'paxedit-mode) + (add-hook! clojure-mode #'paxedit-mode) + (add-hook! common-lisp-mode #'paxedit-mode)) + +(require 'haskell) + +(let ((m-symbols + '(("`mappend`" . "⊕") + ("<>" . "⊕") + ("`elem`" . "∈") + ("`notElem`" . "∉")))) + (dolist (item m-symbols) (add-to-list 'haskell-font-lock-symbols-alist item))) + +(setq haskell-font-lock-symbols t) + + +(add-hook! haskell-mode + ;; (intero-mode) + (lsp-mode) + ;; (flycheck-add-next-checker + ;; 'intero + ;; 'haskell-hlint) + (set-fill-column 80) + (setq evil-shift-width 2)) + +(auth-source-pass-enable) + +(require 'fill-column-indicator) +;;; * Column Marker +(defun sanityinc/fci-enabled-p () (symbol-value 'fci-mode)) + +(defvar sanityinc/fci-mode-suppressed nil) +(make-variable-buffer-local 'sanityinc/fci-mode-suppressed) + +(defadvice popup-create (before suppress-fci-mode activate) + "Suspend fci-mode while popups are visible" + (let ((fci-enabled (sanityinc/fci-enabled-p))) + (when fci-enabled + (setq sanityinc/fci-mode-suppressed fci-enabled) + (turn-off-fci-mode)))) + +(defadvice popup-delete (after restore-fci-mode activate) + "Restore fci-mode when all popups have closed" + (when (and sanityinc/fci-mode-suppressed + (null popup-instances)) + (setq sanityinc/fci-mode-suppressed nil) + (turn-on-fci-mode))) + + +;;; Javascript + +(require 'smartparens) + +(setq js-indent-level 2) + +(require 'prettier-js) +(after! prettier-js + (add-hook! rjsx-mode #'prettier-js-mode) + (add-hook! js2-mode #'prettier-js-mode) + (add-hook! json-mode #'prettier-js-mode) + (add-hook! 
css-mode #'prettier-js-mode)) + +(require 'flycheck-flow) +(with-eval-after-load 'flycheck + (flycheck-add-mode 'javascript-flow 'rjsx-mode) + (flycheck-add-mode 'javascript-flow 'flow-minor-mode) + (flycheck-add-mode 'javascript-eslint 'flow-minor-mode) + (flycheck-add-next-checker 'javascript-flow 'javascript-eslint)) + + +(require 'flow-minor-mode) + +(remove-hook 'js2-mode-hook 'tide-setup t) + +;; (require 'company-flow) +;; (eval-after-load 'company +;; (lambda () (add-to-list 'company-backends 'company-flow))) +(defun flow/set-flow-executable () + (interactive) + (let* ((os (pcase system-type + ('darwin "osx") + ('gnu/linux "linux64") + (_ nil))) + (root (locate-dominating-file buffer-file-name "node_modules/flow-bin")) + (executable (car (file-expand-wildcards + (concat root "node_modules/flow-bin/*" os "*/flow"))))) + (setq-local company-flow-executable executable) + ;; These are not necessary for this package, but a good idea if you use + ;; these other packages + (setq-local flow-minor-default-binary executable) + (setq-local flycheck-javascript-flow-executable executable))) + +;; Set this to the mode you use, I use rjsx-mode +(add-hook 'rjsx-mode-hook #'flow/set-flow-executable t) + + +;; Auto-format Haskell on save, with a combination of hindent + brittany + +; (define-minor-mode brittany-haskell-mode +; :init-value nil +; :group 'haskell +; :lighter "Brittany-Haskell" +; :keymap '() +; ) + + +(require 'alert) +(setq alert-default-style 'libnotify) + +;; (setq slack-buffer-function #'switch-to-buffer) + +(setq projectile-test-suffix-function + (lambda (project-type) + (case project-type + ('haskell-stack "Test") + ('npm ".test") + (otherwise (projectile-test-suffix project-type))))) + +(setq projectile-create-missing-test-files 't) + +(after! magit + (map! 
:map magit-mode-map
+        ;; :n "] ]" #'magit-section-forward
+        ;; :n "[ [" #'magit-section-backward
+        )
+
+  (define-suffix-command magit-commit-wip ()
+    (interactive)
+    (magit-commit-create '("-m" "wip")))
+
+  (transient-append-suffix
+    #'magit-commit
+    ["c"]
+    (list "W" "Commit WIP" #'magit-commit-wip))
+
+  (define-suffix-command magit-reset-head-back ()
+    (interactive)
+    (magit-reset-mixed "HEAD~"))
+
+  (define-suffix-command magit-reset-head-previous ()
+    (interactive)
+    (magit-reset-mixed "HEAD@{1}"))
+
+  (transient-append-suffix
+    #'magit-reset
+    ["f"]
+    (list "b" "Reset HEAD~" #'magit-reset-head-back))
+  (transient-append-suffix
+    #'magit-reset
+    ["f"]
+    (list "o" "Reset HEAD@{1}" #'magit-reset-head-previous))
+
+  (defun magit-read-org-clubhouse-branch-name ()
+    (when-let ((story-id (org-clubhouse-clocked-in-story-id)))
+      (let ((desc
+             (magit-read-string-ns
+              (format "Story description (to go after gs/ch%d/)"
+                      story-id))))
+        (format "gs/ch%d/%s" story-id desc))))
+
+  (defun magit-read-org-clubhouse-branch-args ()
+    (if-let ((story-id (org-clubhouse-clocked-in-story-id)))
+        (let ((start-point (magit-read-starting-point
+                            "Create and checkout branch for Clubhouse story"
+                            nil
+                            "origin/master")))
+          (if (magit-rev-verify start-point)
+              (when-let ((desc (magit-read-org-clubhouse-branch-name)))
+                (list desc start-point))
+            (user-error "Not a valid starting point: %s" start-point)))
+      (user-error "No currently clocked-in clubhouse story")))
+
+  (transient-define-suffix magit-checkout-org-clubhouse-branch (branch start-point)
+    (interactive (magit-read-org-clubhouse-branch-args))
+    (magit-branch-and-checkout branch start-point))
+
+  (transient-define-suffix magit-rename-org-clubhouse-branch (old new)
+    (interactive
+     (let ((branch (magit-read-local-branch "Rename branch")))
+       (list branch (magit-read-org-clubhouse-branch-name))))
+    (when (and old new)
+      (magit-branch-rename old new)))
+
+  (transient-append-suffix
+    #'magit-branch
+    ["c"]
+    (list "C" "Checkout Clubhouse branch" #'magit-checkout-org-clubhouse-branch))
+  (transient-append-suffix
+    #'magit-branch
+    ["c"]
+    (list "M" "Rename branch to Clubhouse ticket" #'magit-rename-org-clubhouse-branch)))
+
+;; (defun grfn/split-window-more-sensibly (&optional window)
+;;   (let ((window (or window (selected-window))))
+;;     (or (and (window-splittable-p window)
+;;              ;; Split window vertically.
+;;              (with-selected-window window
+;;                (split-window-right)))
+;;         (and (window-splittable-p window t)
+;;              ;; Split window horizontally.
+;;              (with-selected-window window
+;;                (split-window-right)))
+;;         (and (eq window (frame-root-window (window-frame window)))
+;;              (not (window-minibuffer-p window))
+;;              ;; If WINDOW is the only window on its frame and is not the
+;;              ;; minibuffer window, try to split it vertically disregarding
+;;              ;; the value of `split-height-threshold'.
+;;              (let ((split-height-threshold 0))
+;;                (when (window-splittable-p window)
+;;                  (with-selected-window window
+;;                    (split-window-below))))))))
+
+(use-package! lsp-mode
+  :after (:any haskell-mode)
+  :config
+  (setq lsp-response-timeout 60)
+  :hook
+  (haskell-mode . lsp-mode))
+
+(use-package! 
lsp-ui + :after lsp-mode + :config + (defun +grfn/lsp-ui-doc-frame-hook (frame window) + (set-frame-font (if doom-big-font-mode doom-big-font doom-font) + nil (list frame))) + (setq lsp-ui-flycheck-enable t + lsp-ui-doc-header nil + lsp-ui-doc-position 'top + lsp-ui-doc-alignment 'window + lsp-ui-doc-frame-hook '+grfn/lsp-ui-doc-frame-hook + lsp-ui-doc-max-width 150 + lsp-ui-doc-max-height 13) + (setq imenu-auto-rescan t) + (set-face-background 'lsp-ui-doc-background +solarized-s-base2) + (set-face-background 'lsp-face-highlight-read +solarized-s-base2) + (set-face-background 'lsp-face-highlight-write +solarized-s-base2) + :hook + (lsp-mode . lsp-ui-mode) + (lsp-ui-mode . flycheck-mode)) + +(use-package! company-lsp + :after (lsp-mode lsp-ui) + :config + (add-to-list #'company-backends #'company-lsp) + (setq company-lsp-async t)) + +(use-package! dap-mode) + +(defun +grfn/haskell-mode-setup () + (interactive) + (flymake-mode -1) + (add-to-list 'flycheck-disabled-checkers 'haskell-ghc) + + (flycheck-remove-next-checker 'lsp 'haskell-ghc) + (flycheck-add-next-checker 'lsp '(warning . haskell-hlint)) + + ;; If there’s a 'hie.sh' defined locally by a project + ;; (e.g. to run HIE in a nix-shell), use it… + (when-let ((project-dir (locate-dominating-file default-directory "hie.sh"))) + (cl-flet + ((which (cmd) + (s-trim + (shell-command-to-string + (concat + "nix-shell " + (expand-file-name "shell.nix" project-dir) + " --run \"which " cmd "\" 2>/dev/null"))))) + (setq-local + lsp-haskell-process-path-hie (expand-file-name "hie.sh" project-dir) + haskell-hoogle-command (which "hoogle")))) + ;; … and only then setup the LSP. + (lsp)) + +(defun never-flymake-mode (orig &rest args) + (when (and (bound-and-true-p flymake-mode)) + (funcall orig 0) + (message "disabled flymake-mode"))) +(advice-add #'flymake-mode :around #'never-flymake-mode) + +(defun +grfn/wrap-lsp-haskell-process (argv) + (let* ((project-dir (locate-dominating-file + (buffer-file-name) + "hie.yaml")) + (shell-dot-nix (expand-file-name "shell.nix" project-dir))) + ;; (when (string-equal default-directory "/home/grfn/code/depot") + ;; (debug)) + (message "%s %s %s %s" + (buffer-file-name) + default-directory + project-dir + shell-dot-nix) + (if (file-exists-p shell-dot-nix) + `("bash" "-c" + ,(format "cd %s && nix-shell %s --run '%s'" + project-dir + shell-dot-nix + (s-join " " argv))) + argv))) + +(use-package! lsp-haskell + :after (lsp-mode lsp-ui haskell-mode) + ;; :hook + ;; (haskell-mode . lsp-haskell-enable) + :config + (setq lsp-haskell-process-path-hie "haskell-language-server-wrapper" + lsp-haskell-process-args-hie + '("-d" "-l" "/tmp/hie.log" "+RTS" "-M4G" "-H1G" "-K4G" "-A16M" "-RTS") + lsp-haskell-process-wrapper-function + #'+grfn/wrap-lsp-haskell-process) + (add-hook 'haskell-mode-hook #'+grfn/haskell-mode-setup 't)) + +(use-package! lsp-imenu + :after (lsp-mode lsp-ui) + :hook + (lsp-after-open . lsp-enable-imenu)) + +;; (use-package! counsel-etags +;; :ensure t +;; :init +;; (add-hook 'haskell-mode-hook +;; (lambda () +;; (add-hook 'after-save-hook +;; 'counsel-etags-virtual-update-tags 'append 'local))) +;; :config +;; (setq counsel-etags-update-interval 60) +;; ;; (push "build" counsel-etags-ignore-directories) +;; ) + +;; (use-package! evil-magit +;; :after (magit)) + +(use-package! writeroom-mode) + +(use-package! 
graphql-mode) + +(require 'whitespace) +(setq whitespace-style '(face lines-tail)) +(global-whitespace-mode t) +(add-hook 'org-mode-hook (lambda () (whitespace-mode -1)) t) + +(set-face-foreground 'whitespace-line +solarized-red) +(set-face-attribute 'whitespace-line nil :underline 't) + +;; (set-face-background 'ivy-posframe +solarized-s-base3) +;; (set-face-foreground 'ivy-posframe +solarized-s-base01) + +(let ((base03 "#002b36") + (base02 "#073642") + (base01 "#586e75") + (base00 "#657b83") + (base0 "#839496") + (base1 "#93a1a1") + (base2 "#eee8d5") + (base3 "#fdf6e3") + (yellow "#b58900") + (orange "#cb4b16") + (red "#dc322f") + (magenta "#d33682") + (violet "#6c71c4") + (blue "#268bd2") + (cyan "#2aa198") + (green "#859900")) + (custom-set-faces + `(agda2-highlight-keyword-face ((t (:foreground ,green)))) + `(agda2-highlight-string-face ((t (:foreground ,cyan)))) + `(agda2-highlight-number-face ((t (:foreground ,violet)))) + `(agda2-highlight-symbol-face ((((background ,base3)) (:foreground ,base01)))) + `(agda2-highlight-primitive-type-face ((t (:foreground ,blue)))) + `(agda2-highlight-bound-variable-face ((t nil))) + `(agda2-highlight-inductive-constructor-face ((t (:foreground ,green)))) + `(agda2-highlight-coinductive-constructor-face ((t (:foreground ,yellow)))) + `(agda2-highlight-datatype-face ((t (:foreground ,blue)))) + `(agda2-highlight-field-face ((t (:foreground ,red)))) + `(agda2-highlight-function-face ((t (:foreground ,blue)))) + `(agda2-highlight-module-face ((t (:foreground ,yellow)))) + `(agda2-highlight-postulate-face ((t (:foreground ,blue)))) + `(agda2-highlight-primitive-face ((t (:foreground ,blue)))) + `(agda2-highlight-record-face ((t (:foreground ,blue)))) + `(agda2-highlight-dotted-face ((t nil))) + `(agda2-highlight-operator-face ((t nil))) + `(agda2-highlight-error-face ((t (:foreground ,red :underline t)))) + `(agda2-highlight-unsolved-meta-face ((t (:background ,base2)))) + `(agda2-highlight-unsolved-constraint-face ((t (:background ,base2)))) + `(agda2-highlight-termination-problem-face ((t (:background ,orange :foreground ,base03)))) + `(agda2-highlight-incomplete-pattern-face ((t (:background ,orange :foreground ,base03)))) + `(agda2-highlight-typechecks-face ((t (:background ,cyan :foreground ,base03)))))) + + +(after! cider + (setq cider-prompt-for-symbol nil + cider-font-lock-dynamically 't + cider-save-file-on-load 't) + ) + +(defun +org-clocked-in-element () + (when-let ((item (car org-clock-history))) + (save-mark-and-excursion + (with-current-buffer (marker-buffer item) + (goto-char (marker-position item)) + (org-element-at-point))))) + +(comment + (setq elt (+org-clocked-in-item)) + + (eq 'headline (car elt)) + (plist-get (cadr elt) :raw-value) + ) + +(defun +org-headline-title (headline) + (when (eq 'headline (car elt)) + (plist-get (cadr elt) :raw-value))) + +(setq +ligatures-extra-symbols + (append +ligatures-extra-symbols + '(:equal "≡" + :not-equal "≠" + :is "≣" + :isnt "≢" + :lte "≤" + :gte "≥" + :subseteq "⊆" + ))) + +(after! python + (set-pretty-symbols! 'python-mode :merge t + :equal "==" + :not-equal "!=" + :lte "<=" + :gte ">=" + :is "is" + :isnt "is not" + :subseteq "issubset" + + ;; doom builtins + + ;; Functional + :def "def" + :lambda "lambda" + ;; Types + :null "None" + :true "True" :false "False" + :int "int" :str "str" + :float "float" + :bool "bool" + :tuple "tuple" + ;; Flow + :not "not" + :in "in" :not-in "not in" + :and "and" :or "or" + :for "for" + :return "return" :yield "yield")) + +(use-package! 
sqlup-mode + :hook + (sql-mode-hook . sqlup-mode) + (sql-interactive-mode-hook . sqlup-mode)) + +(use-package! emacsql) +(use-package! emacsql-psql + :after (emacsql)) + +(use-package! pyimport + :after (python)) + +(use-package! blacken + :after (python) + :init + (add-hook #'python-mode-hook #'blacken-mode) + :config + (setq blacken-only-if-project-is-blackened t + blacken-allow-py36 t + blacken-line-length 100)) + +(after! python + (defun +python-setup () + (setq-local fill-column 100 + whitespace-line-column 100 + flycheck-disabled-checkers '(python-flake8) + flycheck-checker 'python-pylint)) + + (add-hook #'python-mode-hook #'+python-setup) + (add-hook #'python-mode-hook #'lsp) + (remove-hook #'python-mode-hook #'pipenv-mode)) + +; (use-package! w3m +; :config +; (setq browse-url-browser-function +; `(("^https://app.clubhouse.io.*" . browse-url-firefox) +; ("^https://github.com.*" . browse-url-firefox) +; (".*" . browse-url-firefox)))) + +(use-package! ob-http + :config + (add-to-list 'org-babel-load-languages '(http . t))) + +;; (use-package! ob-ipython +;; :after (pyimport) +;; :config +;; (add-to-list 'org-babel-load-languages '(ipython . t)) +;; (setq ob-ipython-command + ;; "/home/griffin/code/urb/ciml-video-classifier/bin/jupyter")) + +(use-package! counsel-spotify) + +(after! counsel + (map! [remap counsel-org-capture] #'org-capture + [remap org-capture] #'org-capture)) + +(use-package! evil-snipe :disabled t) +(evil-snipe-mode -1) + +(use-package! rainbow-mode) + +(use-package! org-alert + :disabled t + :config + (org-alert-enable) + (setq alert-default-style 'libnotify + org-alert-headline-title "org")) + +(use-package! ob-async) + +(use-package! org-recent-headings + :after (org) + :config + (map! :n "SPC n r" #'org-recent-headings-ivy)) + +(use-package! org-sticky-header + :after (org) + :hook (org-mode-hook . org-sticky-header-mode) + :config + (setq-default org-sticky-header-heading-star "●")) + +(enable-theme 'grfn-solarized-light) + +;;; word-char +(add-hook! prog-mode + (modify-syntax-entry ?_ "w")) + +(add-hook! lisp-mode + (modify-syntax-entry ?- "w")) + +(after! flycheck + (put 'flycheck-python-pylint-executable 'safe-local-variable (lambda (_) t))) + +(defvar alembic-command "alembic" + "Command to execute when running alembic") + +(defvar alembic-dir-fun (lambda () default-directory) + "Reference to a function whose return value will be used as the directory to + run Alembic in") + +(put 'alembic-command 'safe-local-variable (lambda (_) t)) +(put 'alembic-dir-fun 'safe-local-variable (lambda (_) t)) + +(defun make-alembic-command (args) + (if (functionp alembic-command) + (funcall alembic-command args) + (concat alembic-command " " args))) + +(defun +grfn/extract-alembic-migration-name (output) + (unless (string-match (rx (0+ anything) "Generating " + (group (one-or-more (not (syntax whitespace)))) + " ..." 
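+ ;; Alembic prints "Generating /path/to/versions/xyz_message.py ... done";
+ ;; the single capture group in this regexp grabs that file path, which
+ ;; `generate-alembic-migration' then visits.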
(one-or-more (syntax whitespace)) "done" + (0+ anything)) + output) + (user-error "Error: %s" output)) + (match-string-no-properties 1 output)) + +(defun -run-alembic (args) + (let* ((default-directory (funcall alembic-dir-fun)) + (command (make-alembic-command args)) + ;; (format "nix-shell --run 'alembic %s'" args) + ;; (format "%s %s" alembic-command args) + (res + (with-temp-buffer + (cons + (shell-command command t) + (s-replace-regexp + "^.*Nix search path entry.*$" "" + (buffer-string))))) + (exit-code (car res)) + (out (cdr res))) + ;; (if (= 0 exit-code) + ;; out + ;; (error "Error running %s: %s" command out)) + out + )) + +(comment + --exit-code + --bs + ) + +(defun run-alembic (args) + (interactive "sAlembic command: ") + (message "%s" (-run-alembic args))) + +(defun generate-alembic-migration (msg &rest args) + (interactive "sMessage: ") + (-> + (format "revision %s -m \"%s\"" + (s-join " " args) + msg) + (-run-alembic) + (+grfn/extract-alembic-migration-name) + (find-file-other-window))) + +(cl-defun alembic-upgrade (&optional revision &key namespace) + (interactive "sRevision: ") + (let ((default-directory (funcall alembic-dir-fun))) + (run-alembic (format "%s upgrade %s" + (if namespace (concat "-n " namespace) "") + (or revision "head"))))) + +(defun alembic-downgrade (revision) + (interactive "sRevision: ") + (let ((default-directory (funcall alembic-dir-fun))) + (run-alembic (format "downgrade %s" (or revision "head"))))) + +(use-package! gnuplot) +(use-package! gnuplot-mode :after gnuplot) +(use-package! string-inflection) + +(after! anaconda-mode + ;; (set-company-backend! 'anaconda-mode #'company-yasnippet) + ) + +;; (add-hook! python-mode +;; (capf)) + +(cl-defstruct pull-request url number title author repository) + +(defun grfn/num-inbox-items () + (length (org-elements-agenda-match "inbox" t))) + +(use-package! dhall-mode + :mode "\\.dhall\\'") + +(use-package! github-review + :after forge) + +(after! forge + (set-popup-rule! + "^\\*forge" + :size 0.75)) + +(defun grfn/org-add-db-connection-params () + (interactive) + (ivy-read + "DB to connect to: " + (-map (lambda (opts) + (propertize (symbol-name (car opts)) + 'header-args (cdr opts))) + db-connection-param-options) + :require-match t + :action + (lambda (opt) + (let ((header-args (get-text-property 0 'header-args opt))) + (org-set-property "header-args" header-args))))) + +(use-package! kubernetes + :commands (kubernetes-overview)) + +(use-package! k8s-mode + :hook (k8s-mode . yas-minor-mode)) + +(use-package! sx) + +;; (use-package! nix-update +;; :config +;; (map! (:map nix-mode-map +;; (:leader +;; :desc "Update fetcher" :nv #'nix-update-fetch)))) + + +(after! lsp-haskell + (lsp-register-client + (make-lsp--client + :new-connection (lsp-stdio-connection (lambda () (lsp-haskell--hie-command))) + :major-modes '(haskell-mode) + :server-id 'hie + ;; :multi-root t + ;; :initialization-options 'lsp-haskell--make-init-options + ) + ) + ) + +(solaire-global-mode -1) + +(use-package! wsd-mode) + +(use-package! metal-mercury-mode) +(use-package! flycheck-mercury + :after (metal-mercury-mode flycheck-mercury)) + +(use-package! direnv + :config (direnv-mode)) + +(after! erc + ;; (setq erc-autojoin-channels-alist '(("freenode.net" "#nixos" "#haskell" "##tvl"))) + ) + +(defun evil-disable-insert-state-bindings () + evil-disable-insert-state-bindings) + +;; (use-package! terraform-mode) +;; (use-package! company-terraform +;; :after terraform-mode +;; :config (company-terraform-init)) + +(use-package! 
znc + :config + (setq znc-servers + '(("znc.gws.fyi" 5000 t + ((freenode "glittershark" "Ompquy")))))) + +(use-package! jsonnet-mode + :config + (map! + (:map jsonnet-mode-map + (:n "g SPC" #'jsonnet-eval-buffer)))) + +(add-to-list 'safe-local-variable-values + '(truncate-lines . t)) diff --git a/users/glittershark/emacs.d/cpp.el b/users/glittershark/emacs.d/cpp.el new file mode 100644 index 000000000000..3f118ec37e84 --- /dev/null +++ b/users/glittershark/emacs.d/cpp.el @@ -0,0 +1,39 @@ +;;; ~/code/depot/users/glittershark/emacs.d/cpp.el -*- lexical-binding: t; -*- + + +(load! "google-c-style") + +(after! flycheck + (add-to-list 'flycheck-disabled-checkers 'c/c++-gcc) + (add-to-list 'flycheck-disabled-checkers 'c/c++-clang)) + +(defun +grfn/cpp-setup () + (when (s-starts-with? + "/home/grfn/code/depot/third_party/nix" + (buffer-file-name)) + (setq lsp-clients-clangd-executable "/home/grfn/code/depot/users/glittershark/emacs.d/nix-clangd.sh" + lsp-clients-clangd-args nil) + (google-set-c-style) + (lsp) + (add-to-list 'flycheck-disabled-checkers 'c/c++-gcc) + (add-to-list 'flycheck-disabled-checkers 'c/c++-clang))) + +(add-hook 'c++-mode-hook #'+grfn/cpp-setup) + +(use-package! protobuf-mode) + +(use-package! clang-format+ + :config + (add-hook 'c-mode-common-hook #'clang-format+-mode)) + +(map! + (:map c++-mode-map + :leader + (:n "/ i" #'counsel-semantic-or-imenu))) + +(comment + (setq + lsp-clients-clangd-executable + "/home/grfn/code/depot/third_party/nix/clangd.sh" + lsp-clients-clangd-args nil) + ) diff --git a/users/glittershark/emacs.d/email.el b/users/glittershark/emacs.d/email.el new file mode 100644 index 000000000000..240fcfc53861 --- /dev/null +++ b/users/glittershark/emacs.d/email.el @@ -0,0 +1,28 @@ +;;; ~/code/depot/users/glittershark/emacs.d/email.el -*- lexical-binding: t; -*- + +(after! notmuch + (setq notmuch-saved-searches + '((:name "inbox" :query "tag:inbox tag:important not tag:trash" :key "i") + (:name "flagged" :query "tag:flagged" :key "f") + (:name "sent" :query "tag:sent" :key "s") + (:name "drafts" :query "tag:draft" :key "d") + + (:name "work" :query "tag:inbox and tag:important and path:work/**" + :key "w") + (:name "personal" :query "tag:inbox and tag:important and path:personal/**" + :key "p")) + message-send-mail-function 'message-send-mail-with-sendmail) + + (add-hook! notmuch-message-mode-hook #'notmuch-company-setup)) + +(setq notmuch-saved-searches + '((:name "inbox" :query "tag:inbox tag:important not tag:trash" :key "i") + (:name "flagged" :query "tag:flagged" :key "f") + (:name "sent" :query "tag:sent" :key "s") + (:name "drafts" :query "tag:draft" :key "d") + + (:name "work" :query "tag:inbox and tag:important and path:work/**" + :key "w") + (:name "personal" :query "tag:inbox and tag:important and path:personal/**" + :key "p")) + message-send-mail-function 'message-send-mail-with-sendmail) diff --git a/users/glittershark/emacs.d/github-org.el b/users/glittershark/emacs.d/github-org.el new file mode 100644 index 000000000000..be8af3f39466 --- /dev/null +++ b/users/glittershark/emacs.d/github-org.el @@ -0,0 +1,99 @@ +;;; ~/.doom.d/github-org.el -*- lexical-binding: t; -*- + +(require 'ghub) + +(defun grfn/alist->plist (alist) + (->> alist + (-mapcat (lambda (pair) + (list (intern (concat ":" (symbol-name (car pair)))) + (cdr pair)))))) + +;;; + +(cl-defstruct pull-request url number title author repository) + +(defun grfn/query-pulls (query) + (let ((resp (ghub-graphql "query reviewRequests($query: String!) 
{ + reviewRequests: search( + type:ISSUE, + query: $query, + first: 100 + ) { + issueCount + nodes { + ... on PullRequest { + url + number + title + author { + login + ... on User { name } + } + repository { + name + owner { login } + } + } + } + } + }" `((query . ,query))))) + (->> resp + (alist-get 'data) + (alist-get 'reviewRequests) + (alist-get 'nodes) + (-map + (lambda (pr) + (apply + #'make-pull-request + (grfn/alist->plist pr))))))) + +(defun grfn/requested-changes ()) + +(defun grfn/pull-request->org-headline (format-string level pr) + (check-type format-string string) + (check-type level integer) + (check-type pr pull-request) + (s-format (concat (make-string level ?*) " " format-string) + 'aget + `((author . ,(or (->> pr (pull-request-author) (alist-get 'name)) + "no author")) + (owner . ,(->> pr (pull-request-repository) + (alist-get 'owner) + (alist-get 'login))) + (repo . ,(->> pr (pull-request-repository) (alist-get 'name))) + (pr-link . ,(org-make-link-string + (pull-request-url pr) + (pull-request-title pr))) + (today . ,(format-time-string "%Y-%m-%d %a"))))) + +(defun grfn/org-headlines-from-review-requests (level) + "Create org-mode headlines at LEVEL from all review-requested PRs on Github" + (interactive "*nLevel: ") + (let* ((prs (grfn/query-pulls + "is:open is:pr review-requested:glittershark archived:false")) + (text (mapconcat + (apply-partially + #'grfn/pull-request->org-headline + "TODO Review ${author}'s PR on ${owner}/${repo}: ${pr-link} :pr: +SCHEDULED: <${today}>" + level) prs "\n"))) + (save-mark-and-excursion + (insert text)) + (org-align-tags 't))) + +(defun grfn/org-headlines-from-requested-changes (level) + "Create org-mode headlines at LEVEL from all PRs with changes requested + on Github" + (interactive "*nLevel: ") + (let* ((prs (grfn/query-pulls + (concat "is:pr is:open author:glittershark archived:false " + "sort:updated-desc review:changes-requested"))) + (text (mapconcat + (apply-partially + #'grfn/pull-request->org-headline + "TODO Address review comments on ${pr-link} :pr: +SCHEDULED: <${today}>" + level) prs "\n"))) + (save-mark-and-excursion + (insert text)) + (org-align-tags 't))) diff --git a/users/glittershark/emacs.d/google-c-style.el b/users/glittershark/emacs.d/google-c-style.el new file mode 100644 index 000000000000..9bb12c61aae4 --- /dev/null +++ b/users/glittershark/emacs.d/google-c-style.el @@ -0,0 +1,151 @@ +;;; google-c-style.el --- Google's C/C++ style for c-mode + +;; Keywords: c, tools + +;; google-c-style.el is Copyright (C) 2008 Google Inc. All Rights Reserved. +;; +;; It is free software; you can redistribute it and/or modify it under the +;; terms of either: +;; +;; a) the GNU General Public License as published by the Free Software +;; Foundation; either version 1, or (at your option) any later version, or +;; +;; b) the "Artistic License". + +;;; Commentary: + +;; Provides the google C/C++ coding style. You may wish to add +;; `google-set-c-style' to your `c-mode-common-hook' after requiring this +;; file. 
For example: +;; +;; (add-hook 'c-mode-common-hook 'google-set-c-style) +;; +;; If you want the RETURN key to go to the next line and space over +;; to the right place, add this to your .emacs right after the load-file: +;; +;; (add-hook 'c-mode-common-hook 'google-make-newline-indent) + +;;; Code: + +;; For some reason 1) c-backward-syntactic-ws is a macro and 2) under Emacs 22 +;; bytecode cannot call (unexpanded) macros at run time: +(eval-when-compile (require 'cc-defs)) + +;; Wrapper function needed for Emacs 21 and XEmacs (Emacs 22 offers the more +;; elegant solution of composing a list of lineup functions or quantities with +;; operators such as "add") +(defun google-c-lineup-expression-plus-4 (langelem) + "Indents to the beginning of the current C expression plus 4 spaces. + +This implements title \"Function Declarations and Definitions\" +of the Google C++ Style Guide for the case where the previous +line ends with an open parenthese. + +\"Current C expression\", as per the Google Style Guide and as +clarified by subsequent discussions, means the whole expression +regardless of the number of nested parentheses, but excluding +non-expression material such as \"if(\" and \"for(\" control +structures. + +Suitable for inclusion in `c-offsets-alist'." + (save-excursion + (back-to-indentation) + ;; Go to beginning of *previous* line: + (c-backward-syntactic-ws) + (back-to-indentation) + (cond + ;; We are making a reasonable assumption that if there is a control + ;; structure to indent past, it has to be at the beginning of the line. + ((looking-at "\\(\\(if\\|for\\|while\\)\\s *(\\)") + (goto-char (match-end 1))) + ;; For constructor initializer lists, the reference point for line-up is + ;; the token after the initial colon. + ((looking-at ":\\s *") + (goto-char (match-end 0)))) + (vector (+ 4 (current-column))))) + +;;;###autoload +(defconst google-c-style + `((c-recognize-knr-p . nil) + (c-enable-xemacs-performance-kludge-p . t) ; speed up indentation in XEmacs + (c-basic-offset . 2) + (indent-tabs-mode . nil) + (c-comment-only-line-offset . 0) + (c-hanging-braces-alist . ((defun-open after) + (defun-close before after) + (class-open after) + (class-close before after) + (inexpr-class-open after) + (inexpr-class-close before) + (namespace-open after) + (inline-open after) + (inline-close before after) + (block-open after) + (block-close . c-snug-do-while) + (extern-lang-open after) + (extern-lang-close after) + (statement-case-open after) + (substatement-open after))) + (c-hanging-colons-alist . ((case-label) + (label after) + (access-label after) + (member-init-intro before) + (inher-intro))) + (c-hanging-semi&comma-criteria + . (c-semi&comma-no-newlines-for-oneline-inliners + c-semi&comma-inside-parenlist + c-semi&comma-no-newlines-before-nonblanks)) + (c-indent-comments-syntactically-p . t) + (comment-column . 40) + (c-indent-comment-alist . ((other . (space . 2)))) + (c-cleanup-list . (brace-else-brace + brace-elseif-brace + brace-catch-brace + empty-defun-braces + defun-close-semi + list-close-comma + scope-operator)) + (c-offsets-alist . ((arglist-intro google-c-lineup-expression-plus-4) + (func-decl-cont . ++) + (member-init-intro . ++) + (inher-intro . ++) + (comment-intro . 0) + (arglist-close . c-lineup-arglist) + (topmost-intro . 0) + (block-open . 0) + (inline-open . 0) + (substatement-open . 0) + (statement-cont + . 
+ (,(when (fboundp 'c-no-indent-after-java-annotations) + 'c-no-indent-after-java-annotations) + ,(when (fboundp 'c-lineup-assignments) + 'c-lineup-assignments) + ++)) + (label . /) + (case-label . +) + (statement-case-open . +) + (statement-case-intro . +) ; case w/o { + (access-label . /) + (innamespace . 0)))) + "Google C/C++ Programming Style.") + +;;;###autoload +(defun google-set-c-style () + "Set the current buffer's c-style to Google C/C++ Programming + Style. Meant to be added to `c-mode-common-hook'." + (interactive) + (make-local-variable 'c-tab-always-indent) + (setq c-tab-always-indent t) + (c-add-style "Google" google-c-style t)) + +;;;###autoload +(defun google-make-newline-indent () + "Sets up preferred newline behavior. Not set by default. Meant + to be added to `c-mode-common-hook'." + (interactive) + (define-key c-mode-base-map "\C-m" 'newline-and-indent) + (define-key c-mode-base-map [ret] 'newline-and-indent)) + +(provide 'google-c-style) +;;; google-c-style.el ends here diff --git a/users/glittershark/emacs.d/grid.el b/users/glittershark/emacs.d/grid.el new file mode 100644 index 000000000000..ad524504e9a9 --- /dev/null +++ b/users/glittershark/emacs.d/grid.el @@ -0,0 +1,128 @@ +;;; ~/.doom.d/grid.el -*- lexical-binding: t; -*- + +(require 's) + +(defun grfn/all-match-groups (s) + (loop for n from 1 + for x = (match-string n s) + while x + collect x)) + +(defun projectile-grid-ff (path &optional ask) + "Call `find-file' function on PATH when it is not nil and the file exists. +If file does not exist and ASK in not nil it will ask user to proceed." + (if (or (and path (file-exists-p path)) + (and ask (yes-or-no-p + (s-lex-format + "File does not exists. Create a new buffer ${path} ?")))) + (find-file path))) + +(defun projectile-grid-goto-file (filepath &optional ask) + "Find FILEPATH after expanding root. ASK is passed straight to `projectile-grid-ff'." + (projectile-grid-ff (projectile-expand-root filepath) ask)) + +(defun projectile-grid-choices (ds) + "Uses `projectile-dir-files' function to find files in directories. +The DIRS is list of lists consisting of a directory path and regexp to filter files from that directory. +Optional third element can be present in the DS list. The third element will be a prefix to be placed before +the filename in the resulting choice. +Returns a hash table with keys being short names (choices) and values being relative paths to the files." + (loop with hash = (make-hash-table :test 'equal) + for (dir re prefix) in ds do + (loop for file in (projectile-dir-files (projectile-expand-root dir)) do + (when (string-match re file) + (puthash + (concat (or prefix "") + (s-join "/" (grfn/all-match-groups file))) + (concat dir file) + hash))) + finally return hash)) + +(defmacro projectile-grid-find-resource (prompt dirs &optional newfile-template) + "Presents files from DIRS with PROMPT to the user using `projectile-completing-read'. +If users chooses a non existant file and NEWFILE-TEMPLATE is not nil +it will use that variable to interpolate the name for the new file. +NEWFILE-TEMPLATE will be the argument for `s-lex-format'. +The bound variable is \"filename\"." 
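+ ;; FIXME: in the "file does not exist" branch below, `filepath' is nil (the
+ ;; CHOICES lookup failed), yet it is what gets passed to NEWFILE-TEMPLATE.
+ ;; The template lambdas later in this file (e.g. `projectile-grid-find-model')
+ ;; destructure the short choice name, so `filename' is probably the value
+ ;; that was meant here.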
+ `(lexical-let ((choices (projectile-grid-choices ,dirs))) + (projectile-completing-read + ,prompt + (hash-table-keys choices) + :action + (lambda (c) + (let* ((filepath (gethash c choices)) + (filename c)) ;; so `s-lex-format' can interpolate FILENAME + (if filepath + (projectile-grid-goto-file filepath) + (when-let ((newfile-template ,newfile-template)) + (projectile-grid-goto-file + (funcall newfile-template filepath) + ;; (cond + ;; ((functionp newfile-template) (funcall newfile-template filepath)) + ;; ((stringp newfile-template) (s-lex-format newfile-template))) + t)))))))) + +(defun projectile-grid-find-model () + "Find a model." + (interactive) + (projectile-grid-find-resource + "model: " + '(("python/urbint_lib/models/" + "\\(.+\\)\\.py$") + ("python/urbint_lib/" + "\\(.+\\)/models/\\(.+\\).py$")) + (lambda (filename) + (pcase (s-split "/" filename) + (`(,model) + (s-lex-format "python/urbint_lib/models/${model}.py")) + (`(,app ,model) + (s-lex-format "python/urbint_lib/${app}/models/${model}.py")))))) + +(defun projectile-grid-find-repository () + "Find a repository." + (interactive) + (projectile-grid-find-resource + "repository: " + '(("python/urbint_lib/repositories/" + "\\(.+\\)\\.py$") + ("python/urbint_lib/" + "\\(.+\\)/repositories/\\(.+\\).py$")) + (lambda (filename) + (pcase (s-split "/" filename) + (`(,repository) + (s-lex-format "python/urbint_lib/repositories/${repository}.py")) + (`(,app ,repository) + (s-lex-format "python/urbint_lib/${app}/repositories/${repository}.py")))))) + +(defun projectile-grid-find-controller () + "Find a controller." + (interactive) + (projectile-grid-find-resource + "controller: " + '(("backend/src/grid/api/controllers/" + "\\(.+\\)\\.py$") + ("backend/src/grid/api/apps/" + "\\(.+\\)/controllers/\\(.+\\).py$")) + (lambda (filename) + (pcase (s-split "/" filename) + (`(,controller) + (s-lex-format "backend/src/grid/api/controllers/${controller}.py")) + (`(,app ,controller) + (s-lex-format "backend/src/grid/api/apps/${app}/controllers/${controller}.py")))))) + +(setq projectile-grid-mode-map + (let ((map (make-keymap))) + (map! + (:map map + (:leader + (:desc "Edit..." :prefix "e" + :desc "Model" :n "m" #'projectile-grid-find-model + :desc "Controller" :n "c" #'projectile-grid-find-controller + :desc "Repository" :n "r" #'projectile-grid-find-repository)))) + map)) + +(define-minor-mode projectile-grid-mode + "Minor mode for finding files in GRID" + :init-value nil + :lighter " GRID" + :keymap projectile-grid-mode-map) diff --git a/users/glittershark/emacs.d/init.el b/users/glittershark/emacs.d/init.el new file mode 100644 index 000000000000..99a6ad74995d --- /dev/null +++ b/users/glittershark/emacs.d/init.el @@ -0,0 +1,234 @@ +;;; private/grfn/init.el -*- lexical-binding: t; -*- + +(doom! :completion + company ; the ultimate code completion backend + ;;helm ; the *other* search engine for love and life + ;;ido ; the other *other* search engine... 
+ ivy ; a search engine for love and life + + :ui + ;;deft ; notational velocity for Emacs + doom ; what makes DOOM look the way it does + ;doom-dashboard ; a nifty splash screen for Emacs + doom-quit ; DOOM quit-message prompts when you quit Emacs + ;fill-column ; a `fill-column' indicator + hl-todo ; highlight TODO/FIXME/NOTE tags + ;;indent-guides ; highlighted indent columns + modeline ; snazzy, Atom-inspired modeline, plus API + nav-flash ; blink the current line after jumping + ;;neotree ; a project drawer, like NERDTree for vim + ophints ; highlight the region an operation acts on + (popup ; tame sudden yet inevitable temporary windows + +all ; catch all popups that start with an asterix + +defaults) ; default popup rules + ligatures ; replace bits of code with pretty symbols + ;;tabbar ; FIXME an (incomplete) tab bar for Emacs + ;;treemacs ; a project drawer, like neotree but cooler + unicode ; extended unicode support for various languages + vc-gutter ; vcs diff in the fringe + vi-tilde-fringe ; fringe tildes to mark beyond EOB + window-select ; visually switch windows + workspaces ; tab emulation, persistence & separate workspaces + + :editor + (evil +everywhere); come to the dark side, we have cookies + file-templates ; auto-snippets for empty files + fold ; (nigh) universal code folding + ;;(format +onsave) ; automated prettiness + ;;lispy ; vim for lisp, for people who dont like vim + multiple-cursors ; editing in many places at once + ;;parinfer ; turn lisp into python, sort of + rotate-text ; cycle region at point between text candidates + snippets ; my elves. They type so I don't have to + word-wrap + + :emacs + (dired ; making dired pretty [functional] + ;;+ranger ; bringing the goodness of ranger to dired + ;;+icons ; colorful icons for dired-mode + ) + electric ; smarter, keyword-based electric-indent + ;;eshell ; a consistent, cross-platform shell (WIP) + ;;term ; terminals in Emacs + vc ; version-control and Emacs, sitting in a tree + (undo +tree) + + :tools + ;;ansible + ;;debugger ; FIXME stepping through code, to help you add bugs + ;;direnv + docker + ;;editorconfig ; let someone else argue about tabs vs spaces + ;; ein ; tame Jupyter notebooks with emacs + (eval +overlay) ; run code, run (also, repls) + gist ; interacting with github gists + (lookup ; helps you navigate your code and documentation + +docsets) ; ...or in Dash docsets locally + lsp + ;;macos ; MacOS-specific commands + magit ; a git porcelain for Emacs + make ; run make tasks from Emacs + pass ; password manager for nerds + pdf ; pdf enhancements + ;;prodigy ; FIXME managing external services & code builders + ;;rgb ; creating color strings + ;;terraform ; infrastructure as code + ;;tmux ; an API for interacting with tmux + ;;upload ; map local to remote projects via ssh/ftp + ;;wakatime + ;;vterm ; another terminals in Emacs + + :checkers + syntax ; tasing you for every semicolon you forget + ; spell ; tasing you for misspelling mispelling + + :lang + agda ; types of types of types of types... + ;;assembly ; assembly for fun or debugging + cc ; C/C++/Obj-C madness + clojure ; java with a lisp + common-lisp ; if you've seen one lisp, you've seen them all + ; coq ; proofs-as-programs + ;;crystal ; ruby at the speed of c + ;;csharp ; unity, .NET, and mono shenanigans + data ; config/data formats + erlang ; an elegant language for a more civilized age + elixir ; erlang done right + ;;elm ; care for a cup of TEA? 
+ emacs-lisp ; drown in parentheses + ;;ess ; emacs speaks statistics + ;;go ; the hipster dialect + ;; (haskell +intero) ; a language that's lazier than I am + haskell ; a language that's lazier than I am + ;;hy ; readability of scheme w/ speed of python + idris ; + ;;(java +meghanada) ; the poster child for carpal tunnel syndrome + javascript ; all(hope(abandon(ye(who(enter(here)))))) + julia ; a better, faster MATLAB + ;;kotlin ; a better, slicker Java(Script) + latex ; writing papers in Emacs has never been so fun + ;;ledger ; an accounting system in Emacs + lua ; one-based indices? one-based indices + markdown ; writing docs for people to ignore + ;;nim ; python + lisp at the speed of c + nix ; I hereby declare "nix geht mehr!" + ;;ocaml ; an objective camel + (org ; organize your plain life in plain text + +dragndrop ; drag & drop files/images into org buffers + +attach ; custom attachment system + +babel ; running code in org + +capture ; org-capture in and outside of Emacs + +export ; Exporting org to whatever you want + ;; +habit ; Keep track of your habits + +present ; Emacs for presentations + +pretty + +brain + +protocol) ; Support for org-protocol:// links + ;;perl ; write code no one else can comprehend + ;;php ; perl's insecure younger brother + ;;plantuml ; diagrams for confusing people more + purescript ; javascript, but functional + (python +lsp) ; beautiful is better than ugly + ;;qt ; the 'cutest' gui framework ever + racket ; a DSL for DSLs + rest ; Emacs as a REST client + ;;ruby ; 1.step do {|i| p "Ruby is #{i.even? ? 'love' : 'life'}"} + rust ; Fe2O3.unwrap().unwrap().unwrap().unwrap() + ;;scala ; java, but good + (sh +fish) ; she sells (ba|z|fi)sh shells on the C xor + ;;solidity ; do you need a blockchain? No. + ;;swift ; who asked for emoji variables? + ;;terra ; Earth and Moon in alignment for performance. + ;;web ; the tubes + ;;vala ; GObjective-C + + ;; Applications are complex and opinionated modules that transform Emacs + ;; toward a specific purpose. They may have additional dependencies and + ;; should be loaded late. + :app + ;;(email +gmail) ; emacs as an email client + irc ; how neckbeards socialize + ;;(rss +org) ; emacs as an RSS reader + twitter ; twitter client https://twitter.com/vnought + ;;(write ; emacs as a word processor (latex + org + markdown) + ;; +wordnut ; wordnet (wn) search + ;; +langtool) ; a proofreader (grammar/style check) for Emacs + + :email + ;; (mu4e +gmail) + notmuch + + :collab + ;;floobits ; peer programming for a price + ;;impatient-mode ; show off code over HTTP + + :config + ;; For literate config users. This will tangle+compile a config.org + ;; literate config in your `doom-private-dir' whenever it changes. + ;;literate + + ;; The default module sets reasonable defaults for Emacs. It also + ;; provides a Spacemacs-inspired keybinding scheme and a smartparens + ;; config. Use it as a reference for your own modules. + (default +bindings +smartparens)) +(custom-set-variables + ;; custom-set-variables was added by Custom. + ;; If you edit it by hand, you could mess it up, so be careful. + ;; Your init file should contain only one such instance. + ;; If there is more than one, they won't work right. 
+ '(doom-big-font-mode nil) + '(flycheck-javascript-flow-args nil) + '(org-agenda-files + '("/home/griffin/notes/personal.org" "/home/griffin/notes/2020-01-27-data-pipeline-deploy-mismatch.org" "/home/griffin/notes/architecture.org" "/home/griffin/notes/cooking.org" "/home/griffin/notes/culture-survey.org" "/home/griffin/notes/dir-structure.org" "/home/griffin/notes/dnd.org" "/home/griffin/notes/inbox.org" "/home/griffin/notes/misc-todo.org" "/home/griffin/notes/nix-talk.org" "/home/griffin/notes/notes.org" "/home/griffin/notes/one-on-one.org" "/home/griffin/notes/work.org" "/home/griffin/notes/xanthous.org" "/home/griffin/notes/xgboost.org")) + '(safe-local-variable-values + '((intero-stack-yaml . "/home/griffin/code/mlem/stack.yaml") + (elisp-lint-indent-specs + (if-let* . 2) + (when-let* . 1) + (let* . defun) + (nrepl-dbind-response . 2) + (cider-save-marker . 1) + (cider-propertize-region . 1) + (cider-map-repls . 1) + (cider--jack-in . 1) + (cider--make-result-overlay . 1) + (insert-label . defun) + (insert-align-label . defun) + (insert-rect . defun) + (cl-defun . 2) + (with-parsed-tramp-file-name . 2) + (thread-first . 1) + (thread-last . 1)) + (checkdoc-package-keywords-flag) + (cider-jack-in-default . "shadow-cljs") + (projectile-project-root . "/home/griffin/code/urb/grid/backend/src") + (python-pytest-executable . "/home/griffin/code/urb/grid/backend/src/.venv/bin/pytest")))) +(custom-set-faces + ;; custom-set-faces was added by Custom. + ;; If you edit it by hand, you could mess it up, so be careful. + ;; Your init file should contain only one such instance. + ;; If there is more than one, they won't work right. + '(default ((((class color) (min-colors 89)) (:foreground "#657b83" :background "#fdf6e3")))) + '(agda2-highlight-bound-variable-face ((t nil))) + '(agda2-highlight-coinductive-constructor-face ((t (:foreground "#b58900")))) + '(agda2-highlight-datatype-face ((t (:foreground "#268bd2")))) + '(agda2-highlight-dotted-face ((t nil))) + '(agda2-highlight-error-face ((t (:foreground "#dc322f" :underline t)))) + '(agda2-highlight-field-face ((t (:foreground "#dc322f")))) + '(agda2-highlight-function-face ((t (:foreground "#268bd2")))) + '(agda2-highlight-incomplete-pattern-face ((t (:background "#cb4b16" :foreground "#002b36")))) + '(agda2-highlight-inductive-constructor-face ((t (:foreground "#859900")))) + '(agda2-highlight-keyword-face ((t (:foreground "#859900")))) + '(agda2-highlight-module-face ((t (:foreground "#b58900")))) + '(agda2-highlight-number-face ((t (:foreground "#6c71c4")))) + '(agda2-highlight-operator-face ((t nil))) + '(agda2-highlight-postulate-face ((t (:foreground "#268bd2")))) + '(agda2-highlight-primitive-face ((t (:foreground "#268bd2")))) + '(agda2-highlight-primitive-type-face ((t (:foreground "#268bd2")))) + '(agda2-highlight-record-face ((t (:foreground "#268bd2")))) + '(agda2-highlight-string-face ((t (:foreground "#2aa198")))) + '(agda2-highlight-symbol-face ((((background "#fdf6e3")) (:foreground "#586e75")))) + '(agda2-highlight-termination-problem-face ((t (:background "#cb4b16" :foreground "#002b36")))) + '(agda2-highlight-typechecks-face ((t (:background "#2aa198" :foreground "#002b36")))) + '(agda2-highlight-unsolved-constraint-face ((t (:background "#eee8d5")))) + '(agda2-highlight-unsolved-meta-face ((t (:background "#eee8d5"))))) diff --git a/users/glittershark/emacs.d/irc.el b/users/glittershark/emacs.d/irc.el new file mode 100644 index 000000000000..6d378fe2075e --- /dev/null +++ b/users/glittershark/emacs.d/irc.el @@ -0,0 +1,125 
@@ +;;; ~/.doom.d/irc.el + +(require 'erc) +(require 'alert) + +(defun irc-connect () + (interactive) + (let ((pw (s-trim (shell-command-to-string "pass irccloud/freenode"))) + (gnutls-verify-error nil)) + (erc-tls :server "bnc.irccloud.com" + :port 6697 + :nick "grfn" + :password (concat "bnc@" + (s-trim (shell-command-to-string "hostname")) + ":" + pw)))) + + +(defgroup erc-alert nil + "Alert me using alert.el for important ERC messages" + :group 'erc) + +(defcustom erc-noise-regexp + "\\(Logging in:\\|Signing off\\|You're now away\\|Welcome back\\)" + "This regexp matches unwanted noise." + :type 'regexp + :group 'erc) + +(setq tvl-enabled? t) + +(defun disable-tvl-notifications () + (interactive) + (setq tvl-enabled? nil)) + +(defun enable-tvl-notifications () + (interactive) + (setq tvl-enabled? t)) + +(defun erc-alert-important-p (info) + (let ((message (plist-get info :message)) + (erc-message (-> info (plist-get :data) (plist-get :message))) + (erc-channel (-> info (plist-get :data) (plist-get :channel)))) + (and erc-message + (not (or (string-match "^\\** *Users on #" message) + (string-match erc-noise-regexp + message))) + (or (and tvl-enabled? + (string-equal erc-channel "##tvl")) + (string-match "glittershark" message))))) + +(comment + last-info + erc-noise-regexp + (setq tvl-enabled? nil) + ) + +(defun my-erc-hook (&optional match-type nick message) + "Shows a notification, when user's nick was mentioned. +If the buffer is currently not visible, makes it sticky." + (setq last-message message) + (if (or (null match-type) (not (eq match-type 'fool))) + (let (alert-log-messages) + (alert (or message (buffer-string)) + :severity (if (string-match "glittershark" (or message "")) + 'high 'low) + :title (or nick (buffer-name)) + :data `(:message ,(or message (buffer-string)) + :channel ,(or nick (buffer-name))))))) + +(add-hook 'erc-text-matched-hook 'my-erc-hook) +(add-hook 'erc-insert-modify-hook 'my-erc-hook) + +(defun my-erc-define-alerts (&rest ignore) + ;; Unless the user has recently typed in the ERC buffer, highlight the fringe + (alert-add-rule + :status '(buried visible idle) + :severity '(moderate high urgent) + :mode 'erc-mode + :predicate + #'(lambda (info) + (and (not (eq (current-buffer) (plist-get info :buffer))) + (string-match "glittershark:" (plist-get info :message)))) + :persistent + #'(lambda (info) + ;; If the buffer is buried, or the user has been idle for + ;; `alert-reveal-idle-time' seconds, make this alert + ;; persistent. Normally, alerts become persistent after + ;; `alert-persist-idle-time' seconds. 
+ (memq (plist-get info :status) '(buried idle))) + :style 'message + :continue t) + + (alert-add-rule + :status 'buried + :mode 'erc-mode + :predicate #'erc-alert-important-p + :style 'libnotify + :append t) + + (alert-add-rule + :status 'buried + :mode 'erc-mode + :predicate #'erc-alert-important-p + :style 'message + :append t) + + (alert-add-rule + :mode 'erc-mode + :predicate #'erc-alert-important-p + :style 'log + :append t) + + (alert-add-rule :mode 'erc-mode :style 'ignore :append t)) + +(add-hook 'erc-connect-pre-hook 'my-erc-define-alerts) + +(defun fix-irc-message (msg) + (let ((msg (s-trim msg))) + (if (string-equal msg ":q") "" msg))) + +(advice-add #'erc-user-input :filter-return #'fix-irc-message) + +(comment + (my-erc-define-alerts) + ) diff --git a/users/glittershark/emacs.d/lisp.el b/users/glittershark/emacs.d/lisp.el new file mode 100644 index 000000000000..e18a1609b231 --- /dev/null +++ b/users/glittershark/emacs.d/lisp.el @@ -0,0 +1,38 @@ +;;; ~/code/depot/users/glittershark/emacs.d/lisp.el -*- lexical-binding: t; -*- + +(defun grfn/sly-panettone () + (interactive) + (sly + (concat + (s-trim + (shell-command-to-string + "nix-build -o sbcl -E 'with import ~/code/depot {}; nix.buildLisp.sbclWith [web.panettone]'")) + "/bin/sbcl"))) + +(defun grfn/setup-lisp () + (interactive) + (unless paxedit-mode (paxedit-mode 1)) + (rainbow-delimiters-mode) + (flycheck-mode -1)) + +(add-hook 'common-lisp-lisp-mode-hook #'grfn/setup-lisp) + +(defun sly-run-tests () + (interactive) + ;; TODO: handle other test frameworks + (let ((orig-window (get-buffer-window))) + (sly-eval '(fiveam:run!)) + (funcall-interactively #'sly-mrepl-sync) + (select-window orig-window))) + +(map! + (:map sly-mode-map + :n "g \\" #'sly-mrepl-sync + :n "g d" #'sly-edit-definition + :n "K" #'sly-documentation + :n "g SPC" #'sly-compile-and-load-file + :n "g RET" #'sly-run-tests) + + (:map sly-mrepl-mode-map + "C-k" #'sly-mrepl-previous-prompt + "C-r" #'isearch-backward)) diff --git a/users/glittershark/emacs.d/nix-clangd.sh b/users/glittershark/emacs.d/nix-clangd.sh new file mode 100755 index 000000000000..16f6252d8b27 --- /dev/null +++ b/users/glittershark/emacs.d/nix-clangd.sh @@ -0,0 +1,7 @@ +#!/usr/bin/env bash +set -euo pipefail + +CLANGD_FLAGS=--compile-commands-dir=/home/grfn/builds/tvix \ + nix-shell /home/grfn/code/depot \ + -A third_party.nix \ + --run nix-clangd diff --git a/users/glittershark/emacs.d/nix.el b/users/glittershark/emacs.d/nix.el new file mode 100644 index 000000000000..a5d09f825c17 --- /dev/null +++ b/users/glittershark/emacs.d/nix.el @@ -0,0 +1,30 @@ +;;; ~/code/depot/users/glittershark/emacs.d/nix.el -*- lexical-binding: t; -*- + +(defun nix-buffer-type () + "Returns: + +'home-manager, if the current buffer is a home-manager module +'nixos, if the current buffer is a nixos module +nil, if none of the above are the case" + (when buffer-file-name + (pcase buffer-file-name + ((rx (0+ nonl) "system/home" (0+ nonl) ".nix" eos) + 'home-manager) + ((rx (0+ nonl) "system/system" (0+ nonl) ".nix" eos) + 'nixos)))) + +(defun set-nix-compile-command () + "Set the compile command for the current buffer based on the type of nix +buffer it is, per `nix-buffer-type'" + (interactive) + (when-let ((btype (nix-buffer-type))) + (setq-local + compile-command + (case btype + ('home-manager "home-manager switch") + ('nixos "sudo nixos-rebuild switch"))))) + +(add-hook 'nix-mode-hook #'set-nix-compile-command) + +(map! 
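+ ;; "g SPC" in nix-mode buffers runs `compile'; `set-nix-compile-command'
+ ;; above has already pointed `compile-command' at "home-manager switch" or
+ ;; "sudo nixos-rebuild switch" depending on whether the buffer is a
+ ;; home-manager or NixOS module (see `nix-buffer-type').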
(:map nix-mode-map + (:n "g SPC" #'compile))) diff --git a/users/glittershark/emacs.d/org-alerts.el b/users/glittershark/emacs.d/org-alerts.el new file mode 100644 index 000000000000..993791f367ae --- /dev/null +++ b/users/glittershark/emacs.d/org-alerts.el @@ -0,0 +1,188 @@ +;;; ~/.doom.d/org-alerts.el -*- lexical-binding: t; -*- + +;;; Commentary: + +;;; Code: + +(require 's) +(require 'dash) +(require 'alert) +(require 'org-agenda) + + +(defvar grfn/org-alert-interval 300 + "Interval in seconds to recheck and display deadlines.") + + +(defvar grfn/org-alert-notification-title "*org*" + "Title to be sent with notify-send.") + +(defvar grfn/org-alert-headline-regexp "\\(Sched.+:.+\\|Deadline:.+\\)" + "Regexp for headlines to search in agenda buffer.") + +(defun grfn/org-alert--strip-prefix (headline) + "Remove the scheduled/deadline prefix from HEADLINE." + (replace-regexp-in-string ".*:\s+" "" headline)) + + +(defun grfn/org-alert--unique-headlines (regexp agenda) + "Return unique headlines from the results of REGEXP in AGENDA." + (let ((matches (-distinct (-flatten (s-match-strings-all regexp agenda))))) + (--map (grfn/org-alert--strip-prefix it) matches))) + + +(defun grfn/org-alert--get-headlines () + "Return the current org agenda as text only." + (with-temp-buffer + (let ((org-agenda-sticky nil) + (org-agenda-buffer-tmp-name (buffer-name))) + (ignore-errors (org-agenda-list nil "TODAY" 1)) + (grfn/org-alert--unique-headlines + grfn/org-alert-headline-regexp + (buffer-substring-no-properties (point-min) (point-max)))))) + +(defun grfn/parse-range-string (str) + (when + (string-match (rx (group (repeat 2 (any digit)) + ":" + (repeat 2 (any digit))) + (optional + (and + "-" + (group (repeat 2 (any digit)) + ":" + (repeat 2 (any digit)))))) + str) + (list + (org-read-date nil t + (match-string 1 str)) + (when-let ((et (match-string 2 str))) (org-read-date nil t et))))) + +(defun grfn/start-time-from-range-string (str) + (pcase-let ((`(,start-time . 
_) (grfn/parse-range-string str))) + start-time)) + +(comment + (org-agenda-list nil "TODAY" 1) + + (grfn/org-alert--get-headlines) + (setq --src + (with-temp-buffer + (let ((org-agenda-sticky nil) + (org-agenda-buffer-tmp-name (buffer-name))) + (ignore-errors (org-agenda-list nil "TODAY" 1)) + (buffer-substring-no-properties (point-min) (point-max))))) + + (setq --entries + (with-temp-buffer + (let ((inhibit-redisplay t) + (org-agenda-sticky nil) + (org-agenda-buffer-tmp-name (buffer-name)) + (org-agenda-buffer-name (buffer-name)) + (org-agenda-buffer (current-buffer))) + (org-agenda-get-day-entries + (cadr (org-agenda-files nil 'ifmode)) + (calendar-gregorian-from-absolute + (time-to-days (org-read-date nil t "TODAY"))))))) + + (loop for k in (text-properties-at 0 (car --entries)) + by #'cddr + collect k) + + (--map (substring-no-properties (get-text-property 0 'txt it)) --entries) + (--map (get-text-property 0 'time it) --entries) + (current-time) + + (format-time-string "%R" (org-read-date nil t "10:00-11:00")) + + (grfn/start-time-from-range-string "10:00") + + (current-time-string (org-read-date nil t "10:00-11:00")) + + (todo-state + org-habit-p + priority + warntime + ts-date + date + type + org-hd-marker + org-marker + face + undone-face + help-echo + mouse-face + done-face + org-complex-heading-regexp + org-todo-regexp + org-not-done-regexp + dotime + format + extra + time + level + txt + breadcrumbs + duration + time-of-day + org-lowest-priority + org-highest-priority + tags + org-category) + + (propertize) + + --src + ) + + +(defun grfn/org-alert--headline-complete? (headline) + "Return whether HEADLINE has been completed." + (--any? (s-starts-with? it headline) org-done-keywords-for-agenda)) + + +(defun grfn/org-alert--filter-active (deadlines) + "Remove any completed headings from the provided DEADLINES." + (-remove 'grfn/org-alert--headline-complete? deadlines)) + + +(defun grfn/org-alert--strip-states (deadlines) + "Remove the todo states from DEADLINES." + (--map (s-trim (s-chop-prefixes org-todo-keywords-for-agenda it)) deadlines)) + + +(defun grfn/org-alert-check () + "Check for active, due deadlines and initiate notifications." + (interactive) + ;; avoid interrupting current command. + (unless (minibufferp) + (save-window-excursion + (save-excursion + (save-restriction + (let ((active (grfn/org-alert--filter-active (grfn/org-alert--get-headlines)))) + (dolist (dl (grfn/org-alert--strip-states active)) + (alert dl :title grfn/org-alert-notification-title)))))) + (when (get-buffer org-agenda-buffer-name) + (ignore-errors + (with-current-buffer org-agenda-buffer-name + (org-agenda-redo t)))))) + + +(defun grfn/org-alert-enable () + "Enable the notification timer. Cancels existing timer if running." + (interactive) + (grfn/org-alert-disable) + (run-at-time 0 grfn/org-alert-interval 'grfn/org-alert-check)) + + +(defun grfn/org-alert-disable () + "Cancel the running notification timer." 
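+ ;; Scan `timer-list' for timers whose function slot (accessed positionally
+ ;; via `elt') is `grfn/org-alert-check', i.e. the timer scheduled by
+ ;; `grfn/org-alert-enable', and cancel them.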
+ (interactive) + (dolist (timer timer-list) + (if (eq (elt timer 5) 'grfn/org-alert-check) + (cancel-timer timer)))) + + + +(provide 'grfn/org-alert) +;;; grfn/org-alert.el ends here diff --git a/users/glittershark/emacs.d/org-config.el b/users/glittershark/emacs.d/org-config.el new file mode 100644 index 000000000000..59953dfeaaa0 --- /dev/null +++ b/users/glittershark/emacs.d/org-config.el @@ -0,0 +1,180 @@ +;;; ~/.doom.d/org-config.el -*- lexical-binding: t; -*- +;;; + +(defun notes-file (f) + (concat org-directory (if (string-prefix-p "/" f) "" "/") f)) + +(defun grfn/org-project-tag->key (tag) + (s-replace-regexp "^project__" "" tag)) + +(defun grfn/org-project-tag->name (tag) + (s-titleized-words + (s-join " " (s-split "_" (grfn/org-project-tag->key tag))))) + +(defun grfn/org-project-tag->keys (tag) + (s-join "" (cons "p" + (-map (lambda (s) (substring-no-properties s 0 1)) + (s-split "_" (grfn/org-project-tag->key tag)))))) + +(defun grfn/org-projects->agenda-commands (project-tags) + (loop for tag in project-tags + collect `(,(grfn/org-project-tag->keys tag) + ,(grfn/org-project-tag->name tag) + tags-todo + ,tag))) + +(defun grfn/org-projects () + (loop for (tag) in + (org-global-tags-completion-table + (directory-files-recursively "~/notes" "\\.org$")) + when (s-starts-with-p "project__" tag) + collect tag)) + +(comment + (grfn/org-projects->agenda-commands (grfn/org-projects)) + ) + +(setq + org-directory (expand-file-name "~/notes") + +org-dir (expand-file-name "~/notes") + org-default-notes-file (concat org-directory "/inbox.org") + +org-default-todo-file (concat org-directory "/inbox.org") + org-agenda-files (directory-files-recursively + "~/notes" "\\.org$") + org-refile-targets '((org-agenda-files :maxlevel . 3)) + org-outline-path-complete-in-steps nil + org-refile-use-outline-path t + org-file-apps `((auto-mode . emacs) + (,(rx (or (and "." (optional "x") (optional "htm") (optional "l") buffer-end) + (and buffer-start "http" (optional "s") "://"))) + . "firefox %s") + (,(rx ".pdf" buffer-end) . "apvlv %s") + (,(rx "." (or "png" + "jpg" + "jpeg" + "gif" + "tif" + "tiff") + buffer-end) + . "feh %s")) + org-log-done 'time + org-archive-location "~/notes/trash::* From %s" + org-cycle-separator-lines 2 + org-hidden-keywords '(title) + org-tags-column -130 + org-ellipsis "⤵" + org-imenu-depth 9 + org-capture-templates + `(("t" "Todo" entry + (file +org-default-todo-file) + "* TODO %?\n%i" + :kill-buffer t) + + ("m" "Email" entry + (file +org-default-todo-file) + "* TODO [%l[%:subject]] :email:\n%i") + + ("n" "Notes" entry + (file +org-default-todo-file) + "* %U %?\n%i" + :prepend t + :kill-buffer t) + + ("c" "Task note" entry + (clock) + "* %U %?\n%i[%l[Context]]\n" + :kill-buffer t + :unnarrowed t) + + ("p" "Projects") + ("px" "Xanthous" entry + (file+headline ,(notes-file "xanthous.org") "Backlog") + "* TODO %?\nContext %a\nIn task: %K") + ("pt" "Tvix" entry + (file+headline ,(notes-file "tvix.org") "Tvix TODO") + "* TODO %?\nContext %a\nIn task: %K") + ("pw" "Windtunnel" entry + (file+headline ,(notes-file "windtunnel.org") "Tasks") + "* TODO %i%?\nContext: %a\nIn task: %K") + + ("d" "Data recording") + ) + + org-capture-templates-contexts + `(("px" ((in-file . 
"/home/griffin/code/xanthous/.*")))) + + org-deadline-warning-days 1 + org-agenda-skip-scheduled-if-deadline-is-shown 'todo + org-todo-keywords '((sequence "TODO(t)" "ACTIVE(a)" "|" "DONE(d)" "RUNNING(r)") + (sequence "NEXT(n)" "WAITING(w)" "LATER(l)" "|" "CANCELLED(c)")) + org-agenda-custom-commands + `(("S" "Sprint Tasks" tags-todo "sprint") + ("i" "Inbox" tags "inbox") + ("r" "Running jobs" todo "RUNNING") + ("w" "@Work" tags-todo "@work") + ("n" . "Next...") + ("np" "Next Sprint" tags-todo "next_sprint|sprint_planning") + + ("p" . "Project...") + ,@(grfn/org-projects->agenda-commands (grfn/org-projects))) + + org-agenda-dim-blocked-tasks nil + org-enforce-todo-dependencies nil + + org-babel-clojure-backend 'cider) + +(defun +grfn/org-setup () + (setq-local truncate-lines -1) + (line-number-mode -1)) + +(add-hook 'org-mode-hook #'+grfn/org-setup) + +(defun +grfn/insert-work-template () + (interactive) + (goto-char (point-min)) + (forward-line) + (insert "#+TODO: TODO(t) NEXT(n) ACTIVE(a) | DONE(d) PR(p) RUNNING(r) TESTING(D) +#+TODO: BLOCKED(b) BACKLOG(l) PROPOSED(o) | CANCELLED(c) +#+FILETAGS: @work +#+FILETAGS: @work +#+PROPERTY: Effort_ALL 0 4:00 8:00 12:00 20:00 32:00 +#+PROPERTY: ESTIMATE_ALL 0 1 2 3 5 8 +#+PROPERTY: STORY-TYPE_ALL Feature Bug Chore +#+PROPERTY: NOBLOCKING t +#+COLUMNS: %TODO %40ITEM(Task) %17EFFORT(Estimated){:} %CLOCKSUM(Time Spent) %17STORY-TYPE(Type) %TAGS")) + +(defun +grfn/insert-org-template () + (interactive) + (pcase (buffer-file-name) + ((s-contains "/work/") (+grfn/insert-work-template)))) + +;;; TODO: this doesn't work? +(define-auto-insert "\\.org?$" #'grfn/insert-org-template t) + +(defun forge--post-submit-around---link-pr-to-org-item + (orig) + (let ((cb (funcall orig))) + (lambda (value headers status req) + (prog1 (funcall cb value headers status req) + (grfn/at-org-clocked-in-item + (let ((url (alist-get 'html_url value)) + (number (alist-get 'number value))) + (org-set-property + "pull-request" + (org-make-link-string + url + (format "%s/%s/%d" + (->> value + (alist-get 'base) + (alist-get 'repo) + (alist-get 'name)) + (->> value + (alist-get 'base) + (alist-get 'repo) + (alist-get 'owner) + (alist-get 'login)) + number))))))))) + +(advice-add + #'forge--post-submit-callback + :around #'forge--post-submit-around---link-pr-to-org-item) diff --git a/users/glittershark/emacs.d/org-gcal.el b/users/glittershark/emacs.d/org-gcal.el new file mode 100644 index 000000000000..361daaac42f5 --- /dev/null +++ b/users/glittershark/emacs.d/org-gcal.el @@ -0,0 +1,181 @@ +;;; ~/.doom.d/org-gcal.el -*- lexical-binding: t; -*- + +(require 'aio) +(require 'parse-time) + +(setq-local lexical-binding t) +(setq plstore-cache-passphrase-for-symmetric-encryption t) + +(defvar gcal-client-id) +(defvar gcal-client-secret) + +(defvar google-calendar-readonly-scope + "https://www.googleapis.com/auth/calendar.readonly") + +(defvar events-file "/home/grfn/notes/events.org") + +(defun google--get-token (scope client-id client-secret) + (oauth2-auth-and-store + "https://accounts.google.com/o/oauth2/v2/auth" + "https://oauth2.googleapis.com/token" + scope + client-id + client-secret)) + +(cl-defun google--request (url &key method params scope) + (let ((p (aio-promise)) + (auth-token (google--get-token scope gcal-client-id gcal-client-secret))) + (oauth2-refresh-access auth-token) + (oauth2-url-retrieve + auth-token + url + (lambda (&rest _) + (goto-char (point-min)) + (re-search-forward "^$") + (let ((resp (json-parse-buffer :object-type 'alist))) + (aio-resolve p (lambda () 
resp)))) + nil + (or method "GET") + params) + p)) + +(cl-defun list-events (&key min-time max-time) + (google--request + (concat + "https://www.googleapis.com/calendar/v3/calendars/griffin@urbint.com/events" + "?timeMin=" (format-time-string "%Y-%m-%dT%T%z" min-time) + "&timeMax=" (format-time-string "%Y-%m-%dT%T%z" max-time)) + :scope google-calendar-readonly-scope)) + + +(defun last-week-events () + (list-events :min-time (time-subtract + (current-time) + (seconds-to-time + (* 60 60 24 7))) + :max-time (current-time))) + +(defun next-week-events () + (list-events :min-time (current-time) + :max-time (time-add + (current-time) + (seconds-to-time + (* 60 60 24 7))))) + +(defun attending-event? (event) + (let* ((attendees (append (alist-get 'attendees event) nil)) + (self (--find (alist-get 'self it) attendees))) + (equal "accepted" (alist-get 'responseStatus self)))) + +(defun event->org-headline (event level) + (cl-flet ((make-time + (key) + (when-let ((raw-time (->> event (alist-get key) (alist-get 'dateTime)))) + (format-time-string + (org-time-stamp-format t) + (parse-iso8601-time-string raw-time))))) + (if-let ((start-time (make-time 'start)) + (end-time (make-time 'end))) + (s-format + "${headline} [[${htmlLink}][${summary}]] :event: +${startTime}--${endTime} +:PROPERTIES: +${location-prop} +:EVENT: ${htmlLink} +:END: + +${description}" + (function + (lambda (k m) + (or (alist-get (intern k) m) + (format "key not found: %s" k)))) + (append + event + `((headline . ,(make-string level ?*)) + (startTime . ,start-time) + (endTime . ,end-time) + (location-prop + . ,(if-let ((location (alist-get 'location event))) + (s-lex-format ":LOCATION: ${location}") + ""))))) + ""))) + +(comment + (alist-get 'foo nil) + ) + +(defun write-events (events) + (with-current-buffer (find-file-noselect events-file) + (save-mark-and-excursion + (save-restriction + (widen) + (erase-buffer) + (goto-char (point-min)) + (insert "#+TITLE: Events") + (newline) (newline) + (prog1 + (loop for event in (append events nil) + when (attending-event? event) + do + (insert (event->org-headline event 1)) + (newline) + sum 1) + (org-align-tags t)))))) + +(defun +grfn/sync-events () + (interactive) + (let* ((events (alist-get 'items (aio-wait-for (next-week-events)))) + (num-written (write-events events))) + (message "Successfully wrote %d events" num-written))) + +(comment + ((kind . "calendar#event") + (etag . "\"3174776941020000\"") + (id . "SNIP") + (status . "confirmed") + (htmlLink . "https://www.google.com/calendar/event?eid=SNIP") + (created . "2020-04-01T13:30:09.000Z") + (updated . "2020-04-20T13:14:30.510Z") + (summary . "SNIP") + (description . "SNIP") + (location . "SNIP") + (creator + (email . "griffin@urbint.com") + (self . t)) + (organizer + (email . "griffin@urbint.com") + (self . t)) + (start + (dateTime . "2020-04-01T12:00:00-04:00") + (timeZone . "America/New_York")) + (end + (dateTime . "2020-04-01T12:30:00-04:00") + (timeZone . "America/New_York")) + (recurrence . + ["RRULE:FREQ=WEEKLY;UNTIL=20200408T035959Z;BYDAY=WE"]) + (iCalUID . "SNIP") + (sequence . 0) + (attendees . + [((email . "griffin@urbint.com") + (organizer . t) + (self . t) + (responseStatus . "accepted")) + ((email . "SNIP") + (displayName . "SNIP") + (responseStatus . "needsAction"))]) + (extendedProperties + (private + (origRecurringId . "309q48kc1dihsvbi13pnlimb5a")) + (shared + (origRecurringId . "309q48kc1dihsvbi13pnlimb5a"))) + (reminders + (useDefault . 
t))) + + (require 'icalendar) + + (icalendar--convert-recurring-to-diary + nil + "RRULE:FREQ=WEEKLY;UNTIL=20200408T035959Z;BYDAY=WE" + ) + + ) diff --git a/users/glittershark/emacs.d/org-query.el b/users/glittershark/emacs.d/org-query.el new file mode 100644 index 000000000000..3ed4b086af0c --- /dev/null +++ b/users/glittershark/emacs.d/org-query.el @@ -0,0 +1,96 @@ +;;; ~/.doom.d/org-query.el -*- lexical-binding: t; -*- + +(require 'org) +(require 'org-agenda) +(require 'inflections) + +(defun grfn/org-agenda-entry->element (agenda-entry) + ;; ??? + ()) + +(defun org-elements-agenda-match (match &optional todo-only) + (setq match + (propertize match 'inherited t)) + (with-temp-buffer + (let ((inhibit-redisplay (not debug-on-error)) + (org-agenda-sticky nil) + (org-agenda-buffer-tmp-name (buffer-name)) + (org-agenda-buffer-name (buffer-name)) + (org-agenda-buffer (current-buffer)) + (matcher (org-make-tags-matcher match)) + result) + (org-agenda-prepare (concat "TAGS " match)) + (setq match (car matcher) + matcher (cdr matcher)) + (dolist (file (org-agenda-files nil 'ifmode) + result) + (catch 'nextfile + (org-check-agenda-file file) + (when-let ((buffer (if (file-exists-p file) + (org-get-agenda-file-buffer file) + (error "No such file %s" file)))) + (with-current-buffer buffer + (unless (derived-mode-p 'org-mode) + (error "Agenda file %s is not in Org mode" file)) + (save-excursion + (save-restriction + (if (eq buffer org-agenda-restrict) + (narrow-to-region org-agenda-restrict-begin + org-agenda-restrict-end) + (widen)) + (setq result + (append result (org-scan-tags + 'agenda + matcher + todo-only)))))))))))) + +(defun grfn/num-inbox-items () + (length (org-elements-agenda-match "inbox" t))) + +(defun grfn/num-inbox-items-message () + (let ((n (grfn/num-inbox-items))) + (unless (zerop n) + (format "%d %s" + n + (if (= 1 n) "item" "items"))))) + +(defmacro grfn/at-org-clocked-in-item (&rest body) + `(when (org-clocking-p) + (let ((m org-clock-marker)) + (with-current-buffer (marker-buffer m) + (save-mark-and-excursion + (goto-char m) + (org-back-to-heading t) + ,@body))))) + +(defun grfn/org-element-clocked-in-task () + (grfn/at-org-clocked-in-item + (org-element-at-point))) + +(comment + (grfn/org-element-clocked-in-task) + (org-element-property :title (grfn/org-element-clocked-in-task)) + ) + +(defun grfn/minutes->hours:minutes (minutes) + (format "%d:%02d" + (floor (/ minutes 60)) + (mod minutes 60))) + +(comment + (grfn/minutes->hours:minutes 1) ; => "0:01" + (grfn/minutes->hours:minutes 15) ; => "0:15" + (grfn/minutes->hours:minutes 130) ; => "2:10" + ) + +(defun grfn/org-current-clocked-in-task-message () + (if (org-clocking-p) + (format "(%s) [%s]" + (org-element-property :title (grfn/org-element-clocked-in-task)) + (grfn/minutes->hours:minutes + (org-clock-get-clocked-time))) + "")) + +(comment + (grfn/org-current-clocked-in-task-message) + ) diff --git a/users/glittershark/emacs.d/packages.el b/users/glittershark/emacs.d/packages.el new file mode 100644 index 000000000000..67b3defdd7ec --- /dev/null +++ b/users/glittershark/emacs.d/packages.el @@ -0,0 +1,156 @@ +;; -*- no-byte-compile: t; -*- +;;; private/grfn/packages.el + +(package! moody) + +;; Editor +(package! solarized-theme) +(package! fill-column-indicator) +(package! flx) +(package! general + :recipe (:host github :repo "noctuid/general.el")) +(package! fill-column-indicator) +(package! writeroom-mode) +(package! dash) +(package! w3m) +(package! rainbow-mode) +(package! string-inflection) + +;;; Org +(package! 
org-clubhouse + :recipe (:host file + :local-repo "~/code/org-clubhouse")) +(package! org-alert) +(package! ob-http) +(package! ob-ipython) +(package! ob-async) +(package! org-recent-headings) +(package! org-sticky-header) +(package! gnuplot) +(package! gnuplot-mode) + +;; Presentation +(package! epresent) +(package! org-tree-slide) +(package! ox-reveal) + +;; Slack etc +(package! slack) +(package! alert) + +;; Git +(package! evil-magit) +(package! marshal) +(package! forge) +(package! + github-review + :recipe + (:host github + :repo "charignon/github-review" + :files ("github-review.el"))) + +;; Elisp +(package! dash) +(package! dash-functional) +(package! s) +(package! request) +(package! predd + :recipe (:host github :repo "skeeto/predd")) +(package! aio) + +;; Haskell +(package! lsp-haskell) +(package! counsel-etags) + +;;; LSP +(package! lsp-mode) +(package! lsp-ui :recipe (:host github :repo "emacs-lsp/lsp-ui")) +(package! company-lsp) +(package! lsp-treemacs) +(package! dap-mode) + +;; Rust +(package! rustic :disable t) +;; (package! racer :disable t) +(package! cargo) + +;; Lisp +(package! paxedit) + +;; Javascript +(package! flow-minor-mode) +(package! flycheck-flow) +(package! company-flow) +(package! prettier-js) + +;; GraphQL +(package! graphql-mode) + +;; Haskell +(package! lsp-mode) +(package! lsp-ui) +(package! lsp-haskell) +(package! company-lsp) +;; (package! lsp-imenu) + +;; Clojure +(package! flycheck-clojure) + +;; SQL +(package! sqlup-mode) +(package! emacsql) +(package! emacsql-psql) + +;;; Python +(package! pyimport) +;; (package! yapfify) +(package! blacken) + + +;;; Desktop interaction +(package! counsel-spotify) + +;;; Dhall +(package! dhall-mode) + +;;; Kubernetes +(package! kubernetes) +(package! kubernetes-evil) +(package! k8s-mode) + +;;; Stack Exchange +(package! sx) + +;;; Nix +(package! nix-update + :recipe (:host github + :repo "glittershark/nix-update-el")) +(package! direnv) + +;;; Email +(package! mu4e) + +;;; Sequence diagrams +(package! wsd-mode + :recipe (:host github + :repo "josteink/wsd-mode")) + +;;; logic? +(package! metal-mercury-mode + :recipe (:host github + :repo "ahungry/metal-mercury-mode")) +(package! flycheck-mercury) + +(package! terraform-mode) +(package! company-terraform) + +(package! jsonnet-mode) + +;;; +(package! znc + :recipe (:host github + :repo "sshirokov/ZNC.el")) + +;;; cpp +(package! protobuf-mode) +(package! clang-format+) diff --git a/users/glittershark/emacs.d/rust.el b/users/glittershark/emacs.d/rust.el new file mode 100644 index 000000000000..44c3552f6ea7 --- /dev/null +++ b/users/glittershark/emacs.d/rust.el @@ -0,0 +1,37 @@ +;;; ../code/depot/users/glittershark/emacs.d/rust.el -*- lexical-binding: t; -*- + +(add-to-list 'auto-mode-alist '("\\.rs$" . rust-mode)) + +(defun grfn/rust-setup () + (interactive) + + (push '(?> . ("<" . ">")) evil-surround-pairs-alist) + (push '(?< . ("< " . " >")) evil-surround-pairs-alist) + + (setq lsp-rust-server 'rust-analyzer) + (setq-local whitespace-line-column 100 + fill-column 100) + (setq rust-format-show-buffer nil) + (setq lsp-rust-analyzer-import-merge-behaviour "last" + lsp-rust-analyzer-cargo-watch-command "clippy" + lsp-ui-doc-enable t) + (rust-enable-format-on-save) + (lsp)) + +(add-hook 'rust-mode-hook #'grfn/rust-setup) + +(map! 
+ (:map rust-mode-map + :n "g RET" #'cargo-process-current-file-tests + :n "g R" #'lsp-find-references + (:localleader + "m" #'lsp-rust-analyzer-expand-macro))) + +(comment + (flycheck-get-next-checkers 'lsp) + (flycheck-add-next-checker) + (flycheck-get-next-checkers 'lsp) + ) + +(set-company-backend! 'rust-mode + '(:separate company-capf company-yasnippet)) diff --git a/users/glittershark/emacs.d/show-matching-paren.el b/users/glittershark/emacs.d/show-matching-paren.el new file mode 100644 index 000000000000..d10751a63f94 --- /dev/null +++ b/users/glittershark/emacs.d/show-matching-paren.el @@ -0,0 +1,61 @@ +;;; ~/.doom.d/show-matching-paren.el -*- lexical-binding: t; -*- + +;;; https://with-emacs.com/posts/ui-hacks/show-matching-lines-when-parentheses-go-off-screen/ + +;; we will call `blink-matching-open` ourselves... +(remove-hook 'post-self-insert-hook + #'blink-paren-post-self-insert-function) +;; this still needs to be set for `blink-matching-open` to work +(setq blink-matching-paren 'show) + +(let ((ov nil)) ; keep track of the overlay + (advice-add + #'show-paren-function + :after + (defun show-paren--off-screen+ (&rest _args) + "Display matching line for off-screen paren." + (when (overlayp ov) + (delete-overlay ov)) + ;; check if it's appropriate to show match info, + ;; see `blink-paren-post-self-insert-function' + (when (and (overlay-buffer show-paren--overlay) + (not (or cursor-in-echo-area + executing-kbd-macro + noninteractive + (minibufferp) + this-command)) + (and (not (bobp)) + (memq (char-syntax (char-before)) '(?\) ?\$))) + (= 1 (logand 1 (- (point) + (save-excursion + (forward-char -1) + (skip-syntax-backward "/\\") + (point)))))) + ;; rebind `minibuffer-message' called by + ;; `blink-matching-open' to handle the overlay display + (cl-letf (((symbol-function #'minibuffer-message) + (lambda (msg &rest args) + (let ((msg (apply #'format-message msg args))) + (setq ov (display-line-overlay+ + (window-start) msg )))))) + (blink-matching-open)))))) + +(defun display-line-overlay+ (pos str &optional face) + "Display line at POS as STR with FACE. + +FACE defaults to inheriting from default and highlight." 
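+ ;; Create an overlay covering the whole line at POS and use its `display'
+ ;; property to render STR in place of that line; the overlay is returned so
+ ;; the caller can delete it later (see the `show-paren-function' advice
+ ;; above, which removes the previous overlay before showing a new one).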
+ (let ((ol (save-excursion + (goto-char pos) + (make-overlay (line-beginning-position) + (line-end-position))))) + (overlay-put ol 'display str) + (overlay-put ol 'face + (or face '(:inherit default :inherit highlight))) + ol)) + +(setq show-paren-style 'paren + show-paren-delay 0.03 + show-paren-highlight-openparen t + show-paren-when-point-inside-paren nil + show-paren-when-point-in-periphery t) +(show-paren-mode 1) diff --git a/users/glittershark/emacs.d/slack-snippets.el b/users/glittershark/emacs.d/slack-snippets.el new file mode 100644 index 000000000000..9e05382ee6f0 --- /dev/null +++ b/users/glittershark/emacs.d/slack-snippets.el @@ -0,0 +1,227 @@ +;;; private/grfn/slack-snippets.el -*- lexical-binding: t; -*- + +(require 'dash) +(require 'dash-functional) +(require 'request) + +;;; +;;; Configuration +;;; + +(defvar slack/token nil + "Legacy (https://api.slack.com/custom-integrations/legacy-tokens) access token") + +(defvar slack/include-public-channels 't + "Whether or not to inclue public channels in the list of conversations") + +(defvar slack/include-private-channels 't + "Whether or not to inclue public channels in the list of conversations") + +(defvar slack/include-im 't + "Whether or not to inclue IMs (private messages) in the list of conversations") + +(defvar slack/include-mpim nil + "Whether or not to inclue multi-person IMs (multi-person private messages) in + the list of conversations") + +;;; +;;; Utilities +;;; + +(defmacro comment (&rest _body) + "Comment out one or more s-expressions" + nil) + +(defun ->list (vec) (append vec nil)) + +(defun json-truthy? (x) (and x (not (equal :json-false x)))) + +;;; +;;; Generic API integration +;;; + +(defvar slack/base-url "https://slack.com/api") + +(defun slack/get (path params &optional callback) + "params is an alist of query parameters" + (let* ((params-callback (if (functionp params) `(() . ,params) (cons params callback))) + (params (car params-callback)) (callback (cdr params-callback)) + (params (append `(("token" . ,slack/token)) params)) + (url (concat (file-name-as-directory slack/base-url) path))) + (request url + :type "GET" + :params params + :parser 'json-read + :success (cl-function + (lambda (&key data &allow-other-keys) + (funcall callback data)))))) + +(defun slack/post (path params &optional callback) + (let* ((params-callback (if (functionp params) `(() . ,params) (cons params callback))) + (params (car params-callback)) (callback (cdr params-callback)) + (url (concat (file-name-as-directory slack/base-url) path))) + (request url + :type "POST" + :data (json-encode params) + :headers `(("Content-Type" . "application/json") + ("Authorization" . ,(format "Bearer %s" slack/token))) + :success (cl-function + (lambda (&key data &allow-other-keys) + (funcall callback data)))))) + + +;;; +;;; Specific API endpoints +;;; + +;; Users + +(defun slack/users (cb) + "Returns users as (id . 
name) pairs" + (slack/get + "users.list" + (lambda (data) + (->> data + (assoc-default 'members) + ->list + (-map (lambda (user) + (cons (assoc-default 'id user) + (assoc-default 'real_name user)))) + (-filter #'cdr) + (funcall cb))))) + +(comment + (slack/get + "users.list" + (lambda (data) (setq response-data data))) + + (slack/users (lambda (data) (setq --users data))) + + ) + +;; Conversations + +(defun slack/conversation-types () + (->> + (list (when slack/include-public-channels "public_channel") + (when slack/include-private-channels "private_channel") + (when slack/include-im "im") + (when slack/include-mpim "mpim")) + (-filter #'identity) + (s-join ","))) + +(defun channel-label (chan users-alist) + (cond + ((json-truthy? (assoc-default 'is_channel chan)) + (format "#%s" (assoc-default 'name chan))) + ((json-truthy? (assoc-default 'is_im chan)) + (let ((user-id (assoc-default 'user chan))) + (format "Private message with %s" (assoc-default user-id users-alist)))) + ((json-truthy? (assoc-default 'is_mpim chan)) + (->> chan + (assoc-default 'purpose) + (assoc-default 'value))))) + +(defun slack/conversations (cb) + "Calls `cb' with (id . '((label . \"label\") '(topic . \"topic\") '(purpose . \"purpose\"))) pairs" + (slack/get + "conversations.list" + `(("types" . ,(slack/conversation-types)) + ("exclude-archived" . "true")) + (lambda (data) + (setq --data data) + (slack/users + (lambda (users) + (->> data + (assoc-default 'channels) + ->list + (-map + (lambda (chan) + (cons (assoc-default 'id chan) + `((label . ,(channel-label chan users)) + (topic . ,(->> chan + (assoc-default 'topic) + (assoc-default 'value))) + (purpose . ,(->> chan + (assoc-default 'purpose) + (assoc-default 'value))))))) + (funcall cb))))))) + +(comment + (slack/get + "conversations.list" + '(("types" . "public_channel,private_channel,im,mpim")) + (lambda (data) (setq response-data data))) + + (slack/get + "conversations.list" + '(("types" . "im")) + (lambda (data) (setq response-data data))) + + (slack/conversations + (lambda (convos) (setq --conversations convos))) + + ) + +;; Messages + +(cl-defun slack/post-message + (&key text channel-id (on-success #'identity)) + (slack/post "chat.postMessage" + `((text . ,text) + (channel . ,channel-id) + (as_user . 
t)) + on-success)) + +(comment + + (slack/post-message + :text "hi slackbot" + :channel-id slackbot-channel-id + :on-success (lambda (data) (setq resp data))) + + ) + +;;; +;;; Posting code snippets to slack +;;; + +(defun prompt-for-channel (cb) + (slack/conversations + (lambda (conversations) + (ivy-read + "Select channel: " + ;; TODO want to potentially use purpose / topic stuff here + (->> conversations + (-filter (lambda (c) (assoc-default 'label (cdr c)))) + (-map (lambda (chan) (let ((label (assoc-default 'label (cdr chan))) + (id (car chan))) + (propertize label 'channel-id id))))) + :history 'slack/channel-history + :action (lambda (selected) + (let ((channel-id (get-text-property 0 'channel-id selected))) + (funcall cb channel-id) + (message "Sent message to %s" selected)))))) + nil) + +(comment + (prompt-for-channel #'message) + (->> --convos + (-filter (lambda (c) (assoc-default 'label (cdr c)))) + (-map (lambda (chan) (let ((label (assoc-default 'label (cdr chan))) + (id (car chan))) + (propertize label 'channel-id id))))) + + (->> --convos (car) (cdr) (assoc-default 'label)) + ) + +(defun slack-send-code-snippet (&optional snippet-text) + (interactive + (list (buffer-substring-no-properties (mark) (point)))) + (prompt-for-channel + (lambda (channel-id) + (slack/post-message + :text (format "```\n%s```" snippet-text) + :channel-id channel-id)))) + +(provide 'slack-snippets) diff --git a/users/glittershark/emacs.d/slack.el b/users/glittershark/emacs.d/slack.el new file mode 100644 index 000000000000..ac81067bb179 --- /dev/null +++ b/users/glittershark/emacs.d/slack.el @@ -0,0 +1,24 @@ +;;; ~/code/depot/users/glittershark/emacs.d/slack.el -*- lexical-binding: t; -*- + +(after! slack + (set-face-foreground 'slack-message-output-header +solarized-s-base01) + (set-face-attribute 'slack-message-output-header nil :underline nil) + (set-face-attribute 'slack-message-output-text nil :height 1.0)) + +(require 'slack) +(setq slack-buffer-emojify 't + slack-prefer-current-team 't + slack-thread-also-send-to-room nil) + +(set-popup-rule! "^\\*Slack" + :quit nil + :select t + :side 'bottom + :ttl nil + :size 0.5) + +(add-hook #'slack-message-buffer-mode-hook + (lambda () (toggle-truncate-lines -1))) + +(map! (:map slack-message-buffer-mode-map + :n "q" #'delete-window)) diff --git a/users/glittershark/emacs.d/snippets/haskell-mode/annotation b/users/glittershark/emacs.d/snippets/haskell-mode/annotation new file mode 100644 index 000000000000..8a2854d759df --- /dev/null +++ b/users/glittershark/emacs.d/snippets/haskell-mode/annotation @@ -0,0 +1,5 @@ +# key: ann +# name: annotation +# expand-env: ((yas-indent-line 'fixed)) +# -- +{-# ANN ${1:module} ("${2:HLint: ignore ${3:Reduce duplication}}" :: String) #-} \ No newline at end of file diff --git a/users/glittershark/emacs.d/snippets/haskell-mode/benchmark-module b/users/glittershark/emacs.d/snippets/haskell-mode/benchmark-module new file mode 100644 index 000000000000..cbb1646e41d1 --- /dev/null +++ b/users/glittershark/emacs.d/snippets/haskell-mode/benchmark-module @@ -0,0 +1,26 @@ +# key: bench +# name: benchmark-module +# expand-env: ((yas-indent-line (quote fixed))) +# -- +-------------------------------------------------------------------------------- +module ${1:`(if (not buffer-file-name) "Module" + (let ((name (file-name-sans-extension (buffer-file-name))) + (case-fold-search nil)) + (if (cl-search "bench/" name) + (replace-regexp-in-string "/" "." 
+ (replace-regexp-in-string "^\/[^A-Z]*" "" + (car (last (split-string name "src"))))) + (file-name-nondirectory name))))`} ( benchmark, main ) where +-------------------------------------------------------------------------------- +import Bench.Prelude +-------------------------------------------------------------------------------- +import ${1:$(s-chop-suffix "Bench" yas-text)} +-------------------------------------------------------------------------------- + +main :: IO () +main = defaultMain [benchmark] + +-------------------------------------------------------------------------------- + +benchmark :: Benchmark +benchmark = bgroup "${1:$(->> yas-text (s-chop-suffix "Bench") (s-split ".") -last-item)}" [bench "something dumb" $ nf (1 +) (1 :: Int)] diff --git a/users/glittershark/emacs.d/snippets/haskell-mode/header b/users/glittershark/emacs.d/snippets/haskell-mode/header new file mode 100644 index 000000000000..fdd8250d86ca --- /dev/null +++ b/users/glittershark/emacs.d/snippets/haskell-mode/header @@ -0,0 +1,5 @@ +# key: hh +# name: header +# expand-env: ((yas-indent-line 'fixed)) +# -- +--------------------------------------------------------------------------------$2 \ No newline at end of file diff --git a/users/glittershark/emacs.d/snippets/haskell-mode/hedgehog-generator b/users/glittershark/emacs.d/snippets/haskell-mode/hedgehog-generator new file mode 100644 index 000000000000..68863f70542b --- /dev/null +++ b/users/glittershark/emacs.d/snippets/haskell-mode/hedgehog-generator @@ -0,0 +1,8 @@ +# key: gen +# name: Hedgehog Generator +# expand-env: ((yas-indent-line (quote fixed))) +# -- +gen${1:Foo} :: Gen $1 +gen$1 = do + $2 + pure $1{..} \ No newline at end of file diff --git a/users/glittershark/emacs.d/snippets/haskell-mode/hedgehog-property b/users/glittershark/emacs.d/snippets/haskell-mode/hedgehog-property new file mode 100644 index 000000000000..bf39a2a3eecb --- /dev/null +++ b/users/glittershark/emacs.d/snippets/haskell-mode/hedgehog-property @@ -0,0 +1,9 @@ +# -*- mode: snippet -*- +# name: Hedgehog Property +# key: hprop +# expand-env: ((yas-indent-line 'fixed)) +# -- +hprop_${1:somethingIsAlwaysTrue} :: Property +hprop_$1 = property $ do + ${2:x} <- forAll ${3:Gen.int $ Range.linear 1 100} + ${4:x === x} \ No newline at end of file diff --git a/users/glittershark/emacs.d/snippets/haskell-mode/hlint b/users/glittershark/emacs.d/snippets/haskell-mode/hlint new file mode 100644 index 000000000000..74b63dc672e4 --- /dev/null +++ b/users/glittershark/emacs.d/snippets/haskell-mode/hlint @@ -0,0 +1,8 @@ +# -*- mode: snippet -*- +# name: hlint +# uuid: +# expand-env: ((yas-indent-line 'fixed)) +# key: hlint +# condition: t +# -- +{-# ANN module ("Hlint: ignore $1" :: String) #- } \ No newline at end of file diff --git a/users/glittershark/emacs.d/snippets/haskell-mode/import-i b/users/glittershark/emacs.d/snippets/haskell-mode/import-i new file mode 100644 index 000000000000..4a7fca2c2fd6 --- /dev/null +++ b/users/glittershark/emacs.d/snippets/haskell-mode/import-i @@ -0,0 +1,4 @@ +# key: i +# name: import-i +# -- +import ${1:Prelude} \ No newline at end of file diff --git a/users/glittershark/emacs.d/snippets/haskell-mode/inl b/users/glittershark/emacs.d/snippets/haskell-mode/inl new file mode 100644 index 000000000000..6e17b83d7114 --- /dev/null +++ b/users/glittershark/emacs.d/snippets/haskell-mode/inl @@ -0,0 +1,6 @@ +# -*- mode: snippet -*- +# name: inl +# key: inl +# expand-env: ((yas-indent-line 'fixed)) +# -- +{-# INLINE $1 #-} \ No newline at end of file diff 
--git a/users/glittershark/emacs.d/snippets/haskell-mode/inline b/users/glittershark/emacs.d/snippets/haskell-mode/inline new file mode 100644 index 000000000000..1beafbe50b56 --- /dev/null +++ b/users/glittershark/emacs.d/snippets/haskell-mode/inline @@ -0,0 +1,5 @@ +# key: inline +# name: inline +# expand-env: ((yas-indent-line 'fixed)) +# -- +{-# INLINE $1 #-} \ No newline at end of file diff --git a/users/glittershark/emacs.d/snippets/haskell-mode/language pragma b/users/glittershark/emacs.d/snippets/haskell-mode/language pragma new file mode 100644 index 000000000000..6f84720f4511 --- /dev/null +++ b/users/glittershark/emacs.d/snippets/haskell-mode/language pragma @@ -0,0 +1,6 @@ +# -*- mode: snippet -*- +# name: language pragma +# key: lang +# expand-env: ((yas-indent-line 'fixed)) +# -- +{-# LANGUAGE $1 #-} \ No newline at end of file diff --git a/users/glittershark/emacs.d/snippets/haskell-mode/lens.field b/users/glittershark/emacs.d/snippets/haskell-mode/lens.field new file mode 100644 index 000000000000..b22ea3d2e888 --- /dev/null +++ b/users/glittershark/emacs.d/snippets/haskell-mode/lens.field @@ -0,0 +1,7 @@ +# -*- mode: snippet -*- +# name: lens.field +# key: lens +# expand-env: ((yas-indent-line 'fixed)) +# -- +${1:field} :: Lens' ${2:Source} ${3:Target} +$1 = lens _${4:sourceField} $ \\${2:$(-> yas-text s-word-initials s-downcase)} ${4:$(-> yas-text s-word-initials s-downcase)} -> ${2:$(-> yas-text s-word-initials s-downcase)} { _$4 = ${4:$(-> yas-text s-word-initials s-downcase)} } \ No newline at end of file diff --git a/users/glittershark/emacs.d/snippets/haskell-mode/module b/users/glittershark/emacs.d/snippets/haskell-mode/module new file mode 100644 index 000000000000..4554d33f9ba7 --- /dev/null +++ b/users/glittershark/emacs.d/snippets/haskell-mode/module @@ -0,0 +1,32 @@ +# -*- mode: snippet -*- +# key: module +# name: module +# condition: (= (length "module") (current-column)) +# expand-env: ((yas-indent-line 'fixed)) +# contributor: Luke Hoersten <luke@hoersten.org> +# -- +-------------------------------------------------------------------------------- +-- | +-- Module : $1 +-- Description : $2 +-- Maintainer : Griffin Smith <grfn@urbint.com> +-- Maturity : ${3:Draft, Usable, Maintained, OR MatureAF} +-- +-- $4 +-------------------------------------------------------------------------------- +module ${1:`(if (not buffer-file-name) "Module" + (let ((name (file-name-sans-extension (buffer-file-name))) + (case-fold-search nil)) + (if (or (cl-search "src/" name) + (cl-search "test/" name)) + (replace-regexp-in-string "/" "." 
+ (replace-regexp-in-string "^\/[^A-Z]*" "" + (car (last (split-string name "src"))))) + (file-name-nondirectory name))))`} + ( + ) where +-------------------------------------------------------------------------------- +import Prelude +-------------------------------------------------------------------------------- + +$0 diff --git a/users/glittershark/emacs.d/snippets/haskell-mode/shut up, hlint b/users/glittershark/emacs.d/snippets/haskell-mode/shut up, hlint new file mode 100644 index 000000000000..fccff1d66f29 --- /dev/null +++ b/users/glittershark/emacs.d/snippets/haskell-mode/shut up, hlint @@ -0,0 +1,6 @@ +# -*- mode: snippet -*- +# name: shut up, hlint +# key: dupl +# expand-env: ((yas-indent-line 'fixed)) +# -- +{-# ANN module ("HLint: ignore Reduce duplication" :: String) #-} \ No newline at end of file diff --git a/users/glittershark/emacs.d/snippets/haskell-mode/test-module b/users/glittershark/emacs.d/snippets/haskell-mode/test-module new file mode 100644 index 000000000000..82224b36a49e --- /dev/null +++ b/users/glittershark/emacs.d/snippets/haskell-mode/test-module @@ -0,0 +1,22 @@ +# -*- mode: snippet -*- +# name: test-module +# key: test +# expand-env: ((yas-indent-line 'fixed)) +# -- +{-# LANGUAGE ApplicativeDo #-} +-------------------------------------------------------------------------------- +module ${1:`(if (not buffer-file-name) "Module" + (let ((name (file-name-sans-extension (buffer-file-name))) + (case-fold-search nil)) + (if (cl-search "test/" name) + (replace-regexp-in-string "/" "." + (replace-regexp-in-string "^\/[^A-Z]*" "" + (car (last (split-string name "src"))))) + (file-name-nondirectory name))))`} where +-------------------------------------------------------------------------------- +import Test.Prelude +import qualified Hedgehog.Gen as Gen +import qualified Hedgehog.Range as Range +-------------------------------------------------------------------------------- +import ${1:$(s-chop-suffix "Test" yas-text)} +-------------------------------------------------------------------------------- diff --git a/users/glittershark/emacs.d/snippets/haskell-mode/undefined b/users/glittershark/emacs.d/snippets/haskell-mode/undefined new file mode 100644 index 000000000000..7bcd99b5716c --- /dev/null +++ b/users/glittershark/emacs.d/snippets/haskell-mode/undefined @@ -0,0 +1,6 @@ +# -*- mode: snippet -*- +# name: undefined +# key: u +# expand-env: ((yas-indent-line 'fixed) (yas-wrap-around-region 'nil)) +# -- +undefined$1 \ No newline at end of file diff --git a/users/glittershark/emacs.d/snippets/js2-mode/action-type b/users/glittershark/emacs.d/snippets/js2-mode/action-type new file mode 100644 index 000000000000..ef8d1a3863ee --- /dev/null +++ b/users/glittershark/emacs.d/snippets/js2-mode/action-type @@ -0,0 +1,4 @@ +# key: at +# name: action-type +# -- +export const ${1:FOO_BAR$(->> yas-text s-upcase (s-replace-all '(("-" . "_") (" " . 
"_"))))}: '${3:ns}/${1:$(-> yas-text s-dashed-words)}' = '$3/${1:$(-> yas-text s-dashed-words)}'$5 \ No newline at end of file diff --git a/users/glittershark/emacs.d/snippets/js2-mode/before b/users/glittershark/emacs.d/snippets/js2-mode/before new file mode 100644 index 000000000000..4569b6583143 --- /dev/null +++ b/users/glittershark/emacs.d/snippets/js2-mode/before @@ -0,0 +1,7 @@ +# -*- mode: snippet -*- +# name: before +# key: bef +# -- +before(function() { + $1 +}) diff --git a/users/glittershark/emacs.d/snippets/js2-mode/context b/users/glittershark/emacs.d/snippets/js2-mode/context new file mode 100644 index 000000000000..d83809f3c35e --- /dev/null +++ b/users/glittershark/emacs.d/snippets/js2-mode/context @@ -0,0 +1,7 @@ +# -*- mode: snippet -*- +# name: context +# key: context +# -- +context('$1', function() { + $2 +}) diff --git a/users/glittershark/emacs.d/snippets/js2-mode/describe b/users/glittershark/emacs.d/snippets/js2-mode/describe new file mode 100644 index 000000000000..bd0198181d02 --- /dev/null +++ b/users/glittershark/emacs.d/snippets/js2-mode/describe @@ -0,0 +1,6 @@ +# key: desc +# name: describe +# -- +describe('$1', () => { + $2 +}) \ No newline at end of file diff --git a/users/glittershark/emacs.d/snippets/js2-mode/expect b/users/glittershark/emacs.d/snippets/js2-mode/expect new file mode 100644 index 000000000000..eba41ef3309d --- /dev/null +++ b/users/glittershark/emacs.d/snippets/js2-mode/expect @@ -0,0 +1,5 @@ +# -*- mode: snippet -*- +# name: expect +# key: ex +# -- +expect($1).$2 \ No newline at end of file diff --git a/users/glittershark/emacs.d/snippets/js2-mode/function b/users/glittershark/emacs.d/snippets/js2-mode/function new file mode 100644 index 000000000000..b423044b4410 --- /dev/null +++ b/users/glittershark/emacs.d/snippets/js2-mode/function @@ -0,0 +1,6 @@ +# key: f +# name: function +# -- +function $1($2) { + $3 +} \ No newline at end of file diff --git a/users/glittershark/emacs.d/snippets/js2-mode/header b/users/glittershark/emacs.d/snippets/js2-mode/header new file mode 100644 index 000000000000..3e303764cb0b --- /dev/null +++ b/users/glittershark/emacs.d/snippets/js2-mode/header @@ -0,0 +1,6 @@ +# -*- mode: snippet -*- +# name: header +# key: hh +# expand-env: ((yas-indent-line 'fixed)) +# -- +//////////////////////////////////////////////////////////////////////////////// diff --git a/users/glittershark/emacs.d/snippets/js2-mode/it b/users/glittershark/emacs.d/snippets/js2-mode/it new file mode 100644 index 000000000000..a451cfc08a90 --- /dev/null +++ b/users/glittershark/emacs.d/snippets/js2-mode/it @@ -0,0 +1,7 @@ +# -*- mode: snippet -*- +# name: it +# key: it +# -- +it('$1', () => { + $2 +}) \ No newline at end of file diff --git a/users/glittershark/emacs.d/snippets/js2-mode/it-pending b/users/glittershark/emacs.d/snippets/js2-mode/it-pending new file mode 100644 index 000000000000..00da312e1096 --- /dev/null +++ b/users/glittershark/emacs.d/snippets/js2-mode/it-pending @@ -0,0 +1,5 @@ +# -*- mode: snippet -*- +# name: it-pending +# key: xi +# -- +it('$1')$0 \ No newline at end of file diff --git a/users/glittershark/emacs.d/snippets/js2-mode/module b/users/glittershark/emacs.d/snippets/js2-mode/module new file mode 100644 index 000000000000..dc79819d8979 --- /dev/null +++ b/users/glittershark/emacs.d/snippets/js2-mode/module @@ -0,0 +1,12 @@ +# key: module +# name: module +# expand-env: ((yas-indent-line (quote fixed))) +# condition: (= (length "module") (current-column)) +# -- +/** + * @fileOverview $1 + * @name 
${2:`(file-name-nondirectory (buffer-file-name))`} + * @author Griffin Smith + * @license Proprietary + */ +$3 \ No newline at end of file diff --git a/users/glittershark/emacs.d/snippets/js2-mode/record b/users/glittershark/emacs.d/snippets/js2-mode/record new file mode 100644 index 000000000000..0bb0f024367b --- /dev/null +++ b/users/glittershark/emacs.d/snippets/js2-mode/record @@ -0,0 +1,7 @@ +# -*- mode: snippet -*- +# name: record +# key: rec +# -- +export default class $1 extends Record({ + $2 +}) {} \ No newline at end of file diff --git a/users/glittershark/emacs.d/snippets/js2-mode/test b/users/glittershark/emacs.d/snippets/js2-mode/test new file mode 100644 index 000000000000..938d490a74e8 --- /dev/null +++ b/users/glittershark/emacs.d/snippets/js2-mode/test @@ -0,0 +1,7 @@ +# -*- mode: snippet -*- +# name: test +# key: test +# -- +test('$1', () => { + $2 +}) \ No newline at end of file diff --git a/users/glittershark/emacs.d/snippets/nix-mode/fetchFromGitHub b/users/glittershark/emacs.d/snippets/nix-mode/fetchFromGitHub new file mode 100644 index 000000000000..9b9373573048 --- /dev/null +++ b/users/glittershark/emacs.d/snippets/nix-mode/fetchFromGitHub @@ -0,0 +1,12 @@ +# -*- mode: snippet -*- +# name: fetchFromGitHub +# uuid: +# key: fetchFromGitHub +# condition: t +# -- +fetchFromGitHub { + owner = "$1"; + repo = "$2"; + rev = "$3"; + sha256 = "0000000000000000000000000000000000000000000000000000"; +} \ No newline at end of file diff --git a/users/glittershark/emacs.d/snippets/nix-mode/pythonPackage b/users/glittershark/emacs.d/snippets/nix-mode/pythonPackage new file mode 100644 index 000000000000..0a74c21e1857 --- /dev/null +++ b/users/glittershark/emacs.d/snippets/nix-mode/pythonPackage @@ -0,0 +1,16 @@ +# key: pypkg +# name: pythonPackage +# condition: t +# -- +${1:pname} = buildPythonPackage rec { + name = "\${pname}-\${version}"; + pname = "$1"; + version = "${2:1.0.0}"; + src = fetchPypi { + inherit pname version; + sha256 = "0000000000000000000000000000000000000000000000000000"; + }; + propagatedBuildInputs = with pythonSelf; [ + $3 + ]; +}; \ No newline at end of file diff --git a/users/glittershark/emacs.d/snippets/nix-mode/sha256 b/users/glittershark/emacs.d/snippets/nix-mode/sha256 new file mode 100644 index 000000000000..e3d52e1c0201 --- /dev/null +++ b/users/glittershark/emacs.d/snippets/nix-mode/sha256 @@ -0,0 +1,7 @@ +# -*- mode: snippet -*- +# name: sha256 +# uuid: +# key: sha256 +# condition: t +# -- +sha256 = "0000000000000000000000000000000000000000000000000000"; \ No newline at end of file diff --git a/users/glittershark/emacs.d/snippets/org-mode/SQL source block b/users/glittershark/emacs.d/snippets/org-mode/SQL source block new file mode 100644 index 000000000000..b5d43fd6bc01 --- /dev/null +++ b/users/glittershark/emacs.d/snippets/org-mode/SQL source block @@ -0,0 +1,6 @@ +# key: sql +# name: SQL source block +# -- +#+BEGIN_SRC sql ${1::async} +$2 +#+END_SRC diff --git a/users/glittershark/emacs.d/snippets/org-mode/combat b/users/glittershark/emacs.d/snippets/org-mode/combat new file mode 100644 index 000000000000..ef46062d09b4 --- /dev/null +++ b/users/glittershark/emacs.d/snippets/org-mode/combat @@ -0,0 +1,13 @@ +# -*- mode: snippet -*- +# name: combat +# uuid: +# key: combat +# condition: t +# -- +| | initiative | max hp | current hp | status | | +|-------------+------------+--------+------------+--------+------| +| Barty Barty | | | | | <--- | +| Hectoroth | | | | | | +| Xanadu | | | | | | +| Aurora | | | | | | +| EFB | | | | | | \ No newline at 
end of file diff --git a/users/glittershark/emacs.d/snippets/org-mode/date b/users/glittershark/emacs.d/snippets/org-mode/date new file mode 100644 index 000000000000..297529cdac64 --- /dev/null +++ b/users/glittershark/emacs.d/snippets/org-mode/date @@ -0,0 +1,5 @@ +# -*- mode: snippet -*- +# key: date +# name: date.org +# -- +[`(format-time-string "%Y-%m-%d")`]$0 diff --git a/users/glittershark/emacs.d/snippets/org-mode/date-time b/users/glittershark/emacs.d/snippets/org-mode/date-time new file mode 100644 index 000000000000..fde469276c3f --- /dev/null +++ b/users/glittershark/emacs.d/snippets/org-mode/date-time @@ -0,0 +1,5 @@ +# -*- mode: snippet -*- +# name: date-time +# key: dt +# -- +[`(format-time-string "%Y-%m-%d %H:%m:%S")`] \ No newline at end of file diff --git a/users/glittershark/emacs.d/snippets/org-mode/description b/users/glittershark/emacs.d/snippets/org-mode/description new file mode 100644 index 000000000000..a43bc95cc3ed --- /dev/null +++ b/users/glittershark/emacs.d/snippets/org-mode/description @@ -0,0 +1,7 @@ +# -*- mode: snippet -*- +# name: description +# key: desc +# -- +:DESCRIPTION: +$1 +:END: diff --git a/users/glittershark/emacs.d/snippets/org-mode/nologdone b/users/glittershark/emacs.d/snippets/org-mode/nologdone new file mode 100644 index 000000000000..e5be85d6b3c0 --- /dev/null +++ b/users/glittershark/emacs.d/snippets/org-mode/nologdone @@ -0,0 +1,5 @@ +# -*- mode: snippet -*- +# name: nologdone +# key: nologdone +# -- +#+STARTUP: nologdone$0 \ No newline at end of file diff --git a/users/glittershark/emacs.d/snippets/org-mode/python source block b/users/glittershark/emacs.d/snippets/org-mode/python source block new file mode 100644 index 000000000000..247ae51b0b78 --- /dev/null +++ b/users/glittershark/emacs.d/snippets/org-mode/python source block @@ -0,0 +1,6 @@ +# key: py +# name: Python source block +# -- +#+BEGIN_SRC python +$0 +#+END_SRC \ No newline at end of file diff --git a/users/glittershark/emacs.d/snippets/org-mode/reveal b/users/glittershark/emacs.d/snippets/org-mode/reveal new file mode 100644 index 000000000000..1bdbdfa5dc36 --- /dev/null +++ b/users/glittershark/emacs.d/snippets/org-mode/reveal @@ -0,0 +1,6 @@ +# key: reveal +# name: reveal +# condition: t +# -- +#+ATTR_REVEAL: :frag ${1:roll-in} +$0 \ No newline at end of file diff --git a/users/glittershark/emacs.d/snippets/org-mode/transaction b/users/glittershark/emacs.d/snippets/org-mode/transaction new file mode 100644 index 000000000000..37f2dd31caff --- /dev/null +++ b/users/glittershark/emacs.d/snippets/org-mode/transaction @@ -0,0 +1,7 @@ +# -*- mode: snippet -*- +# name: transaction +# key: begin +# -- +BEGIN; +$0 +ROLLBACK; \ No newline at end of file diff --git a/users/glittershark/emacs.d/snippets/python-mode/add_column b/users/glittershark/emacs.d/snippets/python-mode/add_column new file mode 100644 index 000000000000..47e83850d5b7 --- /dev/null +++ b/users/glittershark/emacs.d/snippets/python-mode/add_column @@ -0,0 +1,5 @@ +# -*- mode: snippet -*- +# name: add_column +# key: op.add_column +# -- +op.add_column('${1:table}', sa.Column('${2:name}', sa.${3:String()}))$0 diff --git a/users/glittershark/emacs.d/snippets/python-mode/decorate b/users/glittershark/emacs.d/snippets/python-mode/decorate new file mode 100644 index 000000000000..9448b45c9623 --- /dev/null +++ b/users/glittershark/emacs.d/snippets/python-mode/decorate @@ -0,0 +1,15 @@ +# -*- mode: snippet -*- +# name: decorate +# uuid: +# key: decorate +# condition: t +# -- +def wrap(inner): + @wraps(inner) + def 
wrapped(*args, **kwargs): + ret = inner(*args, **kwargs) + return ret + + return wrapped + +return wrap \ No newline at end of file diff --git a/users/glittershark/emacs.d/snippets/python-mode/dunder b/users/glittershark/emacs.d/snippets/python-mode/dunder new file mode 100644 index 000000000000..c49ec40a15cc --- /dev/null +++ b/users/glittershark/emacs.d/snippets/python-mode/dunder @@ -0,0 +1,7 @@ +# -*- mode: snippet -*- +# name: dunder +# uuid: +# key: du +# condition: t +# -- +__$1__$0 \ No newline at end of file diff --git a/users/glittershark/emacs.d/snippets/python-mode/name b/users/glittershark/emacs.d/snippets/python-mode/name new file mode 100644 index 000000000000..eca6d60b481f --- /dev/null +++ b/users/glittershark/emacs.d/snippets/python-mode/name @@ -0,0 +1,7 @@ +# -*- mode: snippet -*- +# name: name +# uuid: +# key: name +# condition: t +# -- +__name__ \ No newline at end of file diff --git a/users/glittershark/emacs.d/snippets/python-mode/op.get_bind.execute b/users/glittershark/emacs.d/snippets/python-mode/op.get_bind.execute new file mode 100644 index 000000000000..aba801c6baf9 --- /dev/null +++ b/users/glittershark/emacs.d/snippets/python-mode/op.get_bind.execute @@ -0,0 +1,7 @@ +# key: exec +# name: op.get_bind.execute +# -- +op.get_bind().execute( + """ + `(progn (sqlup-mode) "")`$1 + """) diff --git a/users/glittershark/emacs.d/snippets/python-mode/pdb b/users/glittershark/emacs.d/snippets/python-mode/pdb new file mode 100644 index 000000000000..6b5c0bbc0a73 --- /dev/null +++ b/users/glittershark/emacs.d/snippets/python-mode/pdb @@ -0,0 +1,7 @@ +# -*- mode: snippet -*- +# name: pdb +# uuid: +# key: pdb +# condition: t +# -- +import pdb; pdb.set_trace() \ No newline at end of file diff --git a/users/glittershark/emacs.d/snippets/rust-mode/#[macro_use] b/users/glittershark/emacs.d/snippets/rust-mode/#[macro_use] new file mode 100644 index 000000000000..fea942a337f6 --- /dev/null +++ b/users/glittershark/emacs.d/snippets/rust-mode/#[macro_use] @@ -0,0 +1,5 @@ +# key: macro_use +# name: #[macro_use] +# -- +#[macro_use] +${1:extern crate} ${2:something};$0 diff --git a/users/glittershark/emacs.d/snippets/rust-mode/async test b/users/glittershark/emacs.d/snippets/rust-mode/async test new file mode 100644 index 000000000000..b48e443cb8e1 --- /dev/null +++ b/users/glittershark/emacs.d/snippets/rust-mode/async test @@ -0,0 +1,10 @@ +# -*- mode: snippet -*- +# name: async test +# uuid: +# key: atest +# condition: t +# -- +#[tokio::test(threaded_scheduler)] +async fn ${1:test_name}() { + `%`$0 +} \ No newline at end of file diff --git a/users/glittershark/emacs.d/snippets/rust-mode/benchmark b/users/glittershark/emacs.d/snippets/rust-mode/benchmark new file mode 100644 index 000000000000..f1446923a0e4 --- /dev/null +++ b/users/glittershark/emacs.d/snippets/rust-mode/benchmark @@ -0,0 +1,10 @@ +# -*- mode: snippet -*- +# name: benchmark +# uuid: +# key: bench +# condition: t +# -- +#[bench] +fn ${1:benchmark_name}(b: &mut Bencher) { + `%`b.iter(|| $0); +} \ No newline at end of file diff --git a/users/glittershark/emacs.d/snippets/rust-mode/proptest b/users/glittershark/emacs.d/snippets/rust-mode/proptest new file mode 100644 index 000000000000..377b3cfcf60c --- /dev/null +++ b/users/glittershark/emacs.d/snippets/rust-mode/proptest @@ -0,0 +1,10 @@ +# -*- mode: snippet -*- +# name: proptest +# uuid: +# key: proptest +# condition: t +# -- +#[proptest] +fn ${1:test_name}($2) { + `%`$0 +} \ No newline at end of file diff --git a/users/glittershark/emacs.d/snippets/rust-mode/tests 
b/users/glittershark/emacs.d/snippets/rust-mode/tests new file mode 100644 index 000000000000..0a476ab58661 --- /dev/null +++ b/users/glittershark/emacs.d/snippets/rust-mode/tests @@ -0,0 +1,9 @@ +# key: tests +# name: test module +# -- +#[cfg(test)] +mod ${1:tests} { + use super::*; + + $0 +} diff --git a/users/glittershark/emacs.d/snippets/snippet-mode/indent b/users/glittershark/emacs.d/snippets/snippet-mode/indent new file mode 100644 index 000000000000..d38ffceafbad --- /dev/null +++ b/users/glittershark/emacs.d/snippets/snippet-mode/indent @@ -0,0 +1,5 @@ +# -*- mode: snippet -*- +# name: indent +# key: indent +# -- +# expand-env: ((yas-indent-line 'fixed)) \ No newline at end of file diff --git a/users/glittershark/emacs.d/snippets/sql-mode/count(*) group by b/users/glittershark/emacs.d/snippets/sql-mode/count(*) group by new file mode 100644 index 000000000000..6acc46ff397a --- /dev/null +++ b/users/glittershark/emacs.d/snippets/sql-mode/count(*) group by @@ -0,0 +1,5 @@ +# -*- mode: snippet -*- +# name: count(*) group by +# key: countby +# -- +SELECT count(*), ${1:column} FROM ${2:table} GROUP BY $1; diff --git a/users/glittershark/emacs.d/snippets/text-mode/date b/users/glittershark/emacs.d/snippets/text-mode/date new file mode 100644 index 000000000000..7b9431147011 --- /dev/null +++ b/users/glittershark/emacs.d/snippets/text-mode/date @@ -0,0 +1,5 @@ +# -*- coding: utf-8 -*- +# name: date +# key: date +# -- +`(format-time-string "%Y-%m-%d")`$0 \ No newline at end of file diff --git a/users/glittershark/emacs.d/splitjoin.el b/users/glittershark/emacs.d/splitjoin.el new file mode 100644 index 000000000000..ea4dcfc39318 --- /dev/null +++ b/users/glittershark/emacs.d/splitjoin.el @@ -0,0 +1,192 @@ +;;; private/grfn/splitjoin.el -*- lexical-binding: t; -*- + +(require 'dash) +(load! "utils") + +;;; +;;; Vars +;;; + +(defvar +splitjoin/split-callbacks '() + "Alist mapping major mode symbol names to lists of split callbacks") + +(defvar +splitjoin/join-callbacks '() + "Alist mapping major mode symbol names to lists of join callbacks") + + + +;;; +;;; Definition macros +;;; + +(defmacro +splitjoin/defsplit (mode name &rest body) + `(setf + (alist-get ',name (alist-get ,mode +splitjoin/split-callbacks)) + (λ! () ,@body))) + +(defmacro +splitjoin/defjoin (mode name &rest body) + `(setf + (alist-get ',name (alist-get ,mode +splitjoin/join-callbacks)) + (λ! 
() ,@body))) + +;;; +;;; Commands +;;; + +(defun +splitjoin/split () + (interactive) + (when-let (callbacks (->> +splitjoin/split-callbacks + (alist-get major-mode) + (-map #'cdr))) + (find-if #'funcall callbacks))) + +(defun +splitjoin/join () + (interactive) + (when-let (callbacks (->> +splitjoin/join-callbacks + (alist-get major-mode) + (-map #'cdr))) + (find-if #'funcall callbacks))) + + +;;; +;;; Splits and joins +;;; TODO: this should probably go in a file-per-language +;;; + +(+splitjoin/defjoin + 'elixir-mode + join-do + (let* ((function-pattern (rx (and (zero-or-more whitespace) + "do" + (zero-or-more whitespace) + (optional (and "#" (zero-or-more anything))) + eol))) + (end-pattern (rx bol + (zero-or-more whitespace) + "end" + (zero-or-more whitespace) + eol)) + (else-pattern (rx bol + (zero-or-more whitespace) + "else" + (zero-or-more whitespace) + eol)) + (lineno (line-number-at-pos)) + (line (thing-at-point 'line t))) + (when-let ((do-start-pos (string-match function-pattern line))) + (cond + ((string-match-p end-pattern (get-line (inc lineno))) + (modify-then-indent + (goto-line-char do-start-pos) + (insert ",") + (goto-char (line-end-position)) + (insert ": nil") + (line-move 1) + (delete-line)) + t) + + ((string-match-p end-pattern (get-line (+ 2 lineno))) + (modify-then-indent + (goto-line-char do-start-pos) + (insert ",") + (goto-char (line-end-position)) + (insert ":") + (join-line t) + (line-move 1) + (delete-line)) + t) + + ((and (string-match-p else-pattern (get-line (+ 2 lineno))) + (string-match-p end-pattern (get-line (+ 4 lineno)))) + (modify-then-indent + (goto-line-char do-start-pos) + (insert ",") + (goto-char (line-end-position)) + (insert ":") + (join-line t) + (goto-eol) + (insert ",") + (join-line t) + (goto-eol) + (insert ":") + (join-line t) + (line-move 1) + (delete-line)) + t))))) + +(comment + (string-match (rx (and bol + "if " + (one-or-more anything) + "," + (zero-or-more whitespace) + "do:" + (one-or-more anything) + "," + (zero-or-more whitespace) + "else:" + (one-or-more anything))) + "if 1, do: nil, else: nil") + + ) + +(+splitjoin/defsplit + 'elixir-mode + split-do-with-optional-else + (let* ((if-with-else-pattern (rx (and bol + (one-or-more anything) + "," + (zero-or-more whitespace) + "do:" + (one-or-more anything) + (optional + "," + (zero-or-more whitespace) + "else:" + (one-or-more anything))))) + (current-line (get-line))) + (when (string-match if-with-else-pattern current-line) + (modify-then-indent + (assert (goto-regex-on-line ",[[:space:]]*do:")) + (delete-char 1) + (assert (goto-regex-on-line ":")) + (delete-char 1) + (insert "\n") + (when (goto-regex-on-line-r ",[[:space:]]*else:") + (delete-char 1) + (insert "\n") + (assert (goto-regex-on-line ":")) + (delete-char 1) + (insert "\n")) + (goto-eol) + (insert "\nend")) + t))) + +(comment + (+splitjoin/defsplit 'elixir-mode split-def + (let ((function-pattern (rx (and "," + (zero-or-more whitespace) + "do:"))) + (line (thing-at-point 'line t))) + (when-let (idx (string-match function-pattern line)) + (let ((beg (line-beginning-position)) + (orig-line-char (- (point) (line-beginning-position)))) + (save-mark-and-excursion + (goto-line-char idx) + (delete-char 1) + (goto-line-char (string-match ":" (thing-at-point 'line t))) + (delete-char 1) + (insert "\n") + (goto-eol) + (insert "\n") + (insert "end") + (evil-indent beg (+ (line-end-position) 1)))) + (goto-line-char orig-line-char) + t)))) + +(+splitjoin/defjoin + 'elixir-mode + join-if-with-else + (let* ((current-line (thing-at-point 
'line))))) + +(provide 'splitjoin) diff --git a/users/glittershark/emacs.d/sql-strings.el b/users/glittershark/emacs.d/sql-strings.el new file mode 100644 index 000000000000..37e22af421c6 --- /dev/null +++ b/users/glittershark/emacs.d/sql-strings.el @@ -0,0 +1,75 @@ +;;; ~/.doom.d/sql-strings.el -*- lexical-binding: t; -*- + +;;; https://www.emacswiki.org/emacs/StringAtPoint +(defun ourcomments-string-or-comment-bounds-1 (what) + (save-restriction + (widen) + (let* ((here (point)) + ;; Fix-me: when on end-point, how to handle that and which should be last hit point? + (state (parse-partial-sexp (point-min) (1+ here))) + (type (if (nth 3 state) + 'string + (if (nth 4 state) + 'comment))) + (start (when type (nth 8 state))) + end) + (unless start + (setq state (parse-partial-sexp (point-min) here)) + (setq type (if (nth 3 state) + 'string + (if (nth 4 state) + 'comment))) + (setq start (when type (nth 8 state)))) + (unless (or (not what) + (eq what type)) + (setq start nil)) + (if (not start) + (progn + (goto-char here) + nil) + (setq state (parse-partial-sexp (1+ start) (point-max) + nil nil state 'syntax-table)) + (setq end (point)) + (goto-char here) + (cons start end))))) + +(defun ourcomments-bounds-of-string-at-point () + "Return bounds of string at point if any." + (ourcomments-string-or-comment-bounds-1 'string)) + +(put 'string 'bounds-of-thing-at-point 'ourcomments-bounds-of-string-at-point) + +(defun -sanitize-sql-string (str) + (->> str + (downcase) + (s-trim) + (replace-regexp-in-string + (rx (or (and string-start (or "\"\"\"" + "\"")) + (and (or "\"\"\"" + "\"") + string-end))) + "") + (s-trim))) + +(defun sql-string-p (str) + "Returns 't if STR looks like a string literal for a SQL statement" + (setq str (-sanitize-sql-string str)) + (or (s-starts-with? "select" str))) + +;;; tests + +(require 'ert) + +(ert-deftest sanitize-sql-string-test () + (should (string-equal "select * from foo;" + (-sanitize-sql-string + "\"\"\"SELECT * FROM foo;\n\n\"\"\"")))) + +(ert-deftest test-sql-string-p () + (dolist (str '("SELECT * FROM foo;" + "select * from foo;")) + (should (sql-string-p str))) + + (dolist (str '("not a QUERY")) + (should-not (sql-string-p str)))) diff --git a/users/glittershark/emacs.d/tests/splitjoin_test.el b/users/glittershark/emacs.d/tests/splitjoin_test.el new file mode 100644 index 000000000000..6495a1a5952e --- /dev/null +++ b/users/glittershark/emacs.d/tests/splitjoin_test.el @@ -0,0 +1,68 @@ +;;; private/grfn/tests/splitjoin_test.el -*- lexical-binding: t; -*- + +(require 'ert) +;; (load! 'splitjoin) +;; (load! 
'utils) +; (require 'splitjoin) + +;;; Helpers + +(defvar *test-buffer* nil) +(make-variable-buffer-local '*test-buffer*) + +(defun test-buffer () + (when (not *test-buffer*) + (setq *test-buffer* (get-buffer-create "test-buffer"))) + *test-buffer*) + +(defmacro with-test-buffer (&rest body) + `(with-current-buffer (test-buffer) + ,@body)) + +(defun set-test-buffer-mode (mode) + (let ((mode (if (functionp mode) mode + (-> mode symbol-name (concat "-mode") intern)))) + (assert (functionp mode)) + (with-test-buffer (funcall mode)))) + +(defmacro set-test-buffer-contents (contents) + (with-test-buffer + (erase-buffer) + (insert contents))) + +(defun test-buffer-contents () + (with-test-buffer (substring-no-properties (buffer-string)))) + +(defmacro assert-test-buffer-contents (expected-contents) + `(should (equal (string-trim (test-buffer-contents)) + (string-trim ,expected-contents)))) + +(defmacro should-join-to (mode original-contents expected-contents) + `(progn + (set-test-buffer-mode ,mode) + (set-test-buffer-contents ,original-contents) + (with-test-buffer (+splitjoin/join)) + (assert-test-buffer-contents ,expected-contents))) + +(defmacro should-split-to (mode original-contents expected-contents) + `(progn + (set-test-buffer-mode ,mode) + (set-test-buffer-contents ,original-contents) + (with-test-buffer (+splitjoin/split)) + (assert-test-buffer-contents ,expected-contents))) + +(defmacro should-splitjoin (mode joined-contents split-contents) + `(progn + (should-split-to ,mode ,joined-contents ,split-contents) + (should-join-to ,mode ,split-contents ,joined-contents))) + +;;; Tests + +;; Elixir +(ert-deftest elixir-if-splitjoin-test () + (should-splitjoin 'elixir + "if predicate?(), do: result" + "if predicate?() do + result +end")) + diff --git a/users/glittershark/emacs.d/themes/grfn-solarized-light-theme.el b/users/glittershark/emacs.d/themes/grfn-solarized-light-theme.el new file mode 100644 index 000000000000..ae00b6b5fc75 --- /dev/null +++ b/users/glittershark/emacs.d/themes/grfn-solarized-light-theme.el @@ -0,0 +1,115 @@ +(require 'solarized) +(eval-when-compile + (require 'solarized-palettes)) + +;; (defun grfn-solarized-theme () +;; (custom-theme-set-faces +;; theme-name +;; `(font-lock-doc-face ((,class (:foreground ,s-base1)))) +;; `(font-lock-preprocessor-face ((,class (:foreground ,red)))) +;; `(font-lock-keyword-face ((,class (:foreground ,green)))) + +;; `(elixir-attribute-face ((,class (:foreground ,blue)))) +;; `(elixir-atom-face ((,class (:foreground ,cyan)))))) + +(setq +solarized-s-base03 "#002b36" + +solarized-s-base02 "#073642" + ;; emphasized content + +solarized-s-base01 "#586e75" + ;; primary content + +solarized-s-base00 "#657b83" + +solarized-s-base0 "#839496" + ;; comments + +solarized-s-base1 "#93a1a1" + ;; background highlight light + +solarized-s-base2 "#eee8d5" + ;; background light + +solarized-s-base3 "#fdf6e3" + + ;; Solarized accented colors + +solarized-yellow "#b58900" + +solarized-orange "#cb4b16" + +solarized-red "#dc322f" + +solarized-magenta "#d33682" + +solarized-violet "#6c71c4" + +solarized-blue "#268bd2" + +solarized-cyan "#2aa198" + +solarized-green "#859900" + + ;; Darker and lighter accented colors + ;; Only use these in exceptional circumstances! 
+ +solarized-yellow-d "#7B6000" + +solarized-yellow-l "#DEB542" + +solarized-orange-d "#8B2C02" + +solarized-orange-l "#F2804F" + +solarized-red-d "#990A1B" + +solarized-red-l "#FF6E64" + +solarized-magenta-d "#93115C" + +solarized-magenta-l "#F771AC" + +solarized-violet-d "#3F4D91" + +solarized-violet-l "#9EA0E5" + +solarized-blue-d "#00629D" + +solarized-blue-l "#69B7F0" + +solarized-cyan-d "#00736F" + +solarized-cyan-l "#69CABF" + +solarized-green-d "#546E00" + +solarized-green-l "#B4C342") + + +(deftheme grfn-solarized-light "The light variant of Griffin's solarized theme") + +(setq grfn-solarized-faces + '("Griffin's solarized theme customization" + (custom-theme-set-faces + theme-name + `(font-lock-doc-face ((t (:foreground ,+solarized-s-base1)))) + `(font-lock-preprocessor-face ((t (:foreground ,+solarized-red)))) + `(font-lock-keyword-face ((t (:foreground ,+solarized-green)))) + + `(elixir-attribute-face ((t (:foreground ,+solarized-blue)))) + `(elixir-atom-face ((t (:foreground ,+solarized-cyan)))) + `(agda2-highlight-keyword-face ((t (:foreground ,green)))) + `(agda2-highlight-string-face ((t (:foreground ,cyan)))) + `(agda2-highlight-number-face ((t (:foreground ,violet)))) + `(agda2-highlight-symbol-face ((((background ,base3)) (:foreground ,base01)))) + `(agda2-highlight-primitive-type-face ((t (:foreground ,blue)))) + `(agda2-highlight-bound-variable-face ((t nil))) + `(agda2-highlight-inductive-constructor-face ((t (:foreground ,green)))) + `(agda2-highlight-coinductive-constructor-face ((t (:foreground ,yellow)))) + `(agda2-highlight-datatype-face ((t (:foreground ,blue)))) + `(agda2-highlight-field-face ((t (:foreground ,red)))) + `(agda2-highlight-function-face ((t (:foreground ,blue)))) + `(agda2-highlight-module-face ((t (:foreground ,yellow)))) + `(agda2-highlight-postulate-face ((t (:foreground ,blue)))) + `(agda2-highlight-primitive-face ((t (:foreground ,blue)))) + `(agda2-highlight-record-face ((t (:foreground ,blue)))) + `(agda2-highlight-dotted-face ((t nil))) + `(agda2-highlight-operator-face ((t nil))) + `(agda2-highlight-error-face ((t (:foreground ,red :underline t)))) + `(agda2-highlight-unsolved-meta-face ((t (:background ,base2)))) + `(agda2-highlight-unsolved-constraint-face ((t (:background ,base2)))) + `(agda2-highlight-termination-problem-face ((t (:background ,orange :foreground ,base03)))) + `(agda2-highlight-incomplete-pattern-face ((t (:background ,orange :foreground ,base03)))) + `(agda2-highlight-typechecks-face ((t (:background ,cyan :foreground ,base03)))) + + `(font-lock-doc-face ((t (:foreground ,+solarized-s-base1)))) + `(font-lock-preprocessor-face ((t (:foreground ,+solarized-red)))) + `(font-lock-keyword-face ((t (:foreground ,+solarized-green :bold nil)))) + `(font-lock-builtin-face ((t (:foreground ,+solarized-s-base01 + :bold t)))) + + `(elixir-attribute-face ((t (:foreground ,+solarized-blue)))) + `(elixir-atom-face ((t (:foreground ,+solarized-cyan)))) + `(linum ((t (:background ,+solarized-s-base2 :foreground ,+solarized-s-base1)))) + `(line-number ((t (:background ,+solarized-s-base2 :foreground ,+solarized-s-base1)))) + + `(haskell-operator-face ((t (:foreground ,+solarized-green)))) + `(haskell-keyword-face ((t (:foreground ,+solarized-cyan)))) + + `(org-drawer ((t (:foreground ,+solarized-s-base1 + :bold t))))))) + +(solarized-with-color-variables + 'light 'grfn-solarized-light solarized-light-color-palette-alist) + +(provide-theme 'grfn-solarized-light) diff --git a/users/glittershark/emacs.d/utils.el 
b/users/glittershark/emacs.d/utils.el new file mode 100644 index 000000000000..1bfac3ca8b33 --- /dev/null +++ b/users/glittershark/emacs.d/utils.el @@ -0,0 +1,114 @@ +;;; private/grfn/utils.el -*- lexical-binding: t; -*- + + +;; Elisp Extras + +(defmacro comment (&rest _body) + "Comment out one or more s-expressions" + nil) + +(defun inc (x) "Returns x + 1" (+ 1 x)) +(defun dec (x) "Returns x - 1" (- x 1)) + +(defun average (ns) + "Arithmetic mean of xs" + (if (null ns) nil + (/ (apply #'+ ns) + (length ns)))) + +(comment + (average (list 1 2 3 4)) + ) + +;; +;; Text editing utils +;; + +;; Reading strings + +(defun get-char (&optional point) + "Get the character at the given `point' (defaulting to the current point), +without properties" + (let ((point (or point (point)))) + (buffer-substring-no-properties point (+ 1 point)))) + +(defun get-line (&optional lineno) + "Read the line number `lineno', or the current line if `lineno' is nil, and +return it as a string stripped of all text properties" + (let ((current-line (line-number-at-pos))) + (if (or (not lineno) + (= current-line lineno)) + (thing-at-point 'line t) + (save-mark-and-excursion + (line-move (- lineno (line-number-at-pos))) + (thing-at-point 'line t))))) + +(defun get-line-point () + "Get the position in the current line of the point" + (- (point) (line-beginning-position))) + +;; Moving in the file + +(defun goto-line-char (pt) + "Moves the point to the given position expressed as an offset from the start +of the line" + (goto-char (+ (line-beginning-position) pt))) + +(defun goto-eol () + "Moves to the end of the current line" + (goto-char (line-end-position))) + +(defun goto-regex-on-line (regex) + "Moves the point to the first occurrence of `regex' on the current line. +Returns nil if the regex did not match, non-nil otherwise" + (when-let ((current-line (get-line)) + (line-char (string-match regex current-line))) + (goto-line-char line-char))) + +(defun goto-regex-on-line-r (regex) + "Moves the point to the *last* occurrence of `regex' on the current line. +Returns nil if the regex did not match, non-nil otherwise" + (when-let ((current-line (get-line)) + (modified-regex (concat ".*\\(" regex "\\)")) + (_ (string-match modified-regex current-line)) + (match-start (match-beginning 1))) + (goto-line-char match-start))) + +(comment + (progn + (string-match (rx (and (zero-or-more anything) + (group "foo" "foo"))) + "foofoofoo") + (match-beginning 1))) + +;; Changing file contents + +(defun delete-line () + "Remove the line at the current point" + (delete-region (line-beginning-position) + (inc (line-end-position)))) + +(defmacro modify-then-indent (&rest body) + "Modify text in the buffer according to body, then re-indent from where the + cursor started to where the cursor ended up, then return the cursor to where + it started." 
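+  ;; A rough usage sketch (hypothetical call, not part of this file): something
+  ;; like (modify-then-indent (insert "foo")) runs the edit inside
+  ;; `atomic-change-group', re-indents from the line where point started with
+  ;; `evil-indent', and then restores point to its original column.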
+ `(let ((beg (line-beginning-position)) + (orig-line-char (- (point) (line-beginning-position)))) + (atomic-change-group + (save-mark-and-excursion + ,@body + (evil-indent beg (+ (line-end-position) 1)))) + (goto-line-char orig-line-char))) + +(pcase-defmacro s-starts-with (prefix) + `(pred (s-starts-with-p ,prefix))) + +(pcase-defmacro s-contains (needle &optional ignore-case) + `(pred (s-contains-p ,needle + ,@(when ignore-case (list ignore-case))))) + +(comment + (pcase "foo" + ((s-contains "bar") 1) + ((s-contains "o") 2)) + ) diff --git a/users/glittershark/emacs.d/vterm.el b/users/glittershark/emacs.d/vterm.el new file mode 100644 index 000000000000..02d6223a4cc5 --- /dev/null +++ b/users/glittershark/emacs.d/vterm.el @@ -0,0 +1,24 @@ +;;; ../code/depot/users/glittershark/emacs.d/vterm.el -*- lexical-binding: t; -*- + +(defun require-vterm () + (add-to-list + 'load-path + (concat + (s-trim + (shell-command-to-string + "nix-build --no-out-link ~/code/depot -A third_party.emacs.vterm")) + "/share/emacs/site-lisp/elpa/vterm-20200515.1412")) + (require 'vterm)) + +(defun +grfn/vterm-setup () + (hide-mode-line-mode) + (setq-local evil-collection-vterm-send-escape-to-vterm-p t)) + +(add-hook 'vterm-mode-hook #'+grfn/vterm-setup) + +(map! (:map vterm-mode-map + "<C-escape>" #'evil-normal-state)) + +(comment + (require-vterm) + ) diff --git a/users/glittershark/gws.fyi/.envrc b/users/glittershark/gws.fyi/.envrc new file mode 100644 index 000000000000..be81feddb1a5 --- /dev/null +++ b/users/glittershark/gws.fyi/.envrc @@ -0,0 +1 @@ +eval "$(lorri direnv)" \ No newline at end of file diff --git a/users/glittershark/gws.fyi/.gitignore b/users/glittershark/gws.fyi/.gitignore new file mode 100644 index 000000000000..7783c2834f92 --- /dev/null +++ b/users/glittershark/gws.fyi/.gitignore @@ -0,0 +1,2 @@ +result +letsencrypt diff --git a/users/glittershark/gws.fyi/Makefile b/users/glittershark/gws.fyi/Makefile new file mode 100644 index 000000000000..83b60ef9bb52 --- /dev/null +++ b/users/glittershark/gws.fyi/Makefile @@ -0,0 +1,30 @@ +.PHONY: deploy + +deploy: + @$(shell nix-build `git rev-parse --show-toplevel` -A 'users.glittershark."gws.fyi"') + +renew: + @echo Renewing... 
+ @certbot certonly \ + --manual \ + --domain www.gws.fyi \ + --preferred-challenges dns \ + --server https://acme-v02.api.letsencrypt.org/directory \ + --agree-tos \ + --work-dir $(shell pwd)/letsencrypt/work \ + --logs-dir $(shell pwd)/letsencrypt/logs \ + --config-dir $(shell pwd)/letsencrypt/config + @echo "Reimporting certificate" + @aws acm import-certificate \ + --certificate file://letsencrypt/config/live/www.gws.fyi/cert.pem \ + --certificate-chain file://letsencrypt/config/live/www.gws.fyi/fullchain.pem \ + --private-key file://letsencrypt/config/live/www.gws.fyi/privkey.pem \ + --certificate-arn arn:aws:acm:us-east-1:797089351721:certificate/628e54f3-55f9-49c0-811a-eba516b68e30 \ + --region us-east-1 + +backup: + @tarsnap -cf $(shell uname -n)-letsencrypt-$(shell date +%Y-%m-%d_%H-%M-%S) \ + letsencrypt/ + +open: + $$BROWSER "https://www.gws.fyi" diff --git a/users/glittershark/gws.fyi/config.el b/users/glittershark/gws.fyi/config.el new file mode 100644 index 000000000000..b05d897d3ddb --- /dev/null +++ b/users/glittershark/gws.fyi/config.el @@ -0,0 +1,6 @@ +(require 'org) + +(setq org-html-postamble nil) + +(defadvice org-export-grab-title-from-buffer + (around org-export-grab-title-from-buffer-disable activate)) diff --git a/users/glittershark/gws.fyi/default.nix b/users/glittershark/gws.fyi/default.nix new file mode 100644 index 000000000000..6ec1262f67be --- /dev/null +++ b/users/glittershark/gws.fyi/default.nix @@ -0,0 +1,21 @@ +args@{ pkgs, ... }: +with pkgs; +let + site = import ./site.nix args; + resume = import ../resume args; + bucket = "s3://gws.fyi"; + distributionID = "E2ST43JNBH8C64"; + website = + runCommand "gws.fyi" { } '' + mkdir -p $out + cp ${site.index} $out/index.html + cp ${resume} $out/resume.pdf + ''; + +in writeShellScript "deploy.sh" '' + ${awscli}/bin/aws s3 sync ${website}/ ${bucket} + ${awscli}/bin/aws cloudfront create-invalidation \ + --distribution-id "${distributionID}" \ + --paths "/*" + echo "Deployed to http://gws.fyi" +'' diff --git a/users/glittershark/gws.fyi/index.org b/users/glittershark/gws.fyi/index.org new file mode 100644 index 000000000000..a9413a69ba70 --- /dev/null +++ b/users/glittershark/gws.fyi/index.org @@ -0,0 +1,22 @@ +#+OPTIONS: title:nil +#+HTML_HEAD: <title>griffin smith</title> + +my name is griffin ward smith (aka grfn, glittershark, gws) and i'm a software +engineer and musician + +code +- [[https://github.com/glittershark/][github]] +- [[https://cs.tvl.fyi/depot/-/tree/users/glittershark][my directory in the tvl monorepo]] + +music +- https://sacrosanct.bandcamp.com/, a post-rock project with a [[https://bandcamp.com/h34rken][friend of mine]] +- [[https://soundcloud.com/missingggg][my current soundcloud]], releasing instrumental hip-hop under the name *missing* +- you can also find a log of all the music I listen to [[https://www.last.fm/user/wildgriffin45][on last.fm]] + +contact + +- [[mailto:web@gws.fyi][web@gws.fyi]] +- [[https://twitter.com/glittershark1][twitter]] +- https://keybase.io/glittershark +- grfn on freenode +- [[http://keys.gnupg.net/pks/lookup?op=get&search=0x44EF5B5E861C09A7][gpg key: 0F11A989879E8BBBFDC1E23644EF5B5E861C09A7]] diff --git a/users/glittershark/gws.fyi/orgExportHTML.nix b/users/glittershark/gws.fyi/orgExportHTML.nix new file mode 100644 index 000000000000..ad5a24795826 --- /dev/null +++ b/users/glittershark/gws.fyi/orgExportHTML.nix @@ -0,0 +1,54 @@ +{ pkgs, ... 
}: + +with pkgs; +with lib; + +let + + emacsWithPackages = (pkgs.emacsPackagesGen pkgs.emacs27).emacsWithPackages; + + emacs = emacsWithPackages (p: with p; [ + org + ]); + +in + +opts: + +let + src = if isAttrs opts then opts.src else opts; + headline = if isAttrs opts then opts.headline else null; + + bn = builtins.baseNameOf src; + filename = elemAt (splitString "." bn) 0; + + outName = + if isNull headline + then + let bn = builtins.baseNameOf src; + filename = elemAt (splitString "." bn) 0; + in filename + ".html" + else "${filename}-${replaceStrings [" "] ["-"] filename}.html"; + + escapeDoubleQuotes = replaceStrings ["\""] ["\\\""]; + + navToHeadline = optionalString (! isNull headline) '' + (search-forward "${escapeDoubleQuotes headline}") + (org-narrow-to-subtree) + ''; + +in + +runCommand outName {} '' + cp ${src} file.org + echo "${emacs}/bin/emacs --batch" + ${emacs}/bin/emacs --batch \ + --load ${./config.el} \ + --visit file.org \ + --eval "(progn + ${escapeDoubleQuotes navToHeadline} + (org-html-export-to-html))" \ + --kill + substitute file.html $out \ + --replace '<title>‎</title>' "" +'' diff --git a/users/glittershark/gws.fyi/shell.nix b/users/glittershark/gws.fyi/shell.nix new file mode 100644 index 000000000000..846bdb6677a3 --- /dev/null +++ b/users/glittershark/gws.fyi/shell.nix @@ -0,0 +1,9 @@ +with import <nixpkgs> { config.allowUnfree = true; }; +mkShell { + buildInputs = [ + awscli + gnumake + letsencrypt + tarsnap + ]; +} diff --git a/users/glittershark/gws.fyi/site.nix b/users/glittershark/gws.fyi/site.nix new file mode 100644 index 000000000000..a74bee0bef0a --- /dev/null +++ b/users/glittershark/gws.fyi/site.nix @@ -0,0 +1,11 @@ +args@{ pkgs ? import <nixpkgs> {}, ... }: + +let + + orgExportHTML = import ./orgExportHTML.nix args; + +in + +{ + index = orgExportHTML ./index.org; +} diff --git a/users/glittershark/keyboard/.gitignore b/users/glittershark/keyboard/.gitignore new file mode 100644 index 000000000000..b2be92b7db01 --- /dev/null +++ b/users/glittershark/keyboard/.gitignore @@ -0,0 +1 @@ +result diff --git a/users/glittershark/keyboard/README.org b/users/glittershark/keyboard/README.org new file mode 100644 index 000000000000..b085883a1049 --- /dev/null +++ b/users/glittershark/keyboard/README.org @@ -0,0 +1,10 @@ +This repository contains the source of the keyboard layout for my Ergodox EZ, +plus build tooling based on Nix. + +To flash to an Ergodox EZ that's connected to your computer via USB, run: + +#+BEGIN_SRC shell +./flash +#+END_SRC + +then press the reset switch on the keyboard. diff --git a/users/glittershark/keyboard/default.nix b/users/glittershark/keyboard/default.nix new file mode 100644 index 000000000000..7e004860672a --- /dev/null +++ b/users/glittershark/keyboard/default.nix @@ -0,0 +1,63 @@ +{ pkgs, ... 
}: + +with pkgs; + +let avrlibc = pkgsCross.avr.libcCross; in + +rec { + qmkSource = fetchgit { + url = "https://github.com/qmk/qmk_firmware"; + rev = "ab1650606c36f85018257aba65d9c3ff8ec42e71"; + sha256 = "1k59flkvhjzmfl0yz9z37lqhvad7m9r5wy1p1sjk5274rsmylh79"; + fetchSubmodules = true; + }; + + layout = stdenv.mkDerivation rec { + name = "ergodox_ez_grfn.hex"; + + src = qmkSource; + + buildInputs = [ + dfu-programmer + dfu-util + diffutils + git + python3 + pkgsCross.avr.buildPackages.binutils + pkgsCross.avr.buildPackages.gcc8 + avrlibc + avrdude + ]; + + AVR_CFLAGS = [ + "-isystem ${avrlibc}/avr/include" + "-L${avrlibc}/avr/lib/avr5" + ]; + + AVR_ASFLAGS = AVR_CFLAGS; + + patches = [ ./increase-tapping-delay.patch ]; + + postPatch = '' + mkdir keyboards/ergodox_ez/keymaps/grfn + cp ${./keymap.c} keyboards/ergodox_ez/keymaps/grfn/keymap.c + ''; + + buildPhase = '' + make ergodox_ez:grfn + ''; + + installPhase = '' + cp ergodox_ez_grfn.hex $out + ''; + }; + + flash = writeShellScript "flash.sh" '' + ${teensy-loader-cli}/bin/teensy-loader-cli \ + -v \ + --mcu=atmega32u4 \ + -w ${layout} + ''; + + meta.targets = [ "layout" ]; +} diff --git a/users/glittershark/keyboard/flash b/users/glittershark/keyboard/flash new file mode 100755 index 000000000000..efc89463c84b --- /dev/null +++ b/users/glittershark/keyboard/flash @@ -0,0 +1,2 @@ +#!/usr/bin/env bash +exec "$(nix-build --no-out-link ../../.. -A users.glittershark.keyboard.flash)" diff --git a/users/glittershark/keyboard/increase-tapping-delay.patch b/users/glittershark/keyboard/increase-tapping-delay.patch new file mode 100644 index 000000000000..316c435fed6c --- /dev/null +++ b/users/glittershark/keyboard/increase-tapping-delay.patch @@ -0,0 +1,13 @@ +diff --git a/keyboards/ergodox_ez/config.h b/keyboards/ergodox_ez/config.h +index ae70c4f2e..776110c09 100644 +--- a/keyboards/ergodox_ez/config.h ++++ b/keyboards/ergodox_ez/config.h +@@ -45,7 +45,7 @@ along with this program. If not, see <http://www.gnu.org/licenses/>. + /* define if matrix has ghost */ + //#define MATRIX_HAS_GHOST + +-#define TAPPING_TERM 200 ++#define TAPPING_TERM 150 + #define IGNORE_MOD_TAP_INTERRUPT // this makes it possible to do rolling combos (zx) with keys that convert to other keys on hold (z becomes ctrl when you hold it, and when this option isn't enabled, z rapidly followed by x actually sends Ctrl-x. That's bad.) + + /* Mechanical locking support. 
Use KC_LCAP, KC_LNUM or KC_LSCR instead in keymap */ diff --git a/users/glittershark/keyboard/keymap.c b/users/glittershark/keyboard/keymap.c new file mode 100644 index 000000000000..fbb28c9aac59 --- /dev/null +++ b/users/glittershark/keyboard/keymap.c @@ -0,0 +1,206 @@ +#include QMK_KEYBOARD_H +#include "debug.h" +#include "action_layer.h" +#include "version.h" + + +#include "keymap_german.h" + +#include "keymap_nordic.h" + + + +enum custom_keycodes { + PLACEHOLDER = SAFE_RANGE, // can always be here + EPRM, + VRSN, + RGB_SLD, + + EX_PIPE, // |> + THIN_ARROW, // -> + FAT_ARROW, // => +}; + + + +#define LAMBDA UC(0x03BB) + +const uint16_t PROGMEM keymaps[][MATRIX_ROWS][MATRIX_COLS] = { + + [0] = LAYOUT_ergodox( + KC_EQUAL, KC_1, KC_2, KC_3, KC_4, KC_5, KC_LEFT, + KC_TAB, KC_Q, KC_W, KC_E, KC_R, KC_T, KC_LALT, + KC_ESCAPE, KC_A, KC_S, KC_D, KC_F, KC_G, + KC_RSFT, CTL_T(KC_Z), KC_X, KC_C, KC_V, KC_B, KC_TAB, + LT(1,KC_GRAVE), KC_QUOTE, LALT(KC_LSHIFT),KC_LEFT,KC_RIGHT, + ALT_T(KC_APPLICATION), LAMBDA, + KC_LBRACKET, + GUI_T(KC_NO), LSFT_T(KC_BSPACE), KC_COLN, + + KC_MY_COMPUTER, KC_6, KC_7, KC_8, KC_9, KC_0, KC_MINUS, + KC_RALT, KC_Y, KC_U, KC_I, KC_O, KC_P, KC_BSLASH, + KC_H, KC_J, KC_K, KC_L, LT(2,KC_SCOLON), LT(1,KC_QUOTE), + KC_MINUS, KC_N, KC_M, KC_COMMA, KC_DOT, CTL_T(KC_SLASH), KC_RSFT, + KC_DOWN,KC_UP, KC_LBRACKET,KC_RBRACKET,MO(1), + + KC_PAUSE, TG(3), + KC_RBRACKET, + KC_COLN, RSFT_T(KC_ENTER), KC_SPACE + ), + + [1] = LAYOUT_ergodox( + KC_ESCAPE, KC_F1, KC_F2, KC_F3, KC_F4, KC_F5, KC_TRANSPARENT, + KC_TRANSPARENT, KC_EXLM, KC_AT, KC_LCBR, KC_RCBR, KC_PIPE, KC_RABK, + KC_TRANSPARENT, KC_HASH, KC_DLR, KC_LPRN, KC_RPRN, KC_UNDERSCORE, + KC_LABK, KC_PERC, KC_CIRC, KC_LBRACKET, KC_RBRACKET, KC_TILD, KC_TRANSPARENT, + KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, + RGB_MOD, KC_TRANSPARENT, + KC_TRANSPARENT, + RGB_VAD, RGB_VAI, EX_PIPE, + + KC_TRANSPARENT, KC_F6, KC_F7, KC_F8, KC_F9, KC_F10, KC_F11, + KC_PGUP, KC_UP, KC_7, KC_8, KC_9, KC_ASTR, KC_F12, + KC_DOWN, KC_4, KC_5, KC_6, KC_PLUS, KC_TRANSPARENT, + KC_PGDOWN, KC_AMPR, KC_1, KC_2, KC_3, KC_BSLASH, KC_TRANSPARENT, + KC_TRANSPARENT, KC_DOT, KC_0, KC_EQUAL, KC_TRANSPARENT, + RGB_TOG, RGB_SLD, + THIN_ARROW, + EX_PIPE, RGB_HUD, RGB_HUI + ), + + [2] = LAYOUT_ergodox( + KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, + KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_MS_UP, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, + KC_TRANSPARENT, KC_TRANSPARENT, KC_MS_LEFT, KC_MS_DOWN, KC_MS_RIGHT, KC_TRANSPARENT, + KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, + KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_MS_BTN1, KC_MS_BTN2, + KC_TRANSPARENT, KC_TRANSPARENT, + KC_TRANSPARENT, + KC_MS_BTN1, KC_MS_BTN2, KC_TRANSPARENT, + + KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, + KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, + KC_TRANSPARENT, KC_MS_WH_DOWN, KC_MS_WH_UP, KC_TRANSPARENT, KC_TRANSPARENT, KC_MEDIA_PLAY_PAUSE, + KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_MEDIA_PREV_TRACK, KC_MEDIA_NEXT_TRACK, KC_TRANSPARENT, KC_TRANSPARENT, + KC_AUDIO_VOL_DOWN, KC_AUDIO_VOL_UP, KC_AUDIO_MUTE, KC_TRANSPARENT, KC_TRANSPARENT, + KC_TRANSPARENT, KC_TRANSPARENT, + KC_TRANSPARENT, + KC_TRANSPARENT, KC_TRANSPARENT, KC_WWW_BACK), + + // FPS layout + [3] = 
LAYOUT_ergodox( + KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, + KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, + KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, + KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, + KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, + KC_TRANSPARENT, KC_TRANSPARENT, + KC_TRANSPARENT, + KC_SPACE, KC_TRANSPARENT, KC_TRANSPARENT, + + KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, + KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, + KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, + KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, + KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT, + KC_TRANSPARENT, TG(3), + KC_TRANSPARENT, + KC_TRANSPARENT, KC_TRANSPARENT, KC_TRANSPARENT), +}; + +const uint16_t PROGMEM fn_actions[] = { + [1] = ACTION_LAYER_TAP_TOGGLE(1) +}; + +// leaving this in place for compatibilty with old keymaps cloned and re-compiled. +const macro_t *action_get_macro(keyrecord_t *record, uint8_t id, uint8_t opt) +{ + switch(id) { + case 0: + if (record->event.pressed) { + SEND_STRING (QMK_KEYBOARD "/" QMK_KEYMAP " @ " QMK_VERSION); + } + break; + } + return MACRO_NONE; +}; + +bool process_record_user(uint16_t keycode, keyrecord_t *record) { + switch (keycode) { + // dynamically generate these. + case EPRM: + if (record->event.pressed) { + eeconfig_init(); + } + return false; + break; + case VRSN: + if (record->event.pressed) { + SEND_STRING (QMK_KEYBOARD "/" QMK_KEYMAP " @ " QMK_VERSION); + } + return false; + break; + case RGB_SLD: + if (record->event.pressed) { + rgblight_mode(1); + } + return false; + break; + case EX_PIPE: + if (record->event.pressed) { + SEND_STRING ( "|> " ); + } + return false; + break; + case THIN_ARROW: + if (record->event.pressed) { + SEND_STRING ( "-> " ); + } + return false; + break; + + + } + return true; +} + +void matrix_scan_user(void) { + + uint8_t layer = biton32(layer_state); + + ergodox_board_led_off(); + ergodox_right_led_1_off(); + ergodox_right_led_2_off(); + ergodox_right_led_3_off(); + switch (layer) { + case 1: + ergodox_right_led_1_on(); + break; + case 2: + ergodox_right_led_2_on(); + break; + case 3: + ergodox_right_led_3_on(); + break; + case 4: + ergodox_right_led_1_on(); + ergodox_right_led_2_on(); + break; + case 5: + ergodox_right_led_1_on(); + ergodox_right_led_3_on(); + break; + case 6: + ergodox_right_led_2_on(); + ergodox_right_led_3_on(); + break; + case 7: + ergodox_right_led_1_on(); + ergodox_right_led_2_on(); + ergodox_right_led_3_on(); + break; + default: + break; + } + +}; diff --git a/users/glittershark/keys.nix b/users/glittershark/keys.nix new file mode 100644 index 000000000000..c52229b3a898 --- /dev/null +++ b/users/glittershark/keys.nix @@ -0,0 +1,5 @@ +{ ... 
}: +{ + whitby = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDIwl+xQYRCk6Ijz/Ll8eXKZrcTH9/7xwlvIowiuqDSFtGkf+73QJkwVJ0YiKHWAPwIUWMzCEO/Ab2g6j4PcR+XYu8kXbrwT5aW65L/AK1oaav2RfV1bnQEVUP9FRPL52BN42J0ibI2QJZKJVws9JF7vxTWPPG0V0eoxcaRMk1ZEqq+/k3GuN8D69VSV8xo9lB8yZEvTxs0YQRiiF7Q6t/3jhYtz6lCdazQviRcSEOj5AVsDjcf1XIAPOcLK4Q4OEXL49T3UaitSYMyKIO8hzNLiyGAUlSbshAnutPXdyNBypkCs6FrSPSRdBfFjzUVE/a+JWCPmx0q0xAVd497Efxby+Vsa2/TPMp7tSisPaqk3MpPmjBS7eI/y4Pl2GpAB4OVANEBNd1Q6K2/37Pk+PrZtIUBiRG8sM0Od36BjwLCxvG0G5P/UYZ93aC8GzqkRf4evOBMiJCvR2o9CDEDycNyTm1y5dyJzQewOTWX9nsiF1rllc92W0ZALvpO03+W2+k= grfn@chupacabra"; + main = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDHPiNpPB6Uqs/VSW/C8tR/Z5wCQxKppNL2iETb1ucsYsFf1B2apG5txj06NMT6IWXwWpZXq7ld+/sA+a2I03lO2INP7S1Dto5nAwpNhhKN/UBXk76qYTdY5tEvb9J89S2ZzfQWR30aZ0CEDDrcbc+YktU1eSLdluu6QH+M/uPBweSiVn5wNHkc5sRdbyiVsZSQJ41MO7PQrzGpe7Pxola/ghOHdEFlESJMKA5uoRpCGboxtDE9tMJwG5MxNwHERpfI9FjvvLsJRrp9dRf6A/RQjlV/nb1GmpX0I8pvrXEPxm/l0rOAgE81VSsM+BxJ7ZvCe8u/YqMYJ8xVfskzlVsf griffin@MacBook-Pro"; +} diff --git a/users/glittershark/org-clubhouse/.gitignore b/users/glittershark/org-clubhouse/.gitignore new file mode 100644 index 000000000000..2a7dd97debf1 --- /dev/null +++ b/users/glittershark/org-clubhouse/.gitignore @@ -0,0 +1,3 @@ +# Spacemacs +org-clubhouse-autoloads.el +org-clubhouse-pkg.el diff --git a/users/glittershark/org-clubhouse/CODE_OF_CONDUCT.org b/users/glittershark/org-clubhouse/CODE_OF_CONDUCT.org new file mode 100644 index 000000000000..f15e387d5464 --- /dev/null +++ b/users/glittershark/org-clubhouse/CODE_OF_CONDUCT.org @@ -0,0 +1,101 @@ +* Contributor Covenant Code of Conduct + :PROPERTIES: + :CUSTOM_ID: contributor-covenant-code-of-conduct + :END: + +** Our Pledge + :PROPERTIES: + :CUSTOM_ID: our-pledge + :END: + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our +project and our community a harassment-free experience for everyone, +regardless of age, body size, disability, ethnicity, sex +characteristics, gender identity and expression, level of experience, +education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +** Our Standards + :PROPERTIES: + :CUSTOM_ID: our-standards + :END: + +Examples of behavior that contributes to creating a positive environment +include: + +- Using welcoming and inclusive language +- Being respectful of differing viewpoints and experiences +- Gracefully accepting constructive criticism +- Focusing on what is best for the community +- Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +- The use of sexualized language or imagery and unwelcome sexual + attention or advances +- Trolling, insulting/derogatory comments, and personal or political + attacks +- Public or private harassment +- Publishing others' private information, such as a physical or + electronic address, without explicit permission +- Other conduct which could reasonably be considered inappropriate in a + professional setting + +** Our Responsibilities + :PROPERTIES: + :CUSTOM_ID: our-responsibilities + :END: + +Project maintainers are responsible for clarifying the standards of +acceptable behavior and are expected to take appropriate and fair +corrective action in response to any instances of unacceptable behavior. 
+ +Project maintainers have the right and responsibility to remove, edit, +or reject comments, commits, code, wiki edits, issues, and other +contributions that are not aligned to this Code of Conduct, or to ban +temporarily or permanently any contributor for other behaviors that they +deem inappropriate, threatening, offensive, or harmful. + +** Scope + :PROPERTIES: + :CUSTOM_ID: scope + :END: + +This Code of Conduct applies within all project spaces, and it also +applies when an individual is representing the project or its community +in public spaces. Examples of representing a project or community +include using an official project e-mail address, posting via an +official social media account, or acting as an appointed representative +at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +** Enforcement + :PROPERTIES: + :CUSTOM_ID: enforcement + :END: + +Instances of abusive, harassing, or otherwise unacceptable behavior may +be reported by contacting the project team at root@gws.fyi. All +complaints will be reviewed and investigated and will result in a +response that is deemed necessary and appropriate to the circumstances. +The project team is obligated to maintain confidentiality with regard to +the reporter of an incident. Further details of specific enforcement +policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in +good faith may face temporary or permanent repercussions as determined +by other members of the project's leadership. + +** Attribution + :PROPERTIES: + :CUSTOM_ID: attribution + :END: + +This Code of Conduct is adapted from the +[[https://www.contributor-covenant.org][Contributor Covenant]], version +1.4, available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html + +For answers to common questions about this code of conduct, see +https://www.contributor-covenant.org/faq diff --git a/users/glittershark/org-clubhouse/LICENSE b/users/glittershark/org-clubhouse/LICENSE new file mode 100644 index 000000000000..1777f0fac3ea --- /dev/null +++ b/users/glittershark/org-clubhouse/LICENSE @@ -0,0 +1,7 @@ +Copyright (C) 2018 Off Market Data, Inc. DBA Urbint + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/users/glittershark/org-clubhouse/README.org b/users/glittershark/org-clubhouse/README.org new file mode 100644 index 000000000000..9cd8fbe8921d --- /dev/null +++ b/users/glittershark/org-clubhouse/README.org @@ -0,0 +1,142 @@ +#+TITLE:Org-Clubhouse + +Simple, unopinionated integration between Emacs's [[https://orgmode.org/][org-mode]] and the [[https://clubhouse.io/][Clubhouse]] +issue tracker + +(This used to be at urbint/org-clubhouse, by the way, but moved here as it's +more of a personal project than a company one) + +* Installation + +** [[https://github.com/quelpa/quelpa][Quelpa]] + +#+BEGIN_SRC emacs-lisp +(quelpa '(org-clubhouse + :fetcher github + :repo "glittershark/org-clubhouse")) +#+END_SRC + +** [[https://github.com/hlissner/doom-emacs/][DOOM Emacs]] + +#+BEGIN_SRC emacs-lisp +;; in packages.el +(package! org-clubhouse + :recipe (:fetcher github + :repo "glittershark/org-clubhouse" + :files ("*"))) + +;; in config.el +(def-package! org-clubhouse) +#+END_SRC + +** [[http://spacemacs.org/][Spacemacs]] +#+BEGIN_SRC emacs-lisp +;; in .spacemacs (SPC+fed) + dotspacemacs-additional-packages + '((org-clubhouse :location (recipe :fetcher github :repo "glittershark/org-clubhouse"))) +#+END_SRC + + +* Setup + +Once installed, you'll need to set three global config vars: + +#+BEGIN_SRC emacs-lisp +(setq org-clubhouse-auth-token "<your-token>" + org-clubhouse-team-name "<your-team-name>" + org-clubhouse-username "<your-username>") +#+END_SRC + +You can generate a new personal API token by going to the "API Tokens" tab on +the "Settings" page in the clubhouse UI. + +Note that ~org-clubhouse-username~ needs to be set to your *mention name*, not +your username, as currently there's no way to get the ID of a user given their +username in the clubhouse API + +* Usage + +** Reading from clubhouse + +- ~org-clubhouse-headlines-from-query~ + Create org-mode headlines from a [[https://help.clubhouse.io/hc/en-us/articles/360000046646-Searching-in-Clubhouse-Story-Search][clubhouse query]] at the cursor's current + position, prompting for the headline indentation level and clubhouse query + text +- ~org-clubhouse-headline-from-story~ + Prompts for headline indentation level and the title of a story (which will + complete using the titles of all stories in your Clubhouse workspace) and + creates an org-mode headline from that story +- ~org-clubhouse-headline-from-story-id~ + Creates an org-mode headline directly from the ID of a clubhouse story + +** Writing to clubhouse + +- ~org-clubhouse-create-story~ + Creates a new Clubhouse story from the current headline, or if a region of + headlines is selected bulk-creates stories with all those headlines +- ~org-clubhouse-create-epic~ + Creates a new Clubhouse epic from the current headline, or if a region of + headlines is selected bulk-creates epics with all those headlines +- ~org-clubhouse-create-story-with-task-list~ + Creates a Clubhouse story from the current headline, making all direct + children of the headline into tasks in the task list of the story +- ~org-clubhouse-push-task-list~ + Writes each child element of the current clubhouse element as a task list + item of the associated clubhouse ID. 
+- ~org-clubhouse-update-story-title~ + Updates the title of the Clubhouse story linked to the current headline with + the text of the headline +- ~org-clubhouse-update-description~ + Update the status of the Clubhouse story linked to the current element with + the contents of a drawer inside the element called DESCRIPTION, if any exists +- ~org-clubhouse-claim~ + Adds the user configured in ~org-clubhouse-username~ as the owner of the + clubhouse story associated with the headline at point + +*** Automatically updating Clubhouse story statuses + +Org-clubhouse can be configured to update the status of stories as you update +their todo-keyword in org-mode. To opt-into this behavior, set the +~org-clubhouse-mode~ minor-mode: + +#+BEGIN_SRC emacs-lisp +(add-hook 'org-mode-hook #'org-clubhouse-mode nil nil) +#+END_SRC + +The mapping from org-mode todo-keywords is configured via the +~org-clubhouse-state-alist~ variable, which should be an [[https://www.gnu.org/software/emacs/manual/html_node/elisp/Association-Lists.html][alist]] mapping (string) +[[https://orgmode.org/manual/Workflow-states.html][org-mode todo-keywords]] to the (string) names of their corresponding workflow +state. You can have todo-keywords that don't map to a workflow state (I use this +in my workflow extensively) and org-clubhouse will just preserve the previous +state of the story when moving to that state. + +An example config: + +#+BEGIN_SRC emacs-lisp +(setq org-clubhouse-state-alist + '(("TODO" . "To Do") + ("ACTIVE" . "In Progress") + ("DONE" . "Done"))) +#+END_SRC + +* Philosophy + +I use org-mode every single day to manage tasks, notes, literate programming, +etc. Part of what that means for me is that I already have a system for the +structure of my .org files, and I don't want to sacrifice that system for any +external tool. Updating statuses, ~org-clubhouse-create-story~, and +~org-clubhouse-headline-from-story~ are my bread and butter for that reason - +rather than having some sort of bidirectional sync that pulls down full lists of +all the stories in Clubhouse (or whatever issue tracker / project management +tool I'm using at the time). I can be in a mode where I'm taking meeting notes, +think of something that I need to do, make it a TODO headline, and make that +TODO headline a clubhouse story. That's the same reason for the DESCRIPTION +drawers rather than just sending the entire contents of a headline to +Clubhouse - I almost always want to write things like personal notes, literate +code, etc inside of the tasks I'm working on, and don't always want to share +that with Clubhouse. + +* Configuration + +Refer to the beginning of the [[https://github.com/urbint/org-clubhouse/blob/master/org-clubhouse.el][org-clubhouse.el]] file in this repository for +documentation on all supported configuration variables diff --git a/users/glittershark/org-clubhouse/org-clubhouse.el b/users/glittershark/org-clubhouse/org-clubhouse.el new file mode 100644 index 000000000000..e6e29b575187 --- /dev/null +++ b/users/glittershark/org-clubhouse/org-clubhouse.el @@ -0,0 +1,1241 @@ +;;; org-clubhouse.el --- Simple, unopinionated integration between org-mode and +;;; Clubhouse + +;;; Copyright (C) 2018 Off Market Data, Inc. 
DBA Urbint +;;; Permission is hereby granted, free of charge, to any person obtaining a copy +;;; of this software and associated documentation files (the "Software"), to +;;; deal in the Software without restriction, including without limitation the +;;; rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +;;; sell copies of the Software, and to permit persons to whom the Software is +;;; furnished to do so, subject to the following conditions: + +;;; The above copyright notice and this permission notice shall be included in +;;; all copies or substantial portions of the Software. + +;;; THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +;;; IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +;;; FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +;;; AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +;;; LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +;;; FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +;;; IN THE SOFTWARE. + +;;; Commentary: +;;; org-clubhouse provides simple, unopinionated integration between Emacs's +;;; org-mode and the Clubhouse issue tracker +;;; +;;; To configure org-clubhouse, create an authorization token in Cluhbouse's +;;; settings, then place the following configuration somewhere private: +;;; +;;; (setq org-clubhouse-auth-token "<auth_token>" +;;; org-clubhouse-team-name "<team-name>") +;;; + +;;; Code: + +(require 'cl-macs) +(require 'dash) +(require 'dash-functional) +(require 's) +(require 'org) +(require 'org-element) +(require 'subr-x) +(require 'ivy) +(require 'json) + +;;; +;;; Configuration +;;; + +(defvar org-clubhouse-auth-token nil + "Authorization token for the Clubhouse API.") + +(defvar org-clubhouse-username nil + "Username for the current Clubhouse user. + +Unfortunately, the Clubhouse API doesn't seem to provide this via the API given +an API token, so we need to configure this for +`org-clubhouse-claim-story-on-status-updates' to work") + +(defvar org-clubhouse-team-name nil + "Team name to use in links to Clubhouse. +ie https://app.clubhouse.io/<TEAM_NAME>/stories") + +(defvar org-clubhouse-project-ids nil + "Specific list of project IDs to synchronize with clubhouse. +If unset all projects will be synchronized") + +(defvar org-clubhouse-workflow-name "Default") + +(defvar org-clubhouse-default-story-type nil + "Sets the default story type. If set to 'nil', it will interactively prompt +the user each and every time a new story is created. If set to 'feature', +'bug', or 'chore', that value will be used as the default and the user will +not be prompted") + +(defvar org-clubhouse-state-alist + '(("LATER" . "Unscheduled") + ("[ ]" . "Ready for Development") + ("TODO" . "Ready for Development") + ("OPEN" . "Ready for Development") + ("ACTIVE" . "In Development") + ("PR" . "Review") + ("DONE" . "Merged") + ("[X]" . "Merged") + ("CLOSED" . "Merged")) + "Alist mapping org-mode todo keywords to their corresponding states in + Clubhouse. In `org-clubhouse-mode', moving headlines to these todo keywords + will update to the corresponding status in Clubhouse") + +(defvar org-clubhouse-story-types + '(("feature" . "Feature") + ("bug" . "Bug") + ("chore" . "Chore"))) + +(defvar org-clubhouse-default-story-types + '(("feature" . "Feature") + ("bug" . "Bug") + ("chore" . "Chore") + ("prompt" . 
"**Prompt each time (do not set a default story type)**"))) + +(defvar org-clubhouse-default-state "Proposed" + "Default state to create all new stories in.") + +(defvar org-clubhouse-claim-story-on-status-update 't + "Controls the assignee behavior of stories on status update. + +If set to 't, will mark the current user as the owner of any clubhouse +stories on any update to the status. + +If set to nil, will never automatically update the assignee of clubhouse +stories. + +If set to a list of todo-state's, will mark the current user as the owner of +clubhouse stories whenever updating the status to one of those todo states.") + +(defvar org-clubhouse-create-stories-with-labels nil + "Controls the way org-clubhouse creates stories with labels based on org tags. + +If set to 't, will create labels for all org tags on headlines when stories are +created. + +If set to 'existing, will set labels on created stories only if the label +already exists in clubhouse + +If set to nil, will never create stories with labels") + +;;; +;;; Utilities +;;; + +(defmacro comment (&rest _) + "Comment out one or more s-expressions." + nil) + +(defun ->list (vec) (append vec nil)) + +(defun reject-archived (item-list) + (-reject (lambda (item) (equal :json-true (alist-get 'archived item))) item-list)) + +(defun alist->plist (key-map alist) + (->> key-map + (-map (lambda (key-pair) + (let ((alist-key (car key-pair)) + (plist-key (cdr key-pair))) + (list plist-key (alist-get alist-key alist))))) + (-flatten-n 1))) + +(defun alist-get-equal (key alist) + "Like `alist-get', but uses `equal' instead of `eq' for comparing keys" + (->> alist + (-find (lambda (pair) (equal key (car pair)))) + (cdr))) + +(defun invert-alist (alist) + "Invert the keys and values of ALIST." + (-map (lambda (cell) (cons (cdr cell) (car cell))) alist)) + +(comment + + (alist->plist + '((foo . :foo) + (bar . :something)) + + '((foo . "foo") (bar . "bar") (ignored . "ignoreme!"))) + ;; => (:foo "foo" :something "bar") + + ) + +(defun find-match-in-alist (target alist) + (->> alist + (-find (lambda (key-value) + (string-equal (cdr key-value) target))) + car)) + +(defun org-clubhouse-collect-headlines (beg end) + "Collects the headline at point or the headlines in a region. Returns a list." + (if (and beg end) + (org-clubhouse-get-headlines-in-region beg end) + (list (org-element-find-headline)))) + + +(defun org-clubhouse-get-headlines-in-region (beg end) + "Collects the headlines from BEG to END" + (save-excursion + ;; This beg/end clean up pulled from `reverse-region`. + ;; it expands the region to include the full lines from the selected region. + + ;; put beg at the start of a line and end and the end of one -- + ;; the largest possible region which fits this criteria + (goto-char beg) + (or (bolp) (forward-line 1)) + (setq beg (point)) + (goto-char end) + ;; the test for bolp is for those times when end is on an empty line; + ;; it is probably not the case that the line should be included in the + ;; reversal; it isn't difficult to add it afterward. 
+ (or (and (eolp) (not (bolp))) (progn (forward-line -1) (end-of-line))) + (setq end (point-marker)) + + ;; move to the beginning + (goto-char beg) + ;; walk by line until past end + (let ((headlines '()) + (before-end 't)) + (while before-end + (add-to-list 'headlines (org-element-find-headline)) + (let ((before (point))) + (org-forward-heading-same-level 1) + (setq before-end (and (not (eq before (point))) (< (point) end))))) + (reverse headlines)))) + +;;; +;;; Org-element interaction +;;; + +;; (defun org-element-find-headline () +;; (let ((current-elt (org-element-at-point))) +;; (if (equal 'headline (car current-elt)) +;; current-elt +;; (let* ((elt-attrs (cadr current-elt)) +;; (parent (plist-get elt-attrs :post-affiliated))) +;; (goto-char parent) +;; (org-element-find-headline))))) + +(defun org-element-find-headline () + (save-mark-and-excursion + (when (not (outline-on-heading-p)) (org-back-to-heading)) + (let ((current-elt (org-element-at-point))) + (when (equal 'headline (car current-elt)) + (cadr current-elt))))) + +(defun org-element-extract-clubhouse-id (elt &optional property) + (when-let* ((clubhouse-id-link (plist-get elt (or property :CLUBHOUSE-ID)))) + (cond + ((string-match + (rx "[[" (one-or-more anything) "]" + "[" (group (one-or-more digit)) "]]") + clubhouse-id-link) + (string-to-number (match-string 1 clubhouse-id-link))) + ((string-match-p + (rx buffer-start + (one-or-more digit) + buffer-end) + clubhouse-id-link) + (string-to-number clubhouse-id-link))))) + +(comment + (let ((strn "[[https://app.clubhouse.io/example/story/2330][2330]]")) + (string-match + (rx "[[" (one-or-more anything) "]" + "[" (group (one-or-more digit)) "]]") + strn) + (string-to-number (match-string 1 strn))) + ) + +(defun org-element-clubhouse-id () + (org-element-extract-clubhouse-id + (org-element-find-headline))) + +(defun org-clubhouse-clocked-in-story-id () + "Return the clubhouse story-id of the currently clocked-in org entry, if any." + (save-mark-and-excursion + (save-current-buffer + (when (org-clocking-p) + (set-buffer (marker-buffer org-clock-marker)) + (save-restriction + (when (or (< org-clock-marker (point-min)) + (> org-clock-marker (point-max))) + (widen)) + (goto-char org-clock-marker) + (org-element-clubhouse-id)))))) + +(comment + (org-clubhouse-clocked-in-story-id) + ) + +(defun org-element-and-children-at-point () + (let* ((elt (org-element-find-headline)) + (contents-begin (or (plist-get elt :contents-begin) + (plist-get elt :begin))) + (end (plist-get elt :end)) + (level (plist-get elt :level)) + (children '())) + (save-excursion + (goto-char (+ contents-begin (length (plist-get elt :title)))) + (while (< (point) end) + (let* ((next-elt (org-element-at-point)) + (elt-type (car next-elt)) + (elt (cadr next-elt))) + (when (and (eql 'headline elt-type) + (eql (+ 1 level) (plist-get elt :level))) + (push elt children)) + (goto-char (plist-get elt :end))))) + (append elt `(:children ,(reverse children))))) + +(defun +org-element-contents (elt) + (if-let ((begin (plist-get (cadr elt) :contents-begin)) + (end (plist-get (cadr elt) :contents-end))) + (buffer-substring-no-properties begin end) + "")) + +(defun org-clubhouse-find-description-drawer () + "Try to find a DESCRIPTION drawer in the current element." 
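+  ;; Two cases: if point is already inside a drawer, return its contents
+  ;; directly; if point is on a headline, look for a ":DESCRIPTION:" drawer in
+  ;; the headline's contents, move point into it, and recurse so the drawer
+  ;; branch of the `cl-case' handles it.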
+ (let ((elt (org-element-at-point))) + (cl-case (car elt) + ('drawer (+org-element-contents elt)) + ('headline + (when-let ((drawer-pos (string-match + ":DESCRIPTION:" + (+org-element-contents elt)))) + (save-excursion + (goto-char (+ (plist-get (cadr elt) :contents-begin) + drawer-pos)) + (org-clubhouse-find-description-drawer))))))) + +(defun org-clubhouse--labels-for-elt (elt) + "Return the Clubhouse labels based on the tags of ELT and the user's config." + (unless (eq nil org-clubhouse-create-stories-with-labels) + (let ((tags (org-get-tags (plist-get elt :contents-begin)))) + (-map (lambda (l) `((name . ,l))) + (cl-case org-clubhouse-create-stories-with-labels + ('t tags) + ('existing (-filter (lambda (tag) (-some (lambda (l) + (string-equal tag (cdr l))) + (org-clubhouse-labels))) + tags))))))) + +;;; +;;; API integration +;;; + +(defvar org-clubhouse-base-url* "https://api.clubhouse.io/api/v3") + +(defun org-clubhouse-auth-url (url &optional params) + (concat url + "?" + (url-build-query-string + (cons `("token" ,org-clubhouse-auth-token) params)))) + +(defun org-clubhouse-baseify-url (url) + (if (s-starts-with? org-clubhouse-base-url* url) url + (concat org-clubhouse-base-url* + (if (s-starts-with? "/" url) url + (concat "/" url))))) + +(cl-defun org-clubhouse-request (method url &key data (params '())) + (message "%s %s %s" method url (prin1-to-string data)) + (let* ((url-request-method method) + (url-request-extra-headers + '(("Content-Type" . "application/json"))) + (url-request-data data) + (buf)) + + (setq url (-> url + org-clubhouse-baseify-url + (org-clubhouse-auth-url params))) + + (setq buf (url-retrieve-synchronously url)) + + (with-current-buffer buf + (goto-char url-http-end-of-headers) + (prog1 (json-read) (kill-buffer))))) + +(cl-defun to-id-name-pairs + (seq &optional (id-attr 'id) (name-attr 'name)) + (->> seq + ->list + (-map (lambda (resource) + (cons (alist-get id-attr resource) + (alist-get name-attr resource)))))) + +(cl-defun org-clubhouse-fetch-as-id-name-pairs + (resource &optional + (id-attr 'id) + (name-attr 'name)) + "Returns the given resource from clubhouse as (id . 
name) pairs"
+  (let ((resp-json (org-clubhouse-request "GET" resource)))
+    (-> resp-json
+        ->list
+        reject-archived
+        (to-id-name-pairs id-attr name-attr))))
+
+(defun org-clubhouse-get-story
+    (clubhouse-id)
+  (org-clubhouse-request "GET" (format "/stories/%s" clubhouse-id)))
+
+(defun org-clubhouse-link-to-story (story-id)
+  (format "https://app.clubhouse.io/%s/story/%d"
+          org-clubhouse-team-name
+          story-id))
+
+(defun org-clubhouse-link-to-epic (epic-id)
+  (format "https://app.clubhouse.io/%s/epic/%d"
+          org-clubhouse-team-name
+          epic-id))
+
+(defun org-clubhouse-link-to-milestone (milestone-id)
+  (format "https://app.clubhouse.io/%s/milestone/%d"
+          org-clubhouse-team-name
+          milestone-id))
+
+(defun org-clubhouse-link-to-project (project-id)
+  (format "https://app.clubhouse.io/%s/project/%d"
+          org-clubhouse-team-name
+          project-id))
+
+;;;
+;;; Caching
+;;;
+
+(comment
+ (defcache org-clubhouse-projects
+   (org-sync-clubhouse-fetch-as-id-name-pairs "projectx"))
+
+ (clear-org-clubhouse-projects-cache)
+ (clear-org-clubhouse-cache)
+ )
+
+(defvar org-clubhouse-cache-clear-functions ())
+
+(defmacro defcache (name &optional docstring &rest body)
+  (let* ((doc (when docstring (list docstring)))
+         (cache-var-name (intern (concat (symbol-name name)
+                                         "-cache")))
+         (clear-cache-function-name
+          (intern (concat "clear-" (symbol-name cache-var-name)))))
+    `(progn
+       (defvar ,cache-var-name :no-cache)
+       (defun ,name ()
+         ,@doc
+         (when (equal :no-cache ,cache-var-name)
+           (setq ,cache-var-name (progn ,@body)))
+         ,cache-var-name)
+       (defun ,clear-cache-function-name ()
+         (interactive)
+         (setq ,cache-var-name :no-cache))
+
+       (push (quote ,clear-cache-function-name)
+             org-clubhouse-cache-clear-functions))))
+
+(defun org-clubhouse-clear-cache ()
+  (interactive)
+  (-map #'funcall org-clubhouse-cache-clear-functions))
+
+;;;
+;;; API resource functions
+;;;
+
+(defcache org-clubhouse-projects
+  "Returns projects as (project-id . name)"
+  (org-clubhouse-fetch-as-id-name-pairs "projects"))
+
+(defcache org-clubhouse-epics
+  "Returns epics as (epic-id . name)"
+  (org-clubhouse-fetch-as-id-name-pairs "epics"))
+
+(defcache org-clubhouse-milestones
+  "Returns milestones as (milestone-id . name)"
+  (org-clubhouse-fetch-as-id-name-pairs "milestones"))
+
+(defcache org-clubhouse-workflow-states
+  "Returns workflow states as (name . id) pairs"
+  (let* ((resp-json (org-clubhouse-request "GET" "workflows"))
+         (workflows (->list resp-json))
+         ;; just assume it exists, for now
+         (workflow (-find (lambda (workflow)
+                            (equal org-clubhouse-workflow-name
+                                   (alist-get 'name workflow)))
+                          workflows))
+         (states (->list (alist-get 'states workflow))))
+    (to-id-name-pairs states
+                      'name
+                      'id)))
+
+(defcache org-clubhouse-labels
+  "Returns labels as (label-id . name)"
+  (org-clubhouse-fetch-as-id-name-pairs "labels"))
+
+(defcache org-clubhouse-whoami
+  "Returns the ID of the logged in user"
+  (->> (org-clubhouse-request
+        "GET"
+        "/members")
+       ->list
+       (find-if (lambda (m)
+                  (->> m
+                       (alist-get 'profile)
+                       (alist-get 'mention_name)
+                       (equal org-clubhouse-username))))
+       (alist-get 'id)))
+
+(defcache org-clubhouse-iterations
+  "Returns iterations as (iteration-id . name)"
+  (org-clubhouse-fetch-as-id-name-pairs "iterations"))
+
+(defun org-clubhouse-stories-in-project (project-id)
+  "Return the stories in the given PROJECT-ID as org headlines."
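+  ;; Fetches /projects/<id>/stories, drops archived and completed stories,
+  ;; derives a status symbol (started, completed or open) from the response,
+  ;; and returns plists shaped roughly like (:title "..." :id 1234 :status open)
+  ;; (illustrative values, not real data).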
+ (let ((resp-json (org-clubhouse-request "GET" (format "/projects/%d/stories" project-id)))) + (->> resp-json ->list reject-archived + (-reject (lambda (story) (equal :json-true (alist-get 'completed story)))) + (-map (lambda (story) + (cons + (cons 'status + (cond + ((equal :json-true (alist-get 'started story)) + 'started) + ((equal :json-true (alist-get 'completed story)) + 'completed) + ('t + 'open))) + story))) + (-map (-partial #'alist->plist + '((name . :title) + (id . :id) + (status . :status))))))) + +(defun org-clubhouse-workflow-state-id-to-todo-keyword (workflow-state-id) + "Convert the named clubhouse WORKFLOW-STATE-ID to an org todo keyword." + (let* ((state-name (alist-get-equal + workflow-state-id + (invert-alist (org-clubhouse-workflow-states)))) + (inv-state-name-alist + (-map (lambda (cell) (cons (cdr cell) (car cell))) + org-clubhouse-state-alist))) + (or (alist-get-equal state-name inv-state-name-alist) + (if state-name (s-upcase state-name) "UNKNOWN")))) + +;;; +;;; Prompting +;;; + +(defun org-clubhouse-prompt-for-project (cb) + (ivy-read + "Select a project: " + (-map #'cdr (org-clubhouse-projects)) + :require-match t + :history 'org-clubhouse-project-history + :action (lambda (selected) + (let ((project-id + (find-match-in-alist selected (org-clubhouse-projects)))) + (funcall cb project-id))))) + +(defun org-clubhouse-prompt-for-epic (cb) + "Prompt the user for an epic using ivy and call CB with its ID." + (ivy-read + "Select an epic: " + (-map #'cdr (append '((nil . "No Epic")) (org-clubhouse-epics))) + :history 'org-clubhouse-epic-history + :action (lambda (selected) + (let ((epic-id + (find-match-in-alist selected (org-clubhouse-epics)))) + (funcall cb epic-id))))) + +(defun org-clubhouse-prompt-for-milestone (cb) + "Prompt the user for a milestone using ivy and call CB with its ID." + (ivy-read + "Select a milestone: " + (-map #'cdr (append '((nil . "No Milestone")) (org-clubhouse-milestones))) + :require-match t + :history 'org-clubhouse-milestone-history + :action (lambda (selected) + (let ((milestone-id + (find-match-in-alist selected (org-clubhouse-milestones)))) + (funcall cb milestone-id))))) + +(defun org-clubhouse-prompt-for-story-type (cb) + (ivy-read + "Select a story type: " + (-map #'cdr org-clubhouse-story-types) + :history 'org-clubhouse-story-history + :action (lambda (selected) + (let ((story-type + (find-match-in-alist selected org-clubhouse-story-types))) + (funcall cb story-type))))) + +(defun org-clubhouse-prompt-for-default-story-type () + (interactive) + (ivy-read + "Select a default story type: " + (-map #'cdr org-clubhouse-default-story-types) + :history 'org-clubhouse-default-story-history + :action (lambda (selected) + (let ((story-type + (find-match-in-alist selected org-clubhouse-default-story-types))) + (if (string-equal story-type "prompt") + (setq org-clubhouse-default-story-type nil) + (setq org-clubhouse-default-story-type story-type)))))) + +;;; +;;; Epic creation +;;; + +(cl-defun org-clubhouse-create-epic-internal + (title &key milestone-id) + (cl-assert (and (stringp title) + (or (null milestone-id) + (integerp milestone-id)))) + (org-clubhouse-request + "POST" + "epics" + :data + (json-encode + `((name . ,title) + (milestone_id . 
,milestone-id)))))
+
+(defun org-clubhouse-populate-created-epic (elt epic)
+  (let ((elt-start (plist-get elt :begin))
+        (epic-id (alist-get 'id epic))
+        (milestone-id (alist-get 'milestone_id epic)))
+    (save-excursion
+      (goto-char elt-start)
+
+      (org-set-property "clubhouse-epic-id"
+                        (org-link-make-string
+                         (org-clubhouse-link-to-epic epic-id)
+                         (number-to-string epic-id)))
+
+      (when milestone-id
+        (org-set-property "clubhouse-milestone"
+                          (org-link-make-string
+                           (org-clubhouse-link-to-milestone milestone-id)
+                           (alist-get milestone-id (org-clubhouse-milestones))))))))
+
+(defun org-clubhouse-create-epic (&optional beg end)
+  "Creates a clubhouse epic using selected headlines.
+Will pull the title from the headline at point, or create epics for all the
+headlines in the selected region.
+
+All epics are added to the same milestone, as selected via a prompt.
+If the epics already have a CLUBHOUSE-EPIC-ID, they are filtered and ignored."
+  (interactive
+   (when (use-region-p)
+     (list (region-beginning) (region-end))))
+
+  (let* ((elts (org-clubhouse-collect-headlines beg end))
+         (elts (-remove (lambda (elt) (plist-get elt :CLUBHOUSE-EPIC-ID)) elts)))
+    (org-clubhouse-prompt-for-milestone
+     (lambda (milestone-id)
+       (dolist (elt elts)
+         (let* ((title (plist-get elt :title))
+                (epic (org-clubhouse-create-epic-internal
+                       title
+                       :milestone-id milestone-id)))
+           (org-clubhouse-populate-created-epic elt epic))
+         elts)))))
+
+;;;
+;;; Story creation
+;;;
+
+(defun org-clubhouse-default-state-id ()
+  (alist-get-equal org-clubhouse-default-state (org-clubhouse-workflow-states)))
+
+(cl-defun org-clubhouse-create-story-internal
+    (title &key project-id epic-id story-type description labels)
+  (cl-assert (and (stringp title)
+                  (integerp project-id)
+                  (or (null epic-id) (integerp epic-id))
+                  (or (null description) (stringp description))))
+  (let ((workflow-state-id (org-clubhouse-default-state-id))
+        (params `((name . ,title)
+                  (project_id . ,project-id)
+                  (epic_id . ,epic-id)
+                  (story_type . ,story-type)
+                  (description . ,(or description ""))
+                  (labels . ,labels))))
+
+    (when workflow-state-id
+      (push `(workflow_state_id . ,workflow-state-id) params))
+
+    (org-clubhouse-request
+     "POST"
+     "stories"
+     :data
+     (json-encode params))))
+
+(cl-defun org-clubhouse-populate-created-story (elt story &key extra-properties)
+  (let ((elt-start (plist-get elt :begin))
+        (story-id (alist-get 'id story))
+        (epic-id (alist-get 'epic_id story))
+        (project-id (alist-get 'project_id story))
+        (story-type (alist-get 'story_type story)))
+
+    (save-excursion
+      (goto-char elt-start)
+
+      (org-set-property "clubhouse-id"
+                        (org-link-make-string
+                         (org-clubhouse-link-to-story story-id)
+                         (number-to-string story-id)))
+      (when epic-id
+        (org-set-property "clubhouse-epic"
+                          (org-link-make-string
+                           (org-clubhouse-link-to-epic epic-id)
+                           (alist-get epic-id (org-clubhouse-epics)))))
+
+      (org-set-property "clubhouse-project"
+                        (org-link-make-string
+                         (org-clubhouse-link-to-project project-id)
+                         (alist-get project-id (org-clubhouse-projects))))
+
+      (org-set-property "story-type"
+                        (alist-get-equal story-type org-clubhouse-story-types))
+
+      (dolist (extra-prop extra-properties)
+        (org-set-property (car extra-prop)
+                          (alist-get (cdr extra-prop) story)))
+
+      (org-todo "TODO"))))
+
+(cl-defun org-clubhouse-create-story (&optional beg end &key then)
+  "Creates a clubhouse story using selected headlines.
+
+Will pull the title from the headline at point,
+or create cards for all the headlines in the selected region.
+ +All stories are added to the same project and epic, as selected via two prompts. +If the stories already have a CLUBHOUSE-ID, they are filtered and ignored." + (interactive + (when (use-region-p) + (list (region-beginning) (region-end)))) + + (let* ((elts (org-clubhouse-collect-headlines beg end)) + (new-elts (-remove (lambda (elt) (plist-get elt :CLUBHOUSE-ID)) elts))) + (org-clubhouse-prompt-for-project + (lambda (project-id) + (when project-id + (org-clubhouse-prompt-for-epic + (lambda (epic-id) + (let ((create-story + (lambda (story-type) + (-map + (lambda (elt) + (let* ((title (plist-get elt :title)) + (description + (save-mark-and-excursion + (goto-char (plist-get elt :begin)) + (org-clubhouse-find-description-drawer))) + (labels (org-clubhouse--labels-for-elt elt)) + (story (org-clubhouse-create-story-internal + title + :project-id project-id + :epic-id epic-id + :story-type story-type + :description description + :labels labels))) + (org-clubhouse-populate-created-story elt story) + (when (functionp then) + (funcall then story)))) + new-elts)))) + (if org-clubhouse-default-story-type + (funcall create-story org-clubhouse-default-story-type) + (org-clubhouse-prompt-for-story-type create-story)))))))))) + +(defun org-clubhouse-create-story-with-task-list (&optional beg end) + "Creates a clubhouse story using the selected headline, making all direct +children of that headline into tasks in the task list of the story." + (interactive + (when (use-region-p) + (list (region-beginning) (region-end)))) + + (let* ((elt (org-element-and-children-at-point))) + (org-clubhouse-create-story nil nil + :then (lambda (story) + (pp story) + (org-clubhouse-push-task-list + (alist-get 'id story) + (plist-get elt :children)))))) + +;;; +;;; Task creation +;;; + +(cl-defun org-clubhouse-create-task (title &key story-id) + (cl-assert (and (stringp title) + (integerp story-id))) + (org-clubhouse-request + "POST" + (format "/stories/%d/tasks" story-id) + :data (json-encode `((description . ,title))))) + +(defun org-clubhouse-push-task-list (&optional parent-clubhouse-id child-elts) + "Writes each child of the element at point as a task list item. 
+ +When called as (org-clubhouse-push-task-list PARENT-CLUBHOUSE-ID CHILD-ELTS), +allows manually passing a clubhouse ID and list of org-element plists to write" + (interactive) + (let* ((elt (org-element-and-children-at-point)) + (parent-clubhouse-id (or parent-clubhouse-id + (org-element-extract-clubhouse-id elt))) + (child-elts (or child-elts (plist-get elt :children))) + (story (org-clubhouse-get-story parent-clubhouse-id)) + (existing-tasks (alist-get 'tasks story)) + (task-exists + (lambda (task-name) + (cl-some (lambda (task) + (string-equal task-name (alist-get 'description task))) + existing-tasks))) + (elts-with-starts + (-map (lambda (e) (cons (set-marker (make-marker) + (plist-get e :begin)) + e)) + child-elts))) + (dolist (child-elt-and-start elts-with-starts) + (let* ((start (car child-elt-and-start)) + (child-elt (cdr child-elt-and-start)) + (task-name (plist-get child-elt :title))) + (unless (funcall task-exists task-name) + (let ((task (org-clubhouse-create-task + task-name + :story-id parent-clubhouse-id))) + (org-clubhouse-populate-created-task child-elt task start))))))) + +(defun org-clubhouse-populate-created-task (elt task &optional begin) + (let ((elt-start (or begin (plist-get elt :begin))) + (task-id (alist-get 'id task)) + (story-id (alist-get 'story_id task))) + + (save-excursion + (goto-char elt-start) + + (org-set-property "clubhouse-task-id" (format "%d" task-id)) + + (org-set-property "clubhouse-story-id" + (org-link-make-string + (org-clubhouse-link-to-story story-id) + (number-to-string story-id))) + + (org-todo "TODO")))) + +;;; +;;; Task Updates +;;; + +(cl-defun org-clubhouse-update-task-internal + (story-id task-id &rest attrs) + (cl-assert (and (integerp story-id) + (integerp task-id) + (listp attrs))) + (org-clubhouse-request + "PUT" + (format "stories/%d/tasks/%d" story-id task-id) + :data + (json-encode attrs))) + +;;; +;;; Story updates +;;; + +(cl-defun org-clubhouse-update-story-internal + (story-id &rest attrs) + (cl-assert (and (integerp story-id) + (listp attrs))) + (org-clubhouse-request + "PUT" + (format "stories/%d" story-id) + :data + (json-encode attrs))) + +(cl-defun org-clubhouse-update-story-at-point (&rest attrs) + (when-let* ((clubhouse-id (org-element-clubhouse-id))) + (apply + #'org-clubhouse-update-story-internal + (cons clubhouse-id attrs)) + t)) + +(defun org-clubhouse-update-story-title () + "Update the title of the Clubhouse story linked to the current headline. + +Update the title of the story linked to the current headline with the text of +the headline." + (interactive) + + (let* ((elt (org-element-find-headline)) + (title (plist-get elt :title)) + (clubhouse-id (org-element-clubhouse-id))) + (and + (org-clubhouse-update-story-at-point + clubhouse-id + :name title) + (message "Successfully updated story title to \"%s\"" + title)))) + +(defun org-clubhouse-update-status () + "Update the status of the Clubhouse story linked to the current element. + +Update the status of the Clubhouse story linked to the current element with the +entry in `org-clubhouse-state-alist' corresponding to the todo-keyword of the +element." 
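+ ;; The todo-keyword -> Clubhouse workflow-state mapping is taken from
+ ;; `org-clubhouse-state-alist'.  An illustrative configuration (the state
+ ;; names here are examples, not defaults) might look like:
+ ;;
+ ;;   (setq org-clubhouse-state-alist
+ ;;         '(("TODO"   . "To Do")
+ ;;           ("ACTIVE" . "In Progress")
+ ;;           ("DONE"   . "Done")))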
+ (interactive) + (let* ((elt (org-element-find-headline)) + (todo-keyword (-> elt + (plist-get :todo-keyword) + (substring-no-properties))) + + (clubhouse-id (org-element-extract-clubhouse-id elt)) + (task-id (plist-get elt :CLUBHOUSE-TASK-ID))) + (cond + (clubhouse-id + (let* ((todo-keyword (-> elt + (plist-get :todo-keyword) + (substring-no-properties)))) + (when-let* ((clubhouse-workflow-state + (alist-get-equal todo-keyword org-clubhouse-state-alist)) + (workflow-state-id + (alist-get-equal clubhouse-workflow-state + (org-clubhouse-workflow-states)))) + (let ((update-assignee? + (if (or (eq 't org-clubhouse-claim-story-on-status-update) + (member todo-keyword + org-clubhouse-claim-story-on-status-update)) + (if org-clubhouse-username + 't + (warn "Not claiming story since `org-clubhouse-username' + is not set") + nil)))) + + (if update-assignee? + (org-clubhouse-update-story-internal + clubhouse-id + :workflow_state_id workflow-state-id + :owner_ids (if update-assignee? + (list (org-clubhouse-whoami)) + (list))) + (org-clubhouse-update-story-internal + clubhouse-id + :workflow_state_id workflow-state-id)) + (message + (if update-assignee? + "Successfully claimed story and updated clubhouse status to \"%s\"" + "Successfully updated clubhouse status to \"%s\"") + clubhouse-workflow-state))))) + + (task-id + (let ((story-id (org-element-extract-clubhouse-id + elt + :CLUBHOUSE-STORY-ID)) + (done? (member todo-keyword org-done-keywords))) + (org-clubhouse-update-task-internal + story-id + (string-to-number task-id) + :complete (if done? 't :json-false)) + (message "Successfully marked clubhouse task status as %s" + (if done? "complete" "incomplete"))))))) + +(defun org-clubhouse-update-description () + "Update the description of the Clubhouse story linked to the current element. + +Update the status of the Clubhouse story linked to the current element with the +contents of a drawer inside the element called DESCRIPTION, if any." + (interactive) + (when-let* ((new-description (org-clubhouse-find-description-drawer))) + (and + (org-clubhouse-update-story-at-point + :description new-description) + (message "Successfully updated story description")))) + +(defun org-clubhouse-update-labels () + "Update the labels of the Clubhouse story linked to the current element. + +Will use the value of `org-clubhouse-create-stories-with-labels' to determine +which labels to set." 
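+ ;; `org-clubhouse--labels-for-elt' presumably returns labels in the shape the
+ ;; Clubhouse API expects, i.e. a list of alists carrying the label name, for
+ ;; example '(((name . "backend")) ((name . "bug"))) -- which is also what the
+ ;; (-map #'cdar ...) in the success message below assumes.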
+ (interactive) + (when-let* ((elt (org-element-find-headline)) + (new-labels (org-clubhouse--labels-for-elt elt))) + (and + (org-clubhouse-update-story-at-point + :labels new-labels) + (message "Successfully updated story labels to :%s:" + (->> new-labels + (-map #'cdar) + (s-join ":")))))) + + +;;; +;;; Creating headlines from existing stories +;;; + +(defun org-clubhouse--task-to-headline-text (level task) + (format "%s %s %s +:PROPERTIES: +:clubhouse-task-id: %s +:clubhouse-story-id: %s +:END:" + (make-string level ?*) + (if (equal :json-false (alist-get 'complete task)) + "TODO" "DONE") + (alist-get 'description task) + (alist-get 'id task) + (let ((story-id (alist-get 'story_id task))) + (org-link-make-string + (org-clubhouse-link-to-story story-id) + story-id)))) + +(defun org-clubhouse--story-to-headline-text (level story) + (let ((story-id (alist-get 'id story))) + (format + "%s %s %s %s +:PROPERTIES: +:clubhouse-id: %s +:END: +%s +%s +" + (make-string level ?*) + (org-clubhouse-workflow-state-id-to-todo-keyword + (alist-get 'workflow_state_id story)) + (alist-get 'name story) + (if-let ((labels (->> story + (alist-get 'labels) + ->list + (-map (apply-partially #'alist-get 'name))))) + (format ":%s:" (s-join ":" labels)) + "") + (org-link-make-string + (org-clubhouse-link-to-story story-id) + (number-to-string story-id)) + (let ((desc (alist-get 'description story))) + (if (= 0 (length desc)) "" + (format ":DESCRIPTION:\n%s\n:END:" desc))) + (if-let ((tasks (seq-sort-by + (apply-partially #'alist-get 'position) + #'< + (or (alist-get 'tasks story) + (alist-get 'tasks + (org-clubhouse-get-story story-id)))))) + (mapconcat (apply-partially #'org-clubhouse--task-to-headline-text + (1+ level)) + tasks + "\n") + "")))) + +(defun org-clubhouse-headline-from-my-tasks (level) + "Prompt my active stories and create a single `org-mode' headline at LEVEL." + (interactive "*nLevel: \n") + (if org-clubhouse-username + (let* ((story-list (org-clubhouse--search-stories + (format "owner:%s !is:done !is:archived" + org-clubhouse-username))) + (stories (to-id-name-pairs story-list))) + (org-clubhouse-headline-from-story-id level + (find-match-in-alist + (ivy-read "Select Story: " + (-map #'cdr stories)) + stories))) + (warn "Can't fetch my tasks if `org-clubhouse-username' is unset"))) + +(defun org-clubhouse-headline-from-story-id (level story-id) + "Create a single `org-mode' headline at LEVEL based on the given clubhouse STORY-ID." + (interactive "*nLevel: \nnStory ID: ") + (let* ((story (org-clubhouse-get-story story-id))) + (if (equal '((message . 
"Resource not found.")) story) + (message "Story ID not found: %d" story-id) + (save-mark-and-excursion + (insert (org-clubhouse--story-to-headline-text level story)) + (org-align-tags))))) + +(defun org-clubhouse--search-stories (query) + (unless (string= "" query) + (-> (org-clubhouse-request "GET" "search/stories" :params `((query ,query))) + cdadr + (append nil) + reject-archived))) + +(defun org-clubhouse-prompt-for-iteration (cb) + "Prompt for iteration and call CB with that iteration" + (ivy-read + "Select an interation: " + (-map #'cdr (org-clubhouse-iterations)) + :require-match t + :history 'org-clubhouse-iteration-history + :action (lambda (selected) + (let ((iteration-id + (find-match-in-alist selected (org-clubhouse-iterations)))) + (funcall cb iteration-id))))) + +(defun org-clubhouse--get-iteration (iteration-id) + (-> (org-clubhouse-request "GET" (format "iterations/%d/stories" iteration-id)) + (append nil))) + +(defun org-clubhouse-headlines-from-iteration (level) + "Create `org-mode' headlines from a clubhouse iteration. + +Create `org-mode' headlines from all the resulting stories at headline level LEVEL." + (interactive "*nLevel: ") + (org-clubhouse-prompt-for-iteration + (lambda (iteration-id) + (let ((story-list (org-clubhouse--get-iteration iteration-id))) + (if (null story-list) + (message "Iteration id returned no stories: %d" iteration-id) + (let ((text (mapconcat (apply-partially + #'org-clubhouse--story-to-headline-text + level) + (reject-archived story-list) "\n"))) + (save-mark-and-excursion + (insert text) + (org-align-all-tags)) + text)))))) + +(defun org-clubhouse-headlines-from-query (level query) + "Create `org-mode' headlines from a clubhouse query. + +Submits QUERY to clubhouse, and creates `org-mode' headlines from all the +resulting stories at headline level LEVEL." + (interactive + "*nLevel: \nMQuery: ") + (let* ((story-list (org-clubhouse--search-stories query))) + (if (null story-list) + (message "Query returned no stories: %s" query) + (let ((text (mapconcat (apply-partially + #'org-clubhouse--story-to-headline-text + level) + (reject-archived story-list) "\n"))) + (if (called-interactively-p) + (save-mark-and-excursion + (insert text) + (org-align-all-tags)) + text))))) + +(defun org-clubhouse-prompt-for-story (cb) + "Prompt the user for a clubhouse story, then call CB with the full story." + (ivy-read "Story title: " + (lambda (search-term) + (let* ((stories (org-clubhouse--search-stories + (if search-term (format "\"%s\"" search-term) + "")))) + (-map (lambda (story) + (propertize (alist-get 'name story) 'story story)) + stories))) + :dynamic-collection t + :history 'org-clubhouse-story-prompt + :action (lambda (s) (funcall cb (get-text-property 0 'story s))) + :require-match t)) + +(defun org-clubhouse-headline-from-story (level) + "Prompt for a story, and create an org headline at LEVEL from that story." + (interactive "*nLevel: ") + (org-clubhouse-prompt-for-story + (lambda (story) + (save-mark-and-excursion + (insert (org-clubhouse--story-to-headline-text level story)) + (org-align-tags))))) + + +(defun org-clubhouse-link () + "Link the current `org-mode' headline with an existing clubhouse story." + (interactive) + (org-clubhouse-prompt-for-story + (lambda (story) + (org-clubhouse-populate-created-story + (org-element-find-headline) + story + :extra-properties '(("clubhouse-story-name" . 
name))) + (org-todo + (org-clubhouse-workflow-state-id-to-todo-keyword + (alist-get 'workflow_state_id story)))))) + +(defun org-clubhouse-claim () + "Assign the clubhouse story associated with the headline at point to yourself." + (interactive) + (if org-clubhouse-username + (and + (org-clubhouse-update-story-at-point + :owner_ids (list (org-clubhouse-whoami))) + (message "Successfully claimed story")) + (warn "Can't claim story if `org-clubhouse-username' is unset"))) + +(defun org-clubhouse-sync-status (&optional beg end) + "Pull the status(es) for the story(ies) in region and update the todo state. + +Uses `org-clubhouse-state-alist'. Operates over stories from BEG to END" + (interactive + (when (use-region-p) + (list (region-beginning) (region-end)))) + (let ((elts (-filter (lambda (e) (plist-get e :CLUBHOUSE-ID)) + (org-clubhouse-collect-headlines beg end)))) + (save-mark-and-excursion + (dolist (e elts) + (goto-char (plist-get e :begin)) + (let* ((clubhouse-id (org-element-extract-clubhouse-id e)) + (story (org-clubhouse-get-story clubhouse-id)) + (workflow-state-id (alist-get 'workflow_state_id story)) + (todo-keyword (org-clubhouse-workflow-state-id-to-todo-keyword + workflow-state-id))) + (let ((org-after-todo-state-change-hook + (remove 'org-clubhouse-update-status + org-after-todo-state-change-hook))) + (org-todo todo-keyword))))) + (message "Successfully synchronized status of %d stories from Clubhouse" + (length elts)))) + +(cl-defun org-clubhouse-set-epic (&optional story-id epic-id cb &key beg end) + "Set the epic of clubhouse story STORY-ID to EPIC-ID, then call CB. + +When called interactively, prompt for an epic and set the story of the clubhouse +stor{y,ies} at point or region" + (interactive + (when (use-region-p) + (list nil nil nil + :beg (region-beginning) + :end (region-end)))) + (if (and story-id epic-id) + (progn + (org-clubhouse-update-story-internal + story-id :epic-id epic-id) + (when cb (funcall cb))) + (let ((elts (-filter (lambda (elt) (plist-get elt :CLUBHOUSE-ID)) + (org-clubhouse-collect-headlines beg end)))) + (org-clubhouse-prompt-for-epic + (lambda (epic-id) + (-map + (lambda (elt) + (let ((story-id (org-element-extract-clubhouse-id elt))) + (org-clubhouse-set-epic + story-id epic-id + (lambda () + (org-set-property + "clubhouse-epic" + (org-link-make-string + (org-clubhouse-link-to-epic epic-id) + (alist-get epic-id (org-clubhouse-epics)))) + (message "Successfully set the epic on story %d to %d" + story-id epic-id)))))) + elts))))) + +;;; + +(define-minor-mode org-clubhouse-mode + "If enabled, updates to the todo keywords on org headlines will update the +linked ticket in Clubhouse." 
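+ ;; The hook added below is buffer-local, so the mode is typically enabled per
+ ;; org buffer, e.g. via (add-hook 'org-mode-hook #'org-clubhouse-mode).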
+ :group 'org + :lighter "Org-Clubhouse" + :keymap '() + (add-hook 'org-after-todo-state-change-hook + 'org-clubhouse-update-status + nil + t)) + +(provide 'org-clubhouse) + +;;; org-clubhouse.el ends here diff --git a/users/glittershark/owothia/.envrc b/users/glittershark/owothia/.envrc new file mode 100644 index 000000000000..051d09d292a8 --- /dev/null +++ b/users/glittershark/owothia/.envrc @@ -0,0 +1 @@ +eval "$(lorri direnv)" diff --git a/users/glittershark/owothia/.gitignore b/users/glittershark/owothia/.gitignore new file mode 100644 index 000000000000..8e850e7a0af2 --- /dev/null +++ b/users/glittershark/owothia/.gitignore @@ -0,0 +1,30 @@ +dist +dist-* +build/ +cabal-dev +*.o +*.hi +*.hie +*.chi +*.chs.h +*.dyn_o +*.dyn_hi +.hpc +.hsenv +.cabal-sandbox/ +cabal.sandbox.config +*.prof +*.aux +*.hp +*.eventlog +.stack-work/ +cabal.project.local +cabal.project.local~ +.HTF/ +.ghc.environment.* + +# from nix-build +result + +# grr +*_flymake.hs diff --git a/users/glittershark/owothia/chatter.patch b/users/glittershark/owothia/chatter.patch new file mode 100644 index 000000000000..c2a6179bfbbf --- /dev/null +++ b/users/glittershark/owothia/chatter.patch @@ -0,0 +1,19 @@ +diff --git a/src/NLP/POS/LiteralTagger.hs b/src/NLP/POS/LiteralTagger.hs +index 913bee8..3c2f71d 100644 +--- a/src/NLP/POS/LiteralTagger.hs ++++ b/src/NLP/POS/LiteralTagger.hs +@@ -1,4 +1,4 @@ +-{-# LANGUAGE OverloadedStrings #-} ++{-# LANGUAGE OverloadedStrings, PackageImports #-} + module NLP.POS.LiteralTagger + ( tag + , tagSentence +@@ -27,7 +27,7 @@ import NLP.FullStop (segment) + import NLP.Types ( tagUNK, Sentence, TaggedSentence(..), applyTags + , Tag, POSTagger(..), CaseSensitive(..), tokens, showTok) + import Text.Regex.TDFA +-import Text.Regex.TDFA.Text (compile) ++import "regex-tdfa" Text.Regex.TDFA.Text (compile) + + taggerID :: ByteString + taggerID = pack "NLP.POS.LiteralTagger" diff --git a/users/glittershark/owothia/default.nix b/users/glittershark/owothia/default.nix new file mode 100644 index 000000000000..2a1b37800b0d --- /dev/null +++ b/users/glittershark/owothia/default.nix @@ -0,0 +1,6 @@ +{ pkgs ? (import ../../../. {}).third_party +, lib ? pkgs.lib +, ... 
+}: + +(import ./packageSet.nix {}).callPackage (import ./pkg.nix { inherit pkgs; }) {} diff --git a/users/glittershark/owothia/hie.yaml b/users/glittershark/owothia/hie.yaml new file mode 100644 index 000000000000..16a6c1526259 --- /dev/null +++ b/users/glittershark/owothia/hie.yaml @@ -0,0 +1,4 @@ +cradle: + cabal: + - path: './app' + component: 'exe:owothia' diff --git a/users/glittershark/owothia/owothia.cabal b/users/glittershark/owothia/owothia.cabal new file mode 100644 index 000000000000..ef5477ea1bf2 --- /dev/null +++ b/users/glittershark/owothia/owothia.cabal @@ -0,0 +1,53 @@ +cabal-version: 2.2 +name: owothia +version: 0.0.1.0 + +executable owothia + main-is: Main.hs + build-depends: base + , relude + , irc-client + , lens + , chatter + , containers + , text + , bytestring + , random + , envy + + mixins: base hiding (Prelude) + , relude (Relude as Prelude) + + hs-source-dirs: + src + + default-extensions: + BlockArguments + ConstraintKinds + DataKinds + DeriveAnyClass + DeriveGeneric + DerivingStrategies + DerivingVia + FlexibleContexts + FlexibleInstances + FunctionalDependencies + GADTSyntax + GeneralizedNewtypeDeriving + KindSignatures + LambdaCase + MultiWayIf + NoStarIsType + OverloadedStrings + PolyKinds + RankNTypes + ScopedTypeVariables + TupleSections + TypeApplications + TypeFamilies + TypeOperators + ViewPatterns + + ghc-options: -Wall -threaded -rtsopts -with-rtsopts=-N -O2 + + default-language: Haskell2010 diff --git a/users/glittershark/owothia/packageSet.nix b/users/glittershark/owothia/packageSet.nix new file mode 100644 index 000000000000..93812a08302d --- /dev/null +++ b/users/glittershark/owothia/packageSet.nix @@ -0,0 +1,20 @@ +{ pkgs ? (import ../../../. {}).third_party, ... }: + +let + + hlib = pkgs.haskell.lib; + +in + +pkgs.haskellPackages.extend (self: super: { + regex-tdfa-text = hlib.doJailbreak + (hlib.appendPatch + super.regex-tdfa-text + ./regex-tdfa-text.patch + ); + + fullstop = hlib.dontCheck super.fullstop; + + chatter = hlib.doJailbreak + (hlib.dontCheck (hlib.appendPatch super.chatter ./chatter.patch)); +}) diff --git a/users/glittershark/owothia/pkg.nix b/users/glittershark/owothia/pkg.nix new file mode 100644 index 000000000000..ef99d4d6518f --- /dev/null +++ b/users/glittershark/owothia/pkg.nix @@ -0,0 +1,6 @@ +args@{ pkgs ? (import ../../../. 
{}).third_party }: + +import ((import ./packageSet.nix args).haskellSrc2nix { + name = "owothia"; + src = pkgs.gitignoreSource ./.; +}) diff --git a/users/glittershark/owothia/regex-tdfa-text.patch b/users/glittershark/owothia/regex-tdfa-text.patch new file mode 100644 index 000000000000..6b2c34654382 --- /dev/null +++ b/users/glittershark/owothia/regex-tdfa-text.patch @@ -0,0 +1,40 @@ +diff --git a/Text/Regex/TDFA/Text.hs b/Text/Regex/TDFA/Text.hs +index c4ef9db..9299272 100644 +--- a/Text/Regex/TDFA/Text.hs ++++ b/Text/Regex/TDFA/Text.hs +@@ -38,13 +38,6 @@ import Text.Regex.TDFA.NewDFA.Uncons(Uncons(uncons)) + import qualified Text.Regex.TDFA.NewDFA.Engine as Engine(execMatch) + import qualified Text.Regex.TDFA.NewDFA.Tester as Tester(matchTest) + +-instance Extract T.Text where +- before = T.take; after = T.drop; empty = T.empty +- +-instance Uncons T.Text where +- {- INLINE uncons #-} +- uncons = T.uncons +- + instance RegexContext Regex T.Text T.Text where + match = polymatch + matchM = polymatchM +diff --git a/Text/Regex/TDFA/Text/Lazy.hs b/Text/Regex/TDFA/Text/Lazy.hs +index 73ca4a0..52958fb 100644 +--- a/Text/Regex/TDFA/Text/Lazy.hs ++++ b/Text/Regex/TDFA/Text/Lazy.hs +@@ -38,17 +38,10 @@ import Text.Regex.TDFA.NewDFA.Uncons(Uncons(uncons)) + import qualified Text.Regex.TDFA.NewDFA.Engine as Engine(execMatch) + import qualified Text.Regex.TDFA.NewDFA.Tester as Tester(matchTest) + +-instance Extract L.Text where +- before = L.take . toEnum; after = L.drop . toEnum; empty = L.empty +- + instance RegexContext Regex L.Text L.Text where + match = polymatch + matchM = polymatchM + +-instance Uncons L.Text where +- {- INLINE uncons #-} +- uncons = L.uncons +- + instance RegexMaker Regex CompOption ExecOption L.Text where + makeRegexOptsM c e source = makeRegexOptsM c e (L.unpack source) + diff --git a/users/glittershark/owothia/shell.nix b/users/glittershark/owothia/shell.nix new file mode 100644 index 000000000000..9446a353d5f9 --- /dev/null +++ b/users/glittershark/owothia/shell.nix @@ -0,0 +1,14 @@ +args@{ pkgs ? (import ../../../. {}).third_party, ... 
}: + +((import ./packageSet.nix args).extend (pkgs.haskell.lib.packageSourceOverrides { + owothia = pkgs.gitignoreSource ./.; +})).shellFor { + packages = p: [p.owothia]; + withHoogle = true; + doBenchmark = true; + buildInputs = with pkgs.haskellPackages; [ + cabal-install + hlint + pkgs.haskell-language-server.ghc883 + ]; +} diff --git a/users/glittershark/owothia/src/Main.hs b/users/glittershark/owothia/src/Main.hs new file mode 100644 index 000000000000..65578f258c1a --- /dev/null +++ b/users/glittershark/owothia/src/Main.hs @@ -0,0 +1,156 @@ +{-# LANGUAGE TemplateHaskell #-} +module Main where + +import Network.IRC.Client +import Control.Lens +import NLP.POS +import NLP.Types (POSTagger) +import qualified NLP.Types.Tags as Tags +import NLP.Types.Tree +import qualified NLP.Corpora.Conll as Conll +import NLP.Corpora.Conll (Tag) +import qualified Data.ByteString as BS +import System.Random +import System.Envy +import Data.Maybe +import qualified Data.Text +-------------------------------------------------------------------------------- + +data Config = Config + { _owoChance :: Int + , _ircServer :: ByteString + , _ircPort :: Int + , _ircServerPassword :: Maybe Text + , _nickservPassword :: Maybe Text + , _ircNick :: Maybe Text + } + deriving stock (Show, Eq, Generic) +makeLenses ''Config + +instance FromEnv Config where + fromEnv _ = + Config <$> env "OWO_CHANCE" + <*> env "IRC_SERVER" + <*> env "IRC_PORT" + <*> envMaybe "IRC_SERVER_PASSWORD" + <*> envMaybe "NICKSERV_PASSWORD" + <*> envMaybe "IRC_NICK" + +stopWord :: Text -> Bool +stopWord "'s" = True +stopWord "\"" = True +stopWord "is" = True +stopWord "are" = True +stopWord "am" = True +stopWord "were" = True +stopWord "was" = True +stopWord "be" = True +stopWord _ = False + +pickVerb :: POS Tag -> Maybe Text +pickVerb (POS Conll.VB (Token verb)) = Just verb +pickVerb (POS Conll.VBD (Token verb)) = Just verb +pickVerb (POS Conll.VBG (Token verb)) = Just verb +pickVerb (POS Conll.VBN (Token verb)) = Just verb +pickVerb (POS Conll.VBZ (Token verb)) = Just verb +pickVerb _ = Nothing + +pickNoun :: POS Tag -> Maybe Text +pickNoun (POS Conll.NN (Token noun)) = Just noun +pickNoun _ = Nothing + +randomPOS + :: Tags.Tag tag + => (POS tag -> Maybe Text) + -> POSTagger tag + -> Text + -> IO (Maybe Text) +randomPOS pickPOS tagger s = do + let candidates + = filter (not . stopWord) + . mapMaybe pickPOS + $ tag tagger s >>= \(TaggedSent ps) -> ps + i <- randomRIO (0, length candidates - 1) + pure $ candidates ^? ix i + +doOwo :: MonadIO m => Config -> m Bool +doOwo conf = do + n <- liftIO (randomRIO @Int (0, conf ^. owoChance)) + pure $ n == 0 + +data OwoType = Noun | Verb + deriving stock (Show, Eq) + +instance Random OwoType where + random = over _1 (bool Noun Verb) . random + randomR = const random + +vowels :: [Char] +vowels = "aeiou" + +article :: Text -> Text +article (x :< _) | x `elem` vowels = "an" +article _ = "a" + +owo :: OwoType -> Text -> Text +owo Noun n = mconcat + [ "I'm " + , article n + , " " + , n + , if "o" `Data.Text.isSuffixOf` n + then "wo" + else " owo" + ] +owo Verb v = v <> " me owo" + +pickOwo :: OwoType -> POS Tag -> Maybe Text +pickOwo Verb = pickVerb +pickOwo Noun = pickNoun + +randomOwo :: OwoType -> POSTagger Tag -> Text -> IO (Maybe Text) +randomOwo = randomPOS . 
pickOwo + +owothiaHandler :: Config -> Text -> IORef Bool -> POSTagger Tag -> EventHandler s +owothiaHandler conf nick state tagger = EventHandler Just $ \src ev -> do + hasIdentified <- readIORef state + when (not hasIdentified) $ do + nickservAuth + send $ Join "##tvl" + writeIORef state True + + when ("You are now identified" `BS.isInfixOf` (ev ^. raw)) $ + send $ Join "##tvl" + + case (src, ev ^. message) of + (Channel "##tvl" nick, Privmsg _ (Right m)) -> do + willOwo <- doOwo conf + when willOwo $ owoMessage m + _ -> pure () + + pure () + + where + owoMessage m = do + owoType <- liftIO randomIO + mWord <- liftIO $ randomOwo owoType tagger m + for_ mWord $ \word -> send $ Privmsg "##tvl" $ Right $ owo owoType word + nickservAuthMsg = "IDENTIFY " <> nick <> " " <> fromJust (conf ^. nickservPassword) + nickservAuth = send $ Privmsg "NickServ" $ Right nickservAuthMsg + +main :: IO () +main = do + conf <- either fail pure =<< decodeEnv + tagger <- defaultTagger + state <- newIORef $ not . isJust $ (conf ^. nickservPassword) + let nick = fromMaybe "owothia" (conf ^. ircNick) + conn = + plainConnection (conf ^. ircServer) (conf ^. ircPort) + & realname .~ "Owothia Revströwö" + & password .~ (conf ^. ircServerPassword) + & logfunc .~ stdoutLogger + cfg = + defaultInstanceConfig nick + & channels .~ ["##tvl"] + & handlers %~ (owothiaHandler conf nick state tagger : ) + runClient conn cfg () diff --git a/users/glittershark/pkgs/fprintd/default.nix b/users/glittershark/pkgs/fprintd/default.nix new file mode 100644 index 000000000000..0f9d414aeb86 --- /dev/null +++ b/users/glittershark/pkgs/fprintd/default.nix @@ -0,0 +1,128 @@ +args @ { pkgs, ... }: + +let + nixpkgs = import pkgs.nixpkgsSrc { + config.allowUnfree = true; + overlays = [(self: super: { + gcc = super.gcc9; + })]; + }; + + inherit (nixpkgs) + stdenv + fetchFromGitLab + fetchpatch + pkgconfig + meson + ninja + perl + gettext + cairo + gtk-doc + libxslt + docbook-xsl-nons + docbook_xml_dtd_412 + glib + dbus + dbus-glib + polkit + nss + pam + systemd + python3; + libfprint-tod = nixpkgs.callPackage ./libfprint-tod.nix {}; + +in + +stdenv.mkDerivation rec { + pname = "fprintd"; + version = "1.90.1"; + outputs = [ "out" "devdoc" ]; + + src = fetchFromGitLab { + domain = "gitlab.freedesktop.org"; + owner = "libfprint"; + repo = pname; + rev = version; + sha256 = "0mbzk263x7f58i9cxhs44mrngs7zw5wkm62j5r6xlcidhmfn03cg"; + }; + + patches = [ + # Fixes issue with ":" when there is multiple paths (might be the case on NixOS) + # https://gitlab.freedesktop.org/libfprint/fprintd/-/merge_requests/50 + (fetchpatch { + url = "https://gitlab.freedesktop.org/libfprint/fprintd/-/commit/d7fec03f24d10f88d34581c72f0eef201f5eafac.patch"; + sha256 = "0f88dhizai8jz7hpm5lpki1fx4593zcy89iwi4brsqbqc7jp9ls0"; + }) + + # Fix locating libpam_wrapper for tests + (fetchpatch { + url = "https://gitlab.freedesktop.org/libfprint/fprintd/-/merge_requests/40.patch"; + sha256 = "0qqy090p93lzabavwjxzxaqidkcb3ifacl0d3yh1q7ms2a58yyz3"; + }) + (fetchpatch { + url = "https://gitlab.freedesktop.org/libfprint/fprintd/-/commit/f401f399a85dbeb2de165b9b9162eb552ab6eea7.patch"; + sha256 = "1bc9g6kc95imlcdpvp8qgqjsnsxg6nipr6817c1pz5i407yvw1iy"; + }) + ]; + + nativeBuildInputs = [ + pkgconfig + meson + ninja + perl + gettext + gtk-doc + libxslt + dbus + docbook-xsl-nons + docbook_xml_dtd_412 + ]; + + buildInputs = [ + glib + dbus-glib + polkit + nss + pam + systemd + libfprint-tod + ]; + + checkInputs = with python3.pkgs; [ + python-dbusmock + dbus-python + pygobject3 + pycairo + 
pypamtest + ]; + + mesonFlags = [ + "-Dgtk_doc=true" + "-Dpam_modules_dir=${placeholder "out"}/lib/security" + "-Dsysconfdir=${placeholder "out"}/etc" + "-Ddbus_service_dir=${placeholder "out"}/share/dbus-1/system-services" + "-Dsystemd_system_unit_dir=${placeholder "out"}/lib/systemd/system" + ]; + + PKG_CONFIG_DBUS_1_INTERFACES_DIR = "${placeholder "out"}/share/dbus-1/interfaces"; + PKG_CONFIG_POLKIT_GOBJECT_1_POLICYDIR = "${placeholder "out"}/share/polkit-1/actions"; + PKG_CONFIG_DBUS_1_DATADIR = "${placeholder "out"}/share"; + + # FIXME: Ugly hack for tests to find libpam_wrapper.so + LIBRARY_PATH = stdenv.lib.makeLibraryPath [ python3.pkgs.pypamtest ]; + + doCheck = true; + + postPatch = '' + patchShebangs po/check-translations.sh + ''; + + meta = with stdenv.lib; { + homepage = "https://fprint.freedesktop.org/"; + description = "D-Bus daemon that offers libfprint functionality over the D-Bus interprocess communication bus"; + license = licenses.gpl2; + platforms = platforms.linux; + maintainers = with maintainers; [ abbradar elyhaka ]; + }; +} diff --git a/users/glittershark/pkgs/fprintd/libfprint-tod-clean.nix b/users/glittershark/pkgs/fprintd/libfprint-tod-clean.nix new file mode 100644 index 000000000000..d8e64f3cc1c9 --- /dev/null +++ b/users/glittershark/pkgs/fprintd/libfprint-tod-clean.nix @@ -0,0 +1,81 @@ +{ stdenv +, pkgs +, fetchFromGitLab +, fetchurl +, pkgconfig +, meson +, ninja +, libusb +, gusb +, pixman +, gobject-introspection +, glib +, nss +, gtk3 +, python3 +, umockdev +, coreutils +, gtk-doc +, docbook_xsl +, docbook_xml_dtd_43 +}: + +stdenv.mkDerivation rec { + pname = "libfprint-tod-clean"; + version = "1.90.2"; + outputs = [ "out" "devdoc" ]; + + src = fetchFromGitLab { + domain = "gitlab.freedesktop.org"; + owner = "3v1n0"; + repo = "libfprint"; + rev = "0e123d0752538d834ee2cca8b471373369ad5e89"; + sha256 = "11yl3nikdyykamafqf3ys1wg7zx3rb81lf11ndd8sf9rkwwfgpn6"; + }; + + checkInputs = [ (python3.withPackages (ps: with ps; [ pycairo gobject ])) umockdev ]; + + nativeBuildInputs = [ + pkgconfig + meson + gobject-introspection + ninja + gtk-doc + docbook_xsl + docbook_xml_dtd_43 + ]; + + buildInputs = [ + libusb + gusb + pixman + glib + nss + gtk3 + ]; + + mesonFlags = [ + "-Dudev_rules_dir=${placeholder "out"}/lib/udev/rules.d" + "-Dx11-examples=false" + ]; + + doChecks = true; + + checkPhase = '' + meson test -C build --print-errorlogs + ''; + + postPatch = '' + substituteInPlace libfprint/meson.build \ + --replace /bin/echo ${coreutils}/bin/echo + ''; + + + meta = with stdenv.lib; { + homepage = https://fprint.freedesktop.org/; + description = "A library designed to make it easy to add support for consumer fingerprint readers"; + license = licenses.lgpl21; + platforms = platforms.linux; + maintainers = with maintainers; [ jobojeha ]; + }; +} diff --git a/users/glittershark/pkgs/fprintd/libfprint-tod-goodix.nix b/users/glittershark/pkgs/fprintd/libfprint-tod-goodix.nix new file mode 100644 index 000000000000..f6d7d4ea98c9 --- /dev/null +++ b/users/glittershark/pkgs/fprintd/libfprint-tod-goodix.nix @@ -0,0 +1,47 @@ +{ stdenv +, pkgs +, fetchgit +, libfprint-tod ? 
(pkgs.callPackage ./libfprint-tod-clean.nix {}) +, git +, autoPatchelfHook +, libusb +, glibc +, +}: +stdenv.mkDerivation rec { + pname = "libfprint-2-tod1-xps9300-bin"; + version = "0.0.6"; + + src = fetchgit { + url = "git://git.launchpad.net/~oem-solutions-engineers/libfprint-2-tod1-goodix/+git/libfprint-2-tod1-goodix/"; + rev = "882735c6366fbe30149eea5cfd6d0ddff880f0e4"; + sha256 = "1x9h5x027s2avdhjhnfmmhdpgyf9x79fz8skcjd3rm3phnpr3zsj"; + }; + + buildInputs = [ + libfprint-tod + git + libusb + autoPatchelfHook + glibc + ]; + + installPhase = '' + install -dm 755 "$out/lib/udev/rules.d/" + install -dm 755 "$out/usr/lib/libfprint-2/tod-1/" + + sed -n -r '/Shenzhen/,/^\s*$/p' debian/copyright > LICENSE + install -Dm644 LICENSE "$out/usr/share/licenses/libfprint-2-tod1-xps9300-bin/LICENSE" + + install -Dm 755 usr/lib/x86_64-linux-gnu/libfprint-2/tod-1/libfprint-tod-goodix-53xc-0.0.6.so "$out/usr/lib/libfprint-2/tod-1/" + install -Dm 0755 lib/udev/rules.d/60-libfprint-2-tod1-goodix.rules "$out/lib/udev/rules.d/" + ''; + + meta = with stdenv.lib; { + homepage = "https://git.launchpad.net/~oem-solutions-engineers/libfprint-2-tod1-goodix/+git/libfprint-2-tod1-goodix"; + description = "Goodix driver module for libfprint-2 Touch OEM Driver"; + license = licenses.unfreeRedistributable; + platforms = platforms.linux; + maintainers = with maintainers; [ jobojeha ]; + }; +} diff --git a/users/glittershark/pkgs/fprintd/libfprint-tod.nix b/users/glittershark/pkgs/fprintd/libfprint-tod.nix new file mode 100644 index 000000000000..d9c87e673c97 --- /dev/null +++ b/users/glittershark/pkgs/fprintd/libfprint-tod.nix @@ -0,0 +1,89 @@ +{ stdenv +, pkgs +, fetchFromGitLab +, fetchurl +, pkgconfig +, meson +, ninja +, libusb +, gusb +, pixman +, gobject-introspection +, glib +, nss +, gtk3 +, python3 +, umockdev +, coreutils +, gtk-doc +, docbook_xsl +, docbook_xml_dtd_43 +, libfprint-tod-goodix ? 
(pkgs.callPackage ./libfprint-tod-goodix.nix {}) +}: + +stdenv.mkDerivation rec { + pname = "libfprint-tod"; + version = "1.90.2"; + outputs = [ "out" "devdoc" ]; + + src = fetchFromGitLab { + domain = "gitlab.freedesktop.org"; + owner = "3v1n0"; + repo = "libfprint"; + rev = "0e123d0752538d834ee2cca8b471373369ad5e89"; + sha256 = "11yl3nikdyykamafqf3ys1wg7zx3rb81lf11ndd8sf9rkwwfgpn6"; + }; + + checkInputs = [ (python3.withPackages (ps: with ps; [ pycairo gobject ])) umockdev ]; + + nativeBuildInputs = [ + pkgconfig + meson + gobject-introspection + ninja + gtk-doc + docbook_xsl + docbook_xml_dtd_43 + ]; + + buildInputs = [ + libusb + gusb + pixman + glib + nss + gtk3 + libfprint-tod-goodix + ]; + + mesonFlags = [ + "-Dudev_rules_dir=${placeholder "out"}/lib/udev/rules.d" + "-Dx11-examples=false" + ]; + + doChecks = true; + + checkPhase = '' + meson test -C build --print-errorlogs + ''; + + postPatch = '' + substituteInPlace libfprint/meson.build \ + --replace /bin/echo ${coreutils}/bin/echo + ''; + + postInstall = '' + mkdir -p $out/lib/libfprint-2/tod-1/ + ln -s ${libfprint-tod-goodix}/usr/lib/libfprint-2/tod-1/libfprint-tod-goodix-53xc-0.0.6.so $out/lib/libfprint-2/tod-1/ + ln -s ${libfprint-tod-goodix}/lib/udev/rules.d/60-libfprint-2-tod1-goodix.rules $out/lib/udev/rules.d/ + + ''; + + meta = with stdenv.lib; { + homepage = https://fprint.freedesktop.org/; + description = "A library designed to make it easy to add support for consumer fingerprint readers"; + license = licenses.lgpl21; + platforms = platforms.linux; + maintainers = with maintainers; [ jobojeha ]; + }; +} diff --git a/users/glittershark/resume/chimera.png b/users/glittershark/resume/chimera.png new file mode 100644 index 000000000000..6dde989c53b0 --- /dev/null +++ b/users/glittershark/resume/chimera.png Binary files differdiff --git a/users/glittershark/resume/collection.sty b/users/glittershark/resume/collection.sty new file mode 100644 index 000000000000..4f1540a9d214 --- /dev/null +++ b/users/glittershark/resume/collection.sty @@ -0,0 +1,85 @@ +%% start of file `collection.sty'. +%% Copyright 2013-2013 Xavier Danaux (xdanaux@gmail.com). +% +% This work may be distributed and/or modified under the +% conditions of the LaTeX Project Public License version 1.3c, +% available at http://www.latex-project.org/lppl/. 
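+%
+% A minimal usage sketch of the macros defined below (values are illustrative):
+%   \collectionnew{phones}
+%   \collectionadd[mobile]{phones}{+1 555 0100}
+%   \collectionloop{phones}{\collectionloopkey: \collectionloopitem\par}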
+ + +%------------------------------------------------------------------------------- +% identification +%------------------------------------------------------------------------------- +\NeedsTeXFormat{LaTeX2e} +\ProvidesPackage{collection}[2013/03/28 v1.0.0 collections] + + +%------------------------------------------------------------------------------- +% requirements +%------------------------------------------------------------------------------- + + +\RequirePackage{ifthen} + + +%------------------------------------------------------------------------------- +% code +%------------------------------------------------------------------------------- + +% creates a new collection +% usage: \collectionnew{<collection name>} +\newcommand*{\collectionnew}[1]{% + \newcounter{collection@#1@count}} + +% adds an item to a collection +% usage: \collectionadd[<optional key>]{<collection name>}{<item to add>} +\newcommand*{\collectionadd}[3][]{% + \expandafter\def\csname collection@#2@item\roman{collection@#2@count}\endcsname{#3}% + \if\relax\noexpand#1\relax% if #1 is empty + \else\expandafter\def\csname collection@#2@key\roman{collection@#2@count}\endcsname{#1}\fi% + \stepcounter{collection@#2@count}} + +% returns the number of items in a collection +% usage: \collectioncount{<collection name>} +\newcommand*{\collectioncount}[1]{% + \value{collection@#1@count}} + +% gets an item from a collection +% usage: \collectiongetitem{<collection name>}{<element id>} +% where <element id> is an integer between 0 and (collectioncount-1) +\newcommand*{\collectiongetitem}[2]{% + \csname collection@#1@item\romannumeral #2\endcsname} + +% gets a key from a collection +% usage: \collectiongetkey{<collection name>}{<element id>} +% where <element id> is an integer between 0 and (collectioncount-1) +\newcommand*{\collectiongetkey}[2]{% + \csname collection@#1@key\romannumeral #2\endcsname} + +% loops through a collection and perform the given operation on every element +% usage: \collectionloop{<collection name>}{<operation sequence>} +% where <operation sequence> is the code sequence to be evaluated for each collection item, +% code which can refer to \collectionloopid, \collectionloopkey, \collectionloopitem and +% \collectionloopbreak +\newcounter{collection@iterator} +\newcommand*{\collectionloopbreak}{\let\iterate\relax} +\newcommand*{\collectionloop}[2]{% + \setcounter{collection@iterator}{0}% + \loop\ifnum\value{collection@iterator}<\value{collection@#1@count}% + \def\collectionloopid{\arabic{collection@iterator}}% + \def\collectionloopitem{\collectiongetitem{#1}{\collectionloopid}}% + \def\collectionloopkey{\collectiongetkey{#1}{\collectionloopid}}% + #2% + \stepcounter{collection@iterator}% + \repeat} + +% loops through a collection and finds the (first) element matching the given key +% usage: \collectionfindbykey{<collection name>}{key>} +\newcommand*{\collectionfindbykey}[2]{% + \collectionloop{#1}{% + \ifthenelse{\equal{\collectionloopkey}{#2}}{\collectionloopitem\collectionloopbreak}{}}} + + +\endinput + + +%% end of file `collection.cls'. diff --git a/users/glittershark/resume/default.nix b/users/glittershark/resume/default.nix new file mode 100644 index 000000000000..2db6a650bc6e --- /dev/null +++ b/users/glittershark/resume/default.nix @@ -0,0 +1,37 @@ +{ pkgs, ... 
}: + +with pkgs.lib; + +pkgs.runCommandNoCC "resume.pdf" { + buildInputs = [(pkgs.texlive.combine { + inherit (pkgs.texlive) + capt-of + collection-fontsrecommended + enumitem + etoolbox + fancyvrb + float + fncychap + framed + l3packages + microtype + needspace + parskip + scheme-basic + tabulary + titlesec + ulem + upquote + varwidth + wrapfig + xcolor + ; + })]; +} '' + cp ${builtins.filterSource (path: type: + type == "regular" && + any (ext: hasSuffix ext path) [".sty" ".cls" ".tex" ".png"] + ) ./.}/* . + pdflatex ./resume.tex + cp resume.pdf $out +'' diff --git a/users/glittershark/resume/helvetica.sty b/users/glittershark/resume/helvetica.sty new file mode 100644 index 000000000000..dacc129a1025 --- /dev/null +++ b/users/glittershark/resume/helvetica.sty @@ -0,0 +1,32 @@ +%% +%% This is file `helvetica.sty', based on helvet.sty extended to include +%% definitions for rm and tt. This means commands such as \textbf, \textit, +%% etc. will appear in Helvetica. +%% Changes added by Harriet Borton on <1995/12/11> +%% +%% The original source files were: +%% +%% psfonts.dtx (with options: `helvet') +%% +%% Copyright (C) 1994 Sebastian Rahtz +%% All rights reserved. +%% +%% The original file is part of the PSNFSS2e package. +%% ----------------------------------------- +%% +%% This is a generated file. Permission is granted to to customize the +%% declarations in this file to serve the needs of your installation. +%% However, no permission is granted to distribute a modified version of +%% this file under its original name. +\def\fileversion{4.2} +\def\filedate{94/11/11} +\def\docdate {94/11/06} +\NeedsTeXFormat{LaTeX2e} +\ProvidesPackage{helvetica}[\filedate\space\fileversion\space +Helvetica PSNFSS2e package] +\renewcommand{\sfdefault}{phv} +\renewcommand{\rmdefault}{phv} +\renewcommand{\ttdefault}{pcr} +\endinput +%% +%% End of file `helvetica.sty'. diff --git a/users/glittershark/resume/moderncv.cls b/users/glittershark/resume/moderncv.cls new file mode 100644 index 000000000000..a40f80733736 --- /dev/null +++ b/users/glittershark/resume/moderncv.cls @@ -0,0 +1,585 @@ +%% start of file `moderncv.cls'. +%% Copyright 2006-2013 Xavier Danaux (xdanaux@gmail.com). +% +% This work may be distributed and/or modified under the +% conditions of the LaTeX Project Public License version 1.3c, +% available at http://www.latex-project.org/lppl/. 
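+%
+% Typical document-level usage of the commands declared below (style/colour
+% names and personal data are illustrative):
+%   \documentclass[11pt,letterpaper,sans]{moderncv}
+%   \moderncvstyle{classic}  % any installed moderncvstyle<name>.sty
+%   \moderncvcolor{black}    % e.g. moderncvcolorblack.sty
+%   \name{Jane}{Doe}
+%   \begin{document}
+%   \makecvtitle
+%   \cventry{2020}{Job title}{Employer}{City}{}{Description}
+%   \end{document}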
+ + +%------------------------------------------------------------------------------- +% identification +%------------------------------------------------------------------------------- +\NeedsTeXFormat{LaTeX2e} +\ProvidesClass{moderncv}[2013/02/09 v1.3.0 modern curriculum vitae and letter document class] + + +%------------------------------------------------------------------------------- +% class options +% +% (need to be done before the external package loading, for example because +% we need \paperwidth, \paperheight and \@ptsize to be defined before loading +% geometry and fancyhdr) +%------------------------------------------------------------------------------- +% paper size option +\DeclareOption{a4paper}{ + \setlength\paperheight{297mm} + \setlength\paperwidth{210mm}} +\DeclareOption{a5paper}{ + \setlength\paperheight{210mm} + \setlength\paperwidth{148mm}} +\DeclareOption{b5paper}{ + \setlength\paperheight{250mm} + \setlength\paperwidth{176mm}} +\DeclareOption{letterpaper}{ + \setlength\paperheight{11in} + \setlength\paperwidth{8.5in}} +\DeclareOption{legalpaper}{ + \setlength\paperheight{14in} + \setlength\paperwidth{8.5in}} +\DeclareOption{executivepaper}{ + \setlength\paperheight{10.5in} + \setlength\paperwidth{7.25in}} +\DeclareOption{landscape}{ + \setlength\@tempdima{\paperheight} + \setlength\paperheight{\paperwidth} + \setlength\paperwidth{\@tempdima}} + +% font size options +\newcommand\@ptsize{} +\DeclareOption{10pt}{\renewcommand\@ptsize{0}} +\DeclareOption{11pt}{\renewcommand\@ptsize{1}} +\DeclareOption{12pt}{\renewcommand\@ptsize{2}} + +% font type options +\DeclareOption{sans}{\AtBeginDocument{\renewcommand{\familydefault}{\sfdefault}}} +\DeclareOption{roman}{\AtBeginDocument{\renewcommand{\familydefault}{\rmdefault}}} + +% draft/final option +\DeclareOption{draft}{\setlength\overfullrule{5pt}} +\DeclareOption{final}{\setlength\overfullrule{0pt}} + +% execute default options +\ExecuteOptions{a4paper,11pt,final} + +% process given options +\ProcessOptions\relax +\input{size1\@ptsize.clo} + + +%------------------------------------------------------------------------------- +% required packages +%------------------------------------------------------------------------------- +% \AtEndPreamble hook (loading etoolbox instead of defining the macro, as to avoid incompatibilities with etoolbox (and packages relying on it) defining the macro too) +\RequirePackage{etoolbox} +%\let\@endpreamblehook\@empty +%\def\AtEndPreamble{\g@addto@macro\@endpreamblehook} +%\let\document@original\document +%\def\document{\endgroup\@endpreamblehook\begingroup\document@original} + +% if... then... else... 
constructs +\RequirePackage{ifthen} +% TODO: move to xifthen and \isempty{<arg>} instead of \equal{<arg>}{} + +% color +\RequirePackage{xcolor} + +% font loading +%\RequirePackage{ifxetex,ifluatex} +%\newif\ifxetexorluatex +%\ifxetex +% \xetexorluatextrue +%\else +% \ifluatex +% \xetexorluatextrue +% \else +% \xetexorluatexfalse +% \fi +%\fi +% automatic loading of latin modern fonts +%\ifxetexorluatex +% \RequirePackage{fontspec} +% \defaultfontfeatures{Ligatures=TeX} +% \RequirePackage{unicode-math} +% \setmainfont{Latin Modern} +% \setsansfont{Latin Modern Sans} +% \setmathfont{Latin Modern Math} +%\else + \RequirePackage[T1]{fontenc} + \IfFileExists{lmodern.sty}% + {\RequirePackage{lmodern}}% + {} +%\fi + +% hyper links (hyperref is loaded at the end of the preamble to pass options required by loaded packages such as CJK) +\newcommand*\pdfpagemode{UseNone}% do not show thumbnails or bookmarks on opening (on supporting browsers); set \pdfpagemode to "UseOutlines" to show bookmarks +\RequirePackage{url} +\urlstyle{tt} +\AtEndPreamble{ + \pagenumbering{arabic}% has to be issued before loading hyperref, as to set \thepage and hence to avoid hyperref issuing a warning and setting pdfpagelabels=false + \RequirePackage[unicode]{hyperref}% unicode is required for unicode pdf metadata + \hypersetup{ + breaklinks, + baseurl = http://, + pdfborder = 0 0 0, + pdfpagemode = \pdfpagemode, + pdfstartpage = 1, + pdfcreator = {\LaTeX{} with 'moderncv' package}, +% pdfproducer = {\LaTeX{}},% will/should be set automatically to the correct TeX engine used + bookmarksopen = true, + bookmarksdepth= 2,% to show sections and subsections + pdfauthor = {\@firstname{}~\@lastname{}}, + pdftitle = {\@firstname{}~\@lastname{} -- \@title{}}, + pdfsubject = {Resum\'{e} of \@firstname{}~\@lastname{}}, + pdfkeywords = {\@firstname{}~\@lastname{}, curriculum vit\ae{}, resum\'{e}}}} + +% graphics +\RequirePackage{graphicx} + +% headers and footers +\RequirePackage{fancyhdr} +\fancypagestyle{plain}{ + \renewcommand{\headrulewidth}{0pt} + \renewcommand{\footrulewidth}{0pt} + \fancyhf{}} +% page numbers in footer if more than 1 page +\newif\if@displaypagenumbers\@displaypagenumberstrue +\newcommand*{\nopagenumbers}{\@displaypagenumbersfalse} +\AtEndPreamble{% + \AtBeginDocument{% + \if@displaypagenumbers% + \@ifundefined{r@lastpage}{}{% + \ifthenelse{\pageref{lastpage}>1}{% + \newlength{\pagenumberwidth}% + \settowidth{\pagenumberwidth}{\color{color2}\addressfont\itshape\strut\thepage/\pageref{lastpage}}% + \fancypagestyle{plain}{% + \fancyfoot[r]{\parbox[b]{\pagenumberwidth}{\color{color2}\pagenumberfont\strut\thepage/\pageref{lastpage}}}}% the parbox is required to ensure alignment with a possible center footer (e.g., as in the casual style) + \pagestyle{plain}}{}}% + \AtEndDocument{\label{lastpage}}\else\fi}} +\pagestyle{plain} + +% reduced list spacing +% package providing hooks into lists +% originally developped by Jakob Schiøtz (see http://dcwww.camd.dtu.dk/~schiotz/comp/LatexTips/tweaklist.sty) +% modified and distributed with moderncv(not available otherwise on ctan) +\RequirePackage{tweaklist} +\renewcommand*{\itemhook}{% + \@minipagetrue% removes spacing before lists as they use \addvspace, which doesn't add vertical space inside minipages + \@noparlisttrue% removes spacing at end of lists, caused by \par + \setlength{\topsep}{0pt}% normally not required thanks to \@minipagetrue + \setlength{\partopsep}{0pt}% normally not required thanks to \@minipagetrue + \setlength{\parsep}{0pt}% not required when \itemsep 
and \parskip are set to 0pt (?) + \setlength{\parskip}{0pt}% + \setlength{\itemsep}{0pt}} +\renewcommand*{\enumhook}{\itemhook{}} +\renewcommand*{\deschook}{\itemhook{}} + +% lengths calculations +\RequirePackage{calc} + +% advanced command arguments (LaTeX 3) +\RequirePackage{xparse} +% TODO (?): replace all \newcommand by \NewDocumentCommand + +% micro-typography (e.g., character protrusion, font expansion, hyphenatable letterspacing) +\RequirePackage{microtype} + +% compatibility package with older versions of moderncv +\RequirePackageWithOptions{moderncvcompatibility} + + +%------------------------------------------------------------------------------- +% class definition +%------------------------------------------------------------------------------- +% minimal base settings +\setlength\lineskip{1\p@} +\setlength\normallineskip{1\p@} +\renewcommand\baselinestretch{} +\setlength{\parindent}{0\p@} +\setlength{\parskip}{0\p@} +\setlength\columnsep{10\p@} +\setlength\columnseprule{0\p@} +\setlength\fboxsep{3\p@} +\setlength\fboxrule{.4\p@} +\setlength\arrayrulewidth{.4\p@} +\setlength\doublerulesep{2\p@} + +% not set on purpose +%\setlength\arraycolsep{5\p@} +%\setlength\tabcolsep{6\p@} +%\setlength\tabbingsep{\labelsep} + +\raggedbottom +\onecolumn + + +%------------------------------------------------------------------------------- +% overall design commands definitions +%------------------------------------------------------------------------------- +% elements +% defines one's name +% usage: \name{<firstname>}{<lastname>} +\newcommand*{\name}[2]{\def\@firstname{#1}\def\@lastname{#2}} +% defines one's title (optional) +% usage: \title{<title>} +\renewcommand*{\title}[1]{\def\@title{#1}} +% defines one's address (optional) +% usage: \address{<street>}{<city>}{<country>} +% where the <city> and <country> arguments can be omitted or provided empty +\NewDocumentCommand{\address}{mG{}G{}}{\def\@addressstreet{#1}\def\@addresscity{#2}\def\@addresscountry{#3}} +% adds a mobile/fixed/fax number to one's personal information (optional) +% usage: \phone[<optional type>]{<number>} +% where <optional type> should be either "mobile", "fixed" or "fax +\RequirePackage{collection} +\collectionnew{phones} +\newcommand*{\phone}[2][fixed]{\collectionadd[#1]{phones}{#2}} +\newcommand*{\email}[1]{\def\@email{#1}} +% defines one's home page (optional) +% usage: \homepage{<url>} +\newcommand*{\homepage}[1]{\def\@homepage{#1}} +% defines one's github (optional) +% usage: \homepage{<url>} +\newcommand*{\github}[1]{\def\@github{#1}} +% defines additional personal information (optional) +% usage: \extrainfo{<text>} +\newcommand*{\extrainfo}[1]{\def\@extrainfo{#1}} + +% colors +\definecolor{color0}{rgb}{0,0,0}% main default color, normally left to black +\definecolor{color1}{rgb}{0,0,0}% primary theme color +\definecolor{color2}{rgb}{0,0,0}% secondary theme color +\definecolor{color3}{rgb}{0,0,0}% tertiary theme color + +% symbols +% itemize labels (the struts were added to correct inter-item spacing (works for single line items, until a solution is found for multi-line ones...) 
+\newcommand*{\labelitemi}{\strut\textcolor{color1}{\large\rmfamily\textbullet}}% the \rmfamily is required to force Latin Modern fonts when using sans serif, as OMS/lmss/m/n is not defined and gets substituted by OMS/cmsy/m/n +\newcommand*{\labelitemii}{\strut\textcolor{color1}{\large\bfseries-}} +\newcommand*{\labelitemiii}{\strut\textcolor{color1}{\rmfamily\textperiodcentered}}% alternative: \textasteriskcentered; the \rmfamily is required to force Latin Modern fonts when using sans serif, as OMS/lmss/m/n is not defined and gets substituted by OMS/cmsy/m/n +\newcommand*{\labelitemiv}{\labelitemiii} +% enumerate labels +\renewcommand{\theenumi}{\@arabic\c@enumi} +\renewcommand{\theenumii}{\@alph\c@enumii} +\renewcommand{\theenumiii}{\@roman\c@enumiii} +\renewcommand{\theenumiv}{\@Alph\c@enumiv} +% other symbols +\newcommand*{\listitemsymbol}{\labelitemi~} +\newcommand*{\addresssymbol}{} +\newcommand*{\mobilephonesymbol}{} +\newcommand*{\fixedphonesymbol}{} +\newcommand*{\faxphonesymbol}{} +\newcommand*{\emailsymbol}{} +\newcommand*{\homepagesymbol}{} + +% fonts +\AtBeginDocument{\normalfont\color{color0}} + +% strings for internationalisation +\newcommand*{\refname}{Publications} +\newcommand*{\enclname}{Enclosure} + +% makes the footer (normally used both for the resume and the letter) +% usage: \makefooter +\newcommand*{\makefooter}{}% + +% loads a style variant +% usage: \moderncvstyle{<style variant name>} +\newcommand*{\moderncvstyle}[1]{ + \RequirePackage{moderncvstyle#1}} + +% loads a color scheme +% usage: \moderncvcolor{<color scheme name>} +\newcommand*{\moderncvcolor}[1]{ + \RequirePackage{moderncvcolor#1}} + +% loads an icons set +% usage: \moderncvicons{<icon set name>} +\newcommand*{\moderncvicons}[1]{ + \RequirePackage{moderncvicons#1}} + +% recomputes all automatic lengths +\newcommand*{\recomputelengths}{\recomputecvlengths} +\AtBeginDocument{\recomputelengths{}} + +% creates a length if not yet defined +\newcommand*{\@initializelength}[1]{% + \ifdefined#1\else\newlength{#1}\fi} + + +%------------------------------------------------------------------------------- +% resume design commands definitions +%------------------------------------------------------------------------------- +% elements +% defines one's picture (optional) +% usage: photo[<picture width>][<picture frame thickness>]{<picture filename>} +\NewDocumentCommand{\photo}{O{64pt}O{0.4pt}m}{\def\@photowidth{#1}\def\@photoframewidth{#2}\def\@photo{#3}} +\newcommand*{\quote}[1]{\def\@quote{#1}} + +% fonts +\newcommand*{\namefont}{} +\newcommand*{\titlefont}{} +\newcommand*{\addressfont}{} +\newcommand*{\quotefont}{} +\newcommand*{\sectionfont}{} +\newcommand*{\subsectionfont}{} +\newcommand*{\hintfont}{} +\newcommand*{\pagenumberfont}{\addressfont\itshape} + +% styles +\newcommand*{\namestyle}[1]{{\namefont#1}} +\newcommand*{\titlestyle}[1]{{\titlefont#1}} +\newcommand*{\addressstyle}[1]{{\addressfont#1}} +\newcommand*{\quotestyle}[1]{{\quotefont#1}} +\newcommand*{\sectionstyle}[1]{{\sectionfont#1}} +\newcommand*{\subsectionstyle}[1]{{\subsectionfont#1}} +\newcommand*{\hintstyle}[1]{{\hintfont#1}} +\newcommand*{\pagenumberstyle}[1]{{\pagenumberfont#1}} + +% recompute all resume lengths +\newcommand*{\recomputecvlengths}{} + +% internal maketitle command to issue a new line only when required +\newif\if@firstdetailselement\@firstdetailselementtrue +\newcommand*{\makenewline}{ + \if@firstdetailselement% + \strut% to ensure baseline alignment, e.g. 
with when put in the margin vs sections that also contains a \strut + \else% + \\\fi% + \@firstdetailselementfalse} + +% makes the resume title +% usage: \makecvtitle +\newcommand*{\makecvtitle}{} + +% makes the resume footer +% usage: \makecvfooter +\newcommand*{\makecvfooter}{\makefooter} + +% makes a resume section +% usage: \section{<title>} +% identical starred and non-starred variants should be defined for compatibility with other packages (e.g. with natbib, that uses \section*{} for the bibliography header) +\NewDocumentCommand{\section}{sm}{} + +% makes a resume subsection +% usage: \subsection{title} +\NewDocumentCommand{\subsection}{sm}{} + +% makes a resume line with a header and a corresponding text +% usage: \cvitem[spacing]{header}{text} +\newcommand*{\cvitem}[3][.25em]{} + +% makes a resume line 2 headers and their corresponding text +% usage: \cvdoubleitem[spacing]{header1}{text1}{header2}{text2} +\newcommand*{\cvdoubleitem}[5][.25em]{} + +% makes a resume line with a list item +% usage: \cvlistitem[label]{item} +\newcommand*{\cvlistitem}[2][\listitemsymbol]{} + +% makes a resume line with 2 list items +% usage: \cvlistdoubleitem[label]{item1}{item2} +\newcommand*{\cvlistdoubleitem}[3][\listitemsymbol]{} + +% makes a typical resume job / education entry +% usage: \cventry[spacing]{years}{degree/job title}{institution/employer}{localization}{optionnal: grade/...}{optional: comment/job description} +\newcommand*{\cventry}[7][.25em]{} + +% makes a resume entry with a proficiency comment +% usage: \cvitemwithcomment[spacing]{header}{text}{comment} +\newcommand*{\cvitemwithcomment}[4][.25em]{} + +% makes a generic hyperlink +% usage: \link[optional text]{link} +\newcommand*{\link}[2][]{% + \ifthenelse{\equal{#1}{}}% + {\href{#2}{#2}}% + {\href{#2}{#1}}} + +% makes a http hyperlink +% usage: \httplink[optional text]{link} +\newcommand*{\httplink}[2][]{% + \ifthenelse{\equal{#1}{}}% + {\href{http://#2}{#2}}% + {\href{http://#2}{#1}}} + +% makes an email hyperlink +% usage: \emaillink[optional text]{link} +\newcommand*{\emaillink}[2][]{% + \ifthenelse{\equal{#1}{}}% + {\href{mailto:#2}{#2}}% + {\href{mailto:#2}{#1}}} + +% cvcolumns environment, where every column is created through \cvcolumn +% usage: \begin{cvcolumns} +% \cvcolumn[width]{head}{content} +% \cvcolumn[width]{head}{content} +% ... 
+% \end{cvcolumns} +% where "width" is the width as a fraction of the line length (between 0 and 1), "head" is the column header and "content" its content +\newcounter{cvcolumnscounter}% counter for the number of columns +\newcounter{cvcolumnsautowidthcounter}% counter for the number of columns with no column width provided, and which will then be equally distributed +\newcounter{tmpiteratorcounter}% counter for any temporary purpose (e.g., iterating loops) +\newlength{\cvcolumnsdummywidth}\setlength{\cvcolumnsdummywidth}{1000pt}% dummy width for total width, in order to enable arithmetics (TeX has no float variables, only integer counters or lengths) +\newlength{\cvcolumnswidth}% total width available for head / content +\newlength{\cvcolumnsautowidth}% total width of columns with no explicit width provided +\newlength{\cvcolumnautowidth}% width of one of the columns with no explicit width provided (based on equal distribution of remaining space) +\newif\if@cvcolumns@head@empty% whether or not at least one of the columns has a header +\newenvironment*{cvcolumns}% + {% at environment opening: reset counters, lengths and ifs + \setcounter{cvcolumnscounter}{0}% + \setcounter{cvcolumnsautowidthcounter}{0}% + \setlength{\cvcolumnsautowidth}{\cvcolumnsdummywidth}% + \setlength{\cvcolumnautowidth}{0pt}% + \@cvcolumns@head@emptytrue}% + {% at environment closing: typeset environment + % compute the width of each cvcolumn, considering a spacing of \separatorcolumnwidth and the columns with set width + \ifnum\thecvcolumnscounter>0% + \setlength{\cvcolumnswidth}{\maincolumnwidth-\value{cvcolumnscounter}\separatorcolumnwidth+\separatorcolumnwidth}% + \setlength{\cvcolumnautowidth}{\cvcolumnswidth*\ratio{\cvcolumnsautowidth}{\cvcolumnsdummywidth}/\value{cvcolumnsautowidthcounter}}\fi% + % pre-aggregate the tabular definition, heading and content (required before creating the tabular, as the tabular environment doesn't like loops --- probably because "&" generates a \endgroup) + % - the tabular definition is the aggregation of the different "\cvcolumn<i>@def" (by default "p{\cvcolumnautowidth}"), separated by "@{\hspace*{\separatorcolumnwidth}}" + % - the tabular heading is the aggregation of the different "\cvcolumn<i>@head", separated by "&" + % - the tabular content is the aggregation of the different "\cvcolumn<i>@content", separated by "&" + % to aggregate the different elements, \protected@edef or \g@addto@macro is required to avoid that \cvcolumns@def, -@head and -@content get expanded in subsequent redefinitions, which would cause errors due to the expansions of \hspace, of \subsectionstyle and possibly of user content/argument such as font commands + \def\cvcolumns@def{}% + \def\cvcolumns@head{}% + \def\cvcolumns@content{}% + \setcounter{tmpiteratorcounter}{0}% + % loop based on \g@addto@macro + \loop\ifnum\thetmpiteratorcounter<\thecvcolumnscounter% + \ifnum\thetmpiteratorcounter=0\else% + \g@addto@macro\cvcolumns@def{@{\hspace*{\separatorcolumnwidth}}}% + \g@addto@macro\cvcolumns@head{&}% + \g@addto@macro\cvcolumns@content{&}\fi% + \expandafter\g@addto@macro\expandafter\cvcolumns@def\expandafter{\csname cvcolumn\roman{tmpiteratorcounter}@def\endcsname}% + \expandafter\g@addto@macro\expandafter\cvcolumns@head\expandafter{\csname cvcolumn\roman{tmpiteratorcounter}@head\endcsname}% + \expandafter\g@addto@macro\expandafter\cvcolumns@content\expandafter{\csname cvcolumn\roman{tmpiteratorcounter}@content\endcsname}% + \stepcounter{tmpiteratorcounter}% + \repeat% +% % same loop based on 
\protected@edef +% \loop\ifnum\thetmpiteratorcounter<\thecvcolumnscounter% +% \ifnum\thetmpiteratorcounter=0\else% +% \protected@edef\cvcolumns@def{\cvcolumns@def @{\hspace*{\separatorcolumnwidth}}}% +% \protected@edef\cvcolumns@head{\cvcolumns@head &}% +% \protected@edef\cvcolumns@content{\cvcolumns@content &}\fi% +% \expandafter\protected@edef\expandafter\cvcolumns@def\expandafter{\expandafter\cvcolumns@def\expandafter\protect\csname cvcolumn\roman{tmpiteratorcounter}@def\endcsname}% +% \expandafter\protected@edef\expandafter\cvcolumns@head\expandafter{\expandafter\cvcolumns@head\expandafter\protect\csname cvcolumn\roman{tmpiteratorcounter}@head\endcsname}% +% \expandafter\protected@edef\expandafter\cvcolumns@content\expandafter{\expandafter\cvcolumns@content\expandafter\protect\csname cvcolumn\roman{tmpiteratorcounter}@content\endcsname}% +% \stepcounter{tmpiteratorcounter}% +% \repeat% + % create the tabular + \cvitem{}{% + \begin{tabular}{\cvcolumns@def}% + \if@cvcolumns@head@empty\else% + \cvcolumns@head\\[-.8em]% + {\color{color1}\rule{\maincolumnwidth}{.25pt}}\\\fi% + \cvcolumns@content% + \end{tabular}}} + +% cvcolumn command, to create a column inside a cvcolumns environment +% usage: \cvcolumn[width]{head}{content} +% where "width" is the width as a fraction of the line length (between 0 and 1), "head" is the column header and "content" its content ("head" and "content" can contain "\\", "\newline" or any other paragraph command such as "itemize") +\newcommand*{\cvcolumn}[3][\cvcolumnautowidth]{% +% \def\cvcolumn@width{}% + \ifthenelse{\equal{#1}{\cvcolumnautowidth}}% + {% if no width fraction is provided, count this column as auto-adjusted and set its width to \cvcolumnsautowidth + \stepcounter{cvcolumnsautowidthcounter}% + \expandafter\expandafter\expandafter\def\expandafter\csname cvcolumn\roman{cvcolumnscounter}@def\endcsname{p{\cvcolumnautowidth}}% + \expandafter\expandafter\expandafter\def\expandafter\csname cvcolumn\roman{cvcolumnscounter}@head\endcsname{\protect\parbox[b]{\cvcolumnautowidth}{\protect\subsectionstyle{#2}}}}% + {% if a width is provided, set the width of the column to it and decrease the available space for auto-adjusted columns + \addtolength{\cvcolumnsautowidth}{-#1\cvcolumnsdummywidth}% + \expandafter\expandafter\expandafter\def\expandafter\csname cvcolumn\roman{cvcolumnscounter}@def\endcsname{p{#1\cvcolumnswidth}}% + \expandafter\expandafter\expandafter\def\expandafter\csname cvcolumn\roman{cvcolumnscounter}@head\endcsname{\protect\parbox[b]{#1\cvcolumnswidth}{\protect\subsectionstyle{#2}}}}% + \ifthenelse{\equal{#2}{}}{}{\@cvcolumns@head@emptyfalse}% + \expandafter\expandafter\expandafter\def\expandafter\csname cvcolumn\roman{cvcolumnscounter}@content\endcsname{\protect\cvcolumncell{#3}}% + \stepcounter{cvcolumnscounter}} + +% internal cvcolumncell command, that enables a cvcolumn cell to contain paragraph commands (lists, newlines, etc) +\newcommand*{\cvcolumncell}[1]{{% put cell inside a group, so that command redefinitions are only local + % roughly restore \\ to its regular definition (outside of tabular) + \renewcommand*{\\}{\newline}% + % enclose the contents of the cell inside a vertical box, to allow paragraph commands + \protect\vtop{#1}}} + +% thebibliography environment, for use with BibTeX and possibly multibib +\newlength{\bibindent} +\setlength{\bibindent}{1.5em} +% bibliography item label +\newcommand*{\bibliographyitemlabel}{}% use \@biblabel{\arabic{enumiv}} for BibTeX labels +%\newif\if@multibibfirstbib\@multibibfirstbibfalse +% 
bibliography head (section, etc}, depending on whether multibib is used +\newcommand*{\bibliographyhead}[1]{\section{#1}} +\AtEndPreamble{\@ifpackageloaded{multibib}{\renewcommand*{\bibliographyhead}[1]{\subsection{#1}}}{}} +% thebibliography environment definition +\newenvironment{thebibliography}[1]{}{} +\newcommand*{\newblock}{\hskip .11em\@plus.33em\@minus.07em} +\let\@openbib@code\@empty + +% itemize, enumerate and description environment +\setlength{\leftmargini} {1em} +\leftmargin\leftmargini +\setlength{\leftmarginii} {\leftmargini} +\setlength{\leftmarginiii} {\leftmargini} +\setlength{\leftmarginiv} {\leftmargini} +\setlength{\leftmarginv} {\leftmargini} +\setlength{\leftmarginvi} {\leftmargini} +\setlength{\labelsep} {.5em}% this is the distance between the label and the body, but it pushes the label to the left rather than pushing the body to the right (to do the latter, modify \leftmargin(i) +\setlength{\labelwidth} {\leftmargini}% unfortunately, \labelwidth is not defined by item level (i.e. no \labeliwidth, \labeliiwidth, etc) +\addtolength{\labelwidth} {-\labelsep} +\@beginparpenalty -\@lowpenalty +\@endparpenalty -\@lowpenalty +\@itempenalty -\@lowpenalty +\newcommand\labelenumi{\theenumi.} +\newcommand\labelenumii{(\theenumii)} +\newcommand\labelenumiii{\theenumiii.} +\newcommand\labelenumiv{\theenumiv.} +\renewcommand\p@enumii{\theenumi} +\renewcommand\p@enumiii{\p@enumii(\theenumii)} +\renewcommand\p@enumiv{\p@enumiii\theenumiii} +% description label +\newcommand*\descriptionlabel[1]{\hspace\labelsep\normalfont\bfseries#1} + +% classical \today definition +\def\today{\ifcase\month\or + January\or February\or March\or April\or May\or June\or + July\or August\or September\or October\or November\or December\fi + \space\number\day, \number\year} + +%\newcommand{\widthofautobox}[1]{% +% \widthof{\begin{tabular}{@{}l@{}}#1\end{tabular}}} + +%\newcommand{\autobox}[2][b]{% +% \parbox[#1]{\widthofautobox{#2}}{#2}} + + +%------------------------------------------------------------------------------- +% letter design commands definitions +%------------------------------------------------------------------------------- +% elements +\newcommand*{\recipient}[2]{\def\@recipientname{#1}\def\@recipientaddress{#2}} +\renewcommand*{\date}[1]{\def\@date{#1}}\date{\today} +\newcommand*{\opening}[1]{\def\@opening{#1}} +\newcommand*{\closing}[1]{\def\@closing{#1}} +\newcommand*{\enclosure}[2][]{% + % if an optional argument is provided, use it to redefine \enclname + \ifthenelse{\equal{#1}{}}{}{\renewcommand*{\enclname}{#1}}% + \def\@enclosure{#2}} + +% recompute all letter lengths +\newcommand*{\recomputeletterlengths}{} + +% makes the letter title +% usage: \makelettertitle +\newcommand*{\makelettertitle}{} + +% makes the letter footer +% usage: \makeletterfooter +\newcommand*{\makeletterfooter}{\makefooter} + +% makes the letter closing +% usage: \makeletterclosing +\newcommand*{\makeletterclosing}{} + + +\endinput + + +%% end of file `moderncv.cls'. diff --git a/users/glittershark/resume/moderncvcolorblack.sty b/users/glittershark/resume/moderncvcolorblack.sty new file mode 100644 index 000000000000..3a6e1477f322 --- /dev/null +++ b/users/glittershark/resume/moderncvcolorblack.sty @@ -0,0 +1,27 @@ +%% start of file `moderncvcolorblack.sty'. +%% Copyright 2006-2013 Xavier Danaux (xdanaux@gmail.com). +% +% This work may be distributed and/or modified under the +% conditions of the LaTeX Project Public License version 1.3c, +% available at http://www.latex-project.org/lppl/. 
+ + +%------------------------------------------------------------------------------- +% identification +%------------------------------------------------------------------------------- +\NeedsTeXFormat{LaTeX2e} +\ProvidesPackage{moderncvcolorblack}[2013/02/09 v1.3.0 modern curriculum vitae and letter color scheme: black] + + +%------------------------------------------------------------------------------- +% color scheme definition +%------------------------------------------------------------------------------- +\definecolor{color0}{rgb}{0,0,0}% black +\definecolor{color1}{rgb}{0,0,0}% black +\definecolor{color2}{rgb}{0,0,0}% black + + +\endinput + + +%% end of file `moderncvcolorblack.sty'. diff --git a/users/glittershark/resume/moderncvcolorblue.sty b/users/glittershark/resume/moderncvcolorblue.sty new file mode 100644 index 000000000000..7b949c704acd --- /dev/null +++ b/users/glittershark/resume/moderncvcolorblue.sty @@ -0,0 +1,27 @@ +%% start of file `moderncvcolorblue.sty'. +%% Copyright 2006-2013 Xavier Danaux (xdanaux@gmail.com). +% +% This work may be distributed and/or modified under the +% conditions of the LaTeX Project Public License version 1.3c, +% available at http://www.latex-project.org/lppl/. + + +%------------------------------------------------------------------------------- +% identification +%------------------------------------------------------------------------------- +\NeedsTeXFormat{LaTeX2e} +\ProvidesPackage{moderncvcolorblue}[2013/02/09 v1.3.0 modern curriculum vitae and letter color scheme: blue] + + +%------------------------------------------------------------------------------- +% color scheme definition +%------------------------------------------------------------------------------- +\definecolor{color0}{rgb}{0,0,0}% black +\definecolor{color1}{rgb}{0.22,0.45,0.70}% light blue +\definecolor{color2}{rgb}{0.45,0.45,0.45}% dark grey + + +\endinput + + +%% end of file `moderncvcolorblue.sty'. diff --git a/users/glittershark/resume/moderncvcolorgreen.sty b/users/glittershark/resume/moderncvcolorgreen.sty new file mode 100644 index 000000000000..4de7f848a04e --- /dev/null +++ b/users/glittershark/resume/moderncvcolorgreen.sty @@ -0,0 +1,27 @@ +%% start of file `moderncvcolorgreen.sty'. +%% Copyright 2006-2013 Xavier Danaux (xdanaux@gmail.com). +% +% This work may be distributed and/or modified under the +% conditions of the LaTeX Project Public License version 1.3c, +% available at http://www.latex-project.org/lppl/. + + +%------------------------------------------------------------------------------- +% identification +%------------------------------------------------------------------------------- +\NeedsTeXFormat{LaTeX2e} +\ProvidesPackage{moderncvcolorgreen}[2013/02/09 v1.3.0 modern curriculum vitae and letter color scheme: green] + + +%------------------------------------------------------------------------------- +% color scheme definition +%------------------------------------------------------------------------------- +\definecolor{color0}{rgb}{0,0,0}% black +\definecolor{color1}{rgb}{0.35,0.70,0.30}% green +\definecolor{color2}{rgb}{0.45,0.45,0.45}% dark grey + + +\endinput + + +%% end of file `moderncvcolorgreen.sty'. diff --git a/users/glittershark/resume/moderncvcolorgrey.sty b/users/glittershark/resume/moderncvcolorgrey.sty new file mode 100644 index 000000000000..9018726a2384 --- /dev/null +++ b/users/glittershark/resume/moderncvcolorgrey.sty @@ -0,0 +1,27 @@ +%% start of file `moderncvcolorgrey.sty'. 
+%% Copyright 2006-2013 Xavier Danaux (xdanaux@gmail.com). +% +% This work may be distributed and/or modified under the +% conditions of the LaTeX Project Public License version 1.3c, +% available at http://www.latex-project.org/lppl/. + + +%------------------------------------------------------------------------------- +% identification +%------------------------------------------------------------------------------- +\NeedsTeXFormat{LaTeX2e} +\ProvidesPackage{moderncvcolorgrey}[2013/02/09 v1.3.0 modern curriculum vitae and letter color scheme: grey] + + +%------------------------------------------------------------------------------- +% color scheme definition +%------------------------------------------------------------------------------- +\definecolor{color0}{rgb}{0,0,0}% black +\definecolor{color1}{rgb}{0.55,0.55,0.55}% dark grey +\definecolor{color2}{rgb}{0.55,0.55,0.55}% dark grey + + +\endinput + + +%% end of file `moderncvcolorgrey.sty'. diff --git a/users/glittershark/resume/moderncvcolororange.sty b/users/glittershark/resume/moderncvcolororange.sty new file mode 100644 index 000000000000..134ae2401133 --- /dev/null +++ b/users/glittershark/resume/moderncvcolororange.sty @@ -0,0 +1,27 @@ +%% start of file `moderncvcolororange.sty'. +%% Copyright 2006-2013 Xavier Danaux (xdanaux@gmail.com). +% +% This work may be distributed and/or modified under the +% conditions of the LaTeX Project Public License version 1.3c, +% available at http://www.latex-project.org/lppl/. + + +%------------------------------------------------------------------------------- +% identification +%------------------------------------------------------------------------------- +\NeedsTeXFormat{LaTeX2e} +\ProvidesPackage{moderncvcolororange}[2013/02/09 v1.3.0 modern curriculum vitae and letter color scheme: orange] + + +%------------------------------------------------------------------------------- +% color scheme definition +%------------------------------------------------------------------------------- +\definecolor{color0}{rgb}{0,0,0}% black +\definecolor{color1}{rgb}{0.95,0.55,0.15}% orange +\definecolor{color2}{rgb}{0.45,0.45,0.45}% dark grey + + +\endinput + + +%% end of file `moderncvcolororange.sty'. diff --git a/users/glittershark/resume/moderncvcolorpurple.sty b/users/glittershark/resume/moderncvcolorpurple.sty new file mode 100644 index 000000000000..d3dc5345b080 --- /dev/null +++ b/users/glittershark/resume/moderncvcolorpurple.sty @@ -0,0 +1,27 @@ +%% start of file `moderncvcolorpurple.sty'. +%% Copyright 2006-2013 Xavier Danaux (xdanaux@gmail.com). +% +% This work may be distributed and/or modified under the +% conditions of the LaTeX Project Public License version 1.3c, +% available at http://www.latex-project.org/lppl/. + + +%------------------------------------------------------------------------------- +% identification +%------------------------------------------------------------------------------- +\NeedsTeXFormat{LaTeX2e} +\ProvidesPackage{moderncvcolorpurple}[2013/02/09 v1.3.0 modern curriculum vitae and letter color scheme: purple] + + +%------------------------------------------------------------------------------- +% color scheme definition +%------------------------------------------------------------------------------- +\definecolor{color0}{rgb}{0,0,0}% black +\definecolor{color1}{rgb}{0.50,0.33,0.80}% purple +\definecolor{color2}{rgb}{0.45,0.45,0.45}% dark grey + + +\endinput + + +%% end of file `moderncvcolorpurple.sty'. 
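Each color-scheme package above follows the same pattern: it declares nothing but the three colors color0, color1 and color2 (used by the style files for, roughly, body text, accent rules and section titles, and secondary details) and is selected with \moderncvcolor{<name>}, which pulls in the matching moderncvcolor<name>.sty. As a sketch of how a custom scheme could sit alongside these, assuming the class already provides \definecolor as it does for the shipped schemes, and using an invented file name and illustrative RGB values:

%% moderncvcolorteal.sty -- hypothetical custom color scheme, modelled on the shipped ones
\NeedsTeXFormat{LaTeX2e}
\ProvidesPackage{moderncvcolorteal}[2013/02/09 v1.3.0 modern curriculum vitae and letter color scheme: teal]
\definecolor{color0}{rgb}{0,0,0}% black body text
\definecolor{color1}{rgb}{0.00,0.50,0.50}% teal accents
\definecolor{color2}{rgb}{0.45,0.45,0.45}% dark grey secondary text
\endinput

It would then be requested in a document preamble with \moderncvcolor{teal}, exactly like the shipped schemes.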
diff --git a/users/glittershark/resume/moderncvcolorred.sty b/users/glittershark/resume/moderncvcolorred.sty new file mode 100644 index 000000000000..681181997d38 --- /dev/null +++ b/users/glittershark/resume/moderncvcolorred.sty @@ -0,0 +1,27 @@ +%% start of file `moderncvcolorred.sty'. +%% Copyright 2006-2013 Xavier Danaux (xdanaux@gmail.com). +% +% This work may be distributed and/or modified under the +% conditions of the LaTeX Project Public License version 1.3c, +% available at http://www.latex-project.org/lppl/. + + +%------------------------------------------------------------------------------- +% identification +%------------------------------------------------------------------------------- +\NeedsTeXFormat{LaTeX2e} +\ProvidesPackage{moderncvcolorred}[2013/02/09 v1.3.0 modern curriculum vitae and letter color scheme: red] + + +%------------------------------------------------------------------------------- +% color scheme definition +%------------------------------------------------------------------------------- +\definecolor{color0}{rgb}{0,0,0}% black +\definecolor{color1}{rgb}{0.95,0.20,0.20}% red +\definecolor{color2}{rgb}{0.45,0.45,0.45}% dark grey + + +\endinput + + +%% end of file `moderncvcolorred.sty'. diff --git a/users/glittershark/resume/moderncvcompatibility.sty b/users/glittershark/resume/moderncvcompatibility.sty new file mode 100644 index 000000000000..1fc53f2180e1 --- /dev/null +++ b/users/glittershark/resume/moderncvcompatibility.sty @@ -0,0 +1,104 @@ +%% start of file `moderncvcompatibility.sty'. +%% Copyright 2006-2013 Xavier Danaux (xdanaux@gmail.com). +% +% This work may be distributed and/or modified under the +% conditions of the LaTeX Project Public License version 1.3c, +% available at http://www.latex-project.org/lppl/. + + +%------------------------------------------------------------------------------- +% identification +%------------------------------------------------------------------------------- +\NeedsTeXFormat{LaTeX2e} +\ProvidesPackage{moderncvcompatibility}[2013/02/09 v1.3.0 modern curriculum vitae and letter compatibility patches] + + +%------------------------------------------------------------------------------- +% required packages +%------------------------------------------------------------------------------- + + +%------------------------------------------------------------------------------- +% package options +%------------------------------------------------------------------------------- +% old casual option (version 0.1) +%\DeclareOption{casual}{\input{moderncvstylecasual.sty}} + +% old classic option (version 0.1) +%\DeclareOption{classic}{\input{moderncvstyleclassic.sty}} + +\DeclareOption*{} + +% process given options +\ProcessOptions\relax + +%------------------------------------------------------------------------------- +% definitions +%------------------------------------------------------------------------------- +% compatibility with version 0.1 +\newcommand*{\cvresume}[2]{\cvlistdoubleitem{#1}{#2}} + +% compatibility with versions <= 0.2 +% section, cvline, ... with width argument... 
+%\newcommand*{\section}[2][0.825]{% +% \closesection{}% +% \@sectionopentrue% +% \addcontentsline{toc}{part}{#2} +% \begin{longtable}[t]{@{}r@{\hspace{.025\textwidth}}@{}p{#1\textwidth}@{}}% +%% \colorrule{.15\textwidth}&\mbox{\color{sectiontitlecolor}\sectionfont#2}\\[1ex]}% +% {\color{sectionrectanglecolor}\rule{0.15\textwidth}{1ex}}&\mbox{\color{sectiontitlecolor}\sectionfont#2}\\[1ex]}% +%\newcommand*{\cvline}[3][.825]{% +% \begin{minipage}[t]{\hintscolumnwidth}\raggedleft\small\sffamily#2\end{minipage}&\begin{minipage}[t]{\maincolumnwidth}#3\end{minipage}\\} +%\newcommand*{\cvitem}[3][.825]{% +% \cvline[#1]{#2}{#3\vspace*{.75em}}} % the \vspace*{} inside the cvline environment is a hack... (should conceptually be outside the environment) + +% compatibility with versions <= 0.5 +%\newcommand*{\cvitem}[2]{\cvline{#1}{#2}} +%\newcommand*{\moderncvstyle}[1]{\moderncvtheme{#1}} + +% compatibility with versions <= 0.7 +\newcommand*{\closesection}{} +\newcommand*{\emptysection}{} +\newcommand*{\sethintscolumnlength}[1]{% + \setlength{\hintscolumnwidth}{#1}% + \recomputelengths} +\newcommand*{\sethintscolumntowidth}[1]{% + \settowidth{\hintscolumnwidth}{#1}% + \recomputelengths} + +% compatibility with versions <= 0.15 +\newcommand*{\cvline}[2]{\cvitem{#1}{#2}} +\newcommand*{\cvlanguage}[3]{\cvitemwithcomment{#1}{#2}{#3}} +\newcommand*{\cvcomputer}[4]{\cvdoubleitem{#1}{\small#2}{#3}{\small#4}} +\newcommand*{\moderncvtheme}[2][blue]{% + \moderncvcolor{#1}% + \moderncvstyle{#2}} + +% compatibility with versions <= 0.19 +\newcommand*{\maketitle}{\makecvtitle}% +\title{}% to avoid LaTeX complaining that \maketitle is a called without first a call to \title +\newcommand*{\maketitlenamewidth}{\makecvtitlenamewidth} + +% compatibility with versions <= 1.3.0 +\newcommand*{\firstname}[1]{\def\@firstname{#1}} +\newcommand*{\lastname}[1]{\def\@lastname{#1}} +\newcommand*{\givenname}[1]{\def\@firstname{#1}} +\newcommand*{\familyname}[1]{\def\@lastname{#1}} +\def\@familyname{\@lastname} + +% compatibility with versions <= 1.4.0 +\newcommand*{\mobile}[1]{\collectionadd[mobile]{phones}{#1}} +%\newcommand*{\phone}[1]{\collectionadd[fixed]{phones}{#1}}% implicit, as \phone{...} defaults to \phone[fixed]{...} +\newcommand*{\fax}[1]{\collectionadd[fax]{phones}{#1}} +\newcommand*{\@mobile}{\collectionfindbykey{phones}{mobile}} +\newcommand*{\@phone}{\collectionfindbykey{phones}{fixed}} +\newcommand*{\@fax}{\collectionfindbykey{phones}{fax}} +\newcommand*{\phonesymbol}{\fixedphonesymbol} +\newcommand*{\mobilesymbol}{\mobilephonesymbol} +\newcommand*{\faxsymbol}{\faxphonesymbol} + + +\endinput + + +%% end of file `moderncvcompatibility.sty'. diff --git a/users/glittershark/resume/moderncviconsletters.sty b/users/glittershark/resume/moderncviconsletters.sty new file mode 100644 index 000000000000..0a4e2864be29 --- /dev/null +++ b/users/glittershark/resume/moderncviconsletters.sty @@ -0,0 +1,50 @@ +%% start of file `moderncviconsletters.sty'. +%% Copyright 2013-2013 Xavier Danaux (xdanaux@gmail.com). +% +% This work may be distributed and/or modified under the +% conditions of the LaTeX Project Public License version 1.3c, +% available at http://www.latex-project.org/lppl/. 
+ + +%------------------------------------------------------------------------------- +% identification +%------------------------------------------------------------------------------- +\NeedsTeXFormat{LaTeX2e} +\ProvidesPackage{moderncviconsmarvosym}[2013/02/09 v1.3.0 modern curriculum vitae and letter icons: letters] + + +%------------------------------------------------------------------------------- +% required packages +%------------------------------------------------------------------------------- +% MarVoSym font +%\RequirePackage{marvosym} +\newcommand*{\marvosymbol}[1]{} +%\ifxetexorluatex +% \renewcommand*{\marvosymbol}[1]{{\fontspec{MarVoSym}\char#1}} +%\else + \renewcommand*{\marvosymbol}[1]{{\fontfamily{mvs}\fontencoding{U}\fontseries{m}\fontshape{n}\selectfont\char#1}} +%\fi + + +%------------------------------------------------------------------------------- +% symbols definition +%------------------------------------------------------------------------------- +\renewcommand*{\labelitemi}{\strut\textcolor{color1}{\marvosymbol{123}}}% equivalent to \Neutral from marvosym package; alternative: \fontencoding{U}\fontfamily{ding}\selectfont\tiny\symbol{'102} +%\renewcommand*{\labelitemii}{\strut\textcolor{color1}{\large\bfseries-}}% no change from default in moderncv.cls +%\renewcommand*{\labelitemiii}{\strut\textcolor{color1}{\rmfamily\textperiodcentered}}% no change from default in moderncv.cls +%\renewcommand*{\labelitemiv}{\labelitemiii}% no change from default in moderncv.cls + +\renewcommand*{\addresssymbol}{} +\renewcommand*{\mobilephonesymbol}{\textbf{M}~} +\renewcommand*{\fixedphonesymbol}{\textbf{T}~} +\renewcommand*{\faxphonesymbol}{\textbf{F}~} +\renewcommand*{\emailsymbol}{\textbf{E}~} +\renewcommand*{\homepagesymbol}{} + +\renewcommand*{\listitemsymbol}{\labelitemi~} + + +\endinput + + +%% end of file `moderncviconsletters.sty'. diff --git a/users/glittershark/resume/moderncviconsmarvosym.sty b/users/glittershark/resume/moderncviconsmarvosym.sty new file mode 100644 index 000000000000..eb1b1ec727bb --- /dev/null +++ b/users/glittershark/resume/moderncviconsmarvosym.sty @@ -0,0 +1,48 @@ +%% start of file `moderncviconsmarvosym.sty'. +%% Copyright 2013-2013 Xavier Danaux (xdanaux@gmail.com). +% +% This work may be distributed and/or modified under the +% conditions of the LaTeX Project Public License version 1.3c, +% available at http://www.latex-project.org/lppl/. 
+ + +%------------------------------------------------------------------------------- +% identification +%------------------------------------------------------------------------------- +\NeedsTeXFormat{LaTeX2e} +\ProvidesPackage{moderncviconsmarvosym}[2013/02/09 v1.3.0 modern curriculum vitae and letter icons: marvosym] + + +%------------------------------------------------------------------------------- +% required packages +%------------------------------------------------------------------------------- +% MarVoSym font +%\RequirePackage{marvosym} +\newcommand*{\marvosymbol}[1]{} +%\ifxetexorluatex +% \renewcommand*{\marvosymbol}[1]{{\fontspec{MarVoSym}\char#1}} +%\else + \renewcommand*{\marvosymbol}[1]{{\fontfamily{mvs}\fontencoding{U}\fontseries{m}\fontshape{n}\selectfont\char#1}} +%\fi + + +%------------------------------------------------------------------------------- +% symbols definition +%------------------------------------------------------------------------------- +\renewcommand*{\labelitemi}{\strut\textcolor{color1}{\marvosymbol{123}}}% equivalent to \Neutral from marvosym package; alternative: \fontencoding{U}\fontfamily{ding}\selectfont\tiny\symbol{'102} +%\renewcommand*{\labelitemii}{\strut\textcolor{color1}{\large\bfseries-}}% no change from default in moderncv.cls +%\renewcommand*{\labelitemiii}{\strut\textcolor{color1}{\rmfamily\textperiodcentered}}% no change from default in moderncv.cls +%\renewcommand*{\labelitemiv}{\labelitemiii}% no change from default in moderncv.cls + +\renewcommand*{\addresssymbol}{} +\renewcommand*{\mobilephonesymbol}{\marvosymbol{72}~} +\renewcommand*{\fixedphonesymbol}{\marvosymbol{84}~} +\renewcommand*{\faxphonesymbol}{\marvosymbol{117}~} +\renewcommand*{\emailsymbol}{\marvosymbol{66}~} +\renewcommand*{\homepagesymbol}{{\Large\marvosymbol{205}}~} + + +\endinput + + +%% end of file `moderncviconsmarvosym.sty'. diff --git a/users/glittershark/resume/moderncvstylebanking.sty b/users/glittershark/resume/moderncvstylebanking.sty new file mode 100644 index 000000000000..fb0b70fdcd10 --- /dev/null +++ b/users/glittershark/resume/moderncvstylebanking.sty @@ -0,0 +1,287 @@ +%% start of file `moderncvstylebanking.sty'. +%% Copyright 2006-2013 Xavier Danaux (xdanaux@gmail.com). +% +% This work may be distributed and/or modified under the +% conditions of the LaTeX Project Public License version 1.3c, +% available at http://www.latex-project.org/lppl/. 
+ + +%------------------------------------------------------------------------------- +% identification +%------------------------------------------------------------------------------- +\NeedsTeXFormat{LaTeX2e} +\ProvidesPackage{moderncvstylebanking}[2013/02/09 v1.3.0 modern curriculum vitae and letter style scheme: banking] + + +%------------------------------------------------------------------------------- +% required packages +%------------------------------------------------------------------------------- + + +%------------------------------------------------------------------------------- +% overall style definition +%------------------------------------------------------------------------------- +% fonts +%\ifxetexorluatex +% \setmainfont{Tex-Gyre Pagella} +% \setsansfont{Tex-Gyre Pagella} +% \setmathfont{Tex-Gyre Pagella} +% \setmathfont[range=\mathit,\mathsfit]{Tex-Gyre Pagella Italic} +% \setmathfont[range=\mathbfup,\mathbfsfup]{Tex-Gyre Pagella Bold} +% \setmathfont[range=\mathbfit,\mathbfsfit]{Tex-Gyre Pagella Bold Italic} +%\else + \IfFileExists{tgpagella.sty}% + {% + \RequirePackage{tgpagella}% + \renewcommand*{\familydefault}{\rmdefault}}% + {} +%\fi + +% symbols +\moderncvicons{marvosym} + +% commands +\newcommand*{\maketitlesymbol}{% + {~~~{\rmfamily\textbullet}~~~}}% the \rmfamily is required to force Latin Modern fonts when using sans serif, as OMS/lmss/m/n is not defined and gets substituted by OMS/cmsy/m/n +% internal command to add an element to the footer +% it collects the elements in a temporary box, and checks when to flush the box +\newsavebox{\maketitlebox}% +\newsavebox{\maketitletempbox}% +\newlength{\maketitlewidth}% +\newlength{\maketitleboxwidth}% +\newif\if@firstmaketitleelement\@firstmaketitleelementtrue% +% adds an element to the maketitle, separated by maketitlesymbol +% usage: \addtomaketitle[maketitlesymbol]{element} +\newcommand*{\addtomaketitle}[2][\maketitlesymbol]{% + \if@firstmaketitleelement% + \savebox{\maketitletempbox}{\usebox{\maketitlebox}#2}% + \else% + \savebox{\maketitletempbox}{\usebox{\maketitlebox}#1#2}\fi% + \settowidth{\maketitleboxwidth}{\usebox{\maketitletempbox}}% + \ifnum\maketitleboxwidth<\maketitlewidth% + \savebox{\maketitlebox}{\usebox{\maketitletempbox}}% + \@firstmaketitleelementfalse% + \else% + \flushmaketitle{}\\% + \savebox{\maketitlebox}{#2}% + \savebox{\maketitletempbox}{#2}% + \settowidth{\maketitleboxwidth}{\usebox{\maketitlebox}}% + \@firstmaketitleelementfalse\fi} +% internal command to flush the maketitle +\newcommand*{\flushmaketitle}{% + \strut\usebox{\maketitlebox}% + \savebox{\maketitlebox}{}% + \savebox{\maketitletempbox}{}% + \setlength{\maketitleboxwidth}{0pt}} +\renewcommand*{\maketitle}{% + \setlength{\maketitlewidth}{0.8\textwidth}% + \hfil% + \parbox{\maketitlewidth}{% + \centering% + % name and title + \namestyle{\@firstname~\@lastname}% + \ifthenelse{\equal{\@title}{}}{}{\titlestyle{~|~\@title}}\\% \isundefined doesn't work on \@title, as LaTeX itself defines \@title (before it possibly gets redefined by \title) + % detailed information + \addressfont\color{color2}% + \ifthenelse{\isundefined{\@addressstreet}}{}{\addtomaketitle{\addresssymbol\@addressstreet}% + \ifthenelse{\equal{\@addresscity}{}}{}{\addtomaketitle[~--~]{\@addresscity}}% if \addresstreet is defined, \addresscity and \addresscountry will always be defined but could be empty + \ifthenelse{\equal{\@addresscountry}{}}{}{\addtomaketitle[~--~]{\@addresscountry}}% + \flushmaketitle\@firstmaketitleelementtrue\\}% + 
\collectionloop{phones}{% the key holds the phone type (=symbol command prefix), the item holds the number + \addtomaketitle{\csname\collectionloopkey phonesymbol\endcsname\collectionloopitem}}% + \ifthenelse{\isundefined{\@email}}{}{\addtomaketitle{\emailsymbol\emaillink{\@email}}}% + \ifthenelse{\isundefined{\@homepage}}{}{\addtomaketitle{\homepagesymbol\httplink{\@homepage}}}% + \ifthenelse{\isundefined{\@extrainfo}}{}{\addtomaketitle{\@extrainfo}}% + \flushmaketitle}\\[2.5em]}% need to force a \par after this to avoid weird spacing bug at the first section if no blank line is left after \maketitle + + +%------------------------------------------------------------------------------- +% resume style definition +%------------------------------------------------------------------------------- +% fonts +\renewcommand*{\namefont}{\Huge\bfseries\upshape} +\renewcommand*{\titlefont}{\Huge\mdseries\upshape} +\renewcommand*{\addressfont}{\normalsize\mdseries\upshape} +\renewcommand*{\quotefont}{\large\slshape} +\renewcommand*{\sectionfont}{\Large\bfseries\upshape} +\renewcommand*{\subsectionfont}{\large\upshape\fontseries{sb}\selectfont} +\renewcommand*{\hintfont}{\bfseries} + +% styles +\renewcommand*{\namestyle}[1]{{\namefont\textcolor{color1}{#1}}} +\renewcommand*{\titlestyle}[1]{{\titlefont\textcolor{color2!85}{#1}}} +\renewcommand*{\addressstyle}[1]{{\addressfont\textcolor{color1}{#1}}} +\renewcommand*{\quotestyle}[1]{{\quotefont\textcolor{color1}{#1}}} +\renewcommand*{\sectionstyle}[1]{{\sectionfont\textcolor{color1}{#1}}} +\renewcommand*{\subsectionstyle}[1]{{\subsectionfont\textcolor{color1}{#1}}} +\renewcommand*{\hintstyle}[1]{{\hintfont\textcolor{color0}{#1}}} + +% lengths +\newlength{\quotewidth} +\newlength{\hintscolumnwidth} +\setlength{\hintscolumnwidth}{0.3\textwidth}% +\newlength{\separatorcolumnwidth} +\setlength{\separatorcolumnwidth}{0.025\textwidth}% +\newlength{\maincolumnwidth} +\newlength{\doubleitemcolumnwidth} +\newlength{\listitemsymbolwidth} +\settowidth{\listitemsymbolwidth}{\listitemsymbol} +\newlength{\listitemmaincolumnwidth} +\newlength{\listdoubleitemmaincolumnwidth} + +% commands +\renewcommand*{\recomputecvlengths}{% + \setlength{\quotewidth}{0.65\textwidth}% + % main lenghts + \setlength{\maincolumnwidth}{\textwidth}% + % listitem lengths + \setlength{\listitemmaincolumnwidth}{\maincolumnwidth-\listitemsymbolwidth}% + % doubleitem lengths + \setlength{\doubleitemcolumnwidth}{\maincolumnwidth-\separatorcolumnwidth}% + \setlength{\doubleitemcolumnwidth}{0.5\doubleitemcolumnwidth}% + % listdoubleitem lengths + \setlength{\listdoubleitemmaincolumnwidth}{\maincolumnwidth-\listitemsymbolwidth-\separatorcolumnwidth-\listitemsymbolwidth}% + \setlength{\listdoubleitemmaincolumnwidth}{0.5\listdoubleitemmaincolumnwidth}% + % fancyhdr lengths + \renewcommand{\headwidth}{\textwidth}% + % regular lengths + \setlength{\parskip}{0\p@}} + +\renewcommand*{\makecvtitle}{% + % recompute lengths (in case we are switching from letter to resume, or vice versa) + \recomputecvlengths% + \maketitle% + % optional quote + \ifthenelse{\isundefined{\@quote}}% + {}% + {{\centering\begin{minipage}{\quotewidth}\centering\quotestyle{\@quote}\end{minipage}\\[2.5em]}}% + \par}% to avoid weird spacing bug at the first section if no blank line is left after \maketitle} + +\RenewDocumentCommand{\section}{sm}{% + \par\addvspace{2.5ex}% + \phantomsection{}% reset the anchor for hyperrefs + \addcontentsline{toc}{section}{#2}% + \strut\sectionstyle{#2}% + {\color{color1}\hrule}% + 
\par\nobreak\addvspace{1ex}\@afterheading} + +\newcommand{\subsectionfill}{\xleaders\hbox to 0.35em{\scriptsize.}\hfill}% different subsectionfills will not be perfectly aligned, but remaining space at the end of the fill will be distributed evenly between leaders, so it will be barely visible +\RenewDocumentCommand{\subsection}{sm}{% + \par\addvspace{1ex}% + \phantomsection{}% + \addcontentsline{toc}{subsection}{#2}% + \strut\subsectionstyle{#2}{\color{color1}{\subsectionfill}}% + \par\nobreak\addvspace{0.5ex}\@afterheading} + +\renewcommand*{\cvitem}[3][.25em]{% + \ifthenelse{\equal{#2}{}}{}{\hintstyle{#2}: }{#3}% + \par\addvspace{#1}} + +\renewcommand*{\cvdoubleitem}[5][.25em]{% + \begin{minipage}[t]{\doubleitemcolumnwidth}\hintstyle{#2}: #3\end{minipage}% + \hfill% fill of \separatorcolumnwidth + \begin{minipage}[t]{\doubleitemcolumnwidth}\ifthenelse{\equal{#4}{}}{}{\hintstyle{#4}: }#5\end{minipage}% + \par\addvspace{#1}} + +\renewcommand*{\cvlistitem}[2][.25em]{% + \listitemsymbol\begin{minipage}[t]{\listitemmaincolumnwidth}#2\end{minipage}% + \par\addvspace{#1}} + +\renewcommand*{\cvlistdoubleitem}[3][.25em]{% + \cvitem[#1]{}{\listitemsymbol\begin{minipage}[t]{\listdoubleitemmaincolumnwidth}#2\end{minipage}% + \hfill% fill of \separatorcolumnwidth + \ifthenelse{\equal{#3}{}}% + {}% + {\listitemsymbol\begin{minipage}[t]{\listdoubleitemmaincolumnwidth}#3\end{minipage}}}} + +\renewcommand*{\cventry}[7][.25em]{ + \begin{tabular*}{\textwidth}{l@{\extracolsep{\fill}}r}% + {\bfseries #4} & {\bfseries #5} \\% + {\itshape #3\ifthenelse{\equal{#6}{}}{}{, #6}} & {\itshape #2}\\% + \end{tabular*}% + \ifx&% + \else{\\\vbox{\small#7}}\fi% + \par\addvspace{#1}} + +\newbox{\cvitemwithcommentmainbox} +\newlength{\cvitemwithcommentmainlength} +\newlength{\cvitemwithcommentcommentlength} +\renewcommand*{\cvitemwithcomment}[4][.25em]{% + \savebox{\cvitemwithcommentmainbox}{\ifthenelse{\equal{#2}{}}{}{\hintstyle{#2}: }#3}% + \setlength{\cvitemwithcommentmainlength}{\widthof{\usebox{\cvitemwithcommentmainbox}}}% + \setlength{\cvitemwithcommentcommentlength}{\maincolumnwidth-\separatorcolumnwidth-\cvitemwithcommentmainlength}% + \begin{minipage}[t]{\cvitemwithcommentmainlength}\ifthenelse{\equal{#2}{}}{}{\hintstyle{#2}: }#3\end{minipage}% + \hfill% fill of \separatorcolumnwidth + \begin{minipage}[t]{\cvitemwithcommentcommentlength}\raggedleft\small\itshape#4\end{minipage}% + \par\addvspace{#1}} + +\renewenvironment{thebibliography}[1]% + {% + \bibliographyhead{\refname}% +% \small% + \begin{list}{\bibliographyitemlabel}% + {% + \setlength{\topsep}{0pt}% + \setlength{\labelwidth}{0pt}% + \setlength{\labelsep}{0pt}% + \leftmargin\labelwidth% + \advance\leftmargin\labelsep% + \@openbib@code% + \usecounter{enumiv}% + \let\p@enumiv\@empty% + \renewcommand\theenumiv{\@arabic\c@enumiv}}% + \sloppy\clubpenalty4000\widowpenalty4000% +% \sfcode`\.\@m% +% \sfcode `\=1000\relax% + }% + {% + \def\@noitemerr{\@latex@warning{Empty `thebibliography' environment}}% + \end{list}% + } + + +%------------------------------------------------------------------------------- +% letter style definition +%------------------------------------------------------------------------------- +% commands +\renewcommand*{\recomputeletterlengths}{ + \recomputecvlengths% + \setlength{\parskip}{6\p@}} + +\renewcommand*{\makelettertitle}{% + % recompute lengths (in case we are switching from letter to resume, or vice versa) + \recomputeletterlengths% + % sender block + \maketitle% + \par% + % recipient block + \begin{minipage}[t]{.5\textwidth} + 
\raggedright% + \addressfont% + {\bfseries\upshape\@recipientname}\\% + \@recipientaddress% + \end{minipage} + % date + \hfill % US style +% \\[1em] % UK style + \@date\\[2em]% US informal style: "April 6, 2006"; UK formal style: "05/04/2006" + % opening + \raggedright% + \@opening\\[1.5em]% + % ensure no extra spacing after \makelettertitle due to a possible blank line +% \ignorespacesafterend% not working + \hspace{0pt}\par\vspace{-\baselineskip}\vspace{-\parskip}} + +\renewcommand*{\makeletterclosing}{ + \@closing\\[3em]% + {\bfseries \@firstname~\@lastname}% + \ifthenelse{\isundefined{\@enclosure}}{}{% + \\% + \vfill% + {\color{color2}\itshape\enclname: \@enclosure}}} + + +\endinput + + +%% end of file `moderncvstylebanking.sty'. diff --git a/users/glittershark/resume/moderncvstylecasual.sty b/users/glittershark/resume/moderncvstylecasual.sty new file mode 100644 index 000000000000..e375e7612a5a --- /dev/null +++ b/users/glittershark/resume/moderncvstylecasual.sty @@ -0,0 +1,182 @@ +%% start of file `moderncvstylecasual.sty'. +%% Copyright 2006-2013 Xavier Danaux (xdanaux@gmail.com). +% +% This work may be distributed and/or modified under the +% conditions of the LaTeX Project Public License version 1.3c, +% available at http://www.latex-project.org/lppl/. + + +%------------------------------------------------------------------------------- +% identification +%------------------------------------------------------------------------------- +\NeedsTeXFormat{LaTeX2e} +\ProvidesPackage{moderncvstylecasual}[2013/02/09 v1.3.0 modern curriculum vitae and letter style scheme: casual] + + +%------------------------------------------------------------------------------- +% required packages +%------------------------------------------------------------------------------- +\RequirePackage{moderncvstyleclassic} + + +%------------------------------------------------------------------------------- +% overall style definition +%------------------------------------------------------------------------------- +% commands +% footer symbol used to separate footer elements +\newcommand*{\footersymbol}{% + {~~~{\rmfamily\textbullet}~~~}}% the \rmfamily is required to force Latin Modern fonts when using sans serif, as OMS/lmss/m/n is not defined and gets substituted by OMS/cmsy/m/n +% internal command to add an element to the footer +% it collects the elements in a temporary box, and checks when to flush the box +\newsavebox{\footerbox}% +\newsavebox{\footertempbox}% +\newlength{\footerwidth}% +\newlength{\footerboxwidth}% +\newif\if@firstfooterelement\@firstfooterelementtrue% +% adds an element to the footer, separated by footersymbol +% usage: \addtofooter[footersymbol]{element} +\newcommand*{\addtofooter}[2][\footersymbol]{% + \if@firstfooterelement% + \savebox{\footertempbox}{\usebox{\footerbox}#2}% + \else% + \savebox{\footertempbox}{\usebox{\footerbox}#1#2}\fi% + \settowidth{\footerboxwidth}{\usebox{\footertempbox}}% + \ifnum\footerboxwidth<\footerwidth% + \savebox{\footerbox}{\usebox{\footertempbox}}% + \@firstfooterelementfalse% + \else% + \flushfooter\\% + \savebox{\footerbox}{#2}% + \savebox{\footertempbox}{#2}% + \settowidth{\footerboxwidth}{\usebox{\footerbox}}% + \@firstfooterelementfalse\fi} +% internal command to flush the footer +\newcommand*{\flushfooter}{% + \strut\usebox{\footerbox}% + \savebox{\footerbox}{}% + \savebox{\footertempbox}{}% + \setlength{\footerboxwidth}{0pt}} + + +%------------------------------------------------------------------------------- +% resume style definition 
+%------------------------------------------------------------------------------- +% fonts +\renewcommand*{\namefont}{\fontsize{38}{40}\mdseries\upshape} +\renewcommand*{\addressfont}{\normalsize\mdseries\slshape} + +% commands +\renewcommand*{\makecvtitle}{% + % recompute lengths (in case we are switching from letter to resume, or vice versa) + \recomputecvlengths% + % ensure footer with personal information + \makecvfooter% + % optional picture + \newbox{\makecvtitlepicturebox}% + \savebox{\makecvtitlepicturebox}{% + \ifthenelse{\isundefined{\@photo}}% + {}% + {% + \setlength\fboxrule{\@photoframewidth}% + \ifdim\@photoframewidth=0pt% + \setlength{\fboxsep}{0pt}\fi% + {\color{color1}\framebox{\includegraphics[width=\@photowidth]{\@photo}}}}}% + \usebox{\makecvtitlepicturebox}% + % name + \@initializelength{\makecvtitlepicturewidth}% + \settowidth{\makecvtitlepicturewidth}{\usebox{\makecvtitlepicturebox}}% + \parbox[b]{\textwidth-\makecvtitlepicturewidth}{% + \raggedleft\namefont{\color{color2!50}\@firstname} {\color{color2}\@lastname}}\\[-.35em]% alternate design: \MakeLowercase and no space + {\color{color2!50}\rule{\textwidth}{.25ex}}% + % optional title + \ifthenelse{\equal{\@title}{}}{}{\\[1.25em]\null\hfill\titlestyle{\@title}}\\[2.5em]% \null is required as there is no box on the line after \\, so glue (and leaders) disappears; this is in contrast to after \par, where the next line starts with an indent box (even after \noindent). + % optional quote + \ifthenelse{\isundefined{\@quote}}% + {}% + {{\null\hfill\begin{minipage}{\quotewidth}\centering\quotestyle{\@quote}\end{minipage}\hfill\null\\[2.5em]}}% + \par}% to avoid weird spacing bug at the first section if no blank line is left after \maketitle + +\renewcommand*{\makecvfooter}{% + \setlength{\footerwidth}{0.8\textwidth}% + \fancypagestyle{plain}{% + \fancyfoot[c]{% + \parbox[b]{\footerwidth}{% + \centering% + \color{color2}\addressfont% + \ifthenelse{\isundefined{\@addressstreet}}{}{\addtofooter[]{\addresssymbol\@addressstreet}% + \ifthenelse{\equal{\@addresscity}{}}{}{\addtofooter[~--~]{\@addresscity}}% if \addresstreet is defined, \addresscity and \addresscountry will always be defined but could be empty + \ifthenelse{\equal{\@addresscountry}{}}{}{\addtofooter[~--~]{\@addresscountry}}% + \flushfooter\@firstfooterelementtrue\\}% + \collectionloop{phones}{% the key holds the phone type (=symbol command prefix), the item holds the number + \addtofooter{\csname\collectionloopkey phonesymbol\endcsname\collectionloopitem}}% + \ifthenelse{\isundefined{\@email}}{}{\addtofooter{\emailsymbol\emaillink{\@email}}}% + \ifthenelse{\isundefined{\@homepage}}{}{\addtofooter{\homepagesymbol\httplink{\@homepage}}}% + \ifthenelse{\isundefined{\@github}}{}{\addtofooter{\httplink{http://github.com/\@github}}}% + \ifthenelse{\isundefined{\@extrainfo}}{}{\addtofooter{\@extrainfo}}% + \ifthenelse{\lengthtest{\footerboxwidth=0pt}}{}{\flushfooter}% the lengthtest is required to avoid flushing an empty footer, which could cause a blank line due to the \\ after the address, if no other personal info is used + }}}% + \pagestyle{plain}} + + +%------------------------------------------------------------------------------- +% letter style definition +%------------------------------------------------------------------------------- +\renewcommand*{\makelettertitle}{% + % recompute lengths (in case we are switching from letter to resume, or vice versa) + \recomputeletterlengths% + % ensure footer with personal information + \makeletterfooter% + % recipient 
block + \begin{minipage}[t]{.5\textwidth} + \raggedright% + \addressfont% + {\bfseries\upshape\@recipientname}\\% + \@recipientaddress% + \end{minipage} + % date + \hfill% US style +% \\[1em]% UK style + \@date\\[2em]% US informal style: "April 6, 2006"; UK formal style: "05/04/2006" + % opening + \raggedright% + \@opening\\[1.5em]% + % ensure no extra spacing after \makelettertitle due to a possible blank line +% \ignorespacesafterend% not working + \hspace{0pt}\par\vspace{-\baselineskip}\vspace{-\parskip}} + +\renewcommand*{\makeletterfooter}{% + \setlength{\footerwidth}{0.8\textwidth}% + \fancypagestyle{plain}{% + \fancyfoot[c]{% + \parbox[b]{\footerwidth}{% + \centering% + \addressfont\color{color2}% + \vspace{-\baselineskip}% to cancel out the extra vertical space taken by the name (below) and ensure perfect alignment of letter and cv footers + \strut{\bfseries\upshape\@firstname~\@lastname}\\% the \strut is required to ensure the line is exactly \baselineskip tall + \ifthenelse{\isundefined{\@addressstreet}}{}{\addtofooter[]{\addresssymbol\@addressstreet}% + \ifthenelse{\equal{\@addresscity}{}}{}{\addtofooter[~--~]{\@addresscity}}% if \addresstreet is defined, \addresscity and addresscountry will always be defined but could be empty + \ifthenelse{\equal{\@addresscountry}{}}{}{\addtofooter[~--~]{\@addresscountry}}% + \flushfooter\@firstfooterelementtrue\\}% + \collectionloop{phones}{% the key holds the phone type (=symbol command prefix), the item holds the number + \addtofooter{\csname\collectionloopkey phonesymbol\endcsname\collectionloopitem}}% + \ifthenelse{\isundefined{\@email}}{}{\addtofooter{\emailsymbol\emaillink{\@email}}}% + \ifthenelse{\isundefined{\@homepage}}{}{\addtofooter{\homepagesymbol\httplink{\@homepage}}}% + \ifthenelse{\isundefined{\@extrainfo}}{}{\addtofooter{\@extrainfo}}% + \ifthenelse{\lengthtest{\footerboxwidth=0pt}}{}{\flushfooter}% the lengthtest is required to avoid flushing an empty footer, which could cause a blank line due to the \\ after the address, if no other personal info is used + }}}% + \pagestyle{plain}} + +\renewcommand*{\makeletterclosing}{ + \@closing\\[3em]% + {\bfseries\@firstname~\@lastname}% + \ifthenelse{\isundefined{\@enclosure}}{}{% + \\% + \vfil% + {\color{color2}\itshape\enclname: \@enclosure}}% + \vfil} + + +\endinput + + +%% end of file `moderncvstylecasual.sty'. diff --git a/users/glittershark/resume/moderncvstyleclassic.sty b/users/glittershark/resume/moderncvstyleclassic.sty new file mode 100644 index 000000000000..63cf97aa3b7d --- /dev/null +++ b/users/glittershark/resume/moderncvstyleclassic.sty @@ -0,0 +1,294 @@ +%% start of file `moderncvstyleclassic.sty'. +%% Copyright 2006-2013 Xavier Danaux (xdanaux@gmail.com). +% +% This work may be distributed and/or modified under the +% conditions of the LaTeX Project Public License version 1.3c, +% available at http://www.latex-project.org/lppl/. 
+ + +%------------------------------------------------------------------------------- +% identification +%------------------------------------------------------------------------------- +\NeedsTeXFormat{LaTeX2e} +\ProvidesPackage{moderncvstyleclassic}[2013/02/09 v1.3.0 modern curriculum vitae and letter style scheme: classic] + + +%------------------------------------------------------------------------------- +% required packages +%------------------------------------------------------------------------------- +% Latin Modern fonts +%\ifxetexorluatex +% \setmainfont{Latin Modern Roman} +% \setsansfont{Latin Modern Sans} +% \setmathfont{Latin Modern Math} +%\else + \IfFileExists{lmodern.sty}% + {\RequirePackage{lmodern}}% + {} +%\fi + + +%------------------------------------------------------------------------------- +% overall style definition +%------------------------------------------------------------------------------- +% symbols +\moderncvicons{marvosym} + + +%------------------------------------------------------------------------------- +% resume style definition +%------------------------------------------------------------------------------- +% fonts +\renewcommand*{\namefont}{\fontsize{34}{36}\mdseries\upshape} +\renewcommand*{\titlefont}{\LARGE\mdseries\slshape} +\renewcommand*{\addressfont}{\small\mdseries\slshape} +\renewcommand*{\quotefont}{\large\slshape} +\renewcommand*{\sectionfont}{\Large\mdseries\upshape} +\renewcommand*{\subsectionfont}{\large\mdseries\upshape} +\renewcommand*{\hintfont}{} + +% styles +\renewcommand*{\namestyle}[1]{{\namefont\textcolor{color0}{#1}}} +\renewcommand*{\titlestyle}[1]{{\titlefont\textcolor{color2}{#1}}} +\renewcommand*{\addressstyle}[1]{{\addressfont\textcolor{color1}{#1}}} +\renewcommand*{\quotestyle}[1]{{\quotefont\textcolor{color1}{#1}}} +\renewcommand*{\sectionstyle}[1]{{\sectionfont\textcolor{color1}{#1}}} +\renewcommand*{\subsectionstyle}[1]{{\subsectionfont\textcolor{color1}{#1}}} +\renewcommand*{\hintstyle}[1]{{\hintfont\textcolor{color0}{#1}}} + +% lengths +\newlength{\quotewidth} +\newlength{\hintscolumnwidth} +\setlength{\hintscolumnwidth}{0.175\textwidth} +\newlength{\separatorcolumnwidth} +\setlength{\separatorcolumnwidth}{0.025\textwidth} +\newlength{\maincolumnwidth} +\newlength{\doubleitemmaincolumnwidth} +\newlength{\listitemsymbolwidth} +\settowidth{\listitemsymbolwidth}{\listitemsymbol} +\newlength{\listitemmaincolumnwidth} +\newlength{\listdoubleitemmaincolumnwidth} + +% commands +\renewcommand*{\recomputecvlengths}{% + \setlength{\quotewidth}{0.65\textwidth}% + % main lenghts + \setlength{\maincolumnwidth}{\textwidth-\separatorcolumnwidth-\hintscolumnwidth}% + % listitem lengths + \setlength{\listitemmaincolumnwidth}{\maincolumnwidth-\listitemsymbolwidth}% + % doubleitem lengths + \setlength{\doubleitemmaincolumnwidth}{\maincolumnwidth-\hintscolumnwidth-\separatorcolumnwidth-\separatorcolumnwidth}% + \setlength{\doubleitemmaincolumnwidth}{0.5\doubleitemmaincolumnwidth}% + % listdoubleitem lengths + \setlength{\listdoubleitemmaincolumnwidth}{\maincolumnwidth-\listitemsymbolwidth-\separatorcolumnwidth-\listitemsymbolwidth}% + \setlength{\listdoubleitemmaincolumnwidth}{0.5\listdoubleitemmaincolumnwidth}% + % fancyhdr lengths + \renewcommand{\headwidth}{\textwidth}% + % regular lengths + \setlength{\parskip}{0\p@}} + +% optional maketitle width to force a certain width (if set to 0pt, the width is calculated automatically) +\newlength{\makecvtitlenamewidth} +\setlength{\makecvtitlenamewidth}{0pt}% dummy value 
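\makecvtitlenamewidth above defaults to the dummy value 0pt, in which case \makecvtitle (defined next) computes the name block width as \textwidth minus the widths of the details and photo boxes. A document using the classic style can force a fixed width instead; a minimal, illustrative override for the user's preamble (the 10cm is an arbitrary example value):

% illustrative: force the width of the name/title block instead of letting
% \makecvtitle derive it from the details and photo boxes
\setlength{\makecvtitlenamewidth}{10cm}

Since the details box is rendered with \llap, a forced width that is too large simply lets the name overlap the contact details, as the comment inside \makecvtitle notes.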
+\renewcommand*{\makecvtitle}{% + % recompute lengths (in case we are switching from letter to resume, or vice versa) + \recomputecvlengths% + % optional detailed information (pre-rendering) + \def\phonesdetails{}% + \collectionloop{phones}{% the key holds the phone type (=symbol command prefix), the item holds the number + \protected@edef\phonesdetails{\phonesdetails\protect\makenewline\csname\collectionloopkey phonesymbol\endcsname\collectionloopitem}}% + \newbox{\makecvtitledetailsbox}% + \savebox{\makecvtitledetailsbox}{% + \addressfont\color{color2}% + \begin{tabular}[b]{@{}r@{}}% + \ifthenelse{\isundefined{\@addressstreet}}{}{\makenewline\addresssymbol\@addressstreet% + \ifthenelse{\equal{\@addresscity}{}}{}{\makenewline\@addresscity}% if \addresstreet is defined, \addresscity and addresscountry will always be defined but could be empty + \ifthenelse{\equal{\@addresscountry}{}}{}{\makenewline\@addresscountry}}% + \phonesdetails% needed to be pre-rendered as loops and tabulars seem to conflict + \ifthenelse{\isundefined{\@email}}{}{\makenewline\emailsymbol\emaillink{\@email}}% + \ifthenelse{\isundefined{\@homepage}}{}{\makenewline\homepagesymbol\httplink{\@homepage}}% + \ifthenelse{\isundefined{\@extrainfo}}{}{\makenewline\@extrainfo}% + \end{tabular} + }% + % optional photo (pre-rendering) + \newbox{\makecvtitlepicturebox}% + \savebox{\makecvtitlepicturebox}{% + \ifthenelse{\isundefined{\@photo}}% + {}% + {% + \hspace*{\separatorcolumnwidth}% + \color{color1}% + \setlength{\fboxrule}{\@photoframewidth}% + \ifdim\@photoframewidth=0pt% + \setlength{\fboxsep}{0pt}\fi% + \framebox{\includegraphics[width=\@photowidth]{\@photo}}}}% + % name and title + \newlength{\makecvtitledetailswidth}\settowidth{\makecvtitledetailswidth}{\usebox{\makecvtitledetailsbox}}% + \newlength{\makecvtitlepicturewidth}\settowidth{\makecvtitlepicturewidth}{\usebox{\makecvtitlepicturebox}}% + \ifthenelse{\lengthtest{\makecvtitlenamewidth=0pt}}% check for dummy value (equivalent to \ifdim\makecvtitlenamewidth=0pt) + {\setlength{\makecvtitlenamewidth}{\textwidth-\makecvtitledetailswidth-\makecvtitlepicturewidth}}% + {}% + \begin{minipage}[b]{\makecvtitlenamewidth}% + \namestyle{\@firstname\ \@lastname}% + \ifthenelse{\equal{\@title}{}}{}{\\[1.25em]\titlestyle{\@title}}% + \end{minipage}% + \hfill% + % optional detailed information (rendering) + \llap{\usebox{\makecvtitledetailsbox}}% \llap is used to suppress the width of the box, allowing overlap if the value of makecvtitlenamewidth is forced + % optional photo (rendering) + \usebox{\makecvtitlepicturebox}\\[2.5em]% + % optional quote + \ifthenelse{\isundefined{\@quote}}% + {}% + {{\centering\begin{minipage}{\quotewidth}\centering\quotestyle{\@quote}\end{minipage}\\[2.5em]}}% + \par}% to avoid weird spacing bug at the first section if no blank line is left after \makecvtitle + +\newlength{\baseletterheight} +\settoheight{\baseletterheight}{\sectionstyle{o}} +\setlength{\baseletterheight}{\baseletterheight-0.95ex} +\RenewDocumentCommand{\section}{sm}{% + \par\addvspace{2.5ex}% + \phantomsection{}% reset the anchor for hyperrefs + \addcontentsline{toc}{section}{#2}% + \parbox[t]{\hintscolumnwidth}{\strut\raggedleft\raisebox{\baseletterheight}{\color{color1}\rule{\hintscolumnwidth}{0.95ex}}}% + \hspace{\separatorcolumnwidth}% + \parbox[t]{\maincolumnwidth}{\strut\sectionstyle{#2}}% + \par\nobreak\addvspace{1ex}\@afterheading}% to avoid a pagebreak after the heading + +\RenewDocumentCommand{\subsection}{sm}{% + \par\addvspace{1ex}% + \phantomsection{}% reset the anchor 
for hyperrefs + \addcontentsline{toc}{subsection}{#2}% + \begin{tabular}{@{}p{\hintscolumnwidth}@{\hspace{\separatorcolumnwidth}}p{\maincolumnwidth}@{}}% + \raggedleft\hintstyle{} &{\strut\subsectionstyle{#2}}% + \end{tabular}% + \par\nobreak\addvspace{0.5ex}\@afterheading}% to avoid a pagebreak after the heading + +\renewcommand*{\cvitem}[3][.25em]{% + \begin{tabular}{@{}p{\hintscolumnwidth}@{\hspace{\separatorcolumnwidth}}p{\maincolumnwidth}@{}}% + \raggedleft\hintstyle{#2} &{#3}% + \end{tabular}% + \par\addvspace{#1}} + +\renewcommand*{\cvdoubleitem}[5][.25em]{% + \cvitem[#1]{#2}{% + \begin{minipage}[t]{\doubleitemmaincolumnwidth}#3\end{minipage}% + \hfill% fill of \separatorcolumnwidth + \begin{minipage}[t]{\hintscolumnwidth}\raggedleft\hintstyle{#4}\end{minipage}% + \hspace*{\separatorcolumnwidth}% + \begin{minipage}[t]{\doubleitemmaincolumnwidth}#5\end{minipage}}} + +\renewcommand*{\cvlistitem}[2][.25em]{% + \cvitem[#1]{}{\listitemsymbol\begin{minipage}[t]{\listitemmaincolumnwidth}#2\end{minipage}}} + +\renewcommand*{\cvlistdoubleitem}[3][.25em]{% + \cvitem[#1]{}{\listitemsymbol\begin{minipage}[t]{\listdoubleitemmaincolumnwidth}#2\end{minipage}% + \hfill% fill of \separatorcolumnwidth + \ifthenelse{\equal{#3}{}}% + {}% + {\listitemsymbol\begin{minipage}[t]{\listdoubleitemmaincolumnwidth}#3\end{minipage}}}} + +\renewcommand*{\cventry}[7][.25em]{% + \cvitem[#1]{#2}{% + {\bfseries#3}% + \ifthenelse{\equal{#4}{}}{}{, {\slshape#4}}% + \ifthenelse{\equal{#5}{}}{}{, #5}% + \ifthenelse{\equal{#6}{}}{}{, #6}% + .\strut% + \ifx&% + \else{\newline{}\begin{minipage}[t]{\linewidth}\small#7\end{minipage}}\fi}} + +\newbox{\cvitemwithcommentmainbox} +\newlength{\cvitemwithcommentmainlength} +\newlength{\cvitemwithcommentcommentlength} +\renewcommand*{\cvitemwithcomment}[4][.25em]{% + \savebox{\cvitemwithcommentmainbox}{{\bfseries#3}}% + \setlength{\cvitemwithcommentmainlength}{\widthof{\usebox{\cvitemwithcommentmainbox}}}% + \setlength{\cvitemwithcommentcommentlength}{\maincolumnwidth-\separatorcolumnwidth-\cvitemwithcommentmainlength}% + \cvitem[#1]{#2}{% + \begin{minipage}[t]{\cvitemwithcommentmainlength}\bfseries#3\end{minipage}% + \hfill% fill of \separatorcolumnwidth + \begin{minipage}[t]{\cvitemwithcommentcommentlength}\raggedleft\small\itshape#4\end{minipage}}} + +\renewenvironment{thebibliography}[1]% + {% + \bibliographyhead{\refname}% +% \small% + \begin{list}{\bibliographyitemlabel}% + {% + \setlength{\topsep}{0pt}% + \setlength{\labelwidth}{\hintscolumnwidth}% + \setlength{\labelsep}{\separatorcolumnwidth}% + \leftmargin\labelwidth% + \advance\leftmargin\labelsep% + \@openbib@code% + \usecounter{enumiv}% + \let\p@enumiv\@empty% + \renewcommand\theenumiv{\@arabic\c@enumiv}}% + \sloppy\clubpenalty4000\widowpenalty4000% +% \sfcode`\.\@m% +% \sfcode `\=1000\relax% + }% + {% + \def\@noitemerr{\@latex@warning{Empty `thebibliography' environment}}% + \end{list}% + } + + +%------------------------------------------------------------------------------- +% letter style definition +%------------------------------------------------------------------------------- +% commands +\renewcommand*{\recomputeletterlengths}{% + \recomputecvlengths% + \setlength{\parskip}{6\p@}} + +\renewcommand*{\makelettertitle}{% + % recompute lengths (in case we are switching from letter to resume, or vice versa) + \recomputeletterlengths% + % sender contact info + \hfill% + \begin{minipage}{.5\textwidth}% + \raggedleft% + \addressfont\textcolor{color2}{% + 
{\bfseries\upshape\@firstname~\@lastname}\@firstdetailselementfalse% + \ifthenelse{\isundefined{\@addressstreet}}{}{\makenewline\addresssymbol\@addressstreet% + \ifthenelse{\equal{\@addresscity}{}}{}{\makenewline\@addresscity}% if \addresstreet is defined, \addresscity and addresscountry will always be defined but could be empty + \ifthenelse{\equal{\@addresscountry}{}}{}{\makenewline\@addresscountry}}% + \collectionloop{phones}{% the key holds the phone type (=symbol command prefix), the item holds the number + \makenewline\csname\collectionloopkey phonesymbol\endcsname\collectionloopitem}% + \ifthenelse{\isundefined{\@email}}{}{\makenewline\emailsymbol\emaillink{\@email}}% + \ifthenelse{\isundefined{\@homepage}}{}{\makenewline\homepagesymbol\httplink{\@homepage}}% + \ifthenelse{\isundefined{\@extrainfo}}{}{\makenewline\@extrainfo}}% + \end{minipage}\\[1em] + % recipient block + \begin{minipage}[t]{.5\textwidth} + \raggedright% + \addressfont% + {\bfseries\upshape\@recipientname}\\% + \@recipientaddress% + \end{minipage} + % date + \hfill% US style +% \\[1em]% UK style + \@date\\[2em]% US informal style: "January 1, 1900"; UK formal style: "01/01/1900" + % opening + \raggedright% + \@opening\\[1.5em]% + % ensure no extra spacing after \makelettertitle due to a possible blank line +% \ignorespacesafterend% not working + \hspace{0pt}\par\vspace{-\baselineskip}\vspace{-\parskip}} + +\renewcommand*{\makeletterclosing}{ + \@closing\\[3em]% + {\bfseries \@firstname~\@lastname}% + \ifthenelse{\isundefined{\@enclosure}}{}{% + \\% + \vfill% + {\color{color2}\itshape\enclname: \@enclosure}}} + + +\endinput + + +%% end of file `moderncvstyleclassic.sty'. diff --git a/users/glittershark/resume/moderncvstyleempty.sty b/users/glittershark/resume/moderncvstyleempty.sty new file mode 100644 index 000000000000..85932464d1c5 --- /dev/null +++ b/users/glittershark/resume/moderncvstyleempty.sty @@ -0,0 +1,34 @@ +%% start of file `moderncvstyleempty.sty'. +%% Copyright 2006-2013 Xavier Danaux (xdanaux@gmail.com). +% +% This work may be distributed and/or modified under the +% conditions of the LaTeX Project Public License version 1.3c, +% available at http://www.latex-project.org/lppl/. + + +%------------------------------------------------------------------------------- +% identification +%------------------------------------------------------------------------------- +\NeedsTeXFormat{LaTeX2e} +\ProvidesPackage{moderncvstyleempty}[2013/02/09 v1.3.0 modern curriculum vitae scheme: empty] + + +%------------------------------------------------------------------------------- +% required packages +%------------------------------------------------------------------------------- + + +%------------------------------------------------------------------------------- +% package options +%------------------------------------------------------------------------------- + + +%------------------------------------------------------------------------------- +% style definition +%------------------------------------------------------------------------------- +% see moderncv.cls for command declarations that needs to be implemented, e.g. \maketitle, \section, \subsections, \cvline, etc + +\endinput + + +%% end of file `moderncvstyleempty.sty'. 
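The empty style above implements none of the commands declared in moderncv.cls, which makes the division of labor visible: the class declares the interface (\makecvtitle, \section, \cventry, and so on) and a style scheme plus a color scheme fill it in. A minimal document exercising that interface might look as follows; the personal data, geometry scale and entry contents are illustrative only, and the personal-data setters are the legacy ones visible in moderncvcompatibility.sty:

\documentclass[11pt,a4paper]{moderncv}
\moderncvstyle{classic}% one of the style schemes in this directory
\moderncvcolor{blue}% one of the color schemes in this directory
\usepackage[scale=0.75]{geometry}% page margins; the scale value is illustrative
\firstname{Jane}\lastname{Doe}% legacy setters kept by moderncvcompatibility.sty
\title{Curriculum Vitae}
\email{jane@example.org}
\mobile{+00~1234~5678}
\begin{document}
\makecvtitle
\section{Experience}
\cventry{2019--2021}{Job title}{Employer}{City}{}{One-line description of the position.}
\section{Skills}
\cvitemwithcomment{Python}{daily use}{advanced}
\end{document}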
diff --git a/users/glittershark/resume/moderncvstyleoldstyle.sty b/users/glittershark/resume/moderncvstyleoldstyle.sty new file mode 100644 index 000000000000..ff732f4e2af5 --- /dev/null +++ b/users/glittershark/resume/moderncvstyleoldstyle.sty @@ -0,0 +1,306 @@ +%% start of file `moderncvstyleoldstyle.sty'. +%% Copyright 2006-2013 Xavier Danaux (xdanaux@gmail.com). +% +% This work may be distributed and/or modified under the +% conditions of the LaTeX Project Public License version 1.3c, +% available at http://www.latex-project.org/lppl/. + + +%------------------------------------------------------------------------------- +% identification +%------------------------------------------------------------------------------- +\NeedsTeXFormat{LaTeX2e} +\ProvidesPackage{moderncvstyleoldstyle}[2013/02/09 v1.3.0 modern curriculum vitae and letter style scheme: oldstyle] + + +%------------------------------------------------------------------------------- +% required packages +%------------------------------------------------------------------------------- +% change the layout of the page on the fly, for resume or letter layout +\RequirePackage{changepage} + + +%------------------------------------------------------------------------------- +% overall style definition +%------------------------------------------------------------------------------- +% fonts +%\ifxetexorluatex +% \setmainfont[Numbers={OldStyle,Proportional}, BoldFont={Kurier Bold}, ItalicFont={Kurier Light Italic}, BoldItalicFont={Kurier Bold Italic}]{Kurier Light} +% \setsansfont[Numbers={OldStyle,Proportional}, BoldFont={Kurier Bold}, ItalicFont={Kurier Light Italic}, BoldItalicFont={Kurier Bold Italic}]{Kurier Light} +% \setmathfont{Kurier Light} +% \setmathfont[range=\mathit,\mathsfit]{Kurier Light Italic} +% \setmathfont[range=\mathbfup,\mathbfsfup]{Kurier Bold} +% \setmathfont[range=\mathbfit,\mathbfsfit]{Kurier Bold Italic} +%\else + \IfFileExists{kurier.sty}% + {\RequirePackage[light,math]{kurier}}% + {} +%\fi + +% symbols +\moderncvicons{letters} + + +%------------------------------------------------------------------------------- +% resume style definition +%------------------------------------------------------------------------------- +% fonts +\renewcommand*{\namefont}{\fontsize{34}{36}\mdseries\upshape} +\renewcommand*{\titlefont}{\LARGE\mdseries\slshape} +\renewcommand*{\addressfont}{\small\mdseries} +\renewcommand*{\quotefont}{\large\itshape} +\renewcommand*{\sectionfont}{\Large\bfseries\upshape} +\renewcommand*{\subsectionfont}{\large\bfseries\itshape} +\renewcommand*{\hintfont}{\bfseries} + +% styles +\renewcommand*{\namestyle}[1]{{\namefont\textcolor{color0}{#1}}} +\renewcommand*{\titlestyle}[1]{{\titlefont\textcolor{color2}{#1}}} +\renewcommand*{\addressstyle}[1]{{\addressfont\textcolor{color2}{#1}}} +\renewcommand*{\quotestyle}[1]{{\quotefont\textcolor{color1}{#1}}} +\renewcommand*{\sectionstyle}[1]{{\sectionfont\textcolor{color1}{#1}}} +\renewcommand*{\subsectionstyle}[1]{{\subsectionfont\textcolor{color1}{#1}}} +\renewcommand*{\hintstyle}[1]{{\hintfont\textcolor{color0}{#1}}} + +% lengths +\newlength{\quotewidth} +\newlength{\hintscolumnwidth} +\setlength{\hintscolumnwidth}{0.3\textwidth}% +\newlength{\separatorcolumnwidth} +\setlength{\separatorcolumnwidth}{0.025\textwidth}% +\newlength{\maincolumnwidth} +\newlength{\doubleitemcolumnwidth} +\newlength{\listitemsymbolwidth} +\settowidth{\listitemsymbolwidth}{\listitemsymbol} +\newlength{\listitemmaincolumnwidth} +\newlength{\listdoubleitemmaincolumnwidth} + 
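% Annotation (not in the original file): how these lengths relate, per
% \recomputecvlengths below --
%   \maincolumnwidth       = \textwidth - \hintscolumnwidth - \separatorcolumnwidth
%   \doubleitemcolumnwidth = (\maincolumnwidth - \separatorcolumnwidth) / 2
% i.e. the hints column plus one separator is carved off the text width, and
% double items split what remains around one more separator.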
+% commands +\setlength{\marginparwidth}{0\p@}% +\setlength{\marginparsep}{0\p@} +\renewcommand*{\recomputecvlengths}{% + % regular lengths + \changepage{}{+\marginparwidth+\marginparsep}{}{}{}{}{}{}{}% if a letter was typeset before the resume, \marginparwidth and \marginparsep will be non-zero; otherwise, this has no effect + \setlength{\marginparwidth}{0\p@}% + \setlength{\marginparsep}{0\p@} + \setlength{\parskip}{0\p@}% + % maketitle lengths + \setlength{\quotewidth}{0.65\textwidth}% + % main lenghts + \setlength{\maincolumnwidth}{\textwidth-\hintscolumnwidth-\separatorcolumnwidth}% + % listitem lengths + \setlength{\listitemmaincolumnwidth}{\maincolumnwidth-\listitemsymbolwidth}% + % doubleitem lengths + \setlength{\doubleitemcolumnwidth}{\maincolumnwidth-\separatorcolumnwidth}% + \setlength{\doubleitemcolumnwidth}{0.5\doubleitemcolumnwidth}% + % listdoubleitem lengths + \setlength{\listdoubleitemmaincolumnwidth}{\maincolumnwidth-\listitemsymbolwidth-\separatorcolumnwidth-\listitemsymbolwidth}% + \setlength{\listdoubleitemmaincolumnwidth}{0.5\listdoubleitemmaincolumnwidth}% + % fancyhdr lengths + \renewcommand{\headwidth}{\textwidth}} + +\newcommand{\makecvinfo}[1]{% + \newbox{\makecvinfobox}% + \savebox{\makecvinfobox}{\parbox[t]{\hintscolumnwidth}{#1}}% + \newlength{\makecvinfoheight}% + \setlength{\makecvinfoheight}{\totalheightof{\usebox{\makecvinfobox}}}% the total height of the parbox is the sum of its height (\the\ht\makeinfobox) and its depth (\the\dp\makeinfobox); the \totalheightof command is provided by the "calc" package + \usebox{\makecvinfobox}\vspace{-\makecvinfoheight}% + \newlength{\leftcolumnwidth}% + \setlength{\leftcolumnwidth}{\hintscolumnwidth+\separatorcolumnwidth}% + \par\vspace{-\baselineskip}\vspace{-\parskip}\leftskip=\leftcolumnwidth} + +\renewcommand*{\makecvtitle}{ + % recompute lengths (in case we are switching from letter to resume, or vice versa) + \recomputecvlengths% + % optional picture box + \newbox{\makecvtitlepicturebox}% + \savebox{\makecvtitlepicturebox}{% + \ifthenelse{\isundefined{\@photo}}% + {}% + {% + \color{color1}% + \setlength\fboxrule{\@photoframewidth}% + \ifdim\@photoframewidth=0pt% + \setlength{\fboxsep}{0pt}\fi% + \framebox{\includegraphics[width=\@photowidth]{\@photo}}}}% + % name and title + \newlength{\makecvtitlepicturewidth}\settowidth{\makecvtitlepicturewidth}{\usebox{\makecvtitlepicturebox}}% + \newlength{\makecvtitlenamewidth}\setlength{\makecvtitlenamewidth}{\textwidth-\makecvtitlepicturewidth}% + \begin{minipage}[b]{\makecvtitlenamewidth}% + \namestyle{\@firstname\ \@lastname}% + \ifthenelse{\equal{\@title}{}}{}{\\[1.25em]\titlestyle{\@title}}% + \end{minipage}% + % optional photo + \usebox{\makecvtitlepicturebox}\\[2.5em]% + % optional quote + \ifthenelse{\isundefined{\@quote}}% + {}% + {{\centering\begin{minipage}{\quotewidth}\centering\quotestyle{\@quote}\end{minipage}\\[2.5em]}}% + % optional details + \makecvinfo{% + \addressfont\color{color2}% + \ifthenelse{\isundefined{\@addressstreet}}{}{\makenewline\addresssymbol\@addressstreet% + \ifthenelse{\equal{\@addresscity}{}}{}{\makenewline\@addresscity}% if \addresstreet is defined, \addresscity and \addresscountry will always be defined but could be empty + \ifthenelse{\equal{\@addresscountry}{}}{}{\makenewline\@addresscountry}}% + \collectionloop{phones}{% the key holds the phone type (=symbol command prefix), the item holds the number + \makenewline\csname\collectionloopkey phonesymbol\endcsname\collectionloopitem}% + 
\ifthenelse{\isundefined{\@email}}{}{\makenewline\emailsymbol\emaillink{\@email}}% + \ifthenelse{\isundefined{\@homepage}}{}{\makenewline\homepagesymbol\httplink{\@homepage}}% + \ifthenelse{\isundefined{\@extrainfo}}{}{\makenewline\@extrainfo}}} + +\RenewDocumentCommand{\section}{sm}{% + \par\addvspace{2.5ex}% + \phantomsection{}% reset the anchor for hyperrefs + \addcontentsline{toc}{section}{#2}% + \strut\sectionstyle{#2}% + \par\nobreak\addvspace{1ex}\@afterheading}% to avoid a pagebreak after the heading + +\RenewDocumentCommand{\subsection}{sm}{% + \par\addvspace{1ex}% + \phantomsection{}% reset the anchor for hyperrefs + \addcontentsline{toc}{subsection}{#2}% + \strut\subsectionstyle{#2}% + \par\nobreak\addvspace{0.5ex}\@afterheading}% to avoid a pagebreak after the heading + +\renewcommand*{\cvitem}[3][.25em]{% + \ifthenelse{\equal{#2}{}}{}{\hintstyle{#2}: }{#3}% + \par\addvspace{#1}} + +\renewcommand*{\cvdoubleitem}[5][.25em]{% + \begin{minipage}[t]{\doubleitemcolumnwidth}\hintstyle{#2}: #3\end{minipage}% + \hfill% fill of \separatorcolumnwidth + \begin{minipage}[t]{\doubleitemcolumnwidth}\ifthenelse{\equal{#4}{}}{}{\hintstyle{#4}: }#5\end{minipage}% + \par\addvspace{#1}} + +\renewcommand*{\cvlistitem}[2][.25em]{% + \cvitem[#1]{}{\listitemsymbol\begin{minipage}[t]{\listitemmaincolumnwidth}#2\end{minipage}}} + +\renewcommand*{\cvlistdoubleitem}[3][.25em]{% + \cvitem[#1]{}{\listitemsymbol\begin{minipage}[t]{\listdoubleitemmaincolumnwidth}#2\end{minipage}% + \hfill% fill of \separatorcolumnwidth + \ifthenelse{\equal{#3}{}}% + {}% + {\listitemsymbol\begin{minipage}[t]{\listdoubleitemmaincolumnwidth}#3\end{minipage}}}} + +\newbox{\cventryyearbox} +\newlength{\cventrytitleboxwidth} +\renewcommand*{\cventry}[7][.25em]{% + \savebox{\cventryyearbox}{% + \hspace*{2\separatorcolumnwidth}% + \hintstyle{#2}}% + \setlength{\cventrytitleboxwidth}{\widthof{\usebox{\cventryyearbox}}}% + \setlength{\cventrytitleboxwidth}{\maincolumnwidth-\cventrytitleboxwidth}% + \begin{minipage}{\maincolumnwidth}% + \parbox[t]{\cventrytitleboxwidth}{% + \strut% + {\bfseries#3}% + \ifthenelse{\equal{#4}{}}{}{, {\slshape#4}}% + \ifthenelse{\equal{#5}{}}{}{, #5}% + \ifthenelse{\equal{#6}{}}{}{, #6}% + .\strut}% + \usebox{\cventryyearbox}% + \end{minipage}% + \ifx&% + \else{% + \newline{}% + \begin{minipage}[t]{\maincolumnwidth}% + \small% + #7% + \end{minipage}}\fi% + \par\addvspace{#1}} + +\newbox{\cvitemwithcommentmainbox} +\newlength{\cvitemwithcommentmainlength} +\newlength{\cvitemwithcommentcommentlength} +\renewcommand*{\cvitemwithcomment}[4][.25em]{% + \savebox{\cvitemwithcommentmainbox}{\ifthenelse{\equal{#2}{}}{}{\hintstyle{#2}: }#3}% + \setlength{\cvitemwithcommentmainlength}{\widthof{\usebox{\cvitemwithcommentmainbox}}}% + \setlength{\cvitemwithcommentcommentlength}{\maincolumnwidth-\separatorcolumnwidth-\cvitemwithcommentmainlength}% + \begin{minipage}[t]{\cvitemwithcommentmainlength}\ifthenelse{\equal{#2}{}}{}{\hintstyle{#2}: }#3\end{minipage}% + \hfill% fill of \separatorcolumnwidth + \begin{minipage}[t]{\cvitemwithcommentcommentlength}\raggedleft\small\itshape#4\end{minipage}% + \par\addvspace{#1}} + +\renewenvironment{thebibliography}[1]% + {% + \bibliographyhead{\refname}% +% \small% + \begin{list}{\bibliographyitemlabel}% + {% + \setlength{\topsep}{0pt}% + \setlength{\labelwidth}{\hintscolumnwidth}% + \setlength{\labelsep}{\separatorcolumnwidth}% + \leftmargin\labelwidth% + \advance\leftmargin\labelsep% + \@openbib@code% + \usecounter{enumiv}% + \let\p@enumiv\@empty% + 
\renewcommand\theenumiv{\@arabic\c@enumiv}}% + \sloppy\clubpenalty4000\widowpenalty4000% +% \sfcode`\.\@m% +% \sfcode `\=1000\relax% + }% + {% + \def\@noitemerr{\@latex@warning{Empty `thebibliography' environment}}% + \end{list}% + } + + +%------------------------------------------------------------------------------- +% letter style definition +%------------------------------------------------------------------------------- +% commands +%\newlength{\textwidthdelta}% +\renewcommand*{\recomputeletterlengths}{% + \recomputecvlengths% + \setlength{\parskip}{6\p@}% + \leftskip=0pt% +% \setlength{\textwidthdelta}{+\marginparwidth+\marginparsep}% + \setlength{\marginparwidth}{\hintscolumnwidth}% + \setlength{\marginparsep}{2\separatorcolumnwidth}% +% \addtolength{\textwidthdelta}{-\marginparwidth-\marginparsep}% +% \changepage{}{\textwidthdelta}{-\textwidthdelta}{}{}{}{}{}{}%\changepage{<textheight>}{<textwidth>}{<evensidemargin>}{<oddsidemargin>}{<columnsep>}{<topmargin>}{<headheight>}{<headsep>}{<footskip>} + \changepage{}{-\marginparwidth-\marginparsep}{}{}{}{}{}{}{}%\changepage{<textheight>}{<textwidth>}{<evensidemargin>}{<oddsidemargin>}{<columnsep>}{<topmargin>}{<headheight>}{<headsep>}{<footskip>} + } + +\renewcommand*{\makelettertitle}{% + % recompute lengths (in case we are switching from letter to resume, or vice versa) + \recomputeletterlengths% + % recipient block + {\addressfont% + {\bfseries\upshape\@recipientname}\\% + \@recipientaddress}\\[1em]% + % date + \@date\\[2em]% + % opening + \@opening\\[1.5em]% + % sender contact info + \hspace{0pt}% + \marginpar{% + \addressfont\textcolor{color2}{% + {\bfseries\@firstname~\@lastname}\@firstdetailselementfalse% + \ifthenelse{\isundefined{\@addressstreet}}{}{\makenewline\addresssymbol\@addressstreet% + \ifthenelse{\equal{\@addresscity}{}}{}{\makenewline\@addresscity}% if \addresstreet is defined, \addresscity and \addresscountry will always be defined but could be empty + \ifthenelse{\equal{\@addresscountry}{}}{}{\makenewline\@addresscountry}}% + \collectionloop{phones}{% the key holds the phone type (=symbol command prefix), the item holds the number + \makenewline\csname\collectionloopkey phonesymbol\endcsname\collectionloopitem}% + \ifthenelse{\isundefined{\@email}}{}{\makenewline\emailsymbol\emaillink{\@email}}% + \ifthenelse{\isundefined{\@homepage}}{}{\makenewline\homepagesymbol\httplink{\@homepage}}% + \ifthenelse{\isundefined{\@extrainfo}}{}{\makenewline\@extrainfo}}}% + % ensure no extra spacing after \makelettertitle due to a possible blank line +% \ignorespacesafterend% not working + \par\vspace{-\baselineskip}\vspace{-\parskip}} + +\renewcommand*{\makeletterclosing}{ + \@closing\\[3em]% + {\bfseries\@firstname~\@lastname}% + \ifthenelse{\isundefined{\@enclosure}}{}{% + \\% + \vfill% + {\color{color2}\itshape\enclname: \@enclosure}}} + + +\endinput + + +%% end of file `moderncvstyleoldstyle.sty'. diff --git a/users/glittershark/resume/picture.png b/users/glittershark/resume/picture.png new file mode 100644 index 000000000000..63b21b5320ed --- /dev/null +++ b/users/glittershark/resume/picture.png Binary files differdiff --git a/users/glittershark/resume/resume.tex b/users/glittershark/resume/resume.tex new file mode 100644 index 000000000000..933558d570b6 --- /dev/null +++ b/users/glittershark/resume/resume.tex @@ -0,0 +1,212 @@ +%% start of file `template.tex'. +%% Copyright 2006-2013 Xavier Danaux (xdanaux@gmail.com). +%% Copyright 2014-2020 Griffin Smith (wildgriffin45@gmail.com). 
+% +% This work may be distributed and/or modified under the +% conditions of the LaTeX Project Public License version 1.3c, +% available at http://www.latex-project.org/lppl/. + + +\documentclass[10pt,a4paper,sans]{moderncv} % possible options include font size ('10pt', '11pt' and '12pt'), paper size ('a4paper', 'letterpaper', 'a5paper', 'legalpaper', 'executivepaper' and 'landscape') and font family ('sans' and 'roman') + +\usepackage[inline]{enumitem} + + +% moderncv themes +% style options are 'casual' (default), 'classic', 'oldstyle' and 'banking' +\moderncvstyle{casual} +% color options 'blue' (default), 'orange', 'green', 'red', 'purple', 'grey' and 'black' +\moderncvcolor{black} +% to set the default font; use '\sfdefault' for the default sans serif font, +% '\rmdefault' for the default roman one, or any tex font name +%\renewcommand{\familydefault}{\sfdefault} +\nopagenumbers{} + +\usepackage[utf8]{inputenc} + +\usepackage[scale=0.8, margin=0.65in]{geometry} +\setlength{\hintscolumnwidth}{2.6cm} + +\name{Griffin}{Smith} +\title{Software Engineer} +\phone[mobile]{(720) 206-7218} +\email{grfn@gws.fyi} +\homepage{https://www.gws.fyi} +\extrainfo{References available upon request} + +\begin{document} +\makecvtitle{} +\section{Skills} +\cvitem{Clojure}{Extensive experience architecting, deploying, and building +complex web applications in Clojure and Clojurescript, with a focus on +Re-Frame and Reagent.} +\cvitem{Haskell}{Passionate love for pure functional programming as a hobbyist +pursuit, but also practical experience building production systems in Haskell at +scale, and using Haskell's advanced type system extensions where appropriate to +deliver increased ergonomics and safety.} +\cvitem{Nix}{Experience with adopting and teaching nix at scale in a production +stack both for local development dependencies and for configuring and building +production software. Core contributer to a fork of the nix implementation itself +(tvix) aimed at providing increased safety, performance, and flexibility.} +\cvitem{Scala}{Understanding of Scala from the perspective of a functional +programmer rather than a Java programmer. Experience building production +big-data processing systems using Akka, and deep programming with Scala's type +system using Shapeless.} +\cvitem{Unix/Linux}{Experience with administrating highly available distributed +systems. Passion for the Unix philosophy of discrete, composable units of +functionality.} +\cvitem{Ruby}{Experience building both full-stack applications with Ruby on +Rails in addition to smaller microservices and custom frameworks. 
Deep +understanding of the internals of the Ruby interpreter and object system.} +\cvitem{Javascript}{Experience developing real-time responsive single-page web +applications using React, in addition to significant contributions to the React +open-source community.} +\cvitem{SQL}{Deep understanding of relational databases, including experience +designing the database schema in Postgres for an application with over a decade +of usage, hundreds of gigabytes of data, complex, multi-tiered hierarchical data +structures, as well as experience writing and optimizing large, complex queries +against that database.} + +\subsection{Additional Tools} +\cvitem{}{\footnotesize + \begin{itemize*} + \item Vim + \item Kubernetes + \item Git + \item Puppet + \item AWS + \item Reagent + \item Datomic + \item Elasticsearch + \item Redis + \item DynamoDB + \item Docker + \item JIRA + \item Java + \item QuickCheck (and similar tools) + \item Python + \item Elixir + \end{itemize*} + \newline + \textbf{Novice Level:} + \begin{itemize*} + \item Rust + \item C++ + \item Erlang + \item Prolog + \item Idris + \item Agda + \item Tensorflow + \end{itemize*}} + +\section{Experience} +\subsection{Employment} +\cventry{2019-present}{Engineering Manager}{Urbint}{New York, NY}{} +{\begin{itemize} + \item Lead of the platform team with two direct reports - a senior SRE and + a senior software engineer. + \item Performed user research on developers, project managers, product + managers, and other internal stakeholders to build the roadmap for the + platform team. + \item Built and maintained a system to deploy one-off full stack + application instances from pull requests to enable easier testing. + \item Led a large, multi-project migration between CI systems that resulted + in a decrease of average build times from 2 hours to less than 10 minutes. + \item Maintained and extended Nix-based build and development + infrastructure for both software engineers and machine learning engineers. + \end{itemize}} +\cventry{2018--2019}{Senior Software Engineer}{Urbint}{New York, NY}{} +{\begin{itemize} + \item Built, trained, and maintained a large, deep-learning-based + image-detection model for semi-automated (human-in-the-loop) video + classification. + \item Designed, built, and maintained a novel in-house tool for collection of + training data. + \item Maintained and guaranteed reliability of a large data pipeline for + video processing and classification. + \end{itemize}} +\cventry{2017--2018}{Senior Software Engineer}{Urbint}{New York, NY}{} +{\begin{itemize} + \item Integral in the architecture of a novel, serializable ACID + transactional graph database built on RocksDB, first in Elixir then in + Haskell. + \item Helped ship customer deliverables involving multi-day data + processing jobs for disparate data sources. + \item Instructed other developers in the use of and theory behind Haskell + \item Brought computational graph theory to bear on the problem of unifying + disparate, highly heterogeneous data sources across the world of open data. + \end{itemize}} +\cventry{2016--2017}{Senior Software Engineer}{SecurityScorecard, Inc.}{New York, NY}{} +{Lead frontend developer for a rapidly-moving and growing security software startup. + \begin{itemize} + \item Took part in collaborative product design meetings to make UX + tradeoffs with product designers and managers. + \item Drove application architecture for a large, complex, data-driven frontend + application. 
+ \item Championed increased use of production monitoring and alerting. + \item Worked with business stakeholders to set long- and short-term priorities for + application development. + \item Mentored junior team members. + \end{itemize}} +\cventry{2015--2016}{Lead Developer}{Nomi, Inc.}{New York, NY}{} +{Lead web services developer transitioning to a full-stack role implementing + shared software components and architecting a large, complex microservices + application ingesting hundreds of gigabytes of IoT data per week. + \begin{itemize} + \item Lead application architecture of the majority of the backend services to + encourage consistent REST API design and code sharing. + \item Championed the use of Haskell for rapid, safe development of the API Gateway + service. + \item Took ownership of operations and server maintenance of a >100-instance AWS + account using Puppet. + \end{itemize}} +\cventry{2014--2015}{Lead Developer}{LandlordsNY, LLC}{New York, NY}{} +{Sole engineer for a small startup connecting landlords and property managers and + facilitating the online sharing of information in a historically technology-averse + industry. + \begin{itemize} + \item Drove product design, visual design, and UX architecture for a major revamping + of the core product. + \item Interfaced with customers to set priorities for new feature development. + \item Conducted hiring and recruiting to build out an engineering team. + \end{itemize}} +\cventry{2012--2014}{Associate Developer}{Visionlink Inc.}{Boulder, CO}{} +{Integral member of an agile development team building the nation's most-used Information + and Referral platform for organizations such as United Way Worldwide and the American Red + Cross. + \begin{itemize} + \item Refactored and revamped legacy code to increase performance and long-term + maintainablity. + \item Worked on several triage-teams to rapidly fix production bugs with strict deadlines. + \item Built a complex, yet highly-performant tool for searching human services by category. + \item Acted as a core designer and developer of a major product revamp. + \begin{itemize} + \item Drove a complete rethinking of the data model in the product, leading to greater + unification, simplicity, and consistency; + \item Championed the adoption of a test-driven-development model; + \item Drove product documentation and code standardization. + \end{itemize} + \end{itemize}} + +\section{Project Highlights} +\newcommand{\project}[3]{\item \textbf{#1} -- \textit{#2}\newline{}#3} +\cvitem{}{\begin{itemize} + \project{Github Bug Bounty}{https://bounty.github.com/researchers/glittershark.html}{ + Discovered and responsibly disclosed a persistent XSS on Github's main + website} + \project{Tvix}{https://cs.tvl.fyi/depot/-/blob/third\_party/nix/README.md}{ + Fork of the Nix build tool delivering increased reliability, code + quality, and pluggability} + \project{Panettone}{https://cs.tvl.fyi/depot/-/tree/web/panettone}{ + Aggressively simple bug-tracker developed in Common Lisp for the community + involved in the development of Tvix. 
Hosted at https://b.tvl.fyi} + \project{Org-Clubhouse}{https://github.com/glittershark/org-clubhouse}{ + Emacs library for integration between org-mode and the Clubhouse issue + tracker} + \project{core-async-storage}{https://github.com/glittershark/core-async-storage}{ + Simple Clojurescript wrapper around React Native's AsyncStorage using + core.async} +\end{itemize}} + +\end{document} +% vim: set tw=95 colorcolumn=-1: diff --git a/users/glittershark/resume/tweaklist.sty b/users/glittershark/resume/tweaklist.sty new file mode 100644 index 000000000000..adc939893261 --- /dev/null +++ b/users/glittershark/resume/tweaklist.sty @@ -0,0 +1,56 @@ +%% start of file `tweaklist.sty'. +%% Original by Jakob Schiøtz, downloaded from http://dcwww.camd.dtu.dk/~schiotz/comp/LatexTips/tweaklist.sty; not found on ctan. +%% Modified by Xavier Danaux (xdanaux@gmail.com). +% +% The tweaklist.sty package redefines the itemize, enumerate and description packages, so that all parameters can be adjusted. +% This was done by copying the original definitions, and adding "hook commands" that are executed when entering the environment. +% The hook commands are initially empty, but can be redefined with \renewcommand. +% +% This work may be distributed and/or modified under the +% conditions of the LaTeX Project Public License version 1.3c, +% available at http://www.latex-project.org/lppl/. + + +% hooks for the itemize environment +\def\itemhook{} +\def\itemhooki{} +\def\itemhookii{} +\def\itemhookiii{} +\def\itemhookiv{} +% hooks for the enumerate environment +\def\enumhook{} +\def\enumhooki{} +\def\enumhookii{} +\def\enumhookiii{} +\def\enumhookiv{} +% hook for the description environment +\def\deschook{} +% original environment definitions, with hooks added +\def\enumerate{% + \ifnum \@enumdepth >\thr@@\@toodeep\else + \advance\@enumdepth\@ne + \edef\@enumctr{enum\romannumeral\the\@enumdepth}% + \expandafter + \list + \csname label\@enumctr\endcsname + {% + \enumhook \csname enumhook\romannumeral\the\@enumdepth\endcsname% + \usecounter\@enumctr\def\makelabel##1{\hss\llap{##1}}% + }% + \fi} +\def\itemize{% + \ifnum \@itemdepth >\thr@@\@toodeep\else + \advance\@itemdepth\@ne + \edef\@itemitem{labelitem\romannumeral\the\@itemdepth}% + \expandafter + \list + \csname\@itemitem\endcsname + {% + \itemhook \csname itemhook\romannumeral\the\@itemdepth\endcsname% + \def\makelabel##1{\hss\llap{##1}}% + }% + \fi} +\newenvironment{description} + {\list{}{\deschook\labelwidth\z@ \itemindent-\leftmargin + \let\makelabel\descriptionlabel}} + {\endlist} diff --git a/users/glittershark/system/.gitignore b/users/glittershark/system/.gitignore new file mode 100644 index 000000000000..41fbeb02c47d --- /dev/null +++ b/users/glittershark/system/.gitignore @@ -0,0 +1 @@ +**/result diff --git a/users/glittershark/system/home/.skip-subtree b/users/glittershark/system/home/.skip-subtree new file mode 100644 index 000000000000..e69de29bb2d1 --- /dev/null +++ b/users/glittershark/system/home/.skip-subtree diff --git a/users/glittershark/system/home/common/legacy-dotfiles.nix b/users/glittershark/system/home/common/legacy-dotfiles.nix new file mode 100644 index 000000000000..33d9581e6a61 --- /dev/null +++ b/users/glittershark/system/home/common/legacy-dotfiles.nix @@ -0,0 +1,8 @@ +with import <nixpkgs> {}; +fetchgit { + url = "https://github.com/glittershark/dotfiles.git"; + rev = "e0c7f2592fbc2f9942763d2146d362a1314630e9"; + # date = "2020-03-25T20:38:51-04:00"; + sha256 = "126zy4ff6nl2vma2s74waksim7j5h3n6qpaxnnn17vkc1cq0fcd9"; + 
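  # Annotation (not in the original file): fetchgit pins the dotfiles checkout
  # to the rev above, with sha256 as the fixed-output hash of that checkout;
  # bumping rev means recomputing the hash (a prefetch helper such as
  # nix-prefetch-git prints both, and the commented "date" line above is the
  # commit timestamp such tools emit).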
fetchSubmodules = false; +} diff --git a/users/glittershark/system/home/common/solarized.nix b/users/glittershark/system/home/common/solarized.nix new file mode 100644 index 000000000000..e94693edc566 --- /dev/null +++ b/users/glittershark/system/home/common/solarized.nix @@ -0,0 +1,18 @@ +rec { + base03 = "#002B36"; + base02 = "#073642"; + base01 = "#586e75"; + base00 = "#657b83"; + base0 = "#839496"; + base1 = "#93a1a1"; + base2 = "#eee8d5"; + base3 = "#fdf6e3"; + yellow = "#b58900"; + orange = "#cb4b16"; + red = "#dc322f"; + magenta = "#d33682"; + violet = "#6c71c4"; + blue = "#268bd2"; + cyan = "#2aa198"; + green = "#859900"; +} diff --git a/users/glittershark/system/home/default.nix b/users/glittershark/system/home/default.nix new file mode 100644 index 000000000000..1d6d8795ab97 --- /dev/null +++ b/users/glittershark/system/home/default.nix @@ -0,0 +1,36 @@ +{ pkgs, depot, lib, ... }: + +with lib; + +rec { + nixpkgs = import pkgs.nixpkgsSrc {}; + + home = confPath: (import "${nixpkgs.home-manager.src}/modules" { + pkgs = nixpkgs; + configuration = { config, lib, ... }: { + imports = [confPath]; + + _module.args.pkgs = mkForce + (import pkgs.nixpkgsSrc (filterAttrs (n: v: v != null) config.nixpkgs)); + + lib.depot = depot; + }; + }); + + chupacabra = home ./machines/chupacabra.nix; + + chupacabraHome = chupacabra.activation-script; + + dobharchu = home ./machines/dobharchu.nix; + + dobharchuHome = dobharchu.activation-script; + + yeren = home ./machines/yeren.nix; + + yerenHome = yeren.activation-script; + + meta.targets = [ + "chupacabraHome" + "yerenHome" + ]; +} diff --git a/users/glittershark/system/home/home.nix b/users/glittershark/system/home/home.nix new file mode 100644 index 000000000000..39045c147d76 --- /dev/null +++ b/users/glittershark/system/home/home.nix @@ -0,0 +1,20 @@ +{ config, pkgs, ... }: + +{ + imports = [ + (throw "Pick a machine from ./machines") + ]; + + # Let Home Manager install and manage itself. + programs.home-manager.enable = true; + + # This value determines the Home Manager release that your + # configuration is compatible with. This helps avoid breakage + # when a new Home Manager release introduces backwards + # incompatible changes. + # + # You can update Home Manager without changing this value. See + # the Home Manager release notes for a list of state version + # changes in each release. + home.stateVersion = "19.09"; +} diff --git a/users/glittershark/system/home/machines/chupacabra.nix b/users/glittershark/system/home/machines/chupacabra.nix new file mode 100644 index 000000000000..e9f27daa4408 --- /dev/null +++ b/users/glittershark/system/home/machines/chupacabra.nix @@ -0,0 +1,64 @@ +{ pkgs, lib, config, ... 
}: + +with lib; + +let + laptopKeyboardId = "25"; +in { + imports = [ + ../platforms/linux.nix + ../modules/common.nix + ../modules/games.nix + ../modules/rtlsdr.nix + ../modules/ptt.nix + ]; + + # for when hacking + programs.home-manager.path = "/home/grfn/code/home-manager"; + programs.home-manager.enable = true; + home.stateVersion = "19.09"; + + system.machine = { + wirelessInterface = "wlp59s0"; + i3FontSize = 9; + }; + + systemd.user.services.laptop-keyboard = { + Unit = { + Description = "Swap caps+escape and alt+super, but only on the built-in laptop keyboard"; + After = [ "graphical-session-pre.target" ]; + PartOf = [ "graphical-session.target" ]; + }; + + Install = { WantedBy = [ "graphical-session.target" ]; }; + + Service = { + Type = "oneshot"; + RemainAfterExit = true; + ExecStart = ( + "${pkgs.xorg.setxkbmap}/bin/setxkbmap " + + "-device ${laptopKeyboardId} " + + "-option caps:swapescape " + + "-option compose:ralt " + + "-option altwin:swap_alt_win" + ); + }; + }; + + home.packages = with pkgs; [ + steam + xorg.libxcb + (writeShellScriptBin "rebuild-mugwump" '' + set -eo pipefail + cd ~/code/depot + nix build -f . users.glittershark.system.system.mugwumpSystem -o /tmp/mugwump + nix copy -f . users.glittershark.system.system.mugwumpSystem \ + --to ssh://mugwump + system=$(readlink -ef /tmp/mugwump) + ssh mugwump sudo nix-env -p /nix/var/nix/profiles/system --set $system + ssh mugwump sudo $system/bin/switch-to-configuration switch + '') + ]; + + xsession.windowManager.i3.config.keybindings.F7 = "exec lock"; +} diff --git a/users/glittershark/system/home/machines/dobharchu.nix b/users/glittershark/system/home/machines/dobharchu.nix new file mode 100644 index 000000000000..0b8503a00e98 --- /dev/null +++ b/users/glittershark/system/home/machines/dobharchu.nix @@ -0,0 +1,17 @@ +{ config, lib, pkgs, ... }: + +{ + imports = [ + ../platforms/darwin.nix + ../modules/common.nix + ../modules/games.nix + ]; + + home.packages = with pkgs; [ + coreutils + gnupg + nix-prefetch-github + pass + pinentry_mac + ]; +} diff --git a/users/glittershark/system/home/machines/roswell.nix b/users/glittershark/system/home/machines/roswell.nix new file mode 100644 index 000000000000..1b1bbf29c616 --- /dev/null +++ b/users/glittershark/system/home/machines/roswell.nix @@ -0,0 +1,57 @@ +{ pkgs, lib, config, ... }: + +let + laptopKeyboardId = "5"; +in + +{ + imports = [ + ../platforms/linux.nix + ../modules/shell.nix + ../modules/development.nix + ../modules/emacs.nix + ../modules/vim.nix + ]; + + home.packages = with pkgs; [ + # System utilities + bat + htop + killall + bind + zip unzip + tree + ncat + bc + pv + + # Security + gnupg + keybase + openssl + + # Nix things + nixfmt + nix-prefetch-github + nix-review + cachix + ]; + + + nixpkgs.config.allowUnfree = true; + + programs.password-store.enable = true; + + programs.home-manager.enable = true; + home.stateVersion = "20.03"; + + xsession.enable = lib.mkForce false; + + services.lorri.enable = true; + + programs.direnv = { + enable = true; + enableBashIntegration = true; + enableZshIntegration = true; + }; +} diff --git a/users/glittershark/system/home/machines/yeren.nix b/users/glittershark/system/home/machines/yeren.nix new file mode 100644 index 000000000000..0e3c20ca3ff0 --- /dev/null +++ b/users/glittershark/system/home/machines/yeren.nix @@ -0,0 +1,75 @@ +{ pkgs, lib, config, ... 
}: + +let + laptopKeyboardId = "5"; +in + +{ + imports = [ + ../platforms/linux.nix + ../modules/common.nix + ]; + + # for when hacking + programs.home-manager.enable = true; + home.stateVersion = "20.03"; + + system.machine = { + wirelessInterface = "wlp0s20f3"; + i3FontSize = 9; + }; + + home.packages = with pkgs; [ + zoom-us + slack + mysql + graphviz + mypaint + xdot + + (discord.override rec { + version = "0.0.13"; + src = fetchurl { + url = "https://dl.discordapp.net/apps/linux/${version}/discord-${version}.tar.gz"; + sha256 = "0d5z6cbj9dg3hjw84pyg75f8dwdvi2mqxb9ic8dfqzk064ssiv7y"; + }; + }) + + steam + + (awscli2.overridePythonAttrs (oldAttrs: { + postPatch = '' + substituteInPlace setup.py \ + --replace 'colorama>=0.2.5,<0.4.4' 'colorama' \ + --replace 'wcwidth<0.2.0' 'colorama' \ + --replace 'cryptography>=2.8.0,<=2.9.0' 'cryptography' \ + --replace 'docutils>=0.10,<0.16' 'docutils' \ + --replace 'ruamel.yaml>=0.15.0,<0.16.0' 'ruamel.yaml' + ''; + })) + ]; + + systemd.user.services.laptop-keyboard = { + Unit = { + Description = "Swap caps+escape and alt+super, but only on the built-in laptop keyboard"; + After = [ "graphical-session-pre.target" ]; + PartOf = [ "graphical-session.target" ]; + }; + + Install = { WantedBy = [ "graphical-session.target" ]; }; + + Service = { + Type = "oneshot"; + RemainAfterExit = true; + ExecStart = ( + "${pkgs.xorg.setxkbmap}/bin/setxkbmap " + + "-device ${laptopKeyboardId} " + + "-option caps:swapescape " + + "-option compose:ralt " + + "-option altwin:swap_alt_win" + ); + }; + }; + + xsession.windowManager.i3.config.keybindings.F9 = "exec lock"; +} diff --git a/users/glittershark/system/home/modules/alacritty.nix b/users/glittershark/system/home/modules/alacritty.nix new file mode 100644 index 000000000000..67d6638a31f8 --- /dev/null +++ b/users/glittershark/system/home/modules/alacritty.nix @@ -0,0 +1,56 @@ +{ config, lib, pkgs, ... }: + +{ + programs.alacritty = { + enable = true; + settings = { + font.size = 6; + font.normal.family = "Meslo LGSDZ Nerd Font"; + + draw_bold_text_with_bright_colors = false; + + key_bindings = [ + { + key = "Escape"; + mods = "Control"; + action = "ToggleViMode"; + } + ]; + + colors = with import ../common/solarized.nix; rec { + # Default colors + primary = { + background = base3; + foreground = base00; + }; + + cursor = { + text = base3; + cursor = base00; + }; + + # Normal colors + normal = { + inherit red green yellow blue magenta cyan; + black = base02; + white = base2; + }; + + # Bright colors + # bright = normal; + bright = { + black = base03; + red = orange; + green = base01; + yellow = base00; + blue = base0; + magenta = violet; + cyan = base1; + white = base3; + }; + + vi_mode_cursor.cursor = red; + }; + }; + }; +} diff --git a/users/glittershark/system/home/modules/alsi.nix b/users/glittershark/system/home/modules/alsi.nix new file mode 100644 index 000000000000..cf8b0bebe743 --- /dev/null +++ b/users/glittershark/system/home/modules/alsi.nix @@ -0,0 +1,58 @@ +{ config, lib, pkgs, ... 
}: +{ + home.packages = [ config.lib.depot.third_party.alsi ]; + + xdg.configFile."alsi/alsi.logo" = { + source = ./nixos-logo.txt; + force = true; + }; + + xdg.configFile."alsi/alsi.conf" = { + force = true; + text = '' + #!${pkgs.perl}/bin/perl + + scalar { + ALSI_VERSION => "0.4.8", + COLORS_FILE => "/${config.home.homeDirectory}/.config/alsi/alsi.colors", + DE_FILE => "/${config.home.homeDirectory}/.config/alsi/alsi.de", + DEFAULT_COLOR_BOLD => "blue", + DEFAULT_COLOR_NORMAL => "blue", + DF_COMMAND => "df -Th -x sys -x tmpfs -x devtmpfs &>/dev/stdout", + GTK2_RC_FILE => "/${config.home.homeDirectory}/.gtkrc-2.0", + GTK3_RC_FILE => "/${config.home.homeDirectory}/.config/gtk-3.0/settings.ini", + LOGO_FILE => "/${config.home.homeDirectory}/.config/alsi/alsi.logo", + OUTPUT_FILE => "/${config.home.homeDirectory}/.config/alsi/alsi.output", + # PACKAGES_PATH => "/var/lib/pacman/local/", + PS_COMMAND => "ps -A", + USAGE_COLORS => 0, + USAGE_COLORS_BOLD => 0, + USAGE_PRECENT_GREEN => 50, + USAGE_PRECENT_RED => 100, + USAGE_PRECENT_YELLOW => 85, + USE_LOGO_FROM_FILE => 1, + USE_VALUES_COLOR => 0, + WM_FILE => "/${config.home.homeDirectory}/.config/alsi/alsi.wm", + } + ''; + }; + + xdg.configFile."alsi/alsi.colors".text = '' + #!${pkgs.perl}/bin/perl + + # Colors for alsi + + scalar { + black => {normal => "\e[0;30m", bold => "\e[1;30m"}, + red => {normal => "\e[0;31m", bold => "\e[1;31m"}, + green => {normal => "\e[0;32m", bold => "\e[1;32m"}, + yellow => {normal => "\e[0;33m", bold => "\e[1;33m"}, + default => {normal => "\e[0;34m", bold => "\e[1;34m"}, + blue => {normal => "\e[0;34m", bold => "\e[1;34m"}, + purple => {normal => "\e[0;35m", bold => "\e[1;35m"}, + cyan => {normal => "\e[0;36m", bold => "\e[1;36m"}, + white => {normal => "\e[0;37m", bold => "\e[1;37m"}, + reset => "\e[0m", + } + ''; +} diff --git a/users/glittershark/system/home/modules/common.nix b/users/glittershark/system/home/modules/common.nix new file mode 100644 index 000000000000..0b295a450b61 --- /dev/null +++ b/users/glittershark/system/home/modules/common.nix @@ -0,0 +1,91 @@ +{ config, lib, pkgs, ... 
}: + +# Everything in here needs to work on linux or darwin + +{ + imports = [ + ../modules/shell.nix + ../modules/development.nix + ../modules/emacs.nix + ../modules/vim.nix + ../modules/tarsnap.nix + ../modules/twitter.nix + ../modules/lib/cloneRepo.nix + ]; + + nixpkgs.config.allowUnfree = true; + + programs.password-store.enable = true; + + grfn.impure.clonedRepos.passwordStore = { + github = "glittershark/pass"; + path = ".local/share/password-store"; + }; + + home.packages = with pkgs; [ + # System utilities + bat + htop + killall + bind + zip unzip + tree + ncat + bc + pv + + # Security + gnupg + keybase + openssl + + # Nix things + nixfmt + nix-prefetch-github + nix-review + cachix + ]; + + programs.ssh = { + enable = true; + + matchBlocks = { + "home" = { + host = "home.gws.fyi"; + forwardAgent = true; + }; + + "dobharchu" = { + host = "dobharchu"; + hostname = "172.16.0.4"; + forwardAgent = true; + user = "griffin"; + }; + + "cerberus" = { + host = "cerberus"; + hostname = "172.16.0.3"; + forwardAgent = true; + user = "griffin"; + }; + + "mugwump" = { + host = "mugwump"; + hostname = "172.16.0.5"; + forwardAgent = true; + }; + + "roswell" = { + host = "roswell"; + hostname = "18.223.118.13"; + forwardAgent = true; + }; + }; + }; + + programs.direnv = { + enable = true; + enableBashIntegration = true; + enableZshIntegration = true; + }; +} diff --git a/users/glittershark/system/home/modules/development.nix b/users/glittershark/system/home/modules/development.nix new file mode 100644 index 000000000000..7fc5de807d6d --- /dev/null +++ b/users/glittershark/system/home/modules/development.nix @@ -0,0 +1,189 @@ +{ config, lib, pkgs, ... }: + +let + + clj2nix = pkgs.callPackage (pkgs.fetchFromGitHub { + owner = "hlolli"; + repo = "clj2nix"; + rev = "3ab3480a25e850b35d1f532a5e4e7b3202232383"; + sha256 = "1lry026mlpxp1j563qs13nhxf37i2zpl7lh0lgfdwc44afybqka6"; + }) {}; + + pg-dump-upsert = pkgs.buildGoModule rec { + pname = "pg-dump-upsert"; + version = "165258deaebded5e9b88f7a0acf3a4b7350e7bf4"; + + src = pkgs.fetchFromGitHub { + owner = "tomyl"; + repo = "pg-dump-upsert"; + rev = version; + sha256 = "1an4h8jjbj3r618ykjwk9brii4h9cxjqy47c4c8rivnvhimgf4wm"; + }; + + vendorSha256 = "1a5fx6mrv30cl46kswicd8lf5i5shn1fykchvbnbhdpgxhbz6qi4"; + }; + +in + +with lib; + +{ + imports = [ + ./lib/zshFunctions.nix + ./development/kube.nix + # TODO(grfn): agda build is broken in the nixpkgs checkout + # ./development/agda.nix + ./development/rust.nix + ]; + + home.packages = with pkgs; [ + jq + yq + gitAndTools.hub + gitAndTools.tig + gitAndTools.gh + shellcheck + httpie + entr + gnumake + inetutils + tokei + jsonnet + ngrok + + gdb + lldb + hyperfine + + clj2nix + clojure + leiningen + clj-kondo + + pg-dump-upsert + + config.lib.depot.third_party.clang-tools + ] ++ optionals (stdenv.isLinux) [ + julia + valgrind + ]; + + programs.git = { + enable = true; + package = pkgs.gitFull; + userEmail = "root@gws.fyi"; + userName = "Griffin Smith"; + ignores = [ + "*.sw*" + ".classpath" + ".project" + ".settings/" + ".dir-locals.el" + ".stack-work-profiling" + ".projectile" + ]; + extraConfig = { + github.user = "glittershark"; + merge.conflictstyle = "diff3"; + rerere.enabled = "true"; + }; + + delta = { + enable = true; + options = { + theme = "Solarized (light)"; + hunk-style = "plain"; + commit-style = "box"; + }; + }; + }; + + home.file.".psqlrc".text = '' + \set QUIET 1 + \timing + \set ON_ERROR_ROLLBACK interactive + \set VERBOSITY verbose + \x auto + \set PROMPT1 '%[%033[1m%]%M/%/%R%[%033[0m%]%# ' + \set 
PROMPT2 '...%# ' + \set HISTFILE ~/.psql_history- :DBNAME + \set HISTCONTROL ignoredups + \pset null [null] + \unset QUIET + ''; + + programs.readline = { + enable = true; + extraConfig = '' + set editing-mode vi + ''; + }; + + programs.zsh = { + shellAliases = { + # Git + "gwip" = "git add . && git commit -am wip"; + "gpr" = "g pull-request"; + "gcl" = "git clone"; + "grs" = "gr --soft"; + "grhh" = "grh HEAD"; + "grh" = "gr --hard"; + "gr" = "git reset"; + "gcb" = "gc -b"; + "gco" = "gc"; + "gcd" = "gc development"; + "gcm" = "gc master"; + "gcc" = "gc canon"; + "gc" = "git checkout"; + "gbg" = "git branch | grep"; + "gba" = "git branch -a"; + "gb" = "git branch"; + "gcv" = "git commit --verbose"; + "gci" = "git commit"; + "gm" = "git merge"; + "gdc" = "gd --cached"; + "gd" = "git diff"; + "gsl" = "git stash list"; + "gss" = "git show stash"; + "gsad" = "git stash drop"; + "gsa" = "git stash"; + "gst" = "gs"; + "gs" = "git status"; + "gg" = "gl --decorate --oneline --graph --date-order --all"; + "gl" = "git log"; + "gf" = "git fetch"; + "gur" = "gu --rebase"; + "gu" = "git pull"; + "gpf" = "gp -f"; + "gpa" = "gp --all"; + "gpu" = "git push -u origin \"$(git symbolic-ref --short HEAD)\""; + "gp" = "git push"; + "ganw" = "git diff -w --no-color | git apply --cached --ignore-whitespace"; + "ga" = "git add"; + "gnp" = "git --no-pager"; + "g" = "git"; + "git" = "hub"; + "grim" = "git fetch && git rebase -i --autostash origin/master"; + "grom" = "git fetch && git rebase --autostash origin/master"; + "groc" = "git fetch && git rebase --autostash origin/canon"; + "grc" = "git rebase --continue"; + "gcan" = "git commit --amend --no-edit"; + "grl" = "git reflog"; + + # Haskell + "crl" = "cabal repl"; + "cr" = "cabal run"; + "cnb" = "cabal new-build"; + "cob" = "cabal old-build"; + "cnr" = "cabal new-run"; + "cor" = "cabal old-run"; + "ho" = "hoogle"; + }; + + functions = { + gdelmerged = '' + git branch --merged | egrep -v 'master' | tr -d '+ ' | xargs git branch -d + ''; + }; + }; +} diff --git a/users/glittershark/system/home/modules/development/agda.nix b/users/glittershark/system/home/modules/development/agda.nix new file mode 100644 index 000000000000..bed05693aa40 --- /dev/null +++ b/users/glittershark/system/home/modules/development/agda.nix @@ -0,0 +1,58 @@ +{ config, lib, pkgs, ... 
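# Annotation (not in the original file): summary of the module body below --
# Agda is installed with the nixpkgs standard-library, impure checkouts of
# agda-stdlib and agda-categories are kept under ~/code via the cloneRepo
# module, and ~/.agda/libraries points at those checkouts.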
}: + +let + agda-categories = with pkgs.agdaPackages; mkDerivation rec { + pname = "agda-categories"; + version = "2128fab"; + src = pkgs.fetchFromGitHub { + owner = "agda"; + repo = "agda-categories"; + rev = version; + sha256 = "08mc20qaz9vp5rhi60rh8wvjkg5aby3bgwwdhfnxha1663qf1q24"; + }; + + buildInputs = [ standard-library ]; + }; + +in + +{ + imports = [ + ../lib/cloneRepo.nix + ]; + + home.packages = with pkgs; [ + (pkgs.agda.withPackages + (p: with p; [ + p.standard-library + + ])) + ]; + + grfn.impure.clonedRepos = { + agda-stdlib = { + github = "agda/agda-stdlib"; + path = "code/agda-stdlib"; + }; + + agda-categories = { + github = "agda/agda-categories"; + path = "code/agda-categories"; + }; + + categories-examples = { + github = "agda/categories-examples"; + path = "code/categories-examples"; + }; + }; + + home.file.".agda/defaults".text = '' + standard-library + ''; + + home.file.".agda/libraries".text = '' + ${config.home.homeDirectory}/code/agda-stdlib/standard-library.agda-lib + ${config.home.homeDirectory}/code/agda-categories/agda-categories.agda-lib + ''; + +} diff --git a/users/glittershark/system/home/modules/development/kube.nix b/users/glittershark/system/home/modules/development/kube.nix new file mode 100644 index 000000000000..97ae4760d43b --- /dev/null +++ b/users/glittershark/system/home/modules/development/kube.nix @@ -0,0 +1,34 @@ +{ config, lib, pkgs, ... }: +{ + home.packages = with pkgs; [ + kubectl + kubetail + sops + kubie + # pkgs-unstable.argocd # provided by urbos + ]; + + programs.zsh.shellAliases = { + "kc" = "kubectl"; + "kg" = "kc get"; + "kga" = "kc get --all-namespaces"; + "kpd" = "kubectl get pods"; + "kpa" = "kubectl get pods --all-namespaces"; + "klf" = "kubectl logs -f"; + "kdep" = "kubectl get deployments"; + "ked" = "kubectl edit deployment"; + "kpw" = "kubectl get pods -w"; + "kew" = "kubectl get events -w"; + "kdel" = "kubectl delete"; + "knw" = "kubectl get nodes -w"; + "kev" = "kubectl get events --sort-by='.metadata.creationTimestamp'"; + + "arsy" = "argocd app sync --prune"; + }; + + home.file.".kube/kubie.yaml".text = '' + shell: zsh + prompt: + zsh_use_rps1: true + ''; +} diff --git a/users/glittershark/system/home/modules/development/rust.nix b/users/glittershark/system/home/modules/development/rust.nix new file mode 100644 index 000000000000..4deb83465289 --- /dev/null +++ b/users/glittershark/system/home/modules/development/rust.nix @@ -0,0 +1,25 @@ +{ config, lib, pkgs, ... }: + + +{ + home.packages = with pkgs; [ + rustup + rust-analyzer + cargo-edit + cargo-expand + sccache + ]; + + programs.zsh.shellAliases = { + "cg" = "cargo"; + "cb" = "cargo build"; + "ct" = "cargo test"; + "ctw" = "fd -e rs | entr cargo test"; + "cch" = "cargo check"; + }; + + home.file.".cargo/config".text = '' + [build] + rustc-wrapper = "${pkgs.sccache}/bin/sccache" + ''; +} diff --git a/users/glittershark/system/home/modules/emacs.nix b/users/glittershark/system/home/modules/emacs.nix new file mode 100644 index 000000000000..b7520085f835 --- /dev/null +++ b/users/glittershark/system/home/modules/emacs.nix @@ -0,0 +1,99 @@ +{ pkgs, lib, config, ... 
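# Annotation (not in the original file): in the module below, emacs itself is
# pkgs.emacsUnstable from a pinned emacs-overlay tarball, extra packages come
# from the depot (tools.emacs-pkgs and third_party.emacs.vterm), and doom-emacs
# plus the personal .doom.d are handled as impure cloned repositories; the
# commented-out nix-doom-emacs block is the declarative alternative not taken.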
}: + +with lib; + +let + # doom-emacs = pkgs.callPackage (builtins.fetchTarball { + # url = https://github.com/vlaci/nix-doom-emacs/archive/master.tar.gz; + # }) { + # doomPrivateDir = ./doom.d; # Directory containing your config.el init.el + # # and packages.el files + # }; + + depot = config.lib.depot; + +in { + imports = [ + ./lib/cloneRepo.nix + ]; + + # home.packages = [ doom-emacs ]; + # home.file.".emacs.d/init.el".text = '' + # (load "default.el") + # ''; + # + + config = mkMerge [ + { + home.packages = with pkgs; [ + # LaTeX (for org export) + (pkgs.texlive.combine { + inherit (pkgs.texlive) + scheme-basic collection-fontsrecommended ulem + fncychap titlesec tabulary varwidth framed fancyvrb float parskip + wrapfig upquote capt-of needspace; + }) + + ispell + + ripgrep + coreutils + fd + clang + gnutls + ]; + + nixpkgs.overlays = [ + (import (builtins.fetchTarball { + url = "https://github.com/nix-community/emacs-overlay/archive/54afb061bdd12c61bbfcc13bad98b7a3aab7d8d3.tar.gz"; + sha256 = "0hrbg65d5h0cb0nky7a46md7vlvhajq1hf0328l2f7ln9hznqz6j"; + })) + ]; + + programs.emacs = { + enable = true; + package = pkgs.emacsUnstable; + extraPackages = (epkgs: + (with depot.tools.emacs-pkgs; [ + dottime + tvl + ]) + ++ (with depot.third_party.emacs; [ + vterm + ]) + ); + }; + + grfn.impure.clonedRepos = { + orgClubhouse = { + github = "glittershark/org-clubhouse"; + path = "code/org-clubhouse"; + }; + + doomEmacs = { + github = "hlissner/doom-emacs"; + path = ".emacs.d"; + after = ["emacs.d"]; + onClone = "bin/doom install"; + }; + + "emacs.d" = { + github = "glittershark/emacs.d"; + path = ".doom.d"; + after = ["orgClubhouse"]; + }; + }; + + programs.zsh.shellAliases = { + "ec" = "emacsclient"; + }; + } + (mkIf pkgs.stdenv.isLinux { + # Notes + services.syncthing = { + enable = true; + tray = true; + }; + }) + ]; +} diff --git a/users/glittershark/system/home/modules/email.nix b/users/glittershark/system/home/modules/email.nix new file mode 100644 index 000000000000..77e068f43fbb --- /dev/null +++ b/users/glittershark/system/home/modules/email.nix @@ -0,0 +1,82 @@ +{ lib, pkgs, ... }: + +with lib; + +let + + # from home-manager/modules/services/lieer.nix + escapeUnitName = name: + let + good = upperChars ++ lowerChars ++ stringToCharacters "0123456789-_"; + subst = c: if any (x: x == c) good then c else "-"; + in stringAsChars subst name; + + accounts = { + personal = { + primary = true; + address = "root@gws.fyi"; + aliases = [ "grfn@gws.fyi" ]; + passEntry = "root-gws-msmtp"; + }; + }; + +in { + programs.lieer.enable = true; + programs.notmuch.enable = true; + services.lieer.enable = true; + programs.msmtp.enable = true; + + home.packages = with pkgs; [ + mu + msmtp + ]; + + # nixpkgs.overlays = [(self: super: { + # notifymuch = self.python3Packages.callPackage ../../pkgs/notifymuch.nix {}; + # })]; + + systemd.user.services = mapAttrs' (name: account: { + name = escapeUnitName "lieer-${name}"; + value.Service.ExecStart = mkForce "${pkgs.writeShellScript "sync-${name}" '' + ${pkgs.gmailieer}/bin/gmi sync + ''}"; + # ${pkgs.notifymuch}/bin/notifymuch + }) accounts; + + # xdg.configFile."notifymuch/notifymuch.cfg".text = generators.toINI {} { + # notifymuch = { + # query = "is:unread and is:important"; + # mail_client = ""; + # recency_interval_hours = "48"; + # hidden_tags = "inbox unread attachment replied sent encrypted signed"; + # }; + # }; + + accounts.email.maildirBasePath = "mail"; + accounts.email.accounts = mapAttrs (_: params@{ passEntry, ... 
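  # Annotation (not in the original file): passEntry names the pass(1) entry
  # for this account; it only feeds passwordCommand below and is stripped via
  # builtins.removeAttrs before the remaining per-account attributes are
  # merged into the generated accounts.email.accounts entry.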
}: { + realName = "Griffin Smith"; + passwordCommand = "pass ${passEntry}"; + + flavor = "gmail.com"; + + imapnotify = { + enable = true; + boxes = [ "Inbox" ]; + }; + + gpg = { + key = "0F11A989879E8BBBFDC1E23644EF5B5E861C09A7"; + signByDefault = true; + }; + + notmuch.enable = true; + lieer = { + enable = true; + sync = { + enable = true; + frequency = "*:*"; + }; + }; + msmtp.enable = true; + } // builtins.removeAttrs params ["passEntry"]) accounts; +} diff --git a/users/glittershark/system/home/modules/firefox.nix b/users/glittershark/system/home/modules/firefox.nix new file mode 100644 index 000000000000..c7e78685a5a3 --- /dev/null +++ b/users/glittershark/system/home/modules/firefox.nix @@ -0,0 +1,22 @@ +{ config, lib, pkgs, ... }: + +{ + + xdg.mimeApps = rec { + enable = true; + defaultApplications = { + "text/html" = [ "firefox.desktop" ]; + "x-scheme-handler/http" = [ "firefox.desktop" ]; + "x-scheme-handler/https" = [ "firefox.desktop" ]; + "x-scheme-handler/ftp" = [ "firefox.desktop" ]; + "x-scheme-handler/chrome" = [ "firefox.desktop" ]; + "application/x-extension-htm" = [ "firefox.desktop" ]; + "application/x-extension-html" = [ "firefox.desktop" ]; + "application/x-extension-shtml" = [ "firefox.desktop" ]; + "application/xhtml+xml" = [ "firefox.desktop" ]; + "application/x-extension-xhtml" = [ "firefox.desktop" ]; + "application/x-extension-xht" = [ "firefox.desktop" ]; + }; + associations.added = defaultApplications; + }; +} diff --git a/users/glittershark/system/home/modules/games.nix b/users/glittershark/system/home/modules/games.nix new file mode 100644 index 000000000000..da54f99e5b39 --- /dev/null +++ b/users/glittershark/system/home/modules/games.nix @@ -0,0 +1,59 @@ +{ config, lib, pkgs, ... }: + +with pkgs; +with lib; + +let + + df-orig = dwarf-fortress-packages.dwarf-fortress-original; + + df-full = (dwarf-fortress-packages.dwarf-fortress-full.override { + theme = null; + enableIntro = false; + enableFPS = true; + }); + + init = runCommand "init.txt" {} '' + substitute "${df-orig}/data/init/init.txt" $out \ + --replace "[INTRO:YES]" "[INTRO:NO]" \ + --replace "[VOLUME:255]" "[VOLUME:0]" \ + --replace "[FPS:NO]" "[FPS:YES]" + ''; + + d_init = runCommand "d_init.txt" {} '' + substitute "${df-orig}/data/init/d_init.txt" $out \ + --replace "[AUTOSAVE:NONE]" "[AUTOSAVE:SEASONAL]" \ + --replace "[AUTOSAVE_PAUSE:NO]" "[AUTOSAVE_PAUSE:YES]" \ + --replace "[INITIAL_SAVE:NO]" "[INITIAL_SAVE:YES]" \ + --replace "[EMBARK_WARNING_ALWAYS:NO]" "[EMBARK_WARNING_ALWAYS:YES]" \ + --replace "[VARIED_GROUND_TILES:YES]" "[VARIED_GROUND_TILES:NO]" \ + --replace "[SHOW_FLOW_AMOUNTS:NO]" "[SHOW_FLOW_AMOUNTS:YES]" + ''; + + df = runCommand "dwarf-fortress" {} '' + mkdir -p $out/bin + sed \ + -e '4icp -f ${init} "$DF_DIR/data/init/init.txt"' \ + -e '4icp -f ${d_init} "$DF_DIR/data/init/d_init.txt"' \ + < "${df-full}/bin/dwarf-fortress" >"$out/bin/dwarf-fortress" + + shopt -s extglob + ln -s ${df-full}/bin/!(dwarf-fortress) $out/bin + + chmod +x $out/bin/dwarf-fortress + ''; + +in mkMerge [ + { + home.packages = [ + crawl + xonotic + ]; + } + (mkIf stdenv.isLinux { + home.packages = [ + df + multimc + ]; + }) +] diff --git a/users/glittershark/system/home/modules/i3.nix b/users/glittershark/system/home/modules/i3.nix new file mode 100644 index 000000000000..cf1fa325dd4c --- /dev/null +++ b/users/glittershark/system/home/modules/i3.nix @@ -0,0 +1,364 @@ +{ config, lib, pkgs, ... 
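# Annotation (not in the original file): in the module below, keybindings are
# built with mkMerge over workspaces 0-9 plus one literal attribute set, the
# window decorations and py3status bar are sized through the
# system.machine.i3FontSize option, and the status bar shells out to
# emacsclient for the current org clock task and inbox count.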
}: +let + mod = "Mod4"; + solarized = import ../common/solarized.nix; + # TODO pull this out into lib + emacsclient = eval: pkgs.writeShellScript "emacsclient-eval" '' + msg=$(emacsclient --eval '${eval}' 2>&1) + echo "''${msg:1:-1}" + ''; + screenlayout = { + home = pkgs.writeShellScript "screenlayout_home.sh" '' + xrandr \ + --output eDP-1 --mode 1920x1200 --pos 0x2160 --rotate normal \ + --output DP-1 --off \ + --output DP-2 --off \ + --output DP-3 --mode 3840x2160 --pos 0x0 --rotate normal \ + --output DP-4 --off + ''; + }; + + inherit (builtins) map; + inherit (lib) mkMerge range; +in { + options = with lib; { + system.machine.wirelessInterface = mkOption { + description = '' + Name of the primary wireless interface. Used by i3status, etc. + ''; + default = "wlp3s0"; + type = types.str; + }; + + system.machine.i3FontSize = mkOption { + description = "Font size to use in i3 window decorations etc."; + default = 6; + type = types.int; + }; + }; + + config = + let decorationFont = "MesloLGSDZ ${toString config.system.machine.i3FontSize}"; in + { + home.packages = with pkgs; [ + rofi + rofi-pass + python38Packages.py3status + i3lock + i3status + dconf # for gtk + + # Screenshots + maim + + # GIFs + picom + peek + + (pkgs.writeShellScriptBin "lock" '' + playerctl pause + ${pkgs.i3lock}/bin/i3lock -c 222222 + '') + ]; + + xsession.scriptPath = ".xsession"; + + xsession.windowManager.i3 = { + enable = true; + config = { + modifier = mod; + keybindings = + mkMerge ( + (map + (n: { + "${mod}+${toString n}" = + "workspace ${toString n}"; + "${mod}+Shift+${toString n}" = + "move container to workspace ${toString n}"; + }) + (range 0 9)) + ++ [(rec { + "${mod}+h" = "focus left"; + "${mod}+j" = "focus down"; + "${mod}+k" = "focus up"; + "${mod}+l" = "focus right"; + "${mod}+semicolon" = "focus parent"; + + "${mod}+Shift+h" = "move left"; + "${mod}+Shift+j" = "move down"; + "${mod}+Shift+k" = "move up"; + "${mod}+Shift+l" = "move right"; + + "${mod}+Shift+x" = "kill"; + + "${mod}+Return" = "exec alacritty"; + + "${mod}+Shift+s" = "split h"; + "${mod}+Shift+v" = "split v"; + "${mod}+e" = "layout toggle split"; + "${mod}+w" = "layout tabbed"; + "${mod}+s" = "layout stacking"; + + "${mod}+f" = "fullscreen"; + + "${mod}+Shift+r" = "restart"; + + "${mod}+r" = "mode resize"; + + # Marks + "${mod}+Shift+m" = ''exec i3-input -F "mark %s" -l 1 -P 'Mark: ' ''; + "${mod}+m" = ''exec i3-input -F '[con_mark="%s"] focus' -l 1 -P 'Go to: ' ''; + + # Screenshots + "${mod}+q" = "exec \"maim | xclip -selection clipboard -t image/png\""; + "${mod}+Shift+q" = "exec \"maim -s | xclip -selection clipboard -t image/png\""; + "${mod}+Ctrl+q" = "exec ${pkgs.writeShellScript "peek.sh" '' + ${pkgs.picom}/bin/picom & + picom_pid=$! 
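          # Annotation (not in the original script): start a throwaway picom
          # compositor, record with peek while it runs, then SIGINT the
          # compositor once peek exits; "|| true" keeps the cleanup running
          # even if the recording is cancelled.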
+ ${pkgs.peek}/bin/peek || true + kill -SIGINT $picom_pid + ''}"; + + # Launching applications + "${mod}+u" = "exec ${pkgs.writeShellScript "rofi" '' + rofi \ + -modi 'combi' \ + -combi-modi "window,drun,ssh,run" \ + -font '${decorationFont}' \ + -show combi + ''}"; + + # Passwords + "${mod}+p" = "exec rofi-pass -font '${decorationFont}'"; + + # Media + "XF86AudioPlay" = "exec playerctl play-pause"; + "XF86AudioNext" = "exec playerctl next"; + "XF86AudioPrev" = "exec playerctl previous"; + "XF86AudioRaiseVolume" = "exec pulseaudio-ctl up"; + "XF86AudioLowerVolume" = "exec pulseaudio-ctl down"; + "XF86AudioMute" = "exec pulseaudio-ctl mute"; + + # Lock + Pause = "exec lock"; + + # Sleep/hibernate + # "${mod}+Escape" = "exec systemctl suspend"; + # "${mod}+Shift+Escape" = "exec systemctl hibernate"; + + # Scratch buffer + "${mod}+minus" = "scratchpad show"; + "${mod}+Shift+minus" = "move scratchpad"; + "${mod}+space" = "focus mode_toggle"; + "${mod}+Shift+space" = "floating toggle"; + + # Screen Layout + "${mod}+Shift+t" = "exec xrandr --auto"; + "${mod}+t" = "exec ${screenlayout.home}"; + "${mod}+Ctrl+t" = "exec ${pkgs.writeShellScript "fix_term.sh" '' + xrandr --output eDP-1 --off && ${screenlayout.home} + ''}"; + + # Notifications + "${mod}+Shift+n" = "exec killall -SIGUSR1 .dunst-wrapped"; + "${mod}+n" = "exec killall -SIGUSR2 .dunst-wrapped"; + })]); + + fonts = [ decorationFont ]; + + colors = with solarized; rec { + focused = { + border = base01; + background = base01; + text = base3; + indicator = red; + childBorder = base02; + }; + focusedInactive = focused // { + border = base03; + background = base03; + # text = base1; + }; + unfocused = focusedInactive; + background = base03; + }; + + modes.resize = { + l = "resize shrink width 5 px or 5 ppt"; + k = "resize grow height 5 px or 5 ppt"; + j = "resize shrink height 5 px or 5 ppt"; + h = "resize grow width 5 px or 5 ppt"; + + Return = "mode \"default\""; + }; + + bars = [{ + statusCommand = + let i3status-conf = pkgs.writeText "i3status.conf" '' + general { + output_format = i3bar + colors = true + color_good = "#859900" + + interval = 1 + } + + order += "external_script current_task" + order += "external_script inbox" + order += "spotify" + order += "volume_status" + order += "wireless ${config.system.machine.wirelessInterface}" + # order += "ethernet enp3s0f0" + order += "cpu_usage" + order += "battery 0" + # order += "volume master" + order += "time" + order += "tztime utc" + + mpd { + format = "%artist - %album - %title" + } + + wireless ${config.system.machine.wirelessInterface} { + format_up = "W: (%quality - %essid - %bitrate) %ip" + format_down = "W: -" + } + + ethernet enp3s0f0 { + format_up = "E: %ip" + format_down = "E: -" + } + + battery 0 { + format = "%status %percentage" + path = "/sys/class/power_supply/BAT%d/uevent" + low_threshold = 10 + } + + cpu_usage { + format = "CPU: %usage" + } + + load { + format = "%5min" + } + + time { + format = " %a %h %d ⌚ %I:%M " + } + + spotify { + color_playing = "#fdf6e3" + color_paused = "#93a1a1" + format_stopped = "" + format_down = "" + format = "{title} - {artist} ({album})" + } + + external_script inbox { + script_path = '${emacsclient "(grfn/num-inbox-items-message)"}' + format = 'Inbox: {output}' + cache_timeout = 120 + color = "#93a1a1" + } + + external_script current_task { + script_path = '${emacsclient "(grfn/org-current-clocked-in-task-message)"}' + # format = '{output}' + cache_timeout = 60 + color = "#93a1a1" + } + + tztime utc { + timezone = "UTC" + format = " %H·%M " 
+ } + + volume_status { + format = "☊ {percentage}" + format_muted = "☊ X" + # device = "default" + # mixer_idx = 0 + } + ''; + in "py3status -c ${i3status-conf}"; + fonts = [ decorationFont ]; + position = "top"; + colors = with solarized; rec { + background = base03; + statusline = base3; + separator = base1; + activeWorkspace = { + border = base03; + background = base1; + text = base3; + }; + focusedWorkspace = activeWorkspace; + inactiveWorkspace = activeWorkspace // { + background = base01; + }; + urgentWorkspace = activeWorkspace // { + background = red; + }; + }; + }]; + }; + }; + + services.dunst = { + enable = true; + settings = with solarized; { + global = { + font = "MesloLGSDZ ${toString (config.system.machine.i3FontSize * 1.5)}"; + allow_markup = true; + format = "<b>%s</b>\n%b"; + sort = true; + alignment = "left"; + geometry = "600x15-40+40"; + idle_threshold = 120; + separator_color = "frame"; + separator_height = 1; + word_wrap = true; + padding = 8; + horizontal_padding = 8; + }; + + frame = { + width = 0; + color = "#aaaaaa"; + }; + + shortcuts = { + close = "ctrl+space"; + close_all = "ctrl+shift+space"; + history = "ctrl+grave"; + context = "ctrl+shift+period"; + }; + + urgency_low = { + background = base03; + foreground = base3; + timeout = 5; + }; + + urgency_normal = { + background = base02; + foreground = base3; + timeout = 7; + }; + + urgency_critical = { + background = red; + foreground = base3; + timeout = 0; + }; + }; + }; + + gtk = { + enable = true; + iconTheme.name = "Adwaita"; + theme.name = "Adwaita"; + }; + }; +} diff --git a/users/glittershark/system/home/modules/lib/cloneRepo.nix b/users/glittershark/system/home/modules/lib/cloneRepo.nix new file mode 100644 index 000000000000..dc487dc6bd05 --- /dev/null +++ b/users/glittershark/system/home/modules/lib/cloneRepo.nix @@ -0,0 +1,67 @@ +{ lib, config, ... }: +with lib; +{ + options = { + grfn.impure.clonedRepos = mkOption { + description = "Repositories to clone"; + default = {}; + type = with types; loaOf ( + let sm = submodule { + options = { + url = mkOption { + type = nullOr str; + description = "URL of repository to clone"; + default = null; + }; + + github = mkOption { + type = nullOr str; + description = "Github owner/repo of repository to clone"; + default = null; + }; + + path = mkOption { + type = str; + description = "Path to clone to"; + }; + + onClone = mkOption { + type = str; + description = '' + Shell command to run after cloning the repo for the first time. + Runs inside the repo itself. + ''; + default = ""; + }; + + after = mkOption { + type = listOf str; + description = "Activation hooks that this repository must be cloned after"; + default = []; + }; + }; + }; + in addCheck sm (cr: (! isNull cr.url || ! isNull cr.github)) + ); + }; + }; + + config = { + home.activation = + mapAttrs + (_: { + url, path, github, onClone, after, ... + }: + let repoURL = if isNull url then "git@github.com:${github}" else url; + in hm.dag.entryAfter (["writeBoundary"] ++ after) '' + $DRY_RUN_CMD mkdir -p $(dirname "${path}") + if [[ ! -d ${path} ]]; then + $DRY_RUN_CMD git clone "${repoURL}" "${path}" + pushd ${path} + $DRY_RUN_CMD ${onClone} + popd + fi + '') + config.grfn.impure.clonedRepos; + }; +} diff --git a/users/glittershark/system/home/modules/lib/zshFunctions.nix b/users/glittershark/system/home/modules/lib/zshFunctions.nix new file mode 100644 index 000000000000..7c39b3478cfd --- /dev/null +++ b/users/glittershark/system/home/modules/lib/zshFunctions.nix @@ -0,0 +1,21 @@ +{ config, lib, pkgs, ... 
}: + +with lib; + +{ + options = { + programs.zsh.functions = mkOption { + description = "An attribute set that maps function names to their source"; + default = {}; + type = with types; attrsOf (either str path); + }; + }; + + config.programs.zsh.initExtra = concatStringsSep "\n" ( + mapAttrsToList (name: funSrc: '' + function ${name}() { + ${funSrc} + } + '') config.programs.zsh.functions + ); +} diff --git a/users/glittershark/system/home/modules/nixos-logo.txt b/users/glittershark/system/home/modules/nixos-logo.txt new file mode 100644 index 000000000000..d4b16b44f0bf --- /dev/null +++ b/users/glittershark/system/home/modules/nixos-logo.txt @@ -0,0 +1,26 @@ + [38;5;m([38;5;067m([38;5;067m([38;5;067m([38;5;067m([38;5;m( [38;5;m#[38;5;110m#[38;5;110m#[38;5;110m%[38;5;110m#[38;5;110m#[38;5;110m#[38;5;110m#[38;5;110m#[38;5;m# [38;5;m#[38;5;110m#[38;5;110m%[38;5;110m#[38;5;110m#[38;5;110m#[38;5;m/ + [38;5;m,[38;5;068m([38;5;067m([38;5;068m([38;5;067m([38;5;067m([38;5;067m([38;5;067m([38;5;061m/[38;5;m( [38;5;m#[38;5;110m%[38;5;110m#[38;5;110m%[38;5;110m#[38;5;110m%[38;5;110m#[38;5;110m%[38;5;m# [38;5;m.[38;5;110m#[38;5;110m%[38;5;110m#[38;5;110m%[38;5;110m#[38;5;110m%[38;5;110m#[38;5;110m%[38;5;m# + [38;5;m([38;5;067m([38;5;067m([38;5;067m([38;5;067m([38;5;067m([38;5;061m/[38;5;061m/[38;5;m/ [38;5;m%[38;5;110m#[38;5;110m#[38;5;110m#[38;5;110m#[38;5;110m#[38;5;110m#[38;5;110m#[38;5;110m%[38;5;m. [38;5;m#[38;5;110m#[38;5;110m#[38;5;110m#[38;5;110m#[38;5;110m%[38;5;110m#[38;5;110m#[38;5;110m#[38;5;m/ + [38;5;m([38;5;067m([38;5;067m([38;5;067m([38;5;067m([38;5;061m/[38;5;067m([38;5;061m/[38;5;061m/[38;5;m, [38;5;m/[38;5;110m#[38;5;110m#[38;5;110m%[38;5;110m#[38;5;110m#[38;5;110m#[38;5;110m%[38;5;110m#[38;5;110m#[38;5;110m#[38;5;110m%[38;5;110m#[38;5;110m#[38;5;110m#[38;5;110m#[38;5;110m#[38;5;m# + [38;5;m([38;5;067m([38;5;067m([38;5;061m/[38;5;061m/[38;5;061m/[38;5;061m/[38;5;061m/[38;5;m/ [38;5;m#[38;5;110m#[38;5;110m#[38;5;110m#[38;5;110m#[38;5;110m%[38;5;110m#[38;5;110m#[38;5;110m#[38;5;110m#[38;5;110m#[38;5;110m#[38;5;110m#[38;5;110m#[38;5;m( + [38;5;m.[38;5;068m([38;5;068m([38;5;068m([38;5;068m([38;5;068m([38;5;068m([38;5;068m([38;5;068m([38;5;068m([38;5;067m([38;5;067m([38;5;067m([38;5;067m([38;5;067m([38;5;061m([38;5;061m/[38;5;061m/[38;5;061m/[38;5;061m/[38;5;061m/[38;5;061m/[38;5;061m/[38;5;061m/[38;5;061m/[38;5;061m/[38;5;061m/[38;5;061m/[38;5;061m/[38;5;061m/[38;5;061m/[38;5;m#[38;5;110m%[38;5;110m%[38;5;110m%[38;5;110m#[38;5;110m#[38;5;110m#[38;5;110m#[38;5;110m#[38;5;110m#[38;5;110m#[38;5;m# [38;5;m([38;5;m( + [38;5;m([38;5;067m([38;5;067m([38;5;067m([38;5;068m([38;5;067m([38;5;067m([38;5;067m([38;5;067m([38;5;067m([38;5;067m([38;5;067m([38;5;067m([38;5;067m([38;5;067m([38;5;061m/[38;5;061m/[38;5;061m/[38;5;061m/[38;5;061m/[38;5;061m/[38;5;061m/[38;5;061m/[38;5;061m/[38;5;061m/[38;5;061m/[38;5;061m/[38;5;061m/[38;5;061m/[38;5;061m/[38;5;061m/[38;5;061m/[38;5;061m/[38;5;m/[38;5;m#[38;5;110m#[38;5;110m#[38;5;110m#[38;5;110m#[38;5;110m#[38;5;110m#[38;5;110m#[38;5;m# [38;5;m.[38;5;m([38;5;067m([38;5;067m([38;5;m( + 
+ [users/glittershark/system/home/modules/nixos-logo.txt: ANSI-colored NixOS logo art; the escape characters were stripped in this dump, leaving only unreadable 256-color code residue, so the remaining rows are not reproduced here]
[38;5;067m([38;5;067m([38;5;067m([38;5;067m([38;5;067m([38;5;067m([38;5;067m([38;5;067m([38;5;m( [38;5;m/[38;5;067m([38;5;067m([38;5;067m([38;5;067m([38;5;067m([38;5;067m([38;5;068m([38;5;067m([38;5;m( [38;5;m.[38;5;110m#[38;5;110m#[38;5;110m#[38;5;110m%[38;5;110m#[38;5;110m#[38;5;110m#[38;5;110m#[38;5;m% + [38;5;m([38;5;068m([38;5;067m([38;5;068m([38;5;067m([38;5;m( [38;5;m([38;5;068m([38;5;067m([38;5;068m([38;5;067m([38;5;068m([38;5;067m([38;5;068m([38;5;m( [38;5;m%[38;5;110m#[38;5;110m%[38;5;110m#[38;5;110m%[38;5;110m#[38;5;m/ diff --git a/users/glittershark/system/home/modules/obs.nix b/users/glittershark/system/home/modules/obs.nix new file mode 100644 index 000000000000..39f7bbe3c6fc --- /dev/null +++ b/users/glittershark/system/home/modules/obs.nix @@ -0,0 +1,69 @@ +{ config, lib, pkgs, ... }: + +with pkgs; + +let + libuiohook = stdenv.mkDerivation rec { + pname = "libuiohook"; + version = "1.1"; + src = fetchFromGitHub { + owner = "kwhat"; + repo = "libuiohook"; + rev = version; + sha256 = "1isfxn3cfrdqq22d3mlz2lzm4asf9gprs7ww2xy9c3j3srk9kd7r"; + }; + + preConfigure = '' + ./bootstrap.sh + ''; + + nativeBuildInputs = [ pkg-config ]; + buildInputs = [ + libtool autoconf automake + x11 + xorg.libXtst + xorg.libXinerama + xorg.libxkbfile + libxkbcommon + ]; + }; + + obs-input-overlay = stdenv.mkDerivation rec { + pname = "obs-input-overlay"; + version = "4.8"; + src = fetchFromGitHub { + owner = "univrsal"; + repo = "input-overlay"; + rev = "v${version}"; + sha256 = "1dklg0dx9ijwyhgwcaqz859rbpaivmqxqvh9w3h4byrh5pnkz8bf"; + fetchSubmodules = true; + }; + + nativeBuildInputs = [ cmake ]; + buildInputs = [ obs-studio libuiohook ]; + + postPatch = '' + sed -i CMakeLists.txt \ + -e '2iinclude(${obs-studio.src}/cmake/Modules/ObsHelpers.cmake)' \ + -e '2ifind_package(LibObs REQUIRED)' + ''; + + cmakeFlags = [ + "-Wno-dev" + ]; + }; +in +{ + home.packages = [ + obs-studio + obs-input-overlay + ]; + + xdg.configFile."obs-studio/plugins/input-overlay/bin/64bit/input-overlay.so".source = + "${obs-input-overlay}/lib/obs-plugins/input-overlay.so"; + xdg.configFile."obs-studio/plugins/input-overlay/data".source = + "${obs-input-overlay}/share/obs/obs-plugins/input-overlay"; + + xdg.configFile."obs-studio/plugins/obs-v4l2sink/bin/64bit/obs-v4l2sink.so".source = + "${pkgs.obs-v4l2sink}/share/obs/obs-plugins/v4l2sink/bin/64bit/v4l2sink.so"; +} diff --git a/users/glittershark/system/home/modules/ptt.nix b/users/glittershark/system/home/modules/ptt.nix new file mode 100644 index 000000000000..436c8f261797 --- /dev/null +++ b/users/glittershark/system/home/modules/ptt.nix @@ -0,0 +1,44 @@ +{ config, lib, pkgs, ... 
}: + +let + + pttKeycode = "152"; + sourceID = "3"; + + mute = pkgs.writeShellScript "mute-mic" '' + xset -r ${pttKeycode} + ${pkgs.pulseaudio}/bin/pactl set-source-mute ${sourceID} 1 + ''; + + unmute = pkgs.writeShellScript "unmute-mic" '' + xset -r ${pttKeycode} + ${pkgs.pulseaudio}/bin/pactl set-source-mute ${sourceID} 0 + ''; + +in + +{ + home.packages = with pkgs; [ + xbindkeys + ]; + + + home.file.".xbindkeysrc.scm".text = '' + (xbindkey '("c:${pttKeycode}") "${unmute}") + (xbindkey '(release "c:${pttKeycode}") "${mute}") + ''; + + systemd.user.services."xbindkeys" = { + Unit = { + Description = "Keybind daemon for push-to-talk"; + After = [ "graphical-session-pre.target" ]; + PartOf = [ "graphical-session.target" ]; + }; + + Install = { WantedBy = [ "graphical-session.target" ]; }; + + Service = { + ExecStart = "${pkgs.xbindkeys}/bin/xbindkeys -n -v"; + }; + }; +} diff --git a/users/glittershark/system/home/modules/pure.zsh-theme b/users/glittershark/system/home/modules/pure.zsh-theme new file mode 100755 index 000000000000..b4776e81596d --- /dev/null +++ b/users/glittershark/system/home/modules/pure.zsh-theme @@ -0,0 +1,151 @@ +#!/bin/zsh -f +# vim: ft=zsh: +# MIT License +# For my own and others sanity +# git: +# %b => current branch +# %a => current action (rebase/merge) +# prompt: +# %F => color dict +# %f => reset color +# %~ => current path +# %* => time +# %n => username +# %m => shortname host +# %(?..) => prompt conditional - %(condition.true.false) + +# turns seconds into human readable time +# 165392 => 1d 21h 56m 32s +prompt_pure_human_time() { + local tmp=$1 + local days=$(( tmp / 60 / 60 / 24 )) + local hours=$(( tmp / 60 / 60 % 24 )) + local minutes=$(( tmp / 60 % 60 )) + local seconds=$(( tmp % 60 )) + (( $days > 0 )) && echo -n "${days}d " + (( $hours > 0 )) && echo -n "${hours}h " + (( $minutes > 0 )) && echo -n "${minutes}m " + echo "${seconds}s" +} + +is_git_repo() { + command git rev-parse --is-inside-work-tree &>/dev/null + return $? +} + +# fastest possible way to check if repo is dirty +prompt_pure_git_dirty() { + # check if we're in a git repo + is_git_repo || return + # check if it's dirty + [[ "$PURE_GIT_UNTRACKED_DIRTY" == 0 ]] && local umode="-uno" || local umode="-unormal" + command test -n "$(git status --porcelain --ignore-submodules ${umode})" + + (($? 
== 0)) && echo '*' +} + +prompt_pure_git_wip() { + is_git_repo || return + local subject="$(command git show --pretty=%s --quiet HEAD 2>/dev/null)" + [ "$subject" == 'wip' ] && echo '[WIP]' +} + +# displays the exec time of the last command if set threshold was exceeded +prompt_pure_cmd_exec_time() { + local stop=$EPOCHSECONDS + local start=${cmd_timestamp:-$stop} + integer elapsed=$stop-$start + (($elapsed > ${PURE_CMD_MAX_EXEC_TIME:=5})) && prompt_pure_human_time $elapsed +} + +prompt_pure_preexec() { + cmd_timestamp=$EPOCHSECONDS + + # shows the current dir and executed command in the title when a process is active + print -Pn "\e]0;" + echo -nE "$PWD:t: $2" + print -Pn "\a" +} + +# string length ignoring ansi escapes +prompt_pure_string_length() { + echo ${#${(S%%)1//(\%([KF1]|)\{*\}|\%[Bbkf])}} +} + +prompt_pure_nix_info() { + local packages_info='' + if [[ -z $NIX_SHELL_PACKAGES ]]; then + packages_info='[nix-shell]' + else + packages_info="{ $NIX_SHELL_PACKAGES }" + fi + + case $IN_NIX_SHELL in + 'pure') + echo "$fg_bold[green][nix-shell] " + ;; + 'impure') + echo "$fg_bold[magenta][nix-shell] " + ;; + *) ;; + esac +} + +prompt_pure_precmd() { + # shows the full path in the title + print -Pn '\e]0;%~\a' + + # git info + vcs_info + + local prompt_pure_preprompt="\n$(prompt_pure_nix_info)$fg_bold[green]$prompt_pure_username%F{blue}%~%F{yellow}$vcs_info_msg_0_`prompt_pure_git_dirty` $fg_no_bold[red]`prompt_pure_git_wip`%f %F{yellow}`prompt_pure_cmd_exec_time`%f " + print -P $prompt_pure_preprompt + + # check async if there is anything to pull + # (( ${PURE_GIT_PULL:-1} )) && { + # # check if we're in a git repo + # command git rev-parse --is-inside-work-tree &>/dev/null && + # # make sure working tree is not $HOME + # [[ "$(command git rev-parse --show-toplevel)" != "$HOME" ]] && + # # check check if there is anything to pull + # command git fetch &>/dev/null && + # # check if there is an upstream configured for this branch + # command git rev-parse --abbrev-ref @'{u}' &>/dev/null && { + # local arrows='' + # (( $(command git rev-list --right-only --count HEAD...@'{u}' 2>/dev/null) > 0 )) && arrows='⇣' + # (( $(command git rev-list --left-only --count HEAD...@'{u}' 2>/dev/null) > 0 )) && arrows+='⇡' + # print -Pn "\e7\e[A\e[1G\e[`prompt_pure_string_length $prompt_pure_preprompt`C%F{cyan}${arrows}%f\e8" + # } + # } &! + + # reset value since `preexec` isn't always triggered + unset cmd_timestamp +} + + +prompt_pure_setup() { + # prevent percentage showing up + # if output doesn't end with a newline + export PROMPT_EOL_MARK='' + + prompt_opts=(cr subst percent) + + zmodload zsh/datetime + autoload -Uz add-zsh-hook + autoload -Uz vcs_info + + add-zsh-hook precmd prompt_pure_precmd + add-zsh-hook preexec prompt_pure_preexec + + zstyle ':vcs_info:*' enable git + zstyle ':vcs_info:git*' formats ' %b' + zstyle ':vcs_info:git*' actionformats ' %b|%a' + + # show username@host if logged in through SSH + [[ "$SSH_CONNECTION" != '' ]] && prompt_pure_username='%n@%m ' + + # prompt turns red if the previous command didn't exit with 0 + PROMPT='%(?.%F{green}.%F{red})❯%f ' +} + +prompt_pure_setup "$@" diff --git a/users/glittershark/system/home/modules/rtlsdr.nix b/users/glittershark/system/home/modules/rtlsdr.nix new file mode 100644 index 000000000000..a1c717617a62 --- /dev/null +++ b/users/glittershark/system/home/modules/rtlsdr.nix @@ -0,0 +1,21 @@ +{ config, lib, pkgs, ... 
}: + +let + + nixpkgs-gnuradio = import (pkgs.fetchFromGitHub { + owner = "doronbehar"; + repo = "nixpkgs"; + rev = "712561aa5f10bfe6112a1726a912585612a70d1f"; + sha256 = "04yqflbwjcfl9vlplphpj82csqqz9k6m3nj1ybhwgmsc4by7vivl"; + }) {}; + +in + +{ + home.packages = with pkgs; [ + rtl-sdr + nixpkgs-gnuradio.gnuradio + nixpkgs-gnuradio.gnuradio.plugins.osmosdr + nixpkgs-gnuradio.gqrx + ]; +} diff --git a/users/glittershark/system/home/modules/shell.nix b/users/glittershark/system/home/modules/shell.nix new file mode 100644 index 000000000000..09d2c6356172 --- /dev/null +++ b/users/glittershark/system/home/modules/shell.nix @@ -0,0 +1,181 @@ +{ config, lib, pkgs, ... }: +let + shellAliases = rec { + # NixOS stuff + hms = "home-manager switch"; + nor = "sudo nixos-rebuild switch"; + nrs = nor; + nrb = "sudo nixos-rebuild boot"; + ncg = "nix-collect-garbage"; + vihome = "vim ~/.config/nixpkgs/home.nix && home-manager switch"; + virc = "vim ~/code/system/home/modules/shell.nix && home-manager switch && source ~/.zshrc"; + visystem = "sudo vim /etc/nixos/configuration.nix && sudo nixos-rebuild switch"; + + # Nix + ns = "nix-shell"; + nb = "nix build -f ."; + nbl = "nix build -f . --builders ''"; # nix build local + lwo = "lorri watch --once"; + + # Docker and friends + "dcu" = "docker-compose up"; + "dcud" = "docker-compose up -d"; + "dc" = "docker-compose"; + "dcr" = "docker-compose restart"; + "dclf" = "docker-compose logs -f"; + "dck" = "docker"; + "dockerclean" = "dockercleancontainers && dockercleanimages"; + "dockercleanimages" = "docker images -a --no-trunc | grep none | awk '{print \$$3}' | xargs -L 1 -r docker rmi"; + "dockercleancontainers" = "docker ps -a --no-trunc| grep 'Exit' | awk '{print \$$1}' | xargs -L 1 -r docker rm"; + + # Directories + stck = "dirs -v"; + b= "cd ~1"; + ".." = "cd .."; + "..." = "cd ../.."; + "...." = "cd ../../.."; + "....." = "cd ../../../.."; + + # Aliases from old config + "http" = "http --style solarized"; + "grep" = "grep $GREP_OPTIONS"; + "bak" = "~/bin/backup.sh"; + "xmm" = "xmodmap ~/.Xmodmap"; + "asdflkj" = "asdf"; + "asdf" = "asdfghjkl"; + "asdfghjkl" = "echo \"Having some trouble?\""; + "ift" = "sudo iftop -i wlp3s0"; + "first" = "awk '{print \$$1}'"; + "cmt" = "git log --oneline | fzf-tmux | awk '{print \$$1}'"; + "workmon" = "xrandr --output DP-2 --pos 1440x900 --primary"; + "vi" = "vim"; + "adbdev" = "adb devices"; + "adbcon" = "adb connect $GNEX_IP"; + "mpalb" = "mpc search album"; + "mpart" = "mpc search artist"; + "mps" = "mpc search"; + "mpa" = "mpc add"; + "mpt" = "mpc toggle"; + "mpl" = "mpc playlist"; + "dsstore" = "find . 
-name '*.DS_Store' -type f -ls -delete"; + "df" = "df -h"; + "fs" = "stat -f '%z bytes'"; + "ll" = "ls -al"; + "la" = "ls -a"; + }; +in { + home.packages = with pkgs; [ + zsh + autojump + ]; + + home.sessionVariables = { + EDITOR = "vim"; + LS_COLORS = "no=00:fi=00:di=01;34:ln=01;36:pi=40;33:so=01;35:do=01;35:bd=40;33;01:cd=40;33;01:or=40;31;01:ex=01;32:*.tar=01;31:*.tgz=01;31:*.arj=01;31:*.taz=01;31:*.lzh=01;31:*.zip=01;31:*.z=01;31:*.Z=01;31:*.gz=01;31:*.bz2=01;31:*.deb=01;31:*.rpm=01;31:*.jar=01;31:*.jpg=01;35:*.jpeg=01;35:*.gif=01;35:*.bmp=01;35:*.pbm=01;35:*.pgm=01;35:*.ppm=01;35:*.tga=01;35:*.xbm=01;35:*.xpm=01;35:*.tif=01;35:*.tiff=01;35:*.png=01;35:*.mov=01;35:*.mpg=01;35:*.mpeg=01;35:*.avi=01;35:*.fli=01;35:*.gl=01;35:*.dl=01;35:*.xcf=01;35:*.xwd=01;35:*.ogg=01;35:*.mp3=01;35:*.wav=01;35:"; + BROWSER = "firefox"; + BAT_THEME = "ansi-light"; + }; + + programs.bash = { + enable = true; + inherit shellAliases; + }; + + programs.zsh = { + enable = true; + enableAutosuggestions = true; + autocd = true; + + inherit shellAliases; + + history = rec { + save = 100000; + size = save; + }; + + oh-my-zsh = { + enable = true; + + plugins = [ + "battery" + "colorize" + "command-not-found" + "github" + "gitignore" + "postgres" + "systemd" + "themes" + "vi-mode" + ]; + + custom = "${pkgs.stdenv.mkDerivation { + name = "oh-my-zsh-custom"; + unpackPhase = ":"; + installPhase = '' + mkdir -p $out/themes + mkdir -p $out/custom/plugins + ln -s ${./pure.zsh-theme} $out/themes/pure.zsh-theme + ''; + }}"; + + theme = "pure"; + }; + + plugins = [{ + name = "pure-theme"; + src = pkgs.fetchFromGitHub { + owner = "sindresorhus"; + repo = "pure"; + rev = "0a92b02dd4172f6c64fdc9b81fe6cd4bddb0a23b"; + sha256 = "0l8jqhmmjn7p32hdjnv121xsjnqd2c0plhzgydv2yzrmqgyvx7cc"; + }; + }]; + + initExtraBeforeCompInit = '' + zstyle ':completion:*' completer _complete _ignored _correct _approximate + zstyle ':completion:*' matcher-list \'\' 'm:{[:lower:]}={[:upper:]} m:{[:lower:][:upper:]}={[:upper:][:lower:]} r:|[._- :]=** r:|=**' 'l:|=* r:|=*' + zstyle ':completion:*' max-errors 5 + zstyle ':completion:*' use-cache yes + zstyle ':completion::complete:grunt::options:' expire 1 + zstyle ':completion:*' prompt '%e errors' + # zstyle :compinstall filename '~/.zshrc' + autoload -Uz compinit + ''; + + initExtra = '' + source ${./zshrc} + source ${pkgs.fetchFromGitHub { + owner = "zsh-users"; + repo = "zsh-syntax-highlighting"; + rev = "7678a8a22780141617f809002eeccf054bf8f448"; + sha256 = "0xh4fbd54kvwwpqvabk8lpw7m80phxdzrd75q3y874jw0xx1a9q6"; + }}/zsh-syntax-highlighting.zsh + source ${pkgs.autojump}/share/autojump/autojump.zsh + source ${pkgs.fetchFromGitHub { + owner = "chisui"; + repo = "zsh-nix-shell"; + rev = "a65382a353eaee5a98f068c330947c032a1263bb"; + sha256 = "0l41ac5b7p8yyjvpfp438kw7zl9dblrpd7icjg1v3ig3xy87zv0n"; + }}/nix-shell.plugin.zsh + + export RPS1="" + autoload -U promptinit; promptinit + prompt pure + + if [[ "$TERM" == "dumb" ]]; then + unsetopt zle + unsetopt prompt_cr + unsetopt prompt_subst + unfunction precmd + unfunction preexec + export PS1='$ ' + fi + ''; + }; + + programs.fzf = { + enable = true; + enableBashIntegration = true; + enableZshIntegration = true; + }; +} diff --git a/users/glittershark/system/home/modules/tarsnap.nix b/users/glittershark/system/home/modules/tarsnap.nix new file mode 100644 index 000000000000..4bff19910f05 --- /dev/null +++ b/users/glittershark/system/home/modules/tarsnap.nix @@ -0,0 +1,64 @@ +{ config, lib, pkgs, ... 
}: + +{ + home.packages = with pkgs; [ + tarsnap + ]; + + home.file.".tarsnaprc".text = '' + ### Recommended options + + # Tarsnap cache directory + cachedir /home/grfn/.cache/tarsnap + + # Tarsnap key file + keyfile /home/grfn/.private/tarsnap.key + + # Don't archive files which have the nodump flag set. + nodump + + # Print statistics when creating or deleting archives. + print-stats + + # Create a checkpoint once per GB of uploaded data. + checkpoint-bytes 1G + + ### Commonly useful options + + # Use SI prefixes to make numbers printed by --print-stats more readable. + humanize-numbers + + ### Other options, not applicable to most systems + + # Aggressive network behaviour: Use multiple TCP connections when + # writing archives. Use of this option is recommended only in + # cases where TCP congestion control is known to be the limiting + # factor in upload performance. + #aggressive-networking + + # Exclude files and directories matching specified patterns. + # Only one file or directory per command; multiple "exclude" + # commands may be given. + #exclude + + # Include only files and directories matching specified patterns. + # Only one file or directory per command; multiple "include" + # commands may be given. + #include + + # Attempt to reduce tarsnap memory consumption. This option + # will slow down the process of creating archives, but may help + # on systems where the average size of files being backed up is + # less than 1 MB. + #lowmem + + # Try even harder to reduce tarsnap memory consumption. This can + # significantly slow down tarsnap, but reduces its memory usage + # by an additional factor of 2 beyond what the lowmem option does. + #verylowmem + + # Snapshot time. Use this option if you are backing up files + # from a filesystem snapshot rather than from a "live" filesystem. + #snaptime <file> + ''; +} diff --git a/users/glittershark/system/home/modules/twitter.nix b/users/glittershark/system/home/modules/twitter.nix new file mode 100644 index 000000000000..3cb2e90adc34 --- /dev/null +++ b/users/glittershark/system/home/modules/twitter.nix @@ -0,0 +1,23 @@ +{ pkgs, lib, ... }: + +{ + home.packages = with pkgs; [ + t + ]; + + home.sessionVariables = { + TWITTER_WHOAMI = "glittershark1"; + }; + + programs.zsh = { + shellAliases = { + "mytl" = "t tl $TWITTER_WHOAMI"; + }; + + functions = { + favelast = "t fave $(t tl -l $1 | head -n1 | cut -d' ' -f1)"; + rtlast = "t rt $(t tl -l $1 | head -n1 | cut -d' ' -f1)"; + tthread = "t reply $(t tl -l $TWITTER_WHOAMI | head -n1 | cut -d' ' -f1) $@"; + }; + }; +} diff --git a/users/glittershark/system/home/modules/vim.nix b/users/glittershark/system/home/modules/vim.nix new file mode 100644 index 000000000000..87d4309333dd --- /dev/null +++ b/users/glittershark/system/home/modules/vim.nix @@ -0,0 +1,47 @@ +{ config, pkgs, ... 
}: +{ + programs.neovim = { + enable = true; + viAlias = true; + vimAlias = true; + plugins = with pkgs.vimPlugins; [ + ctrlp + deoplete-nvim + syntastic + vim-abolish + vim-airline + vim-airline-themes + vim-bufferline + vim-closetag + # vim-colors-solarized + # solarized + (pkgs.vimUtils.buildVimPlugin { + name = "vim-colors-solarized"; + src = pkgs.fetchFromGitHub { + owner = "glittershark"; + repo = "vim-colors-solarized"; + rev = "4857c3221ec3f2693a45855154cb61a2cefb514d"; + sha256 = "0kqp5w14g7adaiinmixm7z3x4w74lv1lcgbqjbirx760f0wivf9y"; + }; + }) + vim-commentary + vim-dispatch + vim-endwise + vim-repeat + vim-fugitive + vim-markdown + vim-nix + vim-rhubarb + vim-sexp + vim-sexp-mappings-for-regular-people + vim-sleuth + vim-startify + vim-surround + vim-unimpaired + vinegar + ]; + extraConfig = '' + source ${./vimrc} + ''; + }; +} diff --git a/users/glittershark/system/home/modules/vimrc b/users/glittershark/system/home/modules/vimrc new file mode 100644 index 000000000000..3e33b5e2bee7 --- /dev/null +++ b/users/glittershark/system/home/modules/vimrc @@ -0,0 +1,1121 @@ +" vim:set fdm=marker fmr={{{,}}} ts=2 sts=2 sw=2 expandtab: + + +" Basic Options {{{ +set nocompatible +set modeline +set modelines=10 +syntax enable +filetype plugin indent on +set ruler +set showcmd +set number +set incsearch +set smartcase +set ignorecase +set scrolloff=10 +set tabstop=4 +set shiftwidth=4 +set softtabstop=4 +set nosmartindent +set expandtab +set noerrorbells visualbell t_vb= +set laststatus=2 +set hidden +let mapleader = ',' +let maplocalleader = '\' +set undofile +" set undodir=~/.vim/undo +set wildignore=*.pyc,*.o,.git +set clipboard=unnamedplus +" set backupdir=$HOME/.vim/backup +" set directory=$HOME/.vim/tmp +set foldmarker={{{,}}} +set colorcolumn=+1 +set concealcursor= +set formatoptions+=j +set wildmenu +set wildmode=longest,list:full +set noincsearch +" }}} + +" GUI options {{{ +set go-=m +set go-=T +set go-=r +set go-=L +set go-=e +set guifont=Meslo\ LG\ S\ DZ\ 9 +" }}} + +" Colors {{{ +" set t_Co=256 + +fu! ReverseBackground() + if &bg=="light" + se bg=dark + else + se bg=light + endif +endf +com! BgToggle call ReverseBackground() +nm <F12> :BgToggle<CR> + +set background=light +colorscheme solarized +" }}} + +" --------------------------------------------------------------------------- + +" CtrlP {{{ +let g:ctrlp_custom_ignore = { + \ 'dir': '(node_modules|target)' + \ } +let g:ctrlp_max_files = 0 +let g:ctrlp_max_depth = 100 +" }}} + +" YouCompleteMe {{{ +let g:ycm_semantic_triggers = { + \ 'c' : ['->', '.'], + \ 'objc' : ['->', '.'], + \ 'ocaml' : ['.', '#'], + \ 'cpp,objcpp' : ['->', '.', '::'], + \ 'perl' : ['->'], + \ 'php' : ['->', '::'], + \ 'cs,java,javascript,d,python,perl6,scala,vb,elixir,go' : ['.'], + \ 'vim' : ['re![_a-zA-Z]+[_\w]*\.'], + \ 'lua' : ['.', ':'], + \ 'erlang' : [':'], + \ 'clojure' : [], + \ 'haskell' : ['re!.*', '.', ' ', '('] + \ } + " \ 'haskell' : ['.', '(', ' '] + " \ 'ruby' : ['.', '::'], + " \ 'clojure' : ['(', '.', '/', '['] +" }}} + +" Neocomplete {{{ +if !has('nvim') + " Use neocomplete. + let g:neocomplete#enable_at_startup = 1 + " Use smartcase. + let g:neocomplete#enable_smart_case = 1 + " Set minimum syntax keyword length. + let g:neocomplete#sources#syntax#min_keyword_length = 3 + let g:neocomplete#lock_buffer_name_pattern = '\*ku\*' + + " Define dictionary. 
+ " let g:neocomplete#sources#dictionary#dictionaries = { + " \ 'default' : '', + " \ 'vimshell' : $HOME.'/.vimshell_hist', + " \ 'scheme' : $HOME.'/.gosh_completions' + " \ } + + " Define keyword. + if !exists('g:neocomplete#keyword_patterns') + let g:neocomplete#keyword_patterns = {} + endif + let g:neocomplete#keyword_patterns['default'] = '\h\w*' + + " Plugin key-mappings. + inoremap <expr><C-g> neocomplete#undo_completion() + inoremap <expr><C-l> neocomplete#complete_common_string() + + " Recommended key-mappings. + " <CR>: close popup and save indent. + inoremap <silent> <CR> <C-r>=<SID>my_cr_function()<CR> + function! s:my_cr_function() + return (pumvisible() ? "\<C-y>" : "" ) . "\<CR>" + " For no inserting <CR> key. + "return pumvisible() ? "\<C-y>" : "\<CR>" + endfunction + " <TAB>: completion. + inoremap <expr><TAB> pumvisible() ? "\<C-n>" : "\<TAB>" + " <C-h>, <BS>: close popup and delete backword char. + inoremap <expr><C-h> neocomplete#smart_close_popup()."\<C-h>" + inoremap <expr><BS> neocomplete#smart_close_popup()."\<C-h>" + " Close popup by <Space>. + "inoremap <expr><Space> pumvisible() ? "\<C-y>" : "\<Space>" + + " AutoComplPop like behavior. + "let g:neocomplete#enable_auto_select = 1 + + " Shell like behavior(not recommended). + "set completeopt+=longest + "let g:neocomplete#enable_auto_select = 1 + "let g:neocomplete#disable_auto_complete = 1 + "inoremap <expr><TAB> pumvisible() ? "\<Down>" : "\<C-x>\<C-u>" + + " Enable omni completion. + " autocmd FileType css setlocal omnifunc=csscomplete#CompleteCSS + " autocmd FileType html,markdown setlocal omnifunc=htmlcomplete#CompleteTags + " autocmd FileType javascript setlocal omnifunc=javascriptcomplete#CompleteJS + " autocmd FileType python setlocal omnifunc=pythoncomplete#Complete + " autocmd FileType xml setlocal omnifunc=xmlcomplete#CompleteTags + + " Enable heavy omni completion. + if !exists('g:neocomplete#sources#omni#input_patterns') + let g:neocomplete#sources#omni#input_patterns = {} + endif +endif +" }}} + +" Deoplete {{{ +if has('nvim') + let g:deoplete#enable_at_startup = 1 + + inoremap <silent> <CR> <C-r>=<SID>my_cr_function()<CR> + function! s:my_cr_function() + return (pumvisible() ? "\<C-y>" : "" ) . "\<CR>" + " For no inserting <CR> key. + "return pumvisible() ? "\<C-y>" : "\<CR>" + endfunction + " <TAB>: completion. + inoremap <expr><TAB> pumvisible() ? "\<C-n>" : "\<TAB>" + inoremap <expr><S-TAB> pumvisible() ? "\<C-p>" : "\<TAB>" +endif +" }}} + +" Neovim Terminal mode {{{ +if has('nvim') + tnoremap <Esc> <C-\><C-n> + nnoremap \\ :tabedit term://zsh<CR> + nnoremap q\ :call <SID>OpenRepl()<CR> + + if !exists('g:repl_size') + let g:repl_size=9 + endif + + function! s:OpenRepl() " {{{ + " Check if buffer exists and is open + if exists('s:repl_bufname') && bufexists(s:repl_bufname) && bufwinnr(s:repl_bufname) >=? 0 + " If so, just switch to it + execute bufwinnr(s:repl_bufname) . 'wincmd' 'w' + norm i + return + endif + + if !exists('b:console') + let b:console=$SHELL + endif + + let l:console_cmd = b:console + + execute 'bot' g:repl_size . 
'new' + set winfixheight nobuflisted + call termopen(l:console_cmd) + let s:repl_bufname = bufname('%') + norm i + endfunction " }}} +endif +" }}} + +" Tagbar options {{{ +let g:tagbar_autoclose = 1 +let g:tagbar_autofocus = 1 +let g:tagbar_compact = 1 +" }}} + +" delimitMate options {{{ +let g:delimitMate_expand_cr = 1 +" }}} + +" UltiSnips options {{{ +let g:UltiSnipsExpandTrigger = '<c-j>' + "g:UltiSnipsJumpForwardTrigger <c-j> + "g:UltiSnipsJumpBackwardTrigger <c-k> +" }}} + +" VDebug Options {{{ +let g:vdebug_options = {'server': '192.168.56.1'} +" }}} + +" Statusline {{{ +let g:airline_powerline_fonts=1 + +if !exists('g:airline_symbols') + let g:airline_symbols = {} +endif +let g:airline_symbols.space = "\ua0" + +let g:airline#extensions#tagbar#flags = 'f' +let g:airline#extensions#tabline#enabled = 1 +let g:airline#extensions#tabline#show_buffers = 0 +let g:airline#extensions#tabline#show_tabs = 1 +let g:airline#extensions#tabline#tab_min_count = 2 +let g:airline#extensions#tmuxline#enabled = 0 + +let g:tmuxline_theme = 'airline' +let g:tmuxline_preset = 'full' + +"set statusline= +"set statusline+=%2*[%n%H%M%R%W]%*\ " flags and buf no +"set statusline+=%-40f%<\ " path +"set statusline+=%=%40{fugitive#statusline()}\ " Vim status +"set statusline+=%1*%y%*%*\ " file type +"set statusline+=%10((%l,%c)%)\ " line and column +"set statusline+=%P " percentage of file +" }}} + +" Code review mode {{{ +fun! GetFontName() + return substitute(&guifont, '^\(.\{-}\)[0-9]*$', '\1', '') +endfun + +fun! <SID>CodeReviewMode() + let &guifont = GetFontName() . ' 15' +endfun +com! CodeReviewMode call <SID>CodeReviewMode() +" }}} + +" Syntastic {{{ +let g:syntastic_enable_signs = 0 + +" Python {{{ +let g:syntastic_python_checkers = ['flake8'] +let g:syntastic_python_flake8_post_args = "--ignore=E101,E223,E224,E301,E302,E303,E501,E701,W,F401,E111,E261" + +" }}} +" Javascript {{{ +let g:syntastic_javascript_checkers = ['eslint'] +let g:flow#autoclose = 1 +let g:flow#enable = 1 + +" augroup syntastic_javascript_jsx +" autocmd! +" autocmd BufReadPre,BufNewFile *.js +" autocmd BufReadPre,BufNewFile *.jsx +" \ let g:syntastic_javascript_checkers = ['jsxhint'] +" augroup END + +" }}} +" Haml {{{ +let g:syntastic_haml_checkers = ['haml_lint'] + +" }}} +" Html {{{ +let g:syntastic_html_checkers = [] + +" }}} +" Ruby {{{ +let g:syntastic_ruby_checkers = ['rubocop'] +" }}} +" SASS/SCSS {{{ +let g:syntastic_scss_checkers = ['scss_lint'] +" }}} +" Haskell {{{ +" let g:syntastic_haskell_checkers = ['ghc-mod'] +" }}} +" Elixir {{{ +let g:syntastic_elixir_checkers = ['elixir'] +let g:syntastic_enable_elixir_checker = 1 +" }}} +" }}} + +" Bufferline {{{ +let g:bufferline_echo=0 +" }}} + +" Eclim {{{ +let g:EclimCompletionMethod = 'omnifunc' +augroup eclim + au! + au FileType java call <SID>JavaSetup() + au FileType java set textwidth=120 +augroup END + +function! 
s:JavaSetup() abort + noremap <C-I> :JavaImport<CR> + nnoremap K :JavaDocPreview<CR> + nnoremap ]d :JavaSearchContext<CR> + nnoremap [d :JavaSearchContext<CR> + nnoremap g<CR> :JUnit<CR> + nnoremap g\ :Mvn test<CR> +endfunction +" }}} + +" Signify options {{{ +let g:signify_mapping_next_hunk = ']h' +let g:signify_mapping_prev_hunk = '[h' +let g:signify_vcs_list = ['git'] +let g:signify_sign_change = '~' +let g:signify_sign_delete = '-' +" }}} + +" Simplenote {{{ +let g:SimplenoteFiletype = 'markdown' +let g:SimplenoteSortOrder = 'pinned,modifydate,tagged,createdate' +let g:SimplenoteVertical = 1 + +nnoremap <Leader>nn :Simplenote -n<CR> +nnoremap <Leader>nl :Simplenote -l<CR> +nnoremap <Leader>nw :Simplenote -l work<CR> +nnoremap <Leader>nt :Simplenote -t<CR> +" }}} + +" Emmet {{{ +" Expand abbreviation +let g:user_emmet_leader_key = '<C-y>' +" }}} + +" Startify {{{ +let g:startify_bookmarks=[ '~/.vimrc', '~/.zshrc' ] +" }}} + +" Abolish {{{ +let g:abolish_save_file = expand('~/.vim/after/plugin/abolish.vim') +" }}} + +" Rails projections {{{ + +if !exists('g:rails_projections') + let g:rails_projections = {} +endif + +call extend(g:rails_projections, { + \ "config/routes.rb": { "command": "routes" }, + \ "config/structure.sql": { "command": "structure" } + \ }, 'keep') + +if !exists('g:rails_gem_projections') + let g:rails_gem_projections = {} +endif + +call extend(g:rails_gem_projections, { + \ "active_model_serializers": { + \ "app/serializers/*_serializer.rb": { + \ "command": "serializer", + \ "template": "class %SSerializer < ActiveModel::Serializer\nend", + \ "affinity": "model"}}, + \ "react-rails": { + \ "app/assets/javascripts/components/*.jsx": { + \ "command": "component", + \ "template": "var %S = window.%S = React.createClass({\n render: function() {\n }\n});", + \ "alternate": "spec/javascripts/components/%s_spec.jsx" }, + \ "spec/javascripts/components/*_spec.jsx": { + \ "alternate": "app/assets/javascripts/components/{}.jsx" }}, + \ "rspec": { + \ "spec/**/support/*.rb": { + \ "command": "support"}}, + \ "cucumber": { + \ "features/*.feature": { + \ "command": "feature", + \ "template": "Feature: %h"}, + \ "features/support/*.rb": { + \ "command": "support"}, + \ "features/support/env.rb": { + \ "command": "support"}, + \ "features/step_definitions/*_steps.rb": { + \ "command": "steps"}}, + \ "carrierwave": { + \ "app/uploaders/*_uploader.rb": { + \ "command": "uploader", + \ "template": "class %SUploader < CarrierWave::Uploader::Base\nend"}}, + \ "draper": { + \ "app/decorators/*_decorator.rb": { + \ "command": "decorator", + \ "affinity": "model", + \ "template": "class %SDecorator < Draper::Decorator\nend"}}, + \ "fabrication": { + \ "spec/fabricators/*_fabricator.rb": { + \ "command": ["fabricator", "factory"], + \ "alternate": "app/models/%s.rb", + \ "related": "db/schema.rb#%p", + \ "test": "spec/models/%s_spec.rb", + \ "template": "Fabricator :%s do\nend", + \ "affinity": "model"}}, + \ "factory_girl": { + \ "spec/factories/*.rb": { + \ "command": "factory", + \ "alternate": "app/models/%i.rb", + \ "related": "db/structure.sql#%s", + \ "test": "spec/models/%s_spec.rb", + \ "template": "FactoryGirl.define do\n factory :%i do\n end\nend", + \ "affinity": "model"}, + \ "spec/factories.rb": { + \ "command": "factory"}, + \ "test/factories.rb": { + \ "command": "factory"}} + \ }, 'keep') +" }}} + +" Other projections {{{ +let g:projectionist_heuristics = { + \ "config.ru&docker-compose.yml&app/&config/&OWNERS": { + \ "app/jobs/*.rb": { + \ "type": "job", + \ "alternate": 
"spec/jobs/{}_spec.rb" + \ }, + \ "app/models/*.rb": { + \ "type": "model", + \ "alternate": "spec/models/{}_spec.rb" + \ }, + \ "app/resources/*_resource.rb": { + \ "type": "resource", + \ "alternate": "spec/resources/{}_resource_spec.rb" + \ }, + \ "config/*.yml": { + \ "type": "config" + \ }, + \ "spec/*_spec.rb": { + \ "type": "spec", + \ "alternate": "app/{}.rb" + \ }, + \ "spec/factories/*.rb": { + \ "type": "factory", + \ } + \ }, + \ "svc-gateway.cabal": { + \ "src/*.hs": { + \ "type": "src", + \ "alternate": "test/{}Spec.hs" + \ }, + \ "test/*Spec.hs": { + \ "type": "spec", + \ "alternate": "src/{}.hs", + \ "template": [ + \ "module Gateway.Resource.HierarchySpec (main, spec) where", + \ "", + \ "import Prelude", + \ "import Test.Hspec", + \ "import Data.Aeson", + \ "", + \ "import Gateway.Resource.Hierarchy", + \ "", + \ "main :: IO ()", + \ "main = hspec spec", + \ "", + \ "spec :: Spec", + \ "spec = do", + \ " describe \"something\" $ undefined" + \ ] + \ }, + \ "svc-gateway.cabal": { + \ "type": "cabal" + \ } + \ }, + \ "package.json&.flowconfig": { + \ "src/*.*": { + \ "type": "src", + \ "alternate": "test/{}_spec.js" + \ } + \ }, + \ "pom.xml&src/main/clj/|src/main/cljs": { + \ "*": { + \ "start": "USE_NREPL=1 bin/run -m elephant.dev-system" , + \ "connect": "nrepl://localhost:5554", + \ "piggieback": "(figwheel-sidecar.repl-api/repl-env)" + \ }, + \ "pom.xml": { "type": "pom" }, + \ "src/main/clj/*.clj": { + \ "alternate": "src/test/clj/{}_test.clj", + \ "template": ["(ns {dot|hyphenate})"] + \ }, + \ "src/test/clj/*_test.clj": { + \ "alternate": "src/main/clj/{}.clj", + \ "dispatch": ":RunTests {dot|hyphenate}-test", + \ "template": ["(ns {dot|hyphenate}-test", + \ " (:require [clojure.test :refer :all]))"] + \ }, + \ "src/main/cljs/*.cljs": { + \ "alternate": "src/test/cljs/{}_test.cljs" + \ }, + \ "src/main/cljs/*_test.cljs": { + \ "alternate": "src/main/cljs/{}.cljs", + \ "dispatch": ":RunTests {dot|hyphenate}-test" + \ }, + \ "src/main/clj/*.cljc": { + \ "alternate": "src/test/clj/{}_test.cljc" + \ }, + \ "src/main/clj/*_test.cljc": { + \ "alternate": "src/test/clj/{}.cljc", + \ "dispatch": ":RunTests {dot|hyphenate}-test" + \ } + \ }} +" }}} + +" AutoPairs {{{ +let g:AutoPairsCenterLine = 0 +" }}} + +" Filetypes {{{ + +" Python {{{ +aug Python + au! + au FileType python set tabstop=4 shiftwidth=4 softtabstop=4 expandtab +aug END +let g:python_highlight_all=1 +" }}} + +" PHP {{{ +aug PHP + au! + "au FileType php setlocal fdm=marker fmr={{{,}}} +aug END " }}} + +" Mail {{{ +aug Mail + au FileType mail setlocal spell +aug END " }}} + +" Haskell {{{ +let g:haskell_conceal_wide = 1 +let g:haskellmode_completion_ghc = 0 +let g:necoghc_enable_detailed_browse = 1 + +augroup Haskell + autocmd! + autocmd FileType haskell setlocal textwidth=110 shiftwidth=2 + autocmd FileType haskell setlocal omnifunc=necoghc#omnifunc + autocmd FileType haskell call <SID>HaskellSetup() + autocmd FileType haskell setlocal keywordprg=hoogle\ -cie +augroup END + +function! s:HaskellSetup() + set sw=4 + " compiler cabal + " let b:start='cabal run' + " let b:console='cabal repl' + " let b:dispatch='cabal test' + compiler stack + let b:start='stack run' + let b:console='stack ghci' + let b:dispatch='stack test' + nnoremap <buffer> gy :HdevtoolsType<CR> + nnoremap <buffer> yu :HdevtoolsClear<CR> +endfunction +" }}} + +" Ruby {{{ + +function! 
s:RSpecSyntax() + syn keyword rspecMethod describe context it its specify shared_context + \ shared_examples shared_examples_for shared_context include_examples + \ include_context it_should_behave_like it_behaves_like before after + \ around fixtures controller_name helper_name scenario feature + \ background given described_class + syn match rspecMethod '\<let\>!\=' + syn match rspecMethod '\<subject\>!\=' + syn keyword rspecMethod violated pending expect expect_any_instance_of allow + \ allow_any_instance_of double instance_double mock mock_model + \ stub_model xit + syn match rspecMethod '\.\@<!\<stub\>!\@!' + + call s:RSpecHiDefaults() +endfunction + +function! s:RSpecHiDefaults() + hi def link rspecMethod rubyFunction +endfunction + +augroup Ruby + au! + " au FileType ruby let b:surround_114 = "\\(module|class,def,if,unless,case,while,until,begin,do) \r end" + " au FileType ruby set fdm=syntax + au FileType ruby set tw=110 + au FileType ruby set omnifunc= + au FileType ruby nnoremap <buffer> gy orequire 'pry'; binding.pry<ESC>^ + au FileType ruby nnoremap <buffer> gY Orequire 'pry'; binding.pry<ESC>^ + au FileType ruby nnoremap <buffer> yu :g/require 'pry'; binding.pry/d<CR> + au BufNewFile,BufRead *_spec.rb call <SID>RSpecSyntax() +augroup END + +let ruby_operators = 1 +let ruby_space_errors = 1 + +let g:rubycomplete_rails = 1 +command! -range ConvertHashSyntax <line1>,<line2>s/:(\S{-})(\s{-})=> /\1:\2/ +" }}} + +" Clojure {{{ + +aug Clojure + au! + autocmd FileType clojure nnoremap <C-S> :Slamhound<CR> + autocmd FileType clojure nnoremap <silent> gr :w <bar> Require <bar> e<CR> + let g:clojure_align_multiline_strings = 1 + let g:clojure_fuzzy_indent_patterns = + \ ['^with', '^def', '^let', '^fact'] + let g:clojure_special_indent_words = + \ 'deftype,defrecord,reify,proxy,extend-type,extend-protocol,letfn,html' + + autocmd FileType clojure setlocal textwidth=80 + autocmd FileType clojure setlocal lispwords+=GET,POST,PATCH,PUT,DELETE | + \ setlocal lispwords+=context,select + autocmd BufNewFile,BufReadPost *.cljx setfiletype clojure + autocmd BufNewFile,BufReadPost *.cljx setlocal omnifunc= + autocmd BufNewFile,BufReadPost *.cljs setlocal omnifunc= + autocmd FileType clojure call <SID>TangentInit() + autocmd FileType clojure call <SID>sexp_mappings() + autocmd BufRead *.cljc ClojureHighlightReferences + autocmd FileType clojure let b:AutoPairs = { + \ '"': '"', + \ '{': '}', + \ '(': ')', + \ '[': ']'} + " Don't auto-pair quote reader macros + " \'`': '`', + " \ '''': '''', + + autocmd User ProjectionistActivate call s:projectionist_connect() + + function! s:projectionist_connect() abort + let connected = !empty(fireplace#path()) + if !connected + for [root, value] in projectionist#query('connect') + try + silent execute "FireplaceConnect" value root + let connected = 1 + break + catch /.*Connection refused.*/ + endtry + endfor + endif + + " if connected && exists(':Piggieback') + " for [root, value] in projectionist#query('piggieback') + " silent execute "Piggieback" value + " break + " endfor + " endif + endfunction + + " autocmd BufNewFile,BufReadPost *.cljx setlocal omnifunc= + " autocmd BufNewFile,BufReadPost *.cljs setlocal omnifunc= + + autocmd FileType clojure let b:console='lein repl' + autocmd FileType clojure call <SID>ClojureMaps() + + function! s:ClojureMaps() abort + nnoremap <silent> <buffer> [m :call search('^(def', 'Wzb')<CR> + nnoremap <silent> <buffer> ]m :call search('^(def', 'Wz')<CR> + endfunction + + command! 
Scratch call <SID>OpenScratch() + autocmd FileType clojure nnoremap <buffer> \s :Scratch<CR> + + let g:scratch_buffer_name = 'SCRATCH' + + function! s:OpenScratch() + if bufwinnr(g:scratch_buffer_name) > 0 + execute bufwinnr(g:scratch_buffer_name) . 'wincmd' 'w' + return + endif + + vsplit SCRATCH + set buftype=nofile + set filetype=clojure + let b:scratch = 1 + endfunction +aug END + +function! s:sexp_mappings() abort + if !exists('g:sexp_loaded') + return + endif + + nmap <buffer> cfo <Plug>(sexp_raise_list) + nmap <buffer> cfO <Plug>(sexp_raise_element) + nmap <buffer> cfe <Plug>(sexp_raise_element) +endfunction + +function! s:TangentInit() abort + set textwidth=80 + command! TReset call fireplace#session_eval('(user/reset)') + command! TGo call fireplace#session_eval('(user/go)') + command! TMigrate call fireplace#session_eval('(user/migrate)') + command! TRollback call fireplace#session_eval('(user/rollback)') + nnoremap g\ :TReset<CR> +endfunction + +" }}} + +" Go {{{ + +let g:go_highlight_functions = 1 +let g:go_highlight_methods = 1 +let g:go_highlight_structs = 1 +let g:go_highlight_operators = 1 +let g:go_highlight_build_constraints = 1 + +augroup Go + autocmd! + autocmd FileType go setlocal omnifunc=go#complete#Complete + autocmd FileType go setlocal foldmethod=syntax + autocmd FileType go setlocal foldlevel=100 + autocmd FileType go nnoremap <buffer> <F9> :GoTest<CR> + autocmd FileType go inoremap <buffer> <F9> <ESC>:GoTest<CR>i +augroup END + +" }}} + +" RAML {{{ + +function! s:buffer_syntax() " {{{ + syn keyword ramlRAML RAML contained + syn match ramlVersionString '^#%RAML \d\.\d' contains=ramlRAML +endfunction " }}} + +augroup RAML + autocmd! + autocmd BufRead,BufNewFile *.raml set filetype=yaml + autocmd BufRead,BufNewFile *.raml call s:buffer_syntax() +augroup END + +hi def link ramlVersionString Special +hi def link ramlRAML Error +" }}} + +" Mustache/Handlebars {{{ +let g:mustache_abbreviations = 1 +" }}} + +" Netrw {{{ +augroup netrw + autocmd! + autocmd FileType netrw nnoremap <buffer> Q :Rexplore<CR> + + " Hee hee, oil and vinegar + function! s:setup_oil() abort + nnoremap <buffer> q <C-6> + xnoremap <buffer> q <C-6> + endfunction +augroup END +" }}} +" }}} + +" Remove trailing whitespace {{{ +fun! <SID>StripTrailingWhitespaces() + let l = line(".") + let c = col(".") + %s/\s\+$//e + call cursor(l, c) +endfun + +augroup striptrailingwhitespaces " {{{ +autocmd FileType c,cpp,java,php,ruby,python,sql,javascript,sh,jst,less,haskell,haml,coffee,scss,clojure,objc,elixir,yaml,json,eruby + \ autocmd BufWritePre <buffer> :call <SID>StripTrailingWhitespaces() +augroup END " }}} + +" }}} + +" Goyo {{{ +let g:limelight_conceal_ctermfg = "10" +let g:limelight_conceal_guifg = "#586e75" +autocmd! User GoyoEnter Limelight +autocmd! User GoyoLeave Limelight! +" }}} + +"----------------------------------------------------------------------------- + +" Commands {{{ + +" Edit temporary SQL files {{{ +let s:curr_sql = 0 +fun! <SID>EditSqlTempFile() + let l:fname = '/tmp/q' . s:curr_sql . '.sql' + execute 'edit' l:fname + let s:curr_sql = s:curr_sql + 1 +endfun +com! EditSqlTempFile call <SID>EditSqlTempFile() +" }}} + +" Double Indentation +command! -range DoubleIndentation <line1>,<line2>s/^\(\s.\{-}\)\(\S\)/\1\1\2/ + +" Quick-and-dirty fix capitalization of sql files +command! -range FixSqlCapitalization <line1>,<line2>v/\v(^\s*--.*$)|(TG_)/norm guu + +" VimPipe Commands {{{ +" let g:sql_type_default = 'pgsql' +command! 
SqlLive let b:vimpipe_command="vagrant ssh -c '~/mysql'" +command! SqlRails let b:vimpipe_command="bin/rails dbconsole" +command! SqlHeroku let b:vimpipe_command="heroku pg:psql" +command! SqlEntities let b:vimpipe_command="psql -h 127.1 entities nomi" +command! SqlUsers let b:vimpipe_command="psql -h 127.1 users nomi" +command! SqlTangent let b:vimpipe_command="psql -h local.docker tangent super" +" }}} + +" Git commands {{{ +command! -nargs=* Gpf Gpush -f <args> +command! -nargs=* Gcv Gcommit --verbose <args> +" }}} + +" Focus dispatch to only the last failures +command! -nargs=* FocusFailures FocusDispatch rspec --only-failures <args> + +" }}} + +" Autocommands {{{ + +augroup fugitive " {{{ + au! + autocmd BufNewFile,BufRead fugitive://* set bufhidden=delete +augroup END " }}} + +augroup omni " {{{ + au! + " autocmd FileType javascript setlocal omnifunc=tern#Complete + "autocmd FileType python setlocal omnifunc=pythoncomplete#Complete + autocmd FileType php setlocal omnifunc= +augroup END " }}} + +augroup sql " {{{ + au! + autocmd FileType sql let b:vimpipe_command="psql -h 127.0.0.1 landlordsny_development landlordsny" + autocmd FileType sql let b:vimpipe_filetype="postgresql" + autocmd FileType sql set syntax=postgresql + autocmd FileType postgresql set nowrap + autocmd BufNewFile,BufReadPost *.sql set syntax=pgsql +augroup END " }}} + +augroup markdown " {{{ + au! + autocmd FileType markdown let b:vimpipe_command='markdown' + autocmd FileType markdown let b:vimpipe_filetype='html' + autocmd FileType markdown set tw=80 +augroup END " }}} + +augroup typescript " {{{ + au! + autocmd FileType typescript let b:vimpipe_command='tsc' + autocmd FileType typescript let b:vimpipe_filetype='javascript' + autocmd FileType typescript TSSstarthere + autocmd FileType typescript nnoremap <buffer> gd :TSSdef<CR> +augroup END " }}} + +augroup jsx " {{{ + au! + " autocmd FileType jsx set syntax=javascript + autocmd FileType javascript set filetype=javascript.jsx +augroup END " }}} + +augroup nicefoldmethod " {{{ + au! + " Don't screw up folds when inserting text that might affect them, until + " leaving insert mode. Foldmethod is local to the window. Protect against + " screwing up folding when switching between windows. + autocmd InsertEnter * + \ if !exists('w:last_fdm') | + \ let w:last_fdm=&foldmethod | + \ setlocal foldmethod=manual | + \ endif + autocmd InsertLeave,WinLeave * + \ if exists('w:last_fdm') | + \ let &l:foldmethod=w:last_fdm | + \ unlet w:last_fdm | + \ endif +augroup END " }}} + +augroup visualbell " {{{ + au! + autocmd GUIEnter * set visualbell t_vb= +augroup END +" }}} + +augroup quickfix " {{{ + au! + autocmd QuickFixCmdPost grep cwindow +augroup END " }}} + +augroup php " {{{ + au! +augroup END "}}} + +augroup rubylang " {{{ + au! + autocmd FileType ruby compiler rake +augroup END " }}} + +augroup javascript "{{{ + au! + autocmd FileType javascript let &errorformat = + \ '%E%.%#%n) %s:,' . + \ '%C%.%#Error: %m,' . + \ '%C%.%#at %s (%f:%l:%c),' . + \ '%Z%.%#at %s (%f:%l:%c),' . + \ '%-G%.%#,' +augroup END " }}} + +augroup git " {{{ + autocmd! 
+  autocmd FileType gitcommit set textwidth=72
+augroup END
+" }}}
+" }}}
+
+" Leader commands {{{
+
+" Edit specific files {{{
+nnoremap <silent> <leader>ev :split $MYVIMRC<CR>
+nnoremap <silent> <leader>eb :split ~/.vim_bundles<CR>
+nnoremap <silent> <leader>es :UltiSnipsEdit<CR>
+nnoremap <silent> <leader>ea :split ~/.vim/after/plugin/abolish.vim<CR>
+
+nnoremap <silent> <leader>sv :so $MYVIMRC<CR>
+nnoremap <silent> <leader>sb :so ~/.vim_bundles<CR>
+nnoremap <silent> <leader>sa :so ~/.vim/after/plugin/abolish.vim<CR>
+
+nnoremap <Leader>el :EditSqlTempFile<CR>
+" }}}
+
+" Toggle navigation panels {{{
+nnoremap <Leader>l :TagbarToggle<CR>
+nnoremap <Leader>mb :MBEToggle<CR>
+nnoremap <Leader>u :GundoToggle<CR>
+
+nnoremap <Leader>t :CtrlP<CR>
+nnoremap <Leader>z :FZF<CR>
+nnoremap <Leader>b :CtrlPBuffer<CR>
+nnoremap <Leader>a :CtrlPTag<CR>
+nnoremap <Leader>r :CtrlPGitBranch<CR>
+" }}}
+
+" CtrlP {{{
+let g:ctrlp_custom_ignore = {
+      \ 'dir': 'node_modules',
+      \ }
+" }}}
+
+" Git leader commands {{{
+noremap <Leader>g :Git<SPACE>
+noremap <Leader>gu :Gpull<CR>
+noremap <Leader>gp :Gpush<CR>
+noremap <Leader>s :Gstatus<CR>
+noremap <Leader>cv :Gcommit --verbose<CR>
+noremap <Leader>ca :Gcommit --verbose --amend<CR>
+
+nnoremap <Leader>dl :diffg LOCAL<CR>
+nnoremap <Leader>dr :diffg REMOTE<CR>
+nnoremap <Leader>db :diffg BASE<CR>
+nnoremap <Leader>du :diffu<CR>
+nnoremap <Leader>dg :diffg<CR>
+
+nnoremap <Leader>d2 :diffg //2<CR>:diffu<CR>
+nnoremap <Leader>d3 :diffg //3<CR>:diffu<CR>
+
+nnoremap <Leader>yt :SignifyToggle<CR>
+" }}}
+
+" Breakpoint Leader Commands {{{
+nnoremap <Leader>x :Breakpoint<CR>
+nnoremap <Leader>dx :BreakpointRemove *<CR>
+" }}}
+
+" Tabularize {{{
+  " Leader Commands {{{
+  nnoremap <localleader>t= :Tabularize /=<CR>
+  vmap <localleader>t= :Tabularize /=<CR>
+
+  nnoremap <localleader>t> :Tabularize /=><CR>
+  vmap <localleader>t> :Tabularize /=><CR>
+  " }}}
+
+  " => Aligning {{{
+  function! s:rocketalign()
+    let l:p = '^.*=>\s.*$'
+    echo l:p
+    if exists(':Tabularize') && getline('.') =~# '^.*=' &&
+      \ (getline(line('.')-1) =~# l:p || getline(line('.')+1) =~# l:p)
+      let column = strlen(substitute(getline('.')[0:col('.')],'[^=>]','','g'))
+      let position = strlen(matchstr(getline('.')[0:col('.')],'.*=>\s*\zs.*'))
+      Tabularize/=>/l1
+      normal! $
+      call search(repeat('[^=>]*=>',column).'\s\{-\}'.repeat('.',position),'ce',line('.'))
+    endif
+  endfunction
+  "inoremap <buffer> <space>=><space> =><Esc>:call <SID>rocketalign()<CR>a
+  " }}}
+
+  " = Aligning {{{
+  function! s:eqalign()
+    let l:p = '^.*=\s.*$'
+    if exists(':Tabularize') && getline('.') =~# '^.*=' &&
+      \ (getline(line('.')-1) =~# l:p || getline(line('.')+1) =~# l:p)
+      let column = strlen(substitute(getline('.')[0:col('.')],'[^=]','','g'))
+      let position = strlen(matchstr(getline('.')[0:col('.')],'.*=\s*\zs.*'))
+      Tabularize/=/l1
+      normal! $
+      call search(repeat('[^=]*=',column).'\s\{-\}'.repeat('.',position),'ce',line('.'))
+    endif
+  endfunction
+  "inoremap <buffer><silent> <space>=<space> =<Esc>:call <SID>eqalign()<CR>a
+  " }}}
+
+  " : Aligning {{{
+  function! s:colonalign()
+    let l:p = '^.*:\s.*$'
+    if exists(':Tabularize') && getline('.') =~# '^.*:' &&
+      \ (getline(line('.')-1) =~# l:p || getline(line('.')+1) =~# l:p)
+      let column = strlen(substitute(getline('.')[0:col('.')],'[^:]','','g'))
+      let position = strlen(matchstr(getline('.')[0:col('.')],'.*:\s*\zs.*'))
+      Tabularize/:/l1
+      normal! 
$ + call search(repeat('[^:]*:',column).'\s\{-\}'.repeat('.',position),'ce',line('.')) + endif + endfunction + "inoremap <buffer><silent> <space>:<space> :<Esc>:call <SID>colonalign()<CR>a + " }}} +" }}} + +" }}} + +" Mappings {{{ +" 'delete current' +nnoremap dc 0d$ +nnoremap com :silent !tmux set status<CR> +nnoremap <F9> :Make<CR> +nnoremap g<CR> :Dispatch<CR> +nnoremap g\ :Start<CR> +inoremap <F9> <ESC>:Make<CR>i + +" Navigate buffers {{{ +nnoremap gb :bn<CR> +nnoremap gB :bp<CR> +" }}} + +" Window Navigation {{{ +nnoremap <space>w <C-w> +nnoremap <space>h <C-w>h +nnoremap <space>j <C-w>j +nnoremap <space>k <C-w>k +nnoremap <space>l <C-w>l +nnoremap <space>z <C-w>z +" }}} + + +" Sort with motion {{{ +if !exists("g:sort_motion_flags") + let g:sort_motion_flags = "" +endif +function! s:sort_motion(mode) abort + if a:mode == 'line' + execute "'[,']sort " . g:sort_motion_flags + elseif a:mode == 'char' + execute "normal! `[v`]y" + let sorted = join(sort(split(@@, ', ')), ', ') + execute "normal! v`]c" . sorted + elseif a:mode == 'V' || a:mode == '' + execute "'<,'>sort " . g:sort_motion_flags + endif +endfunction + +function! s:sort_lines() + let beginning = line('.') + let end = v:count + beginning - 1 + execute beginning . ',' . end . 'sort' +endfunction + +xnoremap <silent> <Plug>SortMotionVisual :<C-U>call <SID>sort_motion(visualmode())<CR> +nnoremap <silent> <Plug>SortMotion :<C-U>set opfunc=<SID>sort_motion<CR>g@ +nnoremap <silent> <Plug>SortLines :<C-U>call <SID>sort_lines()<CR> + +map go <Plug>SortMotion +vmap go <Plug>SortMotionVisual +map goo <Plug>SortLines +" }}} +" }}} + +let g:hare_executable = 'cabal exec -- ghc-hare' diff --git a/users/glittershark/system/home/modules/zshrc b/users/glittershark/system/home/modules/zshrc new file mode 100644 index 000000000000..a12173d6842d --- /dev/null +++ b/users/glittershark/system/home/modules/zshrc @@ -0,0 +1,327 @@ +#!/usr/bin/zsh +# vim: set fdm=marker fmr={{{,}}}: + +stty -ixon + +# Compinstall {{{ +zstyle ':completion:*' completer _complete _ignored _correct _approximate +zstyle ':completion:*' matcher-list '' 'm:{[:lower:]}={[:upper:]} m:{[:lower:][:upper:]}={[:upper:][:lower:]} r:|[._- :]=** r:|=**' 'l:|=* r:|=*' +zstyle ':completion:*' max-errors 5 +zstyle ':completion:*' use-cache yes +zstyle ':completion::complete:grunt::options:' expire 1 +zstyle ':completion:*' prompt '%e errors' +zstyle :compinstall filename '~/.zshrc' +autoload -Uz compinit +compinit +# }}} + +# Zsh-newuser-install {{{ +HISTFILE=~/.histfile +HISTSIZE=1000 +SAVEHIST=1000 +setopt appendhistory autocd extendedglob notify autopushd +unsetopt beep nomatch +bindkey -v +# }}} + +# Basic options {{{ +set -o vi +umask 022 +export VIRTUAL_ENV_DISABLE_PROMPT=1 +# export PATH=~/.local/bin:~/.cabal/bin:$PATH:~/code/go/bin:~/bin:~/npm/bin:~/.gem/ruby/2.1.0/bin:~/.gem/ruby/2.0.0/bin:/home/smith/bin +# }}} + +# Zsh highlight highlighters {{{ +ZSH_HIGHLIGHT_HIGHLIGHTERS=(main brackets pattern root) +# }}} + +# More basic options {{{ +setopt no_hist_verify +setopt histignorespace +# }}} + +# Utility Functions {{{ + +# Set the terminal's title bar. 
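+# Emits the xterm OSC 0 escape sequence, e.g. `titlebar "$(hostname)"`.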
+function titlebar() { +echo -ne "\033]0;$*\007" +} + +function quiet() { +"$@" >/dev/null +} + +function quieter() { +"$@" >/dev/null 2>&1 +} + +# From http://stackoverflow.com/questions/370047/#370255 +function path_remove() { +IFS=: +# convert it to an array +t=($PATH) +unset IFS +# perform any array operations to remove elements from the array +t=(${t[@]%%$1}) +IFS=: +# output the new array +echo "${t[*]}" +} + +# }}} + +# Force screen to use zsh {{{ +# }}} + +# Environment {{{ +# }}} + +# Directory Stuff {{{ + +# Always use color output for `ls` + +# Directory listing + +# Easier navigation: .., ..., - + +# File size + +# Recursively delete `.DS_Store` files + +# Create a new directory and enter it +function md() { + mkdir -p "$@" && cd "$@" +} + +# }}} + +# MPD/MPC stuff {{{ +function mp() { +# Test if drive is already mounted +if ! lsblk | grep /media/external >/dev/null; then + if ! sudo mount /media/external; then + echo "External drive not plugged in, or could not mount" + return 1 + fi +fi +if (mpc >/dev/null 2>&1); then + ncmpcpp +else + mpd && + (pgrep mpdscribble || mpdscribble) && + ncmpcpp +fi +} + +# kill mp +function kmp() { +killall ncmpcpp +mpd --kill + +local files + +if (files=$(lsof 2>&1 | grep -v docker | grep external)); then + echo + echo "==> Still processes using external drive:" + echo + echo $files +else + sudo umount /media/external +fi +} + + +function mppal() { +mpc search album "$1" | mpc add && + mpc play; +} +# }}} + +# Git stuff {{{ +# function ga() { git add "${@:-.}"; } # Add all files by default +# Add non-whitespace changes +# function gc() { git checkout "${@:-master}"; } # Checkout master by default + +# open all changed files (that still actually exist) in the editor +function ged() { +local files=() +for f in $(git diff --name-only "$@"); do + [[ -e "$f" ]] && files=("${files[@]}" "$f") +done +local n=${#files[@]} +echo "Opening $n $([[ "$@" ]] || echo "modified ")file$([[ $n != 1 ]] && \ + echo s)${@:+ modified in }$@" +q "${files[@]}" +} + +# git find-replace +function gfr() { +if [[ "$#" == "0" ]]; then + echo 'Usage:' + echo ' gg_replace term replacement file_mask' + echo + echo 'Example:' + echo ' gg_replace cappuchino cappuccino *.html' + echo +else + find=$1; shift + replace=$1; shift + + ORIG_GLOBIGNORE=$GLOBIGNORE + GLOBIGNORE=*.* + if [[ "$#" = "0" ]]; then + set -- ' ' $@ + fi + + while [[ "$#" -gt "0" ]]; do + for file in `git grep -l $find -- $1`; do + sed -e "s/$find/$replace/g" -i'' $file + done + shift + done + + GLOBIGNORE=$ORIG_GLOBIGNORE +fi +} + +function vconflicts() { +$EDITOR $(git status --porcelain | awk '/^UU/ { print $2 }') +} +# }}} + +# fzf {{{ +v() { + local file + file=$(fzf-tmux --query="$1" --select-1 --exit-0) + [ -n "$file" ] && ${EDITOR:-vim} "$file" +} + +c() { + local dir + dir=$(find ${1:-*} -path '*/\.*' -prune -o -type d -print 2> /dev/null | fzf +m) && cd "$dir" +} + +co() { + local branch + branch=$(git branch -a | sed -s "s/\s*\**//g" | fzf --query="$1" --select-1 --exit-0) && git checkout "$branch" +} + + +# fh - repeat history +# h() { +# eval $(([ -n "$ZSH_NAME" ] && fc -l 1 || history) | fzf +s | sed 's/ *[0-9]* *//') +# } + +# fkill - kill process +fkill() { + ps -ef | sed 1d | fzf-tmux -m | awk '{print $2}' | xargs kill -${1:-9} +} +# }}} + +# Tmux utils {{{ +kill_detached() { + for sess in $(tmux ls | grep -v attached | sed -s "s/:.*$//"); do + tmux kill-session -t $sess; + done +} +# }}} + +# Docker {{{ + + +# dbp foo/bar . 
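+# Builds the image tagged with the first argument (remaining args are passed
+# through to `docker build`), then pushes it.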
+function dbp () { + docker build -t $1 ${@:2} && docker push $1 +} + +# }}} + +# Twitter! {{{ + + +# favelast <username> +function favelast() { + t fave $(t tl -l $1 | head -n1 | first) +} + +function rtlast() { + t rt $(t tl -l $1 | head -n1 | first) +} + +function tthread() { + t reply $(t tl -l $TWITTER_WHOAMI | head -n1 | first) $@ +} +# }}} + +# Geeknote {{{ +gnc() { + gn create --title $1 --content '' && + gn find --count=1 "$1" + gn edit 1 +} +# }}} + +# Misc aliases {{{ + +function fw() { # fix white + local substitution + local substitution='s/\x1b\[90m/\x1b[92m/g' + $@ > >(perl -pe "$substitution") 2> >(perl -pe "$substitution" 1>&2) +} +# }}} + +# Grep options {{{ +unset GREP_OPTIONS +export GREP_OPTIONS= +# }}} + + +# Run docker containers {{{ + # -d \ + # -v $HOME/.pentadactyl:/home/firefox/.pentadactyl:rw \ + # -v $HOME/.pentadactylrc:/home/firefox/.pentadactylrc:rw \ + # -v $HOME/.mozilla:/home/firefox/.mozilla:rw \ + # -v $HOME/.config:/home/firefox/.config \ + # -v $HOME/Downloads:/home/firefox/Downloads:rw \ + # -v /etc/fonts:/etc/fonts \ + # -v /tmp/.X11-unix:/tmp/.X11-unix \ + # -v /dev/snd:/dev/snd \ + # --net=host \ + # -v $XDG_RUNTIME_DIR:$XDG_RUNTIME_DIR \ + # -e uid=$(id -u) \ + # -e gid=$(id -g) \ + # -e DISPLAY=$DISPLAY \ + # -e XDG_RUNTIME_DIR=$XDG_RUNTIME_DIR \ + # --name firefox \ + # --rm -it \ + # glittershark/firefox +# }}} + +# Change cursor shape on insert/normal mode {{{ +# (https://unix.stackexchange.com/q/433273/64261) + +KEYTIMEOUT=5 + +_fix_cursor() { + echo -ne '\e[5 q' +} + +precmd_functions+=(_fix_cursor) + +function zle-keymap-select { + if [[ ${KEYMAP} == vicmd ]] || + [[ $1 = 'block' ]]; then + echo -ne '\e[1 q' + + elif [[ ${KEYMAP} == main ]] || + [[ ${KEYMAP} == viins ]] || + [[ ${KEYMAP} = '' ]] || + [[ $1 = 'beam' ]]; then + echo -ne '\e[5 q' + fi +} +zle -N zle-keymap-select + +# }}} + +[ -f ./.localrc ] && source ./.localrc diff --git a/users/glittershark/system/home/platforms/darwin.nix b/users/glittershark/system/home/platforms/darwin.nix new file mode 100644 index 000000000000..cf0375e94162 --- /dev/null +++ b/users/glittershark/system/home/platforms/darwin.nix @@ -0,0 +1,26 @@ +{ config, lib, pkgs, ... }: + +with lib; + +{ + config = { + home.packages = with pkgs; [ + coreutils + gnupg + pinentry_mac + ]; + + home.activation.linkApplications = lib.hm.dag.entryAfter ["writeBoundary"] '' + $DRY_RUN_CMD ln -sf $VERBOSE_ARG \ + ~/.nix-profile/Applications/* ~/Applications/ + ''; + + programs.zsh.initExtra = '' + export NIX_PATH=$HOME/.nix-defexpr/channels:$NIX_PATH + + if [[ "$TERM" == "alacritty" ]]; then + export TERM="xterm-256color" + fi + ''; + }; +} diff --git a/users/glittershark/system/home/platforms/linux.nix b/users/glittershark/system/home/platforms/linux.nix new file mode 100644 index 000000000000..92924e419973 --- /dev/null +++ b/users/glittershark/system/home/platforms/linux.nix @@ -0,0 +1,93 @@ +{ config, pkgs, ... 
}: + +let + + depot = config.lib.depot; + +in + +{ + imports = [ + ../modules/alacritty.nix + ../modules/alsi.nix + ../modules/development.nix + ../modules/emacs.nix + ../modules/email.nix + ../modules/firefox.nix + ../modules/games.nix + ../modules/obs.nix + ../modules/i3.nix + ../modules/shell.nix + ../modules/tarsnap.nix + ../modules/vim.nix + ]; + + xsession.enable = true; + + home.packages = with pkgs; [ + (import (fetchTarball "https://github.com/ashkitten/nixpkgs/archive/init-glimpse.tar.gz") {}).glimpse + + # Desktop stuff + arandr + firefox + feh + chromium + xclip + xorg.xev + picom + peek + signal-desktop + apvlv # pdf viewer + vlc + irssi + gnutls + pandoc + barrier + depot.tools.nsfv-setup + + # System utilities + powertop + usbutils + pciutils + gdmap + lsof + tree + ncat + iftop + + # Security + gnupg + keybase + openssl + yubikey-manager + yubikey-manager-qt + + # Spotify...etc + spotify + playerctl + ]; + + services.redshift = { + enable = true; + provider = "geoclue2"; + }; + + services.pasystray.enable = true; + + services.gpg-agent = { + enable = true; + }; + + gtk = { + enable = true; + gtk3.bookmarks = [ + "file:///home/grfn/code" + ]; + }; + + programs.zsh.initExtra = '' + [[ ! $IN_NIX_SHELL ]] && alsi -l + ''; + + services.lorri.enable = true; +} diff --git a/users/glittershark/system/install b/users/glittershark/system/install new file mode 100755 index 000000000000..a9a45953da07 --- /dev/null +++ b/users/glittershark/system/install @@ -0,0 +1,35 @@ +#!/usr/bin/env bash + +set -eo pipefail + +if [[ -f /etc/nixos/.system-installed ]]; then + echo "=== System config already installed, skipping" +else + echo "==> Installing system config" + + [[ -d /etc/nixos ]] && sudo mv /etc/nixos{,.bak} + sudo mkdir -p /etc/nixos + sudo cp /etc/nixos.bak/hardware-configuration.nix /etc/nixos + + sudo cp ./system/configuration.nix /etc/nixos/ + sudo ln -s $(pwd)/system/{machines,modules,pkgs} /etc/nixos + sudo touch /etc/nixos/.system-installed + + echo "==> System config installed, your old configuration is at /etc/nixos.bak" +fi +echo + +if [[ -f ~/.config/nixpkgs/system-installed ]]; then + echo "=== home-manager config already installed, skipping" +else + echo "==> Installing home-manager config" + nix-channel --add https://github.com/rycee/home-manager/archive/master.tar.gz home-manager + nix-channel --update + # nix-shell '<home-manager>' -A install + + [[ -d ~/.config/nixpkgs ]] && mv ~/.config/{nixpkgs,nixpkgs.bak} + mkdir -p ~/.config/nixpkgs + ln -s $(pwd)/home/* ~/.config/nixpkgs + + echo "==> home-manager config installed" +fi diff --git a/users/glittershark/system/system/.skip-subtree b/users/glittershark/system/system/.skip-subtree new file mode 100644 index 000000000000..e69de29bb2d1 --- /dev/null +++ b/users/glittershark/system/system/.skip-subtree diff --git a/users/glittershark/system/system/configuration.nix b/users/glittershark/system/system/configuration.nix new file mode 100644 index 000000000000..eae567015b73 --- /dev/null +++ b/users/glittershark/system/system/configuration.nix @@ -0,0 +1,11 @@ +{ config, pkgs, ... }: + +let machine = throw "Pick a machine from ./machines"; in +{ + imports = + [ + /etc/nixos/hardware-configuration.nix + ./modules/common.nix + machine + ]; +} diff --git a/users/glittershark/system/system/default.nix b/users/glittershark/system/system/default.nix new file mode 100644 index 000000000000..aa0afb4c5935 --- /dev/null +++ b/users/glittershark/system/system/default.nix @@ -0,0 +1,93 @@ +args @ { depot, pkgs, ... 
}: + +let + nixpkgs = import pkgs.nixpkgsSrc {}; +in + +rec { + chupacabra = import ./machines/chupacabra.nix; + + chupacabraSystem = (pkgs.nixos { + configuration = chupacabra; + }).system; + + mugwump = import ./machines/mugwump.nix; + + mugwumpSystem = (pkgs.nixos { + configuration = mugwump; + }).system; + + roswell = import ./machines/roswell.nix; + + roswellSystem = (pkgs.nixos { + configuration = { ... }: { + imports = [ + ./machines/roswell.nix + "${nixpkgs.home-manager.src}/nixos" + "${depot.depotPath}/ops/nixos/depot.nix" + ]; + inherit depot; + + home-manager.users.grfn = { config, lib, ... }: { + imports = [ ../home/machines/roswell.nix ]; + lib.depot = depot; + _module.args.pkgs = lib.mkForce + (import pkgs.nixpkgsSrc + (lib.filterAttrs (n: v: v != null) config.nixpkgs)); + }; + }; + }).system; + + yeren = import ./machines/yeren.nix; + + yerenSystem = (pkgs.nixos { + configuration = { ... }: { + imports = [ + ./machines/yeren.nix + "${depot.depotPath}/ops/nixos/depot.nix" + ]; + inherit depot; + }; + }).system; + + iso = import ./iso.nix args; + + # Build chupacabra in CI + meta.targets = [ + "chupacabraSystem" + "mugwumpSystem" + "roswellSystem" + "yerenSystem" + + "iso" + ]; + + rebuilder = + let + depotPath = "/home/grfn/code/depot"; + + caseFor = hostname: '' + ${hostname}) + echo "Rebuilding NixOS for //users/glittershark/nixos/${hostname}" + system=$(nix-build -E '(import ${depotPath} {}).users.glittershark.system.system.${hostname}' --no-out-link) + ;; + ''; + in depot.third_party.writeShellScriptBin "rebuilder" '' + set -ue + if [[ $EUID -ne 0 ]]; then + echo "Oh no! Only root is allowed to rebuild the system!" >&2 + exit 1 + fi + + case $HOSTNAME in + ${caseFor "chupacabra"} + *) + echo "$HOSTNAME is not a known NixOS host!" >&2 + exit 1 + ;; + esac + + nix-env -p /nix/var/nix/profiles/system --set $system + $system/bin/switch-to-configuration switch + ''; +} diff --git a/users/glittershark/system/system/iso.nix b/users/glittershark/system/system/iso.nix new file mode 100644 index 000000000000..056922ee1e82 --- /dev/null +++ b/users/glittershark/system/system/iso.nix @@ -0,0 +1,17 @@ +{ pkgs, ... }: + +let + configuration = { ... }: { + imports = [ + "${pkgs.nixpkgsSrc}/nixos/modules/installer/cd-dvd/installation-cd-minimal-new-kernel.nix" + "${pkgs.nixpkgsSrc}/nixos/modules/installer/cd-dvd/channel.nix" + ]; + + networking.networkmanager.enable = true; + networking.useDHCP = false; + networking.firewall.enable = false; + networking.wireless.enable = pkgs.lib.mkForce false; + }; +in (pkgs.nixos { + inherit configuration; +}).config.system.build.isoImage diff --git a/users/glittershark/system/system/machines/bumblebee.nix b/users/glittershark/system/system/machines/bumblebee.nix new file mode 100644 index 000000000000..0fec21409255 --- /dev/null +++ b/users/glittershark/system/system/machines/bumblebee.nix @@ -0,0 +1,23 @@ +{ config, lib, pkgs, ... 
}: +{ + imports = [ + ../modules/reusable/battery.nix + ]; + + networking.hostName = "bumblebee"; + + powerManagement = { + enable = true; + cpuFreqGovernor = "powersave"; + powertop.enable = true; + }; + + # Hibernate on low battery + laptop.onLowBattery = { + enable = true; + action = "hibernate"; + thresholdPercentage = 5; + }; + + services.xserver.xkbOptions = "caps:swapescape"; +} diff --git a/users/glittershark/system/system/machines/chupacabra.nix b/users/glittershark/system/system/machines/chupacabra.nix new file mode 100644 index 000000000000..4f7f39ee5aa1 --- /dev/null +++ b/users/glittershark/system/system/machines/chupacabra.nix @@ -0,0 +1,142 @@ +{ config, lib, pkgs, ... }: + +{ + imports = [ + ../modules/common.nix + ../modules/laptop.nix + ../modules/tvl.nix + ../modules/fcitx.nix + ../modules/rtlsdr.nix + ../../../../../ops/nixos/v4l2loopback.nix + ../modules/desktop.nix + ../modules/development.nix + ]; + + hardware.enableRedistributableFirmware = true; + + networking.hostName = "chupacabra"; + + powerManagement = { + enable = true; + powertop.enable = true; + cpuFreqGovernor = "powersave"; + }; + + laptop.onLowBattery = { + enable = true; + action = "hibernate"; + thresholdPercentage = 5; + }; + + boot = { + blacklistedKernelModules = [ "nouveau" "intel" ]; + extraModulePackages = [ ]; + + initrd = { + availableKernelModules = [ "xhci_pci" "ahci" "nvme" "usb_storage" "sd_mod" "rtsx_pci_sdmmc" ]; + kernelModules = [ ]; + + luks.devices = { + "cryptroot".device = "/dev/disk/by-uuid/c2fc7ce7-a45e-48a1-8cde-be966ef601db"; + "cryptswap".device = "/dev/disk/by-uuid/3b6e2fd4-bfe9-4392-a6e0-4f3b3b76e019"; + }; + }; + + kernelModules = [ "kvm-intel" ]; + kernelParams = [ "acpi_rev_override" ]; + + kernel.sysctl = { + "kernel.perf_event_paranoid" = -1; + "vm.swappiness" = 1; + }; + }; + + services.thermald.enable = true; + + hardware.cpu.intel.updateMicrocode = true; + + # Intel-only graphics + hardware.nvidiaOptimus.disable = true; + services.xserver.videoDrivers = [ "intel" ]; + + # Nvidia Optimus (hybrid) - currently not working + # services.xserver.videoDrivers = [ "intel" "nvidia" ]; + # boot.blacklistedKernelModules = [ "nouveau" "bbswitch" ]; + # boot.extraModulePackages = [ pkgs.linuxPackages.nvidia_x11 ]; + # hardware.bumblebee.enable = true; + # hardware.bumblebee.pmMethod = "none"; + + systemd.services.disable-usb-autosuspend = { + description = "Disable USB autosuspend"; + wantedBy = [ "multi-user.target" ]; + serviceConfig = { Type = "oneshot"; }; + unitConfig.RequiresMountsFor = "/sys"; + script = '' + echo -1 > /sys/module/usbcore/parameters/autosuspend + ''; + }; + + # From hardware-configuration.nix + + fileSystems."/" = + { device = "/dev/mapper/cryptroot"; + fsType = "ext4"; + }; + + fileSystems."/boot" = + { device = "/dev/disk/by-uuid/3492-9E3A"; + fsType = "vfat"; + }; + + swapDevices = + [ { device = "/dev/disk/by-uuid/caa7e2ff-475b-4024-b29e-4f88f733fc4c"; } + ]; + + # High-DPI console + console.font = lib.mkDefault "${pkgs.terminus_font}/share/consolefonts/ter-u28n.psf.gz"; + + # from nixos-hardware TODO sort this around + services.tlp.enable = true; + services.fstrim.enable = lib.mkDefault true; + + # Intel cpu stuff + hardware.opengl.extraPackages = with pkgs; [ + vaapiIntel + vaapiVdpau + libvdpau-va-gl + intel-media-driver + ]; + + services.udev.extraRules = '' + # UDEV rules for Teensy USB devices + ATTRS{idVendor}=="16c0", ATTRS{idProduct}=="04[789B]?", ENV{ID_MM_DEVICE_IGNORE}="1" + ATTRS{idVendor}=="16c0", ATTRS{idProduct}=="04[789A]?", 
ENV{MTP_NO_PROBE}="1" + SUBSYSTEMS=="usb", ATTRS{idVendor}=="16c0", ATTRS{idProduct}=="04[789ABCD]?", MODE:="0666" + KERNEL=="ttyACM*", ATTRS{idVendor}=="16c0", ATTRS{idProduct}=="04[789B]?", MODE:="0666" + ''; + + # Necessary to get steam working + hardware.opengl.driSupport32Bit = true; + + nix = { + maxJobs = lib.mkDefault 12; + binaryCaches = [ "ssh://grfn@172.16.0.5" ]; + trustedBinaryCaches = [ "ssh://grfn@172.16.0.5" ]; + buildMachines = [ + { + hostName = "172.16.0.4"; + sshUser = "griffin"; + sshKey = "/home/grfn/.ssh/id_rsa"; + system = "x86_64-darwin"; + maxJobs = 8; # 16 cpus + } + { + hostName = "172.16.0.3"; + sshUser = "griffin"; + sshKey = "/home/grfn/.ssh/id_rsa"; + system = "x86_64-darwin"; + maxJobs = 4; + } + ]; + }; +} diff --git a/users/glittershark/system/system/machines/mugwump.nix b/users/glittershark/system/system/machines/mugwump.nix new file mode 100644 index 000000000000..64ee36ca72f3 --- /dev/null +++ b/users/glittershark/system/system/machines/mugwump.nix @@ -0,0 +1,212 @@ +{ config, lib, pkgs, modulesPath, ... }: + +with lib; + +{ + imports = [ + ../modules/common.nix + (modulesPath + "/installer/scan/not-detected.nix") + ]; + + networking.hostName = "mugwump"; + + boot = { + loader.systemd-boot.enable = true; + + kernelModules = [ "kvm-intel" ]; + extraModulePackages = [ ]; + + initrd = { + availableKernelModules = [ "xhci_pci" "ehci_pci" "ahci" "usb_storage" "usbhid" "sd_mod" ]; + kernelModules = [ + "uas" "usbcore" "usb_storage" "vfat" "nls_cp437" "nls_iso8859_1" + ]; + + postDeviceCommands = pkgs.lib.mkBefore '' + mkdir -m 0755 -p /key + sleep 2 + mount -n -t vfat -o ro `findfs UUID=9048-A9D5` /key + ''; + + luks.devices."cryptroot" = { + device = "/dev/disk/by-uuid/803a9028-339c-4617-a213-4fe138161f6d"; + keyFile = "/key/keyfile"; + preLVM = false; + }; + }; + }; + + fileSystems = { + "/" = { + device = "/dev/mapper/cryptroot"; + fsType = "btrfs"; + }; + "/boot" = { + device = "/dev/disk/by-uuid/7D74-0E4B"; + fsType = "vfat"; + }; + }; + + networking.interfaces = { + enp0s25.useDHCP = false; + wlp2s0.useDHCP = false; + }; + + networking.firewall.enable = true; + networking.firewall.allowedTCPPorts = [ 22 80 443 ]; + + security.sudo.extraRules = [{ + groups = ["wheel"]; + commands = [{ command = "ALL"; options = ["NOPASSWD"]; }]; + }]; + + nix.gc.dates = "monthly"; + + services.fail2ban = { + enable = true; + ignoreIP = [ + "172.16.0.0/16" + ]; + }; + + services.openssh = { + allowSFTP = false; + passwordAuthentication = false; + permitRootLogin = "no"; + }; + + services.grafana = { + enable = true; + port = 3000; + domain = "metrics.gws.fyi"; + rootUrl = "https://metrics.gws.fyi"; + dataDir = "/var/lib/grafana"; + analytics.reporting.enable = false; + + provision = { + enable = true; + datasources = [{ + name = "Prometheus"; + type = "prometheus"; + url = "http://localhost:9090"; + }]; + }; + }; + + security.acme.email = "root@gws.fyi"; + security.acme.acceptTerms = true; + + services.nginx = { + enable = true; + statusPage = true; + recommendedGzipSettings = true; + recommendedOptimisation = true; + recommendedTlsSettings = true; + + virtualHosts = { + "metrics.gws.fyi" = { + enableACME = true; + forceSSL = true; + locations."/" = { + proxyPass = "http://localhost:${toString config.services.grafana.port}"; + }; + }; + }; + }; + + security.acme.certs."metrics.gws.fyi" = { + dnsProvider = "namecheap"; + credentialsFile = "/etc/secrets/namecheap.env"; + webroot = mkForce null; + }; + + services.prometheus = { + enable = true; + exporters = { + node = { + 
enable = true; + openFirewall = false; + + enabledCollectors = [ + "processes" + "systemd" + "tcpstat" + "wifi" + "textfile" + ]; + + extraFlags = [ + "--collector.textfile.directory=/var/lib/prometheus/node-exporter" + ]; + }; + + nginx = { + enable = true; + openFirewall = true; + sslVerify = false; + constLabels = [ "host=mugwump" ]; + }; + }; + + scrapeConfigs = [{ + job_name = "node"; + scrape_interval = "5s"; + static_configs = [{ + targets = ["localhost:${toString config.services.prometheus.exporters.node.port}"]; + }]; + } { + job_name = "nginx"; + scrape_interval = "5s"; + static_configs = [{ + targets = ["localhost:${toString config.services.prometheus.exporters.nginx.port}"]; + }]; + }]; + }; + + systemd.services."prometheus-fail2ban-exporter" = { + wantedBy = [ "multi-user.target" ]; + after = [ "network.target" "fail2ban.service" ]; + serviceConfig = { + User = "root"; + Type = "oneshot"; + ExecStart = pkgs.writeShellScript "prometheus-fail2ban-exporter" '' + set -eo pipefail + mkdir -p /var/lib/prometheus/node-exporter + exec ${pkgs.python3.withPackages (p: [ + p.prometheus_client + ])}/bin/python ${pkgs.fetchurl { + url = "https://raw.githubusercontent.com/jangrewe/prometheus-fail2ban-exporter/11066950b47bb2dbef96ea8544f76e46ed829e81/fail2ban-exporter.py"; + sha256 = "049lsvw1nj65bbvp8ygyz3743ayzdawrbjixaxmpm03qbrcfmwc4"; + }} + ''; + }; + + path = with pkgs; [ fail2ban ]; + }; + + systemd.timers."prometheus-fail2ban-exporter" = { + wantedBy = [ "multi-user.target" ]; + timerConfig.OnCalendar = "minutely"; + }; + + virtualisation.docker.enable = true; + + services.buildkite-agents = listToAttrs (map (n: rec { + name = "mugwump-${toString n}"; + value = { + inherit name; + enable = true; + tokenPath = "/etc/secrets/buildkite-agent-token"; + privateSshKeyPath = "/etc/secrets/buildkite-ssh-key"; + runtimePackages = with pkgs; [ + docker + nix + gnutar + gzip + ]; + }; + }) (range 1 1)); + + users.users."buildkite-agent-mugwump-1".extraGroups = [ "docker" ]; + users.users."buildkite-agent-mugwump-2".extraGroups = [ "docker" ]; +} diff --git a/users/glittershark/system/system/machines/roswell.nix b/users/glittershark/system/system/machines/roswell.nix new file mode 100644 index 000000000000..c0ed2264a0e0 --- /dev/null +++ b/users/glittershark/system/system/machines/roswell.nix @@ -0,0 +1,17 @@ +{ config, lib, pkgs, modulesPath, ... }: + +{ + imports = [ + ../modules/common.nix + "${modulesPath}/installer/scan/not-detected.nix" + "${modulesPath}/virtualisation/amazon-image.nix" + ]; + + ec2.hvm = true; + + networking.hostName = "roswell"; + + users.users.grfn.openssh.authorizedKeys.keys = [ + config.depot.users.glittershark.keys.main + ]; +} diff --git a/users/glittershark/system/system/machines/yeren.nix b/users/glittershark/system/system/machines/yeren.nix new file mode 100644 index 000000000000..29dde88bbedb --- /dev/null +++ b/users/glittershark/system/system/machines/yeren.nix @@ -0,0 +1,121 @@ +{ modulesPath, config, lib, pkgs, ... 
}: + +{ + imports = [ + (modulesPath + "/installer/scan/not-detected.nix") + ../modules/common.nix + ../modules/laptop.nix + ../modules/xserver.nix + ../modules/fonts.nix + ../modules/sound.nix + ../modules/tvl.nix + ../modules/development.nix + ]; + + networking.hostName = "yeren"; + + system.stateVersion = "21.03"; + + boot = { + initrd = { + availableKernelModules = [ "xhci_pci" "thunderbolt" "nvme" "usb_storage" "sd_mod" "rtsx_pci_sdmmc" ]; + kernelModules = [ ]; + + luks.devices = { + "cryptroot".device = "/dev/disk/by-uuid/dcfbc22d-e0d2-411b-8dd3-96704d3aae2e"; + "cryptswap".device = "/dev/disk/by-uuid/48b8a8fd-559c-4759-a617-56f221cfaaec"; + }; + }; + + kernelPackages = pkgs.linuxPackages_latest; + kernelModules = [ "kvm-intel" ]; + blacklistedKernelModules = [ "psmouse" ]; + extraModulePackages = [ ]; + kernelParams = [ + "i915.preliminary_hw_support=1" + ]; + + # https://bbs.archlinux.org/viewtopic.php?pid=1933643#p1933643 + extraModprobeConfig = '' + options snd-intel-dspcfg dsp_driver=1 + ''; + }; + + fileSystems = { + "/" = { + device = "/dev/mapper/cryptroot"; + fsType = "btrfs"; + }; + + "/boot" = { + device = "/dev/disk/by-uuid/53A9-248B"; + fsType = "vfat"; + }; + }; + + swapDevices = [{ device = "/dev/mapper/cryptswap"; }]; + + services.xserver = { + exportConfiguration = true; + extraConfig = '' + Section "Device" + Identifier "Intel Graphics" + Driver "intel" + Option "TripleBuffer" "true" + Option "TearFree" "true" + Option "DRI" "true" + Option "AccelMethod" "sna" + EndSection + ''; + }; + + hardware.firmware = with pkgs; [ + alsa-firmware + sof-firmware + ]; + + hardware.opengl.extraPackages = with pkgs; [ + vaapiIntel + vaapiVdpau + libvdpau-va-gl + intel-media-driver + ]; + + # the fprintd module hardcodes pkgs.fprintd :'( + nixpkgs.overlays = [(_: _: { + fprintd = config.depot.users.glittershark.pkgs.fprintd; + })]; + + services.fprintd = { + enable = true; + package = config.depot.users.glittershark.pkgs.fprintd; + }; + + security.pam.loginLimits = [ + { + domain = "grfn"; + type = "soft"; + item = "nofile"; + value = "65535"; + } + ]; + + security.pam.services = { + login.fprintAuth = true; + sudo.fprintAuth = true; + i3lock.fprintAuth = false; + i3lock-color.fprintAuth = false; + lightdm.fprintAuth = true; + lightdm-greeter.fprintAuth = true; + }; + + hardware.opengl.driSupport32Bit = true; + + hardware.pulseaudio.extraConfig = '' + load-module module-remap-source source_name=KompleteAudio6_1 source_properties=device.description=KompleteAudio6Input1 master=alsa_input.usb-Native_Instruments_Komplete_Audio_6_458E0FFD-00.multichannel-input remix=no channels=1 master_channel_map=front-left channel_map=mono + load-module module-remap-source source_name=KompleteAudio6_2 source_properties=device.description=KompleteAudio6Input2 master=alsa_input.usb-Native_Instruments_Komplete_Audio_6_458E0FFD-00.multichannel-input remix=no channels=1 master_channel_map=front-right channel_map=mono + load-module module-remap-sink sink_name=KompleteAudio6_12 sink_properties=device.description=KompleteAudio6_12 remix=no master=alsa_output.usb-Native_Instruments_Komplete_Audio_6_458E0FFD-00.analog-surround-21 channels=2 master_channel_map=front-left,front-right channel_map=front-left,front-right + ''; + + services.fwupd.enable = true; +} diff --git a/users/glittershark/system/system/modules/common.nix b/users/glittershark/system/system/modules/common.nix new file mode 100644 index 000000000000..0eab590660a4 --- /dev/null +++ b/users/glittershark/system/system/modules/common.nix @@ -0,0 
+1,78 @@ +{ config, lib, pkgs, ... }: + +let + + depot = import ../../../../.. {}; + +in + +with lib; + +{ + boot.loader.systemd-boot.enable = true; + boot.loader.efi.canTouchEfiVariables = true; + + networking.useDHCP = false; + networking.networkmanager.enable = true; + + i18n = { + defaultLocale = "en_US.UTF-8"; + }; + + time.timeZone = "America/New_York"; + + environment.systemPackages = with pkgs; [ + wget + vim + zsh + git + w3m + libnotify + file + lm_sensors + dnsutils + depot.users.glittershark.system.system.rebuilder + htop + ]; + + services.openssh.enable = true; + + programs.ssh.startAgent = true; + + networking.firewall.enable = mkDefault false; + + users.mutableUsers = true; + programs.zsh.enable = true; + environment.pathsToLink = [ "/share/zsh" ]; + users.users.grfn = { + isNormalUser = true; + initialPassword = "password"; + extraGroups = [ + "wheel" + "networkmanager" + "audio" + "docker" + ]; + shell = pkgs.zsh; + }; + + nixpkgs.config.allowUnfree = true; + + nix = { + trustedUsers = [ "grfn" ]; + autoOptimiseStore = true; + distributedBuilds = true; + + gc = { + automatic = true; + dates = mkDefault "weekly"; + options = "--delete-older-than 30d"; + }; + }; + + services.udev.packages = with pkgs; [ + yubikey-personalization + ]; + + services.pcscd.enable = true; +} diff --git a/users/glittershark/system/system/modules/desktop.nix b/users/glittershark/system/system/modules/desktop.nix new file mode 100644 index 000000000000..3adbd9d9b07f --- /dev/null +++ b/users/glittershark/system/system/modules/desktop.nix @@ -0,0 +1,19 @@ +{ config, lib, pkgs, ... }: + +{ + imports = [ + ./xserver.nix + ./fonts.nix + ./sound.nix + ./kernel.nix + ]; + + programs.nm-applet.enable = true; + + users.users.grfn.extraGroups = [ + "audio" + "video" + ]; + + services.geoclue2.enable = true; +} diff --git a/users/glittershark/system/system/modules/development.nix b/users/glittershark/system/system/modules/development.nix new file mode 100644 index 000000000000..bfa0e22cff0a --- /dev/null +++ b/users/glittershark/system/system/modules/development.nix @@ -0,0 +1,6 @@ +{ config, lib, pkgs, ... }: + +{ + virtualisation.docker.enable = true; + users.users.grfn.extraGroups = [ "docker" ]; +} diff --git a/users/glittershark/system/system/modules/fcitx.nix b/users/glittershark/system/system/modules/fcitx.nix new file mode 100644 index 000000000000..812f598f9f47 --- /dev/null +++ b/users/glittershark/system/system/modules/fcitx.nix @@ -0,0 +1,10 @@ +{ config, lib, pkgs, ... }: + +{ + i18n.inputMethod = { + enabled = "fcitx"; + fcitx.engines = with pkgs.fcitx-engines; [ + cloudpinyin + ]; + }; +} diff --git a/users/glittershark/system/system/modules/fonts.nix b/users/glittershark/system/system/modules/fonts.nix new file mode 100644 index 000000000000..babe30d4271f --- /dev/null +++ b/users/glittershark/system/system/modules/fonts.nix @@ -0,0 +1,12 @@ +{ config, lib, pkgs, ... }: +{ + fonts = { + fonts = with pkgs; [ + nerdfonts + noto-fonts-emoji + twitter-color-emoji + ]; + + fontconfig.defaultFonts.emoji = ["Twitter Color Emoji"]; + }; +} diff --git a/users/glittershark/system/system/modules/kernel.nix b/users/glittershark/system/system/modules/kernel.nix new file mode 100644 index 000000000000..5c5ff8551594 --- /dev/null +++ b/users/glittershark/system/system/modules/kernel.nix @@ -0,0 +1,39 @@ +{ config, lib, pkgs, ... 
}: +with lib.versions; +let + inherit (pkgs) stdenvNoCC; + kernelRelease = config.boot.kernelPackages.kernel.version; + mj = major kernelRelease; + mm = majorMinor kernelRelease; + patched-linux-ck = stdenvNoCC.mkDerivation { + name = "linux-ck"; + src = builtins.fetchurl { + name = "linux-ck-patch-${mm}-ck1.xz"; + # example: http://ck.kolivas.org/patches/5.0/5.4/5.4-ck1/patch-5.4-ck1.xz + url = "http://ck.kolivas.org/patches/${mj}.0/${mm}/${mm}-ck1/patch-${mm}-ck1.xz"; + sha256 = "14lfpq9hvq1amxrl0ayfid1d04kd35vwsvk1ppnqa87nqfkjq47c"; + }; + + unpackPhase = '' + ${pkgs.xz}/bin/unxz -kfdc $src > patch-${mm}-ck1 + ''; + + installPhase = '' + cp patch-${mm}-ck1 $out + ''; + }; +in +{ + boot.kernelPackages = pkgs.linuxPackages_5_11.extend (self: super: { + kernel = super.kernel.override { + ignoreConfigErrors = true; + kernelPatches = super.kernel.kernelPatches ++ [{ + name = "linux-ck"; + patch = patched-linux-ck; + }]; + argsOverride = { + modDirVersion = super.kernel.modDirVersion + "-ck1"; + }; + }; + }); +} diff --git a/users/glittershark/system/system/modules/laptop.nix b/users/glittershark/system/system/modules/laptop.nix new file mode 100644 index 000000000000..8c09f3a44266 --- /dev/null +++ b/users/glittershark/system/system/modules/laptop.nix @@ -0,0 +1,13 @@ +{ config, lib, pkgs, ... }: + +{ + imports = [ + ./reusable/battery.nix + ]; + + laptop.onLowBattery.enable = true; + + services.logind.extraConfig = '' + HandlePowerKey=hibernate + ''; +} diff --git a/users/glittershark/system/system/modules/reusable/README.org b/users/glittershark/system/system/modules/reusable/README.org new file mode 100644 index 000000000000..34d9bfdcb729 --- /dev/null +++ b/users/glittershark/system/system/modules/reusable/README.org @@ -0,0 +1,2 @@ +This directory contains things I'm eventually planning on contributing upstream +to nixpkgs diff --git a/users/glittershark/system/system/modules/reusable/battery.nix b/users/glittershark/system/system/modules/reusable/battery.nix new file mode 100644 index 000000000000..ca92e0c3f61c --- /dev/null +++ b/users/glittershark/system/system/modules/reusable/battery.nix @@ -0,0 +1,32 @@ +{ config, lib, pkgs, ... }: +with lib; +{ + options = { + laptop.onLowBattery = { + enable = mkEnableOption "Perform action on low battery"; + + thresholdPercentage = mkOption { + description = "Threshold battery percentage on which to perform the action"; + default = 8; + type = types.int; + }; + + action = mkOption { + description = "Action to perform on low battery"; + default = "hibernate"; + type = types.enum [ "hibernate" "suspend" "suspend-then-hibernate" ]; + }; + }; + }; + + config = + let cfg = config.laptop.onLowBattery; + in mkIf cfg.enable { + services.udev.extraRules = concatStrings [ + ''SUBSYSTEM=="power_supply", '' + ''ATTR{status}=="Discharging", '' + ''ATTR{capacity}=="[0-${toString cfg.thresholdPercentage}]", '' + ''RUN+="${pkgs.systemd}/bin/systemctl ${cfg.action}"'' + ]; + }; +} diff --git a/users/glittershark/system/system/modules/rtlsdr.nix b/users/glittershark/system/system/modules/rtlsdr.nix new file mode 100644 index 000000000000..ce58ebb0dcda --- /dev/null +++ b/users/glittershark/system/system/modules/rtlsdr.nix @@ -0,0 +1,17 @@ +{ config, lib, pkgs, ... 
}: + +{ + + environment.systemPackages = with pkgs; [ + rtl-sdr + ]; + + services.udev.packages = with pkgs; [ + rtl-sdr + ]; + + # blacklist for rtl-sdr + boot.blacklistedKernelModules = [ + "dvb_usb_rtl28xxu" + ]; +} diff --git a/users/glittershark/system/system/modules/sound.nix b/users/glittershark/system/system/modules/sound.nix new file mode 100644 index 000000000000..15c8dd5e3f92 --- /dev/null +++ b/users/glittershark/system/system/modules/sound.nix @@ -0,0 +1,16 @@ +{ config, lib, pkgs, ... }: +{ + # Enable sound. + sound.enable = true; + hardware.pulseaudio.enable = true; + nixpkgs.config.pulseaudio = true; + + environment.systemPackages = with pkgs; [ + pulseaudio-ctl + paprefs + pasystray + pavucontrol + ]; + + hardware.pulseaudio.package = pkgs.pulseaudioFull; +} diff --git a/users/glittershark/system/system/modules/tvl.nix b/users/glittershark/system/system/modules/tvl.nix new file mode 100644 index 000000000000..905ec8ced537 --- /dev/null +++ b/users/glittershark/system/system/modules/tvl.nix @@ -0,0 +1,37 @@ +{ config, lib, pkgs, ... }: + +{ + nix = { + buildMachines = [{ + hostName = "whitby.tvl.fyi"; + sshUser = "grfn"; + sshKey = "/root/.ssh/id_rsa"; + system = "x86_64-linux"; + maxJobs = 64; + supportedFeatures = ["big-parallel" "kvm" "nixos-test" "benchmark"]; + }]; + + extraOptions = '' + builders-use-substitutes = true + ''; + + binaryCaches = [ + "https://cache.nixos.org" + "ssh://nix-ssh@whitby.tvl.fyi" + ]; + trustedBinaryCaches = [ + "https://cache.nixos.org" + "ssh://nix-ssh@whitby.tvl.fyi" + ]; + binaryCachePublicKeys = [ + "cache.tvl.fyi:fd+9d1ceCPvDX/xVhcfv8nAa6njEhAGAEe+oGJDEeoc=" + ]; + }; + + programs.ssh.knownHosts.whitby = { + hostNames = [ "whitby" "whitby.tvl.fyi" "49.12.129.211"]; + publicKeyFile = pkgs.writeText "whitby.pub" '' + ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAILNh/w4BSKov0jdz3gKBc98tpoLta5bb87fQXWBhAl2I + ''; + }; +} diff --git a/users/glittershark/system/system/modules/xserver.nix b/users/glittershark/system/system/modules/xserver.nix new file mode 100644 index 000000000000..35ee44112ea1 --- /dev/null +++ b/users/glittershark/system/system/modules/xserver.nix @@ -0,0 +1,16 @@ +{ config, pkgs, ... }: +{ + # Enable the X11 windowing system. 
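+  # US layout, libinput for input devices, and i3 as the window manager
+  # ("none+i3" = no desktop environment).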
+ services.xserver = { + enable = true; + layout = "us"; + + libinput.enable = true; + + displayManager = { + defaultSession = "none+i3"; + }; + + windowManager.i3.enable = true; + }; +} diff --git a/users/glittershark/wigglydonke.rs/index.html b/users/glittershark/wigglydonke.rs/index.html new file mode 100644 index 000000000000..4fd7f25fcf8c --- /dev/null +++ b/users/glittershark/wigglydonke.rs/index.html @@ -0,0 +1,16 @@ +<!DOCTYPE html> +<html> + <head> + <title>Kids Love Wiggly Donkers!</title> + <style type="text/css"> + #wiggly-donkers { + width: 100%; + } + </style> + </head> + <body> + <a href="https://tvl.fyi"> + <img src="/wd.png" id="wiggly-donkers"/> + </a> + </body> +</html> diff --git a/users/glittershark/wigglydonke.rs/wd.png b/users/glittershark/wigglydonke.rs/wd.png new file mode 100644 index 000000000000..217443e2df82 --- /dev/null +++ b/users/glittershark/wigglydonke.rs/wd.png Binary files differdiff --git a/users/glittershark/xanthous/.envrc b/users/glittershark/xanthous/.envrc new file mode 100644 index 000000000000..be81feddb1a5 --- /dev/null +++ b/users/glittershark/xanthous/.envrc @@ -0,0 +1 @@ +eval "$(lorri direnv)" \ No newline at end of file diff --git a/users/glittershark/xanthous/.github/actions/nix-build/Dockerfile b/users/glittershark/xanthous/.github/actions/nix-build/Dockerfile new file mode 100644 index 000000000000..cfe8e35df091 --- /dev/null +++ b/users/glittershark/xanthous/.github/actions/nix-build/Dockerfile @@ -0,0 +1,23 @@ +FROM lnl7/nix:2.1.2 + +LABEL name="Nix Build for GitHub Actions" +LABEL version="1.0" +LABEL repository="http://github.com/glittershark/xanthous" +LABEL homepage="http://github.com/glittershark/xanthous" +LABEL maintainer="Griffin Smith <root at gws dot fyi>" + +LABEL "com.github.actions.name"="Nix Build" +LABEL "com.github.actions.description"="Runs 'nix-build'" +LABEL "com.github.actions.icon"="cpu" +LABEL "com.github.actions.color"="purple" + +RUN nix-env -iA \ + nixpkgs.gnutar nixpkgs.gzip \ + nixpkgs.gnugrep nixpkgs.git && \ + mkdir -p /etc/nix && \ + (echo "binary-caches = https://cache.nixos.org/" | tee -a /etc/nix/nix.conf) && \ + (echo "trusted-public-keys = cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY=" | tee -a /etc/nix/nix.conf) + +COPY entrypoint.sh /entrypoint.sh +ENTRYPOINT [ "/entrypoint.sh" ] +CMD [ "--help" ] diff --git a/users/glittershark/xanthous/.github/actions/nix-build/entrypoint.sh b/users/glittershark/xanthous/.github/actions/nix-build/entrypoint.sh new file mode 100755 index 000000000000..cb7aca541a3f --- /dev/null +++ b/users/glittershark/xanthous/.github/actions/nix-build/entrypoint.sh @@ -0,0 +1,24 @@ +#!/usr/bin/env bash + +# Entrypoint that runs nix-build and, optionally, copies Docker image tarballs +# to real files. The reason this is necessary is because once a Nix container +# exits, you must copy out the artifacts to the working directory before exit. + +[ "$DEBUG" = "1" ] && set -x +[ "$QUIET" = "1" ] && QUIET_ARG="-Q" + +set -e + +# file to build (e.g. release.nix) +file="$1" + +[ "$file" = "" ] && echo "No .nix file to build specified!" && exit 1 +[ ! -e "$file" ] && echo "File $file not exist!" && exit 1 + +echo "Building all attrs in $file..." +nix-build --no-link ${QUIET_ARG} "$file" "${@:2}" + +echo "Copying build closure to $(pwd)/store..." 
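+# nix-build prints one store path per line; save the list as store.roots and
+# copy those paths into a local file:// binary cache under ./store.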
+mapfile -t storePaths < <(nix-build ${QUIET_ARG} --no-link "$file" | grep -v cache-deps) +printf '%s\n' "${storePaths[@]}" > store.roots +nix copy --to "file://$(pwd)/store" "${storePaths[@]}" diff --git a/users/glittershark/xanthous/.github/workflows/haskell.yml b/users/glittershark/xanthous/.github/workflows/haskell.yml new file mode 100644 index 000000000000..df82de3e8caf --- /dev/null +++ b/users/glittershark/xanthous/.github/workflows/haskell.yml @@ -0,0 +1,15 @@ +name: Haskell CI + +on: [push] + +jobs: + build: + + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v1 + - name: Nix Build + with: + args: default.nix --arg failOnWarnings true + uses: ./.github/actions/nix-build diff --git a/users/glittershark/xanthous/.gitignore b/users/glittershark/xanthous/.gitignore new file mode 100644 index 000000000000..74014978ffac --- /dev/null +++ b/users/glittershark/xanthous/.gitignore @@ -0,0 +1,34 @@ +dist +dist-* +cabal-dev +*.o +*.hi +*.hie +*.chi +*.chs.h +*.dyn_o +*.dyn_hi +.hpc +.hsenv +.cabal-sandbox/ +cabal.sandbox.config +*.prof +*.aux +*.hp +*.eventlog +.stack-work/ +cabal.project.local +cabal.project.local~ +.HTF/ +.ghc.environment.* + + +# from nix-build +result + +# grr +*_flymake.hs + +# app-specific +debug.log +data diff --git a/users/glittershark/xanthous/LICENSE b/users/glittershark/xanthous/LICENSE new file mode 100644 index 000000000000..45644ff76449 --- /dev/null +++ b/users/glittershark/xanthous/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/> + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. 
+ + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. 
If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. 
+ + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. 
+ + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. 
+ + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. 
+ + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. 
+ + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + <one line to give the program's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see <http://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + <program> Copyright (C) <year> <name of author> + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +<http://www.gnu.org/licenses/>. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +<http://www.gnu.org/philosophy/why-not-lgpl.html>. diff --git a/users/glittershark/xanthous/README.org b/users/glittershark/xanthous/README.org new file mode 100644 index 000000000000..7e1fedb069b1 --- /dev/null +++ b/users/glittershark/xanthous/README.org @@ -0,0 +1,36 @@ +#+TITLE: Xanthous + +* Building + +#+BEGIN_SRC shell +$ nix build +#+END_SRC + +* Running + +#+BEGIN_SRC shell +$ ./result/bin/xanthous [--help] +#+END_SRC + +** Keyboard commands + +Keyboard commands are currently undocumented, but can be found in [[[https://github.com/glittershark/xanthous/blob/master/src/Xanthous/Command.hs#L26][this file]]. +Movement uses the nethack-esque hjklybnu. 
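+
+For orientation, here is a minimal sketch of what such a key-to-command
+table can look like. The command names mirror the ones handled in
+~App.hs~; the ~Direction~ constructors, the ~commandFromChar~ helper and
+the concrete key assignments are illustrative assumptions, not the actual
+contents of ~Command.hs~.
+
+#+BEGIN_SRC haskell
+-- Sketch only: the key assignments below are assumed, not the game's real bindings.
+data Direction
+  = North | South | East | West
+  | NorthWest | NorthEast | SouthWest | SouthEast
+  deriving Show
+
+data Command = Quit | PickUp | Open | Close | Wait | Move Direction
+  deriving Show
+
+-- hjklyubn movement in the usual nethack layout, plus a couple of other keys
+commandFromChar :: Char -> Maybe Command
+commandFromChar 'h' = Just (Move West)
+commandFromChar 'j' = Just (Move South)
+commandFromChar 'k' = Just (Move North)
+commandFromChar 'l' = Just (Move East)
+commandFromChar 'y' = Just (Move NorthWest)
+commandFromChar 'u' = Just (Move NorthEast)
+commandFromChar 'b' = Just (Move SouthWest)
+commandFromChar 'n' = Just (Move SouthEast)
+commandFromChar 'q' = Just Quit    -- assumed binding
+commandFromChar ',' = Just PickUp  -- assumed binding
+commandFromChar _   = Nothing
+
+main :: IO ()
+main = mapM_ (print . commandFromChar) "hjklyubnq,"
+#+END_SRC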
+ +* Development + +Use [[https://github.com/target/lorri][lorri]], or run everything in a ~nix-shell~ + +#+BEGIN_SRC shell +# Build (for dev) +$ cabal new-build + +# Run the game +$ cabal new-run xanthous + +# Run tests +$ cabal new-run test + +# Run a repl +$ cabal new-repl +#+END_SRC diff --git a/users/glittershark/xanthous/Setup.hs b/users/glittershark/xanthous/Setup.hs new file mode 100644 index 000000000000..9a994af677b0 --- /dev/null +++ b/users/glittershark/xanthous/Setup.hs @@ -0,0 +1,2 @@ +import Distribution.Simple +main = defaultMain diff --git a/users/glittershark/xanthous/bench/Bench.hs b/users/glittershark/xanthous/bench/Bench.hs new file mode 100644 index 000000000000..5889618ee432 --- /dev/null +++ b/users/glittershark/xanthous/bench/Bench.hs @@ -0,0 +1,12 @@ +-------------------------------------------------------------------------------- +module Main where +-------------------------------------------------------------------------------- +import Bench.Prelude +-------------------------------------------------------------------------------- +import qualified Xanthous.RandomBench +import qualified Xanthous.Generators.UtilBench + +main :: IO () +main = defaultMain + [ Xanthous.Generators.UtilBench.benchmark + ] diff --git a/users/glittershark/xanthous/bench/Bench/Prelude.hs b/users/glittershark/xanthous/bench/Bench/Prelude.hs new file mode 100644 index 000000000000..c553abd6d5d0 --- /dev/null +++ b/users/glittershark/xanthous/bench/Bench/Prelude.hs @@ -0,0 +1,9 @@ +-------------------------------------------------------------------------------- +module Bench.Prelude + ( module Xanthous.Prelude + , module Criterion.Main + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude +import Criterion.Main +-------------------------------------------------------------------------------- diff --git a/users/glittershark/xanthous/bench/Xanthous/Generators/UtilBench.hs b/users/glittershark/xanthous/bench/Xanthous/Generators/UtilBench.hs new file mode 100644 index 000000000000..56310e691c33 --- /dev/null +++ b/users/glittershark/xanthous/bench/Xanthous/Generators/UtilBench.hs @@ -0,0 +1,37 @@ +-------------------------------------------------------------------------------- +module Xanthous.Generators.UtilBench (benchmark, main) where +-------------------------------------------------------------------------------- +import Bench.Prelude +-------------------------------------------------------------------------------- +import Data.Array.IArray +import Data.Array.Unboxed +import System.Random (getStdGen) +-------------------------------------------------------------------------------- +import Xanthous.Generators.Util +import qualified Xanthous.Generators.CaveAutomata as CaveAutomata +import Xanthous.Data (Dimensions'(..)) +-------------------------------------------------------------------------------- + +main :: IO () +main = defaultMain [benchmark] + +-------------------------------------------------------------------------------- + +benchmark :: Benchmark +benchmark = bgroup "Generators.Util" + [ bgroup "floodFill" + [ env (NFWrapper <$> cells) $ \(NFWrapper ir) -> + bench "checkerboard" $ nf (floodFill ir) (1,0) + ] + ] + where + cells :: IO Cells + cells = CaveAutomata.generate + CaveAutomata.defaultParams + (Dimensions 50 50) + <$> getStdGen + +newtype NFWrapper a = NFWrapper a + +instance NFData (NFWrapper a) where + rnf (NFWrapper x) = x `seq` () diff --git a/users/glittershark/xanthous/bench/Xanthous/RandomBench.hs 
b/users/glittershark/xanthous/bench/Xanthous/RandomBench.hs new file mode 100644 index 000000000000..fae4af92a7a5 --- /dev/null +++ b/users/glittershark/xanthous/bench/Xanthous/RandomBench.hs @@ -0,0 +1,32 @@ +-------------------------------------------------------------------------------- +module Xanthous.RandomBench (benchmark, main) where +-------------------------------------------------------------------------------- +import Bench.Prelude +-------------------------------------------------------------------------------- +import Control.Parallel.Strategies +import Control.Monad.Random +-------------------------------------------------------------------------------- +import Xanthous.Random +-------------------------------------------------------------------------------- + +main :: IO () +main = defaultMain [benchmark] + +-------------------------------------------------------------------------------- + +benchmark :: Benchmark +benchmark = bgroup "Random" + [ bgroup "chooseSubset" + [ bench "serially" $ + nf (evalRand $ chooseSubset (0.5 :: Double) [1 :: Int ..1000000]) + (mkStdGen 1234) + ] + , bgroup "choose weightedBy" + [ bench "serially" $ + nf (evalRand + . choose + . weightedBy (\n -> product [n, pred n .. 1]) + $ [1 :: Int ..1000000]) + (mkStdGen 1234) + ] + ] diff --git a/users/glittershark/xanthous/build/generic-arbitrary-export-garbitrary.patch b/users/glittershark/xanthous/build/generic-arbitrary-export-garbitrary.patch new file mode 100644 index 000000000000..f0c936bfca18 --- /dev/null +++ b/users/glittershark/xanthous/build/generic-arbitrary-export-garbitrary.patch @@ -0,0 +1,12 @@ +diff --git a/src/Test/QuickCheck/Arbitrary/Generic.hs b/src/Test/QuickCheck/Arbitrary/Generic.hs +index fed6ab3..91f59f1 100644 +--- a/src/Test/QuickCheck/Arbitrary/Generic.hs ++++ b/src/Test/QuickCheck/Arbitrary/Generic.hs +@@ -23,6 +23,7 @@ The generated 'arbitrary' method is equivalent to + + module Test.QuickCheck.Arbitrary.Generic + ( Arbitrary(..) ++ , GArbitrary + , genericArbitrary + , genericShrink + ) where diff --git a/users/glittershark/xanthous/build/hgeometry-fix-haddock.patch b/users/glittershark/xanthous/build/hgeometry-fix-haddock.patch new file mode 100644 index 000000000000..748c65b3e0db --- /dev/null +++ b/users/glittershark/xanthous/build/hgeometry-fix-haddock.patch @@ -0,0 +1,13 @@ +diff --git a/src/Data/Geometry/PlanarSubdivision/Merge.hs b/src/Data/Geometry/PlanarSubdivision/Merge.hs +index 1136114..3f4e7bb 100644 +--- a/src/Data/Geometry/PlanarSubdivision/Merge.hs ++++ b/src/Data/Geometry/PlanarSubdivision/Merge.hs +@@ -153,7 +153,7 @@ mergeWith' mergeFaces p1 p2 = PlanarSubdivision cs vd rd rf + -- we have to shift the number of the *Arcs*. 
Since every dart + -- consists of two arcs, we have to shift by numDarts / 2 + -- Furthermore, we take numFaces - 1 since we want the first +- -- *internal* face of p2 (the one with FaceId 1) to correspond with the first free ++ -- /internal/ face of p2 (the one with FaceId 1) to correspond with the first free + -- position (at index numFaces) + + cs = p1^.components <> p2'^.components diff --git a/users/glittershark/xanthous/build/update-comonad-extras.patch b/users/glittershark/xanthous/build/update-comonad-extras.patch new file mode 100644 index 000000000000..cd1dbe24d361 --- /dev/null +++ b/users/glittershark/xanthous/build/update-comonad-extras.patch @@ -0,0 +1,92 @@ +diff --git a/comonad-extras.cabal b/comonad-extras.cabal +index fc3745a..77a2f0d 100644 +--- a/comonad-extras.cabal ++++ b/comonad-extras.cabal +@@ -1,7 +1,7 @@ + name: comonad-extras + category: Control, Comonads +-version: 4.0 ++version: 5.0 + x-revision: 1 + license: BSD3 + cabal-version: >= 1.6 + license-file: LICENSE +@@ -34,8 +34,8 @@ library + build-depends: + array >= 0.3 && < 0.6, +- base >= 4 && < 4.7, +- containers >= 0.4 && < 0.6, +- comonad >= 4 && < 5, ++ base >= 4 && < 5, ++ containers >= 0.6 && < 0.7, ++ comonad >= 5 && < 6, + distributive >= 0.3.2 && < 1, +- semigroupoids >= 4 && < 5, +- transformers >= 0.2 && < 0.4 ++ semigroupoids >= 5 && < 6, ++ transformers >= 0.5 && < 0.6 + + exposed-modules: + Control.Comonad.Store.Zipper +diff --git a/src/Control/Comonad/Store/Pointer.hs b/src/Control/Comonad/Store/Pointer.hs +index 5044a1e..8d4c62d 100644 +--- a/src/Control/Comonad/Store/Pointer.hs ++++ b/src/Control/Comonad/Store/Pointer.hs +@@ -41,7 +41,6 @@ module Control.Comonad.Store.Pointer + , module Control.Comonad.Store.Class + ) where + +-import Control.Applicative + import Control.Comonad + import Control.Comonad.Hoist.Class + import Control.Comonad.Trans.Class +@@ -51,27 +50,8 @@ import Control.Comonad.Env.Class + import Data.Functor.Identity + import Data.Functor.Extend + import Data.Array +- + #ifdef __GLASGOW_HASKELL__ + import Data.Typeable +-instance (Typeable i, Typeable1 w) => Typeable1 (PointerT i w) where +- typeOf1 diwa = mkTyConApp storeTTyCon [typeOf (i diwa), typeOf1 (w diwa)] +- where +- i :: PointerT i w a -> i +- i = undefined +- w :: PointerT i w a -> w a +- w = undefined +- +-instance (Typeable i, Typeable1 w, Typeable a) => Typeable (PointerT i w a) where +- typeOf = typeOfDefault +- +-storeTTyCon :: TyCon +-#if __GLASGOW_HASKELL__ < 704 +-storeTTyCon = mkTyCon "Control.Comonad.Trans.Store.Pointer.PointerT" +-#else +-storeTTyCon = mkTyCon3 "comonad-extras" "Control.Comonad.Trans.Store.Pointer" "PointerT" +-#endif +-{-# NOINLINE storeTTyCon #-} + #endif + + type Pointer i = PointerT i Identity +@@ -83,6 +63,9 @@ runPointer :: Pointer i a -> (Array i a, i) + runPointer (PointerT (Identity f) i) = (f, i) + + data PointerT i w a = PointerT (w (Array i a)) i ++#ifdef __GLASGOW_HASKELL__ ++ deriving Typeable ++#endif + + runPointerT :: PointerT i w a -> (w (Array i a), i) + runPointerT (PointerT g i) = (g, i) +diff --git a/src/Control/Comonad/Store/Zipper.hs b/src/Control/Comonad/Store/Zipper.hs +index 3b70c86..decc378 100644 +--- a/src/Control/Comonad/Store/Zipper.hs ++++ b/src/Control/Comonad/Store/Zipper.hs +@@ -15,7 +15,6 @@ + module Control.Comonad.Store.Zipper + ( Zipper, zipper, zipper1, unzipper, size) where + +-import Control.Applicative + import Control.Comonad (Comonad(..)) + import Data.Functor.Extend + import Data.Foldable diff --git a/users/glittershark/xanthous/default.nix 
b/users/glittershark/xanthous/default.nix new file mode 100644 index 000000000000..8dfd0bce4bd0 --- /dev/null +++ b/users/glittershark/xanthous/default.nix @@ -0,0 +1,10 @@ +{ pkgs ? (import ../../../. {}).third_party +, lib ? pkgs.lib +, ... +}: +(pkgs.haskell.lib.failOnAllWarnings ( + pkgs.haskellPackages.callPackage (import ./pkg.nix { inherit pkgs; }) {} +)) // { + # TODO(grfn): Get this passing (see https://buildkite.com/tvl/depot/builds/3055) + meta.ci = false; +} diff --git a/users/glittershark/xanthous/hie.yaml b/users/glittershark/xanthous/hie.yaml new file mode 100644 index 000000000000..49f8ec1fbb3d --- /dev/null +++ b/users/glittershark/xanthous/hie.yaml @@ -0,0 +1,10 @@ +cradle: + cabal: + - path: './src' + component: 'lib:xanthous' + - path: './test' + component: 'test:test' + - path: './src' + component: 'exe:xanthous' + - path: './bench' + component: 'bench:benchmark' diff --git a/users/glittershark/xanthous/nixpkgs.nix b/users/glittershark/xanthous/nixpkgs.nix new file mode 100644 index 000000000000..7d7c16440545 --- /dev/null +++ b/users/glittershark/xanthous/nixpkgs.nix @@ -0,0 +1,3 @@ +args: +let pkgs = (import ../../../. args).third_party; +in pkgs // { inherit pkgs; } diff --git a/users/glittershark/xanthous/package.yaml b/users/glittershark/xanthous/package.yaml new file mode 100644 index 000000000000..e954374f88d9 --- /dev/null +++ b/users/glittershark/xanthous/package.yaml @@ -0,0 +1,152 @@ +name: xanthous +version: 0.1.0.0 +github: "glittershark/xanthous" +license: GPL-3 +author: "Griffin Smith" +maintainer: "root@gws.fyi" +copyright: "2019 Griffin Smith" + +extra-source-files: +- README.org + +synopsis: A WIP TUI RPG +category: Game + +description: Please see the README on GitHub at <https://github.com/glittershark/xanthous> + +dependencies: +- base + +- aeson +- array +- async +- QuickCheck +- quickcheck-text +- quickcheck-instances +- brick +- bifunctors +- checkers +- classy-prelude +- comonad +- comonad-extras +- constraints +- containers +- criterion +- data-default +- deepseq +- directory +- fgl +- fgl-arbitrary +- file-embed +- filepath +- generic-arbitrary +- generic-monoid +- generic-lens +- groups +- hgeometry +- hgeometry-combinatorial +- JuicyPixels +- lens +- lifted-async +- linear +- megaparsec +- mmorph +- monad-control +- MonadRandom +- mtl +- optparse-applicative +- parallel +- parser-combinators +- pointed +- random +- random-fu +- random-extras +- random-source +- raw-strings-qq +- reflection +- Rasterific +- streams +- stache +- semigroupoids +- tomland +- transformers +- text +- text-zipper +- vector +- vty +- witherable +- yaml +- zlib + +default-extensions: +- BlockArguments +- ConstraintKinds +- DataKinds +- DeriveAnyClass +- DeriveGeneric +- DerivingStrategies +- DerivingVia +- FlexibleContexts +- FlexibleInstances +- FunctionalDependencies +- GADTSyntax +- GeneralizedNewtypeDeriving +- KindSignatures +- LambdaCase +- MultiWayIf +- NoImplicitPrelude +- NoStarIsType +- OverloadedStrings +- PolyKinds +- RankNTypes +- ScopedTypeVariables +- TupleSections +- TypeApplications +- TypeFamilies +- TypeOperators +- ViewPatterns + +ghc-options: +- -Wall + +library: + source-dirs: src + +executable: + source-dirs: src + main: Main.hs + dependencies: + - xanthous + ghc-options: + - -threaded + - -rtsopts + - -with-rtsopts=-N + - -O2 + +tests: + test: + main: Spec.hs + source-dirs: test + ghc-options: + - -threaded + - -rtsopts + - -with-rtsopts=-N + - -O0 + dependencies: + - xanthous + - tasty + - tasty-hunit + - tasty-quickcheck + - lens-properties 
+ +benchmarks: + benchmark: + main: Bench.hs + source-dirs: bench + ghc-options: + - -threaded + - -rtsopts + - -with-rtsopts=-N + dependencies: + - xanthous + - criterion diff --git a/users/glittershark/xanthous/pkg.nix b/users/glittershark/xanthous/pkg.nix new file mode 100644 index 000000000000..fc303a3eda06 --- /dev/null +++ b/users/glittershark/xanthous/pkg.nix @@ -0,0 +1,7 @@ +{ pkgs ? (import ../../../. {}).third_party }: + +import (pkgs.haskellPackages.haskellSrc2nix { + name = "xanthous"; + src = pkgs.gitignoreSource ./.; + extraCabal2nixOptions = "--hpack"; +}) diff --git a/users/glittershark/xanthous/shell.nix b/users/glittershark/xanthous/shell.nix new file mode 100644 index 000000000000..9881e6bf0452 --- /dev/null +++ b/users/glittershark/xanthous/shell.nix @@ -0,0 +1,16 @@ +{ pkgs ? (import ../../../. {}).third_party, ... }: + +(pkgs.haskellPackages.extend (pkgs.haskell.lib.packageSourceOverrides { + xanthous = pkgs.gitignoreSource ./.; +})).shellFor { + packages = p: [p.xanthous]; + withHoogle = true; + doBenchmark = true; + buildInputs = with pkgs.haskellPackages; [ + cabal-install + ghc-prof-flamegraph + hp2pretty + hlint + pkgs.haskell-language-server.ghc883 + ]; +} diff --git a/users/glittershark/xanthous/src/Data/Aeson/Generic/DerivingVia.hs b/users/glittershark/xanthous/src/Data/Aeson/Generic/DerivingVia.hs new file mode 100644 index 000000000000..34f2a9403892 --- /dev/null +++ b/users/glittershark/xanthous/src/Data/Aeson/Generic/DerivingVia.hs @@ -0,0 +1,167 @@ +{-# LANGUAGE ConstraintKinds, DataKinds, DeriveGeneric, DerivingVia #-} +{-# LANGUAGE ExplicitNamespaces, FlexibleContexts, FlexibleInstances #-} +{-# LANGUAGE GADTs, GeneralizedNewtypeDeriving, MultiParamTypeClasses #-} +{-# LANGUAGE PolyKinds, ScopedTypeVariables, StandaloneDeriving #-} +{-# LANGUAGE TypeApplications, TypeFamilies, TypeInType, TypeOperators #-} +{-# LANGUAGE UndecidableInstances #-} +{-# OPTIONS_GHC -Wall #-} +-- | https://gist.github.com/konn/27c00f784dd883ec2b90eab8bc84a81d +module Data.Aeson.Generic.DerivingVia + ( StrFun(..), Setting(..), SumEncoding'(..), DefaultOptions, WithOptions(..) + , -- Utility type synonyms to save ticks (') before promoted data constructors + type Drop, type CamelTo2, type UserDefined + , type TaggedObj, type UntaggedVal, type ObjWithSingleField, type TwoElemArr + , type FieldLabelModifier + , type ConstructorTagModifier + , type AllNullaryToStringTag + , type OmitNothingFields + , type SumEnc + , type UnwrapUnaryRecords + , type TagSingleConstructors + ) + where + +import Prelude +import Data.Aeson (FromJSON (..), GFromJSON, GToJSON, + ToJSON (..)) +import Data.Aeson (Options (..), Zero, camelTo2, + genericParseJSON) +import Data.Aeson (defaultOptions, genericToJSON) +import qualified Data.Aeson as Aeson +import Data.Kind (Constraint, Type) +import Data.Proxy (Proxy (..)) +import Data.Reflection (Reifies (..)) +import GHC.Generics (Generic, Rep) +import GHC.TypeLits (KnownNat, KnownSymbol, natVal, symbolVal) +import GHC.TypeLits (Nat, Symbol) + +newtype WithOptions options a = WithOptions { runWithOptions :: a } + +data StrFun = Drop Nat + | CamelTo2 Symbol + | forall p. 
UserDefined p + +type Drop = 'Drop +type CamelTo2 = 'CamelTo2 +type UserDefined = 'UserDefined + +type family Demoted a where + Demoted Symbol = String + Demoted StrFun = String -> String + Demoted [a] = [Demoted a] + Demoted Setting = Options -> Options + Demoted SumEncoding' = Aeson.SumEncoding + Demoted a = a + +data SumEncoding' = TaggedObj {tagFieldName' :: Symbol, contentsFieldName :: Symbol } + | UntaggedVal + | ObjWithSingleField + | TwoElemArr + +type TaggedObj = 'TaggedObj +type UntaggedVal = 'UntaggedVal +type ObjWithSingleField = 'ObjWithSingleField +type TwoElemArr = 'TwoElemArr + +data Setting = FieldLabelModifier [StrFun] + | ConstructorTagModifier [StrFun] + | AllNullaryToStringTag Bool + | OmitNothingFields Bool + | SumEnc SumEncoding' + | UnwrapUnaryRecords Bool + | TagSingleConstructors Bool + +type FieldLabelModifier = 'FieldLabelModifier +type ConstructorTagModifier = 'ConstructorTagModifier +-- | If 'True' the constructors of a datatype, with all nullary constructors, +-- will be encoded to just a string with the constructor tag. If 'False' the +-- encoding will always follow the 'SumEncoding'. +type AllNullaryToStringTag = 'AllNullaryToStringTag +type OmitNothingFields = 'OmitNothingFields +type SumEnc = 'SumEnc +-- | Hide the field name when a record constructor has only one field, like a +-- newtype. +type UnwrapUnaryRecords = 'UnwrapUnaryRecords +-- | Encode types with a single constructor as sums, so that +-- 'AllNullaryToStringTag' and 'SumEncoding' apply. +type TagSingleConstructors = 'TagSingleConstructors + +class Demotable (a :: k) where + demote :: proxy a -> Demoted k + +type family All (p :: Type -> Constraint) (xs :: [k]) :: Constraint where + All p '[] = () + All p (x ': xs) = (p x, All p xs) + +instance Reifies f (String -> String) => Demotable ('UserDefined f) where + demote _ = reflect @f Proxy + +instance KnownSymbol sym => Demotable sym where + demote = symbolVal + +instance (KnownSymbol s, KnownSymbol t) => Demotable ('TaggedObj s t) where + demote _ = Aeson.TaggedObject (symbolVal @s Proxy) (symbolVal @t Proxy) + +instance Demotable 'UntaggedVal where + demote _ = Aeson.UntaggedValue + +instance Demotable 'ObjWithSingleField where + demote _ = Aeson.ObjectWithSingleField + +instance Demotable 'TwoElemArr where + demote _ = Aeson.TwoElemArray + +instance Demotable xs => Demotable ('FieldLabelModifier xs) where + demote _ o = o { fieldLabelModifier = foldr (.) id (demote (Proxy @xs)) } + +instance Demotable xs => Demotable ('ConstructorTagModifier xs) where + demote _ o = o { constructorTagModifier = foldr (.) 
id (demote (Proxy @xs)) } + +instance Demotable b => Demotable ('AllNullaryToStringTag b) where + demote _ o = o { allNullaryToStringTag = demote (Proxy @b) } + +instance Demotable b => Demotable ('OmitNothingFields b) where + demote _ o = o { omitNothingFields = demote (Proxy @b) } + +instance Demotable b => Demotable ('UnwrapUnaryRecords b) where + demote _ o = o { unwrapUnaryRecords = demote (Proxy @b) } + +instance Demotable b => Demotable ('TagSingleConstructors b) where + demote _ o = o { tagSingleConstructors = demote (Proxy @b) } + +instance Demotable b => Demotable ('SumEnc b) where + demote _ o = o { sumEncoding = demote (Proxy @b) } + +instance Demotable 'True where + demote _ = True + +instance Demotable 'False where + demote _ = False + +instance KnownNat n => Demotable ('Drop n) where + demote _ = drop (fromIntegral $ natVal (Proxy :: Proxy n)) + +instance KnownSymbol sym => Demotable ('CamelTo2 sym) where + demote _ = camelTo2 $ head $ symbolVal @sym Proxy + +instance {-# OVERLAPPING #-} Demotable ('[] :: [k]) where + demote _ = [] + +instance (Demotable (x :: k), Demotable (xs :: [k])) => Demotable (x ': xs) where + demote _ = demote (Proxy @x) : demote (Proxy @xs) + +type DefaultOptions = ('[] :: [Setting]) + +reflectOptions :: forall xs proxy. Demotable (xs :: [Setting]) => proxy xs -> Options +reflectOptions pxy = foldr (.) id (demote pxy) defaultOptions + +instance (Demotable (options :: [Setting])) => Reifies options Options where + reflect = reflectOptions + +instance (Generic a, GToJSON Zero (Rep a), Reifies (options :: k) Options) + => ToJSON (WithOptions options a) where + toJSON = genericToJSON (reflect (Proxy @options)) . runWithOptions + +instance (Generic a, GFromJSON Zero (Rep a), Reifies (options :: k) Options) + => FromJSON (WithOptions options a) where + parseJSON = fmap WithOptions . genericParseJSON (reflect (Proxy @options)) diff --git a/users/glittershark/xanthous/src/Main.hs b/users/glittershark/xanthous/src/Main.hs new file mode 100644 index 000000000000..dcd31afff9c7 --- /dev/null +++ b/users/glittershark/xanthous/src/Main.hs @@ -0,0 +1,159 @@ +module Main ( main ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude hiding (finally) +import Brick +import qualified Brick.BChan +import qualified Graphics.Vty as Vty +import qualified Options.Applicative as Opt +import System.Random +import Control.Monad.Random (getRandom) +import Control.Exception (finally) +import System.Exit (die) +-------------------------------------------------------------------------------- +import qualified Xanthous.Game as Game +import Xanthous.Game.Env (GameEnv(..)) +import Xanthous.App +import Xanthous.Generators + ( GeneratorInput + , parseGeneratorInput + , generateFromInput + , showCells + ) +import qualified Xanthous.Entities.Character as Character +import Xanthous.Generators.Util (regions) +import Xanthous.Generators.LevelContents +import Xanthous.Data (Dimensions, Dimensions'(Dimensions)) +import Data.Array.IArray ( amap ) +-------------------------------------------------------------------------------- + +data RunParams = RunParams + { seed :: Maybe Int + , characterName :: Maybe Text + } + deriving stock (Show, Eq) + +parseRunParams :: Opt.Parser RunParams +parseRunParams = RunParams + <$> optional (Opt.option Opt.auto + ( Opt.long "seed" + <> Opt.help "Random seed for the game." + )) + <*> optional (Opt.strOption + ( Opt.short 'n' + <> Opt.long "name" + <> Opt.help + ( "Name for the character. 
If not set on the command line, " + <> "will be prompted for at runtime" + ) + )) + +data Command + = Run RunParams + | Load FilePath + | Generate GeneratorInput Dimensions (Maybe Int) + +parseDimensions :: Opt.Parser Dimensions +parseDimensions = Dimensions + <$> Opt.option Opt.auto + ( Opt.short 'w' + <> Opt.long "width" + <> Opt.metavar "TILES" + ) + <*> Opt.option Opt.auto + ( Opt.short 'h' + <> Opt.long "height" + <> Opt.metavar "TILES" + ) + + +parseCommand :: Opt.Parser Command +parseCommand = (<|> Run <$> parseRunParams) $ Opt.subparser + $ Opt.command "run" + (Opt.info + (Run <$> parseRunParams) + (Opt.progDesc "Run the game")) + <> Opt.command "load" + (Opt.info + (Load <$> Opt.argument Opt.str (Opt.metavar "FILE")) + (Opt.progDesc "Load a saved game")) + <> Opt.command "generate" + (Opt.info + (Generate + <$> parseGeneratorInput + <*> parseDimensions + <*> optional + (Opt.option Opt.auto (Opt.long "seed")) + <**> Opt.helper + ) + (Opt.progDesc "Generate a sample level")) + +optParser :: Opt.ParserInfo Command +optParser = Opt.info + (parseCommand <**> Opt.helper) + (Opt.header "Xanthous: a WIP TUI RPG") + +thanks :: IO () +thanks = putStr "\n\n" >> putStrLn "Thanks for playing Xanthous!" + +newGame :: RunParams -> IO () +newGame rparams = do + gameSeed <- maybe getRandom pure $ seed rparams + when (isNothing $ seed rparams) + . putStrLn + $ "Seed: " <> tshow gameSeed + let initialState = Game.initialStateFromSeed gameSeed &~ do + for_ (characterName rparams) $ \cn -> + Game.character . Character.characterName ?= cn + runGame NewGame initialState `finally` do + thanks + when (isNothing $ seed rparams) + . putStrLn + $ "Seed: " <> tshow gameSeed + putStr "\n\n" + +loadGame :: FilePath -> IO () +loadGame saveFile = do + gameState <- maybe (die "Invalid save file!") pure + =<< Game.loadGame . fromStrict <$> readFile @IO saveFile + gameState `deepseq` runGame LoadGame gameState + +runGame :: RunType -> Game.GameState -> IO () +runGame rt gameState = do + eventChan <- Brick.BChan.newBChan 10 + let gameEnv = GameEnv eventChan + app <- makeApp gameEnv rt + let buildVty = Vty.mkVty Vty.defaultConfig + initialVty <- buildVty + _game' <- customMain + initialVty + buildVty + (Just eventChan) + app + gameState + pure () + +runGenerate :: GeneratorInput -> Dimensions -> Maybe Int -> IO () +runGenerate input dims mSeed = do + putStrLn "Generating..." + genSeed <- maybe getRandom pure mSeed + let randGen = mkStdGen genSeed + res = generateFromInput input dims randGen + rs = regions $ amap not res + when (isNothing mSeed) + . putStrLn + $ "Seed: " <> tshow genSeed + putStr "num regions: " + print $ length rs + putStr "region lengths: " + print $ length <$> rs + putStr "character position: " + print =<< chooseCharacterPosition res + putStrLn $ showCells res + +runCommand :: Command -> IO () +runCommand (Run runParams) = newGame runParams +runCommand (Load saveFile) = loadGame saveFile +runCommand (Generate input dims mSeed) = runGenerate input dims mSeed + +main :: IO () +main = runCommand =<< Opt.execParser optParser diff --git a/users/glittershark/xanthous/src/Xanthous/AI/Gormlak.hs b/users/glittershark/xanthous/src/Xanthous/AI/Gormlak.hs new file mode 100644 index 000000000000..a6cc789d6894 --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/AI/Gormlak.hs @@ -0,0 +1,124 @@ +{-# OPTIONS_GHC -fno-warn-orphans #-} +{-# LANGUAGE UndecidableInstances #-} +-------------------------------------------------------------------------------- +module Xanthous.AI.Gormlak + ( HasVisionRadius(..) 
+ , GormlakBrain(..) + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude hiding (lines) +-------------------------------------------------------------------------------- +import Control.Monad.State +import Control.Monad.Random +import Data.Aeson (object) +import qualified Data.Aeson as A +import Data.Generics.Product.Fields +-------------------------------------------------------------------------------- +import Xanthous.Data + ( Positioned(..), positioned, position + , diffPositions, stepTowards, isUnit + , Ticks, (|*|), invertedRate + ) +import Xanthous.Data.EntityMap +import Xanthous.Entities.Creature.Hippocampus +import Xanthous.Entities.Character (Character) +import qualified Xanthous.Entities.Character as Character +import qualified Xanthous.Entities.RawTypes as Raw +import Xanthous.Entities.RawTypes (CreatureType) +import Xanthous.Game.State +import Xanthous.Game.Lenses + ( entitiesCollision, collisionAt + , character, characterPosition + ) +import Xanthous.Data.EntityMap.Graphics (linesOfSight, canSee) +import Xanthous.Random +import Xanthous.Monad (say) +-------------------------------------------------------------------------------- + +-- TODO move the following two classes to a more central location + +class HasVisionRadius a where visionRadius :: a -> Word + +type IsCreature entity = + ( HasVisionRadius entity + , HasField "_hippocampus" entity entity Hippocampus Hippocampus + , HasField "_creatureType" entity entity CreatureType CreatureType + , A.ToJSON entity + ) + +-------------------------------------------------------------------------------- + +stepGormlak + :: forall entity m. + ( MonadState GameState m, MonadRandom m + , IsCreature entity + ) + => Ticks + -> Positioned entity + -> m (Positioned entity) +stepGormlak ticks pe@(Positioned pos creature) = do + dest <- maybe (selectDestination pos creature) pure + $ creature ^. field @"_hippocampus" . destination + let progress' = + dest ^. destinationProgress + + creature ^. field @"_creatureType" . Raw.speed . invertedRate |*| ticks + if progress' < 1 + then pure + $ pe + & positioned . field @"_hippocampus" . destination + ?~ (dest & destinationProgress .~ progress') + else do + let newPos = dest ^. destinationPosition + remainingSpeed = progress' - 1 + newDest <- selectDestination newPos creature + <&> destinationProgress +~ remainingSpeed + let pe' = pe & positioned . field @"_hippocampus" . destination ?~ newDest + collisionAt newPos >>= \case + Nothing -> pure $ pe' & position .~ newPos + Just Stop -> pure pe' + Just Combat -> do + ents <- use $ entities . atPosition newPos + when (any (entityIs @Character) ents) attackCharacter + pure pe' + where + selectDestination pos' creature' = destinationFromPos <$> do + canSeeCharacter <- uses entities $ canSee (entityIs @Character) pos' vision + if canSeeCharacter + then do + charPos <- use characterPosition + if isUnit (pos' `diffPositions` charPos) + then attackCharacter $> pos' + else pure $ pos' `stepTowards` charPos + else do + lines <- map (takeWhile (isNothing . entitiesCollision . map snd . snd) + -- the first item on these lines is always the creature itself + . fromMaybe mempty . tailMay) + . linesOfSight pos' (visionRadius creature') + <$> use entities + line <- choose $ weightedBy length lines + pure $ fromMaybe pos' $ fmap fst . 
headMay =<< line + + vision = visionRadius creature + attackCharacter = do + say ["combat", "creatureAttack"] $ object [ "creature" A..= creature ] + character %= Character.damage 1 + +newtype GormlakBrain entity = GormlakBrain { _unGormlakBrain :: entity } + +instance (IsCreature entity) => Brain (GormlakBrain entity) where + step ticks + = fmap (fmap GormlakBrain) + . stepGormlak ticks + . fmap _unGormlakBrain + entityCanMove = const True + +-------------------------------------------------------------------------------- + +-- instance Brain Creature where +-- step = brainVia GormlakBrain +-- entityCanMove = const True + +-- instance Entity Creature where +-- blocksVision _ = False +-- description = view $ Creature.creatureType . Raw.description +-- entityChar = view $ Creature.creatureType . char diff --git a/users/glittershark/xanthous/src/Xanthous/App.hs b/users/glittershark/xanthous/src/Xanthous/App.hs new file mode 100644 index 000000000000..9091961b725c --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/App.hs @@ -0,0 +1,469 @@ +{-# LANGUAGE UndecidableInstances #-} +{-# LANGUAGE RecordWildCards #-} +-------------------------------------------------------------------------------- +module Xanthous.App + ( makeApp + , RunType(..) + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude +import Brick hiding (App, halt, continue, raw) +import qualified Brick +import Graphics.Vty.Attributes (defAttr) +import Graphics.Vty.Input.Events (Event(EvKey)) +import Control.Monad.State (get, gets) +import Control.Monad.State.Class (modify) +import Data.Aeson (object, ToJSON) +import qualified Data.Aeson as A +import qualified Data.Vector as V +import System.Exit +import System.Directory (doesFileExist) +import Data.List.NonEmpty (NonEmpty(..)) +-------------------------------------------------------------------------------- +import Xanthous.App.Common +import Xanthous.App.Time +import Xanthous.App.Prompt +import Xanthous.App.Autocommands +import Xanthous.Command +import Xanthous.Data + ( move + , Dimensions'(Dimensions) + , positioned + , position + , Position + , (|*|) + ) +import Xanthous.Data.App (ResourceName, Panel(..), AppEvent(..)) +import qualified Xanthous.Data.EntityMap as EntityMap +import Xanthous.Data.Levels (prevLevel, nextLevel) +import qualified Xanthous.Data.Levels as Levels +import Xanthous.Data.Entities (blocksObject) +import Xanthous.Game +import Xanthous.Game.State +import Xanthous.Game.Env +import Xanthous.Game.Draw (drawGame) +import Xanthous.Game.Prompt +import qualified Xanthous.Messages as Messages +import Xanthous.Random +import Xanthous.Util (removeVectorIndex) +import Xanthous.Util.Inflection (toSentence) +-------------------------------------------------------------------------------- +import qualified Xanthous.Entities.Character as Character +import Xanthous.Entities.Character hiding (pickUpItem) +import Xanthous.Entities.Item (Item) +import qualified Xanthous.Entities.Item as Item +import Xanthous.Entities.Creature (Creature) +import qualified Xanthous.Entities.Creature as Creature +import Xanthous.Entities.Environment + (Door, open, closed, locked, GroundMessage(..), Staircase(..)) +import Xanthous.Entities.RawTypes + ( edible, eatMessage, hitpointsHealed + , attackMessage + ) +import Xanthous.Generators +import qualified Xanthous.Generators.CaveAutomata as CaveAutomata +import qualified Xanthous.Generators.Dungeon as Dungeon 
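+
+-- Control flow summary: 'handleEvent' first consults 'promptState'; while a
+-- prompt is waiting, events are routed to 'handlePromptEvent', otherwise
+-- 'commandFromKey' translates the key press into a 'Command' and
+-- 'handleCommand' interprets it against the 'GameState'.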
+-------------------------------------------------------------------------------- + +type App = Brick.App GameState AppEvent ResourceName + +data RunType = NewGame | LoadGame + deriving stock (Eq) + +makeApp :: GameEnv -> RunType -> IO App +makeApp env rt = pure $ Brick.App + { appDraw = drawGame + , appChooseCursor = const headMay + , appHandleEvent = \game event -> runAppM (handleEvent event) env game + , appStartEvent = case rt of + NewGame -> runAppM (startEvent >> get) env + LoadGame -> pure + , appAttrMap = const $ attrMap defAttr [] + } + +runAppM :: AppM a -> GameEnv -> GameState -> EventM ResourceName a +runAppM appm ge = fmap fst . runAppT appm ge + +startEvent :: AppM () +startEvent = do + initLevel + modify updateCharacterVision + use (character . characterName) >>= \case + Nothing -> prompt_ @'StringPrompt ["character", "namePrompt"] Uncancellable + $ \(StringResult s) -> do + character . characterName ?= s + say ["welcome"] =<< use character + Just n -> say ["welcome"] $ object [ "characterName" A..= n ] + +initLevel :: AppM () +initLevel = do + level <- genLevel 0 + entities <>= levelToEntityMap level + characterPosition .= level ^. levelCharacterPosition + +-------------------------------------------------------------------------------- + +handleEvent :: BrickEvent ResourceName AppEvent -> AppM (Next GameState) +handleEvent ev = use promptState >>= \case + NoPrompt -> handleNoPromptEvent ev + WaitingPrompt msg pr -> handlePromptEvent msg pr ev + + +handleNoPromptEvent :: BrickEvent ResourceName AppEvent -> AppM (Next GameState) +handleNoPromptEvent (VtyEvent (EvKey k mods)) + | Just command <- commandFromKey k mods + = do messageHistory %= nextTurn + cancelAutocommand + handleCommand command +handleNoPromptEvent (AppEvent AutoContinue) = do + preuse (autocommand . _ActiveAutocommand . _1) >>= traverse_ autoStep + continue +handleNoPromptEvent _ = continue + +handleCommand :: Command -> AppM (Next GameState) +handleCommand Quit = confirm_ ["quit", "confirm"] (liftIO exitSuccess) >> continue +handleCommand (Move dir) = do + newPos <- uses characterPosition $ move dir + collisionAt newPos >>= \case + Nothing -> do + characterPosition .= newPos + stepGameBy =<< uses (character . speed) (|*| 1) + describeEntitiesAt newPos + Just Combat -> attackAt newPos + Just Stop -> pure () + continue + +handleCommand PickUp = do + pos <- use characterPosition + uses entities (entitiesAtPositionWithType @Item pos) >>= \case + [] -> say_ ["pickUp", "nothingToPickUp"] + [item] -> pickUpItem item + items' -> + menu_ ["pickUp", "menu"] Cancellable (entityMenu_ items') + $ \(MenuResult item) -> pickUpItem item + continue + where + pickUpItem (itemID, item) = do + character %= Character.pickUpItem item + entities . at itemID .= Nothing + say ["pickUp", "pickUp"] $ object [ "item" A..= item ] + stepGameBy 100 -- TODO + +handleCommand Drop = do + selectItemFromInventory_ ["drop", "menu"] Cancellable id + (say_ ["drop", "nothing"]) + $ \(MenuResult item) -> do + entitiesAtCharacter %= (SomeEntity item <|) + say ["drop", "dropped"] $ object [ "item" A..= item ] + continue + +handleCommand PreviousMessage = do + messageHistory %= previousMessage + continue + +handleCommand Open = do + prompt_ @'DirectionPrompt ["open", "prompt"] Cancellable + $ \(DirectionResult dir) -> do + pos <- move dir <$> use characterPosition + doors <- uses entities $ entitiesAtPositionWithType @Door pos + if | null doors -> say_ ["open", "nothingToOpen"] + | any (view $ _2 . 
locked) doors -> say_ ["open", "locked"] + | all (view $ _2 . open) doors -> say_ ["open", "alreadyOpen"] + | otherwise -> do + for_ doors $ \(eid, _) -> + entities . ix eid . positioned . _SomeEntity . open .= True + say_ ["open", "success"] + pure () + stepGame -- TODO + continue + +handleCommand Close = do + prompt_ @'DirectionPrompt ["close", "prompt"] Cancellable + $ \(DirectionResult dir) -> do + pos <- move dir <$> use characterPosition + (nonDoors, doors) <- uses entities + $ partitionEithers + . toList + . map ( (matching . aside $ _SomeEntity @Door) + . over _2 (view positioned) + ) + . EntityMap.atPositionWithIDs pos + if | null doors -> say_ ["close", "nothingToClose"] + | all (view $ _2 . closed) doors -> say_ ["close", "alreadyClosed"] + | any (view blocksObject . entityAttributes . snd) nonDoors -> + say ["close", "blocked"] + $ object [ "entityDescriptions" + A..= ( toSentence + . map description + . filter (view blocksObject . entityAttributes) + . map snd + ) nonDoors + , "blockOrBlocks" + A..= ( if length nonDoors == 1 + then "blocks" + else "block" + :: Text) + ] + | otherwise -> do + for_ doors $ \(eid, _) -> + entities . ix eid . positioned . _SomeEntity . closed .= True + for_ nonDoors $ \(eid, _) -> + entities . ix eid . position %= move dir + say_ ["close", "success"] + pure () + stepGame -- TODO + continue + +handleCommand Look = do + prompt_ @'PointOnMap ["look", "prompt"] Cancellable + $ \(PointOnMapResult pos) -> + gets (revealedEntitiesAtPosition pos) + >>= \case + Empty -> say_ ["look", "nothing"] + ents -> describeEntities ents + continue + +handleCommand Wait = stepGame >> continue + +handleCommand Eat = do + uses (character . inventory . backpack) + (V.mapMaybe (\item -> (item,) <$> item ^. Item.itemType . edible)) + >>= \case + Empty -> say_ ["eat", "noFood"] + food -> + let foodMenuItem idx (item, edibleItem) + = ( item ^. Item.itemType . char . char + , MenuOption (description item) (idx, item, edibleItem)) + -- TODO refactor to use entityMenu_ + menuItems = mkMenuItems $ imap foodMenuItem food + in menu_ ["eat", "menuPrompt"] Cancellable menuItems + $ \(MenuResult (idx, item, edibleItem)) -> do + character . inventory . backpack %= removeVectorIndex idx + let msg = fromMaybe (Messages.lookup ["eat", "eat"]) + $ edibleItem ^. eatMessage + character . characterHitpoints' += + edibleItem ^. hitpointsHealed . to fromIntegral + message msg $ object ["item" A..= item] + stepGame -- TODO + continue + +handleCommand Read = do + -- TODO allow reading things in the inventory (combo direction+menu prompt?) + prompt_ @'DirectionPrompt ["read", "prompt"] Cancellable + $ \(DirectionResult dir) -> do + pos <- uses characterPosition $ move dir + uses entities + (fmap snd . entitiesAtPositionWithType @GroundMessage pos) >>= \case + Empty -> say_ ["read", "nothing"] + GroundMessage msg :< Empty -> + say ["read", "result"] $ object ["message" A..= msg] + msgs -> + let readAndContinue Empty = pure () + readAndContinue (msg :< msgs') = + prompt @'Continue + ["read", "result"] + (object ["message" A..= msg]) + Cancellable + . const + $ readAndContinue msgs' + readAndContinue _ = error "this is total" + in readAndContinue msgs + continue + +handleCommand ShowInventory = showPanel InventoryPanel >> continue + +handleCommand Wield = do + selectItemFromInventory_ ["wield", "menu"] Cancellable asWieldedItem + (say_ ["wield", "nothing"]) + $ \(MenuResult item) -> do + prevItems <- character . inventory . wielded <<.= inRightHand item + character . inventory . 
backpack + <>= fromList (prevItems ^.. wieldedItems . wieldedItem) + say ["wield", "wielded"] item + continue + +handleCommand Save = do + -- TODO default save locations / config file? + prompt_ @'StringPrompt ["save", "location"] Cancellable + $ \(StringResult filename) -> do + exists <- liftIO . doesFileExist $ unpack filename + if exists + then confirm ["save", "overwrite"] (object ["filename" A..= filename]) + $ doSave filename + else doSave filename + continue + where + doSave filename = do + src <- gets saveGame + lift . liftIO $ do + writeFile (unpack filename) $ toStrict src + exitSuccess + +handleCommand GoUp = do + hasStairs <- uses entitiesAtCharacter $ elem (SomeEntity UpStaircase) + if hasStairs + then uses levels prevLevel >>= \case + Just levs' -> levels .= levs' + Nothing -> + -- TODO in nethack, this leaves the game. Maybe something similar here? + say_ ["cant", "goUp"] + else say_ ["cant", "goUp"] + + continue + +handleCommand GoDown = do + hasStairs <- uses entitiesAtCharacter $ elem (SomeEntity DownStaircase) + + if hasStairs + then do + levs <- use levels + let newLevelNum = Levels.pos levs + 1 + levs' <- nextLevel (levelToGameLevel <$> genLevel newLevelNum) levs + cEID <- use characterEntityID + pCharacter <- entities . at cEID <<.= Nothing + levels .= levs' + entities . at cEID .= pCharacter + characterPosition .= extract levs' ^. upStaircasePosition + else say_ ["cant", "goDown"] + + continue + +handleCommand (StartAutoMove dir) = do + runAutocommand $ AutoMove dir + continue + +-- + +handleCommand ToggleRevealAll = do + val <- debugState . allRevealed <%= not + say ["debug", "toggleRevealAll"] $ object [ "revealAll" A..= val ] + continue + +-------------------------------------------------------------------------------- +attackAt :: Position -> AppM () +attackAt pos = + uses entities (entitiesAtPositionWithType @Creature pos) >>= \case + Empty -> say_ ["combat", "nothingToAttack"] + (creature :< Empty) -> attackCreature creature + creatures -> + menu_ ["combat", "menu"] Cancellable (entityMenu_ creatures) + $ \(MenuResult creature) -> attackCreature creature + where + attackCreature (creatureID, creature) = do + charDamage <- uses character characterDamage + let creature' = Creature.damage charDamage creature + msgParams = object ["creature" A..= creature'] + if Creature.isDead creature' + then do + say ["combat", "killed"] msgParams + entities . at creatureID .= Nothing + else do + msg <- uses character getAttackMessage + message msg msgParams + entities . ix creatureID . positioned .= SomeEntity creature' + + whenM (uses character $ isNothing . weapon) + $ whenM (chance (0.08 :: Float)) $ do + say_ ["combat", "fistSelfDamage"] + character %= Character.damage 1 + + stepGame -- TODO + weapon chr = chr ^? inventory . wielded . wieldedItems . wieldableItem + getAttackMessage chr = + case weapon chr of + Just wi -> + fromMaybe (Messages.lookup ["combat", "hit", "generic"]) + $ wi ^. attackMessage + Nothing -> + Messages.lookup ["combat", "hit", "fists"] + +entityMenu_ + :: (Comonad w, Entity entity) + => [w entity] + -> Map Char (MenuOption (w entity)) +entityMenu_ = mkMenuItems @[_] . map entityMenuItem + where + entityMenuItem wentity + = let entity = extract wentity + in (entityMenuChar entity, MenuOption (description entity) wentity) + + +entityMenuChar :: Entity a => a -> Char +entityMenuChar entity + = let ec = entityChar entity ^. 
char + in if ec `elem` (['a'..'z'] ++ ['A'..'Z']) + then ec + else 'a' + +-- | Prompt with an item to select out of the inventory, remove it from the +-- inventory, and call callback with it +selectItemFromInventory + :: forall item params. + (ToJSON params) + => [Text] -- ^ Menu message + -> params -- ^ Menu message params + -> PromptCancellable -- ^ Is the menu cancellable? + -> Prism' Item item -- ^ Attach some extra information to the item, in a + -- recoverable fashion. Prism vs iso so we can discard + -- items. + -> AppM () -- ^ Action to take if there are no items matching + -> (PromptResult ('Menu item) -> AppM ()) + -> AppM () +selectItemFromInventory msgPath msgParams cancellable extraInfo onEmpty cb = + uses (character . inventory . backpack) + (V.mapMaybe $ preview extraInfo) + >>= \case + Empty -> onEmpty + items' -> + menu msgPath msgParams cancellable (itemMenu items') + $ \(MenuResult (idx, item)) -> do + character . inventory . backpack %= removeVectorIndex idx + cb $ MenuResult item + where + itemMenu = mkMenuItems . imap itemMenuItem + itemMenuItem idx extraInfoItem = + let item = extraInfo # extraInfoItem + in ( entityMenuChar item + , MenuOption (description item) (idx, extraInfoItem)) + +selectItemFromInventory_ + :: forall item. + [Text] -- ^ Menu message + -> PromptCancellable -- ^ Is the menu cancellable? + -> Prism' Item item -- ^ Attach some extra information to the item, in a + -- recoverable fashion. Prism vs iso so we can discard + -- items. + -> AppM () -- ^ Action to take if there are no items matching + -> (PromptResult ('Menu item) -> AppM ()) + -> AppM () +selectItemFromInventory_ msgPath = selectItemFromInventory msgPath () + +-- entityMenu :: Entity entity => [entity] -> Map Char (MenuOption entity) +-- entityMenu = map (map runIdentity) . entityMenu_ . fmap Identity + +showPanel :: Panel -> AppM () +showPanel panel = do + activePanel ?= panel + prompt_ @'Continue ["generic", "continue"] Uncancellable + . const + $ activePanel .= Nothing + +-------------------------------------------------------------------------------- + +genLevel + :: Int -- ^ level number + -> AppM Level +genLevel _num = do + let dims = Dimensions 80 80 + generator <- choose $ CaveAutomata :| [Dungeon] + level <- case generator of + CaveAutomata -> generateLevel SCaveAutomata CaveAutomata.defaultParams dims + Dungeon -> generateLevel SDungeon Dungeon.defaultParams dims + pure $!! level + +levelToGameLevel :: Level -> GameLevel +levelToGameLevel level = + let _levelEntities = levelToEntityMap level + _upStaircasePosition = level ^. 
levelCharacterPosition + _levelRevealedPositions = mempty + in GameLevel {..} diff --git a/users/glittershark/xanthous/src/Xanthous/App/Autocommands.hs b/users/glittershark/xanthous/src/Xanthous/App/Autocommands.hs new file mode 100644 index 000000000000..f393a0e2ea9a --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/App/Autocommands.hs @@ -0,0 +1,64 @@ +-------------------------------------------------------------------------------- +module Xanthous.App.Autocommands + ( runAutocommand + , autoStep + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude +-------------------------------------------------------------------------------- +import Control.Concurrent (threadDelay) +import qualified Data.Aeson as A +import Data.Aeson (object) +import Data.List.NonEmpty (nonEmpty) +import qualified Data.List.NonEmpty as NE +import Control.Monad.State (gets) +-------------------------------------------------------------------------------- +import Xanthous.App.Common +import Xanthous.App.Time +import Xanthous.Data +import Xanthous.Data.App +import Xanthous.Entities.Character (speed) +import Xanthous.Entities.Creature (Creature, creatureType) +import Xanthous.Entities.RawTypes (hostile) +import Xanthous.Game.State +-------------------------------------------------------------------------------- + +autoStep :: Autocommand -> AppM () +autoStep (AutoMove dir) = do + newPos <- uses characterPosition $ move dir + collisionAt newPos >>= \case + Nothing -> do + characterPosition .= newPos + stepGameBy =<< uses (character . speed) (|*| 1) + describeEntitiesAt newPos + maybeVisibleEnemies <- nonEmpty <$> enemiesInSight + for_ maybeVisibleEnemies $ \visibleEnemies -> do + say ["autoMove", "enemyInSight"] + $ object [ "firstEntity" A..= NE.head visibleEnemies ] + cancelAutocommand + Just _ -> cancelAutocommand + where + enemiesInSight :: AppM [Creature] + enemiesInSight = do + ents <- gets characterVisibleEntities + pure $ ents + ^.. folded + . _SomeEntity @Creature + . filtered (view $ creatureType . hostile) + +-------------------------------------------------------------------------------- + +autocommandIntervalμs :: Int +autocommandIntervalμs = 1000 * 50 -- 50 ms + +runAutocommand :: Autocommand -> AppM () +runAutocommand ac = do + env <- ask + tid <- liftIO . 
async $ runReaderT go env + autocommand .= ActiveAutocommand ac tid + where + go = everyμs autocommandIntervalμs $ sendEvent AutoContinue + +-- | Perform 'act' every μs microseconds forever +everyμs :: MonadIO m => Int -> m () -> m () +everyμs μs act = act >> liftIO (threadDelay μs) >> everyμs μs act diff --git a/users/glittershark/xanthous/src/Xanthous/App/Common.hs b/users/glittershark/xanthous/src/Xanthous/App/Common.hs new file mode 100644 index 000000000000..69ba6f0e0596 --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/App/Common.hs @@ -0,0 +1,67 @@ +-------------------------------------------------------------------------------- +module Xanthous.App.Common + ( describeEntities + , describeEntitiesAt + , entitiesAtPositionWithType + + -- * Re-exports + , MonadState + , MonadRandom + , EntityMap + , module Xanthous.Game.Lenses + , module Xanthous.Monad + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude +-------------------------------------------------------------------------------- +import Data.Aeson (object) +import qualified Data.Aeson as A +import Control.Monad.State (MonadState) +import Control.Monad.Random (MonadRandom) +-------------------------------------------------------------------------------- +import Xanthous.Data (Position, positioned) +import Xanthous.Data.EntityMap (EntityMap) +import qualified Xanthous.Data.EntityMap as EntityMap +import Xanthous.Game +import Xanthous.Game.Lenses +import Xanthous.Game.State +import Xanthous.Monad +import Xanthous.Entities.Character (Character) +import Xanthous.Util.Inflection (toSentence) +-------------------------------------------------------------------------------- + +entitiesAtPositionWithType + :: forall a. (Entity a, Typeable a) + => Position + -> EntityMap SomeEntity + -> [(EntityMap.EntityID, a)] +entitiesAtPositionWithType pos em = + let someEnts = EntityMap.atPositionWithIDs pos em + in flip foldMap someEnts $ \(eid, view positioned -> se) -> + case downcastEntity @a se of + Just e -> [(eid, e)] + Nothing -> [] + +describeEntitiesAt :: (MonadState GameState m, MonadRandom m) => Position -> m () +describeEntitiesAt pos = + use ( entities + . EntityMap.atPosition pos + . to (filter (not . 
entityIs @Character)) + ) >>= \case + Empty -> pure () + ents -> describeEntities ents + +describeEntities + :: ( Entity entity + , MonadRandom m + , MonadState GameState m + , MonoFoldable (f Text) + , Functor f + , Element (f Text) ~ Text + ) + => f entity + -> m () +describeEntities ents = + let descriptions = description <$> ents + in say ["entities", "description"] + $ object ["entityDescriptions" A..= toSentence descriptions] diff --git a/users/glittershark/xanthous/src/Xanthous/App/Prompt.hs b/users/glittershark/xanthous/src/Xanthous/App/Prompt.hs new file mode 100644 index 000000000000..9b5a3bf24fa7 --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/App/Prompt.hs @@ -0,0 +1,161 @@ +{-# LANGUAGE UndecidableInstances #-} +-------------------------------------------------------------------------------- +module Xanthous.App.Prompt + ( handlePromptEvent + , clearPrompt + , prompt + , prompt_ + , confirm_ + , confirm + , menu + , menu_ + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude +-------------------------------------------------------------------------------- +import Brick (BrickEvent(..), Next) +import Brick.Widgets.Edit (handleEditorEvent) +import Data.Aeson (ToJSON, object) +import Graphics.Vty.Input.Events (Event(EvKey), Key(..)) +import GHC.TypeLits (ErrorMessage(..)) +-------------------------------------------------------------------------------- +import Xanthous.App.Common +import Xanthous.Data (move) +import Xanthous.Command (directionFromChar) +import Xanthous.Data.App (ResourceName, AppEvent) +import Xanthous.Game.Prompt +import Xanthous.Game.State +import qualified Xanthous.Messages as Messages +-------------------------------------------------------------------------------- + +handlePromptEvent + :: Text -- ^ Prompt message + -> Prompt AppM + -> BrickEvent ResourceName AppEvent + -> AppM (Next GameState) + +handlePromptEvent _ (Prompt Cancellable _ _ _ _) (VtyEvent (EvKey KEsc [])) + = clearPrompt >> continue +handlePromptEvent _ pr (VtyEvent (EvKey KEnter [])) + = clearPrompt >> submitPrompt pr >> continue + +handlePromptEvent _ pr@(Prompt _ SConfirm _ _ _) (VtyEvent (EvKey (KChar 'y') [])) + = clearPrompt >> submitPrompt pr >> continue + +handlePromptEvent _ (Prompt _ SConfirm _ _ _) (VtyEvent (EvKey (KChar 'n') [])) + = clearPrompt >> continue + +handlePromptEvent + msg + (Prompt c SStringPrompt (StringPromptState edit) pri cb) + (VtyEvent ev) + = do + edit' <- lift $ handleEditorEvent ev edit + let prompt' = Prompt c SStringPrompt (StringPromptState edit') pri cb + promptState .= WaitingPrompt msg prompt' + continue + +handlePromptEvent _ (Prompt _ SDirectionPrompt _ _ cb) + (VtyEvent (EvKey (KChar (directionFromChar -> Just dir)) [])) + = clearPrompt >> cb (DirectionResult dir) >> continue +handlePromptEvent _ (Prompt _ SDirectionPrompt _ _ _) _ = continue + +handlePromptEvent _ (Prompt _ SMenu _ items' cb) (VtyEvent (EvKey (KChar chr) [])) + | Just (MenuOption _ res) <- items' ^. 
at chr + = clearPrompt >> cb (MenuResult res) >> continue + | otherwise + = continue + +handlePromptEvent + msg + (Prompt c SPointOnMap (PointOnMapPromptState pos) pri cb) + (VtyEvent (EvKey (KChar (directionFromChar -> Just dir)) [])) + = let pos' = move dir pos + prompt' = Prompt c SPointOnMap (PointOnMapPromptState pos') pri cb + in promptState .= WaitingPrompt msg prompt' + >> continue +handlePromptEvent _ (Prompt _ SPointOnMap _ _ _) _ = continue + +handlePromptEvent + _ + (Prompt Cancellable _ _ _ _) + (VtyEvent (EvKey (KChar 'q') [])) + = clearPrompt >> continue +handlePromptEvent _ _ _ = continue + +clearPrompt :: AppM () +clearPrompt = promptState .= NoPrompt + +class NotMenu (pt :: PromptType) +instance NotMenu 'StringPrompt +instance NotMenu 'Confirm +instance NotMenu 'DirectionPrompt +instance NotMenu 'PointOnMap +instance NotMenu 'Continue +instance TypeError ('Text "Cannot use `prompt` or `prompt_` for menu prompts" + ':$$: 'Text "Use `menu` or `menu_` instead") + => NotMenu ('Menu a) + +prompt + :: forall (pt :: PromptType) (params :: Type). + (ToJSON params, SingPromptType pt, NotMenu pt) + => [Text] -- ^ Message key + -> params -- ^ Message params + -> PromptCancellable + -> (PromptResult pt -> AppM ()) -- ^ Prompt promise handler + -> AppM () +prompt msgPath params cancellable cb = do + let pt = singPromptType @pt + msg <- Messages.message msgPath params + p <- case pt of + SPointOnMap -> do + charPos <- use characterPosition + pure $ mkPointOnMapPrompt cancellable charPos cb + SStringPrompt -> pure $ mkPrompt cancellable pt cb + SConfirm -> pure $ mkPrompt cancellable pt cb + SDirectionPrompt -> pure $ mkPrompt cancellable pt cb + SContinue -> pure $ mkPrompt cancellable pt cb + SMenu -> error "unreachable" + promptState .= WaitingPrompt msg p + +prompt_ + :: forall (pt :: PromptType). + (SingPromptType pt, NotMenu pt) + => [Text] -- ^ Message key + -> PromptCancellable + -> (PromptResult pt -> AppM ()) -- ^ Prompt promise handler + -> AppM () +prompt_ msg = prompt msg $ object [] + +confirm + :: ToJSON params + => [Text] -- ^ Message key + -> params + -> AppM () + -> AppM () +confirm msgPath params + = prompt @'Confirm msgPath params Cancellable . const + +confirm_ :: [Text] -> AppM () -> AppM () +confirm_ msgPath = confirm msgPath $ object [] + +menu :: forall (a :: Type) (params :: Type). + (ToJSON params) + => [Text] -- ^ Message key + -> params -- ^ Message params + -> PromptCancellable + -> Map Char (MenuOption a) -- ^ Menu items + -> (PromptResult ('Menu a) -> AppM ()) -- ^ Menu promise handler + -> AppM () +menu msgPath params cancellable items' cb = do + msg <- Messages.message msgPath params + let p = mkMenu cancellable items' cb + promptState .= WaitingPrompt msg p + +menu_ :: forall (a :: Type). 
+ [Text] -- ^ Message key + -> PromptCancellable + -> Map Char (MenuOption a) -- ^ Menu items + -> (PromptResult ('Menu a) -> AppM ()) -- ^ Menu promise handler + -> AppM () +menu_ msgPath = menu msgPath $ object [] diff --git a/users/glittershark/xanthous/src/Xanthous/App/Time.hs b/users/glittershark/xanthous/src/Xanthous/App/Time.hs new file mode 100644 index 000000000000..b17348f3853e --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/App/Time.hs @@ -0,0 +1,40 @@ +-------------------------------------------------------------------------------- +module Xanthous.App.Time + ( stepGame + , stepGameBy + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude +-------------------------------------------------------------------------------- +import System.Exit +-------------------------------------------------------------------------------- +import Xanthous.Data (Ticks) +import Xanthous.App.Prompt +import qualified Xanthous.Data.EntityMap as EntityMap +import Xanthous.Entities.Character (isDead) +import Xanthous.Game.State +import Xanthous.Game.Prompt +import Xanthous.Game.Lenses +import Control.Monad.State (modify) +-------------------------------------------------------------------------------- + + +stepGameBy :: Ticks -> AppM () +stepGameBy ticks = do + ents <- uses entities EntityMap.toEIDsAndPositioned + for_ ents $ \(eid, pEntity) -> do + pEntity' <- step ticks pEntity + entities . ix eid .= pEntity' + + modify updateCharacterVision + + whenM (uses character isDead) + . prompt_ @'Continue ["dead"] Uncancellable + . const . lift . liftIO + $ exitSuccess + +ticksPerTurn :: Ticks +ticksPerTurn = 100 + +stepGame :: AppM () +stepGame = stepGameBy ticksPerTurn diff --git a/users/glittershark/xanthous/src/Xanthous/Command.hs b/users/glittershark/xanthous/src/Xanthous/Command.hs new file mode 100644 index 000000000000..37025dd37ad2 --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Command.hs @@ -0,0 +1,73 @@ +-------------------------------------------------------------------------------- +module Xanthous.Command where +-------------------------------------------------------------------------------- +import Xanthous.Prelude hiding (Left, Right, Down) +-------------------------------------------------------------------------------- +import Graphics.Vty.Input (Key(..), Modifier(..)) +import qualified Data.Char as Char +-------------------------------------------------------------------------------- +import Xanthous.Data (Direction(..)) +-------------------------------------------------------------------------------- + +data Command + = Quit + | Move Direction + | StartAutoMove Direction + | PreviousMessage + | PickUp + | Drop + | Open + | Close + | Wait + | Eat + | Look + | Save + | Read + | ShowInventory + | Wield + | GoUp + | GoDown + + -- | TODO replace with `:` commands + | ToggleRevealAll + +commandFromKey :: Key -> [Modifier] -> Maybe Command +commandFromKey (KChar 'q') [] = Just Quit +commandFromKey (KChar '.') [] = Just Wait +commandFromKey (KChar (directionFromChar -> Just dir)) [] = Just $ Move dir +commandFromKey (KChar c) [] + | Char.isUpper c + , Just dir <- directionFromChar $ Char.toLower c + = Just $ StartAutoMove dir +commandFromKey (KChar 'p') [MCtrl] = Just PreviousMessage +commandFromKey (KChar ',') [] = Just PickUp +commandFromKey (KChar 'd') [] = Just Drop +commandFromKey (KChar 'o') [] = Just Open +commandFromKey (KChar 'c') [] = Just Close +commandFromKey (KChar ';') [] = Just Look 
+commandFromKey (KChar 'e') [] = Just Eat
+commandFromKey (KChar 'S') [] = Just Save
+commandFromKey (KChar 'r') [] = Just Read
+commandFromKey (KChar 'i') [] = Just ShowInventory
+commandFromKey (KChar 'w') [] = Just Wield
+commandFromKey (KChar '<') [] = Just GoUp
+commandFromKey (KChar '>') [] = Just GoDown
+
+-- DEBUG COMMANDS --
+commandFromKey (KChar 'r') [MMeta] = Just ToggleRevealAll
+
+commandFromKey _ _ = Nothing
+
+--------------------------------------------------------------------------------
+
+directionFromChar :: Char -> Maybe Direction
+directionFromChar 'h' = Just Left
+directionFromChar 'j' = Just Down
+directionFromChar 'k' = Just Up
+directionFromChar 'l' = Just Right
+directionFromChar 'y' = Just UpLeft
+directionFromChar 'u' = Just UpRight
+directionFromChar 'b' = Just DownLeft
+directionFromChar 'n' = Just DownRight
+directionFromChar '.' = Just Here
+directionFromChar _ = Nothing
diff --git a/users/glittershark/xanthous/src/Xanthous/Data.hs b/users/glittershark/xanthous/src/Xanthous/Data.hs
new file mode 100644
index 000000000000..c9c11b553b67
--- /dev/null
+++ b/users/glittershark/xanthous/src/Xanthous/Data.hs
@@ -0,0 +1,590 @@
+{-# LANGUAGE PartialTypeSignatures #-}
+{-# LANGUAGE StandaloneDeriving #-}
+{-# LANGUAGE RoleAnnotations #-}
+{-# LANGUAGE RecordWildCards #-}
+{-# LANGUAGE DeriveTraversable #-}
+{-# LANGUAGE DeriveFoldable #-}
+{-# LANGUAGE DeriveFunctor #-}
+{-# LANGUAGE TemplateHaskell #-}
+{-# LANGUAGE NoTypeSynonymInstances #-}
+{-# LANGUAGE DuplicateRecordFields #-}
+--------------------------------------------------------------------------------
+-- | Common data types for Xanthous
+--------------------------------------------------------------------------------
+module Xanthous.Data
+  ( Opposite(..)
+
+    -- *
+  , Position'(..)
+  , Position
+  , x
+  , y
+
+    -- **
+  , Positioned(..)
+  , _Positioned
+  , position
+  , positioned
+  , loc
+  , _Position
+  , positionFromPair
+  , positionFromV2
+  , addPositions
+  , diffPositions
+  , stepTowards
+  , isUnit
+
+    -- * Boxes
+  , Box(..)
+  , topLeftCorner
+  , bottomRightCorner
+  , setBottomRightCorner
+  , dimensions
+  , inBox
+  , boxIntersects
+  , boxCenter
+  , boxEdge
+  , module Linear.V2
+
+    -- *
+  , Per(..)
+  , invertRate
+  , invertedRate
+  , (|*|)
+  , Ticks(..)
+  , Tiles(..)
+  , TicksPerTile
+  , TilesPerTick
+  , timesTiles
+
+    -- *
+  , Dimensions'(..)
+  , Dimensions
+  , HasWidth(..)
+  , HasHeight(..)
+
+    -- *
+  , Direction(..)
+  , move
+  , asPosition
+  , directionOf
+  , Cardinal(..)
+
+    -- *
+  , Corner(..)
+  , Edge(..)
+  , cornerEdges
+
+    -- *
+  , Neighbors(..)
+  , edges
+  , neighborDirections
+  , neighborPositions
+  , neighborCells
+  , arrayNeighbors
+  , rotations
+  , HasTopLeft(..)
+  , HasTop(..)
+  , HasTopRight(..)
+  , HasLeft(..)
+  , HasRight(..)
+  , HasBottomLeft(..)
+  , HasBottom(..)
+  , HasBottomRight(..)
+
+    -- *
+  , Hitpoints(..)
+ ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude hiding (Left, Down, Right, (.=), elements) +-------------------------------------------------------------------------------- +import Linear.V2 hiding (_x, _y) +import qualified Linear.V2 as L +import Linear.V4 hiding (_x, _y) +import Test.QuickCheck (CoArbitrary, Function, elements) +import Test.QuickCheck.Arbitrary.Generic +import Data.Group +import Brick (Location(Location), Edges(..)) +import Data.Monoid (Product(..), Sum(..)) +import Data.Array.IArray +import Data.Aeson.Generic.DerivingVia +import Data.Aeson + ( ToJSON(..), FromJSON(..), object, (.=), (.:), withObject) +-------------------------------------------------------------------------------- +import Xanthous.Util (EqEqProp(..), EqProp, between) +import Xanthous.Util.QuickCheck (GenericArbitrary(..)) +import Xanthous.Orphans () +import Xanthous.Util.Graphics +-------------------------------------------------------------------------------- + +-- | opposite ∘ opposite ≡ id +class Opposite x where + opposite :: x -> x + +-------------------------------------------------------------------------------- + +-- fromScalar ∘ scalar ≡ id +class Scalar a where + scalar :: a -> Double + fromScalar :: Double -> a + +instance Scalar Double where + scalar = id + fromScalar = id + +newtype ScalarIntegral a = ScalarIntegral a + deriving newtype (Eq, Ord, Num, Enum, Real, Integral) +instance Integral a => Scalar (ScalarIntegral a) where + scalar = fromIntegral + fromScalar = floor + +deriving via (ScalarIntegral Integer) instance Scalar Integer +deriving via (ScalarIntegral Word) instance Scalar Word + +-------------------------------------------------------------------------------- + +data Position' a where + Position :: { _x :: a + , _y :: a + } -> (Position' a) + deriving stock (Show, Eq, Generic, Ord, Functor, Foldable, Traversable) + deriving anyclass (NFData, Hashable, CoArbitrary, Function) + deriving EqProp via EqEqProp (Position' a) + deriving (ToJSON, FromJSON) + via WithOptions '[ FieldLabelModifier '[Drop 1] ] + (Position' a) + +x, y :: Lens' (Position' a) a +x = lens (\(Position xx _) -> xx) (\(Position _ yy) xx -> Position xx yy) +y = lens (\(Position _ yy) -> yy) (\(Position xx _) yy -> Position xx yy) + +type Position = Position' Int + +instance Arbitrary a => Arbitrary (Position' a) where + arbitrary = genericArbitrary + shrink (Position px py) = Position <$> shrink px <*> shrink py + + +instance Num a => Semigroup (Position' a) where + (Position x₁ y₁) <> (Position x₂ y₂) = Position (x₁ + x₂) (y₁ + y₂) + +instance Num a => Monoid (Position' a) where + mempty = Position 0 0 + +instance Num a => Group (Position' a) where + invert (Position px py) = Position (negate px) (negate py) + +-- | Positions convert to scalars by discarding their orientation and just +-- measuring the length from the origin +instance (Ord a, Num a, Scalar a) => Scalar (Position' a) where + scalar = fromIntegral . length . line 0 . 
view _Position + fromScalar n = Position (fromScalar n) (fromScalar n) + +data Positioned a where + Positioned :: Position -> a -> Positioned a + deriving stock (Show, Eq, Ord, Functor, Foldable, Traversable, Generic) + deriving anyclass (NFData, CoArbitrary, Function) +type role Positioned representational + +_Positioned :: Iso (Position, a) (Position, b) (Positioned a) (Positioned b) +_Positioned = iso hither yon + where + hither (pos, a) = Positioned pos a + yon (Positioned pos b) = (pos, b) + +instance Arbitrary a => Arbitrary (Positioned a) where + arbitrary = Positioned <$> arbitrary <*> arbitrary + +instance ToJSON a => ToJSON (Positioned a) where + toJSON (Positioned pos val) = object + [ "position" .= pos + , "data" .= val + ] + +instance FromJSON a => FromJSON (Positioned a) where + parseJSON = withObject "Positioned" $ \obj -> + Positioned <$> obj .: "position" <*> obj .: "data" + +position :: Lens' (Positioned a) Position +position = lens + (\(Positioned pos _) -> pos) + (\(Positioned _ a) pos -> Positioned pos a) + +positioned :: Lens (Positioned a) (Positioned b) a b +positioned = lens + (\(Positioned _ x') -> x') + (\(Positioned pos _) x' -> Positioned pos x') + +loc :: Iso' Position Location +loc = iso hither yon + where + hither (Position px py) = Location (px, py) + yon (Location (lx, ly)) = Position lx ly + +_Position :: Iso' (Position' a) (V2 a) +_Position = iso hither yon + where + hither (Position px py) = (V2 px py) + yon (V2 lx ly) = Position lx ly + +positionFromPair :: (Num a, Integral i, Integral j) => (i, j) -> Position' a +positionFromPair (i, j) = Position (fromIntegral i) (fromIntegral j) + +positionFromV2 :: (Num a, Integral i) => V2 i -> Position' a +positionFromV2 (V2 xx yy) = Position (fromIntegral xx) (fromIntegral yy) + +-- | Add two positions +-- +-- Operation for the additive group on positions +addPositions :: Num a => Position' a -> Position' a -> Position' a +addPositions = (<>) + +-- | Subtract two positions. +-- +-- diffPositions pos₁ pos₂ = pos₁ `addPositions` (invert pos₂) +diffPositions :: Num a => Position' a -> Position' a -> Position' a +diffPositions (Position x₁ y₁) (Position x₂ y₂) = Position (x₁ - x₂) (y₁ - y₂) + +-- | Is this position a unit position? or: When taken as a difference, does this +-- position represent a step of one tile? +-- +-- ∀ dir :: Direction. 
isUnit ('asPosition' dir) +isUnit :: (Eq a, Num a) => Position' a -> Bool +isUnit (Position px py) = + abs px `elem` [0,1] && abs py `elem` [0, 1] && (px, py) /= (0, 0) + +-------------------------------------------------------------------------------- + +data Dimensions' a = Dimensions + { _width :: a + , _height :: a + } + deriving stock (Show, Eq, Functor, Generic) + deriving anyclass (CoArbitrary, Function) +makeFieldsNoPrefix ''Dimensions' + +instance Arbitrary a => Arbitrary (Dimensions' a) where + arbitrary = Dimensions <$> arbitrary <*> arbitrary + +type Dimensions = Dimensions' Word + +-------------------------------------------------------------------------------- + +data Direction where + Up :: Direction + Down :: Direction + Left :: Direction + Right :: Direction + UpLeft :: Direction + UpRight :: Direction + DownLeft :: Direction + DownRight :: Direction + Here :: Direction + deriving stock (Show, Eq, Ord, Generic) + deriving anyclass (CoArbitrary, Function, NFData, ToJSON, FromJSON, Hashable) + deriving Arbitrary via GenericArbitrary Direction + +instance Opposite Direction where + opposite Up = Down + opposite Down = Up + opposite Left = Right + opposite Right = Left + opposite UpLeft = DownRight + opposite UpRight = DownLeft + opposite DownLeft = UpRight + opposite DownRight = UpLeft + opposite Here = Here + +move :: Num a => Direction -> Position' a -> Position' a +move Up = y -~ 1 +move Down = y +~ 1 +move Left = x -~ 1 +move Right = x +~ 1 +move UpLeft = move Up . move Left +move UpRight = move Up . move Right +move DownLeft = move Down . move Left +move DownRight = move Down . move Right +move Here = id + +asPosition :: Direction -> Position +asPosition dir = move dir mempty + +-- | Returns the direction that a given position is from a given source position +directionOf + :: Position -- ^ Source + -> Position -- ^ Target + -> Direction +directionOf (Position x₁ y₁) (Position x₂ y₂) = + case (x₁ `compare` x₂, y₁ `compare` y₂) of + (EQ, EQ) -> Here + (EQ, LT) -> Down + (EQ, GT) -> Up + (LT, EQ) -> Right + (GT, EQ) -> Left + + (LT, LT) -> DownRight + (GT, LT) -> DownLeft + + (LT, GT) -> UpRight + (GT, GT) -> UpLeft + +-- | Take one (potentially diagonal) step towards the given position +-- +-- ∀ src tgt. 
isUnit (src `diffPositions` (src `stepTowards tgt`)) +stepTowards + :: Position -- ^ Source + -> Position -- ^ Target + -> Position +stepTowards (view _Position -> p₁) (view _Position -> p₂) + | p₁ == p₂ = _Position # p₁ + | otherwise = + let (_:p:_) = line p₁ p₂ + in _Position # p + +-- | Newtype controlling arbitrary generation to only include cardinal +-- directions ('Up', 'Down', 'Left', 'Right') +newtype Cardinal = Cardinal { getCardinal :: Direction } + deriving stock (Eq, Show, Ord, Generic) + deriving anyclass (NFData, Function, CoArbitrary) + deriving newtype (Opposite) + +instance Arbitrary Cardinal where + arbitrary = Cardinal <$> elements [Up, Down, Left, Right] + +-------------------------------------------------------------------------------- + +data Corner + = TopLeft + | TopRight + | BottomLeft + | BottomRight + deriving stock (Show, Eq, Ord, Enum, Bounded, Generic) + deriving Arbitrary via GenericArbitrary Corner + +instance Opposite Corner where + opposite TopLeft = BottomRight + opposite TopRight = BottomLeft + opposite BottomLeft = TopRight + opposite BottomRight = TopLeft + +data Edge + = TopEdge + | LeftEdge + | RightEdge + | BottomEdge + deriving stock (Show, Eq, Ord, Enum, Bounded, Generic) + deriving Arbitrary via GenericArbitrary Edge + +instance Opposite Edge where + opposite TopEdge = BottomEdge + opposite BottomEdge = TopEdge + opposite LeftEdge = RightEdge + opposite RightEdge = LeftEdge + +cornerEdges :: Corner -> (Edge, Edge) +cornerEdges TopLeft = (TopEdge, LeftEdge) +cornerEdges TopRight = (TopEdge, RightEdge) +cornerEdges BottomLeft = (BottomEdge, LeftEdge) +cornerEdges BottomRight = (BottomEdge, RightEdge) + +-------------------------------------------------------------------------------- + +data Neighbors a = Neighbors + { _topLeft + , _top + , _topRight + , _left + , _right + , _bottomLeft + , _bottom + , _bottomRight :: a + } + deriving stock (Show, Eq, Ord, Functor, Foldable, Traversable, Generic) + deriving anyclass (NFData, CoArbitrary, Function, MonoFoldable) + deriving Arbitrary via GenericArbitrary (Neighbors a) + +type instance Element (Neighbors a) = a + +makeFieldsNoPrefix ''Neighbors + +instance Applicative Neighbors where + pure α = Neighbors + { _topLeft = α + , _top = α + , _topRight = α + , _left = α + , _right = α + , _bottomLeft = α + , _bottom = α + , _bottomRight = α + } + nf <*> nx = Neighbors + { _topLeft = nf ^. topLeft $ nx ^. topLeft + , _top = nf ^. top $ nx ^. top + , _topRight = nf ^. topRight $ nx ^. topRight + , _left = nf ^. left $ nx ^. left + , _right = nf ^. right $ nx ^. right + , _bottomLeft = nf ^. bottomLeft $ nx ^. bottomLeft + , _bottom = nf ^. bottom $ nx ^. bottom + , _bottomRight = nf ^. bottomRight $ nx ^. bottomRight + } + +edges :: Neighbors a -> Edges a +edges neighs = Edges + { eTop = neighs ^. top + , eBottom = neighs ^. bottom + , eLeft = neighs ^. left + , eRight = neighs ^. right + } + +neighborDirections :: Neighbors Direction +neighborDirections = Neighbors + { _topLeft = UpLeft + , _top = Up + , _topRight = UpRight + , _left = Left + , _right = Right + , _bottomLeft = DownLeft + , _bottom = Down + , _bottomRight = DownRight + } + +neighborPositions :: Num a => Position' a -> Neighbors (Position' a) +neighborPositions pos = (`move` pos) <$> neighborDirections + +neighborCells :: Num a => V2 a -> Neighbors (V2 a) +neighborCells = map (view _Position) . neighborPositions . 
review _Position + +arrayNeighbors + :: (IArray a e, Ix i, Num i) + => a (V2 i) e + -> V2 i + -> Neighbors (Maybe e) +arrayNeighbors arr center = arrLookup <$> neighborPositions (_Position # center) + where + arrLookup (view _Position -> pos) + | inRange (bounds arr) pos = Just $ arr ! pos + | otherwise = Nothing + +-- | Returns a list of all 4 90-degree rotations of the given neighbors +rotations :: Neighbors a -> V4 (Neighbors a) +rotations orig@(Neighbors tl t tr l r bl b br) = V4 + orig -- tl t tr + -- l r + -- bl b br + + (Neighbors bl l tl b t br r tr) -- bl l tl + -- b t + -- br r tr + + (Neighbors br b bl r l tr t tl) -- br b bl + -- r l + -- tr t tl + + (Neighbors tr r br t b tl l bl) -- tr r br + -- t b + -- tl l bl + +-------------------------------------------------------------------------------- + +newtype Per a b = Rate Double + deriving stock (Show, Eq, Generic) + deriving anyclass (NFData, CoArbitrary, Function) + deriving (Num, Ord, Enum, Real, Fractional, ToJSON, FromJSON) via Double + deriving (Semigroup, Monoid) via Product Double +instance Arbitrary (Per a b) where arbitrary = genericArbitrary + +invertRate :: a `Per` b -> b `Per` a +invertRate (Rate p) = Rate $ 1 / p + +invertedRate :: Iso (a `Per` b) (b' `Per` a') (b `Per` a) (a' `Per` b') +invertedRate = iso invertRate invertRate + +infixl 7 |*| +(|*|) :: (Scalar a, Scalar b) => a `Per` b -> b -> a +(|*|) (Rate rate) b = fromScalar $ rate * scalar b + +newtype Ticks = Ticks Word + deriving stock (Show, Eq, Generic) + deriving anyclass (NFData, CoArbitrary, Function) + deriving (Num, Ord, Bounded, Enum, Integral, Real, ToJSON, FromJSON) via Word + deriving (Semigroup, Monoid) via (Sum Word) + deriving Scalar via ScalarIntegral Ticks +instance Arbitrary Ticks where arbitrary = genericArbitrary + +newtype Tiles = Tiles Double + deriving stock (Show, Eq, Generic) + deriving anyclass (NFData, CoArbitrary, Function) + deriving (Num, Ord, Enum, Real, ToJSON, FromJSON, Scalar) via Double + deriving (Semigroup, Monoid) via (Sum Double) +instance Arbitrary Tiles where arbitrary = genericArbitrary + +type TicksPerTile = Ticks `Per` Tiles +type TilesPerTick = Tiles `Per` Ticks + +timesTiles :: TicksPerTile -> Tiles -> Ticks +timesTiles = (|*|) + +-------------------------------------------------------------------------------- + +newtype Hitpoints = Hitpoints Word + deriving stock (Show, Eq, Generic) + deriving anyclass (NFData, CoArbitrary, Function) + deriving (Arbitrary, Num, Ord, Bounded, Enum, Integral, Real, ToJSON, FromJSON) + via Word + deriving (Semigroup, Monoid) via Sum Word + +-------------------------------------------------------------------------------- + +data Box a = Box + { _topLeftCorner :: V2 a + , _dimensions :: V2 a + } + deriving stock (Show, Eq, Ord, Functor, Generic) + deriving Arbitrary via GenericArbitrary (Box a) +makeFieldsNoPrefix ''Box + +bottomRightCorner :: Num a => Box a -> V2 a +bottomRightCorner box = + V2 (box ^. topLeftCorner . L._x + box ^. dimensions . L._x) + (box ^. topLeftCorner . L._y + box ^. dimensions . L._y) + +setBottomRightCorner :: (Num a, Ord a) => Box a -> V2 a -> Box a +setBottomRightCorner box br@(V2 brx bry) + | brx < box ^. topLeftCorner . L._x || bry < box ^. topLeftCorner . L._y + = box & topLeftCorner .~ br + & dimensions . L._x .~ ((box ^. topLeftCorner . L._x) - brx) + & dimensions . L._y .~ ((box ^. topLeftCorner . L._y) - bry) + | otherwise + = box & dimensions . L._x .~ (brx - (box ^. topLeftCorner . L._x)) + & dimensions . L._y .~ (bry - (box ^. topLeftCorner . 
L._y))
+
+inBox :: (Ord a, Num a) => Box a -> V2 a -> Bool
+inBox box pt = flip all [L._x, L._y] $ \component ->
+  between (box ^. topLeftCorner . component)
+          (box ^. to bottomRightCorner . component)
+          (pt ^. component)
+
+boxIntersects :: (Ord a, Num a) => Box a -> Box a -> Bool
+boxIntersects box₁ box₂
+  = any (inBox box₁) [box₂ ^. topLeftCorner, bottomRightCorner box₂]
+
+boxCenter :: (Fractional a) => Box a -> V2 a
+boxCenter box = V2 cx cy
+  where
+    cx = box ^. topLeftCorner . L._x + (box ^. dimensions . L._x / 2)
+    cy = box ^. topLeftCorner . L._y + (box ^. dimensions . L._y / 2)
+
+boxEdge :: (Enum a, Num a) => Box a -> Edge -> [V2 a]
+boxEdge box LeftEdge =
+  V2 (box ^. topLeftCorner . L._x)
+  <$> [box ^. topLeftCorner . L._y .. box ^. to bottomRightCorner . L._y]
+boxEdge box RightEdge =
+  V2 (box ^. to bottomRightCorner . L._x)
+  <$> [box ^. topLeftCorner . L._y .. box ^. to bottomRightCorner . L._y]
+boxEdge box TopEdge =
+  flip V2 (box ^. topLeftCorner . L._y)
+  <$> [box ^. topLeftCorner . L._x .. box ^. to bottomRightCorner . L._x]
+boxEdge box BottomEdge =
+  flip V2 (box ^. to bottomRightCorner . L._y)
+  <$> [box ^. topLeftCorner . L._x .. box ^. to bottomRightCorner . L._x]
diff --git a/users/glittershark/xanthous/src/Xanthous/Data/App.hs b/users/glittershark/xanthous/src/Xanthous/Data/App.hs
new file mode 100644
index 000000000000..0361d2a59ed5
--- /dev/null
+++ b/users/glittershark/xanthous/src/Xanthous/Data/App.hs
@@ -0,0 +1,39 @@
+--------------------------------------------------------------------------------
+module Xanthous.Data.App
+  ( Panel(..)
+  , ResourceName(..)
+  , AppEvent(..)
+  ) where
+--------------------------------------------------------------------------------
+import Xanthous.Prelude
+--------------------------------------------------------------------------------
+import Test.QuickCheck
+import Data.Aeson (ToJSON, FromJSON)
+--------------------------------------------------------------------------------
+import Xanthous.Util.QuickCheck
+--------------------------------------------------------------------------------
+
+-- | Enum for "panels" displayed in the game's UI.
+data Panel
+  = InventoryPanel -- ^ A panel displaying the character's inventory
+  deriving stock (Show, Eq, Ord, Generic, Enum, Bounded)
+  deriving anyclass (NFData, CoArbitrary, Function, ToJSON, FromJSON)
+  deriving Arbitrary via GenericArbitrary Panel
+
+
+data ResourceName
+  = MapViewport -- ^ The main viewport where we display the game content
+  | Character   -- ^ The character
+  | MessageBox  -- ^ The box where we display messages to the user
+  | Prompt      -- ^ The game's prompt
+  | Panel Panel -- ^ A panel in the game
+  deriving stock (Show, Eq, Ord, Generic)
+  deriving anyclass (NFData, CoArbitrary, Function, ToJSON, FromJSON)
+  deriving Arbitrary via GenericArbitrary ResourceName
+
+data AppEvent
+  = AutoContinue -- ^ Continue whatever autocommand has been requested by the
+                 --   user
+  deriving stock (Show, Eq, Ord, Generic)
+  deriving anyclass (NFData, CoArbitrary, Function, ToJSON, FromJSON)
+  deriving Arbitrary via GenericArbitrary AppEvent
diff --git a/users/glittershark/xanthous/src/Xanthous/Data/Entities.hs b/users/glittershark/xanthous/src/Xanthous/Data/Entities.hs
new file mode 100644
index 000000000000..39953410f2f3
--- /dev/null
+++ b/users/glittershark/xanthous/src/Xanthous/Data/Entities.hs
@@ -0,0 +1,68 @@
+{-# LANGUAGE TemplateHaskell #-}
+{-# LANGUAGE RecordWildCards #-}
+--------------------------------------------------------------------------------
+module Xanthous.Data.Entities
+  ( -- * Collisions
+    Collision(..)
+  , _Stop
+  , _Combat
+    -- * Entity Attributes
+  , EntityAttributes(..)
+  , blocksVision
+  , blocksObject
+  , collision
+  , defaultEntityAttributes
+  ) where
+--------------------------------------------------------------------------------
+import Xanthous.Prelude
+--------------------------------------------------------------------------------
+import Data.Aeson (ToJSON(..), FromJSON(..), (.:?), (.!=), withObject)
+import Data.Aeson.Generic.DerivingVia
+import Xanthous.Util.QuickCheck (GenericArbitrary(..))
+import Test.QuickCheck
+--------------------------------------------------------------------------------
+
+data Collision
+  = Stop   -- ^ Can't move through this
+  | Combat -- ^ Moving into this equates to hitting it with a stick
+  deriving stock (Show, Eq, Ord, Generic)
+  deriving anyclass (NFData, CoArbitrary, Function)
+  deriving Arbitrary via GenericArbitrary Collision
+  deriving (ToJSON, FromJSON)
+       via WithOptions '[ AllNullaryToStringTag 'True ]
+           Collision
+makePrisms ''Collision
+
+-- | Attributes of an entity
+data EntityAttributes = EntityAttributes
+  { _blocksVision :: Bool
+    -- | Does this entity block a large object from being put in the same tile as
+    --   it - e.g. a door being closed on it
+  , _blocksObject :: Bool
+    -- | What type of collision happens when moving into this entity?
+  , _collision :: Collision
+  }
+  deriving stock (Show, Eq, Ord, Generic)
+  deriving anyclass (NFData, CoArbitrary, Function)
+  deriving Arbitrary via GenericArbitrary EntityAttributes
+  deriving (ToJSON)
+       via WithOptions '[ FieldLabelModifier '[Drop 1] ]
+           EntityAttributes
+makeLenses ''EntityAttributes
+
+instance FromJSON EntityAttributes where
+  parseJSON = withObject "EntityAttributes" $ \o -> do
+    _blocksVision <- o .:? "blocksVision"
+                     .!= _blocksVision defaultEntityAttributes
+    _blocksObject <- o .:? "blocksObject"
+                     .!= _blocksObject defaultEntityAttributes
+    _collision <- o .:?
"collision" + .!= _collision defaultEntityAttributes + pure EntityAttributes {..} + +defaultEntityAttributes :: EntityAttributes +defaultEntityAttributes = EntityAttributes + { _blocksVision = False + , _blocksObject = False + , _collision = Stop + } diff --git a/users/glittershark/xanthous/src/Xanthous/Data/EntityChar.hs b/users/glittershark/xanthous/src/Xanthous/Data/EntityChar.hs new file mode 100644 index 000000000000..855a3462daee --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Data/EntityChar.hs @@ -0,0 +1,56 @@ +{-# LANGUAGE RoleAnnotations #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE UndecidableInstances #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE AllowAmbiguousTypes #-} +{-# LANGUAGE TemplateHaskell #-} +-------------------------------------------------------------------------------- +module Xanthous.Data.EntityChar + ( EntityChar(..) + , HasChar(..) + , HasStyle(..) + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude hiding ((.=)) +-------------------------------------------------------------------------------- +import qualified Graphics.Vty.Attributes as Vty +import Test.QuickCheck +import Data.Aeson +-------------------------------------------------------------------------------- +import Xanthous.Orphans () +import Xanthous.Util.QuickCheck (GenericArbitrary(..)) +-------------------------------------------------------------------------------- + + +class HasChar s a | s -> a where + char :: Lens' s a + {-# MINIMAL char #-} + +data EntityChar = EntityChar + { _char :: Char + , _style :: Vty.Attr + } + deriving stock (Show, Eq, Ord, Generic) + deriving anyclass (NFData, CoArbitrary, Function) + deriving Arbitrary via GenericArbitrary EntityChar +makeFieldsNoPrefix ''EntityChar + +instance FromJSON EntityChar where + parseJSON (String (chr :< Empty)) = pure $ EntityChar chr Vty.defAttr + parseJSON (Object o) = do + (EntityChar _char _) <- o .: "char" + _style <- o .:? "style" .!= Vty.defAttr + pure EntityChar {..} + parseJSON _ = fail "Invalid type, expected string or object" + +instance ToJSON EntityChar where + toJSON (EntityChar chr styl) + | styl == Vty.defAttr = String $ chr <| Empty + | otherwise = object + [ "char" .= chr + , "style" .= styl + ] + +instance IsString EntityChar where + fromString [ch] = EntityChar ch Vty.defAttr + fromString _ = error "Entity char must only be a single character" diff --git a/users/glittershark/xanthous/src/Xanthous/Data/EntityMap.hs b/users/glittershark/xanthous/src/Xanthous/Data/EntityMap.hs new file mode 100644 index 000000000000..d24defa841ab --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Data/EntityMap.hs @@ -0,0 +1,272 @@ +{-# LANGUAGE UndecidableInstances #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE DeriveTraversable #-} +{-# LANGUAGE TupleSections #-} +{-# LANGUAGE TemplateHaskell #-} +{-# LANGUAGE StandaloneDeriving #-} +{-# LANGUAGE DeriveFunctor #-} +-------------------------------------------------------------------------------- +module Xanthous.Data.EntityMap + ( EntityMap + , _EntityMap + , EntityID + , emptyEntityMap + , insertAt + , insertAtReturningID + , fromEIDsAndPositioned + , toEIDsAndPositioned + , atPosition + , atPositionWithIDs + , positions + , lookup + , lookupWithPosition + -- , positionedEntities + , neighbors + , Deduplicate(..) 
+ + -- * debug + , byID + , byPosition + , lastID + + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude hiding (lookup) +import Xanthous.Data + ( Position + , Positioned(..) + , positioned + , Neighbors(..) + , neighborPositions + ) +import Xanthous.Data.VectorBag +import Xanthous.Orphans () +import Xanthous.Util (EqEqProp(..)) +-------------------------------------------------------------------------------- +import Data.Monoid (Endo(..)) +import Test.QuickCheck (Arbitrary(..), CoArbitrary, Function) +import Test.QuickCheck.Checkers (EqProp) +import Test.QuickCheck.Instances.UnorderedContainers () +import Test.QuickCheck.Instances.Vector () +import Text.Show (showString, showParen) +import Data.Aeson +-------------------------------------------------------------------------------- + +type EntityID = Word32 +type NonNullSet a = NonNull (Set a) + +data EntityMap a where + EntityMap :: + { _byPosition :: Map Position (NonNullSet EntityID) + , _byID :: HashMap EntityID (Positioned a) + , _lastID :: EntityID + } -> EntityMap a + deriving stock (Functor, Foldable, Traversable, Generic) + deriving anyclass (NFData, CoArbitrary, Function) +deriving via (EqEqProp (EntityMap a)) instance (Eq a, Ord a) => EqProp (EntityMap a) +makeLenses ''EntityMap + +instance ToJSON a => ToJSON (EntityMap a) where + toJSON = toJSON . toEIDsAndPositioned + + +instance FromJSON a => FromJSON (EntityMap a) where + parseJSON = fmap (fromEIDsAndPositioned @[_]) . parseJSON + +byIDInvariantError :: forall a. a +byIDInvariantError = error $ "Invariant violation: All EntityIDs in byPosition " + <> "must point to entityIDs in byID" + +instance (Ord a, Eq a) => Eq (EntityMap a) where + -- em₁ == em₂ = em₁ ^. _EntityMap == em₂ ^. _EntityMap + (==) = (==) `on` view (_EntityMap . to sort) + +deriving stock instance (Ord a) => Ord (EntityMap a) + +instance Show a => Show (EntityMap a) where + showsPrec pr em + = showParen (pr > 10) + $ showString + . ("fromEIDsAndPositioned " <>) + . show + . toEIDsAndPositioned + $ em + +instance Arbitrary a => Arbitrary (EntityMap a) where + arbitrary = review _EntityMap <$> arbitrary + shrink em = review _EntityMap <$> shrink (em ^. _EntityMap) + +type instance Index (EntityMap a) = EntityID +type instance IxValue (EntityMap a) = (Positioned a) +instance Ixed (EntityMap a) where ix eid = at eid . traverse + +instance At (EntityMap a) where + at eid = lens (view $ byID . at eid) setter + where + setter :: EntityMap a -> Maybe (Positioned a) -> EntityMap a + setter m Nothing = fromMaybe m $ do + Positioned pos _ <- m ^. byID . at eid + pure $ m + & removeEIDAtPos pos + & byID . at eid .~ Nothing + setter m (Just pe@(Positioned pos _)) = m + & (case lookupWithPosition eid m of + Nothing -> id + Just (Positioned origPos _) -> removeEIDAtPos origPos + ) + & byID . at eid ?~ pe + & byPosition . at pos %~ \case + Nothing -> Just $ opoint eid + Just es -> Just $ ninsertSet eid es + removeEIDAtPos pos = + byPosition . at pos %~ (>>= fromNullable . ndeleteSet eid) + +instance Semigroup (EntityMap a) where + em₁ <> em₂ = alaf Endo foldMap (uncurry insertAt) (em₂ ^. _EntityMap) em₁ + +instance Monoid (EntityMap a) where + mempty = emptyEntityMap + +instance FunctorWithIndex EntityID EntityMap + +instance FoldableWithIndex EntityID EntityMap + +instance TraversableWithIndex EntityID EntityMap where + itraversed = byID . itraversed . rmap sequenceA . 
distrib + itraverse = itraverseOf itraversed + +type instance Element (EntityMap a) = a +instance MonoFoldable (EntityMap a) + +emptyEntityMap :: EntityMap a +emptyEntityMap = EntityMap mempty mempty 0 + +newtype Deduplicate a = Deduplicate (EntityMap a) + deriving stock (Show, Traversable, Generic) + deriving newtype (Eq, Functor, Foldable, EqProp, Arbitrary) + +instance Semigroup (Deduplicate a) where + (Deduplicate em₁) <> (Deduplicate em₂) = + let _byID = em₁ ^. byID <> em₂ ^. byID + _byPosition = mempty &~ do + ifor_ _byID $ \eid (Positioned pos _) -> + at pos %= \case + Just eids -> Just $ ninsertSet eid eids + Nothing -> Just $ opoint eid + _lastID = fromMaybe 1 $ maximumOf (ifolded . asIndex) _byID + in Deduplicate EntityMap{..} + + +-------------------------------------------------------------------------------- + +_EntityMap :: Iso' (EntityMap a) [(Position, a)] +_EntityMap = iso hither yon + where + hither :: EntityMap a -> [(Position, a)] + hither em = do + (pos, eids) <- em ^. byPosition . _Wrapped + eid <- toList eids + ent <- em ^.. byID . at eid . folded . positioned + pure (pos, ent) + yon :: [(Position, a)] -> EntityMap a + yon poses = alaf Endo foldMap (uncurry insertAt) poses emptyEntityMap + + +insertAtReturningID :: forall a. Position -> a -> EntityMap a -> (EntityID, EntityMap a) +insertAtReturningID pos e em = + let (eid, em') = em & lastID <+~ 1 + in em' + & byID . at eid ?~ Positioned pos e + & byPosition . at pos %~ \case + Nothing -> Just $ opoint eid + Just es -> Just $ ninsertSet eid es + & (eid, ) + +insertAt :: forall a. Position -> a -> EntityMap a -> EntityMap a +insertAt pos e = snd . insertAtReturningID pos e + +atPosition :: forall a. (Ord a, Show a) => Position -> Lens' (EntityMap a) (VectorBag a) +atPosition pos = lens getter setter + where + getter em = + let eids :: VectorBag EntityID + eids = maybe mempty (VectorBag . toVector . toNullable) + $ em ^. byPosition . at pos + in getEIDAssume em <$> eids + setter em Empty = em & byPosition . at pos .~ Nothing + setter em (sort -> entities) = + let origEIDs = maybe Empty toNullable $ em ^. byPosition . at pos + origEntitiesWithIDs = + sortOn snd $ toList origEIDs <&> \eid -> (eid, getEIDAssume em eid) + go alles₁@((eid, e₁) :< es₁) -- orig + (e₂ :< es₂) -- new + | e₁ == e₂ + -- same, do nothing + = let (eids, lastEID, byID') = go es₁ es₂ + in (insertSet eid eids, lastEID, byID') + | otherwise + -- e₂ is new, generate a new ID for it + = let (eids, lastEID, byID') = go alles₁ es₂ + eid' = succ lastEID + in (insertSet eid' eids, eid', byID' & at eid' ?~ Positioned pos e₂) + go Empty Empty = (mempty, em ^. lastID, em ^. byID) + go orig Empty = + let byID' = foldr deleteMap (em ^. byID) $ map fst orig + in (mempty, em ^. lastID, byID') + go Empty (new :< news) = + let (eids, lastEID, byID') = go Empty news + eid' = succ lastEID + in (insertSet eid' eids, eid', byID' & at eid' ?~ Positioned pos new) + go _ _ = error "unreachable" + (eidsAtPosition, newLastID, newByID) = go origEntitiesWithIDs entities + in em & byPosition . at pos .~ fromNullable eidsAtPosition + & byID .~ newByID + & lastID .~ newLastID + +getEIDAssume :: EntityMap a -> EntityID -> a +getEIDAssume em eid = fromMaybe byIDInvariantError + $ em ^? byID . ix eid . positioned + +atPositionWithIDs :: Position -> EntityMap a -> Vector (EntityID, Positioned a) +atPositionWithIDs pos em = + let eids = maybe mempty (toVector . toNullable) + $ em ^. byPosition . at pos + in (id &&& Positioned pos . 
getEIDAssume em) <$> eids + +fromEIDsAndPositioned + :: forall mono a. (MonoFoldable mono, Element mono ~ (EntityID, Positioned a)) + => mono + -> EntityMap a +fromEIDsAndPositioned eps = newLastID $ alaf Endo foldMap insert' eps mempty + where + insert' (eid, pe@(Positioned pos _)) + = (byID . at eid ?~ pe) + . (byPosition . at pos %~ \case + Just eids -> Just $ ninsertSet eid eids + Nothing -> Just $ opoint eid + ) + newLastID em = em & lastID + .~ fromMaybe 1 + (maximumOf (ifolded . asIndex) (em ^. byID)) + +toEIDsAndPositioned :: EntityMap a -> [(EntityID, Positioned a)] +toEIDsAndPositioned = itoListOf $ byID . ifolded + +positions :: EntityMap a -> [Position] +positions = toListOf $ byPosition . to keys . folded + +lookupWithPosition :: EntityID -> EntityMap a -> Maybe (Positioned a) +lookupWithPosition eid = view $ byID . at eid + +lookup :: EntityID -> EntityMap a -> Maybe a +lookup eid = fmap (view positioned) . lookupWithPosition eid + +-- unlawful :( +-- positionedEntities :: IndexedTraversal EntityID (EntityMap a) (EntityMap b) (Positioned a) (Positioned b) +-- positionedEntities = byID . itraversed + +neighbors :: (Ord a, Show a) => Position -> EntityMap a -> Neighbors (VectorBag a) +neighbors pos em = (\p -> view (atPosition p) em) <$> neighborPositions pos + +-------------------------------------------------------------------------------- +makeWrapped ''Deduplicate diff --git a/users/glittershark/xanthous/src/Xanthous/Data/EntityMap/Graphics.hs b/users/glittershark/xanthous/src/Xanthous/Data/EntityMap/Graphics.hs new file mode 100644 index 000000000000..19e7b0cdf086 --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Data/EntityMap/Graphics.hs @@ -0,0 +1,64 @@ +-------------------------------------------------------------------------------- +module Xanthous.Data.EntityMap.Graphics + ( visiblePositions + , visibleEntities + , linesOfSight + , canSee + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude hiding (lines) +-------------------------------------------------------------------------------- +import Xanthous.Util (takeWhileInclusive) +import Xanthous.Data +import Xanthous.Data.Entities +import Xanthous.Data.EntityMap +import Xanthous.Game.State +import Xanthous.Util.Graphics (circle, line) +-------------------------------------------------------------------------------- + +-- | Returns a set of positions that are visible, when taking into account +-- 'blocksVision', from the given position, within the given radius. +visiblePositions + :: Entity e + => Position + -> Word -- ^ Vision radius + -> EntityMap e + -> Set Position +visiblePositions pos radius + = setFromList . positions . visibleEntities pos radius + +-- | Returns a list of individual lines of sight, each of which is a list of +-- entities at positions on that line of sight +linesOfSight + :: forall e. Entity e + => Position + -> Word + -> EntityMap e + -> [[(Position, Vector (EntityID, e))]] +linesOfSight (view _Position -> pos) visionRadius em + = entitiesOnLines + <&> takeWhileInclusive + (none (view blocksVision . entityAttributes . snd) . 
snd) + where + radius = circle pos $ fromIntegral visionRadius + lines = line pos <$> radius + entitiesOnLines :: [[(Position, Vector (EntityID, e))]] + entitiesOnLines = lines <&> map getPositionedAt + getPositionedAt :: V2 Int -> (Position, Vector (EntityID, e)) + getPositionedAt p = + let ppos = _Position # p + in (ppos, over _2 (view positioned) <$> atPositionWithIDs ppos em) + +-- | Given a point and a radius of vision, returns a list of all entities that +-- are *visible* (eg, not blocked by an entity that obscures vision) from that +-- point +visibleEntities :: Entity e => Position -> Word -> EntityMap e -> EntityMap e +visibleEntities pos visionRadius + = fromEIDsAndPositioned + . foldMap (\(p, es) -> over _2 (Positioned p) <$> es) + . fold + . linesOfSight pos visionRadius + +canSee :: Entity e => (e -> Bool) -> Position -> Word -> EntityMap e -> Bool +canSee match pos radius = any match . visibleEntities pos radius +-- ^ this might be optimizable diff --git a/users/glittershark/xanthous/src/Xanthous/Data/Levels.hs b/users/glittershark/xanthous/src/Xanthous/Data/Levels.hs new file mode 100644 index 000000000000..efc0f53acecf --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Data/Levels.hs @@ -0,0 +1,170 @@ +{-# LANGUAGE StandaloneDeriving #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE TemplateHaskell #-} +-------------------------------------------------------------------------------- +module Xanthous.Data.Levels + ( Levels + , allLevels + , nextLevel + , prevLevel + , mkLevels1 + , mkLevels + , oneLevel + , current + , ComonadStore(..) + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude hiding ((<.>), Empty, foldMap) +import Xanthous.Util (between, EqProp, EqEqProp(..)) +import Xanthous.Util.Comonad (current) +import Xanthous.Orphans () +-------------------------------------------------------------------------------- +import Control.Comonad.Store +import Control.Comonad.Store.Zipper +import Data.Aeson (ToJSON(..), FromJSON(..)) +import Data.Aeson.Generic.DerivingVia +import Data.Functor.Apply +import Data.Foldable (foldMap) +import Data.List.NonEmpty (NonEmpty) +import qualified Data.List.NonEmpty as NE +import Data.Maybe (fromJust) +import Data.Sequence (Seq((:<|), Empty)) +import Data.Semigroup.Foldable.Class +import Data.Text (replace) +import Test.QuickCheck +-------------------------------------------------------------------------------- + +-- | Collection of levels plus a pointer to the current level +-- +-- Navigation is via the 'Comonad' instance. We can get the current level with +-- 'extract': +-- +-- extract @Levels :: Levels level -> level +-- +-- For access to and modification of the level, use +-- 'Xanthous.Util.Comonad.current' +newtype Levels a = Levels { levelZipper :: Zipper Seq a } + deriving stock (Generic) + deriving (Functor, Comonad, Foldable) via (Zipper Seq) + deriving (ComonadStore Int) via (Zipper Seq) + +type instance Element (Levels a) = a +instance MonoFoldable (Levels a) +instance MonoFunctor (Levels a) +instance MonoTraversable (Levels a) + +instance Traversable Levels where + traverse f (Levels z) = Levels <$> traverse f z + +instance Foldable1 Levels + +instance Traversable1 Levels where + traverse1 f (Levels z) = seek (pos z) . 
partialMkLevels <$> go (unzipper z) + where + go Empty = error "empty seq, unreachable" + go (x :<| xs) = (<|) <$> f x <.> go xs + +-- | Always takes the position of the latter element +instance Semigroup (Levels a) where + levs₁ <> levs₂ + = seek (pos levs₂) + . partialMkLevels + $ allLevels levs₁ <> allLevels levs₂ + +-- | Make Levels from a Seq. Throws an error if the seq is not empty +partialMkLevels :: Seq a -> Levels a +partialMkLevels = Levels . fromJust . zipper + +-- | Make Levels from a possibly-empty structure +mkLevels :: Foldable1 f => f level -> Maybe (Levels level) +mkLevels = fmap Levels . zipper . foldMap pure + +-- | Make Levels from a non-empty structure +mkLevels1 :: Foldable1 f => f level -> Levels level +mkLevels1 = fromJust . mkLevels + +oneLevel :: a -> Levels a +oneLevel = mkLevels1 . Identity + +-- | Get a sequence of all the levels +allLevels :: Levels a -> Seq a +allLevels = unzipper . levelZipper + +-- | Step to the next level, generating a new level if necessary using the given +-- applicative action +nextLevel + :: Applicative m + => m level -- ^ Generate a new level, if necessary + -> Levels level + -> m (Levels level) +nextLevel genLevel levs + | pos levs + 1 < size (levelZipper levs) + = pure $ seeks succ levs + | otherwise + = genLevel <&> \level -> + seek (pos levs + 1) . partialMkLevels $ allLevels levs |> level + +-- | Go to the previous level. Returns Nothing if 'pos' is 0 +prevLevel :: Levels level -> Maybe (Levels level) +prevLevel levs | pos levs == 0 = Nothing + | otherwise = Just $ seeks pred levs + +-------------------------------------------------------------------------------- + +-- | alternate, slower representation of Levels we can Iso into to perform +-- various operations +data AltLevels a = AltLevels + { _levels :: NonEmpty a + , _currentLevel :: Int -- ^ invariant: is within the bounds of _levels + } + deriving stock (Show, Eq, Generic) + deriving anyclass (NFData, CoArbitrary, Function) + deriving (ToJSON, FromJSON) + via WithOptions '[ FieldLabelModifier '[Drop 1] ] + (AltLevels a) +makeLenses ''AltLevels + +alt :: Iso (Levels a) (Levels b) (AltLevels a) (AltLevels b) +alt = iso hither yon + where + hither levs = AltLevels (NE.fromList . toList $ allLevels levs) (pos levs) + yon (AltLevels levs curr) = seek curr $ mkLevels1 levs + +instance Eq a => Eq (Levels a) where + (==) = (==) `on` view alt + +deriving via EqEqProp (Levels a) instance Eq a => EqProp (Levels a) + +instance Show a => Show (Levels a) where + show = unpack . replace "AltLevels" "Levels" . pack . show . view alt + +instance NFData a => NFData (Levels a) where + rnf = rnf . view alt + +instance ToJSON a => ToJSON (Levels a) where + toJSON = toJSON . view alt + +instance FromJSON a => FromJSON (Levels a) where + parseJSON = fmap (review alt) . parseJSON + +instance Arbitrary a => Arbitrary (AltLevels a) where + arbitrary = do + _levels <- arbitrary + _currentLevel <- choose (0, length _levels - 1) + pure AltLevels {..} + shrink als = do + _levels <- shrink $ als ^. levels + _currentLevel <- filter (between 0 $ length _levels - 1) + $ shrink $ als ^. currentLevel + pure AltLevels {..} + + +instance Arbitrary a => Arbitrary (Levels a) where + arbitrary = review alt <$> arbitrary + shrink = fmap (review alt) . shrink . view alt + +instance CoArbitrary a => CoArbitrary (Levels a) where + coarbitrary = coarbitrary . 
view alt + +instance Function a => Function (Levels a) where + function = functionMap (view alt) (review alt) diff --git a/users/glittershark/xanthous/src/Xanthous/Data/NestedMap.hs b/users/glittershark/xanthous/src/Xanthous/Data/NestedMap.hs new file mode 100644 index 000000000000..1b875d448302 --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Data/NestedMap.hs @@ -0,0 +1,227 @@ +{-# LANGUAGE PartialTypeSignatures #-} +{-# LANGUAGE UndecidableInstances #-} +{-# LANGUAGE QuantifiedConstraints #-} +{-# LANGUAGE StandaloneDeriving #-} +{-# LANGUAGE PolyKinds #-} +-------------------------------------------------------------------------------- +module Xanthous.Data.NestedMap + ( NestedMapVal(..) + , NestedMap(..) + , lookup + , lookupVal + , insert + + -- * + , (:->) + , BifunctorFunctor'(..) + , BifunctorMonad'(..) + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude hiding (lookup, foldMap) +import qualified Xanthous.Prelude as P +-------------------------------------------------------------------------------- +import Test.QuickCheck +import Data.Aeson +import Data.Function (fix) +import Data.Foldable (Foldable(..)) +import Data.List.NonEmpty (NonEmpty(..)) +import qualified Data.List.NonEmpty as NE +-------------------------------------------------------------------------------- + +-- | Natural transformations on bifunctors +type (:->) p q = forall a b. p a b -> q a b +infixr 0 :-> + +class (forall b. Bifunctor b => Bifunctor (t b)) => BifunctorFunctor' t where + bifmap' :: (Bifunctor p, Bifunctor q) => (p :-> q) -> t p :-> t q + +class BifunctorFunctor' t => BifunctorMonad' t where + bireturn' :: (Bifunctor p) => p :-> t p + + bibind' :: (Bifunctor p, Bifunctor q) => (p :-> t q) -> t p :-> t q + bibind' f = bijoin' . bifmap' f + + bijoin' :: (Bifunctor p) => t (t p) :-> t p + bijoin' = bibind' id + + {-# MINIMAL bireturn', (bibind' | bijoin') #-} + +-------------------------------------------------------------------------------- + +data NestedMapVal m k v = Val v | Nested (NestedMap m k v) + +deriving stock instance + ( forall k' v'. (Show k', Show v') => Show (m k' v') + , Show k + , Show v + ) => Show (NestedMapVal m k v) + +deriving stock instance + ( forall k' v'. (Eq k', Eq v') => Eq (m k' v') + , Eq k + , Eq v + ) => Eq (NestedMapVal m k v) + +instance + forall m k v. + ( Arbitrary (m k v) + , Arbitrary (m k (NestedMapVal m k v)) + , Arbitrary k + , Arbitrary v + , IsMap (m k (NestedMapVal m k v)) + , MapValue (m k (NestedMapVal m k v)) ~ (NestedMapVal m k v) + , ContainerKey (m k (NestedMapVal m k v)) ~ k + ) => Arbitrary (NestedMapVal m k v) where + arbitrary = sized . fix $ \gen n -> + let nst = fmap (NestedMap . mapFromList) + . 
listOf + $ (,) <$> arbitrary @k <*> gen (n `div` 2) + in if n == 0 + then Val <$> arbitrary + else oneof [ Val <$> arbitrary + , Nested <$> nst] + shrink (Val v) = Val <$> shrink v + shrink (Nested mkv) = Nested <$> shrink mkv + +instance Functor (m k) => Functor (NestedMapVal m k) where + fmap f (Val v) = Val $ f v + fmap f (Nested m) = Nested $ fmap f m + +instance Bifunctor m => Bifunctor (NestedMapVal m) where + bimap _ g (Val v) = Val $ g v + bimap f g (Nested m) = Nested $ bimap f g m + +instance BifunctorFunctor' NestedMapVal where + bifmap' _ (Val v) = Val v + bifmap' f (Nested m) = Nested $ bifmap' f m + +instance (ToJSONKey k, ToJSON v, ToJSON (m k (NestedMapVal m k v))) + => ToJSON (NestedMapVal m k v) where + toJSON (Val v) = toJSON v + toJSON (Nested m) = toJSON m + +instance Foldable (m k) => Foldable (NestedMapVal m k) where + foldMap f (Val v) = f v + foldMap f (Nested m) = foldMap f m + +-- _NestedMapVal +-- :: forall m k v m' k' v'. +-- ( IsMap (m k v), IsMap (m' k' v') +-- , IsMap (m [k] v), IsMap (m' [k'] v') +-- , ContainerKey (m k v) ~ k, ContainerKey (m' k' v') ~ k' +-- , ContainerKey (m [k] v) ~ [k], ContainerKey (m' [k'] v') ~ [k'] +-- , MapValue (m k v) ~ v, MapValue (m' k' v') ~ v' +-- , MapValue (m [k] v) ~ v, MapValue (m' [k'] v') ~ v' +-- ) +-- => Iso (NestedMapVal m k v) +-- (NestedMapVal m' k' v') +-- (m [k] v) +-- (m' [k'] v') +-- _NestedMapVal = iso hither yon +-- where +-- hither :: NestedMapVal m k v -> m [k] v +-- hither (Val v) = singletonMap [] v +-- hither (Nested m) = bimap _ _ $ m ^. _NestedMap +-- yon = _ + +-------------------------------------------------------------------------------- + +newtype NestedMap m k v = NestedMap (m k (NestedMapVal m k v)) + +deriving stock instance + ( forall k' v'. (Eq k', Eq v') => Eq (m k' v') + , Eq k + , Eq v + ) => Eq (NestedMap m k v) + +deriving stock instance + ( forall k' v'. (Show k', Show v') => Show (m k' v') + , Show k + , Show v + ) => Show (NestedMap m k v) + +instance Arbitrary (m k (NestedMapVal m k v)) + => Arbitrary (NestedMap m k v) where + arbitrary = NestedMap <$> arbitrary + shrink (NestedMap m) = NestedMap <$> shrink m + +instance Functor (m k) => Functor (NestedMap m k) where + fmap f (NestedMap m) = NestedMap $ fmap (fmap f) m + +instance Bifunctor m => Bifunctor (NestedMap m) where + bimap f g (NestedMap m) = NestedMap $ bimap f (bimap f g) m + +instance BifunctorFunctor' NestedMap where + bifmap' f (NestedMap m) = NestedMap . 
f $ bimap id (bifmap' f) m + +instance (ToJSONKey k, ToJSON v, ToJSON (m k (NestedMapVal m k v))) + => ToJSON (NestedMap m k v) where + toJSON (NestedMap m) = toJSON m + +instance Foldable (m k) => Foldable (NestedMap m k) where + foldMap f (NestedMap m) = foldMap (foldMap f) m + +-------------------------------------------------------------------------------- + +lookup + :: ( IsMap (m k (NestedMapVal m k v)) + , MapValue (m k (NestedMapVal m k v)) ~ (NestedMapVal m k v) + , ContainerKey (m k (NestedMapVal m k v)) ~ k + ) + => NonEmpty k + -> NestedMap m k v + -> Maybe (NestedMapVal m k v) +lookup (p :| []) (NestedMap vs) = P.lookup p vs +lookup (p :| (p₁ : ps)) (NestedMap vs) = P.lookup p vs >>= \case + (Val _) -> Nothing + (Nested vs') -> lookup (p₁ :| ps) vs' + +lookupVal + :: ( IsMap (m k (NestedMapVal m k v)) + , MapValue (m k (NestedMapVal m k v)) ~ (NestedMapVal m k v) + , ContainerKey (m k (NestedMapVal m k v)) ~ k + ) + => NonEmpty k + -> NestedMap m k v + -> Maybe v +lookupVal ks m + | Just (Val v) <- lookup ks m = Just v + | otherwise = Nothing + +insert + :: ( IsMap (m k (NestedMapVal m k v)) + , MapValue (m k (NestedMapVal m k v)) ~ (NestedMapVal m k v) + , ContainerKey (m k (NestedMapVal m k v)) ~ k + ) + => NonEmpty k + -> v + -> NestedMap m k v + -> NestedMap m k v +insert (k :| []) v (NestedMap m) = NestedMap $ P.insertMap k (Val v) m +insert (k₁ :| (k₂ : ks)) v (NestedMap m) = NestedMap $ alterMap upd k₁ m + where + upd (Just (Nested nm)) = Just . Nested $ insert (k₂ :| ks) v nm + upd _ = Just $ + let (kΩ :| ks') = NE.reverse (k₂ :| ks) + in P.foldl' + (\m' k -> Nested . NestedMap . singletonMap k $ m') + (Nested . NestedMap . singletonMap kΩ $ Val v) + ks' + +-- _NestedMap +-- :: ( IsMap (m k v), IsMap (m' k' v') +-- , IsMap (m (NonEmpty k) v), IsMap (m' (NonEmpty k') v') +-- , ContainerKey (m k v) ~ k, ContainerKey (m' k' v') ~ k' +-- , ContainerKey (m (NonEmpty k) v) ~ (NonEmpty k) +-- , ContainerKey (m' (NonEmpty k') v') ~ (NonEmpty k') +-- , MapValue (m k v) ~ v, MapValue (m' k' v') ~ v' +-- , MapValue (m (NonEmpty k) v) ~ v, MapValue (m' (NonEmpty k') v') ~ v' +-- ) +-- => Iso (NestedMap m k v) +-- (NestedMap m' k' v') +-- (m (NonEmpty k) v) +-- (m' (NonEmpty k') v') +-- _NestedMap = iso undefined yon +-- where +-- hither (NestedMap m) = undefined . mapToList $ m +-- yon mkv = undefined diff --git a/users/glittershark/xanthous/src/Xanthous/Data/VectorBag.hs b/users/glittershark/xanthous/src/Xanthous/Data/VectorBag.hs new file mode 100644 index 000000000000..2e6d48062a45 --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Data/VectorBag.hs @@ -0,0 +1,100 @@ +{-# LANGUAGE UndecidableInstances #-} +{-# LANGUAGE StandaloneDeriving #-} +{-# LANGUAGE DeriveTraversable #-} +{-# LANGUAGE TemplateHaskell #-} +-------------------------------------------------------------------------------- +module Xanthous.Data.VectorBag + (VectorBag(..) 
+ ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude +import Data.Aeson +import qualified Data.Vector as V +import Test.QuickCheck +import Test.QuickCheck.Instances.Vector () +-------------------------------------------------------------------------------- + +-- | Acts exactly like a Vector, except ignores order when testing for equality +newtype VectorBag a = VectorBag (Vector a) + deriving stock + ( Traversable + , Generic + ) + deriving newtype + ( Show + , Read + , Foldable + , FromJSON + , FromJSON1 + , ToJSON + , Reversing + , Applicative + , Functor + , Monad + , Monoid + , Semigroup + , Arbitrary + , CoArbitrary + , Filterable + ) +makeWrapped ''VectorBag + +instance Function a => Function (VectorBag a) where + function = functionMap (\(VectorBag v) -> v) VectorBag + +type instance Element (VectorBag a) = a +deriving via (Vector a) instance MonoFoldable (VectorBag a) +deriving via (Vector a) instance GrowingAppend (VectorBag a) +deriving via (Vector a) instance SemiSequence (VectorBag a) +deriving via (Vector a) instance MonoPointed (VectorBag a) +deriving via (Vector a) instance MonoFunctor (VectorBag a) + +instance Cons (VectorBag a) (VectorBag b) a b where + _Cons = prism (\(x, VectorBag xs) -> VectorBag $ x <| xs) $ \(VectorBag v) -> + if V.null v + then Left (VectorBag mempty) + else Right (V.unsafeHead v, VectorBag $ V.unsafeTail v) + +instance AsEmpty (VectorBag a) where + _Empty = prism' (const $ VectorBag Empty) $ \case + (VectorBag Empty) -> Just () + _ -> Nothing + +instance Witherable VectorBag where + wither f (VectorBag v) = VectorBag <$> wither f v + witherM f (VectorBag v) = VectorBag <$> witherM f v + filterA p (VectorBag v) = VectorBag <$> filterA p v + +{- + TODO: + , Ixed + , FoldableWithIndex + , FunctorWithIndex + , TraversableWithIndex + , Snoc + , Each +-} + +instance Ord a => Eq (VectorBag a) where + (==) = (==) `on` (view _Wrapped . sort) + +instance Ord a => Ord (VectorBag a) where + compare = compare `on` (view _Wrapped . sort) + +instance MonoTraversable (VectorBag a) where + otraverse f (VectorBag v) = VectorBag <$> otraverse f v + +instance IsSequence (VectorBag a) where + fromList = VectorBag . fromList + break prd (VectorBag v) = bimap VectorBag VectorBag $ break prd v + span prd (VectorBag v) = bimap VectorBag VectorBag $ span prd v + dropWhile prd (VectorBag v) = VectorBag $ dropWhile prd v + takeWhile prd (VectorBag v) = VectorBag $ takeWhile prd v + splitAt idx (VectorBag v) = bimap VectorBag VectorBag $ splitAt idx v + unsafeSplitAt idx (VectorBag v) = + bimap VectorBag VectorBag $ unsafeSplitAt idx v + take n (VectorBag v) = VectorBag $ take n v + unsafeTake n (VectorBag v) = VectorBag $ unsafeTake n v + drop n (VectorBag v) = VectorBag $ drop n v + unsafeDrop n (VectorBag v) = VectorBag $ unsafeDrop n v + partition p (VectorBag v) = bimap VectorBag VectorBag $ partition p v diff --git a/users/glittershark/xanthous/src/Xanthous/Entities/Character.hs b/users/glittershark/xanthous/src/Xanthous/Entities/Character.hs new file mode 100644 index 000000000000..c18d726a4bfd --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Entities/Character.hs @@ -0,0 +1,276 @@ +{-# LANGUAGE TemplateHaskell #-} +module Xanthous.Entities.Character + ( Character(..) + , characterName + , inventory + , characterDamage + , characterHitpoints' + , characterHitpoints + , hitpointRecoveryRate + , speed + + -- * Inventory + , Inventory(..) 
+ , backpack + , wielded + , items + -- ** Wielded items + , Wielded(..) + , hands + , leftHand + , rightHand + , inLeftHand + , inRightHand + , doubleHanded + , wieldedItems + , WieldedItem(..) + , wieldedItem + , wieldableItem + , asWieldedItem + + -- * + , mkCharacter + , pickUpItem + , isDead + , damage + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude +-------------------------------------------------------------------------------- +import Brick +import Data.Aeson.Generic.DerivingVia +import Data.Aeson (ToJSON, FromJSON) +import Data.Coerce (coerce) +import Test.QuickCheck +import Test.QuickCheck.Instances.Vector () +import Test.QuickCheck.Arbitrary.Generic +-------------------------------------------------------------------------------- +import Xanthous.Util.QuickCheck +import Xanthous.Game.State +import Xanthous.Entities.Item +import Xanthous.Data + ( TicksPerTile, Hitpoints, Per, Ticks, (|*|), positioned + , Positioned(..) + ) +import Xanthous.Entities.RawTypes (WieldableItem, wieldable) +import qualified Xanthous.Entities.RawTypes as Raw +-------------------------------------------------------------------------------- + +data WieldedItem = WieldedItem + { _wieldedItem :: Item + , _wieldableItem :: WieldableItem + -- ^ Invariant: item ^. itemType . wieldable ≡ Just wieldableItem + } + deriving stock (Eq, Show, Ord, Generic) + deriving anyclass (NFData, CoArbitrary, Function) + deriving (ToJSON, FromJSON) + via WithOptions '[ FieldLabelModifier '[Drop 1] ] + WieldedItem +makeFieldsNoPrefix ''WieldedItem + +asWieldedItem :: Prism' Item WieldedItem +asWieldedItem = prism' hither yon + where + yon item = WieldedItem item <$> item ^. itemType . wieldable + hither (WieldedItem item _) = item + +instance Brain WieldedItem where + step ticks (Positioned p wi) = + over positioned (\i -> WieldedItem i $ wi ^. wieldableItem) + <$> step ticks (Positioned p $ wi ^. wieldedItem) + +instance Draw WieldedItem where + draw = draw . view wieldedItem + +instance Entity WieldedItem where + entityAttributes = entityAttributes . view wieldedItem + description = description . view wieldedItem + entityChar = entityChar . view wieldedItem + +instance Arbitrary WieldedItem where + arbitrary = genericArbitrary <&> \wi -> + wi & wieldedItem . itemType . wieldable ?~ wi ^. wieldableItem + +data Wielded + = DoubleHanded WieldedItem + | Hands { _leftHand :: !(Maybe WieldedItem) + , _rightHand :: !(Maybe WieldedItem) + } + deriving stock (Eq, Show, Ord, Generic) + deriving anyclass (NFData, CoArbitrary, Function) + deriving Arbitrary via GenericArbitrary Wielded + deriving (ToJSON, FromJSON) + via WithOptions '[ 'SumEnc 'ObjWithSingleField ] + Wielded + +hands :: Prism' Wielded (Maybe WieldedItem, Maybe WieldedItem) +hands = prism' (uncurry Hands) $ \case + Hands l r -> Just (l, r) + _ -> Nothing + +leftHand :: Traversal' Wielded WieldedItem +leftHand = hands . _1 . _Just + +inLeftHand :: WieldedItem -> Wielded +inLeftHand wi = Hands (Just wi) Nothing + +rightHand :: Traversal' Wielded WieldedItem +rightHand = hands . _2 . 
_Just + +inRightHand :: WieldedItem -> Wielded +inRightHand wi = Hands Nothing (Just wi) + +doubleHanded :: Prism' Wielded WieldedItem +doubleHanded = prism' DoubleHanded $ \case + DoubleHanded i -> Just i + _ -> Nothing + +wieldedItems :: Traversal' Wielded WieldedItem +wieldedItems k (DoubleHanded wielded) = DoubleHanded <$> k wielded +wieldedItems k (Hands l r) = Hands <$> _Just k l <*> _Just k r + +data Inventory = Inventory + { _backpack :: Vector Item + , _wielded :: Wielded + } + deriving stock (Eq, Show, Ord, Generic) + deriving anyclass (NFData, CoArbitrary, Function) + deriving Arbitrary via GenericArbitrary Inventory + deriving (ToJSON, FromJSON) + via WithOptions '[ FieldLabelModifier '[Drop 1] ] + Inventory +makeFieldsNoPrefix ''Inventory + +items :: Traversal' Inventory Item +items k (Inventory bp w) = Inventory + <$> traversed k bp + <*> (wieldedItems . wieldedItem) k w + +type instance Element Inventory = Item + +instance MonoFunctor Inventory where + omap = over items + +instance MonoFoldable Inventory where + ofoldMap = foldMapOf items + ofoldr = foldrOf items + ofoldl' = foldlOf' items + otoList = toListOf items + oall = allOf items + oany = anyOf items + onull = nullOf items + ofoldr1Ex = foldr1Of items + ofoldl1Ex' = foldl1Of' items + headEx = headEx . toListOf items + lastEx = lastEx . toListOf items + +instance MonoTraversable Inventory where + otraverse = traverseOf items + +instance Semigroup Inventory where + inv₁ <> inv₂ = + let backpack' = inv₁ ^. backpack <> inv₂ ^. backpack + (wielded', backpack'') = case (inv₁ ^. wielded, inv₂ ^. wielded) of + (wielded₁, wielded₂@(DoubleHanded _)) -> + (wielded₂, backpack' <> fromList (wielded₁ ^.. wieldedItems . wieldedItem)) + (wielded₁, wielded₂@(Hands (Just _) (Just _))) -> + (wielded₂, backpack' <> fromList (wielded₁ ^.. wieldedItems . wieldedItem)) + (wielded₁, Hands Nothing Nothing) -> (wielded₁, backpack') + (Hands Nothing Nothing, wielded₂) -> (wielded₂, backpack') + (Hands (Just l₁) Nothing, Hands Nothing (Just r₂)) -> + (Hands (Just l₁) (Just r₂), backpack') + (wielded₁@(DoubleHanded _), wielded₂) -> + (wielded₁, backpack' <> fromList (wielded₂ ^.. wieldedItems . wieldedItem)) + (Hands Nothing (Just r₁), Hands Nothing (Just r₂)) -> + (Hands Nothing (Just r₂), r₁ ^. wieldedItem <| backpack') + (Hands Nothing r₁, Hands (Just l₂) Nothing) -> + (Hands (Just l₂) r₁, backpack') + (Hands (Just l₁) Nothing, Hands (Just l₂) Nothing) -> + (Hands (Just l₂) Nothing, l₁ ^. wieldedItem <| backpack') + (Hands (Just l₁) (Just r₁), Hands Nothing (Just r₂)) -> + (Hands (Just l₁) (Just r₂), r₁ ^. wieldedItem <| backpack') + (Hands (Just l₁) (Just r₁), Hands (Just l₂) Nothing) -> + (Hands (Just l₂) (Just r₁), l₁ ^. 
wieldedItem <| backpack') + in Inventory backpack'' wielded' + +instance Monoid Inventory where + mempty = Inventory mempty $ Hands Nothing Nothing + +-------------------------------------------------------------------------------- + +data Character = Character + { _inventory :: !Inventory + , _characterName :: !(Maybe Text) + , _characterHitpoints' :: !Double + , _speed :: TicksPerTile + } + deriving stock (Show, Eq, Ord, Generic) + deriving anyclass (NFData, CoArbitrary, Function) + deriving (ToJSON, FromJSON) + via WithOptions '[ FieldLabelModifier '[Drop 1] ] + Character +makeLenses ''Character + +characterHitpoints :: Character -> Hitpoints +characterHitpoints = views characterHitpoints' floor + +scrollOffset :: Int +scrollOffset = 5 + +instance Draw Character where + draw _ = visibleRegion rloc rreg $ str "@" + where + rloc = Location (negate scrollOffset, negate scrollOffset) + rreg = (2 * scrollOffset, 2 * scrollOffset) + drawPriority = const maxBound -- Character should always be on top, for now + +instance Brain Character where + step ticks = (pure .) $ positioned . characterHitpoints' %~ \hp -> + if hp > fromIntegral initialHitpoints + then hp + else hp + hitpointRecoveryRate |*| ticks + +instance Entity Character where + description _ = "yourself" + entityChar _ = "@" + +instance Arbitrary Character where + arbitrary = genericArbitrary + +initialHitpoints :: Hitpoints +initialHitpoints = 10 + +hitpointRecoveryRate :: Double `Per` Ticks +hitpointRecoveryRate = 1.0 / (15 * coerce defaultSpeed) + +defaultSpeed :: TicksPerTile +defaultSpeed = 100 + +mkCharacter :: Character +mkCharacter = Character + { _inventory = mempty + , _characterName = Nothing + , _characterHitpoints' = fromIntegral initialHitpoints + , _speed = defaultSpeed + } + +defaultCharacterDamage :: Hitpoints +defaultCharacterDamage = 1 + +-- | Returns the damage that the character currently does with an attack +-- TODO use double-handed/left-hand/right-hand here +characterDamage :: Character -> Hitpoints +characterDamage + = fromMaybe defaultCharacterDamage + . preview (inventory . wielded . wieldedItems . wieldableItem . Raw.damage) + +isDead :: Character -> Bool +isDead = (== 0) . characterHitpoints + +pickUpItem :: Item -> Character -> Character +pickUpItem it = inventory . backpack %~ (it <|) + +damage :: Hitpoints -> Character -> Character +damage (fromIntegral -> amount) = characterHitpoints' %~ \case + n | n <= amount -> 0 + | otherwise -> n - amount diff --git a/users/glittershark/xanthous/src/Xanthous/Entities/Creature.hs b/users/glittershark/xanthous/src/Xanthous/Entities/Creature.hs new file mode 100644 index 000000000000..e95e9f0b985b --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Entities/Creature.hs @@ -0,0 +1,92 @@ +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE TemplateHaskell #-} +-------------------------------------------------------------------------------- +module Xanthous.Entities.Creature + ( -- * Creature + Creature(..) + -- ** Lenses + , creatureType + , hitpoints + , hippocampus + + -- ** Creature functions + , newWithType + , damage + , isDead + , visionRadius + + -- * Hippocampus + , Hippocampus(..) + -- ** Lenses + , destination + -- ** Destination + , Destination(..) 
+ , destinationFromPos + -- *** Lenses + , destinationPosition + , destinationProgress + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude +-------------------------------------------------------------------------------- +import Test.QuickCheck +import Test.QuickCheck.Arbitrary.Generic +import Data.Aeson.Generic.DerivingVia +import Data.Aeson (ToJSON, FromJSON) +-------------------------------------------------------------------------------- +import Xanthous.AI.Gormlak +import Xanthous.Entities.RawTypes hiding + (Creature, description, damage) +import qualified Xanthous.Entities.RawTypes as Raw +import Xanthous.Game.State +import Xanthous.Data +import Xanthous.Data.Entities +import Xanthous.Entities.Creature.Hippocampus +-------------------------------------------------------------------------------- + +data Creature = Creature + { _creatureType :: !CreatureType + , _hitpoints :: !Hitpoints + , _hippocampus :: !Hippocampus + } + deriving stock (Eq, Show, Ord, Generic) + deriving anyclass (NFData, CoArbitrary, Function) + deriving Draw via DrawRawCharPriority "_creatureType" 1000 Creature + deriving (ToJSON, FromJSON) + via WithOptions '[ FieldLabelModifier '[Drop 1] ] + Creature +instance Arbitrary Creature where arbitrary = genericArbitrary +makeLenses ''Creature + +instance HasVisionRadius Creature where + visionRadius = const 50 -- TODO + +instance Brain Creature where + step = brainVia GormlakBrain + entityCanMove = const True + +instance Entity Creature where + entityAttributes _ = defaultEntityAttributes + & blocksObject .~ True + description = view $ creatureType . Raw.description + entityChar = view $ creatureType . char + entityCollision = const $ Just Combat + +-------------------------------------------------------------------------------- + +newWithType :: CreatureType -> Creature +newWithType _creatureType = + let _hitpoints = _creatureType ^. maxHitpoints + _hippocampus = initialHippocampus + in Creature {..} + +damage :: Hitpoints -> Creature -> Creature +damage amount = hitpoints %~ \hp -> + if hp <= amount + then 0 + else hp - amount + +isDead :: Creature -> Bool +isDead = views hitpoints (== 0) + +{-# ANN module ("Hlint: ignore Use newtype instead of data" :: String) #-} diff --git a/users/glittershark/xanthous/src/Xanthous/Entities/Creature/Hippocampus.hs b/users/glittershark/xanthous/src/Xanthous/Entities/Creature/Hippocampus.hs new file mode 100644 index 000000000000..501a5b597221 --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Entities/Creature/Hippocampus.hs @@ -0,0 +1,64 @@ +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE TemplateHaskell #-} +-------------------------------------------------------------------------------- +module Xanthous.Entities.Creature.Hippocampus + (-- * Hippocampus + Hippocampus(..) + , initialHippocampus + -- ** Lenses + , destination + -- ** Destination + , Destination(..) 
+ , destinationFromPos + -- *** Lenses + , destinationPosition + , destinationProgress + ) +where +-------------------------------------------------------------------------------- +import Xanthous.Prelude +-------------------------------------------------------------------------------- +import Data.Aeson.Generic.DerivingVia +import Data.Aeson (ToJSON, FromJSON) +import Test.QuickCheck +import Test.QuickCheck.Arbitrary.Generic +-------------------------------------------------------------------------------- +import Xanthous.Data +import Xanthous.Util.QuickCheck +-------------------------------------------------------------------------------- + + +data Destination = Destination + { _destinationPosition :: !Position + -- | The progress towards the destination, tracked as an offset from the + -- creature's original position. + -- + -- When this value reaches >= 1, the creature has reached their destination + , _destinationProgress :: !Tiles + } + deriving stock (Eq, Show, Ord, Generic) + deriving anyclass (NFData, CoArbitrary, Function) + deriving (ToJSON, FromJSON) + via WithOptions '[ FieldLabelModifier '[Drop 1] ] + Destination +instance Arbitrary Destination where arbitrary = genericArbitrary +makeLenses ''Destination + +destinationFromPos :: Position -> Destination +destinationFromPos _destinationPosition = + let _destinationProgress = 0 + in Destination{..} + +data Hippocampus = Hippocampus + { _destination :: !(Maybe Destination) + } + deriving stock (Eq, Show, Ord, Generic) + deriving anyclass (NFData, CoArbitrary, Function) + deriving Arbitrary via GenericArbitrary Hippocampus + deriving (ToJSON, FromJSON) + via WithOptions '[ FieldLabelModifier '[Drop 1] ] + Hippocampus +makeLenses ''Hippocampus + +initialHippocampus :: Hippocampus +initialHippocampus = Hippocampus Nothing diff --git a/users/glittershark/xanthous/src/Xanthous/Entities/Draw/Util.hs b/users/glittershark/xanthous/src/Xanthous/Entities/Draw/Util.hs new file mode 100644 index 000000000000..aa6c5fa4fc47 --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Entities/Draw/Util.hs @@ -0,0 +1,31 @@ +module Xanthous.Entities.Draw.Util where +-------------------------------------------------------------------------------- +import Xanthous.Prelude +-------------------------------------------------------------------------------- +import Brick.Widgets.Border.Style +import Brick.Types (Edges(..)) +-------------------------------------------------------------------------------- + +borderFromEdges :: BorderStyle -> Edges Bool -> Char +borderFromEdges bstyle edges = ($ bstyle) $ case edges of + Edges False False False False -> const '☐' + + Edges True False False False -> bsVertical + Edges False True False False -> bsVertical + Edges False False True False -> bsHorizontal + Edges False False False True -> bsHorizontal + + Edges True True False False -> bsVertical + Edges True False True False -> bsCornerBR + Edges True False False True -> bsCornerBL + + Edges False True True False -> bsCornerTR + Edges False True False True -> bsCornerTL + Edges False False True True -> bsHorizontal + + Edges False True True True -> bsIntersectT + Edges True False True True -> bsIntersectB + Edges True True False True -> bsIntersectL + Edges True True True False -> bsIntersectR + + Edges True True True True -> bsIntersectFull diff --git a/users/glittershark/xanthous/src/Xanthous/Entities/Entities.hs b/users/glittershark/xanthous/src/Xanthous/Entities/Entities.hs new file mode 100644 index 000000000000..a0c037a1b4ed --- /dev/null +++ 
b/users/glittershark/xanthous/src/Xanthous/Entities/Entities.hs @@ -0,0 +1,63 @@ +{-# LANGUAGE StandaloneDeriving #-} +{-# OPTIONS_GHC -fno-warn-orphans #-} +-------------------------------------------------------------------------------- +module Xanthous.Entities.Entities () where +-------------------------------------------------------------------------------- +import Xanthous.Prelude +-------------------------------------------------------------------------------- +import Test.QuickCheck +import qualified Test.QuickCheck.Gen as Gen +import Data.Aeson +-------------------------------------------------------------------------------- +import Xanthous.Entities.Character +import Xanthous.Entities.Item +import Xanthous.Entities.Creature +import Xanthous.Entities.Environment +import Xanthous.Entities.Marker +import Xanthous.Game.State +import Xanthous.Util.QuickCheck +import Data.Aeson.Generic.DerivingVia +-------------------------------------------------------------------------------- + +instance Arbitrary SomeEntity where + arbitrary = Gen.oneof + [ SomeEntity <$> arbitrary @Character + , SomeEntity <$> arbitrary @Item + , SomeEntity <$> arbitrary @Creature + , SomeEntity <$> arbitrary @Wall + , SomeEntity <$> arbitrary @Door + , SomeEntity <$> arbitrary @GroundMessage + , SomeEntity <$> arbitrary @Staircase + , SomeEntity <$> arbitrary @Marker + ] + +instance FromJSON SomeEntity where + parseJSON = withObject "Entity" $ \obj -> do + (entityType :: Text) <- obj .: "type" + case entityType of + "Character" -> SomeEntity @Character <$> obj .: "data" + "Item" -> SomeEntity @Item <$> obj .: "data" + "Creature" -> SomeEntity @Creature <$> obj .: "data" + "Wall" -> SomeEntity @Wall <$> obj .: "data" + "Door" -> SomeEntity @Door <$> obj .: "data" + "GroundMessage" -> SomeEntity @GroundMessage <$> obj .: "data" + "Staircase" -> SomeEntity @Staircase <$> obj .: "data" + "Marker" -> SomeEntity @Marker <$> obj .: "data" + _ -> fail . unpack $ "Invalid entity type \"" <> entityType <> "\"" + +deriving via WithOptions '[ FieldLabelModifier '[Drop 1] ] GameLevel + instance FromJSON GameLevel +deriving via WithOptions '[ FieldLabelModifier '[Drop 1] ] GameState + instance FromJSON GameState + +instance Entity SomeEntity where + entityAttributes (SomeEntity ent) = entityAttributes ent + description (SomeEntity ent) = description ent + entityChar (SomeEntity ent) = entityChar ent + entityCollision (SomeEntity ent) = entityCollision ent + +instance Function SomeEntity where + function = functionJSON + +instance CoArbitrary SomeEntity where + coarbitrary = coarbitrary . 
encode diff --git a/users/glittershark/xanthous/src/Xanthous/Entities/Entities.hs-boot b/users/glittershark/xanthous/src/Xanthous/Entities/Entities.hs-boot new file mode 100644 index 000000000000..519a862c6a5a --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Entities/Entities.hs-boot @@ -0,0 +1,14 @@ +{-# OPTIONS_GHC -fno-warn-orphans #-} +module Xanthous.Entities.Entities where + +import Test.QuickCheck +import Data.Aeson +import Xanthous.Game.State (SomeEntity, GameState, Entity) + +instance Arbitrary SomeEntity +instance Function SomeEntity +instance CoArbitrary SomeEntity +instance FromJSON SomeEntity +instance Entity SomeEntity + +instance FromJSON GameState diff --git a/users/glittershark/xanthous/src/Xanthous/Entities/Environment.hs b/users/glittershark/xanthous/src/Xanthous/Entities/Environment.hs new file mode 100644 index 000000000000..b45a91eabed2 --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Entities/Environment.hs @@ -0,0 +1,160 @@ +{-# LANGUAGE TemplateHaskell #-} +module Xanthous.Entities.Environment + ( + -- * Walls + Wall(..) + + -- * Doors + , Door(..) + , open + , closed + , locked + , unlockedDoor + + -- * Messages + , GroundMessage(..) + + -- * Stairs + , Staircase(..) + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude +-------------------------------------------------------------------------------- +import Test.QuickCheck +import Brick (str) +import Brick.Widgets.Border.Style (unicode) +import Brick.Types (Edges(..)) +import Data.Aeson +import Data.Aeson.Generic.DerivingVia +-------------------------------------------------------------------------------- +import Xanthous.Entities.Draw.Util +import Xanthous.Data +import Xanthous.Data.Entities +import Xanthous.Game.State +import Xanthous.Util.QuickCheck +-------------------------------------------------------------------------------- + +data Wall = Wall + deriving stock (Show, Eq, Ord, Generic, Enum) + deriving anyclass (NFData, CoArbitrary, Function) + +instance ToJSON Wall where + toJSON = const $ String "Wall" + +instance FromJSON Wall where + parseJSON = withText "Wall" $ \case + "Wall" -> pure Wall + _ -> fail "Invalid Wall: expected Wall" + +instance Brain Wall where step = brainVia Brainless + +instance Entity Wall where + entityAttributes _ = defaultEntityAttributes + & blocksVision .~ True + & blocksObject .~ True + description _ = "a wall" + entityChar _ = "┼" + +instance Arbitrary Wall where + arbitrary = pure Wall + +wallEdges :: (MonoFoldable mono, Element mono ~ SomeEntity) + => Neighbors mono -> Edges Bool +wallEdges neighs = any (entityIs @Wall) <$> edges neighs + +instance Draw Wall where + drawWithNeighbors neighs _wall = + str . pure . borderFromEdges unicode $ wallEdges neighs + +data Door = Door + { _open :: Bool + , _locked :: Bool + } + deriving stock (Show, Eq, Ord, Generic) + deriving anyclass (NFData, CoArbitrary, Function, ToJSON, FromJSON) + deriving Arbitrary via GenericArbitrary Door +makeLenses ''Door + +instance Draw Door where + drawWithNeighbors neighs door + = str . pure . ($ door ^. 
open) $ case wallEdges neighs of + Edges True False False False -> vertDoor + Edges False True False False -> vertDoor + Edges True True False False -> vertDoor + Edges False False True False -> horizDoor + Edges False False False True -> horizDoor + Edges False False True True -> horizDoor + _ -> allsidesDoor + where + horizDoor True = '␣' + horizDoor False = 'ᚔ' + vertDoor True = '[' + vertDoor False = 'ǂ' + allsidesDoor True = '+' + allsidesDoor False = '▥' + +instance Brain Door where step = brainVia Brainless + +instance Entity Door where + entityAttributes door = defaultEntityAttributes + & blocksVision .~ not (door ^. open) + description door | door ^. open = "an open door" + | otherwise = "a closed door" + entityChar _ = "d" + entityCollision door | door ^. open = Nothing + | otherwise = Just Stop + +closed :: Lens' Door Bool +closed = open . involuted not + +-- | A closed, unlocked door +unlockedDoor :: Door +unlockedDoor = Door + { _open = False + , _locked = False + } + +-------------------------------------------------------------------------------- + +newtype GroundMessage = GroundMessage Text + deriving stock (Show, Eq, Ord, Generic) + deriving anyclass (NFData, CoArbitrary, Function) + deriving Arbitrary via GenericArbitrary GroundMessage + deriving (ToJSON, FromJSON) + via WithOptions '[ 'TagSingleConstructors 'True + , 'SumEnc 'ObjWithSingleField + ] + GroundMessage + deriving Draw + via DrawStyledCharacter ('Just 'Yellow) 'Nothing "≈" + GroundMessage +instance Brain GroundMessage where step = brainVia Brainless + +instance Entity GroundMessage where + description = const "a message on the ground. Press r. to read it." + entityChar = const "≈" + entityCollision = const Nothing + +-------------------------------------------------------------------------------- + +data Staircase = UpStaircase | DownStaircase + deriving stock (Show, Eq, Ord, Generic) + deriving anyclass (NFData, CoArbitrary, Function) + deriving Arbitrary via GenericArbitrary Staircase + deriving (ToJSON, FromJSON) + via WithOptions '[ 'TagSingleConstructors 'True + , 'SumEnc 'ObjWithSingleField + ] + Staircase +instance Brain Staircase where step = brainVia Brainless + +instance Draw Staircase where + draw UpStaircase = str "<" + draw DownStaircase = str ">" + +instance Entity Staircase where + description UpStaircase = "a staircase leading upwards" + description DownStaircase = "a staircase leading downwards" + entityChar UpStaircase = "<" + entityChar DownStaircase = ">" + entityCollision = const Nothing diff --git a/users/glittershark/xanthous/src/Xanthous/Entities/Item.hs b/users/glittershark/xanthous/src/Xanthous/Entities/Item.hs new file mode 100644 index 000000000000..b50a5eab809d --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Entities/Item.hs @@ -0,0 +1,49 @@ +{-# LANGUAGE TemplateHaskell #-} +{-# LANGUAGE StandaloneDeriving #-} +-------------------------------------------------------------------------------- +module Xanthous.Entities.Item + ( Item(..) 
+ , itemType + , newWithType + , isEdible + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude +import Test.QuickCheck +import Data.Aeson (ToJSON, FromJSON) +import Data.Aeson.Generic.DerivingVia +-------------------------------------------------------------------------------- +import Xanthous.Entities.RawTypes hiding (Item, description, isEdible) +import qualified Xanthous.Entities.RawTypes as Raw +import Xanthous.Game.State +-------------------------------------------------------------------------------- + +data Item = Item + { _itemType :: ItemType + } + deriving stock (Eq, Show, Ord, Generic) + deriving anyclass (NFData, CoArbitrary, Function) + deriving Draw via DrawRawChar "_itemType" Item + deriving (ToJSON, FromJSON) + via WithOptions '[ FieldLabelModifier '[Drop 1] ] + Item +makeLenses ''Item + +{-# ANN Item ("HLint: ignore Use newtype instead of data" :: String )#-} + +-- deriving via (Brainless Item) instance Brain Item +instance Brain Item where step = brainVia Brainless + +instance Arbitrary Item where + arbitrary = Item <$> arbitrary + +instance Entity Item where + description = view $ itemType . Raw.description + entityChar = view $ itemType . Raw.char + entityCollision = const Nothing + +newWithType :: ItemType -> Item +newWithType = Item + +isEdible :: Item -> Bool +isEdible = Raw.isEdible . view itemType diff --git a/users/glittershark/xanthous/src/Xanthous/Entities/Marker.hs b/users/glittershark/xanthous/src/Xanthous/Entities/Marker.hs new file mode 100644 index 000000000000..14d02872ed4e --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Entities/Marker.hs @@ -0,0 +1,41 @@ +-------------------------------------------------------------------------------- +module Xanthous.Entities.Marker ( Marker(..) ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude +-------------------------------------------------------------------------------- +import Data.Aeson +import Test.QuickCheck +import qualified Graphics.Vty.Attributes as Vty +import qualified Graphics.Vty.Image as Vty +import Brick.Widgets.Core (raw) +-------------------------------------------------------------------------------- +import Xanthous.Game.State +import Xanthous.Data.Entities (EntityAttributes(..)) +-------------------------------------------------------------------------------- + +-- | Mark on the map - for use in debugging / development only. +newtype Marker = Marker Text + deriving stock (Show, Eq, Ord, Generic) + deriving anyclass (NFData, CoArbitrary, Function) + deriving (Semigroup, Monoid, ToJSON, FromJSON, Arbitrary) via Text + +instance Brain Marker where step = brainVia Brainless + +instance Entity Marker where + entityAttributes = const EntityAttributes + { _blocksVision = False + , _blocksObject = False + , _collision = Stop + } + description (Marker m) = "[M] " <> m + entityChar = const $ "X" & style .~ markerStyle + entityCollision = const Nothing + +instance Draw Marker where + draw = const . 
raw $ Vty.char markerStyle 'X' + drawPriority = const maxBound + +markerStyle :: Vty.Attr +markerStyle = Vty.defAttr + `Vty.withForeColor` Vty.red + `Vty.withBackColor` Vty.black diff --git a/users/glittershark/xanthous/src/Xanthous/Entities/RawTypes.hs b/users/glittershark/xanthous/src/Xanthous/Entities/RawTypes.hs new file mode 100644 index 000000000000..30039662f071 --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Entities/RawTypes.hs @@ -0,0 +1,133 @@ +{-# LANGUAGE TemplateHaskell #-} +{-# LANGUAGE DuplicateRecordFields #-} +-------------------------------------------------------------------------------- +module Xanthous.Entities.RawTypes + ( + EntityRaw(..) + , _Creature + , _Item + + -- * Creatures + , CreatureType(..) + , hostile + + -- * Items + , ItemType(..) + -- ** Item sub-types + -- *** Edible + , EdibleItem(..) + , isEdible + -- *** Wieldable + , WieldableItem(..) + , isWieldable + + -- * Lens classes + , HasAttackMessage(..) + , HasChar(..) + , HasDamage(..) + , HasDescription(..) + , HasEatMessage(..) + , HasEdible(..) + , HasFriendly(..) + , HasHitpointsHealed(..) + , HasLongDescription(..) + , HasMaxHitpoints(..) + , HasName(..) + , HasSpeed(..) + , HasWieldable(..) + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude +import Test.QuickCheck +import Data.Aeson.Generic.DerivingVia +import Data.Aeson (ToJSON, FromJSON) +-------------------------------------------------------------------------------- +import Xanthous.Messages (Message(..)) +import Xanthous.Data (TicksPerTile, Hitpoints) +import Xanthous.Data.EntityChar +import Xanthous.Util.QuickCheck +-------------------------------------------------------------------------------- + +data CreatureType = CreatureType + { _name :: !Text + , _description :: !Text + , _char :: !EntityChar + , _maxHitpoints :: !Hitpoints + , _friendly :: !Bool + , _speed :: !TicksPerTile + } + deriving stock (Show, Eq, Ord, Generic) + deriving anyclass (NFData, CoArbitrary, Function) + deriving Arbitrary via GenericArbitrary CreatureType + deriving (ToJSON, FromJSON) + via WithOptions '[ FieldLabelModifier '[Drop 1] ] + CreatureType +makeFieldsNoPrefix ''CreatureType + +hostile :: Lens' CreatureType Bool +hostile = friendly . 
involuted not + +-------------------------------------------------------------------------------- + +data EdibleItem = EdibleItem + { _hitpointsHealed :: Int + , _eatMessage :: Maybe Message + } + deriving stock (Show, Eq, Ord, Generic) + deriving anyclass (NFData, CoArbitrary, Function) + deriving Arbitrary via GenericArbitrary EdibleItem + deriving (ToJSON, FromJSON) + via WithOptions '[ FieldLabelModifier '[Drop 1] ] + EdibleItem +makeFieldsNoPrefix ''EdibleItem + +data WieldableItem = WieldableItem + { _damage :: !Hitpoints + , _attackMessage :: !(Maybe Message) + } + deriving stock (Show, Eq, Ord, Generic) + deriving anyclass (NFData, CoArbitrary, Function) + deriving Arbitrary via GenericArbitrary WieldableItem + deriving (ToJSON, FromJSON) + via WithOptions '[ FieldLabelModifier '[Drop 1] ] + WieldableItem +makeFieldsNoPrefix ''WieldableItem + +-------------------------------------------------------------------------------- + +data ItemType = ItemType + { _name :: Text + , _description :: Text + , _longDescription :: Text + , _char :: EntityChar + , _edible :: Maybe EdibleItem + , _wieldable :: Maybe WieldableItem + } + deriving stock (Show, Eq, Ord, Generic) + deriving anyclass (NFData, CoArbitrary, Function) + deriving Arbitrary via GenericArbitrary ItemType + deriving (ToJSON, FromJSON) + via WithOptions '[ FieldLabelModifier '[Drop 1] ] + ItemType +makeFieldsNoPrefix ''ItemType + +-- | Can this item be eaten? +isEdible :: ItemType -> Bool +isEdible = has $ edible . _Just + +-- | Can this item be used as a weapon? +isWieldable :: ItemType -> Bool +isWieldable = has $ wieldable . _Just + +-------------------------------------------------------------------------------- + +data EntityRaw + = Creature CreatureType + | Item ItemType + deriving stock (Show, Eq, Generic) + deriving anyclass (NFData) + deriving Arbitrary via GenericArbitrary EntityRaw + deriving (FromJSON) + via WithOptions '[ SumEnc ObjWithSingleField ] + EntityRaw +makePrisms ''EntityRaw diff --git a/users/glittershark/xanthous/src/Xanthous/Entities/Raws.hs b/users/glittershark/xanthous/src/Xanthous/Entities/Raws.hs new file mode 100644 index 000000000000..d4cae7ccc299 --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Entities/Raws.hs @@ -0,0 +1,59 @@ +{-# LANGUAGE TemplateHaskell #-} +-------------------------------------------------------------------------------- +module Xanthous.Entities.Raws + ( raws + , raw + , RawType(..) + , rawsWithType + , entityFromRaw + ) where +-------------------------------------------------------------------------------- +import Data.FileEmbed +import qualified Data.Yaml as Yaml +import Xanthous.Prelude +import System.FilePath.Posix +-------------------------------------------------------------------------------- +import Xanthous.Entities.RawTypes +import Xanthous.Game.State +import qualified Xanthous.Entities.Creature as Creature +import qualified Xanthous.Entities.Item as Item +import Xanthous.AI.Gormlak () +-------------------------------------------------------------------------------- +rawRaws :: [(FilePath, ByteString)] +rawRaws = $(embedDir "src/Xanthous/Entities/Raws") + +raws :: HashMap Text EntityRaw +raws + = mapFromList + . map (bimap + (pack . takeBaseName) + (either (error . Yaml.prettyPrintParseException) id + . Yaml.decodeEither')) + $ rawRaws + +raw :: Text -> Maybe EntityRaw +raw n = raws ^. 
at n + +class RawType (a :: Type) where + _RawType :: Prism' EntityRaw a + +instance RawType CreatureType where + _RawType = prism' Creature $ \case + Creature c -> Just c + _ -> Nothing + +instance RawType ItemType where + _RawType = prism' Item $ \case + Item i -> Just i + _ -> Nothing + +rawsWithType :: forall a. RawType a => HashMap Text a +rawsWithType = mapFromList . itoListOf (ifolded . _RawType) $ raws + +-------------------------------------------------------------------------------- + +entityFromRaw :: EntityRaw -> SomeEntity +entityFromRaw (Creature creatureType) + = SomeEntity $ Creature.newWithType creatureType +entityFromRaw (Item itemType) + = SomeEntity $ Item.newWithType itemType diff --git a/users/glittershark/xanthous/src/Xanthous/Entities/Raws/gormlak.yaml b/users/glittershark/xanthous/src/Xanthous/Entities/Raws/gormlak.yaml new file mode 100644 index 000000000000..2eac895190b3 --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Entities/Raws/gormlak.yaml @@ -0,0 +1,13 @@ +Creature: + name: gormlak + description: a gormlak + longDescription: | + A chittering imp-like creature with bright yellow horns. It adores shiny objects + and gathers in swarms. + char: + char: g + style: + foreground: red + maxHitpoints: 5 + speed: 125 + friendly: false diff --git a/users/glittershark/xanthous/src/Xanthous/Entities/Raws/noodles.yaml b/users/glittershark/xanthous/src/Xanthous/Entities/Raws/noodles.yaml new file mode 100644 index 000000000000..c3f19dce91d1 --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Entities/Raws/noodles.yaml @@ -0,0 +1,12 @@ +Item: + name: noodles + description: "a big bowl o' noodles" + longDescription: You know exactly what kind of noodles + char: + char: 'n' + style: + foreground: yellow + edible: + hitpointsHealed: 2 + eatMessage: + - You slurp up the noodles. Yumm! diff --git a/users/glittershark/xanthous/src/Xanthous/Entities/Raws/stick.yaml b/users/glittershark/xanthous/src/Xanthous/Entities/Raws/stick.yaml new file mode 100644 index 000000000000..bc7fde4d8b02 --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Entities/Raws/stick.yaml @@ -0,0 +1,14 @@ +Item: + name: stick + description: a wooden stick + longDescription: A sturdy branch broken off from some sort of tree + char: + char: ∤ + style: + foreground: yellow + wieldable: + damage: 2 + attackMessage: + - You bonk the {{creature.creatureType.name}} over the head with your stick. + - You bash the {{creature.creatureType.name}} on the noggin with your stick. + - You whack the {{creature.creatureType.name}} with your stick. diff --git a/users/glittershark/xanthous/src/Xanthous/Game.hs b/users/glittershark/xanthous/src/Xanthous/Game.hs new file mode 100644 index 000000000000..89c23f0de850 --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Game.hs @@ -0,0 +1,73 @@ +module Xanthous.Game + ( GameState(..) + , levels + , entities + , revealedPositions + , messageHistory + , randomGen + , promptState + , GamePromptState(..) + + , getInitialState + , initialStateFromSeed + + , positionedCharacter + , character + , characterPosition + , updateCharacterVision + , characterVisiblePositions + , entitiesAtCharacter + , revealedEntitiesAtPosition + + -- * Messages + , MessageHistory(..) + , HasMessages(..) + , HasTurn(..) + , HasDisplayedTurn(..) + , pushMessage + , previousMessage + , nextTurn + + -- * Collisions + , Collision(..) + , collisionAt + + -- * App monad + , AppT(..) 
+ + -- * Saving the game + , saveGame + , loadGame + , saved + + -- * Debug State + , DebugState(..) + , debugState + , allRevealed + ) where +-------------------------------------------------------------------------------- +import qualified Codec.Compression.Zlib as Zlib +import Codec.Compression.Zlib.Internal (DecompressError) +import qualified Data.Aeson as JSON +import System.IO.Unsafe +-------------------------------------------------------------------------------- +import Xanthous.Prelude +import Xanthous.Game.State +import Xanthous.Game.Lenses +import Xanthous.Game.Arbitrary () +import Xanthous.Entities.Entities () +-------------------------------------------------------------------------------- + +saveGame :: GameState -> LByteString +saveGame = Zlib.compress . JSON.encode + +loadGame :: LByteString -> Maybe GameState +loadGame = JSON.decode <=< decompressZlibMay + where + decompressZlibMay bs + = unsafeDupablePerformIO + $ (let r = Zlib.decompress bs in r `seq` pure (Just r)) + `catch` \(_ :: DecompressError) -> pure Nothing + +saved :: Prism' LByteString GameState +saved = prism' saveGame loadGame diff --git a/users/glittershark/xanthous/src/Xanthous/Game/Arbitrary.hs b/users/glittershark/xanthous/src/Xanthous/Game/Arbitrary.hs new file mode 100644 index 000000000000..a1eb789a33c9 --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Game/Arbitrary.hs @@ -0,0 +1,50 @@ +{-# LANGUAGE UndecidableInstances #-} +{-# OPTIONS_GHC -fno-warn-orphans #-} +{-# LANGUAGE StandaloneDeriving #-} +{-# LANGUAGE RecordWildCards #-} +-------------------------------------------------------------------------------- +module Xanthous.Game.Arbitrary where +-------------------------------------------------------------------------------- +import Xanthous.Prelude hiding (foldMap) +-------------------------------------------------------------------------------- +import Test.QuickCheck +import System.Random +import Data.Foldable (foldMap) +-------------------------------------------------------------------------------- +import Xanthous.Data.Levels +import qualified Xanthous.Data.EntityMap as EntityMap +import Xanthous.Entities.Entities () +import Xanthous.Entities.Character +import Xanthous.Game.State +import Xanthous.Util.QuickCheck (GenericArbitrary(..)) +-------------------------------------------------------------------------------- + +deriving via GenericArbitrary GameLevel instance Arbitrary GameLevel + +instance Arbitrary GameState where + arbitrary = do + chr <- arbitrary @Character + _upStaircasePosition <- arbitrary + _messageHistory <- arbitrary + levs <- arbitrary @(Levels GameLevel) + _levelRevealedPositions <- + fmap setFromList + . sublistOf + . foldMap (EntityMap.positions . _levelEntities) + $ levs + let (_characterEntityID, _levelEntities) = + EntityMap.insertAtReturningID _upStaircasePosition (SomeEntity chr) + $ levs ^. current . 
levelEntities + _levels = levs & current .~ GameLevel {..} + _randomGen <- mkStdGen <$> arbitrary + let _promptState = NoPrompt -- TODO + _activePanel <- arbitrary + _debugState <- arbitrary + let _autocommand = NoAutocommand + pure $ GameState {..} + + +instance CoArbitrary GameLevel +instance Function GameLevel +instance CoArbitrary GameState +instance Function GameState diff --git a/users/glittershark/xanthous/src/Xanthous/Game/Draw.hs b/users/glittershark/xanthous/src/Xanthous/Game/Draw.hs new file mode 100644 index 000000000000..2375ae8c557e --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Game/Draw.hs @@ -0,0 +1,143 @@ +-------------------------------------------------------------------------------- +module Xanthous.Game.Draw + ( drawGame + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude +import Brick hiding (loc, on) +import Brick.Widgets.Border +import Brick.Widgets.Border.Style +import Brick.Widgets.Edit +-------------------------------------------------------------------------------- +import Xanthous.Data +import Xanthous.Data.App (ResourceName, Panel(..)) +import qualified Xanthous.Data.App as Resource +import qualified Xanthous.Data.EntityMap as EntityMap +import Xanthous.Game.State +import Xanthous.Entities.Character +import Xanthous.Entities.Item (Item) +import Xanthous.Game + ( characterPosition + , character + , revealedEntitiesAtPosition + ) +import Xanthous.Game.Prompt +import Xanthous.Orphans () +-------------------------------------------------------------------------------- + +cursorPosition :: GameState -> Widget ResourceName -> Widget ResourceName +cursorPosition game + | WaitingPrompt _ (Prompt _ SPointOnMap (PointOnMapPromptState pos) _ _) + <- game ^. promptState + = showCursor Resource.Prompt (pos ^. loc) + | otherwise + = showCursor Resource.Character (game ^. characterPosition . loc) + +drawMessages :: MessageHistory -> Widget ResourceName +drawMessages = txtWrap . (<> " ") . unwords . reverse . oextract + +drawPromptState :: GamePromptState m -> Widget ResourceName +drawPromptState NoPrompt = emptyWidget +drawPromptState (WaitingPrompt msg (Prompt _ pt ps pri _)) = + case (pt, ps, pri) of + (SStringPrompt, StringPromptState edit, _) -> + txtWrap msg <+> txt " " <+> renderEditor (txt . fold) True edit + (SDirectionPrompt, DirectionPromptState, _) -> txtWrap msg + (SContinue, _, _) -> txtWrap msg + (SMenu, _, menuItems) -> + txtWrap msg + <=> foldl' (<=>) emptyWidget (map drawMenuItem $ itoList menuItems) + _ -> txtWrap msg + where + drawMenuItem (chr, MenuOption m _) = + str ("[" <> pure chr <> "] ") <+> txtWrap m + +drawEntities + :: GameState + -> Widget ResourceName +drawEntities game = vBox rows + where + allEnts = game ^. entities + entityPositions = EntityMap.positions allEnts + maxY = fromMaybe 0 $ maximumOf (folded . y) entityPositions + maxX = fromMaybe 0 $ maximumOf (folded . x) entityPositions + rows = mkRow <$> [0..maxY] + mkRow rowY = hBox $ renderEntityAt . flip Position rowY <$> [0..maxX] + renderEntityAt pos + = renderTopEntity pos $ revealedEntitiesAtPosition pos game + renderTopEntity pos ents + = let neighbors = EntityMap.neighbors pos allEnts + in maybe (str " ") (drawWithNeighbors neighbors) + $ maximumBy (compare `on` drawPriority) + <$> fromNullable ents + +drawMap :: GameState -> Widget ResourceName +drawMap game + = viewport Resource.MapViewport Both + . 
cursorPosition game + $ drawEntities game + +bullet :: Char +bullet = '•' + +drawInventoryPanel :: GameState -> Widget ResourceName +drawInventoryPanel game + = drawWielded (game ^. character . inventory . wielded) + <=> drawBackpack (game ^. character . inventory . backpack) + where + drawWielded (Hands Nothing Nothing) = emptyWidget + drawWielded (DoubleHanded i) = + txtWrap $ "You are holding " <> description i <> " in both hands" + drawWielded (Hands l r) = drawHand "left" l <=> drawHand "right" r + drawHand side = maybe emptyWidget $ \i -> + txtWrap ( "You are holding " + <> description i + <> " in your " <> side <> " hand" + ) + <=> txt " " + + drawBackpack :: Vector Item -> Widget ResourceName + drawBackpack Empty = txtWrap "Your backpack is empty right now." + drawBackpack backpackItems + = txtWrap ( "You are currently carrying the following items in your " + <> "backpack:") + <=> txt " " + <=> foldl' (<=>) emptyWidget + (map + (txtWrap . ((bullet <| " ") <>) . description) + backpackItems) + + +drawPanel :: GameState -> Panel -> Widget ResourceName +drawPanel game panel + = border + . hLimit 35 + . viewport (Resource.Panel panel) Vertical + . case panel of + InventoryPanel -> drawInventoryPanel + $ game + +drawCharacterInfo :: Character -> Widget ResourceName +drawCharacterInfo ch = txt " " <+> charName <+> charHitpoints + where + charName | Just n <- ch ^. characterName + = txt $ n <> " " + | otherwise + = emptyWidget + charHitpoints + = txt "Hitpoints: " + <+> txt (tshow $ let Hitpoints hp = characterHitpoints ch in hp) + +drawGame :: GameState -> [Widget ResourceName] +drawGame game + = pure + . withBorderStyle unicode + $ case game ^. promptState of + NoPrompt -> drawMessages (game ^. messageHistory) + _ -> emptyWidget + <=> drawPromptState (game ^. promptState) + <=> + (maybe emptyWidget (drawPanel game) (game ^. activePanel) + <+> border (drawMap game) + ) + <=> drawCharacterInfo (game ^. character) diff --git a/users/glittershark/xanthous/src/Xanthous/Game/Env.hs b/users/glittershark/xanthous/src/Xanthous/Game/Env.hs new file mode 100644 index 000000000000..6e10d0f73581 --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Game/Env.hs @@ -0,0 +1,19 @@ +{-# LANGUAGE TemplateHaskell #-} +-------------------------------------------------------------------------------- +module Xanthous.Game.Env + ( GameEnv(..) 
+ , eventChan + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude +-------------------------------------------------------------------------------- +import Brick.BChan (BChan) +import Xanthous.Data.App (AppEvent) +-------------------------------------------------------------------------------- + +data GameEnv = GameEnv + { _eventChan :: BChan AppEvent + } + deriving stock (Generic) +makeLenses ''GameEnv +{-# ANN GameEnv ("HLint: ignore Use newtype instead of data" :: String) #-} diff --git a/users/glittershark/xanthous/src/Xanthous/Game/Lenses.hs b/users/glittershark/xanthous/src/Xanthous/Game/Lenses.hs new file mode 100644 index 000000000000..6242b855f1cc --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Game/Lenses.hs @@ -0,0 +1,150 @@ +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE QuantifiedConstraints #-} +{-# LANGUAGE AllowAmbiguousTypes #-} +-------------------------------------------------------------------------------- +module Xanthous.Game.Lenses + ( positionedCharacter + , character + , characterPosition + , updateCharacterVision + , characterVisiblePositions + , characterVisibleEntities + , getInitialState + , initialStateFromSeed + , entitiesAtCharacter + , revealedEntitiesAtPosition + + -- * Collisions + , Collision(..) + , entitiesCollision + , collisionAt + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude +-------------------------------------------------------------------------------- +import System.Random +import Control.Monad.State +import Control.Monad.Random (getRandom) +-------------------------------------------------------------------------------- +import Xanthous.Game.State +import Xanthous.Data +import Xanthous.Data.Levels +import qualified Xanthous.Data.EntityMap as EntityMap +import Xanthous.Data.EntityMap.Graphics + (visiblePositions, visibleEntities) +import Xanthous.Data.VectorBag +import Xanthous.Entities.Character (Character, mkCharacter) +import {-# SOURCE #-} Xanthous.Entities.Entities () +-------------------------------------------------------------------------------- + +getInitialState :: IO GameState +getInitialState = initialStateFromSeed <$> getRandom + +initialStateFromSeed :: Int -> GameState +initialStateFromSeed seed = + let _randomGen = mkStdGen seed + chr = mkCharacter + _upStaircasePosition = Position 0 0 + (_characterEntityID, _levelEntities) + = EntityMap.insertAtReturningID + _upStaircasePosition + (SomeEntity chr) + mempty + _levelRevealedPositions = mempty + level = GameLevel {..} + _levels = oneLevel level + _messageHistory = mempty + _promptState = NoPrompt + _activePanel = Nothing + _debugState = DebugState + { _allRevealed = False + } + _autocommand = NoAutocommand + in GameState {..} + + +positionedCharacter :: Lens' GameState (Positioned Character) +positionedCharacter = lens getPositionedCharacter setPositionedCharacter + where + setPositionedCharacter :: GameState -> Positioned Character -> GameState + setPositionedCharacter game chr + = game + & entities . at (game ^. characterEntityID) + ?~ fmap SomeEntity chr + + getPositionedCharacter :: GameState -> Positioned Character + getPositionedCharacter game + = over positioned + ( fromMaybe (error "Invariant error: Character was not a character!") + . downcastEntity + ) + . fromMaybe (error "Invariant error: Character not found!") + $ EntityMap.lookupWithPosition + (game ^. characterEntityID) + (game ^. 
entities) + + +character :: Lens' GameState Character +character = positionedCharacter . positioned + +characterPosition :: Lens' GameState Position +characterPosition = positionedCharacter . position + +visionRadius :: Word +visionRadius = 12 -- TODO make this dynamic + +-- | Update the revealed entities at the character's position based on their +-- vision +updateCharacterVision :: GameState -> GameState +updateCharacterVision game + = game & revealedPositions <>~ characterVisiblePositions game + +characterVisiblePositions :: GameState -> Set Position +characterVisiblePositions game = + let charPos = game ^. characterPosition + in visiblePositions charPos visionRadius $ game ^. entities + +characterVisibleEntities :: GameState -> EntityMap.EntityMap SomeEntity +characterVisibleEntities game = + let charPos = game ^. characterPosition + in visibleEntities charPos visionRadius $ game ^. entities + +entitiesCollision + :: ( Functor f + , forall xx. MonoFoldable (f xx) + , Element (f SomeEntity) ~ SomeEntity + , Element (f (Maybe Collision)) ~ Maybe Collision + , Show (f (Maybe Collision)) + , Show (f SomeEntity) + ) + => f SomeEntity + -> Maybe Collision +entitiesCollision = join . maximumMay . fmap entityCollision + +collisionAt :: MonadState GameState m => Position -> m (Maybe Collision) +collisionAt p = uses (entities . EntityMap.atPosition p) entitiesCollision + +entitiesAtCharacter :: Lens' GameState (VectorBag SomeEntity) +entitiesAtCharacter = lens getter setter + where + getter gs = gs ^. entities . EntityMap.atPosition (gs ^. characterPosition) + setter gs ents = gs + & entities . EntityMap.atPosition (gs ^. characterPosition) .~ ents + +-- | Returns all entities at the given position that are revealed to the +-- character. +-- +-- Concretely, this is either entities that are *currently* visible to the +-- character, or entities, that are immobile and that the character has seen +-- before +revealedEntitiesAtPosition :: Position -> GameState -> (VectorBag SomeEntity) +revealedEntitiesAtPosition p gs + | p `member` characterVisiblePositions gs + = entitiesAtPosition + | p `member` (gs ^. revealedPositions) + = immobileEntitiesAtPosition + | otherwise + = mempty + where + entitiesAtPosition = gs ^. entities . EntityMap.atPosition p + immobileEntitiesAtPosition = filter (not . entityCanMove) entitiesAtPosition diff --git a/users/glittershark/xanthous/src/Xanthous/Game/Prompt.hs b/users/glittershark/xanthous/src/Xanthous/Game/Prompt.hs new file mode 100644 index 000000000000..30b5fe7545e0 --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Game/Prompt.hs @@ -0,0 +1,289 @@ +{-# LANGUAGE DeriveFunctor #-} +{-# LANGUAGE UndecidableInstances #-} +{-# LANGUAGE StandaloneDeriving #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE DeriveFunctor #-} +-------------------------------------------------------------------------------- +module Xanthous.Game.Prompt + ( PromptType(..) + , SPromptType(..) + , SingPromptType(..) + , PromptCancellable(..) + , PromptResult(..) + , PromptState(..) + , MenuOption(..) + , mkMenuItems + , PromptInput + , Prompt(..) 
+ , mkPrompt + , mkMenu + , mkPointOnMapPrompt + , isCancellable + , submitPrompt + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude +-------------------------------------------------------------------------------- +import Brick.Widgets.Edit (Editor, editorText, getEditContents) +import Test.QuickCheck +import Test.QuickCheck.Arbitrary.Generic +-------------------------------------------------------------------------------- +import Xanthous.Util (smallestNotIn) +import Xanthous.Data (Direction, Position) +import Xanthous.Data.App (ResourceName) +import qualified Xanthous.Data.App as Resource +-------------------------------------------------------------------------------- + +data PromptType where + StringPrompt :: PromptType + Confirm :: PromptType + Menu :: Type -> PromptType + DirectionPrompt :: PromptType + PointOnMap :: PromptType + Continue :: PromptType + deriving stock (Generic) + +instance Show PromptType where + show StringPrompt = "StringPrompt" + show Confirm = "Confirm" + show (Menu _) = "Menu" + show DirectionPrompt = "DirectionPrompt" + show PointOnMap = "PointOnMap" + show Continue = "Continue" + +data SPromptType :: PromptType -> Type where + SStringPrompt :: SPromptType 'StringPrompt + SConfirm :: SPromptType 'Confirm + SMenu :: SPromptType ('Menu a) + SDirectionPrompt :: SPromptType 'DirectionPrompt + SPointOnMap :: SPromptType 'PointOnMap + SContinue :: SPromptType 'Continue + +instance NFData (SPromptType pt) where + rnf SStringPrompt = () + rnf SConfirm = () + rnf SMenu = () + rnf SDirectionPrompt = () + rnf SPointOnMap = () + rnf SContinue = () + +class SingPromptType pt where singPromptType :: SPromptType pt +instance SingPromptType 'StringPrompt where singPromptType = SStringPrompt +instance SingPromptType 'Confirm where singPromptType = SConfirm +instance SingPromptType 'DirectionPrompt where singPromptType = SDirectionPrompt +instance SingPromptType 'PointOnMap where singPromptType = SPointOnMap +instance SingPromptType 'Continue where singPromptType = SContinue + +instance Show (SPromptType pt) where + show SStringPrompt = "SStringPrompt" + show SConfirm = "SConfirm" + show SMenu = "SMenu" + show SDirectionPrompt = "SDirectionPrompt" + show SPointOnMap = "SPointOnMap" + show SContinue = "SContinue" + +data PromptCancellable + = Cancellable + | Uncancellable + deriving stock (Show, Eq, Ord, Enum, Generic) + deriving anyclass (NFData, CoArbitrary, Function) + +instance Arbitrary PromptCancellable where + arbitrary = genericArbitrary + +data PromptResult (pt :: PromptType) where + StringResult :: Text -> PromptResult 'StringPrompt + ConfirmResult :: Bool -> PromptResult 'Confirm + MenuResult :: forall a. 
a -> PromptResult ('Menu a) + DirectionResult :: Direction -> PromptResult 'DirectionPrompt + PointOnMapResult :: Position -> PromptResult 'PointOnMap + ContinueResult :: PromptResult 'Continue + +instance Arbitrary (PromptResult 'StringPrompt) where + arbitrary = StringResult <$> arbitrary + +instance Arbitrary (PromptResult 'Confirm) where + arbitrary = ConfirmResult <$> arbitrary + +instance Arbitrary a => Arbitrary (PromptResult ('Menu a)) where + arbitrary = MenuResult <$> arbitrary + +instance Arbitrary (PromptResult 'DirectionPrompt) where + arbitrary = DirectionResult <$> arbitrary + +instance Arbitrary (PromptResult 'PointOnMap) where + arbitrary = PointOnMapResult <$> arbitrary + +instance Arbitrary (PromptResult 'Continue) where + arbitrary = pure ContinueResult + +-------------------------------------------------------------------------------- + +data PromptState pt where + StringPromptState + :: Editor Text ResourceName -> PromptState 'StringPrompt + DirectionPromptState :: PromptState 'DirectionPrompt + ContinuePromptState :: PromptState 'Continue + ConfirmPromptState :: PromptState 'Confirm + MenuPromptState :: forall a. PromptState ('Menu a) + PointOnMapPromptState :: Position -> PromptState 'PointOnMap + +instance NFData (PromptState pt) where + rnf sps@(StringPromptState ed) = sps `deepseq` ed `deepseq` () + rnf DirectionPromptState = () + rnf ContinuePromptState = () + rnf ConfirmPromptState = () + rnf MenuPromptState = () + rnf pomps@(PointOnMapPromptState pos) = pomps `deepseq` pos `deepseq` () + +instance Arbitrary (PromptState 'StringPrompt) where + arbitrary = StringPromptState <$> arbitrary + +instance Arbitrary (PromptState 'DirectionPrompt) where + arbitrary = pure DirectionPromptState + +instance Arbitrary (PromptState 'Continue) where + arbitrary = pure ContinuePromptState + +instance Arbitrary (PromptState ('Menu a)) where + arbitrary = pure MenuPromptState + +instance CoArbitrary (PromptState 'StringPrompt) where + coarbitrary (StringPromptState ed) = coarbitrary ed + +instance CoArbitrary (PromptState 'DirectionPrompt) where + coarbitrary DirectionPromptState = coarbitrary () + +instance CoArbitrary (PromptState 'Continue) where + coarbitrary ContinuePromptState = coarbitrary () + +instance CoArbitrary (PromptState ('Menu a)) where + coarbitrary MenuPromptState = coarbitrary () + +deriving stock instance Show (PromptState pt) + +data MenuOption a = MenuOption Text a + deriving stock (Eq, Generic, Functor) + deriving anyclass (NFData, CoArbitrary, Function) + +instance Comonad MenuOption where + extract (MenuOption _ x) = x + extend cok mo@(MenuOption text _) = MenuOption text (cok mo) + +mkMenuItems :: (MonoFoldable f, Element f ~ (Char, MenuOption a)) + => f + -> Map Char (MenuOption a) +mkMenuItems = flip foldl' mempty $ \items (chr, option) -> + let chr' = if has (ix chr) items + then smallestNotIn $ keys items + else chr + in items & at chr' ?~ option + +instance Show (MenuOption a) where + show (MenuOption m _) = show m + +type family PromptInput (pt :: PromptType) :: Type where + PromptInput ('Menu a) = Map Char (MenuOption a) + PromptInput 'PointOnMap = Position -- Character pos + PromptInput _ = () + +data Prompt (m :: Type -> Type) where + Prompt + :: forall (pt :: PromptType) + (m :: Type -> Type). 
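       -- Fields, in order: cancellability, the singleton for the prompt
       -- type, the current prompt state, the prompt's extra input (menu
       -- items, the character position, ...), and the callback run when the
       -- prompt is submitted. A rough construction sketch ('handleName' is a
       -- stand-in for whatever @m ()@ action consumes the result, not a
       -- function from this change):
       --
       --   mkPrompt Cancellable SStringPrompt $ \(StringResult s) -> handleName s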
+ PromptCancellable + -> SPromptType pt + -> PromptState pt + -> PromptInput pt + -> (PromptResult pt -> m ()) + -> Prompt m + +instance Show (Prompt m) where + show (Prompt c pt ps pri _) + = "(Prompt " + <> show c <> " " + <> show pt <> " " + <> show ps <> " " + <> showPri + <> " <function>)" + where showPri = case pt of + SMenu -> show pri + _ -> "()" + +instance NFData (Prompt m) where + rnf (Prompt c SMenu ps pri cb) + = c + `deepseq` ps + `deepseq` pri + `seq` cb + `seq` () + rnf (Prompt c spt ps pri cb) + = c + `deepseq` spt + `deepseq` ps + `deepseq` pri + `seq` cb + `seq` () + +instance CoArbitrary (m ()) => CoArbitrary (Prompt m) where + coarbitrary (Prompt c SStringPrompt ps pri cb) = + variant @Int 1 . coarbitrary (c, ps, pri, cb) + coarbitrary (Prompt c SConfirm _ pri cb) = -- TODO fill in prompt state + variant @Int 2 . coarbitrary (c, pri, cb) + coarbitrary (Prompt c SMenu _ps _pri _cb) = + variant @Int 3 . coarbitrary c {-, ps, pri, cb -} + coarbitrary (Prompt c SDirectionPrompt ps pri cb) = + variant @Int 4 . coarbitrary (c, ps, pri, cb) + coarbitrary (Prompt c SPointOnMap _ pri cb) = -- TODO fill in prompt state + variant @Int 5 . coarbitrary (c, pri, cb) + coarbitrary (Prompt c SContinue ps pri cb) = + variant @Int 6 . coarbitrary (c, ps, pri, cb) + +-- instance Function (Prompt m) where +-- function = functionMap toTuple _fromTuple +-- where +-- toTuple (Prompt c pt ps pri cb) = (c, pt, ps, pri, cb) + + +mkPrompt :: (PromptInput pt ~ ()) => PromptCancellable -> SPromptType pt -> (PromptResult pt -> m ()) -> Prompt m +mkPrompt c pt@SStringPrompt cb = + let ps = StringPromptState $ editorText Resource.Prompt (Just 1) "" + in Prompt c pt ps () cb +mkPrompt c pt@SDirectionPrompt cb = Prompt c pt DirectionPromptState () cb +mkPrompt c pt@SContinue cb = Prompt c pt ContinuePromptState () cb +mkPrompt c pt@SConfirm cb = Prompt c pt ConfirmPromptState () cb + +mkMenu + :: forall a m. + PromptCancellable + -> Map Char (MenuOption a) -- ^ Menu items + -> (PromptResult ('Menu a) -> m ()) + -> Prompt m +mkMenu c = Prompt c SMenu MenuPromptState + +mkPointOnMapPrompt + :: PromptCancellable + -> Position + -> (PromptResult 'PointOnMap -> m ()) + -> Prompt m +mkPointOnMapPrompt c pos = Prompt c SPointOnMap (PointOnMapPromptState pos) pos + +isCancellable :: Prompt m -> Bool +isCancellable (Prompt Cancellable _ _ _ _) = True +isCancellable (Prompt Uncancellable _ _ _ _) = False + +submitPrompt :: Applicative m => Prompt m -> m () +submitPrompt (Prompt _ pt ps _ cb) = + case (pt, ps) of + (SStringPrompt, StringPromptState edit) -> + cb . StringResult . mconcat . 
getEditContents $ edit + (SDirectionPrompt, DirectionPromptState) -> + pure () -- Don't use submit with a direction prompt + (SContinue, ContinuePromptState) -> + cb ContinueResult + (SMenu, MenuPromptState) -> + pure () -- Don't use submit with a menu prompt + (SPointOnMap, PointOnMapPromptState pos) -> + cb $ PointOnMapResult pos + (SConfirm, ConfirmPromptState) -> + cb $ ConfirmResult True diff --git a/users/glittershark/xanthous/src/Xanthous/Game/State.hs b/users/glittershark/xanthous/src/Xanthous/Game/State.hs new file mode 100644 index 000000000000..f614cad47339 --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Game/State.hs @@ -0,0 +1,558 @@ +{-# LANGUAGE StandaloneDeriving #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE UndecidableInstances #-} +{-# LANGUAGE TemplateHaskell #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE AllowAmbiguousTypes #-} +-------------------------------------------------------------------------------- +module Xanthous.Game.State + ( GameState(..) + , entities + , levels + , revealedPositions + , messageHistory + , randomGen + , activePanel + , promptState + , characterEntityID + , autocommand + , GamePromptState(..) + + -- * Game Level + , GameLevel(..) + , levelEntities + , upStaircasePosition + , levelRevealedPositions + + -- * Messages + , MessageHistory(..) + , HasMessages(..) + , HasTurn(..) + , HasDisplayedTurn(..) + , pushMessage + , previousMessage + , nextTurn + + -- * Autocommands + , Autocommand(..) + , AutocommandState(..) + , _NoAutocommand + , _ActiveAutocommand + + -- * App monad + , AppT(..) + , AppM + , runAppT + + -- * Entities + , Draw(..) + , Brain(..) + , Brainless(..) + , brainVia + , Collision(..) + , Entity(..) + , SomeEntity(..) + , downcastEntity + , _SomeEntity + , entityIs + -- ** Vias + , Color(..) + , DrawNothing(..) + , DrawRawChar(..) + , DrawRawCharPriority(..) + , DrawCharacter(..) + , DrawStyledCharacter(..) + , DeriveEntity(..) + -- ** Field classes + , HasChar(..) + , HasStyle(..) + + -- * Debug State + , DebugState(..) 
+ , debugState + , allRevealed + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude +-------------------------------------------------------------------------------- +import Data.List.NonEmpty ( NonEmpty((:|))) +import qualified Data.List.NonEmpty as NonEmpty +import Data.Typeable +import Data.Coerce +import System.Random +import Test.QuickCheck +import Test.QuickCheck.Arbitrary.Generic +import Control.Monad.Random.Class +import Control.Monad.State +import Control.Monad.Trans.Control (MonadTransControl(..)) +import Control.Monad.Trans.Compose +import Control.Monad.Morph (MFunctor(..)) +import Brick (EventM, Widget, raw, str, emptyWidget) +import Data.Aeson (ToJSON(..), FromJSON(..), Value(Null)) +import qualified Data.Aeson as JSON +import Data.Aeson.Generic.DerivingVia +import Data.Generics.Product.Fields +import qualified Graphics.Vty.Attributes as Vty +import qualified Graphics.Vty.Image as Vty +-------------------------------------------------------------------------------- +import Xanthous.Util (KnownBool(..)) +import Xanthous.Util.QuickCheck (GenericArbitrary(..)) +import Xanthous.Data +import Xanthous.Data.App +import Xanthous.Data.Levels +import Xanthous.Data.EntityMap (EntityMap, EntityID) +import Xanthous.Data.EntityChar +import Xanthous.Data.VectorBag +import Xanthous.Data.Entities +import Xanthous.Orphans () +import Xanthous.Game.Prompt +import Xanthous.Game.Env +-------------------------------------------------------------------------------- + +data MessageHistory + = MessageHistory + { _messages :: Map Word (NonEmpty Text) + , _turn :: Word + , _displayedTurn :: Maybe Word + } + deriving stock (Show, Eq, Generic) + deriving anyclass (NFData, CoArbitrary, Function) + deriving Arbitrary via GenericArbitrary MessageHistory + deriving (ToJSON, FromJSON) + via WithOptions '[ FieldLabelModifier '[Drop 1] ] + MessageHistory +makeFieldsNoPrefix ''MessageHistory + +instance Semigroup MessageHistory where + (MessageHistory msgs₁ turn₁ dt₁) <> (MessageHistory msgs₂ turn₂ dt₂) = + MessageHistory (msgs₁ <> msgs₂) (max turn₁ turn₂) $ case (dt₁, dt₂) of + (_, Nothing) -> Nothing + (Just t, _) -> Just t + (Nothing, Just t) -> Just t + +instance Monoid MessageHistory where + mempty = MessageHistory mempty 0 Nothing + +type instance Element MessageHistory = [Text] +instance MonoFunctor MessageHistory where + omap f mh@(MessageHistory _ t _) = + mh & messages . at t %~ (NonEmpty.nonEmpty . f . toList =<<) + +instance MonoComonad MessageHistory where + oextract (MessageHistory ms t dt) = maybe [] toList $ ms ^. at (fromMaybe t dt) + oextend cok mh@(MessageHistory _ t dt) = + mh & messages . at (fromMaybe t dt) .~ NonEmpty.nonEmpty (cok mh) + +pushMessage :: Text -> MessageHistory -> MessageHistory +pushMessage msg mh@(MessageHistory _ turn' _) = + mh + & messages . at turn' %~ \case + Nothing -> Just $ msg :| mempty + Just msgs -> Just $ msg <| msgs + & displayedTurn .~ Nothing + +nextTurn :: MessageHistory -> MessageHistory +nextTurn = (turn +~ 1) . (displayedTurn .~ Nothing) + +previousMessage :: MessageHistory -> MessageHistory +previousMessage mh = mh & displayedTurn .~ maximumOf + (messages . ifolded . asIndex . filtered (< mh ^. turn)) + mh + + +-------------------------------------------------------------------------------- + +data GamePromptState m where + NoPrompt :: GamePromptState m + WaitingPrompt :: Text -> Prompt m -> GamePromptState m + deriving stock (Show, Generic) + deriving anyclass (NFData) + +-- | Non-injective! 
We never try to serialize waiting prompts, since: +-- +-- * they contain callback functions +-- * we can't save the game when in a prompt anyway +instance ToJSON (GamePromptState m) where + toJSON _ = Null + +-- | Always expects Null +instance FromJSON (GamePromptState m) where + parseJSON Null = pure NoPrompt + parseJSON _ = fail "Invalid GamePromptState; expected null" + +instance CoArbitrary (GamePromptState m) where + coarbitrary NoPrompt = variant @Int 1 + coarbitrary (WaitingPrompt txt _) = variant @Int 2 . coarbitrary txt + +instance Function (GamePromptState m) where + function = functionMap onlyNoPrompt (const NoPrompt) + where + onlyNoPrompt NoPrompt = () + onlyNoPrompt (WaitingPrompt _ _) = + error "Can't handle prompts in Function!" + +-------------------------------------------------------------------------------- + +newtype AppT m a + = AppT { unAppT :: ReaderT GameEnv (StateT GameState m) a } + deriving ( Functor + , Applicative + , Monad + , MonadState GameState + , MonadReader GameEnv + , MonadIO + ) + via (ReaderT GameEnv (StateT GameState m)) + deriving ( MonadTrans + , MFunctor + ) + via (ReaderT GameEnv `ComposeT` StateT GameState) + +type AppM = AppT (EventM ResourceName) + +-------------------------------------------------------------------------------- + +class Draw a where + drawWithNeighbors :: Neighbors (VectorBag SomeEntity) -> a -> Widget n + drawWithNeighbors = const draw + + draw :: a -> Widget n + draw = drawWithNeighbors $ pure mempty + + -- | higher priority gets drawn on top + drawPriority :: a -> Word + drawPriority = const minBound + +instance Draw a => Draw (Positioned a) where + drawWithNeighbors ns (Positioned _ a) = drawWithNeighbors ns a + draw (Positioned _ a) = draw a + +newtype DrawCharacter (char :: Symbol) (a :: Type) where + DrawCharacter :: a -> DrawCharacter char a + +instance KnownSymbol char => Draw (DrawCharacter char a) where + draw _ = str $ symbolVal @char Proxy + +data Color = Black | Red | Green | Yellow | Blue | Magenta | Cyan | White + +class KnownColor (color :: Color) where + colorVal :: forall proxy. proxy color -> Vty.Color + +instance KnownColor 'Black where colorVal _ = Vty.black +instance KnownColor 'Red where colorVal _ = Vty.red +instance KnownColor 'Green where colorVal _ = Vty.green +instance KnownColor 'Yellow where colorVal _ = Vty.yellow +instance KnownColor 'Blue where colorVal _ = Vty.blue +instance KnownColor 'Magenta where colorVal _ = Vty.magenta +instance KnownColor 'Cyan where colorVal _ = Vty.cyan +instance KnownColor 'White where colorVal _ = Vty.white + +class KnownMaybeColor (maybeColor :: Maybe Color) where + maybeColorVal :: forall proxy. 
proxy maybeColor -> Maybe Vty.Color + +instance KnownMaybeColor 'Nothing where maybeColorVal _ = Nothing +instance KnownColor color => KnownMaybeColor ('Just color) where + maybeColorVal _ = Just $ colorVal @color Proxy + +newtype DrawStyledCharacter (fg :: Maybe Color) (bg :: Maybe Color) (char :: Symbol) (a :: Type) where + DrawStyledCharacter :: a -> DrawStyledCharacter fg bg char a + +instance + ( KnownMaybeColor fg + , KnownMaybeColor bg + , KnownSymbol char + ) + => Draw (DrawStyledCharacter fg bg char a) where + draw _ = raw $ Vty.string attr $ symbolVal @char Proxy + where attr = Vty.Attr + { Vty.attrStyle = Vty.Default + , Vty.attrForeColor = maybe Vty.Default Vty.SetTo + $ maybeColorVal @fg Proxy + , Vty.attrBackColor = maybe Vty.Default Vty.SetTo + $ maybeColorVal @bg Proxy + , Vty.attrURL = Vty.Default + } + +instance Draw EntityChar where + draw EntityChar{..} = raw $ Vty.string _style [_char] + +-------------------------------------------------------------------------------- + +newtype DrawNothing (a :: Type) = DrawNothing a + +instance Draw (DrawNothing a) where + draw = const emptyWidget + drawPriority = const 0 + +newtype DrawRawChar (rawField :: Symbol) (a :: Type) = DrawRawChar a + +instance + forall rawField a raw. + ( HasField rawField a a raw raw + , HasChar raw EntityChar + ) => Draw (DrawRawChar rawField a) where + draw (DrawRawChar e) = draw $ e ^. field @rawField . char + +newtype DrawRawCharPriority + (rawField :: Symbol) + (priority :: Nat) + (a :: Type) + = DrawRawCharPriority a + +instance + forall rawField priority a raw. + ( HasField rawField a a raw raw + , KnownNat priority + , HasChar raw EntityChar + ) => Draw (DrawRawCharPriority rawField priority a) where + draw (DrawRawCharPriority e) = draw $ e ^. field @rawField . char + drawPriority = const . fromIntegral $ natVal @priority Proxy + + +-------------------------------------------------------------------------------- + +class Brain a where + step :: Ticks -> Positioned a -> AppM (Positioned a) + -- | Does this entity ever move on its own? + entityCanMove :: a -> Bool + entityCanMove = const False + +newtype Brainless a = Brainless a + +instance Brain (Brainless a) where + step = const pure + +-- | Workaround for the inability to use DerivingVia on Brain due to the lack of +-- higher-order roles (specifically AppT not having its last type argument have +-- role representational bc of StateT) +brainVia + :: forall brain entity. (Coercible entity brain, Brain brain) + => (entity -> brain) -- ^ constructor, ignored + -> (Ticks -> Positioned entity -> AppM (Positioned entity)) +brainVia _ ticks = fmap coerce . step ticks . coerce @_ @(Positioned brain) + +-------------------------------------------------------------------------------- + +class ( Show a, Eq a, Ord a, NFData a + , ToJSON a, FromJSON a + , Draw a, Brain a + ) => Entity a where + entityAttributes :: a -> EntityAttributes + entityAttributes = const defaultEntityAttributes + description :: a -> Text + entityChar :: a -> EntityChar + entityCollision :: a -> Maybe Collision + entityCollision = const $ Just Stop + +data SomeEntity where + SomeEntity :: forall a. 
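    -- This existential constructor pairs with 'downcastEntity', 'entityIs'
    -- and '_SomeEntity' below. A quick illustration, assuming some value
    -- @i :: Item@ and the Entity instance for Item defined elsewhere in this
    -- change:
    --
    --   downcastEntity @Item (SomeEntity i)   -- Just i
    --   entityIs @Creature (SomeEntity i)     -- False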
(Entity a, Typeable a) => a -> SomeEntity + +instance Show SomeEntity where + show (SomeEntity e) = "SomeEntity (" <> show e <> ")" + +instance Eq SomeEntity where + (SomeEntity (a :: ea)) == (SomeEntity (b :: eb)) = case eqT @ea @eb of + Just Refl -> a == b + _ -> False + +instance Ord SomeEntity where + compare (SomeEntity (a :: ea)) (SomeEntity (b :: eb)) = case eqT @ea @eb of + Just Refl -> compare a b + _ -> compare (typeRep $ Proxy @ea) (typeRep $ Proxy @eb) + + +instance NFData SomeEntity where + rnf (SomeEntity ent) = ent `deepseq` () + +instance ToJSON SomeEntity where + toJSON (SomeEntity ent) = entityToJSON ent + where + entityToJSON :: forall entity. (Entity entity, Typeable entity) + => entity -> JSON.Value + entityToJSON entity = JSON.object + [ "type" JSON..= tshow (typeRep @_ @entity Proxy) + , "data" JSON..= toJSON entity + ] + +instance Draw SomeEntity where + drawWithNeighbors ns (SomeEntity ent) = drawWithNeighbors ns ent + drawPriority (SomeEntity ent) = drawPriority ent + +instance Brain SomeEntity where + step ticks (Positioned p (SomeEntity ent)) = + fmap SomeEntity <$> step ticks (Positioned p ent) + entityCanMove (SomeEntity ent) = entityCanMove ent + +downcastEntity :: forall (a :: Type). (Typeable a) => SomeEntity -> Maybe a +downcastEntity (SomeEntity e) = cast e + +entityIs :: forall (a :: Type). (Typeable a) => SomeEntity -> Bool +entityIs = isJust . downcastEntity @a + +_SomeEntity :: forall a. (Entity a, Typeable a) => Prism' SomeEntity a +_SomeEntity = prism' SomeEntity downcastEntity + +newtype DeriveEntity + (blocksVision :: Bool) + (description :: Symbol) + (entityChar :: Symbol) + (entity :: Type) + = DeriveEntity entity + deriving newtype (Show, Eq, Ord, NFData, ToJSON, FromJSON, Draw) + +instance Brain entity => Brain (DeriveEntity b d c entity) where + step = brainVia $ \(DeriveEntity e) -> e + +instance + ( KnownBool blocksVision + , KnownSymbol description + , KnownSymbol entityChar + , Show entity, Eq entity, Ord entity, NFData entity + , ToJSON entity, FromJSON entity + , Draw entity, Brain entity + ) + => Entity (DeriveEntity blocksVision description entityChar entity) where + entityAttributes _ = defaultEntityAttributes + & blocksVision .~ boolVal @blocksVision + description _ = pack . symbolVal $ Proxy @description + entityChar _ = fromString . 
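  -- A hypothetical use of this via-type (not taken from this change; it
  -- assumes a @Wall@ entity type whose superclass instances already exist):
  --
  --   deriving Entity via DeriveEntity 'True "a wall" "#" Wall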
symbolVal $ Proxy @entityChar + +-------------------------------------------------------------------------------- + +data GameLevel = GameLevel + { _levelEntities :: !(EntityMap SomeEntity) + , _upStaircasePosition :: !Position + , _levelRevealedPositions :: !(Set Position) + } + deriving stock (Show, Eq, Generic) + deriving anyclass (NFData) + deriving (ToJSON) + via WithOptions '[ FieldLabelModifier '[Drop 1] ] + GameLevel + +-------------------------------------------------------------------------------- + +data Autocommand + = AutoMove Direction + deriving stock (Show, Eq, Ord, Generic) + deriving anyclass (NFData, Hashable, ToJSON, FromJSON, CoArbitrary, Function) + deriving Arbitrary via GenericArbitrary Autocommand +{-# ANN module ("HLint: ignore Use newtype instead of data" :: String) #-} + +data AutocommandState + = NoAutocommand + | ActiveAutocommand Autocommand (Async ()) + deriving stock (Eq, Ord, Generic) + deriving anyclass (Hashable) + +instance Show AutocommandState where + show NoAutocommand = "NoAutocommand" + show (ActiveAutocommand ac _) = + "(ActiveAutocommand " <> show ac <> " <Async>)" + +instance ToJSON AutocommandState where + toJSON = const Null + +instance FromJSON AutocommandState where + parseJSON Null = pure NoAutocommand + parseJSON _ = fail "Invalid AutocommandState; expected null" + +instance NFData AutocommandState where + rnf NoAutocommand = () + rnf (ActiveAutocommand ac t) = ac `deepseq` t `seq` () + +instance CoArbitrary AutocommandState where + coarbitrary NoAutocommand = variant @Int 1 + coarbitrary (ActiveAutocommand ac t) + = variant @Int 2 + . coarbitrary ac + . coarbitrary (hash t) + +instance Function AutocommandState where + function = functionMap onlyNoAC (const NoAutocommand) + where + onlyNoAC NoAutocommand = () + onlyNoAC _ = error "Can't handle autocommands in Function" + +-------------------------------------------------------------------------------- + + +data DebugState = DebugState + { _allRevealed :: !Bool + } + deriving stock (Show, Eq, Generic) + deriving anyclass (NFData, CoArbitrary, Function) + deriving (ToJSON, FromJSON) + via WithOptions '[ FieldLabelModifier '[Drop 1] ] + DebugState +{-# ANN DebugState ("HLint: ignore Use newtype instead of data" :: String) #-} + +instance Arbitrary DebugState where + arbitrary = genericArbitrary + +data GameState = GameState + { _levels :: !(Levels GameLevel) + , _characterEntityID :: !EntityID + , _messageHistory :: !MessageHistory + , _randomGen :: !StdGen + + -- | The active panel displayed in the UI, if any + , _activePanel :: !(Maybe Panel) + + , _promptState :: !(GamePromptState AppM) + , _debugState :: !DebugState + , _autocommand :: !AutocommandState + } + deriving stock (Show, Generic) + deriving anyclass (NFData) + deriving (ToJSON) + via WithOptions '[ FieldLabelModifier '[Drop 1] ] + GameState + +makeLenses ''GameLevel +makeLenses ''GameState + +entities :: Lens' GameState (EntityMap SomeEntity) +entities = levels . current . levelEntities + +revealedPositions :: Lens' GameState (Set Position) +revealedPositions = levels . current . levelRevealedPositions + +instance Eq GameState where + (==) = (==) `on` \gs -> + ( gs ^. entities + , gs ^. revealedPositions + , gs ^. characterEntityID + , gs ^. messageHistory + , gs ^. activePanel + , gs ^. debugState + ) + +-------------------------------------------------------------------------------- + +runAppT :: Monad m => AppT m a -> GameEnv -> GameState -> m (a, GameState) +runAppT appt env initialState + = flip runStateT initialState + . 
flip runReaderT env + . unAppT + $ appt + +instance (Monad m) => MonadRandom (AppT m) where + getRandomR rng = randomGen %%= randomR rng + getRandom = randomGen %%= random + getRandomRs rng = uses randomGen $ randomRs rng + getRandoms = uses randomGen randoms + +instance MonadTransControl AppT where + type StT AppT a = (a, GameState) + liftWith f + = AppT + . ReaderT $ \e + -> StateT $ \s + -> (,s) <$> f (\action -> runAppT action e s) + restoreT = AppT . ReaderT . const . StateT . const + +-------------------------------------------------------------------------------- + +makeLenses ''DebugState +makePrisms ''AutocommandState diff --git a/users/glittershark/xanthous/src/Xanthous/Generators.hs b/users/glittershark/xanthous/src/Xanthous/Generators.hs new file mode 100644 index 000000000000..ef37070b6ede --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Generators.hs @@ -0,0 +1,168 @@ +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE TemplateHaskell #-} +-------------------------------------------------------------------------------- +module Xanthous.Generators + ( generate + , Generator(..) + , SGenerator(..) + , GeneratorInput(..) + , generateFromInput + , parseGeneratorInput + , showCells + , Level(..) + , levelWalls + , levelItems + , levelCreatures + , levelDoors + , levelCharacterPosition + , levelTutorialMessage + , levelExtra + , generateLevel + , levelToEntityMap + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude +import Data.Array.Unboxed +import qualified Options.Applicative as Opt +import Control.Monad.Random +-------------------------------------------------------------------------------- +import qualified Xanthous.Generators.CaveAutomata as CaveAutomata +import qualified Xanthous.Generators.Dungeon as Dungeon +import Xanthous.Generators.Util +import Xanthous.Generators.LevelContents +import Xanthous.Generators.Village as Village +import Xanthous.Data (Dimensions, Position'(Position), Position) +import Xanthous.Data.EntityMap (EntityMap, _EntityMap) +import qualified Xanthous.Data.EntityMap as EntityMap +import Xanthous.Entities.Environment +import Xanthous.Entities.Item (Item) +import Xanthous.Entities.Creature (Creature) +import Xanthous.Game.State (SomeEntity(..)) +import Linear.V2 +-------------------------------------------------------------------------------- + +data Generator + = CaveAutomata + | Dungeon + deriving stock (Show, Eq) + +data SGenerator (gen :: Generator) where + SCaveAutomata :: SGenerator 'CaveAutomata + SDungeon :: SGenerator 'Dungeon + +type family Params (gen :: Generator) :: Type where + Params 'CaveAutomata = CaveAutomata.Params + Params 'Dungeon = Dungeon.Params + +generate + :: RandomGen g + => SGenerator gen + -> Params gen + -> Dimensions + -> g + -> Cells +generate SCaveAutomata = CaveAutomata.generate +generate SDungeon = Dungeon.generate + +data GeneratorInput where + GeneratorInput :: forall gen. 
SGenerator gen -> Params gen -> GeneratorInput + +generateFromInput :: RandomGen g => GeneratorInput -> Dimensions -> g -> Cells +generateFromInput (GeneratorInput sg ps) = generate sg ps + +parseGeneratorInput :: Opt.Parser GeneratorInput +parseGeneratorInput = Opt.subparser + $ generatorCommand SCaveAutomata + "cave" + "Cellular-automata based cave generator" + CaveAutomata.parseParams + <> generatorCommand SDungeon + "dungeon" + "Classic dungeon map generator" + Dungeon.parseParams + where + generatorCommand sgen name desc parseParams = + Opt.command name + (Opt.info + (GeneratorInput <$> pure sgen <*> parseParams) + (Opt.progDesc desc) + ) + + +showCells :: Cells -> Text +showCells arr = + let (V2 minX minY, V2 maxX maxY) = bounds arr + showCellVal True = "x" + showCellVal False = " " + showCell = showCellVal . (arr !) + row r = foldMap (showCell . (`V2` r)) [minX..maxX] + rows = row <$> [minY..maxY] + in intercalate "\n" rows + +cellsToWalls :: Cells -> EntityMap Wall +cellsToWalls cells = foldl' maybeInsertWall mempty . assocs $ cells + where + maybeInsertWall em (pos@(V2 x y), True) + | not (surroundedOnAllSides pos) = + let x' = fromIntegral x + y' = fromIntegral y + in EntityMap.insertAt (Position x' y') Wall em + maybeInsertWall em _ = em + surroundedOnAllSides pos = numAliveNeighbors cells pos == 8 + +-------------------------------------------------------------------------------- + +data Level = Level + { _levelWalls :: !(EntityMap Wall) + , _levelDoors :: !(EntityMap Door) + , _levelItems :: !(EntityMap Item) + , _levelCreatures :: !(EntityMap Creature) + , _levelTutorialMessage :: !(EntityMap GroundMessage) + , _levelStaircases :: !(EntityMap Staircase) + , _levelExtra :: !(EntityMap SomeEntity) -- ^ TODO this is a bit of a hack... + , _levelCharacterPosition :: !Position + } + deriving stock (Generic) + deriving anyclass (NFData) +makeLenses ''Level + +generateLevel + :: MonadRandom m + => SGenerator gen + -> Params gen + -> Dimensions + -> m Level +generateLevel gen ps dims = do + rand <- mkStdGen <$> getRandom + let cells = generate gen ps dims rand + _levelWalls = cellsToWalls cells + village <- generateVillage cells gen + let _levelExtra = village + _levelItems <- randomItems cells + _levelCreatures <- randomCreatures cells + _levelDoors <- randomDoors cells + _levelCharacterPosition <- chooseCharacterPosition cells + let upStaircase = _EntityMap # [(_levelCharacterPosition, UpStaircase)] + downStaircase <- placeDownStaircase cells + let _levelStaircases = upStaircase <> downStaircase + _levelTutorialMessage <- tutorialMessage cells _levelCharacterPosition + pure Level {..} + +levelToEntityMap :: Level -> EntityMap SomeEntity +levelToEntityMap level + = (SomeEntity <$> level ^. levelWalls) + <> (SomeEntity <$> level ^. levelDoors) + <> (SomeEntity <$> level ^. levelItems) + <> (SomeEntity <$> level ^. levelCreatures) + <> (SomeEntity <$> level ^. levelTutorialMessage) + <> (SomeEntity <$> level ^. levelStaircases) + <> (level ^. 
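  -- A rough sketch of how 'generateLevel' and 'levelToEntityMap' compose
  -- ('dims' and the choice of 'evalRandIO' are assumptions, not taken from
  -- this change):
  --
  --   buildLevel :: Dimensions -> IO (EntityMap SomeEntity)
  --   buildLevel dims =
  --     levelToEntityMap
  --       <$> evalRandIO (generateLevel SCaveAutomata CaveAutomata.defaultParams dims)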
levelExtra) + +generateVillage + :: MonadRandom m + => Cells -- ^ Wall positions + -> SGenerator gen + -> m (EntityMap SomeEntity) +generateVillage wallPositions SCaveAutomata = Village.fromCave wallPositions +generateVillage _ _ = pure mempty diff --git a/users/glittershark/xanthous/src/Xanthous/Generators/CaveAutomata.hs b/users/glittershark/xanthous/src/Xanthous/Generators/CaveAutomata.hs new file mode 100644 index 000000000000..be904662f3f7 --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Generators/CaveAutomata.hs @@ -0,0 +1,112 @@ +{-# LANGUAGE MultiWayIf #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE TemplateHaskell #-} +-------------------------------------------------------------------------------- +module Xanthous.Generators.CaveAutomata + ( Params(..) + , defaultParams + , parseParams + , generate + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude +import Control.Monad.Random (RandomGen, runRandT) +import Data.Array.ST +import Data.Array.Unboxed +import qualified Options.Applicative as Opt +-------------------------------------------------------------------------------- +import Xanthous.Util (between) +import Xanthous.Util.Optparse +import Xanthous.Data (Dimensions, width, height) +import Xanthous.Generators.Util +import Linear.V2 +-------------------------------------------------------------------------------- + +data Params = Params + { _aliveStartChance :: Double + , _birthLimit :: Word + , _deathLimit :: Word + , _steps :: Word + } + deriving stock (Show, Eq, Generic) +makeLenses ''Params + +defaultParams :: Params +defaultParams = Params + { _aliveStartChance = 0.6 + , _birthLimit = 3 + , _deathLimit = 4 + , _steps = 4 + } + +parseParams :: Opt.Parser Params +parseParams = Params + <$> Opt.option parseChance + ( Opt.long "alive-start-chance" + <> Opt.value (defaultParams ^. aliveStartChance) + <> Opt.showDefault + <> Opt.help ( "Chance for each cell to start alive at the beginning of " + <> "the cellular automata" + ) + <> Opt.metavar "CHANCE" + ) + <*> Opt.option parseNeighbors + ( Opt.long "birth-limit" + <> Opt.value (defaultParams ^. birthLimit) + <> Opt.showDefault + <> Opt.help "Minimum neighbor count required for birth of a cell" + <> Opt.metavar "NEIGHBORS" + ) + <*> Opt.option parseNeighbors + ( Opt.long "death-limit" + <> Opt.value (defaultParams ^. deathLimit) + <> Opt.showDefault + <> Opt.help "Maximum neighbor count required for death of a cell" + <> Opt.metavar "NEIGHBORS" + ) + <*> Opt.option Opt.auto + ( Opt.long "steps" + <> Opt.value (defaultParams ^. steps) + <> Opt.showDefault + <> Opt.help "Number of generations to run the automata for" + <> Opt.metavar "STEPS" + ) + <**> Opt.helper + where + parseChance = readWithGuard + (between 0 1) + $ \res -> "Chance must be in the range [0,1], got: " <> show res + + parseNeighbors = readWithGuard + (between 0 8) + $ \res -> "Neighbors must be in the range [0,8], got: " <> show res + +generate :: RandomGen g => Params -> Dimensions -> g -> Cells +generate params dims gen + = runSTUArray + $ fmap fst + $ flip runRandT gen + $ generate' params dims + +generate' :: RandomGen g => Params -> Dimensions -> CellM g s (MCells s) +generate' params dims = do + cells <- randInitialize dims $ params ^. aliveStartChance + let steps' = params ^. steps + when (steps' > 0) + $ for_ [0 .. pred steps'] . 
const $ stepAutomata cells dims params + -- Remove all but the largest contiguous region of unfilled space + (_: smallerRegions) <- lift $ regions @UArray . amap not <$> freeze cells + lift $ fillAllM (fold smallerRegions) cells + lift $ fillOuterEdgesM cells + pure cells + +stepAutomata :: forall s g. MCells s -> Dimensions -> Params -> CellM g s () +stepAutomata cells dims params = do + origCells <- lift $ cloneMArray @_ @(STUArray s) cells + for_ (range (0, V2 (dims ^. width) (dims ^. height))) $ \pos -> do + neighs <- lift $ numAliveNeighborsM origCells pos + origValue <- lift $ readArray origCells pos + lift . writeArray cells pos + $ if origValue + then neighs >= params ^. deathLimit + else neighs > params ^. birthLimit diff --git a/users/glittershark/xanthous/src/Xanthous/Generators/Dungeon.hs b/users/glittershark/xanthous/src/Xanthous/Generators/Dungeon.hs new file mode 100644 index 000000000000..f30713ce1182 --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Generators/Dungeon.hs @@ -0,0 +1,190 @@ +{-# LANGUAGE TemplateHaskell #-} +-------------------------------------------------------------------------------- +module Xanthous.Generators.Dungeon + ( Params(..) + , defaultParams + , parseParams + , generate + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude hiding ((:>)) +-------------------------------------------------------------------------------- +import Control.Monad.Random +import Data.Array.ST +import Data.Array.IArray (amap) +import Data.Stream.Infinite (Stream(..)) +import qualified Data.Stream.Infinite as Stream +import qualified Data.Graph.Inductive.Graph as Graph +import Data.Graph.Inductive.PatriciaTree +import qualified Data.List.NonEmpty as NE +import Data.Maybe (fromJust) +import Linear.V2 +import Linear.Metric +import qualified Options.Applicative as Opt +-------------------------------------------------------------------------------- +import Xanthous.Random +import Xanthous.Data hiding (x, y, _x, _y, edges) +import Xanthous.Generators.Util +import Xanthous.Util.Graphics (delaunay, straightLine) +import Xanthous.Util.Graph (mstSubGraph) +-------------------------------------------------------------------------------- + +data Params = Params + { _numRoomsRange :: (Word, Word) + , _roomDimensionRange :: (Word, Word) + , _connectednessRatioRange :: (Double, Double) + } + deriving stock (Show, Eq, Ord, Generic) +makeLenses ''Params + +defaultParams :: Params +defaultParams = Params + { _numRoomsRange = (6, 8) + , _roomDimensionRange = (3, 12) + , _connectednessRatioRange = (0.1, 0.15) + } + +parseParams :: Opt.Parser Params +parseParams = Params + <$> parseRange + "num-rooms" + "number of rooms to generate in the dungeon" + "ROOMS" + (defaultParams ^. numRoomsRange) + <*> parseRange + "room-size" + "size in tiles of one of the sides of a room" + "TILES" + (defaultParams ^. roomDimensionRange) + <*> parseRange + "connectedness-ratio" + ( "ratio of edges from the delaunay triangulation to re-add to the " + <> "minimum-spanning-tree") + "RATIO" + (defaultParams ^. 
connectednessRatioRange) + <**> Opt.helper + where + parseRange name desc metavar (defMin, defMax) = + (,) + <$> Opt.option Opt.auto + ( Opt.long ("min-" <> name) + <> Opt.value defMin + <> Opt.showDefault + <> Opt.help ("Minimum " <> desc) + <> Opt.metavar metavar + ) + <*> Opt.option Opt.auto + ( Opt.long ("max-" <> name) + <> Opt.value defMax + <> Opt.showDefault + <> Opt.help ("Maximum " <> desc) + <> Opt.metavar metavar + ) + +generate :: RandomGen g => Params -> Dimensions -> g -> Cells +generate params dims gen + = amap not + $ runSTUArray + $ fmap fst + $ flip runRandT gen + $ generate' params dims + +-------------------------------------------------------------------------------- + +generate' :: RandomGen g => Params -> Dimensions -> CellM g s (MCells s) +generate' params dims = do + cells <- initializeEmpty dims + rooms <- genRooms params dims + for_ rooms $ fillRoom cells + + let fullRoomGraph = delaunayRoomGraph rooms + mst = mstSubGraph fullRoomGraph + mstEdges = Graph.edges mst + nonMSTEdges = filter (\(n₁, n₂, _) -> (n₁, n₂) `notElem` mstEdges) + $ Graph.labEdges fullRoomGraph + + reintroEdgeCount <- floor . (* fromIntegral (length nonMSTEdges)) + <$> getRandomR (params ^. connectednessRatioRange) + let reintroEdges = take reintroEdgeCount nonMSTEdges + corridorGraph = Graph.insEdges reintroEdges mst + + corridors <- traverse + ( uncurry corridorBetween + . over both (fromJust . Graph.lab corridorGraph) + ) $ Graph.edges corridorGraph + + for_ (join corridors) $ \pt -> lift $ writeArray cells pt True + + pure cells + +type Room = Box Word + +genRooms :: MonadRandom m => Params -> Dimensions -> m [Room] +genRooms params dims = do + numRooms <- fromIntegral <$> getRandomR (params ^. numRoomsRange) + subRand . fmap (Stream.take numRooms . removeIntersecting []) . infinitely $ do + roomWidth <- getRandomR $ params ^. roomDimensionRange + roomHeight <- getRandomR $ params ^. roomDimensionRange + xPos <- getRandomR (0, dims ^. width - roomWidth) + yPos <- getRandomR (0, dims ^. height - roomHeight) + pure Box + { _topLeftCorner = V2 xPos yPos + , _dimensions = V2 roomWidth roomHeight + } + where + removeIntersecting seen (room :> rooms) + | any (boxIntersects room) seen + = removeIntersecting seen rooms + | otherwise + = room :> removeIntersecting (room : seen) rooms + streamRepeat x = x :> streamRepeat x + infinitely = sequence . streamRepeat + +delaunayRoomGraph :: [Room] -> Gr Room Double +delaunayRoomGraph rooms = + Graph.insEdges edges . Graph.insNodes nodes $ Graph.empty + where + edges = map (\((n₁, room₁), (n₂, room₂)) -> (n₁, n₂, roomDist room₁ room₂)) + . over (mapped . both) snd + . delaunay @Double + . NE.fromList + . map (\p@(_, room) -> (boxCenter $ fromIntegral <$> room, p)) + $ nodes + nodes = zip [0..] rooms + roomDist = distance `on` (boxCenter . fmap fromIntegral) + +fillRoom :: MCells s -> Room -> CellM g s () +fillRoom cells room = + let V2 posx posy = room ^. topLeftCorner + V2 dimx dimy = room ^. dimensions + in for_ [posx .. posx + dimx] $ \x -> + for_ [posy .. posy + dimy] $ \y -> + lift $ writeArray cells (V2 x y) True + +corridorBetween :: MonadRandom m => Room -> Room -> m [V2 Word] +corridorBetween originRoom destinationRoom + = straightLine <$> origin <*> destination + where + origin = choose . NE.fromList =<< originEdge + destination = choose . NE.fromList =<< destinationEdge + originEdge = pickEdge originRoom originCorner + destinationEdge = pickEdge destinationRoom destinationCorner + pickEdge room corner = choose . 
over both (boxEdge room) $ cornerEdges corner + originCorner = + case ( compare (originRoom ^. topLeftCorner . _x) + (destinationRoom ^. topLeftCorner . _x) + , compare (originRoom ^. topLeftCorner . _y) + (destinationRoom ^. topLeftCorner . _y) + ) of + (LT, LT) -> BottomRight + (LT, GT) -> TopRight + (GT, LT) -> BottomLeft + (GT, GT) -> TopLeft + + (EQ, LT) -> BottomLeft + (EQ, GT) -> TopRight + (GT, EQ) -> TopLeft + (LT, EQ) -> BottomRight + (EQ, EQ) -> TopLeft -- should never happen + + destinationCorner = opposite originCorner diff --git a/users/glittershark/xanthous/src/Xanthous/Generators/LevelContents.hs b/users/glittershark/xanthous/src/Xanthous/Generators/LevelContents.hs new file mode 100644 index 000000000000..8ebcc7f4da83 --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Generators/LevelContents.hs @@ -0,0 +1,133 @@ +-------------------------------------------------------------------------------- +module Xanthous.Generators.LevelContents + ( chooseCharacterPosition + , randomItems + , randomCreatures + , randomDoors + , placeDownStaircase + , tutorialMessage + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude hiding (any, toList) +-------------------------------------------------------------------------------- +import Control.Monad.Random +import Data.Array.IArray (amap, bounds, rangeSize, (!)) +import qualified Data.Array.IArray as Arr +import Data.Foldable (any, toList) +import Linear.V2 +-------------------------------------------------------------------------------- +import Xanthous.Generators.Util +import Xanthous.Random +import Xanthous.Data + ( positionFromV2, Position, _Position + , rotations, arrayNeighbors, Neighbors(..) + , neighborPositions + ) +import Xanthous.Data.EntityMap (EntityMap, _EntityMap) +import Xanthous.Entities.Raws (rawsWithType, RawType) +import qualified Xanthous.Entities.Item as Item +import Xanthous.Entities.Item (Item) +import qualified Xanthous.Entities.Creature as Creature +import Xanthous.Entities.Creature (Creature) +import Xanthous.Entities.Environment + (GroundMessage(..), Door(..), unlockedDoor, Staircase(..)) +import Xanthous.Messages (message_) +import Xanthous.Util.Graphics (circle) +-------------------------------------------------------------------------------- + +chooseCharacterPosition :: MonadRandom m => Cells -> m Position +chooseCharacterPosition = randomPosition + +randomItems :: MonadRandom m => Cells -> m (EntityMap Item) +randomItems = randomEntities Item.newWithType (0.0004, 0.001) + +placeDownStaircase :: MonadRandom m => Cells -> m (EntityMap Staircase) +placeDownStaircase cells = do + pos <- randomPosition cells + pure $ _EntityMap # [(pos, DownStaircase)] + +randomDoors :: MonadRandom m => Cells -> m (EntityMap Door) +randomDoors cells = do + doorRatio <- getRandomR subsetRange + let numDoors = floor $ doorRatio * fromIntegral (length candidateCells) + doorPositions = + removeAdjacent . fmap positionFromV2 . take numDoors $ candidateCells + doors = zip doorPositions $ repeat unlockedDoor + pure $ _EntityMap # doors + where + removeAdjacent = + foldr (\pos acc -> + if pos `elem` (acc >>= toList . neighborPositions) + then acc + else pos : acc + ) [] + candidateCells = filter doorable $ Arr.indices cells + subsetRange = (0.8 :: Double, 1.0) + doorable pos = + not (fromMaybe True $ cells ^? ix pos) + && any (teeish . 
fmap (fromMaybe True)) + (rotations $ arrayNeighbors cells pos) + -- only generate doors at the *ends* of hallways, eg (where O is walkable, + -- X is a wall, and D is a door): + -- + -- O O O + -- X D X + -- O + teeish (fmap not -> (Neighbors tl t tr l r _ b _ )) = + and [tl, t, tr, b] && (and . fmap not) [l, r] + +randomCreatures :: MonadRandom m => Cells -> m (EntityMap Creature) +randomCreatures = randomEntities Creature.newWithType (0.0007, 0.002) + +tutorialMessage :: MonadRandom m + => Cells + -> Position -- ^ CharacterPosition + -> m (EntityMap GroundMessage) +tutorialMessage cells characterPosition = do + let distance = 2 + pos <- fmap (fromMaybe (error "No valid positions for tutorial message?")) + . choose . ChooseElement + $ accessiblePositionsWithin distance cells characterPosition + msg <- message_ ["tutorial", "message1"] + pure $ _EntityMap # [(pos, GroundMessage msg)] + where + accessiblePositionsWithin :: Int -> Cells -> Position -> [Position] + accessiblePositionsWithin dist valid pos = + review _Position + <$> filter + (\pt -> not $ valid ! (fromIntegral <$> pt)) + (circle (pos ^. _Position) dist) + +randomEntities + :: forall entity raw m. (MonadRandom m, RawType raw) + => (raw -> entity) + -> (Float, Float) + -> Cells + -> m (EntityMap entity) +randomEntities newWithType sizeRange cells = + case fromNullable $ rawsWithType @raw of + Nothing -> pure mempty + Just raws -> do + let len = rangeSize $ bounds cells + (numEntities :: Int) <- + floor . (* fromIntegral len) <$> getRandomR sizeRange + entities <- for [0..numEntities] $ const $ do + pos <- randomPosition cells + raw <- choose raws + let entity = newWithType raw + pure (pos, entity) + pure $ _EntityMap # entities + +randomPosition :: MonadRandom m => Cells -> m Position +randomPosition = fmap positionFromV2 . choose . impureNonNull . cellCandidates + +-- cellCandidates :: Cells -> Cells +cellCandidates :: Cells -> Set (V2 Word) +cellCandidates + -- find the largest contiguous region of cells in the cave. + = maximumBy (compare `on` length) + . fromMaybe (error "No regions generated! this should never happen.") + . fromNullable + . regions + -- cells ends up with true = wall, we want true = can put an item here + . 
amap not diff --git a/users/glittershark/xanthous/src/Xanthous/Generators/Util.hs b/users/glittershark/xanthous/src/Xanthous/Generators/Util.hs new file mode 100644 index 000000000000..88aadd5aadd9 --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Generators/Util.hs @@ -0,0 +1,220 @@ +{-# LANGUAGE QuantifiedConstraints #-} +{-# LANGUAGE AllowAmbiguousTypes #-} +-------------------------------------------------------------------------------- +module Xanthous.Generators.Util + ( MCells + , Cells + , CellM + , randInitialize + , initializeEmpty + , numAliveNeighborsM + , numAliveNeighbors + , fillOuterEdgesM + , cloneMArray + , floodFill + , regions + , fillAll + , fillAllM + , fromPoints + , fromPointsM + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude hiding (Foldable, toList, for_) +-------------------------------------------------------------------------------- +import Data.Array.ST +import Data.Array.Unboxed +import Control.Monad.ST +import Control.Monad.Random +import Data.Monoid +import Data.Foldable (Foldable, toList, for_) +import qualified Data.Set as Set +import Data.Semigroup.Foldable +import Linear.V2 +-------------------------------------------------------------------------------- +import Xanthous.Util (foldlMapM', maximum1, minimum1) +import Xanthous.Data (Dimensions, width, height) +-------------------------------------------------------------------------------- + +type MCells s = STUArray s (V2 Word) Bool +type Cells = UArray (V2 Word) Bool +type CellM g s a = RandT g (ST s) a + +randInitialize :: RandomGen g => Dimensions -> Double -> CellM g s (MCells s) +randInitialize dims aliveChance = do + res <- initializeEmpty dims + for_ [0..dims ^. width] $ \i -> + for_ [0..dims ^. height] $ \j -> do + val <- (>= aliveChance) <$> getRandomR (0, 1) + lift $ writeArray res (V2 i j) val + pure res + +initializeEmpty :: RandomGen g => Dimensions -> CellM g s (MCells s) +initializeEmpty dims = + lift $ newArray (0, V2 (dims ^. width) (dims ^. height)) False + +numAliveNeighborsM + :: forall a i m + . (MArray a Bool m, Ix i, Integral i) + => a (V2 i) Bool + -> V2 i + -> m Word +numAliveNeighborsM cells (V2 x y) = do + cellBounds <- getBounds cells + getSum <$> foldlMapM' + (fmap (Sum . fromIntegral . fromEnum) . boundedGet cellBounds) + neighborPositions + + where + boundedGet :: (V2 i, V2 i) -> (Int, Int) -> m Bool + boundedGet (V2 minX minY, V2 maxX maxY) (i, j) + | x <= minX + || y <= minY + || x >= maxX + || y >= maxY + = pure True + | otherwise = + let nx = fromIntegral $ fromIntegral x + i + ny = fromIntegral $ fromIntegral y + j + in readArray cells $ V2 nx ny + +numAliveNeighbors + :: forall a i + . (IArray a Bool, Ix i, Integral i) + => a (V2 i) Bool + -> V2 i + -> Word +numAliveNeighbors cells (V2 x y) = + let cellBounds = bounds cells + in getSum $ foldMap + (Sum . fromIntegral . fromEnum . boundedGet cellBounds) + neighborPositions + + where + boundedGet :: (V2 i, V2 i) -> (Int, Int) -> Bool + boundedGet (V2 minX minY, V2 maxX maxY) (i, j) + | x <= minX + || y <= minY + || x >= maxX + || y >= maxY + = True + | otherwise = + let nx = fromIntegral $ fromIntegral x + i + ny = fromIntegral $ fromIntegral y + j + in cells ! 
V2 nx ny + +neighborPositions :: [(Int, Int)] +neighborPositions = [(i, j) | i <- [-1..1], j <- [-1..1], (i, j) /= (0, 0)] + +fillOuterEdgesM :: (MArray a Bool m, Ix i) => a (V2 i) Bool -> m () +fillOuterEdgesM arr = do + (V2 minX minY, V2 maxX maxY) <- getBounds arr + for_ (range (minX, maxX)) $ \x -> do + writeArray arr (V2 x minY) True + writeArray arr (V2 x maxY) True + for_ (range (minY, maxY)) $ \y -> do + writeArray arr (V2 minX y) True + writeArray arr (V2 maxX y) True + +cloneMArray + :: forall a a' i e m. + ( Ix i + , MArray a e m + , MArray a' e m + , IArray UArray e + ) + => a i e + -> m (a' i e) +cloneMArray = thaw @_ @UArray <=< freeze + +-------------------------------------------------------------------------------- + +-- | Flood fill a cell array starting at a point, returning a list of all the +-- (true) cell locations reachable from that point +floodFill :: forall a i. + ( IArray a Bool + , Ix i + , Enum i + , Bounded i + , Eq i + ) + => a (V2 i) Bool -- ^ array + -> (V2 i) -- ^ position + -> Set (V2 i) +floodFill = go mempty + where + go :: Set (V2 i) -> a (V2 i) Bool -> (V2 i) -> Set (V2 i) + go res arr@(bounds -> arrBounds) idx@(V2 x y) + | not (inRange arrBounds idx) = res + | not (arr ! idx) = res + | otherwise = + let neighbors + = filter (inRange arrBounds) + . filter (/= idx) + . filter (`notMember` res) + $ V2 + <$> [(if x == minBound then x else pred x) + .. + (if x == maxBound then x else succ x)] + <*> [(if y == minBound then y else pred y) + .. + (if y == maxBound then y else succ y)] + in foldl' (\r idx' -> + if arr ! idx' + then r <> (let r' = r & contains idx' .~ True + in r' `seq` go r' arr idx') + else r) + (res & contains idx .~ True) neighbors +{-# SPECIALIZE floodFill :: UArray (V2 Word) Bool -> (V2 Word) -> Set (V2 Word) #-} + +-- | Gives a list of all the disconnected regions in a cell array, represented +-- each as lists of points +regions :: forall a i. + ( IArray a Bool + , Ix i + , Enum i + , Bounded i + , Eq i + ) + => a (V2 i) Bool + -> [Set (V2 i)] +regions arr + | Just firstPoint <- findFirstPoint arr = + let region = floodFill arr firstPoint + arr' = fillAll region arr + in region : regions arr' + | otherwise = [] + where + findFirstPoint :: a (V2 i) Bool -> Maybe (V2 i) + findFirstPoint = fmap fst . headMay . filter snd . assocs +{-# SPECIALIZE regions :: UArray (V2 Word) Bool -> [Set (V2 Word)] #-} + +fillAll :: (IArray a Bool, Ix i, Foldable f) => f i -> a i Bool -> a i Bool +fillAll ixes a = accum (const fst) a $ (, (False, ())) <$> toList ixes + +fillAllM :: (MArray a Bool m, Ix i, Foldable f) => f i -> a i Bool -> m () +fillAllM ixes a = for_ ixes $ \i -> writeArray a i False + +fromPoints + :: forall a f i. 
+ ( IArray a Bool + , Ix i + , Functor f + , Foldable1 f + ) + => f (i, i) + -> a (i, i) Bool +fromPoints points = + let pts = Set.fromList $ toList points + dims = ( (minimum1 $ fst <$> points, minimum1 $ snd <$> points) + , (maximum1 $ fst <$> points, maximum1 $ snd <$> points) + ) + in array dims $ range dims <&> \i -> (i, i `member` pts) + +fromPointsM + :: (MArray a Bool m, Ix i, Element f ~ i, MonoFoldable f) + => NonNull f + -> m (a i Bool) +fromPointsM points = do + arr <- newArray (minimum points, maximum points) False + fillAllM (otoList points) arr + pure arr diff --git a/users/glittershark/xanthous/src/Xanthous/Generators/Village.hs b/users/glittershark/xanthous/src/Xanthous/Generators/Village.hs new file mode 100644 index 000000000000..cc9c9d963f5c --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Generators/Village.hs @@ -0,0 +1,125 @@ +module Xanthous.Generators.Village + ( fromCave + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude hiding (any, failing, toList) +-------------------------------------------------------------------------------- +import Control.Monad.Random (MonadRandom) +import Control.Monad.State (execStateT, MonadState, modify) +import Control.Monad.Trans.Maybe +import Control.Parallel.Strategies +import Data.Array.IArray +import Data.Foldable (any, toList) +-------------------------------------------------------------------------------- +import Xanthous.Data +import Xanthous.Data.EntityMap (EntityMap) +import qualified Xanthous.Data.EntityMap as EntityMap +import Xanthous.Entities.Environment +import Xanthous.Generators.Util +import Xanthous.Game.State (SomeEntity(..)) +import Xanthous.Random +-------------------------------------------------------------------------------- + +fromCave :: MonadRandom m + => Cells -- ^ The positions of all the walls + -> m (EntityMap SomeEntity) +fromCave wallPositions = execStateT (fromCave' wallPositions) mempty + +fromCave' :: forall m. (MonadRandom m, MonadState (EntityMap SomeEntity) m) + => Cells + -> m () +fromCave' wallPositions = failing (pure ()) $ do + Just villageRegion <- + choose + . (`using` parTraversable rdeepseq) + . weightedBy (\reg -> let circSize = length $ circumference reg + in if circSize == 50 + then (1.0 :: Double) + else 1.0 / (fromIntegral . abs $ circSize - 50)) + $ regions closedHallways + + let circ = setFromList . circumference $ villageRegion + + centerPoints <- chooseSubset (0.1 :: Double) $ toList circ + + roomTiles <- foldM + (flip $ const $ stepOut circ) + (map pure centerPoints) + [0 :: Int ..2] + + let roomWalls = circumference . setFromList @(Set _) <$> roomTiles + allWalls = join roomWalls + + doorPositions <- fmap join . for roomWalls $ \room -> + let candidates = filter (`notMember` circ) room + in fmap toList . choose $ ChooseElement candidates + + let entryways = + filter (\pt -> + let ncs = neighborCells pt + in any ((&&) <$> (not . 
(wallPositions !)) + <*> (`notMember` villageRegion)) ncs + && any ((&&) <$> (`member` villageRegion) + <*> (`notElem` allWalls)) ncs) + $ toList villageRegion + + Just entryway <- choose $ ChooseElement entryways + + for_ (filter ((&&) <$> (`notElem` doorPositions) <*> (/= entryway)) allWalls) + $ insertEntity Wall + for_ (filter (/= entryway) doorPositions) $ insertEntity unlockedDoor + insertEntity unlockedDoor entryway + + + where + insertEntity e pt = modify $ EntityMap.insertAt (ptToPos pt) $ SomeEntity e + ptToPos pt = _Position # (fromIntegral <$> pt) + + stepOut :: Set (V2 Word) -> [[V2 Word]] -> MaybeT m [[V2 Word]] + stepOut circ rooms = for rooms $ \room -> + let nextLevels = hashNub $ toList . neighborCells =<< room + in pure + . (<> room) + $ filter ((&&) <$> (`notMember` circ) <*> (`notElem` join rooms)) + nextLevels + + circumference pts = + filter (any (`notMember` pts) . neighborCells) $ toList pts + closedHallways = closeHallways livePositions + livePositions = amap not wallPositions + +-------------------------------------------------------------------------------- + +closeHallways :: Cells -> Cells +closeHallways livePositions = + livePositions // mapMaybe closeHallway (assocs livePositions) + where + closeHallway (_, False) = Nothing + closeHallway (pos, _) + | isHallway pos = Just (pos, False) + | otherwise = Nothing + isHallway pos = any ((&&) <$> not . view left <*> not . view right) + . rotations + . fmap (fromMaybe False) + $ arrayNeighbors livePositions pos + +failing :: Monad m => m a -> MaybeT m a -> m a +failing result = (maybe result pure =<<) . runMaybeT + +{- + +import Xanthous.Generators.Village +import Xanthous.Generators +import Xanthous.Data +import System.Random +import qualified Data.Text +import qualified Xanthous.Generators.CaveAutomata as CA +let gi = GeneratorInput SCaveAutomata CA.defaultParams +wallPositions <- generateFromInput gi (Dimensions 80 50) <$> getStdGen +putStrLn . Data.Text.unpack $ showCells wallPositions + +import Data.Array.IArray +let closedHallways = closeHallways . amap not $ wallPositions +putStrLn . Data.Text.unpack . showCells $ amap not closedHallways + +-} diff --git a/users/glittershark/xanthous/src/Xanthous/Messages.hs b/users/glittershark/xanthous/src/Xanthous/Messages.hs new file mode 100644 index 000000000000..2b1b3da1e8c1 --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Messages.hs @@ -0,0 +1,107 @@ +{-# LANGUAGE TemplateHaskell #-} +-------------------------------------------------------------------------------- +module Xanthous.Messages + ( Message(..) + , resolve + , MessageMap(..) 
+ , lookupMessage + + -- * Game messages + , messages + , render + , lookup + , message + , message_ + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude hiding (lookup) +-------------------------------------------------------------------------------- +import Control.Monad.Random.Class (MonadRandom) +import Data.Aeson (FromJSON, ToJSON, toJSON) +import qualified Data.Aeson as JSON +import Data.Aeson.Generic.DerivingVia +import Data.FileEmbed +import Data.List.NonEmpty +import Test.QuickCheck hiding (choose) +import Test.QuickCheck.Arbitrary.Generic +import Test.QuickCheck.Instances.UnorderedContainers () +import Text.Mustache +import qualified Data.Yaml as Yaml +-------------------------------------------------------------------------------- +import Xanthous.Random +import Xanthous.Orphans () +-------------------------------------------------------------------------------- + +data Message = Single Template | Choice (NonEmpty Template) + deriving stock (Show, Eq, Ord, Generic) + deriving anyclass (CoArbitrary, Function, NFData) + deriving (ToJSON, FromJSON) + via WithOptions '[ SumEnc UntaggedVal ] + Message + +instance Arbitrary Message where + arbitrary = genericArbitrary + shrink = genericShrink + +resolve :: MonadRandom m => Message -> m Template +resolve (Single t) = pure t +resolve (Choice ts) = choose ts + +data MessageMap = Direct Message | Nested (HashMap Text MessageMap) + deriving stock (Show, Eq, Ord, Generic) + deriving anyclass (CoArbitrary, Function, NFData) + deriving (ToJSON, FromJSON) + via WithOptions '[ SumEnc UntaggedVal ] + MessageMap + +instance Arbitrary MessageMap where + arbitrary = frequency [ (10, Direct <$> arbitrary) + , (1, Nested <$> arbitrary) + ] + +lookupMessage :: [Text] -> MessageMap -> Maybe Message +lookupMessage [] (Direct msg) = Just msg +lookupMessage (k:ks) (Nested m) = lookupMessage ks =<< m ^. at k +lookupMessage _ _ = Nothing + +type instance Index MessageMap = [Text] +type instance IxValue MessageMap = Message +instance Ixed MessageMap where + ix [] f (Direct msg) = Direct <$> f msg + ix (k:ks) f (Nested m) = case m ^. at k of + Just m' -> ix ks f m' <&> \m'' -> + Nested $ m & at k ?~ m'' + Nothing -> pure $ Nested m + ix _ _ m = pure m + +-------------------------------------------------------------------------------- + +rawMessages :: ByteString +rawMessages = $(embedFile "src/Xanthous/messages.yaml") + +messages :: MessageMap +messages + = either (error . Yaml.prettyPrintParseException) id + $ Yaml.decodeEither' rawMessages + +render :: (MonadRandom m, ToJSON params) => Message -> params -> m Text +render msg params = do + tpl <- resolve msg + pure . toStrict . renderMustache tpl $ toJSON params + +lookup :: [Text] -> Message +lookup path = fromMaybe notFound $ messages ^? ix path + where notFound + = Single + $ compileMustacheText "template" "Message not found" + ^?! _Right + +message :: (MonadRandom m, ToJSON params) => [Text] -> params -> m Text +message path params = maybe notFound (`render` params) $ messages ^? ix path + where + notFound = pure "Message not found" + +message_ :: (MonadRandom m) => [Text] -> m Text +message_ path = maybe notFound (`render` JSON.object []) $ messages ^? 
ix path + where + notFound = pure "Message not found" diff --git a/users/glittershark/xanthous/src/Xanthous/Messages/Template.hs b/users/glittershark/xanthous/src/Xanthous/Messages/Template.hs new file mode 100644 index 000000000000..2998db7f7bf9 --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Messages/Template.hs @@ -0,0 +1,275 @@ +{-# LANGUAGE DeriveDataTypeable #-} +-------------------------------------------------------------------------------- +module Xanthous.Messages.Template + ( -- * Template AST + Template(..) + , Substitution(..) + , Filter(..) + + -- ** Template AST transformations + , reduceTemplate + + -- * Template parser + , template + , runParser + , errorBundlePretty + + -- * Template pretty-printer + , ppTemplate + + -- * Rendering templates + , TemplateVar(..) + , nested + , TemplateVars(..) + , vars + , RenderError + , render + ) +where +-------------------------------------------------------------------------------- +import Xanthous.Prelude hiding + (many, concat, try, elements, some, parts, Filter) +-------------------------------------------------------------------------------- +import Test.QuickCheck hiding (label) +import Test.QuickCheck.Instances.Text () +import Test.QuickCheck.Instances.Semigroup () +import Test.QuickCheck.Checkers (EqProp) +import Control.Monad.Combinators.NonEmpty +import Data.List.NonEmpty (NonEmpty(..)) +import Data.Data +import Text.Megaparsec hiding (sepBy1, some) +import Text.Megaparsec.Char +import qualified Text.Megaparsec.Char.Lexer as L +import Data.Function (fix) +-------------------------------------------------------------------------------- +import Xanthous.Util (EqEqProp(..)) +-------------------------------------------------------------------------------- + +genIdentifier :: Gen Text +genIdentifier = pack <$> listOf1 (elements identifierChars) + +identifierChars :: String +identifierChars = ['a'..'z'] <> ['A'..'Z'] <> ['-', '_'] + +newtype Filter = FilterName Text + deriving stock (Show, Eq, Ord, Generic, Data) + deriving anyclass (NFData) + deriving (IsString) via Text + +instance Arbitrary Filter where + arbitrary = FilterName <$> genIdentifier + shrink (FilterName fn) = fmap FilterName . filter (not . null) $ shrink fn + +data Substitution + = SubstPath (NonEmpty Text) + | SubstFilter Substitution Filter + deriving stock (Show, Eq, Ord, Generic, Data) + deriving anyclass (NFData) + +instance Arbitrary Substitution where + arbitrary = sized . fix $ \gen n -> + let leaves = + [ SubstPath <$> ((:|) <$> genIdentifier <*> listOf genIdentifier)] + subtree = gen $ n `div` 2 + in if n == 0 + then oneof leaves + else oneof $ leaves <> [ SubstFilter <$> subtree <*> arbitrary ] + shrink (SubstPath pth) = + fmap SubstPath + . filter (not . 
any ((||) <$> null <*> any (`notElem` identifierChars))) + $ shrink pth + shrink (SubstFilter s f) + = shrink s + <> (uncurry SubstFilter <$> shrink (s, f)) + +data Template + = Literal Text + | Subst Substitution + | Concat Template Template + deriving stock (Show, Generic, Data) + deriving anyclass (NFData) + deriving EqProp via EqEqProp Template + +instance Plated Template where + plate _ tpl@(Literal _) = pure tpl + plate _ tpl@(Subst _) = pure tpl + plate f (Concat tpl₁ tpl₂) = Concat <$> f tpl₁ <*> f tpl₂ + +reduceTemplate :: Template -> Template +reduceTemplate = transform $ \case + (Concat (Literal t₁) (Literal t₂)) -> Literal (t₁ <> t₂) + (Concat (Literal "") t) -> t + (Concat t (Literal "")) -> t + (Concat t₁ (Concat t₂ t₃)) -> Concat (Concat t₁ t₂) t₃ + (Concat (Concat t₁ (Literal t₂)) (Literal t₃)) -> (Concat t₁ (Literal $ t₂ <> t₃)) + t -> t + +instance Eq Template where + tpl₁ == tpl₂ = case (reduceTemplate tpl₁, reduceTemplate tpl₂) of + (Literal t₁, Literal t₂) -> t₁ == t₂ + (Subst s₁, Subst s₂) -> s₁ == s₂ + (Concat ta₁ ta₂, Concat tb₁ tb₂) -> ta₁ == tb₁ && ta₂ == tb₂ + _ -> False + +instance Arbitrary Template where + arbitrary = sized . fix $ \gen n -> + let leaves = [ Literal . pack . filter (`notElem` ['\\', '{']) <$> arbitrary + , Subst <$> arbitrary + ] + subtree = gen $ n `div` 2 + genConcat = Concat <$> subtree <*> subtree + in if n == 0 + then oneof leaves + else oneof $ genConcat : leaves + shrink (Literal t) = Literal <$> shrink t + shrink (Subst s) = Subst <$> shrink s + shrink (Concat t₁ t₂) + = shrink t₁ + <> shrink t₂ + <> (Concat <$> shrink t₁ <*> shrink t₂) + +instance Semigroup Template where + (<>) = Concat + +instance Monoid Template where + mempty = Literal "" + +-------------------------------------------------------------------------------- + +type Parser = Parsec Void Text + +sc :: Parser () +sc = L.space space1 empty empty + +lexeme :: Parser a -> Parser a +lexeme = L.lexeme sc + +symbol :: Text -> Parser Text +symbol = L.symbol sc + +identifier :: Parser Text +identifier = lexeme . label "identifier" $ do + firstChar <- letterChar <|> oneOf ['-', '_'] + restChars <- many $ alphaNumChar <|> oneOf ['-', '_'] + pure $ firstChar <| pack restChars + +filterName :: Parser Filter +filterName = FilterName <$> identifier + +substitutionPath :: Parser Substitution +substitutionPath = SubstPath <$> sepBy1 identifier (char '.') + +substitutionFilter :: Parser Substitution +substitutionFilter = do + path <- substitutionPath + fs <- some $ symbol "|" *> filterName + pure $ foldl' SubstFilter path fs + -- pure $ SubstFilter path f + +substitutionContents :: Parser Substitution +substitutionContents + = try substitutionFilter + <|> substitutionPath + +substitution :: Parser Substitution +substitution = between (string "{{") (string "}}") substitutionContents + +literal :: Parser Template +literal = Literal <$> + ( (string "\\{" $> "{") + <|> takeWhile1P Nothing (`notElem` ['\\', '{']) + ) + +subst :: Parser Template +subst = Subst <$> substitution + +template' :: Parser Template +template' = do + parts <- many $ literal <|> subst + pure $ foldr Concat (Literal "") parts + + +template :: Parser Template +template = reduceTemplate <$> template' <* eof + +-------------------------------------------------------------------------------- + +ppSubstitution :: Substitution -> Text +ppSubstitution (SubstPath substParts) = intercalate "." 
substParts +ppSubstitution (SubstFilter s (FilterName f)) = ppSubstitution s <> " | " <> f + +ppTemplate :: Template -> Text +ppTemplate (Literal txt) = txt +ppTemplate (Subst s) = "{{" <> ppSubstitution s <> "}}" +ppTemplate (Concat tpl₁ tpl₂) = ppTemplate tpl₁ <> ppTemplate tpl₂ + +-------------------------------------------------------------------------------- + +data TemplateVar + = Val Text + | Nested (Map Text TemplateVar) + deriving stock (Show, Eq, Generic) + deriving anyclass (NFData) + +nested :: [(Text, TemplateVar)] -> TemplateVar +nested = Nested . mapFromList + +instance Arbitrary TemplateVar where + arbitrary = sized . fix $ \gen n -> + let nst = fmap mapFromList . listOf $ (,) <$> arbitrary <*> gen (n `div` 2) + in if n == 0 + then Val <$> arbitrary + else oneof [ Val <$> arbitrary + , Nested <$> nst] + +newtype TemplateVars = Vars { getTemplateVars :: Map Text TemplateVar } + deriving stock (Show, Eq, Generic) + deriving anyclass (NFData) + deriving (Arbitrary) via (Map Text TemplateVar) + +type instance Index TemplateVars = Text +type instance IxValue TemplateVars = TemplateVar +instance Ixed TemplateVars where + ix k f (Vars vs) = Vars <$> ix k f vs +instance At TemplateVars where + at k f (Vars vs) = Vars <$> at k f vs + +vars :: [(Text, TemplateVar)] -> TemplateVars +vars = Vars . mapFromList + +lookupVar :: TemplateVars -> NonEmpty Text -> Maybe TemplateVar +lookupVar vs (p :| []) = vs ^. at p +lookupVar vs (p :| (p₁ : ps)) = vs ^. at p >>= \case + (Val _) -> Nothing + (Nested vs') -> lookupVar (Vars vs') $ p₁ :| ps + +data RenderError + = NoSuchVariable (NonEmpty Text) + | NestedFurther (NonEmpty Text) + | NoSuchFilter Filter + deriving stock (Show, Eq, Generic) + deriving anyclass (NFData) + +renderSubst + :: Map Filter (Text -> Text) -- ^ Filters + -> TemplateVars + -> Substitution + -> Either RenderError Text +renderSubst _ vs (SubstPath pth) = + case lookupVar vs pth of + Just (Val v) -> Right v + Just (Nested _) -> Left $ NestedFurther pth + Nothing -> Left $ NoSuchVariable pth +renderSubst fs vs (SubstFilter s fn) = + case fs ^. at fn of + Just filterFn -> filterFn <$> renderSubst fs vs s + Nothing -> Left $ NoSuchFilter fn + +render + :: Map Filter (Text -> Text) -- ^ Filters + -> TemplateVars -- ^ Template variables + -> Template -- ^ Template + -> Either RenderError Text +render _ _ (Literal s) = pure s +render fs vs (Concat t₁ t₂) = (<>) <$> render fs vs t₁ <*> render fs vs t₂ +render fs vs (Subst s) = renderSubst fs vs s diff --git a/users/glittershark/xanthous/src/Xanthous/Monad.hs b/users/glittershark/xanthous/src/Xanthous/Monad.hs new file mode 100644 index 000000000000..db602de56f3a --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Monad.hs @@ -0,0 +1,76 @@ +-------------------------------------------------------------------------------- +module Xanthous.Monad + ( AppT(..) 
+ , AppM + , runAppT + , continue + , halt + + -- * Messages + , say + , say_ + , message + , message_ + , writeMessage + + -- * Autocommands + , cancelAutocommand + + -- * Events + , sendEvent + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude +import Control.Monad.Random +import Control.Monad.State +import qualified Brick +import Brick (EventM, Next) +import Brick.BChan (writeBChan) +import Data.Aeson (ToJSON, object) +-------------------------------------------------------------------------------- +import Xanthous.Data.App (AppEvent) +import Xanthous.Game.State +import Xanthous.Game.Env +import Xanthous.Messages (Message) +import qualified Xanthous.Messages as Messages +-------------------------------------------------------------------------------- + +halt :: AppT (EventM n) (Next GameState) +halt = lift . Brick.halt =<< get + +continue :: AppT (EventM n) (Next GameState) +continue = lift . Brick.continue =<< get + +-------------------------------------------------------------------------------- + +say :: (MonadRandom m, ToJSON params, MonadState GameState m) + => [Text] -> params -> m () +say msgPath = writeMessage <=< Messages.message msgPath + +say_ :: (MonadRandom m, MonadState GameState m) => [Text] -> m () +say_ msgPath = say msgPath $ object [] + +message :: (MonadRandom m, ToJSON params, MonadState GameState m) + => Message -> params -> m () +message msg = writeMessage <=< Messages.render msg + +message_ :: (MonadRandom m, MonadState GameState m) + => Message -> m () +message_ msg = message msg $ object [] + +writeMessage :: MonadState GameState m => Text -> m () +writeMessage m = messageHistory %= pushMessage m + +-- | Cancel the currently active autocommand, if any +cancelAutocommand :: (MonadState GameState m, MonadIO m) => m () +cancelAutocommand = do + traverse_ (liftIO . cancel . snd) =<< preuse (autocommand . 
_ActiveAutocommand) + autocommand .= NoAutocommand + +-------------------------------------------------------------------------------- + +-- | Send an event to the app in an environment where the game env is available +sendEvent :: (MonadReader GameEnv m, MonadIO m) => AppEvent -> m () +sendEvent evt = do + ec <- view eventChan + liftIO $ writeBChan ec evt diff --git a/users/glittershark/xanthous/src/Xanthous/Orphans.hs b/users/glittershark/xanthous/src/Xanthous/Orphans.hs new file mode 100644 index 000000000000..39821150ef97 --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Orphans.hs @@ -0,0 +1,338 @@ +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE StandaloneDeriving #-} +{-# LANGUAGE UndecidableInstances #-} +{-# LANGUAGE PatternSynonyms #-} +{-# LANGUAGE PackageImports #-} +{-# OPTIONS_GHC -Wno-orphans #-} +-------------------------------------------------------------------------------- +module Xanthous.Orphans + ( ppTemplate + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude hiding (elements, (.=)) +-------------------------------------------------------------------------------- +import Data.Aeson +import Data.Aeson.Types (typeMismatch) +import Data.List.NonEmpty (NonEmpty(..)) +import Graphics.Vty.Attributes +import Brick.Widgets.Edit +import Data.Text.Zipper.Generic (GenericTextZipper) +import Brick.Widgets.Core (getName) +import System.Random (StdGen) +import Test.QuickCheck +import "quickcheck-instances" Test.QuickCheck.Instances () +import Text.Megaparsec (errorBundlePretty) +import Text.Megaparsec.Pos +import Text.Mustache +import Text.Mustache.Type ( showKey ) +import Control.Monad.State +import Linear +-------------------------------------------------------------------------------- +import Xanthous.Util.JSON +import Xanthous.Util.QuickCheck +-------------------------------------------------------------------------------- + +instance forall s a. + ( Cons s s a a + , IsSequence s + , Element s ~ a + ) => Cons (NonNull s) (NonNull s) a a where + _Cons = prism hither yon + where + hither :: (a, NonNull s) -> NonNull s + hither (a, ns) = + let s = toNullable ns + in impureNonNull $ a <| s + + yon :: NonNull s -> Either (NonNull s) (a, NonNull s) + yon ns = case nuncons ns of + (_, Nothing) -> Left ns + (x, Just xs) -> Right (x, xs) + +instance forall a. Cons (NonEmpty a) (NonEmpty a) a a where + _Cons = prism hither yon + where + hither :: (a, NonEmpty a) -> NonEmpty a + hither (a, x :| xs) = a :| (x : xs) + + yon :: NonEmpty a -> Either (NonEmpty a) (a, NonEmpty a) + yon ns@(x :| xs) = case xs of + (y : ys) -> Right (x, y :| ys) + [] -> Left ns + + +instance Arbitrary PName where + arbitrary = PName . pack <$> listOf1 (elements ['a'..'z']) + +instance Arbitrary Key where + arbitrary = Key <$> listOf1 arbSafeText + where arbSafeText = pack <$> listOf1 (elements ['a'..'z']) + shrink (Key []) = error "unreachable" + shrink k@(Key [_]) = pure k + shrink (Key (p:ps)) = Key . (p :) <$> shrink ps + +instance Arbitrary Pos where + arbitrary = mkPos . succ . abs <$> arbitrary + shrink (unPos -> 1) = [] + shrink (unPos -> x) = mkPos <$> [x..1] + +instance Arbitrary Node where + arbitrary = sized node + where + node n | n > 0 = oneof $ leaves ++ branches (n `div` 2) + node _ = oneof leaves + branches n = + [ Section <$> arbitrary <*> subnodes n + , InvertedSection <$> arbitrary <*> subnodes n + ] + subnodes = fmap concatTextBlocks . listOf . node + leaves = + [ TextBlock . 
pack <$> listOf1 (elements ['a'..'z']) + , EscapedVar <$> arbitrary + , UnescapedVar <$> arbitrary + -- TODO fix pretty-printing of mustache partials + -- , Partial <$> arbitrary <*> arbitrary + ] + shrink = genericShrink + +concatTextBlocks :: [Node] -> [Node] +concatTextBlocks [] = [] +concatTextBlocks [x] = [x] +concatTextBlocks (TextBlock txt₁ : TextBlock txt₂ : xs) + = concatTextBlocks $ TextBlock (txt₁ <> txt₂) : concatTextBlocks xs +concatTextBlocks (x : xs) = x : concatTextBlocks xs + +instance Arbitrary Template where + arbitrary = do + template <- concatTextBlocks <$> arbitrary + -- templateName <- arbitrary + -- rest <- arbitrary + let templateName = "template" + rest = mempty + pure $ Template + { templateActual = templateName + , templateCache = rest & at templateName ?~ template + } + shrink (Template actual cache) = + let Just tpl = cache ^. at actual + in do + cache' <- shrink cache + tpl' <- shrink tpl + actual' <- shrink actual + pure $ Template + { templateActual = actual' + , templateCache = cache' & at actual' ?~ tpl' + } + +instance CoArbitrary Template where + coarbitrary = coarbitrary . ppTemplate + +instance Function Template where + function = functionMap ppTemplate parseTemplatePartial + where + parseTemplatePartial txt + = compileMustacheText "template" txt ^?! _Right + +ppNode :: Map PName [Node] -> Node -> Text +ppNode _ (TextBlock txt) = txt +ppNode _ (EscapedVar k) = "{{" <> showKey k <> "}}" +ppNode ctx (Section k body) = + let sk = showKey k + in "{{#" <> sk <> "}}" <> foldMap (ppNode ctx) body <> "{{/" <> sk <> "}}" +ppNode _ (UnescapedVar k) = "{{{" <> showKey k <> "}}}" +ppNode ctx (InvertedSection k body) = + let sk = showKey k + in "{{^" <> sk <> "}}" <> foldMap (ppNode ctx) body <> "{{/" <> sk <> "}}" +ppNode _ (Partial n _) = "{{> " <> unPName n <> "}}" + +ppTemplate :: Template -> Text +ppTemplate (Template actual cache) = + case cache ^. at actual of + Nothing -> error "Template not found?" + Just nodes -> foldMap (ppNode cache) nodes + +instance ToJSON Template where + toJSON = String . ppTemplate + +instance FromJSON Template where + parseJSON + = withText "Template" + $ either (fail . errorBundlePretty) pure + . 
compileMustacheText "template" + +deriving anyclass instance NFData Node +deriving anyclass instance NFData Template + +instance FromJSON Color where + parseJSON (String "black") = pure black + parseJSON (String "red") = pure red + parseJSON (String "green") = pure green + parseJSON (String "yellow") = pure yellow + parseJSON (String "blue") = pure blue + parseJSON (String "magenta") = pure magenta + parseJSON (String "cyan") = pure cyan + parseJSON (String "white") = pure white + parseJSON (String "brightBlack") = pure brightBlack + parseJSON (String "brightRed") = pure brightRed + parseJSON (String "brightGreen") = pure brightGreen + parseJSON (String "brightYellow") = pure brightYellow + parseJSON (String "brightBlue") = pure brightBlue + parseJSON (String "brightMagenta") = pure brightMagenta + parseJSON (String "brightCyan") = pure brightCyan + parseJSON (String "brightWhite") = pure brightWhite + parseJSON n@(Number _) = Color240 <$> parseJSON n + parseJSON x = typeMismatch "Color" x + +instance ToJSON Color where + toJSON color + | color == black = "black" + | color == red = "red" + | color == green = "green" + | color == yellow = "yellow" + | color == blue = "blue" + | color == magenta = "magenta" + | color == cyan = "cyan" + | color == white = "white" + | color == brightBlack = "brightBlack" + | color == brightRed = "brightRed" + | color == brightGreen = "brightGreen" + | color == brightYellow = "brightYellow" + | color == brightBlue = "brightBlue" + | color == brightMagenta = "brightMagenta" + | color == brightCyan = "brightCyan" + | color == brightWhite = "brightWhite" + | Color240 num <- color = toJSON num + | otherwise = error $ "unimplemented: " <> show color + +instance (Eq a, Show a, Read a, FromJSON a) => FromJSON (MaybeDefault a) where + parseJSON Null = pure Default + parseJSON (String "keepCurrent") = pure KeepCurrent + parseJSON x = SetTo <$> parseJSON x + +instance ToJSON a => ToJSON (MaybeDefault a) where + toJSON Default = Null + toJSON KeepCurrent = String "keepCurrent" + toJSON (SetTo x) = toJSON x + +-------------------------------------------------------------------------------- + +instance Arbitrary Color where + arbitrary = oneof [ Color240 <$> choose (0, 239) + , ISOColor <$> choose (0, 15) + ] + +deriving anyclass instance CoArbitrary Color +deriving anyclass instance Function Color + +instance (Eq a, Show a, Read a, Arbitrary a) => Arbitrary (MaybeDefault a) where + arbitrary = oneof [ pure Default + , pure KeepCurrent + , SetTo <$> arbitrary + ] + +instance CoArbitrary a => CoArbitrary (MaybeDefault a) where + coarbitrary Default = variant @Int 1 + coarbitrary KeepCurrent = variant @Int 2 + coarbitrary (SetTo x) = variant @Int 3 . 
coarbitrary x + +instance (Eq a, Show a, Read a, Function a) => Function (MaybeDefault a) where + function = functionShow + +instance Arbitrary Attr where + arbitrary = do + attrStyle <- arbitrary + attrForeColor <- arbitrary + attrBackColor <- arbitrary + attrURL <- arbitrary + pure Attr {..} + +deriving anyclass instance CoArbitrary Attr +deriving anyclass instance Function Attr + +instance ToJSON Attr where + toJSON Attr{..} = object + [ "style" .= maybeDefaultToJSONWith styleToJSON attrStyle + , "foreground" .= attrForeColor + , "background" .= attrBackColor + , "url" .= attrURL + ] + where + maybeDefaultToJSONWith _ Default = Null + maybeDefaultToJSONWith _ KeepCurrent = String "keepCurrent" + maybeDefaultToJSONWith tj (SetTo x) = tj x + styleToJSON style + | style == standout = "standout" + | style == underline = "underline" + | style == reverseVideo = "reverseVideo" + | style == blink = "blink" + | style == dim = "dim" + | style == bold = "bold" + | style == italic = "italic" + | otherwise = toJSON style + +instance FromJSON Attr where + parseJSON = withObject "Attr" $ \obj -> do + attrStyle <- parseStyle =<< obj .:? "style" .!= Default + attrForeColor <- obj .:? "foreground" .!= Default + attrBackColor <- obj .:? "background" .!= Default + attrURL <- obj .:? "url" .!= Default + pure Attr{..} + + where + parseStyle (SetTo (String "standout")) = pure (SetTo standout) + parseStyle (SetTo (String "underline")) = pure (SetTo underline) + parseStyle (SetTo (String "reverseVideo")) = pure (SetTo reverseVideo) + parseStyle (SetTo (String "blink")) = pure (SetTo blink) + parseStyle (SetTo (String "dim")) = pure (SetTo dim) + parseStyle (SetTo (String "bold")) = pure (SetTo bold) + parseStyle (SetTo (String "italic")) = pure (SetTo italic) + parseStyle (SetTo n@(Number _)) = SetTo <$> parseJSON n + parseStyle (SetTo v) = typeMismatch "Style" v + parseStyle Default = pure Default + parseStyle KeepCurrent = pure KeepCurrent + +deriving stock instance Ord Color +deriving stock instance Ord a => Ord (MaybeDefault a) +deriving stock instance Ord Attr + +-------------------------------------------------------------------------------- + +instance NFData a => NFData (NonNull a) where + rnf xs = xs `seq` toNullable xs `deepseq` () + +instance forall t name. (NFData t, Monoid t, NFData name) + => NFData (Editor t name) where + rnf ed = getName @_ @name ed `deepseq` getEditContents ed `deepseq` () + +deriving via (ReadShowJSON StdGen) instance ToJSON StdGen +deriving via (ReadShowJSON StdGen) instance FromJSON StdGen + +-------------------------------------------------------------------------------- + +instance CoArbitrary a => CoArbitrary (NonNull a) where + coarbitrary = coarbitrary . toNullable + +instance (MonoFoldable a, Function a) => Function (NonNull a) where + function = functionMap toNullable $ fromMaybe (error "null") . fromNullable + +instance (Arbitrary t, Arbitrary n, GenericTextZipper t) + => Arbitrary (Editor t n) where + arbitrary = editor <$> arbitrary <*> arbitrary <*> arbitrary + +instance forall t n. (CoArbitrary t, CoArbitrary n, Monoid t) + => CoArbitrary (Editor t n) where + coarbitrary ed = coarbitrary (getName @_ @n ed, getEditContents ed) + +instance CoArbitrary StdGen where + coarbitrary = coarbitrary . 
show + +-------------------------------------------------------------------------------- + +deriving newtype instance (Arbitrary s, CoArbitrary (m (a, s))) + => CoArbitrary (StateT s m a) + +-------------------------------------------------------------------------------- + +deriving via (GenericArbitrary (V2 a)) instance Arbitrary a => Arbitrary (V2 a) +instance CoArbitrary a => CoArbitrary (V2 a) +instance Function a => Function (V2 a) diff --git a/users/glittershark/xanthous/src/Xanthous/Prelude.hs b/users/glittershark/xanthous/src/Xanthous/Prelude.hs new file mode 100644 index 000000000000..9bec777de7b1 --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Prelude.hs @@ -0,0 +1,47 @@ +-------------------------------------------------------------------------------- +module Xanthous.Prelude + ( module ClassyPrelude + , Type + , Constraint + , module GHC.TypeLits + , module Control.Lens + , module Data.Void + , module Control.Comonad + , module Data.Witherable + , fail + + , (&!) + + -- * Classy-Prelude addons + , ninsertSet + , ndeleteSet + , toVector + ) where +-------------------------------------------------------------------------------- +import ClassyPrelude hiding + ( return, (<|), unsnoc, uncons, cons, snoc, index, (<.>), Index, say + , catMaybes, filter, mapMaybe, hashNub, ordNub + ) +import Data.Kind +import GHC.TypeLits hiding (Text) +import Control.Lens hiding (levels, Level) +import Data.Void +import Control.Comonad +import Data.Witherable +import Control.Monad.Fail (fail) +-------------------------------------------------------------------------------- + +ninsertSet + :: (IsSet set, MonoPointed set) + => Element set -> NonNull set -> NonNull set +ninsertSet x xs = impureNonNull $ opoint x `union` toNullable xs + +ndeleteSet :: IsSet b => Element b -> NonNull b -> b +ndeleteSet x = deleteSet x . toNullable + +toVector :: (MonoFoldable (f a), Element (f a) ~ a) => f a -> Vector a +toVector = fromList . toList + +infixl 1 &! +(&!) :: a -> (a -> b) -> b +(&!) = flip ($!) diff --git a/users/glittershark/xanthous/src/Xanthous/Random.hs b/users/glittershark/xanthous/src/Xanthous/Random.hs new file mode 100644 index 000000000000..6d34109df7f8 --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Random.hs @@ -0,0 +1,118 @@ +-------------------------------------------------------------------------------- +{-# LANGUAGE UndecidableInstances #-} +{-# OPTIONS_GHC -fno-warn-orphans #-} +-------------------------------------------------------------------------------- +module Xanthous.Random + ( Choose(..) + , ChooseElement(..) + , Weighted(..) 
+ , evenlyWeighted + , weightedBy + , subRand + , chance + , chooseSubset + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude +-------------------------------------------------------------------------------- +import Data.List.NonEmpty (NonEmpty(..)) +import Control.Monad.Random.Class (MonadRandom(getRandomR, getRandom)) +import Control.Monad.Random (Rand, evalRand, mkStdGen, StdGen) +import Data.Functor.Compose +import Data.Random.Shuffle.Weighted +import Data.Random.Distribution +import Data.Random.Distribution.Uniform +import Data.Random.Distribution.Uniform.Exclusive +import Data.Random.Sample +import qualified Data.Random.Source as DRS +-------------------------------------------------------------------------------- + +instance {-# INCOHERENT #-} (Monad m, MonadRandom m) => DRS.MonadRandom m where + getRandomWord8 = getRandom + getRandomWord16 = getRandom + getRandomWord32 = getRandom + getRandomWord64 = getRandom + getRandomDouble = getRandom + getRandomNByteInteger n = getRandomR (0, 256 ^ n) + +class Choose a where + type RandomResult a + choose :: MonadRandom m => a -> m (RandomResult a) + +newtype ChooseElement a = ChooseElement a + +instance MonoFoldable a => Choose (ChooseElement a) where + type RandomResult (ChooseElement a) = Maybe (Element a) + choose (ChooseElement xs) = do + chosenIdx <- getRandomR (0, olength xs - 1) + let pick _ (Just x) = Just x + pick (x, i) Nothing + | i == chosenIdx = Just x + | otherwise = Nothing + pure $ ofoldr pick Nothing $ zip (toList xs) [0..] + +instance MonoFoldable a => Choose (NonNull a) where + type RandomResult (NonNull a) = Element a + choose + = fmap (fromMaybe (error "unreachable")) -- why not lol + . choose + . ChooseElement + . toNullable + +instance Choose (NonEmpty a) where + type RandomResult (NonEmpty a) = a + choose = choose . fromNonEmpty @[_] + +instance Choose (a, a) where + type RandomResult (a, a) = a + choose (x, y) = choose (x :| [y]) + +newtype Weighted w t a = Weighted (t (w, a)) + deriving (Functor, Foldable) via (t `Compose` (,) w) + +instance Traversable t => Traversable (Weighted w t) where + traverse f (Weighted twa) = Weighted <$> (traverse . traverse) f twa + +evenlyWeighted :: [a] -> Weighted Int [] a +evenlyWeighted = Weighted . itoList + +-- | Weight the elements of some functor by a function. Larger values of 'w' per +-- its 'Ord' instance will be more likely to be generated +weightedBy :: Functor t => (a -> w) -> t a -> Weighted w t a +weightedBy weighting xs = Weighted $ (weighting &&& id) <$> xs + +instance (Num w, Ord w, Distribution Uniform w, Excludable w) => Choose (Weighted w [] a) where + type RandomResult (Weighted w [] a) = Maybe a + choose (Weighted ws) = sample $ headMay <$> weightedSample 1 ws + +instance (Num w, Ord w, Distribution Uniform w, Excludable w) => Choose (Weighted w NonEmpty a) where + type RandomResult (Weighted w NonEmpty a) = a + choose (Weighted ws) = + sample + $ fromMaybe (error "unreachable") . headMay + <$> weightedSample 1 (toList ws) + +subRand :: MonadRandom m => Rand StdGen a -> m a +subRand sub = evalRand sub . 
mkStdGen <$> getRandom + +-- | Has a @n@ chance of returning 'True' +-- +-- eg, chance 0.5 will return 'True' half the time +chance + :: (Num w, Ord w, Distribution Uniform w, Excludable w, MonadRandom m) + => w + -> m Bool +chance n = choose $ weightedBy (bool 1 (n * 2)) bools + +-- | Choose a random subset of *about* @w@ of the elements of the given +-- 'Witherable' structure +chooseSubset :: ( Num w, Ord w, Distribution Uniform w, Excludable w + , Witherable t + , MonadRandom m + ) => w -> t a -> m (t a) +chooseSubset = filterA . const . chance + +-------------------------------------------------------------------------------- + +bools :: NonEmpty Bool +bools = True :| [False] diff --git a/users/glittershark/xanthous/src/Xanthous/Util.hs b/users/glittershark/xanthous/src/Xanthous/Util.hs new file mode 100644 index 000000000000..524ad4819dac --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Util.hs @@ -0,0 +1,252 @@ +{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE AllowAmbiguousTypes #-} +{-# LANGUAGE QuantifiedConstraints #-} +-------------------------------------------------------------------------------- +module Xanthous.Util + ( EqEqProp(..) + , EqProp(..) + , foldlMapM + , foldlMapM' + , between + + , appendVia + + -- * Foldable + -- ** Uniqueness + -- *** Predicates on uniqueness + , isUniqueOf + , isUnique + -- *** Removing all duplicate elements in n * log n time + , uniqueOf + , unique + -- *** Removing sequentially duplicate elements in linear time + , uniqOf + , uniq + -- ** Bag sequence algorithms + , takeWhileInclusive + , smallestNotIn + , removeVectorIndex + , maximum1 + , minimum1 + + -- * Combinators + , times, times_ + + -- * Type-level programming utils + , KnownBool(..) + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude hiding (foldr) +-------------------------------------------------------------------------------- +import Test.QuickCheck.Checkers +import Data.Foldable (foldr) +import Data.Monoid +import Data.Proxy +import qualified Data.Vector as V +import Data.Semigroup (Max(..), Min(..)) +import Data.Semigroup.Foldable +-------------------------------------------------------------------------------- + +newtype EqEqProp a = EqEqProp a + deriving newtype Eq + +instance Eq a => EqProp (EqEqProp a) where + (=-=) = eq + +foldlMapM :: forall g b a m. (Foldable g, Monoid b, Applicative m) => (a -> m b) -> g a -> m b +foldlMapM f = foldr f' (pure mempty) + where + f' :: a -> m b -> m b + f' x = liftA2 mappend (f x) + +-- Strict in the monoidal accumulator. For monads strict +-- in the left argument of bind, this will run in constant +-- space. +foldlMapM' :: forall g b a m. (Foldable g, Monoid b, Monad m) => (a -> m b) -> g a -> m b +foldlMapM' f xs = foldr f' pure xs mempty + where + f' :: a -> (b -> m b) -> b -> m b + f' x k bl = do + br <- f x + let !b = mappend bl br + k b + +between + :: Ord a + => a -- ^ lower bound + -> a -- ^ upper bound + -> a -- ^ scrutinee + -> Bool +between lower upper x = x >= lower && x <= upper + +-- | +-- >>> appendVia Sum 1 2 +-- 3 +appendVia :: (Rewrapping s t, Semigroup s) => (Unwrapped s -> s) -> Unwrapped s -> Unwrapped s -> Unwrapped s +appendVia wrap x y = op wrap $ wrap x <> wrap y + +-------------------------------------------------------------------------------- + +-- | Returns True if the targets of the given 'Fold' are unique per the 'Ord' instance for @a@ +-- +-- >>> isUniqueOf (folded . 
_1) ([(1, 2), (2, 2), (3, 2)] :: [(Int, Int)]) +-- True +-- +-- >>> isUniqueOf (folded . _2) ([(1, 2), (2, 2), (3, 2)] :: [(Int, Int)]) +-- False +-- +-- @ +-- 'isUniqueOf' :: Ord a => 'Getter' s a -> s -> 'Bool' +-- 'isUniqueOf' :: Ord a => 'Fold' s a -> s -> 'Bool' +-- 'isUniqueOf' :: Ord a => 'Lens'' s a -> s -> 'Bool' +-- 'isUniqueOf' :: Ord a => 'Iso'' s a -> s -> 'Bool' +-- 'isUniqueOf' :: Ord a => 'Traversal'' s a -> s -> 'Bool' +-- 'isUniqueOf' :: Ord a => 'Prism'' s a -> s -> 'Bool' +-- @ +isUniqueOf :: Ord a => Getting (Endo (Set a, Bool)) s a -> s -> Bool +isUniqueOf aFold = orOf _2 . foldrOf aFold rejectUnique (mempty, True) + where + rejectUnique x (seen, acc) + | seen ^. contains x = (seen, False) + | otherwise = (seen & contains x .~ True, acc) + +-- | Returns true if the given 'Foldable' container contains only unique +-- elements, as determined by the 'Ord' instance for @a@ +-- +-- >>> isUnique ([3, 1, 2] :: [Int]) +-- True +-- +-- >>> isUnique ([1, 1, 2, 2, 3, 1] :: [Int]) +-- False +isUnique :: (Foldable f, Ord a) => f a -> Bool +isUnique = isUniqueOf folded + + +-- | O(n * log n). Returns a monoidal, 'Cons'able container (a list, a Set, +-- etc.) consisting of the unique (per the 'Ord' instance for @a@) targets of +-- the given 'Fold' +-- +-- >>> uniqueOf (folded . _2) ([(1, 2), (2, 2), (3, 2), (4, 3)] :: [(Int, Int)]) :: [Int] +-- [2,3] +-- +-- @ +-- 'uniqueOf' :: Ord a => 'Getter' s a -> s -> [a] +-- 'uniqueOf' :: Ord a => 'Fold' s a -> s -> [a] +-- 'uniqueOf' :: Ord a => 'Lens'' s a -> s -> [a] +-- 'uniqueOf' :: Ord a => 'Iso'' s a -> s -> [a] +-- 'uniqueOf' :: Ord a => 'Traversal'' s a -> s -> [a] +-- 'uniqueOf' :: Ord a => 'Prism'' s a -> s -> [a] +-- @ +uniqueOf + :: (Monoid c, Ord w, Cons c c w w) => Getting (Endo (Set w, c)) a w -> a -> c +uniqueOf aFold = snd . foldrOf aFold rejectUnique (mempty, mempty) + where + rejectUnique x (seen, acc) + | seen ^. contains x = (seen, acc) + | otherwise = (seen & contains x .~ True, cons x acc) + +-- | Returns a monoidal, 'Cons'able container (a list, a Set, etc.) consisting +-- of the unique (per the 'Ord' instance for @a@) contents of the given +-- 'Foldable' container +-- +-- >>> unique [1, 1, 2, 2, 3, 1] :: [Int] +-- [2,3,1] + +-- >>> unique [1, 1, 2, 2, 3, 1] :: Set Int +-- fromList [3,2,1] +unique :: (Foldable f, Cons c c a a, Ord a, Monoid c) => f a -> c +unique = uniqueOf folded + +-------------------------------------------------------------------------------- + +-- | O(n). Returns a monoidal, 'Cons'able container (a list, a Vector, etc.) +-- consisting of the targets of the given 'Fold' with sequential duplicate +-- elements removed +-- +-- This function (sorry for the confusing name) differs from 'uniqueOf' in that +-- it only compares /sequentially/ duplicate elements (and thus operates in +-- linear time). +-- cf 'Data.Vector.uniq' and POSIX @uniq@ for the name +-- +-- >>> uniqOf (folded . _2) ([(1, 2), (2, 2), (3, 1), (4, 2)] :: [(Int, Int)]) :: [Int] +-- [2,1,2] +-- +-- @ +-- 'uniqOf' :: Eq a => 'Getter' s a -> s -> [a] +-- 'uniqOf' :: Eq a => 'Fold' s a -> s -> [a] +-- 'uniqOf' :: Eq a => 'Lens'' s a -> s -> [a] +-- 'uniqOf' :: Eq a => 'Iso'' s a -> s -> [a] +-- 'uniqOf' :: Eq a => 'Traversal'' s a -> s -> [a] +-- 'uniqOf' :: Eq a => 'Prism'' s a -> s -> [a] +-- @ +uniqOf :: (Monoid c, Cons c c w w, Eq w) => Getting (Endo (Maybe w, c)) a w -> a -> c +uniqOf aFold = snd . 
foldrOf aFold rejectSeen (Nothing, mempty) + where + rejectSeen x (Nothing, acc) = (Just x, x <| acc) + rejectSeen x tup@(Just a, acc) + | x == a = tup + | otherwise = (Just x, x <| acc) + +-- | O(n). Returns a monoidal, 'Cons'able container (a list, a Vector, etc.) +-- consisting of the targets of the given 'Foldable' container with sequential +-- duplicate elements removed +-- +-- This function (sorry for the confusing name) differs from 'unique' in that +-- it only compares /sequentially/ unique elements (and thus operates in linear +-- time). +-- cf 'Data.Vector.uniq' and POSIX @uniq@ for the name +-- +-- >>> uniq [1, 1, 1, 2, 2, 2, 3, 3, 1] :: [Int] +-- [1,2,3,1] +-- +-- >>> uniq [1, 1, 1, 2, 2, 2, 3, 3, 1] :: Vector Int +-- [1,2,3,1] +-- +uniq :: (Foldable f, Eq a, Cons c c a a, Monoid c) => f a -> c +uniq = uniqOf folded + +-- | Like 'takeWhile', but inclusive +takeWhileInclusive :: (a -> Bool) -> [a] -> [a] +takeWhileInclusive _ [] = [] +takeWhileInclusive p (x:xs) = x : if p x then takeWhileInclusive p xs else [] + +-- | Returns the smallest value not in a list +smallestNotIn :: (Ord a, Bounded a, Enum a) => [a] -> a +smallestNotIn xs = case uniq $ sort xs of + [] -> minBound + xs'@(x : _) + | x > minBound -> minBound + | otherwise + -> snd . headEx . filter (uncurry (/=)) $ zip (xs' ++ [minBound]) [minBound..] + +-- | Remove the element at the given index, if any, from the given vector +removeVectorIndex :: Int -> Vector a -> Vector a +removeVectorIndex idx vect = + let (before, after) = V.splitAt idx vect + in before <> fromMaybe Empty (tailMay after) + +maximum1 :: (Ord a, Foldable1 f) => f a -> a +maximum1 = getMax . foldMap1 Max + +minimum1 :: (Ord a, Foldable1 f) => f a -> a +minimum1 = getMin . foldMap1 Min + +times :: (Applicative f, Num n, Enum n) => n -> (n -> f b) -> f [b] +times n f = traverse f [1..n] + +times_ :: (Applicative f, Num n, Enum n) => n -> f a -> f [a] +times_ n fa = times n (const fa) + +-------------------------------------------------------------------------------- + +-- | This class gives a boolean associated with a type-level bool, a'la +-- 'KnownSymbol', 'KnownNat' etc. +class KnownBool (bool :: Bool) where + boolVal' :: forall proxy. proxy bool -> Bool + boolVal' _ = boolVal @bool + + boolVal :: Bool + boolVal = boolVal' $ Proxy @bool + +instance KnownBool 'True where boolVal = True +instance KnownBool 'False where boolVal = False diff --git a/users/glittershark/xanthous/src/Xanthous/Util/Comonad.hs b/users/glittershark/xanthous/src/Xanthous/Util/Comonad.hs new file mode 100644 index 000000000000..9e158cc8e2d4 --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Util/Comonad.hs @@ -0,0 +1,24 @@ +-------------------------------------------------------------------------------- +module Xanthous.Util.Comonad + ( -- * Store comonad utils + replace + , current + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude +-------------------------------------------------------------------------------- +import Control.Comonad.Store.Class +-------------------------------------------------------------------------------- + +-- | Replace the current position of a store comonad with a new value by +-- comparing positions +replace :: (Eq i, ComonadStore i w) => w a -> a -> w a +replace w x = w =>> \w' -> if pos w' == pos w then x else extract w' +{-# INLINE replace #-} + +-- | Lens into the current position of a store comonad. 
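+--
+-- A minimal usage sketch (assuming the 'store' constructor from
+-- "Control.Comonad.Store"): for @store f s@, viewing 'current' applies @f@
+-- to the focused position @s@, and setting it overrides the value only at
+-- that position:
+--
+-- >>> import Control.Comonad.Store (store)
+-- >>> view current (store (*2) 5)
+-- 10
+-- >>> view current (set current 42 (store (*2) 5))
+-- 42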
+-- +-- current = lens extract replace +current :: (Eq i, ComonadStore i w) => Lens' (w a) a +current = lens extract replace +{-# INLINE current #-} diff --git a/users/glittershark/xanthous/src/Xanthous/Util/Graph.hs b/users/glittershark/xanthous/src/Xanthous/Util/Graph.hs new file mode 100644 index 000000000000..8e5c04f4bfa9 --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Util/Graph.hs @@ -0,0 +1,33 @@ +-------------------------------------------------------------------------------- +module Xanthous.Util.Graph where +-------------------------------------------------------------------------------- +import Xanthous.Prelude +-------------------------------------------------------------------------------- +import Data.Graph.Inductive.Query.MST (msTree) +import qualified Data.Graph.Inductive.Graph as Graph +import Data.Graph.Inductive.Graph +import Data.Graph.Inductive.Basic (undir) +import Data.Set (isSubsetOf) +-------------------------------------------------------------------------------- + +mstSubGraph + :: forall gr node edge. (DynGraph gr, Real edge, Show edge) + => gr node edge -> gr node edge +mstSubGraph graph = insEdges mstEdges . insNodes (labNodes graph) $ Graph.empty + where + mstEdges = ordNub $ do + LP path <- msTree $ undir graph + case path of + [] -> [] + [_] -> [] + ((n₂, edgeWeight) : (n₁, _) : _) -> + pure (n₁, n₂, edgeWeight) + +isSubGraphOf + :: (Graph gr1, Graph gr2, Ord node, Ord edge) + => gr1 node edge + -> gr2 node edge + -> Bool +isSubGraphOf graph₁ graph₂ + = setFromList (labNodes graph₁) `isSubsetOf` setFromList (labNodes graph₂) + && setFromList (labEdges graph₁) `isSubsetOf` setFromList (labEdges graph₂) diff --git a/users/glittershark/xanthous/src/Xanthous/Util/Graphics.hs b/users/glittershark/xanthous/src/Xanthous/Util/Graphics.hs new file mode 100644 index 000000000000..353fcfc59b14 --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Util/Graphics.hs @@ -0,0 +1,178 @@ +{-# LANGUAGE TemplateHaskell #-} +-- | Graphics algorithms and utils for rendering things in 2D space +-------------------------------------------------------------------------------- +module Xanthous.Util.Graphics + ( circle + , filledCircle + , line + , straightLine + , delaunay + + -- * Debugging and testing tools + , renderBooleanGraphics + , showBooleanGraphics + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude +-------------------------------------------------------------------------------- +-- https://github.com/noinia/hgeometry/issues/28 +-- import qualified Algorithms.Geometry.DelaunayTriangulation.DivideAndConquer +-- as Geometry +import qualified Algorithms.Geometry.DelaunayTriangulation.Naive + as Geometry +import qualified Algorithms.Geometry.DelaunayTriangulation.Types as Geometry +import Control.Monad.State (execState, State) +import qualified Data.Geometry.Point as Geometry +import Data.Ext ((:+)(..)) +import Data.List (unfoldr) +import Data.List.NonEmpty (NonEmpty((:|))) +import qualified Data.List.NonEmpty as NE +import Data.Ix (Ix) +import Linear.V2 +-------------------------------------------------------------------------------- + + +-- | Generate a circle centered at the given point and with the given radius +-- using the <midpoint circle algorithm +-- https://en.wikipedia.org/wiki/Midpoint_circle_algorithm>. 
+-- +-- Code taken from <https://rosettacode.org/wiki/Bitmap/Midpoint_circle_algorithm#Haskell> +circle :: (Num i, Ord i) + => V2 i -- ^ center + -> i -- ^ radius + -> [V2 i] +circle (V2 x₀ y₀) radius + -- Four initial points, plus the generated points + = V2 x₀ (y₀ + radius) + : V2 x₀ (y₀ - radius) + : V2 (x₀ + radius) y₀ + : V2 (x₀ - radius) y₀ + : points + where + -- Creates the (x, y) octet offsets, then maps them to absolute points in all octets. + points = concatMap generatePoints $ unfoldr step initialValues + + generatePoints (V2 x y) + = [ V2 (x₀ `xop` x') (y₀ `yop` y') + | (x', y') <- [(x, y), (y, x)] + , xop <- [(+), (-)] + , yop <- [(+), (-)] + ] + + initialValues = (1 - radius, 1, (-2) * radius, 0, radius) + + step (f, ddf_x, ddf_y, x, y) + | x >= y = Nothing + | otherwise = Just (V2 x' y', (f', ddf_x', ddf_y', x', y')) + where + (f', ddf_y', y') | f >= 0 = (f + ddf_y' + ddf_x', ddf_y + 2, y - 1) + | otherwise = (f + ddf_x, ddf_y, y) + ddf_x' = ddf_x + 2 + x' = x + 1 + + +data FillState i + = FillState + { _inCircle :: Bool + , _result :: NonEmpty (V2 i) + } +makeLenses ''FillState + +runFillState :: NonEmpty (V2 i) -> State (FillState i) a -> [V2 i] +runFillState circumference s + = toList + . view result + . execState s + $ FillState False circumference + +-- | Generate a *filled* circle centered at the given point and with the given +-- radius by filling a circle generated with 'circle' +filledCircle :: (Num i, Integral i, Ix i) + => V2 i -- ^ center + -> i -- ^ radius + -> [V2 i] +filledCircle center radius = + case NE.nonEmpty (circle center radius) of + Nothing -> [] + Just circumference -> runFillState circumference $ + -- the first and last lines of all circles are solid, so the whole "in the + -- circle, out of the circle" thing doesn't work... but that's fine since + -- we don't need to fill them. So just skip them + for_ [succ minX..pred maxX] $ \x -> + for_ [minY..maxY] $ \y -> do + let pt = V2 x y + next = V2 x $ succ y + whenM (use inCircle) $ result %= NE.cons pt + + when (pt `elem` circumference && next `notElem` circumference) + $ inCircle %= not + + where + (V2 minX minY, V2 maxX maxY) = minmaxes circumference + +-- | Draw a line between two points using Bresenham's line drawing algorithm +-- +-- Code taken from <https://wiki.haskell.org/Bresenham%27s_line_drawing_algorithm> +line :: (Num i, Ord i) => V2 i -> V2 i -> [V2 i] +line pa@(V2 xa ya) pb@(V2 xb yb) + = (if maySwitch pa < maySwitch pb then id else reverse) points + where + points = map maySwitch . unfoldr go $ (x₁, y₁, 0) + steep = abs (yb - ya) > abs (xb - xa) + maySwitch = if steep then view _yx else id + [V2 x₁ y₁, V2 x₂ y₂] = sort [maySwitch pa, maySwitch pb] + δx = x₂ - x₁ + δy = abs (y₂ - y₁) + ystep = if y₁ < y₂ then 1 else -1 + go (xTemp, yTemp, err) + | xTemp > x₂ = Nothing + | otherwise = Just ((V2 xTemp yTemp), (xTemp + 1, newY, newError)) + where + tempError = err + δy + (newY, newError) = if (2 * tempError) >= δx + then (yTemp + ystep, tempError - δx) + else (yTemp, tempError) +{-# SPECIALIZE line :: V2 Int -> V2 Int -> [V2 Int] #-} +{-# SPECIALIZE line :: V2 Word -> V2 Word -> [V2 Word] #-} + +straightLine :: (Num i, Ord i) => V2 i -> V2 i -> [V2 i] +straightLine pa@(V2 xa _) pb@(V2 _ yb) = line pa midpoint ++ line midpoint pb + where midpoint = V2 xa yb + + +delaunay + :: (Ord n, Fractional n) + => NonEmpty (V2 n, p) + -> [((V2 n, p), (V2 n, p))] +delaunay + = map (over both fromPoint) + . Geometry.triangulationEdges + . Geometry.delaunayTriangulation + . 
map toPoint + where + toPoint (V2 px py, pid) = Geometry.Point2 px py :+ pid + fromPoint (Geometry.Point2 px py :+ pid) = (V2 px py, pid) + +-------------------------------------------------------------------------------- + +renderBooleanGraphics :: forall i. (Num i, Ord i, Enum i) => [V2 i] -> String +renderBooleanGraphics [] = "" +renderBooleanGraphics (pt : pts') = intercalate "\n" rows + where + rows = row <$> [minX..maxX] + row x = [minY..maxY] <&> \y -> if V2 x y `member` ptSet then 'X' else ' ' + (V2 minX minY, V2 maxX maxY) = minmaxes pts + pts = pt :| pts' + ptSet :: Set (V2 i) + ptSet = setFromList $ toList pts + +showBooleanGraphics :: forall i. (Num i, Ord i, Enum i) => [V2 i] -> IO () +showBooleanGraphics = putStrLn . pack . renderBooleanGraphics + +minmaxes :: forall i. (Ord i) => NonEmpty (V2 i) -> (V2 i, V2 i) +minmaxes xs = + ( V2 (minimum1Of (traverse1 . _x) xs) + (minimum1Of (traverse1 . _y) xs) + , V2 (maximum1Of (traverse1 . _x) xs) + (maximum1Of (traverse1 . _y) xs) + ) diff --git a/users/glittershark/xanthous/src/Xanthous/Util/Inflection.hs b/users/glittershark/xanthous/src/Xanthous/Util/Inflection.hs new file mode 100644 index 000000000000..724f2339dd21 --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Util/Inflection.hs @@ -0,0 +1,14 @@ + +module Xanthous.Util.Inflection + ( toSentence + ) where + +import Xanthous.Prelude + +toSentence :: (MonoFoldable mono, Element mono ~ Text) => mono -> Text +toSentence xs = case reverse . toList $ xs of + [] -> "" + [x] -> x + [b, a] -> a <> " and " <> b + (final : butlast) -> + intercalate ", " (reverse butlast) <> ", and " <> final diff --git a/users/glittershark/xanthous/src/Xanthous/Util/JSON.hs b/users/glittershark/xanthous/src/Xanthous/Util/JSON.hs new file mode 100644 index 000000000000..91d1328e4a10 --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Util/JSON.hs @@ -0,0 +1,19 @@ +-------------------------------------------------------------------------------- +module Xanthous.Util.JSON + ( ReadShowJSON(..) + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude +-------------------------------------------------------------------------------- +import Data.Aeson +-------------------------------------------------------------------------------- + +newtype ReadShowJSON a = ReadShowJSON a + deriving newtype (Read, Show) + +instance Show a => ToJSON (ReadShowJSON a) where + toJSON = toJSON . show + +instance Read a => FromJSON (ReadShowJSON a) where + parseJSON = withText "readable" + $ maybe (fail "Could not read") pure . 
readMay diff --git a/users/glittershark/xanthous/src/Xanthous/Util/Optparse.hs b/users/glittershark/xanthous/src/Xanthous/Util/Optparse.hs new file mode 100644 index 000000000000..dfa65372351d --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Util/Optparse.hs @@ -0,0 +1,21 @@ +-------------------------------------------------------------------------------- +module Xanthous.Util.Optparse + ( readWithGuard + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude +-------------------------------------------------------------------------------- +import qualified Options.Applicative as Opt +-------------------------------------------------------------------------------- + +readWithGuard + :: Read b + => (b -> Bool) + -> (b -> String) + -> Opt.ReadM b +readWithGuard predicate errmsg = do + res <- Opt.auto + unless (predicate res) + $ Opt.readerError + $ errmsg res + pure res diff --git a/users/glittershark/xanthous/src/Xanthous/Util/QuickCheck.hs b/users/glittershark/xanthous/src/Xanthous/Util/QuickCheck.hs new file mode 100644 index 000000000000..be12bc294513 --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/Util/QuickCheck.hs @@ -0,0 +1,42 @@ +{-# LANGUAGE UndecidableInstances #-} +module Xanthous.Util.QuickCheck + ( functionShow + , FunctionShow(..) + , functionJSON + , FunctionJSON(..) + , genericArbitrary + , GenericArbitrary(..) + ) where +-------------------------------------------------------------------------------- +import Xanthous.Prelude +import Test.QuickCheck +import Test.QuickCheck.Function +import Test.QuickCheck.Instances.ByteString () +import Test.QuickCheck.Arbitrary.Generic +import Data.Aeson +import GHC.Generics (Rep) +-------------------------------------------------------------------------------- + +newtype FunctionShow a = FunctionShow a + deriving newtype (Show, Read) + +instance (Show a, Read a) => Function (FunctionShow a) where + function = functionShow + +functionJSON :: (ToJSON a, FromJSON a) => (a -> c) -> a :-> c +functionJSON = functionMap encode (headEx . decode) + +newtype FunctionJSON a = FunctionJSON a + deriving newtype (ToJSON, FromJSON) + +instance (ToJSON a, FromJSON a) => Function (FunctionJSON a) where + function = functionJSON + +-------------------------------------------------------------------------------- + +newtype GenericArbitrary a = GenericArbitrary a + deriving newtype Generic + +instance (Generic a, GArbitrary rep, Rep a ~ rep) + => Arbitrary (GenericArbitrary a) where + arbitrary = genericArbitrary diff --git a/users/glittershark/xanthous/src/Xanthous/messages.yaml b/users/glittershark/xanthous/src/Xanthous/messages.yaml new file mode 100644 index 000000000000..c1835ef2327b --- /dev/null +++ b/users/glittershark/xanthous/src/Xanthous/messages.yaml @@ -0,0 +1,120 @@ +welcome: Welcome to Xanthous, {{characterName}}! It's dangerous out there, why not stay inside? Use hjklybnu to move. +dead: + - You have died... + - You die... + - You perish... + - You have perished... + +generic: + continue: Press enter to continue... + +save: + location: "Enter filename to save to: " + overwrite: "A file named {{filename}} already exists. Would you like to overwrite it? " + +quit: + confirm: Really quit without saving? + +entities: + description: You see here {{entityDescriptions}} + +pickUp: + menu: What would you like to pick up? 
+ pickUp: You pick up the {{item.itemType.name}} + nothingToPickUp: "There's nothing here to pick up" + +cant: + goUp: + - You can't go up here + - There's nothing here that would let you go up + goDown: + - You can't go down here + - There's nothing here that would let you go down + +open: + prompt: Direction to open (hjklybnu.)? + success: "You open the door." + locked: "That door is locked" + nothingToOpen: "There's nothing to open there." + alreadyOpen: "That door is already open." + +close: + prompt: Direction to close (hjklybnu.)? + success: + - You close the door. + - You shut the door. + nothingToClose: "There's nothing to close there." + alreadyClosed: "That door is already closed." + blocked: "The {{entityDescriptions}} {{blockOrBlocks}} the door!" + +look: + prompt: Select a position on the map to describe (use Enter to confirm) + nothing: There's nothing there + +character: + namePrompt: "What's your name? " + +combat: + nothingToAttack: There's nothing to attack there. + menu: Which creature would you like to attack? + fistSelfDamage: + - You hit so hard with your fists you hurt yourself! + - The punch leaves your knuckles bloody! + hit: + fists: + - You punch the {{creature.creatureType.name}} with your bare fists! It hurts. A lot. + - You strike the {{creature.creatureType.name}} with your bare fists! It leaves a bit of a bruise on your knuckles. + generic: + - You hit the {{creature.creatureType.name}}. + - You attack the {{creature.creatureType.name}}. + creatureAttack: + - The {{creature.creatureType.name}} hits you! + - The {{creature.creatureType.name}} attacks you! + killed: + - You kill the {{creature.creatureType.name}}! + - You've killed the {{creature.creatureType.name}}! + +debug: + toggleRevealAll: revealAll now set to {{revealAll}} + +eat: + noFood: + - You have nothing edible. + - You don't have any food. + - You don't have anything to eat. + - You search your pockets for something edible, and come up short. + menuPrompt: What would you like to eat? + eat: You eat the {{item.itemType.name}}. + +read: + prompt: Direction to read (hjklybnu.)? + nothing: "There's nothing there to read" + result: "\"{{message}}\"" + +wield: + nothing: + - You aren't carrying anything you can wield + - You can't wield anything in your backpack + - You can't wield anything currently in your backpack + menu: What would you like to wield? + # TODO: use actual hands + wielded : You wield the {{wieldedItem.itemType.name}} in your right hand. + +drop: + nothing: You aren't carrying anything + menu: What would you like to drop? + # TODO: use actual hands + dropped: + - You drop the {{item.itemType.name}}. + - You drop the {{item.itemType.name}} on the ground. + - You put the {{item.itemType.name}} on the ground. + - You take the {{item.itemType.name}} out of your backpack and put it on the ground. + - You take the {{item.itemType.name}} out of your backpack and drop it on the ground. + +autoMove: + enemyInSight: + - There's a {{firstEntity.creatureType.name}} nearby! +### + +tutorial: + message1: The caves are dark and full of nightmarish creatures - and you are likely to perish without food. Seek out sustenance! You can pick items up with ,. 
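The messages.yaml entries above are mustache templates keyed by nested paths, with `{{...}}` placeholders filled in at runtime; in the game itself they are looked up through Xanthous.Messages (see the `lookupMessage` and template specs below). As a rough, hypothetical sketch of what rendering one of these entries looks like with the `stache` library that the cabal file pulls in (module and function names here are illustrative, not part of the diff):

{-# LANGUAGE OverloadedStrings #-}
-- Hypothetical usage sketch, not part of the commit above. The real code goes
-- through Xanthous.Messages's MessageMap; this only shows the mustache step.
module MessageRenderSketch where

import Data.Aeson (object, (.=))
import Data.Text (Text)
import qualified Data.Text.Lazy as TL
import Text.Mustache (compileMustacheText, renderMustache)

-- | Substitute a character name into the "welcome" template from messages.yaml.
renderWelcome :: Text -> Either String TL.Text
renderWelcome name = do
  tpl <- either (Left . show) Right $
    compileMustacheText "welcome"
      "Welcome to Xanthous, {{characterName}}! It's dangerous out there, why not stay inside?"
  pure $ renderMustache tpl (object ["characterName" .= name])

-- >>> renderWelcome "Taneb"
-- Right "Welcome to Xanthous, Taneb! It's dangerous out there, why not stay inside?"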
diff --git a/users/glittershark/xanthous/test/Spec.hs b/users/glittershark/xanthous/test/Spec.hs new file mode 100644 index 000000000000..f15c393ac917 --- /dev/null +++ b/users/glittershark/xanthous/test/Spec.hs @@ -0,0 +1,47 @@ +-------------------------------------------------------------------------------- +import Test.Prelude +-------------------------------------------------------------------------------- +import qualified Xanthous.Data.EntitiesSpec +import qualified Xanthous.Data.EntityCharSpec +import qualified Xanthous.Data.EntityMap.GraphicsSpec +import qualified Xanthous.Data.EntityMapSpec +import qualified Xanthous.Data.LevelsSpec +import qualified Xanthous.Data.NestedMapSpec +import qualified Xanthous.DataSpec +import qualified Xanthous.Entities.RawsSpec +import qualified Xanthous.GameSpec +import qualified Xanthous.Generators.UtilSpec +import qualified Xanthous.MessageSpec +import qualified Xanthous.Messages.TemplateSpec +import qualified Xanthous.OrphansSpec +import qualified Xanthous.RandomSpec +import qualified Xanthous.Util.GraphSpec +import qualified Xanthous.Util.GraphicsSpec +import qualified Xanthous.Util.InflectionSpec +import qualified Xanthous.UtilSpec +-------------------------------------------------------------------------------- + +main :: IO () +main = defaultMain test + +test :: TestTree +test = testGroup "Xanthous" + [ Xanthous.Data.EntitiesSpec.test + , Xanthous.Data.EntityMap.GraphicsSpec.test + , Xanthous.Data.EntityMapSpec.test + , Xanthous.Data.LevelsSpec.test + , Xanthous.Data.NestedMapSpec.test + , Xanthous.DataSpec.test + , Xanthous.Entities.RawsSpec.test + , Xanthous.GameSpec.test + , Xanthous.Generators.UtilSpec.test + , Xanthous.MessageSpec.test + , Xanthous.Messages.TemplateSpec.test + , Xanthous.OrphansSpec.test + , Xanthous.RandomSpec.test + , Xanthous.Util.GraphSpec.test + , Xanthous.Util.GraphicsSpec.test + , Xanthous.Util.InflectionSpec.test + , Xanthous.UtilSpec.test + , Xanthous.Data.EntityCharSpec.test + ] diff --git a/users/glittershark/xanthous/test/Test/Prelude.hs b/users/glittershark/xanthous/test/Test/Prelude.hs new file mode 100644 index 000000000000..c423796184f7 --- /dev/null +++ b/users/glittershark/xanthous/test/Test/Prelude.hs @@ -0,0 +1,19 @@ +module Test.Prelude + ( module Xanthous.Prelude + , module Test.Tasty + , module Test.Tasty.HUnit + , module Test.Tasty.QuickCheck + , module Test.QuickCheck.Classes + , testBatch + ) where + +import Xanthous.Prelude hiding (assert, elements) +import Test.Tasty +import Test.Tasty.QuickCheck +import Test.Tasty.HUnit +import Test.QuickCheck.Classes +import Test.QuickCheck.Checkers (TestBatch) +import Test.QuickCheck.Instances.ByteString () + +testBatch :: TestBatch -> TestTree +testBatch (name, tests) = testGroup name $ uncurry testProperty <$> tests diff --git a/users/glittershark/xanthous/test/Xanthous/Data/EntitiesSpec.hs b/users/glittershark/xanthous/test/Xanthous/Data/EntitiesSpec.hs new file mode 100644 index 000000000000..e403503743c0 --- /dev/null +++ b/users/glittershark/xanthous/test/Xanthous/Data/EntitiesSpec.hs @@ -0,0 +1,28 @@ +-------------------------------------------------------------------------------- +module Xanthous.Data.EntitiesSpec (main, test) where +-------------------------------------------------------------------------------- +import Test.Prelude +-------------------------------------------------------------------------------- +import qualified Data.Aeson as JSON +-------------------------------------------------------------------------------- +import 
Xanthous.Data.Entities +-------------------------------------------------------------------------------- + +main :: IO () +main = defaultMain test + +test :: TestTree +test = testGroup "Xanthous.Data.Entities" + [ testGroup "Collision" + [ testProperty "JSON round-trip" $ \(c :: Collision) -> + JSON.decode (JSON.encode c) === Just c + , testGroup "JSON encoding examples" + [ testCase "Stop" $ JSON.encode Stop @?= "\"Stop\"" + , testCase "Combat" $ JSON.encode Combat @?= "\"Combat\"" + ] + ] + , testGroup "EntityAttributes" + [ testProperty "JSON round-trip" $ \(ea :: EntityAttributes) -> + JSON.decode (JSON.encode ea) === Just ea + ] + ] diff --git a/users/glittershark/xanthous/test/Xanthous/Data/EntityCharSpec.hs b/users/glittershark/xanthous/test/Xanthous/Data/EntityCharSpec.hs new file mode 100644 index 000000000000..9e8024c9d223 --- /dev/null +++ b/users/glittershark/xanthous/test/Xanthous/Data/EntityCharSpec.hs @@ -0,0 +1,18 @@ +-------------------------------------------------------------------------------- +module Xanthous.Data.EntityCharSpec where +-------------------------------------------------------------------------------- +import Test.Prelude +-------------------------------------------------------------------------------- +import qualified Data.Aeson as JSON +-------------------------------------------------------------------------------- +import Xanthous.Data.EntityChar +-------------------------------------------------------------------------------- + +main :: IO () +main = defaultMain test + +test :: TestTree +test = testGroup "Xanthous.Data.EntityChar" + [ testProperty "JSON round-trip" $ \(ec :: EntityChar) -> + JSON.decode (JSON.encode ec) === Just ec + ] diff --git a/users/glittershark/xanthous/test/Xanthous/Data/EntityMap/GraphicsSpec.hs b/users/glittershark/xanthous/test/Xanthous/Data/EntityMap/GraphicsSpec.hs new file mode 100644 index 000000000000..fd37548ce864 --- /dev/null +++ b/users/glittershark/xanthous/test/Xanthous/Data/EntityMap/GraphicsSpec.hs @@ -0,0 +1,57 @@ +-------------------------------------------------------------------------------- +module Xanthous.Data.EntityMap.GraphicsSpec (main, test) where +-------------------------------------------------------------------------------- +import Test.Prelude +import Data.Aeson +-------------------------------------------------------------------------------- +import Xanthous.Game.State +import Xanthous.Data +import Xanthous.Data.EntityMap +import Xanthous.Data.EntityMap.Graphics +import Xanthous.Entities.Environment (Wall(..)) +-------------------------------------------------------------------------------- + +main :: IO () +main = defaultMain test + +test :: TestTree +test = testGroup "Xanthous.Data.EntityMap.Graphics" + [ testGroup "visiblePositions" + [ testProperty "one step in each cardinal direction is always visible" + $ \pos (Cardinal dir) (Positive r) (wallPositions :: Set Position)-> + pos `notMember` wallPositions ==> + let em = review _EntityMap . map (, Wall) . toList $ wallPositions + em' = em & atPosition (move dir pos) %~ (Wall <|) + poss = visiblePositions pos r em' + in counterexample ("visiblePositions: " <> show poss) + $ move dir pos `member` poss + , testGroup "bugs" + [ testCase "non-contiguous bug 1" + $ let charPos = Position 20 20 + gormlakPos = Position 17 19 + em = insertAt gormlakPos TestEntity + . insertAt charPos TestEntity + $ mempty + visPositions = visiblePositions charPos 12 em + in (gormlakPos `member` visPositions) @? 
+ ( "not (" + <> show gormlakPos <> " `member` " + <> show visPositions + <> ")" + ) + ] + ] + ] + +-------------------------------------------------------------------------------- + +data TestEntity = TestEntity + deriving stock (Show, Eq, Ord, Generic) + deriving anyclass (ToJSON, FromJSON, NFData) + +instance Brain TestEntity where + step _ = pure +instance Draw TestEntity +instance Entity TestEntity where + description _ = "" + entityChar _ = "e" diff --git a/users/glittershark/xanthous/test/Xanthous/Data/EntityMapSpec.hs b/users/glittershark/xanthous/test/Xanthous/Data/EntityMapSpec.hs new file mode 100644 index 000000000000..7c5cad019616 --- /dev/null +++ b/users/glittershark/xanthous/test/Xanthous/Data/EntityMapSpec.hs @@ -0,0 +1,69 @@ +{-# LANGUAGE ApplicativeDo #-} +-------------------------------------------------------------------------------- +module Xanthous.Data.EntityMapSpec where +-------------------------------------------------------------------------------- +import Test.Prelude +-------------------------------------------------------------------------------- +import qualified Data.Aeson as JSON +-------------------------------------------------------------------------------- +import Xanthous.Data.EntityMap +import Xanthous.Data (Positioned(..)) +-------------------------------------------------------------------------------- + +main :: IO () +main = defaultMain test + +test :: TestTree +test = localOption (QuickCheckTests 20) + $ testGroup "Xanthous.Data.EntityMap" + [ testBatch $ monoid @(EntityMap Int) mempty + , testGroup "Deduplicate" + [ testGroup "Semigroup laws" + [ testProperty "associative" $ \(a :: Deduplicate (EntityMap Int)) b c -> + a <> (b <> c) === (a <> b) <> c + ] + ] + , testGroup "Eq laws" + [ testProperty "reflexivity" $ \(em :: EntityMap Int) -> + em == em + , testProperty "symmetric" $ \(em₁ :: EntityMap Int) em₂ -> + (em₁ == em₂) == (em₂ == em₁) + , testProperty "transitive" $ \(em₁ :: EntityMap Int) em₂ em₃ -> + if (em₁ == em₂ && em₂ == em₃) + then (em₁ == em₃) + else True + ] + , testGroup "JSON encoding/decoding" + [ testProperty "round-trips" $ \(em :: EntityMap Int) -> + let em' = JSON.decode (JSON.encode em) + in counterexample (show (em' ^? _Just . lastID, em ^. lastID + , em' ^? _Just . byID == em ^. byID . re _Just + , em' ^? _Just . byPosition == em ^. byPosition . re _Just + , em' ^? _Just . _EntityMap == em ^. _EntityMap . re _Just + )) + $ em' === Just em + , testProperty "Preserves IDs" $ \(em :: EntityMap Int) -> + let Just em' = JSON.decode $ JSON.encode em + in toEIDsAndPositioned em' === toEIDsAndPositioned em + ] + + , localOption (QuickCheckTests 50) + $ testGroup "atPosition" + [ testProperty "setget" $ \pos (em :: EntityMap Int) es -> + view (atPosition pos) (set (atPosition pos) es em) === es + , testProperty "getset" $ \pos (em :: EntityMap Int) -> + set (atPosition pos) (view (atPosition pos) em) em === em + , testProperty "setset" $ \pos (em :: EntityMap Int) es -> + (set (atPosition pos) es . set (atPosition pos) es) em + === + set (atPosition pos) es em + -- testProperty "lens laws" $ \pos -> isLens $ atPosition @Int pos + , testProperty "preserves IDs" $ \(em :: EntityMap Int) e1 e2 p -> + let (eid, em') = insertAtReturningID p e1 em + em'' = em' & atPosition p %~ (e2 <|) + in + counterexample ("em': " <> show em') + . counterexample ("em'': " <> show em'') + $ em'' ^. 
at eid === Just (Positioned p e1) + ] + ] diff --git a/users/glittershark/xanthous/test/Xanthous/Data/LevelsSpec.hs b/users/glittershark/xanthous/test/Xanthous/Data/LevelsSpec.hs new file mode 100644 index 000000000000..4e46946a93b0 --- /dev/null +++ b/users/glittershark/xanthous/test/Xanthous/Data/LevelsSpec.hs @@ -0,0 +1,66 @@ +-------------------------------------------------------------------------------- +module Xanthous.Data.LevelsSpec (main, test) where +-------------------------------------------------------------------------------- +import Test.Prelude +-------------------------------------------------------------------------------- +import qualified Data.Aeson as JSON +-------------------------------------------------------------------------------- +import Xanthous.Util (between) +import Xanthous.Data.Levels +-------------------------------------------------------------------------------- + +main :: IO () +main = defaultMain test + +test :: TestTree +test = testGroup "Xanthous.Data.Levels" + [ testGroup "current" + [ testProperty "view is extract" $ \(levels :: Levels Int) -> + levels ^. current === extract levels + , testProperty "set replaces current" $ \(levels :: Levels Int) new -> + extract (set current new levels) === new + , testProperty "set extract is id" $ \(levels :: Levels Int) -> + set current (extract levels) levels === levels + , testProperty "set y ∘ set x ≡ set y" $ \(levels :: Levels Int) x y -> + set current y (set current x levels) === set current y levels + ] + , localOption (QuickCheckTests 20) + $ testBatch $ semigroup @(Levels Int) (error "unused", 1 :: Int) + , testGroup "next/prev" + [ testGroup "nextLevel" + [ testProperty "seeks forwards" $ \(levels :: Levels Int) genned -> + (pos . runIdentity . nextLevel (Identity genned) $ levels) + === pos levels + 1 + , testProperty "maintains the invariant" $ \(levels :: Levels Int) genned -> + let levels' = runIdentity . nextLevel (Identity genned) $ levels + in between 0 (length levels') $ pos levels' + , testProperty "extract is total" $ \(levels :: Levels Int) genned -> + let levels' = runIdentity . nextLevel (Identity genned) $ levels + in total $ extract levels' + , testProperty "uses the generated level as the next level" + $ \(levels :: Levels Int) genned -> + let levels' = seek (length levels - 1) levels + levels'' = runIdentity . 
nextLevel (Identity genned) $ levels' + in counterexample (show levels'') + $ extract levels'' === genned + ] + , testGroup "prevLevel" + [ testProperty "seeks backwards" $ \(levels :: Levels Int) -> + case prevLevel levels of + Nothing -> property Discard + Just levels' -> pos levels' === pos levels - 1 + , testProperty "maintains the invariant" $ \(levels :: Levels Int) -> + case prevLevel levels of + Nothing -> property Discard + Just levels' -> property $ between 0 (length levels') $ pos levels' + , testProperty "extract is total" $ \(levels :: Levels Int) -> + case prevLevel levels of + Nothing -> property Discard + Just levels' -> total $ extract levels' + ] + ] + , testGroup "JSON" + [ testProperty "toJSON/parseJSON round-trip" $ \(levels :: Levels Int) -> + JSON.decode (JSON.encode levels) === Just levels + ] + ] diff --git a/users/glittershark/xanthous/test/Xanthous/Data/NestedMapSpec.hs b/users/glittershark/xanthous/test/Xanthous/Data/NestedMapSpec.hs new file mode 100644 index 000000000000..acf7a67268f4 --- /dev/null +++ b/users/glittershark/xanthous/test/Xanthous/Data/NestedMapSpec.hs @@ -0,0 +1,20 @@ +-------------------------------------------------------------------------------- +module Xanthous.Data.NestedMapSpec (main, test) where +-------------------------------------------------------------------------------- +import Test.Prelude +-------------------------------------------------------------------------------- +import Test.QuickCheck.Instances.Semigroup () +-------------------------------------------------------------------------------- +import qualified Xanthous.Data.NestedMap as NM +-------------------------------------------------------------------------------- + +main :: IO () +main = defaultMain test + +test :: TestTree +test = testGroup "Xanthous.Data.NestedMap" + [ testProperty "insert/lookup" $ \nm ks v -> + let nm' = NM.insert ks v nm + in counterexample ("inserted: " <> show nm') + $ NM.lookup @Map @Int @Int ks nm' === Just (NM.Val v) + ] diff --git a/users/glittershark/xanthous/test/Xanthous/DataSpec.hs b/users/glittershark/xanthous/test/Xanthous/DataSpec.hs new file mode 100644 index 000000000000..91dc6cea1ba5 --- /dev/null +++ b/users/glittershark/xanthous/test/Xanthous/DataSpec.hs @@ -0,0 +1,98 @@ +-------------------------------------------------------------------------------- +module Xanthous.DataSpec (main, test) where +-------------------------------------------------------------------------------- +import Test.Prelude hiding (Right, Left, Down, toList, all) +import Data.Group +import Data.Foldable (toList, all) +-------------------------------------------------------------------------------- +import Xanthous.Data +-------------------------------------------------------------------------------- + +main :: IO () +main = defaultMain test + +test :: TestTree +test = testGroup "Xanthous.Data" + [ testGroup "Position" + [ testBatch $ monoid @Position mempty + , testProperty "group laws" $ \(pos :: Position) -> + pos <> invert pos == mempty && invert pos <> pos == mempty + , testGroup "stepTowards laws" + [ testProperty "takes only one step" $ \src tgt -> + src /= tgt ==> + isUnit (src `diffPositions` (src `stepTowards` tgt)) + -- , testProperty "moves in the right direction" $ \src tgt -> + -- stepTowards src tgt == move (directionOf src tgt) src + ] + , testProperty "directionOf laws" $ \pos dir -> + directionOf pos (move dir pos) == dir + , testProperty "diffPositions is add inverse" $ \(pos₁ :: Position) pos₂ -> + diffPositions pos₁ pos₂ == addPositions 
pos₁ (invert pos₂) + , testGroup "isUnit" + [ testProperty "double direction is never unit" $ \dir -> + not . isUnit $ move dir (asPosition dir) + , testCase "examples" $ do + isUnit (Position @Int 1 1) @? "not . isUnit $ Position 1 1" + isUnit (Position @Int 0 (-1)) @? "not . isUnit $ Position 0 (-1)" + (not . isUnit) (Position @Int 1 13) @? "isUnit $ Position 1 13" + ] + ] + + , testGroup "Direction" + [ testProperty "opposite is involutive" $ \(dir :: Direction) -> + opposite (opposite dir) == dir + , testProperty "opposite provides inverse" $ \dir -> + invert (asPosition dir) === asPosition (opposite dir) + , testProperty "asPosition isUnit" $ \dir -> + dir /= Here ==> isUnit (asPosition dir) + , testGroup "Move" + [ testCase "Up" $ move Up mempty @?= Position @Int 0 (-1) + , testCase "Down" $ move Down mempty @?= Position @Int 0 1 + , testCase "Left" $ move Left mempty @?= Position @Int (-1) 0 + , testCase "Right" $ move Right mempty @?= Position @Int 1 0 + , testCase "UpLeft" $ move UpLeft mempty @?= Position @Int (-1) (-1) + , testCase "UpRight" $ move UpRight mempty @?= Position @Int 1 (-1) + , testCase "DownLeft" $ move DownLeft mempty @?= Position @Int (-1) 1 + , testCase "DownRight" $ move DownRight mempty @?= Position @Int 1 1 + ] + ] + + , testGroup "Corner" + [ testGroup "instance Opposite" + [ testProperty "involutive" $ \(corner :: Corner) -> + opposite (opposite corner) === corner + ] + ] + + , testGroup "Edge" + [ testGroup "instance Opposite" + [ testProperty "involutive" $ \(edge :: Edge) -> + opposite (opposite edge) === edge + ] + ] + + , testGroup "Box" + [ testGroup "boxIntersects" + [ testProperty "True" $ \dims -> + boxIntersects (Box @Word (V2 1 1) (V2 2 2)) + (Box (V2 2 2) dims) + , testProperty "False" $ \dims -> + not $ boxIntersects (Box @Word (V2 1 1) (V2 2 2)) + (Box (V2 4 2) dims) + ] + ] + + , testGroup "Neighbors" + [ testGroup "rotations" + [ testProperty "always has the same members" + $ \(neighs :: Neighbors Int) -> + all (\ns -> sort (toList ns) == sort (toList neighs)) + $ rotations neighs + , testProperty "all rotations have the same rotations" + $ \(neighs :: Neighbors Int) -> + let rots = rotations neighs + in all (\ns -> sort (toList $ rotations ns) == sort (toList rots)) + rots + ] + ] + ] diff --git a/users/glittershark/xanthous/test/Xanthous/Entities/RawsSpec.hs b/users/glittershark/xanthous/test/Xanthous/Entities/RawsSpec.hs new file mode 100644 index 000000000000..2e6f35457fc7 --- /dev/null +++ b/users/glittershark/xanthous/test/Xanthous/Entities/RawsSpec.hs @@ -0,0 +1,16 @@ +-- | + +module Xanthous.Entities.RawsSpec (main, test) where + +import Test.Prelude +import Xanthous.Entities.Raws + +main :: IO () +main = defaultMain test + +test :: TestTree +test = testGroup "Xanthous.Entities.Raws" + [ testGroup "raws" + [ testCase "are all valid" $ raws `deepseq` pure () + ] + ] diff --git a/users/glittershark/xanthous/test/Xanthous/GameSpec.hs b/users/glittershark/xanthous/test/Xanthous/GameSpec.hs new file mode 100644 index 000000000000..2fa8527d0e59 --- /dev/null +++ b/users/glittershark/xanthous/test/Xanthous/GameSpec.hs @@ -0,0 +1,55 @@ +module Xanthous.GameSpec where + +import Test.Prelude hiding (Down) +import Xanthous.Game +import Xanthous.Game.State +import Control.Lens.Properties +import Xanthous.Data (move, Direction(Down)) +import Xanthous.Data.EntityMap (atPosition) + +main :: IO () +main = defaultMain test + +test :: TestTree +test + = localOption (QuickCheckTests 10) + . 
localOption (QuickCheckMaxSize 10) + $ testGroup "Xanthous.Game" + [ testGroup "positionedCharacter" + [ testProperty "lens laws" $ isLens positionedCharacter + , testCase "updates the position of the character" $ do + initialGame <- getInitialState + let initialPos = initialGame ^. characterPosition + updatedGame = initialGame & characterPosition %~ move Down + updatedPos = updatedGame ^. characterPosition + updatedPos @?= move Down initialPos + updatedGame ^. entities . atPosition initialPos @?= fromList [] + updatedGame ^. entities . atPosition updatedPos + @?= fromList [SomeEntity $ initialGame ^. character] + ] + , testGroup "characterPosition" + [ testProperty "lens laws" $ isLens characterPosition + ] + , testGroup "character" + [ testProperty "lens laws" $ isLens character + ] + , testGroup "MessageHistory" + [ testGroup "MonoComonad laws" + [ testProperty "oextend oextract ≡ id" + $ \(mh :: MessageHistory) -> oextend oextract mh === mh + , testProperty "oextract ∘ oextend f ≡ f" + $ \(mh :: MessageHistory) f -> (oextract . oextend f) mh === f mh + , testProperty "oextend f ∘ oextend g ≡ oextend (f . oextend g)" + $ \(mh :: MessageHistory) f g -> + (oextend f . oextend g) mh === oextend (f . oextend g) mh + ] + ] + , testGroup "Saving the game" + [ testProperty "forms a prism" $ isPrism saved + , testProperty "round-trips" $ \gs -> + loadGame (saveGame gs) === Just gs + , testProperty "preserves the character ID" $ \gs -> + let Just gs' = loadGame $ saveGame gs + in gs' ^. character === gs ^. character + ] + ] diff --git a/users/glittershark/xanthous/test/Xanthous/Generators/UtilSpec.hs b/users/glittershark/xanthous/test/Xanthous/Generators/UtilSpec.hs new file mode 100644 index 000000000000..cdfadc06f505 --- /dev/null +++ b/users/glittershark/xanthous/test/Xanthous/Generators/UtilSpec.hs @@ -0,0 +1,84 @@ +{-# LANGUAGE PackageImports #-} +-------------------------------------------------------------------------------- +module Xanthous.Generators.UtilSpec (main, test) where +-------------------------------------------------------------------------------- +import Test.Prelude +import System.Random (mkStdGen) +import Control.Monad.Random (runRandT) +import Data.Array.ST (STUArray, runSTUArray, thaw) +import Data.Array.IArray (bounds) +import Data.Array.MArray (newArray, readArray, writeArray) +import Data.Array (Array, range, listArray, Ix) +import Control.Monad.ST (ST, runST) +import "checkers" Test.QuickCheck.Instances.Array () +import Linear.V2 +-------------------------------------------------------------------------------- +import Xanthous.Util +import Xanthous.Data (width, height) +import Xanthous.Generators.Util +-------------------------------------------------------------------------------- + +main :: IO () +main = defaultMain test + +-------------------------------------------------------------------------------- + +newtype GenArray a b = GenArray (Array a b) + deriving stock (Show, Eq) + +instance (Ix a, Arbitrary a, CoArbitrary a, Arbitrary b) + => Arbitrary (GenArray a b) where + arbitrary = GenArray <$> do + (mkElem :: a -> b) <- arbitrary + minDims <- arbitrary + maxDims <- arbitrary + let bnds = (minDims, maxDims) + pure $ listArray bnds $ mkElem <$> range bnds + +test :: TestTree +test = testGroup "Xanthous.Generators.Util" + [ testGroup "randInitialize" + [ testProperty "returns an array of the correct dimensions" + $ \dims seed aliveChance -> + let gen = mkStdGen seed + res = runSTUArray + $ fmap fst + $ flip runRandT gen + $ randInitialize dims aliveChance + in 
bounds res === (0, V2 (dims ^. width) (dims ^. height)) + ] + , testGroup "numAliveNeighborsM" + [ testProperty "maxes out at 8" + $ \(GenArray (arr :: Array (V2 Word) Bool)) loc -> + let + act :: forall s. ST s Word + act = do + mArr <- thaw @_ @_ @_ @(STUArray s) arr + numAliveNeighborsM mArr loc + res = runST act + in counterexample (show res) $ between 0 8 res + ] + , testGroup "numAliveNeighbors" + [ testProperty "is equivalient to runST . numAliveNeighborsM . thaw" $ + \(GenArray (arr :: Array (V2 Word) Bool)) loc -> + let + act :: forall s. ST s Word + act = do + mArr <- thaw @_ @_ @_ @(STUArray s) arr + numAliveNeighborsM mArr loc + res = runST act + in numAliveNeighbors arr loc === res + ] + , testGroup "cloneMArray" + [ testCase "clones the array" $ runST $ + let + go :: forall s. ST s Assertion + go = do + arr <- newArray @(STUArray s) (0 :: Int, 5) (1 :: Int) + arr' <- cloneMArray @_ @(STUArray s) arr + writeArray arr' 0 1234 + x <- readArray arr 0 + pure $ x @?= 1 + in go + ] + ] diff --git a/users/glittershark/xanthous/test/Xanthous/MessageSpec.hs b/users/glittershark/xanthous/test/Xanthous/MessageSpec.hs new file mode 100644 index 000000000000..b681e537efe6 --- /dev/null +++ b/users/glittershark/xanthous/test/Xanthous/MessageSpec.hs @@ -0,0 +1,53 @@ +{-# LANGUAGE OverloadedLists #-} +module Xanthous.MessageSpec ( main, test ) where + +import Test.Prelude +import Xanthous.Messages +import Data.Aeson +import Text.Mustache +import Control.Lens.Properties + +main :: IO () +main = defaultMain test + +test :: TestTree +test = testGroup "Xanthous.Messages" + [ testGroup "Message" + [ testGroup "JSON decoding" + [ testCase "Single" + $ decode "\"Test Single Template\"" + @?= Just (Single + $ compileMustacheText "template" "Test Single Template" + ^?! _Right) + , testCase "Choice" + $ decode "[\"Choice 1\", \"Choice 2\"]" + @?= Just + (Choice + [ compileMustacheText "template" "Choice 1" ^?! _Right + , compileMustacheText "template" "Choice 2" ^?! _Right + ]) + ] + ] + , localOption (QuickCheckTests 50) + . localOption (QuickCheckMaxSize 10) + $ testGroup "MessageMap" + [ testGroup "instance Ixed" + [ testProperty "traversal laws" $ \k -> + isTraversal $ ix @MessageMap k + , testCase "preview when exists" $ + let + Right tpl = compileMustacheText "foo" "bar" + msg = Single tpl + mm = Nested $ [("foo", Direct msg)] + in mm ^? ix ["foo"] @?= Just msg + ] + , testGroup "lookupMessage" + [ testProperty "is equivalent to preview ix" $ \msgMap path -> + lookupMessage path msgMap === msgMap ^? 
ix path + ] + ] + + , testGroup "Messages" + [ testCase "are all valid" $ messages `deepseq` pure () + ] + ] diff --git a/users/glittershark/xanthous/test/Xanthous/Messages/TemplateSpec.hs b/users/glittershark/xanthous/test/Xanthous/Messages/TemplateSpec.hs new file mode 100644 index 000000000000..2a3873c3b016 --- /dev/null +++ b/users/glittershark/xanthous/test/Xanthous/Messages/TemplateSpec.hs @@ -0,0 +1,80 @@ +-------------------------------------------------------------------------------- +module Xanthous.Messages.TemplateSpec (main, test) where +-------------------------------------------------------------------------------- +import Test.Prelude +import Test.QuickCheck.Instances.Text () +import Data.List.NonEmpty (NonEmpty(..)) +import Data.Function (fix) +-------------------------------------------------------------------------------- +import Xanthous.Messages.Template +-------------------------------------------------------------------------------- + +main :: IO () +main = defaultMain test + +test :: TestTree +test = testGroup "Xanthous.Messages.Template" + [ testGroup "parsing" + [ testProperty "literals" $ forAll genLiteral $ \s -> + testParse template s === Right (Literal s) + , parseCase "escaped curlies" + "foo\\{" + $ Literal "foo{" + , parseCase "simple substitution" + "foo {{bar}}" + $ Literal "foo " `Concat` Subst (SubstPath $ "bar" :| []) + , parseCase "substitution with filters" + "foo {{bar | baz}}" + $ Literal "foo " + `Concat` Subst (SubstFilter (SubstPath $ "bar" :| []) + (FilterName "baz")) + , parseCase "substitution with multiple filters" + "foo {{bar | baz | qux}}" + $ Literal "foo " + `Concat` Subst (SubstFilter (SubstFilter (SubstPath $ "bar" :| []) + (FilterName "baz")) + (FilterName "qux")) + , parseCase "two substitutions and a literal" + "{{a}}{{b}}c" + $ Subst (SubstPath $ "a" :| []) + `Concat` Subst (SubstPath $ "b" :| []) + `Concat` Literal "c" + , localOption (QuickCheckTests 10) + $ testProperty "round-trips with ppTemplate" $ \tpl -> + testParse template (ppTemplate tpl) === Right tpl + ] + , testBatch $ monoid @Template mempty + , testGroup "rendering" + [ testProperty "rendering literals renders literally" + $ forAll genLiteral $ \s fs vs -> + render fs vs (Literal s) === Right s + , testProperty "rendering substitutions renders substitutions" + $ forAll genPath $ \ident val fs -> + let tpl = Subst (SubstPath ident) + tvs = varsWith ident val + in render fs tvs tpl === Right val + , testProperty "filters filter" $ forAll genPath + $ \ident filterName filterFn val -> + let tpl = Subst (SubstFilter (SubstPath ident) filterName) + fs = mapFromList [(filterName, filterFn)] + vs = varsWith ident val + in render fs vs tpl === Right (filterFn val) + ] + ] + where + genLiteral = pack . filter (`notElem` ['\\', '{']) <$> arbitrary + parseCase name input expected = + testCase name $ testParse template input @?= Right expected + testParse p = over _Left errorBundlePretty . runParser p "<test>" + genIdentifier = pack @Text <$> listOf1 (elements identifierChars) + identifierChars = ['a'..'z'] <> ['A'..'Z'] <> ['-', '_'] + + varsWith (p :| []) val = vars [(p, Val val)] + varsWith (phead :| ps) val = vars . pure . (phead ,) . 
flip fix ps $ + \next pth -> case pth of + [] -> Val val + p : ps' -> nested [(p, next ps')] + + genPath = (:|) <$> genIdentifier <*> listOf genIdentifier + +-- diff --git a/users/glittershark/xanthous/test/Xanthous/OrphansSpec.hs b/users/glittershark/xanthous/test/Xanthous/OrphansSpec.hs new file mode 100644 index 000000000000..3740945877ef --- /dev/null +++ b/users/glittershark/xanthous/test/Xanthous/OrphansSpec.hs @@ -0,0 +1,42 @@ +{-# LANGUAGE BlockArguments #-} +-------------------------------------------------------------------------------- +module Xanthous.OrphansSpec where +-------------------------------------------------------------------------------- +import Test.Prelude +-------------------------------------------------------------------------------- +import Text.Mustache +import Text.Megaparsec (errorBundlePretty) +import Graphics.Vty.Attributes +import qualified Data.Aeson as JSON +-------------------------------------------------------------------------------- +import Xanthous.Orphans +-------------------------------------------------------------------------------- + +main :: IO () +main = defaultMain test + +test :: TestTree +test = testGroup "Xanthous.Orphans" + [ localOption (QuickCheckTests 50) + . localOption (QuickCheckMaxSize 10) + $ testGroup "Template" + [ testProperty "ppTemplate / compileMustacheText " \tpl -> + let src = ppTemplate tpl + res :: Either String Template + res = over _Left errorBundlePretty + $ compileMustacheText (templateActual tpl) src + expected = templateCache tpl ^?! at (templateActual tpl) + in + counterexample (unpack src) + $ Right expected === do + (Template actual cache) <- res + maybe (Left "Template not found") Right $ cache ^? at actual + , testProperty "JSON round trip" $ \(tpl :: Template) -> + counterexample (unpack $ ppTemplate tpl) + $ JSON.decode (JSON.encode tpl) === Just tpl + ] + , testGroup "Attr" + [ testProperty "JSON round trip" $ \(attr :: Attr) -> + JSON.decode (JSON.encode attr) === Just attr + ] + ] diff --git a/users/glittershark/xanthous/test/Xanthous/RandomSpec.hs b/users/glittershark/xanthous/test/Xanthous/RandomSpec.hs new file mode 100644 index 000000000000..187336f08650 --- /dev/null +++ b/users/glittershark/xanthous/test/Xanthous/RandomSpec.hs @@ -0,0 +1,25 @@ +-------------------------------------------------------------------------------- +module Xanthous.RandomSpec (main, test) where +-------------------------------------------------------------------------------- +import Test.Prelude +-------------------------------------------------------------------------------- +import Control.Monad.Random +-------------------------------------------------------------------------------- +import Xanthous.Random +-------------------------------------------------------------------------------- + +main :: IO () +main = defaultMain test + +test :: TestTree +test = testGroup "Xanthous.Random" + [ testGroup "chooseSubset" + [ testProperty "chooses a subset" + $ \(l :: [Int]) (Positive (r :: Double)) -> randomTest $ do + ss <- chooseSubset r l + pure $ all (`elem` l) ss + + ] + ] + where + randomTest prop = evalRandT prop . 
mkStdGen =<< arbitrary diff --git a/users/glittershark/xanthous/test/Xanthous/Util/GraphSpec.hs b/users/glittershark/xanthous/test/Xanthous/Util/GraphSpec.hs new file mode 100644 index 000000000000..35ff090b28b9 --- /dev/null +++ b/users/glittershark/xanthous/test/Xanthous/Util/GraphSpec.hs @@ -0,0 +1,39 @@ +module Xanthous.Util.GraphSpec (main, test) where +-------------------------------------------------------------------------------- +import Test.Prelude +-------------------------------------------------------------------------------- +import Xanthous.Util.Graph +import Data.Graph.Inductive.Basic +import Data.Graph.Inductive.Graph (labNodes, size, order) +import Data.Graph.Inductive.PatriciaTree +import Data.Graph.Inductive.Arbitrary +-------------------------------------------------------------------------------- + +main :: IO () +main = defaultMain test + +test :: TestTree +test = testGroup "Xanthous.Util.Graph" + [ testGroup "mstSubGraph" + [ testProperty "always produces a subgraph" + $ \(CG _ (graph :: Gr Int Int)) -> + let msg = mstSubGraph $ undir graph + in counterexample (show msg) + $ msg `isSubGraphOf` undir graph + , testProperty "returns a graph with the same nodes" + $ \(CG _ (graph :: Gr Int Int)) -> + let msg = mstSubGraph graph + in counterexample (show msg) + $ labNodes msg === labNodes graph + , testProperty "has nodes - 1 edges" + $ \(CG _ (graph :: Gr Int Int)) -> + order graph > 1 ==> + let msg = mstSubGraph graph + in counterexample (show msg) + $ size msg === order graph - 1 + , testProperty "always produces a simple graph" + $ \(CG _ (graph :: Gr Int Int)) -> + let msg = mstSubGraph graph + in counterexample (show msg) $ isSimple msg + ] + ] diff --git a/users/glittershark/xanthous/test/Xanthous/Util/GraphicsSpec.hs b/users/glittershark/xanthous/test/Xanthous/Util/GraphicsSpec.hs new file mode 100644 index 000000000000..61e589280362 --- /dev/null +++ b/users/glittershark/xanthous/test/Xanthous/Util/GraphicsSpec.hs @@ -0,0 +1,72 @@ +module Xanthous.Util.GraphicsSpec (main, test) where +-------------------------------------------------------------------------------- +import Test.Prelude hiding (head) +-------------------------------------------------------------------------------- +import Data.List (nub, head) +import Data.Set (isSubsetOf) +import Linear.V2 +-------------------------------------------------------------------------------- +import Xanthous.Util.Graphics +import Xanthous.Util +import Xanthous.Orphans () +-------------------------------------------------------------------------------- + +main :: IO () +main = defaultMain test + +test :: TestTree +test = testGroup "Xanthous.Util.Graphics" + [ testGroup "circle" + [ testCase "radius 1, origin 2,2" + {- + | | 0 | 1 | 2 | 3 | + |---+---+---+---+---| + | 0 | | | | | + | 1 | | | x | | + | 2 | | x | | x | + | 3 | | | x | | + -} + $ (sort . unique @[] @[_]) (circle @Int (V2 2 2) 1) + @?= [ V2 1 2 + , V2 2 1, V2 2 3 + , V2 3 2 + ] + , testCase "radius 12, origin 0" + $ (sort . nub) (circle @Int 0 12) + @?= (sort . 
nub) + [ V2 (-12) (-4), V2 (-12) (-3), V2 (-12) (-2), V2 (-12) (-1) + , V2 (-12) 0, V2 (-12) 1, V2 (-12) 2, V2 (-12) 3, V2 (-12) 4 + , V2 (-11) (-6), V2 (-11) (-5), V2 (-11) 5, V2 (-11) 6, V2 (-10) (-7) + , V2 (-10) 7, V2 (-9) (-9), V2 (-9) (-8), V2 (-9) 8, V2 (-9) 9 + , V2 (-8) (-9), V2 (-8) 9, V2 (-7) (-10), V2 (-7) 10, V2 (-6) (-11) + , V2 (-6) 11, V2 (-5) (-11), V2 (-5) 11, V2 (-4) (-12), V2 (-4) 12 + , V2 (-3) (-12), V2 (-3) 12, V2 (-2) (-12), V2 (-2) 12, V2 (-1) (-12) + , V2 (-1) 12, V2 0 (-12), V2 0 12, V2 1 (-12), V2 1 12, V2 2 (-12) + , V2 2 12, V2 3 (-12), V2 3 12, V2 4 (-12), V2 4 12, V2 5 (-11) + , V2 5 11, V2 6 (-11), V2 6 11, V2 7 (-10), V2 7 10, V2 8 (-9), V2 8 9 + , V2 9 (-9), V2 9 (-8), V2 9 8, V2 9 9, V2 10 (-7), V2 10 7 + , V2 11 (-6), V2 11 (-5), V2 11 5, V2 11 6, V2 12 (-4), V2 12 (-3) + , V2 12 (-2), V2 12 (-1), V2 12 0, V2 12 1, V2 12 2, V2 12 3, V2 12 4 + ] + ] + , testGroup "filledCircle" + [ testProperty "is a superset of circle" $ \center radius -> + let circ = circle @Int center radius + filledCirc = filledCircle center radius + in counterexample ( "circle: " <> show circ + <> "\nfilledCircle: " <> show filledCirc) + $ setFromList circ `isSubsetOf` setFromList filledCirc + -- TODO later + -- , testProperty "is always contiguous" $ \center radius -> + -- let filledCirc = filledCircle center radius + -- in counterexample (renderBooleanGraphics filledCirc) $ + ] + , testGroup "line" + [ testProperty "starts and ends at the start and end points" $ \start end -> + let ℓ = line @Int start end + in counterexample ("line: " <> show ℓ) + $ length ℓ > 2 ==> (head ℓ === start) .&&. (head (reverse ℓ) === end) + ] + ] + +-------------------------------------------------------------------------------- diff --git a/users/glittershark/xanthous/test/Xanthous/Util/InflectionSpec.hs b/users/glittershark/xanthous/test/Xanthous/Util/InflectionSpec.hs new file mode 100644 index 000000000000..fad841043152 --- /dev/null +++ b/users/glittershark/xanthous/test/Xanthous/Util/InflectionSpec.hs @@ -0,0 +1,18 @@ +module Xanthous.Util.InflectionSpec (main, test) where + +import Test.Prelude +import Xanthous.Util.Inflection + +main :: IO () +main = defaultMain test + +test :: TestTree +test = testGroup "Xanthous.Util.Inflection" + [ testGroup "toSentence" + [ testCase "empty" $ toSentence [] @?= "" + , testCase "single" $ toSentence ["x"] @?= "x" + , testCase "two" $ toSentence ["x", "y"] @?= "x and y" + , testCase "three" $ toSentence ["x", "y", "z"] @?= "x, y, and z" + , testCase "four" $ toSentence ["x", "y", "z", "w"] @?= "x, y, z, and w" + ] + ] diff --git a/users/glittershark/xanthous/test/Xanthous/UtilSpec.hs b/users/glittershark/xanthous/test/Xanthous/UtilSpec.hs new file mode 100644 index 000000000000..8538ea5098ba --- /dev/null +++ b/users/glittershark/xanthous/test/Xanthous/UtilSpec.hs @@ -0,0 +1,28 @@ +module Xanthous.UtilSpec (main, test) where + +import Test.Prelude +import Xanthous.Util + +main :: IO () +main = defaultMain test + +test :: TestTree +test = testGroup "Xanthous.Util" + [ testGroup "smallestNotIn" + [ testCase "examples" $ do + smallestNotIn [7 :: Word, 3, 7] @?= 0 + smallestNotIn [7 :: Word, 0, 1, 3, 7] @?= 2 + , testProperty "returns an element not in the list" $ \(xs :: [Word]) -> + smallestNotIn xs `notElem` xs + , testProperty "pred return is in the list" $ \(xs :: [Word]) -> + let res = smallestNotIn xs + in res /= 0 ==> pred res `elem` xs + , testProperty "ignores order" $ \(xs :: [Word]) -> + forAll (shuffle xs) $ \shuffledXs -> + smallestNotIn xs === 
smallestNotIn shuffledXs + ] + , testGroup "takeWhileInclusive" + [ testProperty "takeWhileInclusive (const True) ≡ id" + $ \(xs :: [Int]) -> takeWhileInclusive (const True) xs === xs + ] + ] diff --git a/users/glittershark/xanthous/xanthous.cabal b/users/glittershark/xanthous/xanthous.cabal new file mode 100644 index 000000000000..3f093a37a15a --- /dev/null +++ b/users/glittershark/xanthous/xanthous.cabal @@ -0,0 +1,455 @@ +cabal-version: 1.12 + +-- This file has been generated from package.yaml by hpack version 0.33.1. +-- +-- see: https://github.com/sol/hpack +-- +-- hash: bb0a26ab512a1b8d095f3fa71370dcc5221c3f20888042a0d5c41d054dc403cf + +name: xanthous +version: 0.1.0.0 +synopsis: A WIP TUI RPG +description: Please see the README on GitHub at <https://github.com/glittershark/xanthous> +category: Game +homepage: https://github.com/glittershark/xanthous#readme +bug-reports: https://github.com/glittershark/xanthous/issues +author: Griffin Smith +maintainer: root@gws.fyi +copyright: 2019 Griffin Smith +license: GPL-3 +license-file: LICENSE +build-type: Simple +extra-source-files: + README.org + +source-repository head + type: git + location: https://github.com/glittershark/xanthous + +library + exposed-modules: + Data.Aeson.Generic.DerivingVia + Main + Xanthous.AI.Gormlak + Xanthous.App + Xanthous.App.Autocommands + Xanthous.App.Common + Xanthous.App.Prompt + Xanthous.App.Time + Xanthous.Command + Xanthous.Data + Xanthous.Data.App + Xanthous.Data.Entities + Xanthous.Data.EntityChar + Xanthous.Data.EntityMap + Xanthous.Data.EntityMap.Graphics + Xanthous.Data.Levels + Xanthous.Data.NestedMap + Xanthous.Data.VectorBag + Xanthous.Entities.Character + Xanthous.Entities.Creature + Xanthous.Entities.Creature.Hippocampus + Xanthous.Entities.Draw.Util + Xanthous.Entities.Entities + Xanthous.Entities.Environment + Xanthous.Entities.Item + Xanthous.Entities.Marker + Xanthous.Entities.Raws + Xanthous.Entities.RawTypes + Xanthous.Game + Xanthous.Game.Arbitrary + Xanthous.Game.Draw + Xanthous.Game.Env + Xanthous.Game.Lenses + Xanthous.Game.Prompt + Xanthous.Game.State + Xanthous.Generators + Xanthous.Generators.CaveAutomata + Xanthous.Generators.Dungeon + Xanthous.Generators.LevelContents + Xanthous.Generators.Util + Xanthous.Generators.Village + Xanthous.Messages + Xanthous.Messages.Template + Xanthous.Monad + Xanthous.Orphans + Xanthous.Prelude + Xanthous.Random + Xanthous.Util + Xanthous.Util.Comonad + Xanthous.Util.Graph + Xanthous.Util.Graphics + Xanthous.Util.Inflection + Xanthous.Util.JSON + Xanthous.Util.Optparse + Xanthous.Util.QuickCheck + other-modules: + Paths_xanthous + hs-source-dirs: + src + default-extensions: BlockArguments ConstraintKinds DataKinds DeriveAnyClass DeriveGeneric DerivingStrategies DerivingVia FlexibleContexts FlexibleInstances FunctionalDependencies GADTSyntax GeneralizedNewtypeDeriving KindSignatures LambdaCase MultiWayIf NoImplicitPrelude NoStarIsType OverloadedStrings PolyKinds RankNTypes ScopedTypeVariables TupleSections TypeApplications TypeFamilies TypeOperators ViewPatterns + ghc-options: -Wall + build-depends: + JuicyPixels + , MonadRandom + , QuickCheck + , Rasterific + , aeson + , array + , async + , base + , bifunctors + , brick + , checkers + , classy-prelude + , comonad + , comonad-extras + , constraints + , containers + , criterion + , data-default + , deepseq + , directory + , fgl + , fgl-arbitrary + , file-embed + , filepath + , generic-arbitrary + , generic-lens + , generic-monoid + , groups + , hgeometry + , hgeometry-combinatorial + , lens + , 
lifted-async + , linear + , megaparsec + , mmorph + , monad-control + , mtl + , optparse-applicative + , parallel + , parser-combinators + , pointed + , quickcheck-instances + , quickcheck-text + , random + , random-extras + , random-fu + , random-source + , raw-strings-qq + , reflection + , semigroupoids + , stache + , streams + , text + , text-zipper + , tomland + , transformers + , vector + , vty + , witherable + , yaml + , zlib + default-language: Haskell2010 + +executable xanthous + main-is: Main.hs + other-modules: + Data.Aeson.Generic.DerivingVia + Xanthous.AI.Gormlak + Xanthous.App + Xanthous.App.Autocommands + Xanthous.App.Common + Xanthous.App.Prompt + Xanthous.App.Time + Xanthous.Command + Xanthous.Data + Xanthous.Data.App + Xanthous.Data.Entities + Xanthous.Data.EntityChar + Xanthous.Data.EntityMap + Xanthous.Data.EntityMap.Graphics + Xanthous.Data.Levels + Xanthous.Data.NestedMap + Xanthous.Data.VectorBag + Xanthous.Entities.Character + Xanthous.Entities.Creature + Xanthous.Entities.Creature.Hippocampus + Xanthous.Entities.Draw.Util + Xanthous.Entities.Entities + Xanthous.Entities.Environment + Xanthous.Entities.Item + Xanthous.Entities.Marker + Xanthous.Entities.Raws + Xanthous.Entities.RawTypes + Xanthous.Game + Xanthous.Game.Arbitrary + Xanthous.Game.Draw + Xanthous.Game.Env + Xanthous.Game.Lenses + Xanthous.Game.Prompt + Xanthous.Game.State + Xanthous.Generators + Xanthous.Generators.CaveAutomata + Xanthous.Generators.Dungeon + Xanthous.Generators.LevelContents + Xanthous.Generators.Util + Xanthous.Generators.Village + Xanthous.Messages + Xanthous.Messages.Template + Xanthous.Monad + Xanthous.Orphans + Xanthous.Prelude + Xanthous.Random + Xanthous.Util + Xanthous.Util.Comonad + Xanthous.Util.Graph + Xanthous.Util.Graphics + Xanthous.Util.Inflection + Xanthous.Util.JSON + Xanthous.Util.Optparse + Xanthous.Util.QuickCheck + Paths_xanthous + hs-source-dirs: + src + default-extensions: BlockArguments ConstraintKinds DataKinds DeriveAnyClass DeriveGeneric DerivingStrategies DerivingVia FlexibleContexts FlexibleInstances FunctionalDependencies GADTSyntax GeneralizedNewtypeDeriving KindSignatures LambdaCase MultiWayIf NoImplicitPrelude NoStarIsType OverloadedStrings PolyKinds RankNTypes ScopedTypeVariables TupleSections TypeApplications TypeFamilies TypeOperators ViewPatterns + ghc-options: -Wall -threaded -rtsopts -with-rtsopts=-N -O2 + build-depends: + JuicyPixels + , MonadRandom + , QuickCheck + , Rasterific + , aeson + , array + , async + , base + , bifunctors + , brick + , checkers + , classy-prelude + , comonad + , comonad-extras + , constraints + , containers + , criterion + , data-default + , deepseq + , directory + , fgl + , fgl-arbitrary + , file-embed + , filepath + , generic-arbitrary + , generic-lens + , generic-monoid + , groups + , hgeometry + , hgeometry-combinatorial + , lens + , lifted-async + , linear + , megaparsec + , mmorph + , monad-control + , mtl + , optparse-applicative + , parallel + , parser-combinators + , pointed + , quickcheck-instances + , quickcheck-text + , random + , random-extras + , random-fu + , random-source + , raw-strings-qq + , reflection + , semigroupoids + , stache + , streams + , text + , text-zipper + , tomland + , transformers + , vector + , vty + , witherable + , xanthous + , yaml + , zlib + default-language: Haskell2010 + +test-suite test + type: exitcode-stdio-1.0 + main-is: Spec.hs + other-modules: + Test.Prelude + Xanthous.Data.EntitiesSpec + Xanthous.Data.EntityCharSpec + Xanthous.Data.EntityMap.GraphicsSpec + 
Xanthous.Data.EntityMapSpec + Xanthous.Data.LevelsSpec + Xanthous.Data.NestedMapSpec + Xanthous.DataSpec + Xanthous.Entities.RawsSpec + Xanthous.GameSpec + Xanthous.Generators.UtilSpec + Xanthous.Messages.TemplateSpec + Xanthous.MessageSpec + Xanthous.OrphansSpec + Xanthous.RandomSpec + Xanthous.Util.GraphicsSpec + Xanthous.Util.GraphSpec + Xanthous.Util.InflectionSpec + Xanthous.UtilSpec + Paths_xanthous + hs-source-dirs: + test + default-extensions: BlockArguments ConstraintKinds DataKinds DeriveAnyClass DeriveGeneric DerivingStrategies DerivingVia FlexibleContexts FlexibleInstances FunctionalDependencies GADTSyntax GeneralizedNewtypeDeriving KindSignatures LambdaCase MultiWayIf NoImplicitPrelude NoStarIsType OverloadedStrings PolyKinds RankNTypes ScopedTypeVariables TupleSections TypeApplications TypeFamilies TypeOperators ViewPatterns + ghc-options: -Wall -threaded -rtsopts -with-rtsopts=-N -O0 + build-depends: + JuicyPixels + , MonadRandom + , QuickCheck + , Rasterific + , aeson + , array + , async + , base + , bifunctors + , brick + , checkers + , classy-prelude + , comonad + , comonad-extras + , constraints + , containers + , criterion + , data-default + , deepseq + , directory + , fgl + , fgl-arbitrary + , file-embed + , filepath + , generic-arbitrary + , generic-lens + , generic-monoid + , groups + , hgeometry + , hgeometry-combinatorial + , lens + , lens-properties + , lifted-async + , linear + , megaparsec + , mmorph + , monad-control + , mtl + , optparse-applicative + , parallel + , parser-combinators + , pointed + , quickcheck-instances + , quickcheck-text + , random + , random-extras + , random-fu + , random-source + , raw-strings-qq + , reflection + , semigroupoids + , stache + , streams + , tasty + , tasty-hunit + , tasty-quickcheck + , text + , text-zipper + , tomland + , transformers + , vector + , vty + , witherable + , xanthous + , yaml + , zlib + default-language: Haskell2010 + +benchmark benchmark + type: exitcode-stdio-1.0 + main-is: Bench.hs + other-modules: + Bench.Prelude + Xanthous.Generators.UtilBench + Xanthous.RandomBench + Paths_xanthous + hs-source-dirs: + bench + default-extensions: BlockArguments ConstraintKinds DataKinds DeriveAnyClass DeriveGeneric DerivingStrategies DerivingVia FlexibleContexts FlexibleInstances FunctionalDependencies GADTSyntax GeneralizedNewtypeDeriving KindSignatures LambdaCase MultiWayIf NoImplicitPrelude NoStarIsType OverloadedStrings PolyKinds RankNTypes ScopedTypeVariables TupleSections TypeApplications TypeFamilies TypeOperators ViewPatterns + ghc-options: -Wall -threaded -rtsopts -with-rtsopts=-N + build-depends: + JuicyPixels + , MonadRandom + , QuickCheck + , Rasterific + , aeson + , array + , async + , base + , bifunctors + , brick + , checkers + , classy-prelude + , comonad + , comonad-extras + , constraints + , containers + , criterion + , data-default + , deepseq + , directory + , fgl + , fgl-arbitrary + , file-embed + , filepath + , generic-arbitrary + , generic-lens + , generic-monoid + , groups + , hgeometry + , hgeometry-combinatorial + , lens + , lifted-async + , linear + , megaparsec + , mmorph + , monad-control + , mtl + , optparse-applicative + , parallel + , parser-combinators + , pointed + , quickcheck-instances + , quickcheck-text + , random + , random-extras + , random-fu + , random-source + , raw-strings-qq + , reflection + , semigroupoids + , stache + , streams + , text + , text-zipper + , tomland + , transformers + , vector + , vty + , witherable + , xanthous + , yaml + , zlib + default-language: 
Haskell2010 diff --git a/users/isomer/OWNERS b/users/isomer/OWNERS new file mode 100644 index 000000000000..6997cd391d9c --- /dev/null +++ b/users/isomer/OWNERS @@ -0,0 +1,3 @@ +inherited: false +owners: + - isomer diff --git a/users/isomer/keys.nix b/users/isomer/keys.nix new file mode 100644 index 000000000000..8c29e27895db --- /dev/null +++ b/users/isomer/keys.nix @@ -0,0 +1,7 @@ +# SSH public keys +{ ... }: + +rec { + perry = "cert-authority,principals=perry ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCXWKN+FXlQAQ36R4+FHJ9f15Tz/48xLK1f85Yf9eBrvJJVMn6ge3Cy8AJ2nymBtVvCC86q616yl4Mn+CrKBH/vHr4jY9nxJ7HHgKI8ERr+7KpLIAiiaeIBljWwCy918lK3MijRCuj0P0d3v8CEFJjyCsiyglDVcNhsW87VqqZE6lUg4Alw1CGAmNjamxdoIZxjZAM9vJtZrlYnUiu+X7vTl5ttTaZkLCCfu+/bJAKFBWPG5BPaNjjfGVuTKqEc4plkI3JeZBu3Or3LzlYxcvp71i+eKGJ8F/nMBlo25iQsQpi8ZS7JYAhj3mYVrstw7j+nkgbordvDOK5NbDMi6GzX"; + all = [ perry ]; +} diff --git a/users/lukegb/OWNERS b/users/lukegb/OWNERS new file mode 100644 index 000000000000..676fbf185649 --- /dev/null +++ b/users/lukegb/OWNERS @@ -0,0 +1,3 @@ +inherited: false +owners: + - lukegb diff --git a/users/lukegb/hgext/gerrithook.py b/users/lukegb/hgext/gerrithook.py new file mode 100644 index 000000000000..ef02126ba0f8 --- /dev/null +++ b/users/lukegb/hgext/gerrithook.py @@ -0,0 +1,63 @@ +"""Bizarre hacks to make Gerrit better.""" + +import collections +import re +import random +import mercurial + +_ = mercurial.i18n._ + +cmdtable = {} +command = mercurial.registrar.command(cmdtable) + +testedwith = '5.3.1' + +_changeid_regex = re.compile(b'^Change-Id: (I.*)$', re.M) + +def random_hash(): + """Returns a random SHA1-like hex string.""" + return b"%040x" % random.getrandbits(160) + +def reposetup(ui, repo): + + class GerritRepo(repo.__class__): + def commitctx(self, ctx, *args, **kwargs): + match = _changeid_regex.search(ctx._text) + if not match: + ctx._text = ctx._text.rstrip(b'\n') + ctx._text += b'\n\nChange-Id: I' + random_hash() + return super().commitctx(ctx, *args, **kwargs) + + repo.__class__ = GerritRepo + + +@command(b'gerrit-obsolete', [], _(b'[options]')) +def gerritobsolete(ui, repo, **opts): + """Mark draft commits as obsolete by public commits based on Gerrit Change-Id tag.""" + if repo.obsstore.readonly: + ui.error(b'obsstore is readonly') + return + changesets = collections.defaultdict(set) + drafts = set() + for draft in repo.set('draft() - obsolete()'): + match = _changeid_regex.search(draft.description()) + if not match: + continue + changesets[match.groups()[0]].add(draft) + drafts.add(draft) + if not drafts: + return + publicparent = next(repo.set( + b'ancestor((public() and bookmark("canon")), %s)' % ( + b', '.join(x.hex() for x in drafts)))) + megare = b're:(?ms)^Change-Id: (%s)$' % (b'|'.join(changesets.keys()),) + markers = [] + for public in repo.set('(%s..(public() and canon)) and desc(%s)', publicparent, megare): + match = _changeid_regex.search(public.description()) + if not match: + continue + drafts = changesets[match.groups()[0]] + if not drafts: + continue + markers.append((tuple(drafts), (public,))) + mercurial.obsolete.createmarkers(repo, markers, operation=b'gerrit-obsolete') diff --git a/users/lukegb/keys.nix b/users/lukegb/keys.nix new file mode 100644 index 000000000000..e54009122f92 --- /dev/null +++ b/users/lukegb/keys.nix @@ -0,0 +1,10 @@ +# My SSH public keys +{ ... 
}: + +rec { + termius = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAINytpHct7PLdLNp6MoaOPP7ccBPUQKymVNMqix//Wt1f"; + porcorosso-wsl = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIMhQ3yjf59eQjOfVXzXz5u8BS5c6hdL1yY8GqccaIjx3"; + porcorosso-nixos = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAILid+1rq3k3k7Kbaw8X63vrPrQdanH55TucQwp3ZWfo+"; + clouvider-lon01-nix = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAINQU7Y+Ha5m0ebwUjA55xXT/xbWZAWx1fVNFufle+vQj"; + all = [ termius porcorosso-wsl porcorosso-nixos clouvider-lon01-nix ]; +} diff --git a/users/qyliss/OWNERS b/users/qyliss/OWNERS new file mode 100644 index 000000000000..d54ea3622d21 --- /dev/null +++ b/users/qyliss/OWNERS @@ -0,0 +1,3 @@ +inherited: false +owners: + - qyliss diff --git a/users/qyliss/keys.nix b/users/qyliss/keys.nix new file mode 100644 index 000000000000..d0837a7c6744 --- /dev/null +++ b/users/qyliss/keys.nix @@ -0,0 +1,8 @@ +# Public key from https://github.com/alyssais.keys +{ ... }: + +{ + all = [ + "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDO11Pr7jKaRZ2It1yB312SKFN8mCV7aVYdry16LNwtnA6EDgFxyshG4Zmhl9asxQ9wa1lT3tdKB6ArA+VKxXMZB0zm15jYSLKpHQxMT7T3SqtTluJQpJD9zRtWeHbW/e1mtgn3tPYTHERB4HVGKIeGk97eOR2YOdXPHOIWhOXpogDtUlyt1bmWl0gyRHbWhViLeReHYhsu0KbZlo+ntN9aN7lPVkDfa7gUARv6IeGE5hAYHPRWmQ3VJCDaQnzsTtesLPFiNmV6Pq7qtWbHVNOG9XQLXJhD/305+yDZ2y/+KuBEQCroiWF8fPY/8gutfkZ0ZLjdGbXl38j5v+yRjreh+wjcN5MYWCWM18hMdutpoMd9D7PXaZz90V2vS+mRC81t3zXKrAy3Ke+LQBmlWSWxmKWdDoOTGOHjyPuCC/q+In7Q8hetB9/b9WUXTwEaaE3lUsa7y5JHAekNmdSoN3WD10nGYVUMvRRPGAlyqZTQdvxhn+6Pyu2piwIv/TMmC1CwiHr+fLbHxXQF745sOBQNmrdfiOzqDsKleybNB6i0AdDm5UZcYRcMLuxmryxN8O8qNUdMjMGoCeFcGwAIieqM+0xkPiByKr8ky2yV2lwOaZ4jrp/3j5GsGoQlvNKIPdCA/GQFad6vuqvhlbWcbdfiNpawrppLcJBsGB2NVjGbNQ==" + ]; +} diff --git a/users/riking/OWNERS b/users/riking/OWNERS new file mode 100644 index 000000000000..a39f4cd9f0ae --- /dev/null +++ b/users/riking/OWNERS @@ -0,0 +1,3 @@ +inherit: false +owners: + - riking diff --git a/users/riking/adventofcode-2020/.gitignore b/users/riking/adventofcode-2020/.gitignore new file mode 100644 index 000000000000..076ff412156a --- /dev/null +++ b/users/riking/adventofcode-2020/.gitignore @@ -0,0 +1,2 @@ +*/target +*/input.txt diff --git a/users/riking/adventofcode-2020/day01/Cargo.lock b/users/riking/adventofcode-2020/day01/Cargo.lock new file mode 100644 index 000000000000..a1a18948a7ea --- /dev/null +++ b/users/riking/adventofcode-2020/day01/Cargo.lock @@ -0,0 +1,14 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +[[package]] +name = "anyhow" +version = "1.0.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf8dcb5b4bbaa28653b647d8c77bd4ed40183b48882e130c1f1ffb73de069fd7" + +[[package]] +name = "day01" +version = "0.1.0" +dependencies = [ + "anyhow", +] diff --git a/users/riking/adventofcode-2020/day01/Cargo.toml b/users/riking/adventofcode-2020/day01/Cargo.toml new file mode 100644 index 000000000000..d90ab548bb7b --- /dev/null +++ b/users/riking/adventofcode-2020/day01/Cargo.toml @@ -0,0 +1,10 @@ +[package] +name = "day01" +version = "0.1.0" +authors = ["Kane York <kanepyork@gmail.com>"] +edition = "2018" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +anyhow = "1.0.34" diff --git a/users/riking/adventofcode-2020/day01/default.nix b/users/riking/adventofcode-2020/day01/default.nix new file mode 100644 index 000000000000..0648a05af683 --- /dev/null +++ b/users/riking/adventofcode-2020/day01/default.nix @@ -0,0 +1,10 @@ +{ depot, ... 
}: + +with depot.third_party; + +naersk.buildPackage { + src = ./.; + + buildInputs = []; + doCheck = true; +} diff --git a/users/riking/adventofcode-2020/day01/src/main.rs b/users/riking/adventofcode-2020/day01/src/main.rs new file mode 100644 index 000000000000..3e6b339d7c2b --- /dev/null +++ b/users/riking/adventofcode-2020/day01/src/main.rs @@ -0,0 +1,85 @@ +use anyhow::anyhow; +use std::fs::File; +use std::io::prelude::*; +use std::io::BufReader; + +const PART_2: bool = true; + +fn day01(is_part2: bool, numbers: &Vec<i64>) -> Result<String, anyhow::Error> { + //println!("{:?}", numbers); + + for n1 in numbers.iter() { + for n2 in numbers.iter() { + if is_part2 { + for n3 in numbers.iter() { + if n1 + n2 + n3 == 2020 { + return Ok((n1 * n2 * n3).to_string()); + } + } + } else { + if n1 + n2 == 2020 { + return Ok((n1 * n2).to_string()); + } + } + } + } + + Err(anyhow!("no solution found")) +} + +fn parse(filename: &str) -> Result<Vec<i64>, anyhow::Error> { + let f = File::open(filename)?; + let mut reader = BufReader::new(f); + + let mut values = Vec::<i64>::new(); + + let mut line = String::new(); + loop { + line.clear(); + reader.read_line(&mut line)?; + let trimmed_line = line.trim(); + if trimmed_line.is_empty() { + break; + } + + values.push(trimmed_line.parse()?); + } + Ok(values) +} + +fn main() -> anyhow::Result<()> { + let args: Vec<String> = std::env::args().collect(); + + //println!("{:?}", args); + if args.len() != 2 { + return Err(anyhow!("usage: day01 input_file")); + } + let filename = args.into_iter().skip(1).next().expect("args len == 1"); + + let numbers = parse(&filename)?; + + println!("{}", day01(PART_2, &numbers)?); + + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::day01; + + #[test] + fn test_part1() { + let vec = vec![1721, 979, 366, 299, 675, 1456]; + let result = day01(false, &vec).unwrap(); + + assert_eq!(result, 514579.to_string()); + } + + #[test] + fn test_part2() { + let vec = vec![1721, 979, 366, 299, 675, 1456]; + let result = day01(true, &vec).unwrap(); + + assert_eq!(result, 241861950.to_string()); + } +} diff --git a/users/riking/dotfiles/.mybashrc b/users/riking/dotfiles/.mybashrc new file mode 100644 index 000000000000..c5ebc34a1f4f --- /dev/null +++ b/users/riking/dotfiles/.mybashrc @@ -0,0 +1,53 @@ + +# BEGIN: __USER_FUNCTIONS__ +function gh-clone() { + if [[ "x$2" == "x" ]]; then + IFS='/' read -ra PARTS <<< "$1" + user="${PARTS[0]}" + repo="${PARTS[1]}" + else + user="$1" + repo="$2" + fi + if [[ -d ~/go/src/github.com/"$user"/"$repo" ]]; then + cd ~/go/src/github.com/"${user}"/"${repo}" + return 0 + fi + mkdir -p ~/go/src/github.com/"${user}" + cd ~/go/src/github.com/"${user}" + git clone git@github.com:"${user}"/"${repo}".git + cd ~/go/src/github.com/"${user}"/"${repo}" +} + +function download() { + cd "${HOME}/Downloads" + wget "$@" +} + +# todo: only one password pls +function prodaccess() { + (ssh-add -L | grep -q 'ZgEu6S3SLatYN') || ssh-add "$HOME"/.ssh/id_ed25519 + (ssh-add -L | grep -q 'Gfh2S3kUwZ8A6') || ssh-add "$HOME"/.ssh/id_rsa.discourse + echo "signing test" | gpg --clearsign > /dev/null +} + +function reset-audio() { + pulseaudio -k && sudo alsa force-reload +} + +function tvl-push() { + git push origin HEAD:refs/for/canon +} + +# END: __USER_FUNCTIONS__ + +# BEGIN: __USER_ENV__ +GOPATH=$HOME/go +CDPATH=$HOME/go/src +export GPG_TTY="$(tty)" + +export PATH="/usr/local/go/bin:$HOME/go/bin:$HOME/.rbenv/bin:$PATH" + +eval "$(rbenv init -)" +# END: __USER_ENV__ + diff --git a/users/riking/dotfiles/fish/conf.d/nix-env.fish 
b/users/riking/dotfiles/fish/conf.d/nix-env.fish new file mode 100644 index 000000000000..6f79f9752855 --- /dev/null +++ b/users/riking/dotfiles/fish/conf.d/nix-env.fish @@ -0,0 +1,141 @@ +# SPDX-License-Identifier: Unlicense +# https://raw.githubusercontent.com/lilyball/nix-env.fish/master/conf.d/nix-env.fish + +# Setup Nix + +# We need to distinguish between single-user and multi-user installs. +# This is difficult because there's no official way to do this. +# We could look for the presence of /nix/var/nix/daemon-socket/socket but this will fail if the +# daemon hasn't started yet. /nix/var/nix/daemon-socket will exist if the daemon has ever run, but +# I don't think there's any protection against accidentally running `nix-daemon` as a user. +# We also can't just look for /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh because +# older single-user installs used the default profile instead of a per-user profile. +# We can still check for it first, because all multi-user installs should have it, and so if it's +# not present that's a pretty big indicator that this is a single-user install. If it does exist, +# we still need to verify the install type. To that end we'll look for a root owner and sticky bit +# on /nix/store. Multi-user installs set both, single-user installs don't. It's certainly possible +# someone could do a single-user install as root and then manually set the sticky bit but that +# would be extremely unusual. + +set -l nix_profile_path /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh +set -l single_user_profile_path ~/.nix-profile/etc/profile.d/nix.sh +if test -e $nix_profile_path + # The path exists. Double-check that this is a multi-user install. + # We can't just check for ~/.nix-profile/… because this may be a single-user install running as + # the wrong user. + + # stat is not portable. Splitting the output of ls -nd is reliable on most platforms. + set -l owner (string split -n ' ' (ls -nd /nix/store 2>/dev/null))[3] + if not test -k /nix/store -a $owner -eq 0 + # /nix/store is either not owned by root or not sticky. Assume single-user. + set nix_profile_path $single_user_profile_path + end +else + # The path doesn't exist. Assume single-user + set nix_profile_path $single_user_profile_path +end + +if test -e $nix_profile_path + # Source the nix setup script + # We're going to run the regular Nix profile under bash and then print out a few variables + for line in (env -u BASH_ENV bash -c '. "$0"; for name in PATH "${!NIX_@}"; do printf "%s=%s\0" "$name" "${!name}"; done' $nix_profile_path | string split0) + set -xg (string split -m 1 = $line) + end + + # Insert Nix's fish share directories into fish's special variables. + # nixpkgs-installed fish tries to set these up already if NIX_PROFILES is defined, which won't + # be the case when sourcing $__fish_data_dir/share/config.fish normally, but might be for a + # recursive invocation. To guard against that, we'll only insert paths that don't already exit. + # Furthermore, for the vendor_conf.d sourcing, we'll use the pre-existing presence of a path in + # $fish_function_path to determine whether we want to source the relevant vendor_conf.d folder. + + # To start, let's locally define NIX_PROFILES if it doesn't already exist. + set -al NIX_PROFILES + if test (count $NIX_PROFILES) -eq 0 + set -a NIX_PROFILES $HOME/.nix-profile + end + # Replicate the logic from nixpkgs version of $__fish_data_dir/__fish_build_paths.fish. 
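+    # A concrete sketch of what the replicated logic below produces, assuming
+    # a typical multi-user install where NIX_PROFILES is
+    # "/nix/var/nix/profiles/default /home/alice/.nix-profile" (the user name
+    # is illustrative): reversing the list puts the per-user profile first, so
+    # the candidate directories come out as e.g.
+    #   /home/alice/.nix-profile/share/fish/vendor_completions.d
+    #   /nix/var/nix/profiles/default/share/fish/vendor_completions.d
+    # and per-user completions/functions/conf.d shadow the system-wide ones.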
+ set -l __nix_profile_paths (string split ' ' -- $NIX_PROFILES)[-1..1] + set -l __extra_completionsdir \ + $__nix_profile_paths/etc/fish/completions \ + $__nix_profile_paths/share/fish/vendor_completions.d + set -l __extra_functionsdir \ + $__nix_profile_paths/etc/fish/functions \ + $__nix_profile_paths/share/fish/vendor_functions.d + set -l __extra_confdir \ + $__nix_profile_paths/etc/fish/conf.d \ + $__nix_profile_paths/share/fish/vendor_conf.d \ + + ### Configure fish_function_path ### + # Remove any of our extra paths that may already exist. + # Record the equivalent __extra_confdir path for any function path that exists. + set -l existing_conf_paths + for path in $__extra_functionsdir + if set -l idx (contains --index -- $path $fish_function_path) + set -e fish_function_path[$idx] + set -a existing_conf_paths $__extra_confdir[(contains --index -- $path $__extra_functionsdir)] + end + end + # Insert the paths before $__fish_data_dir. + if set -l idx (contains --index -- $__fish_data_dir/functions $fish_function_path) + # Fish has no way to simply insert into the middle of an array. + set -l new_path $fish_function_path[1..$idx] + set -e new_path[$idx] + set -a new_path $__extra_functionsdir + set fish_function_path $new_path $fish_function_path[$idx..-1] + else + set -a fish_function_path $__extra_functionsdir + end + + ### Configure fish_complete_path ### + # Remove any of our extra paths that may already exist. + for path in $__extra_completionsdir + if set -l idx (contains --index -- $path $fish_complete_path) + set -e fish_complete_path[$idx] + end + end + # Insert the paths before $__fish_data_dir. + if set -l idx (contains --index -- $__fish_data_dir/completions $fish_complete_path) + set -l new_path $fish_complete_path[1..$idx] + set -e new_path[$idx] + set -a new_path $__extra_completionsdir + set fish_complete_path $new_path $fish_complete_path[$idx..-1] + else + set -a fish_complete_path $__extra_completionsdir + end + + ### Source conf directories ### + # The built-in directories were already sourced during shell initialization. + # Any __extra_confdir that came from $__fish_data_dir/__fish_build_paths.fish was also sourced. + # As explained above, we're using the presence of pre-existing paths in $fish_function_path as a + # signal that the corresponding conf dir has also already been sourced. + # In order to simulate this, we'll run through the same algorithm as found in + # $__fish_data_dir/config.fish except we'll avoid sourcing the file if it comes from an + # already-sourced location. + # Caveats: + # * Files will be sourced in a different order than we'd ideally do (because we're coming in + # after the fact to source them). + # * If there are existing extra conf paths, files in them may have been sourced that should have + # been suppressed by paths we're inserting in front. + # * Similarly any files in $__fish_data_dir/vendor_conf.d that should have been suppressed won't + # have been. + set -l sourcelist + for file in $__fish_config_dir/conf.d/*.fish $__fish_sysconf_dir/conf.d/*.fish + # We know these paths were sourced already. Just record them. 
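+        # E.g. (file names are hypothetical): if ~/.config/fish/conf.d/direnv.fish
+        # exists, its basename "direnv.fish" is recorded in $sourcelist here, so a
+        # profile's vendor_conf.d/direnv.fish is skipped by the loop further down,
+        # mirroring the "first directory wins" rule fish itself applies to conf.d.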
+ set -l basename (string replace -r '^.*/' '' -- $file) + contains -- $basename $sourcelist + or set -a sourcelist $basename + end + for root in $__extra_confdir + for file in $root/*.fish + set -l basename (string replace -r '^.*/' '' -- $file) + contains -- $basename $sourcelist + and continue + set -a sourcelist $basename + contains -- $root $existing_conf_paths + and continue # this is a pre-existing path, it will have been sourced already + [ -f $file -a -r $file ] + and source $file + end + end +end diff --git a/users/riking/dotfiles/fish/config.fish b/users/riking/dotfiles/fish/config.fish new file mode 100644 index 000000000000..c2454762bddf --- /dev/null +++ b/users/riking/dotfiles/fish/config.fish @@ -0,0 +1,8 @@ +set -gx GOPATH "$HOME/go" +set -gx GPG_TTY (tty) +set -gx DEPOT_ROOT "$GOPATH/src/code.tvl.fyi" + +set -gx PATH '/usr/local/go/bin' "$HOME/.cargo/bin" "$HOME/.rbenv/bin" $PATH +status --is-interactive; and rbenv init - | source +source ~/.opsrc.fish # work +set -gx PATH "$HOME/go/bin" $PATH diff --git a/users/riking/dotfiles/fish/fish_variables b/users/riking/dotfiles/fish/fish_variables new file mode 100644 index 000000000000..fa8bff919f40 --- /dev/null +++ b/users/riking/dotfiles/fish/fish_variables @@ -0,0 +1,32 @@ +# This file contains fish universal variable definitions. +# VERSION: 3.0 +SETUVAR __fish_initialized:3100 +SETUVAR fish_color_autosuggestion:555\x1ebrblack +SETUVAR fish_color_cancel:\x2dr +SETUVAR fish_color_command:005fd7 +SETUVAR fish_color_comment:990000 +SETUVAR fish_color_cwd:green +SETUVAR fish_color_cwd_root:red +SETUVAR fish_color_end:009900 +SETUVAR fish_color_error:ff0000 +SETUVAR fish_color_escape:00a6b2 +SETUVAR fish_color_history_current:\x2d\x2dbold +SETUVAR fish_color_host:normal +SETUVAR fish_color_host_remote:yellow +SETUVAR fish_color_match:\x2d\x2dbackground\x3dbrblue +SETUVAR fish_color_normal:normal +SETUVAR fish_color_operator:00a6b2 +SETUVAR fish_color_param:00afff +SETUVAR fish_color_quote:999900 +SETUVAR fish_color_redirection:00afff +SETUVAR fish_color_search_match:bryellow\x1e\x2d\x2dbackground\x3dbrblack +SETUVAR fish_color_selection:white\x1e\x2d\x2dbold\x1e\x2d\x2dbackground\x3dbrblack +SETUVAR fish_color_status:red +SETUVAR fish_color_user:brgreen +SETUVAR fish_color_valid_path:\x2d\x2dunderline +SETUVAR fish_greeting:Welcome\x20to\x20fish\x2c\x20the\x20friendly\x20interactive\x20shell\x0aType\x20\x60help\x60\x20for\x20instructions\x20on\x20how\x20to\x20use\x20fish +SETUVAR fish_key_bindings:fish_default_key_bindings +SETUVAR fish_pager_color_completion:\x1d +SETUVAR fish_pager_color_description:B3A06D\x1eyellow +SETUVAR fish_pager_color_prefix:white\x1e\x2d\x2dbold\x1e\x2d\x2dunderline +SETUVAR fish_pager_color_progress:brwhite\x1e\x2d\x2dbackground\x3dcyan diff --git a/users/riking/dotfiles/fish/functions/ddate.fish b/users/riking/dotfiles/fish/functions/ddate.fish new file mode 100644 index 000000000000..8152d31680e7 --- /dev/null +++ b/users/riking/dotfiles/fish/functions/ddate.fish @@ -0,0 +1,3 @@ +function ddate --description 'current date in Discourse format' + TZ=UTC date '+[date=%Y-%m-%d time=%H:%M:%S timezone=\"%Z\"]' +end diff --git a/users/riking/dotfiles/fish/functions/gh-clone.fish b/users/riking/dotfiles/fish/functions/gh-clone.fish new file mode 100644 index 000000000000..109ec353f6db --- /dev/null +++ b/users/riking/dotfiles/fish/functions/gh-clone.fish @@ -0,0 +1,18 @@ +function gh-clone --description 'Clone and CD to a github repository' + if test (count $argv) -eq 1 + set user (string split "/" -- 
$argv[1])[1] + set repo (string split "/" -- $argv[1])[2] + else + set user $argv[1] + set repo $argv[2] + end + + if test -d "$HOME/go/src/github.com/$user/$repo" + cd "$HOME/go/src/github.com/$user/$repo" + return 0 + end + mkdir -p "$HOME/go/src/github.com/$user" + cd "$HOME/go/src/github.com/$user" + git clone "git@github.com:$user/$repo.git" + cd "$HOME/go/src/github.com/$user/$repo" +end diff --git a/users/riking/dotfiles/fish/functions/prodaccess.fish b/users/riking/dotfiles/fish/functions/prodaccess.fish new file mode 100644 index 000000000000..876c14c5e31e --- /dev/null +++ b/users/riking/dotfiles/fish/functions/prodaccess.fish @@ -0,0 +1,6 @@ +function prodaccess + ssh-add "$HOME/.ssh/id_ecdsa_sk" + begin; ssh-add -L | grep -q 'ZgEu6S3SLatYN'; end || ssh-add "$HOME"/.ssh/id_ed25519 + begin; ssh-add -L | grep -q 'Gfh2S3kUwZ8A6'; end || ssh-add "$HOME"/.ssh/id_rsa.discourse + echo "signing test" | gpg --clearsign > /dev/null +end diff --git a/users/riking/dotfiles/fish/functions/reset-audio.fish b/users/riking/dotfiles/fish/functions/reset-audio.fish new file mode 100644 index 000000000000..eb48578a52bc --- /dev/null +++ b/users/riking/dotfiles/fish/functions/reset-audio.fish @@ -0,0 +1,4 @@ +function reset-audio --description "Resets pulse and alsa" + pulseaudio -k + sudo alsa force-reload +end diff --git a/users/riking/dotfiles/fish/functions/tvl-push.fish b/users/riking/dotfiles/fish/functions/tvl-push.fish new file mode 100644 index 000000000000..f04ac830c005 --- /dev/null +++ b/users/riking/dotfiles/fish/functions/tvl-push.fish @@ -0,0 +1,3 @@ +function tvl-push + git push origin HEAD:refs/for/canon +end diff --git a/users/riking/dotfiles/regolith/Xresources b/users/riking/dotfiles/regolith/Xresources new file mode 100644 index 000000000000..f47b93511af0 --- /dev/null +++ b/users/riking/dotfiles/regolith/Xresources @@ -0,0 +1,5 @@ +#include "/etc/regolith/styles/ubuntu/root" + +i3-wm.program.lock: xset s activate +i3-wm.program.1: /bin/sh $HOME/.config/regolith/initrc + diff --git a/users/riking/dotfiles/regolith/flags/first-time-setup-r1-4-1 b/users/riking/dotfiles/regolith/flags/first-time-setup-r1-4-1 new file mode 100644 index 000000000000..e69de29bb2d1 --- /dev/null +++ b/users/riking/dotfiles/regolith/flags/first-time-setup-r1-4-1 diff --git a/users/riking/dotfiles/regolith/flags/show-shortcuts b/users/riking/dotfiles/regolith/flags/show-shortcuts new file mode 100644 index 000000000000..e69de29bb2d1 --- /dev/null +++ b/users/riking/dotfiles/regolith/flags/show-shortcuts diff --git a/users/riking/dotfiles/regolith/flags/term-profile b/users/riking/dotfiles/regolith/flags/term-profile new file mode 100644 index 000000000000..e69de29bb2d1 --- /dev/null +++ b/users/riking/dotfiles/regolith/flags/term-profile diff --git a/users/riking/dotfiles/regolith/flags/ui-fingerprint b/users/riking/dotfiles/regolith/flags/ui-fingerprint new file mode 100644 index 000000000000..b35aedd2dc0f --- /dev/null +++ b/users/riking/dotfiles/regolith/flags/ui-fingerprint @@ -0,0 +1 @@ +ec33ee15ff705ac4b167ba6b7f6df3c2 diff --git a/users/riking/dotfiles/regolith/initrc b/users/riking/dotfiles/regolith/initrc new file mode 100755 index 000000000000..9b14613cd4eb --- /dev/null +++ b/users/riking/dotfiles/regolith/initrc @@ -0,0 +1,3 @@ + +xset s 900 5 +( xss-lock -n /usr/lib/xsecurelock/dimmer -l -- sh -c "XSECURELOCK_PASSWORD_PROMPT=time_hex XSECURELOCK_SHOW_DATETIME=1 XSECURELOCK_SAVER=saver_mpv XSECURELOCK_IMAGE_DURATION_SECONDS=10 XSECURELOCK_LIST_VIDEOS_COMMAND='find ~/Videos/Screensaver -type 
f' xsecurelock" )& diff --git a/users/riking/dotfiles/tmux.conf b/users/riking/dotfiles/tmux.conf new file mode 100644 index 000000000000..1f253cb27f0c --- /dev/null +++ b/users/riking/dotfiles/tmux.conf @@ -0,0 +1,6 @@ + +set -g mouse on +set-option -g prefix C-a +bind-key C-a send-prefix +bind | split-window -h +bind - split-window -v diff --git a/users/riking/keys.nix b/users/riking/keys.nix new file mode 100644 index 000000000000..6dd2ff18a30f --- /dev/null +++ b/users/riking/keys.nix @@ -0,0 +1,20 @@ +# SSH public keys +{ ... }: + +rec { + sk-ecljg09 = "sk-ecdsa-sha2-nistp256@openssh.com AAAAInNrLWVjZHNhLXNoYTItbmlzdHAyNTZAb3BlbnNzaC5jb20AAAAIbmlzdHAyNTYAAABBBBwJ7dJJUkvIK+bDsVsCsCZSlbs90aOLsHN7XesC8/AmLA5rIRLO8I5ADoOjsWAXl/WAgxqOMmB4LxZjoXWa1a0AAAAEc3NoOg== riking@sk-ECLJG09"; + sk-portable1 = "sk-ecdsa-sha2-nistp256@openssh.com AAAAInNrLWVjZHNhLXNoYTItbmlzdHAyNTZAb3BlbnNzaC5jb20AAAAIbmlzdHAyNTYAAABBBCfA8/0nKk4jXclWHjRZIuicPeyIo9oDwahpnWjEATr7YaFDAo632KTSgqlW0lpx8lX9alLsJRhFV2XaSurYw/EAAAAEc3NoOg== riking@sk-portable1"; + sk-portable2 = "sk-ecdsa-sha2-nistp256@openssh.com AAAAInNrLWVjZHNhLXNoYTItbmlzdHAyNTZAb3BlbnNzaC5jb20AAAAIbmlzdHAyNTYAAABBBEX3DXreQR93SR68QZHTdaVd5RjlRM8C0jcZ4kI4OZwqk7xuk68w3g22q2OM7O+chj+n1N3u0hLxi82QfRnwyasAAAAEc3NoOg== riking@sk-portable2"; + sk-desktop = "sk-ecdsa-sha2-nistp256@openssh.com AAAAInNrLWVjZHNhLXNoYTItbmlzdHAyNTZAb3BlbnNzaC5jb20AAAAIbmlzdHAyNTYAAABBBB+JvN8nAxD+yo49Ohf/UDq7Z049yvkURJIA1XNbvKaAkvfWnCN5m9vTC1FyGxTyCwy4QpD1pFP5fIn0X/kvvfgAAAAEc3NoOg== riking@sk-kane-DAN-A4"; + + u2f = [sk-ecljg09 sk-portable1 sk-portable2 sk-desktop]; + + ed1 = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIAjWIfFH2bAWMZG+HudV1MVHWUl83M/ZgEu6S3SLatYN riking@kane-DAN-A4"; + ed2 = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAICBblB4C9IgAijv+qN6Zs8TM2Sz7phQvVmRrcDn4VYNo riking@ECLJG09"; + + passworded = [ed1 ed2]; + + unprotected = []; + + all = u2f ++ passworded ++ unprotected; +} diff --git a/users/sterni/OWNERS b/users/sterni/OWNERS new file mode 100644 index 000000000000..cace4d0f3759 --- /dev/null +++ b/users/sterni/OWNERS @@ -0,0 +1,3 @@ +inherited: false +owners: + - sterni diff --git a/users/sterni/clhs-lookup/README.md b/users/sterni/clhs-lookup/README.md new file mode 100644 index 000000000000..1f42ff43a210 --- /dev/null +++ b/users/sterni/clhs-lookup/README.md @@ -0,0 +1,13 @@ +# clhs-lookup + +Simple cli to lookup symbols' documentation in a local copy of the +Common Lisp HyperSpec. + +## usage + +``` +clhs-lookup [--print] symbol [symbol [...]] + + --print Print documentation paths to stdout instead of + opening them with $BROWSER (defaults to xdg-open). +``` diff --git a/users/sterni/clhs-lookup/clhs-lookup.lisp b/users/sterni/clhs-lookup/clhs-lookup.lisp new file mode 100644 index 000000000000..0e61dd901f93 --- /dev/null +++ b/users/sterni/clhs-lookup/clhs-lookup.lisp @@ -0,0 +1,46 @@ +(in-package :clhs-lookup) +(declaim (optimize (safety 3))) + +(defun find-symbols-paths (syms clhs) + "Find pathnames to HyperSpec files describing the listed + symbol names (as strings). Paths are returned in the order + of the symbols given with missing entries removed." 
+ (check-type syms list) + (check-type clhs pathname) + (let* ((data-dir (merge-pathnames "HyperSpec/Data/" clhs)) + (data (merge-pathnames "Map_Sym.txt" data-dir)) + (found (make-hash-table :test #'equal)) + (syms (mapcar #'string-upcase syms))) + (with-open-file (s data :direction :input) + (loop + with missing = syms + for symbol-line = (read-line s nil :eof) + for path-line = (read-line s nil :eof) + until (or (eq symbol-line :eof) + (eq path-line :eof) + (null missing)) + for pos = (position symbol-line missing :test #'equal) + when pos + do (progn + (delete symbol-line missing) + (setf (gethash symbol-line found) path-line))) + ; TODO(sterni): get rid of Data/../ in path + (mapcar + (lambda (x) (merge-pathnames x data-dir)) + (remove nil + (mapcar (lambda (x) (gethash x found)) syms)))))) + +(defun main () + (let* ((browser (or (uiop:getenvp "BROWSER") "xdg-open")) + (args (uiop:command-line-arguments)) + (prin (member "--print" args :test #'equal)) + (syms (remove-if (lambda (x) (eq (char x 0) #\-)) args)) + (paths (find-symbols-paths syms *clhs-path*))) + (if (null paths) + (uiop:quit 1) + (dolist (p paths) + (if prin + (format t "~A~%" p) + (uiop:launch-program + (format nil "~A ~A" browser p) + :force-shell t)))))) diff --git a/users/sterni/clhs-lookup/default.nix b/users/sterni/clhs-lookup/default.nix new file mode 100644 index 000000000000..951b94d72f19 --- /dev/null +++ b/users/sterni/clhs-lookup/default.nix @@ -0,0 +1,36 @@ +{ pkgs, depot, ... }: + +let + inherit (pkgs) fetchzip writeText; + inherit (depot.nix) buildLisp; + inherit (builtins) replaceStrings; + + clhsVersion = "7-0"; + + clhs = fetchzip { + name = "HyperSpec-${replaceStrings [ "-" ] [ "." ] clhsVersion}"; + url = "ftp://ftp.lispworks.com/pub/software_tools/reference/HyperSpec-${clhsVersion}.tar.gz"; + sha256 = "1zsi35245m5sfb862ibzy0pzlph48wvlggnqanymhgqkpa1v20ak"; + stripRoot = false; + }; + + clhs-path = writeText "clhs-path.lisp" '' + (in-package :clhs-lookup.clhs-path) + (defparameter *clhs-path* (pathname "${clhs}/")) + ''; + + clhs-lookup = buildLisp.program { + name = "clhs-lookup"; + + deps = [ + (buildLisp.bundled "uiop") + ]; + + srcs = [ + ./packages.lisp + clhs-path + ./clhs-lookup.lisp + ]; + }; +in + clhs-lookup diff --git a/users/sterni/clhs-lookup/packages.lisp b/users/sterni/clhs-lookup/packages.lisp new file mode 100644 index 000000000000..d059b96ce9f0 --- /dev/null +++ b/users/sterni/clhs-lookup/packages.lisp @@ -0,0 +1,10 @@ +(defpackage :clhs-lookup.clhs-path + (:use :cl) + (:export :*clhs-path*)) + +(defpackage clhs-lookup + (:use :cl :uiop) + (:import-from :clhs-lookup.clhs-path :*clhs-path*) + (:export :main + :find-symbols-paths)) + diff --git a/users/sterni/htmlman/README.md b/users/sterni/htmlman/README.md new file mode 100644 index 000000000000..258233d4c4d2 --- /dev/null +++ b/users/sterni/htmlman/README.md @@ -0,0 +1,36 @@ +# htmlman + +static site generator for man pages intended for +rendering man page documentation viewable using +a web browser. + +## usage + +If you have a nix expression, `doc.nix`, like this: + +```nix +{ depot, ... 
}: + +depot.users.sterni.htmlman { + title = "foo project"; + pages = [ + { + name = "foo"; + section = 1; + } + { + name = "foo"; + section = 3; + path = ../devman/foo.3; + } + ]; + manDir = ../man; +} +``` + +You can run the following to directly deploy the resulting +documentation output to a specific target directory: + +```sh +nix-build -A deploy doc.nix && ./result target_directory +``` diff --git a/users/sterni/htmlman/default.nix b/users/sterni/htmlman/default.nix new file mode 100644 index 000000000000..b88bc264103b --- /dev/null +++ b/users/sterni/htmlman/default.nix @@ -0,0 +1,234 @@ +{ depot, lib, pkgs, ... }: + +let + inherit (depot.nix) + getBins + runExecline + yants + ; + + inherit (depot.tools) + cheddar + ; + + inherit (pkgs) + mandoc + coreutils + fetchurl + writers + ; + + bins = getBins cheddar [ "cheddar" ] + // getBins mandoc [ "mandoc" ] + // getBins coreutils [ "cat" "mv" "mkdir" ] + ; + + normalizeDrv = fetchurl { + url = "https://necolas.github.io/normalize.css/8.0.1/normalize.css"; + sha256 = "04jmvybwh2ks4dlnfa70sb3a3z3ig4cv0ya9rizjvm140xq1h22q"; + }; + + execlineStdoutInto = target: line: [ + "redirfd" "-w" "1" target + ] ++ line; + + # I will not write a pure nix markdown renderer + # I will not write a pure nix markdown renderer + # I will not write a pure nix markdown renderer + # I will not write a pure nix markdown renderer + # I will not write a pure nix markdown renderer + markdown = md: + let + html = runExecline.local "rendered-markdown" { + stdin = md; + } ([ + "importas" "-iu" "out" "out" + ] ++ execlineStdoutInto "$out" [ + bins.cheddar "--about-filter" "description.md" + ]); + in builtins.readFile html; + + indexTemplate = { title, description, pages ? [] }: '' + <!doctype html> + <html> + <head> + <meta charset="utf-8"> + <title>${title}</title> + <link rel="stylesheet" type="text/css" href="style.css"/> + </head> + <body> + <div class="index-text"> + <h1>${title}</h1> + ${markdown description} + <h2>man pages</h2> + <ul> + ${lib.concatMapStrings ({ name, section, ... }: '' + <li><a href="${name}.${toString section}.html">${name}(${toString section})</a></li> + '') pages} + </ul> + </div> + </body> + </html> + ''; + + defaultStyle = import ./defaultStyle.nix { }; + + # This deploy script automatically copies the build result into + # a TARGET directory and marks it as writeable optionally. + # It is exposed as the deploy attribute of the result of + # htmlman, so an htmlman expression can be used like this: + # nix-build -A deploy htmlman.nix && ./result target_dir + deployScript = title: drv: writers.writeDash "deploy-${title}" '' + usage() { + printf 'Usage: %s [-w] TARGET\n\n' "$0" + printf 'Deploy htmlman documentation to TARGET directory.\n\n' + printf ' -h Display this help message\n' + printf ' -w Make TARGET directory writeable\n' + } + + if test "$#" -lt 1; then + usage + exit 100 + fi + + writeable=false + + while test "$#" -gt 0; do + case "$1" in + -h) + usage + exit 0 + ;; + -w) + writeable=true + ;; + -*) + usage + exit 100 + ;; + *) + if test -z "$target"; then + target="$1" + else + echo "Too many arguments" + exit 100 + fi + ;; + esac + + shift + done + + if test -z "$target"; then + echo "Missing TARGET" + usage + exit 100 + fi + + set -ex + + mkdir -p "$target" + cp -RTL --reflink=auto "${drv}" "$target" + + if $writeable; then + chmod -R +w "$target" + fi + ''; + + htmlman = + { title + # title of the index page + , description ? "" + # description which is displayed after + # the main heading on the index page + , pages ? 
[] + # man pages of the following structure: + # { + # name : string; + # section : int; + # path : either path string; + # } + # path is optional, if it is not given, + # the man page source must be located at + # "${manDir}/${name}.${toString section}" + , manDir ? null + # directory in which man page sources are located + , style ? defaultStyle + # CSS to use as a string + , normalizeCss ? true + # whether to include normalize.css before the custom CSS + , linkXr ? "all" + # How to handle cross references in the html output: + # + # * none: don't convert cross references into hyperlinks + # * all: link all cross references as if they were + # rendered into $out by htmlman + # * inManDir: link to all man pages which have their source + # in `manDir` and use the format string defined + # in linkXrFallback for all other cross references. + , linkXrFallback ? "https://manpages.debian.org/unstable/%N.%S.en.html" + # fallback link to use if linkXr == "inManDir" and the man + # page is not in ${manDir}. Placeholders %N (name of page) + # and %S (section of page) can be used. See mandoc(1) for + # more information. + }: + + let + linkXrEnum = yants.enum "linkXr" [ "all" "inManDir" "none" ]; + + index = indexTemplate { + inherit title description pages; + }; + + resolvePath = { path ? null, name, section }: + if path != null + then path + else "${manDir}/${name}.${toString section}"; + + mandocOpts = lib.concatStringsSep "," ([ + "style=style.css" + ] ++ linkXrEnum.match linkXr { + all = [ "man=./%N.%S.html" ]; + inManDir = [ "man=./%N.%S.html;${linkXrFallback}" ]; + none = [ ]; + }); + + html = + runExecline.local "htmlman-${title}" { + derivationArgs = { + inherit index style; + passAsFile = [ "index" "style" ]; + }; + } ([ + "multisubstitute" [ + "importas" "-iu" "out" "out" + "importas" "-iu" "index" "indexPath" + "importas" "-iu" "style" "stylePath" + ] + "if" [ bins.mkdir "-p" "$out" ] + "if" [ bins.mv "$index" "\${out}/index.html" ] + "if" (execlineStdoutInto "\${out}/style.css" [ + "if" ([ + bins.cat + ] ++ lib.optional normalizeCss normalizeDrv + ++ [ + "$style" + ]) + ]) + # let mandoc check for available man pages + "execline-cd" "${manDir}" + ] ++ lib.concatMap ({ name, section, ... }@p: + execlineStdoutInto "\${out}/${name}.${toString section}.html" [ + "if" [ + bins.mandoc + "-mdoc" + "-T" "html" + "-O" mandocOpts + (resolvePath p) + ] + ]) pages); + in html // { + deploy = deployScript title html; + }; +in + htmlman diff --git a/users/sterni/htmlman/defaultStyle.nix b/users/sterni/htmlman/defaultStyle.nix new file mode 100644 index 000000000000..a44b5ef06934 --- /dev/null +++ b/users/sterni/htmlman/defaultStyle.nix @@ -0,0 +1,49 @@ +{ ... }: + +'' + body { + font-size: 1em; + line-height: 1.5; + font-family: serif; + background-color: #efefef; + } + + h1, h2, h3, h4, h5, h6 { + font-family: sans-serif; + font-size: 1em; + margin: 5px 0; + } + + h1 { + margin-top: 0; + } + + a:link, a:visited { + color: #3e7eff; + } + + h1 a, h2 a, h3 a, h4 a, h5 a, h6 a { + text-decoration: none; + } + + .manual-text, .index-text { + padding: 20px; + max-width: 800px; + background-color: white; + margin: 0 auto; + } + + table.head, table.foot { + display: none; + } + + .Nd { + display: inline; + } + + /* use same as cheddar for man pages */ + pre { + padding: 16px; + background-color: #f6f8fa; + } +'' diff --git a/users/sterni/keys.nix b/users/sterni/keys.nix new file mode 100644 index 000000000000..815f62ee080e --- /dev/null +++ b/users/sterni/keys.nix @@ -0,0 +1,7 @@ +{ ... 
}: + +{ + all = [ + "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIJk+KvgvI2oJTppMASNUfMcMkA2G5ZNt+HnWDzaXKLlo lukas@wolfgang" + ]; +} diff --git a/users/sterni/nix/char/all-chars.bin b/users/sterni/nix/char/all-chars.bin new file mode 100644 index 000000000000..017b909e8e8e --- /dev/null +++ b/users/sterni/nix/char/all-chars.bin @@ -0,0 +1,2 @@ + + !"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~ \ No newline at end of file diff --git a/users/sterni/nix/char/default.nix b/users/sterni/nix/char/default.nix new file mode 100644 index 000000000000..e6b8d6d7f168 --- /dev/null +++ b/users/sterni/nix/char/default.nix @@ -0,0 +1,90 @@ +{ depot, lib, pkgs, ... }: + +let + + inherit (depot.users.sterni.nix.flow) + cond + ; + + inherit (depot.nix) + yants + ; + + inherit (depot.users.sterni.nix) + string + ; + + # A char is the atomic element of a nix string + # which is essentially an array of arbitrary bytes + # as long as they are not a NUL byte. + # + # A char is neither a byte nor a unicode codepoint! + char = yants.restrict "char" (s: builtins.stringLength s == 1) yants.string; + + # integer representation of char + charval = yants.restrict "charval" (i: i >= 1 && i < 256) yants.int; + + allChars = builtins.readFile ./all-chars.bin; + + # Originally I searched a list for this, but came to the + # conclusion that this can never be fast enough in Nix. + # We therefore use a solution similar to infinisil's. + ordMap = builtins.listToAttrs + (lib.imap1 (i: v: { name = v; value = i; }) + (string.toChars allChars)); + + # Note on performance: + # chr and ord have been benchmarked using the following cases: + # + # builtins.map ord (lib.stringToCharacters allChars) + # builtins.map chr (builtins.genList (int.add 1) 255 + # + # The findings are as follows: + # 1. Searching through either strings using recursion is + # unbearably slow in Nix, leading to evaluation times + # of up to 3s for the following very small test case. + # This is why we use the trusty attribute set for ord. + # 2. String indexing is much faster than list indexing which + # is why we use the former for chr. + ord = c: ordMap."${c}"; + + chr = i: string.charAt (i - 1) allChars; + + asciiAlpha = c: + let + v = ord c; + in (v >= 65 && v <= 90) + || (v >= 97 && v <= 122); + + asciiNum = c: + let + v = ord c; + in v >= 48 && v <= 57; + + asciiAlphaNum = c: asciiAlpha c || asciiNum c; + +in { + inherit + allChars + char + charval + ord + chr + asciiAlpha + asciiNum + asciiAlphaNum + ; + + # originally I generated a nix file containing a list of + # characters, but infinisil uses a better way which I adapt + # which is using builtins.readFile instead of import. + __generateAllChars = pkgs.writers.writeC "generate-all-chars" {} '' + #include <stdio.h> + + int main(void) { + for(int i = 1; i <= 0xff; i++) { + putchar(i); + } + } + ''; +} diff --git a/users/sterni/nix/char/tests/default.nix b/users/sterni/nix/char/tests/default.nix new file mode 100644 index 000000000000..49b439adbb84 --- /dev/null +++ b/users/sterni/nix/char/tests/default.nix @@ -0,0 +1,31 @@ +{ depot, ... 
}: + +let + inherit (depot.nix.runTestsuite) + it + assertEq + runTestsuite + ; + + inherit (depot.users.sterni.nix) + char + string + int + fun + ; + + charList = string.toChars char.allChars; + + testAllCharConversion = it "tests conversion of all chars" [ + (assertEq "char.chr converts to char.allChars" + (builtins.genList (fun.rl char.chr (int.add 1)) 255) + charList) + (assertEq "char.ord converts from char.allChars" + (builtins.genList (int.add 1) 255) + (builtins.map char.ord charList)) + ]; + +in + runTestsuite "char" [ + testAllCharConversion + ] diff --git a/users/sterni/nix/flow/default.nix b/users/sterni/nix/flow/default.nix new file mode 100644 index 000000000000..b5783bd86deb --- /dev/null +++ b/users/sterni/nix/flow/default.nix @@ -0,0 +1,82 @@ +{ depot, ... }: + +let + + inherit (depot.nix) + yants + ; + + inherit (depot.users.sterni.nix) + fun + ; + + # we must avoid evaluating any of the sublists + # as they may contain conditions that throw + condition = yants.restrict "condition" + (ls: builtins.length ls == 2) + (yants.list yants.any); + + /* Like the common lisp macro: takes a list + of two elemented lists whose first element + is a boolean. The second element of the + first list that has true as its first + element is returned. + + Type: [ [ bool a ] ] -> a + + Example: + + cond [ + [ (builtins.isString true) 12 ] + [ (3 == 2) 13 ] + [ true 42 ] + ] + + => 42 + */ + cond = conds: switch true conds; + + /* Generic pattern match-ish construct for nix. + Takes a bunch of lists which are of length + two and checks the first element for either + a predicate or a value. The second value of + the first list which either has a value equal + to or a function that evaluates to true for + the given value. + + Type: a -> [ [ (function | a) b ] ] -> b + + Example: + + switch "foo" [ + [ "smol" "SMOL!!!" ] + [ (x: builtins.stringLength x <= 3) "smol-ish" ] + [ (fun.const true) "not smol" ] + ] + + => "smol-ish" + */ + switch = x: conds: + if builtins.length conds == 0 + then builtins.throw "exhausted all conditions" + else + let + c = condition (builtins.head conds); + s = builtins.head c; + b = + if builtins.isFunction s + then s x + else x == s; + in + if b + then builtins.elemAt c 1 + else switch x (builtins.tail conds); + + + +in { + inherit + cond + switch + ; +} diff --git a/users/sterni/nix/flow/tests/default.nix b/users/sterni/nix/flow/tests/default.nix new file mode 100644 index 000000000000..54cea01858e7 --- /dev/null +++ b/users/sterni/nix/flow/tests/default.nix @@ -0,0 +1,39 @@ +{ depot, ... }: + +let + + inherit (depot.nix.runTestsuite) + runTestsuite + it + assertEq + assertThrows + ; + + inherit (depot.users.sterni.nix.flow) + cond + match + ; + + dontEval = builtins.throw "this should not get evaluated"; + + testCond = it "tests cond" [ + (assertThrows "malformed cond list" + (cond [ [ true 1 2 ] [ false 1 ] ])) + (assertEq "last is true" "last" + (cond [ + [ false dontEval] + [ false dontEval ] + [ true "last" ] + ])) + (assertEq "first is true" 1 + (cond [ + [ true 1 ] + [ true dontEval ] + [ true dontEval ] + ])) + ]; + +in + runTestsuite "nix.flow" [ + testCond + ] diff --git a/users/sterni/nix/fun/default.nix b/users/sterni/nix/fun/default.nix new file mode 100644 index 000000000000..a32b70a62b79 --- /dev/null +++ b/users/sterni/nix/fun/default.nix @@ -0,0 +1,45 @@ +{ depot, lib, ... }: + +let + + inherit (lib) + id + ; + + # Simple function composition, + # application is right to left. 
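+  # A few small worked examples of the combinators defined below:
+  #   (rl (x: x + 1) (x: x * 2)) 3  =>  7   # the (*2) runs first, then (+1)
+  #   (lr (x: x + 1) (x: x * 2)) 3  =>  8   # the (+1) runs first, then (*2)
+  #   lrs 3 [ (x: x + 1) (x: x * 2) ]  =>  8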
+ rl = f1: f2: + (x: f1 (f2 x)); + + # Compose a list of functions, + # application is right to left. + rls = fs: + builtins.foldl' (fOut: f: lr f fOut) id fs; + + # Simple function composition, + # application is left to right. + lr = f1: f2: + (x: f2 (f1 x)); + + # Compose a list of functions, + # application is left to right + lrs = x: fs: + builtins.foldl' (v: f: f v) x fs; + +in + +{ + inherit (lib) + fix + flip + const + ; + + inherit + id + rl + rls + lr + lrs + ; +} diff --git a/users/sterni/nix/int/default.nix b/users/sterni/nix/int/default.nix new file mode 100644 index 000000000000..b3157571272f --- /dev/null +++ b/users/sterni/nix/int/default.nix @@ -0,0 +1,124 @@ +{ depot, lib, ... }: + +let + + # TODO(sterni): implement nix.float and figure out which of these + # functions can be split out into a common nix.num + # library. + + inherit (depot.users.sterni.nix) + string + ; + + inherit (builtins) + bitOr + bitAnd + bitXor + mul + div + add + sub + ; + + abs = i: if i < 0 then -i else i; + + exp = base: pow: + if pow > 0 + then base * (exp base (pow - 1)) + else if pow < 0 + then 1.0 / exp base (abs pow) + else 1; + + bitShiftR = bit: count: + if count == 0 + then bit + else div (bitShiftR bit (count - 1)) 2; + + bitShiftL = bit: count: + if count == 0 + then bit + else 2 * (bitShiftL bit (count - 1)); + + hexdigits = "0123456789ABCDEF"; + + toHex = int: + let + go = i: + if i == 0 + then "" + else go (bitShiftR i 4) + + string.charAt (bitAnd i 15) hexdigits; + sign = lib.optionalString (int < 0) "-"; + in + if int == 0 + then "0" + else "${sign}${go (abs int)}"; + + fromHexMap = builtins.listToAttrs + (lib.imap0 (i: c: { name = c; value = i; }) + (lib.stringToCharacters hexdigits)); + + fromHex = literal: + let + negative = string.charAt 0 literal == "-"; + start = if negative then 1 else 0; + len = builtins.stringLength literal; + # reversed list of all digits + digits = builtins.genList + (i: string.charAt (len - 1 - i) literal) + (len - start); + parsed = builtins.foldl' + (v: d: { + val = v.val + (fromHexMap."${d}" * v.mul); + mul = v.mul * 16; + }) + { val = 0; mul = 1; } digits; + in + if negative + then -parsed.val + else parsed.val; + + # A nix integer is a 64bit signed integer + maxBound = 9223372036854775807; + + # fun fact: -9223372036854775808 is the lower bound + # for a nix integer (as you would expect), but you can't + # use it as an integer literal or you'll be greeted with: + # error: invalid integer '9223372036854775808' + # This is because all int literals when parsing are + # positive, negative "literals" are positive literals + # which are preceded by the arithmetric negation operator. + minBound = -9223372036854775807 - 1; + + odd = x: bitAnd x 1 == 1; + even = x: bitAnd x 1 == 0; + + # div and mod behave like quot and rem in Haskell, + # i. e. they truncate towards 0 + mod = a: b: let res = a / b; in a - (res * b); + + inRange = a: b: x: x >= a && x <= b; + +in { + inherit + maxBound + minBound + abs + exp + odd + even + add + sub + mul + div + mod + bitShiftR + bitShiftL + bitOr + bitAnd + bitXor + toHex + fromHex + inRange + ; +} diff --git a/users/sterni/nix/int/tests/default.nix b/users/sterni/nix/int/tests/default.nix new file mode 100644 index 000000000000..fac45dd251e1 --- /dev/null +++ b/users/sterni/nix/int/tests/default.nix @@ -0,0 +1,203 @@ +{ depot, lib, ... 
}: + +let + + inherit (depot.nix.runTestsuite) + runTestsuite + it + assertEq + ; + + inherit (depot.users.sterni.nix) + int + string + fun + ; + + testBounds = it "checks minBound and maxBound" [ + # this is gonna blow up in my face because + # integer overflow is undefined behavior in + # C++, so most likely anything could happen? + (assertEq "maxBound is the maxBound" true + (int.maxBound + 1 < int.maxBound)) + (assertEq "minBound is the minBound" true + (int.minBound - 1 > int.minBound)) + (assertEq "maxBound overflows to minBound" + (int.maxBound + 1) + int.minBound) + (assertEq "minBound overflows to maxBound" + (int.minBound - 1) + int.maxBound) + ]; + + expectedBytes = [ + "00" "01" "02" "03" "04" "05" "06" "07" "08" "09" "0A" "0B" "0C" "0D" "0E" "0F" + "10" "11" "12" "13" "14" "15" "16" "17" "18" "19" "1A" "1B" "1C" "1D" "1E" "1F" + "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "2A" "2B" "2C" "2D" "2E" "2F" + "30" "31" "32" "33" "34" "35" "36" "37" "38" "39" "3A" "3B" "3C" "3D" "3E" "3F" + "40" "41" "42" "43" "44" "45" "46" "47" "48" "49" "4A" "4B" "4C" "4D" "4E" "4F" + "50" "51" "52" "53" "54" "55" "56" "57" "58" "59" "5A" "5B" "5C" "5D" "5E" "5F" + "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "6A" "6B" "6C" "6D" "6E" "6F" + "70" "71" "72" "73" "74" "75" "76" "77" "78" "79" "7A" "7B" "7C" "7D" "7E" "7F" + "80" "81" "82" "83" "84" "85" "86" "87" "88" "89" "8A" "8B" "8C" "8D" "8E" "8F" + "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "9A" "9B" "9C" "9D" "9E" "9F" + "A0" "A1" "A2" "A3" "A4" "A5" "A6" "A7" "A8" "A9" "AA" "AB" "AC" "AD" "AE" "AF" + "B0" "B1" "B2" "B3" "B4" "B5" "B6" "B7" "B8" "B9" "BA" "BB" "BC" "BD" "BE" "BF" + "C0" "C1" "C2" "C3" "C4" "C5" "C6" "C7" "C8" "C9" "CA" "CB" "CC" "CD" "CE" "CF" + "D0" "D1" "D2" "D3" "D4" "D5" "D6" "D7" "D8" "D9" "DA" "DB" "DC" "DD" "DE" "DF" + "E0" "E1" "E2" "E3" "E4" "E5" "E6" "E7" "E8" "E9" "EA" "EB" "EC" "ED" "EE" "EF" + "F0" "F1" "F2" "F3" "F4" "F5" "F6" "F7" "F8" "F9" "FA" "FB" "FC" "FD" "FE" "FF" + ]; + + hexByte = i: string.fit { width = 2; char = "0"; } (int.toHex i); + + hexInts = [ + { left = 0; right = "0"; } + { left = 1; right = "1"; } + { left = 11; right = "B"; } + { left = 123; right = "7B"; } + { left = 9000; right = "2328"; } + { left = 2323; right = "913"; } + { left = 4096; right = "1000"; } + { left = int.maxBound; right = "7FFFFFFFFFFFFFFF"; } + { left = int.minBound; right = "-8000000000000000"; } + ]; + + testHex = it "checks conversion to hex" (lib.flatten [ + (lib.imap0 (i: hex: [ + (assertEq "hexByte ${toString i} == ${hex}" (hexByte i) hex) + (assertEq "${toString i} == fromHex ${hex}" i (int.fromHex hex)) + ]) expectedBytes) + (builtins.map ({ left, right }: [ + (assertEq "toHex ${toString left} == ${right}" (int.toHex left) right) + (assertEq "${toString left} == fromHex ${right}" left (int.fromHex right)) + ]) hexInts) + ]); + + testBasic = it "checks basic int operations" [ + (assertEq "122 is even" (int.even 122 && !(int.odd 122)) true) + (assertEq "123 is odd" (int.odd 123 && !(int.even 123)) true) + (assertEq "abs -4959" (int.abs (-4959)) 4959) + ]; + + expNumbers = [ + { left = -3; right = 0.125; } + { left = -2; right = 0.25; } + { left = -1; right = 0.5; } + { left = 0; right = 1; } + { left = 1; right = 2; } + { left = 2; right = 4; } + { left = 3; right = 8; } + { left = 4; right = 16; } + { left = 5; right = 32; } + { left = 16; right = 65536; } + ]; + + testExp = it "checks exponentiation" + (builtins.map ({ left, right }: + assertEq + "2 ^ ${toString left} == ${toString right}" + 
(int.exp 2 left) right) expNumbers); + + shifts = [ + { a = 2; b = 5; c = 64; op = "<<"; } + { a = -2; b = 5; c = -64; op = "<<"; } + { a = 123; b = 4; c = 1968; op = "<<"; } + { a = 1; b = 8; c = 256; op = "<<"; } + { a = 256; b = 8; c = 1; op = ">>"; } + { a = 374; b = 2; c = 93; op = ">>"; } + { a = 2; b = 2; c = 0; op = ">>"; } + { a = 99; b = 9; c = 0; op = ">>"; } + ]; + + checkShift = { a, b, c, op }@args: + let + f = string.match op { + "<<" = int.bitShiftL; + ">>" = int.bitShiftR; + }; + in assertEq "${toString a} ${op} ${toString b} == ${toString c}" (f a b) c; + + checkShiftRDivExp = n: + assertEq "${toString n} >> 5 == ${toString n} / 2 ^ 5" + (int.bitShiftR n 5) (int.div n (int.exp 2 5)); + + checkShiftLMulExp = n: + assertEq "${toString n} >> 6 == ${toString n} * 2 ^ 6" + (int.bitShiftL n 5) (int.mul n (int.exp 2 5)); + + testBit = it "checks bitwise operations" (lib.flatten [ + (builtins.map checkShift shifts) + (builtins.map checkShiftRDivExp [ + 1 + 2 + 3 + 5 + 7 + 23 + 1623 + 238 + 34 + 348 + 2834 + 834 + 348 + ]) + (builtins.map checkShiftLMulExp [ + 1 + 2 + 3 + 5 + 7 + 23 + 384 + 3 + 2 + 5991 + 85109 + 38 + ]) + ]); + + divisions = [ + { a = 2; b = 1; c = 2; mod = 0;} + { a = 2; b = 2; c = 1; mod = 0;} + { a = 20; b = 10; c = 2; mod = 0;} + { a = 12; b = 5; c = 2; mod = 2;} + { a = 23; b = 4; c = 5; mod = 3;} + ]; + + checkDiv = n: { a, b, c, mod }: [ + (assertEq "${n}: div result" (int.div a b) c) + (assertEq "${n}: mod result" (int.mod a b) mod) + (assertEq "${n}: divMod law" ((int.div a b) * b + (int.mod a b)) a) + ]; + + testDivMod = it "checks integer division and modulo" + (lib.flatten [ + (builtins.map (checkDiv "+a / +b") divisions) + (builtins.map (fun.rl (checkDiv "-a / +b") (x: x // { + a = -x.a; + c = -x.c; + mod = -x.mod; + })) divisions) + (builtins.map (fun.rl (checkDiv "+a / -b") (x: x // { + b = -x.b; + c = -x.c; + })) divisions) + (builtins.map (fun.rl (checkDiv "-a / -b") (x: x // { + a = -x.a; + b = -x.b; + mod = -x.mod; + })) divisions) + ]); + +in + runTestsuite "nix.int" [ + testBounds + testHex + testBasic + testExp + testBit + testDivMod + ] diff --git a/users/sterni/nix/string/default.nix b/users/sterni/nix/string/default.nix new file mode 100644 index 000000000000..3fe7c04618c3 --- /dev/null +++ b/users/sterni/nix/string/default.nix @@ -0,0 +1,76 @@ +{ depot, lib, ... }: + +let + + inherit (depot.users.sterni.nix.char) + chr + ord + ; + + inherit (depot.users.sterni.nix) + int + flow + ; + + take = n: s: + builtins.substring 0 n s; + + drop = n: s: + builtins.substring n int.maxBound s; + + charAt = i: s: + let + r = builtins.substring i 1 s; + in if r == "" then null else r; + + charIndex = char: s: + let + len = builtins.stringLength s; + go = i: + flow.cond [ + [ (i >= len) null ] + [ (charAt i s == char) i ] + [ true (go (i + 1)) ] + ]; + in go 0; + + toChars = lib.stringToCharacters; + fromChars = lib.concatStrings; + + toBytes = str: + builtins.map ord (toChars str); + + fromBytes = is: lib.concatMapStrings chr is; + + pad = { left ? 0, right ? 0, char ? " " }: s: + let + leftS = fromChars (builtins.genList (_: char) left); + rightS = fromChars (builtins.genList (_: char) right); + in "${leftS}${s}${rightS}"; + + fit = { char ? " ", width, side ? 
"left" }: s: + let + diff = width - builtins.stringLength s; + in + if diff <= 0 + then s + else pad { inherit char; "${side}" = diff; } s; + + # pattern matching for strings only + match = val: matcher: matcher."${val}"; + +in { + inherit + take + drop + charAt + charIndex + toBytes + fromBytes + toChars + fromChars + pad + fit + match + ; +} diff --git a/users/sterni/nix/string/tests/default.nix b/users/sterni/nix/string/tests/default.nix new file mode 100644 index 000000000000..2caecbfa7b3f --- /dev/null +++ b/users/sterni/nix/string/tests/default.nix @@ -0,0 +1,65 @@ +{ depot, ... }: + +let + + inherit (depot.users.sterni.nix) + string + ; + + inherit (depot.nix.runTestsuite) + it + assertEq + runTestsuite + ; + + testTakeDrop = it "tests take and drop" [ + (assertEq "take" + (string.take 5 "five and more") + "five ") + (assertEq "drop" + (string.drop 2 "coin") + "in") + (assertEq "take out of bounds" + (string.take 100 "foo") + "foo") + (assertEq "drop out of bounds" + (string.drop 42 "lol") + "") + ]; + + testIndexing = it "tests string indexing" [ + (assertEq "normal charAt" + (string.charAt 3 "helo") + "o") + (assertEq "out of bounds charAt" + (string.charAt 5 "helo") + null) + ]; + + testFinding = it "tests finding in strings" [ + (assertEq "normal charIndex" + (string.charIndex "d" "abcdefghijkl") + 3) + (assertEq "charIndex no match" + (string.charIndex "w" "zZzZzzzZZZ") + null) + ]; + + dontEval = builtins.throw "this should not get evaluated"; + + testMatch = it "tests match" [ + (assertEq "basic match usage" 42 + (string.match "answer" { + "answer" = 42; + "banana" = dontEval; + "maleur" = dontEval; + })) + ]; + +in + runTestsuite "nix.string" [ + testTakeDrop + testIndexing + testFinding + testMatch + ] diff --git a/users/sterni/nix/url/default.nix b/users/sterni/nix/url/default.nix new file mode 100644 index 000000000000..ce7ed9b83437 --- /dev/null +++ b/users/sterni/nix/url/default.nix @@ -0,0 +1,42 @@ +{ depot, lib, ... }: + +let + + inherit (depot.users.sterni.nix) + char + int + string + ; + + reserved = c: builtins.elem c [ + "!" "#" "$" "&" "'" "(" ")" + "*" "+" "," "/" ":" ";" "=" + "?" "@" "[" "]" + ]; + + unreserved = c: char.asciiAlphaNum c + || builtins.elem c [ "-" "_" "." "~" ]; + + percentEncode = c: + if unreserved c + then c + else "%" + (string.fit { + width = 2; + char = "0"; + side = "left"; + } (int.toHex (char.ord c))); + + encode = { leaveReserved ? false }: s: + let + chars = lib.stringToCharacters s; + tr = c: + if leaveReserved && reserved c + then c + else percentEncode c; + in lib.concatStrings (builtins.map tr chars); + +in { + inherit + encode + ; +} diff --git a/users/sterni/nix/url/tests/default.nix b/users/sterni/nix/url/tests/default.nix new file mode 100644 index 000000000000..f58cf12a02b2 --- /dev/null +++ b/users/sterni/nix/url/tests/default.nix @@ -0,0 +1,49 @@ +{ depot, ... 
}: + +let + + inherit (depot.nix.runTestsuite) + it + assertEq + runTestsuite + ; + + inherit (depot.users.sterni.nix) + url + ; + + checkEncoding = args: { left, right }: + assertEq "encode ${builtins.toJSON left} == ${builtins.toJSON right}" + (url.encode args left) right; + + unreserved = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789.-_~"; + + encodeExpected = [ + { left = "Laguna Beach"; right = "Laguna%20Beach"; } + { left = "👾 Exterminate!"; right = "%F0%9F%91%BE%20Exterminate%21"; } + { left = unreserved; right = unreserved; } + { + left = "`!@#$%^&*()+={}[]:;'\\|<>,?/ \""; + right = "%60%21%40%23%24%25%5E%26%2A%28%29%2B%3D%7B%7D%5B%5D%3A%3B%27%5C%7C%3C%3E%2C%3F%2F%20%22"; + } + ]; + + testEncode = it "checks url.encode" + (builtins.map (checkEncoding {}) encodeExpected); + + testLeaveReserved = it "checks that leaveReserved is like id for valid URLs" + (builtins.map (x: checkEncoding { leaveReserved = true; } { left = x; right = x; }) [ + "ftp://ftp.is.co.za/rfc/rfc1808.txt" + "http://www.ietf.org/rfc/rfc2396.txt" + "ldap://[2001:db8::7]/c=GB?objectClass?one" + "mailto:John.Doe@example.com" + "news:comp.infosystems.www.servers.unix" + "tel:+1-816-555-1212" + "telnet://192.0.2.16:80/" + "urn:oasis:names:specification:docbook:dtd:xml:4.1.2" + ]); +in + runTestsuite "nix.url" [ + testEncode + testLeaveReserved + ] diff --git a/users/sterni/nix/utf8/default.nix b/users/sterni/nix/utf8/default.nix new file mode 100644 index 000000000000..713f1f57cbe6 --- /dev/null +++ b/users/sterni/nix/utf8/default.nix @@ -0,0 +1,208 @@ +{ depot, lib, ... }: + +let + + # TODO(sterni): encode + + inherit (depot.users.sterni.nix) + char + flow + fun + int + string + util + ; + + /* (Internal) function to determine the amount + bytes left in a UTF-8 byte sequence from the + first byte. + + This function will throw if the given first + byte is ill-formed, but will not detect all + cases of ill-formed-ness. + + Based on table 3-6. from The Unicode Standard, + Version 13.0, section 3.9. + + Type: integer -> integer + */ + byteCount = i: flow.cond [ + [ (int.bitAnd i 128 == 0) 1 ] + [ (int.bitAnd i 224 == 192) 2 ] + [ (int.bitAnd i 240 == 224) 3 ] + [ (int.bitAnd i 248 == 240) 4 ] + [ true (builtins.throw "Ill-formed first byte ${int.toHex i}") ] + ]; + + /* (Internal) function to check if a given byte in + an UTF-8 byte sequence is well-formed. + + Based on table 3-7. from The Unicode Standard, + Version 13.0, section 3.9. + + Throws if the first byte is invalid. + + Type: integer -> integer -> (integer -> bool) + */ + wellFormedByte = + # first byte's integer value + first: + # byte position as an index starting with 0 + pos: + let + defaultRange = int.inRange 128 191; + in + # The first byte is either ASCII which requires no checks + # or we automatically check it when we check the subsequent + # bytes. The downside is that this may generate bad error + # messages in very rare cases. 
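+      # As an illustration of that table (see the switch below): a
+      # sequence whose first byte is 0xE0 must have its second byte in
+      # 0xA0..0xBF (160..191), so the sequence E0 9F 80 is rejected as
+      # ill-formed.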
+ if pos == 0 + then lib.const true + else if pos > 1 # 3rd and 4th byte have only one validity rule + then defaultRange + else assert pos == 1; flow.switch first [ + [ (int.inRange 194 223) defaultRange ] # C2..DF + [ 224 (int.inRange 160 191) ] # E0 + [ (int.inRange 225 236) defaultRange ] # E1..EC + [ 237 (int.inRange 128 159) ] # ED + [ (int.inRange 238 239) defaultRange ] # EE..EF + [ 240 (int.inRange 144 191) ] # F0 + [ (int.inRange 241 243) defaultRange ] # F1..F3 + [ 244 (int.inRange 128 143) ] # F4 + [ + (fun.const true) + (builtins.throw "Invalid first byte ${int.toHex first}") + ] + ]; + + /* Iteration step for decoding an UTF-8 byte sequence. + It decodes incrementally, i. e. it has to be fed + one byte at a time and then returns either a + new state or a final result. + + If the resulting attribute set contains the attribute + result, it is finished and the decoded codepoint is + contained in that attribute. In all other cases, + pass the returned set to step again along with + a new byte. The initial state to pass is the empty + set. + + Extra attributes are always passed through, so you + can pass extra state. Be sure not to use result, + pos, code, first or count. + + This function will throw with a fairly detailed + message if it encounters ill-formed bytes. + + The implementation is based on The Unicode Standard, + Version 13.0, section 3.9, especially table 3-6. + + Type: { ... } -> string -> ({ result :: integer, ... } | { ... }) + + Example: utf8.step {} "f" + => { result = 102; } + */ + step = { pos ? 0, code ? 0, ... }@args: byte: + let + value = char.ord byte; + # first byte is context for well-formed-ness + first = args.first or value; + count = args.count or (byteCount first); + newCode = + if count == 1 + then int.bitAnd 127 first # ascii character + else # multi byte UTF-8 sequence + let + # Calculate the bitmask for extracting the + # codepoint data in the current byte. + # If the codepoint is not ASCII, the bits + # used for codepoint data differ depending + # on the byte position and overall byte + # count. The first byte always ignores + # the (count + 1) most significant bits. + # For all subsequent bytes, the 2 most + # significant bits need to be ignored. + # See also table 3-6. + mask = + if pos == 0 + then int.exp 2 (8 - (count + 1)) - 1 + else 63; + # UTF-8 uses the 6 least significant bits in all + # subsequent bytes after the first one. Therefore + # We can determine the amount we need to shift + # the current value by the amount of bytes left. + offset = (count - (pos + 1)) * 6; + in + code + (int.bitShiftL (int.bitAnd mask value) offset); + illFormedMsg = + "Ill-formed byte ${int.toHex value} at position ${toString pos} in ${toString count} byte UTF-8 sequence"; + in + if !(wellFormedByte first pos value) then builtins.throw illFormedMsg + else if pos + 1 == count + then (builtins.removeAttrs args [ # allow extra state being passed through + "count" + "code" + "pos" + "first" + ]) // { result = newCode; } + else (builtins.removeAttrs args [ "result" ]) // { + inherit count first; + code = newCode; + pos = pos + 1; + }; + + /* Decode an UTF-8 string into a list of codepoints. + + Throws if the string is ill-formed UTF-8. + + Type: string -> [ integer ] + */ + # TODO(sterni): option to fallback to replacement char instead of failure + decode = s: + let + iter = { codes ? [], ... 
}@args: byte: + let + res = step args byte; + in + # foldl' forceValues the calculate value only at the end + # this makes the thunk grow large enough to cause a stack + # overflow with sufficiently large strings. To avoid this + # we always deepSeq the result which also keeps memory + # usage of decode reasonable. + builtins.deepSeq res + (if res ? result + then res // { + codes = codes ++ [ res.result ]; + } + else res); + iterResult = + builtins.foldl' iter {} (string.toChars s); + earlyEndMsg = + if iterResult ? count && iterResult ? pos + then "Missing ${toString (with iterResult; count - pos)} bytes at end of input" + else "Unexpected end of input"; + in + if iterResult ? result + then iterResult.codes + else builtins.throw earlyEndMsg; + + /* Decodes an UTF-8 string, but doesn't throw on error. + Instead it returns null. + + Type: string -> ( [ integer ] | null) + */ + decodeSafe = s: + let + res = builtins.tryEval (decode s); + in + if res.success + then res.value + else null; + +in { + inherit + decode + decodeSafe + step + ; +} diff --git a/users/sterni/nix/utf8/tests/default.nix b/users/sterni/nix/utf8/tests/default.nix new file mode 100644 index 000000000000..d9d8ae77105d --- /dev/null +++ b/users/sterni/nix/utf8/tests/default.nix @@ -0,0 +1,121 @@ +{ depot, lib, ... }: + +let + + inherit (depot.third_party) + runCommandLocal + ; + + inherit (depot.nix.runTestsuite) + runTestsuite + it + assertEq + assertThrows + assertDoesNotThrow + ; + + inherit (depot.users.Profpatsch.writers) + rustSimple + ; + + inherit (depot.users.sterni.nix) + int + utf8 + string + char + ; + + rustDecoder = rustSimple { + name = "utf8-decode"; + } '' + use std::io::{self, Read}; + fn main() -> std::io::Result<()> { + let mut buffer = String::new(); + io::stdin().read_to_string(&mut buffer)?; + + print!("[ "); + + for c in buffer.chars() { + print!("{} ", u32::from(c)); + } + + print!("]"); + + Ok(()) + } + ''; + + rustDecode = s: + let + expr = runCommandLocal "${s}-decoded" {} '' + printf '%s' ${lib.escapeShellArg s} | ${rustDecoder} > $out + ''; + in import expr; + + hexDecode = l: + utf8.decode (string.fromBytes (builtins.map int.fromHex l)); + + testFailures = it "checks UTF-8 decoding failures" [ + (assertThrows "emtpy bytestring throws" (utf8.decode "")) + (assertThrows "truncated UTF-8 string throws" (hexDecode [ "F0" "9F" ])) + # examples from The Unicode Standard + (assertThrows "ill-formed: C0 AF" (hexDecode [ "C0" "AF" ])) + (assertThrows "ill-formed: E0 9F 80" (hexDecode [ "E0" "9F" "80" ])) + (assertEq "well-formed: F4 80 83 92" (hexDecode [ "F4" "80" "83" "92" ]) [ 1048786 ]) + ]; + + testAscii = it "checks decoding of ascii strings" + (builtins.map (s: assertEq "ASCII decoding is equal to UTF-8 decoding for \"${s}\"" + (string.toBytes s) (utf8.decode s)) [ + "foo bar" + "hello\nworld" + "carriage\r\nreturn" + "1238398494829304 []<><>({})[]!!)" + (string.take 127 char.allChars) + ]); + + randomUnicode = [ + "🥰👨👨👧👦🐈⬛👩🏽🦰" + # https://kermitproject.org/utf8.html + "ᚠᛇᚻ᛫ᛒᛦᚦ᛫ᚠᚱᚩᚠᚢᚱ᛫ᚠᛁᚱᚪ᛫ᚷᛖᚻᚹᛦᛚᚳᚢᛗ" + "An preost wes on leoden, Laȝamon was ihoten" + "Sîne klâwen durh die wolken sint geslagen," + "Τὴ γλῶσσα μοῦ ἔδωσαν ἑλληνικὴ" + "На берегу пустынных волн" + "ვეპხის ტყაოსანი შოთა რუსთაველი" + "யாமறிந்த மொழிகளிலே தமிழ்மொழி போல் இனிதாவது எங்கும் காணோம், " + "ಬಾ ಇಲ್ಲಿ ಸಂಭವಿಸು " + ]; + + # https://kermitproject.org/utf8.html + glassSentences = [ + "Euro Symbol: €." + "Greek: Μπορώ να φάω σπασμένα γυαλιά χωρίς να πάθω τίποτα." + "Íslenska / Icelandic: Ég get etið gler án þess að meiða mig." 
+ "Polish: Mogę jeść szkło, i mi nie szkodzi." + "Romanian: Pot să mănânc sticlă și ea nu mă rănește." + "Ukrainian: Я можу їсти шкло, й воно мені не пошкодить." + "Armenian: Կրնամ ապակի ուտել և ինծի անհանգիստ չըներ։" + "Georgian: მინას ვჭამ და არა მტკივა." + "Hindi: मैं काँच खा सकता हूँ, मुझे उस से कोई पीडा नहीं होती." + "Hebrew(2): אני יכול לאכול זכוכית וזה לא מזיק לי." + "Yiddish(2): איך קען עסן גלאָז און עס טוט מיר נישט װײ." + "Arabic(2): أنا قادر على أكل الزجاج و هذا لا يؤلمني." + "Japanese: 私はガラスを食べられます。それは私を傷つけません。" + "Thai: ฉันกินกระจกได้ แต่มันไม่ทำให้ฉันเจ็บ " + ]; + + testDecoding = it "checks decoding of UTF-8 strings against Rust's String" + (builtins.map + (s: assertEq "Decoding of “${s}” is correct" (utf8.decode s) (rustDecode s)) + (lib.flatten [ + glassSentences + randomUnicode + ])); + +in + runTestsuite "nix.utf8" [ + testFailures + testAscii + testDecoding + ] diff --git a/users/tazjin/OWNERS b/users/tazjin/OWNERS new file mode 100644 index 000000000000..c86f6eaa6adb --- /dev/null +++ b/users/tazjin/OWNERS @@ -0,0 +1,3 @@ +inherited: false +owners: + - tazjin diff --git a/users/tazjin/aoc2019/default.nix b/users/tazjin/aoc2019/default.nix new file mode 100644 index 000000000000..ce3146d1f74e --- /dev/null +++ b/users/tazjin/aoc2019/default.nix @@ -0,0 +1,22 @@ +# Solutions for Advent of Code 2019, written in Emacs Lisp. +# +# For each day a new file is created as "solution-day$n.el". +{ depot, ... }: + +let + inherit (builtins) attrNames filter head listToAttrs match readDir; + dir = readDir ./.; + matchSolution = match "solution-(.*)\.el"; + isSolution = f: (matchSolution f) != null; + getDay = f: head (matchSolution f); + + solutionFiles = filter (e: dir."${e}" == "regular" && isSolution e) (attrNames dir); + solutions = map (f: let day = getDay f; in { + name = day; + value = depot.nix.writeElispBin { + name = "aoc2019"; + deps = p: with p; [ dash s ht ]; + src = ./. 
+ ("/" + f); + }; + }) solutionFiles; +in listToAttrs solutions diff --git a/users/tazjin/aoc2019/solution-day1.el b/users/tazjin/aoc2019/solution-day1.el new file mode 100644 index 000000000000..d805c22ec870 --- /dev/null +++ b/users/tazjin/aoc2019/solution-day1.el @@ -0,0 +1,28 @@ +;; Advent of Code 2019 - Day 1 +(require 'dash) + +;; Puzzle 1: + +(defvar day-1/input + '(83285 96868 121640 51455 128067 128390 141809 52325 68310 140707 124520 149678 + 87961 52040 133133 52203 117483 85643 84414 86558 65402 122692 88565 61895 + 126271 128802 140363 109764 53600 114391 98973 124467 99574 69140 144856 + 56809 149944 138738 128823 82776 77557 51994 74322 64716 114506 124074 + 73096 97066 96731 149307 135626 121413 69575 98581 50570 60754 94843 72165 + 146504 53290 63491 50936 79644 119081 70218 85849 133228 114550 131943 + 67288 68499 80512 148872 99264 119723 68295 90348 146534 52661 99146 95993 + 130363 78956 126736 82065 77227 129950 97946 132345 107137 79623 148477 + 88928 118911 75277 97162 80664 149742 88983 74518)) + +(defun calculate-fuel (mass) + (- (/ mass 3) 2)) + +(message "Solution to day1/1: %d" (apply #'+ (-map #'calculate-fuel day-1/input))) + +;; Puzzle 2: +(defun calculate-recursive-fuel (mass) + (let ((fuel (calculate-fuel mass))) + (if (< fuel 0) 0 + (+ fuel (calculate-recursive-fuel fuel))))) + +(message "Solution to day1/2: %d" (apply #'+ (-map #'calculate-recursive-fuel day-1/input))) diff --git a/users/tazjin/aoc2019/solution-day2.el b/users/tazjin/aoc2019/solution-day2.el new file mode 100644 index 000000000000..6ecac1e2016c --- /dev/null +++ b/users/tazjin/aoc2019/solution-day2.el @@ -0,0 +1,53 @@ +;; -*- lexical-binding: t; -*- +;; Advent of Code 2019 - Day 2 +(require 'dash) +(require 'ht) + +(defvar day2/input + [1 0 0 3 1 1 2 3 1 3 4 3 1 5 0 3 2 1 9 19 1 19 5 23 1 13 23 27 1 27 6 31 + 2 31 6 35 2 6 35 39 1 39 5 43 1 13 43 47 1 6 47 51 2 13 51 55 1 10 55 + 59 1 59 5 63 1 10 63 67 1 67 5 71 1 71 10 75 1 9 75 79 2 13 79 83 1 9 + 83 87 2 87 13 91 1 10 91 95 1 95 9 99 1 13 99 103 2 103 13 107 1 107 10 + 111 2 10 111 115 1 115 9 119 2 119 6 123 1 5 123 127 1 5 127 131 1 10 + 131 135 1 135 6 139 1 10 139 143 1 143 6 147 2 147 13 151 1 5 151 155 1 + 155 5 159 1 159 2 163 1 163 9 0 99 2 14 0 0]) + +;; Puzzle 1 + +(defun day2/single-op (f state idx) + (let* ((a (aref state (aref state (+ 1 idx)))) + (b (aref state (aref state (+ 2 idx)))) + (p (aref state (+ 3 idx))) + (result (funcall f a b))) + (aset state p (funcall f a b)))) + +(defun day2/operate (state idx) + (pcase (aref state idx) + (99 (aref state 0)) + (1 (day2/single-op #'+ state idx) + (day2/operate state (+ 4 idx))) + (2 (day2/single-op #'* state idx) + (day2/operate state (+ 4 idx))) + (other (error "Unknown opcode: %s" other)))) + +(defun day2/program-with-inputs (noun verb) + (let* ((input (copy-tree day2/input t))) + (aset input 1 noun) + (aset input 2 verb) + (day2/operate input 0))) + +(message "Solution to day2/1: %s" (day2/program-with-inputs 12 2)) + +;; Puzzle 2 +(let* ((used (ht)) + (noun 0) + (verb 0) + (result (day2/program-with-inputs noun verb))) + (while (/= 19690720 result) + (setq noun (random 100)) + (setq verb (random 100)) + (unless (ht-get used (format "%d%d" noun verb)) + (ht-set used (format "%d%d" noun verb) t) + (setq result (day2/program-with-inputs noun verb)))) + + (message "Solution to day2/2: %s%s" noun verb)) diff --git a/users/tazjin/aoc2019/solution-day3.el b/users/tazjin/aoc2019/solution-day3.el new file mode 100644 index 000000000000..b7dfdd245fb1 --- /dev/null +++ 
b/users/tazjin/aoc2019/solution-day3.el @@ -0,0 +1,64 @@ +;; -*- lexical-binding: t; -*- +;; Advent of Code 2019 - Day 3 + +(require 'cl-lib) +(require 'dash) +(require 'ht) +(require 's) + +(defvar day3/input/wire1 + "R1010,D422,L354,U494,L686,U894,R212,U777,L216,U9,L374,U77,R947,U385,L170,U916,R492,D553,L992,D890,L531,U360,R128,U653,L362,U522,R817,U198,L126,D629,L569,U300,L241,U145,R889,D196,L450,D576,L319,D147,R985,U889,L941,U837,L608,D77,L864,U911,L270,D869,R771,U132,L249,U603,L36,D328,L597,U992,L733,D370,L947,D595,L308,U536,L145,U318,R55,D773,R175,D505,R483,D13,R780,U778,R445,D107,R490,U245,L587,U502,R446,U639,R150,U35,L455,D522,R866,U858,R394,D975,R513,D378,R58,D646,L374,D675,R209,U228,R530,U543,L480,U677,L912,D164,L573,U587,L784,D626,L994,U250,L215,U985,R684,D79,L877,U811,L766,U617,L665,D246,L408,U800,L360,D272,L436,U138,R240,U735,L681,U68,L608,D59,R532,D808,L104,U968,R887,U819,R346,U698,L317,U582,R516,U55,L303,U607,L457,U479,L510,D366,L583,U519,R878,D195,R970,D267,R842,U784,R9,D946,R833,D238,L232,D94,L860,D47,L346,U951,R491,D745,R849,U273,R263,U392,L341,D808,R696,U326,R886,D296,L865,U833,R241,U644,R729,D216,R661,D712,L466,D699,L738,U5,L556,D693,R912,D13,R48,U63,L877,U628,L689,D929,R74,U924,R612,U153,R417,U425,L879,D378,R79,D248,L3,U519,R366,U281,R439,D823,R149,D668,R326,D342,L213,D735,R504,U265,L718,D842,L565,U105,L214,U963,R518,D681,R642,U170,L111,U6,R697,U572,R18,U331,L618,D255,R534,D322,L399,U595,L246,U651,L836,U757,R417,D795,R291,U759,L568,U965,R828,D570,R350,U317,R338,D173,L74,D833,L650,D844,L70,U913,R594,U407,R674,D684,L481,D564,L128,D277,R851,D274,L435,D582,R469,U729,R387,D818,R443,U504,R414,U8,L842,U845,R275,U986,R53,U660,R661,D225,R614,U159,R477") + +(defvar day3/input/wire2 + "L1010,D698,R442,U660,L719,U702,L456,D86,R938,D177,L835,D639,R166,D285,L694,U468,L569,D104,L234,D574,L669,U299,L124,D275,L179,D519,R617,U72,L985,D248,R257,D276,L759,D834,R490,U864,L406,U181,R911,U873,R261,D864,R260,U759,R648,U158,R308,D386,L835,D27,L745,U91,R840,U707,R275,U543,L663,U736,L617,D699,R924,U103,R225,U455,R708,U319,R569,U38,R315,D432,L179,D975,R519,D546,L295,U680,L685,U603,R262,D250,R7,U171,R261,U519,L832,U534,L471,U431,L474,U886,R10,D179,L79,D555,R452,U452,L832,U863,L367,U538,L237,D160,R441,U605,R942,U259,L811,D552,R646,D353,L225,D94,L35,D307,R752,U23,R698,U610,L379,D932,R698,D751,R178,D347,R325,D156,R471,D555,R558,D593,R773,U2,L955,U764,L735,U438,R364,D640,L757,U534,R919,U409,R361,U407,R336,D808,R877,D648,R610,U198,R340,U94,R795,D667,R811,U975,L965,D224,R565,D681,L64,U567,R621,U922,L665,U329,R242,U592,L727,D481,L339,U402,R213,D280,R656,U169,R976,D962,L294,D505,L251,D689,L497,U133,R230,D441,L90,D220,L896,D657,L500,U331,R502,U723,R762,D613,L447,D256,L226,U309,L935,U384,L740,D459,R309,D707,R952,D747,L304,D105,R977,D539,R941,D21,R291,U216,R132,D543,R515,U453,L854,D42,R982,U102,L469,D639,R559,D68,R302,U734,R980,D214,R107,D191,L730,D793,L63,U17,R807,U196,R412,D592,R330,D941,L87,D291,L44,D94,L272,D780,R968,U837,L712,D704,R163,U981,R537,U778,R220,D303,L196,D951,R163,D446,R11,D623,L72,D778,L158,U660,L189,D510,L247,D716,L89,U887,L115,U114,L36,U81,R927,U293,L265,U183,R331,D267,R745,D298,L561,D918,R299,U810,L322,U679,L739,D854,L581,U34,L862,D779,R23") + +;; Puzzle 1 + +(defun wire-from (raw) + (-map (lambda (s) + (cons (substring s 0 1) (string-to-number (substring s 1)))) + (s-split "," raw))) + +(defun day3/move (x y next) + (cl-flet ((steps (by op) + (-map op (reverse (number-sequence 1 by))))) + (pcase next + (`("L" . ,by) (steps by (lambda (n) (cons (- x n) y)))) + (`("R" . 
,by) (steps by (lambda (n) (cons (+ x n) y)))) + (`("U" . ,by) (steps by (lambda (n) (cons x (+ y n))))) + (`("D" . ,by) (steps by (lambda (n) (cons x (- y n)))))))) + +(defun day3/wire-points (wire) + (let ((points (ht)) + (point-list (-reduce-from + (lambda (acc point) + (-let* (((x . y) (car acc)) + (next (day3/move x y point))) + (-concat next acc))) + '((0 . 0)) wire))) + (-map (-lambda ((s . p)) (ht-set! points p s)) + (-zip (reverse (number-sequence 0 (- (length point-list) 1))) point-list)) + (ht-remove! points '(0 . 0)) + points)) + +(defun day3/closest-intersection (crossed-points) + (car (-sort #'< + (-map (-lambda ((x . y)) + (+ (abs x) (abs y))) + crossed-points)))) + +(defun day3/minimum-steps (wire1 wire2 crossed) + (car (-sort #'< + (-map (-lambda (p) + (+ (ht-get wire1 p) (ht-get wire2 p))) + crossed)))) + +;; Example: +(let* ((wire1-points (day3/wire-points (wire-from day3/input/wire1))) + (wire2-points (day3/wire-points (wire-from day3/input/wire2))) + (crossed-points (-filter (lambda (p) (ht-contains? wire1-points p)) + (ht-keys wire2-points)))) + (message "Solution for day3/1: %d" (day3/closest-intersection crossed-points)) + (message "Solution for day3/2: %d" (day3/minimum-steps wire1-points + wire2-points + crossed-points))) diff --git a/users/tazjin/aoc2019/solution-day4.el b/users/tazjin/aoc2019/solution-day4.el new file mode 100644 index 000000000000..2805f3f4e9cd --- /dev/null +++ b/users/tazjin/aoc2019/solution-day4.el @@ -0,0 +1,73 @@ +;; -*- lexical-binding: t; -*- +;; Advent of Code 2019 - Day 4 + +(require 'cl-lib) +(require 'dash) + +;; Puzzle 1 + +(defun day4/to-digits (num) + "Convert NUM to a list of its digits." + (cl-labels ((steps (n digits) + (if (= n 0) digits + (steps (/ n 10) (cons (% n 10) digits))))) + (steps num '()))) + +(defvar day4/input (-map #'day4/to-digits (number-sequence 128392 643281))) + +(defun day4/filter-password (digits) + "Determines whether the given rules match the supplied + number." + + (and + ;; It is a six digit number + (= 6 (length digits)) + + ;; Value is within the range given in puzzle input + ;; (noop because the range is generated from the input) + + ;; Two adjacent digits are the same (like 22 in 122345). + (car (-reduce-from (-lambda ((acc . prev) next) + (cons (or acc (= prev next)) next)) + '(nil . 0) digits)) + + ;; Going from left to right, the digits never decrease; they only + ;; ever increase or stay the same (like 111123 or 135679). + (car (-reduce-from (-lambda ((acc . prev) next) + (cons (and acc (>= next prev)) next)) + '(t . 0) digits)))) + +;; Puzzle 2 +;; +;; Additional criteria: If there's matching digits, they're not in a group. + +(cl-defstruct day4/acc state prev count) + +(defun day4/filter-longer-groups (digits) + (let ((res (-reduce-from + (lambda (acc next) + (cond ;; sequence is broken and count was at 1 -> + ;; match! + ((and (= (day4/acc-count acc) 2) + (/= (day4/acc-prev acc) next)) + (setf (day4/acc-state acc) t)) + + ;; sequence continues, counter increment! 
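+                      ;; (a run of three or more equal digits pushes the
+                      ;; count past 2, so neither the clause above nor
+                      ;; the final count check accepts it)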
+ ((= (day4/acc-prev acc) next) + (setf (day4/acc-count acc) (+ 1 (day4/acc-count acc)))) + + ;; sequence broken, reset counter + ((/= (day4/acc-prev acc) next) + (setf (day4/acc-count acc) 1))) + + (setf (day4/acc-prev acc) next) + acc) + (make-day4/acc :prev 0 :count 0) digits))) + (or (day4/acc-state res) + (= 2 (day4/acc-count res))))) + +(let* ((simple (-filter #'day4/filter-password day4/input)) + (complex (-filter #'day4/filter-longer-groups simple))) + (message "Solution to day4/1: %d" (length simple)) + (message "Solution to day4/2: %d" (length complex))) + diff --git a/users/tazjin/aoc2020/default.nix b/users/tazjin/aoc2020/default.nix new file mode 100644 index 000000000000..5989224f8d62 --- /dev/null +++ b/users/tazjin/aoc2020/default.nix @@ -0,0 +1,22 @@ +# Solutions for Advent of Code 2020, written in Emacs Lisp. +# +# For each day a new file is created as "solution-day$n.el". +{ depot, ... }: + +let + inherit (builtins) attrNames filter head listToAttrs match readDir; + dir = readDir ./.; + matchSolution = match "solution-(.*)\.el"; + isSolution = f: (matchSolution f) != null; + getDay = f: head (matchSolution f); + + solutionFiles = filter (e: dir."${e}" == "regular" && isSolution e) (attrNames dir); + solutions = map (f: let day = getDay f; in depot.nix.writeElispBin { + name = day; + deps = p: with p; [ dash s ht p.f ]; + src = ./. + ("/" + f); + }) solutionFiles; +in depot.third_party.symlinkJoin { + name = "aoc2020"; + paths = solutions; +} diff --git a/users/tazjin/aoc2020/solution-day1.el b/users/tazjin/aoc2020/solution-day1.el new file mode 100644 index 000000000000..a04f43d15197 --- /dev/null +++ b/users/tazjin/aoc2020/solution-day1.el @@ -0,0 +1,44 @@ +;; Advent of Code 2020 - Day 1 +(require 'cl) +(require 'ht) +(require 'dash) + +(defmacro hash-set (&rest elements) + "Define a hash-table with empty values, for use as a set." + (cons 'ht (-map (lambda (x) (list x nil)) elements))) + +;; Puzzle 1: + +(defvar day1/input + (hash-set 1645 1995 1658 1062 1472 1710 1424 1823 1518 1656 1811 1511 1320 1521 1395 + 1996 1724 1666 1637 1504 1766 534 1738 1791 1372 1225 1690 1949 1495 1436 1166 + 1686 1861 1889 1887 997 1202 1478 833 1497 1459 1717 1272 1047 1751 1549 1204 + 1230 1260 1611 1506 1648 1354 1415 1615 1327 1622 1592 1807 1601 1026 1757 1376 + 1707 1514 1905 1660 1578 1963 1292 390 1898 1019 1580 1499 1830 1801 1881 1764 + 1442 1838 1088 1087 1040 1349 1644 1908 1697 1115 1178 1224 1810 1445 1594 1894 + 1287 1676 1435 1294 1796 1350 1685 1118 1488 1726 1696 1190 1538 1780 1806 1207 + 1346 1705 983 1249 1455 2002 1466 1723 1227 1390 1281 1715 1603 1862 1744 1774 + 1385 1312 1654 1872 1142 1273 1508 1639 1827 1461 1795 1533 1304 1417 1984 28 + 1693 1951 1391 1931 1179 1278 1400 1361 1369 1343 1416 1426 314 1510 1933 1239 + 1218 1918 1797 1255 1399 1229 723 1992 1595 1191 1916 1525 1605 1524 1869 1652 + 1874 1756 1246 1310 1219 1482 1429 1244 1554 1575 1123 1194 1408 1917 1613 1773 + 1809 1987 1733 1844 1423 1718 1714 1923 1503)) + +(message "Solution to day1/1: %s" + (cl-loop for first being the hash-keys of day1/input + for second = (- 2020 first) + when (ht-contains? day1/input second) + return (* first second))) + +;; Puzzle 2: + +(message "Solution to day1/1: %s" + (cl-loop for first being the hash-keys of day1/input + for result = + (cl-loop + for second being the elements of (-drop 1 (ht-keys day1/input)) + for third = (- 2020 first second) + when (ht-contains? 
day1/input third) + return (* first second third)) + + when result return result)) diff --git a/users/tazjin/aoc2020/solution-day2.el b/users/tazjin/aoc2020/solution-day2.el new file mode 100644 index 000000000000..5993bf3407e4 --- /dev/null +++ b/users/tazjin/aoc2020/solution-day2.el @@ -0,0 +1,54 @@ +;; Advent of Code 2020 - Day 2 + +(require 'cl-lib) +(require 'f) +(require 'ht) +(require 's) +(require 'seq) + +(defvar day2/input + ;; This one was too large to inline. + (s-lines (f-read "/tmp/aoc/day2.txt"))) + +(defun day2/count-letters (password) + (let ((table (ht-create))) + (cl-loop for char across password + for current = (ht-get table char) + do (ht-set table char + (if current (+ 1 current) 1))) + table)) + +(defun day2/parse (input) + (let* ((split (s-split " " input)) + (range (s-split "-" (car split)))) + (list (string-to-number (car range)) + (string-to-number (cadr range)) + (string-to-char (cadr split)) + (caddr split)))) + +(defun day2/count-with-validation (func) + (length (-filter + (lambda (password) + (and (not (seq-empty-p password)) + (apply func (day2/parse password)))) + day2/input))) + +;; Puzzle 1 + +(defun day2/validate-oldjob (min max char password) + (let ((count (ht-get (day2/count-letters password) char))) + (when count + (and (>= count min) + (<= count max))))) + +(message "Solution to day2/1: %s" + (day2/count-with-validation #'day2/validate-oldjob)) + +;; Puzzle 2 + +(defun day2/validate-toboggan (pos1 pos2 char password) + (xor (= char (aref password (- pos1 1))) + (= char (aref password (- pos2 1))))) + +(message "Solution to day2/2: %s" + (day2/count-with-validation #'day2/validate-toboggan)) diff --git a/users/tazjin/aoc2020/solution-day3.el b/users/tazjin/aoc2020/solution-day3.el new file mode 100644 index 000000000000..80ea4a226405 --- /dev/null +++ b/users/tazjin/aoc2020/solution-day3.el @@ -0,0 +1,43 @@ +;; Advent of Code 2020 - Day 3 + +(require 'cl-lib) +(require 'dash) +(require 'f) +(require 's) +(require 'seq) + +(setq day3/input + (-filter (lambda (s) (not (seq-empty-p s))) + (s-lines (f-read "/tmp/aoc/day3.txt")))) + +(setq day3/input-width (length (elt day3/input 0))) +(setq day3/input-height (length day3/input)) + +(defun day3/thing-at-point (x y) + "Pun intentional." + (when (>= day3/input-height y) + (let ((x-repeated (mod (- x 1) day3/input-width))) + (elt (elt day3/input (- y 1)) x-repeated)))) + +(defun day3/slope (x-steps y-steps) + "Produce the objects encountered through this slope until the + bottom of the map." 
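+  ;; e.g. (day3/slope 3 1) collects the map characters along the
+  ;; "right 3, down 1" slope, wrapping around horizontally via the
+  ;; `mod' in `day3/thing-at-point'.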
+ (cl-loop for x from 1 by x-steps + for y from 1 to day3/input-height by y-steps + collect (day3/thing-at-point x y))) + +;; Puzzle 1 + +(defun day3/count-trees (x-steps y-steps) + (cl-loop for thing being the elements of (day3/slope x-steps y-steps) + count (= thing ?#))) + +(message "Solution to day3/1: One encounters %s trees" (day3/count-trees 3 1)) + +;; Puzzle 2 + +(message "Solution to day3/2 %s" (* (day3/count-trees 1 1) + (day3/count-trees 3 1) + (day3/count-trees 5 1) + (day3/count-trees 7 1) + (day3/count-trees 1 2))) diff --git a/users/tazjin/aoc2020/solution-day4.el b/users/tazjin/aoc2020/solution-day4.el new file mode 100644 index 000000000000..034a40a9558d --- /dev/null +++ b/users/tazjin/aoc2020/solution-day4.el @@ -0,0 +1,98 @@ +;; Advent of Code 2020 - Day 4 + +(require 'cl-lib) +(require 's) +(require 'dash) +(require 'f) + +(cl-defstruct day4/passport + byr ;; Birth Year + iyr ;; Issue Year + eyr ;; Expiration Year + hgt ;; Height + hcl ;; Hair Color + ecl ;; Eye Color + pid ;; Passport ID + cid ;; Country ID + ) + +(defun day4/parse-passport (input) + (let* ((pairs (s-split " " (s-replace "\n" " " input) t)) + (slots + (-map + (lambda (pair) + (pcase-let ((`(,key ,value) (s-split ":" (s-trim pair)))) + (list (intern (format ":%s" key)) value))) + pairs))) + (apply #'make-day4/passport (-flatten slots)))) + +(defun day4/parse-passports (input) + (-map #'day4/parse-passport (s-split "\n\n" input t))) + +(setq day4/input (day4/parse-passports (f-read "/tmp/aoc/day4.txt"))) + +;; Puzzle 1 + +(defun day4/validate (passport) + "Check that all fields except CID are present." + (cl-check-type passport day4/passport) + (and (day4/passport-byr passport) + (day4/passport-iyr passport) + (day4/passport-eyr passport) + (day4/passport-hgt passport) + (day4/passport-hcl passport) + (day4/passport-ecl passport) + (day4/passport-pid passport))) + +(message "Solution to day4/1: %s" (cl-loop for passport being the elements of day4/input + count (day4/validate passport))) + +;; Puzzle 2 + +(defun day4/year-bound (min max value) + (and + (s-matches? (rx (= 4 digit)) value) + (<= min (string-to-number value) max))) + +(defun day4/check-unit (unit min max value) + (and + (string-match (rx (group (+? digit)) (literal unit)) value) + (<= min (string-to-number (match-string 1 value)) max))) + +(defun day4/properly-validate (passport) + "Opting for readable rather than clever here." + (and + (day4/validate passport) + + ;; byr (Birth Year) - four digits; at least 1920 and at most 2002. + (day4/year-bound 1920 2002 (day4/passport-byr passport)) + + ;; iyr (Issue Year) - four digits; at least 2010 and at most 2020. + (day4/year-bound 2010 2020 (day4/passport-iyr passport)) + + ;; eyr (Expiration Year) - four digits; at least 2020 and at most 2030. + (day4/year-bound 2020 2030 (day4/passport-eyr passport)) + + ;; hgt (Height) - a number followed by either cm or in: + ;; If cm, the number must be at least 150 and at most 193. + ;; If in, the number must be at least 59 and at most 76. + (or (day4/check-unit "cm" 150 193 (day4/passport-hgt passport)) + (day4/check-unit "in" 59 76 (day4/passport-hgt passport))) + + ;; hcl (Hair Color) - a # followed by exactly six characters 0-9 or a-f. + (s-matches? (rx ?# (= 6 hex)) (day4/passport-hcl passport)) + + ;; ecl (Eye Color) - exactly one of: amb blu brn gry grn hzl oth. + (-contains? '("amb" "blu" "brn" "gry" "grn" "hzl" "oth") + (day4/passport-ecl passport)) + + ;; pid (Passport ID) - a nine-digit number, including leading zeroes. + (s-matches? 
(rx line-start (= 9 digit) line-end) + (day4/passport-pid passport)) + + ;; cid (Country ID) - ignored, missing or not. + )) + +(message "Solution to day4/2: %s" + (cl-loop for passport being the elements of day4/input + count (day4/properly-validate passport))) diff --git a/users/tazjin/aoc2020/solution-day5.el b/users/tazjin/aoc2020/solution-day5.el new file mode 100644 index 000000000000..9bba322902b0 --- /dev/null +++ b/users/tazjin/aoc2020/solution-day5.el @@ -0,0 +1,61 @@ +;; Advent of Code 2020 - Day 5 + +(require 'cl-lib) +(require 'dash) +(require 'f) +(require 'ht) +(require 's) +(require 'seq) + +(defvar day5/input + (-filter (lambda (s) (not (seq-empty-p s))) + (s-lines (f-read "/tmp/aoc/day5.txt")))) + +(defun day5/lower (sequence) + (seq-subseq sequence 0 (/ (length sequence) 2))) + +(defun day5/upper (sequence) + (seq-subseq sequence (/ (length sequence) 2))) + +(defun day5/seat-id (column row) + (+ column (* 8 row))) + +(defun day5/find-seat (boarding-pass) + (let ((rows (number-sequence 0 127)) + (columns (number-sequence 0 7))) + (cl-loop for char across boarding-pass + do (pcase char + (?F (setq rows (day5/lower rows))) + (?B (setq rows (day5/upper rows))) + (?R (setq columns (day5/upper columns))) + (?L (setq columns (day5/lower columns)))) + finally return (day5/seat-id (car columns) (car rows))))) + +;; Puzzle 1 + +(message "Solution to day5/1: %s" + (cl-loop for boarding-pass in day5/input + maximize (day5/find-seat boarding-pass))) + +;; Puzzle 2 + +(defun day5/all-seats-in (row) + (-map (lambda (column) (day5/seat-id column row)) + (number-sequence 0 7))) + +(message "Solution to day5/2: %s" + (let ((all-seats (ht-create))) + (-each (-mapcat #'day5/all-seats-in (number-sequence 1 126)) + (lambda (seat) (ht-set all-seats seat nil))) + + (cl-loop for boarding-pass in day5/input + do (ht-remove all-seats (day5/find-seat boarding-pass)) + + ;; Remove seats that lack adjacent entries, those + ;; are missing on the plane. + finally return + (car + (-filter (lambda (seat) + (and (not (ht-contains? all-seats (- seat 1))) + (not (ht-contains? all-seats (+ seat 1))))) + (ht-keys all-seats)))))) diff --git a/users/tazjin/aoc2020/solution-day6.el b/users/tazjin/aoc2020/solution-day6.el new file mode 100644 index 000000000000..8179c79af2bd --- /dev/null +++ b/users/tazjin/aoc2020/solution-day6.el @@ -0,0 +1,40 @@ +;; Advent of Code 2020 - Day 6 + +(require 'cl-lib) +(require 'dash) +(require 'f) +(require 'ht) +(require 's) + +(defvar day6/input (s-split "\n\n" (f-read "/tmp/aoc/day6.txt") t) + "Input, split into groups (with people in each group still distinct)") + +;; Puzzle 1 + +(defun day6/count-answers (group-answers) + "I suspect doing it this way will be useful in puzzle 2." 
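+  ;; Returns a hash table mapping each answered question (a character)
+  ;; to how many people in the group gave that answer.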
+ (let ((table (ht-create))) + (-each group-answers + (lambda (answer) + (cl-loop for char across answer + do (ht-set table char (+ 1 (or (ht-get table char) + 0)))))) + table)) + +(message "Solution to day6/1: %s" + (cl-loop for group being the elements of day6/input + sum (length + (ht-keys + (day6/count-answers (s-lines group)))))) + +;; Puzzle 2 + +(defun day6/count-unanimous-answers (answers) + (ht-reject (lambda (_key value) (not (= value (length answers)))) + (day6/count-answers answers))) + +(message "Solution to day6/2: %s" + (cl-loop for group being the elements of day6/input + sum (length + (ht-keys + (day6/count-unanimous-answers (s-split "\n" group t)))))) diff --git a/users/tazjin/aoc2020/solution-day7.el b/users/tazjin/aoc2020/solution-day7.el new file mode 100644 index 000000000000..251a85fede02 --- /dev/null +++ b/users/tazjin/aoc2020/solution-day7.el @@ -0,0 +1,92 @@ +;; Advent of Code 2020 - Day 7 + +(require 'cl-lib) +(require 'dash) +(require 'f) +(require 's) +(require 'ht) + +(defvar day7/input + (s-lines (s-chomp (f-read "/tmp/aoc/day7.txt")))) + +(defun day7/parse-bag (input) + (string-match (rx line-start + (group (one-or-more (or letter space))) + "s contain " + (group (one-or-more anything)) + "." line-end) + input) + (cons (match-string 1 input) + (-map + (lambda (content) + (unless (equal content "no other bags") + (progn + (string-match + (rx (group (one-or-more digit)) + space + (group (one-or-more anything) "bag")) + content) + (cons (match-string 2 content) + (string-to-number (match-string 1 content)))))) + (s-split ", " (match-string 2 input))))) + +(defun day7/id-or-next (table bag-type) + (unless (ht-contains? table bag-type) + (ht-set table bag-type (length (ht-keys table)))) + (ht-get table bag-type)) + +(defun day7/build-graph (input &optional flip) + "Represent graph mappings directionally using an adjacency + matrix, because that's probably easiest. + + By default an edge means 'contains', with optional argument + FLIP edges are inverted and mean 'contained by'." + + (let ((bag-mapping (ht-create)) + (graph (let ((length (length input))) + (apply #'vector + (-map (lambda (_) (make-vector length 0)) input))))) + (cl-loop for bag in (-map #'day7/parse-bag input) + for bag-id = (day7/id-or-next bag-mapping (car bag)) + do (-each (-filter #'identity (cdr bag)) + (pcase-lambda (`(,contained-type . ,count)) + (let ((contained-id (day7/id-or-next bag-mapping contained-type))) + (if flip + (aset (aref graph contained-id) bag-id count) + (aset (aref graph bag-id) contained-id count)))))) + (cons bag-mapping graph))) + +;; Puzzle 1 + +(defun day7/find-ancestors (visited graph start) + (ht-set visited start t) + (cl-loop for bag-count being the elements of (aref graph start) + using (index bag-id) + when (and (> bag-count 0) + (not (ht-contains? visited bag-id))) + do (day7/find-ancestors visited graph bag-id))) + +(message + "Solution to day7/1: %s" + (pcase-let* ((`(,mapping . 
,graph) (day7/build-graph day7/input t)) + (shiny-gold-id (ht-get mapping "shiny gold bag")) + (visited (ht-create))) + (day7/find-ancestors visited graph shiny-gold-id) + (- (length (ht-keys visited)) 1))) + +;; Puzzle 2 + +(defun ht-find-by-value (table value) + (ht-find (lambda (_key item-value) (equal item-value value)) table)) + +(defun day7/count-contained-bags (mapping graph start) + (cl-loop for bag-count being the elements of (aref graph start) + using (index bag-id) + when (> bag-count 0) + sum (+ bag-count + (* bag-count (day7/count-contained-bags mapping graph bag-id))))) + +(message "Solution to day7/2: %s" + (pcase-let* ((`(,mapping . ,graph) (day7/build-graph day7/input)) + (shiny-gold-id (ht-get mapping "shiny gold bag"))) + (day7/count-contained-bags mapping graph shiny-gold-id))) diff --git a/users/tazjin/aoc2020/solution-day8.el b/users/tazjin/aoc2020/solution-day8.el new file mode 100644 index 000000000000..591a07fbf3a0 --- /dev/null +++ b/users/tazjin/aoc2020/solution-day8.el @@ -0,0 +1,63 @@ +;; Advent of Code 2020 - Day + +(require 'cl-lib) +(require 'dash) +(require 'f) +(require 's) + +(setq day8/input + (apply #'vector + (-map (lambda (s) + (pcase-let ((`(,op ,val) (s-split " " s t))) + (cons (intern op) (string-to-number val)))) + (s-lines (s-chomp (f-read "/tmp/aoc/day8.txt")))))) + +(defun day8/step (code position acc) + (if (>= position (length code)) + (cons 'final acc) + + (let ((current (aref code position))) + (aset code position :done) + (pcase current + (:done (cons 'loop acc)) + (`(nop . ,val) (cons (+ position 1) acc)) + (`(acc . ,val) (cons (+ position 1) (+ acc val))) + (`(jmp . ,val) (cons (+ position val) acc)))))) + +;; Puzzle 1 + +(message "Solution to day8/1: %s" + (let ((code (copy-sequence day8/input)) + (position 0) + (acc 0)) + (cl-loop for next = (day8/step code position acc) + when (equal 'loop (car next)) return (cdr next) + do (setq position (car next)) + do (setq acc (cdr next))))) + +;; Puzzle 2 + +(defun day8/flip-at (code pos) + (pcase (aref code pos) + (`(nop . ,val) (aset code pos `(jmp . ,val))) + (`(jmp . ,val) (aset code pos `(nop . ,val))) + (other (error "Unexpected flip op: %s" other)))) + +(defun day8/try-flip (flip-at code position acc) + (day8/flip-at code flip-at) + (cl-loop for next = (day8/step code position acc) + when (equal 'loop (car next)) return nil + when (equal 'final (car next)) return (cdr next) + do (setq position (car next)) + do (setq acc (cdr next)))) + +(message "Solution to day8/2: %s" + (let ((flip-options (cl-loop for op being the elements of day8/input + using (index idx) + for opcode = (car op) + when (or (equal 'nop opcode) + (equal 'jmp opcode)) + collect idx))) + (cl-loop for flip-at in flip-options + for result = (day8/try-flip flip-at (copy-sequence day8/input) 0 0) + when result return result))) diff --git a/users/tazjin/atom-feed/default.nix b/users/tazjin/atom-feed/default.nix new file mode 100644 index 000000000000..cd189b86754d --- /dev/null +++ b/users/tazjin/atom-feed/default.nix @@ -0,0 +1,141 @@ +# This file defines functions for generating an Atom feed. + +{ depot, lib, ... }: + +with depot.nix.yants; + +let + inherit (builtins) map readFile replaceStrings; + inherit (lib) concatStrings concatStringsSep removeSuffix; + inherit (depot.third_party) runCommandNoCC; + + # 'link' describes a related link to a feed, or feed element. 
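+  # A value of this type might look like (illustrative):
+  #   { rel = "alternate"; href = "/feed.atom"; }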
+ # + # https://validator.w3.org/feed/docs/atom.html#link + link = struct "link" { + rel = string; + href = string; + }; + + # 'entry' describes a feed entry, for example a single post on a + # blog. Some optional fields have been omitted. + # + # https://validator.w3.org/feed/docs/atom.html#requiredEntryElements + entry = struct "entry" { + # Identifies the entry using a universally unique and permanent URI. + id = string; + + # Contains a human readable title for the entry. This value should + # not be blank. + title = string; + + # Content of the entry. + content = option string; + + # Indicates the last time the entry was modified in a significant + # way (in seconds since epoch). + updated = int; + + # Names authors of the entry. Recommended element. + authors = option (list string); + + # Related web pages, such as the web location of a blog post. + links = option (list link); + + # Conveys a short summary, abstract, or excerpt of the entry. + summary = option string; + + # Contains the time of the initial creation or first availability + # of the entry. + published = option int; + + # Conveys information about rights, e.g. copyrights, held in and + # over the entry. + rights = option string; + }; + + # 'feed' describes the metadata of the Atom feed itself. + # + # Some optional fields have been omitted. + # + # https://validator.w3.org/feed/docs/atom.html#requiredFeedElements + feed = struct "feed" { + # Identifies the feed using a universally unique and permanent URI. + id = string; + + # Contains a human readable title for the feed. + title = string; + + # Indicates the last time the feed was modified in a significant + # way (in seconds since epoch). Recommended element. + updated = int; + + # Entries contained within the feed. + entries = list entry; + + # Names authors of the feed. Recommended element. + authors = option (list string); + + # Related web locations. Recommended element. + links = option (list link); + + # Conveys information about rights, e.g. copyrights, held in and + # over the feed. + rights = option string; + + # Contains a human-readable description or subtitle for the feed. + subtitle = option string; + }; + + # Feed generation functions: + + renderEpoch = epoch: removeSuffix "\n" (readFile (runCommandNoCC "date-${toString epoch}" {} '' + date --date='@${toString epoch}' --utc --iso-8601='seconds' > $out + '')); + + escape = replaceStrings [ "<" ">" "&" "'" ] [ "<" ">" "&" "'" ]; + + elem = name: content: ''<${name}>${escape content}</${name}>''; + + renderLink = defun [ link string ] (l: '' + <link href="${escape l.href}" rel="${escape l.rel}" /> + ''); + + # Technically the author element can also contain 'uri' and 'email' + # fields, but they are not used for the purpose of this feed and are + # omitted. + renderAuthor = author: ''<author><name>${escape author}</name></author>''; + + renderEntry = defun [ entry string ] (e: '' + <entry> + ${elem "title" e.title} + ${elem "id" e.id} + ${elem "updated" (renderEpoch e.updated)} + ${if e ? content + then ''<content type="html">${escape e.content}</content>'' + else "" + } + ${if e ? summary then elem "summary" e.summary else ""} + ${concatStrings (map renderAuthor (e.authors or []))} + ${if e ? subtitle then elem "subtitle" e.subtitle else ""} + ${if e ? 
rights then elem "rights" e.rights else ""} + ${concatStrings (map renderLink (e.links or []))} + </entry> + ''); + + renderFeed = defun [ feed string ] (f: '' + <?xml version="1.0" encoding="utf-8"?> + <feed xmlns="http://www.w3.org/2005/Atom"> + ${elem "id" f.id} + ${elem "title" f.title} + ${elem "updated" (renderEpoch f.updated)} + ${concatStringsSep "\n" (map renderAuthor (f.authors or []))} + ${if f ? subtitle then elem "subtitle" f.subtitle else ""} + ${if f ? rights then elem "rights" f.rights else ""} + ${concatStrings (map renderLink (f.links or []))} + ${concatStrings (map renderEntry f.entries)} + </feed> + ''); +in { + inherit entry feed renderFeed renderEpoch; +} diff --git a/users/tazjin/avatar.jpeg b/users/tazjin/avatar.jpeg new file mode 100644 index 000000000000..f6888e01c7dc --- /dev/null +++ b/users/tazjin/avatar.jpeg Binary files differdiff --git a/users/tazjin/blog/.skip-subtree b/users/tazjin/blog/.skip-subtree new file mode 100644 index 000000000000..e7fa50d49bdd --- /dev/null +++ b/users/tazjin/blog/.skip-subtree @@ -0,0 +1 @@ +Subdirectories contain blog posts and static assets only diff --git a/users/tazjin/blog/default.nix b/users/tazjin/blog/default.nix new file mode 100644 index 000000000000..d2f04aaea574 --- /dev/null +++ b/users/tazjin/blog/default.nix @@ -0,0 +1,59 @@ +# This creates the static files that make up my blog from the Markdown +# files in this repository. +# +# All blog posts are rendered from Markdown by cheddar. +{ depot, lib, ... }@args: + +with depot.nix.yants; + +let + inherit (builtins) filter hasAttr map; + + # Type definition for a single blog post. + post = struct "blog-post" { + key = string; # + title = string; + date = int; + + # Path to the Markdown file containing the post content. + content = path; + + # Should this post be included in the index? (defaults to true) + listed = option bool; + + # Is this a draft? (adds a banner indicating that the link should + # not be shared) + draft = option bool; + + # Previously each post title had a numeric ID. For these numeric + # IDs, redirects are generated so that old URLs stay compatible. + oldKey = option string; + }; + + posts = list post (import ./posts.nix); + fragments = import ./fragments.nix args; + + rendered = depot.third_party.runCommandNoCC "tazjins-blog" {} '' + mkdir -p $out + + ${lib.concatStringsSep "\n" (map (post: + "cp ${fragments.renderPost post} $out/${post.key}.html" + ) posts)} + ''; + + includePost = post: !(fragments.isDraft post) && !(fragments.isUnlisted post); +in { + inherit post rendered; + static = ./static; + + # Only include listed posts + posts = filter includePost posts; + + # Generate embeddable nginx configuration for redirects from old post URLs + oldRedirects = lib.concatStringsSep "\n" (map (post: '' + location ~* ^(/en)?/${post.oldKey} { + # TODO(tazjin): 301 once this works + return 302 https://tazj.in/blog/${post.key}; + } + '') (filter (hasAttr "oldKey") posts)); +} diff --git a/users/tazjin/blog/fragments.nix b/users/tazjin/blog/fragments.nix new file mode 100644 index 000000000000..fb750a729910 --- /dev/null +++ b/users/tazjin/blog/fragments.nix @@ -0,0 +1,96 @@ +# This file defines various fragments of the blog, such as the header +# and footer, as functions that receive arguments to be templated into +# them. +# +# An entire post is rendered by `renderPost`, which assembles the +# fragments together in a runCommand execution. +# +# The post index is generated by //users/tazjin/homepage, not by this +# code. +{ depot, lib, ... 
}: + +let + inherit (builtins) filter map hasAttr replaceStrings toFile; + inherit (depot.third_party) runCommandNoCC writeText; + inherit (depot.users.tazjin) renderMarkdown; + + # Generate a post list for all listed, non-draft posts. + isDraft = post: (hasAttr "draft" post) && post.draft; + isUnlisted = post: (hasAttr "listed" post) && !post.listed; + + escape = replaceStrings [ "<" ">" "&" "'" ] [ "<" ">" "&" "'" ]; + + header = title: '' + <!DOCTYPE html> + <head> + <meta charset="utf-8"> + <meta name="viewport" content="width=device-width, initial-scale=1"> + <meta name="description" content="tazjin's blog"> + <link rel="stylesheet" type="text/css" href="/static/tazjin.css" media="all"> + <link rel="icon" type="image/webp" href="/static/favicon.webp"> + <link rel="alternate" type="application/atom+xml" title="Atom Feed" href="/feed.atom"> + <title>tazjin's blog: ${escape title}</title> + </head> + <body class="light"> + <header> + <h1><a class="blog-title" href="/">tazjin's interblag</a> </h1> + <hr> + </header> + ''; + + footer = '' + <hr> + <footer> + <p class="footer"> + <a class="uncoloured-link" href="https://tazj.in">homepage</a> + | + <a class="uncoloured-link" href="https://cs.tvl.fyi/">code</a> + </p> + <p class="lod">ಠ_ಠ</p> + </footer> + </body> + ''; + + draftWarning = toFile "draft.html" '' + <p class="cheddar-callout cheddar-warning"> + <b>Note:</b> This post is a <b>draft</b>! Please do not share + the link to it without asking me first. + </p> + <hr> + ''; + + unlistedWarning = toFile "unlisted.html" '' + <p class="cheddar-callout cheddar-warning"> + <b>Note:</b> This post is <b>unlisted</b>! Please do not share + the link to it without asking me first. + </p> + <hr> + ''; + + renderPost = post: runCommandNoCC "${post.key}.html" {} '' + cat ${toFile "header.html" (header post.title)} > $out + + # Write the post title & date + echo '<article><h2 class="inline">${escape post.title}</h2>' >> $out + echo '<aside class="date">' >> $out + date --date="@${toString post.date}" '+%Y-%m-%d' >> $out + echo '</aside>' >> $out + + ${ + # Add a warning to draft/unlisted posts to make it clear that + # people should not share the post. + + if (isDraft post) then "cat ${draftWarning} >> $out" + else if (isUnlisted post) then "cat ${unlistedWarning} >> $out" + else "# Your ads could be here?" + } + + # Write the actual post through cheddar's about-filter mechanism + cat ${renderMarkdown post.content} >> $out + echo '</article>' >> $out + + cat ${toFile "footer.html" footer} >> $out + ''; +in { + inherit renderPost isDraft isUnlisted; +} diff --git a/users/tazjin/blog/posts.nix b/users/tazjin/blog/posts.nix new file mode 100644 index 000000000000..b43598d01358 --- /dev/null +++ b/users/tazjin/blog/posts.nix @@ -0,0 +1,57 @@ +# This file defines all the blog posts. 
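+#
+# Each entry is a `post` as defined in ./default.nix: `key`, `title`,
+# `date` (seconds since epoch) and `content` (path to the Markdown
+# source) are required; `listed`, `draft` and `oldKey` are optional.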
+[ + { + key = "emacs-is-underrated"; + title = "Emacs is the most underrated tool"; + date = 1581286656; + content = ./posts/emacs-is-underrated.md; + draft = true; + } + { + key = "best-tools"; + title = "tazjin's best tools"; + date = 1576800001; + content = ./posts/best-tools.md; + } + { + key = "nixery-layers"; + title = "Nixery: Improved Layering Design"; + date = 1565391600; + content = ./posts/nixery-layers.md; + } + { + key = "reversing-watchguard-vpn"; + title = "Reverse-engineering WatchGuard Mobile VPN"; + date = 1486830338; + content = ./posts/reversing-watchguard-vpn.md; + oldKey = "1486830338"; + } + { + key = "make-object-t-again"; + title = "Make Object <T> Again!"; + date = 1476807384; + content = ./posts/make-object-t-again.md; + oldKey = "1476807384"; + } + { + key = "the-smu-problem"; + title = "The SMU-problem of messaging apps"; + date = 1450354078; + content =./posts/the-smu-problem.md; + oldKey = "1450354078"; + } + { + key = "sick-in-sweden"; + title = "Being sick in Sweden"; + date = 1423995834; + content = ./posts/sick-in-sweden.md; + oldKey = "1423995834"; + } + { + key = "nsa-zettabytes"; + title = "The NSA's 5 zettabytes of data"; + date = 1375310627; + content = ./posts/nsa-zettabytes.md; + oldKey = "1375310627"; + } +] diff --git a/users/tazjin/blog/posts/best-tools.md b/users/tazjin/blog/posts/best-tools.md new file mode 100644 index 000000000000..e4bad8f4cd07 --- /dev/null +++ b/users/tazjin/blog/posts/best-tools.md @@ -0,0 +1,160 @@ +In the spirit of various other "Which X do you use?"-pages I thought it would be +fun to have a little post here that describes which tools I've found to work +well for myself. + +When I say "tools" here, it's not about software - it's about real, physical +tools! + +If something goes on this list that's because I think it's seriously a +best-in-class type of product. + +<!-- markdown-toc start - Don't edit this section. Run M-x markdown-toc-refresh-toc --> +- [Media & Tech](#media--tech) + - [Keyboard](#keyboard) + - [Speakers](#speakers) + - [Headphones](#headphones) + - [Earphones](#earphones) + - [Phone](#phone) +- [Other stuff](#other-stuff) + - [Toothbrush](#toothbrush) + - [Shavers](#shavers) + - [Shoulder bag](#shoulder-bag) + - [Wallet](#wallet) +<!-- markdown-toc end --> + +--------- + +# Media & Tech + +## Keyboard + +The best keyboard that money will buy you at the moment is the [Kinesis +Advantage][advantage]. There's a variety of contoured & similarly shaped +keyboards on the market, but the Kinesis is the only one I've tried that has +properly implemented the keywell concept. + +I struggle with RSI issues and the Kinesis actually makes it possible for me to +type for longer periods of time, which always leads to extra discomfort on +laptop keyboards and such. + +Honestly, the Kinesis is probably the best piece of equipment on this entire +list. I own several of them and there will probably be more in the future. They +last forever and your wrists will thank you in the future, even if you do not +suffer from RSI yet. + +[advantage]: https://kinesis-ergo.com/shop/advantage2/ + +## Speakers + +The speakers that I've hooked up to my audio setup (including both record player +& Chromecast / TV) are the [Teufel Motiv 2][motiv-2]. I've had these for over a +decade and they're incredibly good, but unfortunately Teufel no longer makes +them. + +It's possible to grab a pair on eBay occasionally, so keep an eye out if you're +interested! 
+ +[motiv-2]: https://www.teufelaudio.com/uk/pc/motiv-2-p167.html + +## Headphones + +I use the [Bose QC35][qc35] (note: link goes to a newer generation than the one +I own) for their outstanding noise cancelling functionality and decent sound. + +When I first bought them I didn't expect them to end up on this list as the +firmware had issues that made them only barely usable, but Bose has managed to +iron these problems out over time. + +I avoid using Bluetooth when outside and fortunately the QC35 come with an +optional cable that you can plug into any good old 3.5mm jack. + +[qc35]: https://www.bose.co.uk/en_gb/products/headphones/over_ear_headphones/quietcomfort-35-wireless-ii.html + +### Earphones + +Actually, to follow up on the above - most of the time I'm not using (over-ear) +headphones, but (in-ear) earphones - specifically the (**wired!!!**) [Apple +EarPods][earpods]. + +Apple will probably stop selling these soon because they've gotten into the +habit of cancelling all of their good products, so I have a stash of these +around. You will usually find no fewer than 3-4 of them lying around in my +flat. + +[earpods]: https://www.apple.com/uk/shop/product/MNHF2ZM/A/earpods-with-35mm-headphone-plug + +## Phone + +The best phone I have used in recent years is the [iPhone SE][se]. It was the +*last* phone that had a reasonable size (up to 4") *and* a 3.5mm headphone jack. + +Unfortunately, it runs iOS. Despite owning a whole bunch of SEs, I have finally +moved on to an Android phone that is only moderately larger (still by an +annoying amount), but does at least have a headphone jack: The [Samsung Galaxy +S10e][s10e]. + +It has pretty good hardware and I can almost reach 70% of the screen, which is +better than other phones out there right now. Unfortunately it runs Samsung's +impossible-to-remove bloatware on top of Android, but that is still less +annoying to use than iOS. + +QUESTION: This is the only item on this list for which I am actively seeking a +replacement, so if you have any tips about new phones that might fit these +criteria that I've missed please let me know! + +[se]: https://en.wikipedia.org/wiki/IPhone_SE +[s10e]: https://www.phonearena.com/phones/Samsung-Galaxy-S10e_id11114 + +# Other stuff + +## Toothbrush + +The [Philips Sonicare][sonicare] (note: link goes to a newer generation than +mine) is excellent and well worth its money. + +I've had it for a few years and whereas I occasionally had minor teeth issues +before, they seem to be mostly gone now. According to my dentist the state of my +teeth is now usually pretty good and I draw a direct correlation back to this +thing. + +The newer generations come with flashy features like apps and probably more +LEDs, but I suspect that those can just be ignored. + +[sonicare]: https://www.philips.co.uk/c-m-pe/electric-toothbrushes + +## Shavers + +The [Philipps SensoTouch 3D][sensotouch] is excellent. Super-comfortable close +face shave in no time and leaves absolutely no mess around, as far as I can +tell! I've had this for ~5 years and it's not showing any signs of aging yet. + +Another bonus is that its battery time is effectively infinite. I've never had +to worry when bringing it on a longer trip! + +[sensotouch]: https://www.philips.co.uk/c-p/1250X_40/norelco-sensotouch-3d-wet-and-dry-electric-razor-with-precision-trimmer + +## Shoulder bag + +When I moved to London I wanted to stop using backpacks most of the time, as +those are just annoying to deal with when commuting on the tube. 
+ +To work around this I wanted a good shoulder bag with a vertical format (to save +space), but it turned out that there's very few of those around that reach any +kind of quality standard. + +The one I settled on is the [Waterfield Muzetto][muzetto] leather bag. It's one +of those things that comes with a bit of a price tag attached, but it's well +worth it! + +[muzetto]: https://www.sfbags.com/collections/shoulder-messenger-bags/products/muzetto-leather-bag + +## Wallet + +My wallet is the [Bellroy Slim Sleeve][slim-sleeve]. I don't carry cash unless +I'm attending an event in Germany and this wallet fits that lifestyle perfectly. + +It's near indestructible, looks great, is very slim and fits a ton of cards, +business cards, receipts and whatever else you want to be lugging around with +you! + +[slim-sleeve]: https://bellroy.com/products/slim-sleeve-wallet/default/charcoal diff --git a/users/tazjin/blog/posts/emacs-is-underrated.md b/users/tazjin/blog/posts/emacs-is-underrated.md new file mode 100644 index 000000000000..afb8dc889e53 --- /dev/null +++ b/users/tazjin/blog/posts/emacs-is-underrated.md @@ -0,0 +1,233 @@ +TIP: Hello, and thanks for offering to review my draft! This post +intends to convey to people what the point of Emacs is. Not to convert +them to use it, but at least with opening their minds to the +possibility that it might contain valuable things. I don't know if I'm +on track in the right direction, and your input will help me figure it +out. Thanks! + +TODO(tazjin): Restructure sections: Intro -> Introspectability (and +story) -> text-based UIs (which lead to fluidity, muscle memory across +programs and "translatability" of workflows) -> Outro. It needs more +flow! + +TODO(tazjin): Highlight more that it's not about editing: People can +derive useful things from Emacs by just using magit/org/notmuch/etc.! + +TODO(tazjin): Note that there's value in trying Emacs even if people +don't end up using it, similar to how learning languages like Lisp or +Haskell helps grow as a programmer even without using them day-to-day. + +*Real post starts below!* + +--------- + +There are two kinds of people: Those who use Emacs, and those who +think it is a text editor. This post is aimed at those in the second +category. + +Emacs is the most critical piece of software I run. My [Emacs +configuration][emacs-config] has steadily evolved for almost a decade. +Emacs is my window manager, mail client, terminal, git client, +information management system and - perhaps unsurprisingly - text +editor. + +Before going into why I chose to invest so much into this program, +follow me along on a little thought experiment: + +---------- + +Lets say you use a proprietary spreadsheet program. You find that +there are features in it that *almost, but not quite* do what you +want. + +What can you do? You can file a feature request to the company that +makes it and hope they listen, but for the likes of Apple and +Microsoft chances are they won't and there is nothing you can do. + +Let's say you are also running an open-source program for image +manipulation. You again find that some of its features are subtly +different from what you would want them to do. + +Things look a bit different this time - after all, the program is +open-source! You can go and fetch its source code, figure out its +internal structure and wrangle various layers of code into submission +until you find the piece that implements the functionality you want to +change. 
If you know the language it is written in; you can modify the +feature. + +Now all that's left is figuring out its build system[^1], building and +installing it and moving over to the new version. + +Realistically you are not going to do this much in the real world. The +friction to contributing to projects, especially complex ones, is +often quite high. For minor inconveniences, you might often find +yourself just shrugging and working around them. + +What if it didn't have to be this way? + +------------- + +One of the core properties of Emacs is that it is *introspective* and +*self-documenting*. + +For example: A few years ago, I had just switched over to using +[EXWM][], the Emacs X Window Manager. To launch applications I was +using an Emacs program called Helm that let me select installed +programs interactively and press <kbd>RET</kbd> to execute them. + +This was very useful - until I discovered that if I tried to open a +second terminal window, it would display an error: + + Error: urxvt is already running + +Had this been dmenu, I might have had to go through the whole process +described above to fix the issue. But it wasn't dmenu - it was an +Emacs program, and I did the following things: + +1. I pressed <kbd>C-h k</kbd>[^2] (which means "please tell me what + the following key does"), followed by <kbd>s-d</kbd> (which was my + keybinding for launching programs). + +2. Emacs displayed a new buffer saying, roughly: + + ``` + s-d runs the command helm-run-external-command (found in global-map), + which is an interactive autoloaded compiled Lisp function in + ‘.../helm-external.el’. + + It is bound to s-d. + ``` + + I clicked on the filename. + +3. Emacs opened the file and jumped to the definition of + `helm-run-external-command`. After a few seconds of reading through + the code, I found this snippet: + + ```lisp + (if (get-process proc) + (if helm-raise-command + (shell-command (format helm-raise-command real-com)) + (error "Error: %s is already running" real-com)) + ;; ... the actual code to launch programs followed below ... + ) + ``` + +4. I deleted the outer if-expression which implemented the behaviour I + didn't want, pressed <kbd>C-M-x</kbd> to reload the code and saved + the file. + +The whole process took maybe a minute, and the problem was now gone. + +Emacs isn't just "open-source", it actively encourages the user to +modify it, discover what to modify and experiment while it is running. + +In some sense it is like the experience of the old Lisp machines, a +paradigm that we have completely forgotten. + +--------------- + +Circling back to my opening statement: If Emacs is not a text editor, +then what *is* it? + +The Emacs website says this: + +> [Emacs] is an interpreter for Emacs Lisp, a dialect of the Lisp +> programming language with extensions to support text editing + +The core of Emacs implements the language and the functionality needed +to evaluate and run it, as well as various primitives for user +interface construction such as buffers, windows and frames. + +Every other feature of Emacs is implemented *in Emacs Lisp*. + +The Emacs distribution ships with rudimentary text editing +functionality (and some language-specific support for the most popular +languages), but it also brings with it two IRC clients, a Tetris +implementation, a text-mode web browser, [org-mode][] and many other +tools. 
+ +Outside of the core distribution there is a myriad of available +programs for Emacs: [magit][] (the famous git porcelain), text-based +[HTTP clients][], even interactive [Kubernetes frontends][k8s]. + +What all of these tools have in common is that they use text-based +user interfaces (UI elements like images are used only sparingly in +Emacs), and that they can be introspected and composed like everything +else in Emacs. + +If magit does not expose a git flag I need, it's trivial to add. If I +want a keybinding to jump from a buffer showing me a Kubernetes pod to +a magit buffer for the source code of the container, it only takes a +few lines of Emacs Lisp to implement. + +As proficiency with Emacs Lisp ramps up, the environment becomes +malleable like clay and evolves along with the user's taste and needs. +Muscle memory learned for one program translates seamlessly to others, +and the overall effect is an improvement in *workflow fluidity* that +is difficult to overstate. + +Also, workflows based on Emacs are *stable*. Moving my window +management to Emacs has meant that I'm not subject to the whim of some +third-party developer changing my window layouting features (as they +often do on MacOS). + +To illustrate this: Emacs has development history back to the 1970s, +continuous git history that survived multiple VCS migrations [since +1985][first-commit] (that's 22 years before git itself was released!) +and there is code[^3] implementing interactive functionality that has +survived unmodified in Emacs *since then*. + +--------------- + +Now, what is the point of this post? + +I decided to write this after a recent [tweet][] by @IanColdwater (in +the context of todo-management apps): + +> The fact that it's 2020 and the most viable answer to this appears +> to be Emacs might be the saddest thing I've ever heard + +What bothers me is that people see this as *sad*. Emacs being around +for this long and still being unparalleled for many of the UX +paradigms implemented by its programs is, in my book, incredible - and +not sad. + +How many other paradigms have survived this long? How many other tools +still have fervent followers, amazing [developer tooling][] and a +[vibrant ecosystem][] at this age? + +Steve Yegge [said it best][babel][^5]: Emacs has the Quality Without a +Name. + +What I wish you, the reader, should take away from this post is the +following: + +TODO(tazjin): Figure out what people should take away from this post. +I need to sleep on it. It's something about not dismissing tools just +because of their age, urging them to explore paradigms that might seem +unfamiliar and so on. Ideas welcome. + +--------------- + +[^1]: Wouldn't it be a joy if every project just used Nix? I digress ... +[^2]: These are keyboard shortcuts written in [Emacs Key Notation][ekn]. +[^3]: For example, [functionality for online memes][studly] that + wouldn't be invented for decades to come! +[^4]: ... and some things wrong, but that is an issue for a separate post! +[^5]: And I really *do* urge you to read that post's section on Emacs. 
+ +[emacs-config]: https://git.tazj.in/tree/tools/emacs +[EXWM]: https://github.com/ch11ng/exwm +[helm]: https://github.com/emacs-helm/helm +[ekn]: https://www.gnu.org/software/emacs/manual/html_node/efaq/Basic-keys.html +[org-mode]: https://orgmode.org/ +[magit]: https://magit.vc +[HTTP clients]: https://github.com/pashky/restclient.el +[k8s]: https://github.com/jypma/kubectl +[first-commit]: http://git.savannah.gnu.org/cgit/emacs.git/commit/?id=ce5584125c44a1a2fbb46e810459c50b227a95e2 +[studly]: http://git.savannah.gnu.org/cgit/emacs.git/commit/?id=47bdd84a0a9d20aab934482a64b84d0db63e7532 +[tweet]: https://twitter.com/IanColdwater/status/1220824466525229056 +[developer tooling]: https://github.com/alphapapa/emacs-package-dev-handbook +[vibrant ecosystem]: https://github.com/emacs-tw/awesome-emacs +[babel]: https://sites.google.com/site/steveyegge2/tour-de-babel#TOC-Lisp diff --git a/users/tazjin/blog/posts/make-object-t-again.md b/users/tazjin/blog/posts/make-object-t-again.md new file mode 100644 index 000000000000..420b57c0fde9 --- /dev/null +++ b/users/tazjin/blog/posts/make-object-t-again.md @@ -0,0 +1,98 @@ +A few minutes ago I found myself debugging a strange Java issue related +to Jackson, one of the most common Java JSON serialization libraries. + +The gist of the issue was that a short wrapper using some types from +[Javaslang](http://www.javaslang.io/) was causing unexpected problems: + +```java +public <T> Try<T> readValue(String json, TypeReference type) { + return Try.of(() -> objectMapper.readValue(json, type)); +} +``` + +The signature of this function was based on the original Jackson +`readValue` type signature: + +```java +public <T> T readValue(String content, TypeReference valueTypeRef) +``` + +While happily using my wrapper function I suddenly got an unexpected +error telling me that `Object` is incompatible with the type I was +asking Jackson to de-serialize, which got me to re-evaluate the above +type signature again. + +Lets look for a second at some code that will *happily compile* if you +are using Jackson\'s own `readValue`: + +```java +// This shouldn't compile! +Long l = objectMapper.readValue("\"foo\"", new TypeReference<String>(){}); +``` + +As you can see there we ask Jackson to decode the JSON into a `String` +as enclosed in the `TypeReference`, but assign the result to a `Long`. +And it compiles. And it failes at runtime with +`java.lang.ClassCastException: java.lang.String cannot be cast to java.lang.Long`. +Huh? + +Looking at the Jackson `readValue` implementation it becomes clear +what\'s going on here: + +```java +@SuppressWarnings({ "unchecked", "rawtypes" }) +public <T> T readValue(String content, TypeReference valueTypeRef) + throws IOException, JsonParseException, JsonMappingException +{ + return (T) _readMapAndClose(/* whatever */); +} +``` + +The function is parameterised over the type `T`, however the only place +where `T` occurs in the signature is in the parameter declaration and +the function return type. Java will happily let you use generic +functions and types without specifying type parameters: + +```java +// Compiles fine! +final List myList = List.of(1,2,3); + +// Type is now myList : List<Object> +``` + +Meaning that those parameters default to `Object`. Now in the code above +Jackson also explicitly casts the return value of its inner function +call to `T`. + +What ends up happening is that Java infers the expected return type from +the context of the `readValue` and then happily uses the unchecked cast +to fit that return type. 
If the type hints of the context aren\'t strong +enough we simply get `Object` back. + +So what\'s the fix for this? It\'s quite simple: + +```java +public <T> T readValue(String content, TypeReference<T> valueTypeRef) +``` + +By also making the parameter appear in the `TypeReference` we \"bind\" +`T` to the type enclosed in the type reference. The cast can then also +safely be removed. + +The cherries on top of this are: + +1. `@SuppressWarnings({ "rawtypes" })` explicitly disables a + warning that would\'ve caught this + +2. the `readValue` implementation using the less powerful `Class` + class to carry the type parameter does this correctly: `public <T> + T readValue(String content, Class<T> valueType)` + +The big question I have about this is *why* does Jackson do it this way? +Obviously the warning did not just appear there by chance, so somebody +must have thought about this? + +If anyone knows what the reason is, I\'d be happy to hear from you. + +PS: Shoutout to David & Lucia for helping me not lose my sanity over +this. diff --git a/users/tazjin/blog/posts/nixery-layers.md b/users/tazjin/blog/posts/nixery-layers.md new file mode 100644 index 000000000000..3f25ceadce7b --- /dev/null +++ b/users/tazjin/blog/posts/nixery-layers.md @@ -0,0 +1,272 @@ +TIP: This blog post was originally published as a design document for +[Nixery][] and is not written in the same style +as other blog posts. + +Thanks to my colleagues at Google and various people from the Nix community for +reviewing this. + +------ + +# Nixery: Improved Layering + +**Authors**: tazjin@ + +**Reviewers**: so...@, en...@, pe...@ + +**Status**: Implemented + +**Last Updated**: 2019-08-10 + +## Introduction + +This document describes a design for an improved image layering method for use +in Nixery. The algorithm [currently used][grhmc] is designed for a slightly +different use-case and we can improve upon it by making use of more of the +available data. + +## Background / Motivation + +Nixery is a service that uses the [Nix package manager][nix] to build container +images (for runtimes such as Docker), that are served on-demand via the +container [registry protocols][]. A demo instance is available at +[nixery.dev][]. + +In practice this means users can simply issue a command such as `docker pull +nixery.dev/shell/git` and receive an image that was built ad-hoc containing a +shell environment and git. + +One of the major advantages of building container images via Nix (as described +for `buildLayeredImage` in [this blog post][grhmc]) is that the +content-addressable nature of container image layers can be used to provide more +efficient caching characteristics (caching based on layer content) than what is +common with Dockerfiles and other image creation methods (caching based on layer +creation method). + +However, this is constrained by the maximum number of layers supported in an +image (125). A naive approach such as putting each included package (any +library, binary, etc.) in its own layer quickly runs into this limitation due to +the large number of dependencies more complex systems tend to have. In addition, +users wanting to extend images created by Nixery (e.g. via `FROM nixery.dev/…`) +share this layer maximum with the created image - limiting extensibility if all +layers are used up by Nixery. 
+ +In theory the layering strategy of `buildLayeredImage` should already provide +good caching characteristics, but in practice we are seeing many images with +significantly more packages than the number of layers configured, leading to +more frequent cache-misses than desired. + +The current implementation of `buildLayeredImage` inspects a graph of image +dependencies and determines the total number of references (direct & indirect) +to any node in the graph. It then sorts all dependencies by this popularity +metric and puts the first `n - 2` (for `n` being the maximum number of layers) +packages in their own layers, all remaining packages in one layer and the image +configuration in the final layer. + +## Design / Proposal + +## (Close-to) ideal layer-layout using more data + +We start out by considering what a close to ideal layout of layers would look +like for a simple use-case. + +![Ideal layout](/static/img/nixery/ideal_layout.webp) + +In this example, counting the total number of references to each node in the +graph yields the following result: + +| pkg | refs | +|-------|------| +| E | 3 | +| D | 2 | +| F | 2 | +| A,B,C | 1 | + +Assuming we are constrained to 4 layers, the current algorithm would yield these layers: + +``` +L1: E +L2: D +L3: F +L4: A, B, C +``` + +The initial proposal for this design is that additional data should be +considered in addition to the total number of references, in particular a +distinction should be made between direct and indirect references. Packages that +are only referenced indirectly should be merged with their parents. + +This yields the following table: + +| pkg | direct | indirect | +|-------|--------|----------| +| E | 3 | 3 | +| D | 2 | 2 | +| F | *1* | 2 | +| A,B,C | 1 | 1 | + +Despite having two indirect references, F is in fact only being referred to +once. Assuming that we have no other data available outside of this graph, we +have no reason to assume that F has any popularity outside of the scope of D. +This might yield the following layers: + +``` +L1: E +L2: D, F +L3: A +L4: B, C +``` + +D and F were grouped, while the top-level references (i.e. the packages +explicitly requested by the user) were split up. + +An assumption is introduced here to justify this split: The top-level packages +is what the user is modifying directly, and those groupings are likely +unpredictable. Thus it is opportune to not group top-level packages in the same +layer. + +This raises a new question: Can we make better decisions about where to split +the top-level? + +## (Even closer to) ideal layering using (even) more data + +So far when deciding layer layouts, only information immediately available in +the build graph of the image has been considered. We do however have much more +information available, as we have both the entire nixpkgs-tree and potentially +other information (such as download statistics). + +We can calculate the total number of references to any derivation in nixpkgs and +use that to rank the popularity of each package. Packages within some percentile +can then be singled out as good candidates for a separate layer. + +When faced with a splitting decision such as in the last section, this data can +aid the decision. Assume for example that package B in the above is actually +`openssl`, which is a very popular package. 
Taking this into account would +instead yield the following layers: + +``` +L1: E, +L2: D, F +L3: B, +L4: A, C +``` + +## Layer budgets and download size considerations + +As described in the introduction, there is a finite amount of layers available +for each image (the “layer budget”). When calculating the layer distribution, we +might end up with the “ideal” list of layers that we would like to create. Using +our previous example: + +``` +L1: E, +L2: D, F +L3: A +L4: B +L5: C +``` + +If we only have a layer budget of 4 available, something needs to be merged into +the same layer. To make a decision here we could consider only the package +popularity, but there is in fact another piece of information that has not come +up yet: The actual size of the package. + +Presumably a user would not mind downloading a library that is a few kilobytes +in size repeatedly, but they would if it was a 200 megabyte binary instead. + +Conversely if a large binary was successfully cached, but an extremely popular +small library is not, the total download size might also grow to irritating +levels. + +To avoid this we can calculate a merge rating: + + merge_rating(pkg) = popularity_percentile(pkg) × size(pkg.subtree) + +Packages with a low merge rating would be merged together before packages with +higher merge ratings. + +## Implementation + +There are two primary components of the implementation: + +1. The layering component which, given an image specification, decides the image + layers. + +2. The popularity component which, given the entire nixpkgs-tree, calculates the + popularity of packages. + +## Layering component + +It turns out that graph theory’s concept of [dominator trees][] maps reasonably +well onto the proposed idea of separating direct and indirect dependencies. This +becomes visible when creating the dominator tree of a simple example: + +![Example without extra edges](/static/img/nixery/example_plain.webp) + +Before calculating the dominator tree, we inspect each node and insert extra +edges from the root for packages that match a certain popularity or size +threshold. In this example, G is popular and an extra edge is inserted: + +![Example with extra edges](/static/img/nixery/example_extra.webp) + +Calculating the dominator tree of this graph now yields our ideal layer +distribution: + +![Dominator tree of example](/static/img/nixery/dominator.webp) + +The nodes immediately dominated by the root node can now be “harvested” as image +layers, and merging can be performed as described above until the result fits +into the layer budget. + +To implement this, the layering component uses the [gonum/graph][] library which +supports calculating dominator trees. The program is fed with Nix’s +`exportReferencesGraph` (which contains the runtime dependency graph and runtime +closure size) as well as the popularity data and layer budget. It returns a list +of layers, each specifying the paths it should contain. + +Nix invokes this program and uses the output to create a derivation for each +layer, which is then built and returned to Nixery as usual. + +TIP: This is implemented in [`layers.go`][layers.go] in Nixery. The file starts +with an explanatory comment that talks through the process in detail. + +## Popularity component + +The primary issue in calculating the popularity of each package in the tree is +that we are interested in the runtime dependencies of a derivation, not its +build dependencies. 
+ +To access information about the runtime dependency, the derivation actually +needs to be built by Nix - it can not be inferred because Nix does not know +which store paths will still be referenced by the build output. + +However for packages that are cached in the NixOS cache, we can simply inspect +the `narinfo`-files and use those to determine popularity. + +Not every package in nixpkgs is cached, but we can expect all *popular* packages +to be cached. Relying on the cache should therefore be reasonable and avoids us +having to rebuild/download all packages. + +The implementation will read the `narinfo` for each store path in the cache at a +given commit and create a JSON-file containing the total reference count per +package. + +For the public Nixery instance, these popularity files will be distributed via a +GCS bucket. + +TIP: This is implemented in [popcount][] in Nixery. + +-------- + +Hopefully this detailed design review was useful to you. You can also watch [my +NixCon talk][talk] about Nixery for a review of some of this, and some demos. + +[Nixery]: https://github.com/google/nixery +[grhmc]: https://grahamc.com/blog/nix-and-layered-docker-images +[Nix]: https://nixos.org/nix +[registry protocols]: https://github.com/opencontainers/distribution-spec/blob/master/spec.md +[nixery.dev]: https://nixery.dev +[dominator trees]: https://en.wikipedia.org/wiki/Dominator_(graph_theory) +[gonum/graph]: https://godoc.org/gonum.org/v1/gonum/graph +[layers.go]: https://github.com/google/nixery/blob/master/builder/layers.go +[popcount]: https://github.com/google/nixery/tree/master/popcount +[talk]: https://www.youtube.com/watch?v=pOI9H4oeXqA diff --git a/users/tazjin/blog/posts/nsa-zettabytes.md b/users/tazjin/blog/posts/nsa-zettabytes.md new file mode 100644 index 000000000000..f8b326f2fb42 --- /dev/null +++ b/users/tazjin/blog/posts/nsa-zettabytes.md @@ -0,0 +1,93 @@ +I've been reading a few discussions on Reddit about the new NSA data +centre that is being built and stumbled upon [this +post](http://www.reddit.com/r/restorethefourth/comments/1jf6cx/the_guardian_releases_another_leaked_document_nsa/cbe5hnc), +putting its alleged storage capacity at *5 zettabytes*. + +That seems to be a bit much which I tried to explain to that guy, but I +was quickly blocked by the common conspiracy argument that government +technology is somehow far beyond the wildest dreams of us mere mortals - +thus I wrote a very long reply that will most likely never be seen by +anybody. Therefore I've decided to repost it here. + +------------------------------------------------------------------------ + +I feel like I've entered /r/conspiracy. Please have some facts (and do +read them!) + +A one terabyte SSD (I assume that\'s what you meant by flash-drive) +would require 5000000000 of those. That is *five billion* of those flash +drives. Can you visualise how much five billion flash-drives are? + +A single SSD is roughly 2cm\*13cm\*13cm with an approximate weight of +80g. That would make 400 000 metric tons of SSDs, a weight equivalent to +*over one thousand Boeing 747 airplanes*. Even if we assume that they +solder the flash chips directly onto some kind of controller (which also +weighs something), the raw material for that would be completely insane. + +Another visualization: If you stacked 5 billion SSDs on top of each +other you would get an SSD tower that is a hundred thousand kilometres +high, that is equivalent to 2,5 x the equatorial circumference of +*Earth* or 62000 miles. 
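
If you want to check these numbers yourself, here is a quick back-of-the-envelope sketch. The 1 TB per drive, 80 g and 2 cm of height per drive come from the text above; the roughly 400 t for a loaded 747 and the 40,075 km equator are my own inputs for the comparison.

```python
# Back-of-the-envelope check of the figures above. The 1 TB, 80 g and
# 2 cm-per-drive numbers come from the text; ~400 t per loaded 747 and
# the 40,075 km equator are assumptions added for the comparison.
ZETTABYTE = 10 ** 21  # bytes
TERABYTE = 10 ** 12   # bytes

drives = 5 * ZETTABYTE // TERABYTE
print(f"drives needed:      {drives:,}")            # 5,000,000,000

weight_t = drives * 80 / 1_000_000                  # grams -> tonnes
print(f"total weight:       {weight_t:,.0f} t")     # 400,000 t
print(f"as loaded 747s:     {weight_t / 400:,.0f}") # ~1,000

stack_km = drives * 0.02 / 1000                     # 2 cm per drive
print(f"stack height:       {stack_km:,.0f} km")    # 100,000 km
print(f"times around Earth: {stack_km / 40_075:.1f}")  # ~2.5
```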
+ +The volume of those SSDs would be clocking in at 1690000000 cubic +metres, more than the Empire State building. Are you still with me? + +Lets speak cost. The Samsung SSD that I assume you are referring to will +clock in at \$600, lets assume that the NSA gets a discount when buying +*five billion* of those and gets them at the cheap price of \$250. That +makes 1.25 trillion dollars. That would be a significant chunk of the +current US national debt. + +And all of this is just SSDs to stick into servers and storage units, +which need a whole bunch of other equipment as well to support them - +the cost would probably shoot up to something like 8 trillion dollars if +they were to build this. It would with very high certainty be more than +the annual production of SSDs (I can\'t find numbers on that +unfortunately) and take up *slightly* more space than they have in the +Utah data centre (assuming you\'re not going to tell me that it is in +fact attached to an underground base that goes down to the core of the +Earth). + +Lets look at the \"But the government has better technologies!\" idea. + +Putting aside the fact that the military *most likely* does not have a +secret base on Mars that deals with advanced science that the rest of us +can only dream of, and doing this under the assumption that they do have +this base, lets assume that they build a storage chip that stores 100TB. +This reduces the amount of needed chips to \"just\" 50 million, lets say +they get 10 of those into a server / some kind of specialized storage +unit and we only need 5 million of those specially engineered servers, +with custom connectors, software, chips, storage, most likely also power +sources and whatever - 10 million completely custom units built with +technology that is not available to the market. Google is estimated to +have about a million servers in total, I don\'t know exactly in how many +data centres those are placed but numbers I heard recently said that +it\'s about 40. When Apple assembles a new iPhone model they need +massive factories with thousands of workers and supplies from many +different countries, over several months, to assemble just a few million +units for their launch month. + +You are seriously proposing that the NSA is better than Google and Apple +and the rest of the tech industry, world-wide, combined at designing +*everything* in tech, manufacturing *everything* in tech, without *any* +information about that leaking and without *any* of the science behind +it being known? That\'s not just insane, that\'s outright impossible. + +And we haven\'t even touched upon how they would route the necessary +amounts of bandwidth (crazy insane) to save *the entire internet* into +that data center. + +------------------------------------------------------------------------ + +I\'m not saying that the NSA is not building a data center to store +surveillance information, to have more capacity to spy on people and all +that - I\'m merely making the point that the extent in which conspiracy +sites say they do this vastly overestimates their actual abilities. They +don\'t have magic available to them! Instead of making up insane figures +like that you should focus on what we actually know about their +operations, because using those figures in a debate with somebody who is +responsible for this (and knows what they\'re talking about) will end +with you being destroyed - nobody will listen to the rest of what +you\'re saying when that happens. 
+ +\"Stick to the facts\" is valid for our side as well. diff --git a/users/tazjin/blog/posts/reversing-watchguard-vpn.md b/users/tazjin/blog/posts/reversing-watchguard-vpn.md new file mode 100644 index 000000000000..f1b779d8d993 --- /dev/null +++ b/users/tazjin/blog/posts/reversing-watchguard-vpn.md @@ -0,0 +1,158 @@ +TIP: WatchGuard has +[responded](https://www.reddit.com/r/netsec/comments/5tg0f9/reverseengineering_watchguard_mobile_vpn/dds6knx/) +to this post on Reddit. If you haven\'t read the post yet I\'d recommend +doing that first before reading the response to have the proper context. + +------------------------------------------------------------------------ + +One of my current client makes use of +[WatchGuard](http://www.watchguard.com/help/docs/fireware/11/en-US/Content/en-US/mvpn/ssl/mvpn_ssl_client-install_c.html) +Mobile VPN software to provide access to the internal network. + +Currently WatchGuard only provides clients for OS X and Windows, neither +of which I am very fond of. In addition an OpenVPN configuration file is +provided, but it quickly turned out that this was only a piece of the +puzzle. + +The problem is that this VPN setup is secured using 2-factor +authentication (good!), but it does not use OpenVPN\'s default +[challenge/response](https://openvpn.net/index.php/open-source/documentation/miscellaneous/79-management-interface.html) +functionality to negotiate the credentials. + +Connecting with the OpenVPN config that the website supplied caused the +VPN server to send me a token to my phone, but I simply couldn\'t figure +out how to supply it back to the server. In a normal challenge/response +setting the token would be supplied as the password on the second +authentication round, but the VPN server kept rejecting that. + +Other possibilities were various combinations of username&password +(I\'ve seen a lot of those around) so I tried a whole bunch, for example +`$password:$token` or even a `sha1(password, token)` - to no avail. + +At this point it was time to crank out +[Hopper](https://www.hopperapp.com/) and see what\'s actually going on +in the official OS X client - which uses OpenVPN under the hood! + +Diving into the client +---------------------- + +The first surprise came up right after opening the executable: It had +debug symbols in it - and was written in Objective-C! + +![Debug symbols](/static/img/watchblob_1.webp) + +A good first step when looking at an application binary is going through +the strings that are included in it, and the WatchGuard client had a lot +to offer. Among the most interesting were a bunch of URIs that looked +important: + +![Some URIs](/static/img/watchblob_2.webp) + +I started with the first one + + %@?action=sslvpn_download&filename=%@&fw_password=%@&fw_username=%@ + +and just curled it on the VPN host, replacing the username and +password fields with bogus data and the filename field with +`client.wgssl` - another string in the executable that looked like a +filename. + +To my surprise this endpoint immediately responded with a GZIPed file +containing the OpenVPN config, CA certificate, and the client +*certificate and key*, which I previously thought was only accessible +after logging in to the web UI - oh well. 
+ +The next endpoint I tried ended up being a bit more interesting still: + + /?action=sslvpn_logon&fw_username=%@&fw_password=%@&style=fw_logon_progress.xsl&fw_logon_type=logon&fw_domain=Firebox-DB + +Inserting the correct username and password into the query parameters +actually triggered the process that sent a token to my phone. The +response was a simple XML blob: + +```xml +<?xml version="1.0" encoding="UTF-8"?> +<resp> + <action>sslvpn_logon</action> + <logon_status>4</logon_status> + <auth-domain-list> + <auth-domain> + <name>RADIUS</name> + </auth-domain> + </auth-domain-list> + <logon_id>441</logon_id> + <chaStr>Enter Your 6 Digit Passcode </chaStr> +</resp> +``` + +Somewhat unsurprisingly that `chaStr` field is actually the challenge +string displayed in the client when logging in. + +This was obviously going in the right direction so I proceeded to the +procedures making use of this string. The first step was a relatively +uninteresting function called `-[VPNController sslvpnLogon]` which +formatted the URL, opened it and checked whether the `logon_status` was +`4` before proceeding with the `logon_id` and `chaStr` contained in the +response. + +*(Code snippets from here on are Hopper\'s pseudo-Objective-C)* + +![sslvpnLogon](/static/img/watchblob_3.webp) + +It proceeded to the function `-[VPNController processTokenPrompt]` which +showed the dialog window into which the user enters the token, sent it +off to the next URL and checked the `logon_status` again: + +(`r12` is the reference to the `VPNController` instance, i.e. `self`). + +![processTokenPrompt](/static/img/watchblob_4.webp) + +If the `logon_status` was `1` (apparently \"success\" here) it proceeded +to do something quite interesting: + +![processTokenPrompt2](/static/img/watchblob_5.webp) + +The user\'s password was overwritten with the (verified) OTP token - +before OpenVPN had even been started! + +Reading a bit more of the code in the subsequent +`-[VPNController doLogin]` method revealed that it shelled out to +`openvpn` and enabled the management socket, which makes it possible to +remotely control an `openvpn` process by sending it commands over TCP. + +It then simply sent the username and the OTP token as the credentials +after configuring OpenVPN with the correct config file: + +![doLogin](/static/img/watchblob_6.webp) + +... and the OpenVPN connection then succeeds. + +TL;DR +----- + +Rather than using OpenVPN\'s built-in challenge/response mechanism, the +WatchGuard client validates user credentials *outside* of the VPN +connection protocol and then passes on the OTP token, which seems to be +temporarily in a \'blessed\' state after verification, as the user\'s +password. + +I didn\'t check to see how much verification of this token is performed +(does it check the source IP against the IP that performed the challenge +validation?), but this certainly seems like a bit of a security issue - +considering that an attacker on the same network would, if they time the +attack right, only need your username and 6-digit OTP token to +authenticate. + +Don\'t roll your own security, folks! + +Bonus +----- + +The whole reason why I set out to do this is so I could connect to this +VPN from Linux, so this blog post wouldn\'t be complete without a +solution for that. + +To make this process really easy I\'ve written a [little +tool](https://github.com/tazjin/watchblob) that performs the steps +mentioned above from the CLI and lets users know when they can +authenticate using their OTP token. 
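
To make the first step of that flow concrete, here is a minimal sketch of the initial logon request. The endpoint and the XML fields are the ones shown above; the host and credentials are placeholders, and the follow-up request that submits the OTP token (as well as the OpenVPN management-socket part) is deliberately left out.

```python
# Minimal sketch of the first logon step described above. Host and
# credentials are placeholders; the second request that submits the
# verified token is omitted.
import xml.etree.ElementTree as ET
import requests

HOST = "https://vpn.example.com"

resp = requests.get(f"{HOST}/", params={
    "action": "sslvpn_logon",
    "fw_username": "someuser",
    "fw_password": "hunter2",
    "style": "fw_logon_progress.xsl",
    "fw_logon_type": "logon",
    "fw_domain": "Firebox-DB",
})
doc = ET.fromstring(resp.text)

if doc.findtext("logon_status") == "4":
    # The server has now sent a token to the user's phone. Once the
    # token has been verified in a second request, OpenVPN is started
    # with the username and the token as the password.
    print(f"{doc.findtext('chaStr')} (logon_id {doc.findtext('logon_id')})")
```

The watchblob tool linked above implements the complete sequence, including the token submission.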
diff --git a/users/tazjin/blog/posts/sick-in-sweden.md b/users/tazjin/blog/posts/sick-in-sweden.md new file mode 100644 index 000000000000..0c43c5832d73 --- /dev/null +++ b/users/tazjin/blog/posts/sick-in-sweden.md @@ -0,0 +1,26 @@ +I\'ve been sick more in the two years in Sweden than in the ten years +before that. + +Why? I have a theory about it and after briefly discussing it with one +of my roommates (who is experiencing the same thing) I\'d like to share +it with you: + +Normally when people get sick, are coughing, have a fever and so on they +take a few days off from work and stay at home. The reasons are twofold: +You want to rest a bit in order to get rid of the disease and you want +to *avoid infecting your co-workers*. + +In Sweden people will drag themselves into work anyways, because of a +concept called the +[karensdag](https://www.forsakringskassan.se/wps/portal/sjukvard/sjukskrivning_och_sjukpenning/karensdag_och_forstadagsintyg). +The TL;DR of this is \'if you take days off sick you won\'t get paid for +the first day, and only 80% of your salary on the remaining days\'. + +Many people are not willing to take that financial hit. In combination +with Sweden\'s rather mediocre healthcare system you end up constantly +being surrounded by sick people, not just in your own office but also on +public transport and basically all other public places. + +Oh and the best thing about this? Swedish politicians [often ignore +this](https://www.aftonbladet.se/nyheter/article10506886.ab) rule and +just don\'t report their sick days. Nice. diff --git a/users/tazjin/blog/posts/the-smu-problem.md b/users/tazjin/blog/posts/the-smu-problem.md new file mode 100644 index 000000000000..f411e3116046 --- /dev/null +++ b/users/tazjin/blog/posts/the-smu-problem.md @@ -0,0 +1,151 @@ +After having tested countless messaging apps over the years, being +unsatisfied with most of them and finally getting stuck with +[Telegram](https://telegram.org/) I have developed a little theory about +messaging apps. + +SMU stands for *Security*, *Multi-Device* and *Usability*. Quite like +the [CAP-theorem](https://en.wikipedia.org/wiki/CAP_theorem) I believe +that you can - using current models - only solve two out of three things +on this list. Let me elaborate what I mean by the individual points: + +**Security**: This is mainly about encryption of messages, not so much +about hiding identities to third-parties. Commonly some kind of +asymmetric encryption scheme. Verification of keys used must be possible +for the user. + +**Multi-Device**: Messaging-app clients for multiple devices, with +devices being linked to the same identifier, receiving the same messages +and being independent of each other. A nice bonus is also an open +protocol (like Telegram\'s) that would let people write new clients. + +**Usability**: Usability is a bit of a broad term, but what I mean by it +here is handling contacts and identities. It should be easy to create +accounts, give contact information to people and have everything just +work in a somewhat automated fashion. + +Some categorisation of popular messaging apps: + +**SU**: Threema + +**MU**: Telegram, Google Hangouts, iMessage, Facebook Messenger + +**SM**: +[Signal](https://gist.github.com/TheBlueMatt/d2fcfb78d29faca117f5) + +*Side note: The most popular messaging app - WhatsApp - only scores a +single letter (U). This makes it completely uninteresting to me.* + +Let\'s talk about **SM** - which might contain the key to solving SMU. +Two approaches are interesting here. 
+ +The single key model +-------------------- + +In Signal there is a single identity key which can be used to register a +device on the server. There exists a process for sharing this identity +key from a primary device to a secondary one, so that the secondary +device can register itself (see the link above for a description). + +This *almost* breaks M because there is still a dependence on a primary +device and newly onboarded devices can not be used to onboard further +devices. However, for lack of a better SM example I\'ll give it a pass. + +The other thing it obviously breaks is U as the process for setting it +up is annoying and having to rely on the primary device is a SPOF (there +might be a way to recover from a lost primary device, but I didn\'t find +any information so far). + +The multiple key model +---------------------- + +In iMessage every device that a user logs into creates a new key pair +and submits its public key to a per-account key pool. Senders fetch all +available public keys for a recipient and encrypt to all of the keys. + +Devices that join can catch up on history by receiving it from other +devices that use its public key. + +This *almost* solves all of SMU, but its compliance with S breaks due to +the fact that the key pool is not auditable, and controlled by a +third-party (Apple). How can you verify that they don\'t go and add +another key to your pool? + +A possible solution +------------------- + +Out of these two approaches I believe the multiple key one looks more +promising. If there was a third-party handling the key pool but in a way +that is verifiable, transparent and auditable that model could be used +to solve SMU. + +The technology I have been thinking about for this is some kind of +blockchain model and here\'s how I think it could work: + +1. Bob installs the app and begins onboarding. The first device + generates its keypair, submits the public key and an account + creation request. + +2. Bob\'s account is created on the messaging apps\' servers and a + unique identifier plus the fingerprint of the first device\'s public + key is written to the chain. + +3. Alice sends a message to Bob, her device asks the messaging service + for Bob\'s account\'s identity and public keys. Her device verifies + the public key fingerprint against the one in the blockchain before + encrypting to it and sending the message. + +4. Bob receives Alice\'s message on his first device. + +5. Bob logs in to his account on a second device. The device generates + a key pair and sends the public key to the service, the service + writes it to the blockchain using its identifier. + +6. The messaging service requests that Bob\'s first device signs the + second device\'s key and triggers a simple confirmation popup. + +7. Bob confirms the second device on his first device. It signs the key + and writes the signature to the chain. + +8. Alice sends another message, her device requests Bob\'s current keys + and receives the new key. It verifies that both the messaging + service and one of Bob\'s older devices have confirmed this key in + the chain. It encrypts the message to both keys and sends it on. + +9. Bob receives Alice\'s message on both devices. + +After this the second device can request conversation history from the +first one to synchronise old messages. + +Further devices added to an account can be confirmed by any of the +devices already in the account. 
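
To make the trust rule from the steps above concrete, here is a toy sketch of the check a client could run against the chain. All names and structures are made up for illustration, and real signature verification is elided - the point is only that a key counts if it is the account\'s registration key or has been signed by an already-accepted device key.

```python
# Toy model of the device-key check described in steps 2 and 5-8 above.
# Everything here is illustrative; in reality "signed_by" would be an
# actual signature verified against the chain.

def trusted_keys(chain, account):
    trusted = set()
    for entry in chain:
        if entry["account"] != account:
            continue
        if entry["type"] == "create":
            # Account creation pins the first device's key (step 2).
            trusted.add(entry["key"])
        elif entry["type"] == "confirm" and entry.get("signed_by") in trusted:
            # A new key only counts once an already-trusted device has
            # signed it (steps 6-8).
            trusted.add(entry["key"])
    return trusted

chain = [
    {"account": "bob", "type": "create", "key": "bob-device-1"},
    {"account": "bob", "type": "confirm", "key": "bob-device-2",
     "signed_by": "bob-device-1"},
    # A key written unilaterally by the messaging service is ignored,
    # because none of Bob's existing devices has signed it:
    {"account": "bob", "type": "confirm", "key": "mallory",
     "signed_by": "service"},
]

print(trusted_keys(chain, "bob"))  # {'bob-device-1', 'bob-device-2'}
```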
+ +The messaging service could not add new keys for an account on its own +because it does not control any of the private keys confirmed by the +chain. + +In case all devices were lost, the messaging service could associate the +account with a fresh identity in the block chain. Message history +synchronisation would of course be impossible. + +Feedback welcome +---------------- + +I would love to hear some input on this idea, especially if anyone knows +of an attempt to implement a similar model already. Possible attack +vectors would also be really interesting. + +Until something like this comes to fruition, I\'ll continue using +Telegram with GPG as the security layer when needed. + +**Update:** WhatsApp has launched an integration with the Signal guys +and added their protocol to the official WhatsApp app. This means +WhatsApp now firmly sits in the SU-category, but it still does not solve +this problem. + +**Update 2:** Facebook Messenger has also integrated with Signal, but +their secret chats do not support multi-device well (it is Signal +afterall). This means it scores either SU or MU depending on which mode +you use it in. + +An interesting service I have not yet evaluated properly is +[Matrix](http://matrix.org/). diff --git a/users/tazjin/cloud-dns/import b/users/tazjin/cloud-dns/import new file mode 100755 index 000000000000..e79e426b5553 --- /dev/null +++ b/users/tazjin/cloud-dns/import @@ -0,0 +1,11 @@ +#!/bin/sh +set -ue + +# Imports a zone file into a Google Cloud DNS zone of the same name +readonly ZONE="${1}" + +gcloud dns record-sets import "${ZONE}" \ + --project composite-watch-759 \ + --zone-file-format \ + --delete-all-existing \ + --zone "${ZONE}" diff --git a/users/tazjin/cloud-dns/kontemplate-works b/users/tazjin/cloud-dns/kontemplate-works new file mode 100644 index 000000000000..326a129d2105 --- /dev/null +++ b/users/tazjin/cloud-dns/kontemplate-works @@ -0,0 +1,15 @@ +;; -*- mode: zone; -*- +;; Do not delete these +kontemplate.works. 21600 IN NS ns-cloud-d1.googledomains.com. +kontemplate.works. 21600 IN NS ns-cloud-d2.googledomains.com. +kontemplate.works. 21600 IN NS ns-cloud-d3.googledomains.com. +kontemplate.works. 21600 IN NS ns-cloud-d4.googledomains.com. +kontemplate.works. 21600 IN SOA ns-cloud-d1.googledomains.com. cloud-dns-hostmaster.google.com. 4 21600 3600 259200 300 + +;; Github site setup +kontemplate.works. 60 IN A 185.199.108.153 +kontemplate.works. 60 IN A 185.199.109.153 +kontemplate.works. 60 IN A 185.199.110.153 +kontemplate.works. 60 IN A 185.199.111.153 + +www.kontemplate.works. 60 IN CNAME tazjin.github.io. diff --git a/users/tazjin/cloud-dns/oslo-pub b/users/tazjin/cloud-dns/oslo-pub new file mode 100644 index 000000000000..674687484b90 --- /dev/null +++ b/users/tazjin/cloud-dns/oslo-pub @@ -0,0 +1,8 @@ +;; Do not delete these +oslo.pub. 21600 IN NS ns-cloud-c1.googledomains.com. +oslo.pub. 21600 IN NS ns-cloud-c2.googledomains.com. +oslo.pub. 21600 IN NS ns-cloud-c3.googledomains.com. +oslo.pub. 21600 IN NS ns-cloud-c4.googledomains.com. +oslo.pub. 21600 IN SOA ns-cloud-c1.googledomains.com. cloud-dns-hostmaster.google.com. 4 21600 3600 1209600 300 + +oslo.pub. 60 IN A 46.21.106.241 diff --git a/users/tazjin/cloud-dns/root-tazj-in b/users/tazjin/cloud-dns/root-tazj-in new file mode 100644 index 000000000000..43db5834a0ca --- /dev/null +++ b/users/tazjin/cloud-dns/root-tazj-in @@ -0,0 +1,33 @@ +;; -*- mode: zone; -*- +;; Do not delete these +tazj.in. 21600 IN NS ns-cloud-a1.googledomains.com. +tazj.in. 
21600 IN NS ns-cloud-a2.googledomains.com. +tazj.in. 21600 IN NS ns-cloud-a3.googledomains.com. +tazj.in. 21600 IN NS ns-cloud-a4.googledomains.com. +tazj.in. 21600 IN SOA ns-cloud-a1.googledomains.com. cloud-dns-hostmaster.google.com. 123 21600 3600 1209600 300 + +;; Email setup +tazj.in. 300 IN MX 1 aspmx.l.google.com. +tazj.in. 300 IN MX 5 alt1.aspmx.l.google.com. +tazj.in. 300 IN MX 5 alt2.aspmx.l.google.com. +tazj.in. 300 IN MX 10 alt3.aspmx.l.google.com. +tazj.in. 300 IN MX 10 alt4.aspmx.l.google.com. +tazj.in. 300 IN TXT "v=spf1 include:_spf.google.com ~all" +google._domainkey.tazj.in. 21600 IN TXT "v=DKIM1; k=rsa; p=MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA9AphX/WJf8zVXQB5Jk0Ry1MI6ARa6vEyAoJtpjpt9Nbm7XU4qVWFRJm+L0VFd5EZ5YDPJTIZ90lJE3/B8vae2ipnoGbJbj8LaVSzzIPMbWmhPhX3fkLJFdkv7xRDMDn730iYXRlfkgv6GsqbS8vZt7mzxx4mpnePTI323yjRVkwRW8nGVbsmB25ZoG1/0985" "kg4mSYxzWeJ2ozCPFhT4sfMtZMXe/4QEkJz/zkod29KZfFJmLgEaf73WLdBX8kdwbhuh2PYXt/PwzUrRzF5ujVCsSaTZwdRVPErcf+yo4NvedelTjjs8rFVfoJiaDD1q2bQ3w0gDEBWPdC2VP7k9zwIDAQAB" + +;; Site verifications +tazj.in. 3600 IN TXT "keybase-site-verification=gC4kzEmnLzY7F669PjN-pw2Cf__xHqcxQ08Gb-W9dhE" +tazj.in. 300 IN TXT "google-site-verification=d3_MI1OwD6q2OT42Vvh0I9w2u3Q5KFBu-PieNUE1Fig" +www.tazj.in. 3600 IN TXT "keybase-site-verification=ER8m_byyqAhzeIy9TyzkAU1H2p2yHtpvImuB_XrRF2U" + +;; Blog "storage engine" +blog.tazj.in. 21600 IN NS ns-cloud-c1.googledomains.com. +blog.tazj.in. 21600 IN NS ns-cloud-c2.googledomains.com. +blog.tazj.in. 21600 IN NS ns-cloud-c3.googledomains.com. +blog.tazj.in. 21600 IN NS ns-cloud-c4.googledomains.com. + +;; Webpage records setup +tazj.in. 300 IN A 34.98.120.189 +www.tazj.in. 300 IN A 34.98.120.189 +git.tazj.in. 300 IN A 34.98.120.189 +files.tazj.in. 300 IN CNAME c.storage.googleapis.com. diff --git a/users/tazjin/covid/us_mortality.jq b/users/tazjin/covid/us_mortality.jq new file mode 100644 index 000000000000..584be3ef9afe --- /dev/null +++ b/users/tazjin/covid/us_mortality.jq @@ -0,0 +1,36 @@ +# This turns the CDC mortality data[0] into a format useful for my +# excess mortality spreadsheet. The US format is by far the worst one +# I have dealt with, as expected. +# +# This requires miller for transforming the CSV appropriately. +# +# Params: +# state: abbreviation of the state to extract ('US' for whole country) +# period: time period (either "2020" for current data, or anything else +# for historical averages) +# +# Call as: +# mlr --icsv --ojson cat weekly.csv | \ +# jq -rsf us_mortality.jq --arg state US --arg period 2020 +# +# [0]: https://www.cdc.gov/nchs/nvss/vsrr/covid19/excess_deaths.htm + +def filter_period(period): + if period == "2020" + then . | map(select(.["Time Period"] == 2020)) + else . | map(select(.["Time Period"] == "2015-2019")) + end; + +def collate_weeks(period): + (. | map(.["Number of Deaths"]) | add) as $count + | { + count: (if period == "2020" then $count else $count / 5 end), + week: .[0].Week, + }; + +. 
| map(select(.Type == "Predicted (weighted)")) + | map(select(.["State Abbreviation"] == $state)) + | filter_period($period) + | group_by(.Week) + | map(collate_weeks($period)) + | .[] | "week \(.week): \(.count)" diff --git a/users/tazjin/dotfiles/config.fish b/users/tazjin/dotfiles/config.fish new file mode 100644 index 000000000000..de2c99ae6007 --- /dev/null +++ b/users/tazjin/dotfiles/config.fish @@ -0,0 +1,40 @@ +# Configure classic prompt +set fish_color_user --bold blue +set fish_color_cwd --bold white + +# Enable colour hints in VCS prompt: +set __fish_git_prompt_showcolorhints yes +set __fish_git_prompt_color_prefix purple +set __fish_git_prompt_color_suffix purple + +# Fish configuration +set fish_greeting "" +set PATH $HOME/.local/bin $HOME/.cargo/bin $PATH + +# Editor configuration +set -gx EDITOR "emacsclient" +set -gx ALTERNATE_EDITOR "emacs -q -nw" +set -gx VISUAL "emacsclient" + +# Miscellaneous +eval (direnv hook fish) + +# Useful command aliases +alias gpr 'git pull --rebase' +alias gco 'git checkout' +alias gf 'git fetch' +alias gap 'git add -p' +alias pbcopy 'xclip -selection clipboard' +alias edit 'emacsclient -n' +alias servedir 'nix-shell -p haskellPackages.wai-app-static --run warp' + +# Old habits die hard (also ls is just easier to type): +alias ls 'exa' + +# Fix up nix-env & friends for Nix 2.0 +export NIX_REMOTE=daemon + +# Fix display of fish in emacs' term-mode: +function fish_title + true +end diff --git a/users/tazjin/dotfiles/msmtprc b/users/tazjin/dotfiles/msmtprc new file mode 100644 index 000000000000..2af3b9433a6d --- /dev/null +++ b/users/tazjin/dotfiles/msmtprc @@ -0,0 +1,15 @@ +defaults +port 587 +tls on +tls_trust_file /etc/ssl/certs/ca-certificates.crt + +# GSuite for tazj.in +account tazjin +host smtp.gmail.com +port 587 +from mail@tazj.in +auth oauthbearer +user mail@tazj.in +passwordeval "cat ~/mail/account.tazjin/.credentials.gmailieer.json | jq -r '.access_token'" + +account default : tazjin diff --git a/users/tazjin/dotfiles/notmuch-config b/users/tazjin/dotfiles/notmuch-config new file mode 100644 index 000000000000..a490774e635f --- /dev/null +++ b/users/tazjin/dotfiles/notmuch-config @@ -0,0 +1,21 @@ +# .notmuch-config - Configuration file for the notmuch mail system +# +# For more information about notmuch, see https://notmuchmail.org + +[database] +path=/home/vincent/mail + +[user] +name=Vincent Ambo +primary_email=mail@tazj.in +other_email=tazjin@gmail.com; + +[new] +tags=unread;inbox; +ignore= + +[search] +exclude_tags=deleted;spam;draft; + +[maildir] +synchronize_flags=true diff --git a/users/tazjin/dt/CMakeLists.txt b/users/tazjin/dt/CMakeLists.txt new file mode 100644 index 000000000000..85b659fea862 --- /dev/null +++ b/users/tazjin/dt/CMakeLists.txt @@ -0,0 +1,16 @@ +# -*- mode: cmake; -*- +cmake_minimum_required(VERSION 3.16) +project(dt) +add_executable(dt dt.cc) +find_package(absl REQUIRED) + +target_link_libraries(dt + absl::flags + absl::flags_parse + absl::hash + absl::time + absl::strings + farmhash +) + +install(TARGETS dt DESTINATION bin) diff --git a/users/tazjin/dt/README.md b/users/tazjin/dt/README.md new file mode 100644 index 000000000000..ee43d5606409 --- /dev/null +++ b/users/tazjin/dt/README.md @@ -0,0 +1,11 @@ +dt +== + +It's got a purpose. + +## Usage: + +``` +nix-build -E '(import (builtins.fetchGit "https://git.tazj.in/") {}).fun.dt' +./result/bin/dt --one ... --two ... 
+```
diff --git a/users/tazjin/dt/default.nix b/users/tazjin/dt/default.nix
new file mode 100644
index 000000000000..04430c883bf6
--- /dev/null
+++ b/users/tazjin/dt/default.nix
@@ -0,0 +1,15 @@
+{ depot, pkgs, ... }:
+
+let
+ stdenv = with pkgs; overrideCC clangStdenv clang_11;
+ abseil_cpp = pkgs.abseil_cpp;
+in stdenv.mkDerivation {
+ name = "dt";
+ src = ./.;
+ nativeBuildInputs = [ pkgs.cmake ];
+ buildInputs = with pkgs; [
+ abseil_cpp
+ farmhash
+ ];
+ meta.ci = false;
+}
diff --git a/users/tazjin/dt/dt.cc b/users/tazjin/dt/dt.cc
new file mode 100644
index 000000000000..5c4c3da76853
--- /dev/null
+++ b/users/tazjin/dt/dt.cc
@@ -0,0 +1,79 @@
+#include <iostream>
+#include <vector>
+
+#include "absl/flags/flag.h"
+#include "absl/flags/parse.h"
+#include "absl/hash/hash.h"
+#include "absl/strings/str_cat.h"
+#include "absl/time/clock.h"
+#include "absl/time/time.h"
+#include "absl/types/optional.h"
+#include "farmhash.h"
+
+ABSL_FLAG(std::vector<std::string>, words, {}, "words to use");
+
+struct Result {
+ std::string a;
+ int ec;
+ absl::optional<std::string> p;
+};
+
+std::string which(const std::vector<std::string>& words) {
+ uint64_t fp = 0;  // initialise; this was previously read before assignment
+ std::string word;
+
+ for (const auto& w : words) {
+ auto nfp = util::Fingerprint64(w);
+ if (nfp > fp) {
+ fp = nfp;
+ word = w;
+ }
+ }
+
+ return word;
+}
+
+Result decide(const std::vector<std::string>& words) {
+ auto input = absl::FormatTime("%Y%m%d", absl::Now(), absl::UTCTimeZone());
+ for (const auto& w : words) {
+ input += w;
+ }
+
+ auto base = util::Fingerprint64(input);
+ Result result = { "nope" };
+
+ if (base % 10 == 0) {
+ result.a = "ca";
+ } else if (base % 8 == 0) {
+ result.a = "c1";
+ result.p = which(words);
+ } else if (base % 6 == 0) {
+ result.a = "skip";
+ } else if (base % 3 == 0) {
+ result.a = "e1";
+ result.ec = base % 10;
+ result.p = which(words);
+ } else if (base % 2 == 0) {
+ result.a = "ea";
+ result.ec = base % 10;
+ }
+
+ return result;
+}
+
+int main(int argc, char *argv[]) {
+ absl::ParseCommandLine(argc, argv);
+
+ auto words = absl::GetFlag(FLAGS_words);
+ if (words.size() < 2) {
+ std::cerr << "needs at least two!" << std::endl;
+ return 1;
+ }
+
+ auto result = decide(words);
+ std::cout << result.a
+ << (result.p.has_value() ? absl::StrCat(" ", "(", result.p.value(), ")")
+ : "")
+ << (result.ec > 0 ? absl::StrCat(": ", result.ec) : "")
+ << std::endl;
+}
diff --git a/users/tazjin/emacs/.gitignore b/users/tazjin/emacs/.gitignore
new file mode 100644
index 000000000000..7b666905f847
--- /dev/null
+++ b/users/tazjin/emacs/.gitignore
@@ -0,0 +1,11 @@
+.smex-items
+*token*
+auto-save-list/
+clones/
+elpa/
+irc.el
+local.el
+other/
+scripts/
+themes/
+*.elc
diff --git a/users/tazjin/emacs/README.md b/users/tazjin/emacs/README.md
new file mode 100644
index 000000000000..5c667333962e
--- /dev/null
+++ b/users/tazjin/emacs/README.md
@@ -0,0 +1,7 @@
+tools/emacs
+===========
+
+This sub-folder builds my Emacs configuration, supplying packages from
+Nix and configuration from this folder.
+
+I use Emacs for many things (including as my desktop environment).
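A usage sketch in the same style as the dt README above; the attribute path `users.tazjin.emacs` is an assumption about how this folder is exposed in the depot tree, while the `tazjins-emacs` wrapper name comes from users/tazjin/emacs/default.nix further down:

```
nix-build -E '(import (builtins.fetchGit "https://git.tazj.in/") {}).users.tazjin.emacs'
./result/bin/tazjins-emacs
```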
diff --git a/users/tazjin/emacs/config/bindings.el b/users/tazjin/emacs/config/bindings.el new file mode 100644 index 000000000000..029aa2d0e507 --- /dev/null +++ b/users/tazjin/emacs/config/bindings.el @@ -0,0 +1,59 @@ +;; Font size +(define-key global-map (kbd "C-=") 'increase-default-text-scale) ;; '=' because there lies '+' +(define-key global-map (kbd "C--") 'decrease-default-text-scale) +(define-key global-map (kbd "C-x C-0") 'set-default-text-scale) + +;; What does <tab> do? Well, it depends ... +(define-key prog-mode-map (kbd "<tab>") #'company-indent-or-complete-common) + +;; imenu instead of insert-file +(global-set-key (kbd "C-x i") 'imenu) + +;; Window switching. (C-x o goes to the next window) +(windmove-default-keybindings) ;; Shift+direction + +;; Start eshell or switch to it if it's active. +(global-set-key (kbd "C-x m") 'eshell) + +(global-set-key (kbd "C-x C-p") 'browse-repositories) +(global-set-key (kbd "M-g M-g") 'goto-line-with-feedback) + +;; Miscellaneous editing commands +(global-set-key (kbd "C-c w") 'whitespace-cleanup) +(global-set-key (kbd "C-c a") 'align-regexp) +(global-set-key (kbd "C-c m") 'mc/mark-dwim) + +;; Browse URLs (very useful for Gitlab's SSH output!) +(global-set-key (kbd "C-c b p") 'browse-url-at-point) +(global-set-key (kbd "C-c b b") 'browse-url) + +;; C-x REALLY QUIT (idea by @magnars) +(global-set-key (kbd "C-x r q") 'save-buffers-kill-terminal) +(global-set-key (kbd "C-x C-c") 'ignore) + +;; Open a file in project: +(global-set-key (kbd "C-c f") 'project-find-file) + +;; Search in a project +(global-set-key (kbd "C-c r g") 'rg-in-project) + +;; Open a file via magit: +(global-set-key (kbd "C-c C-f") #'magit-find-file-worktree) + +;; Insert TODO comments +(global-set-key (kbd "C-c t") 'insert-todo-comment) + +;; Make sharing music easier +(global-set-key (kbd "s-s w") #'songwhip-lookup-url) + +;; Add subthread collapsing to notmuch-show. +;; +;; C-, closes a thread, C-. opens a thread. This mirrors stepping +;; in/out of definitions. +(define-key notmuch-show-mode-map (kbd "C-,") 'notmuch-show-open-or-close-subthread) +(define-key notmuch-show-mode-map (kbd "C-.") + (lambda () + (interactive) + (notmuch-show-open-or-close-subthread t))) ;; open + +(provide 'bindings) diff --git a/users/tazjin/emacs/config/custom.el b/users/tazjin/emacs/config/custom.el new file mode 100644 index 000000000000..91eaf69ae59b --- /dev/null +++ b/users/tazjin/emacs/config/custom.el @@ -0,0 +1,27 @@ +(custom-set-variables + ;; custom-set-variables was added by Custom. + ;; If you edit it by hand, you could mess it up, so be careful. + ;; Your init file should contain only one such instance. + ;; If there is more than one, they won't work right. 
+ '(ac-auto-show-menu 0.8) + '(ac-delay 0.2) + '(avy-background t) + '(cargo-process--enable-rust-backtrace 1) + '(company-auto-complete (quote (quote company-explicit-action-p))) + '(company-idle-delay 0.5) + '(custom-safe-themes + (quote + ("d61fc0e6409f0c2a22e97162d7d151dee9e192a90fa623f8d6a071dbf49229c6" "3c83b3676d796422704082049fc38b6966bcad960f896669dfc21a7a37a748fa" "89336ca71dae5068c165d932418a368a394848c3b8881b2f96807405d8c6b5b6" default))) + '(display-time-default-load-average nil) + '(display-time-interval 30) + '(elnode-send-file-program "/run/current-system/sw/bin/cat") + '(frame-brackground-mode (quote dark)) + '(global-auto-complete-mode t) + '(kubernetes-commands-display-buffer-function (quote display-buffer)) + '(lsp-gopls-server-path "/home/tazjin/go/bin/gopls") + '(magit-log-show-gpg-status t) + '(ns-alternate-modifier (quote none)) + '(ns-command-modifier (quote control)) + '(ns-right-command-modifier (quote meta)) + '(require-final-newline (quote visit-save)) + '(tls-program (quote ("gnutls-cli --x509cafile %t -p %p %h")))) diff --git a/users/tazjin/emacs/config/desktop.el b/users/tazjin/emacs/config/desktop.el new file mode 100644 index 000000000000..e411da5a529a --- /dev/null +++ b/users/tazjin/emacs/config/desktop.el @@ -0,0 +1,279 @@ +;; -*- lexical-binding: t; -*- +;; +;; Configure desktop environment settings, including both +;; window-management (EXWM) as well as additional system-wide +;; commands. + +(require 's) +(require 'f) +(require 'dash) +(require 'exwm) +(require 'exwm-config) +(require 'exwm-randr) +(require 'exwm-systemtray) + +(defun pactl (cmd) + (shell-command (concat "pactl " cmd)) + (message "Volume command: %s" cmd)) + +(defun volume-mute () (interactive) (pactl "set-sink-mute @DEFAULT_SINK@ toggle")) +(defun volume-up () (interactive) (pactl "set-sink-volume @DEFAULT_SINK@ +5%")) +(defun volume-down () (interactive) (pactl "set-sink-volume @DEFAULT_SINK@ -5%")) + +(defun brightness-up () + (interactive) + (shell-command "xbacklight -inc 5") + (message "Brightness increased")) + +(defun brightness-down () + (interactive) + (shell-command "xbacklight -dec 5") + (message "Brightness decreased")) + +(defun set-xkb-layout (layout) + "Set the current X keyboard layout." + + (shell-command (format "setxkbmap %s" layout)) + (shell-command "setxkbmap -option caps:super") + (message "Set X11 keyboard layout to '%s'" layout)) + +(defun lock-screen () + (interactive) + (set-xkb-layout "us") + + ;; A sudoers configuration is in place that lets me execute this + ;; particular command without having to enter a password. + ;; + ;; The reason for things being set up this way is that I want + ;; xsecurelock.service to be started as a system-wide service that + ;; is tied to suspend.target. + (shell-command "/usr/bin/sudo /usr/bin/systemctl start xsecurelock.service")) + +(defun create-window-name () + "Construct window names to be used for EXWM buffers by + inspecting the window's X11 class and title. + + A lot of commonly used applications either create titles that + are too long by default, or in the case of web + applications (such as Cider) end up being constructed in + awkward ways. + + To avoid this issue, some rewrite rules are applied for more + human-accessible titles." + + (pcase (list (or exwm-class-name "unknown") (or exwm-title "unknown")) + ;; In Cider windows, rename the class and keep the workspace/file + ;; as the title. + (`("Google-chrome" ,(and (pred (lambda (title) (s-ends-with? 
" - Cider" title))) title)) + (format "Cider<%s>" (s-chop-suffix " - Cider" title))) + + ;; Attempt to detect IRCCloud windows via their title, which is a + ;; combination of the channel name and network. + ;; + ;; This is what would often be referred to as a "hack". The regexp + ;; will not work if a network connection buffer is selected in + ;; IRCCloud, but since the title contains no other indication that + ;; we're dealing with an IRCCloud window + (`("Google-chrome" + ,(and (pred (lambda (title) + (s-matches? "^[\*\+]\s#[a-zA-Z0-9/\-]+\s\|\s[a-zA-Z\.]+$" title))) + title)) + (format "IRCCloud<%s>" title)) + + ;; For other Chrome windows, make the title shorter. + (`("Google-chrome" ,title) + (format "Chrome<%s>" (s-truncate 42 (s-chop-suffix " - Google Chrome" title)))) + + ;; Gnome-terminal -> Term + (`("Gnome-terminal" ,title) + ;; fish-shell buffers contain some unnecessary whitespace and + ;; such before the current working directory. This can be + ;; stripped since most of my terminals are fish shells anyways. + (format "Term<%s>" (s-trim-left (s-chop-prefix "fish" title)))) + + ;; Quassel buffers + ;; + ;; These have a title format that looks like: + ;; "Quassel IRC - ##tvl (Freenode) — Quassel IRC" + (`("quassel" ,title) + (progn + (if (string-match + (rx "Quassel IRC - " + (group (one-or-more (any alnum "[" "]" "&" "-" "#"))) ;; <-- channel name + " (" (group (one-or-more (any ascii space))) ")" ;; <-- network name + " — Quassel IRC") + title) + (format "Quassel<%s>" (match-string 2 title)) + title))) + + ;; For any other application, a name is constructed from the + ;; window's class and name. + (`(,class ,title) (format "%s<%s>" class (s-truncate 12 title))))) + +;; EXWM launch configuration +;; +;; This used to use use-package, but when something breaks use-package +;; it doesn't exactly make debugging any easier. + +(let ((titlef (lambda () + (exwm-workspace-rename-buffer (create-window-name))))) + (add-hook 'exwm-update-class-hook titlef) + (add-hook 'exwm-update-title-hook titlef)) + +(fringe-mode 3) +(exwm-enable) + +;; 's-N': Switch to certain workspace +(setq exwm-workspace-number 10) +(dotimes (i 10) + (exwm-input-set-key (kbd (format "s-%d" i)) + `(lambda () + (interactive) + (exwm-workspace-switch-create ,i)))) + +;; Launch applications / any command with completion (dmenu style!) +(exwm-input-set-key (kbd "s-d") #'counsel-linux-app) +(exwm-input-set-key (kbd "s-x") #'run-external-command) +(exwm-input-set-key (kbd "s-p") #'password-store-lookup) + +;; Add X11 terminal selector to a key +(exwm-input-set-key (kbd "C-x t") #'ts/switch-to-terminal) + +;; Toggle between line-mode / char-mode +(exwm-input-set-key (kbd "C-c C-t C-t") #'exwm-input-toggle-keyboard) + +;; Volume keys +(exwm-input-set-key (kbd "<XF86AudioMute>") #'volume-mute) +(exwm-input-set-key (kbd "<XF86AudioRaiseVolume>") #'volume-up) +(exwm-input-set-key (kbd "<XF86AudioLowerVolume>") #'volume-down) + +;; Brightness keys +(exwm-input-set-key (kbd "<XF86MonBrightnessDown>") #'brightness-down) +(exwm-input-set-key (kbd "<XF86MonBrightnessUp>") #'brightness-up) +(exwm-input-set-key (kbd "<XF86Display>") #'lock-screen) + +;; Shortcuts for switching between keyboard layouts +(defmacro bind-xkb (lang key) + `(exwm-input-set-key (kbd (format "s-%s" ,key)) + (lambda () + (interactive) + (set-xkb-layout ,lang)))) + +(bind-xkb "us" "k u") +(bind-xkb "de" "k d") +(bind-xkb "no" "k n") +(bind-xkb "ru" "k r") +(bind-xkb "se" "k s") + +;; These are commented out because Emacs no longer starts (??) 
if +;; they're set at launch. +;; +;; (bind-xkb "us" "л г") +;; (bind-xkb "de" "л в") +;; (bind-xkb "no" "л т") +;; (bind-xkb "ru" "л к") + +;; Line-editing shortcuts +(exwm-input-set-simulation-keys + '(([?\C-d] . delete) + ([?\C-w] . ?\C-c))) + +;; Show time & battery status in the mode line +(display-time-mode) +(display-battery-mode) + +;; enable display of X11 system tray within Emacs +(exwm-systemtray-enable) + +;; Configure xrandr (multi-monitor setup). +;; +;; This makes some assumptions about how my machines are connected to +;; my home setup during the COVID19 isolation period. + +(defun set-randr-config (screens) + (setq exwm-randr-workspace-monitor-plist + (-flatten (-map (lambda (screen) + (-map (lambda (screen-id) (list screen-id (car screen))) (cdr screen))) + screens)))) + +;; Layouts for Vauxhall (laptop) + +(defun randr-vauxhall-layout-single () + "Laptop screen only!" + (interactive) + (set-randr-config '(("eDP1" (number-sequence 0 9)))) + (shell-command "xrandr --output eDP1 --auto --primary") + (shell-command "xrandr --output HDMI1 --off") + (shell-command "xrandr --output DP2 --off") + (exwm-randr-refresh)) + +(defun randr-vauxhall-layout-all () + "Use all screens at home." + (interactive) + (set-randr-config + '(("eDP1" 0) + ("HDMI1" 1 2 3 4 5) + ("DP2" 6 7 8 9))) + + (shell-command "xrandr --output HDMI1 --right-of eDP1 --auto --primary") + (shell-command "xrandr --output DP2 --right-of HDMI1 --auto --rotate left") + (exwm-randr-refresh)) + +(defun randr-vauxhall-layout-wide-only () + "Use only the wide screen at home." + (interactive) + (set-randr-config + '(("eDP1" 8 9 0) + ("HDMI1" 1 2 4 5 6 7))) + + (shell-command "xrandr --output DP2 --off") + (shell-command "xrandr --output HDMI1 --right-of eDP1 --auto --primary") + (exwm-randr-refresh)) + +(defun randr-vauxhall-layout-remarkable () + "Make the reMarkable the primary screen." + (interactive) + (shell-command "xrandr --output VIRTUAL1 --primary")) + +;; Layouts for frog (desktop) + +(defun randr-frog-layout-right-only () + "Use only the right screen on frog." + (interactive) + (set-randr-config `(("DisplayPort-0" ,(number-sequence 0 9)))) + (shell-command "xrandr --output DisplayPort-0 --off") + (shell-command "xrandr --output DisplayPort-1 --auto --primary")) + +(defun randr-frog-layout-both () + "Use the left and right screen on frog." + (interactive) + (set-randr-config `(("DisplayPort-0" 1 2 3 4 5) + ("DisplayPort-1" 6 7 8 9 0))) + + (shell-command "xrandr --output DisplayPort-0 --auto --primary --left-of DisplayPort-1") + (shell-command "xrandr --output DisplayPort-1 --auto --right-of DisplayPort-0 --rotate left")) + +(pcase (s-trim (shell-command-to-string "hostname")) + ("vauxhall" + (exwm-input-set-key (kbd "s-m s") #'randr-vauxhall-layout-single) + (exwm-input-set-key (kbd "s-m a") #'randr-vauxhall-layout-all) + (exwm-input-set-key (kbd "s-m w") #'randr-vauxhall-layout-wide-only) + (exwm-input-set-key (kbd "s-m r") #'randr-vauxhall-layout-remarkable)) + + ("frog" + (exwm-input-set-key (kbd "s-m b") #'randr-frog-layout-both) + (exwm-input-set-key (kbd "s-m r") #'randr-frog-layout-right-only))) + +;; Notmuch shortcuts as EXWM globals +;; (g m => gmail) +(exwm-input-set-key (kbd "s-g m") #'notmuch) +(exwm-input-set-key (kbd "s-g M") #'counsel-notmuch) + +(exwm-randr-enable) + +;; Let buffers move seamlessly between workspaces by making them +;; accessible in selectors on all frames. 
+(setq exwm-workspace-show-all-buffers t) +(setq exwm-layout-show-all-buffers t) + +(provide 'desktop) diff --git a/users/tazjin/emacs/config/eshell-setup.el b/users/tazjin/emacs/config/eshell-setup.el new file mode 100644 index 000000000000..0b23c5a2d1bc --- /dev/null +++ b/users/tazjin/emacs/config/eshell-setup.el @@ -0,0 +1,68 @@ +;; EShell configuration + +(require 'eshell) + +;; Generic settings +;; Hide banner message ... +(setq eshell-banner-message "") + +;; Prompt configuration +(defun clean-pwd (path) + "Turns a path of the form /foo/bar/baz into /f/b/baz + (inspired by fish shell)" + (let* ((hpath (replace-regexp-in-string home-dir + "~" + path)) + (current-dir (split-string hpath "/")) + (cdir (last current-dir)) + (head (butlast current-dir))) + (concat (mapconcat (lambda (s) + (if (string= "" s) nil + (substring s 0 1))) + head + "/") + (if head "/" nil) + (car cdir)))) + +(defun vcprompt (&optional args) + "Call the external vcprompt command with optional arguments. + VCPrompt" + (replace-regexp-in-string + "\n" "" + (shell-command-to-string (concat "vcprompt" args)))) + +(defmacro with-face (str &rest properties) + `(propertize ,str 'face (list ,@properties))) + +(defun prompt-f () + "EShell prompt displaying VC info and such" + (concat + (with-face (concat (clean-pwd (eshell/pwd)) " ") :foreground "#96a6c8") + (if (= 0 (user-uid)) + (with-face "#" :foreground "#f43841") + (with-face "$" :foreground "#73c936")) + (with-face " " :foreground "#95a99f"))) + + +(setq eshell-prompt-function 'prompt-f) +(setq eshell-highlight-prompt nil) +(setq eshell-prompt-regexp "^.+? \\((\\(git\\|svn\\|hg\\|darcs\\|cvs\\|bzr\\):.+?) \\)?[$#] ") + +;; Ignore version control folders in autocompletion +(setq eshell-cmpl-cycle-completions nil + eshell-save-history-on-exit t + eshell-cmpl-dir-ignore "\\`\\(\\.\\.?\\|CVS\\|\\.svn\\|\\.git\\)/\\'") + +;; Load some EShell extensions +(eval-after-load 'esh-opt + '(progn + (require 'em-term) + (require 'em-cmpl) + ;; More visual commands! + (add-to-list 'eshell-visual-commands "ssh") + (add-to-list 'eshell-visual-commands "tail") + (add-to-list 'eshell-visual-commands "sl"))) + +(setq eshell-directory-name "~/.config/eshell/") + +(provide 'eshell-setup) diff --git a/users/tazjin/emacs/config/functions.el b/users/tazjin/emacs/config/functions.el new file mode 100644 index 000000000000..5963d142c14b --- /dev/null +++ b/users/tazjin/emacs/config/functions.el @@ -0,0 +1,328 @@ +(require 'chart) +(require 'dash) +(require 'map) + +(defun load-file-if-exists (filename) + (if (file-exists-p filename) + (load filename))) + +(defun goto-line-with-feedback () + "Show line numbers temporarily, while prompting for the line number input" + (interactive) + (unwind-protect + (progn + (setq-local display-line-numbers t) + (let ((target (read-number "Goto line: "))) + (avy-push-mark) + (goto-line target))) + (setq-local display-line-numbers nil))) + +;; These come from the emacs starter kit + +(defun esk-add-watchwords () + (font-lock-add-keywords + nil '(("\\<\\(FIX\\(ME\\)?\\|TODO\\|DEBUG\\|HACK\\|REFACTOR\\|NOCOMMIT\\)" + 1 font-lock-warning-face t)))) + +(defun esk-sudo-edit (&optional arg) + (interactive "p") + (if (or arg (not buffer-file-name)) + (find-file (concat "/sudo:root@localhost:" (read-file-name "File: "))) + (find-alternate-file (concat "/sudo:root@localhost:" buffer-file-name)))) + +;; Open the NixOS man page +(defun nixos-man () + (interactive) + (man "configuration.nix")) + +;; Get the nix store path for a given derivation. 
+;; If the derivation has not been built before, this will trigger a build. +(defun nix-store-path (derivation) + (let ((expr (concat "with import <nixos> {}; " derivation))) + (s-chomp (shell-command-to-string (concat "nix-build -E '" expr "'"))))) + +(defun insert-nix-store-path () + (interactive) + (let ((derivation (read-string "Derivation name (in <nixos>): "))) + (insert (nix-store-path derivation)))) + +(defun toggle-force-newline () + "Buffer-local toggle for enforcing final newline on save." + (interactive) + (setq-local require-final-newline (not require-final-newline)) + (message "require-final-newline in buffer %s is now %s" + (buffer-name) + require-final-newline)) + +(defun list-external-commands () + "Creates a list of all external commands available on $PATH + while filtering NixOS wrappers." + (cl-loop + for dir in (split-string (getenv "PATH") path-separator) + when (and (file-exists-p dir) (file-accessible-directory-p dir)) + for lsdir = (cl-loop for i in (directory-files dir t) + for bn = (file-name-nondirectory i) + when (and (not (s-contains? "-wrapped" i)) + (not (member bn completions)) + (not (file-directory-p i)) + (file-executable-p i)) + collect bn) + append lsdir into completions + finally return (sort completions 'string-lessp))) + +(defvar external-command-flag-overrides + '(("google-chrome" . "--force-device-scale-factor=1.4")) + + "This setting lets me add additional flags to specific commands + that are run interactively via `run-external-command'.") + +(defun run-external-command--handler (cmd) + "Execute the specified command and notify the user when it + finishes." + (let* ((extra-flags (cdr (assoc cmd external-command-flag-overrides))) + (cmd (if extra-flags (s-join " " (list cmd extra-flags)) cmd))) + (message "Starting %s..." cmd) + (set-process-sentinel + (start-process-shell-command cmd nil cmd) + (lambda (process event) + (when (string= event "finished\n") + (message "%s process finished." process)))))) + +(defun run-external-command () + "Prompts the user with a list of all installed applications and + lets them select one to launch." + + (interactive) + (let ((external-commands-list (list-external-commands))) + (run-external-command--handler + (completing-read "Command: " external-commands-list + nil ;; predicate + t ;; require-match + nil ;; initial-input + ;; hist + 'external-commands-history)))) + +(defun password-store-lookup (&optional password-store-dir) + "Interactive password-store lookup function that actually uses +the GPG agent correctly." + + (interactive) + + (let* ((entry (completing-read "Copy password of entry: " + (password-store-list (or password-store-dir + (password-store-dir))) + nil ;; predicate + t ;; require-match + )) + (password (auth-source-pass-get 'secret entry))) + (password-store-clear) + (kill-new password) + (setq password-store-kill-ring-pointer kill-ring-yank-pointer) + (message "Copied %s to the kill ring. Will clear in %s seconds." + entry (password-store-timeout)) + (setq password-store-timeout-timer + (run-at-time (password-store-timeout) + nil 'password-store-clear)))) + +(defun browse-repositories () + "Select a git repository and open its associated magit buffer." + + (interactive) + (magit-status + (completing-read "Repository: " (magit-list-repos)))) + +(defun bottom-right-window-p () + "Determines whether the last (i.e. bottom-right) window of the + active frame is showing the buffer in which this function is + executed." 
+ (let* ((frame (selected-frame))
+ (right-windows (window-at-side-list frame 'right))
+ (bottom-windows (window-at-side-list frame 'bottom))
+ (last-window (car (seq-intersection right-windows bottom-windows))))
+ (eq (current-buffer) (window-buffer last-window))))
+
+(defhydra mc/mark-more-hydra (:color pink)
+ ("<up>" mc/mmlte--up "Mark previous like this")
+ ("<down>" mc/mmlte--down "Mark next like this")
+ ("<left>" mc/mmlte--left (if (eq mc/mark-more-like-this-extended-direction 'up)
+ "Skip past the cursor furthest up"
+ "Remove the cursor furthest down"))
+ ("<right>" mc/mmlte--right (if (eq mc/mark-more-like-this-extended-direction 'up)
+ "Remove the cursor furthest up"
+ "Skip past the cursor furthest down"))
+ ("f" nil "Finish selecting"))
+
+;; Mute the message that mc/mmlte wants to print on its own
+(advice-add 'mc/mmlte--message :around (lambda (&rest args) (ignore)))
+
+(defun mc/mark-dwim (arg)
+ "Select multiple things, but do what I mean."
+
+ (interactive "p")
+ (if (not (region-active-p)) (mc/mark-next-lines arg)
+ (if (< 1 (count-lines (region-beginning)
+ (region-end)))
+ (mc/edit-lines arg)
+ ;; The following is almost identical to `mc/mark-more-like-this-extended',
+ ;; but uses a hydra (`mc/mark-more-hydra') instead of a transient key map.
+ (mc/mmlte--down)
+ (mc/mark-more-hydra/body))))
+
+(defun memespace-region ()
+ "Make a meme out of it."
+
+ (interactive)
+ (let* ((start (region-beginning))
+ (end (region-end))
+ (memed
+ (message
+ (s-trim-right
+ (apply #'string
+ (-flatten
+ (nreverse
+ (-reduce-from (lambda (acc x)
+ (cons (cons x (-repeat (+ 1 (length acc)) 32)) acc))
+ '()
+ (string-to-list (buffer-substring-no-properties start end))))))))))
+
+ (save-excursion (delete-region start end)
+ (goto-char start)
+ (insert memed))))
+
+(defun insert-todo-comment (prefix todo)
+ "Insert a comment at point with something for me to do."
+
+ (interactive "P\nsWhat needs doing? ")
+ (save-excursion
+ (move-end-of-line nil)
+ (insert (format " %s TODO(%s): %s"
+ (s-trim-right comment-start)
+ (if prefix (read-string "Who needs to do this? ")
+ (getenv "USER"))
+ todo))))
+
+;; Custom text scale adjustment functions that operate on the entire instance
+(defun modify-text-scale (factor)
+ (set-face-attribute 'default nil
+ :height (+ (* factor 5) (face-attribute 'default :height))))
+
+(defun increase-default-text-scale (prefix)
+ "Increase default text scale in all Emacs frames, or just the
+ current frame if PREFIX is set."
+
+ (interactive "P")
+ (if prefix (text-scale-increase 1)
+ (modify-text-scale 1)))
+
+(defun decrease-default-text-scale (prefix)
+ "Decrease default text scale in all Emacs frames, or just the
+ current frame if PREFIX is set."
+
+ (interactive "P")
+ (if prefix (text-scale-decrease 1)
+ (modify-text-scale -1)))
+
+(defun set-default-text-scale (prefix &optional to)
+ "Set the default text scale to the specified value, or the
+ default. Restores current frame's text scale only, if PREFIX is
+ set."
+
+ (interactive "P")
+ (if prefix (text-scale-adjust 0)
+ (set-face-attribute 'default nil :height (or to 120))))
+
+(defun scrot-select ()
+ "Take a screenshot based on a mouse-selection and save it to
+ ~/screenshots."
+ (interactive)
+ (shell-command "scrot '$a_%Y-%m-%d_%s.png' -s -e 'mv $f ~/screenshots/'"))
+
+(defun graph-unread-mails ()
+ "Create a bar chart of unread mails based on notmuch tags.
+ Certain tags are excluded from the overview."
+
+ (interactive)
+ (let ((tag-counts
+ (-keep (-lambda ((name .
search)) + (let ((count + (string-to-number + (s-trim + (notmuch-command-to-string "count" search "and" "tag:unread"))))) + (when (>= count 1) (cons name count)))) + (notmuch-hello-generate-tag-alist '("unread" "signed" "attachment" "important"))))) + + (chart-bar-quickie + (if (< (length tag-counts) 6) + 'vertical 'horizontal) + "Unread emails" + (-map #'car tag-counts) "Tag:" + (-map #'cdr tag-counts) "Count:"))) + +(defun notmuch-show-open-or-close-subthread (&optional prefix) + "Open or close the subthread from (and including) the message at point." + (interactive "P") + (save-excursion + (let ((current-depth (map-elt (notmuch-show-get-message-properties) :depth 0))) + (loop do (notmuch-show-message-visible (notmuch-show-get-message-properties) prefix) + until (or (not (notmuch-show-goto-message-next)) + (= (map-elt (notmuch-show-get-message-properties) :depth) current-depth))))) + (force-window-update)) + +(defun vterm-send-ctrl-x () + "Sends `C-x' to the libvterm." + (interactive) + (vterm-send-key "x" nil nil t)) + +(defun find-depot-project (dir) + "Function used in the `project-find-functions' hook list to + determine the current project root of a depot project." + (when (s-starts-with? "/depot" dir) + (if (f-exists-p (f-join dir "default.nix")) + (cons 'transient dir) + (find-depot-project (f-parent dir))))) + +(add-to-list 'project-find-functions #'find-depot-project) + +(defun magit-find-file-worktree () + (interactive) + "Find a file in the current (ma)git worktree." + (magit-find-file--internal "{worktree}" + (magit-read-file-from-rev "HEAD" "Find file") + #'pop-to-buffer-same-window)) + +(defun songwhip--handle-result (status &optional cbargs) + ;; TODO(tazjin): Inspect status, which looks different in practice + ;; than the manual claims. + (if-let* ((response (json-parse-string + (buffer-substring url-http-end-of-headers (point-max)))) + (sw-path (ht-get* response "data" "path")) + (link (format "https://songwhip.com/%s" sw-path)) + (select-enable-clipboard t)) + (progn + (kill-new link) + (message "Copied Songwhip link (%s)" link)) + (warn "Something went wrong while retrieving Songwhip link!") + ;; For debug purposes, the buffer is persisted in this case. + (setq songwhip--debug-buffer (current-buffer)))) + +(defun songwhip-lookup-url (url) + "Look up URL on Songwhip and copy the resulting link to the clipboard." + (interactive "sEnter source URL: ") + (let ((songwhip-url "https://songwhip.com/api/") + (url-request-method "POST") + (url-request-extra-headers '(("Content-Type" . "application/json"))) + (url-request-data + (json-serialize `((country . "GB") + (url . ,url))))) + (url-retrieve "https://songwhip.com/api/" #'songwhip--handle-result nil t t) + (message "Requesting Songwhip URL ... please hold the line."))) + +(defun rg-in-project (&optional prefix) + "Interactively call ripgrep in the current project, or fall + back to ripgrep default behaviour if prefix is set." + (interactive "P") + (counsel-rg nil (unless prefix + (if-let ((pr (project-current))) + (project-root pr))))) + +(provide 'functions) diff --git a/users/tazjin/emacs/config/init.el b/users/tazjin/emacs/config/init.el new file mode 100644 index 000000000000..b473746fbe6a --- /dev/null +++ b/users/tazjin/emacs/config/init.el @@ -0,0 +1,269 @@ +;;; init.el --- Package bootstrapping. -*- lexical-binding: t; -*- + +;; Packages are installed via Nix configuration, this file only +;; initialises the newly loaded packages. 
+ +(require 'use-package) +(require 'seq) + +;; TODO(tazjin): Figure out what's up with vc. +;; +;; Leaving vc enabled breaks all find-file operations with messages +;; about .git folders being absent, but in random places. +(require 'vc) +(setq vc-handled-backends nil) + +(package-initialize) + +;; Initialise all packages installed via Nix. +;; +;; TODO: Generate this section in Nix for all packages that do not +;; require special configuration. + +;; +;; Packages providing generic functionality. +;; + +(use-package ace-window + :bind (("C-x o" . ace-window)) + :config + (setq aw-keys '(?f ?j ?d ?k ?s ?l ?a) + aw-scope 'frame)) + +(use-package auth-source-pass :config (auth-source-pass-enable)) + +(use-package avy + :bind (("M-j" . avy-goto-char) + ("M-p" . avy-pop-mark) + ("M-g g" . avy-goto-line))) + +(use-package browse-kill-ring) + +(use-package company + :hook ((prog-mode . company-mode)) + :config (setq company-tooltip-align-annotations t)) + +(use-package counsel + :after (ivy) + :config (counsel-mode 1)) + +(use-package dash) +(use-package dash-functional) +(use-package gruber-darker-theme) + +(use-package eglot + :custom + (eglot-autoshutdown t) + (eglot-send-changes-idle-time 0.3)) + +(use-package ht) + +(use-package hydra) +(use-package idle-highlight-mode :hook ((prog-mode . idle-highlight-mode))) + +(use-package ivy + :config + (ivy-mode 1) + (setq enable-recursive-minibuffers t) + (setq ivy-use-virtual-buffers t)) + +(use-package ivy-prescient + :after (ivy prescient) + :config + (ivy-prescient-mode) + ;; Fixes an issue with how regexes are passed to ripgrep from counsel, + ;; see raxod502/prescient.el#43 + (setf (alist-get 'counsel-rg ivy-re-builders-alist) #'ivy--regex-plus)) + +(use-package multiple-cursors) + +(use-package notmuch + :config + (setq notmuch-search-oldest-first nil) + (setq notmuch-show-all-tags-list t) + (setq notmuch-hello-tag-list-make-query "tag:unread")) + +(use-package paredit :hook ((lisp-mode . paredit-mode) + (emacs-lisp-mode . paredit-mode))) + +(use-package pinentry + :config + (setq epa-pinentry-mode 'loopback) + (pinentry-start)) + +(use-package prescient + :after (ivy counsel) + :config (prescient-persist-mode)) + +(use-package rainbow-delimiters :hook (prog-mode . rainbow-delimiters-mode)) +(use-package rainbow-mode) +(use-package s) +(use-package string-edit) + +(use-package swiper + :after (counsel ivy) + :bind (("C-s" . swiper))) + +(use-package telephone-line) ;; configuration happens outside of use-package +(use-package term-switcher) +(use-package undo-tree :config (global-undo-tree-mode)) +(use-package uuidgen) +(use-package which-key :config (which-key-mode t)) + +;; +;; Applications in emacs +;; + +(use-package magit + :bind ("C-c g" . magit-status) + :config (setq magit-repository-directories '(("/home/tazjin/projects" . 2) + ("/home/tazjin" . 1)))) + +(use-package password-store) +(use-package restclient) + +(use-package vterm + :config (progn + (setq vterm-shell "fish") + (setq vterm-exit-functions + (lambda (&rest _) (kill-buffer (current-buffer)))) + (setq vterm-kill-buffer-on-exit t))) + +;; vterm removed the ability to set a custom title generator function +;; via the public API, so this overrides its private title generation +;; function instead +(defun vterm--set-title (title) + (rename-buffer + (generate-new-buffer-name + (format "vterm<%s>" + (s-trim-left + (s-chop-prefix "fish" title)))))) + +;; +;; Packages providing language-specific functionality +;; + +(use-package cargo + :hook ((rust-mode . 
cargo-minor-mode) + (cargo-process-mode . visual-line-mode)) + :bind (:map cargo-minor-mode-map ("C-c C-c C-l" . ignore))) + +(use-package dockerfile-mode) + +(use-package erlang + :hook ((erlang-mode . (lambda () + ;; Don't indent after '>' while I'm writing + (local-set-key ">" 'self-insert-command))))) + +(use-package f) + +(use-package go-mode + :bind (:map go-mode-map ("C-c C-r" . recompile)) + :hook ((go-mode . (lambda () + (setq tab-width 2) + (setq-local compile-command + (concat "go build " buffer-file-name)))))) + +(use-package haskell-mode) + +(use-package ielm + :hook ((inferior-emacs-lisp-mode . (lambda () + (paredit-mode) + (rainbow-delimiters-mode-enable) + (company-mode))))) + +(use-package jq-mode + :config (add-to-list 'auto-mode-alist '("\\.jq\\'" . jq-mode))) + +(use-package kotlin-mode + :hook ((kotlin-mode . (lambda () + (setq indent-line-function #'indent-relative))))) + +(use-package lsp-mode) + +(use-package markdown-mode + :config + (add-to-list 'auto-mode-alist '("\\.markdown\\'" . markdown-mode)) + (add-to-list 'auto-mode-alist '("\\.md\\'" . markdown-mode))) + +(use-package markdown-toc) + +(use-package nix-mode + :hook ((nix-mode . (lambda () + (setq indent-line-function #'nix-indent-line))))) + +(use-package nix-util) +(use-package nginx-mode) +(use-package rust-mode) + +(use-package sly + :hook ((sly-mrepl-mode . (lambda () + (paredit-mode) + (rainbow-delimiters-mode-enable) + (company-mode)))) + :config + (setq common-lisp-hyperspec-root "file:///home/tazjin/docs/lisp/")) + +(use-package telega + :bind (:map global-map ("s-t" . telega)) + :config (telega-mode-line-mode 1)) + +(use-package terraform-mode) +(use-package toml-mode) + +(use-package tvl + :custom + (tvl-gerrit-remote "gerrit")) + +(use-package web-mode) +(use-package yaml-mode) + +;; Initialise midnight.el, which by default automatically cleans up +;; unused buffers at midnight. +(require 'midnight) + +(defgroup tazjin nil + "Settings related to my configuration") + +(defcustom depot-path "/depot" + "Local path to the depot checkout" + :group 'tazjin) + +;; Configuration changes in `customize` can not actually be persisted +;; to the customise file that Emacs is currently using (since it comes +;; from the Nix store). +;; +;; The way this will work for now is that Emacs will *write* +;; configuration to the file tracked in my repository, while not +;; actually *reading* it from there (unless Emacs is rebuilt). +(setq custom-file (expand-file-name "~/depot/tools/emacs/config/custom.el")) +(load-library "custom") + +(defvar home-dir (expand-file-name "~")) + +;; Seed RNG +(random t) + +;; Load all other Emacs configuration. These configurations are +;; added to `load-path' by Nix. +(mapc 'require '(desktop + mail-setup + look-and-feel + functions + settings + modes + bindings + eshell-setup)) +(telephone-line-setup) +(ace-window-display-mode) + +;; If a local configuration library exists, it should be loaded. +;; +;; This can be provided by calling my Emacs derivation with +;; `withLocalConfig'. 
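;; A hypothetical sketch of how that looks from the Nix side (attribute path
;; assumed; see `withLocalConfig' in users/tazjin/emacs/default.nix below):
;;
;;   depot.users.tazjin.emacs.withLocalConfig ./folder-containing-local-el
;;
;; where the folder contains a `local.el' and is added to the load path.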
+(if-let (local-file (locate-library "local")) + (load local-file)) + +(require 'dottime) + +(provide 'init) diff --git a/users/tazjin/emacs/config/look-and-feel.el b/users/tazjin/emacs/config/look-and-feel.el new file mode 100644 index 000000000000..8cca6e1bf08e --- /dev/null +++ b/users/tazjin/emacs/config/look-and-feel.el @@ -0,0 +1,122 @@ +;;; -*- lexical-binding: t; -*- + +;; Hide those ugly tool bars: +(tool-bar-mode 0) +(scroll-bar-mode 0) +(menu-bar-mode 0) +(add-hook 'after-make-frame-functions + (lambda (frame) (scroll-bar-mode 0))) + +;; Don't do any annoying things: +(setq ring-bell-function 'ignore) +(setq initial-scratch-message "") + +;; Remember layout changes +(winner-mode 1) + +;; Usually emacs will run as a proper GUI application, in which case a few +;; extra settings are nice-to-have: +(when window-system + (setq frame-title-format '(buffer-file-name "%f" ("%b"))) + (mouse-wheel-mode t) + (blink-cursor-mode -1)) + +;; Configure Emacs fonts. +(let ((font (if (equal "frog" (s-trim (shell-command-to-string "hostname"))) + ;; For unclear reasons, frog refuses to render the + ;; regular font weight - everything ends up bold, + ;; which makes it hard to distinguish e.g. read/unread + ;; emails. + ;; + ;; Semi-bold looks a little different than on vauxhall + ;; and other machines, but it's alright. + (format "JetBrains Mono Semi Light-%d" 12) + (format "JetBrains Mono-%d" 12)))) + (setq default-frame-alist `((font . ,font))) + (set-frame-font font t t)) + +;; Configure telephone-line +(defun telephone-misc-if-last-window () + "Renders the mode-line-misc-info string for display in the + mode-line if the currently active window is the last one in the + frame. + + The idea is to not display information like the current time, + load, battery levels on all buffers." + + (when (bottom-right-window-p) + (telephone-line-raw mode-line-misc-info t))) + +(defun telephone-line-setup () + (telephone-line-defsegment telephone-line-last-window-segment () + (telephone-misc-if-last-window)) + + ;; Display the current EXWM workspace index in the mode-line + (telephone-line-defsegment telephone-line-exwm-workspace-index () + (when (bottom-right-window-p) + (format "[%s]" exwm-workspace-current-index))) + + ;; Define a highlight font for ~ important ~ information in the last + ;; window. + (defface special-highlight '((t (:foreground "white" :background "#5f627f"))) "") + (add-to-list 'telephone-line-faces + '(highlight . (special-highlight . special-highlight))) + + (setq telephone-line-lhs + '((nil . (telephone-line-position-segment)) + (accent . (telephone-line-buffer-segment)))) + + (setq telephone-line-rhs + '((accent . (telephone-line-major-mode-segment)) + (nil . (telephone-line-last-window-segment + telephone-line-exwm-workspace-index)) + + ;; TODO(tazjin): lets not do this particular thing while I + ;; don't actually run notmuch, there are too many things + ;; that have a dependency on the modeline drawing correctly + ;; (including randr operations!) + ;; + ;; (highlight . 
(telephone-line-notmuch-counts)) + )) + + (setq telephone-line-primary-left-separator 'telephone-line-tan-left + telephone-line-primary-right-separator 'telephone-line-tan-right + telephone-line-secondary-left-separator 'telephone-line-tan-hollow-left + telephone-line-secondary-right-separator 'telephone-line-tan-hollow-right) + + (telephone-line-mode 1)) + +;; Auto refresh buffers +(global-auto-revert-mode 1) + +;; Use clipboard properly +(setq select-enable-clipboard t) + +;; Show in-progress chords in minibuffer +(setq echo-keystrokes 0.1) + +;; Show column numbers in all buffers +(column-number-mode t) + +(defalias 'yes-or-no-p 'y-or-n-p) +(defalias 'auto-tail-revert-mode 'tail-mode) + +;; Style line numbers (shown with M-g g) +(setq linum-format + (lambda (line) + (propertize + (format (concat " %" + (number-to-string + (length (number-to-string + (line-number-at-pos (point-max))))) + "d ") + line) + 'face 'linum))) + +;; Display tabs as 2 spaces +(setq tab-width 2) + +;; Don't wrap around when moving between buffers +(setq windmove-wrap-around nil) + +(provide 'look-and-feel) diff --git a/users/tazjin/emacs/config/mail-setup.el b/users/tazjin/emacs/config/mail-setup.el new file mode 100644 index 000000000000..2d89ed75edcc --- /dev/null +++ b/users/tazjin/emacs/config/mail-setup.el @@ -0,0 +1,84 @@ +(require 'notmuch) +(require 'counsel-notmuch) + +;; (global-set-key (kbd "C-c m") 'notmuch-hello) +;; (global-set-key (kbd "C-c C-m") 'counsel-notmuch) +;; (global-set-key (kbd "C-c C-e n") 'notmuch-mua-new-mail) + +(setq notmuch-cache-dir (format "%s/.cache/notmuch" (getenv "HOME"))) +(make-directory notmuch-cache-dir t) + +;; Cache addresses for completion: +(setq notmuch-address-save-filename (concat notmuch-cache-dir "/addresses")) + +;; Don't spam my home folder with drafts: +(setq notmuch-draft-folder "drafts") ;; relative to notmuch database + +;; Mark things as read when archiving them: +(setq notmuch-archive-tags '("-inbox" "-unread" "+archive")) + +;; Show me saved searches that I care about: +(setq notmuch-saved-searches + '((:name "inbox" :query "tag:inbox" :count-query "tag:inbox AND tag:unread" :key "i") + (:name "sent" :query "tag:sent" :key "t") + (:name "drafts" :query "tag:draft"))) +(custom-set-variables '(notmuch-search-oldest-first nil)) +(setq notmuch-show-empty-saved-searches t) + +;; Mail sending configuration +(setq send-mail-function 'sendmail-send-it) ;; sendmail provided by MSMTP +(setq notmuch-always-prompt-for-sender t) +(setq notmuch-mua-user-agent-function + (lambda () (format "Emacs %s; notmuch.el %s" emacs-version notmuch-emacs-version))) +(setq mail-host-address (system-name)) +(setq notmuch-mua-cite-function #'message-cite-original-without-signature) +(setq notmuch-fcc-dirs nil) ;; Gmail does this server-side +(setq message-signature nil) ;; Insert message signature manually with C-c C-w + +;; Close mail buffers after sending mail +(setq message-kill-buffer-on-exit t) + +;; Ensure sender is correctly passed to msmtp +(setq mail-specify-envelope-from t + message-sendmail-envelope-from 'header + mail-envelope-from 'header) + +;; Store sent mail in the correct folder per account +(setq notmuch-maildir-use-notmuch-insert nil) + +;; I don't use drafts but I instinctively hit C-x C-s constantly, lets +;; handle that gracefully. 
+(define-key notmuch-message-mode-map (kbd "C-x C-s") #'ignore)
+
+;; Define a telephone-line segment for displaying the count of unread,
+;; important mails in the last window's mode-line:
+(defvar *last-notmuch-count-redraw* 0)
+(defvar *current-notmuch-count* nil)
+
+(defun update-display-notmuch-counts ()
+ "Update and render the current state of the notmuch unread
+ count for display in the mode-line.
+
+ The offlineimap-timer runs every 2 minutes, so it does not make
+ sense to refresh this much more often than that."
+
+ (when (> (- (float-time) *last-notmuch-count-redraw*) 30)
+ (setq *last-notmuch-count-redraw* (float-time))
+ (let* ((inbox-unread (notmuch-saved-search-count "tag:inbox and tag:unread"))
+ (notmuch-count (format "I: %s" inbox-unread)))
+ (setq *current-notmuch-count* notmuch-count)))
+
+ (when (and (bottom-right-window-p)
+ ;; Only render if the initial update is done and there
+ ;; are unread mails:
+ *current-notmuch-count*
+ (not (equal *current-notmuch-count* "I: 0")))
+ *current-notmuch-count*))
+
+(telephone-line-defsegment telephone-line-notmuch-counts ()
+ "This segment displays the count of unread notmuch messages in
+ the last window's mode-line (if unread messages are present)."
+
+ (update-display-notmuch-counts))
+
+(provide 'mail-setup)
diff --git a/users/tazjin/emacs/config/modes.el b/users/tazjin/emacs/config/modes.el
new file mode 100644
index 000000000000..69fb523d0d91
--- /dev/null
+++ b/users/tazjin/emacs/config/modes.el
@@ -0,0 +1,37 @@
+;; Initializes modes I use.
+
+(add-hook 'prog-mode-hook 'esk-add-watchwords)
+(add-hook 'prog-mode-hook 'hl-line-mode)
+
+;; Use auto-complete as completion at point
+(defun set-auto-complete-as-completion-at-point-function ()
+ (setq completion-at-point-functions '(auto-complete)))
+
+(add-hook 'auto-complete-mode-hook
+ 'set-auto-complete-as-completion-at-point-function)
+
+;; Enable rainbow-delimiters for all things programming
+(add-hook 'prog-mode-hook 'rainbow-delimiters-mode)
+
+;; Enable Paredit & Company in Emacs Lisp mode
+(add-hook 'emacs-lisp-mode-hook 'company-mode)
+
+;; Always highlight matching brackets
+(show-paren-mode 1)
+
+;; Always auto-close parentheses and other pairs
+(electric-pair-mode)
+
+;; Keep track of recent files
+(recentf-mode)
+
+;; Easily navigate sillycased words
+(global-subword-mode 1)
+
+;; Transparently open compressed files
+(auto-compression-mode t)
+
+;; Configure go-mode for Go2 Alpha
+(add-to-list 'auto-mode-alist '("\\.go2$" . go-mode))
+
+(provide 'modes)
diff --git a/users/tazjin/emacs/config/settings.el b/users/tazjin/emacs/config/settings.el
new file mode 100644
index 000000000000..8b15b6cda183
--- /dev/null
+++ b/users/tazjin/emacs/config/settings.el
@@ -0,0 +1,48 @@
+(require 'uniquify)
+
+;; We don't live in the 80s, but we're also not a shitty web app.
+(setq gc-cons-threshold 20000000)
+
+(setq uniquify-buffer-name-style 'forward)
+
+; Fix some defaults
+(setq visible-bell nil
+ inhibit-startup-message t
+ color-theme-is-global t
+ sentence-end-double-space nil
+ shift-select-mode nil
+ uniquify-buffer-name-style 'forward
+ whitespace-style '(face trailing lines-tail tabs)
+ whitespace-line-column 80
+ default-directory "~"
+ fill-column 80
+ ediff-split-window-function 'split-window-horizontally
+ initial-major-mode 'emacs-lisp-mode)
+
+(add-to-list 'safe-local-variable-values '(lexical-binding . t))
+(add-to-list 'safe-local-variable-values '(whitespace-line-column .
80)) + +(set-default 'indent-tabs-mode nil) + +;; UTF-8 please +(setq locale-coding-system 'utf-8) ; pretty +(set-terminal-coding-system 'utf-8) ; pretty +(set-keyboard-coding-system 'utf-8) ; pretty +(set-selection-coding-system 'utf-8) ; please +(prefer-coding-system 'utf-8) ; with sugar on top + +;; Make emacs behave sanely (overwrite selected text) +(delete-selection-mode 1) + +;; Keep your temporary files in tmp, emacs! +(setq auto-save-file-name-transforms + `((".*" ,temporary-file-directory t))) +(setq backup-directory-alist + `((".*" . ,temporary-file-directory))) + +(remove-hook 'kill-buffer-query-functions 'server-kill-buffer-query-function) + +;; Show time in 24h format +(setq display-time-24hr-format t) + +(provide 'settings) diff --git a/users/tazjin/emacs/default.nix b/users/tazjin/emacs/default.nix new file mode 100644 index 000000000000..f7ebcb1e22ef --- /dev/null +++ b/users/tazjin/emacs/default.nix @@ -0,0 +1,141 @@ +# This file builds an Emacs pre-configured with the packages I need +# and my personal Emacs configuration. +# +# On NixOS machines, this Emacs currently does not support +# Imagemagick, see https://github.com/NixOS/nixpkgs/issues/70631. +# +# Forcing Emacs to link against Imagemagick currently causes libvterm +# to segfault, which is a lot less desirable than not having telega +# render images correctly. +{ depot, lib, ... }: + +let + inherit (depot) third_party; + + emacsWithPackages = (third_party.emacsPackagesGen third_party.emacs27).emacsWithPackages; + + # $PATH for binaries that need to be available to Emacs + emacsBinPath = lib.makeBinPath [ third_party.telega ]; + + identity = x: x; + + tazjinsEmacs = pkgfun: (emacsWithPackages(epkgs: pkgfun( + # Actual ELPA packages (the enlightened!) + (with epkgs.elpaPackages; [ + ace-window + avy + flymake + pinentry + rainbow-mode + undo-tree + xelb + ]) ++ + + # MELPA packages: + (with epkgs.melpaPackages; [ + ace-link + bazel-mode + browse-kill-ring + cargo + clojure-mode + cmake-mode + counsel + counsel-notmuch + dash-functional + direnv + dockerfile-mode + eglot + elixir-mode + elm-mode + erlang + go-mode + gruber-darker-theme + haskell-mode + ht + hydra + idle-highlight-mode + ivy + ivy-prescient + jq-mode + kotlin-mode + lsp-mode + magit + markdown-toc + meson-mode + multi-term + multiple-cursors + nginx-mode + nix-mode + notmuch # this comes from pkgs.third_party + paredit + password-store + polymode + prescient + protobuf-mode + rainbow-delimiters + refine + request + restclient + sly + string-edit + swiper + telephone-line + terraform-mode + toml-mode + transient + use-package + uuidgen + web-mode + websocket + which-key + yaml-mode + yasnippet + ]) ++ + + # Custom packages + (with depot.tools.emacs-pkgs; [ + dottime + nix-util + term-switcher + tvl + + # patched / overridden versions of packages + depot.third_party.telega + depot.third_party.emacs.exwm + depot.third_party.emacs.rcirc + depot.third_party.emacs.vterm + depot.third_party.emacs.explain-pause-mode + ])))); +in lib.fix(self: l: f: third_party.writeShellScriptBin "tazjins-emacs" '' + export PATH="${emacsBinPath}:$PATH" + exec ${tazjinsEmacs f}/bin/emacs \ + --debug-init \ + --no-site-file \ + --no-site-lisp \ + --no-init-file \ + --directory ${./config} ${if l != null then "--directory ${l}" else ""} \ + --eval "(require 'init)" $@ + '' // { + # Call overrideEmacs with a function (pkgs -> pkgs) to modify the + # packages that should be included in this Emacs distribution. 
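  # A hypothetical usage sketch (not part of the original file; the package
  # name is made up): the override function receives the final package list
  # and returns a new one, e.g.
  #
  #   emacs.overrideEmacs (pkgs: pkgs ++ [ someExtraEmacsPackage ])
  #
  # or, to drop a package, filter the list instead of appending to it.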
+ overrideEmacs = f': self l f'; + + # Call withLocalConfig with the path to a *folder* containing a + # `local.el` which provides local system configuration. + withLocalConfig = confDir: self confDir f; + + # Build a derivation that uses the specified local Emacs (i.e. + # built outside of Nix) instead + withLocalEmacs = emacsBin: third_party.writeShellScriptBin "tazjins-emacs" '' + export PATH="${emacsBinPath}:$PATH" + export EMACSLOADPATH="${(tazjinsEmacs f).deps}/share/emacs/site-lisp:" + exec ${emacsBin} \ + --debug-init \ + --no-site-file \ + --no-site-lisp \ + --no-init-file \ + --directory ${./config} \ + ${if l != null then "--directory ${l}" else ""} \ + --eval "(require 'init)" $@ + ''; + }) null identity diff --git a/users/tazjin/finito/.gitignore b/users/tazjin/finito/.gitignore new file mode 100644 index 000000000000..548206b0b297 --- /dev/null +++ b/users/tazjin/finito/.gitignore @@ -0,0 +1,3 @@ +.envrc +/target/ +**/*.rs.bk diff --git a/users/tazjin/finito/Cargo.lock b/users/tazjin/finito/Cargo.lock new file mode 100644 index 000000000000..7427a6b11c42 --- /dev/null +++ b/users/tazjin/finito/Cargo.lock @@ -0,0 +1,773 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +[[package]] +name = "addr2line" +version = "0.12.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "602d785912f476e480434627e8732e6766b760c045bbf897d9dfaa9f4fbd399c" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler32" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "567b077b825e468cc974f0020d4082ee6e03132512f207ef1a02fd5d00d1f32d" + +[[package]] +name = "arrayref" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4c527152e37cf757a3f78aae5a06fbeefdb07ccc535c980a3208ee3060dd544" + +[[package]] +name = "autocfg" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8aac770f1885fd7e387acedd76065302551364496e46b3dd00860b2f8359b9d" + +[[package]] +name = "backtrace" +version = "0.3.49" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05100821de9e028f12ae3d189176b41ee198341eb8f369956407fea2f5cc666c" +dependencies = [ + "addr2line", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", +] + +[[package]] +name = "base64" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96434f987501f0ed4eb336a411e0631ecd1afa11574fe148587adc4ff96143c9" +dependencies = [ + "byteorder", + "safemem", +] + +[[package]] +name = "bitflags" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693" + +[[package]] +name = "block-buffer" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a076c298b9ecdb530ed9d967e74a6027d6a7478924520acddcddc24c1c8ab3ab" +dependencies = [ + "arrayref", + "byte-tools", +] + +[[package]] +name = "byte-tools" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "560c32574a12a89ecd91f5e742165893f86e3ab98d21f8ea548658eb9eef5f40" + +[[package]] +name = "byteorder" +version = "1.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de" + +[[package]] +name = "bytes" +version = "0.4.12" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "206fdffcfa2df7cbe15601ef46c813fce0965eb3286db6b56c583b814b51c81c" +dependencies = [ + "byteorder", + "iovec", +] + +[[package]] +name = "cfg-if" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" + +[[package]] +name = "chrono" +version = "0.4.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80094f509cf8b5ae86a4966a39b3ff66cd7e2a3e594accec3743ff3fabeab5b2" +dependencies = [ + "num-integer", + "num-traits", + "time", +] + +[[package]] +name = "cloudabi" +version = "0.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f" +dependencies = [ + "bitflags", +] + +[[package]] +name = "constant_time_eq" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" + +[[package]] +name = "crypto-mac" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0999b4ff4d3446d4ddb19a63e9e00c1876e75cd7000d20e57a693b4b3f08d958" +dependencies = [ + "constant_time_eq", + "generic-array", +] + +[[package]] +name = "digest" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03b072242a8cbaf9c145665af9d250c59af3b958f83ed6824e13533cf76d5b90" +dependencies = [ + "generic-array", +] + +[[package]] +name = "failure" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d32e9bd16cc02eae7db7ef620b392808b89f6a5e16bb3497d159c6b92a0f4f86" +dependencies = [ + "backtrace", + "failure_derive", +] + +[[package]] +name = "failure_derive" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa4da3c766cd7a0db8242e326e9e4e081edd567072893ed320008189715366a4" +dependencies = [ + "proc-macro2 1.0.18", + "quote 1.0.7", + "syn 1.0.33", + "synstructure", +] + +[[package]] +name = "fake-simd" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed" + +[[package]] +name = "fallible-iterator" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb7217124812dc5672b7476d0c2d20cfe9f7c0f1ba0904b674a9762a0212f72e" + +[[package]] +name = "finito" +version = "0.1.0" +dependencies = [ + "serde", +] + +[[package]] +name = "finito-door" +version = "0.1.0" +dependencies = [ + "failure", + "finito", + "serde", + "serde_derive", +] + +[[package]] +name = "finito-postgres" +version = "0.1.0" +dependencies = [ + "chrono", + "finito", + "finito-door", + "postgres", + "postgres-derive", + "r2d2_postgres", + "serde", + "serde_json", + "uuid", +] + +[[package]] +name = "fuchsia-cprng" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" + +[[package]] +name = "generic-array" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef25c5683767570c2bbd7deba372926a55eaae9982d7726ee2a1050239d45b9d" +dependencies = [ + "typenum", +] + +[[package]] +name = "gimli" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"bcc8e0c9bce37868955864dbecd2b1ab2bdf967e6f28066d65aaac620444b65c" + +[[package]] +name = "hex" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6a22814455d41612f41161581c2883c0c6a1c41852729b17d5ed88f01e153aa" + +[[package]] +name = "hmac" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44f3bdb08579d99d7dc761c0e266f13b5f2ab8c8c703b9fc9ef333cd8f48f55e" +dependencies = [ + "crypto-mac", + "digest", +] + +[[package]] +name = "iovec" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2b3ea6ff95e175473f8ffe6a7eb7c00d054240321b84c57051175fe3c1e075e" +dependencies = [ + "libc", +] + +[[package]] +name = "itoa" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc6f3ad7b9d11a0c00842ff8de1b60ee58661048eb8049ed33c73594f359d7e6" + +[[package]] +name = "libc" +version = "0.2.71" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9457b06509d27052635f90d6466700c65095fdf75409b3fbdd903e988b886f49" + +[[package]] +name = "lock_api" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4da24a77a3d8a6d4862d95f72e6fdb9c09a643ecdb402d754004a557f2bec75" +dependencies = [ + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14b6052be84e6b71ab17edffc2eeabf5c2c3ae1fdb464aae35ac50c67a44e1f7" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "matches" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08" + +[[package]] +name = "md5" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79c56d6a0b07f9e19282511c83fc5b086364cbae4ba8c7d5f190c3d9b0425a48" + +[[package]] +name = "memchr" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "148fab2e51b4f1cfc66da2a7c32981d1d3c083a803978268bb11fe4b86925e7a" +dependencies = [ + "libc", +] + +[[package]] +name = "miniz_oxide" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "791daaae1ed6889560f8c4359194f56648355540573244a5448a83ba1ecc7435" +dependencies = [ + "adler32", +] + +[[package]] +name = "num-integer" +version = "0.1.43" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d59457e662d541ba17869cf51cf177c0b5f0cbf476c66bdc90bf1edac4f875b" +dependencies = [ + "autocfg", + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac267bcc07f48ee5f8935ab0d24f316fb722d7a1292e2913f0cc196b29ffd611" +dependencies = [ + "autocfg", +] + +[[package]] +name = "object" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ab52be62400ca80aa00285d25253d7f7c437b7375c4de678f5405d3afe82ca5" + +[[package]] +name = "parking_lot" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3a704eb390aafdc107b0e392f56a82b668e3a71366993b5340f5833fd62505e" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d58c7c768d4ba344e3e8d72518ac13e259d7c7ade24167003b8488e10b6740a3" +dependencies = [ + "cfg-if", + "cloudabi", + "libc", + "redox_syscall", + "smallvec", + "winapi", +] + +[[package]] +name = "phf" +version = "0.7.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3da44b85f8e8dfaec21adae67f95d93244b2ecf6ad2a692320598dcc8e6dd18" +dependencies = [ + "phf_shared", +] + +[[package]] +name = "phf_shared" +version = "0.7.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "234f71a15de2288bcb7e3b6515828d22af7ec8598ee6d24c3b526fa0a80b67a0" +dependencies = [ + "siphasher", +] + +[[package]] +name = "postgres" +version = "0.15.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "115dde90ef51af573580c035857badbece2aa5cde3de1dfb3c932969ca92a6c5" +dependencies = [ + "bytes", + "fallible-iterator", + "log", + "postgres-protocol", + "postgres-shared", + "socket2", +] + +[[package]] +name = "postgres-derive" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44ef42ae50f1547dde36aa78d5e44189cbf21f4e77ce6ddc2bbaa068337fc221" +dependencies = [ + "quote 0.5.2", + "syn 0.13.11", +] + +[[package]] +name = "postgres-protocol" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2487e66455bf88a1b247bf08a3ce7fe5197ac6d67228d920b0ee6a0e97fd7312" +dependencies = [ + "base64", + "byteorder", + "bytes", + "fallible-iterator", + "generic-array", + "hmac", + "md5", + "memchr", + "rand 0.3.23", + "sha2", + "stringprep", +] + +[[package]] +name = "postgres-shared" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffac35b3e0029b404c24a3b82149b4e904f293e8ca4a327eefa24d3ca50df36f" +dependencies = [ + "chrono", + "fallible-iterator", + "hex", + "phf", + "postgres-protocol", + "serde_json", + "uuid", +] + +[[package]] +name = "proc-macro2" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b06e2f335f48d24442b35a19df506a835fb3547bc3c06ef27340da9acf5cae7" +dependencies = [ + "unicode-xid 0.1.0", +] + +[[package]] +name = "proc-macro2" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "beae6331a816b1f65d04c45b078fd8e6c93e8071771f41b8163255bbd8d7c8fa" +dependencies = [ + "unicode-xid 0.2.1", +] + +[[package]] +name = "quote" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9949cfe66888ffe1d53e6ec9d9f3b70714083854be20fd5e271b232a017401e8" +dependencies = [ + "proc-macro2 0.3.8", +] + +[[package]] +name = "quote" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa563d17ecb180e500da1cfd2b028310ac758de548efdd203e18f283af693f37" +dependencies = [ + "proc-macro2 1.0.18", +] + +[[package]] +name = "r2d2" +version = "0.8.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1497e40855348e4a8a40767d8e55174bce1e445a3ac9254ad44ad468ee0485af" +dependencies = [ + "log", + "parking_lot", + "scheduled-thread-pool", +] + +[[package]] +name = "r2d2_postgres" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78c7fe9c0c3d2c298cf262bc3ce4b89cdf0eab620fd9fe759f65b34a1a00fb93" +dependencies = [ + "postgres", + "postgres-shared", + "r2d2", +] + +[[package]] +name = "rand" +version = "0.3.23" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "64ac302d8f83c0c1974bf758f6b041c6c8ada916fbb44a609158ca8b064cc76c" +dependencies = [ + "libc", + "rand 0.4.6", +] + +[[package]] +name = "rand" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293" +dependencies = [ + "fuchsia-cprng", + "libc", + "rand_core 0.3.1", + "rdrand", + "winapi", +] + +[[package]] +name = "rand_core" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" +dependencies = [ + "rand_core 0.4.2", +] + +[[package]] +name = "rand_core" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" + +[[package]] +name = "rdrand" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" +dependencies = [ + "rand_core 0.3.1", +] + +[[package]] +name = "redox_syscall" +version = "0.1.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2439c63f3f6139d1b57529d16bc3b8bb855230c8efcc5d3a896c8bea7c3b1e84" + +[[package]] +name = "rustc-demangle" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c691c0e608126e00913e33f0ccf3727d5fc84573623b8d65b2df340b5201783" + +[[package]] +name = "ryu" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e" + +[[package]] +name = "safemem" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e27a8b19b835f7aea908818e871f5cc3a5a186550c30773be987e155e8163d8f" + +[[package]] +name = "scheduled-thread-pool" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0988d7fdf88d5e5fcf5923a0f1e8ab345f3e98ab4bc6bc45a2d5ff7f7458fbf6" +dependencies = [ + "parking_lot", +] + +[[package]] +name = "scopeguard" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" + +[[package]] +name = "serde" +version = "1.0.114" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5317f7588f0a5078ee60ef675ef96735a1442132dc645eb1d12c018620ed8cd3" + +[[package]] +name = "serde_derive" +version = "1.0.114" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a0be94b04690fbaed37cddffc5c134bf537c8e3329d53e982fe04c374978f8e" +dependencies = [ + "proc-macro2 1.0.18", + "quote 1.0.7", + "syn 1.0.33", +] + +[[package]] +name = "serde_json" +version = "1.0.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3433e879a558dde8b5e8feb2a04899cf34fdde1fafb894687e52105fc1162ac3" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "sha2" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9eb6be24e4c23a84d7184280d2722f7f2731fcdd4a9d886efbfe4413e4847ea0" +dependencies = [ + "block-buffer", + "byte-tools", + "digest", + "fake-simd", +] + +[[package]] +name = "siphasher" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b8de496cf83d4ed58b6be86c3a275b8602f6ffe98d3024a869e124147a9a3ac" + 
+[[package]] +name = "smallvec" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7cb5678e1615754284ec264d9bb5b4c27d2018577fd90ac0ceb578591ed5ee4" + +[[package]] +name = "socket2" +version = "0.3.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03088793f677dce356f3ccc2edb1b314ad191ab702a5de3faf49304f7e104918" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "winapi", +] + +[[package]] +name = "stringprep" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ee348cb74b87454fff4b551cbf727025810a004f88aeacae7f85b87f4e9a1c1" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "syn" +version = "0.13.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14f9bf6292f3a61d2c716723fdb789a41bbe104168e6f496dc6497e531ea1b9b" +dependencies = [ + "proc-macro2 0.3.8", + "quote 0.5.2", + "unicode-xid 0.1.0", +] + +[[package]] +name = "syn" +version = "1.0.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8d5d96e8cbb005d6959f119f773bfaebb5684296108fb32600c00cde305b2cd" +dependencies = [ + "proc-macro2 1.0.18", + "quote 1.0.7", + "unicode-xid 0.2.1", +] + +[[package]] +name = "synstructure" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b834f2d66f734cb897113e34aaff2f1ab4719ca946f9a7358dba8f8064148701" +dependencies = [ + "proc-macro2 1.0.18", + "quote 1.0.7", + "syn 1.0.33", + "unicode-xid 0.2.1", +] + +[[package]] +name = "time" +version = "0.1.43" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca8a50ef2360fbd1eeb0ecd46795a87a19024eb4b53c5dc916ca1fd95fe62438" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "tinyvec" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53953d2d3a5ad81d9f844a32f14ebb121f50b650cd59d0ee2a07cf13c617efed" + +[[package]] +name = "typenum" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "373c8a200f9e67a0c95e62a4f52fbf80c23b4381c05a17845531982fa99e6b33" + +[[package]] +name = "unicode-bidi" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5" +dependencies = [ + "matches", +] + +[[package]] +name = "unicode-normalization" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fb19cf769fa8c6a80a162df694621ebeb4dafb606470b2b2fce0be40a98a977" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-xid" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" + +[[package]] +name = "unicode-xid" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564" + +[[package]] +name = "uuid" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bcc7e3b898aa6f6c08e5295b6c89258d1331e9ac578cc992fb818759951bdc22" +dependencies = [ + "rand 0.3.23", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies 
= [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" diff --git a/users/tazjin/finito/Cargo.toml b/users/tazjin/finito/Cargo.toml new file mode 100644 index 000000000000..310133abeebb --- /dev/null +++ b/users/tazjin/finito/Cargo.toml @@ -0,0 +1,6 @@ +[workspace] +members = [ + "finito-core", + "finito-door", + "finito-postgres" +] diff --git a/users/tazjin/finito/README.md b/users/tazjin/finito/README.md new file mode 100644 index 000000000000..5acd67d3bea7 --- /dev/null +++ b/users/tazjin/finito/README.md @@ -0,0 +1,27 @@ +Finito +====== + +This is a Rust port of the Haskell state-machine library Finito. It is +slightly less featureful because it loses the ability to ensure that +side-effects are contained and because of a slight reduction in +expressivity, which makes it a bit more restrictive. + +However, it still implements the FSM model well enough. + +# Components + +Finito is split up into multiple independent components (note: not all +of these exist yet), separating functionality related to FSM +persistence from other things. + +* `finito`: Core abstraction implemented by Finito +* `finito-door`: Example implementation of a simple, lockable door +* `finito-postgres`: Persistent state-machines using Postgres + +**Note**: The `finito` core library does not contain any tests. Its +coverage is instead provided by the `finito-door` library, which +actually implements an example FSM. + +These are split out because the documentation for `finito-door` is +interesting regardless and because other Finito packages also need an +example implementation. diff --git a/users/tazjin/finito/default.nix b/users/tazjin/finito/default.nix new file mode 100644 index 000000000000..e50ac32be452 --- /dev/null +++ b/users/tazjin/finito/default.nix @@ -0,0 +1,5 @@ +{ depot, ... }: + +depot.third_party.naersk.buildPackage { + src = ./.; +} diff --git a/users/tazjin/finito/finito-core/Cargo.toml b/users/tazjin/finito/finito-core/Cargo.toml new file mode 100644 index 000000000000..1d7bdb8b01fe --- /dev/null +++ b/users/tazjin/finito/finito-core/Cargo.toml @@ -0,0 +1,7 @@ +[package] +name = "finito" +version = "0.1.0" +authors = ["Vincent Ambo <mail@tazj.in>"] + +[dependencies] +serde = "1.0" diff --git a/users/tazjin/finito/finito-core/src/lib.rs b/users/tazjin/finito/finito-core/src/lib.rs new file mode 100644 index 000000000000..517bfad2bc74 --- /dev/null +++ b/users/tazjin/finito/finito-core/src/lib.rs @@ -0,0 +1,243 @@ +//! Finito's core finite-state machine abstraction. +//! +//! # What & why? +//! +//! Most processes that occur in software applications can be modeled +//! as finite-state machines (FSMs), however the actual states, the +//! transitions between them and the model's interaction with the +//! external world is often implicit. +//! +//! Making the states of a process explicit using a simple language +//! that works for both software developers and other people who may +//! have opinions on processes makes it easier to synchronise thoughts, +//! extend software and keep a good level of control over what is going +//! on. +//! +//! 
This library aims to provide functionality for implementing +//! finite-state machines in a way that balances expressivity and +//! safety. +//! +//! Finito does not aim to prevent every possible incorrect +//! transition, but aims for somewhere "safe-enough" (please don't +//! lynch me) that is still easily understood. +//! +//! # Conceptual overview +//! +//! The core idea behind Finito can be expressed in a single line and +//! will potentially look familiar if you have used Erlang in a +//! previous life. The syntax used here is the type-signature notation +//! of Haskell. +//! +//! ```text +//! advance :: state -> event -> (state, [action]) +//! ``` +//! +//! In short, every FSM is made up of three distinct types: +//! +//! * a state type representing all possible states of the machine +//! +//! * an event type representing all possible events in the machine +//! +//! * an action type representing a description of all possible +//! side-effects of the machine +//! +//! Using the definition above we can now say that a transition in a +//! state-machine, involving these three types, takes an initial state +//! and an event to apply it to and returns a new state and a list of +//! actions to execute. +//! +//! With this definition most processes can already be modeled quite +//! well. Two additional functions are required to make it all work: +//! +//! ```text +//! -- | The ability to cause additional side-effects after entering +//! -- a new state. +//! > enter :: state -> [action] +//! ``` +//! +//! as well as +//! +//! ```text +//! -- | An interpreter for side-effects +//! act :: action -> m [event] +//! ``` +//! +//! **Note**: This library is based on an original Haskell library. In +//! Haskell, side-effects can be controlled via the type system which +//! is impossible in Rust. +//! +//! Some parts of Finito make assumptions about the programmer not +//! making certain kinds of mistakes, which are pointed out in the +//! documentation. Unfortunately those assumptions are not +//! automatically verifiable in Rust. +//! +//! ## Example +//! +//! Please consult `finito-door` for an example representing a simple, +//! lockable door as a finite-state machine. This gives an overview +//! over Finito's primary features. +//! +//! If you happen to be the kind of person who likes to learn about +//! libraries by reading code, you should familiarise yourself with the +//! door as it shows up as the example in other finito-related +//! libraries, too. +//! +//! # Persistence, side-effects and mud +//! +//! These three things are inescapable in the fateful realm of +//! computers, but Finito separates them out into separate libraries +//! that you can drag in as you need them. +//! +//! Currently, those libraries include: +//! +//! * `finito`: Core components and classes of Finito +//! +//! * `finito-in-mem`: In-memory implementation of state machines +//! that do not need to live longer than an application using +//! standard library concurrency primitives. +//! +//! * `finito-postgres`: Postgres-backed, persistent implementation +//! of state machines that, well, do need to live longer. Uses +//! Postgres for concurrency synchronisation, so keep that in +//! mind. +//! +//! Which should cover most use-cases. Okay, enough prose, lets dive +//! in. +//! +//! # Does Finito make you want to scream? +//! +//! Please reach out! I want to know why! 
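To make the conceptual model above a bit more concrete, here is a small, self-contained sketch of an `FSM` implementation against the trait defined in this module. The `Switch` machine, its event and action types are invented purely for illustration (they are not part of Finito or its examples); `finito-door` remains the canonical, documented example.

```rust
use finito::{advance, FSM};

/// A toy state type: a light switch that is either on or off.
#[derive(Debug, PartialEq)]
enum Switch {
    On,
    Off,
}

/// Everything that can happen to the switch.
enum SwitchEvent {
    Toggle,
}

/// Side-effects the switch can request. This only *describes* the
/// effect; nothing runs until `act` interprets it.
#[derive(Debug, PartialEq)]
enum SwitchAction {
    Log(&'static str),
}

impl FSM for Switch {
    const FSM_NAME: &'static str = "switch";

    type Event = SwitchEvent;
    type Action = SwitchAction;
    type State = ();
    type Error = ();

    // Pure transition logic; no side-effects are caused in here.
    fn handle(self, event: SwitchEvent) -> (Self, Vec<SwitchAction>) {
        match (self, event) {
            (Switch::Off, SwitchEvent::Toggle) => (Switch::On, vec![]),
            (Switch::On, SwitchEvent::Toggle) => (Switch::Off, vec![]),
        }
    }

    // Extra actions caused whenever a new state variant is entered.
    fn enter(&self) -> Vec<SwitchAction> {
        vec![SwitchAction::Log("switch flipped")]
    }

    // The only place where side-effects actually run.
    fn act(action: SwitchAction, _state: &()) -> Result<Vec<SwitchEvent>, ()> {
        match action {
            SwitchAction::Log(msg) => {
                println!("{}", msg);
                Ok(vec![])
            }
        }
    }
}

fn main() {
    // `advance` consumes the current state and an event, and returns the
    // new state together with the actions that should now be interpreted.
    let (state, actions) = advance(Switch::Off, SwitchEvent::Toggle);
    assert_eq!(state, Switch::On);
    assert_eq!(actions, vec![SwitchAction::Log("switch flipped")]);
}
```

Persisting such a machine and actually running the actions it emits is then the job of a backend such as `finito-postgres`, via the `FSMBackend` trait below.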
+ +extern crate serde; + +use serde::Serialize; +use serde::de::DeserializeOwned; +use std::fmt::Debug; +use std::mem; + +/// Primary trait that needs to be implemented for every state type +/// representing the states of an FSM. +/// +/// This trait is used to implement transition logic and to "tie the +/// room together", with the room being our triplet of types. +pub trait FSM where Self: Sized { + /// A human-readable string uniquely describing what this FSM + /// models. This is used in log messages, database tables and + /// various other things throughout Finito. + const FSM_NAME: &'static str; + + /// The associated event type of an FSM represents all possible + /// events that can occur in the state-machine. + type Event; + + /// The associated action type of an FSM represents all possible + /// actions that can occur in the state-machine. + type Action; + + /// The associated error type of an FSM represents failures that + /// can occur during action processing. + type Error: Debug; + + /// The associated state type of an FSM describes the state that + /// is made available to the implementation of action + /// interpretations. + type State; + + /// `handle` deals with any incoming events to cause state + /// transitions and emit actions. This function is the core logic + /// of any state machine. + /// + /// Implementations of this function **must not** cause any + /// side-effects to avoid breaking the guarantees of Finitos + /// conceptual model. + fn handle(self, event: Self::Event) -> (Self, Vec<Self::Action>); + + /// `enter` is called when a new state is entered, allowing a + /// state to produce additional side-effects. + /// + /// This is useful for side-effects that event handlers do not + /// need to know about and for resting assured that a certain + /// action has been caused when a state is entered. + /// + /// FSM state types are expected to be enum (i.e. sum) types. A + /// state is considered "new" and enter calls are run if is of a + /// different enum variant. + fn enter(&self) -> Vec<Self::Action>; + + /// `act` interprets and executes FSM actions. This is the only + /// part of an FSM in which side-effects are allowed. + fn act(Self::Action, &Self::State) -> Result<Vec<Self::Event>, Self::Error>; +} + +/// This function is the primary function used to advance a state +/// machine. It takes care of both running the event handler as well +/// as possible state-enter calls and returning the result. +/// +/// Users of Finito should basically always use this function when +/// advancing state-machines manually, and never call FSM-trait +/// methods directly. +pub fn advance<S: FSM>(state: S, event: S::Event) -> (S, Vec<S::Action>) { + // Determine the enum variant of the initial state (used to + // trigger enter calls). + let old_discriminant = mem::discriminant(&state); + + let (new_state, mut actions) = state.handle(event); + + // Compare the enum variant of the resulting state to the old one + // and run `enter` if they differ. + let new_discriminant = mem::discriminant(&new_state); + let mut enter_actions = if old_discriminant != new_discriminant { + new_state.enter() + } else { + vec![] + }; + + actions.append(&mut enter_actions); + + (new_state, actions) +} + +/// This trait is implemented by Finito backends. Backends are +/// expected to be able to keep track of the current state of an FSM +/// and retrieve it / apply updates transactionally. +/// +/// See the `finito-postgres` and `finito-in-mem` crates for example +/// implementations of this trait. 
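/// As a rough, hypothetical usage sketch (the `SomeBackend` type and the
/// `app_state` value are stand-ins for illustration only; the door types
/// come from the `finito-door` example crate):
///
/// ```rust,ignore
/// let backend = SomeBackend::new(app_state);
/// let key = backend.insert_machine(DoorState::Opened)?;
/// let door: DoorState = backend.advance(key, DoorEvent::Close)?;
/// ```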
+/// +/// Backends must be parameterised over an additional (user-supplied) +/// state type which can be used to track application state that must +/// be made available to action handlers, for example to pass along +/// database connections. +pub trait FSMBackend<S: 'static> { + /// Key type used to identify individual state machines in this + /// backend. + /// + /// TODO: Should be parameterised over FSM type after rustc + /// #44265. + type Key; + + /// Error type for all potential failures that can occur when + /// interacting with this backend. + type Error: Debug; + + /// Insert a new state-machine into the backend's storage and + /// return its newly allocated key. + fn insert_machine<F>(&self, initial: F) -> Result<Self::Key, Self::Error> + where F: FSM + Serialize + DeserializeOwned; + + /// Retrieve the current state of an FSM by its key. + fn get_machine<F: FSM>(&self, key: Self::Key) -> Result<F, Self::Error> + where F: FSM + Serialize + DeserializeOwned; + + /// Advance a state machine by applying an event and persisting it + /// as well as any resulting actions. + /// + /// **Note**: Whether actions are automatically executed depends + /// on the backend used. Please consult the backend's + /// documentation for details. + fn advance<'a, F: FSM>(&'a self, key: Self::Key, event: F::Event) -> Result<F, Self::Error> + where F: FSM + Serialize + DeserializeOwned, + F::State: From<&'a S>, + F::Event: Serialize + DeserializeOwned, + F::Action: Serialize + DeserializeOwned; +} diff --git a/users/tazjin/finito/finito-door/Cargo.toml b/users/tazjin/finito/finito-door/Cargo.toml new file mode 100644 index 000000000000..32c0a5a7c4ef --- /dev/null +++ b/users/tazjin/finito/finito-door/Cargo.toml @@ -0,0 +1,12 @@ +[package] +name = "finito-door" +version = "0.1.0" +authors = ["Vincent Ambo <mail@tazj.in>"] + +[dependencies] +failure = "0.1" +serde = "1.0" +serde_derive = "1.0" + +[dependencies.finito] +path = "../finito-core" diff --git a/users/tazjin/finito/finito-door/src/lib.rs b/users/tazjin/finito/finito-door/src/lib.rs new file mode 100644 index 000000000000..68542c0bc448 --- /dev/null +++ b/users/tazjin/finito/finito-door/src/lib.rs @@ -0,0 +1,327 @@ +//! Example implementation of a lockable door in Finito +//! +//! # What & why? +//! +//! This module serves as a (hopefully simple) example of how to +//! implement finite-state machines using Finito. Note that the +//! concepts of Finito itself won't be explained in detail here, +//! consult its library documentation for that. +//! +//! Reading through this module should give you a rough idea of how to +//! work with Finito and get you up and running modeling things +//! *quickly*. +//! +//! Note: The generated documentation for this module will display the +//! various components of the door, but it will not inform you about +//! the actual transition logic and all that stuff. Read the source, +//! too! +//! +//! # The Door +//! +//! My favourite example when explaining these state-machines +//! conceptually has been to use a simple, lockable door. Our door has +//! a keypad next to it which can be used to lock the door by entering +//! a code, after which the same code must be entered to unlock it +//! again. +//! +//! The door can only be locked if it is closed. Oh, and it has a few +//! extra features: +//! +//! * whenever the door's state changes, an IRC channel receives a +//! message about that +//! +//! * the door calls the police if the code is intered incorrectly more +//! 
than a specified number of times (mhm, lets say, three) +//! +//! * if the police is called the door can not be interacted with +//! anymore (and honestly, for the sake of this example, we don't +//! care how its functionality is restored) +//! +//! ## The Door - Visualized +//! +//! Here's a rough attempt at drawing a state diagram in ASCII. The +//! bracketed words denote states, the arrows denote events: +//! +//! ```text +//! <--Open--- <--Unlock-- correct code? --Unlock--> +//! [Opened] [Closed] [Locked] [Disabled] +//! --Close--> ----Lock--> +//! ``` +//! +//! I'm so sorry for that drawing. +//! +//! ## The Door - Usage example +//! +//! An interaction session with our final door could look like this: +//! +//! ```rust,ignore +//! use finito_postgres::{insert_machine, advance}; +//! +//! let door = insert_machine(&conn, &DoorState::Opened)?; +//! +//! advance(&conn, &door, DoorEvent::Close)?; +//! advance(&conn, &door, DoorEvent::Lock(1337))?; +//! +//! format!("Door is now: {}", get_machine(&conn, &door)?); +//! ``` +//! +//! Here we have created, closed and then locked a door and inspected +//! its state. We will see that it is locked, has the locking code we +//! gave it and three remaining attempts to open it. +//! +//! Alright, enough foreplay, lets dive in! + +#[macro_use] extern crate serde_derive; + +extern crate failure; +extern crate finito; + +use finito::FSM; + +/// Type synonym to represent the code with which the door is locked. This +/// exists only for clarity in the signatures below and please do not email me +/// about the fact that an integer is not actually a good representation of +/// numerical digits. Thanks! +type Code = usize; + +/// Type synonym to represent the remaining number of unlock attempts. +type Attempts = usize; + +/// This type represents the possible door states and the data that they carry. +/// We can infer this from the "diagram" in the documentation above. +/// +/// This type is the one for which `finito::FSM` will be implemented, making it +/// the wooden (?) heart of our door. +#[derive(Debug, PartialEq, Serialize, Deserialize)] +pub enum DoorState { + /// In `Opened` state, the door is wide open and anyone who fits through can + /// go through. + Opened, + + /// In `Closed` state, the door is shut but does not prevent anyone from + /// opening it. + Closed, + + /// In `Locked` state, the door is locked and waiting for someone to enter + /// its locking code on the keypad. + /// + /// This state contains the code that the door is locked with, as well as + /// the remaining number of attempts before the door calls the police and + /// becomes unusable. + Locked { code: Code, attempts: Attempts }, + + /// This state represents a disabled door after the police has been called. + /// The police will need to unlock it manually! + Disabled, +} + +/// This type represents the events that can occur in our door, i.e. the input +/// and interactions it receives. +#[derive(Debug, PartialEq, Serialize, Deserialize)] +pub enum DoorEvent { + /// `Open` means someone is opening the door! + Open, + + /// `Close` means, you guessed it, the exact opposite. + Close, + + /// `Lock` means somebody has entered a locking code on the + /// keypad. + Lock(Code), + + /// `Unlock` means someone has attempted to unlock the door. + Unlock(Code), +} + +/// This type represents the possible actions, a.k.a. everything our door "does" +/// that does not just impact itself, a.k.a. side-effects. 
+/// +/// **Note**: This type by itself *is not* a collection of side-effects, it +/// merely describes the side-effects we want to occur (which are then +/// interpreted by the machinery later). +#[derive(Debug, PartialEq, Serialize, Deserialize)] +pub enum DoorAction { + /// `NotifyIRC` is used to display some kind of message on the + /// aforementioned IRC channel that is, for some reason, very interested in + /// the state of the door. + NotifyIRC(String), + + /// `CallThePolice` does what you think it does. + /// + /// **Note**: For safety reasons, causing this action is not recommended for + /// users inside the US! + CallThePolice, +} + +/// This trait implementation turns our 'DoorState' into a type actually +/// representing a finite-state machine. To implement it, we need to do three +/// main things: +/// +/// * Define what our associated `Event` and `Action` type should be +/// +/// * Define the event-handling and state-entering logic (i.e. the meat of the +/// ... door) +/// +/// * Implement the interpretation of our actions, i.e. implement actual +/// side-effects +impl FSM for DoorState { + const FSM_NAME: &'static str = "door"; + + // As you might expect, our `Event` type is 'DoorEvent' and our `Action` + // type is 'DoorAction'. + type Event = DoorEvent; + type Action = DoorAction; + type State = (); + + // For error handling, the door simply uses `failure` which provides a + // generic, chainable error type. In real-world implementations you may want + // to use a custom error type or similar. + type Error = failure::Error; + + // The implementation of `handle` provides us with the actual transition + // logic of the door. + // + // The door is conceptually not that complicated so it is relatively short. + fn handle(self, event: DoorEvent) -> (Self, Vec<DoorAction>) { + match (self, event) { + // An opened door can be closed: + (DoorState::Opened, DoorEvent::Close) => return (DoorState::Closed, vec![]), + + // A closed door can be opened: + (DoorState::Closed, DoorEvent::Open) => return (DoorState::Opened, vec![]), + + // A closed door can also be locked, in which case the locking code + // is stored with the next state and the unlock attempts default to + // three: + (DoorState::Closed, DoorEvent::Lock(code)) => { + return (DoorState::Locked { code, attempts: 3 }, vec![]) + } + + // A locked door receiving an `Unlock`-event can do several + // different things ... + (DoorState::Locked { code, attempts }, DoorEvent::Unlock(unlock_code)) => { + // In the happy case, entry of a correct code leads to the door + // becoming unlocked (i.e. transitioning back to `Closed`). + if code == unlock_code { + return (DoorState::Closed, vec![]); + } + + // If the code wasn't correct and the fraudulent unlocker ran + // out of attempts (i.e. there was only one attempt remaining), + // it's time for some consequences. + if attempts == 1 { + return (DoorState::Disabled, vec![DoorAction::CallThePolice]); + } + + // If the code wasn't correct, but there are still some + // remaining attempts, the user doesn't have to face the police + // quite yet but IRC gets to laugh about it. + return ( + DoorState::Locked { + code, + attempts: attempts - 1, + }, + vec![DoorAction::NotifyIRC("invalid code entered".into())], + ); + } + + // This actually already concludes our event-handling logic. Our + // uncaring door does absolutely nothing if you attempt to do + // something with it that it doesn't support, so the last handler is + // a simple fallback. 
+ // + // In a real-world state machine, especially one that receives + // events from external sources, you may want fallback handlers to + // actually do something. One example could be creating an action + // that logs information about unexpected events, alerts a + // monitoring service, or whatever else. + (current, _) => (current, vec![]), + } + } + + // The implementation of `enter` lets door states cause additional actions + // they are transitioned to. In the door example we use this only to notify + // IRC about what is going on. + fn enter(&self) -> Vec<DoorAction> { + let msg = match self { + DoorState::Opened => "door was opened", + DoorState::Closed => "door was closed", + DoorState::Locked { .. } => "door was locked", + DoorState::Disabled => "door was disabled", + }; + + vec![DoorAction::NotifyIRC(msg.into())] + } + + // The implementation of `act` lets us perform actual side-effects. + // + // Again, for the sake of educational simplicity, this does not deal with + // all potential (or in fact any) error cases that can occur during this toy + // implementation of actions. + // + // Additionally the `act` function can return new events. This is useful for + // a sort of "callback-like" pattern (cause an action to fetch some data, + // receive it as an event) but is not used in this example. + fn act(action: DoorAction, _state: &()) -> Result<Vec<DoorEvent>, failure::Error> { + match action { + DoorAction::NotifyIRC(msg) => { + use std::fs::OpenOptions; + use std::io::Write; + + let mut file = OpenOptions::new() + .append(true) + .create(true) + .open("/tmp/door-irc.log")?; + + write!(file, "<doorbot> {}\n", msg)?; + Ok(vec![]) + } + + DoorAction::CallThePolice => { + // TODO: call the police + println!("The police was called! For real!"); + Ok(vec![]) + } + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use finito::advance; + + fn test_fsm<S: FSM>(initial: S, events: Vec<S::Event>) -> (S, Vec<S::Action>) { + events.into_iter().fold((initial, vec![]), |(state, mut actions), event| { + let (new_state, mut new_actions) = advance(state, event); + actions.append(&mut new_actions); + (new_state, actions) + }) + } + + #[test] + fn test_door() { + let initial = DoorState::Opened; + let events = vec![ + DoorEvent::Close, + DoorEvent::Open, + DoorEvent::Close, + DoorEvent::Lock(1234), + DoorEvent::Unlock(1234), + DoorEvent::Lock(4567), + DoorEvent::Unlock(1234), + ]; + let (final_state, actions) = test_fsm(initial, events); + + assert_eq!(final_state, DoorState::Locked { code: 4567, attempts: 2 }); + assert_eq!(actions, vec![ + DoorAction::NotifyIRC("door was closed".into()), + DoorAction::NotifyIRC("door was opened".into()), + DoorAction::NotifyIRC("door was closed".into()), + DoorAction::NotifyIRC("door was locked".into()), + DoorAction::NotifyIRC("door was closed".into()), + DoorAction::NotifyIRC("door was locked".into()), + DoorAction::NotifyIRC("invalid code entered".into()), + ]); + } +} diff --git a/users/tazjin/finito/finito-postgres/Cargo.toml b/users/tazjin/finito/finito-postgres/Cargo.toml new file mode 100644 index 000000000000..dd8d1d000304 --- /dev/null +++ b/users/tazjin/finito/finito-postgres/Cargo.toml @@ -0,0 +1,25 @@ +[package] +name = "finito-postgres" +version = "0.1.0" +authors = ["Vincent Ambo <mail@tazj.in>"] + +[dependencies] +chrono = "0.4" +postgres-derive = "0.3" +serde = "1.0" +serde_json = "1.0" +r2d2_postgres = "0.14" + +[dependencies.postgres] +version = "0.15" +features = [ "with-uuid", "with-chrono", "with-serde_json" ] + 
+[dependencies.uuid] +version = "0.5" +features = [ "v4" ] + +[dependencies.finito] +path = "../finito-core" + +[dev-dependencies.finito-door] +path = "../finito-door" diff --git a/users/tazjin/finito/finito-postgres/migrations/2018-09-26-160621_bootstrap_finito_schema/down.sql b/users/tazjin/finito/finito-postgres/migrations/2018-09-26-160621_bootstrap_finito_schema/down.sql new file mode 100644 index 000000000000..9b56f9d35abe --- /dev/null +++ b/users/tazjin/finito/finito-postgres/migrations/2018-09-26-160621_bootstrap_finito_schema/down.sql @@ -0,0 +1,4 @@ +DROP TABLE actions; +DROP TYPE ActionStatus; +DROP TABLE events; +DROP TABLE machines; diff --git a/users/tazjin/finito/finito-postgres/migrations/2018-09-26-160621_bootstrap_finito_schema/up.sql b/users/tazjin/finito/finito-postgres/migrations/2018-09-26-160621_bootstrap_finito_schema/up.sql new file mode 100644 index 000000000000..18ace393b8d9 --- /dev/null +++ b/users/tazjin/finito/finito-postgres/migrations/2018-09-26-160621_bootstrap_finito_schema/up.sql @@ -0,0 +1,37 @@ +-- Creates the initial schema required by finito-postgres. + +CREATE TABLE machines ( + id UUID PRIMARY KEY, + created TIMESTAMPTZ NOT NULL DEFAULT NOW(), + fsm TEXT NOT NULL, + state JSONB NOT NULL +); + +CREATE TABLE events ( + id UUID PRIMARY KEY, + created TIMESTAMPTZ NOT NULL DEFAULT NOW(), + fsm TEXT NOT NULL, + fsm_id UUID NOT NULL REFERENCES machines(id), + event JSONB NOT NULL +); +CREATE INDEX idx_events_machines ON events(fsm_id); + +CREATE TYPE ActionStatus AS ENUM ( + 'Pending', + 'Completed', + 'Failed' +); + +CREATE TABLE actions ( + id UUID PRIMARY KEY, + created TIMESTAMPTZ NOT NULL DEFAULT NOW(), + fsm TEXT NOT NULL, + fsm_id UUID NOT NULL REFERENCES machines(id), + event_id UUID NOT NULL REFERENCES events(id), + content JSONB NOT NULL, + status ActionStatus NOT NULL, + error TEXT +); + +CREATE INDEX idx_actions_machines ON actions(fsm_id); +CREATE INDEX idx_actions_events ON actions(event_id); diff --git a/users/tazjin/finito/finito-postgres/src/error.rs b/users/tazjin/finito/finito-postgres/src/error.rs new file mode 100644 index 000000000000..e130d18361f1 --- /dev/null +++ b/users/tazjin/finito/finito-postgres/src/error.rs @@ -0,0 +1,109 @@ +//! This module defines error types and conversions for issue that can +//! occur while dealing with persisted state machines. + +use std::result; +use std::fmt; +use uuid::Uuid; +use std::error::Error as StdError; + +// errors to chain: +use postgres::Error as PgError; +use r2d2_postgres::r2d2::Error as PoolError; +use serde_json::Error as JsonError; + +pub type Result<T> = result::Result<T, Error>; + +#[derive(Debug)] +pub struct Error { + pub kind: ErrorKind, + pub context: Option<String>, +} + +#[derive(Debug)] +pub enum ErrorKind { + /// Errors occuring during JSON serialization of FSM types. + Serialization(String), + + /// Errors occuring during communication with the database. + Database(String), + + /// Errors with the database connection pool. + DBPool(String), + + /// State machine could not be found. + FSMNotFound(Uuid), + + /// Action could not be found. 
+ ActionNotFound(Uuid), +} + +impl fmt::Display for Error { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + use ErrorKind::*; + let msg = match &self.kind { + Serialization(err) => + format!("JSON serialization error: {}", err), + + Database(err) => + format!("PostgreSQL error: {}", err), + + DBPool(err) => + format!("Database connection pool error: {}", err), + + FSMNotFound(id) => + format!("FSM with ID {} not found", id), + + ActionNotFound(id) => + format!("Action with ID {} not found", id), + }; + + match &self.context { + None => write!(f, "{}", msg), + Some(ctx) => write!(f, "{}: {}", ctx, msg), + } + } +} + +impl StdError for Error {} + +impl <E: Into<ErrorKind>> From<E> for Error { + fn from(err: E) -> Error { + Error { + kind: err.into(), + context: None, + } + } +} + +impl From<JsonError> for ErrorKind { + fn from(err: JsonError) -> ErrorKind { + ErrorKind::Serialization(err.to_string()) + } +} + +impl From<PgError> for ErrorKind { + fn from(err: PgError) -> ErrorKind { + ErrorKind::Database(err.to_string()) + } +} + +impl From<PoolError> for ErrorKind { + fn from(err: PoolError) -> ErrorKind { + ErrorKind::DBPool(err.to_string()) + } +} + +/// Helper trait that makes it possible to supply contextual +/// information with an error. +pub trait ResultExt<T> { + fn context<C: fmt::Display>(self, ctx: C) -> Result<T>; +} + +impl <T, E: Into<Error>> ResultExt<T> for result::Result<T, E> { + fn context<C: fmt::Display>(self, ctx: C) -> Result<T> { + self.map_err(|err| Error { + context: Some(format!("{}", ctx)), + .. err.into() + }) + } +} diff --git a/users/tazjin/finito/finito-postgres/src/lib.rs b/users/tazjin/finito/finito-postgres/src/lib.rs new file mode 100644 index 000000000000..ae147f751f88 --- /dev/null +++ b/users/tazjin/finito/finito-postgres/src/lib.rs @@ -0,0 +1,431 @@ +//! PostgreSQL-backed persistence for Finito state machines +//! +//! This module implements ... TODO when I can write again. +//! +//! TODO: events & actions should have `SERIAL` keys + +#[macro_use] extern crate postgres; +#[macro_use] extern crate postgres_derive; + +extern crate chrono; +extern crate finito; +extern crate r2d2_postgres; +extern crate serde; +extern crate serde_json; +extern crate uuid; + +#[cfg(test)] mod tests; +#[cfg(test)] extern crate finito_door; + +mod error; +pub use error::{Result, Error, ErrorKind}; + +use chrono::prelude::{DateTime, Utc}; +use error::ResultExt; +use finito::{FSM, FSMBackend}; +use postgres::transaction::Transaction; +use postgres::GenericConnection; +use serde::Serialize; +use serde::de::DeserializeOwned; +use serde_json::Value; +use std::marker::PhantomData; +use uuid::Uuid; +use r2d2_postgres::{r2d2, PostgresConnectionManager}; + +type DBPool = r2d2::Pool<PostgresConnectionManager>; +type DBConn = r2d2::PooledConnection<PostgresConnectionManager>; + +/// This struct represents rows in the database table in which events +/// are persisted. +#[derive(Debug, ToSql, FromSql)] +struct EventT { + /// ID of the persisted event. + id: Uuid, + + /// Timestamp at which the event was stored. + created: DateTime<Utc>, + + /// Name of the type of FSM that this state belongs to. + fsm: String, + + /// ID of the state machine belonging to this event. + fsm_id: Uuid, + + /// Serialised content of the event. + event: Value, +} + +/// This enum represents the possible statuses an action can be in. +#[derive(Debug, PartialEq, ToSql, FromSql)] +#[postgres(name = "actionstatus")] +enum ActionStatus { + /// The action was requested but has not run yet. 
+ Pending, + + /// The action completed successfully. + Completed, + + /// The action failed to run. Information about the error will + /// have been persisted in Postgres. + Failed, +} + +/// This struct represents rows in the database table in which actions +/// are persisted. +#[derive(Debug, ToSql, FromSql)] +struct ActionT { + /// ID of the persisted event. + id: Uuid, + + /// Timestamp at which the event was stored. + created: DateTime<Utc>, + + /// Name of the type of FSM that this state belongs to. + fsm: String, + + /// ID of the state machine belonging to this event. + fsm_id: Uuid, + + /// ID of the event that resulted in this action. + event_id: Uuid, + + /// Serialised content of the action. + #[postgres(name = "content")] // renamed because 'action' is a keyword in PG + action: Value, + + /// Current status of the action. + status: ActionStatus, + + /// Detailed (i.e. Debug-trait formatted) error message, if an + /// error occured during action processing. + error: Option<String>, +} + +// The following functions implement the public interface of +// `finito-postgres`. + +/// TODO: Write docs for this type, brain does not want to do it right +/// now. +pub struct FinitoPostgres<S> { + state: S, + + db_pool: DBPool, +} + +impl <S> FinitoPostgres<S> { + pub fn new(state: S, db_pool: DBPool, _pool_size: usize) -> Self { + FinitoPostgres { + state, db_pool, + } + } +} + +impl <State: 'static> FSMBackend<State> for FinitoPostgres<State> { + type Key = Uuid; + type Error = Error; + + fn insert_machine<S: FSM + Serialize>(&self, initial: S) -> Result<Uuid> { + let query = r#" + INSERT INTO machines (id, fsm, state) + VALUES ($1, $2, $3) + "#; + + let id = Uuid::new_v4(); + let fsm = S::FSM_NAME.to_string(); + let state = serde_json::to_value(initial).context("failed to serialise FSM")?; + + self.conn()?.execute(query, &[&id, &fsm, &state]).context("failed to insert FSM")?; + + return Ok(id); + + } + + fn get_machine<S: FSM + DeserializeOwned>(&self, key: Uuid) -> Result<S> { + get_machine_internal(&*self.conn()?, key, false) + } + + /// Advance a persisted state machine by applying an event, and + /// storing the event as well as all resulting actions. + /// + /// This function holds a database-lock on the state's row while + /// advancing the machine. + /// + /// **Note**: This function returns the new state of the machine + /// immediately after applying the event, however this does not + /// necessarily equate to the state of the machine after all related + /// processing is finished as running actions may result in additional + /// transitions. 
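    ///
    /// A rough, hypothetical sketch of driving a persisted door through
    /// this method (the connection URL mirrors this crate's tests, while
    /// `app_state`, the pool setup and the error handling are assumptions
    /// made purely for illustration):
    ///
    /// ```rust,ignore
    /// let manager = PostgresConnectionManager::new(
    ///     "postgres://finito:finito@localhost/finito", TlsMode::None)?;
    /// let pool = r2d2::Pool::new(manager)?;
    /// let finito = FinitoPostgres::new(app_state, pool, 16);
    ///
    /// let door = finito.insert_machine(DoorState::Opened)?;
    /// let closed: DoorState = finito.advance(door, DoorEvent::Close)?;
    /// ```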
+ fn advance<'a, S>(&'a self, key: Uuid, event: S::Event) -> Result<S> + where S: FSM + Serialize + DeserializeOwned, + S::State: From<&'a State>, + S::Event: Serialize + DeserializeOwned, + S::Action: Serialize + DeserializeOwned { + let conn = self.conn()?; + let tx = conn.transaction().context("could not begin transaction")?; + let state = get_machine_internal(&tx, key, true)?; + + // Advancing the FSM consumes the event, so it is persisted first: + let event_id = insert_event::<_, S>(&tx, key, &event)?; + + // Core advancing logic is run: + let (new_state, actions) = finito::advance(state, event); + + // Resulting actions are persisted (TODO: and interpreted) + let mut action_ids = vec![]; + for action in actions { + let action_id = insert_action::<_, S>(&tx, key, event_id, &action)?; + action_ids.push(action_id); + } + + // And finally the state is updated: + update_state(&tx, key, &new_state)?; + tx.commit().context("could not commit transaction")?; + + self.run_actions::<S>(key, action_ids); + + Ok(new_state) + } +} + +impl <State: 'static> FinitoPostgres<State> { + /// Execute several actions at the same time, each in a separate + /// thread. Note that actions returning further events, causing + /// further transitions, returning further actions and so on will + /// potentially cause multiple threads to get created. + fn run_actions<'a, S>(&'a self, fsm_id: Uuid, action_ids: Vec<Uuid>) where + S: FSM + Serialize + DeserializeOwned, + S::Event: Serialize + DeserializeOwned, + S::Action: Serialize + DeserializeOwned, + S::State: From<&'a State> { + let state: S::State = (&self.state).into(); + let conn = self.conn().expect("TODO"); + + for action_id in action_ids { + let tx = conn.transaction().expect("TODO"); + + // TODO: Determine which concurrency setup we actually want. + if let Ok(events) = run_action(tx, action_id, &state, PhantomData::<S>) { + for event in events { + self.advance::<S>(fsm_id, event).expect("TODO"); + } + } + } + } + + /// Retrieve a single connection from the database connection pool. + fn conn(&self) -> Result<DBConn> { + self.db_pool.get().context("failed to retrieve connection from pool") + } +} + + + +/// Insert a single state-machine into the database and return its +/// newly allocated, random UUID. +pub fn insert_machine<C, S>(conn: &C, initial: S) -> Result<Uuid> where + C: GenericConnection, + S: FSM + Serialize { + let query = r#" + INSERT INTO machines (id, fsm, state) + VALUES ($1, $2, $3) + "#; + + let id = Uuid::new_v4(); + let fsm = S::FSM_NAME.to_string(); + let state = serde_json::to_value(initial).context("failed to serialize FSM")?; + + conn.execute(query, &[&id, &fsm, &state])?; + + return Ok(id); +} + +/// Insert a single event into the database and return its UUID. +fn insert_event<C, S>(conn: &C, + fsm_id: Uuid, + event: &S::Event) -> Result<Uuid> +where + C: GenericConnection, + S: FSM, + S::Event: Serialize { + let query = r#" + INSERT INTO events (id, fsm, fsm_id, event) + VALUES ($1, $2, $3, $4) + "#; + + let id = Uuid::new_v4(); + let fsm = S::FSM_NAME.to_string(); + let event_value = serde_json::to_value(event) + .context("failed to serialize event")?; + + conn.execute(query, &[&id, &fsm, &fsm_id, &event_value])?; + return Ok(id) +} + +/// Insert a single action into the database and return its UUID. 
+fn insert_action<C, S>(conn: &C, + fsm_id: Uuid, + event_id: Uuid, + action: &S::Action) -> Result<Uuid> where + C: GenericConnection, + S: FSM, + S::Action: Serialize { + let query = r#" + INSERT INTO actions (id, fsm, fsm_id, event_id, content, status) + VALUES ($1, $2, $3, $4, $5, $6) + "#; + + let id = Uuid::new_v4(); + let fsm = S::FSM_NAME.to_string(); + let action_value = serde_json::to_value(action) + .context("failed to serialize action")?; + + conn.execute( + query, + &[&id, &fsm, &fsm_id, &event_id, &action_value, &ActionStatus::Pending] + )?; + + return Ok(id) +} + +/// Update the state of a specified machine. +fn update_state<C, S>(conn: &C, + fsm_id: Uuid, + state: &S) -> Result<()> where + C: GenericConnection, + S: FSM + Serialize { + let query = r#" + UPDATE machines SET state = $1 WHERE id = $2 + "#; + + let state_value = serde_json::to_value(state).context("failed to serialize FSM")?; + let res_count = conn.execute(query, &[&state_value, &fsm_id])?; + + if res_count != 1 { + Err(ErrorKind::FSMNotFound(fsm_id).into()) + } else { + Ok(()) + } +} + +/// Conditionally alter SQL statement to append locking clause inside +/// of a transaction. +fn alter_for_update(alter: bool, query: &str) -> String { + match alter { + false => query.to_string(), + true => format!("{} FOR UPDATE", query), + } +} + +/// Retrieve the current state of a state machine from the database, +/// optionally locking the machine state for the duration of some +/// enclosing transaction. +fn get_machine_internal<C, S>(conn: &C, + id: Uuid, + for_update: bool) -> Result<S> where + C: GenericConnection, + S: FSM + DeserializeOwned { + let query = alter_for_update(for_update, r#" + SELECT state FROM machines WHERE id = $1 + "#); + + let rows = conn.query(&query, &[&id]).context("failed to retrieve FSM")?; + + if let Some(row) = rows.into_iter().next() { + Ok(serde_json::from_value(row.get(0)).context("failed to deserialize FSM")?) + } else { + Err(ErrorKind::FSMNotFound(id).into()) + } +} + +/// Retrieve an action from the database, optionally locking it for +/// the duration of some enclosing transaction. +fn get_action<C, S>(conn: &C, id: Uuid) -> Result<(ActionStatus, S::Action)> where + C: GenericConnection, + S: FSM, + S::Action: DeserializeOwned { + let query = alter_for_update(true, r#" + SELECT status, content FROM actions + WHERE id = $1 AND fsm = $2 + "#); + + let rows = conn.query(&query, &[&id, &S::FSM_NAME])?; + + if let Some(row) = rows.into_iter().next() { + let action = serde_json::from_value(row.get(1)) + .context("failed to deserialize FSM action")?; + Ok((row.get(0), action)) + } else { + Err(ErrorKind::ActionNotFound(id).into()) + } +} + +/// Update the status of an action after an attempt to run it. +fn update_action_status<C, S>(conn: &C, + id: Uuid, + status: ActionStatus, + error: Option<String>, + _fsm: PhantomData<S>) -> Result<()> where + C: GenericConnection, + S: FSM { + let query = r#" + UPDATE actions SET status = $1, error = $2 + WHERE id = $3 AND fsm = $4 + "#; + + let result = conn.execute(&query, &[&status, &error, &id, &S::FSM_NAME])?; + + if result != 1 { + Err(ErrorKind::ActionNotFound(id).into()) + } else { + Ok(()) + } +} + +/// Execute a single action in case it is pending or retryable. Holds +/// a lock on the action's database row while performing the action +/// and writes back the status afterwards. +/// +/// Should the execution of an action fail cleanly (i.e. without a +/// panic), the error will be persisted. 
Should it fail by panicking +/// (which developers should never do explicitly in action +/// interpreters) its status will not be changed. +fn run_action<S>(tx: Transaction, id: Uuid, state: &S::State, _fsm: PhantomData<S>) + -> Result<Vec<S::Event>> where + S: FSM, + S::Action: DeserializeOwned { + let (status, action) = get_action::<Transaction, S>(&tx, id)?; + + let result = match status { + ActionStatus::Pending => { + match S::act(action, state) { + // If the action succeeded, update its status to + // completed and return the created events. + Ok(events) => { + update_action_status( + &tx, id, ActionStatus::Completed, None, PhantomData::<S> + )?; + events + }, + + // If the action failed, persist the debug message and + // return nothing. + Err(err) => { + let msg = Some(format!("{:?}", err)); + update_action_status( + &tx, id, ActionStatus::Failed, msg, PhantomData::<S> + )?; + vec![] + }, + } + }, + + _ => { + // TODO: Currently only pending actions are run because + // retryable actions are not yet implemented. + vec![] + }, + }; + + tx.commit().context("failed to commit transaction")?; + Ok(result) +} diff --git a/users/tazjin/finito/finito-postgres/src/tests.rs b/users/tazjin/finito/finito-postgres/src/tests.rs new file mode 100644 index 000000000000..b1b5821be3c4 --- /dev/null +++ b/users/tazjin/finito/finito-postgres/src/tests.rs @@ -0,0 +1,47 @@ +use super::*; + +use finito_door::*; +use postgres::{Connection, TlsMode}; + +// TODO: read config from environment +fn open_test_connection() -> Connection { + Connection::connect("postgres://finito:finito@localhost/finito", TlsMode::None) + .expect("Failed to connect to test database") +} + +#[test] +fn test_insert_machine() { + let conn = open_test_connection(); + let initial = DoorState::Opened; + let door = insert_machine(&conn, initial).expect("Failed to insert door"); + let result = get_machine(&conn, &door, false).expect("Failed to fetch door"); + + assert_eq!(result, DoorState::Opened, "Inserted door state should match"); +} + +#[test] +fn test_advance() { + let conn = open_test_connection(); + + let initial = DoorState::Opened; + let events = vec![ + DoorEvent::Close, + DoorEvent::Open, + DoorEvent::Close, + DoorEvent::Lock(1234), + DoorEvent::Unlock(1234), + DoorEvent::Lock(4567), + DoorEvent::Unlock(1234), + ]; + + let door = insert_machine(&conn, initial).expect("Failed to insert door"); + + for event in events { + advance(&conn, &door, event).expect("Failed to advance door FSM"); + } + + let result = get_machine(&conn, &door, false).expect("Failed to fetch door"); + let expected = DoorState::Locked { code: 4567, attempts: 2 }; + + assert_eq!(result, expected, "Advanced door state should match"); +} diff --git a/users/tazjin/hanebuschtag.txt b/users/tazjin/hanebuschtag.txt new file mode 100644 index 000000000000..fc5c06522040 --- /dev/null +++ b/users/tazjin/hanebuschtag.txt @@ -0,0 +1,63 @@ +bazurschnaburkini +buchweizengrütze +burkischnurkischnurzelwutz +burwurgurken +burwurka +gaschnurzel +gezwurkel +gurzelschnurzelgurke +hanemazurka +hanemazurkelgurkel +haneschlawitzka +haneschnaburkeln +haneschnawurkagurka +haneschnawurkel +haneschnuren +haneschnurkissima +hanewurka +hanewurkini +hanewurzeln +ronzelschlawonzel +ronzelwonzel +schlagurkelwini +schlaraffenwurburzel +schlawiburschnurschlakini +schlawonzel +schlawurkinischnagurka +schlawurzelgegurkel +schlawurzeltrollurzel +schlunzelgarfunzel +schmonzelgafonzel +schmotzrotzel +schnaburka +schnaburkel +schnaburkini +schnackel +schnarkelbarkel +schnarwurzelka +schnawurkeln 
+schnawurzelgackschnurschnacksschnicks +schnawurzini +schniepel +schnirkelschini +schnöckel +schnockelgockel +schnorchel +schnörk +schnorkelbusch +schnörkelknörkel +schnorkelorgel +schnörks +schnotzelgekrotzel +schnudelwurkini +schnurburka +schnurkini +schnurkinihanfini +schnurzelgawurzel +schnurzelwurzelwutz +schnurzelwutz +strazurkeln +wazurka +wurkelgurkel +wurkelschnurrini +wurzelchakramahurka diff --git a/users/tazjin/homepage/default.nix b/users/tazjin/homepage/default.nix new file mode 100644 index 000000000000..8f53eba67e80 --- /dev/null +++ b/users/tazjin/homepage/default.nix @@ -0,0 +1,75 @@ +# Assembles the website index and configures an nginx instance to +# serve it. +# +# The website is made up of a simple header&footer and content +# elements for things such as blog posts and projects. +# +# Content for the blog is in //users/tazjin/blog instead of here. +{ depot, lib, ... }@args: + +with depot; +with nix.yants; + +let + inherit (builtins) readFile replaceStrings sort; + inherit (third_party) writeFile runCommandNoCC; + + # The different types of entries on the homepage. + entryClass = enum "entryClass" [ "blog" "project" "misc" ]; + + # The definition of a single entry. + entry = struct "entry" { + class = entryClass; + title = string; + url = string; + date = int; # epoch + description = option string; + }; + + escape = replaceStrings [ "<" ">" "&" "'" ] [ "<" ">" "&" "'" ]; + + postToEntry = defun [ users.tazjin.blog.post entry ] (post: { + class = "blog"; + title = post.title; + url = "/blog/${post.key}"; + date = post.date; + }); + + formatDate = defun [ int string ] (date: readFile (runCommandNoCC "date" {} '' + date --date='@${toString date}' '+%Y-%m-%d' > $out + '')); + + formatEntryDate = defun [ entry string ] (entry: entryClass.match entry.class { + blog = "Blog post from ${formatDate entry.date}"; + project = "Project from ${formatDate entry.date}"; + misc = "Posted on ${formatDate entry.date}"; + }); + + entryToDiv = defun [ entry string ] (entry: '' + <a href="${entry.url}" class="entry ${entry.class}"> + <div> + <p class="entry-title">${escape entry.title}</p> + ${ + lib.optionalString ((entry ? description) && (entry.description != null)) + "<p class=\"entry-description\">${escape entry.description}</p>" + } + <p class="entry-date">${formatEntryDate entry}</p> + </div> + </a> + ''); + + index = entries: third_party.writeText "index.html" (lib.concatStrings ( + [ (builtins.readFile ./header.html) ] + ++ (map entryToDiv (sort (a: b: a.date > b.date) entries)) + ++ [ (builtins.readFile ./footer.html) ] + )); + + pageEntries = import ./entries.nix; + homepage = index ((map postToEntry users.tazjin.blog.posts) ++ pageEntries); + atomFeed = import ./feed.nix (args // { inherit entry pageEntries; }); +in runCommandNoCC "website" {} '' + mkdir $out + cp ${homepage} $out/index.html + cp ${atomFeed} $out/feed.atom + cp -r ${./static} $out/static +'' diff --git a/users/tazjin/homepage/entries.nix b/users/tazjin/homepage/entries.nix new file mode 100644 index 000000000000..1e2b0b03dfc9 --- /dev/null +++ b/users/tazjin/homepage/entries.nix @@ -0,0 +1,74 @@ +[ + { + class = "misc"; + title = "Interview with Joscha Bach"; + url = "https://www.youtube.com/watch?v=P-2P3MSZrBM"; + date = 1594594800; + description = '' + A fascinating, mind-bending interview by Lex Fridman with Joscha + Bach about the Nature of the Universe. 
+ ''; + } + { + class = "misc"; + title = "The Virus Lounge"; + url = "https://tvl.fyi"; + date = 1587435629; + description = "A daily social video call in these trying pandemic times. Join us!"; + } + { + class = "project"; + title = "depot"; + url = "https://code.tvl.fyi/about"; + date = 1576800000; + description = "Merging all of my projects into a single, Nix-based monorepo"; + } + { + class = "project"; + title = "Nixery"; + url = "https://github.com/google/nixery"; + date = 1565132400; + description = "A Nix-backed container registry that builds container images on demand"; + } + { + class = "project"; + title = "kontemplate"; + url = "https://code.tvl.fyi/about/ops/kontemplate"; + date = 1486550940; + description = "Simple file templating tool built for Kubernetes resources"; + } + { + class = "misc"; + title = "dottime"; + url = "https://dotti.me/"; + date = 1560898800; + description = "A universal convention for conveying time (by edef <3)"; + } + { + class = "project"; + title = "journaldriver"; + url = "https://code.tvl.fyi/about/ops/journaldriver"; + date = 1527375600; + description = "Small daemon to forward logs from journald to Stackdriver Logging"; + } + { + class = "misc"; + title = "Principia Discordia"; + url = "https://principiadiscordia.com/book/1.php"; + date = 1495494000; + description = '' + The Principia is a short book I read as a child, and didn't + understand until much later. It shaped much of my world view. + ''; + } + { + class = "misc"; + title = "This Week in Virology"; + url = "http://www.microbe.tv/twiv/"; + date = 1585517557; + description = '' + Podcast with high-quality information about virology, + epidemiology and so on. Highly relevant to COVID19. + ''; + } +] diff --git a/users/tazjin/homepage/feed.nix b/users/tazjin/homepage/feed.nix new file mode 100644 index 000000000000..5e1fa15da6eb --- /dev/null +++ b/users/tazjin/homepage/feed.nix @@ -0,0 +1,56 @@ +# Creates the Atom feed for my homepage. +{ depot, lib, pkgs, entry, pageEntries, ... }: + +with depot.nix.yants; + +let + inherit (builtins) map readFile sort; + inherit (lib) singleton; + inherit (pkgs) writeText; + inherit (depot.users.tazjin) atom-feed blog renderMarkdown; + + postToEntry = defun [ blog.post atom-feed.entry ] (post: rec { + id = "https://tazj.in/blog/${post.key}"; + title = post.title; + content = readFile (renderMarkdown post.content); + published = post.date; + updated = post.date; # TODO(tazjin): this should be distinct from published + + links = singleton { + rel = "alternate"; + href = id; + }; + }); + + pageEntryToEntry = defun [ entry atom-feed.entry ] (e: { + id = "tazjin:${e.class}:${toString e.date}"; + updated = e.date; + published = e.date; + title = e.title; + summary = e.description; + + links = singleton { + rel = "alternate"; + href = e.url; + }; + }); + + allEntries = (map postToEntry blog.posts) ++ (map pageEntryToEntry pageEntries); + + feed = { + id = "https://tazj.in/"; + title = "tazjin's interblag"; + subtitle = "my posts, projects and other interesting things"; + # TODO(tazjin): Take the most recently updated entry time instead. 
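+    # One way the TODO above could be addressed (a sketch, not part of the
+    # original file): fold over `allEntries` and take the largest `updated`
+    # value instead of the evaluation time, e.g.
+    #   updated = builtins.foldl' (acc: e: if e.updated > acc then e.updated else acc) 0 allEntries;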
+ updated = builtins.currentTime; + rights = "© 2020 tazjin"; + authors = [ "tazjin" ]; + + links = singleton { + rel = "self"; + href = "https://tazjin/feed.atom"; + }; + + entries = sort (a: b: a.published > b.published) allEntries; + }; +in writeText "feed.atom" (atom-feed.renderFeed feed) diff --git a/users/tazjin/homepage/footer.html b/users/tazjin/homepage/footer.html new file mode 100644 index 000000000000..2f17135066e8 --- /dev/null +++ b/users/tazjin/homepage/footer.html @@ -0,0 +1,2 @@ + </div> +</body> diff --git a/users/tazjin/homepage/header.html b/users/tazjin/homepage/header.html new file mode 100644 index 000000000000..60a32783ac61 --- /dev/null +++ b/users/tazjin/homepage/header.html @@ -0,0 +1,33 @@ +<!DOCTYPE html> +<head><meta charset="utf-8"> + <meta name="viewport" content="width=device-width, initial-scale=1"> + <meta name="description" content="tazjin's blog"> + <link rel="stylesheet" type="text/css" href="static/tazjin.css" media="all"> + <link rel="icon" type="image/webp" href="/static/favicon.webp"> + <link rel="alternate" type="application/atom+xml" href="/feed.atom"> + <title>tazjin's interblag</title> +</head> +<body class="dark"> + <header> + <h1> + <a class="interblag-title" href="/">tazjin's interblag</a> + </h1> + <hr> + </header> + <div class="introduction"> + <p>Hello, illuminated visitor.</p> + <p> + I'm tazjin. Usually you can find + me <a class="dark-link" href="https://git.tazj.in/about">programming computers</a> + using tools such as <a class="dark-link" href="https://nixos.org/nix">Nix</a> + and <a class="dark-link" href="https://www.gnu.org/software/emacs/">Emacs</a>. + </p> + <p> + Below is a collection of + my <span class="project">projects</span>, <span class="blog">blog + posts</span> and some <span class="misc">random things</span> by + me or others. If you'd like to get in touch about anything, send + me a mail at mail@[this domain] or ping me on IRC. 
+ </p> + </div> + <div class="entry-container"> diff --git a/users/tazjin/homepage/static/favicon.webp b/users/tazjin/homepage/static/favicon.webp new file mode 100644 index 000000000000..f99c9085340b --- /dev/null +++ b/users/tazjin/homepage/static/favicon.webp Binary files differdiff --git a/users/tazjin/homepage/static/img/nixery/dominator.webp b/users/tazjin/homepage/static/img/nixery/dominator.webp new file mode 100644 index 000000000000..2d8569a6ca21 --- /dev/null +++ b/users/tazjin/homepage/static/img/nixery/dominator.webp Binary files differdiff --git a/users/tazjin/homepage/static/img/nixery/example_extra.webp b/users/tazjin/homepage/static/img/nixery/example_extra.webp new file mode 100644 index 000000000000..101f0f633aef --- /dev/null +++ b/users/tazjin/homepage/static/img/nixery/example_extra.webp Binary files differdiff --git a/users/tazjin/homepage/static/img/nixery/example_plain.webp b/users/tazjin/homepage/static/img/nixery/example_plain.webp new file mode 100644 index 000000000000..a2b90b3e21d5 --- /dev/null +++ b/users/tazjin/homepage/static/img/nixery/example_plain.webp Binary files differdiff --git a/users/tazjin/homepage/static/img/nixery/ideal_layout.webp b/users/tazjin/homepage/static/img/nixery/ideal_layout.webp new file mode 100644 index 000000000000..0e9f74556682 --- /dev/null +++ b/users/tazjin/homepage/static/img/nixery/ideal_layout.webp Binary files differdiff --git a/users/tazjin/homepage/static/img/watchblob_1.webp b/users/tazjin/homepage/static/img/watchblob_1.webp new file mode 100644 index 000000000000..27e588e1a145 --- /dev/null +++ b/users/tazjin/homepage/static/img/watchblob_1.webp Binary files differdiff --git a/users/tazjin/homepage/static/img/watchblob_2.webp b/users/tazjin/homepage/static/img/watchblob_2.webp new file mode 100644 index 000000000000..b2dea98b4fb4 --- /dev/null +++ b/users/tazjin/homepage/static/img/watchblob_2.webp Binary files differdiff --git a/users/tazjin/homepage/static/img/watchblob_3.webp b/users/tazjin/homepage/static/img/watchblob_3.webp new file mode 100644 index 000000000000..99b49373b5b4 --- /dev/null +++ b/users/tazjin/homepage/static/img/watchblob_3.webp Binary files differdiff --git a/users/tazjin/homepage/static/img/watchblob_4.webp b/users/tazjin/homepage/static/img/watchblob_4.webp new file mode 100644 index 000000000000..41dbdb6be1cf --- /dev/null +++ b/users/tazjin/homepage/static/img/watchblob_4.webp Binary files differdiff --git a/users/tazjin/homepage/static/img/watchblob_5.webp b/users/tazjin/homepage/static/img/watchblob_5.webp new file mode 100644 index 000000000000..c42a4ce1bc0f --- /dev/null +++ b/users/tazjin/homepage/static/img/watchblob_5.webp Binary files differdiff --git a/users/tazjin/homepage/static/img/watchblob_6.webp b/users/tazjin/homepage/static/img/watchblob_6.webp new file mode 100644 index 000000000000..1440761859dd --- /dev/null +++ b/users/tazjin/homepage/static/img/watchblob_6.webp Binary files differdiff --git a/users/tazjin/homepage/static/jetbrains-mono-bold-italic.woff2 b/users/tazjin/homepage/static/jetbrains-mono-bold-italic.woff2 new file mode 100644 index 000000000000..34b5c69ae1cf --- /dev/null +++ b/users/tazjin/homepage/static/jetbrains-mono-bold-italic.woff2 Binary files differdiff --git a/users/tazjin/homepage/static/jetbrains-mono-bold.woff2 b/users/tazjin/homepage/static/jetbrains-mono-bold.woff2 new file mode 100644 index 000000000000..84a008af7edb --- /dev/null +++ b/users/tazjin/homepage/static/jetbrains-mono-bold.woff2 Binary files differdiff --git 
a/users/tazjin/homepage/static/jetbrains-mono-italic.woff2 b/users/tazjin/homepage/static/jetbrains-mono-italic.woff2 new file mode 100644 index 000000000000..85fd4687891e --- /dev/null +++ b/users/tazjin/homepage/static/jetbrains-mono-italic.woff2 Binary files differdiff --git a/users/tazjin/homepage/static/jetbrains-mono.woff2 b/users/tazjin/homepage/static/jetbrains-mono.woff2 new file mode 100644 index 000000000000..d5b94cb9e7db --- /dev/null +++ b/users/tazjin/homepage/static/jetbrains-mono.woff2 Binary files differdiff --git a/users/tazjin/homepage/static/tazjin.css b/users/tazjin/homepage/static/tazjin.css new file mode 100644 index 000000000000..aea4d426eac3 --- /dev/null +++ b/users/tazjin/homepage/static/tazjin.css @@ -0,0 +1,183 @@ +/* Jetbrains Mono font from https://www.jetbrains.com/lp/mono/ + licensed under Apache 2.0. Thanks, Jetbrains! */ +@font-face { + font-family: jetbrains-mono; + src: url(jetbrains-mono.woff2); +} + +@font-face { + font-family: jetbrains-mono; + font-weight: bold; + src: url(jetbrains-mono-bold.woff2); +} + +@font-face { + font-family: jetbrains-mono; + font-style: italic; + src: url(jetbrains-mono-italic.woff2); +} + +@font-face { + font-family: jetbrains-mono; + font-weight: bold; + font-style: italic; + src: url(jetbrains-mono-bold-italic.woff2); +} + +/* Generic-purpose styling */ + +body { + max-width: 800px; + margin: 40px auto; + line-height: 1.6; + font-size: 18px; + padding: 0 10px; + font-family: jetbrains-mono, monospace; +} + +p, a :not(.uncoloured-link) { + color: inherit; +} + +h1, h2, h3 { + line-height: 1.2 +} + +/* Homepage styling */ + +.dark { + background-color: #181818; + color: #e4e4ef; +} + +.dark-link, .interblag-title { + color: #96a6c8; +} + +.entry-container { + display: flex; + flex-direction: row; + flex-wrap: wrap; + justify-content: flex-start; +} + +.interblag-title { + text-decoration: none; +} + +.entry { + width: 42%; + margin: 5px; + padding-left: 7px; + padding-right: 5px; + border: 2px solid; + border-radius: 5px; + flex-grow: 1; + text-decoration: none; +} + +.misc { + color: #73c936; + border-color: #73c936; +} + +.blog { + color: #268bd2; + border-color: #268bd2; +} + +.project { + color: #ff4f58; + border-color: #ff4f58; +} + +.entry-title { + color: inherit !important; + font-weight: bold; + text-decoration: none; +} + +.entry-date { + font-style: italic; +} + +/* Blog styling */ + +.light { + color: #383838; +} + +.blog-title { + color: inherit; + text-decoration: none; +} + +.footer { + text-align: right; +} + +.date { + text-align: right; + font-style: italic; + float: right; +} + +.inline { + display: inline; +} + +.lod { + text-align: center; +} + +.uncoloured-link { + color: inherit; +} + +pre { + width: 100%; + overflow: auto; +} + +img { + max-width: 100%; +} + +.cheddar-callout { + display: block; + padding: 10px; +} + +.cheddar-question { + color: #3367d6; + background-color: #e8f0fe; +} + +.cheddar-todo { + color: #616161; + background-color: #eeeeee; +} + +.cheddar-tip { + color: #00796b; + background-color: #e0f2f1; +} + +.cheddar-warning { + color: #a52714; + background-color: #fbe9e7; +} + +kbd { + background-color: #eee; + border-radius: 3px; + border: 1px solid #b4b4b4; + box-shadow: 0 1px 1px rgba(0, 0, 0, .2), 0 2px 0 0 rgba(255, 255, 255, .7) inset; + color: #333; + display: inline-block; + font-size: .85em; + font-weight: 700; + line-height: 1; + padding: 2px 4px; + white-space: nowrap; +} diff --git a/users/tazjin/keys.nix b/users/tazjin/keys.nix new file mode 100644 index 
000000000000..6e66cb81c51b --- /dev/null +++ b/users/tazjin/keys.nix @@ -0,0 +1,10 @@ +# My SSH public keys +{ ... }: + +rec { + frog = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKMZzRdcrHTuCPoaFy36MPr5IW/hnImlse/OBOn6udL/ tazjin@frog"; + vauxhall = "ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBHs+9QfZTD5qGsBQaWqp5whmXJ9qy/m9swE2M9QBaIQVoIYGemq3HXTzrQ6XekwudJCltP4EpM7h/Qc+Or309Yw="; + s10e = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIDf7CNlYoauHcSYsMNnCZt5h9QSYH/7keYkg8g3hT32+"; + + all = [ frog vauxhall s10e ]; +} diff --git a/users/tazjin/nisp/transform.el b/users/tazjin/nisp/transform.el new file mode 100644 index 000000000000..89b2bb104d27 --- /dev/null +++ b/users/tazjin/nisp/transform.el @@ -0,0 +1,137 @@ +;; Nix as a Lisp + +(require 'cl-lib) +(require 'json) +(require 's) +(require 'dash) + +(defun nisp/expr (form) + "Entrypoint for Nisp->Nix transformation. Will translate FORM +into Nix code, if it is a valid Nisp expression. + +To make code generation slightly easier, each +expression (including literals) is wrapped in an extra pair of +parens." + (concat + "(" + (pcase form + ;; Special keywords + ('() "null") + (`(let . ,rest) (nisp/let form)) + (`(fn . ,rest) (nisp/fn form)) + (`(if ,cond ,then ,else) (nisp/if cond then else)) + + ;; Nix operators & builtins that need special handling + (`(or ,lhs ,rhs) (nisp/infix "||" lhs rhs)) + (`(and ,lhs ,rhs) (nisp/infix "&&" lhs rhs)) + (`(> ,lhs ,rhs) (nisp/infix ">" lhs rhs)) + (`(< ,lhs ,rhs) (nisp/infix "<" lhs rhs)) + (`(>= ,lhs ,rhs) (nisp/infix ">=" lhs rhs)) + (`(<= ,lhs ,rhs) (nisp/infix "<=" lhs rhs)) + (`(+ ,lhs ,rhs) (nisp/infix "+" lhs rhs)) + (`(- ,lhs ,rhs) (nisp/infix "-" lhs rhs)) + (`(* ,lhs ,rhs) (nisp/infix "*" lhs rhs)) + (`(/ ,lhs ,rhs) (nisp/infix "/" lhs rhs)) + (`(-> ,lhs ,rhs) (nisp/infix "->" lhs rhs)) + (`(? ,lhs ,rhs) (nisp/infix "?" lhs rhs)) + (`(// ,lhs ,rhs) (nisp/infix "//" lhs rhs)) + (`(++ ,lhs ,rhs) (nisp/infix "++" lhs rhs)) + (`(== ,lhs ,rhs) (nisp/infix "==" lhs rhs)) + (`(!= ,lhs ,rhs) (nisp/infix "!=" lhs rhs)) + (`(! ,term) (concat "!" (nisp/expr term))) + (`(- ,term) (concat "-" (nisp/expr term))) + + ;; Attribute sets + (`(attrs . ,rest) (nisp/attribute-set form)) + + ;; Function calls + ((and `(,func . ,args) + (guard (symbolp func))) + (nisp/funcall func args)) + + ;; Primitives + ((pred stringp) (json-encode-string form)) + ((pred numberp) (json-encode-number form)) + ((pred keywordp) (substring (symbol-name form) 1)) + ((pred symbolp) (symbol-name form)) + + ;; Lists + ((pred arrayp) (nisp/list form)) + + (other (error "Encountered unhandled form: %s" other))) + ")")) + +(defun nisp/infix (op lhs rhs) + (concat (nisp/expr lhs) " " op " " (nisp/expr rhs))) + +(defun nisp/funcall (func args) + (concat (symbol-name func) " " (s-join " " (-map #'nisp/expr args)))) + +(defun nisp/let (form) + (pcase form + (`(let . (,bindings . (,body . ()))) (concat "let " + (nisp/let bindings) + (nisp/expr body))) + (`((:inherit . ,inherits) . ,rest) (concat (nisp/inherit (car form)) + " " + (nisp/let rest))) + (`((,name . (,value . ())) .,rest) (concat (symbol-name name) " = " + (nisp/expr value) "; " + (nisp/let rest))) + ('() "in ") + (other (error "malformed form '%s' in let expression" other)))) + +(defun nisp/inherit (form) + (pcase form + (`(:inherit . ,rest) (concat "inherit " (nisp/inherit rest))) + (`((,source) . ,rest) (concat "(" (symbol-name source) ") " (nisp/inherit rest))) + (`(,item . 
,rest) (concat (symbol-name item) " " (nisp/inherit rest))) + ('() ";"))) + +(defun nisp/if (cond then else) + (concat "if " (nisp/expr cond) + " then " (nisp/expr then) + " else " (nisp/expr else))) + +(defun nisp/list (form) + (cl-check-type form array) + (concat "[ " + (mapconcat #'nisp/expr form " ") + "]")) + + +(defun nisp/attribute-set (form) + "Attribute sets have spooky special handling because they are +not supported by the reader." + (pcase form + (`(attrs . ,rest) (concat "{ " (nisp/attribute-set rest))) + ((and `(,name . (,value . ,rest)) + (guard (keywordp name))) + (concat (substring (symbol-name name) 1) " = " + (nisp/expr value) "; " + (nisp/attribute-set rest))) + ('() "}"))) + +(defun nisp/fn (form) + (pcase form + (`(fn ,args ,body) (concat + (cl-loop for arg in args + concat (format "%s: " arg)) + (nisp/expr body))))) + +;; The following functions are not part of the transform. + +(defun nisp/eval (form) + (interactive "sExpression: ") + (when (stringp form) + (setq form (read form))) + + (message + ;; TODO(tazjin): Construct argv manually to avoid quoting issues. + (s-chomp + (shell-command-to-string + (concat "nix-instantiate --eval -E '" (nisp/expr form) "'"))))) + +(defun nisp/eval-last-sexp () + (interactive) + (nisp/eval (edebug-last-sexp))) diff --git a/users/tazjin/nittredir/background.js b/users/tazjin/nittredir/background.js new file mode 100644 index 000000000000..8d07f8528520 --- /dev/null +++ b/users/tazjin/nittredir/background.js @@ -0,0 +1,10 @@ +/** + * Register a URL change handler that redirects twitter.com links to nitter.net + */ + +chrome.webRequest.onBeforeRequest.addListener(function(details) { + let url = new URL(details.url); + return { + redirectUrl: ('https://nitter.net' + url.pathname) + }; +}, {urls: ['*://twitter.com/*'], types: ['main_frame']}, ['blocking']); diff --git a/users/tazjin/nittredir/manifest.json b/users/tazjin/nittredir/manifest.json new file mode 100644 index 000000000000..4efe1a6cc245 --- /dev/null +++ b/users/tazjin/nittredir/manifest.json @@ -0,0 +1,15 @@ +{ + "manifest_version": 2, + "name": "nittredir", + "version": "1.0", + "description": "Redirect twitter.com to nitter.net", + "background": { + "scripts": ["background.js"], + "persistent": true + }, + "permissions": [ + "webRequest", + "webRequestBlocking", + "*://twitter.com/*" + ] +} diff --git a/users/tazjin/nixos/README.md b/users/tazjin/nixos/README.md new file mode 100644 index 000000000000..0093f4ac65e1 --- /dev/null +++ b/users/tazjin/nixos/README.md @@ -0,0 +1,20 @@ +NixOS configuration +=================== + +My NixOS configuration! It configures most of the packages I require +on my systems, sets up Emacs the way I need and does a bunch of other +interesting things. + +System configuration lives in folders for each machine and a custom +fixed point evaluation (similar to standard NixOS module +configuration) is used to combine configuration together. + +Building `ops.nixos.rebuilder` yields a script that will automatically +build and activate the newest configuration based on the current +hostname. + +## Configured hosts: + +* `frog` - weapon of mass computation at home +* `camden` - NUC serving tazj.in, tvl.fyi & co +* ~~`urdhva` - T470s~~ (currently with edef) diff --git a/users/tazjin/nixos/camden/default.nix b/users/tazjin/nixos/camden/default.nix new file mode 100644 index 000000000000..d8c439b4f46c --- /dev/null +++ b/users/tazjin/nixos/camden/default.nix @@ -0,0 +1,365 @@ +# This file configures camden.tazj.in, my homeserver. +{ depot, pkgs, lib, ... 
}: + +config: let + nixpkgs = import depot.third_party.nixpkgsSrc { + config.allowUnfree = true; + }; + + nginxRedirect = { from, to, acmeHost }: { + serverName = from; + useACMEHost = acmeHost; + forceSSL = true; + + extraConfig = "return 301 https://${to}$request_uri;"; + }; +in lib.fix(self: { + depot = depot; + + # Disable the current ACME module and use the old one from 19.09 + # instead, until the various regressions have been sorted out. + # TODO(tazjin): Remove this once the new ACME module works. + disabledModules = [ "security/acme.nix" ]; + imports = + let oldChannel = fetchTarball { + # NixOS 19.09 on 2020-10-04 + url = "https://github.com/NixOS/nixpkgs-channels/archive/75f4ba05c63be3f147bcc2f7bd4ba1f029cedcb1.tar.gz"; + sha256 = "157c64220lf825ll4c0cxsdwg7cxqdx4z559fdp7kpz0g6p8fhhr"; + }; + in [ + "${depot.depotPath}/ops/nixos/depot.nix" + "${depot.depotPath}/ops/nixos/quassel.nix" + "${depot.depotPath}/ops/nixos/smtprelay.nix" + "${oldChannel}/nixos/modules/security/acme.nix" + ]; + + # camden is intended to boot unattended, despite having an encrypted + # root partition. + # + # The below configuration uses an externally connected USB drive + # that contains a LUKS key file to unlock the disk automatically at + # boot. + # + # TODO(tazjin): Configure LUKS unlocking via SSH instead. + boot = { + initrd = { + availableKernelModules = [ + "ahci" "xhci_pci" "usbhid" "usb_storage" "sd_mod" "sdhci_pci" + "rtsx_usb_sdmmc" "r8169" + ]; + + kernelModules = [ "dm-snapshot" ]; + + luks.devices.camden-crypt = { + fallbackToPassword = true; + device = "/dev/disk/by-label/camden-crypt"; + keyFile = "/dev/sdb"; + keyFileSize = 4096; + }; + }; + + loader = { + systemd-boot.enable = true; + efi.canTouchEfiVariables = true; + }; + + cleanTmpDir = true; + }; + + fileSystems = { + "/" = { + device = "/dev/disk/by-label/camden-root"; + fsType = "ext4"; + }; + + "/home" = { + device = "/dev/disk/by-label/camden-home"; + fsType = "ext4"; + }; + + "/boot" = { + device = "/dev/disk/by-label/BOOT"; + fsType = "vfat"; + }; + }; + + nix = { + maxJobs = lib.mkDefault 4; + + nixPath = [ + "depot=/home/tazjin/depot" + "nixpkgs=${depot.third_party.nixpkgsSrc}" + ]; + + trustedUsers = [ "root" "tazjin" ]; + + binaryCaches = [ + "https://tazjin.cachix.org" + ]; + + binaryCachePublicKeys = [ + "tazjin.cachix.org-1:IZkgLeqfOr1kAZjypItHMg1NoBjm4zX9Zzep8oRSh7U=" + ]; + }; + nixpkgs.pkgs = nixpkgs; + + powerManagement.cpuFreqGovernor = lib.mkDefault "powersave"; + + networking = { + hostName = "camden"; + interfaces.enp1s0.useDHCP = true; + interfaces.enp1s0.ipv6.addresses = [ + { + address = "2a01:4b00:821a:ce02::5"; + prefixLength = 64; + } + ]; + + firewall.enable = false; + }; + + time.timeZone = "UTC"; + + # System-wide application setup + programs.fish.enable = true; + programs.mosh.enable = true; + + fonts = { + fonts = [ nixpkgs.jetbrains-mono ]; + fontconfig.defaultFonts.monospace = [ "JetBrains Mono" ]; + }; + + environment.systemPackages = + # programs from the depot + (with depot; [ + fun.idual.script + fun.idual.setAlarm + third_party.pounce + ]) ++ + + # programs from nixpkgs + (with nixpkgs; [ + bat + curl + direnv + emacs26-nox + fswebcam + git + gnupg + google-cloud-sdk + htop + jq + pass + pciutils + restic + ripgrep + screen + ]); + + users = { + # Set up my own user for logging in and doing things ... 
+ users.tazjin = { + isNormalUser = true; + uid = 1000; + extraGroups = [ "git" "wheel" "quassel" "video" ]; + shell = nixpkgs.fish; + }; + + # Set up a user & group for general git shenanigans + groups.git = {}; + users.git = { + group = "git"; + isNormalUser = false; + }; + }; + + # Services setup + services.openssh.enable = true; + services.haveged.enable = true; + + # Join Tailscale into home network + services.tailscale.enable = true; + + # Allow sudo-ing via the forwarded SSH agent. + security.pam.enableSSHAgentAuth = true; + + # NixOS 20.03 broke nginx and I can't be bothered to debug it + # anymore, all solution attempts have failed, so here's a + # brute-force fix. + systemd.services.fix-nginx = { + script = "${nixpkgs.coreutils}/bin/chown -R nginx: /var/spool/nginx /var/cache/nginx"; + + serviceConfig = { + User = "root"; + Type = "oneshot"; + }; + }; + + systemd.timers.fix-nginx = { + wantedBy = [ "multi-user.target" ]; + timerConfig = { + OnCalendar = "minutely"; + }; + }; + + # Provision a TLS certificate outside of nginx to avoid + # nixpkgs#38144 + security.acme = { + # acceptTerms = true; + + certs."tazj.in" = { + email = "mail@tazj.in"; + user = "nginx"; + group = "nginx"; + webroot = "/var/lib/acme/acme-challenge"; + extraDomains = { + "cs.tazj.in" = null; + "git.tazj.in" = null; + "www.tazj.in" = null; + + # Local domains (for this machine only) + "camden.tazj.in" = null; + }; + postRun = "systemctl reload nginx"; + }; + + certs."quassel.tazj.in" = { + email = "mail@tazj.in"; + webroot = "/var/lib/acme/challenge-quassel"; + user = "nginx"; # required because of a bug in the ACME module + group = "quassel"; + allowKeysForGroup = true; + }; + }; + + # Forward logs to Google Cloud Platform + services.journaldriver = { + enable = true; + logStream = "home"; + googleCloudProject = "tazjins-infrastructure"; + applicationCredentials = "/etc/gcp/key.json"; + }; + + services.depot.quassel = { + enable = true; + acmeHost = "quassel.tazj.in"; + bindAddresses = [ + "0.0.0.0" + ]; + }; + + services.bitlbee = { + enable = true; + portNumber = 2337; # bees + }; + + # serve my website(s) + services.nginx = { + enable = true; + enableReload = true; + package = with nixpkgs; nginx.override { + modules = [ nginxModules.rtmp ]; + }; + + recommendedTlsSettings = true; + recommendedGzipSettings = true; + recommendedProxySettings = true; + + appendConfig = '' + rtmp_auto_push on; + rtmp { + server { + listen 1935; + chunk_size 4000; + + application tvl { + live on; + + allow publish 88.98.195.213; + allow publish 10.0.1.0/24; + deny publish all; + + allow play all; + } + } + } + ''; + + commonHttpConfig = '' + log_format json_combined escape=json + '{' + '"remote_addr":"$remote_addr",' + '"method":"$request_method",' + '"uri":"$request_uri",' + '"status":$status,' + '"request_size":$request_length,' + '"response_size":$body_bytes_sent,' + '"response_time":$request_time,' + '"referrer":"$http_referer",' + '"user_agent":"$http_user_agent"' + '}'; + + access_log syslog:server=unix:/dev/log,nohostname json_combined; + ''; + + virtualHosts.homepage = { + serverName = "tazj.in"; + serverAliases = [ "camden.tazj.in" ]; + default = true; + useACMEHost = "tazj.in"; + root = depot.users.tazjin.homepage; + forceSSL = true; + + extraConfig = '' + ${depot.users.tazjin.blog.oldRedirects} + + add_header Strict-Transport-Security "max-age=31536000; includeSubDomains; preload" always; + + location ~* \.(webp|woff2)$ { + add_header Cache-Control "public, max-age=31536000"; + } + + location /blog/ { + alias 
${depot.users.tazjin.blog.rendered}/; + + if ($request_uri ~ ^/(.*)\.html$) { + return 302 /$1; + } + + try_files $uri $uri.html $uri/ =404; + } + + location = /tazjin { + return 200 "tazjin"; + } + + location /blobs/ { + alias /var/www/blobs/; + } + ''; + }; + + virtualHosts.cgit-old = nginxRedirect { + from = "git.tazj.in"; + to = "code.tvl.fyi"; + acmeHost = "tazj.in"; + }; + + virtualHosts.cs-old = nginxRedirect { + from = "cs.tazj.in"; + to = "cs.tvl.fyi"; + acmeHost = "tazj.in"; + }; + }; + + # Timer units that can be started with systemd-run to set my alarm. + systemd.user.services.light-alarm = { + script = "${depot.fun.idual.script}/bin/idualctl wakey"; + postStart = "${pkgs.systemd}/bin/systemctl --user stop light-alarm.timer"; + serviceConfig = { + Type = "oneshot"; + }; + }; + + system.stateVersion = "19.09"; +}) diff --git a/users/tazjin/nixos/default.nix b/users/tazjin/nixos/default.nix new file mode 100644 index 000000000000..d4576bd3c725 --- /dev/null +++ b/users/tazjin/nixos/default.nix @@ -0,0 +1,46 @@ +# TODO(tazjin): Generalise this and move to //ops/nixos +{ depot, lib, ... }: + +let + inherit (builtins) foldl'; + + systemFor = configs: (depot.third_party.nixos { + configuration = lib.fix(config: + foldl' lib.recursiveUpdate {} (map (c: c config) configs) + ); + }).system; + + caseFor = hostname: '' + ${hostname}) + echo "Rebuilding NixOS for //users/tazjin/nixos/${hostname}" + system=$(nix-build -E '(import <depot> {}).users.tazjin.nixos.${hostname}System' --no-out-link) + ;; + ''; + + rebuilder = depot.third_party.writeShellScriptBin "rebuilder" '' + set -ue + if [[ $EUID -ne 0 ]]; then + echo "Oh no! Only root is allowed to rebuild the system!" >&2 + exit 1 + fi + + case $HOSTNAME in + ${caseFor "camden"} + ${caseFor "frog"} + *) + echo "$HOSTNAME is not a known NixOS host!" >&2 + exit 1 + ;; + esac + + nix-env -p /nix/var/nix/profiles/system --set $system + $system/bin/switch-to-configuration switch + ''; +in { + inherit rebuilder; + + camdenSystem = systemFor [ depot.users.tazjin.nixos.camden ]; + frogSystem = systemFor [ depot.users.tazjin.nixos.frog ]; + + meta.targets = [ "camdenSystem" "frogSystem" ]; +} diff --git a/users/tazjin/nixos/frog/default.nix b/users/tazjin/nixos/frog/default.nix new file mode 100644 index 000000000000..2f0b204a615b --- /dev/null +++ b/users/tazjin/nixos/frog/default.nix @@ -0,0 +1,300 @@ +{ depot, lib, ... }: + +config: let + inherit (depot.third_party) lieer; + nixpkgs = import depot.third_party.nixpkgsSrc { + config.allowUnfree = true; + }; + + # add google-c-style here because other machines get it from, eh, + # elsewhere. 
+ frogEmacs = (depot.users.tazjin.emacs.overrideEmacs(epkgs: epkgs ++ [ + depot.third_party.emacsPackages.google-c-style + ])); + + quasselClient = depot.third_party.quassel.override { + client = true; + enableDaemon = false; + monolithic = false; + }; +in depot.lib.fix(self: { + imports = [ + "${depot.depotPath}/ops/nixos/v4l2loopback.nix" + ]; + + boot = { + tmpOnTmpfs = true; + kernelModules = [ "kvm-amd" ]; + + loader = { + systemd-boot.enable = true; + efi.canTouchEfiVariables = true; + }; + + initrd = { + luks.devices.frog-crypt.device = "/dev/disk/by-label/frog-crypt"; + availableKernelModules = [ "xhci_pci" "ahci" "nvme" "usb_storage" "usbhid" "sd_mod" ]; + kernelModules = [ "dm-snapshot" ]; + }; + + kernelPackages = nixpkgs.linuxPackages_latest; + kernel.sysctl = { + "kernel.perf_event_paranoid" = -1; + }; + + kernelPatches = [ + depot.third_party.kernelPatches.trx40_usb_audio + ]; + }; + + hardware = { + cpu.amd.updateMicrocode = true; + enableRedistributableFirmware = true; + opengl = { + enable = true; + driSupport = true; + driSupport32Bit = true; + }; + + pulseaudio = { + enable = true; + package = nixpkgs.pulseaudioFull; + }; + + bluetooth = { + enable = true; + }; + }; + + nix = { + maxJobs = 48; + nixPath = [ + "depot=/depot" + "nixpkgs=${depot.third_party.nixpkgsSrc}" + ]; + + binaryCaches = ["ssh://nix-ssh@whitby.tvl.fyi"]; + binaryCachePublicKeys = ["cache.tvl.fyi:fd+9d1ceCPvDX/xVhcfv8nAa6njEhAGAEe+oGJDEeoc="]; + }; + + nixpkgs.pkgs = nixpkgs; + + networking = { + hostName = "frog"; + useDHCP = true; + + # Don't use ISP's DNS servers: + nameservers = [ + "8.8.8.8" + "8.8.4.4" + ]; + + firewall.enable = false; + }; + + # Generate an immutable /etc/resolv.conf from the nameserver settings + # above (otherwise DHCP overwrites it): + environment.etc."resolv.conf" = with lib; { + source = depot.third_party.writeText "resolv.conf" '' + ${concatStringsSep "\n" (map (ns: "nameserver ${ns}") self.networking.nameservers)} + options edns0 + ''; + }; + + time.timeZone = "Europe/London"; + + fileSystems = { + "/".device = "/dev/disk/by-label/frog-root"; + "/boot".device = "/dev/disk/by-label/BOOT"; + "/home".device = "/dev/disk/by-label/frog-home"; + }; + + # Configure user account + users.extraUsers.tazjin = { + extraGroups = [ "wheel" "audio" "docker" ]; + isNormalUser = true; + uid = 1000; + shell = nixpkgs.fish; + }; + + security.sudo = { + enable = true; + extraConfig = "wheel ALL=(ALL:ALL) SETENV: ALL"; + }; + + fonts = { + fonts = with nixpkgs; [ + corefonts + dejavu_fonts + jetbrains-mono + noto-fonts-cjk + noto-fonts-emoji + ]; + + fontconfig = { + hinting.enable = true; + subpixel.lcdfilter = "light"; + + defaultFonts = { + monospace = [ "JetBrains Mono" ]; + }; + }; + }; + + # Configure location (Vauxhall, London) for services that need it. 
+ location = { + latitude = 51.4819109; + longitude = -0.1252998; + }; + + programs.fish.enable = true; + programs.ssh.startAgent = true; + + services.redshift.enable = true; + services.openssh.enable = true; + services.fstrim.enable = true; + services.blueman.enable = true; + + # Required for Yubikey usage as smartcard + services.pcscd.enable = true; + services.udev.packages = [ + nixpkgs.yubikey-personalization + ]; + + # Enable Docker for Nixery testing + virtualisation.docker = { + enable = true; + autoPrune.enable = true; + }; + + services.xserver = { + enable = true; + layout = "us"; + xkbOptions = "caps:super"; + exportConfiguration = true; + videoDrivers = [ "amdgpu" ]; + displayManager = { + # Give EXWM permission to control the session. + sessionCommands = "${nixpkgs.xorg.xhost}/bin/xhost +SI:localuser:$USER"; + + lightdm.enable = true; + lightdm.greeters.gtk.clock-format = "%H·%M"; # TODO(tazjin): TZ? + }; + + windowManager.session = lib.singleton { + name = "exwm"; + start = "${frogEmacs}/bin/tazjins-emacs"; + }; + }; + + # Do not restart the display manager automatically + systemd.services.display-manager.restartIfChanged = lib.mkForce false; + + # clangd needs more than ~2GB in the runtime directory to start up + services.logind.extraConfig = '' + RuntimeDirectorySize=16G + ''; + + # Configure email setup + systemd.user.services.lieer-tazjin = { + description = "Synchronise mail@tazj.in via lieer"; + script = "${lieer}/bin/gmi sync"; + + serviceConfig = { + WorkingDirectory = "%h/mail/account.tazjin"; + Type = "oneshot"; + }; + }; + + systemd.user.timers.lieer-tazjin = { + wantedBy = [ "timers.target" ]; + + timerConfig = { + OnActiveSec = "1"; + OnUnitActiveSec = "180"; + }; + }; + + environment.systemPackages = + # programs from the depot + (with depot; [ + frogEmacs + fun.idual.script + fun.uggc + lieer + ops.kontemplate + quasselClient + third_party.ffmpeg + third_party.git + third_party.lutris + third_party.rr + tools.nsfv-setup + ]) ++ + + # programs from nixpkgs + (with nixpkgs; [ + age + bat + chromium + clang-manpages + clang-tools + clang_11 + curl + direnv + dnsutils + emacs26 # mostly for emacsclient + exa + fd + file + gdb + gnupg + go + google-chrome + google-cloud-sdk + htop + hyperfine + i3lock + iftop + imagemagick + jq + kubectl + linuxPackages.perf + manpages + miller + msmtp + nix-prefetch-github + notmuch + obs-studio + obs-v4l2sink + openssh + openssl + pass + pavucontrol + pciutils + pinentry + pinentry-emacs + pmutils + pwgen + ripgrep + rustup + screen + scrot + spotify + steam + tokei + transmission + tree + unzip + usbutils + v4l-utils + vlc + xclip + xsecurelock + yubico-piv-tool + yubikey-personalization + zoxide + ]); + + # ... and other nonsense. + system.stateVersion = "20.03"; +}) diff --git a/users/tazjin/presentations/bootstrapping-2018/README.md b/users/tazjin/presentations/bootstrapping-2018/README.md new file mode 100644 index 000000000000..e9573ae3f2e1 --- /dev/null +++ b/users/tazjin/presentations/bootstrapping-2018/README.md @@ -0,0 +1,5 @@ +These are the slides for a talk I gave at the Norwegian Unix User Group on +2018-03-13. + +There is more information and a recording on the [event +page](https://www.nuug.no/aktiviteter/20180313-reproduible-compiler/). 
diff --git a/users/tazjin/presentations/bootstrapping-2018/default.nix b/users/tazjin/presentations/bootstrapping-2018/default.nix new file mode 100644 index 000000000000..0dff14b2a1a6 --- /dev/null +++ b/users/tazjin/presentations/bootstrapping-2018/default.nix @@ -0,0 +1,50 @@ +# This derivation builds the LaTeX presentation. + +{ pkgs, ... }: + +with pkgs; + +let tex = texlive.combine { + inherit (texlive) + beamer + beamertheme-metropolis + etoolbox + euenc + extsizes + fontspec + lualibs + luaotfload + luatex + minted + ms + pgfopts + scheme-basic + translator; +}; +in stdenv.mkDerivation { + name = "nuug-bootstrapping-slides"; + src = ./.; + + FONTCONFIG_FILE = makeFontsConf { + fontDirectories = [ fira fira-code fira-mono ]; + }; + + buildInputs = [ tex fira fira-code fira-mono ]; + buildPhase = '' + # LaTeX needs a cache folder in /home/ ... + mkdir home + export HOME=$PWD/home + # ${tex}/bin/luaotfload-tool -ufv + + # As usual, TeX needs to be run twice ... + function run() { + ${tex}/bin/lualatex presentation.tex + } + run && run + ''; + + installPhase = '' + mkdir -p $out + cp presentation.pdf $out/ + ''; +} diff --git a/users/tazjin/presentations/bootstrapping-2018/drake-meme.png b/users/tazjin/presentations/bootstrapping-2018/drake-meme.png new file mode 100644 index 000000000000..4b036754384f --- /dev/null +++ b/users/tazjin/presentations/bootstrapping-2018/drake-meme.png Binary files differdiff --git a/users/tazjin/presentations/bootstrapping-2018/nixos-logo.png b/users/tazjin/presentations/bootstrapping-2018/nixos-logo.png new file mode 100644 index 000000000000..ce0c98c2cabb --- /dev/null +++ b/users/tazjin/presentations/bootstrapping-2018/nixos-logo.png Binary files differdiff --git a/users/tazjin/presentations/bootstrapping-2018/notes.org b/users/tazjin/presentations/bootstrapping-2018/notes.org new file mode 100644 index 000000000000..363d75352e62 --- /dev/null +++ b/users/tazjin/presentations/bootstrapping-2018/notes.org @@ -0,0 +1,89 @@ +#+TITLE: Bootstrapping, reproducibility, etc. +#+AUTHOR: Vincent Ambo +#+DATE: <2018-03-10 Sat> + +* Compiler bootstrapping + This section contains notes about compiler bootstrapping, the + history thereof, which compilers need it - and so on: + +** C + +** Haskell + - self-hosted compiler (GHC) + +** Common Lisp + CL is fairly interesting in this space because it is a language + that is defined via an ANSI standard that compiler implementations + normally actually follow! + + CL has several ecosystem components that focus on making + abstracting away implementation-specific calls and if a self-hosted + compiler is written in CL using those components it can be + cross-bootstrapped. + +** Python + +* A note on runtimes + Sometimes the compiler just isn't enough ... + +** LLVM +** JVM + +* References + https://github.com/mame/quine-relay + https://manishearth.github.io/blog/2016/12/02/reflections-on-rusting-trust/ + https://tests.reproducible-builds.org/debian/reproducible.html + +* Slide thoughts: + 1. Hardware trust has been discussed here a bunch, most recently + during the puri.sm talk. Hardware trust is important, as we see + with IME, but it's striking that people often take a leap to "I'm + now on my trusted Debian with free software". + + Unless you built it yourself from scratch (Spoiler: you haven't) + you're placing trust in what is basically foreign binary blobs. + + Agenda: Implications/attack vectors of this, state of the chicken + & egg, the topic of reproducibility, what can you do? (Nix!) + + 2. 
Chicken-and-egg issue + + It's an important milestone for a language to become self-hosted: + You begin doing a kind of dogfeeding, you begin to enforce + reliability & consistency guarantees to avoid having to redo your + own codebase constantly and so on. + + However, the implication is now that you need your own compiler + to compile itself. + + Common examples: + - C/C++ compilers needed to build C/C++ compilers: + + GCC 4.7 was the last version of GCC that could be built with a + standard C-compiler, nowadays it is mostly written in C++. + + Certain versions of GCC can be built with LLVM/Clang. + + Clang/LLVM can be compiled by itself and also GCC. + + - Rust was originally written in OCAML but moved to being + self-hosted in 2011. Currently rustc-releases are always built + with a copy of the previous release. + + It's relatively new so we can build the chain all the way. + + Notable exceptions: Some popular languages are not self-hosted, + for example Clojure. Languages also have runtimes, which may be + written in something else (e.g. Haskell -> C runtime) +* How to help: + Most of this advice is about reproducible builds, not bootstrapping, + as that is a much harder project. + + - fix reproducibility issues listed in Debian's issue tracker (focus + on non-Debian specific ones though) + - experiment with NixOS / GuixSD to get a better grasp on the + problem space of reproducibility + + If you want to contribute to bootstrapping, look at + bootstrappable.org and their wiki. Several initiatives such as MES + could need help! diff --git a/users/tazjin/presentations/bootstrapping-2018/presentation.pdf b/users/tazjin/presentations/bootstrapping-2018/presentation.pdf new file mode 100644 index 000000000000..7f435fe5b539 --- /dev/null +++ b/users/tazjin/presentations/bootstrapping-2018/presentation.pdf Binary files differdiff --git a/users/tazjin/presentations/bootstrapping-2018/presentation.tex b/users/tazjin/presentations/bootstrapping-2018/presentation.tex new file mode 100644 index 000000000000..d3aa61337554 --- /dev/null +++ b/users/tazjin/presentations/bootstrapping-2018/presentation.tex @@ -0,0 +1,251 @@ +\documentclass[12pt]{beamer} +\usetheme{metropolis} +\newenvironment{code}{\ttfamily}{\par} +\title{Where does \textit{your} compiler come from?} +\date{2018-03-13} +\author{Vincent Ambo} +\institute{Norwegian Unix User Group} +\begin{document} + \maketitle + + %% Slide 1: + \section{Introduction} + + %% Slide 2: + \begin{frame}{Chicken and egg} + Self-hosted compilers are often built using themselves, for example: + + \begin{itemize} + \item C-family compilers bootstrap themselves \& each other + \item (Some!) Common Lisp compilers can bootstrap each other + \item \texttt{rustc} bootstraps itself with a previous version + \item ... same for many other languages! + \end{itemize} + \end{frame} + + \begin{frame}{Chicken, egg and ... lizard?} + It's not just compilers: Languages have runtimes, too. + + \begin{itemize} + \item JVM is implemented in C++ + \item Erlang-VM is C + \item Haskell runtime is C + \end{itemize} + + ... we can't ever get away from C, can we? 
+ \end{frame} + + %% Slide 3: + \begin{frame}{Trusting Trust} + \begin{center} + \huge{Could this be exploited?} + \end{center} + \end{frame} + + %% Slide 4: + \begin{frame}{Short interlude: A quine} + \begin{center} + \begin{code} + ((lambda (x) (list x (list 'quote x))) + \newline\vspace*{6mm} '(lambda (x) (list x (list 'quote x)))) + \end{code} + \end{center} + \end{frame} + + %% Slide 5: + \begin{frame}{Short interlude: Quine Relay} + \begin{center} + \includegraphics[ + keepaspectratio=true, + height=\textheight + ]{quine-relay.png} + \end{center} + \end{frame} + + %% Slide 6: + \begin{frame}{Trusting Trust} + An attack described by Ken Thompson in 1983: + + \begin{enumerate} + \item Modify a compiler to detect when it's compiling itself. + \item Let the modification insert \textit{itself} into the new compiler. + \item Add arbitrary attack code to the modification. + \item \textit{Optional!} Remove the attack from the source after compilation. + \end{enumerate} + \end{frame} + + %% Slide 7: + \begin{frame}{Damage potential?} + \begin{center} + \large{Let your imagination run wild!} + \end{center} + \end{frame} + + %% Slide 8: + \section{Countermeasures} + + %% Slide 9: + \begin{frame}{Diverse Double-Compiling} + Assume we have: + + \begin{itemize} + \item Target language compilers $A$ and $T$ + \item The source code of $A$: $ S_{A} $ + \end{itemize} + \end{frame} + + %% Slide 10: + \begin{frame}{Diverse Double-Compiling} + Apply the first stage (functional equivalence): + + \begin{itemize} + \item $ X = A(S_{A})$ + \item $ Y = T(S_{A})$ + \end{itemize} + + Apply the second stage (bit-for-bit equivalence): + + \begin{itemize} + \item $ V = X(S_{A})$ + \item $ W = Y(S_{A})$ + \end{itemize} + + Now we have a new problem: Reproducibility! + \end{frame} + + %% Slide 11: + \begin{frame}{Reproducibility} + Bit-for-bit equivalent output is hard, for example: + + \begin{itemize} + \item Timestamps in output artifacts + \item Non-deterministic linking order in concurrent builds + \item Non-deterministic VM \& memory states in outputs + \item Randomness in builds (sic!) + \end{itemize} + \end{frame} + + \begin{frame}{Reproducibility} + \begin{center} + Without reproducibility, we can never trust that any shipped + binary matches the source code! + \end{center} + \end{frame} + + %% Slide 12: + \section{(Partial) State of the Union} + + \begin{frame}{The Desired State} + \begin{center} + \begin{enumerate} + \item Full-source bootstrap! + \item All packages reproducible! + \end{enumerate} + \end{center} + \end{frame} + + %% Slide 13: + \begin{frame}{Bootstrapping Debian} + \begin{itemize} + \item Sparse information on the Debian-wiki + \item Bootstrapping discussions mostly resolve around new architectures + \item GCC is compiled by depending on previous versions of GCC + \end{itemize} + \end{frame} + + \begin{frame}{Reproducing Debian} + Debian has a very active effort for reproducible builds: + + \begin{itemize} + \item Organised information about reproducibility status + \item Over 90\% reproducibility in Debian package base! 
+ \end{itemize} + \end{frame} + + \begin{frame}{Short interlude: Nix} + \begin{center} + \includegraphics[ + keepaspectratio=true, + height=0.7\textheight + ]{nixos-logo.png} + \end{center} + \end{frame} + + \begin{frame}{Short interlude: Nix} + \begin{center} + \includegraphics[ + keepaspectratio=true, + height=0.90\textheight + ]{drake-meme.png} + \end{center} + \end{frame} + + \begin{frame}{Short interlude: Nix} + \begin{center} + \includegraphics[ + keepaspectratio=true, + height=0.7\textheight + ]{nixos-logo.png} + \end{center} + \end{frame} + + \begin{frame}{Bootstrapping NixOS} + Nix evaluation can not recurse forever: The bootstrap can not + simply depend on a previous GCC. + + Workaround: \texttt{bootstrap-tools} tarball from a previous + binary cache is fetched and used. + + An unfortunate magic binary blob ... + \end{frame} + + \begin{frame}{Reproducing NixOS} + Not all reproducibility patches have been ported from Debian. + + However: Builds are fully repeatable via the Nix fundamentals! + \end{frame} + + \section{Future Developments} + + \begin{frame}{Bootstrappable: stage0} + Hand-rolled ``Cthulhu's Path to Madness'' hex-programs: + + \begin{itemize} + \item No non-auditable binary blobs + \item Aims for understandability by 70\% of programmers + \item End goal is a full-source bootstrap of GCC + \end{itemize} + \end{frame} + + + \begin{frame}{Bootstrappable: MES} + Bootstrapping the ``Maxwell Equations of Software'': + + \begin{itemize} + \item Minimal C-compiler written in Scheme + \item Minimal Scheme-interpreter (currently in C, but intended to + be rewritten in stage0 macros) + \item End goal is full-source bootstrap of the entire GuixSD + \end{itemize} + \end{frame} + + \begin{frame}{Other platforms} + \begin{itemize} + \item Nix for Darwin is actively maintained + \item F-Droid Android repository works towards fully reproducible + builds of (open) Android software + \item Mobile devices (phones, tablets, etc.) are a lost cause at + the moment + \end{itemize} + \end{frame} + + \begin{frame}{Thanks!} + Resources: + \begin{itemize} + \item bootstrappable.org + \item reproducible-builds.org + \end{itemize} + + @tazjin | mail@tazj.in + \end{frame} +\end{document} diff --git a/users/tazjin/presentations/bootstrapping-2018/quine-relay.png b/users/tazjin/presentations/bootstrapping-2018/quine-relay.png new file mode 100644 index 000000000000..5644dc3900e3 --- /dev/null +++ b/users/tazjin/presentations/bootstrapping-2018/quine-relay.png Binary files differdiff --git a/users/tazjin/presentations/bootstrapping-2018/result.pdfpc b/users/tazjin/presentations/bootstrapping-2018/result.pdfpc new file mode 100644 index 000000000000..b0fa6c9a0ef8 --- /dev/null +++ b/users/tazjin/presentations/bootstrapping-2018/result.pdfpc @@ -0,0 +1,142 @@ +[file] +result +[last_saved_slide] +10 +[font_size] +20000 +[notes] +### 1 +- previous discussions of hardware trust (e.g. purism presentation) +- people leap to "now I'm on my trusted Debian!" +- unless you built it from scratch (spoiler: you haven't) you're *trusting* someone + +Agenda: Implications of trust with focus on bootstrap paths and reproducibility, plus how you can help.### 2 +self-hosting: +- C-family: GCC pre/post 4.7, Clang +- Common Lisp: Sunshine land! (with SBCL) +- rustc: Bootstrap based on previous versions (C++ transpiler underway!) +- many other languages also work this way! 
+ +(Noteable counterexample: Clojure is written in Java!)### 3 + +- compilers are just one bit, the various runtimes exist, too!### 4 + +Could this be exploited? + +People don't think about where their compiler comes from. + +Even if they do, they may only go so far as to say "I'll just recompile it using <other compiler>". + +Unfortunately, spoiler alert, life isn't that easy in the computer world and yes, exploitation is possible.### 5 + +- describe what a quine is +- classic Lisp quine +- explain demo quine +- demo demo quine + +- this is interesting, but not useful - can quines do more than that?### 6 + +- quine-relay: "art project" with 128-language circular quine + +- show source of quine-relay + +- (demo quine relay?) + +- side-note: this program is very, very trustworthy!### 7 + +Ken Thompson (designer of UNIX and a couple other things!) received Turing award in 1983, and described attack in speech. + +- figure out how to detect self-compilation +- make that modification a quine +- insert modification into new compiler +- add attack code to modification +- remove attack from source, distributed binary will still be compromised! it's like evolution :)### 8 + +damage potential is basically infinite: + +- classic "login" attack +=> also applicable to other credentials + +- attack (weaken) crypto algorithms + +- you can probably think of more!### 10 + +idea being: potential vulnerability would have to work across compilers: + +the more compilers we can introduce (e.g. more architectures, different versions, different compilers), the harder it gets for a vulnerability to survive all of those + +The more compilers, the merrier! Lisps are pretty good at this.### 11 + +if we get a bit-mismatch after DDC, not all hope is lost: Maybe the thing just isn't reproducible! + +- many reasons for failures +- timestamps are a classic! artifacts can be build logs, metadata in ZIP-files or whatever +- non-determinism is the devil +- sometimes people actively introduce build-randomness (NaCl)### 12 + +- Does that binary download on the project's website really match the source? + +- Your Linux packages are signed by someone - cool - but what does that mean?### 13 + +Two things should be achieved - gross oversimplification - to get to the ideal "desired state of the union": + +1. full-source bootstrap: without ever introducing any binaries, go from nothing to a full Linux distribution + +2. when packages are distributed, we should be able to know the expected output of a source package beforehand + +=> suddenly binary distributions become a cache! But more on Nix later.### 14 + +- Debian project does not seem as concerned with bootstrapping as with reproducibility +- Debian mostly bootstraps on new architectures (using cross-compilation and similar techniques, from an existing binary base) +- core bootstrap (GCC & friends) is performed with previous Debian version and depending on GCC### 15 + +... however! Debian cares about reproducibility. + +- automated testing of reproducibility +- information about the status of all packages is made available in repos +- Over 90% packages of packages are reproducible! + +< show reproducible builds website > + +Debian is still fundamentally a binary distribution though, but it doesn't have to be that way.### 16 + +Nix - a purely functional package manager + +It's not a new project (10+ years), been discussed here before, has multiple components: package manager, language, NixOS. 
+ +Instead of describing *how* to build a thing, Nix describes *what* to build:### 17 +### 19 + +In Nix, it's impossible to say "GCC is the result of applying GCC to the GCC source", because that happens to be infinite recursion. + +Bootstrapping in Nix works by introducing a binary pinned by its full-hash, which was built on some previous Nix version. + +Unfortunately also just a magic binary blob ... ### 20 + +NixOS is not actively porting all of Debian's reproducibility patches, but builds are fully repeatable: + +- introducing a malicious compiler would produce a different input hash -> different package + +Future slide: hope is not lost! Things are underway.### 21 + +- bootstrappable.org (demo?) is an umbrella page for several projects working on bootstrappability + +- stage0 is an important piece: manually, small, auditable Hex programs to get to a Hex macro expander + +- end goal is a full-source bootrap, but pieces are missing### 22 + +MES is out of the GuixSD circles (explain Guix, GNU Hurd joke) + +- idea being that once you have a Lisp, you have all of computing (as Alan Key said) + +- includes MesCC in Scheme -> can *almost* make a working tinyCC -> can *almost* make a working gcc 4.7 + +- minimal Scheme interpreter, currently built in C to get the higher-level stuff to work, goal is rewrite in hex +- bootstrapping Guix is the end goal### 23 + +- userspace in Darwin has a Nix project +- unsure about other BSDs, but if anyone knows - input welcome! +- F-Droid has reproducible Android packages, but that's also userspace only +- All other mobile platforms are a lost cause + +Generally, all closed-source software is impossible to trust. diff --git a/users/tazjin/presentations/erlang-2016/.skip-subtree b/users/tazjin/presentations/erlang-2016/.skip-subtree new file mode 100644 index 000000000000..e69de29bb2d1 --- /dev/null +++ b/users/tazjin/presentations/erlang-2016/.skip-subtree diff --git a/users/tazjin/presentations/erlang-2016/README.md b/users/tazjin/presentations/erlang-2016/README.md new file mode 100644 index 000000000000..e1b6c83b99cc --- /dev/null +++ b/users/tazjin/presentations/erlang-2016/README.md @@ -0,0 +1,6 @@ +These are the slides for a presentation I gave for the Oslo javaBin meetup in +2016. + +Unfortunately there is no recording of the presentation due to a technical error +(video was recorded, but no audio). This is a bit of a shame because I think +these are some of the best slides I've ever made. diff --git a/users/tazjin/presentations/erlang-2016/presentation.md b/users/tazjin/presentations/erlang-2016/presentation.md new file mode 100644 index 000000000000..526564b88268 --- /dev/null +++ b/users/tazjin/presentations/erlang-2016/presentation.md @@ -0,0 +1,222 @@ +slidenumbers: true +Erlang. +====== + +### Fault-tolerant, concurrent programming. + +--- + +## A brief history of Erlang + +--- + +![](https://www.ericsson.com/thinkingahead/the-networked-society-blog/wp-content/uploads/2014/09/bfW5FSr.jpg) + + +^ Telefontornet in Stockholm, around 1890. Used until 1913. + +--- + +![](https://3.bp.blogspot.com/-UF7W9yTUO2g/VBqw-1HNTzI/AAAAAAAAPeg/KvsMbNSAcII/s1600/6835942484_1531372d8f_b.jpg) + +^ Telephones were operated manually at Switchboards. Anyone old enough to remember? I'm certainly not. + +--- + +![fit](https://russcam.github.io/fsharp-akka-talk/images/ericsson-301-AXD.png) + +^ Eventually we did that in software, and we got better at it over time. Ericsson AXD 301, first commercial Erlang switch. But lets take a step back. 
+ +--- + +## Phone switches must be ... + +Highly concurrent + +Fault-tolerant + +Distributed + +(Fast!) + +![right 150%](http://learnyousomeerlang.com/static/img/erlang-the-movie.png) + +--- + +## ... and so is Erlang! + +--- + +## Erlang as a whole: + +- Unique process model (actors!) +- Built-in fault-tolerance & error handling +- Distributed processes +- Three parts! + +--- + +## Part 1: Erlang, the language + +- Functional +- Prolog-inspired syntax +- Everything is immutable +- *Extreme* pattern-matching + +--- +### Hello Joe + +```erlang +hello_joe. +``` + +--- +### Hello Joe + +```erlang +-module(hello1). +-export([hello_joe/0]). + +hello_joe() -> + hello_joe. +``` + +--- +### Hello Joe + +```erlang +-module(hello1). +-export([hello_joe/0]). + +hello_joe() -> + hello_joe. + +% 1> c(hello1). +% {ok,hello1} +% 2> hello1:hello_joe(). +% hello_joe +``` + +--- +### Hello Joe + +```erlang +-module(hello2). +-export([hello/1]). + +hello(Name) -> + io:format("Hello ~s!~n", [Name]). + +% 3> c(hello2). +% {ok,hello2} +% 4> hello2:hello("Joe"). +% Hello Joe! +% ok +``` + +--- + +## [fit] Hello ~~world~~ Joe is boring! +## [fit] Lets do it with processes. + +--- +### Hello Server + +```erlang +-module(hello_server). +-export([start_server/0]). + +start_server() -> + spawn(fun() -> server() end). + +server() -> + receive + {greet, Name} -> + io:format("Hello ~s!~n", [Name]), + server() + end. +``` + +--- + +## [fit] Some issues with that ... + +- What about unused messages? +- What if the server crashes? + +--- + +## [fit] Part 2: Open Telecom Platform + +### **It's called Erlang/OTP for a reason.** + +--- + +# OTP: An Application Framework + +- Supervision - keep processes alive! + +- OTP Behaviours - common process patterns + +- Extensive standard library + +- Error handling, debuggers, testing, ... + +- Lots more! + +^ Standard library includes lots of things from simple network libraries over testing frameworks to cryptography, complete LDAP clients etc. + +--- + +# Supervision + +![inline](http://erlang.org/doc/design_principles/sup6.gif) + +^ Supervision keeps processes alive, different restart behaviours, everything should be supervised to avoid "process" (and therefore memory) leaks + +--- + +# OTP Behaviours + +* `gen_server` +* `gen_statem` +* `gen_event` +* `supervisor` + +^ gen = generic. explain server, explain statem, event = event handling with registered handlers, supervisor ... + +--- + +`gen_server` + +--- + +## [fit] Part 3: BEAM + +### Bogdan/Bjørn Erlang Abstract machine + +--- + +## A VM for Erlang + +* Many were written, BEAM survived +* Concurrent garbage-collection +* Lower-level bytecode than JVM +* Very open to new languages + (Elixir, LFE, Joxa, ...) + +--- + +## What next? + +* Ole's talk, obviously! +* Learn You Some Erlang! + www.learnyousomeerlang.com +* Watch *Erlang the Movie* +* (soon!) Join the Oslo BEAM meetup group + +--- + +# [fit] Questions? + +`@tazjin` diff --git a/users/tazjin/presentations/erlang-2016/presentation.pdf b/users/tazjin/presentations/erlang-2016/presentation.pdf new file mode 100644 index 000000000000..ec8d996704b2 --- /dev/null +++ b/users/tazjin/presentations/erlang-2016/presentation.pdf Binary files differdiff --git a/users/tazjin/presentations/erlang-2016/src/hello.erl b/users/tazjin/presentations/erlang-2016/src/hello.erl new file mode 100644 index 000000000000..56404a0c5a20 --- /dev/null +++ b/users/tazjin/presentations/erlang-2016/src/hello.erl @@ -0,0 +1,5 @@ +-module(hello). +-export([hello_joe/0]). + +hello_joe() -> + hello_joe. 
diff --git a/users/tazjin/presentations/erlang-2016/src/hello1.erl b/users/tazjin/presentations/erlang-2016/src/hello1.erl new file mode 100644 index 000000000000..ca78261399e1 --- /dev/null +++ b/users/tazjin/presentations/erlang-2016/src/hello1.erl @@ -0,0 +1,5 @@ +-module(hello1). +-export([hello_joe/0]). + +hello_joe() -> + hello_joe. diff --git a/users/tazjin/presentations/erlang-2016/src/hello2.erl b/users/tazjin/presentations/erlang-2016/src/hello2.erl new file mode 100644 index 000000000000..2d1f6c84c401 --- /dev/null +++ b/users/tazjin/presentations/erlang-2016/src/hello2.erl @@ -0,0 +1,11 @@ +-module(hello2). +-export([hello/1]). + +hello(Name) -> + io:format("Hey ~s!~n", [Name]). + +% 3> c(hello2). +% {ok,hello2} +% 4> hello2:hello("Joe"). +% Hello Joe! +% ok diff --git a/users/tazjin/presentations/erlang-2016/src/hello_server.erl b/users/tazjin/presentations/erlang-2016/src/hello_server.erl new file mode 100644 index 000000000000..01df14ac57d5 --- /dev/null +++ b/users/tazjin/presentations/erlang-2016/src/hello_server.erl @@ -0,0 +1,12 @@ +-module(hello_server). +-export([start_server/0, server/0]). + +start_server() -> + spawn(fun() -> server() end). + +server() -> + receive + {greet, Name} -> + io:format("Hello ~s!~n", [Name]), + hello_server:server() + end. diff --git a/users/tazjin/presentations/erlang-2016/src/hello_server2.erl b/users/tazjin/presentations/erlang-2016/src/hello_server2.erl new file mode 100644 index 000000000000..24bb934ee503 --- /dev/null +++ b/users/tazjin/presentations/erlang-2016/src/hello_server2.erl @@ -0,0 +1,36 @@ +-module(hello_server2). +-behaviour(gen_server). +-compile(export_all). + +%%% Start callback for supervisor +start_link() -> + gen_server:start_link({local, ?MODULE}, ?MODULE, [], []). + +%%% gen_server callbacks + +init([]) -> + {ok, sets:new()}. + +handle_call({greet, Name}, _From, State) -> + io:format("Hello ~s!~n", [Name]), + NewState = sets:add_element(Name, State), + {reply, ok, NewState}; + +handle_call({bye, Name}, _From, State) -> + io:format("Goodbye ~s!~n", [Name]), + NewState = sets:del_element(Name, State), + {reply, ok, NewState}. + +terminate(normal, State) -> + [io:format("Goodbye ~s!~n", [Name]) || Name <- State], + ok. + +%%% Unused gen_server callbacks +code_change(_OldVsn, State, _Extra) -> + {ok, State}. + +handle_info(_Info, State) -> + {noreply, State}. + +handle_cast(_Request, State) -> + {noreply, State}. diff --git a/users/tazjin/presentations/erlang-2016/src/hello_sup.erl b/users/tazjin/presentations/erlang-2016/src/hello_sup.erl new file mode 100644 index 000000000000..7fee0928c575 --- /dev/null +++ b/users/tazjin/presentations/erlang-2016/src/hello_sup.erl @@ -0,0 +1,24 @@ +-module(hello_sup). +-behaviour(supervisor). +-export([start_link/0, init/1]). + +%%% Module API + +start_link() -> + supervisor:start_link({local, ?MODULE}, ?MODULE, []). + +%%% Supervisor callbacks + +init([]) -> + Children = [hello_spec()], + {ok, { {one_for_one, 5, 10}, Children}}. + +%%% Private + +hello_spec() -> + #{id => hello_server2, + start => {hello_server2, start_link, []}, + restart => permanent, + shutdown => 5000, + type => worker, + module => [hello_server2]}. 
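Side note for readers coming from outside the Erlang world: the receive loop in `hello_server.erl` above maps fairly directly onto a thread that owns a channel. The following is a loose Rust sketch of the same greeting server; it is purely illustrative and not part of the talk, the `Msg` type and all names are invented, and it only uses `std::sync::mpsc` and `std::thread`.

```rust
use std::sync::mpsc;
use std::thread;
use std::time::Duration;

enum Msg {
    Greet(String),
}

// The "server" owns a mailbox and loops on it, like server/0 above.
fn start_server() -> mpsc::Sender<Msg> {
    let (tx, rx) = mpsc::channel();
    thread::spawn(move || {
        for msg in rx {
            match msg {
                Msg::Greet(name) => println!("Hello {}!", name),
            }
        }
    });
    tx
}

fn main() {
    let server = start_server();
    server.send(Msg::Greet("Joe".into())).unwrap();
    // Crude stand-in for synchronisation so the thread gets to print
    // before the process exits.
    thread::sleep(Duration::from_millis(50));
}
```

What the sketch deliberately leaves out is everything OTP layers on top of the bare loop in `hello_server2.erl` and `hello_sup.erl`: supervision, restarts and distribution.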
diff --git a/users/tazjin/presentations/servant-2016/Makefile b/users/tazjin/presentations/servant-2016/Makefile new file mode 100644 index 000000000000..96115ec2cbfc --- /dev/null +++ b/users/tazjin/presentations/servant-2016/Makefile @@ -0,0 +1,8 @@ +all: slides

+slides:
+	lualatex --shell-escape slides.tex

+clean:
+	rm -f slides.aux slides.log slides.nav \
+		slides.out slides.toc slides.snm diff --git a/users/tazjin/presentations/servant-2016/README.md b/users/tazjin/presentations/servant-2016/README.md new file mode 100644 index 000000000000..8cfb04a42417 --- /dev/null +++ b/users/tazjin/presentations/servant-2016/README.md @@ -0,0 +1,7 @@ +These are the slides for my presentation about [servant][] at [Oslo Haskell][].

+A full video recording of the presentation is available [on Vimeo][].

+[servant]: https://haskell-servant.github.io/
+[Oslo Haskell]: http://www.meetup.com/Oslo-Haskell/events/227107530/
+[on Vimeo]: https://vimeo.com/153901805 diff --git a/users/tazjin/presentations/servant-2016/slides.pdf b/users/tazjin/presentations/servant-2016/slides.pdf new file mode 100644 index 000000000000..842a667e1bcc --- /dev/null +++ b/users/tazjin/presentations/servant-2016/slides.pdf Binary files differ
diff --git a/users/tazjin/presentations/servant-2016/slides.pdfpc b/users/tazjin/presentations/servant-2016/slides.pdfpc new file mode 100644 index 000000000000..ed46003768c0 --- /dev/null +++ b/users/tazjin/presentations/servant-2016/slides.pdfpc @@ -0,0 +1,75 @@ +[file]
+slides.pdf
+[font_size]
+10897
+[notes]
+### 1
+13### 2
+Let's talk about servant, which is several things:
+an API description DSL; we'll speak about how this DSL works
+and why it's at the type level

+interpretations of the types resulting from that DSL, for example in
+web servers or API clients

+Servant is commonly used for implementing services with APIs, or for accessing
+other APIs with a simple, typed client
+### 3
+Why type-level DSLs?
+Type-level DSL: express *something*, e.g. the endpoints of an API, on the type level by combining types. Types can be uninhabited

+Phil Wadler's expression problem: things should be extensible both in the cases of a type, and in the functions operating on the type
+Normal data types: can't add new constructors easily
+Servant lifts this up by simply allowing the declaration of new types that can be included in the DSL, and new interpretations that can be attached to the types through typeclasses

+APIs become first-class citizens, can pass them around, combine them etc, they are separate from interpretations such as server implementations.
In contrast, in most webframeworks, API declaration is implicit + +(Mention previous attemps at type-safe web, Yesod / web-routes + boomerang etc) +### 4 +Three extensions are necessary: +TypeOperators lets us use infix operators on the type level as constructors +DataKinds promotes new type declarations to the kind level, makes type-level literals (strings and natural numbers) available, lets us use type-level lists and pairs in combination with typeoperators +TypeFamilies: Type-level functions, map one set of types to another, come in two forms (type families, non-injective; data families, injective), more powerful than associated types +### 5 +Here you can see servant's general syntax, we define an API type as a simple alias of some other type combinations +strings are type-level strings, not actually values, represent path elements +endpoints are separated by :<|>, all endpoints end in a method with content types and return types +Capture captures path segments, but there are other combinators, for example for headers +Everything that is used from the request is expressed in types, enforcing checkability, no "escape hatch" inside handlers to get request +Every combinator has associated interpretations through typeclasses +### 6 +Explain type alias, point out Capture +Server is a type level function (type family), as mentioned earlier +### 7 +If we expand server (in ghci with kind!) we can see the actual type of the +function +### 8 +Lets speak about some interpretations of these things +### 9 +Servant server is the main interpretation that people are interested in, it's used +for taking a type specification and creating a server from it +Based on WAI, the web application interface, common abstraction for web servers which came out of the Yesod project. Implemented by the web server warp, which Yesod runs on +### 10 +Explain snippet, path gets removed from server type (irrelevant for handler), +route extracts string to value level +### 11 +Explain echo server quickly +### 12 +servant client allows generation of Haskell functions that query the API with the same types +this makes for easy to use RPC for example +### 13 +A lot of other interpretations exist for all kinds of things, mock servers for testing, foreign functions in various languages, documentation ... +### 14 +Demo! +1. Go quickly through code +2. Run server, query with curl +3. Open javascript function +4. Show JS code in the thing +5. Open the map itself +6. Open GHCi, use client +7. Generate docs +### 15 +Conclusion +Servant is pretty good, it's very easy to get started and it's great to raise the level of things that the compiler can tell you about when you do them wrong. +### 16 +Drawbacks. 
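To make the "description separate from interpretation" point from these notes concrete without assuming Haskell, here is a loose value-level sketch in Rust. It is not servant's API and every name in it is invented; it only shows the same shape: one data structure describing roughly the `pubs/tagged` endpoint from the slides, and two independent interpretations over it.

```rust
// A value-level analogy of the idea above: the API is plain data, and each
// "interpretation" is an independent function over it. Servant does this at
// the type level with typeclasses; everything here is invented.
#[derive(Debug)]
enum Api {
    Path(&'static str, Box<Api>),    // static path segment, like "pubs" :>
    Capture(&'static str, Box<Api>), // captured segment, like Capture "tag" Text
    Get(&'static str),               // terminal GET with a content type
}

// Interpretation 1: render human-readable documentation.
fn docs(api: &Api) -> String {
    match api {
        Api::Path(seg, rest) => format!("/{}{}", seg, docs(rest)),
        Api::Capture(name, rest) => format!("/:{}{}", name, docs(rest)),
        Api::Get(ct) => format!(" -> GET ({})", ct),
    }
}

// Interpretation 2: count how many arguments a generated client would need.
fn client_arity(api: &Api) -> usize {
    match api {
        Api::Path(_, rest) => client_arity(rest),
        Api::Capture(_, rest) => 1 + client_arity(rest),
        Api::Get(_) => 0,
    }
}

fn main() {
    // Roughly the "pubs/tagged/:tag" endpoint from the slides.
    let tagged = Api::Path(
        "pubs",
        Box::new(Api::Path(
            "tagged",
            Box::new(Api::Capture("tag", Box::new(Api::Get("JSON")))),
        )),
    );
    println!("{}", docs(&tagged));
    println!("client needs {} argument(s)", client_arity(&tagged));
}
```

The trade-off the notes mention also shows up here: with a closed enum, adding a new combinator means editing every interpretation, which is the side of the expression problem that servant's typeclass-per-combinator encoding (e.g. `HasServer`) is designed to avoid.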
diff --git a/users/tazjin/presentations/servant-2016/slides.tex b/users/tazjin/presentations/servant-2016/slides.tex new file mode 100644 index 000000000000..d5947eb9421a --- /dev/null +++ b/users/tazjin/presentations/servant-2016/slides.tex @@ -0,0 +1,137 @@ +\documentclass[12pt]{beamer} +\usetheme{metropolis} +\usepackage{minted} + +\newenvironment{code}{\ttfamily}{\par} + +\title{servant} +\subtitle{Defining web APIs at the type-level} + +\begin{document} +\metroset{titleformat frame=smallcaps} +\setminted{fontsize=\scriptsize} + + +\maketitle + +\section{Introduction} + +\begin{frame}{Type-level DSLs?} + \begin{itemize} + \item (Uninhabited) types with attached ``meaning'' + \item The Expression Problem (Wadler 1998) + \item API representation and interpretation are separated + \item APIs become first-class citizens + \end{itemize} +\end{frame} + +\begin{frame}{Haskell extensions} + \begin{itemize} + \item TypeOperators + \item DataKinds + \item TypeFamilies + \end{itemize} +\end{frame} + +\begin{frame}[fragile]{A servant example} + \begin{minted}{haskell} + type PubAPI = "pubs" :> Get ’[JSON] [Pub] + :<|> "pubs" :> "tagged" + :> Capture "tag" Text + :> Get ’[JSON] [Pub] + \end{minted} +\end{frame} + +\begin{frame}[fragile]{Computed types} + \begin{minted}{haskell} + type TaggedPubs = "tagged" :> Capture "tag" Text :> ... + + taggedPubsHandler :: Server TaggedPubs + taggedPubsHandler tag = ... + \end{minted} +\end{frame} + +\begin{frame}[fragile]{Computed types} + \begin{minted}{haskell} + type TaggedPubs = "tagged" :> Capture "tag" Text :> ... + + taggedPubsHandler :: Server TaggedPubs + taggedPubsHandler tag = ... + + Server TaggedPubs ~ + Text -> EitherT ServantErr IO [Pub] + \end{minted} +\end{frame} + +\section{Interpretations} + +\begin{frame}{servant-server} + The one everyone is interested in! + + \begin{itemize} + \item Based on WAI, can run on warp + \item Interprets combinators with a simple \texttt{HasServer c} class + \item Easy to use! + \end{itemize} +\end{frame} + +\begin{frame}[fragile]{HasServer ...} + \begin{minted}{haskell} + instance (KnownSymbol path, HasServer sublayout) + => HasServer (path :> sublayout) where + type ServerT (path :> sublayout) m = ServerT sublayout m + + route ... + where + pathString = symbolVal (Proxy :: Proxy path) + \end{minted} +\end{frame} + +\begin{frame}[fragile]{Server example} + \begin{minted}{haskell} + type Echo = Capture "echo" Text :> Get ’[PlainText] Text + + echoAPI :: Proxy Echo + echoAPI = Proxy + + echoServer :: Server Echo + echoServer = return + \end{minted} +\end{frame} + +\begin{frame}{servant-client} + \begin{itemize} + \item Generates Haskell client functions for API + \item Same types as API specification: For RPC the whole ``web layer'' is abstracted away + \item Also easy to use! + \end{itemize} +\end{frame} + +\begin{frame}{servant-docs, servant-js ...} + Many other interpretations exist already, for example: + \begin{itemize} + \item Documentation generation + \item Foreign function export (e.g. 
Elm, JavaScript) + \item Mock-server generation + \end{itemize} +\end{frame} + +\section{Demo} + +\section{Conclusion} + +\begin{frame}{Drawbacks} + \begin{itemize} + \item Haskell has no custom open kinds (yet) + \item Proxies are ugly + \item Errors can be a bit daunting + \end{itemize} +\end{frame} + +\begin{frame}{Questions?} + Ølkartet: github.com/tazjin/pubkartet \\ + Slides: github.com/tazjin/servant-presentation + + @tazjin +\end{frame} +\end{document} diff --git a/users/tazjin/presentations/systemd-2016/.gitignore b/users/tazjin/presentations/systemd-2016/.gitignore new file mode 100644 index 000000000000..1a38620fe9cc --- /dev/null +++ b/users/tazjin/presentations/systemd-2016/.gitignore @@ -0,0 +1,6 @@ +slides.aux +slides.log +slides.nav +slides.out +slides.snm +slides.toc diff --git a/users/tazjin/presentations/systemd-2016/.skip-subtree b/users/tazjin/presentations/systemd-2016/.skip-subtree new file mode 100644 index 000000000000..108b3507ddd1 --- /dev/null +++ b/users/tazjin/presentations/systemd-2016/.skip-subtree @@ -0,0 +1 @@ +No Nix files will ever be under this tree ... diff --git a/users/tazjin/presentations/systemd-2016/Makefile b/users/tazjin/presentations/systemd-2016/Makefile new file mode 100644 index 000000000000..ac5dde3cb32f --- /dev/null +++ b/users/tazjin/presentations/systemd-2016/Makefile @@ -0,0 +1,11 @@ +all: slides.pdf + +slides.toc: + lualatex slides.tex + +slides.pdf: slides.toc + lualatex slides.tex + +clean: + rm -f slides.aux slides.log slides.nav \ + slides.out slides.toc slides.snm diff --git a/users/tazjin/presentations/systemd-2016/README.md b/users/tazjin/presentations/systemd-2016/README.md new file mode 100644 index 000000000000..7f004b7d14ca --- /dev/null +++ b/users/tazjin/presentations/systemd-2016/README.md @@ -0,0 +1,6 @@ +This repository contains the slides for my systemd presentation at Hackeriet. + +Requires LaTeX, [beamer][] and the [metropolis][] theme. 
+ +[beamer]: http://mirror.hmc.edu/ctan/macros/latex/contrib/beamer/ +[metropolis]: https://github.com/matze/mtheme diff --git a/users/tazjin/presentations/systemd-2016/demo/demo-error.service b/users/tazjin/presentations/systemd-2016/demo/demo-error.service new file mode 100644 index 000000000000..b2d4c9d34799 --- /dev/null +++ b/users/tazjin/presentations/systemd-2016/demo/demo-error.service @@ -0,0 +1,7 @@ +[Unit] +Description=Demonstrate failing units +OnFailure=demo-notify@%n.service + +[Service] +Type=oneshot +ExecStart=/usr/bin/false diff --git a/users/tazjin/presentations/systemd-2016/demo/demo-limits.slice b/users/tazjin/presentations/systemd-2016/demo/demo-limits.slice new file mode 100644 index 000000000000..998185d26177 --- /dev/null +++ b/users/tazjin/presentations/systemd-2016/demo/demo-limits.slice @@ -0,0 +1,7 @@ +[Unit] +Description=Limited resources demo +DefaultDependencies=no +Before=slices.target + +[Slice] +CPUQuota=10% diff --git a/users/tazjin/presentations/systemd-2016/demo/demo-notify@.service b/users/tazjin/presentations/systemd-2016/demo/demo-notify@.service new file mode 100644 index 000000000000..e25524b4e230 --- /dev/null +++ b/users/tazjin/presentations/systemd-2016/demo/demo-notify@.service @@ -0,0 +1,6 @@ +[Unit] +Description=Demonstrate systemd templating by sending a notification + +[Service] +Type=oneshot +ExecStart=/usr/bin/notify-send 'Systemd notification' '%i' diff --git a/users/tazjin/presentations/systemd-2016/demo/demo-path.path b/users/tazjin/presentations/systemd-2016/demo/demo-path.path new file mode 100644 index 000000000000..87f1342da995 --- /dev/null +++ b/users/tazjin/presentations/systemd-2016/demo/demo-path.path @@ -0,0 +1,6 @@ +[Unit] +Description=Demonstrate systemd path units + +[Path] +DirectoryNotEmpty=/tmp/hackeriet +Unit=demo.service diff --git a/users/tazjin/presentations/systemd-2016/demo/demo-stress.service b/users/tazjin/presentations/systemd-2016/demo/demo-stress.service new file mode 100644 index 000000000000..7e14f13e29d9 --- /dev/null +++ b/users/tazjin/presentations/systemd-2016/demo/demo-stress.service @@ -0,0 +1,6 @@ +[Unit] +Description=Stress test CPU + +[Service] +Slice=demo.slice +ExecStart=/usr/bin/stress -c 5 diff --git a/users/tazjin/presentations/systemd-2016/demo/demo-timer.timer b/users/tazjin/presentations/systemd-2016/demo/demo-timer.timer new file mode 100644 index 000000000000..34eccb98b02a --- /dev/null +++ b/users/tazjin/presentations/systemd-2016/demo/demo-timer.timer @@ -0,0 +1,12 @@ +[Unit] +Description=Demonstrate systemd timers + +[Timer] +OnActiveSec=2 +OnUnitActiveSec=5 +AccuracySec=5 +Unit=demo.service +# OnCalendar=Thu,Fri 2016-*-1,5 11:12:13 + +[Install] +WantedBy=multi-user.target diff --git a/users/tazjin/presentations/systemd-2016/demo/demo.service b/users/tazjin/presentations/systemd-2016/demo/demo.service new file mode 100644 index 000000000000..fcc710ad933f --- /dev/null +++ b/users/tazjin/presentations/systemd-2016/demo/demo.service @@ -0,0 +1,6 @@ +[Unit] +Description=Demo unit for systemd + +[Service] +Type=oneshot +ExecStart=/usr/bin/echo "Systemd unit activated. Hello Hackeriet." 
diff --git a/users/tazjin/presentations/systemd-2016/demo/notes.md b/users/tazjin/presentations/systemd-2016/demo/notes.md new file mode 100644 index 000000000000..b4866b1642bb --- /dev/null +++ b/users/tazjin/presentations/systemd-2016/demo/notes.md @@ -0,0 +1,27 @@ +# simple oneshot + +Run `demo-notify@hello.service` + +# simple timer + +Run `demo-timer.timer`, show both + +# enabling + +Enable `demo-timer.timer`, go to symlink folder, disable + +# OnError + +Show & run `demo-error.service` + +# cgroups demo + +Start `demo-stress.service` without, show in htop, stop +Show slice unit, start slice unit +Add Slice=demo-limits.slice +daemon-reload +Start stress again + +# Proper service + +Look at nginx unit diff --git a/users/tazjin/presentations/systemd-2016/slides.pdf b/users/tazjin/presentations/systemd-2016/slides.pdf new file mode 100644 index 000000000000..384db2a6e0af --- /dev/null +++ b/users/tazjin/presentations/systemd-2016/slides.pdf Binary files differdiff --git a/users/tazjin/presentations/systemd-2016/slides.pdfpc b/users/tazjin/presentations/systemd-2016/slides.pdfpc new file mode 100644 index 000000000000..99326bd8bf4e --- /dev/null +++ b/users/tazjin/presentations/systemd-2016/slides.pdfpc @@ -0,0 +1,85 @@ +[file] +slides.pdf +[notes] +### 1 +### 2 +Let's start off by looking at what an init system is, how they used to work and what systemd does different before we go into more systemd-specific details. +### 3 +system processes that are started include for example FS mounts, network settings, powertop... +system services are long-running processes such as daemons, e.g. SSH, database or web servers, session managers, udev ... + +orphans: Process whose parent has finished somehow, gets adopted by init system +-> when a process terminates its parent must call wait() to get its exit() code, if there is no init system adopting orphans the process would become a zombie +### 4 +Before systemd there were simple init systems that just did the tasks listed on the previous slide. +Init scripts -> increased greatly in complexity over time, look at incomprehensible skeleton for Debian service init scripts +Runlevels -> things such as single-user mode, full multiuser mode, reboot, halt + +Init will run all the scripts, but it will not do much more than print information on success/failure of started scripts + +Init scripts run strictly sequential + +Init is unaware of inter-service dependencies, expressed through prefixing scripts with numbers etc. + +Init will not watch processes after system is booted -> crashing daemons will not automatically restart +### 5 +### 6 +How systemd came to be + +Considering the lack of process monitoring, problematic things about init scripts -> legacy init systems have drawbacks + +Apple had already built launchd, a more featured init system that monitored running processes, could automatically restart them and allowed for certain advanced features -> however it is awful to use and wrap your head around + +Lennart Poettering of Pulseaudio fame and Kay Sievers decided to implement a new init system to address these problems, while taking certain clues from Apple's design +### 7 +Systemd's design goals +### 8 +No more init scripts with opaque effects -> services are clearly defined units +Unit dependencies -> systemd can figure out what can be started in parallel +Process supervision: Unit can be configured in many ways, e.g. 
always restart, only restart on success etc +Service logs: We'll talk more about this later +### 9 +Units are the core component of systemd that users deal with. They define services and everything else that systemd needs to start and manage. +Note that all these are the names of the respective man page on a system with systemd installed +Types: +systemd.service - processes controlled by systemd +systemd.target - equivalent to "runlevels", grouping of units for synchronisation +systemd.timer - more powerful replacement of cron that starts other units +systemd.path - systemd equvialent of inotify, watches files/folders -> launches units +systemd.socket - expose local IPC or network sockets, launch units on connections +systemd.device - trigger units when certain devices are connected +systemd.mount - systemd equivalent of fstab entries +systemd.swap - like mount +systemd.slice - unit groups for resource management purposes +... and a few more specialised ones +### 10 +Linux cgroups are a new resource management feature added quite a long time ago, but not used much. +Cgroups can be created manually and processes can be moved into them in order to control resource utilisation +Few people used them before systemd, limits.conf was often much easier but not as fine-grained +Systemd changed this +### 11 +Systemd collects standard output and stderr from all processes into its journal system +they provide a tool for querying the log, for example grouping service logs together with correct timestamps, querying, +### 12 +Systemd tooling, most important one is systemctl for general service management +journalctl is the query and management tool for journald +systemd-analyze is used for figuring out performance issues, for example by analysing the boot process, can make cool graphs of dependencies +systemd-cgtop is like top, but not on a process level - it's on a cgroup/slice level, shows combined usage of cgroups +systemd-cgls lists contents of systemd's cgroups to see which services are in what group +there also exist a bunch of others that we'll skip for now +### 13 +### 14 +### 15 +Systemd criticism comes from many directions and usually focuses on a few points +feature-creep: systemd is absorbing a lot of different services +### 16 +explain diagram a bit +### 17 +opaque: as a result, systemd has a lot more internal complexity that people can't easily wrap your mind around. However I argue that unless you're using something like suckless' sinit with your own scripts, you probably have no idea what your init does today anyways +unstable: this was definitely true even in the first stable release, with the binary log format getting corrupted for example. I haven't personally experienced any trouble with it recently though. +Another thing is that services start depending on systemd when they shouldn't, a problem for the BSD world (who cares (hey christoph!)) +### 18 +Despite criticism, systemd was adopted rapidly by large portions of the Linux +Initially in RedHat, because Poettering and co work there and it was clear from the beginning that it would be there +ArchLinux (which I'm using) and a few others followed suit quite quickly +Eventually, the big Debian init system discussion - after a lot of flaming - led to Debian adopting it as well, which had a ripple effect for related distros such as Ubuntu which abandoned upstart for it. 
\ No newline at end of file diff --git a/users/tazjin/presentations/systemd-2016/slides.tex b/users/tazjin/presentations/systemd-2016/slides.tex new file mode 100644 index 000000000000..c613cefd7ec4 --- /dev/null +++ b/users/tazjin/presentations/systemd-2016/slides.tex @@ -0,0 +1,160 @@ +\documentclass[12pt]{beamer} +\usetheme{metropolis} + +\newenvironment{code}{\ttfamily}{\par} + +\title{systemd} +\subtitle{The standard Linux init system} + +\begin{document} +\metroset{titleformat frame=smallcaps} + +\maketitle + +\section{Introduction} + +\begin{frame}{What is an init system?} + An init system is the first userspace process (PID 1) started in a UNIX-like system. It handles: + + \begin{itemize} + \item Starting system processes and services to prepare the environment + \item Adopting and ``reaping'' orphaned processes + \end{itemize} +\end{frame} + +\begin{frame}{Classical init systems} + Init systems before systemd - such as SysVinit - were very simple. + + \begin{itemize} + \item Services and processes to run are organised into ``init scripts'' + \item Scripts are linked to specific runlevels + \item Init system is configured to boot into a runlevel + \end{itemize} + +\end{frame} + +\section{systemd} + +\begin{frame}{Can we do better?} + \begin{itemize} + \item ``legacy'' init systems have a lot of drawbacks + \item Apple is taking a different approach on OS X + \item Systemd project was founded to address these issues + \end{itemize} +\end{frame} + +\begin{frame}{Systemd design goals} + \begin{itemize} + \item Expressing service dependencies + \item Monitoring service status + \item Enable parallel service startups + \item Ease of use + \end{itemize} +\end{frame} + +\begin{frame}{Systemd - the basics} + \begin{itemize} + \item No scripts are executed, only declarative units + \item Units have explicit dependencies + \item Processes are supervised + \item cgroups are utilised to apply resource limits + \item Service logs are managed and centrally queryable + \item Much more! + \end{itemize} +\end{frame} + +\begin{frame}{Systemd units} + Units specify how and what to start. Several types exist: + \begin{code} + \small + \begin{columns}[T,onlytextwidth] + \column{0.5\textwidth} + \begin{itemize} + \item systemd.service + \item systemd.target + \item systemd.timer + \item systemd.path + \item systemd.socket + \end{itemize} + \column{0.5\textwidth} + \begin{itemize} + \item systemd.device + \item systemd.mount + \item systemd.swap + \item systemd.slice + \end{itemize} + \end{columns} + \end{code} +\end{frame} + + +\begin{frame}{Resource management} + Systemd utilises Linux \texttt{cgroups} for resource management, specifically CPU, disk I/O and memory usage. + + \begin{itemize} + \item Hierarchical setup of groups makes it easy to limit resources for a set of services + \item Units can be attached to a \texttt{systemd.slice} for controlling resources for a group of services + \item Resource limits can also be specified directly in the unit + \end{itemize} +\end{frame} + +\begin{frame}{journald} + Systemd comes with an integrated log management solution, replacing software such as \texttt{syslog-ng}. 
+ \begin{itemize} + \item All process output is collected in the journal + \item \texttt{journalctl} tool provides many options for querying and tailing logs + \item Children of processes automatically log to the journal as well + \item \textbf{Caveat:} Hard to learn initially + \end{itemize} +\end{frame} + +\begin{frame}{Systemd tooling} + A variety of CLI-tools exist for managing systemd systems. + \begin{code} + \begin{itemize} + \item systemctl + \item journalctl + \item systemd-analyze + \item systemd-cgtop + \item systemd-cgls + \end{itemize} + \end{code} + + Let's look at some of them. +\end{frame} + +\section{Demo} + +\section{Controversies} + +\begin{frame}{Systemd criticism} + Systemd has been heavily criticised, usually focusing around a few points: + \begin{itemize} + \item Feature-creep: Systemd absorbs more and more other services + \end{itemize} +\end{frame} + +\begin{frame}{Systemd criticism} + \includegraphics[keepaspectratio=true,width=\textwidth]{systemdcomponents.png} +\end{frame} + +\begin{frame}{Systemd criticism} + Systemd has been heavily criticised, usually focusing around a few points: + \begin{itemize} + \item Feature-creep: Systemd absorbs more and more other services + \item Opaque: systemd's inner workings are harder to understand than old \texttt{init} + \item Unstable: development is quick and breakage happens + \end{itemize} +\end{frame} + +\begin{frame}{Systemd adoption} + Systemd was initially adopted by RedHat (and related distributions). + + It spread quickly to others, for example ArchLinux. + + Debian and Ubuntu were the last major players who decided to adopt it, but not without drama. +\end{frame} + +\section{Questions?} + +\end{document} diff --git a/users/tazjin/presentations/systemd-2016/systemdcomponents.png b/users/tazjin/presentations/systemd-2016/systemdcomponents.png new file mode 100644 index 000000000000..a22c762f7e13 --- /dev/null +++ b/users/tazjin/presentations/systemd-2016/systemdcomponents.png Binary files differdiff --git a/users/tazjin/renderMarkdown.nix b/users/tazjin/renderMarkdown.nix new file mode 100644 index 000000000000..58f29c30e0f3 --- /dev/null +++ b/users/tazjin/renderMarkdown.nix @@ -0,0 +1,9 @@ +# Render a Markdown file to HTML. +{ depot, ... }: + +with depot.nix.yants; + +defun [ path drv ] (file: depot.third_party.runCommandNoCC "${file}.rendered.html" {} '' + cat ${file} | ${depot.tools.cheddar}/bin/cheddar --about-filter ${file} > $out +'') + diff --git a/users/tazjin/rlox/.gitignore b/users/tazjin/rlox/.gitignore new file mode 100644 index 000000000000..29e65519ba35 --- /dev/null +++ b/users/tazjin/rlox/.gitignore @@ -0,0 +1,3 @@ +result +/target +**/*.rs.bk diff --git a/users/tazjin/rlox/Cargo.lock b/users/tazjin/rlox/Cargo.lock new file mode 100644 index 000000000000..d8107726e067 --- /dev/null +++ b/users/tazjin/rlox/Cargo.lock @@ -0,0 +1,6 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +[[package]] +name = "rlox" +version = "0.1.0" + diff --git a/users/tazjin/rlox/Cargo.toml b/users/tazjin/rlox/Cargo.toml new file mode 100644 index 000000000000..b66af6ba85d3 --- /dev/null +++ b/users/tazjin/rlox/Cargo.toml @@ -0,0 +1,10 @@ +[package] +name = "rlox" +version = "0.1.0" +authors = ["Vincent Ambo <mail@tazj.in>"] +edition = "2018" + +[features] +# Enables debugging/disassembling in the bytecode interpreter. Off by +# default as it is quite spammy. 
+disassemble = [] diff --git a/users/tazjin/rlox/README.md b/users/tazjin/rlox/README.md new file mode 100644 index 000000000000..1d2692d09cc1 --- /dev/null +++ b/users/tazjin/rlox/README.md @@ -0,0 +1,7 @@ +This is an interpreter for the Lox language, based on the book "[Crafting +Interpreters](https://craftinginterpreters.com/)". + +The book's original code uses Java, but I don't want to use Java, so I've +decided to take on the extra complexity of porting it to Rust. + +Note: This implements the first of two Lox interpreters. diff --git a/users/tazjin/rlox/default.nix b/users/tazjin/rlox/default.nix new file mode 100644 index 000000000000..4b2d650cb585 --- /dev/null +++ b/users/tazjin/rlox/default.nix @@ -0,0 +1,5 @@ +{ pkgs, ... }: + +pkgs.naersk.buildPackage { + src = ./.; +} diff --git a/users/tazjin/rlox/rustfmt.toml b/users/tazjin/rlox/rustfmt.toml new file mode 100644 index 000000000000..df99c69198f5 --- /dev/null +++ b/users/tazjin/rlox/rustfmt.toml @@ -0,0 +1 @@ +max_width = 80 diff --git a/users/tazjin/rlox/src/bytecode/chunk.rs b/users/tazjin/rlox/src/bytecode/chunk.rs new file mode 100644 index 000000000000..7132be430a0f --- /dev/null +++ b/users/tazjin/rlox/src/bytecode/chunk.rs @@ -0,0 +1,93 @@ +use std::ops::Index; + +use super::opcode::OpCode; +use super::value; + +// In the book, this type is a hand-rolled dynamic array +// implementation in C. The main benefit of following that approach +// would be avoiding issues with OpCode variants not having equal +// sizes, but for the purpose of this I'm going to ignore that +// problem. +#[derive(Debug, Default)] +pub struct Chunk { + pub code: Vec<OpCode>, + lines: Vec<Span>, + constants: Vec<value::Value>, +} + +#[derive(Debug)] +struct Span { + /// Source code line + line: usize, + + /// Number of instructions derived from this line + count: usize, +} + +impl Chunk { + pub fn add_op(&mut self, data: OpCode, line: usize) -> usize { + let idx = self.code.len(); + self.code.push(data); + self.add_line(line); + idx + } + + pub fn add_constant(&mut self, data: value::Value) -> usize { + let idx = self.constants.len(); + self.constants.push(data); + idx + } + + pub fn constant(&self, idx: usize) -> &value::Value { + self.constants.index(idx) + } + + fn add_line(&mut self, line: usize) { + match self.lines.last_mut() { + Some(span) if span.line == line => span.count += 1, + _ => self.lines.push(Span { line, count: 1 }), + } + } + + pub fn get_line(&self, offset: usize) -> usize { + let mut pos = 0; + for span in &self.lines { + pos += span.count; + if pos > offset { + return span.line; + } + } + + panic!("invalid chunk state: line missing for offset {}", offset); + } +} + +// Disassembler + +/// Print a single disassembled instruction at the specified offset. +/// Some instructions are printed "raw", others have special handling. 
+#[cfg(feature = "disassemble")] +pub fn disassemble_instruction(chunk: &Chunk, offset: usize) { + print!("{:04} ", offset); + + let line = chunk.get_line(offset); + if offset > 0 && line == chunk.get_line(offset - 1) { + print!(" | "); + } else { + print!("{:4} ", line); + } + + match chunk.code.index(offset) { + OpCode::OpConstant(idx) => { + println!("OpConstant({}) '{:?}'", *idx, chunk.constant(*idx)) + } + op => println!("{:?}", op), + } +} + +#[cfg(feature = "disassemble")] +pub fn disassemble_chunk(chunk: &Chunk) { + for (idx, _) in chunk.code.iter().enumerate() { + disassemble_instruction(chunk, idx); + } +} diff --git a/users/tazjin/rlox/src/bytecode/compiler.rs b/users/tazjin/rlox/src/bytecode/compiler.rs new file mode 100644 index 000000000000..b8b91667d3fa --- /dev/null +++ b/users/tazjin/rlox/src/bytecode/compiler.rs @@ -0,0 +1,526 @@ +use super::chunk::Chunk; +use super::errors::{Error, ErrorKind, LoxResult}; +use super::interner::{InternedStr, Interner}; +use super::opcode::OpCode; +use super::value::Value; +use crate::scanner::{self, Token, TokenKind}; + +#[cfg(feature = "disassemble")] +use super::chunk; + +struct Compiler<T: Iterator<Item = Token>> { + tokens: T, + chunk: Chunk, + panic: bool, + errors: Vec<Error>, + strings: Interner, + + current: Option<Token>, + previous: Option<Token>, +} + +#[derive(Debug, PartialEq, PartialOrd)] +enum Precedence { + None, + Assignment, // = + Or, // or + And, // and + Equality, // == != + Comparison, // < > <= >= + Term, // + - + Factor, // * / + Unary, // ! - + Call, // . () + Primary, +} + +type ParseFn<T> = fn(&mut Compiler<T>) -> LoxResult<()>; + +struct ParseRule<T: Iterator<Item = Token>> { + prefix: Option<ParseFn<T>>, + infix: Option<ParseFn<T>>, + precedence: Precedence, +} + +impl<T: Iterator<Item = Token>> ParseRule<T> { + fn new( + prefix: Option<ParseFn<T>>, + infix: Option<ParseFn<T>>, + precedence: Precedence, + ) -> Self { + ParseRule { + prefix, + infix, + precedence, + } + } +} + +impl Precedence { + // Return the next highest precedence, if there is one. 
+ fn next(&self) -> Self { + match self { + Precedence::None => Precedence::Assignment, + Precedence::Assignment => Precedence::Or, + Precedence::Or => Precedence::And, + Precedence::And => Precedence::Equality, + Precedence::Equality => Precedence::Comparison, + Precedence::Comparison => Precedence::Term, + Precedence::Term => Precedence::Factor, + Precedence::Factor => Precedence::Unary, + Precedence::Unary => Precedence::Call, + Precedence::Call => Precedence::Primary, + Precedence::Primary => panic!( + "invalid parser state: no higher precedence than Primary" + ), + } + } +} + +fn rule_for<T: Iterator<Item = Token>>(token: &TokenKind) -> ParseRule<T> { + match token { + TokenKind::LeftParen => { + ParseRule::new(Some(Compiler::grouping), None, Precedence::None) + } + + TokenKind::Minus => ParseRule::new( + Some(Compiler::unary), + Some(Compiler::binary), + Precedence::Term, + ), + + TokenKind::Plus => { + ParseRule::new(None, Some(Compiler::binary), Precedence::Term) + } + + TokenKind::Slash => { + ParseRule::new(None, Some(Compiler::binary), Precedence::Factor) + } + + TokenKind::Star => { + ParseRule::new(None, Some(Compiler::binary), Precedence::Factor) + } + + TokenKind::Number(_) => { + ParseRule::new(Some(Compiler::number), None, Precedence::None) + } + + TokenKind::True => { + ParseRule::new(Some(Compiler::literal), None, Precedence::None) + } + + TokenKind::False => { + ParseRule::new(Some(Compiler::literal), None, Precedence::None) + } + + TokenKind::Nil => { + ParseRule::new(Some(Compiler::literal), None, Precedence::None) + } + + TokenKind::Bang => { + ParseRule::new(Some(Compiler::unary), None, Precedence::None) + } + + TokenKind::BangEqual => { + ParseRule::new(None, Some(Compiler::binary), Precedence::Equality) + } + + TokenKind::EqualEqual => { + ParseRule::new(None, Some(Compiler::binary), Precedence::Equality) + } + + TokenKind::Greater => { + ParseRule::new(None, Some(Compiler::binary), Precedence::Comparison) + } + + TokenKind::GreaterEqual => { + ParseRule::new(None, Some(Compiler::binary), Precedence::Comparison) + } + + TokenKind::Less => { + ParseRule::new(None, Some(Compiler::binary), Precedence::Comparison) + } + + TokenKind::LessEqual => { + ParseRule::new(None, Some(Compiler::binary), Precedence::Comparison) + } + + TokenKind::Identifier(_) => { + ParseRule::new(Some(Compiler::variable), None, Precedence::None) + } + + TokenKind::String(_) => { + ParseRule::new(Some(Compiler::string), None, Precedence::None) + } + + _ => ParseRule::new(None, None, Precedence::None), + } +} + +macro_rules! 
consume { + ( $self:ident, $expected:pat, $err:expr ) => { + match $self.current().kind { + $expected => $self.advance(), + _ => $self.error_at($self.current().line, $err), + } + }; +} + +impl<T: Iterator<Item = Token>> Compiler<T> { + fn compile(&mut self) -> LoxResult<()> { + self.advance(); + + while !self.match_token(&TokenKind::Eof) { + self.declaration()?; + } + + self.end_compiler() + } + + fn advance(&mut self) { + self.previous = self.current.take(); + self.current = self.tokens.next(); + } + + fn expression(&mut self) -> LoxResult<()> { + self.parse_precedence(Precedence::Assignment) + } + + fn var_declaration(&mut self) -> LoxResult<()> { + let global = self.parse_variable()?; + + if self.match_token(&TokenKind::Equal) { + self.expression()?; + } else { + self.emit_op(OpCode::OpNil); + } + + self.expect_semicolon("expect ';' after variable declaration")?; + self.define_variable(global) + } + + fn define_variable(&mut self, var: usize) -> LoxResult<()> { + self.emit_op(OpCode::OpDefineGlobal(var)); + Ok(()) + } + + fn declaration(&mut self) -> LoxResult<()> { + if self.match_token(&TokenKind::Var) { + self.var_declaration()?; + } else { + self.statement()?; + } + + if self.panic { + self.synchronise(); + } + + Ok(()) + } + + fn statement(&mut self) -> LoxResult<()> { + if self.match_token(&TokenKind::Print) { + self.print_statement() + } else { + self.expression_statement() + } + } + + fn print_statement(&mut self) -> LoxResult<()> { + self.expression()?; + self.expect_semicolon("expect ';' after print statement")?; + self.emit_op(OpCode::OpPrint); + Ok(()) + } + + fn expression_statement(&mut self) -> LoxResult<()> { + self.expression()?; + self.expect_semicolon("expect ';' after expression")?; + self.emit_op(OpCode::OpPop); + Ok(()) + } + + fn number(&mut self) -> LoxResult<()> { + if let TokenKind::Number(num) = self.previous().kind { + self.emit_constant(Value::Number(num), true); + return Ok(()); + } + + unreachable!("internal parser error: entered number() incorrectly") + } + + fn grouping(&mut self) -> LoxResult<()> { + self.expression()?; + consume!( + self, + TokenKind::RightParen, + ErrorKind::ExpectedToken("Expected ')' after expression") + ); + Ok(()) + } + + fn unary(&mut self) -> LoxResult<()> { + // TODO(tazjin): Avoid clone + let kind = self.previous().kind.clone(); + + // Compile the operand + self.parse_precedence(Precedence::Unary)?; + + // Emit operator instruction + match kind { + TokenKind::Bang => self.emit_op(OpCode::OpNot), + TokenKind::Minus => self.emit_op(OpCode::OpNegate), + _ => unreachable!("only called for unary operator tokens"), + } + + Ok(()) + } + + fn binary(&mut self) -> LoxResult<()> { + // Remember the operator + let operator = self.previous().kind.clone(); + + // Compile the right operand + let rule: ParseRule<T> = rule_for(&operator); + self.parse_precedence(rule.precedence.next())?; + + // Emit operator instruction + match operator { + TokenKind::Minus => self.emit_op(OpCode::OpSubtract), + TokenKind::Plus => self.emit_op(OpCode::OpAdd), + TokenKind::Star => self.emit_op(OpCode::OpMultiply), + TokenKind::Slash => self.emit_op(OpCode::OpDivide), + + TokenKind::BangEqual => { + self.emit_op(OpCode::OpEqual); + self.emit_op(OpCode::OpNot); + } + + TokenKind::EqualEqual => self.emit_op(OpCode::OpEqual), + TokenKind::Greater => self.emit_op(OpCode::OpGreater), + + TokenKind::GreaterEqual => { + self.emit_op(OpCode::OpLess); + self.emit_op(OpCode::OpNot); + } + + TokenKind::Less => self.emit_op(OpCode::OpLess), + TokenKind::LessEqual => { + 
self.emit_op(OpCode::OpGreater); + self.emit_op(OpCode::OpNot); + } + + _ => unreachable!("only called for binary operator tokens"), + } + + Ok(()) + } + + fn literal(&mut self) -> LoxResult<()> { + match self.previous().kind { + TokenKind::Nil => self.emit_op(OpCode::OpNil), + TokenKind::True => self.emit_op(OpCode::OpTrue), + TokenKind::False => self.emit_op(OpCode::OpFalse), + _ => unreachable!("only called for literal value tokens"), + } + + Ok(()) + } + + fn string(&mut self) -> LoxResult<()> { + let val = match &self.previous().kind { + TokenKind::String(s) => s.clone(), + _ => unreachable!("only called for strings"), + }; + + let id = self.strings.intern(val); + self.emit_constant(Value::String(id.into()), true); + + Ok(()) + } + + fn named_variable(&mut self) -> LoxResult<()> { + let ident = self.identifier_str(Self::previous)?; + let constant_id = + self.emit_constant(Value::String(ident.into()), false); + self.emit_op(OpCode::OpGetGlobal(constant_id)); + Ok(()) + } + + fn variable(&mut self) -> LoxResult<()> { + self.named_variable() + } + + fn parse_precedence(&mut self, precedence: Precedence) -> LoxResult<()> { + self.advance(); + let rule: ParseRule<T> = rule_for(&self.previous().kind); + let prefix_fn = match rule.prefix { + None => unimplemented!("expected expression or something, unclear"), + Some(func) => func, + }; + + prefix_fn(self)?; + + while precedence <= rule_for::<T>(&self.current().kind).precedence { + self.advance(); + match rule_for::<T>(&self.previous().kind).infix { + Some(func) => { + func(self)?; + } + None => { + unreachable!("invalid compiler state: error in parse rules") + } + } + } + + Ok(()) + } + + fn identifier_str( + &mut self, + token_fn: fn(&Self) -> &Token, + ) -> LoxResult<InternedStr> { + let ident = match &token_fn(self).kind { + TokenKind::Identifier(ident) => ident.to_string(), + _ => { + return Err(Error { + line: self.current().line, + kind: ErrorKind::ExpectedToken("Expected identifier"), + }) + } + }; + + Ok(self.strings.intern(ident)) + } + + fn parse_variable(&mut self) -> LoxResult<usize> { + consume!( + self, + TokenKind::Identifier(_), + ErrorKind::ExpectedToken("expected identifier") + ); + + let id = self.identifier_str(Self::previous)?; + Ok(self.emit_constant(Value::String(id.into()), false)) + } + + fn current_chunk(&mut self) -> &mut Chunk { + &mut self.chunk + } + + fn end_compiler(&mut self) -> LoxResult<()> { + self.emit_op(OpCode::OpReturn); + + #[cfg(feature = "disassemble")] + { + chunk::disassemble_chunk(&self.chunk); + println!("== compilation finished =="); + } + + Ok(()) + } + + fn emit_op(&mut self, op: OpCode) { + let line = self.previous().line; + self.current_chunk().add_op(op, line); + } + + fn emit_constant(&mut self, val: Value, with_op: bool) -> usize { + let idx = self.chunk.add_constant(val); + + if with_op { + self.emit_op(OpCode::OpConstant(idx)); + } + + idx + } + + fn previous(&self) -> &Token { + self.previous + .as_ref() + .expect("invalid internal compiler state: missing previous token") + } + + fn current(&self) -> &Token { + self.current + .as_ref() + .expect("invalid internal compiler state: missing current token") + } + + fn error_at(&mut self, line: usize, kind: ErrorKind) { + if self.panic { + return; + } + + self.panic = true; + self.errors.push(Error { kind, line }) + } + + fn match_token(&mut self, token: &TokenKind) -> bool { + if !self.check(token) { + return false; + } + + self.advance(); + true + } + + fn check(&self, token: &TokenKind) -> bool { + return self.current().kind == *token; + 
} + + fn synchronise(&mut self) { + self.panic = false; + + while self.current().kind != TokenKind::Eof { + if self.previous().kind == TokenKind::Semicolon { + return; + } + + match self.current().kind { + TokenKind::Class + | TokenKind::Fun + | TokenKind::Var + | TokenKind::For + | TokenKind::If + | TokenKind::While + | TokenKind::Print + | TokenKind::Return => return, + + _ => { + self.advance(); + } + } + } + } + + fn expect_semicolon(&mut self, msg: &'static str) -> LoxResult<()> { + consume!(self, TokenKind::Semicolon, ErrorKind::ExpectedToken(msg)); + Ok(()) + } +} + +pub fn compile(code: &str) -> Result<(Interner, Chunk), Vec<Error>> { + let chars = code.chars().collect::<Vec<char>>(); + let tokens = scanner::scan(&chars).map_err(|errors| { + errors.into_iter().map(Into::into).collect::<Vec<Error>>() + })?; + + let mut compiler = Compiler { + tokens: tokens.into_iter().peekable(), + chunk: Default::default(), + panic: false, + errors: vec![], + strings: Interner::with_capacity(1024), + current: None, + previous: None, + }; + + compiler.compile()?; + + if compiler.errors.is_empty() { + Ok((compiler.strings, compiler.chunk)) + } else { + Err(compiler.errors) + } +} diff --git a/users/tazjin/rlox/src/bytecode/errors.rs b/users/tazjin/rlox/src/bytecode/errors.rs new file mode 100644 index 000000000000..c6b86172f86d --- /dev/null +++ b/users/tazjin/rlox/src/bytecode/errors.rs @@ -0,0 +1,50 @@ +use crate::scanner::ScannerError; + +use std::fmt; + +#[derive(Debug)] +pub enum ErrorKind { + UnexpectedChar(char), + UnterminatedString, + ExpectedToken(&'static str), + InternalError(&'static str), + TypeError(String), +} + +#[derive(Debug)] +pub struct Error { + pub kind: ErrorKind, + pub line: usize, +} + +impl fmt::Display for Error { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "[line NYI] Error: {:?}", self.kind) + } +} + +impl From<ScannerError> for Error { + fn from(err: ScannerError) -> Self { + match err { + ScannerError::UnexpectedChar { line, unexpected } => Error { + line, + kind: ErrorKind::UnexpectedChar(unexpected), + }, + + ScannerError::UnterminatedString { line } => Error { + line, + kind: ErrorKind::UnterminatedString, + }, + } + } +} + +// Convenience implementation as we're often dealing with vectors of +// errors (to report as many issues as possible before terminating) +impl From<Error> for Vec<Error> { + fn from(err: Error) -> Self { + vec![err] + } +} + +pub type LoxResult<T> = Result<T, Error>; diff --git a/users/tazjin/rlox/src/bytecode/interner/mod.rs b/users/tazjin/rlox/src/bytecode/interner/mod.rs new file mode 100644 index 000000000000..1da1a24b2c5f --- /dev/null +++ b/users/tazjin/rlox/src/bytecode/interner/mod.rs @@ -0,0 +1,87 @@ +//! String-interning implementation for values that are likely to +//! benefit from fast comparisons and deduplication (e.g. instances of +//! variable names). +//! +//! This uses a trick from the typed-arena crate for guaranteeing +//! stable addresses by never resizing the existing String buffer, and +//! collecting full buffers in a vector. 
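+//!
+//! Illustrative usage note (not from the original source): the compiler
+//! interns identifiers once and passes the small, `Copy` `InternedStr` ids
+//! around; `Interner::lookup` turns an id back into `&str` when the text
+//! itself is needed (e.g. when printing or resolving globals in the VM).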
+ +use std::collections::HashMap; + +#[cfg(test)] +mod tests; + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub struct InternedStr { + id: usize, +} + +#[derive(Default)] +pub struct Interner { + map: HashMap<&'static str, InternedStr>, + vec: Vec<&'static str>, + buf: String, + full: Vec<String>, +} + +impl Interner { + pub fn with_capacity(cap: usize) -> Self { + Interner { + buf: String::with_capacity(cap), + ..Default::default() + } + } + + pub fn intern<S: AsRef<str>>(&mut self, name: S) -> InternedStr { + let name = name.as_ref(); + if let Some(&id) = self.map.get(name) { + return id; + } + + let name = self.alloc(name); + let id = InternedStr { + id: self.vec.len() as usize, + }; + + self.map.insert(name, id); + self.vec.push(name); + + debug_assert!(self.lookup(id) == name); + debug_assert!(self.intern(name) == id); + + id + } + + pub fn lookup<'a>(&'a self, id: InternedStr) -> &'a str { + self.vec[id.id] + } + + fn alloc<'a>(&'a mut self, name: &str) -> &'static str { + let cap = self.buf.capacity(); + if cap < self.buf.len() + name.len() { + let new_cap = (cap.max(name.len()) + 1).next_power_of_two(); + let new_buf = String::with_capacity(new_cap); + let old_buf = std::mem::replace(&mut self.buf, new_buf); + self.full.push(old_buf); + } + + let interned: &'a str = { + let start = self.buf.len(); + self.buf.push_str(name); + &self.buf[start..] + }; + + unsafe { + // This is sound for two reasons: + // + // 1. This function (Interner::alloc) is private, which + // prevents users from allocating a supposedly static + // reference. + // + // 2. Interner::lookup explicitly shortens the lifetime of + // references that are handed out to that of the + // reference to self. + return &*(interned as *const str); + } + } +} diff --git a/users/tazjin/rlox/src/bytecode/interner/tests.rs b/users/tazjin/rlox/src/bytecode/interner/tests.rs new file mode 100644 index 000000000000..b34bf6835389 --- /dev/null +++ b/users/tazjin/rlox/src/bytecode/interner/tests.rs @@ -0,0 +1,24 @@ +use super::*; + +#[test] +fn interns_strings() { + let mut interner = Interner::with_capacity(128); + let id = interner.intern("hello world"); + assert_eq!("hello world", interner.lookup(id)); +} + +#[test] +fn deduplicates_strings() { + let mut interner = Interner::with_capacity(128); + let id_1 = interner.intern("hello world"); + let id_2 = interner.intern("hello world"); + assert_eq!(id_1, id_2); +} + +#[test] +fn ids_survive_growing() { + let mut interner = Interner::with_capacity(16); + let id = interner.intern("hello"); + interner.intern("excessively large string that will cause eallocation"); + assert_eq!("hello", interner.lookup(id)); +} diff --git a/users/tazjin/rlox/src/bytecode/mod.rs b/users/tazjin/rlox/src/bytecode/mod.rs new file mode 100644 index 000000000000..c6f3a737aef8 --- /dev/null +++ b/users/tazjin/rlox/src/bytecode/mod.rs @@ -0,0 +1,33 @@ +//! Bytecode interpreter for Lox. +//! +//! 
https://craftinginterpreters.com/chunks-of-bytecode.html + +mod chunk; +mod compiler; +mod errors; +mod interner; +mod opcode; +mod value; +mod vm; + +#[cfg(test)] +mod tests; + +pub struct Interpreter {} + +impl crate::Lox for Interpreter { + type Error = errors::Error; + type Value = value::Value; + + fn create() -> Self { + Interpreter {} + } + + fn interpret( + &mut self, + code: String, + ) -> Result<Self::Value, Vec<Self::Error>> { + let (strings, chunk) = compiler::compile(&code)?; + vm::interpret(strings, chunk).map_err(|e| vec![e]) + } +} diff --git a/users/tazjin/rlox/src/bytecode/opcode.rs b/users/tazjin/rlox/src/bytecode/opcode.rs new file mode 100644 index 000000000000..1c23449e76b3 --- /dev/null +++ b/users/tazjin/rlox/src/bytecode/opcode.rs @@ -0,0 +1,36 @@ +#[derive(Debug)] +pub enum OpCode { + /// Push a constant onto the stack. + OpConstant(usize), + + // Literal pushes + OpNil, + OpTrue, + OpFalse, + + /// Return from the current function. + OpReturn, + + // Boolean & comparison operators + OpNot, + OpEqual, + OpGreater, + OpLess, + + /// Unary negation + OpNegate, + + // Arithmetic operators + OpAdd, + OpSubtract, + OpMultiply, + OpDivide, + + // Built in operations + OpPrint, + OpPop, + + // Variable management + OpDefineGlobal(usize), + OpGetGlobal(usize), +} diff --git a/users/tazjin/rlox/src/bytecode/tests.rs b/users/tazjin/rlox/src/bytecode/tests.rs new file mode 100644 index 000000000000..d5b6ab020389 --- /dev/null +++ b/users/tazjin/rlox/src/bytecode/tests.rs @@ -0,0 +1,120 @@ +use super::value::Value; +use super::*; + +use crate::Lox; + +fn expect(code: &str, value: Value) { + let result = Interpreter::create() + .interpret(code.into()) + .expect("evaluation failed"); + assert_eq!(result, value); +} + +fn expect_num(code: &str, value: f64) { + expect(code, Value::Number(value)) +} + +fn expect_bool(code: &str, value: bool) { + expect(code, Value::Bool(value)) +} + +fn expect_str(code: &str, value: &str) { + expect(code, Value::String(value.to_string().into())) +} + +#[test] +fn numbers() { + expect_num("1;", 1.0); + expect_num("13.37;", 13.37); +} + +#[test] +fn negative_numbers() { + // Note: This technically tests unary operators. 
+ expect_num("-1;", -1.0); + expect_num("-13.37;", -13.37); +} + +#[test] +fn terms() { + expect_num("1 + 2;", 3.0); + expect_num("3 - 1;", 2.0); + expect_num("0.7 + 0.3;", 1.0); + expect_num("1 + -3;", -2.0); + expect_num("-1 - -1;", 0.0); + expect_num("10 - -10 + 10;", 30.0); +} + +#[test] +fn factors() { + expect_num("1 * 2;", 2.0); + expect_num("10 / 5;", 2.0); + expect_num("0.7 * 4 / 1.4;", 2.0); + expect_num("10 * -10 / 10;", -10.0); +} + +#[test] +fn arithmetic() { + expect_num("10 - 3 * 2;", 4.0); + expect_num("-4 * -4 + (14 - 5);", 25.0); + expect_num("(702 + 408) - ((239 - 734) / -5) + -4;", 1007.0); +} + +#[test] +fn trivial_literals() { + expect("true;", Value::Bool(true)); + expect("false;", Value::Bool(false)); + expect("nil;", Value::Nil); +} + +#[test] +fn negation() { + expect_bool("!true;", false); + expect_bool("!false;", true); + expect_bool("!nil;", true); + expect_bool("!13.5;", false); + expect_bool("!-42;", false); +} + +#[test] +fn equality() { + expect_bool("42 == 42;", true); + expect_bool("42 != 42;", false); + expect_bool("42 == 42.0;", true); + + expect_bool("true == true;", true); + expect_bool("true == false;", false); + expect_bool("true == !false;", true); + expect_bool("true != true;", false); + expect_bool("true != false;", true); + + expect_bool("42 == false;", false); + expect_bool("42 == true;", false); + expect_bool("!42 == !true;", true); +} + +#[test] +fn comparisons() { + expect_bool("42 > 23;", true); + expect_bool("42 < 23;", false); + expect_bool("42 <= 42;", true); + expect_bool("42 <= 23;", false); + expect_bool("42 >= 42;", true); + expect_bool("42 >= 23;", true); +} + +#[test] +fn strings() { + expect_str("\"hello\";", "hello"); + expect_str("\"hello\" + \" world\";", "hello world"); +} + +#[test] +fn variables() { + expect_num("var a = 5; a;", 5.0); + expect_num("var a = 5; var b = 2; a * b;", 10.0); + expect_str( + "var greeting = \"hello\"; var name = \"Zubnog\"; greeting + \" \" + name;", + "hello Zubnog", + ); +} diff --git a/users/tazjin/rlox/src/bytecode/value.rs b/users/tazjin/rlox/src/bytecode/value.rs new file mode 100644 index 000000000000..4170efadf8fe --- /dev/null +++ b/users/tazjin/rlox/src/bytecode/value.rs @@ -0,0 +1,37 @@ +use super::interner::InternedStr; + +#[derive(Clone, Debug, PartialEq)] +pub enum Value { + Nil, + Bool(bool), + Number(f64), + String(LoxString), +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub enum LoxString { + Heap(String), + Interned(InternedStr), +} + +impl From<String> for LoxString { + fn from(s: String) -> Self { + LoxString::Heap(s) + } +} + +impl From<InternedStr> for LoxString { + fn from(s: InternedStr) -> Self { + LoxString::Interned(s) + } +} + +impl Value { + pub fn is_falsey(&self) -> bool { + match self { + Value::Nil => true, + Value::Bool(false) => true, + _ => false, + } + } +} diff --git a/users/tazjin/rlox/src/bytecode/vm.rs b/users/tazjin/rlox/src/bytecode/vm.rs new file mode 100644 index 000000000000..0cd0853764e0 --- /dev/null +++ b/users/tazjin/rlox/src/bytecode/vm.rs @@ -0,0 +1,231 @@ +use std::collections::HashMap; + +use super::chunk; +use super::errors::*; +use super::interner::Interner; +use super::opcode::OpCode; +use super::value::{LoxString, Value}; + +pub struct VM { + chunk: chunk::Chunk, + + // TODO(tazjin): Accessing array elements constantly is not ideal, + // lets see if something clever can be done with iterators. 
+ ip: usize, + + stack: Vec<Value>, + strings: Interner, + + globals: HashMap<LoxString, Value>, + + // Operations that consume values from the stack without pushing + // anything leave their last value in this slot, which makes it + // possible to return values from interpreters that ran code which + // ended with a statement. + last_drop: Option<Value>, +} + +impl VM { + fn push(&mut self, value: Value) { + self.stack.push(value) + } + + fn pop(&mut self) -> Value { + self.stack.pop().expect("fatal error: stack empty!") + } +} + +macro_rules! with_type { + ( $self:ident, $val:ident, $type:pat, $body:expr ) => { + match $val { + $type => $body, + _ => { + return Err(Error { + line: $self.chunk.get_line($self.ip - 1), + kind: ErrorKind::TypeError(format!( + "Expected type {}, but found value: {:?}", + stringify!($type), + $val, + )), + }) + } + } + }; +} + +macro_rules! binary_op { + ( $vm:ident, $type:tt, $op:tt ) => { + binary_op!($vm, $type, $type, $op) + }; + + ( $vm:ident, $in_type:tt, $out_type:tt, $op:tt ) => {{ + let b = $vm.pop(); + let a = $vm.pop(); + + with_type!($vm, b, Value::$in_type(val_b), { + with_type!($vm, a, Value::$in_type(val_a), { + $vm.push(Value::$out_type(val_a $op val_b)) + }) + }) + }}; +} + +impl VM { + fn run(&mut self) -> LoxResult<Value> { + loop { + let op = &self.chunk.code[self.ip]; + + #[cfg(feature = "disassemble")] + chunk::disassemble_instruction(&self.chunk, self.ip); + + self.ip += 1; + + match op { + OpCode::OpReturn => { + if !self.stack.is_empty() { + let val = self.pop(); + return Ok(self.return_value(val)); + } else if self.last_drop.is_some() { + let val = self.last_drop.take().unwrap(); + return Ok(self.return_value(val)); + } else { + return Ok(Value::Nil); + } + } + + OpCode::OpConstant(idx) => { + let c = self.chunk.constant(*idx).clone(); + self.push(c); + } + + OpCode::OpNil => self.push(Value::Nil), + OpCode::OpTrue => self.push(Value::Bool(true)), + OpCode::OpFalse => self.push(Value::Bool(false)), + + OpCode::OpNot => { + let v = self.pop(); + self.push(Value::Bool(v.is_falsey())); + } + + OpCode::OpEqual => { + let b = self.pop(); + let a = self.pop(); + self.push(Value::Bool(a == b)); + } + + OpCode::OpLess => binary_op!(self, Number, Bool, <), + OpCode::OpGreater => binary_op!(self, Number, Bool, >), + + OpCode::OpNegate => { + let v = self.pop(); + with_type!( + self, + v, + Value::Number(num), + self.push(Value::Number(-num)) + ); + } + + OpCode::OpSubtract => binary_op!(self, Number, -), + OpCode::OpMultiply => binary_op!(self, Number, *), + OpCode::OpDivide => binary_op!(self, Number, /), + + OpCode::OpAdd => { + let b = self.pop(); + let a = self.pop(); + + match (a, b) { + (Value::String(s_a), Value::String(s_b)) => { + let mut new_s = self.resolve_str(&s_a).to_string(); + new_s.push_str(self.resolve_str(&s_b)); + self.push(Value::String(new_s.into())); + } + + (Value::Number(n_a), Value::Number(n_b)) => + self.push(Value::Number(n_a + n_b)), + + _ => return Err(Error { + line: self.chunk.get_line(self.ip - 1), + kind: ErrorKind::TypeError( + "'+' operator only works on strings and numbers".into() + ), + }) + } + } + + OpCode::OpPrint => { + let val = self.pop(); + println!("{}", self.print_value(val)); + } + + OpCode::OpPop => { + self.last_drop = Some(self.pop()); + } + + OpCode::OpDefineGlobal(name_idx) => { + let name = self.chunk.constant(*name_idx); + with_type!(self, name, Value::String(name), { + let name = name.clone(); + let val = self.pop(); + self.globals.insert(name, val); + }); + } + + 
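+                // Read a global: the operand indexes a constant holding the
+                // variable's (interned) name, which is then looked up in
+                // `globals`; e.g. `var a = 5; a;` exercises both this opcode
+                // and OpDefineGlobal above.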
OpCode::OpGetGlobal(name_idx) => { + let name = self.chunk.constant(*name_idx); + with_type!(self, name, Value::String(name), { + let val = match self.globals.get(name) { + None => unimplemented!("variable not found error"), + Some(val) => val.clone(), + }; + self.push(val) + }); + } + } + + #[cfg(feature = "disassemble")] + println!("=> {:?}", self.stack); + } + } + + // For some types of values (e.g. interned strings), returns + // should no longer include any references into the interpreter. + fn return_value(&self, val: Value) -> Value { + match val { + Value::String(string @ LoxString::Interned(_)) => { + Value::String(self.resolve_str(&string).to_string().into()) + } + _ => val, + } + } + + fn resolve_str<'a>(&'a self, string: &'a LoxString) -> &'a str { + match string { + LoxString::Heap(s) => s.as_str(), + LoxString::Interned(id) => self.strings.lookup(*id), + } + } + + fn print_value(&self, val: Value) -> String { + match val { + Value::String(LoxString::Heap(s)) => s, + Value::String(LoxString::Interned(id)) => { + self.strings.lookup(id).into() + } + _ => format!("{:?}", val), + } + } +} + +pub fn interpret(strings: Interner, chunk: chunk::Chunk) -> LoxResult<Value> { + let mut vm = VM { + chunk, + strings, + globals: HashMap::new(), + ip: 0, + stack: vec![], + last_drop: None, + }; + + vm.run() +} diff --git a/users/tazjin/rlox/src/main.rs b/users/tazjin/rlox/src/main.rs new file mode 100644 index 000000000000..2d8cf4f354ea --- /dev/null +++ b/users/tazjin/rlox/src/main.rs @@ -0,0 +1,80 @@ +use std::env; +use std::fs; +use std::io; +use std::io::Write; +use std::process; + +mod bytecode; +mod scanner; +mod treewalk; + +/// Trait for making the different interpreters callable in the same +/// way. +pub trait Lox { + type Value: std::fmt::Debug; + type Error: std::fmt::Display; + + fn create() -> Self; + fn interpret( + &mut self, + source: String, + ) -> Result<Self::Value, Vec<Self::Error>>; +} + +fn main() { + let mut args = env::args(); + if args.len() > 2 { + println!("Usage: rlox [script]"); + process::exit(1); + } + + match env::var("LOX_INTERPRETER").as_ref().map(String::as_str) { + Ok("treewalk") => { + pick::<treewalk::interpreter::Interpreter>(args.nth(1)) + } + _ => pick::<bytecode::Interpreter>(args.nth(1)), + } +} + +fn pick<I: Lox>(file_arg: Option<String>) { + if let Some(file) = file_arg { + run_file::<I>(&file); + } else { + run_prompt::<I>(); + } +} + +// Run Lox code from a file and print results to stdout +fn run_file<I: Lox>(file: &str) { + let contents = + fs::read_to_string(file).expect("failed to read the input file"); + let mut lox = I::create(); + run(&mut lox, contents); +} + +// Evaluate Lox code interactively in a shitty REPL. +fn run_prompt<I: Lox>() { + let mut line = String::new(); + let mut lox = I::create(); + + loop { + print!("> "); + io::stdout().flush().unwrap(); + io::stdin() + .read_line(&mut line) + .expect("failed to read user input"); + run(&mut lox, std::mem::take(&mut line)); + line.clear(); + } +} + +fn run<I: Lox>(lox: &mut I, code: String) { + match lox.interpret(code) { + Ok(result) => println!("=> {:?}", result), + Err(errors) => { + for error in errors { + eprintln!("{}", error); + } + } + } +} diff --git a/users/tazjin/rlox/src/scanner.rs b/users/tazjin/rlox/src/scanner.rs new file mode 100644 index 000000000000..4e8f07b61f5e --- /dev/null +++ b/users/tazjin/rlox/src/scanner.rs @@ -0,0 +1,291 @@ +#[derive(Clone, Debug, PartialEq)] +pub enum TokenKind { + // Single-character tokens. 
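+    // (each maps 1:1 to a lexeme, e.g. '(' becomes LeftParen and ';' becomes
+    // Semicolon; see scan_token below)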
+ LeftParen, + RightParen, + LeftBrace, + RightBrace, + Comma, + Dot, + Minus, + Plus, + Semicolon, + Slash, + Star, + + // One or two character tokens. + Bang, + BangEqual, + Equal, + EqualEqual, + Greater, + GreaterEqual, + Less, + LessEqual, + + // Literals. + Identifier(String), + String(String), + Number(f64), + True, + False, + Nil, + + // Keywords. + And, + Class, + Else, + Fun, + For, + If, + Or, + Print, + Return, + Super, + This, + Var, + While, + + // Special things + Eof, +} + +#[derive(Clone, Debug)] +pub struct Token { + pub kind: TokenKind, + pub lexeme: String, + pub line: usize, +} + +pub enum ScannerError { + UnexpectedChar { line: usize, unexpected: char }, + UnterminatedString { line: usize }, +} + +struct Scanner<'a> { + source: &'a [char], + tokens: Vec<Token>, + errors: Vec<ScannerError>, + start: usize, // offset of first character in current lexeme + current: usize, // current offset into source + line: usize, // current line in source +} + +impl<'a> Scanner<'a> { + fn is_at_end(&self) -> bool { + return self.current >= self.source.len(); + } + + fn advance(&mut self) -> char { + self.current += 1; + self.source[self.current - 1] + } + + fn add_token(&mut self, kind: TokenKind) { + let lexeme = &self.source[self.start..self.current]; + self.tokens.push(Token { + kind, + lexeme: lexeme.into_iter().collect(), + line: self.line, + }) + } + + fn scan_token(&mut self) { + match self.advance() { + // simple single-character tokens + '(' => self.add_token(TokenKind::LeftParen), + ')' => self.add_token(TokenKind::RightParen), + '{' => self.add_token(TokenKind::LeftBrace), + '}' => self.add_token(TokenKind::RightBrace), + ',' => self.add_token(TokenKind::Comma), + '.' => self.add_token(TokenKind::Dot), + '-' => self.add_token(TokenKind::Minus), + '+' => self.add_token(TokenKind::Plus), + ';' => self.add_token(TokenKind::Semicolon), + '*' => self.add_token(TokenKind::Star), + + // possible multi-character tokens + '!' 
=> self.add_if_next('=', TokenKind::BangEqual, TokenKind::Bang), + '=' => { + self.add_if_next('=', TokenKind::EqualEqual, TokenKind::Equal) + } + '<' => self.add_if_next('=', TokenKind::LessEqual, TokenKind::Less), + '>' => self.add_if_next( + '=', + TokenKind::GreaterEqual, + TokenKind::Greater, + ), + + '/' => { + // support comments until EOL by discarding characters + if self.match_next('/') { + while self.peek() != '\n' && !self.is_at_end() { + self.advance(); + } + } else { + self.add_token(TokenKind::Slash); + } + } + + // ignore whitespace + ws if ws.is_whitespace() => { + if ws == '\n' { + self.line += 1 + } + } + + '"' => self.scan_string(), + + digit if digit.is_digit(10) => self.scan_number(), + + chr if chr.is_alphabetic() || chr == '_' => self.scan_identifier(), + + unexpected => self.errors.push(ScannerError::UnexpectedChar { + line: self.line, + unexpected, + }), + }; + } + + fn match_next(&mut self, expected: char) -> bool { + if self.is_at_end() || self.source[self.current] != expected { + false + } else { + self.current += 1; + true + } + } + + fn add_if_next(&mut self, expected: char, then: TokenKind, or: TokenKind) { + if self.match_next(expected) { + self.add_token(then); + } else { + self.add_token(or); + } + } + + fn peek(&self) -> char { + if self.is_at_end() { + return '\0'; + } else { + return self.source[self.current]; + } + } + + fn peek_next(&self) -> char { + if self.current + 1 >= self.source.len() { + return '\0'; + } else { + return self.source[self.current + 1]; + } + } + + fn scan_string(&mut self) { + while self.peek() != '"' && !self.is_at_end() { + if self.peek() == '\n' { + self.line += 1; + } + + self.advance(); + } + + if self.is_at_end() { + self.errors + .push(ScannerError::UnterminatedString { line: self.line }); + return; + } + + // closing '"' + self.advance(); + + // add token without surrounding quotes + let string: String = self.source[(self.start + 1)..(self.current - 1)] + .iter() + .collect(); + self.add_token(TokenKind::String(string)); + } + + fn scan_number(&mut self) { + while self.peek().is_digit(10) { + self.advance(); + } + + // Look for a fractional part + if self.peek() == '.' && self.peek_next().is_digit(10) { + // consume '.' 
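+            // (peek_next() ensures the dot is only consumed when a digit
+            // follows it, so "13." scans as Number(13.0) followed by a Dot)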
+ self.advance(); + + while self.peek().is_digit(10) { + self.advance(); + } + } + + let num: f64 = self.source[self.start..self.current] + .iter() + .collect::<String>() + .parse() + .expect("float parsing should always work"); + + self.add_token(TokenKind::Number(num)); + } + + fn scan_identifier(&mut self) { + while self.peek().is_alphanumeric() || self.peek() == '_' { + self.advance(); + } + + let ident: String = + self.source[self.start..self.current].iter().collect(); + + // Determine whether this is an identifier, or a keyword: + let token_kind = match ident.as_str() { + "and" => TokenKind::And, + "class" => TokenKind::Class, + "else" => TokenKind::Else, + "false" => TokenKind::False, + "for" => TokenKind::For, + "fun" => TokenKind::Fun, + "if" => TokenKind::If, + "nil" => TokenKind::Nil, + "or" => TokenKind::Or, + "print" => TokenKind::Print, + "return" => TokenKind::Return, + "super" => TokenKind::Super, + "this" => TokenKind::This, + "true" => TokenKind::True, + "var" => TokenKind::Var, + "while" => TokenKind::While, + _ => TokenKind::Identifier(ident), + }; + + self.add_token(token_kind); + } + + fn scan_tokens(&mut self) { + while !self.is_at_end() { + self.start = self.current; + self.scan_token(); + } + + self.add_token(TokenKind::Eof); + } +} + +pub fn scan<'a>(input: &'a [char]) -> Result<Vec<Token>, Vec<ScannerError>> { + let mut scanner = Scanner { + source: &input, + tokens: vec![], + errors: vec![], + start: 0, + current: 0, + line: 0, + }; + + scanner.scan_tokens(); + + if !scanner.errors.is_empty() { + return Err(scanner.errors); + } + + return Ok(scanner.tokens); +} diff --git a/users/tazjin/rlox/src/treewalk/errors.rs b/users/tazjin/rlox/src/treewalk/errors.rs new file mode 100644 index 000000000000..391663d51b18 --- /dev/null +++ b/users/tazjin/rlox/src/treewalk/errors.rs @@ -0,0 +1,59 @@ +use crate::scanner::ScannerError; +use crate::treewalk::interpreter::Value; + +use std::fmt; + +#[derive(Debug)] +pub enum ErrorKind { + UnexpectedChar(char), + UnterminatedString, + UnmatchedParens, + ExpectedExpression(String), + ExpectedSemicolon, + ExpectedClosingBrace, + ExpectedToken(&'static str), + TypeError(String), + UndefinedVariable(String), + InternalError(String), + InvalidAssignmentTarget(String), + RuntimeError(String), + StaticError(String), + + // This variant is not an error, rather it is used for + // short-circuiting out of a function body that hits a `return` + // statement. + // + // It's implemented this way because in the original book the + // author uses exceptions for control flow, and this is the + // closest equivalent that I had available without diverging too + // much. 
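+    // For example, interpreting `return 42;` produces
+    // Err(Error { kind: ErrorKind::FunctionReturn(Literal::Number(42.0).into()), .. }),
+    // which Callable::call in the interpreter catches and unwraps back into a
+    // normal Ok value.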
+ FunctionReturn(Value), +} + +#[derive(Debug)] +pub struct Error { + pub line: usize, + pub kind: ErrorKind, +} + +impl fmt::Display for Error { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "[line {}] Error: {:?}", self.line, self.kind) + } +} + +impl From<ScannerError> for Error { + fn from(err: ScannerError) -> Self { + match err { + ScannerError::UnexpectedChar { line, unexpected } => Error { + line, + kind: ErrorKind::UnexpectedChar(unexpected), + }, + + ScannerError::UnterminatedString { line } => Error { + line, + kind: ErrorKind::UnterminatedString, + }, + } + } +} diff --git a/users/tazjin/rlox/src/treewalk/interpreter.rs b/users/tazjin/rlox/src/treewalk/interpreter.rs new file mode 100644 index 000000000000..d9fe33661684 --- /dev/null +++ b/users/tazjin/rlox/src/treewalk/interpreter.rs @@ -0,0 +1,556 @@ +use crate::treewalk::errors::{Error, ErrorKind}; +use crate::treewalk::parser::{self, Block, Expr, Literal, Statement}; +use crate::treewalk::resolver; +use crate::treewalk::scanner::{self, TokenKind}; +use crate::Lox; +use std::collections::HashMap; +use std::rc::Rc; +use std::sync::RwLock; + +// Implementation of built-in functions. +mod builtins; + +#[cfg(test)] +mod tests; + +// Tree-walk interpreter + +// Representation of all callables, including builtins & user-defined +// functions. +#[derive(Clone, Debug)] +pub enum Callable { + Builtin(&'static dyn builtins::Builtin), + Function { + func: Rc<parser::Function>, + closure: Rc<RwLock<Environment>>, + }, +} + +impl Callable { + fn arity(&self) -> usize { + match self { + Callable::Builtin(builtin) => builtin.arity(), + Callable::Function { func, .. } => func.params.len(), + } + } + + fn call( + &self, + lox: &mut Interpreter, + args: Vec<Value>, + ) -> Result<Value, Error> { + match self { + Callable::Builtin(builtin) => builtin.call(args), + + Callable::Function { func, closure } => { + let mut fn_env: Environment = Default::default(); + fn_env.enclosing = Some(closure.clone()); + + for (param, value) in func.params.iter().zip(args.into_iter()) { + fn_env.define(param, value)?; + } + + let result = lox.interpret_block_with_env( + Some(Rc::new(RwLock::new(fn_env))), + &func.body, + ); + + match result { + // extract returned values if applicable + Err(Error { + kind: ErrorKind::FunctionReturn(value), + .. + }) => Ok(value), + + // otherwise just return the result itself + _ => result, + } + } + } + } +} + +// Representation of an in-language value. +#[derive(Clone, Debug)] +pub enum Value { + Literal(Literal), + Callable(Callable), +} + +impl PartialEq for Value { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Value::Literal(lhs), Value::Literal(rhs)) => lhs == rhs, + // functions do not have equality + _ => false, + } + } +} + +impl From<Literal> for Value { + fn from(lit: Literal) -> Value { + Value::Literal(lit) + } +} + +impl Value { + fn expect_literal(self) -> Result<Literal, Error> { + match self { + Value::Literal(lit) => Ok(lit), + _ => unimplemented!(), // which error? which line? 
+ } + } +} + +#[derive(Debug, Default)] +pub struct Environment { + enclosing: Option<Rc<RwLock<Environment>>>, + values: HashMap<String, Value>, +} + +impl Environment { + fn define( + &mut self, + name: &scanner::Token, + value: Value, + ) -> Result<(), Error> { + let ident = identifier_str(name)?; + self.values.insert(ident.into(), value); + Ok(()) + } + + fn get( + &self, + ident: &str, + line: usize, + depth: usize, + ) -> Result<Value, Error> { + if depth > 0 { + match &self.enclosing { + None => { + return Err(Error { + line, + kind: ErrorKind::InternalError(format!( + "invalid depth {} for {}", + depth, ident + )), + }) + } + Some(parent) => { + let env = parent + .read() + .expect("fatal: environment lock poisoned"); + return env.get(ident, line, depth - 1); + } + } + } + + self.values + .get(ident) + .map(Clone::clone) + .ok_or_else(|| Error { + line, + kind: ErrorKind::UndefinedVariable(ident.into()), + }) + } + + fn assign( + &mut self, + name: &scanner::Token, + value: Value, + ) -> Result<(), Error> { + let ident = identifier_str(name)?; + + match self.values.get_mut(ident) { + Some(target) => { + *target = value; + Ok(()) + } + None => { + if let Some(parent) = &self.enclosing { + return parent.write().unwrap().assign(name, value); + } + + Err(Error { + line: name.line, + kind: ErrorKind::UndefinedVariable(ident.into()), + }) + } + } + } +} + +fn identifier_str(name: &scanner::Token) -> Result<&str, Error> { + if let TokenKind::Identifier(ident) = &name.kind { + Ok(ident) + } else { + Err(Error { + line: name.line, + kind: ErrorKind::InternalError("unexpected identifier kind".into()), + }) + } +} + +#[derive(Debug)] +pub struct Interpreter { + env: Rc<RwLock<Environment>>, +} + +impl Lox for Interpreter { + type Value = Value; + type Error = Error; + + /// Create a new interpreter and configure the initial global + /// variable set. 
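+    /// Currently the only pre-defined global is the `clock` builtin from
+    /// builtins.rs.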
+ fn create() -> Self { + let mut globals = HashMap::new(); + + globals.insert( + "clock".into(), + Value::Callable(Callable::Builtin(&builtins::Clock {})), + ); + + Interpreter { + env: Rc::new(RwLock::new(Environment { + enclosing: None, + values: globals, + })), + } + } + + fn interpret(&mut self, code: String) -> Result<Value, Vec<Error>> { + let chars: Vec<char> = code.chars().collect(); + + let mut program = scanner::scan(&chars) + .map_err(|errors| errors.into_iter().map(Into::into).collect()) + .and_then(|tokens| parser::parse(tokens))?; + + let globals = self + .env + .read() + .expect("static globals lock poisoned") + .values + .keys() + .map(Clone::clone) + .collect::<Vec<String>>(); + + resolver::resolve(&globals, &mut program).map_err(|e| vec![e])?; + self.interpret_block_with_env(None, &program) + .map_err(|e| vec![e]) + } +} + +impl Interpreter { + // Environment modification helpers + fn define_var( + &mut self, + name: &scanner::Token, + value: Value, + ) -> Result<(), Error> { + self.env + .write() + .expect("environment lock is poisoned") + .define(name, value) + } + + fn assign_var( + &mut self, + name: &scanner::Token, + value: Value, + ) -> Result<(), Error> { + self.env + .write() + .expect("environment lock is poisoned") + .assign(name, value) + } + + fn get_var(&mut self, var: &parser::Variable) -> Result<Value, Error> { + let ident = identifier_str(&var.name)?; + let depth = var.depth.ok_or_else(|| Error { + line: var.name.line, + kind: ErrorKind::UndefinedVariable(ident.into()), + })?; + + self.env.read().expect("environment lock is poisoned").get( + ident, + var.name.line, + depth, + ) + } + + /// Interpret the block in the supplied environment. If no + /// environment is supplied, a new one is created using the + /// current one as its parent. + fn interpret_block_with_env( + &mut self, + env: Option<Rc<RwLock<Environment>>>, + block: &parser::Block, + ) -> Result<Value, Error> { + let env = match env { + Some(env) => env, + None => { + let env: Rc<RwLock<Environment>> = Default::default(); + set_enclosing_env(&env, self.env.clone()); + env + } + }; + + let previous = std::mem::replace(&mut self.env, env); + let result = self.interpret_block(block); + + // Swap it back, discarding the child env. 
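+        // (function closures hold their own Rc to the environment they were
+        // defined in, so dropping our reference here does not affect them)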
+ self.env = previous; + + return result; + } + + fn interpret_block(&mut self, program: &Block) -> Result<Value, Error> { + let mut value = Value::Literal(Literal::Nil); + + for stmt in program { + value = self.interpret_stmt(stmt)?; + } + + Ok(value) + } + + fn interpret_stmt(&mut self, stmt: &Statement) -> Result<Value, Error> { + let value = match stmt { + Statement::Expr(expr) => self.eval(expr)?, + Statement::Print(expr) => { + let result = self.eval(expr)?; + let output = format!("{:?}", result); + println!("{}", output); + Value::Literal(Literal::String(output)) + } + Statement::Var(var) => return self.interpret_var(var), + Statement::Block(block) => { + return self.interpret_block_with_env(None, block) + } + Statement::If(if_stmt) => return self.interpret_if(if_stmt), + Statement::While(while_stmt) => { + return self.interpret_while(while_stmt) + } + Statement::Function(func) => { + return self.interpret_function(func.clone()) + } + Statement::Return(ret) => { + return Err(Error { + line: 0, + kind: ErrorKind::FunctionReturn(self.eval(&ret.value)?), + }) + } + }; + + Ok(value) + } + + fn interpret_var(&mut self, var: &parser::Var) -> Result<Value, Error> { + let init = var.initialiser.as_ref().ok_or_else(|| Error { + line: var.name.line, + kind: ErrorKind::InternalError( + "missing variable initialiser".into(), + ), + })?; + let value = self.eval(init)?; + self.define_var(&var.name, value.clone())?; + Ok(value) + } + + fn interpret_if(&mut self, if_stmt: &parser::If) -> Result<Value, Error> { + let condition = self.eval(&if_stmt.condition)?; + + if eval_truthy(&condition) { + self.interpret_stmt(&if_stmt.then_branch) + } else if let Some(else_branch) = &if_stmt.else_branch { + self.interpret_stmt(else_branch) + } else { + Ok(Value::Literal(Literal::Nil)) + } + } + + fn interpret_while( + &mut self, + stmt: &parser::While, + ) -> Result<Value, Error> { + let mut value = Value::Literal(Literal::Nil); + while eval_truthy(&self.eval(&stmt.condition)?) 
{ + value = self.interpret_stmt(&stmt.body)?; + } + + Ok(value) + } + + fn interpret_function( + &mut self, + func: Rc<parser::Function>, + ) -> Result<Value, Error> { + let name = func.name.clone(); + let value = Value::Callable(Callable::Function { + func, + closure: self.env.clone(), + }); + self.define_var(&name, value.clone())?; + Ok(value) + } + + fn eval(&mut self, expr: &Expr) -> Result<Value, Error> { + match expr { + Expr::Assign(assign) => self.eval_assign(assign), + Expr::Literal(lit) => Ok(lit.clone().into()), + Expr::Grouping(grouping) => self.eval(&*grouping.0), + Expr::Unary(unary) => self.eval_unary(unary), + Expr::Binary(binary) => self.eval_binary(binary), + Expr::Variable(var) => self.get_var(var), + Expr::Logical(log) => self.eval_logical(log), + Expr::Call(call) => self.eval_call(call), + } + } + + fn eval_unary(&mut self, expr: &parser::Unary) -> Result<Value, Error> { + let right = self.eval(&*expr.right)?; + + match (&expr.operator.kind, right) { + (TokenKind::Minus, Value::Literal(Literal::Number(num))) => { + Ok(Literal::Number(-num).into()) + } + (TokenKind::Bang, right) => { + Ok(Literal::Boolean(!eval_truthy(&right)).into()) + } + + (op, right) => Err(Error { + line: expr.operator.line, + kind: ErrorKind::TypeError(format!( + "Operator '{:?}' can not be called with argument '{:?}'", + op, right + )), + }), + } + } + + fn eval_binary(&mut self, expr: &parser::Binary) -> Result<Value, Error> { + let left = self.eval(&*expr.left)?.expect_literal()?; + let right = self.eval(&*expr.right)?.expect_literal()?; + + let result = match (&expr.operator.kind, left, right) { + // Numeric + (TokenKind::Minus, Literal::Number(l), Literal::Number(r)) => Literal::Number(l - r), + (TokenKind::Slash, Literal::Number(l), Literal::Number(r)) => Literal::Number(l / r), + (TokenKind::Star, Literal::Number(l), Literal::Number(r)) => Literal::Number(l * r), + (TokenKind::Plus, Literal::Number(l), Literal::Number(r)) => Literal::Number(l + r), + + // Strings + (TokenKind::Plus, Literal::String(l), Literal::String(r)) => { + Literal::String(format!("{}{}", l, r)) + } + + // Comparators (on numbers only?) 
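+            // (yes: as written, the comparison operators only accept numbers;
+            // any other operand types fall through to the TypeError arm below)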
+ (TokenKind::Greater, Literal::Number(l), Literal::Number(r)) => Literal::Boolean(l > r), + (TokenKind::GreaterEqual, Literal::Number(l), Literal::Number(r)) => { + Literal::Boolean(l >= r) + } + (TokenKind::Less, Literal::Number(l), Literal::Number(r)) => Literal::Boolean(l < r), + (TokenKind::LessEqual, Literal::Number(l), Literal::Number(r)) => { + Literal::Boolean(l <= r) + } + + // Equality + (TokenKind::Equal, l, r) => Literal::Boolean(l == r), + (TokenKind::BangEqual, l, r) => Literal::Boolean(l != r), + + (op, left, right) => { + return Err(Error { + line: expr.operator.line, + kind: ErrorKind::TypeError(format!( + "Operator '{:?}' can not be called with arguments '({:?}, {:?})'", + op, left, right + )), + }) + } + }; + + Ok(result.into()) + } + + fn eval_assign(&mut self, assign: &parser::Assign) -> Result<Value, Error> { + let value = self.eval(&assign.value)?; + self.assign_var(&assign.name, value.clone())?; + Ok(value) + } + + fn eval_logical( + &mut self, + logical: &parser::Logical, + ) -> Result<Value, Error> { + let left = eval_truthy(&self.eval(&logical.left)?); + let right = eval_truthy(&self.eval(&logical.right)?); + + match &logical.operator.kind { + TokenKind::And => Ok(Literal::Boolean(left && right).into()), + TokenKind::Or => Ok(Literal::Boolean(left || right).into()), + kind => Err(Error { + line: logical.operator.line, + kind: ErrorKind::InternalError(format!( + "Invalid logical operator: {:?}", + kind + )), + }), + } + } + + fn eval_call(&mut self, call: &parser::Call) -> Result<Value, Error> { + let callable = match self.eval(&call.callee)? { + Value::Callable(c) => c, + Value::Literal(v) => { + return Err(Error { + line: call.paren.line, + kind: ErrorKind::RuntimeError(format!( + "not callable: {:?}", + v + )), + }) + } + }; + + let mut args = vec![]; + for arg in &call.args { + args.push(self.eval(arg)?); + } + + if callable.arity() != args.len() { + return Err(Error { + line: call.paren.line, + kind: ErrorKind::RuntimeError(format!( + "Expected {} arguments, but got {}", + callable.arity(), + args.len(), + )), + }); + } + + callable.call(self, args) + } +} + +// Interpreter functions not dependent on interpreter-state. + +fn eval_truthy(lit: &Value) -> bool { + if let Value::Literal(lit) = lit { + match lit { + Literal::Nil => false, + Literal::Boolean(b) => *b, + _ => true, + } + } else { + false + } +} + +fn set_enclosing_env( + this: &RwLock<Environment>, + parent: Rc<RwLock<Environment>>, +) { + this.write() + .expect("environment lock is poisoned") + .enclosing = Some(parent); +} diff --git a/users/tazjin/rlox/src/treewalk/interpreter/builtins.rs b/users/tazjin/rlox/src/treewalk/interpreter/builtins.rs new file mode 100644 index 000000000000..c502d2a1718a --- /dev/null +++ b/users/tazjin/rlox/src/treewalk/interpreter/builtins.rs @@ -0,0 +1,25 @@ +use std::fmt; +use std::time::{SystemTime, UNIX_EPOCH}; + +use crate::treewalk::errors::Error; +use crate::treewalk::interpreter::Value; +use crate::treewalk::parser::Literal; + +pub trait Builtin: fmt::Debug { + fn arity(&self) -> usize; + fn call(&self, args: Vec<Value>) -> Result<Value, Error>; +} + +// Builtin to return the current timestamp. 
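+// In Lox code, `clock()` takes no arguments and evaluates to the current Unix
+// time in whole seconds, as a Number.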
+#[derive(Debug)] +pub struct Clock {} +impl Builtin for Clock { + fn arity(&self) -> usize { + 0 + } + + fn call(&self, _args: Vec<Value>) -> Result<Value, Error> { + let now = SystemTime::now().duration_since(UNIX_EPOCH).unwrap(); + Ok(Value::Literal(Literal::Number(now.as_secs() as f64))) + } +} diff --git a/users/tazjin/rlox/src/treewalk/interpreter/tests.rs b/users/tazjin/rlox/src/treewalk/interpreter/tests.rs new file mode 100644 index 000000000000..2fc6f4fee978 --- /dev/null +++ b/users/tazjin/rlox/src/treewalk/interpreter/tests.rs @@ -0,0 +1,97 @@ +use super::*; + +/// Evaluate a code snippet, returning a value. +fn parse_eval(code: &str) -> Value { + Interpreter::create() + .interpret(code.into()) + .expect("could not interpret code") +} + +#[test] +fn test_if() { + let result = parse_eval( + r#" +if (42 > 23) + "pass"; +else + "fail"; +"#, + ); + + assert_eq!(Value::Literal(Literal::String("pass".into())), result,); +} + +#[test] +fn test_scope() { + let result = parse_eval( + r#" +var result = ""; + +var a = "global a, "; +var b = "global b, "; +var c = "global c"; + +{ + var a = "outer a, "; + var b = "outer b, "; + + { + var a = "inner a, "; + result = a + b + c; + } +} +"#, + ); + + assert_eq!( + Value::Literal(Literal::String("inner a, outer b, global c".into())), + result, + ); +} + +#[test] +fn test_binary_operators() { + assert_eq!(Value::Literal(Literal::Number(42.0)), parse_eval("40 + 2;")); + + assert_eq!( + Value::Literal(Literal::String("foobar".into())), + parse_eval("\"foo\" + \"bar\";") + ); +} + +#[test] +fn test_functions() { + let result = parse_eval( + r#" +fun add(a, b, c) { + a + b + c; +} + +add(1, 2, 3); +"#, + ); + + assert_eq!(Value::Literal(Literal::Number(6.0)), result); +} + +#[test] +fn test_closure() { + let result = parse_eval( + r#" +fun makeCounter() { + var i = 0; + fun count() { + i = i + 1; + } + + return count; +} + +var counter = makeCounter(); +counter(); // "1". +counter(); // "2". +"#, + ); + + assert_eq!(Value::Literal(Literal::Number(2.0)), result); +} diff --git a/users/tazjin/rlox/src/treewalk/mod.rs b/users/tazjin/rlox/src/treewalk/mod.rs new file mode 100644 index 000000000000..2d82b3320a90 --- /dev/null +++ b/users/tazjin/rlox/src/treewalk/mod.rs @@ -0,0 +1,6 @@ +use crate::scanner; + +mod errors; +pub mod interpreter; +mod parser; +mod resolver; diff --git a/users/tazjin/rlox/src/treewalk/parser.rs b/users/tazjin/rlox/src/treewalk/parser.rs new file mode 100644 index 000000000000..003cc34b4665 --- /dev/null +++ b/users/tazjin/rlox/src/treewalk/parser.rs @@ -0,0 +1,716 @@ +// This implements the grammar of Lox as described starting in the +// Crafting Interpreters chapter "Representing Code". Note that the +// upstream Java implementation works around Java being bad at value +// classes by writing a code generator for Java. +// +// My Rust implementation skips this step because it's unnecessary, we +// have real types. 
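+//
+// As a sketch of what these types produce (Token fields elided for brevity):
+// `var x = 1 + 2;` parses to roughly
+//
+//     Statement::Var(Var {
+//         name: /* Token { kind: Identifier("x"), .. } */,
+//         initialiser: Some(Expr::Binary(Binary {
+//             left: Box::new(Expr::Literal(Literal::Number(1.0))),
+//             operator: /* Token { kind: Plus, .. } */,
+//             right: Box::new(Expr::Literal(Literal::Number(2.0))),
+//         })),
+//     })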
+use crate::treewalk::errors::{Error, ErrorKind}; +use crate::treewalk::scanner::{Token, TokenKind}; +use std::rc::Rc; + +// AST + +#[derive(Debug)] +pub struct Assign { + pub name: Token, + pub value: Box<Expr>, + pub depth: Option<usize>, +} + +#[derive(Debug)] +pub struct Binary { + pub left: Box<Expr>, + pub operator: Token, + pub right: Box<Expr>, +} + +#[derive(Debug)] +pub struct Logical { + pub left: Box<Expr>, + pub operator: Token, + pub right: Box<Expr>, +} + +#[derive(Debug)] +pub struct Grouping(pub Box<Expr>); + +#[derive(Debug, Clone, PartialEq)] +pub enum Literal { + Boolean(bool), + Number(f64), + String(String), + Nil, +} + +#[derive(Debug)] +pub struct Unary { + pub operator: Token, + pub right: Box<Expr>, +} + +#[derive(Debug)] +pub struct Call { + pub callee: Box<Expr>, + pub paren: Token, + pub args: Vec<Expr>, +} + +// Not to be confused with `Var`, which is for assignment. +#[derive(Debug)] +pub struct Variable { + pub name: Token, + pub depth: Option<usize>, +} + +#[derive(Debug)] +pub enum Expr { + Assign(Assign), + Binary(Binary), + Grouping(Grouping), + Literal(Literal), + Unary(Unary), + Call(Call), + Variable(Variable), + Logical(Logical), +} + +// Variable assignment. Not to be confused with `Variable`, which is +// for access. +#[derive(Debug)] +pub struct Var { + pub name: Token, + pub initialiser: Option<Expr>, +} + +#[derive(Debug)] +pub struct Return { + pub value: Expr, +} + +#[derive(Debug)] +pub struct If { + pub condition: Expr, + pub then_branch: Box<Statement>, + pub else_branch: Option<Box<Statement>>, +} + +#[derive(Debug)] +pub struct While { + pub condition: Expr, + pub body: Box<Statement>, +} + +pub type Block = Vec<Statement>; + +#[derive(Debug)] +pub struct Function { + pub name: Token, + pub params: Vec<Token>, + pub body: Block, +} + +#[derive(Debug)] +pub enum Statement { + Expr(Expr), + Print(Expr), + Var(Var), + Block(Block), + If(If), + While(While), + Function(Rc<Function>), + Return(Return), +} + +// Parser + +/* +program → declaration* EOF ; + +declaration → funDecl + | varDecl + | statement ; + +funDecl → "fun" function ; +function → IDENTIFIER "(" parameters? ")" block ; +parameters → IDENTIFIER ( "," IDENTIFIER )* ; + + +statement → exprStmt + | forStmt + | ifStmt + | printStmt + | returnStmt + | whileStmt + | block ; + +forStmt → "for" "(" ( varDecl | exprStmt | ";" ) + expression? ";" + expression? ")" statement ; + +returnStmt → "return" expression? ";" ; + +whileStmt → "while" "(" expression ")" statement ; + +exprStmt → expression ";" ; + +ifStmt → "if" "(" expression ")" statement + ( "else" statement )? ; + +printStmt → "print" expression ";" ; + +expression → assignment ; +assignment → IDENTIFIER "=" assignment + | logic_or ; +logic_or → logic_and ( "or" logic_and )* ; +logic_and → equality ( "and" equality )* ; +equality → comparison ( ( "!=" | "==" ) comparison )* ; +comparison → term ( ( ">" | ">=" | "<" | "<=" ) term )* ; +term → factor ( ( "-" | "+" ) factor )* ; +factor → unary ( ( "/" | "*" ) unary )* ; +unary → ( "!" | "-" ) unary | call ; +call → primary ( "(" arguments? 
")" )* ; +arguments → expression ( "," expression )* ; +primary → NUMBER | STRING | "true" | "false" | "nil" + | "(" expression ")" ; +*/ + +struct Parser { + tokens: Vec<Token>, + current: usize, +} + +type ExprResult = Result<Expr, Error>; +type StmtResult = Result<Statement, Error>; + +impl Parser { + // recursive-descent parser functions + + fn declaration(&mut self) -> StmtResult { + if self.match_token(&TokenKind::Fun) { + return self.function(); + } + + if self.match_token(&TokenKind::Var) { + return self.var_declaration(); + } + + self.statement() + } + + fn function(&mut self) -> StmtResult { + let name = self.identifier("Expected function name.")?; + + self.consume( + &TokenKind::LeftParen, + ErrorKind::ExpectedToken("Expect '(' after function name."), + )?; + + let mut params = vec![]; + + if !self.check_token(&TokenKind::RightParen) { + loop { + if params.len() >= 255 { + return Err(Error { + line: self.peek().line, + kind: ErrorKind::InternalError( + "255 parameter limit exceeded.".into(), + ), + }); + } + + params.push(self.identifier("Expected parameter name.")?); + + if !self.match_token(&TokenKind::Comma) { + break; + } + } + } + + self.consume( + &TokenKind::RightParen, + ErrorKind::ExpectedToken("Expect ')' after parameters."), + )?; + + self.consume( + &TokenKind::LeftBrace, + ErrorKind::ExpectedToken("Expect '{' before function body."), + )?; + + Ok(Statement::Function(Rc::new(Function { + name, + params, + body: self.block_statement()?, + }))) + } + + fn var_declaration(&mut self) -> StmtResult { + // Since `TokenKind::Identifier` carries data, we can't use + // `consume`. + let mut var = Var { + name: self.identifier("Expected variable name.")?, + initialiser: None, + }; + + if self.match_token(&TokenKind::Equal) { + var.initialiser = Some(self.expression()?); + } + + self.consume(&TokenKind::Semicolon, ErrorKind::ExpectedSemicolon)?; + Ok(Statement::Var(var)) + } + + fn statement(&mut self) -> StmtResult { + if self.match_token(&TokenKind::Print) { + self.print_statement() + } else if self.match_token(&TokenKind::LeftBrace) { + Ok(Statement::Block(self.block_statement()?)) + } else if self.match_token(&TokenKind::If) { + self.if_statement() + } else if self.match_token(&TokenKind::While) { + self.while_statement() + } else if self.match_token(&TokenKind::For) { + self.for_statement() + } else if self.match_token(&TokenKind::Return) { + self.return_statement() + } else { + self.expr_statement() + } + } + + fn print_statement(&mut self) -> StmtResult { + let expr = self.expression()?; + self.consume(&TokenKind::Semicolon, ErrorKind::ExpectedSemicolon)?; + Ok(Statement::Print(expr)) + } + + fn block_statement(&mut self) -> Result<Block, Error> { + let mut block: Block = vec![]; + + while !self.check_token(&TokenKind::RightBrace) && !self.is_at_end() { + block.push(self.declaration()?); + } + + self.consume(&TokenKind::RightBrace, ErrorKind::ExpectedClosingBrace)?; + + Ok(block) + } + + fn if_statement(&mut self) -> StmtResult { + self.consume( + &TokenKind::LeftParen, + ErrorKind::ExpectedToken("Expected '(' after 'if'"), + )?; + let condition = self.expression()?; + self.consume( + &TokenKind::RightParen, + ErrorKind::ExpectedToken("Expected ')' after condition"), + )?; + + let then_branch = Box::new(self.statement()?); + + let mut stmt = If { + condition, + then_branch, + else_branch: Option::None, + }; + + if self.match_token(&TokenKind::Else) { + stmt.else_branch = Some(Box::new(self.statement()?)); + } + + Ok(Statement::If(stmt)) + } + + fn while_statement(&mut 
self) -> StmtResult { + self.consume( + &TokenKind::LeftParen, + ErrorKind::ExpectedToken("Expected '(' after 'while'"), + )?; + + let condition = self.expression()?; + + self.consume( + &TokenKind::RightParen, + ErrorKind::ExpectedToken("Expected ')' after 'while'"), + )?; + + Ok(Statement::While(While { + condition, + body: Box::new(self.statement()?), + })) + } + + fn for_statement(&mut self) -> StmtResult { + // Parsing of clauses ... + self.consume( + &TokenKind::LeftParen, + ErrorKind::ExpectedToken("Expected '(' after 'for'"), + )?; + + let initialiser = if self.match_token(&TokenKind::Semicolon) { + None + } else if self.match_token(&TokenKind::Var) { + Some(self.var_declaration()?) + } else { + Some(self.expr_statement()?) + }; + + let condition = if self.check_token(&TokenKind::Semicolon) { + // unspecified condition => infinite loop + Expr::Literal(Literal::Boolean(true)) + } else { + self.expression()? + }; + + self.consume(&TokenKind::Semicolon, ErrorKind::ExpectedSemicolon)?; + + let increment = if self.check_token(&TokenKind::RightParen) { + None + } else { + Some(self.expression()?) + }; + + self.consume( + &TokenKind::RightParen, + ErrorKind::ExpectedToken("Expected ')' after for clauses"), + )?; + + let mut body = self.statement()?; + + // ... desugaring to while + + if let Some(inc) = increment { + body = Statement::Block(vec![body, Statement::Expr(inc)]); + } + + body = Statement::While(While { + condition, + body: Box::new(body), + }); + + if let Some(init) = initialiser { + body = Statement::Block(vec![init, body]); + } + + Ok(body) + } + + fn return_statement(&mut self) -> StmtResult { + let value = self.expression()?; + self.consume(&TokenKind::Semicolon, ErrorKind::ExpectedSemicolon)?; + Ok(Statement::Return(Return { value })) + } + + fn expr_statement(&mut self) -> StmtResult { + let expr = self.expression()?; + self.consume(&TokenKind::Semicolon, ErrorKind::ExpectedSemicolon)?; + Ok(Statement::Expr(expr)) + } + + fn expression(&mut self) -> ExprResult { + self.assignment() + } + + fn assignment(&mut self) -> ExprResult { + let expr = self.logic_or()?; + + if self.match_token(&TokenKind::Equal) { + let equals = self.previous().clone(); + let value = self.assignment()?; + + if let Expr::Variable(Variable { name, .. 
}) = expr { + return Ok(Expr::Assign(Assign { + name, + value: Box::new(value), + depth: None, + })); + } + + return Err(Error { + line: equals.line, + kind: ErrorKind::InvalidAssignmentTarget(format!( + "{:?}", + equals + )), + }); + } + + Ok(expr) + } + + fn logic_or(&mut self) -> ExprResult { + let mut expr = self.logic_and()?; + + while self.match_token(&TokenKind::Or) { + expr = Expr::Logical(Logical { + left: Box::new(expr), + operator: self.previous().clone(), + right: Box::new(self.logic_and()?), + }) + } + + Ok(expr) + } + + fn logic_and(&mut self) -> ExprResult { + let mut expr = self.equality()?; + + while self.match_token(&TokenKind::And) { + expr = Expr::Logical(Logical { + left: Box::new(expr), + operator: self.previous().clone(), + right: Box::new(self.equality()?), + }) + } + + Ok(expr) + } + + fn equality(&mut self) -> ExprResult { + self.binary_operator( + &[TokenKind::BangEqual, TokenKind::EqualEqual], + Self::comparison, + ) + } + + fn comparison(&mut self) -> ExprResult { + self.binary_operator( + &[ + TokenKind::Greater, + TokenKind::GreaterEqual, + TokenKind::Less, + TokenKind::LessEqual, + ], + Self::term, + ) + } + + fn term(&mut self) -> ExprResult { + self.binary_operator(&[TokenKind::Minus, TokenKind::Plus], Self::factor) + } + + fn factor(&mut self) -> ExprResult { + self.binary_operator(&[TokenKind::Slash, TokenKind::Star], Self::unary) + } + + fn unary(&mut self) -> ExprResult { + if self.match_token(&TokenKind::Bang) + || self.match_token(&TokenKind::Minus) + { + return Ok(Expr::Unary(Unary { + operator: self.previous().clone(), + right: Box::new(self.unary()?), + })); + } + + return self.call(); + } + + fn call(&mut self) -> ExprResult { + let mut expr = self.primary()?; + + loop { + if self.match_token(&TokenKind::LeftParen) { + expr = self.finish_call(expr)?; + } else { + break; + } + } + + Ok(expr) + } + + fn finish_call(&mut self, callee: Expr) -> ExprResult { + let mut args = vec![]; + + if !self.check_token(&TokenKind::RightParen) { + loop { + // TODO(tazjin): Check for max args count + args.push(self.expression()?); + if !self.match_token(&TokenKind::Comma) { + break; + } + } + } + + let paren = self.consume( + &TokenKind::RightParen, + ErrorKind::ExpectedToken("Expect ')' after arguments."), + )?; + + Ok(Expr::Call(Call { + args, + callee: Box::new(callee), + paren, + })) + } + + fn primary(&mut self) -> ExprResult { + let next = self.advance(); + let literal = match next.kind { + TokenKind::True => Literal::Boolean(true), + TokenKind::False => Literal::Boolean(false), + TokenKind::Nil => Literal::Nil, + TokenKind::Number(num) => Literal::Number(num), + TokenKind::String(string) => Literal::String(string), + + TokenKind::LeftParen => { + let expr = self.expression()?; + self.consume( + &TokenKind::RightParen, + ErrorKind::UnmatchedParens, + )?; + return Ok(Expr::Grouping(Grouping(Box::new(expr)))); + } + + TokenKind::Identifier(_) => { + return Ok(Expr::Variable(Variable { + name: next, + depth: None, + })) + } + + unexpected => { + eprintln!("encountered {:?}", unexpected); + return Err(Error { + line: next.line, + kind: ErrorKind::ExpectedExpression(next.lexeme), + }); + } + }; + + Ok(Expr::Literal(literal)) + } + + // internal helpers + + fn identifier(&mut self, err: &'static str) -> Result<Token, Error> { + if let TokenKind::Identifier(_) = self.peek().kind { + Ok(self.advance()) + } else { + Err(Error { + line: self.peek().line, + kind: ErrorKind::ExpectedToken(err), + }) + } + } + + /// Check if the next token is in `oneof`, and advance if it 
is. + fn match_token(&mut self, token: &TokenKind) -> bool { + if self.check_token(token) { + self.advance(); + return true; + } + + false + } + + /// Return the next token and advance parser state. + fn advance(&mut self) -> Token { + if !self.is_at_end() { + self.current += 1; + } + + return self.previous().clone(); + } + + fn is_at_end(&self) -> bool { + self.check_token(&TokenKind::Eof) + } + + /// Is the next token `token`? + fn check_token(&self, token: &TokenKind) -> bool { + self.peek().kind == *token + } + + fn peek(&self) -> &Token { + &self.tokens[self.current] + } + + fn previous(&self) -> &Token { + &self.tokens[self.current - 1] + } + + fn consume( + &mut self, + kind: &TokenKind, + err: ErrorKind, + ) -> Result<Token, Error> { + if self.check_token(kind) { + return Ok(self.advance()); + } + + Err(Error { + line: self.peek().line, + kind: err, + }) + } + + fn synchronise(&mut self) { + self.advance(); + + while !self.is_at_end() { + if self.previous().kind == TokenKind::Semicolon { + return; + } + + match self.peek().kind { + TokenKind::Class + | TokenKind::Fun + | TokenKind::Var + | TokenKind::For + | TokenKind::If + | TokenKind::While + | TokenKind::Print + | TokenKind::Return => return, + + _ => { + self.advance(); + } + } + } + } + + fn binary_operator( + &mut self, + oneof: &[TokenKind], + each: fn(&mut Parser) -> ExprResult, + ) -> ExprResult { + let mut expr = each(self)?; + + while oneof.iter().any(|t| self.match_token(t)) { + expr = Expr::Binary(Binary { + left: Box::new(expr), + operator: self.previous().clone(), + right: Box::new(each(self)?), + }) + } + + return Ok(expr); + } +} + +pub fn parse(tokens: Vec<Token>) -> Result<Block, Vec<Error>> { + let mut parser = Parser { tokens, current: 0 }; + let mut program: Block = vec![]; + let mut errors: Vec<Error> = vec![]; + + while !parser.is_at_end() { + match parser.declaration() { + Err(err) => { + errors.push(err); + parser.synchronise(); + } + Ok(decl) => { + program.push(decl); + } + } + } + + if errors.is_empty() { + Ok(program) + } else { + Err(errors) + } +} diff --git a/users/tazjin/rlox/src/treewalk/resolver.rs b/users/tazjin/rlox/src/treewalk/resolver.rs new file mode 100644 index 000000000000..8231ce5a9e58 --- /dev/null +++ b/users/tazjin/rlox/src/treewalk/resolver.rs @@ -0,0 +1,214 @@ +// Resolves variable access to their specific instances in the +// environment chain. 
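+//
+// For example, in `var a = 1; { { a; } }` the innermost read of `a` resolves
+// with a depth of 2: at runtime, Environment::get follows two `enclosing`
+// links up the chain before looking the name up.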
+// +// https://craftinginterpreters.com/resolving-and-binding.html + +use std::collections::HashMap; +use std::rc::Rc; + +use crate::treewalk::errors::{Error, ErrorKind}; +use crate::treewalk::parser::{self, Expr, Statement}; +use crate::treewalk::scanner::Token; + +#[derive(Default)] +struct Resolver<'a> { + scopes: Vec<HashMap<&'a str, bool>>, +} + +impl<'a> Resolver<'a> { + // AST traversal + fn resolve(&mut self, program: &'a mut parser::Block) -> Result<(), Error> { + self.begin_scope(); + for stmt in program { + self.resolve_stmt(stmt)?; + } + self.end_scope(); + + Ok(()) + } + + fn resolve_stmt(&mut self, stmt: &'a mut Statement) -> Result<(), Error> { + match stmt { + Statement::Expr(expr) => self.resolve_expr(expr), + Statement::Print(expr) => self.resolve_expr(expr), + Statement::Var(var) => self.resolve_var(var), + Statement::Return(ret) => self.resolve_expr(&mut ret.value), + Statement::Block(block) => self.resolve(block), + + Statement::If(if_stmt) => { + self.resolve_expr(&mut if_stmt.condition)?; + self.resolve_stmt(&mut if_stmt.then_branch)?; + + if let Some(branch) = if_stmt.else_branch.as_mut() { + self.resolve_stmt(branch)?; + } + + Ok(()) + } + + Statement::While(while_stmt) => { + self.resolve_expr(&mut while_stmt.condition)?; + self.resolve_stmt(&mut while_stmt.body) + } + + Statement::Function(func) => match Rc::get_mut(func) { + Some(func) => self.resolve_function(func), + // The resolver does not clone references, so unless + // the interpreter is called before the resolver this + // case should never happen. + None => return Err(Error { + line: 0, + kind: ErrorKind::InternalError( + "multiple function references before interpretation" + .into(), + ), + }), + }, + } + } + + fn resolve_var(&mut self, var: &'a mut parser::Var) -> Result<(), Error> { + self.declare(&var.name.lexeme); + + if let Some(init) = &mut var.initialiser { + self.resolve_expr(init)?; + } + + self.define(&var.name.lexeme); + + Ok(()) + } + + fn resolve_function( + &mut self, + func: &'a mut parser::Function, + ) -> Result<(), Error> { + self.declare(&func.name.lexeme); + self.define(&func.name.lexeme); + + self.begin_scope(); + + for param in &func.params { + self.declare(¶m.lexeme); + self.define(¶m.lexeme); + } + + for stmt in &mut func.body { + self.resolve_stmt(stmt)?; + } + + self.end_scope(); + + Ok(()) + } + + fn resolve_expr(&mut self, expr: &'a mut Expr) -> Result<(), Error> { + match expr { + Expr::Variable(var) => self.resolve_variable(var), + Expr::Assign(assign) => self.resolve_assign(assign), + Expr::Grouping(grouping) => self.resolve_expr(&mut grouping.0), + Expr::Call(call) => self.resolve_call(call), + Expr::Literal(_) => Ok(()), + Expr::Unary(unary) => self.resolve_expr(&mut unary.right), + + Expr::Logical(log) => { + self.resolve_expr(&mut log.left)?; + self.resolve_expr(&mut log.right) + } + + Expr::Binary(binary) => { + self.resolve_expr(&mut binary.left)?; + self.resolve_expr(&mut binary.right) + } + } + } + + fn resolve_variable( + &mut self, + var: &'a mut parser::Variable, + ) -> Result<(), Error> { + if let Some(scope) = self.scopes.last_mut() { + if let Some(false) = scope.get(var.name.lexeme.as_str()) { + return Err(Error { + line: var.name.line, + kind: ErrorKind::StaticError( + "can't read local variable in its own initialiser" + .into(), + ), + }); + } + } + + var.depth = self.resolve_local(&var.name); + Ok(()) + } + + fn resolve_assign( + &mut self, + assign: &'a mut parser::Assign, + ) -> Result<(), Error> { + self.resolve_expr(&mut assign.value)?; + 
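+        // resolve_local returns how many scopes separate this assignment from
+        // the target's declaration (None if it was never declared).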
assign.depth = self.resolve_local(&assign.name); + Ok(()) + } + + fn resolve_local(&mut self, name: &'a Token) -> Option<usize> { + for (c, scope) in self.scopes.iter().rev().enumerate() { + if scope.contains_key(name.lexeme.as_str()) { + return Some(c); + } + } + + None + } + + fn resolve_call( + &mut self, + call: &'a mut parser::Call, + ) -> Result<(), Error> { + self.resolve_expr(&mut call.callee)?; + + for arg in call.args.iter_mut() { + self.resolve_expr(arg)?; + } + + Ok(()) + } + + // Internal helpers + + fn declare(&mut self, name: &'a str) { + if let Some(scope) = self.scopes.last_mut() { + scope.insert(&name, false); + } + } + + fn define(&mut self, name: &'a str) { + if let Some(scope) = self.scopes.last_mut() { + scope.insert(&name, true); + } + } + + fn begin_scope(&mut self) { + self.scopes.push(Default::default()); + } + + fn end_scope(&mut self) { + self.scopes.pop(); + } +} + +pub fn resolve( + globals: &[String], + block: &mut parser::Block, +) -> Result<(), Error> { + let mut resolver: Resolver = Default::default(); + + // Scope for static globals only starts, never ends. + resolver.begin_scope(); + for global in globals { + resolver.define(global); + } + + resolver.resolve(block) +} diff --git a/users/tazjin/wallpapers/bio_thehost_1920.webp b/users/tazjin/wallpapers/bio_thehost_1920.webp new file mode 100644 index 000000000000..1b904c06fadf --- /dev/null +++ b/users/tazjin/wallpapers/bio_thehost_1920.webp Binary files differdiff --git a/users/tazjin/wallpapers/busride2_1920.webp b/users/tazjin/wallpapers/busride2_1920.webp new file mode 100644 index 000000000000..ad6ec446f661 --- /dev/null +++ b/users/tazjin/wallpapers/busride2_1920.webp Binary files differdiff --git a/users/tazjin/wallpapers/by_belltowers_2880.webp b/users/tazjin/wallpapers/by_belltowers_2880.webp new file mode 100644 index 000000000000..f7477f168914 --- /dev/null +++ b/users/tazjin/wallpapers/by_belltowers_2880.webp Binary files differdiff --git a/users/tazjin/wallpapers/by_crossing_2560.webp b/users/tazjin/wallpapers/by_crossing_2560.webp new file mode 100644 index 000000000000..efa263790b45 --- /dev/null +++ b/users/tazjin/wallpapers/by_crossing_2560.webp Binary files differdiff --git a/users/tazjin/wallpapers/by_gathering3_2880.webp b/users/tazjin/wallpapers/by_gathering3_2880.webp new file mode 100644 index 000000000000..e6b83bdcd430 --- /dev/null +++ b/users/tazjin/wallpapers/by_gathering3_2880.webp Binary files differdiff --git a/users/tazjin/wallpapers/by_mainservers1_1920.webp b/users/tazjin/wallpapers/by_mainservers1_1920.webp new file mode 100644 index 000000000000..f88d237e2b91 --- /dev/null +++ b/users/tazjin/wallpapers/by_mainservers1_1920.webp Binary files differdiff --git a/users/tazjin/wallpapers/by_warmachines1_2560.webp b/users/tazjin/wallpapers/by_warmachines1_2560.webp new file mode 100644 index 000000000000..848bf62bd7ed --- /dev/null +++ b/users/tazjin/wallpapers/by_warmachines1_2560.webp Binary files differdiff --git a/users/tazjin/wallpapers/by_warmachines3_1920.webp b/users/tazjin/wallpapers/by_warmachines3_1920.webp new file mode 100644 index 000000000000..6002ad695a1e --- /dev/null +++ b/users/tazjin/wallpapers/by_warmachines3_1920.webp Binary files differdiff --git a/users/tazjin/wallpapers/clever-man_2880.webp b/users/tazjin/wallpapers/clever-man_2880.webp new file mode 100644 index 000000000000..eb4d3f1bfa33 --- /dev/null +++ b/users/tazjin/wallpapers/clever-man_2880.webp Binary files differdiff --git a/users/tazjin/wallpapers/december1994_1920.webp 
b/users/tazjin/wallpapers/december1994_1920.webp new file mode 100644 index 000000000000..d2c4da80187c --- /dev/null +++ b/users/tazjin/wallpapers/december1994_1920.webp Binary files differdiff --git a/users/tazjin/wallpapers/flyby_1920.webp b/users/tazjin/wallpapers/flyby_1920.webp new file mode 100644 index 000000000000..8df5b1132e74 --- /dev/null +++ b/users/tazjin/wallpapers/flyby_1920.webp Binary files differdiff --git a/users/tazjin/wallpapers/gaussfraktarna_1920_badge.webp b/users/tazjin/wallpapers/gaussfraktarna_1920_badge.webp new file mode 100644 index 000000000000..3274a3a2d21d --- /dev/null +++ b/users/tazjin/wallpapers/gaussfraktarna_1920_badge.webp Binary files differdiff --git a/users/tazjin/wallpapers/kraftahq_1920.webp b/users/tazjin/wallpapers/kraftahq_1920.webp new file mode 100644 index 000000000000..62a6debf476f --- /dev/null +++ b/users/tazjin/wallpapers/kraftahq_1920.webp Binary files differdiff --git a/users/tazjin/wallpapers/peripheral2_1920.webp b/users/tazjin/wallpapers/peripheral2_1920.webp new file mode 100644 index 000000000000..e454072ac42a --- /dev/null +++ b/users/tazjin/wallpapers/peripheral2_1920.webp Binary files differdiff --git a/users/tazjin/wallpapers/ship14_1920.webp b/users/tazjin/wallpapers/ship14_1920.webp new file mode 100644 index 000000000000..502f5dac903e --- /dev/null +++ b/users/tazjin/wallpapers/ship14_1920.webp Binary files differdiff --git a/users/tazjin/wallpapers/shipyard_1920.webp b/users/tazjin/wallpapers/shipyard_1920.webp new file mode 100644 index 000000000000..3d4115305d10 --- /dev/null +++ b/users/tazjin/wallpapers/shipyard_1920.webp Binary files differdiff --git a/users/tazjin/wallpapers/specky_1920.webp b/users/tazjin/wallpapers/specky_1920.webp new file mode 100644 index 000000000000..b8246618bebc --- /dev/null +++ b/users/tazjin/wallpapers/specky_1920.webp Binary files differdiff --git a/users/tazjin/wallpapers/summerlove2_1920.webp b/users/tazjin/wallpapers/summerlove2_1920.webp new file mode 100644 index 000000000000..d64a1cb867ec --- /dev/null +++ b/users/tazjin/wallpapers/summerlove2_1920.webp Binary files differdiff --git a/users/tazjin/wallpapers/t50_1920_badge.webp b/users/tazjin/wallpapers/t50_1920_badge.webp new file mode 100644 index 000000000000..f8cb6107f30c --- /dev/null +++ b/users/tazjin/wallpapers/t50_1920_badge.webp Binary files differdiff --git a/users/tazjin/wallpapers/theflood1_1920.webp b/users/tazjin/wallpapers/theflood1_1920.webp new file mode 100644 index 000000000000..335efb057172 --- /dev/null +++ b/users/tazjin/wallpapers/theflood1_1920.webp Binary files differdiff --git a/users/tazjin/wallpapers/thelan_1920.webp b/users/tazjin/wallpapers/thelan_1920.webp new file mode 100644 index 000000000000..55e6c22ad212 --- /dev/null +++ b/users/tazjin/wallpapers/thelan_1920.webp Binary files differdiff --git a/users/tazjin/wallpapers/vadrare_1920_badge.webp b/users/tazjin/wallpapers/vadrare_1920_badge.webp new file mode 100644 index 000000000000..887c891da36c --- /dev/null +++ b/users/tazjin/wallpapers/vadrare_1920_badge.webp Binary files differdiff --git a/users/tvlbot.jpg b/users/tvlbot.jpg new file mode 100644 index 000000000000..f0811418dff5 --- /dev/null +++ b/users/tvlbot.jpg Binary files differ |