-rw-r--r--  corp/website/default.nix | 3
-rw-r--r--  default.nix | 19
-rw-r--r--  fun/gemma/default.nix | 3
-rw-r--r--  fun/idual/default.nix | 11
-rw-r--r--  fun/owothia/default.nix | 5
-rw-r--r--  fun/owothia/pkg.nix | 26
-rw-r--r--  fun/owothia/shell.nix | 2
-rw-r--r--  fun/uggc/default.nix | 3
-rw-r--r--  fun/🕰️/default.nix | 3
-rw-r--r--  nix/binify/default.nix | 2
-rw-r--r--  nix/buildGo/default.nix | 91
-rw-r--r--  nix/buildGo/example/default.nix | 5
-rw-r--r--  nix/buildGo/external/default.nix | 50
-rw-r--r--  nix/buildGo/proto.nix | 3
-rw-r--r--  nix/buildLisp/default.nix | 277
-rw-r--r--  nix/buildLisp/example/default.nix | 19
-rw-r--r--  nix/buildManPages/default.nix | 77
-rw-r--r--  nix/buildkite/default.nix | 312
-rw-r--r--  nix/drvSeqL/default.nix | 26
-rw-r--r--  nix/emptyDerivation/default.nix | 3
-rw-r--r--  nix/emptyDerivation/emptyDerivation.nix | 12
-rw-r--r--  nix/emptyDerivation/tests.nix | 18
-rw-r--r--  nix/escapeExecline/default.nix | 19
-rw-r--r--  nix/getBins/default.nix | 21
-rw-r--r--  nix/getBins/tests.nix | 20
-rw-r--r--  nix/mergePatch/default.nix | 140
-rw-r--r--  nix/netstring/attrsToKeyValList.nix | 4
-rw-r--r--  nix/nint/default.nix | 14
-rw-r--r--  nix/readTree/default.nix | 139
-rw-r--r--  nix/readTree/tests/default.nix | 18
-rw-r--r--  nix/readTree/tests/test-marker/directory-marked/default.nix | 2
-rw-r--r--  nix/readTree/tests/test-marker/directory-marked/nested/default.nix | 2
-rw-r--r--  nix/readTree/tests/test-marker/file-children/one.nix | 2
-rw-r--r--  nix/readTree/tests/test-marker/file-children/two.nix | 2
-rw-r--r--  nix/renderMarkdown/default.nix | 2
-rw-r--r--  nix/runExecline/default.nix | 5
-rw-r--r--  nix/runExecline/runExecline.nix | 64
-rw-r--r--  nix/runExecline/tests.nix | 121
-rw-r--r--  nix/runTestsuite/default.nix | 76
-rw-r--r--  nix/sparseTree/default.nix | 20
-rw-r--r--  nix/tag/default.nix | 84
-rw-r--r--  nix/tag/tests.nix | 44
-rw-r--r--  nix/tailscale/default.nix | 3
-rw-r--r--  nix/utils/default.nix | 39
-rw-r--r--  nix/utils/tests/default.nix | 54
-rw-r--r--  nix/writeElispBin/default.nix | 8
-rw-r--r--  nix/writeExecline/default.nix | 17
-rw-r--r--  nix/writeScript/default.nix | 20
-rw-r--r--  nix/writers/default.nix | 126
-rw-r--r--  nix/writers/tests/rust.nix | 40
-rw-r--r--  nix/yants/default.nix | 301
-rw-r--r--  nix/yants/tests/default.nix | 34
-rw-r--r--  ops/dns/default.nix | 5
-rw-r--r--  ops/glesys/default.nix | 2
-rw-r--r--  ops/journaldriver/default.nix | 4
-rw-r--r--  ops/keycloak/default.nix | 2
-rw-r--r--  ops/kontemplate/release.nix | 22
-rw-r--r--  ops/machines/whitby/default.nix | 141
-rw-r--r--  ops/modules/atward.nix | 3
-rw-r--r--  ops/modules/auto-deploy.nix | 3
-rw-r--r--  ops/modules/automatic-gc.nix | 3
-rw-r--r--  ops/modules/clbot.nix | 7
-rw-r--r--  ops/modules/default.nix | 2
-rw-r--r--  ops/modules/gerrit-queue.nix | 3
-rw-r--r--  ops/modules/git-serving.nix | 3
-rw-r--r--  ops/modules/irccat.nix | 3
-rw-r--r--  ops/modules/monorepo-gerrit.nix | 5
-rw-r--r--  ops/modules/nixery.nix | 3
-rw-r--r--  ops/modules/oauth2_proxy.nix | 3
-rw-r--r--  ops/modules/owothia.nix | 3
-rw-r--r--  ops/modules/panettone.nix | 40
-rw-r--r--  ops/modules/paroxysm.nix | 3
-rw-r--r--  ops/modules/quassel.nix | 5
-rw-r--r--  ops/modules/restic.nix | 3
-rw-r--r--  ops/modules/smtprelay.nix | 5
-rw-r--r--  ops/modules/sourcegraph.nix | 6
-rw-r--r--  ops/modules/tvl-buildkite.nix | 67
-rw-r--r--  ops/modules/tvl-slapd/default.nix | 5
-rw-r--r--  ops/pipelines/depot.nix | 8
-rw-r--r--  ops/secrets/mkSecrets.nix | 6
-rw-r--r--  ops/secrets/secrets.nix | 3
-rw-r--r--  third_party/abseil_cpp/default.nix | 16
-rw-r--r--  third_party/agenix/default.nix | 3
-rw-r--r--  third_party/arion/default.nix | 14
-rw-r--r--  third_party/bat_syntaxes/default.nix | 3
-rw-r--r--  third_party/cgit/default.nix | 3
-rw-r--r--  third_party/clj2nix/default.nix | 3
-rw-r--r--  third_party/default.nix | 55
-rw-r--r--  third_party/dhall/default.nix | 13
-rw-r--r--  third_party/elmPackages_0_18/default.nix | 14
-rw-r--r--  third_party/gerrit_plugins/builder.nix | 58
-rw-r--r--  third_party/gerrit_plugins/default.nix | 3
-rw-r--r--  third_party/gerrit_plugins/oauth/default.nix | 3
-rw-r--r--  third_party/git/default.nix | 4
-rw-r--r--  third_party/gitignoreSource/default.nix | 17
-rw-r--r--  third_party/gopkgs/github.com/charmbracelet/bubbletea/default.nix | 10
-rw-r--r--  third_party/grpc/default.nix | 2
-rw-r--r--  third_party/gtest/default.nix | 2
-rw-r--r--  third_party/josh/default.nix | 12
-rw-r--r--  third_party/lisp/bordeaux-threads.nix | 3
-rw-r--r--  third_party/lisp/cl-fad.nix | 4
-rw-r--r--  third_party/lisp/cl-json.nix | 23
-rw-r--r--  third_party/lisp/cl-plus-ssl.nix | 14
-rw-r--r--  third_party/lisp/cl-unicode.nix | 6
-rw-r--r--  third_party/lisp/cl-yacc.nix | 6
-rw-r--r--  third_party/lisp/closure-common.nix | 11
-rw-r--r--  third_party/lisp/easy-routes.nix | 3
-rw-r--r--  third_party/lisp/flexi-streams.nix | 2
-rw-r--r--  third_party/lisp/global-vars.nix | 2
-rw-r--r--  third_party/lisp/hunchentoot.nix | 5
-rw-r--r--  third_party/lisp/ironclad.nix | 5
-rw-r--r--  third_party/lisp/lass.nix | 3
-rw-r--r--  third_party/lisp/lisp-binary.nix | 16
-rw-r--r--  third_party/lisp/local-time.nix | 3
-rw-r--r--  third_party/lisp/nibbles.nix | 3
-rw-r--r--  third_party/lisp/postmodern.nix | 3
-rw-r--r--  third_party/lisp/routes.nix | 3
-rw-r--r--  third_party/lisp/s-xml/default.nix | 18
-rw-r--r--  third_party/lisp/trivial-ldap.nix | 6
-rw-r--r--  third_party/lisp/trivial-mimes.nix | 5
-rw-r--r--  third_party/lisp/uax-15.nix | 5
-rw-r--r--  third_party/lisp/unix-opts.nix | 2
-rw-r--r--  third_party/lisp/usocket-server.nix | 3
-rw-r--r--  third_party/lisp/usocket.nix | 3
-rw-r--r--  third_party/naersk/default.nix | 14
-rw-r--r--  third_party/nix/corepkgs/buildenv.nix | 12
-rw-r--r--  third_party/nix/corepkgs/derivation.nix | 9
-rw-r--r--  third_party/nix/corepkgs/fetchurl.nix | 19
-rw-r--r--  third_party/nix/corepkgs/imported-drv-to-derivation.nix | 13
-rw-r--r--  third_party/nix/default.nix | 58
-rw-r--r--  third_party/nix/test-vm.nix | 3
-rw-r--r--  third_party/nixery/default.nix | 3
-rw-r--r--  third_party/nixpkgs/default.nix | 9
-rw-r--r--  third_party/overlays/emacs.nix | 3
-rw-r--r--  third_party/overlays/haskell/default.nix | 3
-rw-r--r--  third_party/overlays/tvl.nix | 10
-rw-r--r--  third_party/prometheus-fail2ban-exporter/default.nix | 3
-rw-r--r--  third_party/python/broadlink/default.nix | 3
-rw-r--r--  third_party/rust-crates/default.nix | 22
-rw-r--r--  third_party/rustsec-advisory-db/default.nix | 2
-rw-r--r--  tools/depot-nixpkgs-update.nix | 4
-rw-r--r--  tools/depot-scanner/default.nix | 22
-rw-r--r--  tools/depotfmt.nix | 3
-rw-r--r--  tools/emacs-pkgs/buildEmacsPackage.nix | 30
-rw-r--r--  tools/emacs-pkgs/notable/default.nix | 4
-rw-r--r--  tools/eprintf.nix | 12
-rw-r--r--  tools/nsfv-setup/default.nix | 3
-rw-r--r--  tools/rust-crates-advisory/default.nix | 160
-rw-r--r--  tools/tvlc/default.nix | 3
-rw-r--r--  tvix/default.nix | 4
-rw-r--r--  tvix/docs/default.nix | 9
-rw-r--r--  tvix/proto/default.nix | 2
-rw-r--r--  tvix/shell.nix | 5
-rw-r--r--  users/Profpatsch/arglib/netencode.nix | 18
-rw-r--r--  users/Profpatsch/atomically-write.nix | 3
-rw-r--r--  users/Profpatsch/blog/default.nix | 338
-rw-r--r--  users/Profpatsch/cdb.nix | 20
-rw-r--r--  users/Profpatsch/emacs-tree-sitter-move/default.nix | 4
-rw-r--r--  users/Profpatsch/emacs-tree-sitter-move/shell.nix | 7
-rw-r--r--  users/Profpatsch/execline/default.nix | 24
-rw-r--r--  users/Profpatsch/git-db/default.nix | 6
-rw-r--r--  users/Profpatsch/imap-idle.nix | 23
-rw-r--r--  users/Profpatsch/lens.nix | 7
-rw-r--r--  users/Profpatsch/lib.nix | 79
-rw-r--r--  users/Profpatsch/netencode/default.nix | 105
-rw-r--r--  users/Profpatsch/netencode/gen.nix | 40
-rw-r--r--  users/Profpatsch/netstring/default.nix | 19
-rw-r--r--  users/Profpatsch/netstring/tests/default.nix | 29
-rw-r--r--  users/Profpatsch/nix-home/default.nix | 180
-rw-r--r--  users/Profpatsch/nixpkgs-rewriter/default.nix | 128
-rw-r--r--  users/Profpatsch/read-http.nix | 25
-rw-r--r--  users/Profpatsch/reverse-haskell-deps.nix | 25
-rw-r--r--  users/Profpatsch/struct-edit/default.nix | 20
-rw-r--r--  users/Profpatsch/tree-sitter.nix | 176
-rw-r--r--  users/Profpatsch/writers/default.nix | 115
-rw-r--r--  users/Profpatsch/writers/tests/default.nix | 40
-rw-r--r--  users/Profpatsch/ytextr/create-symlink-farm.nix | 13
-rw-r--r--  users/Profpatsch/ytextr/default.nix | 91
-rw-r--r--  users/cynthia/keys.nix | 2
-rw-r--r--  users/edef/depot-scan/wrap.nix | 7
-rw-r--r--  users/eta/keys.nix | 2
-rw-r--r--  users/grfn/achilles/shell.nix | 2
-rw-r--r--  users/grfn/bbbg/arion-pkgs.nix | 2
-rw-r--r--  users/grfn/bbbg/default.nix | 11
-rw-r--r--  users/grfn/bbbg/deps.nix | 2953
-rw-r--r--  users/grfn/bbbg/module.nix | 9
-rw-r--r--  users/grfn/bbbg/shell.nix | 2
-rw-r--r--  users/grfn/bbbg/tf.nix | 13
-rw-r--r--  users/grfn/gws.fyi/default.nix | 12
-rw-r--r--  users/grfn/gws.fyi/orgExportHTML.nix | 13
-rw-r--r--  users/grfn/gws.fyi/site.nix | 2
-rw-r--r--  users/grfn/resume/default.nix | 53
-rw-r--r--  users/grfn/secrets/shell.nix | 2
-rw-r--r--  users/grfn/system/home/common/solarized.nix | 30
-rw-r--r--  users/grfn/system/home/machines/roswell.nix | 3
-rw-r--r--  users/grfn/system/home/machines/yeren.nix | 8
-rw-r--r--  users/grfn/system/home/modules/alsi.nix | 46
-rw-r--r--  users/grfn/system/home/modules/common.nix | 3
-rw-r--r--  users/grfn/system/home/modules/development.nix | 18
-rw-r--r--  users/grfn/system/home/modules/development/kube.nix | 2
-rw-r--r--  users/grfn/system/home/modules/emacs.nix | 21
-rw-r--r--  users/grfn/system/home/modules/email.nix | 64
-rw-r--r--  users/grfn/system/home/modules/games.nix | 9
-rw-r--r--  users/grfn/system/home/modules/i3.nix | 324
-rw-r--r--  users/grfn/system/home/modules/lib/cloneRepo.nix | 96
-rw-r--r--  users/grfn/system/home/modules/lib/zshFunctions.nix | 14
-rw-r--r--  users/grfn/system/home/modules/obs.nix | 4
-rw-r--r--  users/grfn/system/home/modules/rtlsdr.nix | 14
-rw-r--r--  users/grfn/system/home/modules/shell.nix | 5
-rw-r--r--  users/grfn/system/home/modules/tarsnap.nix | 80
-rw-r--r--  users/grfn/system/home/platforms/darwin.nix | 2
-rw-r--r--  users/grfn/system/system/iso.nix | 3
-rw-r--r--  users/grfn/system/system/machines/mugwump.nix | 156
-rw-r--r--  users/grfn/system/system/modules/common.nix | 2
-rw-r--r--  users/grfn/system/system/modules/fonts.nix | 2
-rw-r--r--  users/grfn/system/system/modules/reusable/battery.nix | 14
-rw-r--r--  users/grfn/system/system/modules/tvl.nix | 4
-rw-r--r--  users/grfn/system/system/modules/work/kolide.nix | 10
-rw-r--r--  users/grfn/terraform/globals.nix | 19
-rw-r--r--  users/grfn/terraform/nixosMachine.nix | 71
-rw-r--r--  users/grfn/terraform/workspace.nix | 25
-rw-r--r--  users/grfn/xanthous/default.nix | 9
-rw-r--r--  users/grfn/xanthous/pkg.nix | 387
-rw-r--r--  users/grfn/xanthous/server/default.nix | 3
-rw-r--r--  users/grfn/xanthous/server/docker.nix | 8
-rw-r--r--  users/grfn/xanthous/server/module.nix | 3
-rw-r--r--  users/grfn/xanthous/server/shell.nix | 2
-rw-r--r--  users/grfn/xanthous/shell.nix | 4
-rw-r--r--  users/riking/adventofcode-2020/day01/default.nix | 2
-rw-r--r--  users/riking/keys.nix | 6
-rw-r--r--  users/sterni/clhs-lookup/default.nix | 2
-rw-r--r--  users/sterni/dot-time-man-pages/default.nix | 12
-rw-r--r--  users/sterni/exercises/aoc/2021/default.nix | 2
-rw-r--r--  users/sterni/htmlman/default.nix | 188
-rw-r--r--  users/sterni/nix/char/default.nix | 32
-rw-r--r--  users/sterni/nix/char/tests/default.nix | 6
-rw-r--r--  users/sterni/nix/flow/default.nix | 9
-rw-r--r--  users/sterni/nix/flow/tests/default.nix | 8
-rw-r--r--  users/sterni/nix/fun/tests/default.nix | 6
-rw-r--r--  users/sterni/nix/html/default.nix | 19
-rw-r--r--  users/sterni/nix/html/tests/default.nix | 73
-rw-r--r--  users/sterni/nix/int/default.nix | 20
-rw-r--r--  users/sterni/nix/int/tests/default.nix | 386
-rw-r--r--  users/sterni/nix/string/default.nix | 40
-rw-r--r--  users/sterni/nix/string/tests/default.nix | 14
-rw-r--r--  users/sterni/nix/url/default.nix | 41
-rw-r--r--  users/sterni/nix/url/tests/default.nix | 18
-rw-r--r--  users/sterni/nix/utf8/default.nix | 200
-rw-r--r--  users/sterni/nix/utf8/tests/default.nix | 57
-rw-r--r--  users/sterni/nixpkgs-crate-holes/default.nix | 236
-rw-r--r--  users/tazjin/aoc2019/default.nix | 22
-rw-r--r--  users/tazjin/aoc2020/default.nix | 16
-rw-r--r--  users/tazjin/blog/default.nix | 17
-rw-r--r--  users/tazjin/blog/posts.nix | 2
-rw-r--r--  users/tazjin/dns/default.nix | 7
-rw-r--r--  users/tazjin/emacs/default.nix | 305
-rw-r--r--  users/tazjin/homepage/default.nix | 5
-rw-r--r--  users/tazjin/homepage/feed.nix | 5
-rw-r--r--  users/tazjin/nixos/camden/default.nix | 38
-rw-r--r--  users/tazjin/nixos/frog/default.nix | 10
-rw-r--r--  users/tazjin/nixos/tverskoy/default.nix | 186
-rw-r--r--  users/tazjin/presentations/bootstrapping-2018/default.nix | 38
-rw-r--r--  users/wpcarro/assessments/brilliant/default.nix | 2
-rw-r--r--  users/wpcarro/buildHaskell/default.nix | 40
-rw-r--r--  users/wpcarro/ci/pipelines/post-receive.nix | 3
-rw-r--r--  users/wpcarro/clients/monsterpoker/default.nix | 2
-rw-r--r--  users/wpcarro/common.nix | 3
-rw-r--r--  users/wpcarro/configs/default.nix | 3
-rw-r--r--  users/wpcarro/emacs/default.nix | 12
-rw-r--r--  users/wpcarro/haskell-file/shell.nix | 2
-rw-r--r--  users/wpcarro/nixos/diogenes/default.nix | 13
-rw-r--r--  users/wpcarro/nixos/marcus/default.nix | 3
-rw-r--r--  users/wpcarro/nixos/marcus/hardware.nix | 4
-rw-r--r--  users/wpcarro/playbooks/nix_gcr/cloud_run.nix | 2
-rw-r--r--  users/wpcarro/scratch/blockchain/default.nix | 3
-rw-r--r--  users/wpcarro/scratch/groceries/shell.nix | 2
-rw-r--r--  users/wpcarro/scratch/picoctf/challenge_166/shell.nix | 5
-rw-r--r--  users/wpcarro/terraform/default.nix | 336
-rw-r--r--  users/wpcarro/tools/monzo_ynab/job.nix | 3
-rw-r--r--  users/wpcarro/tools/monzo_ynab/tokens.nix | 3
-rw-r--r--  users/wpcarro/tools/rfcToKindle/default.nix | 2
-rw-r--r--  users/wpcarro/tools/symlinkManager/default.nix | 3
-rw-r--r--  users/wpcarro/tools/url-blocker/default.nix | 13
-rw-r--r--  users/wpcarro/utils/builder.nix | 3
-rw-r--r--  users/wpcarro/utils/default.nix | 5
-rw-r--r--  users/wpcarro/utils/fs.nix | 9
-rw-r--r--  users/wpcarro/website/blog/default.nix | 7
-rw-r--r--  users/wpcarro/website/default.nix | 15
-rw-r--r--  users/wpcarro/website/habit-screens/default.nix | 41
-rw-r--r--  users/wpcarro/website/habit-screens/elm-srcs.nix | 120
-rw-r--r--  users/wpcarro/website/sandbox/learnpianochords/default.nix | 41
-rw-r--r--  users/wpcarro/website/sandbox/learnpianochords/elm-srcs.nix | 104
-rw-r--r--  views/kit/default.nix | 2
-rw-r--r--  web/atom-feed/default.nix | 5
-rw-r--r--  web/blog/default.nix | 5
-rw-r--r--  web/blog/fragments.nix | 45
-rw-r--r--  web/bubblegum/default.nix | 66
-rw-r--r--  web/bubblegum/examples/blog.nix | 43
-rw-r--r--  web/bubblegum/examples/default.nix | 61
-rw-r--r--  web/bubblegum/examples/derivation-svg.nix | 8
-rw-r--r--  web/bubblegum/examples/hello.nix | 8
-rw-r--r--  web/cgit-taz/default.nix | 5
-rw-r--r--  web/panettone/shell.nix | 2
-rw-r--r--  web/static/default.nix | 4
-rw-r--r--  web/todolist/default.nix | 29
-rw-r--r--  web/tvl/blog/default.nix | 2
-rw-r--r--  web/tvl/default.nix | 10
-rw-r--r--  web/tvl/footer/default.nix | 2
-rw-r--r--  web/tvl/logo/default.nix | 42
-rw-r--r--  web/tvl/template/default.nix | 16
310 files changed, 7339 insertions, 5551 deletions
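
The hunks below are whitespace and layout changes only, consistent with nixpkgs-fmt-style output. As a minimal Nix sketch of the conventions applied throughout (illustrative only, not part of the commit; the names are made up):

    # Before: empty literals written without inner spacing, formal
    # arguments packed onto one line, and `in` fused to its body.
    { pkgs ? import <nixpkgs> {}, extraDeps ? [], ... }:
    let
      drv = pkgs.runCommandNoCC "example" {} "touch $out";
    in drv // { inherit extraDeps; }

    # After: `{ }` / `[ ]` with inner spacing, one formal argument per
    # line, and `in` on its own line followed by the body.
    { pkgs ? import <nixpkgs> { }
    , extraDeps ? [ ]
    , ...
    }:
    let
      drv = pkgs.runCommandNoCC "example" { } "touch $out";
    in
    drv // { inherit extraDeps; }
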
diff --git a/corp/website/default.nix b/corp/website/default.nix
index 8740041d9be7..2011e2a376ab 100644
--- a/corp/website/default.nix
+++ b/corp/website/default.nix
@@ -30,7 +30,8 @@ let
       </style>
     '';
   };
-in pkgs.runCommandNoCC "corp-website" {} ''
+in
+pkgs.runCommandNoCC "corp-website" { } ''
   mkdir $out
   cp ${index} $out/index.html
 ''
diff --git a/default.nix b/default.nix
index 0ffda5b4d307..7cdf32bef977 100644
--- a/default.nix
+++ b/default.nix
@@ -4,14 +4,16 @@
 
 { nixpkgsBisectPath ? null
 , parentTargetMap ? null
-, nixpkgsConfig ? {}, ... }@args:
+, nixpkgsConfig ? { }
+, ...
+}@args:
 
 let
   inherit (builtins)
     filter
     ;
 
-  readTree = import ./nix/readTree {};
+  readTree = import ./nix/readTree { };
 
   # Disallow access to //users from other depot parts.
   usersFilter = readTree.restrictFolder {
@@ -70,7 +72,8 @@ let
   # Is this tree node eligible for build inclusion?
   eligible = node: (node ? outPath) && !(node.meta.ci.skip or false);
 
-in readTree.fix(self: (readDepot {
+in
+readTree.fix (self: (readDepot {
   depot = self;
 
   # Pass third_party as 'pkgs' (for compatibility with external
@@ -110,8 +113,10 @@ in readTree.fix(self: (readDepot {
   });
 
   # Derivation that gcroots all depot targets.
-  ci.gcroot = with self.third_party.nixpkgs; makeSetupHook {
-    name = "depot-gcroot";
-    deps = self.ci.targets;
-  } emptyFile;
+  ci.gcroot = with self.third_party.nixpkgs; makeSetupHook
+    {
+      name = "depot-gcroot";
+      deps = self.ci.targets;
+    }
+    emptyFile;
 })
diff --git a/fun/gemma/default.nix b/fun/gemma/default.nix
index 0b5e67b1594b..4a26005852f9 100644
--- a/fun/gemma/default.nix
+++ b/fun/gemma/default.nix
@@ -33,7 +33,8 @@ let
       cp ${frontend} $out/index.html
     ''}/")
   '';
-in depot.nix.buildLisp.program {
+in
+depot.nix.buildLisp.program {
   name = "gemma";
 
   deps = with depot.third_party.lisp; [
diff --git a/fun/idual/default.nix b/fun/idual/default.nix
index 0f87f4ae1a76..1acf287bfbd7 100644
--- a/fun/idual/default.nix
+++ b/fun/idual/default.nix
@@ -4,19 +4,20 @@ let
   inherit (pkgs) python3 python3Packages;
 
   opts = {
-    pname   = "idualctl";
+    pname = "idualctl";
     version = "0.1";
-    src     = ./.;
+    src = ./.;
 
     propagatedBuildInputs = [
       depot.third_party.python.broadlink
     ];
   };
   package = python3Packages.buildPythonPackage opts;
-  script  = python3Packages.buildPythonApplication opts;
-in depot.nix.readTree.drvTargets {
+  script = python3Packages.buildPythonApplication opts;
+in
+depot.nix.readTree.drvTargets {
   inherit script;
-  python  = python3.withPackages (_: [ package ]);
+  python = python3.withPackages (_: [ package ]);
   setAlarm = pkgs.writeShellScriptBin "set-alarm" ''
     echo "setting an alarm for ''${1}"
     ${pkgs.systemd}/bin/systemd-run --user --on-calendar="''${1} Europe/London" --unit=light-alarm.service
diff --git a/fun/owothia/default.nix b/fun/owothia/default.nix
index b70d0525c152..04f98e97fba1 100644
--- a/fun/owothia/default.nix
+++ b/fun/owothia/default.nix
@@ -1,6 +1,7 @@
-{ depot ? (import ../../../. {})
+{ depot ? (import ../../../. { })
 , pkgs ? depot.third_party.nixpkgs
-, ... }:
+, ...
+}:
 
 let
   basePkg = pkgs.haskellPackages.callPackage ./pkg.nix { };
diff --git a/fun/owothia/pkg.nix b/fun/owothia/pkg.nix
index d0941a848958..c812e5e11633 100644
--- a/fun/owothia/pkg.nix
+++ b/fun/owothia/pkg.nix
@@ -1,5 +1,15 @@
-{ mkDerivation, base, bytestring, chatter, containers, envy
-, irc-client, lens, lib, random, relude, text
+{ mkDerivation
+, base
+, bytestring
+, chatter
+, containers
+, envy
+, irc-client
+, lens
+, lib
+, random
+, relude
+, text
 }:
 mkDerivation {
   pname = "owothia";
@@ -8,8 +18,16 @@ mkDerivation {
   isLibrary = false;
   isExecutable = true;
   executableHaskellDepends = [
-    base bytestring chatter containers envy irc-client lens random
-    relude text
+    base
+    bytestring
+    chatter
+    containers
+    envy
+    irc-client
+    lens
+    random
+    relude
+    text
   ];
   license = "unknown";
   hydraPlatforms = lib.platforms.none;
diff --git a/fun/owothia/shell.nix b/fun/owothia/shell.nix
index 1ad70c907baa..0304581d9d4a 100644
--- a/fun/owothia/shell.nix
+++ b/fun/owothia/shell.nix
@@ -1,4 +1,4 @@
-{ pkgs ? (import ../../../. {}).third_party, ... }:
+{ pkgs ? (import ../../../. { }).third_party, ... }:
 
 let
   inherit (pkgs)
diff --git a/fun/uggc/default.nix b/fun/uggc/default.nix
index ca622666dcd5..980ad16bcc9d 100644
--- a/fun/uggc/default.nix
+++ b/fun/uggc/default.nix
@@ -12,7 +12,8 @@ let
       gopkgs."github.com".pkg.browser.gopkg
     ];
   };
-in uggc.overrideAttrs(old: {
+in
+uggc.overrideAttrs (old: {
   buildCommand = old.buildCommand + ''
     install -D ${./uggc.desktop} $out/share/applications/uggc.desktop
     sed "s|@out@|$out|g" -i $out/share/applications/uggc.desktop
diff --git a/fun/🕰️/default.nix b/fun/🕰️/default.nix
index 230d9f02f137..2b1a94640009 100644
--- a/fun/🕰️/default.nix
+++ b/fun/🕰️/default.nix
@@ -38,6 +38,7 @@ let
       "ecl" # refuses to create non-ASCII paths even on POSIX…
     ];
   };
-in bin // {
+in
+bin // {
   inherit lib;
 }
diff --git a/nix/binify/default.nix b/nix/binify/default.nix
index d40930fd3334..a9900caf43d5 100644
--- a/nix/binify/default.nix
+++ b/nix/binify/default.nix
@@ -10,7 +10,7 @@
 # with `binify { exe = …; name = "hello" }`.
 { exe, name }:
 
-pkgs.runCommandLocal "${name}-bin" {} ''
+pkgs.runCommandLocal "${name}-bin" { } ''
   mkdir -p $out/bin
   ln -sT ${lib.escapeShellArg exe} $out/bin/${lib.escapeShellArg name}
 ''
diff --git a/nix/buildGo/default.nix b/nix/buildGo/default.nix
index a2396dc3f770..0126a93d1621 100644
--- a/nix/buildGo/default.nix
+++ b/nix/buildGo/default.nix
@@ -4,8 +4,9 @@
 # buildGo provides Nix functions to build Go packages in the style of Bazel's
 # rules_go.
 
-{ pkgs ? import <nixpkgs> {}
-, ... }:
+{ pkgs ? import <nixpkgs> { }
+, ...
+}:
 
 let
   inherit (builtins)
@@ -40,7 +41,7 @@ let
 
   xFlags = x_defs: spaceOut (map (k: "-X ${k}=${x_defs."${k}"}") (attrNames x_defs));
 
-  pathToName = p: replaceStrings ["/"] ["_"] (toString p);
+  pathToName = p: replaceStrings [ "/" ] [ "_" ] (toString p);
 
   # Add an `overrideGo` attribute to a function result that works
   # similar to `overrideAttrs`, but is used specifically for the
@@ -52,49 +53,50 @@ let
   # High-level build functions
 
   # Build a Go program out of the specified files and dependencies.
-  program = { name, srcs, deps ? [], x_defs ? {} }:
-  let uniqueDeps = allDeps (map (d: d.gopkg) deps);
-  in runCommand name {} ''
-    ${go}/bin/go tool compile -o ${name}.a -trimpath=$PWD -trimpath=${go} ${includeSources uniqueDeps} ${spaceOut srcs}
-    mkdir -p $out/bin
-    export GOROOT_FINAL=go
-    ${go}/bin/go tool link -o $out/bin/${name} -buildid nix ${xFlags x_defs} ${includeLibs uniqueDeps} ${name}.a
-  '';
+  program = { name, srcs, deps ? [ ], x_defs ? { } }:
+    let uniqueDeps = allDeps (map (d: d.gopkg) deps);
+    in runCommand name { } ''
+      ${go}/bin/go tool compile -o ${name}.a -trimpath=$PWD -trimpath=${go} ${includeSources uniqueDeps} ${spaceOut srcs}
+      mkdir -p $out/bin
+      export GOROOT_FINAL=go
+      ${go}/bin/go tool link -o $out/bin/${name} -buildid nix ${xFlags x_defs} ${includeLibs uniqueDeps} ${name}.a
+    '';
 
   # Build a Go library assembled out of the specified files.
   #
   # This outputs both the sources and compiled binary, as both are
   # needed when downstream packages depend on it.
-  package = { name, srcs, deps ? [], path ? name, sfiles ? [] }:
-  let
-    uniqueDeps = allDeps (map (d: d.gopkg) deps);
-
-    # The build steps below need to be executed conditionally for Go
-    # assembly if the analyser detected any *.s files.
-    #
-    # This is required for several popular packages (e.g. x/sys).
-    ifAsm = do: lib.optionalString (sfiles != []) do;
-    asmBuild = ifAsm ''
-      ${go}/bin/go tool asm -trimpath $PWD -I $PWD -I ${go}/share/go/pkg/include -D GOOS_linux -D GOARCH_amd64 -gensymabis -o ./symabis ${spaceOut sfiles}
-      ${go}/bin/go tool asm -trimpath $PWD -I $PWD -I ${go}/share/go/pkg/include -D GOOS_linux -D GOARCH_amd64 -o ./asm.o ${spaceOut sfiles}
-    '';
-    asmLink = ifAsm "-symabis ./symabis -asmhdr $out/go_asm.h";
-    asmPack = ifAsm ''
-      ${go}/bin/go tool pack r $out/${path}.a ./asm.o
-    '';
-
-    gopkg = (runCommand "golib-${name}" {} ''
-      mkdir -p $out/${path}
-      ${srcList path (map (s: "${s}") srcs)}
-      ${asmBuild}
-      ${go}/bin/go tool compile -pack ${asmLink} -o $out/${path}.a -trimpath=$PWD -trimpath=${go} -p ${path} ${includeSources uniqueDeps} ${spaceOut srcs}
-      ${asmPack}
-    '') // {
-      inherit gopkg;
-      goDeps = uniqueDeps;
-      goImportPath = path;
-    };
-  in gopkg;
+  package = { name, srcs, deps ? [ ], path ? name, sfiles ? [ ] }:
+    let
+      uniqueDeps = allDeps (map (d: d.gopkg) deps);
+
+      # The build steps below need to be executed conditionally for Go
+      # assembly if the analyser detected any *.s files.
+      #
+      # This is required for several popular packages (e.g. x/sys).
+      ifAsm = do: lib.optionalString (sfiles != [ ]) do;
+      asmBuild = ifAsm ''
+        ${go}/bin/go tool asm -trimpath $PWD -I $PWD -I ${go}/share/go/pkg/include -D GOOS_linux -D GOARCH_amd64 -gensymabis -o ./symabis ${spaceOut sfiles}
+        ${go}/bin/go tool asm -trimpath $PWD -I $PWD -I ${go}/share/go/pkg/include -D GOOS_linux -D GOARCH_amd64 -o ./asm.o ${spaceOut sfiles}
+      '';
+      asmLink = ifAsm "-symabis ./symabis -asmhdr $out/go_asm.h";
+      asmPack = ifAsm ''
+        ${go}/bin/go tool pack r $out/${path}.a ./asm.o
+      '';
+
+      gopkg = (runCommand "golib-${name}" { } ''
+        mkdir -p $out/${path}
+        ${srcList path (map (s: "${s}") srcs)}
+        ${asmBuild}
+        ${go}/bin/go tool compile -pack ${asmLink} -o $out/${path}.a -trimpath=$PWD -trimpath=${go} -p ${path} ${includeSources uniqueDeps} ${spaceOut srcs}
+        ${asmPack}
+      '') // {
+        inherit gopkg;
+        goDeps = uniqueDeps;
+        goImportPath = path;
+      };
+    in
+    gopkg;
 
   # Build a tree of Go libraries out of an external Go source
   # directory that follows the standard Go layout and was not built
@@ -110,10 +112,10 @@ let
   };
 
   # Build a Go library out of the specified protobuf definition.
-  proto = { name, proto, path ? name, goPackage ? name, extraDeps ? [] }: (makeOverridable package) {
+  proto = { name, proto, path ? name, goPackage ? name, extraDeps ? [ ] }: (makeOverridable package) {
     inherit name path;
     deps = [ protoLibs.goProto.proto.gopkg ] ++ extraDeps;
-    srcs = lib.singleton (runCommand "goproto-${name}.pb.go" {} ''
+    srcs = lib.singleton (runCommand "goproto-${name}.pb.go" { } ''
       cp ${proto} ${baseNameOf proto}
       ${protobuf}/bin/protoc --plugin=${protoLibs.goProto.protoc-gen-go.gopkg}/bin/protoc-gen-go \
         --go_out=plugins=grpc,import_path=${baseNameOf path}:. ${baseNameOf proto}
@@ -124,7 +126,8 @@ let
   # Build a Go library out of the specified gRPC definition.
   grpc = args: proto (args // { extraDeps = [ protoLibs.goGrpc.gopkg ]; });
 
-in {
+in
+{
   # Only the high-level builder functions are exposed, but made
   # overrideable.
   program = makeOverridable program;
diff --git a/nix/buildGo/example/default.nix b/nix/buildGo/example/default.nix
index 99c0a7d79bd6..08da075e1818 100644
--- a/nix/buildGo/example/default.nix
+++ b/nix/buildGo/example/default.nix
@@ -8,7 +8,7 @@
 # users a quick introduction to how to use buildGo.
 
 let
-  buildGo = import ../default.nix {};
+  buildGo = import ../default.nix { };
 
   # Example use of buildGo.package, which creates an importable Go
   # package from the specified source files.
@@ -29,7 +29,8 @@ let
   # Example use of buildGo.program, which builds an executable using
   # the specified name and dependencies (which in turn must have been
   # created via buildGo.package etc.)
-in buildGo.program {
+in
+buildGo.program {
   name = "example";
 
   srcs = [
diff --git a/nix/buildGo/external/default.nix b/nix/buildGo/external/default.nix
index 6540faf04c36..f713783a58be 100644
--- a/nix/buildGo/external/default.nix
+++ b/nix/buildGo/external/default.nix
@@ -17,12 +17,12 @@ let
 
   inherit (pkgs) lib runCommand go jq ripgrep;
 
-  pathToName = p: replaceStrings ["/"] ["_"] (toString p);
+  pathToName = p: replaceStrings [ "/" ] [ "_" ] (toString p);
 
   # Collect all non-vendored dependencies from the Go standard library
   # into a file that can be used to filter them out when processing
   # dependencies.
-  stdlibPackages = runCommand "stdlib-pkgs.json" {} ''
+  stdlibPackages = runCommand "stdlib-pkgs.json" { } ''
     export HOME=$PWD
     export GOPATH=/dev/null
     ${go}/bin/go list std | \
@@ -45,20 +45,28 @@ let
   };
 
   mkset = path: value:
-    if path == [] then { gopkg = value; }
+    if path == [ ] then { gopkg = value; }
     else { "${head path}" = mkset (tail path) value; };
 
   last = l: elemAt l ((length l) - 1);
 
   toPackage = self: src: path: depMap: entry:
     let
-      localDeps = map (d: lib.attrByPath (d ++ [ "gopkg" ]) (
-        throw "missing local dependency '${lib.concatStringsSep "." d}' in '${path}'"
-      ) self) entry.localDeps;
-
-      foreignDeps = map (d: lib.attrByPath [ d.path ] (
-        throw "missing foreign dependency '${d.path}' in '${path}, imported at ${d.position}'"
-      ) depMap) entry.foreignDeps;
+      localDeps = map
+        (d: lib.attrByPath (d ++ [ "gopkg" ])
+          (
+            throw "missing local dependency '${lib.concatStringsSep "." d}' in '${path}'"
+          )
+          self)
+        entry.localDeps;
+
+      foreignDeps = map
+        (d: lib.attrByPath [ d.path ]
+          (
+            throw "missing foreign dependency '${d.path}' in '${path}, imported at ${d.position}'"
+          )
+          depMap)
+        entry.foreignDeps;
 
       args = {
         srcs = map (f: src + ("/" + f)) entry.files;
@@ -74,22 +82,28 @@ let
       binArgs = args // {
         name = (last ((lib.splitString "/" path) ++ entry.locator));
       };
-    in if entry.isCommand then (program binArgs) else (package libArgs);
+    in
+    if entry.isCommand then (program binArgs) else (package libArgs);
 
-in { src, path, deps ? [] }: let
+in
+{ src, path, deps ? [ ] }:
+let
   # Build a map of dependencies (from their import paths to their
   # derivation) so that they can be conditionally imported only in
   # sub-packages that require them.
-  depMap = listToAttrs (map (d: {
-    name = d.goImportPath;
-    value = d;
-  }) (map (d: d.gopkg) deps));
+  depMap = listToAttrs (map
+    (d: {
+      name = d.goImportPath;
+      value = d;
+    })
+    (map (d: d.gopkg) deps));
 
   name = pathToName path;
-  analysisOutput = runCommand "${name}-structure.json" {} ''
+  analysisOutput = runCommand "${name}-structure.json" { } ''
     ${analyser}/bin/analyser -path ${path} -source ${src} > $out
   '';
   analysis = fromJSON (readFile analysisOutput);
-in lib.fix(self: foldl' lib.recursiveUpdate {} (
+in
+lib.fix (self: foldl' lib.recursiveUpdate { } (
   map (entry: mkset entry.locator (toPackage self src path depMap entry)) analysis
 ))
diff --git a/nix/buildGo/proto.nix b/nix/buildGo/proto.nix
index 4bd3a572761d..6c37f758ced7 100644
--- a/nix/buildGo/proto.nix
+++ b/nix/buildGo/proto.nix
@@ -8,7 +8,8 @@
 
 let
   inherit (builtins) fetchGit map;
-in rec {
+in
+rec {
   goProto = external {
     path = "github.com/golang/protobuf";
     src = fetchGit {
diff --git a/nix/buildLisp/default.nix b/nix/buildLisp/default.nix
index ab23b302107d..30b90d9049d7 100644
--- a/nix/buildLisp/default.nix
+++ b/nix/buildLisp/default.nix
@@ -4,7 +4,7 @@
 # buildLisp is designed to enforce conventions and do away with the
 # free-for-all of existing Lisp build systems.
 
-{ pkgs ? import <nixpkgs> {}, ... }:
+{ pkgs ? import <nixpkgs> { }, ... }:
 
 let
   inherit (builtins) map elemAt match filter;
@@ -70,11 +70,16 @@ let
   implFilter = impl: xs:
     let
       isFilterSet = x: builtins.isAttrs x && !(lib.isDerivation x);
-    in builtins.map (
-      x: if isFilterSet x then x.${impl.name} or x.default else x
-    ) (builtins.filter (
-      x: !(isFilterSet x) || x ? ${impl.name} || x ? default
-    ) xs);
+    in
+    builtins.map
+      (
+        x: if isFilterSet x then x.${impl.name} or x.default else x
+      )
+      (builtins.filter
+        (
+          x: !(isFilterSet x) || x ? ${impl.name} || x ? default
+        )
+        xs);
 
   # Generates lisp code which instructs the given lisp implementation to load
   # all the given dependencies.
@@ -103,17 +108,21 @@ let
   # 'allDeps' flattens the list of dependencies (and their
   # dependencies) into one ordered list of unique deps which
   # all use the given implementation.
-  allDeps = impl: deps: let
-    # The override _should_ propagate itself recursively, as every derivation
-    # would only expose its actually used dependencies. Use implementation
-    # attribute created by withExtras if present, override in all other cases
-    # (mainly bundled).
-    deps' = builtins.map (dep: dep."${impl.name}" or (dep.overrideLisp (_: {
-      implementation = impl;
-    }))) deps;
-  in (lib.toposort dependsOn (lib.unique (
-    lib.flatten (deps' ++ (map (d: d.lispDeps) deps'))
-  ))).result;
+  allDeps = impl: deps:
+    let
+      # The override _should_ propagate itself recursively, as every derivation
+      # would only expose its actually used dependencies. Use implementation
+      # attribute created by withExtras if present, override in all other cases
+      # (mainly bundled).
+      deps' = builtins.map
+        (dep: dep."${impl.name}" or (dep.overrideLisp (_: {
+          implementation = impl;
+        })))
+        deps;
+    in
+    (lib.toposort dependsOn (lib.unique (
+      lib.flatten (deps' ++ (map (d: d.lispDeps) deps'))
+    ))).result;
 
   # 'allNative' extracts all native dependencies of a dependency list
   # to ensure that library load paths are set correctly during all
@@ -138,42 +147,49 @@ let
   withExtras = f: args:
     let
       drv = (makeOverridable f) args;
-    in lib.fix (self:
-      drv.overrideLisp (old:
-        let
-          implementation = old.implementation or defaultImplementation;
-          brokenOn = old.brokenOn or [];
-          targets = lib.subtractLists (brokenOn ++ [ implementation.name ])
-            (builtins.attrNames impls);
-        in {
-          passthru = (old.passthru or {}) // {
-            repl = implementation.lispWith [ self ];
-
-            # meta is done via passthru to minimize rebuilds caused by overriding
-            meta = (old.passthru.meta or {}) // {
-              inherit targets;
-            };
-          } // builtins.listToAttrs (builtins.map (impl: {
-            inherit (impl) name;
-            value = self.overrideLisp (_: {
-              implementation = impl;
-            });
-          }) (builtins.attrValues impls));
-        }) // {
-          overrideLisp = new: withExtras f (args // new args);
-        });
+    in
+    lib.fix (self:
+      drv.overrideLisp
+        (old:
+          let
+            implementation = old.implementation or defaultImplementation;
+            brokenOn = old.brokenOn or [ ];
+            targets = lib.subtractLists (brokenOn ++ [ implementation.name ])
+              (builtins.attrNames impls);
+          in
+          {
+            passthru = (old.passthru or { }) // {
+              repl = implementation.lispWith [ self ];
+
+              # meta is done via passthru to minimize rebuilds caused by overriding
+              meta = (old.passthru.meta or { }) // {
+                inherit targets;
+              };
+            } // builtins.listToAttrs (builtins.map
+              (impl: {
+                inherit (impl) name;
+                value = self.overrideLisp (_: {
+                  implementation = impl;
+                });
+              })
+              (builtins.attrValues impls));
+          }) // {
+        overrideLisp = new: withExtras f (args // new args);
+      });
 
   # 'testSuite' builds a Common Lisp test suite that loads all of srcs and deps,
   # and then executes expression to check its result
-  testSuite = { name, expression, srcs, deps ? [], native ? [], implementation }:
+  testSuite = { name, expression, srcs, deps ? [ ], native ? [ ], implementation }:
     let
       lispDeps = allDeps implementation (implFilter implementation deps);
       lispNativeDeps = allNative native lispDeps;
       filteredSrcs = implFilter implementation srcs;
-    in runCommandNoCC name {
-      LD_LIBRARY_PATH = lib.makeLibraryPath lispNativeDeps;
-      LANG = "C.UTF-8";
-    } ''
+    in
+    runCommandNoCC name
+      {
+        LD_LIBRARY_PATH = lib.makeLibraryPath lispNativeDeps;
+        LANG = "C.UTF-8";
+      } ''
       echo "Running test suite ${name}"
 
       ${implementation.runScript} ${
@@ -452,15 +468,16 @@ let
           } $@
         '';
 
-      bundled = name: runCommandNoCC "${name}-cllib" {
-        passthru = {
-          lispName = name;
-          lispNativeDeps = [];
-          lispDeps = [];
-          lispBinary = false;
-          repl = impls.ecl.lispWith [ (impls.ecl.bundled name) ];
-        };
-      } ''
+      bundled = name: runCommandNoCC "${name}-cllib"
+        {
+          passthru = {
+            lispName = name;
+            lispNativeDeps = [ ];
+            lispDeps = [ ];
+            lispBinary = false;
+            repl = impls.ecl.lispWith [ (impls.ecl.bundled name) ];
+          };
+        } ''
         mkdir -p "$out"
         ln -s "${ecl-static}/lib/ecl-${ecl-static.version}/${name}.${impls.ecl.faslExt}" -t "$out"
         ln -s "${ecl-static}/lib/ecl-${ecl-static.version}/lib${name}.a" "$out/${name}.a"
@@ -489,7 +506,8 @@ let
 
       # See https://ccl.clozure.com/docs/ccl.html#building-definitions
       faslExt =
-        /**/ if targetPlatform.isPowerPC && targetPlatform.is32bit then "pfsl"
+        /**/
+        if targetPlatform.isPowerPC && targetPlatform.is32bit then "pfsl"
         else if targetPlatform.isPowerPC && targetPlatform.is64bit then "p64fsl"
         else if targetPlatform.isx86_64 && targetPlatform.isLinux then "lx64fsl"
         else if targetPlatform.isx86_32 && targetPlatform.isLinux then "lx32fsl"
@@ -572,7 +590,7 @@ let
             lib.optionalString (deps != [])
               "--load ${writeText "load.lisp" (impls.ccl.genLoadLisp lispDeps)}"
           } "$@"
-      '';
+        '';
     };
   };
 
@@ -586,37 +604,42 @@ let
   library =
     { name
     , implementation ? defaultImplementation
-    , brokenOn ? [] # TODO(sterni): make this a warning
+    , brokenOn ? [ ] # TODO(sterni): make this a warning
     , srcs
-    , deps ? []
-    , native ? []
+    , deps ? [ ]
+    , native ? [ ]
     , tests ? null
-    , passthru ? {}
+    , passthru ? { }
     }:
     let
       filteredDeps = implFilter implementation deps;
       filteredSrcs = implFilter implementation srcs;
       lispNativeDeps = (allNative native filteredDeps);
       lispDeps = allDeps implementation filteredDeps;
-      testDrv = if ! isNull tests
-        then testSuite {
-          name = tests.name or "${name}-test";
-          srcs = filteredSrcs ++ (tests.srcs or []);
-          deps = filteredDeps ++ (tests.deps or []);
-          expression = tests.expression;
-          inherit implementation;
-        }
+      testDrv =
+        if ! isNull tests
+        then
+          testSuite
+            {
+              name = tests.name or "${name}-test";
+              srcs = filteredSrcs ++ (tests.srcs or [ ]);
+              deps = filteredDeps ++ (tests.deps or [ ]);
+              expression = tests.expression;
+              inherit implementation;
+            }
         else null;
-    in lib.fix (self: runCommandNoCC "${name}-cllib" {
-      LD_LIBRARY_PATH = lib.makeLibraryPath lispNativeDeps;
-      LANG = "C.UTF-8";
-      passthru = passthru // {
-        inherit lispNativeDeps lispDeps;
-        lispName = name;
-        lispBinary = false;
-        tests = testDrv;
-      };
-    } ''
+    in
+    lib.fix (self: runCommandNoCC "${name}-cllib"
+      {
+        LD_LIBRARY_PATH = lib.makeLibraryPath lispNativeDeps;
+        LANG = "C.UTF-8";
+        passthru = passthru // {
+          inherit lispNativeDeps lispDeps;
+          lispName = name;
+          lispBinary = false;
+          tests = testDrv;
+        };
+      } ''
       ${if ! isNull testDrv
         then "echo 'Test ${testDrv} succeeded'"
         else "echo 'No tests run'"}
@@ -637,13 +660,13 @@ let
   program =
     { name
     , implementation ? defaultImplementation
-    , brokenOn ? [] # TODO(sterni): make this a warning
+    , brokenOn ? [ ] # TODO(sterni): make this a warning
     , main ? "${name}:main"
     , srcs
-    , deps ? []
-    , native ? []
+    , deps ? [ ]
+    , native ? [ ]
     , tests ? null
-    , passthru ? {}
+    , passthru ? { }
     }:
     let
       filteredSrcs = implFilter implementation srcs;
@@ -656,45 +679,53 @@ let
         deps = lispDeps;
         srcs = filteredSrcs;
       };
-      testDrv = if ! isNull tests
-        then testSuite {
-          name = tests.name or "${name}-test";
-          srcs =
-            ( # testSuite does run implFilter as well
-              filteredSrcs ++ (tests.srcs or []));
-          deps = filteredDeps ++ (tests.deps or []);
-          expression = tests.expression;
-          inherit implementation;
-        }
+      testDrv =
+        if ! isNull tests
+        then
+          testSuite
+            {
+              name = tests.name or "${name}-test";
+              srcs =
+                (
+                  # testSuite does run implFilter as well
+                  filteredSrcs ++ (tests.srcs or [ ])
+                );
+              deps = filteredDeps ++ (tests.deps or [ ]);
+              expression = tests.expression;
+              inherit implementation;
+            }
         else null;
-    in lib.fix (self: runCommandNoCC "${name}" {
-      nativeBuildInputs = [ makeWrapper ];
-      LD_LIBRARY_PATH = libPath;
-      LANG = "C.UTF-8";
-      passthru = passthru // {
-        lispName = name;
-        lispDeps = [ selfLib ];
-        lispNativeDeps = native;
-        lispBinary = true;
-        tests = testDrv;
-      };
-    } (''
-      ${if ! isNull testDrv
-        then "echo 'Test ${testDrv} succeeded'"
-        else ""}
-      mkdir -p $out/bin
-
-      ${implementation.runScript} ${
-        implementation.genDumpLisp {
-          inherit name main;
-          deps = ([ selfLib ] ++ lispDeps);
-        }
+    in
+    lib.fix (self: runCommandNoCC "${name}"
+      {
+        nativeBuildInputs = [ makeWrapper ];
+        LD_LIBRARY_PATH = libPath;
+        LANG = "C.UTF-8";
+        passthru = passthru // {
+          lispName = name;
+          lispDeps = [ selfLib ];
+          lispNativeDeps = native;
+          lispBinary = true;
+          tests = testDrv;
+        };
       }
-    '' + lib.optionalString implementation.wrapProgram ''
-      wrapProgram $out/bin/${name} \
-        --prefix LD_LIBRARY_PATH : "${libPath}" \
-        --add-flags "\$NIX_BUILDLISP_LISP_ARGS --"
-    ''));
+      (''
+        ${if ! isNull testDrv
+          then "echo 'Test ${testDrv} succeeded'"
+          else ""}
+        mkdir -p $out/bin
+
+        ${implementation.runScript} ${
+          implementation.genDumpLisp {
+            inherit name main;
+            deps = ([ selfLib ] ++ lispDeps);
+          }
+        }
+      '' + lib.optionalString implementation.wrapProgram ''
+        wrapProgram $out/bin/${name} \
+          --prefix LD_LIBRARY_PATH : "${libPath}" \
+          --add-flags "\$NIX_BUILDLISP_LISP_ARGS --"
+      ''));
 
   # 'bundled' creates a "library" which makes a built-in package available,
   # such as any of SBCL's sb-* packages or ASDF. By default this is done
@@ -714,11 +745,13 @@ let
         }:
         implementation.bundled or (defaultBundled implementation) name;
 
-    in (makeOverridable bundled') {
+    in
+    (makeOverridable bundled') {
       inherit name;
     };
 
-in {
+in
+{
   library = withExtras library;
   program = withExtras program;
   inherit bundled;
diff --git a/nix/buildLisp/example/default.nix b/nix/buildLisp/example/default.nix
index 6a518e4964a1..6add2676f10c 100644
--- a/nix/buildLisp/example/default.nix
+++ b/nix/buildLisp/example/default.nix
@@ -14,15 +14,16 @@ let
     ];
   };
 
-# Example Lisp program.
-#
-# This builds & writes an executable for a program using the library
-# above to disk.
-#
-# By default, buildLisp.program expects the entry point to be
-# `$name:main`. This can be overridden by configuring the `main`
-# attribute.
-in buildLisp.program {
+  # Example Lisp program.
+  #
+  # This builds & writes an executable for a program using the library
+  # above to disk.
+  #
+  # By default, buildLisp.program expects the entry point to be
+  # `$name:main`. This can be overridden by configuring the `main`
+  # attribute.
+in
+buildLisp.program {
   name = "example";
   deps = [ libExample ];
 
diff --git a/nix/buildManPages/default.nix b/nix/buildManPages/default.nix
index fe6d145f6cee..746ed25182b4 100644
--- a/nix/buildManPages/default.nix
+++ b/nix/buildManPages/default.nix
@@ -13,9 +13,9 @@ let
     ;
 
   bins = getBins mandoc [ "mandoc" ]
-      // getBins gzip   [ "gzip" ]
-      // getBins coreutils [ "mkdir" "ln" "cp" ]
-      ;
+    // getBins gzip [ "gzip" ]
+    // getBins coreutils [ "mkdir" "ln" "cp" ]
+  ;
 
   defaultGzip = true;
 
@@ -35,41 +35,68 @@ let
     }:
     { content
     , ...
-    }@page: let
+    }@page:
+    let
       source = builtins.toFile (basename false page) content;
-    in runExecline (basename gzip page) {} ([
-      (if requireLint then "if" else "foreground") [
-        bins.mandoc "-mdoc" "-T" "lint" source
+    in
+    runExecline (basename gzip page) { } ([
+      (if requireLint then "if" else "foreground")
+      [
+        bins.mandoc
+        "-mdoc"
+        "-T"
+        "lint"
+        source
       ]
-      "importas" "out" "out"
+      "importas"
+      "out"
+      "out"
     ] ++ (if gzip then [
-      "redirfd" "-w" "1" "$out"
-      bins.gzip "-c" source
+      "redirfd"
+      "-w"
+      "1"
+      "$out"
+      bins.gzip
+      "-c"
+      source
     ] else [
-      bins.cp "--reflink=auto" source "$out"
+      bins.cp
+      "--reflink=auto"
+      source
+      "$out"
     ]));
 
   buildManPages =
     name:
-    { derivationArgs ? {}
+    { derivationArgs ? { }
     , gzip ? defaultGzip
     , ...
     }@args:
     pages:
-    runExecline "${name}-man-pages" {
-      inherit derivationArgs;
-    } ([
-      "importas" "out" "out"
-    ] ++ lib.concatMap ({ name, section, content }@page: [
-      "if" [ bins.mkdir "-p" (manDir page) ]
-      "if" [
-        bins.ln "-s"
-        (buildManPage args page)
-        (target gzip page)
-      ]
-    ]) pages);
+    runExecline "${name}-man-pages"
+      {
+        inherit derivationArgs;
+      }
+      ([
+        "importas"
+        "out"
+        "out"
+      ] ++ lib.concatMap
+        ({ name, section, content }@page: [
+          "if"
+          [ bins.mkdir "-p" (manDir page) ]
+          "if"
+          [
+            bins.ln
+            "-s"
+            (buildManPage args page)
+            (target gzip page)
+          ]
+        ])
+        pages);
 
-in {
+in
+{
   __functor = _: buildManPages;
 
   single = buildManPage;
diff --git a/nix/buildkite/default.nix b/nix/buildkite/default.nix
index e0c947deae91..d17b5c86c4a6 100644
--- a/nix/buildkite/default.nix
+++ b/nix/buildkite/default.nix
@@ -29,7 +29,8 @@ let
     unsafeDiscardStringContext;
 
   inherit (pkgs) lib runCommandNoCC writeText;
-in rec {
+in
+rec {
   # Creates a Nix expression that yields the target at the specified
   # location in the repository.
   #
@@ -42,14 +43,15 @@ in rec {
       descend = expr: attr: "builtins.getAttr \"${attr}\" (${expr})";
       targetExpr = foldl' descend "import ./. {}" target.__readTree;
       subtargetExpr = descend targetExpr target.__subtarget;
-    in if target ? __subtarget then subtargetExpr else targetExpr;
+    in
+    if target ? __subtarget then subtargetExpr else targetExpr;
 
   # Create a pipeline label from the target's tree location.
   mkLabel = target:
     let label = concatStringsSep "/" target.__readTree;
     in if target ? __subtarget
-      then "${label}:${target.__subtarget}"
-      else label;
+    then "${label}:${target.__subtarget}"
+    else label;
 
   # Determine whether to skip a target if it has not diverged from the
   # HEAD branch.
@@ -74,33 +76,36 @@ in rec {
 
   # Create a pipeline step from a single target.
   mkStep = headBranch: parentTargetMap: target:
-  let
-    label = mkLabel target;
-    drvPath = unsafeDiscardStringContext target.drvPath;
-    shouldSkip' = shouldSkip parentTargetMap;
-  in {
-    label = ":nix: " + label;
-    key = hashString "sha1" label;
-    skip = shouldSkip' label drvPath;
-    command = mkBuildCommand target drvPath;
-    env.READTREE_TARGET = label;
-
-    # Add a dependency on the initial static pipeline step which
-    # always runs. This allows build steps uploaded in batches to
-    # start running before all batches have been uploaded.
-    depends_on = ":init:";
-  };
+    let
+      label = mkLabel target;
+      drvPath = unsafeDiscardStringContext target.drvPath;
+      shouldSkip' = shouldSkip parentTargetMap;
+    in
+    {
+      label = ":nix: " + label;
+      key = hashString "sha1" label;
+      skip = shouldSkip' label drvPath;
+      command = mkBuildCommand target drvPath;
+      env.READTREE_TARGET = label;
+
+      # Add a dependency on the initial static pipeline step which
+      # always runs. This allows build steps uploaded in batches to
+      # start running before all batches have been uploaded.
+      depends_on = ":init:";
+    };
 
   # Helper function to inelegantly divide a list into chunks of at
   # most n elements.
   #
   # This works by assigning each element a chunk ID based on its
   # index, and then grouping all elements by their chunk ID.
-  chunksOf = n: list: let
-    chunkId = idx: toString (idx / n + 1);
-    assigned = lib.imap1 (idx: value: { inherit value ; chunk = chunkId idx; }) list;
-    unchunk = mapAttrs (_: elements: map (e: e.value) elements);
-  in unchunk (lib.groupBy (e: e.chunk) assigned);
+  chunksOf = n: list:
+    let
+      chunkId = idx: toString (idx / n + 1);
+      assigned = lib.imap1 (idx: value: { inherit value; chunk = chunkId idx; }) list;
+      unchunk = mapAttrs (_: elements: map (e: e.value) elements);
+    in
+    unchunk (lib.groupBy (e: e.chunk) assigned);
 
   # Define a build pipeline chunk as a JSON file, using the pipeline
   # format documented on
@@ -120,104 +125,112 @@ in rec {
     attrValues (mapAttrs (makePipelineChunk name) (chunksOf 192 steps));
 
   # Create a pipeline structure for the given targets.
-  mkPipeline = {
-    # HEAD branch of the repository on which release steps, GC
-    # anchoring and other "mainline only" steps should run.
-    headBranch,
-
-    # List of derivations as read by readTree (in most cases just the
-    # output of readTree.gather) that should be built in Buildkite.
-    #
-    # These are scheduled as the first build steps and run as fast as
-    # possible, in order, without any concurrency restrictions.
-    drvTargets,
-
-    # Derivation map of a parent commit. Only targets which no longer
-    # correspond to the content of this map will be built. Passing an
-    # empty map will always build all targets.
-    parentTargetMap ? {},
-
-    # A list of plain Buildkite step structures to run alongside the
-    # build for all drvTargets, but before proceeding with any
-    # post-build actions such as status reporting.
-    #
-    # Can be used for things like code formatting checks.
-    additionalSteps ? [],
-
-    # A list of plain Buildkite step structures to run after all
-    # previous steps succeeded.
-    #
-    # Can be used for status reporting steps and the like.
-    postBuildSteps ? []
-  }: let
-    # Convert a target into all of its build and post-build steps,
-    # treated separately as they need to be in different chunks.
-    targetToSteps = target: let
-      step = mkStep headBranch parentTargetMap target;
-
-      # Split build/post-build steps
-      splitExtraSteps = partition ({ postStep, ... }: postStep)
-       (attrValues (mapAttrs (name: value: {
-         inherit name value;
-         postStep = (value ? prompt) || (value.postBuild or false);
-        }) (target.meta.ci.extraSteps or {})));
-
-      mkExtraStep' = { name, value, ... }: mkExtraStep step name value;
-      extraBuildSteps = map mkExtraStep' splitExtraSteps.wrong; # 'wrong' -> no prompt
-      extraPostSteps = map mkExtraStep' splitExtraSteps.right; # 'right' -> has prompt
-    in {
-      buildSteps = [ step ] ++ extraBuildSteps;
-      postSteps = extraPostSteps;
-    };
-
-    # Combine all target steps into separate build and post-build step lists.
-    steps = foldl' (acc: t: {
-      buildSteps = acc.buildSteps ++ t.buildSteps;
-      postSteps = acc.postSteps ++ t.postSteps;
-    }) { buildSteps = []; postSteps = []; } (map targetToSteps drvTargets);
-
-    buildSteps =
-      # Add build steps for each derivation target and their extra
-      # steps.
-      steps.buildSteps
-
-      # Add additional steps (if set).
-      ++ additionalSteps;
-
-    postSteps =
-      # Add post-build steps for each derivation target.
-      steps.postSteps
-
-      # Add any globally defined post-build steps.
-      ++ postBuildSteps;
-
-    buildChunks = pipelineChunks "build" buildSteps;
-    postBuildChunks = pipelineChunks "post" postSteps;
-    chunks = buildChunks ++ postBuildChunks;
-  in runCommandNoCC "buildkite-pipeline" {} ''
-    mkdir $out
-    echo "Generated ${toString (length chunks)} pipeline chunks"
-    ${
-      lib.concatMapStringsSep "\n"
-        (chunk: "cp ${chunk.path} $out/${chunk.filename}") chunks
-    }
-  '';
+  mkPipeline =
+    {
+      # HEAD branch of the repository on which release steps, GC
+      # anchoring and other "mainline only" steps should run.
+      headBranch
+    , # List of derivations as read by readTree (in most cases just the
+      # output of readTree.gather) that should be built in Buildkite.
+      #
+      # These are scheduled as the first build steps and run as fast as
+      # possible, in order, without any concurrency restrictions.
+      drvTargets
+    , # Derivation map of a parent commit. Only targets which no longer
+      # correspond to the content of this map will be built. Passing an
+      # empty map will always build all targets.
+      parentTargetMap ? { }
+    , # A list of plain Buildkite step structures to run alongside the
+      # build for all drvTargets, but before proceeding with any
+      # post-build actions such as status reporting.
+      #
+      # Can be used for things like code formatting checks.
+      additionalSteps ? [ ]
+    , # A list of plain Buildkite step structures to run after all
+      # previous steps succeeded.
+      #
+      # Can be used for status reporting steps and the like.
+      postBuildSteps ? [ ]
+    }:
+    let
+      # Convert a target into all of its build and post-build steps,
+      # treated separately as they need to be in different chunks.
+      targetToSteps = target:
+        let
+          step = mkStep headBranch parentTargetMap target;
+
+          # Split build/post-build steps
+          splitExtraSteps = partition ({ postStep, ... }: postStep)
+            (attrValues (mapAttrs
+              (name: value: {
+                inherit name value;
+                postStep = (value ? prompt) || (value.postBuild or false);
+              })
+              (target.meta.ci.extraSteps or { })));
+
+          mkExtraStep' = { name, value, ... }: mkExtraStep step name value;
+          extraBuildSteps = map mkExtraStep' splitExtraSteps.wrong; # 'wrong' -> no prompt
+          extraPostSteps = map mkExtraStep' splitExtraSteps.right; # 'right' -> has prompt
+        in
+        {
+          buildSteps = [ step ] ++ extraBuildSteps;
+          postSteps = extraPostSteps;
+        };
+
+      # Combine all target steps into separate build and post-build step lists.
+      steps = foldl'
+        (acc: t: {
+          buildSteps = acc.buildSteps ++ t.buildSteps;
+          postSteps = acc.postSteps ++ t.postSteps;
+        })
+        { buildSteps = [ ]; postSteps = [ ]; }
+        (map targetToSteps drvTargets);
+
+      buildSteps =
+        # Add build steps for each derivation target and their extra
+        # steps.
+        steps.buildSteps
+
+        # Add additional steps (if set).
+        ++ additionalSteps;
+
+      postSteps =
+        # Add post-build steps for each derivation target.
+        steps.postSteps
+
+        # Add any globally defined post-build steps.
+        ++ postBuildSteps;
+
+      buildChunks = pipelineChunks "build" buildSteps;
+      postBuildChunks = pipelineChunks "post" postSteps;
+      chunks = buildChunks ++ postBuildChunks;
+    in
+    runCommandNoCC "buildkite-pipeline" { } ''
+      mkdir $out
+      echo "Generated ${toString (length chunks)} pipeline chunks"
+      ${
+        lib.concatMapStringsSep "\n"
+          (chunk: "cp ${chunk.path} $out/${chunk.filename}") chunks
+      }
+    '';
 
   # Create a drvmap structure for the given targets, containing the
   # mapping of all target paths to their derivations. The mapping can
   # be persisted for future use.
-  mkDrvmap = drvTargets: writeText "drvmap.json" (toJSON (listToAttrs (map (target: {
-    name = mkLabel target;
-    value = {
-      drvPath = unsafeDiscardStringContext target.drvPath;
-
-      # Include the attrPath in the output to reconstruct the drv
-      # without parsing the human-readable label.
-      attrPath = target.__readTree ++ lib.optionals (target ? __subtarget) [
-        target.__subtarget
-      ];
-    };
-  }) drvTargets)));
+  mkDrvmap = drvTargets: writeText "drvmap.json" (toJSON (listToAttrs (map
+    (target: {
+      name = mkLabel target;
+      value = {
+        drvPath = unsafeDiscardStringContext target.drvPath;
+
+        # Include the attrPath in the output to reconstruct the drv
+        # without parsing the human-readable label.
+        attrPath = target.__readTree ++ lib.optionals (target ? __subtarget) [
+          target.__subtarget
+        ];
+      };
+    })
+    drvTargets)));
 
   # Implementation of extra step logic.
   #
@@ -278,34 +291,37 @@ in rec {
 
   # Create the Buildkite configuration for an extra step, optionally
   # wrapping it in a gate group.
-  mkExtraStep = parent: key: {
-    command,
-    label ? key,
-    prompt ? false,
-    needsOutput ? false,
-    branches ? null,
-    alwaysRun ? false,
-    postBuild ? false
-  }@cfg: let
-    parentLabel = parent.env.READTREE_TARGET;
-
-    step = {
-      label = ":gear: ${label} (from ${parentLabel})";
-      skip = if alwaysRun then false else parent.skip or false;
-      depends_on = lib.optional (!alwaysRun && !needsOutput) parent.key;
-      branches = if branches != null then lib.concatStringsSep " " branches else null;
-
-      command = pkgs.writeShellScript "${key}-script" ''
-        set -ueo pipefail
-        ${lib.optionalString needsOutput "echo '~~~ Preparing build output of ${parentLabel}'"}
-        ${lib.optionalString needsOutput parent.command}
-        echo '+++ Running extra step command'
-        exec ${command}
-      '';
-    };
-  in if (isString prompt)
-    then mkGatedStep {
-      inherit step label parent prompt;
-    }
+  mkExtraStep = parent: key: { command
+                             , label ? key
+                             , prompt ? false
+                             , needsOutput ? false
+                             , branches ? null
+                             , alwaysRun ? false
+                             , postBuild ? false
+                             }@cfg:
+    let
+      parentLabel = parent.env.READTREE_TARGET;
+
+      step = {
+        label = ":gear: ${label} (from ${parentLabel})";
+        skip = if alwaysRun then false else parent.skip or false;
+        depends_on = lib.optional (!alwaysRun && !needsOutput) parent.key;
+        branches = if branches != null then lib.concatStringsSep " " branches else null;
+
+        command = pkgs.writeShellScript "${key}-script" ''
+          set -ueo pipefail
+          ${lib.optionalString needsOutput "echo '~~~ Preparing build output of ${parentLabel}'"}
+          ${lib.optionalString needsOutput parent.command}
+          echo '+++ Running extra step command'
+          exec ${command}
+        '';
+      };
+    in
+    if (isString prompt)
+    then
+      mkGatedStep
+        {
+          inherit step label parent prompt;
+        }
     else step;
 }
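
The attribute set destructured by mkExtraStep above is exactly what a target's `meta.ci.extraSteps` entries look like. A hedged sketch of such an entry follows; only the attribute names (label, command, needsOutput, alwaysRun) come from the signature in this diff, while the step name and the formatter invocation are illustrative:

  {
    meta.ci.extraSteps.format-check = {
      label = "check formatting";
      # replay the parent target's build command before this step runs
      needsOutput = true;
      # do not skip this step even when the parent target is skipped
      alwaysRun = true;
      # hypothetical command; any executable path works here
      command = "${pkgs.nixpkgs-fmt}/bin/nixpkgs-fmt --check .";
    };
  }
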
diff --git a/nix/drvSeqL/default.nix b/nix/drvSeqL/default.nix
index 3339289b3bd1..6437e1a043a5 100644
--- a/nix/drvSeqL/default.nix
+++ b/nix/drvSeqL/default.nix
@@ -17,9 +17,10 @@ let
   drvSeqL = defun [ (list drv) drv drv ]
     (drvDeps: drvOut:
       let
-        drvOutOutputs = drvOut.outputs or ["out"];
+        drvOutOutputs = drvOut.outputs or [ "out" ];
       in
-        pkgs.runCommandLocal drvOut.name {
+      pkgs.runCommandLocal drvOut.name
+        {
           # we inherit all attributes in order to replicate
           # the original derivation as much as possible
           outputs = drvOutOutputs;
@@ -29,15 +30,18 @@ let
         }
         # the outputs of the original derivation are replicated
         # by creating a symlink to the old output path
-        (lib.concatMapStrings (output: ''
-          target=${lib.escapeShellArg drvOut.${output}}
-          # if the target is already a symlink, follow it until it’s not;
-          # this is done to prevent too many dereferences
-          target=$(readlink -e "$target")
-          # link to the output
-          ln -s "$target" "${"$"}${output}"
-        '') drvOutOutputs));
+        (lib.concatMapStrings
+          (output: ''
+            target=${lib.escapeShellArg drvOut.${output}}
+            # if the target is already a symlink, follow it until it’s not;
+            # this is done to prevent too many dereferences
+            target=$(readlink -e "$target")
+            # link to the output
+            ln -s "$target" "${"$"}${output}"
+          '')
+          drvOutOutputs));
 
-in {
+in
+{
   __functor = _: drvSeqL;
 }
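
Given the type annotation above (defun [ (list drv) drv drv ]), drvSeqL takes a list of dependency derivations plus an output derivation and returns a copy of the latter whose outputs are symlinks to the originals, forcing the dependencies to build first. A hedged usage sketch; `somePackage` and its `tests` attribute are illustrative names:

  depot.nix.drvSeqL [ somePackage.tests ] somePackage
  # yields a derivation equivalent to somePackage that only
  # substitutes once somePackage.tests has built
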
diff --git a/nix/emptyDerivation/default.nix b/nix/emptyDerivation/default.nix
index 4165d4fd9ac1..8433984012c7 100644
--- a/nix/emptyDerivation/default.nix
+++ b/nix/emptyDerivation/default.nix
@@ -14,7 +14,8 @@ let
     inherit (depot.nix.runTestsuite) runTestsuite it assertEq;
   };
 
-in {
+in
+{
   __functor = _: emptyDerivation;
   inherit tests;
 }
diff --git a/nix/emptyDerivation/emptyDerivation.nix b/nix/emptyDerivation/emptyDerivation.nix
index 5e84abe2d505..772df9635214 100644
--- a/nix/emptyDerivation/emptyDerivation.nix
+++ b/nix/emptyDerivation/emptyDerivation.nix
@@ -11,7 +11,7 @@
 
 let
   bins = getBins pkgs.s6-portable-utils [ "s6-touch" ]
-      // getBins pkgs.execline [ "importas" "exec" ];
+    // getBins pkgs.execline [ "importas" "exec" ];
 
   emptiness = {
     name = "empty-derivation";
@@ -21,12 +21,16 @@ let
 
     builder = bins.exec;
     args = [
-      bins.importas "out" "out"
-      bins.s6-touch "$out"
+      bins.importas
+      "out"
+      "out"
+      bins.s6-touch
+      "$out"
     ];
   };
 
-in (derivation emptiness) // {
+in
+(derivation emptiness) // {
   # This allows us to call the empty derivation
   # like a function and override fields/add new fields.
   __functor = _: overrides:
diff --git a/nix/emptyDerivation/tests.nix b/nix/emptyDerivation/tests.nix
index 053603b02772..a73842882499 100644
--- a/nix/emptyDerivation/tests.nix
+++ b/nix/emptyDerivation/tests.nix
@@ -10,10 +10,17 @@ let
   ];
 
   fooOut = emptyDerivation {
-    builder = writeExecline "foo-builder" {} [
-      "importas" "out" "out"
-      "redirfd" "-w" "1" "$out"
-      bins.s6-echo "-n" "foo"
+    builder = writeExecline "foo-builder" { } [
+      "importas"
+      "out"
+      "out"
+      "redirfd"
+      "-w"
+      "1"
+      "$out"
+      bins.s6-echo
+      "-n"
+      "foo"
     ];
   };
 
@@ -26,7 +33,8 @@ let
       "bar")
   ];
 
-in runTestsuite "emptyDerivation" [
+in
+runTestsuite "emptyDerivation" [
   empty
   overrideBuilder
 ]
diff --git a/nix/escapeExecline/default.nix b/nix/escapeExecline/default.nix
index deef5c2c4ec8..d2c39dd39894 100644
--- a/nix/escapeExecline/default.nix
+++ b/nix/escapeExecline/default.nix
@@ -16,14 +16,17 @@ let
   #   escapeExecline [ "if" [ "somecommand" ] "true" ]
   #   == ''"if" { "somecommand" } "true"''
   escapeExecline = execlineList: lib.concatStringsSep " "
-    (let
-      go = arg:
-        if      builtins.isString arg then [(escapeExeclineArg arg)]
-        else if builtins.isPath arg then [(escapeExeclineArg "${arg}")]
-        else if lib.isDerivation arg then [(escapeExeclineArg arg)]
-        else if builtins.isList arg then [ "{" ] ++ builtins.concatMap go arg ++ [ "}" ]
-        else abort "escapeExecline can only hande nested lists of strings, was ${lib.generators.toPretty {} arg}";
-     in builtins.concatMap go execlineList);
+    (
+      let
+        go = arg:
+          if builtins.isString arg then [ (escapeExeclineArg arg) ]
+          else if builtins.isPath arg then [ (escapeExeclineArg "${arg}") ]
+          else if lib.isDerivation arg then [ (escapeExeclineArg arg) ]
+          else if builtins.isList arg then [ "{" ] ++ builtins.concatMap go arg ++ [ "}" ]
+          else abort "escapeExecline can only handle nested lists of strings, was ${lib.generators.toPretty {} arg}";
+      in
+      builtins.concatMap go execlineList
+    );
 
 in
 escapeExecline
diff --git a/nix/getBins/default.nix b/nix/getBins/default.nix
index 5ba7584ed844..e354b176c885 100644
--- a/nix/getBins/default.nix
+++ b/nix/getBins/default.nix
@@ -26,14 +26,16 @@
 
 let
   getBins = drv: xs:
-    let f = x:
-      # TODO(Profpatsch): typecheck
-      let x' = if builtins.isString x then { use = x; as = x; } else x;
-      in {
-        name = x'.as;
-        value = "${lib.getBin drv}/bin/${x'.use}";
-      };
-    in builtins.listToAttrs (builtins.map f xs);
+    let
+      f = x:
+        # TODO(Profpatsch): typecheck
+        let x' = if builtins.isString x then { use = x; as = x; } else x;
+        in {
+          name = x'.as;
+          value = "${lib.getBin drv}/bin/${x'.use}";
+        };
+    in
+    builtins.listToAttrs (builtins.map f xs);
 
 
   tests = import ./tests.nix {
@@ -42,7 +44,8 @@ let
     inherit (depot.nix.runTestsuite) assertEq it runTestsuite;
   };
 
-in {
+in
+{
   __functor = _: getBins;
   inherit tests;
 }
diff --git a/nix/getBins/tests.nix b/nix/getBins/tests.nix
index ff81deb5f1ec..e0f5ab426364 100644
--- a/nix/getBins/tests.nix
+++ b/nix/getBins/tests.nix
@@ -5,11 +5,11 @@ let
   drv2 = writeScriptBin "goodbye" "tschau";
 
   bins = getBins drv [
-            "hello"
-            { use = "hello"; as = "also-hello"; }
-          ]
-      // getBins drv2 [ "goodbye" ]
-      ;
+    "hello"
+    { use = "hello"; as = "also-hello"; }
+  ]
+  // getBins drv2 [ "goodbye" ]
+  ;
 
   simple = it "path is equal to the executable name" [
     (assertEq "path"
@@ -33,8 +33,8 @@ let
   ];
 
 in
-  runTestsuite "getBins" [
-    simple
-    useAs
-    secondDrv
-  ]
+runTestsuite "getBins" [
+  simple
+  useAs
+  secondDrv
+]
diff --git a/nix/mergePatch/default.nix b/nix/mergePatch/default.nix
index 0f80b93d4c65..d56106925a65 100644
--- a/nix/mergePatch/default.nix
+++ b/nix/mergePatch/default.nix
@@ -8,31 +8,31 @@
   For example, given the following original document:
 
   {
-    a = "b";
-    c = {
+  a = "b";
+  c = {
       d = "e";
       f = "g";
-    }
+  }
   }
 
   Changing the value of `a` and removing `f` can be achieved by merging the patch
 
   {
-    a = "z";
-    c.f = null;
+  a = "z";
+  c.f = null;
   }
 
   which results in
 
   {
-    a = "z";
-    c = {
+  a = "z";
+  c = {
       d = "e";
-    };
+  };
   }
 
   Pseudo-code:
-    define MergePatch(Target, Patch):
+  define MergePatch(Target, Patch):
       if Patch is an Object:
         if Target is not an Object:
           Target = {} # Ignore the contents and set it to an empty Object
@@ -55,19 +55,19 @@ let
   mergePatch = target: patch:
     if lib.isAttrs patch
     then
-      let target' = if lib.isAttrs target then target else {};
+      let target' = if lib.isAttrs target then target else { };
       in foldlAttrs
-          (acc: patchEl:
-            if patchEl.value == null
-            then removeAttrs acc [ patchEl.name ]
-            else acc // {
-              ${patchEl.name} =
-                mergePatch
-                  (acc.${patchEl.name} or "unnused")
-                  patchEl.value;
-            })
-          target'
-          patch
+        (acc: patchEl:
+          if patchEl.value == null
+          then removeAttrs acc [ patchEl.name ]
+          else acc // {
+            ${patchEl.name} =
+              mergePatch
+                (acc.${patchEl.name} or "unused")
+                patchEl.value;
+          })
+        target'
+        patch
     else patch;
 
   inherit (depot.nix.runTestsuite)
@@ -93,46 +93,49 @@ let
       };
       emptyPatch = it "the empty patch returns the original target" [
         (assertEq "id"
-          (mergePatch testTarget {})
+          (mergePatch testTarget { })
           testTarget)
       ];
       nonAttrs = it "one side is a non-attrset value" [
         (assertEq "target is a value means the value is replaced by the patch"
           (mergePatch 42 testPatch)
-          (mergePatch {} testPatch))
+          (mergePatch { } testPatch))
         (assertEq "patch is a value means it replaces target alltogether"
           (mergePatch testTarget 42)
           42)
       ];
       rfcExamples = it "the examples from the RFC" [
         (assertEq "a subset is deleted and overwritten"
-          (mergePatch testTarget testPatch) {
+          (mergePatch testTarget testPatch)
+          {
             a = "z";
             c = {
               d = "e";
             };
           })
         (assertEq "a more complicated example from the example section"
-          (mergePatch {
-            title = "Goodbye!";
+          (mergePatch
+            {
+              title = "Goodbye!";
               author = {
                 givenName = "John";
                 familyName = "Doe";
               };
-            tags = [ "example" "sample" ];
-            content = "This will be unchanged";
-          } {
-            title = "Hello!";
-            phoneNumber = "+01-123-456-7890";
-            author.familyName = null;
-            tags = [ "example" ];
-          })
+              tags = [ "example" "sample" ];
+              content = "This will be unchanged";
+            }
+            {
+              title = "Hello!";
+              phoneNumber = "+01-123-456-7890";
+              author.familyName = null;
+              tags = [ "example" ];
+            })
           {
             title = "Hello!";
             phoneNumber = "+01-123-456-7890";
-              author = {
-                givenName = "John";
-              };
+            author = {
+              givenName = "John";
+            };
             tags = [ "example" ];
             content = "This will be unchanged";
           })
@@ -144,42 +147,45 @@ let
             (assertEq "test number ${toString index}"
               (mergePatch target patch)
               res);
-          in it "the test suite from the RFC" [
-              (r 1  {"a" = "b";}       {"a" = "c";}       {"a" = "c";})
-              (r 2  {"a" = "b";}       {"b" = "c";}       {"a" = "b"; "b" = "c";})
-              (r 3  {"a" = "b";}       {"a" = null;}      {})
-              (r 4  {"a" = "b"; "b" = "c";}
-                    {"a" = null;}
-                    {"b" = "c";})
-              (r 5  {"a" = ["b"];}     {"a" = "c";}       {"a" = "c";})
-              (r 6  {"a" = "c";}       {"a" = ["b"];}     {"a" = ["b"];})
-              (r 7  {"a" = {"b" = "c";}; }
-                    {"a" = {"b" = "d"; "c" = null;};}
-                    {"a" = {"b" = "d";};})
-              (r 8  {"a" = [{"b" = "c";}];}
-                    {"a" = [1];}
-                    {"a" = [1];})
-              (r 9  ["a" "b"]          ["c" "d"]       ["c" "d"])
-              (r 10 {"a" = "b";}       ["c"]           ["c"])
-              (r 11 {"a" = "foo";}     null            null)
-              (r 12 {"a" = "foo";}     "bar"           "bar")
-              (r 13 {"e" = null;}      {"a" = 1;}      {"e" = null; "a" = 1;})
-              (r 14 [1 2]
-                    {"a" = "b"; "c" = null;}
-                    {"a" = "b";})
-              (r 15 {}
-                {"a" = {"bb" = {"ccc" = null;};};}
-                {"a" = {"bb" = {};};})
-            ];
-
-    in runTestsuite "mergePatch" [
+        in
+        it "the test suite from the RFC" [
+          (r 1 { "a" = "b"; } { "a" = "c"; } { "a" = "c"; })
+          (r 2 { "a" = "b"; } { "b" = "c"; } { "a" = "b"; "b" = "c"; })
+          (r 3 { "a" = "b"; } { "a" = null; } { })
+          (r 4 { "a" = "b"; "b" = "c"; }
+            { "a" = null; }
+            { "b" = "c"; })
+          (r 5 { "a" = [ "b" ]; } { "a" = "c"; } { "a" = "c"; })
+          (r 6 { "a" = "c"; } { "a" = [ "b" ]; } { "a" = [ "b" ]; })
+          (r 7 { "a" = { "b" = "c"; }; }
+            { "a" = { "b" = "d"; "c" = null; }; }
+            { "a" = { "b" = "d"; }; })
+          (r 8 { "a" = [{ "b" = "c"; }]; }
+            { "a" = [ 1 ]; }
+            { "a" = [ 1 ]; })
+          (r 9 [ "a" "b" ] [ "c" "d" ] [ "c" "d" ])
+          (r 10 { "a" = "b"; } [ "c" ] [ "c" ])
+          (r 11 { "a" = "foo"; } null null)
+          (r 12 { "a" = "foo"; } "bar" "bar")
+          (r 13 { "e" = null; } { "a" = 1; } { "e" = null; "a" = 1; })
+          (r 14 [ 1 2 ]
+            { "a" = "b"; "c" = null; }
+            { "a" = "b"; })
+          (r 15 { }
+            { "a" = { "bb" = { "ccc" = null; }; }; }
+            { "a" = { "bb" = { }; }; })
+        ];
+
+    in
+    runTestsuite "mergePatch" [
       emptyPatch
       nonAttrs
       rfcExamples
       rfcTests
     ];
 
-in {
+in
+{
   __functor = _: mergePatch;
 
   inherit tests;
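
A worked example of the RFC 7386 merge semantics documented at the top of this file, reusing the target/patch pair from that comment:

  depot.nix.mergePatch
    { a = "b"; c = { d = "e"; f = "g"; }; }
    { a = "z"; c.f = null; }
  # => { a = "z"; c = { d = "e"; }; }
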
diff --git a/nix/netstring/attrsToKeyValList.nix b/nix/netstring/attrsToKeyValList.nix
index 2805d0fbcef6..c854b5695502 100644
--- a/nix/netstring/attrsToKeyValList.nix
+++ b/nix/netstring/attrsToKeyValList.nix
@@ -28,6 +28,6 @@ attrs:
 lib.concatStrings
   (lib.mapAttrsToList
     (k: v: depot.nix.netstring.fromString
-       ( depot.nix.netstring.fromString k
-       + depot.nix.netstring.fromString v))
+      (depot.nix.netstring.fromString k
+        + depot.nix.netstring.fromString v))
     attrs)
diff --git a/nix/nint/default.nix b/nix/nint/default.nix
index 5cf83d15d6f3..0087fc041603 100644
--- a/nix/nint/default.nix
+++ b/nix/nint/default.nix
@@ -6,9 +6,11 @@ let
     ;
 in
 
-  rustSimpleBin {
-    name = "nint";
-    dependencies = [
-      depot.third_party.rust-crates.serde_json
-    ];
-  } (builtins.readFile ./nint.rs)
+rustSimpleBin
+{
+  name = "nint";
+  dependencies = [
+    depot.third_party.rust-crates.serde_json
+  ];
+}
+  (builtins.readFile ./nint.rs)
diff --git a/nix/readTree/default.nix b/nix/readTree/default.nix
index 259f2f2fbfcd..0c59c890d4fa 100644
--- a/nix/readTree/default.nix
+++ b/nix/readTree/default.nix
@@ -43,10 +43,13 @@ let
       children = readDir path;
       isVisible = f: f == ".skip-subtree" || (substring 0 1 f) != ".";
       names = filter isVisible (attrNames children);
-    in listToAttrs (map (name: {
-      inherit name;
-      value = children.${name};
-    }) names);
+    in
+    listToAttrs (map
+      (name: {
+        inherit name;
+        value = children.${name};
+      })
+      names);
 
   # Create a mark containing the location of this attribute and
   # a list of all child attribute names added by readTree.
@@ -57,12 +60,13 @@ let
 
   # Import a file and enforce our calling convention
   importFile = args: scopedArgs: path: parts: filter:
-  let
-      importedFile = if scopedArgs != {}
-                     then builtins.scopedImport scopedArgs path
-                     else import path;
+    let
+      importedFile =
+        if scopedArgs != { }
+        then builtins.scopedImport scopedArgs path
+        else import path;
       pathType = builtins.typeOf importedFile;
-  in
+    in
     if pathType != "lambda"
     then builtins.throw "readTree: trying to import ${toString path}, but it’s a ${pathType}, you need to make it a function like { depot, pkgs, ... }"
     else importedFile (filter parts (argsWithPath args parts));
@@ -76,8 +80,9 @@ let
       dir = readDirVisible initPath;
       joinChild = c: initPath + ("/" + c);
 
-      self = if rootDir
-        then { __readTree = []; }
+      self =
+        if rootDir
+        then { __readTree = [ ]; }
         else importFile args scopedArgs initPath parts argsFilter;
 
       # Import subdirectories of the current one, unless the special
@@ -88,33 +93,41 @@ let
       # should be ignored, but its content is not inspected by
       # readTree
       filterDir = f: dir."${f}" == "directory";
-      children = if hasAttr ".skip-subtree" dir then [] else map (c: {
-        name = c;
-        value = readTree {
-          inherit argsFilter scopedArgs;
-          args = args;
-          initPath = (joinChild c);
-          rootDir = false;
-          parts = (parts ++ [ c ]);
-        };
-      }) (filter filterDir (attrNames dir));
+      children = if hasAttr ".skip-subtree" dir then [ ] else
+      map
+        (c: {
+          name = c;
+          value = readTree {
+            inherit argsFilter scopedArgs;
+            args = args;
+            initPath = (joinChild c);
+            rootDir = false;
+            parts = (parts ++ [ c ]);
+          };
+        })
+        (filter filterDir (attrNames dir));
 
       # Import Nix files
-      nixFiles = if hasAttr ".skip-subtree" dir then []
+      nixFiles =
+        if hasAttr ".skip-subtree" dir then [ ]
         else filter (f: f != null) (map nixFileName (attrNames dir));
-      nixChildren = map (c: let
-        p = joinChild (c + ".nix");
-        childParts = parts ++ [ c ];
-        imported = importFile args scopedArgs p childParts argsFilter;
-      in {
-        name = c;
-        value =
-          if isAttrs imported
-          then imported // marker childParts {}
-          else imported;
-      }) nixFiles;
-
-      nodeValue = if dir ? "default.nix" then self else {};
+      nixChildren = map
+        (c:
+          let
+            p = joinChild (c + ".nix");
+            childParts = parts ++ [ c ];
+            imported = importFile args scopedArgs p childParts argsFilter;
+          in
+          {
+            name = c;
+            value =
+              if isAttrs imported
+              then imported // marker childParts { }
+              else imported;
+          })
+        nixFiles;
+
+      nodeValue = if dir ? "default.nix" then self else { };
 
       allChildren = listToAttrs (
         if dir ? "default.nix"
@@ -123,9 +136,9 @@ let
       );
 
     in
-      if isAttrs nodeValue
-      then nodeValue // allChildren // (marker parts allChildren)
-      else nodeValue;
+    if isAttrs nodeValue
+    then nodeValue // allChildren // (marker parts allChildren)
+    else nodeValue;
 
   # Function which can be used to find all readTree targets within an
   # attribute set.
@@ -143,40 +156,42 @@ let
   #             should be included in the build.
   gather = eligible: node:
     if node ? __readTree then
-      # Include the node itself if it is eligible.
-      (if eligible node then [ node ] else [])
+    # Include the node itself if it is eligible.
+      (if eligible node then [ node ] else [ ])
       # Include eligible children of the node
       ++ concatMap (gather eligible) (map (attr: node."${attr}") node.__readTreeChildren)
       # Include specified sub-targets of the node
       ++ filter eligible (map
-           (k: (node."${k}" or {}) // {
-             # Keep the same tree location, but explicitly mark this
-             # node as a subtarget.
-             __readTree = node.__readTree;
-             __readTreeChildren = [];
-             __subtarget = k;
-           })
-           (node.meta.targets or []))
-    else [];
+        (k: (node."${k}" or { }) // {
+          # Keep the same tree location, but explicitly mark this
+          # node as a subtarget.
+          __readTree = node.__readTree;
+          __readTreeChildren = [ ];
+          __subtarget = k;
+        })
+        (node.meta.targets or [ ]))
+    else [ ];
 
   # Determine whether a given value is a derivation.
   # Copied from nixpkgs/lib for cases where lib is not available yet.
   isDerivation = x: isAttrs x && x ? type && x.type == "derivation";
-in {
+in
+{
   inherit gather;
 
   __functor = _:
     { path
     , args
     , filter ? (_parts: x: x)
-    , scopedArgs ? {} }:
-      readTree {
-        inherit args scopedArgs;
-        argsFilter = filter;
-        initPath = path;
-        rootDir = true;
-        parts = [];
-      };
+    , scopedArgs ? { }
+    }:
+    readTree {
+      inherit args scopedArgs;
+      argsFilter = filter;
+      initPath = path;
+      rootDir = true;
+      parts = [ ];
+    };
 
   # In addition to readTree itself, some functionality is exposed that
   # is useful for users of readTree.
@@ -193,7 +208,7 @@ in {
   #               which should be able to access the restricted folder.
   #
   #   reason: Textual explanation for the restriction (included in errors)
-  restrictFolder = { folder, exceptions ? [], reason }: parts: args:
+  restrictFolder = { folder, exceptions ? [ ], reason }: parts: args:
     if (elemAt parts 0) == folder || elem parts exceptions
     then args
     else args // {
@@ -224,8 +239,8 @@ in {
   drvTargets = attrs: attrs // {
     meta = {
       targets = builtins.filter
-      (x: isDerivation attrs."${x}")
-      (builtins.attrNames attrs);
-    } // (attrs.meta or {});
+        (x: isDerivation attrs."${x}")
+        (builtins.attrNames attrs);
+    } // (attrs.meta or { });
   };
 }
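
drvTargets above fills in meta.targets with every attribute name whose value is a derivation, marking them as CI subtargets. A hedged sketch of its effect, assuming readTree is exposed as depot.nix.readTree (as in the tests below) and with someDrv standing in for any derivation:

  depot.nix.readTree.drvTargets { foo = someDrv; bar = "not a derivation"; }
  # => { foo = someDrv; bar = "not a derivation"; meta.targets = [ "foo" ]; }
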
diff --git a/nix/readTree/tests/default.nix b/nix/readTree/tests/default.nix
index 3354a4fe5e75..fcca141714a8 100644
--- a/nix/readTree/tests/default.nix
+++ b/nix/readTree/tests/default.nix
@@ -10,13 +10,13 @@ let
 
   tree-ex = depot.nix.readTree {
     path = ./test-example;
-    args = {};
+    args = { };
   };
 
   example = it "corresponds to the README example" [
     (assertEq "third_party attrset"
       (lib.isAttrs tree-ex.third_party
-      && (! lib.isDerivation tree-ex.third_party))
+        && (! lib.isDerivation tree-ex.third_party))
       true)
     (assertEq "third_party attrset other attribute"
       tree-ex.third_party.favouriteColour
@@ -37,7 +37,7 @@ let
 
   tree-tl = depot.nix.readTree {
     path = ./test-tree-traversal;
-    args = {};
+    args = { };
   };
 
   traversal-logic = it "corresponds to the traversal logic in the README" [
@@ -82,7 +82,7 @@ let
       "Picked up through the drv")
     (assertEq "default.nix drv is not changed by readTree"
       tree-tl.default-nix.can-be-drv
-      (import ./test-tree-traversal/default-nix/can-be-drv/default.nix {}))
+      (import ./test-tree-traversal/default-nix/can-be-drv/default.nix { }))
   ];
 
   # these each call readTree themselves because the throws have to happen inside assertThrows
@@ -90,7 +90,7 @@ let
     (assertThrows "this file is not a function"
       (depot.nix.readTree {
         path = ./test-wrong-not-a-function;
-        args = {};
+        args = { };
       }).not-a-function)
     # can’t test for that, assertThrows can’t catch this error
     # (assertThrows "this file is a function but doesn’t have dots"
@@ -99,12 +99,13 @@ let
 
   read-markers = depot.nix.readTree {
     path = ./test-marker;
-    args = {};
+    args = { };
   };
 
   assertMarkerByPath = path:
     assertEq "${lib.concatStringsSep "." path} is marked correctly"
-      (lib.getAttrFromPath path read-markers).__readTree path;
+      (lib.getAttrFromPath path read-markers).__readTree
+      path;
 
   markers = it "marks nodes correctly" [
     (assertMarkerByPath [ "directory-marked" ])
@@ -119,7 +120,8 @@ let
       read-markers.directory-marked.nested.__readTreeChildren [ ])
   ];
 
-in runTestsuite "readTree" [
+in
+runTestsuite "readTree" [
   example
   traversal-logic
   wrong
diff --git a/nix/readTree/tests/test-marker/directory-marked/default.nix b/nix/readTree/tests/test-marker/directory-marked/default.nix
index a3f961128e79..5bd3e36b5397 100644
--- a/nix/readTree/tests/test-marker/directory-marked/default.nix
+++ b/nix/readTree/tests/test-marker/directory-marked/default.nix
@@ -1,3 +1,3 @@
 { ... }:
 
-{}
+{ }
diff --git a/nix/readTree/tests/test-marker/directory-marked/nested/default.nix b/nix/readTree/tests/test-marker/directory-marked/nested/default.nix
index a3f961128e79..5bd3e36b5397 100644
--- a/nix/readTree/tests/test-marker/directory-marked/nested/default.nix
+++ b/nix/readTree/tests/test-marker/directory-marked/nested/default.nix
@@ -1,3 +1,3 @@
 { ... }:
 
-{}
+{ }
diff --git a/nix/readTree/tests/test-marker/file-children/one.nix b/nix/readTree/tests/test-marker/file-children/one.nix
index a3f961128e79..5bd3e36b5397 100644
--- a/nix/readTree/tests/test-marker/file-children/one.nix
+++ b/nix/readTree/tests/test-marker/file-children/one.nix
@@ -1,3 +1,3 @@
 { ... }:
 
-{}
+{ }
diff --git a/nix/readTree/tests/test-marker/file-children/two.nix b/nix/readTree/tests/test-marker/file-children/two.nix
index a3f961128e79..5bd3e36b5397 100644
--- a/nix/readTree/tests/test-marker/file-children/two.nix
+++ b/nix/readTree/tests/test-marker/file-children/two.nix
@@ -1,3 +1,3 @@
 { ... }:
 
-{}
+{ }
diff --git a/nix/renderMarkdown/default.nix b/nix/renderMarkdown/default.nix
index 3e5a59954b20..8d6b31cfccca 100644
--- a/nix/renderMarkdown/default.nix
+++ b/nix/renderMarkdown/default.nix
@@ -3,6 +3,6 @@
 
 with depot.nix.yants;
 
-defun [ path drv ] (file: pkgs.runCommandNoCC "${file}.rendered.html" {} ''
+defun [ path drv ] (file: pkgs.runCommandNoCC "${file}.rendered.html" { } ''
   cat ${file} | ${depot.tools.cheddar}/bin/cheddar --about-filter ${file} > $out
 '')
diff --git a/nix/runExecline/default.nix b/nix/runExecline/default.nix
index fd92203d0146..76fffdce7b0d 100644
--- a/nix/runExecline/default.nix
+++ b/nix/runExecline/default.nix
@@ -9,7 +9,7 @@ let
   runExeclineLocal = name: args: execline:
     runExecline name
       (args // {
-        derivationArgs = args.derivationArgs or {} // {
+        derivationArgs = args.derivationArgs or { } // {
           preferLocalBuild = true;
           allowSubstitutes = false;
         };
@@ -23,7 +23,8 @@ let
     inherit pkgs;
   };
 
-in {
+in
+{
   __functor = _: runExecline;
   local = runExeclineLocal;
   inherit tests;
diff --git a/nix/runExecline/runExecline.nix b/nix/runExecline/runExecline.nix
index 0e45080735bb..23b9a6330370 100644
--- a/nix/runExecline/runExecline.nix
+++ b/nix/runExecline/runExecline.nix
@@ -35,32 +35,32 @@
 
 let
   bins = getBins pkgs.execline [
-           "execlineb"
-           { use = "if"; as = "execlineIf"; }
-           "redirfd"
-           "importas"
-           "exec"
-         ]
-      // getBins pkgs.s6-portable-utils [
-           "s6-cat"
-           "s6-grep"
-           "s6-touch"
-           "s6-test"
-           "s6-chmod"
-         ];
+    "execlineb"
+    { use = "if"; as = "execlineIf"; }
+    "redirfd"
+    "importas"
+    "exec"
+  ]
+  // getBins pkgs.s6-portable-utils [
+    "s6-cat"
+    "s6-grep"
+    "s6-touch"
+    "s6-test"
+    "s6-chmod"
+  ];
 
 in
 
 # TODO: move name into the attrset
 name:
 {
-# a string to pass as stdin to the execline script
-stdin ? ""
-# a program wrapping the acutal execline invocation;
-# should be in Bernstein-chaining style
+  # a string to pass as stdin to the execline script
+  stdin ? ""
+  # a program wrapping the actual execline invocation;
+  # should be in Bernstein-chaining style
 , builderWrapper ? bins.exec
-# additional arguments to pass to the derivation
-, derivationArgs ? {}
+  # additional arguments to pass to the derivation
+, derivationArgs ? { }
 }:
 # the execline script as a nested list of strings,
 # representing the blocks;
@@ -90,33 +90,33 @@ derivation (derivationArgs // {
   passAsFile = [
     "_runExeclineScript"
     "_runExeclineStdin"
-  ] ++ derivationArgs.passAsFile or [];
+  ] ++ derivationArgs.passAsFile or [ ];
 
   # the default, exec acts as identity executable
   builder = builderWrapper;
 
   args = [
-    bins.importas            # import script file as $script
-    "-ui"                    # drop the envvar afterwards
-    "script"                 # substitution name
+    bins.importas # import script file as $script
+    "-ui" # drop the envvar afterwards
+    "script" # substitution name
     "_runExeclineScriptPath" # passed script file
 
-    bins.importas            # do the same for $stdin
+    bins.importas # do the same for $stdin
     "-ui"
     "stdin"
     "_runExeclineStdinPath"
 
-    bins.redirfd             # now we
-    "-r"                     # read the file
-    "0"                      # into the stdin of execlineb
-    "$stdin"                 # that was given via stdin
+    bins.redirfd # now we
+    "-r" # read the file
+    "0" # into the stdin of execlineb
+    "$stdin" # that was given via stdin
 
-    bins.execlineb           # the actual invocation
+    bins.execlineb # the actual invocation
     # TODO(Profpatsch): -S0 might not be enough for all use-cases;
     # a wrapper for the execlineb arguments (-P, -S, -s) should
     # then be added.
-    "-S0"                    # set $@ inside the execline script
-    "-W"                     # die on syntax error
-    "$script"                # substituted by importas
+    "-S0" # set $@ inside the execline script
+    "-W" # die on syntax error
+    "$script" # substituted by importas
   ];
 })
diff --git a/nix/runExecline/tests.nix b/nix/runExecline/tests.nix
index d2f5a1780c16..f82b544224ee 100644
--- a/nix/runExecline/tests.nix
+++ b/nix/runExecline/tests.nix
@@ -1,23 +1,29 @@
-{ stdenv, pkgs, runExecline, runExeclineLocal, getBins, writeScript
-# https://www.mail-archive.com/skaware@list.skarnet.org/msg01256.html
-, coreutils }:
+{ stdenv
+, pkgs
+, runExecline
+, runExeclineLocal
+, getBins
+, writeScript
+  # https://www.mail-archive.com/skaware@list.skarnet.org/msg01256.html
+, coreutils
+}:
 
 let
 
   bins = getBins coreutils [ "mv" ]
-      // getBins pkgs.execline [
-           "execlineb"
-           { use = "if"; as = "execlineIf"; }
-           "redirfd"
-           "importas"
-         ]
-      // getBins pkgs.s6-portable-utils [
-           "s6-chmod"
-           "s6-grep"
-           "s6-touch"
-           "s6-cat"
-           "s6-test"
-         ];
+    // getBins pkgs.execline [
+    "execlineb"
+    { use = "if"; as = "execlineIf"; }
+    "redirfd"
+    "importas"
+  ]
+    // getBins pkgs.s6-portable-utils [
+    "s6-chmod"
+    "s6-grep"
+    "s6-touch"
+    "s6-cat"
+    "s6-test"
+  ];
 
   # execline block of depth 1
   block = args: builtins.map (arg: " ${arg}") args ++ [ "" ];
@@ -31,49 +37,80 @@ let
     builder = bins.execlineIf;
     args =
       (block [
-        bins.redirfd "-r" "0" file   # read file to stdin
-        bins.s6-grep "-F" "-q" line   # and grep for the line
+        bins.redirfd
+        "-r"
+        "0"
+        file # read file to stdin
+        bins.s6-grep
+        "-F"
+        "-q"
+        line # and grep for the line
       ])
       ++ [
         # if the block succeeded, touch $out
-        bins.importas "-ui" "out" "out"
-        bins.s6-touch "$out"
+        bins.importas
+        "-ui"
+        "out"
+        "out"
+        bins.s6-touch
+        "$out"
       ];
     preferLocalBuild = true;
     allowSubstitutes = false;
   };
 
   # basic test that touches out
-  basic = runExeclineLocal "run-execline-test-basic" {
-  } [
-      "importas" "-ui" "out" "out"
-      "${bins.s6-touch}" "$out"
+  basic = runExeclineLocal "run-execline-test-basic"
+    { } [
+    "importas"
+    "-ui"
+    "out"
+    "out"
+    "${bins.s6-touch}"
+    "$out"
   ];
 
   # whether the stdin argument works as intended
-  stdin = fileHasLine "foo" (runExeclineLocal "run-execline-test-stdin" {
-    stdin = "foo\nbar\nfoo";
-  } [
-      "importas" "-ui" "out" "out"
-      # this pipes stdout of s6-cat to $out
-      # and s6-cat redirects from stdin to stdout
-      "redirfd" "-w" "1" "$out" bins.s6-cat
+  stdin = fileHasLine "foo" (runExeclineLocal "run-execline-test-stdin"
+    {
+      stdin = "foo\nbar\nfoo";
+    } [
+    "importas"
+    "-ui"
+    "out"
+    "out"
+    # this pipes stdout of s6-cat to $out
+    # and s6-cat redirects from stdin to stdout
+    "redirfd"
+    "-w"
+    "1"
+    "$out"
+    bins.s6-cat
   ]);
 
 
-  wrapWithVar = runExeclineLocal "run-execline-test-wrap-with-var" {
-    builderWrapper = writeScript "var-wrapper" ''
-      #!${bins.execlineb} -S0
-      export myvar myvalue $@
-    '';
-  } [
-    "importas" "-ui" "v" "myvar"
-    "if" [ bins.s6-test "myvalue" "=" "$v" ]
-      "importas" "out" "out"
-      bins.s6-touch "$out"
+  wrapWithVar = runExeclineLocal "run-execline-test-wrap-with-var"
+    {
+      builderWrapper = writeScript "var-wrapper" ''
+        #!${bins.execlineb} -S0
+        export myvar myvalue $@
+      '';
+    } [
+    "importas"
+    "-ui"
+    "v"
+    "myvar"
+    "if"
+    [ bins.s6-test "myvalue" "=" "$v" ]
+    "importas"
+    "out"
+    "out"
+    bins.s6-touch
+    "$out"
   ];
 
-in [
+in
+[
   basic
   stdin
   wrapWithVar
diff --git a/nix/runTestsuite/default.nix b/nix/runTestsuite/default.nix
index 9eb507099678..8b02ed86d8ea 100644
--- a/nix/runTestsuite/default.nix
+++ b/nix/runTestsuite/default.nix
@@ -38,11 +38,11 @@ let
     ;
 
   bins = depot.nix.getBins pkgs.coreutils [ "printf" ]
-      // depot.nix.getBins pkgs.s6-portable-utils [ "s6-touch" "s6-false" "s6-cat" ];
+    // depot.nix.getBins pkgs.s6-portable-utils [ "s6-touch" "s6-false" "s6-cat" ];
 
   # Returns true if the given expression throws when `deepSeq`-ed
   throws = expr:
-    !(builtins.tryEval (builtins.deepSeq expr {})).success;
+    !(builtins.tryEval (builtins.deepSeq expr { })).success;
 
   # rewrite the builtins.partition result
   # to use `ok` and `err` instead of `right` and `wrong`.
@@ -99,11 +99,12 @@ let
     (context: desc: res:
       if res
       then { yep = { test = desc; }; }
-      else { nope = {
-        test = desc;
-        inherit context;
-      };
-    });
+      else {
+        nope = {
+          test = desc;
+          inherit context;
+        };
+      });
 
   # assert that left and right values are equal
   assertEq = defun [ string any any AssertResult ]
@@ -111,7 +112,7 @@ let
       let
         context = { not-equal = { inherit left right; }; };
       in
-        assertBoolContext context desc (left == right));
+      assertBoolContext context desc (left == right));
 
   # assert that the expression throws when `deepSeq`-ed
   assertThrows = defun [ string any AssertResult ]
@@ -119,7 +120,7 @@ let
       let
         context = { should-throw = { inherit expr; }; };
       in
-        assertBoolContext context desc (throws expr));
+      assertBoolContext context desc (throws expr));
 
   # assert that the expression does not throw when `deepSeq`-ed
   assertDoesNotThrow = defun [ string any AssertResult ]
@@ -144,31 +145,50 @@ let
           yep = _: true;
           nope = _: false;
         };
-        res = partitionTests (it:
-          (partitionTests goodAss it.asserts).err == []
-        ) itResults;
-        prettyRes = lib.generators.toPretty {} res;
+        res = partitionTests
+          (it:
+            (partitionTests goodAss it.asserts).err == [ ]
+          )
+          itResults;
+        prettyRes = lib.generators.toPretty { } res;
       in
-        if res.err == []
-        then depot.nix.runExecline.local "testsuite-${name}-successful" {} [
-          "importas" "out" "out"
+      if res.err == [ ]
+      then
+        depot.nix.runExecline.local "testsuite-${name}-successful" { } [
+          "importas"
+          "out"
+          "out"
           # force derivation to rebuild if test case list changes
-          "ifelse" [ bins.s6-false ] [
-            bins.printf "" (builtins.hashString "sha512" prettyRes)
+          "ifelse"
+          [ bins.s6-false ]
+          [
+            bins.printf
+            ""
+            (builtins.hashString "sha512" prettyRes)
           ]
-          "if" [ bins.printf "%s\n" "testsuite ${name} successful!" ]
-          bins.s6-touch "$out"
+          "if"
+          [ bins.printf "%s\n" "testsuite ${name} successful!" ]
+          bins.s6-touch
+          "$out"
         ]
-        else depot.nix.runExecline.local "testsuite-${name}-failed" {
-          stdin = prettyRes + "\n";
-        } [
-          "importas" "out" "out"
-          "if" [ bins.printf "%s\n" "testsuite ${name} failed!" ]
-          "if" [ bins.s6-cat ]
-          "exit" "1"
+      else
+        depot.nix.runExecline.local "testsuite-${name}-failed"
+          {
+            stdin = prettyRes + "\n";
+          } [
+          "importas"
+          "out"
+          "out"
+          "if"
+          [ bins.printf "%s\n" "testsuite ${name} failed!" ]
+          "if"
+          [ bins.s6-cat ]
+          "exit"
+          "1"
         ]);
 
-in {
+in
+{
   inherit
     assertEq
     assertThrows
diff --git a/nix/sparseTree/default.nix b/nix/sparseTree/default.nix
index 5184f33d5c46..16fc9b6103f9 100644
--- a/nix/sparseTree/default.nix
+++ b/nix/sparseTree/default.nix
@@ -45,14 +45,16 @@ let
     let
       withLeading = p: if builtins.substring 0 1 p == "/" then p else "/" + p;
       fullPath =
-        /**/ if builtins.isPath path then path
+        /**/
+        if builtins.isPath path then path
         else if builtins.isString path then (root + withLeading path)
         else builtins.throw "Unsupported path type ${builtins.typeOf path}";
       strPath = toString fullPath;
       contextPath = "${fullPath}";
       belowRoot = builtins.substring rootLength (-1) strPath;
       prefix = builtins.substring 0 rootLength strPath;
-    in assert toString root == prefix; {
+    in
+    assert toString root == prefix; {
       src = contextPath;
       dst = belowRoot;
     };
@@ -61,10 +63,12 @@ let
 in
 
 # TODO(sterni): teach readTree to also read symlinked directories,
-# so we ln -sT instead of cp -aT.
-pkgs.runCommandNoCC "sparse-${builtins.baseNameOf root}" {} (
-  lib.concatMapStrings ({ src, dst }: ''
-    mkdir -p "$(dirname "$out${dst}")"
-    cp -aT --reflink=auto "${src}" "$out${dst}"
-  '') symlinks
+  # so we ln -sT instead of cp -aT.
+pkgs.runCommandNoCC "sparse-${builtins.baseNameOf root}" { } (
+  lib.concatMapStrings
+    ({ src, dst }: ''
+      mkdir -p "$(dirname "$out${dst}")"
+      cp -aT --reflink=auto "${src}" "$out${dst}"
+    '')
+    symlinks
 )
diff --git a/nix/tag/default.nix b/nix/tag/default.nix
index 9c55e6263b39..0038404460b1 100644
--- a/nix/tag/default.nix
+++ b/nix/tag/default.nix
@@ -4,22 +4,24 @@ let
   # if so sets `isTag` to `true` and sets the name and value.
   # If not, sets `isTag` to `false` and sets `errmsg`.
   verifyTag = tag:
-    let cases = builtins.attrNames tag;
-        len = builtins.length cases;
+    let
+      cases = builtins.attrNames tag;
+      len = builtins.length cases;
     in
     if builtins.length cases == 1
-    then let name = builtins.head cases; in {
-      isTag = true;
-      name = name;
-      val = tag.${name};
-      errmsg = null;
-    }
+    then
+      let name = builtins.head cases; in {
+        isTag = true;
+        name = name;
+        val = tag.${name};
+        errmsg = null;
+      }
     else {
       isTag = false;
       errmsg =
-        ( "match: an instance of a sum is an attrset "
-        + "with exactly one element, yours had ${toString len}"
-        + ", namely: ${lib.generators.toPretty {} cases}" );
+        ("match: an instance of a sum is an attrset "
+          + "with exactly one element, yours had ${toString len}"
+          + ", namely: ${lib.generators.toPretty {} cases}");
       name = null;
       val = null;
     };
@@ -63,21 +65,22 @@ let
   #   ] 1
   #   => { smol = 1; }
   discrDef = defTag: fs: v:
-    let res = lib.findFirst
-                (t: t.val v)
-                null
-                (map assertIsTag fs);
+    let
+      res = lib.findFirst
+        (t: t.val v)
+        null
+        (map assertIsTag fs);
     in
-      if res == null
-      then { ${defTag} = v; }
-      else { ${res.name} = v; };
+    if res == null
+    then { ${defTag} = v; }
+    else { ${res.name} = v; };
 
   # Like `discrDef`, but fail if there is no match.
   discr = fs: v:
     let res = discrDef null fs v; in
-      assert lib.assertMsg (res != null)
-        "tag.discr: No predicate found that matches ${lib.generators.toPretty {} v}";
-      res;
+    assert lib.assertMsg (res != null)
+      "tag.discr: No predicate found that matches ${lib.generators.toPretty {} v}";
+    res;
 
   # The canonical pattern matching primitive.
   # A sum value is an attribute set with one element,
@@ -104,17 +107,17 @@ let
   match = sum: matcher:
     let cases = builtins.attrNames sum;
     in assert
-      let len = builtins.length cases; in
-        lib.assertMsg (len == 1)
-          ( "match: an instance of a sum is an attrset "
-          + "with exactly one element, yours had ${toString len}"
-          + ", namely: ${lib.generators.toPretty {} cases}" );
+    let len = builtins.length cases; in
+    lib.assertMsg (len == 1)
+      ("match: an instance of a sum is an attrset "
+        + "with exactly one element, yours had ${toString len}"
+        + ", namely: ${lib.generators.toPretty {} cases}");
     let case = builtins.head cases;
     in assert
-        lib.assertMsg (matcher ? ${case})
-        ( "match: \"${case}\" is not a valid case of this sum, "
+    lib.assertMsg (matcher ? ${case})
+      ("match: \"${case}\" is not a valid case of this sum, "
         + "the matcher accepts: ${lib.generators.toPretty {}
-            (builtins.attrNames matcher)}" );
+            (builtins.attrNames matcher)}");
     matcher.${case} sum.${case};
 
   # A `match` with the arguments flipped.
@@ -148,15 +151,16 @@ let
       ;
   };
 
-in {
-   inherit
-     verifyTag
-     tagName
-     tagValue
-     discr
-     discrDef
-     match
-     matchLam
-     tests
-     ;
+in
+{
+  inherit
+    verifyTag
+    tagName
+    tagValue
+    discr
+    discrDef
+    match
+    matchLam
+    tests
+    ;
 }
diff --git a/nix/tag/tests.nix b/nix/tag/tests.nix
index 8c9c73807472..bcc42c758a6c 100644
--- a/nix/tag/tests.nix
+++ b/nix/tag/tests.nix
@@ -17,7 +17,7 @@ let
         errmsg = null;
       })
     (assertEq "is not Tag"
-      (removeAttrs (verifyTag { foo = "bar"; baz = 42; }) ["errmsg"])
+      (removeAttrs (verifyTag { foo = "bar"; baz = 42; }) [ "errmsg" ])
       {
         isTag = false;
         name = null;
@@ -41,7 +41,8 @@ let
       (discr [
         { bool = lib.isBool; }
         { int = lib.isInt; }
-      ] true)
+      ]
+        true)
       { bool = true; })
     (assertEq "fallback to default"
       (discrDef "def" [
@@ -53,19 +54,24 @@ let
 
   match-test = it "can match things" [
     (assertEq "match example"
-      (let
-        success = { res = 42; };
-        failure = { err = "no answer"; };
-        matcher = {
-          res = i: i + 1;
-          err = _: 0;
-        };
-      in {
-        one = match success matcher;
-        two = match failure matcher;
+      (
+        let
+          success = { res = 42; };
+          failure = { err = "no answer"; };
+          matcher = {
+            res = i: i + 1;
+            err = _: 0;
+          };
+        in
+        {
+          one = match success matcher;
+          two = match failure matcher;
+        }
+      )
+      {
+        one = 43;
+        two = 0;
       })
-      { one = 43;
-        two = 0; })
     (assertEq "matchLam & pipe"
       (lib.pipe { foo = 42; } [
         (matchLam {
@@ -81,8 +87,8 @@ let
   ];
 
 in
-  runTestsuite "tag" [
-    isTag-test
-    discr-test
-    match-test
-  ]
+runTestsuite "tag" [
+  isTag-test
+  discr-test
+  match-test
+]
diff --git a/nix/tailscale/default.nix b/nix/tailscale/default.nix
index 8d6a0f661b92..363f717db6de 100644
--- a/nix/tailscale/default.nix
+++ b/nix/tailscale/default.nix
@@ -27,4 +27,5 @@ let
     # Actual ACL entries
     ACLs = list acl;
   };
-in config: pkgs.writeText "tailscale-acl.json" (toJSON (aclConfig config))
+in
+config: pkgs.writeText "tailscale-acl.json" (toJSON (aclConfig config))
diff --git a/nix/utils/default.nix b/nix/utils/default.nix
index 258e372a2a2d..cabea5bbeeb3 100644
--- a/nix/utils/default.nix
+++ b/nix/utils/default.nix
@@ -34,14 +34,14 @@ let
         basename = builtins.unsafeDiscardStringContext
           (builtins.baseNameOf strPath);
       in
-        # If p is a direct child of storeDir, we need to remove
+      # If p is a direct child of storeDir, we need to remove
         # the leading hash as well to make sure that:
         # `storePathName drv == storePathName (toString drv)`.
-        if noStoreDir == basename
-        then builtins.substring 33 (-1) basename
-        else basename
+      if noStoreDir == basename
+      then builtins.substring 33 (-1) basename
+      else basename
     else builtins.throw "Don't know how to get (base)name of "
-      + lib.generators.toPretty {} p;
+      + lib.generators.toPretty { } p;
 
   /* Query the type of a path exposing the same information as would be returned by
      `builtins.readDir`, but for a single, specific target path.
@@ -106,7 +106,7 @@ let
       # We need to call toString to prevent unsafeDiscardStringContext
       # from importing a path into store which messes with base- and
       # dirname of course.
-      path'= builtins.unsafeDiscardStringContext (toString path);
+      path' = builtins.unsafeDiscardStringContext (toString path);
       # To read the containing directory we absolutely need
       # to keep the string context, otherwise a derivation
       # would not be realized before our check (at eval time)
@@ -120,20 +120,22 @@ let
       # directory. If not, either the target doesn't exist or is a regular file.
       # TODO(sterni): is there a way to check reliably if the symlink target exists?
       isSymlinkDir = builtins.pathExists (path' + "/.");
-    in {
+    in
+    {
       ${thisPathType} =
-        /**/ if thisPathType != "symlink" then true
-        else if isSymlinkDir              then "directory"
-        else                                   "regular-or-missing";
+        /**/
+        if thisPathType != "symlink" then true
+        else if isSymlinkDir then "directory"
+        else "regular-or-missing";
     };
 
   pathType' = path:
     let
       p = pathType path;
     in
-      if p ? missing
-      then builtins.throw "${lib.generators.toPretty {} path} does not exist"
-      else p;
+    if p ? missing
+    then builtins.throw "${lib.generators.toPretty {} path} does not exist"
+    else p;
 
   /* Check whether the given path is a directory.
      Throws if the path in question doesn't exist.
@@ -151,9 +153,11 @@ let
 
      Type: path(-like) -> bool
   */
-  realPathIsDirectory = path: let
-    pt = pathType' path;
-  in pt ? directory || pt.symlink or null == "directory";
+  realPathIsDirectory = path:
+    let
+      pt = pathType' path;
+    in
+    pt ? directory || pt.symlink or null == "directory";
 
   /* Check whether the given path is a regular file.
      Throws if the path in question doesn't exist.
@@ -169,7 +173,8 @@ let
   */
   isSymlink = path: pathType' path ? symlink;
 
-in {
+in
+{
   inherit
     storePathName
     pathType
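
For reference, the tagged values produced by pathType above take one of the following shapes (derived from the final branch of the function; the paths themselves are illustrative):

  pathType ./a-directory        # => { directory = true; }
  pathType ./a-file.nix         # => { regular = true; }
  pathType ./link-to-directory  # => { symlink = "directory"; }
  pathType ./dangling-link      # => { symlink = "regular-or-missing"; }
  pathType ./does-not-exist     # => { missing = true; } (which pathType' turns into a throw)
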
diff --git a/nix/utils/tests/default.nix b/nix/utils/tests/default.nix
index 8a078684f3aa..52b7ca41d215 100644
--- a/nix/utils/tests/default.nix
+++ b/nix/utils/tests/default.nix
@@ -26,38 +26,53 @@ let
   pathPredicates = it "judges paths correctly" (lib.flatten [
     # isDirectory
     (assertUtilsPred "directory isDirectory"
-      (isDirectory ./directory) true)
+      (isDirectory ./directory)
+      true)
     (assertUtilsPred "symlink not isDirectory"
-      (isDirectory ./symlink-directory) false)
+      (isDirectory ./symlink-directory)
+      false)
     (assertUtilsPred "file not isDirectory"
-      (isDirectory ./directory/file) false)
+      (isDirectory ./directory/file)
+      false)
     # realPathIsDirectory
     (assertUtilsPred "directory realPathIsDirectory"
-      (realPathIsDirectory ./directory) true)
+      (realPathIsDirectory ./directory)
+      true)
     (assertUtilsPred "symlink to directory realPathIsDirectory"
-      (realPathIsDirectory ./symlink-directory) true)
+      (realPathIsDirectory ./symlink-directory)
+      true)
     (assertUtilsPred "realPathIsDirectory resolves chained symlinks"
-      (realPathIsDirectory ./symlink-symlink-directory) true)
+      (realPathIsDirectory ./symlink-symlink-directory)
+      true)
     # isRegularFile
     (assertUtilsPred "file isRegularFile"
-      (isRegularFile ./directory/file) true)
+      (isRegularFile ./directory/file)
+      true)
     (assertUtilsPred "symlink not isRegularFile"
-      (isRegularFile ./symlink-file) false)
+      (isRegularFile ./symlink-file)
+      false)
     (assertUtilsPred "directory not isRegularFile"
-      (isRegularFile ./directory) false)
+      (isRegularFile ./directory)
+      false)
     # isSymlink
     (assertUtilsPred "symlink to file isSymlink"
-      (isSymlink ./symlink-file) true)
+      (isSymlink ./symlink-file)
+      true)
     (assertUtilsPred "symlink to directory isSymlink"
-      (isSymlink ./symlink-directory) true)
+      (isSymlink ./symlink-directory)
+      true)
     (assertUtilsPred "symlink to symlink isSymlink"
-      (isSymlink ./symlink-symlink-file) true)
+      (isSymlink ./symlink-symlink-file)
+      true)
     (assertUtilsPred "symlink to missing file isSymlink"
-      (isSymlink ./missing) true)
+      (isSymlink ./missing)
+      true)
     (assertUtilsPred "directory not isSymlink"
-      (isSymlink ./directory) false)
+      (isSymlink ./directory)
+      false)
     (assertUtilsPred "file not isSymlink"
-      (isSymlink ./directory/file) false)
+      (isSymlink ./directory/file)
+      false)
     # missing files throw
     (assertThrows "isDirectory throws on missing file"
       (isDirectory ./does-not-exist))
@@ -89,15 +104,18 @@ let
 
   storePathNameTests = it "correctly gets the basename of a store path" [
     (assertEq "base name of a derivation"
-      (storePathName depot.tools.cheddar) depot.tools.cheddar.name)
+      (storePathName depot.tools.cheddar)
+      depot.tools.cheddar.name)
     (assertEq "base name of a store path string"
-      (storePathName cheddarStorePath) depot.tools.cheddar.name)
+      (storePathName cheddarStorePath)
+      depot.tools.cheddar.name)
     (assertEq "base name of a path within a store path"
       (storePathName "${cheddarStorePath}/bin/cheddar") "cheddar")
     (assertEq "base name of a path"
       (storePathName ../default.nix) "default.nix")
     (assertEq "base name of a cleanSourced path"
-      (storePathName cleanedSource) cleanedSource.name)
+      (storePathName cleanedSource)
+      cleanedSource.name)
   ];
 in
 
diff --git a/nix/writeElispBin/default.nix b/nix/writeElispBin/default.nix
index c116607e0aeb..3ea2da58aab2 100644
--- a/nix/writeElispBin/default.nix
+++ b/nix/writeElispBin/default.nix
@@ -1,6 +1,6 @@
 { depot, pkgs, ... }:
 
-{ name, src, deps ? (_: []), emacs ? pkgs.emacs27-nox }:
+{ name, src, deps ? (_: [ ]), emacs ? pkgs.emacs27-nox }:
 
 let
   inherit (pkgs) emacsPackages emacsPackagesGen;
@@ -8,11 +8,13 @@ let
 
   finalEmacs = (emacsPackagesGen emacs).emacsWithPackages deps;
 
-  srcFile = if isString src
+  srcFile =
+    if isString src
     then toFile "${name}.el" src
     else src;
 
-in depot.nix.writeScriptBin name ''
+in
+depot.nix.writeScriptBin name ''
   #!/bin/sh
   ${finalEmacs}/bin/emacs --batch --no-site-file --script ${srcFile} $@
 ''
diff --git a/nix/writeExecline/default.nix b/nix/writeExecline/default.nix
index 8626aa46080f..5169b01386ea 100644
--- a/nix/writeExecline/default.nix
+++ b/nix/writeExecline/default.nix
@@ -14,9 +14,10 @@ name:
   # "env": don’t substitute, set # and 0…n environment vaariables, where n=$#
   # "none": don’t substitute or set any positional arguments
   # "env-no-push": like "env", but bypass the push-phase. Not recommended.
-  argMode ? "var",
-  # Number of arguments to be substituted as variables (passed to "var"/"-s" or "var-full"/"-S"
-  readNArgs ? 0,
+  argMode ? "var"
+, # Number of arguments to be substituted as variables (passed to "var"/"-s" or "var-full"/"-S")
+  readNArgs ? 0
+,
 }:
 # Nested list of lists of commands.
 # Inner lists are translated to execline blocks.
@@ -24,7 +25,7 @@ argList:
 
 let
   env =
-    if      argMode == "var" then "s${toString readNArgs}"
+    if argMode == "var" then "s${toString readNArgs}"
     else if argMode == "var-full" then "S${toString readNArgs}"
     else if argMode == "env" then ""
     else if argMode == "none" then "P"
@@ -32,7 +33,7 @@ let
     else abort ''"${toString argMode}" is not a valid argMode, use one of "var", "var-full", "env", "none", "env-no-push".'';
 
 in
-  depot.nix.writeScript name ''
-    #!${pkgs.execline}/bin/execlineb -W${env}
-    ${depot.nix.escapeExecline argList}
-  ''
+depot.nix.writeScript name ''
+  #!${pkgs.execline}/bin/execlineb -W${env}
+  ${depot.nix.escapeExecline argList}
+''
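
A hedged usage sketch for writeExecline with the defaults documented above (argMode "var", readNArgs 0); the script words are illustrative and assume `if` and `echo` resolve on PATH when the resulting script runs:

  depot.nix.writeExecline "hello" { } [
    "if" [ "echo" "hello" ]
    "echo" "world"
  ]
  # the inner list becomes an execline block: if { echo hello } echo world
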
diff --git a/nix/writeScript/default.nix b/nix/writeScript/default.nix
index e8e6e0fa10ac..1f53b4e4ff51 100644
--- a/nix/writeScript/default.nix
+++ b/nix/writeScript/default.nix
@@ -5,25 +5,31 @@
 
 let
   bins = depot.nix.getBins pkgs.s6-portable-utils [
-           "s6-cat"
-           "s6-chmod"
-         ];
+    "s6-cat"
+    "s6-chmod"
+  ];
 
 in
 name:
 # string of the executable script that is put in $out
 script:
 
-depot.nix.runExecline name {
+depot.nix.runExecline name
+{
   stdin = script;
   derivationArgs = {
     preferLocalBuild = true;
     allowSubstitutes = false;
   };
 } [
-  "importas" "out" "out"
+  "importas"
+  "out"
+  "out"
   # this pipes stdout of s6-cat to $out
   # and s6-cat redirects from stdin to stdout
-  "if" [ "redirfd" "-w" "1" "$out" bins.s6-cat ]
-  bins.s6-chmod "0755" "$out"
+  "if"
+  [ "redirfd" "-w" "1" "$out" bins.s6-cat ]
+  bins.s6-chmod
+  "0755"
+  "$out"
 ]
diff --git a/nix/writers/default.nix b/nix/writers/default.nix
index b0795ab2c886..55355913a9f4 100644
--- a/nix/writers/default.nix
+++ b/nix/writers/default.nix
@@ -2,62 +2,71 @@
 
 let
   bins = depot.nix.getBins pkgs.s6-portable-utils [ "s6-ln" "s6-ls" "s6-touch" ]
-    ;
+  ;
 
-  linkTo = name: path: depot.nix.runExecline.local name {} [
-    "importas" "out" "out"
-    bins.s6-ln "-s" path "$out"
+  linkTo = name: path: depot.nix.runExecline.local name { } [
+    "importas"
+    "out"
+    "out"
+    bins.s6-ln
+    "-s"
+    path
+    "$out"
   ];
 
   # Build a rust executable, $out is the executable.
-  rustSimple = args@{name, ...}: src:
+  rustSimple = args@{ name, ... }: src:
     linkTo name "${rustSimpleBin args src}/bin/${name}";
 
   # Like `rustSimple`, but put the binary in `$out/bin/`.
-  rustSimpleBin = {
-    name,
-    dependencies ? [],
-    doCheck ? true,
-  }: src:
+  rustSimpleBin =
+    { name
+    , dependencies ? [ ]
+    , doCheck ? true
+    ,
+    }: src:
     (if doCheck then testRustSimple else pkgs.lib.id)
-    (pkgs.buildRustCrate ({
-      pname = name;
-      version = "1.0.0";
-      crateName = name;
-      crateBin = [ name ];
-      dependencies = dependencies;
-      src = pkgs.runCommandLocal "write-main.rs" {
-        src = src;
-        passAsFile = [ "src" ];
-      } ''
-        mkdir -p $out/src/bin
-        cp "$srcPath" $out/src/bin/${name}.rs
-        find $out
-      '';
-    }));
+      (pkgs.buildRustCrate ({
+        pname = name;
+        version = "1.0.0";
+        crateName = name;
+        crateBin = [ name ];
+        dependencies = dependencies;
+        src = pkgs.runCommandLocal "write-main.rs"
+          {
+            src = src;
+            passAsFile = [ "src" ];
+          } ''
+          mkdir -p $out/src/bin
+          cp "$srcPath" $out/src/bin/${name}.rs
+          find $out
+        '';
+      }));
 
   # Build a rust library, that can be used as dependency to `rustSimple`.
   # Wrapper around `pkgs.buildRustCrate`, takes all its arguments.
-  rustSimpleLib = {
-    name,
-    dependencies ? [],
-    doCheck ? true,
-  }: src:
+  rustSimpleLib =
+    { name
+    , dependencies ? [ ]
+    , doCheck ? true
+    ,
+    }: src:
     (if doCheck then testRustSimple else pkgs.lib.id)
-    (pkgs.buildRustCrate ({
-      pname = name;
-      version = "1.0.0";
-      crateName = name;
-      dependencies = dependencies;
-      src = pkgs.runCommandLocal "write-lib.rs" {
-        src = src;
-        passAsFile = [ "src" ];
-      } ''
-        mkdir -p $out/src
-        cp "$srcPath" $out/src/lib.rs
-        find $out
-      '';
-    }));
+      (pkgs.buildRustCrate ({
+        pname = name;
+        version = "1.0.0";
+        crateName = name;
+        dependencies = dependencies;
+        src = pkgs.runCommandLocal "write-lib.rs"
+          {
+            src = src;
+            passAsFile = [ "src" ];
+          } ''
+          mkdir -p $out/src
+          cp "$srcPath" $out/src/lib.rs
+          find $out
+        '';
+      }));
 
   /* Takes a `buildRustCrate` derivation as an input,
     * builds it with `{ buildTests = true; }` and runs
@@ -72,19 +81,30 @@ let
   testRustSimple = rustDrv:
     let
       crate = buildTests: rustDrv.override { inherit buildTests; };
-      tests = depot.nix.runExecline.local "${rustDrv.name}-tests-run" {} [
-        "importas" "out" "out"
-        "if" [
-          "pipeline" [ bins.s6-ls "${crate true}/tests" ]
-          "forstdin" "-o0" "test"
-          "importas" "test" "test"
+      tests = depot.nix.runExecline.local "${rustDrv.name}-tests-run" { } [
+        "importas"
+        "out"
+        "out"
+        "if"
+        [
+          "pipeline"
+          [ bins.s6-ls "${crate true}/tests" ]
+          "forstdin"
+          "-o0"
+          "test"
+          "importas"
+          "test"
+          "test"
           "${crate true}/tests/$test"
         ]
-        bins.s6-touch "$out"
+        bins.s6-touch
+        "$out"
       ];
-    in depot.nix.drvSeqL [ tests ] (crate false);
+    in
+    depot.nix.drvSeqL [ tests ] (crate false);
 
-in {
+in
+{
   inherit
     rustSimple
     rustSimpleBin
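The tests in nix/writers/tests/rust.nix (next hunk) exercise rustSimple and rustSimpleLib; purely as a hedged sketch of the remaining writer, written as if inside this file's let-scope, rustSimpleBin places the compiled binary under $out/bin/<name>:

  hello = rustSimpleBin { name = "hello"; } ''
    fn main() { println!("hello"); }
  '';
  # $out/bin/hello is the executable; doCheck defaults to true, so
  # testRustSimple runs the crate's test binaries before returning it.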
diff --git a/nix/writers/tests/rust.nix b/nix/writers/tests/rust.nix
index 8a12c95ec7da..232a2dc60808 100644
--- a/nix/writers/tests/rust.nix
+++ b/nix/writers/tests/rust.nix
@@ -11,15 +11,20 @@ let
     coreutils
     ;
 
-  run = drv: depot.nix.runExecline.local "run-${drv.name}" {} [
-    "if" [ drv ]
-    "importas" "out" "out"
-    "${coreutils}/bin/touch" "$out"
+  run = drv: depot.nix.runExecline.local "run-${drv.name}" { } [
+    "if"
+    [ drv ]
+    "importas"
+    "out"
+    "out"
+    "${coreutils}/bin/touch"
+    "$out"
   ];
 
-  rustTransitiveLib = rustSimpleLib {
-    name = "transitive";
-  } ''
+  rustTransitiveLib = rustSimpleLib
+    {
+      name = "transitive";
+    } ''
     pub fn transitive(s: &str) -> String {
       let mut new = s.to_string();
       new.push_str(" 1 2 3");
@@ -37,10 +42,11 @@ let
     }
   '';
 
-  rustTestLib = rustSimpleLib {
-    name = "test_lib";
-    dependencies = [ rustTransitiveLib ];
-  } ''
+  rustTestLib = rustSimpleLib
+    {
+      name = "test_lib";
+      dependencies = [ rustTransitiveLib ];
+    } ''
     extern crate transitive;
     use transitive::{transitive};
     pub fn test() -> String {
@@ -48,10 +54,11 @@ let
     }
   '';
 
-  rustWithLib = run (rustSimple {
-    name = "rust-with-lib";
-    dependencies = [ rustTestLib ];
-  } ''
+  rustWithLib = run (rustSimple
+    {
+      name = "rust-with-lib";
+      dependencies = [ rustTestLib ];
+    } ''
     extern crate test_lib;
 
     fn main() {
@@ -60,7 +67,8 @@ let
   '');
 
 
-in depot.nix.readTree.drvTargets {
+in
+depot.nix.readTree.drvTargets {
   inherit
     rustTransitiveLib
     rustWithLib
diff --git a/nix/yants/default.nix b/nix/yants/default.nix
index 2bbf4dd15a9e..cb9fc08287fb 100644
--- a/nix/yants/default.nix
+++ b/nix/yants/default.nix
@@ -6,10 +6,10 @@
 #
 # All types (should) compose as expected.
 
-{ lib ?  (import <nixpkgs> {}).lib, ... }:
+{ lib ? (import <nixpkgs> { }).lib, ... }:
 
 with builtins; let
-  prettyPrint = lib.generators.toPretty {};
+  prettyPrint = lib.generators.toPretty { };
 
   # typedef' :: struct {
   #   name = string;
@@ -34,41 +34,44 @@ with builtins; let
   #
   # This function is the low-level primitive used to create types. For
   # many cases the higher-level 'typedef' function is more appropriate.
-  typedef' = { name, checkType
-             , checkToBool ? (result: result.ok)
-             , toError ? (_: result: result.err)
-             , def ? null
-             , match ? null }: {
-    inherit name checkToBool toError;
-
-    # check :: a -> bool
-    #
-    # This function is used to determine whether a given type is
-    # conformant.
-    check = value: checkToBool (checkType value);
-
-    # checkType :: a -> struct { ok = bool; err = option string; }
-    #
-    # This function checks whether the passed value is type conformant
-    # and returns an optional type error string otherwise.
-    inherit checkType;
-
-    # __functor :: a -> a
-    #
-    # This function checks whether the passed value is type conformant
-    # and throws an error if it is not.
-    #
-    # The name of this function is a special attribute in Nix that
-    # makes it possible to execute a type attribute set like a normal
-    # function.
-    __functor = self: value:
-    let result = self.checkType value;
-    in if checkToBool result then value
-       else throw (toError value result);
-  };
+  typedef' =
+    { name
+    , checkType
+    , checkToBool ? (result: result.ok)
+    , toError ? (_: result: result.err)
+    , def ? null
+    , match ? null
+    }: {
+      inherit name checkToBool toError;
+
+      # check :: a -> bool
+      #
+      # This function is used to determine whether a given type is
+      # conformant.
+      check = value: checkToBool (checkType value);
+
+      # checkType :: a -> struct { ok = bool; err = option string; }
+      #
+      # This function checks whether the passed value is type conformant
+      # and returns an optional type error string otherwise.
+      inherit checkType;
+
+      # __functor :: a -> a
+      #
+      # This function checks whether the passed value is type conformant
+      # and throws an error if it is not.
+      #
+      # The name of this function is a special attribute in Nix that
+      # makes it possible to execute a type attribute set like a normal
+      # function.
+      __functor = self: value:
+        let result = self.checkType value;
+        in if checkToBool result then value
+        else throw (toError value result);
+    };
 
   typeError = type: val:
-  "expected type '${type}', but value '${prettyPrint val}' is of type '${typeOf val}'";
+    "expected type '${type}', but value '${prettyPrint val}' is of type '${typeOf val}'";
 
   # typedef :: string -> (a -> bool) -> type
   #
@@ -85,27 +88,34 @@ with builtins; let
       });
   };
 
-  checkEach = name: t: l: foldl' (acc: e:
-    let res = t.checkType e;
+  checkEach = name: t: l: foldl'
+    (acc: e:
+      let
+        res = t.checkType e;
         isT = t.checkToBool res;
-    in {
-      ok = acc.ok && isT;
-      err = if isT
-        then acc.err
-        else acc.err + "${prettyPrint e}: ${t.toError e res}\n";
-    }) { ok = true; err = "expected type ${name}, but found:\n"; } l;
-in lib.fix (self: {
+      in
+      {
+        ok = acc.ok && isT;
+        err =
+          if isT
+          then acc.err
+          else acc.err + "${prettyPrint e}: ${t.toError e res}\n";
+      })
+    { ok = true; err = "expected type ${name}, but found:\n"; }
+    l;
+in
+lib.fix (self: {
   # Primitive types
-  any      = typedef "any" (_: true);
-  unit     = typedef "unit" (v: v == {});
-  int      = typedef "int" isInt;
-  bool     = typedef "bool" isBool;
-  float    = typedef "float" isFloat;
-  string   = typedef "string" isString;
-  path     = typedef "path" (x: typeOf x == "path");
-  drv      = typedef "derivation" (x: isAttrs x && x ? "type" && x.type == "derivation");
+  any = typedef "any" (_: true);
+  unit = typedef "unit" (v: v == { });
+  int = typedef "int" isInt;
+  bool = typedef "bool" isBool;
+  float = typedef "float" isFloat;
+  string = typedef "string" isString;
+  path = typedef "path" (x: typeOf x == "path");
+  drv = typedef "derivation" (x: isAttrs x && x ? "type" && x.type == "derivation");
   function = typedef "function" (x: isFunction x || (isAttrs x && x ? "__functor"
-                                                 && isFunction x.__functor));
+    && isFunction x.__functor));
 
   # Type for types themselves. Useful when defining polymorphic types.
   type = typedef "type" (x:
@@ -124,7 +134,7 @@ in lib.fix (self: {
       in {
         ok = isNull v || (self.type t).checkToBool res;
         err = "expected type ${name}, but value does not conform to '${t.name}': "
-         + t.toError v res;
+          + t.toError v res;
       };
   };
 
@@ -136,7 +146,8 @@ in lib.fix (self: {
   list = t: typedef' rec {
     name = "list<${t.name}>";
 
-    checkType = v: if isList v
+    checkType = v:
+      if isList v
       then checkEach name (self.type t) v
       else {
         ok = false;
@@ -147,7 +158,8 @@ in lib.fix (self: {
   attrs = t: typedef' rec {
     name = "attrs<${t.name}>";
 
-    checkType = v: if isAttrs v
+    checkType = v:
+      if isAttrs v
       then checkEach name (self.type t) (attrValues v)
       else {
         ok = false;
@@ -172,20 +184,23 @@ in lib.fix (self: {
       # checkField checks an individual field of the struct against
       # its definition and creates a typecheck result. These results
       # are aggregated during the actual checking.
-      checkField = def: name: value: let result = def.checkType value; in rec {
-        ok = def.checkToBool result;
-        err = if !ok && isNull value
-          then "missing required ${def.name} field '${name}'\n"
-          else "field '${name}': ${def.toError value result}\n";
-      };
+      checkField = def: name: value:
+        let result = def.checkType value; in rec {
+          ok = def.checkToBool result;
+          err =
+            if !ok && isNull value
+            then "missing required ${def.name} field '${name}'\n"
+            else "field '${name}': ${def.toError value result}\n";
+        };
 
       # checkExtraneous determines whether a (closed) struct contains
       # any fields that are not part of the definition.
       checkExtraneous = def: has: acc:
         if (length has) == 0 then acc
         else if (hasAttr (head has) def)
-          then checkExtraneous def (tail has) acc
-          else checkExtraneous def (tail has) {
+        then checkExtraneous def (tail has) acc
+        else
+          checkExtraneous def (tail has) {
             ok = false;
             err = acc.err + "unexpected struct field '${head has}'\n";
           };
@@ -197,85 +212,102 @@ in lib.fix (self: {
           init = { ok = true; err = ""; };
           extraneous = checkExtraneous def (attrNames value) init;
 
-          checkedFields = map (n:
-            let v = if hasAttr n value then value."${n}" else null;
-            in checkField def."${n}" n v) (attrNames def);
-
-          combined = foldl' (acc: res: {
-            ok = acc.ok && res.ok;
-            err = if !res.ok then acc.err + res.err else acc.err;
-          }) init checkedFields;
-        in {
+          checkedFields = map
+            (n:
+              let v = if hasAttr n value then value."${n}" else null;
+              in checkField def."${n}" n v)
+            (attrNames def);
+
+          combined = foldl'
+            (acc: res: {
+              ok = acc.ok && res.ok;
+              err = if !res.ok then acc.err + res.err else acc.err;
+            })
+            init
+            checkedFields;
+        in
+        {
           ok = combined.ok && extraneous.ok;
           err = combined.err + extraneous.err;
         };
 
       struct' = name: def: typedef' {
         inherit name def;
-        checkType = value: if isAttrs value
+        checkType = value:
+          if isAttrs value
           then (checkStruct (self.attrs self.type def) value)
           else { ok = false; err = typeError name value; };
 
-          toError = _: result: "expected '${name}'-struct, but found:\n" + result.err;
+        toError = _: result: "expected '${name}'-struct, but found:\n" + result.err;
       };
-    in arg: if isString arg then (struct' arg) else (struct' "anon" arg);
+    in
+    arg: if isString arg then (struct' arg) else (struct' "anon" arg);
 
   # Enums & pattern matching
   enum =
-  let
-    plain = name: def: typedef' {
-      inherit name def;
+    let
+      plain = name: def: typedef' {
+        inherit name def;
 
-      checkType = (x: isString x && elem x def);
-      checkToBool = x: x;
-      toError = value: _: "'${prettyPrint value} is not a member of enum ${name}";
-    };
-    enum' = name: def: lib.fix (e: (plain name def) // {
-      match = x: actions: deepSeq (map e (attrNames actions)) (
-      let
-        actionKeys = attrNames actions;
-        missing = foldl' (m: k: if (elem k actionKeys) then m else m ++ [ k ]) [] def;
-      in if (length missing) > 0
-        then throw "Missing match action for members: ${prettyPrint missing}"
-        else actions."${e x}");
-    });
-  in arg: if isString arg then (enum' arg) else (enum' "anon" arg);
+        checkType = (x: isString x && elem x def);
+        checkToBool = x: x;
+        toError = value: _: "'${prettyPrint value}' is not a member of enum ${name}";
+      };
+      enum' = name: def: lib.fix (e: (plain name def) // {
+        match = x: actions: deepSeq (map e (attrNames actions)) (
+          let
+            actionKeys = attrNames actions;
+            missing = foldl' (m: k: if (elem k actionKeys) then m else m ++ [ k ]) [ ] def;
+          in
+          if (length missing) > 0
+          then throw "Missing match action for members: ${prettyPrint missing}"
+          else actions."${e x}"
+        );
+      });
+    in
+    arg: if isString arg then (enum' arg) else (enum' "anon" arg);
 
   # Sum types
   #
   # The representation of a sum type is an attribute set with only one
   # value, where the key of the value denotes the variant of the type.
   sum =
-  let
-    plain = name: def: typedef' {
-      inherit name def;
-      checkType = (x:
-        let variant = elemAt (attrNames x) 0;
-        in if isAttrs x && length (attrNames x) == 1 && hasAttr variant def
-          then let t = def."${variant}";
-                   v = x."${variant}";
-                   res = t.checkType v;
-               in if t.checkToBool res
-                  then { ok = true; }
-                  else {
-                    ok = false;
-                    err = "while checking '${name}' variant '${variant}': "
-                          + t.toError v res;
-                  }
+    let
+      plain = name: def: typedef' {
+        inherit name def;
+        checkType = (x:
+          let variant = elemAt (attrNames x) 0;
+          in if isAttrs x && length (attrNames x) == 1 && hasAttr variant def
+          then
+            let
+              t = def."${variant}";
+              v = x."${variant}";
+              res = t.checkType v;
+            in
+            if t.checkToBool res
+            then { ok = true; }
+            else {
+              ok = false;
+              err = "while checking '${name}' variant '${variant}': "
+                + t.toError v res;
+            }
           else { ok = false; err = typeError name x; }
-      );
-    };
-    sum' = name: def: lib.fix (s: (plain name def) // {
-    match = x: actions:
-    let variant = deepSeq (s x) (elemAt (attrNames x) 0);
-        actionKeys = attrNames actions;
-        defKeys = attrNames def;
-        missing = foldl' (m: k: if (elem k actionKeys) then m else m ++ [ k ]) [] defKeys;
-    in if (length missing) > 0
-      then throw "Missing match action for variants: ${prettyPrint missing}"
-      else actions."${variant}" x."${variant}";
-    });
-    in arg: if isString arg then (sum' arg) else (sum' "anon" arg);
+        );
+      };
+      sum' = name: def: lib.fix (s: (plain name def) // {
+        match = x: actions:
+          let
+            variant = deepSeq (s x) (elemAt (attrNames x) 0);
+            actionKeys = attrNames actions;
+            defKeys = attrNames def;
+            missing = foldl' (m: k: if (elem k actionKeys) then m else m ++ [ k ]) [ ] defKeys;
+          in
+          if (length missing) > 0
+          then throw "Missing match action for variants: ${prettyPrint missing}"
+          else actions."${variant}" x."${variant}";
+      });
+    in
+    arg: if isString arg then (sum' arg) else (sum' "anon" arg);
 
   # Typed function definitions
   #
@@ -289,15 +321,19 @@ in lib.fix (self: {
       mkFunc = sig: f: {
         inherit sig;
         __toString = self: foldl' (s: t: "${s} -> ${t.name}")
-                                  "λ :: ${(head self.sig).name}" (tail self.sig);
+          "λ :: ${(head self.sig).name}"
+          (tail self.sig);
         __functor = _: f;
       };
 
-      defun' = sig: func: if length sig > 2
+      defun' = sig: func:
+        if length sig > 2
         then mkFunc sig (x: defun' (tail sig) (func ((head sig) x)))
         else mkFunc sig (x: ((head (tail sig)) (func ((head sig) x))));
 
-    in sig: func: if length sig < 2
+    in
+    sig: func:
+      if length sig < 2
       then (throw "Signature must at least have two types (a -> b)")
       else defun' sig func;
 
@@ -311,21 +347,22 @@ in lib.fix (self: {
   # depend on the value being of the wrapped type.
   restrict = name: pred: t:
     let restriction = "${t.name}[${name}]"; in typedef' {
-    name = restriction;
-    checkType = v:
-      let res = t.checkType v;
-      in
+      name = restriction;
+      checkType = v:
+        let res = t.checkType v;
+        in
         if !(t.checkToBool res)
         then res
         else
           let
             iok = pred v;
-          in if isBool iok then {
+          in
+          if isBool iok then {
             ok = iok;
             err = "${prettyPrint v} does not conform to restriction '${restriction}'";
           } else
-            # use throw here to avoid spamming the build log
+          # use throw here to avoid spamming the build log
             throw "restriction '${restriction}' predicate returned unexpected value '${prettyPrint iok}' instead of boolean";
-  };
+    };
 
 })
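The yants test suite (next hunk) covers most of these combinators; as one additional hedged sketch, assuming the library is bound as depot.nix.yants, `restrict` wraps an existing type with a predicate and the result is applied like any other type via __functor:

  with depot.nix.yants;
  let positive = restrict "positive" (n: n > 0) int;
  in positive 42 # => 42; `positive 0` would throw the restriction error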
diff --git a/nix/yants/tests/default.nix b/nix/yants/tests/default.nix
index 9a0b2403e124..0c7ec2418802 100644
--- a/nix/yants/tests/default.nix
+++ b/nix/yants/tests/default.nix
@@ -25,7 +25,7 @@ let
   };
 
   testPrimitives = it "checks that all primitive types match" [
-    (assertDoesNotThrow "unit type" (unit {}))
+    (assertDoesNotThrow "unit type" (unit { }))
     (assertDoesNotThrow "int type" (int 15))
     (assertDoesNotThrow "bool type" (bool false))
     (assertDoesNotThrow "float type" (float 13.37))
@@ -44,7 +44,7 @@ let
   # Test that structures work as planned.
   person = struct "person" {
     name = string;
-    age  = int;
+    age = int;
 
     contact = option (struct {
       email = string;
@@ -55,7 +55,7 @@ let
   testStruct = it "checks that structures work as intended" [
     (assertDoesNotThrow "person struct" (person {
       name = "Brynhjulf";
-      age  = 42;
+      age = 42;
       contact.email = "brynhjulf@yants.nix";
     }))
   ];
@@ -70,7 +70,8 @@ let
 
   testEnum = it "checks enum definitions and matching" [
     (assertEq "enum is matched correctly"
-      "It is in fact red!" (colour.match "red" colourMatcher))
+      "It is in fact red!"
+      (colour.match "red" colourMatcher))
     (assertThrows "out of bounds enum fails"
       (colour.match "alpha" (colourMatcher // {
         alpha = "This should never happen";
@@ -97,7 +98,8 @@ let
   testSum = it "checks sum types definitions and matching" [
     (assertDoesNotThrow "creature sum type" some-human)
     (assertEq "sum type is matched correctly"
-      "It's a human named Brynhjulf" (creature.match some-human {
+      "It's a human named Brynhjulf"
+      (creature.match some-human {
         human = v: "It's a human named ${v.name}";
         pet = v: "It's not supposed to be a pet!";
       })
@@ -106,7 +108,7 @@ let
 
   # Test curried function definitions
   func = defun [ string int string ]
-  (name: age: "${name} is ${toString age} years old");
+    (name: age: "${name} is ${toString age} years old");
 
   testFunctions = it "checks function definitions" [
     (assertDoesNotThrow "function application" (func "Brynhjulf" 42))
@@ -144,13 +146,13 @@ let
   ];
 
 in
-  runTestsuite "yants" [
-    testPrimitives
-    testPoly
-    testStruct
-    testEnum
-    testSum
-    testFunctions
-    testTypes
-    testRestrict
-  ]
+runTestsuite "yants" [
+  testPrimitives
+  testPoly
+  testStruct
+  testEnum
+  testSum
+  testFunctions
+  testTypes
+  testRestrict
+]
diff --git a/ops/dns/default.nix b/ops/dns/default.nix
index 136a4c58dca7..ad6e136f2756 100644
--- a/ops/dns/default.nix
+++ b/ops/dns/default.nix
@@ -2,11 +2,12 @@
 { depot, pkgs, ... }:
 
 let
-  checkZone = zone: file: pkgs.runCommandNoCC "${zone}-check" {} ''
+  checkZone = zone: file: pkgs.runCommandNoCC "${zone}-check" { } ''
     ${pkgs.bind}/bin/named-checkzone -i local ${zone} ${file} | tee $out
   '';
 
-in depot.nix.readTree.drvTargets {
+in
+depot.nix.readTree.drvTargets {
   nixery-dev = checkZone "nixery.dev" ./nixery.dev.zone;
   tvl-fyi = checkZone "tvl.fyi" ./tvl.fyi.zone;
   tvl-su = checkZone "tvl.su" ./tvl.su.zone;
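Illustrative only (zone name and file are hypothetical): adding another checked zone is a one-line extension of the drvTargets set above.

  example-com = checkZone "example.com" ./example.com.zone;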
diff --git a/ops/glesys/default.nix b/ops/glesys/default.nix
index f4c0478c5d89..2dfb505fb423 100644
--- a/ops/glesys/default.nix
+++ b/ops/glesys/default.nix
@@ -2,7 +2,7 @@
 
 depot.nix.readTree.drvTargets {
   # Provide a Terraform wrapper with the right provider installed.
-  terraform = pkgs.terraform.withPlugins(_: [
+  terraform = pkgs.terraform.withPlugins (_: [
     depot.third_party.terraform-provider-glesys
   ]);
 }
diff --git a/ops/journaldriver/default.nix b/ops/journaldriver/default.nix
index d2413e74cc92..a06a858fa12a 100644
--- a/ops/journaldriver/default.nix
+++ b/ops/journaldriver/default.nix
@@ -4,6 +4,8 @@ depot.third_party.naersk.buildPackage {
   src = ./.;
 
   buildInputs = with pkgs; [
-    pkgconfig openssl systemd.dev
+    pkgconfig
+    openssl
+    systemd.dev
   ];
 }
diff --git a/ops/keycloak/default.nix b/ops/keycloak/default.nix
index 96f0c40e5e2a..5757debd1a39 100644
--- a/ops/keycloak/default.nix
+++ b/ops/keycloak/default.nix
@@ -2,7 +2,7 @@
 
 depot.nix.readTree.drvTargets {
   # Provide a Terraform wrapper with the right provider installed.
-  terraform = pkgs.terraform.withPlugins(p: [
+  terraform = pkgs.terraform.withPlugins (p: [
     p.keycloak
   ]);
 }
diff --git a/ops/kontemplate/release.nix b/ops/kontemplate/release.nix
index 8a04109526cf..6a3dbd5efe87 100644
--- a/ops/kontemplate/release.nix
+++ b/ops/kontemplate/release.nix
@@ -10,13 +10,17 @@
 # This file is the Nix derivation used to build release binaries for
 # several different architectures and operating systems.
 
-let pkgs = import ((import <nixpkgs> {}).fetchFromGitHub {
-  owner = "NixOS";
-  repo = "nixpkgs-channels";
-  rev = "541d9cce8af7a490fb9085305939569567cb58e6";
-  sha256 = "0jgz72hhzkd5vyq5v69vpljjlnf0lqaz7fh327bvb3cvmwbfxrja";
-}) {};
-in with pkgs; buildGoPackage rec {
+let
+  pkgs = import
+    ((import <nixpkgs> { }).fetchFromGitHub {
+      owner = "NixOS";
+      repo = "nixpkgs-channels";
+      rev = "541d9cce8af7a490fb9085305939569567cb58e6";
+      sha256 = "0jgz72hhzkd5vyq5v69vpljjlnf0lqaz7fh327bvb3cvmwbfxrja";
+    })
+    { };
+in
+with pkgs; buildGoPackage rec {
   name = "kontemplate-${version}";
   version = "canon";
   src = ./.;
@@ -29,8 +33,8 @@ in with pkgs; buildGoPackage rec {
   # reason for setting the 'allowGoReference' flag.
   dontStrip = true; # Linker configuration handles stripping
   allowGoReference = true;
-  CGO_ENABLED="0";
-  GOCACHE="off";
+  CGO_ENABLED = "0";
+  GOCACHE = "off";
 
   # Configure release builds via the "build-matrix" script:
   buildInputs = [ git ];
diff --git a/ops/machines/whitby/default.nix b/ops/machines/whitby/default.nix
index a6a5a763177a..0aa61fda80cf 100644
--- a/ops/machines/whitby/default.nix
+++ b/ops/machines/whitby/default.nix
@@ -4,7 +4,8 @@
 let
   inherit (builtins) listToAttrs;
   inherit (lib) range;
-in {
+in
+{
   imports = [
     "${depot.path}/ops/modules/atward.nix"
     "${depot.path}/ops/modules/clbot.nix"
@@ -55,7 +56,13 @@ in {
 
     initrd = {
       availableKernelModules = [
-        "igb" "xhci_pci" "nvme" "ahci" "usbhid" "usb_storage" "sr_mod"
+        "igb"
+        "xhci_pci"
+        "nvme"
+        "ahci"
+        "usbhid"
+        "usb_storage"
+        "sr_mod"
       ];
 
       # Enable SSH in the initrd so that we can enter disk encryption
@@ -189,7 +196,7 @@ in {
         ++ lukegb.keys.all
         ++ [ grfn.keys.whitby ]
         ++ sterni.keys.all
-        ;
+      ;
     };
   };
 
@@ -205,7 +212,8 @@ in {
   age.secrets =
     let
       secretFile = name: depot.ops.secrets."${name}.age";
-    in {
+    in
+    {
       clbot.file = secretFile "clbot";
       gerrit-queue.file = secretFile "gerrit-queue";
       grafana.file = secretFile "grafana";
@@ -509,15 +517,16 @@ in {
       job_name = "node";
       scrape_interval = "5s";
       static_configs = [{
-        targets = ["localhost:${toString config.services.prometheus.exporters.node.port}"];
+        targets = [ "localhost:${toString config.services.prometheus.exporters.node.port}" ];
       }];
-    } {
-      job_name = "nginx";
-      scrape_interval = "5s";
-      static_configs = [{
-        targets = ["localhost:${toString config.services.prometheus.exporters.nginx.port}"];
+    }
+      {
+        job_name = "nginx";
+        scrape_interval = "5s";
+        static_configs = [{
+          targets = [ "localhost:${toString config.services.prometheus.exporters.nginx.port}" ];
+        }];
       }];
-    }];
   };
 
   services.grafana = {
@@ -526,58 +535,62 @@ in {
     domain = "status.tvl.su";
     rootUrl = "https://status.tvl.su";
     analytics.reporting.enable = false;
-    extraOptions = let
-      options = {
-        auth = {
-          generic_oauth = {
-            enabled = true;
-            client_id = "grafana";
-            scopes = "openid profile email";
-            name = "TVL";
-            email_attribute_path = "mail";
-            login_attribute_path = "sub";
-            name_attribute_path = "displayName";
-            auth_url = "https://auth.tvl.fyi/auth/realms/TVL/protocol/openid-connect/auth";
-            token_url = "https://auth.tvl.fyi/auth/realms/TVL/protocol/openid-connect/token";
-            api_url = "https://auth.tvl.fyi/auth/realms/TVL/protocol/openid-connect/userinfo";
-
-            # Give lukegb, grfn, tazjin "Admin" rights.
-            role_attribute_path = "((sub == 'lukegb' || sub == 'grfn' || sub == 'tazjin') && 'Admin') || 'Editor'";
-
-            # Allow creating new Grafana accounts from OAuth accounts.
-            allow_sign_up = true;
-          };
-
-          anonymous = {
-            enabled = true;
-            org_name = "The Virus Lounge";
-            org_role = "Viewer";
+    extraOptions =
+      let
+        options = {
+          auth = {
+            generic_oauth = {
+              enabled = true;
+              client_id = "grafana";
+              scopes = "openid profile email";
+              name = "TVL";
+              email_attribute_path = "mail";
+              login_attribute_path = "sub";
+              name_attribute_path = "displayName";
+              auth_url = "https://auth.tvl.fyi/auth/realms/TVL/protocol/openid-connect/auth";
+              token_url = "https://auth.tvl.fyi/auth/realms/TVL/protocol/openid-connect/token";
+              api_url = "https://auth.tvl.fyi/auth/realms/TVL/protocol/openid-connect/userinfo";
+
+              # Give lukegb, grfn, tazjin "Admin" rights.
+              role_attribute_path = "((sub == 'lukegb' || sub == 'grfn' || sub == 'tazjin') && 'Admin') || 'Editor'";
+
+              # Allow creating new Grafana accounts from OAuth accounts.
+              allow_sign_up = true;
+            };
+
+            anonymous = {
+              enabled = true;
+              org_name = "The Virus Lounge";
+              org_role = "Viewer";
+            };
+
+            basic.enabled = false;
+            oauth_auto_login = true;
+            disable_login_form = true;
           };
-
-          basic.enabled = false;
-          oauth_auto_login = true;
-          disable_login_form = true;
         };
-      };
-      inherit (builtins) typeOf replaceStrings listToAttrs concatLists;
-      inherit (lib) toUpper mapAttrsToList nameValuePair concatStringsSep;
-
-      # Take ["auth" "generic_oauth" "enabled"] and turn it into OPTIONS_GENERIC_OAUTH_ENABLED.
-      encodeName = raw: replaceStrings ["."] ["_"] (toUpper (concatStringsSep "_" raw));
-
-      # Turn an option value into a string, but we want bools to be sensible strings and not "1" or "".
-      optionToString = value:
-        if (typeOf value) == "bool" then
-          if value then "true" else "false"
-        else builtins.toString value;
-
-      # Turn an nested options attrset into a flat listToAttrs-compatible list.
-      encodeOptions = prefix: inp: concatLists (mapAttrsToList (name: value:
-        if (typeOf value) == "set"
-          then encodeOptions (prefix ++ [name]) value
-          else [ (nameValuePair (encodeName (prefix ++ [name])) (optionToString value)) ]
-        ) inp);
-    in listToAttrs (encodeOptions [] options);
+        inherit (builtins) typeOf replaceStrings listToAttrs concatLists;
+        inherit (lib) toUpper mapAttrsToList nameValuePair concatStringsSep;
+
+        # Take ["auth" "generic_oauth" "enabled"] and turn it into OPTIONS_GENERIC_OAUTH_ENABLED.
+        encodeName = raw: replaceStrings [ "." ] [ "_" ] (toUpper (concatStringsSep "_" raw));
+
+        # Turn an option value into a string, but we want bools to be sensible strings and not "1" or "".
+        optionToString = value:
+          if (typeOf value) == "bool" then
+            if value then "true" else "false"
+          else builtins.toString value;
+
+        # Turn a nested options attrset into a flat listToAttrs-compatible list.
+        encodeOptions = prefix: inp: concatLists (mapAttrsToList
+          (name: value:
+            if (typeOf value) == "set"
+            then encodeOptions (prefix ++ [ name ]) value
+            else [ (nameValuePair (encodeName (prefix ++ [ name ])) (optionToString value)) ]
+          )
+          inp);
+      in
+      listToAttrs (encodeOptions [ ] options);
 
     provision = {
       enable = true;
@@ -623,8 +636,8 @@ in {
 
   security.sudo.extraRules = [
     {
-      groups = ["wheel"];
-      commands = [{ command = "ALL"; options = ["NOPASSWD"]; }];
+      groups = [ "wheel" ];
+      commands = [{ command = "ALL"; options = [ "NOPASSWD" ]; }];
     }
   ];
 
@@ -705,7 +718,7 @@ in {
     };
 
     # Set up a user & group for git shenanigans
-    groups.git = {};
+    groups.git = { };
     users.git = {
       group = "git";
       isSystemUser = true;
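A rough worked example of the Grafana option flattening above (not part of the patch): with the empty prefix passed in the call, a nested path such as auth.generic_oauth.enabled becomes a single listToAttrs entry.

  encodeOptions [ ] { auth.generic_oauth.enabled = true; }
  # => [ { name = "AUTH_GENERIC_OAUTH_ENABLED"; value = "true"; } ]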
diff --git a/ops/modules/atward.nix b/ops/modules/atward.nix
index 354f9ebdd3cb..f345a08e3131 100644
--- a/ops/modules/atward.nix
+++ b/ops/modules/atward.nix
@@ -3,7 +3,8 @@
 let
   cfg = config.services.depot.atward;
   description = "atward - (attempt to) cleverly route queries";
-in {
+in
+{
   options.services.depot.atward = {
     enable = lib.mkEnableOption description;
 
diff --git a/ops/modules/auto-deploy.nix b/ops/modules/auto-deploy.nix
index 83a8273562f6..c504906b2b94 100644
--- a/ops/modules/auto-deploy.nix
+++ b/ops/modules/auto-deploy.nix
@@ -45,7 +45,8 @@ let
     # NixOS in $STATE_DIRECTORY
     (cd / && ${rebuild-system}/bin/rebuild-system)
   '';
-in {
+in
+{
   options.services.depot.auto-deploy = {
     enable = lib.mkEnableOption description;
 
diff --git a/ops/modules/automatic-gc.nix b/ops/modules/automatic-gc.nix
index 634785721024..ad53a63f7f16 100644
--- a/ops/modules/automatic-gc.nix
+++ b/ops/modules/automatic-gc.nix
@@ -29,7 +29,8 @@ let
       echo "Skipping GC, enough space available"
     fi
   '';
-in {
+in
+{
   options.services.depot.automatic-gc = {
     enable = lib.mkEnableOption description;
 
diff --git a/ops/modules/clbot.nix b/ops/modules/clbot.nix
index ef4c2ab23795..958d321f81ad 100644
--- a/ops/modules/clbot.nix
+++ b/ops/modules/clbot.nix
@@ -21,7 +21,7 @@ let
       (attrValues (mapAttrs (key: value: "-${key} \"${toString value}\"") flags));
 
   # Escapes a unit name for use in systemd
-  systemdEscape = name: removeSuffix "\n" (readFile (runCommandNoCC "unit-name" {} ''
+  systemdEscape = name: removeSuffix "\n" (readFile (runCommandNoCC "unit-name" { } ''
     ${pkgs.systemd}/bin/systemd-escape '${name}' >> $out
   ''));
 
@@ -42,7 +42,8 @@ let
       };
     };
   };
-in {
+in
+{
   options.services.depot.clbot = {
     enable = mkEnableOption description;
 
@@ -68,7 +69,7 @@ in {
     # (notably the SSH private key) readable by this user outside of
     # the module.
     users = {
-      groups.clbot = {};
+      groups.clbot = { };
 
       users.clbot = {
         group = "clbot";
diff --git a/ops/modules/default.nix b/ops/modules/default.nix
index 8bdfecdf41b0..d747e8e1319a 100644
--- a/ops/modules/default.nix
+++ b/ops/modules/default.nix
@@ -1,2 +1,2 @@
 # Make readTree happy at this level.
-_: {}
+_: { }
diff --git a/ops/modules/gerrit-queue.nix b/ops/modules/gerrit-queue.nix
index a4b073f8560b..66d584cc3361 100644
--- a/ops/modules/gerrit-queue.nix
+++ b/ops/modules/gerrit-queue.nix
@@ -8,7 +8,8 @@ let
     inherit default;
     type = lib.types.str;
   };
-in {
+in
+{
   options.services.depot.gerrit-queue = {
     enable = lib.mkEnableOption description;
     gerritUrl = mkStringOption "https://cl.tvl.fyi";
diff --git a/ops/modules/git-serving.nix b/ops/modules/git-serving.nix
index 6b8bef29b15a..49af01a0fd3e 100644
--- a/ops/modules/git-serving.nix
+++ b/ops/modules/git-serving.nix
@@ -12,7 +12,8 @@
 
 let
   cfg = config.services.depot.git-serving;
-in {
+in
+{
   options.services.depot.git-serving = with lib; {
     enable = mkEnableOption "Enable cgit & josh configuration";
 
diff --git a/ops/modules/irccat.nix b/ops/modules/irccat.nix
index deb0b4ecaf34..05a783fd6614 100644
--- a/ops/modules/irccat.nix
+++ b/ops/modules/irccat.nix
@@ -27,7 +27,8 @@ let
 
     exec ${depot.third_party.irccat}/bin/irccat
   '';
-in {
+in
+{
   options.services.depot.irccat = {
     enable = lib.mkEnableOption description;
 
diff --git a/ops/modules/monorepo-gerrit.nix b/ops/modules/monorepo-gerrit.nix
index 6638f30b3f9f..509500c9139d 100644
--- a/ops/modules/monorepo-gerrit.nix
+++ b/ops/modules/monorepo-gerrit.nix
@@ -9,12 +9,13 @@ let
     exec -a ${name} ${depot.ops.besadii}/bin/besadii "$@"
   '';
 
-  gerritHooks = pkgs.runCommandNoCC "gerrit-hooks" {} ''
+  gerritHooks = pkgs.runCommandNoCC "gerrit-hooks" { } ''
     mkdir -p $out
     ln -s ${besadiiWithConfig "change-merged"} $out/change-merged
     ln -s ${besadiiWithConfig "patchset-created"} $out/patchset-created
   '';
-in {
+in
+{
   services.gerrit = {
     enable = true;
     listenAddress = "[::]:4778"; # 4778 - grrt
diff --git a/ops/modules/nixery.nix b/ops/modules/nixery.nix
index 60d151045756..33f196372dbb 100644
--- a/ops/modules/nixery.nix
+++ b/ops/modules/nixery.nix
@@ -6,7 +6,8 @@ let
   cfg = config.services.depot.nixery;
   description = "Nixery - container images on-demand";
   storagePath = "/var/lib/nixery/${pkgs.nixpkgsCommits.unstable}";
-in {
+in
+{
   options.services.depot.nixery = {
     enable = lib.mkEnableOption description;
 
diff --git a/ops/modules/oauth2_proxy.nix b/ops/modules/oauth2_proxy.nix
index 07ba8861e745..423f9010c5d2 100644
--- a/ops/modules/oauth2_proxy.nix
+++ b/ops/modules/oauth2_proxy.nix
@@ -19,7 +19,8 @@ let
     reverse_proxy = true
     set_xauthrequest = true
   '';
-in {
+in
+{
   options.services.depot.oauth2_proxy = {
     enable = lib.mkEnableOption description;
 
diff --git a/ops/modules/owothia.nix b/ops/modules/owothia.nix
index b2a77cddc2dd..d11fdd26ecbc 100644
--- a/ops/modules/owothia.nix
+++ b/ops/modules/owothia.nix
@@ -4,7 +4,8 @@
 let
   cfg = config.services.depot.owothia;
   description = "owothia - i'm a service owo";
-in {
+in
+{
   options.services.depot.owothia = {
     enable = lib.mkEnableOption description;
 
diff --git a/ops/modules/panettone.nix b/ops/modules/panettone.nix
index 11e934ec2e8d..d57e53e75442 100644
--- a/ops/modules/panettone.nix
+++ b/ops/modules/panettone.nix
@@ -2,7 +2,8 @@
 
 let
   cfg = config.services.depot.panettone;
-in {
+in
+{
   options.services.depot.panettone = with lib; {
     enable = mkEnableOption "Panettone issue tracker";
 
@@ -62,23 +63,26 @@ in {
       assertion =
         cfg.dbHost != "localhost" || config.services.postgresql.enable;
       message = "Panettone requires a postgresql database";
-    } {
-      assertion =
-        cfg.dbHost != "localhost" || config.services.postgresql.enableTCPIP;
-      message = "Panettone can only connect to the postgresql database over TCP";
-    } {
-      assertion =
-        cfg.dbHost != "localhost" || (lib.any
-          (user: user.name == cfg.dbUser)
-          config.services.postgresql.ensureUsers);
-      message = "Panettone requires a database user";
-    } {
-      assertion =
-        cfg.dbHost != "localhost" || (lib.any
-          (db: db == cfg.dbName)
-          config.services.postgresql.ensureDatabases);
-      message = "Panettone requires a database";
-    }];
+    }
+      {
+        assertion =
+          cfg.dbHost != "localhost" || config.services.postgresql.enableTCPIP;
+        message = "Panettone can only connect to the postgresql database over TCP";
+      }
+      {
+        assertion =
+          cfg.dbHost != "localhost" || (lib.any
+            (user: user.name == cfg.dbUser)
+            config.services.postgresql.ensureUsers);
+        message = "Panettone requires a database user";
+      }
+      {
+        assertion =
+          cfg.dbHost != "localhost" || (lib.any
+            (db: db == cfg.dbName)
+            config.services.postgresql.ensureDatabases);
+        message = "Panettone requires a database";
+      }];
 
     systemd.services.panettone = {
       wantedBy = [ "multi-user.target" ];
diff --git a/ops/modules/paroxysm.nix b/ops/modules/paroxysm.nix
index cd9cd3866e47..070e7623db09 100644
--- a/ops/modules/paroxysm.nix
+++ b/ops/modules/paroxysm.nix
@@ -3,7 +3,8 @@
 let
   cfg = config.services.depot.paroxysm;
   description = "TVL's majestic IRC bot";
-in {
+in
+{
   options.services.depot.paroxysm.enable = lib.mkEnableOption description;
 
   config = lib.mkIf cfg.enable {
diff --git a/ops/modules/quassel.nix b/ops/modules/quassel.nix
index 9c8692629a2a..4a0b64ffc100 100644
--- a/ops/modules/quassel.nix
+++ b/ops/modules/quassel.nix
@@ -8,7 +8,8 @@ let
     enableDaemon = true;
     withKDE = false;
   };
-in {
+in
+{
   options.services.depot.quassel = with lib; {
     enable = mkEnableOption "Quassel IRC daemon";
 
@@ -70,7 +71,7 @@ in {
         group = "quassel";
       };
 
-      groups.quassel = {};
+      groups.quassel = { };
     };
   };
 }
diff --git a/ops/modules/restic.nix b/ops/modules/restic.nix
index 1aacf68973e3..869539603578 100644
--- a/ops/modules/restic.nix
+++ b/ops/modules/restic.nix
@@ -14,7 +14,8 @@ let
     inherit default;
     type = lib.types.str;
   };
-in {
+in
+{
   options.services.depot.restic = {
     enable = lib.mkEnableOption description;
     bucketEndpoint = mkStringOption "objects.dc-sto1.glesys.net";
diff --git a/ops/modules/smtprelay.nix b/ops/modules/smtprelay.nix
index 106593fe39d1..cfb185ecd107 100644
--- a/ops/modules/smtprelay.nix
+++ b/ops/modules/smtprelay.nix
@@ -27,8 +27,9 @@ let
   prepareArgs = args:
     concatStringsSep " "
       (attrValues (mapAttrs (key: value: "-${key} \"${toString value}\"")
-                            (args // overrideArgs)));
-in {
+        (args // overrideArgs)));
+in
+{
   options.services.depot.smtprelay = {
     enable = mkEnableOption description;
 
diff --git a/ops/modules/sourcegraph.nix b/ops/modules/sourcegraph.nix
index a72cd75d477d..5311b42dd1db 100644
--- a/ops/modules/sourcegraph.nix
+++ b/ops/modules/sourcegraph.nix
@@ -4,7 +4,8 @@
 
 let
   cfg = config.services.depot.sourcegraph;
-in {
+in
+{
   options.services.depot.sourcegraph = with lib; {
     enable = mkEnableOption "SourceGraph code search engine";
 
@@ -51,7 +52,8 @@ in {
       # Sourcegraph needs a higher nofile limit, it logs warnings
       # otherwise (unclear whether it actually affects the service).
       extraOptions = [
-        "--ulimit" "nofile=10000:10000"
+        "--ulimit"
+        "nofile=10000:10000"
       ];
     };
   };
diff --git a/ops/modules/tvl-buildkite.nix b/ops/modules/tvl-buildkite.nix
index aaeb5a0f7554..a6e7372a250a 100644
--- a/ops/modules/tvl-buildkite.nix
+++ b/ops/modules/tvl-buildkite.nix
@@ -13,7 +13,7 @@ let
 
   # All Buildkite hooks are actually besadii, but it's being invoked
   # with different names.
-  buildkiteHooks = pkgs.runCommandNoCC "buildkite-hooks" {} ''
+  buildkiteHooks = pkgs.runCommandNoCC "buildkite-hooks" { } ''
     mkdir -p $out/bin
     ln -s ${besadiiWithConfig "post-command"} $out/bin/post-command
   '';
@@ -22,7 +22,8 @@ let
     echo 'username=buildkite'
     echo "password=$(jq -r '.gerritPassword' /run/agenix/buildkite-besadii-config)"
   '';
-in {
+in
+{
   options.services.depot.buildkite = {
     enable = lib.mkEnableOption description;
     agentCount = lib.mkOption {
@@ -33,39 +34,43 @@ in {
 
   config = lib.mkIf cfg.enable {
     # Run the Buildkite agents using the default upstream module.
-    services.buildkite-agents = builtins.listToAttrs (map (n: rec {
-      name = "whitby-${toString n}";
-      value = {
-        inherit name;
-        enable = true;
-        tokenPath = "/run/agenix/buildkite-agent-token";
-        hooks.post-command = "${buildkiteHooks}/bin/post-command";
+    services.buildkite-agents = builtins.listToAttrs (map
+      (n: rec {
+        name = "whitby-${toString n}";
+        value = {
+          inherit name;
+          enable = true;
+          tokenPath = "/run/agenix/buildkite-agent-token";
+          hooks.post-command = "${buildkiteHooks}/bin/post-command";
 
-        runtimePackages = with pkgs; [
-          bash
-          coreutils
-          credentialHelper
-          curl
-          git
-          gnutar
-          gzip
-          jq
-          nix
-        ];
-      };
-    }) agents);
+          runtimePackages = with pkgs; [
+            bash
+            coreutils
+            credentialHelper
+            curl
+            git
+            gnutar
+            gzip
+            jq
+            nix
+          ];
+        };
+      })
+      agents);
 
     # Set up a group for all Buildkite agent users
     users = {
-      groups.buildkite-agents = {};
-      users = builtins.listToAttrs (map (n: rec {
-        name = "buildkite-agent-whitby-${toString n}";
-        value = {
-          isSystemUser = true;
-          group = lib.mkForce "buildkite-agents";
-          extraGroups = [ name "docker" ];
-        };
-      }) agents);
+      groups.buildkite-agents = { };
+      users = builtins.listToAttrs (map
+        (n: rec {
+          name = "buildkite-agent-whitby-${toString n}";
+          value = {
+            isSystemUser = true;
+            group = lib.mkForce "buildkite-agents";
+            extraGroups = [ name "docker" ];
+          };
+        })
+        agents);
     };
   };
 }
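To make the map/listToAttrs pattern above concrete (a sketch only; `agents` itself is defined earlier in this module): with agents = [ 1 2 ], services.buildkite-agents comes out roughly as

  {
    whitby-1 = { name = "whitby-1"; enable = true; /* ... */ };
    whitby-2 = { name = "whitby-2"; enable = true; /* ... */ };
  }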
diff --git a/ops/modules/tvl-slapd/default.nix b/ops/modules/tvl-slapd/default.nix
index dbcf139338ea..d0d6616e224b 100644
--- a/ops/modules/tvl-slapd/default.nix
+++ b/ops/modules/tvl-slapd/default.nix
@@ -26,7 +26,8 @@ let
 
   inherit (depot.ops) users;
 
-in {
+in
+{
   services.openldap = {
     enable = true;
 
@@ -48,7 +49,7 @@ in {
 
       "cn=schema".includes =
         map (schema: "${pkgs.openldap}/etc/schema/${schema}.ldif")
-            [ "core" "cosine" "inetorgperson" "nis" ];
+          [ "core" "cosine" "inetorgperson" "nis" ];
     };
 
     # Contents are immutable at runtime, and adding user accounts etc.
diff --git a/ops/pipelines/depot.nix b/ops/pipelines/depot.nix
index b6941ba38aa7..6d9e625e04be 100644
--- a/ops/pipelines/depot.nix
+++ b/ops/pipelines/depot.nix
@@ -16,9 +16,10 @@ let
     drvTargets = depot.ci.targets;
     additionalSteps = [ protoCheck ];
 
-    parentTargetMap = if (externalArgs ? parentTargetMap)
+    parentTargetMap =
+      if (externalArgs ? parentTargetMap)
       then builtins.fromJSON (builtins.readFile externalArgs.parentTargetMap)
-      else {};
+      else { };
 
     postBuildSteps = [
       # After successful builds, create a gcroot for builds on canon.
@@ -40,7 +41,8 @@ let
   };
 
   drvmap = depot.nix.buildkite.mkDrvmap depot.ci.targets;
-in pkgs.runCommandNoCC "depot-pipeline" {} ''
+in
+pkgs.runCommandNoCC "depot-pipeline" { } ''
   mkdir $out
   cp -r ${pipeline}/* $out
   cp ${drvmap} $out/drvmap.json
diff --git a/ops/secrets/mkSecrets.nix b/ops/secrets/mkSecrets.nix
index 4e40112b9610..c99130835f15 100644
--- a/ops/secrets/mkSecrets.nix
+++ b/ops/secrets/mkSecrets.nix
@@ -22,6 +22,6 @@ in
 
 defun [ path (attrs agenixSecret) (attrs any) ]
   (path: secrets:
-    depot.nix.readTree.drvTargets
-      # Import each secret into the Nix store
-      (builtins.mapAttrs (name: _: "${path}/${name}") secrets))
+  depot.nix.readTree.drvTargets
+    # Import each secret into the Nix store
+    (builtins.mapAttrs (name: _: "${path}/${name}") secrets))
diff --git a/ops/secrets/secrets.nix b/ops/secrets/secrets.nix
index 52b79e444a7c..2c08bb1aee5d 100644
--- a/ops/secrets/secrets.nix
+++ b/ops/secrets/secrets.nix
@@ -15,7 +15,8 @@ let
   whitby = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAILNh/w4BSKov0jdz3gKBc98tpoLta5bb87fQXWBhAl2I";
 
   default.publicKeys = tazjin ++ grfn ++ sterni ++ [ whitby ];
-in {
+in
+{
   "besadii.age" = default;
   "buildkite-agent-token.age" = default;
   "buildkite-graphql-token.age" = default;
diff --git a/third_party/abseil_cpp/default.nix b/third_party/abseil_cpp/default.nix
index 66ab4f6ba143..dd3ca9de9de1 100644
--- a/third_party/abseil_cpp/default.nix
+++ b/third_party/abseil_cpp/default.nix
@@ -8,7 +8,7 @@ in pkgs.abseil-cpp.override {
 
 /* TODO(tazjin): update abseil subtree
 
-fullLlvm11Stdenv.mkDerivation rec {
+  fullLlvm11Stdenv.mkDerivation rec {
   pname = "abseil-cpp";
   version = "20200519-768eb2ca+tvl-1";
   src = ./.;
@@ -17,15 +17,15 @@ fullLlvm11Stdenv.mkDerivation rec {
   # doCheck = true;
 
   cmakeFlags = [
-    "-DCMAKE_CXX_STANDARD=17"
-    #"-DABSL_RUN_TESTS=1"
+  "-DCMAKE_CXX_STANDARD=17"
+  #"-DABSL_RUN_TESTS=1"
   ];
 
   meta = with lib; {
-    description = "An open-source collection of C++ code designed to augment the C++ standard library";
-    homepage = https://abseil.io/;
-    license = licenses.asl20;
-    maintainers = [ maintainers.andersk ];
+  description = "An open-source collection of C++ code designed to augment the C++ standard library";
+  homepage = https://abseil.io/;
+  license = licenses.asl20;
+  maintainers = [ maintainers.andersk ];
   };
-}
+  }
 */
diff --git a/third_party/agenix/default.nix b/third_party/agenix/default.nix
index 91fd9576b091..96afbe16994a 100644
--- a/third_party/agenix/default.nix
+++ b/third_party/agenix/default.nix
@@ -9,7 +9,8 @@ let
   agenix = import src {
     inherit pkgs;
   };
-in {
+in
+{
   inherit src;
   cli = agenix.agenix;
 }
diff --git a/third_party/arion/default.nix b/third_party/arion/default.nix
index 16cd92ea3f48..7b0aa4ee334a 100644
--- a/third_party/arion/default.nix
+++ b/third_party/arion/default.nix
@@ -1,8 +1,10 @@
 { pkgs, ... }:
 
-(import (pkgs.fetchFromGitHub {
-  owner = "hercules-ci";
-  repo = "arion";
-  rev = "db6d4d7490dff363de60cebbece3ae9361e3ce43";
-  sha256 = "0d8nqmc7fjshigax2g47ips262v8ml27x0ksq59kmprgb7ckzi5l";
-}) { inherit pkgs; }).arion
+(import
+  (pkgs.fetchFromGitHub {
+    owner = "hercules-ci";
+    repo = "arion";
+    rev = "db6d4d7490dff363de60cebbece3ae9361e3ce43";
+    sha256 = "0d8nqmc7fjshigax2g47ips262v8ml27x0ksq59kmprgb7ckzi5l";
+  })
+  { inherit pkgs; }).arion
diff --git a/third_party/bat_syntaxes/default.nix b/third_party/bat_syntaxes/default.nix
index 2b7d025fae57..a48962dd364d 100644
--- a/third_party/bat_syntaxes/default.nix
+++ b/third_party/bat_syntaxes/default.nix
@@ -8,7 +8,8 @@
 
 let
   inherit (pkgs) bat runCommandNoCC;
-in runCommandNoCC "bat-syntaxes.bin" {} ''
+in
+runCommandNoCC "bat-syntaxes.bin" { } ''
   export HOME=$PWD
   mkdir -p .config/bat/syntaxes
   cp ${./Prolog.sublime-syntax} .config/bat/syntaxes
diff --git a/third_party/cgit/default.nix b/third_party/cgit/default.nix
index 025877ee4b72..55aaa02f32d4 100644
--- a/third_party/cgit/default.nix
+++ b/third_party/cgit/default.nix
@@ -2,7 +2,8 @@
 
 let
   inherit (pkgs) stdenv gzip bzip2 xz luajit zlib autoconf openssl pkgconfig;
-in stdenv.mkDerivation rec {
+in
+stdenv.mkDerivation rec {
   pname = "cgit";
   version = "master";
   src = ./.;
diff --git a/third_party/clj2nix/default.nix b/third_party/clj2nix/default.nix
index 3bd21df7568f..f582debf29df 100644
--- a/third_party/clj2nix/default.nix
+++ b/third_party/clj2nix/default.nix
@@ -5,4 +5,5 @@ pkgs.callPackage "${(pkgs.fetchFromGitHub {
   repo = "clj2nix";
   rev = "3d0a38c954c8e0926f57de1d80d357df05fc2f94";
   sha256 = "0y77b988qdgsrp4w72v1f5rrh33awbps2qdgp2wr2nmmi44541w5";
-})}/clj2nix.nix" {}
+})}/clj2nix.nix"
+{ }
diff --git a/third_party/default.nix b/third_party/default.nix
index e9e3c117cec0..169727f4b9d6 100644
--- a/third_party/default.nix
+++ b/third_party/default.nix
@@ -24,32 +24,33 @@
   # be able to pass `specialArgs`. We depend on this because `depot`
   # needs to be partially evaluated in NixOS configuration before
   # module imports are resolved.
-  nixos = {
-    configuration,
-    specialArgs ? {},
-    system ? builtins.currentSystem,
-    ...
-  }:
-  let
-    eval = import "${pkgs.path}/nixos/lib/eval-config.nix" {
-      inherit specialArgs system;
-      modules = [
-        configuration
-        (import "${depot.path + "/ops/modules/default-imports.nix"}")
-      ];
-    };
+  nixos =
+    { configuration
+    , specialArgs ? { }
+    , system ? builtins.currentSystem
+    , ...
+    }:
+    let
+      eval = import "${pkgs.path}/nixos/lib/eval-config.nix" {
+        inherit specialArgs system;
+        modules = [
+          configuration
+          (import "${depot.path + "/ops/modules/default-imports.nix"}")
+        ];
+      };
 
-    # This is for `nixos-rebuild build-vm'.
-    vmConfig = (import "${pkgs.path}/nixos/lib/eval-config.nix" {
-      inherit specialArgs system;
-      modules = [
-        configuration
-        "${pkgs.path}/nixos/modules/virtualisation/qemu-vm.nix"
-      ];
-    }).config;
-  in {
-    inherit (eval) pkgs config options;
-    system = eval.config.system.build.toplevel;
-    vm = vmConfig.system.build.vm;
-  };
+      # This is for `nixos-rebuild build-vm'.
+      vmConfig = (import "${pkgs.path}/nixos/lib/eval-config.nix" {
+        inherit specialArgs system;
+        modules = [
+          configuration
+          "${pkgs.path}/nixos/modules/virtualisation/qemu-vm.nix"
+        ];
+      }).config;
+    in
+    {
+      inherit (eval) pkgs config options;
+      system = eval.config.system.build.toplevel;
+      vm = vmConfig.system.build.vm;
+    };
 }
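A hedged usage sketch of the nixos wrapper above (not part of this change; the inline configuration is invented): callers get back the evaluated pkgs/config/options plus the system toplevel and a build-vm derivation.

  example = depot.third_party.nixos {
    configuration = { ... }: { networking.hostName = "example"; };
  };
  # example.system is the toplevel derivation; example.vm the QEMU VM.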
diff --git a/third_party/dhall/default.nix b/third_party/dhall/default.nix
index 6f3782233fa3..b4de613125b7 100644
--- a/third_party/dhall/default.nix
+++ b/third_party/dhall/default.nix
@@ -5,11 +5,14 @@ let
   # broken most of the time. The binaries are also fully static
   # builds, instead of the half-static crap that nixpkgs produces.
   easy-dhall-nix =
-    import (builtins.fetchTarball {
-      url = "https://github.com/justinwoo/easy-dhall-nix/archive/eae7f64c4d6c70681e5a56c84198236930ba425e.tar.gz";
-      sha256 = "1y2x15v8a679vlpxazjpibfwajp6zph60f8wjcm4xflbvazk0dx7";
-    }) { inherit pkgs; };
-in {
+    import
+      (builtins.fetchTarball {
+        url = "https://github.com/justinwoo/easy-dhall-nix/archive/eae7f64c4d6c70681e5a56c84198236930ba425e.tar.gz";
+        sha256 = "1y2x15v8a679vlpxazjpibfwajp6zph60f8wjcm4xflbvazk0dx7";
+      })
+      { inherit pkgs; };
+in
+{
   dhall = easy-dhall-nix.dhall-simple;
   dhall-bash = easy-dhall-nix.dhall-bash-simple;
   dhall-docs = easy-dhall-nix.dhall-docs-simple;
diff --git a/third_party/elmPackages_0_18/default.nix b/third_party/elmPackages_0_18/default.nix
index 55d66f062926..e1e4f6f9c2eb 100644
--- a/third_party/elmPackages_0_18/default.nix
+++ b/third_party/elmPackages_0_18/default.nix
@@ -7,9 +7,11 @@
 
 { pkgs, ... }:
 
-(import (pkgs.fetchFromGitHub {
-  owner = "NixOS";
-  repo = "nixpkgs";
-  rev = "14f9ee66e63077539252f8b4550049381a082518";
-  sha256 = "1wn7nmb1cqfk2j91l3rwc6yhimfkzxprb8wknw5wi57yhq9m6lv1";
-}) {}).elmPackages
+(import
+  (pkgs.fetchFromGitHub {
+    owner = "NixOS";
+    repo = "nixpkgs";
+    rev = "14f9ee66e63077539252f8b4550049381a082518";
+    sha256 = "1wn7nmb1cqfk2j91l3rwc6yhimfkzxprb8wknw5wi57yhq9m6lv1";
+  })
+  { }).elmPackages
diff --git a/third_party/gerrit_plugins/builder.nix b/third_party/gerrit_plugins/builder.nix
index ff1754e088f3..0b6501801cd1 100644
--- a/third_party/gerrit_plugins/builder.nix
+++ b/third_party/gerrit_plugins/builder.nix
@@ -1,33 +1,35 @@
 { depot, pkgs, ... }:
 {
-  buildGerritBazelPlugin = {
-    name,
-    src,
-    depsOutputHash,
-    overlayPluginCmd ? ''
-      cp -R "${src}" "$out/plugins/${name}"
-    '',
-    postPatch ? "",
-  }: ((depot.third_party.gerrit.override {
-    name = "${name}.jar";
+  buildGerritBazelPlugin =
+    { name
+    , src
+    , depsOutputHash
+    , overlayPluginCmd ? ''
+        cp -R "${src}" "$out/plugins/${name}"
+      ''
+    , postPatch ? ""
+    ,
+    }: ((depot.third_party.gerrit.override {
+      name = "${name}.jar";
 
-    src = pkgs.runCommandLocal "${name}-src" {} ''
-      cp -R "${depot.third_party.gerrit.src}" "$out"
-      chmod +w "$out/plugins"
-      ${overlayPluginCmd}
-    '';
+      src = pkgs.runCommandLocal "${name}-src" { } ''
+        cp -R "${depot.third_party.gerrit.src}" "$out"
+        chmod +w "$out/plugins"
+        ${overlayPluginCmd}
+      '';
 
-    bazelTarget = "//plugins/${name}";
-  }).overrideAttrs (super: {
-    deps = super.deps.overrideAttrs (superDeps: {
-      outputHash = depsOutputHash;
-    });
-    installPhase = ''
-      cp "bazel-bin/plugins/${name}/${name}.jar" "$out"
-    '';
-    postPatch = if super ? postPatch then ''
-      ${super.postPatch}
-      ${postPatch}
-    '' else postPatch;
-  }));
+      bazelTarget = "//plugins/${name}";
+    }).overrideAttrs (super: {
+      deps = super.deps.overrideAttrs (superDeps: {
+        outputHash = depsOutputHash;
+      });
+      installPhase = ''
+        cp "bazel-bin/plugins/${name}/${name}.jar" "$out"
+      '';
+      postPatch =
+        if super ? postPatch then ''
+          ${super.postPatch}
+          ${postPatch}
+        '' else postPatch;
+    }));
 }
diff --git a/third_party/gerrit_plugins/default.nix b/third_party/gerrit_plugins/default.nix
index 8131ca2eb014..b342bf3f0e62 100644
--- a/third_party/gerrit_plugins/default.nix
+++ b/third_party/gerrit_plugins/default.nix
@@ -2,7 +2,8 @@
 
 let
   inherit (import ./builder.nix args) buildGerritBazelPlugin;
-in depot.nix.readTree.drvTargets {
+in
+depot.nix.readTree.drvTargets {
   # https://gerrit.googlesource.com/plugins/owners
   owners = buildGerritBazelPlugin rec {
     name = "owners";
diff --git a/third_party/gerrit_plugins/oauth/default.nix b/third_party/gerrit_plugins/oauth/default.nix
index 76c053ae423f..01748ba84220 100644
--- a/third_party/gerrit_plugins/oauth/default.nix
+++ b/third_party/gerrit_plugins/oauth/default.nix
@@ -2,7 +2,8 @@
 
 let
   inherit (import ../builder.nix args) buildGerritBazelPlugin;
-in buildGerritBazelPlugin rec {
+in
+buildGerritBazelPlugin rec {
   name = "oauth";
   depsOutputHash = "sha256:0j86amkw54y177s522hc988hqg034fsrkywbsb9a7h14zwcqbran";
   src = pkgs.fetchgit {
diff --git a/third_party/git/default.nix b/third_party/git/default.nix
index 75131c03c2ff..eed07b5616c6 100644
--- a/third_party/git/default.nix
+++ b/third_party/git/default.nix
@@ -2,8 +2,8 @@
 # `pkgs.srcOnly`.
 { pkgs, ... }:
 
-pkgs.git.overrideAttrs(old: {
-  patches = (old.patches or []) ++ [
+pkgs.git.overrideAttrs (old: {
+  patches = (old.patches or [ ]) ++ [
     ./0001-feat-third_party-git-date-add-dottime-format.patch
   ];
 })
diff --git a/third_party/gitignoreSource/default.nix b/third_party/gitignoreSource/default.nix
index 8bdd974e8dbe..150de7c990e4 100644
--- a/third_party/gitignoreSource/default.nix
+++ b/third_party/gitignoreSource/default.nix
@@ -1,14 +1,17 @@
 { pkgs, ... }:
 
 let
-  gitignoreNix = import (pkgs.fetchFromGitHub {
-    owner = "hercules-ci";
-    repo = "gitignore";
-    rev = "f9e996052b5af4032fe6150bba4a6fe4f7b9d698";
-    sha256 = "0jrh5ghisaqdd0vldbywags20m2cxpkbbk5jjjmwaw0gr8nhsafv";
-  }) { inherit (pkgs) lib; };
+  gitignoreNix = import
+    (pkgs.fetchFromGitHub {
+      owner = "hercules-ci";
+      repo = "gitignore";
+      rev = "f9e996052b5af4032fe6150bba4a6fe4f7b9d698";
+      sha256 = "0jrh5ghisaqdd0vldbywags20m2cxpkbbk5jjjmwaw0gr8nhsafv";
+    })
+    { inherit (pkgs) lib; };
 
-in {
+in
+{
   __functor = _: gitignoreNix.gitignoreSource;
 
   # expose extra functions here
diff --git a/third_party/gopkgs/github.com/charmbracelet/bubbletea/default.nix b/third_party/gopkgs/github.com/charmbracelet/bubbletea/default.nix
index 75eb5402c17c..8dc25bd918e0 100644
--- a/third_party/gopkgs/github.com/charmbracelet/bubbletea/default.nix
+++ b/third_party/gopkgs/github.com/charmbracelet/bubbletea/default.nix
@@ -3,15 +3,17 @@
 depot.nix.buildGo.external {
   path = "github.com/charmbracelet/bubbletea";
   src =
-    let gitSrc = pkgs.fetchFromGitHub {
+    let
+      gitSrc = pkgs.fetchFromGitHub {
         owner = "charmbracelet";
         repo = "bubbletea";
         rev = "v0.13.1";
         sha256 = "0yf2fjkvx8ym9n6f3qp2z7sxs0qsfpj148sfvbrp38k67s3h20cs";
       };
-    # The examples/ directory is fairly extensive,
-    # but it also adds most of the dependencies.
-    in pkgs.runCommand gitSrc.name {} ''
+      # The examples/ directory is fairly extensive,
+      # but it also adds most of the dependencies.
+    in
+    pkgs.runCommand gitSrc.name { } ''
       mkdir -p $out
       ln -s "${gitSrc}"/* $out
       rm -r $out/examples
diff --git a/third_party/grpc/default.nix b/third_party/grpc/default.nix
index 2ef67d402858..bef5dd9d112f 100644
--- a/third_party/grpc/default.nix
+++ b/third_party/grpc/default.nix
@@ -5,7 +5,7 @@
   stdenv = pkgs.fullLlvm11Stdenv;
   abseil-cpp = depot.third_party.abseil_cpp;
   re2 = depot.third_party.re2;
-}).overrideAttrs(orig: rec {
+}).overrideAttrs (orig: rec {
   cmakeFlags = orig.cmakeFlags ++ [
     "-DCMAKE_CXX_STANDARD_REQUIRED=ON"
     "-DCMAKE_CXX_STANDARD=17"
diff --git a/third_party/gtest/default.nix b/third_party/gtest/default.nix
index 70516853ed5c..d3540a48311a 100644
--- a/third_party/gtest/default.nix
+++ b/third_party/gtest/default.nix
@@ -2,7 +2,7 @@
 
 (pkgs.gtest.override {
   stdenv = pkgs.fullLlvm11Stdenv;
-}).overrideAttrs(_: {
+}).overrideAttrs (_: {
   src = pkgs.fetchFromGitHub {
     owner = "google";
     repo = "googletest";
diff --git a/third_party/josh/default.nix b/third_party/josh/default.nix
index 3a4e494e5f74..14e07617bea0 100644
--- a/third_party/josh/default.nix
+++ b/third_party/josh/default.nix
@@ -8,7 +8,8 @@ let
     rev = "69dc986e506ba5631c8bbf52835da076a18ec8dc";
     hash = "sha256:0ybc6ivjkm7bchaszs9lhbl1gbjnyhwq7a3vw6jml3ama84l52lb";
   };
-in depot.third_party.naersk.buildPackage {
+in
+depot.third_party.naersk.buildPackage {
   inherit src;
 
   buildInputs = with pkgs; [
@@ -18,8 +19,11 @@ in depot.third_party.naersk.buildPackage {
   ];
 
   cargoBuildOptions = x: x ++ [
-    "-p" "josh"
-    "-p" "josh-proxy"
-    "-p" "josh-ui"
+    "-p"
+    "josh"
+    "-p"
+    "josh-proxy"
+    "-p"
+    "josh-ui"
   ];
 }
diff --git a/third_party/lisp/bordeaux-threads.nix b/third_party/lisp/bordeaux-threads.nix
index 17ee6e539ef8..8a2e09950887 100644
--- a/third_party/lisp/bordeaux-threads.nix
+++ b/third_party/lisp/bordeaux-threads.nix
@@ -5,7 +5,8 @@
 let
   src = with pkgs; srcOnly lispPackages.bordeaux-threads;
   getSrc = f: "${src}/src/${f}";
-in depot.nix.buildLisp.library {
+in
+depot.nix.buildLisp.library {
   name = "bordeaux-threads";
   deps = [ depot.third_party.lisp.alexandria ];
 
diff --git a/third_party/lisp/cl-fad.nix b/third_party/lisp/cl-fad.nix
index ec1170bf1661..9350abe2e3a3 100644
--- a/third_party/lisp/cl-fad.nix
+++ b/third_party/lisp/cl-fad.nix
@@ -1,5 +1,5 @@
 # Portable pathname library
-{ depot, pkgs, ...}:
+{ depot, pkgs, ... }:
 
 with depot.nix;
 
@@ -18,7 +18,7 @@ in buildLisp.library {
   srcs = map (f: src + ("/" + f)) [
     "packages.lisp"
   ] ++ [
-    { ccl =  "${src}/openmcl.lisp"; }
+    { ccl = "${src}/openmcl.lisp"; }
   ] ++ map (f: src + ("/" + f)) [
     "fad.lisp"
     "path.lisp"
diff --git a/third_party/lisp/cl-json.nix b/third_party/lisp/cl-json.nix
index 5d1450a3e9a1..0230f274afc0 100644
--- a/third_party/lisp/cl-json.nix
+++ b/third_party/lisp/cl-json.nix
@@ -10,19 +10,20 @@ let
     rev = "6dfebb9540bfc3cc33582d0c03c9ec27cb913e79";
     sha256 = "0fx3m3x3s5ji950yzpazz4s0img3l6b3d6l3jrfjv0lr702496lh";
   };
-in buildLisp.library {
+in
+buildLisp.library {
   name = "cl-json";
   deps = [ (buildLisp.bundled "asdf") ];
 
   srcs = [ "${src}/cl-json.asd" ] ++
-  (map (f: src + ("/src/" + f)) [
-    "package.lisp"
-    "common.lisp"
-    "objects.lisp"
-    "camel-case.lisp"
-    "decoder.lisp"
-    "encoder.lisp"
-    "utils.lisp"
-    "json-rpc.lisp"
-  ]);
+    (map (f: src + ("/src/" + f)) [
+      "package.lisp"
+      "common.lisp"
+      "objects.lisp"
+      "camel-case.lisp"
+      "decoder.lisp"
+      "encoder.lisp"
+      "utils.lisp"
+      "json-rpc.lisp"
+    ]);
 }
diff --git a/third_party/lisp/cl-plus-ssl.nix b/third_party/lisp/cl-plus-ssl.nix
index bec5d5b3a205..dc0a95944fe7 100644
--- a/third_party/lisp/cl-plus-ssl.nix
+++ b/third_party/lisp/cl-plus-ssl.nix
@@ -3,12 +3,14 @@
 
 with depot.nix;
 
-let src = pkgs.fetchgit {
-  url = "https://github.com/cl-plus-ssl/cl-plus-ssl.git";
-  rev = "29081992f6d7b4e3aa2c5eeece4cd92b745071f4";
-  hash = "sha256:16lyrixl98b7vy29dbbzkbq0xaz789350dajrr1gdny5i55rkjq0";
-};
-in buildLisp.library {
+let
+  src = pkgs.fetchgit {
+    url = "https://github.com/cl-plus-ssl/cl-plus-ssl.git";
+    rev = "29081992f6d7b4e3aa2c5eeece4cd92b745071f4";
+    hash = "sha256:16lyrixl98b7vy29dbbzkbq0xaz789350dajrr1gdny5i55rkjq0";
+  };
+in
+buildLisp.library {
   name = "cl-plus-ssl";
   deps = with depot.third_party.lisp; [
     alexandria
diff --git a/third_party/lisp/cl-unicode.nix b/third_party/lisp/cl-unicode.nix
index 5fff1fbe6bb2..815d99c2dc8e 100644
--- a/third_party/lisp/cl-unicode.nix
+++ b/third_party/lisp/cl-unicode.nix
@@ -40,7 +40,7 @@ let
       "char-info.lisp"
       "read.lisp"
     ]) ++ [
-      (runCommand "dump.lisp" {} ''
+      (runCommand "dump.lisp" { } ''
         substitute ${src}/build/dump.lisp $out \
           --replace ':defaults *this-file*' ":defaults (uiop:getcwd)"
       '')
@@ -55,7 +55,7 @@ let
   };
 
 
-  generated = runCommand "cl-unicode-generated" {} ''
+  generated = runCommand "cl-unicode-generated" { } ''
     mkdir -p $out/build
     mkdir -p $out/test
     cd $out/build
@@ -66,7 +66,7 @@ let
 in
 depot.nix.buildLisp.library {
   name = "cl-unicode";
-  deps = [cl-unicode-base];
+  deps = [ cl-unicode-base ];
   srcs = [
     "${src}/conditions.lisp"
     "${generated}/lists.lisp"
diff --git a/third_party/lisp/cl-yacc.nix b/third_party/lisp/cl-yacc.nix
index d2ceb81103e2..b40d5d0601b2 100644
--- a/third_party/lisp/cl-yacc.nix
+++ b/third_party/lisp/cl-yacc.nix
@@ -1,12 +1,14 @@
 { depot, pkgs, ... }:
 
-let src = pkgs.fetchFromGitHub {
+let
+  src = pkgs.fetchFromGitHub {
     owner = "jech";
     repo = "cl-yacc";
     rev = "1334f5469251ffb3f8738a682dc8ee646cb26635";
     sha256 = "16946pzf8vvadnyfayvj8rbh4zjzw90h0azz2qk1mxrvhh5wklib";
   };
-in depot.nix.buildLisp.library {
+in
+depot.nix.buildLisp.library {
   name = "cl-yacc";
 
   srcs = map (f: src + ("/" + f)) [
diff --git a/third_party/lisp/closure-common.nix b/third_party/lisp/closure-common.nix
index 0856fc9e52d8..7f7f79f8551b 100644
--- a/third_party/lisp/closure-common.nix
+++ b/third_party/lisp/closure-common.nix
@@ -3,7 +3,8 @@
 let
   src = with pkgs; srcOnly lispPackages.closure-common;
   getSrcs = builtins.map (p: "${src}/${p}");
-in depot.nix.buildLisp.library {
+in
+depot.nix.buildLisp.library {
   name = "closure-common";
 
   # closure-common.asd suppresses some warnings otherwise breaking
@@ -18,12 +19,12 @@ in depot.nix.buildLisp.library {
     "closure-common.asd"
     "package.lisp"
     "definline.lisp"
-    "characters.lisp"     #+rune-is-character
+    "characters.lisp" #+rune-is-character
     "syntax.lisp"
-    "encodings.lisp"      #-x&y-streams-are-stream
+    "encodings.lisp" #-x&y-streams-are-stream
     "encodings-data.lisp" #-x&y-streams-are-stream
-    "xstream.lisp"        #-x&y-streams-are-stream
-    "ystream.lisp"        #-x&y-streams-are-stream
+    "xstream.lisp" #-x&y-streams-are-stream
+    "ystream.lisp" #-x&y-streams-are-stream
     "hax.lisp"
   ];
 
diff --git a/third_party/lisp/easy-routes.nix b/third_party/lisp/easy-routes.nix
index 93aed8a66765..5caf8261fa1c 100644
--- a/third_party/lisp/easy-routes.nix
+++ b/third_party/lisp/easy-routes.nix
@@ -9,7 +9,8 @@ let
     sha256 = "06lnipwc6mmg0v5gybcnr7wn5xmn5xfd1gs19vbima777245bfka";
   };
 
-in depot.nix.buildLisp.library {
+in
+depot.nix.buildLisp.library {
   name = "easy-routes";
   deps = with depot.third_party.lisp; [
     hunchentoot
diff --git a/third_party/lisp/flexi-streams.nix b/third_party/lisp/flexi-streams.nix
index 4b8880902458..a6a06d4ad057 100644
--- a/third_party/lisp/flexi-streams.nix
+++ b/third_party/lisp/flexi-streams.nix
@@ -28,6 +28,6 @@ in depot.nix.buildLisp.library {
     "input.lisp"
     "io.lisp"
     "strings.lisp"
- ];
+  ];
 }
 
diff --git a/third_party/lisp/global-vars.nix b/third_party/lisp/global-vars.nix
index 0f6630f721de..a3d27a09b6a0 100644
--- a/third_party/lisp/global-vars.nix
+++ b/third_party/lisp/global-vars.nix
@@ -3,5 +3,5 @@
 let src = with pkgs; srcOnly lispPackages.global-vars;
 in depot.nix.buildLisp.library {
   name = "global-vars";
-  srcs = [ "${src}/global-vars.lisp" ] ;
+  srcs = [ "${src}/global-vars.lisp" ];
 }
diff --git a/third_party/lisp/hunchentoot.nix b/third_party/lisp/hunchentoot.nix
index 5b953d94b29e..e2480cd349f1 100644
--- a/third_party/lisp/hunchentoot.nix
+++ b/third_party/lisp/hunchentoot.nix
@@ -1,5 +1,5 @@
 # Hunchentoot is a web framework for Common Lisp.
-{ depot, pkgs, ...}:
+{ depot, pkgs, ... }:
 
 let
   src = with pkgs; srcOnly lispPackages.hunchentoot;
@@ -15,7 +15,8 @@ let
       "url-rewrite.lisp"
     ];
   };
-in depot.nix.buildLisp.library {
+in
+depot.nix.buildLisp.library {
   name = "hunchentoot";
 
   deps = with depot.third_party.lisp; [
diff --git a/third_party/lisp/ironclad.nix b/third_party/lisp/ironclad.nix
index 3436776b7da6..324c5da265d8 100644
--- a/third_party/lisp/ironclad.nix
+++ b/third_party/lisp/ironclad.nix
@@ -1,4 +1,4 @@
-{ depot, pkgs, ...}:
+{ depot, pkgs, ... }:
 
 let
   inherit (pkgs) runCommand;
@@ -6,7 +6,8 @@ let
   src = with pkgs; srcOnly lispPackages.ironclad;
   getSrc = f: "${src}/src/${f}";
 
-in depot.nix.buildLisp.library {
+in
+depot.nix.buildLisp.library {
   name = "ironclad";
 
   deps = with depot.third_party.lisp; [
diff --git a/third_party/lisp/lass.nix b/third_party/lisp/lass.nix
index 457e25c7e532..00f66c1fe314 100644
--- a/third_party/lisp/lass.nix
+++ b/third_party/lisp/lass.nix
@@ -8,7 +8,8 @@ let
     sha256 = "11mxzyx34ynsfsrs8pgrarqi9s442vkpmh7kdpzvarhj7i97g8yx";
   };
 
-in depot.nix.buildLisp.library {
+in
+depot.nix.buildLisp.library {
   name = "lass";
 
   deps = with depot.third_party.lisp; [
diff --git a/third_party/lisp/lisp-binary.nix b/third_party/lisp/lisp-binary.nix
index 3e7a43b8ac67..8deba4546fe6 100644
--- a/third_party/lisp/lisp-binary.nix
+++ b/third_party/lisp/lisp-binary.nix
@@ -1,13 +1,15 @@
 # A library to easily read and write complex binary formats.
 { depot, pkgs, ... }:
 
-let src = pkgs.fetchFromGitHub {
-  owner = "j3pic";
-  repo = "lisp-binary";
-  rev = "052df578900dea59bf951e0a6749281fa73432e4";
-  sha256 = "1i1s5g01aimfq6lndcl1pnw7ly5hdh0wmjp2dj9cjjwbkz9lnwcf";
-};
-in depot.nix.buildLisp.library {
+let
+  src = pkgs.fetchFromGitHub {
+    owner = "j3pic";
+    repo = "lisp-binary";
+    rev = "052df578900dea59bf951e0a6749281fa73432e4";
+    sha256 = "1i1s5g01aimfq6lndcl1pnw7ly5hdh0wmjp2dj9cjjwbkz9lnwcf";
+  };
+in
+depot.nix.buildLisp.library {
   name = "lisp-binary";
 
   deps = with depot.third_party.lisp; [
diff --git a/third_party/lisp/local-time.nix b/third_party/lisp/local-time.nix
index 8e96c5e51714..1358408d387c 100644
--- a/third_party/lisp/local-time.nix
+++ b/third_party/lisp/local-time.nix
@@ -4,7 +4,8 @@
 let
   inherit (depot.nix) buildLisp;
   src = with pkgs; srcOnly lispPackages.local-time;
-in buildLisp.library {
+in
+buildLisp.library {
   name = "local-time";
   deps = [
     depot.third_party.lisp.cl-fad
diff --git a/third_party/lisp/nibbles.nix b/third_party/lisp/nibbles.nix
index 3c0a75e46dc7..b71f439c939a 100644
--- a/third_party/lisp/nibbles.nix
+++ b/third_party/lisp/nibbles.nix
@@ -3,7 +3,8 @@
 let
   inherit (depot.nix.buildLisp) bundled;
   src = with pkgs; srcOnly lispPackages.nibbles;
-in depot.nix.buildLisp.library {
+in
+depot.nix.buildLisp.library {
   name = "nibbles";
 
   deps = with depot.third_party.lisp; [
diff --git a/third_party/lisp/postmodern.nix b/third_party/lisp/postmodern.nix
index b2ea318f8a40..25e0625c20bc 100644
--- a/third_party/lisp/postmodern.nix
+++ b/third_party/lisp/postmodern.nix
@@ -88,6 +88,7 @@ let
     ];
   };
 
-in postmodern // {
+in
+postmodern // {
   inherit s-sql cl-postgres;
 }
diff --git a/third_party/lisp/routes.nix b/third_party/lisp/routes.nix
index a76912c651a9..fc7d4e306713 100644
--- a/third_party/lisp/routes.nix
+++ b/third_party/lisp/routes.nix
@@ -20,7 +20,8 @@ let
     ];
   };
 
-in depot.nix.buildLisp.library {
+in
+depot.nix.buildLisp.library {
   name = "routes";
 
   deps = with depot.third_party.lisp; [
diff --git a/third_party/lisp/s-xml/default.nix b/third_party/lisp/s-xml/default.nix
index 3cd13ffb6b67..486e1c1ac8d3 100644
--- a/third_party/lisp/s-xml/default.nix
+++ b/third_party/lisp/s-xml/default.nix
@@ -1,15 +1,17 @@
 # XML serialiser for Common Lisp.
 { depot, pkgs, ... }:
 
-let src = pkgs.applyPatches {
-  name = "s-xml-source";
-  src = pkgs.lispPackages.s-xml.src;
+let
+  src = pkgs.applyPatches {
+    name = "s-xml-source";
+    src = pkgs.lispPackages.s-xml.src;
 
-  patches = [
-    ./0001-fix-definition-order-in-xml.lisp.patch
-  ];
-};
-in depot.nix.buildLisp.library {
+    patches = [
+      ./0001-fix-definition-order-in-xml.lisp.patch
+    ];
+  };
+in
+depot.nix.buildLisp.library {
   name = "s-xml";
 
   srcs = map (f: src + ("/src/" + f)) [
diff --git a/third_party/lisp/trivial-ldap.nix b/third_party/lisp/trivial-ldap.nix
index c8a27431c687..c85fe2accbb9 100644
--- a/third_party/lisp/trivial-ldap.nix
+++ b/third_party/lisp/trivial-ldap.nix
@@ -1,12 +1,14 @@
 { depot, pkgs, ... }:
 
-let src = pkgs.fetchFromGitHub {
+let
+  src = pkgs.fetchFromGitHub {
     owner = "rwiker";
     repo = "trivial-ldap";
     rev = "3b8f1ff85f29ea63e6ab2d0d27029d68b046faf8";
     sha256 = "1zaa4wnk5y5ff211pkg6dl27j4pjwh56hq0246slxsdxv6kvp1z9";
   };
-in depot.nix.buildLisp.library {
+in
+depot.nix.buildLisp.library {
   name = "trivial-ldap";
 
   deps = with depot.third_party.lisp; [
diff --git a/third_party/lisp/trivial-mimes.nix b/third_party/lisp/trivial-mimes.nix
index 04e8b5ef5d10..b097a3d0ee67 100644
--- a/third_party/lisp/trivial-mimes.nix
+++ b/third_party/lisp/trivial-mimes.nix
@@ -3,7 +3,7 @@
 let
   src = with pkgs; srcOnly lispPackages.trivial-mimes;
 
-  mime-types = pkgs.runCommand "mime-types.lisp" {} ''
+  mime-types = pkgs.runCommand "mime-types.lisp" { } ''
     substitute ${src}/mime-types.lisp $out \
       --replace /etc/mime.types ${src}/mime.types \
       --replace "(asdf:system-source-directory :trivial-mimes)" '"/bogus-dir"'
@@ -11,7 +11,8 @@ let
       # generally fail — we are not using ASDF after all.
   '';
 
-in depot.nix.buildLisp.library {
+in
+depot.nix.buildLisp.library {
   name = "trivial-mimes";
 
   deps = [
diff --git a/third_party/lisp/uax-15.nix b/third_party/lisp/uax-15.nix
index 1e44f88d5cb7..f98c029d3688 100644
--- a/third_party/lisp/uax-15.nix
+++ b/third_party/lisp/uax-15.nix
@@ -4,7 +4,8 @@ let
   inherit (pkgs) runCommand;
   inherit (depot.nix.buildLisp) bundled;
   src = with pkgs; srcOnly lispPackages.uax-15;
-in depot.nix.buildLisp.library {
+in
+depot.nix.buildLisp.library {
   name = "uax-15";
 
   deps = with depot.third_party.lisp; [
@@ -23,7 +24,7 @@ in depot.nix.buildLisp.library {
     #
     # additionally there are some wonky variable usages of variables
     # that are never defined, for which we patch in defvar statements.
-    (runCommand "precomputed-tables.lisp" {} ''
+    (runCommand "precomputed-tables.lisp" { } ''
       substitute ${src}/src/precomputed-tables.lisp precomputed-tables.lisp \
         --replace "(asdf:system-source-directory (asdf:find-system 'uax-15 nil))" \
                   '"${src}/"'
diff --git a/third_party/lisp/unix-opts.nix b/third_party/lisp/unix-opts.nix
index e52eab959d3e..248296113263 100644
--- a/third_party/lisp/unix-opts.nix
+++ b/third_party/lisp/unix-opts.nix
@@ -1,5 +1,5 @@
 # unix-opts is a portable command line argument parser
-{ depot, pkgs, ...}:
+{ depot, pkgs, ... }:
 
 
 let src = with pkgs; srcOnly lispPackages.unix-opts;
diff --git a/third_party/lisp/usocket-server.nix b/third_party/lisp/usocket-server.nix
index f2f11d7a17c8..5d6d04535f0c 100644
--- a/third_party/lisp/usocket-server.nix
+++ b/third_party/lisp/usocket-server.nix
@@ -4,7 +4,8 @@
 let
   inherit (depot.nix) buildLisp;
   src = with pkgs; srcOnly lispPackages.usocket-server;
-in buildLisp.library {
+in
+buildLisp.library {
   name = "usocket-server";
 
   deps = with depot.third_party.lisp; [
diff --git a/third_party/lisp/usocket.nix b/third_party/lisp/usocket.nix
index 335954978494..589a3a0cfc92 100644
--- a/third_party/lisp/usocket.nix
+++ b/third_party/lisp/usocket.nix
@@ -4,7 +4,8 @@
 let
   inherit (depot.nix) buildLisp;
   src = with pkgs; srcOnly lispPackages.usocket;
-in buildLisp.library {
+in
+buildLisp.library {
   name = "usocket";
   deps = with depot.third_party.lisp; [
     (buildLisp.bundled "asdf")
diff --git a/third_party/naersk/default.nix b/third_party/naersk/default.nix
index 865fcf04d23f..855e2bd01ba2 100644
--- a/third_party/naersk/default.nix
+++ b/third_party/naersk/default.nix
@@ -1,8 +1,10 @@
 { pkgs, ... }:
 
-pkgs.callPackage (pkgs.fetchFromGitHub {
-  owner = "nmattia";
-  repo = "naersk";
-  rev = "a3f40fe42cc6d267ff7518fa3199e99ff1444ac4";
-  sha256 = "1nf7fn8anghwf6p5p58ywbcwdkjxq112qv663rn52jq9k95iakdi";
-}) {}
+pkgs.callPackage
+  (pkgs.fetchFromGitHub {
+    owner = "nmattia";
+    repo = "naersk";
+    rev = "a3f40fe42cc6d267ff7518fa3199e99ff1444ac4";
+    sha256 = "1nf7fn8anghwf6p5p58ywbcwdkjxq112qv663rn52jq9k95iakdi";
+  })
+{ }
diff --git a/third_party/nix/corepkgs/buildenv.nix b/third_party/nix/corepkgs/buildenv.nix
index 0bac4c44b48a..4da0db2ae2ae 100644
--- a/third_party/nix/corepkgs/buildenv.nix
+++ b/third_party/nix/corepkgs/buildenv.nix
@@ -9,11 +9,13 @@ derivation {
 
   # !!! grmbl, need structured data for passing this in a clean way.
   derivations =
-    map (d:
-      [ (d.meta.active or "true")
-        (d.meta.priority or 5)
-        (builtins.length d.outputs)
-      ] ++ map (output: builtins.getAttr output d) d.outputs)
+    map
+      (d:
+        [
+          (d.meta.active or "true")
+          (d.meta.priority or 5)
+          (builtins.length d.outputs)
+        ] ++ map (output: builtins.getAttr output d) d.outputs)
       derivations;
 
   # Building user environments remotely just causes huge amounts of
diff --git a/third_party/nix/corepkgs/derivation.nix b/third_party/nix/corepkgs/derivation.nix
index c0fbe8082cd3..1f95cf88ec44 100644
--- a/third_party/nix/corepkgs/derivation.nix
+++ b/third_party/nix/corepkgs/derivation.nix
@@ -8,12 +8,14 @@ let
   strict = derivationStrict drvAttrs;
 
   commonAttrs = drvAttrs // (builtins.listToAttrs outputsList) //
-    { all = map (x: x.value) outputsList;
+    {
+      all = map (x: x.value) outputsList;
       inherit drvAttrs;
     };
 
   outputToAttrListElement = outputName:
-    { name = outputName;
+    {
+      name = outputName;
       value = commonAttrs // {
         outPath = builtins.getAttr outputName strict;
         drvPath = strict.drvPath;
@@ -24,4 +26,5 @@ let
 
   outputsList = map outputToAttrListElement outputs;
 
-in (builtins.head outputsList).value
+in
+(builtins.head outputsList).value
diff --git a/third_party/nix/corepkgs/fetchurl.nix b/third_party/nix/corepkgs/fetchurl.nix
index a84777f57448..9933b7cc120c 100644
--- a/third_party/nix/corepkgs/fetchurl.nix
+++ b/third_party/nix/corepkgs/fetchurl.nix
@@ -2,12 +2,13 @@
 , url
 , hash ? "" # an SRI ash
 
-# Legacy hash specification
-, md5 ? "", sha1 ? "", sha256 ? "", sha512 ? ""
-, outputHash ?
-    if hash != "" then hash else if sha512 != "" then sha512 else if sha1 != "" then sha1 else if md5 != "" then md5 else sha256
-, outputHashAlgo ?
-    if hash != "" then "" else if sha512 != "" then "sha512" else if sha1 != "" then "sha1" else if md5 != "" then "md5" else "sha256"
+  # Legacy hash specification
+, md5 ? ""
+, sha1 ? ""
+, sha256 ? ""
+, sha512 ? ""
+, outputHash ? if hash != "" then hash else if sha512 != "" then sha512 else if sha1 != "" then sha1 else if md5 != "" then md5 else sha256
+, outputHashAlgo ? if hash != "" then "" else if sha512 != "" then "sha512" else if sha1 != "" then "sha1" else if md5 != "" then "md5" else "sha256"
 
 , executable ? false
 , unpack ? false
@@ -33,7 +34,11 @@ derivation {
     # easy proxy configuration.  This is impure, but a fixed-output
     # derivation like fetchurl is allowed to do so since its result is
     # by definition pure.
-    "http_proxy" "https_proxy" "ftp_proxy" "all_proxy" "no_proxy"
+    "http_proxy"
+    "https_proxy"
+    "ftp_proxy"
+    "all_proxy"
+    "no_proxy"
   ];
 
   # To make "nix-prefetch-url" work.
diff --git a/third_party/nix/corepkgs/imported-drv-to-derivation.nix b/third_party/nix/corepkgs/imported-drv-to-derivation.nix
index eab8b050e8ff..639f068332f2 100644
--- a/third_party/nix/corepkgs/imported-drv-to-derivation.nix
+++ b/third_party/nix/corepkgs/imported-drv-to-derivation.nix
@@ -3,19 +3,22 @@ attrs @ { drvPath, outputs, name, ... }:
 let
 
   commonAttrs = (builtins.listToAttrs outputsList) //
-    { all = map (x: x.value) outputsList;
+    {
+      all = map (x: x.value) outputsList;
       inherit drvPath name;
       type = "derivation";
     };
 
   outputToAttrListElement = outputName:
-    { name = outputName;
+    {
+      name = outputName;
       value = commonAttrs // {
         outPath = builtins.getAttr outputName attrs;
         inherit outputName;
       };
     };
-    
+
   outputsList = map outputToAttrListElement outputs;
-    
-in (builtins.head outputsList).value
+
+in
+(builtins.head outputsList).value
diff --git a/third_party/nix/default.nix b/third_party/nix/default.nix
index e01fad12f034..bd448156f092 100644
--- a/third_party/nix/default.nix
+++ b/third_party/nix/default.nix
@@ -1,5 +1,4 @@
-args@{
-  depot ? (import ../.. {})
+args@{ depot ? (import ../.. { })
 , pkgs ? depot.third_party.nixpkgs
 , lib
 , buildType ? "release"
@@ -8,39 +7,43 @@ args@{
 
 let
   aws-s3-cpp = pkgs.aws-sdk-cpp.override {
-    apis = ["s3" "transfer"];
+    apis = [ "s3" "transfer" ];
     customMemoryManagement = false;
   };
 
-  src = let
-    srcDir = ./.;
-    # create relative paths for all the sources we are filtering
-    asRelative = path:
-      let
-        srcS = toString srcDir;
-        pathS = toString path;
-      in
+  src =
+    let
+      srcDir = ./.;
+      # create relative paths for all the sources we are filtering
+      asRelative = path:
+        let
+          srcS = toString srcDir;
+          pathS = toString path;
+        in
         if ! lib.hasPrefix srcS pathS then
           throw "Path is outside of the working directory."
         else
-        lib.removePrefix srcS pathS;
+          lib.removePrefix srcS pathS;
 
-  in builtins.filterSource (path: type:
-    # Strip out .nix files that are in the root of the repository.  Changing
-    # the expression of tvix shouldn't cause a rebuild of tvix unless really
-    # required.
-    !(dirOf (asRelative path) == "/" && lib.hasSuffix ".nix" path) &&
+    in
+    builtins.filterSource
+      (path: type:
+        # Strip out .nix files that are in the root of the repository.  Changing
+        # the expression of tvix shouldn't cause a rebuild of tvix unless really
+        # required.
+        !(dirOf (asRelative path) == "/" && lib.hasSuffix ".nix" path) &&
 
-    # remove the proto files from the repo as those are compiled separately
-    !(lib.hasPrefix "src/proto" (asRelative path)) &&
+        # remove the proto files from the repo as those are compiled separately
+        !(lib.hasPrefix "src/proto" (asRelative path)) &&
 
-    # ignore result symlinks
-    !(type == "symlink" && lib.hasPrefix "result" (baseNameOf path))
-  ) srcDir;
+        # ignore result symlinks
+        !(type == "symlink" && lib.hasPrefix "result" (baseNameOf path))
+      )
+      srcDir;
 
   # Proto generation in CMake is theoretically possible, but that is
   # very theoretical - this does it in Nix instead.
-  protoSrcs = pkgs.runCommand "nix-proto-srcs" {} ''
+  protoSrcs = pkgs.runCommand "nix-proto-srcs" { } ''
     export PROTO_SRCS=${./src/proto}
     mkdir -p $out/libproto
     ${depot.third_party.protobuf}/bin/protoc -I=$PROTO_SRCS \
@@ -52,12 +55,13 @@ let
 
   # Derivation for busybox that just has the `busybox` binary in bin/, not all
   # the symlinks, so cmake can find it
-  busybox = pkgs.runCommand "busybox" {} ''
+  busybox = pkgs.runCommand "busybox" { } ''
     mkdir -p $out/bin
     cp ${pkgs.busybox}/bin/busybox $out/bin
   '';
 
-in lib.fix (self: pkgs.fullLlvm11Stdenv.mkDerivation {
+in
+lib.fix (self: pkgs.fullLlvm11Stdenv.mkDerivation {
   pname = "tvix";
   version = "2.3.4";
   inherit src;
@@ -141,7 +145,7 @@ in lib.fix (self: pkgs.fullLlvm11Stdenv.mkDerivation {
   # Work around broken system header include flags in the cxx toolchain.
   LIBCXX_INCLUDE = "${pkgs.llvmPackages_11.libcxx}/include/c++/v1";
 
-  SANDBOX_SHELL="${pkgs.busybox}/bin/busybox";
+  SANDBOX_SHELL = "${pkgs.busybox}/bin/busybox";
 
   # Install the various symlinks to the Nix binary which users expect
   # to exist.
@@ -190,7 +194,7 @@ in lib.fix (self: pkgs.fullLlvm11Stdenv.mkDerivation {
         ${pkgs.jq}/bin/jq < compile_commands.json -r 'map(.file)|.[]' | grep -v '/generated/' | ${pkgs.parallel}/bin/parallel ${pkgs.clang-tools}/bin/clang-tidy -p compile_commands.json $@
       '';
 
-      installCheckInputs = up.installCheckInputs ++ [run_clang_tidy];
+      installCheckInputs = up.installCheckInputs ++ [ run_clang_tidy ];
 
       shellHook = ''
         export NIX_DATA_DIR="${toString depot.path}/third_party"
diff --git a/third_party/nix/test-vm.nix b/third_party/nix/test-vm.nix
index e5f8690fcb2d..8b00e5515ba2 100644
--- a/third_party/nix/test-vm.nix
+++ b/third_party/nix/test-vm.nix
@@ -16,4 +16,5 @@ let
   };
 
   system = depot.third_party.nixos { inherit configuration; };
-in system.vm
+in
+system.vm
diff --git a/third_party/nixery/default.nix b/third_party/nixery/default.nix
index be3a9dfc1b7d..7a798ca0b074 100644
--- a/third_party/nixery/default.nix
+++ b/third_party/nixery/default.nix
@@ -12,7 +12,8 @@ let
     rev = commit;
     sha256 = "195rz25y3hfxcmniysajzjg7g69qhz7w06lql8fn0dbcdcxsq6g4";
   };
-in drvTargets (import src {
+in
+drvTargets (import src {
   inherit pkgs;
   commitHash = _: commit;
 })
diff --git a/third_party/nixpkgs/default.nix b/third_party/nixpkgs/default.nix
index 5afed93e4c60..c5fa6a6781b0 100644
--- a/third_party/nixpkgs/default.nix
+++ b/third_party/nixpkgs/default.nix
@@ -6,7 +6,7 @@
 # in //default.nix passes this attribute as the `pkgs` argument to all
 # readTree derivations.
 
-{ depot ? {}, externalArgs ? {}, depotOverlays ? true, ... }:
+{ depot ? { }, externalArgs ? { }, depotOverlays ? true, ... }:
 
 let
   # This provides the sources of nixpkgs. We track both
@@ -42,7 +42,7 @@ let
 
   # Stable package set is imported, but not exposed, to overlay
   # required packages into the unstable set.
-  stableNixpkgs = import stableNixpkgsSrc {};
+  stableNixpkgs = import stableNixpkgsSrc { };
 
   # Overlay for packages that should come from the stable channel
   # instead (e.g. because something is broken in unstable).
@@ -58,7 +58,8 @@ let
     };
   };
 
-in import nixpkgsSrc {
+in
+import nixpkgsSrc {
   # allow users to inject their config into builds (e.g. to test CA derivations)
   config =
     (if externalArgs ? nixpkgsConfig then externalArgs.nixpkgsConfig else { })
@@ -75,5 +76,5 @@ in import nixpkgsSrc {
     depot.third_party.overlays.emacs
     depot.third_party.overlays.tvl
     depot.third_party.overlays.ecl-static
-  ] else []);
+  ] else [ ]);
 }
diff --git a/third_party/overlays/emacs.nix b/third_party/overlays/emacs.nix
index 895c45a9bcee..1dba4a739303 100644
--- a/third_party/overlays/emacs.nix
+++ b/third_party/overlays/emacs.nix
@@ -8,4 +8,5 @@ let
     url = "https://github.com/nix-community/emacs-overlay/archive/${commit}.tar.gz";
     sha256 = "1b7rmshf1wc9wcml7jlzggdzilj644brk5m49fry6lv53vqmykjq";
   };
-in import src
+in
+import src
diff --git a/third_party/overlays/haskell/default.nix b/third_party/overlays/haskell/default.nix
index 6e1ec2d0d1e5..8ea57f209662 100644
--- a/third_party/overlays/haskell/default.nix
+++ b/third_party/overlays/haskell/default.nix
@@ -11,7 +11,8 @@ let
     generic-arbitrary = appendPatch hsSuper.generic-arbitrary
       [ ./patches/generic-arbitrary-export-garbitrary.patch ];
   };
-in {
+in
+{
   haskellPackages = super.haskellPackages.override {
     inherit overrides;
   };
diff --git a/third_party/overlays/tvl.nix b/third_party/overlays/tvl.nix
index e6e97b834785..1b2f2b9792f6 100644
--- a/third_party/overlays/tvl.nix
+++ b/third_party/overlays/tvl.nix
@@ -28,14 +28,14 @@ self: super: {
       notmuch = super.notmuch.emacs;
 
       # Build EXWM with the depot sources instead.
-      exwm = esuper.exwm.overrideAttrs(_: {
+      exwm = esuper.exwm.overrideAttrs (_: {
         src = depot.path.origSrc + "/third_party/exwm";
       });
     })
   );
 
   # dottime support for notmuch
-  notmuch = super.notmuch.overrideAttrs(old: {
+  notmuch = super.notmuch.overrideAttrs (old: {
     passthru = old.passthru // {
       patches = old.patches ++ [ ./patches/notmuch-dottime.patch ];
     };
@@ -46,13 +46,13 @@ self: super: {
   nix-serve = super.nix-serve.override { nix = super.nix_2_3; };
 
   # Avoid builds of mkShell derivations in CI.
-  mkShell = super.lib.makeOverridable(args: (super.mkShell args) // {
+  mkShell = super.lib.makeOverridable (args: (super.mkShell args) // {
     meta.ci.skip = true;
   });
 
   # bump nixpkgs-fmt to a version that doesn't touch whitespace in
   # strings
-  nixpkgs-fmt = super.nixpkgs-fmt.overrideAttrs(old: rec {
+  nixpkgs-fmt = super.nixpkgs-fmt.overrideAttrs (old: rec {
     src = self.fetchFromGitHub {
       owner = "nix-community";
       repo = "nixpkgs-fmt";
@@ -60,7 +60,7 @@ self: super: {
       sha256 = "0hjkbcgz62793hzfzlaxyah8a2c1k79n1k891lg7kfw8mkbq0w4p";
     };
 
-    cargoDeps = old.cargoDeps.overrideAttrs(_: {
+    cargoDeps = old.cargoDeps.overrideAttrs (_: {
       inherit src;
       outputHash = "10if2lmv8d95j3walq3ggx3y423yfy4yl9vplw3apd0s671bly8b";
     });
diff --git a/third_party/prometheus-fail2ban-exporter/default.nix b/third_party/prometheus-fail2ban-exporter/default.nix
index bed5bd630161..818839e48c72 100644
--- a/third_party/prometheus-fail2ban-exporter/default.nix
+++ b/third_party/prometheus-fail2ban-exporter/default.nix
@@ -10,7 +10,8 @@ let
     p.prometheus_client
   ]);
 
-in pkgs.writeShellScriptBin "prometheus-fail2ban-exporter" ''
+in
+pkgs.writeShellScriptBin "prometheus-fail2ban-exporter" ''
   set -eo pipefail
 
   exec "${python}/bin/python" "${script}"
diff --git a/third_party/python/broadlink/default.nix b/third_party/python/broadlink/default.nix
index e316d83d1d71..b1dcf300811d 100644
--- a/third_party/python/broadlink/default.nix
+++ b/third_party/python/broadlink/default.nix
@@ -7,7 +7,8 @@
 let
   inherit (pkgs) fetchFromGitHub;
   inherit (pkgs.python3Packages) buildPythonPackage cryptography;
-in buildPythonPackage (lib.fix (self: {
+in
+buildPythonPackage (lib.fix (self: {
   pname = "python-broadlink";
   version = "0.13.2";
   src = ./.;
diff --git a/third_party/rust-crates/default.nix b/third_party/rust-crates/default.nix
index b912a9bde3d4..b3246dd0a363 100644
--- a/third_party/rust-crates/default.nix
+++ b/third_party/rust-crates/default.nix
@@ -4,17 +4,17 @@
 # Intended for manual updates, which keeps us honest with what we pull into our closure.
 
 let
-  buildRustCrate = attrs@{
-    edition ? "2018",
-    pname,
-    crateName ? pname,
-    ...
-  }: pkgs.buildRustCrate (attrs // {
-    inherit
-      crateName
-      edition
-      ;
-   });
+  buildRustCrate =
+    attrs@{ edition ? "2018"
+    , pname
+    , crateName ? pname
+    , ...
+    }: pkgs.buildRustCrate (attrs // {
+      inherit
+        crateName
+        edition
+        ;
+    });
 in
 
 rec {
diff --git a/third_party/rustsec-advisory-db/default.nix b/third_party/rustsec-advisory-db/default.nix
index 3d3b387be84f..b30e98fa4aa8 100644
--- a/third_party/rustsec-advisory-db/default.nix
+++ b/third_party/rustsec-advisory-db/default.nix
@@ -20,5 +20,5 @@ pkgs.fetchFromGitHub {
   inherit (pin)
     rev
     sha256
-  ;
+    ;
 }
diff --git a/tools/depot-nixpkgs-update.nix b/tools/depot-nixpkgs-update.nix
index 6557b2f25a4f..c92bc76b62ea 100644
--- a/tools/depot-nixpkgs-update.nix
+++ b/tools/depot-nixpkgs-update.nix
@@ -11,8 +11,8 @@ let
   archiveUrl = "https://github.com/NixOS/nixpkgs/archive/";
 
   bins = getBins pkgs.nix [ "nix-prefetch-url" ]
-    //   getBins pkgs.curl [ "curl" ]
-    ;
+    // getBins pkgs.curl [ "curl" ]
+  ;
 
 in
 
diff --git a/tools/depot-scanner/default.nix b/tools/depot-scanner/default.nix
index d18034cff0fd..59b6e53f7097 100644
--- a/tools/depot-scanner/default.nix
+++ b/tools/depot-scanner/default.nix
@@ -1,16 +1,18 @@
-{ depot, pkgs, ...}:
+{ depot, pkgs, ... }:
 
 let
   localProto = depot.nix.buildGo.grpc {
     name = "code.tvl.fyi/tools/depot-scanner/proto";
     proto = ./depot_scanner.proto;
   };
-in depot.nix.buildGo.program {
-  name = "depot-scanner";
-  srcs = [
-    ./main.go
-  ];
-  deps = [
-    localProto
-  ];
-} // { inherit localProto; }
+in
+depot.nix.buildGo.program
+  {
+    name = "depot-scanner";
+    srcs = [
+      ./main.go
+    ];
+    deps = [
+      localProto
+    ];
+  } // { inherit localProto; }
diff --git a/tools/depotfmt.nix b/tools/depotfmt.nix
index 8c8e99c0891e..0be32091b7fd 100644
--- a/tools/depotfmt.nix
+++ b/tools/depotfmt.nix
@@ -34,7 +34,8 @@ let
       --config-file ${config} \
       --tree-root .
   '';
-in depotfmt.overrideAttrs(_: {
+in
+depotfmt.overrideAttrs (_: {
   passthru.meta.ci.extraSteps.check = {
     label = "depot formatting check";
     command = check;
diff --git a/tools/emacs-pkgs/buildEmacsPackage.nix b/tools/emacs-pkgs/buildEmacsPackage.nix
index 160c0626136d..990b53b763b0 100644
--- a/tools/emacs-pkgs/buildEmacsPackage.nix
+++ b/tools/emacs-pkgs/buildEmacsPackage.nix
@@ -16,19 +16,23 @@
 
 buildArgs:
 
-pkgs.callPackage({ emacsPackages }:
+pkgs.callPackage
+  ({ emacsPackages }:
 
-let
-  # Select external dependencies from the emacsPackages set
-  externalDeps = (buildArgs.externalRequires or (_: [])) emacsPackages;
+  let
+    # Select external dependencies from the emacsPackages set
+    externalDeps = (buildArgs.externalRequires or (_: [ ])) emacsPackages;
 
-  # Override emacsPackages for depot-internal packages
-  internalDeps = map (p: p.override { inherit emacsPackages; })
-                     (buildArgs.internalRequires or []);
+    # Override emacsPackages for depot-internal packages
+    internalDeps = map (p: p.override { inherit emacsPackages; })
+      (buildArgs.internalRequires or [ ]);
 
-  trivialBuildArgs = builtins.removeAttrs buildArgs [
-    "externalRequires" "internalRequires"
-  ] // {
-    packageRequires = externalDeps ++ internalDeps;
-  };
-in emacsPackages.trivialBuild trivialBuildArgs) {}
+    trivialBuildArgs = builtins.removeAttrs buildArgs [
+      "externalRequires"
+      "internalRequires"
+    ] // {
+      packageRequires = externalDeps ++ internalDeps;
+    };
+  in
+  emacsPackages.trivialBuild trivialBuildArgs)
+{ }
diff --git a/tools/emacs-pkgs/notable/default.nix b/tools/emacs-pkgs/notable/default.nix
index 8c6935fe886b..f57b1c66ae3f 100644
--- a/tools/emacs-pkgs/notable/default.nix
+++ b/tools/emacs-pkgs/notable/default.nix
@@ -6,7 +6,9 @@ depot.tools.emacs-pkgs.buildEmacsPackage rec {
   src = ./notable.el;
 
   externalRequires = epkgs: with epkgs; [
-    f ht s
+    f
+    ht
+    s
   ];
 
   internalRequires = [
diff --git a/tools/eprintf.nix b/tools/eprintf.nix
index eeacca4c8c72..933d73ea71ae 100644
--- a/tools/eprintf.nix
+++ b/tools/eprintf.nix
@@ -3,7 +3,13 @@
 let
   bins = depot.nix.getBins pkgs.coreutils [ "printf" ];
 
-# printf(1), but redirect to stderr
-in depot.nix.writeExecline "eprintf" {} [
-  "fdmove" "-c" "1" "2" bins.printf "$@"
+  # printf(1), but redirect to stderr
+in
+depot.nix.writeExecline "eprintf" { } [
+  "fdmove"
+  "-c"
+  "1"
+  "2"
+  bins.printf
+  "$@"
 ]
diff --git a/tools/nsfv-setup/default.nix b/tools/nsfv-setup/default.nix
index 98dcc61b7bc1..1e353e32697b 100644
--- a/tools/nsfv-setup/default.nix
+++ b/tools/nsfv-setup/default.nix
@@ -15,7 +15,8 @@
 let
   inherit (pkgs) ripgrep pulseaudio;
   inherit (depot.third_party) nsfv;
-in pkgs.writeShellScriptBin "nsfv-setup" ''
+in
+pkgs.writeShellScriptBin "nsfv-setup" ''
   export PATH="${ripgrep}/bin:${pulseaudio}/bin:$PATH"
 
   if pacmd list-sinks | rg librnnoise_ladspa.so >/dev/null; then
diff --git a/tools/rust-crates-advisory/default.nix b/tools/rust-crates-advisory/default.nix
index c0cd4dc03e05..71a51bb1af1b 100644
--- a/tools/rust-crates-advisory/default.nix
+++ b/tools/rust-crates-advisory/default.nix
@@ -3,81 +3,137 @@
 let
 
   bins =
-       depot.nix.getBins pkgs.s6-portable-utils [ "s6-ln" "s6-cat" "s6-echo" "s6-mkdir" "s6-test" "s6-touch" ]
+    depot.nix.getBins pkgs.s6-portable-utils [ "s6-ln" "s6-cat" "s6-echo" "s6-mkdir" "s6-test" "s6-touch" ]
     // depot.nix.getBins pkgs.lr [ "lr" ]
-    ;
+  ;
 
   crate-advisories = "${depot.third_party.rustsec-advisory-db}/crates";
 
   our-crates = lib.filter (v: v ? outPath)
     (builtins.attrValues depot.third_party.rust-crates);
 
-  check-security-advisory = depot.nix.writers.rustSimple {
-    name = "parse-security-advisory";
-    dependencies = [
-      depot.third_party.rust-crates.toml
-      depot.third_party.rust-crates.semver
-    ];
-  } (builtins.readFile ./check-security-advisory.rs);
+  check-security-advisory = depot.nix.writers.rustSimple
+    {
+      name = "parse-security-advisory";
+      dependencies = [
+        depot.third_party.rust-crates.toml
+        depot.third_party.rust-crates.semver
+      ];
+    }
+    (builtins.readFile ./check-security-advisory.rs);
 
   # $1 is the directory with advisories for crate $2 with version $3
   check-crate-advisory = depot.nix.writeExecline "check-crate-advisory" { readNArgs = 3; } [
-    "pipeline" [ bins.lr "-0" "-t" "depth == 1" "$1" ]
-    "forstdin" "-0" "-Eo" "0" "advisory"
-    "if" [ depot.tools.eprintf "advisory %s\n" "$advisory" ]
-    check-security-advisory "$advisory" "$3"
+    "pipeline"
+    [ bins.lr "-0" "-t" "depth == 1" "$1" ]
+    "forstdin"
+    "-0"
+    "-Eo"
+    "0"
+    "advisory"
+    "if"
+    [ depot.tools.eprintf "advisory %s\n" "$advisory" ]
+    check-security-advisory
+    "$advisory"
+    "$3"
   ];
 
   # Run through everything in the `crate-advisories` repository
   # and check whether we can parse all the advisories without crashing.
-  test-parsing-all-security-advisories = depot.nix.runExecline "check-all-our-crates" {} [
-    "pipeline" [ bins.lr "-0" "-t" "depth == 1" crate-advisories ]
-    "if" [
+  test-parsing-all-security-advisories = depot.nix.runExecline "check-all-our-crates" { } [
+    "pipeline"
+    [ bins.lr "-0" "-t" "depth == 1" crate-advisories ]
+    "if"
+    [
       # this will succeed as long as check-crate-advisory doesn’t `panic!()` (status 101)
-      "forstdin" "-0" "-E" "-x" "101" "crate_advisories"
-      check-crate-advisory "$crate_advisories" "foo" "0.0.0"
+      "forstdin"
+      "-0"
+      "-E"
+      "-x"
+      "101"
+      "crate_advisories"
+      check-crate-advisory
+      "$crate_advisories"
+      "foo"
+      "0.0.0"
     ]
-    "importas" "out" "out"
-    bins.s6-touch "$out"
+    "importas"
+    "out"
+    "out"
+    bins.s6-touch
+    "$out"
   ];
 
 
-  check-all-our-crates = depot.nix.runExecline "check-all-our-crates" {
-    stdin = lib.concatStrings
-      (map
-        (crate:
-          depot.nix.netstring.fromString
-            ( depot.nix.netstring.fromString crate.crateName
-            + depot.nix.netstring.fromString crate.version ))
-        our-crates);
-  } [
-    "if" [
-      "forstdin" "-o" "0" "-Ed" "" "crateNetstring"
-      "multidefine" "-d" "" "$crateNetstring" [ "crate" "crate_version" ]
-      "if" [ depot.tools.eprintf "checking %s, version %s\n" "$crate" "$crate_version" ]
+  check-all-our-crates = depot.nix.runExecline "check-all-our-crates"
+    {
+      stdin = lib.concatStrings
+        (map
+          (crate:
+            depot.nix.netstring.fromString
+              (depot.nix.netstring.fromString crate.crateName
+                + depot.nix.netstring.fromString crate.version))
+          our-crates);
+    } [
+    "if"
+    [
+      "forstdin"
+      "-o"
+      "0"
+      "-Ed"
+      ""
+      "crateNetstring"
+      "multidefine"
+      "-d"
+      ""
+      "$crateNetstring"
+      [ "crate" "crate_version" ]
+      "if"
+      [ depot.tools.eprintf "checking %s, version %s\n" "$crate" "$crate_version" ]
 
-      "ifthenelse" [ bins.s6-test "-d" "${crate-advisories}/\${crate}" ]
-          [ # also print the full advisory text if it matches
-            "export" "PRINT_ADVISORY" "1"
-            check-crate-advisory "${crate-advisories}/\${crate}" "$crate" "$crate_version"
-          ]
-        [ depot.tools.eprintf "No advisories found for crate %s\n" "$crate" ]
-        "importas" "-ui" "ret" "?"
-        # put a marker in ./failed to read at the end
-        "ifelse" [ bins.s6-test "$ret" "-eq" "1" ]
-          [ bins.s6-touch "./failed" ]
-        "if" [ depot.tools.eprintf "\n" ]
-        "exit" "$ret"
-    ]
-    "ifelse" [ bins.s6-test "-f" "./failed" ]
-      [ "if" [ depot.tools.eprintf "Error: Found active advisories!" ]
-        "exit" "1"
+      "ifthenelse"
+      [ bins.s6-test "-d" "${crate-advisories}/\${crate}" ]
+      [
+        # also print the full advisory text if it matches
+        "export"
+        "PRINT_ADVISORY"
+        "1"
+        check-crate-advisory
+        "${crate-advisories}/\${crate}"
+        "$crate"
+        "$crate_version"
       ]
-    "importas" "out" "out"
-    bins.s6-touch "$out"
+      [ depot.tools.eprintf "No advisories found for crate %s\n" "$crate" ]
+      "importas"
+      "-ui"
+      "ret"
+      "?"
+      # put a marker in ./failed to read at the end
+      "ifelse"
+      [ bins.s6-test "$ret" "-eq" "1" ]
+      [ bins.s6-touch "./failed" ]
+      "if"
+      [ depot.tools.eprintf "\n" ]
+      "exit"
+      "$ret"
+    ]
+    "ifelse"
+    [ bins.s6-test "-f" "./failed" ]
+    [
+      "if"
+      [ depot.tools.eprintf "Error: Found active advisories!" ]
+      "exit"
+      "1"
+    ]
+    "importas"
+    "out"
+    "out"
+    bins.s6-touch
+    "$out"
   ];
 
-in depot.nix.readTree.drvTargets {
+in
+depot.nix.readTree.drvTargets {
 
   check-all-our-crates =
     depot.nix.drvSeqL
diff --git a/tools/tvlc/default.nix b/tools/tvlc/default.nix
index f40f30a44e33..a6f201485ff2 100644
--- a/tools/tvlc/default.nix
+++ b/tools/tvlc/default.nix
@@ -43,7 +43,8 @@ let
     '';
   };
 
-in {
+in
+{
   inherit pathScripts;
   inherit commonsh;
   inherit tvlcNew;
diff --git a/tvix/default.nix b/tvix/default.nix
index 82cd87cda452..fb4b367ecb6f 100644
--- a/tvix/default.nix
+++ b/tvix/default.nix
@@ -1,2 +1,2 @@
-{...}:
-{}
+{ ... }:
+{ }
diff --git a/tvix/docs/default.nix b/tvix/docs/default.nix
index 4b09f8d2dae2..016d641df59f 100644
--- a/tvix/docs/default.nix
+++ b/tvix/docs/default.nix
@@ -4,9 +4,9 @@ let
 
   tl = pkgs.texlive.combine {
     inherit (pkgs.texlive) scheme-medium wrapfig ulem capt-of
-    titlesec preprint enumitem paralist ctex environ svg
-    beamer trimspaces zhnumber changepage framed pdfpages
-    fvextra minted upquote ifplatform xstring;
+      titlesec preprint enumitem paralist ctex environ svg
+      beamer trimspaces zhnumber changepage framed pdfpages
+      fvextra minted upquote ifplatform xstring;
   };
 
   csl = pkgs.fetchurl {
@@ -15,7 +15,8 @@ let
     sha256 = "1yfhhnhbzvhrv93baz98frmgsx5y442nzhb0l956l4j35fb0cc3h";
   };
 
-in pkgs.stdenv.mkDerivation {
+in
+pkgs.stdenv.mkDerivation {
   pname = "tvix-doc";
   version = "0.1";
 
diff --git a/tvix/proto/default.nix b/tvix/proto/default.nix
index 7ff03ba3b74f..ac0ee66e8720 100644
--- a/tvix/proto/default.nix
+++ b/tvix/proto/default.nix
@@ -3,7 +3,7 @@
 # anywhere, it just functions as a CI check for now.
 { pkgs, ... }:
 
-pkgs.runCommandNoCC "tvix-cc-proto" {} ''
+pkgs.runCommandNoCC "tvix-cc-proto" { } ''
   mkdir $out
   ${pkgs.protobuf}/bin/protoc -I ${./.} evaluator.proto --cpp_out=$out
 ''
diff --git a/tvix/shell.nix b/tvix/shell.nix
index 600d96d76d28..f57019cd94a4 100644
--- a/tvix/shell.nix
+++ b/tvix/shell.nix
@@ -1,8 +1,9 @@
 let
-  depot = (import ./.. {});
+  depot = (import ./.. { });
   pkgs = depot.third_party.nixpkgs;
 
-in pkgs.mkShell {
+in
+pkgs.mkShell {
   buildInputs = [
     pkgs.rustup
     pkgs.rust-analyzer
diff --git a/users/Profpatsch/arglib/netencode.nix b/users/Profpatsch/arglib/netencode.nix
index 50f4c11c2d8f..3f1d121e5132 100644
--- a/users/Profpatsch/arglib/netencode.nix
+++ b/users/Profpatsch/arglib/netencode.nix
@@ -2,13 +2,14 @@
 
 let
   netencode = {
-    rust = depot.nix.writers.rustSimpleLib {
-      name = "arglib-netencode";
-      dependencies = [
-        depot.users.Profpatsch.execline.exec-helpers
-        depot.users.Profpatsch.netencode.netencode-rs
-      ];
-    } ''
+    rust = depot.nix.writers.rustSimpleLib
+      {
+        name = "arglib-netencode";
+        dependencies = [
+          depot.users.Profpatsch.execline.exec-helpers
+          depot.users.Profpatsch.netencode.netencode-rs
+        ];
+      } ''
       extern crate netencode;
       extern crate exec_helpers;
 
@@ -37,4 +38,5 @@ let
     '';
   };
 
-in depot.nix.readTree.drvTargets netencode
+in
+depot.nix.readTree.drvTargets netencode
diff --git a/users/Profpatsch/atomically-write.nix b/users/Profpatsch/atomically-write.nix
index d5039d3e46b9..c4d07cfbb1fa 100644
--- a/users/Profpatsch/atomically-write.nix
+++ b/users/Profpatsch/atomically-write.nix
@@ -25,4 +25,5 @@ let
     mv "$tmp/out" "$to"
   '';
 
-in atomically-write
+in
+atomically-write
diff --git a/users/Profpatsch/blog/default.nix b/users/Profpatsch/blog/default.nix
index d3c5c596b444..44a7a21aa3c3 100644
--- a/users/Profpatsch/blog/default.nix
+++ b/users/Profpatsch/blog/default.nix
@@ -2,11 +2,11 @@
 
 let
   bins = depot.nix.getBins pkgs.lowdown [ "lowdown" ]
-      // depot.nix.getBins pkgs.cdb [ "cdbget" "cdbmake" "cdbdump" ]
-      // depot.nix.getBins pkgs.coreutils [ "mv" "cat" "printf" "test" ]
-      // depot.nix.getBins pkgs.s6-networking [ "s6-tcpserver" ]
-      // depot.nix.getBins pkgs.time [ "time" ]
-      ;
+    // depot.nix.getBins pkgs.cdb [ "cdbget" "cdbmake" "cdbdump" ]
+    // depot.nix.getBins pkgs.coreutils [ "mv" "cat" "printf" "test" ]
+    // depot.nix.getBins pkgs.s6-networking [ "s6-tcpserver" ]
+    // depot.nix.getBins pkgs.time [ "time" ]
+  ;
 
   # /
   # TODO: use
@@ -14,7 +14,7 @@ let
     {
       route = [ "notes" ];
       name = "Notes";
-      page = {cssFile}: router cssFile;
+      page = { cssFile }: router cssFile;
     }
     {
       route = [ "projects" ];
@@ -28,7 +28,7 @@ let
     {
       route = [ "notes" "preventing-oom" ];
       name = "Preventing out-of-memory (OOM) errors on Linux";
-      page = {cssFile}: markdownToHtml {
+      page = { cssFile }: markdownToHtml {
         name = "preventing-oom";
         markdown = ./notes/preventing-oom.md;
         inherit cssFile;
@@ -37,7 +37,7 @@ let
     {
       route = [ "notes" "rust-string-conversions" ];
       name = "Converting between different String types in Rust";
-      page = {cssFile}: markdownToHtml {
+      page = { cssFile }: markdownToHtml {
         name = "rust-string-conversions";
         markdown = ./notes/rust-string-conversions.md;
         inherit cssFile;
@@ -69,48 +69,52 @@ let
       title = "Ligature Emulation in Emacs";
       subtitle = "It’s not pretty, but the results are";
       description = "How to set up ligatures using <code>prettify-symbols-mode</code> and the Hasklig/FiraCode fonts.";
-      page = {cssFile}: markdownToHtml {
+      page = { cssFile }: markdownToHtml {
         name = "2017-05-04-ligature-emluation-in-emacs";
         markdown = ./posts/2017-05-04-ligature-emulation-in-emacs.md;
         inherit cssFile;
       };
       route = [ "posts" "2017-05-04-ligature-emluation-in-emacs" ];
-      tags = ["emacs"];
+      tags = [ "emacs" ];
     }
   ];
 
   # convert a markdown file to html via lowdown
-  markdownToHtml = {
-    name,
-    # the file to convert
-    markdown,
-    # css file to add to the final result, as { route }
-    cssFile
-  }:
-    depot.nix.runExecline "${name}.html" {} ([
-      "importas" "out" "out"
+  markdownToHtml =
+    { name
+    , # the file to convert
+      markdown
+    , # css file to add to the final result, as { route }
+      cssFile
+    }:
+    depot.nix.runExecline "${name}.html" { } ([
+      "importas"
+      "out"
+      "out"
       (depot.users.Profpatsch.lib.debugExec "")
       bins.lowdown
-        "-s" "-Thtml"
-      ] ++
-        (lib.optional (cssFile != null) (["-M" "css=${mkRoute cssFile.route}"]))
-      ++ [
-        "-o" "$out"
-        markdown
+      "-s"
+      "-Thtml"
+    ] ++
+    (lib.optional (cssFile != null) ([ "-M" "css=${mkRoute cssFile.route}" ]))
+    ++ [
+      "-o"
+      "$out"
+      markdown
     ]);
 
   # takes a { route … } attrset and converts the route lists to an absolute path
   fullRoute = attrs: lib.pipe attrs [
-    (map (x@{route, ...}: x // { route = mkRoute route; }))
+    (map (x@{ route, ... }: x // { route = mkRoute route; }))
   ];
 
   # a cdb from route to a netencoded version of data for each route
   router = cssFile: lib.pipe (notes ++ posts) [
     (map (r: with depot.users.Profpatsch.lens;
-      lib.pipe r [
-        (over (field "route") mkRoute)
-        (over (field "page") (_ { inherit cssFile; }))
-      ]))
+    lib.pipe r [
+      (over (field "route") mkRoute)
+      (over (field "page") (_ { inherit cssFile; }))
+    ]))
     (map (x: {
       name = x.route;
       value = depot.users.Profpatsch.netencode.gen.dwim x;
@@ -121,11 +125,12 @@ let
 
   # Create a link to the given source file/directory, given the relative path in the depot repo.
   # Checks that the file exists at evaluation time.
-  depotCgitLink = {
-    # relative path from the depot root (without leading /).
-    relativePath
-  }:
-    assert
+  depotCgitLink =
+    {
+      # relative path from the depot root (without leading /).
+      relativePath
+    }:
+      assert
       (lib.assertMsg
         (builtins.pathExists (depot.path.origSrc + "/${relativePath}"))
         "depotCgitLink: path /${relativePath} does not exist in depot, and depot.path was ${toString depot.path}");
@@ -133,12 +138,20 @@ let
 
   # look up a route by path ($1)
   router-lookup = cssFile: depot.nix.writeExecline "router-lookup" { readNArgs = 1; } [
-    cdbLookup (router cssFile) "$1"
+    cdbLookup
+    (router cssFile)
+    "$1"
   ];
 
   runExeclineStdout = name: args: cmd: depot.nix.runExecline name args ([
-    "importas" "-ui" "out" "out"
-    "redirfd" "-w" "1" "$out"
+    "importas"
+    "-ui"
+    "out"
+    "out"
+    "redirfd"
+    "-w"
+    "1"
+    "$out"
   ] ++ cmd);
 
   notes-index-html =
@@ -170,106 +183,167 @@ let
   html = s: s;
 
   projects-index-html =
-  let o = projects;
-  in ''
-    <dl>
-    ${scope o (o: ''
-      <dt><a href="${str o.link}">${esc o.name}</a></dt>
-      <dd>${html o.description}</dd>
-    '')}
-    </dl>
-  '';
+    let o = projects;
+    in ''
+      <dl>
+      ${scope o (o: ''
+        <dt><a href="${str o.link}">${esc o.name}</a></dt>
+        <dd>${html o.description}</dd>
+      '')}
+      </dl>
+    '';
 
   projects-index = pkgs.writeText "projects-index.html" projects-index-html;
 
   posts-index-html =
-  let o = fullRoute posts;
-  in ''
-    <dl>
-    ${scope o (o: ''
-      <dt>${str o.date} <a href="${str o.route}">${esc o.title}</a></dt>
-      <dd>${html o.description}</dd>
-    '')}
-    </dl>
-  '';
+    let o = fullRoute posts;
+    in ''
+      <dl>
+      ${scope o (o: ''
+        <dt>${str o.date} <a href="${str o.route}">${esc o.title}</a></dt>
+        <dd>${html o.description}</dd>
+      '')}
+      </dl>
+    '';
 
   posts-index = pkgs.writeText "projects-index.html" posts-index-html;
 
   arglibNetencode = val: depot.nix.writeExecline "arglib-netencode" { } [
-    "export" "ARGLIB_NETENCODE" (depot.users.Profpatsch.netencode.gen.dwim val)
+    "export"
+    "ARGLIB_NETENCODE"
+    (depot.users.Profpatsch.netencode.gen.dwim val)
     "$@"
   ];
 
   # A simple http server that serves the site. Yes, it’s horrible.
-  site-server = { cssFile, port }: depot.nix.writeExecline "blog-server" {} [
+  site-server = { cssFile, port }: depot.nix.writeExecline "blog-server" { } [
     (depot.users.Profpatsch.lib.runInEmptyEnv [ "PATH" ])
-    bins.s6-tcpserver "127.0.0.1" port
-    bins.time "--format=time: %es" "--"
-    runOr return400
-    "pipeline" [
+    bins.s6-tcpserver
+    "127.0.0.1"
+    port
+    bins.time
+    "--format=time: %es"
+    "--"
+    runOr
+    return400
+    "pipeline"
+    [
       (arglibNetencode {
         what = "request";
       })
       depot.users.Profpatsch.read-http
     ]
     depot.users.Profpatsch.netencode.record-splice-env
-    runOr return500
-    "importas" "-i" "path" "path"
-    "if" [ depot.tools.eprintf "GET \${path}\n" ]
-    runOr return404
-    "backtick" "-ni" "TEMPLATE_DATA" [
+    runOr
+    return500
+    "importas"
+    "-i"
+    "path"
+    "path"
+    "if"
+    [ depot.tools.eprintf "GET \${path}\n" ]
+    runOr
+    return404
+    "backtick"
+    "-ni"
+    "TEMPLATE_DATA"
+    [
       # TODO: factor this out of here, this is routing not serving
-      "ifelse" [ bins.test "$path" "=" "/notes" ]
-        [ "export" "content-type" "text/html"
-          "export" "serve-file" notes-index
-          depot.users.Profpatsch.netencode.env-splice-record
-        ]
-      "ifelse" [ bins.test "$path" "=" "/projects" ]
-        [ "export" "content-type" "text/html"
-          "export" "serve-file" projects-index
-          depot.users.Profpatsch.netencode.env-splice-record
-        ]
-      "ifelse" [ bins.test "$path" "=" "/posts" ]
-        [ "export" "content-type" "text/html"
-          "export" "serve-file" posts-index
-          depot.users.Profpatsch.netencode.env-splice-record
-        ]
+      "ifelse"
+      [ bins.test "$path" "=" "/notes" ]
+      [
+        "export"
+        "content-type"
+        "text/html"
+        "export"
+        "serve-file"
+        notes-index
+        depot.users.Profpatsch.netencode.env-splice-record
+      ]
+      "ifelse"
+      [ bins.test "$path" "=" "/projects" ]
+      [
+        "export"
+        "content-type"
+        "text/html"
+        "export"
+        "serve-file"
+        projects-index
+        depot.users.Profpatsch.netencode.env-splice-record
+      ]
+      "ifelse"
+      [ bins.test "$path" "=" "/posts" ]
+      [
+        "export"
+        "content-type"
+        "text/html"
+        "export"
+        "serve-file"
+        posts-index
+        depot.users.Profpatsch.netencode.env-splice-record
+      ]
       # TODO: ignore potential query arguments. See 404 message
-      "pipeline" [ (router-lookup cssFile) "$path" ]
+      "pipeline"
+      [ (router-lookup cssFile) "$path" ]
       depot.users.Profpatsch.netencode.record-splice-env
-      "importas" "-ui" "page" "page"
-      "export" "content-type" "text/html"
-      "export" "serve-file" "$page"
+      "importas"
+      "-ui"
+      "page"
+      "page"
+      "export"
+      "content-type"
+      "text/html"
+      "export"
+      "serve-file"
+      "$page"
       depot.users.Profpatsch.netencode.env-splice-record
     ]
-    runOr return500
-    "if" [
-      "pipeline" [ bins.printf ''
-        HTTP/1.1 200 OK
-        Content-Type: {{{content-type}}}; charset=UTF-8
-        Connection: close
-
-      '' ]
+    runOr
+    return500
+    "if"
+    [
+      "pipeline"
+      [
+        bins.printf
+        ''
+          HTTP/1.1 200 OK
+          Content-Type: {{{content-type}}}; charset=UTF-8
+          Connection: close
+
+        ''
+      ]
       depot.users.Profpatsch.netencode.netencode-mustache
     ]
-    "pipeline" [ "importas" "t" "TEMPLATE_DATA" bins.printf "%s" "$t" ]
+    "pipeline"
+    [ "importas" "t" "TEMPLATE_DATA" bins.printf "%s" "$t" ]
     depot.users.Profpatsch.netencode.record-splice-env
-    "importas" "-ui" "serve-file" "serve-file"
-    bins.cat "$serve-file"
+    "importas"
+    "-ui"
+    "serve-file"
+    "serve-file"
+    bins.cat
+    "$serve-file"
   ];
 
   # run argv or $1 if argv returns a failure status code.
   runOr = depot.nix.writeExecline "run-or" { readNArgs = 1; } [
-    "foreground" [ "$@" ]
-    "importas" "?" "?"
-    "ifelse" [ bins.test "$?" "-eq" "0" ]
-    []
-    "if" [ depot.tools.eprintf "runOr: exited \${?}, running \${1}\n" ]
+    "foreground"
+    [ "$@" ]
+    "importas"
+    "?"
+    "?"
+    "ifelse"
+    [ bins.test "$?" "-eq" "0" ]
+    [ ]
+    "if"
+    [ depot.tools.eprintf "runOr: exited \${?}, running \${1}\n" ]
     "$1"
   ];
 
-  return400 = depot.nix.writeExecline "return400" {} [
-    bins.printf "%s" ''
+  return400 = depot.nix.writeExecline "return400" { } [
+    bins.printf
+    "%s"
+    ''
       HTTP/1.1 400 Bad Request
       Content-Type: text/plain; charset=UTF-8
       Connection: close
@@ -277,8 +351,10 @@ let
     ''
   ];
 
-  return404 = depot.nix.writeExecline "return404" {} [
-    bins.printf "%s" ''
+  return404 = depot.nix.writeExecline "return404" { } [
+    bins.printf
+    "%s"
+    ''
       HTTP/1.1 404 Not Found
       Content-Type: text/plain; charset=UTF-8
       Connection: close
@@ -287,8 +363,10 @@ let
     ''
   ];
 
-  return500 = depot.nix.writeExecline "return500" {} [
-    bins.printf "%s" ''
+  return500 = depot.nix.writeExecline "return500" { } [
+    bins.printf
+    "%s"
+    ''
       HTTP/1.1 500 Internal Server Error
       Content-Type: text/plain; charset=UTF-8
       Connection: close
@@ -297,10 +375,11 @@ let
     ''
   ];
 
-  capture-stdin = depot.nix.writers.rustSimple {
-    name = "capture-stdin";
-    dependencies = [ depot.users.Profpatsch.execline.exec-helpers ];
-  } ''
+  capture-stdin = depot.nix.writers.rustSimple
+    {
+      name = "capture-stdin";
+      dependencies = [ depot.users.Profpatsch.execline.exec-helpers ];
+    } ''
     extern crate exec_helpers;
     use std::io::Read;
     fn main() {
@@ -337,29 +416,40 @@ let
   cdbRecords =
     with depot.nix.yants;
     defun [ (attrs (either drv string)) string ]
-    (attrs:
-      (lib.concatStrings (lib.mapAttrsToList cdbRecord attrs)) + "\n");
+      (attrs:
+        (lib.concatStrings (lib.mapAttrsToList cdbRecord attrs)) + "\n");
 
   # run cdbmake on a list of key/value pairs (strings)
-  cdbMake = name: attrs: depot.nix.runExecline "${name}.cdb" {
-    stdin = cdbRecords attrs;
-  } [
-    "importas" "out" "out"
+  cdbMake = name: attrs: depot.nix.runExecline "${name}.cdb"
+    {
+      stdin = cdbRecords attrs;
+    } [
+    "importas"
+    "out"
+    "out"
     depot.users.Profpatsch.lib.eprint-stdin
-    "if" [ bins.cdbmake "db" "tmp" ]
-    bins.mv "db" "$out"
+    "if"
+    [ bins.cdbmake "db" "tmp" ]
+    bins.mv
+    "db"
+    "$out"
   ];
 
   # look up a key ($2) in the given cdb ($1)
   cdbLookup = depot.nix.writeExecline "cdb-lookup" { readNArgs = 2; } [
     # cdb ($1) on stdin
-    "redirfd" "-r" "0" "$1"
+    "redirfd"
+    "-r"
+    "0"
+    "$1"
     # key ($2) lookup
-    bins.cdbget "$2"
+    bins.cdbget
+    "$2"
   ];
 
-in depot.nix.readTree.drvTargets {
-   inherit
+in
+depot.nix.readTree.drvTargets {
+  inherit
     router
     depotCgitLink
     site-server
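
Aside: a minimal sketch of how the cdbMake/cdbLookup pair from this file
composes; the route and payload below are made up for illustration only:

    # build a constant database from routes to payloads
    example-cdb = cdbMake "example" {
      "/posts/hello" = "some netencoded page record";
    };

    # execline script that prints the payload stored under /posts/hello
    example-lookup = depot.nix.writeExecline "example-lookup" { } [
      cdbLookup
      example-cdb
      "/posts/hello"
    ];
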
diff --git a/users/Profpatsch/cdb.nix b/users/Profpatsch/cdb.nix
index 8cfaa3ea7ac6..86e0a2d58f24 100644
--- a/users/Profpatsch/cdb.nix
+++ b/users/Profpatsch/cdb.nix
@@ -1,14 +1,15 @@
 { depot, pkgs, ... }:
 
 let
-  cdbListToNetencode = depot.nix.writers.rustSimple {
-    name = "cdb-list-to-netencode";
-    dependencies = [
-      depot.third_party.rust-crates.nom
-      depot.users.Profpatsch.execline.exec-helpers
-      depot.users.Profpatsch.netencode.netencode-rs
-    ];
-  } ''
+  cdbListToNetencode = depot.nix.writers.rustSimple
+    {
+      name = "cdb-list-to-netencode";
+      dependencies = [
+        depot.third_party.rust-crates.nom
+        depot.users.Profpatsch.execline.exec-helpers
+        depot.users.Profpatsch.netencode.netencode-rs
+      ];
+    } ''
     extern crate nom;
     extern crate exec_helpers;
     extern crate netencode;
@@ -84,7 +85,8 @@ let
 
   '';
 
-in {
+in
+{
   inherit
     cdbListToNetencode
     ;
diff --git a/users/Profpatsch/emacs-tree-sitter-move/default.nix b/users/Profpatsch/emacs-tree-sitter-move/default.nix
index fdc059c089b6..a9f259d96d20 100644
--- a/users/Profpatsch/emacs-tree-sitter-move/default.nix
+++ b/users/Profpatsch/emacs-tree-sitter-move/default.nix
@@ -1,3 +1,3 @@
 # nothing yet (TODO: expose shell & tool)
-{...}:
-{}
+{ ... }:
+{ }
diff --git a/users/Profpatsch/emacs-tree-sitter-move/shell.nix b/users/Profpatsch/emacs-tree-sitter-move/shell.nix
index 81d622ac73e5..f400d5c02161 100644
--- a/users/Profpatsch/emacs-tree-sitter-move/shell.nix
+++ b/users/Profpatsch/emacs-tree-sitter-move/shell.nix
@@ -1,14 +1,15 @@
-{ pkgs ? import ../../../third_party {}, ... }:
+{ pkgs ? import ../../../third_party { }, ... }:
 let
   inherit (pkgs) lib;
 
-  treeSitterGrammars = pkgs.runCommandLocal "grammars" {} ''
+  treeSitterGrammars = pkgs.runCommandLocal "grammars" { } ''
     mkdir -p $out/bin
     ${lib.concatStringsSep "\n"
       (lib.mapAttrsToList (name: src: "ln -s ${src}/parser $out/bin/${name}.so") pkgs.tree-sitter.builtGrammars)};
   '';
 
-in pkgs.mkShell {
+in
+pkgs.mkShell {
   buildInputs = [
     pkgs.tree-sitter.builtGrammars.python
   ];
diff --git a/users/Profpatsch/execline/default.nix b/users/Profpatsch/execline/default.nix
index c6a8d284a634..752774e6ad0c 100644
--- a/users/Profpatsch/execline/default.nix
+++ b/users/Profpatsch/execline/default.nix
@@ -1,16 +1,19 @@
 { depot, pkgs, lib, ... }:
 
 let
-  exec-helpers = depot.nix.writers.rustSimpleLib {
-    name = "exec-helpers";
-  } (builtins.readFile ./exec_helpers.rs);
+  exec-helpers = depot.nix.writers.rustSimpleLib
+    {
+      name = "exec-helpers";
+    }
+    (builtins.readFile ./exec_helpers.rs);
 
-  print-one-env = depot.nix.writers.rustSimple {
-    name = "print-one-env";
-    dependencies = [
-      depot.users.Profpatsch.execline.exec-helpers
-    ];
-  } ''
+  print-one-env = depot.nix.writers.rustSimple
+    {
+      name = "print-one-env";
+      dependencies = [
+        depot.users.Profpatsch.execline.exec-helpers
+      ];
+    } ''
     extern crate exec_helpers;
     use std::os::unix::ffi::OsStrExt;
     use std::io::Write;
@@ -25,7 +28,8 @@ let
     }
   '';
 
-in depot.nix.readTree.drvTargets {
+in
+depot.nix.readTree.drvTargets {
   inherit
     exec-helpers
     print-one-env
diff --git a/users/Profpatsch/git-db/default.nix b/users/Profpatsch/git-db/default.nix
index 7c6f1aee7c5c..ad5d927677bf 100644
--- a/users/Profpatsch/git-db/default.nix
+++ b/users/Profpatsch/git-db/default.nix
@@ -1,8 +1,10 @@
 { depot, pkgs, lib, ... }:
 
-depot.nix.writers.rustSimple {
+depot.nix.writers.rustSimple
+{
   name = "git-db";
   dependencies = [
     depot.third_party.rust-crates.git2
   ];
-} (builtins.readFile ./git-db.rs)
+}
+  (builtins.readFile ./git-db.rs)
diff --git a/users/Profpatsch/imap-idle.nix b/users/Profpatsch/imap-idle.nix
index 3ad5375d89af..84af5d0e54a9 100644
--- a/users/Profpatsch/imap-idle.nix
+++ b/users/Profpatsch/imap-idle.nix
@@ -1,14 +1,17 @@
 { depot, pkgs, lib, ... }:
 
 let
-  imap-idle = depot.nix.writers.rustSimple {
-    name = "imap-idle";
-    dependencies = [
-      depot.users.Profpatsch.arglib.netencode.rust
-      depot.third_party.rust-crates.imap
-      depot.third_party.rust-crates.epoll
-      depot.users.Profpatsch.execline.exec-helpers
-    ];
-  } (builtins.readFile ./imap-idle.rs);
+  imap-idle = depot.nix.writers.rustSimple
+    {
+      name = "imap-idle";
+      dependencies = [
+        depot.users.Profpatsch.arglib.netencode.rust
+        depot.third_party.rust-crates.imap
+        depot.third_party.rust-crates.epoll
+        depot.users.Profpatsch.execline.exec-helpers
+      ];
+    }
+    (builtins.readFile ./imap-idle.rs);
 
-in imap-idle
+in
+imap-idle
diff --git a/users/Profpatsch/lens.nix b/users/Profpatsch/lens.nix
index 58d9c27f5242..28f7506bddae 100644
--- a/users/Profpatsch/lens.nix
+++ b/users/Profpatsch/lens.nix
@@ -32,7 +32,7 @@ let
     inherit fst snd;
   };
 
-  swap = {fst, snd}: {
+  swap = { fst, snd }: {
     fst = snd;
     snd = fst;
   };
@@ -71,7 +71,7 @@ let
   lensP = strong: to: pab:
     strong.dimap
       to
-      ({fst,snd}: snd fst)
+      ({ fst, snd }: snd fst)
       (strong.firstP pab);
 
   # first element of a tuple
@@ -112,7 +112,8 @@ let
       (map (accessor: accessor profunctorSubclass) accessors);
 
 
-in {
+in
+{
   inherit
     id
     _
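
Aside: the lens combinators reformatted here are used further up as
`over (field "route") mkRoute`; roughly, `over (field name) f attrs` applies
`f` to that one attribute and leaves the rest untouched. A made-up example:

    over (field "route") (r: "/" + r) { route = "notes/foo"; title = "Foo"; }
    # => { route = "/notes/foo"; title = "Foo"; }
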
diff --git a/users/Profpatsch/lib.nix b/users/Profpatsch/lib.nix
index e3d59b7d8beb..879d87755d56 100644
--- a/users/Profpatsch/lib.nix
+++ b/users/Profpatsch/lib.nix
@@ -1,34 +1,49 @@
 { depot, pkgs, ... }:
 let
   bins = depot.nix.getBins pkgs.coreutils [ "printf" "echo" "cat" "printenv" "tee" ]
-      // depot.nix.getBins pkgs.bash [ "bash" ]
-      // depot.nix.getBins pkgs.fdtools [ "multitee" ]
-      ;
+    // depot.nix.getBins pkgs.bash [ "bash" ]
+    // depot.nix.getBins pkgs.fdtools [ "multitee" ]
+  ;
 
   # Print `msg` and argv to stderr, then execute into argv
-  debugExec = msg: depot.nix.writeExecline "debug-exec" {} [
-    "if" [
-      "fdmove" "-c" "1" "2"
-      "if" [ bins.printf "%s: " msg ]
-      "if" [ bins.echo "$@" ]
+  debugExec = msg: depot.nix.writeExecline "debug-exec" { } [
+    "if"
+    [
+      "fdmove"
+      "-c"
+      "1"
+      "2"
+      "if"
+      [ bins.printf "%s: " msg ]
+      "if"
+      [ bins.echo "$@" ]
     ]
     "$@"
   ];
 
   # Print stdin to stderr and stdout
-  eprint-stdin = depot.nix.writeExecline "eprint-stdin" {} [
-    "pipeline" [ bins.multitee "0-1,2" ] "$@"
+  eprint-stdin = depot.nix.writeExecline "eprint-stdin" { } [
+    "pipeline"
+    [ bins.multitee "0-1,2" ]
+    "$@"
   ];
 
   # Assume the input on stdin is netencode, pretty print it to stderr and forward it to stdout
-  eprint-stdin-netencode = depot.nix.writeExecline "eprint-stdin-netencode" {} [
-    "pipeline" [
+  eprint-stdin-netencode = depot.nix.writeExecline "eprint-stdin-netencode" { } [
+    "pipeline"
+    [
       # move stdout to 3
-      "fdmove" "3" "1"
+      "fdmove"
+      "3"
+      "1"
       # the multitee copies stdin to 1 (the other pipeline end) and 3 (the stdout of the outer pipeline block)
-      "pipeline" [ bins.multitee "0-1,3" ]
+      "pipeline"
+      [ bins.multitee "0-1,3" ]
       # make stderr the stdout of pretty, merging with the stderr of pretty
-      "fdmove" "-c" "1" "2"
+      "fdmove"
+      "-c"
+      "1"
+      "2"
       depot.users.Profpatsch.netencode.pretty
     ]
     "$@"
@@ -36,9 +51,11 @@ let
 
   # print the given environment variable in $1 to stderr, then execute into the rest of argv
   eprintenv = depot.nix.writeExecline "eprintenv" { readNArgs = 1; } [
-    "ifelse" [ "fdmove" "-c" "1" "2" bins.printenv "$1" ]
+    "ifelse"
+    [ "fdmove" "-c" "1" "2" bins.printenv "$1" ]
     [ "$@" ]
-    "if" [ depot.tools.eprintf "eprintenv: could not find \"\${1}\" in the environment\n" ]
+    "if"
+    [ depot.tools.eprintf "eprintenv: could not find \"\${1}\" in the environment\n" ]
     "$@"
   ];
 
@@ -52,24 +69,34 @@ let
   #   stdout: foo\n
   #   stderr: foo\nbar\n
   split-stdin = depot.nix.writeExecline "split-stdin" { argMode = "env"; } [
-    "pipeline" [
+    "pipeline"
+    [
       # this is horrible yes but the quickest way I knew how to implement it
-      "runblock" "1" bins.bash "-c" ''${bins.tee} >("$@")'' "bash-split-stdin"
+      "runblock"
+      "1"
+      bins.bash
+      "-c"
+      ''${bins.tee} >("$@")''
+      "bash-split-stdin"
     ]
-    "runblock" "-r" "1"
+    "runblock"
+    "-r"
+    "1"
   ];
 
   # remove everything but a few selected environment variables
   runInEmptyEnv = keepVars:
     let
-        importas = pkgs.lib.concatMap (var: [ "importas" "-i" var var ]) keepVars;
-        # we have to explicitly call export here, because PATH is probably empty
-        export = pkgs.lib.concatMap (var: [ "${pkgs.execline}/bin/export" var ''''${${var}}'' ]) keepVars;
-    in depot.nix.writeExecline "empty-env" {}
-         (importas ++ [ "emptyenv" ] ++ export ++ [ "${pkgs.execline}/bin/exec" "$@" ]);
+      importas = pkgs.lib.concatMap (var: [ "importas" "-i" var var ]) keepVars;
+      # we have to explicitly call export here, because PATH is probably empty
+      export = pkgs.lib.concatMap (var: [ "${pkgs.execline}/bin/export" var ''''${${var}}'' ]) keepVars;
+    in
+    depot.nix.writeExecline "empty-env" { }
+      (importas ++ [ "emptyenv" ] ++ export ++ [ "${pkgs.execline}/bin/exec" "$@" ]);
 
 
-in {
+in
+{
   inherit
     debugExec
     eprint-stdin
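
Aside: reading the definition above off directly, `runInEmptyEnv [ "PATH" ]`
assembles an execline script roughly equivalent to (store paths elided):

    importas -i PATH PATH
    emptyenv
    export PATH ${PATH}
    exec $@
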
diff --git a/users/Profpatsch/netencode/default.nix b/users/Profpatsch/netencode/default.nix
index 739bda3d78c9..d38925814832 100644
--- a/users/Profpatsch/netencode/default.nix
+++ b/users/Profpatsch/netencode/default.nix
@@ -1,31 +1,36 @@
 { depot, pkgs, lib, ... }:
 
 let
-  netencode-rs = depot.nix.writers.rustSimpleLib {
+  netencode-rs = depot.nix.writers.rustSimpleLib
+    {
       name = "netencode";
       dependencies = [
         depot.third_party.rust-crates.nom
         depot.users.Profpatsch.execline.exec-helpers
       ];
-    } (builtins.readFile ./netencode.rs);
+    }
+    (builtins.readFile ./netencode.rs);
 
   gen = import ./gen.nix { inherit lib; };
 
-  pretty-rs = depot.nix.writers.rustSimpleLib {
-    name = "netencode-pretty";
-    dependencies = [
-      netencode-rs
-    ];
-  } (builtins.readFile ./pretty.rs);
+  pretty-rs = depot.nix.writers.rustSimpleLib
+    {
+      name = "netencode-pretty";
+      dependencies = [
+        netencode-rs
+      ];
+    }
+    (builtins.readFile ./pretty.rs);
 
-  pretty = depot.nix.writers.rustSimple {
-    name = "netencode-pretty";
-    dependencies = [
-      netencode-rs
-      pretty-rs
-      depot.users.Profpatsch.execline.exec-helpers
-    ];
-  } ''
+  pretty = depot.nix.writers.rustSimple
+    {
+      name = "netencode-pretty";
+      dependencies = [
+        netencode-rs
+        pretty-rs
+        depot.users.Profpatsch.execline.exec-helpers
+      ];
+    } ''
     extern crate netencode;
     extern crate netencode_pretty;
     extern crate exec_helpers;
@@ -41,24 +46,27 @@ let
     }
   '';
 
-  netencode-mustache = depot.nix.writers.rustSimple {
-    name = "netencode_mustache";
-    dependencies = [
-      depot.users.Profpatsch.arglib.netencode.rust
-      netencode-rs
-      depot.third_party.rust-crates.mustache
-    ];
-  } (builtins.readFile ./netencode-mustache.rs);
+  netencode-mustache = depot.nix.writers.rustSimple
+    {
+      name = "netencode_mustache";
+      dependencies = [
+        depot.users.Profpatsch.arglib.netencode.rust
+        netencode-rs
+        depot.third_party.rust-crates.mustache
+      ];
+    }
+    (builtins.readFile ./netencode-mustache.rs);
 
 
-  record-get = depot.nix.writers.rustSimple {
-    name = "record-get";
-    dependencies = [
-      netencode-rs
-      depot.users.Profpatsch.execline.exec-helpers
-      depot.users.Profpatsch.arglib.netencode.rust
-    ];
-  } ''
+  record-get = depot.nix.writers.rustSimple
+    {
+      name = "record-get";
+      dependencies = [
+        netencode-rs
+        depot.users.Profpatsch.execline.exec-helpers
+        depot.users.Profpatsch.arglib.netencode.rust
+      ];
+    } ''
     extern crate netencode;
     extern crate arglib_netencode;
     extern crate exec_helpers;
@@ -80,13 +88,14 @@ let
     }
   '';
 
-  record-splice-env = depot.nix.writers.rustSimple {
-    name = "record-splice-env";
-    dependencies = [
-      netencode-rs
-      depot.users.Profpatsch.execline.exec-helpers
-    ];
-  } ''
+  record-splice-env = depot.nix.writers.rustSimple
+    {
+      name = "record-splice-env";
+      dependencies = [
+        netencode-rs
+        depot.users.Profpatsch.execline.exec-helpers
+      ];
+    } ''
     extern crate netencode;
     extern crate exec_helpers;
     use netencode::dec::{Record, Try, ScalarAsBytes, Decoder, DecodeError};
@@ -109,13 +118,14 @@ let
     }
   '';
 
-  env-splice-record = depot.nix.writers.rustSimple {
-    name = "env-splice-record";
-    dependencies = [
-      netencode-rs
-      depot.users.Profpatsch.execline.exec-helpers
-    ];
-  } ''
+  env-splice-record = depot.nix.writers.rustSimple
+    {
+      name = "env-splice-record";
+      dependencies = [
+        netencode-rs
+        depot.users.Profpatsch.execline.exec-helpers
+      ];
+    } ''
     extern crate netencode;
     extern crate exec_helpers;
     use netencode::{T};
@@ -135,7 +145,8 @@ let
     }
   '';
 
-in depot.nix.readTree.drvTargets {
+in
+depot.nix.readTree.drvTargets {
   inherit
     netencode-rs
     pretty-rs
diff --git a/users/Profpatsch/netencode/gen.nix b/users/Profpatsch/netencode/gen.nix
index 305ff7b08dd6..efc9629ca0df 100644
--- a/users/Profpatsch/netencode/gen.nix
+++ b/users/Profpatsch/netencode/gen.nix
@@ -27,29 +27,33 @@ let
   concatStrings = builtins.concatStringsSep "";
 
   record = lokv: netstring "{" "}"
-    (concatStrings (map ({key, val}: tag key val) lokv));
+    (concatStrings (map ({ key, val }: tag key val) lokv));
 
   list = l: netstring "[" "]" (concatStrings l);
 
   dwim = val:
-    let match = {
-      "bool" = n1;
-      "int" = i6;
-      "string" = text;
-      "set" = attrs:
-        # it could be a derivation, then just return the path
-        if attrs.type or "" == "derivation" then text "${attrs}"
-        else
-          record (lib.mapAttrsToList
-          (k: v: {
-            key = k;
-            val = dwim v;
-          }) attrs);
-      "list" = l: list (map dwim l);
-    };
-    in match.${builtins.typeOf val} val;
+    let
+      match = {
+        "bool" = n1;
+        "int" = i6;
+        "string" = text;
+        "set" = attrs:
+          # it could be a derivation, then just return the path
+          if attrs.type or "" == "derivation" then text "${attrs}"
+          else
+            record (lib.mapAttrsToList
+              (k: v: {
+                key = k;
+                val = dwim v;
+              })
+              attrs);
+        "list" = l: list (map dwim l);
+      };
+    in
+    match.${builtins.typeOf val} val;
 
-in {
+in
+{
   inherit
     unit
     n1
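
Aside: per the match table above, `dwim` picks a generator by
`builtins.typeOf`; schematically (values made up):

    dwim true        == n1 true
    dwim 42          == i6 42
    dwim "foo"       == text "foo"
    dwim { a = 1; }  == record [ { key = "a"; val = i6 1; } ]
    dwim [ "x" ]     == list [ (text "x") ]
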
diff --git a/users/Profpatsch/netstring/default.nix b/users/Profpatsch/netstring/default.nix
index b4990cae6766..e85cf24dd8e6 100644
--- a/users/Profpatsch/netstring/default.nix
+++ b/users/Profpatsch/netstring/default.nix
@@ -12,9 +12,10 @@ let
         (k: v: toNetstring (toNetstring k + toNetstring v))
         attrs);
 
-  python-netstring = depot.users.Profpatsch.writers.python3Lib {
-    name = "netstring";
-  } ''
+  python-netstring = depot.users.Profpatsch.writers.python3Lib
+    {
+      name = "netstring";
+    } ''
     def read_netstring(bytes):
         (int_length, rest) = bytes.split(sep=b':', maxsplit=1)
         val = rest[:int(int_length)]
@@ -39,9 +40,10 @@ let
         return res
   '';
 
-  rust-netstring = depot.nix.writers.rustSimpleLib {
-    name = "netstring";
-  } ''
+  rust-netstring = depot.nix.writers.rustSimpleLib
+    {
+      name = "netstring";
+    } ''
     pub fn to_netstring(s: &[u8]) -> Vec<u8> {
         let len = s.len();
         // length of the integer as ascii
@@ -55,12 +57,13 @@ let
     }
   '';
 
-in depot.nix.readTree.drvTargets {
+in
+depot.nix.readTree.drvTargets {
   inherit
     toNetstring
     toNetstringList
     toNetstringKeyVal
     python-netstring
     rust-netstring
-      ;
+    ;
 }
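
Aside: assuming the usual netstring encoding of `<length>:<bytes>,`, the
helpers above behave like:

    toNetstring "hello"                 # => "5:hello,"
    toNetstringKeyVal { foo = "bar"; }  # => "12:3:foo,3:bar,,"
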
diff --git a/users/Profpatsch/netstring/tests/default.nix b/users/Profpatsch/netstring/tests/default.nix
index 710ba3d30526..6a1062988f1e 100644
--- a/users/Profpatsch/netstring/tests/default.nix
+++ b/users/Profpatsch/netstring/tests/default.nix
@@ -2,12 +2,13 @@
 
 let
 
-  python-netstring-test = depot.users.Profpatsch.writers.python3 {
-    name = "python-netstring-test";
-    libraries = p: [
-      depot.users.Profpatsch.netstring.python-netstring
-    ];
-  } ''
+  python-netstring-test = depot.users.Profpatsch.writers.python3
+    {
+      name = "python-netstring-test";
+      libraries = p: [
+        depot.users.Profpatsch.netstring.python-netstring
+      ];
+    } ''
     import netstring
 
     def assEq(left, right):
@@ -33,12 +34,13 @@ let
     )
   '';
 
-  rust-netstring-test = depot.nix.writers.rustSimple {
-    name = "rust-netstring-test";
-    dependencies = [
-      depot.users.Profpatsch.netstring.rust-netstring
-    ];
-  } ''
+  rust-netstring-test = depot.nix.writers.rustSimple
+    {
+      name = "rust-netstring-test";
+      dependencies = [
+        depot.users.Profpatsch.netstring.rust-netstring
+      ];
+    } ''
     extern crate netstring;
 
     fn main() {
@@ -53,7 +55,8 @@ let
     }
   '';
 
-in depot.nix.readTree.drvTargets {
+in
+depot.nix.readTree.drvTargets {
   inherit
     python-netstring-test
     rust-netstring-test
diff --git a/users/Profpatsch/nix-home/default.nix b/users/Profpatsch/nix-home/default.nix
index cf9ab0d4d4ae..8f52055ade31 100644
--- a/users/Profpatsch/nix-home/default.nix
+++ b/users/Profpatsch/nix-home/default.nix
@@ -2,97 +2,145 @@
 
 let
   bins = depot.nix.getBins pkgs.stow [ "stow" ]
-      // depot.nix.getBins pkgs.coreutils [ "mkdir" "ln" "printenv" "rm" ]
-      // depot.nix.getBins pkgs.xe [ "xe" ]
-      // depot.nix.getBins pkgs.lr [ "lr" ]
-      // depot.nix.getBins pkgs.nix [ "nix-store" ]
-      ;
+    // depot.nix.getBins pkgs.coreutils [ "mkdir" "ln" "printenv" "rm" ]
+    // depot.nix.getBins pkgs.xe [ "xe" ]
+    // depot.nix.getBins pkgs.lr [ "lr" ]
+    // depot.nix.getBins pkgs.nix [ "nix-store" ]
+  ;
 
   # run stow to populate the target directory with the given stow package, read from stowDir.
   # Bear in mind that `stowDirOriginPath` should always be semantically bound to the given `stowDir`, otherwise stow might become rather confused.
-  runStow = {
-    # “stow package” to stow (see manpage)
-    stowPackage,
-    # “target directory” to stow in (see manpage)
-    targetDir,
-    # The “stow directory” (see manpage), containing “stow packages” (see manpage)
-    stowDir,
-    # representative directory for the stowDir in the file system, against which stow will create relative links.
-    # ATTN: this is always overwritten with the contents of `stowDir`! You shouldn’t re-use the same `stowDirOriginPath` for different `stowDir`s, otherwise there might be surprises.
-    stowDirOriginPath,
-  }: depot.nix.writeExecline "stow-${stowPackage}" {} [
-    # first, create a temporary stow directory to use as source
-    # (stow will use it to determine the origin of files)
-    "if" [ bins.mkdir "-p" stowDirOriginPath ]
-    # remove old symlinks
-    "if" [
-      "pipeline" [
-        bins.lr "-0" "-t" "depth == 1 && type == l" stowDirOriginPath
+  runStow =
+    {
+      # “stow package” to stow (see manpage)
+      stowPackage
+    , # “target directory” to stow in (see manpage)
+      targetDir
+    , # The “stow directory” (see manpage), containing “stow packages” (see manpage)
+      stowDir
+    , # representative directory for the stowDir in the file system, against which stow will create relative links.
+      # ATTN: this is always overwritten with the contents of `stowDir`! You shouldn’t re-use the same `stowDirOriginPath` for different `stowDir`s, otherwise there might be surprises.
+      stowDirOriginPath
+    ,
+    }: depot.nix.writeExecline "stow-${stowPackage}" { } [
+      # first, create a temporary stow directory to use as source
+      # (stow will use it to determine the origin of files)
+      "if"
+      [ bins.mkdir "-p" stowDirOriginPath ]
+      # remove old symlinks
+      "if"
+      [
+        "pipeline"
+        [
+          bins.lr
+          "-0"
+          "-t"
+          "depth == 1 && type == l"
+          stowDirOriginPath
+        ]
+        bins.xe
+        "-0"
+        bins.rm
       ]
-      bins.xe "-0" bins.rm
-    ]
-    # create an indirect gc root so our config is not cleaned under our asses by a garbage collect
-    "if" [
-      bins.nix-store
+      # create an indirect gc root so our config is not cleaned under our asses by a garbage collect
+      "if"
+      [
+        bins.nix-store
         "--realise"
         "--indirect"
-        "--add-root" "${stowDirOriginPath}/.nix-stowdir-gc-root"
+        "--add-root"
+        "${stowDirOriginPath}/.nix-stowdir-gc-root"
         stowDir
-    ]
-    # populate with new stow targets
-    "if" [
-      "elglob" "-w0" "stowPackages" "${stowDir}/*"
-      bins.ln "--force" "-st" stowDirOriginPath "$stowPackages"
-    ]
-    # stow always looks for $HOME/.stowrc to read more arguments
-    "export" "HOME" "/homeless-shelter"
-    bins.stow
+      ]
+      # populate with new stow targets
+      "if"
+      [
+        "elglob"
+        "-w0"
+        "stowPackages"
+        "${stowDir}/*"
+        bins.ln
+        "--force"
+        "-st"
+        stowDirOriginPath
+        "$stowPackages"
+      ]
+      # stow always looks for $HOME/.stowrc to read more arguments
+      "export"
+      "HOME"
+      "/homeless-shelter"
+      bins.stow
       # always run restow for now; this does more stat but will remove stale links
       "--restow"
-      "--dir" stowDirOriginPath
-      "--target" targetDir
+      "--dir"
+      stowDirOriginPath
+      "--target"
+      targetDir
       stowPackage
-  ];
+    ];
 
   # create a stow dir from a list of drv paths and a stow package name.
   makeStowDir =
     (with depot.nix.yants;
-     defun
-       [ (list (struct {
+    defun
+      [
+        (list (struct {
           originalDir = drv;
           stowPackage = string;
         }))
         drv
-       ] )
-    (dirs:
-      depot.nix.runExecline "make-stow-dir" {
-        stdin = lib.pipe dirs [
-          (map depot.users.Profpatsch.netencode.gen.dwim)
-          depot.users.Profpatsch.netstring.toNetstringList
-        ];
-      } [
-        "importas" "out" "out"
-        "if" [ bins.mkdir "-p" "$out" ]
-        "forstdin" "-d" "" "-o" "0" "line"
-        "pipeline" [
-          depot.users.Profpatsch.execline.print-one-env "line"
-        ]
-        depot.users.Profpatsch.netencode.record-splice-env
-        "importas" "-ui" "originalDir" "originalDir"
-        "importas" "-ui" "stowPackage" "stowPackage"
-        bins.ln "-sT" "$originalDir" "\${out}/\${stowPackage}"
-      ]);
+      ])
+      (dirs:
+        depot.nix.runExecline "make-stow-dir"
+          {
+            stdin = lib.pipe dirs [
+              (map depot.users.Profpatsch.netencode.gen.dwim)
+              depot.users.Profpatsch.netstring.toNetstringList
+            ];
+          } [
+          "importas"
+          "out"
+          "out"
+          "if"
+          [ bins.mkdir "-p" "$out" ]
+          "forstdin"
+          "-d"
+          ""
+          "-o"
+          "0"
+          "line"
+          "pipeline"
+          [
+            depot.users.Profpatsch.execline.print-one-env
+            "line"
+          ]
+          depot.users.Profpatsch.netencode.record-splice-env
+          "importas"
+          "-ui"
+          "originalDir"
+          "originalDir"
+          "importas"
+          "-ui"
+          "stowPackage"
+          "stowPackage"
+          bins.ln
+          "-sT"
+          "$originalDir"
+          "\${out}/\${stowPackage}"
+        ]);
 
 in
 
 # TODO: temp setup
-lib.pipe {} [
+lib.pipe { } [
   (_: makeStowDir [{
     stowPackage = "scripts";
     originalDir = pkgs.linkFarm "scripts-farm" [
-        { name = "scripts/ytextr";
-          path = depot.users.Profpatsch.ytextr; }
-      ];
+      {
+        name = "scripts/ytextr";
+        path = depot.users.Profpatsch.ytextr;
+      }
+    ];
   }])
   (d: runStow {
     stowDir = d;
diff --git a/users/Profpatsch/nixpkgs-rewriter/default.nix b/users/Profpatsch/nixpkgs-rewriter/default.nix
index 787162d4973a..0740a870aa4a 100644
--- a/users/Profpatsch/nixpkgs-rewriter/default.nix
+++ b/users/Profpatsch/nixpkgs-rewriter/default.nix
@@ -8,12 +8,12 @@ let
     ;
 
   bins = depot.nix.getBins pkgs.coreutils [ "head" "shuf" ]
-      // depot.nix.getBins pkgs.jq [ "jq" ]
-      // depot.nix.getBins pkgs.findutils [ "xargs" ]
-      // depot.nix.getBins pkgs.gnused [ "sed" ]
-      ;
+    // depot.nix.getBins pkgs.jq [ "jq" ]
+    // depot.nix.getBins pkgs.findutils [ "xargs" ]
+    // depot.nix.getBins pkgs.gnused [ "sed" ]
+  ;
 
-  export-json-object = pkgs.writers.writePython3 "export-json-object" {} ''
+  export-json-object = pkgs.writers.writePython3 "export-json-object" { } ''
     import json
     import sys
     import os
@@ -29,34 +29,50 @@ let
     os.execvp(sys.argv[1], sys.argv[1:])
   '';
 
-  meta-stdenv-lib = pkgs.writers.writeHaskell "meta-stdenv-lib" {
-    libraries = [
-      pkgs.haskellPackages.hnix
-      pkgs.haskellPackages.aeson
-    ];
-  } ./MetaStdenvLib.hs;
+  meta-stdenv-lib = pkgs.writers.writeHaskell "meta-stdenv-lib"
+    {
+      libraries = [
+        pkgs.haskellPackages.hnix
+        pkgs.haskellPackages.aeson
+      ];
+    } ./MetaStdenvLib.hs;
 
   replace-between-lines = writeExecline "replace-between-lines" { readNArgs = 1; } [
-    "importas" "-ui" "file" "fileName"
-    "importas" "-ui" "from" "fromLine"
-    "importas" "-ui" "to" "toLine"
-    "if" [ depot.tools.eprintf "%s-%s\n" "$from" "$to" ]
+    "importas"
+    "-ui"
+    "file"
+    "fileName"
+    "importas"
+    "-ui"
+    "from"
+    "fromLine"
+    "importas"
+    "-ui"
+    "to"
+    "toLine"
+    "if"
+    [ depot.tools.eprintf "%s-%s\n" "$from" "$to" ]
     (debugExec "adding lib")
     bins.sed
-      "-e" "\${from},\${to} \${1}"
-      "-i" "$file"
+    "-e"
+    "\${from},\${to} \${1}"
+    "-i"
+    "$file"
   ];
 
   add-lib-if-necessary = writeExecline "add-lib-if-necessary" { readNArgs = 1; } [
-    "pipeline" [ meta-stdenv-lib "$1" ]
-     export-json-object
-     # first replace any stdenv.lib mentions in the arg header
-     # if this is not done, the replace below kills these.
-     # Since we want it anyway ultimately, let’s do it here.
-     "if" [ replace-between-lines "s/stdenv\.lib/lib/" ]
-     # then add the lib argument
-     # (has to be before stdenv, otherwise default arguments might be in the way)
-     replace-between-lines "s/stdenv/lib, stdenv/"
+    "pipeline"
+    [ meta-stdenv-lib "$1" ]
+    export-json-object
+    # first replace any stdenv.lib mentions in the arg header
+    # if this is not done, the replace below kills these.
+    # Since we want it anyway ultimately, let’s do it here.
+    "if"
+    [ replace-between-lines "s/stdenv\.lib/lib/" ]
+    # then add the lib argument
+    # (has to be before stdenv, otherwise default arguments might be in the way)
+    replace-between-lines
+    "s/stdenv/lib, stdenv/"
   ];
 
   metaString = ''meta = with stdenv.lib; {'';
@@ -80,33 +96,53 @@ let
   '';
 
   instantiate-nixpkgs-randomly = writeExecline "instantiate-nixpkgs-randomly" { readNArgs = 1; } [
-    "export" "NIXPKGS_ALLOW_BROKEN" "1"
-    "export" "NIXPKGS_ALLOW_UNFREE" "1"
-    "export" "NIXPKGS_ALLOW_INSECURE" "1"
-    "export" "NIXPKGS_ALLOW_UNSUPPORTED_SYSTEM" "1"
-    "pipeline" [
+    "export"
+    "NIXPKGS_ALLOW_BROKEN"
+    "1"
+    "export"
+    "NIXPKGS_ALLOW_UNFREE"
+    "1"
+    "export"
+    "NIXPKGS_ALLOW_INSECURE"
+    "1"
+    "export"
+    "NIXPKGS_ALLOW_UNSUPPORTED_SYSTEM"
+    "1"
+    "pipeline"
+    [
       "nix"
-        "eval"
-        "--raw"
-        ''(
+      "eval"
+      "--raw"
+      ''(
           let pkgs = import ''${1} {};
           in builtins.toJSON (builtins.attrNames pkgs)
         )''
     ]
-    "pipeline" [ bins.jq "-r" ".[]" ]
-    "pipeline" [ bins.shuf ]
-    "pipeline" [ bins.head "-n" "1000" ]
-    bins.xargs "-I" "{}" "-n1"
-    "if" [ depot.tools.eprintf "instantiating %s\n" "{}" ]
-    "nix-instantiate" "$1" "-A" "{}"
+    "pipeline"
+    [ bins.jq "-r" ".[]" ]
+    "pipeline"
+    [ bins.shuf ]
+    "pipeline"
+    [ bins.head "-n" "1000" ]
+    bins.xargs
+    "-I"
+    "{}"
+    "-n1"
+    "if"
+    [ depot.tools.eprintf "instantiating %s\n" "{}" ]
+    "nix-instantiate"
+    "$1"
+    "-A"
+    "{}"
   ];
 
-in depot.nix.readTree.drvTargets {
+in
+depot.nix.readTree.drvTargets {
   inherit
-   instantiate-nixpkgs-randomly
-  # requires hnix, which we don’t want in tvl for now
-  # uncomment manually if you want to use it.
-  #   meta-stdenv-lib
-  #   replace-stdenv-lib
+    instantiate-nixpkgs-randomly
+    # requires hnix, which we don’t want in tvl for now
+    # uncomment manually if you want to use it.
+    #   meta-stdenv-lib
+    #   replace-stdenv-lib
     ;
 }
diff --git a/users/Profpatsch/read-http.nix b/users/Profpatsch/read-http.nix
index 854a11b7d099..d9ad6fc30d94 100644
--- a/users/Profpatsch/read-http.nix
+++ b/users/Profpatsch/read-http.nix
@@ -2,15 +2,18 @@
 
 let
 
-  read-http = depot.nix.writers.rustSimple {
-    name = "read-http";
-    dependencies = [
-      depot.third_party.rust-crates.ascii
-      depot.third_party.rust-crates.httparse
-      depot.users.Profpatsch.netencode.netencode-rs
-      depot.users.Profpatsch.arglib.netencode.rust
-      depot.users.Profpatsch.execline.exec-helpers
-    ];
-  } (builtins.readFile ./read-http.rs);
+  read-http = depot.nix.writers.rustSimple
+    {
+      name = "read-http";
+      dependencies = [
+        depot.third_party.rust-crates.ascii
+        depot.third_party.rust-crates.httparse
+        depot.users.Profpatsch.netencode.netencode-rs
+        depot.users.Profpatsch.arglib.netencode.rust
+        depot.users.Profpatsch.execline.exec-helpers
+      ];
+    }
+    (builtins.readFile ./read-http.rs);
 
-in read-http
+in
+read-http
diff --git a/users/Profpatsch/reverse-haskell-deps.nix b/users/Profpatsch/reverse-haskell-deps.nix
index b47347ea9fea..6df7bc6329cd 100644
--- a/users/Profpatsch/reverse-haskell-deps.nix
+++ b/users/Profpatsch/reverse-haskell-deps.nix
@@ -5,22 +5,27 @@
 
 let
 
-  rev = depot.nix.writeExecline "reverse-haskell-deps" {} [
-    "pipeline" [
-      "${pkgs.curl}/bin/curl" "-L" "https://packdeps.haskellers.com/reverse"
+  rev = depot.nix.writeExecline "reverse-haskell-deps" { } [
+    "pipeline"
+    [
+      "${pkgs.curl}/bin/curl"
+      "-L"
+      "https://packdeps.haskellers.com/reverse"
     ]
     rev-hs
 
   ];
 
-  rev-hs = pkgs.writers.writeHaskell "revers-haskell-deps-hs" {
-    libraries =  [
-      pkgs.haskellPackages.nicify-lib
-      pkgs.haskellPackages.tagsoup
-    ];
+  rev-hs = pkgs.writers.writeHaskell "revers-haskell-deps-hs"
+    {
+      libraries = [
+        pkgs.haskellPackages.nicify-lib
+        pkgs.haskellPackages.tagsoup
+      ];
 
-  }
+    }
     ./reverse-haskell-deps.hs;
 
 
-in rev
+in
+rev
diff --git a/users/Profpatsch/struct-edit/default.nix b/users/Profpatsch/struct-edit/default.nix
index 970cdd4d028b..11a7200ce427 100644
--- a/users/Profpatsch/struct-edit/default.nix
+++ b/users/Profpatsch/struct-edit/default.nix
@@ -1,13 +1,13 @@
 { depot, ... }:
 depot.nix.buildGo.program {
-    name = "struct-edit";
-    srcs = [
-      ./main.go
-    ];
-    deps = [
-      depot.third_party.gopkgs."github.com".charmbracelet.bubbletea
-      depot.third_party.gopkgs."github.com".charmbracelet.lipgloss
-      depot.third_party.gopkgs."github.com".muesli.termenv
-      depot.third_party.gopkgs."github.com".mattn.go-isatty
-    ];
+  name = "struct-edit";
+  srcs = [
+    ./main.go
+  ];
+  deps = [
+    depot.third_party.gopkgs."github.com".charmbracelet.bubbletea
+    depot.third_party.gopkgs."github.com".charmbracelet.lipgloss
+    depot.third_party.gopkgs."github.com".muesli.termenv
+    depot.third_party.gopkgs."github.com".mattn.go-isatty
+  ];
 }
diff --git a/users/Profpatsch/tree-sitter.nix b/users/Profpatsch/tree-sitter.nix
index 4f81b8e7a77c..2224da2a3b8c 100644
--- a/users/Profpatsch/tree-sitter.nix
+++ b/users/Profpatsch/tree-sitter.nix
@@ -2,17 +2,18 @@
 
 let
   bins = depot.nix.getBins pkgs.coreutils [ "head" "printf" "cat" ]
-      // depot.nix.getBins pkgs.ncurses [ "tput" ]
-      // depot.nix.getBins pkgs.bc [ "bc" ]
-      // depot.nix.getBins pkgs.ocamlPackages.sexp [ "sexp" ];
-
-  print-ast = depot.nix.writers.rustSimple {
-    name = "print-ast";
-    dependencies = with depot.third_party.rust-crates; [
-      libloading
-      tree-sitter
-    ];
-  } ''
+    // depot.nix.getBins pkgs.ncurses [ "tput" ]
+    // depot.nix.getBins pkgs.bc [ "bc" ]
+    // depot.nix.getBins pkgs.ocamlPackages.sexp [ "sexp" ];
+
+  print-ast = depot.nix.writers.rustSimple
+    {
+      name = "print-ast";
+      dependencies = with depot.third_party.rust-crates; [
+        libloading
+        tree-sitter
+      ];
+    } ''
     extern crate libloading;
     extern crate tree_sitter;
     use std::mem;
@@ -58,13 +59,14 @@ let
     };
   };
 
-  watch-file-modified = depot.nix.writers.rustSimple {
-    name = "watch-file-modified";
-    dependencies = [
-      depot.third_party.rust-crates.inotify
-      depot.users.Profpatsch.netstring.rust-netstring
-    ];
-  } ''
+  watch-file-modified = depot.nix.writers.rustSimple
+    {
+      name = "watch-file-modified";
+      dependencies = [
+        depot.third_party.rust-crates.inotify
+        depot.users.Profpatsch.netstring.rust-netstring
+      ];
+    } ''
     extern crate inotify;
     extern crate netstring;
     use inotify::{EventMask, WatchMask, Inotify};
@@ -101,75 +103,103 @@ let
   '';
 
   # clear screen and set LINES and COLUMNS to terminal height & width
-  clear-screen = depot.nix.writeExecline "clear-screen" {} [
-    "if" [ bins.tput "clear" ]
-    "backtick" "-in" "LINES" [ bins.tput "lines" ]
-    "backtick" "-in" "COLUMNS" [ bins.tput "cols" ]
+  clear-screen = depot.nix.writeExecline "clear-screen" { } [
+    "if"
+    [ bins.tput "clear" ]
+    "backtick"
+    "-in"
+    "LINES"
+    [ bins.tput "lines" ]
+    "backtick"
+    "-in"
+    "COLUMNS"
+    [ bins.tput "cols" ]
     "$@"
   ];
 
   print-nix-file = depot.nix.writeExecline "print-nix-file" { readNArgs = 1; } [
-    "pipeline" [ print-ast "${tree-sitter-nix}/parser" "tree_sitter_nix" "$1" ]
-    "pipeline" [ bins.sexp "print" ]
+    "pipeline"
+    [ print-ast "${tree-sitter-nix}/parser" "tree_sitter_nix" "$1" ]
+    "pipeline"
+    [ bins.sexp "print" ]
     clear-screen
-    "importas" "-ui" "lines" "LINES"
-    "backtick" "-in" "ls" [
+    "importas"
+    "-ui"
+    "lines"
+    "LINES"
+    "backtick"
+    "-in"
+    "ls"
+    [
       "pipeline"
-        # when you pull out bc to decrement an integer it’s time to switch to python lol
-        [ bins.printf "x=%s; --x\n" "$lines" ]
-        bins.bc
+      # when you pull out bc to decrement an integer it’s time to switch to python lol
+      [ bins.printf "x=%s; --x\n" "$lines" ]
+      bins.bc
     ]
-    "importas" "-ui" "l" "ls"
-    bins.head "-n\${l}"
+    "importas"
+    "-ui"
+    "l"
+    "ls"
+    bins.head
+    "-n\${l}"
   ];
 
   print-nix-file-on-update = depot.nix.writeExecline "print-nix-file-on-update" { readNArgs = 1; } [
-    "if" [ print-nix-file "$1" ]
-    "pipeline" [ watch-file-modified "$1" ]
-    "forstdin" "-d" "" "file"
-    "importas" "file" "file"
-    print-nix-file "$file"
+    "if"
+    [ print-nix-file "$1" ]
+    "pipeline"
+    [ watch-file-modified "$1" ]
+    "forstdin"
+    "-d"
+    ""
+    "file"
+    "importas"
+    "file"
+    "file"
+    print-nix-file
+    "$file"
   ];
 
   # copied from nixpkgs
   buildTreeSitterGrammar =
-      {
-        # language name
-        language
-        # source for the language grammar
-      , source
-      }:
-
-      pkgs.stdenv.mkDerivation {
-
-        pname = "${language}-grammar";
-        inherit (pkgs.tree-sitter) version;
-
-        src = source;
-
-        buildInputs = [ pkgs.tree-sitter ];
-
-        dontUnpack = true;
-        configurePhase= ":";
-        buildPhase = ''
-          runHook preBuild
-          scanner_cc="$src/src/scanner.cc"
-          if [ ! -f "$scanner_cc" ]; then
-            scanner_cc=""
-          fi
-          $CXX -I$src/src/ -c $scanner_cc
-          $CC -I$src/src/ -shared -o parser -Os  scanner.o $src/src/parser.c -lstdc++
-          runHook postBuild
-        '';
-        installPhase = ''
-          runHook preInstall
-          mkdir $out
-          mv parser $out/
-          runHook postInstall
-        '';
-      };
-
-in depot.nix.readTree.drvTargets {
+    {
+      # language name
+      language
+      # source for the language grammar
+    , source
+    }:
+
+    pkgs.stdenv.mkDerivation {
+
+      pname = "${language}-grammar";
+      inherit (pkgs.tree-sitter) version;
+
+      src = source;
+
+      buildInputs = [ pkgs.tree-sitter ];
+
+      dontUnpack = true;
+      configurePhase = ":";
+      buildPhase = ''
+        runHook preBuild
+        scanner_cc="$src/src/scanner.cc"
+        if [ ! -f "$scanner_cc" ]; then
+          scanner_cc=""
+        fi
+        $CXX -I$src/src/ -c $scanner_cc
+        $CC -I$src/src/ -shared -o parser -Os  scanner.o $src/src/parser.c -lstdc++
+        runHook postBuild
+      '';
+      installPhase = ''
+        runHook preInstall
+        mkdir $out
+        mv parser $out/
+        runHook postInstall
+      '';
+    };
+
+in
+depot.nix.readTree.drvTargets {
   inherit
     print-ast
     tree-sitter-nix
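
Aside: a hedged sketch of how `buildTreeSitterGrammar` is presumably
instantiated for the `tree-sitter-nix` grammar referenced above; the source
attribute is a placeholder, not the real fetcher call:

    tree-sitter-nix = buildTreeSitterGrammar {
      language = "nix";
      source = pkgs.fetchFromGitHub { /* … */ };
    };
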
diff --git a/users/Profpatsch/writers/default.nix b/users/Profpatsch/writers/default.nix
index 3151a9d3bd44..02f39da02dbe 100644
--- a/users/Profpatsch/writers/default.nix
+++ b/users/Profpatsch/writers/default.nix
@@ -1,7 +1,7 @@
 { depot, pkgs, lib, ... }:
 let
-  bins = depot.nix.getBins pkgs.s6-portable-utils ["s6-mkdir" "s6-cat" "s6-ln" "s6-ls" "s6-touch" ]
-      // depot.nix.getBins pkgs.coreutils ["printf" ];
+  bins = depot.nix.getBins pkgs.s6-portable-utils [ "s6-mkdir" "s6-cat" "s6-ln" "s6-ls" "s6-touch" ]
+    // depot.nix.getBins pkgs.coreutils [ "printf" ];
 
   inherit (depot.nix.yants) defun struct restrict attrs list string drv any;
 
@@ -11,56 +11,76 @@ let
     restrict
       "flake error"
       (s: lib.any (prefix: (builtins.substring 0 1 s) == prefix)
-          [ "E" "W" ])
+        [ "E" "W" ])
       string;
   Libraries = defun [ (attrs any) (list drv) ];
 
-  python3 = {
-    name,
-    libraries ? (_: []),
-    flakeIgnore ? []
-  }: pkgs.writers.writePython3 name {
-    libraries = Libraries libraries pkgs.python3Packages;
-    flakeIgnore =
-      let ignoreTheseErrors = [
-        # whitespace after {
-        "E201"
-        # whitespace before }
-        "E202"
-        # fuck 4-space indentation
-        "E121" "E111"
-        # who cares about blank lines …
-        # … at end of files
-        "W391"
-        # … between functions
-        "E302" "E305"
-      ];
-      in list FlakeError (ignoreTheseErrors ++ flakeIgnore);
-  };
+  python3 =
+    { name
+    , libraries ? (_: [ ])
+    , flakeIgnore ? [ ]
+    }: pkgs.writers.writePython3 name {
+      libraries = Libraries libraries pkgs.python3Packages;
+      flakeIgnore =
+        let
+          ignoreTheseErrors = [
+            # whitespace after {
+            "E201"
+            # whitespace before }
+            "E202"
+            # fuck 4-space indentation
+            "E121"
+            "E111"
+            # who cares about blank lines …
+            # … at end of files
+            "W391"
+            # … between functions
+            "E302"
+            "E305"
+          ];
+        in
+        list FlakeError (ignoreTheseErrors ++ flakeIgnore);
+    };
 
   # TODO: add the same flake check as the python3 writer
-  python3Lib = { name, libraries ? (_: []) }: moduleString:
-    let srcTree = depot.nix.runExecline.local name { stdin = moduleString; } [
-      "importas" "out" "out"
-      "if" [ bins.s6-mkdir "-p" "\${out}/${name}" ]
-      "if" [
-        "redirfd" "-w" "1" "\${out}/setup.py"
-        bins.printf ''
-          from distutils.core import setup
+  python3Lib = { name, libraries ? (_: [ ]) }: moduleString:
+    let
+      srcTree = depot.nix.runExecline.local name { stdin = moduleString; } [
+        "importas"
+        "out"
+        "out"
+        "if"
+        [ bins.s6-mkdir "-p" "\${out}/${name}" ]
+        "if"
+        [
+          "redirfd"
+          "-w"
+          "1"
+          "\${out}/setup.py"
+          bins.printf
+          ''
+            from distutils.core import setup
 
-          setup(
-            name='%s',
-            packages=['%s']
-          )
-        '' name name
-      ]
-      "if" [
-        # redirect stdin to the init py
-        "redirfd" "-w" "1" "\${out}/${name}/__init__.py"
-        bins.s6-cat
-      ]
-    ];
-    in pkgs.python3Packages.buildPythonPackage {
+            setup(
+              name='%s',
+              packages=['%s']
+            )
+          ''
+          name
+          name
+        ]
+        "if"
+        [
+          # redirect stdin to the init py
+          "redirfd"
+          "-w"
+          "1"
+          "\${out}/${name}/__init__.py"
+          bins.s6-cat
+        ]
+      ];
+    in
+    pkgs.python3Packages.buildPythonPackage {
       inherit name;
       src = srcTree;
       propagatedBuildInputs = libraries pkgs.python3Packages;
@@ -68,7 +88,8 @@ let
     };
 
 
-in {
+in
+{
   inherit
     python3
     python3Lib
diff --git a/users/Profpatsch/writers/tests/default.nix b/users/Profpatsch/writers/tests/default.nix
index dc760af9e16e..d0d62d3b0e1b 100644
--- a/users/Profpatsch/writers/tests/default.nix
+++ b/users/Profpatsch/writers/tests/default.nix
@@ -10,38 +10,46 @@ let
     coreutils
     ;
 
-  run = drv: depot.nix.runExecline.local "run-${drv.name}" {} [
-    "if" [ drv ]
-    "importas" "out" "out"
-    "${coreutils}/bin/touch" "$out"
+  run = drv: depot.nix.runExecline.local "run-${drv.name}" { } [
+    "if"
+    [ drv ]
+    "importas"
+    "out"
+    "out"
+    "${coreutils}/bin/touch"
+    "$out"
   ];
 
-  pythonTransitiveLib = python3Lib {
-    name = "transitive";
-  } ''
+  pythonTransitiveLib = python3Lib
+    {
+      name = "transitive";
+    } ''
     def transitive(s):
       return s + " 1 2 3"
   '';
 
-  pythonTestLib = python3Lib {
-    name = "test_lib";
-    libraries = _: [ pythonTransitiveLib ];
-  } ''
+  pythonTestLib = python3Lib
+    {
+      name = "test_lib";
+      libraries = _: [ pythonTransitiveLib ];
+    } ''
     import transitive
     def test():
       return transitive.transitive("test")
   '';
 
-  pythonWithLib = run (python3 {
-    name = "python-with-lib";
-    libraries = _: [ pythonTestLib ];
-  } ''
+  pythonWithLib = run (python3
+    {
+      name = "python-with-lib";
+      libraries = _: [ pythonTestLib ];
+    } ''
     import test_lib
 
     assert(test_lib.test() == "test 1 2 3")
   '');
 
-in depot.nix.readTree.drvTargets {
+in
+depot.nix.readTree.drvTargets {
   inherit
     pythonWithLib
     ;
diff --git a/users/Profpatsch/ytextr/create-symlink-farm.nix b/users/Profpatsch/ytextr/create-symlink-farm.nix
index 583a3a90f5c5..7b3a45b91681 100644
--- a/users/Profpatsch/ytextr/create-symlink-farm.nix
+++ b/users/Profpatsch/ytextr/create-symlink-farm.nix
@@ -1,9 +1,10 @@
 {
   # list of package attribute names to get at run time
-  packageNamesAtRuntimeJsonPath,
+  packageNamesAtRuntimeJsonPath
+,
 }:
 let
-  pkgs = import <nixpkgs> {};
+  pkgs = import <nixpkgs> { };
 
   getPkg = pkgName: pkgs.${pkgName};
 
@@ -12,7 +13,7 @@ let
   runtime = map getPkg packageNamesAtRuntime;
 
 in
-  pkgs.symlinkJoin {
-    name = "symlink-farm";
-    paths = runtime;
-  }
+pkgs.symlinkJoin {
+  name = "symlink-farm";
+  paths = runtime;
+}
diff --git a/users/Profpatsch/ytextr/default.nix b/users/Profpatsch/ytextr/default.nix
index dba6bbb8b400..ac630603b90c 100644
--- a/users/Profpatsch/ytextr/default.nix
+++ b/users/Profpatsch/ytextr/default.nix
@@ -12,48 +12,71 @@
 
 let
   bins = depot.nix.getBins pkgs.nix [ "nix-build" ]
-      // depot.nix.getBins pkgs.bubblewrap [ "bwrap" ];
+    // depot.nix.getBins pkgs.bubblewrap [ "bwrap" ];
 
   # Run a command, with the given packages in scope, and `packageNamesAtRuntime` being fetched at the start in the given nix `channel`.
-  nix-run-with-channel = {
-    # The channel to get `packageNamesAtRuntime` from
-    channel,
-    # executable to run with `packageNamesAtRuntime` in PATH
-    # and the argv
-    executable,
-    # A list of nixpkgs package attribute names that should be put into PATH when running `command`.
-    packageNamesAtRuntime,
-  }: depot.nix.writeExecline "nix-run-with-channel-${channel}" {} [
-    # TODO: prevent race condition by writing a temporary gc root
-    "backtick" "-iE" "storepath" [
-      bins.nix-build
-        "-I" "nixpkgs=channel:${channel}"
+  nix-run-with-channel =
+    {
+      # The channel to get `packageNamesAtRuntime` from
+      channel
+    , # executable to run with `packageNamesAtRuntime` in PATH
+      # and the argv
+      executable
+    , # A list of nixpkgs package attribute names that should be put into PATH when running `command`.
+      packageNamesAtRuntime
+    ,
+    }: depot.nix.writeExecline "nix-run-with-channel-${channel}" { } [
+      # TODO: prevent race condition by writing a temporary gc root
+      "backtick"
+      "-iE"
+      "storepath"
+      [
+        bins.nix-build
+        "-I"
+        "nixpkgs=channel:${channel}"
         "--arg"
-          "packageNamesAtRuntimeJsonPath"
-          (pkgs.writeText "packageNamesAtRuntime.json" (builtins.toJSON packageNamesAtRuntime))
+        "packageNamesAtRuntimeJsonPath"
+        (pkgs.writeText "packageNamesAtRuntime.json" (builtins.toJSON packageNamesAtRuntime))
         ./create-symlink-farm.nix
-    ]
-    "importas" "-ui" "PATH" "PATH"
-    "export" "PATH" "\${storepath}/bin:\${PATH}"
-    executable "$@"
-  ];
+      ]
+      "importas"
+      "-ui"
+      "PATH"
+      "PATH"
+      "export"
+      "PATH"
+      "\${storepath}/bin:\${PATH}"
+      executable
+      "$@"
+    ];
 
-in nix-run-with-channel {
+in
+nix-run-with-channel {
   channel = "nixos-unstable";
   packageNamesAtRuntime = [ "yt-dlp" ];
   executable = depot.nix.writeExecline "ytextr" { readNArgs = 1; } [
-    "getcwd" "-E" "cwd"
+    "getcwd"
+    "-E"
+    "cwd"
     bins.bwrap
-      "--ro-bind" "/nix/store" "/nix/store"
-      "--ro-bind" "/etc" "/etc"
-      "--bind" "$cwd" "$cwd"
-        "yt-dlp"
-        "--no-playlist"
-        "--write-sub"
-        "--all-subs"
-        "--embed-subs"
-        "--merge-output-format" "mkv"
-        "-f" "bestvideo[height<=?1080]+bestaudio/best"
-        "$1"
+    "--ro-bind"
+    "/nix/store"
+    "/nix/store"
+    "--ro-bind"
+    "/etc"
+    "/etc"
+    "--bind"
+    "$cwd"
+    "$cwd"
+    "yt-dlp"
+    "--no-playlist"
+    "--write-sub"
+    "--all-subs"
+    "--embed-subs"
+    "--merge-output-format"
+    "mkv"
+    "-f"
+    "bestvideo[height<=?1080]+bestaudio/best"
+    "$1"
   ];
 }
diff --git a/users/cynthia/keys.nix b/users/cynthia/keys.nix
index bac8dc1c57ae..e2f4ce488c9e 100644
--- a/users/cynthia/keys.nix
+++ b/users/cynthia/keys.nix
@@ -4,4 +4,4 @@
   all = [
     "cert-authority ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAICsj3W6QczgxE3s5GGT8qg0aLrCM+QeRnSq9RkiZtKvz meow"
   ];
-}
\ No newline at end of file
+}
diff --git a/users/edef/depot-scan/wrap.nix b/users/edef/depot-scan/wrap.nix
index dcb557a24b1e..77362b3f61f9 100644
--- a/users/edef/depot-scan/wrap.nix
+++ b/users/edef/depot-scan/wrap.nix
@@ -3,13 +3,14 @@
 let
 
   global = {
-    import = global.scopedImport {};
+    import = global.scopedImport { };
     scopedImport = x: builtins.scopedImport (global // x);
     builtins = builtins // {
       inherit (global) import scopedImport;
       readFile = path: builtins.trace "depot-scan '${toString path}'" (builtins.readFile path);
-      readDir  = path: builtins.trace "depot-scan '${toString path}'" (builtins.readDir  path);
+      readDir = path: builtins.trace "depot-scan '${toString path}'" (builtins.readDir path);
     };
   };
 
-in global.import
+in
+global.import
diff --git a/users/eta/keys.nix b/users/eta/keys.nix
index 247a182843f1..ebdc8479a5a8 100644
--- a/users/eta/keys.nix
+++ b/users/eta/keys.nix
@@ -9,4 +9,4 @@ let
     whitby = [ keys.yubikey4 keys.yubikey5 ];
   };
 in
-  configs
+configs
diff --git a/users/grfn/achilles/shell.nix b/users/grfn/achilles/shell.nix
index f32dce3ba39d..1434cf8a32c2 100644
--- a/users/grfn/achilles/shell.nix
+++ b/users/grfn/achilles/shell.nix
@@ -1,4 +1,4 @@
-with (import ../../.. {}).third_party.nixpkgs;
+with (import ../../.. { }).third_party.nixpkgs;
 
 mkShell {
   buildInputs = [
diff --git a/users/grfn/bbbg/arion-pkgs.nix b/users/grfn/bbbg/arion-pkgs.nix
index 66c016c28368..c6d603be2a99 100644
--- a/users/grfn/bbbg/arion-pkgs.nix
+++ b/users/grfn/bbbg/arion-pkgs.nix
@@ -1,2 +1,2 @@
-let depot = import ../../.. {};
+let depot = import ../../.. { };
 in depot.third_party.nixpkgs
diff --git a/users/grfn/bbbg/default.nix b/users/grfn/bbbg/default.nix
index 5b5b4badbf4b..cca4e68ce6b2 100644
--- a/users/grfn/bbbg/default.nix
+++ b/users/grfn/bbbg/default.nix
@@ -8,27 +8,28 @@ let
   deps = import ./deps.nix {
     inherit (pkgs) fetchMavenArtifact fetchgit lib;
   };
-in rec {
+in
+rec {
   meta.targets = [
     "db-util"
     "server"
     "tf"
   ];
 
-  depsPaths = deps.makePaths {};
+  depsPaths = deps.makePaths { };
 
   resources = builtins.filterSource (_: type: type != "symlink") ./resources;
 
   classpath.dev = concatStringsSep ":" (
-    (map gitignoreSource [./src ./test ./env/dev]) ++ [resources] ++ depsPaths
+    (map gitignoreSource [ ./src ./test ./env/dev ]) ++ [ resources ] ++ depsPaths
   );
 
   classpath.test = concatStringsSep ":" (
-    (map gitignoreSource [./src ./test ./env/test]) ++ [resources] ++ depsPaths
+    (map gitignoreSource [ ./src ./test ./env/test ]) ++ [ resources ] ++ depsPaths
   );
 
   classpath.prod = concatStringsSep ":" (
-    (map gitignoreSource [./src ./env/prod]) ++ [resources] ++ depsPaths
+    (map gitignoreSource [ ./src ./env/prod ]) ++ [ resources ] ++ depsPaths
   );
 
   testClojure = pkgs.writeShellScript "test-clojure" ''
diff --git a/users/grfn/bbbg/deps.nix b/users/grfn/bbbg/deps.nix
index 0d014cf373bb..02f5ecb4683c 100644
--- a/users/grfn/bbbg/deps.nix
+++ b/users/grfn/bbbg/deps.nix
@@ -1,16 +1,21 @@
 # generated by clj2nix-1.1.0-rc
 { fetchMavenArtifact, fetchgit, lib }:
 
-let repos = [
-        "https://repo1.maven.org/maven2/"
-        "https://repo.clojars.org/" ];
-
-  in rec {
-      makePaths = {extraClasspaths ? []}:
-        if (builtins.typeOf extraClasspaths != "list")
-        then builtins.throw "extraClasspaths must be of type 'list'!"
-        else (lib.concatMap (dep:
-          builtins.map (path:
+let
+  repos = [
+    "https://repo1.maven.org/maven2/"
+    "https://repo.clojars.org/"
+  ];
+
+in
+rec {
+  makePaths = { extraClasspaths ? [ ] }:
+    if (builtins.typeOf extraClasspaths != "list")
+    then builtins.throw "extraClasspaths must be of type 'list'!"
+    else (lib.concatMap
+      (dep:
+        builtins.map
+          (path:
             if builtins.isString path then
               path
             else if builtins.hasAttr "jar" path then
@@ -19,1471 +24,1471 @@ let repos = [
               path.outPath
             else
               path
-            )
+          )
           dep.paths)
-        packages) ++ extraClasspaths;
-      makeClasspaths = {extraClasspaths ? []}:
-       if (builtins.typeOf extraClasspaths != "list")
-       then builtins.throw "extraClasspaths must be of type 'list'!"
-       else builtins.concatStringsSep ":" (makePaths {inherit extraClasspaths;});
-      packageSources = builtins.map (dep: dep.src) packages;
-      packages = [
-  rec {
-    name = "cambium.logback.json/cambium";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "cambium.logback.json";
-      groupId = "cambium";
-      sha512 = "8e3f32bc1e11071ddc8700204333ba653585de7985c03d14c351950a7896975092e9deffd658bfec7b0b8b9cc72dc025d8e5179a185bd25da26e500218ec37a5";
-      version = "0.4.5";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "clojure/org.clojure";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "clojure";
-      groupId = "org.clojure";
-      sha512 = "a242514f623a17601b360886563c4a4fe09335e4e16522ac42bbcacda073ae77651cfed446daae7fe74061bb7dff5adc454769c0edc0ded350136c3c707e75b9";
-      version = "1.11.0-alpha3";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "joda-time/joda-time";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "joda-time";
-      groupId = "joda-time";
-      sha512 = "012fb9aa9b00b456f72a92374855a7f062f8617c026c436eee2cda67dffa2f8622201909c0f4f454bb346ff5a3ed6f60c236fafb19fa66f612d9861f27b38d3a";
-      version = "2.10";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "commons-codec/commons-codec";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "commons-codec";
-      groupId = "commons-codec";
-      sha512 = "da30a716770795fce390e4dd340a8b728f220c6572383ffef55bd5839655d5611fcc06128b2144f6cdcb36f53072a12ec80b04afee787665e7ad0b6e888a6787";
-      version = "1.15";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "HikariCP/com.zaxxer";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "HikariCP";
-      groupId = "com.zaxxer";
-      sha512 = "a41b6d8b1c4656e633459824f10320965976eeead01bd5cb24911040073181730e61feb797aef89d9e01c922e89cb58654f364df0a6b1bf62ab3e6f9cc367d77";
-      version = "5.0.0";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "ring-devel/ring";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "ring-devel";
-      groupId = "ring";
-      sha512 = "79a1ec9f9d03aa4fa0426353970b13468ee65ce314b51ab7a2682212a196a9b5c985eacdee5dbc6ff2f1b536a4e06d0e85e9dd7cc9a49958735c9c4e6d427fd5";
-      version = "1.9.4";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "simpleclient/io.prometheus";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "simpleclient";
-      groupId = "io.prometheus";
-      sha512 = "60af1cefff04e7036467eae54f5930d5677e4ab066f8ed38a391b54df17733acfefac45e19ee53cef289347bddce5fc69a2766f4e580d21a22cfd9e2348e2723";
-      version = "0.12.0";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "commons-lang3/org.apache.commons";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "commons-lang3";
-      groupId = "org.apache.commons";
-      sha512 = "fbdbc0943cb3498b0148e86a39b773f97c8e6013740f72dbc727faeabea402073e2cc8c4d68198e5fc6b08a13b7700236292e99d4785f2c9989f2e5fac11fd81";
-      version = "3.12.0";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "tools.logging/org.clojure";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "tools.logging";
-      groupId = "org.clojure";
-      sha512 = "b7a9680f1156fc7c1574a4364ca550d47668ba727fc80110fdd00c159bedb45c5be82f09cdfb8e8e988e3381e2cf8881ea70651e38001e3eaa4ece31ad0bf0c5";
-      version = "1.2.2";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "core.specs.alpha/org.clojure";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "core.specs.alpha";
-      groupId = "org.clojure";
-      sha512 = "f521f95b362a47bb35f7c85528c34537f905fb3dd24f2284201e445635a0df701b35d8419d53c6507cc78d3717c1f83cda35ea4c82abd8943cd2ab3de3fcad70";
-      version = "0.2.62";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "netty-common/io.netty";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "netty-common";
-      groupId = "io.netty";
-      sha512 = "7efc2f6774a3dbe8408fe182e19830b5b7a994a0d1b0eb50699df691c2450befa05ac205bbf341ad57bef3a04281ce435031e97e725c5c4edfc705a418828ce8";
-      version = "4.1.63.Final";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "jackson-databind/com.fasterxml.jackson.core";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "jackson-databind";
-      groupId = "com.fasterxml.jackson.core";
-      sha512 = "9f771e78af669b1e1683d6c5903bbf4790aaa88b6b420c2018437da318c3fa4220cd7fa726f3e42a1b8075def1fdbd3744937c15f3bcedfca3050199247363e8";
-      version = "2.12.4";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "expound/expound";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "expound";
-      groupId = "expound";
-      sha512 = "ca0a57cfd215cff6be36d1f83461ec2d0559c0eae172c8a8bd6e1676d49933d3c30a71192889bd75d813581707d5eda0ec05de03326396bc0cedebf2d71811e5";
-      version = "0.8.10";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "spec.alpha/org.clojure";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "spec.alpha";
-      groupId = "org.clojure";
-      sha512 = "ddfe4fa84622abd8ac56e2aa565a56e6bdc0bf330f377ff3e269ddc241bb9dbcac332c13502dfd4c09c2c08fe24d8d2e8cf3d04a1bc819ca5657b4e41feaa7c2";
-      version = "0.3.218";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "tools.cli/org.clojure";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "tools.cli";
-      groupId = "org.clojure";
-      sha512 = "1d88aa03eb6a664bf2c0ce22c45e7296d54d716e29b11904115be80ea1661623cf3e81fc222d164047058239010eb678af92ffedc7c3006475cceb59f3b21265";
-      version = "1.0.206";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "compojure/compojure";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "compojure";
-      groupId = "compojure";
-      sha512 = "1f4ba1354bd95772963a4ef0e129dde59d16f4f9fac0f89f2505a1d5de3b4527e45073219c0478e0b3285da46793e7c145ec5a55a9dae2fca6b77dc8d67b4db6";
-      version = "1.6.2";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "commons-fileupload/commons-fileupload";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "commons-fileupload";
-      groupId = "commons-fileupload";
-      sha512 = "a8780b7dd7ab68f9e1df38e77a5207c45ff50ec53d8b1476570d069edc8f59e52fb1d0fc534d7e513ac5a01b385ba73c320794c82369a72bd6d817a3b3b21f39";
-      version = "1.4";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "jetty-http/org.eclipse.jetty";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "jetty-http";
-      groupId = "org.eclipse.jetty";
-      sha512 = "60422ff3ef311f1d9d7340c2accdf611d40e738a39e9128967175ede4990439f4725995988849957742d488f749dd2e0740f74dc5bd9b3364e32fbaa66689308";
-      version = "9.4.42.v20210604";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "jetty-util/org.eclipse.jetty";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "jetty-util";
-      groupId = "org.eclipse.jetty";
-      sha512 = "d69084e2cfe0c3af1dc7ee2745d563549a4068b6e8aed5cd2b9f31167168fb64d418c4134a6dfb811b627ec0051d7ff71e0a02e4e775d18a53543d0871c44730";
-      version = "9.4.42.v20210604";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "janino/org.codehaus.janino";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "janino";
-      groupId = "org.codehaus.janino";
-      sha512 = "6853d7d53d3629df43a3a17ff5c989f59ec14e9030be5f67426deb9d0797fa3996b0609d582c65f22a4f7680c941b39ab6d466c480b2fea4bf92218a9b89651d";
-      version = "3.1.2";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "jcl-over-slf4j/org.slf4j";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "jcl-over-slf4j";
-      groupId = "org.slf4j";
-      sha512 = "23662fe407fcdbcba8865a8cd3f8bb09d4eb178a2a6511a32e35b995722b345e73f5dc1dd85d2d0a5c707db05aa57e0b3d0b96b59e55403fc486343d5ca4c0d6";
-      version = "2.0.0-alpha4";
-      
-    };
-    paths = [ src ];
-  }
-
-  (rec {
-    name = "io.github.cognitect-labs/test-runner";
-    src = fetchgit {
-      name = "test-runner";
-      url = "https://github.com/cognitect-labs/test-runner";
-      rev = "cc75980b43011773162b485f46f939dc5fba91e4";
-      sha256 = "1661ddmmqva1yiz9p09i5l32lfpi0a99h56022zgvz03nca2ksbg";
-    };
-    paths = map (path: src + path) [
-      "/src"
-    ];
-  })
-
-  rec {
-    name = "cambium.logback.core/cambium";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "cambium.logback.core";
-      groupId = "cambium";
-      sha512 = "83ee9a583dd8a7b2e82e0981b4e51b005095a27257eb1b07165d9701645609060c466ae67fb9431f524a544d52b71fa00009b8acf05aadbeb549043515f9b382";
-      version = "0.4.5";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "httpasyncclient/org.apache.httpcomponents";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "httpasyncclient";
-      groupId = "org.apache.httpcomponents";
-      sha512 = "0a80db5dbf772f02d02ba6c7c163e8da9517dd7195714b495acb845c429580c1fc926d3e71c115e75be8c145651dce2fdfa0dc380132f7809c14b3ad95492aee";
-      version = "4.1.4";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "logback-jackson/ch.qos.logback.contrib";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "logback-jackson";
-      groupId = "ch.qos.logback.contrib";
-      sha512 = "d9a3d4cb6cf4eda6fc18e2d374007d27c6ddba98e989a8d8a01b49859b280450113f685df6e16c5fbe0472bc9e26308bc7e8b7e0aedab9404cf0b492d7511685";
-      version = "0.1.5";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "simpleclient_tracer_otel/io.prometheus";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "simpleclient_tracer_otel";
-      groupId = "io.prometheus";
-      sha512 = "bce192e6162cb3ada7dd6c2d10456e78bce71c170faa09bad2896272fa1bd4a036288d707f3d47747991d8946c74fe21c565713fb15c7052305eb753c94dd939";
-      version = "0.12.0";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "netty-codec/io.netty";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "netty-codec";
-      groupId = "io.netty";
-      sha512 = "f6d9c4a5b508ca0d5f0e213473088f5d7b2e184e447dc092e69227109e28da9b8e68b2238ca6ab4e9915bacacf59cc0dce6ebcbbb05dad34a03b7976d9670c51";
-      version = "4.1.63.Final";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "ring-oauth2/ring-oauth2";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "ring-oauth2";
-      groupId = "ring-oauth2";
-      sha512 = "3ed765b4bbb5749fcdcdb501b93ab656a413ade5af24c7aa34639718ed1fd0a5f325b05bd135540d56e55cbb456a2cb7852ba0e45bc5233e28229986eef75bb9";
-      version = "0.2.0";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "tools.macro/org.clojure";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "tools.macro";
-      groupId = "org.clojure";
-      sha512 = "65ce5e29379620ac458274c53cd9926e4b764fcaebb1a2b3bc8aef86bbe10c79e654b028bc4328905d2495a680fa90f5002cf5c47885f6449fad43a04a594b26";
-      version = "0.1.5";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "jackson-dataformat-cbor/com.fasterxml.jackson.dataformat";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "jackson-dataformat-cbor";
-      groupId = "com.fasterxml.jackson.dataformat";
-      sha512 = "ea5d049eac1b94666479c5e36de14d8fa4b7f24cb92f0f310d2ec2b4de66ef9023161060e67228ef2d7420a002ef861db12a29cad0864638c21612da49686f4f";
-      version = "2.12.4";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "depstar/seancorfield";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "depstar";
-      groupId = "seancorfield";
-      sha512 = "0f4458b39b8b1949755bc2fe64b239673a9efa3a0140998464bbbcab216ec847344c1b8920611f7c9ca07261850f3a08144ae221cc2c41813a080189e32f9c10";
-      version = "1.0.94";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "logback-core/ch.qos.logback";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "logback-core";
-      groupId = "ch.qos.logback";
-      sha512 = "fc554548f499e284007eeecf76bf4e1995effb6ac8a6262aa96118f623bf9085a9d5bec3741833dd3cae6a76b2ff78c6d0a1fe68bc01213207c93d8e2da345ca";
-      version = "1.3.0-alpha12";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "honeysql/honeysql";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "honeysql";
-      groupId = "honeysql";
-      sha512 = "74d1d93c968b33686848e3bf8934f3b5f002c2b69b1b55a3a3b172c952e9991324e6e95e3a0ce2fecf1de0d3a036f4dff7286df689f0733f253909464e0269f6";
-      version = "1.0.461";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "netty-buffer/io.netty";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "netty-buffer";
-      groupId = "io.netty";
-      sha512 = "181b55d99d8d46bbf5f67f05bdccb0381af23a9fca3e6d935e6cde727b132c67133de1c3d81ed19b04c1a5b232be0de16ec1de7e81b532878bc69564237c15dc";
-      version = "4.1.63.Final";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "slingshot/slingshot";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "slingshot";
-      groupId = "slingshot";
-      sha512 = "ff2b2a27b441d230261c7f3ec8c38aa551865e05ab6438a74bd12bfcbc5f6bdc88199d42aaf5932b47df84f3d2700c8f514b9f4e9b5da28d29da7ff6b09a7fb5";
-      version = "0.12.2";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "httpcore-nio/org.apache.httpcomponents";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "httpcore-nio";
-      groupId = "org.apache.httpcomponents";
-      sha512 = "002af5f72b68a4ff1b1ff46b788013283d195e1d62ee1d7b102aa930b30f77f7e215a6d18edbea0fccd18fb1fa3a66cc4aef6070d72d6d1886f0044dfe0e16c7";
-      version = "4.4.10";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "ring-jetty-adapter/ring";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "ring-jetty-adapter";
-      groupId = "ring";
-      sha512 = "93075903ad73a8b73cb77ee9f53ed33594f40a5dafe8129089adb4cfa333e37468764203c00244568f02abf0c0eee9f5d9a9f96c420919027cf2746a41ec38e3";
-      version = "1.9.4";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "simpleclient_tracer_common/io.prometheus";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "simpleclient_tracer_common";
-      groupId = "io.prometheus";
-      sha512 = "6f717af63340efd84c5467ae752be7e66f586f0e8b57adb5b7a8ef99b223203ed829aad6797f6ef1811d6d861b00a621a1288c9271ec2ba77018d6d9eb9e7987";
-      version = "0.12.0";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "component/com.stuartsierra";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "component";
-      groupId = "com.stuartsierra";
-      sha512 = "108b02f51165ad07c2cf5232fbd954d052880c2456e6fb6db3342bda6851c76b73bf9145f03fb0df2b5782fe39f368b2868780c1e8e2dfa2ab2c68dd97f34ab7";
-      version = "1.0.0";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "netty-handler/io.netty";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "netty-handler";
-      groupId = "io.netty";
-      sha512 = "48874727553dd7084f5c48d90de123704ae334837c3a103f598887bb21405dd62c57603b59300ac2fcdd936f0af99ed0730487fb9fb8917d236b8fe3f78f3c02";
-      version = "4.1.63.Final";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "yuicompressor/com.yahoo.platform.yui";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "yuicompressor";
-      groupId = "com.yahoo.platform.yui";
-      sha512 = "ba2588bd50eaa3005b1919daad9f9c86a33351ceb9b7b5f0a9a498a548cc523e99f9345917a64303f8e23925feea226386d3eac01f640f788d1be4c7cf0315e0";
-      version = "2.4.8";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "commons-io/commons-io";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "commons-io";
-      groupId = "commons-io";
-      sha512 = "6af22dffaaecd1553147e788b5cf50368582318f396e456fe9ff33f5175836713a5d700e51720465c932c2b1987daa83027358005812d6a95d5755432de3a79d";
-      version = "2.10.0";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "tools.namespace/org.clojure";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "tools.namespace";
-      groupId = "org.clojure";
-      sha512 = "2cdb9c5d9bc4fd01dae182e9ad4b91eeaa2487003a977e7d8d5e66f562a9544b59f558710eccf421ea63cbbfa953ac8944fe9b9a76049fb82a47eb2bdcb3a4d7";
-      version = "1.1.1";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "honeysql/com.github.seancorfield";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "honeysql";
-      groupId = "com.github.seancorfield";
-      sha512 = "a0e5ebbf922aaf170c2d74ec0efc0df7e3bda92d0b8cc5f40ee4c8ddcb8c7e0e46556fac381513e0ac76b10f681c14c2d2569010c2f8eab4ff04f6373c2bf229";
-      version = "2.2.840";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "jackson-core/com.fasterxml.jackson.core";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "jackson-core";
-      groupId = "com.fasterxml.jackson.core";
-      sha512 = "428e0ebb16dd4c74ab0adf712058fd0dc0cd788f6e6f90c60c627da6577b345fac60a30694e111f1cd4e3e8bf79a1f1b820d30ada114984b26c28e299e326eaa";
-      version = "2.12.4";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "clj-time/clj-time";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "clj-time";
-      groupId = "clj-time";
-      sha512 = "cfeb46af59fd4112aa5a5d0087a39355f0fc19514b4c02bc6c3d9f81c9bda40491686207836e9a7943aebeb82a3b36f4e8b7407a8908c5ef151122644b278d75";
-      version = "0.15.2";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "clj-http/clj-http";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "clj-http";
-      groupId = "clj-http";
-      sha512 = "9884557d4f38068cb3234aec80acc0de8f9716645529693ffd9bd6db8221f5d1cf9e2d1b8bf7c7df4215d71372b02d83043ebf8fc27dc422552b32c9bdba1602";
-      version = "3.12.3";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "jul-to-slf4j/org.slf4j";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "jul-to-slf4j";
-      groupId = "org.slf4j";
-      sha512 = "350cfb889248d724b27dce697f635f12d9db463f107830b9518ce184dc4cc1ab3933eb5bdab08515e69766c3d5be24547dac289d6406c44eca90717230714b91";
-      version = "2.0.0-alpha4";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "migratus/migratus";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "migratus";
-      groupId = "migratus";
-      sha512 = "ee5ce8601930d063e0d9d90fc8e165b78fc1587bfd7e0fc9922735bc2f9fc27f8cf8bf10d49d6fd57b899ac4b250145bd653915ed770424416e026ba37d1b604";
-      version = "1.3.5";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "httpcore/org.apache.httpcomponents";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "httpcore";
-      groupId = "org.apache.httpcomponents";
-      sha512 = "f16a652f4a7b87dbf7cb16f8590d54a3f719c4c7b2f8883ce59db2d73be4701b64f2ca8a2c45aca6a5dbeaddeedff0c280a03722f70c076e239b645faa54eff9";
-      version = "4.4.14";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "httpclient-cache/org.apache.httpcomponents";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "httpclient-cache";
-      groupId = "org.apache.httpcomponents";
-      sha512 = "e150e8dc49c8c9972d8b324b56bb292b15e2f0e686f1292c4edac975615dfb16e5edb8ab325e614732a7d43a03061ca4fe93fe1e1f7487851a4d4d3af50a61f9";
-      version = "4.5.13";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "instaparse/instaparse";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "instaparse";
-      groupId = "instaparse";
-      sha512 = "ec2fcf4a09319a8fa9489b08fd9c9a5fe6e63155dde74d096f947fabc4f68d3d1bf68faf21e175e80eaee785f563a1903d30c550f93fb13a16a240609e3dfa2e";
-      version = "1.4.8";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "honeysql-postgres/nilenso";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "honeysql-postgres";
-      groupId = "nilenso";
-      sha512 = "d4accd3b8819cf715ecdb29496cf5a6a5ad3871fd579e55c7148d4e05774cb896c681b0c6f84df88aa9cd8e6ef9bfd65788ede9a49ba365ad0e32ee350091879";
-      version = "0.4.112";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "clj-tuple/clj-tuple";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "clj-tuple";
-      groupId = "clj-tuple";
-      sha512 = "dd626944d0aba679a21b164ed0c77ea84449359361496cba810f83b9fdeab751e5889963888098ce4bf8afa112dbda0a46ed60348a9c01ad36a2e255deb7ab6d";
-      version = "0.2.2";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "jackson-annotations/com.fasterxml.jackson.core";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "jackson-annotations";
-      groupId = "com.fasterxml.jackson.core";
-      sha512 = "6fdad6c5bb71a97331a662fe26265aacab6869f3307a710697d5c2f256fd48935764bfb0b3505a2cbb1605daf0b7350abdf84a1b1cf2bb1e91d9184565243c8e";
-      version = "2.12.4";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "hiccup/hiccup";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "hiccup";
-      groupId = "hiccup";
-      sha512 = "034f15be46c35029f41869c912f82cb2929fbbb0524ea64bd98dcdb9cf09875b28c75e926fa5fff53942b0f9e543e85a73a2d03c3f2112eecae30fcef8b148f4";
-      version = "1.0.5";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "riddley/riddley";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "riddley";
-      groupId = "riddley";
-      sha512 = "b478ecba9d1ab9d38c84a42354586fcece763000907b40c97bc43c0f16dc560b0860144efe410193cb3b7cb0149fbc1724fdd737cc3ba53de23618f5b30e6f9f";
-      version = "0.1.12";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "java.classpath/org.clojure";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "java.classpath";
-      groupId = "org.clojure";
-      sha512 = "90cd8edeaea02bd908d8cfb0cf5b1cf901aeb38ea3f4971c4b813d33210438aae6fff8e724a8272d2ea9441d373e7d936fa5870e309c1e9721299f662dbbdb9a";
-      version = "1.0.0";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "simpleclient_pushgateway/io.prometheus";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "simpleclient_pushgateway";
-      groupId = "io.prometheus";
-      sha512 = "31c8878929f516ba7030cc9ec4ac4cbcb09955a9fdae23c6904bc481e40e70e1b3e05619c49b646119077ef6f57c430cc7944f6bafdbca24c9efa8145474fcf7";
-      version = "0.12.0";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "ns-tracker/ns-tracker";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "ns-tracker";
-      groupId = "ns-tracker";
-      sha512 = "cfb6c2c9f899b43d1284acdc572b34b977936c4df734b38137dfea045421b74d529509cde23695f1dc5ee06d046c2f6b61a2cd98058da1c7220c21dd0361964f";
-      version = "0.4.0";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "clout/clout";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "clout";
-      groupId = "clout";
-      sha512 = "99d6e1a8c5726ca4e5d12b280a39e6d1182d734922600f27d588d3d65fbc830c5e03f9e0421ff25c819deee4d1f389fd3906222716ace1eb17ce70ef9c5e8f4b";
-      version = "2.2.1";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "commons-logging/commons-logging";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "commons-logging";
-      groupId = "commons-logging";
-      sha512 = "ed00dbfabd9ae00efa26dd400983601d076fe36408b7d6520084b447e5d1fa527ce65bd6afdcb58506c3a808323d28e88f26cb99c6f5db9ff64f6525ecdfa557";
-      version = "1.2";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "clojure.java-time/clojure.java-time";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "clojure.java-time";
-      groupId = "clojure.java-time";
-      sha512 = "62d8a286ec3393594e7f84eba22dbb02c1305a80a18b2574058ae963d3f3e829ff960c8b66e89069e6c071a11f869203134c6c4cdec6f8e516c9b314796c8108";
-      version = "0.3.3";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "data.csv/org.clojure";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "data.csv";
-      groupId = "org.clojure";
-      sha512 = "b039775a859ed27eca8f8ae74ccb6afde3ad1fe2b3cbe542240c324d60fe1237e495eb1300ee9eb4ff4ef59f01faf7aec6ef1dd6a025ee4fe556c1d91acfcf1b";
-      version = "1.0.0";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "simpleclient_tracer_otel_agent/io.prometheus";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "simpleclient_tracer_otel_agent";
-      groupId = "io.prometheus";
-      sha512 = "97694210d9a5b48a7cb9dda2a187432c4813edb3051edfa5832a0a471e0b2d5988dab92b70c292e78f59b169345deb5c1c706361fd726f3dc2480766dedfdcec";
-      version = "0.12.0";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "next.jdbc/com.github.seancorfield";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "next.jdbc";
-      groupId = "com.github.seancorfield";
-      sha512 = "0b4b01ba126bb8b1e2c14262db9fca75456b274d09535d9a7bb386699bf20dc9ac11590d210769e7429ca59ebfdfbb06916b3ff275cc817d74eac5bbabdab8f2";
-      version = "1.2.761";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "java.jdbc/org.clojure";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "java.jdbc";
-      groupId = "org.clojure";
-      sha512 = "6162b7774dca58b62a94bc5a04ba845e4c7065c9c589cc3bb802becfec0baf0989a338c1bf9a5db7c3128873702840d5f2451628f3aac977245975d65a683b7d";
-      version = "0.7.11";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "netty-transport/io.netty";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "netty-transport";
-      groupId = "io.netty";
-      sha512 = "c11d690ffeaf3267b2166f73a43108fb89d588fcef3f6d3053bf4b6f6669483baa618fd97438010692a6fa28334372d5a31b7c0996961d4eabb60cbdc358a536";
-      version = "4.1.63.Final";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "crypto-random/crypto-random";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "crypto-random";
-      groupId = "crypto-random";
-      sha512 = "3520df744f250dbe061d1a5d7a05b7143f3a67a4c3f9ad87b8044ee68a36a702a0bcb3a203e35d380899dd01c28e01988b0a7af914b942ccbe0c35c9bdb22e11";
-      version = "1.2.1";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "netty-transport-native-unix-common/io.netty";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "netty-transport-native-unix-common";
-      groupId = "io.netty";
-      sha512 = "b63e5f8a44b7f37f3dba378bd06af64dd1d7be3f0b1a7d47ad139ff06e0212b4c7081275b1b5b12183aeb72eb5f9bf9ef03ed8c78bc302aeb4817dca7bd89f3a";
-      version = "4.1.63.Final";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "ring-codec/ring";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "ring-codec";
-      groupId = "ring";
-      sha512 = "38b9775a794831b8afd8d66991a75aa5910cd50952c9035866bf9cc01353810aedafbc3f35d8f9e56981ebf9e5c37c00b968759ed087d2855348b3f46d8d0487";
-      version = "1.1.3";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "spy/com.impossibl.pgjdbc-ng";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "spy";
-      groupId = "com.impossibl.pgjdbc-ng";
-      sha512 = "173615c39aa6015a732e329217b40e3ea1c304c9c168d2764d6ef23ab8775e2f4432339bc22d049662561f09d3fd890b5415738620d64dcedb762d5da26b4ebb";
-      version = "0.8.9";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "logback-json-core/ch.qos.logback.contrib";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "logback-json-core";
-      groupId = "ch.qos.logback.contrib";
-      sha512 = "2a826036f21997e2979fda83ae3e33cf62f3b2b2df15a7b11d1fd8a52163b09f0f2f8d72f5fdcea0ec1289b3d27727ed5e6b0bcdf4c5d741f4bac07b7b6139e8";
-      version = "0.1.5";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "httpclient/org.apache.httpcomponents";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "httpclient";
-      groupId = "org.apache.httpcomponents";
-      sha512 = "3567739186e551f84cad3e4b6b270c5b8b19aba297675a96bcdff3663ff7d20d188611d21f675fe5ff1bfd7d8ca31362070910d7b92ab1b699872a120aa6f089";
-      version = "4.5.13";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "crypto-equality/crypto-equality";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "crypto-equality";
-      groupId = "crypto-equality";
-      sha512 = "54cf3bd28f633665962bf6b41f5ccbf2634d0db210a739e10a7b12f635e13c7ef532efe1d5d8c0120bb46478bbd08000b179f4c2dd52123242dab79fea97d6a6";
-      version = "1.0.0";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "cheshire/cheshire";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "cheshire";
-      groupId = "cheshire";
-      sha512 = "855e9c42a8d1c64f4db5cda45e31e914eb5ed99a715e8d7a5759a9c4ab6c69a82353635ca7b0837880c6cf9b41b11184ae11e09cbf2c07aa13db32c539e5dfd4";
-      version = "5.10.1";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "tigris/tigris";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "tigris";
-      groupId = "tigris";
-      sha512 = "fdff4ef5e7175a973aaef98de4f37dee8e125fc711c495382e280aaf3e11341fe8925d52567ca60f3f1795511ade11bc23461c88959632dfae3cf50374d02bf6";
-      version = "0.1.2";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "config/yogthos";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "config";
-      groupId = "yogthos";
-      sha512 = "3437992d192465edc74aec5259d5e0c0ad7e631dff860b2ee14cef27f13cee7c60487202cf00fc160a95fb0b85ce1ddf56cbdd0c008b47ac598061bf115f6a23";
-      version = "1.1.9";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "jetty-io/org.eclipse.jetty";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "jetty-io";
-      groupId = "org.eclipse.jetty";
-      sha512 = "a8c5f73089daa0c8b27f836acddf40bcbf07bbb2571a4d73653be8aac3fb339022f546326722f216bad78a68886934d24db9bec54235124592dd29dbeab69051";
-      version = "9.4.42.v20210604";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "logback-json-classic/ch.qos.logback.contrib";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "logback-json-classic";
-      groupId = "ch.qos.logback.contrib";
-      sha512 = "d30bf70217d316914d83d46cc15783f656354084087d59cbc0620a746f10b4a42e56d33b3e50a8b3596a64ec8314730bf5ff9a3f7dc3417bdd0582665be009ec";
-      version = "0.1.5";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "tools.reader/org.clojure";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "tools.reader";
-      groupId = "org.clojure";
-      sha512 = "3481259c7a1eac719db2921e60173686726a0c2b65879d51a64d516a37f6120db8ffbb74b8bd273404285d7b25143ab5c7ced37e7c0eaf4ab1e44586ccd3c651";
-      version = "1.3.6";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "simpleclient_common/io.prometheus";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "simpleclient_common";
-      groupId = "io.prometheus";
-      sha512 = "dedd003638eb3651c112e2d697ac94eb4e3b3e32c94fa41bb1efe2c889a347cdc7bd13256e05423f3370592d4fd65faf8db57f0387ab75814d7fa77b14cbbadf";
-      version = "0.12.0";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "commons-compiler/org.codehaus.janino";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "commons-compiler";
-      groupId = "org.codehaus.janino";
-      sha512 = "f0778b891ef14d8ee6776747eab0b25da716cdc530752a81aedec2a77570e2f66402179b9408a6efde8125c808eb060a720d2f4977c1f1d022bdaae7eac8d011";
-      version = "3.1.2";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "servlet-api/javax.servlet";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "servlet-api";
-      groupId = "javax.servlet";
-      sha512 = "363ba5590436ab82067b7a2e14b481aeb2b12ca4048d7a1519a2e549b2d3c09ddf718ac64dc2be6c2fc24c51fdc9c8160261329403113369588ce27d87771db6";
-      version = "2.5";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "iapetos/clj-commons";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "iapetos";
-      groupId = "clj-commons";
-      sha512 = "d17f36c0cf0ec78db5e893e5c033f8562b31650bda6f5ee582e68f84a07a3631d04d6f69e4e18b1ca64e732c180fa669dfb69a78849e13f601cd563a7a8aab94";
-      version = "0.1.12";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "javax.servlet-api/javax.servlet";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "javax.servlet-api";
-      groupId = "javax.servlet";
-      sha512 = "32f7e3565c6cdf3d9a562f8fd597fe5059af0cf6b05b772a144a74bbc95927ac275eb38374538ec1c72adcce4c8e1e2c9f774a7b545db56b8085af0065e4a1e5";
-      version = "3.1.0";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "potemkin/potemkin";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "potemkin";
-      groupId = "potemkin";
-      sha512 = "5abc050bf7ff0b27d8c45aaa5e378201980815b711b2db99735db73304576c17e285026ea48a714bf0b0df7ad7a008de38b7d182cdc0e8989f4be1e6b3afa8aa";
-      version = "0.4.5";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "netty-resolver/io.netty";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "netty-resolver";
-      groupId = "io.netty";
-      sha512 = "fabf893de74264caa1799c15d184ed8f20b7bf9b1c41abb29f29adf728a934951f97892a4924634f9efbda17c8cf74ea3ff97bafca616711e3c5f79b8ed9ef3e";
-      version = "4.1.63.Final";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "netty-transport-native-epoll/io.netty";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "netty-transport-native-epoll";
-      groupId = "io.netty";
-      sha512 = "6fbc2dd2622699f3fc1f329acbd94baf7f1d8923c5cfcae262e6f2d64b4fd71b606561bce5e2b511dff8e052cdade930091fab683fd98713f6b62a622a2c6254";
-      version = "4.1.63.Final";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "clj-stacktrace/clj-stacktrace";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "clj-stacktrace";
-      groupId = "clj-stacktrace";
-      sha512 = "993f8a544203801fc074eefacee8e553e426422b3492d47b857d87ac73cde72c91e29f629382b9eae8cf9600bc2c4c29d2e7169e509c46302ab973c86e73af0c";
-      version = "0.2.8";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "cambium.codec-cheshire/cambium";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "cambium.codec-cheshire";
-      groupId = "cambium";
-      sha512 = "614491cf752a597f29ae29885db6c1ed191341303d89183bee52e4e2c76eb8eb14693562ad09484f379a074b36d97085e848ec3845e069440e6422506c1636f1";
-      version = "1.0.0";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "slf4j-api/org.slf4j";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "slf4j-api";
-      groupId = "org.slf4j";
-      sha512 = "ad705ab6fd5cd904ef6861c0adf08af19593cf6a486b18de548fe3d68e57b1baa7e02947584fd4dcc350ddcddcf906c01e8d9ba7943a202690d0d788627696b5";
-      version = "2.0.0-alpha4";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "test.check/org.clojure";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "test.check";
-      groupId = "org.clojure";
-      sha512 = "b8d7a330b0b5514cd6a00c4382052fab51c3c9d3bc53133f8506791fa670e7c5ecd65094977ea5ced91f59623b0abd1ab8feeec96d63c5c6e459b265a655c577";
-      version = "1.1.1";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "ring-logger/ring-logger";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "ring-logger";
-      groupId = "ring-logger";
-      sha512 = "b675a61c173289fc610d84920ba40178bf62b3bc680923cb66866d78ee2a508296b27a1ab14b66bfbe0304a64166a7e3c3ddee36564dd4a2f988861bce455a3a";
-      version = "1.0.1";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "ring-servlet/ring";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "ring-servlet";
-      groupId = "ring";
-      sha512 = "3d8e6ec224e13d54810a945c0b6c0d2d863736a48d8c4bfc8fadb96b6b0fa9baa638644d0d92d8a53650b188e6e75d391731b08b26eb0f551e90a7504e7f4267";
-      version = "1.9.4";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "logback-classic/ch.qos.logback";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "logback-classic";
-      groupId = "ch.qos.logback";
-      sha512 = "f9fe0f126061f4abe3973b631b8d8244ba9e9d77783479a6500d629d772050dee508a001fc14d2131407fbdd0d33dd6b8aeb9b1ea9125b471bb8412e8de659e6";
-      version = "1.3.0-alpha12";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "dependency/com.stuartsierra";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "dependency";
-      groupId = "com.stuartsierra";
-      sha512 = "d32fbc4813bd16f2ed8c82e2915e1fb564e88422159bd3580a85c8cd969d1bbbe315bdc13d29c2f0eaceeeafcf649ee712c8df4532464d560aaeae4ae5953866";
-      version = "1.0.0";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "camel-snake-kebab/camel-snake-kebab";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "camel-snake-kebab";
-      groupId = "camel-snake-kebab";
-      sha512 = "589d34b500560b7113760a16bfb6f0ccd8f162a1ce8c9bc829495432159ba9c95aebf6bc43aa126237a0525806a205a05f9910122074902b659e7fd151d176b1";
-      version = "0.4.2";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "ring/ring";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "ring";
-      groupId = "ring";
-      sha512 = "93c48fb670736b91fb41d8076e1e9c4f53c67693d15e75290da319e7d7881b829a24180029b3a0fa051473c6c77ac3c97b519254ebf2b2c9538b185e79b69162";
-      version = "1.9.4";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "netty-transport-native-kqueue/io.netty";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "netty-transport-native-kqueue";
-      groupId = "io.netty";
-      sha512 = "87e10c06e394a1698d65381d3be8336f753c55e3e899e297510161d0c72540023f30f9032322957e035ead793204a084b988bc21a2bc312fcf7567a22d02a3c4";
-      version = "4.1.63.Final";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "java.data/org.clojure";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "java.data";
-      groupId = "org.clojure";
-      sha512 = "225e1eafd1a659278212d831f7cd8609359f8c880ef3d69b4ade6301ce3c511307ce31d94cb82d5407314b990bd04714ec26273bb3036b248116a7a75fa75e1f";
-      version = "1.0.95";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "jetty-server/org.eclipse.jetty";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "jetty-server";
-      groupId = "org.eclipse.jetty";
-      sha512 = "b347f8a6e5b84e0f460037027e238a61edec710ade768c95e7be13dcea498abe43d5e622ee69ac7494138d1a8fcf92e07b7deab569c554831c57baad71c53b9b";
-      version = "9.4.42.v20210604";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "httpmime/org.apache.httpcomponents";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "httpmime";
-      groupId = "org.apache.httpcomponents";
-      sha512 = "e1b0ee84bce78576074dc1b6836a69d8f5518eade38562e6890e3ddaa72b7f54bf735c8e2286142c58cddf45f745da31261e5d73b7d8092eb6ecfb20946eb36c";
-      version = "4.5.13";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "log4j-over-slf4j/org.slf4j";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "log4j-over-slf4j";
-      groupId = "org.slf4j";
-      sha512 = "48fa023c57294b73b9bd2f53e3dd3169e03426e5b3aa9d80e1bb1a9abf927fc26ef9f64d02b9769d5577d83094d0f41f044d35bb3b4f6037d66d6b2f19b484a1";
-      version = "2.0.0-alpha4";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "ring-core/ring";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "ring-core";
-      groupId = "ring";
-      sha512 = "38d7214a3fc1b80ab55999036638dd1971272e01bec4cb8e0ee0a4aa83f51b8c41ba8a5850b0660227f067d2f9c6d75c0c0737725ea02762bbf8d192dc72febe";
-      version = "1.9.4";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "cambium.core/cambium";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "cambium.core";
-      groupId = "cambium";
-      sha512 = "0e1fe626c6d0b31aad84ea2e4466273065925548ee5915f442b7997ebfe795faea36dbeac50a0f8c16bbd20d877511e3f8c4ff4f2b916a4538513aaa5cc20112";
-      version = "1.1.1";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "medley/medley";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "medley";
-      groupId = "medley";
-      sha512 = "749ef43b5ea2cae7dc96db871cdd15c7b3c9cfbd96828c20ab08e67d39a5e938357d15994d8d413bc68678285d6c666f2a7296fbf305706d03b3007254e3c55c";
-      version = "1.3.0";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "garden/garden";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "garden";
-      groupId = "garden";
-      sha512 = "2cc29f071b68bf451835f76de351ac2efb930b5df9ca7237fdca439d3c4d797d7fa207a147886efe1738ab1c50b76c1e366bf9ffcd6f286b0b211260aedd0b25";
-      version = "1.3.10";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "jackson-dataformat-smile/com.fasterxml.jackson.dataformat";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "jackson-dataformat-smile";
-      groupId = "com.fasterxml.jackson.dataformat";
-      sha512 = "69676964a2b09516b8ffd0d847b6f9a9b843424185453731b548c25e7e9ce30e808c56d66923f9183e2b5c1ba007421b146a6806e768b8e6b07470d60227f1dd";
-      version = "2.12.4";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "jaxb-api/javax.xml.bind";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "jaxb-api";
-      groupId = "javax.xml.bind";
-      sha512 = "0c5bfc2c9f655bf5e6d596e0c196dcb9344d6dc78bf774207c8f8b6be59f69addf2b3121e81491983eff648dfbd55002b9878132de190825dad3ef3a1265b367";
-      version = "2.3.0";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "pgjdbc-ng/com.impossibl.pgjdbc-ng";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "pgjdbc-ng";
-      groupId = "com.impossibl.pgjdbc-ng";
-      sha512 = "a34ac9146257329f6e9b354f13f564c65dbea6463addae383e3918d3a64c90c67f5f7fda6b5c3866de991a568d6690edb3fb09f2507593390a6e30ec0c79e02c";
-      version = "0.8.9";
-      
-    };
-    paths = [ src ];
-  }
-
-  rec {
-    name = "http-kit/http-kit";
-    src = fetchMavenArtifact {
-      inherit repos;
-      artifactId = "http-kit";
-      groupId = "http-kit";
-      sha512 = "4186a2429984745e18730aa8fd545f1fc1812083819ebf77aecfc04e0d31585358a5e25a308c7f21d81359418bbc72390c281f5ed91ae116cf1af79860ba22c3";
-      version = "2.5.3";
-      
-    };
-    paths = [ src ];
-  }
+      packages) ++ extraClasspaths;
+  makeClasspaths = { extraClasspaths ? [ ] }:
+    if (builtins.typeOf extraClasspaths != "list")
+    then builtins.throw "extraClasspaths must be of type 'list'!"
+    else builtins.concatStringsSep ":" (makePaths { inherit extraClasspaths; });
+  packageSources = builtins.map (dep: dep.src) packages;
+  packages = [
+    rec {
+      name = "cambium.logback.json/cambium";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "cambium.logback.json";
+        groupId = "cambium";
+        sha512 = "8e3f32bc1e11071ddc8700204333ba653585de7985c03d14c351950a7896975092e9deffd658bfec7b0b8b9cc72dc025d8e5179a185bd25da26e500218ec37a5";
+        version = "0.4.5";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "clojure/org.clojure";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "clojure";
+        groupId = "org.clojure";
+        sha512 = "a242514f623a17601b360886563c4a4fe09335e4e16522ac42bbcacda073ae77651cfed446daae7fe74061bb7dff5adc454769c0edc0ded350136c3c707e75b9";
+        version = "1.11.0-alpha3";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "joda-time/joda-time";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "joda-time";
+        groupId = "joda-time";
+        sha512 = "012fb9aa9b00b456f72a92374855a7f062f8617c026c436eee2cda67dffa2f8622201909c0f4f454bb346ff5a3ed6f60c236fafb19fa66f612d9861f27b38d3a";
+        version = "2.10";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "commons-codec/commons-codec";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "commons-codec";
+        groupId = "commons-codec";
+        sha512 = "da30a716770795fce390e4dd340a8b728f220c6572383ffef55bd5839655d5611fcc06128b2144f6cdcb36f53072a12ec80b04afee787665e7ad0b6e888a6787";
+        version = "1.15";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "HikariCP/com.zaxxer";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "HikariCP";
+        groupId = "com.zaxxer";
+        sha512 = "a41b6d8b1c4656e633459824f10320965976eeead01bd5cb24911040073181730e61feb797aef89d9e01c922e89cb58654f364df0a6b1bf62ab3e6f9cc367d77";
+        version = "5.0.0";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "ring-devel/ring";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "ring-devel";
+        groupId = "ring";
+        sha512 = "79a1ec9f9d03aa4fa0426353970b13468ee65ce314b51ab7a2682212a196a9b5c985eacdee5dbc6ff2f1b536a4e06d0e85e9dd7cc9a49958735c9c4e6d427fd5";
+        version = "1.9.4";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "simpleclient/io.prometheus";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "simpleclient";
+        groupId = "io.prometheus";
+        sha512 = "60af1cefff04e7036467eae54f5930d5677e4ab066f8ed38a391b54df17733acfefac45e19ee53cef289347bddce5fc69a2766f4e580d21a22cfd9e2348e2723";
+        version = "0.12.0";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "commons-lang3/org.apache.commons";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "commons-lang3";
+        groupId = "org.apache.commons";
+        sha512 = "fbdbc0943cb3498b0148e86a39b773f97c8e6013740f72dbc727faeabea402073e2cc8c4d68198e5fc6b08a13b7700236292e99d4785f2c9989f2e5fac11fd81";
+        version = "3.12.0";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "tools.logging/org.clojure";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "tools.logging";
+        groupId = "org.clojure";
+        sha512 = "b7a9680f1156fc7c1574a4364ca550d47668ba727fc80110fdd00c159bedb45c5be82f09cdfb8e8e988e3381e2cf8881ea70651e38001e3eaa4ece31ad0bf0c5";
+        version = "1.2.2";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "core.specs.alpha/org.clojure";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "core.specs.alpha";
+        groupId = "org.clojure";
+        sha512 = "f521f95b362a47bb35f7c85528c34537f905fb3dd24f2284201e445635a0df701b35d8419d53c6507cc78d3717c1f83cda35ea4c82abd8943cd2ab3de3fcad70";
+        version = "0.2.62";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "netty-common/io.netty";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "netty-common";
+        groupId = "io.netty";
+        sha512 = "7efc2f6774a3dbe8408fe182e19830b5b7a994a0d1b0eb50699df691c2450befa05ac205bbf341ad57bef3a04281ce435031e97e725c5c4edfc705a418828ce8";
+        version = "4.1.63.Final";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "jackson-databind/com.fasterxml.jackson.core";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "jackson-databind";
+        groupId = "com.fasterxml.jackson.core";
+        sha512 = "9f771e78af669b1e1683d6c5903bbf4790aaa88b6b420c2018437da318c3fa4220cd7fa726f3e42a1b8075def1fdbd3744937c15f3bcedfca3050199247363e8";
+        version = "2.12.4";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "expound/expound";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "expound";
+        groupId = "expound";
+        sha512 = "ca0a57cfd215cff6be36d1f83461ec2d0559c0eae172c8a8bd6e1676d49933d3c30a71192889bd75d813581707d5eda0ec05de03326396bc0cedebf2d71811e5";
+        version = "0.8.10";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "spec.alpha/org.clojure";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "spec.alpha";
+        groupId = "org.clojure";
+        sha512 = "ddfe4fa84622abd8ac56e2aa565a56e6bdc0bf330f377ff3e269ddc241bb9dbcac332c13502dfd4c09c2c08fe24d8d2e8cf3d04a1bc819ca5657b4e41feaa7c2";
+        version = "0.3.218";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "tools.cli/org.clojure";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "tools.cli";
+        groupId = "org.clojure";
+        sha512 = "1d88aa03eb6a664bf2c0ce22c45e7296d54d716e29b11904115be80ea1661623cf3e81fc222d164047058239010eb678af92ffedc7c3006475cceb59f3b21265";
+        version = "1.0.206";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "compojure/compojure";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "compojure";
+        groupId = "compojure";
+        sha512 = "1f4ba1354bd95772963a4ef0e129dde59d16f4f9fac0f89f2505a1d5de3b4527e45073219c0478e0b3285da46793e7c145ec5a55a9dae2fca6b77dc8d67b4db6";
+        version = "1.6.2";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "commons-fileupload/commons-fileupload";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "commons-fileupload";
+        groupId = "commons-fileupload";
+        sha512 = "a8780b7dd7ab68f9e1df38e77a5207c45ff50ec53d8b1476570d069edc8f59e52fb1d0fc534d7e513ac5a01b385ba73c320794c82369a72bd6d817a3b3b21f39";
+        version = "1.4";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "jetty-http/org.eclipse.jetty";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "jetty-http";
+        groupId = "org.eclipse.jetty";
+        sha512 = "60422ff3ef311f1d9d7340c2accdf611d40e738a39e9128967175ede4990439f4725995988849957742d488f749dd2e0740f74dc5bd9b3364e32fbaa66689308";
+        version = "9.4.42.v20210604";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "jetty-util/org.eclipse.jetty";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "jetty-util";
+        groupId = "org.eclipse.jetty";
+        sha512 = "d69084e2cfe0c3af1dc7ee2745d563549a4068b6e8aed5cd2b9f31167168fb64d418c4134a6dfb811b627ec0051d7ff71e0a02e4e775d18a53543d0871c44730";
+        version = "9.4.42.v20210604";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "janino/org.codehaus.janino";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "janino";
+        groupId = "org.codehaus.janino";
+        sha512 = "6853d7d53d3629df43a3a17ff5c989f59ec14e9030be5f67426deb9d0797fa3996b0609d582c65f22a4f7680c941b39ab6d466c480b2fea4bf92218a9b89651d";
+        version = "3.1.2";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "jcl-over-slf4j/org.slf4j";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "jcl-over-slf4j";
+        groupId = "org.slf4j";
+        sha512 = "23662fe407fcdbcba8865a8cd3f8bb09d4eb178a2a6511a32e35b995722b345e73f5dc1dd85d2d0a5c707db05aa57e0b3d0b96b59e55403fc486343d5ca4c0d6";
+        version = "2.0.0-alpha4";
+
+      };
+      paths = [ src ];
+    }
+
+    (rec {
+      name = "io.github.cognitect-labs/test-runner";
+      src = fetchgit {
+        name = "test-runner";
+        url = "https://github.com/cognitect-labs/test-runner";
+        rev = "cc75980b43011773162b485f46f939dc5fba91e4";
+        sha256 = "1661ddmmqva1yiz9p09i5l32lfpi0a99h56022zgvz03nca2ksbg";
+      };
+      paths = map (path: src + path) [
+        "/src"
+      ];
+    })
+
+    rec {
+      name = "cambium.logback.core/cambium";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "cambium.logback.core";
+        groupId = "cambium";
+        sha512 = "83ee9a583dd8a7b2e82e0981b4e51b005095a27257eb1b07165d9701645609060c466ae67fb9431f524a544d52b71fa00009b8acf05aadbeb549043515f9b382";
+        version = "0.4.5";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "httpasyncclient/org.apache.httpcomponents";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "httpasyncclient";
+        groupId = "org.apache.httpcomponents";
+        sha512 = "0a80db5dbf772f02d02ba6c7c163e8da9517dd7195714b495acb845c429580c1fc926d3e71c115e75be8c145651dce2fdfa0dc380132f7809c14b3ad95492aee";
+        version = "4.1.4";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "logback-jackson/ch.qos.logback.contrib";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "logback-jackson";
+        groupId = "ch.qos.logback.contrib";
+        sha512 = "d9a3d4cb6cf4eda6fc18e2d374007d27c6ddba98e989a8d8a01b49859b280450113f685df6e16c5fbe0472bc9e26308bc7e8b7e0aedab9404cf0b492d7511685";
+        version = "0.1.5";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "simpleclient_tracer_otel/io.prometheus";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "simpleclient_tracer_otel";
+        groupId = "io.prometheus";
+        sha512 = "bce192e6162cb3ada7dd6c2d10456e78bce71c170faa09bad2896272fa1bd4a036288d707f3d47747991d8946c74fe21c565713fb15c7052305eb753c94dd939";
+        version = "0.12.0";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "netty-codec/io.netty";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "netty-codec";
+        groupId = "io.netty";
+        sha512 = "f6d9c4a5b508ca0d5f0e213473088f5d7b2e184e447dc092e69227109e28da9b8e68b2238ca6ab4e9915bacacf59cc0dce6ebcbbb05dad34a03b7976d9670c51";
+        version = "4.1.63.Final";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "ring-oauth2/ring-oauth2";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "ring-oauth2";
+        groupId = "ring-oauth2";
+        sha512 = "3ed765b4bbb5749fcdcdb501b93ab656a413ade5af24c7aa34639718ed1fd0a5f325b05bd135540d56e55cbb456a2cb7852ba0e45bc5233e28229986eef75bb9";
+        version = "0.2.0";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "tools.macro/org.clojure";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "tools.macro";
+        groupId = "org.clojure";
+        sha512 = "65ce5e29379620ac458274c53cd9926e4b764fcaebb1a2b3bc8aef86bbe10c79e654b028bc4328905d2495a680fa90f5002cf5c47885f6449fad43a04a594b26";
+        version = "0.1.5";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "jackson-dataformat-cbor/com.fasterxml.jackson.dataformat";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "jackson-dataformat-cbor";
+        groupId = "com.fasterxml.jackson.dataformat";
+        sha512 = "ea5d049eac1b94666479c5e36de14d8fa4b7f24cb92f0f310d2ec2b4de66ef9023161060e67228ef2d7420a002ef861db12a29cad0864638c21612da49686f4f";
+        version = "2.12.4";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "depstar/seancorfield";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "depstar";
+        groupId = "seancorfield";
+        sha512 = "0f4458b39b8b1949755bc2fe64b239673a9efa3a0140998464bbbcab216ec847344c1b8920611f7c9ca07261850f3a08144ae221cc2c41813a080189e32f9c10";
+        version = "1.0.94";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "logback-core/ch.qos.logback";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "logback-core";
+        groupId = "ch.qos.logback";
+        sha512 = "fc554548f499e284007eeecf76bf4e1995effb6ac8a6262aa96118f623bf9085a9d5bec3741833dd3cae6a76b2ff78c6d0a1fe68bc01213207c93d8e2da345ca";
+        version = "1.3.0-alpha12";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "honeysql/honeysql";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "honeysql";
+        groupId = "honeysql";
+        sha512 = "74d1d93c968b33686848e3bf8934f3b5f002c2b69b1b55a3a3b172c952e9991324e6e95e3a0ce2fecf1de0d3a036f4dff7286df689f0733f253909464e0269f6";
+        version = "1.0.461";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "netty-buffer/io.netty";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "netty-buffer";
+        groupId = "io.netty";
+        sha512 = "181b55d99d8d46bbf5f67f05bdccb0381af23a9fca3e6d935e6cde727b132c67133de1c3d81ed19b04c1a5b232be0de16ec1de7e81b532878bc69564237c15dc";
+        version = "4.1.63.Final";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "slingshot/slingshot";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "slingshot";
+        groupId = "slingshot";
+        sha512 = "ff2b2a27b441d230261c7f3ec8c38aa551865e05ab6438a74bd12bfcbc5f6bdc88199d42aaf5932b47df84f3d2700c8f514b9f4e9b5da28d29da7ff6b09a7fb5";
+        version = "0.12.2";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "httpcore-nio/org.apache.httpcomponents";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "httpcore-nio";
+        groupId = "org.apache.httpcomponents";
+        sha512 = "002af5f72b68a4ff1b1ff46b788013283d195e1d62ee1d7b102aa930b30f77f7e215a6d18edbea0fccd18fb1fa3a66cc4aef6070d72d6d1886f0044dfe0e16c7";
+        version = "4.4.10";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "ring-jetty-adapter/ring";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "ring-jetty-adapter";
+        groupId = "ring";
+        sha512 = "93075903ad73a8b73cb77ee9f53ed33594f40a5dafe8129089adb4cfa333e37468764203c00244568f02abf0c0eee9f5d9a9f96c420919027cf2746a41ec38e3";
+        version = "1.9.4";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "simpleclient_tracer_common/io.prometheus";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "simpleclient_tracer_common";
+        groupId = "io.prometheus";
+        sha512 = "6f717af63340efd84c5467ae752be7e66f586f0e8b57adb5b7a8ef99b223203ed829aad6797f6ef1811d6d861b00a621a1288c9271ec2ba77018d6d9eb9e7987";
+        version = "0.12.0";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "component/com.stuartsierra";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "component";
+        groupId = "com.stuartsierra";
+        sha512 = "108b02f51165ad07c2cf5232fbd954d052880c2456e6fb6db3342bda6851c76b73bf9145f03fb0df2b5782fe39f368b2868780c1e8e2dfa2ab2c68dd97f34ab7";
+        version = "1.0.0";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "netty-handler/io.netty";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "netty-handler";
+        groupId = "io.netty";
+        sha512 = "48874727553dd7084f5c48d90de123704ae334837c3a103f598887bb21405dd62c57603b59300ac2fcdd936f0af99ed0730487fb9fb8917d236b8fe3f78f3c02";
+        version = "4.1.63.Final";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "yuicompressor/com.yahoo.platform.yui";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "yuicompressor";
+        groupId = "com.yahoo.platform.yui";
+        sha512 = "ba2588bd50eaa3005b1919daad9f9c86a33351ceb9b7b5f0a9a498a548cc523e99f9345917a64303f8e23925feea226386d3eac01f640f788d1be4c7cf0315e0";
+        version = "2.4.8";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "commons-io/commons-io";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "commons-io";
+        groupId = "commons-io";
+        sha512 = "6af22dffaaecd1553147e788b5cf50368582318f396e456fe9ff33f5175836713a5d700e51720465c932c2b1987daa83027358005812d6a95d5755432de3a79d";
+        version = "2.10.0";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "tools.namespace/org.clojure";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "tools.namespace";
+        groupId = "org.clojure";
+        sha512 = "2cdb9c5d9bc4fd01dae182e9ad4b91eeaa2487003a977e7d8d5e66f562a9544b59f558710eccf421ea63cbbfa953ac8944fe9b9a76049fb82a47eb2bdcb3a4d7";
+        version = "1.1.1";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "honeysql/com.github.seancorfield";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "honeysql";
+        groupId = "com.github.seancorfield";
+        sha512 = "a0e5ebbf922aaf170c2d74ec0efc0df7e3bda92d0b8cc5f40ee4c8ddcb8c7e0e46556fac381513e0ac76b10f681c14c2d2569010c2f8eab4ff04f6373c2bf229";
+        version = "2.2.840";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "jackson-core/com.fasterxml.jackson.core";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "jackson-core";
+        groupId = "com.fasterxml.jackson.core";
+        sha512 = "428e0ebb16dd4c74ab0adf712058fd0dc0cd788f6e6f90c60c627da6577b345fac60a30694e111f1cd4e3e8bf79a1f1b820d30ada114984b26c28e299e326eaa";
+        version = "2.12.4";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "clj-time/clj-time";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "clj-time";
+        groupId = "clj-time";
+        sha512 = "cfeb46af59fd4112aa5a5d0087a39355f0fc19514b4c02bc6c3d9f81c9bda40491686207836e9a7943aebeb82a3b36f4e8b7407a8908c5ef151122644b278d75";
+        version = "0.15.2";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "clj-http/clj-http";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "clj-http";
+        groupId = "clj-http";
+        sha512 = "9884557d4f38068cb3234aec80acc0de8f9716645529693ffd9bd6db8221f5d1cf9e2d1b8bf7c7df4215d71372b02d83043ebf8fc27dc422552b32c9bdba1602";
+        version = "3.12.3";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "jul-to-slf4j/org.slf4j";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "jul-to-slf4j";
+        groupId = "org.slf4j";
+        sha512 = "350cfb889248d724b27dce697f635f12d9db463f107830b9518ce184dc4cc1ab3933eb5bdab08515e69766c3d5be24547dac289d6406c44eca90717230714b91";
+        version = "2.0.0-alpha4";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "migratus/migratus";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "migratus";
+        groupId = "migratus";
+        sha512 = "ee5ce8601930d063e0d9d90fc8e165b78fc1587bfd7e0fc9922735bc2f9fc27f8cf8bf10d49d6fd57b899ac4b250145bd653915ed770424416e026ba37d1b604";
+        version = "1.3.5";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "httpcore/org.apache.httpcomponents";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "httpcore";
+        groupId = "org.apache.httpcomponents";
+        sha512 = "f16a652f4a7b87dbf7cb16f8590d54a3f719c4c7b2f8883ce59db2d73be4701b64f2ca8a2c45aca6a5dbeaddeedff0c280a03722f70c076e239b645faa54eff9";
+        version = "4.4.14";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "httpclient-cache/org.apache.httpcomponents";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "httpclient-cache";
+        groupId = "org.apache.httpcomponents";
+        sha512 = "e150e8dc49c8c9972d8b324b56bb292b15e2f0e686f1292c4edac975615dfb16e5edb8ab325e614732a7d43a03061ca4fe93fe1e1f7487851a4d4d3af50a61f9";
+        version = "4.5.13";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "instaparse/instaparse";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "instaparse";
+        groupId = "instaparse";
+        sha512 = "ec2fcf4a09319a8fa9489b08fd9c9a5fe6e63155dde74d096f947fabc4f68d3d1bf68faf21e175e80eaee785f563a1903d30c550f93fb13a16a240609e3dfa2e";
+        version = "1.4.8";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "honeysql-postgres/nilenso";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "honeysql-postgres";
+        groupId = "nilenso";
+        sha512 = "d4accd3b8819cf715ecdb29496cf5a6a5ad3871fd579e55c7148d4e05774cb896c681b0c6f84df88aa9cd8e6ef9bfd65788ede9a49ba365ad0e32ee350091879";
+        version = "0.4.112";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "clj-tuple/clj-tuple";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "clj-tuple";
+        groupId = "clj-tuple";
+        sha512 = "dd626944d0aba679a21b164ed0c77ea84449359361496cba810f83b9fdeab751e5889963888098ce4bf8afa112dbda0a46ed60348a9c01ad36a2e255deb7ab6d";
+        version = "0.2.2";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "jackson-annotations/com.fasterxml.jackson.core";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "jackson-annotations";
+        groupId = "com.fasterxml.jackson.core";
+        sha512 = "6fdad6c5bb71a97331a662fe26265aacab6869f3307a710697d5c2f256fd48935764bfb0b3505a2cbb1605daf0b7350abdf84a1b1cf2bb1e91d9184565243c8e";
+        version = "2.12.4";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "hiccup/hiccup";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "hiccup";
+        groupId = "hiccup";
+        sha512 = "034f15be46c35029f41869c912f82cb2929fbbb0524ea64bd98dcdb9cf09875b28c75e926fa5fff53942b0f9e543e85a73a2d03c3f2112eecae30fcef8b148f4";
+        version = "1.0.5";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "riddley/riddley";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "riddley";
+        groupId = "riddley";
+        sha512 = "b478ecba9d1ab9d38c84a42354586fcece763000907b40c97bc43c0f16dc560b0860144efe410193cb3b7cb0149fbc1724fdd737cc3ba53de23618f5b30e6f9f";
+        version = "0.1.12";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "java.classpath/org.clojure";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "java.classpath";
+        groupId = "org.clojure";
+        sha512 = "90cd8edeaea02bd908d8cfb0cf5b1cf901aeb38ea3f4971c4b813d33210438aae6fff8e724a8272d2ea9441d373e7d936fa5870e309c1e9721299f662dbbdb9a";
+        version = "1.0.0";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "simpleclient_pushgateway/io.prometheus";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "simpleclient_pushgateway";
+        groupId = "io.prometheus";
+        sha512 = "31c8878929f516ba7030cc9ec4ac4cbcb09955a9fdae23c6904bc481e40e70e1b3e05619c49b646119077ef6f57c430cc7944f6bafdbca24c9efa8145474fcf7";
+        version = "0.12.0";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "ns-tracker/ns-tracker";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "ns-tracker";
+        groupId = "ns-tracker";
+        sha512 = "cfb6c2c9f899b43d1284acdc572b34b977936c4df734b38137dfea045421b74d529509cde23695f1dc5ee06d046c2f6b61a2cd98058da1c7220c21dd0361964f";
+        version = "0.4.0";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "clout/clout";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "clout";
+        groupId = "clout";
+        sha512 = "99d6e1a8c5726ca4e5d12b280a39e6d1182d734922600f27d588d3d65fbc830c5e03f9e0421ff25c819deee4d1f389fd3906222716ace1eb17ce70ef9c5e8f4b";
+        version = "2.2.1";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "commons-logging/commons-logging";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "commons-logging";
+        groupId = "commons-logging";
+        sha512 = "ed00dbfabd9ae00efa26dd400983601d076fe36408b7d6520084b447e5d1fa527ce65bd6afdcb58506c3a808323d28e88f26cb99c6f5db9ff64f6525ecdfa557";
+        version = "1.2";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "clojure.java-time/clojure.java-time";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "clojure.java-time";
+        groupId = "clojure.java-time";
+        sha512 = "62d8a286ec3393594e7f84eba22dbb02c1305a80a18b2574058ae963d3f3e829ff960c8b66e89069e6c071a11f869203134c6c4cdec6f8e516c9b314796c8108";
+        version = "0.3.3";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "data.csv/org.clojure";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "data.csv";
+        groupId = "org.clojure";
+        sha512 = "b039775a859ed27eca8f8ae74ccb6afde3ad1fe2b3cbe542240c324d60fe1237e495eb1300ee9eb4ff4ef59f01faf7aec6ef1dd6a025ee4fe556c1d91acfcf1b";
+        version = "1.0.0";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "simpleclient_tracer_otel_agent/io.prometheus";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "simpleclient_tracer_otel_agent";
+        groupId = "io.prometheus";
+        sha512 = "97694210d9a5b48a7cb9dda2a187432c4813edb3051edfa5832a0a471e0b2d5988dab92b70c292e78f59b169345deb5c1c706361fd726f3dc2480766dedfdcec";
+        version = "0.12.0";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "next.jdbc/com.github.seancorfield";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "next.jdbc";
+        groupId = "com.github.seancorfield";
+        sha512 = "0b4b01ba126bb8b1e2c14262db9fca75456b274d09535d9a7bb386699bf20dc9ac11590d210769e7429ca59ebfdfbb06916b3ff275cc817d74eac5bbabdab8f2";
+        version = "1.2.761";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "java.jdbc/org.clojure";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "java.jdbc";
+        groupId = "org.clojure";
+        sha512 = "6162b7774dca58b62a94bc5a04ba845e4c7065c9c589cc3bb802becfec0baf0989a338c1bf9a5db7c3128873702840d5f2451628f3aac977245975d65a683b7d";
+        version = "0.7.11";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "netty-transport/io.netty";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "netty-transport";
+        groupId = "io.netty";
+        sha512 = "c11d690ffeaf3267b2166f73a43108fb89d588fcef3f6d3053bf4b6f6669483baa618fd97438010692a6fa28334372d5a31b7c0996961d4eabb60cbdc358a536";
+        version = "4.1.63.Final";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "crypto-random/crypto-random";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "crypto-random";
+        groupId = "crypto-random";
+        sha512 = "3520df744f250dbe061d1a5d7a05b7143f3a67a4c3f9ad87b8044ee68a36a702a0bcb3a203e35d380899dd01c28e01988b0a7af914b942ccbe0c35c9bdb22e11";
+        version = "1.2.1";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "netty-transport-native-unix-common/io.netty";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "netty-transport-native-unix-common";
+        groupId = "io.netty";
+        sha512 = "b63e5f8a44b7f37f3dba378bd06af64dd1d7be3f0b1a7d47ad139ff06e0212b4c7081275b1b5b12183aeb72eb5f9bf9ef03ed8c78bc302aeb4817dca7bd89f3a";
+        version = "4.1.63.Final";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "ring-codec/ring";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "ring-codec";
+        groupId = "ring";
+        sha512 = "38b9775a794831b8afd8d66991a75aa5910cd50952c9035866bf9cc01353810aedafbc3f35d8f9e56981ebf9e5c37c00b968759ed087d2855348b3f46d8d0487";
+        version = "1.1.3";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "spy/com.impossibl.pgjdbc-ng";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "spy";
+        groupId = "com.impossibl.pgjdbc-ng";
+        sha512 = "173615c39aa6015a732e329217b40e3ea1c304c9c168d2764d6ef23ab8775e2f4432339bc22d049662561f09d3fd890b5415738620d64dcedb762d5da26b4ebb";
+        version = "0.8.9";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "logback-json-core/ch.qos.logback.contrib";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "logback-json-core";
+        groupId = "ch.qos.logback.contrib";
+        sha512 = "2a826036f21997e2979fda83ae3e33cf62f3b2b2df15a7b11d1fd8a52163b09f0f2f8d72f5fdcea0ec1289b3d27727ed5e6b0bcdf4c5d741f4bac07b7b6139e8";
+        version = "0.1.5";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "httpclient/org.apache.httpcomponents";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "httpclient";
+        groupId = "org.apache.httpcomponents";
+        sha512 = "3567739186e551f84cad3e4b6b270c5b8b19aba297675a96bcdff3663ff7d20d188611d21f675fe5ff1bfd7d8ca31362070910d7b92ab1b699872a120aa6f089";
+        version = "4.5.13";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "crypto-equality/crypto-equality";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "crypto-equality";
+        groupId = "crypto-equality";
+        sha512 = "54cf3bd28f633665962bf6b41f5ccbf2634d0db210a739e10a7b12f635e13c7ef532efe1d5d8c0120bb46478bbd08000b179f4c2dd52123242dab79fea97d6a6";
+        version = "1.0.0";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "cheshire/cheshire";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "cheshire";
+        groupId = "cheshire";
+        sha512 = "855e9c42a8d1c64f4db5cda45e31e914eb5ed99a715e8d7a5759a9c4ab6c69a82353635ca7b0837880c6cf9b41b11184ae11e09cbf2c07aa13db32c539e5dfd4";
+        version = "5.10.1";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "tigris/tigris";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "tigris";
+        groupId = "tigris";
+        sha512 = "fdff4ef5e7175a973aaef98de4f37dee8e125fc711c495382e280aaf3e11341fe8925d52567ca60f3f1795511ade11bc23461c88959632dfae3cf50374d02bf6";
+        version = "0.1.2";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "config/yogthos";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "config";
+        groupId = "yogthos";
+        sha512 = "3437992d192465edc74aec5259d5e0c0ad7e631dff860b2ee14cef27f13cee7c60487202cf00fc160a95fb0b85ce1ddf56cbdd0c008b47ac598061bf115f6a23";
+        version = "1.1.9";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "jetty-io/org.eclipse.jetty";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "jetty-io";
+        groupId = "org.eclipse.jetty";
+        sha512 = "a8c5f73089daa0c8b27f836acddf40bcbf07bbb2571a4d73653be8aac3fb339022f546326722f216bad78a68886934d24db9bec54235124592dd29dbeab69051";
+        version = "9.4.42.v20210604";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "logback-json-classic/ch.qos.logback.contrib";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "logback-json-classic";
+        groupId = "ch.qos.logback.contrib";
+        sha512 = "d30bf70217d316914d83d46cc15783f656354084087d59cbc0620a746f10b4a42e56d33b3e50a8b3596a64ec8314730bf5ff9a3f7dc3417bdd0582665be009ec";
+        version = "0.1.5";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "tools.reader/org.clojure";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "tools.reader";
+        groupId = "org.clojure";
+        sha512 = "3481259c7a1eac719db2921e60173686726a0c2b65879d51a64d516a37f6120db8ffbb74b8bd273404285d7b25143ab5c7ced37e7c0eaf4ab1e44586ccd3c651";
+        version = "1.3.6";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "simpleclient_common/io.prometheus";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "simpleclient_common";
+        groupId = "io.prometheus";
+        sha512 = "dedd003638eb3651c112e2d697ac94eb4e3b3e32c94fa41bb1efe2c889a347cdc7bd13256e05423f3370592d4fd65faf8db57f0387ab75814d7fa77b14cbbadf";
+        version = "0.12.0";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "commons-compiler/org.codehaus.janino";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "commons-compiler";
+        groupId = "org.codehaus.janino";
+        sha512 = "f0778b891ef14d8ee6776747eab0b25da716cdc530752a81aedec2a77570e2f66402179b9408a6efde8125c808eb060a720d2f4977c1f1d022bdaae7eac8d011";
+        version = "3.1.2";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "servlet-api/javax.servlet";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "servlet-api";
+        groupId = "javax.servlet";
+        sha512 = "363ba5590436ab82067b7a2e14b481aeb2b12ca4048d7a1519a2e549b2d3c09ddf718ac64dc2be6c2fc24c51fdc9c8160261329403113369588ce27d87771db6";
+        version = "2.5";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "iapetos/clj-commons";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "iapetos";
+        groupId = "clj-commons";
+        sha512 = "d17f36c0cf0ec78db5e893e5c033f8562b31650bda6f5ee582e68f84a07a3631d04d6f69e4e18b1ca64e732c180fa669dfb69a78849e13f601cd563a7a8aab94";
+        version = "0.1.12";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "javax.servlet-api/javax.servlet";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "javax.servlet-api";
+        groupId = "javax.servlet";
+        sha512 = "32f7e3565c6cdf3d9a562f8fd597fe5059af0cf6b05b772a144a74bbc95927ac275eb38374538ec1c72adcce4c8e1e2c9f774a7b545db56b8085af0065e4a1e5";
+        version = "3.1.0";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "potemkin/potemkin";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "potemkin";
+        groupId = "potemkin";
+        sha512 = "5abc050bf7ff0b27d8c45aaa5e378201980815b711b2db99735db73304576c17e285026ea48a714bf0b0df7ad7a008de38b7d182cdc0e8989f4be1e6b3afa8aa";
+        version = "0.4.5";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "netty-resolver/io.netty";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "netty-resolver";
+        groupId = "io.netty";
+        sha512 = "fabf893de74264caa1799c15d184ed8f20b7bf9b1c41abb29f29adf728a934951f97892a4924634f9efbda17c8cf74ea3ff97bafca616711e3c5f79b8ed9ef3e";
+        version = "4.1.63.Final";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "netty-transport-native-epoll/io.netty";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "netty-transport-native-epoll";
+        groupId = "io.netty";
+        sha512 = "6fbc2dd2622699f3fc1f329acbd94baf7f1d8923c5cfcae262e6f2d64b4fd71b606561bce5e2b511dff8e052cdade930091fab683fd98713f6b62a622a2c6254";
+        version = "4.1.63.Final";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "clj-stacktrace/clj-stacktrace";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "clj-stacktrace";
+        groupId = "clj-stacktrace";
+        sha512 = "993f8a544203801fc074eefacee8e553e426422b3492d47b857d87ac73cde72c91e29f629382b9eae8cf9600bc2c4c29d2e7169e509c46302ab973c86e73af0c";
+        version = "0.2.8";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "cambium.codec-cheshire/cambium";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "cambium.codec-cheshire";
+        groupId = "cambium";
+        sha512 = "614491cf752a597f29ae29885db6c1ed191341303d89183bee52e4e2c76eb8eb14693562ad09484f379a074b36d97085e848ec3845e069440e6422506c1636f1";
+        version = "1.0.0";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "slf4j-api/org.slf4j";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "slf4j-api";
+        groupId = "org.slf4j";
+        sha512 = "ad705ab6fd5cd904ef6861c0adf08af19593cf6a486b18de548fe3d68e57b1baa7e02947584fd4dcc350ddcddcf906c01e8d9ba7943a202690d0d788627696b5";
+        version = "2.0.0-alpha4";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "test.check/org.clojure";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "test.check";
+        groupId = "org.clojure";
+        sha512 = "b8d7a330b0b5514cd6a00c4382052fab51c3c9d3bc53133f8506791fa670e7c5ecd65094977ea5ced91f59623b0abd1ab8feeec96d63c5c6e459b265a655c577";
+        version = "1.1.1";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "ring-logger/ring-logger";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "ring-logger";
+        groupId = "ring-logger";
+        sha512 = "b675a61c173289fc610d84920ba40178bf62b3bc680923cb66866d78ee2a508296b27a1ab14b66bfbe0304a64166a7e3c3ddee36564dd4a2f988861bce455a3a";
+        version = "1.0.1";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "ring-servlet/ring";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "ring-servlet";
+        groupId = "ring";
+        sha512 = "3d8e6ec224e13d54810a945c0b6c0d2d863736a48d8c4bfc8fadb96b6b0fa9baa638644d0d92d8a53650b188e6e75d391731b08b26eb0f551e90a7504e7f4267";
+        version = "1.9.4";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "logback-classic/ch.qos.logback";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "logback-classic";
+        groupId = "ch.qos.logback";
+        sha512 = "f9fe0f126061f4abe3973b631b8d8244ba9e9d77783479a6500d629d772050dee508a001fc14d2131407fbdd0d33dd6b8aeb9b1ea9125b471bb8412e8de659e6";
+        version = "1.3.0-alpha12";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "dependency/com.stuartsierra";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "dependency";
+        groupId = "com.stuartsierra";
+        sha512 = "d32fbc4813bd16f2ed8c82e2915e1fb564e88422159bd3580a85c8cd969d1bbbe315bdc13d29c2f0eaceeeafcf649ee712c8df4532464d560aaeae4ae5953866";
+        version = "1.0.0";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "camel-snake-kebab/camel-snake-kebab";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "camel-snake-kebab";
+        groupId = "camel-snake-kebab";
+        sha512 = "589d34b500560b7113760a16bfb6f0ccd8f162a1ce8c9bc829495432159ba9c95aebf6bc43aa126237a0525806a205a05f9910122074902b659e7fd151d176b1";
+        version = "0.4.2";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "ring/ring";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "ring";
+        groupId = "ring";
+        sha512 = "93c48fb670736b91fb41d8076e1e9c4f53c67693d15e75290da319e7d7881b829a24180029b3a0fa051473c6c77ac3c97b519254ebf2b2c9538b185e79b69162";
+        version = "1.9.4";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "netty-transport-native-kqueue/io.netty";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "netty-transport-native-kqueue";
+        groupId = "io.netty";
+        sha512 = "87e10c06e394a1698d65381d3be8336f753c55e3e899e297510161d0c72540023f30f9032322957e035ead793204a084b988bc21a2bc312fcf7567a22d02a3c4";
+        version = "4.1.63.Final";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "java.data/org.clojure";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "java.data";
+        groupId = "org.clojure";
+        sha512 = "225e1eafd1a659278212d831f7cd8609359f8c880ef3d69b4ade6301ce3c511307ce31d94cb82d5407314b990bd04714ec26273bb3036b248116a7a75fa75e1f";
+        version = "1.0.95";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "jetty-server/org.eclipse.jetty";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "jetty-server";
+        groupId = "org.eclipse.jetty";
+        sha512 = "b347f8a6e5b84e0f460037027e238a61edec710ade768c95e7be13dcea498abe43d5e622ee69ac7494138d1a8fcf92e07b7deab569c554831c57baad71c53b9b";
+        version = "9.4.42.v20210604";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "httpmime/org.apache.httpcomponents";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "httpmime";
+        groupId = "org.apache.httpcomponents";
+        sha512 = "e1b0ee84bce78576074dc1b6836a69d8f5518eade38562e6890e3ddaa72b7f54bf735c8e2286142c58cddf45f745da31261e5d73b7d8092eb6ecfb20946eb36c";
+        version = "4.5.13";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "log4j-over-slf4j/org.slf4j";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "log4j-over-slf4j";
+        groupId = "org.slf4j";
+        sha512 = "48fa023c57294b73b9bd2f53e3dd3169e03426e5b3aa9d80e1bb1a9abf927fc26ef9f64d02b9769d5577d83094d0f41f044d35bb3b4f6037d66d6b2f19b484a1";
+        version = "2.0.0-alpha4";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "ring-core/ring";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "ring-core";
+        groupId = "ring";
+        sha512 = "38d7214a3fc1b80ab55999036638dd1971272e01bec4cb8e0ee0a4aa83f51b8c41ba8a5850b0660227f067d2f9c6d75c0c0737725ea02762bbf8d192dc72febe";
+        version = "1.9.4";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "cambium.core/cambium";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "cambium.core";
+        groupId = "cambium";
+        sha512 = "0e1fe626c6d0b31aad84ea2e4466273065925548ee5915f442b7997ebfe795faea36dbeac50a0f8c16bbd20d877511e3f8c4ff4f2b916a4538513aaa5cc20112";
+        version = "1.1.1";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "medley/medley";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "medley";
+        groupId = "medley";
+        sha512 = "749ef43b5ea2cae7dc96db871cdd15c7b3c9cfbd96828c20ab08e67d39a5e938357d15994d8d413bc68678285d6c666f2a7296fbf305706d03b3007254e3c55c";
+        version = "1.3.0";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "garden/garden";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "garden";
+        groupId = "garden";
+        sha512 = "2cc29f071b68bf451835f76de351ac2efb930b5df9ca7237fdca439d3c4d797d7fa207a147886efe1738ab1c50b76c1e366bf9ffcd6f286b0b211260aedd0b25";
+        version = "1.3.10";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "jackson-dataformat-smile/com.fasterxml.jackson.dataformat";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "jackson-dataformat-smile";
+        groupId = "com.fasterxml.jackson.dataformat";
+        sha512 = "69676964a2b09516b8ffd0d847b6f9a9b843424185453731b548c25e7e9ce30e808c56d66923f9183e2b5c1ba007421b146a6806e768b8e6b07470d60227f1dd";
+        version = "2.12.4";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "jaxb-api/javax.xml.bind";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "jaxb-api";
+        groupId = "javax.xml.bind";
+        sha512 = "0c5bfc2c9f655bf5e6d596e0c196dcb9344d6dc78bf774207c8f8b6be59f69addf2b3121e81491983eff648dfbd55002b9878132de190825dad3ef3a1265b367";
+        version = "2.3.0";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "pgjdbc-ng/com.impossibl.pgjdbc-ng";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "pgjdbc-ng";
+        groupId = "com.impossibl.pgjdbc-ng";
+        sha512 = "a34ac9146257329f6e9b354f13f564c65dbea6463addae383e3918d3a64c90c67f5f7fda6b5c3866de991a568d6690edb3fb09f2507593390a6e30ec0c79e02c";
+        version = "0.8.9";
+
+      };
+      paths = [ src ];
+    }
+
+    rec {
+      name = "http-kit/http-kit";
+      src = fetchMavenArtifact {
+        inherit repos;
+        artifactId = "http-kit";
+        groupId = "http-kit";
+        sha512 = "4186a2429984745e18730aa8fd545f1fc1812083819ebf77aecfc04e0d31585358a5e25a308c7f21d81359418bbc72390c281f5ed91ae116cf1af79860ba22c3";
+        version = "2.5.3";
+
+      };
+      paths = [ src ];
+    }
 
   ];
-  }
+}
   
\ No newline at end of file
diff --git a/users/grfn/bbbg/module.nix b/users/grfn/bbbg/module.nix
index 297d846f22b6..7a49f7934a37 100644
--- a/users/grfn/bbbg/module.nix
+++ b/users/grfn/bbbg/module.nix
@@ -3,7 +3,8 @@
 let
   bbbg = depot.users.grfn.bbbg;
   cfg = config.services.bbbg;
-in {
+in
+{
   options = with lib; {
     services.bbbg = {
       enable = mkEnableOption "BBBG Server";
@@ -81,9 +82,9 @@ in {
         description = "Run database migrations for BBBG";
         wantedBy = [ "bbbg-server.service" ];
         after = ([ "network.target" ]
-                 ++ (if cfg.database.enable
-                     then ["postgresql.service"]
-                     else []));
+          ++ (if cfg.database.enable
+        then [ "postgresql.service" ]
+        else [ ]));
 
         serviceConfig = {
           Type = "oneshot";
diff --git a/users/grfn/bbbg/shell.nix b/users/grfn/bbbg/shell.nix
index 48bcd73759d0..e26569657f07 100644
--- a/users/grfn/bbbg/shell.nix
+++ b/users/grfn/bbbg/shell.nix
@@ -1,5 +1,5 @@
 let
- depot = import ../../.. {};
+  depot = import ../../.. { };
 in
 with depot.third_party.nixpkgs;
 
diff --git a/users/grfn/bbbg/tf.nix b/users/grfn/bbbg/tf.nix
index 097ad595797b..d5b19d9ebc88 100644
--- a/users/grfn/bbbg/tf.nix
+++ b/users/grfn/bbbg/tf.nix
@@ -3,14 +3,17 @@
 let
   inherit (depot.users.grfn)
     terraform
-  ;
+    ;
 
-in terraform.workspace "bbbg" {
+in
+terraform.workspace "bbbg"
+{
   plugins = (p: with p; [
     aws
     cloudflare
   ]);
-} {
+}
+{
   machine = terraform.nixosMachine {
     name = "bbbg";
     instanceType = "t3a.small";
@@ -52,8 +55,8 @@ in terraform.workspace "bbbg" {
       };
 
       security.sudo.extraRules = [{
-        groups = ["wheel"];
-        commands = [{ command = "ALL"; options = ["NOPASSWD"]; }];
+        groups = [ "wheel" ];
+        commands = [{ command = "ALL"; options = [ "NOPASSWD" ]; }];
       }];
 
       nix.gc = {
diff --git a/users/grfn/gws.fyi/default.nix b/users/grfn/gws.fyi/default.nix
index 5b7d8fc0e19a..5ab3614d7902 100644
--- a/users/grfn/gws.fyi/default.nix
+++ b/users/grfn/gws.fyi/default.nix
@@ -6,13 +6,14 @@ let
   bucket = "s3://gws.fyi";
   distributionID = "E2ST43JNBH8C64";
 
-  css = runCommand "main.css" {
-    buildInputs = [ pkgs.minify ];
-  } ''
+  css = runCommand "main.css"
+    {
+      buildInputs = [ pkgs.minify ];
+    } ''
     minify --type css < ${./main.css} > $out
   '';
 
-  keys = runCommand "ssh-keys" {} ''
+  keys = runCommand "ssh-keys" { } ''
     touch $out
     echo "${depot.users.grfn.keys.main}" >> $out
   '';
@@ -27,7 +28,8 @@ let
       cp ${keys} $out/keys
     '';
 
-in (writeShellScript "deploy.sh" ''
+in
+(writeShellScript "deploy.sh" ''
   ${awscli2}/bin/aws --profile personal s3 sync ${website}/ ${bucket}
   echo "Deployed to http://gws.fyi"
 '') // {
diff --git a/users/grfn/gws.fyi/orgExportHTML.nix b/users/grfn/gws.fyi/orgExportHTML.nix
index ac28580a5926..f9cb8d38fb77 100644
--- a/users/grfn/gws.fyi/orgExportHTML.nix
+++ b/users/grfn/gws.fyi/orgExportHTML.nix
@@ -25,15 +25,16 @@ let
   outName =
     if isNull headline
     then
-      let bn = builtins.baseNameOf src;
-          filename = elemAt (splitString "." bn) 0;
+      let
+        bn = builtins.baseNameOf src;
+        filename = elemAt (splitString "." bn) 0;
       in
-        if depot.nix.utils.isDirectory src
-        then filename
-        else filename + ".html"
+      if depot.nix.utils.isDirectory src
+      then filename
+      else filename + ".html"
     else "${filename}-${replaceStrings [" "] ["-"] filename}.html";
 
-  escapeDoubleQuotes = replaceStrings ["\""] ["\\\""];
+  escapeDoubleQuotes = replaceStrings [ "\"" ] [ "\\\"" ];
 
   navToHeadline = optionalString (! isNull headline) ''
     (search-forward "${escapeDoubleQuotes headline}")
diff --git a/users/grfn/gws.fyi/site.nix b/users/grfn/gws.fyi/site.nix
index 33d4a71e7b1f..057c4d3ee698 100644
--- a/users/grfn/gws.fyi/site.nix
+++ b/users/grfn/gws.fyi/site.nix
@@ -1,4 +1,4 @@
-args@{ pkgs ? import <nixpkgs> {}, ... }:
+args@{ pkgs ? import <nixpkgs> { }, ... }:
 
 let
 
diff --git a/users/grfn/resume/default.nix b/users/grfn/resume/default.nix
index 2db6a650bc6e..21801ad9e7a8 100644
--- a/users/grfn/resume/default.nix
+++ b/users/grfn/resume/default.nix
@@ -2,31 +2,34 @@
 
 with pkgs.lib;
 
-pkgs.runCommandNoCC "resume.pdf" {
-  buildInputs = [(pkgs.texlive.combine {
-    inherit (pkgs.texlive)
-      capt-of
-      collection-fontsrecommended
-      enumitem
-      etoolbox
-      fancyvrb
-      float
-      fncychap
-      framed
-      l3packages
-      microtype
-      needspace
-      parskip
-      scheme-basic
-      tabulary
-      titlesec
-      ulem
-      upquote
-      varwidth
-      wrapfig
-      xcolor
-      ;
-  })];
+pkgs.runCommandNoCC "resume.pdf"
+{
+  buildInputs = [
+    (pkgs.texlive.combine {
+      inherit (pkgs.texlive)
+        capt-of
+        collection-fontsrecommended
+        enumitem
+        etoolbox
+        fancyvrb
+        float
+        fncychap
+        framed
+        l3packages
+        microtype
+        needspace
+        parskip
+        scheme-basic
+        tabulary
+        titlesec
+        ulem
+        upquote
+        varwidth
+        wrapfig
+        xcolor
+        ;
+    })
+  ];
 } ''
   cp ${builtins.filterSource (path: type:
     type == "regular" &&
diff --git a/users/grfn/secrets/shell.nix b/users/grfn/secrets/shell.nix
index fe912fe791ea..6e70458d1972 100644
--- a/users/grfn/secrets/shell.nix
+++ b/users/grfn/secrets/shell.nix
@@ -1,5 +1,5 @@
 let
-  depot = import ../../.. {};
+  depot = import ../../.. { };
 in
 depot.third_party.nixpkgs.mkShell {
   buildInputs = [
diff --git a/users/grfn/system/home/common/solarized.nix b/users/grfn/system/home/common/solarized.nix
index e94693edc566..554ee0523e46 100644
--- a/users/grfn/system/home/common/solarized.nix
+++ b/users/grfn/system/home/common/solarized.nix
@@ -1,18 +1,18 @@
 rec {
-  base03  = "#002B36";
-  base02  = "#073642";
-  base01  = "#586e75";
-  base00  = "#657b83";
-  base0   = "#839496";
-  base1   = "#93a1a1";
-  base2   = "#eee8d5";
-  base3   = "#fdf6e3";
-  yellow  = "#b58900";
-  orange  = "#cb4b16";
-  red     = "#dc322f";
+  base03 = "#002B36";
+  base02 = "#073642";
+  base01 = "#586e75";
+  base00 = "#657b83";
+  base0 = "#839496";
+  base1 = "#93a1a1";
+  base2 = "#eee8d5";
+  base3 = "#fdf6e3";
+  yellow = "#b58900";
+  orange = "#cb4b16";
+  red = "#dc322f";
   magenta = "#d33682";
-  violet  = "#6c71c4";
-  blue    = "#268bd2";
-  cyan    = "#2aa198";
-  green   = "#859900";
+  violet = "#6c71c4";
+  blue = "#268bd2";
+  cyan = "#2aa198";
+  green = "#859900";
 }
diff --git a/users/grfn/system/home/machines/roswell.nix b/users/grfn/system/home/machines/roswell.nix
index f68f3a7be36f..2e86eda4992d 100644
--- a/users/grfn/system/home/machines/roswell.nix
+++ b/users/grfn/system/home/machines/roswell.nix
@@ -19,7 +19,8 @@ in
     htop
     killall
     bind
-    zip unzip
+    zip
+    unzip
     tree
     ncat
     bc
diff --git a/users/grfn/system/home/machines/yeren.nix b/users/grfn/system/home/machines/yeren.nix
index 0c89e9d12393..343ebf2c9a09 100644
--- a/users/grfn/system/home/machines/yeren.nix
+++ b/users/grfn/system/home/machines/yeren.nix
@@ -58,10 +58,10 @@ in
       RemainAfterExit = true;
       ExecStart = (
         "${pkgs.xorg.setxkbmap}/bin/setxkbmap "
-          + "-device ${laptopKeyboardId} "
-          + "-option caps:swapescape "
-          + "-option compose:ralt "
-          + "-option altwin:swap_alt_win"
+        + "-device ${laptopKeyboardId} "
+        + "-option caps:swapescape "
+        + "-option compose:ralt "
+        + "-option altwin:swap_alt_win"
       );
     };
   };
diff --git a/users/grfn/system/home/modules/alsi.nix b/users/grfn/system/home/modules/alsi.nix
index c4a14e683c39..204f9c8e1428 100644
--- a/users/grfn/system/home/modules/alsi.nix
+++ b/users/grfn/system/home/modules/alsi.nix
@@ -10,30 +10,30 @@
   xdg.configFile."alsi/alsi.conf" = {
     force = true;
     text = ''
-    #!${pkgs.perl}/bin/perl
+      #!${pkgs.perl}/bin/perl
 
-    scalar {
-      ALSI_VERSION         => "0.4.8",
-      COLORS_FILE          => "/home/grfn/.config/alsi/alsi.colors",
-      DE_FILE              => "/home/grfn/.config/alsi/alsi.de",
-      DEFAULT_COLOR_BOLD   => "blue",
-      DEFAULT_COLOR_NORMAL => "blue",
-      DF_COMMAND           => "df -Th -x sys -x tmpfs -x devtmpfs &>/dev/stdout",
-      GTK2_RC_FILE         => "/home/grfn/.gtkrc-2.0",
-      GTK3_RC_FILE         => "/home/grfn/.config/gtk-3.0/settings.ini",
-      LOGO_FILE            => "/home/grfn/.config/alsi/alsi.logo",
-      OUTPUT_FILE          => "/home/grfn/.config/alsi/alsi.output",
-      # PACKAGES_PATH      => "/var/lib/pacman/local/",
-      PS_COMMAND           => "ps -A",
-      USAGE_COLORS         => 0,
-      USAGE_COLORS_BOLD    => 0,
-      USAGE_PRECENT_GREEN  => 50,
-      USAGE_PRECENT_RED    => 100,
-      USAGE_PRECENT_YELLOW => 85,
-      USE_LOGO_FROM_FILE   => 1,
-      USE_VALUES_COLOR     => 0,
-      WM_FILE              => "/home/grfn/.config/alsi/alsi.wm",
-    }
+      scalar {
+        ALSI_VERSION         => "0.4.8",
+        COLORS_FILE          => "/home/grfn/.config/alsi/alsi.colors",
+        DE_FILE              => "/home/grfn/.config/alsi/alsi.de",
+        DEFAULT_COLOR_BOLD   => "blue",
+        DEFAULT_COLOR_NORMAL => "blue",
+        DF_COMMAND           => "df -Th -x sys -x tmpfs -x devtmpfs &>/dev/stdout",
+        GTK2_RC_FILE         => "/home/grfn/.gtkrc-2.0",
+        GTK3_RC_FILE         => "/home/grfn/.config/gtk-3.0/settings.ini",
+        LOGO_FILE            => "/home/grfn/.config/alsi/alsi.logo",
+        OUTPUT_FILE          => "/home/grfn/.config/alsi/alsi.output",
+        # PACKAGES_PATH      => "/var/lib/pacman/local/",
+        PS_COMMAND           => "ps -A",
+        USAGE_COLORS         => 0,
+        USAGE_COLORS_BOLD    => 0,
+        USAGE_PRECENT_GREEN  => 50,
+        USAGE_PRECENT_RED    => 100,
+        USAGE_PRECENT_YELLOW => 85,
+        USE_LOGO_FROM_FILE   => 1,
+        USE_VALUES_COLOR     => 0,
+        WM_FILE              => "/home/grfn/.config/alsi/alsi.wm",
+      }
     '';
   };
 
diff --git a/users/grfn/system/home/modules/common.nix b/users/grfn/system/home/modules/common.nix
index 6871cadda1cf..a31e1718192e 100644
--- a/users/grfn/system/home/modules/common.nix
+++ b/users/grfn/system/home/modules/common.nix
@@ -28,7 +28,8 @@
     htop
     killall
     bind
-    zip unzip
+    zip
+    unzip
     tree
     ncat
     bc
diff --git a/users/grfn/system/home/modules/development.nix b/users/grfn/system/home/modules/development.nix
index 653ce5d83ae1..d60e6ba60759 100644
--- a/users/grfn/system/home/modules/development.nix
+++ b/users/grfn/system/home/modules/development.nix
@@ -2,12 +2,14 @@
 
 let
 
-  clj2nix = pkgs.callPackage (pkgs.fetchFromGitHub {
-    owner = "hlolli";
-    repo = "clj2nix";
-    rev = "3ab3480a25e850b35d1f532a5e4e7b3202232383";
-    sha256 = "1lry026mlpxp1j563qs13nhxf37i2zpl7lh0lgfdwc44afybqka6";
-  }) {};
+  clj2nix = pkgs.callPackage
+    (pkgs.fetchFromGitHub {
+      owner = "hlolli";
+      repo = "clj2nix";
+      rev = "3ab3480a25e850b35d1f532a5e4e7b3202232383";
+      sha256 = "1lry026mlpxp1j563qs13nhxf37i2zpl7lh0lgfdwc44afybqka6";
+    })
+    { };
 
   pg-dump-upsert = pkgs.buildGoModule rec {
     pname = "pg-dump-upsert";
@@ -87,7 +89,7 @@ with lib;
     enable = true;
     package = pkgs.gitFull;
     userEmail = "root@gws.fyi";
-    userName  = "Griffin Smith";
+    userName = "Griffin Smith";
     ignores = [
       "*.sw*"
       ".classpath"
@@ -206,7 +208,7 @@ with lib;
 
     functions = {
       gdelmerged = ''
-      git branch --merged | egrep -v 'master' | tr -d '+ ' | xargs git branch -d
+        git branch --merged | egrep -v 'master' | tr -d '+ ' | xargs git branch -d
       '';
     };
   };
diff --git a/users/grfn/system/home/modules/development/kube.nix b/users/grfn/system/home/modules/development/kube.nix
index 97ae4760d43b..876b0c08df1d 100644
--- a/users/grfn/system/home/modules/development/kube.nix
+++ b/users/grfn/system/home/modules/development/kube.nix
@@ -16,7 +16,7 @@
     "kpa" = "kubectl get pods --all-namespaces";
     "klf" = "kubectl logs -f";
     "kdep" = "kubectl get deployments";
-    "ked" =  "kubectl edit deployment";
+    "ked" = "kubectl edit deployment";
     "kpw" = "kubectl get pods -w";
     "kew" = "kubectl get events -w";
     "kdel" = "kubectl delete";
diff --git a/users/grfn/system/home/modules/emacs.nix b/users/grfn/system/home/modules/emacs.nix
index 6cc38bc7ab05..c65d88aacf6f 100644
--- a/users/grfn/system/home/modules/emacs.nix
+++ b/users/grfn/system/home/modules/emacs.nix
@@ -3,16 +3,17 @@
 with lib;
 
 let
- # doom-emacs = pkgs.callPackage (builtins.fetchTarball {
- #   url = https://github.com/vlaci/nix-doom-emacs/archive/master.tar.gz;
- # }) {
- #   doomPrivateDir = ./doom.d;  # Directory containing your config.el init.el
- #                               # and packages.el files
- # };
+  # doom-emacs = pkgs.callPackage (builtins.fetchTarball {
+  #   url = https://github.com/vlaci/nix-doom-emacs/archive/master.tar.gz;
+  # }) {
+  #   doomPrivateDir = ./doom.d;  # Directory containing your config.el init.el
+  #                               # and packages.el files
+  # };
 
   depot = config.lib.depot;
 
-in {
+in
+{
   imports = [
     ./lib/cloneRepo.nix
   ];
@@ -47,7 +48,7 @@ in {
             upquote
             varwidth
             wrapfig
-          ;
+            ;
         })
 
         ispell
@@ -82,14 +83,14 @@ in {
         doomEmacs = {
           github = "hlissner/doom-emacs";
           path = ".emacs.d";
-          after = ["emacs.d"];
+          after = [ "emacs.d" ];
           onClone = "bin/doom install";
         };
 
         "emacs.d" = {
           github = "glittershark/emacs.d";
           path = ".doom.d";
-          after = ["orgClubhouse"];
+          after = [ "orgClubhouse" ];
         };
       };
 
diff --git a/users/grfn/system/home/modules/email.nix b/users/grfn/system/home/modules/email.nix
index 0a3e58205ba6..63dfeeb6f480 100644
--- a/users/grfn/system/home/modules/email.nix
+++ b/users/grfn/system/home/modules/email.nix
@@ -9,7 +9,8 @@ let
     let
       good = upperChars ++ lowerChars ++ stringToCharacters "0123456789-_";
       subst = c: if any (x: x == c) good then c else "-";
-    in stringAsChars subst name;
+    in
+    stringAsChars subst name;
 
   accounts = {
     personal = {
@@ -26,7 +27,8 @@ let
 
   };
 
-in {
+in
+{
   programs.lieer.enable = true;
   programs.notmuch.enable = true;
   services.lieer.enable = true;
@@ -37,16 +39,18 @@ in {
     msmtp
   ];
 
-  systemd.user.services = mapAttrs' (name: account: {
-    name = escapeUnitName "lieer-${name}";
-    value.Service = {
-      ExecStart = mkForce "${pkgs.writeShellScript "sync-${name}" ''
+  systemd.user.services = mapAttrs'
+    (name: account: {
+      name = escapeUnitName "lieer-${name}";
+      value.Service = {
+        ExecStart = mkForce "${pkgs.writeShellScript "sync-${name}" ''
         ${pkgs.gmailieer}/bin/gmi sync --path ~/mail/${name}
       ''}";
-      Environment = "NOTMUCH_CONFIG=${config.home.sessionVariables.NOTMUCH_CONFIG}";
-    };
+        Environment = "NOTMUCH_CONFIG=${config.home.sessionVariables.NOTMUCH_CONFIG}";
+      };
 
-  }) accounts;
+    })
+    accounts;
 
   # xdg.configFile."notifymuch/notifymuch.cfg".text = generators.toINI {} {
   #   notifymuch = {
@@ -58,30 +62,32 @@ in {
   # };
 
   accounts.email.maildirBasePath = "mail";
-  accounts.email.accounts = mapAttrs (_: params@{ passEntry, ... }: {
-    realName = "Griffin Smith";
-    passwordCommand = "pass ${passEntry}";
+  accounts.email.accounts = mapAttrs
+    (_: params@{ passEntry, ... }: {
+      realName = "Griffin Smith";
+      passwordCommand = "pass ${passEntry}";
 
-    flavor = "gmail.com";
+      flavor = "gmail.com";
 
-    imapnotify = {
-      enable = true;
-      boxes = [ "Inbox" ];
-    };
+      imapnotify = {
+        enable = true;
+        boxes = [ "Inbox" ];
+      };
 
-    gpg = {
-      key = "0F11A989879E8BBBFDC1E23644EF5B5E861C09A7";
-      signByDefault = true;
-    };
+      gpg = {
+        key = "0F11A989879E8BBBFDC1E23644EF5B5E861C09A7";
+        signByDefault = true;
+      };
 
-    notmuch.enable = true;
-    lieer = {
-      enable = true;
-      sync = {
+      notmuch.enable = true;
+      lieer = {
         enable = true;
-        frequency = "*:*";
+        sync = {
+          enable = true;
+          frequency = "*:*";
+        };
       };
-    };
-    msmtp.enable = true;
-  } // builtins.removeAttrs params ["passEntry"]) accounts;
+      msmtp.enable = true;
+    } // builtins.removeAttrs params [ "passEntry" ])
+    accounts;
 }
diff --git a/users/grfn/system/home/modules/games.nix b/users/grfn/system/home/modules/games.nix
index 26dc9d31f37a..8067caf65fb1 100644
--- a/users/grfn/system/home/modules/games.nix
+++ b/users/grfn/system/home/modules/games.nix
@@ -13,14 +13,14 @@ let
     enableFPS = true;
   });
 
-  init = runCommand "init.txt" {} ''
+  init = runCommand "init.txt" { } ''
     substitute "${df-orig}/data/init/init.txt" $out \
       --replace "[INTRO:YES]" "[INTRO:NO]" \
       --replace "[VOLUME:255]" "[VOLUME:0]" \
       --replace "[FPS:NO]" "[FPS:YES]"
   '';
 
-  d_init = runCommand "d_init.txt" {} ''
+  d_init = runCommand "d_init.txt" { } ''
     substitute "${df-orig}/data/init/d_init.txt" $out \
       --replace "[AUTOSAVE:NONE]" "[AUTOSAVE:SEASONAL]" \
       --replace "[AUTOSAVE_PAUSE:NO]" "[AUTOSAVE_PAUSE:YES]" \
@@ -30,7 +30,7 @@ let
       --replace "[SHOW_FLOW_AMOUNTS:NO]" "[SHOW_FLOW_AMOUNTS:YES]"
   '';
 
-  df = runCommand "dwarf-fortress" {} ''
+  df = runCommand "dwarf-fortress" { } ''
     mkdir -p $out/bin
     sed \
       -e '4icp -f ${init} "$DF_DIR/data/init/init.txt"' \
@@ -43,7 +43,8 @@ let
     chmod +x $out/bin/dwarf-fortress
   '';
 
-in mkMerge [
+in
+mkMerge [
   {
     home.packages = [
       crawl
diff --git a/users/grfn/system/home/modules/i3.nix b/users/grfn/system/home/modules/i3.nix
index 111f2a08c225..7bd371e117d1 100644
--- a/users/grfn/system/home/modules/i3.nix
+++ b/users/grfn/system/home/modules/i3.nix
@@ -20,7 +20,8 @@ let
 
   inherit (builtins) map;
   inherit (lib) mkMerge range;
-in {
+in
+{
   options = with lib; {
     system.machine.wirelessInterface = mkOption {
       description = ''
@@ -77,50 +78,51 @@ in {
                     "move container to workspace ${toString n}";
                 })
                 (range 0 9))
-            ++ [(rec {
-              "${mod}+h" = "focus left";
-              "${mod}+j" = "focus down";
-              "${mod}+k" = "focus up";
-              "${mod}+l" = "focus right";
-              "${mod}+semicolon" = "focus parent";
+              ++ [
+                (rec {
+                  "${mod}+h" = "focus left";
+                  "${mod}+j" = "focus down";
+                  "${mod}+k" = "focus up";
+                  "${mod}+l" = "focus right";
+                  "${mod}+semicolon" = "focus parent";
 
-              "${mod}+Shift+h" = "move left";
-              "${mod}+Shift+j" = "move down";
-              "${mod}+Shift+k" = "move up";
-              "${mod}+Shift+l" = "move right";
+                  "${mod}+Shift+h" = "move left";
+                  "${mod}+Shift+j" = "move down";
+                  "${mod}+Shift+k" = "move up";
+                  "${mod}+Shift+l" = "move right";
 
-              "${mod}+Shift+x" = "kill";
+                  "${mod}+Shift+x" = "kill";
 
-              "${mod}+Return" = "exec alacritty";
+                  "${mod}+Return" = "exec alacritty";
 
-              "${mod}+Shift+s" = "split h";
-              "${mod}+Shift+v" = "split v";
-              "${mod}+e" = "layout toggle split";
-              "${mod}+w" = "layout tabbed";
-              "${mod}+s" = "layout stacking";
+                  "${mod}+Shift+s" = "split h";
+                  "${mod}+Shift+v" = "split v";
+                  "${mod}+e" = "layout toggle split";
+                  "${mod}+w" = "layout tabbed";
+                  "${mod}+s" = "layout stacking";
 
-              "${mod}+f" = "fullscreen";
+                  "${mod}+f" = "fullscreen";
 
-              "${mod}+Shift+r" = "restart";
+                  "${mod}+Shift+r" = "restart";
 
-              "${mod}+r" = "mode resize";
+                  "${mod}+r" = "mode resize";
 
-              # Marks
-              "${mod}+Shift+m" = ''exec i3-input -F "mark %s" -l 1 -P 'Mark: ' '';
-              "${mod}+m" = ''exec i3-input -F '[con_mark="%s"] focus' -l 1 -P 'Go to: ' '';
+                  # Marks
+                  "${mod}+Shift+m" = ''exec i3-input -F "mark %s" -l 1 -P 'Mark: ' '';
+                  "${mod}+m" = ''exec i3-input -F '[con_mark="%s"] focus' -l 1 -P 'Go to: ' '';
 
-              # Screenshots
-              "${mod}+q" = "exec \"maim | xclip -selection clipboard -t image/png\"";
-              "${mod}+Shift+q" = "exec \"maim -s | xclip -selection clipboard -t image/png\"";
-              "${mod}+Ctrl+q" = "exec ${pkgs.writeShellScript "peek.sh" ''
+                  # Screenshots
+                  "${mod}+q" = "exec \"maim | xclip -selection clipboard -t image/png\"";
+                  "${mod}+Shift+q" = "exec \"maim -s | xclip -selection clipboard -t image/png\"";
+                  "${mod}+Ctrl+q" = "exec ${pkgs.writeShellScript "peek.sh" ''
               ${pkgs.picom}/bin/picom &
               picom_pid=$!
               ${pkgs.peek}/bin/peek || true
               kill -SIGINT $picom_pid
             ''}";
 
-              # Launching applications
-              "${mod}+u" = "exec ${pkgs.writeShellScript "rofi" ''
+                  # Launching applications
+                  "${mod}+u" = "exec ${pkgs.writeShellScript "rofi" ''
               rofi \
                 -modi 'combi' \
                 -combi-modi "window,drun,ssh,run" \
@@ -128,49 +130,51 @@ in {
                 -show combi
             ''}";
 
-              # Passwords
-              "${mod}+p" = "exec rofi-pass -font '${decorationFont}'";
-
-              # Media
-              "XF86AudioPlay" = "exec playerctl play-pause";
-              "XF86AudioNext" = "exec playerctl next";
-              "XF86AudioPrev" = "exec playerctl previous";
-              "XF86AudioRaiseVolume" = "exec pulseaudio-ctl up";
-              "XF86AudioLowerVolume" = "exec pulseaudio-ctl down";
-              "XF86AudioMute" = "exec pulseaudio-ctl mute";
-
-              # Lock
-              Pause = "exec lock";
-
-              # Brightness
-              "XF86MonBrightnessDown" = "exec ${pkgs.brightnessctl}/bin/brightnessctl -q s 5%-";
-              "XF86MonBrightnessUp" = "exec ${pkgs.brightnessctl}/bin/brightnessctl -q s 5%+";
-
-              # Sleep/hibernate
-              # "${mod}+Escape" = "exec systemctl suspend";
-              # "${mod}+Shift+Escape" = "exec systemctl hibernate";
-
-              # Scratch buffer
-              "${mod}+minus" = "scratchpad show";
-              "${mod}+Shift+minus" = "move scratchpad";
-              "${mod}+space" = "focus mode_toggle";
-              "${mod}+Shift+space" = "floating toggle";
-
-              # Screen Layout
-              "${mod}+Shift+t" = "exec xrandr --auto";
-              "${mod}+t" = "exec ${screenlayout.home}";
-              "${mod}+Ctrl+t" = "exec ${pkgs.writeShellScript "fix_term.sh" ''
+                  # Passwords
+                  "${mod}+p" = "exec rofi-pass -font '${decorationFont}'";
+
+                  # Media
+                  "XF86AudioPlay" = "exec playerctl play-pause";
+                  "XF86AudioNext" = "exec playerctl next";
+                  "XF86AudioPrev" = "exec playerctl previous";
+                  "XF86AudioRaiseVolume" = "exec pulseaudio-ctl up";
+                  "XF86AudioLowerVolume" = "exec pulseaudio-ctl down";
+                  "XF86AudioMute" = "exec pulseaudio-ctl mute";
+
+                  # Lock
+                  Pause = "exec lock";
+
+                  # Brightness
+                  "XF86MonBrightnessDown" = "exec ${pkgs.brightnessctl}/bin/brightnessctl -q s 5%-";
+                  "XF86MonBrightnessUp" = "exec ${pkgs.brightnessctl}/bin/brightnessctl -q s 5%+";
+
+                  # Sleep/hibernate
+                  # "${mod}+Escape" = "exec systemctl suspend";
+                  # "${mod}+Shift+Escape" = "exec systemctl hibernate";
+
+                  # Scratch buffer
+                  "${mod}+minus" = "scratchpad show";
+                  "${mod}+Shift+minus" = "move scratchpad";
+                  "${mod}+space" = "focus mode_toggle";
+                  "${mod}+Shift+space" = "floating toggle";
+
+                  # Screen Layout
+                  "${mod}+Shift+t" = "exec xrandr --auto";
+                  "${mod}+t" = "exec ${screenlayout.home}";
+                  "${mod}+Ctrl+t" = "exec ${pkgs.writeShellScript "fix_term.sh" ''
               xrandr --output eDP-1 --off && ${screenlayout.home}
             ''}";
 
-              # Notifications
-              "${mod}+Shift+n" = "exec killall -SIGUSR1 .dunst-wrapped";
-              "${mod}+n" = "exec killall -SIGUSR2 .dunst-wrapped";
-              "Control+space" = "exec ${pkgs.dunst}/bin/dunstctl close";
-              "Control+Shift+space" = "exec ${pkgs.dunst}/bin/dunstctl close-all";
-              "Control+grave" = "exec ${pkgs.dunst}/bin/dunstctl history-pop";
-              "Control+Shift+period" = "exec ${pkgs.dunst}/bin/dunstctl action";
-            })]);
+                  # Notifications
+                  "${mod}+Shift+n" = "exec killall -SIGUSR1 .dunst-wrapped";
+                  "${mod}+n" = "exec killall -SIGUSR2 .dunst-wrapped";
+                  "Control+space" = "exec ${pkgs.dunst}/bin/dunstctl close";
+                  "Control+Shift+space" = "exec ${pkgs.dunst}/bin/dunstctl close-all";
+                  "Control+grave" = "exec ${pkgs.dunst}/bin/dunstctl history-pop";
+                  "Control+Shift+period" = "exec ${pkgs.dunst}/bin/dunstctl action";
+                })
+              ]
+            );
 
           fonts = [ decorationFont ];
 
@@ -202,94 +206,96 @@ in {
 
           bars = [{
             statusCommand =
-              let i3status-conf = pkgs.writeText "i3status.conf" ''
-              general {
-                  output_format = i3bar
-                  colors = true
-                  color_good = "#859900"
-
-                  interval = 1
-              }
-
-              order += "external_script current_task"
-              order += "external_script inbox"
-              order += "spotify"
-              order += "volume_status"
-              order += "wireless ${config.system.machine.wirelessInterface}"
-              # order += "ethernet enp3s0f0"
-              order += "cpu_usage"
-              order += "battery 0"
-              # order += "volume master"
-              order += "time"
-              order += "tztime utc"
-
-              mpd {
-                  format = "%artist - %album - %title"
-              }
-
-              wireless ${config.system.machine.wirelessInterface} {
-                  format_up = "W: (%quality - %essid - %bitrate) %ip"
-                  format_down = "W: -"
-              }
-
-              ethernet enp3s0f0 {
-                  format_up = "E: %ip"
-                  format_down = "E: -"
-              }
-
-              battery 0 {
-                  format = "%status %percentage"
-                  path = "/sys/class/power_supply/BAT%d/uevent"
-                  low_threshold = 10
-              }
-
-              cpu_usage {
-                  format = "CPU: %usage"
-              }
-
-              load {
-                  format = "%5min"
-              }
-
-              time {
-                  format = "    %a %h %d ⌚   %I:%M     "
-              }
-
-              spotify {
-                  color_playing = "#fdf6e3"
-                  color_paused = "#93a1a1"
-                  format_stopped = ""
-                  format_down = ""
-                  format = "{title} - {artist} ({album})"
-              }
-
-              external_script inbox {
-                  script_path = '${emacsclient "(grfn/num-inbox-items-message)"}'
-                  format = 'Inbox: {output}'
-                  cache_timeout = 120
-                  color = "#93a1a1"
-              }
-
-              external_script current_task {
-                  script_path = '${emacsclient "(grfn/org-current-clocked-in-task-message)"}'
-                  # format = '{output}'
-                  cache_timeout = 60
-                  color = "#93a1a1"
-              }
-
-              tztime utc {
-                  timezone = "UTC"
-                  format = "    %H·%M    "
-              }
-
-              volume_status {
-                  format = "☊ {percentage}"
-                  format_muted = "☊ X"
-                  # device = "default"
-                  # mixer_idx = 0
-              }
-            '';
-              in "py3status -c ${i3status-conf}";
+              let
+                i3status-conf = pkgs.writeText "i3status.conf" ''
+                  general {
+                      output_format = i3bar
+                      colors = true
+                      color_good = "#859900"
+
+                      interval = 1
+                  }
+
+                  order += "external_script current_task"
+                  order += "external_script inbox"
+                  order += "spotify"
+                  order += "volume_status"
+                  order += "wireless ${config.system.machine.wirelessInterface}"
+                  # order += "ethernet enp3s0f0"
+                  order += "cpu_usage"
+                  order += "battery 0"
+                  # order += "volume master"
+                  order += "time"
+                  order += "tztime utc"
+
+                  mpd {
+                      format = "%artist - %album - %title"
+                  }
+
+                  wireless ${config.system.machine.wirelessInterface} {
+                      format_up = "W: (%quality - %essid - %bitrate) %ip"
+                      format_down = "W: -"
+                  }
+
+                  ethernet enp3s0f0 {
+                      format_up = "E: %ip"
+                      format_down = "E: -"
+                  }
+
+                  battery 0 {
+                      format = "%status %percentage"
+                      path = "/sys/class/power_supply/BAT%d/uevent"
+                      low_threshold = 10
+                  }
+
+                  cpu_usage {
+                      format = "CPU: %usage"
+                  }
+
+                  load {
+                      format = "%5min"
+                  }
+
+                  time {
+                      format = "    %a %h %d ⌚   %I:%M     "
+                  }
+
+                  spotify {
+                      color_playing = "#fdf6e3"
+                      color_paused = "#93a1a1"
+                      format_stopped = ""
+                      format_down = ""
+                      format = "{title} - {artist} ({album})"
+                  }
+
+                  external_script inbox {
+                      script_path = '${emacsclient "(grfn/num-inbox-items-message)"}'
+                      format = 'Inbox: {output}'
+                      cache_timeout = 120
+                      color = "#93a1a1"
+                  }
+
+                  external_script current_task {
+                      script_path = '${emacsclient "(grfn/org-current-clocked-in-task-message)"}'
+                      # format = '{output}'
+                      cache_timeout = 60
+                      color = "#93a1a1"
+                  }
+
+                  tztime utc {
+                      timezone = "UTC"
+                      format = "    %H·%M    "
+                  }
+
+                  volume_status {
+                      format = "☊ {percentage}"
+                      format_muted = "☊ X"
+                      # device = "default"
+                      # mixer_idx = 0
+                  }
+                '';
+              in
+              "py3status -c ${i3status-conf}";
             fonts = [ decorationFont ];
             position = "top";
             colors = with solarized; rec {
@@ -362,5 +368,5 @@ in {
         iconTheme.name = "Adwaita";
         theme.name = "Adwaita";
       };
-  };
+    };
 }
diff --git a/users/grfn/system/home/modules/lib/cloneRepo.nix b/users/grfn/system/home/modules/lib/cloneRepo.nix
index dc487dc6bd05..54992bd69bae 100644
--- a/users/grfn/system/home/modules/lib/cloneRepo.nix
+++ b/users/grfn/system/home/modules/lib/cloneRepo.nix
@@ -4,44 +4,46 @@ with lib;
   options = {
     grfn.impure.clonedRepos = mkOption {
       description = "Repositories to clone";
-      default = {};
+      default = { };
       type = with types; loaOf (
-        let sm = submodule {
-          options = {
-            url = mkOption {
-              type = nullOr str;
-              description = "URL of repository to clone";
-              default = null;
-            };
+        let
+          sm = submodule {
+            options = {
+              url = mkOption {
+                type = nullOr str;
+                description = "URL of repository to clone";
+                default = null;
+              };
 
-            github = mkOption {
-              type = nullOr str;
-              description = "Github owner/repo of repository to clone";
-              default = null;
-            };
+              github = mkOption {
+                type = nullOr str;
+                description = "Github owner/repo of repository to clone";
+                default = null;
+              };
 
-            path = mkOption {
-              type = str;
-              description = "Path to clone to";
-            };
+              path = mkOption {
+                type = str;
+                description = "Path to clone to";
+              };
 
-            onClone = mkOption {
-              type = str;
-              description = ''
-                Shell command to run after cloning the repo for the first time.
-                Runs inside the repo itself.
-              '';
-              default = "";
-            };
+              onClone = mkOption {
+                type = str;
+                description = ''
+                  Shell command to run after cloning the repo for the first time.
+                  Runs inside the repo itself.
+                '';
+                default = "";
+              };
 
-            after = mkOption {
-              type = listOf str;
-              description = "Activation hooks that this repository must be cloned after";
-              default = [];
+              after = mkOption {
+                type = listOf str;
+                description = "Activation hooks that this repository must be cloned after";
+                default = [ ];
+              };
             };
           };
-        };
-        in addCheck sm (cr: (! isNull cr.url || ! isNull cr.github))
+        in
+        addCheck sm (cr: (! isNull cr.url || ! isNull cr.github))
       );
     };
   };
@@ -49,19 +51,23 @@ with lib;
   config = {
     home.activation =
       mapAttrs
-      (_: {
-        url, path, github, onClone, after, ...
-      }:
-        let repoURL = if isNull url then "git@github.com:${github}" else url;
-        in hm.dag.entryAfter (["writeBoundary"] ++ after) ''
-          $DRY_RUN_CMD mkdir -p $(dirname "${path}")
-          if [[ ! -d ${path} ]]; then
-            $DRY_RUN_CMD git clone "${repoURL}" "${path}"
-            pushd ${path}
-            $DRY_RUN_CMD ${onClone}
-            popd
-          fi
-        '')
-      config.grfn.impure.clonedRepos;
+        (_: { url
+            , path
+            , github
+            , onClone
+            , after
+            , ...
+            }:
+          let repoURL = if isNull url then "git@github.com:${github}" else url;
+          in hm.dag.entryAfter ([ "writeBoundary" ] ++ after) ''
+            $DRY_RUN_CMD mkdir -p $(dirname "${path}")
+            if [[ ! -d ${path} ]]; then
+              $DRY_RUN_CMD git clone "${repoURL}" "${path}"
+              pushd ${path}
+              $DRY_RUN_CMD ${onClone}
+              popd
+            fi
+          '')
+        config.grfn.impure.clonedRepos;
   };
 }
diff --git a/users/grfn/system/home/modules/lib/zshFunctions.nix b/users/grfn/system/home/modules/lib/zshFunctions.nix
index 7c39b3478cfd..228dc6379fd6 100644
--- a/users/grfn/system/home/modules/lib/zshFunctions.nix
+++ b/users/grfn/system/home/modules/lib/zshFunctions.nix
@@ -6,16 +6,18 @@ with lib;
   options = {
     programs.zsh.functions = mkOption {
       description = "An attribute set that maps function names to their source";
-      default = {};
+      default = { };
       type = with types; attrsOf (either str path);
     };
   };
 
   config.programs.zsh.initExtra = concatStringsSep "\n" (
-    mapAttrsToList (name: funSrc: ''
-      function ${name}() {
-        ${funSrc}
-      }
-    '') config.programs.zsh.functions
+    mapAttrsToList
+      (name: funSrc: ''
+        function ${name}() {
+          ${funSrc}
+        }
+      '')
+      config.programs.zsh.functions
   );
 }
diff --git a/users/grfn/system/home/modules/obs.nix b/users/grfn/system/home/modules/obs.nix
index d1dade477ccc..39d57d712a46 100644
--- a/users/grfn/system/home/modules/obs.nix
+++ b/users/grfn/system/home/modules/obs.nix
@@ -19,7 +19,9 @@ let
 
     nativeBuildInputs = [ pkg-config ];
     buildInputs = [
-      libtool autoconf automake
+      libtool
+      autoconf
+      automake
       x11
       xorg.libXtst
       xorg.libXinerama
diff --git a/users/grfn/system/home/modules/rtlsdr.nix b/users/grfn/system/home/modules/rtlsdr.nix
index a1c717617a62..c8a404a1f49e 100644
--- a/users/grfn/system/home/modules/rtlsdr.nix
+++ b/users/grfn/system/home/modules/rtlsdr.nix
@@ -2,12 +2,14 @@
 
 let
 
-  nixpkgs-gnuradio = import (pkgs.fetchFromGitHub {
-    owner = "doronbehar";
-    repo = "nixpkgs";
-    rev = "712561aa5f10bfe6112a1726a912585612a70d1f";
-    sha256 = "04yqflbwjcfl9vlplphpj82csqqz9k6m3nj1ybhwgmsc4by7vivl";
-  }) {};
+  nixpkgs-gnuradio = import
+    (pkgs.fetchFromGitHub {
+      owner = "doronbehar";
+      repo = "nixpkgs";
+      rev = "712561aa5f10bfe6112a1726a912585612a70d1f";
+      sha256 = "04yqflbwjcfl9vlplphpj82csqqz9k6m3nj1ybhwgmsc4by7vivl";
+    })
+    { };
 
 in
 
diff --git a/users/grfn/system/home/modules/shell.nix b/users/grfn/system/home/modules/shell.nix
index 088fe9238c69..ed82292b0aab 100644
--- a/users/grfn/system/home/modules/shell.nix
+++ b/users/grfn/system/home/modules/shell.nix
@@ -30,7 +30,7 @@ let
 
     # Directories
     stck = "dirs -v";
-    b= "cd ~1";
+    b = "cd ~1";
     ".." = "cd ..";
     "..." = "cd ../..";
     "...." = "cd ../../..";
@@ -63,7 +63,8 @@ let
     "ll" = "ls -al";
     "la" = "ls -a";
   };
-in {
+in
+{
   home.packages = with pkgs; [
     zsh
     autojump
diff --git a/users/grfn/system/home/modules/tarsnap.nix b/users/grfn/system/home/modules/tarsnap.nix
index 4bff19910f05..87002610cbcf 100644
--- a/users/grfn/system/home/modules/tarsnap.nix
+++ b/users/grfn/system/home/modules/tarsnap.nix
@@ -6,59 +6,59 @@
   ];
 
   home.file.".tarsnaprc".text = ''
-  ### Recommended options
+    ### Recommended options
 
-  # Tarsnap cache directory
-  cachedir /home/grfn/.cache/tarsnap
+    # Tarsnap cache directory
+    cachedir /home/grfn/.cache/tarsnap
 
-  # Tarsnap key file
-  keyfile /home/grfn/.private/tarsnap.key
+    # Tarsnap key file
+    keyfile /home/grfn/.private/tarsnap.key
 
-  # Don't archive files which have the nodump flag set.
-  nodump
+    # Don't archive files which have the nodump flag set.
+    nodump
 
-  # Print statistics when creating or deleting archives.
-  print-stats
+    # Print statistics when creating or deleting archives.
+    print-stats
 
-  # Create a checkpoint once per GB of uploaded data.
-  checkpoint-bytes 1G
+    # Create a checkpoint once per GB of uploaded data.
+    checkpoint-bytes 1G
 
-  ### Commonly useful options
+    ### Commonly useful options
 
-  # Use SI prefixes to make numbers printed by --print-stats more readable.
-  humanize-numbers
+    # Use SI prefixes to make numbers printed by --print-stats more readable.
+    humanize-numbers
 
-  ### Other options, not applicable to most systems
+    ### Other options, not applicable to most systems
 
-  # Aggressive network behaviour: Use multiple TCP connections when
-  # writing archives.  Use of this option is recommended only in
-  # cases where TCP congestion control is known to be the limiting
-  # factor in upload performance.
-  #aggressive-networking
+    # Aggressive network behaviour: Use multiple TCP connections when
+    # writing archives.  Use of this option is recommended only in
+    # cases where TCP congestion control is known to be the limiting
+    # factor in upload performance.
+    #aggressive-networking
 
-  # Exclude files and directories matching specified patterns.
-  # Only one file or directory per command; multiple "exclude"
-  # commands may be given.
-  #exclude
+    # Exclude files and directories matching specified patterns.
+    # Only one file or directory per command; multiple "exclude"
+    # commands may be given.
+    #exclude
 
-  # Include only files and directories matching specified patterns.
-  # Only one file or directory per command; multiple "include"
-  # commands may be given.
-  #include
+    # Include only files and directories matching specified patterns.
+    # Only one file or directory per command; multiple "include"
+    # commands may be given.
+    #include
 
-  # Attempt to reduce tarsnap memory consumption.  This option
-  # will slow down the process of creating archives, but may help
-  # on systems where the average size of files being backed up is
-  # less than 1 MB.
-  #lowmem
+    # Attempt to reduce tarsnap memory consumption.  This option
+    # will slow down the process of creating archives, but may help
+    # on systems where the average size of files being backed up is
+    # less than 1 MB.
+    #lowmem
 
-  # Try even harder to reduce tarsnap memory consumption.  This can
-  # significantly slow down tarsnap, but reduces its memory usage
-  # by an additional factor of 2 beyond what the lowmem option does.
-  #verylowmem
+    # Try even harder to reduce tarsnap memory consumption.  This can
+    # significantly slow down tarsnap, but reduces its memory usage
+    # by an additional factor of 2 beyond what the lowmem option does.
+    #verylowmem
 
-  # Snapshot time.  Use this option if you are backing up files
-  # from a filesystem snapshot rather than from a "live" filesystem.
-  #snaptime <file>
+    # Snapshot time.  Use this option if you are backing up files
+    # from a filesystem snapshot rather than from a "live" filesystem.
+    #snaptime <file>
   '';
 }
diff --git a/users/grfn/system/home/platforms/darwin.nix b/users/grfn/system/home/platforms/darwin.nix
index cf0375e94162..f98b80f26915 100644
--- a/users/grfn/system/home/platforms/darwin.nix
+++ b/users/grfn/system/home/platforms/darwin.nix
@@ -10,7 +10,7 @@ with lib;
       pinentry_mac
     ];
 
-    home.activation.linkApplications = lib.hm.dag.entryAfter ["writeBoundary"] ''
+    home.activation.linkApplications = lib.hm.dag.entryAfter [ "writeBoundary" ] ''
       $DRY_RUN_CMD ln -sf $VERBOSE_ARG \
         ~/.nix-profile/Applications/* ~/Applications/
     '';
diff --git a/users/grfn/system/system/iso.nix b/users/grfn/system/system/iso.nix
index 4adccebfb8a2..92a13f655214 100644
--- a/users/grfn/system/system/iso.nix
+++ b/users/grfn/system/system/iso.nix
@@ -12,6 +12,7 @@ let
     networking.firewall.enable = false;
     networking.wireless.enable = lib.mkForce false;
   };
-in (depot.third_party.nixos {
+in
+(depot.third_party.nixos {
   inherit configuration;
 }).config.system.build.isoImage
diff --git a/users/grfn/system/system/machines/mugwump.nix b/users/grfn/system/system/machines/mugwump.nix
index 7de6555878d9..a8bf91caacc0 100644
--- a/users/grfn/system/system/machines/mugwump.nix
+++ b/users/grfn/system/system/machines/mugwump.nix
@@ -23,7 +23,12 @@ with lib;
     initrd = {
       availableKernelModules = [ "xhci_pci" "ehci_pci" "ahci" "usb_storage" "usbhid" "sd_mod" ];
       kernelModules = [
-        "uas" "usbcore" "usb_storage" "vfat" "nls_cp437" "nls_iso8859_1"
+        "uas"
+        "usbcore"
+        "usb_storage"
+        "vfat"
+        "nls_cp437"
+        "nls_iso8859_1"
       ];
 
       postDeviceCommands = pkgs.lib.mkBefore ''
@@ -60,31 +65,33 @@ with lib;
   networking.firewall.allowedTCPPorts = [ 22 80 443 ];
 
   security.sudo.extraRules = [{
-    groups = ["wheel"];
-    commands = [{ command = "ALL"; options = ["NOPASSWD"]; }];
+    groups = [ "wheel" ];
+    commands = [{ command = "ALL"; options = [ "NOPASSWD" ]; }];
   }];
 
   nix.gc.dates = "monthly";
 
-  age.secrets = let
-    secret = name: depot.users.grfn.secrets."${name}.age";
-  in {
-    bbbg.file = secret "bbbg";
-    cloudflare.file = secret "cloudflare";
-    ddclient-password.file = secret "ddclient-password";
-
-    buildkite-ssh-key = {
-      file = secret "buildkite-ssh-key";
-      group = "keys";
-      mode = "0440";
-    };
+  age.secrets =
+    let
+      secret = name: depot.users.grfn.secrets."${name}.age";
+    in
+    {
+      bbbg.file = secret "bbbg";
+      cloudflare.file = secret "cloudflare";
+      ddclient-password.file = secret "ddclient-password";
+
+      buildkite-ssh-key = {
+        file = secret "buildkite-ssh-key";
+        group = "keys";
+        mode = "0440";
+      };
 
-    buildkite-token = {
-      file = secret "buildkite-token";
-      group = "keys";
-      mode = "0440";
+      buildkite-token = {
+        file = secret "buildkite-token";
+        group = "keys";
+        mode = "0440";
+      };
     };
-  };
 
   services.depot.auto-deploy = {
     enable = true;
@@ -207,44 +214,49 @@ with lib;
       job_name = "node";
       scrape_interval = "5s";
       static_configs = [{
-        targets = ["localhost:${toString config.services.prometheus.exporters.node.port}"];
-      }];
-    } {
-      job_name = "nginx";
-      scrape_interval = "5s";
-      static_configs = [{
-        targets = ["localhost:${toString config.services.prometheus.exporters.nginx.port}"];
+        targets = [ "localhost:${toString config.services.prometheus.exporters.node.port}" ];
       }];
-    } {
-      job_name = "xanthous_server";
-      scrape_interval = "1s";
-      static_configs = [{
-        targets = ["localhost:${toString config.services.xanthous-server.metricsPort}"];
+    }
+      {
+        job_name = "nginx";
+        scrape_interval = "5s";
+        static_configs = [{
+          targets = [ "localhost:${toString config.services.prometheus.exporters.nginx.port}" ];
+        }];
+      }
+      {
+        job_name = "xanthous_server";
+        scrape_interval = "1s";
+        static_configs = [{
+          targets = [ "localhost:${toString config.services.xanthous-server.metricsPort}" ];
+        }];
+      }
+      {
+        job_name = "blackbox";
+        metrics_path = "/probe";
+        params.module = [ "https_2xx" ];
+        scrape_interval = "5s";
+        static_configs = [{
+          targets = [
+            "https://gws.fyi"
+            "https://windtunnel.ci"
+            "https://app.windtunnel.ci"
+            "https://metrics.gws.fyi"
+          ];
+        }];
+        relabel_configs = [{
+          source_labels = [ "__address__" ];
+          target_label = "__param_target";
+        }
+          {
+            source_labels = [ "__param_target" ];
+            target_label = "instance";
+          }
+          {
+            target_label = "__address__";
+            replacement = "localhost:${toString config.services.prometheus.exporters.blackbox.port}";
+          }];
       }];
-    } {
-      job_name = "blackbox";
-      metrics_path = "/probe";
-      params.module = ["https_2xx"];
-      scrape_interval = "5s";
-      static_configs = [{
-        targets = [
-          "https://gws.fyi"
-          "https://windtunnel.ci"
-          "https://app.windtunnel.ci"
-          "https://metrics.gws.fyi"
-        ];
-      }];
-      relabel_configs = [{
-        source_labels = ["__address__"];
-        target_label = "__param_target";
-      } {
-        source_labels = ["__param_target"];
-        target_label = "instance";
-      } {
-        target_label = "__address__";
-        replacement = "localhost:${toString config.services.prometheus.exporters.blackbox.port}";
-      }];
-    }];
   };
 
   services.xanthous-server.enable = true;
@@ -256,21 +268,23 @@ with lib;
 
   virtualisation.docker.enable = true;
 
-  services.buildkite-agents = listToAttrs (map (n: rec {
-    name = "mugwump-${toString n}";
-    value = {
-      inherit name;
-      enable = true;
-      tokenPath = "/run/agenix/buildkite-agent-token";
-      privateSshKeyPath = "/run/agenix/buildkite-ssh-key";
-      runtimePackages = with pkgs; [
-        docker
-        nix
-        gnutar
-        gzip
-      ];
-    };
-  }) (range 1 1));
+  services.buildkite-agents = listToAttrs (map
+    (n: rec {
+      name = "mugwump-${toString n}";
+      value = {
+        inherit name;
+        enable = true;
+        tokenPath = "/run/agenix/buildkite-agent-token";
+        privateSshKeyPath = "/run/agenix/buildkite-ssh-key";
+        runtimePackages = with pkgs; [
+          docker
+          nix
+          gnutar
+          gzip
+        ];
+      };
+    })
+    (range 1 1));
 
   users.users."buildkite-agent-mugwump-1" = {
     isSystemUser = true;
diff --git a/users/grfn/system/system/modules/common.nix b/users/grfn/system/system/modules/common.nix
index 91723973f5cd..635747d118ff 100644
--- a/users/grfn/system/system/modules/common.nix
+++ b/users/grfn/system/system/modules/common.nix
@@ -2,7 +2,7 @@
 
 let
 
-  depot = import ../../../../.. {};
+  depot = import ../../../../.. { };
 
 in
 
diff --git a/users/grfn/system/system/modules/fonts.nix b/users/grfn/system/system/modules/fonts.nix
index babe30d4271f..f30600b28b39 100644
--- a/users/grfn/system/system/modules/fonts.nix
+++ b/users/grfn/system/system/modules/fonts.nix
@@ -7,6 +7,6 @@
       twitter-color-emoji
     ];
 
-    fontconfig.defaultFonts.emoji = ["Twitter Color Emoji"];
+    fontconfig.defaultFonts.emoji = [ "Twitter Color Emoji" ];
   };
 }
diff --git a/users/grfn/system/system/modules/reusable/battery.nix b/users/grfn/system/system/modules/reusable/battery.nix
index ca92e0c3f61c..151c2a246f32 100644
--- a/users/grfn/system/system/modules/reusable/battery.nix
+++ b/users/grfn/system/system/modules/reusable/battery.nix
@@ -22,11 +22,11 @@ with lib;
   config =
     let cfg = config.laptop.onLowBattery;
     in mkIf cfg.enable {
-    services.udev.extraRules = concatStrings [
-      ''SUBSYSTEM=="power_supply", ''
-      ''ATTR{status}=="Discharging", ''
-      ''ATTR{capacity}=="[0-${toString cfg.thresholdPercentage}]", ''
-      ''RUN+="${pkgs.systemd}/bin/systemctl ${cfg.action}"''
-    ];
-  };
+      services.udev.extraRules = concatStrings [
+        ''SUBSYSTEM=="power_supply", ''
+        ''ATTR{status}=="Discharging", ''
+        ''ATTR{capacity}=="[0-${toString cfg.thresholdPercentage}]", ''
+        ''RUN+="${pkgs.systemd}/bin/systemctl ${cfg.action}"''
+      ];
+    };
 }
diff --git a/users/grfn/system/system/modules/tvl.nix b/users/grfn/system/system/modules/tvl.nix
index 905ec8ced537..959f8449f659 100644
--- a/users/grfn/system/system/modules/tvl.nix
+++ b/users/grfn/system/system/modules/tvl.nix
@@ -8,7 +8,7 @@
       sshKey = "/root/.ssh/id_rsa";
       system = "x86_64-linux";
       maxJobs = 64;
-      supportedFeatures = ["big-parallel" "kvm" "nixos-test" "benchmark"];
+      supportedFeatures = [ "big-parallel" "kvm" "nixos-test" "benchmark" ];
     }];
 
     extraOptions = ''
@@ -29,7 +29,7 @@
   };
 
   programs.ssh.knownHosts.whitby = {
-    hostNames = [ "whitby" "whitby.tvl.fyi" "49.12.129.211"];
+    hostNames = [ "whitby" "whitby.tvl.fyi" "49.12.129.211" ];
     publicKeyFile = pkgs.writeText "whitby.pub" ''
       ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAILNh/w4BSKov0jdz3gKBc98tpoLta5bb87fQXWBhAl2I
     '';
diff --git a/users/grfn/system/system/modules/work/kolide.nix b/users/grfn/system/system/modules/work/kolide.nix
index 29ee0a0d7ce4..e4ee786f0cbe 100644
--- a/users/grfn/system/system/modules/work/kolide.nix
+++ b/users/grfn/system/system/modules/work/kolide.nix
@@ -3,9 +3,10 @@
 let
   deb = ./kolide.deb;
 
-  kolide = pkgs.runCommand "kolide-data" {
-    buildInputs = [ pkgs.binutils-unwrapped ];
-  } ''
+  kolide = pkgs.runCommand "kolide-data"
+    {
+      buildInputs = [ pkgs.binutils-unwrapped ];
+    } ''
     cp ${deb} ./kolide.deb
     ar x kolide.deb
     mkdir result
@@ -19,7 +20,8 @@ let
     mv result $out
   '';
 
-in {
+in
+{
   systemd.services."launcher.kolide-k2" = {
     wantedBy = [ "multi-user.target" ];
     after = [ "network.target" "syslog.service" ];
diff --git a/users/grfn/terraform/globals.nix b/users/grfn/terraform/globals.nix
index 5f373c664604..c6bc24c22b65 100644
--- a/users/grfn/terraform/globals.nix
+++ b/users/grfn/terraform/globals.nix
@@ -1,20 +1,23 @@
 { pkgs, ... }:
 
 {
-  provider.aws = map (region: {
-    inherit region;
-    alias = region;
-    profile = "personal";
-  }) [
+  provider.aws = map
+    (region: {
+      inherit region;
+      alias = region;
+      profile = "personal";
+    }) [
     "us-east-1"
     "us-east-2"
     "us-west-2"
   ];
 
   data.external.cloudflare_api_key = {
-    program = [(pkgs.writeShellScript "cloudflare_api_key" ''
-      jq -n --arg api_key "$(pass cloudflare-api-key)" '{"api_key":$api_key}'
-    '')];
+    program = [
+      (pkgs.writeShellScript "cloudflare_api_key" ''
+        jq -n --arg api_key "$(pass cloudflare-api-key)" '{"api_key":$api_key}'
+      '')
+    ];
   };
 
   provider.cloudflare = {
diff --git a/users/grfn/terraform/nixosMachine.nix b/users/grfn/terraform/nixosMachine.nix
index ef8830d66c21..dfecbff60a3e 100644
--- a/users/grfn/terraform/nixosMachine.nix
+++ b/users/grfn/terraform/nixosMachine.nix
@@ -9,7 +9,7 @@
 , region ? "us-east-2"
 , rootVolumeSizeGb ? 50
 , securityGroupId ? null
-, extraIngressPorts ? []
+, extraIngressPorts ? [ ]
 }:
 
 let
@@ -40,13 +40,14 @@ let
 
   machineResource = "aws_instance.${prefix}machine";
 
-  recursiveMerge = builtins.foldl' lib.recursiveUpdate {};
+  recursiveMerge = builtins.foldl' lib.recursiveUpdate { };
 
   securityGroupId' =
     if isNull securityGroupId
     then "\${aws_security_group.${prefix}group.id}"
     else securityGroupId;
-in recursiveMerge [
+in
+recursiveMerge [
   (lib.optionalAttrs (isNull securityGroupId) {
     resource.aws_security_group."${prefix}group" = {
       provider = "aws.${region}";
@@ -60,12 +61,12 @@ in recursiveMerge [
     resource.aws_security_group_rule.all_egress = {
       provider = "aws.${region}";
       security_group_id = securityGroupId';
-      type            = "egress";
-      protocol        = "-1";
-      from_port       = 0;
-      to_port         = 0;
-      cidr_blocks     = ["0.0.0.0/0"];
-      ipv6_cidr_blocks = ["::/0"];
+      type = "egress";
+      protocol = "-1";
+      from_port = 0;
+      to_port = 0;
+      cidr_blocks = [ "0.0.0.0/0" ];
+      ipv6_cidr_blocks = [ "::/0" ];
 
       description = null;
       prefix_list_ids = null;
@@ -74,12 +75,14 @@ in recursiveMerge [
   })
   rec {
     data.external.my_ip = {
-      program = [(pkgs.writeShellScript "my_ip" ''
-        ${pkgs.jq}/bin/jq \
-          -n \
-          --arg ip "$(curl ifconfig.me)" \
-          '{"ip":$ip}'
-      '')];
+      program = [
+        (pkgs.writeShellScript "my_ip" ''
+          ${pkgs.jq}/bin/jq \
+            -n \
+            --arg ip "$(curl ifconfig.me)" \
+            '{"ip":$ip}'
+        '')
+      ];
     };
 
     resource.aws_security_group_rule.provision_ssh_access = {
@@ -89,8 +92,8 @@ in recursiveMerge [
       protocol = "TCP";
       from_port = 22;
       to_port = 22;
-      cidr_blocks = ["\${data.external.my_ip.result.ip}/32"];
-      ipv6_cidr_blocks = [];
+      cidr_blocks = [ "\${data.external.my_ip.result.ip}/32" ];
+      ipv6_cidr_blocks = [ ];
       description = null;
       prefix_list_ids = null;
       self = null;
@@ -183,21 +186,23 @@ in recursiveMerge [
   }
 
   {
-    resource.aws_security_group_rule = builtins.listToAttrs (map (port: {
-      name = "ingress_${toString port}";
-      value = {
-        provider = "aws.${region}";
-        security_group_id = securityGroupId';
-        type = "ingress";
-        protocol = "TCP";
-        from_port = port;
-        to_port = port;
-        cidr_blocks = ["0.0.0.0/0"];
-        ipv6_cidr_blocks = [];
-        description = null;
-        prefix_list_ids = null;
-        self = null;
-      };
-    }) extraIngressPorts);
+    resource.aws_security_group_rule = builtins.listToAttrs (map
+      (port: {
+        name = "ingress_${toString port}";
+        value = {
+          provider = "aws.${region}";
+          security_group_id = securityGroupId';
+          type = "ingress";
+          protocol = "TCP";
+          from_port = port;
+          to_port = port;
+          cidr_blocks = [ "0.0.0.0/0" ];
+          ipv6_cidr_blocks = [ ];
+          description = null;
+          prefix_list_ids = null;
+          self = null;
+        };
+      })
+      extraIngressPorts);
   }
 ]
diff --git a/users/grfn/terraform/workspace.nix b/users/grfn/terraform/workspace.nix
index c2a0fdb97793..92bf6e4ec1c5 100644
--- a/users/grfn/terraform/workspace.nix
+++ b/users/grfn/terraform/workspace.nix
@@ -21,13 +21,15 @@ let
   ]));
 
   plugins_tf = {
-    terraform.required_providers = (builtins.listToAttrs (map (p: {
-      name = lib.last (lib.splitString "/" p.provider-source-address);
-      value = {
-        source = p.provider-source-address;
-        version = p.version;
-      };
-    }) (allPlugins pkgs.terraform.plugins)));
+    terraform.required_providers = (builtins.listToAttrs (map
+      (p: {
+        name = lib.last (lib.splitString "/" p.provider-source-address);
+        value = {
+          source = p.provider-source-address;
+          version = p.version;
+        };
+      })
+      (allPlugins pkgs.terraform.plugins)));
   };
 
 
@@ -36,7 +38,7 @@ let
     plugins = plugins_tf;
   };
 
-  module = runCommandNoCC "module" {} ''
+  module = runCommandNoCC "module" { } ''
     mkdir $out
     ${lib.concatStrings (lib.mapAttrsToList (k: config_tf:
       (let
@@ -70,7 +72,7 @@ let
   '';
 
   # TODO: import (-config)
-  tfcmds = runCommandNoCC "${name}-tfcmds" {} ''
+  tfcmds = runCommandNoCC "${name}-tfcmds" { } ''
     mkdir -p $out/bin
     ln -s ${init} $out/bin/init
     ln -s ${tfcmd} $out/bin/validate
@@ -79,7 +81,8 @@ let
     ln -s ${tfcmd} $out/bin/destroy
   '';
 
-in {
+in
+{
   inherit name module;
   terraform = tf;
   cmds = tfcmds;
@@ -92,7 +95,7 @@ in {
   #   destroy = depot.nix.nixRunWrapper "destroy" tfcmds;
   # };
 
-  test = runCommandNoCC "${name}-test" {} ''
+  test = runCommandNoCC "${name}-test" { } ''
     set -e
     export TF_STATE_ROOT=$(pwd)
     ${tfcmds}/bin/init
diff --git a/users/grfn/xanthous/default.nix b/users/grfn/xanthous/default.nix
index c0eca446c9ed..e8a1eb14eb1a 100644
--- a/users/grfn/xanthous/default.nix
+++ b/users/grfn/xanthous/default.nix
@@ -1,6 +1,7 @@
-{ depot ? (import ../../../. {})
+{ depot ? (import ../../../. { })
 , pkgs ? depot.third_party.nixpkgs
-, ... }:
+, ...
+}:
 
 let
   ignore = depot.third_party.gitignoreSource.gitignoreFilter ./.;
@@ -11,7 +12,7 @@ let
       !(type == "directory" && builtins.baseNameOf path == "server")
       && !(type == "directory" && builtins.baseNameOf path == "docs")
       && (ignore path type
-          || builtins.baseNameOf path == "package.yaml");
+      || builtins.baseNameOf path == "package.yaml");
   };
   # generated by cabal2nix
   basePkg = pkgs.haskellPackages.callPackage ./pkg.nix { };
@@ -22,5 +23,5 @@ pkgs.haskell.lib.overrideCabal basePkg (default: {
   version = "canon";
   configureFlags = [
     "--ghc-option=-Wall --ghc-option=-Werror"
-  ] ++ (default.configureFlags or []);
+  ] ++ (default.configureFlags or [ ]);
 })
diff --git a/users/grfn/xanthous/pkg.nix b/users/grfn/xanthous/pkg.nix
index 0f0dbfc9822d..f8364c467abe 100644
--- a/users/grfn/xanthous/pkg.nix
+++ b/users/grfn/xanthous/pkg.nix
@@ -1,17 +1,74 @@
-{ mkDerivation, aeson, array, async, base, bifunctors, brick
-, checkers, classy-prelude, comonad, comonad-extras, constraints
-, containers, criterion, data-default, data-interval, deepseq
-, directory, fgl, fgl-arbitrary, file-embed, filepath
-, generic-arbitrary, generic-lens, groups, hgeometry
-, hgeometry-combinatorial, hpack, JuicyPixels, lens
-, lens-properties, lib, lifted-async, linear, megaparsec, mmorph
-, monad-control, MonadRandom, mtl, optparse-applicative, parallel
-, parser-combinators, pointed, QuickCheck, quickcheck-instances
-, quickcheck-text, random, random-extras, random-fu, random-source
-, Rasterific, raw-strings-qq, reflection, semigroupoids, semigroups
-, splitmix, stache, streams, tasty, tasty-hunit, tasty-quickcheck
-, tasty-rerun, text, text-zipper, tomland, transformers, vector
-, vty, witherable, yaml, zlib
+{ mkDerivation
+, aeson
+, array
+, async
+, base
+, bifunctors
+, brick
+, checkers
+, classy-prelude
+, comonad
+, comonad-extras
+, constraints
+, containers
+, criterion
+, data-default
+, data-interval
+, deepseq
+, directory
+, fgl
+, fgl-arbitrary
+, file-embed
+, filepath
+, generic-arbitrary
+, generic-lens
+, groups
+, hgeometry
+, hgeometry-combinatorial
+, hpack
+, JuicyPixels
+, lens
+, lens-properties
+, lib
+, lifted-async
+, linear
+, megaparsec
+, mmorph
+, monad-control
+, MonadRandom
+, mtl
+, optparse-applicative
+, parallel
+, parser-combinators
+, pointed
+, QuickCheck
+, quickcheck-instances
+, quickcheck-text
+, random
+, random-extras
+, random-fu
+, random-source
+, Rasterific
+, raw-strings-qq
+, reflection
+, semigroupoids
+, semigroups
+, splitmix
+, stache
+, streams
+, tasty
+, tasty-hunit
+, tasty-quickcheck
+, tasty-rerun
+, text
+, text-zipper
+, tomland
+, transformers
+, vector
+, vty
+, witherable
+, yaml
+, zlib
 }:
 mkDerivation {
   pname = "xanthous";
@@ -20,58 +77,270 @@ mkDerivation {
   isLibrary = true;
   isExecutable = true;
   libraryHaskellDepends = [
-    aeson array async base bifunctors brick checkers classy-prelude
-    comonad comonad-extras constraints containers criterion
-    data-default data-interval deepseq directory fgl fgl-arbitrary
-    file-embed filepath generic-arbitrary generic-lens groups hgeometry
-    hgeometry-combinatorial JuicyPixels lens lifted-async linear
-    megaparsec mmorph monad-control MonadRandom mtl
-    optparse-applicative parallel parser-combinators pointed QuickCheck
-    quickcheck-instances quickcheck-text random random-extras random-fu
-    random-source Rasterific raw-strings-qq reflection semigroupoids
-    semigroups splitmix stache streams text text-zipper tomland
-    transformers vector vty witherable yaml zlib
+    aeson
+    array
+    async
+    base
+    bifunctors
+    brick
+    checkers
+    classy-prelude
+    comonad
+    comonad-extras
+    constraints
+    containers
+    criterion
+    data-default
+    data-interval
+    deepseq
+    directory
+    fgl
+    fgl-arbitrary
+    file-embed
+    filepath
+    generic-arbitrary
+    generic-lens
+    groups
+    hgeometry
+    hgeometry-combinatorial
+    JuicyPixels
+    lens
+    lifted-async
+    linear
+    megaparsec
+    mmorph
+    monad-control
+    MonadRandom
+    mtl
+    optparse-applicative
+    parallel
+    parser-combinators
+    pointed
+    QuickCheck
+    quickcheck-instances
+    quickcheck-text
+    random
+    random-extras
+    random-fu
+    random-source
+    Rasterific
+    raw-strings-qq
+    reflection
+    semigroupoids
+    semigroups
+    splitmix
+    stache
+    streams
+    text
+    text-zipper
+    tomland
+    transformers
+    vector
+    vty
+    witherable
+    yaml
+    zlib
   ];
   libraryToolDepends = [ hpack ];
   executableHaskellDepends = [
-    aeson array async base bifunctors brick checkers classy-prelude
-    comonad comonad-extras constraints containers criterion
-    data-default data-interval deepseq directory fgl fgl-arbitrary
-    file-embed filepath generic-arbitrary generic-lens groups hgeometry
-    hgeometry-combinatorial JuicyPixels lens lifted-async linear
-    megaparsec mmorph monad-control MonadRandom mtl
-    optparse-applicative parallel parser-combinators pointed QuickCheck
-    quickcheck-instances quickcheck-text random random-extras random-fu
-    random-source Rasterific raw-strings-qq reflection semigroupoids
-    semigroups splitmix stache streams text text-zipper tomland
-    transformers vector vty witherable yaml zlib
+    aeson
+    array
+    async
+    base
+    bifunctors
+    brick
+    checkers
+    classy-prelude
+    comonad
+    comonad-extras
+    constraints
+    containers
+    criterion
+    data-default
+    data-interval
+    deepseq
+    directory
+    fgl
+    fgl-arbitrary
+    file-embed
+    filepath
+    generic-arbitrary
+    generic-lens
+    groups
+    hgeometry
+    hgeometry-combinatorial
+    JuicyPixels
+    lens
+    lifted-async
+    linear
+    megaparsec
+    mmorph
+    monad-control
+    MonadRandom
+    mtl
+    optparse-applicative
+    parallel
+    parser-combinators
+    pointed
+    QuickCheck
+    quickcheck-instances
+    quickcheck-text
+    random
+    random-extras
+    random-fu
+    random-source
+    Rasterific
+    raw-strings-qq
+    reflection
+    semigroupoids
+    semigroups
+    splitmix
+    stache
+    streams
+    text
+    text-zipper
+    tomland
+    transformers
+    vector
+    vty
+    witherable
+    yaml
+    zlib
   ];
   testHaskellDepends = [
-    aeson array async base bifunctors brick checkers classy-prelude
-    comonad comonad-extras constraints containers criterion
-    data-default data-interval deepseq directory fgl fgl-arbitrary
-    file-embed filepath generic-arbitrary generic-lens groups hgeometry
-    hgeometry-combinatorial JuicyPixels lens lens-properties
-    lifted-async linear megaparsec mmorph monad-control MonadRandom mtl
-    optparse-applicative parallel parser-combinators pointed QuickCheck
-    quickcheck-instances quickcheck-text random random-extras random-fu
-    random-source Rasterific raw-strings-qq reflection semigroupoids
-    semigroups splitmix stache streams tasty tasty-hunit
-    tasty-quickcheck tasty-rerun text text-zipper tomland transformers
-    vector vty witherable yaml zlib
+    aeson
+    array
+    async
+    base
+    bifunctors
+    brick
+    checkers
+    classy-prelude
+    comonad
+    comonad-extras
+    constraints
+    containers
+    criterion
+    data-default
+    data-interval
+    deepseq
+    directory
+    fgl
+    fgl-arbitrary
+    file-embed
+    filepath
+    generic-arbitrary
+    generic-lens
+    groups
+    hgeometry
+    hgeometry-combinatorial
+    JuicyPixels
+    lens
+    lens-properties
+    lifted-async
+    linear
+    megaparsec
+    mmorph
+    monad-control
+    MonadRandom
+    mtl
+    optparse-applicative
+    parallel
+    parser-combinators
+    pointed
+    QuickCheck
+    quickcheck-instances
+    quickcheck-text
+    random
+    random-extras
+    random-fu
+    random-source
+    Rasterific
+    raw-strings-qq
+    reflection
+    semigroupoids
+    semigroups
+    splitmix
+    stache
+    streams
+    tasty
+    tasty-hunit
+    tasty-quickcheck
+    tasty-rerun
+    text
+    text-zipper
+    tomland
+    transformers
+    vector
+    vty
+    witherable
+    yaml
+    zlib
   ];
   benchmarkHaskellDepends = [
-    aeson array async base bifunctors brick checkers classy-prelude
-    comonad comonad-extras constraints containers criterion
-    data-default data-interval deepseq directory fgl fgl-arbitrary
-    file-embed filepath generic-arbitrary generic-lens groups hgeometry
-    hgeometry-combinatorial JuicyPixels lens lifted-async linear
-    megaparsec mmorph monad-control MonadRandom mtl
-    optparse-applicative parallel parser-combinators pointed QuickCheck
-    quickcheck-instances quickcheck-text random random-extras random-fu
-    random-source Rasterific raw-strings-qq reflection semigroupoids
-    semigroups splitmix stache streams text text-zipper tomland
-    transformers vector vty witherable yaml zlib
+    aeson
+    array
+    async
+    base
+    bifunctors
+    brick
+    checkers
+    classy-prelude
+    comonad
+    comonad-extras
+    constraints
+    containers
+    criterion
+    data-default
+    data-interval
+    deepseq
+    directory
+    fgl
+    fgl-arbitrary
+    file-embed
+    filepath
+    generic-arbitrary
+    generic-lens
+    groups
+    hgeometry
+    hgeometry-combinatorial
+    JuicyPixels
+    lens
+    lifted-async
+    linear
+    megaparsec
+    mmorph
+    monad-control
+    MonadRandom
+    mtl
+    optparse-applicative
+    parallel
+    parser-combinators
+    pointed
+    QuickCheck
+    quickcheck-instances
+    quickcheck-text
+    random
+    random-extras
+    random-fu
+    random-source
+    Rasterific
+    raw-strings-qq
+    reflection
+    semigroupoids
+    semigroups
+    splitmix
+    stache
+    streams
+    text
+    text-zipper
+    tomland
+    transformers
+    vector
+    vty
+    witherable
+    yaml
+    zlib
   ];
   prePatch = "hpack";
   homepage = "https://github.com/glittershark/xanthous#readme";
diff --git a/users/grfn/xanthous/server/default.nix b/users/grfn/xanthous/server/default.nix
index 0b3900e4d5fe..95c2b15ec95f 100644
--- a/users/grfn/xanthous/server/default.nix
+++ b/users/grfn/xanthous/server/default.nix
@@ -1,5 +1,4 @@
-args@{
-  depot ? import ../../../.. {}
+args@{ depot ? import ../../../.. { }
 , pkgs ? depot.third_party.nixpkgs
 , ...
 }:
diff --git a/users/grfn/xanthous/server/docker.nix b/users/grfn/xanthous/server/docker.nix
index a62943c2b077..09054cb00fcf 100644
--- a/users/grfn/xanthous/server/docker.nix
+++ b/users/grfn/xanthous/server/docker.nix
@@ -1,4 +1,4 @@
-{ depot ? import ../../../.. {}
+{ depot ? import ../../../.. { }
 , pkgs ? depot.third_party.nixpkgs
 , ...
 }:
@@ -6,14 +6,16 @@
 let
   inherit (depot.users.grfn) xanthous;
   xanthous-server = xanthous.server;
-in pkgs.dockerTools.buildLayeredImage {
+in
+pkgs.dockerTools.buildLayeredImage {
   name = "xanthous-server";
   tag = "latest";
   contents = [ xanthous xanthous-server ];
   config = {
     Cmd = [
       "${xanthous-server}/bin/xanthous-server"
-      "--xanthous-binary-path" "${xanthous}/bin/xanthous"
+      "--xanthous-binary-path"
+      "${xanthous}/bin/xanthous"
     ];
   };
 }
diff --git a/users/grfn/xanthous/server/module.nix b/users/grfn/xanthous/server/module.nix
index 73ac276caf4a..82de6e38e1af 100644
--- a/users/grfn/xanthous/server/module.nix
+++ b/users/grfn/xanthous/server/module.nix
@@ -2,7 +2,8 @@
 
 let
   cfg = config.services.xanthous-server;
-in {
+in
+{
   options = with lib; {
     services.xanthous-server = {
       enable = mkEnableOption "xanthous server";
diff --git a/users/grfn/xanthous/server/shell.nix b/users/grfn/xanthous/server/shell.nix
index a6747175f105..e01c0316a6b2 100644
--- a/users/grfn/xanthous/server/shell.nix
+++ b/users/grfn/xanthous/server/shell.nix
@@ -1,5 +1,5 @@
 let
-  depot = import ../../../.. {};
+  depot = import ../../../.. { };
   pkgs = depot.third_party.nixpkgs;
 in
 
diff --git a/users/grfn/xanthous/shell.nix b/users/grfn/xanthous/shell.nix
index 572ed211bcf4..53fbd7a7c14a 100644
--- a/users/grfn/xanthous/shell.nix
+++ b/users/grfn/xanthous/shell.nix
@@ -1,5 +1,5 @@
 let
-  depot = import ../../../. {};
+  depot = import ../../../. { };
   inherit (depot) third_party;
   pkgs = third_party.nixpkgs;
 in
@@ -7,7 +7,7 @@ in
 (pkgs.haskellPackages.extend (pkgs.haskell.lib.packageSourceOverrides {
   xanthous = third_party.gitignoreSource ./.;
 })).shellFor {
-  packages = p: [p.xanthous];
+  packages = p: [ p.xanthous ];
   withHoogle = true;
   doBenchmark = true;
   buildInputs = (with pkgs.haskellPackages; [
diff --git a/users/riking/adventofcode-2020/day01/default.nix b/users/riking/adventofcode-2020/day01/default.nix
index 0648a05af683..946069e3a68e 100644
--- a/users/riking/adventofcode-2020/day01/default.nix
+++ b/users/riking/adventofcode-2020/day01/default.nix
@@ -5,6 +5,6 @@ with depot.third_party;
 naersk.buildPackage {
   src = ./.;
 
-  buildInputs = [];
+  buildInputs = [ ];
   doCheck = true;
 }
diff --git a/users/riking/keys.nix b/users/riking/keys.nix
index 6dd2ff18a30f..50287098244a 100644
--- a/users/riking/keys.nix
+++ b/users/riking/keys.nix
@@ -7,14 +7,14 @@ rec {
   sk-portable2 = "sk-ecdsa-sha2-nistp256@openssh.com AAAAInNrLWVjZHNhLXNoYTItbmlzdHAyNTZAb3BlbnNzaC5jb20AAAAIbmlzdHAyNTYAAABBBEX3DXreQR93SR68QZHTdaVd5RjlRM8C0jcZ4kI4OZwqk7xuk68w3g22q2OM7O+chj+n1N3u0hLxi82QfRnwyasAAAAEc3NoOg== riking@sk-portable2";
   sk-desktop = "sk-ecdsa-sha2-nistp256@openssh.com AAAAInNrLWVjZHNhLXNoYTItbmlzdHAyNTZAb3BlbnNzaC5jb20AAAAIbmlzdHAyNTYAAABBBB+JvN8nAxD+yo49Ohf/UDq7Z049yvkURJIA1XNbvKaAkvfWnCN5m9vTC1FyGxTyCwy4QpD1pFP5fIn0X/kvvfgAAAAEc3NoOg== riking@sk-kane-DAN-A4";
 
-  u2f = [sk-ecljg09 sk-portable1 sk-portable2 sk-desktop];
+  u2f = [ sk-ecljg09 sk-portable1 sk-portable2 sk-desktop ];
 
   ed1 = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIAjWIfFH2bAWMZG+HudV1MVHWUl83M/ZgEu6S3SLatYN riking@kane-DAN-A4";
   ed2 = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAICBblB4C9IgAijv+qN6Zs8TM2Sz7phQvVmRrcDn4VYNo riking@ECLJG09";
 
-  passworded = [ed1 ed2];
+  passworded = [ ed1 ed2 ];
 
-  unprotected = [];
+  unprotected = [ ];
 
   all = u2f ++ passworded ++ unprotected;
 }
diff --git a/users/sterni/clhs-lookup/default.nix b/users/sterni/clhs-lookup/default.nix
index b6a0bd06790f..1cde38e8ce3b 100644
--- a/users/sterni/clhs-lookup/default.nix
+++ b/users/sterni/clhs-lookup/default.nix
@@ -36,4 +36,4 @@ let
     ];
   };
 in
-  clhs-lookup
+clhs-lookup
diff --git a/users/sterni/dot-time-man-pages/default.nix b/users/sterni/dot-time-man-pages/default.nix
index bf7d63dbd797..c449cde613f9 100644
--- a/users/sterni/dot-time-man-pages/default.nix
+++ b/users/sterni/dot-time-man-pages/default.nix
@@ -9,9 +9,9 @@ let
       res = builtins.filter ({ username, ... }: username == user) depot.ops.users;
       len = builtins.length res;
     in
-      if len == 1
-      then (builtins.head res).email
-      else builtins.throw "findEmail: got ${toString len} results instead of 1";
+    if len == 1
+    then (builtins.head res).email
+    else builtins.throw "findEmail: got ${toString len} results instead of 1";
 
   # dot-time(7) man page, ported from dotti.me
   dot-time = rec {
@@ -65,6 +65,6 @@ let
   };
 
 in
-  depot.nix.buildManPages "dot-time" {} [
-    dot-time
-  ]
+depot.nix.buildManPages "dot-time" { } [
+  dot-time
+]
diff --git a/users/sterni/exercises/aoc/2021/default.nix b/users/sterni/exercises/aoc/2021/default.nix
index 33d3a92ac7bd..d3ed563ec6f8 100644
--- a/users/sterni/exercises/aoc/2021/default.nix
+++ b/users/sterni/exercises/aoc/2021/default.nix
@@ -1,4 +1,4 @@
-{ depot ? import ../../../../.. {}
+{ depot ? import ../../../../.. { }
 , pkgs ? depot.third_party.nixpkgs
 , ...
 }:
diff --git a/users/sterni/htmlman/default.nix b/users/sterni/htmlman/default.nix
index b88bc264103b..6bf21ce2dbfd 100644
--- a/users/sterni/htmlman/default.nix
+++ b/users/sterni/htmlman/default.nix
@@ -19,9 +19,9 @@ let
     ;
 
   bins = getBins cheddar [ "cheddar" ]
-      // getBins mandoc [ "mandoc" ]
-      // getBins coreutils [ "cat" "mv" "mkdir" ]
-      ;
+    // getBins mandoc [ "mandoc" ]
+    // getBins coreutils [ "cat" "mv" "mkdir" ]
+  ;
 
   normalizeDrv = fetchurl {
     url = "https://necolas.github.io/normalize.css/8.0.1/normalize.css";
@@ -29,7 +29,10 @@ let
   };
 
   execlineStdoutInto = target: line: [
-    "redirfd" "-w" "1" target
+    "redirfd"
+    "-w"
+    "1"
+    target
   ] ++ line;
 
   # I will not write a pure nix markdown renderer
@@ -39,16 +42,24 @@ let
   # I will not write a pure nix markdown renderer
   markdown = md:
     let
-      html = runExecline.local "rendered-markdown" {
-        stdin = md;
-      } ([
-        "importas" "-iu" "out" "out"
-      ] ++ execlineStdoutInto "$out" [
-        bins.cheddar "--about-filter" "description.md"
-      ]);
-    in builtins.readFile html;
-
-  indexTemplate = { title, description, pages ? [] }: ''
+      html = runExecline.local "rendered-markdown"
+        {
+          stdin = md;
+        }
+        ([
+          "importas"
+          "-iu"
+          "out"
+          "out"
+        ] ++ execlineStdoutInto "$out" [
+          bins.cheddar
+          "--about-filter"
+          "description.md"
+        ]);
+    in
+    builtins.readFile html;
+
+  indexTemplate = { title, description, pages ? [ ] }: ''
     <!doctype html>
     <html>
       <head>
@@ -137,40 +148,40 @@ let
 
   htmlman =
     { title
-    # title of the index page
+      # title of the index page
     , description ? ""
-    # description which is displayed after
-    # the main heading on the index page
-    , pages ? []
-    # man pages of the following structure:
-    # {
-    #   name : string;
-    #   section : int;
-    #   path : either path string;
-    # }
-    # path is optional, if it is not given,
-    # the man page source must be located at
-    # "${manDir}/${name}.${toString section}"
+      # description which is displayed after
+      # the main heading on the index page
+    , pages ? [ ]
+      # man pages of the following structure:
+      # {
+      #   name : string;
+      #   section : int;
+      #   path : either path string;
+      # }
+      # path is optional, if it is not given,
+      # the man page source must be located at
+      # "${manDir}/${name}.${toString section}"
     , manDir ? null
-    # directory in which man page sources are located
+      # directory in which man page sources are located
     , style ? defaultStyle
-    # CSS to use as a string
+      # CSS to use as a string
     , normalizeCss ? true
-    # whether to include normalize.css before the custom CSS
+      # whether to include normalize.css before the custom CSS
     , linkXr ? "all"
-    # How to handle cross references in the html output:
-    #
-    # * none:     don't convert cross references into hyperlinks
-    # * all:      link all cross references as if they were
-    #             rendered into $out by htmlman
-    # * inManDir: link to all man pages which have their source
-    #             in `manDir` and use the format string defined
-    #             in linkXrFallback for all other cross references.
+      # How to handle cross references in the html output:
+      #
+      # * none:     don't convert cross references into hyperlinks
+      # * all:      link all cross references as if they were
+      #             rendered into $out by htmlman
+      # * inManDir: link to all man pages which have their source
+      #             in `manDir` and use the format string defined
+      #             in linkXrFallback for all other cross references.
     , linkXrFallback ? "https://manpages.debian.org/unstable/%N.%S.en.html"
-    # fallback link to use if linkXr == "inManDir" and the man
-    # page is not in ${manDir}. Placeholders %N (name of page)
-    # and %S (section of page) can be used. See mandoc(1) for
-    # more information.
+      # fallback link to use if linkXr == "inManDir" and the man
+      # page is not in ${manDir}. Placeholders %N (name of page)
+      # and %S (section of page) can be used. See mandoc(1) for
+      # more information.
     }:
 
     let
@@ -188,47 +199,70 @@ let
       mandocOpts = lib.concatStringsSep "," ([
         "style=style.css"
       ] ++ linkXrEnum.match linkXr {
-        all      = [ "man=./%N.%S.html" ];
+        all = [ "man=./%N.%S.html" ];
         inManDir = [ "man=./%N.%S.html;${linkXrFallback}" ];
-        none     = [ ];
+        none = [ ];
       });
 
       html =
-        runExecline.local "htmlman-${title}" {
-          derivationArgs = {
-            inherit index style;
-            passAsFile = [ "index" "style" ];
-          };
-        } ([
-          "multisubstitute" [
-            "importas" "-iu" "out" "out"
-            "importas" "-iu" "index" "indexPath"
-            "importas" "-iu" "style" "stylePath"
-          ]
-          "if" [ bins.mkdir "-p" "$out" ]
-          "if" [ bins.mv "$index" "\${out}/index.html" ]
-          "if" (execlineStdoutInto "\${out}/style.css" [
-            "if" ([
-              bins.cat
-            ] ++ lib.optional normalizeCss normalizeDrv
+        runExecline.local "htmlman-${title}"
+          {
+            derivationArgs = {
+              inherit index style;
+              passAsFile = [ "index" "style" ];
+            };
+          }
+          ([
+            "multisubstitute"
+            [
+              "importas"
+              "-iu"
+              "out"
+              "out"
+              "importas"
+              "-iu"
+              "index"
+              "indexPath"
+              "importas"
+              "-iu"
+              "style"
+              "stylePath"
+            ]
+            "if"
+            [ bins.mkdir "-p" "$out" ]
+            "if"
+            [ bins.mv "$index" "\${out}/index.html" ]
+            "if"
+            (execlineStdoutInto "\${out}/style.css" [
+              "if"
+              ([
+                bins.cat
+              ] ++ lib.optional normalizeCss normalizeDrv
               ++ [
-              "$style"
+                "$style"
+              ])
             ])
-          ])
-          # let mandoc check for available man pages
-          "execline-cd" "${manDir}"
-        ] ++ lib.concatMap ({ name, section, ... }@p:
-          execlineStdoutInto "\${out}/${name}.${toString section}.html" [
-          "if" [
-            bins.mandoc
-            "-mdoc"
-            "-T" "html"
-            "-O" mandocOpts
-            (resolvePath p)
-          ]
-        ]) pages);
-    in html // {
+            # let mandoc check for available man pages
+            "execline-cd"
+            "${manDir}"
+          ] ++ lib.concatMap
+            ({ name, section, ... }@p:
+              execlineStdoutInto "\${out}/${name}.${toString section}.html" [
+                "if"
+                [
+                  bins.mandoc
+                  "-mdoc"
+                  "-T"
+                  "html"
+                  "-O"
+                  mandocOpts
+                  (resolvePath p)
+                ]
+              ])
+            pages);
+    in
+    html // {
       deploy = deployScript title html;
     };
 in
-  htmlman
+htmlman
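The htmlman hunk above is representative of the whole reformat: every argument of a multi-argument call moves onto its own line, and packed lists are broken into one element per line. A minimal before/after sketch of the pattern, using a made-up call for illustration (not a snippet from this commit):

    # before
    runExecline.local "example" { stdin = md; } [ "importas" "-iu" "out" "out" ]

    # after
    runExecline.local "example"
      {
        stdin = md;
      }
      [
        "importas"
        "-iu"
        "out"
        "out"
      ]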
diff --git a/users/sterni/nix/char/default.nix b/users/sterni/nix/char/default.nix
index aacfc9dcbe4d..9c6ce2fb250b 100644
--- a/users/sterni/nix/char/default.nix
+++ b/users/sterni/nix/char/default.nix
@@ -53,17 +53,20 @@ let
   asciiAlpha = c:
     let
       v = ord c;
-    in (v >= 65 && v <= 90)
+    in
+    (v >= 65 && v <= 90)
     || (v >= 97 && v <= 122);
 
   asciiNum = c:
     let
       v = ord c;
-    in v >= 48 && v <= 57;
+    in
+    v >= 48 && v <= 57;
 
   asciiAlphaNum = c: asciiAlpha c || asciiNum c;
 
-in {
+in
+{
   inherit
     allChars
     char
@@ -78,18 +81,19 @@ in {
   # originally I generated a nix file containing a list of
   # characters, but infinisil uses a better way which I adapt
   # which is using builtins.readFile instead of import.
-  __generateAllChars = pkgs.runCommandCC "generate-all-chars" {
-    source = ''
-      #include <stdio.h>
-
-      int main(void) {
-        for(int i = 1; i <= 0xff; i++) {
-          putchar(i);
+  __generateAllChars = pkgs.runCommandCC "generate-all-chars"
+    {
+      source = ''
+        #include <stdio.h>
+
+        int main(void) {
+          for(int i = 1; i <= 0xff; i++) {
+            putchar(i);
+          }
         }
-      }
-    '';
-    passAsFile = [ "source" ];
-  } ''
+      '';
+      passAsFile = [ "source" ];
+    } ''
     $CC -o "$out" -x c "$sourcePath"
   '';
 }
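For orientation on the predicates reindented above, a small usage sketch in the style of the file's own doc comments (values filled in by hand from the ASCII table, not re-evaluated against this commit):

    char.ord "a"
    => 97

    char.asciiAlpha "a"
    => true     # 97 falls in 97..122

    char.asciiNum "5"
    => true     # 53 falls in 48..57

    char.asciiAlphaNum "-"
    => false    # 45 is neither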
diff --git a/users/sterni/nix/char/tests/default.nix b/users/sterni/nix/char/tests/default.nix
index 49b439adbb84..313df474514c 100644
--- a/users/sterni/nix/char/tests/default.nix
+++ b/users/sterni/nix/char/tests/default.nix
@@ -26,6 +26,6 @@ let
   ];
 
 in
-  runTestsuite "char" [
-    testAllCharConversion
-  ]
+runTestsuite "char" [
+  testAllCharConversion
+]
diff --git a/users/sterni/nix/flow/default.nix b/users/sterni/nix/flow/default.nix
index b5783bd86deb..4bef0abb91e9 100644
--- a/users/sterni/nix/flow/default.nix
+++ b/users/sterni/nix/flow/default.nix
@@ -68,13 +68,14 @@ let
           then s x
           else x == s;
       in
-        if b
-        then builtins.elemAt c 1
-        else switch x (builtins.tail conds);
+      if b
+      then builtins.elemAt c 1
+      else switch x (builtins.tail conds);
 
 
 
-in {
+in
+{
   inherit
     cond
     switch
diff --git a/users/sterni/nix/flow/tests/default.nix b/users/sterni/nix/flow/tests/default.nix
index 54cea01858e7..9f974a61c7b2 100644
--- a/users/sterni/nix/flow/tests/default.nix
+++ b/users/sterni/nix/flow/tests/default.nix
@@ -21,7 +21,7 @@ let
       (cond [ [ true 1 2 ] [ false 1 ] ]))
     (assertEq "last is true" "last"
       (cond [
-        [ false dontEval]
+        [ false dontEval ]
         [ false dontEval ]
         [ true "last" ]
       ]))
@@ -34,6 +34,6 @@ let
   ];
 
 in
-  runTestsuite "nix.flow" [
-    testCond
-  ]
+runTestsuite "nix.flow" [
+  testCond
+]
diff --git a/users/sterni/nix/fun/tests/default.nix b/users/sterni/nix/fun/tests/default.nix
index 6492554306e1..f02f19943373 100644
--- a/users/sterni/nix/fun/tests/default.nix
+++ b/users/sterni/nix/fun/tests/default.nix
@@ -24,6 +24,6 @@ let
       (fun.hasEllipsis ({ depot, pkgs, ... }: 42)))
   ];
 in
-  runTestsuite "nix.fun" [
-    hasEllipsisTests
-  ]
+runTestsuite "nix.fun" [
+  hasEllipsisTests
+]
diff --git a/users/sterni/nix/html/default.nix b/users/sterni/nix/html/default.nix
index 2498d832aadf..d25a7ab8dac0 100644
--- a/users/sterni/nix/html/default.nix
+++ b/users/sterni/nix/html/default.nix
@@ -20,7 +20,7 @@ let
      => "&lt;hello&gt;"
   */
   escapeMinimal = builtins.replaceStrings
-    [ "<"    ">"    "&"     "\""     "'"      ]
+    [ "<" ">" "&" "\"" "'" ]
     [ "&lt;" "&gt;" "&amp;" "&quot;" "&#039;" ];
 
   /* Return a string with a correctly rendered tag of the given name,
@@ -87,18 +87,20 @@ let
   renderTag = tag: attrs: content:
     let
       attrs' = builtins.concatStringsSep "" (
-        builtins.map (n:
-          " ${escapeMinimal n}=\"${escapeMinimal (toString attrs.${n})}\""
-        ) (builtins.attrNames attrs)
+        builtins.map
+          (n:
+            " ${escapeMinimal n}=\"${escapeMinimal (toString attrs.${n})}\""
+          )
+          (builtins.attrNames attrs)
       );
       content' =
         if builtins.isList content
         then builtins.concatStringsSep "" content
         else content;
     in
-      if content == null
-      then "<${tag}${attrs'}/>"
-      else "<${tag}${attrs'}>${content'}</${tag}>";
+    if content == null
+    then "<${tag}${attrs'}/>"
+    else "<${tag}${attrs'}>${content'}</${tag}>";
 
   /* Prepend "<!DOCTYPE html>" to a string.
 
@@ -111,7 +113,8 @@ let
   */
   withDoctype = doc: "<!DOCTYPE html>" + doc;
 
-in {
+in
+{
   inherit escapeMinimal renderTag withDoctype;
 
   __findFile = _: renderTag;
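As a reminder of what this DSL evaluates to (the test file below exercises it at length), a few hand-evaluated calls; the first matches the doc comment shown earlier in the file:

    escapeMinimal "<hello>"
    => "&lt;hello&gt;"

    renderTag "br" { } null
    => "<br/>"

    <p> { class = "x"; } "hi"
    => "<p class=\"x\">hi</p>"    # <p> resolves via __findFile to renderTag "p"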
diff --git a/users/sterni/nix/html/tests/default.nix b/users/sterni/nix/html/tests/default.nix
index 8688b6937130..0d80f2f1cd70 100644
--- a/users/sterni/nix/html/tests/default.nix
+++ b/users/sterni/nix/html/tests/default.nix
@@ -8,15 +8,17 @@ let
     ;
 
   exampleDocument = withDoctype (<html> { lang = "en"; } [
-    (<head> {} [
+    (<head> { } [
       (<meta> { charset = "utf-8"; } null)
-      (<title> {} "html.nix example document")
-      (<link> {
-        rel = "license";
-        href = "https://code.tvl.fyi/about/LICENSE";
-        type = "text/html";
-      } null)
-      (<style> {}  (esc ''
+      (<title> { } "html.nix example document")
+      (<link>
+        {
+          rel = "license";
+          href = "https://code.tvl.fyi/about/LICENSE";
+          type = "text/html";
+        }
+        null)
+      (<style> { } (esc ''
         hgroup h2 {
           font-weight: normal;
         }
@@ -26,39 +28,45 @@ let
         }
       ''))
     ])
-    (<body> {} [
-      (<main> {} [
-        (<hgroup> {} [
-          (<h1> {} (esc "html.nix"))
-          (<h2> {} [
-            (<em> {} "the")
+    (<body> { } [
+      (<main> { } [
+        (<hgroup> { } [
+          (<h1> { } (esc "html.nix"))
+          (<h2> { } [
+            (<em> { } "the")
             (esc " most cursed HTML DSL ever!")
           ])
         ])
-        (<dl> {} [
-          (<dt> {} [
+        (<dl> { } [
+          (<dt> { } [
             (esc "Q: Wait, it's all ")
-            (<a> {
-              href = "https://cl.tvl.fyi/q/hashtag:cursed";
-            } (esc "cursed"))
+            (<a>
+              {
+                href = "https://cl.tvl.fyi/q/hashtag:cursed";
+              }
+              (esc "cursed"))
             (esc " nix hacks?")
           ])
-          (<dd> {} (esc "A: Always has been. 🔫"))
-          (<dt> {} (esc "Q: Why does this work?"))
-          (<dd> {} [
+          (<dd> { } (esc "A: Always has been. 🔫"))
+          (<dt> { } (esc "Q: Why does this work?"))
+          (<dd> { } [
             (esc "Because nix ")
-            (<a> {
-              href = "https://github.com/NixOS/nix/blob/293220bed5a75efc963e33c183787e87e55e28d9/src/libexpr/parser.y#L410-L416";
-            } (esc "translates "))
-            (<a> {
-              href = "https://github.com/NixOS/nix/blob/293220bed5a75efc963e33c183787e87e55e28d9/src/libexpr/lexer.l#L100";
-            } (esc "SPATH tokens"))
+            (<a>
+              {
+                href = "https://github.com/NixOS/nix/blob/293220bed5a75efc963e33c183787e87e55e28d9/src/libexpr/parser.y#L410-L416";
+              }
+              (esc "translates "))
+            (<a>
+              {
+                href = "https://github.com/NixOS/nix/blob/293220bed5a75efc963e33c183787e87e55e28d9/src/libexpr/lexer.l#L100";
+              }
+              (esc "SPATH tokens"))
             (esc " like ")
-            (<code> {} (esc "<nixpkgs>"))
+            (<code> { } (esc "<nixpkgs>"))
             (esc " into calls to ")
-            (<code> {} (esc "__findFile"))
+            (<code> { } (esc "__findFile"))
             (esc " in the ")
-            (<em> {} (esc "current"))
+            (<em> { } (esc "current"))
             (esc " scope.")
           ])
         ])
@@ -67,7 +75,8 @@ let
   ]);
 in
 
-pkgs.runCommandNoCC "html.nix.html" {
+pkgs.runCommandNoCC "html.nix.html"
+{
   passAsFile = [ "exampleDocument" ];
   inherit exampleDocument;
   nativeBuildInputs = [ pkgs.html5validator ];
diff --git a/users/sterni/nix/int/default.nix b/users/sterni/nix/int/default.nix
index b3157571272f..54b55964722d 100644
--- a/users/sterni/nix/int/default.nix
+++ b/users/sterni/nix/int/default.nix
@@ -47,12 +47,12 @@ let
         if i == 0
         then ""
         else go (bitShiftR i 4)
-           + string.charAt (bitAnd i 15) hexdigits;
+          + string.charAt (bitAnd i 15) hexdigits;
       sign = lib.optionalString (int < 0) "-";
     in
-      if int == 0
-      then "0"
-      else "${sign}${go (abs int)}";
+    if int == 0
+    then "0"
+    else "${sign}${go (abs int)}";
 
   fromHexMap = builtins.listToAttrs
     (lib.imap0 (i: c: { name = c; value = i; })
@@ -72,11 +72,12 @@ let
           val = v.val + (fromHexMap."${d}" * v.mul);
           mul = v.mul * 16;
         })
-        { val = 0; mul = 1; } digits;
+        { val = 0; mul = 1; }
+        digits;
     in
-      if negative
-      then -parsed.val
-      else parsed.val;
+    if negative
+    then -parsed.val
+    else parsed.val;
 
   # A nix integer is a 64bit signed integer
   maxBound = 9223372036854775807;
@@ -99,7 +100,8 @@ let
 
   inRange = a: b: x: x >= a && x <= b;
 
-in {
+in
+{
   inherit
     maxBound
     minBound
diff --git a/users/sterni/nix/int/tests/default.nix b/users/sterni/nix/int/tests/default.nix
index fac45dd251e1..8d2263b42117 100644
--- a/users/sterni/nix/int/tests/default.nix
+++ b/users/sterni/nix/int/tests/default.nix
@@ -31,22 +31,262 @@ let
   ];
 
   expectedBytes = [
-    "00" "01" "02" "03" "04" "05" "06" "07" "08" "09" "0A" "0B" "0C" "0D" "0E" "0F"
-    "10" "11" "12" "13" "14" "15" "16" "17" "18" "19" "1A" "1B" "1C" "1D" "1E" "1F"
-    "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "2A" "2B" "2C" "2D" "2E" "2F"
-    "30" "31" "32" "33" "34" "35" "36" "37" "38" "39" "3A" "3B" "3C" "3D" "3E" "3F"
-    "40" "41" "42" "43" "44" "45" "46" "47" "48" "49" "4A" "4B" "4C" "4D" "4E" "4F"
-    "50" "51" "52" "53" "54" "55" "56" "57" "58" "59" "5A" "5B" "5C" "5D" "5E" "5F"
-    "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "6A" "6B" "6C" "6D" "6E" "6F"
-    "70" "71" "72" "73" "74" "75" "76" "77" "78" "79" "7A" "7B" "7C" "7D" "7E" "7F"
-    "80" "81" "82" "83" "84" "85" "86" "87" "88" "89" "8A" "8B" "8C" "8D" "8E" "8F"
-    "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "9A" "9B" "9C" "9D" "9E" "9F"
-    "A0" "A1" "A2" "A3" "A4" "A5" "A6" "A7" "A8" "A9" "AA" "AB" "AC" "AD" "AE" "AF"
-    "B0" "B1" "B2" "B3" "B4" "B5" "B6" "B7" "B8" "B9" "BA" "BB" "BC" "BD" "BE" "BF"
-    "C0" "C1" "C2" "C3" "C4" "C5" "C6" "C7" "C8" "C9" "CA" "CB" "CC" "CD" "CE" "CF"
-    "D0" "D1" "D2" "D3" "D4" "D5" "D6" "D7" "D8" "D9" "DA" "DB" "DC" "DD" "DE" "DF"
-    "E0" "E1" "E2" "E3" "E4" "E5" "E6" "E7" "E8" "E9" "EA" "EB" "EC" "ED" "EE" "EF"
-    "F0" "F1" "F2" "F3" "F4" "F5" "F6" "F7" "F8" "F9" "FA" "FB" "FC" "FD" "FE" "FF"
+    "00"
+    "01"
+    "02"
+    "03"
+    "04"
+    "05"
+    "06"
+    "07"
+    "08"
+    "09"
+    "0A"
+    "0B"
+    "0C"
+    "0D"
+    "0E"
+    "0F"
+    "10"
+    "11"
+    "12"
+    "13"
+    "14"
+    "15"
+    "16"
+    "17"
+    "18"
+    "19"
+    "1A"
+    "1B"
+    "1C"
+    "1D"
+    "1E"
+    "1F"
+    "20"
+    "21"
+    "22"
+    "23"
+    "24"
+    "25"
+    "26"
+    "27"
+    "28"
+    "29"
+    "2A"
+    "2B"
+    "2C"
+    "2D"
+    "2E"
+    "2F"
+    "30"
+    "31"
+    "32"
+    "33"
+    "34"
+    "35"
+    "36"
+    "37"
+    "38"
+    "39"
+    "3A"
+    "3B"
+    "3C"
+    "3D"
+    "3E"
+    "3F"
+    "40"
+    "41"
+    "42"
+    "43"
+    "44"
+    "45"
+    "46"
+    "47"
+    "48"
+    "49"
+    "4A"
+    "4B"
+    "4C"
+    "4D"
+    "4E"
+    "4F"
+    "50"
+    "51"
+    "52"
+    "53"
+    "54"
+    "55"
+    "56"
+    "57"
+    "58"
+    "59"
+    "5A"
+    "5B"
+    "5C"
+    "5D"
+    "5E"
+    "5F"
+    "60"
+    "61"
+    "62"
+    "63"
+    "64"
+    "65"
+    "66"
+    "67"
+    "68"
+    "69"
+    "6A"
+    "6B"
+    "6C"
+    "6D"
+    "6E"
+    "6F"
+    "70"
+    "71"
+    "72"
+    "73"
+    "74"
+    "75"
+    "76"
+    "77"
+    "78"
+    "79"
+    "7A"
+    "7B"
+    "7C"
+    "7D"
+    "7E"
+    "7F"
+    "80"
+    "81"
+    "82"
+    "83"
+    "84"
+    "85"
+    "86"
+    "87"
+    "88"
+    "89"
+    "8A"
+    "8B"
+    "8C"
+    "8D"
+    "8E"
+    "8F"
+    "90"
+    "91"
+    "92"
+    "93"
+    "94"
+    "95"
+    "96"
+    "97"
+    "98"
+    "99"
+    "9A"
+    "9B"
+    "9C"
+    "9D"
+    "9E"
+    "9F"
+    "A0"
+    "A1"
+    "A2"
+    "A3"
+    "A4"
+    "A5"
+    "A6"
+    "A7"
+    "A8"
+    "A9"
+    "AA"
+    "AB"
+    "AC"
+    "AD"
+    "AE"
+    "AF"
+    "B0"
+    "B1"
+    "B2"
+    "B3"
+    "B4"
+    "B5"
+    "B6"
+    "B7"
+    "B8"
+    "B9"
+    "BA"
+    "BB"
+    "BC"
+    "BD"
+    "BE"
+    "BF"
+    "C0"
+    "C1"
+    "C2"
+    "C3"
+    "C4"
+    "C5"
+    "C6"
+    "C7"
+    "C8"
+    "C9"
+    "CA"
+    "CB"
+    "CC"
+    "CD"
+    "CE"
+    "CF"
+    "D0"
+    "D1"
+    "D2"
+    "D3"
+    "D4"
+    "D5"
+    "D6"
+    "D7"
+    "D8"
+    "D9"
+    "DA"
+    "DB"
+    "DC"
+    "DD"
+    "DE"
+    "DF"
+    "E0"
+    "E1"
+    "E2"
+    "E3"
+    "E4"
+    "E5"
+    "E6"
+    "E7"
+    "E8"
+    "E9"
+    "EA"
+    "EB"
+    "EC"
+    "ED"
+    "EE"
+    "EF"
+    "F0"
+    "F1"
+    "F2"
+    "F3"
+    "F4"
+    "F5"
+    "F6"
+    "F7"
+    "F8"
+    "F9"
+    "FA"
+    "FB"
+    "FC"
+    "FD"
+    "FE"
+    "FF"
   ];
 
   hexByte = i: string.fit { width = 2; char = "0"; } (int.toHex i);
@@ -64,14 +304,18 @@ let
   ];
 
   testHex = it "checks conversion to hex" (lib.flatten [
-    (lib.imap0 (i: hex: [
-      (assertEq "hexByte ${toString i} == ${hex}" (hexByte i) hex)
-      (assertEq "${toString i} == fromHex ${hex}" i (int.fromHex hex))
-    ]) expectedBytes)
-    (builtins.map ({ left, right }: [
-      (assertEq "toHex ${toString left} == ${right}" (int.toHex left) right)
-      (assertEq "${toString left} == fromHex ${right}" left (int.fromHex right))
-    ]) hexInts)
+    (lib.imap0
+      (i: hex: [
+        (assertEq "hexByte ${toString i} == ${hex}" (hexByte i) hex)
+        (assertEq "${toString i} == fromHex ${hex}" i (int.fromHex hex))
+      ])
+      expectedBytes)
+    (builtins.map
+      ({ left, right }: [
+        (assertEq "toHex ${toString left} == ${right}" (int.toHex left) right)
+        (assertEq "${toString left} == fromHex ${right}" left (int.fromHex right))
+      ])
+      hexInts)
   ]);
 
   testBasic = it "checks basic int operations" [
@@ -94,20 +338,23 @@ let
   ];
 
   testExp = it "checks exponentiation"
-    (builtins.map ({ left, right }:
-      assertEq
-        "2 ^ ${toString left} == ${toString right}"
-        (int.exp 2 left) right) expNumbers);
+    (builtins.map
+      ({ left, right }:
+        assertEq
+          "2 ^ ${toString left} == ${toString right}"
+          (int.exp 2 left)
+          right)
+      expNumbers);
 
   shifts = [
-    { a =   2; b = 5; c =   64; op = "<<"; }
-    { a =  -2; b = 5; c =  -64; op = "<<"; }
+    { a = 2; b = 5; c = 64; op = "<<"; }
+    { a = -2; b = 5; c = -64; op = "<<"; }
     { a = 123; b = 4; c = 1968; op = "<<"; }
-    { a =   1; b = 8; c =  256; op = "<<"; }
-    { a = 256; b = 8; c =    1; op = ">>"; }
-    { a = 374; b = 2; c =   93; op = ">>"; }
-    { a =   2; b = 2; c =    0; op = ">>"; }
-    { a =  99; b = 9; c =    0; op = ">>"; }
+    { a = 1; b = 8; c = 256; op = "<<"; }
+    { a = 256; b = 8; c = 1; op = ">>"; }
+    { a = 374; b = 2; c = 93; op = ">>"; }
+    { a = 2; b = 2; c = 0; op = ">>"; }
+    { a = 99; b = 9; c = 0; op = ">>"; }
   ];
 
   checkShift = { a, b, c, op }@args:
@@ -116,15 +363,18 @@ let
         "<<" = int.bitShiftL;
         ">>" = int.bitShiftR;
       };
-    in assertEq "${toString a} ${op} ${toString b} == ${toString c}" (f a b) c;
+    in
+    assertEq "${toString a} ${op} ${toString b} == ${toString c}" (f a b) c;
 
   checkShiftRDivExp = n:
     assertEq "${toString n} >> 5 == ${toString n} / 2 ^ 5"
-      (int.bitShiftR n 5) (int.div n (int.exp 2 5));
+      (int.bitShiftR n 5)
+      (int.div n (int.exp 2 5));
 
   checkShiftLMulExp = n:
     assertEq "${toString n} >> 6 == ${toString n} * 2 ^ 6"
-      (int.bitShiftL n 5) (int.mul n (int.exp 2 5));
+      (int.bitShiftL n 5)
+      (int.mul n (int.exp 2 5));
 
   testBit = it "checks bitwise operations" (lib.flatten [
     (builtins.map checkShift shifts)
@@ -160,11 +410,11 @@ let
   ]);
 
   divisions = [
-    { a =  2; b =  1; c = 2; mod = 0;}
-    { a =  2; b =  2; c = 1; mod = 0;}
-    { a = 20; b = 10; c = 2; mod = 0;}
-    { a = 12; b =  5; c = 2; mod = 2;}
-    { a = 23; b =  4; c = 5; mod = 3;}
+    { a = 2; b = 1; c = 2; mod = 0; }
+    { a = 2; b = 2; c = 1; mod = 0; }
+    { a = 20; b = 10; c = 2; mod = 0; }
+    { a = 12; b = 5; c = 2; mod = 2; }
+    { a = 23; b = 4; c = 5; mod = 3; }
   ];
 
   checkDiv = n: { a, b, c, mod }: [
@@ -176,28 +426,34 @@ let
   testDivMod = it "checks integer division and modulo"
     (lib.flatten [
       (builtins.map (checkDiv "+a / +b") divisions)
-      (builtins.map (fun.rl (checkDiv "-a / +b") (x: x // {
-        a = -x.a;
-        c = -x.c;
-        mod = -x.mod;
-      })) divisions)
-      (builtins.map (fun.rl (checkDiv "+a / -b") (x: x // {
-        b = -x.b;
-        c = -x.c;
-      })) divisions)
-      (builtins.map (fun.rl (checkDiv "-a / -b") (x: x // {
-        a = -x.a;
-        b = -x.b;
-        mod = -x.mod;
-      })) divisions)
+      (builtins.map
+        (fun.rl (checkDiv "-a / +b") (x: x // {
+          a = -x.a;
+          c = -x.c;
+          mod = -x.mod;
+        }))
+        divisions)
+      (builtins.map
+        (fun.rl (checkDiv "+a / -b") (x: x // {
+          b = -x.b;
+          c = -x.c;
+        }))
+        divisions)
+      (builtins.map
+        (fun.rl (checkDiv "-a / -b") (x: x // {
+          a = -x.a;
+          b = -x.b;
+          mod = -x.mod;
+        }))
+        divisions)
     ]);
 
 in
-  runTestsuite "nix.int" [
-    testBounds
-    testHex
-    testBasic
-    testExp
-    testBit
-    testDivMod
-  ]
+runTestsuite "nix.int" [
+  testBounds
+  testHex
+  testBasic
+  testExp
+  testBit
+  testDivMod
+]
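The expectedBytes list above simply spells out the zero-padded hex form of every byte value 0x00–0xFF that testHex round-trips through toHex/fromHex. A few of the asserted values, for orientation:

    int.toHex 255
    => "FF"

    int.fromHex "2F"
    => 47

    hexByte 10
    => "0A"     # string.fit pads "A" to width 2 with "0"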
diff --git a/users/sterni/nix/string/default.nix b/users/sterni/nix/string/default.nix
index 19d2cec243c0..852ef2538fdc 100644
--- a/users/sterni/nix/string/default.nix
+++ b/users/sterni/nix/string/default.nix
@@ -21,7 +21,8 @@ let
   charAt = i: s:
     let
       r = builtins.substring i 1 s;
-    in if r == "" then null else r;
+    in
+    if r == "" then null else r;
 
   charIndex = char: s:
     let
@@ -32,7 +33,8 @@ let
           [ (charAt i s == char) i ]
           [ true (go (i + 1)) ]
         ];
-    in go 0;
+    in
+    go 0;
 
   toChars = lib.stringToCharacters;
   fromChars = lib.concatStrings;
@@ -46,15 +48,16 @@ let
     let
       leftS = fromChars (builtins.genList (_: char) left);
       rightS = fromChars (builtins.genList (_: char) right);
-    in "${leftS}${s}${rightS}";
+    in
+    "${leftS}${s}${rightS}";
 
   fit = { char ? " ", width, side ? "left" }: s:
     let
       diff = width - builtins.stringLength s;
     in
-      if diff <= 0
-      then s
-      else pad { inherit char; "${side}" = diff; } s;
+    if diff <= 0
+    then s
+    else pad { inherit char; "${side}" = diff; } s;
 
   # pattern matching for strings only
   match = val: matcher: matcher."${val}";
@@ -80,23 +83,28 @@ let
       tokens = lib.flatten (builtins.split "(%.)" formatString);
       argsNeeded = builtins.length (builtins.filter specifierWithArg tokens);
 
-      format = args: (builtins.foldl' ({ out ? "", argIndex ? 0 }: token: {
-        argIndex = argIndex + (if specifierWithArg token then 1 else 0);
-        out =
-          /**/ if token == "%s" then out + builtins.elemAt args argIndex
-          else if token == "%%" then out + "%"
-          else if isSpecifier token then throw "Unsupported format specifier ${token}"
-          else out + token;
-      }) {} tokens).out;
+      format = args: (builtins.foldl'
+        ({ out ? "", argIndex ? 0 }: token: {
+          argIndex = argIndex + (if specifierWithArg token then 1 else 0);
+          out =
+            /**/
+            if token == "%s" then out + builtins.elemAt args argIndex
+            else if token == "%%" then out + "%"
+            else if isSpecifier token then throw "Unsupported format specifier ${token}"
+            else out + token;
+        })
+        { }
+        tokens).out;
 
       accumulateArgs = argCount: args:
         if argCount > 0
         then arg: accumulateArgs (argCount - 1) (args ++ [ arg ])
         else format args;
     in
-      accumulateArgs argsNeeded [];
+    accumulateArgs argsNeeded [ ];
 
-in {
+in
+{
   inherit
     take
     drop
diff --git a/users/sterni/nix/string/tests/default.nix b/users/sterni/nix/string/tests/default.nix
index c8aec9464077..e9015e95dca4 100644
--- a/users/sterni/nix/string/tests/default.nix
+++ b/users/sterni/nix/string/tests/default.nix
@@ -63,10 +63,10 @@ let
   ];
 
 in
-  runTestsuite "nix.string" [
-    testTakeDrop
-    testIndexing
-    testFinding
-    testMatch
-    testPrintf
-  ]
+runTestsuite "nix.string" [
+  testTakeDrop
+  testIndexing
+  testFinding
+  testMatch
+  testPrintf
+]
diff --git a/users/sterni/nix/url/default.nix b/users/sterni/nix/url/default.nix
index 37bd0de66ac9..4a401873a1f2 100644
--- a/users/sterni/nix/url/default.nix
+++ b/users/sterni/nix/url/default.nix
@@ -10,9 +10,24 @@ let
     ;
 
   reserved = c: builtins.elem c [
-    "!" "#" "$" "&" "'" "(" ")"
-    "*" "+" "," "/" ":" ";" "="
-    "?" "@" "[" "]"
+    "!"
+    "#"
+    "$"
+    "&"
+    "'"
+    "("
+    ")"
+    "*"
+    "+"
+    ","
+    "/"
+    ":"
+    ";"
+    "="
+    "?"
+    "@"
+    "["
+    "]"
   ];
 
   unreserved = c: char.asciiAlphaNum c
@@ -21,11 +36,13 @@ let
   percentEncode = c:
     if unreserved c
     then c
-    else "%" + (string.fit {
-      width = 2;
-      char = "0";
-      side = "left";
-    } (int.toHex (char.ord c)));
+    else "%" + (string.fit
+      {
+        width = 2;
+        char = "0";
+        side = "left";
+      }
+      (int.toHex (char.ord c)));
 
   encode = { leaveReserved ? false }: s:
     let
@@ -34,7 +51,8 @@ let
         if leaveReserved && reserved c
         then c
         else percentEncode c;
-    in lib.concatStrings (builtins.map tr chars);
+    in
+    lib.concatStrings (builtins.map tr chars);
 
   decode = s:
     let
@@ -71,9 +89,10 @@ let
         ];
 
     in
-      (builtins.foldl' decodeStep {} tokens).result;
+    (builtins.foldl' decodeStep { } tokens).result;
 
-in {
+in
+{
   inherit
     encode
     decode
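A quick sanity sketch of the encoder reformatted above (ASCII values worked out by hand, not re-run against this commit):

    url.encode { } "100% tvl"
    => "100%25%20tvl"       # "%" is 0x25, space is 0x20; neither is unreserved

    url.encode { leaveReserved = true; } "/index?a=b"
    => "/index?a=b"         # "/", "?" and "=" are on the reserved list and kept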
diff --git a/users/sterni/nix/url/tests/default.nix b/users/sterni/nix/url/tests/default.nix
index 7cf53cde1555..4eb6f95ccd07 100644
--- a/users/sterni/nix/url/tests/default.nix
+++ b/users/sterni/nix/url/tests/default.nix
@@ -14,11 +14,13 @@ let
 
   checkEncoding = args: { left, right }:
     assertEq "encode ${builtins.toJSON left} == ${builtins.toJSON right}"
-      (url.encode args left) right;
+      (url.encode args left)
+      right;
 
   checkDecoding = { left, right }:
-  assertEq "${builtins.toJSON left} == decode ${builtins.toJSON right}"
-    (url.decode left) right;
+    assertEq "${builtins.toJSON left} == decode ${builtins.toJSON right}"
+      (url.decode left)
+      right;
 
   unreserved = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789.-_~";
 
@@ -33,7 +35,7 @@ let
   ];
 
   testEncode = it "checks url.encode"
-    (builtins.map (checkEncoding {}) encodeExpected);
+    (builtins.map (checkEncoding { }) encodeExpected);
 
   testDecode = it "checks url.decode"
     (builtins.map checkDecoding encodeExpected);
@@ -50,7 +52,7 @@ let
       "urn:oasis:names:specification:docbook:dtd:xml:4.1.2"
     ]);
 in
-  runTestsuite "nix.url" [
-    testEncode
-    testLeaveReserved
-  ]
+runTestsuite "nix.url" [
+  testEncode
+  testLeaveReserved
+]
diff --git a/users/sterni/nix/utf8/default.nix b/users/sterni/nix/utf8/default.nix
index 270da934b6a6..71c846c0421e 100644
--- a/users/sterni/nix/utf8/default.nix
+++ b/users/sterni/nix/utf8/default.nix
@@ -25,7 +25,7 @@ let
      Type: integer -> integer
   */
   byteCount = i: flow.cond [
-    [ (int.bitAnd i 128 == 0)   1 ]
+    [ (int.bitAnd i 128 == 0) 1 ]
     [ (int.bitAnd i 224 == 192) 2 ]
     [ (int.bitAnd i 240 == 224) 3 ]
     [ (int.bitAnd i 248 == 240) 4 ]
@@ -45,30 +45,30 @@ let
     first:
     # byte position as an index starting with 0
     pos:
-      let
-        defaultRange = int.inRange 128 191;
-
-        secondBytePredicate = flow.switch first [
-          [ (int.inRange 194 223) defaultRange          ] # C2..DF
-          [ 224                   (int.inRange 160 191) ] # E0
-          [ (int.inRange 225 236) defaultRange          ] # E1..EC
-          [ 237                   (int.inRange 128 159) ] # ED
-          [ (int.inRange 238 239) defaultRange          ] # EE..EF
-          [ 240                   (int.inRange 144 191) ] # F0
-          [ (int.inRange 241 243) defaultRange          ] # F1..F3
-          [ 244                   (int.inRange 128 143) ] # F4
-          [ (fun.const true)      null                  ]
-        ];
+    let
+      defaultRange = int.inRange 128 191;
+
+      secondBytePredicate = flow.switch first [
+        [ (int.inRange 194 223) defaultRange ] # C2..DF
+        [ 224 (int.inRange 160 191) ] # E0
+        [ (int.inRange 225 236) defaultRange ] # E1..EC
+        [ 237 (int.inRange 128 159) ] # ED
+        [ (int.inRange 238 239) defaultRange ] # EE..EF
+        [ 240 (int.inRange 144 191) ] # F0
+        [ (int.inRange 241 243) defaultRange ] # F1..F3
+        [ 244 (int.inRange 128 143) ] # F4
+        [ (fun.const true) null ]
+      ];
 
-        firstBytePredicate = byte: assert first == byte;
-          first < 128 || secondBytePredicate != null;
-      in
-        # Either ASCII or in one of the byte ranges of Table 3-6.
-        if pos == 0 then firstBytePredicate
-        # return predicate according to Table 3-6.
-        else if pos == 1 then assert secondBytePredicate != null; secondBytePredicate
-        # 3rd and 4th byte have only one validity rule
-        else defaultRange;
+      firstBytePredicate = byte: assert first == byte;
+        first < 128 || secondBytePredicate != null;
+    in
+    # Either ASCII or in one of the byte ranges of Table 3-6.
+    if pos == 0 then firstBytePredicate
+    # return predicate according to Table 3-6.
+    else if pos == 1 then assert secondBytePredicate != null; secondBytePredicate
+    # 3rd and 4th byte have only one validity rule
+    else defaultRange;
 
   /* Iteration step for decoding an UTF-8 byte sequence.
      It decodes incrementally, i. e. it has to be fed
@@ -128,23 +128,24 @@ let
             # the current value by the amount of bytes left.
             offset = (count - (pos + 1)) * 6;
           in
-            code + (int.bitShiftL (int.bitAnd mask value) offset);
+          code + (int.bitShiftL (int.bitAnd mask value) offset);
       illFormedMsg =
         "Ill-formed byte ${int.toHex value} at position ${toString pos} in ${toString count} byte UTF-8 sequence";
     in
-      if !(wellFormedByte first pos value) then builtins.throw illFormedMsg
-      else if pos + 1 == count
-      then (builtins.removeAttrs args [ # allow extra state being passed through
-        "count"
-        "code"
-        "pos"
-        "first"
-      ]) // { result = newCode; }
-      else (builtins.removeAttrs args [ "result" ]) // {
-        inherit count first;
-        code = newCode;
-        pos  = pos + 1;
-      };
+    if !(wellFormedByte first pos value) then builtins.throw illFormedMsg
+    else if pos + 1 == count
+    then (builtins.removeAttrs args [
+      # allow extra state being passed through
+      "count"
+      "code"
+      "pos"
+      "first"
+    ]) // { result = newCode; }
+    else (builtins.removeAttrs args [ "result" ]) // {
+      inherit count first;
+      code = newCode;
+      pos = pos + 1;
+    };
 
   /* Decode an UTF-8 string into a list of codepoints.
 
@@ -161,7 +162,7 @@ let
           {
             key = "start";
             stringIndex = -1;
-            state = {};
+            state = { };
             codepoint = null;
           }
         ];
@@ -170,7 +171,8 @@ let
             # updated values for current iteration step
             newIndex = stringIndex + 1;
             newState = step state (builtins.substring newIndex 1 s);
-          in lib.optional (newIndex < stringLength) {
+          in
+          lib.optional (newIndex < stringLength) {
             # unique keys to make genericClosure happy
             key = toString newIndex;
             # carryover state for the next step
@@ -183,35 +185,39 @@ let
     in
     # extract all steps that yield a code point into a list
     builtins.map (v: v.codepoint) (
-      builtins.filter (
-        { codepoint, stringIndex, state, ... }:
-
-        let
-          # error message in case we are missing bytes at the end of input
-          earlyEndMsg =
-            if state ? count && state ? pos
-            then "Missing ${toString (with state; count - pos)} bytes at end of input"
-            else "Unexpected end of input";
-        in
-
-        # filter out all iteration steps without a codepoint value
-        codepoint != null
+      builtins.filter
+        (
+          { codepoint, stringIndex, state, ... }:
+
+          let
+            # error message in case we are missing bytes at the end of input
+            earlyEndMsg =
+              if state ? count && state ? pos
+              then "Missing ${toString (with state; count - pos)} bytes at end of input"
+              else "Unexpected end of input";
+          in
+
+          # filter out all iteration steps without a codepoint value
+          codepoint != null
           # if we are at the iteration step of a non-empty input string, throw
           # an error if no codepoint was returned, as it indicates an incomplete
           # UTF-8 sequence.
           || (stringLength > 0 && stringIndex == stringLength - 1 && throw earlyEndMsg)
 
-      ) iterResult
+        )
+        iterResult
     );
 
   /* Pretty prints a Unicode codepoint in the U+<HEX> notation.
 
      Type: integer -> string
   */
-  formatCodepoint = cp: "U+" + string.fit {
-    width = 4;
-    char = "0";
-  } (int.toHex cp);
+  formatCodepoint = cp: "U+" + string.fit
+    {
+      width = 4;
+      char = "0";
+    }
+    (int.toHex cp);
 
   encodeCodepoint = cp:
     let
@@ -219,11 +225,11 @@ let
       # Note that this doesn't check if the Unicode codepoint is allowed,
       # but rather allows all theoretically UTF-8-encodeable ones.
       count = flow.switch cp [
-        [ (int.inRange 0 127)         1 ] # 00000000 0xxxxxxx
-        [ (int.inRange 128 2047)      2 ] # 00000yyy yyxxxxxx
-        [ (int.inRange 2048 65535)    3 ] # zzzzyyyy yyxxxxxx
+        [ (int.inRange 0 127) 1 ] # 00000000 0xxxxxxx
+        [ (int.inRange 128 2047) 2 ] # 00000yyy yyxxxxxx
+        [ (int.inRange 2048 65535) 3 ] # zzzzyyyy yyxxxxxx
         [ (int.inRange 65536 1114111) 4 ] # 000uuuuu zzzzyyyy yyxxxxxx,
-                                          # capped at U+10FFFF
+        # capped at U+10FFFF
 
         [ (fun.const true) (builtins.throw invalidCodepointMsg) ]
       ];
@@ -234,32 +240,34 @@ let
       # according to Table 3-6. from The Unicode Standard, Version 13.0,
       # section 3.9. u is split into uh and ul since they are used in
       # different bytes in the end.
-      components = lib.mapAttrs (_: { mask, offset }:
-        int.bitAnd (int.bitShiftR cp offset) mask
-      ) {
-        x = {
-          mask = if count > 1 then 63 else 127;
-          offset = 0;
-        };
-        y = {
-          mask = if count > 2 then 63 else 31;
-          offset = 6;
-        };
-        z = {
-          mask = 15;
-          offset = 12;
-        };
-        # u which belongs into the second byte
-        ul = {
-          mask = 3;
-          offset = 16;
-        };
-        # u which belongs into the first byte
-        uh = {
-          mask = 7;
-          offset = 18;
+      components = lib.mapAttrs
+        (_: { mask, offset }:
+          int.bitAnd (int.bitShiftR cp offset) mask
+        )
+        {
+          x = {
+            mask = if count > 1 then 63 else 127;
+            offset = 0;
+          };
+          y = {
+            mask = if count > 2 then 63 else 31;
+            offset = 6;
+          };
+          z = {
+            mask = 15;
+            offset = 12;
+          };
+          # u which belongs into the second byte
+          ul = {
+            mask = 3;
+            offset = 16;
+          };
+          # u which belongs into the first byte
+          uh = {
+            mask = 7;
+            offset = 18;
+          };
         };
-      };
       inherit (components) x y z ul uh;
 
       # Finally construct the byte sequence for the given codepoint. This is
@@ -286,15 +294,18 @@ let
 
       unableToEncodeMessage = "Can't encode ${formatCodepoint cp} as UTF-8";
 
-    in string.fromBytes (
-      builtins.genList (i:
-        let
-          byte = builtins.elemAt bytes i;
-        in
+    in
+    string.fromBytes (
+      builtins.genList
+        (i:
+          let
+            byte = builtins.elemAt bytes i;
+          in
           if wellFormedByte firstByte i byte
           then byte
           else builtins.throw unableToEncodeMessage
-      ) count
+        )
+        count
     );
 
   /* Encode a list of Unicode codepoints into an UTF-8 string.
@@ -303,7 +314,8 @@ let
   */
   encode = lib.concatMapStrings encodeCodepoint;
 
-in {
+in
+{
   inherit
     encode
     decode
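For context, the round trip these functions implement, worked out by hand for a single two-byte sequence (not re-run against this commit):

    utf8.decode "ä"
    => [ 228 ]              # "ä" is the byte sequence 0xC3 0xA4

    utf8.encode [ 228 ]
    => "ä"

    utf8.formatCodepoint 228
    => "U+00E4"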
diff --git a/users/sterni/nix/utf8/tests/default.nix b/users/sterni/nix/utf8/tests/default.nix
index ddcd34208a6d..40783eab2421 100644
--- a/users/sterni/nix/utf8/tests/default.nix
+++ b/users/sterni/nix/utf8/tests/default.nix
@@ -25,9 +25,10 @@ let
     char
     ;
 
-  rustDecoder = rustSimple {
-    name = "utf8-decode";
-  } ''
+  rustDecoder = rustSimple
+    {
+      name = "utf8-decode";
+    } ''
     use std::io::{self, Read};
     fn main() -> std::io::Result<()> {
       let mut buffer = String::new();
@@ -47,10 +48,11 @@ let
 
   rustDecode = s:
     let
-      expr = runCommandLocal "${s}-decoded" {} ''
+      expr = runCommandLocal "${s}-decoded" { } ''
         printf '%s' ${lib.escapeShellArg s} | ${rustDecoder} > $out
       '';
-    in import expr;
+    in
+    import expr;
 
   hexDecode = l:
     utf8.decode (string.fromBytes (builtins.map int.fromHex l));
@@ -65,23 +67,27 @@ let
     (assertEq "well-formed: F4 80 83 92" (hexDecode [ "F4" "80" "83" "92" ]) [ 1048786 ])
     (assertThrows "Codepoint out of range: 0xFFFFFF" (hexEncode [ "FFFFFF" ]))
     (assertThrows "Codepoint out of range: -0x02" (hexEncode [ "-02" ]))
-  ] ++ builtins.genList (i:
-    let
-      cp = i + int.fromHex "D800";
-    in
+  ] ++ builtins.genList
+    (i:
+      let
+        cp = i + int.fromHex "D800";
+      in
       assertThrows "Can't encode UTF-16 reserved characters: ${utf8.formatCodepoint cp}"
         (utf8.encode [ cp ])
-  ) (int.fromHex "07FF"));
+    )
+    (int.fromHex "07FF"));
 
   testAscii = it "checks decoding of ascii strings"
-    (builtins.map (s: assertEq "ASCII decoding is equal to UTF-8 decoding for \"${s}\""
-      (string.toBytes s) (utf8.decode s)) [
-        "foo bar"
-        "hello\nworld"
-        "carriage\r\nreturn"
-        "1238398494829304 []<><>({})[]!!)"
-        (string.take 127 char.allChars)
-      ]);
+    (builtins.map
+      (s: assertEq "ASCII decoding is equal to UTF-8 decoding for \"${s}\""
+        (string.toBytes s)
+        (utf8.decode s)) [
+      "foo bar"
+      "hello\nworld"
+      "carriage\r\nreturn"
+      "1238398494829304 []<><>({})[]!!)"
+      (string.take 127 char.allChars)
+    ]);
 
   randomUnicode = [
     "" # empty string should yield empty list
@@ -126,16 +132,17 @@ let
   testDecodingEncoding = it "checks that decoding and then encoding forms an identity"
     (builtins.map
       (s: assertEq "Decoding and then encoding “${s}” yields itself"
-        (utf8.encode (utf8.decode s)) s)
+        (utf8.encode (utf8.decode s))
+        s)
       (lib.flatten [
         glassSentences
         randomUnicode
       ]));
 
 in
-  runTestsuite "nix.utf8" [
-    testFailures
-    testAscii
-    testDecoding
-    testDecodingEncoding
-  ]
+runTestsuite "nix.utf8" [
+  testFailures
+  testAscii
+  testDecoding
+  testDecodingEncoding
+]
diff --git a/users/sterni/nixpkgs-crate-holes/default.nix b/users/sterni/nixpkgs-crate-holes/default.nix
index a022568dc941..b659c9c89eb2 100644
--- a/users/sterni/nixpkgs-crate-holes/default.nix
+++ b/users/sterni/nixpkgs-crate-holes/default.nix
@@ -33,7 +33,7 @@ let
       symphorien
       erictapen
       expipiplus1
-    ;
+      ;
   };
 
   # buildRustPackage handling
@@ -55,24 +55,25 @@ let
   extractCargoLock = drv:
     if !(drv ? cargoDeps.outPath)
     then null
-    else pkgs.runCommandNoCC "${drv.name}-Cargo.lock" {} ''
-      if test -d "${drv.cargoDeps}"; then
-        cp "${drv.cargoDeps}/Cargo.lock" "$out"
-      fi
-
-      if test -f "${drv.cargoDeps}"; then
-        tar -xO \
-          --no-wildcards-match-slash --wildcards \
-          -f "${drv.cargoDeps}" \
-          '*/Cargo.lock' \
-          > "$out"
-      fi
-    '';
+    else
+      pkgs.runCommandNoCC "${drv.name}-Cargo.lock" { } ''
+        if test -d "${drv.cargoDeps}"; then
+          cp "${drv.cargoDeps}/Cargo.lock" "$out"
+        fi
+
+        if test -f "${drv.cargoDeps}"; then
+          tar -xO \
+            --no-wildcards-match-slash --wildcards \
+            -f "${drv.cargoDeps}" \
+            '*/Cargo.lock' \
+            > "$out"
+        fi
+      '';
 
   # nixpkgs traversal
 
   # Condition for us to recurse: Either at top-level or recurseForDerivation.
-  recurseInto = path: x: path == [] ||
+  recurseInto = path: x: path == [ ] ||
     (lib.isAttrs x && (x.recurseForDerivations or false));
 
   # Returns the value or false if an eval error occurs.
@@ -97,46 +98,69 @@ let
           doRec = tryEvalOrFalse (recurseInto path x);
           isRust = tryEvalOrFalse (isRustPackage x);
         in
-          if doRec then lib.concatLists (
-            lib.mapAttrsToList (n: go (path ++ [ n ])) x
-          ) else if isDrv && isRust then [
-            {
-              attr = path;
-              lock = extractCargoLock x;
-              maintainers = x.meta.maintainers or [];
-            }
-          ] else [];
-    in go [];
+        if doRec then
+          lib.concatLists
+            (
+              lib.mapAttrsToList (n: go (path ++ [ n ])) x
+            ) else if isDrv && isRust then [
+          {
+            attr = path;
+            lock = extractCargoLock x;
+            maintainers = x.meta.maintainers or [ ];
+          }
+        ] else [ ];
+    in
+    go [ ];
 
   # Report generation and formatting
 
-  reportFor = { attr, lock, maintainers ? [] }: let
-    # naïve attribute path to Nix syntax conversion
-    strAttr = lib.concatStringsSep "." attr;
-    strMaintainers = lib.concatMapStringsSep " " (m: "@${m.github}") (
-      builtins.filter (x: builtins.elem x maintainerWhitelist) maintainers
-    );
-  in
+  reportFor = { attr, lock, maintainers ? [ ] }:
+    let
+      # naïve attribute path to Nix syntax conversion
+      strAttr = lib.concatStringsSep "." attr;
+      strMaintainers = lib.concatMapStringsSep " " (m: "@${m.github}") (
+        builtins.filter (x: builtins.elem x maintainerWhitelist) maintainers
+      );
+    in
     if lock == null
     then pkgs.emptyFile
-    else depot.nix.runExecline "${strAttr}-vulnerability-report" {} [
-      "pipeline" [
-        bins.cargo-audit
-        "audit" "--json"
-        "-n" "--db" rustsec-advisory-db
-        "-f" lock
-      ]
-      "importas" "out" "out"
-      "redirfd" "-w" "1" "$out"
-      bins.jq "-rj" "-f" ./format-audit-result.jq
-      "--arg" "attr" strAttr
-      "--arg" "maintainers" strMaintainers
-    ];
+    else
+      depot.nix.runExecline "${strAttr}-vulnerability-report" { } [
+        "pipeline"
+        [
+          bins.cargo-audit
+          "audit"
+          "--json"
+          "-n"
+          "--db"
+          rustsec-advisory-db
+          "-f"
+          lock
+        ]
+        "importas"
+        "out"
+        "out"
+        "redirfd"
+        "-w"
+        "1"
+        "$out"
+        bins.jq
+        "-rj"
+        "-f"
+        ./format-audit-result.jq
+        "--arg"
+        "attr"
+        strAttr
+        "--arg"
+        "maintainers"
+        strMaintainers
+      ];
 
   # GHMF in issues splits paragraphs on newlines
-  description = lib.concatMapStringsSep "\n\n" (
-    builtins.replaceStrings [ "\n" ] [ " " ]
-  ) [
+  description = lib.concatMapStringsSep "\n\n"
+    (
+      builtins.replaceStrings [ "\n" ] [ " " ]
+    ) [
     ''
       The vulnerability report below was generated by
       [nixpkgs-crate-holes](https://code.tvl.fyi/tree/users/sterni/nixpkgs-crate-holes)
@@ -194,39 +218,63 @@ let
       );
     in
 
-    depot.nix.runExecline "nixpkgs-rust-pkgs-vulnerability-report.md" {
-      stdin = lib.concatMapStrings (report: "${report}\n") reports;
-    } [
-      "importas" "out" "out"
-      "redirfd" "-w" "1" "$out"
+    depot.nix.runExecline "nixpkgs-rust-pkgs-vulnerability-report.md"
+      {
+        stdin = lib.concatMapStrings (report: "${report}\n") reports;
+      } [
+      "importas"
+      "out"
+      "out"
+      "redirfd"
+      "-w"
+      "1"
+      "$out"
       # Print introduction paragraph for the issue
-      "if" [ bins.printf "%s\n\n" description ]
+      "if"
+      [ bins.printf "%s\n\n" description ]
       # Print all reports
-      "foreground" [
-        "forstdin" "-E" "report" bins.cat "$report"
+      "foreground"
+      [
+        "forstdin"
+        "-E"
+        "report"
+        bins.cat
+        "$report"
       ]
       # Print stats at the end (mostly as a gimmick), we already know how many
       # attributes there are and count the attributes with vulnerability by
       # finding the number of checkable list entries in the output.
-      "backtick" "-E" "vulnerableCount" [
-        "pipeline" [
-          bins.grep "^- \\[ \\]" "$out"
+      "backtick"
+      "-E"
+      "vulnerableCount"
+      [
+        "pipeline"
+        [
+          bins.grep
+          "^- \\[ \\]"
+          "$out"
         ]
-        bins.wc "-l"
+        bins.wc
+        "-l"
       ]
-      "if" [
+      "if"
+      [
         bins.printf
         "\n%s of %s checked attributes have vulnerable dependencies.\n\n"
         "$vulnerableCount"
         (toString (builtins.length reports))
       ]
-      "if" [
-        bins.printf "%s\n\n" runInstructions
+      "if"
+      [
+        bins.printf
+        "%s\n\n"
+        runInstructions
       ]
     ];
 
   singleReport =
-    { # Attribute to check: string or list of strings (attr path)
+    {
+      # Attribute to check: string or list of strings (attr path)
       attr
       # Path to importable nixpkgs checkout
     , nixpkgsPath
@@ -241,37 +289,63 @@ let
       strAttr = lib.concatStringsSep "." attr';
     in
 
-    depot.nix.runExecline "${strAttr}-report.html" {} [
-      "importas" "out" "out"
-      "backtick" "-I" "-E" "-N" "report" [
-        bins.cargo-audit "audit"
+    depot.nix.runExecline "${strAttr}-report.html" { } [
+      "importas"
+      "out"
+      "out"
+      "backtick"
+      "-I"
+      "-E"
+      "-N"
+      "report"
+      [
+        bins.cargo-audit
+        "audit"
         "--quiet"
-        "-n" "--db" rustsec-advisory-db
-        "-f" lockFile
+        "-n"
+        "--db"
+        rustsec-advisory-db
+        "-f"
+        lockFile
       ]
-      "pipeline" [
-        "ifte" [
-          bins.printf "%s" "$report"
-        ] [
-          bins.printf "%s\n" "No vulnerabilities found"
+      "pipeline"
+      [
+        "ifte"
+        [
+          bins.printf
+          "%s"
+          "$report"
+        ]
+        [
+          bins.printf
+          "%s\n"
+          "No vulnerabilities found"
         ]
-        bins.test "-n" "$report"
+        bins.test
+        "-n"
+        "$report"
       ]
-      "pipeline" [
-        bins.tee "/dev/stderr"
+      "pipeline"
+      [
+        bins.tee
+        "/dev/stderr"
       ]
-      "redirfd" "-w" "1" "$out"
+      "redirfd"
+      "-w"
+      "1"
+      "$out"
       bins.ansi2html
     ];
 
-in {
+in
+{
   full = reportForNixpkgs;
   single = singleReport;
 
   inherit
     extractCargoLock
     allLockFiles
-  ;
+    ;
 
   # simple sanity check, doesn't cover everything, but testing the full report
   # is quite expensive in terms of evaluation.
diff --git a/users/tazjin/aoc2019/default.nix b/users/tazjin/aoc2019/default.nix
index ce3146d1f74e..a1798f400174 100644
--- a/users/tazjin/aoc2019/default.nix
+++ b/users/tazjin/aoc2019/default.nix
@@ -11,12 +11,16 @@ let
   getDay = f: head (matchSolution f);
 
   solutionFiles = filter (e: dir."${e}" == "regular" && isSolution e) (attrNames dir);
-  solutions = map (f: let day = getDay f; in {
-    name = day;
-    value = depot.nix.writeElispBin {
-      name = "aoc2019";
-      deps = p: with p; [ dash s ht ];
-      src = ./. + ("/" + f);
-    };
-  }) solutionFiles;
-in listToAttrs solutions
+  solutions = map
+    (f:
+      let day = getDay f; in {
+        name = day;
+        value = depot.nix.writeElispBin {
+          name = "aoc2019";
+          deps = p: with p; [ dash s ht ];
+          src = ./. + ("/" + f);
+        };
+      })
+    solutionFiles;
+in
+listToAttrs solutions
diff --git a/users/tazjin/aoc2020/default.nix b/users/tazjin/aoc2020/default.nix
index 7a7309ac5aaa..cd89da7de412 100644
--- a/users/tazjin/aoc2020/default.nix
+++ b/users/tazjin/aoc2020/default.nix
@@ -11,12 +11,16 @@ let
   getDay = f: head (matchSolution f);
 
   solutionFiles = filter (e: dir."${e}" == "regular" && isSolution e) (attrNames dir);
-  solutions = map (f: let day = getDay f; in depot.nix.writeElispBin {
-      name = day;
-      deps = p: with p; [ dash s ht p.f ];
-      src = ./. + ("/" + f);
-  }) solutionFiles;
-in pkgs.symlinkJoin {
+  solutions = map
+    (f:
+      let day = getDay f; in depot.nix.writeElispBin {
+        name = day;
+        deps = p: with p; [ dash s ht p.f ];
+        src = ./. + ("/" + f);
+      })
+    solutionFiles;
+in
+pkgs.symlinkJoin {
   name = "aoc2020";
   paths = solutions;
 }
diff --git a/users/tazjin/blog/default.nix b/users/tazjin/blog/default.nix
index 6ac89e46656a..c8b3c318995b 100644
--- a/users/tazjin/blog/default.nix
+++ b/users/tazjin/blog/default.nix
@@ -23,7 +23,7 @@ let
 
   posts = filter includePost (list post (import ./posts.nix));
 
-  rendered = pkgs.runCommandNoCC "tazjins-blog" {} ''
+  rendered = pkgs.runCommandNoCC "tazjins-blog" { } ''
     mkdir -p $out
 
     ${lib.concatStringsSep "\n" (map (post:
@@ -31,13 +31,16 @@ let
     ) posts)}
   '';
 
-in {
+in
+{
   inherit posts rendered config;
 
   # Generate embeddable nginx configuration for redirects from old post URLs
-  oldRedirects = lib.concatStringsSep "\n" (map (post: ''
-    location ~* ^(/en)?/${post.oldKey} {
-      return 301 https://tazj.in/blog/${post.key};
-    }
-  '') (filter (hasAttr "oldKey") posts));
+  oldRedirects = lib.concatStringsSep "\n" (map
+    (post: ''
+      location ~* ^(/en)?/${post.oldKey} {
+        return 301 https://tazj.in/blog/${post.key};
+      }
+    '')
+    (filter (hasAttr "oldKey") posts));
 }
diff --git a/users/tazjin/blog/posts.nix b/users/tazjin/blog/posts.nix
index b43598d01358..eeba600286db 100644
--- a/users/tazjin/blog/posts.nix
+++ b/users/tazjin/blog/posts.nix
@@ -37,7 +37,7 @@
     key = "the-smu-problem";
     title = "The SMU-problem of messaging apps";
     date = 1450354078;
-    content =./posts/the-smu-problem.md;
+    content = ./posts/the-smu-problem.md;
     oldKey = "1450354078";
   }
   {
diff --git a/users/tazjin/dns/default.nix b/users/tazjin/dns/default.nix
index da92b88beade..6c51cb5de4f5 100644
--- a/users/tazjin/dns/default.nix
+++ b/users/tazjin/dns/default.nix
@@ -2,11 +2,12 @@
 { depot, pkgs, ... }:
 
 let
-  checkZone = zone: file: pkgs.runCommandNoCC "${zone}-check" {} ''
+  checkZone = zone: file: pkgs.runCommandNoCC "${zone}-check" { } ''
     ${pkgs.bind}/bin/named-checkzone -i local ${zone} ${file} | tee $out
   '';
 
-in depot.nix.readTree.drvTargets {
-  kontemplate-works = checkZone "kontemplate.works"./kontemplate.works.zone;
+in
+depot.nix.readTree.drvTargets {
+  kontemplate-works = checkZone "kontemplate.works" ./kontemplate.works.zone;
   tazj-in = checkZone "tazj.in" ./tazj.in.zone;
 }
diff --git a/users/tazjin/emacs/default.nix b/users/tazjin/emacs/default.nix
index c7c57ba35543..08ff76c00ca1 100644
--- a/users/tazjin/emacs/default.nix
+++ b/users/tazjin/emacs/default.nix
@@ -2,166 +2,173 @@
 # and my personal Emacs configuration.
 { lib, pkgs, ... }:
 
-pkgs.makeOverridable({ emacs ? pkgs.emacsGcc }:
-let
-  emacsWithPackages = (pkgs.emacsPackagesGen emacs).emacsWithPackages;
+pkgs.makeOverridable
+  ({ emacs ? pkgs.emacsGcc }:
+  let
+    emacsWithPackages = (pkgs.emacsPackagesGen emacs).emacsWithPackages;
 
-  # If switching telega versions, use this variable because it will
-  # keep the version check, binary path and so on in sync.
-  currentTelega = epkgs: epkgs.melpaPackages.telega;
+    # If switching telega versions, use this variable because it will
+    # keep the version check, binary path and so on in sync.
+    currentTelega = epkgs: epkgs.melpaPackages.telega;
 
-  # $PATH for binaries that need to be available to Emacs
-  emacsBinPath = lib.makeBinPath [
-    (currentTelega pkgs.emacsPackages)
-    pkgs.libwebp # for dwebp, required by telega
-  ];
+    # $PATH for binaries that need to be available to Emacs
+    emacsBinPath = lib.makeBinPath [
+      (currentTelega pkgs.emacsPackages)
+      pkgs.libwebp # for dwebp, required by telega
+    ];
 
-  identity = x: x;
+    identity = x: x;
 
-  tazjinsEmacs = pkgfun: (emacsWithPackages(epkgs: pkgfun(with epkgs; [
-    ace-link
-    ace-window
-    avy
-    bazel
-    browse-kill-ring
-    cargo
-    clojure-mode
-    cmake-mode
-    company
-    counsel
-    counsel-notmuch
-    d-mode
-    direnv
-    dockerfile-mode
-    eglot
-    elfeed
-    elixir-mode
-    elm-mode
-    erlang
-    exwm
-    flymake
-    go-mode
-    google-c-style
-    gruber-darker-theme
-    haskell-mode
-    ht
-    hydra
-    idle-highlight-mode
-    ivy
-    ivy-prescient
-    jq-mode
-    kotlin-mode
-    lsp-mode
-    magit
-    markdown-toc
-    meson-mode
-    multi-term
-    multiple-cursors
-    nginx-mode
-    nix-mode
-    notmuch
-    paredit
-    password-store
-    pinentry
-    polymode
-    prescient
-    protobuf-mode
-    rainbow-delimiters
-    rainbow-mode
-    refine
-    request
-    restclient
-    rust-mode
-    sly
-    string-edit
-    swiper
-    telephone-line
-    terraform-mode
-    toml-mode
-    transient
-    undo-tree
-    use-package
-    uuidgen
-    vterm
-    web-mode
-    websocket
-    which-key
-    xelb
-    yaml-mode
-    yasnippet
-    zoxide
+    tazjinsEmacs = pkgfun: (emacsWithPackages (epkgs: pkgfun (with epkgs; [
+      ace-link
+      ace-window
+      avy
+      bazel
+      browse-kill-ring
+      cargo
+      clojure-mode
+      cmake-mode
+      company
+      counsel
+      counsel-notmuch
+      d-mode
+      direnv
+      dockerfile-mode
+      eglot
+      elfeed
+      elixir-mode
+      elm-mode
+      erlang
+      exwm
+      flymake
+      go-mode
+      google-c-style
+      gruber-darker-theme
+      haskell-mode
+      ht
+      hydra
+      idle-highlight-mode
+      ivy
+      ivy-prescient
+      jq-mode
+      kotlin-mode
+      lsp-mode
+      magit
+      markdown-toc
+      meson-mode
+      multi-term
+      multiple-cursors
+      nginx-mode
+      nix-mode
+      notmuch
+      paredit
+      password-store
+      pinentry
+      polymode
+      prescient
+      protobuf-mode
+      rainbow-delimiters
+      rainbow-mode
+      refine
+      request
+      restclient
+      rust-mode
+      sly
+      string-edit
+      swiper
+      telephone-line
+      terraform-mode
+      toml-mode
+      transient
+      undo-tree
+      use-package
+      uuidgen
+      vterm
+      web-mode
+      websocket
+      which-key
+      xelb
+      yaml-mode
+      yasnippet
+      zoxide
 
-    # Wonky stuff
-    (currentTelega epkgs)
+      # Wonky stuff
+      (currentTelega epkgs)
 
-    # Custom depot packages (either ours, or overridden ones)
-    tvlPackages.dottime
-    tvlPackages.nix-util
-    tvlPackages.passively
-    tvlPackages.rcirc
-    tvlPackages.term-switcher
-    tvlPackages.tvl
-  ])));
+      # Custom depot packages (either ours, or overridden ones)
+      tvlPackages.dottime
+      tvlPackages.nix-util
+      tvlPackages.passively
+      tvlPackages.rcirc
+      tvlPackages.term-switcher
+      tvlPackages.tvl
+    ])));
 
-  # Tired of telega.el runtime breakages through tdlib
-  # incompatibility. Target to make that a build failure instead.
-  tdlibCheck =
-    let
-      tgEmacs = emacsWithPackages(epkgs: [ (currentTelega epkgs) ]);
-      verifyTdlibVersion = builtins.toFile "verify-tdlib-version.el" ''
-        (require 'telega)
-        (defvar tdlib-version "${pkgs.tdlib.version}")
-        (when (or (version< tdlib-version
-                            telega-tdlib-min-version)
-                  (and telega-tdlib-max-version
-                        (version< telega-tdlib-max-version
-                                  tdlib-version)))
-           (message "Found TDLib version %s, but require %s to %s"
-                   tdlib-version telega-tdlib-min-version telega-tdlib-max-version)
-          (kill-emacs 1))
-       '';
-    in pkgs.runCommandNoCC "tdlibCheck" {} ''
+    # Tired of telega.el runtime breakages through tdlib
+    # incompatibility. Target to make that a build failure instead.
+    tdlibCheck =
+      let
+        tgEmacs = emacsWithPackages (epkgs: [ (currentTelega epkgs) ]);
+        verifyTdlibVersion = builtins.toFile "verify-tdlib-version.el" ''
+          (require 'telega)
+          (defvar tdlib-version "${pkgs.tdlib.version}")
+          (when (or (version< tdlib-version
+                              telega-tdlib-min-version)
+                    (and telega-tdlib-max-version
+                          (version< telega-tdlib-max-version
+                                    tdlib-version)))
+             (message "Found TDLib version %s, but require %s to %s"
+                     tdlib-version telega-tdlib-min-version telega-tdlib-max-version)
+            (kill-emacs 1))
+        '';
+      in
+      pkgs.runCommandNoCC "tdlibCheck" { } ''
+        export PATH="${emacsBinPath}:$PATH"
+        ${tgEmacs}/bin/emacs --script ${verifyTdlibVersion} && touch $out
+      '';
+  in
+  lib.fix
+    (self: l: f: pkgs.writeShellScriptBin "tazjins-emacs" ''
       export PATH="${emacsBinPath}:$PATH"
-      ${tgEmacs}/bin/emacs --script ${verifyTdlibVersion} && touch $out
-    '';
-in lib.fix(self: l: f: pkgs.writeShellScriptBin "tazjins-emacs" ''
-  export PATH="${emacsBinPath}:$PATH"
-  exec ${tazjinsEmacs f}/bin/emacs \
-    --debug-init \
-    --no-site-file \
-    --no-site-lisp \
-    --no-init-file \
-    --directory ${./config} ${if l != null then "--directory ${l}" else ""} \
-    --eval "(require 'init)" $@
-  '' // {
-    # Call overrideEmacs with a function (pkgs -> pkgs) to modify the
-    # packages that should be included in this Emacs distribution.
-    overrideEmacs = f': self l f';
-
-    # Call withLocalConfig with the path to a *folder* containing a
-    # `local.el` which provides local system configuration.
-    withLocalConfig = confDir: self confDir f;
-
-    # Build a derivation that uses the specified local Emacs (i.e.
-    # built outside of Nix) instead
-    withLocalEmacs = emacsBin: pkgs.writeShellScriptBin "tazjins-emacs" ''
-      export PATH="${emacsBinPath}:$PATH"
-      export EMACSLOADPATH="${(tazjinsEmacs f).deps}/share/emacs/site-lisp:"
-      exec ${emacsBin} \
+      exec ${tazjinsEmacs f}/bin/emacs \
         --debug-init \
         --no-site-file \
         --no-site-lisp \
         --no-init-file \
-        --directory ${./config} \
-        ${if l != null then "--directory ${l}" else ""} \
+        --directory ${./config} ${if l != null then "--directory ${l}" else ""} \
         --eval "(require 'init)" $@
-    '';
+    '' // {
+      # Call overrideEmacs with a function (pkgs -> pkgs) to modify the
+      # packages that should be included in this Emacs distribution.
+      overrideEmacs = f': self l f';
+
+      # Call withLocalConfig with the path to a *folder* containing a
+      # `local.el` which provides local system configuration.
+      withLocalConfig = confDir: self confDir f;
+
+      # Build a derivation that uses the specified local Emacs (i.e.
+      # built outside of Nix) instead
+      withLocalEmacs = emacsBin: pkgs.writeShellScriptBin "tazjins-emacs" ''
+        export PATH="${emacsBinPath}:$PATH"
+        export EMACSLOADPATH="${(tazjinsEmacs f).deps}/share/emacs/site-lisp:"
+        exec ${emacsBin} \
+          --debug-init \
+          --no-site-file \
+          --no-site-lisp \
+          --no-init-file \
+          --directory ${./config} \
+          ${if l != null then "--directory ${l}" else ""} \
+          --eval "(require 'init)" $@
+      '';
 
-    # Expose telega/tdlib version check as a target that is built in
-    # CI.
-    #
-    # TODO(tazjin): uncomment when telega works again
-    inherit tdlibCheck;
-    # meta.targets = [ "tdlibCheck" ];
-  }) null identity
-) {}
+      # Expose telega/tdlib version check as a target that is built in
+      # CI.
+      #
+      # TODO(tazjin): uncomment when telega works again
+      inherit tdlibCheck;
+      # meta.targets = [ "tdlibCheck" ];
+    })
+    null
+    identity
+  )
+{ }
diff --git a/users/tazjin/homepage/default.nix b/users/tazjin/homepage/default.nix
index 2ce1cf632255..0edb75d60933 100644
--- a/users/tazjin/homepage/default.nix
+++ b/users/tazjin/homepage/default.nix
@@ -35,7 +35,7 @@ let
     date = post.date;
   });
 
-  formatDate = defun [ int string ] (date: readFile (runCommandNoCC "date" {} ''
+  formatDate = defun [ int string ] (date: readFile (runCommandNoCC "date" { } ''
     date --date='@${toString date}' '+%Y-%m-%d' > $out
   ''));
 
@@ -67,7 +67,8 @@ let
   pageEntries = import ./entries.nix;
   homepage = index ((map postToEntry users.tazjin.blog.posts) ++ pageEntries);
   atomFeed = import ./feed.nix (args // { inherit entry pageEntries; });
-in runCommandNoCC "website" {} ''
+in
+runCommandNoCC "website" { } ''
   mkdir $out
   cp ${homepage} $out/index.html
   cp ${atomFeed} $out/feed.atom
diff --git a/users/tazjin/homepage/feed.nix b/users/tazjin/homepage/feed.nix
index 2a033444e8ba..09bc36341401 100644
--- a/users/tazjin/homepage/feed.nix
+++ b/users/tazjin/homepage/feed.nix
@@ -23,7 +23,7 @@ let
   });
 
   allEntries = (with depot.users.tazjin.blog; map (blog.toFeedEntry config) posts)
-             ++ (map pageEntryToEntry pageEntries);
+    ++ (map pageEntryToEntry pageEntries);
 
   feed = {
     id = "https://tazj.in/";
@@ -39,4 +39,5 @@ let
 
     entries = allEntries;
   };
-in writeText "feed.atom" (atom-feed.renderFeed feed)
+in
+writeText "feed.atom" (atom-feed.renderFeed feed)
diff --git a/users/tazjin/nixos/camden/default.nix b/users/tazjin/nixos/camden/default.nix
index 4b5e4b4872c2..b8b8e25fc3f4 100644
--- a/users/tazjin/nixos/camden/default.nix
+++ b/users/tazjin/nixos/camden/default.nix
@@ -1,7 +1,8 @@
 # This file configures camden.tazj.in, my homeserver.
 { depot, pkgs, lib, ... }:
 
-config: let
+config:
+let
   nginxRedirect = { from, to, acmeHost }: {
     serverName = from;
     useACMEHost = acmeHost;
@@ -9,18 +10,21 @@ config: let
 
     extraConfig = "return 301 https://${to}$request_uri;";
   };
-in lib.fix(self: {
+in
+lib.fix (self: {
   # Disable the current ACME module and use the old one from 19.09
   # instead, until the various regressions have been sorted out.
   # TODO(tazjin): Remove this once the new ACME module works.
   disabledModules = [ "security/acme" ];
   imports =
-    let oldChannel = fetchTarball {
-      # NixOS 19.09 on 2020-10-04
-      url = "https://github.com/NixOS/nixpkgs-channels/archive/75f4ba05c63be3f147bcc2f7bd4ba1f029cedcb1.tar.gz";
-      sha256 = "157c64220lf825ll4c0cxsdwg7cxqdx4z559fdp7kpz0g6p8fhhr";
-    };
-    in [
+    let
+      oldChannel = fetchTarball {
+        # NixOS 19.09 on 2020-10-04
+        url = "https://github.com/NixOS/nixpkgs-channels/archive/75f4ba05c63be3f147bcc2f7bd4ba1f029cedcb1.tar.gz";
+        sha256 = "157c64220lf825ll4c0cxsdwg7cxqdx4z559fdp7kpz0g6p8fhhr";
+      };
+    in
+    [
       "${depot.path}/ops/modules/quassel.nix"
       "${depot.path}/ops/modules/smtprelay.nix"
       "${oldChannel}/nixos/modules/security/acme.nix"
@@ -37,8 +41,14 @@ in lib.fix(self: {
   boot = {
     initrd = {
       availableKernelModules = [
-        "ahci" "xhci_pci" "usbhid" "usb_storage" "sd_mod" "sdhci_pci"
-        "rtsx_usb_sdmmc" "r8169"
+        "ahci"
+        "xhci_pci"
+        "usbhid"
+        "usb_storage"
+        "sd_mod"
+        "sdhci_pci"
+        "rtsx_usb_sdmmc"
+        "r8169"
       ];
 
       kernelModules = [ "dm-snapshot" ];
@@ -152,7 +162,7 @@ in lib.fix(self: {
     };
 
     # Set up a user & group for general git shenanigans
-    groups.git = {};
+    groups.git = { };
     users.git = {
       group = "git";
       isSystemUser = true;
@@ -220,9 +230,9 @@ in lib.fix(self: {
 
   # Forward logs to Google Cloud Platform
   services.journaldriver = {
-    enable                 = true;
-    logStream              = "home";
-    googleCloudProject     = "tazjins-infrastructure";
+    enable = true;
+    logStream = "home";
+    googleCloudProject = "tazjins-infrastructure";
     applicationCredentials = "/etc/gcp/key.json";
   };
 
diff --git a/users/tazjin/nixos/frog/default.nix b/users/tazjin/nixos/frog/default.nix
index b3c803c87131..b789ab04f3bf 100644
--- a/users/tazjin/nixos/frog/default.nix
+++ b/users/tazjin/nixos/frog/default.nix
@@ -1,6 +1,7 @@
 { depot, lib, pkgs, ... }:
 
-config: let
+config:
+let
   inherit (pkgs) lieer;
 
   quasselClient = pkgs.quassel.override {
@@ -8,7 +9,8 @@ config: let
     enableDaemon = false;
     monolithic = false;
   };
-in lib.fix(self: {
+in
+lib.fix (self: {
   imports = [
     "${depot.path}/ops/modules/v4l2loopback.nix"
   ];
@@ -61,8 +63,8 @@ in lib.fix(self: {
 
   nix = {
     maxJobs = 48;
-    binaryCaches = ["ssh://nix-ssh@whitby.tvl.fyi"];
-    binaryCachePublicKeys = ["cache.tvl.fyi:fd+9d1ceCPvDX/xVhcfv8nAa6njEhAGAEe+oGJDEeoc="];
+    binaryCaches = [ "ssh://nix-ssh@whitby.tvl.fyi" ];
+    binaryCachePublicKeys = [ "cache.tvl.fyi:fd+9d1ceCPvDX/xVhcfv8nAa6njEhAGAEe+oGJDEeoc=" ];
   };
 
   networking = {
diff --git a/users/tazjin/nixos/tverskoy/default.nix b/users/tazjin/nixos/tverskoy/default.nix
index 24a4a2d49122..f19501362ab5 100644
--- a/users/tazjin/nixos/tverskoy/default.nix
+++ b/users/tazjin/nixos/tverskoy/default.nix
@@ -1,6 +1,7 @@
 { depot, lib, pkgs, ... }:
 
-config: let
+config:
+let
   quasselClient = pkgs.quassel.override {
     client = true;
     enableDaemon = false;
@@ -15,7 +16,8 @@ config: let
     ${pkgs.xorg.setxkbmap}/bin/setxkbmap -option caps:super
     exec ${pkgs.xsecurelock}/bin/xsecurelock
   '';
-in lib.fix(self: {
+in
+lib.fix (self: {
   imports = [
     "${depot.third_party.impermanence}/nixos.nix"
     "${pkgs.home-manager.src}/nixos"
@@ -43,7 +45,7 @@ in lib.fix(self: {
   };
 
   fileSystems = {
-    "/" =  {
+    "/" = {
       device = "tmpfs";
       fsType = "tmpfs";
       options = [ "defaults" "size=8G" "mode=755" ];
@@ -320,109 +322,109 @@ in lib.fix(self: {
       zoxide
     ]);
 
-    systemd.user.services.lieer-tazjin = {
-      description = "Synchronise mail@tazj.in via lieer";
-      script = "${pkgs.lieer}/bin/gmi sync";
+  systemd.user.services.lieer-tazjin = {
+    description = "Synchronise mail@tazj.in via lieer";
+    script = "${pkgs.lieer}/bin/gmi sync";
 
-      serviceConfig = {
-        WorkingDirectory = "%h/mail/account.tazjin";
-        Type = "oneshot";
-      };
+    serviceConfig = {
+      WorkingDirectory = "%h/mail/account.tazjin";
+      Type = "oneshot";
     };
+  };
 
-    systemd.user.timers.lieer-tazjin = {
-      wantedBy = [ "timers.target" ];
+  systemd.user.timers.lieer-tazjin = {
+    wantedBy = [ "timers.target" ];
 
-      timerConfig = {
-        OnActiveSec = "1";
-        OnUnitActiveSec = "180";
-      };
+    timerConfig = {
+      OnActiveSec = "1";
+      OnUnitActiveSec = "180";
     };
+  };
 
-    home-manager.useGlobalPkgs = true;
-    home-manager.users.tazjin = { config, lib, ... }: {
-      imports = [ "${depot.third_party.impermanence}/home-manager.nix" ];
-
-      home.persistence."/persist/tazjin/home" = {
-        allowOther = true;
-
-        directories = [
-          ".cargo"
-          ".config/audacity"
-          ".config/google-chrome"
-          ".config/quassel-irc.org"
-          ".config/spotify"
-          ".config/syncthing"
-          ".elfeed"
-          ".gnupg"
-          ".local/share/Steam"
-          ".local/share/audacity"
-          ".local/share/direnv"
-          ".local/share/fish"
-          ".local/share/keyrings"
-          ".local/share/zoxide"
-          ".mozilla/firefox"
-          ".password-store"
-          ".rustup"
-          ".ssh"
-          ".steam"
-          ".telega"
-          "go"
-          "mail"
-        ];
-
-        files = [
-          ".notmuch-config"
-        ];
-      };
+  home-manager.useGlobalPkgs = true;
+  home-manager.users.tazjin = { config, lib, ... }: {
+    imports = [ "${depot.third_party.impermanence}/home-manager.nix" ];
+
+    home.persistence."/persist/tazjin/home" = {
+      allowOther = true;
+
+      directories = [
+        ".cargo"
+        ".config/audacity"
+        ".config/google-chrome"
+        ".config/quassel-irc.org"
+        ".config/spotify"
+        ".config/syncthing"
+        ".elfeed"
+        ".gnupg"
+        ".local/share/Steam"
+        ".local/share/audacity"
+        ".local/share/direnv"
+        ".local/share/fish"
+        ".local/share/keyrings"
+        ".local/share/zoxide"
+        ".mozilla/firefox"
+        ".password-store"
+        ".rustup"
+        ".ssh"
+        ".steam"
+        ".telega"
+        "go"
+        "mail"
+      ];
 
-      home.activation.screenshots = lib.hm.dag.entryAnywhere ''
-        $DRY_RUN_CMD mkdir -p $HOME/screenshots
-      '';
+      files = [
+        ".notmuch-config"
+      ];
+    };
 
-      programs.git = {
-        enable = true;
-        userName = "Vincent Ambo";
-        userEmail = "mail@tazj.in";
-        extraConfig = {
-          pull.rebase = true;
-          init.defaultBranch = "canon";
-        };
-      };
+    home.activation.screenshots = lib.hm.dag.entryAnywhere ''
+      $DRY_RUN_CMD mkdir -p $HOME/screenshots
+    '';
 
-      programs.fish = {
-        enable = true;
-        interactiveShellInit = ''
-          ${pkgs.zoxide}/bin/zoxide init fish | source
-        '';
+    programs.git = {
+      enable = true;
+      userName = "Vincent Ambo";
+      userEmail = "mail@tazj.in";
+      extraConfig = {
+        pull.rebase = true;
+        init.defaultBranch = "canon";
       };
+    };
 
-      services.screen-locker = {
-        enable = true;
-        enableDetectSleep = true;
-        inactiveInterval = 10; # minutes
-        lockCmd = "${screenLock}/bin/tazjin-screen-lock";
-      };
+    programs.fish = {
+      enable = true;
+      interactiveShellInit = ''
+        ${pkgs.zoxide}/bin/zoxide init fish | source
+      '';
+    };
 
-      services.picom = {
-        enable = true;
-        vSync = true;
-        backend = "glx";
-      };
+    services.screen-locker = {
+      enable = true;
+      enableDetectSleep = true;
+      inactiveInterval = 10; # minutes
+      lockCmd = "${screenLock}/bin/tazjin-screen-lock";
+    };
 
-      # Enable the dunst notification daemon, but force the
-      # configuration file separately instead of going via the strange
-      # Nix->dunstrc encoding route.
-      services.dunst.enable = true;
-      xdg.configFile."dunst/dunstrc" = {
-        source = depot.users.tazjin.dotfiles.dunstrc;
-        onChange = ''
-          ${pkgs.procps}/bin/pkill -u "$USER" ''${VERBOSE+-e} dunst || true
-        '';
-      };
+    services.picom = {
+      enable = true;
+      vSync = true;
+      backend = "glx";
+    };
 
-      systemd.user.startServices = true;
+    # Enable the dunst notification daemon, but force the
+    # configuration file separately instead of going via the strange
+    # Nix->dunstrc encoding route.
+    services.dunst.enable = true;
+    xdg.configFile."dunst/dunstrc" = {
+      source = depot.users.tazjin.dotfiles.dunstrc;
+      onChange = ''
+        ${pkgs.procps}/bin/pkill -u "$USER" ''${VERBOSE+-e} dunst || true
+      '';
     };
 
-    system.stateVersion = "20.09";
+    systemd.user.startServices = true;
+  };
+
+  system.stateVersion = "20.09";
 })
diff --git a/users/tazjin/presentations/bootstrapping-2018/default.nix b/users/tazjin/presentations/bootstrapping-2018/default.nix
index 0dff14b2a1a6..2775d0b3fbbb 100644
--- a/users/tazjin/presentations/bootstrapping-2018/default.nix
+++ b/users/tazjin/presentations/bootstrapping-2018/default.nix
@@ -4,24 +4,26 @@
 
 with pkgs;
 
-let tex = texlive.combine {
-  inherit (texlive)
-    beamer
-    beamertheme-metropolis
-    etoolbox
-    euenc
-    extsizes
-    fontspec
-    lualibs
-    luaotfload
-    luatex
-    minted
-    ms
-    pgfopts
-    scheme-basic
-    translator;
-};
-in stdenv.mkDerivation {
+let
+  tex = texlive.combine {
+    inherit (texlive)
+      beamer
+      beamertheme-metropolis
+      etoolbox
+      euenc
+      extsizes
+      fontspec
+      lualibs
+      luaotfload
+      luatex
+      minted
+      ms
+      pgfopts
+      scheme-basic
+      translator;
+  };
+in
+stdenv.mkDerivation {
   name = "nuug-bootstrapping-slides";
   src = ./.;
 
diff --git a/users/wpcarro/assessments/brilliant/default.nix b/users/wpcarro/assessments/brilliant/default.nix
index 536e54d3650a..0628679c0127 100644
--- a/users/wpcarro/assessments/brilliant/default.nix
+++ b/users/wpcarro/assessments/brilliant/default.nix
@@ -12,5 +12,5 @@ depot.users.wpcarro.buildHaskell.program {
     split
     rio
   ];
-  ghcExtensions = [];
+  ghcExtensions = [ ];
 }
diff --git a/users/wpcarro/buildHaskell/default.nix b/users/wpcarro/buildHaskell/default.nix
index 5958b1ea26ae..2f0fd9e1c2d1 100644
--- a/users/wpcarro/buildHaskell/default.nix
+++ b/users/wpcarro/buildHaskell/default.nix
@@ -2,11 +2,13 @@
 
 {
   # Create a nix-shell for Haskell development.
-  shell = { deps }: let
-    ghc = pkgs.haskellPackages.ghcWithPackages (hpkgs: deps hpkgs);
-  in pkgs.mkShell {
-    buildInputs = [ghc];
-  };
+  shell = { deps }:
+    let
+      ghc = pkgs.haskellPackages.ghcWithPackages (hpkgs: deps hpkgs);
+    in
+    pkgs.mkShell {
+      buildInputs = [ ghc ];
+    };
 
   # Build a Haskell executable. This assumes a project directory with a
   # top-level Main.hs.
@@ -15,17 +17,19 @@
   # - `deps`: A function that accepts `hpkgs` and returns a list of Haskell
   # - `ghcExtensions`: A list of strings representing the language extensions to
   #   use.
-  program = { name, srcs, deps, ghcExtensions }: let
-    ghc = pkgs.haskellPackages.ghcWithPackages (hpkgs: deps hpkgs);
-  in pkgs.stdenv.mkDerivation {
-    name = name;
-    buildInputs = [];
-    srcs = srcs;
-    buildPhase = ''
-      ${ghc}/bin/ghc -Wall Main.hs ${pkgs.lib.concatMapStrings (x: "-X${x} ") ghcExtensions}
-    '';
-    installPhase = ''
-      mkdir -p $out && mv Main $out/${name}
-    '';
-  };
+  program = { name, srcs, deps, ghcExtensions }:
+    let
+      ghc = pkgs.haskellPackages.ghcWithPackages (hpkgs: deps hpkgs);
+    in
+    pkgs.stdenv.mkDerivation {
+      name = name;
+      buildInputs = [ ];
+      srcs = srcs;
+      buildPhase = ''
+        ${ghc}/bin/ghc -Wall Main.hs ${pkgs.lib.concatMapStrings (x: "-X${x} ") ghcExtensions}
+      '';
+      installPhase = ''
+        mkdir -p $out && mv Main $out/${name}
+      '';
+    };
 }
diff --git a/users/wpcarro/ci/pipelines/post-receive.nix b/users/wpcarro/ci/pipelines/post-receive.nix
index cb7501bdeacf..09b8990e13e2 100644
--- a/users/wpcarro/ci/pipelines/post-receive.nix
+++ b/users/wpcarro/ci/pipelines/post-receive.nix
@@ -10,4 +10,5 @@ let
       label = ":broom: lint secrets";
     }
   ];
-in pkgs.writeText "pipeline.yaml" (toJSON pipeline)
+in
+pkgs.writeText "pipeline.yaml" (toJSON pipeline)
diff --git a/users/wpcarro/clients/monsterpoker/default.nix b/users/wpcarro/clients/monsterpoker/default.nix
index 0e079261fb6a..e5dca4bca21c 100644
--- a/users/wpcarro/clients/monsterpoker/default.nix
+++ b/users/wpcarro/clients/monsterpoker/default.nix
@@ -1,6 +1,6 @@
 { depot, pkgs, ... }:
 
-pkgs.runCommandNoCC "monsterpoker.app" {} ''
+pkgs.runCommandNoCC "monsterpoker.app" { } ''
   mkdir -p $out
   cp ${./index.html} $out/index.html
 ''
diff --git a/users/wpcarro/common.nix b/users/wpcarro/common.nix
index 829c3394d1e2..d88b83b9d2b8 100644
--- a/users/wpcarro/common.nix
+++ b/users/wpcarro/common.nix
@@ -2,7 +2,8 @@
 
 let
   inherit (depot.users) wpcarro;
-in {
+in
+{
   programs = {
     fish.enable = true;
 
diff --git a/users/wpcarro/configs/default.nix b/users/wpcarro/configs/default.nix
index 81ba5b4d48ac..681f976052c7 100644
--- a/users/wpcarro/configs/default.nix
+++ b/users/wpcarro/configs/default.nix
@@ -3,7 +3,8 @@
 let
   inherit (pkgs) writeShellScript;
   inherit (pkgs.lib.strings) makeBinPath;
-in {
+in
+{
   install = writeShellScript "install-configs" ''
     cd "$WPCARRO/configs" && ${pkgs.stow}/bin/stow --target="$HOME" .
   '';
diff --git a/users/wpcarro/emacs/default.nix b/users/wpcarro/emacs/default.nix
index c2669f6949e5..b751849eab9c 100644
--- a/users/wpcarro/emacs/default.nix
+++ b/users/wpcarro/emacs/default.nix
@@ -18,7 +18,8 @@ let
       pass
       scrot
       xorg.xset
-    ]));
+    ])
+  );
 
   emacsWithPackages = (emacsPackagesGen emacs27).emacsWithPackages;
 
@@ -128,7 +129,7 @@ let
   makeEnvVars = env: concatStringsSep "\n"
     (mapAttrsToList (k: v: "export ${k}=\"${v}\"") env);
 
-  withEmacsPath = { emacsBin, env ? {}, load ? [] }:
+  withEmacsPath = { emacsBin, env ? { }, load ? [ ] }:
     writeShellScriptBin "wpcarros-emacs" ''
       export XMODIFIERS=emacs
       export PATH="${emacsBinPath}:$PATH"
@@ -143,16 +144,17 @@ let
         ${concatStringsSep "\n" (map (el: "--load ${el} \\") load)}
         "$@"
     '';
-in {
+in
+{
   inherit withEmacsPath;
 
-  nixos = { load ? [] }: withEmacsPath {
+  nixos = { load ? [ ] }: withEmacsPath {
     inherit load;
     emacsBin = "${wpcarrosEmacs}/bin/emacs";
   };
 
   # Script that asserts my Emacs can initialize without warnings or errors.
-  check = runCommand "check-emacs" {} ''
+  check = runCommand "check-emacs" { } ''
     # Even though Buildkite defines this, I'd still like still be able to test
     # this locally without depending on my ability to remember to set CI=true.
     export CI=true
diff --git a/users/wpcarro/haskell-file/shell.nix b/users/wpcarro/haskell-file/shell.nix
index 7682e8246cac..0c6a298bf2b0 100644
--- a/users/wpcarro/haskell-file/shell.nix
+++ b/users/wpcarro/haskell-file/shell.nix
@@ -1,5 +1,5 @@
 { depot, ... }:
 
 depot.users.wpcarro.buildHaskell.shell {
-  deps = hpkgs: [];
+  deps = hpkgs: [ ];
 }
diff --git a/users/wpcarro/nixos/diogenes/default.nix b/users/wpcarro/nixos/diogenes/default.nix
index d1246de4f34f..f92de19d18bf 100644
--- a/users/wpcarro/nixos/diogenes/default.nix
+++ b/users/wpcarro/nixos/diogenes/default.nix
@@ -4,7 +4,8 @@ let
   inherit (depot.users) wpcarro;
   name = "diogenes";
   domainName = "billandhiscomputer.com";
-in wpcarro.terraform.googleCloudVM {
+in
+wpcarro.terraform.googleCloudVM {
   project = "wpcarros-infrastructure";
   name = "diogenes";
   region = "us-central1";
@@ -23,7 +24,7 @@ in wpcarro.terraform.googleCloudVM {
       type = "A";
       ttl = 300; # 5m
       managed_zone = "\${google_dns_managed_zone.${name}.name}";
-      rrdatas = ["\${google_compute_instance.${name}.network_interface[0].access_config[0].nat_ip}"];
+      rrdatas = [ "\${google_compute_instance.${name}.network_interface[0].access_config[0].nat_ip}" ];
     };
 
     resource.google_compute_instance."${name}" = {
@@ -43,7 +44,7 @@ in wpcarro.terraform.googleCloudVM {
       type = "A";
       ttl = 300; # 5m
       managed_zone = "\${google_dns_managed_zone.monsterpoker.name}";
-      rrdatas = ["\${google_compute_instance.${name}.network_interface[0].access_config[0].nat_ip}"];
+      rrdatas = [ "\${google_compute_instance.${name}.network_interface[0].access_config[0].nat_ip}" ];
     };
   };
 
@@ -54,9 +55,9 @@ in wpcarro.terraform.googleCloudVM {
 
     networking = {
       firewall.allowedTCPPorts = [
-        22   # ssh
-        80   # http
-        443  # https
+        22 # ssh
+        80 # http
+        443 # https
         6698 # quassel
       ];
       firewall.allowedUDPPortRanges = [
diff --git a/users/wpcarro/nixos/marcus/default.nix b/users/wpcarro/nixos/marcus/default.nix
index 1de320f10c20..a84d33ead75b 100644
--- a/users/wpcarro/nixos/marcus/default.nix
+++ b/users/wpcarro/nixos/marcus/default.nix
@@ -13,7 +13,8 @@ let
     enableDaemon = false;
     monolithic = false;
   };
-in {
+in
+{
   imports = [
     (depot.path + "/users/wpcarro/nixos/marcus/hardware.nix")
     "${pkgs.home-manager.src}/nixos"
diff --git a/users/wpcarro/nixos/marcus/hardware.nix b/users/wpcarro/nixos/marcus/hardware.nix
index a5735bc8e22b..cd80685abe13 100644
--- a/users/wpcarro/nixos/marcus/hardware.nix
+++ b/users/wpcarro/nixos/marcus/hardware.nix
@@ -6,9 +6,9 @@
   ];
 
   boot.initrd.availableKernelModules = [ "xhci_pci" "nvme" "usb_storage" "sd_mod" ];
-  boot.initrd.kernelModules = [];
+  boot.initrd.kernelModules = [ ];
   boot.kernelModules = [ "kvm-intel" ];
-  boot.extraModulePackages = [];
+  boot.extraModulePackages = [ ];
 
   fileSystems."/" = {
     device = "/dev/disk/by-uuid/b8b911ee-e9b9-40ea-89d6-551f11350e7b";
diff --git a/users/wpcarro/playbooks/nix_gcr/cloud_run.nix b/users/wpcarro/playbooks/nix_gcr/cloud_run.nix
index 3d981611817b..1f473b5f59fa 100644
--- a/users/wpcarro/playbooks/nix_gcr/cloud_run.nix
+++ b/users/wpcarro/playbooks/nix_gcr/cloud_run.nix
@@ -4,7 +4,7 @@ pkgs.dockerTools.buildLayeredImage {
   name = "gemma";
   tag = "latest";
   config.ExposedPorts = {
-    "4242" = {};
+    "4242" = { };
   };
   config.Env = [
     "GEMMA_CONFIG=${./config.lisp}"
diff --git a/users/wpcarro/scratch/blockchain/default.nix b/users/wpcarro/scratch/blockchain/default.nix
index 745e7a5ab490..c02f9a9c8108 100644
--- a/users/wpcarro/scratch/blockchain/default.nix
+++ b/users/wpcarro/scratch/blockchain/default.nix
@@ -2,7 +2,8 @@
 
 let
   pypkgs = pkgs.python3Packages;
-in pkgs.python3Packages.buildPythonApplication {
+in
+pkgs.python3Packages.buildPythonApplication {
   pname = "main";
   src = ./.;
   version = "0.0.1";
diff --git a/users/wpcarro/scratch/groceries/shell.nix b/users/wpcarro/scratch/groceries/shell.nix
index 7682e8246cac..0c6a298bf2b0 100644
--- a/users/wpcarro/scratch/groceries/shell.nix
+++ b/users/wpcarro/scratch/groceries/shell.nix
@@ -1,5 +1,5 @@
 { depot, ... }:
 
 depot.users.wpcarro.buildHaskell.shell {
-  deps = hpkgs: [];
+  deps = hpkgs: [ ];
 }
diff --git a/users/wpcarro/scratch/picoctf/challenge_166/shell.nix b/users/wpcarro/scratch/picoctf/challenge_166/shell.nix
index 07a3a2e281b4..85d3865a51bf 100644
--- a/users/wpcarro/scratch/picoctf/challenge_166/shell.nix
+++ b/users/wpcarro/scratch/picoctf/challenge_166/shell.nix
@@ -1,7 +1,8 @@
 { pkgs, ... }:
 
 let
-  python =pkgs.python3.withPackages (pypkgs: with pypkgs; [
+  python = pkgs.python3.withPackages (pypkgs: with pypkgs; [
     cryptography
   ]);
-in python.env
+in
+python.env
diff --git a/users/wpcarro/terraform/default.nix b/users/wpcarro/terraform/default.nix
index be35785a54d0..d73d46dbf91e 100644
--- a/users/wpcarro/terraform/default.nix
+++ b/users/wpcarro/terraform/default.nix
@@ -7,179 +7,183 @@ let
 
   images = import "${pkgs.path}/nixos/modules/virtualisation/gce-images.nix";
   nixosImage = images."20.09";
-in {
-  googleCloudVM = {
-    project,
-    name,
-    region,
-    zone,
-    configuration,
-    extraConfig ? {},
-  }: let
-    inherit (configuration.users.users) root;
-    inherit (configuration.networking) firewall;
-
-    # Convert NixOS-style port numbers to Terraform-style.
-    asStrings = xs: map toString xs;
-    asRanges = xs: map (x: "${toString x.from}-${toString x.to}") xs;
-
-    sshKeys = concatStringsSep "\n"
-      (map (key: "root:${key}") root.openssh.authorizedKeys.keys);
-
-    os = depot.ops.nixos.nixosFor (_: {
-      imports = [
-        "${pkgs.path}/nixos/modules/virtualisation/google-compute-image.nix"
-        configuration
-      ];
-
-      networking.hostName = name;
-
-      fileSystems."/nix" = {
-        device = "/dev/disk/by-label/google-${name}-disk";
-        fsType = "ext4";
+in
+{
+  googleCloudVM =
+    { project
+    , name
+    , region
+    , zone
+    , configuration
+    , extraConfig ? { }
+    ,
+    }:
+    let
+      inherit (configuration.users.users) root;
+      inherit (configuration.networking) firewall;
+
+      # Convert NixOS-style port numbers to Terraform-style.
+      asStrings = xs: map toString xs;
+      asRanges = xs: map (x: "${toString x.from}-${toString x.to}") xs;
+
+      sshKeys = concatStringsSep "\n"
+        (map (key: "root:${key}") root.openssh.authorizedKeys.keys);
+
+      os = depot.ops.nixos.nixosFor (_: {
+        imports = [
+          "${pkgs.path}/nixos/modules/virtualisation/google-compute-image.nix"
+          configuration
+        ];
+
+        networking.hostName = name;
+
+        fileSystems."/nix" = {
+          device = "/dev/disk/by-label/google-${name}-disk";
+          fsType = "ext4";
+        };
+      });
+
+      osRoot = os.config.system.build.toplevel;
+      osPath = unsafeDiscardStringContext (toString osRoot.outPath);
+      drvPath = unsafeDiscardStringContext (toString osRoot.drvPath);
+    in
+    writeText "terraform.tf.json" (toJSON (lib.recursiveUpdate extraConfig {
+      provider.google = {
+        inherit project region zone;
       };
-    });
-
-    osRoot = os.config.system.build.toplevel;
-    osPath = unsafeDiscardStringContext (toString osRoot.outPath);
-    drvPath = unsafeDiscardStringContext (toString osRoot.drvPath);
-  in writeText "terraform.tf.json" (toJSON (lib.recursiveUpdate extraConfig {
-    provider.google = {
-      inherit project region zone;
-    };
-
-    resource.google_compute_instance."${name}" = {
-      inherit name zone;
-      machine_type = "e2-standard-2";
-  
-      tags = [
-        "http-server"
-        "https-server"
-        "${name}-firewall"
-      ];
-  
-      boot_disk = {
-        device_name = "boot";
-        initialize_params = {
-          size = 10;
-          image = "projects/nixos-cloud/global/images/${nixosImage.name}";
+
+      resource.google_compute_instance."${name}" = {
+        inherit name zone;
+        machine_type = "e2-standard-2";
+
+        tags = [
+          "http-server"
+          "https-server"
+          "${name}-firewall"
+        ];
+
+        boot_disk = {
+          device_name = "boot";
+          initialize_params = {
+            size = 10;
+            image = "projects/nixos-cloud/global/images/${nixosImage.name}";
+          };
+        };
+
+        attached_disk = {
+          source = "\${google_compute_disk.${name}.id}";
+          device_name = "${name}-disk";
+        };
+
+        network_interface = {
+          network = "default";
+          subnetwork = "default";
+          access_config = { };
+        };
+
+        # Copy root's SSH keys from the NixOS configuration and expose them to the
+        # metadata server.
+        metadata = {
+          inherit sshKeys;
+          ssh-keys = sshKeys;
+
+          # NixOS's fetch-instance-ssh-keys.bash relies on these fields being
+          # available on the metadata server.
+          ssh_host_ed25519_key = "\${tls_private_key.${name}.private_key_pem}";
+          ssh_host_ed25519_key_pub = "\${tls_private_key.${name}.public_key_pem}";
+
+          # Even though we have SSH access, having oslogin can still be useful for
+          # troubleshooting in the browser if for some reason SSH isn't working as
+          # expected.
+          enable-oslogin = "TRUE";
         };
+
+        service_account.scopes = [ "cloud-platform" ];
       };
-  
-      attached_disk = {
-        source = "\${google_compute_disk.${name}.id}";
-        device_name = "${name}-disk";
+
+      resource.tls_private_key."${name}" = {
+        algorithm = "ECDSA";
+        ecdsa_curve = "P384";
       };
-  
-      network_interface = {
+
+      resource.google_compute_firewall."${name}" = {
+        name = "${name}-firewall";
         network = "default";
-        subnetwork = "default";
-        access_config = {};
-      };
-  
-      # Copy root's SSH keys from the NixOS configuration and expose them to the
-      # metadata server.
-      metadata = {
-        inherit sshKeys;
-        ssh-keys = sshKeys;
-
-        # NixOS's fetch-instance-ssh-keys.bash relies on these fields being
-        # available on the metadata server.
-        ssh_host_ed25519_key = "\${tls_private_key.${name}.private_key_pem}";
-        ssh_host_ed25519_key_pub = "\${tls_private_key.${name}.public_key_pem}";
-
-        # Even though we have SSH access, having oslogin can still be useful for
-        # troubleshooting in the browser if for some reason SSH isn't working as
-        # expected.
-        enable-oslogin = "TRUE";
-      };
-  
-      service_account.scopes = ["cloud-platform"];
-    };
-
-    resource.tls_private_key."${name}" = {
-      algorithm = "ECDSA";
-      ecdsa_curve = "P384";
-    };
-
-    resource.google_compute_firewall."${name}" = {
-      name = "${name}-firewall";
-      network = "default";
-
-      # Read the firewall configuration from the NixOS configuration.
-      allow = [
-        {
-          protocol = "tcp";
-          ports = concatLists [
-            (asStrings (firewall.allowedTCPPorts or []))
-            (asRanges (firewall.allowedTCPPortRanges or []))
-          ];
-        }
-        {
-          protocol = "udp";
-          ports = concatLists [
-            (asStrings (firewall.allowedUDPPorts or []))
-            (asRanges (firewall.allowedUDPPortRanges or []))
-          ];
-        }
-      ];
-      source_ranges = ["0.0.0.0/0"];
-    };
-  
-    resource.google_compute_disk."${name}" = {
-      inherit zone;
-      name = "${name}-disk";
-      size = 100;
-    };
-
-    resource.null_resource.deploy_nixos = {
-      triggers = {
-        # Redeploy when the NixOS configuration changes.
-        os = "${osPath}";
-        # Redeploy when a new machine is provisioned.
-        machine_id = "\${google_compute_instance.${name}.id}";
+
+        # Read the firewall configuration from the NixOS configuration.
+        allow = [
+          {
+            protocol = "tcp";
+            ports = concatLists [
+              (asStrings (firewall.allowedTCPPorts or [ ]))
+              (asRanges (firewall.allowedTCPPortRanges or [ ]))
+            ];
+          }
+          {
+            protocol = "udp";
+            ports = concatLists [
+              (asStrings (firewall.allowedUDPPorts or [ ]))
+              (asRanges (firewall.allowedUDPPortRanges or [ ]))
+            ];
+          }
+        ];
+        source_ranges = [ "0.0.0.0/0" ];
       };
 
-      connection = {
-        host = "\${google_compute_instance.${name}.network_interface[0].access_config[0].nat_ip}";
+      resource.google_compute_disk."${name}" = {
+        inherit zone;
+        name = "${name}-disk";
+        size = 100;
       };
 
-      provisioner = [
-        { remote-exec.inline = ["true"]; }
-        {
-          local-exec.command = ''
-            export PATH="${pkgs.openssh}/bin:$PATH"
-
-            scratch="$(mktemp -d)"
-            function cleanup() {
-              rm -rf $scratch
-            }
-            trap cleanup EXIT
-
-            # write out ssh key
-            echo -n "''${tls_private_key.${name}.private_key_pem}" > $scratch/id_rsa.pem
-            chmod 0600 $scratch/id_rsa.pem
-
-            export NIX_SSHOPTS="\
-              -o StrictHostKeyChecking=no\
-              -o UserKnownHostsFile=/dev/null\
-              -o GlobalKnownHostsFile=/dev/null\
-              -o IdentityFile=$scratch/id_rsa.pem
-            "
-
-            nix-build ${drvPath}
-            nix-copy-closure --to \
-              root@''${google_compute_instance.${name}.network_interface[0].access_config[0].nat_ip} \
-              ${osPath} --gzip --use-substitutes
-          '';
-        }
-        {
-          remote-exec.inline = [
-            "nix-env --profile /nix/var/nix/profiles/system --set ${osPath}"
-            "${osPath}/bin/switch-to-configuration switch"
-          ];
-        }
-      ];
-    };
-  }));
+      resource.null_resource.deploy_nixos = {
+        triggers = {
+          # Redeploy when the NixOS configuration changes.
+          os = "${osPath}";
+          # Redeploy when a new machine is provisioned.
+          machine_id = "\${google_compute_instance.${name}.id}";
+        };
+
+        connection = {
+          host = "\${google_compute_instance.${name}.network_interface[0].access_config[0].nat_ip}";
+        };
+
+        provisioner = [
+          { remote-exec.inline = [ "true" ]; }
+          {
+            local-exec.command = ''
+              export PATH="${pkgs.openssh}/bin:$PATH"
+
+              scratch="$(mktemp -d)"
+              function cleanup() {
+                rm -rf $scratch
+              }
+              trap cleanup EXIT
+
+              # write out ssh key
+              echo -n "''${tls_private_key.${name}.private_key_pem}" > $scratch/id_rsa.pem
+              chmod 0600 $scratch/id_rsa.pem
+
+              export NIX_SSHOPTS="\
+                -o StrictHostKeyChecking=no\
+                -o UserKnownHostsFile=/dev/null\
+                -o GlobalKnownHostsFile=/dev/null\
+                -o IdentityFile=$scratch/id_rsa.pem
+              "
+
+              nix-build ${drvPath}
+              nix-copy-closure --to \
+                root@''${google_compute_instance.${name}.network_interface[0].access_config[0].nat_ip} \
+                ${osPath} --gzip --use-substitutes
+            '';
+          }
+          {
+            remote-exec.inline = [
+              "nix-env --profile /nix/var/nix/profiles/system --set ${osPath}"
+              "${osPath}/bin/switch-to-configuration switch"
+            ];
+          }
+        ];
+      };
+    }));
 }
diff --git a/users/wpcarro/tools/monzo_ynab/job.nix b/users/wpcarro/tools/monzo_ynab/job.nix
index c2c8baab3b45..f710b73cefdb 100644
--- a/users/wpcarro/tools/monzo_ynab/job.nix
+++ b/users/wpcarro/tools/monzo_ynab/job.nix
@@ -2,7 +2,8 @@
 
 let
   inherit (depot.users.wpcarro) gopkgs;
-in depot.nix.buildGo.program {
+in
+depot.nix.buildGo.program {
   name = "job";
   srcs = [
     ./main.go
diff --git a/users/wpcarro/tools/monzo_ynab/tokens.nix b/users/wpcarro/tools/monzo_ynab/tokens.nix
index b58c272bde02..4e2761bc7882 100644
--- a/users/wpcarro/tools/monzo_ynab/tokens.nix
+++ b/users/wpcarro/tools/monzo_ynab/tokens.nix
@@ -12,7 +12,8 @@ let
       utils
     ];
   };
-in depot.nix.buildGo.program {
+in
+depot.nix.buildGo.program {
   name = "token-server";
   srcs = [
     ./tokens.go
diff --git a/users/wpcarro/tools/rfcToKindle/default.nix b/users/wpcarro/tools/rfcToKindle/default.nix
index 4ea271943950..ca87abdee012 100644
--- a/users/wpcarro/tools/rfcToKindle/default.nix
+++ b/users/wpcarro/tools/rfcToKindle/default.nix
@@ -7,5 +7,5 @@ depot.nix.buildGo.program {
   srcs = [
     ./main.go
   ];
-  deps = [];
+  deps = [ ];
 }
diff --git a/users/wpcarro/tools/symlinkManager/default.nix b/users/wpcarro/tools/symlinkManager/default.nix
index 4e261d730932..7d022828ee97 100644
--- a/users/wpcarro/tools/symlinkManager/default.nix
+++ b/users/wpcarro/tools/symlinkManager/default.nix
@@ -2,7 +2,8 @@
 
 let
   inherit (depot.users.wpcarro) gopkgs;
-in depot.nix.buildGo.program {
+in
+depot.nix.buildGo.program {
   name = "symlink-mgr";
   srcs = [
     ./main.go
diff --git a/users/wpcarro/tools/url-blocker/default.nix b/users/wpcarro/tools/url-blocker/default.nix
index 943644e5f542..ae24aa41b7ca 100644
--- a/users/wpcarro/tools/url-blocker/default.nix
+++ b/users/wpcarro/tools/url-blocker/default.nix
@@ -12,11 +12,11 @@ let
     name = "url-blocker";
     src = builtins.path { path = ./.; name = "url-blocker"; };
     buildPhase = ''
-    ${ghc}/bin/ghc Main.hs
-  '';
+      ${ghc}/bin/ghc Main.hs
+    '';
     installPhase = ''
-    mv ./Main $out
-  '';
+      mv ./Main $out
+    '';
   };
 
   # This is the systemd timer unit.
@@ -26,8 +26,9 @@ let
     systemd = {
       timers.simple-timer = {
         wantedBy = [ "timers.target" ];
-        partOf = [];
+        partOf = [ ];
       };
     };
   };
-in null
+in
+null
diff --git a/users/wpcarro/utils/builder.nix b/users/wpcarro/utils/builder.nix
index 45e783cf0ba8..2bc061d3661b 100644
--- a/users/wpcarro/utils/builder.nix
+++ b/users/wpcarro/utils/builder.nix
@@ -2,7 +2,8 @@
 
 let
   inherit (pkgs) writeShellScriptBin;
-in {
+in
+{
   # Create a derivation that creates an executable shell script named `as` that
   # calls the program located at `path`, forwarding all of the arguments.
   wrapNonNixProgram = { path, as }: writeShellScriptBin as ''
diff --git a/users/wpcarro/utils/default.nix b/users/wpcarro/utils/default.nix
index 59aa322076b4..46d30acfa24e 100644
--- a/users/wpcarro/utils/default.nix
+++ b/users/wpcarro/utils/default.nix
@@ -8,7 +8,8 @@ args@{ pkgs, ... }:
 
 let
   builder = import ./builder.nix args;
-  fs      = import ./fs.nix args;
-in {
+  fs = import ./fs.nix args;
+in
+{
   inherit builder fs;
 }
diff --git a/users/wpcarro/utils/fs.nix b/users/wpcarro/utils/fs.nix
index 6305e705b141..d7d5e34e991b 100644
--- a/users/wpcarro/utils/fs.nix
+++ b/users/wpcarro/utils/fs.nix
@@ -5,7 +5,8 @@
 let
   inherit (builtins) attrNames hasAttr map readDir;
   inherit (pkgs.lib) filterAttrs;
-in {
+in
+{
   # Returns a list of all of the regular files in `dir`.
   files = dir:
     map (name: dir + "/${name}")
@@ -31,11 +32,11 @@ in {
     if hasAttr name (readDir dir) then
       dir + "/${name}"
     else
-      # This prevents the function from infinitely recursing and eventually
-      # stack overflowing.
+    # This prevents the function from infinitely recursing and eventually
+    # stack overflowing.
       if (dirOf dir) == dir then
         null
       else
         resolve name (dirOf dir);
-  };
+};
 }
diff --git a/users/wpcarro/website/blog/default.nix b/users/wpcarro/website/blog/default.nix
index 4ccf200223ce..d87b714b6fe1 100644
--- a/users/wpcarro/website/blog/default.nix
+++ b/users/wpcarro/website/blog/default.nix
@@ -17,7 +17,7 @@ let
   posts = sort (x: y: x.date > y.date)
     (filter includePost (list post (import ./posts.nix)));
 
-  rendered = pkgs.runCommandNoCC "blog-posts" {} ''
+  rendered = pkgs.runCommandNoCC "blog-posts" { } ''
     mkdir -p $out
 
     ${lib.concatStringsSep "\n" (map (post:
@@ -25,7 +25,7 @@ let
     ) posts)}
   '';
 
-  formatDate = date: readFile (pkgs.runCommandNoCC "date" {} ''
+  formatDate = date: readFile (pkgs.runCommandNoCC "date" { } ''
     date --date='@${toString date}' '+%B %e, %Y' > $out
   '');
 
@@ -38,7 +38,8 @@ let
     postTitle = post.title;
     postDate = formatDate post.date;
   });
-in pkgs.runCommandNoCC "blog" {} ''
+in
+pkgs.runCommandNoCC "blog" { } ''
   mkdir -p $out
   cp ${withBrand (readFile postsHtml)} $out/index.html
   cp -r ${rendered} $out/posts
diff --git a/users/wpcarro/website/default.nix b/users/wpcarro/website/default.nix
index 9694aad17d60..19229aab5a35 100644
--- a/users/wpcarro/website/default.nix
+++ b/users/wpcarro/website/default.nix
@@ -8,11 +8,11 @@ let
 
   globalVars = {
     inherit domain;
-    homepage  = "https://${domain}/";
-    blog      = "https://${domain}/blog";
-    habits    = "https://${domain}/habits";
-    github    = "https://github.com/wpcarro";
-    linkedin  = "https://linkedin.com/in/williampatrickcarroll";
+    homepage = "https://${domain}/";
+    blog = "https://${domain}/blog";
+    habits = "https://${domain}/habits";
+    github = "https://github.com/wpcarro";
+    linkedin = "https://linkedin.com/in/williampatrickcarroll";
     depotWork = "https://cs.tvl.fyi/depot/-/blob/users/wpcarro";
   };
 
@@ -23,10 +23,11 @@ let
   withBrand = contentHtml: renderTemplate ./fragments/template.html {
     inherit contentHtml;
   };
-in {
+in
+{
   inherit domain renderTemplate withBrand;
 
-  root = pkgs.runCommandNoCC "wpcarro.dev" {} ''
+  root = pkgs.runCommandNoCC "wpcarro.dev" { } ''
     mkdir -p $out
 
     # /
diff --git a/users/wpcarro/website/habit-screens/default.nix b/users/wpcarro/website/habit-screens/default.nix
index 345e6f010d48..3036ba1821cb 100644
--- a/users/wpcarro/website/habit-screens/default.nix
+++ b/users/wpcarro/website/habit-screens/default.nix
@@ -8,7 +8,7 @@ let
     , src
     , name
     , srcdir ? "./src"
-    , targets ? []
+    , targets ? [ ]
     , registryDat ? ./registry.dat
     , outputJavaScript ? false
     }:
@@ -24,33 +24,36 @@ let
         inherit registryDat;
       };
 
-      installPhase = let
-        elmfile = module: "${srcdir}/${builtins.replaceStrings ["."] ["/"] module}.elm";
-        extension = if outputJavaScript then "js" else "html";
-      in ''
-        mkdir -p $out/share/doc
-        ${lib.concatStrings (map (module: ''
-          echo "compiling ${elmfile module}"
-          elm make ${elmfile module} --output $out/${module}.${extension} --docs $out/share/doc/${module}.json
-          ${lib.optionalString outputJavaScript ''
-            echo "minifying ${elmfile module}"
-            uglifyjs $out/${module}.${extension} --compress 'pure_funcs="F2,F3,F4,F5,F6,F7,F8,F9,A2,A3,A4,A5,A6,A7,A8,A9",pure_getters,keep_fargs=false,unsafe_comps,unsafe' \
-                | uglifyjs --mangle --output $out/${module}.min.${extension}
-          ''}
-        '') targets)}
-      '';
+      installPhase =
+        let
+          elmfile = module: "${srcdir}/${builtins.replaceStrings ["."] ["/"] module}.elm";
+          extension = if outputJavaScript then "js" else "html";
+        in
+        ''
+          mkdir -p $out/share/doc
+          ${lib.concatStrings (map (module: ''
+            echo "compiling ${elmfile module}"
+            elm make ${elmfile module} --output $out/${module}.${extension} --docs $out/share/doc/${module}.json
+            ${lib.optionalString outputJavaScript ''
+              echo "minifying ${elmfile module}"
+              uglifyjs $out/${module}.${extension} --compress 'pure_funcs="F2,F3,F4,F5,F6,F7,F8,F9,A2,A3,A4,A5,A6,A7,A8,A9",pure_getters,keep_fargs=false,unsafe_comps,unsafe' \
+                  | uglifyjs --mangle --output $out/${module}.min.${extension}
+            ''}
+          '') targets)}
+        '';
     };
   mainDotElm = mkDerivation {
     name = "elm-app-0.1.0";
     srcs = ./elm-srcs.nix;
     src = ./.;
-    targets = ["Main"];
+    targets = [ "Main" ];
     srcdir = "./src";
     outputJavaScript = true;
   };
-in stdenv.mkDerivation {
+in
+stdenv.mkDerivation {
   name = "habit-screens";
-  buildInputs = [];
+  buildInputs = [ ];
   src = builtins.path { path = ./.; name = "habit-screens"; };
   buildPhase = ''
     mkdir -p $out
diff --git a/users/wpcarro/website/habit-screens/elm-srcs.nix b/users/wpcarro/website/habit-screens/elm-srcs.nix
index 167708e072b0..7f6f77741a9c 100644
--- a/users/wpcarro/website/habit-screens/elm-srcs.nix
+++ b/users/wpcarro/website/habit-screens/elm-srcs.nix
@@ -1,77 +1,77 @@
 {
 
-      "elm-community/maybe-extra" = {
-        sha256 = "0qslmgswa625d218djd3p62pnqcrz38f5p558mbjl6kc1ss0kzv3";
-        version = "5.2.0";
-      };
+  "elm-community/maybe-extra" = {
+    sha256 = "0qslmgswa625d218djd3p62pnqcrz38f5p558mbjl6kc1ss0kzv3";
+    version = "5.2.0";
+  };
 
-      "elm/html" = {
-        sha256 = "1n3gpzmpqqdsldys4ipgyl1zacn0kbpc3g4v3hdpiyfjlgh8bf3k";
-        version = "1.0.0";
-      };
+  "elm/html" = {
+    sha256 = "1n3gpzmpqqdsldys4ipgyl1zacn0kbpc3g4v3hdpiyfjlgh8bf3k";
+    version = "1.0.0";
+  };
 
-      "elm-community/random-extra" = {
-        sha256 = "1dg2nz77w2cvp16xazbdsxkkw0xc9ycqpkd032faqdyky6gmz9g6";
-        version = "3.1.0";
-      };
+  "elm-community/random-extra" = {
+    sha256 = "1dg2nz77w2cvp16xazbdsxkkw0xc9ycqpkd032faqdyky6gmz9g6";
+    version = "3.1.0";
+  };
 
-      "elm/svg" = {
-        sha256 = "1cwcj73p61q45wqwgqvrvz3aypjyy3fw732xyxdyj6s256hwkn0k";
-        version = "1.0.1";
-      };
+  "elm/svg" = {
+    sha256 = "1cwcj73p61q45wqwgqvrvz3aypjyy3fw732xyxdyj6s256hwkn0k";
+    version = "1.0.1";
+  };
 
-      "justinmimbs/date" = {
-        sha256 = "1f0wcl8yhlvp3x4rj53rdy4r4ga7lkl6n8fdfh6b96scz2rnxmd4";
-        version = "3.2.1";
-      };
+  "justinmimbs/date" = {
+    sha256 = "1f0wcl8yhlvp3x4rj53rdy4r4ga7lkl6n8fdfh6b96scz2rnxmd4";
+    version = "3.2.1";
+  };
 
-      "elm/browser" = {
-        sha256 = "0nagb9ajacxbbg985r4k9h0jadqpp0gp84nm94kcgbr5sf8i9x13";
-        version = "1.0.2";
-      };
+  "elm/browser" = {
+    sha256 = "0nagb9ajacxbbg985r4k9h0jadqpp0gp84nm94kcgbr5sf8i9x13";
+    version = "1.0.2";
+  };
 
-      "elm/core" = {
-        sha256 = "19w0iisdd66ywjayyga4kv2p1v9rxzqjaxhckp8ni6n8i0fb2dvf";
-        version = "1.0.5";
-      };
+  "elm/core" = {
+    sha256 = "19w0iisdd66ywjayyga4kv2p1v9rxzqjaxhckp8ni6n8i0fb2dvf";
+    version = "1.0.5";
+  };
 
-      "elm-community/list-extra" = {
-        sha256 = "1ayv3148drynqnxdfwpjxal8vwzgsjqanjg7yxp6lhdcbkxgd3vd";
-        version = "8.2.3";
-      };
+  "elm-community/list-extra" = {
+    sha256 = "1ayv3148drynqnxdfwpjxal8vwzgsjqanjg7yxp6lhdcbkxgd3vd";
+    version = "8.2.3";
+  };
 
-      "elm/random" = {
-        sha256 = "138n2455wdjwa657w6sjq18wx2r0k60ibpc4frhbqr50sncxrfdl";
-        version = "1.0.0";
-      };
+  "elm/random" = {
+    sha256 = "138n2455wdjwa657w6sjq18wx2r0k60ibpc4frhbqr50sncxrfdl";
+    version = "1.0.0";
+  };
 
-      "elm/time" = {
-        sha256 = "0vch7i86vn0x8b850w1p69vplll1bnbkp8s383z7pinyg94cm2z1";
-        version = "1.0.0";
-      };
+  "elm/time" = {
+    sha256 = "0vch7i86vn0x8b850w1p69vplll1bnbkp8s383z7pinyg94cm2z1";
+    version = "1.0.0";
+  };
 
-      "elm/json" = {
-        sha256 = "0kjwrz195z84kwywaxhhlnpl3p251qlbm5iz6byd6jky2crmyqyh";
-        version = "1.1.3";
-      };
+  "elm/json" = {
+    sha256 = "0kjwrz195z84kwywaxhhlnpl3p251qlbm5iz6byd6jky2crmyqyh";
+    version = "1.1.3";
+  };
 
-      "elm/parser" = {
-        sha256 = "0a3cxrvbm7mwg9ykynhp7vjid58zsw03r63qxipxp3z09qks7512";
-        version = "1.1.0";
-      };
+  "elm/parser" = {
+    sha256 = "0a3cxrvbm7mwg9ykynhp7vjid58zsw03r63qxipxp3z09qks7512";
+    version = "1.1.0";
+  };
 
-      "owanturist/elm-union-find" = {
-        sha256 = "13gm7msnp0gr1lqia5m7m4lhy3m6kvjg37d304whb3psn88wqhj5";
-        version = "1.0.0";
-      };
+  "owanturist/elm-union-find" = {
+    sha256 = "13gm7msnp0gr1lqia5m7m4lhy3m6kvjg37d304whb3psn88wqhj5";
+    version = "1.0.0";
+  };
 
-      "elm/url" = {
-        sha256 = "0av8x5syid40sgpl5vd7pry2rq0q4pga28b4yykn9gd9v12rs3l4";
-        version = "1.0.0";
-      };
+  "elm/url" = {
+    sha256 = "0av8x5syid40sgpl5vd7pry2rq0q4pga28b4yykn9gd9v12rs3l4";
+    version = "1.0.0";
+  };
 
-      "elm/virtual-dom" = {
-        sha256 = "0q1v5gi4g336bzz1lgwpn5b1639lrn63d8y6k6pimcyismp2i1yg";
-        version = "1.0.2";
-      };
+  "elm/virtual-dom" = {
+    sha256 = "0q1v5gi4g336bzz1lgwpn5b1639lrn63d8y6k6pimcyismp2i1yg";
+    version = "1.0.2";
+  };
 }
diff --git a/users/wpcarro/website/sandbox/learnpianochords/default.nix b/users/wpcarro/website/sandbox/learnpianochords/default.nix
index 934fbd70ac17..7cfdf7c45137 100644
--- a/users/wpcarro/website/sandbox/learnpianochords/default.nix
+++ b/users/wpcarro/website/sandbox/learnpianochords/default.nix
@@ -8,7 +8,7 @@ let
     , src
     , name
     , srcdir ? "./src"
-    , targets ? []
+    , targets ? [ ]
     , registryDat ? ./registry.dat
     , outputJavaScript ? false
     }:
@@ -24,33 +24,36 @@ let
         inherit registryDat;
       };
 
-      installPhase = let
-        elmfile = module: "${srcdir}/${builtins.replaceStrings ["."] ["/"] module}.elm";
-        extension = if outputJavaScript then "js" else "html";
-      in ''
-        mkdir -p $out/share/doc
-        ${lib.concatStrings (map (module: ''
-          echo "compiling ${elmfile module}"
-          elm make ${elmfile module} --output $out/${module}.${extension} --docs $out/share/doc/${module}.json
-          ${lib.optionalString outputJavaScript ''
-            echo "minifying ${elmfile module}"
-            uglifyjs $out/${module}.${extension} --compress 'pure_funcs="F2,F3,F4,F5,F6,F7,F8,F9,A2,A3,A4,A5,A6,A7,A8,A9",pure_getters,keep_fargs=false,unsafe_comps,unsafe' \
-                | uglifyjs --mangle --output $out/${module}.min.${extension}
-          ''}
-        '') targets)}
-      '';
+      installPhase =
+        let
+          elmfile = module: "${srcdir}/${builtins.replaceStrings ["."] ["/"] module}.elm";
+          extension = if outputJavaScript then "js" else "html";
+        in
+        ''
+          mkdir -p $out/share/doc
+          ${lib.concatStrings (map (module: ''
+            echo "compiling ${elmfile module}"
+            elm make ${elmfile module} --output $out/${module}.${extension} --docs $out/share/doc/${module}.json
+            ${lib.optionalString outputJavaScript ''
+              echo "minifying ${elmfile module}"
+              uglifyjs $out/${module}.${extension} --compress 'pure_funcs="F2,F3,F4,F5,F6,F7,F8,F9,A2,A3,A4,A5,A6,A7,A8,A9",pure_getters,keep_fargs=false,unsafe_comps,unsafe' \
+                  | uglifyjs --mangle --output $out/${module}.min.${extension}
+            ''}
+          '') targets)}
+        '';
     };
   mainDotElm = mkDerivation {
     name = "elm-app-0.1.0";
     srcs = ./elm-srcs.nix;
     src = builtins.path { path = ./.; name = "learnpianochords"; };
-    targets = ["Main"];
+    targets = [ "Main" ];
     srcdir = "./src";
     outputJavaScript = true;
   };
-in stdenv.mkDerivation {
+in
+stdenv.mkDerivation {
   name = "learn-piano-chords";
-  buildInputs = [];
+  buildInputs = [ ];
   src = builtins.path { path = ./.; name = "learnpianochords"; };
   buildPhase = ''
     mkdir -p $out
diff --git a/users/wpcarro/website/sandbox/learnpianochords/elm-srcs.nix b/users/wpcarro/website/sandbox/learnpianochords/elm-srcs.nix
index 2823b430f887..c62262e6835a 100644
--- a/users/wpcarro/website/sandbox/learnpianochords/elm-srcs.nix
+++ b/users/wpcarro/website/sandbox/learnpianochords/elm-srcs.nix
@@ -1,67 +1,67 @@
 {
 
-      "elm-community/maybe-extra" = {
-        sha256 = "0qslmgswa625d218djd3p62pnqcrz38f5p558mbjl6kc1ss0kzv3";
-        version = "5.2.0";
-      };
+  "elm-community/maybe-extra" = {
+    sha256 = "0qslmgswa625d218djd3p62pnqcrz38f5p558mbjl6kc1ss0kzv3";
+    version = "5.2.0";
+  };
 
-      "elm/html" = {
-        sha256 = "1n3gpzmpqqdsldys4ipgyl1zacn0kbpc3g4v3hdpiyfjlgh8bf3k";
-        version = "1.0.0";
-      };
+  "elm/html" = {
+    sha256 = "1n3gpzmpqqdsldys4ipgyl1zacn0kbpc3g4v3hdpiyfjlgh8bf3k";
+    version = "1.0.0";
+  };
 
-      "elm-community/random-extra" = {
-        sha256 = "1dg2nz77w2cvp16xazbdsxkkw0xc9ycqpkd032faqdyky6gmz9g6";
-        version = "3.1.0";
-      };
+  "elm-community/random-extra" = {
+    sha256 = "1dg2nz77w2cvp16xazbdsxkkw0xc9ycqpkd032faqdyky6gmz9g6";
+    version = "3.1.0";
+  };
 
-      "elm/svg" = {
-        sha256 = "1cwcj73p61q45wqwgqvrvz3aypjyy3fw732xyxdyj6s256hwkn0k";
-        version = "1.0.1";
-      };
+  "elm/svg" = {
+    sha256 = "1cwcj73p61q45wqwgqvrvz3aypjyy3fw732xyxdyj6s256hwkn0k";
+    version = "1.0.1";
+  };
 
-      "elm/browser" = {
-        sha256 = "0nagb9ajacxbbg985r4k9h0jadqpp0gp84nm94kcgbr5sf8i9x13";
-        version = "1.0.2";
-      };
+  "elm/browser" = {
+    sha256 = "0nagb9ajacxbbg985r4k9h0jadqpp0gp84nm94kcgbr5sf8i9x13";
+    version = "1.0.2";
+  };
 
-      "elm/core" = {
-        sha256 = "19w0iisdd66ywjayyga4kv2p1v9rxzqjaxhckp8ni6n8i0fb2dvf";
-        version = "1.0.5";
-      };
+  "elm/core" = {
+    sha256 = "19w0iisdd66ywjayyga4kv2p1v9rxzqjaxhckp8ni6n8i0fb2dvf";
+    version = "1.0.5";
+  };
 
-      "elm-community/list-extra" = {
-        sha256 = "1ayv3148drynqnxdfwpjxal8vwzgsjqanjg7yxp6lhdcbkxgd3vd";
-        version = "8.2.3";
-      };
+  "elm-community/list-extra" = {
+    sha256 = "1ayv3148drynqnxdfwpjxal8vwzgsjqanjg7yxp6lhdcbkxgd3vd";
+    version = "8.2.3";
+  };
 
-      "elm/random" = {
-        sha256 = "138n2455wdjwa657w6sjq18wx2r0k60ibpc4frhbqr50sncxrfdl";
-        version = "1.0.0";
-      };
+  "elm/random" = {
+    sha256 = "138n2455wdjwa657w6sjq18wx2r0k60ibpc4frhbqr50sncxrfdl";
+    version = "1.0.0";
+  };
 
-      "elm/time" = {
-        sha256 = "0vch7i86vn0x8b850w1p69vplll1bnbkp8s383z7pinyg94cm2z1";
-        version = "1.0.0";
-      };
+  "elm/time" = {
+    sha256 = "0vch7i86vn0x8b850w1p69vplll1bnbkp8s383z7pinyg94cm2z1";
+    version = "1.0.0";
+  };
 
-      "elm/json" = {
-        sha256 = "0kjwrz195z84kwywaxhhlnpl3p251qlbm5iz6byd6jky2crmyqyh";
-        version = "1.1.3";
-      };
+  "elm/json" = {
+    sha256 = "0kjwrz195z84kwywaxhhlnpl3p251qlbm5iz6byd6jky2crmyqyh";
+    version = "1.1.3";
+  };
 
-      "owanturist/elm-union-find" = {
-        sha256 = "13gm7msnp0gr1lqia5m7m4lhy3m6kvjg37d304whb3psn88wqhj5";
-        version = "1.0.0";
-      };
+  "owanturist/elm-union-find" = {
+    sha256 = "13gm7msnp0gr1lqia5m7m4lhy3m6kvjg37d304whb3psn88wqhj5";
+    version = "1.0.0";
+  };
 
-      "elm/url" = {
-        sha256 = "0av8x5syid40sgpl5vd7pry2rq0q4pga28b4yykn9gd9v12rs3l4";
-        version = "1.0.0";
-      };
+  "elm/url" = {
+    sha256 = "0av8x5syid40sgpl5vd7pry2rq0q4pga28b4yykn9gd9v12rs3l4";
+    version = "1.0.0";
+  };
 
-      "elm/virtual-dom" = {
-        sha256 = "0q1v5gi4g336bzz1lgwpn5b1639lrn63d8y6k6pimcyismp2i1yg";
-        version = "1.0.2";
-      };
+  "elm/virtual-dom" = {
+    sha256 = "0q1v5gi4g336bzz1lgwpn5b1639lrn63d8y6k6pimcyismp2i1yg";
+    version = "1.0.2";
+  };
 }
diff --git a/views/kit/default.nix b/views/kit/default.nix
index 759b36997ad3..7dd291019911 100644
--- a/views/kit/default.nix
+++ b/views/kit/default.nix
@@ -8,7 +8,7 @@
 
 { pkgs ? import ./nixpkgs { depotOverlays = false; }, ... }:
 
-pkgs.lib.fix(self: {
+pkgs.lib.fix (self: {
   buildGo = import ./buildGo { inherit pkgs; };
   buildkite = import ./buildkite { inherit pkgs; };
   readTree = import ./readTree { };
diff --git a/web/atom-feed/default.nix b/web/atom-feed/default.nix
index 1fbcde9bd4ff..fca69e20fad9 100644
--- a/web/atom-feed/default.nix
+++ b/web/atom-feed/default.nix
@@ -90,7 +90,7 @@ let
 
   # Feed generation functions:
 
-  renderEpoch = epoch: removeSuffix "\n" (readFile (runCommandNoCC "date-${toString epoch}" {} ''
+  renderEpoch = epoch: removeSuffix "\n" (readFile (runCommandNoCC "date-${toString epoch}" { } ''
     date --date='@${toString epoch}' --utc --iso-8601='seconds' > $out
   ''));
 
@@ -147,6 +147,7 @@ let
       ${concatStrings (map renderEntry (sortEntries f.entries))}
     </feed>
   '');
-in {
+in
+{
   inherit entry feed renderFeed renderEpoch;
 }
diff --git a/web/blog/default.nix b/web/blog/default.nix
index 2cabc09b5524..f55c33a63a54 100644
--- a/web/blog/default.nix
+++ b/web/blog/default.nix
@@ -40,7 +40,7 @@ let
   fragments = import ./fragments.nix args;
 
   # Functions for generating feeds for these blogs using //web/atom-feed.
-  toFeedEntry = { baseUrl, ...}: defun [ post atom-feed.entry ] (post: rec {
+  toFeedEntry = { baseUrl, ... }: defun [ post atom-feed.entry ] (post: rec {
     id = "${baseUrl}/${post.key}";
     title = post.title;
     content = readFile (renderMarkdown post.content);
@@ -52,7 +52,8 @@ let
       href = id;
     };
   });
-in {
+in
+{
   inherit post toFeedEntry;
   inherit (fragments) renderPost;
 
diff --git a/web/blog/fragments.nix b/web/blog/fragments.nix
index 63fc1ab4c5cf..19d62fa4744d 100644
--- a/web/blog/fragments.nix
+++ b/web/blog/fragments.nix
@@ -20,29 +20,29 @@ let
   escape = replaceStrings [ "<" ">" "&" "'" ] [ "&lt;" "&gt;" "&amp;" "&#39;" ];
 
   header = name: title: ''
-  <!DOCTYPE html>
-  <head>
-    <meta charset="utf-8">
-    <meta name="viewport" content="width=device-width, initial-scale=1">
-    <meta name="description" content="${escape name}">
-    <link rel="stylesheet" type="text/css" href="${staticUrl}/tvl.css" media="all">
-    <link rel="icon" type="image/webp" href="/static/favicon.webp">
-    <link rel="alternate" type="application/atom+xml" title="Atom Feed" href="https://tvl.fyi/feed.atom">
-    <title>${escape name}: ${escape title}</title>
-  </head>
-  <body class="light">
-    <header>
-      <h1><a class="blog-title" href="/">${escape name}</a> </h1>
-      <hr>
-    </header>
+    <!DOCTYPE html>
+    <head>
+      <meta charset="utf-8">
+      <meta name="viewport" content="width=device-width, initial-scale=1">
+      <meta name="description" content="${escape name}">
+      <link rel="stylesheet" type="text/css" href="${staticUrl}/tvl.css" media="all">
+      <link rel="icon" type="image/webp" href="/static/favicon.webp">
+      <link rel="alternate" type="application/atom+xml" title="Atom Feed" href="https://tvl.fyi/feed.atom">
+      <title>${escape name}: ${escape title}</title>
+    </head>
+    <body class="light">
+      <header>
+        <h1><a class="blog-title" href="/">${escape name}</a> </h1>
+        <hr>
+      </header>
   '';
 
   fullFooter = content: ''
-    <hr>
-    <footer>
-      ${content}
-    </footer>
-  </body>
+      <hr>
+      <footer>
+        ${content}
+      </footer>
+    </body>
   '';
 
   draftWarning = writeText "draft.html" ''
@@ -61,7 +61,7 @@ let
     <hr>
   '';
 
-  renderPost = { name, footer, ... }: post: runCommandNoCC "${post.key}.html" {} ''
+  renderPost = { name, footer, ... }: post: runCommandNoCC "${post.key}.html" { } ''
     cat ${writeText "header.html" (header name post.title)} > $out
 
     # Write the post title & date
@@ -90,6 +90,7 @@ let
 
     cat ${writeText "footer.html" (fullFooter footer)} >> $out
   '';
-in {
+in
+{
   inherit isDraft isUnlisted renderPost;
 }
diff --git a/web/bubblegum/default.nix b/web/bubblegum/default.nix
index 1fbe544bce11..528d73032ba6 100644
--- a/web/bubblegum/default.nix
+++ b/web/bubblegum/default.nix
@@ -134,20 +134,23 @@ let
           code = statusCodes."${statusArg}" or null;
           line = statusArg;
         } else {
-          code = null; line = null;
+          code = null;
+          line = null;
         };
       renderedHeaders = lib.concatStrings
         (lib.mapAttrsToList (n: v: "${n}: ${toString v}\r\n") headers);
-      internalError = msg: respond 500 {
-        Content-type = "text/plain";
-      } "bubblegum error: ${msg}";
+      internalError = msg: respond 500
+        {
+          Content-type = "text/plain";
+        } "bubblegum error: ${msg}";
       body = builtins.tryEval bodyArg;
     in
-      if status.code == null || status.line == null
-      then internalError "Invalid status ${lib.generators.toPretty {} statusArg}."
-      else if !body.success
-      then internalError "Unknown evaluation error in user code"
-      else lib.concatStrings [
+    if status.code == null || status.line == null
+    then internalError "Invalid status ${lib.generators.toPretty {} statusArg}."
+    else if !body.success
+    then internalError "Unknown evaluation error in user code"
+    else
+      lib.concatStrings [
         "Status: ${toString status.code} ${status.line}\r\n"
         renderedHeaders
         "\r\n"
@@ -169,9 +172,9 @@ let
     let
       p = builtins.getEnv "PATH_INFO";
     in
-      if builtins.stringLength p == 0
-      then "/"
-      else p;
+    if builtins.stringLength p == 0
+    then "/"
+    else p;
 
   /* Helper function which converts a path from the
      root of the CGI script (i. e. something which
@@ -187,12 +190,13 @@ let
     else "${scriptName}/${path}";
 
   bins = getBins pkgs.coreutils [ "env" "tee" "cat" "printf" "chmod" ]
-      // getBins nint [ "nint" ];
+    // getBins nint [ "nint" ];
 
   /* Type: args -> either path derivation string -> derivation
   */
   writeCGI =
-    { # if given sets the `PATH` to search for `nix-instantiate`
+    {
+      # if given sets the `PATH` to search for `nix-instantiate`
       # Useful when using for example thttpd which unsets `PATH`
       # in the CGI environment.
       binPath ? ""
@@ -202,7 +206,8 @@ let
     , name ? null
     , ...
     }@args:
-    input: let
+    input:
+    let
       drvName =
         if builtins.isString input || args ? name
         then args.name
@@ -227,20 +232,31 @@ let
         # always pass depot so scripts can use this library
         "--arg depot '(import ${minimalDepot} {})'"
       ]);
-    in runExecline.local drvName {} [
-      "importas" "out" "out"
-      "pipeline" [
-        "foreground" [
-          "if" [ bins.printf "%s\n" shebang ]
+    in
+    runExecline.local drvName { } [
+      "importas"
+      "out"
+      "out"
+      "pipeline"
+      [
+        "foreground"
+        [
+          "if"
+          [ bins.printf "%s\n" shebang ]
         ]
-        "if" [ bins.cat script ]
+        "if"
+        [ bins.cat script ]
       ]
-      "if" [ bins.tee "$out" ]
-      "if" [ bins.chmod "+x" "$out" ]
-      "exit" "0"
+      "if"
+      [ bins.tee "$out" ]
+      "if"
+      [ bins.chmod "+x" "$out" ]
+      "exit"
+      "0"
     ];
 
-in {
+in
+{
   inherit
     respond
     pathInfo
diff --git a/web/bubblegum/examples/blog.nix b/web/bubblegum/examples/blog.nix
index 9359d38fa28e..76b91168b894 100644
--- a/web/bubblegum/examples/blog.nix
+++ b/web/bubblegum/examples/blog.nix
@@ -26,17 +26,17 @@ let
     let
       matched = builtins.match "/?([0-9]+)-([0-9]+)-([0-9]+)-.+" post;
     in
-      if matched == null
-      then [ 0 0 0 ]
-      else builtins.map builtins.fromJSON matched;
+    if matched == null
+    then [ 0 0 0 ]
+    else builtins.map builtins.fromJSON matched;
 
   parseTitle = post:
     let
       matched = builtins.match "/?[0-9]+-[0-9]+-[0-9]+-(.+).html" post;
     in
-      if matched == null
-      then "no title"
-      else builtins.head matched;
+    if matched == null
+    then "no title"
+    else builtins.head matched;
 
   dateAtLeast = a: b:
     builtins.all fun.id
@@ -68,11 +68,13 @@ let
     <main>
       <h1>blog posts</h1>
       <ul>
-  '' + lib.concatMapStrings (post: ''
+  '' + lib.concatMapStrings
+    (post: ''
       <li>
         <a href="${absolutePath (url.encode {} post)}">${parseTitle post}</a>
       </li>
-  '') posts + ''
+    '')
+    posts + ''
       </ul>
     </main>
   '';
@@ -80,10 +82,14 @@ let
   formatDate =
     let
       # Assume we never deal with years < 1000
-      formatDigit = d: string.fit {
-        char = "0"; width = 2;
-      } (toString d);
-    in lib.concatMapStringsSep "-" formatDigit;
+      formatDigit = d: string.fit
+        {
+          char = "0";
+          width = 2;
+        }
+        (toString d);
+    in
+    lib.concatMapStringsSep "-" formatDigit;
 
   post = title: post: ''
     <main>
@@ -101,8 +107,9 @@ let
   validatePathInfo = pathInfo:
     let
       chars = string.toChars pathInfo;
-    in builtins.length chars > 1
-      && !(builtins.elem "/" (builtins.tail chars));
+    in
+    builtins.length chars > 1
+    && !(builtins.elem "/" (builtins.tail chars));
 
   response =
     if pathInfo == "/"
@@ -129,6 +136,8 @@ let
       inner = "<h1>404 — not found</h1>";
     };
 in
-  respond response.status {
-    "Content-type" = "text/html";
-  } (generic response)
+respond response.status
+{
+  "Content-type" = "text/html";
+}
+  (generic response)
diff --git a/web/bubblegum/examples/default.nix b/web/bubblegum/examples/default.nix
index 3f0f51db6369..89482f93eacc 100644
--- a/web/bubblegum/examples/default.nix
+++ b/web/bubblegum/examples/default.nix
@@ -29,33 +29,54 @@ let
     ;
 
   bins = (getBins pkgs.thttpd [ "thttpd" ])
-      // (getBins pkgs.coreutils [ "printf" "cp" "mkdir" ]);
+    // (getBins pkgs.coreutils [ "printf" "cp" "mkdir" ]);
 
   webRoot =
     let
       copyScripts = lib.concatMap
-        (path: let
-          cgi = writeCGI {
-            # assume we are on NixOS since thttpd doesn't set PATH.
-            # using third_party.nix is tricky because not everyone
-            # has a tvix daemon running.
-            binPath = "/run/current-system/sw/bin";
-          } path;
-        in [
-          "if" [ bins.cp cgi "\${out}/${cgi.name}" ]
-        ]) scripts;
-    in runExecline.local "webroot" {} ([
-      "importas" "out" "out"
-      "if" [ bins.mkdir "-p" "$out" ]
+        (path:
+          let
+            cgi = writeCGI
+              {
+                # assume we are on NixOS since thttpd doesn't set PATH.
+                # using third_party.nix is tricky because not everyone
+                # has a tvix daemon running.
+                binPath = "/run/current-system/sw/bin";
+              }
+              path;
+          in
+          [
+            "if"
+            [ bins.cp cgi "\${out}/${cgi.name}" ]
+          ])
+        scripts;
+    in
+    runExecline.local "webroot" { } ([
+      "importas"
+      "out"
+      "out"
+      "if"
+      [ bins.mkdir "-p" "$out" ]
     ] ++ copyScripts);
 
   port = 9000;
 
 in
-  writeExecline "serve-examples" {} [
-    "foreground" [
-      bins.printf "%s\n" "Running on http://localhost:${toString port}"
-    ]
-    "${bins.thttpd}" "-D" "-p" (toString port) "-l" "/dev/stderr"
-                     "-c" "*.nix" "-d" webRoot
+writeExecline "serve-examples" { } [
+  "foreground"
+  [
+    bins.printf
+    "%s\n"
+    "Running on http://localhost:${toString port}"
   ]
+  "${bins.thttpd}"
+  "-D"
+  "-p"
+  (toString port)
+  "-l"
+  "/dev/stderr"
+  "-c"
+  "*.nix"
+  "-d"
+  webRoot
+]
diff --git a/web/bubblegum/examples/derivation-svg.nix b/web/bubblegum/examples/derivation-svg.nix
index a5f30a2bd155..9a625afb55d7 100644
--- a/web/bubblegum/examples/derivation-svg.nix
+++ b/web/bubblegum/examples/derivation-svg.nix
@@ -6,6 +6,8 @@ let
     respond
     ;
 in
-  respond "OK" {
-    Content-type = "image/svg+xml";
-  } (builtins.readFile "${depot.tvix.docs.svg}/component-flow.svg")
+respond "OK"
+{
+  Content-type = "image/svg+xml";
+}
+  (builtins.readFile "${depot.tvix.docs.svg}/component-flow.svg")
diff --git a/web/bubblegum/examples/hello.nix b/web/bubblegum/examples/hello.nix
index db04d4082371..bd4891f7d66d 100644
--- a/web/bubblegum/examples/hello.nix
+++ b/web/bubblegum/examples/hello.nix
@@ -87,6 +87,8 @@ let
   response = routes."${pathInfo}" or notFound;
 
 in
-  respond response.status {
-    "Content-type" = "text/html";
-  } (template response)
+respond response.status
+{
+  "Content-type" = "text/html";
+}
+  (template response)
diff --git a/web/cgit-taz/default.nix b/web/cgit-taz/default.nix
index a89b96fd92ba..83ec822e5d6b 100644
--- a/web/cgit-taz/default.nix
+++ b/web/cgit-taz/default.nix
@@ -63,12 +63,13 @@ let
          envp[envn++] = build_env( "PATH=%s", CGI_PATH );
      #ifdef CGI_LD_LIBRARY_PATH
   '';
-  thttpdCgit = thttpd.overrideAttrs(old: {
+  thttpdCgit = thttpd.overrideAttrs (old: {
     patches = [
       ./thttpd_cgi_idx.patch
       thttpdConfigPatch
     ];
   });
-in writeShellScriptBin "cgit-launch" ''
+in
+writeShellScriptBin "cgit-launch" ''
   exec ${thttpdCgit}/bin/thttpd -D -C ${thttpdConfig}
 ''
diff --git a/web/panettone/shell.nix b/web/panettone/shell.nix
index aeafc7afcd99..54bc49013b79 100644
--- a/web/panettone/shell.nix
+++ b/web/panettone/shell.nix
@@ -1,4 +1,4 @@
-{ depot ? import ../.. {} }:
+{ depot ? import ../.. { } }:
 
 with depot.third_party;
 
diff --git a/web/static/default.nix b/web/static/default.nix
index 2120e649f03c..9eaeb0ec1495 100644
--- a/web/static/default.nix
+++ b/web/static/default.nix
@@ -5,7 +5,9 @@
 let
   storeDirLength = with builtins; (stringLength storeDir) + 1;
   logo = depot.web.tvl.logo;
-in lib.fix(self: pkgs.runCommand "tvl-static" {
+in
+lib.fix (self: pkgs.runCommand "tvl-static"
+{
   passthru = {
     # Preserving the string context here makes little sense: While we are
     # referencing this derivation, we are not doing so via the nix store,
diff --git a/web/todolist/default.nix b/web/todolist/default.nix
index a2fcb501bde6..c37a65555956 100644
--- a/web/todolist/default.nix
+++ b/web/todolist/default.nix
@@ -39,7 +39,7 @@ let
     user = string;
   };
 
-  allTodos = fromJSON (readFile (runCommandNoCC "depot-todos.json" {} ''
+  allTodos = fromJSON (readFile (runCommandNoCC "depot-todos.json" { } ''
     ${ripgrep}/bin/rg --json 'TODO\(\w+\):.*$' ${depot.path} | \
       ${jq}/bin/jq -s -f ${./extract-todos.jq} > $out
   ''));
@@ -58,22 +58,23 @@ let
   '');
 
   userParagraph = todos:
-  let user = (head todos).user;
-  in ''
-    <p>
-      <h3>
-        <a style="color:inherit; text-decoration: none;"
-           name="${user}"
-           href="#${user}">${user}</a>
-      </h3>
-      ${concatStringsSep "\n" (map todoElement todos)}
-    </p>
-    <hr>
-  '';
+    let user = (head todos).user;
+    in ''
+      <p>
+        <h3>
+          <a style="color:inherit; text-decoration: none;"
+             name="${user}"
+             href="#${user}">${user}</a>
+        </h3>
+        ${concatStringsSep "\n" (map todoElement todos)}
+      </p>
+      <hr>
+    '';
 
   staticUrl = "https://static.tvl.fyi/${depot.web.static.drvHash}";
 
-in writeTextFile {
+in
+writeTextFile {
   name = "tvl-todos";
   destination = "/index.html";
   text = ''
diff --git a/web/tvl/blog/default.nix b/web/tvl/blog/default.nix
index fe8d1c42d6dd..963bb635e3b9 100644
--- a/web/tvl/blog/default.nix
+++ b/web/tvl/blog/default.nix
@@ -3,7 +3,7 @@
 {
   config = {
     name = "TVL's blog";
-    footer = depot.web.tvl.footer {};
+    footer = depot.web.tvl.footer { };
     baseUrl = "https://tvl.fyi/blog";
   };
 
diff --git a/web/tvl/default.nix b/web/tvl/default.nix
index 1025b1a7b2b4..262be54c0ef1 100644
--- a/web/tvl/default.nix
+++ b/web/tvl/default.nix
@@ -16,9 +16,10 @@ let
       (map (p: "cp ${blog.renderPost tvl.blog.config p} $out/blog/${p.key}.html") posts)
   );
 
-  tvlGraph = runCommandNoCC "tvl.svg" {
-    nativeBuildInputs = with pkgs; [ fontconfig freetype cairo jetbrains-mono ];
-  } ''
+  tvlGraph = runCommandNoCC "tvl.svg"
+    {
+      nativeBuildInputs = with pkgs; [ fontconfig freetype cairo jetbrains-mono ];
+    } ''
     ${graphviz}/bin/neato -Tsvg ${./tvl.dot} > $out
   '';
 
@@ -121,7 +122,8 @@ let
       </style>
     '';
   };
-in runCommandNoCC "website" {} ''
+in
+runCommandNoCC "website" { } ''
   mkdir -p $out/blog
   cp ${homepage} $out/index.html
   ${postRenderingCommands tvl.blog.posts}
diff --git a/web/tvl/footer/default.nix b/web/tvl/footer/default.nix
index 7412d019ee56..dc2c963f90f4 100644
--- a/web/tvl/footer/default.nix
+++ b/web/tvl/footer/default.nix
@@ -15,7 +15,7 @@ args: ''
     <a class="uncoloured-link" href="https://todo.tvl.fyi/">todos</a>
     |
     <a class="uncoloured-link" href="https://atward.tvl.fyi/">search</a>
-    '' + lib.optionalString (args ? extraFooter) args.extraFooter + ''
+'' + lib.optionalString (args ? extraFooter) args.extraFooter + ''
   </p>
   <p class="lod">ಠ_ಠ</p>
 ''
diff --git a/web/tvl/logo/default.nix b/web/tvl/logo/default.nix
index 940f67199bf6..d9e023946a42 100644
--- a/web/tvl/logo/default.nix
+++ b/web/tvl/logo/default.nix
@@ -21,20 +21,22 @@ let
 
   # Create an animated CSS that equally spreads out the colours over
   # the animation duration (1min).
-  animatedCss = colours: let
-    # Calculate at which percentage offset each colour should appear.
-    stepSize = 100 / ((builtins.length colours) - 1);
-    frames = lib.imap0 (idx: colour: { inherit colour; at = idx * stepSize; }) colours;
-    frameCss = frame: "${toString frame.at}% { fill: ${frame.colour}; }";
-  in ''
-    #armchair-background {
-      animation: 30s infinite alternate armchairPalette;
-    }
+  animatedCss = colours:
+    let
+      # Calculate at which percentage offset each colour should appear.
+      stepSize = 100 / ((builtins.length colours) - 1);
+      frames = lib.imap0 (idx: colour: { inherit colour; at = idx * stepSize; }) colours;
+      frameCss = frame: "${toString frame.at}% { fill: ${frame.colour}; }";
+    in
+    ''
+      #armchair-background {
+        animation: 30s infinite alternate armchairPalette;
+      }
 
-    @keyframes armchairPalette {
-    ${lib.concatStringsSep "\n" (map frameCss frames)}
-    }
-  '';
+      @keyframes armchairPalette {
+      ${lib.concatStringsSep "\n" (map frameCss frames)}
+      }
+    '';
 
   # Dark version of the logo, suitable for light backgrounds.
   darkCss = armchairCss: ''
@@ -67,7 +69,8 @@ let
     </svg>
   '';
 
-in depot.nix.readTree.drvTargets(lib.fix (self: {
+in
+depot.nix.readTree.drvTargets (lib.fix (self: {
   # Expose the logo construction functions.
   inherit palette darkCss lightCss animatedCss staticCss;
 
@@ -75,7 +78,7 @@ in depot.nix.readTree.drvTargets(lib.fix (self: {
   logoSvg = style: pkgs.writeText "logo.svg" (logoSvg style);
 
   # Create a PNG of the TVL logo with the specified style and DPI.
-  logoPng = style: dpi: pkgs.runCommandNoCC "logo.png" {} ''
+  logoPng = style: dpi: pkgs.runCommandNoCC "logo.png" { } ''
     ${pkgs.inkscape}/bin/inkscape \
       --export-area-drawing \
       --export-background-opacity 0 \
@@ -87,7 +90,8 @@ in depot.nix.readTree.drvTargets(lib.fix (self: {
   pastelRainbow = self.logoSvg (darkCss (animatedCss (lib.attrValues palette)));
 }
 
-# Add individual outputs for static dark logos of each colour.
-// (lib.mapAttrs'
-    (k: v: lib.nameValuePair "${k}Png"
-     (self.logoPng (darkCss (staticCss v)) 96)) palette)))
+  # Add individual outputs for static dark logos of each colour.
+  // (lib.mapAttrs'
+  (k: v: lib.nameValuePair "${k}Png"
+    (self.logoPng (darkCss (staticCss v)) 96))
+  palette)))
diff --git a/web/tvl/template/default.nix b/web/tvl/template/default.nix
index 6ccc10de6235..6b6a5b03038e 100644
--- a/web/tvl/template/default.nix
+++ b/web/tvl/template/default.nix
@@ -1,6 +1,7 @@
 { depot, pkgs, lib, ... }:
 
-{ # content of the <title> tag
+{
+  # content of the <title> tag
   title
   # main part of the page, usually wrapped with <main>
 , content
@@ -17,7 +18,8 @@ let
   inherit (depot.tools) cheddar;
 in
 
-runCommandNoCC "${lib.strings.sanitizeDerivationName title}-index.html" {
+runCommandNoCC "${lib.strings.sanitizeDerivationName title}-index.html"
+{
   headerPart = ''
     <!DOCTYPE html>
     <head>
@@ -36,11 +38,11 @@ runCommandNoCC "${lib.strings.sanitizeDerivationName title}-index.html" {
   inherit content;
 
   footerPart = ''
-    <hr>
-    <footer>
-      ${depot.web.tvl.footer args}
-    </footer>
-  </body>
+      <hr>
+      <footer>
+        ${depot.web.tvl.footer args}
+      </footer>
+    </body>
   '';
 
   passAsFile = [ "headerPart" "content" "footerPart" ];