Diffstat (limited to 'users/Profpatsch')
-rw-r--r-- users/Profpatsch/.envrc | 5
-rw-r--r-- users/Profpatsch/.gitignore | 1
-rw-r--r-- users/Profpatsch/.hlint.yaml | 357
-rw-r--r-- users/Profpatsch/.vscode/launch.json | 18
-rw-r--r-- users/Profpatsch/.vscode/settings.json | 25
-rw-r--r-- users/Profpatsch/OWNERS | 7
-rw-r--r-- users/Profpatsch/README.md | 10
-rw-r--r-- users/Profpatsch/advent-of-code/2020/01/main.py | 22
-rw-r--r-- users/Profpatsch/advent-of-code/2020/02/main.py | 77
-rw-r--r-- users/Profpatsch/advent-of-code/2020/03/main.py | 66
-rw-r--r-- users/Profpatsch/advent-of-code/2020/04/main.py | 104
-rw-r--r-- users/Profpatsch/alacritty.nix | 27
-rw-r--r-- users/Profpatsch/aliases.nix | 88
-rw-r--r-- users/Profpatsch/arglib/ArglibNetencode.hs | 22
-rw-r--r-- users/Profpatsch/arglib/arglib-netencode.cabal | 65
-rw-r--r-- users/Profpatsch/arglib/netencode.nix | 81
-rw-r--r-- users/Profpatsch/atomically-write.nix | 29
-rw-r--r-- users/Profpatsch/blog/README.md | 7
-rw-r--r-- users/Profpatsch/blog/default.nix | 481
-rw-r--r-- users/Profpatsch/blog/notes/an-idealized-conflang.md | 298
-rw-r--r-- users/Profpatsch/blog/notes/preventing-oom.md | 33
-rw-r--r-- users/Profpatsch/blog/notes/private-trackers-are-markets.md | 46
-rw-r--r-- users/Profpatsch/blog/notes/rust-string-conversions.md | 53
-rw-r--r-- users/Profpatsch/blog/posts/2017-05-04-ligature-emulation-in-emacs.md | 123
-rw-r--r-- users/Profpatsch/cabal.project | 14
-rw-r--r-- users/Profpatsch/cas-serve/CasServe.hs | 247
-rw-r--r-- users/Profpatsch/cas-serve/cas-serve.cabal | 73
-rw-r--r-- users/Profpatsch/cas-serve/default.nix | 38
-rw-r--r-- users/Profpatsch/cas-serve/schema.sql | 38
-rw-r--r-- users/Profpatsch/cas-serve/wordlist.json | 1
-rw-r--r-- users/Profpatsch/cas-serve/wordlist.sqlite | bin 0 -> 36864 bytes
-rw-r--r-- users/Profpatsch/cdb.nix | 93
-rw-r--r-- users/Profpatsch/declib/.eslintrc.json | 14
-rw-r--r-- users/Profpatsch/declib/.gitignore | 6
-rw-r--r-- users/Profpatsch/declib/.prettierrc | 8
-rw-r--r-- users/Profpatsch/declib/README.md | 4
-rw-r--r-- users/Profpatsch/declib/build.ninja | 16
-rw-r--r-- users/Profpatsch/declib/index.ts | 245
-rw-r--r-- users/Profpatsch/declib/package.json | 25
-rw-r--r-- users/Profpatsch/declib/tsconfig.json | 25
-rw-r--r-- users/Profpatsch/dhall/lib.dhall | 84
-rw-r--r-- users/Profpatsch/emacs-tree-sitter-move/README.md | 5
-rw-r--r-- users/Profpatsch/emacs-tree-sitter-move/default.nix | 3
-rw-r--r-- users/Profpatsch/emacs-tree-sitter-move/shell.nix | 17
-rw-r--r-- users/Profpatsch/emacs-tree-sitter-move/test.json | 14
-rw-r--r-- users/Profpatsch/emacs-tree-sitter-move/test.py | 13
-rw-r--r-- users/Profpatsch/emacs-tree-sitter-move/test.sh | 14
-rw-r--r-- users/Profpatsch/emacs-tree-sitter-move/tmp.el | 28
-rw-r--r-- users/Profpatsch/emacs-tree-sitter-move/tree-sitter-move.el | 139
-rw-r--r-- users/Profpatsch/exactSource.nix | 90
-rw-r--r-- users/Profpatsch/execline/ExecHelpers.hs | 48
-rw-r--r-- users/Profpatsch/execline/default.nix | 70
-rw-r--r-- users/Profpatsch/execline/exec-helpers.cabal | 14
-rw-r--r-- users/Profpatsch/execline/exec-helpers/Cargo.lock | 7
-rw-r--r-- users/Profpatsch/execline/exec-helpers/Cargo.toml | 8
-rw-r--r-- users/Profpatsch/execline/exec-helpers/default.nix | 6
-rw-r--r-- users/Profpatsch/execline/exec-helpers/exec_helpers.rs | 149
-rw-r--r-- users/Profpatsch/fafo.jpg | bin 0 -> 26139 bytes
-rw-r--r-- users/Profpatsch/git-db/default.nix | 10
-rw-r--r-- users/Profpatsch/git-db/git-db.rs | 90
-rw-r--r-- users/Profpatsch/haskell-module-deps/README.md | 5
-rw-r--r-- users/Profpatsch/haskell-module-deps/default.nix | 55
-rw-r--r-- users/Profpatsch/haskell-module-deps/example-output-dhall-haskell.png | bin 0 -> 415873 bytes
-rw-r--r-- users/Profpatsch/hie.yaml | 36
-rw-r--r-- users/Profpatsch/htmx-experiment/Main.hs | 4
-rw-r--r-- users/Profpatsch/htmx-experiment/default.nix | 46
-rw-r--r-- users/Profpatsch/htmx-experiment/htmx-experiment.cabal | 89
-rw-r--r-- users/Profpatsch/htmx-experiment/src/HtmxExperiment.hs | 377
-rw-r--r-- users/Profpatsch/htmx-experiment/src/ServerErrors.hs | 244
-rw-r--r-- users/Profpatsch/htmx-experiment/src/ValidationParseT.hs | 40
-rw-r--r-- users/Profpatsch/httzip/Httzip.hs | 66
-rw-r--r-- users/Profpatsch/httzip/default.nix | 40
-rw-r--r-- users/Profpatsch/httzip/httzip.cabal | 73
-rw-r--r-- users/Profpatsch/ical-smolify/IcalSmolify.hs | 124
-rw-r--r-- users/Profpatsch/ical-smolify/README.md | 5
-rw-r--r-- users/Profpatsch/ical-smolify/default.nix | 23
-rw-r--r-- users/Profpatsch/ical-smolify/ical-smolify.cabal | 18
-rw-r--r-- users/Profpatsch/imap-idle.nix | 17
-rw-r--r-- users/Profpatsch/imap-idle.rs | 140
-rw-r--r-- users/Profpatsch/importDhall.nix | 93
-rw-r--r-- users/Profpatsch/ini/default.nix | 6
-rw-r--r-- users/Profpatsch/ini/ini.dhall | 36
-rw-r--r-- users/Profpatsch/jaeger.nix | 46
-rw-r--r-- users/Profpatsch/jbovlaste-sqlite/JbovlasteSqlite.hs | 389
-rw-r--r-- users/Profpatsch/jbovlaste-sqlite/default.nix | 33
-rw-r--r-- users/Profpatsch/jbovlaste-sqlite/jbovlaste-sqlite.cabal | 76
-rw-r--r-- users/Profpatsch/lens.nix | 137
-rw-r--r-- users/Profpatsch/lib.nix | 108
-rw-r--r-- users/Profpatsch/lorri-wait-for-eval/LorriWaitForEval.hs | 173
-rw-r--r-- users/Profpatsch/lorri-wait-for-eval/README.md | 7
-rw-r--r-- users/Profpatsch/lorri-wait-for-eval/default.nix | 20
-rw-r--r-- users/Profpatsch/mailbox-org/MailboxOrg.hs | 523
-rw-r--r-- users/Profpatsch/mailbox-org/README.md | 7
-rw-r--r-- users/Profpatsch/mailbox-org/default.nix | 38
-rw-r--r-- users/Profpatsch/mailbox-org/mailbox-org.cabal | 95
-rw-r--r-- users/Profpatsch/mailbox-org/src/AesonQQ.hs | 24
-rw-r--r-- users/Profpatsch/my-prelude/README.md | 42
-rw-r--r-- users/Profpatsch/my-prelude/default.nix | 52
-rw-r--r-- users/Profpatsch/my-prelude/my-prelude.cabal | 121
-rw-r--r-- users/Profpatsch/my-prelude/src/Aeson.hs | 176
-rw-r--r-- users/Profpatsch/my-prelude/src/Arg.hs | 34
-rw-r--r-- users/Profpatsch/my-prelude/src/AtLeast.hs | 51
-rw-r--r-- users/Profpatsch/my-prelude/src/MyPrelude.hs | 776
-rw-r--r-- users/Profpatsch/my-prelude/src/Parse.hs | 174
-rw-r--r-- users/Profpatsch/my-prelude/src/Postgres/Decoder.hs | 94
-rw-r--r-- users/Profpatsch/my-prelude/src/Postgres/MonadPostgres.hs | 931
-rw-r--r-- users/Profpatsch/my-prelude/src/Pretty.hs | 108
-rw-r--r-- users/Profpatsch/my-prelude/src/Seconds.hs | 55
-rw-r--r-- users/Profpatsch/my-prelude/src/Test.hs | 115
-rw-r--r-- users/Profpatsch/my-prelude/src/Tool.hs | 75
-rw-r--r-- users/Profpatsch/my-prelude/src/ValidationParseT.hs | 16
-rw-r--r-- users/Profpatsch/my-webstuff/default.nix | 27
-rw-r--r-- users/Profpatsch/my-webstuff/my-webstuff.cabal | 72
-rw-r--r-- users/Profpatsch/my-webstuff/src/Multipart2.hs | 220
-rw-r--r-- users/Profpatsch/my-xmonad/Xmonad.hs | 127
-rw-r--r-- users/Profpatsch/my-xmonad/default.nix | 25
-rw-r--r-- users/Profpatsch/my-xmonad/my-xmonad.cabal | 62
-rw-r--r-- users/Profpatsch/netencode/Netencode.hs | 433
-rw-r--r-- users/Profpatsch/netencode/Netencode/Parse.hs | 102
-rw-r--r-- users/Profpatsch/netencode/README.md | 133
-rw-r--r-- users/Profpatsch/netencode/default.nix | 184
-rw-r--r-- users/Profpatsch/netencode/gen.nix | 73
-rw-r--r-- users/Profpatsch/netencode/netencode-mustache.rs | 52
-rw-r--r-- users/Profpatsch/netencode/netencode.cabal | 74
-rw-r--r-- users/Profpatsch/netencode/netencode.rs | 969
-rw-r--r-- users/Profpatsch/netencode/pretty.rs | 163
-rw-r--r-- users/Profpatsch/netstring/README.md | 18
-rw-r--r-- users/Profpatsch/netstring/default.nix | 64
-rw-r--r-- users/Profpatsch/netstring/tests/default.nix | 64
-rw-r--r-- users/Profpatsch/nix-home/README.md | 7
-rw-r--r-- users/Profpatsch/nix-home/default.nix | 212
-rw-r--r-- users/Profpatsch/nix-tools.nix | 159
-rw-r--r-- users/Profpatsch/openlab-tools/Main.hs | 6
-rw-r--r-- users/Profpatsch/openlab-tools/default.nix | 69
-rw-r--r-- users/Profpatsch/openlab-tools/openlab-tools.cabal | 111
-rw-r--r-- users/Profpatsch/openlab-tools/src/OpenlabTools.hs | 551
-rw-r--r-- users/Profpatsch/read-http.nix | 19
-rw-r--r-- users/Profpatsch/read-http.rs | 249
-rw-r--r-- users/Profpatsch/reverse-haskell-deps/README.md | 3
-rw-r--r-- users/Profpatsch/reverse-haskell-deps/ReverseHaskellDeps.hs | 76
-rw-r--r-- users/Profpatsch/reverse-haskell-deps/default.nix | 32
-rw-r--r-- users/Profpatsch/reverse-haskell-deps/reverse-haskell-deps.cabal | 16
-rw-r--r-- users/Profpatsch/shell.nix | 110
-rw-r--r-- users/Profpatsch/shortcuttable/default.nix | 172
-rw-r--r-- users/Profpatsch/sync-abfall-ics-aichach-friedberg/README.md | 3
-rw-r--r-- users/Profpatsch/sync-abfall-ics-aichach-friedberg/default.nix | 31
-rw-r--r-- users/Profpatsch/sync-abfall-ics-aichach-friedberg/ics-to-caldav.dhall | 139
-rw-r--r-- users/Profpatsch/sync-abfall-ics-aichach-friedberg/sync-ics-to-dir.py | 133
-rw-r--r-- users/Profpatsch/tagtime/README.md | 18
-rw-r--r-- users/Profpatsch/toINI.nix | 79
-rw-r--r-- users/Profpatsch/tree-sitter.nix | 209
-rw-r--r-- users/Profpatsch/whatcd-resolver/Main.hs | 6
-rw-r--r-- users/Profpatsch/whatcd-resolver/README.md | 21
-rw-r--r-- users/Profpatsch/whatcd-resolver/build.ninja | 20
-rw-r--r-- users/Profpatsch/whatcd-resolver/default.nix | 76
-rw-r--r-- users/Profpatsch/whatcd-resolver/notes.org | 48
-rw-r--r-- users/Profpatsch/whatcd-resolver/server-notes.org | 2
-rw-r--r-- users/Profpatsch/whatcd-resolver/services/.gitignore | 3
-rwxr-xr-x users/Profpatsch/whatcd-resolver/services/jaeger/run | 3
-rwxr-xr-x users/Profpatsch/whatcd-resolver/services/reverse-proxy/run | 2
-rw-r--r-- users/Profpatsch/whatcd-resolver/src/AppT.hs | 150
-rw-r--r-- users/Profpatsch/whatcd-resolver/src/Html.hs | 69
-rw-r--r-- users/Profpatsch/whatcd-resolver/src/Http.hs | 129
-rw-r--r-- users/Profpatsch/whatcd-resolver/src/JsonLd.hs | 138
-rw-r--r-- users/Profpatsch/whatcd-resolver/src/Optional.hs | 18
-rw-r--r-- users/Profpatsch/whatcd-resolver/src/Redacted.hs | 540
-rw-r--r-- users/Profpatsch/whatcd-resolver/src/Transmission.hs | 306
-rw-r--r-- users/Profpatsch/whatcd-resolver/src/WhatcdResolver.hs | 714
-rw-r--r-- users/Profpatsch/whatcd-resolver/whatcd-resolver.cabal | 125
-rw-r--r-- users/Profpatsch/writers/default.nix | 120
-rw-r--r-- users/Profpatsch/writers/tests/default.nix | 56
-rw-r--r-- users/Profpatsch/ytextr/README.md | 5
-rw-r--r-- users/Profpatsch/ytextr/create-symlink-farm.nix | 19
-rw-r--r-- users/Profpatsch/ytextr/default.nix | 82
174 files changed, 17649 insertions, 3 deletions
diff --git a/users/Profpatsch/.envrc b/users/Profpatsch/.envrc
new file mode 100644
index 0000000000..c91f923756
--- /dev/null
+++ b/users/Profpatsch/.envrc
@@ -0,0 +1,5 @@
+if pass apps/declib/mastodon_access_token >/dev/null; then
+    export DECLIB_MASTODON_ACCESS_TOKEN=$(pass apps/declib/mastodon_access_token)
+fi
+
+eval "$(lorri direnv)"
diff --git a/users/Profpatsch/.gitignore b/users/Profpatsch/.gitignore
new file mode 100644
index 0000000000..c33954f53a
--- /dev/null
+++ b/users/Profpatsch/.gitignore
@@ -0,0 +1 @@
+dist-newstyle/
diff --git a/users/Profpatsch/.hlint.yaml b/users/Profpatsch/.hlint.yaml
new file mode 100644
index 0000000000..f00f78c525
--- /dev/null
+++ b/users/Profpatsch/.hlint.yaml
@@ -0,0 +1,357 @@
+# HLint configuration file
+# https://github.com/ndmitchell/hlint
+# Run `hlint --default` to see the example configuration file.
+##########################
+
+# WARNING: These need to be synced with the default-extensions field
+# in the cabal file.
+- arguments: [-XGHC2021, -XOverloadedRecordDot]
+
+# Ignore some builtin hints
+
+# often functions are more readable with explicit arguments
+- ignore: { name: Eta reduce }
+
+# these redundancy warnings are just completely irrelevant
+- ignore: { name: Redundant bracket }
+- ignore: { name: Move brackets to avoid $ }
+- ignore: { name: Redundant $ }
+- ignore: { name: Redundant do }
+- ignore: { name: Redundant multi-way if }
+
+# allow case-matching on bool, because why not
+- ignore: { name: Use if }
+
+# hlint cannot distinguish actual newtypes from data types
+# that accidentally have only one field
+# (but might have more in the future).
+# Since it’s a mostly irrelevant runtime optimization, we don’t care.
+- ignore: { name: Use newtype instead of data }
+
+# these lead to harder-to-read/more implicit code
+- ignore: { name: Use fmap }
+- ignore: { name: Use <$> }
+- ignore: { name: Use tuple-section }
+- ignore: { name: Use forM_ }
+- ignore: { name: Functor law }
+# fst and snd are usually a code smell and should be explicit matches, naming the ignored side with an `_`-prefixed binding.
+- ignore: { name: Use fst }
+- ignore: { name: Use snd }
+- ignore: { name: Use fromMaybe }
+- ignore: { name: Use const }
+- ignore: { name: Replace case with maybe }
+- ignore: { name: Replace case with fromMaybe }
+- ignore: { name: Avoid lambda }
+- ignore: { name: Avoid lambda using `infix` }
+- ignore: { name: Use curry }
+- ignore: { name: Use uncurry }
+- ignore: { name: Use first }
+- ignore: { name: Redundant first }
+- ignore: { name: Use second }
+- ignore: { name: Use bimap }
+# just use `not x`
+- ignore: { name: Use unless }
+- ignore: { name: Redundant <&> }
+
+# list comprehensions are a seldom-used part of the Haskell language
+# and they introduce syntactic overhead that is usually not worth the conciseness
+- ignore: { name: Use list comprehension }
+
+# Seems to be buggy in cases
+- ignore: { name: Use section }
+
+# multiple maps in a row are usually used for clarity,
+# and the compiler will optimize them away, thank you very much.
+- ignore: { name: Use map once }
+- ignore: { name: Fuse foldr/map }
+- ignore: { name: Fuse traverse/map }
+- ignore: { name: Fuse traverse_/map }
+- ignore: { name: Fuse traverse/<$> }
+
+# this is silly, why would I use a special function if I can just (heh) `== Nothing`
+- ignore: { name: Use isNothing }
+
+# The duplication heuristic is not very smart
+# and more annoying than helpful.
+# see https://github.com/ndmitchell/hlint/issues/1009
+- ignore: { name: Reduce duplication }
+
+# Stops the pattern match trick
+- ignore: { name: Use record patterns }
+- ignore: { name: Use null }
+- ignore: { name: Use uncurry }
+
+# we don’t want void, see below
+- ignore: { name: Use void }
+
+- functions:
+    # disallow Enum instance functions, they are partial
+    - name: Prelude.succ
+      within: [Relude.Extra.Enum]
+      message: "Dangerous, will fail for highest element"
+    - name: Prelude.pred
+      within: [Relude.Extra.Enum]
+      message: "Dangerous, will fail for lowest element"
+    - name: Prelude.toEnum
+      within: []
+      message: "Extremely partial"
+    - name: Prelude.fromEnum
+      within: []
+      message: "Dangerous for most uses"
+    - name: Prelude.enumFrom
+      within: []
+    - name: Prelude.enumFromThen
+      within: []
+    - name: Prelude.enumFromThenTo
+      within: []
+    - name: Prelude.boundedEnumFrom
+      within: []
+    - name: Prelude.boundedEnumFromThen
+      within: []
+
+    - name: Text.Read.readMaybe
+      within:
+        # The BSON ObjectId depends on Read for parsing
+        - Milkmap.Milkmap
+        - Milkmap.FieldData.Value
+      message: "`readMaybe` is probably not what you want for parsing values, please use the `FieldParser` module."
+
+    # `void` discards its argument and is polymorphic,
+    # thus making it brittle in the face of code changes.
+    # (see https://tech.freckle.com/2020/09/23/void-is-a-smell/)
+    # Use an explicit `_ <- …` instead.
+    - name: Data.Functor.void
+      within: []
+      message: "`void` leads to bugs. Use an explicit `_ <- …` instead"
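+      # e.g. instead of `void (sendMail req)`, write `_response <- sendMail req`
+      # (`sendMail`/`_response` are illustrative names; the point is naming what you ignore)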
+
+    - name: Data.Foldable.length
+      within: ["MyPrelude"]
+      message: "`Data.Foldable.length` is dangerous to use, because it also works on types you wouldn’t expect, like `length (3,4) == 1` and `length (Just 2) == 1`. Use the `length` function for your specific type instead, for example `List.length` or `Map.length`."
+
+    - name: Prelude.length
+      within: ["MyPrelude"]
+      message: "`Prelude.length` is dangerous to use, because it also works on types you wouldn’t expect, like `length (3,4) == 1` and `length (Just 2) == 1`. Use the `length` function for your specific type instead, for example `List.length` or `Map.length`."
+
+    # Using an explicit lambda with its argument “underscored”
+    # is more clear in every case.
+    # e.g. `const True` => `\_request -> True`
+    # shows the reader that the ignored argument was a request.
+    - name: Prelude.const
+      within: []
+      message: "Replace `const` with an explicit lambda with type annotation for code clarity and type safety, e.g.: `const True` => `\\(_ :: Request) -> True`. If you really don’t want to spell out the type (which might lead to bugs!), you can also use something like `\_request -> True`."
+
+    - name: Data.List.nub
+      within: []
+      message: "O(n²), use `Data.Containers.ListUtils.nubOrd"
+
+    - name: Prelude.maximum
+      within: []
+      message: "`maximum` crashes on empty list; use non-empty lists and `maximum1`"
+
+    - name: Data.List.maximum
+      within: []
+      message: "`maximum` crashes on empty list; use non-empty lists and `maximum1`"
+
+    - name: Prelude.minimum
+      within: []
+      message: "`minimum` crashes on empty list; use non-empty lists and `minimum1`"
+
+    - name: Data.List.minimum
+      within: []
+      message: "`minimum` crashes on empty list; use non-empty lists and `minimum1`"
+
+    - name: Data.Foldable.maximum
+      within: []
+      message: "`maximum` crashes on empty foldable stucture; use Foldable1 and `maximum1`."
+
+    - name: Data.Foldable.minimum
+      within: []
+      message: "`minimum` crashes on empty foldable stucture; use Foldable1 and `minimum1`."
+
+    # Using prelude functions instead of stdlib functions
+
+    - name: "Data.Text.Encoding.encodeUtf8"
+      within: ["MyPrelude"]
+      message: "Use `textToBytesUtf8`"
+
+    - name: "Data.Text.Lazy.Encoding.encodeUtf8"
+      within: ["MyPrelude"]
+      message: "Use `textToBytesUtf8Lazy`"
+
+    - name: "Data.Text.Encoding.decodeUtf8'"
+      within: ["MyPrelude"]
+      message: "Use `bytesToTextUtf8`"
+
+    - name: "Data.Text.Encoding.Lazy.decodeUtf8'"
+      within: ["MyPrelude"]
+      message: "Use `bytesToTextUtf8Lazy`"
+
+    - name: "Data.Text.Encoding.decodeUtf8"
+      within: ["MyPrelude"]
+      message: "Either check for errors with `bytesToTextUtf8`, decode leniently with unicode replacement characters with `bytesToTextUtf8Lenient` or use the crashing version `bytesToTextUtf8Unsafe` (discouraged)."
+
+    - name: "Data.Text.Encoding.Lazy.decodeUtf8"
+      within: ["MyPrelude"]
+      message: "Either check for errors with `bytesToTextUtf8Lazy`, decode leniently with unicode replacement characters with `bytesToTextUtf8LenientLazy` or use the crashing version `bytesToTextUtf8UnsafeLazy` (discouraged)."
+
+    - name: "Data.Text.Lazy.toStrict"
+      within: ["MyPrelude"]
+      message: "Use `toStrict`"
+
+    - name: "Data.Text.Lazy.fromStrict"
+      within: ["MyPrelude"]
+      message: "Use `toLazy`"
+
+    - name: "Data.ByteString.Lazy.toStrict"
+      within: ["MyPrelude"]
+      message: "Use `toStrictBytes`"
+
+    - name: "Data.ByteString.Lazy.fromStrict"
+      within: ["MyPrelude"]
+      message: "Use `toLazyBytes`"
+
+    - name: "Data.Text.unpack"
+      within: ["MyPrelude"]
+      message: "Use `textToString`"
+
+    - name: "Data.Text.pack"
+      within: ["MyPrelude"]
+      message: "Use `stringToText`"
+
+    - name: "Data.Maybe.listToMaybe"
+      within: []
+      message: |
+        `listToMaybe` throws away everything but the first element of a list (it is essentially `safeHead`).
+        If that is what you want, please use a pattern match like
+
+        ```
+        case xs of
+          [] -> …
+          (x:_) -> …
+        ```
+
+    - name: "Data.List.head"
+      within: []
+      message: |
+        `List.head` fails on an empty list. I didn’t think I’d have to say this, but please use a pattern match on the list, like:
+
+        ```
+        case xs of
+          [] -> … error handling …
+          (x:_) -> …
+        ```
+
+        Also think about why the rest of the list should be ignored.
+
+    - name: "Prelude.head"
+      within: []
+      message: |
+        `Prelude.head` fails on an empty list. I didn’t think I’d have to say this, but please use a pattern match on the list, like:
+
+        ```
+        case xs of
+          [] -> … error handling …
+          (x:_) -> …
+        ```
+
+        Also think about why the rest of the list should be ignored.
+
+    - name: "Data.Maybe.fromJust"
+      within: []
+      message: |
+        `Maybe.fromJust` is obviously partial. Please use a pattern match.
+
+        In case you actually want to throw an error on a `Nothing`,
+        please add an error message, like so:
+
+        ```
+        myMaybe & annotate "my error message" & unwrapError
+        ```
+
+        If you are in `IO`, use `unwrapIOError` instead,
+        or throw a monad-specific error.
+
+    - name: "Data.Either.fromLeft"
+      within: []
+      message: |
+        `Either.fromLeft` is obviously partial. Please use a pattern match.
+
+    - name: "Data.Either.fromRight"
+      within: []
+      message: |
+        `Either.fromRight` is obviously partial. Please use a pattern match.
+
+# Make restricted functions into an error if found
+- error: { name: "Avoid restricted function, see comment in .hlint.yaml" }
+
+# Some functions that have (more modern) aliases.
+# They are not dangerous per se,
+# but we want to make it easier to read our code, so we should
+# make sure we don’t use too many things that are just renames.
+
+- hint:
+    lhs: "undefined"
+    rhs: "todo"
+    note: "`undefined` is a silent error, `todo` will display a warning as long as it exists in the code."
+
+- hint:
+    lhs: "return"
+    rhs: "pure"
+    note: "Use `pure` from `Applicative` instead, it’s the exact same function."
+
+- hint:
+    lhs: "mapM"
+    rhs: "traverse"
+    note: "Use `traverse` from `Traversable` instead. It’s the exact same function."
+
+- hint:
+    lhs: "mapM_"
+    rhs: "traverse_"
+    note: "Use `traverse_` from `Traversable` instead. It’s the exact same function."
+
+- hint:
+    lhs: "forM"
+    rhs: "for"
+    note: "Use `for` from `Traversable` instead. It’s the exact same function."
+
+- hint:
+    lhs: "forM_"
+    rhs: "for_"
+    note: "Use `for_` from `Traversable` instead. It’s the exact same function."
+
+- hint:
+    lhs: "stringToText (show x)"
+    rhs: "showToText x"
+
+- hint:
+    lhs: "Data.Set.toList (Data.Set.fromList x)"
+    rhs: "List.nubOrd x"
+    note: "`nubOrd` removes duplicate elements from a list."
+
+- modules:
+    # Disallowed Modules
+    - name: Data.Map
+      within: []
+      message: "Lazy maps leak space, use `import Data.Map.Strict as Map` instead"
+    - name: Control.Monad.Writer
+      within: []
+      message: "Lazy writers leak space, use `Control.Monad.Trans.Writer.CPS` instead"
+    - name: Control.Monad.Trans.Writer.Lazy
+      within: []
+      message: "Lazy writers leak space, use `Control.Monad.Trans.Writer.CPS` instead"
+    - name: Control.Monad.Trans.Writer.Strict
+      within: []
+      message: "Even strict writers leak space, use `Control.Monad.Trans.Writer.CPS` instead"
+
+    # Qualified module imports
+    - { name: Data.Map.Strict, as: Map }
+    - { name: Data.HashMap.Strict, as: HashMap }
+    - { name: Data.Set, as: Set }
+    - { name: Data.ByteString.Char8, as: Char8 }
+    - { name: Data.ByteString.Lazy.Char8, as: Char8.Lazy }
+    - { name: Data.Text, as: Text }
+    - { name: Data.Vector, as: Vector }
+    - { name: Data.Vault.Lazy, as: Vault }
+    - { name: Data.Aeson, as: Json }
+    - { name: Data.Aeson.Types, as: Json }
+    - { name: Data.Aeson.BetterErrors, as: Json }
diff --git a/users/Profpatsch/.vscode/launch.json b/users/Profpatsch/.vscode/launch.json
new file mode 100644
index 0000000000..baa087d437
--- /dev/null
+++ b/users/Profpatsch/.vscode/launch.json
@@ -0,0 +1,18 @@
+{
+    // Use IntelliSense to learn about possible attributes.
+    // Hover to view descriptions of existing attributes.
+    // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
+    "version": "0.2.0",
+    "configurations": [
+        {
+            "name": "run declib",
+            "type": "node",
+            "cwd": "${workspaceFolder}/declib",
+            "request": "launch",
+            "runtimeExecutable": "ninja",
+            "runtimeArgs": [
+                "run",
+            ],
+        }
+    ]
+}
diff --git a/users/Profpatsch/.vscode/settings.json b/users/Profpatsch/.vscode/settings.json
new file mode 100644
index 0000000000..7984076c16
--- /dev/null
+++ b/users/Profpatsch/.vscode/settings.json
@@ -0,0 +1,25 @@
+{
+    "sqltools.connections": [
+        {
+            "previewLimit": 50,
+            "driver": "SQLite",
+            "name": "cas-serve",
+            "database": "${workspaceFolder:Profpatsch}/cas-serve/data.sqlite"
+        }
+    ],
+    "sqltools.useNodeRuntime": true,
+    "editor.formatOnSave": true,
+    "[typescript]": {
+        "editor.defaultFormatter": "esbenp.prettier-vscode"
+    },
+    "[javascript]": {
+        "editor.defaultFormatter": "esbenp.prettier-vscode"
+    },
+    "[json]": {
+        "editor.defaultFormatter": "esbenp.prettier-vscode"
+    },
+    "purescript.codegenTargets": [
+        "corefn"
+    ],
+    "purescript.foreignExt": "nix"
+}
diff --git a/users/Profpatsch/OWNERS b/users/Profpatsch/OWNERS
index a907667374..ac23e72256 100644
--- a/users/Profpatsch/OWNERS
+++ b/users/Profpatsch/OWNERS
@@ -1,3 +1,4 @@
-inherited: false
-owners:
-  - Profpatsch
+set noparent
+
+Profpatsch
+sterni
diff --git a/users/Profpatsch/README.md b/users/Profpatsch/README.md
new file mode 100644
index 0000000000..5bb74cd758
--- /dev/null
+++ b/users/Profpatsch/README.md
@@ -0,0 +1,10 @@
+# Profpatsch’s assemblage of peculiarities and curiosities
+
+Welcome, Welcome.
+
+Welcome to my user dir, where we optimize f*** around, in order to optimize finding out.
+
+![fafo graph](./fafo.jpg)
+
+DISCLAIMER: All of this code is of the “do not try at work” sort, unless noted otherwise.
+You might try at home, however. Get inspired or get grossed out, whichever you like.
diff --git a/users/Profpatsch/advent-of-code/2020/01/main.py b/users/Profpatsch/advent-of-code/2020/01/main.py
new file mode 100644
index 0000000000..e636017a54
--- /dev/null
+++ b/users/Profpatsch/advent-of-code/2020/01/main.py
@@ -0,0 +1,22 @@
+import sys
+
+l = []
+with open('./input', 'r') as f:
+    for line in f:
+        l.append(int(line))
+
+s = set(l)
+
+res=None
+for el in s:
+    for el2 in s:
+        if (2020-(el+el2)) in s:
+            res=(el, el2, 2020-(el+el2))
+            break
+
+if res is None:
+    sys.exit("could not find a number that adds to 2020")
+
+print(res)
+
+print(res[0] * res[1] * res[2])
diff --git a/users/Profpatsch/advent-of-code/2020/02/main.py b/users/Profpatsch/advent-of-code/2020/02/main.py
new file mode 100644
index 0000000000..e3b27c382a
--- /dev/null
+++ b/users/Profpatsch/advent-of-code/2020/02/main.py
@@ -0,0 +1,77 @@
+import sys
+
+def parse(line):
+    a = line.split(sep=" ", maxsplit=1)
+    assert len(a) == 2
+    fromto = a[0].split(sep="-")
+    assert len(fromto) == 2
+    (from_, to) = (int(fromto[0]), int(fromto[1]))
+    charpass = a[1].split(sep=": ")
+    assert len(charpass) == 2
+    char = charpass[0]
+    assert len(char) == 1
+    pass_ = charpass[1]
+    assert pass_.endswith("\n")
+    pass_ = pass_[:-1]
+    return {
+        "from": from_,
+        "to": to,
+        "char": char,
+        "pass": pass_
+    }
+
+def char_in_pass(char, pass_):
+    return pass_.count(char)
+
+def validate_01(entry):
+    no = char_in_pass(entry["char"], entry["pass"])
+    if no < entry["from"]:
+        return { "too-small": entry }
+    elif no > entry["to"]:
+        return { "too-big": entry }
+    else:
+        return { "ok": entry }
+
+def char_at_pos(char, pos, pass_):
+    assert pos <= len(pass_)
+    return pass_[pos-1] == char
+
+def validate_02(entry):
+    one = char_at_pos(entry["char"], entry["from"], entry["pass"])
+    two = char_at_pos(entry["char"], entry["to"], entry["pass"])
+    if one and two:
+        return { "both": entry }
+    elif one:
+        return { "one": entry }
+    elif two:
+        return { "two": entry }
+    else:
+        return { "none": entry }
+
+
+res01 = []
+res02 = []
+with open("./input", 'r') as f:
+    for line in f:
+        p = parse(line)
+        res01.append(validate_01(p))
+        res02.append(validate_02(p))
+
+count01=0
+for r in res01:
+    print(r)
+    if r.get("ok", False):
+        count01=count01+1
+
+count02=0
+for r in res02:
+    print(r)
+    if r.get("one", False):
+        count02=count02+1
+    elif r.get("two", False):
+        count02=count02+1
+    else:
+        pass
+
+print("count 1: {}".format(count01))
+print("count 2: {}".format(count02))
diff --git a/users/Profpatsch/advent-of-code/2020/03/main.py b/users/Profpatsch/advent-of-code/2020/03/main.py
new file mode 100644
index 0000000000..4d6baf946c
--- /dev/null
+++ b/users/Profpatsch/advent-of-code/2020/03/main.py
@@ -0,0 +1,66 @@
+import itertools
+import math
+
+def tree_line(init):
+    return {
+        "init-len": len(init),
+        "known": '',
+        "rest": itertools.repeat(init)
+    }
+
+def tree_line_at(pos, tree_line):
+    needed = (pos + 1) - len(tree_line["known"])
+    # internally advance the tree line to the position requested
+    if needed > 0:
+        tree_line["known"] = tree_line["known"] \
+          + ''.join(
+            itertools.islice(
+                tree_line["rest"],
+                1+math.floor(needed / tree_line["init-len"])))
+    # print(tree_line)
+    return tree_line["known"][pos] == '#'
+
+def tree_at(linepos, pos, trees):
+    return tree_line_at(pos, trees[linepos])
+
+def slope_positions(trees, right, down):
+    line = 0
+    pos = 0
+    while line < len(trees):
+        yield (line, pos)
+        line = line + down
+        pos = pos + right
+
+trees = []
+with open("./input", 'r') as f:
+    for line in f:
+        line = line.rstrip()
+        trees.append(tree_line(line))
+
+# print(list(itertools.islice(trees[0], 5)))
+# print(list(map(
+#     lambda x: tree_at(0, x, trees),
+#     range(100)
+# )))
+# print(list(slope_positions(trees, right=3, down=1)))
+
+def count_slope_positions(trees, slope):
+    count = 0
+    for (line, pos) in slope:
+        if tree_at(line, pos, trees):
+            count = count + 1
+    return count
+
+print(
+        count_slope_positions(trees, slope_positions(trees, right=1, down=1))
+    *
+        count_slope_positions(trees, slope_positions(trees, right=3, down=1))
+    *
+        count_slope_positions(trees, slope_positions(trees, right=5, down=1))
+    *
+        count_slope_positions(trees, slope_positions(trees, right=7, down=1))
+    *
+        count_slope_positions(trees, slope_positions(trees, right=1, down=2))
+)
+
+# I realized I could have just used a modulo instead …
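+#
+# Sketch of that modulo version (untested), if trees stored the raw line strings:
+#
+#   def tree_at(linepos, pos, lines):
+#       line = lines[linepos]
+#       return line[pos % len(line)] == '#'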
diff --git a/users/Profpatsch/advent-of-code/2020/04/main.py b/users/Profpatsch/advent-of-code/2020/04/main.py
new file mode 100644
index 0000000000..36bbed7146
--- /dev/null
+++ b/users/Profpatsch/advent-of-code/2020/04/main.py
@@ -0,0 +1,104 @@
+import sys
+import itertools
+import re
+import pprint
+
+def get_entry(fd):
+    def to_dict(keyval):
+        res = {}
+        for (k, v) in keyval:
+            assert k not in res
+            res[k] = v
+        return res
+
+    res = []
+    for line in fd:
+        if line == "\n":
+            yield to_dict(res)
+            res = []
+        else:
+            line = line.rstrip()
+            items = line.split(" ")
+            for i in items:
+                res.append(i.split(":", maxsplit=1))
+
+def val_hgt(hgt):
+    m = re.fullmatch(r'([0-9]+)(cm|in)', hgt)
+    if m:
+        (i, what) = m.group(1,2)
+        i = int(i)
+        if what == "cm":
+            return i >= 150 and i <= 193
+        elif what == "in":
+            return i >= 59 and i <= 76
+        else:
+            return False
+
+required_fields = [
+    { "name": "byr",
+      "check": lambda s: int(s) >= 1920 and int(s) <= 2002
+    },
+    { "name": "iyr",
+      "check": lambda s: int(s) >= 2010 and int(s) <= 2020
+    },
+    { "name": "eyr",
+      "check": lambda s: int(s) >= 2020 and int(s) <= 2030,
+    },
+    { "name": "hgt",
+      "check": lambda s: val_hgt(s)
+    },
+    { "name": "hcl",
+      "check": lambda s: re.fullmatch(r'#[0-9a-f]{6}', s)
+    },
+    { "name": "ecl",
+      "check": lambda s: re.fullmatch(r'amb|blu|brn|gry|grn|hzl|oth', s)
+    },
+    { "name": "pid",
+      "check": lambda s: re.fullmatch(r'[0-9]{9}', s)
+    },
+    # we should treat it as not required
+    # "cid"
+]
+
+required_dict = {}
+for f in required_fields:
+    required_dict[f["name"]] = f
+
+def validate(keyval):
+    if keyval[0] not in required_dict:
+        return { "ok": keyval }
+    if required_dict[keyval[0]]["check"](keyval[1]):
+        return { "ok": keyval }
+    else:
+        return { "validation": keyval }
+
+def all_fields(entry):
+    missing = []
+    for r in required_dict:
+        if r not in entry:
+            missing.append(r)
+    if missing == []:
+        return { "ok": entry }
+    else:
+        return { "missing": missing }
+
+count=0
+for e in get_entry(sys.stdin):
+    a = all_fields(e)
+    if a.get("ok", False):
+        res = {}
+        bad = False
+        for keyval in e.items():
+            r = validate(keyval)
+            if r.get("validation", False):
+                bad = True
+            res[keyval[0]] = r
+        if bad:
+            pprint.pprint({ "validation": res })
+        else:
+            pprint.pprint({ "ok": e })
+            count = count+1
+    else:
+        pprint.pprint(a)
+
+print(count)
diff --git a/users/Profpatsch/alacritty.nix b/users/Profpatsch/alacritty.nix
new file mode 100644
index 0000000000..d3461c4aad
--- /dev/null
+++ b/users/Profpatsch/alacritty.nix
@@ -0,0 +1,27 @@
+{ depot, pkgs, lib, ... }:
+
+let
+  bins = depot.nix.getBins pkgs.alacritty [ "alacritty" ];
+
+  config =
+    {
+      alacritty-config = { font.size = 18; scrolling.history = 100000; };
+      #  This disables the dpi-sensitive scaling (cause otherwise the font will be humongous on my laptop screen)
+      alacritty-env.WINIT_X11_SCALE_FACTOR = 1;
+    };
+
+
+  config-file = (pkgs.formats.toml { }).generate "alacritty.conf" config.alacritty-config;
+
+  alacritty = depot.nix.writeExecline "alacritty" { } (
+    (lib.concatLists (lib.mapAttrsToList (k: v: [ "export" k (toString v) ]) config.alacritty-env))
+    ++ [
+      bins.alacritty
+      "--config-file"
+      config-file
+      "$@"
+    ]
+  );
+
+in
+alacritty
diff --git a/users/Profpatsch/aliases.nix b/users/Profpatsch/aliases.nix
new file mode 100644
index 0000000000..109de8ce33
--- /dev/null
+++ b/users/Profpatsch/aliases.nix
@@ -0,0 +1,88 @@
+{ depot, pkgs, lib, ... }:
+
+let
+  bins = depot.nix.getBins pkgs.findutils [ "find" ];
+
+in
+depot.nix.readTree.drvTargets {
+
+  findia = depot.nix.writeExecline "findia"
+    {
+      readNArgs = 1;
+      # TODO: comment out, thanks to sterni blocking the runExecline change
+      # meta.description = ''
+      #   Find case-insensitive anywhere (globbing)
+
+      #   Usage: findia <pattern> <more find(1) arguments>
+      # '';
+    } [
+    bins.find
+    "-iname"
+    "*\${1}*"
+    "$@"
+  ];
+
+  findial = depot.nix.writeExecline "findial"
+    {
+      readNArgs = 1;
+      # TODO: comment out, thanks to sterni blocking the runExecline change
+      # meta.description = ''
+      #   Find case-insensitive anywhere (globbing), follow symlinks";
+
+      #   Usage: findial <pattern> <more find(1) arguments>
+      # '';
+    } [
+    bins.find
+    "-L"
+    "-iname"
+    "*\${1}*"
+    "$@"
+  ];
+
+  findian = depot.nix.writeExecline "findian"
+    {
+      readNArgs = 2;
+      # TODO: comment out, thanks to sterni blocking the runExecline change
+      # meta.description = ''
+      #   Find case-insensitive anywhere (globbing) in directory
+
+      #   Usage: findian <directory> <pattern> <more find(1) arguments>
+      # '';
+    } [
+    bins.find
+    "$1"
+    "-iname"
+    "*\${2}*"
+    "$@"
+  ];
+
+  findiap = depot.nix.writeExecline "findiap"
+    {
+      readNArgs = 2;
+      # TODO: comment out, thanks to sterni blocking the runExecline change
+      # meta.description = ''
+      #   Find case-insensitive anywhere (globbing) in directory, the pattern allows for paths.
+
+      #   Usage: findiap <directory> <pattern> <more find(1) arguments>
+      # '';
+    } [
+    bins.find
+    "$1"
+    "-ipath"
+    "*\${2}*"
+    "$@"
+  ];
+
+  bell = depot.nix.writeExecline "bell" { } [
+    "if"
+    [
+      "pactl"
+      "upload-sample"
+      "${pkgs.sound-theme-freedesktop}/share/sounds/freedesktop/stereo/complete.oga"
+      "bell-window-system"
+    ]
+    "pactl"
+    "play-sample"
+    "bell-window-system"
+  ];
+}
diff --git a/users/Profpatsch/arglib/ArglibNetencode.hs b/users/Profpatsch/arglib/ArglibNetencode.hs
new file mode 100644
index 0000000000..4531151ca2
--- /dev/null
+++ b/users/Profpatsch/arglib/ArglibNetencode.hs
@@ -0,0 +1,22 @@
+{-# LANGUAGE QuasiQuotes #-}
+
+module ArglibNetencode where
+
+import Data.Attoparsec.ByteString qualified as Atto
+import ExecHelpers
+import Label
+import Netencode qualified
+import PossehlAnalyticsPrelude
+import System.Posix.Env.ByteString qualified as ByteEnv
+
+arglibNetencode :: CurrentProgramName -> Maybe (Label "arglibEnvvar" Text) -> IO Netencode.T
+arglibNetencode progName mEnvvar = do
+  let envvar = mEnvvar <&> (.arglibEnvvar) & fromMaybe "ARGLIB_NETENCODE" & textToBytesUtf8
+  ByteEnv.getEnv envvar >>= \case
+    Nothing -> dieUserError progName [fmt|could not read args, envvar {envvar} not set|]
+    Just bytes ->
+      case Atto.parseOnly (Netencode.netencodeParser <* Atto.endOfInput) bytes of
+        Left err -> dieEnvironmentProblem progName [fmt|arglib parsing error: {err}|]
+        Right t -> do
+          ByteEnv.unsetEnv envvar
+          pure t
diff --git a/users/Profpatsch/arglib/arglib-netencode.cabal b/users/Profpatsch/arglib/arglib-netencode.cabal
new file mode 100644
index 0000000000..42b524f405
--- /dev/null
+++ b/users/Profpatsch/arglib/arglib-netencode.cabal
@@ -0,0 +1,65 @@
+cabal-version:      3.0
+name:               arglib-netencode
+version:            0.1.0.0
+author:             Profpatsch
+maintainer:         mail@profpatsch.de
+
+common common-options
+  ghc-options:
+      -Wall
+      -Wno-type-defaults
+      -Wunused-packages
+      -Wredundant-constraints
+      -fwarn-missing-deriving-strategies
+
+  -- See https://downloads.haskell.org/ghc/latest/docs/users_guide/exts.html
+  -- for a description of all these extensions
+  default-extensions:
+      -- Infer Applicative instead of Monad where possible
+    ApplicativeDo
+
+    -- Allow literal strings to be Text
+    OverloadedStrings
+
+    -- Syntactic sugar improvements
+    LambdaCase
+    MultiWayIf
+
+    -- Makes the (deprecated) usage of * instead of Data.Kind.Type an error
+    NoStarIsType
+
+    -- Convenient and crucial to deal with ambiguous field names, commonly
+    -- known as RecordDotSyntax
+    OverloadedRecordDot
+
+    -- does not export record fields as functions, use OverloadedRecordDot to access instead
+    NoFieldSelectors
+
+    -- Record punning
+    RecordWildCards
+
+    -- Improved Deriving
+    DerivingStrategies
+    DerivingVia
+
+    -- Type-level strings
+    DataKinds
+
+    -- to enable the `type` keyword in import lists (ormolu uses this automatically)
+    ExplicitNamespaces
+
+  default-language: GHC2021
+
+
+library
+    import: common-options
+    exposed-modules:          ArglibNetencode
+
+    build-depends:
+        base >=4.15 && <5,
+        pa-prelude,
+        pa-label,
+        netencode,
+        exec-helpers,
+        attoparsec,
+        unix
diff --git a/users/Profpatsch/arglib/netencode.nix b/users/Profpatsch/arglib/netencode.nix
new file mode 100644
index 0000000000..83a94ddd6c
--- /dev/null
+++ b/users/Profpatsch/arglib/netencode.nix
@@ -0,0 +1,81 @@
+{ depot, pkgs, lib, ... }:
+
+let
+
+  # Add the given nix arguments to the program as ARGLIB_NETENCODE envvar
+  #
+  # Calls `netencode.gen.dwim` on the provided nix args value.
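+  #
+  # e.g. (illustrative): `with-args "hello" { greeting = "World"; } ./hello-prog`
+  # wraps ./hello-prog so it sees the netencoded record in ARGLIB_NETENCODE.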
+  with-args = name: args: prog: depot.nix.writeExecline "${name}-with-args" { } [
+    "export"
+    "ARGLIB_NETENCODE"
+    (depot.users.Profpatsch.netencode.gen.dwim args)
+    prog
+  ];
+
+  rust = depot.nix.writers.rustSimpleLib
+    {
+      name = "arglib-netencode";
+      dependencies = [
+        depot.users.Profpatsch.execline.exec-helpers
+        depot.users.Profpatsch.netencode.netencode-rs
+      ];
+    } ''
+    extern crate netencode;
+    extern crate exec_helpers;
+
+    use netencode::{T};
+    use std::os::unix::ffi::OsStrExt;
+
+    pub fn arglib_netencode(prog_name: &str, env: Option<&std::ffi::OsStr>) -> T {
+        let env = match env {
+            None => std::ffi::OsStr::from_bytes("ARGLIB_NETENCODE".as_bytes()),
+            Some(a) => a
+        };
+        let t = match std::env::var_os(env) {
+            None => exec_helpers::die_user_error(prog_name, format!("could not read args, envvar {} not set", env.to_string_lossy())),
+            // TODO: good error handling for the different parser errors
+            Some(soup) => match netencode::parse::t_t(soup.as_bytes()) {
+                Ok((remainder, t)) => match remainder.is_empty() {
+                    true => t,
+                    false => exec_helpers::die_environment_problem(prog_name, format!("arglib: there were some unparsed bytes remaining: {:?}", remainder))
+                },
+                Err(err) => exec_helpers::die_environment_problem(prog_name, format!("arglib parsing error: {:?}", err))
+            }
+        };
+        std::env::remove_var(env);
+        t
+    }
+  '';
+
+  haskell = pkgs.haskellPackages.mkDerivation {
+    pname = "arglib-netencode";
+    version = "0.1.0";
+
+    src = depot.users.Profpatsch.exactSource ./. [
+      ./arglib-netencode.cabal
+      ./ArglibNetencode.hs
+    ];
+
+    libraryHaskellDepends = [
+      pkgs.haskellPackages.pa-prelude
+      pkgs.haskellPackages.pa-label
+      pkgs.haskellPackages.pa-error-tree
+      depot.users.Profpatsch.netencode.netencode-hs
+      depot.users.Profpatsch.execline.exec-helpers-hs
+    ];
+
+    isLibrary = true;
+    license = lib.licenses.mit;
+
+
+  };
+
+
+in
+depot.nix.readTree.drvTargets {
+  inherit
+    with-args
+    rust
+    haskell
+    ;
+}
diff --git a/users/Profpatsch/atomically-write.nix b/users/Profpatsch/atomically-write.nix
new file mode 100644
index 0000000000..c4d07cfbb1
--- /dev/null
+++ b/users/Profpatsch/atomically-write.nix
@@ -0,0 +1,29 @@
+{ depot, pkgs, ... }:
+# Atomically write a file (just `>` redirection in bash
+# empties a file even if the command crashes).
+#
+# Maybe there is an existing tool for that?
+# But it’s easy enough to implement.
+#
+# Example:
+#   atomically-write
+#     ./to
+#     echo "foo"
+#
+# will atomically write the string "foo" into ./to
+let
+  atomically-write = pkgs.writers.writeDash "atomically-write" ''
+    set -e
+    to=$1
+    shift
+    # assumes that the tempfile is on the same file system, (or in memory)
+    # for the `mv` at the end to be more-or-less atomic.
+    tmp=$(${pkgs.coreutils}/bin/mktemp -d)
+    trap 'rm -r "$tmp"' EXIT
+    "$@" \
+      > "$tmp/out"
+    mv "$tmp/out" "$to"
+  '';
+
+in
+atomically-write
diff --git a/users/Profpatsch/blog/README.md b/users/Profpatsch/blog/README.md
new file mode 100644
index 0000000000..0753ebdea5
--- /dev/null
+++ b/users/Profpatsch/blog/README.md
@@ -0,0 +1,7 @@
+# (Parts of) my website
+
+This is a part of https://profpatsch.de/, notably the blog posts.
+
+The other parts can be found in [vuizvui](https://github.com/openlab-aux/vuizvui/tree/master/pkgs/profpatsch/profpatsch.de). It’s a mess.
+
+And yes, this implements a webserver & routing engine with nix, execline & s6 utils. “Bis einer weint”, as we say in German.
diff --git a/users/Profpatsch/blog/default.nix b/users/Profpatsch/blog/default.nix
new file mode 100644
index 0000000000..f233eda9bb
--- /dev/null
+++ b/users/Profpatsch/blog/default.nix
@@ -0,0 +1,481 @@
+{ depot, pkgs, lib, ... }:
+
+let
+  bins = depot.nix.getBins pkgs.lowdown [ "lowdown" ]
+    // depot.nix.getBins pkgs.cdb [ "cdbget" "cdbmake" "cdbdump" ]
+    // depot.nix.getBins pkgs.coreutils [ "mv" "cat" "printf" "test" ]
+    // depot.nix.getBins pkgs.s6-networking [ "s6-tcpserver" ]
+    // depot.nix.getBins pkgs.time [ "time" ]
+  ;
+
+  # /
+  # TODO: use
+  toplevel = [
+    {
+      route = [ "notes" ];
+      name = "Notes";
+      page = { cssFile }: router cssFile;
+    }
+    {
+      route = [ "projects" ];
+      name = "Projects";
+      # page = projects;
+    }
+  ];
+
+  # /notes/*
+  notes = [
+    {
+      route = [ "notes" "private-trackers-are-markets" ];
+      name = "Private bittorrent trackers are markets";
+      page = { cssFile }: markdownToHtml {
+        name = "private-trackers-are-markets";
+        markdown = ./notes/private-trackers-are-markets.md;
+        inherit cssFile;
+      };
+    }
+    {
+      route = [ "notes" "an-idealized-conflang" ];
+      name = "An Idealized Configuration Language";
+      page = { cssFile }: markdownToHtml {
+        name = "an-idealized-conflang";
+        markdown = ./notes/an-idealized-conflang.md;
+        inherit cssFile;
+      };
+    }
+    {
+      route = [ "notes" "rust-string-conversions" ];
+      name = "Converting between different String types in Rust";
+      page = { cssFile }: markdownToHtml {
+        name = "rust-string-conversions";
+        markdown = ./notes/rust-string-conversions.md;
+        inherit cssFile;
+      };
+    }
+    {
+      route = [ "notes" "preventing-oom" ];
+      name = "Preventing out-of-memory (OOM) errors on Linux";
+      page = { cssFile }: markdownToHtml {
+        name = "preventing-oom";
+        markdown = ./notes/preventing-oom.md;
+        inherit cssFile;
+      };
+    }
+  ];
+
+  projects = [
+    {
+      name = "lorri";
+      description = "<code>nix-shell</code> replacement for projects";
+      link = "https://github.com/nix-community/lorri";
+    }
+    {
+      name = "netencode";
+      description = ''A human-readable nested data exchange format inspired by <a href="https://en.wikipedia.org/wiki/Netstring">netstrings</a> and <a href="https://en.wikipedia.org/wiki/Bencode">bencode</a>.'';
+      link = depotCgitLink { relativePath = "users/Profpatsch/netencode/README.md"; };
+    }
+    {
+      name = "yarn2nix";
+      description = ''nix dependency generator for the <a href="https://yarnpkg.com/"><code>yarn</code> Javascript package manager</a>'';
+      link = "https://github.com/Profpatsch/yarn2nix";
+    }
+  ];
+
+  posts = [
+    {
+      date = "2017-05-04";
+      title = "Ligature Emulation in Emacs";
+      subtitle = "It’s not pretty, but the results are";
+      description = "How to set up ligatures using <code>prettify-symbols-mode</code> and the Hasklig/FiraCode fonts.";
+      page = { cssFile }: markdownToHtml {
+        name = "2017-05-04-ligature-emluation-in-emacs";
+        markdown = ./posts/2017-05-04-ligature-emulation-in-emacs.md;
+        inherit cssFile;
+      };
+      route = [ "posts" "2017-05-04-ligature-emluation-in-emacs" ];
+      tags = [ "emacs" ];
+    }
+  ];
+
+  # convert a markdown file to html via lowdown
+  markdownToHtml =
+    { name
+    , # the file to convert
+      markdown
+    , # css file to add to the final result, as { route }
+      cssFile
+    }:
+    depot.nix.runExecline "${name}.html" { } ([
+      "importas"
+      "out"
+      "out"
+      (depot.users.Profpatsch.lib.debugExec "")
+      bins.lowdown
+      "-s"
+      "-Thtml"
+    ] ++
+    (lib.optional (cssFile != null) ([ "-M" "css=${mkRoute cssFile.route}" ]))
+    ++ [
+      "-o"
+      "$out"
+      markdown
+    ]);
+
+  # takes a list of { route … } attrsets and converts each route list to an absolute path string
+  fullRoute = attrs: lib.pipe attrs [
+    (map (x@{ route, ... }: x // { route = mkRoute route; }))
+  ];
+
+  # a cdb from route to a netencoded version of data for each route
+  router = cssFile: lib.pipe (notes ++ posts) [
+    (map (r: with depot.users.Profpatsch.lens;
+    lib.pipe r [
+      (over (field "route") mkRoute)
+      (over (field "page") (_ { inherit cssFile; }))
+    ]))
+    (map (x: {
+      name = x.route;
+      value = depot.users.Profpatsch.netencode.gen.dwim x;
+    }))
+    lib.listToAttrs
+    (cdbMake "router")
+  ];
+
+  # Create a link to the given source file/directory, given the relative path in the depot repo.
+  # Checks that the file exists at evaluation time.
+  depotCgitLink =
+    {
+      # relative path from the depot root (without leading /).
+      relativePath
+    }:
+      assert
+      (lib.assertMsg
+        (builtins.pathExists (depot.path.origSrc + ("/" + relativePath)))
+        "depotCgitLink: path /${relativePath} does not exist in depot, and depot.path was ${toString depot.path}");
+      "https://code.tvl.fyi/tree/${relativePath}";
+
+  # look up a route by path ($1)
+  router-lookup = cssFile: depot.nix.writeExecline "router-lookup" { readNArgs = 1; } [
+    cdbLookup
+    (router cssFile)
+    "$1"
+  ];
+
+  runExeclineStdout = name: args: cmd: depot.nix.runExecline name args ([
+    "importas"
+    "-ui"
+    "out"
+    "out"
+    "redirfd"
+    "-w"
+    "1"
+    "$out"
+  ] ++ cmd);
+
+  notes-index-html =
+    let o = fullRoute notes;
+    in ''
+      <ul>
+      ${scope o (o: ''
+        <li><a href="${str o.route}">${esc o.name}</a></li>
+      '')}
+      </ul>
+    '';
+
+  notes-index = pkgs.writeText "notes-index.html" notes-index-html;
+
+  # A simple mustache-inspired string interpolation combinator
+  # that takes an object and a template (a function from o to string)
+  # and returns a string.
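+  # e.g. scope [ { x = "a"; } { x = "b"; } ] (o: o.x) evaluates to "a\nb"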
+  scope = o: tpl:
+    if builtins.typeOf o == "list" then
+      lib.concatMapStringsSep "\n" tpl o
+    else if builtins.typeOf o == "set" then
+      tpl o
+    else throw "${lib.generators.toPretty {} o} not allowed in template";
+
+  # string-escape html (TODO)
+  str = s: s;
+  # html-escape (TODO)
+  esc = s: s;
+  html = s: s;
+
+  projects-index-html =
+    let o = projects;
+    in ''
+      <dl>
+      ${scope o (o: ''
+        <dt><a href="${str o.link}">${esc o.name}</a></dt>
+        <dd>${html o.description}</dd>
+      '')}
+      </dl>
+    '';
+
+  projects-index = pkgs.writeText "projects-index.html" projects-index-html;
+
+  posts-index-html =
+    let o = fullRoute posts;
+    in ''
+      <dl>
+      ${scope o (o: ''
+        <dt>${str o.date} <a href="${str o.route}">${esc o.title}</a></dt>
+        <dd>${html o.description}</dd>
+      '')}
+      </dl>
+    '';
+
+  posts-index = pkgs.writeText "projects-index.html" posts-index-html;
+
+  arglibNetencode = val: depot.nix.writeExecline "arglib-netencode" { } [
+    "export"
+    "ARGLIB_NETENCODE"
+    (depot.users.Profpatsch.netencode.gen.dwim val)
+    "$@"
+  ];
+
+  # A simple http server that serves the site. Yes, it’s horrible.
+  site-server = { cssFile, port }: depot.nix.writeExecline "blog-server" { } [
+    (depot.users.Profpatsch.lib.runInEmptyEnv [ "PATH" ])
+    bins.s6-tcpserver
+    "127.0.0.1"
+    port
+    bins.time
+    "--format=time: %es"
+    "--"
+    runOr
+    return400
+    "pipeline"
+    [
+      (arglibNetencode {
+        what = "request";
+      })
+      depot.users.Profpatsch.read-http
+    ]
+    depot.users.Profpatsch.netencode.record-splice-env
+    runOr
+    return500
+    "importas"
+    "-i"
+    "path"
+    "path"
+    "if"
+    [ depot.tools.eprintf "GET \${path}\n" ]
+    runOr
+    return404
+    "backtick"
+    "-ni"
+    "TEMPLATE_DATA"
+    [
+      # TODO: factor this out of here, this is routing not serving
+      "ifelse"
+      [ bins.test "$path" "=" "/notes" ]
+      [
+        "export"
+        "content-type"
+        "text/html"
+        "export"
+        "serve-file"
+        notes-index
+        depot.users.Profpatsch.netencode.env-splice-record
+      ]
+      "ifelse"
+      [ bins.test "$path" "=" "/projects" ]
+      [
+        "export"
+        "content-type"
+        "text/html"
+        "export"
+        "serve-file"
+        projects-index
+        depot.users.Profpatsch.netencode.env-splice-record
+      ]
+      "ifelse"
+      [ bins.test "$path" "=" "/posts" ]
+      [
+        "export"
+        "content-type"
+        "text/html"
+        "export"
+        "serve-file"
+        posts-index
+        depot.users.Profpatsch.netencode.env-splice-record
+      ]
+      # TODO: ignore potential query arguments. See 404 message
+      "pipeline"
+      [ (router-lookup cssFile) "$path" ]
+      depot.users.Profpatsch.netencode.record-splice-env
+      "importas"
+      "-ui"
+      "page"
+      "page"
+      "export"
+      "content-type"
+      "text/html"
+      "export"
+      "serve-file"
+      "$page"
+      depot.users.Profpatsch.netencode.env-splice-record
+    ]
+    runOr
+    return500
+    "if"
+    [
+      "pipeline"
+      [
+        bins.printf
+        ''
+          HTTP/1.1 200 OK
+          Content-Type: {{{content-type}}}; charset=UTF-8
+          Connection: close
+
+        ''
+      ]
+      depot.users.Profpatsch.netencode.netencode-mustache
+    ]
+    "pipeline"
+    [ "importas" "t" "TEMPLATE_DATA" bins.printf "%s" "$t" ]
+    depot.users.Profpatsch.netencode.record-splice-env
+    "importas"
+    "-ui"
+    "serve-file"
+    "serve-file"
+    bins.cat
+    "$serve-file"
+  ];
+
+  # run argv or $1 if argv returns a failure status code.
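+  # e.g.: `runOr return404 some-prog some-arg` runs `some-prog some-arg`
+  # and execs into return404 if it exits non-zero (names are illustrative).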
+  runOr = depot.nix.writeExecline "run-or" { readNArgs = 1; } [
+    "foreground"
+    [ "$@" ]
+    "importas"
+    "?"
+    "?"
+    "ifelse"
+    [ bins.test "$?" "-eq" "0" ]
+    [ ]
+    "if"
+    [ depot.tools.eprintf "runOr: exited \${?}, running \${1}\n" ]
+    "$1"
+  ];
+
+  return400 = depot.nix.writeExecline "return400" { } [
+    bins.printf
+    "%s"
+    ''
+      HTTP/1.1 400 Bad Request
+      Content-Type: text/plain; charset=UTF-8
+      Connection: close
+
+    ''
+  ];
+
+  return404 = depot.nix.writeExecline "return404" { } [
+    bins.printf
+    "%s"
+    ''
+      HTTP/1.1 404 Not Found
+      Content-Type: text/plain; charset=UTF-8
+      Connection: close
+
+      This page doesn’t exist! Query arguments are not handled at the moment.
+    ''
+  ];
+
+  return500 = depot.nix.writeExecline "return500" { } [
+    bins.printf
+    "%s"
+    ''
+      HTTP/1.1 500 Internal Server Error
+      Content-Type: text/plain; charset=UTF-8
+      Connection: close
+
+      Encountered an internal server error. Please try again.
+    ''
+  ];
+
+  capture-stdin = depot.nix.writers.rustSimple
+    {
+      name = "capture-stdin";
+      dependencies = [ depot.users.Profpatsch.execline.exec-helpers ];
+    } ''
+    extern crate exec_helpers;
+    use std::io::Read;
+    fn main() {
+      let (args, prog) = exec_helpers::args_for_exec("capture-stdin", 1);
+      let valname = &args[1];
+      let mut v : Vec<u8> = vec![];
+      std::io::stdin().lock().read_to_end(&mut v).unwrap();
+      exec_helpers::exec_into_args("capture-stdin", prog, vec![(valname, v)]);
+    }
+  '';
+
+  # go from a list of path elements to an absolute route string
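+  # e.g. mkRoute [ "notes" "preventing-oom" ] evaluates to "/notes/preventing-oom"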
+  mkRoute = route: "/" + lib.concatMapStringsSep "/" urlencodeAscii route;
+
+  # urlencodes, but only ASCII characters
+  # https://en.wikipedia.org/wiki/Percent-encoding
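+  # e.g. urlencodeAscii "a/b:c" evaluates to "a%2Fb%3Ac";
+  # characters outside the allowed set below fail the assert at eval time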
+  urlencodeAscii = urlPiece:
+    let
+      raw = [ "!" "#" "$" "%" "&" "'" "(" ")" "*" "+" "," "/" ":" ";" "=" "?" "@" "[" "]" ];
+      enc = [ "%21" "%23" "%24" "%25" "%26" "%27" "%28" "%29" "%2A" "%2B" "%2C" "%2F" "%3A" "%3B" "%3D" "%3F" "%40" "%5B" "%5D" ];
+      rest = [ "A" "B" "C" "D" "E" "F" "G" "H" "I" "J" "K" "L" "M" "N" "O" "P" "Q" "R" "S" "T" "U" "V" "W" "X" "Y" "Z" "a" "b" "c" "d" "e" "f" "g" "h" "i" "j" "k" "l" "m" "n" "o" "p" "q" "r" "s" "t" "u" "v" "w" "x" "y" "z" "0" "1" "2" "3" "4" "5" "6" "7" "8" "9" "-" "_" "." "~" ];
+    in
+    assert lib.assertMsg (lib.all (c: builtins.elem c (raw ++ rest)) (lib.stringToCharacters urlPiece))
+      "urlencodeAscii: the urlPiece must only contain valid url ASCII characters, was: ${urlPiece}";
+    builtins.replaceStrings raw enc urlPiece;
+
+
+  # create a cdb record entry, as required by the cdbmake tool
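+  # e.g. cdbRecord "foo" "bar" evaluates to "+3,3:foo->bar\n"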
+  cdbRecord = key: val:
+    "+${toString (builtins.stringLength key)},${toString (builtins.stringLength val)}:"
+    + "${key}->${val}\n";
+
+  # create a full cdbmake input from an attribute set of keys to values (strings)
+  cdbRecords =
+    with depot.nix.yants;
+    defun [ (attrs (either drv string)) string ]
+      (attrs:
+        (lib.concatStrings (lib.mapAttrsToList cdbRecord attrs)) + "\n");
+
+  # run cdbmake on a list of key/value pairs (strings)
+  cdbMake = name: attrs: depot.nix.runExecline "${name}.cdb"
+    {
+      stdin = cdbRecords attrs;
+    } [
+    "importas"
+    "out"
+    "out"
+    depot.users.Profpatsch.lib.eprint-stdin
+    "if"
+    [ bins.cdbmake "db" "tmp" ]
+    bins.mv
+    "db"
+    "$out"
+  ];
+
+  # look up a key ($2) in the given cdb ($1)
+  cdbLookup = depot.nix.writeExecline "cdb-lookup" { readNArgs = 2; } [
+    # cdb ($1) on stdin
+    "redirfd"
+    "-r"
+    "0"
+    "$1"
+    # key ($2) lookup
+    bins.cdbget
+    "$2"
+  ];
+
+in
+depot.nix.readTree.drvTargets {
+  inherit
+    router
+    depotCgitLink
+    site-server
+    notes-index
+    notes-index-html
+    projects-index
+    projects-index-html
+    posts-index-html
+    ;
+
+}
diff --git a/users/Profpatsch/blog/notes/an-idealized-conflang.md b/users/Profpatsch/blog/notes/an-idealized-conflang.md
new file mode 100644
index 0000000000..5c6b39f6e8
--- /dev/null
+++ b/users/Profpatsch/blog/notes/an-idealized-conflang.md
@@ -0,0 +1,298 @@
+tags: netencode, json
+date: 2022-03-31
+certainty: likely
+status: initial
+title: An idealized Configuration Language
+
+# An Idealized Configuration Language
+
+JSON brought us one step closer to what an idealized configuration language is,
+which I define as “data, stripped of all externalities of the system it is working in”.
+
+Specifically, JSON is very close to what I consider the minimal set of properties needed to represent structured data.
+
+## A short history, according to me
+
+In the beginning, Lisp defined s-expressions as a stand-in for an actual syntax.
+Then, people figured out that it’s also a way to represent structured data.
+It has scalars, which can be nested into lists, recursively.
+
+```
+(this is (a (list) (of lists)))
+```
+
+This provides the first three rules of our idealized language:
+
+1. A **scalar** is a primitive value that is domain-specific.
+   We can assume a bunch of bytes here, or a text or an integer.
+   
+2. A **list** gives an ordering to `0..n` (or `1..n`) values.
+   
+3. Both a scalar and a list are the *same kind* of “thing” (from here on called **value**),
+   lists can be created from arbitrary values *recursively*
+   (for example scalars, or lists of scalars and other lists).
+
+
+Later, ASN.1 came along, with the important insight that the same idealized data structure
+can be represented in different fashions,
+for example as a binary-efficient version and a human-readable format.
+
+Then, XML “graced” the world for a decade or two, and the main lesson from it was
+that you don’t want to mix markup languages and configuration languages,
+and that you don’t want a committee to design these things.
+
+---
+
+In the meantime, Brendan Eich designed Javascript. Its prototype-based object system
+arguably stripped down the rituals of existing OO-systems.
+Douglas Crockford later extracted the object format (minus functions) into a syntax, and we got JSON.
+
+```
+{
+  "foo": [
+    { "nested": "attrs" },
+    "some text"
+  ],
+  "bar": 42
+}
+```
+
+JSON adds another fundamental idea into the mix:
+
+4. **Records** are unordered collections of `name`/`value` pairs.
+   A `name` is defined to be a unicode string, i.e. a semantic descriptor of the nested `value`.
+
+Unfortunately, the JSON syntax does not actually specify any semantics of records (`objects` in JSON lingo);
+in particular, it does not mention what the meaning is if a `name` appears twice in one record.
+
+If records can have multiple entries with the same `name`, suddenly ordering becomes important!
+But wait, remember earlier we defined *lists* to impose ordering on values.
+So in order to rectify that problem, we say that
+
+5. A `name` can only appear in a record *once*, names must be unique.
+
+This is the current state of the programming community at large,
+where most “modern” configuration languages basically use a version of the JSON model
+as their underlying data structure. (However, not all of them use the same version.)
+
+## Improving JSON’s data model
+
+We are not yet at the final “idealized” configuration language, though.
+
+Modern languages like Standard ML define their data types as a mixture of 
+
+* *records* (“structs” in the C lingo)
+* and *sums* (which you can think of as enums that can hold more `value`s inside them)
+
+This makes it possible to express the common pattern where some fields in a record are only meaningful
+if another field—the so-called `tag`-field—is set to a specific value.
+
+An easy example: if a request can fail with an error message or succeed with a result.
+
+You could model that as 
+
+```
+{
+  "was_error": true,
+  "error_message": "there was an error"
+}
+```
+
+or
+
+```
+{
+  "was_error": false,
+  "result": 42
+}
+```
+
+in your JSON representation.
+
+But in a ML-like language (like, for example, Rust), you would instead model it as
+
+```
+type RequestResult 
+  = Error { error_message: String }
+  | Success { result: i64 }
+```
+
+where the distinction in `Error` or `Success` makes it clear that `error_message` and `result`
+only exist in one of these cases, not the other.
+
+We *can* encode exactly that idea into JSON in multiple ways, but there is no “blessed” way.
+
+For example, another way to encode the above would be
+
+```
+{ 
+  "Error": { 
+    "error_message": "there was an error"
+  }
+}
+```
+
+and
+
+```
+{ 
+  "Success": { 
+    "result": 42
+  }
+}
+```
+
+Particularly notice the difference between the language representation, where the type is “closed” (only `Success` or `Error` can happen),
+and the data representation, where the type is “open” (more cases could potentially exist).
+
+This is an important distinction from a type system:
+Our idealized configuration language just gives more structure to a bag of data,
+it does not restrict which value can be where.
+Think of a value in a unityped language, like Python.
+
+
+So far we have the notion of 
+
+1. a scalar (a primitive)
+2. a list (ordering on values)
+3. a record (unordered collection of named values)
+
+and in order to get the “open” `tag`ged enumeration values, we introduce
+
+4. a `tag`, which gives a name to a value
+
+We can then redefine `record` to mean “an unordered collection of `tag`ged values”,
+which further reduces the number of concepts needed.
+
+And that’s it, this is the full idealized configuration language.
+
+
+## Some examples of data modelling with tags
+
+This is all well and good, but what does it look like in practice?
+
+For these examples I will be using JSON with a new `< "tag": value >` syntax
+to represent `tag`s.
+
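+For example, the `Success` value from the earlier section would be written as
+
+```
+< "Success": { "result": 42 } >
+```
+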
+From a compatibility standpoint, `tag`s (or sum types) have dual properties to record types.
+
+With a record, when you have a producer that *adds* a field to it, the consumer will still be able to handle the record (provided the semantics of the existing fields is not changed by the new field).
+
+With a tag, *removing* a tag from the producer will mean that the consumer will still be able to handle the tag. It might do one “dead” check on the removed `tag`, but can still handle the remaining ones just fine.
+
+<!-- TODO: some illustration here -->
+    
+An example of how that is applied in practice is that in `protobuf3`, fields of a record are *always* optional fields.
+
+We can model optional fields by wrapping them in `< "Some": value >` or `< "None": {} >` (where the actual value of the `None` is ignored or always an empty record).
+
+So a protobuf with the fields `foo: int` and `bar: string` has to be parsed by the receiver as containing *four* possibilities:
+
+|№|foo|bar|
+|--:|---|---|
+|1|`<"None":{}>`|`<"None":{}>`|
+|2|`<"Some":42>`|`<"None":{}>`|
+|3|`<"None":{}>`|`<"Some":"x">`|
+|4|`<"Some":42>`|`<"Some":"x">`|
+
+Now, iff the receiver actually handles all four possibilities
+(and doesn’t just crash if a field is not set, as customary in million-dollar-mistake languages),
+it’s easy to see how removing a field from the producer is semantically equal to always setting it to `<"None":{}>`.
+Since all receivers should be ready to receive `None` for every field, this provides a simple forward-compatibility scheme.
+
+We can abstract this to any kind of tag value:
+If you start with “more” tags, you give yourself space to remove them later without breaking compatibility, typically called “forward compatibility”.
+
+
+## To empty list/record or not to
+
+Something to think about is whether records and lists should be defined
+to always contain at least one element.
+
+As it stands, JSON has multiple ways of expressing the “empty value”:
+
+* `null`
+* `[]`
+* `{}`
+* `""`
+* *leave out the field*
+
+and two of those come from the possibility of having empty structured values.
+
+## Representations of this language
+
+This line of thought originally fell out of me designing [`netencode`](https://code.tvl.fyi/tree/users/Profpatsch/netencode/README.md)
+as a small human-debuggable format for pipeline serialization.
+
+In addition to the concepts mentioned here (especially tags),
+it provides a better set of scalars than JSON (specifically arbitrary bytestrings),
+but it cannot practically be written or modified by hand,
+which might be a good thing depending on how you look at it.
+
+---
+
+The way that is compatible with the rest of the ecosystem is probably to use a subset of JSON
+to represent our idealized language.
+
+There are multiple ways of encoding tags in JSON, each of which has its pros and cons.
+
+The most common is probably the “tag field” variant, where the tag is pulled into the nested record:
+
+```
+{
+  "_tag": "Success",
+  "result": 42
+}
+```
+
+This has the advantage that people know how to deal with it and that it’s easy to “just add another field”;
+plus, it is backward-compatible if you had a record in the first place.
+
+It has multiple disadvantages however:
+
+* If your value wasn’t a record (e.g. an int) before, you have to put it in a record and assign an arbitrary name to its field
+* People are not forced to “unwrap” the tag first, so they are going to forget to check it
+* The magic “_tag” name cannot be used by any of the record’s fields
+
+
+An in-between version of this with fewer downsides is to always push a JSON record onto the stack:
+
+```
+{
+  "tag": "Success",
+  "value": {
+    "result": 42
+  }
+}
+```
+
+This makes it harder for people to miss checking the `tag`, but still possible of course.
+It also makes it easily possible to inspect the contents of `value` without knowing the
+exhaustive list of `tag`s, which can be useful in practice (though often not sound!).
+It also gets rid of the “_tag” field name clash problem.
+
+Disadvantages:
+
+* Breaks backwards compatibility with an existing record-based approach if you want to introduce `tag`s
+* Verbosity of representation
+* Hard to distinguish a record with the `tag` and `value` fields from a `tag`ged value (though you know the type layout of your data on a higher level, don’t you? ;) )
+
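+To make the consuming side concrete, here is a minimal sketch (assuming Rust with `serde_json`; the function is illustrative) of matching on this `tag`/`value` representation:
+
+```
+use serde_json::Value;
+
+fn handle(v: &Value) -> Option<i64> {
+    // unwrap the tag first, then dispatch on it
+    match v.get("tag")?.as_str()? {
+        "Success" => v.get("value")?.get("result")?.as_i64(),
+        // unknown tags fall through, which is the “open” behaviour
+        _ => None,
+    }
+}
+```
+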
+
+The final, “most pure” representation is the one I gave in the original introduction:
+
+```
+{
+  "Success": {
+    "result": 42
+  }
+}
+```
+
+Now you *have* to match on the `tag` name first, before you can actually access your data,
+and it’s less verbose than the above representation.
+
+Disadvantages:
+
+* You also have to *know* what `tag`s to expect; it’s harder to query because you need to extract the keys and values from the dict and then take the first one.
+* Doing a “tag backwards compat” check is harder,
+  because you can’t just check whether `_tag` or `tag`/`value` are the keys in the dict.
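+
+A rough sketch of that extraction (again assuming Rust with `serde_json`; `untag` is a hypothetical helper):
+
+```
+use serde_json::Value;
+
+// pull out the single key/value pair of a tagged value
+fn untag(v: &Value) -> Option<(&str, &Value)> {
+    let obj = v.as_object()?;
+    let mut entries = obj.iter();
+    let (tag, value) = entries.next()?;
+    // a second key would mean this is a plain record, not a tagged value
+    if entries.next().is_some() {
+        return None;
+    }
+    Some((tag.as_str(), value))
+}
+```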
diff --git a/users/Profpatsch/blog/notes/preventing-oom.md b/users/Profpatsch/blog/notes/preventing-oom.md
new file mode 100644
index 0000000000..59ea4f7477
--- /dev/null
+++ b/users/Profpatsch/blog/notes/preventing-oom.md
@@ -0,0 +1,33 @@
+tags: linux
+date: 2020-01-25
+certainty: likely
+status: initial
+title: Preventing out-of-memory (OOM) errors on Linux
+
+# Preventing out-of-memory (OOM) errors on Linux
+
+I’ve been running out of memory more and more often lately. I don’t use any swap space because I am of the opinion that 16GB of memory should be sufficient for most daily and professional tasks. That is generally true, but sometimes a runaway process fills my memory. Emacs, for example, is very good at this: it is prone to filling your RAM when you open JSON files with very long lines.
+
+In theory, the kernel OOM killer should come in and save the day, but the Linux OOM killer is notorious for being extremely … conservative. It will try to free every internal structure it can before even thinking about touching any userspace processes. By that point, the desktop has usually been unresponsive for minutes.
+
+Luckily, the kernel provides memory statistics for the whole system as well as for single processes, and the [`earlyoom`](https://github.com/rfjakob/earlyoom) tool uses those to keep memory usage under a certain limit. It will start killing processes, “heaviest” first, until memory usage is below the given limit again.
+
+On NixOS, I set:
+
+```nix
+{
+  services.earlyoom = {
+    enable = true;
+    freeMemThreshold = 5; # <5% free
+  };
+}
+```
+
+and after activation, this simple test shows whether the daemon is working:
+
+```shell
+$ tail /dev/zero
+fish: “tail /dev/zero” terminated by signal SIGTERM (Polite quit request)
+```
+
+`tail /dev/zero` searches for the last line of the file `/dev/zero`, and since it cannot know that there is no next line and no end to the stream of `\0` this file produces, it will fill the RAM as quickly as physically possible. Before it can fill it completely, `earlyoom` recognizes that the limit was breached, singles out the `tail` command as the process using the most amount of memory, and sends it a `SIGTERM`.
diff --git a/users/Profpatsch/blog/notes/private-trackers-are-markets.md b/users/Profpatsch/blog/notes/private-trackers-are-markets.md
new file mode 100644
index 0000000000..88fe5f07e5
--- /dev/null
+++ b/users/Profpatsch/blog/notes/private-trackers-are-markets.md
@@ -0,0 +1,46 @@
+# Private bittorrent trackers are markets
+
+Private bittorrent trackers have a currency called ratio,
+which is the bits you upload divided by the bits you download.
+
+You have to keep the ratio above a certain lower limit,
+otherwise you get banned from the market or have to cut a deal with the moderators → bankruptcy
+
+New liquidity (?) is introduced to the market by so-called “freeleech” events or tokens,
+which essentially allow you to exchange a token (or some time in the case of time-restricted freeleech)
+for some data, which can then be seeded to generate future profits without spending ratio.
+
+Sometimes, ratio is pulled from the market by letting users exchange it for website perks,
+like forum titles or other benefits like chat-memberships. This has a deflationary effect.
+It could be compared to “vanity items” in MMOs, which don’t grant a mechanical advantage in the market.
+Is there a real-world equivalent? i.e. allowing rich people to exchange some of their worth
+for vanity items instead of investing it for future gain?
+
+Sometimes, ratio can be traded for more than just transferred bits,
+for example by requesting a torrent for a certain album or movie,
+paying some ratio for the fulfillment of the request.
+
+---
+
+Based on how bittorrent works, usually multiple people “seed” a torrent.
+This means multiple people can answer a request for trading ratio.
+Part of the request (e.g. the first 30% of a movie)
+can be fulfilled by one party, part of it by a second or even more parties.
+
+For small requests (e.g. albums), the time between a trade being announced
+and being filled often determines who is able to fill it.
+Getting a one-second head start vastly increases your chance of a handshake
+and of starting the transmission, so on average you get a vastly higher ratio gain from that torrent.
+This means that using a bittorrent client which is fast to answer as a seeder leads to better outcomes.
+This could be compared to mechanisms seen in high-speed trading.
+
+---
+
+Of course, these market mechanisms are in service of a wider policy goal,
+which is to ensure the constant availability of as much high-quality data as possible.
+There are more mechanisms at play on these trackers that all contribute to this goal
+(possible keywords to research: trumping, freeleech for underseeded torrents).
+
+In general, it is important to remember that markets are only a tool,
+never an end in themselves, whatever neoliberals would like us to believe.
+They are always in service of a wider goal or policy. We live in a society.
diff --git a/users/Profpatsch/blog/notes/rust-string-conversions.md b/users/Profpatsch/blog/notes/rust-string-conversions.md
new file mode 100644
index 0000000000..99071ef9d3
--- /dev/null
+++ b/users/Profpatsch/blog/notes/rust-string-conversions.md
@@ -0,0 +1,53 @@
+# Converting between different String types in Rust
+
+```
+let s: String = ...
+let st: &str = ...
+let u: &[u8] = ...
+let b: [u8; 3] = *b"foo"
+let v: Vec<u8> = ...
+let os: OsString = ...
+let ost: OsStr = ...
+
+From       To         Use                                    Comment
+----       --         ---                                    -------
+&str     -> String    String::from(st)
+&str     -> &[u8]     st.as_bytes()
+&str     -> Vec<u8>   st.as_bytes().to_owned()               via &[u8]
+&str     -> &OsStr    OsStr::new(st)
+
+String   -> &str      &s                                     alt. s.as_str()
+String   -> &[u8]     s.as_bytes()
+String   -> Vec<u8>   s.into_bytes()
+String   -> OsString  OsString::from(s)
+
+&[u8]    -> &str      str::from_utf8(u).unwrap()
+&[u8]    -> String    String::from_utf8(u.to_vec()).unwrap()  via Vec<u8>
+&[u8]    -> Vec<u8>   u.to_owned()
+&[u8]    -> &OsStr    OsStr::from_bytes(u)                   use std::os::unix::ffi::OsStrExt;
+
+[u8; 3]  -> &[u8]     &b[..]                                 byte literal
+[u8; 3]  -> &[u8]     "foo".as_bytes()                       alternative via utf8 literal
+
+Vec<u8>  -> &str      str::from_utf8(&v).unwrap()            via &[u8]
+Vec<u8>  -> String    String::from_utf8(v).unwrap()
+Vec<u8>  -> &[u8]     &v
+Vec<u8>  -> OsString  OsString::from_vec(v)                  use std::os::unix::ffi::OsStringExt;
+
+&OsStr   -> &str      ost.to_str().unwrap()
+&OsStr   -> String    ost.to_os_string().into_string()       via OsString
+                         .unwrap()
+&OsStr   -> Cow<str>  ost.to_string_lossy()                  Unicode replacement characters
+&OsStr   -> OsString  ost.to_os_string()
+&OsStr   -> &[u8]     ost.as_bytes()                         use std::os::unix::ffi::OsStrExt;
+
+OsString -> String    os.into_string().unwrap()              returns original OsString on failure
+OsString -> &str      os.to_str().unwrap()
+OsString -> &OsStr    os.as_os_str()
+OsString -> Vec<u8>   os.into_vec()                          use std::os::unix::ffi::OsStringExt;
+```
+
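+As a quick sanity check, here is a small sketch chaining a few of these conversions together (the `OsStr` steps are unix-only):
+
+```
+use std::ffi::OsStr;
+use std::os::unix::ffi::OsStrExt;
+
+fn main() {
+    let st: &str = "foo";
+    let v: Vec<u8> = st.as_bytes().to_owned(); // &str -> Vec<u8>, via &[u8]
+    let s: String = String::from_utf8(v).unwrap(); // Vec<u8> -> String
+    let ost: &OsStr = OsStr::new(&s); // &str -> &OsStr
+    assert_eq!(ost.as_bytes(), b"foo"); // &OsStr -> &[u8]
+}
+```
+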
+
+## Source
+
+Original source is [this document on Pastebin](https://web.archive.org/web/20190710121935/https://pastebin.com/Mhfc6b9i).
diff --git a/users/Profpatsch/blog/posts/2017-05-04-ligature-emulation-in-emacs.md b/users/Profpatsch/blog/posts/2017-05-04-ligature-emulation-in-emacs.md
new file mode 100644
index 0000000000..ba80888bad
--- /dev/null
+++ b/users/Profpatsch/blog/posts/2017-05-04-ligature-emulation-in-emacs.md
@@ -0,0 +1,123 @@
+title: Ligature Emulation in Emacs
+date: 2017-05-04
+
+Monday was (yet another)
+[NixOS hackathon][hackathon] at [OpenLab Augsburg][ola].
+[Maximilian][mhuber] was there and to my amazement
+he got working ligatures in his Haskell files in Emacs! Ever since Hasklig
+updated its format to use ligatures and private Unicode code points a while ago,
+the hack I had used in my config stopped working.
+
+Encouraged by that I decided to take a look on Tuesday. Long story short, I was
+able to [get it working in a pretty satisfying way][done].
+
+[hackathon]: https://www.meetup.com/Munich-NixOS-Meetup/events/239077247/
+[mhuber]: https://github.com/maximilianhuber
+[ola]: https://openlab-augsburg.de
+[done]: https://github.com/i-tu/Hasklig/issues/84#issuecomment-298803495
+
+What’s left to do is package it into a module and push to melpa.
+
+
+### elisp still sucks, but it’s bearable, sometimes
+
+I’m the kind of person who, when trying to fix something elisp related, normally
+gives up two hours later and three macro calls deep. Yes, homoiconic,
+non-lexically-scoped, self-rewriting code is not exactly my fetish.
+This time the task and the library (`prettify-symbols-mode`) were simple enough
+for that to not happen.
+
+Some interesting technical trivia:
+
+- elisp literal character syntax is `?c`. `?\t` is the tab character
+- You join characters by `(string c1 c2 c3 ...)`
+- [dash.el][dash] is pretty awesome and does what a functional programmer
+  expects. Also, Rainbow Dash.
+- Hasklig and FiraCode multi-column symbols actually [only occupy one column, on
+  the far right of the glyph][glyph]. `my-correct-symbol-bounds` fixes emacs’
+  rendering in that case.
+
+
+[dash]: https://github.com/magnars/dash.el
+[glyph]: https://github.com/tonsky/FiraCode/issues/211#issuecomment-239082368
+
+
+## Appendix A
+
+For reference, here’s the complete code as it stands now. Feel free to paste
+into your config; let’s make it [MIT][mit]. Maybe link to this site, in case there are
+updates.
+
+[mit]: https://opensource.org/licenses/MIT
+
+```elisp
+  (defun my-correct-symbol-bounds (pretty-alist)
+    "Prepend a TAB character to each symbol in this alist,
+this way compose-region called by prettify-symbols-mode
+will use the correct width of the symbols
+instead of the width measured by char-width."
+    (mapcar (lambda (el)
+              (setcdr el (string ?\t (cdr el)))
+              el)
+            pretty-alist))
+
+  (defun my-ligature-list (ligatures codepoint-start)
+    "Create an alist of strings to replace with
+codepoints starting from codepoint-start."
+    (let ((codepoints (-iterate '1+ codepoint-start (length ligatures))))
+      (-zip-pair ligatures codepoints)))
+
+  ;; list can be found at https://github.com/i-tu/Hasklig/blob/master/GlyphOrderAndAliasDB#L1588
+  (setq my-hasklig-ligatures
+    (let* ((ligs '("&&" "***" "*>" "\\\\" "||" "|>" "::"
+                   "==" "===" "==>" "=>" "=<<" "!!" ">>"
+                   ">>=" ">>>" ">>-" ">-" "->" "-<" "-<<"
+                   "<*" "<*>" "<|" "<|>" "<$>" "<>" "<-"
+                   "<<" "<<<" "<+>" ".." "..." "++" "+++"
+                   "/=" ":::" ">=>" "->>" "<=>" "<=<" "<->")))
+      (my-correct-symbol-bounds (my-ligature-list ligs #Xe100))))
+
+  ;; nice glyphs for haskell with hasklig
+  (defun my-set-hasklig-ligatures ()
+    "Add hasklig ligatures for use with prettify-symbols-mode."
+    (setq prettify-symbols-alist
+          (append my-hasklig-ligatures prettify-symbols-alist))
+    (prettify-symbols-mode))
+
+  (add-hook 'haskell-mode-hook 'my-set-hasklig-ligatures)
+```
+
+## Appendix B (Update 1): FiraCode integration
+
+I also created a mapping for [FiraCode][fira]. You need to grab the [additional
+symbol font][symbol] that adds (most) ligatures to the unicode private use area.
+Consult your system documentation on how to add it to your font cache.
+Next add `"Fira Code"` and `"Fira Code Symbol"` to your font preferences. Symbol
+only contains the additional characters, so you need both.
+
+If you are on NixOS, the font package should be on the main branch shortly; [I
+added a package][symbol-pkg].
+
+[fira]: https://github.com/tonsky/FiraCode/
+[symbol]: https://github.com/tonsky/FiraCode/issues/211#issuecomment-239058632
+[symbol-pkg]: https://github.com/NixOS/nixpkgs/pull/25517
+
+Here’s the mapping adjusted for FiraCode:
+
+```elisp
+  (setq my-fira-code-ligatures
+    (let* ((ligs '("www" "**" "***" "**/" "*>" "*/" "\\\\" "\\\\\\"
+                  "{-" "[]" "::" ":::" ":=" "!!" "!=" "!==" "-}"
+                  "--" "---" "-->" "->" "->>" "-<" "-<<" "-~"
+                  "#{" "#[" "##" "###" "####" "#(" "#?" "#_" "#_("
+                  ".-" ".=" ".." "..<" "..." "?=" "??" ";;" "/*"
+                  "/**" "/=" "/==" "/>" "//" "///" "&&" "||" "||="
+                  "|=" "|>" "^=" "$>" "++" "+++" "+>" "=:=" "=="
+                  "===" "==>" "=>" "=>>" "<=" "=<<" "=/=" ">-" ">="
+                  ">=>" ">>" ">>-" ">>=" ">>>" "<*" "<*>" "<|" "<|>"
+                  "<$" "<$>" "<!--" "<-" "<--" "<->" "<+" "<+>" "<="
+                  "<==" "<=>" "<=<" "<>" "<<" "<<-" "<<=" "<<<" "<~"
+                  "<~~" "</" "</>" "~@" "~-" "~=" "~>" "~~" "~~>" "%%"
+                  "x" ":" "+" "+" "*")))
+      (my-correct-symbol-bounds (my-ligature-list ligs #Xe100))))
+```
diff --git a/users/Profpatsch/cabal.project b/users/Profpatsch/cabal.project
new file mode 100644
index 0000000000..26b6186969
--- /dev/null
+++ b/users/Profpatsch/cabal.project
@@ -0,0 +1,14 @@
+packages:
+  ./my-prelude/my-prelude.cabal
+  ./my-webstuff/my-webstuff.cabal
+  ./netencode/netencode.cabal
+  ./arglib/arglib-netencode.cabal
+  ./execline/exec-helpers.cabal
+  ./htmx-experiment/htmx-experiment.cabal
+  ./mailbox-org/mailbox-org.cabal
+  ./cas-serve/cas-serve.cabal
+  ./jbovlaste-sqlite/jbovlaste-sqlite.cabal
+  ./whatcd-resolver/whatcd-resolver.cabal
+  ./openlab-tools/openlab-tools.cabal
+  ./httzip/httzip.cabal
+  ./my-xmonad/my-xmonad.cabal
diff --git a/users/Profpatsch/cas-serve/CasServe.hs b/users/Profpatsch/cas-serve/CasServe.hs
new file mode 100644
index 0000000000..62636fe9c1
--- /dev/null
+++ b/users/Profpatsch/cas-serve/CasServe.hs
@@ -0,0 +1,247 @@
+{-# LANGUAGE QuasiQuotes #-}
+{-# OPTIONS_GHC -Wno-orphans #-}
+
+module Main where
+
+import ArglibNetencode (arglibNetencode)
+import Control.Applicative
+import Control.Monad.Reader
+import Crypto.Hash qualified as Crypto
+import Data.ByteArray qualified as ByteArray
+import Data.ByteString.Lazy qualified as ByteString.Lazy
+import Data.ByteString.Lazy qualified as Lazy
+import Data.Functor.Compose
+import Data.Int (Int64)
+import Data.List qualified as List
+import Data.Text qualified as Text
+import Data.Text.IO qualified as Text
+import Database.SQLite.Simple (NamedParam ((:=)))
+import Database.SQLite.Simple qualified as Sqlite
+import Database.SQLite.Simple.FromField qualified as Sqlite
+import Database.SQLite.Simple.QQ qualified as Sqlite
+import Label
+import Netencode.Parse qualified as Net
+import Network.HTTP.Types qualified as Http
+import Network.Wai qualified as Wai
+import Network.Wai.Handler.Warp qualified as Warp
+import PossehlAnalyticsPrelude
+import System.IO (stderr)
+
+parseArglib = do
+  let env = label @"arglibEnvvar" "CAS_SERVE_ARGS"
+  let asApi =
+        Net.asRecord >>> do
+          address <- label @"bindToAddress" <$> (Net.key "bindToAddress" >>> Net.asText)
+          port <- label @"port" <$> (Net.key "port" >>> Net.asText)
+          pure (T2 address port)
+  arglibNetencode "cas-serve" (Just env)
+    <&> Net.runParse
+      [fmt|Cannot parse arguments in "{env.arglibEnvvar}"|]
+      ( Net.asRecord >>> do
+          publicApi <- label @"publicApi" <$> (Net.key "publicApi" >>> asApi)
+          privateApi <- label @"privateApi" <$> (Net.key "privateApi" >>> asApi)
+          pure $ T2 publicApi privateApi
+      )
+
+main :: IO ()
+main = do
+  withEnv $ \env ->
+    Warp.runSettings
+      (Warp.defaultSettings & Warp.setPort 7070)
+      (api env)
+
+withEnv :: (Env -> IO a) -> IO a
+withEnv inner = do
+  withSqlite "./data.sqlite" $ \envData -> do
+    withSqlite "./wordlist.sqlite" $ \envWordlist -> inner Env {..}
+
+withSqlite :: String -> (Sqlite.Connection -> IO a) -> IO a
+withSqlite fileName inner = Sqlite.withConnection fileName $ \conn -> do
+  Sqlite.setTrace conn (Just (\msg -> Text.hPutStrLn stderr [fmt|{fileName}: {msg}|]))
+  Sqlite.execute conn [Sqlite.sql|PRAGMA foreign_keys = ON|] ()
+  inner conn
+
+api :: Env -> Wai.Application
+api env req respond = do
+  case runHandler (getById <|> insertById) req env of
+    Nothing -> respond $ Wai.responseLBS Http.status404 [] "endpoint does not exist."
+    Just handler' -> do
+      handler' >>= \case
+        Left (status, err) -> respond $ Wai.responseLBS status [] (err & toLazyBytes)
+        Right (headers, body) ->
+          respond $
+            Wai.responseLBS
+              Http.status200
+              headers
+              (body & toLazyBytes)
+
+data Env = Env
+  { envWordlist :: Sqlite.Connection,
+    envData :: Sqlite.Connection
+  }
+
+-- | I don’t need any fancy routing in this, so a handler is just something that returns a @Just (IO a)@ if it wants to handle the request.
+newtype Handler a
+  = Handler (ReaderT (Wai.Request, Env) (Compose Maybe IO) a)
+  deriving newtype (Functor, Applicative, Alternative)
+
+handler :: ((Wai.Request, Env) -> Maybe (IO a)) -> Handler a
+handler f = Handler (ReaderT (Compose . f))
+
+runHandler :: Handler a -> Wai.Request -> Env -> Maybe (IO a)
+runHandler (Handler handler') req env = getCompose $ handler' & (\readerT -> runReaderT readerT (req, env))
+
+getById ::
+  Handler
+    ( Either
+        (Http.Status, ByteString)
+        ([(Http.HeaderName, ByteString)], ByteString)
+    )
+getById = handler $ \(req, env) -> do
+  guard ((req & Wai.requestMethod) == Http.methodGet)
+  case req & Wai.pathInfo of
+    ["v0", "by-id", filename] -> Just $ do
+      Sqlite.queryNamed
+        @( T3
+             "mimetype"
+             Text
+             "content"
+             ByteString
+             "size"
+             Int
+         )
+        (env.envData)
+        [Sqlite.sql|
+        SELECT
+          mimetype,
+          cast (content AS blob) as content,
+          size
+        FROM file_content
+        JOIN file_references
+          ON file_references.file_content = file_content.hash_sha256
+        WHERE
+          file_references.reference_type = 'by-id'
+          AND (file_references.name || file_references.extension) = :filename
+       |]
+        [":filename" Sqlite.:= filename]
+        <&> \case
+          [] -> Left (Http.status404, "File not found.")
+          [res] ->
+            Right
+              ( [ ("Content-Type", res.mimetype & textToBytesUtf8),
+                  ("Content-Length", res.size & showToText & textToBytesUtf8)
+                ],
+                -- TODO: should this be lazy/streamed?
+                res.content
+              )
+          _more -> Left "file_references must be unique (in type and name)" & unwrapError
+    _ -> Nothing
+
+insertById :: Handler (Either a ([(Http.HeaderName, ByteString)], ByteString))
+insertById = handler $ \(req, env) -> do
+  guard ((req & Wai.requestMethod) == Http.methodPost)
+  case req & Wai.pathInfo of
+    ["v0", "by-id"] -> Just $ do
+      let maybeText bytes = case bytesToTextUtf8 bytes of
+            Left _err -> Nothing
+            Right t -> Just t
+      let mimeType =
+            ( (req & Wai.requestHeaders & List.lookup "X-Cas-Serve-Mimetype" >>= maybeText)
+                <|> (req & Wai.requestHeaders & List.lookup "Content-Type" >>= maybeText)
+            )
+              & fromMaybe "application/octet-stream"
+
+      let magicFileEnding mimeType' = case Text.split (== '/') mimeType' of
+            [_, ""] -> Nothing
+            ["", _] -> Nothing
+            [_, "any"] -> Nothing
+            ["image", ty] -> Just (Text.cons '.' ty)
+            ["video", ty] -> Just (Text.cons '.' ty)
+            ["text", "plain"] -> Just ".txt"
+            ["text", "html"] -> Just ".html"
+            ["application", "pdf"] -> Just ".pdf"
+            ["application", "json"] -> Just ".json"
+            _ -> Nothing
+
+      let extension =
+            ( (req & Wai.requestHeaders & List.lookup "X-Cas-Serve-FileExtension" >>= maybeText)
+                <|> ( (req & Wai.requestHeaders & List.lookup "Content-Type")
+                        >>= maybeText
+                        >>= magicFileEnding
+                    )
+            )
+              -- Just the empty extension if we can’t figure it out.
+              & fromMaybe ""
+
+      body <- Wai.consumeRequestBodyStrict req
+      let hash :: Crypto.Digest Crypto.SHA256 = Crypto.hashlazy body
+      let hashBytes = hash & ByteArray.convert @(Crypto.Digest Crypto.SHA256) @ByteString
+      let len = ByteString.Lazy.length body
+      name <- getNameFromWordlist env
+      let fullname = name <> extension
+
+      let conn = env.envData
+      Sqlite.withTransaction conn $ do
+        Sqlite.executeNamed
+          conn
+          [Sqlite.sql|
+            INSERT INTO file_content
+              (content, hash_sha256, size)
+              VALUES
+              (:content, :hash_sha256, :size)
+              ON CONFLICT (hash_sha256) DO NOTHING
+          |]
+          [ ":content" := (body :: Lazy.ByteString),
+            ":hash_sha256" := (hashBytes :: ByteString),
+            ":size" := (len :: Int64)
+          ]
+
+        -- TODO: we are not checking if the name already exists,
+        -- we just assume that 1633^3 is enough to not get any collisions for now.
+        -- If the name exists, the user gets a 500.
+        Sqlite.executeNamed
+          conn
+          [Sqlite.sql|
+            INSERT INTO file_references
+              (file_content, reference_type, name, extension, mimetype)
+            VALUES
+              (:file_content, :reference_type, :name, :extension, :mimetype)
+          |]
+          [ ":file_content" := (hashBytes :: ByteString),
+            ":reference_type" := ("by-id" :: Text),
+            ":name" := name,
+            ":extension" := (extension :: Text),
+            ":mimetype" := (mimeType :: Text)
+          ]
+      pure $
+        Right
+          ( [("Content-Type", "text/plain")],
+            [fmt|/v0/by-id/{fullname}|]
+          )
+    _ -> Nothing
+
+-- | Get a random name from the wordlist: three words connected by @-@.
+getNameFromWordlist :: Env -> IO Text
+getNameFromWordlist env =
+  do
+    let numberOfWords = 3 :: Int
+    Sqlite.queryNamed @(Sqlite.Only Text)
+      (env.envWordlist)
+      [Sqlite.sql|SELECT word FROM wordlist ORDER BY RANDOM() LIMIT :words|]
+      [":words" Sqlite.:= numberOfWords]
+    <&> map Sqlite.fromOnly
+    <&> Text.intercalate "-"
+
+-- | We can use a Rec with a named list of types to parse a returning row of sqlite!!
+instance
+  ( Sqlite.FromField t1,
+    Sqlite.FromField t2,
+    Sqlite.FromField t3
+  ) =>
+  Sqlite.FromRow (T3 l1 t1 l2 t2 l3 t3)
+  where
+  fromRow = do
+    T3
+      <$> (label @l1 <$> Sqlite.field)
+      <*> (label @l2 <$> Sqlite.field)
+      <*> (label @l3 <$> Sqlite.field)
diff --git a/users/Profpatsch/cas-serve/cas-serve.cabal b/users/Profpatsch/cas-serve/cas-serve.cabal
new file mode 100644
index 0000000000..d14776700a
--- /dev/null
+++ b/users/Profpatsch/cas-serve/cas-serve.cabal
@@ -0,0 +1,73 @@
+cabal-version:      3.0
+name:               cas-serve
+version:            0.1.0.0
+author:             Profpatsch
+maintainer:         mail@profpatsch.de
+
+common common-options
+  ghc-options:
+      -Wall
+      -Wno-type-defaults
+      -Wunused-packages
+      -Wredundant-constraints
+      -fwarn-missing-deriving-strategies
+
+  -- See https://downloads.haskell.org/ghc/latest/docs/users_guide/exts.html
+  -- for a description of all these extensions
+  default-extensions:
+      -- Infer Applicative instead of Monad where possible
+    ApplicativeDo
+
+    -- Allow literal strings to be Text
+    OverloadedStrings
+
+    -- Syntactic sugar improvements
+    LambdaCase
+    MultiWayIf
+
+    -- Makes the (deprecated) usage of * instead of Data.Kind.Type an error
+    NoStarIsType
+
+    -- Convenient and crucial to deal with ambiguous field names, commonly
+    -- known as RecordDotSyntax
+    OverloadedRecordDot
+
+    -- does not export record fields as functions, use OverloadedRecordDot to access instead
+    NoFieldSelectors
+
+    -- Record punning
+    RecordWildCards
+
+    -- Improved Deriving
+    DerivingStrategies
+    DerivingVia
+
+    -- Type-level strings
+    DataKinds
+
+    -- to enable the `type` keyword in import lists (ormolu uses this automatically)
+    ExplicitNamespaces
+
+  default-language: GHC2021
+
+
+executable cas-serve
+    import: common-options
+
+    main-is:          CasServe.hs
+
+    build-depends:
+        base >=4.15 && <5,
+        pa-prelude,
+        pa-label,
+        arglib-netencode,
+        netencode,
+        text,
+        sqlite-simple,
+        http-types,
+        wai,
+        warp,
+        mtl,
+        bytestring,
+        memory,
+        crypton,
diff --git a/users/Profpatsch/cas-serve/default.nix b/users/Profpatsch/cas-serve/default.nix
new file mode 100644
index 0000000000..14c3e4aa13
--- /dev/null
+++ b/users/Profpatsch/cas-serve/default.nix
@@ -0,0 +1,38 @@
+{ depot, pkgs, lib, ... }:
+
+let
+  bins = depot.nix.getBins pkgs.sqlite [ "sqlite3" ];
+
+  cas-serve = pkgs.haskellPackages.mkDerivation {
+    pname = "cas-serve";
+    version = "0.1.0";
+
+    src = depot.users.Profpatsch.exactSource ./. [
+      ./cas-serve.cabal
+      ./CasServe.hs
+    ];
+
+    libraryHaskellDepends = [
+      pkgs.haskellPackages.pa-prelude
+      pkgs.haskellPackages.pa-label
+      pkgs.haskellPackages.crypton
+      pkgs.haskellPackages.wai
+      pkgs.haskellPackages.warp
+      pkgs.haskellPackages.sqlite-simple
+      depot.users.Profpatsch.arglib.netencode.haskell
+      depot.users.Profpatsch.netencode.netencode-hs
+    ];
+
+    isExecutable = true;
+    isLibrary = false;
+    license = lib.licenses.mit;
+  };
+
+  create-cas-database = depot.nix.writeExecline "create-cas-database" { readNArgs = 1; } [
+    bins.sqlite3
+    "$1"
+    "-init"
+    ./schema.sql
+  ];
+in
+cas-serve
diff --git a/users/Profpatsch/cas-serve/schema.sql b/users/Profpatsch/cas-serve/schema.sql
new file mode 100644
index 0000000000..b61a7a1ad5
--- /dev/null
+++ b/users/Profpatsch/cas-serve/schema.sql
@@ -0,0 +1,38 @@
+-- SQLite
+.dump
+
+PRAGMA foreign_keys = ON;
+
+BEGIN transaction;
+
+create table if not exists file_content (
+  content blob NOT NULL,
+  hash_sha256 blob PRIMARY KEY,
+  size integer NOT NULL
+) WITHOUT ROWID;
+
+
+create table if not exists file_references (
+  rowid integer PRIMARY KEY,
+  file_content NOT NULL REFERENCES file_content ON DELETE CASCADE,
+  reference_type text NOT NULL,
+  name text NOT NULL,
+  extension text NOT NULL,
+  mimetype text NOT NULL
+);
+
+create unique index if not exists file_references_type_name_unique on file_references (reference_type, name);
+
+-- insert into file_content values ('mycontent', 'myhash', 9);
+-- insert into file_references values (NULL, 'myhash', 'by-id', 'myschranz', '.txt', 'text/plain');
+-- insert into file_content values (readfile('/home/philip/Pictures/screenshot.png'), 'anotherhash', 999);
+-- insert into file_references values (NULL, 'anotherhash', 'by-id', 'img', '.png', 'image/png');
+
+select * from file_content;
+
+select * from file_references;
+
+COMMIT;
+
+-- drop table file_content;
+-- drop table file_references;
diff --git a/users/Profpatsch/cas-serve/wordlist.json b/users/Profpatsch/cas-serve/wordlist.json
new file mode 100644
index 0000000000..cc4bc62ad1
--- /dev/null
+++ b/users/Profpatsch/cas-serve/wordlist.json
@@ -0,0 +1 @@
+ [ "acrobat", "africa", "alaska", "albert", "albino", "album", "alcohol", "alex", "alpha", "amadeus", "amanda", "amazon", "america", "analog", "animal", "antenna", "antonio", "apollo", "april", "aroma", "artist", "aspirin", "athlete", "atlas", "banana", "bandit", "banjo", "bikini", "bingo", "bonus", "camera", "canada", "carbon", "casino", "catalog", "cinema", "citizen", "cobra", "comet", "compact", "complex", "context", "credit", "critic", "crystal", "culture", "david", "delta", "dialog", "diploma", "doctor", "domino", "dragon", "drama", "extra", "fabric", "final", "focus", "forum", "galaxy", "gallery", "global", "harmony", "hotel", "humor", "index", "japan", "kilo", "lemon", "liter", "lotus", "mango", "melon", "menu", "meter", "metro", "mineral", "model", "music", "object", "piano", "pirate", "plastic", "radio", "report", "signal", "sport", "studio", "subject", "super", "tango", "taxi", "tempo", "tennis", "textile", "tokyo", "total", "tourist", "video", "visa", "academy", "alfred", "atlanta", "atomic", "barbara", "bazaar", "brother", "budget", "cabaret", "cadet", "candle", "capsule", "caviar", "channel", "chapter", "circle", "cobalt", "comrade", "condor", "crimson", "cyclone", "darwin", "declare", "denver", "desert", "divide", "dolby", "domain", "double", "eagle", "echo", "eclipse", "editor", "educate", "edward", "effect", "electra", "emerald", "emotion", "empire", "eternal", "evening", "exhibit", "expand", "explore", "extreme", "ferrari", "forget", "freedom", "friday", "fuji", "galileo", "genesis", "gravity", "habitat", "hamlet", "harlem", "helium", "holiday", "hunter", "ibiza", "iceberg", "imagine", "infant", "isotope", "jackson", "jamaica", "jasmine", "java", "jessica", "kitchen", "lazarus", "letter", "license", "lithium", "loyal", "lucky", "magenta", "manual", "marble", "maxwell", "mayor", "monarch", "monday", "money", "morning", "mother", "mystery", "native", "nectar", "nelson", "network", "nikita", "nobel", "nobody", "nominal", "norway", "nothing", "number", "october", "office", "oliver", "opinion", "option", "order", "outside", "package", "pandora", "panther", "papa", "pattern", "pedro", "pencil", "people", "phantom", "philips", "pioneer", "pluto", "podium", "portal", "potato", "process", "proxy", "pupil", "python", "quality", "quarter", "quiet", "rabbit", "radical", "radius", "rainbow", "ramirez", "ravioli", "raymond", "respect", "respond", "result", "resume", "richard", "river", "roger", "roman", "rondo", "sabrina", "salary", "salsa", "sample", "samuel", "saturn", "savage", "scarlet", "scorpio", "sector", "serpent", "shampoo", "sharon", "silence", "simple", "society", "sonar", "sonata", "soprano", "sparta", "spider", "sponsor", "abraham", "action", "active", "actor", "adam", "address", "admiral", "adrian", "agenda", "agent", "airline", "airport", "alabama", "aladdin", "alarm", "algebra", "alibi", "alice", "alien", "almond", "alpine", "amber", "amigo", "ammonia", "analyze", "anatomy", "angel", "annual", "answer", "apple", "archive", "arctic", "arena", "arizona", "armada", "arnold", "arsenal", "arthur", "asia", "aspect", "athena", "audio", "august", "austria", "avenue", "average", "axiom", "aztec", "bagel", "baker", "balance", "ballad", "ballet", "bambino", "bamboo", "baron", "basic", "basket", "battery", "belgium", "benefit", "berlin", "bermuda", "bernard", "bicycle", "binary", "biology", "bishop", "blitz", "block", "blonde", "bonjour", "boris", "boston", "bottle", "boxer", "brandy", "bravo", "brazil", "bridge", "british", "bronze", "brown", "bruce", "bruno", "brush", "burger", 
"burma", "cabinet", "cactus", "cafe", "cairo", "calypso", "camel", "campus", "canal", "cannon", "canoe", "cantina", "canvas", "canyon", "capital", "caramel", "caravan", "career", "cargo", "carlo", "carol", "carpet", "cartel", "cartoon", "castle", "castro", "cecilia", "cement", "center", "century", "ceramic", "chamber", "chance", "change", "chaos", "charlie", "charm", "charter", "cheese", "chef", "chemist", "cherry", "chess", "chicago", "chicken", "chief", "china", "cigar", "circus", "city", "clara", "classic", "claudia", "clean", "client", "climax", "clinic", "clock", "club", "cockpit", "coconut", "cola", "collect", "colombo", "colony", "color", "combat", "comedy", "command", "company", "concert", "connect", "consul", "contact", "contour", "control", "convert", "copy", "corner", "corona", "correct", "cosmos", "couple", "courage", "cowboy", "craft", "crash", "cricket", "crown", "cuba", "dallas", "dance", "daniel", "decade", "decimal", "degree", "delete", "deliver", "delphi", "deluxe", "demand", "demo", "denmark", "derby", "design", "detect", "develop", "diagram", "diamond", "diana", "diego", "diesel", "diet", "digital", "dilemma", "direct", "disco", "disney", "distant", "dollar", "dolphin", "donald", "drink", "driver", "dublin", "duet", "dynamic", "earth", "east", "ecology", "economy", "edgar", "egypt", "elastic", "elegant", "element", "elite", "elvis", "email", "empty", "energy", "engine", "english", "episode", "equator", "escape", "escort", "ethnic", "europe", "everest", "evident", "exact", "example", "exit", "exotic", "export", "express", "factor", "falcon", "family", "fantasy", "fashion", "fiber", "fiction", "fidel", "fiesta", "figure", "film", "filter", "finance", "finish", "finland", "first", "flag", "flash", "florida", "flower", "fluid", "flute", "folio", "ford", "forest", "formal", "formula", "fortune", "forward", "fragile", "france", "frank", "fresh", "friend", "frozen", "future", "gabriel", "gamma", "garage", "garcia", "garden", "garlic", "gemini", "general", "genetic", "genius", "germany", "gloria", "gold", "golf", "gondola", "gong", "good", "gordon", "gorilla", "grand", "granite", "graph", "green", "group", "guide", "guitar", "guru", "hand", "happy", "harbor", "harvard", "havana", "hawaii", "helena", "hello", "henry", "hilton", "history", "horizon", "house", "human", "icon", "idea", "igloo", "igor", "image", "impact", "import", "india", "indigo", "input", "insect", "instant", "iris", "italian", "jacket", "jacob", "jaguar", "janet", "jargon", "jazz", "jeep", "john", "joker", "jordan", "judo", "jumbo", "june", "jungle", "junior", "jupiter", "karate", "karma", "kayak", "kermit", "king", "koala", "korea", "labor", "lady", "lagoon", "laptop", "laser", "latin", "lava", "lecture", "left", "legal", "level", "lexicon", "liberal", "libra", "lily", "limbo", "limit", "linda", "linear", "lion", "liquid", "little", "llama", "lobby", "lobster", "local", "logic", "logo", "lola", "london", "lucas", "lunar", "machine", "macro", "madam", "madonna", "madrid", "maestro", "magic", "magnet", "magnum", "mailbox", "major", "mama", "mambo", "manager", "manila", "marco", "marina", "market", "mars", "martin", "marvin", "mary", "master", "matrix", "maximum", "media", "medical", "mega", "melody", "memo", "mental", "mentor", "mercury", "message", "metal", "meteor", "method", "mexico", "miami", "micro", "milk", "million", "minimum", "minus", "minute", "miracle", "mirage", "miranda", "mister", "mixer", "mobile", "modem", "modern", "modular", "moment", "monaco", "monica", "monitor", "mono", "monster", "montana", 
"morgan", "motel", "motif", "motor", "mozart", "multi", "museum", "mustang", "natural", "neon", "nepal", "neptune", "nerve", "neutral", "nevada", "news", "next", "ninja", "nirvana", "normal", "nova", "novel", "nuclear", "numeric", "nylon", "oasis", "observe", "ocean", "octopus", "olivia", "olympic", "omega", "opera", "optic", "optimal", "orange", "orbit", "organic", "orient", "origin", "orlando", "oscar", "oxford", "oxygen", "ozone", "pablo", "pacific", "pagoda", "palace", "pamela", "panama", "pancake", "panda", "panel", "panic", "paradox", "pardon", "paris", "parker", "parking", "parody", "partner", "passage", "passive", "pasta", "pastel", "patent", "patient", "patriot", "patrol", "pegasus", "pelican", "penguin", "pepper", "percent", "perfect", "perfume", "period", "permit", "person", "peru", "phone", "photo", "picasso", "picnic", "picture", "pigment", "pilgrim", "pilot", "pixel", "pizza", "planet", "plasma", "plaza", "pocket", "poem", "poetic", "poker", "polaris", "police", "politic", "polo", "polygon", "pony", "popcorn", "popular", "postage", "precise", "prefix", "premium", "present", "price", "prince", "printer", "prism", "private", "prize", "product", "profile", "program", "project", "protect", "proton", "public", "pulse", "puma", "pump", "pyramid", "queen", "radar", "ralph", "random", "rapid", "rebel", "record", "recycle", "reflex", "reform", "regard", "regular", "relax", "reptile", "reverse", "ricardo", "right", "ringo", "risk", "ritual", "robert", "robot", "rocket", "rodeo", "romeo", "royal", "russian", "safari", "salad", "salami", "salmon", "salon", "salute", "samba", "sandra", "santana", "sardine", "school", "scoop", "scratch", "screen", "script", "scroll", "second", "secret", "section", "segment", "select", "seminar", "senator", "senior", "sensor", "serial", "service", "shadow", "sharp", "sheriff", "shock", "short", "shrink", "sierra", "silicon", "silk", "silver", "similar", "simon", "single", "siren", "slang", "slogan", "smart", "smoke", "snake", "social", "soda", "solar", "solid", "solo", "sonic", "source", "soviet", "special", "speed", "sphere", "spiral", "spirit", "spring", "static", "status", "stereo", "stone", "stop", "street", "strong", "student", "style", "sultan", "susan", "sushi", "suzuki", "switch", "symbol", "system", "tactic", "tahiti", "talent", "tarzan", "telex", "texas", "theory", "thermos", "tiger", "titanic", "tomato", "topic", "tornado", "toronto", "torpedo", "totem", "tractor", "traffic", "transit", "trapeze", "travel", "tribal", "trick", "trident", "trilogy", "tripod", "tropic", "trumpet", "tulip", "tuna", "turbo", "twist", "ultra", "uniform", "union", "uranium", "vacuum", "valid", "vampire", "vanilla", "vatican", "velvet", "ventura", "venus", "vertigo", "veteran", "victor", "vienna", "viking", "village", "vincent", "violet", "violin", "virtual", "virus", "vision", "visitor", "visual", "vitamin", "viva", "vocal", "vodka", "volcano", "voltage", "volume", "voyage", "water", "weekend", "welcome", "western", "window", "winter", "wizard", "wolf", "world", "xray", "yankee", "yoga", "yogurt", "yoyo", "zebra", "zero", "zigzag", "zipper", "zodiac", "zoom", "acid", "adios", "agatha", "alamo", "alert", "almanac", "aloha", "andrea", "anita", "arcade", "aurora", "avalon", "baby", "baggage", "balloon", "bank", "basil", "begin", "biscuit", "blue", "bombay", "botanic", "brain", "brenda", "brigade", "cable", "calibre", "carmen", "cello", "celtic", "chariot", "chrome", "citrus", "civil", "cloud", "combine", "common", "cool", "copper", "coral", "crater", "cubic", "cupid", 
"cycle", "depend", "door", "dream", "dynasty", "edison", "edition", "enigma", "equal", "eric", "event", "evita", "exodus", "extend", "famous", "farmer", "food", "fossil", "frog", "fruit", "geneva", "gentle", "george", "giant", "gilbert", "gossip", "gram", "greek", "grille", "hammer", "harvest", "hazard", "heaven", "herbert", "heroic", "hexagon", "husband", "immune", "inca", "inch", "initial", "isabel", "ivory", "jason", "jerome", "joel", "joshua", "journal", "judge", "juliet", "jump", "justice", "kimono", "kinetic", "leonid", "leopard", "lima", "maze", "medusa", "member", "memphis", "michael", "miguel", "milan", "mile", "miller", "mimic", "mimosa", "mission", "monkey", "moral", "moses", "mouse", "nancy", "natasha", "nebula", "nickel", "nina", "noise", "orchid", "oregano", "origami", "orinoco", "orion", "othello", "paper", "paprika", "prelude", "prepare", "pretend", "promise", "prosper", "provide", "puzzle", "remote", "repair", "reply", "rival", "riviera", "robin", "rose", "rover", "rudolf", "saga", "sahara", "scholar", "shelter", "ship", "shoe", "sigma", "sister", "sleep", "smile", "spain", "spark", "split", "spray", "square", "stadium", "star", "storm", "story", "strange", "stretch", "stuart", "subway", "sugar", "sulfur", "summer", "survive", "sweet", "swim", "table", "taboo", "target", "teacher", "telecom", "temple", "tibet", "ticket", "tina", "today", "toga", "tommy", "tower", "trivial", "tunnel", "turtle", "twin", "uncle", "unicorn", "unique", "update", "valery", "vega", "version", "voodoo", "warning", "william", "wonder", "year", "yellow", "young", "absent", "absorb", "absurd", "accent", "alfonso", "alias", "ambient", "anagram", "andy", "anvil", "appear", "apropos", "archer", "ariel", "armor", "arrow", "austin", "avatar", "axis", "baboon", "bahama", "bali", "balsa", "barcode", "bazooka", "beach", "beast", "beatles", "beauty", "before", "benny", "betty", "between", "beyond", "billy", "bison", "blast", "bless", "bogart", "bonanza", "book", "border", "brave", "bread", "break", "broken", "bucket", "buenos", "buffalo", "bundle", "button", "buzzer", "byte", "caesar", "camilla", "canary", "candid", "carrot", "cave", "chant", "child", "choice", "chris", "cipher", "clarion", "clark", "clever", "cliff", "clone", "conan", "conduct", "congo", "costume", "cotton", "cover", "crack", "current", "danube", "data", "decide", "deposit", "desire", "detail", "dexter", "dinner", "donor", "druid", "drum", "easy", "eddie", "enjoy", "enrico", "epoxy", "erosion", "except", "exile", "explain", "fame", "fast", "father", "felix", "field", "fiona", "fire", "fish", "flame", "flex", "flipper", "float", "flood", "floor", "forbid", "forever", "fractal", "frame", "freddie", "front", "fuel", "gallop", "game", "garbo", "gate", "gelatin", "gibson", "ginger", "giraffe", "gizmo", "glass", "goblin", "gopher", "grace", "gray", "gregory", "grid", "griffin", "ground", "guest", "gustav", "gyro", "hair", "halt", "harris", "heart", "heavy", "herman", "hippie", "hobby", "honey", "hope", "horse", "hostel", "hydro", "imitate", "info", "ingrid", "inside", "invent", "invest", "invite", "ivan", "james", "jester", "jimmy", "join", "joseph", "juice", "julius", "july", "kansas", "karl", "kevin", "kiwi", "ladder", "lake", "laura", "learn", "legacy", "legend", "lesson", "life", "light", "list", "locate", "lopez", "lorenzo", "love", "lunch", "malta", "mammal", "margin", "margo", "marion", "mask", "match", "mayday", "meaning", "mercy", "middle", "mike", "mirror", "modest", "morph", "morris", "mystic", "nadia", "nato", "navy", "needle", 
"neuron", "never", "newton", "nice", "night", "nissan", "nitro", "nixon", "north", "oberon", "octavia", "ohio", "olga", "open", "opus", "orca", "oval", "owner", "page", "paint", "palma", "parent", "parlor", "parole", "paul", "peace", "pearl", "perform", "phoenix", "phrase", "pierre", "pinball", "place", "plate", "plato", "plume", "pogo", "point", "polka", "poncho", "powder", "prague", "press", "presto", "pretty", "prime", "promo", "quest", "quick", "quiz", "quota", "race", "rachel", "raja", "ranger", "region", "remark", "rent", "reward", "rhino", "ribbon", "rider", "road", "rodent", "round", "rubber", "ruby", "rufus", "sabine", "saddle", "sailor", "saint", "salt", "scale", "scuba", "season", "secure", "shake", "shallow", "shannon", "shave", "shelf", "sherman", "shine", "shirt", "side", "sinatra", "sincere", "size", "slalom", "slow", "small", "snow", "sofia", "song", "sound", "south", "speech", "spell", "spend", "spoon", "stage", "stamp", "stand", "state", "stella", "stick", "sting", "stock", "store", "sunday", "sunset", "support", "supreme", "sweden", "swing", "tape", "tavern", "think", "thomas", "tictac", "time", "toast", "tobacco", "tonight", "torch", "torso", "touch", "toyota", "trade", "tribune", "trinity", "triton", "truck", "trust", "type", "under", "unit", "urban", "urgent", "user", "value", "vendor", "venice", "verona", "vibrate", "virgo", "visible", "vista", "vital", "voice", "vortex", "waiter", "watch", "wave", "weather", "wedding", "wheel", "whiskey", "wisdom", "android", "annex", "armani", "cake", "confide", "deal", "define", "dispute", "genuine", "idiom", "impress", "include", "ironic", "null", "nurse", "obscure", "prefer", "prodigy", "ego", "fax", "jet", "job", "rio", "ski", "yes" ]
diff --git a/users/Profpatsch/cas-serve/wordlist.sqlite b/users/Profpatsch/cas-serve/wordlist.sqlite
new file mode 100644
index 0000000000..5074474ba0
--- /dev/null
+++ b/users/Profpatsch/cas-serve/wordlist.sqlite
Binary files differ
diff --git a/users/Profpatsch/cdb.nix b/users/Profpatsch/cdb.nix
new file mode 100644
index 0000000000..86e0a2d58f
--- /dev/null
+++ b/users/Profpatsch/cdb.nix
@@ -0,0 +1,93 @@
+{ depot, pkgs, ... }:
+
+let
+  cdbListToNetencode = depot.nix.writers.rustSimple
+    {
+      name = "cdb-list-to-netencode";
+      dependencies = [
+        depot.third_party.rust-crates.nom
+        depot.users.Profpatsch.execline.exec-helpers
+        depot.users.Profpatsch.netencode.netencode-rs
+      ];
+    } ''
+    extern crate nom;
+    extern crate exec_helpers;
+    extern crate netencode;
+    use std::collections::HashMap;
+    use std::io::BufRead;
+    use nom::{IResult};
+    use nom::sequence::{tuple};
+    use nom::bytes::complete::{tag, take};
+    use nom::character::complete::{digit1, char};
+    use nom::error::{context, ErrorKind, ParseError};
+    use nom::combinator::{map_res};
+    use netencode::{T, Tag};
+
+    fn usize_t(s: &[u8]) -> IResult<&[u8], usize> {
+        context(
+            "usize",
+            map_res(
+                map_res(digit1, |n| std::str::from_utf8(n)),
+                |s| s.parse::<usize>())
+        )(s)
+    }
+
+    fn parse_cdb_record(s: &[u8]) -> IResult<&[u8], (&[u8], &[u8])> {
+        let (s, (_, klen, _, vlen, _)) = tuple((
+            char('+'),
+            usize_t,
+            char(','),
+            usize_t,
+            char(':')
+        ))(s)?;
+        let (s, (key, _, val)) = tuple((
+            take(klen),
+            tag("->"),
+            take(vlen),
+        ))(s)?;
+        Ok((s, (key, val)))
+    }
+
+    fn main() {
+        let mut res = vec![];
+        let stdin = std::io::stdin();
+        let mut lines = stdin.lock().split(b'\n');
+        loop {
+            match lines.next() {
+                None => exec_helpers::die_user_error("cdb-list-to-netencode", "stdin ended but we didn’t receive the empty line to signify the end of the cdbdump input!"),
+                Some(Err(err)) => exec_helpers::die_temporary("cdb-list-to-netencode", format!("could not read from stdin: {}", err)),
+                Some(Ok(line)) =>
+                    if &line == b"" {
+                        // the cdbdump input ends after an empty line (double \n)
+                        break;
+                    } else {
+                        match parse_cdb_record(&line) {
+                            Ok((b"", (key, val))) => {
+                                let (key, val) = match
+                                    std::str::from_utf8(key)
+                                    .and_then(|k| std::str::from_utf8(val).map(|v| (k, v))) {
+                                    Ok((key, val)) => (key.to_owned(), val.to_owned()),
+                                    Err(err) => exec_helpers::die_user_error("cdb-list-to-netencode", format!("cannot decode line {:?}, we only support utf8-encoded key/value pairs for now: {}", String::from_utf8_lossy(&line), err)),
+                                };
+                                let _ = res.push((key, val));
+                            },
+                            Ok((rest, _)) => exec_helpers::die_user_error("cdb-list-to-netencode", format!("could not decode record line {:?}, had trailing bytes {:?}", String::from_utf8_lossy(&line), String::from_utf8_lossy(rest))),
+                            Err(err) => exec_helpers::die_user_error("cdb-list-to-netencode", format!("could not decode record line {:?}: {:?}", String::from_utf8_lossy(&line), err)),
+                        }
+                    }
+            }
+        }
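+        // wrap everything into a netencode list of {key, val} records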
+        let list = T::List(res.into_iter().map(
+            |(k, v)| T::Record(vec![(String::from("key"), T::Text(k)), (String::from("val"), T::Text(v))].into_iter().collect())
+        ).collect());
+        netencode::encode(&mut std::io::stdout(), &list.to_u());
+    }
+
+  '';
+
+in
+{
+  inherit
+    cdbListToNetencode
+    ;
+}
diff --git a/users/Profpatsch/declib/.eslintrc.json b/users/Profpatsch/declib/.eslintrc.json
new file mode 100644
index 0000000000..9cffc711db
--- /dev/null
+++ b/users/Profpatsch/declib/.eslintrc.json
@@ -0,0 +1,14 @@
+{
+  "extends": ["eslint:recommended", "plugin:@typescript-eslint/strict-type-checked"],
+  "parser": "@typescript-eslint/parser",
+  "plugins": ["@typescript-eslint"],
+  "parserOptions": {
+    "project": true
+  },
+  "root": true,
+  "rules": {
+    "no-unused-vars": "warn",
+    "prefer-const": "warn",
+    "@typescript-eslint/no-unused-vars": "warn"
+  }
+}
diff --git a/users/Profpatsch/declib/.gitignore b/users/Profpatsch/declib/.gitignore
new file mode 100644
index 0000000000..8b56bf4ede
--- /dev/null
+++ b/users/Profpatsch/declib/.gitignore
@@ -0,0 +1,6 @@
+/node_modules/
+/.ninja/
+/output/
+
+# ignore for now
+/package.lock.json
diff --git a/users/Profpatsch/declib/.prettierrc b/users/Profpatsch/declib/.prettierrc
new file mode 100644
index 0000000000..7258fb81e0
--- /dev/null
+++ b/users/Profpatsch/declib/.prettierrc
@@ -0,0 +1,8 @@
+{
+  "trailingComma": "all",
+  "tabWidth": 2,
+  "semi": true,
+  "singleQuote": true,
+  "printWidth": 100,
+  "arrowParens": "avoid"
+}
diff --git a/users/Profpatsch/declib/README.md b/users/Profpatsch/declib/README.md
new file mode 100644
index 0000000000..11a8bf21a5
--- /dev/null
+++ b/users/Profpatsch/declib/README.md
@@ -0,0 +1,4 @@
+# Decentralized Library
+
+https://en.wikipedia.org/wiki/Distributed_library
+https://faculty.ist.psu.edu/jjansen/academic/pubs/ride98/ride98.html
diff --git a/users/Profpatsch/declib/build.ninja b/users/Profpatsch/declib/build.ninja
new file mode 100644
index 0000000000..f8844fc9be
--- /dev/null
+++ b/users/Profpatsch/declib/build.ninja
@@ -0,0 +1,16 @@
+
+builddir = .ninja
+
+outdir = ./output
+jsdir = $outdir/js
+
+rule tsc
+  command = node_modules/.bin/tsc
+
+build $outdir/index.js: tsc | index.ts tsconfig.json
+
+rule run
+  command = node $in
+
+build run: run $outdir/index.js
+  pool = console
diff --git a/users/Profpatsch/declib/index.ts b/users/Profpatsch/declib/index.ts
new file mode 100644
index 0000000000..c6a26f0922
--- /dev/null
+++ b/users/Profpatsch/declib/index.ts
@@ -0,0 +1,245 @@
+import generator, { MegalodonInterface } from 'megalodon';
+import { Account } from 'megalodon/lib/src/entities/account';
+import * as masto from 'megalodon/lib/src/entities/notification';
+import { Status } from 'megalodon/lib/src/entities/status';
+import * as rxjs from 'rxjs';
+import { Observable } from 'rxjs';
+import { NodeEventHandler } from 'rxjs/internal/observable/fromEvent';
+import * as sqlite from 'sqlite';
+import sqlite3 from 'sqlite3';
+import * as parse5 from 'parse5';
+import { mergeMap } from 'rxjs/operators';
+
+type Events =
+  | { type: 'connect'; event: [] }
+  | { type: 'update'; event: Status }
+  | { type: 'notification'; event: Notification }
+  | { type: 'delete'; event: number }
+  | { type: 'error'; event: Error }
+  | { type: 'heartbeat'; event: [] }
+  | { type: 'close'; event: [] }
+  | { type: 'parser-error'; event: Error };
+
+type Notification = masto.Notification & {
+  type: 'favourite' | 'reblog' | 'status' | 'mention' | 'poll' | 'update';
+  status: NonNullable<masto.Notification['status']>;
+  account: NonNullable<masto.Notification['account']>;
+};
+
+class Main {
+  private client: MegalodonInterface;
+  private socket: Observable<Events>;
+  private state!: State;
+  private config: {
+    databaseFile?: string;
+    baseServer: string;
+  };
+
+  private constructor() {
+    this.config = {
+      databaseFile: process.env['DECLIB_DATABASE_FILE'],
+      baseServer: process.env['DECLIB_MASTODON_SERVER'] ?? 'mastodon.xyz',
+    };
+    const ACCESS_TOKEN = process.env['DECLIB_MASTODON_ACCESS_TOKEN'];
+
+    if (!ACCESS_TOKEN) {
+      console.error('Please set DECLIB_MASTODON_ACCESS_TOKEN');
+      process.exit(1);
+    }
+    this.client = generator('mastodon', `https://${this.config.baseServer}`, ACCESS_TOKEN);
+    const websocket = this.client.publicSocket();
+    function mk<Name extends string, Type>(name: Name): Observable<{ type: Name; event: Type }> {
+      const wrap =
+        (h: NodeEventHandler) =>
+        (event: Type): void => {
+          h({ type: name, event });
+        };
+      return rxjs.fromEventPattern<{ type: Name; event: Type }>(
+        hdl => {
+          // keep a reference to the wrapper: a fresh `wrap(hdl)` closure would
+          // never compare equal in removeListener, leaking the listener
+          const wrapped = wrap(hdl);
+          websocket.on(name, wrapped);
+          return wrapped;
+        },
+        (_hdl, wrapped) => websocket.removeListener(name, wrapped),
+      );
+    }
+    this.socket = rxjs.merge(
+      mk<'connect', []>('connect'),
+      mk<'update', Status>('update'),
+      mk<'notification', Notification>('notification'),
+      mk<'delete', number>('delete'),
+      mk<'error', Error>('error'),
+      mk<'heartbeat', []>('heartbeat'),
+      mk<'close', []>('close'),
+      mk<'parser-error', Error>('parser-error'),
+    );
+  }
+
+  static async init(): Promise<Main> {
+    const self = new Main();
+    self.state = await State.init(self.config);
+    return self;
+  }
+
+  public main() {
+    // const res = await this.getAcc({ username: 'grindhold', server: 'chaos.social' });
+    // const res = await this.getAcc({ username: 'Profpatsch', server: 'mastodon.xyz' });
+    // const res = await this.getStatus('111862170899069698');
+    this.socket
+      .pipe(
+        mergeMap(async event => {
+          switch (event.type) {
+            case 'update': {
+              await this.state.addStatus(event.event);
+              console.log(`${event.event.account.acct}: ${event.event.content}`);
+              console.log(await this.state.databaseInternal.all(`SELECT * from status`));
+              break;
+            }
+            case 'notification': {
+              console.log(`NOTIFICATION (${event.event.type}):`);
+              console.log(event.event);
+              console.log(event.event.status.content);
+              const content = parseContent(event.event.status.content);
+              if (content) {
+                switch (content.command) {
+                  case 'addbook': {
+                    if (content.content[0]) {
+                      const book = {
+                        $owner: event.event.account.acct,
+                        $bookid: content.content[0],
+                      };
+                      console.log('adding book', book);
+                      await this.state.addBook(book);
+                      await this.client.postStatus(
+                        `@${event.event.account.acct} I have inserted book "${book.$bookid}" for you.`,
+                        {
+                          in_reply_to_id: event.event.status.id,
+                          visibility: 'direct',
+                        },
+                      );
+                    }
+                  }
+                }
+              }
+              break;
+            }
+            default: {
+              console.log(event);
+            }
+          }
+        }),
+      )
+      .subscribe();
+  }
+
+  private async getStatus(id: string): Promise<Status | null> {
+    return (await this.client.getStatus(id)).data;
+  }
+
+  private async getAcc(user: { username: string; server: string }): Promise<Account | null> {
+    const fullAccount = `${user.username}@${user.server}`;
+    const res = await this.client.searchAccount(fullAccount, {
+      limit: 10,
+    });
+    const accs = res.data.filter(acc =>
+      this.config.baseServer === user.server
+        ? acc.acct === user.username
+        : acc.acct === fullAccount,
+    );
+    return accs[0] ?? null;
+  }
+}
+
+type Interaction = {
+  originalStatus: { id: string };
+  lastStatus: { id: string };
+};
+
+class State {
+  db!: sqlite.Database;
+  private constructor() {}
+
+  static async init(config: { databaseFile?: string }): Promise<State> {
+    const s = new State();
+    s.db = await sqlite.open({
+      filename: config.databaseFile ?? ':memory:',
+      driver: sqlite3.Database,
+    });
+    // IF NOT EXISTS so that reopening a persistent databaseFile doesn’t fail
+    await s.db.run('CREATE TABLE IF NOT EXISTS books (owner text, bookid text)');
+    await s.db.run('CREATE TABLE IF NOT EXISTS status (id text primary key, content json)');
+    return s;
+  }
+
+  async addBook(opts: { $owner: string; $bookid: string }) {
+    return await this.db.run('INSERT INTO books (owner, bookid) VALUES ($owner, $bookid)', opts);
+  }
+
+  async addStatus($status: Status) {
+    return await this.db.run(
+      `
+      INSERT INTO status (id, content) VALUES ($id, $status)
+      ON CONFLICT (id) DO UPDATE SET id = $id, content = $status
+      `,
+      {
+        $id: $status.id,
+        $status: JSON.stringify($status),
+      },
+    );
+  }
+
+  get databaseInternal() {
+    return this.db;
+  }
+}
+
+/** Parse the message: take the plain text; the first line is the command, and any successive lines are content */
+function parseContent(html: string): { command: string; content: string[] } | null {
+  const plain = contentToPlainText(html).split('\n');
+  if (plain[0]) {
+    return { command: plain[0].replace(' ', '').trim(), content: plain.slice(1) };
+  } else {
+    return null;
+  }
+}
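+// e.g. parseContent('<p>addbook</p><p>someid</p>') (hypothetical input)
+//   returns { command: 'addbook', content: ['someid'] }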
+
+/** Convert the Html content to a plain text (best effort), keeping line breaks */
+function contentToPlainText(html: string): string {
+  let res = '';
+  // walk depth-first so that text lands in document order
+  // (pushing child nodes onto a shared queue would visit them after later
+  // siblings and merge successive paragraphs into one line)
+  const walk = (nodes: parse5.DefaultTreeAdapterMap['childNode'][]): void => {
+    for (const el of nodes) {
+      switch (el.nodeName) {
+        case '#text': {
+          res += (el as parse5.DefaultTreeAdapterMap['textNode']).value;
+          break;
+        }
+        case 'br': {
+          res += '\n';
+          break;
+        }
+        case 'p': {
+          // separate successive paragraphs with a line break
+          if (res !== '') {
+            res += '\n';
+          }
+          walk(el.childNodes);
+          break;
+        }
+        case 'span': {
+          // skip spans (e.g. mention markup) entirely
+          break;
+        }
+        default: {
+          console.warn('unknown element in message: ', el);
+          break;
+        }
+      }
+    }
+  };
+  walk(parse5.parseFragment(html).childNodes);
+  return res.trim();
+}
+
+Main.init().then(
+  m => {
+    m.main();
+  },
+  rej => {
+    throw rej;
+  },
+);
diff --git a/users/Profpatsch/declib/package.json b/users/Profpatsch/declib/package.json
new file mode 100644
index 0000000000..93176e8581
--- /dev/null
+++ b/users/Profpatsch/declib/package.json
@@ -0,0 +1,25 @@
+{
+  "name": "declib",
+  "version": "1.0.0",
+  "description": "",
+  "main": "index.ts",
+  "type": "commonjs",
+  "scripts": {
+    "run": "ninja run"
+  },
+  "author": "",
+  "license": "MIT",
+  "dependencies": {
+    "megalodon": "^9.2.2",
+    "parse5": "^7.1.2",
+    "rxjs": "^7.8.1",
+    "sqlite": "^5.1.1",
+    "sqlite3": "^5.1.7"
+  },
+  "devDependencies": {
+    "@typescript-eslint/eslint-plugin": "^6.21.0",
+    "@typescript-eslint/parser": "^6.21.0",
+    "eslint": "^8.56.0",
+    "typescript": "^5.3.3"
+  }
+}
diff --git a/users/Profpatsch/declib/tsconfig.json b/users/Profpatsch/declib/tsconfig.json
new file mode 100644
index 0000000000..b7f2f4c18b
--- /dev/null
+++ b/users/Profpatsch/declib/tsconfig.json
@@ -0,0 +1,25 @@
+{
+  "compilerOptions": {
+    "strict": true,
+    "module": "NodeNext",
+    "sourceMap": true,
+    "outDir": "output",
+    "target": "ES6",
+    "lib": [],
+    "typeRoots": ["node_modules/@types", "shims/@types"],
+    "moduleResolution": "NodeNext",
+
+    // importHelpers & downlevelIteration will reduce the generated javascript for new language features.
+    // `importHelpers` requires the `tslib` dependency.
+    // "downlevelIteration": true,
+    // "importHelpers": true
+    "noFallthroughCasesInSwitch": true,
+    "noImplicitOverride": true,
+    "noImplicitReturns": true,
+    "noPropertyAccessFromIndexSignature": true,
+    "noUncheckedIndexedAccess": true,
+
+  },
+
+  "files": ["index.ts"]
+}
diff --git a/users/Profpatsch/dhall/lib.dhall b/users/Profpatsch/dhall/lib.dhall
new file mode 100644
index 0000000000..fb97ba6070
--- /dev/null
+++ b/users/Profpatsch/dhall/lib.dhall
@@ -0,0 +1,84 @@
+let List/map
+    : ∀(a : Type) → ∀(b : Type) → (a → b) → List a → List b
+    = λ(a : Type) →
+      λ(b : Type) →
+      λ(f : a → b) →
+      λ(xs : List a) →
+        List/build
+          b
+          ( λ(list : Type) →
+            λ(cons : b → list → list) →
+              List/fold a xs list (λ(x : a) → cons (f x))
+          )
+
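+-- e.g. `List/map Natural Text Natural/show [ 2, 3, 5 ]` ≡ `[ "2", "3", "5" ]`
+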
+let
+
+    --| Concatenate a `List` of `List`s into a single `List`
+    List/concat
+    : ∀(a : Type) → List (List a) → List a
+    = λ(a : Type) →
+      λ(xss : List (List a)) →
+        List/build
+          a
+          ( λ(list : Type) →
+            λ(cons : a → list → list) →
+            λ(nil : list) →
+              List/fold
+                (List a)
+                xss
+                list
+                (λ(xs : List a) → λ(ys : list) → List/fold a xs list cons ys)
+                nil
+          )
+
+let
+
+    --| Transform a list by applying a function to each element and flattening the results
+    List/concatMap
+    : ∀(a : Type) → ∀(b : Type) → (a → List b) → List a → List b
+    = λ(a : Type) →
+      λ(b : Type) →
+      λ(f : a → List b) →
+      λ(xs : List a) →
+        List/build
+          b
+          ( λ(list : Type) →
+            λ(cons : b → list → list) →
+              List/fold a xs list (λ(x : a) → List/fold b (f x) list cons)
+          )
+
+let Status = < Empty | NonEmpty : Text >
+
+let
+
+    {-|
+    Transform each value in a `List` to `Text` and then concatenate them with a
+    separator in between each value
+    -}
+    Text/concatMapSep
+    : ∀(separator : Text) → ∀(a : Type) → (a → Text) → List a → Text
+    = λ(separator : Text) →
+      λ(a : Type) →
+      λ(f : a → Text) →
+      λ(elements : List a) →
+        let status =
+              List/fold
+                a
+                elements
+                Status
+                ( λ(x : a) →
+                  λ(status : Status) →
+                    merge
+                      { Empty = Status.NonEmpty (f x)
+                      , NonEmpty =
+                          λ(result : Text) →
+                            Status.NonEmpty (f x ++ separator ++ result)
+                      }
+                      status
+                )
+                Status.Empty
+
+        in  merge { Empty = "", NonEmpty = λ(result : Text) → result } status
+
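+-- e.g. `Text/concatMapSep ", " Natural Natural/show [ 1, 2, 3 ]` ≡ `"1, 2, 3"`
+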
+in  { List/map, List/concat, List/concatMap, Text/concatMapSep }
diff --git a/users/Profpatsch/emacs-tree-sitter-move/README.md b/users/Profpatsch/emacs-tree-sitter-move/README.md
new file mode 100644
index 0000000000..ae8d763d61
--- /dev/null
+++ b/users/Profpatsch/emacs-tree-sitter-move/README.md
@@ -0,0 +1,5 @@
+# emacs-tree-sitter-move
+
+An experiment in whether we can implement structural editing in emacs using the tree-sitter parser.
+
+What currently works: loading a tree-sitter grammar, navigating the AST left/right/up/down.
diff --git a/users/Profpatsch/emacs-tree-sitter-move/default.nix b/users/Profpatsch/emacs-tree-sitter-move/default.nix
new file mode 100644
index 0000000000..a9f259d96d
--- /dev/null
+++ b/users/Profpatsch/emacs-tree-sitter-move/default.nix
@@ -0,0 +1,3 @@
+# nothing yet (TODO: expose shell & tool)
+{ ... }:
+{ }
diff --git a/users/Profpatsch/emacs-tree-sitter-move/shell.nix b/users/Profpatsch/emacs-tree-sitter-move/shell.nix
new file mode 100644
index 0000000000..f400d5c021
--- /dev/null
+++ b/users/Profpatsch/emacs-tree-sitter-move/shell.nix
@@ -0,0 +1,17 @@
+{ pkgs ? import ../../../third_party { }, ... }:
+let
+  inherit (pkgs) lib;
+
+  treeSitterGrammars = pkgs.runCommandLocal "grammars" { } ''
+    mkdir -p $out/bin
+    ${lib.concatStringsSep "\n"
+      (lib.mapAttrsToList (name: src: "ln -s ${src}/parser $out/bin/${name}.so") pkgs.tree-sitter.builtGrammars)};
+  '';
+
+in
+pkgs.mkShell {
+  buildInputs = [
+    pkgs.tree-sitter.builtGrammars.python
+  ];
+  TREE_SITTER_GRAMMAR_DIR = treeSitterGrammars;
+}
diff --git a/users/Profpatsch/emacs-tree-sitter-move/test.json b/users/Profpatsch/emacs-tree-sitter-move/test.json
new file mode 100644
index 0000000000..d9f8075976
--- /dev/null
+++ b/users/Profpatsch/emacs-tree-sitter-move/test.json
@@ -0,0 +1,14 @@
+{
+    "foo": {
+        "x": [ 1, 2, 3, 4 ],
+        "bar": "test"
+    },
+    "foo": {
+        "x": [ 1, 2, 3, 4 ],
+        "bar": "test"
+    },
+    "foo": {
+        "x": [ 1, 2, 3, 4 ],
+        "bar": "test"
+    }
+}
diff --git a/users/Profpatsch/emacs-tree-sitter-move/test.py b/users/Profpatsch/emacs-tree-sitter-move/test.py
new file mode 100644
index 0000000000..0f57bae035
--- /dev/null
+++ b/users/Profpatsch/emacs-tree-sitter-move/test.py
@@ -0,0 +1,13 @@
+(4 + 5 + 5)
+
+def foo(a, b, c)
+
+def bar(a, b):
+    4
+    4
+    4
+
+[1, 4, 5, 10]
+
+def foo():
+    pass
diff --git a/users/Profpatsch/emacs-tree-sitter-move/test.sh b/users/Profpatsch/emacs-tree-sitter-move/test.sh
new file mode 100644
index 0000000000..681081f590
--- /dev/null
+++ b/users/Profpatsch/emacs-tree-sitter-move/test.sh
@@ -0,0 +1,14 @@
+function foo () {
+    local x=123
+}
+
+function bar () {
+    local x=123
+}
+
+echo abc def \
+     gef gef
+
+printf \
+    "%s\n" \
+    haha
diff --git a/users/Profpatsch/emacs-tree-sitter-move/tmp.el b/users/Profpatsch/emacs-tree-sitter-move/tmp.el
new file mode 100644
index 0000000000..88d13fa45b
--- /dev/null
+++ b/users/Profpatsch/emacs-tree-sitter-move/tmp.el
@@ -0,0 +1,28 @@
+(defun tree-sitter-load-from-grammar-dir (grammar-dir sym lang-name)
+  (tree-sitter-load
+   sym
+   (format "%s/bin/%s"
+           (getenv grammar-dir)
+           lang-name)))
+
+(defun tree-sitter-init-tmp-langs (alist)
+  (mapcar
+   (lambda (lang)
+     (pcase-let ((`(,name ,sym ,mode) lang))
+       (tree-sitter-load-from-grammar-dir "TREE_SITTER_GRAMMAR_DIR" sym name)
+       (cons mode sym)))
+   alist))
+
+
+(setq tree-sitter-major-mode-language-alist
+      (tree-sitter-init-tmp-langs
+       '(("python" python python-mode)
+         ("json" json js-mode)
+         ("bash" bash sh-mode)
+         )))
+
+(define-key evil-normal-state-map (kbd "C-.") #'tree-sitter-move-reset)
+(define-key evil-normal-state-map (kbd "C-<right>") #'tree-sitter-move-right)
+(define-key evil-normal-state-map (kbd "C-<left>") #'tree-sitter-move-left)
+(define-key evil-normal-state-map (kbd "C-<up>") #'tree-sitter-move-up)
+(define-key evil-normal-state-map (kbd "C-<down>") #'tree-sitter-move-down)
diff --git a/users/Profpatsch/emacs-tree-sitter-move/tree-sitter-move.el b/users/Profpatsch/emacs-tree-sitter-move/tree-sitter-move.el
new file mode 100644
index 0000000000..907e1e4081
--- /dev/null
+++ b/users/Profpatsch/emacs-tree-sitter-move/tree-sitter-move.el
@@ -0,0 +1,139 @@
+;; this is not an actual cursor, just a node.
+;; It’s not super efficient, but cursors can’t be *set* to an arbitrary
+;; subnode, because they can’t access the parent otherwise.
+;; We’d need a way to reset the cursor and walk down to the node?!
+(defvar-local tree-sitter-move--cursor nil
+  "the buffer-local cursor used for movement")
+
+(defvar-local tree-sitter-move--debug-overlay nil
+  "an overlay used to visually display the region currently marked by the cursor")
+
+;;;;; TODO: should everything use named nodes? Only some things?
+;;;;; maybe there should be a pair of functions for everything?
+;;;;; For now restrict to named nodes.
+
+(defun tree-sitter-move--setup ()
+  ;; TODO
+  (progn
+    ;; TODO: if tree-sitter-mode fails to load, display a better error
+    (tree-sitter-mode t)
+    (setq tree-sitter-move--cursor (tsc-root-node tree-sitter-tree))
+    (add-variable-watcher
+     'tree-sitter-move--cursor
+     #'tree-sitter-move--debug-overlay-update)))
+
+(defun tree-sitter-move--debug-overlay-update (sym newval &rest _args)
+  "variable-watcher to update the debug overlay when the cursor changes"
+  (let ((start (tsc-node-start-position newval))
+        (end (tsc-node-end-position newval)))
+    (symbol-macrolet ((o tree-sitter-move--debug-overlay))
+      (if o
+          (move-overlay o start end)
+        (setq o (make-overlay start end))
+        (overlay-put o 'face 'highlight)
+        ))))
+
+(defun tree-sitter-move--debug-overlay-teardown ()
+  "Turn of the overlay visibility and delete the overlay object"
+  (when tree-sitter-move--debug-overlay
+    (delete-overlay tree-sitter-move--debug-overlay)
+    (setq tree-sitter-move--debug-overlay nil)))
+
+(defun tree-sitter-move--teardown ()
+  (setq tree-sitter-move--cursor nil)
+  (tree-sitter-move--debug-overlay-teardown)
+  (tree-sitter-mode nil))
+
+;; Get the syntax node the cursor is on.
+(defun tsc-get-named-node-at-point ()
+  (let ((p (point)))
+    (tsc-get-named-descendant-for-position-range
+     (tsc-root-node tree-sitter-tree) p p)))
+
+;; TODO: is this function necessary?
+;; Maybe tree-sitter always guarantees that parents are named?
+(defun tsc-get-named-parent (node)
+  (when-let ((parent (tsc-get-parent node)))
+    (while (and parent (not (tsc-node-named-p parent)))
+      (setq parent (tsc-get-parent parent)))
+    parent))
+
+(defun tsc-get-first-named-node-with-siblings-up (node)
+  "Returns the first 'upwards' node that has siblings. That includes the current
+  node, so if the given node has siblings, it is returned. Returns nil if there
+  is no such node until the root"
+  (when-let ((has-siblings-p
+              (lambda (parent-node)
+                (> (tsc-count-named-children parent-node)
+                   1)))
+             (cur node)
+             (parent (tsc-get-named-parent node)))
+    (while (and parent (not (funcall has-siblings-p parent)))
+      (setq cur parent)
+      (setq parent (tsc-get-named-parent cur)))
+    cur))
+
+(defun tree-sitter-move--set-cursor-to-node (node)
+  (setq tree-sitter-move--cursor node))
+
+(defun tree-sitter-move--set-cursor-to-node-at-point ()
+  (tree-sitter-move--set-cursor-to-node (tsc-get-named-node-at-point)))
+
+(defun tree-sitter-move--move-point-to-node (node)
+  (set-window-point
+    (selected-window)
+    (tsc-node-start-position node)))
+
+
+;; interactive commands (“do what I expect” section)
+
+(defun tree-sitter-move-reset ()
+  (interactive)
+  (tree-sitter-move--set-cursor-to-node-at-point))
+
+(defun tree-sitter-move-right ()
+  (interactive)
+  (tree-sitter-move--move-skip-non-sibling-nodes 'tsc-get-next-named-sibling))
+
+(defun tree-sitter-move-left ()
+  (interactive)
+  (tree-sitter-move--move-skip-non-sibling-nodes 'tsc-get-prev-named-sibling))
+
+(defun tree-sitter-move-up ()
+  (interactive)
+  (tree-sitter-move--move-skip-non-sibling-nodes 'tsc-get-parent))
+
+;; TODO: does not skip siblings yet, because the skip function only goes up (not down)
+(defun tree-sitter-move-down ()
+  (interactive)
+  (tree-sitter-move--move-if-possible (lambda (n) (tsc-get-nth-named-child n 0))))
+
+(defun tree-sitter-move--move-skip-non-sibling-nodes (move-fn)
+  "Moves to the sidewards next sibling. If the current node does not have siblings, go
+  upwards until something has siblings and then move to the side (right or left)."
+  (tree-sitter-move--move-if-possible
+   (lambda (cur)
+     (when-let ((with-siblings
+                 (tsc-get-first-named-node-with-siblings-up cur)))
+       (funcall move-fn with-siblings)))))
+
+(defun tree-sitter-move--move-if-possible (dir-fn)
+  (let ((next (funcall dir-fn tree-sitter-move--cursor)))
+    (when next
+      (tree-sitter-move--set-cursor-to-node next)
+      (tree-sitter-move--move-point-to-node next))))
+
+; mostly stolen from tree-sitter-mode
+;;;###autoload
+(define-minor-mode tree-sitter-move-mode
+  "Minor mode to do cursor movements via tree-sitter"
+  :init-value nil
+  :lighter " tree-sitter-move"
+  (if tree-sitter-move-mode
+      (tree-sitter--error-protect
+          (progn
+            (tree-sitter-move--setup))
+        (setq tree-sitter-move-mode nil)
+        (tree-sitter-move--teardown))
+    (lambda ())
+    (tree-sitter-move--teardown)))
diff --git a/users/Profpatsch/exactSource.nix b/users/Profpatsch/exactSource.nix
new file mode 100644
index 0000000000..5c713b5b1c
--- /dev/null
+++ b/users/Profpatsch/exactSource.nix
@@ -0,0 +1,90 @@
+{ ... }:
+# SPDX-License-Identifier: MIT
+# Created by Graham Christensen
+# version from https://github.com/grahamc/mayday/blob/c48f7583e622fe2e695a2a929de34679e5818816/exact-source.nix
+
+let
+  # Require that every path specified does exist.
+  #
+  # By default, Nix won't complain if you refer to a missing file
+  # if you don't actually use it:
+  #
+  #     nix-repl> ./bogus
+  #     /home/grahamc/playground/bogus
+  #
+  #     nix-repl> toString ./bogus
+  #     "/home/grahamc/playground/bogus"
+  #
+  # so in order for this interface to be *exact*, we must
+  # specifically require every provided path exists:
+  #
+  #     nix-repl> "${./bogus}"
+  #     error: getting attributes of path
+  #     '/home/grahamc/playground/bogus': No such file or
+  #     directory
+  requireAllPathsExist = paths:
+    let
+      validation = builtins.map (path: "${path}") paths;
+    in
+    builtins.deepSeq validation paths;
+
+  # Break down a given path in to a list of all of the path and
+  # its parent directories.
+  #
+  # `builtins.path` / `builtins.filterSource` will ask about
+  # a containing directory, and we must say YES otherwise it will
+  # not include anything below it.
+  #
+  # Concretely, convert: "/foo/baz/tux" in to:
+  #     [ "/foo/baz/tux" "/foo/baz" "/foo" ]
+  recursivelyPopDir = path:
+    if path == "/" then [ ]
+    else [ path ] ++ (recursivelyPopDir (builtins.dirOf path));
+
+  # Given a list of of strings, dedup the list and return a
+  # list of all unique strings.
+  #
+  # Note: only works on strings ;):
+  #
+  # First convert [ "foo" "foo" "bar" ] in to:
+  #     [
+  #       { name = "foo"; value = ""; }
+  #       { name = "foo"; value = ""; }
+  #       { name = "bar"; value = ""; }
+  #     ]
+  # then convert that to { "foo" = ""; "bar" = ""; }
+  # then get the attribute names, "foo" and "bar".
+  dedup = strings:
+    let
+      name_value_pairs = builtins.map
+        (string: { name = string; value = ""; })
+        strings;
+      attrset_of_strings = builtins.listToAttrs name_value_pairs;
+    in
+    builtins.attrNames attrset_of_strings;
+
+  exactSource = source_root: paths:
+    let
+      all_possible_paths =
+        let
+          # Convert all the paths in to relative paths on disk.
+          # ie: stringPaths will contain [ "/home/grahamc/playground/..." ];
+          # instead of /nix/store paths.
+          string_paths = builtins.map toString
+            (requireAllPathsExist paths);
+
+          all_paths_with_duplicates = builtins.concatMap
+            recursivelyPopDir
+            string_paths;
+        in
+        dedup all_paths_with_duplicates;
+
+      pathIsSpecified = path:
+        builtins.elem path all_possible_paths;
+    in
+    builtins.path {
+      path = source_root;
+      filter = (path: _type: pathIsSpecified path);
+    };
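+
+  # Usage sketch (cf. execline/default.nix further down):
+  #   exactSource ./. [ ./exec-helpers.cabal ./ExecHelpers.hs ]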
+in
+exactSource
diff --git a/users/Profpatsch/execline/ExecHelpers.hs b/users/Profpatsch/execline/ExecHelpers.hs
new file mode 100644
index 0000000000..438047b2b9
--- /dev/null
+++ b/users/Profpatsch/execline/ExecHelpers.hs
@@ -0,0 +1,48 @@
+{-# LANGUAGE DerivingStrategies #-}
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE QuasiQuotes #-}
+{-# LANGUAGE TypeApplications #-}
+
+module ExecHelpers where
+
+import Data.String (IsString)
+import MyPrelude
+import qualified System.Exit as Sys
+
+newtype CurrentProgramName = CurrentProgramName { unCurrentProgramName :: Text }
+  deriving newtype (Show, Eq, Ord, IsString)
+
+-- | Exit 1 to signify a generic expected error
+-- (e.g. something that sometimes just goes wrong, like a nix build).
+dieExpectedError :: CurrentProgramName -> Text -> IO a
+dieExpectedError = dieWith 1
+
+-- | Exit 100 to signify a user error (“the user is holding it wrong”).
+--  This is a permanent error, if the program is executed the same way
+-- it should crash with 100 again.
+dieUserError :: CurrentProgramName -> Text -> IO a
+dieUserError = dieWith 100
+
+-- | Exit 101 to signify an unexpected crash (failing assertion or panic).
+diePanic :: CurrentProgramName -> Text -> IO a
+diePanic = dieWith 101
+
+-- | Exit 111 to signify a temporary error (such as resource exhaustion)
+dieTemporary :: CurrentProgramName -> Text -> IO a
+dieTemporary = dieWith 111
+
+-- | Exit 126 to signify an environment problem
+-- (the user has set up stuff incorrectly so the program cannot work)
+dieEnvironmentProblem :: CurrentProgramName -> Text -> IO a
+dieEnvironmentProblem = dieWith 126
+
+-- | Exit 127 to signify a missing executable.
+dieMissingExecutable :: CurrentProgramName -> Text -> IO a
+dieMissingExecutable = dieWith 127
+
+dieWith :: Natural -> CurrentProgramName -> Text -> IO a
+dieWith status currentProgramName msg = do
+  putStderrLn [fmt|{currentProgramName & unCurrentProgramName}: {msg}|]
+  Sys.exitWith
+    (Sys.ExitFailure (status & fromIntegral @Natural @Int))
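+
+-- Example (hypothetical caller):
+--
+-- > dieUserError "my-tool" "expected exactly one argument"
+--
+-- prints “my-tool: expected exactly one argument” to stderr and exits with status 100.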
diff --git a/users/Profpatsch/execline/default.nix b/users/Profpatsch/execline/default.nix
new file mode 100644
index 0000000000..04d07895c6
--- /dev/null
+++ b/users/Profpatsch/execline/default.nix
@@ -0,0 +1,70 @@
+{ depot, pkgs, lib, ... }:
+
+let
+  exec-helpers-hs = pkgs.haskellPackages.mkDerivation {
+    pname = "exec-helpers";
+    version = "0.1.0";
+
+    src = depot.users.Profpatsch.exactSource ./. [
+      ./exec-helpers.cabal
+      ./ExecHelpers.hs
+    ];
+
+    libraryHaskellDepends = [
+      depot.users.Profpatsch.my-prelude
+    ];
+
+    isLibrary = true;
+    license = lib.licenses.mit;
+  };
+
+  print-one-env = depot.nix.writers.rustSimple
+    {
+      name = "print-one-env";
+      dependencies = [
+        depot.users.Profpatsch.execline.exec-helpers
+      ];
+    } ''
+    extern crate exec_helpers;
+    use std::os::unix::ffi::OsStrExt;
+    use std::io::Write;
+
+    fn main() {
+      let args = exec_helpers::args("print-one-env", 1);
+      let valname = std::ffi::OsStr::from_bytes(&args[0]);
+      match std::env::var_os(&valname) {
+        None => exec_helpers::die_user_error("print-one-env", format!("Env variable `{:?}` is not set", valname)),
+        Some(val) => std::io::stdout().write_all(&val.as_bytes()).unwrap()
+      }
+    }
+  '';
+
+  setsid = depot.nix.writers.rustSimple
+    {
+      name = "setsid";
+      dependencies = [
+        depot.users.Profpatsch.execline.exec-helpers
+        depot.third_party.rust-crates.libc
+      ];
+    } ''
+    use std::os::unix::ffi::OsStrExt;
+    use std::ffi::OsStr;
+
+    fn main() {
+      let (args, prog) = exec_helpers::args_for_exec("setsid", 1);
+      let envvar = OsStr::from_bytes(&args.get(0).expect("first argument must be envvar name to set"));
+      let sid: i32 = unsafe { libc::setsid() };
+      std::env::set_var(envvar, format!("{}", sid));
+      let env: Vec<(&[u8], &[u8])> = vec![];
+      exec_helpers::exec_into_args("getid", prog, env);
+    }
+  '';
+
+in
+depot.nix.readTree.drvTargets {
+  inherit
+    exec-helpers-hs
+    print-one-env
+    setsid
+    ;
+}
diff --git a/users/Profpatsch/execline/exec-helpers.cabal b/users/Profpatsch/execline/exec-helpers.cabal
new file mode 100644
index 0000000000..b472ff6bd5
--- /dev/null
+++ b/users/Profpatsch/execline/exec-helpers.cabal
@@ -0,0 +1,14 @@
+cabal-version:      3.0
+name:               exec-helpers
+version:            0.1.0.0
+author:             Profpatsch
+maintainer:         mail@profpatsch.de
+
+library
+    exposed-modules:          ExecHelpers
+
+    build-depends:
+        base >=4.15 && <5,
+        my-prelude
+
+    default-language: Haskell2010
diff --git a/users/Profpatsch/execline/exec-helpers/Cargo.lock b/users/Profpatsch/execline/exec-helpers/Cargo.lock
new file mode 100644
index 0000000000..1753cc949d
--- /dev/null
+++ b/users/Profpatsch/execline/exec-helpers/Cargo.lock
@@ -0,0 +1,7 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "exec_helpers"
+version = "0.1.0"
diff --git a/users/Profpatsch/execline/exec-helpers/Cargo.toml b/users/Profpatsch/execline/exec-helpers/Cargo.toml
new file mode 100644
index 0000000000..6642b66ee3
--- /dev/null
+++ b/users/Profpatsch/execline/exec-helpers/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "exec_helpers"
+version = "0.1.0"
+edition = "2021"
+
+[lib]
+name = "exec_helpers"
+path = "exec_helpers.rs"
diff --git a/users/Profpatsch/execline/exec-helpers/default.nix b/users/Profpatsch/execline/exec-helpers/default.nix
new file mode 100644
index 0000000000..5545d41d9d
--- /dev/null
+++ b/users/Profpatsch/execline/exec-helpers/default.nix
@@ -0,0 +1,6 @@
+{ depot, ... }:
+depot.nix.writers.rustSimpleLib
+{
+  name = "exec-helpers";
+}
+  (builtins.readFile ./exec_helpers.rs)
diff --git a/users/Profpatsch/execline/exec-helpers/exec_helpers.rs b/users/Profpatsch/execline/exec-helpers/exec_helpers.rs
new file mode 100644
index 0000000000..a57cbca353
--- /dev/null
+++ b/users/Profpatsch/execline/exec-helpers/exec_helpers.rs
@@ -0,0 +1,149 @@
+use std::ffi::OsStr;
+use std::os::unix::ffi::{OsStrExt, OsStringExt};
+use std::os::unix::process::CommandExt;
+
+pub fn no_args(current_prog_name: &str) -> () {
+    let mut args = std::env::args_os();
+    // remove argv[0]
+    let _ = args.nth(0);
+    if args.len() > 0 {
+        die_user_error(
+            current_prog_name,
+            format!("Expected no arguments, got {:?}", args.collect::<Vec<_>>()),
+        )
+    }
+}
+
+pub fn args(current_prog_name: &str, no_of_positional_args: usize) -> Vec<Vec<u8>> {
+    let mut args = std::env::args_os();
+    // remove argv[0]
+    let _ = args.nth(0);
+    if args.len() != no_of_positional_args {
+        die_user_error(
+            current_prog_name,
+            format!(
+                "Expected {} arguments, got {}, namely {:?}",
+                no_of_positional_args,
+                args.len(),
+                args.collect::<Vec<_>>()
+            ),
+        )
+    }
+    args.map(|arg| arg.into_vec()).collect()
+}
+
+pub fn args_for_exec(
+    current_prog_name: &str,
+    no_of_positional_args: usize,
+) -> (Vec<Vec<u8>>, Vec<Vec<u8>>) {
+    let mut args = std::env::args_os();
+    // remove argv[0]
+    let _ = args.nth(0);
+    let mut args = args.map(|arg| arg.into_vec());
+    let mut pos_args = vec![];
+    // get positional args
+    for i in 1..no_of_positional_args + 1 {
+        pos_args.push(args.nth(0).expect(&format!(
+            "{}: expects {} positional args, only got {}",
+            current_prog_name, no_of_positional_args, i
+        )));
+    }
+    // prog... is the rest of the iterator
+    let prog: Vec<Vec<u8>> = args.collect();
+    (pos_args, prog)
+}
+
+pub fn exec_into_args<'a, 'b, Args, Arg, Env, Key, Val>(
+    current_prog_name: &str,
+    args: Args,
+    env_additions: Env,
+) -> !
+where
+    Args: IntoIterator<Item = Arg>,
+    Arg: AsRef<[u8]>,
+    Env: IntoIterator<Item = (Key, Val)>,
+    Key: AsRef<[u8]>,
+    Val: AsRef<[u8]>,
+{
+    // TODO: is this possible without collecting into a Vec first, just leaving it an IntoIterator?
+    let args = args.into_iter().collect::<Vec<Arg>>();
+    let mut args = args.iter().map(|v| OsStr::from_bytes(v.as_ref()));
+    let prog = args.nth(0).expect(&format!(
+        "{}: first argument must be an executable",
+        current_prog_name
+    ));
+    // TODO: same here
+    let env = env_additions.into_iter().collect::<Vec<(Key, Val)>>();
+    let env = env
+        .iter()
+        .map(|(k, v)| (OsStr::from_bytes(k.as_ref()), OsStr::from_bytes(v.as_ref())));
+    let err = std::process::Command::new(prog).args(args).envs(env).exec();
+    die_missing_executable(
+        current_prog_name,
+        format!(
+            "exec failed: {}, while trying to execing into {:?}",
+            err, prog
+        ),
+    );
+}
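+
+// Example (hypothetical wrapper, in the spirit of the `setsid` writer above):
+// set an env var, then exec into the rest of argv:
+//
+//     let (_, prog) = exec_helpers::args_for_exec("my-wrapper", 0);
+//     exec_helpers::exec_into_args("my-wrapper", prog, vec![("MY_VAR", "1")]);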
+
+/// Exit 1 to signify a generic expected error
+/// (e.g. something that sometimes just goes wrong, like a nix build).
+pub fn die_expected_error<S>(current_prog_name: &str, msg: S) -> !
+where
+    S: AsRef<str>,
+{
+    die_with(1, current_prog_name, msg)
+}
+
+/// Exit 100 to signify a user error (“the user is holding it wrong”).
+/// This is a permanent error, if the program is executed the same way
+/// it should crash with 100 again.
+pub fn die_user_error<S>(current_prog_name: &str, msg: S) -> !
+where
+    S: AsRef<str>,
+{
+    die_with(100, current_prog_name, msg)
+}
+
+/// Exit 101 to signify an unexpected crash (failing assertion or panic).
+/// This is the same exit code that `panic!()` emits.
+pub fn die_panic<S>(current_prog_name: &str, msg: S) -> !
+where
+    S: AsRef<str>,
+{
+    die_with(101, current_prog_name, msg)
+}
+
+/// Exit 111 to signify a temporary error (such as resource exhaustion)
+pub fn die_temporary<S>(current_prog_name: &str, msg: S) -> !
+where
+    S: AsRef<str>,
+{
+    die_with(111, current_prog_name, msg)
+}
+
+/// Exit 126 to signify an environment problem
+/// (the user has set up stuff incorrectly so the program cannot work)
+pub fn die_environment_problem<S>(current_prog_name: &str, msg: S) -> !
+where
+    S: AsRef<str>,
+{
+    die_with(126, current_prog_name, msg)
+}
+
+/// Exit 127 to signify a missing executable.
+pub fn die_missing_executable<S>(current_prog_name: &str, msg: S) -> !
+where
+    S: AsRef<str>,
+{
+    die_with(127, current_prog_name, msg)
+}
+
+fn die_with<S>(status: i32, current_prog_name: &str, msg: S) -> !
+where
+    S: AsRef<str>,
+{
+    eprintln!("{}: {}", current_prog_name, msg.as_ref());
+    std::process::exit(status)
+}
diff --git a/users/Profpatsch/fafo.jpg b/users/Profpatsch/fafo.jpg
new file mode 100644
index 0000000000..78f11d208e
--- /dev/null
+++ b/users/Profpatsch/fafo.jpg
Binary files differ
diff --git a/users/Profpatsch/git-db/default.nix b/users/Profpatsch/git-db/default.nix
new file mode 100644
index 0000000000..ad5d927677
--- /dev/null
+++ b/users/Profpatsch/git-db/default.nix
@@ -0,0 +1,10 @@
+{ depot, pkgs, lib, ... }:
+
+depot.nix.writers.rustSimple
+{
+  name = "git-db";
+  dependencies = [
+    depot.third_party.rust-crates.git2
+  ];
+}
+  (builtins.readFile ./git-db.rs)
diff --git a/users/Profpatsch/git-db/git-db.rs b/users/Profpatsch/git-db/git-db.rs
new file mode 100644
index 0000000000..c8019bf036
--- /dev/null
+++ b/users/Profpatsch/git-db/git-db.rs
@@ -0,0 +1,90 @@
+extern crate git2;
+use std::os::unix::ffi::OsStrExt;
+use std::path::PathBuf;
+
+const DEFAULT_BRANCH: &str = "refs/heads/main";
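+
+// Usage sketch: `GIT_DB_DIR=/some/dir git-db` creates (or opens) a bare repo
+// at `$GIT_DB_DIR/git` and commits a demo file to refs/heads/main.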
+
+fn main() {
+    let git_db_dir = std::env::var_os("GIT_DB_DIR").expect("set GIT_DB_DIR");
+    let git_db = PathBuf::from(git_db_dir).join("git");
+
+    std::fs::create_dir_all(&git_db).unwrap();
+
+    let repo = git2::Repository::init_opts(
+        &git_db,
+        git2::RepositoryInitOptions::new()
+            .bare(true)
+            .mkpath(true)
+            .description("git-db database")
+            .initial_head(DEFAULT_BRANCH),
+    )
+    .expect(&format!(
+        "unable to create or open bare git repo at {}",
+        &git_db.display()
+    ));
+
+    let mut index = repo.index().expect("cannot get the git index file");
+    eprintln!("{:#?}", index.version());
+    index.clear().expect("could not clean the index");
+
+    let now = std::time::SystemTime::now()
+        .duration_since(std::time::SystemTime::UNIX_EPOCH)
+        .expect("unable to get system time");
+
+    let now_git_time = git2::IndexTime::new(
+        now.as_secs() as i32, // lol
+        u32::from(now.subsec_nanos()),
+    );
+
+    let data = "hi, it’s me".as_bytes();
+
+    index
+        .add_frombuffer(
+            &git2::IndexEntry {
+            mtime: now_git_time,
+            ctime: now_git_time,
+            // dev/ino don’t make sense for an in-memory buffer
+            dev: 0,
+            ino: 0,
+            mode: /*libc::S_ISREG*/ 0b1000 << (3+9) | /* read write for owner */ 0o644,
+            uid: 0,
+            gid: 0,
+            file_size: data.len() as u32, // lol again
+            id: git2::Oid::zero(),
+            flags: 0,
+            flags_extended: 0,
+            path: "hi.txt".as_bytes().to_owned(),
+        },
+            data,
+        )
+        .expect("could not add data to index");
+
+    let oid = index.write_tree().expect("could not write index tree");
+
+    let to_add_tree = repo
+        .find_tree(oid)
+        .expect("we just created this tree, where did it go?");
+
+    let parent_commits = match repo.find_reference(DEFAULT_BRANCH) {
+        Ok(ref_) => vec![ref_.peel_to_commit().expect(&format!(
+            "reference {} does not point to a commit",
+            DEFAULT_BRANCH
+        ))],
+        Err(err) => match err.code() {
+            // no commit exists yet
+            git2::ErrorCode::NotFound => vec![],
+            _ => panic!("could not read latest commit from {}", DEFAULT_BRANCH),
+        },
+    };
+    repo.commit(
+        Some(DEFAULT_BRANCH),
+        &git2::Signature::now("Mr. Authorboy", "author@example.com").unwrap(),
+        &git2::Signature::now("Mr. Commiterboy", "committer@example.com").unwrap(),
+        "This is my first commit!\n\
+         \n\
+         I wonder if it supports extended commit descriptions?\n",
+        &to_add_tree,
+        &parent_commits.iter().collect::<Vec<_>>()[..],
+    )
+    .expect("could not commit the index we just wrote");
+}
diff --git a/users/Profpatsch/haskell-module-deps/README.md b/users/Profpatsch/haskell-module-deps/README.md
new file mode 100644
index 0000000000..b4f35beac5
--- /dev/null
+++ b/users/Profpatsch/haskell-module-deps/README.md
@@ -0,0 +1,5 @@
+# haskell-module-deps
+
+An executable that, given a project directory containing `.hs` files in `./src`, renders a graph (pdf/png) of how those modules import each other, transitively.
+
+Useful for getting an overview, finding weird import edges, figuring out how to get more compilation parallelism into your Haskell project.
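+
+Usage sketch (targets from `default.nix`; the project root is the argument):
+
+    haskell-module-deps ./my-project          # render a pdf and open it in zathura
+    haskell-module-deps-png ./my-project > deps.png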
diff --git a/users/Profpatsch/haskell-module-deps/default.nix b/users/Profpatsch/haskell-module-deps/default.nix
new file mode 100644
index 0000000000..71cc0a5b0d
--- /dev/null
+++ b/users/Profpatsch/haskell-module-deps/default.nix
@@ -0,0 +1,55 @@
+{ depot, pkgs, lib, ... }:
+
+let
+  bins = depot.nix.getBins pkgs.zathura [ "zathura" ]
+    // depot.nix.getBins pkgs.haskellPackages.graphmod [ "graphmod" ]
+    // depot.nix.getBins pkgs.graphviz [ "dot" ]
+  ;
+
+  # Display a graph of all modules in a project and how they depend on each other.
+  # Takes the project directory as argument.
+  # Open in zathura.
+  haskell-module-deps = depot.nix.writeExecline "haskell-module-deps" { } [
+    "pipeline"
+    [ haskell-module-deps-with-filetype "pdf" "$@" ]
+    bins.zathura
+    "-"
+  ];
+
+  # Display a graph of all modules in a project and how they depend on each other.
+  # Takes the project directory as argument.
+  # Print a png to stdout.
+  haskell-module-deps-png = depot.nix.writeExecline "haskell-module-deps-png" { } [
+    haskell-module-deps-with-filetype
+    "png"
+    "$@"
+  ];
+
+  # Display a graph of all modules in a project and how they depend on each other.
+  # Takes the file type to generate as first argument
+  # and the project directory as second argument.
+  haskell-module-deps-with-filetype = pkgs.writers.writeBash "haskell-module-deps-with-filetype" ''
+    set -euo pipefail
+    shopt -s globstar
+    filetype="$1"
+    rootDir="$2"
+    ${bins.graphmod} \
+      ${/*silence warnings for missing external dependencies*/""} \
+      --quiet \
+      ${/*applies some kind of import simplification*/""} \
+      --prune-edges \
+      "$rootDir"/src/**/*.hs \
+      | ${bins.dot} \
+          ${/*otherwise it’s a bit cramped*/""} \
+          -Gsize="20,20!" \
+          -T"$filetype"
+  '';
+
+in
+depot.nix.readTree.drvTargets {
+  inherit
+    haskell-module-deps
+    haskell-module-deps-png
+    haskell-module-deps-with-filetype
+    ;
+}
diff --git a/users/Profpatsch/haskell-module-deps/example-output-dhall-haskell.png b/users/Profpatsch/haskell-module-deps/example-output-dhall-haskell.png
new file mode 100644
index 0000000000..53725c49e8
--- /dev/null
+++ b/users/Profpatsch/haskell-module-deps/example-output-dhall-haskell.png
Binary files differ
diff --git a/users/Profpatsch/hie.yaml b/users/Profpatsch/hie.yaml
new file mode 100644
index 0000000000..1b5ae942ad
--- /dev/null
+++ b/users/Profpatsch/hie.yaml
@@ -0,0 +1,36 @@
+cradle:
+  cabal:
+    - path: "./my-prelude"
+      component: "lib:my-prelude"
+    - path: "./my-webstuff"
+      component: "lib:my-webstuff"
+    - path: "./netencode"
+      component: "lib:netencode"
+    - path: "./arglib"
+      component: "lib:arglib-netencode"
+    - path: "./execline"
+      component: "lib:exec-helpers"
+    - path: "./htmx-experiment/src"
+      component: "lib:htmx-experiment"
+    - path: "./htmx-experiment/Main.hs"
+      component: "htmx-experiment:exe:htmx-experiment"
+    - path: "./mailbox-org/src"
+      component: "lib:mailbox-org"
+    - path: "./mailbox-org/MailboxOrg.hs"
+      component: "mailbox-org:exe:mailbox-org"
+    - path: "./cas-serve/CasServe.hs"
+      component: "cas-serve:exe:cas-serve"
+    - path: "./jbovlaste-sqlite/JbovlasteSqlite.hs"
+      component: "jbovlaste-sqlite:exe:jbovlaste-sqlite"
+    - path: "./whatcd-resolver/src"
+      component: "lib:whatcd-resolver"
+    - path: "./whatcd-resolver/Main.hs"
+      component: "whatcd-resolver:exe:whatcd-resolver"
+    - path: "./openlab-tools/src"
+      component: "lib:openlab-tools"
+    - path: "./openlab-tools/Main.hs"
+      component: "openlab-tools:exe:openlab-tools"
+    - path: "./httzip/Httzip.hs"
+      component: "httzip:exe:httzip"
+    - path: "./my-xmonad/Xmonad.hs"
+      component: "my-xmonad:exe:xmonad"
diff --git a/users/Profpatsch/htmx-experiment/Main.hs b/users/Profpatsch/htmx-experiment/Main.hs
new file mode 100644
index 0000000000..29ce8610ff
--- /dev/null
+++ b/users/Profpatsch/htmx-experiment/Main.hs
@@ -0,0 +1,4 @@
+import HtmxExperiment qualified
+
+main :: IO ()
+main = HtmxExperiment.main
diff --git a/users/Profpatsch/htmx-experiment/default.nix b/users/Profpatsch/htmx-experiment/default.nix
new file mode 100644
index 0000000000..ef1a28bd2b
--- /dev/null
+++ b/users/Profpatsch/htmx-experiment/default.nix
@@ -0,0 +1,46 @@
+{ depot, pkgs, lib, ... }:
+
+let
+  htmx-experiment = pkgs.haskellPackages.mkDerivation {
+    pname = "htmx-experiment";
+    version = "0.1.0";
+
+    src = depot.users.Profpatsch.exactSource ./. [
+      ./htmx-experiment.cabal
+      ./Main.hs
+      ./src/HtmxExperiment.hs
+      ./src/ServerErrors.hs
+      ./src/ValidationParseT.hs
+    ];
+
+    libraryHaskellDepends = [
+      depot.users.Profpatsch.my-webstuff
+      pkgs.haskellPackages.pa-label
+      pkgs.haskellPackages.pa-error-tree
+      pkgs.haskellPackages.blaze-html
+      pkgs.haskellPackages.blaze-markup
+      pkgs.haskellPackages.bytestring
+      pkgs.haskellPackages.dlist
+      pkgs.haskellPackages.http-types
+      pkgs.haskellPackages.ihp-hsx
+      pkgs.haskellPackages.monad-logger
+      pkgs.haskellPackages.pa-error-tree
+      pkgs.haskellPackages.pa-field-parser
+      pkgs.haskellPackages.pa-label
+      pkgs.haskellPackages.pa-prelude
+      pkgs.haskellPackages.selective
+      pkgs.haskellPackages.text
+      pkgs.haskellPackages.unliftio
+      pkgs.haskellPackages.wai
+      pkgs.haskellPackages.warp
+
+    ];
+
+    isLibrary = false;
+    isExecutable = true;
+    license = lib.licenses.mit;
+  };
+
+
+in
+htmx-experiment
diff --git a/users/Profpatsch/htmx-experiment/htmx-experiment.cabal b/users/Profpatsch/htmx-experiment/htmx-experiment.cabal
new file mode 100644
index 0000000000..e9a0d93614
--- /dev/null
+++ b/users/Profpatsch/htmx-experiment/htmx-experiment.cabal
@@ -0,0 +1,89 @@
+cabal-version:      3.0
+name:               htmx-experiment
+version:            0.1.0.0
+author:             Profpatsch
+maintainer:         mail@profpatsch.de
+
+common common-options
+  ghc-options:
+      -Wall
+      -Wno-type-defaults
+      -Wunused-packages
+      -Wredundant-constraints
+      -fwarn-missing-deriving-strategies
+
+  -- See https://downloads.haskell.org/ghc/latest/docs/users_guide/exts.html
+  -- for a description of all these extensions
+  default-extensions:
+      -- Infer Applicative instead of Monad where possible
+    ApplicativeDo
+
+    -- Allow literal strings to be Text
+    OverloadedStrings
+
+    -- Syntactic sugar improvements
+    LambdaCase
+    MultiWayIf
+
+    -- Makes the (deprecated) usage of * instead of Data.Kind.Type an error
+    NoStarIsType
+
+    -- Convenient and crucial to deal with ambiguous field names, commonly
+    -- known as RecordDotSyntax
+    OverloadedRecordDot
+
+    -- does not export record fields as functions, use OverloadedRecordDot to access instead
+    NoFieldSelectors
+
+    -- Record punning
+    RecordWildCards
+
+    -- Improved Deriving
+    DerivingStrategies
+    DerivingVia
+
+    -- Type-level strings
+    DataKinds
+
+    -- to enable the `type` keyword in import lists (ormolu uses this automatically)
+    ExplicitNamespaces
+
+  default-language: GHC2021
+
+library
+    import: common-options
+    exposed-modules:
+      HtmxExperiment,
+      ServerErrors,
+      ValidationParseT
+    hs-source-dirs: ./src
+
+    build-depends:
+        base >=4.15 && <5,
+        -- http-api-data
+        blaze-html,
+        blaze-markup,
+        bytestring,
+        dlist,
+        http-types,
+        ihp-hsx,
+        monad-logger,
+        pa-error-tree,
+        pa-field-parser,
+        pa-label,
+        pa-prelude,
+        my-webstuff,
+        selective,
+        text,
+        unliftio,
+        wai,
+        warp
+
+
+executable htmx-experiment
+    import: common-options
+    main-is: Main.hs
+
+    build-depends:
+        htmx-experiment,
+        base >=4.15 && <5,
diff --git a/users/Profpatsch/htmx-experiment/src/HtmxExperiment.hs b/users/Profpatsch/htmx-experiment/src/HtmxExperiment.hs
new file mode 100644
index 0000000000..225206a584
--- /dev/null
+++ b/users/Profpatsch/htmx-experiment/src/HtmxExperiment.hs
@@ -0,0 +1,377 @@
+{-# LANGUAGE AllowAmbiguousTypes #-}
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+{-# LANGUAGE QuasiQuotes #-}
+
+module HtmxExperiment where
+
+import Control.Category qualified as Cat
+import Control.Exception qualified as Exc
+import Control.Monad.Logger
+import Control.Selective (Selective (select))
+import Control.Selective qualified as Selective
+import Data.ByteString qualified as Bytes
+import Data.DList (DList)
+import Data.Functor.Compose
+import Data.List qualified as List
+import Data.Maybe (maybeToList)
+import Data.Maybe qualified as Maybe
+import Data.Monoid qualified as Monoid
+import Data.Text qualified as Text
+import FieldParser hiding (nonEmpty)
+import GHC.TypeLits (KnownSymbol, symbolVal)
+import IHP.HSX.QQ (hsx)
+import Label
+import Multipart2 (FormValidation (FormValidation), FormValidationResult, MultipartParseT, failFormValidation)
+import Multipart2 qualified as Multipart
+import Network.HTTP.Types qualified as Http
+import Network.Wai qualified as Wai
+import Network.Wai.Handler.Warp qualified as Warp
+import PossehlAnalyticsPrelude
+import ServerErrors (ServerError (..), throwUserErrorTree)
+import Text.Blaze.Html5 (Html, docTypeHtml)
+import Text.Blaze.Renderer.Utf8 (renderMarkup)
+import UnliftIO (MonadUnliftIO (withRunInIO))
+import Prelude hiding (compare)
+
+-- data Routes
+--   = Root
+--   | Register
+--   | RegisterSubmit
+
+-- data Router url = Router
+--   { parse :: Routes.URLParser url,
+--     print :: url -> [Text]
+--   }
+
+-- routerPathInfo :: Routes.PathInfo a => Router a
+-- routerPathInfo =
+--   Router
+--     { parse = Routes.fromPathSegments,
+--       print = Routes.toPathSegments
+--     }
+
+-- subroute :: Text -> Router subUrl -> Router subUrl
+-- subroute path inner =
+--   Router
+--     { parse = Routes.segment path *> inner.parse,
+--       print = \url -> path : inner.print url
+--     }
+
+-- routerLeaf :: a -> Router a
+-- routerLeaf a =
+--   Router
+--     { parse = pure a,
+--       print = \_ -> []
+--     }
+
+-- routerToSite ::
+--   ((url -> [(Text, Maybe Text)] -> Text) -> url -> a) ->
+--   Router url ->
+--   Routes.Site url a
+-- routerToSite handler router =
+--   Routes.Site
+--     { handleSite = handler,
+--       formatPathSegments = (\x -> (x, [])) . router.print,
+--       parsePathSegments = Routes.parseSegments router.parse
+--     }
+
+-- handlers queryParams = \case
+--   Root -> "root"
+--   Register -> "register"
+--   RegisterSubmit -> "registersubmit"
+
+newtype Router handler from to = Router {unRouter :: from -> [Text] -> (Maybe handler, to)}
+  deriving
+    (Functor, Applicative)
+    via ( Compose
+            ((->) from)
+            ( Compose
+                ((->) [Text])
+                ((,) (Monoid.First handler))
+            )
+        )
+
+data Routes r handler = Routes
+  { users :: r (Label "register" handler)
+  }
+
+data Endpoint handler subroutes = Endpoint
+  { root :: handler,
+    subroutes :: subroutes
+  }
+  deriving stock (Show, Eq)
+
+data Handler = Handler {url :: Text}
+
+-- myRoute :: Router () from (Endpoint (Routes (Endpoint ()) Handler) b)
+-- myRoute =
+--   root $ do
+--     users <- fixed "users" () $ fixedFinal @"register" ()
+--     pure $ Routes {..}
+
+-- -- | the root and its children
+-- root :: routes from a -> routes from (Endpoint a b)
+-- root = todo
+
+-- | A fixed sub-route with children
+fixed :: Text -> handler -> Router handler from a -> Router handler from (Endpoint handler a)
+fixed route handler inner = Router $ \from -> \case
+  [final]
+    | route == final ->
+        ( Just handler,
+          Endpoint
+            { root = handler,
+              subroutes = (inner.unRouter from []) & snd
+            }
+        )
+  (this : more)
+    | route == this ->
+        ( (inner.unRouter from more) & fst,
+          Endpoint
+            { root = handler,
+              subroutes = (inner.unRouter from more) & snd
+            }
+        )
+  _ -> (Nothing, Endpoint {root = handler, subroutes = (inner.unRouter from []) & snd})
+
+-- integer ::
+--   forall routeName routes from a.
+--   Router (T2 routeName Integer "more" from) a ->
+--   Router from (Endpoint () a)
+-- integer inner = Router $ \case
+--   (path, []) ->
+--     runFieldParser Field.signedDecimal path
+--   (path, more) ->
+--     inner.unRouter more (runFieldParser Field.signedDecimal path)
+
+-- -- | A leaf route
+-- fixedFinal :: forall route handler from. (KnownSymbol route) => handler -> Router handler from (Label route Handler)
+-- fixedFinal handler = do
+--   let route = symbolText @route
+--   Router $ \from -> \case
+--     [final] | route == final -> (Just handler, label @route (Handler from))
+--     _ -> (Nothing, label @route handler)
+
+-- | Get the text of a symbol via TypeApplications
+symbolText :: forall sym. KnownSymbol sym => Text
+symbolText = do
+  symbolVal (Proxy :: Proxy sym)
+    & stringToText
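+
+-- e.g. symbolText @"register" == ("register" :: Text)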
+
+main :: IO ()
+main = runStderrLoggingT @IO $ do
+  withRunInIO @(LoggingT IO) $ \runInIO -> do
+    Warp.run 8080 $ \req respond -> catchServerError respond $ do
+      let respondOk res = Wai.responseLBS Http.ok200 [] (renderMarkup res)
+      let htmlRoot inner =
+            docTypeHtml
+              [hsx|
+            <head>
+              <script src="https://unpkg.com/htmx.org@1.9.2" integrity="sha384-L6OqL9pRWyyFU3+/bjdSri+iIphTN/bvYyM37tICVyOJkWZLpP2vGn6VUEXgzg6h" crossorigin="anonymous"></script>
+            </head>
+            <body>
+              {inner}
+            </body>
+        |]
+      res <-
+        case req & Wai.pathInfo of
+          [] ->
+            pure $
+              respondOk $
+                htmlRoot
+                  [hsx|
+                      <div id="register_buttons">
+                        <button hx-get="/register" hx-target="body" hx-push-url="/register">Register an account</button>
+                        <button hx-get="/login" hx-target="body">Login</button>
+                      </div>
+              |]
+          ["register"] ->
+            pure $ respondOk $ fullEndpoint req $ \case
+              FullPage -> htmlRoot $ registerForm mempty
+              Snippet -> registerForm mempty
+          ["register", "submit"] -> do
+            FormValidation body <-
+              req
+                & parsePostBody
+                  registerFormValidate
+                & runInIO
+            case body of
+              -- if the parse succeeds, ignore any accumulated errors
+              (_, Just a) -> pure $ respondOk $ htmlRoot [hsx|{a}|]
+              (errs, Nothing) -> pure $ respondOk $ htmlRoot $ registerForm errs
+          other ->
+            pure $ respondOk [hsx|no route here at {other}|]
+      respond $ res
+  where
+    catchServerError respond io =
+      Exc.catch io (\(ex :: ServerError) -> respond $ Wai.responseLBS ex.status [] ex.errBody)
+
+parsePostBody ::
+  (MonadIO m, MonadThrow m, MonadLogger m) =>
+  MultipartParseT backend m b ->
+  Wai.Request ->
+  m b
+parsePostBody parser req =
+  Multipart.parseMultipartOrThrow throwUserErrorTree parser req
+
+-- migrate :: IO (Label "numberOfRowsAffected" Natural)
+-- migrate =
+--   Init.runAppTest $ do
+--     runTransaction $
+--       execute
+--         [sql|
+--         CREATE TABLE IF NOT EXISTS experiments.users (
+--           id SERIAL PRIMARY KEY,
+--           email TEXT NOT NULL,
+--           registration_pending_token TEXT NULL
+--         )
+--         |]
+--         ()
+
+data HsxRequest
+  = Snippet
+  | FullPage
+
+fullEndpoint :: Wai.Request -> (HsxRequest -> t) -> t
+fullEndpoint req act = do
+  let isHxRequest = req & Wai.requestHeaders & List.find (\h -> (h & fst) == "HX-Request") & Maybe.isJust
+  if isHxRequest
+    then act Snippet
+    else act FullPage
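+
+-- htmx sets the @HX-Request: true@ header on its AJAX requests, so e.g.
+-- (illustrative) @curl -H "HX-Request: true" localhost:8080/register@
+-- gets just the form snippet, while a plain browser request gets the
+-- full page.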
+
+data FormField = FormField
+  { label_ :: Html,
+    required :: Bool,
+    id_ :: Text,
+    name :: ByteString,
+    type_ :: Text,
+    placeholder :: Maybe Text
+  }
+
+inputHtml ::
+  FormField ->
+  DList FormValidationResult ->
+  Html
+inputHtml (FormField {..}) validationResults = do
+  let validation =
+        validationResults
+          & toList
+          & mapMaybe
+            ( \v ->
+                if v.formFieldName == name
+                  then
+                    Just
+                      ( T2
+                          (label @"errors" (maybeToList v.hasError))
+                          (label @"originalValue" (Monoid.First (Just v.originalValue)))
+                      )
+                  else Nothing
+            )
+          & mconcat
+  let isFirstError =
+        validationResults
+          & List.find (\res -> Maybe.isJust res.hasError && res.formFieldName == name)
+          & Maybe.isJust
+  [hsx|
+      <label for={id_}>{label_}
+        <input
+          autofocus={isFirstError}
+          onfocus="this.select()"
+          required={required}
+          id={id_}
+          name={name}
+          type={type_}
+          placeholder={placeholder}
+          value={validation.originalValue.getFirst}
+        />
+        <p id={id_ <> ".validation"}>{validation.errors & nonEmpty <&> toList <&> map prettyError <&> Text.intercalate "; "}</p>
+      </label>
+  |]
+
+registerForm :: DList FormValidationResult -> Html
+registerForm validationErrors =
+  let fields =
+        mconcat
+          [ inputHtml $
+              FormField
+                { label_ = "Your Email:",
+                  required = True,
+                  id_ = "register_email",
+                  name = "email",
+                  type_ = "email",
+                  placeholder = Just "your@email.com"
+                },
+            inputHtml $
+              FormField
+                { label_ = "New password:",
+                  required = True,
+                  id_ = "register_password",
+                  name = "password",
+                  type_ = "password",
+                  placeholder = Just "hunter2"
+                },
+            inputHtml $
+              FormField
+                { label_ = "Repeated password:",
+                  required = True,
+                  id_ = "register_password_repeated",
+                  name = "password_repeated",
+                  type_ = "password",
+                  placeholder = Just "hunter2"
+                }
+          ]
+   in [hsx|
+  <form hx-post="/register/submit">
+    <fieldset>
+      <legend>Register user</legend>
+      {fields validationErrors}
+      <button id="register_submit_button" name="register">
+        Register
+      </button>
+    </fieldset>
+  </form>
+  |]
+
+registerFormValidate ::
+  Applicative m =>
+  MultipartParseT
+    w
+    m
+    (FormValidation (T2 "email" ByteString "password" ByteString))
+registerFormValidate = do
+  let emailFP = FieldParser $ \b ->
+        if
+            | Bytes.elem (charToWordUnsafe '@') b -> Right b
+            | otherwise -> Left [fmt|This is not an email address: "{b & bytesToTextUtf8Unsafe}"|]
+
+  getCompose @(MultipartParseT _ _) @FormValidation $ do
+    email <- Compose $ Multipart.fieldLabel' @"email" "email" emailFP
+    password <-
+      aEqB
+        "password_repeated"
+        "The two password fields must be the same"
+        (Compose $ Multipart.field' "password" Cat.id)
+        (\field -> Compose $ Multipart.field' field Cat.id)
+    pure $ T2 email (label @"password" password)
+  where
+    aEqB field validateErr fCompare fValidate =
+      Selective.fromMaybeS
+        -- TODO: this check is only reached if the field itself is valid. Could we combine those errors?
+        (Compose $ pure $ failFormValidation (T2 (label @"formFieldName" field) (label @"originalValue" "")) validateErr)
+        $ do
+          compare <- fCompare
+          validate <- fValidate field
+          pure $ if compare == validate then Just validate else Nothing
+
+-- | A lifted version of 'Data.Maybe.fromMaybe'.
+fromMaybeS :: Selective f => f a -> f (Maybe a) -> f a
+fromMaybeS ifNothing fma =
+  select
+    ( fma <&> \case
+        Nothing -> Left ()
+        Just a -> Right a
+    )
+    ( do
+        a <- ifNothing
+        pure (\() -> a)
+    )
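+
+-- For example:
+--
+-- @
+-- fromMaybeS (pure 0) (pure (Just 5)) ≡ pure 5
+-- fromMaybeS (pure 0) (pure Nothing)  ≡ pure 0
+-- @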
diff --git a/users/Profpatsch/htmx-experiment/src/ServerErrors.hs b/users/Profpatsch/htmx-experiment/src/ServerErrors.hs
new file mode 100644
index 0000000000..0fca7ab464
--- /dev/null
+++ b/users/Profpatsch/htmx-experiment/src/ServerErrors.hs
@@ -0,0 +1,244 @@
+{-# LANGUAGE DeriveAnyClass #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE TemplateHaskell #-}
+
+module ServerErrors where
+
+import Control.Exception (Exception)
+import Control.Monad.Logger (MonadLogger, logError, logWarn)
+import Data.ByteString.Lazy qualified as Bytes.Lazy
+import Data.Error.Tree
+import Network.HTTP.Types qualified as Http
+import PossehlAnalyticsPrelude
+
+data ServerError = ServerError
+  { status :: Http.Status,
+    errBody :: Bytes.Lazy.ByteString
+  }
+  deriving stock (Show)
+  deriving anyclass (Exception)
+
+emptyServerError :: Http.Status -> ServerError
+emptyServerError status = ServerError {status, errBody = ""}
+
+-- | Throw a user error.
+--
+-- “User” here is a client using our API, not a human user.
+-- So we throw an `HTTP 400` error, which means the API was used incorrectly.
+--
+-- We also log the error as a warning, because it probably signifies a programming bug in our client.
+--
+-- If you need to display a message to a human user, return a `FrontendResponse`
+-- or a structured type with translation keys (so we can localize the errors).
+throwUserError ::
+  (MonadLogger m, MonadThrow m) =>
+  -- | The error to log & throw to the user
+  Error ->
+  m b
+throwUserError err = do
+  -- TODO: should we make this into a macro to keep the line numbers?
+  $logWarn (err & errorContext "There was a “user holding it wrong” error, check the client code" & prettyError)
+  throwM
+    ServerError
+      { status = Http.badRequest400,
+        errBody = err & prettyError & textToBytesUtf8 & toLazyBytes
+      }
+
+-- | Throw a user error.
+--
+-- “User” here is a client using our API, not a human user.
+-- So we throw an `HTTP 400` error, which means the API was used incorrectly.
+--
+-- We also log the error as a warning, because it probably signifies a programming bug in our client.
+--
+-- If you need to display a message to a human user, return a `FrontendResponse`
+-- or a structured type with translation keys (so we can localize the errors).
+throwUserErrorTree ::
+  (MonadLogger m, MonadThrow m) =>
+  -- | The error to log & throw to the user
+  ErrorTree ->
+  m b
+throwUserErrorTree err = do
+  -- TODO: should we make this into a macro to keep the line numbers?
+  $logWarn (err & nestedError "There was a “user holding it wrong” error, check the client code" & prettyErrorTree)
+  throwM
+    ServerError
+      { status = Http.badRequest400,
+        errBody = err & prettyErrorTree & textToBytesUtf8 & toLazyBytes
+      }
+
+-- | Unwrap the `Either` and if `Left` throw a user error.
+--
+-- Intended to use in a pipeline, e.g.:
+--
+-- @
+-- doSomething
+--   >>= orUserError "Oh no something did not work"
+--   >>= doSomethingElse
+-- @
+--
+-- “User” here is a client using our API, not a human user.
+-- So we throw an `HTTP 400` error, which means the API was used incorrectly.
+--
+-- We also log the error as a warning, because it probably signifies a programming bug in our client.
+--
+-- If you need to display a message to a human user, return a `FrontendResponse`
+-- or a structured type with translation keys (so we can localize the errors).
+orUserError ::
+  (MonadThrow m, MonadLogger m) =>
+  -- | The message to add as a context to the error being thrown
+  Text ->
+  -- | Result to unwrap and potentially throw
+  Either Error a ->
+  m a
+orUserError outerMsg eErrA =
+  orUserErrorTree outerMsg (first singleError eErrA)
+
+-- | Unwrap the `Either` and if `Left` throw a user error. Will pretty-print the 'ErrorTree'
+--
+-- Intended to use in a pipeline, e.g.:
+--
+-- @
+-- doSomething
+--   >>= orUserErrorTree "Oh no something did not work"
+--   >>= doSomethingElse
+-- @
+--
+-- “User” here is a client using our API, not a human user.
+-- So we throw an `HTTP 400` error, which means the API was used incorrectly.
+--
+-- We also log the error as a warning, because it probably signifies a programming bug in our client.
+--
+-- If you need to display a message to a human user, return a `FrontendResponse`
+-- or a structured type with translation keys (so we can localize the errors).
+orUserErrorTree ::
+  (MonadThrow m, MonadLogger m) =>
+  -- | The message to add as a context to the 'ErrorTree' being thrown
+  Text ->
+  -- | Result to unwrap and potentially throw
+  Either ErrorTree a ->
+  m a
+orUserErrorTree outerMsg = \case
+  Right a -> pure a
+  Left err -> do
+    -- TODO: this outer message should probably be added as a separate root instead of adding to the root error?
+    let tree = errorTreeContext outerMsg err
+    -- TODO: should we make this into a macro to keep the line numbers?
+    $logWarn (errorTreeContext "There was a “user holding it wrong” error, check the client code" tree & prettyErrorTree)
+    throwM
+      ServerError
+        { status = Http.badRequest400,
+          errBody = tree & prettyErrorTree & textToBytesUtf8 & toLazyBytes
+        }
+
+-- | Throw an internal error.
+--
+-- “Internal” here means some assertion that we depend on failed,
+-- e.g. some database request returned a wrong result/number of results
+-- or some invariant that we expect to hold failed.
+--
+-- This prints the full error to the log,
+-- and returns an “HTTP 500” error without the message.
+--
+-- If you want to signify a mishandling of the API (e.g. a wrong request), throw a `userError`.
+-- If you need to display a message to a human user, return a `FrontendResponse`
+-- or a structured type with translation keys (so we can localize the errors).
+throwInternalError ::
+  (MonadLogger m, MonadThrow m) =>
+  -- | The error to log internally
+  Error ->
+  m b
+throwInternalError err = do
+  -- TODO: should we make this into a macro to keep the line numbers?
+  $logError
+    (err & prettyError)
+  throwM $ emptyServerError Http.internalServerError500
+
+-- | Throw an internal error.
+--
+-- “Internal” here means some assertion that we depend on failed,
+-- e.g. some database request returned a wrong result/number of results
+-- or some invariant that we expect to hold failed.
+--
+-- This prints the full error to the log,
+-- and returns an “HTTP 500” error without the message.
+--
+-- If you want to signify a mishandling of the API (e.g. a wrong request), throw a `userError`.
+-- If you need to display a message to a human user, return a `FrontendResponse`
+-- or a structured type with translation keys (so we can localize the errors).
+throwInternalErrorTree ::
+  (MonadLogger m, MonadThrow m) =>
+  -- | The error to log internally
+  ErrorTree ->
+  m b
+throwInternalErrorTree err = do
+  -- TODO: should we make this into a macro to keep the line numbers?
+  $logError
+    (err & prettyErrorTree)
+  throwM $ emptyServerError Http.internalServerError500
+
+-- | Unwrap the `Either` and if `Left` throw an internal error.
+--
+-- Intended to use in a pipeline, e.g.:
+--
+-- @
+-- doSomething
+--   >>= orInternalError "Oh no something did not work"
+--   >>= doSomethingElse
+-- @
+--
+-- “Internal” here means some assertion that we depend on failed,
+-- e.g. some database request returned a wrong result/number of results
+-- or some invariant that we expect to hold failed.
+--
+-- This prints the full error to the log,
+-- and returns an “HTTP 500” error without the message.
+--
+-- If you want to signify a mishandling of the API (e.g. a wrong request), throw a `userError`.
+-- If you need to display a message to a human user, return a `FrontendResponse`
+-- or a structured type with translation keys (so we can localize the errors).
+orInternalError ::
+  (MonadThrow m, MonadLogger m) =>
+  -- | The message to add as a context to the error being thrown
+  Text ->
+  -- | Result to unwrap and potentially throw
+  Either Error a ->
+  m a
+orInternalError outerMsg eErrA = orInternalErrorTree outerMsg (first singleError eErrA)
+
+-- | Unwrap the `Either` and if `Left` throw an internal error. Will pretty-print the 'ErrorTree'.
+--
+-- Intended to use in a pipeline, e.g.:
+--
+-- @
+-- doSomething
+--   >>= orInternalErrorTree "Oh no something did not work"
+--   >>= doSomethingElse
+-- @
+--
+-- “Internal” here means some assertion that we depend on failed,
+-- e.g. some database request returned a wrong result/number of results
+-- or some invariant that we expect to hold failed.
+--
+-- This prints the full error to the log,
+-- and returns an “HTTP 500” error without the message.
+--
+-- If you want to signify a mishandling of the API (e.g. a wrong request), throw a `userError`.
+-- If you need to display a message to a human user, return a `FrontendResponse`
+-- or a structured type with translation keys (so we can localize the errors).
+orInternalErrorTree ::
+  (MonadThrow m, MonadLogger m) =>
+  -- | The message to add as a context to the 'ErrorTree' being thrown
+  Text ->
+  -- | Result to unwrap and potentially throw
+  Either ErrorTree a ->
+  m a
+orInternalErrorTree outerMsg = \case
+  Right a -> pure a
+  Left err -> do
+    -- TODO: this outer message should probably be added as a separate root instead of adding to the root error?
+    let tree = errorTreeContext outerMsg err
+    -- TODO: should we make this into a macro to keep the line numbers?
+    $logError (tree & prettyErrorTree)
+    throwM $ emptyServerError Http.internalServerError500
diff --git a/users/Profpatsch/htmx-experiment/src/ValidationParseT.hs b/users/Profpatsch/htmx-experiment/src/ValidationParseT.hs
new file mode 100644
index 0000000000..ffb6c2f395
--- /dev/null
+++ b/users/Profpatsch/htmx-experiment/src/ValidationParseT.hs
@@ -0,0 +1,40 @@
+module ValidationParseT where
+
+import Control.Monad.Logger (MonadLogger)
+import Control.Selective (Selective)
+import Data.Error.Tree
+import Data.Functor.Compose (Compose (..))
+import PossehlAnalyticsPrelude
+import ServerErrors
+
+-- | A simple way to create an Applicative parser that parses from some environment.
+--
+-- Use with DerivingVia. Grep codebase for examples.
+newtype ValidationParseT env m a = ValidationParseT {unValidationParseT :: env -> m (Validation (NonEmpty Error) a)}
+  deriving
+    (Functor, Applicative, Selective)
+    via ( Compose
+            ((->) env)
+            (Compose m (Validation (NonEmpty Error)))
+        )
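+
+-- A hypothetical instantiation (a sketch, all names made up):
+--
+-- @
+-- newtype HeaderParser a = HeaderParser
+--   (ValidationParseT [(Text, Text)] IO a)
+--   deriving (Functor, Applicative, Selective)
+--     via (ValidationParseT [(Text, Text)] IO)
+-- @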
+
+-- | Helper that runs the given parser and throws a user error if the parsing failed.
+runValidationParseTOrUserError ::
+  forall validationParseT env m a.
+  ( Coercible validationParseT (ValidationParseT env m a),
+    MonadLogger m,
+    MonadThrow m
+  ) =>
+  -- | toplevel error message to throw if the parsing fails
+  Error ->
+  -- | The parser which should be run
+  validationParseT ->
+  -- | input to the parser
+  env ->
+  m a
+{-# INLINE runValidationParseTOrUserError #-}
+runValidationParseTOrUserError contextError parser env =
+  (coerce @_ @(ValidationParseT _ _ _) parser).unValidationParseT env
+    >>= \case
+      Failure errs -> throwUserErrorTree (errorTree contextError errs)
+      Success a -> pure a
diff --git a/users/Profpatsch/httzip/Httzip.hs b/users/Profpatsch/httzip/Httzip.hs
new file mode 100644
index 0000000000..761cd1d2ea
--- /dev/null
+++ b/users/Profpatsch/httzip/Httzip.hs
@@ -0,0 +1,66 @@
+{-# LANGUAGE QuasiQuotes #-}
+
+module Main where
+
+import Conduit ((.|))
+import Data.Binary.Builder qualified as Builder
+import Data.Conduit qualified as Cond
+import Data.Conduit.Combinators qualified as Cond
+import Data.Conduit.Process.Typed qualified as Cond
+import Data.Conduit.Process.Typed qualified as Proc
+import Data.List qualified as List
+import Data.Text qualified as Text
+import Network.HTTP.Types qualified as Http
+import Network.Wai qualified as Wai
+import Network.Wai.Conduit qualified as Wai.Conduit
+import Network.Wai.Handler.Warp qualified as Warp
+import PossehlAnalyticsPrelude
+import System.Directory qualified as Dir
+import System.FilePath ((</>))
+import System.FilePath qualified as File
+import System.Posix qualified as Unix
+
+-- Webserver that returns folders under CWD as .zip archives (recursively)
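+-- For example (assuming a subdirectory ./music under the CWD):
+--
+--   curl -o music.zip http://localhost:7070/music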
+main :: IO ()
+main = do
+  currentDirectory <- Dir.getCurrentDirectory >>= Dir.canonicalizePath
+  run currentDirectory
+
+run :: FilePath -> IO ()
+run dir = do
+  currentDirectory <- Dir.canonicalizePath dir
+  putStderrLn $ [fmt|current {show currentDirectory}|]
+  Warp.run 7070 $ \req respond -> do
+    let respondHtml status content = respond $ Wai.responseLBS status [("Content-Type", "text/html")] content
+    case req & Wai.pathInfo of
+      [] -> respond $ Wai.responseLBS Http.status200 [("Content-Type", "text/html")] "any directory will be returned as .zip!"
+      filePath -> do
+        absoluteWantedFilepath <- Dir.canonicalizePath (currentDirectory </> (File.joinPath (filePath <&> textToString)))
+        -- I hope this prevents any shenanigans lol
+        let noCurrentDirPrefix = List.stripPrefix (File.addTrailingPathSeparator currentDirectory) absoluteWantedFilepath
+        if
+            | (any (Text.elem '/') filePath) -> putStderrLn "tried %2F encoding" >> respondHtml Http.status400 "no"
+            | Nothing <- noCurrentDirPrefix -> putStderrLn "tried parent dir with .." >> respondHtml Http.status400 "no^2"
+            | Just wantedFilePath <- noCurrentDirPrefix -> do
+                putStderrLn $ [fmt|wanted {show wantedFilePath}|]
+                ex <- Unix.fileExist wantedFilePath
+                if ex
+                  then do
+                    status <- Unix.getFileStatus wantedFilePath
+                    if status & Unix.isDirectory
+                      then do
+                        zipDir <- zipDirectory wantedFilePath
+                        Proc.withProcessWait zipDir $ \process -> do
+                          let stream =
+                                Proc.getStdout process
+                                  .| Cond.map (\bytes -> Cond.Chunk $ Builder.fromByteString bytes)
+                          -- TODO: how to handle broken zip? Is it just gonna return a 500? But the stream is already starting, so hard!
+                          respond $ Wai.Conduit.responseSource Http.ok200 [("Content-Type", "application/zip")] stream
+                      else respondHtml Http.status404 "not found"
+                  else respondHtml Http.status404 "not found"
+  where
+    zipDirectory toZipDir = do
+      putStderrLn [fmt|running $ zip {show ["--recurse-paths", "-", toZipDir]}|]
+      pure $
+        Proc.proc "zip" ["--recurse-paths", "-", toZipDir]
+          & Proc.setStdout Cond.createSource
diff --git a/users/Profpatsch/httzip/default.nix b/users/Profpatsch/httzip/default.nix
new file mode 100644
index 0000000000..35d8a69d56
--- /dev/null
+++ b/users/Profpatsch/httzip/default.nix
@@ -0,0 +1,40 @@
+{ depot, pkgs, lib, ... }:
+
+let
+
+  httzip = pkgs.haskellPackages.mkDerivation {
+    pname = "httzip";
+    version = "0.1.0";
+
+    src = depot.users.Profpatsch.exactSource ./. [
+      ./httzip.cabal
+      ./Httzip.hs
+    ];
+
+    libraryHaskellDepends = [
+      pkgs.haskellPackages.pa-prelude
+      pkgs.haskellPackages.warp
+      pkgs.haskellPackages.wai
+      pkgs.haskellPackages.wai-conduit
+      pkgs.haskellPackages.conduit-extra
+      pkgs.haskellPackages.conduit
+    ];
+
+    isExecutable = true;
+    isLibrary = false;
+    license = lib.licenses.mit;
+  };
+
+  bins = depot.nix.getBins httzip [ "httzip" ];
+
+in
+depot.nix.writeExecline "httzip-wrapped" { } [
+  "importas"
+  "-i"
+  "PATH"
+  "PATH"
+  "export"
+  "PATH"
+  "${pkgs.zip}/bin:$${PATH}"
+  bins.httzip
+]
diff --git a/users/Profpatsch/httzip/httzip.cabal b/users/Profpatsch/httzip/httzip.cabal
new file mode 100644
index 0000000000..c463a6a5fe
--- /dev/null
+++ b/users/Profpatsch/httzip/httzip.cabal
@@ -0,0 +1,73 @@
+cabal-version:      3.0
+name:               httzip
+version:            0.1.0.0
+author:             Profpatsch
+maintainer:         mail@profpatsch.de
+
+common common-options
+  ghc-options:
+      -Wall
+      -Wno-type-defaults
+      -Wunused-packages
+      -Wredundant-constraints
+      -fwarn-missing-deriving-strategies
+
+  -- See https://downloads.haskell.org/ghc/latest/docs/users_guide/exts.html
+  -- for a description of all these extensions
+  default-extensions:
+      -- Infer Applicative instead of Monad where possible
+    ApplicativeDo
+
+    -- Allow literal strings to be Text
+    OverloadedStrings
+
+    -- Syntactic sugar improvements
+    LambdaCase
+    MultiWayIf
+
+    -- Makes the (deprecated) usage of * instead of Data.Kind.Type an error
+    NoStarIsType
+
+    -- Convenient and crucial to deal with ambiguous field names, commonly
+    -- known as RecordDotSyntax
+    OverloadedRecordDot
+
+    -- does not export record fields as functions, use OverloadedRecordDot to access instead
+    NoFieldSelectors
+
+    -- Record punning
+    RecordWildCards
+
+    -- Improved Deriving
+    DerivingStrategies
+    DerivingVia
+
+    -- Type-level strings
+    DataKinds
+
+    -- to enable the `type` keyword in import lists (ormolu uses this automatically)
+    ExplicitNamespaces
+
+  default-language: GHC2021
+
+
+executable httzip
+    import: common-options
+
+    main-is:          Httzip.hs
+
+    build-depends:
+        base >=4.15 && <5,
+        pa-prelude,
+        bytestring,
+        text,
+        warp,
+        wai,
+        http-types,
+        directory,
+        filepath,
+        unix,
+        wai-conduit,
+        conduit,
+        conduit-extra,
+        binary
diff --git a/users/Profpatsch/ical-smolify/IcalSmolify.hs b/users/Profpatsch/ical-smolify/IcalSmolify.hs
new file mode 100644
index 0000000000..77264d1693
--- /dev/null
+++ b/users/Profpatsch/ical-smolify/IcalSmolify.hs
@@ -0,0 +1,124 @@
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE QuasiQuotes #-}
+{-# OPTIONS_GHC -Wall #-}
+
+module Main where
+
+import qualified Data.ByteString.Lazy as Bytes.Lazy
+import qualified Data.CaseInsensitive as CaseInsensitive
+import qualified Data.Default as Default
+import qualified Data.Map.Strict as Map
+import qualified Data.Set as Set
+import ExecHelpers (dieUserError, CurrentProgramName)
+import MyPrelude
+import qualified System.Environment as Env
+import Text.ICalendar
+import Prelude hiding (log)
+
+main :: IO ()
+main = do
+  Env.getArgs >>= \case
+    [] -> dieUserError progName "First argument must be the ics file name"
+    (file : _) ->
+      do
+        parse file
+        >>= traverse_
+          ( \vcal ->
+              vcal
+                & stripSingleTimezone
+                & minify
+                & printICalendar Default.def
+                & Bytes.Lazy.putStr
+          )
+
+progName :: CurrentProgramName
+progName = "ical-smolify"
+
+log :: Error -> IO ()
+log err = do
+  putStderrLn (errorContext "ical-smolify" err & prettyError)
+
+parse :: FilePath -> IO [VCalendar]
+parse file = do
+  parseICalendarFile Default.def file >>= \case
+    Left err -> do
+      dieUserError progName [fmt|Cannot parse ical file: {err}|]
+    Right (cals, warnings) -> do
+      for_ warnings (\warn -> log [fmt|Warning: {warn}|])
+      pure cals
+
+-- | Converts a single timezone definition to the corresponding X-WR-Timezone field.
+stripSingleTimezone :: VCalendar -> VCalendar
+stripSingleTimezone vcal =
+  case vcal & vcTimeZones & Map.toList of
+    [] -> vcal
+    [(_, tz)] -> do
+      let xtz =
+            OtherProperty
+              { otherName = CaseInsensitive.mk "X-WR-TIMEZONE",
+                otherValue = tz & vtzId & tzidValue & textToBytesUtf8Lazy,
+                otherParams = OtherParams Set.empty
+              }
+      vcal
+        { vcOther =
+            vcal & vcOther
+              -- remove any existing x-wr-timezone fields
+              & Set.filter (\prop -> (prop & otherName) /= (xtz & otherName))
+              & Set.insert xtz,
+          vcTimeZones = Map.empty
+        }
+    _more -> vcal
+
+-- | Minify the vcalendar by throwing away everything that’s not an event.
+minify :: VCalendar -> VCalendar
+minify vcal =
+  vcal
+    { vcProdId = ProdId "" (OtherParams Set.empty),
+      -- , vcVersion    :: ICalVersion
+      -- , vcScale      :: Scale
+      -- , vcMethod     :: Maybe Method
+      -- , vcOther      :: …
+      -- , vcTimeZones  :: Map Text VTimeZone
+      vcEvents = Map.map minifyEvent (vcal & vcEvents),
+      vcTodos = Map.empty,
+      vcJournals = Map.empty,
+      vcFreeBusys = Map.empty,
+      vcOtherComps = Set.empty
+    }
+
+minifyEvent :: VEvent -> VEvent
+minifyEvent vev =
+  vev
+--  { veDTStamp       :: DTStamp
+--   , veUID           :: UID
+--   , veClass         :: Class -- ^ 'def' = 'Public'
+--   , veDTStart       :: Maybe DTStart
+--   , veCreated       :: Maybe Created
+--   , veDescription   :: Maybe Description
+--   , veGeo           :: Maybe Geo
+--   , veLastMod       :: Maybe LastModified
+--   , veLocation      :: Maybe Location
+--   , veOrganizer     :: Maybe Organizer
+--   , vePriority      :: Priority -- ^ 'def' = 0
+--   , veSeq           :: Sequence -- ^ 'def' = 0
+--   , veStatus        :: Maybe EventStatus
+--   , veSummary       :: Maybe Summary
+--   , veTransp        :: TimeTransparency -- ^ 'def' = 'Opaque'
+--   , veUrl           :: Maybe URL
+--   , veRecurId       :: Maybe RecurrenceId
+--   , veRRule         :: Set RRule
+--   , veDTEndDuration :: Maybe (Either DTEnd DurationProp)
+--   , veAttach        :: Set Attachment
+--   , veAttendee      :: Set Attendee
+--   , veCategories    :: Set Categories
+--   , veComment       :: Set Comment
+--   , veContact       :: Set Contact
+--   , veExDate        :: Set ExDate
+--   , veRStatus       :: Set RequestStatus
+--   , veRelated       :: Set RelatedTo
+--   , veResources     :: Set Resources
+--   , veRDate         :: Set RDate
+--   , veAlarms        :: Set VAlarm
+--   , veOther         :: Set OtherProperty
+--   }
diff --git a/users/Profpatsch/ical-smolify/README.md b/users/Profpatsch/ical-smolify/README.md
new file mode 100644
index 0000000000..86c166d3c1
--- /dev/null
+++ b/users/Profpatsch/ical-smolify/README.md
@@ -0,0 +1,5 @@
+# ical-smolify
+
+Ensmallen an `ical` by stripping out redundant information like timezone definitions.
+
+The idea was that after running an `.ics` file through this preprocessor, it fits into a QR code (which can hold on the order of 2–3 kB) that can be scanned with your phone, for automatically adding the event to a mobile calendar.
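+
+Usage sketch (assuming the binary is on `$PATH` and `event.ics` exists):
+
+```console
+$ ical-smolify event.ics > smol.ics
+```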
diff --git a/users/Profpatsch/ical-smolify/default.nix b/users/Profpatsch/ical-smolify/default.nix
new file mode 100644
index 0000000000..bf766db0e9
--- /dev/null
+++ b/users/Profpatsch/ical-smolify/default.nix
@@ -0,0 +1,23 @@
+{ depot, pkgs, lib, ... }:
+
+let
+  ical-smolify = pkgs.writers.writeHaskell "ical-smolify"
+    {
+      libraries = [
+        pkgs.haskellPackages.iCalendar
+        depot.users.Profpatsch.my-prelude
+        depot.users.Profpatsch.execline.exec-helpers-hs
+
+      ];
+      ghcArgs = [ "-threaded" ];
+    } ./IcalSmolify.hs;
+
+in
+
+ical-smolify.overrideAttrs (old: {
+  meta = lib.recursiveUpdate old.meta or { } {
+    # Dependency iCalendar no longer builds in nixpkgs due to a lack of maintenance upstream
+    # https://github.com/nixos/nixpkgs/commit/13d10cc6e302e7d5800c6a08c1728b14c3801e26
+    ci.skip = true;
+  };
+})
diff --git a/users/Profpatsch/ical-smolify/ical-smolify.cabal b/users/Profpatsch/ical-smolify/ical-smolify.cabal
new file mode 100644
index 0000000000..d7a46c581d
--- /dev/null
+++ b/users/Profpatsch/ical-smolify/ical-smolify.cabal
@@ -0,0 +1,18 @@
+cabal-version:      3.0
+name:               ical-smolify
+version:            0.1.0.0
+author:             Profpatsch
+maintainer:         mail@profpatsch.de
+
+executable ical-smolify
+    main-is: IcalSmolify.hs
+
+    build-depends:
+        base >=4.15 && <5,
+        my-prelude,
+        exec-helpers,
+        data-default,
+        case-insensitive,
+        iCalendar
+
+    default-language: Haskell2010
diff --git a/users/Profpatsch/imap-idle.nix b/users/Profpatsch/imap-idle.nix
new file mode 100644
index 0000000000..84af5d0e54
--- /dev/null
+++ b/users/Profpatsch/imap-idle.nix
@@ -0,0 +1,17 @@
+{ depot, pkgs, lib, ... }:
+
+let
+  imap-idle = depot.nix.writers.rustSimple
+    {
+      name = "imap-idle";
+      dependencies = [
+        depot.users.Profpatsch.arglib.netencode.rust
+        depot.third_party.rust-crates.imap
+        depot.third_party.rust-crates.epoll
+        depot.users.Profpatsch.execline.exec-helpers
+      ];
+    }
+    (builtins.readFile ./imap-idle.rs);
+
+in
+imap-idle
diff --git a/users/Profpatsch/imap-idle.rs b/users/Profpatsch/imap-idle.rs
new file mode 100644
index 0000000000..937847b879
--- /dev/null
+++ b/users/Profpatsch/imap-idle.rs
@@ -0,0 +1,140 @@
+extern crate exec_helpers;
+// extern crate arglib_netencode;
+// extern crate netencode;
+extern crate epoll;
+extern crate imap;
+
+// use netencode::dec;
+use imap::extensions::idle::SetReadTimeout;
+use std::convert::TryFrom;
+use std::fs::File;
+use std::io::{Read, Write};
+use std::os::unix::io::{AsRawFd, FromRawFd, RawFd};
+use std::time::Duration;
+
+/// Implements a UCSPI client that wraps fds 6 & 7
+/// and implements Write and Read with a timeout.
+/// See https://cr.yp.to/proto/ucspi.txt
+#[derive(Debug)]
+struct UcspiClient {
+    read: File,
+    read_epoll_fd: RawFd,
+    read_timeout: Option<Duration>,
+    write: File,
+}
+
+impl UcspiClient {
+    /// Use fd 6 and 7 to connect to the net, as is specified.
+    /// Unsafe because fd 6 and 7 are global resources and we don’t mutex them.
+    pub unsafe fn new_from_6_and_7() -> std::io::Result<Self> {
+        unsafe {
+            let read_epoll_fd = epoll::create(false)?;
+            Ok(UcspiClient {
+                read: File::from_raw_fd(6),
+                read_epoll_fd,
+                read_timeout: None,
+                write: File::from_raw_fd(7),
+            })
+        }
+    }
+}
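+
+// Example invocation (a sketch): any UCSPI client that provides the
+// network connection on fds 6 & 7 works, e.g. s6-networking’s
+// s6-tlsclient:
+//
+//   env IMAP_USERNAME=me IMAP_PASSWORD=hunter2 \
+//     s6-tlsclient imap.example.org 993 ./imap-idle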
+
+/// Emulates set_read_timeout() like on a TCP socket with an epoll on read.
+/// The BSD socket API is rather bad, so fd != fd,
+/// and if we cast the `UcspiClient` fds to `TcpStream` instead of `File`,
+/// we’d break any UCSPI client programs that *don’t* connect to TCP.
+/// Instead we use the (linux) `epoll` API in read to wait on the timeout.
+impl SetReadTimeout for UcspiClient {
+    fn set_read_timeout(&mut self, timeout: Option<Duration>) -> imap::Result<()> {
+        self.read_timeout = timeout;
+        Ok(())
+    }
+}
+
+impl Read for UcspiClient {
+    // TODO: test the epoll code with a short timeout
+    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
+        const NO_DATA: u64 = 0;
+        // in order to implement the read_timeout,
+        // we use epoll to wait for either data or time out
+        epoll::ctl(
+            self.read_epoll_fd,
+            epoll::ControlOptions::EPOLL_CTL_ADD,
+            self.read.as_raw_fd(),
+            epoll::Event::new(epoll::Events::EPOLLIN, NO_DATA),
+        )?;
+        let UNUSED = epoll::Event::new(epoll::Events::EPOLLIN, NO_DATA);
+        let wait = epoll::wait(
+            self.read_epoll_fd,
+            match self.read_timeout {
+                Some(duration) => {
+                    i32::try_from(duration.as_millis()).expect("duration too big for epoll")
+                }
+                None => -1, // infinite
+            },
+            // event that was generated; but we don’t care
+            &mut vec![UNUSED; 1][..],
+        );
+        // Delete the listen fd from the epoll fd before reacting
+        // (otherwise it fails on the next read with `EPOLL_CTL_ADD`)
+        epoll::ctl(
+            self.read_epoll_fd,
+            epoll::ControlOptions::EPOLL_CTL_DEL,
+            self.read.as_raw_fd(),
+            UNUSED,
+        )?;
+        match wait {
+            // timeout happened (0 events)
+            Ok(0) => Err(std::io::Error::new(
+                std::io::ErrorKind::TimedOut,
+                "ucspi read timeout",
+            )),
+            // it’s ready for reading, we can read
+            Ok(_) => self.read.read(buf),
+            // error
+            err => err,
+        }
+    }
+}
+
+/// Just proxy through the `Write` of the write fd.
+impl Write for UcspiClient {
+    fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
+        self.write.write(buf)
+    }
+    fn flush(&mut self) -> std::io::Result<()> {
+        self.write.flush()
+    }
+}
+
+/// Connect to IMAP account and listen for new mails on the INBOX.
+fn main() {
+    exec_helpers::no_args("imap-idle");
+
+    // TODO: use arglib_netencode
+    let username = std::env::var("IMAP_USERNAME").expect("username");
+    let password = std::env::var("IMAP_PASSWORD").expect("password");
+
+    let net = unsafe { UcspiClient::new_from_6_and_7().expect("no ucspi client for you") };
+    let client = imap::Client::new(net);
+    let mut session = client
+        .login(username, password)
+        .map_err(|(err, _)| err)
+        .expect("unable to login");
+    eprintln!("{:#?}", session);
+    let list = session.list(None, Some("*"));
+    eprintln!("{:#?}", list);
+    let mailbox = session.examine("INBOX");
+    eprintln!("{:#?}", mailbox);
+    fn now() -> String {
+        String::from_utf8_lossy(&std::process::Command::new("date").output().unwrap().stdout)
+            .trim_end()
+            .to_string()
+    }
+    loop {
+        eprintln!("{}: idling on INBOX", now());
+        let mut handle = session.idle().expect("cannot idle on INBOX");
+        let () = handle.wait_keepalive().expect("waiting on idle failed");
+        eprintln!("{}: The mailbox has changed!", now());
+    }
+}
diff --git a/users/Profpatsch/importDhall.nix b/users/Profpatsch/importDhall.nix
new file mode 100644
index 0000000000..1947ad1ce1
--- /dev/null
+++ b/users/Profpatsch/importDhall.nix
@@ -0,0 +1,93 @@
+{ pkgs, depot, lib, ... }:
+let
+
+  # import the dhall file as nix expression via dhall-nix.
+  # Converts the normalized dhall expression to a nix file,
+  # puts it in the store and imports it.
+  # Types are erased, functions are converted to nix functions,
+  # unions values are nix functions that take a record of match
+  # functions for their alternatives.
+  # TODO: document better
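+  #
+  # Call sketch (hypothetical file names):
+  #
+  #   importDhall {
+  #     root = ./.;
+  #     files = [ "lib.dhall" ];
+  #     main = "main.dhall";
+  #     deps = [ ];
+  #     type = "Natural";
+  #   }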
+  importDhall =
+    {
+      # Root path of the dhall file tree to import (will be filtered by files)
+      root
+    , # A list of files which should be taken from `root` (relative paths).
+      # This is for minimizing the amount of things that have to be copied to the store.
+      # TODO: can you have directory prefixes?
+      files
+    , # The path of the dhall file which should be evaluated, relative to `root`, has to be in `files`
+      main
+    , # List of dependencies (TODO: what is a dependency?)
+      deps
+    , # dhall type of `main`, or `null` if anything should be possible.
+      type ? null
+    }:
+    let
+      absRoot = path: toString root + "/" + path;
+      src =
+        depot.users.Profpatsch.exactSource
+          root
+          # exactSource wants nix paths, but I think relative paths
+          # as strings are more intuitive.
+          ([ (absRoot main) ] ++ (map absRoot files));
+
+      cache = ".cache";
+      cacheDhall = "${cache}/dhall";
+
+      hadTypeAnnot = type != null;
+      typeAnnot = lib.optionalString hadTypeAnnot ": ${type}";
+
+      convert = pkgs.runCommandLocal "dhall-to-nix" { inherit deps; } ''
+        mkdir -p ${cacheDhall}
+        for dep in $deps; do
+          ${pkgs.xorg.lndir}/bin/lndir -silent $dep/${cacheDhall} ${cacheDhall}
+        done
+
+        export XDG_CACHE_HOME=$(pwd)/${cache}
+        # go into the source directory, so that the type can import files.
+        # TODO: This is a bit of a hack hrm.
+        cd "${src}"
+        printf 'Generating dhall nix code. Run
+        %s --file %s
+        to reproduce
+        ' \
+          ${pkgs.dhall}/bin/dhall \
+          ${absRoot main}
+        ${if hadTypeAnnot then ''
+            printf '%s' ${lib.escapeShellArg "${src}/${main} ${typeAnnot}"} \
+              | ${pkgs.dhall-nix}/bin/dhall-to-nix \
+              > $out
+          ''
+          else ''
+            printf 'No type annotation given, the dhall expression type was:\n'
+            ${pkgs.dhall}/bin/dhall type --file "${src}/${main}"
+            printf '%s' ${lib.escapeShellArg "${src}/${main}"} \
+              | ${pkgs.dhall-nix}/bin/dhall-to-nix \
+              > $out
+          ''}
+
+      '';
+    in
+    import convert;
+
+
+  # read dhall file in as JSON, then import as nix expression.
+  # The dhall file must not try to import from non-local URLs!
+  readDhallFileAsJson = dhallType: file:
+    let
+      convert = pkgs.runCommandLocal "dhall-to-json" { } ''
+        printf '%s' ${lib.escapeShellArg "${file} : ${dhallType}"} \
+          | ${pkgs.dhall-json}/bin/dhall-to-json \
+          > $out
+      '';
+    in
+    builtins.fromJSON (builtins.readFile convert);
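+
+  # e.g. (illustrative): readDhallFileAsJson "List Text" ./names.dhall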
+
+in
+{
+  inherit
+    importDhall
+    readDhallFileAsJson
+    ;
+}
diff --git a/users/Profpatsch/ini/default.nix b/users/Profpatsch/ini/default.nix
new file mode 100644
index 0000000000..e1a7a1a7b6
--- /dev/null
+++ b/users/Profpatsch/ini/default.nix
@@ -0,0 +1,6 @@
+{ depot, ... }:
+{
+  externs = {
+    renderIni = depot.users.Profpatsch.toINI { };
+  };
+}
diff --git a/users/Profpatsch/ini/ini.dhall b/users/Profpatsch/ini/ini.dhall
new file mode 100644
index 0000000000..f2efbc0af4
--- /dev/null
+++ b/users/Profpatsch/ini/ini.dhall
@@ -0,0 +1,36 @@
+let lib = ../dhall/lib.dhall
+
+let NameVal = λ(T : Type) → { name : Text, value : T }
+
+let ValueList = λ(T : Type) → List (NameVal T)
+
+let Section = ValueList Text
+
+let Sections = ValueList Section
+
+let Ini = { globalSection : Section, sections : Sections }
+
+let
+    -- Takes a list of INI files and merges their global sections and their section lists,
+    -- without duplicating by section name.
+    appendInis =
+      λ(inis : List Ini) →
+          { globalSection =
+              lib.List/concat
+                (NameVal Text)
+                (lib.List/map Ini Section (λ(i : Ini) → i.globalSection) inis)
+          , sections =
+              lib.List/concat
+                (NameVal Section)
+                (lib.List/map Ini Sections (λ(i : Ini) → i.sections) inis)
+          }
+        : Ini
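+
+-- An example Ini value (illustrative):
+--
+--   { globalSection = [ { name = "user", value = "prof" } ]
+--   , sections =
+--       [ { name = "core"
+--         , value = [ { name = "editor", value = "vi" } ]
+--         }
+--       ]
+--   }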
+
+let
+    -- Signatures of functions that are input via FFI.
+    Externs =
+      { -- given a dsl of functions to create an Ini, render the ini file
+        renderIni : Ini → Text
+      }
+
+in  { NameVal, ValueList, Section, Sections, Ini, appendInis, Externs }
diff --git a/users/Profpatsch/jaeger.nix b/users/Profpatsch/jaeger.nix
new file mode 100644
index 0000000000..374e40df1a
--- /dev/null
+++ b/users/Profpatsch/jaeger.nix
@@ -0,0 +1,46 @@
+{ depot, pkgs, ... }:
+let
+  drv =
+    pkgs.stdenv.mkDerivation {
+      pname = "jaeger";
+      version = "1.49.0";
+      src = pkgs.fetchurl {
+        url = "https://github.com/jaegertracing/jaeger/releases/download/v1.49.0/jaeger-1.49.0-linux-amd64.tar.gz";
+        hash = "sha256-QhxISDlk/t431EesgVkHWTe7yiw2B+yyfq//GLP0As4=";
+      };
+      phases = [ "unpackPhase" "installPhase" "fixupPhase" ];
+      installPhase = ''
+        mkdir -p $out/bin
+        install ./jaeger-all-in-one $out/bin
+      '';
+    };
+  image =
+    pkgs.dockerTools.buildImage {
+      name = "jaeger";
+      tag = "1.49.0";
+      copyToRoot = drv;
+      config = {
+        Cmd = [ "/bin/jaeger-all-in-one" ];
+      };
+
+    };
+
+  runner =
+    depot.nix.writeExecline "jaeger-docker-run" { } [
+      "if"
+      [ "docker" "load" "-i" image ]
+      "docker"
+      "run"
+      "--rm"
+      "--name"
+      "jaeger"
+      # Web UI
+      "-p"
+      "16686:16686"
+      # Opentelemetry
+      "-p"
+      "4318:4318"
+      "jaeger:1.49.0"
+    ];
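+
+  # After `jaeger-docker-run` starts, the web UI is at
+  # http://localhost:16686 and OTLP exporters can send to
+  # http://localhost:4318 (the standard OTLP/HTTP port).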
+in
+runner
diff --git a/users/Profpatsch/jbovlaste-sqlite/JbovlasteSqlite.hs b/users/Profpatsch/jbovlaste-sqlite/JbovlasteSqlite.hs
new file mode 100644
index 0000000000..8dae9cd026
--- /dev/null
+++ b/users/Profpatsch/jbovlaste-sqlite/JbovlasteSqlite.hs
@@ -0,0 +1,389 @@
+{-# LANGUAGE QuasiQuotes #-}
+{-# OPTIONS_GHC -Wno-orphans #-}
+
+module Main where
+
+import Conduit ((.|))
+import Conduit qualified as Cond
+import Control.Category qualified as Cat
+import Control.Foldl qualified as Fold
+import Data.ByteString.Internal qualified as Bytes
+import Data.Error.Tree
+import Data.Int (Int64)
+import Data.List qualified as List
+import Data.Map.Strict qualified as Map
+import Data.Maybe (catMaybes)
+import Data.Text qualified as Text
+import Data.Text.IO qualified as Text
+import Database.SQLite.Simple qualified as Sqlite
+import Database.SQLite.Simple.FromField qualified as Sqlite
+import Database.SQLite.Simple.QQ qualified as Sqlite
+import FieldParser qualified as Field
+import Label
+import Parse
+import PossehlAnalyticsPrelude
+import Text.XML (def)
+import Text.XML qualified as Xml
+import Prelude hiding (init, maybe)
+
+main :: IO ()
+main = do
+  f <- file
+  f.documentRoot
+    & filterDown
+    & toTree
+    & prettyErrorTree
+    & Text.putStrLn
+
+test :: IO ()
+test = do
+  withEnv $ \env -> do
+    migrate env
+    f <- file
+    parseJbovlasteXml f
+      & \case
+        Left errs -> Text.putStrLn $ prettyErrorTree errs
+        Right valsi -> insertValsi env valsi
+
+filterDown :: Xml.Element -> Xml.Element
+filterDown el =
+  el
+    & filterElementsRec noUsers
+    & downTo (T2 (label @"maxdepth" 5) (label @"maxlistitems" 30))
+
+data Valsi = Valsi
+  { word :: Text,
+    definition :: Text,
+    definitionId :: Natural,
+    typ :: Text,
+    selmaho :: Maybe Text,
+    notes :: Maybe Text,
+    glosswords :: [T2 "word" Text "sense" (Maybe Text)],
+    keywords :: [T3 "word" Text "place" Natural "sense" (Maybe Text)]
+  }
+  deriving stock (Show)
+
+insertValsi :: Env -> [Valsi] -> IO ()
+insertValsi env vs = do
+  Sqlite.withTransaction env.envData $
+    do
+      valsiIds <-
+        Cond.yieldMany vs
+          .| Cond.mapMC
+            ( \v ->
+                Sqlite.queryNamed
+                  @(Sqlite.Only Int64)
+                  env.envData
+                  [Sqlite.sql|
+                     INSERT INTO valsi
+                       (word , definition , type , selmaho , notes )
+                       VALUES
+                       (:word, :definition, :type, :selmaho, :notes)
+                       RETURNING (id)
+                   |]
+                  [ ":word" Sqlite.:= v.word,
+                    ":definition" Sqlite.:= v.definition,
+                    ":type" Sqlite.:= v.typ,
+                    ":selmaho" Sqlite.:= v.selmaho,
+                    ":notes" Sqlite.:= v.notes
+                  ]
+                  >>= \case
+                    [one] -> pure one
+                    _ -> error "more or less than one result"
+            )
+          .| Cond.sinkList
+          & Cond.runConduit
+      for_ (zip valsiIds vs) $ \(Sqlite.Only vId, v) -> do
+        for_ v.glosswords $ \g -> do
+          Sqlite.executeNamed
+            env.envData
+            [Sqlite.sql|
+                      INSERT INTO glosswords
+                        (valsi_id , word , sense )
+                        VALUES
+                        (:valsi_id, :word, :sense)
+                    |]
+            [ ":valsi_id" Sqlite.:= vId,
+              ":word" Sqlite.:= g.word,
+              ":sense" Sqlite.:= g.sense
+            ]
+      for_ (zip valsiIds vs) $ \(Sqlite.Only vId, v) -> do
+        for_ v.keywords $ \g -> do
+          Sqlite.executeNamed
+            env.envData
+            [Sqlite.sql|
+                      INSERT INTO keywords
+                        (valsi_id , word , place , sense )
+                        VALUES
+                        (:valsi_id, :word, :place, :sense)
+                    |]
+            [ ":valsi_id" Sqlite.:= vId,
+              ":word" Sqlite.:= g.word,
+              ":place" Sqlite.:= (g.place & fromIntegral @Natural @Int),
+              ":sense" Sqlite.:= g.sense
+            ]
+
+migrate :: (HasField "envData" p Sqlite.Connection) => p -> IO ()
+migrate env = do
+  let x q = Sqlite.execute env.envData q ()
+  x
+    [Sqlite.sql|
+      CREATE TABLE IF NOT EXISTS valsi (
+        id integer PRIMARY KEY,
+        word text NOT NULL,
+        definition text NOT NULL,
+        type text NOT NULL,
+        selmaho text NULL,
+        notes text NULL
+      )
+     |]
+  x
+    [Sqlite.sql|
+      CREATE TABLE IF NOT EXISTS glosswords (
+        id integer PRIMARY KEY,
+        valsi_id integer NOT NULL,
+        word text NOT NULL,
+        sense text NULL,
+        FOREIGN KEY(valsi_id) REFERENCES valsi(id)
+      )
+    |]
+  x
+    [Sqlite.sql|
+      CREATE TABLE IF NOT EXISTS keywords (
+        id integer PRIMARY KEY,
+        valsi_id integer NOT NULL,
+        word text NOT NULL,
+        place integer NOT NULL,
+        sense text NULL,
+        FOREIGN KEY(valsi_id) REFERENCES valsi(id)
+      )
+    |]
+
+data Env = Env
+  { envData :: Sqlite.Connection
+  }
+
+withEnv :: (Env -> IO a) -> IO a
+withEnv inner = do
+  withSqlite "./jbovlaste.sqlite" $ \envData -> inner Env {..}
+
+withSqlite :: String -> (Sqlite.Connection -> IO a) -> IO a
+withSqlite fileName inner = Sqlite.withConnection fileName $ \conn -> do
+  -- Sqlite.setTrace conn (Just (\msg -> Text.hPutStrLn IO.stderr [fmt|{fileName}: {msg}|]))
+  Sqlite.execute conn [Sqlite.sql|PRAGMA foreign_keys = ON|] ()
+  inner conn
+
+parseJbovlasteXml :: (HasField "documentRoot" r Xml.Element) => r -> Either ErrorTree [Valsi]
+parseJbovlasteXml xml =
+  xml.documentRoot
+    & runParse
+      "cannot parse jbovlaste.xml"
+      parser
+  where
+    parser =
+      (element "dictionary" <&> (.elementNodes) <&> mapMaybe nodeElementMay)
+        >>> ( find
+                ( element "direction"
+                    >>> do
+                      (attribute "from" >>> exactly showToText "lojban")
+                      *> (attribute "to" >>> exactly showToText "English")
+                      *> Cat.id
+                )
+                <&> (\x -> x.elementNodes <&> nodeElementMay)
+            )
+        >>> (multiple (maybe valsi) <&> catMaybes)
+    valsi =
+      element "valsi"
+        >>> do
+          let subNodes =
+                ( Cat.id
+                    <&> (.elementNodes)
+                    <&> mapMaybe nodeElementMay
+                )
+
+          let subElementContent elName =
+                subNodes
+                  >>> ( (find (element elName))
+                          <&> (.elementNodes)
+                      )
+                  >>> exactlyOne
+                  >>> content
+          let optionalSubElementContent elName =
+                subNodes
+                  >>> ((findAll (element elName) >>> zeroOrOne))
+                  >>> (maybe (lmap (.elementNodes) exactlyOne >>> content))
+
+          word <- attribute "word"
+          typ <- attribute "type"
+          selmaho <- optionalSubElementContent "selmaho"
+          definition <- subElementContent "definition"
+          definitionId <- subElementContent "definitionid" >>> fieldParser Field.decimalNatural
+          notes <- optionalSubElementContent "notes"
+          glosswords <-
+            (subNodes >>> findAll (element "glossword"))
+              >>> ( multiple $ do
+                      word' <- label @"word" <$> (attribute "word")
+                      sense <- label @"sense" <$> (attributeMay "sense")
+                      pure $ T2 word' sense
+                  )
+          keywords <-
+            (subNodes >>> findAll (element "keyword"))
+              >>> ( multiple $ do
+                      word' <- label @"word" <$> (attribute "word")
+                      place <- label @"place" <$> (attribute "place" >>> fieldParser Field.decimalNatural)
+                      sense <- label @"sense" <$> (attributeMay "sense")
+                      pure $ T3 word' place sense
+                  )
+
+          pure $ Valsi {..}
+
+file :: IO Xml.Document
+file = Xml.readFile def "./jbovlaste-en.xml"
+
+-- | Filter XML elements recursively based on the given predicate
+filterElementsRec :: (Xml.Element -> Bool) -> Xml.Element -> Xml.Element
+filterElementsRec f el =
+  el
+    { Xml.elementNodes =
+        mapMaybe
+          ( \case
+              Xml.NodeElement el' ->
+                if f el'
+                  then Just $ Xml.NodeElement $ filterElementsRec f el'
+                  else Nothing
+              other -> Just other
+          )
+          el.elementNodes
+    }
+
+-- | no <user> allowed
+noUsers :: Xml.Element -> Bool
+noUsers el = el.elementName.nameLocalName /= "user"
+
+downTo :: (T2 "maxdepth" Int "maxlistitems" Int) -> Xml.Element -> Xml.Element
+downTo n el =
+  if n.maxdepth > 0
+    then
+      el
+        { Xml.elementNodes =
+            ( do
+                let eleven = take (n.maxlistitems + 1) $ map down el.elementNodes
+                if List.length eleven == (n.maxlistitems + 1)
+                  then eleven <> [Xml.NodeComment "snip!"]
+                  else eleven
+            )
+        }
+    else el {Xml.elementNodes = [Xml.NodeComment "snip!"]}
+  where
+    down =
+      \case
+        Xml.NodeElement el' ->
+          Xml.NodeElement $
+            downTo
+              ( T2
+                  (label @"maxdepth" $ n.maxdepth - 1)
+                  (label @"maxlistitems" n.maxlistitems)
+              )
+              el'
+        more -> more
+
+toTree :: Xml.Element -> ErrorTree
+toTree el = do
+  case el.elementNodes & filter (not . isEmptyContent) & nonEmpty of
+    Nothing -> singleError (newError (prettyXmlElement el))
+    Just (n :| []) | not $ isElementNode n -> singleError $ errorContext (prettyXmlElement el) (nodeErrorNoElement n)
+    Just nodes -> nestedMultiError (newError (prettyXmlElement el)) (nodes <&> node)
+  where
+    isEmptyContent = \case
+      Xml.NodeContent c -> c & Text.all Bytes.isSpaceChar8
+      _ -> False
+    isElementNode = \case
+      Xml.NodeElement _ -> True
+      _ -> False
+
+    node :: Xml.Node -> ErrorTree
+    node = \case
+      Xml.NodeElement el' -> toTree el'
+      other -> singleError $ nodeErrorNoElement other
+
+    nodeErrorNoElement :: Xml.Node -> Error
+    nodeErrorNoElement = \case
+      Xml.NodeInstruction i -> [fmt|Instruction: {i & show}|]
+      Xml.NodeContent c -> [fmt|"{c & Text.replace "\"" "\\\""}"|]
+      Xml.NodeComment c -> [fmt|<!-- {c} -->|]
+      Xml.NodeElement _ -> error "NodeElement not allowed here"
+
+prettyXmlName :: Xml.Name -> Text
+prettyXmlName n = [fmt|{n.namePrefix & fromMaybe ""}{n.nameLocalName}|]
+
+prettyXmlElement :: Xml.Element -> Text
+prettyXmlElement el =
+  if not $ null el.elementAttributes
+    then [fmt|<{prettyXmlName el.elementName}: {attrs el.elementAttributes}>|]
+    else [fmt|<{prettyXmlName el.elementName}>|]
+  where
+    attrs :: Map Xml.Name Text -> Text
+    attrs a = a & Map.toList <&> (\(k, v) -> [fmt|{prettyXmlName k}={v}|]) & Text.intercalate ", " & \s -> [fmt|({s})|]
+
+nodeElementMay :: Xml.Node -> Maybe Xml.Element
+nodeElementMay = \case
+  Xml.NodeElement el -> Just el
+  _ -> Nothing
+
+element :: Text -> Parse Xml.Element Xml.Element
+element name = Parse $ \(ctx, el) ->
+  if el.elementName.nameLocalName == name
+    then Success (ctx & addContext (prettyXmlName el.elementName), el)
+    else Failure $ singleton [fmt|Expected element named <{name}> but got {el & prettyXmlElement} at {showContext ctx}|]
+
+content :: Parse Xml.Node Text
+content = Parse $ \(ctx, node) -> case node of
+  Xml.NodeContent t -> Success (ctx, t)
+  -- TODO: give an example of the node content?
+  n -> Failure $ singleton [fmt|Expected a content node, but got a {n & nodeType} node, at {showContext ctx}|]
+    where
+      nodeType = \case
+        Xml.NodeContent _ -> "content" :: Text
+        Xml.NodeComment _ -> "comment"
+        Xml.NodeInstruction _ -> "instruction"
+        Xml.NodeElement _ -> "element"
+
+attribute :: Text -> Parse Xml.Element Text
+attribute name = Parse $ \(ctx, el) ->
+  case el.elementAttributes & Map.mapKeys (.nameLocalName) & Map.lookup name of
+    Just a -> Success (ctx & addContext [fmt|{{attr:{name}}}|], a)
+    Nothing -> Failure $ singleton [fmt|Attribute "{name}" missing at {showContext ctx}|]
+
+attributeMay :: Text -> Parse Xml.Element (Maybe Text)
+attributeMay name = Parse $ \(ctx, el) ->
+  case el.elementAttributes & Map.mapKeys (.nameLocalName) & Map.lookup name of
+    Just a -> Success (ctx & addContext [fmt|{{attr:{name}}}|], Just a)
+    Nothing -> Success (ctx, Nothing)
+
+instance
+  ( Sqlite.FromField t1,
+    Sqlite.FromField t2,
+    Sqlite.FromField t3
+  ) =>
+  Sqlite.FromRow (T3 l1 t1 l2 t2 l3 t3)
+  where
+  fromRow = do
+    T3
+      <$> (label @l1 <$> Sqlite.field)
+      <*> (label @l2 <$> Sqlite.field)
+      <*> (label @l3 <$> Sqlite.field)
+
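+-- | Fold over the rows of a query with a 'Fold.Fold' accumulator,
+-- without threading the intermediate state by hand.
+--
+-- Example (with a hypothetical query):
+-- @foldRows conn "SELECT word FROM valsi" () Fold.list@ collects every row
+-- into a list; @Fold.length@ would count the rows instead.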
+foldRows ::
+  forall row params b.
+  (Sqlite.FromRow row, Sqlite.ToRow params) =>
+  Sqlite.Connection ->
+  Sqlite.Query ->
+  params ->
+  Fold.Fold row b ->
+  IO b
+foldRows conn qry params = Fold.purely f
+  where
+    f :: forall x. (x -> row -> x) -> x -> (x -> b) -> IO b
+    f acc init extract = do
+      x <- Sqlite.fold conn qry params init (\a r -> pure $ acc a r)
+      pure $ extract x
diff --git a/users/Profpatsch/jbovlaste-sqlite/default.nix b/users/Profpatsch/jbovlaste-sqlite/default.nix
new file mode 100644
index 0000000000..ea59fdec39
--- /dev/null
+++ b/users/Profpatsch/jbovlaste-sqlite/default.nix
@@ -0,0 +1,33 @@
+{ depot, pkgs, lib, ... }:
+
+let
+  #   bins = depot.nix.getBins pkgs.sqlite ["sqlite3"];
+
+  jbovlaste-sqlite = pkgs.haskellPackages.mkDerivation {
+    pname = "jbovlaste-sqlite";
+    version = "0.1.0";
+
+    src = depot.users.Profpatsch.exactSource ./. [
+      ./jbovlaste-sqlite.cabal
+      ./JbovlasteSqlite.hs
+    ];
+
+    libraryHaskellDepends = [
+      pkgs.haskellPackages.pa-prelude
+      pkgs.haskellPackages.pa-label
+      pkgs.haskellPackages.pa-error-tree
+      pkgs.haskellPackages.pa-field-parser
+      depot.users.Profpatsch.my-prelude
+      pkgs.haskellPackages.foldl
+      pkgs.haskellPackages.sqlite-simple
+      pkgs.haskellPackages.xml-conduit
+
+    ];
+
+    isExecutable = true;
+    isLibrary = false;
+    license = lib.licenses.mit;
+  };
+
+in
+jbovlaste-sqlite
diff --git a/users/Profpatsch/jbovlaste-sqlite/jbovlaste-sqlite.cabal b/users/Profpatsch/jbovlaste-sqlite/jbovlaste-sqlite.cabal
new file mode 100644
index 0000000000..f677615a16
--- /dev/null
+++ b/users/Profpatsch/jbovlaste-sqlite/jbovlaste-sqlite.cabal
@@ -0,0 +1,76 @@
+cabal-version:      3.0
+name:               jbovlaste-sqlite
+version:            0.1.0.0
+author:             Profpatsch
+maintainer:         mail@profpatsch.de
+
+common common-options
+  ghc-options:
+      -Wall
+      -Wno-type-defaults
+      -Wunused-packages
+      -Wredundant-constraints
+      -fwarn-missing-deriving-strategies
+
+  -- See https://downloads.haskell.org/ghc/latest/docs/users_guide/exts.html
+  -- for a description of all these extensions
+  default-extensions:
+      -- Infer Applicative instead of Monad where possible
+    ApplicativeDo
+
+    -- Allow literal strings to be Text
+    OverloadedStrings
+
+    -- Syntactic sugar improvements
+    LambdaCase
+    MultiWayIf
+
+    -- Makes the (deprecated) usage of * instead of Data.Kind.Type an error
+    NoStarIsType
+
+    -- Convenient and crucial to deal with ambiguous field names, commonly
+    -- known as RecordDotSyntax
+    OverloadedRecordDot
+
+    -- does not export record fields as functions, use OverloadedRecordDot to access instead
+    NoFieldSelectors
+
+    -- Record punning
+    RecordWildCards
+
+    -- Improved Deriving
+    DerivingStrategies
+    DerivingVia
+
+    -- Type-level strings
+    DataKinds
+
+    -- to enable the `type` keyword in import lists (ormolu uses this automatically)
+    ExplicitNamespaces
+
+  default-language: GHC2021
+
+
+executable jbovlaste-sqlite
+    import: common-options
+
+    main-is:          JbovlasteSqlite.hs
+
+    build-depends:
+        base >=4.15 && <5,
+        pa-prelude,
+        pa-label,
+        pa-error-tree,
+        pa-field-parser,
+        my-prelude,
+        containers,
+        selective,
+        semigroupoids,
+        validation-selective,
+        sqlite-simple,
+        foldl,
+        conduit,
+        bytestring,
+        text,
+        xml-conduit,
diff --git a/users/Profpatsch/lens.nix b/users/Profpatsch/lens.nix
new file mode 100644
index 0000000000..28f7506bdd
--- /dev/null
+++ b/users/Profpatsch/lens.nix
@@ -0,0 +1,137 @@
+{ ... }:
+let
+  id = x: x;
+
+  const = x: y: x;
+
+  comp = f: g: x: f (g x);
+
+  _ = v: f: f v;
+
+  # Profunctor (p :: Type -> Type -> Type)
+  Profunctor = rec {
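+    # Note: these defaults are mutually recursive; they document how the
+    # methods relate, but since `//` does not tie the knot, a concrete
+    # instance (like profunctorFun below) has to override all of them.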
+    # dimap :: (a -> b) -> (c -> d) -> p b c -> p a d
+    dimap = f: g: x: lmap f (rmap g x);
+    # lmap :: (a -> b) -> p b c -> p a c
+    lmap = f: dimap f id;
+    # rmap :: (c -> d) -> p b c -> p b d
+    rmap = g: dimap id g;
+  };
+
+  # Profunctor (->)
+  profunctorFun = Profunctor // {
+    # dimap :: (a -> b) -> (c -> d) -> (b -> c) -> a -> d
+    dimap = ab: cd: bc: a: cd (bc (ab a));
+    # lmap :: (a -> b) -> (b -> c) -> (a -> c)
+    lmap = ab: bc: a: bc (ab a);
+    # rmap :: (c -> d) -> (b -> c) -> (b -> d)
+    rmap = cd: bc: b: cd (bc b);
+  };
+
+  tuple = fst: snd: {
+    inherit fst snd;
+  };
+
+  swap = { fst, snd }: {
+    fst = snd;
+    snd = fst;
+  };
+
+  # Profunctor p => Strong (p :: Type -> Type -> Type)
+  Strong = pro: pro // rec {
+    # firstP :: p a b -> p (a, c) (b, c)
+    firstP = pab: pro.dimap swap swap (pro.secondP pab);
+    # secondP :: p a b -> p (c, a) (c, b)
+    secondP = pab: pro.dimap swap swap (pro.firstP pab);
+  };
+
+  # Strong (->)
+  strongFun = Strong profunctorFun // {
+    # firstP :: (a -> b) -> (a, c) -> (b, c)
+    firstP = f: { fst, snd }: { fst = f fst; inherit snd; };
+    # secondP :: (a -> b) -> (c, a) -> (c, b)
+    secondP = f: { snd, fst }: { snd = f snd; inherit fst; };
+  };
+
+  # Iso s t a b :: forall p. Profunctor p -> p a b -> p s t
+
+  # iso :: (s -> a) -> (b -> t) -> Iso s t a b
+  iso = pro: pro.dimap;
+
+  # Lens s t a b :: forall p. Strong p -> p a b -> p s t
+
+  # lens :: (s -> a) -> (s -> b -> t) -> Lens s t a b
+  lens = strong: get: set: pab:
+    lensP
+      strong
+      (s: tuple (get s) (b: set s b))
+      pab;
+
+  # lensP :: (s -> (a, b -> t)) -> Lens s t a b
+  lensP = strong: to: pab:
+    strong.dimap
+      to
+      ({ fst, snd }: snd fst)
+      (strong.firstP pab);
+
+  # first element of a tuple
+  # _1 :: Lens (a, c) (b, c) a b
+  _1 = strong: strong.firstP;
+
+  # second element of a tuple
+  # _2 :: Lens (c, a) (c, b) a b
+  _2 = strong: strong.secondP;
+
+  # a lens for the given field in the record
+  # field :: (f :: String) -> Lens { f :: a; ... } { f :: b; ... } a b
+  field = name: strong:
+    lens
+      strong
+      (attrs: attrs.${name})
+      (attrs: a: attrs // { ${name} = a; });
+
+  # Setter :: (->) a b -> (->) s t
+  # Setter :: (a -> b) -> (s -> t)
+
+
+  # Subclasses of profunctor for (->).
+  # We only have Strong for now, but when we implement Choice we need to add it here.
+  profunctorSubclassesFun = strongFun;
+
+  # over :: Setter s t a b -> (a -> b) -> s -> t
+  over = setter:
+    # A setter needs to be instanced to the profunctor-subclass instances of (->).
+    (setter profunctorSubclassesFun);
+
+  # set :: Setter s t a b -> b -> s -> t
+  set = setter: b: over setter (const b);
+
+  # combine a bunch of optics, for the subclass instance of profunctor you give it.
+  optic = accessors: profunctorSubclass:
+    builtins.foldl' comp id
+      (map (accessor: accessor profunctorSubclass) accessors);
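+
+  # Example usage of the pieces above (a sketch):
+  #
+  #   over (field "name") (s: s + "!") { name = "hi"; }
+  #   => { name = "hi!"; }
+  #
+  #   set (optic [ (field "a") _1 ]) 42 { a = { fst = 1; snd = 2; }; }
+  #   => { a = { fst = 42; snd = 2; }; }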
+
+
+in
+{
+  inherit
+    id
+    _
+    const
+    comp
+    Profunctor
+    profunctorFun
+    Strong
+    strongFun
+    iso
+    lens
+    optic
+    _1
+    _2
+    field
+    tuple
+    swap
+    over
+    set
+    ;
+}
diff --git a/users/Profpatsch/lib.nix b/users/Profpatsch/lib.nix
new file mode 100644
index 0000000000..879d87755d
--- /dev/null
+++ b/users/Profpatsch/lib.nix
@@ -0,0 +1,108 @@
+{ depot, pkgs, ... }:
+let
+  bins = depot.nix.getBins pkgs.coreutils [ "printf" "echo" "cat" "printenv" "tee" ]
+    // depot.nix.getBins pkgs.bash [ "bash" ]
+    // depot.nix.getBins pkgs.fdtools [ "multitee" ]
+  ;
+
+  # Print `msg` and argv to stderr, then execute into argv
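+  # Example: the script produced by `debugExec "running"`, when invoked as
+  #   debug-exec foo --bar
+  # prints `running: foo --bar` to stderr, then execs into `foo --bar`.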
+  debugExec = msg: depot.nix.writeExecline "debug-exec" { } [
+    "if"
+    [
+      "fdmove"
+      "-c"
+      "1"
+      "2"
+      "if"
+      [ bins.printf "%s: " msg ]
+      "if"
+      [ bins.echo "$@" ]
+    ]
+    "$@"
+  ];
+
+  # Print stdin to stderr and stdout
+  eprint-stdin = depot.nix.writeExecline "eprint-stdin" { } [
+    "pipeline"
+    [ bins.multitee "0-1,2" ]
+    "$@"
+  ];
+
+  # Assume the input on stdin is netencode, pretty print it to stderr and forward it to stdout
+  eprint-stdin-netencode = depot.nix.writeExecline "eprint-stdin-netencode" { } [
+    "pipeline"
+    [
+      # move stdout to 3
+      "fdmove"
+      "3"
+      "1"
+      # the multitee copies stdin to 1 (the other pipeline end) and 3 (the stdout of the outer pipeline block)
+      "pipeline"
+      [ bins.multitee "0-1,3" ]
+      # make stderr the stdout of pretty, merging with the stderr of pretty
+      "fdmove"
+      "-c"
+      "1"
+      "2"
+      depot.users.Profpatsch.netencode.pretty
+    ]
+    "$@"
+  ];
+
+  # print the given environment variable in $1 to stderr, then execute into the rest of argv
+  eprintenv = depot.nix.writeExecline "eprintenv" { readNArgs = 1; } [
+    "ifelse"
+    [ "fdmove" "-c" "1" "2" bins.printenv "$1" ]
+    [ "$@" ]
+    "if"
+    [ depot.tools.eprintf "eprintenv: could not find \"\${1}\" in the environment\n" ]
+    "$@"
+  ];
+
+  # Split stdin into two commands, given by a block and the rest of argv
+  #
+  # Example (execline):
+  #
+  #   pipeline [ echo foo ]
+  #   split-stdin [ fdmove 1 2 foreground [ cat ] echo "bar" ] cat
+  #
+  #   stdout: foo\n
+  #   stderr: foo\nbar\n
+  split-stdin = depot.nix.writeExecline "split-stdin" { argMode = "env"; } [
+    "pipeline"
+    [
+      # this is horrible yes but the quickest way I knew how to implement it
+      "runblock"
+      "1"
+      bins.bash
+      "-c"
+      ''${bins.tee} >("$@")''
+      "bash-split-stdin"
+    ]
+    "runblock"
+    "-r"
+    "1"
+  ];
+
+  # remove everything but a few selected environment variables
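+  # e.g. `runInEmptyEnv [ "PATH" "HOME" ]` builds a wrapper that clears the
+  # environment, re-exports only PATH and HOME, then execs into its argv.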
+  runInEmptyEnv = keepVars:
+    let
+      importas = pkgs.lib.concatMap (var: [ "importas" "-i" var var ]) keepVars;
+      # we have to explicitly call export here, because PATH is probably empty
+      export = pkgs.lib.concatMap (var: [ "${pkgs.execline}/bin/export" var ''''${${var}}'' ]) keepVars;
+    in
+    depot.nix.writeExecline "empty-env" { }
+      (importas ++ [ "emptyenv" ] ++ export ++ [ "${pkgs.execline}/bin/exec" "$@" ]);
+
+
+in
+{
+  inherit
+    debugExec
+    eprint-stdin
+    eprint-stdin-netencode
+    eprintenv
+    split-stdin
+    runInEmptyEnv
+    ;
+}
diff --git a/users/Profpatsch/lorri-wait-for-eval/LorriWaitForEval.hs b/users/Profpatsch/lorri-wait-for-eval/LorriWaitForEval.hs
new file mode 100644
index 0000000000..a1a4586401
--- /dev/null
+++ b/users/Profpatsch/lorri-wait-for-eval/LorriWaitForEval.hs
@@ -0,0 +1,173 @@
+{-# LANGUAGE DerivingStrategies #-}
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE NamedFieldPuns #-}
+{-# LANGUAGE NumericUnderscores #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE QuasiQuotes #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+{-# OPTIONS_GHC -Wall #-}
+
+module Main where
+
+import Conduit
+import Conduit qualified as Cond
+import Control.Concurrent
+import Control.Concurrent.Async qualified as Async
+import Control.Monad
+import Data.Aeson.BetterErrors qualified as Json
+import Data.Bifunctor
+import Data.Conduit.Binary qualified as Conduit.Binary
+import Data.Conduit.Combinators qualified as Cond
+import Data.Conduit.Process
+import Data.Error
+import Data.Function
+import Data.Functor
+import Data.Text.IO (hPutStrLn)
+import MyPrelude
+import System.Directory qualified as Dir
+import System.Environment qualified as Env
+import System.Exit qualified as Exit
+import System.FilePath (takeDirectory)
+import System.FilePath.Posix qualified as FilePath
+import System.IO (stderr)
+import System.Posix qualified as Posix
+import Prelude hiding (log)
+
+data LorriEvent = LorriEvent
+  { nixFile :: Text,
+    eventType :: LorriEventType
+  }
+  deriving stock (Show)
+
+data LorriEventType
+  = Completed
+  | Started
+  | EvalFailure
+  deriving stock (Show)
+
+main :: IO ()
+main = do
+  argv <- Env.getArgs <&> nonEmpty
+
+  dir <- Dir.getCurrentDirectory
+  shellNix <-
+    findShellNix dir >>= \case
+      Nothing -> Exit.die [fmt|could not find any shell.nix in or above the directory {dir}|]
+      Just s -> pure s
+  getEventChan :: MVar (Chan LorriEvent) <- newEmptyMVar
+  Async.race_
+    ( do
+        sendEventChan :: Chan LorriEvent <- newChan
+        (exitCode, ()) <-
+          sourceProcessWithConsumer
+            (proc "lorri" ["internal", "stream-events"])
+            $
+            -- first, we want to send a message over the chan that the process is running (for timeout)
+            liftIO (putMVar getEventChan sendEventChan)
+              *> Conduit.Binary.lines
+              .| Cond.mapC
+                ( \jsonBytes ->
+                    (jsonBytes :: ByteString)
+                      & Json.parseStrict
+                        ( Json.key
+                            "Completed"
+                            ( do
+                                nixFile <- Json.key "nix_file" Json.asText
+                                pure LorriEvent {nixFile, eventType = Completed}
+                            )
+                            Json.<|> Json.key
+                              "Started"
+                              ( do
+                                  nixFile <- Json.key "nix_file" Json.asText
+                                  pure LorriEvent {nixFile, eventType = Started}
+                              )
+                            Json.<|> Json.key
+                              "Failure"
+                              ( do
+                                  nixFile <- Json.key "nix_file" Json.asText
+                                  pure LorriEvent {nixFile, eventType = EvalFailure}
+                              )
+                        )
+                      & first Json.displayError'
+                      & first (map newError)
+                      & first (smushErrors [fmt|Cannot parse line returned by lorri: {jsonBytes & bytesToTextUtf8Lenient}|])
+                      & unwrapError
+                )
+              .| (Cond.mapM_ (\ev -> writeChan sendEventChan ev))
+
+        log [fmt|lorri internal stream-events exited {show exitCode}|]
+    )
+    ( do
+        let waitMs ms = threadDelay (ms * 1000)
+
+        -- log [fmt|Waiting for lorri event for {shellNix}|]
+
+        eventChan <- takeMVar getEventChan
+
+        let isOurEvent ev = FilePath.normalise (ev & nixFile & textToString) == FilePath.normalise shellNix
+
+        let handleEvent ev =
+              case ev & eventType of
+                Started ->
+                  log [fmt|waiting for lorri build to finish|]
+                Completed -> do
+                  log [fmt|build completed|]
+                  exec (inDirenvDir (takeDirectory shellNix) <$> argv)
+                EvalFailure -> do
+                  log [fmt|evaluation failed! for path {ev & nixFile}|]
+                  Exit.exitWith (Exit.ExitFailure 111)
+
+        -- wait for 100ms for the first message from lorri,
+        -- or else assume lorri is not building the project yet
+        Async.race
+          (waitMs 100)
+          ( do
+              -- find the first event that we can use
+              let go = do
+                    ev <- readChan eventChan
+                    if isOurEvent ev then pure ev else go
+              go
+          )
+          >>= \case
+            Left () -> do
+              log [fmt|No event received from lorri, assuming this is the first evaluation|]
+              exec argv
+            Right ch -> handleEvent ch
+
+        runConduit $
+          repeatMC (readChan eventChan)
+            .| filterC isOurEvent
+            .| mapM_C handleEvent
+    )
+  where
+    inDirenvDir dir' argv' = ("direnv" :| ["exec", dir']) <> argv'
+    exec = \case
+      Just (exe :| args') -> Posix.executeFile exe True args' Nothing
+      Nothing -> Exit.exitSuccess
+
+log :: Text -> IO ()
+log msg = hPutStrLn stderr [fmt|lorri-wait-for-eval: {msg}|]
+
+-- | Searches from the current directory upwards, until it finds the `shell.nix`.
+findShellNix :: FilePath -> IO (Maybe FilePath)
+findShellNix curDir = do
+  let go :: (FilePath -> IO (Maybe FilePath))
+      go dir = do
+        let file = dir FilePath.</> "shell.nix"
+        Dir.doesFileExist file >>= \case
+          True -> pure (Just file)
+          False -> do
+            let parent = FilePath.takeDirectory dir
+            if parent == dir
+              then pure Nothing
+              else go parent
+  go (FilePath.normalise curDir)
+
+smushErrors :: Foldable t => Text -> t Error -> Error
+smushErrors msg errs =
+  errs
+    -- hrm, pretty printing and then creating a new error is kinda shady
+    & foldMap (\err -> "\n- " <> prettyError err)
+    & newError
+    & errorContext msg
diff --git a/users/Profpatsch/lorri-wait-for-eval/README.md b/users/Profpatsch/lorri-wait-for-eval/README.md
new file mode 100644
index 0000000000..9c5d8ef9e3
--- /dev/null
+++ b/users/Profpatsch/lorri-wait-for-eval/README.md
@@ -0,0 +1,7 @@
+# lorri-wait-for-eval
+
+A helper script for [lorri](https://github.com/nix-community/lorri): it wraps a command and executes it once lorri has finished evaluating the current `shell.nix`, using the new environment.
+
+This is useful when you need the new shell environment to be in scope of the command, but don’t want to waste time waiting for it to finish.
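+
+For example, `lorri-wait-for-eval cabal build` waits for lorri to finish evaluating the nearest `shell.nix`, then runs `cabal build` through `direnv exec` so it picks up the fresh environment.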
+
+This should really be a feature of lorri, but I couldn’t be assed to touch rust :P
diff --git a/users/Profpatsch/lorri-wait-for-eval/default.nix b/users/Profpatsch/lorri-wait-for-eval/default.nix
new file mode 100644
index 0000000000..90c6365fed
--- /dev/null
+++ b/users/Profpatsch/lorri-wait-for-eval/default.nix
@@ -0,0 +1,20 @@
+{ depot, pkgs, lib, ... }:
+
+let
+  lorri-wait-for-eval = pkgs.writers.writeHaskell "lorri-wait-for-eval"
+    {
+      libraries = [
+        depot.users.Profpatsch.my-prelude
+        pkgs.haskellPackages.async
+        pkgs.haskellPackages.aeson-better-errors
+        pkgs.haskellPackages.conduit-extra
+        pkgs.haskellPackages.error
+        pkgs.haskellPackages.PyF
+        pkgs.haskellPackages.unliftio
+      ];
+      ghcArgs = [ "-threaded" ];
+
+    } ./LorriWaitForEval.hs;
+
+in
+lorri-wait-for-eval
diff --git a/users/Profpatsch/mailbox-org/MailboxOrg.hs b/users/Profpatsch/mailbox-org/MailboxOrg.hs
new file mode 100644
index 0000000000..6c5820080c
--- /dev/null
+++ b/users/Profpatsch/mailbox-org/MailboxOrg.hs
@@ -0,0 +1,523 @@
+{-# LANGUAGE ApplicativeDo #-}
+{-# LANGUAGE DataKinds #-}
+{-# LANGUAGE DerivingVia #-}
+{-# LANGUAGE GHC2021 #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE OverloadedRecordDot #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE PackageImports #-}
+{-# LANGUAGE QuasiQuotes #-}
+{-# LANGUAGE RecordWildCards #-}
+{-# LANGUAGE NoFieldSelectors #-}
+{-# OPTIONS_GHC -Wall #-}
+
+module Main where
+
+import Aeson (parseErrorTree)
+import AesonQQ (aesonQQ)
+import ArglibNetencode
+import Control.Exception (try)
+import Control.Monad (replicateM)
+import Data.Aeson qualified as Json
+import Data.Aeson.BetterErrors qualified as Json
+import Data.Aeson.KeyMap qualified as KeyMap
+import Data.ByteString qualified as ByteString
+import Data.ByteString.Lazy qualified as Lazy
+import Data.Char qualified as Char
+import "pa-error-tree" Data.Error.Tree
+import Data.Functor.Compose
+import Data.List qualified as List
+import Data.Map.Strict qualified as Map
+import Data.Text qualified as Text
+import ExecHelpers
+import Label
+import Netencode qualified
+import Netencode.Parse qualified as NetParse
+import Network.HTTP.Conduit qualified as Client
+import Network.HTTP.Simple qualified as Client
+import PossehlAnalyticsPrelude
+import Pretty
+import System.Directory qualified as File
+import System.Environment qualified as Env
+import System.Exit (ExitCode (ExitFailure, ExitSuccess))
+import System.Exit qualified as Exit
+import System.FilePath ((</>))
+import System.Process.Typed qualified as Process
+import System.Random qualified as Random
+import System.Random.Stateful qualified as Random
+import Prelude hiding (log)
+
+secret :: Tools -> IO (T2 "email" ByteString "password" ByteString)
+secret tools = do
+  T2
+    (label @"email" "mail@profpatsch.de")
+    <$> (label @"password" <$> fromPass "email/mailbox.org")
+  where
+    fromPass name =
+      tools.pass & runToolExpect0 [name]
+
+progName :: CurrentProgramName
+progName = "mailbox-org"
+
+log :: Error -> IO ()
+log err = do
+  putStderrLn (errorContext progName.unCurrentProgramName err & prettyError)
+
+data Tools = Tools
+  { pass :: Tool
+  }
+  deriving stock (Show)
+
+newtype Tool = Tool {unTool :: FilePath}
+  deriving stock (Show)
+
+parseTools :: Applicative m => (Text -> m (Either Error Tool)) -> m (Either ErrorTree Tools)
+parseTools getTool = do
+  let parser =
+        ( do
+            pass <- get "pass"
+            pure Tools {..}
+        )
+  parser & finalize
+  where
+    get name = name & getTool <&> eitherToListValidation & Compose
+    finalize p =
+      p.getCompose
+        <&> first (errorTree "Error reading tools")
+        <&> validationToEither
+
+main :: IO ()
+main =
+  arglibNetencode progName Nothing
+    >>= parseToolsArglib
+    >>= secret
+    >>= run applyFilters
+
+run ::
+  ( HasField "email" dat ByteString,
+    HasField "password" dat ByteString
+  ) =>
+  (Session -> IO ()) ->
+  dat ->
+  IO ()
+run act loginData = do
+  session <- login loginData
+  act session
+
+listFilterConfig :: Session -> IO ()
+listFilterConfig session = do
+  mailfilter
+    session
+    "config"
+    mempty
+    (Json.key "data" Json.asObject)
+    ()
+    >>= printPretty
+
+applyFilterRule ::
+  (HasField "folderId" dat Text) =>
+  dat ->
+  Session ->
+  IO ()
+applyFilterRule dat session = do
+  mailfilter
+    session
+    "apply"
+    ( T2
+        (label @"extraQueryParams" [("folderId", Just (dat.folderId & textToBytesUtf8))])
+        mempty
+    )
+    (Json.key "data" Json.asArray >> pure ())
+    (Json.Object mempty)
+
+data FilterRule = FilterRule
+  { actioncmds :: NonEmpty Json.Object,
+    test :: NonEmpty Json.Object
+  }
+
+data MailfilterList = MailfilterList
+  { id_ :: Json.Value,
+    rulename :: Text
+  }
+  deriving stock (Show, Eq)
+
+simpleRule ::
+  ( HasField "rulename" r Text,
+    HasField "id" r Natural,
+    HasField "emailContains" r Text,
+    HasField "subjectStartsWith" r Text
+  ) =>
+  r ->
+  Json.Value
+simpleRule dat = do
+  [aesonQQ|{
+    "id": |dat.id & enc @Natural|,
+    "position": 3,
+    "rulename": |dat.rulename & enc @Text|,
+    "active": true,
+    "flags": [],
+    "test": {
+      "id": "allof",
+      "tests": [
+        {
+          "id": "from",
+          "comparison": "contains",
+          "values": [
+            |dat.emailContains & enc @Text|
+          ]
+        },
+        {
+          "id": "subject",
+          "comparison": "startswith",
+          "values": [
+            |dat.subjectStartsWith & enc @Text|
+          ]
+        }
+      ]
+    },
+    "actioncmds": [
+      {
+        "id": "move",
+        "into": "default0/Archive"
+      },
+      {
+        "id": "stop"
+      }
+    ]
+  }|]
+  where
+    enc :: forall a. Json.ToJSON a => a -> Lazy.ByteString
+    enc val = val & Json.toJSON & Json.encode
+
+applyFilters :: Session -> IO ()
+applyFilters session = do
+  filters <-
+    mailfilter
+      session
+      "list"
+      mempty
+      ( Json.key "data" $ do
+          ( Json.eachInArray $ asDat @"mailfilter" $ do
+              id_ <- Json.key "id" Json.asValue
+              rulename <- Json.key "rulename" Json.asText
+              pure MailfilterList {..}
+            )
+            <&> mapFromListOn (\dat -> getLabel @"rulename" dat.parsed)
+      )
+      ([] :: [()])
+  let goal = Map.fromList [(label @"rulename" "another", 32 :: Integer), (label @"rulename" "xyz", 23)]
+  let actions = declarativeUpdate goal filters
+  log [fmt|To * create: {actions.toCreate & Map.keys & show}, * update: {actions.toUpdate & Map.keys & show}, * delete: {actions.toDelete & Map.keys & show}|]
+
+-- where
+-- filters
+--   & Map.elems
+--   & traverse_
+--     ( updateIfDifferent
+--         session
+--         ( \el ->
+--             pure $
+--               el.original.mailfilter
+--                 & KeyMap.insert "active" (Json.Bool False)
+--         )
+--         (pure ())
+--     )
+
+-- updateIfDifferent ::
+--   forall label parsed.
+--   ( HasField "id_" parsed Json.Value,
+--     HasField "rulename" parsed Text
+--   ) =>
+--   Session ->
+--   (Dat label Json.Object parsed -> IO Json.Object) ->
+--   Json.Parse Error () ->
+--   Dat label Json.Object parsed ->
+--   IO ()
+-- updateIfDifferent session switcheroo parser dat = do
+--   new <- switcheroo dat
+--   if new /= getField @label dat.original
+--     then do
+--       log [fmt|Updating filter "{dat.parsed.rulename}" (id {dat.parsed.id_ & show @Json.Value})|]
+--       mailfilter
+--         session
+--         "update"
+--         mempty
+--         parser
+--         new
+--     else do
+--       log [fmt|Skipping updating filter "{dat.parsed.rulename}" (id {dat.parsed.id_ & show @Json.Value}) because nothing changed.|]
+
+-- | https://oxpedia.org/wiki/index.php?title=HTTP_API_MailFilter
+mailfilter ::
+  ( Json.ToJSON a,
+    Show b
+  ) =>
+  Session ->
+  ByteString ->
+  T2
+    "extraQueryParams"
+    Client.Query
+    "httpMethod"
+    (Maybe ByteString) ->
+  Json.Parse Error b ->
+  a ->
+  IO b
+mailfilter session action opts parser body = do
+  req <-
+    Client.parseRequest "https://office.mailbox.org/appsuite/api/mailfilter/v2"
+      <&> Client.setQueryString
+        ( [ ("action", Just action),
+            ("colums", Just "1")
+          ]
+            <> opts.extraQueryParams
+        )
+      <&> Client.setRequestMethod (opts.httpMethod & fromMaybe "PUT")
+      <&> Client.setRequestBodyJSON body
+      <&> addSession session
+  req
+    & httpJSON [fmt|Cannot parse result for {req & prettyRequestShort}|] parser
+    >>= okOrDie
+    -- >>= (\resp -> printPretty resp >> pure resp)
+    <&> Client.responseBody
+  where
+    prettyRequestShort :: Client.Request -> Text
+    prettyRequestShort req = [fmt|request {req & Client.method}: {req & Client.host}{req & Client.path}{req & Client.queryString}|]
+
+-- | Given a goal and the actual state, return which elements to delete, update and create.
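+--
+-- For example (a sketch):
+--
+-- @
+-- declarativeUpdate (Map.fromList [("a", 1), ("b", 2)]) (Map.fromList [("b", 9), ("c", 9)])
+-- @
+--
+-- yields @toCreate = {"a": 1}@, @toDelete = {"c": 9}@ and @toUpdate = {"b": 2}@.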
+declarativeUpdate ::
+  Ord k =>
+  -- | goal map
+  Map k a ->
+  -- | actual map
+  Map k b ->
+  T3
+    "toCreate"
+    (Map k a)
+    "toDelete"
+    (Map k b)
+    "toUpdate"
+    (Map k a)
+declarativeUpdate goal actual =
+  T3
+    (label @"toCreate" $ goal `Map.difference` actual)
+    (label @"toDelete" $ actual `Map.difference` goal)
+    (label @"toUpdate" $ goal `Map.intersection` actual)
+
+newtype Session = Session Client.CookieJar
+
+httpJSON ::
+  Error ->
+  Json.Parse Error b ->
+  Client.Request ->
+  IO (Client.Response b)
+httpJSON errMsg parser req = do
+  req
+    & Client.httpJSON @_ @Json.Value
+    >>= traverse
+      ( \val -> do
+          case val of
+            Json.Object obj
+              | "error" `KeyMap.member` obj
+                  && "error_desc" `KeyMap.member` obj -> do
+                  printPretty obj
+                  diePanic' "Server returned above inline error"
+            _ -> pure ()
+          val & Json.parseValue parser & \case
+            Left errs ->
+              errs
+                & parseErrorTree errMsg
+                & diePanic'
+            Right a -> pure a
+      )
+
+data Dat label orig parsed = Dat
+  { original :: Label label orig,
+    parsed :: parsed
+  }
+  deriving stock (Show, Eq)
+
+asDat ::
+  forall label err m a.
+  Monad m =>
+  Json.ParseT err m a ->
+  Json.ParseT err m (Dat label Json.Object a)
+asDat parser = do
+  original <- label @label <$> Json.asObject
+  parsed <- parser
+  pure Dat {..}
+
+addSession :: Session -> Client.Request -> Client.Request
+addSession (Session jar) req = do
+  let sessionId =
+        jar
+          & Client.destroyCookieJar
+          & List.find (\c -> "open-xchange-session-" `ByteString.isPrefixOf` c.cookie_name)
+          & annotate "The cookie jar did not contain an open-xchange-session-*"
+          & unwrapError
+          & (.cookie_value)
+
+  let req' = req & Client.addToRequestQueryString [("session", Just sessionId)]
+  req' {Client.cookieJar = Just jar}
+
+-- | Log into the mailbox.org service, and return the session secret cookies.
+login :: (HasField "email" dat ByteString, HasField "password" dat ByteString) => dat -> IO Session
+login dat = do
+  rnd <- randomString
+  req <-
+    Client.parseRequest "https://office.mailbox.org/ajax/login"
+      <&> Client.setQueryString
+        [ ("action", Just "formlogin"),
+          ("authId", Just $ ("mbo-" <> rnd) & stringToText & textToBytesUtf8)
+        ]
+      <&> Client.urlEncodedBody
+        [ ("version", "Form+Login"),
+          ("autologin", "true"),
+          ("client", "open-xchange-appsuite"),
+          ("uiWebPath", "/appsuite/"),
+          ("login", dat.email),
+          ("password", dat.password)
+        ]
+  Client.httpNoBody req
+    >>= okOrDie
+    <&> Client.responseCookieJar
+    <&> Session
+  where
+    -- For some reason they want the client to pass a random string
+    -- which is used for the session?‽!?
+    randomString = do
+      gen <- Random.newIOGenM =<< Random.newStdGen
+      let chars = ['a' .. 'z'] <> ['A' .. 'Z'] <> ['0' .. '9']
+      let len = 11
+      Random.uniformRM (0, List.length chars - 1) gen
+        & replicateM len
+        <&> map (\index -> chars !! index)
+
+okOrDie :: Show a => Client.Response a -> IO (Client.Response a)
+okOrDie resp =
+  case resp & Client.getResponseStatusCode of
+    200 -> pure resp
+    _ -> do
+      printPretty resp
+      diePanic' "non-200 result"
+
+diePanic' :: ErrorTree -> IO a
+diePanic' errs = errs & prettyErrorTree & diePanic progName
+
+-- | Parse the tools from the given arglib input, and check that the executables exist
+parseToolsArglib :: Netencode.T -> IO Tools
+parseToolsArglib t = do
+  let oneTool name =
+        NetParse.asText
+          <&> textToString
+          <&> ( \path ->
+                  path
+                    & File.getPermissions
+                    <&> File.executable
+                    <&> ( \case
+                            False -> Left $ [fmt|Tool "{name}" is not an executable|]
+                            True -> Right (Tool path)
+                        )
+              )
+  let allTools =
+        parseTools (\name -> Compose $ NetParse.key name >>> oneTool name)
+          & getCompose
+  t
+    & NetParse.runParse
+      "test"
+      -- TODO: a proper ParseT for netencode values
+      ( NetParse.asRecord
+          >>> NetParse.key "BINS"
+          >>> NetParse.asRecord
+          >>> allTools
+      )
+    & orDo diePanic'
+    & join @IO
+    >>= orDo (\errs -> errs & diePanic')
+
+-- | Just assume the tools exist by name in the environment.
+parseToolsToolname :: IO Tools
+parseToolsToolname =
+  parseTools
+    ( \name ->
+        checkInPath name <&> \case
+          False -> Left [fmt|Cannot find "{name}" in PATH|]
+          True -> Right $ Tool (name & textToString)
+    )
+    >>= orDo diePanic'
+
+checkInPath :: Text -> IO Bool
+checkInPath name = do
+  Env.lookupEnv "PATH"
+    <&> annotate "No PATH set"
+    >>= orDo diePanic'
+    <&> stringToText
+    <&> Text.split (== ':')
+    <&> filter (/= "")
+    >>= traverse
+      ( \p ->
+          File.getPermissions ((textToString p) </> (textToString name))
+            <&> File.executable
+            & try @IOError
+            >>= \case
+              Left _ioError -> pure False
+              Right isExe -> pure isExe
+      )
+    <&> or
+
+orDo :: Applicative f => (t -> f a) -> Either t a -> f a
+orDo f = \case
+  Left e -> f e
+  Right a -> pure a
+
+runTool :: [Text] -> Tool -> IO (Exit.ExitCode, ByteString)
+runTool args tool = do
+  let bashArgs = prettyArgsForBash ((tool.unTool & stringToText) : args)
+  log [fmt|Running: $ {bashArgs}|]
+  Process.proc
+    tool.unTool
+    (args <&> textToString)
+    & Process.readProcessStdout
+    <&> second toStrictBytes
+    <&> second stripWhitespaceFromEnd
+
+-- | Like `runCommandExpect0`, run the given tool, given a tool accessor.
+runToolExpect0 :: [Text] -> Tool -> IO ByteString
+runToolExpect0 args tool =
+  tool & runTool args >>= \(ex, stdout) -> do
+    checkStatus0 tool.unTool ex
+    pure stdout
+
+-- | Check whether a command exited 0 or crash.
+checkStatus0 :: FilePath -> ExitCode -> IO ()
+checkStatus0 executable = \case
+  ExitSuccess -> pure ()
+  ExitFailure status -> do
+    diePanic' [fmt|Command `{executable}` did not exit with status 0 (success), but status {status}|]
+
+stripWhitespaceFromEnd :: ByteString -> ByteString
+stripWhitespaceFromEnd = ByteString.reverse . ByteString.dropWhile (\w -> w == charToWordUnsafe '\n') . ByteString.reverse
+
+-- | Pretty print a command line in a way that can be copied to bash.
+prettyArgsForBash :: [Text] -> Text
+prettyArgsForBash = Text.intercalate " " . map simpleBashEscape
+
+-- | Simple escaping for bash words. If they contain anything that’s not ascii chars
+-- and a bunch of often-used special characters, put the word in single quotes.
+simpleBashEscape :: Text -> Text
+simpleBashEscape t = do
+  case Text.find (not . isSimple) t of
+    Just _ -> escapeSingleQuote t
+    Nothing -> t
+  where
+    -- any word that is just ascii characters is simple (no spaces or control characters)
+    -- or contains a few often-used characters like - or .
+    isSimple c =
+      Char.isAsciiLower c
+        || Char.isAsciiUpper c
+        || Char.isDigit c
+        -- These are benign, bash will not interpret them as special characters.
+        || List.elem c ['-', '.', ':', '/']
+    -- Put the word in single quotes
+    -- If there is a single quote in the word,
+    -- close the single quoted word, add a single quote, open the word again
+    escapeSingleQuote t' = "'" <> Text.replace "'" "'\\''" t' <> "'"
diff --git a/users/Profpatsch/mailbox-org/README.md b/users/Profpatsch/mailbox-org/README.md
new file mode 100644
index 0000000000..b84e7b59c1
--- /dev/null
+++ b/users/Profpatsch/mailbox-org/README.md
@@ -0,0 +1,7 @@
+# mailbox-org
+
+Interfacing with the API of <https://mailbox.org/>.
+
+They use [open-xchange](https://www.open-xchange.com/resources/oxpedia) as their App Suite, so we have to work with/reverse engineer their weird API.
+
+Intended so I have a way of uploading Sieve rules into their system semi-automatically.
diff --git a/users/Profpatsch/mailbox-org/default.nix b/users/Profpatsch/mailbox-org/default.nix
new file mode 100644
index 0000000000..73bd28292d
--- /dev/null
+++ b/users/Profpatsch/mailbox-org/default.nix
@@ -0,0 +1,38 @@
+{ depot, pkgs, lib, ... }:
+
+let
+  mailbox-org = pkgs.haskellPackages.mkDerivation {
+    pname = "mailbox-org";
+    version = "0.1.0";
+
+    src = depot.users.Profpatsch.exactSource ./. [
+      ./mailbox-org.cabal
+      ./src/AesonQQ.hs
+      ./MailboxOrg.hs
+    ];
+
+    libraryHaskellDepends = [
+      depot.users.Profpatsch.my-prelude
+      depot.users.Profpatsch.execline.exec-helpers-hs
+      depot.users.Profpatsch.arglib.netencode.haskell
+      pkgs.haskellPackages.pa-prelude
+      pkgs.haskellPackages.pa-label
+      pkgs.haskellPackages.pa-error-tree
+      pkgs.haskellPackages.aeson
+      pkgs.haskellPackages.http-conduit
+      pkgs.haskellPackages.aeson-better-errors
+    ];
+
+    isLibrary = false;
+    isExecutable = true;
+    license = lib.licenses.mit;
+  };
+
+
+in
+lib.pipe mailbox-org [
+  (x: (depot.nix.getBins x [ "mailbox-org" ]).mailbox-org)
+  (depot.users.Profpatsch.arglib.netencode.with-args "mailbox-org" {
+    BINS = depot.nix.getBins pkgs.dovecot_pigeonhole [ "sieve-test" ];
+  })
+]
diff --git a/users/Profpatsch/mailbox-org/mailbox-org.cabal b/users/Profpatsch/mailbox-org/mailbox-org.cabal
new file mode 100644
index 0000000000..a1b041447b
--- /dev/null
+++ b/users/Profpatsch/mailbox-org/mailbox-org.cabal
@@ -0,0 +1,95 @@
+cabal-version:      3.0
+name:               mailbox-org
+version:            0.1.0.0
+author:             Profpatsch
+maintainer:         mail@profpatsch.de
+
+
+common common-options
+  ghc-options:
+      -Wall
+      -Wno-type-defaults
+      -Wunused-packages
+      -Wredundant-constraints
+      -fwarn-missing-deriving-strategies
+
+  -- See https://downloads.haskell.org/ghc/latest/docs/users_guide/exts.html
+  -- for a description of all these extensions
+  default-extensions:
+      -- Infer Applicative instead of Monad where possible
+    ApplicativeDo
+
+    -- Allow literal strings to be Text
+    OverloadedStrings
+
+    -- Syntactic sugar improvements
+    LambdaCase
+    MultiWayIf
+
+    -- Makes the (deprecated) usage of * instead of Data.Kind.Type an error
+    NoStarIsType
+
+    -- Convenient and crucial to deal with ambiguous field names, commonly
+    -- known as RecordDotSyntax
+    OverloadedRecordDot
+
+    -- does not export record fields as functions, use OverloadedRecordDot to access instead
+    NoFieldSelectors
+
+    -- Record punning
+    RecordWildCards
+
+    -- Improved Deriving
+    DerivingStrategies
+    DerivingVia
+
+    -- Type-level strings
+    DataKinds
+
+    -- to enable the `type` keyword in import lists (ormolu uses this automatically)
+    ExplicitNamespaces
+
+  default-language: GHC2021
+
+
+library
+    import: common-options
+
+    hs-source-dirs: src
+
+    exposed-modules:
+        AesonQQ
+
+    build-depends:
+        base >=4.15 && <5,
+        pa-prelude,
+        aeson,
+        PyF,
+        template-haskell
+
+
+
+executable mailbox-org
+    import: common-options
+    main-is: MailboxOrg.hs
+
+    build-depends:
+        base >=4.15 && <5,
+        mailbox-org,
+        my-prelude,
+        pa-prelude,
+        pa-label,
+        pa-error-tree,
+        exec-helpers,
+        netencode,
+        text,
+        directory,
+        filepath,
+        arglib-netencode,
+        random,
+        http-conduit,
+        aeson,
+        aeson-better-errors,
+        bytestring,
+        typed-process,
+        containers
diff --git a/users/Profpatsch/mailbox-org/src/AesonQQ.hs b/users/Profpatsch/mailbox-org/src/AesonQQ.hs
new file mode 100644
index 0000000000..2ac3d533ae
--- /dev/null
+++ b/users/Profpatsch/mailbox-org/src/AesonQQ.hs
@@ -0,0 +1,24 @@
+{-# LANGUAGE TemplateHaskellQuotes #-}
+
+module AesonQQ where
+
+import Data.Aeson qualified as Json
+import Language.Haskell.TH.Quote (QuasiQuoter)
+import PossehlAnalyticsPrelude
+import PyF qualified
+import PyF.Internal.QQ qualified as PyFConf
+
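+-- | A 'QuasiQuoter' for json values, using @|…|@ as the interpolation
+-- delimiters so they don’t clash with the json braces.
+--
+-- Sketch of a use site (with @enc@ being some value-to-json encoder, as at
+-- the call sites in MailboxOrg.hs):
+--
+-- @
+-- [aesonQQ|{ "rulename": |dat.rulename & enc|, "active": true }|]
+-- @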
+aesonQQ :: QuasiQuoter
+aesonQQ =
+  PyF.mkFormatter
+    "aesonQQ"
+    PyF.defaultConfig
+      { PyFConf.delimiters = Just ('|', '|'),
+        PyFConf.postProcess = \exp_ -> do
+          -- TODO: this does not throw an error at compilation time if the json does not parse
+          [|
+            case Json.eitherDecodeStrict' @Json.Value $ textToBytesUtf8 $ stringToText $(exp_) of
+              Left err -> error err
+              Right a -> a
+            |]
+      }
diff --git a/users/Profpatsch/my-prelude/README.md b/users/Profpatsch/my-prelude/README.md
new file mode 100644
index 0000000000..2cc068579a
--- /dev/null
+++ b/users/Profpatsch/my-prelude/README.md
@@ -0,0 +1,42 @@
+# My Haskell Prelude
+
+Contains various modules I’ve found useful when writing Haskell.
+
+## Contents
+
+A short overview:
+
+### `MyPrelude.hs`
+
+A collection of re-exports and extra functions. This does *not* replace the `Prelude` module from `base`, but rather should be imported *in addition* to `Prelude`.
+
+Stuff like bad functions from prelude (partial stuff, or plain horrible stuff) is handled by a custom `.hlint` file, which you can find in [../.hlint.yaml](../.hlint.yaml).
+
+The common style of Haskell these modules try to enable is what I call “left-to-right Haskell”,
+where one mostly prefers forward-chaining operators like `&`/`<&>`/`>>=` to backwards operators like `$`/`<$>`/`<=<`. In addition, all transformation functions should follow the scheme of `aToB` instead of `B.fromA`, e.g. `Text.unpack`/`Text.pack` -> `textToString`/`stringToText`. The module includes a bunch of text conversion functions one needs all the time, in the same style.
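+
+A small sketch of the style (assuming this prelude plus qualified `Data.Text`/`Data.List.NonEmpty` imports):
+
+```haskell
+firstWordUpper :: Text -> Maybe Text
+firstWordUpper t =
+  t
+    & Text.words -- split into words
+    & nonEmpty -- Nothing for empty input
+    <&> NonEmpty.head -- take the first word
+    <&> Text.toUpper
+```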
+
+These have been battle-tested in a production codebase of ~30k lines of Haskell.
+
+### `Label.hs`
+
+A very useful collection of anonymous labelled tuples and enums of size 2 and 3. Assumes GHC >= 9.2 for `RecordDotSyntax` support.
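+
+A sketch of a use site (a `T2` with two labelled fields, accessed via record dot):
+
+```haskell
+user :: T2 "email" Text "name" Text
+user = T2 (label @"email" "mail@example.org") (label @"name" "Paula")
+
+-- user.email == "mail@example.org"
+```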
+
+### `Pretty.hs`
+
+Colorful multiline pretty-printing of Haskell values.
+
+### `Test.hs`
+
+A wrapper around `hspec` which produces colorful test diffs.
+
+### `Aeson.hs`
+
+Helpers around Json parsing.
+
+### `Data.Error.Tree`
+
+Collect errors (from [`Data.Error`](https://hackage.haskell.org/package/error-1.0.0.0/docs/Data-Error.html)) into a tree, then display them in a nested fashion. Super useful for e.g. collecting and displaying nested parsing errors.
+
+### `RunCommand.hs`
+
+A module wrapping the process API with some helpful defaults for executing commands and printing what is executed to stderr.
diff --git a/users/Profpatsch/my-prelude/default.nix b/users/Profpatsch/my-prelude/default.nix
new file mode 100644
index 0000000000..4bca8ea49f
--- /dev/null
+++ b/users/Profpatsch/my-prelude/default.nix
@@ -0,0 +1,52 @@
+{ depot, pkgs, lib, ... }:
+
+pkgs.haskellPackages.mkDerivation {
+  pname = "my-prelude";
+  version = "0.0.1-unreleased";
+
+  src = depot.users.Profpatsch.exactSource ./. [
+    ./my-prelude.cabal
+    ./src/Aeson.hs
+    ./src/Arg.hs
+    ./src/AtLeast.hs
+    ./src/MyPrelude.hs
+    ./src/Test.hs
+    ./src/Parse.hs
+    ./src/Pretty.hs
+    ./src/Seconds.hs
+    ./src/Tool.hs
+    ./src/ValidationParseT.hs
+    ./src/Postgres/Decoder.hs
+    ./src/Postgres/MonadPostgres.hs
+  ];
+
+  isLibrary = true;
+
+  libraryHaskellDepends = [
+    pkgs.haskellPackages.pa-prelude
+    pkgs.haskellPackages.pa-label
+    pkgs.haskellPackages.pa-error-tree
+    pkgs.haskellPackages.pa-json
+    pkgs.haskellPackages.pa-pretty
+    pkgs.haskellPackages.pa-field-parser
+    pkgs.haskellPackages.aeson-better-errors
+    pkgs.haskellPackages.foldl
+    pkgs.haskellPackages.resource-pool
+    pkgs.haskellPackages.error
+    pkgs.haskellPackages.hs-opentelemetry-api
+    pkgs.haskellPackages.hspec
+    pkgs.haskellPackages.hspec-expectations-pretty-diff
+    pkgs.haskellPackages.monad-logger
+    pkgs.haskellPackages.postgresql-simple
+    pkgs.haskellPackages.profunctors
+    pkgs.haskellPackages.PyF
+    pkgs.haskellPackages.semigroupoids
+    pkgs.haskellPackages.these
+    pkgs.haskellPackages.unliftio
+    pkgs.haskellPackages.validation-selective
+    pkgs.haskellPackages.vector
+  ];
+
+  license = lib.licenses.mit;
+
+}
diff --git a/users/Profpatsch/my-prelude/my-prelude.cabal b/users/Profpatsch/my-prelude/my-prelude.cabal
new file mode 100644
index 0000000000..2f7882a526
--- /dev/null
+++ b/users/Profpatsch/my-prelude/my-prelude.cabal
@@ -0,0 +1,121 @@
+cabal-version:      3.0
+name:               my-prelude
+version:            0.0.1.0
+author:             Profpatsch
+maintainer:         mail@profpatsch.de
+
+common common-options
+  ghc-options:
+      -Wall
+      -Wno-type-defaults
+      -Wunused-packages
+      -Wredundant-constraints
+      -fwarn-missing-deriving-strategies
+
+  -- See https://downloads.haskell.org/ghc/latest/docs/users_guide/exts.html
+  -- for a description of all these extensions
+  default-extensions:
+      -- Infer Applicative instead of Monad where possible
+    ApplicativeDo
+
+    -- Allow literal strings to be Text
+    OverloadedStrings
+
+    -- Syntactic sugar improvements
+    LambdaCase
+    MultiWayIf
+
+    -- Makes the (deprecated) usage of * instead of Data.Kind.Type an error
+    NoStarIsType
+
+    -- Convenient and crucial to deal with ambiguous field names, commonly
+    -- known as RecordDotSyntax
+    OverloadedRecordDot
+
+    -- does not export record fields as functions, use OverloadedRecordDot to access instead
+    NoFieldSelectors
+
+    -- Record punning
+    RecordWildCards
+
+    -- Improved Deriving
+    DerivingStrategies
+    DerivingVia
+
+    -- Type-level strings
+    DataKinds
+
+    -- to enable the `type` keyword in import lists (ormolu uses this automatically)
+    ExplicitNamespaces
+
+    -- allows defining pattern synonyms, but also the `import Foo (pattern FooPattern)` import syntax
+    PatternSynonyms
+  default-language: GHC2021
+
+
+library
+    import: common-options
+    hs-source-dirs: src
+    exposed-modules:
+      MyPrelude
+      Aeson
+      Arg
+      AtLeast
+      Test
+      Postgres.Decoder
+      Postgres.MonadPostgres
+      ValidationParseT
+      Parse
+      Pretty
+      Seconds
+      Tool
+
+    -- Modules included in this executable, other than Main.
+    -- other-modules:
+
+    -- LANGUAGE extensions used by modules in this package.
+    -- other-extensions:
+    build-depends:
+       base >=4.15 && <5
+     , pa-prelude
+     , pa-label
+     , pa-error-tree
+     , pa-json
+     , pa-pretty
+     , pa-field-parser
+     , aeson
+     , aeson-better-errors
+     , bytestring
+     , containers
+     , foldl
+     , unordered-containers
+     , resource-pool
+     , resourcet
+     , scientific
+     , time
+     , error
+     , exceptions
+     , filepath
+     , hspec
+     , hspec-expectations-pretty-diff
+     , hs-opentelemetry-api
+     , monad-logger
+     , mtl
+     , postgresql-simple
+     , profunctors
+     , PyF
+     , semigroupoids
+     , selective
+     , template-haskell
+     , text
+     , these
+     , unix
+     , unliftio
+     , validation-selective
+     , vector
+     , ghc-boot
+     -- for Pretty
+     , aeson-pretty
+     , hscolour
+     , ansi-terminal
+     , nicify-lib
diff --git a/users/Profpatsch/my-prelude/src/Aeson.hs b/users/Profpatsch/my-prelude/src/Aeson.hs
new file mode 100644
index 0000000000..73d6116082
--- /dev/null
+++ b/users/Profpatsch/my-prelude/src/Aeson.hs
@@ -0,0 +1,176 @@
+{-# LANGUAGE DataKinds #-}
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE FlexibleInstances #-}
+{-# LANGUAGE GHC2021 #-}
+{-# LANGUAGE KindSignatures #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+{-# LANGUAGE UndecidableInstances #-}
+
+module Aeson where
+
+import Data.Aeson (Value (..))
+import Data.Aeson.BetterErrors qualified as Json
+import Data.Aeson.KeyMap qualified as KeyMap
+import Data.Error.Tree
+import Data.Maybe (catMaybes)
+import Data.Vector qualified as Vector
+import Label
+import PossehlAnalyticsPrelude
+import Test.Hspec (describe, it, shouldBe)
+import Test.Hspec qualified as Hspec
+
+-- | Convert a 'Json.ParseError' to a corresponding 'ErrorTree'
+parseErrorTree :: Error -> Json.ParseError Error -> ErrorTree
+parseErrorTree contextMsg errs =
+  errs
+    & Json.displayError prettyError
+    <&> newError
+    & nonEmpty
+    & \case
+      Nothing -> singleError contextMsg
+      Just errs' -> errorTree contextMsg errs'
+
+-- | Parse a key from the object, à la 'Json.key', return a labelled value.
+--
+-- We don’t provide a version that infers the json object key,
+-- since that conflates internal naming with the external API, which is dangerous.
+--
+-- @@
+-- do
+--   txt <- keyLabel @"myLabel" "jsonKeyName" Json.asText
+--   pure (txt :: Label "myLabel" Text)
+-- @@
+keyLabel ::
+  forall label err m a.
+  Monad m =>
+  Text ->
+  Json.ParseT err m a ->
+  Json.ParseT err m (Label label a)
+keyLabel = do
+  keyLabel' (Proxy @label)
+
+-- | Parse a key from the object, à la 'Json.key', return a labelled value.
+-- Version of 'keyLabel' that requires a proxy.
+--
+-- @@
+-- do
+--   txt <- keyLabel' (Proxy @"myLabel") "jsonKeyName" Json.asText
+--   pure (txt :: Label "myLabel" Text)
+-- @@
+keyLabel' ::
+  forall label err m a.
+  Monad m =>
+  Proxy label ->
+  Text ->
+  Json.ParseT err m a ->
+  Json.ParseT err m (Label label a)
+keyLabel' Proxy key parser = label @label <$> Json.key key parser
+
+-- | Parse an optional key from the object, à la 'Json.keyMay', return a labelled value.
+--
+-- We don’t provide a version that infers the json object key,
+-- since that conflates internal naming with the external API, which is dangerous.
+--
+-- @@
+-- do
+--   txt <- keyLabelMay @"myLabel" "jsonKeyName" Json.asText
+--   pure (txt :: Label "myLabel" (Maybe Text))
+-- @@
+keyLabelMay ::
+  forall label err m a.
+  Monad m =>
+  Text ->
+  Json.ParseT err m a ->
+  Json.ParseT err m (Label label (Maybe a))
+keyLabelMay = do
+  keyLabelMay' (Proxy @label)
+
+-- | Parse an optional key from the object, à la 'Json.keyMay', return a labelled value.
+-- Version of 'keyLabelMay' that requires a proxy.
+--
+-- @@
+-- do
+--   txt <- keyLabelMay' (Proxy @"myLabel") "jsonKeyName" Json.asText
+--   pure (txt :: Label "myLabel" (Maybe Text))
+-- @@
+keyLabelMay' ::
+  forall label err m a.
+  Monad m =>
+  Proxy label ->
+  Text ->
+  Json.ParseT err m a ->
+  Json.ParseT err m (Label label (Maybe a))
+keyLabelMay' Proxy key parser = label @label <$> Json.keyMay key parser
+
+-- | Like 'Json.key', but allows a list of keys that are tried in order.
+--
+-- This is intended for renaming keys in an object.
+-- The first key is the most up-to-date version of a key, the others are for backward-compatibility.
+--
+-- If a key (new or old) exists, the inner parser will always be executed for that key.
+keyRenamed :: Monad m => NonEmpty Text -> Json.ParseT err m a -> Json.ParseT err m a
+keyRenamed (newKey :| oldKeys) inner =
+  keyRenamedTryOldKeys oldKeys inner >>= \case
+    Nothing -> Json.key newKey inner
+    Just parse -> parse
+
+-- | Like 'Json.keyMay', but allows a list of keys that are tried in order.
+--
+-- This is intended for renaming keys in an object.
+-- The first key is the most up-to-date version of a key, the others are for backward-compatibility.
+--
+-- If a key (new or old) exists, the inner parser will always be executed for that key.
+keyRenamedMay :: Monad m => NonEmpty Text -> Json.ParseT err m a -> Json.ParseT err m (Maybe a)
+keyRenamedMay (newKey :| oldKeys) inner =
+  keyRenamedTryOldKeys oldKeys inner >>= \case
+    Nothing -> Json.keyMay newKey inner
+    Just parse -> Just <$> parse
+
+-- | Helper function for 'keyRenamed' and 'keyRenamedMay' that returns the parser for the first old key that exists, if any.
+keyRenamedTryOldKeys :: Monad m => [Text] -> Json.ParseT err m a -> Json.ParseT err m (Maybe (Json.ParseT err m a))
+keyRenamedTryOldKeys oldKeys inner = do
+  oldKeys & traverse tryOld <&> catMaybes <&> nonEmpty <&> \case
+    Nothing -> Nothing
+    Just (old :| _moreOld) -> Just old
+  where
+    tryOld key =
+      Json.keyMay key (pure ()) <&> \case
+        Just () -> Just $ Json.key key inner
+        Nothing -> Nothing
+
+test_keyRenamed :: Hspec.Spec
+test_keyRenamed = do
+  describe "keyRenamed" $ do
+    let parser = keyRenamed ("new" :| ["old"]) Json.asText
+    let p = Json.parseValue @() parser
+    it "accepts the new key and the old key" $ do
+      p (Object (KeyMap.singleton "new" (String "text")))
+        `shouldBe` (Right "text")
+      p (Object (KeyMap.singleton "old" (String "text")))
+        `shouldBe` (Right "text")
+    it "fails with the old key in the error if the inner parser is wrong" $ do
+      p (Object (KeyMap.singleton "old" Null))
+        `shouldBe` (Left (Json.BadSchema [Json.ObjectKey "old"] (Json.WrongType Json.TyString Null)))
+    it "fails with the new key in the error if the inner parser is wrong" $ do
+      p (Object (KeyMap.singleton "new" Null))
+        `shouldBe` (Left (Json.BadSchema [Json.ObjectKey "new"] (Json.WrongType Json.TyString Null)))
+    it "fails if the key is missing" $ do
+      p (Object KeyMap.empty)
+        `shouldBe` (Left (Json.BadSchema [] (Json.KeyMissing "new")))
+  describe "keyRenamedMay" $ do
+    let parser = keyRenamedMay ("new" :| ["old"]) Json.asText
+    let p = Json.parseValue @() parser
+    it "accepts the new key and the old key" $ do
+      p (Object (KeyMap.singleton "new" (String "text")))
+        `shouldBe` (Right (Just "text"))
+      p (Object (KeyMap.singleton "old" (String "text")))
+        `shouldBe` (Right (Just "text"))
+    it "allows the old and new key to be missing" $ do
+      p (Object KeyMap.empty)
+        `shouldBe` (Right Nothing)
+
+-- | Create a json array from a list of json values.
+jsonArray :: [Value] -> Value
+jsonArray xs = xs & Vector.fromList & Array
diff --git a/users/Profpatsch/my-prelude/src/Arg.hs b/users/Profpatsch/my-prelude/src/Arg.hs
new file mode 100644
index 0000000000..a6ffa90924
--- /dev/null
+++ b/users/Profpatsch/my-prelude/src/Arg.hs
@@ -0,0 +1,34 @@
+module Arg where
+
+import Data.String (IsString)
+import GHC.Exts (IsList)
+import GHC.TypeLits (Symbol)
+
+-- | Wrap a function argument into this helper to give it a better description for the caller without disturbing the callsite too much.
+--
+-- This has instances for IsString and Num, meaning if the caller is usually a string or number literal, it should Just Work.
+--
+-- e.g.
+--
+-- @
+-- myFoo :: Arg "used as the name in error message" Text -> IO ()
+-- myFoo (Arg name) = …
+-- @
+--
+-- Will display the description in the inferred type of the callsite.
+--
+-- Due to IsString you can call @myFoo@ like
+--
+-- @myFoo "name in error"@
+--
+-- This is mostly intended for literals, if you want to wrap arbitrary data, use @Label@.
+newtype Arg (description :: Symbol) a = Arg {unArg :: a}
+  deriving newtype
+    ( Show,
+      Eq,
+      IsString,
+      IsList,
+      Num,
+      Monoid,
+      Semigroup
+    )
diff --git a/users/Profpatsch/my-prelude/src/AtLeast.hs b/users/Profpatsch/my-prelude/src/AtLeast.hs
new file mode 100644
index 0000000000..3857c3a7cf
--- /dev/null
+++ b/users/Profpatsch/my-prelude/src/AtLeast.hs
@@ -0,0 +1,51 @@
+{-# LANGUAGE QuasiQuotes #-}
+
+module AtLeast where
+
+import Data.Aeson (FromJSON (parseJSON))
+import Data.Aeson.BetterErrors qualified as Json
+import FieldParser (FieldParser)
+import FieldParser qualified as Field
+import GHC.Records (HasField (..))
+import GHC.TypeLits (KnownNat, natVal)
+import PossehlAnalyticsPrelude
+  ( Natural,
+    Proxy (Proxy),
+    fmt,
+    prettyError,
+    (&),
+  )
+
+-- | A natural number that must be at least as big as the type literal.
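+--
+-- For example, @AtLeast 3 Int@ admits @5@ but rejects @1@
+-- (via 'parseAtLeast' below, with the error “Must be at least 3 but was 1”).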
+newtype AtLeast (min :: Natural) num = AtLeast num
+  -- Just use the instances of the wrapped number type
+  deriving newtype (Eq, Show)
+
+-- | This is the “destructor” for `AtLeast`, because of the phantom type (@min@) it cannot be inferred automatically.
+instance HasField "unAtLeast" (AtLeast min num) num where
+  getField (AtLeast num) = num
+
+parseAtLeast ::
+  forall min num.
+  (KnownNat min, Integral num, Show num) =>
+  FieldParser num (AtLeast min num)
+parseAtLeast =
+  let minInt = natVal (Proxy @min)
+   in Field.FieldParser $ \from ->
+        if from >= (minInt & fromIntegral)
+          then Right (AtLeast from)
+          else Left [fmt|Must be at least {minInt & show} but was {from & show}|]
+
+instance
+  (KnownNat min, FromJSON num, Integral num, Bounded num, Show num) =>
+  FromJSON (AtLeast min num)
+  where
+  parseJSON =
+    Json.toAesonParser
+      prettyError
+      ( do
+          num <- Json.fromAesonParser @_ @num
+          case Field.runFieldParser (parseAtLeast @min @num) num of
+            Left err -> Json.throwCustomError err
+            Right a -> pure a
+      )
diff --git a/users/Profpatsch/my-prelude/src/MyPrelude.hs b/users/Profpatsch/my-prelude/src/MyPrelude.hs
new file mode 100644
index 0000000000..880983c47e
--- /dev/null
+++ b/users/Profpatsch/my-prelude/src/MyPrelude.hs
@@ -0,0 +1,776 @@
+{-# LANGUAGE ImplicitParams #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE MagicHash #-}
+{-# LANGUAGE ViewPatterns #-}
+
+module MyPrelude
+  ( -- * Text conversions
+    Text,
+    ByteString,
+    Word8,
+    fmt,
+    textToString,
+    stringToText,
+    stringToBytesUtf8,
+    showToText,
+    textToBytesUtf8,
+    textToBytesUtf8Lazy,
+    bytesToTextUtf8,
+    bytesToTextUtf8Lazy,
+    bytesToTextUtf8Lenient,
+    bytesToTextUtf8LenientLazy,
+    bytesToTextUtf8Unsafe,
+    bytesToTextUtf8UnsafeLazy,
+    toStrict,
+    toLazy,
+    toStrictBytes,
+    toLazyBytes,
+    charToWordUnsafe,
+
+    -- * IO
+    putStrLn,
+    putStderrLn,
+    exitWithMessage,
+
+    -- * WIP code
+    todo,
+
+    -- * Records
+    HasField,
+
+    -- * Control flow
+    doAs,
+    (&),
+    (<&>),
+    (<|>),
+    foldMap1,
+    foldMap',
+    join,
+    when,
+    unless,
+    guard,
+    ExceptT (..),
+    runExceptT,
+    MonadThrow,
+    throwM,
+    MonadIO,
+    liftIO,
+    MonadReader,
+    asks,
+    Bifunctor,
+    first,
+    second,
+    bimap,
+    both,
+    foldMap,
+    fold,
+    foldl',
+    fromMaybe,
+    mapMaybe,
+    findMaybe,
+    Traversable,
+    for,
+    for_,
+    traverse,
+    traverse_,
+    traverseFold,
+    traverseFold1,
+    traverseFoldDefault,
+    MonadTrans,
+    lift,
+
+    -- * Data types
+    Coercible,
+    coerce,
+    Proxy (Proxy),
+    Map,
+    annotate,
+    Validation (Success, Failure),
+    failure,
+    successes,
+    failures,
+    traverseValidate,
+    traverseValidateM,
+    traverseValidateM_,
+    eitherToValidation,
+    eitherToListValidation,
+    validationToEither,
+    These (This, That, These),
+    eitherToThese,
+    eitherToListThese,
+    validationToThese,
+    thenThese,
+    thenValidate,
+    thenValidateM,
+    NonEmpty ((:|)),
+    pattern IsEmpty,
+    pattern IsNonEmpty,
+    singleton,
+    nonEmpty,
+    nonEmptyDef,
+    overNonEmpty,
+    zipNonEmpty,
+    zipWithNonEmpty,
+    zip3NonEmpty,
+    zipWith3NonEmpty,
+    zip4NonEmpty,
+    toList,
+    lengthNatural,
+    maximum1,
+    minimum1,
+    maximumBy1,
+    minimumBy1,
+    Vector,
+    Generic,
+    Lift,
+    Semigroup,
+    sconcat,
+    Monoid,
+    mconcat,
+    ifTrue,
+    ifExists,
+    Void,
+    absurd,
+    Identity (Identity, runIdentity),
+    Natural,
+    intToNatural,
+    Scientific,
+    Contravariant,
+    contramap,
+    (>$<),
+    (>&<),
+    Profunctor,
+    dimap,
+    lmap,
+    rmap,
+    Semigroupoid,
+    Category,
+    (>>>),
+    (&>>),
+    Any,
+
+    -- * Enum definition
+    inverseFunction,
+    inverseMap,
+    enumerateAll,
+
+    -- * Map helpers
+    mapFromListOn,
+    mapFromListOnMerge,
+
+    -- * Error handling
+    HasCallStack,
+    module Data.Error,
+  )
+where
+
+import Control.Applicative ((<|>))
+import Control.Category (Category, (>>>))
+import Control.Foldl.NonEmpty qualified as Foldl1
+import Control.Monad (guard, join, unless, when)
+import Control.Monad.Catch (MonadThrow (throwM))
+import Control.Monad.Except
+  ( ExceptT (..),
+    runExceptT,
+  )
+import Control.Monad.IO.Class (MonadIO, liftIO)
+import Control.Monad.Identity (Identity (Identity))
+import Control.Monad.Reader (MonadReader, asks)
+import Control.Monad.Trans (MonadTrans (lift))
+import Data.Bifunctor (Bifunctor, bimap, first, second)
+import Data.ByteString
+  ( ByteString,
+  )
+import Data.ByteString.Lazy qualified
+import Data.Char qualified
+import Data.Coerce (Coercible, coerce)
+import Data.Data (Proxy (Proxy))
+import Data.Error
+import Data.Foldable (Foldable (foldMap', toList), fold, foldl', for_, sequenceA_, traverse_)
+import Data.Foldable qualified as Foldable
+import Data.Function ((&))
+import Data.Functor ((<&>))
+import Data.Functor.Contravariant (Contravariant (contramap), (>$<))
+import Data.Functor.Identity (Identity (runIdentity))
+import Data.List (zip4)
+import Data.List.NonEmpty (NonEmpty ((:|)), nonEmpty)
+import Data.List.NonEmpty qualified as NonEmpty
+import Data.Map.Strict
+  ( Map,
+  )
+import Data.Map.Strict qualified as Map
+import Data.Maybe (fromMaybe, mapMaybe)
+import Data.Maybe qualified as Maybe
+import Data.Profunctor (Profunctor, dimap, lmap, rmap)
+import Data.Scientific (Scientific)
+import Data.Semigroup (sconcat)
+import Data.Semigroup.Foldable (Foldable1 (fold1), foldMap1)
+import Data.Semigroup.Traversable (Traversable1)
+import Data.Semigroupoid (Semigroupoid (o))
+import Data.Text
+  ( Text,
+  )
+import Data.Text qualified
+import Data.Text.Encoding qualified
+import Data.Text.Encoding.Error qualified
+import Data.Text.Lazy qualified
+import Data.Text.Lazy.Encoding qualified
+import Data.These (These (That, These, This))
+import Data.Traversable (for)
+import Data.Vector (Vector)
+import Data.Void (Void, absurd)
+import Data.Word (Word8)
+import GHC.Exception (errorCallWithCallStackException)
+import GHC.Exts (Any, RuntimeRep, TYPE, raise#)
+import GHC.Generics (Generic)
+import GHC.Natural (Natural)
+import GHC.Records (HasField)
+import GHC.Stack (HasCallStack)
+import GHC.Utils.Encoding qualified as GHC
+import Language.Haskell.TH.Syntax (Lift)
+import PyF (fmt)
+import System.Exit qualified
+import System.IO qualified
+import Validation
+  ( Validation (Failure, Success),
+    eitherToValidation,
+    failure,
+    failures,
+    successes,
+    validationToEither,
+  )
+
+-- | Mark a `do`-block with the type of the Monad/Applicative it uses.
+-- Only intended for reading ease and making code easier to understand,
+-- especially do-blocks that use unconventional monads (like Maybe or List).
+--
+-- Example:
+--
+-- @
+-- doAs @Maybe $ do
+--  a <- Just 'a'
+--  b <- Just 'b'
+--  pure (a, b)
+-- @
+doAs :: forall m a. m a -> m a
+doAs = id
+
+-- | Forward-applying 'contramap', like '&'/'$' and '<&>'/'<$>' but for '>$<'.
+(>&<) :: (Contravariant f) => f b -> (a -> b) -> f a
+(>&<) = flip contramap
+
+infixl 5 >&<
+
+-- | Forward semigroupoid application. The same as '(>>>)', but 'Semigroupoid' is not a superclass of 'Category' (yet).
+--
+-- Specialized examples:
+--
+-- @
+-- for functions : (a -> b) -> (b -> c) -> (a -> c)
+-- for Folds: Fold a b -> Fold b c -> Fold a c
+-- @
+(&>>) :: (Semigroupoid s) => s a b -> s b c -> s a c
+(&>>) = flip Data.Semigroupoid.o
+
+-- like >>>
+infixr 1 &>>
+
+-- | encode a Text to a UTF-8 encoded Bytestring
+textToBytesUtf8 :: Text -> ByteString
+textToBytesUtf8 = Data.Text.Encoding.encodeUtf8
+
+-- | encode a lazy Text to a UTF-8 encoded lazy Bytestring
+textToBytesUtf8Lazy :: Data.Text.Lazy.Text -> Data.ByteString.Lazy.ByteString
+textToBytesUtf8Lazy = Data.Text.Lazy.Encoding.encodeUtf8
+
+bytesToTextUtf8 :: ByteString -> Either Error Text
+bytesToTextUtf8 = first exceptionToError . Data.Text.Encoding.decodeUtf8'
+
+bytesToTextUtf8Lazy :: Data.ByteString.Lazy.ByteString -> Either Error Data.Text.Lazy.Text
+bytesToTextUtf8Lazy = first exceptionToError . Data.Text.Lazy.Encoding.decodeUtf8'
+
+-- | decode a Text from a ByteString that is assumed to be UTF-8 (crash if that is not the case)
+bytesToTextUtf8Unsafe :: ByteString -> Text
+bytesToTextUtf8Unsafe = Data.Text.Encoding.decodeUtf8
+
+-- | decode a Text from a ByteString that is assumed to be UTF-8 (crash if that is not the case)
+bytesToTextUtf8UnsafeLazy :: Data.ByteString.Lazy.ByteString -> Data.Text.Lazy.Text
+bytesToTextUtf8UnsafeLazy = Data.Text.Lazy.Encoding.decodeUtf8
+
+-- | decode a Text from a ByteString that is assumed to be UTF-8,
+-- replace non-UTF-8 bytes with the replacement char U+FFFD.
+bytesToTextUtf8Lenient :: Data.ByteString.ByteString -> Data.Text.Text
+bytesToTextUtf8Lenient =
+  Data.Text.Encoding.decodeUtf8With Data.Text.Encoding.Error.lenientDecode
+
+-- | decode a lazy Text from a lazy ByteString that is assumed to be UTF-8,
+-- replace non-UTF-8 bytes with the replacement char U+FFFD.
+bytesToTextUtf8LenientLazy :: Data.ByteString.Lazy.ByteString -> Data.Text.Lazy.Text
+bytesToTextUtf8LenientLazy =
+  Data.Text.Lazy.Encoding.decodeUtf8With Data.Text.Encoding.Error.lenientDecode
+
+-- | Make a lazy 'Text' strict.
+toStrict :: Data.Text.Lazy.Text -> Text
+toStrict = Data.Text.Lazy.toStrict
+
+-- | Make a strict 'Text' lazy.
+toLazy :: Text -> Data.Text.Lazy.Text
+toLazy = Data.Text.Lazy.fromStrict
+
+-- | Make a lazy 'ByteString' strict.
+toStrictBytes :: Data.ByteString.Lazy.ByteString -> ByteString
+toStrictBytes = Data.ByteString.Lazy.toStrict
+
+-- | Make a strict 'ByteString' lazy.
+toLazyBytes :: ByteString -> Data.ByteString.Lazy.ByteString
+toLazyBytes = Data.ByteString.Lazy.fromStrict
+
+-- | Convert a (performant) 'Text' into an (inefficient) list-of-char 'String'.
+--
+-- Some libraries (like @time@ or @network-uri@) still use `String` as their interface. We only want to convert to 'String' at the edges, otherwise use 'Text'.
+--
+-- ATTN: Don’t use `String` in code if you can avoid it, prefer `Text` instead.
+textToString :: Text -> String
+textToString = Data.Text.unpack
+
+-- | Convert an (inefficient) list-of-char 'String' into a (performant) 'Text'.
+--
+-- Some libraries (like @time@ or @network-uri@) still use `String` as their interface. We want to convert 'String' to 'Text' as soon as possible and only use 'Text' in our code.
+--
+-- ATTN: Don’t use `String` in code if you can avoid it, prefer `Text` instead.
+stringToText :: String -> Text
+stringToText = Data.Text.pack
+
+-- | Encode a String to an UTF-8 encoded Bytestring
+--
+-- ATTN: Don’t use `String` in code if you can avoid it, prefer `Text` instead.
+stringToBytesUtf8 :: String -> ByteString
+-- TODO(Profpatsch): use a stable interface
+stringToBytesUtf8 = GHC.utf8EncodeByteString
+
+-- | Like `show`, but generate a 'Text'
+--
+-- ATTN: This goes via `String` and thus is fairly inefficient.
+-- We should add a good display library at one point.
+--
+-- ATTN: unlike `show`, this forces the whole value of @a@,
+-- so only use it if you want to display the whole thing.
+showToText :: (Show a) => a -> Text
+showToText = stringToText . show
+
+-- | Unsafe conversion between 'Char' and 'Word8'. This is a no-op and
+-- silently truncates Chars > '\255' to 8 bits. It is provided as a
+-- convenience for ByteString construction.
+--
+-- Use if you want to get the 'Word8' representation of a character literal.
+-- Don’t use on arbitrary characters!
+--
+-- >>> charToWordUnsafe ','
+-- 44
+charToWordUnsafe :: Char -> Word8
+{-# INLINE charToWordUnsafe #-}
+charToWordUnsafe = fromIntegral . Data.Char.ord
+
+pattern IsEmpty :: [a]
+pattern IsEmpty <- (null -> True)
+  where
+    IsEmpty = []
+
+pattern IsNonEmpty :: NonEmpty a -> [a]
+pattern IsNonEmpty n <- (nonEmpty -> Just n)
+  where
+    IsNonEmpty n = toList n
+
+{-# COMPLETE IsEmpty, IsNonEmpty #-}
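+
+-- Example use of the patterns (illustrative sketch):
+--
+-- @
+-- describe :: [Int] -> Text
+-- describe IsEmpty = "empty"
+-- describe (IsNonEmpty (x :| _)) = [fmt|starts with {x}|]
+-- @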
+
+-- | Single element in a (non-empty) list.
+singleton :: a -> NonEmpty a
+singleton a = a :| []
+
+-- | If the given list is empty, use the given default element and return a non-empty list.
+nonEmptyDef :: a -> [a] -> NonEmpty a
+nonEmptyDef def xs =
+  xs & nonEmpty & \case
+    Nothing -> def :| []
+    Just ne -> ne
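+
+-- Example (sketch):
+--
+-- @
+-- nonEmptyDef 0 []     == 0 :| []
+-- nonEmptyDef 0 [1, 2] == 1 :| [2]
+-- @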
+
+-- | If the list is not empty, run the given function with the NonEmpty list, otherwise just return @[]@.
+overNonEmpty :: (Applicative f) => (NonEmpty a -> f [b]) -> [a] -> f [b]
+overNonEmpty f xs = case xs of
+  IsEmpty -> pure []
+  IsNonEmpty xs' -> f xs'
+
+-- | Zip two non-empty lists.
+zipNonEmpty :: NonEmpty a -> NonEmpty b -> NonEmpty (a, b)
+{-# INLINE zipNonEmpty #-}
+zipNonEmpty ~(a :| as) ~(b :| bs) = (a, b) :| zip as bs
+
+-- | Zip two non-empty lists, combining them with the given function
+zipWithNonEmpty :: (a -> b -> c) -> NonEmpty a -> NonEmpty b -> NonEmpty c
+{-# INLINE zipWithNonEmpty #-}
+zipWithNonEmpty = NonEmpty.zipWith
+
+-- | Zip three non-empty lists.
+zip3NonEmpty :: NonEmpty a -> NonEmpty b -> NonEmpty c -> NonEmpty (a, b, c)
+{-# INLINE zip3NonEmpty #-}
+zip3NonEmpty ~(a :| as) ~(b :| bs) ~(c :| cs) = (a, b, c) :| zip3 as bs cs
+
+-- | Zip three non-empty lists, combining them with the given function
+zipWith3NonEmpty :: (a -> b -> c -> d) -> NonEmpty a -> NonEmpty b -> NonEmpty c -> NonEmpty d
+{-# INLINE zipWith3NonEmpty #-}
+zipWith3NonEmpty f ~(x :| xs) ~(y :| ys) ~(z :| zs) = f x y z :| zipWith3 f xs ys zs
+
+-- | Zip four non-empty lists
+zip4NonEmpty :: NonEmpty a -> NonEmpty b -> NonEmpty c -> NonEmpty d -> NonEmpty (a, b, c, d)
+{-# INLINE zip4NonEmpty #-}
+zip4NonEmpty ~(a :| as) ~(b :| bs) ~(c :| cs) ~(d :| ds) = (a, b, c, d) :| zip4 as bs cs ds
+
+-- | We don’t want to use Foldable’s `length`, because it is too polymorphic and can lead to bugs.
+-- Only list-y things should have a length.
+class (Foldable f) => Lengthy f
+
+instance Lengthy []
+
+instance Lengthy NonEmpty
+
+instance Lengthy Vector
+
+lengthNatural :: (Lengthy f) => f a -> Natural
+lengthNatural xs =
+  xs
+    & Foldable.length
+    -- length can never be negative or something went really, really wrong
+    & fromIntegral @Int @Natural
+
+-- | @O(n)@. Get the maximum element from a non-empty structure (strict).
+maximum1 :: (Foldable1 f, Ord a) => f a -> a
+maximum1 = Foldl1.fold1 Foldl1.maximum
+
+-- | @O(n)@. Get the maximum element from a non-empty structure, using the given comparator (strict).
+maximumBy1 :: (Foldable1 f) => (a -> a -> Ordering) -> f a -> a
+maximumBy1 f = Foldl1.fold1 (Foldl1.maximumBy f)
+
+-- | @O(n)@. Get the minimum element from a non-empty structure (strict).
+minimum1 :: (Foldable1 f, Ord a) => f a -> a
+minimum1 = Foldl1.fold1 Foldl1.minimum
+
+-- | @O(n)@. Get the minimum element from a non-empty structure, using the given comparator (strict).
+minimumBy1 :: (Foldable1 f) => (a -> a -> Ordering) -> f a -> a
+minimumBy1 f = Foldl1.fold1 (Foldl1.minimumBy f)
+
+-- | Annotate a 'Maybe' with an error message and turn it into an 'Either'.
+annotate :: err -> Maybe a -> Either err a
+annotate err = \case
+  Nothing -> Left err
+  Just a -> Right a
+
+-- | Map the same function over both sides of a Bifunctor (e.g. a tuple).
+both :: (Bifunctor bi) => (a -> b) -> bi a a -> bi b b
+both f = bimap f f
+
+-- | Find the first element for which pred returns `Just a`, and return the `a`.
+--
+-- Example:
+-- @
+-- >>> :set -XTypeApplications
+-- >>> import qualified Text.Read
+--
+-- >>> findMaybe (Text.Read.readMaybe @Int) ["foo"]
+-- Nothing
+-- >>> findMaybe (Text.Read.readMaybe @Int) ["foo", "34.40", "34", "abc"]
+-- Just 34
+-- @
+findMaybe :: (Foldable t) => (a -> Maybe b) -> t a -> Maybe b
+findMaybe mPred list =
+  let pred' x = Maybe.isJust $ mPred x
+   in case Foldable.find pred' list of
+        Just a -> mPred a
+        Nothing -> Nothing
+
+-- | 'traverse' with a function returning 'Either' and collect all errors that happen, if they happen.
+--
+-- Does not shortcut on error, so will always traverse the whole list/'Traversable' structure.
+--
+-- This is a useful error handling function in many circumstances,
+-- because it won’t only return the first error that happens, but rather all of them.
+traverseValidate :: forall t a err b. (Traversable t) => (a -> Either err b) -> t a -> Either (NonEmpty err) (t b)
+traverseValidate f as =
+  as
+    & traverse @t @(Validation _) (eitherToListValidation . f)
+    & validationToEither
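+
+-- Sketch of the difference to plain 'traverse' (assumed example):
+--
+-- @
+-- let f n = if even n then Right n else Left n
+--
+-- traverseValidate f [1, 2, 3] == Left (1 :| [3])  -- all errors are collected
+-- traverse         f [1, 2, 3] == Left 1           -- shortcuts on the first error
+-- @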
+
+-- | 'traverse' with a function returning 'm Either' and collect all errors that happen, if they happen.
+--
+-- Does not shortcut on error, so will always traverse the whole list/'Traversable' structure.
+--
+-- This is a useful error handling function in many circumstances,
+-- because it won’t only return the first error that happens, but rather all of them.
+traverseValidateM :: forall t m a err b. (Traversable t, Applicative m) => (a -> m (Either err b)) -> t a -> m (Either (NonEmpty err) (t b))
+traverseValidateM f as =
+  as
+    & traverse @t @m (\a -> a & f <&> eitherToListValidation)
+    <&> sequenceA @t @(Validation _)
+    <&> validationToEither
+
+-- | 'traverse_' with a function returning 'm Either' and collect all errors that happen, if they happen.
+--
+-- Does not shortcut on error, so will always traverse the whole list/'Traversable' structure.
+--
+-- This is a useful error handling function in many circumstances,
+-- because it won’t only return the first error that happens, but rather all of them.
+traverseValidateM_ :: forall t m a err. (Traversable t, Applicative m) => (a -> m (Either err ())) -> t a -> m (Either (NonEmpty err) ())
+traverseValidateM_ f as =
+  as
+    & traverse @t @m (\a -> a & f <&> eitherToListValidation)
+    <&> sequenceA_ @t @(Validation _)
+    <&> validationToEither
+
+-- | Like 'eitherToValidation', but puts the Error side into a NonEmpty list
+-- to make it combine with other validations.
+--
+-- See also 'validateEithers', if you have a list of Either and want to collect all errors.
+eitherToListValidation :: Either a c -> Validation (NonEmpty a) c
+eitherToListValidation = first singleton . eitherToValidation
+
+-- | Convert an 'Either' to a 'These'.
+eitherToThese :: Either err a -> These err a
+eitherToThese (Left err) = This err
+eitherToThese (Right a) = That a
+
+-- | Like 'eitherToThese', but puts the Error side into a NonEmpty list
+-- to make it combine with other theses.
+eitherToListThese :: Either err a -> These (NonEmpty err) a
+eitherToListThese (Left e) = This (singleton e)
+eitherToListThese (Right a) = That a
+
+-- | Convert a 'Validation' to a 'These'.
+validationToThese :: Validation err a -> These err a
+validationToThese (Failure err) = This err
+validationToThese (Success a) = That a
+
+-- | Nested '>>=' of a These inside some other @m@.
+--
+-- Use if you want to collect errors and successes, and want to chain multiple function returning 'These'.
+thenThese ::
+  (Monad m, Semigroup err) =>
+  (a -> m (These err b)) ->
+  m (These err a) ->
+  m (These err b)
+thenThese f x = do
+  th <- x
+  join <$> traverse f th
+
+-- | Nested validating bind-like combinator.
+--
+-- Use if you want to collect errors, and want to chain multiple functions returning 'Validation'.
+thenValidate ::
+  (a -> Validation err b) ->
+  Validation err a ->
+  Validation err b
+thenValidate f = \case
+  Success a -> f a
+  Failure err -> Failure err
+
+-- | Nested validating bind-like combinator inside some other @m@.
+--
+-- Use if you want to collect errors, and want to chain multiple functions returning 'Validation'.
+thenValidateM ::
+  (Monad m) =>
+  (a -> m (Validation err b)) ->
+  m (Validation err a) ->
+  m (Validation err b)
+thenValidateM f x =
+  eitherToValidation <$> do
+    x' <- validationToEither <$> x
+    case x' of
+      Left err -> pure $ Left err
+      Right a -> validationToEither <$> f a
+
+-- | Put the text to @stderr@.
+putStderrLn :: Text -> IO ()
+putStderrLn msg =
+  System.IO.hPutStrLn System.IO.stderr $ textToString msg
+
+exitWithMessage :: Text -> IO a
+exitWithMessage msg = do
+  putStderrLn msg
+  System.Exit.exitWith $ System.Exit.ExitFailure (-1)
+
+-- | Run some function producing applicative over a traversable data structure,
+-- then collect the results in a Monoid.
+--
+-- Very helpful with side-effecting functions returning @(Validation err a)@:
+--
+-- @
+-- let
+--   f :: Text -> IO (Validation (NonEmpty Error) Text)
+--   f t = pure $ if t == "foo" then Success t else Failure (singleton ("not foo: " <> t))
+--
+-- in traverseFold f [ "foo", "bar", "baz" ]
+--   == Failure ("not foo: bar" :| ["not foo: baz"])
+-- @
+--
+-- … since @(Semigroup err => Validation err a)@ is a @Semigroup@/@Monoid@ itself.
+traverseFold :: (Applicative ap, Traversable t, Monoid m) => (a -> ap m) -> t a -> ap m
+{-# INLINE traverseFold #-}
+traverseFold f xs =
+  -- note: could be weakened to (Foldable t) via `getAp . foldMap (Ap . f)`
+  fold <$> traverse f xs
+
+-- | Like 'traverseFold', but fold over a semigroup instead of a Monoid, by providing a starting element.
+traverseFoldDefault :: (Applicative ap, Traversable t, Semigroup m) => m -> (a -> ap m) -> t a -> ap m
+{-# INLINE traverseFoldDefault #-}
+traverseFoldDefault def f xs = foldDef def <$> traverse f xs
+  where
+    foldDef = foldr (<>)
+
+-- | Same as 'traverseFold', but with a 'Semigroup' and 'Traversable1' restriction.
+traverseFold1 :: (Applicative ap, Traversable1 t, Semigroup s) => (a -> ap s) -> t a -> ap s
+{-# INLINE traverseFold1 #-}
+-- note: cannot be weakened to (Foldable1 t) because there is no `Ap` for Semigroup (No `Apply` typeclass)
+traverseFold1 f xs = fold1 <$> traverse f xs
+
+-- | Use this in places where the code is still to be implemented.
+--
+-- It always type-checks and will show a warning at compile time if it was forgotten in the code.
+--
+-- Use instead of 'error' and 'undefined' for code that hasn’t been written.
+--
+-- Uses the same trick as https://hackage.haskell.org/package/protolude-0.3.0/docs/src/Protolude.Error.html#error
+{-# WARNING todo "'todo' (undefined code) remains in code" #-}
+todo :: forall (r :: RuntimeRep). forall (a :: TYPE r). (HasCallStack) => a
+todo = raise# (errorCallWithCallStackException "This code was not yet implemented: TODO" ?callStack)
+
+-- | Convert an integer to a 'Natural' if possible
+--
+-- Named the same as the function from "GHC.Natural", but does not crash.
+intToNatural :: (Integral a) => a -> Maybe Natural
+intToNatural i =
+  if i < 0
+    then Nothing
+    else Just $ fromIntegral i
+
+-- | @inverseFunction f@ creates a function that is the inverse of a given function
+-- @f@. It does so by constructing 'M.Map' internally for each value @f a@. The
+-- implementation makes sure that the 'M.Map' is constructed only once and then
+-- shared for every call.
+--
+-- __Memory usage note:__ don't inverse functions that have types like 'Int'
+-- as their result. In this case the created 'M.Map' will have huge size.
+--
+-- The complexity of reversed mapping is \(\mathcal{O}(\log n)\).
+--
+-- __Performance note:__ make sure to specialize monomorphic type of your functions
+-- that use 'inverseFunction' to avoid 'M.Map' reconstruction.
+--
+-- One of the common 'inverseFunction' use-case is inverting the 'show' or a 'show'-like
+-- function.
+--
+-- >>> data Color = Red | Green | Blue deriving (Show, Enum, Bounded)
+-- >>> parse = inverseFunction show :: String -> Maybe Color
+-- >>> parse "Red"
+-- Just Red
+-- >>> parse "Black"
+-- Nothing
+--
+-- __Correctness note:__ 'inverseFunction' expects /injective function/ as its argument,
+-- i.e. the function must map distinct arguments to distinct values.
+--
+-- Typical usage of this function looks like this:
+--
+-- @
+-- __data__ GhcVer
+--    = Ghc802
+--    | Ghc822
+--    | Ghc844
+--    | Ghc865
+--    | Ghc881
+--    __deriving__ ('Eq', 'Ord', 'Show', 'Enum', 'Bounded')
+--
+-- showGhcVer :: GhcVer -> 'Text'
+-- showGhcVer = \\__case__
+--    Ghc802 -> "8.0.2"
+--    Ghc822 -> "8.2.2"
+--    Ghc844 -> "8.4.4"
+--    Ghc865 -> "8.6.5"
+--    Ghc881 -> "8.8.1"
+--
+-- parseGhcVer :: 'Text' -> 'Maybe' GhcVer
+-- parseGhcVer = 'inverseFunction' showGhcVer
+-- @
+--
+-- Taken from relude’s @Relude.Extra.Enum@.
+inverseFunction ::
+  forall a k.
+  (Bounded a, Enum a, Ord k) =>
+  (a -> k) ->
+  (k -> Maybe a)
+inverseFunction f k = Map.lookup k $ inverseMap f
+
+-- | Like `inverseFunction`, but instead of returning the function
+-- it returns a mapping from all possible outputs to their possible inputs.
+--
+-- This has the same restrictions of 'inverseFunction'.
+inverseMap :: forall a k. (Bounded a, Enum a, Ord k) => (a -> k) -> Map k a
+inverseMap f = enumerateAll <&> (\a -> (f a, a)) & Map.fromList
+
+-- | All possible values in this enum.
+enumerateAll :: (Enum a, Bounded a) => [a]
+enumerateAll = [minBound .. maxBound]
+
+-- | Create a 'Map' from a list of values, extracting the map key from each value. Like 'Map.fromList'.
+--
+-- Attention: if the key is not unique, the last value with the key will be in the map (like 'Map.fromList').
+mapFromListOn :: (Ord key) => (a -> key) -> [a] -> Map key a
+mapFromListOn f xs = xs <&> (\x -> (f x, x)) & Map.fromList
+
+-- | Create a 'Map' from a list of values, merging multiple values at the same key with '<>' (left-to-right).
+--
+-- @f@ has to extract the key and value. The value must be mergeable (a 'Semigroup').
+--
+-- Note: if a key occurs multiple times, all its values are merged with '<>' in list order, rather than only one of them being kept.
+mapFromListOnMerge :: (Ord key, Semigroup s) => (a -> (key, s)) -> [a] -> Map key s
+mapFromListOnMerge f xs =
+  xs
+    <&> f
+    & Map.fromListWith
+      -- we have to flip (`<>`) because `Map.fromListWith` merges its values “the other way around”
+      (flip (<>))
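+
+-- Example (sketch):
+--
+-- @
+-- mapFromListOnMerge (\(k, v) -> (k, [v])) [(1, 'a'), (2, 'b'), (1, 'c')]
+--   == Map.fromList [(1, "ac"), (2, "b")]
+-- @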
+
+-- | If the predicate is true, return the @m@, else 'mempty'.
+--
+-- This can be used (together with `ifExists`) to e.g. create lists with optional elements:
+--
+-- >>> import Data.Monoid (Sum(..))
+--
+-- >>> :{
+-- mconcat [
+--   ifTrue (1 == 1) [1],
+--   [2, 3, 4],
+--   ifTrue False [5]
+--   ]
+-- :}
+-- [1,2,3,4]
+--
+-- Or any other Monoid:
+--
+-- >>> mconcat [ Sum 1, ifTrue (1 == 1) (Sum 2), Sum 3 ]
+-- Sum {getSum = 6}
+ifTrue :: (Monoid m) => Bool -> m -> m
+ifTrue pred' m = if pred' then m else mempty
+
+-- | If the given @Maybe@ is @Just@, return the result of @f@ wrapped in 'pure', else return 'mempty'.
+--
+-- This can be used (together with `ifTrue`) to e.g. create lists with optional elements:
+--
+-- >>> import Data.Monoid (Sum(..))
+--
+-- >>> :{
+-- mconcat [
+--   ifExists id (Just 1),
+--   [2, 3, 4],
+--   ifExists id Nothing
+--   ]
+-- :}
+-- [1,2,3,4]
+--
+-- Or any other Monoid:
+--
+-- >>> mconcat [ Sum 1, ifExists id (Just 2), Sum 3 ]
+-- Sum {getSum = 6}
+ifExists :: (Monoid (f b), Applicative f) => (a -> b) -> Maybe a -> f b
+ifExists f m = m & foldMap @Maybe (pure . f)
diff --git a/users/Profpatsch/my-prelude/src/Parse.hs b/users/Profpatsch/my-prelude/src/Parse.hs
new file mode 100644
index 0000000000..65a0b0d39e
--- /dev/null
+++ b/users/Profpatsch/my-prelude/src/Parse.hs
@@ -0,0 +1,174 @@
+{-# LANGUAGE QuasiQuotes #-}
+
+module Parse where
+
+import Control.Category qualified
+import Control.Selective (Selective)
+import Data.Error.Tree
+import Data.Functor.Compose
+import Data.List qualified as List
+import Data.Monoid (First (..))
+import Data.Semigroup.Traversable
+import Data.Semigroupoid qualified as Semigroupoid
+import Data.Text qualified as Text
+import FieldParser (FieldParser)
+import FieldParser qualified as Field
+import PossehlAnalyticsPrelude
+import Validation (partitionValidations)
+import Prelude hiding (init, maybe)
+import Prelude qualified
+
+-- | A generic applicative “vertical” parser.
+-- Similar to `FieldParser`, but made for parsing whole structures and collecting all errors in an `ErrorTree`.
+newtype Parse from to = Parse ((Context, from) -> Validation (NonEmpty ErrorTree) (Context, to))
+  deriving
+    (Functor, Applicative, Selective)
+    via ( Compose
+            ( Compose
+                ((->) (Context, from))
+                (Validation (NonEmpty ErrorTree))
+            )
+            ((,) Context)
+        )
+
+-- | Every parser can add to the context; e.g. an element parser will add the name of the element it is parsing.
+-- This should be added to the error message of each parser, with `showContext`.
+newtype Context = Context (Maybe [Text])
+  deriving stock (Show)
+  deriving (Semigroup, Monoid) via (First [Text])
+
+instance Semigroupoid Parse where
+  o p2 p1 = Parse $ \from -> case runParse' p1 from of
+    Failure err -> Failure err
+    Success to1 -> runParse' p2 to1
+
+instance Category Parse where
+  (.) = Semigroupoid.o
+  id = Parse $ \t -> Success t
+
+instance Profunctor Parse where
+  lmap f (Parse p) = Parse $ lmap (second f) p
+  rmap = (<$>)
+
+runParse :: Error -> Parse from to -> from -> Either ErrorTree to
+runParse errMsg parser t =
+  (Context (Just ["$"]), t)
+    & runParse' parser
+    <&> snd
+    & first (nestedMultiError errMsg)
+    & validationToEither
+
+runParse' :: Parse from to -> (Context, from) -> Validation (NonEmpty ErrorTree) (Context, to)
+runParse' (Parse f) from = f from
+
+showContext :: Context -> Text
+showContext (Context context) = context & fromMaybe [] & List.reverse & Text.intercalate "."
+
+addContext :: Text -> Context -> Context
+addContext x (Context mxs) = Context (Just $ x : (mxs & fromMaybe []))
+
+mkParsePushContext :: Text -> ((Context, from) -> Either ErrorTree to) -> Parse from to
+mkParsePushContext toPush f = Parse $ \(ctx, from) -> case f (ctx, from) of
+  Right to -> Success (addContext toPush ctx, to)
+  Left err -> Failure $ singleton err
+
+mkParseNoContext :: (from -> Either ErrorTree to) -> Parse from to
+mkParseNoContext f = Parse $ \(ctx, from) -> case f from of
+  Right to -> Success (ctx, to)
+  Left err -> Failure $ singleton err
+
+-- | Accept only exactly the given value
+exactly :: (Eq from) => (from -> Text) -> from -> Parse from from
+exactly errDisplay from = Parse $ \(ctx, from') ->
+  if from == from'
+    then Success (ctx, from')
+    else Failure $ singleton [fmt|Field has to be exactly {errDisplay from}, was: {errDisplay from'} at {showContext ctx}|]
+
+-- | Make a parser to parse the whole list
+multiple :: Parse a1 a2 -> Parse [a1] [a2]
+multiple inner = dimap nonEmpty (Prelude.maybe [] toList) (maybe $ multipleNE inner)
+
+-- | Make a parser to parse the whole non-empty list
+multipleNE :: Parse from to -> Parse (NonEmpty from) (NonEmpty to)
+multipleNE inner = Parse $ \(ctx, from) ->
+  from
+    & zipIndex
+    & traverse (\(idx, f) -> runParse' inner (ctx, f) & first (singleton . nestedMultiError [fmt|{idx}|]))
+    -- we assume that, since the same parser is used everywhere, the context will be the same as well (TODO: correct?)
+    & second (\((ctx', y) :| ys) -> (ctx', y :| (snd <$> ys)))
+
+-- | Like '(>>>)', but returns the intermediate result alongside the final parse result.
+andParse :: Parse to to2 -> Parse from to -> Parse from (to, to2)
+andParse outer inner = Parse $ \from -> case runParse' inner from of
+  Failure err -> Failure err
+  Success (ctx, to) -> runParse' outer (ctx, to) <&> (second (to,))
+
+-- | Lift a parser into an optional value
+maybe :: Parse from to -> Parse (Maybe from) (Maybe to)
+maybe inner = Parse $ \(ctx, m) -> case m of
+  Nothing -> Success (ctx, Nothing)
+  Just a -> runParse' inner (ctx, a) & second (fmap Just)
+
+-- | Assert that there is exactly one element in the list
+exactlyOne :: Parse [from] from
+exactlyOne = Parse $ \(ctx, xs) -> case xs of
+  [] -> Failure $ singleton [fmt|Expected exactly 1 element, but got 0, at {ctx & showContext}|]
+  [one] -> Success (ctx, one)
+  _more -> Failure $ singleton [fmt|Expected exactly 1 element, but got more than 1, at {ctx & showContext}|]
+
+-- | Assert that there is exactly zero or one element in the list
+zeroOrOne :: Parse [from] (Maybe from)
+zeroOrOne = Parse $ \(ctx, xs) -> case xs of
+  [] -> Success (ctx, Nothing)
+  [one] -> Success (ctx, Just one)
+  _more -> Failure $ singleton [fmt|Expected zero or one elements, but got more than 1, at {ctx & showContext}|]
+
+-- | Find the first element on which the sub-parser succeeds; if there was no match, return all error messages.
+find :: Parse from to -> Parse [from] to
+find inner = Parse $ \(ctx, xs) -> case xs of
+  [] -> failure [fmt|Wanted to get the first sub-parser that succeeds, but there were no elements in the list, at {ctx & showContext}|]
+  (y : ys) -> runParse' (findNE' inner) (ctx, y :| ys)
+
+-- | Find the first element on which the sub-parser succeeds; if there was no match, return all error messages.
+findNE' :: Parse from to -> Parse (NonEmpty from) to
+findNE' inner = Parse $ \(ctx, xs) ->
+  xs
+    <&> (\x -> runParse' inner (ctx, x))
+    & traverse1
+      ( \case
+          Success a -> Left a
+          Failure e -> Right e
+      )
+    & \case
+      Left a -> Success a
+      Right errs ->
+        errs
+          & zipIndex
+          <&> (\(idx, errs') -> nestedMultiError [fmt|{idx}|] errs')
+          & nestedMultiError [fmt|None of these sub-parsers succeeded|]
+          & singleton
+          & Failure
+
+-- | Find all elements on which the sub-parser succeeds; if there was no match, return an empty list
+findAll :: Parse from to -> Parse [from] [to]
+findAll inner = Parse $ \(ctx, xs) ->
+  xs
+    <&> (\x -> runParse' inner (ctx, x))
+    & partitionValidations
+    & \case
+      (_miss, []) ->
+        -- in this case we just arbitrarily forward the original context …
+        Success (ctx, [])
+      (_miss, (hitCtx, hit) : hits) -> Success (hitCtx, hit : (hits <&> snd))
+
+-- | convert a 'FieldParser' into a 'Parse'.
+fieldParser :: FieldParser from to -> Parse from to
+fieldParser fp = Parse $ \(ctx, from) -> case Field.runFieldParser fp from of
+  Right a -> Success (ctx, a)
+  Left err -> Failure $ singleton (singleError err)
+
+zipNonEmpty :: NonEmpty a -> NonEmpty b -> NonEmpty (a, b)
+zipNonEmpty (x :| xs) (y :| ys) = (x, y) :| zip xs ys
+
+zipIndex :: NonEmpty b -> NonEmpty (Natural, b)
+zipIndex = zipNonEmpty (1 :| [2 :: Natural ..])
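+
+-- Overall usage sketch (assumed example; @Field.decimalNatural@ stands in for
+-- whatever 'FieldParser' is available):
+--
+-- @
+-- p :: Parse [Text] [Natural]
+-- p = multiple (fieldParser Field.decimalNatural)
+--
+-- runParse "parsing numbers failed" p ["1", "oops", "3"]
+--   -- Left: an ErrorTree pointing at element 2 (“oops”)
+-- @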
diff --git a/users/Profpatsch/my-prelude/src/Postgres/Decoder.hs b/users/Profpatsch/my-prelude/src/Postgres/Decoder.hs
new file mode 100644
index 0000000000..008b89b4ba
--- /dev/null
+++ b/users/Profpatsch/my-prelude/src/Postgres/Decoder.hs
@@ -0,0 +1,94 @@
+module Postgres.Decoder where
+
+import Control.Applicative (Alternative)
+import Data.Aeson qualified as Json
+import Data.Aeson.BetterErrors qualified as Json
+import Data.Error.Tree
+import Data.Typeable (Typeable)
+import Database.PostgreSQL.Simple (Binary (fromBinary))
+import Database.PostgreSQL.Simple.FromField qualified as PG
+import Database.PostgreSQL.Simple.FromRow qualified as PG
+import Json qualified
+import Label
+import PossehlAnalyticsPrelude
+
+-- | A Decoder of postgres values. Allows embedding more complex parsers (like a 'Json.ParseT').
+newtype Decoder a = Decoder (PG.RowParser a)
+  deriving newtype (Functor, Applicative, Alternative, Monad)
+
+-- | Parse a `bytea` field, equivalent to @Binary ByteString@ but avoids the pitfall of having to use 'Binary'.
+bytea :: Decoder ByteString
+bytea = fromField @(Binary ByteString) <&> (.fromBinary)
+
+-- | Parse a nullable `bytea` field, equivalent to @Maybe (Binary ByteString)@ but avoids the pitfall of having to use 'Binary'.
+byteaMay :: Decoder (Maybe ByteString)
+byteaMay = fromField @(Maybe (Binary ByteString)) <&> fmap (.fromBinary)
+
+-- | Turn any type that implements 'PG.fromField' into a 'Decoder'. Use type applications to prevent accidental conversions:
+--
+-- @
+-- fromField @Text :: Decoder Text
+-- @
+fromField :: PG.FromField a => Decoder a
+fromField = Decoder $ PG.fieldWith PG.fromField
+
+-- | Turn any type that implements 'PG.fromField' into a 'Decoder' and wrap the result into the given 'Label'. Use type applications to prevent accidental conversions:
+--
+-- @
+-- fromField @"myField" @Text :: Decoder (Label "myField" Text)
+-- @
+fromFieldLabel :: forall lbl a. PG.FromField a => Decoder (Label lbl a)
+fromFieldLabel = label @lbl <$> fromField
+
+-- | Parse fields out of a json value returned from the database.
+--
+-- ATTN: The whole json record has to be transferred before it is parsed,
+-- so if you only need a tiny bit of it, use `->` and `->>` in your SQL statement
+-- and return only the fields you need from the query.
+--
+-- In that case pay attention to NULL though:
+--
+-- @
+-- SELECT '{"foo": {}}'::jsonb->>'foo' IS NULL
+-- → TRUE
+-- @
+--
+-- Also note: `->>` will coerce the json value to @text@, regardless of the content.
+-- So the JSON object @{"foo": {}}@ would be returned as the text: @"{\"foo\": {}}"@.
+json :: Typeable a => Json.ParseT ErrorTree Identity a -> Decoder a
+json parser = Decoder $ PG.fieldWith $ \field bytes -> do
+  val <- PG.fromField @Json.Value field bytes
+  case Json.parseValue parser val of
+    Left err ->
+      PG.returnError
+        PG.ConversionFailed
+        field
+        (err & Json.parseErrorTree "Cannot decode jsonb column" & prettyErrorTree & textToString)
+    Right a -> pure a
+
+-- | Parse fields out of a nullable json value returned from the database.
+--
+-- ATTN: The whole json record has to be transferred before it is parsed,
+-- so if you only need a tiny bit of it, use `->` and `->>` in your SQL statement
+-- and return only the fields you need from the query.
+--
+-- In that case pay attention to NULL though:
+--
+-- @
+-- SELECT '{"foo": {}}'::jsonb->>'foo' IS NULL
+-- → TRUE
+-- @
+--
+-- Also note: `->>` will coerce the json value to @text@, regardless of the content.
+-- So the JSON object @{"foo": {}}@ would be returned as the text: @"{\"foo\": {}}"@.
+jsonMay :: Typeable a => Json.ParseT ErrorTree Identity a -> Decoder (Maybe a)
+jsonMay parser = Decoder $ PG.fieldWith $ \field bytes -> do
+  val <- PG.fromField @(Maybe Json.Value) field bytes
+  case Json.parseValue parser <$> val of
+    Nothing -> pure Nothing
+    Just (Left err) ->
+      PG.returnError
+        PG.ConversionFailed
+        field
+        (err & Json.parseErrorTree "Cannot decode jsonb column" & prettyErrorTree & textToString)
+    Just (Right a) -> pure (Just a)
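+
+-- Decoders compose applicatively, one 'fromField' per column (assumed sketch,
+-- not part of the original module):
+--
+-- @
+-- data User = User {userId :: Int64, userName :: Text}
+--
+-- userDecoder :: Decoder User
+-- userDecoder = User <$> fromField @Int64 <*> fromField @Text
+-- @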
diff --git a/users/Profpatsch/my-prelude/src/Postgres/MonadPostgres.hs b/users/Profpatsch/my-prelude/src/Postgres/MonadPostgres.hs
new file mode 100644
index 0000000000..a542f8c7b8
--- /dev/null
+++ b/users/Profpatsch/my-prelude/src/Postgres/MonadPostgres.hs
@@ -0,0 +1,931 @@
+{-# LANGUAGE AllowAmbiguousTypes #-}
+{-# LANGUAGE DeriveAnyClass #-}
+{-# LANGUAGE QuasiQuotes #-}
+{-# OPTIONS_GHC -Wno-orphans #-}
+
+module Postgres.MonadPostgres where
+
+import Arg
+import AtLeast (AtLeast)
+import Control.Exception
+  ( Exception (displayException),
+    Handler (Handler),
+    catches,
+    try,
+  )
+import Control.Foldl qualified as Fold
+import Control.Monad.Logger.CallStack (MonadLogger, logDebug, logWarn)
+import Control.Monad.Reader (MonadReader (ask), ReaderT (..))
+import Control.Monad.Trans.Resource
+import Data.Aeson (FromJSON)
+import Data.ByteString qualified as ByteString
+import Data.Error.Tree
+import Data.HashMap.Strict qualified as HashMap
+import Data.Int (Int64)
+import Data.Kind (Type)
+import Data.List qualified as List
+import Data.Pool (Pool)
+import Data.Pool qualified as Pool
+import Data.Text qualified as Text
+import Data.Typeable (Typeable)
+import Database.PostgreSQL.Simple (Connection, FormatError, FromRow, Query, QueryError, ResultError, SqlError, ToRow)
+import Database.PostgreSQL.Simple qualified as PG
+import Database.PostgreSQL.Simple qualified as Postgres
+import Database.PostgreSQL.Simple.FromRow qualified as PG
+import Database.PostgreSQL.Simple.ToField (ToField)
+import Database.PostgreSQL.Simple.ToRow (ToRow (toRow))
+import Database.PostgreSQL.Simple.Types (Query (..))
+import GHC.IO.Handle (Handle)
+import GHC.Records (getField)
+import Label
+import OpenTelemetry.Trace.Core (NewEvent (newEventName))
+import OpenTelemetry.Trace.Core qualified as Otel hiding (inSpan, inSpan')
+import OpenTelemetry.Trace.Monad qualified as Otel
+import PossehlAnalyticsPrelude
+import Postgres.Decoder
+import Postgres.Decoder qualified as Dec
+import Pretty (showPretty)
+import Seconds
+import System.Exit (ExitCode (..))
+import Tool
+import UnliftIO (MonadUnliftIO (withRunInIO), bracket, hClose, mask_)
+import UnliftIO.Concurrent (forkIO)
+import UnliftIO.Process (ProcessHandle)
+import UnliftIO.Process qualified as Process
+import UnliftIO.Resource qualified as Resource
+import Prelude hiding (init, span)
+
+-- | Postgres queries/commands that can be executed within a running transaction.
+--
+-- These are implemented with the @postgresql-simple@ primitives of the same name
+-- and will behave the same unless otherwise documented.
+class (Monad m) => MonadPostgres (m :: Type -> Type) where
+  -- | Execute an INSERT, UPDATE, or other SQL query that is not expected to return results.
+  --
+  -- Returns the number of rows affected.
+  execute ::
+    (ToRow params, Typeable params) =>
+    Query ->
+    params ->
+    Transaction m (Label "numberOfRowsAffected" Natural)
+
+  -- | Execute a multi-row INSERT, UPDATE, or other SQL query that is not expected to return results.
+  --
+  -- Returns the number of rows affected. If the list of parameters is empty,
+  -- this function will simply return 0 without issuing the query to the backend.
+  -- If this is not desired, consider using the 'PG.Values' constructor instead.
+  executeMany ::
+    (ToRow params, Typeable params) =>
+    Query ->
+    NonEmpty params ->
+    Transaction m (Label "numberOfRowsAffected" Natural)
+
+  -- | Execute INSERT ... RETURNING, UPDATE ... RETURNING,
+  -- or other SQL query that accepts multi-row input and is expected to return results.
+  -- Note that it is possible to write @query conn "INSERT ... RETURNING ..." ...@
+  -- in cases where you are only inserting a single row,
+  -- and do not need functionality analogous to 'executeMany'.
+  --
+  -- If the list of parameters is empty, this function will simply return [] without issuing the query to the backend. If this is not desired, consider using the 'PG.Values' constructor instead.
+  executeManyReturningWith :: (ToRow q) => Query -> NonEmpty q -> Decoder r -> Transaction m [r]
+
+  -- | Run a query, passing parameters and result row parser.
+  queryWith ::
+    (PG.ToRow params, Typeable params, Typeable r) =>
+    PG.Query ->
+    params ->
+    Decoder r ->
+    Transaction m [r]
+
+  -- | Run a query without any parameters and result row parser.
+  queryWith_ ::
+    (Typeable r) =>
+    PG.Query ->
+    Decoder r ->
+    Transaction m [r]
+
+  -- | Run a query, passing parameters, and fold over the resulting rows.
+  --
+  -- This doesn’t have to realize the full list of results in memory,
+  -- rather results are streamed incrementally from the database.
+  --
+  -- When dealing with small results, it may be simpler (and perhaps faster) to use query instead.
+  --
+  -- This fold is _not_ strict. The stream consumer is responsible
+  -- for forcing the evaluation of its result to avoid space leaks.
+  --
+  -- If you can, prefer aggregating in the database itself.
+  foldRowsWithAcc ::
+    (ToRow params, Typeable row, Typeable params) =>
+    Query ->
+    params ->
+    Decoder row ->
+    a ->
+    (a -> row -> Transaction m a) ->
+    Transaction m a
+
+  -- | Run a given transaction in a transaction block, rolling back the transaction
+  -- if any exception (postgres or Haskell Exception) is thrown during execution.
+  --
+  -- Re-throws the exception.
+  --
+  -- Don’t do any long-running things on the Haskell side during a transaction,
+  -- because it will block a database connection and potentially also lock
+  -- database tables from being written or read by other clients.
+  --
+  -- Nonetheless, try to push transactions as far out to the handlers as possible,
+  -- don’t do something like @runTransaction $ query …@, because it will lead people
+  -- to accidentally start nested transactions (the inner transaction is run on a new connection,
+  -- thus can’t see any changes done by the outer transaction).
+  -- Only handlers should run transactions.
+  runTransaction :: Transaction m a -> m a
+
+-- | Run a query, passing parameters. Prefer 'queryWith' if possible.
+query ::
+  forall m params r.
+  (PG.ToRow params, PG.FromRow r, Typeable params, Typeable r, MonadPostgres m) =>
+  PG.Query ->
+  params ->
+  Transaction m [r]
+query qry params = queryWith qry params (Decoder PG.fromRow)
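+
+-- Usage sketch (table and column names are made up; assumes OverloadedStrings):
+--
+-- @
+-- names <- runTransaction $
+--   queryWith
+--     "SELECT name FROM users WHERE age >= ?"
+--     (PG.Only (18 :: Int))
+--     (Dec.fromField @Text)
+-- @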
+
+-- | Run a query without any parameters. Prefer 'queryWith' if possible.
+--
+-- TODO: I think(?) this can always be replaced by passing @()@ to 'query', remove?
+query_ ::
+  forall m r.
+  (Typeable r, PG.FromRow r, MonadPostgres m) =>
+  PG.Query ->
+  Transaction m [r]
+query_ qry = queryWith_ qry (Decoder PG.fromRow)
+
+-- TODO: implement via fold, so that the result doesn’t have to be realized in memory
+querySingleRow ::
+  ( MonadPostgres m,
+    ToRow qParams,
+    Typeable qParams,
+    FromRow a,
+    Typeable a,
+    MonadThrow m
+  ) =>
+  Query ->
+  qParams ->
+  Transaction m a
+querySingleRow qry params = do
+  query qry params >>= ensureSingleRow
+
+-- TODO: implement via fold, so that the result doesn’t have to be realized in memory
+querySingleRowWith ::
+  ( MonadPostgres m,
+    ToRow qParams,
+    Typeable qParams,
+    Typeable a,
+    MonadThrow m
+  ) =>
+  Query ->
+  qParams ->
+  Decoder a ->
+  Transaction m a
+querySingleRowWith qry params decoder = do
+  queryWith qry params decoder >>= ensureSingleRow
+
+-- TODO: implement via fold, so that the result doesn’t have to be realized in memory
+querySingleRowMaybe ::
+  ( MonadPostgres m,
+    ToRow qParams,
+    Typeable qParams,
+    FromRow a,
+    Typeable a,
+    MonadThrow m
+  ) =>
+  Query ->
+  qParams ->
+  Transaction m (Maybe a)
+querySingleRowMaybe qry params = do
+  rows <- query qry params
+  case rows of
+    [] -> pure Nothing
+    [one] -> pure (Just one)
+    -- TODO: Should we MonadThrow this here? It’s really an implementation detail of MonadPostgres
+    -- that a database function can error out, which should probably be handled by the instances.
+    more -> throwM $ SingleRowError {numberOfRowsReturned = (List.length more)}
+
+ensureSingleRow ::
+  (MonadThrow m) =>
+  [a] ->
+  m a
+ensureSingleRow = \case
+  -- TODO: Should we MonadThrow this here? It’s really an implementation detail of MonadPostgres
+  -- that a database function can error out, which should probably be handled by the instances.
+  [] -> throwM (SingleRowError {numberOfRowsReturned = 0})
+  [one] -> pure one
+  more ->
+    throwM $
+      SingleRowError
+        { numberOfRowsReturned =
+            -- TODO: this is VERY bad, because it requires to parse the full database output, even if there’s 10000000000 elements
+            List.length more
+        }
+
+ensureNoneOrSingleRow ::
+  (MonadThrow m) =>
+  [a] ->
+  m (Maybe a)
+ensureNoneOrSingleRow = \case
+  -- TODO: Should we MonadThrow this here? It’s really an implementation detail of MonadPostgres
+  -- that a database function can error out, which should probably be handled by the instances.
+  [] -> pure Nothing
+  [one] -> pure $ Just one
+  more ->
+    throwM $
+      SingleRowError
+        { numberOfRowsReturned =
+            -- TODO: this is VERY bad, because it requires to parse the full database output, even if there’s 10000000000 elements
+            List.length more
+        }
+
+-- | Run a query, passing parameters, and fold over the resulting rows.
+--
+-- This doesn’t have to realize the full list of results in memory,
+-- rather results are streamed incrementally from the database.
+--
+-- When dealing with small results, it may be simpler (and perhaps faster) to use query instead.
+--
+-- The results are folded strictly by the 'Fold.Fold' that is passed.
+--
+-- If you can, prefer aggregating in the database itself.
+foldRowsWith ::
+  forall row params m b.
+  ( MonadPostgres m,
+    PG.ToRow params,
+    Typeable row,
+    Typeable params
+  ) =>
+  PG.Query ->
+  params ->
+  Decoder row ->
+  Fold.Fold row b ->
+  Transaction m b
+foldRowsWith qry params decoder = Fold.purely f
+  where
+    f :: forall x. (x -> row -> x) -> x -> (x -> b) -> Transaction m b
+    f acc init extract = do
+      x <- foldRowsWithAcc qry params decoder init (\a r -> pure $ acc a r)
+      pure $ extract x
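+
+-- Usage sketch (assumed example): count matching rows without realizing the
+-- full result list in memory:
+--
+-- @
+-- rowCount <- foldRowsWith qry params (Dec.fromField @Int64) Fold.length
+-- @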
+
+newtype Transaction m a = Transaction {unTransaction :: (ReaderT Connection m a)}
+  deriving newtype
+    ( Functor,
+      Applicative,
+      Monad,
+      MonadThrow,
+      MonadLogger,
+      MonadIO,
+      MonadUnliftIO,
+      MonadTrans,
+      Otel.MonadTracer
+    )
+
+-- | [Resource Pool](http://hackage.haskell.org/package/resource-pool-0.2.3.2/docs/Data-Pool.html) configuration.
+data PoolingInfo = PoolingInfo
+  { -- | Minimal amount of resources that are
+    --   always available.
+    numberOfStripes :: AtLeast 1 Int,
+    -- | Time after which extra resources
+    --   (above minimum) can stay in the pool
+    --   without being used.
+    unusedResourceOpenTime :: Seconds,
+    -- | Max number of resources that can be
+    --   in the Pool at any time
+    maxOpenResourcesAcrossAllStripes :: AtLeast 1 Int
+  }
+  deriving stock (Generic, Eq, Show)
+  deriving anyclass (FromJSON)
+
+initMonadPostgres ::
+  (Text -> IO ()) ->
+  -- | Info describing the connection to the Postgres DB
+  Postgres.ConnectInfo ->
+  -- | Configuration info for pooling attributes
+  PoolingInfo ->
+  -- | Created Postgres connection pool
+  ResourceT IO (Pool Postgres.Connection)
+initMonadPostgres logInfoFn connectInfo poolingInfo = do
+  (_releaseKey, connPool) <-
+    Resource.allocate
+      (logInfoFn "Creating Postgres Connection Pool" >> createPGConnPool)
+      (\pool -> logInfoFn "Destroying Postgres Connection Pool" >> destroyPGConnPool pool)
+  pure connPool
+  where
+    -- \| Create a Postgres connection pool
+    createPGConnPool ::
+      IO (Pool Postgres.Connection)
+    createPGConnPool =
+      Pool.newPool $
+        Pool.defaultPoolConfig
+          {- resource init action -} poolCreateResource
+          {- resource destruction -} poolfreeResource
+          ( poolingInfo.unusedResourceOpenTime.unSeconds
+              & fromIntegral @Natural @Double
+          )
+          (poolingInfo.maxOpenResourcesAcrossAllStripes.unAtLeast)
+      where
+        poolCreateResource = Postgres.connect connectInfo
+        poolfreeResource = Postgres.close
+
+    -- \| Destroy a Postgres connection pool
+    destroyPGConnPool ::
+      -- \| Pool to be destroyed
+      (Pool Postgres.Connection) ->
+      IO ()
+    destroyPGConnPool p = Pool.destroyAllResources p
+
+-- | Improve a possible error message, by adding some context to it.
+--
+-- The given Exception type is caught, 'show'n and pretty-printed.
+--
+-- In case we get an `IOError`, we display it in a reasonable fashion.
+addErrorInformation ::
+  forall exc a.
+  (Exception exc) =>
+  Text.Text ->
+  IO a ->
+  IO a
+addErrorInformation msg io =
+  io
+    & try @exc
+    <&> first (showPretty >>> newError >>> errorContext msg)
+    & try @IOError
+    <&> first (showToError >>> errorContext "IOError" >>> errorContext msg)
+    <&> join @(Either Error)
+    >>= unwrapIOError
+
+-- | Catch any Postgres exception that gets thrown,
+-- print the query that was run and the query parameters,
+-- then rethrow inside an 'Error'.
+handlePGException ::
+  forall a params tools m.
+  ( ToRow params,
+    MonadUnliftIO m,
+    MonadLogger m,
+    HasField "pgFormat" tools PgFormatPool
+  ) =>
+  tools ->
+  Text ->
+  Query ->
+  -- | Depending on whether we used `format` or `formatMany`.
+  Either params (NonEmpty params) ->
+  IO a ->
+  Transaction m a
+handlePGException tools queryType query' params io = do
+  withRunInIO $ \unliftIO ->
+    io
+      `catches` [ Handler $ unliftIO . logQueryException @SqlError,
+                  Handler $ unliftIO . logQueryException @QueryError,
+                  Handler $ unliftIO . logQueryException @ResultError,
+                  Handler $ unliftIO . logFormatException
+                ]
+  where
+    -- TODO: use throwInternalError here (after pulling it into the MonadPostgres class)
+    throwAsError = unwrapIOError . Left . newError
+    throwErr err = liftIO $ throwAsError $ prettyErrorTree $ nestedMultiError "A Postgres query failed" err
+    logQueryException :: (Exception e) => e -> Transaction m a
+    logQueryException exc = do
+      formattedQuery <- case params of
+        Left one -> pgFormatQuery' tools query' one
+        Right many -> pgFormatQueryMany' tools query' many
+      throwErr
+        ( singleError [fmt|Query Type: {queryType}|]
+            :| [ nestedError "Exception" (exc & showPretty & newError & singleError),
+                 nestedError "Query" (formattedQuery & newError & singleError)
+               ]
+        )
+    logFormatException :: FormatError -> Transaction m a
+    logFormatException fe = throwErr (fe & showPretty & newError & singleError & singleton)
+
+-- | Perform a Postgres action within a transaction
+withPGTransaction ::
+  -- | Postgres connection pool to be used for the action
+  (Pool Postgres.Connection) ->
+  -- | DB-action to be performed
+  (Postgres.Connection -> IO a) ->
+  -- | Result of the DB-action
+  IO a
+withPGTransaction connPool f =
+  Pool.withResource
+    connPool
+    (\conn -> Postgres.withTransaction conn (f conn))
+
+-- | `pg_formatter` is a perl script that does not support any kind of streaming.
+-- Thus we initialize a pool with a bunch of these scripts running, waiting for input. This way we can have somewhat fast SQL formatting.
+--
+-- Call `initPgFormatPool` to initialize, then use `runPgFormat` to format some sql.
+data PgFormatPool = PgFormatPool
+  { pool :: Pool PgFormatProcess,
+    pgFormat :: Tool
+  }
+
+data PgFormatProcess = PgFormatProcess
+  { stdinHdl :: Handle,
+    stdoutHdl :: Handle,
+    stderrHdl :: Handle,
+    procHdl :: ProcessHandle,
+    startedAt :: Otel.Timestamp
+  }
+
+initPgFormatPool :: (HasField "pgFormat" tools Tool) => tools -> IO PgFormatPool
+initPgFormatPool tools = do
+  pool <-
+    Pool.newPool
+      ( Pool.defaultPoolConfig
+          (pgFormatStartCommandWaitForInput tools)
+          ( \pgFmt -> do
+              Process.terminateProcess pgFmt.procHdl
+              -- make sure we don’t leave any zombies
+              _ <- forkIO $ do
+                _ <- Process.waitForProcess pgFmt.procHdl
+                pure ()
+              pure ()
+          )
+          -- unused resource time
+          100
+          -- number of resources
+          10
+      )
+
+  -- fill the pool with resources
+  let go =
+        Pool.tryWithResource pool (\_ -> go) >>= \case
+          Nothing -> pure ()
+          Just () -> pure ()
+  _ <- go
+  pure (PgFormatPool {pool, pgFormat = tools.pgFormat})
+
+destroyPgFormatPool :: PgFormatPool -> IO ()
+destroyPgFormatPool pool = Pool.destroyAllResources pool.pool
+
+-- | Get the oldest resource from the pool, or stop if you find a resource that’s older than `cutoffPointMs`.
+takeOldestResource :: PgFormatPool -> Arg "cutoffPointMs" Integer -> IO (PgFormatProcess, Pool.LocalPool PgFormatProcess)
+takeOldestResource pool cutoffPointMs = do
+  now <- Otel.getTimestamp
+  mask_ $ do
+    a <- Pool.takeResource pool.pool
+    (putBack, res) <- go now [] a
+    -- make sure we don’t leak any resources we didn’t use in the end
+    for_ putBack $ \(x, xLocal) -> Pool.putResource xLocal x
+    pure res
+  where
+    mkMs ts = (ts & Otel.timestampNanoseconds & toInteger) `div` 1000_000
+    go now putBack a@(a', _) =
+      if abs (mkMs now - mkMs a'.startedAt) > cutoffPointMs.unArg
+        then pure (putBack, a)
+        else
+          Pool.tryTakeResource pool.pool >>= \case
+            Nothing -> pure (putBack, a)
+            Just b@(b', _) -> do
+              if a'.startedAt < b'.startedAt
+                then go now (b : putBack) a
+                else go now (a : putBack) b
+
+-- | Format the given SQL with pg_formatter. Will use the pool of already running formatters to speed up execution.
+runPgFormat :: PgFormatPool -> ByteString -> IO (T3 "exitCode" ExitCode "formatted" ByteString "stderr" ByteString)
+runPgFormat pool sqlStatement = do
+  bracket
+    (takeOldestResource pool 200)
+    ( \(a, localPool) -> do
+        -- we always destroy the resource, because the process exited
+        Pool.destroyResource pool.pool localPool a
+        -- create a new process to keep the pool “warm”
+        new <- pgFormatStartCommandWaitForInput pool
+        Pool.putResource localPool new
+    )
+    ( \(pgFmt, _localPool) -> do
+        putStderrLn "Running with warm pgformatter"
+        ByteString.hPut pgFmt.stdinHdl sqlStatement
+        -- close stdin to make pg_formatter format (it exits …)
+        -- issue: https://github.com/darold/pgFormatter/issues/333
+        hClose pgFmt.stdinHdl
+        formatted <- ByteString.hGetContents pgFmt.stdoutHdl
+        errs <- ByteString.hGetContents pgFmt.stderrHdl
+        exitCode <- Process.waitForProcess pgFmt.procHdl
+        pure $
+          T3
+            (label @"exitCode" exitCode)
+            (label @"formatted" formatted)
+            (label @"stderr" errs)
+    )
+
+runPGTransactionImpl ::
+  (MonadUnliftIO m) =>
+  m (Pool Postgres.Connection) ->
+  Transaction m a ->
+  m a
+{-# INLINE runPGTransactionImpl #-}
+runPGTransactionImpl zoom (Transaction transaction) = do
+  pool <- zoom
+  withRunInIO $ \unliftIO ->
+    withPGTransaction pool $ \conn -> do
+      unliftIO $ runReaderT transaction conn
+
+executeImpl ::
+  (ToRow params, MonadUnliftIO m, MonadLogger m, HasField "pgFormat" tools PgFormatPool, Otel.MonadTracer m) =>
+  m tools ->
+  m DebugLogDatabaseQueries ->
+  Query ->
+  params ->
+  Transaction m (Label "numberOfRowsAffected" Natural)
+{-# INLINE executeImpl #-}
+executeImpl zoomTools zoomDebugLogDatabaseQueries qry params =
+  Otel.inSpan' "Postgres Query (execute)" Otel.defaultSpanArguments $ \span -> do
+    tools <- lift @Transaction zoomTools
+    logDatabaseQueries <- lift @Transaction zoomDebugLogDatabaseQueries
+    traceQueryIfEnabled tools span logDatabaseQueries qry (HasSingleParam params)
+    conn <- Transaction ask
+    PG.execute conn qry params
+      & handlePGException tools "execute" qry (Left params)
+      >>= toNumberOfRowsAffected "executeImpl"
+
+executeImpl_ ::
+  (MonadUnliftIO m, MonadLogger m, HasField "pgFormat" tools PgFormatPool, Otel.MonadTracer m) =>
+  m tools ->
+  m DebugLogDatabaseQueries ->
+  Query ->
+  Transaction m (Label "numberOfRowsAffected" Natural)
+{-# INLINE executeImpl_ #-}
+executeImpl_ zoomTools zoomDebugLogDatabaseQueries qry =
+  Otel.inSpan' "Postgres Query (execute)" Otel.defaultSpanArguments $ \span -> do
+    tools <- lift @Transaction zoomTools
+    logDatabaseQueries <- lift @Transaction zoomDebugLogDatabaseQueries
+    traceQueryIfEnabled @() tools span logDatabaseQueries qry HasNoParams
+    conn <- Transaction ask
+    PG.execute_ conn qry
+      & handlePGException tools "execute_" qry (Left ())
+      >>= toNumberOfRowsAffected "executeImpl_"
+
+executeManyImpl ::
+  (ToRow params, MonadUnliftIO m, MonadLogger m, HasField "pgFormat" tools PgFormatPool, Otel.MonadTracer m) =>
+  m tools ->
+  m DebugLogDatabaseQueries ->
+  Query ->
+  NonEmpty params ->
+  Transaction m (Label "numberOfRowsAffected" Natural)
+executeManyImpl zoomTools zoomDebugLogDatabaseQueries qry params =
+  Otel.inSpan' "Postgres Query (executeMany)" Otel.defaultSpanArguments $ \span -> do
+    tools <- lift @Transaction zoomTools
+    logDatabaseQueries <- lift @Transaction zoomDebugLogDatabaseQueries
+    traceQueryIfEnabled tools span logDatabaseQueries qry (HasMultiParams params)
+    conn <- Transaction ask
+    PG.executeMany conn qry (params & toList)
+      & handlePGException tools "executeMany" qry (Right params)
+      >>= toNumberOfRowsAffected "executeManyImpl"
+
+toNumberOfRowsAffected :: (MonadIO m) => Text -> Int64 -> m (Label "numberOfRowsAffected" Natural)
+toNumberOfRowsAffected functionName i64 =
+  i64
+    & intToNatural
+    & annotate [fmt|{functionName}: postgres returned a negative number of rows affected: {i64}|]
+    -- we throw this directly in IO here, because we don’t want to e.g. have to propagate MonadThrow through user code (it’s an assertion)
+    & unwrapIOError
+    & liftIO
+    <&> label @"numberOfRowsAffected"
+
+executeManyReturningWithImpl ::
+  (ToRow params, MonadUnliftIO m, MonadLogger m, HasField "pgFormat" tools PgFormatPool, Otel.MonadTracer m) =>
+  m tools ->
+  m DebugLogDatabaseQueries ->
+  Query ->
+  NonEmpty params ->
+  Decoder r ->
+  Transaction m [r]
+{-# INLINE executeManyReturningWithImpl #-}
+executeManyReturningWithImpl zoomTools zoomDebugLogDatabaseQueries qry params (Decoder fromRow) = do
+  Otel.inSpan' "Postgres Query (executeManyReturning)" Otel.defaultSpanArguments $ \span -> do
+    tools <- lift @Transaction zoomTools
+    logDatabaseQueries <- lift @Transaction zoomDebugLogDatabaseQueries
+    traceQueryIfEnabled tools span logDatabaseQueries qry (HasMultiParams params)
+    conn <- Transaction ask
+    PG.returningWith fromRow conn qry (params & toList)
+      & handlePGException tools "executeManyReturning" qry (Right params)
+
+foldRowsWithAccImpl ::
+  ( ToRow params,
+    MonadUnliftIO m,
+    MonadLogger m,
+    HasField "pgFormat" tools PgFormatPool,
+    Otel.MonadTracer m
+  ) =>
+  m tools ->
+  m DebugLogDatabaseQueries ->
+  Query ->
+  params ->
+  Decoder row ->
+  a ->
+  (a -> row -> Transaction m a) ->
+  Transaction m a
+{-# INLINE foldRowsWithAccImpl #-}
+foldRowsWithAccImpl zoomTools zoomDebugLogDatabaseQueries qry params (Decoder rowParser) accumulator f = do
+  Otel.inSpan' "Postgres Query (foldRowsWithAcc)" Otel.defaultSpanArguments $ \span -> do
+    tools <- lift @Transaction zoomTools
+    logDatabaseQueries <- lift @Transaction zoomDebugLogDatabaseQueries
+    traceQueryIfEnabled tools span logDatabaseQueries qry (HasSingleParam params)
+    conn <- Transaction ask
+    withRunInIO
+      ( \runInIO ->
+          do
+            PG.foldWithOptionsAndParser
+              PG.defaultFoldOptions
+              rowParser
+              conn
+              qry
+              params
+              accumulator
+              (\acc row -> runInIO $ f acc row)
+              & handlePGException tools "fold" qry (Left params)
+              & runInIO
+      )
+
+pgFormatQueryNoParams' ::
+  (MonadIO m, MonadLogger m, HasField "pgFormat" tools PgFormatPool) =>
+  tools ->
+  Query ->
+  Transaction m Text
+pgFormatQueryNoParams' tools q =
+  lift $ pgFormatQueryByteString tools q.fromQuery
+
+pgFormatQuery ::
+  (ToRow params, MonadIO m) =>
+  Query ->
+  params ->
+  Transaction m ByteString
+pgFormatQuery qry params = Transaction $ do
+  conn <- ask
+  liftIO $ PG.formatQuery conn qry params
+
+pgFormatQueryMany ::
+  (MonadIO m, ToRow params) =>
+  Query ->
+  NonEmpty params ->
+  Transaction m ByteString
+pgFormatQueryMany qry params = Transaction $ do
+  conn <- ask
+  liftIO $
+    PG.formatMany
+      conn
+      qry
+      ( params
+          -- upstream is partial on empty list, see https://github.com/haskellari/postgresql-simple/issues/129
+          & toList
+      )
+
+queryWithImpl ::
+  ( ToRow params,
+    MonadUnliftIO m,
+    MonadLogger m,
+    HasField "pgFormat" tools PgFormatPool,
+    Otel.MonadTracer m
+  ) =>
+  m tools ->
+  m DebugLogDatabaseQueries ->
+  Query ->
+  params ->
+  Decoder r ->
+  Transaction m [r]
+{-# INLINE queryWithImpl #-}
+queryWithImpl zoomTools zoomDebugLogDatabaseQueries qry params (Decoder fromRow) = do
+  Otel.inSpan' "Postgres Query (queryWith)" Otel.defaultSpanArguments $ \span -> do
+    tools <- lift @Transaction zoomTools
+    logDatabaseQueries <- lift @Transaction zoomDebugLogDatabaseQueries
+    traceQueryIfEnabled tools span logDatabaseQueries qry (HasSingleParam params)
+    conn <- Transaction ask
+    PG.queryWith fromRow conn qry params
+      & handlePGException tools "query" qry (Left params)
+
+queryWithImpl_ ::
+  ( MonadUnliftIO m,
+    MonadLogger m,
+    HasField "pgFormat" tools PgFormatPool
+  ) =>
+  m tools ->
+  Query ->
+  Decoder r ->
+  Transaction m [r]
+{-# INLINE queryWithImpl_ #-}
+queryWithImpl_ zoomTools qry (Decoder fromRow) = do
+  tools <- lift @Transaction zoomTools
+  conn <- Transaction ask
+  liftIO (PG.queryWith_ fromRow conn qry)
+    & handlePGException tools "query" qry (Left ())
+
+data SingleRowError = SingleRowError
+  { -- | How many rows were actually returned by the query
+    numberOfRowsReturned :: Int
+  }
+  deriving stock (Show)
+
+instance Exception SingleRowError where
+  displayException (SingleRowError {..}) = [fmt|Single row expected from SQL query result, {numberOfRowsReturned} rows were returned instead.|]
+
+pgFormatQuery' ::
+  ( MonadIO m,
+    ToRow params,
+    MonadLogger m,
+    HasField "pgFormat" tools PgFormatPool
+  ) =>
+  tools ->
+  Query ->
+  params ->
+  Transaction m Text
+pgFormatQuery' tools q p =
+  pgFormatQuery q p
+    >>= lift . pgFormatQueryByteString tools
+
+pgFormatQueryMany' ::
+  ( MonadIO m,
+    ToRow params,
+    MonadLogger m,
+    HasField "pgFormat" tools PgFormatPool
+  ) =>
+  tools ->
+  Query ->
+  NonEmpty params ->
+  Transaction m Text
+pgFormatQueryMany' tools q p =
+  pgFormatQueryMany q p
+    >>= lift . pgFormatQueryByteString tools
+
+-- | Parser that reads the @pg_format@ executable from the tools directory
+postgresToolsParser :: ToolParserT IO (Label "pgFormat" Tool)
+postgresToolsParser = label @"pgFormat" <$> readTool "pg_format"
+
+pgFormatQueryByteString ::
+  ( MonadIO m,
+    MonadLogger m,
+    HasField "pgFormat" tools PgFormatPool
+  ) =>
+  tools ->
+  ByteString ->
+  m Text
+pgFormatQueryByteString tools queryBytes = do
+  res <-
+    liftIO $
+      runPgFormat
+        tools.pgFormat
+        (queryBytes)
+  case res.exitCode of
+    ExitSuccess -> pure (res.formatted & bytesToTextUtf8Lenient)
+    ExitFailure status -> do
+      logWarn [fmt|pg_format failed with status {status} while formatting the query, using original query string. Is there a syntax error?|]
+      logDebug
+        ( prettyErrorTree
+            ( nestedMultiError
+                "pg_format output"
+                ( nestedError "stdout" (singleError (res.formatted & bytesToTextUtf8Lenient & newError))
+                    :| [(nestedError "stderr" (singleError (res.stderr & bytesToTextUtf8Lenient & newError)))]
+                )
+            )
+        )
+      pure (queryBytes & bytesToTextUtf8Lenient)
+
+pgFormatStartCommandWaitForInput ::
+  ( MonadIO m,
+    HasField "pgFormat" tools Tool,
+    MonadFail m
+  ) =>
+  tools ->
+  m PgFormatProcess
+pgFormatStartCommandWaitForInput tools = do
+  do
+    startedAt <- Otel.getTimestamp
+    (Just stdinHdl, Just stdoutHdl, Just stderrHdl, procHdl) <-
+      Process.createProcess
+        ( ( Process.proc
+              tools.pgFormat.toolPath
+              [ "--no-rcfile",
+                "-"
+              ]
+          )
+            { Process.std_in = Process.CreatePipe,
+              Process.std_out = Process.CreatePipe,
+              Process.std_err = Process.CreatePipe
+            }
+        )
+
+    pure PgFormatProcess {..}
+
+data DebugLogDatabaseQueries
+  = -- | Do not log the database queries
+    DontLogDatabaseQueries
+  | -- | Log the database queries as debug output;
+    LogDatabaseQueries
+  | -- | Log the database queries as debug output and additionally the EXPLAIN output (from the query analyzer, not the actual values after execution, because that’s a bit harder to do)
+    LogDatabaseQueriesAndExplain
+  deriving stock (Show, Enum, Bounded)
+
+data HasQueryParams param
+  = HasNoParams
+  | HasSingleParam param
+  | HasMultiParams (NonEmpty param)
+
+-- | Log the postgres query depending on the given setting
+traceQueryIfEnabled ::
+  ( ToRow params,
+    MonadUnliftIO m,
+    MonadLogger m,
+    HasField "pgFormat" tools PgFormatPool,
+    Otel.MonadTracer m
+  ) =>
+  tools ->
+  Otel.Span ->
+  DebugLogDatabaseQueries ->
+  Query ->
+  HasQueryParams params ->
+  Transaction m ()
+traceQueryIfEnabled tools span logDatabaseQueries qry params = do
+  -- the formatted query is only forced by the logging branches below; if logging is disabled it is never run
+  let formattedQuery = do
+        withEvent
+          span
+          "Query Format start"
+          "Query Format end"
+          $ case params of
+            HasNoParams -> pgFormatQueryNoParams' tools qry
+            HasSingleParam p -> pgFormatQuery' tools qry p
+            HasMultiParams ps -> pgFormatQueryMany' tools qry ps
+
+  let doLog errs =
+        Otel.addAttributes
+          span
+          $ HashMap.fromList
+          $ ( ("_.postgres.query", Otel.toAttribute @Text errs.query)
+                : ( errs.explain
+                      & \case
+                        Nothing -> []
+                        Just ex -> [("_.postgres.explain", Otel.toAttribute @Text ex)]
+                  )
+            )
+  let doExplain = do
+        q <- formattedQuery
+        Otel.inSpan "Postgres EXPLAIN Query" Otel.defaultSpanArguments $ do
+          queryWithImpl_
+            (pure tools)
+            ( "EXPLAIN "
+                <> (
+                     -- TODO: this is not nice, but the only way to get the `executeMany` form to work with this
+                     -- because we need the query with all elements already interpolated.
+                     Query (q & textToBytesUtf8)
+                   )
+            )
+            (Dec.fromField @Text)
+            <&> Text.intercalate "\n"
+  case logDatabaseQueries of
+    DontLogDatabaseQueries -> pure ()
+    LogDatabaseQueries -> do
+      q <- formattedQuery
+      doLog (T2 (label @"query" q) (label @"explain" Nothing))
+    LogDatabaseQueriesAndExplain -> do
+      q <- formattedQuery
+      -- XXX: stuff like `CREATE SCHEMA` cannot be EXPLAINed, so we should catch exceptions here
+      -- and just ignore anything that errors (if it errors because of a problem with the query, it would have been caught by the query itself).
+      ex <- doExplain
+      doLog (T2 (label @"query" q) (label @"explain" (Just ex)))
+
+-- | Add a start and an end event to the span, and record how long the action in between took.
+--
+-- This is more lightweight than starting an extra span for timing things.
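+--
+-- Usage sketch (@formatAction@ stands in for any action to be timed; cf. 'traceQueryIfEnabled'):
+--
+-- > res <- withEvent span "Query Format start" "Query Format end" formatAction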
+withEvent :: (MonadIO f) => Otel.Span -> Text -> Text -> f b -> f b
+withEvent span start end act = do
+  let mkMs ts = (ts & Otel.timestampNanoseconds & toInteger) `div` 1000_000
+  s <- Otel.getTimestamp
+  Otel.addEvent
+    span
+    ( Otel.NewEvent
+        { newEventName = start,
+          newEventAttributes = mempty,
+          newEventTimestamp = Just s
+        }
+    )
+  res <- act
+  e <- Otel.getTimestamp
+  let tookMs =
+        (mkMs e - mkMs s)
+          -- the difference should be small enough to fit into an Int
+          & fromInteger @Int
+  Otel.addEvent
+    span
+    ( Otel.NewEvent
+        { newEventName = end,
+          newEventAttributes = HashMap.fromList [("took ms", Otel.toAttribute tookMs)],
+          newEventTimestamp = Just e
+        }
+    )
+  pure res
+
+instance (ToField t1) => ToRow (Label l1 t1) where
+  toRow t2 = toRow $ PG.Only $ getField @l1 t2
+
+instance (ToField t1, ToField t2) => ToRow (T2 l1 t1 l2 t2) where
+  toRow t2 = toRow (getField @l1 t2, getField @l2 t2)
+
+instance (ToField t1, ToField t2, ToField t3) => ToRow (T3 l1 t1 l2 t2 l3 t3) where
+  toRow t3 = toRow (getField @l1 t3, getField @l2 t3, getField @l3 t3)
diff --git a/users/Profpatsch/my-prelude/src/Pretty.hs b/users/Profpatsch/my-prelude/src/Pretty.hs
new file mode 100644
index 0000000000..d9d4ce132b
--- /dev/null
+++ b/users/Profpatsch/my-prelude/src/Pretty.hs
@@ -0,0 +1,108 @@
+module Pretty
+  ( -- * Pretty printing for error messages
+    Err,
+    showPretty,
+    showPrettyJson,
+    showedStringPretty,
+    printPretty,
+    printShowedStringPretty,
+    -- constructors hidden
+    prettyErrs,
+    message,
+    messageString,
+    pretty,
+    prettyString,
+    hscolour',
+  )
+where
+
+import Data.Aeson qualified as Json
+import Data.Aeson.Encode.Pretty qualified as Aeson.Pretty
+import Data.List qualified as List
+import Data.Text.Lazy.Builder qualified as Text.Builder
+import Language.Haskell.HsColour
+  ( Output (TTYg),
+    hscolour,
+  )
+import Language.Haskell.HsColour.ANSI (TerminalType (..))
+import Language.Haskell.HsColour.Colourise
+  ( defaultColourPrefs,
+  )
+import PossehlAnalyticsPrelude
+import System.Console.ANSI (setSGRCode)
+import System.Console.ANSI.Types
+  ( Color (Red),
+    ColorIntensity (Dull),
+    ConsoleLayer (Foreground),
+    SGR (Reset, SetColor),
+  )
+import Text.Nicify (nicify)
+
+-- | Print any 'Show'able type to stderr, formatted nicely and in color. Very helpful for debugging.
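+--
+-- For example (a sketch; the actual output is colorized):
+--
+-- > printPretty (Just [1, 2, 3 :: Int])
+-- > -- prints the nicified 'show' output of the value to stderr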
+printPretty :: (Show a) => a -> IO ()
+printPretty a =
+  a & showPretty & putStderrLn
+
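+-- | Like 'printPretty', but return the pretty-printed (colorized) 'Text' instead of printing it to stderr.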
+showPretty :: (Show a) => a -> Text
+showPretty a = a & pretty & (: []) & prettyErrs & stringToText
+
+-- | Pretty-print a string that was produced by `show` to stderr, formatted nicely and in color.
+printShowedStringPretty :: String -> IO ()
+printShowedStringPretty s = s & showedStringPretty & putStderrLn
+
+-- | Pretty-print a string that was produced by `show`
+showedStringPretty :: String -> Text
+showedStringPretty s = s & ErrPrettyString & (: []) & prettyErrs & stringToText
+
+showPrettyJson :: Json.Value -> Text
+showPrettyJson val =
+  val
+    & Aeson.Pretty.encodePrettyToTextBuilder
+    & Text.Builder.toLazyText
+    & toStrict
+
+-- | Display a list of 'Err's as a colored error message.
+prettyErrs :: [Err] -> String
+prettyErrs errs = res
+  where
+    res = List.intercalate "\n" $ map one errs
+    one = \case
+      ErrMsg s -> color Red s
+      ErrPrettyString s -> prettyShowString s
+    -- Pretty print a String that was produced by 'show'
+    prettyShowString :: String -> String
+    prettyShowString = hscolour' . nicify
+
+-- | Small DSL for pretty-printing errors
+data Err
+  = -- | Message to display in the error
+    ErrMsg String
+  | -- | Pretty print a String that was produced by 'show'
+    ErrPrettyString String
+
+-- | Plain message to display, as 'Text'
+message :: Text -> Err
+message = ErrMsg . textToString
+
+-- | Plain message to display, as 'String'
+messageString :: String -> Err
+messageString = ErrMsg
+
+-- | Any 'Show'able to pretty print
+pretty :: (Show a) => a -> Err
+pretty x = ErrPrettyString $ show x
+
+-- | Pretty print a String that was produced by 'show'
+prettyString :: String -> Err
+prettyString s = ErrPrettyString s
+
+-- Prettifying Helpers, mostly stolen from
+-- https://hackage.haskell.org/package/hspec-expectations-pretty-diff-0.7.2.5/docs/src/Test.Hspec.Expectations.Pretty.html#prettyColor
+
+hscolour' :: String -> String
+hscolour' =
+  hscolour (TTYg Ansi16Colour) defaultColourPrefs False False "" False
+
+color :: Color -> String -> String
+color c s = setSGRCode [SetColor Foreground Dull c] ++ s ++ setSGRCode [Reset]
diff --git a/users/Profpatsch/my-prelude/src/Seconds.hs b/users/Profpatsch/my-prelude/src/Seconds.hs
new file mode 100644
index 0000000000..8d05f30be8
--- /dev/null
+++ b/users/Profpatsch/my-prelude/src/Seconds.hs
@@ -0,0 +1,55 @@
+module Seconds where
+
+import Data.Aeson (FromJSON)
+import Data.Aeson qualified as Json
+import Data.Aeson.Types (FromJSON (parseJSON))
+import Data.Scientific
+import Data.Time (NominalDiffTime)
+import FieldParser
+import FieldParser qualified as Field
+import GHC.Natural (naturalToInteger)
+import PossehlAnalyticsPrelude
+
+-- | A natural number of seconds.
+newtype Seconds = Seconds {unSeconds :: Natural}
+  deriving stock (Eq, Show)
+
+-- | Parse a decimal number as a number of seconds
+textToSeconds :: FieldParser Text Seconds
+textToSeconds = Seconds <$> Field.decimalNatural
+
+scientificToSeconds :: FieldParser Scientific Seconds
+scientificToSeconds =
+  ( Field.boundedScientificIntegral @Int "Number of seconds"
+      >>> Field.integralToNatural
+  )
+    & rmap Seconds
+
+-- | Microseconds, represented internally as a 64-bit 'Int'
+newtype MicrosecondsInt = MicrosecondsInt {unMicrosecondsInt :: Int}
+  deriving stock (Eq, Show)
+
+-- | Try to fit a number of seconds into a MicrosecondsInt
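+--
+-- For example: @Seconds 2@ becomes @MicrosecondsInt 2_000_000@; a value that
+-- would overflow 'Int' after the multiplication fails to parse.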
+secondsToMicrosecondsInt :: FieldParser Seconds MicrosecondsInt
+secondsToMicrosecondsInt =
+  lmap
+    (\sec -> naturalToInteger sec.unSeconds * 1_000_000)
+    (Field.bounded "Could not fit into an Int after multiplying with 1_000_000 (seconds to microseconds)")
+    & rmap MicrosecondsInt
+
+secondsToNominalDiffTime :: Seconds -> NominalDiffTime
+secondsToNominalDiffTime sec =
+  sec.unSeconds
+    & naturalToInteger
+    & fromInteger @NominalDiffTime
+
+instance FromJSON Seconds where
+  parseJSON = Field.toParseJSON jsonNumberToSeconds
+
+-- | Parse a json number as a number of seconds.
+jsonNumberToSeconds :: FieldParser' Error Json.Value Seconds
+jsonNumberToSeconds = Field.jsonNumber >>> scientificToSeconds
+
+-- | Return the number of seconds in a week
+secondsInAWeek :: Seconds
+secondsInAWeek = Seconds (3600 * 24 * 7)
diff --git a/users/Profpatsch/my-prelude/src/Test.hs b/users/Profpatsch/my-prelude/src/Test.hs
new file mode 100644
index 0000000000..862ee16c25
--- /dev/null
+++ b/users/Profpatsch/my-prelude/src/Test.hs
@@ -0,0 +1,115 @@
+{-# LANGUAGE LambdaCase #-}
+
+{- Generate test suites.
+
+A restricted version of hspec; introduction: http://hspec.github.io/getting-started.html
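+
+A minimal example suite (a sketch):
+
+    main :: IO ()
+    main = testMain "Example" $ do
+      describe "addition" $
+        it "adds numbers" (1 + 1 `shouldBe` (2 :: Int))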
+-}
+module Test
+  ( Spec,
+    runTest,
+    testMain,
+
+    -- * Structure
+    describe,
+    it,
+
+    -- * Expectations
+    Expectation,
+    testOk,
+    testErr,
+    shouldBe,
+    shouldNotBe,
+    shouldSatisfy,
+    shouldNotSatisfy,
+
+    -- * Setup & Teardown (hooks http://hspec.github.io/writing-specs.html#using-hooks)
+    before,
+    before_,
+    beforeWith,
+    beforeAll,
+    beforeAll_,
+    beforeAllWith,
+    after,
+    after_,
+    afterAll,
+    afterAll_,
+    around,
+    around_,
+    aroundWith,
+    aroundAll,
+    aroundAllWith,
+
+    -- * Common helpful predicates (use with 'shouldSatisfy')
+    isRight,
+    isLeft,
+
+    -- * Pretty printing of errors
+    errColored,
+    module Pretty,
+  )
+where
+
+-- export more expectations if needed
+
+import Data.Either
+  ( isLeft,
+    isRight,
+  )
+import Pretty
+import Test.Hspec
+  ( Expectation,
+    HasCallStack,
+    Spec,
+    after,
+    afterAll,
+    afterAll_,
+    after_,
+    around,
+    aroundAll,
+    aroundAllWith,
+    aroundWith,
+    around_,
+    before,
+    beforeAll,
+    beforeAllWith,
+    beforeAll_,
+    beforeWith,
+    before_,
+    describe,
+    hspec,
+    it,
+  )
+import Test.Hspec.Expectations.Pretty
+  ( expectationFailure,
+    shouldBe,
+    shouldNotBe,
+    shouldNotSatisfy,
+    shouldSatisfy,
+  )
+
+-- | Run a test directly (e.g. from the repl)
+runTest :: Spec -> IO ()
+runTest = hspec
+
+-- | Run a test suite
+testMain ::
+  -- | Name of the test suite
+  String ->
+  -- | The tests in this test module
+  Spec ->
+  IO ()
+testMain testSuiteName tests = hspec $ describe testSuiteName tests
+
+-- | The test expectation that always succeeds (a no-op)
+testOk :: Expectation
+testOk = pure ()
+
+-- | Abort the test with an error message.
+-- If you want to display a Haskell type, use `errColored`.
+testErr :: HasCallStack => String -> Expectation
+testErr = expectationFailure
+
+-- | Display a list of 'Err's as a colored error message
+-- and abort the test.
+errColored :: [Pretty.Err] -> Expectation
+errColored = testErr . Pretty.prettyErrs
diff --git a/users/Profpatsch/my-prelude/src/Tool.hs b/users/Profpatsch/my-prelude/src/Tool.hs
new file mode 100644
index 0000000000..b773f4444e
--- /dev/null
+++ b/users/Profpatsch/my-prelude/src/Tool.hs
@@ -0,0 +1,75 @@
+{-# LANGUAGE QuasiQuotes #-}
+
+module Tool where
+
+import Data.Error.Tree
+import Label
+import PossehlAnalyticsPrelude
+import System.Environment qualified as Env
+import System.Exit qualified as Exit
+import System.FilePath ((</>))
+import System.Posix qualified as Posix
+import ValidationParseT
+
+data Tool = Tool
+  { -- | absolute path to the executable
+    toolPath :: FilePath
+  }
+  deriving stock (Show)
+
+-- | Reads all tools from the @toolsEnvVar@ variable or aborts.
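+--
+-- Usage sketch (assuming a @TOOLS@ environment variable that points at a directory of executables):
+--
+-- > tools <- readTools (label @"toolsEnvVar" "TOOLS") (label @"pgFormat" <$> readTool "pg_format")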
+readTools ::
+  Label "toolsEnvVar" Text ->
+  -- | Parser for Tools we bring with us at build time.
+  --
+  -- These are executables that we need available, and that we have to ship with the distribution of @pa-cli@.
+  ToolParserT IO tools ->
+  IO tools
+readTools env toolParser =
+  Env.lookupEnv (env.toolsEnvVar & textToString) >>= \case
+    Nothing -> do
+      Exit.die [fmt|Please set {env.toolsEnvVar} to a directory with all tools we need (see `Tools` in the code).|]
+    Just toolsDir ->
+      (Posix.fileExist toolsDir & ifTrueOrErr () [fmt|{env.toolsEnvVar} directory does not exist: {toolsDir}|])
+        & thenValidateM
+          ( \() ->
+              (Posix.getFileStatus toolsDir <&> Posix.isDirectory)
+                & ifTrueOrErr () [fmt|{env.toolsEnvVar} does not point to a directory: {toolsDir}|]
+          )
+        & thenValidateM
+          (\() -> toolParser.unToolParser toolsDir)
+        <&> first (errorTree [fmt|Could not find all tools in {env.toolsEnvVar}|])
+        >>= \case
+          Failure err -> Exit.die (err & prettyErrorTree & textToString)
+          Success t -> pure t
+
+newtype ToolParserT m a = ToolParserT
+  { unToolParser ::
+      FilePath ->
+      m (Validation (NonEmpty Error) a)
+  }
+  deriving
+    (Functor, Applicative)
+    via (ValidationParseT FilePath m)
+
+-- | Given a file path and the name of the tool executable, see whether it is an executable and return its full path.
+readTool :: Text -> ToolParserT IO Tool
+readTool exeName = ToolParserT $ \toolDir -> do
+  let toolPath :: FilePath = toolDir </> (exeName & textToString)
+  let read' = True
+  let write = False
+  let exec = True
+  Posix.fileExist toolPath
+    & ifTrueOrErr () [fmt|Tool does not exist: {toolPath}|]
+    & thenValidateM
+      ( \() ->
+          Posix.fileAccess toolPath read' write exec
+            & ifTrueOrErr (Tool {..}) [fmt|Tool is not readable/executable: {toolPath}|]
+      )
+
+-- | Helper: lift a boolean check into a 'Validation', failing with the given error message on 'False'
+ifTrueOrErr :: (Functor f) => a -> Text -> f Bool -> f (Validation (NonEmpty Error) a)
+ifTrueOrErr true err io =
+  io <&> \case
+    True -> Success true
+    False -> Failure $ singleton $ newError err
diff --git a/users/Profpatsch/my-prelude/src/ValidationParseT.hs b/users/Profpatsch/my-prelude/src/ValidationParseT.hs
new file mode 100644
index 0000000000..593b7ebf39
--- /dev/null
+++ b/users/Profpatsch/my-prelude/src/ValidationParseT.hs
@@ -0,0 +1,16 @@
+module ValidationParseT where
+
+import Control.Selective (Selective)
+import Data.Functor.Compose (Compose (..))
+import PossehlAnalyticsPrelude
+
+-- | A simple way to create an Applicative parser that parses from some environment.
+--
+-- Use with DerivingVia. Grep codebase for examples.
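+--
+-- A deriving sketch (@ConfParser@ is hypothetical; 'ToolParserT' in @Tool.hs@ is a real instance of the pattern):
+--
+-- > newtype ConfParser m a = ConfParser (FilePath -> m (Validation (NonEmpty Error) a))
+-- >   deriving (Functor, Applicative, Selective) via (ValidationParseT FilePath m)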
+newtype ValidationParseT env m a = ValidationParseT {unValidationParseT :: env -> m (Validation (NonEmpty Error) a)}
+  deriving
+    (Functor, Applicative, Selective)
+    via ( Compose
+            ((->) env)
+            (Compose m (Validation (NonEmpty Error)))
+        )
diff --git a/users/Profpatsch/my-webstuff/default.nix b/users/Profpatsch/my-webstuff/default.nix
new file mode 100644
index 0000000000..0067235be2
--- /dev/null
+++ b/users/Profpatsch/my-webstuff/default.nix
@@ -0,0 +1,27 @@
+{ depot, pkgs, lib, ... }:
+
+pkgs.haskellPackages.mkDerivation {
+  pname = "my-webstuff";
+  version = "0.0.1-unreleased";
+
+  src = depot.users.Profpatsch.exactSource ./. [
+    ./my-webstuff.cabal
+    ./src/Multipart2.hs
+  ];
+
+  isLibrary = true;
+
+  libraryHaskellDepends = [
+    depot.users.Profpatsch.my-prelude
+    pkgs.haskellPackages.dlist
+    pkgs.haskellPackages.monad-logger
+    pkgs.haskellPackages.pa-error-tree
+    pkgs.haskellPackages.pa-field-parser
+    pkgs.haskellPackages.pa-prelude
+    pkgs.haskellPackages.selective
+    pkgs.haskellPackages.wai-extra
+  ];
+
+  license = lib.licenses.mit;
+
+}
diff --git a/users/Profpatsch/my-webstuff/my-webstuff.cabal b/users/Profpatsch/my-webstuff/my-webstuff.cabal
new file mode 100644
index 0000000000..fb42d9f6a5
--- /dev/null
+++ b/users/Profpatsch/my-webstuff/my-webstuff.cabal
@@ -0,0 +1,72 @@
+cabal-version:      3.0
+name:               my-webstuff
+version:            0.0.1.0
+author:             Profpatsch
+maintainer:         mail@profpatsch.de
+
+common common-options
+  ghc-options:
+      -Wall
+      -Wno-type-defaults
+      -Wunused-packages
+      -Wredundant-constraints
+      -fwarn-missing-deriving-strategies
+
+  -- See https://downloads.haskell.org/ghc/latest/docs/users_guide/exts.html
+  -- for a description of all these extensions
+  default-extensions:
+      -- Infer Applicative instead of Monad where possible
+    ApplicativeDo
+
+    -- Allow literal strings to be Text
+    OverloadedStrings
+
+    -- Syntactic sugar improvements
+    LambdaCase
+    MultiWayIf
+
+    -- Makes the (deprecated) usage of * instead of Data.Kind.Type an error
+    NoStarIsType
+
+    -- Convenient and crucial to deal with ambiguous field names, commonly
+    -- known as RecordDotSyntax
+    OverloadedRecordDot
+
+    -- does not export record fields as functions, use OverloadedRecordDot to access instead
+    NoFieldSelectors
+
+    -- Record punning
+    RecordWildCards
+
+    -- Improved Deriving
+    DerivingStrategies
+    DerivingVia
+
+    -- Type-level strings
+    DataKinds
+
+    -- to enable the `type` keyword in import lists (ormolu uses this automatically)
+    ExplicitNamespaces
+
+  default-language: GHC2021
+
+
+library
+    import: common-options
+    hs-source-dirs: src
+    exposed-modules:
+      Multipart2
+
+    build-depends:
+       base >=4.15 && <5
+     , my-prelude
+     , pa-prelude
+     , pa-label
+     , pa-error-tree
+     , pa-field-parser
+     , bytestring
+     , monad-logger
+     , dlist
+     , selective
+     , wai
+     , wai-extra
diff --git a/users/Profpatsch/my-webstuff/src/Multipart2.hs b/users/Profpatsch/my-webstuff/src/Multipart2.hs
new file mode 100644
index 0000000000..5c283a3c1b
--- /dev/null
+++ b/users/Profpatsch/my-webstuff/src/Multipart2.hs
@@ -0,0 +1,220 @@
+{-# LANGUAGE QuasiQuotes #-}
+
+module Multipart2 where
+
+import Control.Monad.Logger (MonadLogger)
+import Control.Selective (Selective)
+import Data.ByteString.Lazy qualified as Lazy
+import Data.DList (DList)
+import Data.DList qualified as DList
+import Data.Error.Tree
+import Data.Functor.Compose
+import Data.List qualified as List
+import FieldParser
+import Label
+import Network.Wai qualified as Wai
+import Network.Wai.Parse qualified as Wai
+import PossehlAnalyticsPrelude
+import ValidationParseT
+
+data FormFields = FormFields
+  { inputs :: [Wai.Param],
+    files :: [MultipartFile Lazy.ByteString]
+  }
+
+-- | A parser for an HTTP multipart form (a form sent by the browser)
+newtype MultipartParseT backend m a = MultipartParseT
+  { unMultipartParseT ::
+      FormFields ->
+      m (Validation (NonEmpty Error) a)
+  }
+  deriving
+    (Functor, Applicative, Selective)
+    via (ValidationParseT FormFields m)
+
+-- | After parsing a form, either we get the result or a list of form fields that failed
+newtype FormValidation a
+  = FormValidation
+      (DList FormValidationResult, Maybe a)
+  deriving (Functor, Applicative, Selective) via (Compose ((,) (DList FormValidationResult)) Maybe)
+  deriving stock (Show)
+
+data FormValidationResult = FormValidationResult
+  { hasError :: Maybe Error,
+    formFieldName :: ByteString,
+    originalValue :: ByteString
+  }
+  deriving stock (Show)
+
+mkFormValidationResult ::
+  ( HasField "formFieldName" form ByteString,
+    HasField "originalValue" form ByteString
+  ) =>
+  form ->
+  Maybe Error ->
+  FormValidationResult
+mkFormValidationResult form err =
+  FormValidationResult
+    { hasError = err,
+      formFieldName = form.formFieldName,
+      originalValue = form.originalValue
+    }
+
+eitherToFormValidation ::
+  ( HasField "formFieldName" form ByteString,
+    HasField "originalValue" form ByteString
+  ) =>
+  form ->
+  Either Error a ->
+  FormValidation a
+eitherToFormValidation form = \case
+  Left err ->
+    FormValidation $ (DList.singleton $ mkFormValidationResult form (Just err), Nothing)
+  Right a ->
+    FormValidation $ ((DList.singleton $ mkFormValidationResult form Nothing), Just a)
+
+failFormValidation ::
+  ( HasField "formFieldName" form ByteString,
+    HasField "originalValue" form ByteString
+  ) =>
+  form ->
+  Error ->
+  FormValidation a
+failFormValidation form err =
+  FormValidation (DList.singleton $ mkFormValidationResult form (Just err), Nothing)
+
+-- | Parse the multipart form or throw a user error with a descriptive error message.
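+--
+-- Usage sketch (field names, parsers, and the @throwF@ handler are illustrative):
+--
+-- > user <-
+-- >   parseMultipartOrThrow throwF
+-- >     (T2 <$> fieldLabel @"name" "name" utf8
+-- >         <*> fieldLabel @"age" "age" (utf8 >>> decimalNatural))
+-- >     req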
+parseMultipartOrThrow ::
+  (MonadLogger m, MonadIO m) =>
+  (ErrorTree -> m a) ->
+  MultipartParseT backend m a ->
+  Wai.Request ->
+  m a
+parseMultipartOrThrow throwF parser req = do
+  -- TODO: this throws all errors with `error`, so leads to 500 on bad input …
+  formFields <-
+    liftIO $
+      Wai.parseRequestBodyEx
+        Wai.defaultParseRequestBodyOptions
+        Wai.lbsBackEnd
+        req
+  parser.unMultipartParseT
+    FormFields
+      { inputs = fst formFields,
+        files = map fileDataToMultipartFile $ snd formFields
+      }
+    >>= \case
+      Failure errs -> throwF (errorTree "Cannot parse the multipart form" errs)
+      Success a -> pure a
+
+-- | Parse the field out of the multipart message
+field :: (Applicative m) => ByteString -> FieldParser ByteString a -> MultipartParseT backend m a
+field fieldName fieldParser = MultipartParseT $ \mp ->
+  mp.inputs
+    & findMaybe (\input -> if fst input == fieldName then Just (snd input) else Nothing)
+    & annotate [fmt|Field "{fieldName}" does not exist in the multipart form|]
+    >>= runFieldParser fieldParser
+    & eitherToListValidation
+    & pure
+
+-- | Like 'field', but collect the field parser’s error into a 'FormValidation' (a missing field still fails the whole parse)
+field' :: (Applicative m) => ByteString -> FieldParser ByteString a -> MultipartParseT backend m (FormValidation a)
+field' fieldName fieldParser = MultipartParseT $ \mp ->
+  mp.inputs
+    & findMaybe (\input -> if fst input == fieldName then Just $ snd input else Nothing)
+    & annotate [fmt|Field "{fieldName}" does not exist in the multipart form|]
+    <&> ( \originalValue ->
+            originalValue
+              & runFieldParser fieldParser
+              & eitherToFormValidation
+                ( T2
+                    (label @"formFieldName" fieldName)
+                    (label @"originalValue" originalValue)
+                )
+        )
+    & eitherToListValidation
+    & pure
+
+-- | Parse the field out of the multipart message, and into a 'Label' of the given name.
+fieldLabel :: forall lbl backend m a. (Applicative m) => ByteString -> FieldParser ByteString a -> MultipartParseT backend m (Label lbl a)
+fieldLabel fieldName fieldParser = label @lbl <$> field fieldName fieldParser
+
+-- | Like 'fieldLabel', but with the 'FormValidation' error collection of @field'@.
+fieldLabel' :: forall lbl backend m a. (Applicative m) => ByteString -> FieldParser ByteString a -> MultipartParseT backend m (FormValidation (Label lbl a))
+fieldLabel' fieldName fieldParser = fmap (label @lbl) <$> field' fieldName fieldParser
+
+-- | Parse all fields out of the multipart message, with the same parser
+allFields :: (Applicative m) => FieldParser (T2 "key" ByteString "value" ByteString) b -> MultipartParseT backend m [b]
+allFields fieldParser = MultipartParseT $ \mp ->
+  mp.inputs
+    <&> tupToT2 @"key" @"value"
+    & traverseValidate (runFieldParser fieldParser)
+    & eitherToValidation
+    & pure
+
+tupToT2 :: forall l1 l2 t1 t2. (t1, t2) -> T2 l1 t1 l2 t2
+tupToT2 (a, b) = T2 (label a) (label b)
+
+-- | Parse a file by name out of the multipart message
+file ::
+  (Applicative m) =>
+  ByteString ->
+  MultipartParseT backend m (MultipartFile Lazy.ByteString)
+file fieldName = MultipartParseT $ \mp ->
+  mp.files
+    & List.find (\input -> input.multipartNameAttribute == fieldName)
+    & annotate [fmt|File "{fieldName}" does not exist in the multipart form|]
+    & ( \case
+          Left err -> Failure (singleton err)
+          Right filePath -> Success filePath
+      )
+    & pure
+
+-- | Return all files from the multipart message
+allFiles ::
+  (Applicative m) =>
+  MultipartParseT backend m [MultipartFile Lazy.ByteString]
+allFiles = MultipartParseT $ \mp -> do
+  pure $ Success $ mp.files
+
+-- | Ensure there is exactly one file and return it (ignoring the field name)
+exactlyOneFile ::
+  (Applicative m) =>
+  MultipartParseT backend m (MultipartFile Lazy.ByteString)
+exactlyOneFile = MultipartParseT $ \mp ->
+  mp.files
+    & \case
+      [] -> pure $ failParse "Expected to receive a file, but the multipart form did not contain any files"
+      [file_] -> pure $ Success file_
+      more -> pure $ failParse [fmt|Expected to receive exactly one file, but the multipart form contained {List.length more} files|]
+  where
+    -- \| Fail to parse the multipart form with the given error message.
+    failParse :: Text -> Validation (NonEmpty Error) a
+    failParse = Failure . singleton . newError
+
+newtype GetFileContent backend m content = GetFileContent
+  {unGetFileContent :: (Wai.Request -> m (Either Error content))}
+
+-- | A file field in a multipart message.
+data MultipartFile content = MultipartFile
+  { -- | @name@ attribute of the corresponding HTML @\<input\>@
+    multipartNameAttribute :: ByteString,
+    -- | name of the file on the client's disk
+    fileNameOnDisk :: ByteString,
+    -- | MIME type for the file
+    fileMimeType :: ByteString,
+    -- | Content of the file
+    content :: content
+  }
+
+-- | Convert the multipart library struct of a multipart file to our own.
+fileDataToMultipartFile ::
+  Wai.File Lazy.ByteString ->
+  (MultipartFile Lazy.ByteString)
+fileDataToMultipartFile (multipartNameAttribute, file_) = do
+  MultipartFile
+    { multipartNameAttribute,
+      fileNameOnDisk = file_.fileName,
+      fileMimeType = file_.fileContentType,
+      content = file_.fileContent
+    }
diff --git a/users/Profpatsch/my-xmonad/Xmonad.hs b/users/Profpatsch/my-xmonad/Xmonad.hs
new file mode 100644
index 0000000000..bb727ac2f1
--- /dev/null
+++ b/users/Profpatsch/my-xmonad/Xmonad.hs
@@ -0,0 +1,127 @@
+module Main where
+
+import Data.Function ((&))
+import XMonad
+import XMonad qualified as Xmonad
+import XMonad.Hooks.EwmhDesktops (ewmh)
+import XMonad.Layout.Decoration
+import XMonad.Layout.MultiToggle
+import XMonad.Layout.MultiToggle.Instances (StdTransformers (..))
+import XMonad.Layout.Tabbed (TabbedDecoration)
+import XMonad.Layout.Tabbed qualified as Tabbed
+import XMonad.StackSet qualified as StackSet
+import XMonad.Util.Cursor (setDefaultCursor)
+import XMonad.Util.EZConfig (additionalKeys, additionalKeysP, removeKeysP)
+
+data Mode = Normal | Presentation
+
+main :: IO ()
+main = do
+  let config = ewmh myConfig
+  dirs <- Xmonad.getDirectories
+  Xmonad.launch config dirs
+
+myConfig ::
+  XConfig
+    ( MultiToggle
+        ( HCons
+            StdTransformers
+            XMonad.Layout.MultiToggle.EOT
+        )
+        ( ModifiedLayout
+            ( Decoration
+                TabbedDecoration
+                DefaultShrinker
+            )
+            Tall
+        )
+    )
+myConfig =
+  conf
+    { modMask = modKey,
+      terminal = term Normal,
+      focusedBorderColor = "#859900",
+      layoutHook = layout,
+      startupHook = setDefaultCursor xC_heart,
+      workspaces = workspaceNames
+    }
+    `additionalKeysP` ( [
+                          -- fullscreen
+                          ("M-e", sendMessage $ Toggle NBFULL),
+                          -- i3-like keybindings, because I’m spoiled
+                          ("M-S-x", kill),
+                          -- exchange M-Ret and M-S-Ret
+                          ("M-<Return>", spawn $ term Normal),
+                          ("C-M-<Return>", spawn $ term Presentation),
+                          ("M-S-<Return>", windows StackSet.swapMaster)
+                          -- open simple exec dmenu
+                        ]
+                          ++
+                          -- something something workspaces
+                          [ (otherModMasks ++ "M-" ++ [key], action tag)
+                            | (tag, key) <- zip workspaceNames "123456789",
+                              (otherModMasks, action) <-
+                                [ ("", windows . StackSet.greedyView),
+                                  ("S-", windows . StackSet.shift)
+                                ]
+                          ]
+                          ++
+                          -- mod-{w,e,r} %! Switch to physical/Xinerama screens 1, 2, or 3
+                          -- mod-shift-{w,e,r} %! Move client to screen 1, 2, or 3
+                          [ ("M-v", focusToScreen 0),
+                            -- , ("M-l", focusToScreen 1)
+                            ("M-c", focusToScreen 2),
+                            ("M-S-v", windowToScreen 0),
+                            ("M-S-l", windowToScreen 1),
+                            ("M-S-c", windowToScreen 2)
+                          ]
+                          -- ((m .|. modMask, key), screenWorkspace sc >>= flip whenJust (windows . f))
+                          --   | (key, sc) <- zip [xK_w, xK_e, xK_r] [0..]
+                          --    , (f, m) <- [(W.view, 0), (W.shift, shiftMask)]]
+                      )
+    `additionalKeys`
+    -- arrow keys should move as well (hjkl blindness)
+    [ ((modKey, xK_Up), windows StackSet.focusUp),
+      ((modKey, xK_Down), windows StackSet.focusDown)
+    ]
+    `removeKeysP` [
+                    -- previous kill command
+                    "M-S-c",
+                    -- It is way too easy to kill everything by default
+                    "M-S-q",
+                    -- no idea, I want to use it for Mozc
+                    "M-n"
+                  ]
+  where
+    conf = def
+    workspaceNames = conf & workspaces
+    modKey = mod4Mask
+    -- TODO: meh
+    term :: Mode -> String
+    -- TODO: get terminal-emulator from the system config (currently alacritty)
+    term Normal = "terminal-emulator"
+    term Presentation = "notify-send TODO: terminal presentation mode not implemented yet" -- "terminal- -u ~/.config/lilyterm/pres.conf"
+    toScreen with number = screenWorkspace number >>= \ws -> whenJust ws (windows . with)
+    focusToScreen = toScreen StackSet.view
+    windowToScreen = toScreen StackSet.shift
+
+-- copied from Xmonad.Config
+layout ::
+  MultiToggle
+    (HCons StdTransformers EOT)
+    (ModifiedLayout (Decoration TabbedDecoration DefaultShrinker) Tall)
+    Window
+layout =
+  tiled
+    & Tabbed.addTabsBottom Tabbed.shrinkText def
+    & toggleFullscreen
+  where
+    -- default tiling algorithm partitions the screen into two panes
+    tiled = Tall nmaster delta ratio
+    -- The default number of windows in the master pane
+    nmaster = 1
+    -- Default proportion of screen occupied by master pane
+    ratio = 1 / 2
+    -- Percent of screen to increment by when resizing panes
+    delta = 3 / 100
+    toggleFullscreen = mkToggle1 NBFULL
diff --git a/users/Profpatsch/my-xmonad/default.nix b/users/Profpatsch/my-xmonad/default.nix
new file mode 100644
index 0000000000..708d50e960
--- /dev/null
+++ b/users/Profpatsch/my-xmonad/default.nix
@@ -0,0 +1,25 @@
+{ depot, pkgs, lib, ... }:
+
+let
+  #   bins = depot.nix.getBins pkgs.sqlite ["sqlite3"];
+
+  my-xmonad = pkgs.haskellPackages.mkDerivation {
+    pname = "my-xmonad";
+    version = "0.1.0";
+
+    src = depot.users.Profpatsch.exactSource ./. [
+      ./my-xmonad.cabal
+      ./Xmonad.hs
+    ];
+
+    libraryHaskellDepends = [
+      pkgs.haskellPackages.xmonad-contrib
+    ];
+
+    isExecutable = true;
+    isLibrary = false;
+    license = lib.licenses.mit;
+  };
+
+in
+my-xmonad
diff --git a/users/Profpatsch/my-xmonad/my-xmonad.cabal b/users/Profpatsch/my-xmonad/my-xmonad.cabal
new file mode 100644
index 0000000000..175c6c1633
--- /dev/null
+++ b/users/Profpatsch/my-xmonad/my-xmonad.cabal
@@ -0,0 +1,62 @@
+cabal-version:      3.0
+name:               my-xmonad
+version:            0.1.0.0
+author:             Profpatsch
+maintainer:         mail@profpatsch.de
+
+common common-options
+  ghc-options:
+      -Wall
+      -Wno-type-defaults
+      -Wunused-packages
+      -Wredundant-constraints
+      -fwarn-missing-deriving-strategies
+
+  -- See https://downloads.haskell.org/ghc/latest/docs/users_guide/exts.html
+  -- for a description of all these extensions
+  default-extensions:
+      -- Infer Applicative instead of Monad where possible
+    ApplicativeDo
+
+    -- Allow literal strings to be Text
+    OverloadedStrings
+
+    -- Syntactic sugar improvements
+    LambdaCase
+    MultiWayIf
+
+    -- Makes the (deprecated) usage of * instead of Data.Kind.Type an error
+    NoStarIsType
+
+    -- Convenient and crucial to deal with ambiguous field names, commonly
+    -- known as RecordDotSyntax
+    OverloadedRecordDot
+
+    -- does not export record fields as functions, use OverloadedRecordDot to access instead
+    NoFieldSelectors
+
+    -- Record punning
+    RecordWildCards
+
+    -- Improved Deriving
+    DerivingStrategies
+    DerivingVia
+
+    -- Type-level strings
+    DataKinds
+
+    -- to enable the `type` keyword in import lists (ormolu uses this automatically)
+    ExplicitNamespaces
+
+  default-language: GHC2021
+
+
+executable xmonad
+    import: common-options
+
+    main-is: Xmonad.hs
+
+    build-depends:
+        base >=4.15 && <5,
+        xmonad,
+        xmonad-contrib
diff --git a/users/Profpatsch/netencode/Netencode.hs b/users/Profpatsch/netencode/Netencode.hs
new file mode 100644
index 0000000000..ca93ab2fef
--- /dev/null
+++ b/users/Profpatsch/netencode/Netencode.hs
@@ -0,0 +1,433 @@
+{-# LANGUAGE AllowAmbiguousTypes #-}
+{-# LANGUAGE QuasiQuotes #-}
+{-# LANGUAGE TemplateHaskell #-}
+
+module Netencode where
+
+import Control.Applicative (many)
+import Data.Attoparsec.ByteString qualified as Atto
+import Data.Attoparsec.ByteString.Char8 qualified as Atto.Char
+import Data.ByteString qualified as ByteString
+import Data.ByteString.Builder (Builder)
+import Data.ByteString.Builder qualified as Builder
+import Data.ByteString.Lazy qualified as ByteString.Lazy
+import Data.Fix (Fix (Fix))
+import Data.Fix qualified as Fix
+import Data.Functor.Classes (Eq1 (liftEq))
+import Data.Int (Int16, Int32, Int64, Int8)
+import Data.Map.NonEmpty (NEMap)
+import Data.Map.NonEmpty qualified as NEMap
+import Data.Semigroup qualified as Semi
+import Data.String (IsString)
+import Data.Word (Word16, Word32, Word64)
+import GHC.Exts (fromString)
+import Hedgehog qualified as Hedge
+import Hedgehog.Gen qualified as Gen
+import Hedgehog.Range qualified as Range
+import PossehlAnalyticsPrelude
+import Text.Show.Deriving
+import Prelude hiding (sum)
+
+-- | Netencode type base functor.
+--
+-- Recursive elements have a @rec@.
+data TF rec
+  = -- | Unit value
+    Unit
+  | -- | Boolean (2^1)
+    N1 Bool
+  | -- | Byte (2^3)
+    N3 Word8
+  | -- | 64-bit Natural (2^6)
+    N6 Word64
+  | -- | 64-bit Integer (2^6)
+    I6 Int64
+  | -- | Unicode Text
+    Text Text
+  | -- | Arbitrary Bytestring
+    Bytes ByteString
+  | -- | A constructor of a(n open) Sum
+    Sum (Tag Text rec)
+  | -- | Record
+    Record (NEMap Text rec)
+  | -- | List
+    List [rec]
+  deriving stock (Show, Eq, Functor)
+
+instance Eq1 TF where
+  liftEq _ Unit Unit = True
+  liftEq _ (N1 b) (N1 b') = b == b'
+  liftEq _ (N3 w8) (N3 w8') = w8 == w8'
+  liftEq _ (N6 w64) (N6 w64') = w64 == w64'
+  liftEq _ (I6 i64) (I6 i64') = i64 == i64'
+  liftEq _ (Text t) (Text t') = t == t'
+  liftEq _ (Bytes b) (Bytes b') = b == b'
+  liftEq eq (Sum t) (Sum t') = eq (t.tagVal) (t'.tagVal)
+  liftEq eq (Record m) (Record m') = liftEq eq m m'
+  liftEq eq (List xs) (List xs') = liftEq eq xs xs'
+  liftEq _ _ _ = False
+
+-- | A tagged value
+data Tag tag val = Tag
+  { tagTag :: tag,
+    tagVal :: val
+  }
+  deriving stock (Show, Eq, Functor)
+
+$(Text.Show.Deriving.deriveShow1 ''Tag)
+$(Text.Show.Deriving.deriveShow1 ''TF)
+
+-- | The Netencode type
+newtype T = T {unT :: Fix TF}
+  deriving stock (Eq, Show)
+
+-- | Create a unit
+unit :: T
+unit = T $ Fix Unit
+
+-- | Create a boolean
+n1 :: Bool -> T
+n1 = T . Fix . N1
+
+-- | Create a byte
+n3 :: Word8 -> T
+n3 = T . Fix . N3
+
+-- | Create a 64-bit natural
+n6 :: Word64 -> T
+n6 = T . Fix . N6
+
+-- | Create a 64-bit integer
+i6 :: Int64 -> T
+i6 = T . Fix . I6
+
+-- | Create a UTF-8 unicode text
+text :: Text -> T
+text = T . Fix . Text
+
+-- | Create an arbitrary bytestring
+bytes :: ByteString -> T
+bytes = T . Fix . Bytes
+
+-- | Create a tagged value from a tag name and a value
+tag :: Text -> T -> T
+tag key val = T $ Fix $ Sum $ coerce @(Tag Text T) @(Tag Text (Fix TF)) $ Tag key val
+
+-- | Create a record from a non-empty map
+record :: NEMap Text T -> T
+record = T . Fix . Record . coerce @(NEMap Text T) @(NEMap Text (Fix TF))
+
+-- | Create a list
+list :: [T] -> T
+list = T . Fix . List . coerce @[T] @([Fix TF])
+
+-- | Stable encoding of a netencode value. Record keys will be sorted lexicographically ascending.
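+--
+-- For example:
+--
+-- > netencodeEncodeStable (record (NEMap.singleton "foo" (text "bar")))
+-- > -- encodes to "{14:<3:foo|t3:bar,}"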
+netencodeEncodeStable :: T -> Builder
+netencodeEncodeStable (T fix) = Fix.foldFix (netencodeEncodeStableF id) fix
+
+-- | Stable encoding of a netencode functor value. Record keys will be sorted lexicographically ascending.
+--
+-- The given function is used for encoding the recursive values.
+netencodeEncodeStableF :: (rec -> Builder) -> TF rec -> Builder
+netencodeEncodeStableF inner tf = builder go
+  where
+    -- TODO: directly pass in BL?
+    innerBL = fromBuilder . inner
+    go = case tf of
+      Unit -> "u,"
+      N1 False -> "n1:0,"
+      N1 True -> "n1:1,"
+      N3 w8 -> "n3:" <> fromBuilder (Builder.word8Dec w8) <> ","
+      N6 w64 -> "n6:" <> fromBuilder (Builder.word64Dec w64) <> ","
+      I6 i64 -> "i6:" <> fromBuilder (Builder.int64Dec i64) <> ","
+      Text t ->
+        let b = fromText t
+         in "t" <> builderLenDec b <> ":" <> b <> ","
+      Bytes b -> "b" <> builderLenDec (fromByteString b) <> ":" <> fromByteString b <> ","
+      Sum (Tag key val) -> encTag key val
+      Record m ->
+        -- NEMap uses Map internally, and that folds in lexicographic ascending order over the key.
+        -- Since these are `Text` in our case, this is stable.
+        let mBuilder = m & NEMap.foldMapWithKey encTag
+         in "{" <> builderLenDec mBuilder <> ":" <> mBuilder <> "}"
+      List xs ->
+        let xsBuilder = xs <&> innerBL & mconcat
+         in "[" <> builderLenDec xsBuilder <> ":" <> xsBuilder <> "]"
+      where
+        encTag key val =
+          let bKey = fromText key
+           in "<" <> builderLenDec bKey <> ":" <> bKey <> "|" <> innerBL val
+
+-- | A builder that knows its own size in bytes
+newtype BL = BL (Builder, Semi.Sum Natural)
+  deriving newtype (Monoid, Semigroup)
+
+instance IsString BL where
+  fromString s =
+    BL
+      ( fromString @Builder s,
+        fromString @ByteString s
+          & ByteString.length
+          & intToNatural
+          & fromMaybe 0
+          & Semi.Sum
+      )
+
+-- | Retrieve the builder
+builder :: BL -> Builder
+builder (BL (b, _)) = b
+
+-- | Retrieve the bytestring length
+builderLen :: BL -> Natural
+builderLen (BL (_, len)) = Semi.getSum $ len
+
+-- | Take a 'BL' and create a new 'BL' that represents the length as a decimal integer
+builderLenDec :: BL -> BL
+builderLenDec (BL (_, len)) =
+  let b = Builder.intDec $ (len & Semi.getSum & fromIntegral @Natural @Int)
+   in b & fromBuilder
+
+-- | Create a 'BL' from a 'Builder'.
+--
+-- Not efficient, goes back to a lazy bytestring to get the length
+fromBuilder :: Builder -> BL
+fromBuilder b =
+  BL
+    ( b,
+      b
+        & Builder.toLazyByteString
+        & ByteString.Lazy.length
+        & fromIntegral @Int64 @Natural
+        & Semi.Sum
+    )
+
+-- | Create a 'BL' from a 'ByteString'.
+fromByteString :: ByteString -> BL
+fromByteString b =
+  BL
+    ( Builder.byteString b,
+      b
+        & ByteString.length
+        & fromIntegral @Int @Natural
+        & Semi.Sum
+    )
+
+-- | Create a 'BL' from a 'Text'.
+fromText :: Text -> BL
+fromText t = t & textToBytesUtf8 & fromByteString
+
+-- | Parser for a netencode value.
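+--
+-- For example:
+--
+-- > Atto.parseOnly netencodeParser "t5:hello," == Right (text "hello")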
+netencodeParser :: Atto.Parser T
+netencodeParser = T <$> go
+  where
+    go = Fix <$> netencodeParserF go
+
+-- | Parser for one level of a netencode value. Requires a parser for the recursion.
+netencodeParserF :: Atto.Parser rec -> Atto.Parser (TF rec)
+netencodeParserF inner = do
+  typeTag <- Atto.Char.anyChar
+  case typeTag of
+    't' -> Text <$> textParser
+    'b' -> Bytes <$> bytesParser
+    'u' -> unitParser
+    '<' -> Sum <$> tagParser
+    '{' -> Record <$> recordParser
+    '[' -> List <$> listParser
+    'n' -> naturalParser
+    'i' -> I6 <$> intParser
+    c -> fail ([c] <> " is not a valid netencode tag")
+  where
+    bytesParser = do
+      len <- boundedDecimalFail Atto.<?> "bytes is missing a digit specifying the length"
+      _ <- Atto.Char.char ':' Atto.<?> "bytes did not have : after length"
+      bytes' <- Atto.take len
+      _ <- Atto.Char.char ',' Atto.<?> "bytes did not end with ,"
+      pure bytes'
+
+    textParser = do
+      len <- boundedDecimalFail Atto.<?> "text is missing a digit specifying the length"
+      _ <- Atto.Char.char ':' Atto.<?> "text did not have : after length"
+      text' <-
+        Atto.take len <&> bytesToTextUtf8 >>= \case
+          Left err -> fail [fmt|cannot decode text as utf8: {err & prettyError}|]
+          Right t -> pure t
+      _ <- Atto.Char.char ',' Atto.<?> "text did not end with ,"
+      pure text'
+
+    unitParser = do
+      _ <- Atto.Char.char ',' Atto.<?> "unit did not end with ,"
+      pure $ Unit
+
+    tagParser = do
+      len <- boundedDecimalFail Atto.<?> "tag is missing a digit specifying the length"
+      _ <- Atto.Char.char ':' Atto.<?> "tag did not have : after length"
+      tagTag <-
+        Atto.take len <&> bytesToTextUtf8 >>= \case
+          Left err -> fail [fmt|cannot decode tag key as utf8: {err & prettyError}|]
+          Right t -> pure t
+      _ <- Atto.Char.char '|' Atto.<?> "tag was missing the key/value separator (|)"
+      tagVal <- inner
+      pure $ Tag {..}
+
+    recordParser = do
+      -- TODO: the record does not use its inner length because we are descending into the inner parsers.
+      -- This is a smell! In theory it can be used to skip parsing the whole inner keys.
+      _len <- boundedDecimalFail Atto.<?> "record is missing a digit specifying the length"
+      _ <- Atto.Char.char ':' Atto.<?> "record did not have : after length"
+      record' <-
+        many (Atto.Char.char '<' >> tagParser) <&> nonEmpty >>= \case
+          Nothing -> fail "record is not allowed to have 0 elements"
+          Just tags ->
+            pure $
+              tags
+                <&> (\t -> (t.tagTag, t.tagVal))
+                -- later keys are preferred if they are duplicates, according to the standard
+                & NEMap.fromList
+      _ <- Atto.Char.char '}' Atto.<?> "record did not end with }"
+      pure record'
+
+    listParser = do
+      -- TODO: the list does not use its inner length because we are descending into the inner parsers.
+      -- This is a smell! In theory it can be used to skip parsing all the inner elements.
+      _len <- boundedDecimalFail Atto.<?> "list is missing a digit specifying the length"
+      _ <- Atto.Char.char ':' Atto.<?> "list did not have : after length"
+      -- TODO: allow empty lists?
+      list' <- many inner
+      _ <- Atto.Char.char ']' Atto.<?> "list did not end with ]"
+      pure list'
+
+    intParser = do
+      let p :: forall parseSize. (Bounded parseSize, Integral parseSize) => (Integer -> Atto.Parser Int64)
+          p n = do
+            _ <- Atto.Char.char ':' Atto.<?> [fmt|i{n & show} did not have : after length|]
+            isNegative <- Atto.option False (Atto.Char.char '-' <&> \_c -> True)
+            int <-
+              boundedDecimal @parseSize >>= \case
+                Nothing -> fail [fmt|cannot parse into i{n & show}, the number is too big (would overflow)|]
+                Just i ->
+                  pure $
+                    if isNegative
+                      then -- TODO: this should already be done in the decimal parser; @minBound@ cannot be parsed because it’s one more than @(-maxBound)@!
+                        (-i)
+                      else i
+            _ <- Atto.Char.char ',' Atto.<?> [fmt|i{n & show} did not end with ,|]
+            pure $ fromIntegral @parseSize @Int64 int
+      digit <- Atto.Char.digit
+      case digit of
+        -- TODO: separate parser for i1 and i2 that makes sure the boundaries are right!
+        '1' -> p @Int8 1
+        '2' -> p @Int8 2
+        '3' -> p @Int8 3
+        '4' -> p @Int16 4
+        '5' -> p @Int32 5
+        '6' -> p @Int64 6
+        '7' -> fail [fmt|i parser only supports numbers up to size 6, was 7|]
+        '8' -> fail [fmt|i parser only supports numbers up to size 6, was 8|]
+        '9' -> fail [fmt|i parser only supports numbers up to size 6, was 9|]
+        o -> fail [fmt|i number with length {o & show} not possible|]
+
+    naturalParser = do
+      let p :: forall parseSize finalSize. (Bounded parseSize, Integral parseSize, Num finalSize) => (Integer -> Atto.Parser finalSize)
+          p n = do
+            _ <- Atto.Char.char ':' Atto.<?> [fmt|n{n & show} did not have : after length|]
+            int <-
+              boundedDecimal @parseSize >>= \case
+                Nothing -> fail [fmt|cannot parse into n{n & show}, the number is too big (would overflow)|]
+                Just i -> pure i
+
+            _ <- Atto.Char.char ',' Atto.<?> [fmt|n{n & show} did not end with ,|]
+            pure $ fromIntegral @parseSize @finalSize int
+      let b n = do
+            _ <- Atto.Char.char ':' Atto.<?> [fmt|n{n & show} did not have : after length|]
+            bool <-
+              (Atto.Char.char '0' >> pure False)
+                <|> (Atto.Char.char '1' >> pure True)
+            _ <- Atto.Char.char ',' Atto.<?> [fmt|n{n & show} did not end with ,|]
+            pure bool
+
+      digit <- Atto.Char.digit
+      case digit of
+        -- TODO: separate parser for n1 and n2 that makes sure the boundaries are right!
+        '1' -> N1 <$> b 1
+        '2' -> N3 <$> p @Word8 @Word8 2
+        '3' -> N3 <$> p @Word8 @Word8 3
+        '4' -> N6 <$> p @Word16 @Word64 4
+        '5' -> N6 <$> p @Word32 @Word64 5
+        '6' -> N6 <$> p @Word64 @Word64 6
+        '7' -> fail [fmt|n parser only supports numbers up to size 6, was 7|]
+        '8' -> fail [fmt|n parser only supports numbers up to size 6, was 8|]
+        '9' -> fail [fmt|n parser only supports numbers up to size 6, was 9|]
+        o -> fail [fmt|n number with length {o & show} not possible|]
+
+-- | Parser for a bounded decimal; returns 'Nothing' if the parsed number would overflow the target type.
+--
+--  via https://www.extrema.is/blog/2021/10/20/parsing-bounded-integers
+boundedDecimal :: forall a. (Bounded a, Integral a) => Atto.Parser (Maybe a)
+boundedDecimal = do
+  i :: Integer <- decimal
+  pure $
+    if (i :: Integer) > fromIntegral (maxBound :: a)
+      then Nothing
+      else Just $ fromIntegral i
+  where
+    -- Copied from @Attoparsec.Text@ and adjusted to bytestring
+    decimal :: (Integral a2) => Atto.Parser a2
+    decimal = ByteString.foldl' step 0 <$> Atto.Char.takeWhile1 Atto.Char.isDigit
+      where
+        step a c = a * 10 + fromIntegral (c - 48)
+{-# SPECIALIZE boundedDecimal :: Atto.Parser (Maybe Int) #-}
+{-# SPECIALIZE boundedDecimal :: Atto.Parser (Maybe Int64) #-}
+{-# SPECIALIZE boundedDecimal :: Atto.Parser (Maybe Word8) #-}
+{-# SPECIALIZE boundedDecimal :: Atto.Parser (Maybe Word64) #-}
+
+-- | 'boundedDecimal', but fail the parser if the decimal overflows.
+boundedDecimalFail :: Atto.Parser Int
+boundedDecimalFail =
+  boundedDecimal >>= \case
+    Nothing -> fail "decimal out of range"
+    Just a -> pure a
+
+-- | Hedgehog generator for a netencode value.
+genNetencode :: Hedge.MonadGen m => m T
+genNetencode =
+  Gen.recursive
+    Gen.choice
+    [ -- these are bundled into one Gen, so that scalar elements get chosen less frequently, and the generator produces nicely nested examples
+      Gen.frequency
+        [ (1, pure unit),
+          (1, n1 <$> Gen.bool),
+          (1, n3 <$> Gen.element [0, 1, 5]),
+          (1, n6 <$> Gen.element [0, 1, 5]),
+          (1, i6 <$> Gen.element [-1, 1, 5]),
+          (2, text <$> Gen.text (Range.linear 1 10) Gen.lower),
+          (2, bytes <$> Gen.bytes (Range.linear 1 10))
+        ]
+    ]
+    [ do
+        key <- Gen.text (Range.linear 3 10) Gen.lower
+        val <- genNetencode
+        pure $ tag key val,
+      record
+        <$> ( let k = Gen.text (Range.linear 3 10) Gen.lower
+                  v = genNetencode
+               in NEMap.insertMap
+                    <$> k
+                    <*> v
+                    <*> ( (Gen.map (Range.linear 0 3)) $
+                            (,) <$> k <*> v
+                        )
+            )
+    ]
+
+-- | Hedgehog property: encoding a netencode value and parsing it again returns the same result.
+prop_netencodeRoundtrip :: Hedge.Property
+prop_netencodeRoundtrip = Hedge.property $ do
+  enc <- Hedge.forAll genNetencode
+  ( Atto.parseOnly
+      netencodeParser
+      ( netencodeEncodeStable enc
+          & Builder.toLazyByteString
+          & toStrictBytes
+      )
+    )
+    Hedge.=== (Right enc)
diff --git a/users/Profpatsch/netencode/Netencode/Parse.hs b/users/Profpatsch/netencode/Netencode/Parse.hs
new file mode 100644
index 0000000000..184fb5f912
--- /dev/null
+++ b/users/Profpatsch/netencode/Netencode/Parse.hs
@@ -0,0 +1,102 @@
+{-# LANGUAGE QuasiQuotes #-}
+
+module Netencode.Parse where
+
+import Control.Category qualified
+import Control.Selective (Selective)
+import Data.Error.Tree
+import Data.Fix (Fix (..))
+import Data.Functor.Compose
+import Data.List qualified as List
+import Data.Map.NonEmpty (NEMap)
+import Data.Map.NonEmpty qualified as NEMap
+import Data.Semigroupoid qualified as Semigroupoid
+import Data.Text qualified as Text
+import Netencode qualified
+import PossehlAnalyticsPrelude
+import Prelude hiding (log)
+
+newtype Parse from to
+  = -- TODO: the way @Context = [Text]@ has to be forwarded to everything is kinda shitty.
+    -- This is essentially just a difference list, and can probably be treated as a function in the output?
+    Parse (([Text], from) -> Validation (NonEmpty ErrorTree) ([Text], to))
+  deriving
+    (Functor, Applicative, Selective)
+    via ( Compose
+            ( Compose
+                ((->) ([Text], from))
+                (Validation (NonEmpty ErrorTree))
+            )
+            ((,) [Text])
+        )
+
+instance Semigroupoid Parse where
+  o p2 p1 = Parse $ \from -> case runParse' p1 from of
+    Failure err -> Failure err
+    Success to1 -> runParse' p2 to1
+
+instance Category Parse where
+  (.) = Semigroupoid.o
+  id = Parse $ \t -> Success t
+
+runParse :: Error -> Parse from to -> from -> Either ErrorTree to
+runParse errMsg parser t =
+  (["$"], t)
+    & runParse' parser
+    <&> snd
+    & first (nestedMultiError errMsg)
+    & validationToEither
+
+runParse' :: Parse from to -> ([Text], from) -> Validation (NonEmpty ErrorTree) ([Text], to)
+runParse' (Parse f) from = f from
+
+parseEither :: (([Text], from) -> Either ErrorTree ([Text], to)) -> Parse from to
+parseEither f = Parse $ \from -> f from & eitherToListValidation
+
+tAs :: (Netencode.TF (Fix Netencode.TF) -> Either ([Text] -> ErrorTree) to) -> Parse Netencode.T to
+tAs f = parseEither ((\(context, Netencode.T (Fix tf)) -> f tf & bimap ($ context) (context,)))
+
+key :: Text -> Parse (NEMap Text to) to
+key name = parseEither $ \(context, rec) ->
+  rec
+    & NEMap.lookup name
+    & annotate (errorTreeContext (showContext context) [fmt|Key "{name}" does not exist|])
+    <&> (addContext name context,)
+
+showContext :: [Text] -> Text
+showContext context = context & List.reverse & Text.intercalate "."
+
+addContext :: a -> [a] -> [a]
+addContext = (:)
+
+asText :: Parse Netencode.T Text
+asText = tAs $ \case
+  Netencode.Text t -> pure t
+  other -> typeError "of text" other
+
+asBytes :: Parse Netencode.T ByteString
+asBytes = tAs $ \case
+  Netencode.Bytes b -> pure b
+  other -> typeError "of bytes" other
+
+asRecord :: Parse Netencode.T (NEMap Text (Netencode.T))
+asRecord = tAs $ \case
+  Netencode.Record rec -> pure (rec <&> Netencode.T)
+  other -> typeError "a record" other
+
+typeError :: Text -> Netencode.TF ignored -> (Either ([Text] -> ErrorTree) b)
+typeError should is = do
+  let otherS = is <&> (\_ -> ("…" :: String)) & show
+  Left $ \context -> errorTreeContext (showContext context) [fmt|Value is not {should}, but a {otherS}|]
+
+orThrowParseError ::
+  Parse (Either Error to) to
+orThrowParseError = Parse $ \case
+  (context, Left err) ->
+    err
+      & singleError
+      & errorTreeContext (showContext context)
+      & singleton
+      & Failure
+  (context, Right to) -> Success (context, to)
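+
+-- | Usage sketch (illustrative only, not part of the module’s exported surface):
+-- extract the @name@ field of a record as text. Parsers compose right-to-left
+-- via the 'Semigroupoid'/'Category' instances; 'newError' is assumed to come
+-- from the prelude.
+exampleParseName :: Netencode.T -> Either ErrorTree Text
+exampleParseName =
+  runParse
+    (newError "cannot parse name")
+    (asText `Semigroupoid.o` key "name" `Semigroupoid.o` asRecord)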
diff --git a/users/Profpatsch/netencode/README.md b/users/Profpatsch/netencode/README.md
new file mode 100644
index 0000000000..3538a110a6
--- /dev/null
+++ b/users/Profpatsch/netencode/README.md
@@ -0,0 +1,133 @@
+# netencode 0.1-unreleased
+
+A [bencode][] and [netstring][]-inspired pipe format that should be trivial to generate correctly in every context (it only requires a `byte_length()` and a `printf()`), easy to parse (100 lines of code or less), and mostly human-decipherable for easy debugging, while supporting nested record and sum types.
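+
+For example, a text scalar (described below) can be produced with nothing more than a byte length and string concatenation (a Haskell sketch; `encodeText` is an illustrative name, not part of this library):
+
+```
+import qualified Data.ByteString.Char8 as BS
+
+-- t<byte length>:<value>, — e.g. encodeText (BS.pack "hello") == BS.pack "t5:hello,"
+encodeText :: BS.ByteString -> BS.ByteString
+encodeText s = BS.concat [BS.pack "t", BS.pack (show (BS.length s)), BS.pack ":", s, BS.pack ","]
+```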
+
+
+## scalars
+
+Scalars have the format `[type prefix][size]:[value],`,
+where the size is a natural number without leading zeroes.
+
+### unit
+
+The unit (`u`) has only one value.
+
+* The unit is: `u,`
+
+### numbers
+
+Naturals (`n`) and Integers (`i`), with a maximum size in bits.
+
+Bit sizes are specified in 2^n increments, 1 to 9 (`n1`..`n9`, `i1`..`i9`).
+
+* Natural `1234` that fits in 32 bits (2^5): `n5:1234,`
+* Integer `-42` that fits in 8 bits (2^3): `i3:-42,`
+* Integer `23` that fits in 64 bits (2^6): `i6:23,`
+* Integer `-1` that fits in 512 bits (2^9): `i9:-1,`
+* Natural `0` that fits in 1 bit (2^1): `n1:0,`
+
+An implementation can define the biggest numbers it supports, and has to throw an error for anything bigger. It has to support everything smaller, so for example if you support up to i6/n6, you have to support 1–6 as well. An implementation could support up to the current architecture’s wordsize for example.
+
+Floats are not supported; you can implement fixed-size decimals or ratios using integers.
+
+### booleans
+
+A boolean is represented as `n1`.
+
+* `n1:0,`: false
+* `n1:1,`: true
+
+TODO: should we add `f,` and `t,`?
+
+### text
+
+Text (`t`) that *must* be encoded as UTF-8, starting with its length in bytes:
+
+* The string `hello world` (11 bytes): `t11:hello world,`
+* The string `今日は` (9 bytes): `t9:今日は,`
+* The string `:,` (2 bytes): `t2::,,`
+* The empty string `` (0 bytes): `t0:,`
+
+### binary
+
+Arbitrary binary strings (`b`) that can contain any data, starting with their length in bytes.
+
+* The ASCII string `hello world` as binary data (11 bytes): `b11:hello world,`
+* The empty binary string (0 bytes): `b0:,`
+* The bytestring with `^D` (1 byte): `b1:,`
+
+Since binary strings are length-prefixed, they can contain `\0` and no escaping is required. Care has to be taken in languages with `\0`-terminated bytestrings.
+
+Use text (`t`) if you have UTF-8 encoded data.
+
+## tagged values
+
+### tags
+
+A tag (`<`) gives a value a name. The tag name is UTF-8 encoded and prefixed with its length in bytes; the tagged value follows after a `|` separator.
+
+* The tag `foo` (3 bytes) tagging the text `hello` (5 bytes): `<3:foo|t5:hello,`
+* The tag `` (0 bytes) tagging the 8-bit integer 0: `<0:|i3:0,`
+
+### records (products/records), also maps
+
+A record (`{`) is a concatenation of tags (`<`). It needs to be closed with `}`.
+
+If tag names repeat, the *earlier* ones should be ignored.
+Using the last tag corresponds to the way most languages convert a list of tuples to a map: loop over the list and `Map.insert` each key without checking the contents first. Otherwise you’d have to reverse the list first, or remember which keys you already inserted.
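+
+A sketch of that loop in Haskell (`recordFromTags` is an illustrative name, not part of this library):
+
+```
+import Data.List (foldl')
+import qualified Data.Map.Strict as Map
+
+-- Later tags overwrite earlier ones, matching the rule above.
+recordFromTags :: Ord key => [(key, val)] -> Map.Map key val
+recordFromTags = foldl' (\acc (k, v) -> Map.insert k v acc) Map.empty
+```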
+
+Ordering of tags in a record does not matter.
+
+Similar to text, records start with the length of their *whole encoded content*, in bytes. This makes it possible to treat their contents as opaque bytestrings (see the sketch after the examples below).
+
+* There is no empty record. (TODO: make the empty record the unit type, remove `u,`?)
+* A record with one empty field, `foo`: `{9:<3:foo|u,}`
+* A record with two fields, `foo` and `x`: `{21:<3:foo|u,<1:x|t3:baz,}`
+* The same record: `{21:<1:x|t3:baz,<3:foo|u,}`
+* The same record (earlier occurrences of fields are ignored): `{28:<1:x|u,<1:x|t3:baz,<3:foo|u,}`
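+
+Since a record’s length is known before its contents are parsed, a reader can skip over a record without interpreting it (an attoparsec sketch; `skipRecord` is an illustrative name, not part of this library):
+
+```
+import qualified Data.Attoparsec.ByteString.Char8 as Atto
+
+skipRecord :: Atto.Parser ()
+skipRecord = do
+  _ <- Atto.char '{'
+  len <- Atto.decimal
+  _ <- Atto.char ':'
+  _ <- Atto.take len -- the whole record body, treated as opaque bytes
+  _ <- Atto.char '}'
+  pure ()
+```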
+
+### sums (tagged unions)
+
+Simply a tagged value. The tag marker `<` indicates it is a sum if it appears outside of a record.
+
+## lists
+
+A list (`[`) imposes an ordering on a sequence of values. It needs to be closed with `]`. Values in it are simply concatenated.
+
+Similar to records, lists start with the length of their whole encoded content.
+
+* The empty list: `[0:]`
+* The list with one element, the string `foo`: `[7:t3:foo,]`
+* The list with text `foo` followed by i3 `-42`: `[14:t3:foo,i3:-42,]`
+* The list with `Some` and `None` tags: `[35:<4:Some|t3:foo,<4:None|u,<4:None|u,]`
+
+## parser security considerations
+
+The length field is a decimal number whose own length is not restricted,
+meaning an attacker could send an extremely (or infinitely) long length field,
+overflowing your parser if you are not careful.
+
+You should thus put a practical limit on the length of length fields,
+which implicitly enforces a limit on how long the value itself can be.
+
+Start by defining a maximum value length in bytes,
+then count the number of decimal digits in that number.
+
+So if your maximum length is 1024 bytes, your length field can be at most `count_digits(1024) == 4` bytes long.
+
+Thus, if you restrict your parser to a length field of 4 bytes,
+it should also never parse anything longer than 1024 bytes for the value
+(plus 1 byte for the type tag, 4 bytes for the length, and 2 bytes for the separator & ending character).
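+
+A sketch of such a restricted length-field parser, using attoparsec and the 1024-byte maximum from above (`lengthField` is an illustrative name, not part of this library):
+
+```
+import qualified Data.Attoparsec.ByteString.Char8 as Atto
+import qualified Data.ByteString.Char8 as BS
+
+lengthField :: Atto.Parser Int
+lengthField = do
+  ds <- Atto.takeWhile1 Atto.isDigit
+  if BS.length ds > 4 -- count_digits(1024)
+    then fail "length field longer than 4 digits"
+    else do
+      let n = read (BS.unpack ds) -- at most 4 digits, cannot overflow Int
+      if n > 1024 then fail "value longer than 1024 bytes" else pure n
+```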
+
+## motivation
+
+TODO
+
+## guarantees
+
+TODO: do I want a unique representation (a bijection, like bencode)? This would put more restrictions on the generator, like sorting record tags in lexicographic order, but would make it possible to compare encoded values without decoding.
+
+
+[bencode]: https://en.wikipedia.org/wiki/Bencode
+[netstring]: https://en.wikipedia.org/wiki/Netstring
diff --git a/users/Profpatsch/netencode/default.nix b/users/Profpatsch/netencode/default.nix
new file mode 100644
index 0000000000..6e7dce489a
--- /dev/null
+++ b/users/Profpatsch/netencode/default.nix
@@ -0,0 +1,184 @@
+{ depot, pkgs, lib, ... }:
+
+let
+  netencode-rs = depot.nix.writers.rustSimpleLib
+    {
+      name = "netencode";
+      dependencies = [
+        depot.third_party.rust-crates.nom
+        depot.users.Profpatsch.execline.exec-helpers
+      ];
+    }
+    (builtins.readFile ./netencode.rs);
+
+  netencode-hs = pkgs.haskellPackages.mkDerivation {
+    pname = "netencode";
+    version = "0.1.0";
+
+    src = depot.users.Profpatsch.exactSource ./. [
+      ./netencode.cabal
+      ./Netencode.hs
+      ./Netencode/Parse.hs
+    ];
+
+    libraryHaskellDepends = [
+      pkgs.haskellPackages.hedgehog
+      pkgs.haskellPackages.nonempty-containers
+      pkgs.haskellPackages.deriving-compat
+      pkgs.haskellPackages.data-fix
+      pkgs.haskellPackages.bytestring
+      pkgs.haskellPackages.attoparsec
+      pkgs.haskellPackages.pa-prelude
+      pkgs.haskellPackages.pa-label
+      pkgs.haskellPackages.pa-error-tree
+    ];
+
+    isLibrary = true;
+    license = lib.licenses.mit;
+
+  };
+
+  gen = import ./gen.nix { inherit lib; };
+
+  pretty-rs = depot.nix.writers.rustSimpleLib
+    {
+      name = "netencode-pretty";
+      dependencies = [
+        netencode-rs
+      ];
+    }
+    (builtins.readFile ./pretty.rs);
+
+  pretty = depot.nix.writers.rustSimple
+    {
+      name = "netencode-pretty";
+      dependencies = [
+        netencode-rs
+        pretty-rs
+        depot.users.Profpatsch.execline.exec-helpers
+      ];
+    } ''
+    extern crate netencode;
+    extern crate netencode_pretty;
+    extern crate exec_helpers;
+
+    fn main() {
+      let (_, _prog) = exec_helpers::args_for_exec("netencode-pretty", 0);
+      let t = netencode::t_from_stdin_or_die_user_error("netencode-pretty");
+      match netencode_pretty::Pretty::from_u(t.to_u()).print_multiline(&mut std::io::stdout()) {
+        Ok(()) => {},
+        Err(err) => exec_helpers::die_temporary("netencode-pretty", format!("could not write to stdout: {}", err))
+      }
+    }
+  '';
+
+  netencode-mustache = depot.nix.writers.rustSimple
+    {
+      name = "netencode_mustache";
+      dependencies = [
+        depot.users.Profpatsch.arglib.netencode.rust
+        netencode-rs
+        depot.third_party.rust-crates.mustache
+      ];
+    }
+    (builtins.readFile ./netencode-mustache.rs);
+
+
+  record-get = depot.nix.writers.rustSimple
+    {
+      name = "record-get";
+      dependencies = [
+        netencode-rs
+        depot.users.Profpatsch.execline.exec-helpers
+      ];
+    } ''
+    extern crate netencode;
+    extern crate exec_helpers;
+    use netencode::{encode, dec};
+    use netencode::dec::{Decoder, DecodeError};
+
+    fn main() {
+        let args = exec_helpers::args("record-get", 1);
+        let field = match std::str::from_utf8(&args[0]) {
+            Ok(f) => f,
+            Err(_e) => exec_helpers::die_user_error("record-get", "The field name needs to be valid unicode")
+        };
+        let t = netencode::t_from_stdin_or_die_user_error("record-get");
+        match (dec::RecordDot {field, inner: dec::AnyU }).dec(t.to_u()) {
+            Ok(u) => encode(&mut std::io::stdout(), &u).expect("encoding to stdout failed"),
+            Err(DecodeError(err)) => exec_helpers::die_user_error("record-get", err)
+        }
+    }
+  '';
+
+  record-splice-env = depot.nix.writers.rustSimple
+    {
+      name = "record-splice-env";
+      dependencies = [
+        netencode-rs
+        depot.users.Profpatsch.execline.exec-helpers
+      ];
+    } ''
+    extern crate netencode;
+    extern crate exec_helpers;
+    use netencode::dec::{Record, Try, ScalarAsBytes, Decoder, DecodeError};
+
+    fn main() {
+        let t = netencode::t_from_stdin_or_die_user_error("record-splice-env");
+        let (_, prog) = exec_helpers::args_for_exec("record-splice-env", 0);
+        match Record(Try(ScalarAsBytes)).dec(t.to_u()) {
+            Ok(map) => {
+                exec_helpers::exec_into_args(
+                    "record-splice-env",
+                    prog,
+                    // some elements can’t be decoded as scalars, so just ignore them
+                    map.into_iter().filter_map(|(k, v)| v.map(|v2| (k, v2)))
+                );
+            },
+            Err(DecodeError(err)) => exec_helpers::die_user_error("record-splice-env", err),
+        }
+    }
+  '';
+
+  env-splice-record = depot.nix.writers.rustSimple
+    {
+      name = "env-splice-record";
+      dependencies = [
+        netencode-rs
+        depot.users.Profpatsch.execline.exec-helpers
+      ];
+    } ''
+    extern crate netencode;
+    extern crate exec_helpers;
+    use netencode::{T};
+    use std::os::unix::ffi::OsStringExt;
+
+    fn main() {
+        exec_helpers::no_args("env-splice-record");
+        let mut res = std::collections::HashMap::new();
+        for (key, val) in std::env::vars_os() {
+          match (String::from_utf8(key.into_vec()), String::from_utf8(val.into_vec())) {
+            (Ok(k), Ok(v)) => { let _ = res.insert(k, T::Text(v)); },
+            // same as in record-splice-env, we ignore non-utf8 variables
+            (_, _) => {},
+          }
+        }
+        netencode::encode(&mut std::io::stdout(), &T::Record(res).to_u()).unwrap()
+    }
+  '';
+
+in
+depot.nix.readTree.drvTargets {
+  inherit
+    netencode-rs
+    netencode-hs
+    pretty-rs
+    pretty
+    netencode-mustache
+    record-get
+    record-splice-env
+    env-splice-record
+    gen
+    ;
+}
diff --git a/users/Profpatsch/netencode/gen.nix b/users/Profpatsch/netencode/gen.nix
new file mode 100644
index 0000000000..efc9629ca0
--- /dev/null
+++ b/users/Profpatsch/netencode/gen.nix
@@ -0,0 +1,73 @@
+{ lib }:
+let
+
+  netstring = tag: suffix: s:
+    "${tag}${toString (builtins.stringLength s)}:${s}${suffix}";
+
+  unit = "u,";
+
+  n1 = b: if b then "n1:1," else "n1:0,";
+
+  n = size: val: "n${toString size}:${toString val},";
+  i = size: val: "i${toString size}:${toString val},";
+
+  n3 = n 3;
+  n6 = n 6;
+  n7 = n 7;
+
+  i3 = i 3;
+  i6 = i 6;
+  i7 = i 7;
+
+  text = netstring "t" ",";
+  binary = netstring "b" ",";
+
+  tag = key: val: netstring "<" "|" key + val;
+
+  concatStrings = builtins.concatStringsSep "";
+
+  record = lokv: netstring "{" "}"
+    (concatStrings (map ({ key, val }: tag key val) lokv));
+
+  list = l: netstring "[" "]" (concatStrings l);
+
+  dwim = val:
+    let
+      match = {
+        "bool" = n1;
+        "int" = i6;
+        "string" = text;
+        "set" = attrs:
+          # it could be a derivation, then just return the path
+          if attrs.type or "" == "derivation" then text "${attrs}"
+          else
+            record (lib.mapAttrsToList
+              (k: v: {
+                key = k;
+                val = dwim v;
+              })
+              attrs);
+        "list" = l: list (map dwim l);
+      };
+    in
+    match.${builtins.typeOf val} val;
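+
+  # Example: `dwim { foo = 42; }` evaluates to `{13:<3:foo|i6:42,}`
+  # (the int is encoded as an i6 scalar, the attrset as a record).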
+
+in
+{
+  inherit
+    unit
+    n1
+    n3
+    n6
+    n7
+    i3
+    i6
+    i7
+    text
+    binary
+    tag
+    record
+    list
+    dwim
+    ;
+}
diff --git a/users/Profpatsch/netencode/netencode-mustache.rs b/users/Profpatsch/netencode/netencode-mustache.rs
new file mode 100644
index 0000000000..73ed5be1de
--- /dev/null
+++ b/users/Profpatsch/netencode/netencode-mustache.rs
@@ -0,0 +1,52 @@
+extern crate arglib_netencode;
+extern crate mustache;
+extern crate netencode;
+
+use mustache::Data;
+use netencode::T;
+use std::collections::HashMap;
+use std::io::Read;
+use std::os::unix::ffi::OsStrExt;
+
+fn netencode_to_mustache_data_dwim(t: T) -> Data {
+    match t {
+        // TODO: good idea?
+        T::Unit => Data::Null,
+        T::N1(b) => Data::Bool(b),
+        T::N3(u) => Data::String(u.to_string()),
+        T::N6(u) => Data::String(u.to_string()),
+        T::N7(u) => Data::String(u.to_string()),
+        T::I3(i) => Data::String(i.to_string()),
+        T::I6(i) => Data::String(i.to_string()),
+        T::I7(i) => Data::String(i.to_string()),
+        T::Text(s) => Data::String(s),
+        T::Binary(_b) => unimplemented!(),
+        T::Sum(_tag) => unimplemented!(),
+        T::Record(xs) => Data::Map(
+            xs.into_iter()
+                .map(|(key, val)| (key, netencode_to_mustache_data_dwim(val)))
+                .collect::<HashMap<_, _>>(),
+        ),
+        T::List(xs) => Data::Vec(
+            xs.into_iter()
+                .map(|x| netencode_to_mustache_data_dwim(x))
+                .collect::<Vec<_>>(),
+        ),
+    }
+}
+
+pub fn from_stdin() {
+    let data = netencode_to_mustache_data_dwim(arglib_netencode::arglib_netencode(
+        "netencode-mustache",
+        Some(std::ffi::OsStr::new("TEMPLATE_DATA")),
+    ));
+    let mut stdin = String::new();
+    std::io::stdin().read_to_string(&mut stdin).unwrap();
+    mustache::compile_str(&stdin)
+        .and_then(|templ| templ.render_data(&mut std::io::stdout(), &data))
+        .unwrap()
+}
+
+pub fn main() {
+    from_stdin()
+}
diff --git a/users/Profpatsch/netencode/netencode.cabal b/users/Profpatsch/netencode/netencode.cabal
new file mode 100644
index 0000000000..7bff4487bb
--- /dev/null
+++ b/users/Profpatsch/netencode/netencode.cabal
@@ -0,0 +1,74 @@
+cabal-version:      3.0
+name:               netencode
+version:            0.1.0.0
+author:             Profpatsch
+maintainer:         mail@profpatsch.de
+
+
+common common-options
+  ghc-options:
+      -Wall
+      -Wno-type-defaults
+      -Wunused-packages
+      -Wredundant-constraints
+      -fwarn-missing-deriving-strategies
+
+  -- See https://downloads.haskell.org/ghc/latest/docs/users_guide/exts.html
+  -- for a description of all these extensions
+  default-extensions:
+      -- Infer Applicative instead of Monad where possible
+    ApplicativeDo
+
+    -- Allow literal strings to be Text
+    OverloadedStrings
+
+    -- Syntactic sugar improvements
+    LambdaCase
+    MultiWayIf
+
+    -- Makes the (deprecated) usage of * instead of Data.Kind.Type an error
+    NoStarIsType
+
+    -- Convenient and crucial to deal with ambiguous field names, commonly
+    -- known as RecordDotSyntax
+    OverloadedRecordDot
+
+    -- does not export record fields as functions, use OverloadedRecordDot to access instead
+    NoFieldSelectors
+
+    -- Record punning
+    RecordWildCards
+
+    -- Improved Deriving
+    DerivingStrategies
+    DerivingVia
+
+    -- Type-level strings
+    DataKinds
+
+    -- to enable the `type` keyword in import lists (ormolu uses this automatically)
+    ExplicitNamespaces
+
+  default-language: GHC2021
+
+
+library
+    import: common-options
+    exposed-modules:
+        Netencode,
+        Netencode.Parse
+
+    build-depends:
+        base >=4.15 && <5,
+        pa-prelude,
+        pa-label,
+        pa-error-tree,
+        hedgehog,
+        nonempty-containers,
+        deriving-compat,
+        data-fix,
+        bytestring,
+        attoparsec,
+        text,
+        semigroupoids,
+        selective
diff --git a/users/Profpatsch/netencode/netencode.rs b/users/Profpatsch/netencode/netencode.rs
new file mode 100644
index 0000000000..34a8fcef09
--- /dev/null
+++ b/users/Profpatsch/netencode/netencode.rs
@@ -0,0 +1,969 @@
+extern crate exec_helpers;
+extern crate nom;
+
+use std::collections::HashMap;
+use std::fmt::{Debug, Display};
+use std::io::{Read, Write};
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum T {
+    // Unit
+    Unit,
+    // Boolean
+    N1(bool),
+    // Naturals
+    N3(u8),
+    N6(u64),
+    N7(u128),
+    // Integers
+    I3(i8),
+    I6(i64),
+    I7(i128),
+    // Text
+    // TODO: make into &str
+    Text(String),
+    // TODO: rename to Bytes
+    Binary(Vec<u8>),
+    // Tags
+    // TODO: make into &str
+    // TODO: rename to Tag
+    Sum(Tag<String, T>),
+    // TODO: make into &str
+    Record(HashMap<String, T>),
+    List(Vec<T>),
+}
+
+impl T {
+    pub fn to_u<'a>(&'a self) -> U<'a> {
+        match self {
+            T::Unit => U::Unit,
+            T::N1(b) => U::N1(*b),
+            T::N3(u) => U::N3(*u),
+            T::N6(u) => U::N6(*u),
+            T::N7(u) => U::N7(*u),
+            T::I3(i) => U::I3(*i),
+            T::I6(i) => U::I6(*i),
+            T::I7(i) => U::I7(*i),
+            T::Text(t) => U::Text(t.as_str()),
+            T::Binary(v) => U::Binary(v),
+            T::Sum(Tag { tag, val }) => U::Sum(Tag {
+                tag: tag.as_str(),
+                val: Box::new(val.to_u()),
+            }),
+            T::Record(map) => U::Record(map.iter().map(|(k, v)| (k.as_str(), v.to_u())).collect()),
+            T::List(l) => U::List(l.iter().map(|v| v.to_u()).collect::<Vec<U<'a>>>()),
+        }
+    }
+
+    pub fn encode(&self) -> Vec<u8> {
+        // TODO: don’t go via U, inefficient
+        self.to_u().encode()
+    }
+}
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum U<'a> {
+    Unit,
+    // Boolean
+    N1(bool),
+    // Naturals
+    N3(u8),
+    N6(u64),
+    N7(u128),
+    // Integers
+    I3(i8),
+    I6(i64),
+    I7(i128),
+    // Text
+    Text(&'a str),
+    Binary(&'a [u8]),
+    // TODO: the U-recursion we do here means we can’t be breadth-lazy anymore
+    // like we originally planned; maybe we want to go `U<'a>` → `&'a [u8]` again?
+    // Tags
+    // TODO: rename to Tag
+    Sum(Tag<&'a str, U<'a>>),
+    Record(HashMap<&'a str, U<'a>>),
+    List(Vec<U<'a>>),
+}
+
+impl<'a> U<'a> {
+    pub fn encode(&self) -> Vec<u8> {
+        let mut c = std::io::Cursor::new(vec![]);
+        encode(&mut c, self).expect("encoding to an in-memory buffer cannot fail");
+        c.into_inner()
+    }
+
+    pub fn to_t(&self) -> T {
+        match self {
+            U::Unit => T::Unit,
+            U::N1(b) => T::N1(*b),
+            U::N3(u) => T::N3(*u),
+            U::N6(u) => T::N6(*u),
+            U::N7(u) => T::N7(*u),
+            U::I3(i) => T::I3(*i),
+            U::I6(i) => T::I6(*i),
+            U::I7(i) => T::I7(*i),
+            U::Text(t) => T::Text((*t).to_owned()),
+            U::Binary(v) => T::Binary((*v).to_owned()),
+            U::Sum(Tag { tag, val }) => T::Sum(Tag {
+                tag: (*tag).to_owned(),
+                val: Box::new(val.to_t()),
+            }),
+            U::Record(map) => T::Record(
+                map.iter()
+                    .map(|(k, v)| ((*k).to_owned(), v.to_t()))
+                    .collect::<HashMap<String, T>>(),
+            ),
+            U::List(l) => T::List(l.iter().map(|v| v.to_t()).collect::<Vec<T>>()),
+        }
+    }
+}
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub struct Tag<S, A> {
+    // TODO: make into &str
+    pub tag: S,
+    pub val: Box<A>,
+}
+
+impl<S, A> Tag<S, A> {
+    fn map<F, B>(self, f: F) -> Tag<S, B>
+    where
+        F: Fn(A) -> B,
+    {
+        Tag {
+            tag: self.tag,
+            val: Box::new(f(*self.val)),
+        }
+    }
+}
+
+fn encode_tag<W: Write>(w: &mut W, tag: &str, val: &U) -> std::io::Result<()> {
+    write!(w, "<{}:{}|", tag.len(), tag)?;
+    encode(w, val)?;
+    Ok(())
+}
+
+pub fn encode<W: Write>(w: &mut W, u: &U) -> std::io::Result<()> {
+    match u {
+        U::Unit => write!(w, "u,"),
+        U::N1(b) => {
+            if *b {
+                write!(w, "n1:1,")
+            } else {
+                write!(w, "n1:0,")
+            }
+        }
+        U::N3(n) => write!(w, "n3:{},", n),
+        U::N6(n) => write!(w, "n6:{},", n),
+        U::N7(n) => write!(w, "n7:{},", n),
+        U::I3(i) => write!(w, "i3:{},", i),
+        U::I6(i) => write!(w, "i6:{},", i),
+        U::I7(i) => write!(w, "i7:{},", i),
+        U::Text(s) => {
+            write!(w, "t{}:", s.len())?;
+            w.write_all(s.as_bytes())?;
+            write!(w, ",")
+        }
+        U::Binary(s) => {
+            write!(w, "b{}:", s.len())?;
+            w.write_all(s)?;
+            write!(w, ",")
+        }
+        U::Sum(Tag { tag, val }) => encode_tag(w, tag, val),
+        U::Record(m) => {
+            let mut c = std::io::Cursor::new(vec![]);
+            for (k, v) in m {
+                encode_tag(&mut c, k, v)?;
+            }
+            write!(w, "{{{}:", c.get_ref().len())?;
+            w.write_all(c.get_ref())?;
+            write!(w, "}}")
+        }
+        U::List(l) => {
+            let mut c = std::io::Cursor::new(vec![]);
+            for u in l {
+                encode(&mut c, u)?;
+            }
+            write!(w, "[{}:", c.get_ref().len())?;
+            w.write_all(c.get_ref())?;
+            write!(w, "]")
+        }
+    }
+}
+
+pub fn text(s: String) -> T {
+    T::Text(s)
+}
+
+pub fn t_from_stdin_or_die_user_error(prog_name: &str) -> T {
+    match t_from_stdin_or_die_user_error_with_rest(prog_name, &[]) {
+        None => exec_helpers::die_user_error(prog_name, "stdin was empty"),
+        Some((rest, t)) => {
+            if rest.is_empty() {
+                t
+            } else {
+                exec_helpers::die_user_error(
+                    prog_name,
+                    format!(
+                        "stdin contained some soup after netencode value: {:?}",
+                        String::from_utf8_lossy(&rest)
+                    ),
+                )
+            }
+        }
+    }
+}
+
+/// Read a netencode value from stdin incrementally, returning any bytes that were not consumed.
+/// Returns `None` if there was nothing to read from stdin and no `initial_bytes` were provided.
+/// The leftover bytes can be passed back as `initial_bytes` if more values should be read.
+pub fn t_from_stdin_or_die_user_error_with_rest(
+    prog_name: &str,
+    initial_bytes: &[u8],
+) -> Option<(Vec<u8>, T)> {
+    let mut chonker = Chunkyboi::new(std::io::stdin().lock(), 4096);
+    // The vec to pass to the parser on each step
+    let mut parser_vec: Vec<u8> = initial_bytes.to_vec();
+    // whether stdin was already empty
+    let mut was_empty: bool = false;
+    loop {
+        match chonker.next() {
+            None => {
+                if parser_vec.is_empty() {
+                    return None;
+                } else {
+                    was_empty = true
+                }
+            }
+            Some(Err(err)) => exec_helpers::die_temporary(
+                prog_name,
+                &format!("could not read from stdin: {:?}", err),
+            ),
+            Some(Ok(mut new_bytes)) => parser_vec.append(&mut new_bytes),
+        }
+
+        match parse::t_t(&parser_vec) {
+            Ok((rest, t)) => return Some((rest.to_owned(), t)),
+            Err(nom::Err::Incomplete(_)) => {
+                if was_empty {
+                    exec_helpers::die_user_error(
+                        prog_name,
+                        &format!(
+                            "unable to parse netencode from stdin, input incomplete: {:?}",
+                            parser_vec
+                        ),
+                    );
+                }
+                // read more from stdin and try parsing again
+                continue;
+            }
+            Err(err) => exec_helpers::die_user_error(
+                prog_name,
+                &format!("unable to parse netencode from stdin: {:?}", err),
+            ),
+        }
+    }
+}
+
+// iter helper
+// TODO: put into its own module
+struct Chunkyboi<T> {
+    inner: T,
+    buf: Vec<u8>,
+}
+
+impl<R: Read> Chunkyboi<R> {
+    fn new(inner: R, chunksize: usize) -> Self {
+        let buf = vec![0; chunksize];
+        Chunkyboi { inner, buf }
+    }
+}
+
+impl<R: Read> Iterator for Chunkyboi<R> {
+    type Item = std::io::Result<Vec<u8>>;
+
+    fn next(&mut self) -> Option<std::io::Result<Vec<u8>>> {
+        match self.inner.read(&mut self.buf) {
+            Ok(0) => None,
+            Ok(read) => {
+                // clone a new buffer so we can reuse the internal one
+                Some(Ok(self.buf[..read].to_owned()))
+            }
+            Err(err) => Some(Err(err)),
+        }
+    }
+}
+
+pub mod parse {
+    use super::{Tag, T, U};
+
+    use std::collections::HashMap;
+    use std::ops::Neg;
+    use std::str::FromStr;
+
+    use nom::branch::alt;
+    use nom::bytes::streaming::{tag, take};
+    use nom::character::streaming::{char, digit1};
+    use nom::combinator::{flat_map, map, map_parser, map_res, opt};
+    use nom::error::{context, ErrorKind, ParseError};
+    use nom::sequence::tuple;
+    use nom::IResult;
+
+    fn unit_t(s: &[u8]) -> IResult<&[u8], ()> {
+        let (s, _) = context("unit", tag("u,"))(s)?;
+        Ok((s, ()))
+    }
+
+    fn usize_t(s: &[u8]) -> IResult<&[u8], usize> {
+        context(
+            "usize",
+            map_res(map_res(digit1, |n| std::str::from_utf8(n)), |s| {
+                s.parse::<usize>()
+            }),
+        )(s)
+    }
+
+    fn sized(begin: char, end: char) -> impl Fn(&[u8]) -> IResult<&[u8], &[u8]> {
+        move |s: &[u8]| {
+            // This is the point where we check the discriminator;
+            // if the beginning char does not match, we can immediately return.
+            let (s, _) = char(begin)(s)?;
+            let (s, (len, _)) = tuple((usize_t, char(':')))(s)?;
+            let (s, (res, _)) = tuple((take(len), char(end)))(s)?;
+            Ok((s, res))
+        }
+    }
+
+    fn uint_t<'a, I: FromStr + 'a>(t: &'static str) -> impl Fn(&'a [u8]) -> IResult<&'a [u8], I> {
+        move |s: &'a [u8]| {
+            let (s, (_, _, int, _)) = tuple((
+                tag(t.as_bytes()),
+                char(':'),
+                map_res(map_res(digit1, |n: &[u8]| std::str::from_utf8(n)), |s| {
+                    s.parse::<I>()
+                }),
+                char(','),
+            ))(s)?;
+            Ok((s, int))
+        }
+    }
+
+    fn bool_t<'a>() -> impl Fn(&'a [u8]) -> IResult<&'a [u8], bool> {
+        context(
+            "bool",
+            alt((map(tag("n1:0,"), |_| false), map(tag("n1:1,"), |_| true))),
+        )
+    }
+
+    fn int_t<'a, I: FromStr + Neg<Output = I>>(
+        t: &'static str,
+    ) -> impl Fn(&'a [u8]) -> IResult<&[u8], I> {
+        context(t, move |s: &'a [u8]| {
+            let (s, (_, _, neg, int, _)) = tuple((
+                tag(t.as_bytes()),
+                char(':'),
+                opt(char('-')),
+                map_res(map_res(digit1, |n: &[u8]| std::str::from_utf8(n)), |s| {
+                    s.parse::<I>()
+                }),
+                char(','),
+            ))(s)?;
+            let res = match neg {
+                Some(_) => -int,
+                None => int,
+            };
+            Ok((s, res))
+        })
+    }
+
+    fn tag_t(s: &[u8]) -> IResult<&[u8], Tag<String, T>> {
+        // recurses into the main parser
+        map(tag_g(t_t), |Tag { tag, val }| Tag {
+            tag: tag.to_string(),
+            val,
+        })(s)
+    }
+
+    fn tag_g<'a, P, O>(inner: P) -> impl Fn(&'a [u8]) -> IResult<&'a [u8], Tag<&'a str, O>>
+    where
+        P: Fn(&'a [u8]) -> IResult<&'a [u8], O>,
+    {
+        move |s: &[u8]| {
+            let (s, tag) = sized('<', '|')(s)?;
+            let (s, val) = inner(s)?;
+            Ok((
+                s,
+                Tag {
+                    tag: std::str::from_utf8(tag)
+                        .map_err(|_| nom::Err::Failure((s, ErrorKind::Char)))?,
+                    val: Box::new(val),
+                },
+            ))
+        }
+    }
+
+    /// parse text scalar (`t5:hello,`)
+    fn text(s: &[u8]) -> IResult<&[u8], T> {
+        let (s, res) = text_g(s)?;
+        Ok((s, T::Text(res.to_string())))
+    }
+
+    fn text_g(s: &[u8]) -> IResult<&[u8], &str> {
+        let (s, res) = sized('t', ',')(s)?;
+        Ok((
+            s,
+            std::str::from_utf8(res).map_err(|_| nom::Err::Failure((s, ErrorKind::Char)))?,
+        ))
+    }
+
+    fn binary<'a>() -> impl Fn(&'a [u8]) -> IResult<&'a [u8], T> {
+        map(binary_g(), |b| T::Binary(b.to_owned()))
+    }
+
+    fn binary_g() -> impl Fn(&[u8]) -> IResult<&[u8], &[u8]> {
+        sized('b', ',')
+    }
+
+    fn list_t(s: &[u8]) -> IResult<&[u8], Vec<T>> {
+        list_g(t_t)(s)
+    }
+
+    /// Wrap the inner parser of an `many0`/`fold_many0`, so that the parser
+    /// is not called when the `s` is empty already, preventing it from
+    /// returning `Incomplete` on streaming parsing.
+    fn inner_no_empty_string<'a, P, O>(inner: P) -> impl Fn(&'a [u8]) -> IResult<&'a [u8], O>
+    where
+        O: Clone,
+        P: Fn(&'a [u8]) -> IResult<&'a [u8], O>,
+    {
+        move |s: &'a [u8]| {
+            if s.is_empty() {
+                // This is a bit hacky, `many0` considers the inside done
+                // when a parser returns `Err::Error`, ignoring the actual error content
+                Err(nom::Err::Error((s, nom::error::ErrorKind::Many0)))
+            } else {
+                inner(s)
+            }
+        }
+    }
+
+    fn list_g<'a, P, O>(inner: P) -> impl Fn(&'a [u8]) -> IResult<&'a [u8], Vec<O>>
+    where
+        O: Clone,
+        P: Fn(&'a [u8]) -> IResult<&'a [u8], O>,
+    {
+        map_parser(
+            sized('[', ']'),
+            nom::multi::many0(inner_no_empty_string(inner)),
+        )
+    }
+
+    fn record_t<'a>(s: &'a [u8]) -> IResult<&'a [u8], HashMap<String, T>> {
+        let (s, r) = record_g(t_t)(s)?;
+        Ok((
+            s,
+            r.into_iter()
+                .map(|(k, v)| (k.to_string(), v))
+                .collect::<HashMap<_, _>>(),
+        ))
+    }
+
+    fn record_g<'a, P, O>(inner: P) -> impl Fn(&'a [u8]) -> IResult<&'a [u8], HashMap<&'a str, O>>
+    where
+        O: Clone,
+        P: Fn(&'a [u8]) -> IResult<&'a [u8], O>,
+    {
+        move |s: &'a [u8]| {
+            let (s, map) = map_parser(
+                sized('{', '}'),
+                nom::multi::fold_many0(
+                    inner_no_empty_string(tag_g(&inner)),
+                    HashMap::new(),
+                    |mut acc: HashMap<_, _>, Tag { tag, mut val }| {
+                        // ignore earlier tags with the same name
+                        // according to netencode spec
+                        let _ = acc.insert(tag, *val);
+                        acc
+                    },
+                ),
+            )(s)?;
+            if map.is_empty() {
+                // records must not be empty, according to the spec
+                Err(nom::Err::Failure((s, nom::error::ErrorKind::Many1)))
+            } else {
+                Ok((s, map))
+            }
+        }
+    }
+
+    pub fn u_u(s: &[u8]) -> IResult<&[u8], U> {
+        alt((
+            map(text_g, U::Text),
+            map(binary_g(), U::Binary),
+            map(unit_t, |()| U::Unit),
+            map(tag_g(u_u), |t| U::Sum(t)),
+            map(list_g(u_u), U::List),
+            map(record_g(u_u), U::Record),
+            map(bool_t(), |u| U::N1(u)),
+            map(uint_t("n3"), |u| U::N3(u)),
+            map(uint_t("n6"), |u| U::N6(u)),
+            map(uint_t("n7"), |u| U::N7(u)),
+            map(int_t("i3"), |u| U::I3(u)),
+            map(int_t("i6"), |u| U::I6(u)),
+            map(int_t("i7"), |u| U::I7(u)),
+            // less common
+            map(uint_t("n2"), |u| U::N3(u)),
+            map(uint_t("n4"), |u| U::N6(u)),
+            map(uint_t("n5"), |u| U::N6(u)),
+            map(int_t("i1"), |u| U::I3(u)),
+            map(int_t("i2"), |u| U::I3(u)),
+            map(int_t("i4"), |u| U::I6(u)),
+            map(int_t("i5"), |u| U::I6(u)),
+            // TODO: 8, 9 not supported
+        ))(s)
+    }
+
+    pub fn t_t(s: &[u8]) -> IResult<&[u8], T> {
+        alt((
+            text,
+            binary(),
+            map(unit_t, |_| T::Unit),
+            map(tag_t, |t| T::Sum(t)),
+            map(list_t, |l| T::List(l)),
+            map(record_t, |p| T::Record(p)),
+            map(bool_t(), |u| T::N1(u)),
+            // 8, 64 and 128 bit
+            map(uint_t("n3"), |u| T::N3(u)),
+            map(uint_t("n6"), |u| T::N6(u)),
+            map(uint_t("n7"), |u| T::N7(u)),
+            map(int_t("i3"), |u| T::I3(u)),
+            map(int_t("i6"), |u| T::I6(u)),
+            map(int_t("i7"), |u| T::I7(u)),
+            // less common
+            map(uint_t("n2"), |u| T::N3(u)),
+            map(uint_t("n4"), |u| T::N6(u)),
+            map(uint_t("n5"), |u| T::N6(u)),
+            map(int_t("i1"), |u| T::I3(u)),
+            map(int_t("i2"), |u| T::I3(u)),
+            map(int_t("i4"), |u| T::I6(u)),
+            map(int_t("i5"), |u| T::I6(u)),
+            // TODO: 8, 9 not supported
+        ))(s)
+    }
+
+    #[cfg(test)]
+    mod tests {
+        use super::*;
+
+        #[test]
+        fn test_parse_unit_t() {
+            assert_eq!(unit_t("u,".as_bytes()), Ok(("".as_bytes(), ())));
+        }
+
+        #[test]
+        fn test_parse_bool_t() {
+            assert_eq!(bool_t()("n1:0,".as_bytes()), Ok(("".as_bytes(), false)));
+            assert_eq!(bool_t()("n1:1,".as_bytes()), Ok(("".as_bytes(), true)));
+        }
+
+        #[test]
+        fn test_parse_usize_t() {
+            assert_eq!(usize_t("32foo".as_bytes()), Ok(("foo".as_bytes(), 32)));
+        }
+
+        #[test]
+        fn test_parse_int_t() {
+            assert_eq!(
+                uint_t::<u8>("n3")("n3:42,abc".as_bytes()),
+                Ok(("abc".as_bytes(), 42))
+            );
+            assert_eq!(
+                uint_t::<u8>("n3")("n3:1024,abc".as_bytes()),
+                Err(nom::Err::Error((
+                    "1024,abc".as_bytes(),
+                    nom::error::ErrorKind::MapRes
+                )))
+            );
+            assert_eq!(
+                int_t::<i64>("i6")("i6:-23,abc".as_bytes()),
+                Ok(("abc".as_bytes(), -23))
+            );
+            assert_eq!(
+                int_t::<i128>("i3")("i3:0,:abc".as_bytes()),
+                Ok((":abc".as_bytes(), 0))
+            );
+            assert_eq!(
+                uint_t::<u8>("n7")("n7:09,".as_bytes()),
+                Ok(("".as_bytes(), 9))
+            );
+            // assert_eq!(
+            //     length("c"),
+            //     Err(nom::Err::Error(("c", nom::error::ErrorKind::Digit)))
+            // );
+            // assert_eq!(
+            //     length(":"),
+            //     Err(nom::Err::Error((":", nom::error::ErrorKind::Digit)))
+            // );
+        }
+
+        #[test]
+        fn test_parse_text() {
+            assert_eq!(
+                text("t5:hello,".as_bytes()),
+                Ok(("".as_bytes(), T::Text("hello".to_owned()))),
+                "{}",
+                r"t5:hello,"
+            );
+            assert_eq!(
+                text("t4:fo".as_bytes()),
+                // The content of the text should be 4 long
+                Err(nom::Err::Incomplete(nom::Needed::Size(4))),
+                "{}",
+                r"t4:fo,"
+            );
+            assert_eq!(
+                text("t9:今日は,".as_bytes()),
+                Ok(("".as_bytes(), T::Text("今日は".to_owned()))),
+                "{}",
+                r"t9:今日は,"
+            );
+        }
+
+        #[test]
+        fn test_parse_binary() {
+            assert_eq!(
+                binary()("b5:hello,".as_bytes()),
+                Ok(("".as_bytes(), T::Binary(Vec::from("hello".to_owned())))),
+                "{}",
+                r"b5:hello,"
+            );
+            assert_eq!(
+                binary()("b4:fo".as_bytes()),
+                // The content of the byte should be 4 long
+                Err(nom::Err::Incomplete(nom::Needed::Size(4))),
+                "{}",
+                r"b4:fo,"
+            );
+            assert_eq!(
+                binary()("b4:foob".as_bytes()),
+                // The content is 4 bytes now, but the finishing , is missing
+                Err(nom::Err::Incomplete(nom::Needed::Size(1))),
+                "{}",
+                r"b4:fo,"
+            );
+            assert_eq!(
+                binary()("b9:今日は,".as_bytes()),
+                Ok(("".as_bytes(), T::Binary(Vec::from("今日は".as_bytes())))),
+                "{}",
+                r"b9:今日は,"
+            );
+        }
+
+        #[test]
+        fn test_list() {
+            assert_eq!(
+                list_t("[0:]".as_bytes()),
+                Ok(("".as_bytes(), vec![])),
+                "{}",
+                r"[0:]"
+            );
+            assert_eq!(
+                list_t("[6:u,u,u,]".as_bytes()),
+                Ok(("".as_bytes(), vec![T::Unit, T::Unit, T::Unit,])),
+                "{}",
+                r"[6:u,u,u,]"
+            );
+            assert_eq!(
+                list_t("[15:u,[7:t3:foo,]u,]".as_bytes()),
+                Ok((
+                    "".as_bytes(),
+                    vec![T::Unit, T::List(vec![T::Text("foo".to_owned())]), T::Unit,]
+                )),
+                "{}",
+                r"[15:u,[7:t3:foo,]u,]"
+            );
+        }
+
+        #[test]
+        fn test_record() {
+            assert_eq!(
+                record_t("{21:<1:a|u,<1:b|u,<1:c|u,}".as_bytes()),
+                Ok((
+                    "".as_bytes(),
+                    vec![
+                        ("a".to_owned(), T::Unit),
+                        ("b".to_owned(), T::Unit),
+                        ("c".to_owned(), T::Unit),
+                    ]
+                    .into_iter()
+                    .collect::<HashMap<String, T>>()
+                )),
+                "{}",
+                r"{21:<1:a|u,<1:b|u,<1:c|u,}"
+            );
+            // earlier duplicate keys are ignored (the last one wins)
+            assert_eq!(
+                record_t("{25:<1:a|u,<1:b|u,<1:a|i1:-1,}".as_bytes()),
+                Ok((
+                    "".as_bytes(),
+                    vec![("a".to_owned(), T::I3(-1)), ("b".to_owned(), T::Unit),]
+                        .into_iter()
+                        .collect::<HashMap<_, _>>()
+                )),
+                "{}",
+                r"{25:<1:a|u,<1:b|u,<1:a|i1:-1,}"
+            );
+            // empty records are not allowed
+            assert_eq!(
+                record_t("{0:}".as_bytes()),
+                Err(nom::Err::Failure((
+                    "".as_bytes(),
+                    nom::error::ErrorKind::Many1
+                ))),
+                "{}",
+                r"{0:}"
+            );
+        }
+
+        #[test]
+        fn test_parse() {
+            assert_eq!(
+                t_t("n3:255,".as_bytes()),
+                Ok(("".as_bytes(), T::N3(255))),
+                "{}",
+                r"n3:255,"
+            );
+            assert_eq!(
+                t_t("t6:halloo,".as_bytes()),
+                Ok(("".as_bytes(), T::Text("halloo".to_owned()))),
+                "{}",
+                r"t6:halloo,"
+            );
+            assert_eq!(
+                t_t("<3:foo|t6:halloo,".as_bytes()),
+                Ok((
+                    "".as_bytes(),
+                    T::Sum(Tag {
+                        tag: "foo".to_owned(),
+                        val: Box::new(T::Text("halloo".to_owned()))
+                    })
+                )),
+                "{}",
+                r"<3:foo|t6:halloo,"
+            );
+            // { a: Unit
+            // , foo: List <A: Unit | B: List i3> }
+            assert_eq!(
+                t_t("{52:<1:a|u,<3:foo|[33:<1:A|u,<1:A|n1:1,<1:B|[7:i3:127,]]}".as_bytes()),
+                Ok((
+                    "".as_bytes(),
+                    T::Record(
+                        vec![
+                            ("a".to_owned(), T::Unit),
+                            (
+                                "foo".to_owned(),
+                                T::List(vec![
+                                    T::Sum(Tag {
+                                        tag: "A".to_owned(),
+                                        val: Box::new(T::Unit)
+                                    }),
+                                    T::Sum(Tag {
+                                        tag: "A".to_owned(),
+                                        val: Box::new(T::N1(true))
+                                    }),
+                                    T::Sum(Tag {
+                                        tag: "B".to_owned(),
+                                        val: Box::new(T::List(vec![T::I3(127)]))
+                                    }),
+                                ])
+                            )
+                        ]
+                        .into_iter()
+                        .collect::<HashMap<String, T>>()
+                    )
+                )),
+                "{}",
+                r"{52:<1:a|u,<3:foo|[33:<1:A|u,<1:A|n1:1,<1:B|[7:i3:127,]]}"
+            );
+        }
+    }
+}
+
+pub mod dec {
+    use super::*;
+    use std::collections::HashMap;
+
+    pub struct DecodeError(pub String);
+
+    pub trait Decoder<'a> {
+        type A;
+        fn dec(&self, u: U<'a>) -> Result<Self::A, DecodeError>;
+    }
+
+    /// Any netencode, as `T`.
+    #[derive(Clone, Copy)]
+    pub struct AnyT;
+    /// Any netencode, as `U`.
+    #[derive(Clone, Copy)]
+    pub struct AnyU;
+
+    impl<'a> Decoder<'a> for AnyT {
+        type A = T;
+        fn dec(&self, u: U<'a>) -> Result<Self::A, DecodeError> {
+            Ok(u.to_t())
+        }
+    }
+
+    impl<'a> Decoder<'a> for AnyU {
+        type A = U<'a>;
+        fn dec(&self, u: U<'a>) -> Result<Self::A, DecodeError> {
+            Ok(u)
+        }
+    }
+
+    /// A text
+    #[derive(Clone, Copy)]
+    pub struct Text;
+
+    /// A bytestring
+    // TODO: rename to Bytes
+    #[derive(Clone, Copy)]
+    pub struct Binary;
+
+    impl<'a> Decoder<'a> for Text {
+        type A = &'a str;
+        fn dec(&self, u: U<'a>) -> Result<Self::A, DecodeError> {
+            match u {
+                U::Text(t) => Ok(t),
+                other => Err(DecodeError(format!("Cannot decode {:?} into Text", other))),
+            }
+        }
+    }
+
+    impl<'a> Decoder<'a> for Binary {
+        type A = &'a [u8];
+        fn dec(&self, u: U<'a>) -> Result<Self::A, DecodeError> {
+            match u {
+                U::Binary(b) => Ok(b),
+                other => Err(DecodeError(format!(
+                    "Cannot decode {:?} into Binary",
+                    other
+                ))),
+            }
+        }
+    }
+
+    /// Any scalar, converted to bytes.
+    #[derive(Clone, Copy)]
+    pub struct ScalarAsBytes;
+
+    impl<'a> Decoder<'a> for ScalarAsBytes {
+        type A = Vec<u8>;
+        fn dec(&self, u: U<'a>) -> Result<Self::A, DecodeError> {
+            match u {
+                U::N3(u) => Ok(format!("{}", u).into_bytes()),
+                U::N6(u) => Ok(format!("{}", u).into_bytes()),
+                U::N7(u) => Ok(format!("{}", u).into_bytes()),
+                U::I3(i) => Ok(format!("{}", i).into_bytes()),
+                U::I6(i) => Ok(format!("{}", i).into_bytes()),
+                U::I7(i) => Ok(format!("{}", i).into_bytes()),
+                U::Text(t) => Ok(t.as_bytes().to_owned()),
+                U::Binary(b) => Ok(b.to_owned()),
+                o => Err(DecodeError(format!("Cannot decode {:?} into scalar", o))),
+            }
+        }
+    }
+
+    /// A map of Ts (TODO: rename to map)
+    #[derive(Clone, Copy)]
+    pub struct Record<T>(pub T);
+
+    impl<'a, Inner> Decoder<'a> for Record<Inner>
+    where
+        Inner: Decoder<'a>,
+    {
+        type A = HashMap<&'a str, Inner::A>;
+        fn dec(&self, u: U<'a>) -> Result<Self::A, DecodeError> {
+            match u {
+                U::Record(map) => map
+                    .into_iter()
+                    .map(|(k, v)| self.0.dec(v).map(|v2| (k, v2)))
+                    .collect::<Result<Self::A, _>>(),
+                o => Err(DecodeError(format!("Cannot decode {:?} into record", o))),
+            }
+        }
+    }
+
+    /// Assume a record and project out the field with the given name and type.
+    #[derive(Clone, Copy)]
+    pub struct RecordDot<'a, T> {
+        pub field: &'a str,
+        pub inner: T,
+    }
+
+    impl<'a, Inner> Decoder<'a> for RecordDot<'_, Inner>
+    where
+        Inner: Decoder<'a> + Clone,
+    {
+        type A = Inner::A;
+        fn dec(&self, u: U<'a>) -> Result<Self::A, DecodeError> {
+            match Record(self.inner.clone()).dec(u) {
+                Ok(mut map) => match map.remove(self.field) {
+                    Some(inner) => Ok(inner),
+                    None => Err(DecodeError(format!(
+                        "Cannot find `{}` in record map",
+                        self.field
+                    ))),
+                },
+                Err(err) => Err(err),
+            }
+        }
+    }
+
+    /// Equals one of the listed `A`s exactly, after decoding.
+    #[derive(Clone)]
+    pub struct OneOf<T, A> {
+        pub inner: T,
+        pub list: Vec<A>,
+    }
+
+    impl<'a, Inner> Decoder<'a> for OneOf<Inner, Inner::A>
+    where
+        Inner: Decoder<'a>,
+        Inner::A: Display + Debug + PartialEq,
+    {
+        type A = Inner::A;
+        fn dec(&self, u: U<'a>) -> Result<Self::A, DecodeError> {
+            match self.inner.dec(u) {
+                Ok(inner) => match self.list.iter().any(|x| x.eq(&inner)) {
+                    true => Ok(inner),
+                    false => Err(DecodeError(format!(
+                        "{} is not one of {:?}",
+                        inner, self.list
+                    ))),
+                },
+                Err(err) => Err(err),
+            }
+        }
+    }
+
+    /// Try decoding as `T`.
+    #[derive(Clone)]
+    pub struct Try<T>(pub T);
+
+    impl<'a, Inner> Decoder<'a> for Try<Inner>
+    where
+        Inner: Decoder<'a>,
+    {
+        type A = Option<Inner::A>;
+        fn dec(&self, u: U<'a>) -> Result<Self::A, DecodeError> {
+            match self.0.dec(u) {
+                Ok(inner) => Ok(Some(inner)),
+                Err(_) => Ok(None),
+            }
+        }
+    }
+}
diff --git a/users/Profpatsch/netencode/pretty.rs b/users/Profpatsch/netencode/pretty.rs
new file mode 100644
index 0000000000..935c3d4a8a
--- /dev/null
+++ b/users/Profpatsch/netencode/pretty.rs
@@ -0,0 +1,163 @@
+extern crate netencode;
+
+use netencode::{Tag, T, U};
+
+pub enum Pretty {
+    Single {
+        r#type: char,
+        length: String,
+        val: String,
+        trailer: char,
+    },
+    Tag {
+        r#type: char,
+        length: String,
+        key: String,
+        inner: char,
+        val: Box<Pretty>,
+    },
+    Multi {
+        r#type: char,
+        length: String,
+        vals: Vec<Pretty>,
+        trailer: char,
+    },
+}
+
+impl Pretty {
+    pub fn from_u<'a>(u: U<'a>) -> Pretty {
+        match u {
+            U::Unit => Self::scalar('u', "", ""),
+            U::N1(b) => Self::scalar('n', "1:", if b { "1" } else { "0" }),
+            U::N3(n) => Self::scalar('n', "3:", n),
+            U::N6(n) => Self::scalar('n', "6:", n),
+            U::N7(n) => Self::scalar('n', "7:", n),
+            U::I3(i) => Self::scalar('i', "3:", i),
+            U::I6(i) => Self::scalar('i', "6:", i),
+            U::I7(i) => Self::scalar('i', "7:", i),
+            U::Text(s) => Pretty::Single {
+                r#type: 't',
+                length: format!("{}:", s.len()),
+                val: s.to_string(),
+                trailer: ',',
+            },
+            U::Binary(s) => Pretty::Single {
+                r#type: 'b',
+                length: format!("{}:", s.len()),
+                // For pretty printing we want the string to be visible obviously.
+                // Instead of not supporting binary, let’s use lossy conversion.
+                val: String::from_utf8_lossy(s).into_owned(),
+                trailer: ',',
+            },
+            U::Sum(Tag { tag, val }) => Self::pretty_tag(tag, Self::from_u(*val)),
+            U::Record(m) => Pretty::Multi {
+                r#type: '{',
+                // TODO: we are losing the size here, should we recompute it? Keep it?
+                length: String::from(""),
+                vals: m
+                    .into_iter()
+                    .map(|(k, v)| Self::pretty_tag(k, Self::from_u(v)))
+                    .collect(),
+                trailer: '}',
+            },
+            U::List(l) => Pretty::Multi {
+                r#type: '[',
+                // TODO: we are losing the size here, should we recompute it? Keep it?
+                length: String::from(""),
+                vals: l.into_iter().map(|v| Self::from_u(v)).collect(),
+                trailer: ']',
+            },
+        }
+    }
+
+    fn scalar<D>(r#type: char, length: &str, d: D) -> Pretty
+    where
+        D: std::fmt::Display,
+    {
+        Pretty::Single {
+            r#type,
+            length: length.to_string(),
+            val: format!("{}", d),
+            trailer: ',',
+        }
+    }
+
+    fn pretty_tag(tag: &str, val: Pretty) -> Pretty {
+        Pretty::Tag {
+            r#type: '<',
+            length: format!("{}:", tag.len()),
+            key: tag.to_string(),
+            inner: '|',
+            val: Box::new(val),
+        }
+    }
+
+    pub fn print_multiline<W>(&self, mut w: &mut W) -> std::io::Result<()>
+    where
+        W: std::io::Write,
+    {
+        Self::go(&mut w, self, 0, true)?;
+        write!(w, "\n")
+    }
+
+    fn go<W>(mut w: &mut W, p: &Pretty, depth: usize, is_newline: bool) -> std::io::Result<()>
+    where
+        W: std::io::Write,
+    {
+        const FULL: usize = 4;
+        const HALF: usize = 2;
+        let i = &vec![b' '; depth * FULL];
+        let iandhalf = &vec![b' '; depth * FULL + HALF];
+        let (i, iandhalf) = unsafe {
+            (
+                std::str::from_utf8_unchecked(i),
+                std::str::from_utf8_unchecked(iandhalf),
+            )
+        };
+        if is_newline {
+            write!(&mut w, "{}", i);
+        }
+        match p {
+            Pretty::Single {
+                r#type,
+                length: _,
+                val,
+                trailer,
+            } => write!(&mut w, "{} {}{}", r#type, val, trailer),
+            Pretty::Tag {
+                r#type,
+                length: _,
+                key,
+                inner,
+                val,
+            } => {
+                write!(&mut w, "{} {} {}", r#type, key, inner)?;
+                Self::go::<W>(&mut w, val, depth, false)
+            }
+            // If there are zero or one elements, we print on a single line;
+            // only if there’s more than one element do we split the resulting value.
+            // We never break lines on arbitrary column sizes, since that is just silly.
+            Pretty::Multi {
+                r#type,
+                length: _,
+                vals,
+                trailer,
+            } => match vals.len() {
+                0 => write!(&mut w, "{} {}", r#type, trailer),
+                1 => {
+                    write!(&mut w, "{} ", r#type);
+                    Self::go::<W>(&mut w, &vals[0], depth, false)?;
+                    write!(&mut w, "{}", trailer)
+                }
+                more => {
+                    write!(&mut w, "\n{}{} \n", iandhalf, r#type)?;
+                    for v in vals {
+                        Self::go::<W>(&mut w, v, depth + 1, true)?;
+                        write!(&mut w, "\n")?;
+                    }
+                    write!(&mut w, "{}{}", iandhalf, trailer)
+                }
+            },
+        }
+    }
+}
diff --git a/users/Profpatsch/netstring/README.md b/users/Profpatsch/netstring/README.md
new file mode 100644
index 0000000000..b8daea11d1
--- /dev/null
+++ b/users/Profpatsch/netstring/README.md
@@ -0,0 +1,18 @@
+# Netstring
+
+Netstrings are a djb invention. They are intended as a serialization format. Instead of using inline control characters like `\n` or `\0` to signal the end of a string, they prefix the string with its length in bytes, written out in ASCII decimal.
+
+```
+hello -> 5:hello,
+foo! -> 4:foo!,
+こんにちは -> 15:こんにちは,
+```
+
+They can be used to encode e.g. lists by simply concatenating and reading them in one-by-one.
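+
+For example, a list of the two strings `foo` and `hello` is just their netstrings concatenated:
+
+```
+3:foo,5:hello,
+```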
+
+If you need a more complex encoding, you could start encoding e.g. tuples as netstrings-in-netstrings, or you could use [`netencode`](../netencode/README.md) instead, which is what-if-json-but-netstrings, and takes the idea of netstrings to its logical conclusion.
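+
+A pair encoded as netstrings-in-netstrings would look like this (a sketch; the spec doesn’t define compound encodings):
+
+```
+("foo", "hi") -> 11:3:foo,2:hi,,
+```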
+
+Resources:
+
+* Spec: http://cr.yp.to/proto/netstrings.txt
+* Wiki: https://en.wikipedia.org/wiki/Netstring
diff --git a/users/Profpatsch/netstring/default.nix b/users/Profpatsch/netstring/default.nix
new file mode 100644
index 0000000000..047fe6bae1
--- /dev/null
+++ b/users/Profpatsch/netstring/default.nix
@@ -0,0 +1,64 @@
+{ lib, pkgs, depot, ... }:
+let
+  toNetstring = depot.nix.netstring.fromString;
+
+  toNetstringList = xs:
+    lib.concatStrings (map toNetstring xs);
+
+  toNetstringKeyVal = depot.nix.netstring.attrsToKeyValList;
+
+  python-netstring = depot.users.Profpatsch.writers.python3Lib
+    {
+      name = "netstring";
+    } ''
+    def read_netstring(bytes):
+        (int_length, rest) = bytes.split(sep=b':', maxsplit=1)
+        val = rest[:int(int_length)]
+        # the value has to be terminated by a ","
+        assert(rest[len(val)] == ord(','))
+        return (val, rest[len(val) + 1:])
+
+    def read_netstring_key_val(bytes):
+        (keyvalnet, rest) = read_netstring(bytes)
+        (key, valnet) = read_netstring(keyvalnet)
+        (val, nothing) = read_netstring(valnet)
+        assert(nothing == b"")
+        return (key, val, rest)
+
+    def read_netstring_key_val_list(bytes):
+        rest = bytes
+        res = {}
+        while rest != b"":
+            (key, val, r) = read_netstring_key_val(rest)
+            rest = r
+            res[key] = val
+        return res
+  '';
+
+  rust-netstring = depot.nix.writers.rustSimpleLib
+    {
+      name = "netstring";
+    } ''
+    pub fn to_netstring(s: &[u8]) -> Vec<u8> {
+        let len = s.len();
+        // length of the integer as ascii
+        let i_len = ((len as f64).log10() as usize) + 1;
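+        // (ns_len is only an allocation hint, so float imprecision for huge lengths is harmless)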
+        let ns_len = i_len + 1 + len + 1;
+        let mut res = Vec::with_capacity(ns_len);
+        res.extend_from_slice(format!("{}:", len).as_bytes());
+        res.extend_from_slice(s);
+        res.push(b',');
+        res
+    }
+  '';
+
+in
+depot.nix.readTree.drvTargets {
+  inherit
+    toNetstring
+    toNetstringList
+    toNetstringKeyVal
+    python-netstring
+    rust-netstring
+    ;
+}
diff --git a/users/Profpatsch/netstring/tests/default.nix b/users/Profpatsch/netstring/tests/default.nix
new file mode 100644
index 0000000000..6a1062988f
--- /dev/null
+++ b/users/Profpatsch/netstring/tests/default.nix
@@ -0,0 +1,64 @@
+{ depot, lib, pkgs, ... }:
+
+let
+
+  python-netstring-test = depot.users.Profpatsch.writers.python3
+    {
+      name = "python-netstring-test";
+      libraries = p: [
+        depot.users.Profpatsch.netstring.python-netstring
+      ];
+    } ''
+    import netstring
+
+    def assEq(left, right):
+      assert left == right, "{} /= {}".format(str(left), str(right))
+
+    assEq(
+      netstring.read_netstring(b"""${depot.nix.netstring.fromString "hi!"}"""),
+      (b"hi!", b"")
+    )
+
+    assEq(
+      netstring.read_netstring_key_val(
+        b"""${depot.nix.netstring.attrsToKeyValList { foo = "42"; }}"""
+      ),
+      (b'foo', b'42', b"")
+    )
+
+    assEq(
+      netstring.read_netstring_key_val_list(
+        b"""${depot.nix.netstring.attrsToKeyValList { foo = "42"; bar = "hi"; }}"""
+      ),
+      { b'foo': b'42', b'bar': b'hi' }
+    )
+  '';
+
+  rust-netstring-test = depot.nix.writers.rustSimple
+    {
+      name = "rust-netstring-test";
+      dependencies = [
+        depot.users.Profpatsch.netstring.rust-netstring
+      ];
+    } ''
+    extern crate netstring;
+
+    fn main() {
+      assert_eq!(
+        std::str::from_utf8(&netstring::to_netstring(b"hello")).unwrap(),
+        r##"${depot.nix.netstring.fromString "hello"}"##
+      );
+      assert_eq!(
+        std::str::from_utf8(&netstring::to_netstring("こんにちは".as_bytes())).unwrap(),
+        r##"${depot.nix.netstring.fromString "こんにちは"}"##
+      );
+    }
+  '';
+
+in
+depot.nix.readTree.drvTargets {
+  inherit
+    python-netstring-test
+    rust-netstring-test
+    ;
+}
diff --git a/users/Profpatsch/nix-home/README.md b/users/Profpatsch/nix-home/README.md
new file mode 100644
index 0000000000..222978bc8c
--- /dev/null
+++ b/users/Profpatsch/nix-home/README.md
@@ -0,0 +1,7 @@
+# nix-home
+
+My very much simplified version of [home-manager](https://github.com/nix-community/home-manager/).
+
+Only takes care of installing symlinks into `$HOME`, and uses [`GNU stow`](https://www.gnu.org/software/stow/) for doing the actual mutating.
+
+No support for services (yet).
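+
+For illustration, stow mirrors a “stow package” directory into the target directory via relative symlinks, roughly like this (hypothetical layout):
+
+```
+stow-dir/scripts/bin/ytextr  ->  $HOME/bin/ytextr (symlink)
+```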
diff --git a/users/Profpatsch/nix-home/default.nix b/users/Profpatsch/nix-home/default.nix
new file mode 100644
index 0000000000..ee154c549a
--- /dev/null
+++ b/users/Profpatsch/nix-home/default.nix
@@ -0,0 +1,212 @@
+{ depot, pkgs, lib, ... }:
+
+let
+  bins = depot.nix.getBins pkgs.stow [ "stow" ]
+    // depot.nix.getBins pkgs.coreutils [ "mkdir" "ln" "printenv" "rm" ]
+    // depot.nix.getBins pkgs.xe [ "xe" ]
+    // depot.nix.getBins pkgs.lr [ "lr" ]
+    // depot.nix.getBins pkgs.nix [ "nix-store" ]
+  ;
+
+  # run stow to populate the target directory with the given stow package, read from stowDir.
+  # Bear in mind that `stowDirOriginPath` should always be semantically bound to the given `stowDir`, otherwise stow might become rather confused.
+  runStow =
+    {
+      # “stow package” to stow (see manpage)
+      # TODO: allow this function to un-stow multiple packages!
+      stowPackage
+    , # “target directory” to stow in (see manpage)
+      targetDir
+    , # The “stow directory” (see manpage), containing “stow packages” (see manpage)
+      stowDir
+    , # representative directory for the stowDir in the file system, against which stow will create relative links.
+      # ATTN: this is always overwritten with the contents of `stowDir`! You shouldn’t re-use the same `stowDirOriginPath` for different `stowDir`s, otherwise there might be surprises.
+      stowDirOriginPath
+    ,
+    }: depot.nix.writeExecline "stow-${stowPackage}" { } [
+      # first, create a temporary stow directory to use as source
+      # (stow will use it to determine the origin of files)
+      "if"
+      [ bins.mkdir "-p" stowDirOriginPath ]
+      # remove old symlinks
+      "if"
+      [
+        "pipeline"
+        [
+          bins.lr
+          "-0"
+          "-t"
+          "depth == 1 && type == l"
+          stowDirOriginPath
+        ]
+        bins.xe
+        "-0"
+        bins.rm
+      ]
+      # create an indirect gc root so our config is not cleaned under our asses by a garbage collect
+      "if"
+      [
+        bins.nix-store
+        "--realise"
+        "--indirect"
+        "--add-root"
+        "${stowDirOriginPath}/.nix-stowdir-gc-root"
+        stowDir
+      ]
+      # populate with new stow targets
+      "if"
+      [
+        "elglob"
+        "-w0"
+        "stowPackages"
+        "${stowDir}/*"
+        bins.ln
+        "--force"
+        "-st"
+        stowDirOriginPath
+        "$stowPackages"
+      ]
+      # stow always looks for $HOME/.stowrc to read more arguments
+      "export"
+      "HOME"
+      "/homeless-shelter"
+      bins.stow
+      # always run restow for now; this does more stat but will remove stale links
+      "--restow"
+      "--dir"
+      stowDirOriginPath
+      "--target"
+      targetDir
+      stowPackage
+    ];
+
+  # create a stow dir from a list of drv paths and a stow package name.
+  makeStowDir =
+    (with depot.nix.yants;
+    defun
+      [
+        (list (struct {
+          originalDir = drv;
+          stowPackage = string;
+        }))
+        drv
+      ])
+      (dirs:
+        depot.nix.runExecline "make-stow-dir"
+          {
+            stdin = lib.pipe dirs [
+              (map depot.users.Profpatsch.netencode.gen.dwim)
+              depot.users.Profpatsch.netstring.toNetstringList
+            ];
+          } [
+          "importas"
+          "out"
+          "out"
+          "if"
+          [ bins.mkdir "-p" "$out" ]
+          "forstdin"
+          "-d"
+          ""
+          "-o"
+          "0"
+          "line"
+          "pipeline"
+          [
+            depot.users.Profpatsch.execline.print-one-env
+            "line"
+          ]
+          depot.users.Profpatsch.netencode.record-splice-env
+          "importas"
+          "-ui"
+          "originalDir"
+          "originalDir"
+          "importas"
+          "-ui"
+          "stowPackage"
+          "stowPackage"
+          bins.ln
+          "-sT"
+          "$originalDir"
+          "\${out}/\${stowPackage}"
+        ]);
+
+  # this is a dumb way of generating a pure list of packages from a depot namespace.
+  readTreeNamespaceDrvs = namespace:
+    lib.pipe namespace [
+      (lib.filterAttrs (_: v: lib.isDerivation v))
+      (lib.mapAttrsToList (k: v: {
+        name = k;
+        drv = v;
+      }))
+    ];
+
+  scriptsStow =
+    lib.pipe { } [
+      (_: makeStowDir [{
+        stowPackage = "scripts";
+        originalDir = pkgs.linkFarm "scripts-farm"
+          ([
+            {
+              name = "scripts/ytextr";
+              path = depot.users.Profpatsch.ytextr;
+            }
+            {
+              name = "scripts/lorri-wait-for-eval";
+              path = depot.users.Profpatsch.lorri-wait-for-eval;
+            }
+            {
+              name = "scripts/lw";
+              path = depot.users.Profpatsch.lorri-wait-for-eval;
+            }
+
+          ]
+          ++
+          (lib.pipe depot.users.Profpatsch.aliases [
+            readTreeNamespaceDrvs
+            (map ({ name, drv }: {
+              name = "scripts/${name}";
+              path = drv;
+            }))
+          ]));
+      }])
+      (d: runStow {
+        stowDir = d;
+        stowPackage = "scripts";
+        targetDir = "/home/philip";
+        stowDirOriginPath = "/home/philip/.local/share/nix-home/stow-origin";
+      })
+    ];
+
+  terminalEmulatorStow =
+    lib.pipe { } [
+      (_: makeStowDir [{
+        stowPackage = "terminal-emulator";
+        originalDir = pkgs.linkFarm "terminal-emulator-farm"
+          ([
+            {
+              name = "bin/terminal-emulator";
+              path = depot.users.Profpatsch.alacritty;
+            }
+          ]);
+
+      }])
+      (d: runStow {
+        stowDir = d;
+        stowPackage = "terminal-emulator";
+        targetDir = "/home/philip";
+        # TODO: this should only be done once, in a single runStow instead of multiple
+        stowDirOriginPath = "/home/philip/.local/share/nix-home/stow-origin-terminal-emulator";
+      })
+    ];
+
+in
+
+# TODO: run multiple stows with runStow?
+# TODO: temp setup
+depot.nix.writeExecline "nix-home" { } [
+  "if"
+  [ scriptsStow ]
+  terminalEmulatorStow
+]
diff --git a/users/Profpatsch/nix-tools.nix b/users/Profpatsch/nix-tools.nix
new file mode 100644
index 0000000000..4f29274573
--- /dev/null
+++ b/users/Profpatsch/nix-tools.nix
@@ -0,0 +1,159 @@
+{ depot, pkgs, ... }:
+
+let
+  bins = depot.nix.getBins pkgs.nix [ "nix-build" "nix-instantiate" ];
+
+  # TODO: both of these don’t prevent `result` from being created. good? bad?
+
+  # Usage (execline syntax):
+  #    nix-run { -A foo <more_nix_options> } args...
+  #
+  # Takes an execline block of `nix-build` arguments, which should produce an executable store path.
+  # Then runs the store path with `prog...`.
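+  # Example (hypothetical attribute): nix-run { -A users.somebody.hello-script } --some-flag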
+  nix-run = depot.nix.writeExecline "nix-run" { argMode = "env"; } [
+    "backtick"
+    "-iE"
+    "storepath"
+    [
+      runblock
+      "1"
+      bins.nix-build
+    ]
+    runblock
+    "-r"
+    "2"
+    "$storepath"
+  ];
+
+  # Usage (execline syntax):
+  #    nix-run-bin { -A foo <more_nix_options> } <foo_bin_name> args...
+  #
+  # Takes an execline block of `nix-build` arguments, which should produce a store path with a bin/ directory in it.
+  # Then runs the given command line with the given arguments. All executables in the built storepath’s bin directory are prepended to `PATH`.
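+  # Example (hypothetical attribute): nix-run-bin { -A third_party.nixpkgs.jq } jq --version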
+  nix-run-bin = depot.nix.writeExecline "nix-run-bin" { argMode = "env"; } [
+    "backtick"
+    "-iE"
+    "storepath"
+    [
+      runblock
+      "1"
+      bins.nix-build
+    ]
+    "importas"
+    "-ui"
+    "PATH"
+    "PATH"
+    "export"
+    "PATH"
+    "\${storepath}/bin:\${PATH}"
+    runblock
+    "-r"
+    "2"
+  ];
+
+  nix-eval = depot.nix.writeExecline "nix-eval" { } [
+    bins.nix-instantiate
+    "--read-write-mode"
+    "--eval"
+    "--strict"
+    "$@"
+  ];
+
+  # This is a rewrite of execline’s runblock.
+  # It adds the feature that instead of just
+  # executing the block it reads, it can also
+  # pass it as argv to given commands.
+  #
+  # This is going to be added to a future version
+  # of execline by skarnet, but for now it’s easier
+  # to just dirtily reimplement it in Python.
+  #
+  # TODO: this was added to recent execline versions,
+  # but it doesn’t seem to be a drop-in replacement,
+  # if I use execline’s runblock in nix-run-bin above,
+  # I get errors like
+  # > export: fatal: unable to exec runblock: Success
+  runblock = pkgs.writers.writePython3 "runblock"
+    {
+      flakeIgnore = [ "E501" "E226" ];
+    } ''
+    import sys
+    import os
+    from pathlib import Path
+
+    skip = False
+    one = sys.argv[1]
+    if one == "-r":
+        skip = True
+        block_number = int(sys.argv[2])
+        block_start = 3
+    elif one.startswith("-"):
+        print("runblock-python: only -r supported", file=sys.stderr)
+        sys.exit(100)
+    else:
+        block_number = int(one)
+        block_start = 2
+
+    execline_argv_no = int(os.getenvb(b"#"))
+    runblock_argv = [os.getenv(str(no)) for no in range(1, execline_argv_no + 1)]
+
+
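+    # An execline block arrives as consecutive arguments in which every
+    # element is prefixed with a single space and the whole block is
+    # terminated by an empty argument; e.g. the block { echo hi } arrives
+    # as [" echo", " hi", ""].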
+    def parse_block(args):
+        new_args = []
+        if args == []:
+            print(
+                "runblock-python: empty block",
+                file=sys.stderr
+            )
+            sys.exit(100)
+        for arg in args:
+            if arg == "":
+                break
+            elif arg.startswith(" "):
+                new_args.append(arg[1:])
+            else:
+                print(
+                    "runblock-python: unterminated block: {}".format(args),
+                    file=sys.stderr
+                )
+                sys.exit(100)
+        args_rest = args[len(new_args)+1:]
+        return (new_args, args_rest)
+
+
+    if skip:
+        rest = runblock_argv
+        for _ in range(0, block_number-1):
+            (_, rest) = parse_block(rest)
+        new_argv = rest
+    else:
+        new_argv = []
+        rest = runblock_argv
+        for _ in range(0, block_number):
+            (new_argv, rest) = parse_block(rest)
+
+    given_argv = sys.argv[block_start:]
+    run = given_argv + new_argv
+    if os.path.isabs(run[0]):
+        # TODO: ideally I’d check if it’s an executable here, but it was too hard to figure out and I couldn’t be bothered tbh
+        if not Path(run[0]).is_file():
+            print(
+                "runblock-python: Executable {} does not exist or is not a file.".format(run[0]),
+                file=sys.stderr
+            )
+            sys.exit(100)
+    os.execvp(
+        file=run[0],
+        args=run
+    )
+  '';
+
+
+in
+{
+  inherit
+    nix-run
+    nix-run-bin
+    nix-eval
+    ;
+}
diff --git a/users/Profpatsch/openlab-tools/Main.hs b/users/Profpatsch/openlab-tools/Main.hs
new file mode 100644
index 0000000000..d5f958a38a
--- /dev/null
+++ b/users/Profpatsch/openlab-tools/Main.hs
@@ -0,0 +1,6 @@
+module Main where
+
+import OpenlabTools qualified
+
+main :: IO ()
+main = OpenlabTools.main
diff --git a/users/Profpatsch/openlab-tools/default.nix b/users/Profpatsch/openlab-tools/default.nix
new file mode 100644
index 0000000000..82641989f7
--- /dev/null
+++ b/users/Profpatsch/openlab-tools/default.nix
@@ -0,0 +1,69 @@
+{ depot, pkgs, lib, ... }:
+
+let
+  #   bins = depot.nix.getBins pkgs.sqlite ["sqlite3"];
+
+  openlab-tools = pkgs.haskellPackages.mkDerivation {
+    pname = "openlab-tools";
+    version = "0.1.0";
+
+    src = depot.users.Profpatsch.exactSource ./. [
+      ./openlab-tools.cabal
+      ./Main.hs
+      ./src/OpenlabTools.hs
+    ];
+
+    libraryHaskellDepends = [
+      depot.users.Profpatsch.my-prelude
+      depot.users.Profpatsch.my-webstuff
+      pkgs.haskellPackages.pa-prelude
+      pkgs.haskellPackages.pa-label
+      pkgs.haskellPackages.pa-json
+      pkgs.haskellPackages.pa-error-tree
+      pkgs.haskellPackages.pa-field-parser
+      pkgs.haskellPackages.pa-run-command
+      pkgs.haskellPackages.aeson-better-errors
+      pkgs.haskellPackages.blaze-html
+      pkgs.haskellPackages.deepseq
+      pkgs.haskellPackages.case-insensitive
+      pkgs.haskellPackages.hs-opentelemetry-sdk
+      pkgs.haskellPackages.http-conduit
+      pkgs.haskellPackages.http-types
+      pkgs.haskellPackages.ihp-hsx
+      pkgs.haskellPackages.monad-logger
+      pkgs.haskellPackages.selective
+      pkgs.haskellPackages.unliftio
+      pkgs.haskellPackages.wai-extra
+      pkgs.haskellPackages.warp
+      pkgs.haskellPackages.tagsoup
+      pkgs.haskellPackages.time
+    ];
+
+    isExecutable = true;
+    isLibrary = false;
+    license = lib.licenses.mit;
+  };
+
+  bins = depot.nix.getBins openlab-tools [ "openlab-tools" ];
+
+in
+
+depot.nix.writeExecline "openlab-tools-wrapped" { } [
+  "importas"
+  "-i"
+  "PATH"
+  "PATH"
+  "export"
+  "PATH"
+  "${pkgs.postgresql}/bin:$${PATH}"
+  "export"
+  "OPENLAB_TOOLS_TOOLS"
+  (pkgs.linkFarm "openlab-tools-tools" [
+    {
+      name = "pg_format";
+      path = "${pkgs.pgformatter}/bin/pg_format";
+    }
+  ])
+  bins.openlab-tools
+]
+
diff --git a/users/Profpatsch/openlab-tools/openlab-tools.cabal b/users/Profpatsch/openlab-tools/openlab-tools.cabal
new file mode 100644
index 0000000000..b7d217e051
--- /dev/null
+++ b/users/Profpatsch/openlab-tools/openlab-tools.cabal
@@ -0,0 +1,111 @@
+cabal-version:      3.0
+name:               openlab-tools
+version:            0.1.0.0
+author:             Profpatsch
+maintainer:         mail@profpatsch.de
+
+common common-options
+  ghc-options:
+      -Wall
+      -Wno-type-defaults
+      -Wunused-packages
+      -Wredundant-constraints
+      -fwarn-missing-deriving-strategies
+
+  -- See https://downloads.haskell.org/ghc/latest/docs/users_guide/exts.html
+  -- for a description of all these extensions
+  default-extensions:
+      -- Infer Applicative instead of Monad where possible
+    ApplicativeDo
+
+    -- Allow literal strings to be Text
+    OverloadedStrings
+
+    -- Syntactic sugar improvements
+    LambdaCase
+    MultiWayIf
+
+    -- Makes the (deprecated) usage of * instead of Data.Kind.Type an error
+    NoStarIsType
+
+    -- Convenient and crucial to deal with ambiguous field names, commonly
+    -- known as RecordDotSyntax
+    OverloadedRecordDot
+
+    -- does not export record fields as functions, use OverloadedRecordDot to access instead
+    NoFieldSelectors
+
+    -- Record punning
+    RecordWildCards
+
+    -- Improved Deriving
+    DerivingStrategies
+    DerivingVia
+
+    -- Type-level strings
+    DataKinds
+
+    -- to enable the `type` keyword in import lists (ormolu uses this automatically)
+    ExplicitNamespaces
+
+  default-language: GHC2021
+
+
+library
+    import: common-options
+
+    hs-source-dirs: src
+
+    exposed-modules:
+       OpenlabTools
+
+    build-depends:
+        base >=4.15 && <5,
+        text,
+        my-prelude,
+        my-webstuff,
+        pa-prelude,
+        pa-error-tree,
+        pa-label,
+        pa-json,
+        pa-field-parser,
+        pa-run-command,
+        aeson-better-errors,
+        aeson,
+        blaze-html,
+        bytestring,
+        containers,
+        deepseq,
+        unordered-containers,
+        exceptions,
+        filepath,
+        hs-opentelemetry-sdk,
+        hs-opentelemetry-api,
+        http-conduit,
+        http-types,
+        ihp-hsx,
+        monad-logger,
+        mtl,
+        network-uri,
+        scientific,
+        selective,
+        unliftio,
+        wai-extra,
+        wai,
+        warp,
+        tagsoup,
+        time,
+        stm,
+        case-insensitive
+
+executable openlab-tools
+    import: common-options
+
+    main-is: Main.hs
+
+    ghc-options:
+      -threaded
+
+    build-depends:
+        base >=4.15 && <5,
+        openlab-tools
diff --git a/users/Profpatsch/openlab-tools/src/OpenlabTools.hs b/users/Profpatsch/openlab-tools/src/OpenlabTools.hs
new file mode 100644
index 0000000000..9fe51aba18
--- /dev/null
+++ b/users/Profpatsch/openlab-tools/src/OpenlabTools.hs
@@ -0,0 +1,551 @@
+{-# LANGUAGE DeriveAnyClass #-}
+{-# LANGUAGE DuplicateRecordFields #-}
+{-# LANGUAGE QuasiQuotes #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+
+module OpenlabTools where
+
+import Control.Concurrent.STM hiding (atomically, readTVarIO)
+import Control.DeepSeq (NFData, deepseq)
+import Control.Monad.Logger qualified as Logger
+import Control.Monad.Logger.CallStack
+import Control.Monad.Reader
+import Data.Aeson.BetterErrors qualified as Json
+import Data.CaseInsensitive qualified as CaseInsensitive
+import Data.Error.Tree
+import Data.HashMap.Strict qualified as HashMap
+import Data.List qualified as List
+import Data.Maybe (listToMaybe)
+import Data.Text qualified as Text
+import Data.Time (NominalDiffTime, UTCTime (utctDayTime), diffUTCTime, getCurrentTime)
+import Data.Time qualified as Time
+import Data.Time.Clock (addUTCTime)
+import Data.Time.Format qualified as Time.Format
+import FieldParser (FieldParser' (..))
+import FieldParser qualified as Field
+import GHC.Records (HasField (..))
+import GHC.Stack qualified
+import IHP.HSX.QQ (hsx)
+import Json qualified
+import Label
+import Network.HTTP.Client.Conduit qualified as Http
+import Network.HTTP.Simple qualified as Http
+import Network.HTTP.Types
+import Network.HTTP.Types qualified as Http
+import Network.Wai qualified as Wai
+import Network.Wai.Handler.Warp qualified as Warp
+import Network.Wai.Parse qualified as Wai
+import OpenTelemetry.Trace qualified as Otel hiding (getTracer, inSpan, inSpan')
+import OpenTelemetry.Trace.Core qualified as Otel hiding (inSpan, inSpan')
+import OpenTelemetry.Trace.Monad qualified as Otel
+import Parse (Parse)
+import Parse qualified
+import PossehlAnalyticsPrelude
+import Pretty
+import System.Environment qualified as Env
+import System.IO qualified as IO
+import Text.Blaze.Html.Renderer.Pretty qualified as Html.Pretty
+import Text.Blaze.Html.Renderer.Utf8 qualified as Html
+import Text.Blaze.Html5 qualified as Html
+import Text.HTML.TagSoup qualified as Soup
+import UnliftIO hiding (Handler, newTVarIO)
+import Prelude hiding (span, until)
+
+mapallSpaceOla :: Text
+mapallSpaceOla = "https://mapall.space/heatmap/show.php?id=OpenLab+Augsburg"
+
+mainPage :: Html.Html
+mainPage =
+  Html.docTypeHtml
+    [hsx|
+          <head>
+            <title>Openlab Augsburg Tools</title>
+            <meta charset="utf-8">
+            <meta name="viewport" content="width=device-width, initial-scale=1">
+          </head>
+
+          <body>
+            <p>Welcome to the OpenLab Augsburg tools thingy. The idea is to provide some services that can be embedded into our other pages.</p>
+
+            <h2>What’s there</h2>
+            <ul>
+              <li>
+                A <a href="snips/table-opening-hours-last-week">table displaying the opening hours last week</a>, courtesy of <a href={mapallSpaceOla}>mapall.space</a>.
+              </li>
+            </ul>
+
+
+            <h2>Show me the code/how to contribute</h2>
+
+            <p>The source code can be found <a href="https://code.tvl.fyi/tree/users/Profpatsch/openlab-tools">in my user dir in the tvl repo</a>.</p>
+
+            <p>To build the server, clone the repository from <a href="https://code.tvl.fyi/depot.git">https://code.tvl.fyi/depot.git</a>.
+            Then <code>cd</code> into <code>users/Profpatsch</code>, run <code>nix-shell</code>.
+            </p>
+
+            <p>You can now run the server with <code>cabal repl openlab-tools</code> by executing the <code>main</code> function inside the GHC repl. It starts on port <code>9099</code>.
+            <br>
+            To try out changes to the code, stop the server with <kbd><kbd>Ctrl</kbd>+<kbd>c</kbd></kbd> and type <code>:reload</code>, then <code>main</code> again.
+            <br>
+            Finally, from within <code>users/Profpatsch</code> you can start a working development environment by installing <var>vscode</var> or <var>vscodium</var> and the <var>Haskell</var> extension. Then run <code>code .</code> from within the directory.
+            </p>
+
+            <p>Once you have a patch, <a href="https://matrix.to/#/@profpatsch:augsburg.one">contact me on Matrix</a> or DM me at <code>irc/libera</code>, nick <code>Profpatsch</code>.
+            </p>
+          </body>
+        |]
+
+debug :: Bool
+debug = False
+
+runApp :: IO ()
+runApp = withTracer $ \tracer -> do
+  let renderHtml =
+        if debug
+          then Html.Pretty.renderHtml >>> stringToText >>> textToBytesUtf8 >>> toLazyBytes
+          else Html.renderHtml
+
+  let runApplication ::
+        (MonadUnliftIO m, MonadLogger m) =>
+        ( Wai.Request ->
+          (Wai.Response -> m Wai.ResponseReceived) ->
+          m Wai.ResponseReceived
+        ) ->
+        m ()
+      runApplication app = do
+        withRunInIO $ \runInIO -> Warp.run 9099 $ \req respond -> do
+          let catchAppException act =
+                try act >>= \case
+                  Right a -> pure a
+                  Left (AppException err) -> do
+                    runInIO (logError err)
+                    respond (Wai.responseLBS Http.status500 [] "")
+          liftIO $ catchAppException (runInIO $ app req (\resp -> liftIO $ respond resp))
+
+  let appT :: AppT IO () = do
+        let h extra res = Wai.responseLBS Http.ok200 (("Content-Type", "text/html") : extra) res
+        runHandlers
+          runApplication
+          [ Handler
+              { path = "",
+                body =
+                  Body
+                    (pure ())
+                    (\((), _) -> pure $ h [] (renderHtml mainPage))
+              },
+            Handler
+              { path = "snips/table-opening-hours-last-week",
+                body =
+                  Body
+                    ((label @"ifModifiedSince" <$> parseIfModifiedSince))
+                    ( \(req', cache) -> do
+                        now <- liftIO getCurrentTime <&> mkSecondTime
+                        new <- updateCacheIfNewer now cache heatmap
+                        let cacheToHeaders =
+                              [ ("Last-Modified", new.lastModified & formatHeaderTime),
+                                ("Expires", new.until & formatHeaderTime),
+                                ( "Cache-Control",
+                                  let maxAge = new.until `diffSecondTime` now
+                                   in [fmt|max-age={maxAge & floor @NominalDiffTime @Int  & show}, immutable|]
+                                )
+                              ]
+                        if
+                            -- If the last cache update is newer or equal to the requested version, we can tell the browser it’s fine
+                            | Just modifiedSince <- req'.ifModifiedSince,
+                              modifiedSince >= new.lastModified ->
+                                pure $ Wai.responseLBS Http.status304 cacheToHeaders ""
+                            | otherwise ->
+                                pure $ h cacheToHeaders (new.result & toLazyBytes)
+                    )
+              }
+          ]
+
+  runReaderT (appT :: AppT IO ()).unAppT Context {..}
+  where
+    -- "https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Last-Modified#syntax"
+    headerFormat = "%a, %d %b %0Y %T GMT"
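+    -- e.g. "Wed, 21 Oct 2015 07:28:00 GMT"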
+    formatHeaderTime (SecondTime t) =
+      t
+        & Time.Format.formatTime
+          @UTCTime
+          Time.Format.defaultTimeLocale
+          headerFormat
+        & stringToText
+        & textToBytesUtf8
+    parseHeaderTime =
+      Field.utf8
+        >>> ( FieldParser $ \t ->
+                t
+                  & textToString
+                  & Time.Format.parseTimeM
+                    @Maybe
+                    @UTCTime
+                    {-no leading whitespace -} False
+                    Time.Format.defaultTimeLocale
+                    headerFormat
+                  & annotate [fmt|Cannot parse header timestamp "{t}"|]
+            )
+    parseIfModifiedSince :: Parse Wai.Request (Maybe SecondTime)
+    parseIfModifiedSince =
+      lmap
+        ( (.requestHeaders)
+            >>> findMaybe
+              ( \(h, v) ->
+                  if "If-Modified-Since" == CaseInsensitive.mk h then Just v else Nothing
+              )
+        )
+        (Parse.maybe $ Parse.fieldParser parseHeaderTime)
+        & rmap (fmap mkSecondTime)
+
+parseRequest :: (MonadThrow f, MonadIO f) => Otel.Span -> Parse from a -> from -> f a
+parseRequest span parser req =
+  Parse.runParse "Unable to parse the HTTP request" parser req
+    & assertM span id
+
+heatmap :: AppT IO ByteString
+heatmap = do
+  Http.httpBS [fmt|GET {mapallSpaceOla}|]
+    <&> (.responseBody)
+    <&> Soup.parseTags
+    <&> Soup.canonicalizeTags
+    <&> findHeatmap
+    <&> fromMaybe (htmlToTags [hsx|<p>Uh oh! could not fetch the table from <a href={mapallSpaceOla}>{mapallSpaceOla}</a></p>|])
+    <&> Soup.renderTags
+  where
+    firstSection f t = t & Soup.sections f & listToMaybe
+    match :: Soup.Tag ByteString -> Soup.Tag ByteString -> Bool
+    match x (t :: Soup.Tag ByteString) = (Soup.~==) @ByteString t x
+    findHeatmap t =
+      t
+        & firstSection (match (Soup.TagOpen ("") [("class", "heatmap")]))
+        >>= firstSection (match (Soup.TagOpen "table" []))
+        <&> getTable
+        <&> (<> htmlToTags [hsx|<figcaption>source: <a href={mapallSpaceOla} target="_blank">mapall.space</a></figcaption>|])
+        <&> wrapTagStream (T2 (label @"el" "figure") (label @"attrs" []))
+
+    -- get the table from opening tag to closing tag (allowing nested tables)
+    getTable = go 0
+      where
+        go _ [] = []
+        go d (el : els)
+          | match (Soup.TagOpen "table" []) el = el : go (d + 1) els
+          | match (Soup.TagClose "table") el = if d <= 1 then [el] else el : go (traceShowId $ d - 1) els
+          | otherwise = el : go d els
+
+    htmlToTags :: Html.Html -> [Soup.Tag ByteString]
+    htmlToTags h = h & Html.renderHtml & toStrictBytes & Soup.parseTags
+
+    -- TODO: this is dog-slow because of the whole list recreation!
+    wrapTagStream ::
+      T2 "el" ByteString "attrs" [Soup.Attribute ByteString] ->
+      [Soup.Tag ByteString] ->
+      [Soup.Tag ByteString]
+    wrapTagStream tag inner = (Soup.TagOpen (tag.el) tag.attrs : inner) <> [Soup.TagClose tag.el]
+
+main :: IO ()
+main =
+  runApp
+
+-- ( do
+--     -- todo: trace that to the init functions as well
+--     Otel.inSpan "whatcd-resolver main function" Otel.defaultSpanArguments $ do
+--       _ <- runTransaction migrate
+--       htmlUi
+-- )
+
+data Handler m = Handler
+  { path :: Text,
+    body :: Body m
+  }
+
+data Body m
+  = forall a.
+    Body
+      (Parse Wai.Request a)
+      ((a, TVar (Cache ByteString)) -> m Wai.Response)
+
+runHandlers ::
+  (Otel.MonadTracer m, MonadUnliftIO m, MonadThrow m) =>
+  -- ( (Wai.Request -> (Wai.Response -> m Wai.ResponseReceived) -> m Wai.ResponseReceived) ->
+  --   m ()
+  -- ) ->
+  ( (Wai.Request -> (Wai.Response -> m a) -> m a) ->
+    m ()
+  ) ->
+  [Handler m] ->
+  m ()
+runHandlers runApplication handlers = do
+  withCaches ::
+    [ T2
+        "handler"
+        (Handler m)
+        "cache"
+        (TVar (Cache ByteString))
+    ] <-
+    handlers
+      & traverse
+        ( \h -> do
+            cache <- liftIO $ newCache h.path "nothing yet"
+            pure $ T2 (label @"handler" h) (label @"cache" cache)
+        )
+  runApplication $ \req respond -> do
+    let mHandler =
+          withCaches
+            & List.find
+              ( \h ->
+                  (h.handler.path)
+                    == (req & Wai.pathInfo & Text.intercalate "/")
+              )
+    case mHandler of
+      Nothing -> respond $ Wai.responseLBS Http.status404 [] "nothing here (yet)"
+      Just handler -> do
+        inSpan' "TODO" $ \span -> do
+          case handler.handler.body of
+            Body parse runHandler -> do
+              req' <- req & parseRequest span parse
+              resp <- runHandler (req', handler.cache)
+              respond resp
+
+inSpan :: (MonadUnliftIO m, Otel.MonadTracer m) => Text -> m a -> m a
+inSpan name = Otel.inSpan name Otel.defaultSpanArguments
+
+inSpan' :: (MonadUnliftIO m, Otel.MonadTracer m) => Text -> (Otel.Span -> m a) -> m a
+-- inSpan' name =  Otel.inSpan' name Otel.defaultSpanArguments
+inSpan' _name act = act (error "todo telemetry disabled")
+
+zipT2 ::
+  forall l1 l2 t1 t2.
+  ( HasField l1 (T2 l1 [t1] l2 [t2]) [t1],
+    HasField l2 (T2 l1 [t1] l2 [t2]) [t2]
+  ) =>
+  T2 l1 [t1] l2 [t2] ->
+  [T2 l1 t1 l2 t2]
+zipT2 xs =
+  zipWith
+    (\t1 t2 -> T2 (label @l1 t1) (label @l2 t2))
+    (getField @l1 xs)
+    (getField @l2 xs)
+
+unzipT2 :: forall l1 t1 l2 t2. [T2 l1 t1 l2 t2] -> T2 l1 [t1] l2 [t2]
+unzipT2 xs = xs <&> toTup & unzip & fromTup
+  where
+    toTup :: forall a b. T2 a t1 b t2 -> (t1, t2)
+    toTup (T2 a b) = (getField @a a, getField @b b)
+    fromTup :: (a, b) -> T2 l1 a l2 b
+    fromTup (t1, t2) = T2 (label @l1 t1) (label @l2 t2)
+
+unzipT3 :: forall l1 t1 l2 t2 l3 t3. [T3 l1 t1 l2 t2 l3 t3] -> T3 l1 [t1] l2 [t2] l3 [t3]
+unzipT3 xs = xs <&> toTup & unzip3 & fromTup
+  where
+    toTup :: forall a b c. T3 a t1 b t2 c t3 -> (t1, t2, t3)
+    toTup (T3 a b c) = (getField @a a, getField @b b, getField @c c)
+    fromTup :: (a, b, c) -> T3 l1 a l2 b l3 c
+    fromTup (t1, t2, t3) = T3 (label @l1 t1) (label @l2 t2) (label @l3 t3)
+
+newtype Optional a = OptionalInternal (Maybe a)
+
+mkOptional :: a -> Optional a
+mkOptional defaultValue = OptionalInternal $ Just defaultValue
+
+defaults :: Optional a
+defaults = OptionalInternal Nothing
+
+instance HasField "withDefault" (Optional a) (a -> a) where
+  getField (OptionalInternal m) defaultValue = case m of
+    Nothing -> defaultValue
+    Just a -> a
+
+httpJson ::
+  ( MonadIO m,
+    MonadThrow m
+  ) =>
+  (Optional (Label "contentType" ByteString)) ->
+  Otel.Span ->
+  Json.Parse ErrorTree b ->
+  Http.Request ->
+  m b
+httpJson opts span parser req = do
+  let opts' = opts.withDefault (label @"contentType" "application/json")
+  Http.httpBS req
+    >>= assertM
+      span
+      ( \resp -> do
+          let statusCode = resp & Http.responseStatus & (.statusCode)
+              contentType =
+                resp
+                  & Http.responseHeaders
+                  & List.lookup "content-type"
+                  <&> Wai.parseContentType
+                  <&> (\(ct, _mimeAttributes) -> ct)
+          if
+              | statusCode == 200,
+                Just ct <- contentType,
+                ct == opts'.contentType ->
+                  Right $ (resp & Http.responseBody)
+              | statusCode == 200,
+                Just otherType <- contentType ->
+                  Left [fmt|Server returned a non-json body, with content-type "{otherType}"|]
+              | statusCode == 200,
+                Nothing <- contentType ->
+                  Left [fmt|Server returned a body with unspecified content type|]
+              | code <- statusCode -> Left [fmt|Server returned a non-200 error code, code {code}: {resp & showPretty}|]
+      )
+    >>= assertM
+      span
+      ( \body ->
+          Json.parseStrict parser body
+            & first (Json.parseErrorTree "could not parse the JSON response")
+      )
+
+assertM :: (MonadThrow f, MonadIO f) => Otel.Span -> (t -> Either ErrorTree a) -> t -> f a
+assertM span f v = case f v of
+  Right a -> pure a
+  Left err -> appThrowTree span err
+
+-- | UTC time that is only specific to the second
+newtype SecondTime = SecondTime {unSecondTime :: UTCTime}
+  deriving newtype (Show, Eq, Ord)
+
+mkSecondTime :: UTCTime -> SecondTime
+mkSecondTime utcTime = SecondTime utcTime {utctDayTime = Time.secondsToDiffTime $ floor utcTime.utctDayTime}
+
+diffSecondTime :: SecondTime -> SecondTime -> NominalDiffTime
+diffSecondTime (SecondTime a) (SecondTime b) = diffUTCTime a b
+
+data Cache a = Cache
+  { name :: !Text,
+    until :: !SecondTime,
+    lastModified :: !SecondTime,
+    result :: !a
+  }
+  deriving (Show)
+
+newCache :: Text -> a -> IO (TVar (Cache a))
+newCache name result = do
+  let until = mkSecondTime $ Time.UTCTime {utctDay = Time.ModifiedJulianDay 1, utctDayTime = 1}
+  let lastModified = until
+  newTVarIO $ Cache {..}
+
+updateCache :: (NFData a, Eq a) => SecondTime -> TVar (Cache a) -> a -> STM (Cache a)
+updateCache now cache result' = do
+  -- make sure we don’t hold onto the world by deepseq-ing the result (forcing it to normal form)
+  let !result = deepseq result' result'
+  let until = mkSecondTime $ (5 * 60) `addUTCTime` now.unSecondTime
+  !toWrite <- do
+    old <- readTVar cache
+    let name = old.name
+    -- only update the lastModified time iff the content changed (this is helpful for HTTP caching with If-Modified-Since)
+    if old.result == result
+      then do
+        let lastModified = old.lastModified
+        pure $ Cache {..}
+      else do
+        let lastModified = now
+        pure $ Cache {..}
+  _ <- writeTVar cache $! toWrite
+  pure toWrite
+
+-- | Run the given action iff the cache is stale, otherwise just return the item from the cache.
+updateCacheIfNewer :: (MonadUnliftIO m, NFData b, Eq b) => SecondTime -> TVar (Cache b) -> m b -> m (Cache b)
+updateCacheIfNewer now cache act = withRunInIO $ \runInIO -> do
+  old <- readTVarIO cache
+  if old.until < now
+    then do
+      res <- runInIO act
+      atomically $ updateCache now cache res
+    else pure old
+
+-- pgFormat <- readTools (label @"toolsEnvVar" "OPENLAB_TOOLS_TOOLS") (readTool "pg_format")
+-- let config = label @"logDatabaseQueries" LogDatabaseQueries
+-- pgConnPool <-
+--   Pool.newPool $
+--     Pool.defaultPoolConfig
+--       {- resource init action -} (Postgres.connectPostgreSQL (db & TmpPg.toConnectionString))
+--       {- resource destruction -} Postgres.close
+--       {- unusedResourceOpenTime -} 10
+--       {- max resources across all stripes -} 20
+-- transmissionSessionId <- newEmptyMVar
+-- let newAppT = do
+--       logInfo [fmt|Running with config: {showPretty config}|]
+--       logInfo [fmt|Connected to database at {db & TmpPg.toDataDirectory} on socket {db & TmpPg.toConnectionString}|]
+--       appT
+-- runReaderT newAppT.unAppT Context {..}
+
+withTracer :: (Otel.Tracer -> IO c) -> IO c
+withTracer f = do
+  setDefaultEnv "OTEL_SERVICE_NAME" "whatcd-resolver"
+  bracket
+    -- Install the SDK, pulling configuration from the environment
+    Otel.initializeGlobalTracerProvider
+    -- Ensure that any spans that haven't been exported yet are flushed
+    Otel.shutdownTracerProvider
+    -- Get a tracer so you can create spans
+    (\tracerProvider -> f $ Otel.makeTracer tracerProvider "openlab-tools" Otel.tracerOptions)
+
+setDefaultEnv :: String -> String -> IO ()
+setDefaultEnv envName defaultValue = do
+  Env.lookupEnv envName >>= \case
+    Just _env -> pure ()
+    Nothing -> Env.setEnv envName defaultValue
+
+data Context = Context
+  { tracer :: Otel.Tracer
+  }
+
+newtype AppT m a = AppT {unAppT :: ReaderT Context m a}
+  deriving newtype (Functor, Applicative, Monad, MonadIO, MonadUnliftIO, MonadThrow)
+
+data AppException = AppException Text
+  deriving stock (Show)
+  deriving anyclass (Exception)
+
+-- | A specialized variant of @addEvent@ that records attributes conforming to
+-- the OpenTelemetry specification's
+-- <https://github.com/open-telemetry/opentelemetry-specification/blob/49c2f56f3c0468ceb2b69518bcadadd96e0a5a8b/specification/trace/semantic_conventions/exceptions.md semantic conventions>
+--
+-- @since 0.0.1.0
+recordException ::
+  ( MonadIO m,
+    HasField "message" r Text,
+    HasField "type_" r Text
+  ) =>
+  Otel.Span ->
+  r ->
+  m ()
+recordException span dat = liftIO $ do
+  callStack <- GHC.Stack.whoCreated dat.message
+  newEventTimestamp <- Just <$> Otel.getTimestamp
+  Otel.addEvent span $
+    Otel.NewEvent
+      { newEventName = "exception",
+        newEventAttributes =
+          HashMap.fromList
+            [ ("exception.type", Otel.toAttribute @Text dat.type_),
+              ("exception.message", Otel.toAttribute @Text dat.message),
+              ("exception.stacktrace", Otel.toAttribute @Text $ Text.unlines $ map stringToText callStack)
+            ],
+        ..
+      }
+
+appThrowTree :: (MonadThrow m, MonadIO m) => Otel.Span -> ErrorTree -> m a
+appThrowTree span exc = do
+  let msg = prettyErrorTree exc
+  -- recordException
+  --   span
+  --   ( T2
+  --       (label @"type_" "AppException")
+  --       (label @"message" msg)
+  --   )
+  throwM $ AppException msg
+
+orAppThrowTree :: (MonadThrow m, MonadIO m) => Otel.Span -> Either ErrorTree a -> m a
+orAppThrowTree span = \case
+  Left err -> appThrowTree span err
+  Right a -> pure a
+
+instance (MonadIO m) => MonadLogger (AppT m) where
+  monadLoggerLog loc src lvl msg = liftIO $ Logger.defaultOutput IO.stderr loc src lvl (Logger.toLogStr msg)
+
+instance (Monad m) => Otel.MonadTracer (AppT m) where
+  getTracer = AppT $ asks (.tracer)
diff --git a/users/Profpatsch/read-http.nix b/users/Profpatsch/read-http.nix
new file mode 100644
index 0000000000..d9ad6fc30d
--- /dev/null
+++ b/users/Profpatsch/read-http.nix
@@ -0,0 +1,19 @@
+{ depot, pkgs, ... }:
+
+let
+
+  read-http = depot.nix.writers.rustSimple
+    {
+      name = "read-http";
+      dependencies = [
+        depot.third_party.rust-crates.ascii
+        depot.third_party.rust-crates.httparse
+        depot.users.Profpatsch.netencode.netencode-rs
+        depot.users.Profpatsch.arglib.netencode.rust
+        depot.users.Profpatsch.execline.exec-helpers
+      ];
+    }
+    (builtins.readFile ./read-http.rs);
+
+in
+read-http
diff --git a/users/Profpatsch/read-http.rs b/users/Profpatsch/read-http.rs
new file mode 100644
index 0000000000..2b24e6beb1
--- /dev/null
+++ b/users/Profpatsch/read-http.rs
@@ -0,0 +1,249 @@
+extern crate arglib_netencode;
+extern crate ascii;
+extern crate exec_helpers;
+extern crate httparse;
+extern crate netencode;
+
+use exec_helpers::{die_expected_error, die_temporary, die_user_error};
+use std::collections::HashMap;
+use std::io::{Read, Write};
+use std::os::unix::io::FromRawFd;
+
+use netencode::dec::Decoder;
+use netencode::{dec, T, U};
+
+enum What {
+    Request,
+    Response,
+}
+
+// Reads an HTTP request or response from stdin and writes all headers to stdout as a netencoded record.
+// The keys are text, but can be lists of text iff headers appear multiple times, so beware.
+fn main() -> std::io::Result<()> {
+    exec_helpers::no_args("read-http");
+
+    let args = dec::RecordDot {
+        field: "what",
+        inner: dec::OneOf {
+            list: vec!["request", "response"],
+            inner: dec::Text,
+        },
+    };
+    let what: What = match args.dec(arglib_netencode::arglib_netencode("read-http", None).to_u()) {
+        Ok("request") => What::Request,
+        Ok("response") => What::Response,
+        Ok(v) => panic!("shouldn’t happen!, value was: {}", v),
+        Err(dec::DecodeError(err)) => die_user_error("read-http", err),
+    };
+
+    fn read_stdin_to_complete<F>(mut parse: F) -> ()
+    where
+        F: FnMut(&[u8]) -> httparse::Result<usize>,
+    {
+        let mut res = httparse::Status::Partial;
+        loop {
+            if let httparse::Status::Complete(_) = res {
+                return;
+            }
+            let mut buf = [0; 2048];
+            match std::io::stdin().read(&mut buf[..]) {
+                Ok(size) => {
+                    if size == 0 {
+                        break;
+                    }
+                }
+                Err(err) => {
+                    die_temporary("read-http", format!("could not read from stdin, {:?}", err))
+                }
+            }
+            match parse(&buf) {
+                Ok(status) => {
+                    res = status;
+                }
+                Err(err) => {
+                    die_temporary("read-http", format!("httparse parsing failed: {:#?}", err))
+                }
+            }
+        }
+    }
+
+    fn normalize_headers<'a>(headers: &'a [httparse::Header]) -> HashMap<String, U<'a>> {
+        let mut res = HashMap::new();
+        for httparse::Header { name, value } in headers {
+            let val = ascii::AsciiStr::from_ascii(*value)
+                .expect(&format!(
+                    "read-http: we require header values to be ASCII, but the header {} was {:?}",
+                    name, value
+                ))
+                .as_str();
+            // lowercase the header names, since the standard doesn’t care
+            // and we want unique strings to match against
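+            // e.g. two `Set-Cookie` headers end up as a list under the "set-cookie" key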
+            let name_lower = name.to_lowercase();
+            match res.insert(name_lower, U::Text(val)) {
+                None => (),
+                Some(U::Text(t)) => {
+                    let name_lower = name.to_lowercase();
+                    let _ = res.insert(name_lower, U::List(vec![U::Text(t), U::Text(val)]));
+                    ()
+                }
+                Some(U::List(mut l)) => {
+                    let name_lower = name.to_lowercase();
+                    l.push(U::Text(val));
+                    let _ = res.insert(name_lower, U::List(l));
+                    ()
+                }
+                Some(o) => panic!("read-http: header not text nor list: {:?}", o),
+            }
+        }
+        res
+    }
+
+    // tries to read until the end of the http header (delimited by two CRLFs, i.e. "\r\n\r\n")
+    fn read_till_end_of_header<R: Read>(buf: &mut Vec<u8>, reader: R) -> Option<()> {
+        let mut chonker = Chunkyboi::new(reader, 4096);
+        loop {
+            // TODO: attacker can send looooong input, set upper maximum
+            match chonker.next() {
+                Some(Ok(chunk)) => {
+                    buf.extend_from_slice(&chunk);
+                    // check the whole buffer, since the terminator could span a chunk boundary
+                    if buf.windows(4).any(|c| c == b"\r\n\r\n") {
+                        return Some(());
+                    }
+                }
+                Some(Err(err)) => {
+                    die_temporary("read-http", format!("error reading from stdin: {:?}", err))
+                }
+                None => return None,
+            }
+        }
+    }
+
+    // max header size chosen arbitrarily
+    let mut headers = [httparse::EMPTY_HEADER; 128];
+    let stdin = std::io::stdin();
+
+    match what {
+        What::Request => {
+            let mut req = httparse::Request::new(&mut headers);
+            let mut buf: Vec<u8> = vec![];
+            match read_till_end_of_header(&mut buf, stdin.lock()) {
+                Some(()) => match req.parse(&buf) {
+                    Ok(httparse::Status::Complete(_body_start)) => {}
+                    Ok(httparse::Status::Partial) => {
+                        die_expected_error("read-http", "httparse should have gotten a full header")
+                    }
+                    Err(err) => die_expected_error(
+                        "read-http",
+                        format!("httparse response parsing failed: {:#?}", err),
+                    ),
+                },
+                None => die_expected_error(
+                    "read-http",
+                    format!("httparse end of stdin reached before able to parse request headers"),
+                ),
+            }
+            let method = req.method.expect("method must be filled on complete parse");
+            let path = req.path.expect("path must be filled on complete parse");
+            write_dict_req(method, path, &normalize_headers(req.headers))
+        }
+        What::Response => {
+            let mut resp = httparse::Response::new(&mut headers);
+            let mut buf: Vec<u8> = vec![];
+            match read_till_end_of_header(&mut buf, stdin.lock()) {
+                Some(()) => match resp.parse(&buf) {
+                    Ok(httparse::Status::Complete(_body_start)) => {}
+                    Ok(httparse::Status::Partial) => {
+                        die_expected_error("read-http", "httparse should have gotten a full header")
+                    }
+                    Err(err) => die_expected_error(
+                        "read-http",
+                        format!("httparse response parsing failed: {:#?}", err),
+                    ),
+                },
+                None => die_expected_error(
+                    "read-http",
+                    format!("httparse end of stdin reached before able to parse response headers"),
+                ),
+            }
+            let code = resp.code.expect("code must be filled on complete parse");
+            let reason = resp
+                .reason
+                .expect("reason must be filled on complete parse");
+            write_dict_resp(code, reason, &normalize_headers(resp.headers))
+        }
+    }
+}
+
+fn write_dict_req<'a, 'buf>(
+    method: &'buf str,
+    path: &'buf str,
+    headers: &'a HashMap<String, U<'a>>,
+) -> std::io::Result<()> {
+    let mut http = vec![("method", U::Text(method)), ("path", U::Text(path))]
+        .into_iter()
+        .collect();
+    write_dict(http, headers)
+}
+
+fn write_dict_resp<'a, 'buf>(
+    code: u16,
+    reason: &'buf str,
+    headers: &'a HashMap<String, U<'a>>,
+) -> std::io::Result<()> {
+    let http = vec![
+        ("status", U::N6(code as u64)),
+        ("status-text", U::Text(reason)),
+    ]
+    .into_iter()
+    .collect();
+    write_dict(http, headers)
+}
+
+fn write_dict<'buf, 'a>(
+    mut http: HashMap<&str, U<'a>>,
+    headers: &'a HashMap<String, U<'a>>,
+) -> std::io::Result<()> {
+    match http.insert(
+        "headers",
+        U::Record(
+            headers
+                .iter()
+                .map(|(k, v)| (k.as_str(), v.clone()))
+                .collect(),
+        ),
+    ) {
+        None => (),
+        Some(_) => panic!("read-http: headers already in dict"),
+    };
+    netencode::encode(&mut std::io::stdout(), &U::Record(http))?;
+    Ok(())
+}
+
+// iter helper
+// TODO: put into its own module
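+// Reads `inner` in chunks of `chunksize` bytes; each `next()` yields an owned copy of the bytes read.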
+struct Chunkyboi<T> {
+    inner: T,
+    buf: Vec<u8>,
+}
+
+impl<R: Read> Chunkyboi<R> {
+    fn new(inner: R, chunksize: usize) -> Self {
+        let buf = vec![0; chunksize];
+        Chunkyboi { inner, buf }
+    }
+}
+
+impl<R: Read> Iterator for Chunkyboi<R> {
+    type Item = std::io::Result<Vec<u8>>;
+
+    fn next(&mut self) -> Option<std::io::Result<Vec<u8>>> {
+        match self.inner.read(&mut self.buf) {
+            Ok(0) => None,
+            Ok(read) => {
+                // clone a new buffer so we can reuse the internal one
+                Some(Ok(self.buf[..read].to_owned()))
+            }
+            Err(err) => Some(Err(err)),
+        }
+    }
+}
diff --git a/users/Profpatsch/reverse-haskell-deps/README.md b/users/Profpatsch/reverse-haskell-deps/README.md
new file mode 100644
index 0000000000..efc288cae4
--- /dev/null
+++ b/users/Profpatsch/reverse-haskell-deps/README.md
@@ -0,0 +1,3 @@
+# reverse-haskell-deps
+
+Parse the HTML at `https://packdeps.haskellers.com/reverse` to get the data about Haskell package reverse dependencies in a structured way (they should just expose that as JSON tbh).
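+
+The output is a list of `(package name, reverse dependency count)` pairs, sorted by count; roughly (hypothetical numbers):
+
+```
+[("conduit", 1041), ("text", 4127), …]
+```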
diff --git a/users/Profpatsch/reverse-haskell-deps/ReverseHaskellDeps.hs b/users/Profpatsch/reverse-haskell-deps/ReverseHaskellDeps.hs
new file mode 100644
index 0000000000..0e18ce8a6b
--- /dev/null
+++ b/users/Profpatsch/reverse-haskell-deps/ReverseHaskellDeps.hs
@@ -0,0 +1,76 @@
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE MultiWayIf #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+
+module Main where
+
+import Data.ByteString qualified as ByteString
+import Data.Either
+import Data.List qualified as List
+import Data.Maybe
+import Data.Text (Text)
+import Data.Text qualified as Text
+import Data.Text.Encoding qualified
+import MyPrelude
+import Numeric.Natural
+import Text.HTML.TagSoup qualified as Tag
+import Text.Nicify
+import Text.Read qualified as Read
+
+parseNat :: Text -> Maybe Natural
+parseNat = Read.readMaybe . textToString
+
+printNice :: Show a => a -> IO ()
+printNice = putStrLn . nicify . show
+
+type Tag = Tag.Tag Text
+
+main :: IO ()
+main = do
+  reverseHtml <- readStdinUtf8
+  printNice $ List.sortOn snd $ packagesAndReverseDeps reverseHtml
+  where
+    readStdinUtf8 = bytesToTextUtf8Lenient <$> ByteString.getContents
+
+-- | reads the table provided by https://packdeps.haskellers.com/reverse
+-- figuring out all sections (starting with the link to the package name),
+-- then figuring out the name of the package and the first column,
+-- which is the number of reverse dependencies of the package
+packagesAndReverseDeps :: Text -> [(Text, Natural)]
+packagesAndReverseDeps reverseHtml = do
+  let tags = Tag.parseTags reverseHtml
+  let sections = Tag.partitions (isJust . reverseLink) tags
+  let sectionName [] = "<unknown section>"
+      sectionName (sect : _) = sect & reverseLink & fromMaybe "<unknown section>"
+  let sectionNames = map sectionName sections
+  mapMaybe
+    ( \(name :: Text, sect) -> do
+        reverseDeps <- firstNaturalNumber sect
+        pure (sectionPackageName name sect, reverseDeps) :: Maybe (Text, Natural)
+    )
+    $ zip sectionNames sections
+  where
+    reverseLink = \case
+      Tag.TagOpen "a" attrs -> findMaybe attrReverseLink attrs
+      _ -> Nothing
+
+    attrReverseLink = \case
+      ("href", lnk) ->
+        if
+            | "packdeps.haskellers.com/reverse/" `Text.isInfixOf` lnk -> Just lnk
+            | otherwise -> Nothing
+      _ -> Nothing
+
+    sectionPackageName :: Text -> [Tag] -> Text
+    sectionPackageName sectionName = \case
+      (_ : Tag.TagText name : _) -> name
+      _ -> sectionName
+
+    firstNaturalNumber :: [Tag] -> Maybe Natural
+    firstNaturalNumber =
+      findMaybe
+        ( \case
+            Tag.TagText t -> parseNat t
+            _ -> Nothing
+        )
diff --git a/users/Profpatsch/reverse-haskell-deps/default.nix b/users/Profpatsch/reverse-haskell-deps/default.nix
new file mode 100644
index 0000000000..b0a44420d7
--- /dev/null
+++ b/users/Profpatsch/reverse-haskell-deps/default.nix
@@ -0,0 +1,32 @@
+{ depot, pkgs, ... }:
+
+# Parses https://packdeps.haskellers.com/reverse
+# and outputs the number of reverse dependencies of each hackage package.
+
+let
+
+  rev = depot.nix.writeExecline "reverse-haskell-deps" { } [
+    "pipeline"
+    [
+      "${pkgs.curl}/bin/curl"
+      "-L"
+      "https://packdeps.haskellers.com/reverse"
+    ]
+    rev-hs
+
+  ];
+
+  rev-hs = pkgs.writers.writeHaskell "revers-haskell-deps-hs"
+    {
+      libraries = [
+        depot.users.Profpatsch.my-prelude
+        pkgs.haskellPackages.nicify-lib
+        pkgs.haskellPackages.tagsoup
+      ];
+      ghcArgs = [ "-threaded" ];
+    }
+    ./ReverseHaskellDeps.hs;
+
+
+in
+rev
diff --git a/users/Profpatsch/reverse-haskell-deps/reverse-haskell-deps.cabal b/users/Profpatsch/reverse-haskell-deps/reverse-haskell-deps.cabal
new file mode 100644
index 0000000000..4792f52adf
--- /dev/null
+++ b/users/Profpatsch/reverse-haskell-deps/reverse-haskell-deps.cabal
@@ -0,0 +1,16 @@
+cabal-version:      3.0
+name:               reverse-haskell-deps
+version:            0.1.0.0
+author:             Profpatsch
+maintainer:         mail@profpatsch.de
+
+library
+    exposed-modules:          ReverseHaskellDeps
+
+    build-depends:
+        base >=4.15 && <5,
+        my-prelude,
+        tagsoup,
+        nicify-lib
+
+    default-language: Haskell2010
diff --git a/users/Profpatsch/shell.nix b/users/Profpatsch/shell.nix
new file mode 100644
index 0000000000..b5095d476f
--- /dev/null
+++ b/users/Profpatsch/shell.nix
@@ -0,0 +1,110 @@
+# generic shell.nix that can be used for most of my projects here,
+# until I figure out a way to have composable shells.
+let root = (import ../../. { }); in
+{ pkgs ? root.third_party.nixpkgs, depot ? root, ... }:
+
+pkgs.mkShell {
+
+  buildInputs = [
+    pkgs.sqlite-interactive
+    pkgs.sqlite-utils
+    pkgs.haskell-language-server
+    pkgs.cabal-install
+    (pkgs.haskellPackages.ghcWithHoogle (h: [
+      h.async
+      h.aeson-better-errors
+      h.blaze-html
+      h.conduit-extra
+      h.error
+      h.monad-logger
+      h.pa-field-parser
+      h.pa-label
+      h.pa-json
+      h.pa-pretty
+      h.pa-run-command
+      h.ihp-hsx
+      h.PyF
+      h.foldl
+      h.unliftio
+      h.xml-conduit
+      h.wai
+      h.wai-extra
+      h.warp
+      h.profunctors
+      h.semigroupoids
+      h.validation-selective
+      h.free
+      h.cryptonite-conduit
+      h.sqlite-simple
+      h.hedgehog
+      h.http-conduit
+      h.wai-conduit
+      h.nonempty-containers
+      h.deriving-compat
+      h.unix
+      h.tagsoup
+      h.attoparsec
+      h.iCalendar
+      h.case-insensitive
+      h.hscolour
+      h.nicify-lib
+      h.hspec
+      h.hspec-expectations-pretty-diff
+      h.tmp-postgres
+      h.postgresql-simple
+      h.resource-pool
+      h.xmonad-contrib
+      h.hs-opentelemetry-sdk
+      h.punycode
+    ]))
+
+    pkgs.rustup
+    pkgs.pkg-config
+    pkgs.fuse
+    pkgs.postgresql_14
+    pkgs.nodejs
+    pkgs.ninja
+    pkgs.s6
+    pkgs.caddy
+
+    (depot.nix.binify {
+      name = "nix-run";
+      exe = depot.users.Profpatsch.nix-tools.nix-run;
+    })
+  ];
+
+  DEPOT_ROOT = toString ./../..;
+  PROFPATSCH_ROOT = toString ./.;
+
+  WHATCD_RESOLVER_TOOLS = pkgs.linkFarm "whatcd-resolver-tools" [
+    {
+      name = "pg_format";
+      path = "${pkgs.pgformatter}/bin/pg_format";
+    }
+  ];
+
+  # DECLIB_MASTODON_ACCESS_TOKEN read from `pass` in .envrc.
+
+  RUSTC_WRAPPER =
+    let
+      wrapperArgFile = libs: pkgs.writeText "rustc-wrapper-args"
+        (pkgs.lib.concatStringsSep
+          "\n"
+          (pkgs.lib.concatLists
+            (map
+              (lib: [
+                "-L"
+                "${pkgs.lib.getLib lib}/lib"
+              ])
+              libs)));
+    in
+    depot.nix.writeExecline "rustc-wrapper" { readNArgs = 1; } [
+      "$1"
+      "$@"
+      "@${wrapperArgFile [
+      depot.third_party.rust-crates.nom
+    ]}"
+    ];
+
+}
diff --git a/users/Profpatsch/shortcuttable/default.nix b/users/Profpatsch/shortcuttable/default.nix
new file mode 100644
index 0000000000..13ba220400
--- /dev/null
+++ b/users/Profpatsch/shortcuttable/default.nix
@@ -0,0 +1,172 @@
+{ depot, lib, pkgs, ... }:
+
+let
+  # run prog... and restart whenever SIGHUP is received
+  #
+  # this is useful for binding to a shortcut.
+  #
+  # Unfortunately, this requires a bunch of workarounds for the semantics of `trap`,
+  # but the general idea of bundling subprocesses with `setsid` is somewhat sound.
+  runShortcuttable =
+    depot.nix.writeExecline "run-shortcuttable" { } [
+      "importas"
+      "-i"
+      "run"
+      "XDG_RUNTIME_DIR"
+      "if"
+      [ "mkdir" "-p" "\${run}/shortcuttable/test" ]
+      "getpid"
+      "-E"
+      "controlpid"
+      savePid
+      "\${run}/shortcuttable/test/control"
+      "$controlpid"
+
+      # start the program
+      "background"
+      [
+        startSaveSID
+        "\${run}/shortcuttable/test/running-sid"
+        "$@"
+      ]
+
+      "trap"
+      [
+        "SIGHUP"
+        [
+          "if"
+          [ "echo" "got hup" ]
+          "if"
+          [
+            "if"
+            [ "echo" "killing our child processes" ]
+            "envfile"
+            "\${run}/shortcuttable/test/running-sid"
+            "importas"
+            "-ui"
+            "child_sid"
+            "pid"
+            "foreground"
+            [ "ps" "-f" "--sid" "$child_sid" ]
+            ctrlCCtrlDSid
+            "$child_sid"
+          ]
+          "if"
+          [ "echo" "restarting into" "$@" ]
+          "background"
+          [
+            startSaveSID
+            "\${run}/shortcuttable/test/running-sid"
+            "$@"
+          ]
+        ]
+        "SIGTERM"
+        [
+          (killShortcuttable { signal = "TERM"; })
+          "\${run}/shortcuttable/test/running-sid"
+          "\${run}/shortcuttable/test/exit"
+        ]
+        "SIGINT"
+        [
+          (killShortcuttable { signal = "INT"; })
+          "\${run}/shortcuttable/test/running-sid"
+          "\${run}/shortcuttable/test/exit"
+        ]
+      ]
+      depot.users.Profpatsch.execline.setsid
+      "child_sid"
+      "getpid"
+      "-E"
+      "exitpid"
+      savePid
+      "\${run}/shortcuttable/test/exit"
+      "$exitpid"
+      "sleep"
+      "infinity"
+    ];
+
+  killShortcuttable = { signal }: depot.nix.writeExecline "kill-shortcuttable" { readNArgs = 2; } [
+    "if"
+    [ "echo" "got SIG${signal}, quitting" ]
+    "if"
+    [
+      "envfile"
+      "$1"
+      "importas"
+      "-ui"
+      "child_sid"
+      "pid"
+      "foreground"
+      [ "ps" "-f" "--sid" "$child_sid" ]
+      ctrlCCtrlDSid
+      "$child_sid"
+    ]
+    "if"
+    [ "echo" "killing shortcuttable loop" ]
+    "envfile"
+    "$2"
+    "importas"
+    "-ui"
+    "trap_pid"
+    "pid"
+    "foreground"
+    [ "ps" "-fp" "$trap_pid" ]
+    "kill"
+    "--signal"
+    signal
+    "$trap_pid"
+  ];
+
+  savePid = depot.nix.writeExecline "save-pid" { readNArgs = 2; } [
+    "if"
+    [ "echo" "saving process:" ]
+    "if"
+    [ "ps" "-fp" "$2" ]
+    "if"
+    [
+      "redirfd"
+      "-w"
+      "1"
+      "$1"
+      "printf"
+      "pid = %s\n"
+      "$2"
+    ]
+    "$@"
+  ];
+
+  # try to kill process, first with SIGTERM then SIGQUIT (in case it’s a repl)
+  ctrlCCtrlDSid = depot.nix.writeExecline "ctrl-c-ctrl-d" { readNArgs = 1; } [
+    "ifelse"
+    "-n"
+    [ "kill" "--signal" "TERM" "--" "-\${1}" ]
+    [
+      "if"
+      [ "echo" "could not kill via SIGTERM, trying SIGQUIT …" ]
+      "ifelse"
+      "-n"
+      [ "kill" "--signal" "QUIT" "--" "-\${1}" ]
+      [ "echo" "SIGQUIT failed as well, keeping it running" ]
+      "$@"
+    ]
+    "$@"
+  ];
+
+  startSaveSID = depot.nix.writeExecline "start-save-sid" { readNArgs = 1; } [
+    depot.users.Profpatsch.execline.setsid
+    "child_sid"
+    "importas"
+    "-ui"
+    "child_sid"
+    "child_sid"
+    "if"
+    [ "echo" "children sid:" "$child_sid" ]
+    savePid
+    "$1"
+    "$child_sid"
+    "$@"
+  ];
+
+
+in
+runShortcuttable
diff --git a/users/Profpatsch/sync-abfall-ics-aichach-friedberg/README.md b/users/Profpatsch/sync-abfall-ics-aichach-friedberg/README.md
new file mode 100644
index 0000000000..e0a6aa2fb8
--- /dev/null
+++ b/users/Profpatsch/sync-abfall-ics-aichach-friedberg/README.md
@@ -0,0 +1,3 @@
+# sync-abfall-ics-aichach-friedberg
+
+A small tool to sync the ICS files for the local trash collection times at https://abfallwirtschaft.lra-aic-fdb.de/
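+
+Configuration is read from environment variables (see `sync-ics-to-dir.py`): `ICS_DIRECTORY` (target directory for the `.ics` files, required), `ICS_START_YEAR` (first year to fetch, defaults to `2022`) and `ICS_FUTURE_YEARS` (how many future years to fetch, defaults to `2`).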
diff --git a/users/Profpatsch/sync-abfall-ics-aichach-friedberg/default.nix b/users/Profpatsch/sync-abfall-ics-aichach-friedberg/default.nix
new file mode 100644
index 0000000000..739274cb6f
--- /dev/null
+++ b/users/Profpatsch/sync-abfall-ics-aichach-friedberg/default.nix
@@ -0,0 +1,31 @@
+{ depot, pkgs, ... }:
+
+let
+  sync-to-dir = depot.users.Profpatsch.writers.python3
+    {
+      name = "sync-ics-to-dir";
+      libraries = (py: [
+        py.httpx
+        py.icalendar
+      ]);
+    } ./sync-ics-to-dir.py;
+
+  config =
+    depot.users.Profpatsch.importDhall.importDhall
+      {
+        root = ./..;
+        files = [
+          "sync-abfall-ics-aichach-friedberg/ics-to-caldav.dhall"
+          "dhall/lib.dhall"
+          "ini/ini.dhall"
+        ];
+        main = "sync-abfall-ics-aichach-friedberg/ics-to-caldav.dhall";
+        deps = [
+        ];
+      }
+      depot.users.Profpatsch.ini.externs;
+
+
+
+in
+{ inherit config; }
diff --git a/users/Profpatsch/sync-abfall-ics-aichach-friedberg/ics-to-caldav.dhall b/users/Profpatsch/sync-abfall-ics-aichach-friedberg/ics-to-caldav.dhall
new file mode 100644
index 0000000000..2a7ac84979
--- /dev/null
+++ b/users/Profpatsch/sync-abfall-ics-aichach-friedberg/ics-to-caldav.dhall
@@ -0,0 +1,139 @@
+let Ini = ../ini/ini.dhall
+
+let Lib = ../dhall/lib.dhall
+
+in  \(Ini/externs : Ini.Externs) ->
+      let Vdirsyncer =
+            let StorageType =
+                  < FileSystem : { path : Text, fileext : < ICS > }
+                  | Http : { url : Text }
+                  >
+
+            let Collection = < FromA | FromB | Collection : Text >
+
+            let Collections =
+                  < Unspecified | TheseCollections : List Collection >
+
+            let Storage = { storageName : Text, storage : StorageType }
+
+            in  { Storage
+                , StorageType
+                , Collection
+                , Collections
+                , Pair =
+                    { pairName : Text
+                    , a : Storage
+                    , b : Storage
+                    , collections : Collections
+                    }
+                }
+
+      let toIniSections
+          : Vdirsyncer.Pair -> Ini.Sections
+          = \(pair : Vdirsyncer.Pair) ->
+              let
+                  -- we assume the names are [a-zA-Z_]
+                  renderList =
+                    \(l : List Text) ->
+                          "["
+                      ++  Lib.Text/concatMapSep
+                            ", "
+                            Text
+                            (\(t : Text) -> "\"${t}\"")
+                            l
+                      ++  "]"
+
+              in  let nv = \(name : Text) -> \(value : Text) -> { name, value }
+
+                  let mkStorage =
+                        \(storage : Vdirsyncer.Storage) ->
+                          { name = "storage ${storage.storageName}"
+                          , value =
+                              merge
+                                { FileSystem =
+                                    \ ( fs
+                                      : { path : Text, fileext : < ICS > }
+                                      ) ->
+                                      [ nv "type" "filesystem"
+                                      , nv
+                                          "fileext"
+                                          (merge { ICS = ".ics" } fs.fileext)
+                                      , nv "path" fs.path
+                                      ]
+                                , Http =
+                                    \(http : { url : Text }) ->
+                                      [ nv "type" "http", nv "url" http.url ]
+                                }
+                                storage.storage
+                          }
+
+                  in  [ { name = "pair ${pair.pairName}"
+                        , value =
+                          [ nv "a" pair.a.storageName
+                          , nv "b" pair.b.storageName
+                          , nv
+                              "collections"
+                              ( merge
+                                  { Unspecified = "none"
+                                  , TheseCollections =
+                                      \(colls : List Vdirsyncer.Collection) ->
+                                        renderList
+                                          ( Lib.List/map
+                                              Vdirsyncer.Collection
+                                              Text
+                                              ( \ ( coll
+                                                  : Vdirsyncer.Collection
+                                                  ) ->
+                                                  merge
+                                                    { FromA = "from a"
+                                                    , FromB = "from b"
+                                                    , Collection =
+                                                        \(t : Text) -> t
+                                                    }
+                                                    coll
+                                              )
+                                              colls
+                                          )
+                                  }
+                                  pair.collections
+                              )
+                          ]
+                        }
+                      , mkStorage pair.a
+                      , mkStorage pair.b
+                      ]
+
+      in  { example =
+              Ini/externs.renderIni
+                ( Ini.appendInis
+                    ( Lib.List/map
+                        Vdirsyncer.Pair
+                        Ini.Ini
+                        ( \(pair : Vdirsyncer.Pair) ->
+                            { globalSection = [] : Ini.Section
+                            , sections = toIniSections pair
+                            }
+                        )
+                        (   [ { pairName = "testPair"
+                              , a =
+                                { storageName = "mystor"
+                                , storage =
+                                    Vdirsyncer.StorageType.FileSystem
+                                      { path = "./test-ics"
+                                      , fileext = < ICS >.ICS
+                                      }
+                                }
+                              , b =
+                                { storageName = "mystor"
+                                , storage =
+                                    Vdirsyncer.StorageType.Http
+                                      { url = "https://profpatsch.de" }
+                                }
+                              , collections = Vdirsyncer.Collections.Unspecified
+                              }
+                            ]
+                          : List Vdirsyncer.Pair
+                        )
+                    )
+                )
+          }
diff --git a/users/Profpatsch/sync-abfall-ics-aichach-friedberg/sync-ics-to-dir.py b/users/Profpatsch/sync-abfall-ics-aichach-friedberg/sync-ics-to-dir.py
new file mode 100644
index 0000000000..4af3b9fb85
--- /dev/null
+++ b/users/Profpatsch/sync-abfall-ics-aichach-friedberg/sync-ics-to-dir.py
@@ -0,0 +1,133 @@
+# horrible little module that fetches ICS files for the local public trash collection service.
+#
+# It tries its best to not overwrite existing ICS files in case the upstream goes down
+# or returns empty ICS files.
+import sys
+import httpx
+import asyncio
+import icalendar
+from datetime import datetime
+import syslog
+import os.path
+
+# Internal id for the street (extracted from the ics download url)
+ortsteil_id = "e9c32ab3-df25-4660-b88e-abda91897d7a"
+
+# They are using a numeric encoding to refer to different kinds of trash
+fraktionen = {
+    "restmüll": "1",
+    "bio": "5",
+    "papier": "7",
+    "gelbe_tonne": "13",
+    "problemmüllsammlung": "20"
+}
+
+def ics_url(year):
+    frakt = ','.join(fraktionen.values())
+    return f'https://awido.cubefour.de/Customer/aic-fdb/KalenderICS.aspx?oid={ortsteil_id}&jahr={year}&fraktionen={frakt}&reminder=1.12:00'
+
+def fetchers_for_years(start_year, no_of_years_in_future):
+    """given a starting year, and a number of years in the future,
+    return the years for which to fetch ics files"""
+    current_year = datetime.now().year
+    max_year = current_year + no_of_years_in_future
+    return {
+        "passed_years": range(start_year, current_year),
+        "this_and_future_years": range(current_year, 1 + max_year)
+    }
+
+async def fetch_ics(c, url):
+    """fetch an ICS file from an URL"""
+    try:
+        resp = await c.get(url)
+    except Exception as e:
+        return { "ics_does_not_exist_exc": e }
+
+    if resp.is_error:
+        return { "ics_does_not_exist": resp }
+    else:
+        try:
+            ics = icalendar.Calendar.from_ical(resp.content)
+            return { "ics": { "ics_parsed": ics, "ics_bytes": resp.content } }
+        except ValueError as e:
+            return { "ics_cannot_be_parsed": e }
+
+def ics_has_events(ics):
+    """Determine if there is any event in the ICS, otherwise we can assume it’s an empty file"""
+    for item in ics.walk():
+        if isinstance(item, icalendar.Event):
+            return True
+    return False
+
+async def write_nonempty_ics(directory, year, ics):
+    # only overwrite if the new ics has any events
+    if ics_has_events(ics['ics_parsed']):
+        path = os.path.join(directory, f"{year}.ics")
+        with open(path, "wb") as f:
+            f.write(ics['ics_bytes'])
+            info(f"wrote ics for year {year} to file {path}")
+    else:
+        info(f"ics for year {year} was empty, skipping")
+
+
+def main():
+    ics_directory = os.getenv("ICS_DIRECTORY", None)
+    if not ics_directory:
+        critical("please set ICS_DIRECTORY")
+    start_year = int(os.getenv("ICS_START_YEAR", 2022))
+    future_years = int(os.getenv("ICS_FUTURE_YEARS", 2))
+
+    years = fetchers_for_years(start_year, no_of_years_in_future=future_years)
+
+
+    async def go():
+        async with httpx.AsyncClient(follow_redirects=True) as c:
+            info(f"fetching ics for passed years: {years['passed_years']}")
+            for year in years["passed_years"]:
+                match await fetch_ics(c, ics_url(year)):
+                    case { "ics_does_not_exist_exc": error }:
+                       warn(f"The ics for the year {year} is gone, error when requesting: {error} for url {ics_url(year)}")
+                    case { "ics_does_not_exist": resp }:
+                       warn(f"The ics for the year {year} is gone, server returned status {resp.status} for url {ics_url(year)}")
+                    case { "ics_cannot_be_parsed": error }:
+                       warn(f"The returned ICS could not be parsed: {error} for url {ics_url(year)}")
+                    case { "ics": ics }:
+                       info(f"fetched ics from {ics_url(year)}")
+                       await write_nonempty_ics(ics_directory, year, ics)
+                    case _:
+                       critical("unknown case for ics result")
+
+
+            info(f"fetching ics for current and upcoming years: {years['this_and_future_years']}")
+            for year in years["this_and_future_years"]:
+                match await fetch_ics(c, ics_url(year)):
+                    case { "ics_does_not_exist_exc": error }:
+                       critical(f"The ics for the year {year} is not available, error when requesting: {error} for url {ics_url(year)}")
+                    case { "ics_does_not_exist": resp }:
+                       critical(f"The ics for the year {year} is not available, server returned status {resp.status} for url {ics_url(year)}")
+                    case { "ics_cannot_be_parsed": error }:
+                       critical(f"The returned ICS could not be parsed: {error} for url {ics_url(year)}")
+                    case { "ics": ics }:
+                       info(f"fetched ics from {ics_url(year)}")
+                       await write_nonempty_ics(ics_directory, year, ics)
+                    case _:
+                       critical("unknown case for ics result")
+
+    asyncio.run(go())
+
+def info(msg):
+    syslog.syslog(syslog.LOG_INFO, msg)
+
+def critical(msg):
+    syslog.syslog(syslog.LOG_CRIT, msg)
+    sys.exit(1)
+
+def warn(msg):
+    syslog.syslog(syslog.LOG_WARNING, msg)
+
+def debug(msg):
+    syslog.syslog(syslog.LOG_DEBUG, msg)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/users/Profpatsch/tagtime/README.md b/users/Profpatsch/tagtime/README.md
new file mode 100644
index 0000000000..ab2c7d14e5
--- /dev/null
+++ b/users/Profpatsch/tagtime/README.md
@@ -0,0 +1,18 @@
+# tagtime reimplementation
+
+What’s great about the original perl tagtime?
+
+* timestamps are deterministic from the beginning (keep; see the sketch at the end of this README)
+* the tagging system should just work (tm)
+
+What are the problems with the original perl tagtime?
+
+* it uses a bad, arbitrary file format -> sqlite3
+* the query window does not time out, so it’s easy to miss that it’s open (often hidden behind another window), and then the following pings might never appear
+* There’s a bug with tags containing a `.` -> sqlite3
+
+What would be cool to have?
+
+* multi-entry mode (ping on phone and laptop, and eventually merge the replies, since they apply to the same deterministic timestamps)
+* simplifying reporting based on fuzzy matching & history
+* auto-generate nice time reports with hours for work items
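+
+As a minimal sketch of what “deterministic from the beginning” means (the constants and the use of `System.Random` here are made up; the perl original uses its own seeded RNG, but the idea is the same):
+
+```haskell
+import System.Random (StdGen, mkStdGen, randomR)
+
+-- Every client derives the same infinite ping schedule from a fixed
+-- seed and start time, so replies from several devices can later be
+-- merged by timestamp.
+pings :: Int -> Double -> Double -> [Double]
+pings seed start meanGap = go (mkStdGen seed) start
+  where
+    go g t =
+      let (u, g') = randomR (0, 1) g
+          -- exponential gaps make the pings a Poisson process
+          gap = negate meanGap * log (1 - u)
+       in (t + gap) : go g' (t + gap)
+```
+
+Because phone and laptop would compute the same schedule from the same seed, merging replies in multi-entry mode only has to match up timestamps.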
diff --git a/users/Profpatsch/toINI.nix b/users/Profpatsch/toINI.nix
new file mode 100644
index 0000000000..537505d30b
--- /dev/null
+++ b/users/Profpatsch/toINI.nix
@@ -0,0 +1,79 @@
+{ lib, ... }:
+let
+  /* Generate an INI-style config file from an attrset
+   * specifying the global section (no header), and a
+   * list of sections which contain name/value pairs.
+   *
+   * generators.toINI {} {
+   *   globalSection = [
+   *     { name = "someGlobalKey"; value = "hi"; }
+   *   ];
+   *   sections = [
+   *     { name = "foo"; value = [
+   *         { name = "hi"; value = "${pkgs.hello}"; }
+   *         { name = "ciao"; value = "bar"; }
+   *       ];
+   *     }
+   *     { name = "baz";
+   *       value = [ { name = "also, integers"; value = 42; } ];
+   *     }
+   *   ];
+   * }
+   *
+   *> someGlobalKey=hi
+   *>
+   *> [foo]
+   *> hi=/nix/store/y93qql1p5ggfnaqjjqhxcw0vqw95rlz0-hello-2.10
+   *> ciao=bar
+   *>
+   *> [baz]
+   *> also, integers=42
+   *>
+   *
+   * The mk* configuration attributes can generically change
+   * the way sections and key-value strings are generated.
+   *
+   * Order of the sections and of keys is preserved,
+   * duplicate keys are allowed.
+   */
+  toINI =
+    {
+      # apply transformations (e.g. escapes) to section names
+      mkSectionName ? (name: lib.strings.escape [ "[" "]" ] name)
+    , # format a setting line from key and value
+      mkKeyValue ? lib.generators.mkKeyValueDefault { } "="
+    ,
+    }: { globalSection, sections }:
+    let
+      mkSection = sectName: sectValues: ''
+        [${mkSectionName sectName}]
+      '' + toKeyValue { inherit mkKeyValue; } sectValues;
+      # map input to ini sections
+      mkSections = lib.strings.concatMapStringsSep "\n"
+        ({ name, value }: mkSection name value)
+        sections;
+      mkGlobalSection =
+        if globalSection == [ ]
+        then ""
+        else toKeyValue { inherit mkKeyValue; } globalSection
+          + "\n";
+    in
+    mkGlobalSection
+    + mkSections;
+
+  /* Generate a name-value-style config file from a list.
+   *
+   * mkKeyValue is the same as in toINI.
+   */
+  toKeyValue =
+    { mkKeyValue ? lib.generators.mkKeyValueDefault { } "="
+    ,
+    }:
+    let
+      mkLine = k: v: mkKeyValue k v + "\n";
+      mkLines = k: v: [ (mkLine k v) ];
+    in
+    nameValues: lib.strings.concatStrings (lib.concatLists (map ({ name, value }: mkLines name value) nameValues));
+
+in
+toINI
diff --git a/users/Profpatsch/tree-sitter.nix b/users/Profpatsch/tree-sitter.nix
new file mode 100644
index 0000000000..2224da2a3b
--- /dev/null
+++ b/users/Profpatsch/tree-sitter.nix
@@ -0,0 +1,209 @@
+{ depot, pkgs, lib, ... }:
+
+let
+  bins = depot.nix.getBins pkgs.coreutils [ "head" "printf" "cat" ]
+    // depot.nix.getBins pkgs.ncurses [ "tput" ]
+    // depot.nix.getBins pkgs.bc [ "bc" ]
+    // depot.nix.getBins pkgs.ocamlPackages.sexp [ "sexp" ];
+
+  print-ast = depot.nix.writers.rustSimple
+    {
+      name = "print-ast";
+      dependencies = with depot.third_party.rust-crates; [
+        libloading
+        tree-sitter
+      ];
+    } ''
+    extern crate libloading;
+    extern crate tree_sitter;
+    use std::mem;
+    use std::io::{Read};
+    use libloading::{Library, Symbol};
+    use tree_sitter::{Language, Parser};
+
+    /// Load the shared lib FILE and return the language under SYMBOL-NAME.
+    /// Inspired by the rust source of emacs-tree-sitter.
+    fn _load_language(file: String, symbol_name: String) -> Result<Language, libloading::Error> {
+        let lib = Library::new(file)?;
+        let tree_sitter_lang: Symbol<'_, unsafe extern "C" fn() -> _> =
+            unsafe { lib.get(symbol_name.as_bytes())? };
+        let language: Language = unsafe { tree_sitter_lang() };
+        // Avoid segmentation fault by not unloading the lib, as language is a static piece of data.
+        // TODO: Attach an Rc<Library> to Language instead.
+        mem::forget(lib);
+        Ok(language)
+    }
+
+    fn main() {
+      let mut args = std::env::args();
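+      // n.b. `Iterator::nth` consumes elements, so these three calls
+      // read argv[1], argv[2] and argv[3] in order.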
+      let so = args.nth(1).unwrap();
+      let symbol_name = args.nth(0).unwrap();
+      let file = args.nth(0).unwrap();
+      let mut parser = Parser::new();
+      let lang = _load_language(so, symbol_name).unwrap();
+      parser.set_language(lang).unwrap();
+      let bytes = std::fs::read(&file).unwrap();
+      print!("{}", parser.parse(&bytes, None).unwrap().root_node().to_sexp());
+    }
+
+
+  '';
+
+  tree-sitter-nix = buildTreeSitterGrammar {
+    language = "tree-sitter-nix";
+    source = pkgs.fetchFromGitHub {
+      owner = "cstrahan";
+      repo = "tree-sitter-nix";
+      rev = "791b5ff0e4f0da358cbb941788b78d436a2ca621";
+      sha256 = "1y5b3wh3fcmbgq8r2i97likzfp1zp02m58zacw5a1cjqs5raqz66";
+    };
+  };
+
+  watch-file-modified = depot.nix.writers.rustSimple
+    {
+      name = "watch-file-modified";
+      dependencies = [
+        depot.third_party.rust-crates.inotify
+        depot.users.Profpatsch.netstring.rust-netstring
+      ];
+    } ''
+    extern crate inotify;
+    extern crate netstring;
+    use inotify::{EventMask, WatchMask, Inotify};
+    use std::io::Write;
+
+    fn main() {
+        let mut inotify = Inotify::init()
+            .expect("Failed to initialize inotify");
+
+        let file = std::env::args().nth(1).unwrap();
+
+        let file_watch = inotify
+            .add_watch(
+                &file,
+                WatchMask::MODIFY
+            )
+            .expect("Failed to add inotify watch");
+
+        let mut buffer = [0u8; 4096];
+        loop {
+            let events = inotify
+                .read_events_blocking(&mut buffer)
+                .expect("Failed to read inotify events");
+
+            for event in events {
+                if event.wd == file_watch {
+                  std::io::stdout()
+                      .write_all(&netstring::to_netstring(file.as_bytes()))
+                      .expect("could not write to stdout");
+                  std::io::stdout().flush().expect("could not flush stdout");
+                }
+            }
+        }
+    }
+
+  '';
+
+  # clear screen and set LINES and COLUMNS to terminal height & width
+  clear-screen = depot.nix.writeExecline "clear-screen" { } [
+    "if"
+    [ bins.tput "clear" ]
+    "backtick"
+    "-in"
+    "LINES"
+    [ bins.tput "lines" ]
+    "backtick"
+    "-in"
+    "COLUMNS"
+    [ bins.tput "cols" ]
+    "$@"
+  ];
+
+  print-nix-file = depot.nix.writeExecline "print-nix-file" { readNArgs = 1; } [
+    "pipeline"
+    [ print-ast "${tree-sitter-nix}/parser" "tree_sitter_nix" "$1" ]
+    "pipeline"
+    [ bins.sexp "print" ]
+    clear-screen
+    "importas"
+    "-ui"
+    "lines"
+    "LINES"
+    "backtick"
+    "-in"
+    "ls"
+    [
+      "pipeline"
+      # when you pull out bc to decrement an integer it’s time to switch to python lol
+      [ bins.printf "x=%s; --x\n" "$lines" ]
+      bins.bc
+    ]
+    "importas"
+    "-ui"
+    "l"
+    "ls"
+    bins.head
+    "-n\${l}"
+  ];
+
+  print-nix-file-on-update = depot.nix.writeExecline "print-nix-file-on-update" { readNArgs = 1; } [
+    "if"
+    [ print-nix-file "$1" ]
+    "pipeline"
+    [ watch-file-modified "$1" ]
+    "forstdin"
+    "-d"
+    ""
+    "file"
+    "importas"
+    "file"
+    "file"
+    print-nix-file
+    "$file"
+  ];
+
+  # copied from nixpkgs
+  buildTreeSitterGrammar =
+    {
+      # language name
+      language
+      # source for the language grammar
+    , source
+    }:
+
+    pkgs.stdenv.mkDerivation {
+
+      pname = "${language}-grammar";
+      inherit (pkgs.tree-sitter) version;
+
+      src = source;
+
+      buildInputs = [ pkgs.tree-sitter ];
+
+      dontUnpack = true;
+      configurePhase = ":";
+      buildPhase = ''
+        runHook preBuild
+        scanner_cc="$src/src/scanner.cc"
+        if [ ! -f "$scanner_cc" ]; then
+          scanner_cc=""
+        fi
+        $CXX -I$src/src/ -c $scanner_cc
+        $CC -I$src/src/ -shared -o parser -Os  scanner.o $src/src/parser.c -lstdc++
+        runHook postBuild
+      '';
+      installPhase = ''
+        runHook preInstall
+        mkdir $out
+        mv parser $out/
+        runHook postInstall
+      '';
+    };
+
+in
+depot.nix.readTree.drvTargets {
+  inherit
+    print-ast
+    tree-sitter-nix
+    print-nix-file-on-update
+    watch-file-modified
+    ;
+}
diff --git a/users/Profpatsch/whatcd-resolver/Main.hs b/users/Profpatsch/whatcd-resolver/Main.hs
new file mode 100644
index 0000000000..21cd80cbf0
--- /dev/null
+++ b/users/Profpatsch/whatcd-resolver/Main.hs
@@ -0,0 +1,6 @@
+module Main where
+
+import WhatcdResolver qualified
+
+main :: IO ()
+main = WhatcdResolver.main
diff --git a/users/Profpatsch/whatcd-resolver/README.md b/users/Profpatsch/whatcd-resolver/README.md
new file mode 100644
index 0000000000..d1902e546a
--- /dev/null
+++ b/users/Profpatsch/whatcd-resolver/README.md
@@ -0,0 +1,21 @@
+# whatcd-resolver
+
+To run:
+
+```
+ninja run-services
+```
+
+in one terminal (this starts the background tasks), then
+
+```
+ninja run
+```
+
+to start the server. It runs on `9092`.
+
+You need to be in the `nix-shell` in `./..`.
+
+You need to set the `pass` key `internet/redacted/api-keys/whatcd-resolver` to an API key for RED.
+
+You need to have a transmission-rpc-daemon listening on port `9091` (no auth, try ssh port forwarding lol).
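+(For a daemon on a remote seedbox, something like `ssh -NL 9091:localhost:9091 yourseedbox` does the forwarding; the hostname is a placeholder.)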
diff --git a/users/Profpatsch/whatcd-resolver/build.ninja b/users/Profpatsch/whatcd-resolver/build.ninja
new file mode 100644
index 0000000000..ff6ba8df04
--- /dev/null
+++ b/users/Profpatsch/whatcd-resolver/build.ninja
@@ -0,0 +1,20 @@
+
+builddir = .ninja
+
+outdir = ./output
+
+rule run-services
+  command = s6-svscan ./services
+
+rule run
+  command = execlineb -c '$
+    importas -i DEPOT_ROOT DEPOT_ROOT $
+    importas -i PROFPATSCH_ROOT PROFPATSCH_ROOT cd $$PROFPATSCH_ROOT $
+    nix-run { $$DEPOT_ROOT -A users.Profpatsch.shortcuttable } cabal repl whatcd-resolver/ --repl-options "-e main" $
+    '
+
+build run-services: run-services
+  pool = console
+
+build run: run
+  pool = console
diff --git a/users/Profpatsch/whatcd-resolver/default.nix b/users/Profpatsch/whatcd-resolver/default.nix
new file mode 100644
index 0000000000..27468507ac
--- /dev/null
+++ b/users/Profpatsch/whatcd-resolver/default.nix
@@ -0,0 +1,76 @@
+{ depot, pkgs, lib, ... }:
+
+let
+  #   bins = depot.nix.getBins pkgs.sqlite ["sqlite3"];
+
+  whatcd-resolver = pkgs.haskellPackages.mkDerivation {
+    pname = "whatcd-resolver";
+    version = "0.1.0";
+
+    src = depot.users.Profpatsch.exactSource ./. [
+      ./whatcd-resolver.cabal
+      ./Main.hs
+      ./src/WhatcdResolver.hs
+      ./src/AppT.hs
+      ./src/JsonLd.hs
+      ./src/Optional.hs
+      ./src/Html.hs
+      ./src/Http.hs
+      ./src/Transmission.hs
+      ./src/Redacted.hs
+    ];
+
+    libraryHaskellDepends = [
+      depot.users.Profpatsch.my-prelude
+      depot.users.Profpatsch.my-webstuff
+      pkgs.haskellPackages.pa-prelude
+      pkgs.haskellPackages.pa-label
+      pkgs.haskellPackages.pa-json
+      pkgs.haskellPackages.pa-error-tree
+      pkgs.haskellPackages.pa-field-parser
+      pkgs.haskellPackages.pa-run-command
+      pkgs.haskellPackages.aeson-better-errors
+      pkgs.haskellPackages.blaze-html
+      pkgs.haskellPackages.hs-opentelemetry-sdk
+      pkgs.haskellPackages.http-conduit
+      pkgs.haskellPackages.http-types
+      pkgs.haskellPackages.ihp-hsx
+      pkgs.haskellPackages.monad-logger
+      pkgs.haskellPackages.resource-pool
+      pkgs.haskellPackages.postgresql-simple
+      pkgs.haskellPackages.tmp-postgres
+      pkgs.haskellPackages.unliftio
+      pkgs.haskellPackages.wai-extra
+      pkgs.haskellPackages.warp
+      pkgs.haskellPackages.punycode
+    ];
+
+    isExecutable = true;
+    isLibrary = false;
+    license = lib.licenses.mit;
+  };
+
+  bins = depot.nix.getBins whatcd-resolver [ "whatcd-resolver" ];
+
+in
+
+depot.nix.writeExecline "whatcd-resolver-wrapped" { } [
+  "importas"
+  "-i"
+  "PATH"
+  "PATH"
+  "export"
+  "PATH"
+  # TODO: figure out how to automatically migrate to a new postgres version with tmp_postgres (dump?)
+  "${pkgs.postgresql_14}/bin:$${PATH}"
+  "export"
+  "WHATCD_RESOLVER_TOOLS"
+  (pkgs.linkFarm "whatcd-resolver-tools" [
+    {
+      name = "pg_format";
+      path = "${pkgs.pgformatter}/bin/pg_format";
+    }
+  ])
+  bins.whatcd-resolver
+]
+
diff --git a/users/Profpatsch/whatcd-resolver/notes.org b/users/Profpatsch/whatcd-resolver/notes.org
new file mode 100644
index 0000000000..24662c0f32
--- /dev/null
+++ b/users/Profpatsch/whatcd-resolver/notes.org
@@ -0,0 +1,48 @@
+* The Glorious what.cd¹ Resolver
+
+  ¹: At the time of writing, what.cd didn’t even exist anymore
+
+** Idea
+   
+   Stream your music (or media) from a private tracker transparently.
+   “Spotify for torrents”
+
+** Technical
+
+   You need to have a seedbox, which runs a server program.
+   The server manages queries, downloads torrents and requested files, and
+   provides http streams to the downloaded files (while caching them for
+   seeding).
+
+   Clients then use the API to search for music (e.g. query for artists or
+   tracks) and get back the promise of a stream to the resolved file (a bit
+   like how resolvers in the Tomahawk Player work).
+
+*** The Server
+
+**** Resolving queries
+
+     ~resolve :: Query -> IO Identifiers~
+
+     A query is a search input for content (could be an artist or a movie name
+     or something)
+
+     There have to be multiple providers, depending on the site used
+     (e.g. one for Gazelle trackers, one for Piratebay) and some intermediate
+     structure (e.g. for going through Musicbrainz first).
+
+     Output is a unique identifier for a fetchable resource; this could be a
+     link to a torrent combined with a file/directory in said torrent.
+
+**** Fetching Identifiers
+
+     ~fetch :: Identifier -> IO (Promise Stream)~
+
+     Takes an Identifier (which should provide all information on how to grab
+     the media file) and returns a stream to the media file once it’s ready.
+     
+     For torrents, this probably consists of telling the torrent
+     library/application to fetch a certain torrent and start downloading the
+     required files in it. The torrent fetcher would also need to do seeding and
+     space management, since one usually has to keep a ratio and hard drive
+     space is not unlimited.
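+
+**** Sketch of the server interface
+
+     A minimal sketch of how the two operations could hang together on the
+     server; all type definitions here are made-up placeholders, and
+     ~resolve~ returns a list instead of the opaque ~Identifiers~ from above:
+
+     #+begin_src haskell
+     -- Placeholder types; the real ones depend on the torrent library used.
+     data Query
+     data Identifier
+     data Stream
+     newtype Promise a = Promise (IO a)
+
+     -- One resolver per source (a Gazelle tracker, Piratebay, Musicbrainz, …).
+     data Resolver = Resolver
+       { resolve :: Query -> IO [Identifier],
+         fetch :: Identifier -> IO (Promise Stream)
+       }
+
+     -- Try resolvers in order; stream the first identifier that is found.
+     resolveAndFetch :: [Resolver] -> Query -> IO (Maybe (Promise Stream))
+     resolveAndFetch [] _ = pure Nothing
+     resolveAndFetch (r : rest) q = do
+       ids <- resolve r q
+       case ids of
+         (ident : _) -> Just <$> fetch r ident
+         [] -> resolveAndFetch rest q
+     #+end_src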
diff --git a/users/Profpatsch/whatcd-resolver/server-notes.org b/users/Profpatsch/whatcd-resolver/server-notes.org
new file mode 100644
index 0000000000..cb990aba3d
--- /dev/null
+++ b/users/Profpatsch/whatcd-resolver/server-notes.org
@@ -0,0 +1,2 @@
+* whatcd-resolver-server
+
diff --git a/users/Profpatsch/whatcd-resolver/services/.gitignore b/users/Profpatsch/whatcd-resolver/services/.gitignore
new file mode 100644
index 0000000000..5cdb254e8c
--- /dev/null
+++ b/users/Profpatsch/whatcd-resolver/services/.gitignore
@@ -0,0 +1,3 @@
+/.s6-svscan/
+/**/event/
+/**/supervise/
diff --git a/users/Profpatsch/whatcd-resolver/services/jaeger/run b/users/Profpatsch/whatcd-resolver/services/jaeger/run
new file mode 100755
index 0000000000..41332f8bb6
--- /dev/null
+++ b/users/Profpatsch/whatcd-resolver/services/jaeger/run
@@ -0,0 +1,3 @@
+#!/usr/bin/env execlineb
+importas -i DEPOT_ROOT DEPOT_ROOT
+nix-run { $DEPOT_ROOT -A users.Profpatsch.jaeger -kK --builders '' }
diff --git a/users/Profpatsch/whatcd-resolver/services/reverse-proxy/run b/users/Profpatsch/whatcd-resolver/services/reverse-proxy/run
new file mode 100755
index 0000000000..7081b35f5a
--- /dev/null
+++ b/users/Profpatsch/whatcd-resolver/services/reverse-proxy/run
@@ -0,0 +1,2 @@
+#!/usr/bin/env execlineb
+caddy reverse-proxy --from :9092 --to :9093
diff --git a/users/Profpatsch/whatcd-resolver/src/AppT.hs b/users/Profpatsch/whatcd-resolver/src/AppT.hs
new file mode 100644
index 0000000000..abe8ccad4c
--- /dev/null
+++ b/users/Profpatsch/whatcd-resolver/src/AppT.hs
@@ -0,0 +1,150 @@
+{-# LANGUAGE DeriveAnyClass #-}
+
+module AppT where
+
+import Control.Monad.Logger qualified as Logger
+import Control.Monad.Logger.CallStack
+import Control.Monad.Reader
+import Data.Error.Tree
+import Data.HashMap.Strict (HashMap)
+import Data.HashMap.Strict qualified as HashMap
+import Data.Pool (Pool)
+import Data.Text qualified as Text
+import Database.PostgreSQL.Simple qualified as Postgres
+import GHC.Stack qualified
+import Label
+import OpenTelemetry.Trace qualified as Otel hiding (getTracer, inSpan, inSpan')
+import OpenTelemetry.Trace.Core qualified as Otel hiding (inSpan, inSpan')
+import OpenTelemetry.Trace.Monad qualified as Otel
+import PossehlAnalyticsPrelude
+import Postgres.MonadPostgres
+import System.IO qualified as IO
+import UnliftIO
+import Prelude hiding (span)
+
+data Context = Context
+  { config :: Label "logDatabaseQueries" DebugLogDatabaseQueries,
+    tracer :: Otel.Tracer,
+    pgFormat :: PgFormatPool,
+    pgConnPool :: Pool Postgres.Connection,
+    transmissionSessionId :: MVar ByteString
+  }
+
+newtype AppT m a = AppT {unAppT :: ReaderT Context m a}
+  deriving newtype (Functor, Applicative, Monad, MonadIO, MonadUnliftIO, MonadThrow)
+
+data AppException = AppException Text
+  deriving stock (Show)
+  deriving anyclass (Exception)
+
+-- *  Logging & Opentelemetry
+
+instance (MonadIO m) => MonadLogger (AppT m) where
+  monadLoggerLog loc src lvl msg = liftIO $ Logger.defaultOutput IO.stderr loc src lvl (Logger.toLogStr msg)
+
+instance (Monad m) => Otel.MonadTracer (AppT m) where
+  getTracer = AppT $ asks (.tracer)
+
+class (MonadUnliftIO m, Otel.MonadTracer m) => MonadOtel m
+
+instance (MonadUnliftIO m) => MonadOtel (AppT m)
+
+instance (MonadOtel m) => MonadOtel (Transaction m)
+
+inSpan :: (MonadOtel m) => Text -> m a -> m a
+inSpan name = Otel.inSpan name Otel.defaultSpanArguments
+
+inSpan' :: (MonadOtel m) => Text -> (Otel.Span -> m a) -> m a
+inSpan' name = Otel.inSpan' name Otel.defaultSpanArguments
+
+-- | Add the attribute to the span, prefixing it with the `_` namespace (to distinguish our application’s tags from standard tags more easily)
+addAttribute :: (MonadIO m, Otel.ToAttribute a) => Otel.Span -> Text -> a -> m ()
+addAttribute span key a = Otel.addAttribute span ("_." <> key) a
+
+-- | Add the attributes to the span, prefixing each key with the `_` namespace (to distinguish our application’s tags from standard tags more easily)
+addAttributes :: (MonadIO m) => Otel.Span -> HashMap Text Otel.Attribute -> m ()
+addAttributes span attrs = Otel.addAttributes span $ attrs & HashMap.mapKeys ("_." <>)
+
+appThrowTreeNewSpan :: (MonadThrow m, MonadOtel m) => Text -> ErrorTree -> m a
+appThrowTreeNewSpan spanName exc = inSpan' spanName $ \span -> do
+  let msg = prettyErrorTree exc
+  recordException
+    span
+    ( T2
+        (label @"type_" "AppException")
+        (label @"message" msg)
+    )
+  throwM $ AppException msg
+
+appThrowTree :: (MonadThrow m, MonadIO m) => Otel.Span -> ErrorTree -> m a
+appThrowTree span exc = do
+  let msg = prettyErrorTree exc
+  recordException
+    span
+    ( T2
+        (label @"type_" "AppException")
+        (label @"message" msg)
+    )
+  throwM $ AppException msg
+
+orAppThrowTree :: (MonadThrow m, MonadIO m) => Otel.Span -> Either ErrorTree a -> m a
+orAppThrowTree span = \case
+  Left err -> appThrowTree span err
+  Right a -> pure a
+
+assertM :: (MonadThrow f, MonadIO f) => Otel.Span -> (t -> Either ErrorTree a) -> t -> f a
+assertM span f v = case f v of
+  Right a -> pure a
+  Left err -> appThrowTree span err
+
+assertMNewSpan :: (MonadThrow f, MonadOtel f) => Text -> (t -> Either ErrorTree a) -> t -> f a
+assertMNewSpan spanName f v = case f v of
+  Right a -> pure a
+  Left err -> appThrowTreeNewSpan spanName err
+
+-- | A specialized variant of @addEvent@ that records attributes conforming to
+-- the OpenTelemetry specification's
+-- <https://github.com/open-telemetry/opentelemetry-specification/blob/49c2f56f3c0468ceb2b69518bcadadd96e0a5a8b/specification/trace/semantic_conventions/exceptions.md semantic conventions>
+--
+-- @since 0.0.1.0
+recordException ::
+  ( MonadIO m,
+    HasField "message" r Text,
+    HasField "type_" r Text
+  ) =>
+  Otel.Span ->
+  r ->
+  m ()
+recordException span dat = liftIO $ do
+  callStack <- GHC.Stack.whoCreated dat.message
+  newEventTimestamp <- Just <$> Otel.getTimestamp
+  Otel.addEvent span $
+    Otel.NewEvent
+      { newEventName = "exception",
+        newEventAttributes =
+          HashMap.fromList
+            [ ("exception.type", Otel.toAttribute @Text dat.type_),
+              ("exception.message", Otel.toAttribute @Text dat.message),
+              ("exception.stacktrace", Otel.toAttribute @Text $ Text.unlines $ map stringToText callStack)
+            ],
+        ..
+      }
+
+-- * Postgres
+
+instance (MonadThrow m, MonadUnliftIO m) => MonadPostgres (AppT m) where
+  execute = executeImpl (AppT ask) (AppT $ asks (.config.logDatabaseQueries))
+  executeMany = executeManyImpl (AppT ask) (AppT $ asks (.config.logDatabaseQueries))
+  executeManyReturningWith = executeManyReturningWithImpl (AppT ask) (AppT $ asks (.config.logDatabaseQueries))
+  queryWith = queryWithImpl (AppT ask) (AppT $ asks (.config.logDatabaseQueries))
+  queryWith_ = queryWithImpl_ (AppT ask)
+
+  foldRowsWithAcc = foldRowsWithAccImpl (AppT ask) (AppT $ asks (.config.logDatabaseQueries))
+  runTransaction = runPGTransaction
+
+runPGTransaction :: (MonadUnliftIO m) => Transaction (AppT m) a -> AppT m a
+runPGTransaction (Transaction transaction) = do
+  pool <- AppT ask <&> (.pgConnPool)
+  withRunInIO $ \unliftIO ->
+    withPGTransaction pool $ \conn -> do
+      unliftIO $ runReaderT transaction conn
diff --git a/users/Profpatsch/whatcd-resolver/src/Html.hs b/users/Profpatsch/whatcd-resolver/src/Html.hs
new file mode 100644
index 0000000000..49b87b23dc
--- /dev/null
+++ b/users/Profpatsch/whatcd-resolver/src/Html.hs
@@ -0,0 +1,69 @@
+{-# LANGUAGE QuasiQuotes #-}
+
+module Html where
+
+import Data.Aeson qualified as Json
+import Data.Aeson.KeyMap qualified as KeyMap
+import Data.List.NonEmpty qualified as NonEmpty
+import Data.Map.Strict qualified as Map
+import IHP.HSX.QQ (hsx)
+import PossehlAnalyticsPrelude
+import Text.Blaze.Html (Html)
+import Text.Blaze.Html5 qualified as Html
+import Prelude hiding (span)
+
+-- | Render an arbitrary json value to HTML in a more-or-less reasonable fashion.
+mkVal :: Json.Value -> Html
+mkVal = \case
+  Json.Number n -> Html.toHtml @Text $ showToText n
+  Json.String s -> Html.toHtml @Text s
+  Json.Bool True -> [hsx|<em>true</em>|]
+  Json.Bool False -> [hsx|<em>false</em>|]
+  Json.Null -> [hsx|<em>null</em>|]
+  Json.Array arr -> toOrderedList mkVal arr
+  Json.Object obj ->
+    obj
+      & KeyMap.toMapText
+      & toDefinitionList (Html.toHtml @Text) mkVal
+
+toOrderedList :: (Foldable t1) => (t2 -> Html) -> t1 t2 -> Html
+toOrderedList mkValFn arr =
+  arr
+    & foldMap (\el -> Html.li $ mkValFn el)
+    & Html.ol
+
+toUnorderedList :: (Foldable t1) => (t2 -> Html) -> t1 t2 -> Html
+toUnorderedList mkValFn arr =
+  arr
+    & foldMap (\el -> Html.li $ mkValFn el)
+    & Html.ul
+
+-- | Render a definition list from a Map
+toDefinitionList :: (Text -> Html) -> (t -> Html) -> Map Text t -> Html
+toDefinitionList mkKeyFn mkValFn obj =
+  obj
+    & Map.toList
+    & foldMap (\(k, v) -> Html.dt (mkKeyFn k) <> Html.dd (mkValFn v))
+    & Html.dl
+
+-- | Render a table-like structure of json values as an HTML table.
+toTable :: [[(Text, Json.Value)]] -> Html
+toTable xs =
+  case xs & nonEmpty of
+    Nothing ->
+      [hsx|<p>No results.</p>|]
+    Just xs' -> do
+      let headers = xs' & NonEmpty.head <&> fst <&> (\h -> [hsx|<th>{h}</th>|]) & mconcat
+      let vals = xs' & foldMap (Html.tr . foldMap (Html.td . mkVal . snd))
+      [hsx|
+              <table class="table">
+                <thead>
+                  <tr>
+                  {headers}
+                  </tr>
+                </thead>
+                <tbody>
+                  {vals}
+                </tbody>
+              </table>
+          |]
diff --git a/users/Profpatsch/whatcd-resolver/src/Http.hs b/users/Profpatsch/whatcd-resolver/src/Http.hs
new file mode 100644
index 0000000000..4fdbb306ad
--- /dev/null
+++ b/users/Profpatsch/whatcd-resolver/src/Http.hs
@@ -0,0 +1,129 @@
+{-# LANGUAGE QuasiQuotes #-}
+
+module Http
+  ( doRequestJson,
+    RequestOptions (..),
+    mkRequestOptions,
+    setRequestMethod,
+    setRequestBodyLBS,
+    setRequestHeader,
+    getResponseStatus,
+    getResponseHeader,
+    getResponseBody,
+  )
+where
+
+import AppT
+import Data.CaseInsensitive (CI (original))
+import Data.Char qualified as Char
+import Data.Int (Int64)
+import Data.List qualified as List
+import Data.Text qualified as Text
+import Data.Text.Lazy qualified as Lazy.Text
+import Data.Text.Punycode qualified as Punycode
+import Json.Enc qualified as Enc
+import MyPrelude
+import Network.HTTP.Client
+import Network.HTTP.Simple
+import OpenTelemetry.Attributes qualified as Otel
+import Optional
+import Prelude hiding (span)
+
+data RequestOptions = RequestOptions
+  { method :: ByteString,
+    host :: Text,
+    port :: Optional Int,
+    path :: Optional [Text],
+    headers :: Optional [Header],
+    usePlainHttp :: Optional Bool
+  }
+
+mkRequestOptions :: (HasField "method" r ByteString, HasField "host" r Text) => r -> RequestOptions
+mkRequestOptions opts =
+  RequestOptions
+    { method = opts.method,
+      port = defaults,
+      host = opts.host,
+      path = defaults,
+      headers = defaults,
+      usePlainHttp = defaults
+    }
+
+doRequestJson ::
+  (MonadOtel m) =>
+  RequestOptions ->
+  Enc.Enc ->
+  m (Response ByteString)
+doRequestJson opts val = inSpan' "HTTP Request (JSON)" $ \span -> do
+  let x = requestToXhCommandLine opts val
+  let attrs = [100, 200 .. fromIntegral @Int @Int64 (x & Text.length)]
+  for_ attrs $ \n -> do
+    addAttribute span [fmt|request.xh.{n}|] (Lazy.Text.repeat 'x' & Lazy.Text.take n & toStrict & Otel.TextAttribute)
+  addAttribute span "request.xh" (requestToXhCommandLine opts val)
+  defaultRequest {secure = not (opts & optsUsePlainHttp)}
+    & setRequestHost (opts & optsHost)
+    & setRequestPort (opts & optsPort)
+    -- TODO: is this automatically escaped by the library?
+    & setRequestPath (opts & optsPath)
+    & setRequestHeaders (opts & optsHeaders)
+    & setRequestMethod opts.method
+    & setRequestBodyLBS (Enc.encToBytesUtf8Lazy val)
+    & httpBS
+
+optsHost :: RequestOptions -> ByteString
+optsHost opts =
+  if opts.host & Text.isAscii
+    then opts.host & textToBytesUtf8
+    else opts.host & Punycode.encode
+
+optsUsePlainHttp :: RequestOptions -> Bool
+optsUsePlainHttp opts = opts.usePlainHttp.withDefault False
+
+optsPort :: RequestOptions -> Int
+optsPort opts = opts.port.withDefault (if opts & optsUsePlainHttp then 80 else 443)
+
+optsPath :: RequestOptions -> ByteString
+optsPath opts = opts.path.withDefault [] & Text.intercalate "/" & ("/" <>) & textToBytesUtf8
+
+optsHeaders :: RequestOptions -> [Header]
+optsHeaders opts = opts.headers.withDefault []
+
+-- | Create a string that can be pasted on the command line to invoke the same HTTP request via the `xh` tool (like curl, but with nicer syntax)
+requestToXhCommandLine :: RequestOptions -> Enc.Enc -> Text
+requestToXhCommandLine opts val = do
+  let protocol = if opts & optsUsePlainHttp then "http" :: Text else "https"
+  let url = [fmt|{protocol}://{opts & optsHost}:{opts & optsPort}{opts & optsPath}|]
+  let headers = opts & optsHeaders <&> \(hdr, v) -> hdr.original <> ":" <> v
+
+  prettyArgsForBash $
+    mconcat
+      [ ["xh", url],
+        headers <&> bytesToTextUtf8Lenient,
+        ["--raw"],
+        [val & Enc.encToBytesUtf8 & bytesToTextUtf8Lenient]
+      ]
+
+-- | Pretty print a command line in a way that can be copied to bash.
+prettyArgsForBash :: [Text] -> Text
+prettyArgsForBash = Text.intercalate " " . map simpleBashEscape
+
+-- | Simple escaping for bash words. If the word contains anything that’s not
+-- an ASCII alphanumeric character or one of a few often-used special
+-- characters, put the word in single quotes.
+simpleBashEscape :: Text -> Text
+simpleBashEscape t = do
+  case Text.find (not . isSimple) t of
+    Just _ -> escapeSingleQuote t
+    Nothing -> t
+  where
+    -- a character is simple if it’s an ASCII letter or digit (no spaces or
+    -- control characters) or one of a few often-used benign characters like - or .
+    isSimple c =
+      Char.isAsciiLower c
+        || Char.isAsciiUpper c
+        || Char.isDigit c
+        -- These are benign, bash will not interpret them as special characters.
+        || List.elem c ['-', '.', ':', '/']
+    -- Put the word in single quotes
+    -- If there is a single quote in the word,
+    -- close the single quoted word, add a single quote, open the word again
+    escapeSingleQuote t' = "'" <> Text.replace "'" "'\\''" t' <> "'"
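+
+-- Example: @simpleBashEscape "foo bar"@ yields @'foo bar'@, while a plain
+-- word like @abc@ is returned unquoted.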
diff --git a/users/Profpatsch/whatcd-resolver/src/JsonLd.hs b/users/Profpatsch/whatcd-resolver/src/JsonLd.hs
new file mode 100644
index 0000000000..16b1ab991b
--- /dev/null
+++ b/users/Profpatsch/whatcd-resolver/src/JsonLd.hs
@@ -0,0 +1,138 @@
+{-# LANGUAGE QuasiQuotes #-}
+
+module JsonLd where
+
+import AppT
+import Control.Monad.Reader
+import Data.Aeson qualified as Json
+import Data.Aeson.BetterErrors qualified as Json
+import Data.ByteString.Builder qualified as Builder
+import Data.List qualified as List
+import Data.Map.Strict qualified as Map
+import Data.Set (Set)
+import Data.Set qualified as Set
+import Html qualified
+import IHP.HSX.QQ (hsx)
+import Json qualified
+import Label
+import MyPrelude
+import Network.HTTP.Client.Conduit qualified as Http
+import Network.HTTP.Simple qualified as Http
+import Network.HTTP.Types.URI qualified as Url
+import Network.URI (URI)
+import Optional
+import Redacted
+import Text.Blaze.Html (Html)
+import Prelude hiding (span)
+
+-- | A recursive `json+ld` structure.
+data Jsonld
+  = JsonldObject JsonldObject
+  | JsonldAnonymousObject JsonldAnonymousObject
+  | JsonldArray [Jsonld]
+  | JsonldField Json.Value
+  deriving stock (Show, Eq)
+
+-- | A json+ld object, that is something which can be further expanded by following the URL in its `id_` field.
+data JsonldObject = JsonldObject'
+  { -- | `@type` field; currently just the plain value without taking into account the json+ld context
+    type_ :: Set Text,
+    -- | `@id` field, usually a link to follow for expanding the object to its full glory
+    id_ :: Text,
+    -- | any fields of this object that the remote deemed important enough to already pre-emptively include in the object; to get all fields, resolve the URL in `id_`.
+    previewFields :: Map Text Jsonld
+  }
+  deriving stock (Show, Eq)
+
+-- | A json+ld object that cannot be inspected further by resolving its ID
+data JsonldAnonymousObject = JsonldAnonymousObject'
+  { -- | `@type` field; currently just the plain value without taking into account the json+ld context
+    type_ :: Set Text,
+    -- | fields of this anonymous object
+    fields :: Map Text Jsonld
+  }
+  deriving stock (Show, Eq)
+
+jsonldParser :: (Monad m) => Json.ParseT err m Jsonld
+jsonldParser =
+  Json.asValue >>= \cur -> do
+    if
+      | Json.Object _ <- cur -> do
+          type_ <-
+            Json.keyMay "@type" (Json.asArraySet Json.asText Json.<|> (Set.singleton <$> Json.asText))
+              <&> fromMaybe Set.empty
+          idMay <- Json.keyMay "@id" $ Json.asText
+          fields <-
+            Json.asObjectMap jsonldParser
+              <&> Map.delete "@type"
+              <&> Map.delete "@id"
+
+          if
+            | Just id_ <- idMay -> do
+                pure $ JsonldObject $ JsonldObject' {previewFields = fields, ..}
+            | otherwise -> pure $ JsonldAnonymousObject $ JsonldAnonymousObject' {..}
+      | Json.Array _ <- cur -> do
+          JsonldArray <$> Json.eachInArray jsonldParser
+      | otherwise -> pure $ JsonldField cur
+
+renderJsonld :: Jsonld -> Html
+renderJsonld = \case
+  JsonldObject obj -> renderObject obj (Just obj.id_) obj.previewFields
+  JsonldAnonymousObject obj -> renderObject obj Nothing obj.fields
+  JsonldArray arr ->
+    Html.toOrderedList renderJsonld arr
+  JsonldField f -> Html.mkVal f
+  where
+    renderObject obj mId_ fields = do
+      let id_ =
+            mId_ <&> \i ->
+              [hsx|
+                  <dt>Url</dt>
+                  <dd><a href={i}>{i}</a></dd>
+                  |]
+          getMoreButton =
+            mId_ <&> \i ->
+              [hsx|
+              <div>
+                <button
+                  hx-get={snippetHref i}
+                  hx-target="closest dl"
+                  hx-swap="outerHTML"
+                >more fields …</button>
+              </div>
+            |]
+      [hsx|
+      <dl>
+        <dt>Type</dt>
+        <dd>{obj.type_ & toList & schemaTypes}</dd>
+        {id_}
+        <dt>Fields</dt>
+        <dd>
+          {fields & Html.toDefinitionList schemaType renderJsonld}
+          {getMoreButton}
+        </dd>
+      </dl>
+    |]
+    snippetHref target =
+      Builder.toLazyByteString $
+        "/snips/jsonld/render"
+          <> Url.renderQueryBuilder True [("target", Just (textToBytesUtf8 target))]
+
+    schemaTypes xs =
+      xs
+        <&> schemaType
+        & List.intersperse ", "
+        & mconcat
+    schemaType t =
+      let href :: Text = [fmt|https://schema.org/{t}|] in [hsx|<a href={href} target="_blank">{t}</a>|]
+
+httpGetJsonLd :: (MonadThrow m, MonadOtel m) => (URI, Http.Request) -> m Jsonld
+httpGetJsonLd (uri, req) = inSpan' "Fetch json+ld" $ \span -> do
+  addAttribute span "json+ld.targetUrl" (uri & showToText)
+  httpJson
+    (mkOptional (label @"contentType" "application/ld+json"))
+    jsonldParser
+    ( req
+        & Http.setRequestMethod "GET"
+        & Http.setRequestHeader "Accept" ["application/ld+json"]
+    )
diff --git a/users/Profpatsch/whatcd-resolver/src/Optional.hs b/users/Profpatsch/whatcd-resolver/src/Optional.hs
new file mode 100644
index 0000000000..9791c84970
--- /dev/null
+++ b/users/Profpatsch/whatcd-resolver/src/Optional.hs
@@ -0,0 +1,18 @@
+module Optional where
+
+import GHC.Records (getField)
+import MyPrelude
+
+newtype Optional a = OptionalInternal (Maybe a)
+  deriving newtype (Functor)
+
+mkOptional :: a -> Optional a
+mkOptional defaultValue = OptionalInternal $ Just defaultValue
+
+defaults :: Optional a
+defaults = OptionalInternal Nothing
+
+instance HasField "withDefault" (Optional a) (a -> a) where
+  getField (OptionalInternal m) defaultValue = case m of
+    Nothing -> defaultValue
+    Just a -> a
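+
+-- A minimal usage sketch (hypothetical values, assuming OverloadedRecordDot):
+--
+-- >>> (mkOptional (1 :: Int)).withDefault 42
+-- 1
+-- >>> (defaults :: Optional Int).withDefault 42
+-- 42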
diff --git a/users/Profpatsch/whatcd-resolver/src/Redacted.hs b/users/Profpatsch/whatcd-resolver/src/Redacted.hs
new file mode 100644
index 0000000000..c0c26b72d6
--- /dev/null
+++ b/users/Profpatsch/whatcd-resolver/src/Redacted.hs
@@ -0,0 +1,540 @@
+{-# LANGUAGE QuasiQuotes #-}
+
+module Redacted where
+
+import AppT
+import Control.Monad.Logger.CallStack
+import Control.Monad.Reader
+import Data.Aeson qualified as Json
+import Data.Aeson.BetterErrors qualified as Json
+import Data.Aeson.KeyMap qualified as KeyMap
+import Data.Error.Tree
+import Data.List qualified as List
+import Database.PostgreSQL.Simple (Binary (Binary), Only (..))
+import Database.PostgreSQL.Simple.SqlQQ (sql)
+import Database.PostgreSQL.Simple.Types (PGArray (PGArray))
+import FieldParser qualified as Field
+import Json qualified
+import Label
+import MyPrelude
+import Network.HTTP.Client.Conduit qualified as Http
+import Network.HTTP.Simple qualified as Http
+import Network.HTTP.Types
+import Network.Wai.Parse qualified as Wai
+import OpenTelemetry.Trace qualified as Otel hiding (getTracer, inSpan, inSpan')
+import Optional
+import Postgres.Decoder qualified as Dec
+import Postgres.MonadPostgres
+import Pretty
+import RunCommand (runCommandExpect0)
+import Prelude hiding (span)
+
+redactedSearch ::
+  (MonadLogger m, MonadThrow m, MonadOtel m) =>
+  [(ByteString, ByteString)] ->
+  Json.Parse ErrorTree a ->
+  m a
+redactedSearch advanced parser =
+  inSpan "Redacted API Search" $
+    redactedApiRequestJson
+      ( T2
+          (label @"action" "browse")
+          (label @"actionArgs" ((advanced <&> second Just)))
+      )
+      parser
+
+redactedGetTorrentFile ::
+  ( MonadLogger m,
+    MonadThrow m,
+    HasField "torrentId" dat Int,
+    MonadOtel m
+  ) =>
+  dat ->
+  m ByteString
+redactedGetTorrentFile dat = inSpan' "Redacted Get Torrent File" $ \span -> do
+  req <-
+    mkRedactedApiRequest
+      ( T2
+          (label @"action" "download")
+          ( label @"actionArgs"
+              [ ("id", Just (dat.torrentId & showToText @Int & textToBytesUtf8))
+              -- try using tokens as long as we have them (TODO: what if there are no tokens left?)
+              -- ANSWER: it breaks:
+              -- responseBody = "{\"status\":\"failure\",\"error\":\"You do not have any freeleech tokens left. Please use the regular DL link.\"}",
+              -- ("usetoken", Just "1")
+              ]
+          )
+      )
+  httpTorrent span req
+
+-- fix
+--   ( \io -> do
+--       logInfo "delay"
+--       liftIO $ threadDelay 10_000_000
+--       io
+--   )
+
+exampleSearch :: (MonadThrow m, MonadLogger m, MonadPostgres m, MonadOtel m) => m (Transaction m ())
+exampleSearch = do
+  t1 <-
+    redactedSearchAndInsert
+      [ ("searchstr", "cherish"),
+        ("artistname", "kirinji"),
+        -- ("year", "1982"),
+        -- ("format", "MP3"),
+        -- ("releasetype", "album"),
+        ("order_by", "year")
+      ]
+  t3 <-
+    redactedSearchAndInsert
+      [ ("searchstr", "mouss et hakim"),
+        ("artistname", "mouss et hakim"),
+        -- ("year", "1982"),
+        -- ("format", "MP3"),
+        -- ("releasetype", "album"),
+        ("order_by", "year")
+      ]
+  t2 <-
+    redactedSearchAndInsert
+      [ ("searchstr", "thriller"),
+        ("artistname", "michael jackson"),
+        -- ("year", "1982"),
+        -- ("format", "MP3"),
+        -- ("releasetype", "album"),
+        ("order_by", "year")
+      ]
+  pure (t1 >> t2 >> t3)
+
+-- | Do the search, return a transaction that inserts all results from all pages of the search.
+redactedSearchAndInsert ::
+  forall m.
+  ( MonadLogger m,
+    MonadPostgres m,
+    MonadThrow m,
+    MonadOtel m
+  ) =>
+  [(ByteString, ByteString)] ->
+  m (Transaction m ())
+redactedSearchAndInsert extraArguments = do
+  logInfo [fmt|Doing redacted search with arguments: {showPretty extraArguments}|]
+  -- The first search returns the number of pages, so we use that to query all results piece by piece.
+  firstPage <- go Nothing
+  let remainingPages = firstPage.pages - 1
+  logInfo [fmt|Got the first page, found {remainingPages} more pages|]
+  let otherPagesNum = [(2 :: Natural) .. remainingPages]
+  otherPages <- traverse go (Just <$> otherPagesNum)
+  pure $
+    (firstPage : otherPages)
+      & concatMap (.tourGroups)
+      & \case
+        IsNonEmpty tgs -> tgs & insertTourGroupsAndTorrents
+        IsEmpty -> pure ()
+  where
+    go mpage =
+      redactedSearch
+        ( extraArguments
+            -- pass the page (for every search but the first one)
+            <> (mpage & ifExists (\page -> ("page", (page :: Natural) & showToText & textToBytesUtf8)))
+        )
+        ( do
+            status <- Json.key "status" Json.asText
+            when (status /= "success") $ do
+              Json.throwCustomError [fmt|Status was not "success", but {status}|]
+            Json.key "response" $ do
+              pages <-
+                Json.keyMay "pages" (Field.toJsonParser (Field.mapError singleError $ Field.jsonNumber >>> Field.boundedScientificIntegral @Int "not an Integer" >>> Field.integralToNatural))
+                  -- in case the field is missing, let’s assume there is only one page
+                  <&> fromMaybe 1
+              Json.key "results" $ do
+                tourGroups <-
+                  label @"tourGroups"
+                    <$> ( Json.eachInArray $ do
+                            groupId <- Json.keyLabel @"groupId" "groupId" (Json.asIntegral @_ @Int)
+                            groupName <- Json.keyLabel @"groupName" "groupName" Json.asText
+                            fullJsonResult <-
+                              label @"fullJsonResult"
+                                <$> ( Json.asObject
+                                        -- remove torrents cause they are inserted separately below
+                                        <&> KeyMap.filterWithKey (\k _ -> k /= "torrents")
+                                        <&> Json.Object
+                                    )
+                            let tourGroup = T3 groupId groupName fullJsonResult
+                            torrents <- Json.keyLabel @"torrents" "torrents" $
+                              Json.eachInArray $ do
+                                torrentId <- Json.keyLabel @"torrentId" "torrentId" (Json.asIntegral @_ @Int)
+                                fullJsonResultT <- label @"fullJsonResult" <$> Json.asValue
+                                pure $ T2 torrentId fullJsonResultT
+                            pure (T2 (label @"tourGroup" tourGroup) torrents)
+                        )
+                pure
+                  ( T2
+                      (label @"pages" pages)
+                      tourGroups
+                  )
+        )
+    insertTourGroupsAndTorrents ::
+      NonEmpty
+        ( T2
+            "tourGroup"
+            (T3 "groupId" Int "groupName" Text "fullJsonResult" Json.Value)
+            "torrents"
+            [T2 "torrentId" Int "fullJsonResult" Json.Value]
+        ) ->
+      Transaction m ()
+    insertTourGroupsAndTorrents dat = do
+      let tourGroups = dat <&> (.tourGroup)
+      let torrents = dat <&> (.torrents)
+      insertTourGroups tourGroups
+        >>= ( \res ->
+                insertTorrents $
+                  zipT2 $
+                    T2
+                      (label @"torrentGroupIdPg" $ res <&> (.tourGroupIdPg))
+                      (label @"torrents" (torrents & toList))
+            )
+    insertTourGroups ::
+      NonEmpty
+        ( T3
+            "groupId"
+            Int
+            "groupName"
+            Text
+            "fullJsonResult"
+            Json.Value
+        ) ->
+      Transaction m [Label "tourGroupIdPg" Int]
+    insertTourGroups dats = do
+      let groupNames =
+            dats <&> \dat -> [fmt|{dat.groupId}: {dat.groupName}|]
+      logInfo [fmt|Inserting tour groups for {showPretty groupNames}|]
+      _ <-
+        execute
+          [fmt|
+                  DELETE FROM redacted.torrent_groups
+                  WHERE group_id = ANY (?::integer[])
+              |]
+          (Only $ (dats <&> (.groupId) & toList & PGArray :: PGArray Int))
+      executeManyReturningWith
+        [fmt|
+              INSERT INTO redacted.torrent_groups (
+                group_id, group_name, full_json_result
+              ) VALUES
+              ( ?, ? , ? )
+              ON CONFLICT (group_id) DO UPDATE SET
+                group_id = excluded.group_id,
+                group_name = excluded.group_name,
+                full_json_result = excluded.full_json_result
+              RETURNING (id)
+            |]
+        ( dats <&> \dat ->
+            ( dat.groupId,
+              dat.groupName,
+              dat.fullJsonResult
+            )
+        )
+        (label @"tourGroupIdPg" <$> Dec.fromField @Int)
+
+    insertTorrents ::
+      [ T2
+          "torrentGroupIdPg"
+          Int
+          "torrents"
+          [T2 "torrentId" Int "fullJsonResult" Json.Value]
+      ] ->
+      Transaction m ()
+    insertTorrents dats = do
+      _ <-
+        execute
+          [sql|
+            DELETE FROM redacted.torrents_json
+            WHERE torrent_id = ANY (?::integer[])
+          |]
+          ( Only $
+              PGArray
+                [ torrent.torrentId
+                  | dat <- dats,
+                    torrent <- dat.torrents
+                ]
+          )
+
+      execute
+        [sql|
+          INSERT INTO redacted.torrents_json
+            ( torrent_group
+            , torrent_id
+            , full_json_result)
+          SELECT *
+          FROM UNNEST(
+              ?::integer[]
+            , ?::integer[]
+            , ?::jsonb[]
+          ) AS inputs(
+              torrent_group
+            , torrent_id
+            , full_json_result)
+          |]
+        ( [ ( dat.torrentGroupIdPg :: Int,
+              group.torrentId :: Int,
+              group.fullJsonResult :: Json.Value
+            )
+            | dat <- dats,
+              group <- dat.torrents
+          ]
+            & unzip3PGArray
+        )
+      pure ()
+
+unzip3PGArray :: [(a1, a2, a3)] -> (PGArray a1, PGArray a2, PGArray a3)
+unzip3PGArray xs = xs & unzip3 & \(a, b, c) -> (PGArray a, PGArray b, PGArray c)
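+-- e.g. unzip3PGArray [(1, "a", True), (2, "b", False)]
+--        == (PGArray [1, 2], PGArray ["a", "b"], PGArray [True, False])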
+
+redactedGetTorrentFileAndInsert ::
+  ( HasField "torrentId" r Int,
+    MonadPostgres m,
+    MonadThrow m,
+    MonadLogger m,
+    MonadOtel m
+  ) =>
+  r ->
+  Transaction m (Label "torrentFile" ByteString)
+redactedGetTorrentFileAndInsert dat = inSpan' "Redacted Get Torrent File and Insert" $ \span -> do
+  bytes <- redactedGetTorrentFile dat
+  execute
+    [sql|
+    UPDATE redacted.torrents_json
+    SET torrent_file = ?::bytea
+    WHERE torrent_id = ?::integer
+  |]
+    ( (Binary bytes :: Binary ByteString),
+      dat.torrentId
+    )
+    >>= assertOneUpdated span "redactedGetTorrentFileAndInsert"
+    >>= \() -> pure (label @"torrentFile" bytes)
+
+getTorrentFileById ::
+  ( MonadPostgres m,
+    HasField "torrentId" r Int,
+    MonadThrow m
+  ) =>
+  r ->
+  Transaction m (Maybe (Label "torrentFile" ByteString))
+getTorrentFileById dat = do
+  queryWith
+    [sql|
+    SELECT torrent_file
+    FROM redacted.torrents
+    WHERE torrent_id = ?::integer
+  |]
+    (Only $ (dat.torrentId :: Int))
+    (fmap @Maybe (label @"torrentFile") <$> Dec.byteaMay)
+    >>= ensureSingleRow
+
+updateTransmissionTorrentHashById ::
+  ( MonadPostgres m,
+    HasField "torrentId" r Int,
+    HasField "torrentHash" r Text
+  ) =>
+  r ->
+  Transaction m (Label "numberOfRowsAffected" Natural)
+updateTransmissionTorrentHashById dat = do
+  execute
+    [sql|
+    UPDATE redacted.torrents_json
+    SET transmission_torrent_hash = ?::text
+    WHERE torrent_id = ?::integer
+    |]
+    ( dat.torrentHash :: Text,
+      dat.torrentId :: Int
+    )
+
+assertOneUpdated ::
+  (HasField "numberOfRowsAffected" r Natural, MonadThrow m, MonadIO m) =>
+  Otel.Span ->
+  Text ->
+  r ->
+  m ()
+assertOneUpdated span name x = case x.numberOfRowsAffected of
+  1 -> pure ()
+  n -> appThrowTree span ([fmt|{name :: Text}: Expected to update exactly one row, but updated {n :: Natural} row(s)|])
+
+data TorrentData transmissionInfo = TorrentData
+  { groupId :: Int,
+    torrentId :: Int,
+    seedingWeight :: Int,
+    torrentJson :: Json.Value,
+    torrentGroupJson :: T3 "artist" Text "groupName" Text "groupYear" Int,
+    torrentStatus :: TorrentStatus transmissionInfo
+  }
+
+data TorrentStatus transmissionInfo
+  = NoTorrentFileYet
+  | NotInTransmissionYet
+  | InTransmission (T2 "torrentHash" Text "transmissionInfo" transmissionInfo)
+
+getTorrentById :: (MonadPostgres m, HasField "torrentId" r Int, MonadThrow m) => r -> Transaction m Json.Value
+getTorrentById dat = do
+  queryWith
+    [sql|
+    SELECT full_json_result FROM redacted.torrents
+    WHERE torrent_id = ?::integer
+  |]
+    (getLabel @"torrentId" dat)
+    (Dec.json Json.asValue)
+    >>= ensureSingleRow
+
+-- | Find the best torrent for each torrent group (based on the seeding_weight)
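+--
+-- (Postgres' `DISTINCT ON (group_id)`, combined with `ORDER BY group_id,
+-- seeding_weight DESC`, keeps exactly the first, i.e. highest-weight, row per group.)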
+getBestTorrents :: (MonadPostgres m, HasField "onlyDownloaded" opts Bool) => opts -> Transaction m [TorrentData ()]
+getBestTorrents opts = do
+  queryWith
+    [sql|
+      SELECT * FROM (
+        SELECT DISTINCT ON (group_id)
+          tg.group_id,
+          t.torrent_id,
+          seeding_weight,
+          t.full_json_result AS torrent_json,
+          tg.full_json_result AS torrent_group_json,
+          t.torrent_file IS NOT NULL as has_torrent_file,
+          t.transmission_torrent_hash
+        FROM redacted.torrents t
+        JOIN redacted.torrent_groups tg ON tg.id = t.torrent_group
+        ORDER BY group_id, seeding_weight DESC
+      ) as _
+      WHERE
+        -- onlyDownloaded
+        ((NOT ?::bool) OR has_torrent_file)
+      ORDER BY seeding_weight DESC
+    |]
+    (Only opts.onlyDownloaded :: Only Bool)
+    ( do
+        groupId <- Dec.fromField @Int
+        torrentId <- Dec.fromField @Int
+        seedingWeight <- Dec.fromField @Int
+        torrentJson <- Dec.json Json.asValue
+        torrentGroupJson <-
+          ( Dec.json $ do
+              artist <- Json.keyLabel @"artist" "artist" Json.asText
+              groupName <- Json.keyLabel @"groupName" "groupName" Json.asText
+              groupYear <- Json.keyLabel @"groupYear" "groupYear" (Json.asIntegral @_ @Int)
+              pure $ T3 artist groupName groupYear
+            )
+        hasTorrentFile <- Dec.fromField @Bool
+        transmissionTorrentHash <-
+          Dec.fromField @(Maybe Text)
+        pure $
+          TorrentData
+            { torrentStatus =
+                if
+                  | not hasTorrentFile -> NoTorrentFileYet
+                  | Nothing <- transmissionTorrentHash -> NotInTransmissionYet
+                  | Just hash <- transmissionTorrentHash ->
+                      InTransmission $
+                        T2 (label @"torrentHash" hash) (label @"transmissionInfo" ()),
+              ..
+            }
+    )
+
+-- | Do a request to the redacted API. If you know what that is, you know how to find the API docs.
+mkRedactedApiRequest ::
+  ( MonadThrow m,
+    MonadIO m,
+    MonadLogger m,
+    HasField "action" p ByteString,
+    HasField "actionArgs" p [(ByteString, Maybe ByteString)]
+  ) =>
+  p ->
+  m Http.Request
+mkRedactedApiRequest dat = do
+  authKey <- runCommandExpect0 "pass" ["internet/redacted/api-keys/whatcd-resolver"]
+  pure $
+    [fmt|https://redacted.ch/ajax.php|]
+      & Http.setRequestMethod "GET"
+      & Http.setQueryString (("action", Just dat.action) : dat.actionArgs)
+      & Http.setRequestHeader "Authorization" [authKey]
+
+httpTorrent ::
+  ( MonadIO m,
+    MonadThrow m
+  ) =>
+  Otel.Span ->
+  Http.Request ->
+  m ByteString
+httpTorrent span req =
+  Http.httpBS req
+    >>= assertM
+      span
+      ( \resp -> do
+          let statusCode = resp & Http.responseStatus & (.statusCode)
+              contentType =
+                resp
+                  & Http.responseHeaders
+                  & List.lookup "content-type"
+                  <&> Wai.parseContentType
+                  <&> (\(ct, _mimeAttributes) -> ct)
+          if
+            | statusCode == 200,
+              Just "application/x-bittorrent" <- contentType ->
+                Right $ (resp & Http.responseBody)
+            | statusCode == 200,
+              Just otherType <- contentType ->
+                Left [fmt|Redacted returned a non-torrent body, with content-type "{otherType}"|]
+            | statusCode == 200,
+              Nothing <- contentType ->
+                Left [fmt|Redacted returned a body with unspecified content type|]
+            | code <- statusCode -> Left [fmt|Redacted returned a non-200 error code, code {code}: {resp & showPretty}|]
+      )
+
+httpJson ::
+  ( MonadThrow m,
+    MonadOtel m
+  ) =>
+  (Optional (Label "contentType" ByteString)) ->
+  Json.Parse ErrorTree b ->
+  Http.Request ->
+  m b
+httpJson opts parser req = inSpan' "HTTP Request (JSON)" $ \span -> do
+  let opts' = opts.withDefault (label @"contentType" "application/json")
+  Http.httpBS req
+    >>= assertM
+      span
+      ( \resp -> do
+          let statusCode = resp & Http.responseStatus & (.statusCode)
+              contentType =
+                resp
+                  & Http.responseHeaders
+                  & List.lookup "content-type"
+                  <&> Wai.parseContentType
+                  <&> (\(ct, _mimeAttributes) -> ct)
+          if
+            | statusCode == 200,
+              Just ct <- contentType,
+              ct == opts'.contentType ->
+                Right $ (resp & Http.responseBody)
+            | statusCode == 200,
+              Just otherType <- contentType ->
+                Left [fmt|Server returned a non-json body, with content-type "{otherType}"|]
+            | statusCode == 200,
+              Nothing <- contentType ->
+                Left [fmt|Server returned a body with unspecified content type|]
+            | code <- statusCode -> Left [fmt|Server returned a non-200 error code, code {code}: {resp & showPretty}|]
+      )
+    >>= assertM
+      span
+      ( \body ->
+          Json.parseStrict parser body
+            & first (Json.parseErrorTree "could not parse redacted response")
+      )
+
+redactedApiRequestJson ::
+  ( MonadThrow m,
+    MonadLogger m,
+    HasField "action" p ByteString,
+    HasField "actionArgs" p [(ByteString, Maybe ByteString)],
+    MonadOtel m
+  ) =>
+  p ->
+  Json.Parse ErrorTree a ->
+  m a
+redactedApiRequestJson dat parser =
+  do
+    mkRedactedApiRequest dat
+    >>= httpJson defaults parser
diff --git a/users/Profpatsch/whatcd-resolver/src/Transmission.hs b/users/Profpatsch/whatcd-resolver/src/Transmission.hs
new file mode 100644
index 0000000000..66dbeb9ce7
--- /dev/null
+++ b/users/Profpatsch/whatcd-resolver/src/Transmission.hs
@@ -0,0 +1,306 @@
+{-# LANGUAGE QuasiQuotes #-}
+
+module Transmission where
+
+import AppT
+import Control.Monad.Logger.CallStack
+import Control.Monad.Reader
+import Data.Aeson qualified as Json
+import Data.Aeson.BetterErrors qualified as Json
+import Data.Aeson.KeyMap qualified as KeyMap
+import Data.Error.Tree
+import Data.HashMap.Strict qualified as HashMap
+import Data.List qualified as List
+import Data.List.NonEmpty qualified as NonEmpty
+import Data.Map.Strict qualified as Map
+import Database.PostgreSQL.Simple (Only (..))
+import Database.PostgreSQL.Simple.Types (PGArray (PGArray))
+import FieldParser (FieldParser' (..))
+import FieldParser qualified as Field
+import Html qualified
+import Http qualified
+import Json qualified
+import Json.Enc (Enc)
+import Json.Enc qualified as Enc
+import Label
+import MyPrelude
+import Network.HTTP.Types
+import OpenTelemetry.Trace qualified as Otel hiding (getTracer, inSpan, inSpan')
+import Optional
+import Postgres.MonadPostgres
+import Pretty
+import Text.Blaze.Html (Html)
+import UnliftIO
+import Prelude hiding (span)
+
+-- | A value between (inclusive) 0% and (inclusive) 100%. Precise to 1% steps.
+newtype Percentage = Percentage {unPercentage :: Int}
+  deriving stock (Show)
+
+-- | Parse a scientific into a Percentage
+scientificPercentage :: FieldParser' Error Scientific Percentage
+scientificPercentage =
+  Field.boundedScientificRealFloat @Float
+    >>> ( FieldParser $ \f ->
+            if
+              | f < 0 -> Left "percentage cannot be negative"
+              | f > 1 -> Left "percentage cannot be over 100%"
+              | otherwise -> Right $ Percentage $ ceiling (f * 100)
+        )
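+
+-- For intuition (hypothetical inputs): 0.5 parses to Percentage 50,
+-- 0.505 to Percentage 51 (ceiling), and 1.2 fails with "percentage cannot be over 100%".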
+
+-- | Fetch the current status from transmission, and remove the transmission hash from our database
+-- iff it does not exist in transmission anymore
+getAndUpdateTransmissionTorrentsStatus ::
+  ( MonadTransmission m,
+    MonadThrow m,
+    MonadLogger m,
+    MonadPostgres m,
+    MonadOtel m
+  ) =>
+  Map (Label "torrentHash" Text) () ->
+  (Transaction m (Map (Label "torrentHash" Text) (Label "percentDone" Percentage)))
+getAndUpdateTransmissionTorrentsStatus knownTorrents = do
+  let fields = ["hashString", "percentDone"]
+  actualTorrents <-
+    lift @Transaction $
+      doTransmissionRequest'
+        ( transmissionRequestListOnlyTorrents
+            ( T2
+                (label @"fields" fields)
+                (label @"ids" (Map.keys knownTorrents))
+            )
+            $ do
+              torrentHash <- Json.keyLabel @"torrentHash" "hashString" Json.asText
+              percentDone <- Json.keyLabel @"percentDone" "percentDone" (Field.toJsonParser $ Field.jsonNumber >>> scientificPercentage)
+              pure (torrentHash, percentDone)
+        )
+        <&> Map.fromList
+  let toDelete = Map.difference knownTorrents actualTorrents
+  execute
+    [fmt|
+    UPDATE redacted.torrents_json
+    SET transmission_torrent_hash = NULL
+    WHERE transmission_torrent_hash = ANY (?::text[])
+  |]
+    $ Only (toDelete & Map.keys <&> (.torrentHash) & PGArray :: PGArray Text)
+  pure actualTorrents
+
+getTransmissionTorrentsTable ::
+  (MonadTransmission m, MonadThrow m, MonadLogger m, MonadOtel m) => m Html
+getTransmissionTorrentsTable = do
+  let fields =
+        [ "hashString",
+          "name",
+          "percentDone",
+          "percentComplete",
+          "downloadDir",
+          "files"
+        ]
+  doTransmissionRequest'
+    ( transmissionRequestListAllTorrents fields $ do
+        Json.asObject <&> KeyMap.toMapText
+    )
+    <&> \resp ->
+      Html.toTable
+        ( resp
+            & List.sortOn (\m -> m & Map.lookup "percentDone" & fromMaybe (Json.Number 0))
+            <&> Map.toList
+            -- TODO
+            & List.take 100
+        )
+
+data TransmissionRequest = TransmissionRequest
+  { method :: Text,
+    arguments :: Map Text Enc,
+    tag :: Maybe Int
+  }
+  deriving stock (Show)
+
+transmissionConnectionConfig :: T3 "host" Text "port" Int "usePlainHttp" Bool
+transmissionConnectionConfig = (T3 (label @"host" "localhost") (label @"port" 9091) (label @"usePlainHttp" True))
+
+transmissionRequestListAllTorrents :: (Monad m) => [Text] -> Json.ParseT e m out -> (TransmissionRequest, Json.ParseT e m [out])
+transmissionRequestListAllTorrents fields parseTorrent =
+  ( TransmissionRequest
+      { method = "torrent-get",
+        arguments =
+          Map.fromList
+            [ ("fields", Enc.list Enc.text fields)
+            ],
+        tag = Nothing
+      },
+    Json.key "torrents" $ Json.eachInArray parseTorrent
+  )
+
+transmissionRequestListOnlyTorrents ::
+  ( HasField "ids" r1 [(Label "torrentHash" Text)],
+    HasField "fields" r1 [Text],
+    Monad m
+  ) =>
+  r1 ->
+  Json.ParseT e m out ->
+  (TransmissionRequest, Json.ParseT e m [out])
+transmissionRequestListOnlyTorrents dat parseTorrent =
+  ( TransmissionRequest
+      { method = "torrent-get",
+        arguments =
+          Map.fromList
+            [ ("ids", Enc.list (\i -> Enc.text i.torrentHash) dat.ids),
+              ("fields", Enc.list Enc.text dat.fields)
+            ],
+        tag = Nothing
+      },
+    Json.key "torrents" $ Json.eachInArray parseTorrent
+  )
+
+transmissionRequestAddTorrent ::
+  (HasField "torrentFile" r ByteString, Monad m) =>
+  r ->
+  ( TransmissionRequest,
+    Json.ParseT err m (T2 "torrentHash" Text "torrentName" Text)
+  )
+transmissionRequestAddTorrent dat =
+  ( TransmissionRequest
+      { method = "torrent-add",
+        arguments =
+          Map.fromList
+            [ ("metainfo", Enc.base64Bytes dat.torrentFile),
+              ("paused", Enc.bool False)
+            ],
+        tag = Nothing
+      },
+    do
+      let p method = Json.key method $ do
+            hash <- Json.keyLabel @"torrentHash" "hashString" Json.asText
+            name <- Json.keyLabel @"torrentName" "name" Json.asText
+            pure $ T2 hash name
+      p "torrent-duplicate" Json.<|> p "torrent-added"
+  )
+
+data TransmissionResponse output = TransmissionResponse
+  { result :: TransmissionResponseStatus,
+    arguments :: Maybe output,
+    tag :: Maybe Int
+  }
+  deriving stock (Show)
+
+data TransmissionResponseStatus
+  = TransmissionResponseSuccess
+  | TransmissionResponseFailure Text
+  deriving stock (Show)
+
+doTransmissionRequest' ::
+  ( MonadTransmission m,
+    MonadThrow m,
+    MonadLogger m,
+    MonadOtel m
+  ) =>
+  (TransmissionRequest, Json.Parse Error output) ->
+  m output
+doTransmissionRequest' req = inSpan' "Transmission Request" $ \span -> do
+  resp <-
+    doTransmissionRequest
+      span
+      transmissionConnectionConfig
+      req
+  case resp.result of
+    TransmissionResponseFailure err -> appThrowTree span (nestedError "Transmission RPC error" $ singleError $ newError err)
+    TransmissionResponseSuccess -> case resp.arguments of
+      Nothing -> appThrowTree span "Transmission RPC error: No `arguments` field in response"
+      Just out -> pure out
+
+-- | Contact the transmission RPC, and do the CSRF protection dance.
+--
+-- Spec: https://github.com/transmission/transmission/blob/main/docs/rpc-spec.md
+doTransmissionRequest ::
+  ( MonadTransmission m,
+    HasField "host" t1 Text,
+    HasField "port" t1 Int,
+    HasField "usePlainHttp" t1 Bool,
+    MonadThrow m,
+    MonadLogger m,
+    MonadOtel m
+  ) =>
+  Otel.Span ->
+  t1 ->
+  (TransmissionRequest, Json.Parse Error output) ->
+  m (TransmissionResponse output)
+doTransmissionRequest span dat (req, parser) = do
+  sessionId <- getTransmissionId
+  let textArg t = (Enc.text t, Otel.toAttribute @Text t)
+  let encArg enc = (enc, Otel.toAttribute @Text $ enc & Enc.encToTextPretty)
+  let intArg i = (Enc.int i, Otel.toAttribute @Int i)
+
+  let body :: [(Text, (Enc, Otel.Attribute))] =
+        ( [ ("method", req.method & textArg),
+            ("arguments", encArg $ Enc.map id req.arguments)
+          ]
+            <> (req.tag & foldMap (\t -> [("tag", t & intArg)]))
+        )
+  addAttributes
+    span
+    ( HashMap.fromList $
+        body
+          <&> bimap
+            (\k -> [fmt|transmission.{k}|])
+            (\(_, attr) -> attr)
+    )
+  resp <-
+    Http.doRequestJson
+      ( (Http.mkRequestOptions (T2 (label @"method" "POST") (label @"host" dat.host)))
+          { Http.path = mkOptional ["transmission", "rpc"],
+            Http.port = mkOptional dat.port,
+            Http.headers = mkOptional $ (sessionId & ifExists ("X-Transmission-Session-Id",)),
+            Http.usePlainHttp = mkOptional dat.usePlainHttp
+          }
+      )
+      (body <&> second fst & Enc.object)
+  -- Implement the CSRF protection thingy
+  case resp & Http.getResponseStatus & (.statusCode) of
+    409 -> do
+      tid <-
+        resp
+          & Http.getResponseHeader "X-Transmission-Session-Id"
+          & nonEmpty
+          & annotate [fmt|Missing "X-Transmission-Session-Id" header in 409 response: {showPretty resp}|]
+          & unwrapIOError
+          & liftIO
+          <&> NonEmpty.head
+      setTransmissionId tid
+      doTransmissionRequest span dat (req, parser)
+    200 ->
+      resp
+        & Http.getResponseBody
+        & Json.parseStrict
+          ( Json.mapError singleError $ do
+              result <-
+                Json.key "result" Json.asText <&> \case
+                  "success" -> TransmissionResponseSuccess
+                  err -> TransmissionResponseFailure err
+              arguments <-
+                Json.keyMay "arguments" parser
+              tag <-
+                Json.keyMay
+                  "tag"
+                  (Field.toJsonParser (Field.jsonNumber >>> Field.boundedScientificIntegral "tag too long"))
+              pure TransmissionResponse {..}
+          )
+        & first (Json.parseErrorTree "Cannot parse transmission RPC response")
+        & \case
+          Right a -> pure a
+          Left err -> do
+            case Json.eitherDecodeStrict' @Json.Value (resp & Http.getResponseBody) of
+              Left _err -> pure ()
+              Right val -> logInfo [fmt|failing transmission response: {showPrettyJson val}|]
+            appThrowTree span err
+    _ -> liftIO $ unwrapIOError $ Left [fmt|Non-200 response: {showPretty resp}|]
+
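+-- A sketch of the wire exchange (hypothetical values; shape per the RPC spec
+-- linked above):
+--
+-- > POST /transmission/rpc
+-- > {"method":"torrent-get","arguments":{"fields":["hashString"]}}
+--
+-- The first answer is the CSRF challenge that the 409 branch above retries on:
+--
+-- > HTTP/1.1 409 Conflict
+-- > X-Transmission-Session-Id: some-session-id
+--
+-- Retried with that header set, the call succeeds:
+--
+-- > HTTP/1.1 200 OK
+-- > {"result":"success","arguments":{"torrents":[{"hashString":"…"}]}}
+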
+class MonadTransmission m where
+  getTransmissionId :: m (Maybe ByteString)
+  setTransmissionId :: ByteString -> m ()
+
+instance (MonadIO m) => MonadTransmission (AppT m) where
+  getTransmissionId = AppT (asks (.transmissionSessionId)) >>= tryTakeMVar
+  setTransmissionId t = do
+    var <- AppT $ asks (.transmissionSessionId)
+    putMVar var t
diff --git a/users/Profpatsch/whatcd-resolver/src/WhatcdResolver.hs b/users/Profpatsch/whatcd-resolver/src/WhatcdResolver.hs
new file mode 100644
index 0000000000..1ec23e1fc7
--- /dev/null
+++ b/users/Profpatsch/whatcd-resolver/src/WhatcdResolver.hs
@@ -0,0 +1,714 @@
+{-# LANGUAGE QuasiQuotes #-}
+
+module WhatcdResolver where
+
+import AppT
+import Control.Category qualified as Cat
+import Control.Monad.Catch.Pure (runCatch)
+import Control.Monad.Logger.CallStack
+import Control.Monad.Reader
+import Data.Aeson qualified as Json
+import Data.Aeson.BetterErrors qualified as Json
+import Data.Aeson.KeyMap qualified as KeyMap
+import Data.HashMap.Strict qualified as HashMap
+import Data.List qualified as List
+import Data.Map.Strict qualified as Map
+import Data.Pool qualified as Pool
+import Data.Text qualified as Text
+import Database.PostgreSQL.Simple qualified as Postgres
+import Database.PostgreSQL.Simple.SqlQQ (sql)
+import Database.PostgreSQL.Simple.Types (PGArray (PGArray))
+import Database.Postgres.Temp qualified as TmpPg
+import FieldParser (FieldParser, FieldParser' (..))
+import FieldParser qualified as Field
+import Html qualified
+import IHP.HSX.QQ (hsx)
+import Json qualified
+import Json.Enc (Enc)
+import Json.Enc qualified as Enc
+import JsonLd
+import Label
+import Multipart2 qualified as Multipart
+import MyPrelude
+import Network.HTTP.Client.Conduit qualified as Http
+import Network.HTTP.Simple qualified as Http
+import Network.HTTP.Types
+import Network.HTTP.Types qualified as Http
+import Network.URI (URI)
+import Network.URI qualified
+import Network.Wai (ResponseReceived)
+import Network.Wai qualified as Wai
+import Network.Wai.Handler.Warp qualified as Warp
+import Network.Wai.Parse qualified as Wai
+import OpenTelemetry.Attributes qualified as Otel
+import OpenTelemetry.Trace qualified as Otel hiding (getTracer, inSpan, inSpan')
+import OpenTelemetry.Trace.Monad qualified as Otel
+import Parse (Parse)
+import Parse qualified
+import Postgres.Decoder qualified as Dec
+import Postgres.MonadPostgres
+import Pretty
+import Redacted
+import System.Directory qualified as Dir
+import System.Directory qualified as Xdg
+import System.Environment qualified as Env
+import System.FilePath ((</>))
+import Text.Blaze.Html (Html)
+import Text.Blaze.Html.Renderer.Utf8 qualified as Html
+import Text.Blaze.Html5 qualified as Html
+import Tool (readTool, readTools)
+import Transmission
+import UnliftIO hiding (Handler)
+import Prelude hiding (span)
+
+main :: IO ()
+main =
+  runAppWith
+    ( do
+        -- todo: trace that to the init functions as well
+        Otel.inSpan "whatcd-resolver main function" Otel.defaultSpanArguments $ do
+          _ <- runTransaction migrate
+          htmlUi
+    )
+    <&> first showToError
+    >>= expectIOError "could not start whatcd-resolver"
+
+htmlUi :: AppT IO ()
+htmlUi = do
+  uniqueRunId <-
+    runTransaction $
+      querySingleRowWith
+        [sql|
+            SELECT gen_random_uuid()::text
+        |]
+        ()
+        (Dec.fromField @Text)
+
+  withRunInIO $ \runInIO -> Warp.run 9093 $ \req respondOrig -> do
+    let catchAppException act =
+          try act >>= \case
+            Right a -> pure a
+            Left (AppException err) -> do
+              runInIO (logError err)
+              respondOrig (Wai.responseLBS Http.status500 [] "")
+
+    catchAppException $ do
+      let mp span parser =
+            Multipart.parseMultipartOrThrow
+              (appThrowTree span)
+              parser
+              req
+
+      let torrentIdMp span =
+            mp
+              span
+              ( do
+                  label @"torrentId" <$> Multipart.field "torrent-id" ((Field.utf8 >>> Field.signedDecimal >>> Field.bounded @Int "int"))
+              )
+      let parseQueryArgs span parser =
+            Parse.runParse "Unable to find the right request query arguments" (lmap Wai.queryString parser) req
+              & assertM span id
+
+      let parseQueryArgsNewSpan spanName parser =
+            Parse.runParse "Unable to find the right request query arguments" (lmap Wai.queryString parser) req
+              & assertMNewSpan spanName id
+
+      let handlers :: Handlers (AppT IO)
+          handlers respond =
+            Map.fromList
+              [ ("", respond.html (mainHtml uniqueRunId)),
+                ( "snips/redacted/search",
+                  respond.html $
+                    \span -> do
+                      dat <-
+                        mp
+                          span
+                          ( do
+                              label @"searchstr" <$> Multipart.field "redacted-search" Cat.id
+                          )
+                      snipsRedactedSearch dat
+                ),
+                ( "snips/redacted/torrentDataJson",
+                  respond.html $ \span -> do
+                    dat <- torrentIdMp span
+                    Html.mkVal <$> (runTransaction $ getTorrentById dat)
+                ),
+                ( "snips/redacted/getTorrentFile",
+                  respond.html $ \span -> do
+                    dat <- torrentIdMp span
+                    runTransaction $ do
+                      inserted <- redactedGetTorrentFileAndInsert dat
+                      running <-
+                        lift @Transaction $
+                          doTransmissionRequest' (transmissionRequestAddTorrent inserted)
+                      updateTransmissionTorrentHashById
+                        ( T2
+                            (getLabel @"torrentHash" running)
+                            (getLabel @"torrentId" dat)
+                        )
+                      pure $
+                        everySecond
+                          "snips/transmission/getTorrentState"
+                          (Enc.object [("torrent-hash", Enc.text running.torrentHash)])
+                          "Starting"
+                ),
+                -- TODO: this is bad duplication??
+                ( "snips/redacted/startTorrentFile",
+                  respond.html $ \span -> do
+                    dat <- torrentIdMp span
+                    runTransaction $ do
+                      file <-
+                        getTorrentFileById dat
+                          <&> annotate [fmt|No torrent file for torrentId "{dat.torrentId}"|]
+                          >>= orAppThrowTree span
+
+                      running <-
+                        lift @Transaction $
+                          doTransmissionRequest' (transmissionRequestAddTorrent file)
+                      updateTransmissionTorrentHashById
+                        ( T2
+                            (getLabel @"torrentHash" running)
+                            (getLabel @"torrentId" dat)
+                        )
+                      pure $
+                        everySecond
+                          "snips/transmission/getTorrentState"
+                          (Enc.object [("torrent-hash", Enc.text running.torrentHash)])
+                          "Starting"
+                ),
+                ( "snips/transmission/getTorrentState",
+                  respond.html $ \span -> do
+                    dat <- mp span $ label @"torrentHash" <$> Multipart.field "torrent-hash" Field.utf8
+                    status <-
+                      doTransmissionRequest'
+                        ( transmissionRequestListOnlyTorrents
+                            ( T2
+                                (label @"ids" [label @"torrentHash" dat.torrentHash])
+                                (label @"fields" ["hashString"])
+                            )
+                            (Json.keyLabel @"torrentHash" "hashString" Json.asText)
+                        )
+                        <&> List.find (\torrent -> torrent.torrentHash == dat.torrentHash)
+
+                    pure $
+                      case status of
+                        Nothing -> [hsx|ERROR unknown|]
+                        Just _torrent -> [hsx|Running|]
+                ),
+                ( "snips/jsonld/render",
+                  respond.html $ \span -> do
+                    qry <-
+                      parseQueryArgs
+                        span
+                        ( label @"target"
+                            <$> ( (singleQueryArgument "target" Field.utf8 >>> textToURI)
+                                    & Parse.andParse uriToHttpClientRequest
+                                )
+                        )
+                    jsonld <- httpGetJsonLd (qry.target)
+                    pure $ renderJsonld jsonld
+                ),
+                ( "artist",
+                  respond.html $ \span -> do
+                    qry <-
+                      parseQueryArgs
+                        span
+                        ( label @"dbId"
+                            <$> (singleQueryArgument "db_id" Field.utf8)
+                        )
+                    artistPage qry
+                ),
+                ( "autorefresh",
+                  respond.plain $ do
+                    qry <-
+                      parseQueryArgsNewSpan
+                        "Autorefresh Query Parse"
+                        ( label @"hasItBeenRestarted"
+                            <$> singleQueryArgument "hasItBeenRestarted" Field.utf8
+                        )
+                    pure $
+                      Wai.responseLBS
+                        Http.ok200
+                        ( [("Content-Type", "text/html")]
+                            <> if uniqueRunId /= qry.hasItBeenRestarted
+                              then -- cause the client side to refresh
+                                [("HX-Refresh", "true")]
+                              else []
+                        )
+                        ""
+                )
+              ]
+      runInIO $
+        runHandlers
+          (\respond -> respond.html $ (mainHtml uniqueRunId))
+          handlers
+          req
+          respondOrig
+  where
+    everySecond :: Text -> Enc -> Html -> Html
+    everySecond call extraData innerHtml = [hsx|<div hx-trigger="every 1s" hx-swap="outerHTML" hx-post={call} hx-vals={Enc.encToBytesUtf8 extraData}>{innerHtml}</div>|]
+
+    mainHtml :: Text -> Otel.Span -> AppT IO Html
+    mainHtml uniqueRunId _span = runTransaction $ do
+      -- jsonld <-
+      --   httpGetJsonLd
+      --     ( URI.parseURI "https://musicbrainz.org/work/92000fd4-d304-406d-aeb4-6bdbeed318ec" & annotate "not an URI" & unwrapError,
+      --       "https://musicbrainz.org/work/92000fd4-d304-406d-aeb4-6bdbeed318ec"
+      --     )
+      --     <&> renderJsonld
+      bestTorrentsTable <- getBestTorrentsTable
+      -- transmissionTorrentsTable <- lift @Transaction getTransmissionTorrentsTable
+      pure $
+        Html.docTypeHtml
+          [hsx|
+      <head>
+        <title>whatcd-resolver</title>
+        <meta charset="utf-8">
+        <meta name="viewport" content="width=device-width, initial-scale=1">
+        <link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-9ndCyUaIbzAi2FUVXJi0CjmCapSmO7SnpJef0486qhLnuZ2cdeRhO02iuK6FUUVM" crossorigin="anonymous">
+        <script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/js/bootstrap.bundle.min.js" integrity="sha384-geWF76RCwLtnZ8qwWowPQNguL3RmwHVBC9FhGdlKrxdiJJigb/j/68SIy3Te4Bkz" crossorigin="anonymous"></script>
+        <script src="https://unpkg.com/htmx.org@1.9.2" integrity="sha384-L6OqL9pRWyyFU3+/bjdSri+iIphTN/bvYyM37tICVyOJkWZLpP2vGn6VUEXgzg6h" crossorigin="anonymous"></script>
+        <style>
+          dl {
+            margin: 1em;
+            padding: 0.5em 1em;
+            border: thin solid;
+          }
+        </style>
+      </head>
+      <body>
+        <form
+          hx-post="/snips/redacted/search"
+          hx-target="#redacted-search-results">
+          <label for="redacted-search">Redacted Search</label>
+          <input
+            id="redacted-search"
+            type="text"
+            name="redacted-search" />
+          <button type="submit" hx-disabled-elt="this">Search</button>
+          <div class="htmx-indicator">Search running!</div>
+        </form>
+        <div id="redacted-search-results">
+          {bestTorrentsTable}
+        </div>
+        <!-- refresh the page if the uniqueRunId is different -->
+        <input
+             hidden
+             type="text"
+             id="autorefresh"
+             name="hasItBeenRestarted"
+             value={uniqueRunId}
+             hx-get="/autorefresh"
+             hx-trigger="every 5s"
+             hx-swap="none"
+        />
+      </body>
+    |]
+
+artistPage :: (HasField "dbId" dat Text, Applicative m) => dat -> m Html
+artistPage dat = do
+  pure
+    [hsx|
+    Artist ID: {dat.dbId}
+  |]
+
+type Handlers m = HandlerResponses m -> Map Text (m ResponseReceived)
+
+data HandlerResponses m = HandlerResponses
+  { -- | render html
+    html :: ((Otel.Span -> m Html) -> m ResponseReceived),
+    -- | render a plain wai response
+    plain :: (m Wai.Response -> m ResponseReceived)
+  }
+
+runHandlers ::
+  (MonadOtel m) =>
+  (HandlerResponses m -> m ResponseReceived) ->
+  (HandlerResponses m -> Map Text (m ResponseReceived)) ->
+  Wai.Request ->
+  (Wai.Response -> IO ResponseReceived) ->
+  m ResponseReceived
+runHandlers defaultHandler handlers req respond = withRunInIO $ \runInIO -> do
+  let path = req & Wai.pathInfo & Text.intercalate "/"
+  let handlerResponses =
+        ( HandlerResponses
+            { plain = (\m -> liftIO $ runInIO m >>= respond),
+              html = \act ->
+                Otel.inSpan'
+                  [fmt|Route /{path}|]
+                  ( Otel.defaultSpanArguments
+                      { Otel.attributes =
+                          HashMap.fromList
+                            [ ("server.path", Otel.toAttribute @Text path)
+                            ]
+                      }
+                  )
+                  ( \span -> do
+                      res <- act span <&> (\html -> T2 (label @"html" html) (label @"extraHeaders" []))
+                      liftIO $ respond . Wai.responseLBS Http.ok200 ([("Content-Type", "text/html")] <> res.extraHeaders) . Html.renderHtml $ res.html
+                  )
+            }
+        )
+  let handler =
+        (handlers handlerResponses)
+          & Map.lookup path
+          & fromMaybe (defaultHandler handlerResponses)
+  runInIO handler
+
+singleQueryArgument :: Text -> FieldParser ByteString to -> Parse Http.Query to
+singleQueryArgument field inner =
+  Parse.mkParsePushContext
+    field
+    ( \(ctx, qry) -> case qry
+        & mapMaybe
+          ( \(k, v) ->
+              if k == (field & textToBytesUtf8)
+                then Just v
+                else Nothing
+          ) of
+        [] -> Left [fmt|No such query argument "{field}", at {ctx & Parse.showContext}|]
+        [Nothing] -> Left [fmt|Expected one query argument with a value, but "{field}" was a query flag|]
+        [Just one] -> Right one
+        more -> Left [fmt|More than one value for query argument "{field}": {show more}, at {ctx & Parse.showContext}|]
+    )
+    >>> Parse.fieldParser inner
+
+-- | Make sure we can parse the given Text into a URI.
+textToURI :: Parse Text URI
+textToURI =
+  Parse.fieldParser
+    ( FieldParser $ \text ->
+        text
+          & textToString
+          & Network.URI.parseURI
+          & annotate [fmt|Cannot parse this as a URL: "{text}"|]
+    )
+
+-- | Make sure we can parse the given URI into a Request.
+--
+-- This tries to work around the horrible, horrible interface in Http.Client.
+uriToHttpClientRequest :: Parse URI Http.Request
+uriToHttpClientRequest =
+  Parse.mkParseNoContext
+    ( \url ->
+        (url & Http.requestFromURI)
+          & runCatch
+          & first (checkException @Http.HttpException)
+          & \case
+            Left (Right (Http.InvalidUrlException urlText reason)) ->
+              Left [fmt|Unable to set the url "{urlText}" as request URL, reason: {reason}|]
+            Left (Right exc@(Http.HttpExceptionRequest _ _)) ->
+              Left [fmt|Weird! Should not get a HttpExceptionRequest when parsing a URL (bad library design), was {exc & displayException}|]
+            Left (Left someExc) ->
+              Left [fmt|Weird! Should not get anything but a HttpException when parsing a URL (bad library design), was {someExc & displayException}|]
+            Right req -> pure req
+    )
+
+checkException :: (Exception b) => SomeException -> Either SomeException b
+checkException some = case fromException some of
+  Nothing -> Left some
+  Just e -> Right e
+
+snipsRedactedSearch ::
+  ( MonadLogger m,
+    MonadPostgres m,
+    HasField "searchstr" r ByteString,
+    MonadThrow m,
+    MonadTransmission m,
+    MonadOtel m
+  ) =>
+  r ->
+  m Html
+snipsRedactedSearch dat = do
+  t <-
+    redactedSearchAndInsert
+      [ ("searchstr", dat.searchstr),
+        ("releasetype", "album")
+      ]
+  runTransaction $ do
+    t
+    getBestTorrentsTable
+
+getBestTorrentsTable ::
+  ( MonadTransmission m,
+    MonadThrow m,
+    MonadLogger m,
+    MonadPostgres m,
+    MonadOtel m
+  ) =>
+  Transaction m Html
+getBestTorrentsTable = do
+  bestStale :: [TorrentData ()] <- getBestTorrents (label @"onlyDownloaded" False)
+  actual <-
+    getAndUpdateTransmissionTorrentsStatus
+      ( bestStale
+          & mapMaybe
+            ( \td -> case td.torrentStatus of
+                InTransmission h -> Just h
+                _ -> Nothing
+            )
+          <&> (\t -> (getLabel @"torrentHash" t, t.transmissionInfo))
+          & Map.fromList
+      )
+  let fresh =
+        bestStale
+          -- we have to update the status of every torrent that's not in transmission anymore
+          -- TODO I feel like it’s easier (& more correct?) to just do the database request again …
+          <&> ( \td -> case td.torrentStatus of
+                  InTransmission info ->
+                    case actual & Map.lookup (getLabel @"torrentHash" info) of
+                      -- TODO this is also pretty dumb, cause it assumes that we have the torrent file if it was in transmission before,
+                      -- which is an internal fact that is established in getBestTorrents (and might change later)
+                      Nothing -> td {torrentStatus = NotInTransmissionYet}
+                      Just transmissionInfo -> td {torrentStatus = InTransmission (T2 (getLabel @"torrentHash" info) (label @"transmissionInfo" transmissionInfo))}
+                  NotInTransmissionYet -> td {torrentStatus = NotInTransmissionYet}
+                  NoTorrentFileYet -> td {torrentStatus = NoTorrentFileYet}
+              )
+  let localTorrent b = case b.torrentStatus of
+        NoTorrentFileYet -> [hsx|<button hx-post="snips/redacted/getTorrentFile" hx-swap="outerHTML" hx-vals={Enc.encToBytesUtf8 $ Enc.object [("torrent-id", Enc.int b.torrentId)]}>Upload Torrent</button>|]
+        InTransmission info -> [hsx|{info.transmissionInfo.percentDone.unPercentage}% done|]
+        NotInTransmissionYet -> [hsx|<button hx-post="snips/redacted/startTorrentFile" hx-swap="outerHTML" hx-vals={Enc.encToBytesUtf8 $ Enc.object [("torrent-id", Enc.int b.torrentId)]}>Start Torrent</button>|]
+  let bestRows =
+        fresh
+          & foldMap
+            ( \b -> do
+                let artistLink :: Text = [fmt|/artist?db_id={b.groupId}|]
+                [hsx|
+                  <tr>
+                  <td>{localTorrent b}</td>
+                  <td>{Html.toHtml @Int b.groupId}</td>
+                  <td>
+                    <a href={artistLink}>
+                      {Html.toHtml @Text b.torrentGroupJson.artist}
+                    </a>
+                  </td>
+                  <td>{Html.toHtml @Text b.torrentGroupJson.groupName}</td>
+                  <td>{Html.toHtml @Int b.seedingWeight}</td>
+                  <td><details hx-trigger="toggle once" hx-post="snips/redacted/torrentDataJson" hx-vals={Enc.encToBytesUtf8 $ Enc.object [("torrent-id", Enc.int b.torrentId)]}></details></td>
+                  </tr>
+                |]
+            )
+  pure $
+    [hsx|
+        <table class="table">
+          <thead>
+            <tr>
+              <th>Local</th>
+              <th>Group ID</th>
+              <th>Artist</th>
+              <th>Name</th>
+              <th>Weight</th>
+              <th>Torrent</th>
+              <th>Torrent Group</th>
+            </tr>
+          </thead>
+          <tbody>
+            {bestRows}
+          </tbody>
+        </table>
+      |]
+
+getTransmissionTorrentsTable ::
+  (MonadTransmission m, MonadThrow m, MonadLogger m, MonadOtel m) => m Html
+getTransmissionTorrentsTable = do
+  let fields =
+        [ "hashString",
+          "name",
+          "percentDone",
+          "percentComplete",
+          "downloadDir",
+          "files"
+        ]
+  doTransmissionRequest'
+    ( transmissionRequestListAllTorrents fields $ do
+        Json.asObject <&> KeyMap.toMapText
+    )
+    <&> \resp ->
+      Html.toTable
+        ( resp
+            & List.sortOn (\m -> m & Map.lookup "percentDone" & fromMaybe (Json.Number 0))
+            <&> Map.toList
+            -- TODO
+            & List.take 100
+        )
+
+unzip3PGArray :: [(a1, a2, a3)] -> (PGArray a1, PGArray a2, PGArray a3)
+unzip3PGArray xs = xs & unzip3 & \(a, b, c) -> (PGArray a, PGArray b, PGArray c)
+
+assertOneUpdated ::
+  (HasField "numberOfRowsAffected" r Natural, MonadThrow m, MonadIO m) =>
+  Otel.Span ->
+  Text ->
+  r ->
+  m ()
+assertOneUpdated span name x = case x.numberOfRowsAffected of
+  1 -> pure ()
+  n -> appThrowTree span ([fmt|{name :: Text}: Expected to update exactly one row, but updated {n :: Natural} row(s)|])
+
+migrate ::
+  ( MonadPostgres m,
+    MonadOtel m
+  ) =>
+  Transaction m (Label "numberOfRowsAffected" Natural)
+migrate = inSpan "Database Migration" $ do
+  execute
+    [sql|
+    CREATE SCHEMA IF NOT EXISTS redacted;
+
+    CREATE TABLE IF NOT EXISTS redacted.torrent_groups (
+      id SERIAL PRIMARY KEY,
+      group_id INTEGER,
+      group_name TEXT,
+      full_json_result JSONB,
+      UNIQUE(group_id)
+    );
+
+    CREATE TABLE IF NOT EXISTS redacted.torrents_json (
+      id SERIAL PRIMARY KEY,
+      torrent_id INTEGER,
+      torrent_group SERIAL NOT NULL REFERENCES redacted.torrent_groups(id) ON DELETE CASCADE,
+      full_json_result JSONB,
+      UNIQUE(torrent_id)
+    );
+
+    ALTER TABLE redacted.torrents_json
+    ADD COLUMN IF NOT EXISTS torrent_file bytea NULL;
+    ALTER TABLE redacted.torrents_json
+    ADD COLUMN IF NOT EXISTS transmission_torrent_hash text NULL;
+
+    -- pull computed values out of the full json result
+
+    CREATE OR REPLACE VIEW redacted.torrents AS
+    SELECT
+      t.id,
+      t.torrent_id,
+      t.torrent_group,
+      -- the seeding weight is used to find the best torrent in a group.
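+      -- e.g. (hypothetical numbers) 10 seeders and 5 snatches on a remaster:
+      -- (10*3 + 5) * 3 = 105, while the same torrent without remaster scores 35.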
+      ( ((full_json_result->'seeders')::integer*3
+        + (full_json_result->'snatches')::integer
+        )
+      -- prefer remasters by multiplying them with 3
+      * (CASE
+          WHEN full_json_result->>'remasterTitle' ILIKE '%remaster%'
+          THEN 3
+          ELSE 1
+         END)
+      )
+      AS seeding_weight,
+      t.full_json_result,
+      t.torrent_file,
+      t.transmission_torrent_hash
+    FROM redacted.torrents_json t;
+
+    CREATE INDEX IF NOT EXISTS torrents_json_seeding ON redacted.torrents_json(((full_json_result->'seeding')::integer));
+    CREATE INDEX IF NOT EXISTS torrents_json_snatches ON redacted.torrents_json(((full_json_result->'snatches')::integer));
+  |]
+    ()
+
+httpTorrent ::
+  ( MonadIO m,
+    MonadThrow m
+  ) =>
+  Otel.Span ->
+  Http.Request ->
+  m ByteString
+httpTorrent span req =
+  Http.httpBS req
+    >>= assertM
+      span
+      ( \resp -> do
+          let statusCode = resp & Http.responseStatus & (.statusCode)
+              contentType =
+                resp
+                  & Http.responseHeaders
+                  & List.lookup "content-type"
+                  <&> Wai.parseContentType
+                  <&> (\(ct, _mimeAttributes) -> ct)
+          if
+            | statusCode == 200,
+              Just "application/x-bittorrent" <- contentType ->
+                Right $ (resp & Http.responseBody)
+            | statusCode == 200,
+              Just otherType <- contentType ->
+                Left [fmt|Redacted returned a non-torrent body, with content-type "{otherType}"|]
+            | statusCode == 200,
+              Nothing <- contentType ->
+                Left [fmt|Redacted returned a body with unspecified content type|]
+            | code <- statusCode -> Left [fmt|Redacted returned a non-200 error code, code {code}: {resp & showPretty}|]
+      )
+
+runAppWith :: AppT IO a -> IO (Either TmpPg.StartError a)
+runAppWith appT = withTracer $ \tracer -> withDb $ \db -> do
+  tool <- readTools (label @"toolsEnvVar" "WHATCD_RESOLVER_TOOLS") (readTool "pg_format")
+  pgFormat <- initPgFormatPool (label @"pgFormat" tool)
+  let config = label @"logDatabaseQueries" LogDatabaseQueries
+  pgConnPool <-
+    Pool.newPool $
+      Pool.defaultPoolConfig
+        {- resource init action -} (Postgres.connectPostgreSQL (db & TmpPg.toConnectionString))
+        {- resource destruction -} Postgres.close
+        {- unusedResourceOpenTime -} 10
+        {- max resources across all stripes -} 20
+  transmissionSessionId <- newEmptyMVar
+  let newAppT = do
+        logInfo [fmt|Running with config: {showPretty config}|]
+        logInfo [fmt|Connected to database at {db & TmpPg.toDataDirectory} on socket {db & TmpPg.toConnectionString}|]
+        appT
+  runReaderT newAppT.unAppT Context {..}
+
+withTracer :: (Otel.Tracer -> IO c) -> IO c
+withTracer f = do
+  setDefaultEnv "OTEL_SERVICE_NAME" "whatcd-resolver"
+  bracket
+    -- Install the SDK, pulling configuration from the environment
+    ( do
+        (processors, opts) <- Otel.getTracerProviderInitializationOptions
+        tp <-
+          Otel.createTracerProvider
+            processors
+            -- workaround the attribute length bug https://github.com/iand675/hs-opentelemetry/issues/113
+            ( opts
+                { Otel.tracerProviderOptionsAttributeLimits =
+                    opts.tracerProviderOptionsAttributeLimits
+                      { Otel.attributeCountLimit = Just 65_000
+                      }
+                }
+            )
+        Otel.setGlobalTracerProvider tp
+        pure tp
+    )
+    -- Ensure that any spans that haven't been exported yet are flushed
+    Otel.shutdownTracerProvider
+    -- Get a tracer so you can create spans
+    (\tracerProvider -> f $ Otel.makeTracer tracerProvider "whatcd-resolver" Otel.tracerOptions)
+
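+-- | Set an environment variable to a default value if it is not already set,
+-- e.g. @setDefaultEnv "OTEL_TRACES_EXPORTER" "otlp"@ (illustrative values).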
+setDefaultEnv :: String -> String -> IO ()
+setDefaultEnv envName defaultValue = do
+  Env.lookupEnv envName >>= \case
+    Just _env -> pure ()
+    Nothing -> Env.setEnv envName defaultValue
+
+withDb :: (TmpPg.DB -> IO a) -> IO (Either TmpPg.StartError a)
+withDb act = do
+  dataDir <- Xdg.getXdgDirectory Xdg.XdgData "whatcd-resolver"
+  let databaseDir = dataDir </> "database"
+  let socketDir = dataDir </> "database-socket"
+  Dir.createDirectoryIfMissing True socketDir
+  initDbConfig <-
+    Dir.doesDirectoryExist databaseDir >>= \case
+      True -> pure TmpPg.Zlich
+      False -> do
+        putStderrLn [fmt|Database does not exist yet, creating in "{databaseDir}"|]
+        Dir.createDirectoryIfMissing True databaseDir
+        pure TmpPg.DontCare
+  let cfg =
+        mempty
+          { TmpPg.dataDirectory = TmpPg.Permanent databaseDir,
+            TmpPg.socketDirectory = TmpPg.Permanent socketDir,
+            TmpPg.port = pure $ Just 5431,
+            TmpPg.initDbConfig
+          }
+  TmpPg.withConfig cfg $ \db -> do
+    -- print [fmt|data dir: {db & TmpPg.toDataDirectory}|]
+    -- print [fmt|conn string: {db & TmpPg.toConnectionString}|]
+    act db
diff --git a/users/Profpatsch/whatcd-resolver/whatcd-resolver.cabal b/users/Profpatsch/whatcd-resolver/whatcd-resolver.cabal
new file mode 100644
index 0000000000..8b3258bb5f
--- /dev/null
+++ b/users/Profpatsch/whatcd-resolver/whatcd-resolver.cabal
@@ -0,0 +1,125 @@
+cabal-version:      3.0
+name:               whatcd-resolver
+version:            0.1.0.0
+author:             Profpatsch
+maintainer:         mail@profpatsch.de
+
+common common-options
+  ghc-options:
+      -Wall
+      -Wno-type-defaults
+      -Wunused-packages
+      -Wredundant-constraints
+      -fwarn-missing-deriving-strategies
+
+  -- See https://downloads.haskell.org/ghc/latest/docs/users_guide/exts.html
+  -- for a description of all these extensions
+  default-extensions:
+      -- Infer Applicative instead of Monad where possible
+    ApplicativeDo
+
+    -- Allow literal strings to be Text
+    OverloadedStrings
+
+    -- Syntactic sugar improvements
+    LambdaCase
+    MultiWayIf
+
+    -- Makes the (deprecated) usage of * instead of Data.Kind.Type an error
+    NoStarIsType
+
+    -- Convenient and crucial to deal with ambiguous field names, commonly
+    -- known as RecordDotSyntax
+    OverloadedRecordDot
+
+    -- Does not export record fields as functions; use OverloadedRecordDot to access them instead
+    NoFieldSelectors
+
+    -- Allow the same record field name to be declared twice per module.
+    -- This works, because we use `OverloadedRecordDot` everywhere (enforced by `NoFieldSelectors`).
+    DuplicateRecordFields
+
+    -- Record wildcards, i.e. the Foo {..} syntax
+    RecordWildCards
+
+    -- Improved Deriving
+    DerivingStrategies
+    DerivingVia
+
+    -- Type-level strings
+    DataKinds
+
+    -- to enable the `type` keyword in import lists (ormolu uses this automatically)
+    ExplicitNamespaces
+
+    -- allows defining pattern synonyms, but also the `import Foo (pattern FooPattern)` import syntax
+    PatternSynonyms
+
+  default-language: GHC2021
+
+library
+    import: common-options
+
+    hs-source-dirs: src
+
+    exposed-modules:
+       WhatcdResolver
+       AppT
+       JsonLd
+       Optional
+       Http
+       Html
+       Transmission
+       Redacted
+
+    build-depends:
+        base >=4.15 && <5,
+        text,
+        my-prelude,
+        my-webstuff,
+        pa-prelude,
+        pa-error-tree,
+        pa-label,
+        pa-json,
+        pa-field-parser,
+        pa-run-command,
+        aeson-better-errors,
+        aeson,
+        blaze-html,
+        bytestring,
+        case-insensitive,
+        containers,
+        unordered-containers,
+        directory,
+        exceptions,
+        filepath,
+        hs-opentelemetry-sdk,
+        hs-opentelemetry-api,
+        http-conduit,
+        http-types,
+        http-client,
+        ihp-hsx,
+        monad-logger,
+        mtl,
+        network-uri,
+        resource-pool,
+        postgresql-simple,
+        punycode,
+        tmp-postgres,
+        unliftio,
+        wai-extra,
+        wai,
+        warp,
+
+executable whatcd-resolver
+    import: common-options
+
+    main-is: Main.hs
+
+    build-depends:
+        base >=4.15 && <5,
+        whatcd-resolver
+
+    ghc-options:
+      -threaded
+
diff --git a/users/Profpatsch/writers/default.nix b/users/Profpatsch/writers/default.nix
new file mode 100644
index 0000000000..9fb69231a1
--- /dev/null
+++ b/users/Profpatsch/writers/default.nix
@@ -0,0 +1,120 @@
+{ depot, pkgs, lib, ... }:
+let
+  bins = depot.nix.getBins pkgs.s6-portable-utils [ "s6-mkdir" "s6-cat" "s6-ln" "s6-ls" "s6-touch" ]
+    // depot.nix.getBins pkgs.coreutils [ "printf" ];
+
+  inherit (depot.nix.yants) defun struct restrict attrs list string drv any;
+
+  inherit (depot.nix) drvSeqL;
+
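+  # a flake8 code like "E501" or "W391" (only the first character is checked)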
+  FlakeError =
+    restrict
+      "flake error"
+      (s: lib.any (prefix: (builtins.substring 0 1 s) == prefix)
+        [ "E" "W" ])
+      string;
+  Libraries = defun [ (attrs any) (list drv) ];
+
+  pythonPackages = pkgs.python310Packages;
+  buildPythonPackages = pkgs.buildPackages.python310Packages;
+  python = pythonPackages.python;
+
+  python3 =
+    { name
+    , libraries ? (_: [ ])
+    , flakeIgnore ? [ ]
+    }:
+    pkgs.writers.makePythonWriter python pythonPackages buildPythonPackages name {
+      libraries = Libraries libraries pythonPackages;
+      flakeIgnore =
+        let
+          ignoreTheseErrors = [
+            # whitespace after {
+            "E201"
+            # whitespace before }
+            "E202"
+            # fuck 4-space indentation
+            "E121"
+            "E111"
+            # who cares about blank lines …
+            # … at end of files
+            "W391"
+            # … between functions
+            "E302"
+            "E305"
+            # … if there’s too many of them
+            "E303"
+            # or lines that are too long
+            "E501"
+          ];
+        in
+        list FlakeError (ignoreTheseErrors ++ flakeIgnore);
+    };
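+
+  # usage sketch (hypothetical script; requests stands in for any library):
+  #   myTool = python3 { name = "my-tool"; libraries = p: [ p.requests ]; } ''
+  #     import requests
+  #     print(requests.get("https://example.com").status_code)
+  #   '';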
+
+  # TODO: add the same flake check as the python3 writer
+  python3Lib = { name, libraries ? (_: [ ]) }: moduleString:
+    let
+      srcTree = depot.nix.runExecline.local name { stdin = moduleString; } [
+        "importas"
+        "out"
+        "out"
+        "if"
+        [ bins.s6-mkdir "-p" "\${out}/${name}" ]
+        "if"
+        [
+          "redirfd"
+          "-w"
+          "1"
+          "\${out}/setup.py"
+          bins.printf
+          ''
+            from distutils.core import setup
+
+            setup(
+              name='%s',
+              packages=['%s']
+            )
+          ''
+          name
+          name
+        ]
+        "if"
+        [
+          # redirect stdin to the init py
+          "redirfd"
+          "-w"
+          "1"
+          "\${out}/${name}/__init__.py"
+          bins.s6-cat
+        ]
+      ];
+    in
+    pythonPackages.buildPythonPackage {
+      inherit name;
+      src = srcTree;
+      propagatedBuildInputs = libraries pythonPackages;
+      doCheck = false;
+    };
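+
+  # see ./tests/default.nix in this commit for a python3Lib usage example
+  # (including a transitive library dependency)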
+
+
+  ghcBins = libraries: depot.nix.getBins (pkgs.ghc.withPackages (_: libraries)) [ "runghc" ];
+
+  writeHaskellInteractive = name: { libraries, ghcArgs ? [ ] }: path:
+    depot.nix.writeExecline name { } ([
+      (ghcBins libraries).runghc
+      "--"
+    ] ++ ghcArgs ++ [
+      "--"
+      path
+    ]);
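+
+  # usage sketch (hypothetical path and library):
+  #   repl = writeHaskellInteractive "my-script"
+  #     { libraries = [ pkgs.haskellPackages.aeson ]; }
+  #     ./my-script.hs;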
+
+in
+{
+  inherit
+    python3
+    python3Lib
+    writeHaskellInteractive
+    ;
+}
diff --git a/users/Profpatsch/writers/tests/default.nix b/users/Profpatsch/writers/tests/default.nix
new file mode 100644
index 0000000000..879aae82f7
--- /dev/null
+++ b/users/Profpatsch/writers/tests/default.nix
@@ -0,0 +1,56 @@
+{ depot, pkgs, ... }:
+
+let
+  inherit (depot.users.Profpatsch.writers)
+    python3Lib
+    python3
+    ;
+
+  inherit (pkgs)
+    coreutils
+    ;
+
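+  # build the given test derivation (a failing assertion fails the build),
+  # then create an empty $out so it works as a depot target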
+  run = drv: depot.nix.runExecline.local "run-${drv.name}" { } [
+    "if"
+    [ drv ]
+    "importas"
+    "out"
+    "out"
+    "${coreutils}/bin/touch"
+    "$out"
+  ];
+
+  pythonTransitiveLib = python3Lib
+    {
+      name = "transitive";
+    } ''
+    def transitive(s):
+      return s + " 1 2 3"
+  '';
+
+  pythonTestLib = python3Lib
+    {
+      name = "test_lib";
+      libraries = _: [ pythonTransitiveLib ];
+    } ''
+    import transitive
+    def test():
+      return transitive.transitive("test")
+  '';
+
+  pythonWithLib = run (python3
+    {
+      name = "python-with-lib";
+      libraries = _: [ pythonTestLib ];
+    } ''
+    import test_lib
+
+    assert test_lib.test() == "test 1 2 3"
+  '');
+
+in
+depot.nix.readTree.drvTargets {
+  inherit
+    pythonWithLib
+    ;
+}
diff --git a/users/Profpatsch/ytextr/README.md b/users/Profpatsch/ytextr/README.md
new file mode 100644
index 0000000000..f1e40d8e68
--- /dev/null
+++ b/users/Profpatsch/ytextr/README.md
@@ -0,0 +1,5 @@
+# ytextr
+
+Wrapper around `yt-dlp` for downloading videos with good default quality and settings.
+
+Will always download the most up-to-date `yt-dlp` first, because the tool usually stops working within a few weeks and needs updating, so the version pinned in `<nixpkgs>` often fails.
diff --git a/users/Profpatsch/ytextr/create-symlink-farm.nix b/users/Profpatsch/ytextr/create-symlink-farm.nix
new file mode 100644
index 0000000000..7b3a45b916
--- /dev/null
+++ b/users/Profpatsch/ytextr/create-symlink-farm.nix
@@ -0,0 +1,19 @@
+{
+  # list of package attribute names to get at run time
+  packageNamesAtRuntimeJsonPath
+,
+}:
+let
+  pkgs = import <nixpkgs> { };
+
+  getPkg = pkgName: pkgs.${pkgName};
+
+  packageNamesAtRuntime = builtins.fromJSON (builtins.readFile packageNamesAtRuntimeJsonPath);
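+  # e.g. for ytextr the file contains: ["yt-dlp"]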
+
+  runtime = map getPkg packageNamesAtRuntime;
+
+in
+pkgs.symlinkJoin {
+  name = "symlink-farm";
+  paths = runtime;
+}
diff --git a/users/Profpatsch/ytextr/default.nix b/users/Profpatsch/ytextr/default.nix
new file mode 100644
index 0000000000..3f3f073113
--- /dev/null
+++ b/users/Profpatsch/ytextr/default.nix
@@ -0,0 +1,82 @@
+{ depot, pkgs, lib, ... }:
+
+# ytextr is a wrapper around yt-dlp (previously youtube-dl)
+# that extracts a single video according to my preferred settings.
+#
+# It will be sandboxed to the current directory, since I don’t particularly
+# trust the massive codebase of that tool (with hundreds of contributors).
+#
+# Since downloading videos is usually against the wishes
+# of proprietary vendors, and a video is many megabytes anyway,
+# yt-dlp is fetched from the most recent nixpkgs unstable channel before running.
+
+let
+  bins = depot.nix.getBins pkgs.nix [ "nix-build" ]
+    // depot.nix.getBins pkgs.bubblewrap [ "bwrap" ];
+
+  # Run an executable with the given `packageNamesAtRuntime` put on PATH, fetching them from the given nix `channel` at startup.
+  nix-run-with-channel =
+    {
+      # The channel to get `packageNamesAtRuntime` from
+      channel
+    , # executable to run with `packageNamesAtRuntime` in PATH
+      # and the argv
+      executable
+    , # A list of nixpkgs package attribute names that should be put into PATH when running `executable`.
+      packageNamesAtRuntime
+    ,
+    }: depot.nix.writeExecline "nix-run-with-channel-${channel}" { } [
+      # TODO: prevent race condition by writing a temporary gc root
+      "backtick"
+      "-iE"
+      "storepath"
+      [
+        bins.nix-build
+        "-I"
+        "nixpkgs=channel:${channel}"
+        "--arg"
+        "packageNamesAtRuntimeJsonPath"
+        (pkgs.writeText "packageNamesAtRuntime.json" (builtins.toJSON packageNamesAtRuntime))
+        ./create-symlink-farm.nix
+      ]
+      "importas"
+      "-ui"
+      "PATH"
+      "PATH"
+      "export"
+      "PATH"
+      "\${storepath}/bin:\${PATH}"
+      executable
+      "$@"
+    ];
+
+in
+nix-run-with-channel {
+  channel = "nixos-unstable";
+  packageNamesAtRuntime = [ "yt-dlp" ];
+  executable = depot.nix.writeExecline "ytextr" { } [
+    "getcwd"
+    "-E"
+    "cwd"
+    bins.bwrap
+    "--ro-bind"
+    "/nix/store"
+    "/nix/store"
+    "--ro-bind"
+    "/etc"
+    "/etc"
+    "--bind"
+    "$cwd"
+    "$cwd"
+    "yt-dlp"
+    "--no-playlist"
+    "--write-sub"
+    "--all-subs"
+    "--embed-subs"
+    "--merge-output-format"
+    "mkv"
+    "-f"
+    "bestvideo[height<=?1080]+bestaudio/best"
+    "$@"
+  ];
+}