Diffstat (limited to 'users/Profpatsch')
-rw-r--r--  users/Profpatsch/.envrc | 5
-rw-r--r--  users/Profpatsch/.gitignore | 1
-rw-r--r--  users/Profpatsch/.hlint.yaml | 359
-rw-r--r--  users/Profpatsch/.vscode/launch.json | 18
-rw-r--r--  users/Profpatsch/.vscode/settings.json | 25
-rw-r--r--  users/Profpatsch/OWNERS | 8
-rw-r--r--  users/Profpatsch/README.md | 10
-rw-r--r--  users/Profpatsch/aerc-no-config-perms.patch | 12
-rw-r--r--  users/Profpatsch/aerc.dhall | 157
-rw-r--r--  users/Profpatsch/aerc.nix | 51
-rw-r--r--  users/Profpatsch/alacritty.dhall | 48
-rw-r--r--  users/Profpatsch/alacritty.nix | 17
-rw-r--r--  users/Profpatsch/aliases.nix | 13
-rw-r--r--  users/Profpatsch/arglib/ArglibNetencode.hs | 22
-rw-r--r--  users/Profpatsch/arglib/arglib-netencode.cabal | 65
-rw-r--r--  users/Profpatsch/arglib/netencode.nix | 111
-rw-r--r--  users/Profpatsch/blog/README.md | 7
-rw-r--r--  users/Profpatsch/blog/default.nix | 11
-rw-r--r--  users/Profpatsch/blog/notes/private-trackers-are-markets.md | 46
-rw-r--r--  users/Profpatsch/cabal.project | 14
-rw-r--r--  users/Profpatsch/cas-serve/CasServe.hs | 247
-rw-r--r--  users/Profpatsch/cas-serve/cas-serve.cabal | 73
-rw-r--r--  users/Profpatsch/cas-serve/default.nix | 38
-rw-r--r--  users/Profpatsch/cas-serve/schema.sql | 38
-rw-r--r--  users/Profpatsch/cas-serve/wordlist.json | 1
-rw-r--r--  users/Profpatsch/cas-serve/wordlist.sqlite | bin 0 -> 36864 bytes
-rw-r--r--  users/Profpatsch/declib/.eslintrc.json | 14
-rw-r--r--  users/Profpatsch/declib/.gitignore | 6
-rw-r--r--  users/Profpatsch/declib/.prettierrc | 8
-rw-r--r--  users/Profpatsch/declib/README.md | 4
-rw-r--r--  users/Profpatsch/declib/build.ninja | 16
-rw-r--r--  users/Profpatsch/declib/index.ts | 245
-rw-r--r--  users/Profpatsch/declib/package.json | 25
-rw-r--r--  users/Profpatsch/declib/tsconfig.json | 25
-rw-r--r--  users/Profpatsch/dhall/lib.dhall | 84
-rw-r--r--  users/Profpatsch/emacs-tree-sitter-move/README.md | 5
-rw-r--r--  users/Profpatsch/execline/ExecHelpers.hs | 48
-rw-r--r--  users/Profpatsch/execline/default.nix | 45
-rw-r--r--  users/Profpatsch/execline/exec-helpers.cabal | 14
-rw-r--r--  users/Profpatsch/execline/exec-helpers/Cargo.lock | 7
-rw-r--r--  users/Profpatsch/execline/exec-helpers/Cargo.toml | 8
-rw-r--r--  users/Profpatsch/execline/exec-helpers/default.nix | 6
-rw-r--r--  users/Profpatsch/execline/exec-helpers/exec_helpers.rs (renamed from users/Profpatsch/execline/exec_helpers.rs) | 0
-rw-r--r--  users/Profpatsch/fafo.jpg | bin 0 -> 26139 bytes
-rw-r--r--  users/Profpatsch/haskell-module-deps/README.md | 5
-rw-r--r--  users/Profpatsch/haskell-module-deps/default.nix | 55
-rw-r--r--  users/Profpatsch/haskell-module-deps/example-output-dhall-haskell.png | bin 0 -> 415873 bytes
-rw-r--r--  users/Profpatsch/hie.yaml | 36
-rw-r--r--  users/Profpatsch/htmx-experiment/Main.hs | 4
-rw-r--r--  users/Profpatsch/htmx-experiment/default.nix | 46
-rw-r--r--  users/Profpatsch/htmx-experiment/htmx-experiment.cabal | 89
-rw-r--r--  users/Profpatsch/htmx-experiment/src/HtmxExperiment.hs | 377
-rw-r--r--  users/Profpatsch/htmx-experiment/src/ServerErrors.hs | 244
-rw-r--r--  users/Profpatsch/htmx-experiment/src/ValidationParseT.hs | 40
-rw-r--r--  users/Profpatsch/httzip/Httzip.hs | 66
-rw-r--r--  users/Profpatsch/httzip/default.nix | 40
-rw-r--r--  users/Profpatsch/httzip/httzip.cabal | 73
-rw-r--r--  users/Profpatsch/ical-smolify/IcalSmolify.hs | 124
-rw-r--r--  users/Profpatsch/ical-smolify/README.md | 5
-rw-r--r--  users/Profpatsch/ical-smolify/default.nix | 23
-rw-r--r--  users/Profpatsch/ical-smolify/ical-smolify.cabal | 18
-rw-r--r--  users/Profpatsch/ini/default.nix | 6
-rw-r--r--  users/Profpatsch/ini/ini.dhall | 36
-rw-r--r--  users/Profpatsch/jaeger.nix | 46
-rw-r--r--  users/Profpatsch/jbovlaste-sqlite/JbovlasteSqlite.hs | 389
-rw-r--r--  users/Profpatsch/jbovlaste-sqlite/default.nix | 33
-rw-r--r--  users/Profpatsch/jbovlaste-sqlite/jbovlaste-sqlite.cabal | 76
-rw-r--r--  users/Profpatsch/lorri-wait-for-eval/LorriWaitForEval.hs | 173
-rw-r--r--  users/Profpatsch/lorri-wait-for-eval/README.md | 7
-rw-r--r--  users/Profpatsch/lorri-wait-for-eval/default.nix | 20
-rw-r--r--  users/Profpatsch/mailbox-org/MailboxOrg.hs | 523
-rw-r--r--  users/Profpatsch/mailbox-org/README.md | 7
-rw-r--r--  users/Profpatsch/mailbox-org/default.nix | 38
-rw-r--r--  users/Profpatsch/mailbox-org/mailbox-org.cabal | 95
-rw-r--r--  users/Profpatsch/mailbox-org/src/AesonQQ.hs | 24
-rw-r--r--  users/Profpatsch/my-prelude/README.md | 42
-rw-r--r--  users/Profpatsch/my-prelude/default.nix | 52
-rw-r--r--  users/Profpatsch/my-prelude/my-prelude.cabal | 121
-rw-r--r--  users/Profpatsch/my-prelude/src/Aeson.hs | 176
-rw-r--r--  users/Profpatsch/my-prelude/src/Arg.hs | 34
-rw-r--r--  users/Profpatsch/my-prelude/src/AtLeast.hs | 51
-rw-r--r--  users/Profpatsch/my-prelude/src/MyPrelude.hs | 776
-rw-r--r--  users/Profpatsch/my-prelude/src/Parse.hs | 174
-rw-r--r--  users/Profpatsch/my-prelude/src/Postgres/Decoder.hs | 94
-rw-r--r--  users/Profpatsch/my-prelude/src/Postgres/MonadPostgres.hs | 930
-rw-r--r--  users/Profpatsch/my-prelude/src/Pretty.hs | 108
-rw-r--r--  users/Profpatsch/my-prelude/src/Seconds.hs | 55
-rw-r--r--  users/Profpatsch/my-prelude/src/Test.hs | 115
-rw-r--r--  users/Profpatsch/my-prelude/src/Tool.hs | 75
-rw-r--r--  users/Profpatsch/my-prelude/src/ValidationParseT.hs | 16
-rw-r--r--  users/Profpatsch/my-webstuff/default.nix | 27
-rw-r--r--  users/Profpatsch/my-webstuff/my-webstuff.cabal | 72
-rw-r--r--  users/Profpatsch/my-webstuff/src/Multipart2.hs | 220
-rw-r--r--  users/Profpatsch/my-xmonad/Xmonad.hs | 127
-rw-r--r--  users/Profpatsch/my-xmonad/default.nix | 25
-rw-r--r--  users/Profpatsch/my-xmonad/my-xmonad.cabal | 62
-rw-r--r--  users/Profpatsch/netencode/Netencode.hs | 433
-rw-r--r--  users/Profpatsch/netencode/Netencode/Parse.hs | 102
-rw-r--r--  users/Profpatsch/netencode/README.md | 20
-rw-r--r--  users/Profpatsch/netencode/default.nix | 46
-rw-r--r--  users/Profpatsch/netencode/netencode.cabal | 74
-rw-r--r--  users/Profpatsch/netencode/netencode.rs | 112
-rw-r--r--  users/Profpatsch/netstring/default.nix | 9
-rw-r--r--  users/Profpatsch/nix-home/README.md | 7
-rw-r--r--  users/Profpatsch/nix-home/default.nix | 9
-rw-r--r--  users/Profpatsch/nix-tools.nix | 159
-rw-r--r--  users/Profpatsch/nixpkgs-rewriter/MetaStdenvLib.hs | 80
-rw-r--r--  users/Profpatsch/nixpkgs-rewriter/default.nix | 148
-rw-r--r--  users/Profpatsch/openlab-tools/Main.hs | 6
-rw-r--r--  users/Profpatsch/openlab-tools/default.nix | 69
-rw-r--r--  users/Profpatsch/openlab-tools/openlab-tools.cabal | 111
-rw-r--r--  users/Profpatsch/openlab-tools/src/OpenlabTools.hs | 551
-rw-r--r--  users/Profpatsch/read-http.rs | 2
-rw-r--r--  users/Profpatsch/reverse-haskell-deps/README.md | 3
-rw-r--r--  users/Profpatsch/reverse-haskell-deps/ReverseHaskellDeps.hs (renamed from users/Profpatsch/reverse-haskell-deps.hs) | 72
-rw-r--r--  users/Profpatsch/reverse-haskell-deps/default.nix (renamed from users/Profpatsch/reverse-haskell-deps.nix) | 5
-rw-r--r--  users/Profpatsch/reverse-haskell-deps/reverse-haskell-deps.cabal | 16
-rw-r--r--  users/Profpatsch/shell.nix | 110
-rw-r--r--  users/Profpatsch/shortcuttable/default.nix | 172
-rw-r--r--  users/Profpatsch/solarized.dhall | 39
-rw-r--r--  users/Profpatsch/struct-edit/default.nix | 13
-rw-r--r--  users/Profpatsch/struct-edit/main.go | 431
-rw-r--r--  users/Profpatsch/sync-abfall-ics-aichach-friedberg/README.md | 3
-rw-r--r--  users/Profpatsch/sync-abfall-ics-aichach-friedberg/default.nix | 31
-rw-r--r--  users/Profpatsch/sync-abfall-ics-aichach-friedberg/ics-to-caldav.dhall | 139
-rw-r--r--  users/Profpatsch/sync-abfall-ics-aichach-friedberg/sync-ics-to-dir.py | 133
-rw-r--r--  users/Profpatsch/tagtime/README.md | 18
-rw-r--r--  users/Profpatsch/whatcd-resolver/Main.hs | 6
-rw-r--r--  users/Profpatsch/whatcd-resolver/README.md | 21
-rw-r--r--  users/Profpatsch/whatcd-resolver/build.ninja | 20
-rw-r--r--  users/Profpatsch/whatcd-resolver/default.nix | 76
-rw-r--r--  users/Profpatsch/whatcd-resolver/notes.org | 48
-rw-r--r--  users/Profpatsch/whatcd-resolver/server-notes.org | 2
-rw-r--r--  users/Profpatsch/whatcd-resolver/services/.gitignore | 3
-rwxr-xr-x  users/Profpatsch/whatcd-resolver/services/jaeger/run | 3
-rwxr-xr-x  users/Profpatsch/whatcd-resolver/services/reverse-proxy/run | 2
-rw-r--r--  users/Profpatsch/whatcd-resolver/src/AppT.hs | 150
-rw-r--r--  users/Profpatsch/whatcd-resolver/src/Html.hs | 69
-rw-r--r--  users/Profpatsch/whatcd-resolver/src/Http.hs | 131
-rw-r--r--  users/Profpatsch/whatcd-resolver/src/JsonLd.hs | 137
-rw-r--r--  users/Profpatsch/whatcd-resolver/src/Optional.hs | 18
-rw-r--r--  users/Profpatsch/whatcd-resolver/src/Redacted.hs | 572
-rw-r--r--  users/Profpatsch/whatcd-resolver/src/Transmission.hs | 319
-rw-r--r--  users/Profpatsch/whatcd-resolver/src/WhatcdResolver.hs | 800
-rw-r--r--  users/Profpatsch/whatcd-resolver/whatcd-resolver.cabal | 125
-rw-r--r--  users/Profpatsch/writers/default.nix | 31
-rw-r--r--  users/Profpatsch/writers/tests/default.nix | 2
-rw-r--r--  users/Profpatsch/ytextr/README.md | 5
-rw-r--r--  users/Profpatsch/ytextr/default.nix | 4
149 files changed, 12576 insertions, 1118 deletions
diff --git a/users/Profpatsch/.envrc b/users/Profpatsch/.envrc
new file mode 100644
index 0000000000..c91f923756
--- /dev/null
+++ b/users/Profpatsch/.envrc
@@ -0,0 +1,5 @@
+if pass apps/declib/mastodon_access_token >/dev/null; then
+    export DECLIB_MASTODON_ACCESS_TOKEN=$(pass apps/declib/mastodon_access_token)
+fi
+
+eval "$(lorri direnv)"
diff --git a/users/Profpatsch/.gitignore b/users/Profpatsch/.gitignore
new file mode 100644
index 0000000000..c33954f53a
--- /dev/null
+++ b/users/Profpatsch/.gitignore
@@ -0,0 +1 @@
+dist-newstyle/
diff --git a/users/Profpatsch/.hlint.yaml b/users/Profpatsch/.hlint.yaml
new file mode 100644
index 0000000000..12b7c61b70
--- /dev/null
+++ b/users/Profpatsch/.hlint.yaml
@@ -0,0 +1,359 @@
+# HLint configuration file
+# https://github.com/ndmitchell/hlint
+# Run `hlint --default` to see the example configuration file.
+##########################
+
+# WARNING: These need to be synced with the default-extensions field
+# in the cabal file.
+- arguments: [-XGHC2021, -XOverloadedRecordDot]
+
+# Ignore some builtin hints
+
+# often functions are more readable with explicit arguments
+- ignore: { name: Eta reduce }
+
+# these redundancy warnings are just completely irrelevant
+- ignore: { name: Redundant bracket }
+- ignore: { name: Move brackets to avoid $ }
+- ignore: { name: Redundant $ }
+- ignore: { name: Redundant do }
+- ignore: { name: Redundant multi-way if }
+
+# allow case-matching on bool, because why not
+- ignore: { name: Use if }
+
+# hlint cannot distinguish actual newtypes from data types
+# that accidentally have only one field
+# (but might have more in the future).
+# Since it’s a mostly irrelevant runtime optimization, we don’t care.
+- ignore: { name: Use newtype instead of data }
+
+# these lead to harder-to-read/more implicit code
+- ignore: { name: Use fmap }
+- ignore: { name: Use <$> }
+- ignore: { name: Use tuple-section }
+- ignore: { name: Use forM_ }
+- ignore: { name: Functor law }
+- ignore: { name: Use maybe }
+
+# fst and snd are usually a code smell and should be explicit matches, _naming the ignored side.
+- ignore: { name: Use fst }
+- ignore: { name: Use snd }
+- ignore: { name: Use fromMaybe }
+- ignore: { name: Use const }
+- ignore: { name: Replace case with maybe }
+- ignore: { name: Replace case with fromMaybe }
+- ignore: { name: Avoid lambda }
+- ignore: { name: Avoid lambda using `infix` }
+- ignore: { name: Use curry }
+- ignore: { name: Use uncurry }
+- ignore: { name: Use first }
+- ignore: { name: Redundant first }
+- ignore: { name: Use second }
+- ignore: { name: Use bimap }
+# just use `not x`
+- ignore: { name: Use unless }
+- ignore: { name: Redundant <&> }
+
+# list comprehensions are a seldom-used part of the Haskell language
+# and they introduce syntactic overhead that is usually not worth the conciseness
+- ignore: { name: Use list comprehension }
+
+# Seems to be buggy in cases
+- ignore: { name: Use section }
+
+# multiple maps in a row are usually used for clarity,
+# and the compiler will optimize them away, thank you very much.
+- ignore: { name: Use map once }
+- ignore: { name: Fuse foldr/map }
+- ignore: { name: Fuse traverse/map }
+- ignore: { name: Fuse traverse_/map }
+- ignore: { name: Fuse traverse/<$> }
+
+# this is silly, why would I use a special function if I can just (heh) `== Nothing`
+- ignore: { name: Use isNothing }
+
+# The duplication heuristic is not very smart
+# and more annoying than helpful.
+# see https://github.com/ndmitchell/hlint/issues/1009
+- ignore: { name: Reduce duplication }
+
+# Stops the pattern match trick
+- ignore: { name: Use record patterns }
+- ignore: { name: Use null }
+- ignore: { name: Use uncurry }
+
+# we don’t want void, see below
+- ignore: { name: Use void }
+
+- functions:
+    # disallow Enum instance functions, they are partial
+    - name: Prelude.succ
+      within: [Relude.Extra.Enum]
+      message: "Dangerous, will fail for highest element"
+    - name: Prelude.pred
+      within: [Relude.Extra.Enum]
+      message: "Dangerous, will fail for lowest element"
+    - name: Prelude.toEnum
+      within: []
+      message: "Extremely partial"
+    - name: Prelude.fromEnum
+      within: []
+      message: "Dangerous for most uses"
+    - name: Prelude.enumFrom
+      within: []
+    - name: Prelude.enumFromThen
+      within: []
+    - name: Prelude.enumFromThenTo
+      within: []
+    - name: Prelude.boundedEnumFrom
+      within: []
+    - name: Prelude.boundedEnumFromThen
+      within: []
+
+    - name: Text.Read.readMaybe
+      within:
+        # The BSON ObjectId depends on Read for parsing
+        - Milkmap.Milkmap
+        - Milkmap.FieldData.Value
+      message: "`readMaybe` is probably not what you want for parsing values, please use the `FieldParser` module."
+
+    # `void` discards its argument and is polymorphic,
+    # thus making it brittle in the face of code changes.
+    # (see https://tech.freckle.com/2020/09/23/void-is-a-smell/)
+    # Use an explicit `_ <- …` instead.
+    - name: Data.Functor.void
+      within: []
+      message: "`void` leads to bugs. Use an explicit `_ <- …` instead"
+
+    - name: Data.Foldable.length
+      within: ["MyPrelude"]
+      message: "`Data.Foldable.length` is dangerous to use, because it also works on types you wouldn’t expect, like `length (3,4) == 1` and `length (Just 2) == 1`. Use the `length` function for your specific type instead, for example `List.length` or `Map.length`."
+
+    - name: Prelude.length
+      within: ["MyPrelude"]
+      message: "`Prelude.length` is dangerous to use, because it also works on types you wouldn’t expect, like `length (3,4) == 1` and `length (Just 2) == 1`. Use the `length` function for your specific type instead, for example `List.length` or `Map.length`."
+
+    # Using an explicit lambda with its argument “underscored”
+    # is more clear in every case.
+    # e.g. `const True` => `\_request -> True`
+    # shows the reader that the ignored argument was a request.
+    - name: Prelude.const
+      within: []
+      message: "Replace `const` with an explicit lambda with type annotation for code clarity and type safety, e.g.: `const True` => `\\(_ :: Request) -> True`. If you really don’t want to spell out the type (which might lead to bugs!), you can also use something like `\_request -> True`."
+
+    - name: Data.List.nub
+      within: []
+      message: "O(n²), use `Data.Containers.ListUtils.nubOrd"
+
+    - name: Prelude.maximum
+      within: []
+      message: "`maximum` crashes on empty list; use non-empty lists and `maximum1`"
+
+    - name: Data.List.maximum
+      within: []
+      message: "`maximum` crashes on empty list; use non-empty lists and `maximum1`"
+
+    - name: Prelude.minimum
+      within: []
+      message: "`minimum` crashes on empty list; use non-empty lists and `minimum1`"
+
+    - name: Data.List.minimum
+      within: []
+      message: "`minimum` crashes on empty list; use non-empty lists and `minimum1`"
+
+    - name: Data.Foldable.maximum
+      within: []
+      message: "`maximum` crashes on empty foldable stucture; use Foldable1 and `maximum1`."
+
+    - name: Data.Foldable.minimum
+      within: []
+      message: "`minimum` crashes on empty foldable stucture; use Foldable1 and `minimum1`."
+
+    # Using prelude functions instead of stdlib functions
+
+    - name: "Data.Text.Encoding.encodeUtf8"
+      within: ["MyPrelude"]
+      message: "Use `textToBytesUtf8`"
+
+    - name: "Data.Text.Lazy.Encoding.encodeUtf8"
+      within: ["MyPrelude"]
+      message: "Use `textToBytesUtf8Lazy`"
+
+    - name: "Data.Text.Encoding.decodeUtf8'"
+      within: ["MyPrelude"]
+      message: "Use `bytesToTextUtf8`"
+
+    - name: "Data.Text.Encoding.Lazy.decodeUtf8'"
+      within: ["MyPrelude"]
+      message: "Use `bytesToTextUtf8Lazy`"
+
+    - name: "Data.Text.Encoding.decodeUtf8"
+      within: ["MyPrelude"]
+      message: "Either check for errors with `bytesToTextUtf8`, decode leniently with unicode replacement characters with `bytesToTextUtf8Lenient` or use the crashing version `bytesToTextUtf8Unsafe` (discouraged)."
+
+    - name: "Data.Text.Encoding.Lazy.decodeUtf8"
+      within: ["MyPrelude"]
+      message: "Either check for errors with `bytesToTextUtf8Lazy`, decode leniently with unicode replacement characters with `bytesToTextUtf8LenientLazy` or use the crashing version `bytesToTextUtf8UnsafeLazy` (discouraged)."
+
+    - name: "Data.Text.Lazy.toStrict"
+      within: ["MyPrelude"]
+      message: "Use `toStrict`"
+
+    - name: "Data.Text.Lazy.fromStrict"
+      within: ["MyPrelude"]
+      message: "Use `toLazy`"
+
+    - name: "Data.ByteString.Lazy.toStrict"
+      within: ["MyPrelude"]
+      message: "Use `toStrictBytes`"
+
+    - name: "Data.ByteString.Lazy.fromStrict"
+      within: ["MyPrelude"]
+      message: "Use `toLazyBytes`"
+
+    - name: "Data.Text.unpack"
+      within: ["MyPrelude"]
+      message: "Use `textToString`"
+
+    - name: "Data.Text.pack"
+      within: ["MyPrelude"]
+      message: "Use `stringToText`"
+
+    - name: "Data.Maybe.listToMaybe"
+      within: []
+      message: |
+        `listToMaybe` throws away everything but the first element of a list (it is essentially `safeHead`).
+        If that is what you want, please use a pattern match like
+
+        ```
+        case xs of
+          [] -> …
+          (x:_) -> …
+        ```
+
+    - name: "Data.List.head"
+      within: []
+      message: |
+        `List.head` fails on an empty list. I didn’t think I’d have to say this, but please use a pattern match on the list, like:
+
+        ```
+        case xs of
+          [] -> … error handling …
+          (x:_) -> …
+        ```
+
+        Also think about why the rest of the list should be ignored.
+
+    - name: "Prelude.head"
+      within: []
+      message: |
+        `Prelude.head` fails on an empty list. I didn’t think I’d have to say this, but please use a pattern match on the list, like:
+
+        ```
+        case xs of
+          [] -> … error handling …
+          (x:_) -> …
+        ```
+
+        Also think about why the rest of the list should be ignored.
+
+    - name: "Data.Maybe.fromJust"
+      within: []
+      message: |
+        `Maybe.fromJust` is obviously partial. Please use a pattern match.
+
+        In case you actually want to throw an error on a `Nothing`,
+        please add an error message, like so:
+
+        ```
+        myMaybe & annotate "my error message" & unwrapError
+        ```
+
+        If you are in `IO`, use `unwrapIOError` instead,
+        or throw a monad-specific error.
+
+    - name: "Data.Either.fromLeft"
+      within: []
+      message: |
+        `Either.fromLeft` is obviously partial. Please use a pattern match.
+
+    - name: "Data.Either.fromRight"
+      within: []
+      message: |
+        `Either.fromRight` is obviously partial. Please use a pattern match.
+
+# Make restricted functions into an error if found
+- error: { name: "Avoid restricted function, see comment in .hlint.yaml" }
+
+# Some functions that have (more modern) aliases.
+# They are not dangerous per se,
+# but we want to make it easier to read our code so we should
+# make sure we don’t use too many things that are renames.
+
+- hint:
+    lhs: "undefined"
+    rhs: "todo"
+    note: "`undefined` is a silent error, `todo` will display a warning as long as it exists in the code."
+
+- hint:
+    lhs: "return"
+    rhs: "pure"
+    note: "Use `pure` from `Applicative` instead, it’s the exact same function."
+
+- hint:
+    lhs: "mapM"
+    rhs: "traverse"
+    note: "Use `traverse` from `Traversable` instead. It’s the exact same function."
+
+- hint:
+    lhs: "mapM_"
+    rhs: "traverse_"
+    note: "Use `traverse_` from `Traversable` instead. It’s the exact same function."
+
+- hint:
+    lhs: "forM"
+    rhs: "for"
+    note: "Use `for` from `Traversable` instead. It’s the exact same function."
+
+- hint:
+    lhs: "forM_"
+    rhs: "for_"
+    note: "Use `for_` from `Traversable` instead. It’s the exact same function."
+
+- hint:
+    lhs: "stringToText (show x)"
+    rhs: "showToText x"
+
+- hint:
+    lhs: "Data.Set.toList (Data.Set.fromList x)"
+    rhs: "List.nubOrd x"
+    note: "`nubOrd` removes duplicate elements from a list."
+
+- modules:
+    # Disallowed Modules
+    - name: Data.Map
+      within: []
+      message: "Lazy maps leak space, use `import Data.Map.Strict as Map` instead"
+    - name: Control.Monad.Writer
+      within: []
+      message: "Lazy writers leak space, use `Control.Monad.Trans.Writer.CPS` instead"
+    - name: Control.Monad.Trans.Writer.Lazy
+      within: []
+      message: "Lazy writers leak space, use `Control.Monad.Trans.Writer.CPS` instead"
+    - name: Control.Monad.Trans.Writer.Strict
+      within: []
+      message: "Even strict writers leak space, use `Control.Monad.Trans.Writer.CPS` instead"
+
+    # Qualified module imports
+    - { name: Data.Map.Strict, as: Map }
+    - { name: Data.HashMap.Strict, as: HashMap }
+    - { name: Data.Set, as: Set }
+    - { name: Data.ByteString.Char8, as: Char8 }
+    - { name: Data.ByteString.Lazy.Char8, as: Char8.Lazy }
+    - { name: Data.Text, as: Text }
+    - { name: Data.Vector, as: Vector }
+    - { name: Data.Vault.Lazy, as: Vault }
+    - { name: Data.Aeson, as: Json }
+    - { name: Data.Aeson.Types, as: Json }
+    - { name: Data.Aeson.BetterErrors, as: Json }
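
[Editor’s note: a minimal sketch of the style this config nudges code toward, assuming the author’s pa-prelude (`PossehlAnalyticsPrelude`) is in scope for `&`, `annotate` and `unwrapError` — the helpers the messages above reference. `firstOr` and `readPort` are illustrative names, not part of the depot.]

```
{-# LANGUAGE OverloadedStrings #-}

import PossehlAnalyticsPrelude

-- Instead of the partial `List.head`/`Prelude.head`:
-- pattern match, naming the ignored rest of the list.
firstOr :: a -> [a] -> a
firstOr def xs = case xs of
  [] -> def
  (x : _rest) -> x

-- Instead of `Maybe.fromJust`: attach an error message, then unwrap
-- (exactly the replacement the fromJust message above suggests).
readPort :: Maybe Int -> Int
readPort mPort = mPort & annotate "PORT envvar not set" & unwrapError
```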
diff --git a/users/Profpatsch/.vscode/launch.json b/users/Profpatsch/.vscode/launch.json
new file mode 100644
index 0000000000..baa087d437
--- /dev/null
+++ b/users/Profpatsch/.vscode/launch.json
@@ -0,0 +1,18 @@
+{
+    // Use IntelliSense to learn about possible attributes.
+    // Hover to view descriptions of existing attributes.
+    // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
+    "version": "0.2.0",
+    "configurations": [
+        {
+            "name": "run declib",
+            "type": "node",
+            "cwd": "${workspaceFolder}/declib",
+            "request": "launch",
+            "runtimeExecutable": "ninja",
+            "runtimeArgs": [
+                "run",
+            ],
+        }
+    ]
+}
diff --git a/users/Profpatsch/.vscode/settings.json b/users/Profpatsch/.vscode/settings.json
new file mode 100644
index 0000000000..7984076c16
--- /dev/null
+++ b/users/Profpatsch/.vscode/settings.json
@@ -0,0 +1,25 @@
+{
+    "sqltools.connections": [
+        {
+            "previewLimit": 50,
+            "driver": "SQLite",
+            "name": "cas-serve",
+            "database": "${workspaceFolder:Profpatsch}/cas-serve/data.sqlite"
+        }
+    ],
+    "sqltools.useNodeRuntime": true,
+    "editor.formatOnSave": true,
+    "[typescript]": {
+        "editor.defaultFormatter": "esbenp.prettier-vscode"
+    },
+    "[javascript]": {
+        "editor.defaultFormatter": "esbenp.prettier-vscode"
+    },
+    "[json]": {
+        "editor.defaultFormatter": "esbenp.prettier-vscode"
+    },
+    "purescript.codegenTargets": [
+        "corefn"
+    ],
+    "purescript.foreignExt": "nix"
+}
diff --git a/users/Profpatsch/OWNERS b/users/Profpatsch/OWNERS
index 5a73d4c3a1..ac23e72256 100644
--- a/users/Profpatsch/OWNERS
+++ b/users/Profpatsch/OWNERS
@@ -1,4 +1,4 @@
-inherited: false
-owners:
-  - Profpatsch
-  - sterni
+set noparent
+
+Profpatsch
+sterni
diff --git a/users/Profpatsch/README.md b/users/Profpatsch/README.md
new file mode 100644
index 0000000000..5bb74cd758
--- /dev/null
+++ b/users/Profpatsch/README.md
@@ -0,0 +1,10 @@
+# Profpatsch’s assemblage of peculiarities and curiosities
+
+Welcome, Welcome.
+
+Welcome to my user dir, where we optimize f*** around, in order to optimize finding out.
+
+![fafo graph](./fafo.jpg)
+
+DISCLAIMER: All of this code is of the “do not try at work” sort, unless noted otherwise.
+You might try at home, however. Get inspired or get grossed out, whichever you like.
diff --git a/users/Profpatsch/aerc-no-config-perms.patch b/users/Profpatsch/aerc-no-config-perms.patch
deleted file mode 100644
index 86b41cd74b..0000000000
--- a/users/Profpatsch/aerc-no-config-perms.patch
+++ /dev/null
@@ -1,12 +0,0 @@
-diff --git a/config/config.go b/config/config.go
-index 0472daf..5eed379 100644
---- a/config/config.go
-+++ b/config/config.go
-@@ -779,6 +779,7 @@ func (config *AercConfig) LoadBinds(binds *ini.File, baseName string, baseGroup
- // checkConfigPerms checks for too open permissions
- // printing the fix on stdout and returning an error
- func checkConfigPerms(filename string) error {
-+        return nil;
- 	info, err := os.Stat(filename)
- 	if err != nil {
- 		return nil // disregard absent files
diff --git a/users/Profpatsch/aerc.dhall b/users/Profpatsch/aerc.dhall
deleted file mode 100644
index fb63f7044b..0000000000
--- a/users/Profpatsch/aerc.dhall
+++ /dev/null
@@ -1,157 +0,0 @@
-let NameVal = λ(T : Type) → { name : Text, value : T }
-
-in  λ ( imports
-      : { -- Take an aerc filter from the aerc distribution /share directory
-          aercFilter : Text → Text
-        , -- given a dsl of functions to create an Ini, render the ini file
-          toIni :
-            { globalSection : List (NameVal Text)
-            , sections : List (NameVal (List (NameVal Text)))
-            } →
-              Text
-        }
-      ) →
-      let List/map
-          : ∀(a : Type) → ∀(b : Type) → (a → b) → List a → List b
-          = λ(a : Type) →
-            λ(b : Type) →
-            λ(f : a → b) →
-            λ(xs : List a) →
-              List/build
-                b
-                ( λ(list : Type) →
-                  λ(cons : b → list → list) →
-                    List/fold a xs list (λ(x : a) → cons (f x))
-                )
-
-      in  { accounts =
-              imports.toIni
-                { globalSection = [] : List (NameVal Text)
-                , sections =
-                  [ { name = "mail"
-                    , value =
-                      [ { name = "archive", value = "Archive" }
-                      , { name = "copy-to", value = "Sent" }
-                      , { name = "default", value = "INBOX" }
-                      , { name = "from"
-                        , value = "Profpatsch <mail@profpatsch.de>"
-                        }
-                      , { name = "source", value = "maildir://~/.Mail/mail" }
-                      , { name = "postpone", value = "Drafts" }
-                      ]
-                    }
-                  ]
-                }
-          , aerc =
-              imports.toIni
-                { globalSection = [] : List (NameVal Text)
-                , sections =
-                  [ { name = "filters"
-                    , value =
-                      [ { name = "text/html"
-                        , value = imports.aercFilter "html"
-                        }
-                      , let _ = "-- TODO: this awk should be taken from nix!"
-
-                        in  { name = "text/*"
-                            , value = "awk -f ${imports.aercFilter "plaintext"}"
-                            }
-                      ]
-                    }
-                  ]
-                }
-          , binds =
-              let
-                  -- keybinding and command to run
-                  Key =
-                    { ctrl : Bool, key : Text, cmd : Text }
-
-              in  let
-                      -- render a key to config format
-                      renderKey =
-                        λ(k : Key) →
-                          if    k.ctrl
-                          then  { name = "<C-${k.key}>", value = k.cmd }
-                          else  { name = k.key, value = k.cmd }
-
-                  let
-
-                      -- render a list of keys to config format
-                      renderKeys =
-                        λ(keys : List Key) →
-                          List/map Key (NameVal Text) renderKey keys
-
-                  let
-                      -- create a section whith a name and a list of keys
-                      sect =
-                        λ(section : Text) →
-                        λ(keys : List Key) →
-                          { name = section, value = renderKeys keys }
-
-                  let
-
-                      -- set key without modifiers
-                      key =
-                        λ(key : Text) → { key }
-
-                  let
-                      -- set special key without modifiers
-                      special =
-                        λ(key : Text) → { key = "<${key}>" }
-
-                  let
-                      -- no modifier
-                      none =
-                        { ctrl = False }
-
-                  let
-                      -- set control key
-                      ctrl =
-                        { ctrl = True }
-
-                  let
-                      -- set a command to execute
-                      cmd =
-                        λ(cmd : Text) → { cmd = ":${cmd}<Enter>" }
-
-                  let
-                      -- set a command, but stay on the prompt
-                      prompt =
-                        λ(cmd : Text) → { cmd = ":${cmd}<Space>" }
-
-                  let config =
-                        { globalSection =
-                            renderKeys
-                              [ ctrl ∧ key "p" ∧ cmd "prev-tab"
-                              , ctrl ∧ key "n" ∧ cmd "next-tab"
-                              , ctrl ∧ key "t" ∧ cmd "term"
-                              ]
-                        , sections =
-                          [ sect
-                              "messages"
-                              [ ctrl ∧ key "q" ∧ cmd "quit"
-                              , none ∧ special "Up" ∧ cmd "prev"
-                              , none ∧ special "Down" ∧ cmd "next"
-                              , none ∧ special "PgUp" ∧ cmd "prev 100%"
-                              , none ∧ special "PgDn" ∧ cmd "next 100%"
-                              , none ∧ key "g" ∧ cmd "select 0"
-                              , none ∧ key "G" ∧ cmd "select -1"
-                              , ctrl ∧ key "Up" ∧ cmd "prev-folder"
-                              , ctrl ∧ key "Down" ∧ cmd "next-folder"
-                              , none ∧ key "v" ∧ cmd "mark -t"
-                              , none ∧ key "V" ∧ cmd "mark -v"
-                              , none ∧ special "Enter" ∧ cmd "view"
-                              , none ∧ key "c" ∧ cmd "compose"
-                              , none ∧ key "|" ∧ prompt "pipe"
-                              , none ∧ key "t" ∧ prompt "term"
-                              , none ∧ key "/" ∧ prompt "search"
-                              , none ∧ key "n" ∧ cmd "next-result"
-                              , none ∧ key "N" ∧ cmd "prev-result"
-                              , none ∧ special "Esc" ∧ cmd "clear"
-                              ]
-                          , sect "view" [ none ∧ key "q" ∧ cmd "close" ]
-                          ]
-                        }
-
-                  in  imports.toIni config
-          }
diff --git a/users/Profpatsch/aerc.nix b/users/Profpatsch/aerc.nix
deleted file mode 100644
index 569f045a00..0000000000
--- a/users/Profpatsch/aerc.nix
+++ /dev/null
@@ -1,51 +0,0 @@
-{ depot, pkgs, lib, ... }:
-
-let
-  aerc-patched = pkgs.aerc.overrideAttrs (old: {
-    patches = old.patches or [ ] ++ [
-      ./aerc-no-config-perms.patch
-    ];
-  });
-
-  bins = depot.nix.getBins aerc-patched [ "aerc" ];
-
-  config =
-    depot.users.Profpatsch.importDhall.importDhall
-      {
-        root = ./.;
-        files = [
-          "aerc.dhall"
-        ];
-        main = "aerc.dhall";
-        deps = [ ];
-      }
-      {
-        aercFilter = name: "${aerc-patched}/share/aerc/filters/${name}";
-        toIni = depot.users.Profpatsch.toINI { };
-      };
-
-  aerc-config = pkgs.linkFarm "alacritty-config" [
-    {
-      name = "aerc/accounts.conf";
-      path = pkgs.writeText "accounts.conf" config.accounts;
-    }
-    {
-      name = "aerc/aerc.conf";
-      path = pkgs.writeText "aerc.conf" config.aerc;
-    }
-    {
-      name = "aerc/binds.conf";
-      path = pkgs.writeText "binds.conf" config.binds;
-    }
-  ];
-
-  aerc = depot.nix.writeExecline "aerc" { } [
-    "export"
-    "XDG_CONFIG_HOME"
-    aerc-config
-    bins.aerc
-    "$@"
-  ];
-
-in
-aerc
diff --git a/users/Profpatsch/alacritty.dhall b/users/Profpatsch/alacritty.dhall
deleted file mode 100644
index b4d99c8294..0000000000
--- a/users/Profpatsch/alacritty.dhall
+++ /dev/null
@@ -1,48 +0,0 @@
-let sol = (./solarized.dhall).hex
-
-let black = "#000000"
-
-let white = "#ffffff"
-
-let
-    -- todo: this looks not too good
-    solarized-dark =
-      { --Colors (Solarized Dark)
-        colors =
-        { -- Default colors
-          primary =
-          { background = black, foreground = white }
-        , -- Cursor colors
-          cursor =
-          { text = sol.base03, cursor = sol.base0 }
-        , -- Normal colors
-          normal =
-          { black = sol.base02
-          , red = sol.red
-          , green = sol.green
-          , yellow = sol.yellow
-          , blue = sol.blue
-          , magenta = sol.magenta
-          , cyan = sol.cyan
-          , white = sol.base2
-          }
-        , -- Bright colors
-          bright =
-          { black = sol.base03
-          , red = sol.orange
-          , green = sol.base01
-          , yellow = sol.base00
-          , blue = sol.base0
-          , magenta = sol.violet
-          , cyan = sol.base1
-          , white = sol.base3
-          }
-        }
-      }
-
-in  { alacritty-config = { font.size = 18, scolling.history = 100000 }
-    ,   -- This disables the dpi-sensitive scaling (cause otherwise the font will be humongous on my laptop screen)
-        alacritty-env
-      . WINIT_X11_SCALE_FACTOR
-      = 1
-    }
diff --git a/users/Profpatsch/alacritty.nix b/users/Profpatsch/alacritty.nix
index d2cb8de2fc..d3461c4aad 100644
--- a/users/Profpatsch/alacritty.nix
+++ b/users/Profpatsch/alacritty.nix
@@ -4,21 +4,14 @@ let
   bins = depot.nix.getBins pkgs.alacritty [ "alacritty" ];
 
   config =
-    depot.users.Profpatsch.importDhall.importDhall {
-      root = ./.;
-      files = [
-        "alacritty.dhall"
-        "solarized.dhall"
-      ];
-      main = "alacritty.dhall";
-      deps = [ ];
+    {
+      alacritty-config = { font.size = 18; scrolling.history = 100000; };
+      #  This disables the dpi-sensitive scaling (cause otherwise the font will be humongous on my laptop screen)
+      alacritty-env.WINIT_X11_SCALE_FACTOR = 1;
     };
 
-  config-file = lib.pipe config.alacritty-config [
-    (lib.generators.toYAML { })
-    (pkgs.writeText "alacritty.conf")
-  ];
 
+  config-file = (pkgs.formats.toml { }).generate "alacritty.conf" config.alacritty-config;
 
   alacritty = depot.nix.writeExecline "alacritty" { } (
     (lib.concatLists (lib.mapAttrsToList (k: v: [ "export" k (toString v) ]) config.alacritty-env))
diff --git a/users/Profpatsch/aliases.nix b/users/Profpatsch/aliases.nix
index 6a1c2c1a63..109de8ce33 100644
--- a/users/Profpatsch/aliases.nix
+++ b/users/Profpatsch/aliases.nix
@@ -72,4 +72,17 @@ depot.nix.readTree.drvTargets {
     "*\${2}*"
     "$@"
   ];
+
+  bell = depot.nix.writeExecline "bell" { } [
+    "if"
+    [
+      "pactl"
+      "upload-sample"
+      "${pkgs.sound-theme-freedesktop}/share/sounds/freedesktop/stereo/complete.oga"
+      "bell-window-system"
+    ]
+    "pactl"
+    "play-sample"
+    "bell-window-system"
+  ];
 }
diff --git a/users/Profpatsch/arglib/ArglibNetencode.hs b/users/Profpatsch/arglib/ArglibNetencode.hs
new file mode 100644
index 0000000000..4531151ca2
--- /dev/null
+++ b/users/Profpatsch/arglib/ArglibNetencode.hs
@@ -0,0 +1,22 @@
+{-# LANGUAGE QuasiQuotes #-}
+
+module ArglibNetencode where
+
+import Data.Attoparsec.ByteString qualified as Atto
+import ExecHelpers
+import Label
+import Netencode qualified
+import PossehlAnalyticsPrelude
+import System.Posix.Env.ByteString qualified as ByteEnv
+
+arglibNetencode :: CurrentProgramName -> Maybe (Label "arglibEnvvar" Text) -> IO Netencode.T
+arglibNetencode progName mEnvvar = do
+  let envvar = mEnvvar <&> (.arglibEnvvar) & fromMaybe "ARGLIB_NETENCODE" & textToBytesUtf8
+  ByteEnv.getEnv envvar >>= \case
+    Nothing -> dieUserError progName [fmt|could not read args, envvar {envvar} not set|]
+    Just bytes ->
+      case Atto.parseOnly (Netencode.netencodeParser <* Atto.endOfInput) bytes of
+        Left err -> dieEnvironmentProblem progName [fmt|arglib parsing error: {err}|]
+        Right t -> do
+          ByteEnv.unsetEnv envvar
+          pure t
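
[Editor’s note: a hypothetical caller of this module might look like the sketch below; it mirrors the `parseArglib` helper in cas-serve further down in this diff. The tool name and the `port` field are made up; `Net.runParse`, `>>>` and `textToString` are used the same way cas-serve uses them.]

```
{-# LANGUAGE OverloadedStrings #-}

import ArglibNetencode (arglibNetencode)
import Netencode.Parse qualified as Net
import PossehlAnalyticsPrelude

main :: IO ()
main = do
  -- reads ARGLIB_NETENCODE, parses it as netencode, unsets the envvar
  args <- arglibNetencode "my-tool" Nothing
  let port =
        args
          & Net.runParse
            "cannot parse ARGLIB_NETENCODE for my-tool"
            (Net.asRecord >>> Net.key "port" >>> Net.asText)
  putStrLn (textToString port)
```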
diff --git a/users/Profpatsch/arglib/arglib-netencode.cabal b/users/Profpatsch/arglib/arglib-netencode.cabal
new file mode 100644
index 0000000000..42b524f405
--- /dev/null
+++ b/users/Profpatsch/arglib/arglib-netencode.cabal
@@ -0,0 +1,65 @@
+cabal-version:      3.0
+name:               arglib-netencode
+version:            0.1.0.0
+author:             Profpatsch
+maintainer:         mail@profpatsch.de
+
+common common-options
+  ghc-options:
+      -Wall
+      -Wno-type-defaults
+      -Wunused-packages
+      -Wredundant-constraints
+      -fwarn-missing-deriving-strategies
+
+  -- See https://downloads.haskell.org/ghc/latest/docs/users_guide/exts.html
+  -- for a description of all these extensions
+  default-extensions:
+      -- Infer Applicative instead of Monad where possible
+    ApplicativeDo
+
+    -- Allow literal strings to be Text
+    OverloadedStrings
+
+    -- Syntactic sugar improvements
+    LambdaCase
+    MultiWayIf
+
+    -- Makes the (deprecated) usage of * instead of Data.Kind.Type an error
+    NoStarIsType
+
+    -- Convenient and crucial to deal with ambiguous field names, commonly
+    -- known as RecordDotSyntax
+    OverloadedRecordDot
+
+    -- does not export record fields as functions, use OverloadedRecordDot to access instead
+    NoFieldSelectors
+
+    -- Record punning
+    RecordWildCards
+
+    -- Improved Deriving
+    DerivingStrategies
+    DerivingVia
+
+    -- Type-level strings
+    DataKinds
+
+    -- to enable the `type` keyword in import lists (ormolu uses this automatically)
+    ExplicitNamespaces
+
+  default-language: GHC2021
+
+
+library
+    import: common-options
+    exposed-modules:          ArglibNetencode
+
+    build-depends:
+        base >=4.15 && <5,
+        pa-prelude,
+        pa-label,
+        netencode,
+        exec-helpers,
+        attoparsec,
+        unix
diff --git a/users/Profpatsch/arglib/netencode.nix b/users/Profpatsch/arglib/netencode.nix
index 3f1d121e51..83a94ddd6c 100644
--- a/users/Profpatsch/arglib/netencode.nix
+++ b/users/Profpatsch/arglib/netencode.nix
@@ -1,42 +1,81 @@
 { depot, pkgs, lib, ... }:
 
 let
-  netencode = {
-    rust = depot.nix.writers.rustSimpleLib
-      {
-        name = "arglib-netencode";
-        dependencies = [
-          depot.users.Profpatsch.execline.exec-helpers
-          depot.users.Profpatsch.netencode.netencode-rs
-        ];
-      } ''
-      extern crate netencode;
-      extern crate exec_helpers;
-
-      use netencode::{T};
-      use std::os::unix::ffi::OsStrExt;
-
-      pub fn arglib_netencode(prog_name: &str, env: Option<&std::ffi::OsStr>) -> T {
-          let env = match env {
-              None => std::ffi::OsStr::from_bytes("ARGLIB_NETENCODE".as_bytes()),
-              Some(a) => a
-          };
-          let t = match std::env::var_os(env) {
-              None => exec_helpers::die_user_error(prog_name, format!("could not read args, envvar {} not set", env.to_string_lossy())),
-              // TODO: good error handling for the different parser errors
-              Some(soup) => match netencode::parse::t_t(soup.as_bytes()) {
-                  Ok((remainder, t)) => match remainder.is_empty() {
-                      true => t,
-                      false => exec_helpers::die_environment_problem(prog_name, format!("arglib: there was some unparsed bytes remaining: {:?}", remainder))
-                  },
-                  Err(err) => exec_helpers::die_environment_problem(prog_name, format!("arglib parsing error: {:?}", err))
-              }
-          };
-          std::env::remove_var(env);
-          t
-      }
-    '';
+
+  # Add the given nix arguments to the program as ARGLIB_NETENCODE envvar
+  #
+  # Calls `netencode.gen.dwim` on the provided nix args value.
+  with-args = name: args: prog: depot.nix.writeExecline "${name}-with-args" { } [
+    "export"
+    "ARGLIB_NETENCODE"
+    (depot.users.Profpatsch.netencode.gen.dwim args)
+    prog
+  ];
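+  # For illustration (hypothetical target program): something like
+  #   with-args "hello" { name = "world"; } depot.users.Profpatsch.tester
+  # would run `tester` with ARGLIB_NETENCODE set to the netencode
+  # record generated from `{ name = "world"; }`.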
+
+  rust = depot.nix.writers.rustSimpleLib
+    {
+      name = "arglib-netencode";
+      dependencies = [
+        depot.users.Profpatsch.execline.exec-helpers
+        depot.users.Profpatsch.netencode.netencode-rs
+      ];
+    } ''
+    extern crate netencode;
+    extern crate exec_helpers;
+
+    use netencode::{T};
+    use std::os::unix::ffi::OsStrExt;
+
+    pub fn arglib_netencode(prog_name: &str, env: Option<&std::ffi::OsStr>) -> T {
+        let env = match env {
+            None => std::ffi::OsStr::from_bytes("ARGLIB_NETENCODE".as_bytes()),
+            Some(a) => a
+        };
+        let t = match std::env::var_os(env) {
+            None => exec_helpers::die_user_error(prog_name, format!("could not read args, envvar {} not set", env.to_string_lossy())),
+            // TODO: good error handling for the different parser errors
+            Some(soup) => match netencode::parse::t_t(soup.as_bytes()) {
+                Ok((remainder, t)) => match remainder.is_empty() {
+                    true => t,
+                    false => exec_helpers::die_environment_problem(prog_name, format!("arglib: there was some unparsed bytes remaining: {:?}", remainder))
+                },
+                Err(err) => exec_helpers::die_environment_problem(prog_name, format!("arglib parsing error: {:?}", err))
+            }
+        };
+        std::env::remove_var(env);
+        t
+    }
+  '';
+
+  haskell = pkgs.haskellPackages.mkDerivation {
+    pname = "arglib-netencode";
+    version = "0.1.0";
+
+    src = depot.users.Profpatsch.exactSource ./. [
+      ./arglib-netencode.cabal
+      ./ArglibNetencode.hs
+    ];
+
+    libraryHaskellDepends = [
+      pkgs.haskellPackages.pa-prelude
+      pkgs.haskellPackages.pa-label
+      pkgs.haskellPackages.pa-error-tree
+      depot.users.Profpatsch.netencode.netencode-hs
+      depot.users.Profpatsch.execline.exec-helpers-hs
+    ];
+
+    isLibrary = true;
+    license = lib.licenses.mit;
+
+
   };
 
+
 in
-depot.nix.readTree.drvTargets netencode
+depot.nix.readTree.drvTargets {
+  inherit
+    with-args
+    rust
+    haskell
+    ;
+}
diff --git a/users/Profpatsch/blog/README.md b/users/Profpatsch/blog/README.md
new file mode 100644
index 0000000000..0753ebdea5
--- /dev/null
+++ b/users/Profpatsch/blog/README.md
@@ -0,0 +1,7 @@
+# (Parts of) my website
+
+This is a part of https://profpatsch.de/, notably the blog posts.
+
+The other parts can be found in [vuizvui](https://github.com/openlab-aux/vuizvui/tree/master/pkgs/profpatsch/profpatsch.de). It’s a mess.
+
+And yes, this implements a webserver & routing engine with nix, execline & s6 utils. “Bis einer weint”, as we say in German.
diff --git a/users/Profpatsch/blog/default.nix b/users/Profpatsch/blog/default.nix
index 9848d83c56..f233eda9bb 100644
--- a/users/Profpatsch/blog/default.nix
+++ b/users/Profpatsch/blog/default.nix
@@ -26,6 +26,15 @@ let
   # /notes/*
   notes = [
     {
+      route = [ "notes" "private-trackers-are-markets" ];
+      name = "Private bittorrent trackers are markets";
+      page = { cssFile }: markdownToHtml {
+        name = "private-trackers-are-markets";
+        markdown = ./notes/private-trackers-are-markets.md;
+        inherit cssFile;
+      };
+    }
+    {
       route = [ "notes" "an-idealized-conflang" ];
       name = "An Idealized Configuration Language";
       page = { cssFile }: markdownToHtml {
@@ -141,7 +150,7 @@ let
     }:
       assert
       (lib.assertMsg
-        (builtins.pathExists (depot.path.origSrc + "/${relativePath}"))
+        (builtins.pathExists (depot.path.origSrc + ("/" + relativePath)))
         "depotCgitLink: path /${relativePath} does not exist in depot, and depot.path was ${toString depot.path}");
       "https://code.tvl.fyi/tree/${relativePath}";
 
diff --git a/users/Profpatsch/blog/notes/private-trackers-are-markets.md b/users/Profpatsch/blog/notes/private-trackers-are-markets.md
new file mode 100644
index 0000000000..88fe5f07e5
--- /dev/null
+++ b/users/Profpatsch/blog/notes/private-trackers-are-markets.md
@@ -0,0 +1,46 @@
+# Private bittorrent trackers are markets
+
+Private bittorrent trackers have a currency called ratio,
+which is the bits you upload divided by the bits you download.
+
+You have to keep the ratio above a certain lower limit,
+otherwise you get banned from the market or have to cut a deal with the moderators → bankruptcy
+
+New liquidity (?) is introduced to the market by so-called “freeleech” events or tokens,
+which essentially allow you to exchange a token (or some time in the case of time-restricted freeleech)
+for some data, which can then be seeded to generate future profits without spending ratio.
+
+Sometimes, ratio is pulled from the market by letting users exchange it for website perks,
+like forum titles or other benefits like chat-memberships. This has a deflationary effect.
+It could be compared to “vanity items” in MMOs, which don’t grant a mechanical advantage in the market.
+Is there a real-world equivalent? i.e. allowing rich people to exchange some of their worth
+for vanity items instead of investing it for future gain?
+
+Sometimes, ratio can be traded for more than just transferred bits,
+for example by requesting a torrent for a certain album or movie,
+paying some ratio for the fulfillment of the request.
+
+---
+
+Based on how bittorrent works, usually multiple people “seed” a torrent.
+This means multiple people can answer a request for trading ratio.
+Part of the request (i.e. the first 30% of a movie)
+can be fulfilled by one party, part of it by a second or even more parties.
+
+For small requests (e.g. albums), often the time between announcing the trade
+and filling the trade is important for who is able to fill it.
+Getting a 1 second head-start vastly increases your chance of a handshake
+and starting the transmission, so on average you get a vastly higher ratio gain from that torrent.
+Meaning that using a bittorrent client which is fast to answer as a seeder will lead to better outcomes.
+This could be compared to mechanisms seen in high-speed trading.
+
+---
+
+Of course these market-mechanisms are in service of a wider policy goal,
+which is to ensure the constant availability of as much high-quality data as possible.
+There are more mechanisms at play on these trackers that all contribute to this goal
+(possible keywords to research: trumping, freeleech for underseeded torrents).
+
+In general, it is important to remember that markets are only a tool,
+never an end in themselves, as neoliberalists would like us to believe.
+They always are in service of a wider goal or policy. We live in a society.
diff --git a/users/Profpatsch/cabal.project b/users/Profpatsch/cabal.project
new file mode 100644
index 0000000000..26b6186969
--- /dev/null
+++ b/users/Profpatsch/cabal.project
@@ -0,0 +1,14 @@
+packages:
+  ./my-prelude/my-prelude.cabal
+  ./my-webstuff/my-webstuff.cabal
+  ./netencode/netencode.cabal
+  ./arglib/arglib-netencode.cabal
+  ./execline/exec-helpers.cabal
+  ./htmx-experiment/htmx-experiment.cabal
+  ./mailbox-org/mailbox-org.cabal
+  ./cas-serve/cas-serve.cabal
+  ./jbovlaste-sqlite/jbovlaste-sqlite.cabal
+  ./whatcd-resolver/whatcd-resolver.cabal
+  ./openlab-tools/openlab-tools.cabal
+  ./httzip/httzip.cabal
+  ./my-xmonad/my-xmonad.cabal
diff --git a/users/Profpatsch/cas-serve/CasServe.hs b/users/Profpatsch/cas-serve/CasServe.hs
new file mode 100644
index 0000000000..62636fe9c1
--- /dev/null
+++ b/users/Profpatsch/cas-serve/CasServe.hs
@@ -0,0 +1,247 @@
+{-# LANGUAGE QuasiQuotes #-}
+{-# OPTIONS_GHC -Wno-orphans #-}
+
+module Main where
+
+import ArglibNetencode (arglibNetencode)
+import Control.Applicative
+import Control.Monad.Reader
+import Crypto.Hash qualified as Crypto
+import Data.ByteArray qualified as ByteArray
+import Data.ByteString.Lazy qualified as ByteString.Lazy
+import Data.ByteString.Lazy qualified as Lazy
+import Data.Functor.Compose
+import Data.Int (Int64)
+import Data.List qualified as List
+import Data.Text qualified as Text
+import Data.Text.IO qualified as Text
+import Database.SQLite.Simple (NamedParam ((:=)))
+import Database.SQLite.Simple qualified as Sqlite
+import Database.SQLite.Simple.FromField qualified as Sqlite
+import Database.SQLite.Simple.QQ qualified as Sqlite
+import Label
+import Netencode.Parse qualified as Net
+import Network.HTTP.Types qualified as Http
+import Network.Wai qualified as Wai
+import Network.Wai.Handler.Warp qualified as Warp
+import PossehlAnalyticsPrelude
+import System.IO (stderr)
+
+parseArglib = do
+  let env = label @"arglibEnvvar" "CAS_SERVE_ARGS"
+  let asApi =
+        Net.asRecord >>> do
+          address <- label @"bindToAddress" <$> (Net.key "bindToAddress" >>> Net.asText)
+          port <- label @"port" <$> (Net.key "port" >>> Net.asText)
+          pure (T2 address port)
+  arglibNetencode "cas-serve" (Just env)
+    <&> Net.runParse
+      [fmt|Cannot parse arguments in "{env.arglibEnvvar}"|]
+      ( Net.asRecord >>> do
+          publicApi <- label @"publicApi" <$> (Net.key "publicApi" >>> asApi)
+          privateApi <- label @"privateApi" <$> (Net.key "privateApi" >>> asApi)
+          pure $ T2 publicApi privateApi
+      )
+
+main :: IO ()
+main = do
+  withEnv $ \env ->
+    Warp.runSettings
+      (Warp.defaultSettings & Warp.setPort 7070)
+      (api env)
+
+withEnv :: (Env -> IO a) -> IO a
+withEnv inner = do
+  withSqlite "./data.sqlite" $ \envData -> do
+    withSqlite "./wordlist.sqlite" $ \envWordlist -> inner Env {..}
+
+withSqlite :: String -> (Sqlite.Connection -> IO a) -> IO a
+withSqlite fileName inner = Sqlite.withConnection fileName $ \conn -> do
+  Sqlite.setTrace conn (Just (\msg -> Text.hPutStrLn stderr [fmt|{fileName}: {msg}|]))
+  Sqlite.execute conn [Sqlite.sql|PRAGMA foreign_keys = ON|] ()
+  inner conn
+
+api :: Env -> Wai.Application
+api env req respond = do
+  case runHandler (getById <|> insertById) req env of
+    Nothing -> respond $ Wai.responseLBS Http.status404 [] "endpoint does not exist."
+    Just handler' -> do
+      handler' >>= \case
+        Left (status, err) -> respond $ Wai.responseLBS status [] (err & toLazyBytes)
+        Right (headers, body) ->
+          respond $
+            Wai.responseLBS
+              Http.status200
+              headers
+              (body & toLazyBytes)
+
+data Env = Env
+  { envWordlist :: Sqlite.Connection,
+    envData :: Sqlite.Connection
+  }
+
+-- | I don’t need any fancy routing in this, so a handler is just something that returns a @Just (IO a)@ if it wants to handle the request.
+newtype Handler a
+  = Handler (ReaderT (Wai.Request, Env) (Compose Maybe IO) a)
+  deriving newtype (Functor, Applicative, Alternative)
+
+handler :: ((Wai.Request, Env) -> Maybe (IO a)) -> Handler a
+handler f = Handler (ReaderT (Compose . f))
+
+runHandler :: Handler a -> Wai.Request -> Env -> Maybe (IO a)
+runHandler (Handler handler') req env = getCompose $ handler' & (\readerT -> runReaderT readerT (req, env))
+
+getById ::
+  Handler
+    ( Either
+        (Http.Status, ByteString)
+        ([(Http.HeaderName, ByteString)], ByteString)
+    )
+getById = handler $ \(req, env) -> do
+  guard ((req & Wai.requestMethod) == Http.methodGet)
+  case req & Wai.pathInfo of
+    ["v0", "by-id", filename] -> Just $ do
+      Sqlite.queryNamed
+        @( T3
+             "mimetype"
+             Text
+             "content"
+             ByteString
+             "size"
+             Int
+         )
+        (env.envData)
+        [Sqlite.sql|
+        SELECT
+          mimetype,
+          cast (content AS blob) as content,
+          size
+        FROM file_content
+        JOIN file_references
+          ON file_references.file_content = file_content.hash_sha256
+        WHERE
+          file_references.reference_type = 'by-id'
+          AND (file_references.name || file_references.extension) = :filename
+       |]
+        [":filename" Sqlite.:= filename]
+        <&> \case
+          [] -> Left (Http.status404, "File not found.")
+          [res] ->
+            Right
+              ( [ ("Content-Type", res.mimetype & textToBytesUtf8),
+                  ("Content-Length", res.size & showToText & textToBytesUtf8)
+                ],
+                -- TODO: should this be lazy/streamed?
+                res.content
+              )
+          _more -> Left "file_references must be unique (in type and name)" & unwrapError
+    _ -> Nothing
+
+insertById :: Handler (Either a ([(Http.HeaderName, ByteString)], ByteString))
+insertById = handler $ \(req, env) -> do
+  guard ((req & Wai.requestMethod) == Http.methodPost)
+  case req & Wai.pathInfo of
+    ["v0", "by-id"] -> Just $ do
+      let maybeText bytes = case bytesToTextUtf8 bytes of
+            Left _err -> Nothing
+            Right t -> Just t
+      let mimeType =
+            ( (req & Wai.requestHeaders & List.lookup "X-Cas-Serve-Mimetype" >>= maybeText)
+                <|> (req & Wai.requestHeaders & List.lookup "Content-Type" >>= maybeText)
+            )
+              & fromMaybe "application/octet-stream"
+
+      let magicFileEnding mimeType' = case Text.split (== '/') mimeType' of
+            [_, ""] -> Nothing
+            ["", _] -> Nothing
+            [_, "any"] -> Nothing
+            ["image", ty] -> Just (Text.cons '.' ty)
+            ["video", ty] -> Just (Text.cons '.' ty)
+            ["text", "plain"] -> Just ".txt"
+            ["text", "html"] -> Just ".html"
+            ["application", "pdf"] -> Just ".pdf"
+            ["application", "json"] -> Just ".json"
+            _ -> Nothing
+
+      let extension =
+            ( (req & Wai.requestHeaders & List.lookup "X-Cas-Serve-FileExtension" >>= maybeText)
+                <|> ( (req & Wai.requestHeaders & List.lookup "Content-Type")
+                        >>= maybeText
+                        >>= magicFileEnding
+                    )
+            )
+              -- Just the empty extension if we can’t figure it out.
+              & fromMaybe ""
+
+      body <- Wai.consumeRequestBodyStrict req
+      let hash :: Crypto.Digest Crypto.SHA256 = Crypto.hashlazy body
+      let hashBytes = hash & ByteArray.convert @(Crypto.Digest Crypto.SHA256) @ByteString
+      let len = ByteString.Lazy.length body
+      name <- getNameFromWordlist env
+      let fullname = name <> extension
+
+      let conn = env.envData
+      Sqlite.withTransaction conn $ do
+        Sqlite.executeNamed
+          conn
+          [Sqlite.sql|
+            INSERT INTO file_content
+              (content, hash_sha256, size)
+              VALUES
+              (:content, :hash_sha256, :size)
+              ON CONFLICT (hash_sha256) DO NOTHING
+          |]
+          [ ":content" := (body :: Lazy.ByteString),
+            ":hash_sha256" := (hashBytes :: ByteString),
+            ":size" := (len :: Int64)
+          ]
+
+        -- TODO: we are not checking if the name already exists,
+        -- we just assume that 1633^3 is enough to not get any collisions for now.
+        -- If the name exists, the user gets a 500.
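+        -- (Assuming 1633 is the wordlist size, that is 1633^3 ≈ 4.35 billion possible three-word names.)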
+        Sqlite.executeNamed
+          conn
+          [Sqlite.sql|
+            INSERT INTO file_references
+              (file_content, reference_type, name, extension, mimetype)
+            VALUES
+              (:file_content, :reference_type, :name, :extension, :mimetype)
+          |]
+          [ ":file_content" := (hashBytes :: ByteString),
+            ":reference_type" := ("by-id" :: Text),
+            ":name" := name,
+            ":extension" := (extension :: Text),
+            ":mimetype" := (mimeType :: Text)
+          ]
+      pure $
+        Right
+          ( [("Content-Type", "text/plain")],
+            [fmt|/v0/by-id/{fullname}|]
+          )
+    _ -> Nothing
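+
+-- A usage sketch of the endpoint above (host and port are hypothetical;
+-- the headers are exactly the ones 'insertById' reads):
+--
+-- > curl -X POST --data-binary @notes.txt \
+-- >   -H "Content-Type: text/plain" \
+-- >   http://localhost:8000/v0/by-id
+--
+-- On success, the response body is the new path, e.g. @/v0/by-id/alpha-bingo-comet.txt@.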
+
+-- | Get a random name from the wordlist: three words joined by @-@ (e.g. @alpha-bingo-comet@).
+getNameFromWordlist :: Env -> IO Text
+getNameFromWordlist env =
+  do
+    let numberOfWords = 3 :: Int
+    Sqlite.queryNamed @(Sqlite.Only Text)
+      (env.envWordlist)
+      [Sqlite.sql|SELECT word FROM wordlist ORDER BY RANDOM() LIMIT :words|]
+      [":words" Sqlite.:= numberOfWords]
+    <&> map Sqlite.fromOnly
+    <&> Text.intercalate "-"
+
+-- | We can use a record like 'T3' with a named list of types to parse a row returned by sqlite!
+instance
+  ( Sqlite.FromField t1,
+    Sqlite.FromField t2,
+    Sqlite.FromField t3
+  ) =>
+  Sqlite.FromRow (T3 l1 t1 l2 t2 l3 t3)
+  where
+  fromRow = do
+    T3
+      <$> (label @l1 <$> Sqlite.field)
+      <*> (label @l2 <$> Sqlite.field)
+      <*> (label @l3 <$> Sqlite.field)
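+
+-- A usage sketch for the instance above (the labels and the @conn@ handle are
+-- hypothetical, but the columns match 'file_references'):
+--
+-- > rows :: IO [T3 "name" Text "extension" Text "mimetype" Text]
+-- > rows = Sqlite.query_ conn [Sqlite.sql|SELECT name, extension, mimetype FROM file_references|]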
diff --git a/users/Profpatsch/cas-serve/cas-serve.cabal b/users/Profpatsch/cas-serve/cas-serve.cabal
new file mode 100644
index 0000000000..d14776700a
--- /dev/null
+++ b/users/Profpatsch/cas-serve/cas-serve.cabal
@@ -0,0 +1,73 @@
+cabal-version:      3.0
+name:               cas-serve
+version:            0.1.0.0
+author:             Profpatsch
+maintainer:         mail@profpatsch.de
+
+common common-options
+  ghc-options:
+      -Wall
+      -Wno-type-defaults
+      -Wunused-packages
+      -Wredundant-constraints
+      -fwarn-missing-deriving-strategies
+
+  -- See https://downloads.haskell.org/ghc/latest/docs/users_guide/exts.html
+  -- for a description of all these extensions
+  default-extensions:
+      -- Infer Applicative instead of Monad where possible
+    ApplicativeDo
+
+    -- Allow literal strings to be Text
+    OverloadedStrings
+
+    -- Syntactic sugar improvements
+    LambdaCase
+    MultiWayIf
+
+    -- Makes the (deprecated) usage of * instead of Data.Kind.Type an error
+    NoStarIsType
+
+    -- Convenient and crucial to deal with ambiguous field names, commonly
+    -- known as RecordDotSyntax
+    OverloadedRecordDot
+
+    -- does not export record fields as functions, use OverloadedRecordDot to access instead
+    NoFieldSelectors
+
+    -- Record punning
+    RecordWildCards
+
+    -- Improved Deriving
+    DerivingStrategies
+    DerivingVia
+
+    -- Type-level strings
+    DataKinds
+
+    -- to enable the `type` keyword in import lists (ormolu uses this automatically)
+    ExplicitNamespaces
+
+  default-language: GHC2021
+
+
+executable cas-serve
+    import: common-options
+
+    main-is:          CasServe.hs
+
+    build-depends:
+        base >=4.15 && <5,
+        pa-prelude,
+        pa-label,
+        arglib-netencode,
+        netencode,
+        text,
+        sqlite-simple,
+        http-types,
+        wai,
+        warp,
+        mtl,
+        bytestring,
+        memory,
+        crypton,
diff --git a/users/Profpatsch/cas-serve/default.nix b/users/Profpatsch/cas-serve/default.nix
new file mode 100644
index 0000000000..14c3e4aa13
--- /dev/null
+++ b/users/Profpatsch/cas-serve/default.nix
@@ -0,0 +1,38 @@
+{ depot, pkgs, lib, ... }:
+
+let
+  bins = depot.nix.getBins pkgs.sqlite [ "sqlite3" ];
+
+  cas-serve = pkgs.haskellPackages.mkDerivation {
+    pname = "cas-serve";
+    version = "0.1.0";
+
+    src = depot.users.Profpatsch.exactSource ./. [
+      ./cas-serve.cabal
+      ./CasServe.hs
+    ];
+
+    libraryHaskellDepends = [
+      pkgs.haskellPackages.pa-prelude
+      pkgs.haskellPackages.pa-label
+      pkgs.haskellPackages.crypton
+      pkgs.haskellPackages.wai
+      pkgs.haskellPackages.warp
+      pkgs.haskellPackages.sqlite-simple
+      depot.users.Profpatsch.arglib.netencode.haskell
+      depot.users.Profpatsch.netencode.netencode-hs
+    ];
+
+    isExecutable = true;
+    isLibrary = false;
+    license = lib.licenses.mit;
+  };
+
+  create-cas-database = depot.nix.writeExecline "create-cas-database" { readNArgs = 1; } [
+    bins.sqlite3
+    "$1"
+    "-init"
+    ./schema.sql
+  ];
+in
+cas-serve
diff --git a/users/Profpatsch/cas-serve/schema.sql b/users/Profpatsch/cas-serve/schema.sql
new file mode 100644
index 0000000000..b61a7a1ad5
--- /dev/null
+++ b/users/Profpatsch/cas-serve/schema.sql
@@ -0,0 +1,38 @@
+-- SQLite
+.dump
+
+PRAGMA foreign_keys = ON;
+
+BEGIN transaction;
+
+create table if not exists file_content (
+  content blob NOT NULL,
+  hash_sha256 blob PRIMARY KEY,
+  size integer NOT NULL
+) WITHOUT ROWID;
+
+
+create table if not exists file_references (
+  rowid integer PRIMARY KEY,
+  file_content NOT NULL REFERENCES file_content ON DELETE CASCADE,
+  reference_type text NOT NULL,
+  name text NOT NULL,
+  extension text NOT NULL,
+  mimetype text NOT NULL
+);
+
+create unique index if not exists file_references_type_name_unique on file_references (reference_type, name);
+
+-- insert into file_content values ('mycontent', 'myhash', 9);
+-- insert into file_references values (NULL, 'myhash', 'by-id', 'myschranz', '.txt', 'text/plain');
+-- insert into file_content values (readfile('/home/philip/Pictures/screenshot.png'), 'anotherhash', 999);
+-- insert into file_references values (NULL, 'anotherhash', 'by-id', 'img', '.png', 'image/png');
+
+select * from file_content;
+
+select * from file_references;
+
+COMMIT;
+
+-- drop table file_content;
+-- drop table file_references;
diff --git a/users/Profpatsch/cas-serve/wordlist.json b/users/Profpatsch/cas-serve/wordlist.json
new file mode 100644
index 0000000000..cc4bc62ad1
--- /dev/null
+++ b/users/Profpatsch/cas-serve/wordlist.json
@@ -0,0 +1 @@
+ [ "acrobat", "africa", "alaska", "albert", "albino", "album", "alcohol", "alex", "alpha", "amadeus", "amanda", "amazon", "america", "analog", "animal", "antenna", "antonio", "apollo", "april", "aroma", "artist", "aspirin", "athlete", "atlas", "banana", "bandit", "banjo", "bikini", "bingo", "bonus", "camera", "canada", "carbon", "casino", "catalog", "cinema", "citizen", "cobra", "comet", "compact", "complex", "context", "credit", "critic", "crystal", "culture", "david", "delta", "dialog", "diploma", "doctor", "domino", "dragon", "drama", "extra", "fabric", "final", "focus", "forum", "galaxy", "gallery", "global", "harmony", "hotel", "humor", "index", "japan", "kilo", "lemon", "liter", "lotus", "mango", "melon", "menu", "meter", "metro", "mineral", "model", "music", "object", "piano", "pirate", "plastic", "radio", "report", "signal", "sport", "studio", "subject", "super", "tango", "taxi", "tempo", "tennis", "textile", "tokyo", "total", "tourist", "video", "visa", "academy", "alfred", "atlanta", "atomic", "barbara", "bazaar", "brother", "budget", "cabaret", "cadet", "candle", "capsule", "caviar", "channel", "chapter", "circle", "cobalt", "comrade", "condor", "crimson", "cyclone", "darwin", "declare", "denver", "desert", "divide", "dolby", "domain", "double", "eagle", "echo", "eclipse", "editor", "educate", "edward", "effect", "electra", "emerald", "emotion", "empire", "eternal", "evening", "exhibit", "expand", "explore", "extreme", "ferrari", "forget", "freedom", "friday", "fuji", "galileo", "genesis", "gravity", "habitat", "hamlet", "harlem", "helium", "holiday", "hunter", "ibiza", "iceberg", "imagine", "infant", "isotope", "jackson", "jamaica", "jasmine", "java", "jessica", "kitchen", "lazarus", "letter", "license", "lithium", "loyal", "lucky", "magenta", "manual", "marble", "maxwell", "mayor", "monarch", "monday", "money", "morning", "mother", "mystery", "native", "nectar", "nelson", "network", "nikita", "nobel", "nobody", "nominal", "norway", "nothing", "number", "october", "office", "oliver", "opinion", "option", "order", "outside", "package", "pandora", "panther", "papa", "pattern", "pedro", "pencil", "people", "phantom", "philips", "pioneer", "pluto", "podium", "portal", "potato", "process", "proxy", "pupil", "python", "quality", "quarter", "quiet", "rabbit", "radical", "radius", "rainbow", "ramirez", "ravioli", "raymond", "respect", "respond", "result", "resume", "richard", "river", "roger", "roman", "rondo", "sabrina", "salary", "salsa", "sample", "samuel", "saturn", "savage", "scarlet", "scorpio", "sector", "serpent", "shampoo", "sharon", "silence", "simple", "society", "sonar", "sonata", "soprano", "sparta", "spider", "sponsor", "abraham", "action", "active", "actor", "adam", "address", "admiral", "adrian", "agenda", "agent", "airline", "airport", "alabama", "aladdin", "alarm", "algebra", "alibi", "alice", "alien", "almond", "alpine", "amber", "amigo", "ammonia", "analyze", "anatomy", "angel", "annual", "answer", "apple", "archive", "arctic", "arena", "arizona", "armada", "arnold", "arsenal", "arthur", "asia", "aspect", "athena", "audio", "august", "austria", "avenue", "average", "axiom", "aztec", "bagel", "baker", "balance", "ballad", "ballet", "bambino", "bamboo", "baron", "basic", "basket", "battery", "belgium", "benefit", "berlin", "bermuda", "bernard", "bicycle", "binary", "biology", "bishop", "blitz", "block", "blonde", "bonjour", "boris", "boston", "bottle", "boxer", "brandy", "bravo", "brazil", "bridge", "british", "bronze", "brown", "bruce", "bruno", "brush", "burger", 
"burma", "cabinet", "cactus", "cafe", "cairo", "calypso", "camel", "campus", "canal", "cannon", "canoe", "cantina", "canvas", "canyon", "capital", "caramel", "caravan", "career", "cargo", "carlo", "carol", "carpet", "cartel", "cartoon", "castle", "castro", "cecilia", "cement", "center", "century", "ceramic", "chamber", "chance", "change", "chaos", "charlie", "charm", "charter", "cheese", "chef", "chemist", "cherry", "chess", "chicago", "chicken", "chief", "china", "cigar", "circus", "city", "clara", "classic", "claudia", "clean", "client", "climax", "clinic", "clock", "club", "cockpit", "coconut", "cola", "collect", "colombo", "colony", "color", "combat", "comedy", "command", "company", "concert", "connect", "consul", "contact", "contour", "control", "convert", "copy", "corner", "corona", "correct", "cosmos", "couple", "courage", "cowboy", "craft", "crash", "cricket", "crown", "cuba", "dallas", "dance", "daniel", "decade", "decimal", "degree", "delete", "deliver", "delphi", "deluxe", "demand", "demo", "denmark", "derby", "design", "detect", "develop", "diagram", "diamond", "diana", "diego", "diesel", "diet", "digital", "dilemma", "direct", "disco", "disney", "distant", "dollar", "dolphin", "donald", "drink", "driver", "dublin", "duet", "dynamic", "earth", "east", "ecology", "economy", "edgar", "egypt", "elastic", "elegant", "element", "elite", "elvis", "email", "empty", "energy", "engine", "english", "episode", "equator", "escape", "escort", "ethnic", "europe", "everest", "evident", "exact", "example", "exit", "exotic", "export", "express", "factor", "falcon", "family", "fantasy", "fashion", "fiber", "fiction", "fidel", "fiesta", "figure", "film", "filter", "finance", "finish", "finland", "first", "flag", "flash", "florida", "flower", "fluid", "flute", "folio", "ford", "forest", "formal", "formula", "fortune", "forward", "fragile", "france", "frank", "fresh", "friend", "frozen", "future", "gabriel", "gamma", "garage", "garcia", "garden", "garlic", "gemini", "general", "genetic", "genius", "germany", "gloria", "gold", "golf", "gondola", "gong", "good", "gordon", "gorilla", "grand", "granite", "graph", "green", "group", "guide", "guitar", "guru", "hand", "happy", "harbor", "harvard", "havana", "hawaii", "helena", "hello", "henry", "hilton", "history", "horizon", "house", "human", "icon", "idea", "igloo", "igor", "image", "impact", "import", "india", "indigo", "input", "insect", "instant", "iris", "italian", "jacket", "jacob", "jaguar", "janet", "jargon", "jazz", "jeep", "john", "joker", "jordan", "judo", "jumbo", "june", "jungle", "junior", "jupiter", "karate", "karma", "kayak", "kermit", "king", "koala", "korea", "labor", "lady", "lagoon", "laptop", "laser", "latin", "lava", "lecture", "left", "legal", "level", "lexicon", "liberal", "libra", "lily", "limbo", "limit", "linda", "linear", "lion", "liquid", "little", "llama", "lobby", "lobster", "local", "logic", "logo", "lola", "london", "lucas", "lunar", "machine", "macro", "madam", "madonna", "madrid", "maestro", "magic", "magnet", "magnum", "mailbox", "major", "mama", "mambo", "manager", "manila", "marco", "marina", "market", "mars", "martin", "marvin", "mary", "master", "matrix", "maximum", "media", "medical", "mega", "melody", "memo", "mental", "mentor", "mercury", "message", "metal", "meteor", "method", "mexico", "miami", "micro", "milk", "million", "minimum", "minus", "minute", "miracle", "mirage", "miranda", "mister", "mixer", "mobile", "modem", "modern", "modular", "moment", "monaco", "monica", "monitor", "mono", "monster", "montana", 
"morgan", "motel", "motif", "motor", "mozart", "multi", "museum", "mustang", "natural", "neon", "nepal", "neptune", "nerve", "neutral", "nevada", "news", "next", "ninja", "nirvana", "normal", "nova", "novel", "nuclear", "numeric", "nylon", "oasis", "observe", "ocean", "octopus", "olivia", "olympic", "omega", "opera", "optic", "optimal", "orange", "orbit", "organic", "orient", "origin", "orlando", "oscar", "oxford", "oxygen", "ozone", "pablo", "pacific", "pagoda", "palace", "pamela", "panama", "pancake", "panda", "panel", "panic", "paradox", "pardon", "paris", "parker", "parking", "parody", "partner", "passage", "passive", "pasta", "pastel", "patent", "patient", "patriot", "patrol", "pegasus", "pelican", "penguin", "pepper", "percent", "perfect", "perfume", "period", "permit", "person", "peru", "phone", "photo", "picasso", "picnic", "picture", "pigment", "pilgrim", "pilot", "pixel", "pizza", "planet", "plasma", "plaza", "pocket", "poem", "poetic", "poker", "polaris", "police", "politic", "polo", "polygon", "pony", "popcorn", "popular", "postage", "precise", "prefix", "premium", "present", "price", "prince", "printer", "prism", "private", "prize", "product", "profile", "program", "project", "protect", "proton", "public", "pulse", "puma", "pump", "pyramid", "queen", "radar", "ralph", "random", "rapid", "rebel", "record", "recycle", "reflex", "reform", "regard", "regular", "relax", "reptile", "reverse", "ricardo", "right", "ringo", "risk", "ritual", "robert", "robot", "rocket", "rodeo", "romeo", "royal", "russian", "safari", "salad", "salami", "salmon", "salon", "salute", "samba", "sandra", "santana", "sardine", "school", "scoop", "scratch", "screen", "script", "scroll", "second", "secret", "section", "segment", "select", "seminar", "senator", "senior", "sensor", "serial", "service", "shadow", "sharp", "sheriff", "shock", "short", "shrink", "sierra", "silicon", "silk", "silver", "similar", "simon", "single", "siren", "slang", "slogan", "smart", "smoke", "snake", "social", "soda", "solar", "solid", "solo", "sonic", "source", "soviet", "special", "speed", "sphere", "spiral", "spirit", "spring", "static", "status", "stereo", "stone", "stop", "street", "strong", "student", "style", "sultan", "susan", "sushi", "suzuki", "switch", "symbol", "system", "tactic", "tahiti", "talent", "tarzan", "telex", "texas", "theory", "thermos", "tiger", "titanic", "tomato", "topic", "tornado", "toronto", "torpedo", "totem", "tractor", "traffic", "transit", "trapeze", "travel", "tribal", "trick", "trident", "trilogy", "tripod", "tropic", "trumpet", "tulip", "tuna", "turbo", "twist", "ultra", "uniform", "union", "uranium", "vacuum", "valid", "vampire", "vanilla", "vatican", "velvet", "ventura", "venus", "vertigo", "veteran", "victor", "vienna", "viking", "village", "vincent", "violet", "violin", "virtual", "virus", "vision", "visitor", "visual", "vitamin", "viva", "vocal", "vodka", "volcano", "voltage", "volume", "voyage", "water", "weekend", "welcome", "western", "window", "winter", "wizard", "wolf", "world", "xray", "yankee", "yoga", "yogurt", "yoyo", "zebra", "zero", "zigzag", "zipper", "zodiac", "zoom", "acid", "adios", "agatha", "alamo", "alert", "almanac", "aloha", "andrea", "anita", "arcade", "aurora", "avalon", "baby", "baggage", "balloon", "bank", "basil", "begin", "biscuit", "blue", "bombay", "botanic", "brain", "brenda", "brigade", "cable", "calibre", "carmen", "cello", "celtic", "chariot", "chrome", "citrus", "civil", "cloud", "combine", "common", "cool", "copper", "coral", "crater", "cubic", "cupid", 
"cycle", "depend", "door", "dream", "dynasty", "edison", "edition", "enigma", "equal", "eric", "event", "evita", "exodus", "extend", "famous", "farmer", "food", "fossil", "frog", "fruit", "geneva", "gentle", "george", "giant", "gilbert", "gossip", "gram", "greek", "grille", "hammer", "harvest", "hazard", "heaven", "herbert", "heroic", "hexagon", "husband", "immune", "inca", "inch", "initial", "isabel", "ivory", "jason", "jerome", "joel", "joshua", "journal", "judge", "juliet", "jump", "justice", "kimono", "kinetic", "leonid", "leopard", "lima", "maze", "medusa", "member", "memphis", "michael", "miguel", "milan", "mile", "miller", "mimic", "mimosa", "mission", "monkey", "moral", "moses", "mouse", "nancy", "natasha", "nebula", "nickel", "nina", "noise", "orchid", "oregano", "origami", "orinoco", "orion", "othello", "paper", "paprika", "prelude", "prepare", "pretend", "promise", "prosper", "provide", "puzzle", "remote", "repair", "reply", "rival", "riviera", "robin", "rose", "rover", "rudolf", "saga", "sahara", "scholar", "shelter", "ship", "shoe", "sigma", "sister", "sleep", "smile", "spain", "spark", "split", "spray", "square", "stadium", "star", "storm", "story", "strange", "stretch", "stuart", "subway", "sugar", "sulfur", "summer", "survive", "sweet", "swim", "table", "taboo", "target", "teacher", "telecom", "temple", "tibet", "ticket", "tina", "today", "toga", "tommy", "tower", "trivial", "tunnel", "turtle", "twin", "uncle", "unicorn", "unique", "update", "valery", "vega", "version", "voodoo", "warning", "william", "wonder", "year", "yellow", "young", "absent", "absorb", "absurd", "accent", "alfonso", "alias", "ambient", "anagram", "andy", "anvil", "appear", "apropos", "archer", "ariel", "armor", "arrow", "austin", "avatar", "axis", "baboon", "bahama", "bali", "balsa", "barcode", "bazooka", "beach", "beast", "beatles", "beauty", "before", "benny", "betty", "between", "beyond", "billy", "bison", "blast", "bless", "bogart", "bonanza", "book", "border", "brave", "bread", "break", "broken", "bucket", "buenos", "buffalo", "bundle", "button", "buzzer", "byte", "caesar", "camilla", "canary", "candid", "carrot", "cave", "chant", "child", "choice", "chris", "cipher", "clarion", "clark", "clever", "cliff", "clone", "conan", "conduct", "congo", "costume", "cotton", "cover", "crack", "current", "danube", "data", "decide", "deposit", "desire", "detail", "dexter", "dinner", "donor", "druid", "drum", "easy", "eddie", "enjoy", "enrico", "epoxy", "erosion", "except", "exile", "explain", "fame", "fast", "father", "felix", "field", "fiona", "fire", "fish", "flame", "flex", "flipper", "float", "flood", "floor", "forbid", "forever", "fractal", "frame", "freddie", "front", "fuel", "gallop", "game", "garbo", "gate", "gelatin", "gibson", "ginger", "giraffe", "gizmo", "glass", "goblin", "gopher", "grace", "gray", "gregory", "grid", "griffin", "ground", "guest", "gustav", "gyro", "hair", "halt", "harris", "heart", "heavy", "herman", "hippie", "hobby", "honey", "hope", "horse", "hostel", "hydro", "imitate", "info", "ingrid", "inside", "invent", "invest", "invite", "ivan", "james", "jester", "jimmy", "join", "joseph", "juice", "julius", "july", "kansas", "karl", "kevin", "kiwi", "ladder", "lake", "laura", "learn", "legacy", "legend", "lesson", "life", "light", "list", "locate", "lopez", "lorenzo", "love", "lunch", "malta", "mammal", "margin", "margo", "marion", "mask", "match", "mayday", "meaning", "mercy", "middle", "mike", "mirror", "modest", "morph", "morris", "mystic", "nadia", "nato", "navy", "needle", 
"neuron", "never", "newton", "nice", "night", "nissan", "nitro", "nixon", "north", "oberon", "octavia", "ohio", "olga", "open", "opus", "orca", "oval", "owner", "page", "paint", "palma", "parent", "parlor", "parole", "paul", "peace", "pearl", "perform", "phoenix", "phrase", "pierre", "pinball", "place", "plate", "plato", "plume", "pogo", "point", "polka", "poncho", "powder", "prague", "press", "presto", "pretty", "prime", "promo", "quest", "quick", "quiz", "quota", "race", "rachel", "raja", "ranger", "region", "remark", "rent", "reward", "rhino", "ribbon", "rider", "road", "rodent", "round", "rubber", "ruby", "rufus", "sabine", "saddle", "sailor", "saint", "salt", "scale", "scuba", "season", "secure", "shake", "shallow", "shannon", "shave", "shelf", "sherman", "shine", "shirt", "side", "sinatra", "sincere", "size", "slalom", "slow", "small", "snow", "sofia", "song", "sound", "south", "speech", "spell", "spend", "spoon", "stage", "stamp", "stand", "state", "stella", "stick", "sting", "stock", "store", "sunday", "sunset", "support", "supreme", "sweden", "swing", "tape", "tavern", "think", "thomas", "tictac", "time", "toast", "tobacco", "tonight", "torch", "torso", "touch", "toyota", "trade", "tribune", "trinity", "triton", "truck", "trust", "type", "under", "unit", "urban", "urgent", "user", "value", "vendor", "venice", "verona", "vibrate", "virgo", "visible", "vista", "vital", "voice", "vortex", "waiter", "watch", "wave", "weather", "wedding", "wheel", "whiskey", "wisdom", "android", "annex", "armani", "cake", "confide", "deal", "define", "dispute", "genuine", "idiom", "impress", "include", "ironic", "null", "nurse", "obscure", "prefer", "prodigy", "ego", "fax", "jet", "job", "rio", "ski", "yes" ]
diff --git a/users/Profpatsch/cas-serve/wordlist.sqlite b/users/Profpatsch/cas-serve/wordlist.sqlite
new file mode 100644
index 0000000000..5074474ba0
--- /dev/null
+++ b/users/Profpatsch/cas-serve/wordlist.sqlite
Binary files differ
diff --git a/users/Profpatsch/declib/.eslintrc.json b/users/Profpatsch/declib/.eslintrc.json
new file mode 100644
index 0000000000..9cffc711db
--- /dev/null
+++ b/users/Profpatsch/declib/.eslintrc.json
@@ -0,0 +1,14 @@
+{
+  "extends": ["eslint:recommended", "plugin:@typescript-eslint/strict-type-checked"],
+  "parser": "@typescript-eslint/parser",
+  "plugins": ["@typescript-eslint"],
+  "parserOptions": {
+    "project": true
+  },
+  "root": true,
+  "rules": {
+    "no-unused-vars": "warn",
+    "prefer-const": "warn",
+    "@typescript-eslint/no-unused-vars": "warn"
+  }
+}
diff --git a/users/Profpatsch/declib/.gitignore b/users/Profpatsch/declib/.gitignore
new file mode 100644
index 0000000000..8b56bf4ede
--- /dev/null
+++ b/users/Profpatsch/declib/.gitignore
@@ -0,0 +1,6 @@
+/node_modules/
+/.ninja/
+/output/
+
+# ignore for now
+/package.lock.json
diff --git a/users/Profpatsch/declib/.prettierrc b/users/Profpatsch/declib/.prettierrc
new file mode 100644
index 0000000000..7258fb81e0
--- /dev/null
+++ b/users/Profpatsch/declib/.prettierrc
@@ -0,0 +1,8 @@
+{
+  "trailingComma": "all",
+  "tabWidth": 2,
+  "semi": true,
+  "singleQuote": true,
+  "printWidth": 100,
+  "arrowParens": "avoid"
+}
diff --git a/users/Profpatsch/declib/README.md b/users/Profpatsch/declib/README.md
new file mode 100644
index 0000000000..11a8bf21a5
--- /dev/null
+++ b/users/Profpatsch/declib/README.md
@@ -0,0 +1,4 @@
+# Decentralized Library
+
+https://en.wikipedia.org/wiki/Distributed_library
+https://faculty.ist.psu.edu/jjansen/academic/pubs/ride98/ride98.html
diff --git a/users/Profpatsch/declib/build.ninja b/users/Profpatsch/declib/build.ninja
new file mode 100644
index 0000000000..f8844fc9be
--- /dev/null
+++ b/users/Profpatsch/declib/build.ninja
@@ -0,0 +1,16 @@
+
+builddir = .ninja
+
+outdir = ./output
+jsdir = $outdir/js
+
+rule tsc
+  command = node_modules/.bin/tsc
+
+build $outdir/index.js: tsc | index.ts tsconfig.json
+
+rule run
+  command = node $in
+
+build run: run $outdir/index.js
+  pool = console
diff --git a/users/Profpatsch/declib/index.ts b/users/Profpatsch/declib/index.ts
new file mode 100644
index 0000000000..c6a26f0922
--- /dev/null
+++ b/users/Profpatsch/declib/index.ts
@@ -0,0 +1,245 @@
+import generator, { MegalodonInterface } from 'megalodon';
+import { Account } from 'megalodon/lib/src/entities/account';
+import * as masto from 'megalodon/lib/src/entities/notification';
+import { Status } from 'megalodon/lib/src/entities/status';
+import * as rxjs from 'rxjs';
+import { Observable } from 'rxjs';
+import { NodeEventHandler } from 'rxjs/internal/observable/fromEvent';
+import * as sqlite from 'sqlite';
+import sqlite3 from 'sqlite3';
+import * as parse5 from 'parse5';
+import { mergeMap } from 'rxjs/operators';
+
+type Events =
+  | { type: 'connect'; event: [] }
+  | { type: 'update'; event: Status }
+  | { type: 'notification'; event: Notification }
+  | { type: 'delete'; event: number }
+  | { type: 'error'; event: Error }
+  | { type: 'heartbeat'; event: [] }
+  | { type: 'close'; event: [] }
+  | { type: 'parser-error'; event: Error };
+
+type Notification = masto.Notification & {
+  type: 'favourite' | 'reblog' | 'status' | 'mention' | 'poll' | 'update';
+  status: NonNullable<masto.Notification['status']>;
+  account: NonNullable<masto.Notification['account']>;
+};
+
+class Main {
+  private client: MegalodonInterface;
+  private socket: Observable<Events>;
+  private state!: State;
+  private config: {
+    databaseFile?: string;
+    baseServer: string;
+  };
+
+  private constructor() {
+    this.config = {
+      databaseFile: process.env['DECLIB_DATABASE_FILE'],
+      baseServer: process.env['DECLIB_MASTODON_SERVER'] ?? 'mastodon.xyz',
+    };
+    const ACCESS_TOKEN = process.env['DECLIB_MASTODON_ACCESS_TOKEN'];
+
+    if (!ACCESS_TOKEN) {
+      console.error('Please set DECLIB_MASTODON_ACCESS_TOKEN');
+      process.exit(1);
+    }
+    this.client = generator('mastodon', `https://${this.config.baseServer}`, ACCESS_TOKEN);
+    const websocket = this.client.publicSocket();
+    function mk<Name extends string, Type>(name: Name): Observable<{ type: Name; event: Type }> {
+      const wrap =
+        (h: NodeEventHandler) =>
+        (event: Type): void => {
+          h({ type: name, event });
+        };
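+      // Caveat: wrap(hdl) creates a fresh closure on every call, so the
+      // removeListener teardown below is passed a different function than the
+      // one registered with websocket.on and does not actually detach it.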
+      return rxjs.fromEventPattern<{ type: Name; event: Type }>(
+        hdl => websocket.on(name, wrap(hdl)),
+        hdl => websocket.removeListener(name, wrap(hdl)),
+      );
+    }
+    this.socket = rxjs.merge(
+      mk<'connect', []>('connect'),
+      mk<'update', Status>('update'),
+      mk<'notification', Notification>('notification'),
+      mk<'delete', number>('delete'),
+      mk<'error', Error>('error'),
+      mk<'heartbeat', []>('heartbeat'),
+      mk<'close', []>('close'),
+      mk<'parser-error', Error>('parser-error'),
+    );
+  }
+
+  static async init(): Promise<Main> {
+    const self = new Main();
+    self.state = await State.init(self.config);
+    return self;
+  }
+
+  public main() {
+    // const res = await this.getAcc({ username: 'grindhold', server: 'chaos.social' });
+    // const res = await this.getAcc({ username: 'Profpatsch', server: 'mastodon.xyz' });
+    // const res = await this.getStatus('111862170899069698');
+    this.socket
+      .pipe(
+        mergeMap(async event => {
+          switch (event.type) {
+            case 'update': {
+              await this.state.addStatus(event.event);
+              console.log(`${event.event.account.acct}: ${event.event.content}`);
+              console.log(await this.state.databaseInternal.all(`SELECT * from status`));
+              break;
+            }
+            case 'notification': {
+              console.log(`NOTIFICATION (${event.event.type}):`);
+              console.log(event.event);
+              console.log(event.event.status.content);
+              const content = parseContent(event.event.status.content);
+              if (content) {
+                switch (content.command) {
+                  case 'addbook': {
+                    if (content.content[0]) {
+                      const book = {
+                        $owner: event.event.account.acct,
+                        $bookid: content.content[0],
+                      };
+                      console.log('adding book', book);
+                      await this.state.addBook(book);
+                      await this.client.postStatus(
+                        `@${event.event.account.acct} I have inserted book "${book.$bookid}" for you.`,
+                        {
+                          in_reply_to_id: event.event.status.id,
+                          visibility: 'direct',
+                        },
+                      );
+                    }
+                  }
+                }
+              }
+              break;
+            }
+            default: {
+              console.log(event);
+            }
+          }
+        }),
+      )
+      .subscribe();
+  }
+
+  private async getStatus(id: string): Promise<Status | null> {
+    return (await this.client.getStatus(id)).data;
+  }
+
+  private async getAcc(user: { username: string; server: string }): Promise<Account | null> {
+    const fullAccount = `${user.username}@${user.server}`;
+    const res = await this.client.searchAccount(fullAccount, {
+      limit: 10,
+    });
+    const accs = res.data.filter(acc =>
+      this.config.baseServer === user.server
+        ? acc.acct === user.username
+        : acc.acct === fullAccount,
+    );
+    return accs[0] ?? null;
+  }
+}
+
+type Interaction = {
+  originalStatus: { id: string };
+  lastStatus: { id: string };
+};
+
+class State {
+  db!: sqlite.Database;
+  private constructor() {}
+
+  static async init(config: { databaseFile?: string }): Promise<State> {
+    const s = new State();
+    s.db = await sqlite.open({
+      filename: config.databaseFile ?? ':memory:',
+      driver: sqlite3.Database,
+    });
+    await s.db.run('CREATE TABLE IF NOT EXISTS books (owner text, bookid text)');
+    await s.db.run('CREATE TABLE IF NOT EXISTS status (id text primary key, content json)');
+    return s;
+  }
+
+  async addBook(opts: { $owner: string; $bookid: string }) {
+    return await this.db.run('INSERT INTO books (owner, bookid) VALUES ($owner, $bookid)', opts);
+  }
+
+  async addStatus($status: Status) {
+    return await this.db.run(
+      `
+      INSERT INTO status (id, content) VALUES ($id, $status)
+      ON CONFLICT (id) DO UPDATE SET id = $id, content = $status
+      `,
+      {
+        $id: $status.id,
+        $status: JSON.stringify($status),
+      },
+    );
+  }
+
+  get databaseInternal() {
+    return this.db;
+  }
+}
+
+/** Parse the message: take the plain text; the first line is the command and any successive lines are content */
+function parseContent(html: string): { command: string; content: string[] } | null {
+  const plain = contentToPlainText(html).split('\n');
+  if (plain[0]) {
+    return { command: plain[0].replace(' ', '').trim(), content: plain.slice(1) };
+  } else {
+    return null;
+  }
+}
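+
+// A usage sketch (hypothetical status content):
+//   parseContent('addbook<br>some-book-id')
+//   // => { command: 'addbook', content: ['some-book-id'] }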
+
+/** Convert the Html content to a plain text (best effort), keeping line breaks */
+function contentToPlainText(html: string): string {
+  const queue: parse5.DefaultTreeAdapterMap['childNode'][] = [];
+  queue.push(...parse5.parseFragment(html).childNodes);
+  let res = '';
+  let endOfP = false;
+  for (const el of queue) {
+    switch (el.nodeName) {
+      case '#text': {
+        res += (el as parse5.DefaultTreeAdapterMap['textNode']).value;
+        break;
+      }
+      case 'br': {
+        res += '\n';
+        break;
+      }
+      case 'p': {
+        if (endOfP) {
+          res += '\n';
+          endOfP = false;
+        }
+        queue.push(...el.childNodes);
+        endOfP = true;
+        break;
+      }
+      case 'span': {
+        break;
+      }
+      default: {
+        console.warn('unknown element in message: ', el);
+        break;
+      }
+    }
+  }
+  return res.trim();
+}
+
+Main.init().then(
+  m => {
+    m.main();
+  },
+  rej => {
+    throw rej;
+  },
+);
diff --git a/users/Profpatsch/declib/package.json b/users/Profpatsch/declib/package.json
new file mode 100644
index 0000000000..93176e8581
--- /dev/null
+++ b/users/Profpatsch/declib/package.json
@@ -0,0 +1,25 @@
+{
+  "name": "declib",
+  "version": "1.0.0",
+  "description": "",
+  "main": "index.ts",
+  "type": "commonjs",
+  "scripts": {
+    "run": "ninja run"
+  },
+  "author": "",
+  "license": "MIT",
+  "dependencies": {
+    "megalodon": "^9.2.2",
+    "parse5": "^7.1.2",
+    "rxjs": "^7.8.1",
+    "sqlite": "^5.1.1",
+    "sqlite3": "^5.1.7"
+  },
+  "devDependencies": {
+    "@typescript-eslint/eslint-plugin": "^6.21.0",
+    "@typescript-eslint/parser": "^6.21.0",
+    "eslint": "^8.56.0",
+    "typescript": "^5.3.3"
+  }
+}
diff --git a/users/Profpatsch/declib/tsconfig.json b/users/Profpatsch/declib/tsconfig.json
new file mode 100644
index 0000000000..b7f2f4c18b
--- /dev/null
+++ b/users/Profpatsch/declib/tsconfig.json
@@ -0,0 +1,25 @@
+{
+  "compilerOptions": {
+    "strict": true,
+    "module": "NodeNext",
+    "sourceMap": true,
+    "outDir": "output",
+    "target": "ES6",
+    "lib": [],
+    "typeRoots": ["node_modules/@types", "shims/@types"],
+    "moduleResolution": "NodeNext",
+
+    // importHelpers & downlevelIteration will reduce the generated javascript for new language features.
+    // `importHelpers` requires the `tslib` dependency.
+    // "downlevelIteration": true,
+    // "importHelpers": true
+    "noFallthroughCasesInSwitch": true,
+    "noImplicitOverride": true,
+    "noImplicitReturns": true,
+    "noPropertyAccessFromIndexSignature": true,
+    "noUncheckedIndexedAccess": true,
+
+  },
+
+  "files": ["index.ts"]
+}
diff --git a/users/Profpatsch/dhall/lib.dhall b/users/Profpatsch/dhall/lib.dhall
new file mode 100644
index 0000000000..fb97ba6070
--- /dev/null
+++ b/users/Profpatsch/dhall/lib.dhall
@@ -0,0 +1,84 @@
+let List/map
+    : ∀(a : Type) → ∀(b : Type) → (a → b) → List a → List b
+    = λ(a : Type) →
+      λ(b : Type) →
+      λ(f : a → b) →
+      λ(xs : List a) →
+        List/build
+          b
+          ( λ(list : Type) →
+            λ(cons : b → list → list) →
+              List/fold a xs list (λ(x : a) → cons (f x))
+          )
+
+let
+
+    --| Concatenate a `List` of `List`s into a single `List`
+    List/concat
+    : ∀(a : Type) → List (List a) → List a
+    = λ(a : Type) →
+      λ(xss : List (List a)) →
+        List/build
+          a
+          ( λ(list : Type) →
+            λ(cons : a → list → list) →
+            λ(nil : list) →
+              List/fold
+                (List a)
+                xss
+                list
+                (λ(xs : List a) → λ(ys : list) → List/fold a xs list cons ys)
+                nil
+          )
+
+let
+
+    --| Transform a list by applying a function to each element and flattening the results
+    List/concatMap
+    : ∀(a : Type) → ∀(b : Type) → (a → List b) → List a → List b
+    = λ(a : Type) →
+      λ(b : Type) →
+      λ(f : a → List b) →
+      λ(xs : List a) →
+        List/build
+          b
+          ( λ(list : Type) →
+            λ(cons : b → list → list) →
+              List/fold a xs list (λ(x : a) → List/fold b (f x) list cons)
+          )
+
+let Status = < Empty | NonEmpty : Text >
+
+let
+
+    {-|
+    Transform each value in a `List` to `Text` and then concatenate them with a
+    separator in between each value
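+
+    For example: `Text/concatMapSep ", " Natural Natural/show [ 1, 2, 3 ]`
+    evaluates to `"1, 2, 3"`.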
+    -}
+    Text/concatMapSep
+    : ∀(separator : Text) → ∀(a : Type) → (a → Text) → List a → Text
+    = λ(separator : Text) →
+      λ(a : Type) →
+      λ(f : a → Text) →
+      λ(elements : List a) →
+        let status =
+              List/fold
+                a
+                elements
+                Status
+                ( λ(x : a) →
+                  λ(status : Status) →
+                    merge
+                      { Empty = Status.NonEmpty (f x)
+                      , NonEmpty =
+                          λ(result : Text) →
+                            Status.NonEmpty (f x ++ separator ++ result)
+                      }
+                      status
+                )
+                Status.Empty
+
+        in  merge { Empty = "", NonEmpty = λ(result : Text) → result } status
+
+in  { List/map, List/concat, List/concatMap, Text/concatMapSep }
diff --git a/users/Profpatsch/emacs-tree-sitter-move/README.md b/users/Profpatsch/emacs-tree-sitter-move/README.md
new file mode 100644
index 0000000000..ae8d763d61
--- /dev/null
+++ b/users/Profpatsch/emacs-tree-sitter-move/README.md
@@ -0,0 +1,5 @@
+# emacs-tree-sitter-move
+
+An experiment in whether we can implement structural editing in emacs using the tree-sitter parser.
+
+What currently works: loading a tree-sitter grammar, navigating the AST left/right/up/down.
diff --git a/users/Profpatsch/execline/ExecHelpers.hs b/users/Profpatsch/execline/ExecHelpers.hs
new file mode 100644
index 0000000000..438047b2b9
--- /dev/null
+++ b/users/Profpatsch/execline/ExecHelpers.hs
@@ -0,0 +1,48 @@
+{-# LANGUAGE DerivingStrategies #-}
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE QuasiQuotes #-}
+{-# LANGUAGE TypeApplications #-}
+
+module ExecHelpers where
+
+import Data.String (IsString)
+import MyPrelude
+import qualified System.Exit as Sys
+
+newtype CurrentProgramName = CurrentProgramName { unCurrentProgramName :: Text }
+  deriving newtype (Show, Eq, Ord, IsString)
+
+-- | Exit 1 to signify a generic expected error
+-- (e.g. something that sometimes just goes wrong, like a nix build).
+dieExpectedError :: CurrentProgramName -> Text -> IO a
+dieExpectedError = dieWith 1
+
+-- | Exit 100 to signify a user error (“the user is holding it wrong”).
+--  This is a permanent error, if the program is executed the same way
+-- it should crash with 100 again.
+dieUserError :: CurrentProgramName -> Text -> IO a
+dieUserError = dieWith 100
+
+-- |  Exit 101 to signify an unexpected crash (failing assertion or panic).
+diePanic :: CurrentProgramName -> Text -> IO a
+diePanic = dieWith 101
+
+-- | Exit 111 to signify a temporary error (such as resource exhaustion)
+dieTemporary :: CurrentProgramName -> Text -> IO a
+dieTemporary = dieWith 111
+
+-- |  Exit 126 to signify an environment problem
+-- (the user has set up stuff incorrectly so the program cannot work)
+dieEnvironmentProblem :: CurrentProgramName -> Text -> IO a
+dieEnvironmentProblem = dieWith 126
+
+-- | Exit 127 to signify a missing executable.
+dieMissingExecutable :: CurrentProgramName -> Text -> IO a
+dieMissingExecutable = dieWith 127
+
+dieWith :: Natural -> CurrentProgramName -> Text -> IO a
+dieWith status currentProgramName msg = do
+  putStderrLn [fmt|{currentProgramName & unCurrentProgramName}: {msg}|]
+  Sys.exitWith
+    (Sys.ExitFailure (status & fromIntegral @Natural @Int))
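+
+-- A usage sketch (the program name and message are hypothetical):
+--
+-- > main :: IO ()
+-- > main = do
+-- >   args <- System.Environment.getArgs
+-- >   case args of
+-- >     [] -> dieUserError "my-tool" "expected at least one argument"
+-- >     _ -> pure ()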
diff --git a/users/Profpatsch/execline/default.nix b/users/Profpatsch/execline/default.nix
index 752774e6ad..04d07895c6 100644
--- a/users/Profpatsch/execline/default.nix
+++ b/users/Profpatsch/execline/default.nix
@@ -1,11 +1,22 @@
 { depot, pkgs, lib, ... }:
 
 let
-  exec-helpers = depot.nix.writers.rustSimpleLib
-    {
-      name = "exec-helpers";
-    }
-    (builtins.readFile ./exec_helpers.rs);
+  exec-helpers-hs = pkgs.haskellPackages.mkDerivation {
+    pname = "exec-helpers";
+    version = "0.1.0";
+
+    src = depot.users.Profpatsch.exactSource ./. [
+      ./exec-helpers.cabal
+      ./ExecHelpers.hs
+    ];
+
+    libraryHaskellDepends = [
+      depot.users.Profpatsch.my-prelude
+    ];
+
+    isLibrary = true;
+    license = lib.licenses.mit;
+  };
 
   print-one-env = depot.nix.writers.rustSimple
     {
@@ -28,10 +39,32 @@ let
     }
   '';
 
+  setsid = depot.nix.writers.rustSimple
+    {
+      name = "setsid";
+      dependencies = [
+        depot.users.Profpatsch.execline.exec-helpers
+        depot.third_party.rust-crates.libc
+      ];
+    } ''
+    use std::os::unix::ffi::OsStrExt;
+    use std::ffi::OsStr;
+
+    fn main() {
+      let (args, prog) = exec_helpers::args_for_exec("setsid", 1);
+      let envvar = OsStr::from_bytes(&args.get(0).expect("first argument must be envvar name to set"));
+      let sid: i32 = unsafe { libc::setsid() };
+      std::env::set_var(envvar, format!("{}", sid));
+      let env: Vec<(&[u8], &[u8])> = vec![];
+      exec_helpers::exec_into_args("setsid", prog, env);
+    }
+  '';
+
 in
 depot.nix.readTree.drvTargets {
   inherit
-    exec-helpers
+    exec-helpers-hs
     print-one-env
+    setsid
     ;
 }
diff --git a/users/Profpatsch/execline/exec-helpers.cabal b/users/Profpatsch/execline/exec-helpers.cabal
new file mode 100644
index 0000000000..b472ff6bd5
--- /dev/null
+++ b/users/Profpatsch/execline/exec-helpers.cabal
@@ -0,0 +1,14 @@
+cabal-version:      3.0
+name:               exec-helpers
+version:            0.1.0.0
+author:             Profpatsch
+maintainer:         mail@profpatsch.de
+
+library
+    exposed-modules:          ExecHelpers
+
+    build-depends:
+        base >=4.15 && <5,
+        my-prelude
+
+    default-language: Haskell2010
diff --git a/users/Profpatsch/execline/exec-helpers/Cargo.lock b/users/Profpatsch/execline/exec-helpers/Cargo.lock
new file mode 100644
index 0000000000..1753cc949d
--- /dev/null
+++ b/users/Profpatsch/execline/exec-helpers/Cargo.lock
@@ -0,0 +1,7 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "exec_helpers"
+version = "0.1.0"
diff --git a/users/Profpatsch/execline/exec-helpers/Cargo.toml b/users/Profpatsch/execline/exec-helpers/Cargo.toml
new file mode 100644
index 0000000000..6642b66ee3
--- /dev/null
+++ b/users/Profpatsch/execline/exec-helpers/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "exec_helpers"
+version = "0.1.0"
+edition = "2021"
+
+[lib]
+name = "exec_helpers"
+path = "exec_helpers.rs"
diff --git a/users/Profpatsch/execline/exec-helpers/default.nix b/users/Profpatsch/execline/exec-helpers/default.nix
new file mode 100644
index 0000000000..5545d41d9d
--- /dev/null
+++ b/users/Profpatsch/execline/exec-helpers/default.nix
@@ -0,0 +1,6 @@
+{ depot, ... }:
+depot.nix.writers.rustSimpleLib
+{
+  name = "exec-helpers";
+}
+  (builtins.readFile ./exec_helpers.rs)
diff --git a/users/Profpatsch/execline/exec_helpers.rs b/users/Profpatsch/execline/exec-helpers/exec_helpers.rs
index a57cbca353..a57cbca353 100644
--- a/users/Profpatsch/execline/exec_helpers.rs
+++ b/users/Profpatsch/execline/exec-helpers/exec_helpers.rs
diff --git a/users/Profpatsch/fafo.jpg b/users/Profpatsch/fafo.jpg
new file mode 100644
index 0000000000..78f11d208e
--- /dev/null
+++ b/users/Profpatsch/fafo.jpg
Binary files differ
diff --git a/users/Profpatsch/haskell-module-deps/README.md b/users/Profpatsch/haskell-module-deps/README.md
new file mode 100644
index 0000000000..b4f35beac5
--- /dev/null
+++ b/users/Profpatsch/haskell-module-deps/README.md
@@ -0,0 +1,5 @@
+# haskell-module-deps
+
+An executable that, when run in a project directory containing `.hs` files under `./src`, will render a graph (png or pdf) of how those modules import each other, transitively.
+
+Useful for getting an overview, finding weird import edges, figuring out how to get more compilation parallelism into your Haskell project.
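+
+A usage sketch (the executable names come from `default.nix` in this directory; the project path is hypothetical):
+
+```console
+haskell-module-deps-png ~/code/my-project > modules.png
+```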
diff --git a/users/Profpatsch/haskell-module-deps/default.nix b/users/Profpatsch/haskell-module-deps/default.nix
new file mode 100644
index 0000000000..71cc0a5b0d
--- /dev/null
+++ b/users/Profpatsch/haskell-module-deps/default.nix
@@ -0,0 +1,55 @@
+{ depot, pkgs, lib, ... }:
+
+let
+  bins = depot.nix.getBins pkgs.zathura [ "zathura" ]
+    // depot.nix.getBins pkgs.haskellPackages.graphmod [ "graphmod" ]
+    // depot.nix.getBins pkgs.graphviz [ "dot" ]
+  ;
+
+  # Display a graph of all modules in a project and how they depend on each other.
+  # Takes the project directory as argument.
+  # Open in zathura.
+  haskell-module-deps = depot.nix.writeExecline "haskell-module-deps" { } [
+    "pipeline"
+    [ haskell-module-deps-with-filetype "pdf" "$@" ]
+    bins.zathura
+    "-"
+  ];
+
+  # Display a graph of all modules in a project and how they depend on each other.
+  # Takes the project directory as argument.
+  # Print a png to stdout.
+  haskell-module-deps-png = depot.nix.writeExecline "haskell-module-deps-png" { } [
+    haskell-module-deps-with-filetype
+    "png"
+    "$@"
+  ];
+
+  # Display a graph of all modules in a project and how they depend on each other.
+  # Takes the file type to generate as first argument
+  # and the project directory as second argument.
+  haskell-module-deps-with-filetype = pkgs.writers.writeBash "haskell-module-deps-with-filetype" ''
+    set -euo pipefail
+    shopt -s globstar
+    filetype="$1"
+    rootDir="$2"
+    ${bins.graphmod} \
+      ${/*silence warnings for missing external dependencies*/""} \
+      --quiet \
+      ${/*applies some kind of import simplification*/""} \
+      --prune-edges \
+      "$rootDir"/src/**/*.hs \
+      | ${bins.dot} \
+          ${/*otherwise it’s a bit cramped*/""} \
+          -Gsize="20,20!" \
+          -T"$filetype"
+  '';
+
+in
+depot.nix.readTree.drvTargets {
+  inherit
+    haskell-module-deps
+    haskell-module-deps-png
+    haskell-module-deps-with-filetype
+    ;
+}
diff --git a/users/Profpatsch/haskell-module-deps/example-output-dhall-haskell.png b/users/Profpatsch/haskell-module-deps/example-output-dhall-haskell.png
new file mode 100644
index 0000000000..53725c49e8
--- /dev/null
+++ b/users/Profpatsch/haskell-module-deps/example-output-dhall-haskell.png
Binary files differ
diff --git a/users/Profpatsch/hie.yaml b/users/Profpatsch/hie.yaml
new file mode 100644
index 0000000000..1b5ae942ad
--- /dev/null
+++ b/users/Profpatsch/hie.yaml
@@ -0,0 +1,36 @@
+cradle:
+  cabal:
+    - path: "./my-prelude"
+      component: "lib:my-prelude"
+    - path: "./my-webstuff"
+      component: "lib:my-webstuff"
+    - path: "./netencode"
+      component: "lib:netencode"
+    - path: "./arglib"
+      component: "lib:arglib-netencode"
+    - path: "./execline"
+      component: "lib:exec-helpers"
+    - path: "./htmx-experiment/src"
+      component: "lib:htmx-experiment"
+    - path: "./htmx-experiment/Main.hs"
+      component: "htmx-experiment:exe:htmx-experiment"
+    - path: "./mailbox-org/src"
+      component: "lib:mailbox-org"
+    - path: "./mailbox-org/MailboxOrg.hs"
+      component: "mailbox-org:exe:mailbox-org"
+    - path: "./cas-serve/CasServe.hs"
+      component: "cas-serve:exe:cas-serve"
+    - path: "./jbovlaste-sqlite/JbovlasteSqlite.hs"
+      component: "jbovlaste-sqlite:exe:jbovlaste-sqlite"
+    - path: "./whatcd-resolver/src"
+      component: "lib:whatcd-resolver"
+    - path: "./whatcd-resolver/Main.hs"
+      component: "whatcd-resolver:exe:whatcd-resolver"
+    - path: "./openlab-tools/src"
+      component: "lib:openlab-tools"
+    - path: "./openlab-tools/Main.hs"
+      component: "openlab-tools:exe:openlab-tools"
+    - path: "./httzip/Httzip.hs"
+      component: "httzip:exe:httzip"
+    - path: "./my-xmonad/Xmonad.hs"
+      component: "my-xmonad:exe:xmonad"
diff --git a/users/Profpatsch/htmx-experiment/Main.hs b/users/Profpatsch/htmx-experiment/Main.hs
new file mode 100644
index 0000000000..29ce8610ff
--- /dev/null
+++ b/users/Profpatsch/htmx-experiment/Main.hs
@@ -0,0 +1,4 @@
+import HtmxExperiment qualified
+
+main :: IO ()
+main = HtmxExperiment.main
diff --git a/users/Profpatsch/htmx-experiment/default.nix b/users/Profpatsch/htmx-experiment/default.nix
new file mode 100644
index 0000000000..ef1a28bd2b
--- /dev/null
+++ b/users/Profpatsch/htmx-experiment/default.nix
@@ -0,0 +1,46 @@
+{ depot, pkgs, lib, ... }:
+
+let
+  htmx-experiment = pkgs.haskellPackages.mkDerivation {
+    pname = "htmx-experiment";
+    version = "0.1.0";
+
+    src = depot.users.Profpatsch.exactSource ./. [
+      ./htmx-experiment.cabal
+      ./Main.hs
+      ./src/HtmxExperiment.hs
+      ./src/ServerErrors.hs
+      ./src/ValidationParseT.hs
+    ];
+
+    libraryHaskellDepends = [
+      depot.users.Profpatsch.my-webstuff
+      pkgs.haskellPackages.blaze-html
+      pkgs.haskellPackages.blaze-markup
+      pkgs.haskellPackages.bytestring
+      pkgs.haskellPackages.dlist
+      pkgs.haskellPackages.http-types
+      pkgs.haskellPackages.ihp-hsx
+      pkgs.haskellPackages.monad-logger
+      pkgs.haskellPackages.pa-error-tree
+      pkgs.haskellPackages.pa-field-parser
+      pkgs.haskellPackages.pa-label
+      pkgs.haskellPackages.pa-prelude
+      pkgs.haskellPackages.selective
+      pkgs.haskellPackages.text
+      pkgs.haskellPackages.unliftio
+      pkgs.haskellPackages.wai
+      pkgs.haskellPackages.warp
+
+    ];
+
+    isLibrary = false;
+    isExecutable = true;
+    license = lib.licenses.mit;
+  };
+
+
+in
+htmx-experiment
diff --git a/users/Profpatsch/htmx-experiment/htmx-experiment.cabal b/users/Profpatsch/htmx-experiment/htmx-experiment.cabal
new file mode 100644
index 0000000000..e9a0d93614
--- /dev/null
+++ b/users/Profpatsch/htmx-experiment/htmx-experiment.cabal
@@ -0,0 +1,89 @@
+cabal-version:      3.0
+name:               htmx-experiment
+version:            0.1.0.0
+author:             Profpatsch
+maintainer:         mail@profpatsch.de
+
+common common-options
+  ghc-options:
+      -Wall
+      -Wno-type-defaults
+      -Wunused-packages
+      -Wredundant-constraints
+      -fwarn-missing-deriving-strategies
+
+  -- See https://downloads.haskell.org/ghc/latest/docs/users_guide/exts.html
+  -- for a description of all these extensions
+  default-extensions:
+      -- Infer Applicative instead of Monad where possible
+    ApplicativeDo
+
+    -- Allow literal strings to be Text
+    OverloadedStrings
+
+    -- Syntactic sugar improvements
+    LambdaCase
+    MultiWayIf
+
+    -- Makes the (deprecated) usage of * instead of Data.Kind.Type an error
+    NoStarIsType
+
+    -- Convenient and crucial to deal with ambiguous field names, commonly
+    -- known as RecordDotSyntax
+    OverloadedRecordDot
+
+    -- does not export record fields as functions, use OverloadedRecordDot to access instead
+    NoFieldSelectors
+
+    -- Record punning
+    RecordWildCards
+
+    -- Improved Deriving
+    DerivingStrategies
+    DerivingVia
+
+    -- Type-level strings
+    DataKinds
+
+    -- to enable the `type` keyword in import lists (ormolu uses this automatically)
+    ExplicitNamespaces
+
+  default-language: GHC2021
+
+library
+    import: common-options
+    exposed-modules:
+      HtmxExperiment,
+      ServerErrors,
+      ValidationParseT
+    hs-source-dirs: ./src
+
+    build-depends:
+        base >=4.15 && <5,
+        -- http-api-data
+        blaze-html,
+        blaze-markup,
+        bytestring,
+        dlist,
+        http-types,
+        ihp-hsx,
+        monad-logger,
+        pa-error-tree,
+        pa-field-parser,
+        pa-label,
+        pa-prelude,
+        my-webstuff,
+        selective,
+        text,
+        unliftio,
+        wai,
+        warp
+
+
+executable htmx-experiment
+    import: common-options
+    main-is: Main.hs
+
+    build-depends:
+        htmx-experiment,
+        base >=4.15 && <5,
diff --git a/users/Profpatsch/htmx-experiment/src/HtmxExperiment.hs b/users/Profpatsch/htmx-experiment/src/HtmxExperiment.hs
new file mode 100644
index 0000000000..225206a584
--- /dev/null
+++ b/users/Profpatsch/htmx-experiment/src/HtmxExperiment.hs
@@ -0,0 +1,377 @@
+{-# LANGUAGE AllowAmbiguousTypes #-}
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+{-# LANGUAGE QuasiQuotes #-}
+
+module HtmxExperiment where
+
+import Control.Category qualified as Cat
+import Control.Exception qualified as Exc
+import Control.Monad.Logger
+import Control.Selective (Selective (select))
+import Control.Selective qualified as Selective
+import Data.ByteString qualified as Bytes
+import Data.DList (DList)
+import Data.Functor.Compose
+import Data.List qualified as List
+import Data.Maybe (maybeToList)
+import Data.Maybe qualified as Maybe
+import Data.Monoid qualified as Monoid
+import Data.Text qualified as Text
+import FieldParser hiding (nonEmpty)
+import GHC.TypeLits (KnownSymbol, symbolVal)
+import IHP.HSX.QQ (hsx)
+import Label
+import Multipart2 (FormValidation (FormValidation), FormValidationResult, MultipartParseT, failFormValidation)
+import Multipart2 qualified as Multipart
+import Network.HTTP.Types qualified as Http
+import Network.Wai qualified as Wai
+import Network.Wai.Handler.Warp qualified as Warp
+import PossehlAnalyticsPrelude
+import ServerErrors (ServerError (..), throwUserErrorTree)
+import Text.Blaze.Html5 (Html, docTypeHtml)
+import Text.Blaze.Renderer.Utf8 (renderMarkup)
+import UnliftIO (MonadUnliftIO (withRunInIO))
+import Prelude hiding (compare)
+
+-- data Routes
+--   = Root
+--   | Register
+--   | RegisterSubmit
+
+-- data Router url = Router
+--   { parse :: Routes.URLParser url,
+--     print :: url -> [Text]
+--   }
+
+-- routerPathInfo :: Routes.PathInfo a => Router a
+-- routerPathInfo =
+--   Router
+--     { parse = Routes.fromPathSegments,
+--       print = Routes.toPathSegments
+--     }
+
+-- subroute :: Text -> Router subUrl -> Router subUrl
+-- subroute path inner =
+--   Router
+--     { parse = Routes.segment path *> inner.parse,
+--       print = \url -> path : inner.print url
+--     }
+
+-- routerLeaf :: a -> Router a
+-- routerLeaf a =
+--   Router
+--     { parse = pure a,
+--       print = \_ -> []
+--     }
+
+-- routerToSite ::
+--   ((url -> [(Text, Maybe Text)] -> Text) -> url -> a) ->
+--   Router url ->
+--   Routes.Site url a
+-- routerToSite handler router =
+--   Routes.Site
+--     { handleSite = handler,
+--       formatPathSegments = (\x -> (x, [])) . router.print,
+--       parsePathSegments = Routes.parseSegments router.parse
+--     }
+
+-- handlers queryParams = \case
+--   Root -> "root"
+--   Register -> "register"
+--   RegisterSubmit -> "registersubmit"
+
+newtype Router handler from to = Router {unRouter :: from -> [Text] -> (Maybe handler, to)}
+  deriving
+    (Functor, Applicative)
+    via ( Compose
+            ((->) from)
+            ( Compose
+                ((->) [Text])
+                ((,) (Monoid.First handler))
+            )
+        )
+
+data Routes r handler = Routes
+  { users :: r (Label "register" handler)
+  }
+
+data Endpoint handler subroutes = Endpoint
+  { root :: handler,
+    subroutes :: subroutes
+  }
+  deriving stock (Show, Eq)
+
+data Handler = Handler {url :: Text}
+
+-- myRoute :: Router () from (Endpoint (Routes (Endpoint ()) Handler) b)
+-- myRoute =
+--   root $ do
+--     users <- fixed "users" () $ fixedFinal @"register" ()
+--     pure $ Routes {..}
+
+-- -- | the root and its children
+-- root :: routes from a -> routes from (Endpoint a b)
+-- root = todo
+
+-- | A fixed sub-route with children
+fixed :: Text -> handler -> Router handler from a -> Router handler from (Endpoint handler a)
+fixed route handler inner = Router $ \from -> \case
+  [final]
+    | route == final ->
+        ( Just handler,
+          Endpoint
+            { root = handler,
+              subroutes = (inner.unRouter from []) & snd
+            }
+        )
+  (this : more)
+    | route == this ->
+        ( (inner.unRouter from more) & fst,
+          Endpoint
+            { root = handler,
+              subroutes = (inner.unRouter from more) & snd
+            }
+        )
+  _ -> (Nothing, Endpoint {root = handler, subroutes = (inner.unRouter from []) & snd})
+
+-- integer ::
+--   forall routeName routes from a.
+--   Router (T2 routeName Integer "more" from) a ->
+--   Router from (Endpoint () a)
+-- integer inner = Router $ \case
+--   (path, []) ->
+--     runFieldParser Field.signedDecimal path
+--   (path, more) ->
+--     inner.unRouter more (runFieldParser Field.signedDecimal path)
+
+-- -- | A leaf route
+-- fixedFinal :: forall route handler from. (KnownSymbol route) => handler -> Router handler from (Label route Handler)
+-- fixedFinal handler = do
+--   let route = symbolText @route
+--   Rounter $ \from -> \case
+--     [final] | route == final -> (Just handler, label @route (Handler from))
+--     _ -> (Nothing, label @route handler)
+
+-- | Get the text of a symbol via TypeApplications
+symbolText :: forall sym. KnownSymbol sym => Text
+symbolText = do
+  symbolVal (Proxy :: Proxy sym)
+    & stringToText
+
+main :: IO ()
+main = runStderrLoggingT @IO $ do
+  withRunInIO @(LoggingT IO) $ \runInIO -> do
+    Warp.run 8080 $ \req respond -> catchServerError respond $ do
+      let respondOk res = Wai.responseLBS Http.ok200 [] (renderMarkup res)
+      let htmlRoot inner =
+            docTypeHtml
+              [hsx|
+            <head>
+              <script src="https://unpkg.com/htmx.org@1.9.2" integrity="sha384-L6OqL9pRWyyFU3+/bjdSri+iIphTN/bvYyM37tICVyOJkWZLpP2vGn6VUEXgzg6h" crossorigin="anonymous"></script>
+            </head>
+            <body>
+              {inner}
+            </body>
+        |]
+      res <-
+        case req & Wai.pathInfo of
+          [] ->
+            pure $
+              respondOk $
+                htmlRoot
+                  [hsx|
+                      <div id="register_buttons">
+                        <button hx-get="/register" hx-target="body" hx-push-url="/register">Register an account</button>
+                        <button hx-get="/login" hx-target="body">Login</button>
+                      </div>
+              |]
+          ["register"] ->
+            pure $ respondOk $ fullEndpoint req $ \case
+              FullPage -> htmlRoot $ registerForm mempty
+              Snippet -> registerForm mempty
+          ["register", "submit"] -> do
+            FormValidation body <-
+              req
+                & parsePostBody
+                  registerFormValidate
+                & runInIO
+            case body of
+              -- if the parse succeeds, ignore any of the errors
+              (_, Just a) -> pure $ respondOk $ htmlRoot [hsx|{a}|]
+              (errs, Nothing) -> pure $ respondOk $ htmlRoot $ registerForm errs
+          other ->
+            pure $ respondOk [hsx|no route here at {other}|]
+      respond $ res
+  where
+    catchServerError respond io =
+      Exc.catch io (\(ex :: ServerError) -> respond $ Wai.responseLBS ex.status [] ex.errBody)
+
+parsePostBody ::
+  (MonadIO m, MonadThrow m, MonadLogger m) =>
+  MultipartParseT backend m b ->
+  Wai.Request ->
+  m b
+parsePostBody parser req =
+  Multipart.parseMultipartOrThrow throwUserErrorTree parser req
+
+-- migrate :: IO (Label "numberOfRowsAffected" Natural)
+-- migrate =
+--   Init.runAppTest $ do
+--     runTransaction $
+--       execute
+--         [sql|
+--         CREATE TABLE IF NOT EXISTS experiments.users (
+--           id SERIAL PRIMARY KEY,
+--           email TEXT NOT NULL,
+--           registration_pending_token TEXT NULL
+--         )
+--         |]
+--         ()
+
+data HsxRequest
+  = Snippet
+  | FullPage
+
+fullEndpoint :: Wai.Request -> (HsxRequest -> t) -> t
+fullEndpoint req act = do
+  let isHxRequest = req & Wai.requestHeaders & List.find (\h -> (h & fst) == "HX-Request") & Maybe.isJust
+  if isHxRequest
+    then act Snippet
+    else act FullPage
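+
+-- htmx sets the @HX-Request: true@ header on every request it issues itself,
+-- so a plain browser navigation gets the FullPage variant, while an
+-- hx-get/hx-post from the page gets only the Snippet.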
+
+data FormField = FormField
+  { label_ :: Html,
+    required :: Bool,
+    id_ :: Text,
+    name :: ByteString,
+    type_ :: Text,
+    placeholder :: Maybe Text
+  }
+
+inputHtml ::
+  FormField ->
+  DList FormValidationResult ->
+  Html
+inputHtml (FormField {..}) validationResults = do
+  let validation =
+        validationResults
+          & toList
+          & mapMaybe
+            ( \v ->
+                if v.formFieldName == name
+                  then
+                    Just
+                      ( T2
+                          (label @"errors" (maybeToList v.hasError))
+                          (label @"originalValue" (Monoid.First (Just v.originalValue)))
+                      )
+                  else Nothing
+            )
+          & mconcat
+  let isFirstError =
+        validationResults
+          & List.find (\res -> Maybe.isJust res.hasError && res.formFieldName == name)
+          & Maybe.isJust
+  [hsx|
+      <label for={id_}>{label_}
+        <input
+          autofocus={isFirstError}
+          onfocus="this.select()"
+          required={required}
+          id={id_}
+          name={name}
+          type={type_}
+          placeholder={placeholder}
+          value={validation.originalValue.getFirst}
+        />
+        <p id="{id_}.validation">{validation.errors & nonEmpty <&> toList <&> map prettyError <&> Text.intercalate "; "}</p>
+      </label>
+  |]
+
+registerForm :: DList FormValidationResult -> Html
+registerForm validationErrors =
+  let fields =
+        mconcat
+          [ inputHtml $
+              FormField
+                { label_ = "Your Email:",
+                  required = True,
+                  id_ = "register_email",
+                  name = "email",
+                  type_ = "email",
+                  placeholder = Just "your@email.com"
+                },
+            inputHtml $
+              FormField
+                { label_ = "New password:",
+                  required = True,
+                  id_ = "register_password",
+                  name = "password",
+                  type_ = "password",
+                  placeholder = Just "hunter2"
+                },
+            inputHtml $
+              FormField
+                { label_ = "Repeated password:",
+                  required = True,
+                  id_ = "register_password_repeated",
+                  name = "password_repeated",
+                  type_ = "password",
+                  placeholder = Just "hunter2"
+                }
+          ]
+   in [hsx|
+  <form hx-post="/register/submit">
+    <fieldset>
+      <legend>Register user</legend>
+      {fields validationErrors}
+      <button id="register_submit_button" name="register">
+        Register
+      </button>
+    </fieldset>
+  </form>
+  |]
+
+registerFormValidate ::
+  Applicative m =>
+  MultipartParseT
+    w
+    m
+    (FormValidation (T2 "email" ByteString "password" ByteString))
+registerFormValidate = do
+  let emailFP = FieldParser $ \b ->
+        if
+            | Bytes.elem (charToWordUnsafe '@') b -> Right b
+            | otherwise -> Left [fmt|This is not an email address: "{b & bytesToTextUtf8Unsafe}"|]
+
+  getCompose @(MultipartParseT _ _) @FormValidation $ do
+    email <- Compose $ Multipart.fieldLabel' @"email" "email" emailFP
+    password <-
+      aEqB
+        "password_repeated"
+        "The two password fields must be the same"
+        (Compose $ Multipart.field' "password" Cat.id)
+        (\field -> Compose $ Multipart.field' field Cat.id)
+    pure $ T2 email (label @"password" password)
+  where
+    aEqB field validateErr fCompare fValidate =
+      Selective.fromMaybeS
+        -- TODO: this check only reached if the field itself is valid. Could we combine those errors?
+        (Compose $ pure $ failFormValidation (T2 (label @"formFieldName" field) (label @"originalValue" "")) validateErr)
+        $ do
+          compare <- fCompare
+          validate <- fValidate field
+          pure $ if compare == validate then Just validate else Nothing
+
+-- | A lifted version of 'Data.Maybe.fromMaybe'.
+fromMaybeS :: Selective f => f a -> f (Maybe a) -> f a
+fromMaybeS ifNothing fma =
+  select
+    ( fma <&> \case
+        Nothing -> Left ()
+        Just a -> Right a
+    )
+    ( do
+        a <- ifNothing
+        pure (\() -> a)
+    )
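+
+-- For intuition, with a monadic 'Selective' instance such as Maybe's
+-- (where select behaves like selectM):
+--
+-- fromMaybeS (Just 0) (Just (Just 5)) == Just 5
+-- fromMaybeS (Just 0) (Just Nothing)  == Just 0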
diff --git a/users/Profpatsch/htmx-experiment/src/ServerErrors.hs b/users/Profpatsch/htmx-experiment/src/ServerErrors.hs
new file mode 100644
index 0000000000..0fca7ab464
--- /dev/null
+++ b/users/Profpatsch/htmx-experiment/src/ServerErrors.hs
@@ -0,0 +1,244 @@
+{-# LANGUAGE DeriveAnyClass #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE TemplateHaskell #-}
+
+module ServerErrors where
+
+import Control.Exception (Exception)
+import Control.Monad.Logger (MonadLogger, logError, logWarn)
+import Data.ByteString.Lazy qualified as Bytes.Lazy
+import Data.Error.Tree
+import Network.HTTP.Types qualified as Http
+import PossehlAnalyticsPrelude
+
+data ServerError = ServerError
+  { status :: Http.Status,
+    errBody :: Bytes.Lazy.ByteString
+  }
+  deriving stock (Show)
+  deriving anyclass (Exception)
+
+emptyServerError :: Http.Status -> ServerError
+emptyServerError status = ServerError {status, errBody = ""}
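+
+-- The web server’s top-level handler is expected to catch 'ServerError' and
+-- render it, e.g. (sketch, mirroring the handler in HtmxExperiment.main):
+--
+-- Exc.catch io (\(ex :: ServerError) -> respond $ Wai.responseLBS ex.status [] ex.errBody)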
+
+-- | Throw a user error.
+--
+-- “User” here is a client using our API, not a human user.
+-- So we throw a `HTTP 400` error, which means the API was used incorrectly.
+--
+-- We also log the error as a warning, because it probably signifies a programming bug in our client.
+--
+-- If you need to display a message to a human user, return a `FrontendResponse`
+-- or a structured type with translation keys (so we can localize the errors).
+throwUserError ::
+  (MonadLogger m, MonadThrow m) =>
+  -- | The error to log & throw to the user
+  Error ->
+  m b
+throwUserError err = do
+  -- TODO: should we make this into a macro to keep the line numbers?
+  $logWarn (err & errorContext "There was a “user holding it wrong” error, check the client code" & prettyError)
+  throwM
+    ServerError
+      { status = Http.badRequest400,
+        errBody = err & prettyError & textToBytesUtf8 & toLazyBytes
+      }
+
+-- | Throw a user error.
+--
+-- “User” here is a client using our API, not a human user.
+-- So we throw a `HTTP 400` error, which means the API was used incorrectly.
+--
+-- We also log the error as a warning, because it probably signifies a programming bug in our client.
+--
+-- If you need to display a message to a human user, return a `FrontendResponse`
+-- or a structured type with translation keys (so we can localize the errors).
+throwUserErrorTree ::
+  (MonadLogger m, MonadThrow m) =>
+  -- | The error to log & throw to the user
+  ErrorTree ->
+  m b
+throwUserErrorTree err = do
+  -- TODO: should we make this into a macro to keep the line numbers?
+  $logWarn (err & nestedError "There was a “user holding it wrong” error, check the client code" & prettyErrorTree)
+  throwM
+    ServerError
+      { status = Http.badRequest400,
+        errBody = err & prettyErrorTree & textToBytesUtf8 & toLazyBytes
+      }
+
+-- | Unwrap the `Either` and if `Left` throw a user error.
+--
+-- Intended to use in a pipeline, e.g.:
+--
+-- @@
+-- doSomething
+--   >>= orUserError "Oh no something did not work"
+--   >>= doSomethingElse
+-- @@
+--
+-- “User” here is a client using our API, not a human user.
+-- So we throw a `HTTP 400` error, which means the API was used incorrectly.
+--
+-- We also log the error as a warning, because it probably signifies a programming bug in our client.
+--
+-- If you need to display a message to a human user, return a `FrontendResponse`
+-- or a structured type with translation keys (so we can localize the errors).
+orUserError ::
+  (MonadThrow m, MonadLogger m) =>
+  -- | The message to add as a context to the error being thrown
+  Text ->
+  -- | Result to unwrap and potentially throw
+  Either Error a ->
+  m a
+orUserError outerMsg eErrA =
+  orUserErrorTree outerMsg (first singleError eErrA)
+
+-- | Unwrap the `Either` and if `Left` throw a user error. Will pretty-print the 'ErrorTree'
+--
+-- Intended to use in a pipeline, e.g.:
+--
+-- @@
+-- doSomething
+--   >>= orUserErrorTree "Oh no something did not work"
+--   >>= doSomethingElse
+-- @@
+--
+-- “User” here is a client using our API, not a human user.
+-- So we throw a `HTTP 400` error, which means the API was used incorrectly.
+--
+-- We also log the error as a warning, because it probably signifies a programming bug in our client.
+--
+-- If you need to display a message to a human user, return a `FrontendResponse`
+-- or a structured type with translation keys (so we can localize the errors).
+orUserErrorTree ::
+  (MonadThrow m, MonadLogger m) =>
+  -- | The message to add as a context to the 'ErrorTree' being thrown
+  Text ->
+  -- | Result to unwrap and potentially throw
+  Either ErrorTree a ->
+  m a
+orUserErrorTree outerMsg = \case
+  Right a -> pure a
+  Left err -> do
+    -- TODO: this outer message should probably be added as a separate root instead of adding to the root error?
+    let tree = errorTreeContext outerMsg err
+    -- TODO: should we make this into a macro to keep the line numbers?
+    $logWarn (errorTreeContext "There was a “user holding it wrong” error, check the client code" tree & prettyErrorTree)
+    throwM
+      ServerError
+        { status = Http.badRequest400,
+          errBody = tree & prettyErrorTree & textToBytesUtf8 & toLazyBytes
+        }
+
+-- | Throw an internal error.
+--
+-- “Internal” here means some assertion that we depend on failed,
+-- e.g. some database request returned a wrong result/number of results
+-- or some invariant that we expect to hold failed.
+--
+-- This prints the full error to the log,
+-- and returns a “HTTP 500” error without the message.
+--
+-- If you want to signify a mishandling of the API (e.g. a wrong request), throw a `userError`.
+-- If you need to display a message to a human user, return a `FrontendResponse`
+-- or a structured type with translation keys (so we can localize the errors).
+throwInternalError ::
+  (MonadLogger m, MonadThrow m) =>
+  -- | The error to log internally
+  Error ->
+  m b
+throwInternalError err = do
+  -- TODO: should we make this into a macro to keep the line numbers?
+  $logError
+    (err & prettyError)
+  throwM $ emptyServerError Http.internalServerError500
+
+-- | Throw an internal error.
+--
+-- “Internal” here means some assertion that we depend on failed,
+-- e.g. some database request returned a wrong result/number of results
+-- or some invariant that we expect to hold failed.
+--
+-- This prints the full error to the log,
+-- and returns a “HTTP 500” error without the message.
+--
+-- If you want to signify a mishandling of the API (e.g. a wrong request), throw a `userError`.
+-- If you need to display a message to a human user, return a `FrontendResponse`
+-- or a structured type with translation keys (so we can localize the errors).
+throwInternalErrorTree ::
+  (MonadLogger m, MonadThrow m) =>
+  -- | The error to log internally
+  ErrorTree ->
+  m b
+throwInternalErrorTree err = do
+  -- TODO: should we make this into a macro to keep the line numbers?
+  $logError
+    (err & prettyErrorTree)
+  throwM $ emptyServerError Http.internalServerError500
+
+-- | Unwrap the `Either` and if `Left` throw an internal error.
+--
+-- Intended to use in a pipeline, e.g.:
+--
+-- @@
+-- doSomething
+--   >>= orInternalError "Oh no something did not work"
+--   >>= doSomethingElse
+-- @@
+--
+-- “Internal” here means some assertion that we depend on failed,
+-- e.g. some database request returned a wrong result/number of results
+-- or some invariant that we expect to hold failed.
+--
+-- This prints the full error to the log,
+-- and returns a “HTTP 500” error without the message.
+--
+-- If you want to signify a mishandling of the API (e.g. a wrong request), throw a `userError`.
+-- If you need to display a message to a human user, return a `FrontendResponse`
+-- or a structured type with translation keys (so we can localize the errors).
+orInternalError ::
+  (MonadThrow m, MonadLogger m) =>
+  -- | The message to add as a context to the error being thrown
+  Text ->
+  -- | Result to unwrap and potentially throw
+  Either Error a ->
+  m a
+orInternalError outerMsg eErrA = orInternalErrorTree outerMsg (first singleError eErrA)
+
+-- | Unwrap the `Either` and if `Left` throw an internal error. Will pretty-print the 'ErrorTree'.
+--
+-- Intended to use in a pipeline, e.g.:
+--
+-- @@
+-- doSomething
+--   >>= orInternalErrorTree "Oh no something did not work"
+--   >>= doSomethingElse
+-- @@
+--
+-- “Internal” here means some assertion that we depend on failed,
+-- e.g. some database request returned a wrong result/number of results
+-- or some invariant that we expect to hold failed.
+--
+-- This prints the full error to the log,
+-- and returns a “HTTP 500” error without the message.
+--
+-- If you want to signify a mishandling of the API (e.g. a wrong request), throw a `userError`.
+-- If you need to display a message to a human user, return a `FrontendResponse`
+-- or a structured type with translation keys (so we can localize the errors).
+orInternalErrorTree ::
+  (MonadThrow m, MonadLogger m) =>
+  -- | The message to add as a context to the 'ErrorTree' being thrown
+  Text ->
+  -- | Result to unwrap and potentially throw
+  Either ErrorTree a ->
+  m a
+orInternalErrorTree outerMsg = \case
+  Right a -> pure a
+  Left err -> do
+    -- TODO: this outer message should probably be added as a separate root instead of adding to the root error?
+    let tree = errorTreeContext outerMsg err
+    -- TODO: should we make this into a macro to keep the line numbers?
+    $logError (tree & prettyErrorTree)
+    throwM $ emptyServerError Http.internalServerError500
diff --git a/users/Profpatsch/htmx-experiment/src/ValidationParseT.hs b/users/Profpatsch/htmx-experiment/src/ValidationParseT.hs
new file mode 100644
index 0000000000..ffb6c2f395
--- /dev/null
+++ b/users/Profpatsch/htmx-experiment/src/ValidationParseT.hs
@@ -0,0 +1,40 @@
+module ValidationParseT where
+
+import Control.Monad.Logger (MonadLogger)
+import Control.Selective (Selective)
+import Data.Error.Tree
+import Data.Functor.Compose (Compose (..))
+import PossehlAnalyticsPrelude
+import ServerErrors
+
+-- | A simple way to create an Applicative parser that parses from some environment.
+--
+-- Use with DerivingVia. Grep codebase for examples.
+newtype ValidationParseT env m a = ValidationParseT {unValidationParseT :: env -> m (Validation (NonEmpty Error) a)}
+  deriving
+    (Functor, Applicative, Selective)
+    via ( Compose
+            ((->) env)
+            (Compose m (Validation (NonEmpty Error)))
+        )
+
+-- | Helper that runs the given parser and throws a user error if the parsing failed.
+runValidationParseTOrUserError ::
+  forall validationParseT env m a.
+  ( Coercible validationParseT (ValidationParseT env m a),
+    MonadLogger m,
+    MonadThrow m
+  ) =>
+  -- | toplevel error message to throw if the parsing fails
+  Error ->
+  -- | The parser which should be run
+  validationParseT ->
+  -- | input to the parser
+  env ->
+  m a
+{-# INLINE runValidationParseTOrUserError #-}
+runValidationParseTOrUserError contextError parser env =
+  (coerce @_ @(ValidationParseT _ _ _) parser).unValidationParseT env
+    >>= \case
+      Failure errs -> throwUserErrorTree (errorTree contextError errs)
+      Success a -> pure a
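+
+-- Usage sketch (the @Form@ type and all names are hypothetical):
+--
+-- newtype FormParser m a = FormParser (ValidationParseT Form m a)
+--   deriving (Functor, Applicative, Selective) via (ValidationParseT Form m)
+--
+-- parseForm :: (MonadLogger m, MonadThrow m) => FormParser m a -> Form -> m a
+-- parseForm = runValidationParseTOrUserError (newError "cannot parse form")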
diff --git a/users/Profpatsch/httzip/Httzip.hs b/users/Profpatsch/httzip/Httzip.hs
new file mode 100644
index 0000000000..761cd1d2ea
--- /dev/null
+++ b/users/Profpatsch/httzip/Httzip.hs
@@ -0,0 +1,66 @@
+{-# LANGUAGE QuasiQuotes #-}
+
+module Main where
+
+import Conduit ((.|))
+import Data.Binary.Builder qualified as Builder
+import Data.Conduit qualified as Cond
+import Data.Conduit.Combinators qualified as Cond
+import Data.Conduit.Process.Typed qualified as Cond
+import Data.Conduit.Process.Typed qualified as Proc
+import Data.List qualified as List
+import Data.Text qualified as Text
+import Network.HTTP.Types qualified as Http
+import Network.Wai qualified as Wai
+import Network.Wai.Conduit qualified as Wai.Conduit
+import Network.Wai.Handler.Warp qualified as Warp
+import PossehlAnalyticsPrelude
+import System.Directory qualified as Dir
+import System.FilePath ((</>))
+import System.FilePath qualified as File
+import System.Posix qualified as Unix
+
+-- Webserver that returns folders under CWD as .zip archives (recursively)
+main :: IO ()
+main = do
+  currentDirectory <- Dir.getCurrentDirectory >>= Dir.canonicalizePath
+  run currentDirectory
+
+run :: FilePath -> IO ()
+run dir = do
+  currentDirectory <- Dir.canonicalizePath dir
+  putStderrLn $ [fmt|current {show currentDirectory}|]
+  Warp.run 7070 $ \req respond -> do
+    let respondHtml status content = respond $ Wai.responseLBS status [("Content-Type", "text/html")] content
+    case req & Wai.pathInfo of
+      [] -> respondHtml Http.status200 "any directory will be returned as .zip!"
+      filePath -> do
+        absoluteWantedFilepath <- Dir.canonicalizePath (currentDirectory </> (File.joinPath (filePath <&> textToString)))
+        -- canonicalizing and then requiring the CWD as a prefix should prevent path-traversal shenanigans
+        let noCurrentDirPrefix = List.stripPrefix (File.addTrailingPathSeparator currentDirectory) absoluteWantedFilepath
+        if
+            | (any (Text.elem '/') filePath) -> putStderrLn "tried %2F encoding" >> respondHtml Http.status400 "no"
+            | Nothing <- noCurrentDirPrefix -> putStderrLn "tried parent dir with .." >> respondHtml Http.status400 "no^2"
+            | Just wantedFilePath <- noCurrentDirPrefix -> do
+                putStderrLn $ [fmt|wanted {show wantedFilePath}|]
+                ex <- Unix.fileExist wantedFilePath
+                if ex
+                  then do
+                    status <- Unix.getFileStatus wantedFilePath
+                    if status & Unix.isDirectory
+                      then do
+                        zipDir <- zipDirectory wantedFilePath
+                        Proc.withProcessWait zipDir $ \process -> do
+                          let stream =
+                                Proc.getStdout process
+                                  .| Cond.map (\bytes -> Cond.Chunk $ Builder.fromByteString bytes)
+                          -- TODO: how to handle broken zip? Is it just gonna return a 500? But the stream is already starting, so hard!
+                          respond $ Wai.Conduit.responseSource Http.ok200 [("Content-Type", "application/zip")] stream
+                      else respondHtml Http.status404 "not found"
+                  else respondHtml Http.status404 "not found"
+  where
+    zipDirectory toZipDir = do
+      putStderrLn [fmt|running $ zip {show ["--recurse-paths", "-", toZipDir]}|]
+      pure $
+        Proc.proc "zip" ["--recurse-paths", "-", toZipDir]
+          & Proc.setStdout Cond.createSource
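+
+-- Note: passing "-" as the archive name makes zip(1) write the archive to
+-- stdout, which is what allows streaming the response without a temp file.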
diff --git a/users/Profpatsch/httzip/default.nix b/users/Profpatsch/httzip/default.nix
new file mode 100644
index 0000000000..35d8a69d56
--- /dev/null
+++ b/users/Profpatsch/httzip/default.nix
@@ -0,0 +1,40 @@
+{ depot, pkgs, lib, ... }:
+
+let
+
+  httzip = pkgs.haskellPackages.mkDerivation {
+    pname = "httzip";
+    version = "0.1.0";
+
+    src = depot.users.Profpatsch.exactSource ./. [
+      ./httzip.cabal
+      ./Httzip.hs
+    ];
+
+    libraryHaskellDepends = [
+      pkgs.haskellPackages.pa-prelude
+      pkgs.haskellPackages.warp
+      pkgs.haskellPackages.wai
+      pkgs.haskellPackages.wai-conduit
+      pkgs.haskellPackages.conduit-extra
+      pkgs.haskellPackages.conduit
+    ];
+
+    isExecutable = true;
+    isLibrary = false;
+    license = lib.licenses.mit;
+  };
+
+  bins = depot.nix.getBins httzip [ "httzip" ];
+
+in
+depot.nix.writeExecline "httzip-wrapped" { } [
+  "importas"
+  "-i"
+  "PATH"
+  "PATH"
+  "export"
+  "PATH"
+  "${pkgs.zip}/bin:$${PATH}"
+  bins.httzip
+]
diff --git a/users/Profpatsch/httzip/httzip.cabal b/users/Profpatsch/httzip/httzip.cabal
new file mode 100644
index 0000000000..c463a6a5fe
--- /dev/null
+++ b/users/Profpatsch/httzip/httzip.cabal
@@ -0,0 +1,73 @@
+cabal-version:      3.0
+name:               httzip
+version:            0.1.0.0
+author:             Profpatsch
+maintainer:         mail@profpatsch.de
+
+common common-options
+  ghc-options:
+      -Wall
+      -Wno-type-defaults
+      -Wunused-packages
+      -Wredundant-constraints
+      -fwarn-missing-deriving-strategies
+
+  -- See https://downloads.haskell.org/ghc/latest/docs/users_guide/exts.html
+  -- for a description of all these extensions
+  default-extensions:
+      -- Infer Applicative instead of Monad where possible
+    ApplicativeDo
+
+    -- Allow literal strings to be Text
+    OverloadedStrings
+
+    -- Syntactic sugar improvements
+    LambdaCase
+    MultiWayIf
+
+    -- Makes the (deprecated) usage of * instead of Data.Kind.Type an error
+    NoStarIsType
+
+    -- Convenient and crucial to deal with ambiguous field names, commonly
+    -- known as RecordDotSyntax
+    OverloadedRecordDot
+
+    -- does not export record fields as functions, use OverloadedRecordDot to access instead
+    NoFieldSelectors
+
+    -- Record punning
+    RecordWildCards
+
+    -- Improved Deriving
+    DerivingStrategies
+    DerivingVia
+
+    -- Type-level strings
+    DataKinds
+
+    -- to enable the `type` keyword in import lists (ormolu uses this automatically)
+    ExplicitNamespaces
+
+  default-language: GHC2021
+
+
+executable httzip
+    import: common-options
+
+    main-is:          Httzip.hs
+
+    build-depends:
+        base >=4.15 && <5,
+        pa-prelude,
+        bytestring,
+        text,
+        warp,
+        wai,
+        http-types,
+        directory,
+        filepath,
+        unix,
+        wai-conduit,
+        conduit,
+        conduit-extra,
+        binary
diff --git a/users/Profpatsch/ical-smolify/IcalSmolify.hs b/users/Profpatsch/ical-smolify/IcalSmolify.hs
new file mode 100644
index 0000000000..77264d1693
--- /dev/null
+++ b/users/Profpatsch/ical-smolify/IcalSmolify.hs
@@ -0,0 +1,124 @@
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE QuasiQuotes #-}
+{-# OPTIONS_GHC -Wall #-}
+
+module Main where
+
+import qualified Data.ByteString.Lazy as Bytes.Lazy
+import qualified Data.CaseInsensitive as CaseInsensitive
+import qualified Data.Default as Default
+import qualified Data.Map.Strict as Map
+import qualified Data.Set as Set
+import ExecHelpers (dieUserError, CurrentProgramName)
+import MyPrelude
+import qualified System.Environment as Env
+import Text.ICalendar
+import Prelude hiding (log)
+
+main :: IO ()
+main = do
+  Env.getArgs >>= \case
+    [] -> dieUserError progName "First argument must be the ics file name"
+    (file : _) ->
+      do
+        parse file
+        >>= traverse_
+          ( \vcal ->
+              vcal
+                & stripSingleTimezone
+                & minify
+                & printICalendar Default.def
+                & Bytes.Lazy.putStr
+          )
+
+progName :: CurrentProgramName
+progName = "ical-smolify"
+
+log :: Error -> IO ()
+log err = do
+  putStderrLn (errorContext "ical-smolify" err & prettyError)
+
+parse :: FilePath -> IO [VCalendar]
+parse file = do
+  parseICalendarFile Default.def file >>= \case
+    Left err -> do
+      dieUserError progName [fmt|Cannot parse ical file: {err}|]
+    Right (cals, warnings) -> do
+      for_ warnings (\warn -> log [fmt|Warning: {warn}|])
+      pure cals
+
+-- | Converts a single timezone definition to the corresponding X-WR-Timezone field.
+stripSingleTimezone :: VCalendar -> VCalendar
+stripSingleTimezone vcal =
+  case vcal & vcTimeZones & Map.toList of
+    [] -> vcal
+    [(_, tz)] -> do
+      let xtz =
+            OtherProperty
+              { otherName = CaseInsensitive.mk "X-WR-TIMEZONE",
+                otherValue = tz & vtzId & tzidValue & textToBytesUtf8Lazy,
+                otherParams = OtherParams Set.empty
+              }
+      vcal
+        { vcOther =
+            vcal & vcOther
+              -- remove any existing x-wr-timezone fields
+              & Set.filter (\prop -> (prop & otherName) /= (xtz & otherName))
+              & Set.insert xtz,
+          vcTimeZones = Map.empty
+        }
+    _more -> vcal
+
+-- | Minify the VCalendar by throwing away everything that’s not an event.
+minify :: VCalendar -> VCalendar
+minify vcal =
+  vcal
+    { vcProdId = ProdId "" (OtherParams Set.empty),
+      -- , vcVersion    :: ICalVersion
+      -- , vcScale      :: Scale
+      -- , vcMethod     :: Maybe Method
+      -- , vcOther      :: …
+      -- , vcTimeZones  :: Map Text VTimeZone
+      vcEvents = Map.map minifyEvent (vcal & vcEvents),
+      vcTodos = Map.empty,
+      vcJournals = Map.empty,
+      vcFreeBusys = Map.empty,
+      vcOtherComps = Set.empty
+    }
+
+minifyEvent :: VEvent -> VEvent
+minifyEvent vev =
+  vev
+--  { veDTStamp       :: DTStamp
+--   , veUID           :: UID
+--   , veClass         :: Class -- ^ 'def' = 'Public'
+--   , veDTStart       :: Maybe DTStart
+--   , veCreated       :: Maybe Created
+--   , veDescription   :: Maybe Description
+--   , veGeo           :: Maybe Geo
+--   , veLastMod       :: Maybe LastModified
+--   , veLocation      :: Maybe Location
+--   , veOrganizer     :: Maybe Organizer
+--   , vePriority      :: Priority -- ^ 'def' = 0
+--   , veSeq           :: Sequence -- ^ 'def' = 0
+--   , veStatus        :: Maybe EventStatus
+--   , veSummary       :: Maybe Summary
+--   , veTransp        :: TimeTransparency -- ^ 'def' = 'Opaque'
+--   , veUrl           :: Maybe URL
+--   , veRecurId       :: Maybe RecurrenceId
+--   , veRRule         :: Set RRule
+--   , veDTEndDuration :: Maybe (Either DTEnd DurationProp)
+--   , veAttach        :: Set Attachment
+--   , veAttendee      :: Set Attendee
+--   , veCategories    :: Set Categories
+--   , veComment       :: Set Comment
+--   , veContact       :: Set Contact
+--   , veExDate        :: Set ExDate
+--   , veRStatus       :: Set RequestStatus
+--   , veRelated       :: Set RelatedTo
+--   , veResources     :: Set Resources
+--   , veRDate         :: Set RDate
+--   , veAlarms        :: Set VAlarm
+--   , veOther         :: Set OtherProperty
+--   }
diff --git a/users/Profpatsch/ical-smolify/README.md b/users/Profpatsch/ical-smolify/README.md
new file mode 100644
index 0000000000..86c166d3c1
--- /dev/null
+++ b/users/Profpatsch/ical-smolify/README.md
@@ -0,0 +1,5 @@
+# ical-smolify
+
+Ensmallen an `ical` by stripping out redundant information like timezone definitions.
+
+The idea was that after running an ical through this preprocessor, it fits into a QR code (~2000 bits) that can be scanned with a phone, for automatically adding the event to a mobile calendar.
diff --git a/users/Profpatsch/ical-smolify/default.nix b/users/Profpatsch/ical-smolify/default.nix
new file mode 100644
index 0000000000..bf766db0e9
--- /dev/null
+++ b/users/Profpatsch/ical-smolify/default.nix
@@ -0,0 +1,23 @@
+{ depot, pkgs, lib, ... }:
+
+let
+  ical-smolify = pkgs.writers.writeHaskell "ical-smolify"
+    {
+      libraries = [
+        pkgs.haskellPackages.iCalendar
+        depot.users.Profpatsch.my-prelude
+        depot.users.Profpatsch.execline.exec-helpers-hs
+
+      ];
+      ghcArgs = [ "-threaded" ];
+    } ./IcalSmolify.hs;
+
+in
+
+ical-smolify.overrideAttrs (old: {
+  meta = lib.recursiveUpdate old.meta or { } {
+    # Dependency iCalendar no longer builds in nixpkgs due to a lack of maintenance upstream
+    # https://github.com/nixos/nixpkgs/commit/13d10cc6e302e7d5800c6a08c1728b14c3801e26
+    ci.skip = true;
+  };
+})
diff --git a/users/Profpatsch/ical-smolify/ical-smolify.cabal b/users/Profpatsch/ical-smolify/ical-smolify.cabal
new file mode 100644
index 0000000000..d7a46c581d
--- /dev/null
+++ b/users/Profpatsch/ical-smolify/ical-smolify.cabal
@@ -0,0 +1,18 @@
+cabal-version:      3.0
+name:               ical-smolify
+version:            0.1.0.0
+author:             Profpatsch
+maintainer:         mail@profpatsch.de
+
+executable ical-smolify
+    main-is: IcalSmolify.hs
+
+    build-depends:
+        base >=4.15 && <5,
+        my-prelude,
+        exec-helpers,
+        data-default,
+        case-insensitive,
+        iCalendar
+
+    default-language: Haskell2010
diff --git a/users/Profpatsch/ini/default.nix b/users/Profpatsch/ini/default.nix
new file mode 100644
index 0000000000..e1a7a1a7b6
--- /dev/null
+++ b/users/Profpatsch/ini/default.nix
@@ -0,0 +1,6 @@
+{ depot, ... }:
+{
+  externs = {
+    renderIni = depot.users.Profpatsch.toINI { };
+  };
+}
diff --git a/users/Profpatsch/ini/ini.dhall b/users/Profpatsch/ini/ini.dhall
new file mode 100644
index 0000000000..f2efbc0af4
--- /dev/null
+++ b/users/Profpatsch/ini/ini.dhall
@@ -0,0 +1,36 @@
+let lib = ../dhall/lib.dhall
+
+let NameVal = λ(T : Type) → { name : Text, value : T }
+
+let ValueList = λ(T : Type) → List (NameVal T)
+
+let Section = ValueList Text
+
+let Sections = ValueList Section
+
+let Ini = { globalSection : Section, sections : Sections }
+
+let
+    -- Takes a list of INI files and merges their global sections and their
+    -- section lists, without deduplicating by section name.
+    appendInis =
+      λ(inis : List Ini) →
+          { globalSection =
+              lib.List/concat
+                (NameVal Text)
+                (lib.List/map Ini Section (λ(i : Ini) → i.globalSection) inis)
+          , sections =
+              lib.List/concat
+                (NameVal Section)
+                (lib.List/map Ini Sections (λ(i : Ini) → i.sections) inis)
+          }
+        : Ini
+
+let
+    -- Signatures of functions that are input via FFI.
+    Externs =
+      { -- given a dsl of functions to create an Ini, render the ini file
+        renderIni : Ini → Text
+      }
+
+in  { NameVal, ValueList, Section, Sections, Ini, appendInis, Externs }
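+
+-- Usage sketch (illustrative values):
+--
+-- let example = { globalSection = [ { name = "greeting", value = "hello" } ]
+--               , sections = [] : Sections
+--               }
+--
+-- in  appendInis [ example, example ]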
diff --git a/users/Profpatsch/jaeger.nix b/users/Profpatsch/jaeger.nix
new file mode 100644
index 0000000000..374e40df1a
--- /dev/null
+++ b/users/Profpatsch/jaeger.nix
@@ -0,0 +1,46 @@
+{ depot, pkgs, ... }:
+let
+  drv =
+    pkgs.stdenv.mkDerivation {
+      pname = "jaeger";
+      version = "1.49.0";
+      src = pkgs.fetchurl {
+        url = "https://github.com/jaegertracing/jaeger/releases/download/v1.49.0/jaeger-1.49.0-linux-amd64.tar.gz";
+        hash = "sha256-QhxISDlk/t431EesgVkHWTe7yiw2B+yyfq//GLP0As4=";
+      };
+      phases = [ "unpackPhase" "installPhase" "fixupPhase" ];
+      installPhase = ''
+        mkdir -p $out/bin
+        install ./jaeger-all-in-one $out/bin
+      '';
+    };
+  image =
+    pkgs.dockerTools.buildImage {
+      name = "jaeger";
+      tag = "1.49.0";
+      copyToRoot = drv;
+      config = {
+        Cmd = [ "/bin/jaeger-all-in-one" ];
+      };
+
+    };
+
+  runner =
+    depot.nix.writeExecline "jaeger-docker-run" { } [
+      "if"
+      [ "docker" "load" "-i" image ]
+      "docker"
+      "run"
+      "--rm"
+      "--name"
+      "jaeger"
+      # Web UI
+      "-p"
+      "16686:16686"
+      # Opentelemetry
+      "-p"
+      "4318:4318"
+      "jaeger:1.49.0"
+    ];
+in
+runner
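+# Running the built script loads the image into the local docker daemon and
+# starts Jaeger; 16686 (web UI) and 4318 (OTLP over HTTP) are published to the host.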
diff --git a/users/Profpatsch/jbovlaste-sqlite/JbovlasteSqlite.hs b/users/Profpatsch/jbovlaste-sqlite/JbovlasteSqlite.hs
new file mode 100644
index 0000000000..8dae9cd026
--- /dev/null
+++ b/users/Profpatsch/jbovlaste-sqlite/JbovlasteSqlite.hs
@@ -0,0 +1,389 @@
+{-# LANGUAGE QuasiQuotes #-}
+{-# OPTIONS_GHC -Wno-orphans #-}
+
+module Main where
+
+import Conduit ((.|))
+import Conduit qualified as Cond
+import Control.Category qualified as Cat
+import Control.Foldl qualified as Fold
+import Data.ByteString.Internal qualified as Bytes
+import Data.Error.Tree
+import Data.Int (Int64)
+import Data.List qualified as List
+import Data.Map.Strict qualified as Map
+import Data.Maybe (catMaybes)
+import Data.Text qualified as Text
+import Data.Text.IO qualified as Text
+import Database.SQLite.Simple qualified as Sqlite
+import Database.SQLite.Simple.FromField qualified as Sqlite
+import Database.SQLite.Simple.QQ qualified as Sqlite
+import FieldParser qualified as Field
+import Label
+import Parse
+import PossehlAnalyticsPrelude
+import Text.XML (def)
+import Text.XML qualified as Xml
+import Prelude hiding (init, maybe)
+
+main :: IO ()
+main = do
+  f <- file
+  f.documentRoot
+    & filterDown
+    & toTree
+    & prettyErrorTree
+    & Text.putStrLn
+
+test :: IO ()
+test = do
+  withEnv $ \env -> do
+    migrate env
+    f <- file
+    parseJbovlasteXml f
+      & \case
+        Left errs -> Text.putStrLn $ prettyErrorTree errs
+        Right valsi -> insertValsi env valsi
+
+filterDown :: Xml.Element -> Xml.Element
+filterDown el =
+  el
+    & filterElementsRec noUsers
+    & downTo (T2 (label @"maxdepth" 5) (label @"maxlistitems" 30))
+
+data Valsi = Valsi
+  { word :: Text,
+    definition :: Text,
+    definitionId :: Natural,
+    typ :: Text,
+    selmaho :: Maybe Text,
+    notes :: Maybe Text,
+    glosswords :: [T2 "word" Text "sense" (Maybe Text)],
+    keywords :: [T3 "word" Text "place" Natural "sense" (Maybe Text)]
+  }
+  deriving stock (Show)
+
+insertValsi :: Env -> [Valsi] -> IO ()
+insertValsi env vs = do
+  Sqlite.withTransaction env.envData $
+    do
+      valsiIds <-
+        Cond.yieldMany vs
+          .| Cond.mapMC
+            ( \v ->
+                Sqlite.queryNamed
+                  @(Sqlite.Only Int64)
+                  env.envData
+                  [Sqlite.sql|
+                     INSERT INTO valsi
+                       (word , definition , type , selmaho , notes )
+                       VALUES
+                       (:word, :definition, :type, :selmaho, :notes)
+                       RETURNING (id)
+                   |]
+                  [ ":word" Sqlite.:= v.word,
+                    ":definition" Sqlite.:= v.definition,
+                    ":type" Sqlite.:= v.typ,
+                    ":selmaho" Sqlite.:= v.selmaho,
+                    ":notes" Sqlite.:= v.notes
+                  ]
+                  >>= \case
+                    [one] -> pure one
+                    _ -> error "more or less than one result"
+            )
+          .| Cond.sinkList
+          & Cond.runConduit
+      for_ (zip valsiIds vs) $ \(Sqlite.Only vId, v) -> do
+        for_ v.glosswords $ \g -> do
+          Sqlite.executeNamed
+            env.envData
+            [Sqlite.sql|
+                      INSERT INTO glosswords
+                        (valsi_id , word , sense )
+                        VALUES
+                        (:valsi_id, :word, :sense)
+                    |]
+            [ ":valsi_id" Sqlite.:= vId,
+              ":word" Sqlite.:= g.word,
+              ":sense" Sqlite.:= g.sense
+            ]
+      for_ (zip valsiIds vs) $ \(Sqlite.Only vId, v) -> do
+        for_ v.keywords $ \g -> do
+          Sqlite.executeNamed
+            env.envData
+            [Sqlite.sql|
+                      INSERT INTO keywords
+                        (valsi_id , word , place , sense )
+                        VALUES
+                        (:valsi_id, :word, :place, :sense)
+                    |]
+            [ ":valsi_id" Sqlite.:= vId,
+              ":word" Sqlite.:= g.word,
+              ":place" Sqlite.:= (g.place & fromIntegral @Natural @Int),
+              ":sense" Sqlite.:= g.sense
+            ]
+
+migrate :: (HasField "envData" p Sqlite.Connection) => p -> IO ()
+migrate env = do
+  let x q = Sqlite.execute env.envData q ()
+  x
+    [Sqlite.sql|
+      CREATE TABLE IF NOT EXISTS valsi (
+        id integer PRIMARY KEY,
+        word text NOT NULL,
+        definition text NOT NULL,
+        type text NOT NULL,
+        selmaho text NULL,
+        notes text NULL
+      )
+     |]
+  x
+    [Sqlite.sql|
+      CREATE TABLE IF NOT EXISTS glosswords (
+        id integer PRIMARY KEY,
+        valsi_id integer NOT NULL,
+        word text NOT NULL,
+        sense text NULL,
+        FOREIGN KEY(valsi_id) REFERENCES valsi(id)
+      )
+    |]
+  x
+    [Sqlite.sql|
+      CREATE TABLE IF NOT EXISTS keywords (
+        id integer PRIMARY KEY,
+        valsi_id integer NOT NULL,
+        word text NOT NULL,
+        place integer NOT NULL,
+        sense text NULL,
+        FOREIGN KEY(valsi_id) REFERENCES valsi(id)
+      )
+    |]
+
+data Env = Env
+  { envData :: Sqlite.Connection
+  }
+
+withEnv :: (Env -> IO a) -> IO a
+withEnv inner = do
+  withSqlite "./jbovlaste.sqlite" $ \envData -> inner Env {..}
+
+withSqlite :: String -> (Sqlite.Connection -> IO a) -> IO a
+withSqlite fileName inner = Sqlite.withConnection fileName $ \conn -> do
+  -- Sqlite.setTrace conn (Just (\msg -> Text.hPutStrLn IO.stderr [fmt|{fileName}: {msg}|]))
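+  -- SQLite does not enforce FOREIGN KEY constraints unless this pragma is
+  -- enabled, and it must be set for every new connection.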
+  Sqlite.execute conn [Sqlite.sql|PRAGMA foreign_keys = ON|] ()
+  inner conn
+
+parseJbovlasteXml :: (HasField "documentRoot" r Xml.Element) => r -> Either ErrorTree [Valsi]
+parseJbovlasteXml xml =
+  xml.documentRoot
+    & runParse
+      "cannot parse jbovlaste.xml"
+      parser
+  where
+    parser =
+      (element "dictionary" <&> (.elementNodes) <&> mapMaybe nodeElementMay)
+        >>> ( find
+                ( element "direction"
+                    >>> do
+                      (attribute "from" >>> exactly showToText "lojban")
+                      *> (attribute "to" >>> exactly showToText "English")
+                      *> Cat.id
+                )
+                <&> (\x -> x.elementNodes <&> nodeElementMay)
+            )
+        >>> (multiple (maybe valsi) <&> catMaybes)
+    valsi =
+      element "valsi"
+        >>> do
+          let subNodes =
+                ( Cat.id
+                    <&> (.elementNodes)
+                    <&> mapMaybe nodeElementMay
+                )
+
+          let subElementContent elName =
+                subNodes
+                  >>> ( (find (element elName))
+                          <&> (.elementNodes)
+                      )
+                  >>> exactlyOne
+                  >>> content
+          let optionalSubElementContent elName =
+                subNodes
+                  >>> ((findAll (element elName) >>> zeroOrOne))
+                  >>> (maybe (lmap (.elementNodes) exactlyOne >>> content))
+
+          word <- attribute "word"
+          typ <- attribute "type"
+          selmaho <- optionalSubElementContent "selmaho"
+          definition <- subElementContent "definition"
+          definitionId <- subElementContent "definitionid" >>> fieldParser Field.decimalNatural
+          notes <- optionalSubElementContent "notes"
+          glosswords <-
+            (subNodes >>> findAll (element "glossword"))
+              >>> ( multiple $ do
+                      word' <- label @"word" <$> (attribute "word")
+                      sense <- label @"sense" <$> (attributeMay "sense")
+                      pure $ T2 word' sense
+                  )
+          keywords <-
+            (subNodes >>> findAll (element "keyword"))
+              >>> ( multiple $ do
+                      word' <- label @"word" <$> (attribute "word")
+                      place <- label @"place" <$> (attribute "place" >>> fieldParser Field.decimalNatural)
+                      sense <- label @"sense" <$> (attributeMay "sense")
+                      pure $ T3 word' place sense
+                  )
+
+          pure $ Valsi {..}
+
+file :: IO Xml.Document
+file = Xml.readFile def "./jbovlaste-en.xml"
+
+-- | Filter XML elements recursively based on the given predicate
+filterElementsRec :: (Xml.Element -> Bool) -> Xml.Element -> Xml.Element
+filterElementsRec f el =
+  el
+    { Xml.elementNodes =
+        mapMaybe
+          ( \case
+              Xml.NodeElement el' ->
+                if f el'
+                  then Just $ Xml.NodeElement $ filterElementsRec f el'
+                  else Nothing
+              other -> Just other
+          )
+          el.elementNodes
+    }
+
+-- | no <user> allowed
+noUsers :: Xml.Element -> Bool
+noUsers el = el.elementName.nameLocalName /= "user"
+
+downTo :: (T2 "maxdepth" Int "maxlistitems" Int) -> Xml.Element -> Xml.Element
+downTo n el =
+  if n.maxdepth > 0
+    then
+      el
+        { Xml.elementNodes =
+            ( do
+                let eleven = take (n.maxlistitems + 1) $ map down el.elementNodes
+                if List.length eleven == (n.maxlistitems + 1)
+                  then eleven <> [Xml.NodeComment "snip!"]
+                  else eleven
+            )
+        }
+    else el {Xml.elementNodes = [Xml.NodeComment "snip!"]}
+  where
+    down =
+      \case
+        Xml.NodeElement el' ->
+          Xml.NodeElement $
+            downTo
+              ( T2
+                  (label @"maxdepth" $ n.maxdepth - 1)
+                  (label @"maxlistitems" n.maxlistitems)
+              )
+              el'
+        more -> more
+
+toTree :: Xml.Element -> ErrorTree
+toTree el = do
+  case el.elementNodes & filter (not . isEmptyContent) & nonEmpty of
+    Nothing -> singleError (newError (prettyXmlElement el))
+    Just (n :| []) | not $ isElementNode n -> singleError $ errorContext (prettyXmlElement el) (nodeErrorNoElement n)
+    Just nodes -> nestedMultiError (newError (prettyXmlElement el)) (nodes <&> node)
+  where
+    isEmptyContent = \case
+      Xml.NodeContent c -> c & Text.all Bytes.isSpaceChar8
+      _ -> False
+    isElementNode = \case
+      Xml.NodeElement _ -> True
+      _ -> False
+
+    node :: Xml.Node -> ErrorTree
+    node = \case
+      Xml.NodeElement el' -> toTree el'
+      other -> singleError $ nodeErrorNoElement other
+
+    nodeErrorNoElement :: Xml.Node -> Error
+    nodeErrorNoElement = \case
+      Xml.NodeInstruction i -> [fmt|Instruction: {i & show}|]
+      Xml.NodeContent c -> [fmt|"{c & Text.replace "\"" "\\\""}"|]
+      Xml.NodeComment c -> [fmt|<!-- {c} -->|]
+      Xml.NodeElement _ -> error "NodeElement not allowed here"
+
+prettyXmlName :: Xml.Name -> Text
+prettyXmlName n = [fmt|{n.namePrefix & fromMaybe ""}{n.nameLocalName}|]
+
+prettyXmlElement :: Xml.Element -> Text
+prettyXmlElement el =
+  if not $ null el.elementAttributes
+    then [fmt|<{prettyXmlName el.elementName}: {attrs el.elementAttributes}>|]
+    else [fmt|<{prettyXmlName el.elementName}>|]
+  where
+    attrs :: Map Xml.Name Text -> Text
+    attrs a = a & Map.toList <&> (\(k, v) -> [fmt|{prettyXmlName k}={v}|]) & Text.intercalate ", " & \s -> [fmt|({s})|]
+
+nodeElementMay :: Xml.Node -> Maybe Xml.Element
+nodeElementMay = \case
+  Xml.NodeElement el -> Just el
+  _ -> Nothing
+
+element :: Text -> Parse Xml.Element Xml.Element
+element name = Parse $ \(ctx, el) ->
+  if el.elementName.nameLocalName == name
+    then Success (ctx & addContext (prettyXmlName el.elementName), el)
+    else Failure $ singleton [fmt|Expected element named <{name}> but got {el & prettyXmlElement} at {showContext ctx}|]
+
+content :: Parse Xml.Node Text
+content = Parse $ \(ctx, node) -> case node of
+  Xml.NodeContent t -> Success (ctx, t)
+  -- TODO: give an example of the node content?
+  n -> Failure $ singleton [fmt|Expected a content node, but got a {n & nodeType} node, at {showContext ctx}|]
+    where
+      nodeType = \case
+        Xml.NodeContent _ -> "content" :: Text
+        Xml.NodeComment _ -> "comment"
+        Xml.NodeInstruction _ -> "instruction"
+        Xml.NodeElement _ -> "element"
+
+attribute :: Text -> Parse Xml.Element Text
+attribute name = Parse $ \(ctx, el) ->
+  case el.elementAttributes & Map.mapKeys (.nameLocalName) & Map.lookup name of
+    Just a -> Success (ctx & addContext [fmt|{{attr:{name}}}|], a)
+    Nothing -> Failure $ singleton [fmt|Attribute "{name}" missing at {showContext ctx}|]
+
+attributeMay :: Text -> Parse Xml.Element (Maybe Text)
+attributeMay name = Parse $ \(ctx, el) ->
+  case el.elementAttributes & Map.mapKeys (.nameLocalName) & Map.lookup name of
+    Just a -> Success (ctx & addContext [fmt|{{attr:{name}}}|], Just a)
+    Nothing -> Success (ctx, Nothing)
+
+instance
+  ( Sqlite.FromField t1,
+    Sqlite.FromField t2,
+    Sqlite.FromField t3
+  ) =>
+  Sqlite.FromRow (T3 l1 t1 l2 t2 l3 t3)
+  where
+  fromRow = do
+    T3
+      <$> (label @l1 <$> Sqlite.field)
+      <*> (label @l2 <$> Sqlite.field)
+      <*> (label @l3 <$> Sqlite.field)
+
+foldRows ::
+  forall row params b.
+  (Sqlite.FromRow row, Sqlite.ToRow params) =>
+  Sqlite.Connection ->
+  Sqlite.Query ->
+  params ->
+  Fold.Fold row b ->
+  IO b
+foldRows conn qry params = Fold.purely f
+  where
+    f :: forall x. (x -> row -> x) -> x -> (x -> b) -> IO b
+    f acc init extract = do
+      x <- Sqlite.fold conn qry params init (\a r -> pure $ acc a r)
+      pure $ extract x
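+
+-- Usage sketch (assuming the valsi table from 'migrate'):
+--
+-- countValsi :: Sqlite.Connection -> IO Int
+-- countValsi conn =
+--   foldRows @(Sqlite.Only Int64) conn [Sqlite.sql|SELECT id FROM valsi|] () Fold.length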
diff --git a/users/Profpatsch/jbovlaste-sqlite/default.nix b/users/Profpatsch/jbovlaste-sqlite/default.nix
new file mode 100644
index 0000000000..ea59fdec39
--- /dev/null
+++ b/users/Profpatsch/jbovlaste-sqlite/default.nix
@@ -0,0 +1,33 @@
+{ depot, pkgs, lib, ... }:
+
+let
+  #   bins = depot.nix.getBins pkgs.sqlite ["sqlite3"];
+
+  jbovlaste-sqlite = pkgs.haskellPackages.mkDerivation {
+    pname = "jbovlaste-sqlite";
+    version = "0.1.0";
+
+    src = depot.users.Profpatsch.exactSource ./. [
+      ./jbovlaste-sqlite.cabal
+      ./JbovlasteSqlite.hs
+    ];
+
+    libraryHaskellDepends = [
+      pkgs.haskellPackages.pa-prelude
+      pkgs.haskellPackages.pa-label
+      pkgs.haskellPackages.pa-error-tree
+      pkgs.haskellPackages.pa-field-parser
+      depot.users.Profpatsch.my-prelude
+      pkgs.haskellPackages.foldl
+      pkgs.haskellPackages.sqlite-simple
+      pkgs.haskellPackages.xml-conduit
+
+    ];
+
+    isExecutable = true;
+    isLibrary = false;
+    license = lib.licenses.mit;
+  };
+
+in
+jbovlaste-sqlite
diff --git a/users/Profpatsch/jbovlaste-sqlite/jbovlaste-sqlite.cabal b/users/Profpatsch/jbovlaste-sqlite/jbovlaste-sqlite.cabal
new file mode 100644
index 0000000000..f677615a16
--- /dev/null
+++ b/users/Profpatsch/jbovlaste-sqlite/jbovlaste-sqlite.cabal
@@ -0,0 +1,76 @@
+cabal-version:      3.0
+name:               jbovlaste-sqlite
+version:            0.1.0.0
+author:             Profpatsch
+maintainer:         mail@profpatsch.de
+
+common common-options
+  ghc-options:
+      -Wall
+      -Wno-type-defaults
+      -Wunused-packages
+      -Wredundant-constraints
+      -fwarn-missing-deriving-strategies
+
+  -- See https://downloads.haskell.org/ghc/latest/docs/users_guide/exts.html
+  -- for a description of all these extensions
+  default-extensions:
+      -- Infer Applicative instead of Monad where possible
+    ApplicativeDo
+
+    -- Allow literal strings to be Text
+    OverloadedStrings
+
+    -- Syntactic sugar improvements
+    LambdaCase
+    MultiWayIf
+
+    -- Makes the (deprecated) usage of * instead of Data.Kind.Type an error
+    NoStarIsType
+
+    -- Convenient and crucial to deal with ambiguous field names, commonly
+    -- known as RecordDotSyntax
+    OverloadedRecordDot
+
+    -- does not export record fields as functions, use OverloadedRecordDot to access instead
+    NoFieldSelectors
+
+    -- Record punning
+    RecordWildCards
+
+    -- Improved Deriving
+    DerivingStrategies
+    DerivingVia
+
+    -- Type-level strings
+    DataKinds
+
+    -- to enable the `type` keyword in import lists (ormolu uses this automatically)
+    ExplicitNamespaces
+
+  default-language: GHC2021
+
+
+executable jbovlaste-sqlite
+    import: common-options
+
+    main-is:          JbovlasteSqlite.hs
+
+    build-depends:
+        base >=4.15 && <5,
+        pa-prelude,
+        pa-label,
+        pa-error-tree,
+        pa-field-parser,
+        my-prelude,
+        containers,
+        selective,
+        semigroupoids,
+        validation-selective,
+        sqlite-simple,
+        foldl,
+        conduit,
+        bytestring,
+        text,
+        xml-conduit
diff --git a/users/Profpatsch/lorri-wait-for-eval/LorriWaitForEval.hs b/users/Profpatsch/lorri-wait-for-eval/LorriWaitForEval.hs
new file mode 100644
index 0000000000..a1a4586401
--- /dev/null
+++ b/users/Profpatsch/lorri-wait-for-eval/LorriWaitForEval.hs
@@ -0,0 +1,173 @@
+{-# LANGUAGE DerivingStrategies #-}
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE NamedFieldPuns #-}
+{-# LANGUAGE NumericUnderscores #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE QuasiQuotes #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+{-# OPTIONS_GHC -Wall #-}
+
+module Main where
+
+import Conduit
+import Conduit qualified as Cond
+import Control.Concurrent
+import Control.Concurrent.Async qualified as Async
+import Control.Monad
+import Data.Aeson.BetterErrors qualified as Json
+import Data.Bifunctor
+import Data.Conduit.Binary qualified as Conduit.Binary
+import Data.Conduit.Combinators qualified as Cond
+import Data.Conduit.Process
+import Data.Error
+import Data.Function
+import Data.Functor
+import Data.Text.IO (hPutStrLn)
+import MyPrelude
+import System.Directory qualified as Dir
+import System.Environment qualified as Env
+import System.Exit qualified as Exit
+import System.FilePath (takeDirectory)
+import System.FilePath.Posix qualified as FilePath
+import System.IO (stderr)
+import System.Posix qualified as Posix
+import Prelude hiding (log)
+
+data LorriEvent = LorriEvent
+  { nixFile :: Text,
+    eventType :: LorriEventType
+  }
+  deriving stock (Show)
+
+data LorriEventType
+  = Completed
+  | Started
+  | EvalFailure
+  deriving stock (Show)
+
+main :: IO ()
+main = do
+  argv <- Env.getArgs <&> nonEmpty
+
+  dir <- Dir.getCurrentDirectory
+  shellNix <-
+    findShellNix dir >>= \case
+      Nothing -> Exit.die [fmt|could not find any shell.nix in or above the directory {dir}|]
+      Just s -> pure s
+  getEventChan :: MVar (Chan LorriEvent) <- newEmptyMVar
+  Async.race_
+    ( do
+        sendEventChan :: Chan LorriEvent <- newChan
+        (exitCode, ()) <-
+          sourceProcessWithConsumer
+            (proc "lorri" ["internal", "stream-events"])
+            $
+            -- first, we want to send a message over the chan that the process is running (for timeout)
+            liftIO (putMVar getEventChan sendEventChan)
+              *> Conduit.Binary.lines
+              .| Cond.mapC
+                ( \jsonBytes ->
+                    (jsonBytes :: ByteString)
+                      & Json.parseStrict
+                        ( Json.key
+                            "Completed"
+                            ( do
+                                nixFile <- Json.key "nix_file" Json.asText
+                                pure LorriEvent {nixFile, eventType = Completed}
+                            )
+                            Json.<|> Json.key
+                              "Started"
+                              ( do
+                                  nixFile <- Json.key "nix_file" Json.asText
+                                  pure LorriEvent {nixFile, eventType = Started}
+                              )
+                            Json.<|> Json.key
+                              "Failure"
+                              ( do
+                                  nixFile <- Json.key "nix_file" Json.asText
+                                  pure LorriEvent {nixFile, eventType = EvalFailure}
+                              )
+                        )
+                      & first Json.displayError'
+                      & first (map newError)
+                      & first (smushErrors [fmt|Cannot parse line returned by lorri: {jsonBytes & bytesToTextUtf8Lenient}|])
+                      & unwrapError
+                )
+              .| (Cond.mapM_ (\ev -> writeChan sendEventChan ev))
+
+        log [fmt|lorri internal stream-events exited {show exitCode}|]
+    )
+    ( do
+        let waitMs ms = threadDelay (ms * 1000)
+
+        -- log [fmt|Waiting for lorri event for {shellNix}|]
+
+        eventChan <- takeMVar getEventChan
+
+        let isOurEvent ev = FilePath.normalise (ev & nixFile & textToString) == FilePath.normalise shellNix
+
+        let handleEvent ev =
+              case ev & eventType of
+                Started ->
+                  log [fmt|waiting for lorri build to finish|]
+                Completed -> do
+                  log [fmt|build completed|]
+                  exec (inDirenvDir (takeDirectory shellNix) <$> argv)
+                EvalFailure -> do
+                  log [fmt|evaluation failed! for path {ev & nixFile}|]
+                  Exit.exitWith (Exit.ExitFailure 111)
+
+        -- wait for 100ms for the first message from lorri,
+        -- or else assume lorri is not building the project yet
+        Async.race
+          (waitMs 100)
+          ( do
+              -- find the first event that we can use
+              let go = do
+                    ev <- readChan eventChan
+                    if isOurEvent ev then pure ev else go
+              go
+          )
+          >>= \case
+            Left () -> do
+              log [fmt|No event received from lorri, assuming this is the first evaluation|]
+              exec argv
+            Right ch -> handleEvent ch
+
+        runConduit $
+          repeatMC (readChan eventChan)
+            .| filterC isOurEvent
+            .| mapM_C handleEvent
+    )
+  where
+    inDirenvDir dir' argv' = ("direnv" :| ["exec", dir']) <> argv'
+    exec = \case
+      Just (exe :| args') -> Posix.executeFile exe True args' Nothing
+      Nothing -> Exit.exitSuccess
+
+log :: Text -> IO ()
+log msg = hPutStrLn stderr [fmt|lorri-wait-for-eval: {msg}|]
+
+-- | Searches from the current directory upwards, until it finds the `shell.nix`.
+findShellNix :: FilePath -> IO (Maybe FilePath)
+findShellNix curDir = do
+  let go :: (FilePath -> IO (Maybe FilePath))
+      go dir = do
+        let file = dir FilePath.</> "shell.nix"
+        Dir.doesFileExist file >>= \case
+          True -> pure (Just file)
+          False -> do
+            let parent = FilePath.takeDirectory dir
+            if parent == dir
+              then pure Nothing
+              else go parent
+  go (FilePath.normalise curDir)
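+
+-- e.g. for @/home/user/proj/sub@ this checks @/home/user/proj/sub/shell.nix@,
+-- then @/home/user/proj/shell.nix@, and so on up to @/shell.nix@.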
+
+smushErrors :: Foldable t => Text -> t Error -> Error
+smushErrors msg errs =
+  errs
+    -- hrm, pretty printing and then creating a new error is kinda shady
+    & foldMap (\err -> "\n- " <> prettyError err)
+    & newError
+    & errorContext msg
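+
+-- A rough sketch of the rendering (hypothetical error values, not actual lorri output):
+--
+--   smushErrors "Cannot parse line" [newError "unexpected key", newError "missing field"]
+--
+-- pretty-prints along the lines of:
+--
+--   Cannot parse line:
+--   - unexpected key
+--   - missing field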
diff --git a/users/Profpatsch/lorri-wait-for-eval/README.md b/users/Profpatsch/lorri-wait-for-eval/README.md
new file mode 100644
index 0000000000..9c5d8ef9e3
--- /dev/null
+++ b/users/Profpatsch/lorri-wait-for-eval/README.md
@@ -0,0 +1,7 @@
+# lorri-wait-for-eval
+
+A helper script for [lorri](https://github.com/nix-community/lorri): it wraps a command and, once lorri has finished evaluating the current `shell.nix`, executes it with the new environment.
+
+This is useful when you need the new shell environment to be in scope for the command, but don’t want to sit around manually waiting for the evaluation to finish.
+
+This should really be a feature of lorri, but I couldn’t be assed to touch rust :P
diff --git a/users/Profpatsch/lorri-wait-for-eval/default.nix b/users/Profpatsch/lorri-wait-for-eval/default.nix
new file mode 100644
index 0000000000..90c6365fed
--- /dev/null
+++ b/users/Profpatsch/lorri-wait-for-eval/default.nix
@@ -0,0 +1,20 @@
+{ depot, pkgs, lib, ... }:
+
+let
+  lorri-wait-for-eval = pkgs.writers.writeHaskell "lorri-wait-for-eval"
+    {
+      libraries = [
+        depot.users.Profpatsch.my-prelude
+        pkgs.haskellPackages.async
+        pkgs.haskellPackages.aeson-better-errors
+        pkgs.haskellPackages.conduit-extra
+        pkgs.haskellPackages.error
+        pkgs.haskellPackages.PyF
+        pkgs.haskellPackages.unliftio
+      ];
+      ghcArgs = [ "-threaded" ];
+
+    } ./LorriWaitForEval.hs;
+
+in
+lorri-wait-for-eval
diff --git a/users/Profpatsch/mailbox-org/MailboxOrg.hs b/users/Profpatsch/mailbox-org/MailboxOrg.hs
new file mode 100644
index 0000000000..6c5820080c
--- /dev/null
+++ b/users/Profpatsch/mailbox-org/MailboxOrg.hs
@@ -0,0 +1,523 @@
+{-# LANGUAGE ApplicativeDo #-}
+{-# LANGUAGE DataKinds #-}
+{-# LANGUAGE DerivingVia #-}
+{-# LANGUAGE GHC2021 #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE OverloadedRecordDot #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE PackageImports #-}
+{-# LANGUAGE QuasiQuotes #-}
+{-# LANGUAGE RecordWildCards #-}
+{-# LANGUAGE NoFieldSelectors #-}
+{-# OPTIONS_GHC -Wall #-}
+
+module Main where
+
+import Aeson (parseErrorTree)
+import AesonQQ (aesonQQ)
+import ArglibNetencode
+import Control.Exception (try)
+import Control.Monad (replicateM)
+import Data.Aeson qualified as Json
+import Data.Aeson.BetterErrors qualified as Json
+import Data.Aeson.KeyMap qualified as KeyMap
+import Data.ByteString qualified as ByteString
+import Data.ByteString.Lazy qualified as Lazy
+import Data.Char qualified as Char
+import "pa-error-tree" Data.Error.Tree
+import Data.Functor.Compose
+import Data.List qualified as List
+import Data.Map.Strict qualified as Map
+import Data.Text qualified as Text
+import ExecHelpers
+import Label
+import Netencode qualified
+import Netencode.Parse qualified as NetParse
+import Network.HTTP.Conduit qualified as Client
+import Network.HTTP.Simple qualified as Client
+import PossehlAnalyticsPrelude
+import Pretty
+import System.Directory qualified as File
+import System.Environment qualified as Env
+import System.Exit (ExitCode (ExitFailure, ExitSuccess))
+import System.Exit qualified as Exit
+import System.FilePath ((</>))
+import System.Process.Typed qualified as Process
+import System.Random qualified as Random
+import System.Random.Stateful qualified as Random
+import Prelude hiding (log)
+
+secret :: Tools -> IO (T2 "email" ByteString "password" ByteString)
+secret tools = do
+  T2
+    (label @"email" "mail@profpatsch.de")
+    <$> (label @"password" <$> fromPass "email/mailbox.org")
+  where
+    fromPass name =
+      tools.pass & runToolExpect0 [name]
+
+progName :: CurrentProgramName
+progName = "mailbox-org"
+
+log :: Error -> IO ()
+log err = do
+  putStderrLn (errorContext progName.unCurrentProgramName err & prettyError)
+
+data Tools = Tools
+  { pass :: Tool
+  }
+  deriving stock (Show)
+
+newtype Tool = Tool {unTool :: FilePath}
+  deriving stock (Show)
+
+parseTools :: Applicative m => (Text -> m (Either Error Tool)) -> m (Either ErrorTree Tools)
+parseTools getTool = do
+  let parser =
+        ( do
+            pass <- get "pass"
+            pure Tools {..}
+        )
+  parser & finalize
+  where
+    get name = name & getTool <&> eitherToListValidation & Compose
+    finalize p =
+      p.getCompose
+        <&> first (errorTree "Error reading tools")
+        <&> validationToEither
+
+main :: IO ()
+main =
+  arglibNetencode progName Nothing
+    >>= parseToolsArglib
+    >>= secret
+    >>= run applyFilters
+
+run ::
+  ( HasField "email" dat ByteString,
+    HasField "password" dat ByteString
+  ) =>
+  (Session -> IO ()) ->
+  dat ->
+  IO ()
+run act loginData = do
+  session <- login loginData
+  act session
+
+listFilterConfig :: Session -> IO ()
+listFilterConfig session = do
+  mailfilter
+    session
+    "config"
+    mempty
+    (Json.key "data" Json.asObject)
+    ()
+    >>= printPretty
+
+applyFilterRule ::
+  (HasField "folderId" dat Text) =>
+  dat ->
+  Session ->
+  IO ()
+applyFilterRule dat session = do
+  mailfilter
+    session
+    "apply"
+    ( T2
+        (label @"extraQueryParams" [("folderId", Just (dat.folderId & textToBytesUtf8))])
+        mempty
+    )
+    (Json.key "data" Json.asArray >> pure ())
+    (Json.Object mempty)
+
+data FilterRule = FilterRule
+  { actioncmds :: NonEmpty Json.Object,
+    test :: NonEmpty Json.Object
+  }
+
+data MailfilterList = MailfilterList
+  { id_ :: Json.Value,
+    rulename :: Text
+  }
+  deriving stock (Show, Eq)
+
+simpleRule ::
+  ( HasField "rulename" r Text,
+    HasField "id" r Natural,
+    HasField "emailContains" r Text,
+    HasField "subjectStartsWith" r Text
+  ) =>
+  r ->
+  Json.Value
+simpleRule dat = do
+  [aesonQQ|{
+    "id": |dat.id & enc @Natural|,
+    "position": 3,
+    "rulename": |dat.rulename & enc @Text|,
+    "active": true,
+    "flags": [],
+    "test": {
+      "id": "allof",
+      "tests": [
+        {
+          "id": "from",
+          "comparison": "contains",
+          "values": [
+            |dat.emailContains & enc @Text|
+          ]
+        },
+        {
+          "id": "subject",
+          "comparison": "startswith",
+          "values": [
+            |dat.subjectStartsWith & enc @Text|
+          ]
+        }
+      ]
+    },
+    "actioncmds": [
+      {
+        "id": "move",
+        "into": "default0/Archive"
+      },
+      {
+        "id": "stop"
+      }
+    ]
+  }|]
+  where
+    enc :: forall a. Json.ToJSON a => a -> Lazy.ByteString
+    enc val = val & Json.toJSON & Json.encode
+
+applyFilters :: Session -> IO ()
+applyFilters session = do
+  filters <-
+    mailfilter
+      session
+      "list"
+      mempty
+      ( Json.key "data" $ do
+          ( Json.eachInArray $ asDat @"mailfilter" $ do
+              id_ <- Json.key "id" Json.asValue
+              rulename <- Json.key "rulename" Json.asText
+              pure MailfilterList {..}
+            )
+            <&> mapFromListOn (\dat -> getLabel @"rulename" dat.parsed)
+      )
+      ([] :: [()])
+  let goal = Map.fromList [(label @"rulename" "another", 32 :: Integer), (label @"rulename" "xyz", 23)]
+  let actions = declarativeUpdate goal filters
+  log [fmt|To * create: {actions.toCreate & Map.keys & show}, * update: {actions.toUpdate & Map.keys & show}, * delete: {actions.toDelete & Map.keys & show}|]
+
+-- where
+-- filters
+--   & Map.elems
+--   & traverse_
+--     ( updateIfDifferent
+--         session
+--         ( \el ->
+--             pure $
+--               el.original.mailfilter
+--                 & KeyMap.insert "active" (Json.Bool False)
+--         )
+--         (pure ())
+--     )
+
+-- updateIfDifferent ::
+--   forall label parsed.
+--   ( HasField "id_" parsed Json.Value,
+--     HasField "rulename" parsed Text
+--   ) =>
+--   Session ->
+--   (Dat label Json.Object parsed -> IO Json.Object) ->
+--   Json.Parse Error () ->
+--   Dat label Json.Object parsed ->
+--   IO ()
+-- updateIfDifferent session switcheroo parser dat = do
+--   new <- switcheroo dat
+--   if new /= getField @label dat.original
+--     then do
+--       log [fmt|Updating filter "{dat.parsed.rulename}" (id {dat.parsed.id_ & show @Json.Value})|]
+--       mailfilter
+--         session
+--         "update"
+--         mempty
+--         parser
+--         new
+--     else do
+--       log [fmt|Skipping updating filter "{dat.parsed.rulename}" (id {dat.parsed.id_ & show @Json.Value}) because nothing changed.|]
+
+-- | https://oxpedia.org/wiki/index.php?title=HTTP_API_MailFilter
+mailfilter ::
+  ( Json.ToJSON a,
+    Show b
+  ) =>
+  Session ->
+  ByteString ->
+  T2
+    "extraQueryParams"
+    Client.Query
+    "httpMethod"
+    (Maybe ByteString) ->
+  Json.Parse Error b ->
+  a ->
+  IO b
+mailfilter session action opts parser body = do
+  req <-
+    Client.parseRequest "https://office.mailbox.org/appsuite/api/mailfilter/v2"
+      <&> Client.setQueryString
+        ( [ ("action", Just action),
+            ("colums", Just "1")
+          ]
+            <> opts.extraQueryParams
+        )
+      <&> Client.setRequestMethod (opts.httpMethod & fromMaybe "PUT")
+      <&> Client.setRequestBodyJSON body
+      <&> addSession session
+  req
+    & httpJSON [fmt|Cannot parse result for {req & prettyRequestShort}|] parser
+    >>= okOrDie
+    -- >>= (\resp -> printPretty resp >> pure resp)
+    <&> Client.responseBody
+  where
+    prettyRequestShort :: Client.Request -> Text
+    prettyRequestShort req = [fmt|request {req & Client.method}: {req & Client.host}{req & Client.path}{req & Client.queryString}|]
+
+-- | Given a goal and the actual state, return which elements to delete, update and create.
+declarativeUpdate ::
+  Ord k =>
+  -- | goal map
+  Map k a ->
+  -- | actual map
+  Map k b ->
+  T3
+    "toCreate"
+    (Map k a)
+    "toDelete"
+    (Map k b)
+    "toUpdate"
+    (Map k a)
+declarativeUpdate goal actual =
+  T3
+    (label @"toCreate" $ goal `Map.difference` actual)
+    (label @"toDelete" $ actual `Map.difference` goal)
+    (label @"toUpdate" $ goal `Map.intersection` actual)
+
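+-- A small worked example (hypothetical keys/values): with
+--
+--   goal   = Map.fromList [("a", 1), ("b", 2)]
+--   actual = Map.fromList [("b", "old"), ("c", "old")]
+--
+-- the result is toCreate = {"a": 1}, toDelete = {"c": "old"} and
+-- toUpdate = {"b": 2} (for updates, the goal value is kept).
+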
+newtype Session = Session Client.CookieJar
+
+httpJSON ::
+  Error ->
+  Json.Parse Error b ->
+  Client.Request ->
+  IO (Client.Response b)
+httpJSON errMsg parser req = do
+  req
+    & Client.httpJSON @_ @Json.Value
+    >>= traverse
+      ( \val -> do
+          case val of
+            Json.Object obj
+              | "error" `KeyMap.member` obj
+                  && "error_desc" `KeyMap.member` obj -> do
+                  printPretty obj
+                  diePanic' "Server returned above inline error"
+            _ -> pure ()
+          val & Json.parseValue parser & \case
+            Left errs ->
+              errs
+                & parseErrorTree errMsg
+                & diePanic'
+            Right a -> pure a
+      )
+
+data Dat label orig parsed = Dat
+  { original :: Label label orig,
+    parsed :: parsed
+  }
+  deriving stock (Show, Eq)
+
+asDat ::
+  forall label err m a.
+  Monad m =>
+  Json.ParseT err m a ->
+  Json.ParseT err m (Dat label Json.Object a)
+asDat parser = do
+  original <- label @label <$> Json.asObject
+  parsed <- parser
+  pure Dat {..}
+
+addSession :: Session -> Client.Request -> Client.Request
+addSession (Session jar) req = do
+  let sessionId =
+        jar
+          & Client.destroyCookieJar
+          & List.find (\c -> "open-xchange-session-" `ByteString.isPrefixOf` c.cookie_name)
+          & annotate "The cookie jar did not contain an open-exchange-session-*"
+          & unwrapError
+          & (.cookie_value)
+
+  let req' = req & Client.addToRequestQueryString [("session", Just sessionId)]
+  req' {Client.cookieJar = Just jar}
+
+-- | Log into the mailbox.org service, and return the session secret cookies.
+login :: (HasField "email" dat ByteString, HasField "password" dat ByteString) => dat -> IO Session
+login dat = do
+  rnd <- randomString
+  req <-
+    Client.parseRequest "https://office.mailbox.org/ajax/login"
+      <&> Client.setQueryString
+        [ ("action", Just "formlogin"),
+          ("authId", Just $ ("mbo-" <> rnd) & stringToText & textToBytesUtf8)
+        ]
+      <&> Client.urlEncodedBody
+        [ ("version", "Form+Login"),
+          ("autologin", "true"),
+          ("client", "open-xchange-appsuite"),
+          ("uiWebPath", "/appsuite/"),
+          ("login", dat.email),
+          ("password", dat.password)
+        ]
+  Client.httpNoBody req
+    >>= okOrDie
+    <&> Client.responseCookieJar
+    <&> Session
+  where
+    -- For some reason they want the client to pass a random string
+    -- which is used for the session?‽!?
+    randomString = do
+      gen <- Random.newIOGenM =<< Random.newStdGen
+      let chars = ['a' .. 'z'] <> ['A' .. 'Z'] <> ['0' .. '9']
+      let len = 11
+      Random.uniformRM (0, List.length chars - 1) gen
+        & replicateM len
+        <&> map (\index -> chars !! index)
+
+okOrDie :: Show a => Client.Response a -> IO (Client.Response a)
+okOrDie resp =
+  case resp & Client.getResponseStatusCode of
+    200 -> pure resp
+    _ -> do
+      printPretty resp
+      diePanic' "non-200 result"
+
+diePanic' :: ErrorTree -> IO a
+diePanic' errs = errs & prettyErrorTree & diePanic progName
+
+-- | Parse the tools from the given arglib input, and check that the executables exist
+parseToolsArglib :: Netencode.T -> IO Tools
+parseToolsArglib t = do
+  let oneTool name =
+        NetParse.asText
+          <&> textToString
+          <&> ( \path ->
+                  path
+                    & File.getPermissions
+                    <&> File.executable
+                    <&> ( \case
+                            False -> Left $ [fmt|Tool "{name}" is not an executable|]
+                            True -> Right (Tool path)
+                        )
+              )
+  let allTools =
+        parseTools (\name -> Compose $ NetParse.key name >>> oneTool name)
+          & getCompose
+  t
+    & NetParse.runParse
+      "test"
+      -- TODO: a proper ParseT for netencode values
+      ( NetParse.asRecord
+          >>> NetParse.key "BINS"
+          >>> NetParse.asRecord
+          >>> allTools
+      )
+    & orDo diePanic'
+    & join @IO
+    >>= orDo (\errs -> errs & diePanic')
+
+-- | Just assume the tools exist by name in the environment.
+parseToolsToolname :: IO Tools
+parseToolsToolname =
+  parseTools
+    ( \name ->
+        checkInPath name <&> \case
+          False -> Left [fmt|"Cannot find "{name}" in PATH|]
+          True -> Right $ Tool (name & textToString)
+    )
+    >>= orDo diePanic'
+
+checkInPath :: Text -> IO Bool
+checkInPath name = do
+  Env.lookupEnv "PATH"
+    <&> annotate "No PATH set"
+    >>= orDo diePanic'
+    <&> stringToText
+    <&> Text.split (== ':')
+    <&> filter (/= "")
+    >>= traverse
+      ( \p ->
+          File.getPermissions ((textToString p) </> (textToString name))
+            <&> File.executable
+            & try @IOError
+            >>= \case
+              Left _ioError -> pure False
+              Right isExe -> pure isExe
+      )
+    <&> or
+
+orDo :: Applicative f => (t -> f a) -> Either t a -> f a
+orDo f = \case
+  Left e -> f e
+  Right a -> pure a
+
+runTool :: [Text] -> Tool -> IO (Exit.ExitCode, ByteString)
+runTool args tool = do
+  let bashArgs = prettyArgsForBash ((tool.unTool & stringToText) : args)
+  log [fmt|Running: $ {bashArgs}|]
+  Process.proc
+    tool.unTool
+    (args <&> textToString)
+    & Process.readProcessStdout
+    <&> second toStrictBytes
+    <&> second stripWhitespaceFromEnd
+
+-- | Like `runCommandExpect0`: run the given tool with the given arguments and crash unless it exits with status 0 (success).
+runToolExpect0 :: [Text] -> Tool -> IO ByteString
+runToolExpect0 args tool =
+  tool & runTool args >>= \(ex, stdout) -> do
+    checkStatus0 tool.unTool ex
+    pure stdout
+
+-- | Check whether a command exited 0 or crash.
+checkStatus0 :: FilePath -> ExitCode -> IO ()
+checkStatus0 executable = \case
+  ExitSuccess -> pure ()
+  ExitFailure status -> do
+    diePanic' [fmt|Command `{executable}` did not exit with status 0 (success), but status {status}|]
+
+-- | Strip trailing newlines from a 'ByteString' (despite the name, only @\n@ is stripped, not other whitespace).
+stripWhitespaceFromEnd :: ByteString -> ByteString
+stripWhitespaceFromEnd = ByteString.reverse . ByteString.dropWhile (\w -> w == charToWordUnsafe '\n') . ByteString.reverse
+
+-- | Pretty print a command line in a way that can be copied to bash.
+prettyArgsForBash :: [Text] -> Text
+prettyArgsForBash = Text.intercalate " " . map simpleBashEscape
+
+-- | Simple escaping for bash words. If a word contains anything that’s not an ascii
+-- letter/digit or one of a few often-used special characters, put it in single quotes.
+simpleBashEscape :: Text -> Text
+simpleBashEscape t = do
+  case Text.find (not . isSimple) t of
+    Just _ -> escapeSingleQuote t
+    Nothing -> t
+  where
+    -- a word is simple if it consists only of ascii letters/digits (no spaces or control characters)
+    -- plus a few often-used benign characters like - or .
+    isSimple c =
+      Char.isAsciiLower c
+        || Char.isAsciiUpper c
+        || Char.isDigit c
+        -- These are benign, bash will not interpret them as special characters.
+        || List.elem c ['-', '.', ':', '/']
+    -- Put the word in single quotes.
+    -- If there is a single quote inside the word,
+    -- close the single-quoted word, insert an escaped single quote, and reopen the quotes.
+    escapeSingleQuote t' = "'" <> Text.replace "'" "'\\''" t' <> "'"
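+
+-- Doctest-style examples for the two functions above (hypothetical words):
+--
+-- >>> simpleBashEscape "foo-1.txt"
+-- "foo-1.txt"
+-- >>> simpleBashEscape "it's"
+-- "'it'\\''s'"
+-- >>> prettyArgsForBash ["echo", "hello world"]
+-- "echo 'hello world'"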
diff --git a/users/Profpatsch/mailbox-org/README.md b/users/Profpatsch/mailbox-org/README.md
new file mode 100644
index 0000000000..b84e7b59c1
--- /dev/null
+++ b/users/Profpatsch/mailbox-org/README.md
@@ -0,0 +1,7 @@
+# mailbox-org
+
+Interfacing with the API of <https://mailbox.org/>.
+
+They use [open-xchange](https://www.open-xchange.com/resources/oxpedia) as their App Suite, so we have to work with/reverse engineer their weird API.
+
+Intended so I have a way of uploading Sieve rules into their system semi-automatically.
diff --git a/users/Profpatsch/mailbox-org/default.nix b/users/Profpatsch/mailbox-org/default.nix
new file mode 100644
index 0000000000..73bd28292d
--- /dev/null
+++ b/users/Profpatsch/mailbox-org/default.nix
@@ -0,0 +1,38 @@
+{ depot, pkgs, lib, ... }:
+
+let
+  mailbox-org = pkgs.haskellPackages.mkDerivation {
+    pname = "mailbox-org";
+    version = "0.1.0";
+
+    src = depot.users.Profpatsch.exactSource ./. [
+      ./mailbox-org.cabal
+      ./src/AesonQQ.hs
+      ./MailboxOrg.hs
+    ];
+
+    libraryHaskellDepends = [
+      depot.users.Profpatsch.my-prelude
+      depot.users.Profpatsch.execline.exec-helpers-hs
+      depot.users.Profpatsch.arglib.netencode.haskell
+      pkgs.haskellPackages.pa-prelude
+      pkgs.haskellPackages.pa-label
+      pkgs.haskellPackages.pa-error-tree
+      pkgs.haskellPackages.aeson
+      pkgs.haskellPackages.http-conduit
+      pkgs.haskellPackages.aeson-better-errors
+    ];
+
+    isLibrary = false;
+    isExecutable = true;
+    license = lib.licenses.mit;
+  };
+
+
+in
+lib.pipe mailbox-org [
+  (x: (depot.nix.getBins x [ "mailbox-org" ]).mailbox-org)
+  (depot.users.Profpatsch.arglib.netencode.with-args "mailbox-org" {
+    BINS = depot.nix.getBins pkgs.dovecot_pigeonhole [ "sieve-test" ];
+  })
+]
diff --git a/users/Profpatsch/mailbox-org/mailbox-org.cabal b/users/Profpatsch/mailbox-org/mailbox-org.cabal
new file mode 100644
index 0000000000..a1b041447b
--- /dev/null
+++ b/users/Profpatsch/mailbox-org/mailbox-org.cabal
@@ -0,0 +1,95 @@
+cabal-version:      3.0
+name:               mailbox-org
+version:            0.1.0.0
+author:             Profpatsch
+maintainer:         mail@profpatsch.de
+
+
+common common-options
+  ghc-options:
+      -Wall
+      -Wno-type-defaults
+      -Wunused-packages
+      -Wredundant-constraints
+      -fwarn-missing-deriving-strategies
+
+  -- See https://downloads.haskell.org/ghc/latest/docs/users_guide/exts.html
+  -- for a description of all these extensions
+  default-extensions:
+      -- Infer Applicative instead of Monad where possible
+    ApplicativeDo
+
+    -- Allow literal strings to be Text
+    OverloadedStrings
+
+    -- Syntactic sugar improvements
+    LambdaCase
+    MultiWayIf
+
+    -- Makes the (deprecated) usage of * instead of Data.Kind.Type an error
+    NoStarIsType
+
+    -- Convenient and crucial to deal with ambiguous field names, commonly
+    -- known as RecordDotSyntax
+    OverloadedRecordDot
+
+    -- does not export record fields as functions, use OverloadedRecordDot to access instead
+    NoFieldSelectors
+
+    -- Record punning
+    RecordWildCards
+
+    -- Improved Deriving
+    DerivingStrategies
+    DerivingVia
+
+    -- Type-level strings
+    DataKinds
+
+    -- to enable the `type` keyword in import lists (ormolu uses this automatically)
+    ExplicitNamespaces
+
+  default-language: GHC2021
+
+
+library
+    import: common-options
+
+    hs-source-dirs: src
+
+    exposed-modules:
+        AesonQQ
+
+    build-depends:
+        base >=4.15 && <5,
+        pa-prelude,
+        aeson,
+        PyF,
+        template-haskell
+
+
+
+executable mailbox-org
+    import: common-options
+    main-is: MailboxOrg.hs
+
+    build-depends:
+        base >=4.15 && <5,
+        mailbox-org,
+        my-prelude,
+        pa-prelude,
+        pa-label,
+        pa-error-tree,
+        exec-helpers,
+        netencode,
+        text,
+        directory,
+        filepath,
+        arglib-netencode,
+        random,
+        http-conduit,
+        aeson,
+        aeson-better-errors,
+        bytestring,
+        typed-process,
+        containers,
diff --git a/users/Profpatsch/mailbox-org/src/AesonQQ.hs b/users/Profpatsch/mailbox-org/src/AesonQQ.hs
new file mode 100644
index 0000000000..2ac3d533ae
--- /dev/null
+++ b/users/Profpatsch/mailbox-org/src/AesonQQ.hs
@@ -0,0 +1,24 @@
+{-# LANGUAGE TemplateHaskellQuotes #-}
+
+module AesonQQ where
+
+import Data.Aeson qualified as Json
+import Language.Haskell.TH.Quote (QuasiQuoter)
+import PossehlAnalyticsPrelude
+import PyF qualified
+import PyF.Internal.QQ qualified as PyFConf
+
+aesonQQ :: QuasiQuoter
+aesonQQ =
+  PyF.mkFormatter
+    "aesonQQ"
+    PyF.defaultConfig
+      { PyFConf.delimiters = Just ('|', '|'),
+        PyFConf.postProcess = \exp_ -> do
+          -- TODO: this does not throw an error at compilation time if the json does not parse
+          [|
+            case Json.eitherDecodeStrict' @Json.Value $ textToBytesUtf8 $ stringToText $(exp_) of
+              Left err -> error err
+              Right a -> a
+            |]
+      }
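+
+-- Usage sketch: expressions are spliced between the @|@ delimiters configured
+-- above and must render to valid json source text (cf. @simpleRule@ in
+-- MailboxOrg.hs, which splices @Json.encode@d values), e.g.:
+--
+--   [aesonQQ|{ "rulename": |someRule & Json.encode| }|]  -- someRule: any ToJSON value (hypothetical)
+--
+-- If the spliced result is not valid json, the @error@ above fires at runtime
+-- (not at compile time, see the TODO).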
diff --git a/users/Profpatsch/my-prelude/README.md b/users/Profpatsch/my-prelude/README.md
new file mode 100644
index 0000000000..2cc068579a
--- /dev/null
+++ b/users/Profpatsch/my-prelude/README.md
@@ -0,0 +1,42 @@
+# My Haskell Prelude
+
+Contains various modules I’ve found useful when writing Haskell.
+
+## Contents
+
+A short overview:
+
+### `MyPrelude.hs`
+
+A collection of re-exports and extra functions. This does *not* replace the `Prelude` module from `base`, but rather should be imported *in addition* to `Prelude`.
+
+Stuff like bad functions from prelude (partial functions, or plainly horrible ones) is handled by a custom `.hlint` file, which you can find in [../.hlint.yaml](../.hlint.yaml).
+
+The common style of Haskell these modules try to enable is what I call “left-to-right Haskell”,
+where one mostly prefers forward-chaining operators like `&`/`<&>`/`>>=` to backwards operators like `$`/`<$>`/`<=<`. In addition, all transformation functions should follow the naming scheme `aToB` instead of `B.fromA`, e.g. `Text.unpack`/`Text.pack` -> `textToString`/`stringToText`. The prelude includes a bunch of text conversion functions in the same style that one needs all the time.
+
+These have been battle-tested in a production codebase of ~30k lines of Haskell.
+
+### `Label.hs`
+
+A very useful collection of anonymous labelled tuples and enums of size 2 and 3. Assumes GHC 9.2 or newer for `RecordDotSyntax` support.
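+
+A minimal usage sketch (hypothetical values; `T2`/`label` come from `Label.hs`, and fields are accessed via `RecordDotSyntax`):
+
+```haskell
+creds :: T2 "email" Text "password" Text
+creds = T2 (label @"email" "mail@example.com") (label @"password" "hunter2")
+
+mail :: Text
+mail = creds.email
+```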
+
+### `Pretty.hs`
+
+Colorful multiline pretty-printing of Haskell values.
+
+### `Test.hs`
+
+A wrapper around `hspec` which produces colorful test diffs.
+
+### `Aeson.hs`
+
+Helpers around Json parsing.
+
+### `Data.Error.Tree`
+
+Collect errors (from [`Data.Error`](https://hackage.haskell.org/package/error-1.0.0.0/docs/Data-Error.html)) into a tree, then display them in a nested fashion. Super useful for e.g. collecting and displaying nested parsing errors.
+
+### `RunCommand.hs`
+
+A module wrapping the process API with some helpful defaults for executing commands and printing what is executed to stderr.
diff --git a/users/Profpatsch/my-prelude/default.nix b/users/Profpatsch/my-prelude/default.nix
new file mode 100644
index 0000000000..4bca8ea49f
--- /dev/null
+++ b/users/Profpatsch/my-prelude/default.nix
@@ -0,0 +1,52 @@
+{ depot, pkgs, lib, ... }:
+
+pkgs.haskellPackages.mkDerivation {
+  pname = "my-prelude";
+  version = "0.0.1-unreleased";
+
+  src = depot.users.Profpatsch.exactSource ./. [
+    ./my-prelude.cabal
+    ./src/Aeson.hs
+    ./src/Arg.hs
+    ./src/AtLeast.hs
+    ./src/MyPrelude.hs
+    ./src/Test.hs
+    ./src/Parse.hs
+    ./src/Pretty.hs
+    ./src/Seconds.hs
+    ./src/Tool.hs
+    ./src/ValidationParseT.hs
+    ./src/Postgres/Decoder.hs
+    ./src/Postgres/MonadPostgres.hs
+  ];
+
+  isLibrary = true;
+
+  libraryHaskellDepends = [
+    pkgs.haskellPackages.pa-prelude
+    pkgs.haskellPackages.pa-label
+    pkgs.haskellPackages.pa-error-tree
+    pkgs.haskellPackages.pa-json
+    pkgs.haskellPackages.pa-pretty
+    pkgs.haskellPackages.pa-field-parser
+    pkgs.haskellPackages.aeson-better-errors
+    pkgs.haskellPackages.foldl
+    pkgs.haskellPackages.resource-pool
+    pkgs.haskellPackages.error
+    pkgs.haskellPackages.hs-opentelemetry-api
+    pkgs.haskellPackages.hspec
+    pkgs.haskellPackages.hspec-expectations-pretty-diff
+    pkgs.haskellPackages.monad-logger
+    pkgs.haskellPackages.postgresql-simple
+    pkgs.haskellPackages.profunctors
+    pkgs.haskellPackages.PyF
+    pkgs.haskellPackages.semigroupoids
+    pkgs.haskellPackages.these
+    pkgs.haskellPackages.unliftio
+    pkgs.haskellPackages.validation-selective
+    pkgs.haskellPackages.vector
+  ];
+
+  license = lib.licenses.mit;
+
+}
diff --git a/users/Profpatsch/my-prelude/my-prelude.cabal b/users/Profpatsch/my-prelude/my-prelude.cabal
new file mode 100644
index 0000000000..2f7882a526
--- /dev/null
+++ b/users/Profpatsch/my-prelude/my-prelude.cabal
@@ -0,0 +1,121 @@
+cabal-version:      3.0
+name:               my-prelude
+version:            0.0.1.0
+author:             Profpatsch
+maintainer:         mail@profpatsch.de
+
+common common-options
+  ghc-options:
+      -Wall
+      -Wno-type-defaults
+      -Wunused-packages
+      -Wredundant-constraints
+      -fwarn-missing-deriving-strategies
+
+  -- See https://downloads.haskell.org/ghc/latest/docs/users_guide/exts.html
+  -- for a description of all these extensions
+  default-extensions:
+      -- Infer Applicative instead of Monad where possible
+    ApplicativeDo
+
+    -- Allow literal strings to be Text
+    OverloadedStrings
+
+    -- Syntactic sugar improvements
+    LambdaCase
+    MultiWayIf
+
+    -- Makes the (deprecated) usage of * instead of Data.Kind.Type an error
+    NoStarIsType
+
+    -- Convenient and crucial to deal with ambiguous field names, commonly
+    -- known as RecordDotSyntax
+    OverloadedRecordDot
+
+    -- does not export record fields as functions, use OverloadedRecordDot to access instead
+    NoFieldSelectors
+
+    -- Record punning
+    RecordWildCards
+
+    -- Improved Deriving
+    DerivingStrategies
+    DerivingVia
+
+    -- Type-level strings
+    DataKinds
+
+    -- to enable the `type` keyword in import lists (ormolu uses this automatically)
+    ExplicitNamespaces
+
+    -- allows defining pattern synonyms, but also the `import Foo (pattern FooPattern)` import syntax
+    PatternSynonyms
+  default-language: GHC2021
+
+
+library
+    import: common-options
+    hs-source-dirs: src
+    exposed-modules:
+      MyPrelude
+      Aeson
+      Arg
+      AtLeast
+      Test
+      Postgres.Decoder
+      Postgres.MonadPostgres
+      ValidationParseT
+      Parse
+      Pretty
+      Seconds
+      Tool
+
+    -- Modules included in this executable, other than Main.
+    -- other-modules:
+
+    -- LANGUAGE extensions used by modules in this package.
+    -- other-extensions:
+    build-depends:
+       base >=4.15 && <5
+     , pa-prelude
+     , pa-label
+     , pa-error-tree
+     , pa-json
+     , pa-pretty
+     , pa-field-parser
+     , aeson
+     , aeson-better-errors
+     , bytestring
+     , containers
+     , foldl
+     , unordered-containers
+     , resource-pool
+     , resourcet
+     , scientific
+     , time
+     , error
+     , exceptions
+     , filepath
+     , hspec
+     , hspec-expectations-pretty-diff
+     , hs-opentelemetry-api
+     , monad-logger
+     , mtl
+     , postgresql-simple
+     , profunctors
+     , PyF
+     , semigroupoids
+     , selective
+     , template-haskell
+     , text
+     , these
+     , unix
+     , unliftio
+     , validation-selective
+     , vector
+     , ghc-boot
+     -- for Pretty
+     , aeson-pretty
+     , hscolour
+     , ansi-terminal
+     , nicify-lib
diff --git a/users/Profpatsch/my-prelude/src/Aeson.hs b/users/Profpatsch/my-prelude/src/Aeson.hs
new file mode 100644
index 0000000000..73d6116082
--- /dev/null
+++ b/users/Profpatsch/my-prelude/src/Aeson.hs
@@ -0,0 +1,176 @@
+{-# LANGUAGE DataKinds #-}
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE FlexibleInstances #-}
+{-# LANGUAGE GHC2021 #-}
+{-# LANGUAGE KindSignatures #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+{-# LANGUAGE UndecidableInstances #-}
+
+module Aeson where
+
+import Data.Aeson (Value (..))
+import Data.Aeson.BetterErrors qualified as Json
+import Data.Aeson.KeyMap qualified as KeyMap
+import Data.Error.Tree
+import Data.Maybe (catMaybes)
+import Data.Vector qualified as Vector
+import Label
+import PossehlAnalyticsPrelude
+import Test.Hspec (describe, it, shouldBe)
+import Test.Hspec qualified as Hspec
+
+-- | Convert a 'Json.ParseError' to a corresponding 'ErrorTree'
+parseErrorTree :: Error -> Json.ParseError Error -> ErrorTree
+parseErrorTree contextMsg errs =
+  errs
+    & Json.displayError prettyError
+    <&> newError
+    & nonEmpty
+    & \case
+      Nothing -> singleError contextMsg
+      Just errs' -> errorTree contextMsg errs'
+
+-- | Parse a key from the object, à la 'Json.key', return a labelled value.
+--
+-- We don’t provide a version that infers the json object key,
+-- since that conflates internal naming with the external API, which is dangerous.
+--
+-- @@
+-- do
+--   txt <- keyLabel @"myLabel" "jsonKeyName" Json.asText
+--   pure (txt :: Label "myLabel" Text)
+-- @@
+keyLabel ::
+  forall label err m a.
+  Monad m =>
+  Text ->
+  Json.ParseT err m a ->
+  Json.ParseT err m (Label label a)
+keyLabel = do
+  keyLabel' (Proxy @label)
+
+-- | Parse a key from the object, à la 'Json.key', return a labelled value.
+-- Version of 'keyLabel' that requires a proxy.
+--
+-- @@
+-- do
+--   txt <- keyLabel' (Proxy @"myLabel") "jsonKeyName" Json.asText
+--   pure (txt :: Label "myLabel" Text)
+-- @@
+keyLabel' ::
+  forall label err m a.
+  Monad m =>
+  Proxy label ->
+  Text ->
+  Json.ParseT err m a ->
+  Json.ParseT err m (Label label a)
+keyLabel' Proxy key parser = label @label <$> Json.key key parser
+
+-- | Parse an optional key from the object, à la 'Json.keyMay', return a labelled value.
+--
+-- We don’t provide a version that infers the json object key,
+-- since that conflates internal naming with the external API, which is dangerous.
+--
+-- @@
+-- do
+--   txt <- keyLabelMay @"myLabel" "jsonKeyName" Json.asText
+--   pure (txt :: Label "myLabel" (Maybe Text))
+-- @@
+keyLabelMay ::
+  forall label err m a.
+  Monad m =>
+  Text ->
+  Json.ParseT err m a ->
+  Json.ParseT err m (Label label (Maybe a))
+keyLabelMay = do
+  keyLabelMay' (Proxy @label)
+
+-- | Parse an optional key from the object, à la 'Json.keyMay', return a labelled value.
+-- Version of 'keyLabelMay' that requires a proxy.
+--
+-- @@
+-- do
+--   txt <- keyLabelMay' (Proxy @"myLabel") "jsonKeyName" Json.asText
+--   pure (txt :: Label "myLabel" (Maybe Text))
+-- @@
+keyLabelMay' ::
+  forall label err m a.
+  Monad m =>
+  Proxy label ->
+  Text ->
+  Json.ParseT err m a ->
+  Json.ParseT err m (Label label (Maybe a))
+keyLabelMay' Proxy key parser = label @label <$> Json.keyMay key parser
+
+-- | Like 'Json.key', but allows a list of keys that are tried in order.
+--
+-- This is intended for renaming keys in an object.
+-- The first key is the most up-to-date version of a key, the others are for backward-compatibility.
+--
+-- If a key (new or old) exists, the inner parser will always be executed for that key.
+keyRenamed :: Monad m => NonEmpty Text -> Json.ParseT err m a -> Json.ParseT err m a
+keyRenamed (newKey :| oldKeys) inner =
+  keyRenamedTryOldKeys oldKeys inner >>= \case
+    Nothing -> Json.key newKey inner
+    Just parse -> parse
+
+-- | Like 'Json.keyMay', but allows a list of keys that are tried in order.
+--
+-- This is intended for renaming keys in an object.
+-- The first key is the most up-to-date version of a key, the others are for backward-compatibility.
+--
+-- If a key (new or old) exists, the inner parser will always be executed for that key.
+keyRenamedMay :: Monad m => NonEmpty Text -> Json.ParseT err m a -> Json.ParseT err m (Maybe a)
+keyRenamedMay (newKey :| oldKeys) inner =
+  keyRenamedTryOldKeys oldKeys inner >>= \case
+    Nothing -> Json.keyMay newKey inner
+    Just parse -> Just <$> parse
+
+-- | Helper function for 'keyRenamed' and 'keyRenamedMay' that returns the parser for the first old key that exists, if any.
+keyRenamedTryOldKeys :: Monad m => [Text] -> Json.ParseT err m a -> Json.ParseT err m (Maybe (Json.ParseT err m a))
+keyRenamedTryOldKeys oldKeys inner = do
+  oldKeys & traverse tryOld <&> catMaybes <&> nonEmpty <&> \case
+    Nothing -> Nothing
+    Just (old :| _moreOld) -> Just old
+  where
+    tryOld key =
+      Json.keyMay key (pure ()) <&> \case
+        Just () -> Just $ Json.key key inner
+        Nothing -> Nothing
+
+test_keyRenamed :: Hspec.Spec
+test_keyRenamed = do
+  describe "keyRenamed" $ do
+    let parser = keyRenamed ("new" :| ["old"]) Json.asText
+    let p = Json.parseValue @() parser
+    it "accepts the new key and the old key" $ do
+      p (Object (KeyMap.singleton "new" (String "text")))
+        `shouldBe` (Right "text")
+      p (Object (KeyMap.singleton "old" (String "text")))
+        `shouldBe` (Right "text")
+    it "fails with the old key in the error if the inner parser is wrong" $ do
+      p (Object (KeyMap.singleton "old" Null))
+        `shouldBe` (Left (Json.BadSchema [Json.ObjectKey "old"] (Json.WrongType Json.TyString Null)))
+    it "fails with the new key in the error if the inner parser is wrong" $ do
+      p (Object (KeyMap.singleton "new" Null))
+        `shouldBe` (Left (Json.BadSchema [Json.ObjectKey "new"] (Json.WrongType Json.TyString Null)))
+    it "fails if the key is missing" $ do
+      p (Object KeyMap.empty)
+        `shouldBe` (Left (Json.BadSchema [] (Json.KeyMissing "new")))
+  describe "keyRenamedMay" $ do
+    let parser = keyRenamedMay ("new" :| ["old"]) Json.asText
+    let p = Json.parseValue @() parser
+    it "accepts the new key and the old key" $ do
+      p (Object (KeyMap.singleton "new" (String "text")))
+        `shouldBe` (Right (Just "text"))
+      p (Object (KeyMap.singleton "old" (String "text")))
+        `shouldBe` (Right (Just "text"))
+    it "allows the old and new key to be missing" $ do
+      p (Object KeyMap.empty)
+        `shouldBe` (Right Nothing)
+
+-- | Create a json array from a list of json values.
+jsonArray :: [Value] -> Value
+jsonArray xs = xs & Vector.fromList & Array
diff --git a/users/Profpatsch/my-prelude/src/Arg.hs b/users/Profpatsch/my-prelude/src/Arg.hs
new file mode 100644
index 0000000000..a6ffa90924
--- /dev/null
+++ b/users/Profpatsch/my-prelude/src/Arg.hs
@@ -0,0 +1,34 @@
+module Arg where
+
+import Data.String (IsString)
+import GHC.Exts (IsList)
+import GHC.TypeLits (Symbol)
+
+-- | Wrap a function argument into this helper to give it a better description for the caller without disturbing the callsite too much.
+--
+-- This has instances for IsString and Num, meaning if the caller is usually a string or number literal, it should Just Work.
+--
+-- e.g.
+--
+-- @
+-- myFoo :: Arg "used as the name in error message" Text -> IO ()
+-- myFoo (Arg name) = …
+-- @
+--
+-- Will display the description in the inferred type of the callsite.
+--
+-- Due to IsString you can call @myFoo@ like
+--
+-- @myFoo "name in error"@
+--
+-- This is mostly intended for literals; if you want to wrap arbitrary data, use @Label@.
+newtype Arg (description :: Symbol) a = Arg {unArg :: a}
+  deriving newtype
+    ( Show,
+      Eq,
+      IsString,
+      IsList,
+      Num,
+      Monoid,
+      Semigroup
+    )
diff --git a/users/Profpatsch/my-prelude/src/AtLeast.hs b/users/Profpatsch/my-prelude/src/AtLeast.hs
new file mode 100644
index 0000000000..3857c3a7cf
--- /dev/null
+++ b/users/Profpatsch/my-prelude/src/AtLeast.hs
@@ -0,0 +1,51 @@
+{-# LANGUAGE QuasiQuotes #-}
+
+module AtLeast where
+
+import Data.Aeson (FromJSON (parseJSON))
+import Data.Aeson.BetterErrors qualified as Json
+import FieldParser (FieldParser)
+import FieldParser qualified as Field
+import GHC.Records (HasField (..))
+import GHC.TypeLits (KnownNat, natVal)
+import PossehlAnalyticsPrelude
+  ( Natural,
+    Proxy (Proxy),
+    fmt,
+    prettyError,
+    (&),
+  )
+
+-- | A natural number that must be at least as big as the type literal.
+newtype AtLeast (min :: Natural) num = AtLeast num
+  -- Just use the instances of the wrapped number type
+  deriving newtype (Eq, Show)
+
+-- | This is the “destructor” for `AtLeast`; because of the phantom type (@min@) it cannot be inferred automatically.
+instance HasField "unAtLeast" (AtLeast min num) num where
+  getField (AtLeast num) = num
+
+parseAtLeast ::
+  forall min num.
+  (KnownNat min, Integral num, Show num) =>
+  FieldParser num (AtLeast min num)
+parseAtLeast =
+  let minInt = natVal (Proxy @min)
+   in Field.FieldParser $ \from ->
+        if from >= (minInt & fromIntegral)
+          then Right (AtLeast from)
+          else Left [fmt|Must be at least {minInt & show} but was {from & show}|]
+
+instance
+  (KnownNat min, FromJSON num, Integral num, Bounded num, Show num) =>
+  FromJSON (AtLeast min num)
+  where
+  parseJSON =
+    Json.toAesonParser
+      prettyError
+      ( do
+          num <- Json.fromAesonParser @_ @num
+          case Field.runFieldParser (parseAtLeast @min @num) num of
+            Left err -> Json.throwCustomError err
+            Right a -> pure a
+      )
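+
+-- A quick behaviour sketch (hypothetical values):
+--
+--   Field.runFieldParser (parseAtLeast @3 @Int) 5  -- Right (AtLeast 5)
+--   Field.runFieldParser (parseAtLeast @3 @Int) 2  -- Left: “Must be at least 3 but was 2”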
diff --git a/users/Profpatsch/my-prelude/src/MyPrelude.hs b/users/Profpatsch/my-prelude/src/MyPrelude.hs
new file mode 100644
index 0000000000..880983c47e
--- /dev/null
+++ b/users/Profpatsch/my-prelude/src/MyPrelude.hs
@@ -0,0 +1,776 @@
+{-# LANGUAGE ImplicitParams #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE MagicHash #-}
+{-# LANGUAGE ViewPatterns #-}
+
+module MyPrelude
+  ( -- * Text conversions
+    Text,
+    ByteString,
+    Word8,
+    fmt,
+    textToString,
+    stringToText,
+    stringToBytesUtf8,
+    showToText,
+    textToBytesUtf8,
+    textToBytesUtf8Lazy,
+    bytesToTextUtf8,
+    bytesToTextUtf8Lazy,
+    bytesToTextUtf8Lenient,
+    bytesToTextUtf8LenientLazy,
+    bytesToTextUtf8Unsafe,
+    bytesToTextUtf8UnsafeLazy,
+    toStrict,
+    toLazy,
+    toStrictBytes,
+    toLazyBytes,
+    charToWordUnsafe,
+
+    -- * IO
+    putStrLn,
+    putStderrLn,
+    exitWithMessage,
+
+    -- * WIP code
+    todo,
+
+    -- * Records
+    HasField,
+
+    -- * Control flow
+    doAs,
+    (&),
+    (<&>),
+    (<|>),
+    foldMap1,
+    foldMap',
+    join,
+    when,
+    unless,
+    guard,
+    ExceptT (..),
+    runExceptT,
+    MonadThrow,
+    throwM,
+    MonadIO,
+    liftIO,
+    MonadReader,
+    asks,
+    Bifunctor,
+    first,
+    second,
+    bimap,
+    both,
+    foldMap,
+    fold,
+    foldl',
+    fromMaybe,
+    mapMaybe,
+    findMaybe,
+    Traversable,
+    for,
+    for_,
+    traverse,
+    traverse_,
+    traverseFold,
+    traverseFold1,
+    traverseFoldDefault,
+    MonadTrans,
+    lift,
+
+    -- * Data types
+    Coercible,
+    coerce,
+    Proxy (Proxy),
+    Map,
+    annotate,
+    Validation (Success, Failure),
+    failure,
+    successes,
+    failures,
+    traverseValidate,
+    traverseValidateM,
+    traverseValidateM_,
+    eitherToValidation,
+    eitherToListValidation,
+    validationToEither,
+    These (This, That, These),
+    eitherToThese,
+    eitherToListThese,
+    validationToThese,
+    thenThese,
+    thenValidate,
+    thenValidateM,
+    NonEmpty ((:|)),
+    pattern IsEmpty,
+    pattern IsNonEmpty,
+    singleton,
+    nonEmpty,
+    nonEmptyDef,
+    overNonEmpty,
+    zipNonEmpty,
+    zipWithNonEmpty,
+    zip3NonEmpty,
+    zipWith3NonEmpty,
+    zip4NonEmpty,
+    toList,
+    lengthNatural,
+    maximum1,
+    minimum1,
+    maximumBy1,
+    minimumBy1,
+    Vector,
+    Generic,
+    Lift,
+    Semigroup,
+    sconcat,
+    Monoid,
+    mconcat,
+    ifTrue,
+    ifExists,
+    Void,
+    absurd,
+    Identity (Identity, runIdentity),
+    Natural,
+    intToNatural,
+    Scientific,
+    Contravariant,
+    contramap,
+    (>$<),
+    (>&<),
+    Profunctor,
+    dimap,
+    lmap,
+    rmap,
+    Semigroupoid,
+    Category,
+    (>>>),
+    (&>>),
+    Any,
+
+    -- * Enum definition
+    inverseFunction,
+    inverseMap,
+    enumerateAll,
+
+    -- * Map helpers
+    mapFromListOn,
+    mapFromListOnMerge,
+
+    -- * Error handling
+    HasCallStack,
+    module Data.Error,
+  )
+where
+
+import Control.Applicative ((<|>))
+import Control.Category (Category, (>>>))
+import Control.Foldl.NonEmpty qualified as Foldl1
+import Control.Monad (guard, join, unless, when)
+import Control.Monad.Catch (MonadThrow (throwM))
+import Control.Monad.Except
+  ( ExceptT (..),
+    runExceptT,
+  )
+import Control.Monad.IO.Class (MonadIO, liftIO)
+import Control.Monad.Identity (Identity (Identity))
+import Control.Monad.Reader (MonadReader, asks)
+import Control.Monad.Trans (MonadTrans (lift))
+import Data.Bifunctor (Bifunctor, bimap, first, second)
+import Data.ByteString
+  ( ByteString,
+  )
+import Data.ByteString.Lazy qualified
+import Data.Char qualified
+import Data.Coerce (Coercible, coerce)
+import Data.Data (Proxy (Proxy))
+import Data.Error
+import Data.Foldable (Foldable (foldMap', toList), fold, foldl', for_, sequenceA_, traverse_)
+import Data.Foldable qualified as Foldable
+import Data.Function ((&))
+import Data.Functor ((<&>))
+import Data.Functor.Contravariant (Contravariant (contramap), (>$<))
+import Data.Functor.Identity (Identity (runIdentity))
+import Data.List (zip4)
+import Data.List.NonEmpty (NonEmpty ((:|)), nonEmpty)
+import Data.List.NonEmpty qualified as NonEmpty
+import Data.Map.Strict
+  ( Map,
+  )
+import Data.Map.Strict qualified as Map
+import Data.Maybe (fromMaybe, mapMaybe)
+import Data.Maybe qualified as Maybe
+import Data.Profunctor (Profunctor, dimap, lmap, rmap)
+import Data.Scientific (Scientific)
+import Data.Semigroup (sconcat)
+import Data.Semigroup.Foldable (Foldable1 (fold1), foldMap1)
+import Data.Semigroup.Traversable (Traversable1)
+import Data.Semigroupoid (Semigroupoid (o))
+import Data.Text
+  ( Text,
+  )
+import Data.Text qualified
+import Data.Text.Encoding qualified
+import Data.Text.Encoding.Error qualified
+import Data.Text.Lazy qualified
+import Data.Text.Lazy.Encoding qualified
+import Data.These (These (That, These, This))
+import Data.Traversable (for)
+import Data.Vector (Vector)
+import Data.Void (Void, absurd)
+import Data.Word (Word8)
+import GHC.Exception (errorCallWithCallStackException)
+import GHC.Exts (Any, RuntimeRep, TYPE, raise#)
+import GHC.Generics (Generic)
+import GHC.Natural (Natural)
+import GHC.Records (HasField)
+import GHC.Stack (HasCallStack)
+import GHC.Utils.Encoding qualified as GHC
+import Language.Haskell.TH.Syntax (Lift)
+import PyF (fmt)
+import System.Exit qualified
+import System.IO qualified
+import Validation
+  ( Validation (Failure, Success),
+    eitherToValidation,
+    failure,
+    failures,
+    successes,
+    validationToEither,
+  )
+
+-- | Mark a `do`-block with the type of the Monad/Applicative it uses.
+-- Only intended for reading ease and making code easier to understand,
+-- especially do-blocks that use unconventional monads (like Maybe or List).
+--
+-- Example:
+--
+-- @
+-- doAs @Maybe $ do
+--  a <- Just 'a'
+--  b <- Just 'b'
+--  pure (a, b)
+-- @
+doAs :: forall m a. m a -> m a
+doAs = id
+
+-- | Forward-applying 'contramap', like '&'/'$' and '<&>'/'<$>' but for '>$<'.
+(>&<) :: (Contravariant f) => f b -> (a -> b) -> f a
+(>&<) = flip contramap
+
+infixl 5 >&<
+
+-- | Forward semigroupoid application. The same as '(>>>)', but 'Semigroupoid' is not a superclass of 'Category' (yet).
+--
+-- Specialized examples:
+--
+-- @
+-- for functions : (a -> b) -> (b -> c) -> (a -> c)
+-- for Folds: Fold a b -> Fold b c -> Fold a c
+-- @
+(&>>) :: (Semigroupoid s) => s a b -> s b c -> s a c
+(&>>) = flip Data.Semigroupoid.o
+
+-- like >>>
+infixr 1 &>>
+
+-- | encode a Text to a UTF-8 encoded Bytestring
+textToBytesUtf8 :: Text -> ByteString
+textToBytesUtf8 = Data.Text.Encoding.encodeUtf8
+
+-- | encode a lazy Text to a UTF-8 encoded lazy Bytestring
+textToBytesUtf8Lazy :: Data.Text.Lazy.Text -> Data.ByteString.Lazy.ByteString
+textToBytesUtf8Lazy = Data.Text.Lazy.Encoding.encodeUtf8
+
+bytesToTextUtf8 :: ByteString -> Either Error Text
+bytesToTextUtf8 = first exceptionToError . Data.Text.Encoding.decodeUtf8'
+
+bytesToTextUtf8Lazy :: Data.ByteString.Lazy.ByteString -> Either Error Data.Text.Lazy.Text
+bytesToTextUtf8Lazy = first exceptionToError . Data.Text.Lazy.Encoding.decodeUtf8'
+
+-- | decode a Text from a ByteString that is assumed to be UTF-8 (crash if that is not the case)
+bytesToTextUtf8Unsafe :: ByteString -> Text
+bytesToTextUtf8Unsafe = Data.Text.Encoding.decodeUtf8
+
+-- | decode a Text from a ByteString that is assumed to be UTF-8 (crash if that is not the case)
+bytesToTextUtf8UnsafeLazy :: Data.ByteString.Lazy.ByteString -> Data.Text.Lazy.Text
+bytesToTextUtf8UnsafeLazy = Data.Text.Lazy.Encoding.decodeUtf8
+
+-- | decode a Text from a ByteString that is assumed to be UTF-8,
+-- replacing non-UTF-8 bytes with the replacement char U+FFFD.
+bytesToTextUtf8Lenient :: Data.ByteString.ByteString -> Data.Text.Text
+bytesToTextUtf8Lenient =
+  Data.Text.Encoding.decodeUtf8With Data.Text.Encoding.Error.lenientDecode
+
+-- | decode a lazy Text from a lazy ByteString that is assumed to be UTF-8,
+-- replacing non-UTF-8 bytes with the replacement char U+FFFD.
+bytesToTextUtf8LenientLazy :: Data.ByteString.Lazy.ByteString -> Data.Text.Lazy.Text
+bytesToTextUtf8LenientLazy =
+  Data.Text.Lazy.Encoding.decodeUtf8With Data.Text.Encoding.Error.lenientDecode
+
+-- | Make a lazy 'Text' strict.
+toStrict :: Data.Text.Lazy.Text -> Text
+toStrict = Data.Text.Lazy.toStrict
+
+-- | Make a strict 'Text' lazy.
+toLazy :: Text -> Data.Text.Lazy.Text
+toLazy = Data.Text.Lazy.fromStrict
+
+-- | Make a lazy 'ByteString' strict.
+toStrictBytes :: Data.ByteString.Lazy.ByteString -> ByteString
+toStrictBytes = Data.ByteString.Lazy.toStrict
+
+-- | Make a strict 'ByteString' lazy.
+toLazyBytes :: ByteString -> Data.ByteString.Lazy.ByteString
+toLazyBytes = Data.ByteString.Lazy.fromStrict
+
+-- | Convert a (performant) 'Text' into an (imperformant) list-of-char 'String'.
+--
+-- Some libraries (like @time@ or @network-uri@) still use the `String` as their interface. We only want to convert to string at the edges, otherwise use 'Text'.
+--
+-- ATTN: Don’t use `String` in code if you can avoid it, prefer `Text` instead.
+textToString :: Text -> String
+textToString = Data.Text.unpack
+
+-- | Convert an (imperformant) list-of-char 'String' into a (performant) 'Text' .
+--
+-- Some libraries (like @time@ or @network-uri@) still use the `String` as their interface. We want to convert 'String' to 'Text' as soon as possible and only use 'Text' in our code.
+--
+-- ATTN: Don’t use `String` in code if you can avoid it, prefer `Text` instead.
+stringToText :: String -> Text
+stringToText = Data.Text.pack
+
+-- | Encode a String to an UTF-8 encoded Bytestring
+--
+-- ATTN: Don’t use `String` in code if you can avoid it, prefer `Text` instead.
+stringToBytesUtf8 :: String -> ByteString
+-- TODO(Profpatsch): use a stable interface
+stringToBytesUtf8 = GHC.utf8EncodeByteString
+
+-- | Like `show`, but generate a 'Text'
+--
+-- ATTN: This goes via `String` and thus is fairly inefficient.
+-- We should add a good display library at one point.
+--
+-- ATTN: unlike `show`, this forces the whole value of @a@,
+-- so only use it if you want to display the whole thing.
+showToText :: (Show a) => a -> Text
+showToText = stringToText . show
+
+-- | Unsafe conversion between 'Char' and 'Word8'. This is a no-op and
+-- silently truncates Chars > '\255' to 8 bits. It is provided as a
+-- convenience for ByteString construction.
+--
+-- Use if you want to get the 'Word8' representation of a character literal.
+-- Don’t use on arbitrary characters!
+--
+-- >>> charToWordUnsafe ','
+-- 44
+charToWordUnsafe :: Char -> Word8
+{-# INLINE charToWordUnsafe #-}
+charToWordUnsafe = fromIntegral . Data.Char.ord
+
+pattern IsEmpty :: [a]
+pattern IsEmpty <- (null -> True)
+  where
+    IsEmpty = []
+
+pattern IsNonEmpty :: NonEmpty a -> [a]
+pattern IsNonEmpty n <- (nonEmpty -> Just n)
+  where
+    IsNonEmpty n = toList n
+
+{-# COMPLETE IsEmpty, IsNonEmpty #-}
+
+-- | Single element in a (non-empty) list.
+singleton :: a -> NonEmpty a
+singleton a = a :| []
+
+-- | If the given list is empty, use the given default element and return a non-empty list.
+nonEmptyDef :: a -> [a] -> NonEmpty a
+nonEmptyDef def xs =
+  xs & nonEmpty & \case
+    Nothing -> def :| []
+    Just ne -> ne
+
+-- | If the list is not empty, run the given function with a NonEmpty list, otherwise just return []
+overNonEmpty :: (Applicative f) => (NonEmpty a -> f [b]) -> [a] -> f [b]
+overNonEmpty f xs = case xs of
+  IsEmpty -> pure []
+  IsNonEmpty xs' -> f xs'
+
+-- | Zip two non-empty lists.
+zipNonEmpty :: NonEmpty a -> NonEmpty b -> NonEmpty (a, b)
+{-# INLINE zipNonEmpty #-}
+zipNonEmpty ~(a :| as) ~(b :| bs) = (a, b) :| zip as bs
+
+-- | Zip two non-empty lists, combining them with the given function
+zipWithNonEmpty :: (a -> b -> c) -> NonEmpty a -> NonEmpty b -> NonEmpty c
+{-# INLINE zipWithNonEmpty #-}
+zipWithNonEmpty = NonEmpty.zipWith
+
+-- | Zip three non-empty lists.
+zip3NonEmpty :: NonEmpty a -> NonEmpty b -> NonEmpty c -> NonEmpty (a, b, c)
+{-# INLINE zip3NonEmpty #-}
+zip3NonEmpty ~(a :| as) ~(b :| bs) ~(c :| cs) = (a, b, c) :| zip3 as bs cs
+
+-- | Zip three non-empty lists, combining them with the given function
+zipWith3NonEmpty :: (a -> b -> c -> d) -> NonEmpty a -> NonEmpty b -> NonEmpty c -> NonEmpty d
+{-# INLINE zipWith3NonEmpty #-}
+zipWith3NonEmpty f ~(x :| xs) ~(y :| ys) ~(z :| zs) = f x y z :| zipWith3 f xs ys zs
+
+-- | Zip four non-empty lists
+zip4NonEmpty :: NonEmpty a -> NonEmpty b -> NonEmpty c -> NonEmpty d -> NonEmpty (a, b, c, d)
+{-# INLINE zip4NonEmpty #-}
+zip4NonEmpty ~(a :| as) ~(b :| bs) ~(c :| cs) ~(d :| ds) = (a, b, c, d) :| zip4 as bs cs ds
+
+-- | We don’t want to use Foldable’s `length`, because it is too polymorphic and can lead to bugs.
+-- Only list-y things should have a length.
+class (Foldable f) => Lengthy f
+
+instance Lengthy []
+
+instance Lengthy NonEmpty
+
+instance Lengthy Vector
+
+lengthNatural :: (Lengthy f) => f a -> Natural
+lengthNatural xs =
+  xs
+    & Foldable.length
+    -- length can never be negative or something went really, really wrong
+    & fromIntegral @Int @Natural
+
+-- | @O(n)@. Get the maximum element from a non-empty structure (strict).
+maximum1 :: (Foldable1 f, Ord a) => f a -> a
+maximum1 = Foldl1.fold1 Foldl1.maximum
+
+-- | @O(n)@. Get the maximum element from a non-empty structure, using the given comparator (strict).
+maximumBy1 :: (Foldable1 f) => (a -> a -> Ordering) -> f a -> a
+maximumBy1 f = Foldl1.fold1 (Foldl1.maximumBy f)
+
+-- | @O(n)@. Get the minimum element from a non-empty structure (strict).
+minimum1 :: (Foldable1 f, Ord a) => f a -> a
+minimum1 = Foldl1.fold1 Foldl1.minimum
+
+-- | @O(n)@. Get the minimum element from a non-empty structure, using the given comparator (strict).
+minimumBy1 :: (Foldable1 f) => (a -> a -> Ordering) -> f a -> a
+minimumBy1 f = Foldl1.fold1 (Foldl1.minimumBy f)
+
+-- | Annotate a 'Maybe' with an error message and turn it into an 'Either'.
+annotate :: err -> Maybe a -> Either err a
+annotate err = \case
+  Nothing -> Left err
+  Just a -> Right a
+
+-- | Map the same function over both sides of a Bifunctor (e.g. a tuple).
+both :: (Bifunctor bi) => (a -> b) -> bi a a -> bi b b
+both f = bimap f f
+
+-- | Find the first element for which pred returns `Just a`, and return the `a`.
+--
+-- Example:
+-- @
+-- >>> :set -XTypeApplications
+-- >>> import qualified Text.Read
+--
+-- >>> findMaybe (Text.Read.readMaybe @Int) ["foo"]
+-- Nothing
+-- >>> findMaybe (Text.Read.readMaybe @Int) ["foo", "34.40", "34", "abc"]
+-- Just 34
+findMaybe :: (Foldable t) => (a -> Maybe b) -> t a -> Maybe b
+findMaybe mPred list =
+  let pred' x = Maybe.isJust $ mPred x
+   in case Foldable.find pred' list of
+        Just a -> mPred a
+        Nothing -> Nothing
+
+-- | 'traverse' with a function returning 'Either' and collect all errors that happen, if they happen.
+--
+-- Does not shortcut on error, so will always traverse the whole list/'Traversable' structure.
+--
+-- This is a useful error handling function in many circumstances,
+-- because it won’t only return the first error that happens, but rather all of them.
+traverseValidate :: forall t a err b. (Traversable t) => (a -> Either err b) -> t a -> Either (NonEmpty err) (t b)
+traverseValidate f as =
+  as
+    & traverse @t @(Validation _) (eitherToListValidation . f)
+    & validationToEither
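+
+-- Example (hypothetical values): collect all odd elements as errors instead of
+-- stopping at the first one, as a plain 'Either'-based 'traverse' would:
+--
+--   traverseValidate (\x -> if even x then Right x else Left x) [1, 2, 3]
+--     == Left (1 :| [3])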
+
+-- | 'traverse' with a function returning 'm Either' and collect all errors that happen, if they happen.
+--
+-- Does not shortcut on error, so will always traverse the whole list/'Traversable' structure.
+--
+-- This is a useful error handling function in many circumstances,
+-- because it won’t only return the first error that happens, but rather all of them.
+traverseValidateM :: forall t m a err b. (Traversable t, Applicative m) => (a -> m (Either err b)) -> t a -> m (Either (NonEmpty err) (t b))
+traverseValidateM f as =
+  as
+    & traverse @t @m (\a -> a & f <&> eitherToListValidation)
+    <&> sequenceA @t @(Validation _)
+    <&> validationToEither
+
+-- | 'traverse_' with a function returning 'm Either' and collect all errors that happen, if they happen.
+--
+-- Does not shortcut on error, so will always traverse the whole list/'Traversable' structure.
+--
+-- This is a useful error handling function in many circumstances,
+-- because it won’t only return the first error that happens, but rather all of them.
+traverseValidateM_ :: forall t m a err. (Traversable t, Applicative m) => (a -> m (Either err ())) -> t a -> m (Either (NonEmpty err) ())
+traverseValidateM_ f as =
+  as
+    & traverse @t @m (\a -> a & f <&> eitherToListValidation)
+    <&> sequenceA_ @t @(Validation _)
+    <&> validationToEither
+
+-- | Like 'eitherToValidation', but puts the Error side into a NonEmpty list
+-- to make it combine with other validations.
+--
+-- See also 'validateEithers', if you have a list of Either and want to collect all errors.
+eitherToListValidation :: Either a c -> Validation (NonEmpty a) c
+eitherToListValidation = first singleton . eitherToValidation
+
+-- | Convert an 'Either' to a 'These'.
+eitherToThese :: Either err a -> These err a
+eitherToThese (Left err) = This err
+eitherToThese (Right a) = That a
+
+-- | Like 'eitherToThese', but puts the Error side into a NonEmpty list
+-- to make it combine with other theses.
+eitherToListThese :: Either err a -> These (NonEmpty err) a
+eitherToListThese (Left e) = This (singleton e)
+eitherToListThese (Right a) = That a
+
+-- | Convert a 'Validation' to a 'These'.
+validationToThese :: Validation err a -> These err a
+validationToThese (Failure err) = This err
+validationToThese (Success a) = That a
+
+-- | Nested '>>=' of a These inside some other @m@.
+--
+-- Use if you want to collect errors and successes, and want to chain multiple function returning 'These'.
+thenThese ::
+  (Monad m, Semigroup err) =>
+  (a -> m (These err b)) ->
+  m (These err a) ->
+  m (These err b)
+thenThese f x = do
+  th <- x
+  join <$> traverse f th
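+
+-- A minimal usage sketch ('checkPositive' is invented for illustration):
+--
+-- @
+-- checkPositive :: Int -> IO (These [Text] Int)
+-- checkPositive i = pure $ if i > 0 then That i else These ["not positive"] 0
+--
+-- pure (These ["warn"] 5) & thenThese checkPositive
+--   -- returns These ["warn"] 5
+-- pure (These ["warn"] (-1)) & thenThese checkPositive
+--   -- returns These ["warn", "not positive"] 0
+-- @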
+
+-- | Nested validating bind-like combinator.
+--
+-- Use if you want to collect errors, and want to chain multiple functions returning 'Validation'.
+thenValidate ::
+  (a -> Validation err b) ->
+  Validation err a ->
+  Validation err b
+thenValidate f = \case
+  Success a -> f a
+  Failure err -> Failure err
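+
+-- A minimal usage sketch ('positive' is invented for illustration):
+--
+-- @
+-- positive :: Int -> Validation (NonEmpty Text) Int
+-- positive i = if i > 0 then Success i else Failure (singleton "not positive")
+--
+-- (Success 5 & thenValidate positive)
+--   -- Success 5
+-- (Failure (singleton "earlier error") & thenValidate positive)
+--   -- Failure ("earlier error" :| [])
+-- @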
+
+-- | Nested validating bind-like combinator inside some other @m@.
+--
+-- Use if you want to collect errors, and want to chain multiple functions returning 'Validation'.
+thenValidateM ::
+  (Monad m) =>
+  (a -> m (Validation err b)) ->
+  m (Validation err a) ->
+  m (Validation err b)
+thenValidateM f x =
+  eitherToValidation <$> do
+    x' <- validationToEither <$> x
+    case x' of
+      Left err -> pure $ Left err
+      Right a -> validationToEither <$> f a
+
+-- | Put the text to @stderr@.
+putStderrLn :: Text -> IO ()
+putStderrLn msg =
+  System.IO.hPutStrLn System.IO.stderr $ textToString msg
+
+exitWithMessage :: Text -> IO a
+exitWithMessage msg = do
+  putStderrLn msg
+  System.Exit.exitWith $ System.Exit.ExitFailure (-1)
+
+-- | Run a function producing an applicative over a traversable data structure,
+-- then collect the results in a 'Monoid'.
+--
+-- Very helpful with side-effecting functions returning @(Validation err a)@:
+--
+-- @
+-- let
+--   f :: Text -> IO (Validation (NonEmpty Error) Text)
+--   f t = pure $ if t == "foo" then Success t else Failure (singleton ("not foo: " <> t))
+--
+-- in traverseFold f [ "foo", "bar", "baz" ]
+--   == Failure ("not foo: bar" :| ["not foo: baz"])
+-- @
+--
+-- … since @(Semigroup err => Validation err a)@ is a @Semigroup@/@Monoid@ itself.
+traverseFold :: (Applicative ap, Traversable t, Monoid m) => (a -> ap m) -> t a -> ap m
+{-# INLINE traverseFold #-}
+traverseFold f xs =
+  -- note: could be weakened to (Foldable t) via `getAp . foldMap (Ap . f)`
+  fold <$> traverse f xs
+
+-- | Like 'traverseFold', but fold over a semigroup instead of a Monoid, by providing a starting element.
+traverseFoldDefault :: (Applicative ap, Traversable t, Semigroup m) => m -> (a -> ap m) -> t a -> ap m
+{-# INLINE traverseFoldDefault #-}
+traverseFoldDefault def f xs = foldDef def <$> traverse f xs
+  where
+    foldDef = foldr (<>)
+
+-- | Same as 'traverseFold', but with a 'Semigroup' and 'Traversable1' restriction.
+traverseFold1 :: (Applicative ap, Traversable1 t, Semigroup s) => (a -> ap s) -> t a -> ap s
+{-# INLINE traverseFold1 #-}
+-- note: cannot be weakened to (Foldable1 t) because there is no `Ap` for Semigroup (No `Apply` typeclass)
+traverseFold1 f xs = fold1 <$> traverse f xs
+
+-- | Use this in places where the code is still to be implemented.
+--
+-- It always type-checks and will show a warning at compile time if it was forgotten in the code.
+--
+-- Use instead of 'error' and 'undefined' for code that hasn’t been written.
+--
+-- Uses the same trick as https://hackage.haskell.org/package/protolude-0.3.0/docs/src/Protolude.Error.html#error
+{-# WARNING todo "'todo' (undefined code) remains in code" #-}
+todo :: forall (r :: RuntimeRep). forall (a :: TYPE r). (HasCallStack) => a
+todo = raise# (errorCallWithCallStackException "This code was not yet implemented: TODO" ?callStack)
+
+-- | Convert an integer to a 'Natural' if possible
+--
+-- Named the same as the function from "GHC.Natural", but does not crash.
+intToNatural :: (Integral a) => a -> Maybe Natural
+intToNatural i =
+  if i < 0
+    then Nothing
+    else Just $ fromIntegral i
+
+-- | @inverseFunction f@ creates a function that is the inverse of a given function
+-- @f@. It does so by constructing 'M.Map' internally for each value @f a@. The
+-- implementation makes sure that the 'M.Map' is constructed only once and then
+-- shared for every call.
+--
+-- __Memory usage note:__ don’t invert functions whose argument type (the @a@ here)
+-- is huge, like 'Int': the created 'M.Map' has one entry per value of @a@.
+--
+-- The complexity of the reverse lookup is \(\mathcal{O}(\log n)\).
+--
+-- __Performance note:__ make sure to specialize functions that use 'inverseFunction'
+-- to a monomorphic type, to avoid reconstructing the 'M.Map' on every call.
+--
+-- A common 'inverseFunction' use case is inverting 'show' or a 'show'-like
+-- function.
+--
+-- >>> data Color = Red | Green | Blue deriving (Show, Enum, Bounded)
+-- >>> parse = inverseFunction show :: String -> Maybe Color
+-- >>> parse "Red"
+-- Just Red
+-- >>> parse "Black"
+-- Nothing
+--
+-- __Correctness note:__ 'inverseFunction' expects /injective function/ as its argument,
+-- i.e. the function must map distinct arguments to distinct values.
+--
+-- Typical usage of this function looks like this:
+--
+-- @
+-- __data__ GhcVer
+--    = Ghc802
+--    | Ghc822
+--    | Ghc844
+--    | Ghc865
+--    | Ghc881
+--    __deriving__ ('Eq', 'Ord', 'Show', 'Enum', 'Bounded')
+--
+-- showGhcVer :: GhcVer -> 'Text'
+-- showGhcVer = \\__case__
+--    Ghc802 -> "8.0.2"
+--    Ghc822 -> "8.2.2"
+--    Ghc844 -> "8.4.4"
+--    Ghc865 -> "8.6.5"
+--    Ghc881 -> "8.8.1"
+--
+-- parseGhcVer :: 'Text' -> 'Maybe' GhcVer
+-- parseGhcVer = 'inverseFunction' showGhcVer
+-- @
+--
+-- Taken from relude’s @Relude.Extra.Enum@.
+inverseFunction ::
+  forall a k.
+  (Bounded a, Enum a, Ord k) =>
+  (a -> k) ->
+  (k -> Maybe a)
+inverseFunction f k = Map.lookup k $ inverseMap f
+
+-- | Like `inverseFunction`, but instead of returning the function
+-- it returns a mapping from all possible outputs to their possible inputs.
+--
+-- This has the same restrictions as 'inverseFunction'.
+inverseMap :: forall a k. (Bounded a, Enum a, Ord k) => (a -> k) -> Map k a
+inverseMap f = enumerateAll <&> (\a -> (f a, a)) & Map.fromList
+
+-- | All possible values in this enum.
+enumerateAll :: (Enum a, Bounded a) => [a]
+enumerateAll = [minBound .. maxBound]
+
+-- | Create a 'Map' from a list of values, extracting the map key from each value. Like 'Map.fromList'.
+--
+-- Attention: if the key is not unique, the last value with that key is kept, as per 'Map.fromList'.
+mapFromListOn :: (Ord key) => (a -> key) -> [a] -> Map key a
+mapFromListOn f xs = xs <&> (\x -> (f x, x)) & Map.fromList
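+
+-- A minimal sketch of the duplicate-key behavior (assuming @OverloadedStrings@):
+--
+-- @
+-- mapFromListOn Text.length ["a", "bb", "cc"]
+--   == Map.fromList [(1, "a"), (2, "cc")]
+-- @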
+
+-- | Create a 'Map' from a list of values, merging multiple values at the same key with '<>' (left-to-right)
+--
+-- @f@ has to extract the key and the value; the value type must be a 'Semigroup'.
+--
+-- If the key is not unique, all values with that key are merged left-to-right with '<>'.
+mapFromListOnMerge :: (Ord key, Semigroup s) => (a -> (key, s)) -> [a] -> Map key s
+mapFromListOnMerge f xs =
+  xs
+    <&> f
+    & Map.fromListWith
+      -- we have to flip (`<>`) because `Map.fromListWith` merges its values “the other way around”
+      (flip (<>))
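+
+-- A minimal sketch of the merge behavior; note the left-to-right '<>':
+--
+-- @
+-- mapFromListOnMerge (\t -> (Text.length t, t)) ["a", "bb", "cc"]
+--   == Map.fromList [(1, "a"), (2, "bbcc")]
+-- @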
+
+-- | If the predicate is true, return the @m@, else 'mempty'.
+--
+-- This can be used (together with `ifExists`) to e.g. create lists with optional elements:
+--
+-- >>> import Data.Monoid (Sum(..))
+--
+-- >>> :{
+-- mconcat [
+--   ifTrue (1 == 1) [1],
+--   [2, 3, 4],
+--   ifTrue False [5]
+-- ]
+-- :}
+-- [1,2,3,4]
+--
+-- Or any other Monoid:
+--
+-- >>> mconcat [ Sum 1, ifTrue (1 == 1) (Sum 2), Sum 3 ]
+-- Sum {getSum = 6}
+ifTrue :: (Monoid m) => Bool -> m -> m
+ifTrue pred' m = if pred' then m else mempty
+
+-- | If the given @Maybe@ is @Just@, return the result of @f@ wrapped in 'pure', else return 'mempty'.
+--
+-- This can be used (together with 'ifTrue') to e.g. create lists with optional elements:
+--
+-- >>> import Data.Monoid (Sum(..))
+--
+-- >>> :{
+-- mconcat [
+--   ifExists id (Just 1),
+--   [2, 3, 4],
+--   ifExists id Nothing
+-- ]
+-- :}
+-- [1,2,3,4]
+--
+-- Or any other Monoid:
+--
+-- >>> mconcat [ Sum 1, ifExists id (Just 2), Sum 3 ]
+-- Sum {getSum = 6}
+ifExists :: (Monoid (f b), Applicative f) => (a -> b) -> Maybe a -> f b
+ifExists f m = m & foldMap @Maybe (pure . f)
diff --git a/users/Profpatsch/my-prelude/src/Parse.hs b/users/Profpatsch/my-prelude/src/Parse.hs
new file mode 100644
index 0000000000..65a0b0d39e
--- /dev/null
+++ b/users/Profpatsch/my-prelude/src/Parse.hs
@@ -0,0 +1,174 @@
+{-# LANGUAGE QuasiQuotes #-}
+
+module Parse where
+
+import Control.Category qualified
+import Control.Selective (Selective)
+import Data.Error.Tree
+import Data.Functor.Compose
+import Data.List qualified as List
+import Data.Monoid (First (..))
+import Data.Semigroup.Traversable
+import Data.Semigroupoid qualified as Semigroupoid
+import Data.Text qualified as Text
+import FieldParser (FieldParser)
+import FieldParser qualified as Field
+import PossehlAnalyticsPrelude
+import Validation (partitionValidations)
+import Prelude hiding (init, maybe)
+import Prelude qualified
+
+-- | A generic applicative “vertical” parser.
+-- Similar to 'FieldParser', but made for parsing whole structures and collecting all errors in an 'ErrorTree'.
+newtype Parse from to = Parse ((Context, from) -> Validation (NonEmpty ErrorTree) (Context, to))
+  deriving
+    (Functor, Applicative, Selective)
+    via ( Compose
+            ( Compose
+                ((->) (Context, from))
+                (Validation (NonEmpty ErrorTree))
+            )
+            ((,) Context)
+        )
+
+-- | Every parser can add to the context; e.g. an element parser will add the name of the element it is parsing.
+-- This should be added to the error message of each parser, with `showContext`.
+newtype Context = Context (Maybe [Text])
+  deriving stock (Show)
+  deriving (Semigroup, Monoid) via (First [Text])
+
+instance Semigroupoid Parse where
+  o p2 p1 = Parse $ \from -> case runParse' p1 from of
+    Failure err -> Failure err
+    Success to1 -> runParse' p2 to1
+
+instance Category Parse where
+  (.) = Semigroupoid.o
+  id = Parse $ \t -> Success t
+
+instance Profunctor Parse where
+  lmap f (Parse p) = Parse $ lmap (second f) p
+  rmap = (<$>)
+
+runParse :: Error -> Parse from to -> from -> Either ErrorTree to
+runParse errMsg parser t =
+  (Context (Just ["$"]), t)
+    & runParse' parser
+    <&> snd
+    & first (nestedMultiError errMsg)
+    & validationToEither
+
+runParse' :: Parse from to -> (Context, from) -> Validation (NonEmpty ErrorTree) (Context, to)
+runParse' (Parse f) from = f from
+
+showContext :: Context -> Text
+showContext (Context context) = context & fromMaybe [] & List.reverse & Text.intercalate "."
+
+addContext :: Text -> Context -> Context
+addContext x (Context mxs) = Context (Just $ x : (mxs & fromMaybe []))
+
+mkParsePushContext :: Text -> ((Context, from) -> Either ErrorTree to) -> Parse from to
+mkParsePushContext toPush f = Parse $ \(ctx, from) -> case f (ctx, from) of
+  Right to -> Success (addContext toPush ctx, to)
+  Left err -> Failure $ singleton err
+
+mkParseNoContext :: (from -> Either ErrorTree to) -> Parse from to
+mkParseNoContext f = Parse $ \(ctx, from) -> case f from of
+  Right to -> Success (ctx, to)
+  Left err -> Failure $ singleton err
+
+-- | Accept only exactly the given value
+exactly :: (Eq from) => (from -> Text) -> from -> Parse from from
+exactly errDisplay from = Parse $ \(ctx, from') ->
+  if from == from'
+    then Success (ctx, from')
+    else Failure $ singleton [fmt|Field has to be exactly {errDisplay from}, was: {errDisplay from'} at {showContext ctx}|]
+
+-- | Make a parser to parse the whole list
+multiple :: Parse a1 a2 -> Parse [a1] [a2]
+multiple inner = dimap nonEmpty (Prelude.maybe [] toList) (maybe $ multipleNE inner)
+
+-- | Make a parser to parse the whole non-empty list
+multipleNE :: Parse from to -> Parse (NonEmpty from) (NonEmpty to)
+multipleNE inner = Parse $ \(ctx, from) ->
+  from
+    & zipIndex
+    & traverse (\(idx, f) -> runParse' inner (ctx, f) & first (singleton . nestedMultiError [fmt|{idx}|]))
+    -- we assume that, since the same parser is used everywhere, the context will be the same as well (TODO: correct?)
+    & second (\((ctx', y) :| ys) -> (ctx', y :| (snd <$> ys)))
+
+-- | Like '(>>>)', but returns the intermediate result alongside the final parse result.
+andParse :: Parse to to2 -> Parse from to -> Parse from (to, to2)
+andParse outer inner = Parse $ \from -> case runParse' inner from of
+  Failure err -> Failure err
+  Success (ctx, to) -> runParse' outer (ctx, to) <&> (second (to,))
+
+-- | Lift a parser into an optional value
+maybe :: Parse from to -> Parse (Maybe from) (Maybe to)
+maybe inner = Parse $ \(ctx, m) -> case m of
+  Nothing -> Success (ctx, Nothing)
+  Just a -> runParse' inner (ctx, a) & second (fmap Just)
+
+-- | Assert that there is exactly one element in the list
+exactlyOne :: Parse [from] from
+exactlyOne = Parse $ \(ctx, xs) -> case xs of
+  [] -> Failure $ singleton [fmt|Expected exactly 1 element, but got 0, at {ctx & showContext}|]
+  [one] -> Success (ctx, one)
+  _more -> Failure $ singleton [fmt|Expected exactly 1 element, but got more than 1, at {ctx & showContext}|]
+
+-- | Assert that there is exactly zero or one element in the list
+zeroOrOne :: Parse [from] (Maybe from)
+zeroOrOne = Parse $ \(ctx, xs) -> case xs of
+  [] -> Success (ctx, Nothing)
+  [one] -> Success (ctx, Just one)
+  _more -> Failure $ singleton [fmt|Expected zero or one elements, but got more than 1, at {ctx & showContext}|]
+
+-- | Find the first element on which the sub-parser succeeds; if there was no match, return all error messages.
+find :: Parse from to -> Parse [from] to
+find inner = Parse $ \(ctx, xs) -> case xs of
+  [] -> failure [fmt|Wanted to get the first sub-parser that succeeds, but there were no elements in the list, at {ctx & showContext}|]
+  (y : ys) -> runParse' (findNE' inner) (ctx, y :| ys)
+
+-- | Find the first element on which the sub-parser succeeds; if there was no match, return all error messages.
+findNE' :: Parse from to -> Parse (NonEmpty from) to
+findNE' inner = Parse $ \(ctx, xs) ->
+  xs
+    <&> (\x -> runParse' inner (ctx, x))
+    & traverse1
+      ( \case
+          Success a -> Left a
+          Failure e -> Right e
+      )
+    & \case
+      Left a -> Success a
+      Right errs ->
+        errs
+          & zipIndex
+          <&> (\(idx, errs') -> nestedMultiError [fmt|{idx}|] errs')
+          & nestedMultiError [fmt|None of these sub-parsers succeeded|]
+          & singleton
+          & Failure
+
+-- | Find all elements on which the sub-parser succeeds; if there was no match, return an empty list
+findAll :: Parse from to -> Parse [from] [to]
+findAll inner = Parse $ \(ctx, xs) ->
+  xs
+    <&> (\x -> runParse' inner (ctx, x))
+    & partitionValidations
+    & \case
+      (_miss, []) ->
+        -- in this case we just arbitrarily forward the original context …
+        Success (ctx, [])
+      (_miss, (hitCtx, hit) : hits) -> Success (hitCtx, hit : (hits <&> snd))
+
+-- | Convert a 'FieldParser' into a 'Parse'.
+fieldParser :: FieldParser from to -> Parse from to
+fieldParser fp = Parse $ \(ctx, from) -> case Field.runFieldParser fp from of
+  Right a -> Success (ctx, a)
+  Left err -> Failure $ singleton (singleError err)
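+
+-- A minimal usage sketch, wiring 'fieldParser', 'multiple' and 'runParse'
+-- together ('Field.decimalNatural' and 'newError' come from this prelude;
+-- the function name is invented):
+--
+-- @
+-- parseAges :: [Text] -> Either ErrorTree [Natural]
+-- parseAges =
+--   runParse
+--     (newError "parsing ages failed")
+--     (multiple (fieldParser Field.decimalNatural))
+-- @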
+
+zipNonEmpty :: NonEmpty a -> NonEmpty b -> NonEmpty (a, b)
+zipNonEmpty (x :| xs) (y :| ys) = (x, y) :| zip xs ys
+
+zipIndex :: NonEmpty b -> NonEmpty (Natural, b)
+zipIndex = zipNonEmpty (1 :| [2 :: Natural ..])
diff --git a/users/Profpatsch/my-prelude/src/Postgres/Decoder.hs b/users/Profpatsch/my-prelude/src/Postgres/Decoder.hs
new file mode 100644
index 0000000000..008b89b4ba
--- /dev/null
+++ b/users/Profpatsch/my-prelude/src/Postgres/Decoder.hs
@@ -0,0 +1,94 @@
+module Postgres.Decoder where
+
+import Control.Applicative (Alternative)
+import Data.Aeson qualified as Json
+import Data.Aeson.BetterErrors qualified as Json
+import Data.Error.Tree
+import Data.Typeable (Typeable)
+import Database.PostgreSQL.Simple (Binary (fromBinary))
+import Database.PostgreSQL.Simple.FromField qualified as PG
+import Database.PostgreSQL.Simple.FromRow qualified as PG
+import Json qualified
+import Label
+import PossehlAnalyticsPrelude
+
+-- | A Decoder of postgres values. Allows embedding more complex parsers (like a 'Json.ParseT').
+newtype Decoder a = Decoder (PG.RowParser a)
+  deriving newtype (Functor, Applicative, Alternative, Monad)
+
+-- | Parse a `bytea` field, equivalent to @Binary ByteString@ but avoids the pitfall of having to use 'Binary'.
+bytea :: Decoder ByteString
+bytea = fromField @(Binary ByteString) <&> (.fromBinary)
+
+-- | Parse a nullable `bytea` field, equivalent to @Binary ByteString@ but avoids the pitfall of having to use 'Binary'.
+byteaMay :: Decoder (Maybe ByteString)
+byteaMay = fromField @(Maybe (Binary ByteString)) <&> fmap (.fromBinary)
+
+-- | Turn any type that implements 'PG.fromField' into a 'Decoder'. Use type applications to prevent accidental conversions:
+--
+-- @
+-- fromField @Text :: Decoder Text
+-- @
+fromField :: PG.FromField a => Decoder a
+fromField = Decoder $ PG.fieldWith PG.fromField
+
+-- | Turn any type that implements 'PG.fromField' into a 'Decoder' and wrap the result into the given 'Label'. Use type applications to prevent accidental conversions:
+--
+-- @
+-- fromField @"myField" @Text :: Decoder (Label "myField" Text)
+-- @
+fromFieldLabel :: forall lbl a. PG.FromField a => Decoder (Label lbl a)
+fromFieldLabel = label @lbl <$> fromField
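+
+-- Since 'Decoder' is 'Applicative', decoders for multiple columns can be
+-- combined; a sketch (column names and types invented, 'T2' from our Label module):
+--
+-- @
+-- userDecoder :: Decoder (T2 "id" Int "name" Text)
+-- userDecoder =
+--   T2
+--     <$> fromFieldLabel @"id" @Int
+--     <*> fromFieldLabel @"name" @Text
+-- @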
+
+-- | Parse fields out of a json value returned from the database.
+--
+-- ATTN: The whole json record has to be transferred before it is parsed,
+-- so if you only need a tiny bit of it, use `->` and `->>` in your SQL statement
+-- and return only the fields you need from the query.
+--
+-- In that case pay attention to NULL though:
+--
+-- @
+-- SELECT '{"foo": {}}'::jsonb->>'foo' IS NULL
+-- → TRUE
+-- @
+--
+-- Also note: `->>` will coerce the json value to @text@, regardless of the content.
+-- So the JSON object @{"foo": {}}@ would be returned as the text: @"{\"foo\": {}}"@.
+json :: Typeable a => Json.ParseT ErrorTree Identity a -> Decoder a
+json parser = Decoder $ PG.fieldWith $ \field bytes -> do
+  val <- PG.fromField @Json.Value field bytes
+  case Json.parseValue parser val of
+    Left err ->
+      PG.returnError
+        PG.ConversionFailed
+        field
+        (err & Json.parseErrorTree "Cannot decode jsonb column" & prettyErrorTree & textToString)
+    Right a -> pure a
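+
+-- A minimal usage sketch, using the aeson-better-errors combinators that are
+-- imported qualified as @Json@ above (the field name is invented):
+--
+-- @
+-- nameDecoder :: Decoder Text
+-- nameDecoder = json (Json.key "name" Json.asText)
+-- @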
+
+-- | Parse fields out of a nullable json value returned from the database.
+--
+-- ATTN: The whole json record has to be transferred before it is parsed,
+-- so if you only need a tiny bit of it, use `->` and `->>` in your SQL statement
+-- and return only the fields you need from the query.
+--
+-- In that case pay attention to NULL though:
+--
+-- @
+-- SELECT '{"foo": {}}'::jsonb->>'foo' IS NULL
+-- → TRUE
+-- @
+--
+-- Also note: `->>` will coerce the json value to @text@, regardless of the content.
+-- So the JSON object @{"foo": {}}@ would be returned as the text: @"{\"foo\": {}}"@.
+jsonMay :: Typeable a => Json.ParseT ErrorTree Identity a -> Decoder (Maybe a)
+jsonMay parser = Decoder $ PG.fieldWith $ \field bytes -> do
+  val <- PG.fromField @(Maybe Json.Value) field bytes
+  case Json.parseValue parser <$> val of
+    Nothing -> pure Nothing
+    Just (Left err) ->
+      PG.returnError
+        PG.ConversionFailed
+        field
+        (err & Json.parseErrorTree "Cannot decode jsonb column" & prettyErrorTree & textToString)
+    Just (Right a) -> pure (Just a)
diff --git a/users/Profpatsch/my-prelude/src/Postgres/MonadPostgres.hs b/users/Profpatsch/my-prelude/src/Postgres/MonadPostgres.hs
new file mode 100644
index 0000000000..2c9a48d134
--- /dev/null
+++ b/users/Profpatsch/my-prelude/src/Postgres/MonadPostgres.hs
@@ -0,0 +1,930 @@
+{-# LANGUAGE AllowAmbiguousTypes #-}
+{-# LANGUAGE DeriveAnyClass #-}
+{-# LANGUAGE QuasiQuotes #-}
+{-# OPTIONS_GHC -Wno-orphans #-}
+
+module Postgres.MonadPostgres where
+
+import Arg
+import AtLeast (AtLeast)
+import Control.Exception
+  ( Exception (displayException),
+    Handler (Handler),
+    catches,
+    try,
+  )
+import Control.Foldl qualified as Fold
+import Control.Monad.Logger.CallStack (MonadLogger, logDebug, logWarn)
+import Control.Monad.Reader (MonadReader (ask), ReaderT (..))
+import Control.Monad.Trans.Resource
+import Data.Aeson (FromJSON)
+import Data.ByteString qualified as ByteString
+import Data.Error.Tree
+import Data.HashMap.Strict qualified as HashMap
+import Data.Int (Int64)
+import Data.Kind (Type)
+import Data.List qualified as List
+import Data.Pool (Pool)
+import Data.Pool qualified as Pool
+import Data.Text qualified as Text
+import Data.Typeable (Typeable)
+import Database.PostgreSQL.Simple (Connection, FormatError, FromRow, Query, QueryError, ResultError, SqlError, ToRow)
+import Database.PostgreSQL.Simple qualified as PG
+import Database.PostgreSQL.Simple qualified as Postgres
+import Database.PostgreSQL.Simple.FromRow qualified as PG
+import Database.PostgreSQL.Simple.ToField (ToField)
+import Database.PostgreSQL.Simple.ToRow (ToRow (toRow))
+import Database.PostgreSQL.Simple.Types (Query (..))
+import GHC.IO.Handle (Handle)
+import GHC.Records (getField)
+import Label
+import OpenTelemetry.Trace.Core (NewEvent (newEventName))
+import OpenTelemetry.Trace.Core qualified as Otel hiding (inSpan, inSpan')
+import OpenTelemetry.Trace.Monad qualified as Otel
+import PossehlAnalyticsPrelude
+import Postgres.Decoder
+import Postgres.Decoder qualified as Dec
+import Pretty (showPretty)
+import Seconds
+import System.Exit (ExitCode (..))
+import Tool
+import UnliftIO (MonadUnliftIO (withRunInIO), bracket, hClose, mask_)
+import UnliftIO.Concurrent (forkIO)
+import UnliftIO.Process (ProcessHandle)
+import UnliftIO.Process qualified as Process
+import UnliftIO.Resource qualified as Resource
+import Prelude hiding (init, span)
+
+-- | Postgres queries/commands that can be executed within a running transaction.
+--
+-- These are implemented with the @postgresql-simple@ primitives of the same name
+-- and will behave the same unless otherwise documented.
+class (Monad m) => MonadPostgres (m :: Type -> Type) where
+  -- | Execute an INSERT, UPDATE, or other SQL query that is not expected to return results.
+  --
+  -- Returns the number of rows affected.
+  execute ::
+    (ToRow params, Typeable params) =>
+    Query ->
+    params ->
+    Transaction m (Label "numberOfRowsAffected" Natural)
+
+  -- | Execute a multi-row INSERT, UPDATE, or other SQL query that is not expected to return results.
+  --
+  -- Returns the number of rows affected. If the list of parameters is empty,
+  -- this function will simply return 0 without issuing the query to the backend.
+  -- If this is not desired, consider using the 'PG.Values' constructor instead.
+  executeMany ::
+    (ToRow params, Typeable params) =>
+    Query ->
+    NonEmpty params ->
+    Transaction m (Label "numberOfRowsAffected" Natural)
+
+  -- | Execute @INSERT ... RETURNING@, @UPDATE ... RETURNING@,
+  -- or another SQL query that accepts multi-row input and is expected to return results.
+  -- Note that it is possible to write @query conn "INSERT ... RETURNING ..." ...@
+  -- in cases where you are only inserting a single row,
+  -- and do not need functionality analogous to 'executeMany'.
+  --
+  -- If the list of parameters is empty, this function will simply return @[]@ without issuing the query to the backend. If this is not desired, consider using the 'PG.Values' constructor instead.
+  executeManyReturningWith :: (ToRow q) => Query -> NonEmpty q -> Decoder r -> Transaction m [r]
+
+  -- | Run a query, passing parameters and result row parser.
+  queryWith ::
+    (PG.ToRow params, Typeable params, Typeable r) =>
+    PG.Query ->
+    params ->
+    Decoder r ->
+    Transaction m [r]
+
+  -- | Run a query without any parameters and result row parser.
+  queryWith_ ::
+    (Typeable r) =>
+    PG.Query ->
+    Decoder r ->
+    Transaction m [r]
+
+  -- | Run a query, passing parameters, and fold over the resulting rows.
+  --
+  -- This doesn’t have to realize the full list of results in memory,
+  -- rather results are streamed incrementally from the database.
+  --
+  -- When dealing with small results, it may be simpler (and perhaps faster) to use query instead.
+  --
+  -- This fold is /not/ strict. The stream consumer is responsible
+  -- for forcing the evaluation of its result to avoid space leaks.
+  --
+  -- If you can, prefer aggregating in the database itself.
+  foldRowsWithAcc ::
+    (ToRow params, Typeable row, Typeable params) =>
+    Query ->
+    params ->
+    Decoder row ->
+    a ->
+    (a -> row -> Transaction m a) ->
+    Transaction m a
+
+  -- | Run a given transaction in a transaction block, rolling back the transaction
+  -- if any exception (postgres or Haskell Exception) is thrown during execution.
+  --
+  -- Re-throws the exception.
+  --
+  -- Don’t do any long-running things on the Haskell side during a transaction,
+  -- because it will block a database connection and potentially also lock
+  -- database tables from being written or read by other clients.
+  --
+  -- Nonetheless, try to push transactions as far out to the handlers as possible,
+  -- don’t do something like @runTransaction $ query …@, because it will lead people
+  -- to accidentally start nested transactions (the inner transaction is run on a new connection,
+  -- thus can’t see any changes done by the outer transaction).
+  -- Only handlers should run transactions.
+  runTransaction :: Transaction m a -> m a
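+
+-- An illustrative handler built on this class (table and column names invented;
+-- the query literal relies on the @IsString Query@ instance, and passing @()@
+-- for “no parameters” relies on the @ToRow ()@ instance, cf. the TODO on 'query_' below):
+--
+-- @
+-- getUserNames :: (MonadPostgres m) => m [Text]
+-- getUserNames =
+--   runTransaction $
+--     queryWith "SELECT name FROM users" () (Dec.fromField @Text)
+-- @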
+
+-- | Run a query, passing parameters. Prefer 'queryWith' if possible.
+query ::
+  forall m params r.
+  (PG.ToRow params, PG.FromRow r, Typeable params, Typeable r, MonadPostgres m) =>
+  PG.Query ->
+  params ->
+  Transaction m [r]
+query qry params = queryWith qry params (Decoder PG.fromRow)
+
+-- | Run a query without any parameters. Prefer 'queryWith' if possible.
+--
+-- TODO: I think(?) this can always be replaced by passing @()@ to 'query', remove?
+query_ ::
+  forall m r.
+  (Typeable r, PG.FromRow r, MonadPostgres m) =>
+  PG.Query ->
+  Transaction m [r]
+query_ qry = queryWith_ qry (Decoder PG.fromRow)
+
+-- TODO: implement via fold, so that the result doesn’t have to be realized in memory
+querySingleRow ::
+  ( MonadPostgres m,
+    ToRow qParams,
+    Typeable qParams,
+    FromRow a,
+    Typeable a,
+    MonadThrow m
+  ) =>
+  Query ->
+  qParams ->
+  Transaction m a
+querySingleRow qry params = do
+  query qry params >>= ensureSingleRow
+
+-- TODO: implement via fold, so that the result doesn’t have to be realized in memory
+querySingleRowWith ::
+  ( MonadPostgres m,
+    ToRow qParams,
+    Typeable qParams,
+    Typeable a,
+    MonadThrow m
+  ) =>
+  Query ->
+  qParams ->
+  Decoder a ->
+  Transaction m a
+querySingleRowWith qry params decoder = do
+  queryWith qry params decoder >>= ensureSingleRow
+
+-- TODO: implement via fold, so that the result doesn’t have to be realized in memory
+querySingleRowMaybe ::
+  ( MonadPostgres m,
+    ToRow qParams,
+    Typeable qParams,
+    FromRow a,
+    Typeable a,
+    MonadThrow m
+  ) =>
+  Query ->
+  qParams ->
+  Transaction m (Maybe a)
+querySingleRowMaybe qry params = do
+  rows <- query qry params
+  case rows of
+    [] -> pure Nothing
+    [one] -> pure (Just one)
+    -- TODO: Should we MonadThrow this here? It’s really an implementation detail of MonadPostgres
+    -- that a database function can error out; this should probably be handled by the instances.
+    more -> throwM $ SingleRowError {numberOfRowsReturned = (List.length more)}
+
+ensureSingleRow ::
+  (MonadThrow m) =>
+  [a] ->
+  m a
+ensureSingleRow = \case
+  -- TODO: Should we MonadThrow this here? It’s really an implementation detail of MonadPostgres
+  -- that a database function can error out; this should probably be handled by the instances.
+  [] -> throwM (SingleRowError {numberOfRowsReturned = 0})
+  [one] -> pure one
+  more ->
+    throwM $
+      SingleRowError
+        { numberOfRowsReturned =
+            -- TODO: this is VERY bad, because it requires parsing the full database output, even if there are 10000000000 elements
+            List.length more
+        }
+
+ensureNoneOrSingleRow ::
+  (MonadThrow m) =>
+  [a] ->
+  m (Maybe a)
+ensureNoneOrSingleRow = \case
+  -- TODO: Should we MonadThrow this here? It’s really an implementation detail of MonadPostgres
+  -- that a database function can error out; this should probably be handled by the instances.
+  [] -> pure Nothing
+  [one] -> pure $ Just one
+  more ->
+    throwM $
+      SingleRowError
+        { numberOfRowsReturned =
+            -- TODO: this is VERY bad, because it requires parsing the full database output, even if there are 10000000000 elements
+            List.length more
+        }
+
+-- | Run a query, passing parameters, and fold over the resulting rows.
+--
+-- This doesn’t have to realize the full list of results in memory,
+-- rather results are streamed incrementally from the database.
+--
+-- When dealing with small results, it may be simpler (and perhaps faster) to use query instead.
+--
+-- The results are folded strictly by the 'Fold.Fold' that is passed.
+--
+-- If you can, prefer aggregating in the database itself.
+foldRowsWith ::
+  forall row params m b.
+  ( MonadPostgres m,
+    PG.ToRow params,
+    Typeable row,
+    Typeable params
+  ) =>
+  PG.Query ->
+  params ->
+  Decoder row ->
+  Fold.Fold row b ->
+  Transaction m b
+foldRowsWith qry params decoder = Fold.purely f
+  where
+    f :: forall x. (x -> row -> x) -> x -> (x -> b) -> Transaction m b
+    f acc init extract = do
+      x <- foldRowsWithAcc qry params decoder init (\a r -> pure $ acc a r)
+      pure $ extract x
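+
+-- A minimal sketch: stream the rows and only keep a count, via a strict fold
+-- from "Control.Foldl" (query text invented):
+--
+-- @
+-- countUsers :: (MonadPostgres m) => Transaction m Int
+-- countUsers =
+--   foldRowsWith "SELECT id FROM users" () (Dec.fromField @Int) Fold.length
+-- @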
+
+newtype Transaction m a = Transaction {unTransaction :: (ReaderT Connection m a)}
+  deriving newtype
+    ( Functor,
+      Applicative,
+      Monad,
+      MonadThrow,
+      MonadLogger,
+      MonadIO,
+      MonadUnliftIO,
+      MonadTrans,
+      Otel.MonadTracer
+    )
+
+-- | [Resource Pool](http://hackage.haskell.org/package/resource-pool-0.2.3.2/docs/Data-Pool.html) configuration.
+data PoolingInfo = PoolingInfo
+  { -- | Number of stripes (distinct sub-pools)
+    --   the resources are divided into.
+    numberOfStripes :: AtLeast 1 Int,
+    -- | Time after which extra resources
+    --   (above minimum) can stay in the pool
+    --   without being used.
+    unusedResourceOpenTime :: Seconds,
+    -- | Max number of resources that can be
+    --   in the Pool at any time
+    maxOpenResourcesAcrossAllStripes :: AtLeast 1 Int
+  }
+  deriving stock (Generic, Eq, Show)
+  deriving anyclass (FromJSON)
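+
+-- The derived 'FromJSON' instance uses the field names as keys, so a config
+-- value like the following sketch should parse (assuming the numbers satisfy
+-- the @AtLeast 1@ bound):
+--
+-- @
+-- { "numberOfStripes": 2,
+--   "unusedResourceOpenTime": 10,
+--   "maxOpenResourcesAcrossAllStripes": 20
+-- }
+-- @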
+
+initMonadPostgres ::
+  (Text -> IO ()) ->
+  -- | Info describing the connection to the Postgres DB
+  Postgres.ConnectInfo ->
+  -- | Configuration info for pooling attributes
+  PoolingInfo ->
+  -- | Created Postgres connection pool
+  ResourceT IO (Pool Postgres.Connection)
+initMonadPostgres logInfoFn connectInfo poolingInfo = do
+  (_releaseKey, connPool) <-
+    Resource.allocate
+      (logInfoFn "Creating Postgres Connection Pool" >> createPGConnPool)
+      (\pool -> logInfoFn "Destroying Postgres Connection Pool" >> destroyPGConnPool pool)
+  pure connPool
+  where
+    -- \| Create a Postgres connection pool
+    createPGConnPool ::
+      IO (Pool Postgres.Connection)
+    createPGConnPool =
+      Pool.newPool $
+        Pool.defaultPoolConfig
+          {- resource init action -} poolCreateResource
+          {- resource destruction -} poolFreeResource
+          ( poolingInfo.unusedResourceOpenTime.unSeconds
+              & fromIntegral @Natural @Double
+          )
+          (poolingInfo.maxOpenResourcesAcrossAllStripes.unAtLeast)
+      where
+        poolCreateResource = Postgres.connect connectInfo
+        poolFreeResource = Postgres.close
+
+    -- \| Destroy a Postgres connection pool
+    destroyPGConnPool ::
+      -- \| Pool to be destroyed
+      (Pool Postgres.Connection) ->
+      IO ()
+    destroyPGConnPool p = Pool.destroyAllResources p
+
+-- | Improve a possible error message, by adding some context to it.
+--
+-- The given Exception type is caught, 'show'n and pretty-printed.
+--
+-- In case we get an `IOError`, we display it in a reasonable fashion.
+addErrorInformation ::
+  forall exc a.
+  (Exception exc) =>
+  Text.Text ->
+  IO a ->
+  IO a
+addErrorInformation msg io =
+  io
+    & try @exc
+    <&> first (showPretty >>> newError >>> errorContext msg)
+    & try @IOError
+    <&> first (showToError >>> errorContext "IOError" >>> errorContext msg)
+    <&> join @(Either Error)
+    >>= unwrapIOError
+
+-- | Catch any Postgres exception that gets thrown,
+-- print the query that was run and the query parameters,
+-- then rethrow inside an 'Error'.
+handlePGException ::
+  forall a params tools m.
+  ( ToRow params,
+    MonadUnliftIO m,
+    MonadLogger m,
+    HasField "pgFormat" tools PgFormatPool
+  ) =>
+  tools ->
+  Text ->
+  Query ->
+  -- | Depending on whether we used `format` or `formatMany`.
+  Either params (NonEmpty params) ->
+  IO a ->
+  Transaction m a
+handlePGException tools queryType query' params io = do
+  withRunInIO $ \unliftIO ->
+    io
+      `catches` [ Handler $ unliftIO . logQueryException @SqlError,
+                  Handler $ unliftIO . logQueryException @QueryError,
+                  Handler $ unliftIO . logQueryException @ResultError,
+                  Handler $ unliftIO . logFormatException
+                ]
+  where
+    -- TODO: use throwInternalError here (after pulling it into the MonadPostgres class)
+    throwAsError = unwrapIOError . Left . newError
+    throwErr err = liftIO $ throwAsError $ prettyErrorTree $ nestedMultiError "A Postgres query failed" err
+    logQueryException :: (Exception e) => e -> Transaction m a
+    logQueryException exc = do
+      formattedQuery <- case params of
+        Left one -> pgFormatQuery' tools query' one
+        Right many -> pgFormatQueryMany' tools query' many
+      throwErr
+        ( singleError [fmt|Query Type: {queryType}|]
+            :| [ nestedError "Exception" (exc & showPretty & newError & singleError),
+                 nestedError "Query" (formattedQuery & newError & singleError)
+               ]
+        )
+    logFormatException :: FormatError -> Transaction m a
+    logFormatException fe = throwErr (fe & showPretty & newError & singleError & singleton)
+
+-- | Perform a Postgres action within a transaction
+withPGTransaction ::
+  -- | Postgres connection pool to be used for the action
+  (Pool Postgres.Connection) ->
+  -- | DB-action to be performed
+  (Postgres.Connection -> IO a) ->
+  -- | Result of the DB-action
+  IO a
+withPGTransaction connPool f =
+  Pool.withResource
+    connPool
+    (\conn -> Postgres.withTransaction conn (f conn))
+
+-- | `pg_formatter` is a perl script that does not support any kind of streaming.
+-- Thus we initialize a pool with a bunch of these scripts running, waiting for input. This way we can have somewhat fast SQL formatting.
+--
+-- Call `initPgFormatPool` to initialize, then use `runPgFormat` to format some sql.
+data PgFormatPool = PgFormatPool
+  { pool :: Pool PgFormatProcess,
+    pgFormat :: Tool
+  }
+
+data PgFormatProcess = PgFormatProcess
+  { stdinHdl :: Handle,
+    stdoutHdl :: Handle,
+    stderrHdl :: Handle,
+    procHdl :: ProcessHandle,
+    startedAt :: Otel.Timestamp
+  }
+
+initPgFormatPool :: (HasField "pgFormat" tools Tool) => tools -> IO PgFormatPool
+initPgFormatPool tools = do
+  pool <-
+    Pool.newPool
+      ( Pool.defaultPoolConfig
+          (pgFormatStartCommandWaitForInput tools)
+          ( \pgFmt -> do
+              Process.terminateProcess pgFmt.procHdl
+              -- make sure we don’t leave any zombies
+              _ <- forkIO $ do
+                _ <- Process.waitForProcess pgFmt.procHdl
+                pure ()
+              pure ()
+          )
+          -- unused resource time
+          100
+          -- number of resources
+          10
+      )
+
+  -- fill the pool with resources
+  let go =
+        Pool.tryWithResource pool (\_ -> go) >>= \case
+          Nothing -> pure ()
+          Just () -> pure ()
+  _ <- go
+  pure (PgFormatPool {pool, pgFormat = tools.pgFormat})
+
+destroyPgFormatPool :: PgFormatPool -> IO ()
+destroyPgFormatPool pool = Pool.destroyAllResources pool.pool
+
+-- | Get the oldest resource from the pool, or stop if you find a resource that’s older than `cutoffPointMs`.
+takeOldestResource :: PgFormatPool -> Arg "cutoffPointMs" Integer -> IO (PgFormatProcess, Pool.LocalPool PgFormatProcess)
+takeOldestResource pool cutoffPointMs = do
+  now <- Otel.getTimestamp
+  mask_ $ do
+    a <- Pool.takeResource pool.pool
+    (putBack, res) <- go now [] a
+    -- make sure we don’t leak any resources we didn’t use in the end
+    for_ putBack $ \(x, xLocal) -> Pool.putResource xLocal x
+    pure res
+  where
+    mkMs ts = (ts & Otel.timestampNanoseconds & toInteger) `div` 1000_000
+    go now putBack a@(a', _) =
+      if abs (mkMs now - mkMs a'.startedAt) > cutoffPointMs.unArg
+        then pure (putBack, a)
+        else
+          Pool.tryTakeResource pool.pool >>= \case
+            Nothing -> pure (putBack, a)
+            Just b@(b', _) -> do
+              if a'.startedAt < b'.startedAt
+                then go now (b : putBack) a
+                else go now (a : putBack) b
+
+-- | Format the given SQL with pg_formatter. Will use the pool of already running formatters to speed up execution.
+runPgFormat :: PgFormatPool -> ByteString -> IO (T3 "exitCode" ExitCode "formatted" ByteString "stderr" ByteString)
+runPgFormat pool sqlStatement = do
+  bracket
+    (takeOldestResource pool 200)
+    ( \(a, localPool) -> do
+        -- we always destroy the resource, because the process exited
+        Pool.destroyResource pool.pool localPool a
+        -- create a new process to keep the pool “warm”
+        new <- pgFormatStartCommandWaitForInput pool
+        Pool.putResource localPool new
+    )
+    ( \(pgFmt, _localPool) -> do
+        ByteString.hPut pgFmt.stdinHdl sqlStatement
+        -- close stdin to make pg_formatter format (it exits …)
+        -- issue: https://github.com/darold/pgFormatter/issues/333
+        hClose pgFmt.stdinHdl
+        formatted <- ByteString.hGetContents pgFmt.stdoutHdl
+        errs <- ByteString.hGetContents pgFmt.stderrHdl
+        exitCode <- Process.waitForProcess pgFmt.procHdl
+        pure $
+          T3
+            (label @"exitCode" exitCode)
+            (label @"formatted" formatted)
+            (label @"stderr" errs)
+    )
+
+runPGTransactionImpl ::
+  (MonadUnliftIO m) =>
+  m (Pool Postgres.Connection) ->
+  Transaction m a ->
+  m a
+{-# INLINE runPGTransactionImpl #-}
+runPGTransactionImpl zoom (Transaction transaction) = do
+  pool <- zoom
+  withRunInIO $ \unliftIO ->
+    withPGTransaction pool $ \conn -> do
+      unliftIO $ runReaderT transaction conn
+
+executeImpl ::
+  (ToRow params, MonadUnliftIO m, MonadLogger m, HasField "pgFormat" tools PgFormatPool, Otel.MonadTracer m) =>
+  m tools ->
+  m DebugLogDatabaseQueries ->
+  Query ->
+  params ->
+  Transaction m (Label "numberOfRowsAffected" Natural)
+{-# INLINE executeImpl #-}
+executeImpl zoomTools zoomDebugLogDatabaseQueries qry params =
+  Otel.inSpan' "Postgres Query (execute)" Otel.defaultSpanArguments $ \span -> do
+    tools <- lift @Transaction zoomTools
+    logDatabaseQueries <- lift @Transaction zoomDebugLogDatabaseQueries
+    traceQueryIfEnabled tools span logDatabaseQueries qry (HasSingleParam params)
+    conn <- Transaction ask
+    PG.execute conn qry params
+      & handlePGException tools "execute" qry (Left params)
+      >>= toNumberOfRowsAffected "executeImpl"
+
+executeImpl_ ::
+  (MonadUnliftIO m, MonadLogger m, HasField "pgFormat" tools PgFormatPool, Otel.MonadTracer m) =>
+  m tools ->
+  m DebugLogDatabaseQueries ->
+  Query ->
+  Transaction m (Label "numberOfRowsAffected" Natural)
+{-# INLINE executeImpl_ #-}
+executeImpl_ zoomTools zoomDebugLogDatabaseQueries qry =
+  Otel.inSpan' "Postgres Query (execute)" Otel.defaultSpanArguments $ \span -> do
+    tools <- lift @Transaction zoomTools
+    logDatabaseQueries <- lift @Transaction zoomDebugLogDatabaseQueries
+    traceQueryIfEnabled @() tools span logDatabaseQueries qry HasNoParams
+    conn <- Transaction ask
+    PG.execute_ conn qry
+      & handlePGException tools "execute_" qry (Left ())
+      >>= toNumberOfRowsAffected "executeImpl_"
+
+executeManyImpl ::
+  (ToRow params, MonadUnliftIO m, MonadLogger m, HasField "pgFormat" tools PgFormatPool, Otel.MonadTracer m) =>
+  m tools ->
+  m DebugLogDatabaseQueries ->
+  Query ->
+  NonEmpty params ->
+  Transaction m (Label "numberOfRowsAffected" Natural)
+executeManyImpl zoomTools zoomDebugLogDatabaseQueries qry params =
+  Otel.inSpan' "Postgres Query (executeMany)" Otel.defaultSpanArguments $ \span -> do
+    tools <- lift @Transaction zoomTools
+    logDatabaseQueries <- lift @Transaction zoomDebugLogDatabaseQueries
+    traceQueryIfEnabled tools span logDatabaseQueries qry (HasMultiParams params)
+    conn <- Transaction ask
+    PG.executeMany conn qry (params & toList)
+      & handlePGException tools "executeMany" qry (Right params)
+      >>= toNumberOfRowsAffected "executeManyImpl"
+
+toNumberOfRowsAffected :: (MonadIO m) => Text -> Int64 -> m (Label "numberOfRowsAffected" Natural)
+toNumberOfRowsAffected functionName i64 =
+  i64
+    & intToNatural
+    & annotate [fmt|{functionName}: postgres returned a negative number of rows affected: {i64}|]
+    -- we throw this directly in IO here, because we don’t want to e.g. have to propagate MonadThrow through user code (it’s an assertion)
+    & unwrapIOError
+    & liftIO
+    <&> label @"numberOfRowsAffected"
+
+executeManyReturningWithImpl ::
+  (ToRow params, MonadUnliftIO m, MonadLogger m, HasField "pgFormat" tools PgFormatPool, Otel.MonadTracer m) =>
+  m tools ->
+  m DebugLogDatabaseQueries ->
+  Query ->
+  NonEmpty params ->
+  Decoder r ->
+  Transaction m [r]
+{-# INLINE executeManyReturningWithImpl #-}
+executeManyReturningWithImpl zoomTools zoomDebugLogDatabaseQueries qry params (Decoder fromRow) = do
+  Otel.inSpan' "Postgres Query (executeManyReturning)" Otel.defaultSpanArguments $ \span -> do
+    tools <- lift @Transaction zoomTools
+    logDatabaseQueries <- lift @Transaction zoomDebugLogDatabaseQueries
+    traceQueryIfEnabled tools span logDatabaseQueries qry (HasMultiParams params)
+    conn <- Transaction ask
+    PG.returningWith fromRow conn qry (params & toList)
+      & handlePGException tools "executeManyReturning" qry (Right params)
+
+foldRowsWithAccImpl ::
+  ( ToRow params,
+    MonadUnliftIO m,
+    MonadLogger m,
+    HasField "pgFormat" tools PgFormatPool,
+    Otel.MonadTracer m
+  ) =>
+  m tools ->
+  m DebugLogDatabaseQueries ->
+  Query ->
+  params ->
+  Decoder row ->
+  a ->
+  (a -> row -> Transaction m a) ->
+  Transaction m a
+{-# INLINE foldRowsWithAccImpl #-}
+foldRowsWithAccImpl zoomTools zoomDebugLogDatabaseQueries qry params (Decoder rowParser) accumulator f = do
+  Otel.inSpan' "Postgres Query (foldRowsWithAcc)" Otel.defaultSpanArguments $ \span -> do
+    tools <- lift @Transaction zoomTools
+    logDatabaseQueries <- lift @Transaction zoomDebugLogDatabaseQueries
+    traceQueryIfEnabled tools span logDatabaseQueries qry (HasSingleParam params)
+    conn <- Transaction ask
+    withRunInIO
+      ( \runInIO ->
+          do
+            PG.foldWithOptionsAndParser
+              PG.defaultFoldOptions
+              rowParser
+              conn
+              qry
+              params
+              accumulator
+              (\acc row -> runInIO $ f acc row)
+              & handlePGException tools "fold" qry (Left params)
+              & runInIO
+      )
+
+pgFormatQueryNoParams' ::
+  (MonadIO m, MonadLogger m, HasField "pgFormat" tools PgFormatPool) =>
+  tools ->
+  Query ->
+  Transaction m Text
+pgFormatQueryNoParams' tools q =
+  lift $ pgFormatQueryByteString tools q.fromQuery
+
+pgFormatQuery ::
+  (ToRow params, MonadIO m) =>
+  Query ->
+  params ->
+  Transaction m ByteString
+pgFormatQuery qry params = Transaction $ do
+  conn <- ask
+  liftIO $ PG.formatQuery conn qry params
+
+pgFormatQueryMany ::
+  (MonadIO m, ToRow params) =>
+  Query ->
+  NonEmpty params ->
+  Transaction m ByteString
+pgFormatQueryMany qry params = Transaction $ do
+  conn <- ask
+  liftIO $
+    PG.formatMany
+      conn
+      qry
+      ( params
+          -- upstream is partial on empty list, see https://github.com/haskellari/postgresql-simple/issues/129
+          & toList
+      )
+
+queryWithImpl ::
+  ( ToRow params,
+    MonadUnliftIO m,
+    MonadLogger m,
+    HasField "pgFormat" tools PgFormatPool,
+    Otel.MonadTracer m
+  ) =>
+  m tools ->
+  m DebugLogDatabaseQueries ->
+  Query ->
+  params ->
+  Decoder r ->
+  Transaction m [r]
+{-# INLINE queryWithImpl #-}
+queryWithImpl zoomTools zoomDebugLogDatabaseQueries qry params (Decoder fromRow) = do
+  Otel.inSpan' "Postgres Query (queryWith)" Otel.defaultSpanArguments $ \span -> do
+    tools <- lift @Transaction zoomTools
+    logDatabaseQueries <- lift @Transaction zoomDebugLogDatabaseQueries
+    traceQueryIfEnabled tools span logDatabaseQueries qry (HasSingleParam params)
+    conn <- Transaction ask
+    PG.queryWith fromRow conn qry params
+      & handlePGException tools "query" qry (Left params)
+
+queryWithImpl_ ::
+  ( MonadUnliftIO m,
+    MonadLogger m,
+    HasField "pgFormat" tools PgFormatPool
+  ) =>
+  m tools ->
+  Query ->
+  Decoder r ->
+  Transaction m [r]
+{-# INLINE queryWithImpl_ #-}
+queryWithImpl_ zoomTools qry (Decoder fromRow) = do
+  tools <- lift @Transaction zoomTools
+  conn <- Transaction ask
+  liftIO (PG.queryWith_ fromRow conn qry)
+    & handlePGException tools "query" qry (Left ())
+
+data SingleRowError = SingleRowError
+  { -- | How many rows were actually returned by the query
+    numberOfRowsReturned :: Int
+  }
+  deriving stock (Show)
+
+instance Exception SingleRowError where
+  displayException (SingleRowError {..}) = [fmt|Single row expected from SQL query result, {numberOfRowsReturned} rows were returned instead.|]
+
+pgFormatQuery' ::
+  ( MonadIO m,
+    ToRow params,
+    MonadLogger m,
+    HasField "pgFormat" tools PgFormatPool
+  ) =>
+  tools ->
+  Query ->
+  params ->
+  Transaction m Text
+pgFormatQuery' tools q p =
+  pgFormatQuery q p
+    >>= lift . pgFormatQueryByteString tools
+
+pgFormatQueryMany' ::
+  ( MonadIO m,
+    ToRow params,
+    MonadLogger m,
+    HasField "pgFormat" tools PgFormatPool
+  ) =>
+  tools ->
+  Query ->
+  NonEmpty params ->
+  Transaction m Text
+pgFormatQueryMany' tools q p =
+  pgFormatQueryMany q p
+    >>= lift . pgFormatQueryByteString tools
+
+-- | Read the executable name "pg_format"
+postgresToolsParser :: ToolParserT IO (Label "pgFormat" Tool)
+postgresToolsParser = label @"pgFormat" <$> readTool "pg_format"
+
+pgFormatQueryByteString ::
+  ( MonadIO m,
+    MonadLogger m,
+    HasField "pgFormat" tools PgFormatPool
+  ) =>
+  tools ->
+  ByteString ->
+  m Text
+pgFormatQueryByteString tools queryBytes = do
+  res <-
+    liftIO $
+      runPgFormat
+        tools.pgFormat
+        (queryBytes)
+  case res.exitCode of
+    ExitSuccess -> pure (res.formatted & bytesToTextUtf8Lenient)
+    ExitFailure status -> do
+      logWarn [fmt|pg_format failed with status {status} while formatting the query, using original query string. Is there a syntax error?|]
+      logDebug
+        ( prettyErrorTree
+            ( nestedMultiError
+                "pg_format output"
+                ( nestedError "stdout" (singleError (res.formatted & bytesToTextUtf8Lenient & newError))
+                    :| [(nestedError "stderr" (singleError (res.stderr & bytesToTextUtf8Lenient & newError)))]
+                )
+            )
+        )
+      pure (queryBytes & bytesToTextUtf8Lenient)
+
+pgFormatStartCommandWaitForInput ::
+  ( MonadIO m,
+    HasField "pgFormat" tools Tool,
+    MonadFail m
+  ) =>
+  tools ->
+  m PgFormatProcess
+pgFormatStartCommandWaitForInput tools = do
+  do
+    startedAt <- Otel.getTimestamp
+    (Just stdinHdl, Just stdoutHdl, Just stderrHdl, procHdl) <-
+      Process.createProcess
+        ( ( Process.proc
+              tools.pgFormat.toolPath
+              [ "--no-rcfile",
+                "-"
+              ]
+          )
+            { Process.std_in = Process.CreatePipe,
+              Process.std_out = Process.CreatePipe,
+              Process.std_err = Process.CreatePipe
+            }
+        )
+
+    pure PgFormatProcess {..}
+
+data DebugLogDatabaseQueries
+  = -- | Do not log the database queries
+    DontLogDatabaseQueries
+  | -- | Log the database queries as debug output;
+    LogDatabaseQueries
+  | -- | Log the database queries as debug output and additionally the EXPLAIN output (from the query analyzer, not the actual values after execution, because that’s a bit harder to do)
+    LogDatabaseQueriesAndExplain
+  deriving stock (Show, Enum, Bounded)
+
+data HasQueryParams param
+  = HasNoParams
+  | HasSingleParam param
+  | HasMultiParams (NonEmpty param)
+
+-- | Log the postgres query depending on the given setting
+traceQueryIfEnabled ::
+  ( ToRow params,
+    MonadUnliftIO m,
+    MonadLogger m,
+    HasField "pgFormat" tools PgFormatPool,
+    Otel.MonadTracer m
+  ) =>
+  tools ->
+  Otel.Span ->
+  DebugLogDatabaseQueries ->
+  Query ->
+  HasQueryParams params ->
+  Transaction m ()
+traceQueryIfEnabled tools span logDatabaseQueries qry params = do
+  -- In case we have query logging enabled, we want to do that
+  let formattedQuery = do
+        withEvent
+          span
+          "Query Format start"
+          "Query Format end"
+          $ case params of
+            HasNoParams -> pgFormatQueryNoParams' tools qry
+            HasSingleParam p -> pgFormatQuery' tools qry p
+            HasMultiParams ps -> pgFormatQueryMany' tools qry ps
+
+  let doLog errs =
+        Otel.addAttributes
+          span
+          $ HashMap.fromList
+          $ ( ("_.postgres.query", Otel.toAttribute @Text errs.query)
+                : ( errs.explain
+                      & \case
+                        Nothing -> []
+                        Just ex -> [("_.postgres.explain", Otel.toAttribute @Text ex)]
+                  )
+            )
+  let doExplain = do
+        q <- formattedQuery
+        Otel.inSpan "Postgres EXPLAIN Query" Otel.defaultSpanArguments $ do
+          queryWithImpl_
+            (pure tools)
+            ( "EXPLAIN "
+                <> (
+                     -- TODO: this is not nice, but the only way to get the `executeMany` form to work with this
+                     -- because we need the query with all elements already interpolated.
+                     Query (q & textToBytesUtf8)
+                   )
+            )
+            (Dec.fromField @Text)
+            <&> Text.intercalate "\n"
+  case logDatabaseQueries of
+    DontLogDatabaseQueries -> pure ()
+    LogDatabaseQueries -> do
+      q <- formattedQuery
+      doLog (T2 (label @"query" q) (label @"explain" Nothing))
+    LogDatabaseQueriesAndExplain -> do
+      q <- formattedQuery
+      -- XXX: stuff like `CREATE SCHEMA` cannot be EXPLAINed, so we should catch exceptions here
+      -- and just ignore anything that errors (if it errors because of a problem with the query, it would have been caught by the query itself).
+      ex <- doExplain
+      doLog (T2 (label @"query" q) (label @"explain" (Just ex)))
+
+-- | Add a start and an end event to the span, and record how long the action between them took.
+--
+-- This is more lightweight than starting an extra span for timing things.
+withEvent :: (MonadIO f) => Otel.Span -> Text -> Text -> f b -> f b
+withEvent span start end act = do
+  let mkMs ts = (ts & Otel.timestampNanoseconds & toInteger) `div` 1000_000
+  s <- Otel.getTimestamp
+  Otel.addEvent
+    span
+    ( Otel.NewEvent
+        { newEventName = start,
+          newEventAttributes = mempty,
+          newEventTimestamp = Just s
+        }
+    )
+  res <- act
+  e <- Otel.getTimestamp
+  let tookMs =
+        (mkMs e - mkMs s)
+          -- should be small enough
+          & fromInteger @Int
+  Otel.addEvent
+    span
+    ( Otel.NewEvent
+        { newEventName = end,
+          newEventAttributes = HashMap.fromList [("took ms", Otel.toAttribute tookMs)],
+          newEventTimestamp = Just e
+        }
+    )
+  pure res
+
+instance (ToField t1) => ToRow (Label l1 t1) where
+  toRow t2 = toRow $ PG.Only $ getField @l1 t2
+
+instance (ToField t1, ToField t2) => ToRow (T2 l1 t1 l2 t2) where
+  toRow t2 = toRow (getField @l1 t2, getField @l2 t2)
+
+instance (ToField t1, ToField t2, ToField t3) => ToRow (T3 l1 t1 l2 t2 l3 t3) where
+  toRow t3 = toRow (getField @l1 t3, getField @l2 t3, getField @l3 t3)
diff --git a/users/Profpatsch/my-prelude/src/Pretty.hs b/users/Profpatsch/my-prelude/src/Pretty.hs
new file mode 100644
index 0000000000..d9d4ce132b
--- /dev/null
+++ b/users/Profpatsch/my-prelude/src/Pretty.hs
@@ -0,0 +1,108 @@
+module Pretty
+  ( -- * Pretty printing for error messages
+    Err,
+    showPretty,
+    showPrettyJson,
+    showedStringPretty,
+    printPretty,
+    printShowedStringPretty,
+    -- constructors hidden
+    prettyErrs,
+    message,
+    messageString,
+    pretty,
+    prettyString,
+    hscolour',
+  )
+where
+
+import Data.Aeson qualified as Json
+import Data.Aeson.Encode.Pretty qualified as Aeson.Pretty
+import Data.List qualified as List
+import Data.Text.Lazy.Builder qualified as Text.Builder
+import Language.Haskell.HsColour
+  ( Output (TTYg),
+    hscolour,
+  )
+import Language.Haskell.HsColour.ANSI (TerminalType (..))
+import Language.Haskell.HsColour.Colourise
+  ( defaultColourPrefs,
+  )
+import PossehlAnalyticsPrelude
+import System.Console.ANSI (setSGRCode)
+import System.Console.ANSI.Types
+  ( Color (Red),
+    ColorIntensity (Dull),
+    ConsoleLayer (Foreground),
+    SGR (Reset, SetColor),
+  )
+import Text.Nicify (nicify)
+
+-- | Print any 'Show'able type to stderr, formatted nicely and in color. Very helpful for debugging.
+printPretty :: (Show a) => a -> IO ()
+printPretty a =
+  a & showPretty & putStderrLn
+
+showPretty :: (Show a) => a -> Text
+showPretty a = a & pretty & (: []) & prettyErrs & stringToText
+
+-- | Pretty-print a string that was produced by `show` to stderr, formatted nicely and in color.
+printShowedStringPretty :: String -> IO ()
+printShowedStringPretty s = s & showedStringPretty & putStderrLn
+
+-- | Pretty-print a string that was produced by `show`
+showedStringPretty :: String -> Text
+showedStringPretty s = s & ErrPrettyString & (: []) & prettyErrs & stringToText
+
+showPrettyJson :: Json.Value -> Text
+showPrettyJson val =
+  val
+    & Aeson.Pretty.encodePrettyToTextBuilder
+    & Text.Builder.toLazyText
+    & toStrict
+
+-- | Display a list of 'Err's as a colored error message.
+prettyErrs :: [Err] -> String
+prettyErrs errs = res
+  where
+    res = List.intercalate "\n" $ map one errs
+    one = \case
+      ErrMsg s -> color Red s
+      ErrPrettyString s -> prettyShowString s
+    -- Pretty print a String that was produced by 'show'
+    prettyShowString :: String -> String
+    prettyShowString = hscolour' . nicify
+
+-- | Small DSL for pretty-printing errors
+data Err
+  = -- | Message to display in the error
+    ErrMsg String
+  | -- | Pretty print a String that was produced by 'show'
+    ErrPrettyString String
+
+-- | Plain message to display, as 'Text'
+message :: Text -> Err
+message = ErrMsg . textToString
+
+-- | Plain message to display, as 'String'
+messageString :: String -> Err
+messageString = ErrMsg
+
+-- | Any 'Show'able to pretty print
+pretty :: (Show a) => a -> Err
+pretty x = ErrPrettyString $ show x
+
+-- | Pretty print a String that was produced by 'show'
+prettyString :: String -> Err
+prettyString s = ErrPrettyString s
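+
+-- A minimal sketch of the 'Err' DSL (the error text is invented):
+--
+-- @
+-- putStderrLn $ stringToText $ prettyErrs
+--   [ message "parsing failed:",
+--     pretty ([1, 2, 3] :: [Int])
+--   ]
+-- @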
+
+-- Prettifying Helpers, mostly stolen from
+-- https://hackage.haskell.org/package/hspec-expectations-pretty-diff-0.7.2.5/docs/src/Test.Hspec.Expectations.Pretty.html#prettyColor
+
+hscolour' :: String -> String
+hscolour' =
+  hscolour (TTYg Ansi16Colour) defaultColourPrefs False False "" False
+
+color :: Color -> String -> String
+color c s = setSGRCode [SetColor Foreground Dull c] ++ s ++ setSGRCode [Reset]
diff --git a/users/Profpatsch/my-prelude/src/Seconds.hs b/users/Profpatsch/my-prelude/src/Seconds.hs
new file mode 100644
index 0000000000..8d05f30be8
--- /dev/null
+++ b/users/Profpatsch/my-prelude/src/Seconds.hs
@@ -0,0 +1,55 @@
+module Seconds where
+
+import Data.Aeson (FromJSON)
+import Data.Aeson qualified as Json
+import Data.Aeson.Types (FromJSON (parseJSON))
+import Data.Scientific
+import Data.Time (NominalDiffTime)
+import FieldParser
+import FieldParser qualified as Field
+import GHC.Natural (naturalToInteger)
+import PossehlAnalyticsPrelude
+
+-- | A natural number of seconds.
+newtype Seconds = Seconds {unSeconds :: Natural}
+  deriving stock (Eq, Show)
+
+-- | Parse a decimal number as a number of seconds
+textToSeconds :: FieldParser Text Seconds
+textToSeconds = Seconds <$> Field.decimalNatural
+
+scientificToSeconds :: FieldParser Scientific Seconds
+scientificToSeconds =
+  ( Field.boundedScientificIntegral @Int "Number of seconds"
+      >>> Field.integralToNatural
+  )
+    & rmap Seconds
+
+-- | Microseconds, represented internally as a 64-bit 'Int'
+newtype MicrosecondsInt = MicrosecondsInt {unMicrosecondsInt :: Int}
+  deriving stock (Eq, Show)
+
+-- | Try to fit a number of seconds into a MicrosecondsInt
+secondsToMicrosecondsInt :: FieldParser Seconds MicrosecondsInt
+secondsToMicrosecondsInt =
+  lmap
+    (\sec -> naturalToInteger sec.unSeconds * 1_000_000)
+    (Field.bounded "Could not fit into an Int after multiplying with 1_000_000 (seconds to microseconds)")
+    & rmap MicrosecondsInt
+
+secondsToNominalDiffTime :: Seconds -> NominalDiffTime
+secondsToNominalDiffTime sec =
+  sec.unSeconds
+    & naturalToInteger
+    & fromInteger @NominalDiffTime
+
+instance FromJSON Seconds where
+  parseJSON = Field.toParseJSON jsonNumberToSeconds
+
+-- | Parse a json number as a number of seconds.
+jsonNumberToSeconds :: FieldParser' Error Json.Value Seconds
+jsonNumberToSeconds = Field.jsonNumber >>> scientificToSeconds
+
+-- | The number of seconds in a week
+secondsInAWeek :: Seconds
+secondsInAWeek = Seconds (3600 * 24 * 7)
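+
+-- Example (a sketch):
+--
+-- >>> Json.decode @Seconds "5"
+-- Just (Seconds {unSeconds = 5})
+--
+-- >>> runFieldParser secondsToMicrosecondsInt (Seconds 5)
+-- Right (MicrosecondsInt {unMicrosecondsInt = 5000000})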
diff --git a/users/Profpatsch/my-prelude/src/Test.hs b/users/Profpatsch/my-prelude/src/Test.hs
new file mode 100644
index 0000000000..862ee16c25
--- /dev/null
+++ b/users/Profpatsch/my-prelude/src/Test.hs
@@ -0,0 +1,115 @@
+{-# LANGUAGE LambdaCase #-}
+
+{- Generate Test suites.
+
+Restricted version of hspec, introduction: http://hspec.github.io/getting-started.html
+-}
+module Test
+  ( Spec,
+    runTest,
+    testMain,
+
+    -- * Structure
+    describe,
+    it,
+
+    -- * Expectations
+    Expectation,
+    testOk,
+    testErr,
+    shouldBe,
+    shouldNotBe,
+    shouldSatisfy,
+    shouldNotSatisfy,
+
+    -- * Setup & Teardown (hooks http://hspec.github.io/writing-specs.html#using-hooks)
+    before,
+    before_,
+    beforeWith,
+    beforeAll,
+    beforeAll_,
+    beforeAllWith,
+    after,
+    after_,
+    afterAll,
+    afterAll_,
+    around,
+    around_,
+    aroundWith,
+    aroundAll,
+    aroundAllWith,
+
+    -- * Common helpful predicates (use with 'shouldSatisfy')
+    isRight,
+    isLeft,
+
+    -- * Pretty printing of errors
+    errColored,
+    module Pretty,
+  )
+where
+
+-- export more expectations if needed
+
+import Data.Either
+  ( isLeft,
+    isRight,
+  )
+import Pretty
+import Test.Hspec
+  ( Expectation,
+    HasCallStack,
+    Spec,
+    after,
+    afterAll,
+    afterAll_,
+    after_,
+    around,
+    aroundAll,
+    aroundAllWith,
+    aroundWith,
+    around_,
+    before,
+    beforeAll,
+    beforeAllWith,
+    beforeAll_,
+    beforeWith,
+    before_,
+    describe,
+    hspec,
+    it,
+  )
+import Test.Hspec.Expectations.Pretty
+  ( expectationFailure,
+    shouldBe,
+    shouldNotBe,
+    shouldNotSatisfy,
+    shouldSatisfy,
+  )
+
+-- | Run a test directly (e.g. from the repl)
+runTest :: Spec -> IO ()
+runTest = hspec
+
+-- | Run a test suite
+testMain ::
+  -- | Name of the test suite
+  String ->
+  -- | The tests in this test module
+  Spec ->
+  IO ()
+testMain testSuiteName tests = hspec $ describe testSuiteName tests
+
+-- | The test expectation that always succeeds
+testOk :: Expectation
+testOk = pure ()
+
+-- | Abort the test with an error message.
+-- If you want to display a Haskell value, use `errColored`.
+testErr :: HasCallStack => String -> Expectation
+testErr = expectationFailure
+
+-- | Display a list of 'Err's as a colored error message
+-- and abort the test.
+errColored :: [Pretty.Err] -> Expectation
+errColored = testErr . Pretty.prettyErrs
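+
+-- Example (a sketch) of a small test module using these helpers:
+--
+-- > main :: IO ()
+-- > main = testMain "Example" $ do
+-- >   describe "addition" $ do
+-- >     it "adds numbers" $ (1 + 1) `shouldBe` (2 :: Int)
+-- >     it "is trivially ok" testOk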
diff --git a/users/Profpatsch/my-prelude/src/Tool.hs b/users/Profpatsch/my-prelude/src/Tool.hs
new file mode 100644
index 0000000000..b773f4444e
--- /dev/null
+++ b/users/Profpatsch/my-prelude/src/Tool.hs
@@ -0,0 +1,75 @@
+{-# LANGUAGE QuasiQuotes #-}
+
+module Tool where
+
+import Data.Error.Tree
+import Label
+import PossehlAnalyticsPrelude
+import System.Environment qualified as Env
+import System.Exit qualified as Exit
+import System.FilePath ((</>))
+import System.Posix qualified as Posix
+import ValidationParseT
+
+data Tool = Tool
+  { -- | absolute path to the executable
+    toolPath :: FilePath
+  }
+  deriving stock (Show)
+
+-- | Reads all tools from the @toolsEnvVar@ variable or aborts.
+readTools ::
+  Label "toolsEnvVar" Text ->
+  -- | Parser for Tools we bring with us at build time.
+  --
+  -- These are executables that we need available, and that we have to ship with the distribution of @pa-cli@.
+  ToolParserT IO tools ->
+  IO tools
+readTools env toolParser =
+  Env.lookupEnv (env.toolsEnvVar & textToString) >>= \case
+    Nothing -> do
+      Exit.die [fmt|Please set {env.toolsEnvVar} to a directory with all tools we need (see `Tools` in the code).|]
+    Just toolsDir ->
+      (Posix.fileExist toolsDir & ifTrueOrErr () [fmt|{env.toolsEnvVar} directory does not exist: {toolsDir}|])
+        & thenValidateM
+          ( \() ->
+              (Posix.getFileStatus toolsDir <&> Posix.isDirectory)
+                & ifTrueOrErr () [fmt|{env.toolsEnvVar} does not point to a directory: {toolsDir}|]
+          )
+        & thenValidateM
+          (\() -> toolParser.unToolParser toolsDir)
+        <&> first (errorTree [fmt|Could not find all tools in {env.toolsEnvVar}|])
+        >>= \case
+          Failure err -> Exit.die (err & prettyErrorTree & textToString)
+          Success t -> pure t
+
+newtype ToolParserT m a = ToolParserT
+  { unToolParser ::
+      FilePath ->
+      m (Validation (NonEmpty Error) a)
+  }
+  deriving
+    (Functor, Applicative)
+    via (ValidationParseT FilePath m)
+
+-- | Given the tool directory and the name of the tool executable, check that it exists and is executable, and return its full path.
+readTool :: Text -> ToolParserT IO Tool
+readTool exeName = ToolParserT $ \toolDir -> do
+  let toolPath :: FilePath = toolDir </> (exeName & textToString)
+  let read' = True
+  let write = False
+  let exec = True
+  Posix.fileExist toolPath
+    & ifTrueOrErr () [fmt|Tool does not exist: {toolPath}|]
+    & thenValidateM
+      ( \() ->
+          Posix.fileAccess toolPath read' write exec
+            & ifTrueOrErr (Tool {..}) [fmt|Tool is not readable/executable: {toolPath}|]
+      )
+
+-- | Helper: lift a boolean check into a 'Validation' that fails with @err@ when the check returns 'False'.
+ifTrueOrErr :: (Functor f) => a -> Text -> f Bool -> f (Validation (NonEmpty Error) a)
+ifTrueOrErr true err io =
+  io <&> \case
+    True -> Success true
+    False -> Failure $ singleton $ newError err
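+
+-- Example (a sketch; the record, the env var name and the tool names are hypothetical):
+--
+-- > data Tools = Tools {git :: Tool, curl :: Tool}
+-- >
+-- > getTools :: IO Tools
+-- > getTools =
+-- >   readTools
+-- >     (label @"toolsEnvVar" "MYPROJECT_TOOLS")
+-- >     (Tools <$> readTool "git" <*> readTool "curl")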
diff --git a/users/Profpatsch/my-prelude/src/ValidationParseT.hs b/users/Profpatsch/my-prelude/src/ValidationParseT.hs
new file mode 100644
index 0000000000..593b7ebf39
--- /dev/null
+++ b/users/Profpatsch/my-prelude/src/ValidationParseT.hs
@@ -0,0 +1,16 @@
+module ValidationParseT where
+
+import Control.Selective (Selective)
+import Data.Functor.Compose (Compose (..))
+import PossehlAnalyticsPrelude
+
+-- | A simple way to create an Applicative parser that parses from some environment.
+--
+-- Use with DerivingVia. Grep codebase for examples.
+newtype ValidationParseT env m a = ValidationParseT {unValidationParseT :: env -> m (Validation (NonEmpty Error) a)}
+  deriving
+    (Functor, Applicative, Selective)
+    via ( Compose
+            ((->) env)
+            (Compose m (Validation (NonEmpty Error)))
+        )
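+
+-- Example (a sketch; @EnvParser@ is hypothetical): deriving the instances for
+-- a parser that reads from a list of environment variables:
+--
+-- > newtype EnvParser a = EnvParser
+-- >   {unEnvParser :: [(Text, Text)] -> IO (Validation (NonEmpty Error) a)}
+-- >   deriving
+-- >     (Functor, Applicative, Selective)
+-- >     via (ValidationParseT [(Text, Text)] IO)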
diff --git a/users/Profpatsch/my-webstuff/default.nix b/users/Profpatsch/my-webstuff/default.nix
new file mode 100644
index 0000000000..0067235be2
--- /dev/null
+++ b/users/Profpatsch/my-webstuff/default.nix
@@ -0,0 +1,27 @@
+{ depot, pkgs, lib, ... }:
+
+pkgs.haskellPackages.mkDerivation {
+  pname = "my-webstuff";
+  version = "0.0.1-unreleased";
+
+  src = depot.users.Profpatsch.exactSource ./. [
+    ./my-webstuff.cabal
+    ./src/Multipart2.hs
+  ];
+
+  isLibrary = true;
+
+  libraryHaskellDepends = [
+    depot.users.Profpatsch.my-prelude
+    pkgs.haskellPackages.dlist
+    pkgs.haskellPackages.monad-logger
+    pkgs.haskellPackages.pa-error-tree
+    pkgs.haskellPackages.pa-field-parser
+    pkgs.haskellPackages.pa-prelude
+    pkgs.haskellPackages.selective
+    pkgs.haskellPackages.wai-extra
+  ];
+
+  license = lib.licenses.mit;
+
+}
diff --git a/users/Profpatsch/my-webstuff/my-webstuff.cabal b/users/Profpatsch/my-webstuff/my-webstuff.cabal
new file mode 100644
index 0000000000..fb42d9f6a5
--- /dev/null
+++ b/users/Profpatsch/my-webstuff/my-webstuff.cabal
@@ -0,0 +1,72 @@
+cabal-version:      3.0
+name:               my-webstuff
+version:            0.0.1.0
+author:             Profpatsch
+maintainer:         mail@profpatsch.de
+
+common common-options
+  ghc-options:
+      -Wall
+      -Wno-type-defaults
+      -Wunused-packages
+      -Wredundant-constraints
+      -fwarn-missing-deriving-strategies
+
+  -- See https://downloads.haskell.org/ghc/latest/docs/users_guide/exts.html
+  -- for a description of all these extensions
+  default-extensions:
+      -- Infer Applicative instead of Monad where possible
+    ApplicativeDo
+
+    -- Allow literal strings to be Text
+    OverloadedStrings
+
+    -- Syntactic sugar improvements
+    LambdaCase
+    MultiWayIf
+
+    -- Makes the (deprecated) usage of * instead of Data.Kind.Type an error
+    NoStarIsType
+
+    -- Convenient and crucial to deal with ambiguous field names, commonly
+    -- known as RecordDotSyntax
+    OverloadedRecordDot
+
+    -- does not export record fields as functions, use OverloadedRecordDot to access instead
+    NoFieldSelectors
+
+    -- Record punning
+    RecordWildCards
+
+    -- Improved Deriving
+    DerivingStrategies
+    DerivingVia
+
+    -- Type-level strings
+    DataKinds
+
+    -- to enable the `type` keyword in import lists (ormolu uses this automatically)
+    ExplicitNamespaces
+
+  default-language: GHC2021
+
+
+library
+    import: common-options
+    hs-source-dirs: src
+    exposed-modules:
+      Multipart2
+
+    build-depends:
+       base >=4.15 && <5
+     , my-prelude
+     , pa-prelude
+     , pa-label
+     , pa-error-tree
+     , pa-field-parser
+     , bytestring
+     , monad-logger
+     , dlist
+     , selective
+     , wai
+     , wai-extra
diff --git a/users/Profpatsch/my-webstuff/src/Multipart2.hs b/users/Profpatsch/my-webstuff/src/Multipart2.hs
new file mode 100644
index 0000000000..5c283a3c1b
--- /dev/null
+++ b/users/Profpatsch/my-webstuff/src/Multipart2.hs
@@ -0,0 +1,220 @@
+{-# LANGUAGE QuasiQuotes #-}
+
+module Multipart2 where
+
+import Control.Monad.Logger (MonadLogger)
+import Control.Selective (Selective)
+import Data.ByteString.Lazy qualified as Lazy
+import Data.DList (DList)
+import Data.DList qualified as DList
+import Data.Error.Tree
+import Data.Functor.Compose
+import Data.List qualified as List
+import FieldParser
+import Label
+import Network.Wai qualified as Wai
+import Network.Wai.Parse qualified as Wai
+import PossehlAnalyticsPrelude
+import ValidationParseT
+
+data FormFields = FormFields
+  { inputs :: [Wai.Param],
+    files :: [MultipartFile Lazy.ByteString]
+  }
+
+-- | A parser for an HTTP multipart form (a form sent by the browser)
+newtype MultipartParseT backend m a = MultipartParseT
+  { unMultipartParseT ::
+      FormFields ->
+      m (Validation (NonEmpty Error) a)
+  }
+  deriving
+    (Functor, Applicative, Selective)
+    via (ValidationParseT FormFields m)
+
+-- | After parsing a form, we get a validation result for every form field, plus the parsed value if no field failed
+newtype FormValidation a
+  = FormValidation
+      (DList FormValidationResult, Maybe a)
+  deriving (Functor, Applicative, Selective) via (Compose ((,) (DList FormValidationResult)) Maybe)
+  deriving stock (Show)
+
+data FormValidationResult = FormValidationResult
+  { hasError :: Maybe Error,
+    formFieldName :: ByteString,
+    originalValue :: ByteString
+  }
+  deriving stock (Show)
+
+mkFormValidationResult ::
+  ( HasField "formFieldName" form ByteString,
+    HasField "originalValue" form ByteString
+  ) =>
+  form ->
+  Maybe Error ->
+  FormValidationResult
+mkFormValidationResult form err =
+  FormValidationResult
+    { hasError = err,
+      formFieldName = form.formFieldName,
+      originalValue = form.originalValue
+    }
+
+eitherToFormValidation ::
+  ( HasField "formFieldName" form ByteString,
+    HasField "originalValue" form ByteString
+  ) =>
+  form ->
+  Either Error a ->
+  FormValidation a
+eitherToFormValidation form = \case
+  Left err ->
+    FormValidation $ (DList.singleton $ mkFormValidationResult form (Just err), Nothing)
+  Right a ->
+    FormValidation $ ((DList.singleton $ mkFormValidationResult form Nothing), Just a)
+
+failFormValidation ::
+  ( HasField "formFieldName" form ByteString,
+    HasField "originalValue" form ByteString
+  ) =>
+  form ->
+  Error ->
+  FormValidation a
+failFormValidation form err =
+  FormValidation (DList.singleton $ mkFormValidationResult form (Just err), Nothing)
+
+-- | Parse the multipart form or throw a user error with a descriptive error message.
+parseMultipartOrThrow ::
+  (MonadLogger m, MonadIO m) =>
+  (ErrorTree -> m a) ->
+  MultipartParseT backend m a ->
+  Wai.Request ->
+  m a
+parseMultipartOrThrow throwF parser req = do
+  -- TODO: this throws all errors with `error`, so leads to 500 on bad input …
+  formFields <-
+    liftIO $
+      Wai.parseRequestBodyEx
+        Wai.defaultParseRequestBodyOptions
+        Wai.lbsBackEnd
+        req
+  parser.unMultipartParseT
+    FormFields
+      { inputs = fst formFields,
+        files = map fileDataToMultipartFile $ snd formFields
+      }
+    >>= \case
+      Failure errs -> throwF (errorTree "Cannot parse the multipart form" errs)
+      Success a -> pure a
+
+-- | Parse the field out of the multipart message
+field :: (Applicative m) => ByteString -> FieldParser ByteString a -> MultipartParseT backend m a
+field fieldName fieldParser = MultipartParseT $ \mp ->
+  mp.inputs
+    & findMaybe (\input -> if fst input == fieldName then Just (snd input) else Nothing)
+    & annotate [fmt|Field "{fieldName}" does not exist in the multipart form|]
+    >>= runFieldParser fieldParser
+    & eitherToListValidation
+    & pure
+
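+-- Example (a sketch; assumes a @utf8 :: FieldParser ByteString Text@ in "FieldParser"):
+--
+-- > loginParser :: (Applicative m) => MultipartParseT backend m (Text, Text)
+-- > loginParser = (,) <$> field "user" utf8 <*> field "password" utf8
+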
+-- | Parse the field out of the multipart message
+field' :: (Applicative m) => ByteString -> FieldParser ByteString a -> MultipartParseT backend m (FormValidation a)
+field' fieldName fieldParser = MultipartParseT $ \mp ->
+  mp.inputs
+    & findMaybe (\input -> if fst input == fieldName then Just $ snd input else Nothing)
+    & annotate [fmt|Field "{fieldName}" does not exist in the multipart form|]
+    <&> ( \originalValue ->
+            originalValue
+              & runFieldParser fieldParser
+              & eitherToFormValidation
+                ( T2
+                    (label @"formFieldName" fieldName)
+                    (label @"originalValue" originalValue)
+                )
+        )
+    & eitherToListValidation
+    & pure
+
+-- | Parse the field out of the multipart message, and into a 'Label' of the given name.
+fieldLabel :: forall lbl backend m a. (Applicative m) => ByteString -> FieldParser ByteString a -> MultipartParseT backend m (Label lbl a)
+fieldLabel fieldName fieldParser = label @lbl <$> field fieldName fieldParser
+
+-- | Parse the field out of the multipart message, and into a 'Label' of the given name.
+fieldLabel' :: forall lbl backend m a. (Applicative m) => ByteString -> FieldParser ByteString a -> MultipartParseT backend m (FormValidation (Label lbl a))
+fieldLabel' fieldName fieldParser = fmap (label @lbl) <$> field' fieldName fieldParser
+
+-- | Parse all fields out of the multipart message, with the same parser
+allFields :: (Applicative m) => FieldParser (T2 "key" ByteString "value" ByteString) b -> MultipartParseT backend m [b]
+allFields fieldParser = MultipartParseT $ \mp ->
+  mp.inputs
+    <&> tupToT2 @"key" @"value"
+    & traverseValidate (runFieldParser fieldParser)
+    & eitherToValidation
+    & pure
+
+tupToT2 :: forall l1 l2 t1 t2. (t1, t2) -> T2 l1 t1 l2 t2
+tupToT2 (a, b) = T2 (label a) (label b)
+
+-- | Parse a file by name out of the multipart message
+file ::
+  (Applicative m) =>
+  ByteString ->
+  MultipartParseT backend m (MultipartFile Lazy.ByteString)
+file fieldName = MultipartParseT $ \mp ->
+  mp.files
+    & List.find (\input -> input.multipartNameAttribute == fieldName)
+    & annotate [fmt|File "{fieldName}" does not exist in the multipart form|]
+    & ( \case
+          Left err -> Failure (singleton err)
+          Right filePath -> Success filePath
+      )
+    & pure
+
+-- | Return all files from the multipart message
+allFiles ::
+  (Applicative m) =>
+  MultipartParseT backend m [MultipartFile Lazy.ByteString]
+allFiles = MultipartParseT $ \mp -> do
+  pure $ Success $ mp.files
+
+-- | Ensure there is exactly one file and return it (ignoring the field name)
+exactlyOneFile ::
+  (Applicative m) =>
+  MultipartParseT backend m (MultipartFile Lazy.ByteString)
+exactlyOneFile = MultipartParseT $ \mp ->
+  mp.files
+    & \case
+      [] -> pure $ failParse "Expected to receive a file, but the multipart form did not contain any files"
+      [file_] -> pure $ Success file_
+      more -> pure $ failParse [fmt|Expected to receive exactly one file, but the multipart form contained {List.length more} files|]
+  where
+    -- \| Fail to parse the multipart form with the given error message.
+    failParse :: Text -> Validation (NonEmpty Error) a
+    failParse = Failure . singleton . newError
+
+newtype GetFileContent backend m content = GetFileContent
+  {unGetFileContent :: (Wai.Request -> m (Either Error content))}
+
+-- | A file field in a multipart message.
+data MultipartFile content = MultipartFile
+  { -- | @name@ attribute of the corresponding HTML @\<input\>@
+    multipartNameAttribute :: ByteString,
+    -- | name of the file on the client's disk
+    fileNameOnDisk :: ByteString,
+    -- | MIME type for the file
+    fileMimeType :: ByteString,
+    -- | Content of the file
+    content :: content
+  }
+
+-- | Convert the multipart library's file struct to our own.
+fileDataToMultipartFile ::
+  Wai.File Lazy.ByteString ->
+  (MultipartFile Lazy.ByteString)
+fileDataToMultipartFile (multipartNameAttribute, file_) = do
+  MultipartFile
+    { multipartNameAttribute,
+      fileNameOnDisk = file_.fileName,
+      fileMimeType = file_.fileContentType,
+      content = file_.fileContent
+    }
diff --git a/users/Profpatsch/my-xmonad/Xmonad.hs b/users/Profpatsch/my-xmonad/Xmonad.hs
new file mode 100644
index 0000000000..bb727ac2f1
--- /dev/null
+++ b/users/Profpatsch/my-xmonad/Xmonad.hs
@@ -0,0 +1,127 @@
+module Main where
+
+import Data.Function ((&))
+import XMonad
+import XMonad qualified as Xmonad
+import XMonad.Hooks.EwmhDesktops (ewmh)
+import XMonad.Layout.Decoration
+import XMonad.Layout.MultiToggle
+import XMonad.Layout.MultiToggle.Instances (StdTransformers (..))
+import XMonad.Layout.Tabbed (TabbedDecoration)
+import XMonad.Layout.Tabbed qualified as Tabbed
+import XMonad.StackSet qualified as StackSet
+import XMonad.Util.Cursor (setDefaultCursor)
+import XMonad.Util.EZConfig (additionalKeys, additionalKeysP, removeKeysP)
+
+data Mode = Normal | Presentation
+
+main :: IO ()
+main = do
+  let config = ewmh myConfig
+  dirs <- Xmonad.getDirectories
+  Xmonad.launch config dirs
+
+myConfig ::
+  XConfig
+    ( MultiToggle
+        ( HCons
+            StdTransformers
+            XMonad.Layout.MultiToggle.EOT
+        )
+        ( ModifiedLayout
+            ( Decoration
+                TabbedDecoration
+                DefaultShrinker
+            )
+            Tall
+        )
+    )
+myConfig =
+  conf
+    { modMask = modKey,
+      terminal = term Normal,
+      focusedBorderColor = "#859900",
+      layoutHook = layout,
+      startupHook = setDefaultCursor xC_heart,
+      workspaces = workspaceNames
+    }
+    `additionalKeysP` ( [
+                          -- fullscreen
+                          ("M-e", sendMessage $ Toggle NBFULL),
+                          -- i3-like keybindings, because I’m spoiled
+                          ("M-S-x", kill),
+                          -- exchange M-Ret and M-S-Ret
+                          ("M-<Return>", spawn $ term Normal),
+                          ("C-M-<Return>", spawn $ term Presentation),
+                          ("M-S-<Return>", windows StackSet.swapMaster)
+                          -- open simple exec dmenu
+                        ]
+                          ++
+                          -- something something workspaces
+                          [ (otherModMasks ++ "M-" ++ [key], action tag)
+                            | (tag, key) <- zip workspaceNames "123456789",
+                              (otherModMasks, action) <-
+                                [ ("", windows . StackSet.greedyView),
+                                  ("S-", windows . StackSet.shift)
+                                ]
+                          ]
+                          ++
+                          -- mod-{w,e,r} %! Switch to physical/Xinerama screens 1, 2, or 3
+                          -- mod-shift-{w,e,r} %! Move client to screen 1, 2, or 3
+                          [ ("M-v", focusToScreen 0),
+                            -- , ("M-l", focusToScreen 1)
+                            ("M-c", focusToScreen 2),
+                            ("M-S-v", windowToScreen 0),
+                            ("M-S-l", windowToScreen 1),
+                            ("M-S-c", windowToScreen 2)
+                          ]
+                          -- ((m .|. modMask, key), screenWorkspace sc >>= flip whenJust (windows . f))
+                          --   | (key, sc) <- zip [xK_w, xK_e, xK_r] [0..]
+                          --    , (f, m) <- [(W.view, 0), (W.shift, shiftMask)]]
+                      )
+    `additionalKeys`
+    -- arrow keys should move as well (hjkl blindness)
+    [ ((modKey, xK_Up), windows StackSet.focusUp),
+      ((modKey, xK_Down), windows StackSet.focusDown)
+    ]
+    `removeKeysP` [
+                    -- previous kill command
+                    "M-S-c",
+                    -- It is way too easy to kill everything by default
+                    "M-S-q",
+                    -- no idea, I want to use it for Mozc
+                    "M-n"
+                  ]
+  where
+    conf = def
+    workspaceNames = conf & workspaces
+    modKey = mod4Mask
+    -- TODO: meh
+    term :: Mode -> String
+    -- TODO: get terminal-emulator from the system config (currently alacritty)
+    term Normal = "terminal-emulator"
+    term Presentation = "notify-send 'TODO: terminal presentation mode is not implemented yet'" -- "terminal- -u ~/.config/lilyterm/pres.conf"
+    toScreen with number = screenWorkspace number >>= \ws -> whenJust ws (windows . with)
+    focusToScreen = toScreen StackSet.view
+    windowToScreen = toScreen StackSet.shift
+
+-- copied from Xmonad.Config
+layout ::
+  MultiToggle
+    (HCons StdTransformers EOT)
+    (ModifiedLayout (Decoration TabbedDecoration DefaultShrinker) Tall)
+    Window
+layout =
+  tiled
+    & Tabbed.addTabsBottom Tabbed.shrinkText def
+    & toggleFullscreen
+  where
+    -- default tiling algorithm partitions the screen into two panes
+    tiled = Tall nmaster delta ratio
+    -- The default number of windows in the master pane
+    nmaster = 1
+    -- Default proportion of screen occupied by master pane
+    ratio = 1 / 2
+    -- Percent of screen to increment by when resizing panes
+    delta = 3 / 100
+    toggleFullscreen = mkToggle1 NBFULL
diff --git a/users/Profpatsch/my-xmonad/default.nix b/users/Profpatsch/my-xmonad/default.nix
new file mode 100644
index 0000000000..708d50e960
--- /dev/null
+++ b/users/Profpatsch/my-xmonad/default.nix
@@ -0,0 +1,25 @@
+{ depot, pkgs, lib, ... }:
+
+let
+  #   bins = depot.nix.getBins pkgs.sqlite ["sqlite3"];
+
+  my-xmonad = pkgs.haskellPackages.mkDerivation {
+    pname = "my-xmonad";
+    version = "0.1.0";
+
+    src = depot.users.Profpatsch.exactSource ./. [
+      ./my-xmonad.cabal
+      ./Xmonad.hs
+    ];
+
+    libraryHaskellDepends = [
+      pkgs.haskellPackages.xmonad-contrib
+    ];
+
+    isExecutable = true;
+    isLibrary = false;
+    license = lib.licenses.mit;
+  };
+
+in
+my-xmonad
diff --git a/users/Profpatsch/my-xmonad/my-xmonad.cabal b/users/Profpatsch/my-xmonad/my-xmonad.cabal
new file mode 100644
index 0000000000..175c6c1633
--- /dev/null
+++ b/users/Profpatsch/my-xmonad/my-xmonad.cabal
@@ -0,0 +1,62 @@
+cabal-version:      3.0
+name:               my-xmonad
+version:            0.1.0.0
+author:             Profpatsch
+maintainer:         mail@profpatsch.de
+
+common common-options
+  ghc-options:
+      -Wall
+      -Wno-type-defaults
+      -Wunused-packages
+      -Wredundant-constraints
+      -fwarn-missing-deriving-strategies
+
+  -- See https://downloads.haskell.org/ghc/latest/docs/users_guide/exts.html
+  -- for a description of all these extensions
+  default-extensions:
+      -- Infer Applicative instead of Monad where possible
+    ApplicativeDo
+
+    -- Allow literal strings to be Text
+    OverloadedStrings
+
+    -- Syntactic sugar improvements
+    LambdaCase
+    MultiWayIf
+
+    -- Makes the (deprecated) usage of * instead of Data.Kind.Type an error
+    NoStarIsType
+
+    -- Convenient and crucial to deal with ambiguous field names, commonly
+    -- known as RecordDotSyntax
+    OverloadedRecordDot
+
+    -- does not export record fields as functions, use OverloadedRecordDot to access instead
+    NoFieldSelectors
+
+    -- Record punning
+    RecordWildCards
+
+    -- Improved Deriving
+    DerivingStrategies
+    DerivingVia
+
+    -- Type-level strings
+    DataKinds
+
+    -- to enable the `type` keyword in import lists (ormolu uses this automatically)
+    ExplicitNamespaces
+
+  default-language: GHC2021
+
+
+executable xmonad
+    import: common-options
+
+    main-is: Xmonad.hs
+
+    build-depends:
+        base >=4.15 && <5,
+        xmonad,
+        xmonad-contrib
diff --git a/users/Profpatsch/netencode/Netencode.hs b/users/Profpatsch/netencode/Netencode.hs
new file mode 100644
index 0000000000..ca93ab2fef
--- /dev/null
+++ b/users/Profpatsch/netencode/Netencode.hs
@@ -0,0 +1,433 @@
+{-# LANGUAGE AllowAmbiguousTypes #-}
+{-# LANGUAGE QuasiQuotes #-}
+{-# LANGUAGE TemplateHaskell #-}
+
+module Netencode where
+
+import Control.Applicative (many)
+import Data.Attoparsec.ByteString qualified as Atto
+import Data.Attoparsec.ByteString.Char8 qualified as Atto.Char
+import Data.ByteString qualified as ByteString
+import Data.ByteString.Builder (Builder)
+import Data.ByteString.Builder qualified as Builder
+import Data.ByteString.Lazy qualified as ByteString.Lazy
+import Data.Fix (Fix (Fix))
+import Data.Fix qualified as Fix
+import Data.Functor.Classes (Eq1 (liftEq))
+import Data.Int (Int16, Int32, Int64, Int8)
+import Data.Map.NonEmpty (NEMap)
+import Data.Map.NonEmpty qualified as NEMap
+import Data.Semigroup qualified as Semi
+import Data.String (IsString)
+import Data.Word (Word16, Word32, Word64)
+import GHC.Exts (fromString)
+import Hedgehog qualified as Hedge
+import Hedgehog.Gen qualified as Gen
+import Hedgehog.Range qualified as Range
+import PossehlAnalyticsPrelude
+import Text.Show.Deriving
+import Prelude hiding (sum)
+
+-- | Netencode type base functor.
+--
+-- Recursive elements have a @rec@.
+data TF rec
+  = -- | Unit value
+    Unit
+  | -- | Boolean (2^1)
+    N1 Bool
+  | -- | Byte (2^3)
+    N3 Word8
+  | -- | 64-bit Natural (2^6)
+    N6 Word64
+  | -- | 64-bit Integer (2^6)
+    I6 Int64
+  | -- | Unicode Text
+    Text Text
+  | -- | Arbitrary Bytestring
+    Bytes ByteString
+  | -- | A constructor of a(n open) Sum
+    Sum (Tag Text rec)
+  | -- | Record
+    Record (NEMap Text rec)
+  | -- | List
+    List [rec]
+  deriving stock (Show, Eq, Functor)
+
+instance Eq1 TF where
+  liftEq _ Unit Unit = True
+  liftEq _ (N1 b) (N1 b') = b == b'
+  liftEq _ (N3 w8) (N3 w8') = w8 == w8'
+  liftEq _ (N6 w64) (N6 w64') = w64 == w64'
+  liftEq _ (I6 i64) (I6 i64') = i64 == i64'
+  liftEq _ (Text t) (Text t') = t == t'
+  liftEq _ (Bytes b) (Bytes b') = b == b'
+  liftEq eq (Sum t) (Sum t') = eq (t.tagVal) (t'.tagVal)
+  liftEq eq (Record m) (Record m') = liftEq eq m m'
+  liftEq eq (List xs) (List xs') = liftEq eq xs xs'
+  liftEq _ _ _ = False
+
+-- | A tagged value
+data Tag tag val = Tag
+  { tagTag :: tag,
+    tagVal :: val
+  }
+  deriving stock (Show, Eq, Functor)
+
+$(Text.Show.Deriving.deriveShow1 ''Tag)
+$(Text.Show.Deriving.deriveShow1 ''TF)
+
+-- | The Netencode type
+newtype T = T {unT :: Fix TF}
+  deriving stock (Eq, Show)
+
+-- | Create a unit
+unit :: T
+unit = T $ Fix Unit
+
+-- | Create a boolean
+n1 :: Bool -> T
+n1 = T . Fix . N1
+
+-- | Create a byte
+n3 :: Word8 -> T
+n3 = T . Fix . N3
+
+-- | Create a 64-bit natural
+n6 :: Word64 -> T
+n6 = T . Fix . N6
+
+-- | Create a 64-bit integer
+i6 :: Int64 -> T
+i6 = T . Fix . I6
+
+-- | Create a UTF-8 unicode text
+text :: Text -> T
+text = T . Fix . Text
+
+-- | Create an arbitrary bytestring
+bytes :: ByteString -> T
+bytes = T . Fix . Bytes
+
+-- | Create a tagged value from a tag name and a value
+tag :: Text -> T -> T
+tag key val = T $ Fix $ Sum $ coerce @(Tag Text T) @(Tag Text (Fix TF)) $ Tag key val
+
+-- | Create a record from a non-empty map
+record :: NEMap Text T -> T
+record = T . Fix . Record . coerce @(NEMap Text T) @(NEMap Text (Fix TF))
+
+-- | Create a list
+list :: [T] -> T
+list = T . Fix . List . coerce @[T] @([Fix TF])
+
+-- | Stable encoding of a netencode value. Record keys will be sorted lexicographically ascending.
+netencodeEncodeStable :: T -> Builder
+netencodeEncodeStable (T fix) = Fix.foldFix (netencodeEncodeStableF id) fix
+
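+-- Example (a sketch):
+--
+-- >>> netencodeEncodeStable (text "foo") & Builder.toLazyByteString
+-- "t3:foo,"
+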
+-- | Stable encoding of a netencode functor value. Record keys will be sorted lexicographically ascending.
+--
+-- The given function is used for encoding the recursive values.
+netencodeEncodeStableF :: (rec -> Builder) -> TF rec -> Builder
+netencodeEncodeStableF inner tf = builder go
+  where
+    -- TODO: directly pass in BL?
+    innerBL = fromBuilder . inner
+    go = case tf of
+      Unit -> "u,"
+      N1 False -> "n1:0,"
+      N1 True -> "n1:1,"
+      N3 w8 -> "n3:" <> fromBuilder (Builder.word8Dec w8) <> ","
+      N6 w64 -> "n6:" <> fromBuilder (Builder.word64Dec w64) <> ","
+      I6 i64 -> "i6:" <> fromBuilder (Builder.int64Dec i64) <> ","
+      Text t ->
+        let b = fromText t
+         in "t" <> builderLenDec b <> ":" <> b <> ","
+      Bytes b -> "b" <> builderLenDec (fromByteString b) <> ":" <> fromByteString b <> ","
+      Sum (Tag key val) -> encTag key val
+      Record m ->
+        -- NEMap uses Map internally, and that folds in lexicographic ascending order over the key.
+        -- Since these are `Text` in our case, this is stable.
+        let mBuilder = m & NEMap.foldMapWithKey encTag
+         in "{" <> builderLenDec mBuilder <> ":" <> mBuilder <> "}"
+      List xs ->
+        let xsBuilder = xs <&> innerBL & mconcat
+         in "[" <> builderLenDec xsBuilder <> ":" <> xsBuilder <> "]"
+      where
+        encTag key val =
+          let bKey = fromText key
+           in "<" <> builderLenDec bKey <> ":" <> bKey <> "|" <> innerBL val
+
+-- | A builder that knows its own size in bytes
+newtype BL = BL (Builder, Semi.Sum Natural)
+  deriving newtype (Monoid, Semigroup)
+
+instance IsString BL where
+  fromString s =
+    BL
+      ( fromString @Builder s,
+        fromString @ByteString s
+          & ByteString.length
+          & intToNatural
+          & fromMaybe 0
+          & Semi.Sum
+      )
+
+-- | Retrieve the builder
+builder :: BL -> Builder
+builder (BL (b, _)) = b
+
+-- | Retrieve the bytestring length
+builderLen :: BL -> Natural
+builderLen (BL (_, len)) = Semi.getSum $ len
+
+-- | Take a 'BL' and create a new 'BL' that represents the length as a decimal integer
+builderLenDec :: BL -> BL
+builderLenDec (BL (_, len)) =
+  let b = Builder.intDec $ (len & Semi.getSum & fromIntegral @Natural @Int)
+   in b & fromBuilder
+
+-- | Create a 'BL' from a 'Builder'.
+--
+-- Not efficient, goes back to a lazy bytestring to get the length
+fromBuilder :: Builder -> BL
+fromBuilder b =
+  BL
+    ( b,
+      b
+        & Builder.toLazyByteString
+        & ByteString.Lazy.length
+        & fromIntegral @Int64 @Natural
+        & Semi.Sum
+    )
+
+-- | Create a 'BL' from a 'ByteString'.
+fromByteString :: ByteString -> BL
+fromByteString b =
+  BL
+    ( Builder.byteString b,
+      b
+        & ByteString.length
+        & fromIntegral @Int @Natural
+        & Semi.Sum
+    )
+
+-- | Create a 'BL' from a 'Text'.
+fromText :: Text -> BL
+fromText t = t & textToBytesUtf8 & fromByteString
+
+-- | Parser for a netencode value.
+netencodeParser :: Atto.Parser T
+netencodeParser = T <$> go
+  where
+    go = Fix <$> netencodeParserF go
+
+-- | Parser for one level of a netencode value. Requires a parser for the recursion.
+netencodeParserF :: Atto.Parser rec -> Atto.Parser (TF rec)
+netencodeParserF inner = do
+  typeTag <- Atto.Char.anyChar
+  case typeTag of
+    't' -> Text <$> textParser
+    'b' -> Bytes <$> bytesParser
+    'u' -> unitParser
+    '<' -> Sum <$> tagParser
+    '{' -> Record <$> recordParser
+    '[' -> List <$> listParser
+    'n' -> naturalParser
+    'i' -> I6 <$> intParser
+    c -> fail ([c] <> " is not a valid netencode tag")
+  where
+    bytesParser = do
+      len <- boundedDecimalFail Atto.<?> "bytes is missing a digit specifying the length"
+      _ <- Atto.Char.char ':' Atto.<?> "bytes did not have : after length"
+      bytes' <- Atto.take len
+      _ <- Atto.Char.char ',' Atto.<?> "bytes did not end with ,"
+      pure bytes'
+
+    textParser = do
+      len <- boundedDecimalFail Atto.<?> "text is missing a digit specifying the length"
+      _ <- Atto.Char.char ':' Atto.<?> "text did not have : after length"
+      text' <-
+        Atto.take len <&> bytesToTextUtf8 >>= \case
+          Left err -> fail [fmt|cannot decode text as utf8: {err & prettyError}|]
+          Right t -> pure t
+      _ <- Atto.Char.char ',' Atto.<?> "text did not end with ,"
+      pure text'
+
+    unitParser = do
+      _ <- Atto.Char.char ',' Atto.<?> "unit did not end with ,"
+      pure $ Unit
+
+    tagParser = do
+      len <- boundedDecimalFail Atto.<?> "tag is missing a digit specifying the length"
+      _ <- Atto.Char.char ':' Atto.<?> "tag did not have : after length"
+      tagTag <-
+        Atto.take len <&> bytesToTextUtf8 >>= \case
+          Left err -> fail [fmt|cannot decode tag key as utf8: {err & prettyError}|]
+          Right t -> pure t
+      _ <- Atto.Char.char '|' Atto.<?> "tag was missing the key/value separator (|)"
+      tagVal <- inner
+      pure $ Tag {..}
+
+    recordParser = do
+      -- TODO: the record does not use its inner length because we are descending into the inner parsers.
+      -- This is a smell! In theory it could be used to skip parsing the inner keys entirely.
+      _len <- boundedDecimalFail Atto.<?> "record is missing a digit specifying the length"
+      _ <- Atto.Char.char ':' Atto.<?> "record did not have : after length"
+      record' <-
+        many (Atto.Char.char '<' >> tagParser) <&> nonEmpty >>= \case
+          Nothing -> fail "record is not allowed to have 0 elements"
+          Just tags ->
+            pure $
+              tags
+                <&> (\t -> (t.tagTag, t.tagVal))
+                -- later keys are preferred if they are duplicates, according to the standard
+                & NEMap.fromList
+      _ <- Atto.Char.char '}' Atto.<?> "record did not end with }"
+      pure record'
+
+    listParser = do
+      -- TODO: the list does not use its inner length because we are descending into the inner parsers.
+      -- This is a smell! In theory it could be used to skip parsing the inner elements entirely.
+      _len <- boundedDecimalFail Atto.<?> "list is missing a digit specifying the length"
+      _ <- Atto.Char.char ':' Atto.<?> "list did not have : after length"
+      -- TODO: allow empty lists?
+      list' <- many inner
+      _ <- Atto.Char.char ']' Atto.<?> "list did not end with ]"
+      pure list'
+
+    intParser = do
+      let p :: forall parseSize. (Bounded parseSize, Integral parseSize) => (Integer -> Atto.Parser Int64)
+          p n = do
+            _ <- Atto.Char.char ':' Atto.<?> [fmt|i{n & show} did not have : after length|]
+            isNegative <- Atto.option False (Atto.Char.char '-' <&> \_c -> True)
+            int <-
+              boundedDecimal @parseSize >>= \case
+                Nothing -> fail [fmt|cannot parse into i{n & show}, the number is too big (would overflow)|]
+                Just i ->
+                  pure $
+                    if isNegative
+                      then -- TODO: this should already be done in the decimal parser; @minBound@ cannot be parsed because it’s one more than @(-maxBound)@!
+                        (-i)
+                      else i
+            _ <- Atto.Char.char ',' Atto.<?> [fmt|i{n & show} did not end with ,|]
+            pure $ fromIntegral @parseSize @Int64 int
+      digit <- Atto.Char.digit
+      case digit of
+        -- TODO: separate parser for i1 and i2 that makes sure the boundaries are right!
+        '1' -> p @Int8 1
+        '2' -> p @Int8 2
+        '3' -> p @Int8 3
+        '4' -> p @Int16 4
+        '5' -> p @Int32 5
+        '6' -> p @Int64 6
+        '7' -> fail [fmt|i parser only supports numbers up to size 6, was 7|]
+        '8' -> fail [fmt|i parser only supports numbers up to size 6, was 8|]
+        '9' -> fail [fmt|i parser only supports numbers up to size 6, was 9|]
+        o -> fail [fmt|i number with length {o & show} not possible|]
+
+    naturalParser = do
+      let p :: forall parseSize finalSize. (Bounded parseSize, Integral parseSize, Num finalSize) => (Integer -> Atto.Parser finalSize)
+          p n = do
+            _ <- Atto.Char.char ':' Atto.<?> [fmt|n{n & show} did not have : after length|]
+            int <-
+              boundedDecimal @parseSize >>= \case
+                Nothing -> fail [fmt|cannot parse into n{n & show}, the number is too big (would overflow)|]
+                Just i -> pure i
+
+            _ <- Atto.Char.char ',' Atto.<?> [fmt|n{n & show} did not end with ,|]
+            pure $ fromIntegral @parseSize @finalSize int
+      let b n = do
+            _ <- Atto.Char.char ':' Atto.<?> [fmt|n{n & show} did not have : after length|]
+            bool <-
+              (Atto.Char.char '0' >> pure False)
+                <|> (Atto.Char.char '1' >> pure True)
+            _ <- Atto.Char.char ',' Atto.<?> [fmt|n{n & show} did not end with ,|]
+            pure bool
+
+      digit <- Atto.Char.digit
+      case digit of
+        -- TODO: separate parser for n1 and n2 that makes sure the boundaries are right!
+        '1' -> N1 <$> b 1
+        '2' -> N3 <$> p @Word8 @Word8 2
+        '3' -> N3 <$> p @Word8 @Word8 3
+        '4' -> N6 <$> p @Word16 @Word64 4
+        '5' -> N6 <$> p @Word32 @Word64 5
+        '6' -> N6 <$> p @Word64 @Word64 6
+        '7' -> fail [fmt|n parser only supports numbers up to size 6, was 7|]
+        '8' -> fail [fmt|n parser only supports numbers up to size 6, was 8|]
+        '9' -> fail [fmt|n parser only supports numbers up to size 6, was 9|]
+        o -> fail [fmt|n number with length {o & show} not possible|]
+
+-- | Parser for a decimal that does not overflow the bounded integral type @a@ (returns 'Nothing' if it would).
+--
+--  via https://www.extrema.is/blog/2021/10/20/parsing-bounded-integers
+boundedDecimal :: forall a. (Bounded a, Integral a) => Atto.Parser (Maybe a)
+boundedDecimal = do
+  i :: Integer <- decimal
+  pure $
+    if (i :: Integer) > fromIntegral (maxBound :: a)
+      then Nothing
+      else Just $ fromIntegral i
+  where
+    -- Copied from @Attoparsec.Text@ and adjusted to bytestring
+    decimal :: (Integral a2) => Atto.Parser a2
+    decimal = ByteString.foldl' step 0 <$> Atto.Char.takeWhile1 Atto.Char.isDigit
+      where
+        step a c = a * 10 + fromIntegral (c - 48)
+{-# SPECIALIZE boundedDecimal :: Atto.Parser (Maybe Int) #-}
+{-# SPECIALIZE boundedDecimal :: Atto.Parser (Maybe Int64) #-}
+{-# SPECIALIZE boundedDecimal :: Atto.Parser (Maybe Word8) #-}
+{-# SPECIALIZE boundedDecimal :: Atto.Parser (Maybe Word64) #-}
+
+-- | 'boundedDecimal', but fail the parser if the decimal overflows.
+boundedDecimalFail :: Atto.Parser Int
+boundedDecimalFail =
+  boundedDecimal >>= \case
+    Nothing -> fail "decimal out of range"
+    Just a -> pure a
+
+-- | Hedgehog generator for a netencode value.
+genNetencode :: Hedge.MonadGen m => m T
+genNetencode =
+  Gen.recursive
+    Gen.choice
+    [ -- these are bundled into one Gen, so that scalar elements get chosen less frequently, and the generator produces nicely nested examples
+      Gen.frequency
+        [ (1, pure unit),
+          (1, n1 <$> Gen.bool),
+          (1, n3 <$> Gen.element [0, 1, 5]),
+          (1, n6 <$> Gen.element [0, 1, 5]),
+          (1, i6 <$> Gen.element [-1, 1, 5]),
+          (2, text <$> Gen.text (Range.linear 1 10) Gen.lower),
+          (2, bytes <$> Gen.bytes (Range.linear 1 10))
+        ]
+    ]
+    [ do
+        key <- Gen.text (Range.linear 3 10) Gen.lower
+        val <- genNetencode
+        pure $ tag key val,
+      record
+        <$> ( let k = Gen.text (Range.linear 3 10) Gen.lower
+                  v = genNetencode
+               in NEMap.insertMap
+                    <$> k
+                    <*> v
+                    <*> ( (Gen.map (Range.linear 0 3)) $
+                            (,) <$> k <*> v
+                        )
+            )
+    ]
+
+-- | Hedgehog property: encoding a netencode value and parsing it again returns the same result.
+prop_netencodeRoundtrip :: Hedge.Property
+prop_netencodeRoundtrip = Hedge.property $ do
+  enc <- Hedge.forAll genNetencode
+  ( Atto.parseOnly
+      netencodeParser
+      ( netencodeEncodeStable enc
+          & Builder.toLazyByteString
+          & toStrictBytes
+      )
+    )
+    Hedge.=== (Right enc)
diff --git a/users/Profpatsch/netencode/Netencode/Parse.hs b/users/Profpatsch/netencode/Netencode/Parse.hs
new file mode 100644
index 0000000000..184fb5f912
--- /dev/null
+++ b/users/Profpatsch/netencode/Netencode/Parse.hs
@@ -0,0 +1,102 @@
+{-# LANGUAGE QuasiQuotes #-}
+
+module Netencode.Parse where
+
+import Control.Category qualified
+import Control.Selective (Selective)
+import Data.Error.Tree
+import Data.Fix (Fix (..))
+import Data.Functor.Compose
+import Data.List qualified as List
+import Data.Map.NonEmpty (NEMap)
+import Data.Map.NonEmpty qualified as NEMap
+import Data.Semigroupoid qualified as Semigroupoid
+import Data.Text qualified as Text
+import Netencode qualified
+import PossehlAnalyticsPrelude
+import Prelude hiding (log)
+
+newtype Parse from to
+  = -- TODO: the way @Context = [Text]@ has to be forwarded to everything is kinda shitty.
+    -- This is essentially just a difference list, and can probably be treated as a function in the output?
+    Parse (([Text], from) -> Validation (NonEmpty ErrorTree) ([Text], to))
+  deriving
+    (Functor, Applicative, Selective)
+    via ( Compose
+            ( Compose
+                ((->) ([Text], from))
+                (Validation (NonEmpty ErrorTree))
+            )
+            ((,) [Text])
+        )
+
+instance Semigroupoid Parse where
+  o p2 p1 = Parse $ \from -> case runParse' p1 from of
+    Failure err -> Failure err
+    Success to1 -> runParse' p2 to1
+
+instance Category Parse where
+  (.) = Semigroupoid.o
+  id = Parse $ \t -> Success t
+
+runParse :: Error -> Parse from to -> from -> Either ErrorTree to
+runParse errMsg parser t =
+  (["$"], t)
+    & runParse' parser
+    <&> snd
+    & first (nestedMultiError errMsg)
+    & validationToEither
+
+runParse' :: Parse from to -> ([Text], from) -> Validation (NonEmpty ErrorTree) ([Text], to)
+runParse' (Parse f) from = f from
+
+parseEither :: (([Text], from) -> Either ErrorTree ([Text], to)) -> Parse from to
+parseEither f = Parse $ \from -> f from & eitherToListValidation
+
+tAs :: (Netencode.TF (Fix Netencode.TF) -> Either ([Text] -> ErrorTree) to) -> Parse Netencode.T to
+tAs f = parseEither ((\(context, Netencode.T (Fix tf)) -> f tf & bimap ($ context) (context,)))
+
+key :: Text -> Parse (NEMap Text to) to
+key name = parseEither $ \(context, rec) ->
+  rec
+    & NEMap.lookup name
+    & annotate (errorTreeContext (showContext context) [fmt|Key "{name}" does not exist|])
+    <&> (addContext name context,)
+
+showContext :: [Text] -> Text
+showContext context = context & List.reverse & Text.intercalate "."
+
+addContext :: a -> [a] -> [a]
+addContext = (:)
+
+asText :: Parse Netencode.T Text
+asText = tAs $ \case
+  Netencode.Text t -> pure t
+  other -> typeError "of text" other
+
+asBytes :: Parse Netencode.T ByteString
+asBytes = tAs $ \case
+  Netencode.Bytes b -> pure b
+  other -> typeError "of bytes" other
+
+asRecord :: Parse Netencode.T (NEMap Text (Netencode.T))
+asRecord = tAs $ \case
+  Netencode.Record rec -> pure (rec <&> Netencode.T)
+  other -> typeError "a record" other
+
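+-- Example (a sketch, composing with @(>>>)@ from "Control.Category"):
+--
+-- > parseName :: Parse Netencode.T Text
+-- > parseName = asRecord >>> key "name" >>> asText
+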
+typeError :: Text -> Netencode.TF ignored -> (Either ([Text] -> ErrorTree) b)
+typeError should is = do
+  let otherS = is <&> (\_ -> ("…" :: String)) & show
+  Left $ \context -> errorTreeContext (showContext context) [fmt|Value is not {should}, but a {otherS}|]
+
+orThrowParseError ::
+  Parse (Either Error to) to
+orThrowParseError = Parse $ \case
+  (context, Left err) ->
+    err
+      & singleError
+      & errorTreeContext (showContext context)
+      & singleton
+      & Failure
+  (context, Right to) -> Success (context, to)
diff --git a/users/Profpatsch/netencode/README.md b/users/Profpatsch/netencode/README.md
index 8dc39f6337..3538a110a6 100644
--- a/users/Profpatsch/netencode/README.md
+++ b/users/Profpatsch/netencode/README.md
@@ -85,7 +85,7 @@ Similar to text, records start with the length of their *whole encoded content*,
 * A record with one empty field, `foo`: `{9:<3:foo|u,}`
 * A record with two fields, `foo` and `x`: `{21:<3:foo|u,<1:x|t3:baz,}`
 * The same record: `{21:<1:x|t3:baz,<3:foo|u,}`
-* The same record (later occurences of fields are ignored): `{28:<1:x|t3:baz,<3:foo|u,<1:x|u,}`
+* The same record (earlier occurrences of duplicate fields are ignored): `{28:<1:x|u,<1:x|t3:baz,<3:foo|u,}`
 
 ### sums (tagged unions)
 
@@ -102,6 +102,24 @@ Similar to records, lists start with the length of their whole encoded content.
 * The list with text `foo` followed by i3 `-42`: `[14:t3:foo,i3:-42,]`
 * The list with `Some` and `None` tags: `[33:<4:Some|t3:foo,<4None|u,<4None|u,]`
 
+## parser security considerations
+
+The length field is a decimal number that is not itself length-restricted,
+meaning an attacker could send an extremely long (or never-ending) length field
+and overflow your parser if you are not careful.
+
+You should thus put a practical limit on the length of length fields,
+which implicitly enforces a limit on how long the value itself can be.
+
+Start by defining a max value length in bytes.
+Then count the number of decimal digits in that number.
+
+So if your maximum value length is 1024 bytes, your length field can be at most `count_digits(1024) == 4` bytes long.
+
+Thus, if you restrict your parser to a length field of 4 bytes,
+it should also never parse anything longer than 1024 bytes for the value
+(plus 1 byte for the type tag, 4 bytes for the length, and 2 bytes for the separator & ending character).
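+
+As a sketch (a hypothetical helper, in Haskell):
+
+```haskell
+-- How many digits to accept in a length field, given the
+-- maximum allowed value size in bytes.
+maxLengthFieldDigits :: Natural -> Int
+maxLengthFieldDigits maxValueBytes = length (show maxValueBytes)
+```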
+
 ## motivation
 
 TODO
diff --git a/users/Profpatsch/netencode/default.nix b/users/Profpatsch/netencode/default.nix
index d389258148..6e7dce489a 100644
--- a/users/Profpatsch/netencode/default.nix
+++ b/users/Profpatsch/netencode/default.nix
@@ -11,6 +11,34 @@ let
     }
     (builtins.readFile ./netencode.rs);
 
+  netencode-hs = pkgs.haskellPackages.mkDerivation {
+    pname = "netencode";
+    version = "0.1.0";
+
+    src = depot.users.Profpatsch.exactSource ./. [
+      ./netencode.cabal
+      ./Netencode.hs
+      ./Netencode/Parse.hs
+    ];
+
+    libraryHaskellDepends = [
+      pkgs.haskellPackages.hedgehog
+      pkgs.haskellPackages.nonempty-containers
+      pkgs.haskellPackages.deriving-compat
+      pkgs.haskellPackages.data-fix
+      pkgs.haskellPackages.bytestring
+      pkgs.haskellPackages.attoparsec
+      pkgs.haskellPackages.pa-prelude
+      pkgs.haskellPackages.pa-label
+      pkgs.haskellPackages.pa-error-tree
+    ];
+
+    isLibrary = true;
+    license = lib.licenses.mit;
+
+
+  };
+
   gen = import ./gen.nix { inherit lib; };
 
   pretty-rs = depot.nix.writers.rustSimpleLib
@@ -37,9 +65,8 @@ let
 
     fn main() {
       let (_, prog) = exec_helpers::args_for_exec("netencode-pretty", 0);
-      let mut buf = vec![];
-      let u = netencode::u_from_stdin_or_die_user_error("netencode-pretty", &mut buf);
-      match netencode_pretty::Pretty::from_u(u).print_multiline(&mut std::io::stdout()) {
+      let t = netencode::t_from_stdin_or_die_user_error("netencode-pretty");
+      match netencode_pretty::Pretty::from_u(t.to_u()).print_multiline(&mut std::io::stdout()) {
         Ok(()) => {},
         Err(err) => exec_helpers::die_temporary("netencode-pretty", format!("could not write to stdout: {}", err))
       }
@@ -64,24 +91,21 @@ let
       dependencies = [
         netencode-rs
         depot.users.Profpatsch.execline.exec-helpers
-        depot.users.Profpatsch.arglib.netencode.rust
       ];
     } ''
     extern crate netencode;
-    extern crate arglib_netencode;
     extern crate exec_helpers;
     use netencode::{encode, dec};
     use netencode::dec::{Decoder, DecodeError};
 
     fn main() {
-        let mut buf = vec![];
         let args = exec_helpers::args("record-get", 1);
         let field = match std::str::from_utf8(&args[0]) {
             Ok(f) => f,
             Err(_e) => exec_helpers::die_user_error("record-get", format!("The field name needs to be valid unicode"))
         };
-        let u = netencode::u_from_stdin_or_die_user_error("record-get", &mut buf);
-        match (dec::RecordDot {field, inner: dec::AnyU }).dec(u) {
+        let t = netencode::t_from_stdin_or_die_user_error("record-get");
+        match (dec::RecordDot {field, inner: dec::AnyU }).dec(t.to_u()) {
             Ok(u) => encode(&mut std::io::stdout(), &u).expect("encoding to stdout failed"),
             Err(DecodeError(err)) => exec_helpers::die_user_error("record-get", err)
         }
@@ -101,10 +125,9 @@ let
     use netencode::dec::{Record, Try, ScalarAsBytes, Decoder, DecodeError};
 
     fn main() {
-        let mut buf = vec![];
-        let u = netencode::u_from_stdin_or_die_user_error("record-splice-env", &mut buf);
+        let t = netencode::t_from_stdin_or_die_user_error("record-splice-env");
         let (_, prog) = exec_helpers::args_for_exec("record-splice-env", 0);
-        match Record(Try(ScalarAsBytes)).dec(u) {
+        match Record(Try(ScalarAsBytes)).dec(t.to_u()) {
             Ok(map) => {
                 exec_helpers::exec_into_args(
                     "record-splice-env",
@@ -149,6 +172,7 @@ in
 depot.nix.readTree.drvTargets {
   inherit
     netencode-rs
+    netencode-hs
     pretty-rs
     pretty
     netencode-mustache
diff --git a/users/Profpatsch/netencode/netencode.cabal b/users/Profpatsch/netencode/netencode.cabal
new file mode 100644
index 0000000000..7bff4487bb
--- /dev/null
+++ b/users/Profpatsch/netencode/netencode.cabal
@@ -0,0 +1,74 @@
+cabal-version:      3.0
+name:               netencode
+version:            0.1.0.0
+author:             Profpatsch
+maintainer:         mail@profpatsch.de
+
+
+common common-options
+  ghc-options:
+      -Wall
+      -Wno-type-defaults
+      -Wunused-packages
+      -Wredundant-constraints
+      -fwarn-missing-deriving-strategies
+
+  -- See https://downloads.haskell.org/ghc/latest/docs/users_guide/exts.html
+  -- for a description of all these extensions
+  default-extensions:
+      -- Infer Applicative instead of Monad where possible
+    ApplicativeDo
+
+    -- Allow literal strings to be Text
+    OverloadedStrings
+
+    -- Syntactic sugar improvements
+    LambdaCase
+    MultiWayIf
+
+    -- Makes the (deprecated) usage of * instead of Data.Kind.Type an error
+    NoStarIsType
+
+    -- Convenient and crucial to deal with ambiguous field names, commonly
+    -- known as RecordDotSyntax
+    OverloadedRecordDot
+
+    -- does not export record fields as functions, use OverloadedRecordDot to access instead
+    NoFieldSelectors
+
+    -- Record punning
+    RecordWildCards
+
+    -- Improved Deriving
+    DerivingStrategies
+    DerivingVia
+
+    -- Type-level strings
+    DataKinds
+
+    -- to enable the `type` keyword in import lists (ormolu uses this automatically)
+    ExplicitNamespaces
+
+  default-language: GHC2021
+
+
+library
+    import: common-options
+    exposed-modules:
+        Netencode,
+        Netencode.Parse
+
+    build-depends:
+        base >=4.15 && <5,
+        pa-prelude,
+        pa-label,
+        pa-error-tree,
+        hedgehog,
+        nonempty-containers,
+        deriving-compat,
+        data-fix,
+        bytestring,
+        attoparsec,
+        text,
+        semigroupoids,
+        selective
diff --git a/users/Profpatsch/netencode/netencode.rs b/users/Profpatsch/netencode/netencode.rs
index bb08dca4aa..34a8fcef09 100644
--- a/users/Profpatsch/netencode/netencode.rs
+++ b/users/Profpatsch/netencode/netencode.rs
@@ -198,25 +198,103 @@ pub fn text(s: String) -> T {
     T::Text(s)
 }
 
-pub fn u_from_stdin_or_die_user_error<'a>(prog_name: &'_ str, stdin_buf: &'a mut Vec<u8>) -> U<'a> {
-    std::io::stdin().lock().read_to_end(stdin_buf);
-    let u = match parse::u_u(stdin_buf) {
-        Ok((rest, u)) => match rest {
-            b"" => u,
-            _ => exec_helpers::die_user_error(
+pub fn t_from_stdin_or_die_user_error(prog_name: &str) -> T {
+    match t_from_stdin_or_die_user_error_with_rest(prog_name, &vec![]) {
+        None => exec_helpers::die_user_error(prog_name, "stdin was empty"),
+        Some((rest, t)) => {
+            if rest.is_empty() {
+                t
+            } else {
+                exec_helpers::die_user_error(
+                    prog_name,
+                    format!(
+                        "stdin contained some soup after netencode value: {:?}",
+                        String::from_utf8_lossy(&rest)
+                    ),
+                )
+            }
+        }
+    }
+}
+
+/// Read a netencode value from stdin incrementally, returning the parsed value plus
+/// any leftover bytes that were read but not consumed; `None` if stdin was empty and
+/// no `initial_bytes` were given. Leftovers can be passed back as `initial_bytes` to read more values.
+pub fn t_from_stdin_or_die_user_error_with_rest(
+    prog_name: &'_ str,
+    initial_bytes: &[u8],
+) -> Option<(Vec<u8>, T)> {
+    let mut chonker = Chunkyboi::new(std::io::stdin().lock(), 4096);
+    // The vec to pass to the parser on each step
+    let mut parser_vec: Vec<u8> = initial_bytes.to_vec();
+    // whether stdin has already been exhausted (EOF reached)
+    let mut was_empty: bool = false;
+    loop {
+        match chonker.next() {
+            None => {
+                if parser_vec.is_empty() {
+                    return None;
+                } else {
+                    was_empty = true
+                }
+            }
+            Some(Err(err)) => exec_helpers::die_temporary(
                 prog_name,
-                format!(
-                    "stdin contained some soup after netencode value: {:?}",
-                    String::from_utf8_lossy(rest)
-                ),
+                &format!("could not read from stdin: {:?}", err),
+            ),
+            Some(Ok(mut new_bytes)) => parser_vec.append(&mut new_bytes),
+        }
+
+        match parse::t_t(&parser_vec) {
+            Ok((rest, t)) => return Some((rest.to_owned(), t)),
+            Err(nom::Err::Incomplete(_needed)) => {
+                if was_empty {
+                    exec_helpers::die_user_error(
+                        prog_name,
+                        &format!(
+                            "unable to parse netencode from stdin, input incomplete: {:?}",
+                            parser_vec
+                        ),
+                    );
+                }
+                // read more from stdin and try parsing again
+                continue;
+            }
+            Err(err) => exec_helpers::die_user_error(
+                prog_name,
+                &format!("unable to parse netencode from stdin: {:?}", err),
             ),
-        },
-        Err(err) => exec_helpers::die_user_error(
-            prog_name,
-            format!("unable to parse netencode from stdin: {:?}", err),
-        ),
-    };
-    u
+        }
+    }
+}
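+
+// Example (a sketch, not part of this change; `handle` is a stand-in for
+// whatever you do with the value): drain stdin of several consecutive
+// netencode values by feeding the leftover bytes back in:
+//
+//     let mut rest: Vec<u8> = vec![];
+//     while let Some((new_rest, t)) = t_from_stdin_or_die_user_error_with_rest("example", &rest) {
+//         handle(t);
+//         rest = new_rest;
+//     }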
+
+// iter helper
+// TODO: put into its own module
+struct Chunkyboi<T> {
+    inner: T,
+    buf: Vec<u8>,
+}
+
+impl<R: Read> Chunkyboi<R> {
+    fn new(inner: R, chunksize: usize) -> Self {
+        let buf = vec![0; chunksize];
+        Chunkyboi { inner, buf }
+    }
+}
+
+impl<R: Read> Iterator for Chunkyboi<R> {
+    type Item = std::io::Result<Vec<u8>>;
+
+    fn next(&mut self) -> Option<std::io::Result<Vec<u8>>> {
+        match self.inner.read(&mut self.buf) {
+            Ok(0) => None,
+            Ok(read) => {
+                // clone a new buffer so we can reuse the internal one
+                Some(Ok(self.buf[..read].to_owned()))
+            }
+            Err(err) => Some(Err(err)),
+        }
+    }
 }
 
 pub mod parse {
diff --git a/users/Profpatsch/netstring/default.nix b/users/Profpatsch/netstring/default.nix
index e85cf24dd8..047fe6bae1 100644
--- a/users/Profpatsch/netstring/default.nix
+++ b/users/Profpatsch/netstring/default.nix
@@ -1,16 +1,11 @@
 { lib, pkgs, depot, ... }:
 let
-  toNetstring = s:
-    "${toString (builtins.stringLength s)}:${s},";
+  toNetstring = depot.nix.netstring.fromString;
 
   toNetstringList = xs:
     lib.concatStrings (map toNetstring xs);
 
-  toNetstringKeyVal = attrs:
-    lib.concatStrings
-      (lib.mapAttrsToList
-        (k: v: toNetstring (toNetstring k + toNetstring v))
-        attrs);
+  toNetstringKeyVal = depot.nix.netstring.attrsToKeyValList;
 
   python-netstring = depot.users.Profpatsch.writers.python3Lib
     {
diff --git a/users/Profpatsch/nix-home/README.md b/users/Profpatsch/nix-home/README.md
new file mode 100644
index 0000000000..222978bc8c
--- /dev/null
+++ b/users/Profpatsch/nix-home/README.md
@@ -0,0 +1,7 @@
+# nix-home
+
+My very much simplified version of [home-manager](https://github.com/nix-community/home-manager/).
+
+Only takes care of installing symlinks into `$HOME`, and uses [`GNU stow`](https://www.gnu.org/software/stow/) to do the actual mutating.
+
+No support for services (yet).
diff --git a/users/Profpatsch/nix-home/default.nix b/users/Profpatsch/nix-home/default.nix
index 3f0b7c9c39..ee154c549a 100644
--- a/users/Profpatsch/nix-home/default.nix
+++ b/users/Profpatsch/nix-home/default.nix
@@ -150,6 +150,15 @@ let
               name = "scripts/ytextr";
               path = depot.users.Profpatsch.ytextr;
             }
+            {
+              name = "scripts/lorri-wait-for-eval";
+              path = depot.users.Profpatsch.lorri-wait-for-eval;
+            }
+            {
+              name = "scripts/lw";
+              path = depot.users.Profpatsch.lorri-wait-for-eval;
+            }
+
           ]
           ++
           (lib.pipe depot.users.Profpatsch.aliases [
diff --git a/users/Profpatsch/nix-tools.nix b/users/Profpatsch/nix-tools.nix
new file mode 100644
index 0000000000..4f29274573
--- /dev/null
+++ b/users/Profpatsch/nix-tools.nix
@@ -0,0 +1,159 @@
+{ depot, pkgs, ... }:
+
+let
+  bins = depot.nix.getBins pkgs.nix [ "nix-build" "nix-instantiate" ];
+
+  # TODO: neither of these prevents `result` from being created. good? bad?
+
+  # Usage (execline syntax):
+  #    nix-run { -A foo <more_nix_options> } args...
+  #
+  # Takes an execline block of `nix-build` arguments, which should produce an executable store path.
+  # Then executes the resulting store path with `args...`.
+  nix-run = depot.nix.writeExecline "nix-run" { argMode = "env"; } [
+    "backtick"
+    "-iE"
+    "storepath"
+    [
+      runblock
+      "1"
+      bins.nix-build
+    ]
+    runblock
+    "-r"
+    "2"
+    "$storepath"
+  ];
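+
+  # Example (`third_party.hello` is a hypothetical attribute):
+  #    nix-run { -A third_party.hello } --greeting hi
+  # builds the attribute and execs the resulting store path with `--greeting hi`.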
+
+  # Usage (execline syntax):
+  #    nix-run-bin { -A foo <more_nix_options> } <foo_bin_name> args...
+  #
+  # Takes an execline block of `nix-build` arguments, which should produce a store path with a bin/ directory in it.
+  # Then runs `<foo_bin_name>` with the given arguments, with all executables from the built store path’s `bin/` directory prepended to `PATH`.
+  nix-run-bin = depot.nix.writeExecline "nix-run-bin" { argMode = "env"; } [
+    "backtick"
+    "-iE"
+    "storepath"
+    [
+      runblock
+      "1"
+      bins.nix-build
+    ]
+    "importas"
+    "-ui"
+    "PATH"
+    "PATH"
+    "export"
+    "PATH"
+    "\${storepath}/bin:\${PATH}"
+    runblock
+    "-r"
+    "2"
+  ];
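+
+  # Example (`third_party.ripgrep` is a hypothetical attribute):
+  #    nix-run-bin { -A third_party.ripgrep } rg --version
+  # builds it, prepends its `bin/` to `PATH`, then runs `rg --version`.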
+
+  nix-eval = depot.nix.writeExecline "nix-eval" { } [
+    bins.nix-instantiate
+    "--read-write-mode"
+    "--eval"
+    "--strict"
+    "$@"
+  ];
+
+  # This is a rewrite of execline’s runblock.
+  # It adds the feature that instead of just
+  # executing the block it reads, it can also
+  # pass it as argv to given commands.
+  #
+  # This is going to be added to a future version
+  # of execline by skarnet, but for now it’s easier
+  # to just dirtily reimplement it in Python.
+  #
+  # TODO: this was added to recent execline versions,
+  # but it doesn’t seem to be a drop-in replacement,
+  # if I use execline’s runblock in nix-run-bin above,
+  # I get errors like
+  # > export: fatal: unable to exec runblock: Success
+  runblock = pkgs.writers.writePython3 "runblock"
+    {
+      flakeIgnore = [ "E501" "E226" ];
+    } ''
+    import sys
+    import os
+    from pathlib import Path
+
+    skip = False
+    one = sys.argv[1]
+    if one == "-r":
+        skip = True
+        block_number = int(sys.argv[2])
+        block_start = 3
+    elif one.startswith("-"):
+        print("runblock-python: only -r supported", file=sys.stderr)
+        sys.exit(100)
+    else:
+        block_number = int(one)
+        block_start = 2
+
+    execline_argv_no = int(os.getenvb(b"#"))
+    runblock_argv = [os.getenv(str(no)) for no in range(1, execline_argv_no + 1)]
+
+
+    def parse_block(args):
+        new_args = []
+        if args == []:
+            print(
+                "runblock-python: empty block",
+                file=sys.stderr
+            )
+            sys.exit(100)
+        for arg in args:
+            if arg == "":
+                break
+            elif arg.startswith(" "):
+                new_args.append(arg[1:])
+            else:
+                print(
+                    "runblock-python: unterminated block: {}".format(args),
+                    file=sys.stderr
+                )
+                sys.exit(100)
+        args_rest = args[len(new_args)+1:]
+        return (new_args, args_rest)
+
+
+    if skip:
+        rest = runblock_argv
+        for _ in range(0, block_number-1):
+            (_, rest) = parse_block(rest)
+        new_argv = rest
+    else:
+        new_argv = []
+        rest = runblock_argv
+        for _ in range(0, block_number):
+            (new_argv, rest) = parse_block(rest)
+
+    given_argv = sys.argv[block_start:]
+    run = given_argv + new_argv
+    if os.path.isabs(run[0]):
+        # TODO: ideally I’d check if it’s an executable here, but it was too hard to figure out and I couldn’t be bothered tbh
+        if not Path(run[0]).is_file():
+            print(
+                "runblock-python: Executable {} does not exist or is not a file.".format(run[0]),
+                file=sys.stderr
+            )
+            sys.exit(100)
+    os.execvp(
+        file=run[0],
+        args=run
+    )
+  '';
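+
+  # Sketch of the block encoding this script sees: for an invocation like
+  #    nix-run { -A hello } --greeting hi
+  # execline passes the arguments via the env vars `#` and `1`…`n`; block
+  # items carry a leading space and the block ends with an empty string,
+  # i.e. [" -A", " hello", "", "--greeting", "hi"]. `runblock 1 nix-build`
+  # then execs `nix-build -A hello`, while `runblock -r 2 prog` execs
+  # `prog --greeting hi`.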
+
+
+in
+{
+  inherit
+    nix-run
+    nix-run-bin
+    nix-eval
+    ;
+}
diff --git a/users/Profpatsch/nixpkgs-rewriter/MetaStdenvLib.hs b/users/Profpatsch/nixpkgs-rewriter/MetaStdenvLib.hs
deleted file mode 100644
index 3ed96a7b6e..0000000000
--- a/users/Profpatsch/nixpkgs-rewriter/MetaStdenvLib.hs
+++ /dev/null
@@ -1,80 +0,0 @@
-{-# LANGUAGE PartialTypeSignatures #-}
-{-# LANGUAGE LambdaCase #-}
-{-# LANGUAGE OverloadedStrings #-}
-{-# LANGUAGE NamedFieldPuns #-}
-import Nix.Parser
-import Nix.Expr.Types
-import Nix.Expr.Types.Annotated
-import System.Environment (getArgs)
-import System.Exit (die)
-import Data.Fix (Fix(..))
-import qualified Data.Text as Text
-import qualified Data.ByteString.Lazy.Char8 as BL
-import qualified Data.Aeson as A
-import qualified Data.Aeson.Encoding as A
-import Data.Function ((&))
-import qualified System.IO as IO
-import qualified Text.Megaparsec.Pos as MP
-
-main = do
-  (nixFile:_) <- getArgs
-  (parseNixFileLoc nixFile :: IO _) >>= \case
-    Failure err -> do
-      ePutStrLn $ show err
-      die "oh no"
-    Success expr -> do
-      case snd $ match expr of
-        NoArguments -> do
-          ePutStrLn $ "NoArguments in " <> nixFile
-          printPairs mempty
-        YesLib vars -> do
-          ePutStrLn $ "lib in " <> show vars <> " in " <> nixFile
-          printPairs mempty
-        NoLib vars srcSpan -> do
-          ePutStrLn $ nixFile <> " needs lib added"
-          printPairs
-            $ "fileName" A..= nixFile
-            <> "fromLine" A..= (srcSpan & spanBegin & sourceLine)
-            <> "fromColumn" A..= (srcSpan & spanBegin & sourceColumn)
-            <> "toLine" A..= (srcSpan & spanEnd & sourceLine)
-            <> "toColumn" A..= (srcSpan & spanEnd & sourceColumn)
-
-printPairs pairs = BL.putStrLn $ A.encodingToLazyByteString $ A.pairs pairs
-
-ePutStrLn = IO.hPutStrLn IO.stderr
-
-data Descend = YesDesc | NoDesc
-  deriving Show
-data Matched =  NoArguments | NoLib [VarName] SrcSpan | YesLib [VarName]
-  deriving Show
-
-match :: Fix (Compose (Ann SrcSpan) NExprF) -> (Descend, Matched)
-match = \case
-  (AnnE outerSpan (NAbs (ParamSet params _ _) (AnnE innerSpan _))) -> (NoDesc,
-    let vars = map fst params in
-    case (any (== "lib") vars) of
-      True -> YesLib vars
-      False ->
-          -- The span of the arglist is from the beginning of the match
-          -- to the beginning of the inner expression
-          let varSpan = SrcSpan
-                { spanBegin = outerSpan & spanBegin
-                -- -1 to prevent the spans from overlapping
-                , spanEnd = sourcePosMinus1 (innerSpan & spanBegin) }
-          in NoLib vars varSpan)
-  _ -> (NoDesc, NoArguments)
-
--- | Remove one from a source positon.
---
--- That means if the current position is at the very beginning of a line,
--- jump to the previous line.
-sourcePosMinus1 :: SourcePos -> SourcePos
-sourcePosMinus1 src@(SourcePos { sourceLine, sourceColumn }) =
-  let
-    col = MP.mkPos $ max (MP.unPos sourceColumn - 1) 1
-    line = MP.mkPos $ case MP.unPos sourceColumn of
-      1 -> max (MP.unPos sourceLine - 1) 1
-      _ -> MP.unPos sourceLine
-  in src
-    { sourceLine = line
-    , sourceColumn = col }
diff --git a/users/Profpatsch/nixpkgs-rewriter/default.nix b/users/Profpatsch/nixpkgs-rewriter/default.nix
deleted file mode 100644
index 0740a870aa..0000000000
--- a/users/Profpatsch/nixpkgs-rewriter/default.nix
+++ /dev/null
@@ -1,148 +0,0 @@
-{ depot, pkgs, ... }:
-let
-  inherit (depot.nix)
-    writeExecline
-    ;
-  inherit (depot.users.Profpatsch.lib)
-    debugExec
-    ;
-
-  bins = depot.nix.getBins pkgs.coreutils [ "head" "shuf" ]
-    // depot.nix.getBins pkgs.jq [ "jq" ]
-    // depot.nix.getBins pkgs.findutils [ "xargs" ]
-    // depot.nix.getBins pkgs.gnused [ "sed" ]
-  ;
-
-  export-json-object = pkgs.writers.writePython3 "export-json-object" { } ''
-    import json
-    import sys
-    import os
-
-    d = json.load(sys.stdin)
-
-    if d == {}:
-        sys.exit(0)
-
-    for k, v in d.items():
-        os.environ[k] = str(v)
-
-    os.execvp(sys.argv[1], sys.argv[1:])
-  '';
-
-  meta-stdenv-lib = pkgs.writers.writeHaskell "meta-stdenv-lib"
-    {
-      libraries = [
-        pkgs.haskellPackages.hnix
-        pkgs.haskellPackages.aeson
-      ];
-    } ./MetaStdenvLib.hs;
-
-  replace-between-lines = writeExecline "replace-between-lines" { readNArgs = 1; } [
-    "importas"
-    "-ui"
-    "file"
-    "fileName"
-    "importas"
-    "-ui"
-    "from"
-    "fromLine"
-    "importas"
-    "-ui"
-    "to"
-    "toLine"
-    "if"
-    [ depot.tools.eprintf "%s-%s\n" "$from" "$to" ]
-    (debugExec "adding lib")
-    bins.sed
-    "-e"
-    "\${from},\${to} \${1}"
-    "-i"
-    "$file"
-  ];
-
-  add-lib-if-necessary = writeExecline "add-lib-if-necessary" { readNArgs = 1; } [
-    "pipeline"
-    [ meta-stdenv-lib "$1" ]
-    export-json-object
-    # first replace any stdenv.lib mentions in the arg header
-    # if this is not done, the replace below kills these.
-    # Since we want it anyway ultimately, let’s do it here.
-    "if"
-    [ replace-between-lines "s/stdenv\.lib/lib/" ]
-    # then add the lib argument
-    # (has to be before stdenv, otherwise default arguments might be in the way)
-    replace-between-lines
-    "s/stdenv/lib, stdenv/"
-  ];
-
-  metaString = ''meta = with stdenv.lib; {'';
-
-  replace-stdenv-lib = pkgs.writers.writeBash "replace-stdenv-lib" ''
-    set -euo pipefail
-    sourceDir="$1"
-    for file in $(
-      ${pkgs.ripgrep}/bin/rg \
-        --files-with-matches \
-        --fixed-strings \
-        -e '${metaString}' \
-        "$sourceDir"
-    )
-    do
-      echo "replacing stdenv.lib meta in $file" >&2
-      ${bins.sed} -e '/${metaString}/ s/stdenv.lib/lib/' \
-          -i "$file"
-      ${add-lib-if-necessary} "$file"
-    done
-  '';
-
-  instantiate-nixpkgs-randomly = writeExecline "instantiate-nixpkgs-randomly" { readNArgs = 1; } [
-    "export"
-    "NIXPKGS_ALLOW_BROKEN"
-    "1"
-    "export"
-    "NIXPKGS_ALLOW_UNFREE"
-    "1"
-    "export"
-    "NIXPKGS_ALLOW_INSECURE"
-    "1"
-    "export"
-    "NIXPKGS_ALLOW_UNSUPPORTED_SYSTEM"
-    "1"
-    "pipeline"
-    [
-      "nix"
-      "eval"
-      "--raw"
-      ''(
-          let pkgs = import ''${1} {};
-          in builtins.toJSON (builtins.attrNames pkgs)
-        )''
-    ]
-    "pipeline"
-    [ bins.jq "-r" ".[]" ]
-    "pipeline"
-    [ bins.shuf ]
-    "pipeline"
-    [ bins.head "-n" "1000" ]
-    bins.xargs
-    "-I"
-    "{}"
-    "-n1"
-    "if"
-    [ depot.tools.eprintf "instantiating %s\n" "{}" ]
-    "nix-instantiate"
-    "$1"
-    "-A"
-    "{}"
-  ];
-
-in
-depot.nix.readTree.drvTargets {
-  inherit
-    instantiate-nixpkgs-randomly
-    # requires hnix, which we don’t want in tvl for now
-    # uncomment manually if you want to use it.
-    #   meta-stdenv-lib
-    #   replace-stdenv-lib
-    ;
-}
diff --git a/users/Profpatsch/openlab-tools/Main.hs b/users/Profpatsch/openlab-tools/Main.hs
new file mode 100644
index 0000000000..d5f958a38a
--- /dev/null
+++ b/users/Profpatsch/openlab-tools/Main.hs
@@ -0,0 +1,6 @@
+module Main where
+
+import OpenlabTools qualified
+
+main :: IO ()
+main = OpenlabTools.main
diff --git a/users/Profpatsch/openlab-tools/default.nix b/users/Profpatsch/openlab-tools/default.nix
new file mode 100644
index 0000000000..82641989f7
--- /dev/null
+++ b/users/Profpatsch/openlab-tools/default.nix
@@ -0,0 +1,69 @@
+{ depot, pkgs, lib, ... }:
+
+let
+  #   bins = depot.nix.getBins pkgs.sqlite ["sqlite3"];
+
+  openlab-tools = pkgs.haskellPackages.mkDerivation {
+    pname = "openlab-tools";
+    version = "0.1.0";
+
+    src = depot.users.Profpatsch.exactSource ./. [
+      ./openlab-tools.cabal
+      ./Main.hs
+      ./src/OpenlabTools.hs
+    ];
+
+    libraryHaskellDepends = [
+      depot.users.Profpatsch.my-prelude
+      depot.users.Profpatsch.my-webstuff
+      pkgs.haskellPackages.pa-prelude
+      pkgs.haskellPackages.pa-label
+      pkgs.haskellPackages.pa-json
+      pkgs.haskellPackages.pa-error-tree
+      pkgs.haskellPackages.pa-field-parser
+      pkgs.haskellPackages.pa-run-command
+      pkgs.haskellPackages.aeson-better-errors
+      pkgs.haskellPackages.blaze-html
+      pkgs.haskellPackages.deepseq
+      pkgs.haskellPackages.case-insensitive
+      pkgs.haskellPackages.hs-opentelemetry-sdk
+      pkgs.haskellPackages.http-conduit
+      pkgs.haskellPackages.http-types
+      pkgs.haskellPackages.ihp-hsx
+      pkgs.haskellPackages.monad-logger
+      pkgs.haskellPackages.selective
+      pkgs.haskellPackages.unliftio
+      pkgs.haskellPackages.wai-extra
+      pkgs.haskellPackages.warp
+      pkgs.haskellPackages.tagsoup
+      pkgs.haskellPackages.time
+    ];
+
+    isExecutable = true;
+    isLibrary = false;
+    license = lib.licenses.mit;
+  };
+
+  bins = depot.nix.getBins openlab-tools [ "openlab-tools" ];
+
+in
+
+depot.nix.writeExecline "openlab-tools-wrapped" { } [
+  "importas"
+  "-i"
+  "PATH"
+  "PATH"
+  "export"
+  "PATH"
+  "${pkgs.postgresql}/bin:$${PATH}"
+  "export"
+  "OPENLAB_TOOLS_TOOLS"
+  (pkgs.linkFarm "openlab-tools-tools" [
+    {
+      name = "pg_format";
+      path = "${pkgs.pgformatter}/bin/pg_format";
+    }
+  ])
+  bins.openlab-tools
+]
+
diff --git a/users/Profpatsch/openlab-tools/openlab-tools.cabal b/users/Profpatsch/openlab-tools/openlab-tools.cabal
new file mode 100644
index 0000000000..b7d217e051
--- /dev/null
+++ b/users/Profpatsch/openlab-tools/openlab-tools.cabal
@@ -0,0 +1,111 @@
+cabal-version:      3.0
+name:               openlab-tools
+version:            0.1.0.0
+author:             Profpatsch
+maintainer:         mail@profpatsch.de
+
+common common-options
+  ghc-options:
+      -Wall
+      -Wno-type-defaults
+      -Wunused-packages
+      -Wredundant-constraints
+      -fwarn-missing-deriving-strategies
+
+  -- See https://downloads.haskell.org/ghc/latest/docs/users_guide/exts.html
+  -- for a description of all these extensions
+  default-extensions:
+      -- Infer Applicative instead of Monad where possible
+    ApplicativeDo
+
+    -- Allow literal strings to be Text
+    OverloadedStrings
+
+    -- Syntactic sugar improvements
+    LambdaCase
+    MultiWayIf
+
+    -- Makes the (deprecated) usage of * instead of Data.Kind.Type an error
+    NoStarIsType
+
+    -- Convenient and crucial to deal with ambiguous field names, commonly
+    -- known as RecordDotSyntax
+    OverloadedRecordDot
+
+    -- does not export record fields as functions, use OverloadedRecordDot to access instead
+    NoFieldSelectors
+
+    -- Record punning
+    RecordWildCards
+
+    -- Improved Deriving
+    DerivingStrategies
+    DerivingVia
+
+    -- Type-level strings
+    DataKinds
+
+    -- to enable the `type` keyword in import lists (ormolu uses this automatically)
+    ExplicitNamespaces
+
+  default-language: GHC2021
+
+
+library
+    import: common-options
+
+    hs-source-dirs: src
+
+    exposed-modules:
+       OpenlabTools
+
+    build-depends:
+        base >=4.15 && <5,
+        text,
+        my-prelude,
+        my-webstuff,
+        pa-prelude,
+        pa-error-tree,
+        pa-label,
+        pa-json,
+        pa-field-parser,
+        pa-run-command,
+        aeson-better-errors,
+        aeson,
+        blaze-html,
+        bytestring,
+        containers,
+        deepseq,
+        unordered-containers,
+        exceptions,
+        filepath,
+        hs-opentelemetry-sdk,
+        hs-opentelemetry-api,
+        http-conduit,
+        http-types,
+        ihp-hsx,
+        monad-logger,
+        mtl,
+        network-uri,
+        scientific,
+        selective,
+        unliftio,
+        wai-extra,
+        wai,
+        warp,
+        tagsoup,
+        time,
+        stm,
+        case-insensitive
+
+executable openlab-tools
+    import: common-options
+
+    main-is: Main.hs
+
+    ghc-options:
+      -threaded
+
+    build-depends:
+        base >=4.15 && <5,
+        openlab-tools
diff --git a/users/Profpatsch/openlab-tools/src/OpenlabTools.hs b/users/Profpatsch/openlab-tools/src/OpenlabTools.hs
new file mode 100644
index 0000000000..16f1b626ac
--- /dev/null
+++ b/users/Profpatsch/openlab-tools/src/OpenlabTools.hs
@@ -0,0 +1,551 @@
+{-# LANGUAGE DeriveAnyClass #-}
+{-# LANGUAGE DuplicateRecordFields #-}
+{-# LANGUAGE QuasiQuotes #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+
+module OpenlabTools where
+
+import Control.Concurrent.STM hiding (atomically, readTVarIO)
+import Control.DeepSeq (NFData, deepseq)
+import Control.Monad.Logger qualified as Logger
+import Control.Monad.Logger.CallStack
+import Control.Monad.Reader
+import Data.Aeson.BetterErrors qualified as Json
+import Data.CaseInsensitive qualified as CaseInsensitive
+import Data.Error.Tree
+import Data.HashMap.Strict qualified as HashMap
+import Data.List qualified as List
+import Data.Maybe (listToMaybe)
+import Data.Text qualified as Text
+import Data.Time (NominalDiffTime, UTCTime (utctDayTime), diffUTCTime, getCurrentTime)
+import Data.Time qualified as Time
+import Data.Time.Clock (addUTCTime)
+import Data.Time.Format qualified as Time.Format
+import Debug.Trace
+import FieldParser (FieldParser' (..))
+import FieldParser qualified as Field
+import GHC.Records (HasField (..))
+import GHC.Stack qualified
+import IHP.HSX.QQ (hsx)
+import Json qualified
+import Label
+import Network.HTTP.Client.Conduit qualified as Http
+import Network.HTTP.Simple qualified as Http
+import Network.HTTP.Types
+import Network.HTTP.Types qualified as Http
+import Network.Wai qualified as Wai
+import Network.Wai.Handler.Warp qualified as Warp
+import Network.Wai.Parse qualified as Wai
+import OpenTelemetry.Trace qualified as Otel hiding (getTracer, inSpan, inSpan')
+import OpenTelemetry.Trace.Core qualified as Otel hiding (inSpan, inSpan')
+import OpenTelemetry.Trace.Monad qualified as Otel
+import Parse (Parse)
+import Parse qualified
+import PossehlAnalyticsPrelude
+import Pretty
+import System.Environment qualified as Env
+import System.IO qualified as IO
+import Text.Blaze.Html.Renderer.Pretty qualified as Html.Pretty
+import Text.Blaze.Html.Renderer.Utf8 qualified as Html
+import Text.Blaze.Html5 qualified as Html
+import Text.HTML.TagSoup qualified as Soup
+import UnliftIO hiding (Handler, newTVarIO)
+import Prelude hiding (span, until)
+
+mapallSpaceOla :: Text
+mapallSpaceOla = "https://mapall.space/heatmap/show.php?id=OpenLab+Augsburg"
+
+mainPage :: Html.Html
+mainPage =
+  Html.docTypeHtml
+    [hsx|
+          <head>
+            <title>Openlab Augsburg Tools</title>
+            <meta charset="utf-8">
+            <meta name="viewport" content="width=device-width, initial-scale=1">
+          </head>
+
+          <body>
+            <p>Welcome to the OpenLab Augsburg tools thingy. The idea is to provide some services that can be embedded into our other pages.</p>
+
+            <h2>What’s there</h2>
+            <ul>
+              <li>
+                A <a href="snips/table-opening-hours-last-week">table displaying the opening hours last week</a>, courtesy of <a href={mapallSpaceOla}>mapall.space</a>.
+              </li>
+            </ul>
+
+
+            <h2>Show me the code/how to contribute</h2>
+
+            <p>The source code can be found <a href="https://code.tvl.fyi/tree/users/Profpatsch/openlab-tools">in my user dir in the tvl repo</a>.</p>
+
+            <p>To build the server, clone the repository from <a href="https://code.tvl.fyi/depot.git">https://code.tvl.fyi/depot.git</a>.
+            Then <code>cd</code> into <code>users/Profpatsch</code>, run <code>nix-shell</code>.
+            </p>
+
+            <p>You can now run the server with <code>cabal repl openlab-tools/</code> by executing the <code>main</code> function inside the GHC repl. It starts on port <code>9099</code>.
+            <br>
+            To try out changes to the code, stop the server with <kbd><kbd>Ctrl</kbd>+<kbd>c</kbd></kbd> and type <code>:reload</code>, then <code>main</code> again.
+            <br>
+            Finally, from within <code>users/Profpatsch</code> you can start a working development environment by installing <var>vscode</var> or <var>vscodium</var> and the <var>Haskell</var> extension. Then run <code>code .</code> from within the directory.
+            </p>
+
+            <p>Once you have a patch, <a href="https://matrix.to/#/@profpatsch:augsburg.one">contact me on Matrix</a> or DM me at <code>irc/libera</code>, nick <code>Profpatsch</code>.
+            </p>
+          </body>
+        |]
+
+debug :: Bool
+debug = False
+
+runApp :: IO ()
+runApp = withTracer $ \tracer -> do
+  let renderHtml =
+        if debug
+          then Html.Pretty.renderHtml >>> stringToText >>> textToBytesUtf8 >>> toLazyBytes
+          else Html.renderHtml
+
+  let runApplication ::
+        (MonadUnliftIO m, MonadLogger m) =>
+        ( Wai.Request ->
+          (Wai.Response -> m Wai.ResponseReceived) ->
+          m Wai.ResponseReceived
+        ) ->
+        m ()
+      runApplication app = do
+        withRunInIO $ \runInIO -> Warp.run 9099 $ \req respond -> do
+          let catchAppException act =
+                try act >>= \case
+                  Right a -> pure a
+                  Left (AppException err) -> do
+                    runInIO (logError err)
+                    respond (Wai.responseLBS Http.status500 [] "")
+          liftIO $ catchAppException (runInIO $ app req (\resp -> liftIO $ respond resp))
+
+  let appT :: AppT IO () = do
+        let h extra res = Wai.responseLBS Http.ok200 (("Content-Type", "text/html") : extra) res
+        runHandlers
+          runApplication
+          [ Handler
+              { path = "",
+                body =
+                  Body
+                    (pure ())
+                    (\((), _) -> pure $ h [] (renderHtml mainPage))
+              },
+            Handler
+              { path = "snips/table-opening-hours-last-week",
+                body =
+                  Body
+                    ((label @"ifModifiedSince" <$> parseIfModifiedSince))
+                    ( \(req', cache) -> do
+                        now <- liftIO getCurrentTime <&> mkSecondTime
+                        new <- updateCacheIfNewer now cache heatmap
+                        let cacheToHeaders =
+                              [ ("Last-Modified", new.lastModified & formatHeaderTime),
+                                ("Expires", new.until & formatHeaderTime),
+                                ( "Cache-Control",
+                                  let maxAge = new.until `diffSecondTime` now
+                                   in [fmt|max-age={maxAge & floor @NominalDiffTime @Int  & show}, immutable|]
+                                )
+                              ]
+                        if
+                          -- If the last cache update is newer or equal to the requested version, we can tell the browser it’s fine
+                          | Just modifiedSince <- req'.ifModifiedSince,
+                            modifiedSince >= new.lastModified ->
+                              pure $ Wai.responseLBS Http.status304 cacheToHeaders ""
+                          | otherwise ->
+                              pure $ h cacheToHeaders (new.result & toLazyBytes)
+                    )
+              }
+          ]
+
+  runReaderT (appT :: AppT IO ()).unAppT Context {..}
+  where
+    -- "https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Last-Modified#syntax"
+    headerFormat = "%a, %d %b %0Y %T GMT"
+    formatHeaderTime (SecondTime t) =
+      t
+        & Time.Format.formatTime
+          @UTCTime
+          Time.Format.defaultTimeLocale
+          headerFormat
+        & stringToText
+        & textToBytesUtf8
+    parseHeaderTime =
+      Field.utf8
+        >>> ( FieldParser $ \t ->
+                t
+                  & textToString
+                  & Time.Format.parseTimeM
+                    @Maybe
+                    @UTCTime
+                    {-no leading whitespace -} False
+                    Time.Format.defaultTimeLocale
+                    headerFormat
+                  & annotate [fmt|Cannot parse header timestamp "{t}"|]
+            )
+    parseIfModifiedSince :: Parse Wai.Request (Maybe SecondTime)
+    parseIfModifiedSince =
+      lmap
+        ( (.requestHeaders)
+            >>> findMaybe
+              ( \(h, v) ->
+                  if "If-Modified-Since" == CaseInsensitive.mk h then Just v else Nothing
+              )
+        )
+        (Parse.maybe $ Parse.fieldParser parseHeaderTime)
+        & rmap (fmap mkSecondTime)
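+    -- Example: a SecondTime for 2024-01-01 12:00:00 UTC renders via
+    -- formatHeaderTime as "Mon, 01 Jan 2024 12:00:00 GMT", which is exactly
+    -- the shape parseHeaderTime accepts back.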
+
+parseRequest :: (MonadThrow f) => Otel.Span -> Parse from a -> from -> f a
+parseRequest span parser req =
+  Parse.runParse "Unable to parse the HTTP request" parser req
+    & assertM span id
+
+heatmap :: AppT IO ByteString
+heatmap = do
+  Http.httpBS [fmt|GET {mapallSpaceOla}|]
+    <&> (.responseBody)
+    <&> Soup.parseTags
+    <&> Soup.canonicalizeTags
+    <&> findHeatmap
+    <&> fromMaybe (htmlToTags [hsx|<p>Uh oh! could not fetch the table from <a href={mapallSpaceOla}>{mapallSpaceOla}</a></p>|])
+    <&> Soup.renderTags
+  where
+    firstSection f t = t & Soup.sections f & listToMaybe
+    match :: Soup.Tag ByteString -> Soup.Tag ByteString -> Bool
+    match x (t :: Soup.Tag ByteString) = (Soup.~==) @ByteString t x
+    findHeatmap t =
+      t
+        & firstSection (match (Soup.TagOpen "" [("class", "heatmap")]))
+        >>= firstSection (match (Soup.TagOpen "table" []))
+          <&> getTable
+          <&> (<> htmlToTags [hsx|<figcaption>source: <a href={mapallSpaceOla} target="_blank">mapall.space</a></figcaption>|])
+          <&> wrapTagStream (T2 (label @"el" "figure") (label @"attrs" []))
+
+    -- get the table from opening tag to closing tag (allowing nested tables)
+    getTable = go 0
+      where
+        go _ [] = []
+        go d (el : els)
+          | match (Soup.TagOpen "table" []) el = el : go (d + 1) els
+          | match (Soup.TagClose "table") el = if d <= 1 then [el] else el : go (traceShowId $ d - 1) els
+          | otherwise = el : go d els
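+        -- e.g. [<table>, <tr>, <table>, </table>, </table>, <p>] keeps
+        -- everything up to the matching outer </table> and drops the <p>.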
+
+    htmlToTags :: Html.Html -> [Soup.Tag ByteString]
+    htmlToTags h = h & Html.renderHtml & toStrictBytes & Soup.parseTags
+
+    -- TODO: this is dog-slow because of the whole list recreation!
+    wrapTagStream ::
+      T2 "el" ByteString "attrs" [Soup.Attribute ByteString] ->
+      [Soup.Tag ByteString] ->
+      [Soup.Tag ByteString]
+    wrapTagStream tag inner = (Soup.TagOpen (tag.el) tag.attrs : inner) <> [Soup.TagClose tag.el]
+
+main :: IO ()
+main =
+  runApp
+
+-- ( do
+--     -- todo: trace that to the init functions as well
+--     Otel.inSpan "whatcd-resolver main function" Otel.defaultSpanArguments $ do
+--       _ <- runTransaction migrate
+--       htmlUi
+-- )
+
+data Handler m = Handler
+  { path :: Text,
+    body :: Body m
+  }
+
+data Body m
+  = forall a.
+    Body
+      (Parse Wai.Request a)
+      ((a, TVar (Cache ByteString)) -> m Wai.Response)
+
+runHandlers ::
+  (Otel.MonadTracer m, MonadUnliftIO m, MonadThrow m) =>
+  -- ( (Wai.Request -> (Wai.Response -> m Wai.ResponseReceived) -> m Wai.ResponseReceived) ->
+  --   m ()
+  -- ) ->
+  ( (Wai.Request -> (Wai.Response -> m a) -> m a) ->
+    m ()
+  ) ->
+  [Handler m] ->
+  m ()
+runHandlers runApplication handlers = do
+  withCaches ::
+    [ T2
+        "handler"
+        (Handler m)
+        "cache"
+        (TVar (Cache ByteString))
+    ] <-
+    handlers
+      & traverse
+        ( \h -> do
+            cache <- liftIO $ newCache h.path "nothing yet"
+            pure $ T2 (label @"handler" h) (label @"cache" cache)
+        )
+  runApplication $ \req respond -> do
+    let mHandler =
+          withCaches
+            & List.find
+              ( \h ->
+                  (h.handler.path)
+                    == (req & Wai.pathInfo & Text.intercalate "/")
+              )
+    case mHandler of
+      Nothing -> respond $ Wai.responseLBS Http.status404 [] "nothing here (yet)"
+      Just handler -> do
+        inSpan' "TODO" $ \span -> do
+          case handler.handler.body of
+            Body parse runHandler -> do
+              req' <- req & parseRequest span parse
+              resp <- runHandler (req', handler.cache)
+              respond resp
+
+inSpan :: (MonadUnliftIO m, Otel.MonadTracer m) => Text -> m a -> m a
+inSpan name = Otel.inSpan name Otel.defaultSpanArguments
+
+inSpan' :: Text -> (Otel.Span -> m a) -> m a
+-- inSpan' name = Otel.inSpan' name Otel.defaultSpanArguments
+inSpan' _name act = act (error "todo telemetry disabled")
+
+zipT2 ::
+  forall l1 l2 t1 t2.
+  ( HasField l1 (T2 l1 [t1] l2 [t2]) [t1],
+    HasField l2 (T2 l1 [t1] l2 [t2]) [t2]
+  ) =>
+  T2 l1 [t1] l2 [t2] ->
+  [T2 l1 t1 l2 t2]
+zipT2 xs =
+  zipWith
+    (\t1 t2 -> T2 (label @l1 t1) (label @l2 t2))
+    (getField @l1 xs)
+    (getField @l2 xs)
+
+unzipT2 :: forall l1 t1 l2 t2. [T2 l1 t1 l2 t2] -> T2 l1 [t1] l2 [t2]
+unzipT2 xs = xs <&> toTup & unzip & fromTup
+  where
+    toTup :: forall a b. T2 a t1 b t2 -> (t1, t2)
+    toTup (T2 a b) = (getField @a a, getField @b b)
+    fromTup :: (a, b) -> T2 l1 a l2 b
+    fromTup (t1, t2) = T2 (label @l1 t1) (label @l2 t2)
+
+unzipT3 :: forall l1 t1 l2 t2 l3 t3. [T3 l1 t1 l2 t2 l3 t3] -> T3 l1 [t1] l2 [t2] l3 [t3]
+unzipT3 xs = xs <&> toTup & unzip3 & fromTup
+  where
+    toTup :: forall a b c. T3 a t1 b t2 c t3 -> (t1, t2, t3)
+    toTup (T3 a b c) = (getField @a a, getField @b b, getField @c c)
+    fromTup :: (a, b, c) -> T3 l1 a l2 b l3 c
+    fromTup (t1, t2, t3) = T3 (label @l1 t1) (label @l2 t2) (label @l3 t3)
+
+newtype Optional a = OptionalInternal (Maybe a)
+
+mkOptional :: a -> Optional a
+mkOptional defaultValue = OptionalInternal $ Just defaultValue
+
+defaults :: Optional a
+defaults = OptionalInternal Nothing
+
+instance HasField "withDefault" (Optional a) (a -> a) where
+  getField (OptionalInternal m) defaultValue = case m of
+    Nothing -> defaultValue
+    Just a -> a
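+
+-- Example: @defaults.withDefault 42 == 42@, while @(mkOptional 1).withDefault 42 == 1@.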
+
+httpJson ::
+  ( MonadIO m,
+    MonadThrow m
+  ) =>
+  (Optional (Label "contentType" ByteString)) ->
+  Otel.Span ->
+  Json.Parse ErrorTree b ->
+  Http.Request ->
+  m b
+httpJson opts span parser req = do
+  let opts' = opts.withDefault (label @"contentType" "application/json")
+  Http.httpBS req
+    >>= assertM
+      span
+      ( \resp -> do
+          let statusCode = resp & Http.responseStatus & (.statusCode)
+              contentType =
+                resp
+                  & Http.responseHeaders
+                  & List.lookup "content-type"
+                  <&> Wai.parseContentType
+                  <&> (\(ct, _mimeAttributes) -> ct)
+          if
+            | statusCode == 200,
+              Just ct <- contentType,
+              ct == opts'.contentType ->
+                Right $ (resp & Http.responseBody)
+            | statusCode == 200,
+              Just otherType <- contentType ->
+                Left [fmt|Server returned a non-json body, with content-type "{otherType}"|]
+            | statusCode == 200,
+              Nothing <- contentType ->
+                Left [fmt|Server returned a body with unspecified content type|]
+            | code <- statusCode -> Left [fmt|Server returned a non-200 error code, code {code}: {resp & showPretty}|]
+      )
+    >>= assertM
+      span
+      ( \body ->
+          Json.parseStrict parser body
+            & first (Json.parseErrorTree "could not parse the response")
+      )
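+
+-- Example call (a sketch; @myParser :: Json.Parse ErrorTree a@ stands in for a real parser):
+--
+-- @
+-- req <- Http.parseRequest "https://example.com/api.json"
+-- val <- httpJson defaults span myParser req
+-- @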
+
+assertM :: (MonadThrow f) => Otel.Span -> (t -> Either ErrorTree a) -> t -> f a
+assertM span f v = case f v of
+  Right a -> pure a
+  Left err -> appThrowTree span err
+
+-- | UTC time truncated to whole-second precision
+newtype SecondTime = SecondTime {unSecondTime :: UTCTime}
+  deriving newtype (Show, Eq, Ord)
+
+mkSecondTime :: UTCTime -> SecondTime
+mkSecondTime utcTime = SecondTime utcTime {utctDayTime = Time.secondsToDiffTime $ floor utcTime.utctDayTime}
+
+diffSecondTime :: SecondTime -> SecondTime -> NominalDiffTime
+diffSecondTime (SecondTime a) (SecondTime b) = diffUTCTime a b
+
+data Cache a = Cache
+  { name :: !Text,
+    until :: !SecondTime,
+    lastModified :: !SecondTime,
+    result :: !a
+  }
+  deriving stock (Show)
+
+newCache :: Text -> a -> IO (TVar (Cache a))
+newCache name result = do
+  let until = mkSecondTime $ Time.UTCTime {utctDay = Time.ModifiedJulianDay 1, utctDayTime = 1}
+  let lastModified = until
+  newTVarIO $ Cache {..}
+
+updateCache :: (NFData a, Eq a) => SecondTime -> TVar (Cache a) -> a -> STM (Cache a)
+updateCache now cache result' = do
+  -- make sure we don’t hold onto the world by fully forcing the result (deepseq to normal form)
+  let !result = deepseq result' result'
+  let until = mkSecondTime $ (5 * 60) `addUTCTime` now.unSecondTime
+  !toWrite <- do
+    old <- readTVar cache
+    let name = old.name
+    -- only update the lastModified time if the content changed (this is helpful for HTTP caching with If-Modified-Since)
+    if old.result == result
+      then do
+        let lastModified = old.lastModified
+        pure $ Cache {..}
+      else do
+        let lastModified = now
+        pure $ Cache {..}
+  _ <- writeTVar cache $! toWrite
+  pure toWrite
+
+-- | Run the given action iff the cache is stale, otherwise just return the item from the cache.
+updateCacheIfNewer :: (MonadUnliftIO m, NFData b, Eq b) => SecondTime -> TVar (Cache b) -> m b -> m (Cache b)
+updateCacheIfNewer now cache act = withRunInIO $ \runInIO -> do
+  old <- readTVarIO cache
+  if old.until < now
+    then do
+      res <- runInIO act
+      atomically $ updateCache now cache res
+    else pure old
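+
+-- Usage sketch (mirrors the handler in 'runApp'):
+--
+-- @
+-- cache <- liftIO $ newCache "heatmap" "nothing yet"
+-- now <- liftIO getCurrentTime <&> mkSecondTime
+-- new <- updateCacheIfNewer now cache heatmap
+-- @
+--
+-- @new.result@ is then at most five minutes old, and @new.lastModified@ only
+-- moves forward when the fetched content actually changed.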
+
+-- pgFormat <- readTools (label @"toolsEnvVar" "OPENLAB_TOOLS_TOOLS") (readTool "pg_format")
+-- let config = label @"logDatabaseQueries" LogDatabaseQueries
+-- pgConnPool <-
+--   Pool.newPool $
+--     Pool.defaultPoolConfig
+--       {- resource init action -} (Postgres.connectPostgreSQL (db & TmpPg.toConnectionString))
+--       {- resource destruction -} Postgres.close
+--       {- unusedResourceOpenTime -} 10
+--       {- max resources across all stripes -} 20
+-- transmissionSessionId <- newEmptyMVar
+-- let newAppT = do
+--       logInfo [fmt|Running with config: {showPretty config}|]
+--       logInfo [fmt|Connected to database at {db & TmpPg.toDataDirectory} on socket {db & TmpPg.toConnectionString}|]
+--       appT
+-- runReaderT newAppT.unAppT Context {..}
+
+withTracer :: (Otel.Tracer -> IO c) -> IO c
+withTracer f = do
+  setDefaultEnv "OTEL_SERVICE_NAME" "whatcd-resolver"
+  bracket
+    -- Install the SDK, pulling configuration from the environment
+    Otel.initializeGlobalTracerProvider
+    -- Ensure that any spans that haven't been exported yet are flushed
+    Otel.shutdownTracerProvider
+    -- Get a tracer so you can create spans
+    (\tracerProvider -> f $ Otel.makeTracer tracerProvider "openlab-tools" Otel.tracerOptions)
+
+setDefaultEnv :: String -> String -> IO ()
+setDefaultEnv envName defaultValue = do
+  Env.lookupEnv envName >>= \case
+    Just _env -> pure ()
+    Nothing -> Env.setEnv envName defaultValue
+
+data Context = Context
+  { tracer :: Otel.Tracer
+  }
+
+newtype AppT m a = AppT {unAppT :: ReaderT Context m a}
+  deriving newtype (Functor, Applicative, Monad, MonadIO, MonadUnliftIO, MonadThrow)
+
+data AppException = AppException Text
+  deriving stock (Show)
+  deriving anyclass (Exception)
+
+-- | A specialized variant of @addEvent@ that records attributes conforming to
+-- the OpenTelemetry specification's
+-- <https://github.com/open-telemetry/opentelemetry-specification/blob/49c2f56f3c0468ceb2b69518bcadadd96e0a5a8b/specification/trace/semantic_conventions/exceptions.md semantic conventions>
+--
+-- @since 0.0.1.0
+recordException ::
+  ( MonadIO m,
+    HasField "message" r Text,
+    HasField "type_" r Text
+  ) =>
+  Otel.Span ->
+  r ->
+  m ()
+recordException span dat = liftIO $ do
+  callStack <- GHC.Stack.whoCreated dat.message
+  newEventTimestamp <- Just <$> Otel.getTimestamp
+  Otel.addEvent span $
+    Otel.NewEvent
+      { newEventName = "exception",
+        newEventAttributes =
+          HashMap.fromList
+            [ ("exception.type", Otel.toAttribute @Text dat.type_),
+              ("exception.message", Otel.toAttribute @Text dat.message),
+              ("exception.stacktrace", Otel.toAttribute @Text $ Text.unlines $ map stringToText callStack)
+            ],
+        ..
+      }
+
+appThrowTree :: (MonadThrow m) => Otel.Span -> ErrorTree -> m a
+appThrowTree _span exc = do
+  let msg = prettyErrorTree exc
+  -- recordException
+  --   span
+  --   ( T2
+  --       (label @"type_" "AppException")
+  --       (label @"message" msg)
+  --   )
+  throwM $ AppException msg
+
+orAppThrowTree :: (MonadThrow m) => Otel.Span -> Either ErrorTree a -> m a
+orAppThrowTree span = \case
+  Left err -> appThrowTree span err
+  Right a -> pure a
+
+instance (MonadIO m) => MonadLogger (AppT m) where
+  monadLoggerLog loc src lvl msg = liftIO $ Logger.defaultOutput IO.stderr loc src lvl (Logger.toLogStr msg)
+
+instance (Monad m) => Otel.MonadTracer (AppT m) where
+  getTracer = AppT $ asks (.tracer)
diff --git a/users/Profpatsch/read-http.rs b/users/Profpatsch/read-http.rs
index efaded87e6..2b24e6beb1 100644
--- a/users/Profpatsch/read-http.rs
+++ b/users/Profpatsch/read-http.rs
@@ -220,7 +220,7 @@ fn write_dict<'buf, 'a>(
 }
 
 // iter helper
-
+// TODO: put into its own module
 struct Chunkyboi<T> {
     inner: T,
     buf: Vec<u8>,
diff --git a/users/Profpatsch/reverse-haskell-deps/README.md b/users/Profpatsch/reverse-haskell-deps/README.md
new file mode 100644
index 0000000000..efc288cae4
--- /dev/null
+++ b/users/Profpatsch/reverse-haskell-deps/README.md
@@ -0,0 +1,3 @@
+# reverse-haskell-deps
+
+Parse the HTML at `https://packdeps.haskellers.com/reverse` to get the data about Haskell package reverse dependencies in a structured way (they should just expose that as JSON, tbh).
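+
+Usage sketch (the binary reads the HTML on stdin and prints a sorted list of `(package, reverse dependency count)` pairs; the executable name depends on how it is built):
+
+    curl -sL https://packdeps.haskellers.com/reverse | reverse-haskell-deps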
diff --git a/users/Profpatsch/reverse-haskell-deps.hs b/users/Profpatsch/reverse-haskell-deps/ReverseHaskellDeps.hs
index 6b644df9ec..0e18ce8a6b 100644
--- a/users/Profpatsch/reverse-haskell-deps.hs
+++ b/users/Profpatsch/reverse-haskell-deps/ReverseHaskellDeps.hs
@@ -1,72 +1,76 @@
 {-# LANGUAGE LambdaCase #-}
-{-# LANGUAGE OverloadedStrings #-}
 {-# LANGUAGE MultiWayIf #-}
+{-# LANGUAGE OverloadedStrings #-}
 {-# LANGUAGE ScopedTypeVariables #-}
-import qualified Text.HTML.TagSoup as Tag
-import qualified Data.Text as Text
-import Data.Text (Text)
-import qualified Data.List as List
+
+module Main where
+
+import Data.ByteString qualified as ByteString
+import Data.Either
+import Data.List qualified as List
 import Data.Maybe
-import Text.Nicify
-import qualified Text.Read as Read
+import Data.Text (Text)
+import Data.Text qualified as Text
+import Data.Text.Encoding qualified
+import MyPrelude
 import Numeric.Natural
-import Data.Either
-import qualified Data.ByteString as ByteString
-import qualified Data.Text.Encoding
+import Text.HTML.TagSoup qualified as Tag
+import Text.Nicify
+import Text.Read qualified as Read
 
-parseNat :: Text.Text -> Maybe Natural
-parseNat = Read.readMaybe . Text.unpack
+parseNat :: Text -> Maybe Natural
+parseNat = Read.readMaybe . textToString
 
 printNice :: Show a => a -> IO ()
 printNice = putStrLn . nicify . show
 
-type Tag = Tag.Tag Text.Text
+type Tag = Tag.Tag Text
 
 main = do
   reverseHtml <- readStdinUtf8
   printNice $ List.sortOn snd $ packagesAndReverseDeps reverseHtml
-
   where
-    readStdinUtf8 = Data.Text.Encoding.decodeUtf8 <$> ByteString.getContents
+    readStdinUtf8 = bytesToTextUtf8Lenient <$> ByteString.getContents
 
 -- | reads the table provided by https://packdeps.haskellers.com/reverse
 -- figuring out all sections (starting with the link to the package name),
 -- then figuring out the name of the package and the first column,
 -- which is the number of reverse dependencies of the package
+packagesAndReverseDeps :: Text -> [(Text, Natural)]
 packagesAndReverseDeps reverseHtml = do
   let tags = Tag.parseTags reverseHtml
-  let sections =  Tag.partitions (isJust . reverseLink) tags
-  let sectionNames = map (fromJust . reverseLink . head) sections
+  let sections = Tag.partitions (isJust . reverseLink) tags
+  let sectionName [] = "<unknown section>"
+      sectionName (sect : _) = sect & reverseLink & fromMaybe "<unknown section>"
+  let sectionNames = map sectionName sections
   mapMaybe
-    (\(name :: Text.Text, sect) -> do
+    ( \(name :: Text, sect) -> do
         reverseDeps <- firstNaturalNumber sect
-        pure (sectionPackageName name sect, reverseDeps) :: Maybe (Text.Text, Natural))
+        pure (sectionPackageName name sect, reverseDeps) :: Maybe (Text, Natural)
+    )
     $ zip sectionNames sections
-
-
   where
     reverseLink = \case
-      Tag.TagOpen "a" attrs -> mapFind attrReverseLink attrs
+      Tag.TagOpen "a" attrs -> findMaybe attrReverseLink attrs
       _ -> Nothing
 
     attrReverseLink = \case
-      ("href", lnk) -> if
-          | "packdeps.haskellers.com/reverse/" `Text.isInfixOf` lnk -> Just lnk
-          | otherwise -> Nothing
+      ("href", lnk) ->
+        if
+            | "packdeps.haskellers.com/reverse/" `Text.isInfixOf` lnk -> Just lnk
+            | otherwise -> Nothing
       _ -> Nothing
 
     sectionPackageName :: Text -> [Tag] -> Text
     sectionPackageName sectionName = \case
-      (_: Tag.TagText name : _) -> name
-      (_: el : _) -> sectionName
+      (_ : Tag.TagText name : _) -> name
+      (_ : _ : _) -> sectionName
       xs -> sectionName
 
-
     firstNaturalNumber :: [Tag] -> Maybe Natural
     firstNaturalNumber =
-      mapFind (\case
-        Tag.TagText t -> parseNat t
-        _ -> Nothing)
-
-    mapFind :: (a -> Maybe b) -> [a] -> Maybe b
-    mapFind f xs = fromJust . f <$> List.find (isJust . f) xs
+      findMaybe
+        ( \case
+            Tag.TagText t -> parseNat t
+            _ -> Nothing
+        )
diff --git a/users/Profpatsch/reverse-haskell-deps.nix b/users/Profpatsch/reverse-haskell-deps/default.nix
index 6df7bc6329..b0a44420d7 100644
--- a/users/Profpatsch/reverse-haskell-deps.nix
+++ b/users/Profpatsch/reverse-haskell-deps/default.nix
@@ -19,12 +19,13 @@ let
   rev-hs = pkgs.writers.writeHaskell "revers-haskell-deps-hs"
     {
       libraries = [
+        depot.users.Profpatsch.my-prelude
         pkgs.haskellPackages.nicify-lib
         pkgs.haskellPackages.tagsoup
       ];
-
+      ghcArgs = [ "-threaded" ];
     }
-    ./reverse-haskell-deps.hs;
+    ./ReverseHaskellDeps.hs;
 
 
 in
diff --git a/users/Profpatsch/reverse-haskell-deps/reverse-haskell-deps.cabal b/users/Profpatsch/reverse-haskell-deps/reverse-haskell-deps.cabal
new file mode 100644
index 0000000000..4792f52adf
--- /dev/null
+++ b/users/Profpatsch/reverse-haskell-deps/reverse-haskell-deps.cabal
@@ -0,0 +1,16 @@
+cabal-version:      3.0
+name:               reverse-haskell-deps
+version:            0.1.0.0
+author:             Profpatsch
+maintainer:         mail@profpatsch.de
+
+library
+    exposed-modules:          ReverseHaskellDeps
+
+    build-depends:
+        base >=4.15 && <5,
+        my-prelude,
+        tagsoup,
+        nicify-lib
+
+    default-language: Haskell2010
diff --git a/users/Profpatsch/shell.nix b/users/Profpatsch/shell.nix
new file mode 100644
index 0000000000..ec3326fe86
--- /dev/null
+++ b/users/Profpatsch/shell.nix
@@ -0,0 +1,110 @@
+# generic shell.nix that can be used for most of my projects here,
+# until I figure out a way to have composable shells.
+let root = (import ../../. { }); in
+{ pkgs ? root.third_party.nixpkgs, depot ? root, ... }:
+
+pkgs.mkShell {
+
+  buildInputs = [
+    pkgs.sqlite-interactive
+    pkgs.sqlite-utils
+    pkgs.haskell-language-server
+    pkgs.cabal-install
+    (pkgs.haskellPackages.ghcWithHoogle (h: [
+      h.async
+      h.aeson-better-errors
+      h.blaze-html
+      h.conduit-extra
+      h.error
+      h.monad-logger
+      h.pa-field-parser
+      h.pa-label
+      h.pa-json
+      h.pa-pretty
+      h.pa-run-command
+      h.ihp-hsx
+      h.PyF
+      h.foldl
+      h.unliftio
+      h.xml-conduit
+      h.wai
+      h.wai-extra
+      h.warp
+      h.profunctors
+      h.semigroupoids
+      h.validation-selective
+      h.free
+      h.cryptonite-conduit
+      h.sqlite-simple
+      h.hedgehog
+      h.http-conduit
+      h.wai-conduit
+      h.nonempty-containers
+      h.deriving-compat
+      h.unix
+      h.tagsoup
+      h.attoparsec
+      # h.iCalendar
+      h.case-insensitive
+      h.hscolour
+      h.nicify-lib
+      h.hspec
+      h.hspec-expectations-pretty-diff
+      h.tmp-postgres
+      h.postgresql-simple
+      h.resource-pool
+      h.xmonad-contrib
+      h.hs-opentelemetry-sdk
+      h.punycode
+    ]))
+
+    pkgs.rustup
+    pkgs.pkg-config
+    pkgs.fuse
+    pkgs.postgresql_14
+    pkgs.nodejs
+    pkgs.ninja
+    pkgs.s6
+    pkgs.caddy
+
+    (depot.nix.binify {
+      name = "nix-run";
+      exe = depot.users.Profpatsch.nix-tools.nix-run;
+    })
+  ];
+
+  DEPOT_ROOT = toString ./../..;
+  PROFPATSCH_ROOT = toString ./.;
+
+  WHATCD_RESOLVER_TOOLS = pkgs.linkFarm "whatcd-resolver-tools" [
+    {
+      name = "pg_format";
+      path = "${pkgs.pgformatter}/bin/pg_format";
+    }
+  ];
+
+  # DECLIB_MASTODON_ACCESS_TOKEN read from `pass` in .envrc.
+
+  RUSTC_WRAPPER =
+    let
+      wrapperArgFile = libs: pkgs.writeText "rustc-wrapper-args"
+        (pkgs.lib.concatStringsSep
+          "\n"
+          (pkgs.lib.concatLists
+            (map
+              (lib: [
+                "-L"
+                "${pkgs.lib.getLib lib}/lib"
+              ])
+              libs)));
+    in
+    depot.nix.writeExecline "rustc-wrapper" { readNArgs = 1; } [
+      "$1"
+      "$@"
+      "@${wrapperArgFile [
+      depot.third_party.rust-crates.nom
+    ]}"
+    ];
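+  # ^ the wrapper execs the real rustc ("$1") with the original arguments,
+  # plus an argument file adding `-L <crate>/lib` for each depot crate listed
+  # above (currently just nom), so builds in this shell can link against them.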
+
+}
diff --git a/users/Profpatsch/shortcuttable/default.nix b/users/Profpatsch/shortcuttable/default.nix
new file mode 100644
index 0000000000..13ba220400
--- /dev/null
+++ b/users/Profpatsch/shortcuttable/default.nix
@@ -0,0 +1,172 @@
+{ depot, lib, pkgs, ... }:
+
+let
+  # run prog... and restart whenever SIGHUP is received
+  #
+  # this is useful for binding to a shortcut.
+  #
+  # Unfortunately, this requires a bunch of workarounds around the semantics of `trap`,
+  # but the general idea of bundling subprocesses with `setsid` is somewhat sound.
+  runShortcuttable =
+    depot.nix.writeExecline "run-shortcuttable" { } [
+      "importas"
+      "-i"
+      "run"
+      "XDG_RUNTIME_DIR"
+      "if"
+      [ "mkdir" "-p" "\${run}/shortcuttable/test" ]
+      "getpid"
+      "-E"
+      "controlpid"
+      savePid
+      "\${run}/shortcuttable/test/control"
+      "$controlpid"
+
+      # start the program
+      "background"
+      [
+        startSaveSID
+        "\${run}/shortcuttable/test/running-sid"
+        "$@"
+      ]
+
+      "trap"
+      [
+        "SIGHUP"
+        [
+          "if"
+          [ "echo" "got hup" ]
+          "if"
+          [
+            "if"
+            [ "echo" "killing our child processes" ]
+            "envfile"
+            "\${run}/shortcuttable/test/running-sid"
+            "importas"
+            "-ui"
+            "child_sid"
+            "pid"
+            "foreground"
+            [ "ps" "-f" "--sid" "$child_sid" ]
+            ctrlCCtrlDSid
+            "$child_sid"
+          ]
+          "if"
+          [ "echo" "restarting into" "$@" ]
+          "background"
+          [
+            startSaveSID
+            "\${run}/shortcuttable/test/running-sid"
+            "$@"
+          ]
+        ]
+        "SIGTERM"
+        [
+          (killShortcuttable { signal = "TERM"; })
+          "\${run}/shortcuttable/test/running-sid"
+          "\${run}/shortcuttable/test/exit"
+        ]
+        "SIGINT"
+        [
+          (killShortcuttable { signal = "INT"; })
+          "\${run}/shortcuttable/test/running-sid"
+          "\${run}/shortcuttable/test/exit"
+        ]
+      ]
+      depot.users.Profpatsch.execline.setsid
+      "child_sid"
+      "getpid"
+      "-E"
+      "exitpid"
+      savePid
+      "\${run}/shortcuttable/test/exit"
+      "$exitpid"
+      "sleep"
+      "infinity"
+    ];
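+
+  # Usage sketch: `run-shortcuttable prog…` saves the control pid to
+  # $XDG_RUNTIME_DIR/shortcuttable/test/control (in envfile format); a desktop
+  # shortcut can then restart `prog` by sending SIGHUP to that pid, e.g.
+  # `kill -HUP <pid from the control file>`.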
+
+  killShortcuttable = { signal }: depot.nix.writeExecline "kill-shortcuttable" { readNArgs = 2; } [
+    "if"
+    [ "echo" "got SIG${signal}, quitting" ]
+    "if"
+    [
+      "envfile"
+      "$1"
+      "importas"
+      "-ui"
+      "child_sid"
+      "pid"
+      "foreground"
+      [ "ps" "-f" "--sid" "$child_sid" ]
+      ctrlCCtrlDSid
+      "$child_sid"
+    ]
+    "if"
+    [ "echo" "killing shortcuttable loop" ]
+    "envfile"
+    "$2"
+    "importas"
+    "-ui"
+    "trap_pid"
+    "pid"
+    "foreground"
+    [ "ps" "-fp" "$trap_pid" ]
+    "kill"
+    "--signal"
+    signal
+    "$trap_pid"
+  ];
+
+  savePid = depot.nix.writeExecline "save-pid" { readNArgs = 2; } [
+    "if"
+    [ "echo" "saving process:" ]
+    "if"
+    [ "ps" "-fp" "$2" ]
+    "if"
+    [
+      "redirfd"
+      "-w"
+      "1"
+      "$1"
+      "printf"
+      "pid = %s\n"
+      "$2"
+    ]
+    "$@"
+  ];
+
+  # try to kill process, first with SIGTERM then SIGQUIT (in case it’s a repl)
+  ctrlCCtrlDSid = depot.nix.writeExecline "ctrl-c-ctrl-d" { readNArgs = 1; } [
+    "ifelse"
+    "-n"
+    [ "kill" "--signal" "TERM" "--" "-\${1}" ]
+    [
+      "if"
+      [ "echo" "could not kill via SIGTERM, trying SIGQUIT …" ]
+      "ifelse"
+      "-n"
+      [ "kill" "--signal" "QUIT" "--" "-\${1}" ]
+      [ "echo" "SIGQUIT failed as well, keeping it running" ]
+      "$@"
+    ]
+    "$@"
+  ];
+
+  startSaveSID = depot.nix.writeExecline "start-save-sid" { readNArgs = 1; } [
+    depot.users.Profpatsch.execline.setsid
+    "child_sid"
+    "importas"
+    "-ui"
+    "child_sid"
+    "child_sid"
+    "if"
+    [ "echo" "children sid:" "$child_sid" ]
+    savePid
+    "$1"
+    "$child_sid"
+    "$@"
+  ];
+
+
+in
+runShortcuttable
diff --git a/users/Profpatsch/solarized.dhall b/users/Profpatsch/solarized.dhall
deleted file mode 100644
index 01e14d64f4..0000000000
--- a/users/Profpatsch/solarized.dhall
+++ /dev/null
@@ -1,39 +0,0 @@
--- SOLARIZED HEX     16/8 TERMCOL  XTERM/HEX   L*A*B      RGB         HSB
--- --------- ------- ---- -------  ----------- ---------- ----------- -----------
--- base03    #002b36  8/4 brblack  234 #1c1c1c 15 -12 -12   0  43  54 193 100  21
--- base02    #073642  0/4 black    235 #262626 20 -12 -12   7  54  66 192  90  26
--- base01    #586e75 10/7 brgreen  240 #585858 45 -07 -07  88 110 117 194  25  46
--- base00    #657b83 11/7 bryellow 241 #626262 50 -07 -07 101 123 131 195  23  51
--- base0     #839496 12/6 brblue   244 #808080 60 -06 -03 131 148 150 186  13  59
--- base1     #93a1a1 14/4 brcyan   245 #8a8a8a 65 -05 -02 147 161 161 180   9  63
--- base2     #eee8d5  7/7 white    254 #e4e4e4 92 -00  10 238 232 213  44  11  93
--- base3     #fdf6e3 15/7 brwhite  230 #ffffd7 97  00  10 253 246 227  44  10  99
--- yellow    #b58900  3/3 yellow   136 #af8700 60  10  65 181 137   0  45 100  71
--- orange    #cb4b16  9/3 brred    166 #d75f00 50  50  55 203  75  22  18  89  80
--- red       #dc322f  1/1 red      160 #d70000 50  65  45 220  50  47   1  79  86
--- magenta   #d33682  5/5 magenta  125 #af005f 50  65 -05 211  54 130 331  74  83
--- violet    #6c71c4 13/5 brmagenta 61 #5f5faf 50  15 -45 108 113 196 237  45  77
--- blue      #268bd2  4/4 blue      33 #0087ff 55 -10 -45  38 139 210 205  82  82
--- cyan      #2aa198  6/6 cyan      37 #00afaf 60 -35 -05  42 161 152 175  74  63
--- green     #859900  2/2 green     64 #5f8700 60 -20  65 133 153   0  68 100  60
-{
-  hex  =
-    {
-      base03 = "#002b36",
-      base02 = "#073642",
-      base01 = "#586e75",
-      base00 = "#657b83",
-      base0 = "#839496",
-      base1 = "#93a1a1",
-      base2 = "#eee8d5",
-      base3 = "#fdf6e3",
-      yellow = "#b58900",
-      orange    = "#cb4b16",
-      red       = "#dc322f",
-      magenta   = "#d33682",
-      violet    = "#6c71c4",
-      blue      = "#268bd2",
-      cyan      = "#2aa198",
-      green     = "#859900",
-    }
-}
diff --git a/users/Profpatsch/struct-edit/default.nix b/users/Profpatsch/struct-edit/default.nix
deleted file mode 100644
index 11a7200ce4..0000000000
--- a/users/Profpatsch/struct-edit/default.nix
+++ /dev/null
@@ -1,13 +0,0 @@
-{ depot, ... }:
-depot.nix.buildGo.program {
-  name = "struct-edit";
-  srcs = [
-    ./main.go
-  ];
-  deps = [
-    depot.third_party.gopkgs."github.com".charmbracelet.bubbletea
-    depot.third_party.gopkgs."github.com".charmbracelet.lipgloss
-    depot.third_party.gopkgs."github.com".muesli.termenv
-    depot.third_party.gopkgs."github.com".mattn.go-isatty
-  ];
-}
diff --git a/users/Profpatsch/struct-edit/main.go b/users/Profpatsch/struct-edit/main.go
deleted file mode 100644
index c1a7013385..0000000000
--- a/users/Profpatsch/struct-edit/main.go
+++ /dev/null
@@ -1,431 +0,0 @@
-package main
-
-import (
-	json "encoding/json"
-	"fmt"
-	"log"
-	"os"
-	"sort"
-	"strings"
-
-	tea "github.com/charmbracelet/bubbletea"
-	lipgloss "github.com/charmbracelet/lipgloss"
-	// termenv "github.com/muesli/termenv"
-	// isatty "github.com/mattn/go-isatty"
-)
-
-// Keeps the full data structure and a path that indexes our current position into it.
-type model struct {
-	path []index
-	data val
-}
-
-// an index into a value, uint for lists and string for maps.
-// nil for any scalar value.
-// TODO: use an actual interface for these
-type index interface{}
-
-/// recursive value that we can represent.
-type val struct {
-	// the “type” of value; see tag const belove
-	tag tag
-	// last known position of our cursor
-	last_index index
-	// documentation (TODO)
-	doc string
-	// the actual value;
-	// the actual structure is behind a pointer so we can replace the struct.
-	// determined by the tag
-	// tagString -> *string
-	// tagFloat -> *float64
-	// tagList -> *[]val
-	// tagMap -> *map[string]val
-	val interface{}
-}
-
-type tag string
-
-const (
-	tagString tag = "string"
-	tagFloat  tag = "float"
-	tagList   tag = "list"
-	tagMap    tag = "map"
-)
-
-// print a value, flat
-func (v val) Render() string {
-	s := ""
-	switch v.tag {
-	case tagString:
-		s += *v.val.(*string)
-	case tagFloat:
-		s += fmt.Sprint(*v.val.(*float64))
-	case tagList:
-		s += "[ "
-		vs := []string{}
-		for _, enum := range v.enumerate() {
-			vs = append(vs, enum.v.Render())
-		}
-		s += strings.Join(vs, ", ")
-		s += " ]"
-	case tagMap:
-		s += "{ "
-		vs := []string{}
-		for _, enum := range v.enumerate() {
-			vs = append(vs, fmt.Sprintf("%s: %s", enum.i.(string), enum.v.Render()))
-		}
-		s += strings.Join(vs, ", ")
-		s += " }"
-	default:
-		s += fmt.Sprintf("<unknown: %v>", v)
-	}
-	return s
-}
-
-// render an index, depending on the type
-func renderIndex(i index) (s string) {
-	switch i := i.(type) {
-	case nil:
-		s = ""
-	// list index
-	case uint:
-		s = "*"
-	// map index
-	case string:
-		s = i + ":"
-	}
-	return
-}
-
-// take an arbitrary (within restrictions) go value and construct a val from it
-func makeVal(i interface{}) val {
-	var v val
-	switch i := i.(type) {
-	case string:
-		v = val{
-			tag:        tagString,
-			last_index: index(nil),
-			doc:        "",
-			val:        &i,
-		}
-	case float64:
-		v = val{
-			tag:        tagFloat,
-			last_index: index(nil),
-			doc:        "",
-			val:        &i,
-		}
-	case []interface{}:
-		ls := []val{}
-		for _, i := range i {
-			ls = append(ls, makeVal(i))
-		}
-		v = val{
-			tag:        tagList,
-			last_index: pos1Inner(tagList, &ls),
-			doc:        "",
-			val:        &ls,
-		}
-	case map[string]interface{}:
-		ls := map[string]val{}
-		for k, i := range i {
-			ls[k] = makeVal(i)
-		}
-		v = val{
-			tag:        tagMap,
-			last_index: pos1Inner(tagMap, &ls),
-			doc:        "",
-			val:        &ls,
-		}
-	default:
-		log.Fatalf("makeVal: cannot read json of type %T", i)
-	}
-	return v
-}
-
-// return an index that points at the first entry in val
-func (v val) pos1() index {
-	return v.enumerate()[0].i
-}
-
-func pos1Inner(tag tag, v interface{}) index {
-	return enumerateInner(tag, v)[0].i
-}
-
-type enumerate struct {
-	i index
-	v val
-}
-
-// enumerate gives us a stable ordering of elements in this val.
-// for scalars it’s just a nil index & the val itself.
-// Guaranteed to always return at least one element.
-func (v val) enumerate() (e []enumerate) {
-	e = enumerateInner(v.tag, v.val)
-	if e == nil {
-		e = append(e, enumerate{
-			i: nil,
-			v: v,
-		})
-	}
-	return
-}
-
-// like enumerate, but returns an empty slice for scalars without inner vals.
-func enumerateInner(tag tag, v interface{}) (e []enumerate) {
-	switch tag {
-	case tagString:
-		fallthrough
-	case tagFloat:
-		e = nil
-	case tagList:
-		for i, v := range *v.(*[]val) {
-			e = append(e, enumerate{i: index(uint(i)), v: v})
-		}
-	case tagMap:
-		// map sorting order is not stable (actually randomized thank jabber)
-		// so let’s sort them
-		keys := []string{}
-		m := *v.(*map[string]val)
-		for k, _ := range m {
-			keys = append(keys, k)
-		}
-		sort.Strings(keys)
-		for _, k := range keys {
-			e = append(e, enumerate{i: index(k), v: m[k]})
-		}
-	default:
-		log.Fatalf("unknown val tag %s, %v", tag, v)
-	}
-	return
-}
-
-func (m model) PathString() string {
-	s := "/ "
-	var is []string
-	for _, v := range m.path {
-		is = append(is, fmt.Sprintf("%v", v))
-	}
-	s += strings.Join(is, " / ")
-	return s
-}
-
-// walk the given path down in data, to get the value at that point.
-// Assumes that all path indexes are valid indexes into data.
-// Returns a pointer to the value at point, in order to be able to change it.
-func walk(data *val, path []index) (*val, bool, error) {
-	res := data
-	atPath := func(index int) string {
-		return fmt.Sprintf("at path %v", path[:index+1])
-	}
-	errf := func(ty string, val interface{}, index int) error {
-		return fmt.Errorf("walk: can’t walk into %s %v %s", ty, val, atPath(index))
-	}
-	for i, p := range path {
-		switch res.tag {
-		case tagString:
-			return nil, true, nil
-		case tagFloat:
-			return nil, true, nil
-		case tagList:
-			switch p := p.(type) {
-			case uint:
-				list := *res.val.(*[]val)
-				if int(p) >= len(list) || p < 0 {
-					return nil, false, fmt.Errorf("index out of bounds %s", atPath(i))
-				}
-				res = &list[p]
-			default:
-				return nil, false, fmt.Errorf("not a list index %s", atPath(i))
-			}
-		case tagMap:
-			switch p := p.(type) {
-			case string:
-				m := *res.val.(*map[string]val)
-				if a, ok := m[p]; ok {
-					res = &a
-				} else {
-					return nil, false, fmt.Errorf("index %s not in map %s", p, atPath(i))
-				}
-			default:
-				return nil, false, fmt.Errorf("not a map index %v %s", p, atPath(i))
-			}
-
-		default:
-			return nil, false, errf(string(res.tag), res.val, i)
-		}
-	}
-	return res, false, nil
-}
-
-// descend into the selected index. Assumes that the index is valid.
-// Will not descend into scalars.
-func (m model) descend() (model, error) {
-	// TODO: two walks?!
-	this, _, err := walk(&m.data, m.path)
-	if err != nil {
-		return m, err
-	}
-	newPath := append(m.path, this.last_index)
-	_, bounce, err := walk(&m.data, newPath)
-	if err != nil {
-		return m, err
-	}
-	// only descend if we *can*
-	if !bounce {
-		m.path = newPath
-	}
-	return m, nil
-}
-
-// ascend to one level up. stops at the root.
-func (m model) ascend() (model, error) {
-	if len(m.path) > 0 {
-		m.path = m.path[:len(m.path)-1]
-		_, _, err := walk(&m.data, m.path)
-		return m, err
-	}
-	return m, nil
-}
-
-/// go to the next item, or wraparound
-func (min model) next() (m model, err error) {
-	m = min
-	this, _, err := walk(&m.data, m.path)
-	if err != nil {
-		return
-	}
-	enumL := this.enumerate()
-	setNext := false
-	for _, enum := range enumL {
-		if setNext {
-			this.last_index = enum.i
-			setNext = false
-			break
-		}
-		if enum.i == this.last_index {
-			setNext = true
-		}
-	}
-	// wraparound
-	if setNext {
-		this.last_index = enumL[0].i
-	}
-	return
-}
-
-/// go to the previous item, or wraparound
-func (min model) prev() (m model, err error) {
-	m = min
-	this, _, err := walk(&m.data, m.path)
-	if err != nil {
-		return
-	}
-	enumL := this.enumerate()
-	// last element, wraparound
-	prevIndex := enumL[len(enumL)-1].i
-	for _, enum := range enumL {
-		if enum.i == this.last_index {
-			this.last_index = prevIndex
-			break
-		}
-		prevIndex = enum.i
-	}
-	return
-}
-
-/// bubbletea implementations
-
-func (m model) Init() tea.Cmd {
-	return nil
-}
-
-func initialModel(v interface{}) model {
-	val := makeVal(v)
-	return model{
-		path: []index{},
-		data: val,
-	}
-}
-
-func (m model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
-	var err error
-	switch msg := msg.(type) {
-	case tea.KeyMsg:
-		switch msg.String() {
-		case "ctrl+c", "q":
-			return m, tea.Quit
-
-		case "up":
-			m, err = m.prev()
-
-		case "down":
-			m, err = m.next()
-
-		case "right":
-			m, err = m.descend()
-
-		case "left":
-			m, err = m.ascend()
-
-			// 	case "enter":
-			// 		_, ok := m.selected[m.cursor]
-			// 		if ok {
-			// 			delete(m.selected, m.cursor)
-			// 		} else {
-			// 			m.selected[m.cursor] = struct{}{}
-			// 		}
-		}
-
-	}
-	if err != nil {
-		log.Fatal(err)
-	}
-	return m, nil
-}
-
-var pathColor = lipgloss.NewStyle().
-	// light blue
-	Foreground(lipgloss.Color("12"))
-
-var selectedColor = lipgloss.NewStyle().
-	Bold(true)
-
-func (m model) View() string {
-	s := pathColor.Render(m.PathString())
-	cur, _, err := walk(&m.data, m.path)
-	if err != nil {
-		log.Fatal(err)
-	}
-	s += cur.doc + "\n"
-	s += "\n"
-	for _, enum := range cur.enumerate() {
-		is := renderIndex(enum.i)
-		if is != "" {
-			s += is + " "
-		}
-		if enum.i == cur.last_index {
-			s += selectedColor.Render(enum.v.Render())
-		} else {
-			s += enum.v.Render()
-		}
-		s += "\n"
-	}
-
-	// s += fmt.Sprintf("%v\n", m)
-	// s += fmt.Sprintf("%v\n", cur)
-
-	return s
-}
-
-func main() {
-	var input interface{}
-	err := json.NewDecoder(os.Stdin).Decode(&input)
-	if err != nil {
-		log.Fatal("json from stdin: ", err)
-	}
-	p := tea.NewProgram(initialModel(input))
-	if err := p.Start(); err != nil {
-		log.Fatal("bubbletea TUI error: ", err)
-	}
-}
diff --git a/users/Profpatsch/sync-abfall-ics-aichach-friedberg/README.md b/users/Profpatsch/sync-abfall-ics-aichach-friedberg/README.md
new file mode 100644
index 0000000000..e0a6aa2fb8
--- /dev/null
+++ b/users/Profpatsch/sync-abfall-ics-aichach-friedberg/README.md
@@ -0,0 +1,3 @@
+# sync-abfall-ics-aichach-friedberg
+
+A small tool that syncs the ICS files for the local trash collection schedule from https://abfallwirtschaft.lra-aic-fdb.de/
diff --git a/users/Profpatsch/sync-abfall-ics-aichach-friedberg/default.nix b/users/Profpatsch/sync-abfall-ics-aichach-friedberg/default.nix
new file mode 100644
index 0000000000..739274cb6f
--- /dev/null
+++ b/users/Profpatsch/sync-abfall-ics-aichach-friedberg/default.nix
@@ -0,0 +1,31 @@
+{ depot, pkgs, ... }:
+
+let
+  sync-to-dir = depot.users.Profpatsch.writers.python3
+    {
+      name = "sync-ics-to-dir";
+      libraries = (py: [
+        py.httpx
+        py.icalendar
+      ]);
+    } ./sync-ics-to-dir.py;
+
+  config =
+    depot.users.Profpatsch.importDhall.importDhall
+      {
+        root = ./..;
+        files = [
+          "sync-abfall-ics-aichach-friedberg/ics-to-caldav.dhall"
+          "dhall/lib.dhall"
+          "ini/ini.dhall"
+        ];
+        main = "sync-abfall-ics-aichach-friedberg/ics-to-caldav.dhall";
+        deps = [
+        ];
+      }
+      depot.users.Profpatsch.ini.externs;
+
+
+
+in
+{ inherit config; }
diff --git a/users/Profpatsch/sync-abfall-ics-aichach-friedberg/ics-to-caldav.dhall b/users/Profpatsch/sync-abfall-ics-aichach-friedberg/ics-to-caldav.dhall
new file mode 100644
index 0000000000..2a7ac84979
--- /dev/null
+++ b/users/Profpatsch/sync-abfall-ics-aichach-friedberg/ics-to-caldav.dhall
@@ -0,0 +1,139 @@
+let Ini = ../ini/ini.dhall
+
+let Lib = ../dhall/lib.dhall
+
+in  \(Ini/externs : Ini.Externs) ->
+      let Vdirsyncer =
+            let StorageType =
+                  < FileSystem : { path : Text, fileext : < ICS > }
+                  | Http : { url : Text }
+                  >
+
+            let Collection = < FromA | FromB | Collection : Text >
+
+            let Collections =
+                  < Unspecified | TheseCollections : List Collection >
+
+            let Storage = { storageName : Text, storage : StorageType }
+
+            in  { Storage
+                , StorageType
+                , Collection
+                , Collections
+                , Pair =
+                    { pairName : Text
+                    , a : Storage
+                    , b : Storage
+                    , collections : Collections
+                    }
+                }
+
+      let toIniSections
+          : Vdirsyncer.Pair -> Ini.Sections
+          = \(pair : Vdirsyncer.Pair) ->
+              let
+                  -- we assume the names are [a-zA-Z_]
+                  renderList =
+                    \(l : List Text) ->
+                          "["
+                      ++  Lib.Text/concatMapSep
+                            ", "
+                            Text
+                            (\(t : Text) -> "\"${t}\"")
+                            l
+                      ++  "]"
+
+              in  let nv = \(name : Text) -> \(value : Text) -> { name, value }
+
+                  let mkStorage =
+                        \(storage : Vdirsyncer.Storage) ->
+                          { name = "storage ${storage.storageName}"
+                          , value =
+                              merge
+                                { FileSystem =
+                                    \ ( fs
+                                      : { path : Text, fileext : < ICS > }
+                                      ) ->
+                                      [ nv "type" "filesystem"
+                                      , nv
+                                          "fileext"
+                                          (merge { ICS = ".ics" } fs.fileext)
+                                      , nv "path" fs.path
+                                      ]
+                                , Http =
+                                    \(http : { url : Text }) ->
+                                      [ nv "type" "http", nv "url" http.url ]
+                                }
+                                storage.storage
+                          }
+
+                  in  [ { name = "pair ${pair.pairName}"
+                        , value =
+                          [ nv "a" pair.a.storageName
+                          , nv "b" pair.b.storageName
+                          , nv
+                              "collections"
+                              ( merge
+                                  { Unspecified = "none"
+                                  , TheseCollections =
+                                      \(colls : List Vdirsyncer.Collection) ->
+                                        renderList
+                                          ( Lib.List/map
+                                              Vdirsyncer.Collection
+                                              Text
+                                              ( \ ( coll
+                                                  : Vdirsyncer.Collection
+                                                  ) ->
+                                                  merge
+                                                    { FromA = "from a"
+                                                    , FromB = "from b"
+                                                    , Collection =
+                                                        \(t : Text) -> t
+                                                    }
+                                                    coll
+                                              )
+                                              colls
+                                          )
+                                  }
+                                  pair.collections
+                              )
+                          ]
+                        }
+                      , mkStorage pair.a
+                      , mkStorage pair.b
+                      ]
+
+      in  { example =
+              Ini/externs.renderIni
+                ( Ini.appendInis
+                    ( Lib.List/map
+                        Vdirsyncer.Pair
+                        Ini.Ini
+                        ( \(pair : Vdirsyncer.Pair) ->
+                            { globalSection = [] : Ini.Section
+                            , sections = toIniSections pair
+                            }
+                        )
+                        (   [ { pairName = "testPair"
+                              , a =
+                                { storageName = "mystor"
+                                , storage =
+                                    Vdirsyncer.StorageType.FileSystem
+                                      { path = "./test-ics"
+                                      , fileext = < ICS >.ICS
+                                      }
+                                }
+                              , b =
+                                { storageName = "mystor2"
+                                , storage =
+                                    Vdirsyncer.StorageType.Http
+                                      { url = "https://profpatsch.de" }
+                                }
+                              , collections = Vdirsyncer.Collections.Unspecified
+                              }
+                            ]
+                          : List Vdirsyncer.Pair
+                        )
+                    )
+                )
+          }
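+
+-- For reference, the example above renders to an INI roughly like this
+-- (a sketch; the exact formatting depends on the injected renderIni):
+--
+-- [pair testPair]
+-- a = mystor
+-- b = mystor2
+-- collections = none
+--
+-- [storage mystor]
+-- type = filesystem
+-- fileext = .ics
+-- path = ./test-ics
+--
+-- [storage mystor2]
+-- type = http
+-- url = https://profpatsch.de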
diff --git a/users/Profpatsch/sync-abfall-ics-aichach-friedberg/sync-ics-to-dir.py b/users/Profpatsch/sync-abfall-ics-aichach-friedberg/sync-ics-to-dir.py
new file mode 100644
index 0000000000..4af3b9fb85
--- /dev/null
+++ b/users/Profpatsch/sync-abfall-ics-aichach-friedberg/sync-ics-to-dir.py
@@ -0,0 +1,133 @@
+# Horrible little module that fetches ICS files from the local public waste collection service.
+#
+# It tries its best to not overwrite existing ICS files in case the upstream goes down
+# or returns empty ICS files.
+import sys
+import httpx
+import asyncio
+import icalendar
+from datetime import datetime
+import syslog
+import os.path
+
+# Internal id for the street (extracted from the ics download url)
+ortsteil_id = "e9c32ab3-df25-4660-b88e-abda91897d7a"
+
+# They are using a numeric encoding to refer to different kinds of trash
+fraktionen = {
+    "restmüll": "1",
+    "bio": "5",
+    "papier": "7",
+    "gelbe_tonne": "13",
+    "problemmüllsammlung": "20"
+}
+
+def ics_url(year):
+    frakt = ','.join(fraktionen.values())
+    return f'https://awido.cubefour.de/Customer/aic-fdb/KalenderICS.aspx?oid={ortsteil_id}&jahr={year}&fraktionen={frakt}&reminder=1.12:00'
+
+def fetchers_for_years(start_year, no_of_years_in_future):
+    """given a starting year, and a number of years in the future,
+    return the years for which to fetch ics files"""
+    current_year = datetime.now().year
+    max_year = current_year + no_of_years_in_future
+    return {
+        "passed_years": range(start_year, current_year),
+        "this_and_future_years": range(current_year, 1 + max_year)
+    }
+
+async def fetch_ics(c, url):
+    """fetch an ICS file from an URL"""
+    try:
+        resp = await c.get(url)
+    except Exception as e:
+        return { "ics_does_not_exist_exc": e }
+
+    if resp.is_error:
+        return { "ics_does_not_exist": resp }
+    else:
+        try:
+            ics = icalendar.Calendar.from_ical(resp.content)
+            return { "ics": { "ics_parsed": ics, "ics_bytes": resp.content } }
+        except ValueError as e:
+            return { "ics_cannot_be_parsed": e }
+
+def ics_has_events(ics):
+    """Determine if there is any event in the ICS, otherwise we can assume it’s an empty file"""
+    for item in ics.walk():
+      if isinstance(item, icalendar.Event):
+        return True
+    return False
+
+async def write_nonempty_ics(directory, year, ics):
+    # only overwrite if the new ics has any events
+    if ics_has_events(ics['ics_parsed']):
+        path = os.path.join(directory, f"{year}.ics")
+        with open(path, "wb") as f:
+            f.write(ics['ics_bytes'])
+            info(f"wrote ics for year {year} to file {path}")
+    else:
+        info(f"ics for year {year} was empty, skipping")
+
+
+def main():
+    ics_directory = os.getenv("ICS_DIRECTORY", None)
+    if not ics_directory:
+        critical("please set ICS_DIRECTORY")
+    start_year = int(os.getenv("ICS_START_YEAR", 2022))
+    future_years = int(os.getenv("ICS_FUTURE_YEARS", 2))
+
+    years = fetchers_for_years(start_year, no_of_years_in_future=future_years)
+
+
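+    # Note on severity below: failures for passed years only warn (upstream
+    # may legitimately have removed them), while failures for the current and
+    # future years are critical, since those are the files we actually need.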
+    async def go():
+        async with httpx.AsyncClient(follow_redirects=True) as c:
+            info(f"fetching ics for passed years: {years['passed_years']}")
+            for year in years["passed_years"]:
+                match await fetch_ics(c, ics_url(year)):
+                    case { "ics_does_not_exist_exc": error }:
+                       warn(f"The ics for the year {year} is gone, error when requesting: {error} for url {ics_url(year)}")
+                    case { "ics_does_not_exist": resp }:
+                       warn(f"The ics for the year {year} is gone, server returned status {resp.status} for url {ics_url(year)}")
+                    case { "ics_cannot_be_parsed": error }:
+                       warn(f"The returned ICS could not be parsed: {error} for url {ics_url(year)}")
+                    case { "ics": ics }:
+                       info(f"fetched ics from {ics_url(year)}")
+                       await write_nonempty_ics(ics_directory, year, ics)
+                    case _:
+                       critical("unknown case for ics result")
+
+
+            info(f"fetching ics for current and upcoming years: {years['this_and_future_years']}")
+            for year in years["this_and_future_years"]:
+                match await fetch_ics(c, ics_url(year)):
+                    case { "ics_does_not_exist_exc": error }:
+                       critical(f"The ics for the year {year} is not available, error when requesting: {error} for url {ics_url(year)}")
+                    case { "ics_does_not_exist": resp }:
+                       critical(f"The ics for the year {year} is not available, server returned status {resp.status} for url {ics_url(year)}")
+                    case { "ics_cannot_be_parsed": error }:
+                       critical(f"The returned ICS could not be parsed: {error} for url {ics_url(year)}")
+                    case { "ics": ics }:
+                       info(f"fetched ics from {ics_url(year)}")
+                       await write_nonempty_ics(ics_directory, year, ics)
+                    case _:
+                       critical("unknown case for ics result")
+
+    asyncio.run(go())
+
+def info(msg):
+    syslog.syslog(syslog.LOG_INFO, msg)
+
+def critical(msg):
+    syslog.syslog(syslog.LOG_CRIT, msg)
+    sys.exit(1)
+
+def warn(msg):
+    syslog.syslog(syslog.LOG_WARNING, msg)
+
+def debug(msg):
+    syslog.syslog(syslog.LOG_DEBUG, msg)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/users/Profpatsch/tagtime/README.md b/users/Profpatsch/tagtime/README.md
new file mode 100644
index 0000000000..ab2c7d14e5
--- /dev/null
+++ b/users/Profpatsch/tagtime/README.md
@@ -0,0 +1,18 @@
+# tagtime reimplementation
+
+What’s great about the original perl tagtime?
+
+* timestamps are deterministic from the beginning (keep)
+* the tagging system should just work (tm)
+
+What’s the problem with the original perl tagtime?
+
+* it uses a bad, arbitrary file format -> sqlite3
+* the query window does not time out, so it’s easy to miss that it’s open (often hidden behind another window), and then the following pings might never appear
+* There’s a bug with tags containing a `.` -> sqlite3
+
+What would be cool to have?
+
+* multi-entry mode (ping on phone and laptop and merge the replies eventually, since they will apply to the same timestamps)
+* simplifying reporting based on fuzzy matching & history
+* auto-generate nice time reports with hours for work items
diff --git a/users/Profpatsch/whatcd-resolver/Main.hs b/users/Profpatsch/whatcd-resolver/Main.hs
new file mode 100644
index 0000000000..21cd80cbf0
--- /dev/null
+++ b/users/Profpatsch/whatcd-resolver/Main.hs
@@ -0,0 +1,6 @@
+module Main where
+
+import WhatcdResolver qualified
+
+main :: IO ()
+main = WhatcdResolver.main
diff --git a/users/Profpatsch/whatcd-resolver/README.md b/users/Profpatsch/whatcd-resolver/README.md
new file mode 100644
index 0000000000..d1902e546a
--- /dev/null
+++ b/users/Profpatsch/whatcd-resolver/README.md
@@ -0,0 +1,21 @@
+# whatcd-resolver
+
+To run:
+
+```
+ninja run-services
+```
+
+in one terminal (this starts the background services)
+
+```
+ninja run
+```
+
+to start the server. It listens on port `9092`.
+
+You need to be in the `nix-shell` in `./..`.
+
+You need to set the `pass` key `internet/redacted/api-keys/whatcd-resolver` to an API key for RED.
+
+You need to have a transmission-rpc-daemon listening on port `9091` (no auth, try ssh port forwarding lol).
diff --git a/users/Profpatsch/whatcd-resolver/build.ninja b/users/Profpatsch/whatcd-resolver/build.ninja
new file mode 100644
index 0000000000..ff6ba8df04
--- /dev/null
+++ b/users/Profpatsch/whatcd-resolver/build.ninja
@@ -0,0 +1,20 @@
+
+builddir = .ninja
+
+outdir = ./output
+
+rule run-services
+  command = s6-svscan ./services
+
+rule run
+  command = execlineb -c '$
+    importas -i DEPOT_ROOT DEPOT_ROOT $
+    importas -i PROFPATSCH_ROOT PROFPATSCH_ROOT cd $$PROFPATSCH_ROOT $
+    nix-run { $$DEPOT_ROOT -A users.Profpatsch.shortcuttable } cabal repl whatcd-resolver/ --repl-options "-e main" $
+    '
+
+build run-services: run-services
+  pool = console
+
+build run: run
+  pool = console
diff --git a/users/Profpatsch/whatcd-resolver/default.nix b/users/Profpatsch/whatcd-resolver/default.nix
new file mode 100644
index 0000000000..27468507ac
--- /dev/null
+++ b/users/Profpatsch/whatcd-resolver/default.nix
@@ -0,0 +1,76 @@
+{ depot, pkgs, lib, ... }:
+
+let
+  #   bins = depot.nix.getBins pkgs.sqlite ["sqlite3"];
+
+  whatcd-resolver = pkgs.haskellPackages.mkDerivation {
+    pname = "whatcd-resolver";
+    version = "0.1.0";
+
+    src = depot.users.Profpatsch.exactSource ./. [
+      ./whatcd-resolver.cabal
+      ./Main.hs
+      ./src/WhatcdResolver.hs
+      ./src/AppT.hs
+      ./src/JsonLd.hs
+      ./src/Optional.hs
+      ./src/Html.hs
+      ./src/Http.hs
+      ./src/Transmission.hs
+      ./src/Redacted.hs
+    ];
+
+    libraryHaskellDepends = [
+      depot.users.Profpatsch.my-prelude
+      depot.users.Profpatsch.my-webstuff
+      pkgs.haskellPackages.pa-prelude
+      pkgs.haskellPackages.pa-label
+      pkgs.haskellPackages.pa-json
+      pkgs.haskellPackages.pa-error-tree
+      pkgs.haskellPackages.pa-field-parser
+      pkgs.haskellPackages.pa-run-command
+      pkgs.haskellPackages.aeson-better-errors
+      pkgs.haskellPackages.blaze-html
+      pkgs.haskellPackages.hs-opentelemetry-sdk
+      pkgs.haskellPackages.http-conduit
+      pkgs.haskellPackages.http-types
+      pkgs.haskellPackages.ihp-hsx
+      pkgs.haskellPackages.monad-logger
+      pkgs.haskellPackages.resource-pool
+      pkgs.haskellPackages.postgresql-simple
+      pkgs.haskellPackages.tmp-postgres
+      pkgs.haskellPackages.unliftio
+      pkgs.haskellPackages.wai-extra
+      pkgs.haskellPackages.warp
+      pkgs.haskellPackages.punycode
+    ];
+
+    isExecutable = true;
+    isLibrary = false;
+    license = lib.licenses.mit;
+  };
+
+  bins = depot.nix.getBins whatcd-resolver [ "whatcd-resolver" ];
+
+in
+
+depot.nix.writeExecline "whatcd-resolver-wrapped" { } [
+  "importas"
+  "-i"
+  "PATH"
+  "PATH"
+  "export"
+  "PATH"
+  # TODO: figure out how to automatically migrate to a new postgres version with tmp_postgres (dump?)
+  "${pkgs.postgresql_14}/bin:$${PATH}"
+  "export"
+  "WHATCD_RESOLVER_TOOLS"
+  (pkgs.linkFarm "whatcd-resolver-tools" [
+    {
+      name = "pg_format";
+      path = "${pkgs.pgformatter}/bin/pg_format";
+    }
+  ])
+  bins.whatcd-resolver
+]
+
diff --git a/users/Profpatsch/whatcd-resolver/notes.org b/users/Profpatsch/whatcd-resolver/notes.org
new file mode 100644
index 0000000000..24662c0f32
--- /dev/null
+++ b/users/Profpatsch/whatcd-resolver/notes.org
@@ -0,0 +1,48 @@
+* The Glorious what.cd¹ Resolver
+
+  ¹: At the time of writing, what.cd didn’t even exist anymore
+
+** Idea
+   
+   Stream your music (or media) from a private tracker transparently.
+   “Spotify for torrents”
+
+** Technical
+
+   You need to have a seedbox, which runs a server program.
+   The server manages queries, downloads torrents and requested files, and
+   provides http streams to the downloaded files (while caching them for
+   seeding).
+
+   Clients then use the API to search for music (e.g. query for artists or
+   tracks) and get back the promise of a stream to the resolved file (a bit
+   like how resolvers in the Tomahawk Player work).
+
+*** The Server
+
+**** Resolving queries
+
+     ~resolve :: Query -> IO Identifiers~
+
+     A query is a search input for content (could be an artist or a movie name
+     or something)
+
+     There have to be multiple providers, depending on the site used
+     (e.g. one for Gazelle trackers, one for Piratebay) and some intermediate
+     structure (e.g. for going through Musicbrainz first).
+
+     Output is a unique identifier for a fetchable resource; this could be a
+     link to a torrent combined with a file/directory in said torrent.
+
+**** Fetching Identifiers
+
+     ~fetch :: Identifier -> IO (Promise Stream)~
+
+     Takes an Identifier (which should provide all information on how to grab
+     the media file) and returns a stream to the media file once it’s ready.
+     
+     For torrents, this probably consists of telling the torrent
+     library/application to fetch a certain torrent and start downloading the
+     required files in it. The torrent fetcher would also need to do seeding and
+     space management, since one usually has to keep a ratio and hard drive
+     space is not unlimited.
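+
+     A minimal sketch of the two interfaces in Haskell (all types are made
+     up for illustration; this is not the actual implementation):
+
+     #+begin_src haskell
+     import Data.ByteString.Lazy (ByteString)
+     import Data.Text (Text)
+
+     newtype Query = Query Text
+
+     -- names one fetchable resource, e.g. a torrent plus a file inside it
+     data Identifier = TorrentFile
+       { torrentUrl :: Text
+       , filePath   :: Text
+       }
+
+     -- a “promise” of a value: an IO action that blocks until it’s ready
+     type Promise a = IO a
+
+     resolve :: Query -> IO [Identifier]
+     resolve (Query _q) = pure [] -- stub: ask the providers (Gazelle, …)
+
+     fetch :: Identifier -> IO (Promise ByteString)
+     fetch _ident = pure (pure mempty) -- stub: tell the torrent client to start
+     #+end_src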
diff --git a/users/Profpatsch/whatcd-resolver/server-notes.org b/users/Profpatsch/whatcd-resolver/server-notes.org
new file mode 100644
index 0000000000..cb990aba3d
--- /dev/null
+++ b/users/Profpatsch/whatcd-resolver/server-notes.org
@@ -0,0 +1,2 @@
+* whatcd-resolver-server
+
diff --git a/users/Profpatsch/whatcd-resolver/services/.gitignore b/users/Profpatsch/whatcd-resolver/services/.gitignore
new file mode 100644
index 0000000000..5cdb254e8c
--- /dev/null
+++ b/users/Profpatsch/whatcd-resolver/services/.gitignore
@@ -0,0 +1,3 @@
+/.s6-svscan/
+/**/event/
+/**/supervise/
diff --git a/users/Profpatsch/whatcd-resolver/services/jaeger/run b/users/Profpatsch/whatcd-resolver/services/jaeger/run
new file mode 100755
index 0000000000..41332f8bb6
--- /dev/null
+++ b/users/Profpatsch/whatcd-resolver/services/jaeger/run
@@ -0,0 +1,3 @@
+#!/usr/bin/env execlineb
+importas -i DEPOT_ROOT DEPOT_ROOT
+nix-run { $DEPOT_ROOT -A users.Profpatsch.jaeger -kK --builders '' }
diff --git a/users/Profpatsch/whatcd-resolver/services/reverse-proxy/run b/users/Profpatsch/whatcd-resolver/services/reverse-proxy/run
new file mode 100755
index 0000000000..7081b35f5a
--- /dev/null
+++ b/users/Profpatsch/whatcd-resolver/services/reverse-proxy/run
@@ -0,0 +1,2 @@
+#!/usr/bin/env execlineb
+caddy reverse-proxy --from :9092 --to :9093
diff --git a/users/Profpatsch/whatcd-resolver/src/AppT.hs b/users/Profpatsch/whatcd-resolver/src/AppT.hs
new file mode 100644
index 0000000000..3232004122
--- /dev/null
+++ b/users/Profpatsch/whatcd-resolver/src/AppT.hs
@@ -0,0 +1,150 @@
+{-# LANGUAGE DeriveAnyClass #-}
+
+module AppT where
+
+import Control.Monad.Logger qualified as Logger
+import Control.Monad.Logger.CallStack
+import Control.Monad.Reader
+import Data.Error.Tree
+import Data.HashMap.Strict (HashMap)
+import Data.HashMap.Strict qualified as HashMap
+import Data.Pool (Pool)
+import Data.Text qualified as Text
+import Database.PostgreSQL.Simple qualified as Postgres
+import GHC.Stack qualified
+import Label
+import OpenTelemetry.Trace qualified as Otel hiding (getTracer, inSpan, inSpan')
+import OpenTelemetry.Trace.Core qualified as Otel hiding (inSpan, inSpan')
+import OpenTelemetry.Trace.Monad qualified as Otel
+import PossehlAnalyticsPrelude
+import Postgres.MonadPostgres
+import System.IO qualified as IO
+import UnliftIO
+import Prelude hiding (span)
+
+data Context = Context
+  { config :: Label "logDatabaseQueries" DebugLogDatabaseQueries,
+    tracer :: Otel.Tracer,
+    pgFormat :: PgFormatPool,
+    pgConnPool :: Pool Postgres.Connection,
+    transmissionSessionId :: IORef (Maybe ByteString)
+  }
+
+newtype AppT m a = AppT {unAppT :: ReaderT Context m a}
+  deriving newtype (Functor, Applicative, Monad, MonadIO, MonadUnliftIO, MonadThrow)
+
+data AppException = AppException Text
+  deriving stock (Show)
+  deriving anyclass (Exception)
+
+-- * Logging & OpenTelemetry
+
+instance (MonadIO m) => MonadLogger (AppT m) where
+  monadLoggerLog loc src lvl msg = liftIO $ Logger.defaultOutput IO.stderr loc src lvl (Logger.toLogStr msg)
+
+instance (Monad m) => Otel.MonadTracer (AppT m) where
+  getTracer = AppT $ asks (.tracer)
+
+class (MonadUnliftIO m, Otel.MonadTracer m) => MonadOtel m
+
+instance (MonadUnliftIO m) => MonadOtel (AppT m)
+
+instance (MonadOtel m) => MonadOtel (Transaction m)
+
+inSpan :: (MonadOtel m) => Text -> m a -> m a
+inSpan name = Otel.inSpan name Otel.defaultSpanArguments
+
+inSpan' :: (MonadOtel m) => Text -> (Otel.Span -> m a) -> m a
+inSpan' name = Otel.inSpan' name Otel.defaultSpanArguments
+
+-- | Add the attribute to the span, prefixing it with the `_.` namespace (to make it easier to distinguish our application’s tags from standard tags)
+addAttribute :: (MonadIO m, Otel.ToAttribute a) => Otel.Span -> Text -> a -> m ()
+addAttribute span key a = Otel.addAttribute span ("_." <> key) a
+
+-- | Add the attributes to the span, prefixing each key with the `_.` namespace (to make it easier to distinguish our application’s tags from standard tags)
+addAttributes :: (MonadIO m) => Otel.Span -> HashMap Text Otel.Attribute -> m ()
+addAttributes span attrs = Otel.addAttributes span $ attrs & HashMap.mapKeys ("_." <>)
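+
+-- A minimal usage sketch (the attribute key is made up): inside an inSpan'
+-- callback,
+--
+-- > addAttribute span "request.method" ("GET" :: Text)
+--
+-- records the attribute under the key "_.request.method".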
+
+appThrowTreeNewSpan :: (MonadThrow m, MonadOtel m) => Text -> ErrorTree -> m a
+appThrowTreeNewSpan spanName exc = inSpan' spanName $ \span -> do
+  let msg = prettyErrorTree exc
+  recordException
+    span
+    ( T2
+        (label @"type_" "AppException")
+        (label @"message" msg)
+    )
+  throwM $ AppException msg
+
+appThrowTree :: (MonadThrow m, MonadIO m) => Otel.Span -> ErrorTree -> m a
+appThrowTree span exc = do
+  let msg = prettyErrorTree exc
+  recordException
+    span
+    ( T2
+        (label @"type_" "AppException")
+        (label @"message" msg)
+    )
+  throwM $ AppException msg
+
+orAppThrowTree :: (MonadThrow m, MonadIO m) => Otel.Span -> Either ErrorTree a -> m a
+orAppThrowTree span = \case
+  Left err -> appThrowTree span err
+  Right a -> pure a
+
+assertM :: (MonadThrow f, MonadIO f) => Otel.Span -> (t -> Either ErrorTree a) -> t -> f a
+assertM span f v = case f v of
+  Right a -> pure a
+  Left err -> appThrowTree span err
+
+assertMNewSpan :: (MonadThrow f, MonadOtel f) => Text -> (t -> Either ErrorTree a) -> t -> f a
+assertMNewSpan spanName f v = case f v of
+  Right a -> pure a
+  Left err -> appThrowTreeNewSpan spanName err
+
+-- | A specialized variant of @addEvent@ that records attributes conforming to
+-- the OpenTelemetry specification's
+-- <https://github.com/open-telemetry/opentelemetry-specification/blob/49c2f56f3c0468ceb2b69518bcadadd96e0a5a8b/specification/trace/semantic_conventions/exceptions.md semantic conventions>
+--
+-- @since 0.0.1.0
+recordException ::
+  ( MonadIO m,
+    HasField "message" r Text,
+    HasField "type_" r Text
+  ) =>
+  Otel.Span ->
+  r ->
+  m ()
+recordException span dat = liftIO $ do
+  callStack <- GHC.Stack.whoCreated dat.message
+  newEventTimestamp <- Just <$> Otel.getTimestamp
+  Otel.addEvent span $
+    Otel.NewEvent
+      { newEventName = "exception",
+        newEventAttributes =
+          HashMap.fromList
+            [ ("exception.type", Otel.toAttribute @Text dat.type_),
+              ("exception.message", Otel.toAttribute @Text dat.message),
+              ("exception.stacktrace", Otel.toAttribute @Text $ Text.unlines $ map stringToText callStack)
+            ],
+        ..
+      }
+
+-- * Postgres
+
+instance (MonadThrow m, MonadUnliftIO m) => MonadPostgres (AppT m) where
+  execute = executeImpl (AppT ask) (AppT $ asks (.config.logDatabaseQueries))
+  executeMany = executeManyImpl (AppT ask) (AppT $ asks (.config.logDatabaseQueries))
+  executeManyReturningWith = executeManyReturningWithImpl (AppT ask) (AppT $ asks (.config.logDatabaseQueries))
+  queryWith = queryWithImpl (AppT ask) (AppT $ asks (.config.logDatabaseQueries))
+  queryWith_ = queryWithImpl_ (AppT ask)
+
+  foldRowsWithAcc = foldRowsWithAccImpl (AppT ask) (AppT $ asks (.config.logDatabaseQueries))
+  runTransaction = runPGTransaction
+
+runPGTransaction :: (MonadUnliftIO m) => Transaction (AppT m) a -> AppT m a
+runPGTransaction (Transaction transaction) = do
+  pool <- AppT ask <&> (.pgConnPool)
+  withRunInIO $ \unliftIO ->
+    withPGTransaction pool $ \conn -> do
+      unliftIO $ runReaderT transaction conn
diff --git a/users/Profpatsch/whatcd-resolver/src/Html.hs b/users/Profpatsch/whatcd-resolver/src/Html.hs
new file mode 100644
index 0000000000..49b87b23dc
--- /dev/null
+++ b/users/Profpatsch/whatcd-resolver/src/Html.hs
@@ -0,0 +1,69 @@
+{-# LANGUAGE QuasiQuotes #-}
+
+module Html where
+
+import Data.Aeson qualified as Json
+import Data.Aeson.KeyMap qualified as KeyMap
+import Data.List.NonEmpty qualified as NonEmpty
+import Data.Map.Strict qualified as Map
+import IHP.HSX.QQ (hsx)
+import PossehlAnalyticsPrelude
+import Text.Blaze.Html (Html)
+import Text.Blaze.Html5 qualified as Html
+import Prelude hiding (span)
+
+-- | Render an arbitrary json value to HTML in a more-or-less reasonable fashion.
+mkVal :: Json.Value -> Html
+mkVal = \case
+  Json.Number n -> Html.toHtml @Text $ showToText n
+  Json.String s -> Html.toHtml @Text s
+  Json.Bool True -> [hsx|<em>true</em>|]
+  Json.Bool False -> [hsx|<em>false</em>|]
+  Json.Null -> [hsx|<em>null</em>|]
+  Json.Array arr -> toOrderedList mkVal arr
+  Json.Object obj ->
+    obj
+      & KeyMap.toMapText
+      & toDefinitionList (Html.toHtml @Text) mkVal
+
+toOrderedList :: (Foldable t1) => (t2 -> Html) -> t1 t2 -> Html
+toOrderedList mkValFn arr =
+  arr
+    & foldMap (\el -> Html.li $ mkValFn el)
+    & Html.ol
+
+toUnorderedList :: (Foldable t1) => (t2 -> Html) -> t1 t2 -> Html
+toUnorderedList mkValFn arr =
+  arr
+    & foldMap (\el -> Html.li $ mkValFn el)
+    & Html.ul
+
+-- | Render a definition list from a Map
+toDefinitionList :: (Text -> Html) -> (t -> Html) -> Map Text t -> Html
+toDefinitionList mkKeyFn mkValFn obj =
+  obj
+    & Map.toList
+    & foldMap (\(k, v) -> Html.dt (mkKeyFn k) <> Html.dd (mkValFn v))
+    & Html.dl
+
+-- | Render a table-like structure of json values as an HTML table.
+toTable :: [[(Text, Json.Value)]] -> Html
+toTable xs =
+  case xs & nonEmpty of
+    Nothing ->
+      [hsx|<p>No results.</p>|]
+    Just xs' -> do
+      let headers = xs' & NonEmpty.head <&> fst <&> (\h -> [hsx|<th>{h}</th>|]) & mconcat
+      let vals = xs' & foldMap (Html.tr . foldMap (Html.td . mkVal . snd))
+      [hsx|
+              <table class="table">
+                <thead>
+                  <tr>
+                  {headers}
+                  </tr>
+                </thead>
+                <tbody>
+                  {vals}
+                </tbody>
+              </table>
+          |]
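+
+-- A minimal usage sketch (rows are made up): the column headers are taken
+-- from the first row, so every row should contain the same keys in the same
+-- order.
+--
+-- > toTable
+-- >   [ [("name", Json.String "foo"), ("size", Json.Number 10)],
+-- >     [("name", Json.String "bar"), ("size", Json.Number 20)]
+-- >   ]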
diff --git a/users/Profpatsch/whatcd-resolver/src/Http.hs b/users/Profpatsch/whatcd-resolver/src/Http.hs
new file mode 100644
index 0000000000..487d55c21d
--- /dev/null
+++ b/users/Profpatsch/whatcd-resolver/src/Http.hs
@@ -0,0 +1,131 @@
+{-# LANGUAGE QuasiQuotes #-}
+
+module Http
+  ( doRequestJson,
+    RequestOptions (..),
+    mkRequestOptions,
+    setRequestMethod,
+    setRequestBodyLBS,
+    setRequestHeader,
+    getResponseStatus,
+    getResponseHeader,
+    getResponseBody,
+  )
+where
+
+import AppT
+import Data.CaseInsensitive (CI (original))
+import Data.Char qualified as Char
+import Data.List qualified as List
+import Data.Text qualified as Text
+import Data.Text.Punycode qualified as Punycode
+import Json.Enc qualified as Enc
+import MyPrelude
+import Network.HTTP.Client
+import Network.HTTP.Simple
+import Network.HTTP.Types.Status (Status (..))
+import Optional
+import Prelude hiding (span)
+
+data RequestOptions = RequestOptions
+  { method :: ByteString,
+    host :: Text,
+    port :: Optional Int,
+    path :: Optional [Text],
+    headers :: Optional [Header],
+    usePlainHttp :: Optional Bool
+  }
+
+mkRequestOptions :: (HasField "method" r ByteString, HasField "host" r Text) => r -> RequestOptions
+mkRequestOptions opts =
+  RequestOptions
+    { method = opts.method,
+      port = defaults,
+      host = opts.host,
+      path = defaults,
+      headers = defaults,
+      usePlainHttp = defaults
+    }
+
+doRequestJson ::
+  (MonadOtel m) =>
+  RequestOptions ->
+  Enc.Enc ->
+  m (Response ByteString)
+doRequestJson opts val = inSpan' "HTTP Request (JSON)" $ \span -> do
+  addAttribute span "request.xh" (requestToXhCommandLine opts val)
+  resp <-
+    defaultRequest {secure = not (opts & optsUsePlainHttp)}
+      & setRequestHost (opts & optsHost)
+      & setRequestPort (opts & optsPort)
+      -- TODO: is this automatically escaped by the library?
+      & setRequestPath (opts & optsPath)
+      & setRequestHeaders (opts & optsHeaders)
+      & setRequestMethod opts.method
+      & setRequestBodyLBS (Enc.encToBytesUtf8Lazy val)
+      & httpBS
+  let code = resp & getResponseStatus & (.statusCode)
+  let msg = resp & getResponseStatus & (.statusMessage) & bytesToTextUtf8Lenient
+  addAttribute
+    span
+    "request.response.status"
+    ([fmt|{code} {msg}|] :: Text)
+  pure resp
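+
+-- A minimal usage sketch (host and body are made up; 'T2' and 'label' are the
+-- record helpers used throughout this codebase, and the Enc constructors are
+-- assumed from Json.Enc):
+--
+-- > doRequestJson
+-- >   (mkRequestOptions (T2 (label @"method" "POST") (label @"host" "example.com")))
+-- >   (Enc.object [("hello", Enc.text "world")])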
+
+optsHost :: RequestOptions -> ByteString
+optsHost opts =
+  if opts.host & Text.isAscii
+    then opts.host & textToBytesUtf8
+    else opts.host & Punycode.encode
+
+optsUsePlainHttp :: RequestOptions -> Bool
+optsUsePlainHttp opts = opts.usePlainHttp.withDefault False
+
+optsPort :: RequestOptions -> Int
+optsPort opts = opts.port.withDefault (if opts & optsUsePlainHttp then 80 else 443)
+
+optsPath :: RequestOptions -> ByteString
+optsPath opts = opts.path.withDefault [] & Text.intercalate "/" & ("/" <>) & textToBytesUtf8
+
+optsHeaders :: RequestOptions -> [Header]
+optsHeaders opts = opts.headers.withDefault []
+
+-- | Create a string that can be pasted on the command line to invoke the same HTTP request via the `xh` tool (like curl, but with nicer syntax)
+requestToXhCommandLine :: RequestOptions -> Enc.Enc -> Text
+requestToXhCommandLine opts val = do
+  let protocol = if opts & optsUsePlainHttp then "http" :: Text else "https"
+  let url = [fmt|{protocol}://{opts & optsHost}:{opts & optsPort}{opts & optsPath}|]
+  let headers = opts & optsHeaders <&> \(hdr, v) -> hdr.original <> ":" <> v
+
+  prettyArgsForBash $
+    mconcat
+      [ ["xh", url],
+        headers <&> bytesToTextUtf8Lenient,
+        ["--raw"],
+        [val & Enc.encToBytesUtf8 & bytesToTextUtf8Lenient]
+      ]
+
+-- | Pretty print a command line in a way that can be copied to bash.
+prettyArgsForBash :: [Text] -> Text
+prettyArgsForBash = Text.intercalate " " . map simpleBashEscape
+
+-- | Simple escaping for bash words. If a word contains anything that’s not an
+-- ascii alphanumeric character or one of a few often-used special characters, put it in single quotes.
+simpleBashEscape :: Text -> Text
+simpleBashEscape t = do
+  case Text.find (not . isSimple) t of
+    Just _ -> escapeSingleQuote t
+    Nothing -> t
+  where
+    -- a character is simple if it’s an ascii letter or digit (so no spaces or
+    -- control characters), or one of a few often-used characters like - or .
+    isSimple c =
+      Char.isAsciiLower c
+        || Char.isAsciiUpper c
+        || Char.isDigit c
+        -- These are benign, bash will not interpret them as special characters.
+        || List.elem c ['-', '.', ':', '/']
+    -- Put the word in single quotes
+    -- If there is a single quote in the word,
+    -- close the single quoted word, add a single quote, open the word again
+    escapeSingleQuote t' = "'" <> Text.replace "'" "'\\''" t' <> "'"
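+
+-- Examples (a small sketch of the behaviour):
+--
+-- >>> simpleBashEscape "foo-1.2"
+-- "foo-1.2"
+--
+-- >>> simpleBashEscape "it's a file"
+-- "'it'\\''s a file'"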
diff --git a/users/Profpatsch/whatcd-resolver/src/JsonLd.hs b/users/Profpatsch/whatcd-resolver/src/JsonLd.hs
new file mode 100644
index 0000000000..1a021b706c
--- /dev/null
+++ b/users/Profpatsch/whatcd-resolver/src/JsonLd.hs
@@ -0,0 +1,137 @@
+{-# LANGUAGE QuasiQuotes #-}
+
+module JsonLd where
+
+import AppT
+import Data.Aeson qualified as Json
+import Data.Aeson.BetterErrors qualified as Json
+import Data.ByteString.Builder qualified as Builder
+import Data.List qualified as List
+import Data.Map.Strict qualified as Map
+import Data.Set (Set)
+import Data.Set qualified as Set
+import Html qualified
+import IHP.HSX.QQ (hsx)
+import Json qualified
+import Label
+import MyPrelude
+import Network.HTTP.Client.Conduit qualified as Http
+import Network.HTTP.Simple qualified as Http
+import Network.HTTP.Types.URI qualified as Url
+import Network.URI (URI)
+import Optional
+import Redacted
+import Text.Blaze.Html (Html)
+import Prelude hiding (span)
+
+-- | A recursive `json+ld` structure.
+data Jsonld
+  = JsonldObject JsonldObject
+  | JsonldAnonymousObject JsonldAnonymousObject
+  | JsonldArray [Jsonld]
+  | JsonldField Json.Value
+  deriving stock (Show, Eq)
+
+-- | A json+ld object, i.e. something that can be further expanded by following the URL in its `id_` field.
+data JsonldObject = JsonldObject'
+  { -- | `@type` field; currently just the plain value without taking into account the json+ld context
+    type_ :: Set Text,
+    -- | `@id` field, usually a link to follow for expanding the object to its full glory
+    id_ :: Text,
+    -- | any fields of this object that the remote deemed important enough to pre-emptively include; to get all fields, resolve the URL in `id_`.
+    previewFields :: Map Text Jsonld
+  }
+  deriving stock (Show, Eq)
+
+-- | A json+ld object that cannot be inspected further by resolving its ID
+data JsonldAnonymousObject = JsonldAnonymousObject'
+  { -- | `@type` field; currently just the plain value without taking into account the json+ld context
+    type_ :: Set Text,
+    -- | fields of this anonymous object
+    fields :: Map Text Jsonld
+  }
+  deriving stock (Show, Eq)
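+
+-- A small sketch of the mapping (example JSON made up):
+--
+--   {"@type": "Person", "@id": "https://example.com/1", "name": "A"}
+--
+-- parses to a 'JsonldObject' with type_ = {"Person"},
+-- id_ = "https://example.com/1" and previewFields mapping
+-- "name" to JsonldField (Json.String "A"); the same object without
+-- "@id" becomes a 'JsonldAnonymousObject'.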
+
+jsonldParser :: (Monad m) => Json.ParseT err m Jsonld
+jsonldParser =
+  Json.asValue >>= \cur -> do
+    if
+      | Json.Object _ <- cur -> do
+          type_ <-
+            Json.keyMay "@type" (Json.asArraySet Json.asText Json.<|> (Set.singleton <$> Json.asText))
+              <&> fromMaybe Set.empty
+          idMay <- Json.keyMay "@id" $ Json.asText
+          fields <-
+            Json.asObjectMap jsonldParser
+              <&> Map.delete "@type"
+              <&> Map.delete "@id"
+
+          if
+            | Just id_ <- idMay -> do
+                pure $ JsonldObject $ JsonldObject' {previewFields = fields, ..}
+            | otherwise -> pure $ JsonldAnonymousObject $ JsonldAnonymousObject' {..}
+      | Json.Array _ <- cur -> do
+          JsonldArray <$> Json.eachInArray jsonldParser
+      | otherwise -> pure $ JsonldField cur
+
+renderJsonld :: Jsonld -> Html
+renderJsonld = \case
+  JsonldObject obj -> renderObject obj (Just obj.id_) obj.previewFields
+  JsonldAnonymousObject obj -> renderObject obj Nothing obj.fields
+  JsonldArray arr ->
+    Html.toOrderedList renderJsonld arr
+  JsonldField f -> Html.mkVal f
+  where
+    renderObject obj mId_ fields = do
+      let id_ =
+            mId_ <&> \i ->
+              [hsx|
+                  <dt>Url</dt>
+                  <dd><a href={i}>{i}</a></dd>
+                  |]
+          getMoreButton =
+            mId_ <&> \i ->
+              [hsx|
+              <div>
+                <button
+                  hx-get={snippetHref i}
+                  hx-target="closest dl"
+                  hx-swap="outerHTML"
+                >more fields …</button>
+              </div>
+            |]
+      [hsx|
+      <dl>
+        <dt>Type</dt>
+        <dd>{obj.type_ & toList & schemaTypes}</dd>
+        {id_}
+        <dt>Fields</dt>
+        <dd>
+          {fields & Html.toDefinitionList schemaType renderJsonld}
+          {getMoreButton}
+        </dd>
+      </dl>
+    |]
+    snippetHref target =
+      Builder.toLazyByteString $
+        "/snips/jsonld/render"
+          <> Url.renderQueryBuilder True [("target", Just (textToBytesUtf8 target))]
+
+    schemaTypes xs =
+      xs
+        <&> schemaType
+        & List.intersperse ", "
+        & mconcat
+    schemaType t =
+      let href :: Text = [fmt|https://schema.org/{t}|] in [hsx|<a href={href} target="_blank">{t}</a>|]
+
+httpGetJsonLd :: (MonadThrow m, MonadOtel m) => (URI, Http.Request) -> m Jsonld
+httpGetJsonLd (uri, req) = inSpan' "Fetch json+ld" $ \span -> do
+  addAttribute span "json+ld.targetUrl" (uri & showToText)
+  httpJson
+    (mkOptional (label @"contentType" "application/ld+json"))
+    jsonldParser
+    ( req
+        & Http.setRequestMethod "GET"
+        & Http.setRequestHeader "Accept" ["application/ld+json"]
+    )
diff --git a/users/Profpatsch/whatcd-resolver/src/Optional.hs b/users/Profpatsch/whatcd-resolver/src/Optional.hs
new file mode 100644
index 0000000000..9791c84970
--- /dev/null
+++ b/users/Profpatsch/whatcd-resolver/src/Optional.hs
@@ -0,0 +1,18 @@
+module Optional where
+
+import GHC.Records (getField)
+import MyPrelude
+
+newtype Optional a = OptionalInternal (Maybe a)
+  deriving newtype (Functor)
+
+mkOptional :: a -> Optional a
+mkOptional defaultValue = OptionalInternal $ Just defaultValue
+
+defaults :: Optional a
+defaults = OptionalInternal Nothing
+
+instance HasField "withDefault" (Optional a) (a -> a) where
+  getField (OptionalInternal m) defaultValue = case m of
+    Nothing -> defaultValue
+    Just a -> a
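+
+-- A minimal usage sketch: since 'withDefault' is a field-shaped function,
+-- callers can use OverloadedRecordDot syntax, e.g.
+--
+-- >>> (defaults :: Optional Int).withDefault 443
+-- 443
+-- >>> (mkOptional 8080).withDefault 443
+-- 8080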
diff --git a/users/Profpatsch/whatcd-resolver/src/Redacted.hs b/users/Profpatsch/whatcd-resolver/src/Redacted.hs
new file mode 100644
index 0000000000..c8182a8cab
--- /dev/null
+++ b/users/Profpatsch/whatcd-resolver/src/Redacted.hs
@@ -0,0 +1,572 @@
+{-# LANGUAGE QuasiQuotes #-}
+
+module Redacted where
+
+import AppT
+import Arg
+import Control.Monad.Logger.CallStack
+import Control.Monad.Reader
+import Data.Aeson qualified as Json
+import Data.Aeson.BetterErrors qualified as Json
+import Data.Aeson.KeyMap qualified as KeyMap
+import Data.Error.Tree
+import Data.List qualified as List
+import Database.PostgreSQL.Simple (Binary (Binary), Only (..))
+import Database.PostgreSQL.Simple.SqlQQ (sql)
+import Database.PostgreSQL.Simple.Types (PGArray (PGArray))
+import FieldParser qualified as Field
+import Json qualified
+import Label
+import MyPrelude
+import Network.HTTP.Client.Conduit qualified as Http
+import Network.HTTP.Simple qualified as Http
+import Network.HTTP.Types
+import Network.Wai.Parse qualified as Wai
+import OpenTelemetry.Trace qualified as Otel hiding (getTracer, inSpan, inSpan')
+import Optional
+import Postgres.Decoder qualified as Dec
+import Postgres.MonadPostgres
+import Pretty
+import RunCommand (runCommandExpect0)
+import Prelude hiding (span)
+
+redactedSearch ::
+  (MonadLogger m, MonadThrow m, MonadOtel m) =>
+  [(ByteString, ByteString)] ->
+  Json.Parse ErrorTree a ->
+  m a
+redactedSearch advanced parser =
+  inSpan "Redacted API Search" $
+    redactedApiRequestJson
+      ( T2
+          (label @"action" "browse")
+          (label @"actionArgs" ((advanced <&> second Just)))
+      )
+      parser
+
+redactedGetTorrentFile ::
+  ( MonadLogger m,
+    MonadThrow m,
+    HasField "torrentId" dat Int,
+    MonadOtel m
+  ) =>
+  dat ->
+  m ByteString
+redactedGetTorrentFile dat = inSpan' "Redacted Get Torrent File" $ \span -> do
+  req <-
+    mkRedactedApiRequest
+      ( T2
+          (label @"action" "download")
+          ( label @"actionArgs"
+              [ ("id", Just (dat.torrentId & showToText @Int & textToBytesUtf8))
+              -- try using tokens as long as we have them (TODO: what if there are no tokens left?
+              -- ANSWER: it breaks:
+              -- responseBody = "{\"status\":\"failure\",\"error\":\"You do not have any freeleech tokens left. Please use the regular DL link.\"}",
+              -- ("usetoken", Just "1")
+              ]
+          )
+      )
+  httpTorrent span req
+
+mkRedactedTorrentLink :: Arg "torrentGroupId" Int -> Text
+mkRedactedTorrentLink torrentId = [fmt|https://redacted.ch/torrents.php?id={torrentId.unArg}|]
+
+exampleSearch :: (MonadThrow m, MonadLogger m, MonadPostgres m, MonadOtel m) => m (Transaction m ())
+exampleSearch = do
+  t1 <-
+    redactedSearchAndInsert
+      [ ("searchstr", "cherish"),
+        ("artistname", "kirinji"),
+        -- ("year", "1982"),
+        -- ("format", "MP3"),
+        -- ("releasetype", "album"),
+        ("order_by", "year")
+      ]
+  t3 <-
+    redactedSearchAndInsert
+      [ ("searchstr", "mouss et hakim"),
+        ("artistname", "mouss et hakim"),
+        -- ("year", "1982"),
+        -- ("format", "MP3"),
+        -- ("releasetype", "album"),
+        ("order_by", "year")
+      ]
+  t2 <-
+    redactedSearchAndInsert
+      [ ("searchstr", "thriller"),
+        ("artistname", "michael jackson"),
+        -- ("year", "1982"),
+        -- ("format", "MP3"),
+        -- ("releasetype", "album"),
+        ("order_by", "year")
+      ]
+  pure (t1 >> t2 >> t3)
+
+-- | Do the search, return a transaction that inserts all results from all pages of the search.
+redactedSearchAndInsert ::
+  forall m.
+  ( MonadLogger m,
+    MonadPostgres m,
+    MonadThrow m,
+    MonadOtel m
+  ) =>
+  [(ByteString, ByteString)] ->
+  m (Transaction m ())
+redactedSearchAndInsert extraArguments = do
+  logInfo [fmt|Doing redacted search with arguments: {showPretty extraArguments}|]
+  -- The first search returns the number of pages, so we use that to query all results piece by piece.
+  firstPage <- go Nothing
+  let remainingPages = firstPage.pages - 1
+  logInfo [fmt|Got the first page, found {remainingPages} more pages|]
+  let otherPagesNum = [(2 :: Natural) .. remainingPages]
+  otherPages <- traverse go (Just <$> otherPagesNum)
+  pure $
+    (firstPage : otherPages)
+      & concatMap (.tourGroups)
+      & \case
+        IsNonEmpty tgs -> tgs & insertTourGroupsAndTorrents
+        IsEmpty -> pure ()
+  where
+    go mpage =
+      redactedSearch
+        ( extraArguments
+            -- pass the page (for every search but the first one)
+            <> (mpage & ifExists (\page -> ("page", (page :: Natural) & showToText & textToBytesUtf8)))
+        )
+        ( do
+            status <- Json.key "status" Json.asText
+            when (status /= "success") $ do
+              Json.throwCustomError [fmt|Status was not "success", but {status}|]
+            Json.key "response" $ do
+              pages <-
+                Json.keyMay "pages" (Field.toJsonParser (Field.mapError singleError $ Field.jsonNumber >>> Field.boundedScientificIntegral @Int "not an Integer" >>> Field.integralToNatural))
+                  -- in case the field is missing, let’s assume there is only one page
+                  <&> fromMaybe 1
+              Json.key "results" $ do
+                tourGroups <-
+                  label @"tourGroups"
+                    <$> ( Json.eachInArray $ do
+                            groupId <- Json.keyLabel @"groupId" "groupId" (Json.asIntegral @_ @Int)
+                            groupName <- Json.keyLabel @"groupName" "groupName" Json.asText
+                            fullJsonResult <-
+                              label @"fullJsonResult"
+                                <$> ( Json.asObject
+                                        -- remove torrents because they are inserted separately below
+                                        <&> KeyMap.filterWithKey (\k _ -> k /= "torrents")
+                                        <&> Json.Object
+                                    )
+                            let tourGroup = T3 groupId groupName fullJsonResult
+                            torrents <- Json.keyLabel @"torrents" "torrents" $
+                              Json.eachInArray $ do
+                                torrentId <- Json.keyLabel @"torrentId" "torrentId" (Json.asIntegral @_ @Int)
+                                fullJsonResultT <- label @"fullJsonResult" <$> Json.asValue
+                                pure $ T2 torrentId fullJsonResultT
+                            pure (T2 (label @"tourGroup" tourGroup) torrents)
+                        )
+                pure
+                  ( T2
+                      (label @"pages" pages)
+                      tourGroups
+                  )
+        )
+    insertTourGroupsAndTorrents ::
+      NonEmpty
+        ( T2
+            "tourGroup"
+            (T3 "groupId" Int "groupName" Text "fullJsonResult" Json.Value)
+            "torrents"
+            [T2 "torrentId" Int "fullJsonResult" Json.Value]
+        ) ->
+      Transaction m ()
+    insertTourGroupsAndTorrents dat = do
+      let tourGroups = dat <&> (.tourGroup)
+      let torrents = dat <&> (.torrents)
+      insertTourGroups tourGroups
+        >>= ( \res ->
+                insertTorrents $
+                  zipT2 $
+                    T2
+                      (label @"torrentGroupIdPg" $ res <&> (.tourGroupIdPg))
+                      (label @"torrents" (torrents & toList))
+            )
+    insertTourGroups ::
+      NonEmpty
+        ( T3
+            "groupId"
+            Int
+            "groupName"
+            Text
+            "fullJsonResult"
+            Json.Value
+        ) ->
+      Transaction m [Label "tourGroupIdPg" Int]
+    insertTourGroups dats = do
+      let groupNames =
+            dats <&> \dat -> [fmt|{dat.groupId}: {dat.groupName}|]
+      logInfo [fmt|Inserting tour groups for {showPretty groupNames}|]
+      _ <-
+        execute
+          [fmt|
+                  DELETE FROM redacted.torrent_groups
+                  WHERE group_id = ANY (?::integer[])
+              |]
+          (Only $ (dats <&> (.groupId) & toList & PGArray :: PGArray Int))
+      executeManyReturningWith
+        [fmt|
+              INSERT INTO redacted.torrent_groups (
+                group_id, group_name, full_json_result
+              ) VALUES
+              ( ?, ? , ? )
+              ON CONFLICT (group_id) DO UPDATE SET
+                group_id = excluded.group_id,
+                group_name = excluded.group_name,
+                full_json_result = excluded.full_json_result
+              RETURNING (id)
+            |]
+        ( dats <&> \dat ->
+            ( dat.groupId,
+              dat.groupName,
+              dat.fullJsonResult
+            )
+        )
+        (label @"tourGroupIdPg" <$> Dec.fromField @Int)
+
+    insertTorrents ::
+      [ T2
+          "torrentGroupIdPg"
+          Int
+          "torrents"
+          [T2 "torrentId" Int "fullJsonResult" Json.Value]
+      ] ->
+      Transaction m ()
+    insertTorrents dats = do
+      _ <-
+        execute
+          [sql|
+            DELETE FROM redacted.torrents_json
+            WHERE torrent_id = ANY (?::integer[])
+          |]
+          ( Only $
+              PGArray
+                [ torrent.torrentId
+                  | dat <- dats,
+                    torrent <- dat.torrents
+                ]
+          )
+
+      execute
+        [sql|
+          INSERT INTO redacted.torrents_json
+            ( torrent_group
+            , torrent_id
+            , full_json_result)
+          SELECT *
+          FROM UNNEST(
+              ?::integer[]
+            , ?::integer[]
+            , ?::jsonb[]
+          ) AS inputs(
+              torrent_group
+            , torrent_id
+            , full_json_result)
+          |]
+        ( [ ( dat.torrentGroupIdPg :: Int,
+              group.torrentId :: Int,
+              group.fullJsonResult :: Json.Value
+            )
+            | dat <- dats,
+              group <- dat.torrents
+          ]
+            & unzip3PGArray
+        )
+      pure ()
+
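+-- | Unzip a list of triples into three parallel 'PGArray's; this matches the
+-- three array parameters that the @UNNEST(?, ?, ?)@ insert above expects.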
+unzip3PGArray :: [(a1, a2, a3)] -> (PGArray a1, PGArray a2, PGArray a3)
+unzip3PGArray xs = xs & unzip3 & \(a, b, c) -> (PGArray a, PGArray b, PGArray c)
+
+redactedGetTorrentFileAndInsert ::
+  ( HasField "torrentId" r Int,
+    MonadPostgres m,
+    MonadThrow m,
+    MonadLogger m,
+    MonadOtel m
+  ) =>
+  r ->
+  Transaction m (Label "torrentFile" ByteString)
+redactedGetTorrentFileAndInsert dat = inSpan' "Redacted Get Torrent File and Insert" $ \span -> do
+  bytes <- redactedGetTorrentFile dat
+  execute
+    [sql|
+    UPDATE redacted.torrents_json
+    SET torrent_file = ?::bytea
+    WHERE torrent_id = ?::integer
+  |]
+    ( (Binary bytes :: Binary ByteString),
+      dat.torrentId
+    )
+    >>= assertOneUpdated span "redactedGetTorrentFileAndInsert"
+    >>= \() -> pure (label @"torrentFile" bytes)
+
+getTorrentFileById ::
+  ( MonadPostgres m,
+    HasField "torrentId" r Int,
+    MonadThrow m
+  ) =>
+  r ->
+  Transaction m (Maybe (Label "torrentFile" ByteString))
+getTorrentFileById dat = do
+  queryWith
+    [sql|
+    SELECT torrent_file
+    FROM redacted.torrents
+    WHERE torrent_id = ?::integer
+  |]
+    (Only $ (dat.torrentId :: Int))
+    (fmap @Maybe (label @"torrentFile") <$> Dec.byteaMay)
+    >>= ensureSingleRow
+
+updateTransmissionTorrentHashById ::
+  ( MonadPostgres m,
+    HasField "torrentId" r Int,
+    HasField "torrentHash" r Text
+  ) =>
+  r ->
+  Transaction m (Label "numberOfRowsAffected" Natural)
+updateTransmissionTorrentHashById dat = do
+  execute
+    [sql|
+    UPDATE redacted.torrents_json
+    SET transmission_torrent_hash = ?::text
+    WHERE torrent_id = ?::integer
+    |]
+    ( dat.torrentHash :: Text,
+      dat.torrentId :: Int
+    )
+
+assertOneUpdated ::
+  (HasField "numberOfRowsAffected" r Natural, MonadThrow m, MonadIO m) =>
+  Otel.Span ->
+  Text ->
+  r ->
+  m ()
+assertOneUpdated span name x = case x.numberOfRowsAffected of
+  1 -> pure ()
+  n -> appThrowTree span ([fmt|{name :: Text}: Expected to update exactly one row, but updated {n :: Natural} row(s)|])
+
+data TorrentData transmissionInfo = TorrentData
+  { groupId :: Int,
+    torrentId :: Int,
+    seedingWeight :: Int,
+    artists :: [T2 "artistId" Int "artistName" Text],
+    torrentJson :: Json.Value,
+    torrentGroupJson :: TorrentGroupJson,
+    torrentStatus :: TorrentStatus transmissionInfo
+  }
+
+data TorrentGroupJson = TorrentGroupJson
+  { groupName :: Text,
+    groupYear :: Int
+  }
+
+data TorrentStatus transmissionInfo
+  = NoTorrentFileYet
+  | NotInTransmissionYet
+  | InTransmission (T2 "torrentHash" Text "transmissionInfo" transmissionInfo)
+
+getTorrentById :: (MonadPostgres m, HasField "torrentId" r Int, MonadThrow m) => r -> Transaction m Json.Value
+getTorrentById dat = do
+  queryWith
+    [sql|
+    SELECT full_json_result FROM redacted.torrents
+    WHERE torrent_id = ?::integer
+  |]
+    (getLabel @"torrentId" dat)
+    (Dec.json Json.asValue)
+    >>= ensureSingleRow
+
+data GetBestTorrentsFilter = GetBestTorrentsFilter
+  { onlyDownloaded :: Bool,
+    onlyArtist :: Maybe (Label "redactedId" Natural)
+  }
+
+-- | Find the best torrent for each torrent group (based on the seeding_weight)
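+--
+-- The CTE keeps one row per @torrent_group@ via @DISTINCT ON@, ordered by
+-- @seeding_weight DESC@, so only the highest-weighted torrent of each group survives.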
+getBestTorrents ::
+  (MonadPostgres m) =>
+  GetBestTorrentsFilter ->
+  Transaction m [TorrentData ()]
+getBestTorrents opts = do
+  queryWith
+    [sql|
+      WITH filtered_torrents AS (
+        SELECT DISTINCT ON (torrent_group)
+          id
+        FROM
+          redacted.torrents
+        WHERE
+          -- onlyDownloaded
+          ((NOT ?::bool) OR torrent_file IS NOT NULL)
+          -- filter by artist id
+          AND
+          (?::bool OR (to_jsonb(?::int) <@ (jsonb_path_query_array(full_json_result, '$.artists[*].id'))))
+        ORDER BY torrent_group, seeding_weight DESC
+      )
+      SELECT
+        tg.group_id,
+        t.torrent_id,
+        t.seeding_weight,
+        t.full_json_result AS torrent_json,
+        tg.full_json_result AS torrent_group_json,
+        t.torrent_file IS NOT NULL AS has_torrent_file,
+        t.transmission_torrent_hash
+      FROM filtered_torrents f
+      JOIN redacted.torrents t ON t.id = f.id
+      JOIN redacted.torrent_groups tg ON tg.id = t.torrent_group
+      ORDER BY seeding_weight DESC
+    |]
+    ( do
+        let (onlyArtistB, onlyArtistId) = case opts.onlyArtist of
+              Nothing -> (True, 0)
+              Just a -> (False, a.redactedId)
+        ( opts.onlyDownloaded :: Bool,
+          onlyArtistB :: Bool,
+          onlyArtistId & fromIntegral @Natural @Int
+          )
+    )
+    ( do
+        groupId <- Dec.fromField @Int
+        torrentId <- Dec.fromField @Int
+        seedingWeight <- Dec.fromField @Int
+        (torrentJson, artists) <- Dec.json $ do
+          val <- Json.asValue
+          artists <- Json.keyOrDefault "artists" [] $ Json.eachInArray $ do
+            id_ <- Json.keyLabel @"artistId" "id" (Json.asIntegral @_ @Int)
+            name <- Json.keyLabel @"artistName" "name" Json.asText
+            pure $ T2 id_ name
+          pure (val, artists)
+        torrentGroupJson <-
+          ( Dec.json $ do
+              groupName <- Json.key "groupName" Json.asText
+              groupYear <- Json.key "groupYear" (Json.asIntegral @_ @Int)
+              pure $ TorrentGroupJson {..}
+            )
+        hasTorrentFile <- Dec.fromField @Bool
+        transmissionTorrentHash <-
+          Dec.fromField @(Maybe Text)
+        pure $
+          TorrentData
+            { torrentStatus =
+                if
+                  | not hasTorrentFile -> NoTorrentFileYet
+                  | Nothing <- transmissionTorrentHash -> NotInTransmissionYet
+                  | Just hash <- transmissionTorrentHash ->
+                      InTransmission $
+                        T2 (label @"torrentHash" hash) (label @"transmissionInfo" ()),
+              ..
+            }
+    )
+
+-- | Do a request to the redacted API. If you know what that is, you know how to find the API docs.
+mkRedactedApiRequest ::
+  ( MonadThrow m,
+    MonadIO m,
+    MonadLogger m,
+    HasField "action" p ByteString,
+    HasField "actionArgs" p [(ByteString, Maybe ByteString)]
+  ) =>
+  p ->
+  m Http.Request
+mkRedactedApiRequest dat = do
+  authKey <- runCommandExpect0 "pass" ["internet/redacted/api-keys/whatcd-resolver"]
+  pure $
+    [fmt|https://redacted.ch/ajax.php|]
+      & Http.setRequestMethod "GET"
+      & Http.setQueryString (("action", Just dat.action) : dat.actionArgs)
+      & Http.setRequestHeader "Authorization" [authKey]
+
+httpTorrent ::
+  ( MonadIO m,
+    MonadThrow m
+  ) =>
+  Otel.Span ->
+  Http.Request ->
+  m ByteString
+httpTorrent span req =
+  Http.httpBS req
+    >>= assertM
+      span
+      ( \resp -> do
+          let statusCode = resp & Http.responseStatus & (.statusCode)
+              contentType =
+                resp
+                  & Http.responseHeaders
+                  & List.lookup "content-type"
+                  <&> Wai.parseContentType
+                  <&> (\(ct, _mimeAttributes) -> ct)
+          if
+            | statusCode == 200,
+              Just "application/x-bittorrent" <- contentType ->
+                Right $ (resp & Http.responseBody)
+            | statusCode == 200,
+              Just otherType <- contentType ->
+                Left [fmt|Redacted returned a non-torrent body, with content-type "{otherType}"|]
+            | statusCode == 200,
+              Nothing <- contentType ->
+                Left [fmt|Redacted returned a body with unspecified content type|]
+            | code <- statusCode -> Left [fmt|Redacted returned a non-200 error code, code {code}: {resp & showPretty}|]
+      )
+
+httpJson ::
+  ( MonadThrow m,
+    MonadOtel m
+  ) =>
+  (Optional (Label "contentType" ByteString)) ->
+  Json.Parse ErrorTree b ->
+  Http.Request ->
+  m b
+httpJson opts parser req = inSpan' "HTTP Request (JSON)" $ \span -> do
+  let opts' = opts.withDefault (label @"contentType" "application/json")
+  Http.httpBS req
+    >>= assertM
+      span
+      ( \resp -> do
+          let statusCode = resp & Http.responseStatus & (.statusCode)
+              contentType =
+                resp
+                  & Http.responseHeaders
+                  & List.lookup "content-type"
+                  <&> Wai.parseContentType
+                  <&> (\(ct, _mimeAttributes) -> ct)
+          if
+            | statusCode == 200,
+              Just ct <- contentType,
+              ct == opts'.contentType ->
+                Right $ (resp & Http.responseBody)
+            | statusCode == 200,
+              Just otherType <- contentType ->
+                Left [fmt|Server returned a non-json body, with content-type "{otherType}"|]
+            | statusCode == 200,
+              Nothing <- contentType ->
+                Left [fmt|Server returned a body with unspecified content type|]
+            | code <- statusCode -> Left [fmt|Server returned a non-200 error code, code {code}: {resp & showPretty}|]
+      )
+    >>= assertM
+      span
+      ( \body ->
+          Json.parseStrict parser body
+            & first (Json.parseErrorTree "could not parse redacted response")
+      )
+
+redactedApiRequestJson ::
+  ( MonadThrow m,
+    MonadLogger m,
+    HasField "action" p ByteString,
+    HasField "actionArgs" p [(ByteString, Maybe ByteString)],
+    MonadOtel m
+  ) =>
+  p ->
+  Json.Parse ErrorTree a ->
+  m a
+redactedApiRequestJson dat parser =
+  mkRedactedApiRequest dat
+    >>= httpJson defaults parser
diff --git a/users/Profpatsch/whatcd-resolver/src/Transmission.hs b/users/Profpatsch/whatcd-resolver/src/Transmission.hs
new file mode 100644
index 0000000000..acbab00162
--- /dev/null
+++ b/users/Profpatsch/whatcd-resolver/src/Transmission.hs
@@ -0,0 +1,319 @@
+{-# LANGUAGE QuasiQuotes #-}
+
+module Transmission where
+
+import AppT
+import Control.Monad.Logger.CallStack
+import Control.Monad.Reader
+import Data.Aeson qualified as Json
+import Data.Aeson.BetterErrors qualified as Json
+import Data.Aeson.KeyMap qualified as KeyMap
+import Data.Error.Tree
+import Data.HashMap.Strict qualified as HashMap
+import Data.List qualified as List
+import Data.List.NonEmpty qualified as NonEmpty
+import Data.Map.Strict qualified as Map
+import Database.PostgreSQL.Simple (Only (..))
+import Database.PostgreSQL.Simple.Types (PGArray (PGArray))
+import FieldParser (FieldParser' (..))
+import FieldParser qualified as Field
+import Html qualified
+import Http qualified
+import Json qualified
+import Json.Enc (Enc)
+import Json.Enc qualified as Enc
+import Label
+import MyPrelude
+import Network.HTTP.Types
+import OpenTelemetry.Attributes (ToAttribute (toAttribute))
+import OpenTelemetry.Trace qualified as Otel hiding (getTracer, inSpan, inSpan')
+import Optional
+import Postgres.MonadPostgres
+import Pretty
+import Text.Blaze.Html (Html)
+import UnliftIO
+import Prelude hiding (span)
+
+-- | A value between (inclusive) 0% and (inclusive) 100%. Precise to 1% steps.
+newtype Percentage = Percentage {unPercentage :: Int}
+  deriving stock (Show)
+
+-- | Parse a scientific into a Percentage
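+-- (e.g. 0.5 parses to @Percentage 50@; values outside [0, 1] are rejected)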
+scientificPercentage :: FieldParser' Error Scientific Percentage
+scientificPercentage =
+  Field.boundedScientificRealFloat @Float
+    >>> ( FieldParser $ \f ->
+            if
+              | f < 0 -> Left "percentage cannot be negative"
+              | f > 1 -> Left "percentage cannot be over 100%"
+              | otherwise -> Right $ Percentage $ ceiling (f * 100)
+        )
+
+-- | Fetch the current status from transmission, and remove the transmission hash from our database
+-- iff it does not exist in transmission anymore
+getAndUpdateTransmissionTorrentsStatus ::
+  ( MonadTransmission m,
+    MonadThrow m,
+    MonadLogger m,
+    MonadPostgres m,
+    MonadOtel m
+  ) =>
+  Map (Label "torrentHash" Text) () ->
+  (Transaction m (Map (Label "torrentHash" Text) (Label "percentDone" Percentage)))
+getAndUpdateTransmissionTorrentsStatus knownTorrents = do
+  let fields = ["hashString", "percentDone"]
+  actualTorrents <-
+    lift @Transaction $
+      doTransmissionRequest'
+        ( transmissionRequestListOnlyTorrents
+            ( T2
+                (label @"fields" fields)
+                (label @"ids" (Map.keys knownTorrents))
+            )
+            $ do
+              torrentHash <- Json.keyLabel @"torrentHash" "hashString" Json.asText
+              percentDone <- Json.keyLabel @"percentDone" "percentDone" (Field.toJsonParser $ Field.jsonNumber >>> scientificPercentage)
+              pure (torrentHash, percentDone)
+        )
+        <&> Map.fromList
+  let toDelete = Map.difference knownTorrents actualTorrents
+  execute
+    [fmt|
+    UPDATE redacted.torrents_json
+    SET transmission_torrent_hash = NULL
+    WHERE transmission_torrent_hash = ANY (?::text[])
+  |]
+    $ Only (toDelete & Map.keys <&> (.torrentHash) & PGArray :: PGArray Text)
+  pure actualTorrents
+
+getTransmissionTorrentsTable ::
+  (MonadTransmission m, MonadThrow m, MonadLogger m, MonadOtel m) => m Html
+getTransmissionTorrentsTable = do
+  let fields =
+        [ "hashString",
+          "name",
+          "percentDone",
+          "percentComplete",
+          "downloadDir",
+          "files"
+        ]
+  doTransmissionRequest'
+    ( transmissionRequestListAllTorrents fields $ do
+        Json.asObject <&> KeyMap.toMapText
+    )
+    <&> \resp ->
+      Html.toTable
+        ( resp
+            & List.sortOn (\m -> m & Map.lookup "percentDone" & fromMaybe (Json.Number 0))
+            <&> Map.toList
+            -- TODO
+            & List.take 100
+        )
+
+data TransmissionRequest = TransmissionRequest
+  { method :: Text,
+    arguments :: Map Text Enc,
+    tag :: Maybe Int
+  }
+  deriving stock (Show)
+
+transmissionConnectionConfig :: T3 "host" Text "port" Int "usePlainHttp" Bool
+transmissionConnectionConfig = (T3 (label @"host" "localhost") (label @"port" 9091) (label @"usePlainHttp" True))
+
+transmissionRequestListAllTorrents :: (Monad m) => [Text] -> Json.ParseT e m out -> (TransmissionRequest, Json.ParseT e m [out])
+transmissionRequestListAllTorrents fields parseTorrent =
+  ( TransmissionRequest
+      { method = "torrent-get",
+        arguments =
+          Map.fromList
+            [ ("fields", Enc.list Enc.text fields)
+            ],
+        tag = Nothing
+      },
+    Json.key "torrents" $ Json.eachInArray parseTorrent
+  )
+
+transmissionRequestListOnlyTorrents ::
+  ( HasField "ids" r1 [(Label "torrentHash" Text)],
+    HasField "fields" r1 [Text],
+    Monad m
+  ) =>
+  r1 ->
+  Json.ParseT e m out ->
+  (TransmissionRequest, Json.ParseT e m [out])
+transmissionRequestListOnlyTorrents dat parseTorrent =
+  ( TransmissionRequest
+      { method = "torrent-get",
+        arguments =
+          Map.fromList
+            [ ("ids", Enc.list (\i -> Enc.text i.torrentHash) dat.ids),
+              ("fields", Enc.list Enc.text dat.fields)
+            ],
+        tag = Nothing
+      },
+    Json.key "torrents" $ Json.eachInArray parseTorrent
+  )
+
+transmissionRequestAddTorrent ::
+  (HasField "torrentFile" r ByteString, Monad m) =>
+  r ->
+  ( TransmissionRequest,
+    Json.ParseT err m (T2 "torrentHash" Text "torrentName" Text)
+  )
+transmissionRequestAddTorrent dat =
+  ( TransmissionRequest
+      { method = "torrent-add",
+        arguments =
+          Map.fromList
+            [ ("metainfo", Enc.base64Bytes dat.torrentFile),
+              ("paused", Enc.bool False)
+            ],
+        tag = Nothing
+      },
+    do
+      let p method = Json.key method $ do
+            hash <- Json.keyLabel @"torrentHash" "hashString" Json.asText
+            name <- Json.keyLabel @"torrentName" "name" Json.asText
+            pure $ T2 hash name
+      p "torrent-duplicate" Json.<|> p "torrent-added"
+  )
+
+data TransmissionResponse output = TransmissionResponse
+  { result :: TransmissionResponseStatus,
+    arguments :: Maybe output,
+    tag :: Maybe Int
+  }
+  deriving stock (Show)
+
+data TransmissionResponseStatus
+  = TransmissionResponseSuccess
+  | TransmissionResponseFailure Text
+  deriving stock (Show)
+
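+-- | Run a transmission request against the configured instance and unwrap the
+-- response envelope, throwing if the RPC result is not a success.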
+doTransmissionRequest' ::
+  ( MonadTransmission m,
+    MonadThrow m,
+    MonadLogger m,
+    MonadOtel m
+  ) =>
+  (TransmissionRequest, Json.Parse Error output) ->
+  m output
+doTransmissionRequest' req = inSpan' "Transmission Request" $ \span -> do
+  resp <-
+    doTransmissionRequest
+      span
+      transmissionConnectionConfig
+      req
+  case resp.result of
+    TransmissionResponseFailure err -> appThrowTree span (nestedError "Transmission RPC error" $ singleError $ newError err)
+    TransmissionResponseSuccess -> case resp.arguments of
+      Nothing -> appThrowTree span "Transmission RPC error: No `arguments` field in response"
+      Just out -> pure out
+
+-- | Contact the transmission RPC, and do the CSRF protection dance.
+--
+-- Spec: https://github.com/transmission/transmission/blob/main/docs/rpc-spec.md
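+--
+-- The dance: send the request with the last-known session id; if transmission
+-- answers 409, take the fresh id from the @X-Transmission-Session-Id@ response
+-- header, store it, and retry the same request.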
+doTransmissionRequest ::
+  ( MonadTransmission m,
+    HasField "host" t1 Text,
+    HasField "port" t1 Int,
+    HasField "usePlainHttp" t1 Bool,
+    MonadThrow m,
+    MonadLogger m,
+    MonadOtel m
+  ) =>
+  Otel.Span ->
+  t1 ->
+  (TransmissionRequest, Json.Parse Error output) ->
+  m (TransmissionResponse output)
+doTransmissionRequest span dat (req, parser) = do
+  sessionId <- getCurrentTransmissionSessionId
+  let textArg t = (Enc.text t, Otel.toAttribute @Text t)
+  let encArg enc = (enc, Otel.toAttribute @Text $ enc & Enc.encToTextPretty)
+  let intArg i = (Enc.int i, Otel.toAttribute @Int i)
+
+  let body :: [(Text, (Enc, Otel.Attribute))] =
+        ( [ ("method", req.method & textArg),
+            ("arguments", encArg $ Enc.map id req.arguments)
+          ]
+            <> (req.tag & foldMap (\t -> [("tag", t & intArg)]))
+        )
+  addAttributes
+    span
+    ( HashMap.fromList $
+        body
+          <&> bimap
+            (\k -> [fmt|transmission.{k}|])
+            (\(_, attr) -> attr)
+    )
+  resp <-
+    Http.doRequestJson
+      ( (Http.mkRequestOptions (T2 (label @"method" "POST") (label @"host" dat.host)))
+          { Http.path = mkOptional ["transmission", "rpc"],
+            Http.port = mkOptional dat.port,
+            Http.headers = mkOptional $ (sessionId & ifExists ("X-Transmission-Session-Id",)),
+            Http.usePlainHttp = mkOptional dat.usePlainHttp
+          }
+      )
+      (body <&> second fst & Enc.object)
+  -- Implement the CSRF protection thingy
+  case resp & Http.getResponseStatus & (.statusCode) of
+    409 -> inSpan' "New Transmission Session ID" $ \span' -> do
+      tid <-
+        resp
+          & Http.getResponseHeader "X-Transmission-Session-Id"
+          & nonEmpty
+          & annotate [fmt|Missing "X-Transmission-Session-Id" header in 409 response: {showPretty resp}|]
+          & unwrapIOError
+          & liftIO
+          <&> NonEmpty.head
+
+      addAttributes span' $
+        HashMap.fromList
+          [ ("transmission.new_session_id", tid & bytesToTextUtf8Lenient & toAttribute),
+            ("transmission.old_session_id", sessionId <&> bytesToTextUtf8Lenient & fromMaybe "<none yet>" & toAttribute)
+          ]
+
+      updateTransmissionSessionId tid
+
+      doTransmissionRequest span dat (req, parser)
+    200 -> do
+      addAttributes span $
+        HashMap.fromList
+          [ ("transmission.valid_session_id", sessionId <&> bytesToTextUtf8Lenient & fromMaybe "<none yet>" & toAttribute)
+          ]
+      resp
+        & Http.getResponseBody
+        & Json.parseStrict
+          ( Json.mapError singleError $ do
+              result <-
+                Json.key "result" Json.asText <&> \case
+                  "success" -> TransmissionResponseSuccess
+                  err -> TransmissionResponseFailure err
+              arguments <-
+                Json.keyMay "arguments" parser
+              tag <-
+                Json.keyMay
+                  "tag"
+                  (Field.toJsonParser (Field.jsonNumber >>> Field.boundedScientificIntegral "tag too long"))
+              pure TransmissionResponse {..}
+          )
+        & first (Json.parseErrorTree "Cannot parse transmission RPC response")
+        & \case
+          Right a -> pure a
+          Left err -> do
+            case Json.eitherDecodeStrict' @Json.Value (resp & Http.getResponseBody) of
+              Left _err -> pure ()
+              Right val -> logInfo [fmt|failing transmission response: {showPrettyJson val}|]
+            appThrowTree span err
+    _ -> liftIO $ unwrapIOError $ Left [fmt|Non-200 response: {showPretty resp}|]
+
+class MonadTransmission m where
+  getCurrentTransmissionSessionId :: m (Maybe ByteString)
+  updateTransmissionSessionId :: ByteString -> m ()
+
+instance (MonadIO m) => MonadTransmission (AppT m) where
+  getCurrentTransmissionSessionId = AppT (asks (.transmissionSessionId)) >>= readIORef
+  updateTransmissionSessionId t = do
+    var <- AppT $ asks (.transmissionSessionId)
+    writeIORef var (Just t)
diff --git a/users/Profpatsch/whatcd-resolver/src/WhatcdResolver.hs b/users/Profpatsch/whatcd-resolver/src/WhatcdResolver.hs
new file mode 100644
index 0000000000..7b890fdd8f
--- /dev/null
+++ b/users/Profpatsch/whatcd-resolver/src/WhatcdResolver.hs
@@ -0,0 +1,800 @@
+{-# LANGUAGE QuasiQuotes #-}
+
+module WhatcdResolver where
+
+import AppT
+import Arg
+import Control.Category qualified as Cat
+import Control.Monad.Catch.Pure (runCatch)
+import Control.Monad.Logger.CallStack
+import Control.Monad.Reader
+import Data.Aeson qualified as Json
+import Data.Aeson.BetterErrors qualified as Json
+import Data.Aeson.KeyMap qualified as KeyMap
+import Data.Error.Tree (prettyErrorTree)
+import Data.HashMap.Strict qualified as HashMap
+import Data.List qualified as List
+import Data.Map.Strict qualified as Map
+import Data.Pool qualified as Pool
+import Data.Text qualified as Text
+import Database.PostgreSQL.Simple qualified as Postgres
+import Database.PostgreSQL.Simple.SqlQQ (sql)
+import Database.PostgreSQL.Simple.Types (PGArray (PGArray))
+import Database.Postgres.Temp qualified as TmpPg
+import FieldParser (FieldParser, FieldParser' (..))
+import FieldParser qualified as Field
+import Html qualified
+import IHP.HSX.QQ (hsx)
+import IHP.HSX.ToHtml (ToHtml)
+import Json qualified
+import Json.Enc (Enc)
+import Json.Enc qualified as Enc
+import JsonLd
+import Label
+import Multipart2 qualified as Multipart
+import MyPrelude
+import Network.HTTP.Client.Conduit qualified as Http
+import Network.HTTP.Simple qualified as Http
+import Network.HTTP.Types
+import Network.HTTP.Types qualified as Http
+import Network.URI (URI)
+import Network.URI qualified
+import Network.Wai (ResponseReceived)
+import Network.Wai qualified as Wai
+import Network.Wai.Handler.Warp qualified as Warp
+import Network.Wai.Parse qualified as Wai
+import OpenTelemetry.Attributes qualified as Otel
+import OpenTelemetry.Trace qualified as Otel hiding (getTracer, inSpan, inSpan')
+import OpenTelemetry.Trace.Monad qualified as Otel
+import Parse (Parse)
+import Parse qualified
+import Postgres.Decoder qualified as Dec
+import Postgres.MonadPostgres
+import Pretty
+import Redacted
+import System.Directory qualified as Dir
+import System.Directory qualified as Xdg
+import System.Environment qualified as Env
+import System.FilePath ((</>))
+import Text.Blaze.Html (Html)
+import Text.Blaze.Html.Renderer.Utf8 qualified as Html
+import Text.Blaze.Html5 qualified as Html
+import Tool (readTool, readTools)
+import Transmission
+import UnliftIO hiding (Handler)
+import Prelude hiding (span)
+
+main :: IO ()
+main =
+  runAppWith
+    ( do
+        -- todo: trace that to the init functions as well
+        Otel.inSpan "whatcd-resolver main function" Otel.defaultSpanArguments $ do
+          _ <- runTransaction migrate
+          htmlUi
+    )
+    <&> first showToError
+    >>= expectIOError "could not start whatcd-resolver"
+
+htmlUi :: AppT IO ()
+htmlUi = do
+  uniqueRunId <-
+    runTransaction $
+      querySingleRowWith
+        [sql|
+            SELECT gen_random_uuid()::text
+        |]
+        ()
+        (Dec.fromField @Text)
+
+  withRunInIO $ \runInIO -> Warp.run 9093 $ \req respondOrig -> do
+    let catchAppException act =
+          try act >>= \case
+            Right a -> pure a
+            Left (AppException err) -> do
+              runInIO (logError err)
+              respondOrig (Wai.responseLBS Http.status500 [] "")
+
+    catchAppException $ do
+      let mp span parser =
+            Multipart.parseMultipartOrThrow
+              (appThrowTree span)
+              parser
+              req
+
+      let torrentIdMp span =
+            mp
+              span
+              ( do
+                  label @"torrentId" <$> Multipart.field "torrent-id" ((Field.utf8 >>> Field.signedDecimal >>> Field.bounded @Int "int"))
+              )
+
+      let parseQueryArgsNewSpan spanName parser =
+            Parse.runParse "Unable to find the right request query arguments" (lmap Wai.queryString parser) req
+              & assertMNewSpan spanName id
+
+      let handlers :: Handlers (AppT IO)
+          handlers respond =
+            Map.fromList
+              [ ("", respond.html (mainHtml uniqueRunId)),
+                ( "snips/redacted/search",
+                  respond.html $
+                    \span -> do
+                      dat <-
+                        mp
+                          span
+                          ( do
+                              label @"searchstr" <$> Multipart.field "redacted-search" Cat.id
+                          )
+                      snipsRedactedSearch dat
+                ),
+                ( "snips/redacted/torrentDataJson",
+                  respond.html $ \span -> do
+                    dat <- torrentIdMp span
+                    Html.mkVal <$> (runTransaction $ getTorrentById dat)
+                ),
+                ( "snips/redacted/getTorrentFile",
+                  respond.html $ \span -> do
+                    dat <- torrentIdMp span
+                    runTransaction $ do
+                      inserted <- redactedGetTorrentFileAndInsert dat
+                      running <-
+                        lift @Transaction $
+                          doTransmissionRequest' (transmissionRequestAddTorrent inserted)
+                      updateTransmissionTorrentHashById
+                        ( T2
+                            (getLabel @"torrentHash" running)
+                            (getLabel @"torrentId" dat)
+                        )
+                      pure $
+                        everySecond
+                          "snips/transmission/getTorrentState"
+                          (Enc.object [("torrent-hash", Enc.text running.torrentHash)])
+                          "Starting"
+                ),
+                -- TODO: this is bad duplication??
+                ( "snips/redacted/startTorrentFile",
+                  respond.html $ \span -> do
+                    dat <- torrentIdMp span
+                    runTransaction $ do
+                      file <-
+                        getTorrentFileById dat
+                          <&> annotate [fmt|No torrent file for torrentId "{dat.torrentId}"|]
+                          >>= orAppThrowTree span
+
+                      running <-
+                        lift @Transaction $
+                          doTransmissionRequest' (transmissionRequestAddTorrent file)
+                      updateTransmissionTorrentHashById
+                        ( T2
+                            (getLabel @"torrentHash" running)
+                            (getLabel @"torrentId" dat)
+                        )
+                      pure $
+                        everySecond
+                          "snips/transmission/getTorrentState"
+                          (Enc.object [("torrent-hash", Enc.text running.torrentHash)])
+                          "Starting"
+                ),
+                ( "snips/transmission/getTorrentState",
+                  respond.html $ \span -> do
+                    dat <- mp span $ label @"torrentHash" <$> Multipart.field "torrent-hash" Field.utf8
+                    status <-
+                      doTransmissionRequest'
+                        ( transmissionRequestListOnlyTorrents
+                            ( T2
+                                (label @"ids" [label @"torrentHash" dat.torrentHash])
+                                (label @"fields" ["hashString"])
+                            )
+                            (Json.keyLabel @"torrentHash" "hashString" Json.asText)
+                        )
+                        <&> List.find (\torrent -> torrent.torrentHash == dat.torrentHash)
+
+                    pure $
+                      case status of
+                        Nothing -> [hsx|ERROR unknown|]
+                        Just _torrent -> [hsx|Running|]
+                ),
+                ( "snips/jsonld/render",
+                  do
+                    let HandlerResponses {htmlWithQueryArgs} = respond
+                    htmlWithQueryArgs
+                      ( label @"target"
+                          <$> ( (singleQueryArgument "target" Field.utf8 >>> textToURI)
+                                  & Parse.andParse uriToHttpClientRequest
+                              )
+                      )
+                      ( \qry _span -> do
+                          jsonld <- httpGetJsonLd (qry.target)
+                          pure $ renderJsonld jsonld
+                      )
+                ),
+                ( "artist",
+                  do
+                    let HandlerResponses {htmlWithQueryArgs} = respond
+
+                    htmlWithQueryArgs
+                      ( label @"redactedId"
+                          <$> (singleQueryArgument "redacted_id" (Field.utf8 >>> Field.decimalNatural))
+                      )
+                      $ \qry _span -> do
+                        artistPage qry
+                ),
+                ( "autorefresh",
+                  respond.plain $ do
+                    qry <-
+                      parseQueryArgsNewSpan
+                        "Autorefresh Query Parse"
+                        ( label @"hasItBeenRestarted"
+                            <$> singleQueryArgument "hasItBeenRestarted" Field.utf8
+                        )
+                    pure $
+                      Wai.responseLBS
+                        Http.ok200
+                        ( [("Content-Type", "text/html")]
+                            <> if uniqueRunId /= qry.hasItBeenRestarted
+                              then -- cause the client side to refresh
+                                [("HX-Refresh", "true")]
+                              else []
+                        )
+                        ""
+                )
+              ]
+      runInIO $
+        runHandlers
+          (\respond -> respond.html $ (mainHtml uniqueRunId))
+          handlers
+          req
+          respondOrig
+  where
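+    -- poll the given endpoint every second via htmx, replacing this element with the response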
+    everySecond :: Text -> Enc -> Html -> Html
+    everySecond call extraData innerHtml = [hsx|<div hx-trigger="every 1s" hx-swap="outerHTML" hx-post={call} hx-vals={Enc.encToBytesUtf8 extraData}>{innerHtml}</div>|]
+
+    mainHtml :: Text -> Otel.Span -> AppT IO Html
+    mainHtml uniqueRunId _span = runTransaction $ do
+      -- jsonld <-
+      --   httpGetJsonLd
+      --     ( URI.parseURI "https://musicbrainz.org/work/92000fd4-d304-406d-aeb4-6bdbeed318ec" & annotate "not an URI" & unwrapError,
+      --       "https://musicbrainz.org/work/92000fd4-d304-406d-aeb4-6bdbeed318ec"
+      --     )
+      --     <&> renderJsonld
+      bestTorrentsTable <- getBestTorrentsTable Nothing
+      -- transmissionTorrentsTable <- lift @Transaction getTransmissionTorrentsTable
+      pure $
+        htmlPageChrome
+          [hsx|
+            <form
+              hx-post="/snips/redacted/search"
+              hx-target="#redacted-search-results">
+              <label for="redacted-search">Redacted Search</label>
+              <input
+                id="redacted-search"
+                type="text"
+                name="redacted-search" />
+              <button type="submit" hx-disabled-elt="this">Search</button>
+              <div class="htmx-indicator">Search running!</div>
+            </form>
+            <div id="redacted-search-results">
+              {bestTorrentsTable}
+            </div>
+            <!-- refresh the page if the uniqueRunId is different -->
+            <input
+                hidden
+                type="text"
+                id="autorefresh"
+                name="hasItBeenRestarted"
+                value={uniqueRunId}
+                hx-get="/autorefresh"
+                hx-trigger="every 5s"
+                hx-swap="none"
+            />
+        |]
+
+htmlPageChrome :: (ToHtml a) => a -> Html
+htmlPageChrome body =
+  Html.docTypeHtml $
+    [hsx|
+      <head>
+        <!-- TODO: set nice page title for each page -->
+        <title>whatcd-resolver</title>
+        <meta charset="utf-8">
+        <meta name="viewport" content="width=device-width, initial-scale=1">
+        <link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-9ndCyUaIbzAi2FUVXJi0CjmCapSmO7SnpJef0486qhLnuZ2cdeRhO02iuK6FUUVM" crossorigin="anonymous">
+        <script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/js/bootstrap.bundle.min.js" integrity="sha384-geWF76RCwLtnZ8qwWowPQNguL3RmwHVBC9FhGdlKrxdiJJigb/j/68SIy3Te4Bkz" crossorigin="anonymous"></script>
+        <script src="https://unpkg.com/htmx.org@1.9.2" integrity="sha384-L6OqL9pRWyyFU3+/bjdSri+iIphTN/bvYyM37tICVyOJkWZLpP2vGn6VUEXgzg6h" crossorigin="anonymous"></script>
+        <style>
+          dl {
+            margin: 1em;
+            padding: 0.5em 1em;
+            border: thin solid;
+          }
+        </style>
+      </head>
+      <body>
+        {body}
+      </body>
+    |]
+
+artistPage ::
+  ( HasField "redactedId" dat Natural,
+    MonadPostgres m,
+    MonadOtel m,
+    MonadLogger m,
+    MonadThrow m,
+    MonadTransmission m
+  ) =>
+  dat ->
+  m Html
+artistPage dat = runTransaction $ do
+  torrents <- getBestTorrentsTable (Just $ getLabel @"redactedId" dat)
+  pure $
+    htmlPageChrome
+      [hsx|
+        Artist ID: {dat.redactedId}
+
+        {torrents}
+      |]
+
+type Handlers m = HandlerResponses m -> Map Text (m ResponseReceived)
+
+data HandlerResponses m = HandlerResponses
+  { -- | render html
+    html :: (Otel.Span -> m Html) -> m ResponseReceived,
+    -- | render html after parsing some query arguments
+    htmlWithQueryArgs :: forall a. (Parse Query a -> (a -> Otel.Span -> m Html) -> m ResponseReceived),
+    -- | render a plain wai response
+    plain :: (m Wai.Response -> m ResponseReceived)
+  }
+
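+-- | Dispatch on the request path against the handler map, falling back to the
+-- default handler; each html handler runs inside its own tracing span.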
+runHandlers ::
+  (MonadOtel m) =>
+  (HandlerResponses m -> m ResponseReceived) ->
+  (HandlerResponses m -> Map Text (m ResponseReceived)) ->
+  Wai.Request ->
+  (Wai.Response -> IO ResponseReceived) ->
+  m ResponseReceived
+runHandlers defaultHandler handlers req respond = withRunInIO $ \runInIO -> do
+  let path = req & Wai.pathInfo & Text.intercalate "/"
+  let html act =
+        Otel.inSpan'
+          [fmt|Route /{path}|]
+          ( Otel.defaultSpanArguments
+              { Otel.attributes =
+                  HashMap.fromList
+                    [ ("_.server.path", Otel.toAttribute @Text path),
+                      ("_.server.query_args", Otel.toAttribute @Text (req.rawQueryString & bytesToTextUtf8Lenient))
+                    ]
+              }
+          )
+          ( \span -> do
+              res <- act span <&> (\h -> T2 (label @"html" h) (label @"extraHeaders" []))
+              liftIO $ respond . Wai.responseLBS Http.ok200 ([("Content-Type", "text/html")] <> res.extraHeaders) . Html.renderHtml $ res.html
+          )
+
+  let handlerResponses =
+        ( HandlerResponses
+            { plain = (\m -> liftIO $ runInIO m >>= respond),
+              html,
+              htmlWithQueryArgs = \parser act ->
+                case req & Parse.runParse "Unable to find the right request query arguments" (lmap Wai.queryString parser) of
+                  Right a -> html (act a)
+                  Left err ->
+                    html
+                      ( \span -> do
+                          recordException
+                            span
+                            ( T2
+                                (label @"type_" "Query Parse Exception")
+                                (label @"message" (prettyErrorTree err))
+                            )
+
+                          pure
+                            [hsx|
+                              <h1>Error:</h1>
+                              <pre>{err & prettyErrorTree}</pre>
+                            |]
+                      )
+            }
+        )
+  let handler =
+        (handlers handlerResponses)
+          & Map.lookup path
+          & fromMaybe (defaultHandler handlerResponses)
+  runInIO handler
+
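+-- | Parse exactly one value for the given query argument; fail if the argument
+-- is missing, has no value (i.e. is a query flag), or appears more than once.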
+singleQueryArgument :: Text -> FieldParser ByteString to -> Parse Http.Query to
+singleQueryArgument field inner =
+  Parse.mkParsePushContext
+    field
+    ( \(ctx, qry) -> case qry
+        & mapMaybe
+          ( \(k, v) ->
+              if k == (field & textToBytesUtf8)
+                then Just v
+                else Nothing
+          ) of
+        [] -> Left [fmt|No such query argument "{field}", at {ctx & Parse.showContext}|]
+        [Nothing] -> Left [fmt|Expected one query argument with a value, but "{field}" was a query flag|]
+        [Just one] -> Right one
+        more -> Left [fmt|More than one value for query argument "{field}": {show more}, at {ctx & Parse.showContext}|]
+    )
+    >>> Parse.fieldParser inner
+
+-- | Make sure we can parse the given Text into a URI.
+textToURI :: Parse Text URI
+textToURI =
+  Parse.fieldParser
+    ( FieldParser $ \text ->
+        text
+          & textToString
+          & Network.URI.parseURI
+          & annotate [fmt|Cannot parse this as a URL: "{text}"|]
+    )
+
+-- | Make sure we can parse the given URI into a Request.
+--
+-- This tries to work around the horrible, horrible interface in Http.Client.
+uriToHttpClientRequest :: Parse URI Http.Request
+uriToHttpClientRequest =
+  Parse.mkParseNoContext
+    ( \url ->
+        (url & Http.requestFromURI)
+          & runCatch
+          & first (checkException @Http.HttpException)
+          & \case
+            Left (Right (Http.InvalidUrlException urlText reason)) ->
+              Left [fmt|Unable to set the url "{urlText}" as request URL, reason: {reason}|]
+            Left (Right exc@(Http.HttpExceptionRequest _ _)) ->
+              Left [fmt|Weird! Should not get an HttpExceptionRequest when parsing a URL (bad library design), was {exc & displayException}|]
+            Left (Left someExc) ->
+              Left [fmt|Weird! Should not get anything but an HttpException when parsing a URL (bad library design), was {someExc & displayException}|]
+            Right req -> pure req
+    )
+
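+-- | Try to downcast a 'SomeException' to a concrete exception type.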
+checkException :: (Exception b) => SomeException -> Either SomeException b
+checkException some = case fromException some of
+  Nothing -> Left some
+  Just e -> Right e
+
+snipsRedactedSearch ::
+  ( MonadLogger m,
+    MonadPostgres m,
+    HasField "searchstr" r ByteString,
+    MonadThrow m,
+    MonadTransmission m,
+    MonadOtel m
+  ) =>
+  r ->
+  m Html
+snipsRedactedSearch dat = do
+  t <-
+    redactedSearchAndInsert
+      [ ("searchstr", dat.searchstr),
+        ("releasetype", "album")
+      ]
+  runTransaction $ do
+    t
+    getBestTorrentsTable (Nothing :: Maybe (Label "redactedId" Natural))
+
+data ArtistFilter = ArtistFilter
+  { onlyArtist :: Maybe (Label "artistId" Text)
+  }
+
+getBestTorrentsTable ::
+  ( MonadTransmission m,
+    MonadThrow m,
+    MonadLogger m,
+    MonadPostgres m,
+    MonadOtel m
+  ) =>
+  Maybe (Label "redactedId" Natural) ->
+  Transaction m Html
+getBestTorrentsTable artistFilter = do
+  bestStale :: [TorrentData ()] <- getBestTorrents GetBestTorrentsFilter {onlyArtist = artistFilter, onlyDownloaded = False}
+  actual <-
+    getAndUpdateTransmissionTorrentsStatus
+      ( bestStale
+          & mapMaybe
+            ( \td -> case td.torrentStatus of
+                InTransmission h -> Just h
+                _ -> Nothing
+            )
+          <&> (\t -> (getLabel @"torrentHash" t, t.transmissionInfo))
+          & Map.fromList
+      )
+  let fresh =
+        bestStale
+          -- we have to update the status of every torrent that’s not in transmission anymore
+          -- TODO I feel like it’s easier (& more correct?) to just do the database request again …
+          <&> ( \td -> case td.torrentStatus of
+                  InTransmission info ->
+                    case actual & Map.lookup (getLabel @"torrentHash" info) of
+                      -- TODO this is also pretty dumb, because it assumes that we have the torrent file if it was in transmission before,
+                      -- which is an internal fact that is established in getBestTorrents (and might change later)
+                      Nothing -> td {torrentStatus = NotInTransmissionYet}
+                      Just transmissionInfo -> td {torrentStatus = InTransmission (T2 (getLabel @"torrentHash" info) (label @"transmissionInfo" transmissionInfo))}
+                  NotInTransmissionYet -> td {torrentStatus = NotInTransmissionYet}
+                  NoTorrentFileYet -> td {torrentStatus = NoTorrentFileYet}
+              )
+  let localTorrent b = case b.torrentStatus of
+        NoTorrentFileYet -> [hsx|<button hx-post="snips/redacted/getTorrentFile" hx-swap="outerHTML" hx-vals={Enc.encToBytesUtf8 $ Enc.object [("torrent-id", Enc.int b.torrentId)]}>Upload Torrent</button>|]
+        InTransmission info -> [hsx|{info.transmissionInfo.percentDone.unPercentage}% done|]
+        NotInTransmissionYet -> [hsx|<button hx-post="snips/redacted/startTorrentFile" hx-swap="outerHTML" hx-vals={Enc.encToBytesUtf8 $ Enc.object [("torrent-id", Enc.int b.torrentId)]}>Start Torrent</button>|]
+  let bestRows =
+        fresh
+          & foldMap
+            ( \b -> do
+                let artists =
+                      b.artists
+                        <&> ( \a ->
+                                T2
+                                  (label @"url" [fmt|/artist?redacted_id={a.artistId}|])
+                                  (label @"content" $ Html.toHtml @Text a.artistName)
+                            )
+                        & mkLinkList
+
+                [hsx|
+                  <tr>
+                  <td>{localTorrent b}</td>
+                  <td>{Html.toHtml @Int b.groupId}</td>
+                  <td>
+                    {artists}
+                  </td>
+                  <td>
+                    <a href={mkRedactedTorrentLink (Arg b.groupId)} target="_blank">
+                      {Html.toHtml @Text b.torrentGroupJson.groupName}
+                    </a>
+                  </td>
+                  <td>{Html.toHtml @Int b.torrentGroupJson.groupYear}</td>
+                  <td>{Html.toHtml @Int b.seedingWeight}</td>
+                  <td><details hx-trigger="toggle once" hx-post="snips/redacted/torrentDataJson" hx-vals={Enc.encToBytesUtf8 $ Enc.object [("torrent-id", Enc.int b.torrentId)]}></details></td>
+                  </tr>
+                |]
+            )
+  pure $
+    [hsx|
+        <table class="table">
+          <thead>
+            <tr>
+              <th>Local</th>
+              <th>Group ID</th>
+              <th>Artist</th>
+              <th>Name</th>
+              <th>Year</th>
+              <th>Weight</th>
+              <th>Torrent</th>
+            </tr>
+          </thead>
+          <tbody>
+            {bestRows}
+          </tbody>
+        </table>
+      |]
+
+mkLinkList :: [T2 "url" Text "content" Html] -> Html
+mkLinkList xs =
+  xs
+    <&> ( \x -> do
+            [hsx|<a href={x.url}>{x.content}</a>|]
+        )
+    & List.intersperse ", "
+    & mconcat
+
+getTransmissionTorrentsTable ::
+  (MonadTransmission m, MonadThrow m, MonadLogger m, MonadOtel m) => m Html
+getTransmissionTorrentsTable = do
+  let fields =
+        [ "hashString",
+          "name",
+          "percentDone",
+          "percentComplete",
+          "downloadDir",
+          "files"
+        ]
+  doTransmissionRequest'
+    ( transmissionRequestListAllTorrents fields $ do
+        Json.asObject <&> KeyMap.toMapText
+    )
+    <&> \resp ->
+      Html.toTable
+        ( resp
+            & List.sortOn (\m -> m & Map.lookup "percentDone" & fromMaybe (Json.Number 0))
+            <&> Map.toList
+            -- TODO
+            & List.take 100
+        )
+
+unzip3PGArray :: [(a1, a2, a3)] -> (PGArray a1, PGArray a2, PGArray a3)
+unzip3PGArray xs = xs & unzip3 & \(a, b, c) -> (PGArray a, PGArray b, PGArray c)
+
+assertOneUpdated ::
+  (HasField "numberOfRowsAffected" r Natural, MonadThrow m, MonadIO m) =>
+  Otel.Span ->
+  Text ->
+  r ->
+  m ()
+assertOneUpdated span name x = case x.numberOfRowsAffected of
+  1 -> pure ()
+  n -> appThrowTree span ([fmt|{name :: Text}: Expected to update exactly one row, but updated {n :: Natural} row(s)|])
+
+migrate ::
+  ( MonadPostgres m,
+    MonadOtel m
+  ) =>
+  Transaction m (Label "numberOfRowsAffected" Natural)
+migrate = inSpan "Database Migration" $ do
+  execute
+    [sql|
+    CREATE SCHEMA IF NOT EXISTS redacted;
+
+    CREATE TABLE IF NOT EXISTS redacted.torrent_groups (
+      id SERIAL PRIMARY KEY,
+      group_id INTEGER,
+      group_name TEXT,
+      full_json_result JSONB,
+      UNIQUE(group_id)
+    );
+
+    CREATE TABLE IF NOT EXISTS redacted.torrents_json (
+      id SERIAL PRIMARY KEY,
+      torrent_id INTEGER,
+      torrent_group SERIAL NOT NULL REFERENCES redacted.torrent_groups(id) ON DELETE CASCADE,
+      full_json_result JSONB,
+      UNIQUE(torrent_id)
+    );
+
+    CREATE INDEX IF NOT EXISTS redacted_torrents_json_torrent_group_fk ON redacted.torrents_json (torrent_group);
+
+
+    ALTER TABLE redacted.torrents_json
+    ADD COLUMN IF NOT EXISTS torrent_file bytea NULL;
+    ALTER TABLE redacted.torrents_json
+    ADD COLUMN IF NOT EXISTS transmission_torrent_hash text NULL;
+
+
+    -- the seeding weight is used to find the best torrent in a group.
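+    -- a seeder counts three times as much as a snatch; remasters get an extra 3x boost.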
+    CREATE OR REPLACE FUNCTION calc_seeding_weight(full_json_result jsonb) RETURNS int AS $$
+    BEGIN
+      RETURN
+        ((full_json_result->'seeders')::integer*3
+        + (full_json_result->'snatches')::integer
+        )
+        -- prefer remasters by multiplying their weight by 3
+        * (CASE
+            WHEN full_json_result->>'remasterTitle' ILIKE '%remaster%'
+            THEN 3
+            ELSE 1
+          END);
+    END;
+    $$ LANGUAGE plpgsql IMMUTABLE;
+
+    ALTER TABLE redacted.torrents_json
+    ADD COLUMN IF NOT EXISTS seeding_weight int GENERATED ALWAYS AS (calc_seeding_weight(full_json_result)) STORED;
+
+    -- surface values of the full json result as regular columns
+    CREATE OR REPLACE VIEW redacted.torrents AS
+    SELECT
+      t.id,
+      t.torrent_id,
+      t.torrent_group,
+      -- the seeding weight is used to find the best torrent in a group.
+      t.seeding_weight,
+      t.full_json_result,
+      t.torrent_file,
+      t.transmission_torrent_hash
+    FROM redacted.torrents_json t;
+
+
+    CREATE INDEX IF NOT EXISTS torrents_json_seeding ON redacted.torrents_json(((full_json_result->'seeding')::integer));
+    CREATE INDEX IF NOT EXISTS torrents_json_snatches ON redacted.torrents_json(((full_json_result->'snatches')::integer));
+  |]
+    ()
+
+httpTorrent ::
+  ( MonadIO m,
+    MonadThrow m
+  ) =>
+  Otel.Span ->
+  Http.Request ->
+  m ByteString
+httpTorrent span req =
+  Http.httpBS req
+    >>= assertM
+      span
+      ( \resp -> do
+          let statusCode = resp & Http.responseStatus & (.statusCode)
+              contentType =
+                resp
+                  & Http.responseHeaders
+                  & List.lookup "content-type"
+                  <&> Wai.parseContentType
+                  <&> (\(ct, _mimeAttributes) -> ct)
+          if
+            | statusCode == 200,
+              Just "application/x-bittorrent" <- contentType ->
+                Right $ (resp & Http.responseBody)
+            | statusCode == 200,
+              Just otherType <- contentType ->
+                Left [fmt|Redacted returned a non-torrent body, with content-type "{otherType}"|]
+            | statusCode == 200,
+              Nothing <- contentType ->
+                Left [fmt|Redacted returned a body with unspecified content type|]
+            | code <- statusCode -> Left [fmt|Redacted returned a non-200 error code, code {code}: {resp & showPretty}|]
+      )
+
+runAppWith :: AppT IO a -> IO (Either TmpPg.StartError a)
+runAppWith appT = withTracer $ \tracer -> withDb $ \db -> do
+  tool <- readTools (label @"toolsEnvVar" "WHATCD_RESOLVER_TOOLS") (readTool "pg_format")
+  pgFormat <- initPgFormatPool (label @"pgFormat" tool)
+  let config = label @"logDatabaseQueries" LogDatabaseQueries
+  pgConnPool <-
+    Pool.newPool $
+      Pool.defaultPoolConfig
+        {- resource init action -} (Postgres.connectPostgreSQL (db & TmpPg.toConnectionString))
+        {- resource destruction -} Postgres.close
+        {- unusedResourceOpenTime -} 10
+        {- max resources across all stripes -} 20
+  transmissionSessionId <- newIORef Nothing
+  let newAppT = do
+        logInfo [fmt|Running with config: {showPretty config}|]
+        logInfo [fmt|Connected to database at {db & TmpPg.toDataDirectory} on socket {db & TmpPg.toConnectionString}|]
+        appT
+  runReaderT newAppT.unAppT Context {..}
+
+withTracer :: (Otel.Tracer -> IO c) -> IO c
+withTracer f = do
+  setDefaultEnv "OTEL_SERVICE_NAME" "whatcd-resolver"
+  bracket
+    -- Install the SDK, pulling configuration from the environment
+    ( do
+        (processors, opts) <- Otel.getTracerProviderInitializationOptions
+        tp <-
+          Otel.createTracerProvider
+            processors
+            -- workaround the attribute length bug https://github.com/iand675/hs-opentelemetry/issues/113
+            ( opts
+                { Otel.tracerProviderOptionsAttributeLimits =
+                    opts.tracerProviderOptionsAttributeLimits
+                      { Otel.attributeCountLimit = Just 65_000
+                      }
+                }
+            )
+        Otel.setGlobalTracerProvider tp
+        pure tp
+    )
+    -- Ensure that any spans that haven't been exported yet are flushed
+    Otel.shutdownTracerProvider
+    -- Get a tracer so you can create spans
+    (\tracerProvider -> f $ Otel.makeTracer tracerProvider "whatcd-resolver" Otel.tracerOptions)
+
+setDefaultEnv :: String -> String -> IO ()
+setDefaultEnv envName defaultValue = do
+  Env.lookupEnv envName >>= \case
+    Just _env -> pure ()
+    Nothing -> Env.setEnv envName defaultValue
+
+withDb :: (TmpPg.DB -> IO a) -> IO (Either TmpPg.StartError a)
+withDb act = do
+  dataDir <- Xdg.getXdgDirectory Xdg.XdgData "whatcd-resolver"
+  let databaseDir = dataDir </> "database"
+  let socketDir = dataDir </> "database-socket"
+  Dir.createDirectoryIfMissing True socketDir
+  initDbConfig <-
+    Dir.doesDirectoryExist databaseDir >>= \case
+      True -> pure TmpPg.Zlich
+      False -> do
+        putStderrLn [fmt|Database does not exist yet, creating in "{databaseDir}"|]
+        Dir.createDirectoryIfMissing True databaseDir
+        pure TmpPg.DontCare
+  let cfg =
+        mempty
+          { TmpPg.dataDirectory = TmpPg.Permanent (databaseDir),
+            TmpPg.socketDirectory = TmpPg.Permanent socketDir,
+            TmpPg.port = pure $ Just 5431,
+            TmpPg.initDbConfig
+          }
+  TmpPg.withConfig cfg $ \db -> do
+    -- print [fmt|data dir: {db & TmpPg.toDataDirectory}|]
+    -- print [fmt|conn string: {db & TmpPg.toConnectionString}|]
+    act db
diff --git a/users/Profpatsch/whatcd-resolver/whatcd-resolver.cabal b/users/Profpatsch/whatcd-resolver/whatcd-resolver.cabal
new file mode 100644
index 0000000000..8b3258bb5f
--- /dev/null
+++ b/users/Profpatsch/whatcd-resolver/whatcd-resolver.cabal
@@ -0,0 +1,125 @@
+cabal-version:      3.0
+name:               whatcd-resolver
+version:            0.1.0.0
+author:             Profpatsch
+maintainer:         mail@profpatsch.de
+
+common common-options
+  ghc-options:
+      -Wall
+      -Wno-type-defaults
+      -Wunused-packages
+      -Wredundant-constraints
+      -fwarn-missing-deriving-strategies
+
+  -- See https://downloads.haskell.org/ghc/latest/docs/users_guide/exts.html
+  -- for a description of all these extensions
+  default-extensions:
+      -- Infer Applicative instead of Monad where possible
+    ApplicativeDo
+
+    -- Allow literal strings to be Text
+    OverloadedStrings
+
+    -- Syntactic sugar improvements
+    LambdaCase
+    MultiWayIf
+
+    -- Makes the (deprecated) usage of * instead of Data.Kind.Type an error
+    NoStarIsType
+
+    -- Convenient and crucial to deal with ambiguous field names, commonly
+    -- known as RecordDotSyntax
+    OverloadedRecordDot
+
+    -- does not export record fields as functions, use OverloadedRecordDot to access instead
+    NoFieldSelectors
+
+    -- Allow the same record field name to be declared twice per module.
+    -- This works, because we use `OverloadedRecordDot` everywhere (enforced by `NoFieldSelectors`).
+    DuplicateRecordFields
+
+    -- Record punning
+    RecordWildCards
+
+    -- Improved Deriving
+    DerivingStrategies
+    DerivingVia
+
+    -- Type-level strings
+    DataKinds
+
+    -- Enable the `type` keyword in import lists (ormolu uses this automatically)
+    ExplicitNamespaces
+
+    -- Allows defining pattern synonyms, and also enables the `import Foo (pattern FooPattern)` import syntax
+    PatternSynonyms
+
+  default-language: GHC2021
+
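+-- A short sketch of how the record extensions above combine (hypothetical
+-- module, not part of this package): two records may share a field name,
+-- and fields are read with dot syntax instead of selector functions:
+--
+--   data Torrent = Torrent { id :: Int }
+--   data Artist  = Artist  { id :: Int }
+--
+--   torrentId :: Torrent -> Int
+--   torrentId t = t.id
+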
+library
+    import: common-options
+
+    hs-source-dirs: src
+
+    exposed-modules:
+       WhatcdResolver
+       AppT
+       JsonLd
+       Optional
+       Http
+       Html
+       Transmission
+       Redacted
+
+    build-depends:
+        base >=4.15 && <5,
+        text,
+        my-prelude,
+        my-webstuff,
+        pa-prelude,
+        pa-error-tree,
+        pa-label,
+        pa-json,
+        pa-field-parser,
+        pa-run-command,
+        aeson-better-errors,
+        aeson,
+        blaze-html,
+        bytestring,
+        case-insensitive,
+        containers,
+        unordered-containers,
+        directory,
+        exceptions,
+        filepath,
+        hs-opentelemetry-sdk,
+        hs-opentelemetry-api,
+        http-conduit,
+        http-types,
+        http-client,
+        ihp-hsx,
+        monad-logger,
+        mtl,
+        network-uri,
+        resource-pool,
+        postgresql-simple,
+        punycode,
+        tmp-postgres,
+        unliftio,
+        wai-extra,
+        wai,
+        warp,
+
+executable whatcd-resolver
+    import: common-options
+
+    main-is: Main.hs
+
+    build-depends:
+        base >=4.15 && <5,
+        whatcd-resolver
+
+    ghc-options:
+      -threaded
+
diff --git a/users/Profpatsch/writers/default.nix b/users/Profpatsch/writers/default.nix
index 02f39da02d..9fb69231a1 100644
--- a/users/Profpatsch/writers/default.nix
+++ b/users/Profpatsch/writers/default.nix
@@ -15,12 +15,19 @@ let
       string;
   Libraries = defun [ (attrs any) (list drv) ];
 
+  pythonPackages = pkgs.python310Packages;
+  buildPythonPackages = pkgs.buildPackages.python310Packages;
+  python = pythonPackages.python;
+
   python3 =
     { name
     , libraries ? (_: [ ])
     , flakeIgnore ? [ ]
-    }: pkgs.writers.writePython3 name {
-      libraries = Libraries libraries pkgs.python3Packages;
+    }:
+    pkgs.writers.makePythonWriter python pythonPackages buildPythonPackages name {
+      libraries = Libraries libraries pythonPackages;
       flakeIgnore =
         let
           ignoreTheseErrors = [
@@ -37,6 +44,10 @@ let
             # … between functions
             "E302"
             "E305"
+            # … if there are too many of them
+            "E303"
+            # or lines that are too long
+            "E501"
           ];
         in
         list FlakeError (ignoreTheseErrors ++ flakeIgnore);
@@ -80,18 +91,30 @@ let
         ]
       ];
     in
-    pkgs.python3Packages.buildPythonPackage {
+    pythonPackages.buildPythonPackage {
       inherit name;
       src = srcTree;
-      propagatedBuildInputs = libraries pkgs.python3Packages;
+      propagatedBuildInputs = libraries pythonPackages;
       doCheck = false;
     };
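+  # A minimal usage sketch of the python3 writer above (hypothetical name
+  # and library choice):
+  #
+  #   python3 { name = "hello"; libraries = p: [ p.requests ]; } ''
+  #     print("hello")
+  #   ''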
 
 
+  ghcBins = libraries: depot.nix.getBins (pkgs.ghc.withPackages (_: libraries)) [ "runghc" ];
+
+  writeHaskellInteractive = name: { libraries, ghcArgs ? [ ] }: path:
+    depot.nix.writeExecline name { } ([
+      (ghcBins libraries).runghc
+      "--"
+    ] ++ ghcArgs ++ [
+      "--"
+      path
+    ]);
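+
+  # A minimal usage sketch (hypothetical file name and library choice):
+  #
+  #   writeHaskellInteractive "hello" { libraries = [ pkgs.haskellPackages.aeson ]; } ./hello.hs
+  #
+  # where ./hello.hs is any runghc-able Haskell script, e.g.
+  #
+  #   main :: IO ()
+  #   main = putStrLn "hello"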
+
 in
 {
   inherit
     python3
     python3Lib
+    writeHaskellInteractive
     ;
 }
diff --git a/users/Profpatsch/writers/tests/default.nix b/users/Profpatsch/writers/tests/default.nix
index d0d62d3b0e..879aae82f7 100644
--- a/users/Profpatsch/writers/tests/default.nix
+++ b/users/Profpatsch/writers/tests/default.nix
@@ -45,7 +45,7 @@ let
     } ''
     import test_lib
 
-    assert(test_lib.test() == "test 1 2 3")
+    assert test_lib.test() == "test 1 2 3"
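+    # `assert` is a statement, not a function, so it takes no call parentheses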
   '');
 
 in
diff --git a/users/Profpatsch/ytextr/README.md b/users/Profpatsch/ytextr/README.md
new file mode 100644
index 0000000000..f1e40d8e68
--- /dev/null
+++ b/users/Profpatsch/ytextr/README.md
@@ -0,0 +1,5 @@
+# ytextr
+
+Wrapper around `yt-dlp` that downloads videos with sensible default quality and settings.
+
+It always downloads the most up-to-date `yt-dlp` first, because `yt-dlp` tends to stop working after a few weeks and needs an update, so the version pinned in `<nixpkgs>` often fails.
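+
+Usage (hypothetical invocation; all arguments are passed on to `yt-dlp`, so several URLs can be given at once):
+
+    ytextr <url> [<url>…]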
diff --git a/users/Profpatsch/ytextr/default.nix b/users/Profpatsch/ytextr/default.nix
index ac630603b9..3f3f073113 100644
--- a/users/Profpatsch/ytextr/default.nix
+++ b/users/Profpatsch/ytextr/default.nix
@@ -54,7 +54,7 @@ in
 nix-run-with-channel {
   channel = "nixos-unstable";
   packageNamesAtRuntime = [ "yt-dlp" ];
-  executable = depot.nix.writeExecline "ytextr" { readNArgs = 1; } [
+  executable = depot.nix.writeExecline "ytextr" { } [
     "getcwd"
     "-E"
     "cwd"
@@ -77,6 +77,6 @@ nix-run-with-channel {
     "mkv"
     "-f"
     "bestvideo[height<=?1080]+bestaudio/best"
-    "$1"
+    "$@"
   ];
 }