Diffstat (limited to 'users/Profpatsch/my-prelude')
-rw-r--r--   users/Profpatsch/my-prelude/README.md                        42
-rw-r--r--   users/Profpatsch/my-prelude/default.nix                      48
-rw-r--r--   users/Profpatsch/my-prelude/my-prelude.cabal                107
-rw-r--r--   users/Profpatsch/my-prelude/src/Aeson.hs                    176
-rw-r--r--   users/Profpatsch/my-prelude/src/AtLeast.hs                   51
-rw-r--r--   users/Profpatsch/my-prelude/src/MyPrelude.hs                587
-rw-r--r--   users/Profpatsch/my-prelude/src/Postgres/Decoder.hs          94
-rw-r--r--   users/Profpatsch/my-prelude/src/Postgres/MonadPostgres.hs   583
-rw-r--r--   users/Profpatsch/my-prelude/src/Seconds.hs                   55
-rw-r--r--   users/Profpatsch/my-prelude/src/Test.hs                     115
-rw-r--r--   users/Profpatsch/my-prelude/src/Tool.hs                      75
-rw-r--r--   users/Profpatsch/my-prelude/src/ValidationParseT.hs          16
12 files changed, 1949 insertions, 0 deletions
diff --git a/users/Profpatsch/my-prelude/README.md b/users/Profpatsch/my-prelude/README.md
new file mode 100644
index 000000000000..2cc068579a52
--- /dev/null
+++ b/users/Profpatsch/my-prelude/README.md
@@ -0,0 +1,42 @@
+# My Haskell Prelude
+
+Contains various modules I’ve found useful when writing Haskell.
+
+## Contents
+
+A short overview:
+
+### `MyPrelude.hs`
+
+A collection of re-exports and extra functions. This does *not* replace the `Prelude` module from `base`, but rather should be imported *in addition* to `Prelude`.
+
+Bad functions from prelude (partial functions, or plain horrible stuff) are handled by a custom hlint config, which you can find in [`../.hlint.yaml`](../.hlint.yaml).
+
+The common style of Haskell these modules try to enable is what I call “left-to-right Haskell”,
+where one mostly prefers forward-chaining operators like `&`/`<&>`/`>>=` over backwards operators like `$`/`<$>`/`<=<`. In addition, all transformation functions should follow the naming scheme `aToB` instead of `B.fromA`, e.g. `Text.unpack`/`Text.pack` -> `textToString`/`stringToText`. The module includes a bunch of text conversion functions one needs all the time, in the same style.
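+
+A small sketch of that style (purely illustrative; `parseNumbers` is a made-up example):
+
+```haskell
+import MyPrelude
+import qualified Text.Read
+
+-- Keep the inputs that parse as numbers and render them back to Text,
+-- reading left to right with `&`/`<&>` and `aToB`-style conversions.
+parseNumbers :: [String] -> [Text]
+parseNumbers inputs =
+  inputs
+    & mapMaybe (Text.Read.readMaybe @Int)
+    <&> showToText
+```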
+
+These have been battle-tested in a production codebase of ~30k lines of Haskell.
+
+### `Label.hs`
+
+A very useful collection of anonymous labelled tuples and enums of size 2 and 3. Assumes at least GHC 9.2 for `RecordDotSyntax` support.
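+
+A rough usage sketch (the `userName` label is made up):
+
+```haskell
+import Label
+
+user :: Label "userName" Text
+user = label @"userName" "Jane"
+
+-- with OverloadedRecordDot the wrapped value is accessed via its label:
+greeting :: Text
+greeting = "Hello " <> user.userName
+```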
+
+### `Pretty.hs`
+
+Colorful multiline pretty-printing of Haskell values.
+
+### `Test.hs`
+
+A wrapper around `hspec` which produces colorful test diffs.
+
+### `Aeson.hs`
+
+Helpers around JSON parsing.
+
+### `Data.Error.Tree`
+
+Collect errors (from [`Data.Error`](https://hackage.haskell.org/package/error-1.0.0.0/docs/Data-Error.html)) into a tree, then display them in a nested fashion. Super useful for e.g. collecting and displaying nested parsing errors.
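+
+A small sketch (the config errors are invented; assumes the prelude above for `Text`/`NonEmpty`):
+
+```haskell
+import Data.Error (newError)
+import Data.Error.Tree
+
+-- Collect a context message and the individual failures into one tree …
+configError :: ErrorTree
+configError =
+  errorTree
+    (newError "Could not read config")
+    (newError "missing key \"port\"" :| [newError "invalid key \"host\""])
+
+-- … then render everything as one nested report.
+report :: Text
+report = prettyErrorTree configError
+```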
+
+### `RunCommand.hs`
+
+A module wrapping the process API with some helpful defaults for executing commands and printing what is executed to stderr.
diff --git a/users/Profpatsch/my-prelude/default.nix b/users/Profpatsch/my-prelude/default.nix
new file mode 100644
index 000000000000..5ed68026db50
--- /dev/null
+++ b/users/Profpatsch/my-prelude/default.nix
@@ -0,0 +1,48 @@
+{ depot, pkgs, lib, ... }:
+
+pkgs.haskellPackages.mkDerivation {
+  pname = "my-prelude";
+  version = "0.0.1-unreleased";
+
+  src = depot.users.Profpatsch.exactSource ./. [
+    ./my-prelude.cabal
+    ./src/Aeson.hs
+    ./src/AtLeast.hs
+    ./src/MyPrelude.hs
+    ./src/Test.hs
+    ./src/Seconds.hs
+    ./src/Tool.hs
+    ./src/ValidationParseT.hs
+    ./src/Postgres/Decoder.hs
+    ./src/Postgres/MonadPostgres.hs
+  ];
+
+  isLibrary = true;
+
+  libraryHaskellDepends = [
+    pkgs.haskellPackages.pa-prelude
+    pkgs.haskellPackages.pa-label
+    pkgs.haskellPackages.pa-error-tree
+    pkgs.haskellPackages.pa-json
+    pkgs.haskellPackages.pa-pretty
+    pkgs.haskellPackages.pa-field-parser
+    pkgs.haskellPackages.aeson-better-errors
+    pkgs.haskellPackages.resource-pool
+    pkgs.haskellPackages.error
+    pkgs.haskellPackages.hs-opentelemetry-api
+    pkgs.haskellPackages.hspec
+    pkgs.haskellPackages.hspec-expectations-pretty-diff
+    pkgs.haskellPackages.monad-logger
+    pkgs.haskellPackages.postgresql-simple
+    pkgs.haskellPackages.profunctors
+    pkgs.haskellPackages.PyF
+    pkgs.haskellPackages.semigroupoids
+    pkgs.haskellPackages.these
+    pkgs.haskellPackages.unliftio
+    pkgs.haskellPackages.validation-selective
+    pkgs.haskellPackages.vector
+  ];
+
+  license = lib.licenses.mit;
+
+}
diff --git a/users/Profpatsch/my-prelude/my-prelude.cabal b/users/Profpatsch/my-prelude/my-prelude.cabal
new file mode 100644
index 000000000000..c811c00e0adf
--- /dev/null
+++ b/users/Profpatsch/my-prelude/my-prelude.cabal
@@ -0,0 +1,107 @@
+cabal-version:      3.0
+name:               my-prelude
+version:            0.0.1.0
+author:             Profpatsch
+maintainer:         mail@profpatsch.de
+
+common common-options
+  ghc-options:
+      -Wall
+      -Wno-type-defaults
+      -Wunused-packages
+      -Wredundant-constraints
+      -fwarn-missing-deriving-strategies
+
+  -- See https://downloads.haskell.org/ghc/latest/docs/users_guide/exts.html
+  -- for a description of all these extensions
+  default-extensions:
+      -- Infer Applicative instead of Monad where possible
+    ApplicativeDo
+
+    -- Allow literal strings to be Text
+    OverloadedStrings
+
+    -- Syntactic sugar improvements
+    LambdaCase
+    MultiWayIf
+
+    -- Makes the (deprecated) usage of * instead of Data.Kind.Type an error
+    NoStarIsType
+
+    -- Convenient and crucial to deal with ambiguous field names, commonly
+    -- known as RecordDotSyntax
+    OverloadedRecordDot
+
+    -- does not export record fields as functions, use OverloadedRecordDot to access instead
+    NoFieldSelectors
+
+    -- Record punning
+    RecordWildCards
+
+    -- Improved Deriving
+    DerivingStrategies
+    DerivingVia
+
+    -- Type-level strings
+    DataKinds
+
+    -- to enable the `type` keyword in import lists (ormolu uses this automatically)
+    ExplicitNamespaces
+
+  default-language: GHC2021
+
+
+library
+    import: common-options
+    hs-source-dirs: src
+    exposed-modules:
+      MyPrelude
+      Aeson
+      AtLeast
+      Test
+      Postgres.Decoder
+      Postgres.MonadPostgres
+      ValidationParseT
+      Seconds
+      Tool
+
+    -- Modules included in this executable, other than Main.
+    -- other-modules:
+
+    -- LANGUAGE extensions used by modules in this package.
+    -- other-extensions:
+    build-depends:
+       base >=4.15 && <5
+     , pa-prelude
+     , pa-label
+     , pa-error-tree
+     , pa-json
+     , pa-pretty
+     , pa-field-parser
+     , aeson
+     , aeson-better-errors
+     , bytestring
+     , containers
+     , resource-pool
+     , resourcet
+     , scientific
+     , time
+     , error
+     , exceptions
+     , filepath
+     , hspec
+     , hspec-expectations-pretty-diff
+     , hs-opentelemetry-api
+     , monad-logger
+     , mtl
+     , postgresql-simple
+     , profunctors
+     , PyF
+     , semigroupoids
+     , selective
+     , text
+     , these
+     , unix
+     , unliftio
+     , validation-selective
+     , vector
diff --git a/users/Profpatsch/my-prelude/src/Aeson.hs b/users/Profpatsch/my-prelude/src/Aeson.hs
new file mode 100644
index 000000000000..73d611608224
--- /dev/null
+++ b/users/Profpatsch/my-prelude/src/Aeson.hs
@@ -0,0 +1,176 @@
+{-# LANGUAGE DataKinds #-}
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE FlexibleInstances #-}
+{-# LANGUAGE GHC2021 #-}
+{-# LANGUAGE KindSignatures #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+{-# LANGUAGE UndecidableInstances #-}
+
+module Aeson where
+
+import Data.Aeson (Value (..))
+import Data.Aeson.BetterErrors qualified as Json
+import Data.Aeson.KeyMap qualified as KeyMap
+import Data.Error.Tree
+import Data.Maybe (catMaybes)
+import Data.Vector qualified as Vector
+import Label
+import PossehlAnalyticsPrelude
+import Test.Hspec (describe, it, shouldBe)
+import Test.Hspec qualified as Hspec
+
+-- | Convert a 'Json.ParseError' to a corresponding 'ErrorTree'
+parseErrorTree :: Error -> Json.ParseError Error -> ErrorTree
+parseErrorTree contextMsg errs =
+  errs
+    & Json.displayError prettyError
+    <&> newError
+    & nonEmpty
+    & \case
+      Nothing -> singleError contextMsg
+      Just errs' -> errorTree contextMsg errs'
+
+-- | Parse a key from the object, à la 'Json.key', return a labelled value.
+--
+-- We don’t provide a version that infers the json object key,
+-- since that conflates internal naming with the external API, which is dangerous.
+--
+-- @
+-- do
+--   txt <- keyLabel @"myLabel" "jsonKeyName" Json.asText
+--   pure (txt :: Label "myLabel" Text)
+-- @
+keyLabel ::
+  forall label err m a.
+  Monad m =>
+  Text ->
+  Json.ParseT err m a ->
+  Json.ParseT err m (Label label a)
+keyLabel = do
+  keyLabel' (Proxy @label)
+
+-- | Parse a key from the object, à la 'Json.key', return a labelled value.
+-- Version of 'keyLabel' that requires a proxy.
+--
+-- @
+-- do
+--   txt <- keyLabel' (Proxy @"myLabel") "jsonKeyName" Json.asText
+--   pure (txt :: Label "myLabel" Text)
+-- @
+keyLabel' ::
+  forall label err m a.
+  Monad m =>
+  Proxy label ->
+  Text ->
+  Json.ParseT err m a ->
+  Json.ParseT err m (Label label a)
+keyLabel' Proxy key parser = label @label <$> Json.key key parser
+
+-- | Parse an optional key from the object, à la 'Json.keyMay', return a labelled value.
+--
+-- We don’t provide a version that infers the json object key,
+-- since that conflates internal naming with the external API, which is dangerous.
+--
+-- @
+-- do
+--   txt <- keyLabelMay @"myLabel" "jsonKeyName" Json.asText
+--   pure (txt :: Label "myLabel" (Maybe Text))
+-- @
+keyLabelMay ::
+  forall label err m a.
+  Monad m =>
+  Text ->
+  Json.ParseT err m a ->
+  Json.ParseT err m (Label label (Maybe a))
+keyLabelMay = do
+  keyLabelMay' (Proxy @label)
+
+-- | Parse an optional key from the object, à la 'Json.keyMay', return a labelled value.
+-- Version of 'keyLabelMay' that requires a proxy.
+--
+-- @
+-- do
+--   txt <- keyLabelMay' (Proxy @"myLabel") "jsonKeyName" Json.asText
+--   pure (txt :: Label "myLabel" (Maybe Text))
+-- @
+keyLabelMay' ::
+  forall label err m a.
+  Monad m =>
+  Proxy label ->
+  Text ->
+  Json.ParseT err m a ->
+  Json.ParseT err m (Label label (Maybe a))
+keyLabelMay' Proxy key parser = label @label <$> Json.keyMay key parser
+
+-- | Like 'Json.key', but allows a list of keys that are tried in order.
+--
+-- This is intended for renaming keys in an object.
+-- The first key is the most up-to-date version of a key, the others are for backward-compatibility.
+--
+-- If a key (new or old) exists, the inner parser will always be executed for that key.
+keyRenamed :: Monad m => NonEmpty Text -> Json.ParseT err m a -> Json.ParseT err m a
+keyRenamed (newKey :| oldKeys) inner =
+  keyRenamedTryOldKeys oldKeys inner >>= \case
+    Nothing -> Json.key newKey inner
+    Just parse -> parse
+
+-- | Like 'Json.keyMay', but allows a list of keys that are tried in order.
+--
+-- This is intended for renaming keys in an object.
+-- The first key is the most up-to-date version of a key, the others are for backward-compatibility.
+--
+-- If a key (new or old) exists, the inner parser will always be executed for that key.
+keyRenamedMay :: Monad m => NonEmpty Text -> Json.ParseT err m a -> Json.ParseT err m (Maybe a)
+keyRenamedMay (newKey :| oldKeys) inner =
+  keyRenamedTryOldKeys oldKeys inner >>= \case
+    Nothing -> Json.keyMay newKey inner
+    Just parse -> Just <$> parse
+
+-- | Helper function for 'keyRenamed' and 'keyRenamedMay' that returns the parser for the first old key that exists, if any.
+keyRenamedTryOldKeys :: Monad m => [Text] -> Json.ParseT err m a -> Json.ParseT err m (Maybe (Json.ParseT err m a))
+keyRenamedTryOldKeys oldKeys inner = do
+  oldKeys & traverse tryOld <&> catMaybes <&> nonEmpty <&> \case
+    Nothing -> Nothing
+    Just (old :| _moreOld) -> Just old
+  where
+    tryOld key =
+      Json.keyMay key (pure ()) <&> \case
+        Just () -> Just $ Json.key key inner
+        Nothing -> Nothing
+
+test_keyRenamed :: Hspec.Spec
+test_keyRenamed = do
+  describe "keyRenamed" $ do
+    let parser = keyRenamed ("new" :| ["old"]) Json.asText
+    let p = Json.parseValue @() parser
+    it "accepts the new key and the old key" $ do
+      p (Object (KeyMap.singleton "new" (String "text")))
+        `shouldBe` (Right "text")
+      p (Object (KeyMap.singleton "old" (String "text")))
+        `shouldBe` (Right "text")
+    it "fails with the old key in the error if the inner parser is wrong" $ do
+      p (Object (KeyMap.singleton "old" Null))
+        `shouldBe` (Left (Json.BadSchema [Json.ObjectKey "old"] (Json.WrongType Json.TyString Null)))
+    it "fails with the new key in the error if the inner parser is wrong" $ do
+      p (Object (KeyMap.singleton "new" Null))
+        `shouldBe` (Left (Json.BadSchema [Json.ObjectKey "new"] (Json.WrongType Json.TyString Null)))
+    it "fails if the key is missing" $ do
+      p (Object KeyMap.empty)
+        `shouldBe` (Left (Json.BadSchema [] (Json.KeyMissing "new")))
+  describe "keyRenamedMay" $ do
+    let parser = keyRenamedMay ("new" :| ["old"]) Json.asText
+    let p = Json.parseValue @() parser
+    it "accepts the new key and the old key" $ do
+      p (Object (KeyMap.singleton "new" (String "text")))
+        `shouldBe` (Right (Just "text"))
+      p (Object (KeyMap.singleton "old" (String "text")))
+        `shouldBe` (Right (Just "text"))
+    it "allows the old and new key to be missing" $ do
+      p (Object KeyMap.empty)
+        `shouldBe` (Right Nothing)
+
+-- | Create a json array from a list of json values.
+jsonArray :: [Value] -> Value
+jsonArray xs = xs & Vector.fromList & Array
diff --git a/users/Profpatsch/my-prelude/src/AtLeast.hs b/users/Profpatsch/my-prelude/src/AtLeast.hs
new file mode 100644
index 000000000000..3857c3a7cfe7
--- /dev/null
+++ b/users/Profpatsch/my-prelude/src/AtLeast.hs
@@ -0,0 +1,51 @@
+{-# LANGUAGE QuasiQuotes #-}
+
+module AtLeast where
+
+import Data.Aeson (FromJSON (parseJSON))
+import Data.Aeson.BetterErrors qualified as Json
+import FieldParser (FieldParser)
+import FieldParser qualified as Field
+import GHC.Records (HasField (..))
+import GHC.TypeLits (KnownNat, natVal)
+import PossehlAnalyticsPrelude
+  ( Natural,
+    Proxy (Proxy),
+    fmt,
+    prettyError,
+    (&),
+  )
+
+-- | A natural number that must be at least as big as the type literal.
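+--
+-- For example (a sketch; the error wording comes from 'parseAtLeast' below):
+--
+-- @
+-- Field.runFieldParser (parseAtLeast @1 @Int) 5   -- Right (AtLeast 5)
+-- Field.runFieldParser (parseAtLeast @1 @Int) 0   -- fails: "Must be at least 1 but was 0"
+-- @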
+newtype AtLeast (min :: Natural) num = AtLeast num
+  -- Just use the instances of the wrapped number type
+  deriving newtype (Eq, Show)
+
+-- | This is the “destructor” for `AtLeast`; because of the phantom type (@min@) it cannot be inferred automatically.
+instance HasField "unAtLeast" (AtLeast min num) num where
+  getField (AtLeast num) = num
+
+parseAtLeast ::
+  forall min num.
+  (KnownNat min, Integral num, Show num) =>
+  FieldParser num (AtLeast min num)
+parseAtLeast =
+  let minInt = natVal (Proxy @min)
+   in Field.FieldParser $ \from ->
+        if from >= (minInt & fromIntegral)
+          then Right (AtLeast from)
+          else Left [fmt|Must be at least {minInt & show} but was {from & show}|]
+
+instance
+  (KnownNat min, FromJSON num, Integral num, Bounded num, Show num) =>
+  FromJSON (AtLeast min num)
+  where
+  parseJSON =
+    Json.toAesonParser
+      prettyError
+      ( do
+          num <- Json.fromAesonParser @_ @num
+          case Field.runFieldParser (parseAtLeast @min @num) num of
+            Left err -> Json.throwCustomError err
+            Right a -> pure a
+      )
diff --git a/users/Profpatsch/my-prelude/src/MyPrelude.hs b/users/Profpatsch/my-prelude/src/MyPrelude.hs
new file mode 100644
index 000000000000..1be248d091a9
--- /dev/null
+++ b/users/Profpatsch/my-prelude/src/MyPrelude.hs
@@ -0,0 +1,587 @@
+{-# LANGUAGE ImplicitParams #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE MagicHash #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE PolyKinds #-}
+{-# LANGUAGE RankNTypes #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+{-# OPTIONS_GHC -fexpose-all-unfoldings #-}
+
+module MyPrelude
+  ( -- * Text conversions
+    Text,
+    ByteString,
+    Word8,
+    fmt,
+    textToString,
+    stringToText,
+    showToText,
+    textToBytesUtf8,
+    textToBytesUtf8Lazy,
+    bytesToTextUtf8,
+    bytesToTextUtf8Lazy,
+    bytesToTextUtf8Lenient,
+    bytesToTextUtf8LenientLazy,
+    bytesToTextUtf8Unsafe,
+    bytesToTextUtf8UnsafeLazy,
+    toStrict,
+    toLazy,
+    toStrictBytes,
+    toLazyBytes,
+    charToWordUnsafe,
+
+    -- * IO
+    putStrLn,
+    putStderrLn,
+    exitWithMessage,
+
+    -- * WIP code
+    todo,
+
+    -- * Records
+    HasField,
+
+    -- * Control flow
+    (&),
+    (<&>),
+    (<|>),
+    foldMap1,
+    foldMap',
+    join,
+    when,
+    unless,
+    guard,
+    ExceptT (..),
+    runExceptT,
+    MonadThrow,
+    throwM,
+    MonadIO,
+    liftIO,
+    MonadReader,
+    asks,
+    Bifunctor,
+    first,
+    second,
+    bimap,
+    both,
+    foldMap,
+    fold,
+    foldl',
+    fromMaybe,
+    mapMaybe,
+    findMaybe,
+    Traversable,
+    for,
+    for_,
+    traverse,
+    traverse_,
+    traverseFold,
+    traverseFold1,
+    traverseFoldDefault,
+    MonadTrans,
+    lift,
+
+    -- * Data types
+    Coercible,
+    coerce,
+    Proxy (Proxy),
+    Map,
+    annotate,
+    Validation (Success, Failure),
+    failure,
+    successes,
+    failures,
+    eitherToValidation,
+    eitherToListValidation,
+    validationToEither,
+    These (This, That, These),
+    eitherToThese,
+    eitherToListThese,
+    validationToThese,
+    thenThese,
+    thenValidate,
+    NonEmpty ((:|)),
+    singleton,
+    nonEmpty,
+    nonEmptyDef,
+    toList,
+    toNonEmptyDefault,
+    maximum1,
+    minimum1,
+    Generic,
+    Semigroup,
+    sconcat,
+    Monoid,
+    mconcat,
+    ifTrue,
+    ifExists,
+    Void,
+    absurd,
+    Identity (Identity, runIdentity),
+    Natural,
+    intToNatural,
+    Contravariant,
+    contramap,
+    (>$<),
+    (>&<),
+    Profunctor,
+    dimap,
+    lmap,
+    rmap,
+    Semigroupoid,
+    Category,
+    (>>>),
+    (&>>),
+
+    -- * Enum definition
+    inverseFunction,
+    inverseMap,
+
+    -- * Error handling
+    HasCallStack,
+    module Data.Error,
+  )
+where
+
+import Control.Applicative ((<|>))
+import Control.Category (Category, (>>>))
+import Control.Monad (guard, join, unless, when)
+import Control.Monad.Catch (MonadThrow (throwM))
+import Control.Monad.Except
+  ( ExceptT (..),
+    runExceptT,
+  )
+import Control.Monad.IO.Class (MonadIO, liftIO)
+import Control.Monad.Identity (Identity (Identity))
+import Control.Monad.Reader (MonadReader, asks)
+import Control.Monad.Trans (MonadTrans (lift))
+import Data.Bifunctor (Bifunctor, bimap, first, second)
+import Data.ByteString
+  ( ByteString,
+  )
+import Data.ByteString.Lazy qualified
+import Data.Char qualified
+import Data.Coerce (Coercible, coerce)
+import Data.Data (Proxy (Proxy))
+import Data.Error
+import Data.Foldable (Foldable (foldMap', toList), fold, foldl', for_, traverse_)
+import Data.Foldable qualified as Foldable
+import Data.Function ((&))
+import Data.Functor ((<&>))
+import Data.Functor.Contravariant (Contravariant (contramap), (>$<))
+import Data.Functor.Identity (Identity (runIdentity))
+import Data.List.NonEmpty (NonEmpty ((:|)), nonEmpty)
+import Data.Map.Strict
+  ( Map,
+  )
+import Data.Map.Strict qualified as Map
+import Data.Maybe (fromMaybe, mapMaybe)
+import Data.Maybe qualified as Maybe
+import Data.Profunctor (Profunctor, dimap, lmap, rmap)
+import Data.Semigroup (Max (Max, getMax), Min (Min, getMin), sconcat)
+import Data.Semigroup.Foldable (Foldable1 (fold1), foldMap1)
+import Data.Semigroup.Traversable (Traversable1)
+import Data.Semigroupoid (Semigroupoid (o))
+import Data.Text
+  ( Text,
+  )
+import Data.Text qualified
+import Data.Text.Encoding qualified
+import Data.Text.Encoding.Error qualified
+import Data.Text.Lazy qualified
+import Data.Text.Lazy.Encoding qualified
+import Data.These (These (That, These, This))
+import Data.Traversable (for)
+import Data.Void (Void, absurd)
+import Data.Word (Word8)
+import GHC.Exception (errorCallWithCallStackException)
+import GHC.Exts (RuntimeRep, TYPE, raise#)
+import GHC.Generics (Generic)
+import GHC.Natural (Natural)
+import GHC.Records (HasField)
+import GHC.Stack (HasCallStack)
+import PyF (fmt)
+import System.Exit qualified
+import System.IO qualified
+import Validation
+  ( Validation (Failure, Success),
+    eitherToValidation,
+    failure,
+    failures,
+    successes,
+    validationToEither,
+  )
+
+-- | Forward-applying 'contramap', like '&'/'$' and '<&>'/'<$>' but for '>$<'.
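+--
+-- A small (illustrative) example:
+--
+-- >>> import Data.Functor.Contravariant (Predicate (..))
+-- >>> getPredicate (Predicate (> 3) >&< length) "hello"
+-- True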
+(>&<) :: Contravariant f => f b -> (a -> b) -> f a
+(>&<) = flip contramap
+
+infixl 5 >&<
+
+-- | Forward semigroupoid application. The same as '(>>>)', but 'Semigroupoid' is not a superclass of 'Category' (yet).
+--
+-- Specialized examples:
+--
+-- @
+-- for functions : (a -> b) -> (b -> c) -> (a -> c)
+-- for Folds: Fold a b -> Fold b c -> Fold a c
+-- @
+(&>>) :: Semigroupoid s => s a b -> s b c -> s a c
+(&>>) = flip Data.Semigroupoid.o
+
+-- like >>>
+infixr 1 &>>
+
+-- | encode a Text to a UTF-8 encoded Bytestring
+textToBytesUtf8 :: Text -> ByteString
+textToBytesUtf8 = Data.Text.Encoding.encodeUtf8
+
+-- | encode a lazy Text to a UTF-8 encoded lazy Bytestring
+textToBytesUtf8Lazy :: Data.Text.Lazy.Text -> Data.ByteString.Lazy.ByteString
+textToBytesUtf8Lazy = Data.Text.Lazy.Encoding.encodeUtf8
+
+bytesToTextUtf8 :: ByteString -> Either Error Text
+bytesToTextUtf8 = first exceptionToError . Data.Text.Encoding.decodeUtf8'
+
+bytesToTextUtf8Lazy :: Data.ByteString.Lazy.ByteString -> Either Error Data.Text.Lazy.Text
+bytesToTextUtf8Lazy = first exceptionToError . Data.Text.Lazy.Encoding.decodeUtf8'
+
+-- | decode a Text from a ByteString that is assumed to be UTF-8 (crash if that is not the case)
+bytesToTextUtf8Unsafe :: ByteString -> Text
+bytesToTextUtf8Unsafe = Data.Text.Encoding.decodeUtf8
+
+-- | decode a Text from a ByteString that is assumed to be UTF-8 (crash if that is not the case)
+bytesToTextUtf8UnsafeLazy :: Data.ByteString.Lazy.ByteString -> Data.Text.Lazy.Text
+bytesToTextUtf8UnsafeLazy = Data.Text.Lazy.Encoding.decodeUtf8
+
+-- | decode a Text from a ByteString that is assumed to be UTF-8,
+-- replace non-UTF-8 characters with the replacement char U+FFFD.
+bytesToTextUtf8Lenient :: Data.ByteString.ByteString -> Data.Text.Text
+bytesToTextUtf8Lenient =
+  Data.Text.Encoding.decodeUtf8With Data.Text.Encoding.Error.lenientDecode
+
+-- | decode a lazy Text from a lazy ByteString that is assumed to be UTF-8,
+-- replace non-UTF-8 characters with the replacement char U+FFFD.
+bytesToTextUtf8LenientLazy :: Data.ByteString.Lazy.ByteString -> Data.Text.Lazy.Text
+bytesToTextUtf8LenientLazy =
+  Data.Text.Lazy.Encoding.decodeUtf8With Data.Text.Encoding.Error.lenientDecode
+
+-- | Make a lazy text strict
+toStrict :: Data.Text.Lazy.Text -> Text
+toStrict = Data.Text.Lazy.toStrict
+
+-- | Make a strict text lazy
+toLazy :: Text -> Data.Text.Lazy.Text
+toLazy = Data.Text.Lazy.fromStrict
+
+toStrictBytes :: Data.ByteString.Lazy.ByteString -> ByteString
+toStrictBytes = Data.ByteString.Lazy.toStrict
+
+toLazyBytes :: ByteString -> Data.ByteString.Lazy.ByteString
+toLazyBytes = Data.ByteString.Lazy.fromStrict
+
+textToString :: Text -> String
+textToString = Data.Text.unpack
+
+stringToText :: String -> Text
+stringToText = Data.Text.pack
+
+showToText :: (Show a) => a -> Text
+showToText = stringToText . show
+
+-- | Unsafe conversion between 'Char' and 'Word8'. This is a no-op and
+-- silently truncates Chars > '\255' to 8 bits. It is provided as a
+-- convenience for ByteString construction.
+--
+-- Use if you want to get the 'Word8' representation of a character literal.
+-- Don’t use on arbitrary characters!
+--
+-- >>> charToWordUnsafe ','
+-- 44
+charToWordUnsafe :: Char -> Word8
+charToWordUnsafe = fromIntegral . Data.Char.ord
+{-# INLINE charToWordUnsafe #-}
+
+-- | Single element in a (non-empty) list.
+singleton :: a -> NonEmpty a
+singleton a = a :| []
+
+-- | If the given list is empty, use the given default element and return a non-empty list.
+nonEmptyDef :: a -> [a] -> NonEmpty a
+nonEmptyDef def xs =
+  xs & nonEmpty & \case
+    Nothing -> def :| []
+    Just ne -> ne
+
+-- | Construct a non-empty list, given a default value if the input list is empty.
+toNonEmptyDefault :: a -> [a] -> NonEmpty a
+toNonEmptyDefault def xs = case xs of
+  [] -> def :| []
+  (x : xs') -> x :| xs'
+
+-- | @O(n)@. Get the maximum element from a non-empty structure.
+maximum1 :: (Foldable1 f, Ord a) => f a -> a
+maximum1 xs = xs & foldMap1 Max & getMax
+
+-- | @O(n)@. Get the minimum element from a non-empty structure.
+minimum1 :: (Foldable1 f, Ord a) => f a -> a
+minimum1 xs = xs & foldMap1 Min & getMin
+
+-- | Annotate a 'Maybe' with an error message and turn it into an 'Either'.
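+--
+-- For example:
+--
+-- >>> annotate @Text "was Nothing" (Just (42 :: Int))
+-- Right 42
+-- >>> annotate @Text "was Nothing" (Nothing :: Maybe Int)
+-- Left "was Nothing"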
+annotate :: err -> Maybe a -> Either err a
+annotate err = \case
+  Nothing -> Left err
+  Just a -> Right a
+
+-- | Map the same function over both sides of a Bifunctor (e.g. a tuple).
+both :: Bifunctor bi => (a -> b) -> bi a a -> bi b b
+both f = bimap f f
+
+-- | Find the first element for which pred returns `Just a`, and return the `a`.
+--
+-- Example:
+-- @
+-- >>> :set -XTypeApplications
+-- >>> import qualified Text.Read
+--
+-- >>> findMaybe (Text.Read.readMaybe @Int) ["foo"]
+-- Nothing
+-- >>> findMaybe (Text.Read.readMaybe @Int) ["foo", "34.40", "34", "abc"]
+-- Just 34
+-- @
+findMaybe :: Foldable t => (a -> Maybe b) -> t a -> Maybe b
+findMaybe mPred list =
+  let pred' x = Maybe.isJust $ mPred x
+   in case Foldable.find pred' list of
+        Just a -> mPred a
+        Nothing -> Nothing
+
+-- | Like 'eitherToValidation', but puts the Error side into a NonEmpty list
+-- to make it combine with other validations.
+eitherToListValidation :: Either a c -> Validation (NonEmpty a) c
+eitherToListValidation = first singleton . eitherToValidation
+
+-- | Convert an 'Either' to a 'These'.
+eitherToThese :: Either err a -> These err a
+eitherToThese (Left err) = This err
+eitherToThese (Right a) = That a
+
+-- | Like 'eitherToThese', but puts the Error side into a NonEmpty list
+-- to make it combine with other theses.
+eitherToListThese :: Either err a -> These (NonEmpty err) a
+eitherToListThese (Left e) = This (singleton e)
+eitherToListThese (Right a) = That a
+
+-- | Convert a 'Validation' to a 'These'.
+validationToThese :: Validation err a -> These err a
+validationToThese (Failure err) = This err
+validationToThese (Success a) = That a
+
+-- | Nested '>>=' of a These inside some other @m@.
+--
+-- Use if you want to collect errors and successes, and want to chain multiple functions returning 'These'.
+thenThese ::
+  (Monad m, Semigroup err) =>
+  (a -> m (These err b)) ->
+  m (These err a) ->
+  m (These err b)
+thenThese f x = do
+  th <- x
+  join <$> traverse f th
+
+-- | Nested validating bind-like combinator inside some other @m@.
+--
+-- Use if you want to collect errors, and want to chain multiple functions returning 'Validation'.
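+--
+-- A sketch (@parsePort@, @checkPortFree@ and @Port@ are made-up names):
+--
+-- @
+-- pure (eitherToListValidation (parsePort "80x"))
+--   & thenValidate (\\port -> pure (eitherToListValidation (checkPortFree port)))
+--     :: IO (Validation (NonEmpty Error) Port)
+-- @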
+thenValidate ::
+  (Monad m) =>
+  (a -> m (Validation err b)) ->
+  m (Validation err a) ->
+  m (Validation err b)
+thenValidate f x =
+  eitherToValidation <$> do
+    x' <- validationToEither <$> x
+    case x' of
+      Left err -> pure $ Left err
+      Right a -> validationToEither <$> f a
+
+-- | Put the text to @stderr@.
+putStderrLn :: Text -> IO ()
+putStderrLn msg =
+  System.IO.hPutStrLn System.IO.stderr $ textToString msg
+
+exitWithMessage :: Text -> IO a
+exitWithMessage msg = do
+  putStderrLn msg
+  System.Exit.exitWith $ System.Exit.ExitFailure (-1)
+
+-- | Run some function producing applicative over a traversable data structure,
+-- then collect the results in a Monoid.
+--
+-- Very helpful with side-effecting functions returning @(Validation err a)@:
+--
+-- @
+-- let
+--   f :: Text -> IO (Validation (NonEmpty Error) Text)
+--   f t = pure $ if t == "foo" then Success t else Failure (singleton ("not foo: " <> t))
+--
+-- in traverseFold f [ "foo", "bar", "baz" ]
+--   == Failure ("not foo: bar" :| ["not foo: baz"])
+-- @
+--
+-- … since @(Semigroup err => Validation err a)@ is a @Semigroup@/@Monoid@ itself.
+traverseFold :: (Applicative ap, Traversable t, Monoid m) => (a -> ap m) -> t a -> ap m
+traverseFold f xs =
+  -- note: could be weakened to (Foldable t) via `getAp . foldMap (Ap . f)`
+  fold <$> traverse f xs
+{-# INLINE traverseFold #-}
+
+-- | Like 'traverseFold', but fold over a semigroup instead of a Monoid, by providing a starting element.
+traverseFoldDefault :: (Applicative ap, Traversable t, Semigroup m) => m -> (a -> ap m) -> t a -> ap m
+traverseFoldDefault def f xs = foldDef def <$> traverse f xs
+  where
+    foldDef = foldr (<>)
+{-# INLINE traverseFoldDefault #-}
+
+-- | Same as 'traverseFold', but with a 'Semigroup' and 'Traversable1' restriction.
+traverseFold1 :: (Applicative ap, Traversable1 t, Semigroup s) => (a -> ap s) -> t a -> ap s
+-- note: cannot be weakened to (Foldable1 t) because there is no `Ap` for Semigroup (No `Apply` typeclass)
+traverseFold1 f xs = fold1 <$> traverse f xs
+{-# INLINE traverseFold1 #-}
+
+-- | Use this in places where the code is still to be implemented.
+--
+-- It always type-checks and will show a warning at compile time if it was forgotten in the code.
+--
+-- Use instead of 'error' and 'undefined' for code that hasn’t been written.
+--
+-- Uses the same trick as https://hackage.haskell.org/package/protolude-0.3.0/docs/src/Protolude.Error.html#error
+{-# WARNING todo "'todo' (undefined code) remains in code" #-}
+todo :: forall (r :: RuntimeRep). forall (a :: TYPE r). HasCallStack => a
+todo = raise# (errorCallWithCallStackException "This code was not yet implemented: TODO" ?callStack)
+
+-- | Convert an integer to a 'Natural' if possible
+--
+-- Named the same as the function from "GHC.Natural", but does not crash.
+intToNatural :: Integral a => a -> Maybe Natural
+intToNatural i =
+  if i < 0
+    then Nothing
+    else Just $ fromIntegral i
+
+-- | @inverseFunction f@ creates a function that is the inverse of a given function
+-- @f@. It does so by constructing 'M.Map' internally for each value @f a@. The
+-- implementation makes sure that the 'M.Map' is constructed only once and then
+-- shared for every call.
+--
+-- __Memory usage note:__ don't inverse functions that have types like 'Int'
+-- as their result. In this case the created 'M.Map' will have huge size.
+--
+-- The complexity of reversed mapping is \(\mathcal{O}(\log n)\).
+--
+-- __Performance note:__ make sure to specialize monomorphic type of your functions
+-- that use 'inverseFunction' to avoid 'M.Map' reconstruction.
+--
+-- One of the common 'inverseFunction' use-case is inverting the 'show' or a 'show'-like
+-- function.
+--
+-- >>> data Color = Red | Green | Blue deriving (Show, Enum, Bounded)
+-- >>> parse = inverseFunction show :: String -> Maybe Color
+-- >>> parse "Red"
+-- Just Red
+-- >>> parse "Black"
+-- Nothing
+--
+-- __Correctness note:__ 'inverseFunction' expects /injective function/ as its argument,
+-- i.e. the function must map distinct arguments to distinct values.
+--
+-- Typical usage of this function looks like this:
+--
+-- @
+-- __data__ GhcVer
+--    = Ghc802
+--    | Ghc822
+--    | Ghc844
+--    | Ghc865
+--    | Ghc881
+--    __deriving__ ('Eq', 'Ord', 'Show', 'Enum', 'Bounded')
+--
+-- showGhcVer :: GhcVer -> 'Text'
+-- showGhcVer = \\__case__
+--    Ghc802 -> "8.0.2"
+--    Ghc822 -> "8.2.2"
+--    Ghc844 -> "8.4.4"
+--    Ghc865 -> "8.6.5"
+--    Ghc881 -> "8.8.1"
+--
+-- parseGhcVer :: 'Text' -> 'Maybe' GhcVer
+-- parseGhcVer = 'inverseFunction' showGhcVer
+-- @
+--
+-- Taken from relude’s @Relude.Extra.Enum@.
+inverseFunction ::
+  forall a k.
+  (Bounded a, Enum a, Ord k) =>
+  (a -> k) ->
+  (k -> Maybe a)
+inverseFunction f k = Map.lookup k $ inverseMap f
+
+-- | Like `inverseFunction`, but instead of returning the function
+-- it returns a mapping from all possible outputs to their possible inputs.
+--
+-- This has the same restrictions of 'inverseFunction'.
+inverseMap ::
+  forall a k.
+  (Bounded a, Enum a, Ord k) =>
+  (a -> k) ->
+  Map k a
+inverseMap f =
+  universe
+    <&> (\a -> (f a, a))
+    & Map.fromList
+  where
+    universe :: [a]
+    universe = [minBound .. maxBound]
+
+-- | If the predicate is true, return the @m@, else 'mempty'.
+--
+-- This can be used (together with `ifExists`) to e.g. create lists with optional elements:
+--
+-- >>> import Data.Monoid (Sum(..))
+--
+-- >>> :{
+-- mconcat [
+--   ifTrue (1 == 1) [1],
+--   [2, 3, 4],
+--   ifTrue False [5]
+--   ]
+-- :}
+-- [1,2,3,4]
+--
+-- Or any other Monoid:
+--
+-- >>> mconcat [ Sum 1, ifTrue (1 == 1) (Sum 2), Sum 3 ]
+-- Sum {getSum = 6}
+ifTrue :: Monoid m => Bool -> m -> m
+ifTrue pred' m = if pred' then m else mempty
+
+-- | If the given @Maybe@ is @Just@, return the @m@, else return 'mempty'.
+--
+-- This can be used (together with `ifTrue`) to e.g. create lists with optional elements:
+--
+-- >>> import Data.Monoid (Sum(..))
+--
+-- >>> :{
+-- mconcat [
+--   ifExists (Just [1]),
+--   [2, 3, 4],
+--   ifExists Nothing
+--   ]
+-- :}
+-- [1,2,3,4]
+--
+-- Or any other Monoid:
+--
+-- >>> mconcat [ Sum 1, ifExists (Just (Sum 2)), Sum 3 ]
+-- Sum {getSum = 6}
+ifExists :: Monoid m => Maybe m -> m
+ifExists = fold
diff --git a/users/Profpatsch/my-prelude/src/Postgres/Decoder.hs b/users/Profpatsch/my-prelude/src/Postgres/Decoder.hs
new file mode 100644
index 000000000000..008b89b4ba3d
--- /dev/null
+++ b/users/Profpatsch/my-prelude/src/Postgres/Decoder.hs
@@ -0,0 +1,94 @@
+module Postgres.Decoder where
+
+import Control.Applicative (Alternative)
+import Data.Aeson qualified as Json
+import Data.Aeson.BetterErrors qualified as Json
+import Data.Error.Tree
+import Data.Typeable (Typeable)
+import Database.PostgreSQL.Simple (Binary (fromBinary))
+import Database.PostgreSQL.Simple.FromField qualified as PG
+import Database.PostgreSQL.Simple.FromRow qualified as PG
+import Json qualified
+import Label
+import PossehlAnalyticsPrelude
+
+-- | A Decoder of postgres values. Allows embedding more complex parsers (like a 'Json.ParseT').
+newtype Decoder a = Decoder (PG.RowParser a)
+  deriving newtype (Functor, Applicative, Alternative, Monad)
+
+-- | Parse a `bytea` field, equivalent to @Binary ByteString@ but avoids the pitfall of having to use 'Binary'.
+bytea :: Decoder ByteString
+bytea = fromField @(Binary ByteString) <&> (.fromBinary)
+
+-- | Parse a nullable `bytea` field, equivalent to @Binary ByteString@ but avoids the pitfall of having to use 'Binary'.
+byteaMay :: Decoder (Maybe ByteString)
+byteaMay = fromField @(Maybe (Binary ByteString)) <&> fmap (.fromBinary)
+
+-- | Turn any type that implements 'PG.fromField' into a 'Decoder'. Use type applications to prevent accidental conversions:
+--
+-- @
+-- fromField @Text :: Decoder Text
+-- @
+fromField :: PG.FromField a => Decoder a
+fromField = Decoder $ PG.fieldWith PG.fromField
+
+-- | Turn any type that implements 'PG.fromField' into a 'Decoder' and wrap the result into the given 'Label'. Use type applications to prevent accidental conversions:
+--
+-- @
+-- fromFieldLabel @"myField" @Text :: Decoder (Label "myField" Text)
+-- @
+fromFieldLabel :: forall lbl a. PG.FromField a => Decoder (Label lbl a)
+fromFieldLabel = label @lbl <$> fromField
+
+-- | Parse fields out of a json value returned from the database.
+--
+-- ATTN: The whole json record has to be transferred before it is parsed,
+-- so if you only need a tiny bit of it, use `->` and `->>` in your SQL statement
+-- and return only the fields you need from the query.
+--
+-- In that case pay attention to NULL though:
+--
+-- @
+-- SELECT '{"foo": {}}'::jsonb->>'foo' IS NULL
+-- → TRUE
+-- @
+--
+-- Also note: `->>` will coerce the json value to @text@, regardless of the content.
+-- So the JSON object @{"foo": {}}@ would be returned as the text: @"{\"foo\": {}}"@.
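+--
+-- A usage sketch (the column and field names are made up), e.g. for a
+-- query like SELECT metadata FROM users:
+--
+-- @
+-- json (Json.key "name" Json.asText) :: Decoder Text
+-- @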
+json :: Typeable a => Json.ParseT ErrorTree Identity a -> Decoder a
+json parser = Decoder $ PG.fieldWith $ \field bytes -> do
+  val <- PG.fromField @Json.Value field bytes
+  case Json.parseValue parser val of
+    Left err ->
+      PG.returnError
+        PG.ConversionFailed
+        field
+        (err & Json.parseErrorTree "Cannot decode jsonb column" & prettyErrorTree & textToString)
+    Right a -> pure a
+
+-- | Parse fields out of a nullable json value returned from the database.
+--
+-- ATTN: The whole json record has to be transferred before it is parsed,
+-- so if you only need a tiny bit of it, use `->` and `->>` in your SQL statement
+-- and return only the fields you need from the query.
+--
+-- In that case pay attention to NULL though:
+--
+-- @
+-- SELECT '{"foo": {}}'::jsonb->>'foo' IS NULL
+-- → TRUE
+-- @
+--
+-- Also note: `->>` will coerce the json value to @text@, regardless of the content.
+-- So the JSON object @{"foo": {}}@ would be returned as the text: @"{\"foo\": {}}"@.
+jsonMay :: Typeable a => Json.ParseT ErrorTree Identity a -> Decoder (Maybe a)
+jsonMay parser = Decoder $ PG.fieldWith $ \field bytes -> do
+  val <- PG.fromField @(Maybe Json.Value) field bytes
+  case Json.parseValue parser <$> val of
+    Nothing -> pure Nothing
+    Just (Left err) ->
+      PG.returnError
+        PG.ConversionFailed
+        field
+        (err & Json.parseErrorTree "Cannot decode jsonb column" & prettyErrorTree & textToString)
+    Just (Right a) -> pure (Just a)
diff --git a/users/Profpatsch/my-prelude/src/Postgres/MonadPostgres.hs b/users/Profpatsch/my-prelude/src/Postgres/MonadPostgres.hs
new file mode 100644
index 000000000000..45c94b2009ca
--- /dev/null
+++ b/users/Profpatsch/my-prelude/src/Postgres/MonadPostgres.hs
@@ -0,0 +1,583 @@
+{-# LANGUAGE DeriveAnyClass #-}
+{-# LANGUAGE QuasiQuotes #-}
+{-# LANGUAGE TemplateHaskell #-}
+{-# OPTIONS_GHC -Wno-orphans #-}
+
+module Postgres.MonadPostgres where
+
+import AtLeast (AtLeast)
+import Control.Exception
+import Control.Monad.Except
+import Control.Monad.Logger (MonadLogger, logDebug, logWarn)
+import Control.Monad.Reader (MonadReader (ask), ReaderT (..))
+import Control.Monad.Trans.Resource
+import Data.Aeson (FromJSON)
+import Data.Error.Tree
+import Data.Int (Int64)
+import Data.Kind (Type)
+import Data.List qualified as List
+import Data.Pool (Pool)
+import Data.Pool qualified as Pool
+import Data.Text qualified as Text
+import Data.Typeable (Typeable)
+import Database.PostgreSQL.Simple (Connection, FormatError, FromRow, Query, QueryError, ResultError, SqlError, ToRow)
+import Database.PostgreSQL.Simple qualified as PG
+import Database.PostgreSQL.Simple qualified as Postgres
+import Database.PostgreSQL.Simple.FromRow qualified as PG
+import Database.PostgreSQL.Simple.ToField (ToField)
+import Database.PostgreSQL.Simple.ToRow (ToRow (toRow))
+import Database.PostgreSQL.Simple.Types (Query (..))
+import GHC.Records (HasField (..))
+import Label
+import OpenTelemetry.Trace.Core qualified as Otel hiding (inSpan, inSpan')
+import OpenTelemetry.Trace.Monad qualified as Otel
+import PossehlAnalyticsPrelude
+import Postgres.Decoder
+import Postgres.Decoder qualified as Dec
+import Pretty (showPretty)
+import Seconds
+import System.Exit (ExitCode (..))
+import Tool
+import UnliftIO (MonadUnliftIO (withRunInIO))
+import UnliftIO.Process qualified as Process
+import UnliftIO.Resource qualified as Resource
+import Prelude hiding (span)
+
+-- | Postgres queries/commands that can be executed within a running transaction.
+--
+-- These are implemented with the @postgresql-simple@ primitives of the same name
+-- and will behave the same unless otherwise documented.
+class (Monad m) => MonadPostgres (m :: Type -> Type) where
+  -- | Execute an INSERT, UPDATE, or other SQL query that is not expected to return results.
+  --
+  -- Returns the number of rows affected.
+  execute :: (ToRow params, Typeable params) => Query -> params -> Transaction m (Label "numberOfRowsAffected" Natural)
+
+  -- | Execute an INSERT, UPDATE, or other SQL query that is not expected to return results. Does not take parameters.
+  --
+  -- Returns the number of rows affected.
+  execute_ :: Query -> Transaction m (Label "numberOfRowsAffected" Natural)
+
+  -- | Execute a multi-row INSERT, UPDATE, or other SQL query that is not expected to return results.
+  --
+  -- Returns the number of rows affected. If the list of parameters is empty, this function will simply return 0 without issuing the query to the backend. If this is not desired, consider using the 'PG.Values' constructor instead.
+  executeMany :: (ToRow params, Typeable params) => Query -> [params] -> Transaction m (Label "numberOfRowsAffected" Natural)
+
+  -- | Execute INSERT ... RETURNING, UPDATE ... RETURNING, or other SQL query that accepts multi-row input and is expected to return results. Note that it is possible to write query conn "INSERT ... RETURNING ..." ... in cases where you are only inserting a single row, and do not need functionality analogous to 'executeMany'.
+  --
+  -- If the list of parameters is empty, this function will simply return [] without issuing the query to the backend. If this is not desired, consider using the 'PG.Values' constructor instead.
+  executeManyReturningWith :: (ToRow q) => Query -> [q] -> Decoder r -> Transaction m [r]
+
+  -- | Run a query, passing parameters and result row parser.
+  queryWith :: (PG.ToRow params, Typeable params, Typeable r) => PG.Query -> params -> Decoder r -> Transaction m [r]
+
+  -- | Run a query without any parameters and result row parser.
+  queryWith_ :: (Typeable r) => PG.Query -> Decoder r -> Transaction m [r]
+
+  -- | Run a query, passing parameters, and fold over the resulting rows.
+  --
+  -- This doesn’t have to realize the full list of results in memory,
+  -- rather results are streamed incrementally from the database.
+  --
+  -- When dealing with small results, it may be simpler (and perhaps faster) to use query instead.
+  --
+  -- This fold is _not_ strict. The stream consumer is responsible for forcing the evaluation of its result to avoid space leaks.
+  --
+  -- If you can, prefer aggregating in the database itself.
+  foldRows ::
+    (FromRow row, ToRow params, Typeable row, Typeable params) =>
+    Query ->
+    params ->
+    a ->
+    (a -> row -> Transaction m a) ->
+    Transaction m a
+
+  -- | Run a given transaction in a transaction block, rolling back the transaction
+  -- if any exception (postgres or Haskell Exception) is thrown during execution.
+  --
+  -- Re-throws the exception.
+  --
+  -- Don’t do any long-running things on the Haskell side during a transaction,
+  -- because it will block a database connection and potentially also lock
+  -- database tables from being written or read by other clients.
+  --
+  -- Nonetheless, try to push transactions as far out to the handlers as possible,
+  -- don’t do something like @runTransaction $ query …@, because it will lead people
+  -- to accidentally start nested transactions (the inner transaction is run on a new connection,
+  -- thus can’t see any changes done by the outer transaction).
+  -- Only handlers should run transactions.
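+  --
+  -- A sketch of a handler (names and the query are illustrative):
+  --
+  -- @
+  -- getUserNames :: (MonadPostgres m) => m [Label "name" Text]
+  -- getUserNames =
+  --   runTransaction $
+  --     queryWith_ "SELECT name FROM users" (Dec.fromFieldLabel @"name" @Text)
+  -- @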
+  runTransaction :: Transaction m a -> m a
+
+-- | Run a query, passing parameters.
+query :: forall m params r. (PG.ToRow params, PG.FromRow r, Typeable params, Typeable r, MonadPostgres m) => PG.Query -> params -> Transaction m [r]
+query qry params = queryWith qry params (Decoder PG.fromRow)
+
+-- | Run a query without any parameters.
+query_ :: forall m r. (Typeable r, PG.FromRow r, MonadPostgres m) => PG.Query -> Transaction m [r]
+query_ qry = queryWith_ qry (Decoder PG.fromRow)
+
+-- TODO: implement via fold, so that the result doesn’t have to be realized in memory
+querySingleRow ::
+  ( MonadPostgres m,
+    ToRow qParams,
+    Typeable qParams,
+    FromRow a,
+    Typeable a,
+    MonadThrow m
+  ) =>
+  Query ->
+  qParams ->
+  Transaction m a
+querySingleRow qry params = do
+  query qry params >>= ensureSingleRow
+
+-- TODO: implement via fold, so that the result doesn’t have to be realized in memory
+querySingleRowMaybe ::
+  ( MonadPostgres m,
+    ToRow qParams,
+    Typeable qParams,
+    FromRow a,
+    Typeable a,
+    MonadThrow m
+  ) =>
+  Query ->
+  qParams ->
+  Transaction m (Maybe a)
+querySingleRowMaybe qry params = do
+  rows <- query qry params
+  case rows of
+    [] -> pure Nothing
+    [one] -> pure (Just one)
+    -- TODO: Should we MonadThrow this here? It’s really an implementation detail of MonadPostgres
+    -- that a database function can error out, should probably be handled by the instances.
+    more -> throwM $ SingleRowError {numberOfRowsReturned = (List.length more)}
+
+ensureSingleRow :: (MonadThrow m) => [a] -> m a
+ensureSingleRow = \case
+  -- TODO: Should we MonadThrow this here? It’s really an implementation detail of MonadPostgres
+  -- that a database function can error out, should probably be handled by the instances.
+  [] -> throwM (SingleRowError {numberOfRowsReturned = 0})
+  [one] -> pure one
+  more ->
+    throwM $
+      SingleRowError
+        { numberOfRowsReturned =
+            -- TODO: this is VERY bad, because it requires parsing the full database output, even if there are 10000000000 elements
+            List.length more
+        }
+
+newtype Transaction m a = Transaction {unTransaction :: (ReaderT Connection m a)}
+  deriving newtype
+    ( Functor,
+      Applicative,
+      Monad,
+      MonadThrow,
+      MonadLogger,
+      MonadIO,
+      MonadUnliftIO,
+      MonadTrans,
+      Otel.MonadTracer
+    )
+
+runTransaction' :: Connection -> Transaction m a -> m a
+runTransaction' conn transaction = runReaderT transaction.unTransaction conn
+
+-- | [Resource Pool](http://hackage.haskell.org/package/resource-pool-0.2.3.2/docs/Data-Pool.html) configuration.
+data PoolingInfo = PoolingInfo
+  { -- | Number of independent sub-pools (stripes)
+    --   the overall pool is split into.
+    numberOfStripes :: AtLeast 1 Int,
+    -- | Time after which extra resources
+    --   (above minimum) can stay in the pool
+    --   without being used.
+    unusedResourceOpenTime :: Seconds,
+    -- | Max number of resources that can be
+    --   in the Pool at any time
+    maxOpenResourcesPerStripe :: AtLeast 1 Int
+  }
+  deriving stock (Generic, Eq, Show)
+  deriving anyclass (FromJSON)
+
+initMonadPostgres ::
+  (Text -> IO ()) ->
+  -- | Info describing the connection to the Postgres DB
+  Postgres.ConnectInfo ->
+  -- | Configuration info for pooling attributes
+  PoolingInfo ->
+  -- | Created Postgres connection pool
+  ResourceT IO (Pool Postgres.Connection)
+initMonadPostgres logInfoFn connectInfo poolingInfo = do
+  (_releaseKey, connPool) <-
+    Resource.allocate
+      (logInfoFn "Creating Postgres Connection Pool" >> createPGConnPool)
+      (\pool -> logInfoFn "Destroying Postgres Connection Pool" >> destroyPGConnPool pool)
+  pure connPool
+  where
+    -- \| Create a Postgres connection pool
+    createPGConnPool ::
+      IO (Pool Postgres.Connection)
+    createPGConnPool =
+      Pool.createPool
+        poolCreateResource
+        poolfreeResource
+        poolingInfo.numberOfStripes.unAtLeast
+        (poolingInfo.unusedResourceOpenTime & secondsToNominalDiffTime)
+        (poolingInfo.maxOpenResourcesPerStripe.unAtLeast)
+      where
+        poolCreateResource = Postgres.connect connectInfo
+        poolfreeResource = Postgres.close
+
+    -- \| Destroy a Postgres connection pool
+    destroyPGConnPool ::
+      -- \| Pool to be destroyed
+      (Pool Postgres.Connection) ->
+      IO ()
+    destroyPGConnPool p = Pool.destroyAllResources p
+
+-- | Catch any Postgres exception that gets thrown,
+-- print the query that was run and the query parameters,
+-- then rethrow inside an 'Error'.
+handlePGException ::
+  forall a params tools m.
+  (ToRow params, MonadUnliftIO m, MonadLogger m, HasField "pgFormat" tools Tool) =>
+  tools ->
+  Text ->
+  Query ->
+  -- | Depending on whether we used `format` or `formatMany`.
+  Either params [params] ->
+  IO a ->
+  Transaction m a
+handlePGException tools queryType query' params io = do
+  withRunInIO $ \unliftIO ->
+    io
+      `catches` [ Handler $ unliftIO . logQueryException @SqlError,
+                  Handler $ unliftIO . logQueryException @QueryError,
+                  Handler $ unliftIO . logQueryException @ResultError,
+                  Handler $ unliftIO . logFormatException
+                ]
+  where
+    -- TODO: use throwInternalError here (after pulling it into the MonadPostgres class)
+    throwAsError = unwrapIOError . Left . newError
+    throwErr err = liftIO $ throwAsError $ prettyErrorTree $ nestedMultiError "A Postgres query failed" err
+    logQueryException :: (Exception e) => e -> Transaction m a
+    logQueryException exc = do
+      formattedQuery <- case params of
+        Left one -> pgFormatQuery' tools query' one
+        Right many -> pgFormatQueryMany' tools query' many
+      throwErr
+        ( singleError [fmt|Query Type: {queryType}|]
+            :| [ nestedError "Exception" (exc & showPretty & newError & singleError),
+                 nestedError "Query" (formattedQuery & newError & singleError)
+               ]
+        )
+    logFormatException :: FormatError -> Transaction m a
+    logFormatException fe = throwErr (fe & showPretty & newError & singleError & singleton)
+
+-- | Perform a Postgres action within a transaction
+withPGTransaction ::
+  -- | Postgres connection pool to be used for the action
+  (Pool Postgres.Connection) ->
+  -- | DB-action to be performed
+  (Postgres.Connection -> IO a) ->
+  -- | Result of the DB-action
+  IO a
+withPGTransaction connPool f =
+  Pool.withResource
+    connPool
+    (\conn -> Postgres.withTransaction conn (f conn))
+
+runPGTransactionImpl :: (MonadUnliftIO m) => m (Pool Postgres.Connection) -> Transaction m a -> m a
+{-# INLINE runPGTransactionImpl #-}
+runPGTransactionImpl zoom (Transaction transaction) = do
+  pool <- zoom
+  withRunInIO $ \unliftIO ->
+    withPGTransaction pool $ \conn -> do
+      unliftIO $ runReaderT transaction conn
+
+executeImpl ::
+  (ToRow params, MonadUnliftIO m, MonadLogger m, HasField "pgFormat" tools Tool, Otel.MonadTracer m) =>
+  m tools ->
+  m DebugLogDatabaseQueries ->
+  Query ->
+  params ->
+  Transaction m (Label "numberOfRowsAffected" Natural)
+{-# INLINE executeImpl #-}
+executeImpl zoomTools zoomDebugLogDatabaseQueries qry params =
+  Otel.inSpan' "Postgres Query (execute)" Otel.defaultSpanArguments $ \span -> do
+    tools <- lift @Transaction zoomTools
+    logDatabaseQueries <- lift @Transaction zoomDebugLogDatabaseQueries
+    traceQueryIfEnabled tools span logDatabaseQueries qry (HasSingleParam params)
+    conn <- Transaction ask
+    PG.execute conn qry params
+      & handlePGException tools "execute" qry (Left params)
+      >>= toNumberOfRowsAffected "executeImpl"
+
+executeImpl_ ::
+  (MonadUnliftIO m, MonadLogger m, HasField "pgFormat" tools Tool, Otel.MonadTracer m) =>
+  m tools ->
+  m DebugLogDatabaseQueries ->
+  Query ->
+  Transaction m (Label "numberOfRowsAffected" Natural)
+{-# INLINE executeImpl_ #-}
+executeImpl_ zoomTools zoomDebugLogDatabaseQueries qry =
+  Otel.inSpan' "Postgres Query (execute)" Otel.defaultSpanArguments $ \span -> do
+    tools <- lift @Transaction zoomTools
+    logDatabaseQueries <- lift @Transaction zoomDebugLogDatabaseQueries
+    traceQueryIfEnabled @() tools span logDatabaseQueries qry HasNoParams
+    conn <- Transaction ask
+    PG.execute_ conn qry
+      & handlePGException tools "execute_" qry (Left ())
+      >>= toNumberOfRowsAffected "executeImpl_"
+
+executeManyImpl ::
+  (ToRow params, MonadUnliftIO m, MonadLogger m, HasField "pgFormat" tools Tool, Otel.MonadTracer m) =>
+  m tools ->
+  m DebugLogDatabaseQueries ->
+  Query ->
+  [params] ->
+  Transaction m (Label "numberOfRowsAffected" Natural)
+executeManyImpl zoomTools zoomDebugLogDatabaseQueries qry params =
+  Otel.inSpan' "Postgres Query (execute)" Otel.defaultSpanArguments $ \span -> do
+    tools <- lift @Transaction zoomTools
+    logDatabaseQueries <- lift @Transaction zoomDebugLogDatabaseQueries
+    traceQueryIfEnabled tools span logDatabaseQueries qry (HasMultiParams params)
+    conn <- Transaction ask
+    PG.executeMany conn qry params
+      & handlePGException tools "executeMany" qry (Right params)
+      >>= toNumberOfRowsAffected "executeManyImpl"
+
+toNumberOfRowsAffected :: (MonadIO m) => Text -> Int64 -> m (Label "numberOfRowsAffected" Natural)
+toNumberOfRowsAffected functionName i64 =
+  i64
+    & intToNatural
+    & annotate [fmt|{functionName}: postgres returned a negative number of rows affected: {i64}|]
+    -- we throw this directly in IO here, because we don’t want to e.g. have to propagate MonadThrow through user code (it’s an assertion)
+    & unwrapIOError
+    & liftIO
+    <&> label @"numberOfRowsAffected"
+
+executeManyReturningWithImpl ::
+  (ToRow params, MonadUnliftIO m, MonadLogger m, HasField "pgFormat" tools Tool, Otel.MonadTracer m) =>
+  m tools ->
+  m DebugLogDatabaseQueries ->
+  Query ->
+  [params] ->
+  Decoder r ->
+  Transaction m [r]
+{-# INLINE executeManyReturningWithImpl #-}
+executeManyReturningWithImpl zoomTools zoomDebugLogDatabaseQueries qry params (Decoder fromRow) = do
+  Otel.inSpan' "Postgres Query (execute)" Otel.defaultSpanArguments $ \span -> do
+    tools <- lift @Transaction zoomTools
+    logDatabaseQueries <- lift @Transaction zoomDebugLogDatabaseQueries
+    traceQueryIfEnabled tools span logDatabaseQueries qry (HasMultiParams params)
+    conn <- Transaction ask
+    PG.returningWith fromRow conn qry params
+      & handlePGException tools "executeManyReturning" qry (Right params)
+
+foldRowsImpl ::
+  (FromRow row, ToRow params, MonadUnliftIO m, MonadLogger m, HasField "pgFormat" tools Tool) =>
+  m tools ->
+  Query ->
+  params ->
+  a ->
+  (a -> row -> Transaction m a) ->
+  Transaction m a
+{-# INLINE foldRowsImpl #-}
+foldRowsImpl zoomTools qry params accumulator f = do
+  conn <- Transaction ask
+  tools <- lift @Transaction zoomTools
+  withRunInIO
+    ( \runInIO ->
+        do
+          PG.fold
+            conn
+            qry
+            params
+            accumulator
+            (\acc row -> runInIO $ f acc row)
+            & handlePGException tools "fold" qry (Left params)
+            & runInIO
+    )
+
+pgFormatQueryNoParams' ::
+  (MonadIO m, MonadLogger m, HasField "pgFormat" tools Tool) =>
+  tools ->
+  Query ->
+  Transaction m Text
+pgFormatQueryNoParams' tools q =
+  lift $ pgFormatQueryByteString tools q.fromQuery
+
+pgFormatQuery :: (ToRow params, MonadIO m) => Query -> params -> Transaction m ByteString
+pgFormatQuery qry params = Transaction $ do
+  conn <- ask
+  liftIO $ PG.formatQuery conn qry params
+
+pgFormatQueryMany :: (MonadIO m, ToRow params) => Query -> [params] -> Transaction m ByteString
+pgFormatQueryMany qry params = Transaction $ do
+  conn <- ask
+  liftIO $ PG.formatMany conn qry params
+
+queryWithImpl ::
+  (ToRow params, MonadUnliftIO m, MonadLogger m, HasField "pgFormat" tools Tool, Otel.MonadTracer m) =>
+  m tools ->
+  m DebugLogDatabaseQueries ->
+  Query ->
+  params ->
+  Decoder r ->
+  Transaction m [r]
+{-# INLINE queryWithImpl #-}
+queryWithImpl zoomTools zoomDebugLogDatabaseQueries qry params (Decoder fromRow) = do
+  Otel.inSpan' "Postgres Query (execute)" Otel.defaultSpanArguments $ \span -> do
+    tools <- lift @Transaction zoomTools
+    logDatabaseQueries <- lift @Transaction zoomDebugLogDatabaseQueries
+    traceQueryIfEnabled tools span logDatabaseQueries qry (HasSingleParam params)
+    conn <- Transaction ask
+    PG.queryWith fromRow conn qry params
+      & handlePGException tools "query" qry (Left params)
+
+queryWithImpl_ :: (MonadUnliftIO m, MonadLogger m, HasField "pgFormat" tools Tool) => m tools -> Query -> Decoder r -> Transaction m [r]
+{-# INLINE queryWithImpl_ #-}
+queryWithImpl_ zoomTools qry (Decoder fromRow) = do
+  tools <- lift @Transaction zoomTools
+  conn <- Transaction ask
+  liftIO (PG.queryWith_ fromRow conn qry)
+    & handlePGException tools "query" qry (Left ())
+
+pgQuery :: (ToRow params, FromRow r, MonadUnliftIO m, MonadLogger m, HasField "pgFormat" tools Tool) => tools -> Query -> params -> Transaction m [r]
+pgQuery tools qry params = do
+  conn <- Transaction ask
+  PG.query conn qry params
+    & handlePGException tools "query" qry (Left params)
+
+pgQuery_ :: (FromRow r, MonadUnliftIO m, MonadLogger m, HasField "pgFormat" tools Tool) => tools -> Query -> Transaction m [r]
+pgQuery_ tools qry = do
+  conn <- Transaction ask
+  PG.query_ conn qry
+    & handlePGException tools "query_" qry (Left ())
+
+data SingleRowError = SingleRowError
+  { -- | How many rows were actually returned by the query
+    numberOfRowsReturned :: Int
+  }
+  deriving stock (Show)
+
+instance Exception SingleRowError where
+  displayException (SingleRowError {..}) = [fmt|Single row expected from SQL query result, but {numberOfRowsReturned} rows were returned instead.|]
+
+pgFormatQuery' :: (MonadIO m, ToRow params, MonadLogger m, HasField "pgFormat" tools Tool) => tools -> Query -> params -> Transaction m Text
+pgFormatQuery' tools q p =
+  pgFormatQuery q p
+    >>= lift . pgFormatQueryByteString tools
+
+pgFormatQueryMany' :: (MonadIO m, ToRow params, MonadLogger m, HasField "pgFormat" tools Tool) => tools -> Query -> [params] -> Transaction m Text
+pgFormatQueryMany' tools q p =
+  pgFormatQueryMany q p
+    >>= lift . pgFormatQueryByteString tools
+
+-- | Parser that looks up the @pg_format@ executable in the tools directory.
+postgresToolsParser :: ToolParserT IO (Label "pgFormat" Tool)
+postgresToolsParser = label @"pgFormat" <$> readTool "pg_format"
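+
+-- A usage sketch (the environment variable name is an example):
+--
+-- > pgTools <- readTools (label @"toolsEnvVar" "PG_TOOLS_DIR") postgresToolsParser
+-- > -- pgTools.pgFormat.toolPath is then the checked path to pg_format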
+
+pgFormatQueryByteString :: (MonadIO m, MonadLogger m, HasField "pgFormat" tools Tool) => tools -> ByteString -> m Text
+pgFormatQueryByteString tools queryBytes = do
+  do
+    (exitCode, stdout, stderr) <-
+      Process.readProcessWithExitCode
+        tools.pgFormat.toolPath
+        ["-"]
+        (queryBytes & bytesToTextUtf8Lenient & textToString)
+    case exitCode of
+      ExitSuccess -> pure (stdout & stringToText)
+      ExitFailure status -> do
+        $logWarn [fmt|pg_format failed with status {status} while formatting the query, using original query string. Is there a syntax error?|]
+        $logDebug
+          ( prettyErrorTree
+              ( nestedMultiError
+                  "pg_format output"
+                  ( nestedError "stdout" (singleError (stdout & stringToText & newError))
+                      :| [(nestedError "stderr" (singleError (stderr & stringToText & newError)))]
+                  )
+              )
+          )
+        pure (queryBytes & bytesToTextUtf8Lenient)
+
+data DebugLogDatabaseQueries
+  = -- | Do not log the database queries
+    DontLogDatabaseQueries
+  | -- | Log the database queries as debug output;
+    LogDatabaseQueries
+  | -- | Log the database queries as debug output and additionally the EXPLAIN output (from the query planner, not the actual values after execution, because that’s a bit harder to do)
+    LogDatabaseQueriesAndExplain
+  deriving stock (Show, Enum, Bounded)
+
+data HasQueryParams param
+  = HasNoParams
+  | HasSingleParam param
+  | HasMultiParams [param]
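+
+-- A minimal sketch of how the logging setting is consumed by the query functions
+-- above, inside a 'Transaction' (the @tools@ value and the table/column names are
+-- made-up examples):
+--
+-- > rows <-
+-- >   queryWithImpl
+-- >     (pure tools)
+-- >     (pure LogDatabaseQueriesAndExplain)
+-- >     "SELECT name FROM users WHERE id = ?"
+-- >     (PG.Only (1 :: Int))
+-- >     (Dec.fromField @Text)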
+
+-- | Log the postgres query depending on the given setting
+traceQueryIfEnabled ::
+  ( ToRow params,
+    MonadUnliftIO m,
+    MonadLogger m,
+    HasField "pgFormat" tools Tool,
+    Otel.MonadTracer m
+  ) =>
+  tools ->
+  Otel.Span ->
+  DebugLogDatabaseQueries ->
+  Query ->
+  HasQueryParams params ->
+  Transaction m ()
+traceQueryIfEnabled tools span logDatabaseQueries qry params = do
+  -- The formatted query is only needed (and thus only computed) when one of the logging modes below is enabled
+  let formattedQuery = case params of
+        HasNoParams -> pgFormatQueryNoParams' tools qry
+        HasSingleParam p -> pgFormatQuery' tools qry p
+        HasMultiParams ps -> pgFormatQueryMany' tools qry ps
+  let doLog errs =
+        Otel.addAttributes
+          span
+          $ ( ("postgres.query", Otel.toAttribute @Text errs.query)
+                : ( errs.explain
+                      & foldMap
+                        ( \ex ->
+                            [("postgres.explain", Otel.toAttribute @Text ex)]
+                        )
+                  )
+            )
+  let doExplain = do
+        q <- formattedQuery
+        Otel.inSpan "Postgres EXPLAIN Query" Otel.defaultSpanArguments $ do
+          queryWithImpl_
+            (pure tools)
+            ( "EXPLAIN "
+                <> (
+                     -- TODO: this is not nice, but the only way to get the `executeMany` form to work with this
+                     -- because we need the query with all elements already interpolated.
+                     Query (q & textToBytesUtf8)
+                   )
+            )
+            (Dec.fromField @Text)
+            <&> Text.intercalate "\n"
+  case logDatabaseQueries of
+    DontLogDatabaseQueries -> pure ()
+    LogDatabaseQueries -> do
+      q <- formattedQuery
+      doLog (T2 (label @"query" q) (label @"explain" Nothing))
+    LogDatabaseQueriesAndExplain -> do
+      q <- formattedQuery
+      -- XXX: stuff like `CREATE SCHEMA` cannot be EXPLAINed, so we should catch exceptions here
+      -- and just ignore anything that errors (if it errors because of a problem with the query, it would have been caught by the query itself).
+      ex <- doExplain
+      doLog (T2 (label @"query" q) (label @"explain" (Just ex)))
+
+instance (ToField t1) => ToRow (Label l1 t1) where
+  toRow t2 = toRow $ PG.Only $ getField @l1 t2
+
+instance (ToField t1, ToField t2) => ToRow (T2 l1 t1 l2 t2) where
+  toRow t2 = toRow (getField @l1 t2, getField @l2 t2)
+
+instance (ToField t1, ToField t2, ToField t3) => ToRow (T3 l1 t1 l2 t2 l3 t3) where
+  toRow t3 = toRow (getField @l1 t3, getField @l2 t3, getField @l3 t3)
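+
+-- A usage sketch for the labelled-tuple 'ToRow' instances above (table and column
+-- names are made up); a 'T2' built with 'label' can be passed directly as the
+-- query parameters:
+--
+-- > pgQuery
+-- >   tools
+-- >   "SELECT name FROM users WHERE id = ? AND active = ?"
+-- >   (T2 (label @"id" (1 :: Int)) (label @"active" True))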
diff --git a/users/Profpatsch/my-prelude/src/Seconds.hs b/users/Profpatsch/my-prelude/src/Seconds.hs
new file mode 100644
index 000000000000..8d05f30be8c3
--- /dev/null
+++ b/users/Profpatsch/my-prelude/src/Seconds.hs
@@ -0,0 +1,55 @@
+module Seconds where
+
+import Data.Aeson (FromJSON)
+import Data.Aeson qualified as Json
+import Data.Aeson.Types (FromJSON (parseJSON))
+import Data.Scientific
+import Data.Time (NominalDiffTime)
+import FieldParser
+import FieldParser qualified as Field
+import GHC.Natural (naturalToInteger)
+import PossehlAnalyticsPrelude
+
+-- | A natural number of seconds.
+newtype Seconds = Seconds {unSeconds :: Natural}
+  deriving stock (Eq, Show)
+
+-- | Parse a decimal number as a number of seconds
+textToSeconds :: FieldParser Text Seconds
+textToSeconds = Seconds <$> Field.decimalNatural
+
+scientificToSeconds :: FieldParser Scientific Seconds
+scientificToSeconds =
+  ( Field.boundedScientificIntegral @Int "Number of seconds"
+      >>> Field.integralToNatural
+  )
+    & rmap Seconds
+
+-- | Microseconds, represented internally with a 64-bit 'Int'
+newtype MicrosecondsInt = MicrosecondsInt {unMicrosecondsInt :: Int}
+  deriving stock (Eq, Show)
+
+-- | Try to fit a number of seconds into a MicrosecondsInt
+secondsToMicrosecondsInt :: FieldParser Seconds MicrosecondsInt
+secondsToMicrosecondsInt =
+  lmap
+    (\sec -> naturalToInteger sec.unSeconds * 1_000_000)
+    (Field.bounded "Could not fit into an Int after multiplying with 1_000_000 (seconds to microseconds)")
+    & rmap MicrosecondsInt
+
+secondsToNominalDiffTime :: Seconds -> NominalDiffTime
+secondsToNominalDiffTime sec =
+  sec.unSeconds
+    & naturalToInteger
+    & fromInteger @NominalDiffTime
+
+instance FromJSON Seconds where
+  parseJSON = Field.toParseJSON jsonNumberToSeconds
+
+-- | Parse a json number as a number of seconds.
+jsonNumberToSeconds :: FieldParser' Error Json.Value Seconds
+jsonNumberToSeconds = Field.jsonNumber >>> scientificToSeconds
+
+-- | The number of seconds in a week
+secondsInAWeek :: Seconds
+secondsInAWeek = Seconds (3600 * 24 * 7)
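+
+-- A small usage sketch (with OverloadedStrings; the literals are examples):
+--
+-- > Json.decode "30" :: Maybe Seconds       -- Just (Seconds 30), via the FromJSON instance
+-- > secondsToNominalDiffTime (Seconds 90)   -- 90s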
diff --git a/users/Profpatsch/my-prelude/src/Test.hs b/users/Profpatsch/my-prelude/src/Test.hs
new file mode 100644
index 000000000000..862ee16c255d
--- /dev/null
+++ b/users/Profpatsch/my-prelude/src/Test.hs
@@ -0,0 +1,115 @@
+{-# LANGUAGE LambdaCase #-}
+
+{- Generate test suites.
+
+A restricted version of hspec; for an introduction see http://hspec.github.io/getting-started.html
+-}
+module Test
+  ( Spec,
+    runTest,
+    testMain,
+
+    -- * Structure
+    describe,
+    it,
+
+    -- * Expectations
+    Expectation,
+    testOk,
+    testErr,
+    shouldBe,
+    shouldNotBe,
+    shouldSatisfy,
+    shouldNotSatisfy,
+
+    -- * Setup & Teardown (hooks http://hspec.github.io/writing-specs.html#using-hooks)
+    before,
+    before_,
+    beforeWith,
+    beforeAll,
+    beforeAll_,
+    beforeAllWith,
+    after,
+    after_,
+    afterAll,
+    afterAll_,
+    around,
+    around_,
+    aroundWith,
+    aroundAll,
+    aroundAllWith,
+
+    -- * Common helpful predicates (use with 'shouldSatisfy')
+    isRight,
+    isLeft,
+
+    -- * Pretty printing of errors
+    errColored,
+    module Pretty,
+  )
+where
+
+-- export more expectations if needed
+
+import Data.Either
+  ( isLeft,
+    isRight,
+  )
+import Pretty
+import Test.Hspec
+  ( Expectation,
+    HasCallStack,
+    Spec,
+    after,
+    afterAll,
+    afterAll_,
+    after_,
+    around,
+    aroundAll,
+    aroundAllWith,
+    aroundWith,
+    around_,
+    before,
+    beforeAll,
+    beforeAllWith,
+    beforeAll_,
+    beforeWith,
+    before_,
+    describe,
+    hspec,
+    it,
+  )
+import Test.Hspec.Expectations.Pretty
+  ( expectationFailure,
+    shouldBe,
+    shouldNotBe,
+    shouldNotSatisfy,
+    shouldSatisfy,
+  )
+
+-- | Run a test directly (e.g. from the repl)
+runTest :: Spec -> IO ()
+runTest = hspec
+
+-- | Run a test suite
+testMain ::
+  -- | Name of the test suite
+  String ->
+  -- | The tests in this test module
+  Spec ->
+  IO ()
+testMain testSuiteName tests = hspec $ describe testSuiteName tests
+
+-- | An expectation that always succeeds (marks the test as passed)
+testOk :: Expectation
+testOk = pure ()
+
+-- | Abort the test with an error message.
+-- If you want to pretty-print Haskell values in the failure message, use `errColored`.
+testErr :: HasCallStack => String -> Expectation
+testErr = expectationFailure
+
+-- | Display a list of 'Err's as a colored error message
+-- and abort the test.
+errColored :: [Pretty.Err] -> Expectation
+errColored = testErr . Pretty.prettyErrs
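+
+-- A minimal example suite (the module under test and @parse@ are made up):
+--
+-- > main :: IO ()
+-- > main = testMain "MyModule" $ do
+-- >   describe "parsing" $ do
+-- >     it "accepts valid input" $
+-- >       parse "42" `shouldBe` Right 42
+-- >     it "rejects garbage" $
+-- >       parse "xyz" `shouldSatisfy` isLeft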
diff --git a/users/Profpatsch/my-prelude/src/Tool.hs b/users/Profpatsch/my-prelude/src/Tool.hs
new file mode 100644
index 000000000000..066f68bbe0df
--- /dev/null
+++ b/users/Profpatsch/my-prelude/src/Tool.hs
@@ -0,0 +1,75 @@
+{-# LANGUAGE QuasiQuotes #-}
+
+module Tool where
+
+import Data.Error.Tree
+import Label
+import PossehlAnalyticsPrelude
+import System.Environment qualified as Env
+import System.Exit qualified as Exit
+import System.FilePath ((</>))
+import System.Posix qualified as Posix
+import ValidationParseT
+
+data Tool = Tool
+  { -- | absolute path to the executable
+    toolPath :: FilePath
+  }
+  deriving stock (Show)
+
+-- | Reads all tools from the @toolsEnvVar@ variable or aborts.
+readTools ::
+  Label "toolsEnvVar" Text ->
+  -- | Parser for Tools we bring with us at build time.
+  --
+  -- These are executables that we need available, and that we have to ship with the distribution of @pa-cli@.
+  ToolParserT IO tools ->
+  IO tools
+readTools env toolParser =
+  Env.lookupEnv (env.toolsEnvVar & textToString) >>= \case
+    Nothing -> do
+      Exit.die [fmt|Please set {env.toolsEnvVar} to a directory with all tools we need (see `Tools` in the code).|]
+    Just toolsDir ->
+      (Posix.fileExist toolsDir & ifTrueOrErr () [fmt|{env.toolsEnvVar} directory does not exist: {toolsDir}|])
+        & thenValidate
+          ( \() ->
+              (Posix.getFileStatus toolsDir <&> Posix.isDirectory)
+                & ifTrueOrErr () [fmt|{env.toolsEnvVar} does not point to a directory: {toolsDir}|]
+          )
+        & thenValidate
+          (\() -> toolParser.unToolParser toolsDir)
+        <&> first (errorTree [fmt|Could not find all tools in {env.toolsEnvVar}|])
+        >>= \case
+          Failure err -> Exit.die (err & prettyErrorTree & textToString)
+          Success t -> pure t
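+
+-- A usage sketch (the environment variable name is an example); this is how a
+-- single tool would be looked up without a dedicated parser like
+-- @postgresToolsParser@:
+--
+-- > tools <-
+-- >   readTools
+-- >     (label @"toolsEnvVar" "MY_TOOLS")
+-- >     (label @"pgFormat" <$> readTool "pg_format")
+-- > -- tools.pgFormat.toolPath now points at an existing, executable pg_format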
+
+newtype ToolParserT m a = ToolParserT
+  { unToolParser ::
+      FilePath ->
+      m (Validation (NonEmpty Error) a)
+  }
+  deriving
+    (Functor, Applicative)
+    via (ValidationParseT FilePath m)
+
+-- | Given the name of a tool executable, check that it exists in the tool directory and is executable, and return its full path.
+readTool :: Text -> ToolParserT IO Tool
+readTool exeName = ToolParserT $ \toolDir -> do
+  let toolPath :: FilePath = toolDir </> (exeName & textToString)
+  let read' = True
+  let write = False
+  let exec = True
+  Posix.fileExist toolPath
+    & ifTrueOrErr () [fmt|Tool does not exist: {toolPath}|]
+    & thenValidate
+      ( \() ->
+          Posix.fileAccess toolPath read' write exec
+            & ifTrueOrErr (Tool {..}) [fmt|Tool is not readable/executable: {toolPath}|]
+      )
+
+-- | Helper: turn a boolean check into a 'Validation', failing with the given error message.
+ifTrueOrErr :: Functor f => a -> Text -> f Bool -> f (Validation (NonEmpty Error) a)
+ifTrueOrErr true err io =
+  io <&> \case
+    True -> Success true
+    False -> Failure $ singleton $ newError err
diff --git a/users/Profpatsch/my-prelude/src/ValidationParseT.hs b/users/Profpatsch/my-prelude/src/ValidationParseT.hs
new file mode 100644
index 000000000000..593b7ebf3918
--- /dev/null
+++ b/users/Profpatsch/my-prelude/src/ValidationParseT.hs
@@ -0,0 +1,16 @@
+module ValidationParseT where
+
+import Control.Selective (Selective)
+import Data.Functor.Compose (Compose (..))
+import PossehlAnalyticsPrelude
+
+-- | A simple way to create an Applicative parser that parses from some environment.
+--
+-- Use with DerivingVia. Grep codebase for examples.
+newtype ValidationParseT env m a = ValidationParseT {unValidationParseT :: env -> m (Validation (NonEmpty Error) a)}
+  deriving
+    (Functor, Applicative, Selective)
+    via ( Compose
+            ((->) env)
+            (Compose m (Validation (NonEmpty Error)))
+        )
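+
+-- For example, @ToolParserT@ in @Tool.hs@ (same package) gets its 'Functor' and
+-- 'Applicative' instances this way:
+--
+-- > newtype ToolParserT m a = ToolParserT
+-- >   { unToolParser ::
+-- >       FilePath ->
+-- >       m (Validation (NonEmpty Error) a)
+-- >   }
+-- >   deriving
+-- >     (Functor, Applicative)
+-- >     via (ValidationParseT FilePath m)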