author    Vincent Ambo <tazjin@google.com>  2019-08-25 19:15 +0100
committer Vincent Ambo <tazjin@google.com>  2019-08-25 19:15 +0100
commit    1747df418e8cdd5d9de1a643354d7ac28591ed14 (patch)
tree      d2d2ff27374bd40f5c3bcdf366343b1b76297a1c /services/tazblog/src/BlogStore.hs
parent    2fdc87222871e6b68ba2d7ee1c634cfa0d75c572 (diff)
chore(tazblog): Format source files with ormolu  (r/60)
Ormolu's formatting is quite annoying (it uses a lot of unnecessary
vertical space and doesn't align elements), but I can't be bothered to
do manual formatting - especially because whatever formatting
haskell-mode in Emacs produces seems to depend on an opaque state
machine or something.
Diffstat (limited to 'services/tazblog/src/BlogStore.hs')
-rw-r--r--  services/tazblog/src/BlogStore.hs | 128
1 file changed, 71 insertions(+), 57 deletions(-)
diff --git a/services/tazblog/src/BlogStore.hs b/services/tazblog/src/BlogStore.hs
index a91db060b8..0472fef56b 100644
--- a/services/tazblog/src/BlogStore.hs
+++ b/services/tazblog/src/BlogStore.hs
@@ -1,3 +1,7 @@
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE OverloadedStrings #-}
+
 -- |This module implements fetching of individual blog entries from
 -- DNS. Yes, you read that correctly.
 --
@@ -15,49 +19,47 @@
 --
 -- This module implements logic for assembling a post out of these
 -- fragments and caching it based on the TTL of its `_meta` record.
-{-# LANGUAGE OverloadedStrings #-}
-{-# LANGUAGE LambdaCase #-}
-{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+module BlogStore
+  ( BlogCache,
+    EntryId (..),
+    Entry (..),
+    withCache,
+    listEntries,
+    getEntry,
+    show'
+    )
+where
 
-module BlogStore(
-  BlogCache,
-  EntryId(..),
-  Entry(..),
-  withCache,
-  listEntries,
-  getEntry,
-  show',
-) where
-
-import Data.Aeson ((.:), FromJSON(..), Value(Object), decodeStrict)
 import Control.Applicative ((<$>), (<*>))
 import Control.Monad (mzero)
 import Control.Monad.IO.Class (MonadIO, liftIO)
+import Data.Aeson ((.:), FromJSON (..), Value (Object), decodeStrict)
+import Data.ByteString.Base64 (decodeLenient)
+import Data.Either (fromRight)
+import Data.List (sortBy)
 import Data.Text as T (Text, concat, pack)
-import Data.Text.Encoding (encodeUtf8, decodeUtf8')
+import Data.Text.Encoding (decodeUtf8', encodeUtf8)
 import Data.Time (Day)
 import Locales (BlogLang (..))
-import Network.DNS (lookupTXT, DNSError)
+import Network.DNS (DNSError, lookupTXT)
 import qualified Network.DNS.Resolver as R
-import Data.ByteString.Base64 (decodeLenient)
-import Data.List (sortBy)
-import Data.Either (fromRight)
 
 newtype EntryId = EntryId {unEntryId :: Integer}
   deriving (Eq, Ord, FromJSON)
 
 instance Show EntryId where
+
   show = show . unEntryId
 
 data Entry
   = Entry
       { entryId :: EntryId,
-        lang    :: BlogLang,
-        author  :: Text,
-        title   :: Text,
-        btext   :: Text,
-        mtext   :: Text,
-        edate   :: Day
+        lang :: BlogLang,
+        author :: Text,
+        title :: Text,
+        btext :: Text,
+        mtext :: Text,
+        edate :: Day
         }
   deriving (Eq, Ord, Show)
 
@@ -80,20 +82,22 @@ type Count = Int
 
 withCache :: Text -> (BlogCache -> IO a) -> IO a
 withCache zone f = do
-  let conf = R.defaultResolvConf { R.resolvCache = Just R.defaultCacheConf
-                                 , R.resolvConcurrent = True }
+  let conf =
+        R.defaultResolvConf
+          { R.resolvCache = Just R.defaultCacheConf,
+            R.resolvConcurrent = True
+            }
   seed <- R.makeResolvSeed conf
   R.withResolver seed $ (\r -> f $ BlogCache r zone)
 
-
 listEntries :: MonadIO m => BlogCache -> Offset -> Count -> m [Entry]
 listEntries cache offset count = liftIO $ do
   posts <- postList cache
   entries <- mapM (entryFromDNS cache) $ take count $ drop offset $ fromRight (error "no posts") posts
-
   -- TODO: maybe don't just drop broken entries
   return
-    $ fromRight (error "no entries") $ sequence entries
+    $ fromRight (error "no entries")
+    $ sequence entries
 
 getEntry :: MonadIO m => BlogCache -> EntryId -> m (Maybe Entry)
 getEntry cache eid = liftIO $ (entryFromDNS cache eid) >>= \case
@@ -104,7 +108,6 @@ show' :: Show a => a -> Text
 show' = pack . show
 
 -- * DNS fetching implementation
-
 type Chunk = Integer
 
 -- | Represents the metadata stored for each post in the _meta record.
@@ -112,23 +115,28 @@ data Meta = Meta Integer Text Day
   deriving (Show)
 
 instance FromJSON Meta where
-  parseJSON (Object v) = Meta <$>
-    v .: "c" <*>
-    v .: "t" <*>
-    v .: "d"
+
+  parseJSON (Object v) =
+    Meta
+      <$> v
+      .: "c"
+      <*> v
+      .: "t"
+      <*> v
+      .: "d"
   parseJSON _ = mzero
 
 entryMetadata :: BlogCache -> EntryId -> IO (Either StoreError Meta)
 entryMetadata (BlogCache r z) (EntryId eid) =
   let domain = encodeUtf8 ("_meta." <> show' eid <> "." <> z)
       record = lookupTXT r domain
-      toMeta rrdata = case decodeStrict $ decodeLenient rrdata  of
+      toMeta rrdata = case decodeStrict $ decodeLenient rrdata of
         Nothing -> Left InvalidMetadata
-        Just m  -> Right m
-  in record >>= \case
-    (Left err) -> return $ Left $ DNS err
-    (Right [ bs ]) -> return $ toMeta bs
-    _ -> return $ Left InvalidMetadata
+        Just m -> Right m
+   in record >>= \case
+        (Left err) -> return $ Left $ DNS err
+        (Right [bs]) -> return $ toMeta bs
+        _ -> return $ Left InvalidMetadata
 
 entryChunk :: BlogCache -> EntryId -> Chunk -> IO (Either StoreError Text)
 entryChunk (BlogCache r z) (EntryId eid) c =
@@ -137,14 +145,14 @@ entryChunk (BlogCache r z) (EntryId eid) c =
       toChunk rrdata = case decodeUtf8' $ decodeLenient rrdata of
         Left _ -> Left InvalidChunk
         Right chunk -> Right chunk
-  in record >>= \case
-    (Left err) -> return $ Left $ DNS err
-    (Right [ bs ]) -> return $ toChunk bs
-    _ -> return $ Left InvalidChunk
+   in record >>= \case
+        (Left err) -> return $ Left $ DNS err
+        (Right [bs]) -> return $ toChunk bs
+        _ -> return $ Left InvalidChunk
 
 fetchAssembleChunks :: BlogCache -> EntryId -> Meta -> IO (Either StoreError Text)
 fetchAssembleChunks cache eid (Meta n _ _) = do
-  chunks <- mapM (entryChunk cache eid) [0..(n - 1)]
+  chunks <- mapM (entryChunk cache eid) [0 .. (n - 1)]
   return $ either Left (Right . T.concat) $ sequence chunks
 
 entryFromDNS :: BlogCache -> EntryId -> IO (Either StoreError Entry)
@@ -155,19 +163,25 @@ entryFromDNS cache eid = do
     Right meta -> do
       chunks <- fetchAssembleChunks cache eid meta
       let (Meta _ t d) = meta
-      return $ either Left (\text -> Right $ Entry {
-                               entryId = eid,
-                               lang = EN,
-                               author = "tazjin",
-                               title = t,
-                               btext = text,
-                               mtext = "",
-                               edate = d}) chunks
+      return
+        $ either Left
+            ( \text -> Right $ Entry
+                { entryId = eid,
+                  lang = EN,
+                  author = "tazjin",
+                  title = t,
+                  btext = text,
+                  mtext = "",
+                  edate = d
+                  }
+              )
+            chunks
 
 postList :: BlogCache -> IO (Either StoreError [EntryId])
 postList (BlogCache r z) =
   let domain = encodeUtf8 ("_posts." <> z)
       record = lookupTXT r domain
-      toPosts = fmap (sortBy (flip compare)) . sequence .
-        map (\r -> maybe (Left InvalidPosts) Right (decodeStrict r))
-  in record >>= return . either (Left . DNS) toPosts
+      toPosts =
+        fmap (sortBy (flip compare)) . sequence
+          . map (\r -> maybe (Left InvalidPosts) Right (decodeStrict r))
+   in record >>= return . either (Left . DNS) toPosts
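
For readers skimming this change without the rest of the tree: the module comment above describes posts stored as DNS TXT records, with a `_meta` record per entry (JSON carrying chunk count "c", title "t" and date "d") plus numbered chunk records holding the body text. A minimal sketch of a caller of the exported API follows; it is not part of this commit, and the zone name and entry id are invented placeholders.

{-# LANGUAGE OverloadedStrings #-}

-- Hypothetical consumer of BlogStore. "blog.example.com" and the
-- entry id below are placeholders, not values from this repository.
module Main (main) where

import BlogStore (Entry (..), EntryId (..), getEntry, listEntries, withCache)
import qualified Data.Text.IO as T

main :: IO ()
main = withCache "blog.example.com" $ \cache -> do
  -- Newest five posts: ids come from the _posts record, and each
  -- entry is assembled from its _meta record plus its chunk records.
  entries <- listEntries cache 0 5
  mapM_ (T.putStrLn . title) entries
  -- A single post by id; Nothing means its records were missing or
  -- failed to decode.
  result <- getEntry cache (EntryId 1234567890)
  case result of
    Just e -> T.putStrLn (title e)
    Nothing -> putStrLn "entry not found"

As the withCache body above shows, the resolver is built with caching and concurrent lookups enabled (resolvCache and resolvConcurrent), and the resulting BlogCache is handed to the continuation for the duration of the call.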