{-# LANGUAGE DerivingStrategies    #-}
{-# LANGUAGE ExplicitNamespaces    #-}
{-# LANGUAGE FlexibleContexts      #-}
{-# LANGUAGE FlexibleInstances     #-}
{-# LANGUAGE LambdaCase            #-}
{-# LANGUAGE OverloadedLists       #-}
{-# LANGUAGE PartialTypeSignatures #-}
{-# LANGUAGE RecordWildCards       #-}
{-# LANGUAGE TypeApplications      #-}


-- | Implementation of the API. This module is the main entry point of the program.
module Server (application) where
import           Control.Monad               (forM, forever, unless, void, when)
import           Control.Monad.Catch         (handle)
import           Control.Monad.Extra         (ifM, maybeM, unlessM, whenM)
import           Control.Monad.IO.Class      (MonadIO (liftIO))
import           Control.Monad.Logger        (LoggingT, logWarnN)
import           Control.Monad.Trans         (lift)
import qualified Data.Aeson                  as A
import qualified Data.ByteString.Char8       as C8
import           Data.Coerce                 (coerce)
import           Data.Functor                ((<&>))
import qualified Data.Map                    as M
import           Data.Pool                   (Pool)
import           Data.Proxy                  (Proxy (Proxy))
import           Data.Swagger                (toSchema)
import           Data.Text                   (Text)
import           Data.Text.Encoding          (decodeUtf8)
import           Data.Time                   (NominalDiffTime,
                                              UTCTime (utctDay), addUTCTime,
                                              diffUTCTime, getCurrentTime,
                                              nominalDay)
import qualified Data.Vector                 as V
import           Database.Persist
import           Database.Persist.Postgresql (SqlBackend, runMigration)
import           Fmt                         ((+|), (|+))
import qualified Network.WebSockets          as WS
import           Servant                     (Application,
                                              ServerError (errBody), err401,
                                              err404, serve, throwError)
import           Servant.API                 (NoContent (..), (:<|>) (..))
import           Servant.Server              (Handler, hoistServer)
import           Servant.Swagger             (toSwagger)

import           API
import           GTFS
import           Persist
import           Server.ControlRoom
import           Server.GTFS_RT              (gtfsRealtimeServer)
import           Server.Util                 (Service, ServiceM, runService,
                                              sendErrorMsg)
import           Yesod                       (toWaiAppPlain)

import           Conferer                    (fetch, mkConfig)
import           Extrapolation               (Extrapolator (..),
                                              LinearExtrapolator (..))
import           System.IO.Unsafe

import           Config                      (ServerConfig)
import           Data.ByteString             (ByteString)
import           Data.ByteString.Lazy        (toStrict)
import           Prometheus
import           Prometheus.Metric.GHC

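-- | Build the WAI 'Application' serving 'CompleteAPI': runs the database
-- migration, registers the prometheus metrics, and wires up the handlers.
-- A rough usage sketch (loadGtfs and mkDbPool are placeholders, not part of
-- this codebase):
--
-- > main = do
-- >   gtfs <- loadGtfs "gtfs.zip"
-- >   pool <- mkDbPool
-- >   app  <- application gtfs pool
-- >   Warp.run 8080 app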
application :: GTFS -> Pool SqlBackend -> IO Application
application gtfs dbpool = do
  doMigration dbpool
  metrics <- Metrics
    <$> register (gauge (Info "ws_connections" "Number of WS Connections"))
  void $ register ghcMetrics
  pure $ serve (Proxy @CompleteAPI) $ hoistServer (Proxy @CompleteAPI) runService $ server gtfs metrics dbpool

doMigration :: Pool SqlBackend -> IO ()
doMigration pool = runSql pool $
  -- TODO: before migrating, check that the uuid-ossp extension is enabled,
  -- i.e. that SELECT * FROM pg_extension WHERE extname = 'uuid-ossp';
  -- returns a non-empty result (see the sketch below)
  runMigration migrateAll
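
-- a possible sketch of the check mentioned in the TODO above (hypothetical,
-- not called anywhere; it assumes persistent's rawSql and the 'Single' row
-- wrapper):
--
-- > uuidOsspEnabled :: MonadIO m => ReaderT SqlBackend m Bool
-- > uuidOsspEnabled = do
-- >   rows <- rawSql "SELECT 1 FROM pg_extension WHERE extname = 'uuid-ossp'" []
-- >   pure (not (null (rows :: [Single Int])))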

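-- | Assemble the handlers for 'CompleteAPI': a swagger description of the
-- API, the tracker API itself (including the GTFS realtime feed), prometheus
-- metrics, and the control room web interface (a Yesod app embedded via its
-- plain WAI 'Application').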
server :: GTFS -> Metrics -> Pool SqlBackend -> Service CompleteAPI
server gtfs@GTFS{..} Metrics{..} dbpool = handleDebugAPI
  :<|> (handleStations :<|> handleTimetable :<|> handleTrip
        :<|> handleRegister :<|> handleTrainPing (throwError err401) :<|> handleWS
        :<|> handleDebugState :<|> handleDebugTrain :<|> handleDebugRegister
        :<|> gtfsRealtimeServer gtfs dbpool) :<|> metrics
  :<|> pure (unsafePerformIO (toWaiAppPlain (ControlRoom gtfs dbpool)))
  where handleStations = pure stations
        handleTimetable station maybeDay = do
          -- TODO: resolve "overlay" trips (perhaps just additional CalendarDates?)
          day <- liftIO $ maybeM (getCurrentTime <&> utctDay) pure (pure maybeDay)
          pure
            -- don't send trips ending at this station
            . M.filter ((/=) station . stationId . stopStation . V.last . tripStops)
            $ tripsOnDay gtfs day
        handleTrip trip = case M.lookup trip trips of
          Just res -> pure res
          Nothing  -> throwError err404
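        -- register a live run of a trip: rejects trips that do not run today,
        -- otherwise hands out a token valid for 'validityPeriod'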
        handleRegister tripID RegisterJson{..} = do
          today <- liftIO getCurrentTime <&> utctDay
          unless (runsOnDay gtfs tripID today)
            $ sendErrorMsg "this trip does not run today."
          expires <- liftIO $ getCurrentTime <&> addUTCTime validityPeriod
          RunningKey token <- runSql dbpool $ insert (Running expires False tripID today Nothing registerAgent)
          pure token
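        -- like handleRegister, but for an arbitrary day and without the
        -- runs-on-day check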
        handleDebugRegister tripID day = do
          expires <- liftIO $ getCurrentTime <&> addUTCTime validityPeriod
          RunningKey token <- runSql dbpool $ insert (Running expires False tripID day Nothing "debug key")
          pure token
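        -- accept a position report for a running trip. Invalid tokens trigger
        -- onError; otherwise the ping is stored, and a new 'TrainAnchor' is
        -- extrapolated from it and stored whenever the delay estimate changed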
        handleTrainPing onError ping = isTokenValid dbpool (coerce $ trainPingToken ping) >>= \case
          Nothing -> do
            onError
            pure Nothing
          Just running@Running{..} -> do
            let anchor = extrapolateAnchorFromPing LinearExtrapolator gtfs running ping
            -- TODO: are these always inserted in order?
            runSql dbpool $ do
              insert_ ping
              lastAnchor <- selectFirst
                [TrainAnchorTrip ==. runningTrip, TrainAnchorDay ==. runningDay]
                [Desc TrainAnchorWhen]
              -- only insert new estimates if they've actually changed anything
              when (fmap (trainAnchorDelay . entityVal) lastAnchor /= Just (trainAnchorDelay anchor))
                $ void $ insert anchor
            pure (Just anchor)
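        -- websocket endpoint: each message is decoded as a train ping and fed
        -- through handleTrainPing; the gauge tracks open connections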
        handleWS conn = do
          liftIO $ WS.forkPingThread conn 30
          incGauge metricsWSGauge
          handle (\(_ :: WS.ConnectionException) -> decGauge metricsWSGauge) $ forever $ do
            msg <- liftIO $ WS.receiveData conn
            case A.eitherDecode msg of
              Left err -> do
                logWarnN ("stray websocket message: "+|show msg|+" (could not decode: "+|err|+")")
                liftIO $ WS.sendClose conn (C8.pack err)
                decGauge metricsWSGauge
              Right ping ->
                -- if invalid token, send a "polite" close request. Note that the client may
                -- ignore this and continue sending messages, which will continue to be handled.
                liftIO $ handleTrainPing (WS.sendClose conn ("" :: ByteString)) ping >>= \case
                  Just anchor -> WS.sendTextData conn (A.encode anchor)
                  Nothing     -> pure ()
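        -- debug endpoint: all currently registered (unblocked, unexpired)
        -- tokens together with the pings recorded for them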
        handleDebugState = do
          now <- liftIO getCurrentTime
          runSql dbpool $ do
           running <- selectList [RunningBlocked ==. False, RunningExpires >=. now] []
           pairs <- forM running $ \(Entity token@(RunningKey uuid) _) -> do
             entities <- selectList [TrainPingToken ==. token] []
             pure (uuid, fmap entityVal entities)
           pure (M.fromList pairs)
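        -- debug endpoint: all pings recorded for a trip on a given day,
        -- across all tokens registered for it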
        handleDebugTrain tripId day = do
          unless (runsOnDay gtfs tripId day)
            $ sendErrorMsg ("this trip does not run on "+|day|+".")
          runSql dbpool $ do
            tokens <- selectList [RunningTrip ==. tripId, RunningDay ==. day] []
            pings <- forM tokens $ \(Entity token _) -> do
              selectList [TrainPingToken ==. token] [] <&> fmap entityVal
            pure (concat pings)
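        -- swagger description of the API, and prometheus metrics in text
        -- exposition format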
        handleDebugAPI = pure $ toSwagger (Proxy @API)
        metrics = exportMetricsAsText <&> (decodeUtf8 . toStrict)


-- TODO: proper debug logging for expired tokens
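-- | Look up the 'Running' entry for a token; Nothing if the token is unknown,
-- the run is blocked, or the token has expired.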
isTokenValid :: MonadIO m => Pool SqlBackend -> Token -> m (Maybe Running)
isTokenValid dbpool token = runSql dbpool $ get (coerce token) >>= \case
  Just trip | not (runningBlocked trip) -> do
    ifM (hasExpired (runningExpires trip))
      (pure Nothing)
      (pure (Just trip))
  _ -> pure Nothing

hasExpired :: MonadIO m => UTCTime -> m Bool
hasExpired limit = do
  now <- liftIO getCurrentTime
  pure (now > limit)

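-- | how long a registration token remains valid after it has been handed out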
validityPeriod :: NominalDiffTime
validityPeriod = nominalDay