Dataset schema: code (string, 2-1.05M chars), repo_name (string, 5-101 chars), path (string, 4-991 chars), language (3 classes), license (5 classes), size (int64, 2-1.05M).
{-# LANGUAGE FlexibleContexts, MultiParamTypeClasses, FlexibleInstances #-} module DSL.GraphDSL ( -- Types Equality (..), Implication (..), InEquality (..), Constraint, Requirement (..), Graph, CLD, graph, constraints, Sign (..), TimeFrame (..), -- Graph syntax GraphSyntax, mkNode, (>+>), (>->), (>++>), (>-->), (>?>), (>~+>), (>~->), (>~++>), (>~-->), (>~?>), link, constrain, -- Compilation compile, compileGraph, compileConstraints, ignoreTime, -- Printing and visualization prettify, prettyPrint, preview ) where -- | Imports import Control.Monad.State import DSL.ConstraintDSL import Data.Graph.Inductive as G hiding (mkNode, Graph) import Data.GraphViz hiding (Graph) import qualified Data.Text.Lazy as Txt hiding (head) -- | Gives an identifier for a node type Name = Node -- | Gives the sign of influence {+, ++, +?, -, --, -?, 0, ?} data Sign = P | PP | PQ | M | MM | MQ | Z | Q deriving (Ord, Eq) -- | Convert a sign to a string instance Show Sign where show P = "+" show PP = "++" show PQ = "+?" show M = "-" show MM = "--" show MQ = "-?" show Z = "0" show Q = "?" -- | Label a sign instance Labellable Sign where toLabelValue = textLabelValue . Txt.pack . show -- | Label a node instance Labellable (String, Maybe Node) where toLabelValue (s, _) = textLabelValue . Txt.pack $ s -- | A type for requirements data Requirement = S Sign | ST (Sign, TimeFrame) deriving (Show) -- | We need to be able to lift Signs into Requirements instance Lifts Sign Requirement where lift = S -- | We need to be able to lift pairs of Signs in to Requirements instance Lifts (Sign, TimeFrame) Requirement where lift = ST -- | A type for constraints type Constraint = ConstraintType Name Requirement -- | A Time interval data TimeFrame = Im | Future deriving (Ord, Eq, Show) -- | Graphs type Graph = Gr (String, Maybe Node) Sign -- | A CLD is the graph, the signs associated with the edges and the constraints (and the names of the nodes) data CLDG n e = CLDG {graph :: Gr n e, constraints :: [Constraint]} deriving (Show) -- | A CLD is a specialiced general cld....... 
type CLD = CLDG (String, Maybe Node) Sign -- | The monadic syntax of graphs type GraphSyntax a = State (CLDG String (Sign, TimeFrame)) a -- fixity infixl >+> infixl >++> infixl >-> infixl >--> infixl >?> infixl >~+> infixl >~++> infixl >~-> infixl >~--> infixl >~?> -- | Create a new node mkNode :: String -> GraphSyntax Name mkNode s = do cld <- get let gr = graph cld let i = head $ newNodes 1 gr put cld { graph = insNode (i, s) gr } return i -- | Create a new edge (>+>) = makeEdge P Im (>->) = makeEdge M Im (>?>) = makeEdge Q Im (>++>) = makeEdge PP Im (>-->) = makeEdge MM Im (>~+>) = makeEdge P Future (>~->) = makeEdge M Future (>~?>) = makeEdge Q Future (>~++>) = makeEdge PP Future (>~-->) = makeEdge MM Future -- | Factor out the commonality in >x> makeEdge :: Sign -> TimeFrame -> GraphSyntax Name -> Name -> GraphSyntax Name makeEdge s t g w = do v <- g cld <- get put $ cld { graph = insEdge (v, w, (s, t)) (graph cld) } return w -- | Add a constraint constrain :: (IsConstraint Name Requirement c) => c -> GraphSyntax () constrain c = do cld <- get put $ cld { constraints = toConstraint c : constraints cld } -- | Syntactic sugar link :: a -> GraphSyntax a link = return -- | The initial state initialState :: CLDG String (Sign, TimeFrame) initialState = CLDG G.empty [] -- | Compile the graph compile :: GraphSyntax a -> CLD compile gs = CLDG (nfilter (\n -> 0 /= (length (neighbors g n))) g) constrs where -- General CLD cldg = execState gs initialState -- Posed constraints constr = constraints cldg -- compute temporal constraints listConstrTemporal = [ (n, (s, t)) | (Equality n (ST (s, t))) <- constr] constrs = filter removeTemporal constr ++ [ if futureEdge then toConstraint $ (n+(if t == Future then minNode else 0)) := s else toConstraint $ n := s | (n, (s, t)) <- listConstrTemporal ] removeTemporal (Equality _ (ST _)) = False removeTemporal _ = True -- The raw CLDG graph gra = graph cldg -- The raw CLDG extended with parents, but without time gra' = nmap (\s -> (s, Nothing)) $ emap fst gra -- The minimum value for the new node minNode = head $ newNodes 1 gra -- Is there an edge to the near future futureEdge = any (\(_, _, (_, t)) -> t /= Im) $ labEdges gra newNs = map (\(n, a) -> (n+minNode, (a++"'", Just n))) (labNodes gra) newEs = map (\(sr, si, (s, t)) -> if t == Im then (sr+minNode, si+minNode, (s, Im)) else (sr, si+minNode, (s, Im)) ) (labEdges gra) weirdGraph = foldl (flip insEdge) (foldl (flip insNode) (nmap (\s -> (s, Nothing)) gra) newNs) newEs g = if futureEdge then emap fst $ delEdges (map (\(a, b, _) -> (a, b)) $ filter (\(_, _, (x, y)) -> y /= Im) $ labEdges weirdGraph ) weirdGraph else gra' -- | Extract a graph from a syntax compileGraph :: GraphSyntax a -> Graph compileGraph = graph . compile -- | Extract the constraints from a syntax compileConstraints :: GraphSyntax a -> [Constraint] compileConstraints = constraints . compile -- | Ignore time in the CLD ignoreTime :: GraphSyntax a -> GraphSyntax () ignoreTime gs = do gs state <- get put $ state {graph = emap (\(s, _) -> (s, Im)) (graph state)}
GRACeFUL-project/GraphDSL
src/DSL/GraphDSL.hs
Haskell
bsd-3-clause
6,207
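A small usage sketch may help read the DSL.GraphDSL module above (GRACeFUL-project/GraphDSL). This snippet is not part of the dataset row; it assumes the module is importable as written, and the node names are invented for illustration. prettyPrint is taken from the module's re-exported printing helpers.

module GraphDSLDemo where

import DSL.GraphDSL

-- A tiny causal loop diagram: rain pushes flooding up, drainage pushes it down.
exampleGraph :: Graph
exampleGraph = compileGraph $ do
  rain     <- mkNode "rain"
  drainage <- mkNode "drainage"
  flooding <- mkNode "flooding"
  link rain     >+> flooding
  link drainage >-> flooding

main :: IO ()
main = prettyPrint exampleGraph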
-- | An algorithm for merging users' edits. Specifically, there's just one
-- function – 'merge' – and it simply does a three-way diff.
module Guide.Diff.Merge
(
  merge,
)
where

import Imports
import Guide.Diff.Tokenize
import qualified Data.Patch as PV
import qualified Data.Text as T

-- | An implementation of a 3-way diff and merge.
merge
  :: Text    -- ^ Original text
  -> Text    -- ^ Variant A (preferred)
  -> Text    -- ^ Variant B
  -> Text    -- ^ Merged text
merge (toVector . tokenize -> orig)
      (toVector . tokenize -> a)
      (toVector . tokenize -> b)
  = T.concat . toList $ PV.apply (pa <> pb') orig
  where
    -- 1. diff
    pa = PV.diff orig a
    pb = PV.diff orig b
    -- 2. merge
    (_, pb') = PV.transformWith PV.ours pa pb
aelve/guide
back/src/Guide/Diff/Merge.hs
Haskell
bsd-3-clause
770
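A quick illustration of how the Guide.Diff.Merge sample above behaves; this sketch is not from the aelve/guide repository. It assumes the module builds as shown and that Text literals are available via OverloadedStrings; the sample strings and the expected result are invented.

{-# LANGUAGE OverloadedStrings #-}
module MergeDemo where

import Data.Text (Text)
import qualified Data.Text.IO as T
import Guide.Diff.Merge (merge)

-- Two people edit the same sentence; variant A is preferred on conflicts.
demo :: Text
demo = merge
  "the quick brown fox"   -- original
  "the quick red fox"     -- variant A (preferred)
  "a quick brown fox"     -- variant B
-- Non-conflicting edits from both variants are expected to survive,
-- i.e. something like "a quick red fox".

main :: IO ()
main = T.putStrLn demo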
{-# LANGUAGE TemplateHaskell #-} module Database.Esqueleto.Join.TH where import qualified Data.List as List import Data.Maybe import Data.Monoid import Data.Tagged import qualified Database.Esqueleto as E import Language.Haskell.TH import Language.Haskell.TH.ExpandSyns import Database.Esqueleto.Join (<$$>) :: (Functor f, Functor g) => (a -> b) -> f (g a) -> f (g b) f <$$> a = (f <$>) <$> a mkJoins :: Q [Dec] mkJoins = fmap concat . mapM mkInstance . findPairings =<< mapM pluck =<< entityFieldInstances data Entity = Entity { eEntityType :: Tagged "Entity" Type -- ^ Type like @Student@ , eFields :: [EntityField] -- ^ Foreign key fields like @StudentTeacherId@ } deriving (Eq, Show) pluck :: Tagged "EntityField" Dec -> Q Entity pluck dec = Entity (entityType dec) . catMaybes <$> mapM fieldKeyConstructors (entityFieldConstructors dec) pairs :: [a] -> [(a, a)] pairs xs = (,) <$> xs <*> xs data InstanceEntity -- | One piece of @FieldPair@ declaration like a @Student@ in @Student@-@Teacher@ pair = InstanceEntity { ieEntityType :: Tagged "Entity" Type , ieFieldConstructor :: Tagged "FieldConstructor" Con , ieMaybeCon :: MaybeCon } deriving (Eq, Show) data Pair = Pair { left :: InstanceEntity , right :: InstanceEntity , joinType :: Tagged "JoinType" Type -- ^ The type that these two entities can join on like @TeacherId@ in a @Student@-@Teacher@ pair } deriving (Eq, Show) -- | Find pairs of entities with a unique way of joining findPairings :: [Entity] -> [Pair] findPairings xs = symmetrize . catMaybes $ uncurry handlePair <$> pairs xs where symmetrize x = List.nub $ (swap <$> x) <> x -- Make sure we can join in either order where swap Pair{..} = Pair right left joinType handlePair lEnt rEnt = case (cons (eEntityType lEnt) (eFields lEnt), cons (eEntityType lEnt) (eFields rEnt)) of ([(lFC, lMC)], [(rFC, rMC)]) | not (lMC == Present && rMC == Present) -- It doesn't make much sense for the primary key to be nullable -> Just (Pair (InstanceEntity (eEntityType lEnt) lFC lMC) (InstanceEntity (eEntityType rEnt) rFC rMC) (Tagged . AppT (ConT ''E.Key) . unTagged . eEntityType $ lEnt)) _ -> Nothing where cons :: Tagged "Entity" Type -> [EntityField] -> [(Tagged "FieldConstructor" Con, MaybeCon)] cons t = map (\EntityField{..} -> (efFieldConstructor, efMaybeCon)) . filter ((== unTagged t) . unTagged . efFieldOutType) mkInstance :: Pair -> Q [Dec] mkInstance Pair{..} = [d| instance FieldPair $(spliceTCon left) $(spliceTCon right) $(spliceMaybeCon left) $(spliceMaybeCon right) where type JoinKey $(spliceTCon left) $(spliceTCon right) = $(pure . unTagged $ joinType) pair = ( ($(singlize . ieMaybeCon $ left), $(spliceCon left)) , ($(singlize . ieMaybeCon $ right), $(spliceCon right)) ) |] where promote Present = PromotedT 'Present promote Absent = PromotedT 'Absent singlize Present = [|SPresent|] singlize Absent = [|SAbsent|] spliceMaybeCon = pure . promote . ieMaybeCon spliceTCon = pure . unTagged . ieEntityType spliceCon = mkCon . unTagged . 
ieFieldConstructor mkCon (NormalC name _) = conE name mkCon _ = error "Field key doesn't use a normal constructor" entityFieldInstances :: Q [Tagged "EntityField" Dec] entityFieldInstances = do FamilyI _ instances <- reify ''E.EntityField pure $ Tagged <$> instances entityType :: Tagged "EntityField" Dec -> Tagged "Entity" Type entityType (Tagged (DataInstD _ _ [ty, _] _ _)) = Tagged ty entityType _ = error "`EntityField` not returning `DataInstD`" entityFieldConstructors :: Tagged "EntityField" Dec -> [Tagged "ForAllFieldConstructor" Con] entityFieldConstructors (Tagged (DataInstD _ _ _ cons _)) = Tagged <$> cons entityFieldConstructors _ = error "`EntityField` not returning `DataInstD`" data EntityField = EntityField { efFieldOutType :: Tagged "FieldOutType" Type -- ^ In a field like @StudentTeacherId@, the @FieldOutType@ is @Teacher@ , efFieldConstructor :: Tagged "FieldConstructor" Con -- ^ A constructor like @StudentTeacherId@ , efMaybeCon :: MaybeCon -- ^ Does the @FieldConstructor@ return a type like @Maybe TeacherId@ or just @TeacherId@? } deriving (Eq, Show) fieldKeyConstructors :: Tagged "ForAllFieldConstructor" Con -> Q (Maybe EntityField) fieldKeyConstructors (Tagged con) = case con of (ForallC [] [AppT _equalityT ty] con') -> (uncurry (mkEntityField con') <$$>) . expandSyns' . extractEntityType =<< expandSyns ty _ -> pure Nothing where mkEntityField (Tagged -> efFieldConstructor) (Tagged -> efFieldOutType) efMaybeCon = EntityField{..} expandSyns' (Just (ty, con')) = Just . (, con') <$> expandSyns ty expandSyns' Nothing = pure Nothing extractEntityType (AppT (ConT k) ty) | k == ''E.Key = Just (ty, Absent) extractEntityType (AppT (ConT m) (AppT (ConT k) ty)) | m == ''Maybe && k == ''E.Key = Just (ty, Present) extractEntityType _ = Nothing
pseudonom/dovetail
src/Database/Esqueleto/Join/TH.hs
Haskell
bsd-3-clause
5,108
module HsenvMonadUtils (runInTmpDir) where

import System.Directory
import Util.IO
import HsenvMonad

runInTmpDir :: Hsenv a -> Hsenv a
runInTmpDir m = do
  tmp    <- liftIO getTemporaryDirectory
  tmpDir <- liftIO $ createTemporaryDirectory tmp "hsenv"
  oldCwd <- liftIO getCurrentDirectory
  liftIO $ setCurrentDirectory tmpDir
  let cleanup = do
        liftIO $ setCurrentDirectory oldCwd
        liftIO $ removeDirectoryRecursive tmpDir
  m `finally` cleanup
Paczesiowa/hsenv
src/HsenvMonadUtils.hs
Haskell
bsd-3-clause
462
{-# LANGUAGE RecordWildCards #-} module Anchor.Tokens.Server ( P.version, startServer, ServerState(..), module X, ) where import Control.Concurrent import Control.Concurrent.Async import Data.Pool import qualified Data.Streaming.Network as N import Database.PostgreSQL.Simple import qualified Network.Socket as S import Network.Wai.Handler.Warp hiding (Connection) import Pipes.Concurrent import Servant.Server import System.Log.Logger import qualified System.Remote.Monitoring as EKG import Anchor.Tokens.Server.API as X hiding (logName) import Anchor.Tokens.Server.Configuration as X import Anchor.Tokens.Server.Statistics as X import Anchor.Tokens.Server.Types as X import Paths_anchor_token_server as P -- * Server logName :: String logName = "Anchor.Tokens.Server" -- | Start the statistics-reporting thread. startStatistics :: ServerOptions -> Pool Connection -> GrantCounters -> IO (Output GrantEvent, IO ()) startStatistics ServerOptions{..} connPool counters = do debugM logName $ "Starting EKG" srv <- EKG.forkServer optStatsHost optStatsPort (output, input, seal) <- spawn' (bounded 50) registerOAuth2Metrics (EKG.serverMetricStore srv) connPool input counters let stop = do debugM logName $ "Stopping EKG" atomically seal killThread (EKG.serverThreadId srv) threadDelay 10000 debugM logName $ "Stopped EKG" return (output, stop) startServer :: ServerOptions -> IO (IO (Async ())) startServer serverOpts@ServerOptions{..} = do debugM logName $ "Opening API Socket" sock <- N.bindPortTCP optServicePort optServiceHost let createConn = connectPostgreSQL optDBString destroyConn conn = close conn stripes = 1 keep_alive = 10 num_conns = 20 serverPGConnPool <- createPool createConn destroyConn stripes keep_alive num_conns counters <- mkGrantCounters (serverEventSink, serverEventStop) <- startStatistics serverOpts serverPGConnPool counters let settings = setPort optServicePort $ setHost optServiceHost $ defaultSettings serverOAuth2Server = anchorOAuth2Server serverPGConnPool serverEventSink apiSrv <- async $ do debugM logName $ "Starting API Server" runSettingsSocket settings sock $ serve anchorOAuth2API (server ServerState{..}) let serverServiceStop = do debugM logName $ "Closing API Socket" S.close sock async $ do wait apiSrv debugM logName $ "Stopped API Server" return $ do serverEventStop destroyAllResources serverPGConnPool serverServiceStop
zerobuzz/anchor-token-server
lib/Anchor/Tokens/Server.hs
Haskell
bsd-3-clause
2,912
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveAnyClass #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE StandaloneDeriving #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TypeOperators #-} {-# LANGUAGE TypeSynonymInstances #-} module ClientProxyApi where import System.Random import Control.Monad.Trans.Except import Control.Monad.Trans.Resource hiding (register) import Control.Monad.IO.Class import Data.Aeson import Data.Aeson.TH import Data.Bson.Generic import GHC.Generics import Network.Wai hiding(Response) import Network.Wai.Handler.Warp import Network.Wai.Logger import Servant import Servant.API import Servant.Client import System.IO import System.Directory import System.Environment (getArgs, getProgName, lookupEnv) import System.Log.Formatter import System.Log.Handler (setFormatter) import System.Log.Handler.Simple import System.Log.Handler.Syslog import System.Log.Logger import Data.Bson.Generic import qualified Data.List as DL import Data.Maybe (catMaybes) import Data.Text (pack, unpack) import Data.Time.Clock (UTCTime, getCurrentTime) import Data.Time.Format (defaultTimeLocale, formatTime) import Control.Monad (when) import Network.HTTP.Client (newManager, defaultManagerSettings) import System.Process import LRUCache as C data File = File { fileName :: FilePath, fileContent :: String } deriving (Eq, Show, Generic) instance ToJSON File instance FromJSON File data Response = Response{ response :: String } deriving (Eq, Show, Generic) instance ToJSON Response instance FromJSON Response data User = User{ uusername :: String, upassword :: String, timeout :: String, token :: String } deriving (Eq, Show, Generic) instance ToJSON User instance FromJSON User instance ToBSON User instance FromBSON User data Signin = Signin{ susername :: String, spassword :: String } deriving (Eq, Show, Generic) instance ToJSON Signin instance FromJSON Signin instance ToBSON Signin instance FromBSON Signin type ApiHandler = ExceptT ServantErr IO serverport :: String serverport = "8080" serverhost :: String serverhost = "localhost" type AuthApi = "signin" :> ReqBody '[JSON] Signin :> Post '[JSON] User :<|> "register" :> ReqBody '[JSON] Signin :> Post '[JSON] Response :<|> "isvalid" :> ReqBody '[JSON] User :> Post '[JSON] Response :<|> "extend" :> ReqBody '[JSON] User :> Post '[JSON] Response authApi :: Proxy AuthApi authApi = Proxy signin :: Signin -> ClientM User register :: Signin -> ClientM Response isvalid :: User -> ClientM Response extend :: User -> ClientM Response signin :<|> register :<|> isvalid :<|> extend = client authApi signinQuery :: Signin -> ClientM User signinQuery signindetails = do signinquery <- signin signindetails return signinquery registerQuery :: Signin -> ClientM Response registerQuery registerdetails = do registerquery <- register registerdetails return registerquery isvalidQuery :: User -> ClientM Response isvalidQuery isvaliddetails = do isvalidquery <- isvalid isvaliddetails return isvalidquery extendQuery :: User -> ClientM Response extendQuery extenddetails = do extendquery <- extend extenddetails return extendquery type DirectoryApi = "open" :> Capture "fileName" String :> Get '[JSON] File :<|> "close" :> ReqBody '[JSON] File :> Post '[JSON] Response :<|> "allfiles" :> Get '[JSON] [String] directoryApi :: Proxy DirectoryApi directoryApi = Proxy open :: String -> ClientM File close :: File -> ClientM Response allfiles :: ClientM [String] open :<|> close :<|> allfiles = client 
directoryApi openQuery:: String -> ClientM File openQuery filename = do openquery <- open filename return openquery closeQuery:: File -> ClientM Response closeQuery file = do closequery <- close file return closequery type LockingApi = "lock" :> Capture "fileName" String :> Get '[JSON] Bool :<|> "unlock" :> Capture "fileName" String :> Get '[JSON] Bool :<|> "islocked" :> Capture "fileName" String :> Get '[JSON] Bool lockingApi :: Proxy LockingApi lockingApi = Proxy lock :: String -> ClientM Bool unlock :: String -> ClientM Bool islocked :: String -> ClientM Bool lock :<|> unlock :<|> islocked = client lockingApi lockQuery:: String -> ClientM Bool lockQuery fName = do lockquery <- lock fName return lockquery unlockQuery:: String -> ClientM Bool unlockQuery fName = do unlockquery <- unlock fName return unlockquery islockedQuery :: String -> ClientM Bool islockedQuery fName = do islockedquery <- islocked fName return islockedquery mainClient :: IO() mainClient = do createDirectoryIfMissing True ("localstorage/") setCurrentDirectory ("localstorage/") authpart authpart :: IO() authpart = do putStrLn $ "Enter one of the following commands: LOGIN/REGISTER" cmd <- getLine case cmd of "LOGIN" -> authlogin "REGISTER" -> authregister authlogin :: IO () authlogin = do putStrLn $ "Enter your username:" username <- getLine putStrLn $ "Enter your password" password <- getLine let user = (Signin username password) manager <- newManager defaultManagerSettings res <- runClientM (signinQuery user) (ClientEnv manager (BaseUrl Http "localhost" 8082 "")) case res of Left err -> do putStrLn $ "Error: " ++ show err authpart Right response -> do cache <- C.newHandle 5 mainloop response cache authregister :: IO () authregister = do putStrLn $ "Enter your details to make a new account" putStrLn $ "Enter your username:" username <- getLine putStrLn $ "Enter your password" password <- getLine let user = (Signin username password) manager <- newManager defaultManagerSettings res <- runClientM (registerQuery user) (ClientEnv manager (BaseUrl Http "localhost" 8082 "")) case res of Left err -> do putStrLn $ "Error: " ++ show err authpart Right response -> authpart mainloop :: User -> (C.Handle String String) -> IO() mainloop user cache = do putStrLn $ "Enter one of the following commands: FILES/UPLOAD/DOWNLOAD/CLOSE" cmd <- getLine case cmd of "FILES" -> displayFiles user cache "UPLOAD" -> uploadFile user cache "DOWNLOAD" -> downloadFile user cache "CLOSE" -> putStrLn $ "Closing service!" _ -> do putStrLn $ "Invalid Command. Try Again" mainloop user cache displayFiles :: User -> (C.Handle String String) -> IO() displayFiles user cache = do putStrLn "Fetching file list. Please wait." 
isTokenValid user manager <- newManager defaultManagerSettings res <- runClientM allfiles (ClientEnv manager (BaseUrl Http "localhost" 7008 "")) case res of Left err -> putStrLn $ "Error: " ++ show err Right response -> do extendToken user mapM putStrLn response mainloop user cache uploadFile :: User -> (C.Handle String String) -> IO() uploadFile user cache = do putStrLn "Please enter the name of the file to upload" fileName <- getLine let cmd = shell ("vim " ++ fileName) createProcess_ "vim" cmd putStrLn $ "Hit enter when youre finished" enter <- getLine fileContent <- readFile fileName let file = File fileName fileContent response <- putFile file user cache putStrLn $ "Response: " ++ show response mainloop user cache downloadFile :: User -> (C.Handle String String) -> IO() downloadFile user cache = do putStrLn "Please enter the name of the file to download" fileName <- getLine incache <- C.iolookup cache fileName case incache of (Nothing) -> getFile fileName user cache (Just v) -> do putStrLn $ "Cache hit" liftIO (writeFile (fileName) v) let cmd = shell ("vim " ++ fileName) createProcess_ "vim" cmd putStrLn $ "Would you like to re-upload this file? y/n" yesorno <- getLine putStrLn $ "Are you Sure? y/n" sure <- getLine fileContent <- readFile (fileName) case sure of ("y") -> do let file = File fileName fileContent putFile file user cache mainloop user cache (_) -> mainloop user cache mainloop user cache isTokenValid :: User -> IO() isTokenValid user = do manager <- newManager defaultManagerSettings res <- runClientM (isvalidQuery user) (ClientEnv manager (BaseUrl Http "localhost" 8082 "")) case res of Left err -> putStrLn $ "Error: " ++ show err Right responser -> do case (response responser) of "Token is Valid" -> return() _ -> do putStrLn $ "Session timeout, returning to login menu" authpart extendToken :: User -> IO() extendToken user = do manager <- newManager defaultManagerSettings res <- runClientM (extendQuery user) (ClientEnv manager (BaseUrl Http "localhost" 8082 "")) case res of Left err -> putStrLn $ "Error: " ++ show err Right response -> return() getFile:: String -> User -> (C.Handle String String) -> IO() getFile filename user cache = do isTokenValid user locksuccess <- lockFile filename case locksuccess of True -> do manager <- newManager defaultManagerSettings res <- runClientM (openQuery filename) (ClientEnv manager (BaseUrl Http "localhost" 7008 "")) case res of Left err -> putStrLn $ "Error: " ++ show err Right response -> do extendToken user C.ioinsert cache filename (fileContent response) liftIO (writeFile (fileName response) (fileContent response)) let cmd = shell ("vim " ++ (fileName response)) createProcess_ "vim" cmd putStrLn $ "Would you like to re-upload this file? y/n" yesorno <- getLine putStrLn $ "Please enter your answer again y/n" sure <- getLine case sure of ("y") -> do unlocker <- unlockFile filename fileContent <- readFile (fileName response) let file = File filename fileContent putFile file user cache mainloop user cache (_) -> do unlocker <- unlockFile filename mainloop user cache False -> putStrLn $ "Unable to lock file " ++ filename ++ ". Perhaps another user is using it." 
putFile:: File -> User-> (C.Handle String String) -> IO () putFile file user cache = do isTokenValid user locksuccess <- lockFile (fileName file) case locksuccess of True -> do manager <- newManager defaultManagerSettings res <- runClientM (closeQuery file) (ClientEnv manager (BaseUrl Http "localhost" 7008 "")) case res of Left err -> putStrLn $ "Error: " ++ show err Right responser -> do extendToken user unlocksuccess <- unlockFile (fileName file) case unlocksuccess of True -> do incache <- C.iolookup cache (fileName file) case incache of (Nothing) -> putStrLn $ (response responser) (Just v) -> C.ioinsert cache (fileName file) (fileContent file) False -> putStrLn $ "Failed to unlock file possible conflict. Try again soon" False -> putStrLn $ "Unable to lock file " ++ (fileName file) ++ ". Perhaps another user is using it." lockFile :: String -> IO Bool lockFile fName = do manager <- newManager defaultManagerSettings res <- runClientM (islockedQuery fName) (ClientEnv manager (BaseUrl Http "localhost" 8000 "")) case res of Left err -> do putStrLn $ "Error: " ++ show err return False Right responser -> do case responser of True -> return False False -> do res <- runClientM (lockQuery fName) (ClientEnv manager (BaseUrl Http "localhost" 8000 "")) case res of Left err ->do putStrLn $ "Error: " ++ show err return False Right response -> return True unlockFile :: String -> IO Bool unlockFile fName = do manager <- newManager defaultManagerSettings res <- runClientM (islockedQuery fName) (ClientEnv manager (BaseUrl Http "localhost" 8000 "")) case res of Left err -> do putStrLn $ "Error: " ++ show err return False Right responser -> do case responser of False -> return False True -> do res <- runClientM (unlockQuery fName) (ClientEnv manager (BaseUrl Http "localhost" 8000 "")) case res of Left err -> do putStrLn $ "Error: " ++ show err return False Right response -> return True
Garygunn94/DFS
ClientProxy/.stack-work/intero/intero7973HSQ.hs
Haskell
bsd-3-clause
14,674
{-# LANGUAGE DataKinds #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE FunctionalDependencies #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE KindSignatures #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE TypeFamilies #-} module Halytics.Metric.Statistics where import Data.Proxy import GHC.TypeLits import Halytics.Monitor.Tuple import qualified Data.Vector.Unboxed as V import qualified Statistics.Quantile as Stats import qualified Statistics.Sample as Stats newtype StoredStats a = StoredStats a class FromStats a r | a -> r where func :: Proxy a -> V.Vector Double -> r instance Collect (StoredStats a) where type S (StoredStats a) = [Double] collect _ = flip (:) instance Default (StoredStats a) where initial _ = [] instance {-# OVERLAPPABLE #-} (Collect (StoredStats a), FromStats a r) => Resultable (StoredStats a) r where r _ xs = func (Proxy :: Proxy a) (V.fromList xs) -------------------------------------------------------------------------------- -- From Statistics.Sample -------------------------------------------------------------------------------- -- Mean data Mean' type Mean = StoredStats Mean' instance FromStats Mean' Double where func _ = Stats.mean instance Resultable Mean String where r _ xs = "Mean: " ++ show res where res = r (Proxy :: Proxy Mean) xs :: Double data HarmonicMean' type HarmonicMean = StoredStats HarmonicMean' instance FromStats HarmonicMean' Double where func _ = Stats.harmonicMean instance Resultable HarmonicMean String where r _ xs = "Harmonic mean: " ++ show res where res = r (Proxy :: Proxy HarmonicMean) xs :: Double data GeometricMean' type GeometricMean = StoredStats GeometricMean' instance FromStats GeometricMean' Double where func _ = Stats.geometricMean instance Resultable GeometricMean String where r _ xs = "Geometric mean: " ++ show res where res = r (Proxy :: Proxy GeometricMean) xs :: Double -------------------------------------------------------------------------------- -- Central moments data CentralMoment' :: Nat -> * type CentralMoment k = StoredStats (CentralMoment' k) instance (KnownNat k) => FromStats (CentralMoment' k) Double where func _ = Stats.centralMoment k where k = fromInteger $ natVal (Proxy :: Proxy k) instance (KnownNat k) => Resultable (CentralMoment k) String where r _ xs = show k ++ "th central moment: " ++ show res where res = r (Proxy :: Proxy (CentralMoment k)) xs :: Double k = natVal (Proxy :: Proxy k) :: Integer data CentralMoments' :: Nat -> Nat -> * type CentralMoments k j = StoredStats (CentralMoments' k j) instance (KnownNat k, KnownNat j) => FromStats (CentralMoments' k j) (Double, Double) where func _ = Stats.centralMoments k j where k = fromInteger $ natVal (Proxy :: Proxy k) j = fromInteger $ natVal (Proxy :: Proxy j) instance (KnownNat k, KnownNat j) => Resultable (CentralMoments k j) (String, String) where r _ xs = (kStr, jStr) where kStr = show k ++ "th central moment: " ++ show kRes jStr = show j ++ "th central moment: " ++ show jRes kRes = r (Proxy :: Proxy (CentralMoment k)) xs :: Double jRes = r (Proxy :: Proxy (CentralMoment j)) xs :: Double k = fromInteger $ natVal (Proxy :: Proxy k) :: Integer j = fromInteger $ natVal (Proxy :: Proxy j) :: Integer instance (KnownNat k, KnownNat j) => Resultable (CentralMoments k j) String where r _ xs = kStr ++ ", " ++ jStr where (kStr, jStr) = r (Proxy :: Proxy (CentralMoments k j)) xs -------------------------------------------------------------------------------- -- Curvature and all data 
Skewness' type Skewness = StoredStats Skewness' instance FromStats Skewness' Double where func _ = Stats.skewness instance Resultable Skewness String where r _ xs = "Skewness: " ++ show res where res = r (Proxy :: Proxy Skewness) xs :: Double data Kurtosis' type Kurtosis = StoredStats Kurtosis' instance FromStats Kurtosis' Double where func _ = Stats.kurtosis instance Resultable Kurtosis String where r _ xs = "Kurtosis: " ++ show res where res = r (Proxy :: Proxy Kurtosis) xs :: Double -------------------------------------------------------------------------------- -- Variance and standard dev data Variance' type Variance = StoredStats Variance' instance FromStats Variance' Double where func _ = Stats.variance instance Resultable Variance String where r _ xs = "Variance: " ++ show res where res = r (Proxy :: Proxy Variance) xs :: Double data VarianceUnbiased' type VarianceUnbiased = StoredStats VarianceUnbiased' instance FromStats VarianceUnbiased' Double where func _ = Stats.varianceUnbiased instance Resultable VarianceUnbiased String where r _ xs = "Unbiased variance: " ++ show res where res = r (Proxy :: Proxy VarianceUnbiased) xs :: Double data MeanVariance' type MeanVariance = StoredStats MeanVariance' instance FromStats MeanVariance' (Double, Double) where func _ = Stats.meanVariance instance Resultable MeanVariance (String, String) where r _ xs = (mStr, vStr) where mStr = "Mean: " ++ show m vStr = "Variance: " ++ show v (m, v) = r (Proxy :: Proxy MeanVariance) xs :: (Double, Double) instance Resultable MeanVariance String where r _ xs = mStr ++ ", " ++ vStr where (mStr, vStr) = r (Proxy :: Proxy MeanVariance) xs :: (String, String) data MeanVarianceUnb' type MeanVarianceUnb = StoredStats MeanVarianceUnb' instance FromStats MeanVarianceUnb' (Double, Double) where func _ = Stats.meanVarianceUnb instance Resultable MeanVarianceUnb (String, String) where r _ xs = (mStr, vStr) where mStr = "Unbiased mean: " ++ show m vStr = "Unbiased variance: " ++ show v (m, v) = r (Proxy :: Proxy MeanVarianceUnb) xs :: (Double, Double) instance Resultable MeanVarianceUnb String where r _ xs = mStr ++ ", " ++ vStr where (mStr, vStr) = r (Proxy :: Proxy MeanVarianceUnb) xs :: (String, String) data StdDev' type StdDev = StoredStats StdDev' instance FromStats StdDev' Double where func _ = Stats.stdDev instance Resultable StdDev String where r _ xs = "Standard deviation: " ++ show res where res = r (Proxy :: Proxy StdDev) xs :: Double data FastVariance' type FastVariance = StoredStats FastVariance' instance FromStats FastVariance' Double where func _ = Stats.fastVariance instance Resultable FastVariance String where r _ xs = "Fast variance: " ++ show res where res = r (Proxy :: Proxy FastVariance) xs :: Double data FastVarianceUnbiased' type FastVarianceUnbiased = StoredStats FastVarianceUnbiased' instance FromStats FastVarianceUnbiased' Double where func _ = Stats.fastVarianceUnbiased instance Resultable FastVarianceUnbiased String where r _ xs = "Fast unbiased variance: " ++ show res where res = r (Proxy :: Proxy FastVarianceUnbiased) xs :: Double data FastStdDev' type FastStdDev = StoredStats FastStdDev' instance FromStats FastStdDev' Double where func _ = Stats.fastStdDev instance Resultable FastStdDev String where r _ xs = "Fast standard deviation: " ++ show res where res = r (Proxy :: Proxy FastStdDev) xs :: Double -------------------------------------------------------------------------------- -- From Statistics.Quantile -- Helpers type Quantile k q = WeightedAvg k q type Percentile k = Quantile k 
100 type Median = Percentile 50 data WeightedAvg' :: Nat -> Nat -> * type WeightedAvg k q = StoredStats (WeightedAvg' k q) instance (KnownNat k, KnownNat q) => FromStats (WeightedAvg' k q) (Maybe Double) where -- Here we return a 'Maybe' because 'weightedAvg' throws an exception -- on an empty vector func _ v = if V.null v then Nothing else Just $ Stats.weightedAvg k q v where k = fromInteger $ natVal (Proxy :: Proxy k) q = fromInteger $ natVal (Proxy :: Proxy q) instance (KnownNat k, KnownNat q) => Resultable (WeightedAvg k q) String where r _ xs = str (natVal (Proxy :: Proxy k)) (natVal (Proxy :: Proxy q)) where str 50 100 = "Median: " ++ show res str k 100 = show k ++ "th percentile: " ++ show res str k q = "Quantile " ++ show k ++ "/" ++ show q ++ ": " ++ show res res = r (Proxy :: Proxy (WeightedAvg k q)) xs :: Maybe Double -- TODO: Add functions that take a 'ContParam'
nmattia/halytics
src/Halytics/Metric/Statistics.hs
Haskell
bsd-3-clause
8,593
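As a rough usage sketch for the Halytics metrics above (not from the nmattia/halytics repository): results can be read off directly through the Resultable instances. This assumes the class method r from Halytics.Monitor.Tuple has the shape used in the instances above, i.e. it takes a Proxy for the metric and the collected [Double] samples; the sample values are invented.

module StatsDemo where

import Data.Proxy
import Halytics.Metric.Statistics
import Halytics.Monitor.Tuple (r)

samples :: [Double]
samples = [12.0, 15.5, 9.2, 20.1]

-- Numeric results go through the FromStats instances ...
meanOfSamples :: Double
meanOfSamples = r (Proxy :: Proxy Mean) samples

-- ... and the String instances give human-readable summaries.
summary :: String
summary = r (Proxy :: Proxy StdDev) samples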
{-# LANGUAGE StandaloneDeriving, FlexibleInstances, BangPatterns #-}
{-# LANGUAGE MultiParamTypeClasses, TypeFamilies #-}

module Data.Array.Matryoshka.PArray.Unboxed
  ( U.Unbox, U, fromVector, toVector )
where

import Control.DeepSeq
import Data.Array.Matryoshka.PArray.Base
import qualified Data.Vector.Unboxed as U
import qualified Data.Vector.Unboxed.Mutable as UM

data U

instance U.Unbox a => PArrayIn U a where
    data PArray U a = PArrayU !(U.Vector a)

    fromList !xs = PArrayU $! U.fromList xs
    toMPArray (PArrayU xs) = U.unsafeThaw xs >>= (\ x -> return $! MPArrayU x)
    toList (PArrayU xs) = U.toList xs
    length (PArrayU xs) = let len = U.length xs in len `seq` len
    empty (PArrayU xs) = let x = U.null xs in x `seq` x
    emptyP = PArrayU U.empty
    postscanl f ne (PArrayU xs) = PArrayU $! U.postscanl' f ne xs
    splitAt n (PArrayU xs) =
        let (l, r) = U.splitAt n xs
            !res   = l `seq` r `seq` (PArrayU l, PArrayU r)
        in res
    (++) (PArrayU xs) (PArrayU ys) = PArrayU $! xs U.++ ys

    -- should most likely be changed to regular index
    (!) (PArrayU xs) n = U.unsafeIndex xs n

    generate !n !f = PArrayU $! U.generate n f
    cons a (PArrayU xs) = PArrayU $! U.cons a xs
    snoc (PArrayU xs) a = PArrayU $! U.snoc xs a
    filter !p (PArrayU xs) = PArrayU $! U.filter p xs
    singleton e = PArrayU $! U.singleton e
    slice s n (PArrayU xs) = PArrayU $! U.slice s n xs

fromVector :: U.Unbox a => U.Vector a -> PArray U a
fromVector !a = let x = PArrayU a in x `seq` x

toVector :: PArray U a -> U.Vector a
toVector (PArrayU a) = a

-- mutable parallel array. only used as internal data structure
instance U.Unbox a => PArrayOut U a where
    data MPArray U a = MPArrayU !(UM.IOVector a)

    unsafeNew !n = UM.unsafeNew n >>= (\ x -> return $! MPArrayU x)
    unsafeFreeze (MPArrayU a) = U.unsafeFreeze a >>= (\ x -> return $! PArrayU x)
    unsafeWrite (MPArrayU a) i e = UM.unsafeWrite a i e
    unsafeRead (MPArrayU a) i = UM.unsafeRead a i
    toPArray = unsafeFreeze
    emptyMP = unsafeNew 0
    unsafeSlice s e (MPArrayU xs) = MPArrayU $! UM.unsafeSlice s e xs

deriving instance (Show a, U.Unbox a) => Show (PArray U a)

instance NFData xs => NFData (PArray U xs) where
    rnf (PArrayU xs) = rnf xs

instance NFData xs => NFData (MPArray U xs) where
    rnf (MPArrayU xs) = rnf xs
agremm/Matryoshka
Data/Array/Matryoshka/PArray/Unboxed.hs
Haskell
bsd-3-clause
2,368
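A small sketch (not part of the agremm/Matryoshka repository) of round-tripping between ordinary unboxed vectors and the PArray wrapper above, using only the names the module exports; the module name and sample values are invented.

module PArrayDemo where

import qualified Data.Vector.Unboxed as U
import Data.Array.Matryoshka.PArray.Unboxed (fromVector, toVector)

-- Wrap an unboxed vector in the parallel-array representation and unwrap it again.
roundTrip :: U.Vector Int -> U.Vector Int
roundTrip = toVector . fromVector

main :: IO ()
main = print (roundTrip (U.fromList [1, 2, 3]))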
-- | for developing only
--
module Paths_siberia where

getDataFileName :: FilePath -> IO FilePath
getDataFileName = return

getDataDir :: IO FilePath
getDataDir = return "."
chemist/siberia
tmp/Paths_siberia.hs
Haskell
bsd-3-clause
174
{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ScopedTypeVariables #-} -- | A preliminary renderer that produces `ReactJS` components when run using -- GHCJS. -- module Text.Blaze.Renderer.ReactJS ( ReactJSNode , renderHtml ) where import Control.Applicative import Control.Monad import Control.Monad.Trans (lift) import Control.Monad.Trans.Either ( runEitherT, EitherT(..), left) import qualified Data.ByteString.Char8 as SBC import qualified Data.HashMap.Strict as HMS import Data.List (isInfixOf) import Data.Monoid ((<>)) import qualified Data.Text as T import qualified Data.ByteString as S import qualified GHCJS.Foreign as Foreign import GHCJS.Marshal as Marshal import GHCJS.Types (JSString, JSRef, JSArray, JSObject, castRef) import Prelude hiding (span) import Text.Blaze.Internal import Text.Blaze.Event.Internal import Text.Blaze.Event.Charcode (unCharcode) import Text.Blaze.Event.Keycode (unKeycode) ------------------------------------------------------------------------------ -- FFI to ReactJS ------------------------------------------------------------------------------ data ReactJSEvent_ type ReactJSEvent = JSRef ReactJSEvent_ data ReactJSNode_ type ReactJSNode = JSRef ReactJSNode_ type ReactJSNodes = JSArray ReactJSNode foreign import javascript unsafe "h$reactjs.mkDomNode($1, $2, $3)" mkReactJSParent :: JSString -> JSObject JSString -> ReactJSNodes -> IO ReactJSNode foreign import javascript unsafe "h$reactjs.mkDomNode($1, $2, [])" mkReactJSLeaf :: JSString -> JSObject JSString -> IO ReactJSNode foreign import javascript unsafe "$1.preventDefault()" preventDefault :: ReactJSEvent -> IO () foreign import javascript unsafe "$1.stopPropagation()" stopPropagation :: ReactJSEvent -> IO () ------------------------------------------------------------------------------ -- Rendering ------------------------------------------------------------------------------ -- TODO (SM): find a better representation for the rendering of Strings. -- Probably a DList T.Text with a following concat. -- | Render a 'ChoiceString'. -- fromChoiceString :: ChoiceString -- ^ String to render -> String -- ^ String to append -> String -- ^ Resulting string fromChoiceString (Static s) = getString s fromChoiceString (String s) = (s ++) fromChoiceString (Text s) = (T.unpack s ++) fromChoiceString (ByteString s) = (SBC.unpack s ++) fromChoiceString (PreEscaped x) = -- FiXME (SM): here we actually need to unescape! case x of String s -> (s ++) Text s -> (\k -> T.foldr (:) k s) s -> fromChoiceString s fromChoiceString (External x) = case x of -- Check that the sequence "</" is *not* in the external data. String s -> if "</" `isInfixOf` s then id else (s ++) Text s -> if "</" `T.isInfixOf` s then id else (\k -> T.foldr (:) k s) ByteString s -> if "</" `S.isInfixOf` s then id else (SBC.unpack s ++) s -> fromChoiceString s fromChoiceString (AppendChoiceString x y) = fromChoiceString x . fromChoiceString y fromChoiceString EmptyChoiceString = id -- | Render some 'Markup' to a virtual dom. -- -- This function is morally pure. -- render :: forall act. Show act => (act -> Bool -> IO ()) -- ^ Callback for actions raised by event handlers. -> Markup act -> IO ReactJSNodes render handleAct0 markup = do children <- Foreign.newArray go handleAct0 (\_props -> return ()) children markup return children where go :: forall act' b. (act' -> Bool -> IO ()) -> (JSObject JSString -> IO ()) -> (JSArray ReactJSNode) -> MarkupM act' b -> IO () go handleAct setProps children html0 = case html0 of MapActions f h -> go (handleAct . 
f) setProps children h OnEvent handler h -> do let setProps' props = do registerEventHandler (handleAct <$> handler) props setProps props go handleAct setProps' children h Parent tag _open _close h -> tagToVNode (staticStringToJs tag) h CustomParent tag h -> tagToVNode (choiceStringToJs tag) h Leaf tag _begin _end -> leafToVNode (staticStringToJs tag) CustomLeaf tag _close -> leafToVNode (choiceStringToJs tag) Content content -> textToVNode (choiceStringToJs content) AddAttribute key _preparedKey value h -> do setProperty (staticStringToJs key) (choiceStringToJs value) h AddBoolAttribute key value h -> do setProperty (staticStringToJs key) (Foreign.toJSBool value) h -- FIXME (SM): This is not going to work in all cases, as 'attributes' -- must be set differently from properties. AddCustomAttribute key value h -> setProperty (choiceStringToJs key) (choiceStringToJs value) h AddObjectAttribute key object h -> do jsObj <- toJSRef_hashMap object setProperty (staticStringToJs key) jsObj h Empty -> return () Append h1 h2 -> do go handleAct setProps children h1 go handleAct setProps children h2 where choiceStringToJs cs = Foreign.toJSString (fromChoiceString cs "") staticStringToJs ss = Foreign.toJSString (getText ss) -- setProperty :: JSString -> JSRef a -> MarkupM (EventHandler act') b -> IO () setProperty key value content = go handleAct setProps' children content where setProps' props = Foreign.setProp key value props >> setProps props makePropertiesObject = do props <- Foreign.newObj setProps props return props tagToVNode tag content = do props <- makePropertiesObject innerChildren <- Foreign.newArray go handleAct (\_props -> return ()) innerChildren content node <- mkReactJSParent tag props innerChildren Foreign.pushArray node children leafToVNode tag = do props <- makePropertiesObject node <- mkReactJSLeaf tag props Foreign.pushArray node children textToVNode :: JSString -> IO () textToVNode jsText = Foreign.pushArray jsText children -- TODO (asayers): Something like this should probably be added to GHCJS.Marshall: -- toJSRef_hashMap :: (IsString a, ToJSRef b) -- => HMS.HashMap a b -- -> IO (JSRef (HMS.HashMap a b)) toJSRef_hashMap :: HMS.HashMap T.Text T.Text -> IO (JSRef (HMS.HashMap T.Text T.Text)) toJSRef_hashMap hashmap = fmap castRef $ do obj <- Foreign.newObj let addProp k v = Foreign.setProp k (Foreign.toJSString v) obj void $ HMS.traverseWithKey addProp hashmap return obj renderHtml :: Show act => (act -> Bool -> IO ()) -> Markup act -> IO (ReactJSNode) renderHtml handleAction html = do children <- render handleAction html props <- Foreign.newObj mkReactJSParent "div" props children ------------------------------------------------------------------------------ -- Event handler callback construction ------------------------------------------------------------------------------ -- | ReactJS defines the following event types, as of v0.12: data ReactJSEventType -- Clipboard Events = OnCopyE | OnCutE | OnPasteE -- Keyboard Events | OnKeyDownE | OnKeyPressE | OnKeyUpE -- Focus Events | OnFocusE | OnBlurE -- Form Events | OnChangeE | OnInputE | OnSubmitE -- Mouse Events | OnClickE | OnDoubleClickE | OnDragE | OnDragEndE | OnDragEnterE | OnDragExitE | OnDragLeaveE | OnDragOverE | OnDragStartE | OnDropE | OnMouseDownE | OnMouseEnterE | OnMouseLeaveE | OnMouseMoveE | OnMouseOutE | OnMouseOverE | OnMouseUpE -- Touch Events | OnTouchCancelE | OnTouchEndE | OnTouchMoveE | OnTouchStartE -- UI Events | OnScrollE -- Wheel Events | OnWheelE reactEventName :: ReactJSEventType -> JSString 
reactEventName ev = case ev of OnCopyE -> "onCopy" OnCutE -> "onCut" OnPasteE -> "onPaste" OnKeyDownE -> "onKeyDown" OnKeyPressE -> "onKeyPress" OnKeyUpE -> "onKeyUp" OnFocusE -> "onFocus" OnBlurE -> "onBlur" OnChangeE -> "onChange" OnInputE -> "onInput" OnSubmitE -> "onSubmit" OnClickE -> "onClick" OnDoubleClickE -> "onDoubleClick" OnDragE -> "onDrag" OnDragEndE -> "onDragEnd" OnDragEnterE -> "onDragEnter" OnDragExitE -> "onDragExit" OnDragLeaveE -> "onDragLeave" OnDragOverE -> "onDragOver" OnDragStartE -> "onDragStart" OnDropE -> "onDrop" OnMouseDownE -> "onMouseDown" OnMouseEnterE -> "onMouseEnter" OnMouseLeaveE -> "onMouseLeave" OnMouseMoveE -> "onMouseMove" OnMouseOutE -> "onMouseOut" OnMouseOverE -> "onMouseOver" OnMouseUpE -> "onMouseUp" OnTouchCancelE -> "onTouchCancel" OnTouchEndE -> "onTouchEnd" OnTouchMoveE -> "onTouchMove" OnTouchStartE -> "onTouchStart" OnScrollE -> "onScroll" OnWheelE -> "onWheel" lookupProp :: JSString -> JSRef a -> EitherT T.Text IO (JSRef b) lookupProp name obj = do mbProp <- lift $ Foreign.getPropMaybe name obj maybe (left err) return mbProp where err = "failed to get property '" <> Foreign.fromJSString name <> "'." lookupIntProp :: JSString -> JSRef a -> EitherT T.Text IO Int lookupIntProp name obj = do ref <- lookupProp name obj mbInt <- lift $ Marshal.fromJSRef ref case mbInt of Nothing -> left "lookupIntProp: couldn't parse field as Int" Just x -> return x lookupDoubleProp :: JSString -> JSRef a -> EitherT T.Text IO Double lookupDoubleProp name obj = do ref <- lookupProp name obj mbDouble <- lift $ Marshal.fromJSRef ref case mbDouble of Nothing -> left "lookupDoubleProp: couldn't parse field as Double" Just x -> return x data Handler = IgnoreEvent | HandleEvent (IO (Bool -> IO ())) -- ^ Contains an IO action which generates the callback to attach to the event registerEventHandler :: EventHandler (Bool -> IO ()) -> JSObject JSString -- ^ Properties to register the event handler in -> IO () registerEventHandler eh props = case eh of OnKeyDown keys mkAct -> register True OnKeyDownE $ \eventRef -> handleKeyEvent eventRef keys mkAct OnKeyUp keys mkAct -> register True OnKeyUpE $ \eventRef -> handleKeyEvent eventRef keys mkAct OnKeyPress chars mkAct -> register True OnKeyPressE $ \eventRef -> handleCharEvent eventRef chars mkAct OnFocus mkAct -> register False OnFocusE $ \_eventRef -> return $ Right $ HandleEvent mkAct OnBlur mkAct -> register False OnBlurE $ \_eventRef -> return $ Right $ HandleEvent mkAct OnValueChange mkAct -> register True OnChangeE $ \eventRef -> runEitherT $ do valueRef <- lookupProp "value" =<< lookupProp "target" eventRef return $ HandleEvent $ mkAct $ Foreign.fromJSString valueRef OnCheckedChange mkAct -> register False OnChangeE $ \eventRef -> runEitherT $ do valueRef <- lookupProp "checked" =<< lookupProp "target" eventRef return $ HandleEvent $ mkAct $ Foreign.fromJSBool valueRef OnSubmit mkAct -> register True OnSubmitE $ \_eventRef -> return $ Right $ HandleEvent mkAct OnClick btns mkAct -> register False OnClickE $ \eventRef -> handleMouseEvent eventRef btns mkAct OnDoubleClick btns mkAct -> register False OnDoubleClickE $ \eventRef -> handleMouseEvent eventRef btns mkAct OnMouseDown btns mkAct -> register False OnMouseDownE $ \eventRef -> handleMouseEvent eventRef btns mkAct OnMouseUp btns mkAct -> register False OnMouseUpE $ \eventRef -> handleMouseEvent eventRef btns mkAct OnMouseMove mkAct -> register False OnMouseMoveE $ \eventRef -> runEitherT $ HandleEvent . 
mkAct <$> getMousePosition eventRef OnMouseEnter mkAct -> register False OnMouseEnterE $ \eventRef -> runEitherT $ HandleEvent . mkAct <$> getMousePosition eventRef OnMouseLeave mkAct -> register False OnMouseLeaveE $ \eventRef -> runEitherT $ HandleEvent . mkAct <$> getMousePosition eventRef OnMouseOver mkAct -> register False OnMouseOverE $ \eventRef -> runEitherT $ HandleEvent . mkAct <$> getMousePosition eventRef OnMouseOut mkAct -> register False OnMouseOutE $ \eventRef -> runEitherT $ HandleEvent . mkAct <$> getMousePosition eventRef OnScroll mkAct -> register False OnScrollE $ \eventRef -> runEitherT $ do scrollTop <- lookupIntProp "scrollTop" =<<lookupProp "target" eventRef return $ HandleEvent $ mkAct scrollTop OnWheel mkAct -> register False OnWheelE $ \eventRef -> runEitherT $ do dx <- lookupDoubleProp "deltaX" eventRef dy <- lookupDoubleProp "deltaY" eventRef dz <- lookupDoubleProp "deltaZ" eventRef let deltaValue = DeltaValue dx dy dz deltaMode <- lookupIntProp "deltaMode" eventRef domDelta <- case deltaMode of 0 -> return $ PixelDelta deltaValue 1 -> return $ LineDelta deltaValue 2 -> return $ PageDelta deltaValue _ -> left "registerEventHandler: unrecognized delta mode" return $ HandleEvent $ mkAct domDelta where handleKeyEvent eventRef keys mkAct = runEitherT $ do keycode <- lookupIntProp "keyCode" eventRef <|> lookupIntProp "which" eventRef if keycode `elem` map unKeycode keys then return $ HandleEvent mkAct else return $ IgnoreEvent handleCharEvent eventRef chars mkAct = runEitherT $ do charcode <- lookupIntProp "charCode" eventRef <|> lookupIntProp "which" eventRef if charcode `elem` map unCharcode chars then return $ HandleEvent mkAct else return $ IgnoreEvent handleMouseEvent :: ReactJSEvent -> [MouseButton] -> (MousePosition -> IO (Bool -> IO ())) -> IO (Either T.Text Handler) handleMouseEvent eventRef btns mkAct = runEitherT $ do button <- getMouseButton eventRef if button `elem` btns then HandleEvent . mkAct <$> getMousePosition eventRef else return IgnoreEvent getMouseButton :: ReactJSEvent -> EitherT T.Text IO MouseButton getMouseButton eventRef = do button <- lookupIntProp "button" eventRef case button of 0 -> return LeftButton 1 -> return MiddleButton 2 -> return RightButton _ -> left "getMouseButton: couldn't parse button code" getMousePosition :: ReactJSEvent -> EitherT T.Text IO MousePosition getMousePosition eventRef = do clientX <- lookupIntProp "clientX" eventRef clientY <- lookupIntProp "clientY" eventRef pageX <- lookupIntProp "pageX" eventRef pageY <- lookupIntProp "pageY" eventRef screenX <- lookupIntProp "screenX" eventRef screenY <- lookupIntProp "screenY" eventRef return MousePosition { mpClientX = clientX , mpClientY = clientY , mpPageX = pageX , mpPageY = pageY , mpScreenX = screenX , mpScreenY = screenY } register :: Bool -> ReactJSEventType -> (ReactJSEvent -> IO (Either T.Text Handler)) -- ^ Callback to actually handle the event. -> IO () register requireSyncRedraw reactEvent extractHandler = do -- FIXME (SM): memory leak to to AlwaysRetain. Need to hook-up ReactJS -- event handler table with GHCJS GC. 
cb <- Foreign.syncCallback1 Foreign.AlwaysRetain False $ \eventRef -> do -- try to extract handler errOrHandler <- extractHandler eventRef case errOrHandler of Left err -> do -- prevent default action and cancel propagation preventDefault eventRef stopPropagation eventRef -- print the error let eventName = Foreign.fromJSString $ reactEventName reactEvent eventType <- either (const "Unknown type") Foreign.fromJSString <$> runEitherT (lookupProp "type" eventRef) putStrLn $ unlines [ "blaze-react - event handling error: " ++ T.unpack err , "Event was " ++ eventName ++ " of type " ++ eventType ] Right IgnoreEvent -> return () Right (HandleEvent mkHandler) -> do -- prevent default action and cancel propagation preventDefault eventRef stopPropagation eventRef -- run the handler. This triggers a redraw. handler <- mkHandler handler requireSyncRedraw Foreign.setProp (reactEventName reactEvent) cb props
meiersi/blaze-react
src/Text/Blaze/Renderer/ReactJS.hs
Haskell
mit
17,708
-- Copyright 2016 TensorFlow authors.
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
--     http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.

-- | Conduit wrappers for TensorFlow.Records.

{-# LANGUAGE Rank2Types #-}

module TensorFlow.Records.Conduit
  ( -- * Encode/Decode
    encodeTFRecords
  , decodeTFRecords

    -- * Source/Sink
  , sinkTFRecords
  , sourceTFRecords
  ) where

import Control.Monad.Catch (MonadThrow)
import Control.Monad.Trans.Resource (MonadResource)
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import Data.Conduit ((=$=), Conduit, Consumer, Producer)
import Data.Conduit.Binary (sinkFile, sourceFile)
import Data.Conduit.Cereal (conduitGet2, conduitPut)

import TensorFlow.Records (getTFRecord, putTFRecord)

-- | Decode TFRecords from a stream of bytes.
decodeTFRecords :: MonadThrow m => Conduit B.ByteString m BL.ByteString
decodeTFRecords = conduitGet2 getTFRecord

-- | Read TFRecords from a file.
sourceTFRecords :: (MonadResource m, MonadThrow m) => FilePath -> Producer m BL.ByteString
sourceTFRecords path = sourceFile path =$= decodeTFRecords

-- | Encode TFRecords to a stream of bytes.
encodeTFRecords :: Monad m => Conduit BL.ByteString m B.ByteString
encodeTFRecords = conduitPut putTFRecord

-- | Write TFRecords to a file.
sinkTFRecords :: (MonadResource m) => FilePath -> Consumer BL.ByteString m ()
sinkTFRecords path = encodeTFRecords =$= sinkFile path
judah/tensorflow-haskell
tensorflow-records-conduit/src/TensorFlow/Records/Conduit.hs
Haskell
apache-2.0
1,892
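As a usage sketch for the TFRecord conduits above (not from the tensorflow-haskell repository): copying every record from one file to another by composing the source and sink. It assumes the same conduit generation the module itself uses (where =$= and $$ are available); the file names are hypothetical.

module CopyRecords where

import Control.Monad.Trans.Resource (runResourceT)
import Data.Conduit (($$))
import TensorFlow.Records.Conduit (sourceTFRecords, sinkTFRecords)

-- Stream TFRecords from one file into another without loading the whole file.
main :: IO ()
main = runResourceT $
  sourceTFRecords "train.tfrecords" $$ sinkTFRecords "train-copy.tfrecords"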
{-# OPTIONS_GHC -fno-warn-name-shadowing -fno-warn-unused-binds #-} {-# LANGUAGE StandaloneDeriving, FlexibleContexts, DeriveDataTypeable , UndecidableInstances, FlexibleInstances, MultiParamTypeClasses , PatternGuards, Rank2Types, TypeSynonymInstances #-} ----------------------------------------------------------------------------- -- | -- Module : XMonad.Layout.Groups -- Copyright : Quentin Moser <moserq@gmail.com> -- License : BSD-style (see LICENSE) -- -- Maintainer : orphaned -- Stability : unstable -- Portability : unportable -- -- Two-level layout with windows split in individual layout groups, -- themselves managed by a user-provided layout. -- ----------------------------------------------------------------------------- module XMonad.Layout.Groups ( -- * Usage -- $usage -- * Creation group -- * Messages , GroupsMessage(..) , ModifySpec -- ** Useful 'ModifySpec's , swapUp , swapDown , swapMaster , focusUp , focusDown , focusMaster , swapGroupUp , swapGroupDown , swapGroupMaster , focusGroupUp , focusGroupDown , focusGroupMaster , moveToGroupUp , moveToGroupDown , moveToNewGroupUp , moveToNewGroupDown , splitGroup -- * Types , Groups , Group(..) , onZipper , onLayout , WithID , sameID ) where import XMonad import qualified XMonad.StackSet as W import XMonad.Util.Stack import Data.Maybe (isJust, isNothing, fromMaybe, catMaybes, fromJust) import Data.List ((\\)) import Control.Arrow ((>>>)) import Control.Applicative ((<$>)) import Control.Monad (forM) -- $usage -- This module provides a layout combinator that allows you -- to manage your windows in independent groups. You can provide -- both the layout with which to arrange the windows inside each -- group, and the layout with which the groups themselves will -- be arranged on the screen. -- -- The "XMonad.Layout.Groups.Examples" and "XMonad.Layout.Groups.Wmii" -- modules contain examples of layouts that can be defined with this -- combinator. They're also the recommended starting point -- if you are a beginner and looking for something you can use easily. -- -- One thing to note is that 'Groups'-based layout have their own -- notion of the order of windows, which is completely separate -- from XMonad's. For this reason, operations like 'XMonad.StackSet.SwapUp' -- will have no visible effect, and those like 'XMonad.StackSet.focusUp' -- will focus the windows in an unpredictable order. For a better way of -- rearranging windows and moving focus in such a layout, see the -- example 'ModifySpec's (to be passed to the 'Modify' message) provided -- by this module. -- -- If you use both 'Groups'-based and other layouts, The "XMonad.Layout.Groups.Helpers" -- module provides actions that can work correctly with both, defined using -- functions from "XMonad.Actions.MessageFeedback". -- | Create a 'Groups' layout. -- -- Note that the second parameter (the layout for arranging the -- groups) is not used on 'Windows', but on 'Group's. For this -- reason, you can only use layouts that don't specifically -- need to manage 'Window's. This is obvious, when you think -- about it. group :: l Window -> l2 (Group l Window) -> Groups l l2 Window group l l2 = Groups l l2 startingGroups (U 1 0) where startingGroups = fromJust $ singletonZ $ G (ID (U 0 0) l) emptyZ -- * Stuff with unique keys data Uniq = U Integer Integer deriving (Eq, Show, Read) -- | From a seed, generate an infinite list of keys and a new -- seed. All keys generated with this method will be different -- provided you don't use 'gen' again with a key from the list. 
-- (if you need to do that, see 'split' instead) gen :: Uniq -> (Uniq, [Uniq]) gen (U i1 i2) = (U (i1+1) i2, zipWith U (repeat i1) [i2..]) -- | Split an infinite list into two. I ended up not -- needing this, but let's keep it just in case. -- split :: [a] -> ([a], [a]) -- split as = snd $ foldr step (True, ([], [])) as -- where step a (True, (as1, as2)) = (False, (a:as1, as2)) -- step a (False, (as1, as2)) = (True, (as1, a:as2)) -- | Add a unique identity to a layout so we can -- follow it around. data WithID l a = ID { getID :: Uniq , unID :: (l a)} deriving (Show, Read) -- | Compare the ids of two 'WithID' values sameID :: WithID l a -> WithID l a -> Bool sameID (ID id1 _) (ID id2 _) = id1 == id2 instance Eq (WithID l a) where ID id1 _ == ID id2 _ = id1 == id2 instance LayoutClass l a => LayoutClass (WithID l) a where runLayout ws@W.Workspace { W.layout = ID id l } r = do (placements, ml') <- flip runLayout r ws { W.layout = l} return (placements, ID id <$> ml') handleMessage (ID id l) sm = do ml' <- handleMessage l sm return $ ID id <$> ml' description (ID _ l) = description l -- * The 'Groups' layout -- ** Datatypes -- | A group of windows and its layout algorithm. data Group l a = G { gLayout :: WithID l a , gZipper :: Zipper a } deriving (Show, Read, Eq) onLayout :: (WithID l a -> WithID l a) -> Group l a -> Group l a onLayout f g = g { gLayout = f $ gLayout g } onZipper :: (Zipper a -> Zipper a) -> Group l a -> Group l a onZipper f g = g { gZipper = f $ gZipper g } -- | The type of our layouts. data Groups l l2 a = Groups { -- | The starting layout for new groups baseLayout :: l a -- | The layout for placing each group on the screen , partitioner :: l2 (Group l a) -- | The window groups , groups :: W.Stack (Group l a) -- | A seed for generating unique ids , seed :: Uniq } deriving instance (Show a, Show (l a), Show (l2 (Group l a))) => Show (Groups l l2 a) deriving instance (Read a, Read (l a), Read (l2 (Group l a))) => Read (Groups l l2 a) -- | Messages accepted by 'Groups'-based layouts. -- All other messages are forwarded to the layout of the currently -- focused subgroup (as if they had been wrapped in 'ToFocused'). data GroupsMessage = ToEnclosing SomeMessage -- ^ Send a message to the enclosing layout -- (the one that places the groups themselves) | ToGroup Int SomeMessage -- ^ Send a message to the layout for nth group -- (starting at 0) | ToFocused SomeMessage -- ^ Send a message to the layout for the focused -- group | ToAll SomeMessage -- ^ Send a message to all the sub-layouts | Refocus -- ^ Refocus the window which should be focused according -- to the layout. | Modify ModifySpec -- ^ Modify the ordering\/grouping\/focusing -- of windows according to a 'ModifySpec' deriving Typeable instance Show GroupsMessage where show (ToEnclosing _) = "ToEnclosing {...}" show (ToGroup i _) = "ToGroup "++show i++" {...}" show (ToFocused _) = "ToFocused {...}" show (ToAll _) = "ToAll {...}" show Refocus = "Refocus" show (Modify _) = "Modify {...}" instance Message GroupsMessage modifyGroups :: (Zipper (Group l a) -> Zipper (Group l a)) -> Groups l l2 a -> Groups l l2 a modifyGroups f g = let (seed', id:_) = gen (seed g) defaultGroups = fromJust $ singletonZ $ G (ID id $ baseLayout g) emptyZ in g { groups = fromMaybe defaultGroups . f . Just $ groups g , seed = seed' } -- ** Readaptation -- | Adapt our groups to a new stack. 
-- This algorithm handles window additions and deletions correctly, -- ignores changes in window ordering, and tries to react to any -- other stack changes as gracefully as possible. readapt :: Eq a => Zipper a -> Groups l l2 a -> Groups l l2 a readapt z g = let mf = getFocusZ z (seed', id:_) = gen $ seed g g' = g { seed = seed' } in flip modifyGroups g' $ mapZ_ (onZipper $ removeDeleted z) >>> filterKeepLast (isJust . gZipper) >>> findNewWindows (W.integrate' z) >>> addWindows (ID id $ baseLayout g) >>> focusGroup mf >>> onFocusedZ (onZipper $ focusWindow mf) where filterKeepLast _ Nothing = Nothing filterKeepLast f z@(Just s) = maybe (singletonZ $ W.focus s) Just $ filterZ_ f z -- | Remove the windows from a group which are no longer present in -- the stack. removeDeleted :: Eq a => Zipper a -> Zipper a -> Zipper a removeDeleted z = filterZ_ (flip elemZ z) -- | Identify the windows not already in a group. findNewWindows :: Eq a => [a] -> Zipper (Group l a) -> (Zipper (Group l a), [a]) findNewWindows as gs = (gs, foldrZ_ removePresent as gs) where removePresent g as' = filter (not . flip elemZ (gZipper g)) as' -- | Add windows to the focused group. If you need to create one, -- use the given layout and an id from the given list. addWindows :: WithID l a -> (Zipper (Group l a), [a]) -> Zipper (Group l a) addWindows l (Nothing, as) = singletonZ $ G l (W.differentiate as) addWindows _ (z, as) = onFocusedZ (onZipper add) z where add z = foldl (flip insertUpZ) z as -- | Focus the group containing the given window focusGroup :: Eq a => Maybe a -> Zipper (Group l a) -> Zipper (Group l a) focusGroup Nothing = id focusGroup (Just a) = fromTags . map (tagBy $ elemZ a . gZipper) . W.integrate' -- | Focus the given window focusWindow :: Eq a => Maybe a -> Zipper a -> Zipper a focusWindow Nothing = id focusWindow (Just a) = fromTags . map (tagBy (==a)) . W.integrate' -- * Interface -- ** Layout instance instance (LayoutClass l Window, LayoutClass l2 (Group l Window)) => LayoutClass (Groups l l2) Window where description (Groups _ p gs _) = s1++" by "++s2 where s1 = description $ gLayout $ W.focus gs s2 = description p runLayout ws@(W.Workspace _ _l z) r = let l = readapt z _l in do (areas, mpart') <- runLayout ws { W.layout = partitioner l , W.stack = Just $ groups l } r results <- forM areas $ \(g, r') -> runLayout ws { W.layout = gLayout g , W.stack = gZipper g } r' let hidden = map gLayout (W.integrate $ groups _l) \\ map (gLayout . fst) areas hidden' <- mapM (flip handleMessage $ SomeMessage Hide) hidden let placements = concatMap fst results newL = justMakeNew l mpart' (map snd results ++ hidden') return $ (placements, newL) handleMessage l@(Groups _ p _ _) sm | Just (ToEnclosing sm') <- fromMessage sm = do mp' <- handleMessage p sm' return $ maybeMakeNew l mp' [] handleMessage l@(Groups _ p gs _) sm | Just (ToAll sm') <- fromMessage sm = do mp' <- handleMessage p sm' mg's <- mapZM_ (handle sm') $ Just gs return $ maybeMakeNew l mp' $ W.integrate' mg's where handle sm (G l _) = handleMessage l sm handleMessage l sm | Just a <- fromMessage sm = let _rightType = a == Hide -- Is there a better-looking way -- of doing this? 
in handleMessage l $ SomeMessage $ ToAll sm handleMessage l@(Groups _ _ z _) sm = case fromMessage sm of Just (ToFocused sm') -> do mg's <- W.integrate' <$> handleOnFocused sm' z return $ maybeMakeNew l Nothing mg's Just (ToGroup i sm') -> do mg's <- handleOnIndex i sm' z return $ maybeMakeNew l Nothing mg's Just (Modify spec) -> case applySpec spec l of Just l' -> refocus l' >> return (Just l') Nothing -> return $ Just l Just Refocus -> refocus l >> return (Just l) Just _ -> return Nothing Nothing -> handleMessage l $ SomeMessage (ToFocused sm) where handleOnFocused sm z = mapZM step $ Just z where step True (G l _) = handleMessage l sm step False _ = return Nothing handleOnIndex i sm z = mapM step $ zip [0..] $ W.integrate z where step (j, (G l _)) | i == j = handleMessage l sm step _ = return Nothing justMakeNew :: Groups l l2 a -> Maybe (l2 (Group l a)) -> [Maybe (WithID l a)] -> Maybe (Groups l l2 a) justMakeNew g mpart' ml's = Just g { partitioner = fromMaybe (partitioner g) mpart' , groups = combine (groups g) ml's } where combine z ml's = let table = map (\(ID id a) -> (id, a)) $ catMaybes ml's in flip mapS_ z $ \(G (ID id l) ws) -> case lookup id table of Nothing -> G (ID id l) ws Just l' -> G (ID id l') ws mapS_ f = fromJust . mapZ_ f . Just maybeMakeNew :: Groups l l2 a -> Maybe (l2 (Group l a)) -> [Maybe (WithID l a)] -> Maybe (Groups l l2 a) maybeMakeNew _ Nothing ml's | all isNothing ml's = Nothing maybeMakeNew g mpart' ml's = justMakeNew g mpart' ml's refocus :: Groups l l2 Window -> X () refocus g = case getFocusZ $ gZipper $ W.focus $ groups g of Just w -> focus w Nothing -> return () -- ** ModifySpec type -- | Type of functions describing modifications to a 'Groups' layout. They -- are transformations on 'Zipper's of groups. -- -- Things you shouldn't do: -- -- * Forge new windows (they will be ignored) -- -- * Duplicate windows (whatever happens is your problem) -- -- * Remove windows (they will be added again) -- -- * Duplicate layouts (only one will be kept, the rest will -- get the base layout) -- -- Note that 'ModifySpec' is a rank-2 type (indicating that 'ModifySpec's must -- be polymorphic in the layout type), so if you define functions taking -- 'ModifySpec's as arguments, or returning them, you'll need to write a type -- signature and add @{-# LANGUAGE Rank2Types #-}@ at the beginning type ModifySpec = forall l. WithID l Window -> Zipper (Group l Window) -> Zipper (Group l Window) -- | Apply a ModifySpec. applySpec :: ModifySpec -> Groups l l2 Window -> Maybe (Groups l l2 Window) applySpec f g = let (seed', id:ids) = gen $ seed g g' = flip modifyGroups g $ f (ID id $ baseLayout g) >>> toTags >>> foldr reID ((ids, []), []) >>> snd >>> fromTags in case groups g == groups g' of True -> Nothing False -> Just g' { seed = seed' } where reID eg ((id:ids, seen), egs) = let myID = getID $ gLayout $ fromE eg in case elem myID seen of False -> ((id:ids, myID:seen), eg:egs) True -> ((ids, seen), mapE_ (setID id) eg:egs) where setID id (G (ID _ _) z) = G (ID id $ baseLayout g) z reID _ (([], _), _) = undefined -- The list of ids is infinite -- ** Misc. ModifySpecs -- | helper onFocused :: (Zipper Window -> Zipper Window) -> ModifySpec onFocused f _ gs = onFocusedZ (onZipper f) gs -- | Swap the focused window with the previous one. swapUp :: ModifySpec swapUp = onFocused swapUpZ -- | Swap the focused window with the next one. swapDown :: ModifySpec swapDown = onFocused swapDownZ -- | Swap the focused window with the (group's) master -- window. 
swapMaster :: ModifySpec swapMaster = onFocused swapMasterZ -- | Swap the focused group with the previous one. swapGroupUp :: ModifySpec swapGroupUp _ = swapUpZ -- | Swap the focused group with the next one. swapGroupDown :: ModifySpec swapGroupDown _ = swapDownZ -- | Swap the focused group with the master group. swapGroupMaster :: ModifySpec swapGroupMaster _ = swapMasterZ -- | Move focus to the previous window in the group. focusUp :: ModifySpec focusUp = onFocused focusUpZ -- | Move focus to the next window in the group. focusDown :: ModifySpec focusDown = onFocused focusDownZ -- | Move focus to the group's master window. focusMaster :: ModifySpec focusMaster = onFocused focusMasterZ -- | Move focus to the previous group. focusGroupUp :: ModifySpec focusGroupUp _ = focusUpZ -- | Move focus to the next group. focusGroupDown :: ModifySpec focusGroupDown _ = focusDownZ -- | Move focus to the master group. focusGroupMaster :: ModifySpec focusGroupMaster _ = focusMasterZ -- | helper _removeFocused :: W.Stack a -> (a, Zipper a) _removeFocused (W.Stack f (u:up) down) = (f, Just $ W.Stack u up down) _removeFocused (W.Stack f [] (d:down)) = (f, Just $ W.Stack d [] down) _removeFocused (W.Stack f [] []) = (f, Nothing) -- helper _moveToNewGroup :: WithID l Window -> W.Stack (Group l Window) -> (Group l Window -> Zipper (Group l Window) -> Zipper (Group l Window)) -> Zipper (Group l Window) _moveToNewGroup l0 s insertX | G l (Just f) <- W.focus s = let (w, f') = _removeFocused f s' = s { W.focus = G l f' } in insertX (G l0 $ singletonZ w) $ Just s' _moveToNewGroup _ s _ = Just s -- | Move the focused window to a new group before the current one. moveToNewGroupUp :: ModifySpec moveToNewGroupUp _ Nothing = Nothing moveToNewGroupUp l0 (Just s) = _moveToNewGroup l0 s insertUpZ -- | Move the focused window to a new group after the current one. moveToNewGroupDown :: ModifySpec moveToNewGroupDown _ Nothing = Nothing moveToNewGroupDown l0 (Just s) = _moveToNewGroup l0 s insertDownZ -- | Move the focused window to the previous group. -- If 'True', when in the first group, wrap around to the last one. -- If 'False', create a new group before it. moveToGroupUp :: Bool -> ModifySpec moveToGroupUp _ _ Nothing = Nothing moveToGroupUp False l0 (Just s) = if null (W.up s) then moveToNewGroupUp l0 (Just s) else moveToGroupUp True l0 (Just s) moveToGroupUp True _ (Just s@(W.Stack _ [] [])) = Just s moveToGroupUp True _ (Just s@(W.Stack (G l (Just f)) _ _)) = let (w, f') = _removeFocused f in onFocusedZ (onZipper $ insertUpZ w) $ focusUpZ $ Just s { W.focus = G l f' } moveToGroupUp True _ gs = gs -- | Move the focused window to the next group. -- If 'True', when in the last group, wrap around to the first one. -- If 'False', create a new group after it. moveToGroupDown :: Bool -> ModifySpec moveToGroupDown _ _ Nothing = Nothing moveToGroupDown False l0 (Just s) = if null (W.down s) then moveToNewGroupDown l0 (Just s) else moveToGroupDown True l0 (Just s) moveToGroupDown True _ (Just s@(W.Stack _ [] [])) = Just s moveToGroupDown True _ (Just s@(W.Stack (G l (Just f)) _ _)) = let (w, f') = _removeFocused f in onFocusedZ (onZipper $ insertUpZ w) $ focusDownZ $ Just s { W.focus = G l f' } moveToGroupDown True _ gs = gs -- | Split the focused group into two at the position of the focused window (below it, -- unless it's the last window - in that case, above it). 
splitGroup :: ModifySpec splitGroup _ Nothing = Nothing splitGroup l0 z@(Just s) | G l (Just ws) <- W.focus s = case ws of W.Stack _ [] [] -> z W.Stack f (u:up) [] -> let g1 = G l $ Just $ W.Stack f [] [] g2 = G l0 $ Just $ W.Stack u up [] in insertDownZ g1 $ onFocusedZ (const g2) z W.Stack f up (d:down) -> let g1 = G l $ Just $ W.Stack f up [] g2 = G l0 $ Just $ W.Stack d [] down in insertUpZ g1 $ onFocusedZ (const g2) z splitGroup _ _ = Nothing
f1u77y/xmonad-contrib
XMonad/Layout/Groups.hs
Haskell
bsd-3-clause
21,445
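A minimal usage sketch for the group combinator from the entry above, assuming a stock xmonad setup: windows inside each group are tiled with Tall, and the groups themselves are arranged with Full, so only the focused group is visible. Only XMonad.Layout.Groups.group itself comes from the entry; the choice of Tall and Full and the def-based main are illustrative assumptions.

-- Hypothetical configuration sketch, not taken from the repository above.
import XMonad
import qualified XMonad.Layout.Groups as G

-- Inner layout (per group): Tall; outer layout (between groups): Full.
myLayout :: G.Groups Tall Full Window
myLayout = G.group (Tall 1 (3/100) (1/2)) Full

main :: IO ()
main = xmonad def { layoutHook = myLayout }

In practice one would also bind keys that send Modify messages (for example G.focusGroupUp or G.moveToGroupDown True) to navigate between groups; that wiring is omitted here.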
module Snap.Internal.Http.Server.TimeoutManager.Tests
  ( tests ) where

import Control.Concurrent
import Data.IORef
import Data.Maybe
import System.PosixCompat.Time
import System.Timeout
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.HUnit hiding (Test, path)

import qualified Snap.Internal.Http.Server.TimeoutManager as TM

tests :: [Test]
tests = [ testOneTimeout
        , testOneTimeoutAfterInactivity
        , testCancel
        , testTickle ]

testOneTimeout :: Test
testOneTimeout = testCase "timeout/oneTimeout" $ do
    mgr <- TM.initialize 3 epochTime
    oneTimeout mgr

testOneTimeoutAfterInactivity :: Test
testOneTimeoutAfterInactivity =
    testCase "timeout/oneTimeoutAfterInactivity" $ do
        mgr <- TM.initialize 3 epochTime
        threadDelay $ 7 * seconds
        oneTimeout mgr

oneTimeout :: TM.TimeoutManager -> IO ()
oneTimeout mgr = do
    mv <- newEmptyMVar
    _  <- TM.register (putMVar mv ()) mgr
    m  <- timeout (6*seconds) $ takeMVar mv
    assertBool "timeout fired" $ isJust m
    TM.stop mgr

testTickle :: Test
testTickle = testCase "timeout/tickle" $ do
    mgr <- TM.initialize 8 epochTime
    ref <- newIORef (0 :: Int)
    h   <- TM.register (writeIORef ref 1) mgr
    threadDelay $ 5 * seconds
    b0  <- readIORef ref
    assertEqual "b0" 0 b0
    TM.tickle h 8
    threadDelay $ 5 * seconds
    b1  <- readIORef ref
    assertEqual "b1" 0 b1
    threadDelay $ 8 * seconds
    b2  <- readIORef ref
    assertEqual "b2" 1 b2
    TM.stop mgr

testCancel :: Test
testCancel = testCase "timeout/cancel" $ do
    mgr <- TM.initialize 3 epochTime
    ref <- newIORef (0 :: Int)
    h   <- TM.register (writeIORef ref 1) mgr
    threadDelay $ 1 * seconds
    TM.cancel h
    threadDelay $ 5 * seconds
    b0  <- readIORef ref
    assertEqual "b0" 0 b0
    TM.stop mgr

seconds :: Int
seconds = (10::Int) ^ (6::Int)
beni55/snap-server
test/suite/Snap/Internal/Http/Server/TimeoutManager/Tests.hs
Haskell
bsd-3-clause
1,964
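The tests above exercise the whole TimeoutManager lifecycle and effectively document its API. The following standalone sketch restates that lifecycle using only the calls seen in those tests (initialize, register, tickle, stop); the delay lengths and the printed message are illustrative assumptions.

-- Hypothetical lifecycle sketch built from the calls used in the tests above.
import Control.Concurrent (threadDelay)
import System.PosixCompat.Time (epochTime)
import qualified Snap.Internal.Http.Server.TimeoutManager as TM

main :: IO ()
main = do
  mgr <- TM.initialize 3 epochTime           -- 3-second default timeout
  h   <- TM.register (putStrLn "timed out") mgr
  threadDelay (2 * 1000000)                  -- some activity before the deadline
  TM.tickle h 3                              -- push the deadline back again
  threadDelay (5 * 1000000)                  -- now let the timeout fire
  TM.stop mgr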
{-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Sandbox -- Maintainer : cabal-devel@haskell.org -- Portability : portable -- -- UI for the sandboxing functionality. ----------------------------------------------------------------------------- module Distribution.Client.Sandbox ( sandboxInit, sandboxDelete, sandboxAddSource, sandboxAddSourceSnapshot, sandboxDeleteSource, sandboxListSources, sandboxHcPkg, dumpPackageEnvironment, withSandboxBinDirOnSearchPath, getSandboxConfigFilePath, loadConfigOrSandboxConfig, findSavedDistPref, initPackageDBIfNeeded, maybeWithSandboxDirOnSearchPath, WereDepsReinstalled(..), reinstallAddSourceDeps, maybeReinstallAddSourceDeps, SandboxPackageInfo(..), maybeWithSandboxPackageInfo, tryGetIndexFilePath, sandboxBuildDir, getInstalledPackagesInSandbox, updateSandboxConfigFileFlag, updateInstallDirs, configPackageDB', configCompilerAux', getPersistOrConfigCompiler ) where import Distribution.Client.Setup ( SandboxFlags(..), ConfigFlags(..), ConfigExFlags(..), InstallFlags(..) , GlobalFlags(..), defaultConfigExFlags, defaultInstallFlags , defaultSandboxLocation, withRepoContext ) import Distribution.Client.Sandbox.Timestamp ( listModifiedDeps , maybeAddCompilerTimestampRecord , withAddTimestamps , removeTimestamps ) import Distribution.Client.Config ( SavedConfig(..), defaultUserInstall, loadConfig ) import Distribution.Client.Dependency ( foldProgress ) import Distribution.Client.IndexUtils ( BuildTreeRefType(..) ) import Distribution.Client.Install ( InstallArgs, makeInstallContext, makeInstallPlan, processInstallPlan ) import Distribution.Utils.NubList ( fromNubList ) import Distribution.Client.Sandbox.PackageEnvironment ( PackageEnvironment(..), PackageEnvironmentType(..) , createPackageEnvironmentFile, classifyPackageEnvironment , tryLoadSandboxPackageEnvironmentFile, loadUserConfig , commentPackageEnvironment, showPackageEnvironmentWithComments , sandboxPackageEnvironmentFile, userPackageEnvironmentFile , sandboxPackageDBPath ) import Distribution.Client.Sandbox.Types ( SandboxPackageInfo(..) , UseSandbox(..) ) import Distribution.Client.SetupWrapper ( SetupScriptOptions(..), defaultSetupScriptOptions ) import Distribution.Client.Types ( PackageLocation(..) , SourcePackage(..) ) import Distribution.Client.Utils ( inDir, tryCanonicalizePath , tryFindAddSourcePackageDesc) import Distribution.PackageDescription.Configuration ( flattenPackageDescription ) import Distribution.PackageDescription.Parse ( readPackageDescription ) import Distribution.Simple.Compiler ( Compiler(..), PackageDB(..) , PackageDBStack ) import Distribution.Simple.Configure ( configCompilerAuxEx , interpretPackageDbFlags , getPackageDBContents , maybeGetPersistBuildConfig , findDistPrefOrDefault , findDistPref ) import qualified Distribution.Simple.LocalBuildInfo as LocalBuildInfo import Distribution.Simple.PreProcess ( knownSuffixHandlers ) import Distribution.Simple.Program ( ProgramConfiguration ) import Distribution.Simple.Setup ( Flag(..), HaddockFlags(..) , fromFlagOrDefault, flagToMaybe ) import Distribution.Simple.SrcDist ( prepareTree ) import Distribution.Simple.Utils ( die, debug, notice, info, warn , debugNoWrap, defaultPackageDesc , intercalate, topHandlerWith , createDirectoryIfMissingVerbose ) import Distribution.Package ( Package(..) 
) import Distribution.System ( Platform ) import Distribution.Text ( display ) import Distribution.Verbosity ( Verbosity, lessVerbose ) import Distribution.Compat.Environment ( lookupEnv, setEnv ) import Distribution.Client.Compat.FilePerms ( setFileHidden ) import qualified Distribution.Client.Sandbox.Index as Index import Distribution.Simple.PackageIndex ( InstalledPackageIndex ) import qualified Distribution.Simple.PackageIndex as InstalledPackageIndex import qualified Distribution.Simple.Register as Register import qualified Data.Map as M import qualified Data.Set as S import Data.Either (partitionEithers) import Control.Exception ( assert, bracket_ ) import Control.Monad ( forM, liftM, liftM2, unless, when ) import Data.Bits ( shiftL, shiftR, xor ) import Data.Char ( ord ) import Data.IORef ( newIORef, writeIORef, readIORef ) import Data.List ( delete , foldl' , intersperse , isPrefixOf , groupBy ) import Data.Maybe ( fromJust ) #if !MIN_VERSION_base(4,8,0) import Data.Monoid ( mempty, mappend ) #endif import Data.Word ( Word32 ) import Numeric ( showHex ) import System.Directory ( canonicalizePath , createDirectory , doesDirectoryExist , doesFileExist , getCurrentDirectory , removeDirectoryRecursive , removeFile , renameDirectory ) import System.FilePath ( (</>), equalFilePath , getSearchPath , searchPathSeparator , splitSearchPath , takeDirectory ) -- -- * Constants -- -- | The name of the sandbox subdirectory where we keep snapshots of add-source -- dependencies. snapshotDirectoryName :: FilePath snapshotDirectoryName = "snapshots" -- | Non-standard build dir that is used for building add-source deps instead of -- "dist". Fixes surprising behaviour in some cases (see issue #1281). sandboxBuildDir :: FilePath -> FilePath sandboxBuildDir sandboxDir = "dist/dist-sandbox-" ++ showHex sandboxDirHash "" where sandboxDirHash = jenkins sandboxDir -- See http://en.wikipedia.org/wiki/Jenkins_hash_function jenkins :: String -> Word32 jenkins str = loop_finish $ foldl' loop 0 str where loop :: Word32 -> Char -> Word32 loop hash key_i' = hash''' where key_i = toEnum . ord $ key_i' hash' = hash + key_i hash'' = hash' + (shiftL hash' 10) hash''' = hash'' `xor` (shiftR hash'' 6) loop_finish :: Word32 -> Word32 loop_finish hash = hash''' where hash' = hash + (shiftL hash 3) hash'' = hash' `xor` (shiftR hash' 11) hash''' = hash'' + (shiftL hash'' 15) -- -- * Basic sandbox functions. -- -- | If @--sandbox-config-file@ wasn't given on the command-line, set it to the -- value of the @CABAL_SANDBOX_CONFIG@ environment variable, or else to -- 'NoFlag'. updateSandboxConfigFileFlag :: GlobalFlags -> IO GlobalFlags updateSandboxConfigFileFlag globalFlags = case globalSandboxConfigFile globalFlags of Flag _ -> return globalFlags NoFlag -> do f' <- fmap (maybe NoFlag Flag) . lookupEnv $ "CABAL_SANDBOX_CONFIG" return globalFlags { globalSandboxConfigFile = f' } -- | Return the path to the sandbox config file - either the default or the one -- specified with @--sandbox-config-file@. getSandboxConfigFilePath :: GlobalFlags -> IO FilePath getSandboxConfigFilePath globalFlags = do let sandboxConfigFileFlag = globalSandboxConfigFile globalFlags case sandboxConfigFileFlag of NoFlag -> do pkgEnvDir <- getCurrentDirectory return (pkgEnvDir </> sandboxPackageEnvironmentFile) Flag path -> return path -- | Load the @cabal.sandbox.config@ file (and possibly the optional -- @cabal.config@). In addition to a @PackageEnvironment@, also return a -- canonical path to the sandbox. 
Exit with error if the sandbox directory or -- the package environment file do not exist. tryLoadSandboxConfig :: Verbosity -> GlobalFlags -> IO (FilePath, PackageEnvironment) tryLoadSandboxConfig verbosity globalFlags = do path <- getSandboxConfigFilePath globalFlags tryLoadSandboxPackageEnvironmentFile verbosity path (globalConfigFile globalFlags) -- | Return the name of the package index file for this package environment. tryGetIndexFilePath :: SavedConfig -> IO FilePath tryGetIndexFilePath config = tryGetIndexFilePath' (savedGlobalFlags config) -- | The same as 'tryGetIndexFilePath', but takes 'GlobalFlags' instead of -- 'SavedConfig'. tryGetIndexFilePath' :: GlobalFlags -> IO FilePath tryGetIndexFilePath' globalFlags = do let paths = fromNubList $ globalLocalRepos globalFlags case paths of [] -> die $ "Distribution.Client.Sandbox.tryGetIndexFilePath: " ++ "no local repos found. " ++ checkConfiguration _ -> return $ (last paths) </> Index.defaultIndexFileName where checkConfiguration = "Please check your configuration ('" ++ userPackageEnvironmentFile ++ "')." -- | Try to extract a 'PackageDB' from 'ConfigFlags'. Gives a better error -- message than just pattern-matching. getSandboxPackageDB :: ConfigFlags -> IO PackageDB getSandboxPackageDB configFlags = do case configPackageDBs configFlags of [Just sandboxDB@(SpecificPackageDB _)] -> return sandboxDB -- TODO: should we allow multiple package DBs (e.g. with 'inherit')? [] -> die $ "Sandbox package DB is not specified. " ++ sandboxConfigCorrupt [_] -> die $ "Unexpected contents of the 'package-db' field. " ++ sandboxConfigCorrupt _ -> die $ "Too many package DBs provided. " ++ sandboxConfigCorrupt where sandboxConfigCorrupt = "Your 'cabal.sandbox.config' is probably corrupt." -- | Which packages are installed in the sandbox package DB? getInstalledPackagesInSandbox :: Verbosity -> ConfigFlags -> Compiler -> ProgramConfiguration -> IO InstalledPackageIndex getInstalledPackagesInSandbox verbosity configFlags comp conf = do sandboxDB <- getSandboxPackageDB configFlags getPackageDBContents verbosity comp sandboxDB conf -- | Temporarily add $SANDBOX_DIR/bin to $PATH. withSandboxBinDirOnSearchPath :: FilePath -> IO a -> IO a withSandboxBinDirOnSearchPath sandboxDir = bracket_ addBinDir rmBinDir where -- TODO: Instead of modifying the global process state, it'd be better to -- set the environment individually for each subprocess invocation. This -- will have to wait until the Shell monad is implemented; without it the -- required changes are too intrusive. addBinDir :: IO () addBinDir = do mbOldPath <- lookupEnv "PATH" let newPath = maybe sandboxBin ((++) sandboxBin . (:) searchPathSeparator) mbOldPath setEnv "PATH" newPath rmBinDir :: IO () rmBinDir = do oldPath <- getSearchPath let newPath = intercalate [searchPathSeparator] (delete sandboxBin oldPath) setEnv "PATH" newPath sandboxBin = sandboxDir </> "bin" -- | Initialise a package DB for this compiler if it doesn't exist. initPackageDBIfNeeded :: Verbosity -> ConfigFlags -> Compiler -> ProgramConfiguration -> IO () initPackageDBIfNeeded verbosity configFlags comp conf = do SpecificPackageDB dbPath <- getSandboxPackageDB configFlags packageDBExists <- doesDirectoryExist dbPath unless packageDBExists $ Register.initPackageDB verbosity comp conf dbPath when packageDBExists $ debug verbosity $ "The package database already exists: " ++ dbPath -- | Entry point for the 'cabal sandbox dump-pkgenv' command. 
dumpPackageEnvironment :: Verbosity -> SandboxFlags -> GlobalFlags -> IO () dumpPackageEnvironment verbosity _sandboxFlags globalFlags = do (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags commentPkgEnv <- commentPackageEnvironment sandboxDir putStrLn . showPackageEnvironmentWithComments (Just commentPkgEnv) $ pkgEnv -- | Entry point for the 'cabal sandbox init' command. sandboxInit :: Verbosity -> SandboxFlags -> GlobalFlags -> IO () sandboxInit verbosity sandboxFlags globalFlags = do -- Warn if there's a 'cabal-dev' sandbox. isCabalDevSandbox <- liftM2 (&&) (doesDirectoryExist "cabal-dev") (doesFileExist $ "cabal-dev" </> "cabal.config") when isCabalDevSandbox $ warn verbosity $ "You are apparently using a legacy (cabal-dev) sandbox. " ++ "Legacy sandboxes may interact badly with native Cabal sandboxes. " ++ "You may want to delete the 'cabal-dev' directory to prevent issues." -- Create the sandbox directory. let sandboxDir' = fromFlagOrDefault defaultSandboxLocation (sandboxLocation sandboxFlags) createDirectoryIfMissingVerbose verbosity True sandboxDir' sandboxDir <- tryCanonicalizePath sandboxDir' setFileHidden sandboxDir -- Determine which compiler to use (using the value from ~/.cabal/config). userConfig <- loadConfig verbosity (globalConfigFile globalFlags) (comp, platform, conf) <- configCompilerAuxEx (savedConfigureFlags userConfig) -- Create the package environment file. pkgEnvFile <- getSandboxConfigFilePath globalFlags createPackageEnvironmentFile verbosity sandboxDir pkgEnvFile comp platform (_sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags let config = pkgEnvSavedConfig pkgEnv configFlags = savedConfigureFlags config -- Create the index file if it doesn't exist. indexFile <- tryGetIndexFilePath config indexFileExists <- doesFileExist indexFile if indexFileExists then notice verbosity $ "Using an existing sandbox located at " ++ sandboxDir else notice verbosity $ "Creating a new sandbox at " ++ sandboxDir Index.createEmpty verbosity indexFile -- Create the package DB for the default compiler. initPackageDBIfNeeded verbosity configFlags comp conf maybeAddCompilerTimestampRecord verbosity sandboxDir indexFile (compilerId comp) platform -- | Entry point for the 'cabal sandbox delete' command. sandboxDelete :: Verbosity -> SandboxFlags -> GlobalFlags -> IO () sandboxDelete verbosity _sandboxFlags globalFlags = do (useSandbox, _) <- loadConfigOrSandboxConfig verbosity globalFlags { globalRequireSandbox = Flag False } case useSandbox of NoSandbox -> warn verbosity "Not in a sandbox." UseSandbox sandboxDir -> do curDir <- getCurrentDirectory pkgEnvFile <- getSandboxConfigFilePath globalFlags -- Remove the @cabal.sandbox.config@ file, unless it's in a non-standard -- location. let isNonDefaultConfigLocation = not $ equalFilePath pkgEnvFile $ curDir </> sandboxPackageEnvironmentFile if isNonDefaultConfigLocation then warn verbosity $ "Sandbox config file is in non-default location: '" ++ pkgEnvFile ++ "'.\n Please delete manually." else removeFile pkgEnvFile -- Remove the sandbox directory, unless we're using a shared sandbox. let isNonDefaultSandboxLocation = not $ equalFilePath sandboxDir $ curDir </> defaultSandboxLocation when isNonDefaultSandboxLocation $ die $ "Non-default sandbox location used: '" ++ sandboxDir ++ "'.\nAssuming a shared sandbox. Please delete '" ++ sandboxDir ++ "' manually." 
absSandboxDir <- canonicalizePath sandboxDir notice verbosity $ "Deleting the sandbox located at " ++ absSandboxDir removeDirectoryRecursive absSandboxDir let pathInsideSandbox = isPrefixOf absSandboxDir -- Warn the user if deleting the sandbox deleted a package database -- referenced in the current environment. checkPackagePaths var = do let checkPath path = do absPath <- canonicalizePath path (when (pathInsideSandbox absPath) . warn verbosity) (var ++ " refers to package database " ++ path ++ " inside the deleted sandbox.") liftM (maybe [] splitSearchPath) (lookupEnv var) >>= mapM_ checkPath checkPackagePaths "CABAL_SANDBOX_PACKAGE_PATH" checkPackagePaths "GHC_PACKAGE_PATH" checkPackagePaths "GHCJS_PACKAGE_PATH" -- Common implementation of 'sandboxAddSource' and 'sandboxAddSourceSnapshot'. doAddSource :: Verbosity -> [FilePath] -> FilePath -> PackageEnvironment -> BuildTreeRefType -> IO () doAddSource verbosity buildTreeRefs sandboxDir pkgEnv refType = do let savedConfig = pkgEnvSavedConfig pkgEnv indexFile <- tryGetIndexFilePath savedConfig -- If we're running 'sandbox add-source' for the first time for this compiler, -- we need to create an initial timestamp record. (comp, platform, _) <- configCompilerAuxEx . savedConfigureFlags $ savedConfig maybeAddCompilerTimestampRecord verbosity sandboxDir indexFile (compilerId comp) platform withAddTimestamps sandboxDir $ do -- Path canonicalisation is done in addBuildTreeRefs, but we do it -- twice because of the timestamps file. buildTreeRefs' <- mapM tryCanonicalizePath buildTreeRefs Index.addBuildTreeRefs verbosity indexFile buildTreeRefs' refType return buildTreeRefs' -- | Entry point for the 'cabal sandbox add-source' command. sandboxAddSource :: Verbosity -> [FilePath] -> SandboxFlags -> GlobalFlags -> IO () sandboxAddSource verbosity buildTreeRefs sandboxFlags globalFlags = do (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags if fromFlagOrDefault False (sandboxSnapshot sandboxFlags) then sandboxAddSourceSnapshot verbosity buildTreeRefs sandboxDir pkgEnv else doAddSource verbosity buildTreeRefs sandboxDir pkgEnv LinkRef -- | Entry point for the 'cabal sandbox add-source --snapshot' command. sandboxAddSourceSnapshot :: Verbosity -> [FilePath] -> FilePath -> PackageEnvironment -> IO () sandboxAddSourceSnapshot verbosity buildTreeRefs sandboxDir pkgEnv = do let snapshotDir = sandboxDir </> snapshotDirectoryName -- Use 'D.S.SrcDist.prepareTree' to copy each package's files to our private -- location. createDirectoryIfMissingVerbose verbosity True snapshotDir -- Collect the package descriptions first, so that if some path does not refer -- to a cabal package, we fail immediately. pkgs <- forM buildTreeRefs $ \buildTreeRef -> inDir (Just buildTreeRef) $ return . flattenPackageDescription =<< readPackageDescription verbosity =<< defaultPackageDesc verbosity -- Copy the package sources to "snapshots/$PKGNAME-$VERSION-tmp". If -- 'prepareTree' throws an error at any point, the old snapshots will still be -- in consistent state. tmpDirs <- forM (zip buildTreeRefs pkgs) $ \(buildTreeRef, pkg) -> inDir (Just buildTreeRef) $ do let targetDir = snapshotDir </> (display . packageId $ pkg) targetTmpDir = targetDir ++ "-tmp" dirExists <- doesDirectoryExist targetTmpDir when dirExists $ removeDirectoryRecursive targetDir createDirectory targetTmpDir prepareTree verbosity pkg Nothing targetTmpDir knownSuffixHandlers return (targetTmpDir, targetDir) -- Now rename the "snapshots/$PKGNAME-$VERSION-tmp" dirs to -- "snapshots/$PKGNAME-$VERSION". 
snapshots <- forM tmpDirs $ \(targetTmpDir, targetDir) -> do dirExists <- doesDirectoryExist targetDir when dirExists $ removeDirectoryRecursive targetDir renameDirectory targetTmpDir targetDir return targetDir -- Once the packages are copied, just 'add-source' them as usual. doAddSource verbosity snapshots sandboxDir pkgEnv SnapshotRef -- | Entry point for the 'cabal sandbox delete-source' command. sandboxDeleteSource :: Verbosity -> [FilePath] -> SandboxFlags -> GlobalFlags -> IO () sandboxDeleteSource verbosity buildTreeRefs _sandboxFlags globalFlags = do (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags indexFile <- tryGetIndexFilePath (pkgEnvSavedConfig pkgEnv) (results, convDict) <- Index.removeBuildTreeRefs verbosity indexFile buildTreeRefs let (failedPaths, removedPaths) = partitionEithers results removedRefs = fmap convDict removedPaths unless (null removedPaths) $ do removeTimestamps sandboxDir removedPaths notice verbosity $ "Success deleting sources: " ++ showL removedRefs ++ "\n\n" unless (null failedPaths) $ do let groupedFailures = groupBy errorType failedPaths mapM_ handleErrors groupedFailures die $ "The sources with the above errors were skipped. (" ++ showL (fmap getPath failedPaths) ++ ")" notice verbosity $ "Note: 'sandbox delete-source' only unregisters the " ++ "source dependency, but does not remove the package " ++ "from the sandbox package DB.\n\n" ++ "Use 'sandbox hc-pkg -- unregister' to do that." where getPath (Index.ErrNonregisteredSource p) = p getPath (Index.ErrNonexistentSource p) = p showPaths f = concat . intersperse " " . fmap (show . f) showL = showPaths id showE [] = return ' ' showE errs = showPaths getPath errs errorType Index.ErrNonregisteredSource{} Index.ErrNonregisteredSource{} = True errorType Index.ErrNonexistentSource{} Index.ErrNonexistentSource{} = True errorType _ _ = False handleErrors [] = return () handleErrors errs@(Index.ErrNonregisteredSource{}:_) = warn verbosity ("Sources not registered: " ++ showE errs ++ "\n\n") handleErrors errs@(Index.ErrNonexistentSource{}:_) = warn verbosity ("Source directory not found for paths: " ++ showE errs ++ "\n" ++ "If you are trying to delete a reference to a removed directory, " ++ "please provide the full absolute path " ++ "(as given by `sandbox list-sources`).\n\n") -- | Entry point for the 'cabal sandbox list-sources' command. sandboxListSources :: Verbosity -> SandboxFlags -> GlobalFlags -> IO () sandboxListSources verbosity _sandboxFlags globalFlags = do (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags indexFile <- tryGetIndexFilePath (pkgEnvSavedConfig pkgEnv) refs <- Index.listBuildTreeRefs verbosity Index.ListIgnored Index.LinksAndSnapshots indexFile when (null refs) $ notice verbosity $ "Index file '" ++ indexFile ++ "' has no references to local build trees." when (not . null $ refs) $ do notice verbosity $ "Source dependencies registered " ++ "in the current sandbox ('" ++ sandboxDir ++ "'):\n\n" mapM_ putStrLn refs notice verbosity $ "\nTo unregister source dependencies, " ++ "use the 'sandbox delete-source' command." -- | Entry point for the 'cabal sandbox hc-pkg' command. Invokes the @hc-pkg@ -- tool with provided arguments, restricted to the sandbox. sandboxHcPkg :: Verbosity -> SandboxFlags -> GlobalFlags -> [String] -> IO () sandboxHcPkg verbosity _sandboxFlags globalFlags extraArgs = do (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags let configFlags = savedConfigureFlags . 
pkgEnvSavedConfig $ pkgEnv -- Invoke hc-pkg for the most recently configured compiler (if any), -- using the right package-db for the compiler (see #1935). (comp, platform, conf) <- getPersistOrConfigCompiler configFlags let dir = sandboxPackageDBPath sandboxDir comp platform dbStack = [GlobalPackageDB, SpecificPackageDB dir] Register.invokeHcPkg verbosity comp conf dbStack extraArgs updateInstallDirs :: Flag Bool -> (UseSandbox, SavedConfig) -> (UseSandbox, SavedConfig) updateInstallDirs userInstallFlag (useSandbox, savedConfig) = case useSandbox of NoSandbox -> let savedConfig' = savedConfig { savedConfigureFlags = configureFlags { configInstallDirs = installDirs } } in (useSandbox, savedConfig') _ -> (useSandbox, savedConfig) where configureFlags = savedConfigureFlags savedConfig userInstallDirs = savedUserInstallDirs savedConfig globalInstallDirs = savedGlobalInstallDirs savedConfig installDirs | userInstall = userInstallDirs | otherwise = globalInstallDirs userInstall = fromFlagOrDefault defaultUserInstall (configUserInstall configureFlags `mappend` userInstallFlag) -- | Check which type of package environment we're in and return a -- correctly-initialised @SavedConfig@ and a @UseSandbox@ value that indicates -- whether we're working in a sandbox. loadConfigOrSandboxConfig :: Verbosity -> GlobalFlags -- ^ For @--config-file@ and -- @--sandbox-config-file@. -> IO (UseSandbox, SavedConfig) loadConfigOrSandboxConfig verbosity globalFlags = do let configFileFlag = globalConfigFile globalFlags sandboxConfigFileFlag = globalSandboxConfigFile globalFlags ignoreSandboxFlag = globalIgnoreSandbox globalFlags pkgEnvDir <- getPkgEnvDir sandboxConfigFileFlag pkgEnvType <- classifyPackageEnvironment pkgEnvDir sandboxConfigFileFlag ignoreSandboxFlag case pkgEnvType of -- A @cabal.sandbox.config@ file (and possibly @cabal.config@) is present. SandboxPackageEnvironment -> do (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags -- ^ Prints an error message and exits on error. let config = pkgEnvSavedConfig pkgEnv return (UseSandbox sandboxDir, config) -- Only @cabal.config@ is present. UserPackageEnvironment -> do config <- loadConfig verbosity configFileFlag userConfig <- loadUserConfig verbosity pkgEnvDir Nothing let config' = config `mappend` userConfig dieIfSandboxRequired config' return (NoSandbox, config') -- Neither @cabal.sandbox.config@ nor @cabal.config@ are present. AmbientPackageEnvironment -> do config <- loadConfig verbosity configFileFlag let globalConstraintsOpt = flagToMaybe . globalConstraintsFile . savedGlobalFlags $ config globalConstraintConfig <- loadUserConfig verbosity pkgEnvDir globalConstraintsOpt let config' = config `mappend` globalConstraintConfig dieIfSandboxRequired config return (NoSandbox, config') where -- Return the path to the package environment directory - either the -- current directory or the one that @--sandbox-config-file@ resides in. getPkgEnvDir :: (Flag FilePath) -> IO FilePath getPkgEnvDir sandboxConfigFileFlag = do case sandboxConfigFileFlag of NoFlag -> getCurrentDirectory Flag path -> tryCanonicalizePath . takeDirectory $ path -- Die if @--require-sandbox@ was specified and we're not inside a sandbox. dieIfSandboxRequired :: SavedConfig -> IO () dieIfSandboxRequired config = checkFlag flag where flag = (globalRequireSandbox . savedGlobalFlags $ config) `mappend` (globalRequireSandbox globalFlags) checkFlag (Flag True) = die $ "'require-sandbox' is set to True, but no sandbox is present. 
" ++ "Use '--no-require-sandbox' if you want to override " ++ "'require-sandbox' temporarily." checkFlag (Flag False) = return () checkFlag (NoFlag) = return () -- | Return the saved \"dist/\" prefix, or the default prefix. findSavedDistPref :: SavedConfig -> Flag FilePath -> IO FilePath findSavedDistPref config flagDistPref = do let defDistPref = useDistPref defaultSetupScriptOptions flagDistPref' = configDistPref (savedConfigureFlags config) `mappend` flagDistPref findDistPref defDistPref flagDistPref' -- | If we're in a sandbox, call @withSandboxBinDirOnSearchPath@, otherwise do -- nothing. maybeWithSandboxDirOnSearchPath :: UseSandbox -> IO a -> IO a maybeWithSandboxDirOnSearchPath NoSandbox act = act maybeWithSandboxDirOnSearchPath (UseSandbox sandboxDir) act = withSandboxBinDirOnSearchPath sandboxDir $ act -- | Had reinstallAddSourceDeps actually reinstalled any dependencies? data WereDepsReinstalled = ReinstalledSomeDeps | NoDepsReinstalled -- | Reinstall those add-source dependencies that have been modified since -- we've last installed them. Assumes that we're working inside a sandbox. reinstallAddSourceDeps :: Verbosity -> ConfigFlags -> ConfigExFlags -> InstallFlags -> GlobalFlags -> FilePath -> IO WereDepsReinstalled reinstallAddSourceDeps verbosity configFlags' configExFlags installFlags globalFlags sandboxDir = topHandler' $ do let sandboxDistPref = sandboxBuildDir sandboxDir configFlags = configFlags' { configDistPref = Flag sandboxDistPref } haddockFlags = mempty { haddockDistPref = Flag sandboxDistPref } (comp, platform, conf) <- configCompilerAux' configFlags retVal <- newIORef NoDepsReinstalled withSandboxPackageInfo verbosity configFlags globalFlags comp platform conf sandboxDir $ \sandboxPkgInfo -> unless (null $ modifiedAddSourceDependencies sandboxPkgInfo) $ do withRepoContext verbosity globalFlags $ \repoContext -> do let args :: InstallArgs args = ((configPackageDB' configFlags) ,repoContext ,comp, platform, conf ,UseSandbox sandboxDir, Just sandboxPkgInfo ,globalFlags, configFlags, configExFlags, installFlags ,haddockFlags) -- This can actually be replaced by a call to 'install', but we use a -- lower-level API because of layer separation reasons. Additionally, we -- might want to use some lower-level features this in the future. withSandboxBinDirOnSearchPath sandboxDir $ do installContext <- makeInstallContext verbosity args Nothing installPlan <- foldProgress logMsg die' return =<< makeInstallPlan verbosity args installContext processInstallPlan verbosity args installContext installPlan writeIORef retVal ReinstalledSomeDeps readIORef retVal where die' message = die (message ++ installFailedInSandbox) -- TODO: use a better error message, remove duplication. installFailedInSandbox = "Note: when using a sandbox, all packages are required to have " ++ "consistent dependencies. Try reinstalling/unregistering the " ++ "offending packages or recreating the sandbox." logMsg message rest = debugNoWrap verbosity message >> rest topHandler' = topHandlerWith $ \_ -> do warn verbosity "Couldn't reinstall some add-source dependencies." -- Here we can't know whether any deps have been reinstalled, so we have -- to be conservative. return ReinstalledSomeDeps -- | Produce a 'SandboxPackageInfo' and feed it to the given action. Note that -- we don't update the timestamp file here - this is done in -- 'postInstallActions'. 
withSandboxPackageInfo :: Verbosity -> ConfigFlags -> GlobalFlags -> Compiler -> Platform -> ProgramConfiguration -> FilePath -> (SandboxPackageInfo -> IO ()) -> IO () withSandboxPackageInfo verbosity configFlags globalFlags comp platform conf sandboxDir cont = do -- List all add-source deps. indexFile <- tryGetIndexFilePath' globalFlags buildTreeRefs <- Index.listBuildTreeRefs verbosity Index.DontListIgnored Index.OnlyLinks indexFile let allAddSourceDepsSet = S.fromList buildTreeRefs -- List all packages installed in the sandbox. installedPkgIndex <- getInstalledPackagesInSandbox verbosity configFlags comp conf let err = "Error reading sandbox package information." -- Get the package descriptions for all add-source deps. depsCabalFiles <- mapM (flip tryFindAddSourcePackageDesc err) buildTreeRefs depsPkgDescs <- mapM (readPackageDescription verbosity) depsCabalFiles let depsMap = M.fromList (zip buildTreeRefs depsPkgDescs) isInstalled pkgid = not . null . InstalledPackageIndex.lookupSourcePackageId installedPkgIndex $ pkgid installedDepsMap = M.filter (isInstalled . packageId) depsMap -- Get the package ids of modified (and installed) add-source deps. modifiedAddSourceDeps <- listModifiedDeps verbosity sandboxDir (compilerId comp) platform installedDepsMap -- 'fromJust' here is safe because 'modifiedAddSourceDeps' are guaranteed to -- be a subset of the keys of 'depsMap'. let modifiedDeps = [ (modDepPath, fromJust $ M.lookup modDepPath depsMap) | modDepPath <- modifiedAddSourceDeps ] modifiedDepsMap = M.fromList modifiedDeps assert (all (`S.member` allAddSourceDepsSet) modifiedAddSourceDeps) (return ()) if (null modifiedDeps) then info verbosity $ "Found no modified add-source deps." else notice verbosity $ "Some add-source dependencies have been modified. " ++ "They will be reinstalled..." -- Get the package ids of the remaining add-source deps (some are possibly not -- installed). let otherDeps = M.assocs (depsMap `M.difference` modifiedDepsMap) -- Finally, assemble a 'SandboxPackageInfo'. cont $ SandboxPackageInfo (map toSourcePackage modifiedDeps) (map toSourcePackage otherDeps) installedPkgIndex allAddSourceDepsSet where toSourcePackage (path, pkgDesc) = SourcePackage (packageId pkgDesc) pkgDesc (LocalUnpackedPackage path) Nothing -- | Same as 'withSandboxPackageInfo' if we're inside a sandbox and the -- identity otherwise. maybeWithSandboxPackageInfo :: Verbosity -> ConfigFlags -> GlobalFlags -> Compiler -> Platform -> ProgramConfiguration -> UseSandbox -> (Maybe SandboxPackageInfo -> IO ()) -> IO () maybeWithSandboxPackageInfo verbosity configFlags globalFlags comp platform conf useSandbox cont = case useSandbox of NoSandbox -> cont Nothing UseSandbox sandboxDir -> withSandboxPackageInfo verbosity configFlags globalFlags comp platform conf sandboxDir (\spi -> cont (Just spi)) -- | Check if a sandbox is present and call @reinstallAddSourceDeps@ in that -- case. maybeReinstallAddSourceDeps :: Verbosity -> Flag (Maybe Int) -- ^ The '-j' flag -> ConfigFlags -- ^ Saved configure flags -- (from dist/setup-config) -> GlobalFlags -> (UseSandbox, SavedConfig) -> IO WereDepsReinstalled maybeReinstallAddSourceDeps verbosity numJobsFlag configFlags' globalFlags' (useSandbox, config) = do case useSandbox of NoSandbox -> return NoDepsReinstalled UseSandbox sandboxDir -> do -- Reinstall the modified add-source deps. 
let configFlags = savedConfigureFlags config `mappendSomeSavedFlags` configFlags' configExFlags = defaultConfigExFlags `mappend` savedConfigureExFlags config installFlags' = defaultInstallFlags `mappend` savedInstallFlags config installFlags = installFlags' { installNumJobs = installNumJobs installFlags' `mappend` numJobsFlag } globalFlags = savedGlobalFlags config -- This makes it possible to override things like 'remote-repo-cache' -- from the command line. These options are hidden, and are only -- useful for debugging, so this should be fine. `mappend` globalFlags' reinstallAddSourceDeps verbosity configFlags configExFlags installFlags globalFlags sandboxDir where -- NOTE: we can't simply do @sandboxConfigFlags `mappend` savedFlags@ -- because we don't want to auto-enable things like 'library-profiling' for -- all add-source dependencies even if the user has passed -- '--enable-library-profiling' to 'cabal configure'. These options are -- supposed to be set in 'cabal.config'. mappendSomeSavedFlags :: ConfigFlags -> ConfigFlags -> ConfigFlags mappendSomeSavedFlags sandboxConfigFlags savedFlags = sandboxConfigFlags { configHcFlavor = configHcFlavor sandboxConfigFlags `mappend` configHcFlavor savedFlags, configHcPath = configHcPath sandboxConfigFlags `mappend` configHcPath savedFlags, configHcPkg = configHcPkg sandboxConfigFlags `mappend` configHcPkg savedFlags, configProgramPaths = configProgramPaths sandboxConfigFlags `mappend` configProgramPaths savedFlags, configProgramArgs = configProgramArgs sandboxConfigFlags `mappend` configProgramArgs savedFlags, -- NOTE: Unconditionally choosing the value from -- 'dist/setup-config'. Sandbox package DB location may have been -- changed by 'configure -w'. configPackageDBs = configPackageDBs savedFlags -- FIXME: Is this compatible with the 'inherit' feature? } -- -- Utils (transitionary) -- -- FIXME: configPackageDB' and configCompilerAux' don't really belong in this -- module -- configPackageDB' :: ConfigFlags -> PackageDBStack configPackageDB' cfg = interpretPackageDbFlags userInstall (configPackageDBs cfg) where userInstall = fromFlagOrDefault True (configUserInstall cfg) configCompilerAux' :: ConfigFlags -> IO (Compiler, Platform, ProgramConfiguration) configCompilerAux' configFlags = configCompilerAuxEx configFlags --FIXME: make configCompilerAux use a sensible verbosity { configVerbosity = fmap lessVerbose (configVerbosity configFlags) } -- | Try to read the most recently configured compiler from the -- 'localBuildInfoFile', falling back on 'configCompilerAuxEx' if it -- cannot be read. getPersistOrConfigCompiler :: ConfigFlags -> IO (Compiler, Platform, ProgramConfiguration) getPersistOrConfigCompiler configFlags = do distPref <- findDistPrefOrDefault (configDistPref configFlags) mlbi <- maybeGetPersistBuildConfig distPref case mlbi of Nothing -> do configCompilerAux' configFlags Just lbi -> return ( LocalBuildInfo.compiler lbi , LocalBuildInfo.hostPlatform lbi , LocalBuildInfo.withPrograms lbi )
tolysz/prepare-ghcjs
spec-lts8/cabal/cabal-install/Distribution/Client/Sandbox.hs
Haskell
bsd-3-clause
41,442
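The build-directory naming in sandboxBuildDir above hashes the sandbox path with the Jenkins one-at-a-time function. The sketch below re-derives just that piece so it can be run in isolation; the example path in main is an illustrative assumption.

-- Self-contained sketch of the hash used by sandboxBuildDir above.
import Data.Bits (shiftL, shiftR, xor)
import Data.Char (ord)
import Data.List (foldl')
import Data.Word (Word32)
import Numeric (showHex)

-- Jenkins one-at-a-time hash, as in the module above.
jenkins :: String -> Word32
jenkins = finish . foldl' step 0
  where
    step h c = let h'  = h + fromIntegral (ord c)
                   h'' = h' + (h' `shiftL` 10)
               in h'' `xor` (h'' `shiftR` 6)
    finish h = let h'  = h + (h `shiftL` 3)
                   h'' = h' `xor` (h' `shiftR` 11)
               in h'' + (h'' `shiftL` 15)

main :: IO ()
main = putStrLn $
  "dist/dist-sandbox-" ++ showHex (jenkins "/home/user/project/.cabal-sandbox") ""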
module PackageTests.EmptyLib.Check where

import PackageTests.PackageTester
import System.FilePath
import Test.Tasty.HUnit

-- See https://github.com/haskell/cabal/issues/1241
emptyLib :: FilePath -> Assertion
emptyLib ghcPath = do
  let spec = PackageSpec
        { directory  = "PackageTests" </> "EmptyLib" </> "empty"
        , configOpts = []
        , distPref   = Nothing
        }
  result <- cabal_build spec ghcPath
  assertBuildSucceeded result
Helkafen/cabal
Cabal/tests/PackageTests/EmptyLib/Check.hs
Haskell
bsd-3-clause
464
{- $Id: AFRPTestsDelay.hs,v 1.2 2003/11/10 21:28:58 antony Exp $ ****************************************************************************** * A F R P * * * * Module: AFRPTestsPre * * Purpose: Test cases for pre and (derived) combinators * * that (semantically) involves a pre. * * Authors: Antony Courtney and Henrik Nilsson * * * * Copyright (c) Yale University, 2003 * * Copyright (c) University of Nottingham, 2005 * * * ****************************************************************************** -} module AFRPTestsPre (pre_tr, pre_trs) where import FRP.Yampa import AFRPTestsCommon ------------------------------------------------------------------------------ -- Test cases for pre and related combinators ------------------------------------------------------------------------------ pre_t0 = testSF1 (iPre 17) pre_t0r = [17.0,0.0,1.0,2.0,3.0,4.0,5.0,6.0,7.0,8.0,9.0,10.0,11.0,12.0,13.0,14.0, 15.0,16.0,17.0,18.0,19.0,20.0,21.0,22.0,23.0] pre_t1 = testSF2 (iPre 17) pre_t1r = [17.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,2.0,2.0,2.0,2.0,2.0, 3.0,3.0,3.0,3.0,3.0,4.0,4.0,4.0,4.0] pre_t2 = testSF1 (time >>> arr (\t -> sin (0.5 * t * pi + pi)) >>> loop (arr (\(x1,x2) -> let x' = max x1 x2 in (x',x')) >>> second (iPre 0.0))) pre_t2r = take 25 (let xs = [ sin (0.5 * t * pi + pi) | t <- [0.0, 0.25 ..] ] in tail (scanl max 0 xs)) -- This is a (somewhat strange) way of doing a counter that -- stops after reaching a threshold. Note that the ingoing event -- is *control dependent* on the output of the counter, so -- "dHold" really has to have the capability of delivering an -- output without looking at the current input at all. pre_t3, pre_t3r :: [Int] pre_t3 = take 50 (embed sf (deltaEncode 0.25 (repeat ()))) where sf = repeatedly 1.0 () >>> (loop $ arr (\(e,c) -> (e `tag` (c + 1)) `gate` (c < 10)) >>> dHold 0 >>> arr dup) pre_t3r = [0,0,0,0, -- 0s 0,1,1,1, -- 1s 1,2,2,2, -- 2s 2,3,3,3, -- 3s 3,4,4,4, -- 4s 4,5,5,5, -- 5s 5,6,6,6, -- 6s 6,7,7,7, -- 7s 7,8,8,8, -- 8s 8,9,9,9, -- 9s 9,10,10,10, -- 10s 10,10,10,10, -- 11s 10,10] -- 12s -- Version of the above that tests that thigs still work OK also if -- there is an initial event. pre_t4, pre_t4r :: [Int] pre_t4 = take 50 (embed sf (deltaEncode 0.25 (repeat ()))) where sf = (now () &&& repeatedly 1.0 ()) >>> arr (uncurry lMerge) >>> (loop $ arr (\(e,c) -> (e `tag` (c + 1)) `gate` (c < 10)) >>> dHold 0 >>> arr dup) pre_t4r = [0,1,1,1, -- 0s 1,2,2,2, -- 1s 2,3,3,3, -- 2s 3,4,4,4, -- 3s 4,5,5,5, -- 4s 5,6,6,6, -- 5s 6,7,7,7, -- 6s 7,8,8,8, -- 7s 8,9,9,9, -- 8s 9,10,10,10, -- 9s 10,10,10,10, -- 10s 10,10,10,10, -- 11s 10,10] -- 12s -- Similar test to "pre_t3" above but for dAccumHold. pre_t5, pre_t5r :: [Int] pre_t5 = take 50 (embed sf (deltaEncode 0.25 (repeat ()))) where sf = repeatedly 1.0 () >>> (loop $ arr (\(e,c) -> (e `tag` (+1)) `gate` (c < 10)) >>> dAccumHold 0 >>> arr dup) pre_t5r = [0,0,0,0, -- 0s 0,1,1,1, -- 1s 1,2,2,2, -- 2s 2,3,3,3, -- 3s 3,4,4,4, -- 4s 4,5,5,5, -- 5s 5,6,6,6, -- 6s 6,7,7,7, -- 7s 7,8,8,8, -- 8s 8,9,9,9, -- 9s 9,10,10,10, -- 10s 10,10,10,10, -- 11s 10,10] -- 12s -- Similar test to "pre_t4" above but for dAccumHold. 
pre_t6, pre_t6r :: [Int] pre_t6 = take 50 (embed sf (deltaEncode 0.25 (repeat ()))) where sf = (now () &&& repeatedly 1.0 ()) >>> arr (uncurry lMerge) >>> (loop $ arr (\(e,c) -> (e `tag` (+1)) `gate` (c < 10)) >>> dAccumHold 0 >>> arr dup) pre_t6r = [0,1,1,1, -- 0s 1,2,2,2, -- 1s 2,3,3,3, -- 2s 3,4,4,4, -- 3s 4,5,5,5, -- 4s 5,6,6,6, -- 5s 6,7,7,7, -- 6s 7,8,8,8, -- 7s 8,9,9,9, -- 8s 9,10,10,10, -- 9s 10,10,10,10, -- 10s 10,10,10,10, -- 11s 10,10] -- 12s -- Similar test to "pre_t3" above but for dAccumHoldBy. pre_t7, pre_t7r :: [Int] pre_t7 = take 50 (embed sf (deltaEncode 0.25 (repeat ()))) where sf = repeatedly 1.0 () >>> (loop $ arr (\(e,c) -> e `gate` (c < 10)) >>> dAccumHoldBy (\c _ -> c + 1) 0 >>> arr dup) pre_t7r = [0,0,0,0, -- 0s 0,1,1,1, -- 1s 1,2,2,2, -- 2s 2,3,3,3, -- 3s 3,4,4,4, -- 4s 4,5,5,5, -- 5s 5,6,6,6, -- 6s 6,7,7,7, -- 7s 7,8,8,8, -- 8s 8,9,9,9, -- 9s 9,10,10,10, -- 10s 10,10,10,10, -- 11s 10,10] -- 12s -- Similar test to "pre_t4" above but for dAccumHoldBy. pre_t8, pre_t8r :: [Int] pre_t8 = take 50 (embed sf (deltaEncode 0.25 (repeat ()))) where sf = (now () &&& repeatedly 1.0 ()) >>> arr (uncurry lMerge) >>> (loop $ arr (\(e,c) -> e `gate` (c < 10)) >>> dAccumHoldBy (\c _ -> c + 1) 0 >>> arr dup) pre_t8r = [0,1,1,1, -- 0s 1,2,2,2, -- 1s 2,3,3,3, -- 2s 3,4,4,4, -- 3s 4,5,5,5, -- 4s 5,6,6,6, -- 5s 6,7,7,7, -- 6s 7,8,8,8, -- 7s 8,9,9,9, -- 8s 9,10,10,10, -- 9s 10,10,10,10, -- 10s 10,10,10,10, -- 11s 10,10] -- 12s pre_trs = [ pre_t0 ~= pre_t0r, pre_t1 ~= pre_t1r, pre_t2 ~= pre_t2r, pre_t3 == pre_t3r, pre_t4 == pre_t4r, pre_t5 == pre_t5r, pre_t6 == pre_t6r, pre_t7 == pre_t7r, pre_t8 == pre_t8r ] pre_tr = and pre_trs
ony/Yampa-core
tests/AFRPTestsPre.hs
Haskell
bsd-3-clause
6,766
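The expected-output lists above are easier to check against a tiny runnable driver. This sketch replays the pre_t0 shape (an iPre initial value followed by the one-step-delayed input) using embed and deltaEncode, the same helpers the test harness relies on; the sample period and the input stream are illustrative.

-- Hypothetical driver: run iPre over a finite sampled input, as the tests do.
import FRP.Yampa (embed, deltaEncode, iPre)

main :: IO ()
main = print (embed (iPre 17) (deltaEncode 0.25 [0 .. 9 :: Double]))
-- expected shape (cf. pre_t0r above): [17.0,0.0,1.0,2.0,...,8.0]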
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd"> <helpset version="2.0" xml:lang="es-ES"> <title>SVN Digger Files</title> <maps> <homeID>svndigger</homeID> <mapref location="map.jhm"/> </maps> <view> <name>TOC</name> <label>Contents</label> <type>org.zaproxy.zap.extension.help.ZapTocView</type> <data>toc.xml</data> </view> <view> <name>Index</name> <label>Index</label> <type>javax.help.IndexView</type> <data>index.xml</data> </view> <view> <name>Search</name> <label>Search</label> <type>javax.help.SearchView</type> <data engine="com.sun.java.help.search.DefaultSearchEngine"> JavaHelpSearch </data> </view> <view> <name>Favorites</name> <label>Favorites</label> <type>javax.help.FavoritesView</type> </view> </helpset>
kingthorin/zap-extensions
addOns/svndigger/src/main/javahelp/help_es_ES/helpset_es_ES.hs
Haskell
apache-2.0
967
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd"> <helpset version="2.0" xml:lang="pt-BR"> <title>Visualização de Navegador | Extensão ZAP</title> <maps> <homeID>top</homeID> <mapref location="map.jhm"/> </maps> <view> <name>TOC</name> <label>Conteúdo</label> <type>org.zaproxy.zap.extension.help.ZapTocView</type> <data>toc.xml</data> </view> <view> <name>Index</name> <label>Índice</label> <type>javax.help.IndexView</type> <data>index.xml</data> </view> <view> <name>Search</name> <label>Localizar</label> <type>javax.help.SearchView</type> <data engine="com.sun.java.help.search.DefaultSearchEngine"> JavaHelpSearch </data> </view> <view> <name>Favorites</name> <label>Favoritos</label> <type>javax.help.FavoritesView</type> </view> </helpset>
thc202/zap-extensions
addOns/browserView/src/main/javahelp/org/zaproxy/zap/extension/browserView/resources/help_pt_BR/helpset_pt_BR.hs
Haskell
apache-2.0
994
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE UndecidableInstances #-}

-- #345

module ShouldCompile where

data Succ n
data Zero

class Plus x y z | x y -> z
instance Plus Zero x x
instance Plus x y z => Plus (Succ x) y (Succ z)

infixr 5 :::

data List :: * -> * -> * where
  Nil   :: List a Zero
  (:::) :: a -> List a n -> List a (Succ n)

append :: Plus x y z => List a x -> List a y -> List a z
append Nil        ys = ys
append (x ::: xs) ys = x ::: append xs ys
sdiehl/ghc
testsuite/tests/gadt/gadt-fd.hs
Haskell
bsd-3-clause
622
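A short usage sketch for the length-indexed list above, assuming the ShouldCompile module is in scope: appending a two-element list to a one-element list yields a list whose length index is computed by the Plus class. The concrete values below are illustrative.

-- Hypothetical values exercising the definitions above.
xs :: List Int (Succ (Succ Zero))
xs = 1 ::: 2 ::: Nil

ys :: List Int (Succ Zero)
ys = 3 ::: Nil

-- The result length, Succ (Succ (Succ Zero)), is derived by the fundep
-- instances of Plus; a wrong annotation here would be a type error.
zs :: List Int (Succ (Succ (Succ Zero)))
zs = append xs ys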
{-# LANGUAGE NoImplicitPrelude, OverloadedStrings #-} -- | -- Copyright : (C) 2019 team@functionalconcepts.org -- License : MIT -- Maintainer : Helmut Merz <helmutm@cy55.de> -- Stability : experimental -- Portability : GHC only (requires STM) -- -- Access to the FCO backend using the fco-actor framework. -- module Fco.Backend.Actor ( -- * Backend Actor Request (..), Response (..), spawnBackend, -- * Usage Example demo ) where import BasicPrelude hiding (lookup) import qualified Data.Text as T import Control.Monad.Extra (whileM) import Data.IntMap (elems) import Control.Concurrent.Actor ( Actor, Behaviour (..), ControlMsg (..), Mailbox, Mailboxes, MsgHandler, StdBoxes (..), messageBox, controlBox, act_children, call, ctxGets, ctxPut, defContext, defListener, mailbox, minimalContext, setDefContext, runActor, send, spawnStdActor, stdBehvs, stdBoxes) import Control.Concurrent.Actor.Config ( ConfigRequest (..), ConfigResponse (..), spawnConfigDef) import Control.Concurrent.Actor.Console (spawnConIn, spawnConOut) import Fco.Backend ( Environment, credentials, dbSettings, dbName, setupEnv, query, storeTriple ) import qualified Fco.Core.Parse as CP import qualified Fco.Core.Show as CS import Fco.Core.Struct (lookup) import qualified Fco.Core.Types as CT import Fco.Core.Types (Namespace (..)) -- | A message used to query or update the backend. data Request = Query CT.Query (Mailbox Response) | Update CT.Triple -- | The response message type as returned (sent back) by the backend actor. newtype Response = Response [CT.Triple] -- | Start a backend actor spawnBackend :: StdBoxes ConfigRequest -> Actor st (StdBoxes Request) spawnBackend config = do ConfigResponse (_, cfg) <- call config (ConfigQuery "backend-pgsql") let db = dbSettings { dbName = lookup "dbname" cfg, credentials = (lookup "dbuser" cfg, lookup "dbpassword" cfg) } env <- liftIO $ setupEnv db spawnStdActor backendHandler env backendHandler :: MsgHandler Environment Request backendHandler env (Query qu client) = do tr <- liftIO $ query env qu send client $ Response tr return $ Just env backendHandler env (Update tr) = do liftIO $ storeTriple env tr return $ Just env -- | An example main function that reads a query from stdin, -- parses it, and queries the backend. -- The query result is printed to stdout. -- -- Enter '? ? ?' to get a list of all triples. demo :: IO () demo = runActor act minimalContext where act = do self <- stdBoxes respBox <- mailbox spawnConIn self config <- spawnConfigDef backend <- spawnBackend config output <- spawnConOut let behvs = stdBehvs self (inpHandler (messageBox backend) respBox) [Behv respBox (responseHandler (messageBox output))] setDefContext () behvs defListener -- message handlers used by the demo function. inpHandler :: Mailbox Request -> Mailbox Response -> MsgHandler st Text inpHandler reqBox respBox state txt = do send reqBox $ Query (CP.parseQuery (Namespace "") txt) respBox return $ Just state responseHandler :: Mailbox Text -> MsgHandler st Response responseHandler outbox state (Response triples) = do send outbox $ unlines (map CS.showTriple triples) return $ Just state
cyberconcepts/fco-backend
src/Fco/Backend/Actor.hs
Haskell
mit
3,488
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances #-} module Language.Erlang.Fold where import Language.Fold import Language.Erlang.Algebra import Language.Erlang.Syntax instance Fold (ErlangAlgebra pr at fn bv iop exp mat pat gua) Program pr where fold f (Program mod exs ims des fns) = programF f (fold f mod) (map (fold f) exs) (map (fold f) ims) (map (fold f) des) (map (fold f) fns) instance Fold (ErlangAlgebra pr at fn bv iop exp mat pat gua) Attribute at where fold f (Module name) = moduleF f name fold f (Export names) = exportF f names fold f (Import name) = importF f name fold f (Define name bv) = defineF f name (fold f bv) instance Fold (ErlangAlgebra pr at fn bv iop exp mat pat gua) Function fn where fold f (Function name pats exp) = functionF f name (map (fold f) pats) (fold f exp) instance Fold (ErlangAlgebra pr at fn bv iop exp mat pat gua) BasicValue bv where fold f (AtomicLiteral s) = atomicLiteralF f s fold f (StringLiteral s) = stringLiteralF f s fold f (NumberLiteral i) = numberLiteralF f i fold f (ProcessLiteral s) = processLiteralF f s instance Fold (ErlangAlgebra pr at fn bv iop exp mat pat gua) InfixOp iop where fold f OpLT = opLTF f fold f OpLEq = opLEqF f fold f OpGT = opGTF f fold f OpGEq = opGEqF f fold f OpEq = opEqF f fold f OpNEq = opNEqF f fold f OpLAnd = opLAndF f fold f OpLOr = opLOrF f fold f OpMul = opMulF f fold f OpDiv = opDivF f fold f OpMod = opModF f fold f OpSub = opSubF f fold f OpBAnd = opBAndF f fold f OpBXor = opBXorF f fold f OpBOr = opBOrF f fold f OpAdd = opAddF f instance Fold (ErlangAlgebra pr at fn bv iop exp mat pat gua) Exp exp where fold f (InfixExp iop exp0 exp) = infixExpF f (fold f iop) (fold f exp0) (fold f exp) fold f (ModExp n0 n) = modExpF f n0 n fold f (Apply exp exps) = applyF f (fold f exp) (map (fold f) exps) fold f (Call exp0 exp) = callF f (fold f exp0) (fold f exp) fold f (Case exp mats) = caseF f (fold f exp) (map (fold f) mats) fold f (FunAnon pats exp) = funAnonF f (map (fold f) pats) (fold f exp) fold f (Receive mats) = receiveF f (map (fold f) mats) fold f (If mats) = ifF f (map (fold f) mats) fold f (Send exp0 exp) = sendF f (fold f exp0) (fold f exp) fold f (Seq exp0 exp) = seqF f (fold f exp0) (fold f exp) fold f (Assign pat exp) = assignF f (fold f pat) (fold f exp) fold f (ExpT exps) = expTF f (map (fold f) exps) fold f (ExpL exps) = expLF f (map (fold f) exps) fold f (ExpVal bv) = expValF f (fold f bv) fold f (ExpVar name) = expVarF f name fold f (RecordCreate name attrs) = recordCreateF f name (map (\(k,v) -> (k, fold f v)) attrs) fold f (Coercion exp) = coercionF f (fold f exp) instance Fold (ErlangAlgebra pr at fn bv iop exp mat pat gua) Match mat where fold f (Match pat gua exp) = matchF f (fold f pat) (fmap (fold f) gua) (fold f exp) instance Fold (ErlangAlgebra pr at fn bv iop exp mat pat gua) Pattern pat where fold f (PatVar name) = patVarF f name fold f (PatT pats) = patTF f (map (fold f) pats) fold f (PatL pats) = patLF f (map (fold f) pats) fold f (PatVal bv) = patValF f (fold f bv) instance Fold (ErlangAlgebra pr at fn bv iop exp mat pat gua) Guard gua where fold f (GuardVal bv) = guardValF f (fold f bv) fold f (GuardVar name) = guardVarF f name fold f (GuardCall g gs) = guardCallF f (fold f g) (map (fold f) gs) fold f (GuardT gs) = guardTF f (map (fold f) gs) fold f (GuardL gs) = guardLF f (map (fold f) gs)
arnihermann/timedreb2erl
src/Language/Erlang/Fold.hs
Haskell
mit
3,586
{-# LANGUAGE RankNTypes #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE RecordWildCards #-} module U.Util.Cache where import Prelude hiding (lookup) import Control.Monad.IO.Class (liftIO) import UnliftIO (MonadIO, newTVarIO, modifyTVar', writeTVar, atomically, readTVar, readTVarIO) import qualified Data.Map as Map import Data.Functor (($>)) import Control.Monad (when) import Data.Foldable (for_) data Cache k v = Cache { lookup_ :: k -> IO (Maybe v) , insert_ :: k -> v -> IO () } lookup :: MonadIO m => Cache k v -> k -> m (Maybe v) lookup c k = liftIO (lookup_ c k) insert :: MonadIO m => Cache k v -> k -> v -> m () insert c k v = liftIO (insert_ c k v) -- Create a cache of unbounded size. cache :: (MonadIO m, Ord k) => m (Cache k v) cache = do t <- newTVarIO Map.empty let lookup k = Map.lookup k <$> readTVarIO t insert k v = do m <- readTVarIO t case Map.lookup k m of Nothing -> atomically $ modifyTVar' t (Map.insert k v) _ -> pure () pure $ Cache lookup insert nullCache :: Cache k v nullCache = Cache (const (pure Nothing)) (\_ _ -> pure ()) -- Create a cache of bounded size. Once the cache -- reaches a size of `maxSize`, older unused entries -- are evicted from the cache. Unlike LRU caching, -- where cache hits require updating LRU info, -- cache hits here are read-only and contention free. semispaceCache :: (MonadIO m, Ord k) => Word -> m (Cache k v) semispaceCache 0 = pure nullCache semispaceCache maxSize = do -- Analogous to semispace GC, keep 2 maps: gen0 and gen1 -- `insert k v` is done in gen0 -- if full, gen1 = gen0; gen0 = Map.empty -- `lookup k` is done in gen0; then gen1 -- if found in gen0, return immediately -- if found in gen1, `insert k v`, then return -- Thus, older keys not recently looked up are forgotten gen0 <- newTVarIO Map.empty gen1 <- newTVarIO Map.empty let lookup k = readTVarIO gen0 >>= \m0 -> case Map.lookup k m0 of Nothing -> readTVarIO gen1 >>= \m1 -> case Map.lookup k m1 of Nothing -> pure Nothing Just v -> insert k v $> Just v just -> pure just insert k v = atomically $ do modifyTVar' gen0 (Map.insert k v) m0 <- readTVar gen0 when (fromIntegral (Map.size m0) >= maxSize) $ do writeTVar gen1 m0 writeTVar gen0 Map.empty pure $ Cache lookup insert -- Cached function application: if a key `k` is not in the cache, -- calls `f` and inserts `f k` results in the cache. apply :: MonadIO m => Cache k v -> (k -> m v) -> k -> m v apply c f k = lookup c k >>= \case Just v -> pure v Nothing -> do v <- f k insert c k v pure v -- Cached function application which only caches values for -- which `f k` is non-empty. For instance, if `g` is `Maybe`, -- and `f x` returns `Nothing`, this won't be cached. -- -- Useful when we think that missing results for `f` may be -- later filled in so we don't want to cache missing results. applyDefined :: (MonadIO m, Applicative g, Traversable g) => Cache k v -> (k -> m (g v)) -> k -> m (g v) applyDefined c f k = lookup c k >>= \case Just v -> pure (pure v) Nothing -> do v <- f k -- only populate the cache if f returns a non-empty result for_ v $ \v -> insert c k v pure v
unisonweb/platform
codebase2/util/U/Util/Cache.hs
Haskell
mit
3,354
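A small usage sketch (assumed, not from the repository) of the caching API above: apply memoises an expensive IO action through a bounded semispaceCache.

-- Hypothetical demo; slowSquare stands in for any expensive computation.
import U.Util.Cache (semispaceCache, apply)

demoCache :: IO ()
demoCache = do
  c <- semispaceCache 100            -- evict older, unused entries once a generation holds ~100 keys
  let slowSquare :: Int -> IO Int
      slowSquare n = putStrLn ("computing " ++ show n) >> pure (n * n)
  r1 <- apply c slowSquare 7         -- runs slowSquare and caches the result
  r2 <- apply c slowSquare 7         -- served from the cache; no second "computing 7" line
  print (r1, r2)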
{-# LANGUAGE ImplicitParams #-} -- | Based on An Introduction to Hybrid Automata by Jean-Francois Raskin -- http://www.cmi.ac.in/~madhavan/courses/acts2010/Raskin_Intro_Hybrid_Automata.pdf module WaterHeater where import Control.Applicative ((<|>)) import Zelus import CyphyUtils data TankState = T1 | T2 | T3 | T4 deriving (Eq, Show) data BurnerEvent = ON | OFF deriving (Eq, Show) data BurnerState = B1 | B2 | B3 | B4 deriving (Eq, Show) data ThermoEvent = UP95 | DW93 deriving (Eq, Show) run :: S Double -> (S Double, E ThermoEvent, E BurnerEvent) run ref_temp = let (temperature, _) = unzip (tank burner_events) burner_events = burner thermo_events thermo_events = thermo ref_temp temperature in (temperature, thermo_events, burner_events) where ?h = 0.01 -- tank :: (?h :: Double) => E BurnerEvent -> S Double tank burner_event = zip temperature state where max_temp = 100 min_temp = 20 init_temp = 99 --min_temp k = 0.075 heat = 150 dtemp = map k1 state * pre temperature + map m state temperature = integ (dtemp `in1t` init_temp) m T1 = k * heat m _ = 0 k1 T1 = -k k1 T3 = -k k1 _ = 0 state = automaton [ T1 >-- temperature >=? max_temp --> T2 , T1 >-- burner_event `isEvent` val OFF --> T3 , T2 >-- burner_event `isEvent` val OFF --> T3 , T3 >-- burner_event `isEvent` val ON --> T1 , T3 >-- temperature <=? min_temp --> T4 , T4 >-- burner_event `isEvent` val ON --> T1 ] burner :: (?h :: Double) => E ThermoEvent -> E BurnerEvent burner thermo_event = on <|> off where delay = 0.1 dy B1 = 0 dy B2 = 1 dy B3 = 0 dy B4 = 1 on = val ON `when` (state `took` (B1 --> B2)) off = val OFF `when` (state `took` (B3 --> B4)) y = integ (map dy state `in1t` 0 `reset` (0 `whenEvent` (on <|> off))) state = automaton [ B1 >-- thermo_event `isEvent` val DW93 --> B2 , B2 >-- y >=? val delay --> B3 , B3 >-- thermo_event `isEvent` val UP95 --> B4 , B4 >-- y >=? val delay --> B1 ] thermo :: (?h :: Double) => S Double -> S Double -> E ThermoEvent thermo ref_temp temperature = (up <|> down) where max_temp = ref_temp + 1 min_temp = ref_temp - 1 frequency = 0.1 samples = val False |-> (z >=? frequency) up = val UP95 `when` (temperature >=? max_temp &&? samples) down = val DW93 `when` (temperature <=? min_temp &&? samples) dz = 1 z = integ (dz `in1t` 0 `reset` (0 `when` samples)) ----------------------------------------------- ----- Examples --- tex = let ?h = 0.01 in thermo tempdown tempdown = [100 - t*0.1 | t <- [0..200]] tempup = [80 + t*0.1 | t <- [0..200]]
koengit/cyphy
src/WaterHeater.hs
Haskell
mit
2,723
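A hedged usage sketch: assuming S Double is a lazy stream (a plain list), as the tempdown/tempup examples at the end of the module suggest, run can be driven with a constant reference temperature to obtain the regulated temperature trace plus the thermostat and burner events.

-- Illustrative only; `repeat 93` plays the role of a constant 93-degree set point.
demoRun :: (S Double, E ThermoEvent, E BurnerEvent)
demoRun = run (repeat 93)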
module Database.Siege.Query where import Data.Char import Data.List data SExpr = Atom String | List [SExpr] parse :: String -> SExpr parse = undefined generate :: SExpr -> String generate (List exprs) = "(" ++ (intercalate " " $ map generate exprs) ++ ")" generate (Atom var) = if any isSpace var || elem ')' var then undefined else var -- main = print $ generate $ List [Atom "lambda", List [Atom "a"], Atom "a"] data PreConditions = Type | HashExists String Bool | HashLookup String PreConditions | SetExists String Bool | ListLookup Int PreConditions | ListEmpty | SequenceAt Int PreConditions | SequenceSize (Int -> Bool) | Branch [PreConditions] data Path = HashLookup String Path | SequenceLookup Int Path data WriteOperation = Set Path Ref | Del Path | SetInsert Path String | SetRemove Path String | DropList Path Int data ReadOperation = Get Path | Exists Path | SetExists Path String | Size Path data Query = Get [ReadOperation] | Alter PreConditions [WriteOperation] --SExpr -> Ref -> (Ref, SExpr)
DanielWaterworth/siege
src/Database/Siege/Query.hs
Haskell
mit
1,082
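Evaluating the example that the module leaves in a comment gives a fully parenthesised s-expression, which follows directly from the generate equations above:

>>> generate (List [Atom "lambda", List [Atom "a"], Atom "a"])
"(lambda (a) a)"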
{-# LANGUAGE TypeFamilies, GADTs, TupleSections #-} module Text.Regex.Applicative.Interface where import Control.Applicative hiding (empty) import Control.Arrow import Control.Monad (guard) import qualified Data.List as List import Data.Maybe import Text.Regex.Applicative.Types import Text.Regex.Applicative.Object -- | 'RE' is a profunctor. This is its contravariant map. -- -- (A dependency on the @profunctors@ package doesn't seem justified.) comap :: (s2 -> s1) -> RE s1 a -> RE s2 a comap f re = case re of Eps -> Eps Symbol t p -> Symbol t (p . f) Alt r1 r2 -> Alt (comap f r1) (comap f r2) App r1 r2 -> App (comap f r1) (comap f r2) Fmap g r -> Fmap g (comap f r) CatMaybes r -> CatMaybes (comap f r) Fail -> Fail Rep gr fn a r -> Rep gr fn a (comap f r) Void r -> Void (comap f r) -- | Match and return any single symbol anySym :: RE s s anySym = msym Just -- | Match zero or more instances of the given expression, which are combined using -- the given folding function. -- -- 'Greediness' argument controls whether this regular expression should match -- as many as possible ('Greedy') or as few as possible ('NonGreedy') instances -- of the underlying expression. reFoldl :: Greediness -> (b -> a -> b) -> b -> RE s a -> RE s b reFoldl g f b a = Rep g f b a -- | Match zero or more instances of the given expression, but as -- few of them as possible (i.e. /non-greedily/). A greedy equivalent of 'few' -- is 'many'. -- -- Examples: -- -- >Text.Regex.Applicative> findFirstPrefix (few anySym <* "b") "ababab" -- >Just ("a","abab") -- >Text.Regex.Applicative> findFirstPrefix (many anySym <* "b") "ababab" -- >Just ("ababa","") few :: RE s a -> RE s [a] few a = reverse <$> Rep NonGreedy (flip (:)) [] a -- | Return matched symbols as part of the return value withMatched :: RE s a -> RE s (a, [s]) withMatched Eps = flip (,) [] <$> Eps withMatched (Symbol t p) = Symbol t (\s -> (,[s]) <$> p s) withMatched (Alt a b) = withMatched a <|> withMatched b withMatched (App a b) = (\(f, s) (x, t) -> (f x, s ++ t)) <$> withMatched a <*> withMatched b withMatched Fail = Fail withMatched (Fmap f x) = (f *** id) <$> withMatched x withMatched (CatMaybes x) = CatMaybes $ (\ (as, s) -> flip (,) s <$> as) <$> withMatched x withMatched (Rep gr f a0 x) = Rep gr (\(a, s) (x, t) -> (f a x, s ++ t)) (a0, []) (withMatched x) -- N.B.: this ruins the Void optimization withMatched (Void x) = (const () *** id) <$> withMatched x -- | @s =~ a = match a s@ (=~) :: [s] -> RE s a -> Maybe a (=~) = flip match infix 2 =~ -- | Attempt to match a string of symbols against the regular expression. -- Note that the whole string (not just some part of it) should be matched. -- -- Examples: -- -- >Text.Regex.Applicative> match (sym 'a' <|> sym 'b') "a" -- >Just 'a' -- >Text.Regex.Applicative> match (sym 'a' <|> sym 'b') "ab" -- >Nothing -- match :: RE s a -> [s] -> Maybe a match re = let obj = compile re in \str -> listToMaybe $ results $ foldl (flip step) obj str -- | Find a string prefix which is matched by the regular expression. -- -- Of all matching prefixes, pick one using left bias (prefer the left part of -- '<|>' to the right part) and greediness. -- -- This is the match which a backtracking engine (such as Perl's one) would find -- first. -- -- If match is found, the rest of the input is also returned. -- -- See also 'findFirstPrefixWithUncons', of which this is a special case. 
-- -- Examples: -- -- >Text.Regex.Applicative> findFirstPrefix ("a" <|> "ab") "abc" -- >Just ("a","bc") -- >Text.Regex.Applicative> findFirstPrefix ("ab" <|> "a") "abc" -- >Just ("ab","c") -- >Text.Regex.Applicative> findFirstPrefix "bc" "abc" -- >Nothing findFirstPrefix :: RE s a -> [s] -> Maybe (a, [s]) findFirstPrefix = findFirstPrefixWithUncons List.uncons -- | Find the first prefix, with the given @uncons@ function. -- -- @since 0.3.4 findFirstPrefixWithUncons :: (ss -> Maybe (s, ss)) -> RE s a -> ss -> Maybe (a, ss) findFirstPrefixWithUncons = findPrefixWith' (walk emptyObject . threads) where walk obj [] = (obj, Nothing) walk obj (t:ts) = case getResult t of Just r -> (obj, Just r) Nothing -> walk (addThread t obj) ts -- | Find the longest string prefix which is matched by the regular expression. -- -- Submatches are still determined using left bias and greediness, so this is -- different from POSIX semantics. -- -- If match is found, the rest of the input is also returned. -- -- See also 'findLongestPrefixWithUncons', of which this is a special case. -- -- Examples: -- -- >Text.Regex.Applicative Data.Char> let keyword = "if" -- >Text.Regex.Applicative Data.Char> let identifier = many $ psym isAlpha -- >Text.Regex.Applicative Data.Char> let lexeme = (Left <$> keyword) <|> (Right <$> identifier) -- >Text.Regex.Applicative Data.Char> findLongestPrefix lexeme "if foo" -- >Just (Left "if"," foo") -- >Text.Regex.Applicative Data.Char> findLongestPrefix lexeme "iffoo" -- >Just (Right "iffoo","") findLongestPrefix :: RE s a -> [s] -> Maybe (a, [s]) findLongestPrefix = findLongestPrefixWithUncons List.uncons -- | Find the longest prefix, with the given @uncons@ function. -- -- @since 0.3.4 findLongestPrefixWithUncons :: (ss -> Maybe (s, ss)) -> RE s a -> ss -> Maybe (a, ss) findLongestPrefixWithUncons = findPrefixWith' ((,) <*> listToMaybe . results) findPrefixWith' :: (ReObject s a -> (ReObject s a, Maybe a)) -- ^ Given the regex object, compute the regex object to feed the next input value into, and -- the result, if any. -> (ss -> Maybe (s, ss)) -- ^ @uncons@ -> RE s a -> ss -> Maybe (a, ss) findPrefixWith' walk uncons = \ re -> go (compile re) Nothing where go obj resOld ss = case walk obj of (obj', resThis) -> let res = flip (,) ss <$> resThis <|> resOld in case uncons ss of _ | failed obj' -> res Nothing -> res Just (s, ss) -> go (step s obj') res ss -- | Find the shortest prefix (analogous to 'findLongestPrefix') -- -- See also 'findShortestPrefixWithUncons', of which this is a special case. findShortestPrefix :: RE s a -> [s] -> Maybe (a, [s]) findShortestPrefix = findShortestPrefixWithUncons List.uncons -- | Find the shortest prefix (analogous to 'findLongestPrefix'), with the given @uncons@ function. -- -- @since 0.3.4 findShortestPrefixWithUncons :: (ss -> Maybe (s, ss)) -> RE s a -> ss -> Maybe (a, ss) findShortestPrefixWithUncons uncons = go . compile where go obj ss = case results obj of r:_ -> Just (r, ss) _ -> do guard (not (failed obj)) (s, ss) <- uncons ss go (step s obj) ss -- | Find the leftmost substring that is matched by the regular expression. -- Otherwise behaves like 'findFirstPrefix'. Returns the result together with -- the prefix and suffix of the string surrounding the match. 
findFirstInfix :: RE s a -> [s] -> Maybe ([s], a, [s]) findFirstInfix re str = fmap (\((first, res), last) -> (first, res, last)) $ findFirstPrefix ((,) <$> few anySym <*> re) str -- Auxiliary function for findExtremeInfix prefixCounter :: RE s (Int, [s]) prefixCounter = second reverse <$> reFoldl NonGreedy f (0, []) anySym where f (i, prefix) s = ((,) $! (i+1)) $ s:prefix data InfixMatchingState s a = GotResult { prefixLen :: !Int , prefixStr :: [s] , result :: a , postfixStr :: [s] } | NoResult -- a `preferOver` b chooses one of a and b, giving preference to a preferOver :: InfixMatchingState s a -> InfixMatchingState s a -> InfixMatchingState s a preferOver NoResult b = b preferOver b NoResult = b preferOver a b = case prefixLen a `compare` prefixLen b of GT -> b -- prefer b when it has smaller prefix _ -> a -- otherwise, prefer a mkInfixMatchingState :: [s] -- rest of input -> Thread s ((Int, [s]), a) -> InfixMatchingState s a mkInfixMatchingState rest thread = case getResult thread of Just ((pLen, pStr), res) -> GotResult { prefixLen = pLen , prefixStr = pStr , result = res , postfixStr = rest } Nothing -> NoResult gotResult :: InfixMatchingState s a -> Bool gotResult GotResult {} = True gotResult _ = False -- Algorithm for finding leftmost longest infix match: -- -- 1. Add a thread /.*?/ to the begginning of the regexp -- 2. As soon as we get first accept, we delete that thread -- 3. When we get more than one accept, we choose one by the following criteria: -- 3.1. Compare by the length of prefix (since we are looking for the leftmost -- match) -- 3.2. If they are produced on the same step, choose the first one (left-biased -- choice) -- 3.3. If they are produced on the different steps, choose the later one (since -- they have the same prefixes, later means longer) findExtremalInfix :: -- function to combine a later result (first arg) to an earlier one (second -- arg) (InfixMatchingState s a -> InfixMatchingState s a -> InfixMatchingState s a) -> RE s a -> [s] -> Maybe ([s], a, [s]) findExtremalInfix newOrOld re str = case go (compile $ (,) <$> prefixCounter <*> re) str NoResult of NoResult -> Nothing r@GotResult{} -> Just (prefixStr r, result r, postfixStr r) where {- go :: ReObject s ((Int, [s]), a) -> [s] -> InfixMatchingState s a -> InfixMatchingState s a -} go obj str resOld = let resThis = foldl (\acc t -> acc `preferOver` mkInfixMatchingState str t) NoResult $ threads obj res = resThis `newOrOld` resOld obj' = -- If we just found the first result, kill the "prefixCounter" thread. -- We rely on the fact that it is the last thread of the object. if gotResult resThis && not (gotResult resOld) then fromThreads $ init $ threads obj else obj in case str of [] -> res _ | failed obj -> res (s:ss) -> go (step s obj') ss res -- | Find the leftmost substring that is matched by the regular expression. -- Otherwise behaves like 'findLongestPrefix'. Returns the result together with -- the prefix and suffix of the string surrounding the match. findLongestInfix :: RE s a -> [s] -> Maybe ([s], a, [s]) findLongestInfix = findExtremalInfix preferOver -- | Find the leftmost substring that is matched by the regular expression. -- Otherwise behaves like 'findShortestPrefix'. Returns the result together with -- the prefix and suffix of the string surrounding the match. findShortestInfix :: RE s a -> [s] -> Maybe ([s], a, [s]) findShortestInfix = findExtremalInfix $ flip preferOver -- | Replace matches of the regular expression with its value. -- -- >Text.Regex.Applicative > replace ("!" 
<$ sym 'f' <* some (sym 'o')) "quuxfoofooooofoobarfobar" -- >"quux!!!bar!bar" replace :: RE s [s] -> [s] -> [s] replace r = ($ []) . go where go ys = case findLongestInfix r ys of Nothing -> (ys ++) Just (before, m, rest) -> (before ++) . (m ++) . go rest
feuerbach/regex-applicative
Text/Regex/Applicative/Interface.hs
Haskell
mit
11,440
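comap is documented above but has no worked example; here is a hedged one in the same GHCi style as the other haddocks, assuming psym, many and match are in scope from the package. It matches over annotated tokens by projecting out the character:

>Text.Regex.Applicative> let digits = many (psym (`elem` ['0'..'9']))
>Text.Regex.Applicative> match (comap fst digits) [('4', 0::Int), ('2', 1)]
>Just "42"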
{-# LANGUAGE CPP #-} module GHCJS.DOM.CSSRule ( #if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT) module GHCJS.DOM.JSFFI.Generated.CSSRule #else module Graphics.UI.Gtk.WebKit.DOM.CSSRule #endif ) where #if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT) import GHCJS.DOM.JSFFI.Generated.CSSRule #else import Graphics.UI.Gtk.WebKit.DOM.CSSRule #endif
plow-technologies/ghcjs-dom
src/GHCJS/DOM/CSSRule.hs
Haskell
mit
420
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-} module GHCJS.DOM.JSFFI.Generated.MediaList (js_item, item, js_deleteMedium, deleteMedium, js_appendMedium, appendMedium, js_setMediaText, setMediaText, js_getMediaText, getMediaText, js_getLength, getLength, MediaList, castToMediaList, gTypeMediaList) where import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord) import Data.Typeable (Typeable) import GHCJS.Types (JSRef(..), JSString, castRef) import GHCJS.Foreign (jsNull) import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..)) import GHCJS.Marshal (ToJSRef(..), FromJSRef(..)) import GHCJS.Marshal.Pure (PToJSRef(..), PFromJSRef(..)) import Control.Monad.IO.Class (MonadIO(..)) import Data.Int (Int64) import Data.Word (Word, Word64) import GHCJS.DOM.Types import Control.Applicative ((<$>)) import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName) import GHCJS.DOM.Enums foreign import javascript unsafe "$1[\"item\"]($2)" js_item :: JSRef MediaList -> Word -> IO (JSRef (Maybe JSString)) -- | <https://developer.mozilla.org/en-US/docs/Web/API/MediaList.item Mozilla MediaList.item documentation> item :: (MonadIO m, FromJSString result) => MediaList -> Word -> m (Maybe result) item self index = liftIO (fromMaybeJSString <$> (js_item (unMediaList self) index)) foreign import javascript unsafe "$1[\"deleteMedium\"]($2)" js_deleteMedium :: JSRef MediaList -> JSString -> IO () -- | <https://developer.mozilla.org/en-US/docs/Web/API/MediaList.deleteMedium Mozilla MediaList.deleteMedium documentation> deleteMedium :: (MonadIO m, ToJSString oldMedium) => MediaList -> oldMedium -> m () deleteMedium self oldMedium = liftIO (js_deleteMedium (unMediaList self) (toJSString oldMedium)) foreign import javascript unsafe "$1[\"appendMedium\"]($2)" js_appendMedium :: JSRef MediaList -> JSString -> IO () -- | <https://developer.mozilla.org/en-US/docs/Web/API/MediaList.appendMedium Mozilla MediaList.appendMedium documentation> appendMedium :: (MonadIO m, ToJSString newMedium) => MediaList -> newMedium -> m () appendMedium self newMedium = liftIO (js_appendMedium (unMediaList self) (toJSString newMedium)) foreign import javascript unsafe "$1[\"mediaText\"] = $2;" js_setMediaText :: JSRef MediaList -> JSRef (Maybe JSString) -> IO () -- | <https://developer.mozilla.org/en-US/docs/Web/API/MediaList.mediaText Mozilla MediaList.mediaText documentation> setMediaText :: (MonadIO m, ToJSString val) => MediaList -> Maybe val -> m () setMediaText self val = liftIO (js_setMediaText (unMediaList self) (toMaybeJSString val)) foreign import javascript unsafe "$1[\"mediaText\"]" js_getMediaText :: JSRef MediaList -> IO (JSRef (Maybe JSString)) -- | <https://developer.mozilla.org/en-US/docs/Web/API/MediaList.mediaText Mozilla MediaList.mediaText documentation> getMediaText :: (MonadIO m, FromJSString result) => MediaList -> m (Maybe result) getMediaText self = liftIO (fromMaybeJSString <$> (js_getMediaText (unMediaList self))) foreign import javascript unsafe "$1[\"length\"]" js_getLength :: JSRef MediaList -> IO Word -- | <https://developer.mozilla.org/en-US/docs/Web/API/MediaList.length Mozilla MediaList.length documentation> getLength :: (MonadIO m) => MediaList -> m Word getLength self = liftIO (js_getLength (unMediaList self))
plow-technologies/ghcjs-dom
src/GHCJS/DOM/JSFFI/Generated/MediaList.hs
Haskell
mit
3,649
-- A module with some code to explore theorems in the monadic lambda calculus module TP where import Data.List import Data.Set (Set) import qualified Data.Set as Set import Data.Maybe import Data.Map (Map) import qualified Data.Map as Map import Data.Foldable hiding (concat,any,all) import Control.Monad.State import DataTypes startState :: S startState = S (-1) Map.empty -- Util -- |The 'split' function splits a set (encoded as a list) in all possible ways. -- -- >>> split [1,2] -- [([],[1,2]),([1],[2]),([2],[1]),([1,2],[])] split :: [a] -> [([a],[a])] split [] = [([],[])] split [a] = [([],[a]),([a],[])] split (a : as) = left ++ right where left = [(a : l,r) | (l,r) <- rec] right = [(l, a : r) | (l,r) <- rec] rec = split as -- |Returns the current state integer and decrease the state by one. getAndDec :: NonDeterministicState S Int getAndDec = do s <- get i <- return $ counter s modify (\x -> x{counter = (i-1)}) return i -- |Takes a sequent of formulae and generates fresh variables for each formula, wrapping it in a non-deterministic state toDecorated :: Sequent -> NonDeterministicState S DecoratedSequent toDecorated (gamma,f) = do aux <- return $ \x -> do i <- getAndDec j <- getAndDec return $ DF i (V j) x gamma' <- mapM aux gamma f' <- aux f return (gamma',f') -- |Takes a decorated sequent and generates fresh variables for each formula, wrapping it in a non-deterministic state and returning a map from the new variables to the original constant terms toDecoratedWithConstants :: ([(LambdaTerm,Formula)],Formula) -> NonDeterministicState S (DecoratedSequent,Map Int LambdaTerm) toDecoratedWithConstants (gamma,f) = do aux <- return $ \(c,x) -> do i <- getAndDec j <- getAndDec return $ (DF i (V j) x,(i,c)) gamma' <- mapM aux gamma f' <- do i <- getAndDec j <- getAndDec return $ DF i (V j) f return ((map fst gamma',f'),Map.fromList $ map snd gamma') -- |Associates two formulae in the variable-formula binding map in the state associate :: Formula -> Formula -> NonDeterministicState S () associate f g = do s <- get m <- return $ vars s modify (\x -> x{vars = Map.insert f g m}) return () -- |Looks up the binding of a formula in the variable-formula binding map of the state getBinding :: Formula -> NonDeterministicState S (Maybe Formula) getBinding f = aux f [f] where aux f vs = do s <- get m <- return $ vars s res <- return $ Map.lookup f m case res of Nothing -> return Nothing Just v@(Var _ _ _) -> case Data.List.elem v vs of False -> aux v (v : vs) True -> return $ Just f Just f -> return $ Just f -- |Tries to unify to formulae: returns 'True' in case of success (and associate the unified formulae) and 'False' otherwise (without changing the state) unify :: Formula -> Formula -> NonDeterministicState S Bool unify v1@(Var _ t1 _) v2@(Var _ t2 _) | t1 == t2 = do binding1 <- getBinding v1 binding2 <- getBinding v2 case binding1 of Nothing -> do associate v1 v2 return True Just g -> case binding2 of Nothing -> do associate v2 v1 return True Just f -> return $ f == g | otherwise = return False unify v@(Var _ t _) f | t == (getType f) = do binding <- getBinding v case binding of Nothing -> do associate v f return True Just g -> return $ g == f | otherwise = return False unify f v@(Var _ _ _) = unify v f unify f g = return $ f == g -- |Returns all the proofs for a given sequent proofs :: DecoratedSequent -> NonDeterministicState S (BinTree DecoratedSequent) proofs s@(gamma,f) = do every $ map (\r -> r s) [iR,mR,tR] ++ map (\(r,g) -> r g (delete g gamma,f)) [(r,g) | r <- [i,iL,mL,tL] , g <- gamma] -- 
|The identity rule i :: DecoratedFormula -> DecoratedSequent -> NonDeterministicState S (BinTree DecoratedSequent) i a (hyp,a') | not $ any isLinear (map formula hyp) = do res <- unify (formula a) (formula a') case res of False -> failure True -> do i <- getAndDec x <- return $ V i return $ Leaf Id ([DF (identifier a) x (formula a)] , DF (identifier a') x (formula a')) | otherwise = failure i _ _ = failure -- |The left implication rule iL :: DecoratedFormula -> DecoratedSequent -> NonDeterministicState S (BinTree DecoratedSequent) iL f@(DF _ _ (I a b ty lin)) (gamma,c) = do a_id <- getAndDec b_id <- getAndDec t <- getAndDec >>= \i -> return $ V i x <- getAndDec >>= \j -> return $ V j splits <- return $ split gamma proveChildren <- return $ \(g,g') -> do l <- proofs (g,DF a_id t a) r <- proofs (DF b_id x b : g',c) return (l,r) (l,r) <- every $ map proveChildren splits (delta,a') <- return $ getVal l ((gamma_with_b), c') <- return $ getVal r b' <- return $ lookupFormula b_id gamma_with_b gamma <- return $ delete b' gamma_with_b y <- getAndDec >>= \i -> return $ V i return $ Branch ImpL l (DF (identifier f) y (I a b ty lin) : gamma `union` delta ,DF (identifier c') (sub (App y (term a')) (term b') (term c')) (formula c')) r iL _ _ = failure -- |The left diamond rule mL :: DecoratedFormula -> DecoratedSequent -> NonDeterministicState S (BinTree DecoratedSequent) mL ma@(DF _ y (M a m1 _ _)) (gamma, f@(DF j _ (M b m2 tyb lin))) | m1 == m2 = do id_a <- getAndDec x <- getAndDec >>= \i -> return $ V i c <- proofs (DF id_a x a : gamma, f) (gamma_and_a,mb) <- return $ getVal c a <- return $ lookupFormula id_a gamma_and_a gamma <- return $ delete a gamma_and_a return $ Unary MonL (ma : gamma, DF j (y :*: (Lambda (term a) (term mb))) (M b m2 tyb lin)) c | otherwise = failure mL _ _ = failure -- |The left tensor rule tL :: DecoratedFormula -> DecoratedSequent -> NonDeterministicState S (BinTree DecoratedSequent) tL ab@(DF _ y (P a b _ _)) (gamma, c) = do a_id <- getAndDec b_id <- getAndDec f <- getAndDec >>= \i -> return $ V i g <- getAndDec >>= \i -> return $ V i child <- proofs (DF a_id f a : DF b_id g b : gamma,c) (gamma_and_a_and_b,c') <- return $ getVal child a <- return $ lookupFormula a_id gamma_and_a_and_b b <- return $ lookupFormula b_id gamma_and_a_and_b gamma <- return $ delete a $ delete b gamma_and_a_and_b return $ Unary TensL (ab : gamma, DF (identifier c) (sub (FirstProjection y) (term a) (sub (SecondProjection y) (term b) (term c'))) (formula c)) child tL _ _ = failure -- |The right implication rule iR :: DecoratedSequent -> NonDeterministicState S (BinTree DecoratedSequent) iR (gamma, DF i _ f@(I a b _ _)) = do a_id <- getAndDec b_id <- getAndDec x <- getAndDec >>= \i -> return $ V i t <- getAndDec >>= \i -> return $ V i c <- proofs (DF a_id x a : gamma, DF b_id t b) (gamma_and_a,b) <- return $ getVal c a <- return $ lookupFormula a_id gamma_and_a gamma <- return $ delete a gamma_and_a return $ Unary ImpR (gamma, DF i (Lambda (term a) (term b)) f) c iR _ = failure -- |The right diamond rule mR :: DecoratedSequent -> NonDeterministicState S (BinTree DecoratedSequent) mR (gamma,DF i _ ma@(M a _ _ _)) = do a_id <- getAndDec x <- getAndDec >>= \i -> return $ V i c <- proofs (gamma,DF a_id x a) (gamma,a) <- return $ getVal c return $ Unary MonR (gamma,DF i (Eta (term a)) ma) c mR _ = failure -- |The right tensor rule tR :: DecoratedSequent -> NonDeterministicState S (BinTree DecoratedSequent) tR (gamma,DF i _ f@(P a b _ _)) = do a_id <- getAndDec b_id <- getAndDec t <- getAndDec >>= \i -> 
return $ V i u <- getAndDec >>= \i -> return $ V i splits <- return $ split gamma proveChildren <- return $ \(g,g') -> do l <- proofs (g,DF a_id t a) r <- proofs (g',DF b_id u b) return (l,r) (l,r) <- every $ map proveChildren splits (gamma,a) <- return $ getVal l (delta,b) <- return $ getVal r return $ Branch TensR l (gamma `union` delta, DF i (Pair (term a) (term b)) f) r tR _ = failure -- |This function searches for a formula in a list of formulae by comparing their unique ids. -- It's meant to be used only by the left implication and left monad rules. -- Raises an error if no formula with the given id is found lookupFormula :: Int -> [DecoratedFormula] -> DecoratedFormula lookupFormula _ [] = error "This will never be reached by the rules" lookupFormula n (f : rest) | n == (identifier f) = f | otherwise = lookupFormula n rest -- |Substitute a term for another inside a third term (should be the substitution of a variable with a term) sub :: LambdaTerm -> -- the new term LambdaTerm -> -- the variable/old term LambdaTerm -> -- the context LambdaTerm -- the new term sub _ _ c@(C _) = c sub new old t@(V _) | t == old = new | otherwise = t sub new old t@(Lambda v b) | v == old = t | otherwise = Lambda v $ sub new old b sub new old (App f a) = App (sub new old f) (sub new old a) sub new old (Eta f) = Eta (sub new old f) sub new old (m :*: k) = (:*:) (sub new old m) (sub new old k) sub new old (Pair a b) = Pair (sub new old a) (sub new old b) sub new old (FirstProjection a) = FirstProjection $ sub new old a sub new old (SecondProjection a) = SecondProjection $ sub new old a -- |Collects all variables from a proof collectVars :: BinTree DecoratedSequent -> Set LambdaTerm collectVars t = Set.fromList $ foldMap aux t where aux = concat . (map f) . (map term) . j j (c,f) = f : c f v@(V _) = [v] f (C _) = [] f (Lambda v t) = f v ++ f t f (App g a) = f g ++ f a f (Eta x) = f x f (m :*: k) = f m ++ f k f (Pair a b) = f a ++ f b f (FirstProjection a) = f a f (SecondProjection a) = f a -- |Changes all the negative indices used in the vars to contiguos positive integers sanitizeVars :: BinTree DecoratedSequent -> BinTree DecoratedSequent sanitizeVars t = fmap sanitize t where sanitize (gamma,f) = (map deepSub gamma,deepSub f) deepSub (DF i lt f) = (DF i (zub lt) f) zub (V i) = V $ fromJust $ lookup i m zub c@(C _) = c zub (Lambda x t) = Lambda (zub x) (zub t) zub (App f g) = App (zub f) (zub g) zub (Eta x) = Eta (zub x) zub (m :*: k) = (zub m) :*: (zub k) zub (Pair a b) = Pair (zub a) (zub b) zub (FirstProjection a) = FirstProjection $ zub a zub (SecondProjection a) = SecondProjection $ zub a m = zip (map (\(V i) -> i) $ Set.toList $ collectVars t) [0..] 
replaceWithConstants :: BinTree DecoratedSequent -> (Map Int LambdaTerm) -> BinTree DecoratedSequent replaceWithConstants t m = fmap (\n -> replaceWithConstantsInNode n m) t replaceWithConstantsInNode :: DecoratedSequent -> (Map Int LambdaTerm) -> DecoratedSequent replaceWithConstantsInNode (gamma,f) m = new where new = (map fst gamma', deepSub f) gamma' = map replace gamma n = map fromJust $ filter isJust $ map snd gamma' replace df@(DF i v f) = case Map.lookup i m of Nothing -> (df,Nothing) Just c -> (DF i c f,Just (v,c)) deepSub (DF i lt f) = (DF i (zub lt) f) zub v@(V _) = case lookup v n of Nothing -> v Just c -> c zub c@(C _) = c zub (Lambda x t) = Lambda (zub x) (zub t) zub (App f g) = App (zub f) (zub g) zub (Eta x) = Eta (zub x) zub (m :*: k) = (zub m) :*: (zub k) zub (Pair a b) = Pair (zub a) (zub b) zub (FirstProjection a) = FirstProjection $ zub a zub (SecondProjection a) = SecondProjection $ zub a alphaEquivalent :: LambdaTerm -> LambdaTerm -> Map Int Int -> Bool alphaEquivalent c1@(C _) c2@(C _) _ = c1 == c2 alphaEquivalent (V i) (V j) m = case Map.lookup i m of Just h -> j == h Nothing -> i == j alphaEquivalent (Lambda (V i) t) (Lambda (V j) u) m = alphaEquivalent t u (Map.insert i j m) alphaEquivalent (App t s) (App d z) m = (alphaEquivalent t d m) && (alphaEquivalent s z m) alphaEquivalent (Eta t) (Eta d) m = alphaEquivalent t d m alphaEquivalent (t :*: s) (d :*: z) m = (alphaEquivalent t d m) && (alphaEquivalent s z m) alphaEquivalent (Pair a b) (Pair a' b') m = alphaEquivalent a a' m && alphaEquivalent b b' m alphaEquivalent (FirstProjection a) (FirstProjection b) m = alphaEquivalent a b m alphaEquivalent (SecondProjection a) (SecondProjection b) m = alphaEquivalent a b m alphaEquivalent _ _ _ = False -- |This function works only under the assumption that all the formulae in the hypothesis are distinct, otherwise the answer is NO! equivalentDecoratedSequent :: DecoratedSequent -> DecoratedSequent -> Bool equivalentDecoratedSequent s1 s2 = f1 == f2 && hypEqual && noDuplicates && alphaEquivalent t1 t2 e where noDuplicates = (length $ Set.toList $ Set.fromList (map formula hyp1)) == length hyp1 && (length $ Set.toList $ Set.fromList (map formula hyp2)) == length hyp2 hyp1 = fst s1 hyp2 = fst s2 hypEqual = (Set.fromList (map formula hyp1)) == (Set.fromList (map formula hyp2)) varId (V i) = i varId _ = -1 m1 = Map.fromList $ map (\x -> (formula x, varId $ term x)) hyp1 m2 = Map.fromList $ map (\x -> (formula x, varId $ term x)) hyp2 e = mixMaps m1 m2 t1 = betaReduce $ monadReduce $ etaReduce $ term $ snd $ s1 t2 = betaReduce $ monadReduce $ etaReduce $ term $ snd $ s2 f1 = formula $ snd $ s1 f2 = formula $ snd $ s2 mixMaps :: Map Formula Int -> Map Formula Int -> Map Int Int mixMaps m n = Map.fromList $ aux (Map.toList m) where aux [] = [] aux ((f,i) : rest) = (i,n Map.! 
f) : aux rest etaReduce :: LambdaTerm -> LambdaTerm etaReduce c@(C _) = c etaReduce v@(V _) = v etaReduce (App f g) = App (etaReduce f) (etaReduce g) etaReduce (Eta t) = Eta $ etaReduce t etaReduce (m :*: k) = (etaReduce m) :*: (etaReduce k) etaReduce (Pair a b) = Pair (etaReduce a) (etaReduce b) etaReduce (FirstProjection a) = FirstProjection $ etaReduce a etaReduce (SecondProjection a) = SecondProjection $ etaReduce a etaReduce (Lambda (V i) (App f (V j))) | i == j = etaReduce f | otherwise = Lambda (V i) (App (etaReduce f) (V j)) etaReduce (Lambda x t) = let x' = etaReduce x t' = etaReduce t in if t == t' then Lambda x' t' else etaReduce (Lambda x' t') betaReduce :: LambdaTerm -> LambdaTerm betaReduce t = aux t Map.empty where aux c@(C _) _ = c aux v@(V i) m = case Map.lookup i m of Nothing -> v Just t -> t aux (App (Lambda (V i) body) x) m = aux body (Map.insert i x m) aux (App f x) m = let f' = aux f m in if f == f' then (App f (aux x m)) else aux (App f' x) m aux (Lambda x b) m = Lambda (aux x m) (aux b m) aux (Eta t) m = Eta $ aux t m aux (n :*: k) m = (aux n m) :*: (aux k m) aux (Pair a b) m = Pair (aux a m) (aux b m) aux (FirstProjection a) m = FirstProjection $ aux a m aux (SecondProjection a) m = SecondProjection $ aux a m monadReduce :: LambdaTerm -> LambdaTerm monadReduce ((Eta t) :*: u) = App (monadReduce u) (monadReduce t) monadReduce (t :*: (Lambda (V i) (Eta (V j)))) | i == j = monadReduce t | otherwise = (monadReduce t) :*: (Lambda (V i) (Eta (V j))) monadReduce v@(V _) = v monadReduce c@(C _) = c monadReduce (App t u) = App (monadReduce t) (monadReduce u) monadReduce (Lambda x t) = Lambda (monadReduce x) (monadReduce t) monadReduce (Eta t) = Eta $ monadReduce t monadReduce (Pair a b) = Pair (monadReduce a) (monadReduce b) monadReduce (FirstProjection a) = FirstProjection $ monadReduce a monadReduce (SecondProjection a) = SecondProjection $ monadReduce a monadReduce (t :*: u) = let t' = monadReduce t u' = monadReduce u in if t == t' && u == u' then t' :*: u' else monadReduce (t' :*: u')
gianlucagiorgolo/glue-tp
TP.hs
Haskell
mit
16,780
module PhotonMap ( PhotonMap , PhotonSurfaceInteraction , count , generatePhotonMap , getLightToViewerAtIntersection ) where import Numeric.FastMath ( ) import Control.DeepSeq ( NFData(..), force ) import Control.Monad ( replicateM, liftM ) import Data.KdMap.Static ( KdMap, buildWithDist, inRadius ) import Core ( Point(..), Ray(..), UnitVector , translate, neg, magnitude, to, calculateReflection, (|*|) ) import Light ( Light, PhotonLightSource, sumLights, scaled ) import Material ( probabilityDiffuseReflection, probabilitySpecularReflection , diffuseLight, specularLight, brdf ) import Rnd ( Rnd, rndDouble, rndDirectionInHemisphere ) import Scene ( Scene, Intersection(..), allPhotonLightSources, sceneIntersection ) import Volume ( Volume(..) ) data PhotonSurfaceInteraction = PhotonSurfaceInteraction !UnitVector !Light data LightTransferEvent = EventDiffuse | EventSpecular data LightTransfer = LightTransfer Ray Light LightTransferEvent instance NFData PhotonSurfaceInteraction where rnf (PhotonSurfaceInteraction !v !l) = rnf v `seq` rnf l `seq` () data PhotonMap = PhotonMap (KdMap Double Point PhotonSurfaceInteraction) !Int !Double instance NFData PhotonMap where rnf (PhotonMap !k !n !s) = rnf k `seq` rnf n `seq` rnf s `seq` () generatePhotonMap :: Scene -> Int -> Rnd PhotonMap generatePhotonMap scene num = do psis <- generatePhotonSurfaceInxs scene num return $ force $ PhotonMap (buildWithDist pointToList distSquared psis) (length psis) (1.0 / fromIntegral num) where pointToList (Point !x !y !z) = [x, y, z] distSquared (Point !x1 !y1 !z1) (Point !x2 !y2 !z2) = xd * xd + yd * yd + zd * zd where xd = x1 - x2 yd = y1 - y2 zd = z1 - z2 count :: PhotonMap -> Int count (PhotonMap _ n _) = n generatePhotonSurfaceInxs :: Scene -> Int -> Rnd [(Point, PhotonSurfaceInteraction)] generatePhotonSurfaceInxs scene num = concatM $ mapM (generatePhotonSurfaceInxsForLightSource scene numPerLight) lightSources where lightSources = allPhotonLightSources scene numPerLight = num `div` length lightSources generatePhotonSurfaceInxsForLightSource :: Scene -> Int -> PhotonLightSource -> Rnd [(Point, PhotonSurfaceInteraction)] generatePhotonSurfaceInxsForLightSource scene num lightSource = concatM $ replicateM num $ generateSinglePhotonSurfaceInxn scene lightSource generateSinglePhotonSurfaceInxn :: Scene -> PhotonLightSource -> Rnd [(Point, PhotonSurfaceInteraction)] generateSinglePhotonSurfaceInxn scene lightSource = do (ray, light) <- lightSource traceLightRay 10 scene (LightTransfer ray light EventDiffuse) handlePhotonIntersection :: [(Point, PhotonSurfaceInteraction)] -> LightTransferEvent -> (Point, PhotonSurfaceInteraction) -> [(Point, PhotonSurfaceInteraction)] handlePhotonIntersection !list !event psi = case event of EventDiffuse -> psi : list EventSpecular -> list traceLightRay :: Int -> Scene -> LightTransfer -> Rnd [(Point, PhotonSurfaceInteraction)] traceLightRay !limit !scene !incoming@(LightTransfer !incomingRay incomingLight _) = if limit <= 0 then return [] else case maybeIntersection of Nothing -> return [] Just ix -> do maybeOutgoingLight <- computeOutgoingLightRay ix incoming let !event = maybe EventDiffuse getEvent maybeOutgoingLight let !photonIntersection = toPhotonIntersection ix recurse <- maybe (return []) (traceLightRay (limit - 1) scene) maybeOutgoingLight return $ handlePhotonIntersection recurse event photonIntersection where getEvent (LightTransfer _ _ ev) = ev !maybeIntersection = sceneIntersection scene incomingRay toPhotonIntersection (Intersection (Ray _ !rd) _ _ !pos) = (pos, 
PhotonSurfaceInteraction rd incomingLight) computeOutgoingLightRay :: Intersection -> LightTransfer -> Rnd (Maybe LightTransfer) computeOutgoingLightRay (Intersection _ (Volume _ !nrm _ !material) _ !wp) (LightTransfer (Ray _ !incomingRay) !incomingLight _) = do prob <- rndDouble 0.0 1.0 go prob where !pd = probabilityDiffuseReflection material ps = probabilitySpecularReflection material go prob | prob < pd = goDiffuse | prob < pd + ps = goSpecular | otherwise = return Nothing goDiffuse = do dr <- diffuseReflect surfaceNormal return $ Just $ LightTransfer (Ray movedFromSurface dr) (diffuseLight material incomingLight) EventDiffuse goSpecular = return $ Just $ LightTransfer (Ray movedFromSurface $ calculateReflection incomingRay surfaceNormal) (specularLight material incomingLight) EventSpecular !surfaceNormal = nrm wp !movedFromSurface = translate (surfaceNormal |*| epsilon) wp !epsilon = 0.0001 diffuseReflect :: UnitVector -> Rnd UnitVector diffuseReflect = rndDirectionInHemisphere getLightToViewerAtIntersection :: PhotonMap -> Intersection -> Light getLightToViewerAtIntersection (PhotonMap !kdmap _ !scale) (Intersection (Ray _ !outgoingVector) (Volume _ !nrm _ !material) _ !wp) = (sumLights $ map attenuateByDistance nearInteractions) `scaled` scale where attenuateByDistance (!pp, !psi) = brdfForInteraction psi `scaled` coneFilter pp wp maxDistance brdfForInteraction (PhotonSurfaceInteraction !incomingVector !incomingLight) = surfaceBrdf incomingLight (neg incomingVector) (neg outgoingVector) surfaceNormal wp !surfaceNormal = nrm wp !surfaceBrdf = brdf material !nearInteractions = inRadius kdmap maxDistance wp !maxDistance = 10.0 concatM :: Monad m => m [[a]] -> m [a] concatM = liftM concat coneFilter :: Point -> Point -> Double -> Double coneFilter !pp !wp !maxDistance = (1.0 - distance / (2.0 * maxDistance)) / maxDistance where !distance = magnitude (pp `to` wp) -- gaussianFilter :: Point -> Point -> Double -> Double -- gaussianFilter !pp !wp !maxDistance = -- a * (1.0 - (1.0 - exp (mb * px)) / dv) -- where -- !a = 0.918 -- !mb = -1.953 -- !dv = 1.0 - exp mb -- !ds = magnitudeSquared (pp `to` wp) -- !px = ds / (2.0 * maxDistance * maxDistance)
stu-smith/rendering-in-haskell
src/experiment08/PhotonMap.hs
Haskell
mit
6,437
{-# LANGUAGE OverloadedStrings #-} module Y2018.M04.D17.Exercise where {-- So, yesterday we had a bunch of JSON from various periodicals, but they were all in the same JSON format, which made it simple to upload them into a common database. Today, we have JSON, but in a different format, that we need to upload into that database, so, let's do it! YES! Also, the size is different, so it will be compressed this time, FER SHUR! --} import qualified Codec.Compression.GZip as GZ import Data.Aeson -- below imports available via 1HaskellADay git repository import Store.SQL.Connection (withConnection) import Y2018.M04.D16.Exercise data ViceArticles = VA { arts :: [Vice] } data Vice = Vice { vid :: Integer, vart :: String } deriving Show -- of course, Vice has a hashed id, but oh, well. Use its idx, instead -- The articles are in this directory: viceDir :: FilePath viceDir = "Y2018/M04/D17/compressed/" -- and the files are: vices :: [FilePath] vices = map (("vice-" ++) . (++ ".json")) (words "rands tops") -- remember to add ".gz" but I did that for ya yesterday. instance FromJSON ViceArticles where parseJSON (Object o) = undefined instance FromJSON Vice where parseJSON (Object o) = undefined vice2Art :: Vice -> IxArt vice2Art v = undefined -- parse in the vice articles then save them to the articles database. -- Also, make sure non-ASCII characters are removed from the text, because ick. -- Of course, first, you need to add "VIC" as a publisher and the vices -- as file types to the database. See yesterday's exercise and do that. -- hint: use withConnection ARCHIVE to get a connection to the ARCHIVE database readVices :: FilePath -> IO [IxArt] readVices vicedir = undefined -- remember to append ".gz" -- with the above readVices function, you should be able to call main'' and go!
geophf/1HaskellADay
exercises/HAD/Y2018/M04/D17/Exercise.hs
Haskell
mit
1,834
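A hedged sketch of how the missing FromJSON instances might look; the JSON field names ("arts", "id", "article") are guesses and would have to be checked against the actual vice-*.json files:

-- Assumed field names; adjust to the real JSON before use.
instance FromJSON ViceArticles where
   parseJSON (Object o) = VA <$> o .: "arts"
   parseJSON _ = fail "expected an object"

instance FromJSON Vice where
   parseJSON (Object o) = Vice <$> o .: "id" <*> o .: "article"
   parseJSON _ = fail "expected an object"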
-- | Analysis and transformation of SQL queries. module Database.Selda.Transform where import Database.Selda.Column import Database.Selda.SQL import Database.Selda.Query.Type import Database.Selda.Types -- | Remove all dead columns recursively, assuming that the given list of -- column names contains all names present in the final result. removeDeadCols :: [ColName] -> SQL -> SQL removeDeadCols live sql = case source sql' of EmptyTable -> sql' TableName _ -> sql' Values _ _ -> sql' RawSql _ -> sql' Product qs -> sql' {source = Product $ map noDead qs} Join jt on l r -> sql' {source = Join jt on (noDead l) (noDead r)} Union union_all l r -> sql' {source = Union union_all (noDead l) (noDead r)} where noDead = removeDeadCols live' sql' = keepCols (implicitlyLiveCols sql ++ live) sql live' = allColNames sql' -- | Return the names of all columns in the given top-level query. -- Subqueries are not traversed. allColNames :: SQL -> [ColName] allColNames sql = colNames (cols sql) ++ implicitlyLiveCols sql -- | Return the names of all non-output (i.e. 'cols') columns in the given -- top-level query. Subqueries are not traversed. implicitlyLiveCols :: SQL -> [ColName] implicitlyLiveCols sql = concat [ concatMap allNamesIn (restricts sql) , colNames (groups sql) , colNames (map snd $ ordering sql) , colNames (liveExtras sql) , case source sql of Join _ on _ _ -> allNamesIn on _ -> [] ] -- | Get all column names appearing in the given list of (possibly complex) -- columns. colNames :: [SomeCol SQL] -> [ColName] colNames cs = concat [ [n | Some c <- cs, n <- allNamesIn c] , [n | Named _ c <- cs, n <- allNamesIn c] , [n | Named n _ <- cs] ] -- | Remove all columns but the given, named ones and aggregates, from a query's -- list of outputs. -- If we want to refer to a column in an outer query, it must have a name. -- If it doesn't, then it's either not referred to by an outer query, or -- the outer query duplicates the expression, thereby referring directly -- to the names of its components. keepCols :: [ColName] -> SQL -> SQL keepCols live sql = sql {cols = filtered} where filtered = filter (`oneOf` live) (cols sql) oneOf (Some (AggrEx _ _)) _ = True oneOf (Named _ (AggrEx _ _)) _ = True oneOf (Some (Col n)) ns = n `elem` ns oneOf (Named n _) ns = n `elem` ns oneOf _ _ = False -- | Build the outermost query from the SQL generation state. -- Groups are ignored, as they are only used by 'aggregate'. state2sql :: GenState -> SQL state2sql (GenState [sql] srs _ _ _) = sql {restricts = restricts sql ++ srs} state2sql (GenState ss srs _ _ _) = SQL (allCols ss) (Product ss) srs [] [] Nothing [] False -- | Get all output columns from a list of SQL ASTs. allCols :: [SQL] -> [SomeCol SQL] allCols sqls = [outCol col | sql <- sqls, col <- cols sql] where outCol (Named n _) = Some (Col n) outCol c = c
valderman/selda
selda/src/Database/Selda/Transform.hs
Haskell
mit
3,098
-- Copyright (c) 2016-present, SoundCloud Ltd. -- All rights reserved. -- -- This source code is distributed under the terms of a MIT license, -- found in the LICENSE file. {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE TemplateHaskell #-} module Kubernetes.Model.V1.Endpoints ( Endpoints (..) , kind , apiVersion , metadata , subsets , mkEndpoints ) where import Control.Lens.TH (makeLenses) import Data.Aeson.TH (defaultOptions, deriveJSON, fieldLabelModifier) import Data.Text (Text) import GHC.Generics (Generic) import Kubernetes.Model.V1.EndpointSubset (EndpointSubset) import Kubernetes.Model.V1.ObjectMeta (ObjectMeta) import Prelude hiding (drop, error, max, min) import qualified Prelude as P import Test.QuickCheck (Arbitrary, arbitrary) import Test.QuickCheck.Instances () -- | Endpoints is a collection of endpoints that implement the actual service. Example:\n Name: \&quot;mysvc\&quot;,\n Subsets: [\n {\n Addresses: [{\&quot;ip\&quot;: \&quot;10.10.1.1\&quot;}, {\&quot;ip\&quot;: \&quot;10.10.2.2\&quot;}],\n Ports: [{\&quot;name\&quot;: \&quot;a\&quot;, \&quot;port\&quot;: 8675}, {\&quot;name\&quot;: \&quot;b\&quot;, \&quot;port\&quot;: 309}]\n },\n {\n Addresses: [{\&quot;ip\&quot;: \&quot;10.10.3.3\&quot;}],\n Ports: [{\&quot;name\&quot;: \&quot;a\&quot;, \&quot;port\&quot;: 93}, {\&quot;name\&quot;: \&quot;b\&quot;, \&quot;port\&quot;: 76}]\n },\n ] data Endpoints = Endpoints { _kind :: !(Maybe Text) , _apiVersion :: !(Maybe Text) , _metadata :: !(Maybe ObjectMeta) , _subsets :: !([EndpointSubset]) } deriving (Show, Eq, Generic) makeLenses ''Endpoints $(deriveJSON defaultOptions{fieldLabelModifier = (\n -> if n == "_type_" then "type" else P.drop 1 n)} ''Endpoints) instance Arbitrary Endpoints where arbitrary = Endpoints <$> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary -- | Use this method to build a Endpoints mkEndpoints :: [EndpointSubset] -> Endpoints mkEndpoints xsubsetsx = Endpoints Nothing Nothing Nothing xsubsetsx
soundcloud/haskell-kubernetes
lib/Kubernetes/Model/V1/Endpoints.hs
Haskell
mit
2,506
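A small usage sketch (assumed): mkEndpoints leaves the optional fields empty, and the makeLenses-generated lenses can fill them in afterwards.

-- Illustrative only; assumes OverloadedStrings and Control.Lens ((&), (?~)) are in scope.
emptyEndpoints :: Endpoints
emptyEndpoints = mkEndpoints []

namedEndpoints :: Endpoints
namedEndpoints = mkEndpoints [] & kind ?~ "Endpoints" & apiVersion ?~ "v1"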
{-# LANGUAGE TemplateHaskell, OverloadedStrings #-} import Network.Wai.Middleware.OAuth2 as OAuth2 import Network.OAuth.OAuth2 import Keys (googleKey) import Data.ByteString import Control.Monad (unless) import System.Exit (exitFailure) import Test.QuickCheck.All (quickCheckAll) import Test.QuickCheck (Property) import Test.QuickCheck.Monadic (assert, monadicIO, run) import Network.Wai.Test (defaultRequest, request, runSession, simpleBody, simpleHeaders, SResponse) googleScopeEmail :: QueryParams googleScopeEmail = [("scope", "email")] state :: QueryParams state = [("state", "00000000")] prop_login :: Property prop_login = monadicIO $ do login <- run $ runSession (request defaultRequest) (\_ sendResponse -> sendResponse $ OAuth2.login googleKey (googleScopeEmail ++ state)) run $ print (show $ simpleHeaders login) assert $ (simpleHeaders login) == locationHeader where --build it myself and check against OAuth2 answer locationHeader = [("Location",oauthOAuthorizeEndpoint googleKey `appendQueryParam` (transform' [("client_id",Just $ oauthClientId googleKey),("response_type",Just "code"),("redirect_uri",oauthCallback googleKey),("scope",Just "email"),("state",Just "00000000")]))] main = do allPass <- $quickCheckAll -- Run QuickCheck on all prop_ functions unless allPass exitFailure
NerdGGuy/wai-middleware-oauth2
test/test.hs
Haskell
mit
1,345
-- The main module of the application. Performs GLFW-specific initialization and others. module Main ( main ) where import Control.Applicative import Control.Concurrent import Control.Exception import Control.Monad import Control.Monad.Trans import qualified Graphics.UI.GLFW as GLFW import System.Environment import System.IO import qualified ApplicationModel as AM import qualified FixedStepManager as FSM import qualified Renderer as R -- | The error handler to be called when a GLFW error occurs. errorHandler :: GLFW.ErrorCallback errorHandler error description = do hPutStrLn stderr $ (show error) ++ ": " ++ description -- | Scans a key which is being pressed. scanKeyPress :: GLFW.Window -- ^ the window handle -> GLFW.Key -- ^ a code of a key which is scanned -> IO Bool -- ^ True indicates the key is being pressed scanKeyPress window key = (== GLFW.KeyState'Pressed) <$> (GLFW.getKey window key) -- | Updates the application model. update :: GLFW.Window -- ^ the window handle -> AM.RectangleData -- ^ data before updating -> IO AM.RectangleData -- ^ data after updating update window (AM.RectangleData x y) = do l <- (toValue (-1)) <$> (scanKeyPress window GLFW.Key'A) r <- (toValue 1) <$> (scanKeyPress window GLFW.Key'D) u <- (toValue 1) <$> (scanKeyPress window GLFW.Key'W) d <- (toValue (-1)) <$> (scanKeyPress window GLFW.Key'X) return $ AM.RectangleData (x + l + r) (y + u + d) where toValue v True = v toValue _ False = 0 -- | The rendering loop. renderingLoop :: GLFW.Window -- ^ the window handle -> (AM.RectangleData -> IO ()) -- ^ rendering action -> IO () renderingLoop window render = do GLFW.setTime 0 FSM.runStepManager (1/60) (loop (AM.RectangleData 0 0)) where loop rd = ((lift . GLFW.windowShouldClose) window) >>= (flip unless) (go rd) getTime = GLFW.getTime >>= maybe (throwIO $ userError "getTime") (\t -> return t) go rd = do t <- lift getTime fp <- FSM.checkNextAction t case fp of FSM.None -> do (lift . threadDelay) 10 -- Suspends to reduce the CPU usage. loop rd FSM.Update -> do rd' <- FSM.doUpdate (update window rd) loop rd' FSM.Drawing -> do FSM.doDrawing (render rd) (lift . GLFW.swapBuffers) window lift GLFW.pollEvents loop rd -- | The process after the createWindow. afterCreateWindow :: GLFW.Window -- ^ the window handle -> IO () afterCreateWindow window = do GLFW.makeContextCurrent $ Just window GLFW.swapInterval 1 desc <- R.initialize renderingLoop window (R.render desc) R.terminate desc GLFW.destroyWindow window -- | The entry point of the application. main :: IO () main = do progName <- getProgName GLFW.setErrorCallback $ Just errorHandler GLFW.init GLFW.windowHint $ GLFW.WindowHint'ContextVersionMajor 3 GLFW.windowHint $ GLFW.WindowHint'ContextVersionMinor 3 GLFW.windowHint $ GLFW.WindowHint'OpenGLProfile GLFW.OpenGLProfile'Core GLFW.createWindow 500 500 progName Nothing Nothing >>= maybe (return ()) afterCreateWindow GLFW.terminate
fujiyan/toriaezuzakki
haskell/glfw/keyboard/Rectangle.hs
Haskell
bsd-2-clause
3,276
{-# LANGUAGE BangPatterns #-} {-| RPC test program. -} {- Copyright (C) 2011, 2012, 2013 Google Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -} import Control.Concurrent import Control.Monad import System.Console.GetOpt import System.Environment import System.IO import Text.JSON (decode) import Text.Printf import Ganeti.BasicTypes import Ganeti.Common import Ganeti.Config import Ganeti.Errors import Ganeti.JSON import Ganeti.Objects import qualified Ganeti.Path as P import Ganeti.Rpc import Ganeti.Utils -- | Command line options structure. data Options = Options { optRpc :: String -- ^ RPC to execute , optDataFile :: FilePath -- ^ Path to the RPC serialised form , optVerbose :: Bool -- ^ Verbosity level , optStats :: Bool -- ^ Whether to show timing stats , optCount :: Int -- ^ Count of (multi) RPCs to do , optBatch :: Int -- ^ How many (multi) RPCs to run in parallel , optShowHelp :: Bool -- ^ Just show the help , optShowComp :: Bool -- ^ Just show the completion info , optShowVer :: Bool -- ^ Just show the program version } deriving Show -- | Default values for the command line options. defaultOptions :: Options defaultOptions = Options { optRpc = "version" , optDataFile = "rpc.json" , optVerbose = False , optStats = False , optCount = 1 , optBatch = 1 , optShowHelp = False , optShowComp = False , optShowVer = False } instance StandardOptions Options where helpRequested = optShowHelp verRequested = optShowVer compRequested = optShowComp requestHelp o = o { optShowHelp = True } requestVer o = o { optShowVer = True } requestComp o = o { optShowComp = True } -- | The rpcs we support. Sadly this duplicates the RPC list. data KnownRpc = KRInstanceInfo RpcCallInstanceInfo | KRAllInstancesInfo RpcCallAllInstancesInfo | KRInstanceList RpcCallInstanceList | KRNodeInfo RpcCallNodeInfo | KRVersion RpcCallVersion | KRStorageList RpcCallStorageList | KRTestDelay RpcCallTestDelay | KRExportList RpcCallExportList deriving (Show) -- | The command line options. 
options :: [GenericOptType Options] options = [ (Option "r" ["rpc"] (ReqArg (\ r o -> Ok o { optRpc = r }) "RPC") "the rpc to use [version]", OptComplChoices []) , (Option "f" ["data-file"] (ReqArg (\ f o -> Ok o { optDataFile = f }) "FILE") "the rpc serialised form [\"rpc.json\"]", OptComplFile) , (Option "v" ["verbose"] (NoArg (\ opts -> Ok opts { optVerbose = True})) "show more information when executing RPCs", OptComplNone) , (Option "t" ["stats"] (NoArg (\ opts -> Ok opts { optStats = True})) "show timing information summary", OptComplNone) , (Option "c" ["count"] (reqWithConversion (tryRead "reading count") (\count opts -> Ok opts { optCount = count }) "NUMBER") "Count of (multi) RPCs to execute [1]", OptComplInteger) , (Option "b" ["batch"] (reqWithConversion (tryRead "reading batch size") (\batch opts -> Ok opts { optBatch = batch }) "NUMBER") "Parallelisation factor for RPCs [1]", OptComplInteger) , oShowHelp , oShowComp , oShowVer ] -- | Arguments we expect arguments :: [ArgCompletion] arguments = [ArgCompletion OptComplOneNode 1 Nothing] -- | Log a message. logMsg :: MVar () -> String -> IO () logMsg outmvar text = withMVar outmvar $ \_ -> do let p = if null text || last text /= '\n' then putStrLn else putStr p text hFlush stdout -- | Parses a RPC. parseRpc :: String -> String -> Result KnownRpc parseRpc "instance_info" f = fromJResult "parsing rpc" (decode f) >>= Ok . KRInstanceInfo parseRpc "all_instances_info" f = fromJResult "parsing rpc" (decode f) >>= Ok . KRAllInstancesInfo parseRpc "instance_list" f = fromJResult "parsing rpc" (decode f) >>= Ok . KRInstanceList parseRpc "node_info" f = fromJResult "parsing rpc" (decode f) >>= Ok . KRNodeInfo parseRpc "version" f = fromJResult "parsing rpc" (decode f) >>= Ok . KRVersion parseRpc "storage_list" f = fromJResult "parsing rpc" (decode f) >>= Ok . KRStorageList parseRpc "test_delay" f = fromJResult "parsing rpc" (decode f) >>= Ok . KRTestDelay parseRpc "export_list" f = fromJResult "parsing rpc" (decode f) >>= Ok . KRExportList parseRpc s _ = Bad $ "Unknown rpc '" ++ s ++ "'" -- | Executes a RPC. These duplicate definitions are needed due to the -- polymorphism of 'executeRpcCall', and the binding of the result -- based on the input rpc call. execRpc :: [Node] -> KnownRpc -> IO [[String]] execRpc n (KRInstanceInfo v) = formatRpcRes `fmap` executeRpcCall n v execRpc n (KRAllInstancesInfo v) = formatRpcRes `fmap` executeRpcCall n v execRpc n (KRInstanceList v) = formatRpcRes `fmap` executeRpcCall n v execRpc n (KRNodeInfo v) = formatRpcRes `fmap` executeRpcCall n v execRpc n (KRVersion v) = formatRpcRes `fmap` executeRpcCall n v execRpc n (KRStorageList v) = formatRpcRes `fmap` executeRpcCall n v execRpc n (KRTestDelay v) = formatRpcRes `fmap` executeRpcCall n v execRpc n (KRExportList v) = formatRpcRes `fmap` executeRpcCall n v -- | Helper to format the RPC result such that it can be printed by -- 'printTable'. formatRpcRes :: (Show b) => [(Node, ERpcError b)] -> [[String]] formatRpcRes = map (\(n, r) -> [nodeName n, either explainRpcError show r]) -- | Main function. main :: IO () main = do cmd_args <- getArgs (opts, args) <- parseOpts defaultOptions cmd_args "rpc-test" options arguments rpc <- parseRpc (optRpc opts) `liftM` readFile (optDataFile opts) >>= exitIfBad "parsing RPC" cfg_file <- P.clusterConfFile cfg <- loadConfig cfg_file>>= exitIfBad "Can't load configuration" nodes <- exitIfBad "Can't find node" . 
errToResult $ mapM (getNode cfg) args token <- newEmptyMVar -- semaphore for batch calls outmvar <- newMVar () -- token for stdout non-interleaving let logger = if optVerbose opts then logMsg outmvar else const $ return () let batch = [1..optBatch opts] count = optCount opts rpcs = count * length nodes logger $ printf "Will execute %s multi-ops and %s RPCs" (show count) (show rpcs) tstart <- getCurrentTimeUSec _ <- forkIO $ mapM_ (\_ -> putMVar token ()) batch mapM_ (\idx -> do let str_idx = show idx logger $ "Acquiring token for run " ++ str_idx _ <- takeMVar token forkIO $ do start <- getCurrentTimeUSec logger $ "Start run " ++ str_idx !results <- execRpc nodes rpc stop <- getCurrentTimeUSec let delta = (fromIntegral (stop - start)::Double) / 1000 putMVar token () let stats = if optVerbose opts then printf "Done run %d in %7.3fmsec\n" idx delta else "" table = printTable "" ["Node", "Result"] results [False, False] logMsg outmvar $ stats ++ table ) [1..count] mapM_ (\_ -> takeMVar token) batch _ <- takeMVar outmvar when (optStats opts) $ do tstop <- getCurrentTimeUSec let delta = (fromIntegral (tstop - tstart) / 1000000)::Double printf "Total runtime: %9.3fs\n" delta :: IO () printf "Total mult-ops: %9d\n" count :: IO () printf "Total single RPCs: %9d\n" rpcs :: IO () printf "Multi-ops/sec: %9.3f\n" (fromIntegral count / delta) :: IO () printf "RPCs/sec: %9.3f\n" (fromIntegral rpcs / delta) :: IO ()
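-- | Standalone sketch of the concurrency pattern used by 'main' above: one
-- MVar acts as a crude counting semaphore so that at most 'batch' jobs run
-- at a time, and a second MVar is used to wait for all of them. The module
-- and function names are illustrative; this is not part of rpc-test itself.
module BatchSketch (runBatched) where

import Control.Concurrent (forkIO)
import Control.Concurrent.MVar (newEmptyMVar, putMVar, takeMVar)
import Control.Monad (forM_, replicateM_)

runBatched :: Int -> [IO ()] -> IO ()
runBatched batch jobs = do
  tokens <- newEmptyMVar            -- semaphore: one () per free slot
  done   <- newEmptyMVar            -- completion signal: one () per finished job
  _ <- forkIO $ replicateM_ batch (putMVar tokens ())
  forM_ jobs $ \job -> do
    takeMVar tokens                 -- wait for a free slot
    _ <- forkIO $ do
      job
      putMVar tokens ()             -- release the slot
      putMVar done ()
    return ()
  replicateM_ (length jobs) (takeMVar done)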
apyrgio/snf-ganeti
src/rpc-test.hs
Haskell
bsd-2-clause
9,085
{-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE TemplateHaskell #-} {- | Module : Kiosk.Backend.Data.ReportTemplateSpec Description : Tests for Report Generation Copyright : Plow Technologies LLC License : MIT License Maintainer : Scott Murphy Stability : experimental Portability : portable These are tests for report generation as it specifically relates to data templates -} module Kiosk.Backend.Data.ReportTemplateSpec (spec,main,convertToKioskForm) where import Control.Applicative ((<$>)) import Data.String (IsString) -- import Control.Lens import Control.Lens import Data.Aeson import qualified Data.ByteString.Lazy as L import Data.Text (Text) import qualified Data.Text as T import Data.Time import Data.Time.Clock.POSIX -- import Kiosk.Backend.Data.DataTemplate import Data.Map.Lazy (Map) import qualified Data.Map.Lazy as M import Data.Text (Text) import qualified Data.Text as T import Kiosk.Backend.Data.DataTemplate import Kiosk.Backend.Data.ReportTemplate import Kiosk.Backend.Form import Language.Haskell.TH import Codec.Xlsx import Data.Monoid ((<>)) -- import ReportTemplate.Internal hiding (spec) import ReportTemplate.Report import System.Locale (defaultTimeLocale) import System.Time import Test.Hspec import Test.QuickCheck main :: IO () main = hspec spec spec :: SpecWith () spec = do describe (nameBase 'makeCellDoubleFromInputDouble) $ do it "Gets an Input double and transfers it to a CellDouble" $ do True `shouldBe` True describe (nameBase 'makeCellTextFromInputText) $ do it "Gets an InputText and transfers it to a CellText" $ do True `shouldBe` True -- a template is described from here: makeCobaltExcelTemplate :: XlsxReportTemplate makeCobaltExcelTemplate = buildReportTemplate preambleTemplate rowTemplate preambleTemplate :: XlsxPreambleTemplateList preambleTemplate = [("Report Prepared For", const $ getCompanyName (1,1)) ] where formatTimestampDate context _ = def & cellValue ?~ CellText "Current Time not active" -- formatTimestampDate context _ = makeCellMapFromUTCTime "%c" (2,2) . 
_xlsxCurrentTime $ context rowTemplate:: XlsxRowTemplateList rowTemplate = [ ("Water Hauling Number",getWaterHauler ) ,("Lease Name",getLeaseName) ,("Description", getDescription) ,("Truck Number",getTruckNumber) ,("Customer Ticket Number", getCustomerTicketNumber) ] where getDescription = const $ makeCellTextWithCellTemplate descriptionTemplate descriptionList descriptionList = ["Truck #", "Name of Lease", "Water Hauling Permit #"] getWaterHauler = const $ makeCellTextFromInputText "Water Hauling Permit #" getLeaseName = const $ makeCellTextFromInputText "Name of Lease" getTruckNumber = const $ makeCellTextFromInputText "Truck #" getCustomerTicketNumber = const $ makeCellTextFromInputText "Customer Ticket #" descriptionTemplate [field1, field2, field3] = "Field1 Is: " <> field1 <> "Field 2 is:" <> field2 <> "Field 3 is:" <> field3 descriptionTemplate _ = "Wrong Number of arguments" -- | Report Inspection dispayReportValues = do report <- generateReport sequence $ (foldrTableByRowWithIndex printAndMoveOn (return ())) <$> (toListOf (reportRows._ReportTableRowIndex._2) report) where printAndMoveOn k rowVal m = do m print k print rowVal -- | Generate report makeXlsxFile = do ct <- getPOSIXTime xl <- generateReportXlsx L.writeFile "example.xlsx" $ fromXlsx ct xl generateReportXlsx = do sheet <- generateReportSheet return $ def & atSheet "ex" ?~ sheet generateReportXlsx :: IO Xlsx generateReportSheet = renderSpreadsheet <$> generateReport generateReport :: IO XlsxReport generateReport = do ct <- getCurrentTime dtes <- generate generateDataTemplatesWithData let forms@(oneForm:_) = convertToKioskForm <$> currentCobaltForms reportTemplate = makeCobaltExcelTemplate report = buildXlsxReport reportTemplate (XlsxContext "") oneForm dtes return report generateDataTemplatesWithData = do txt <- T.pack <$> arbitrary let targetDataTemplates = (fromFormToDataTemplate.convertToKioskForm <$> currentCobaltForms ) transformedDataTemplates = targetDataTemplates & (traverse . _templateItems . traverse . _templateValue . _InputTypeText . getInputText) .~ "an arbitrary thign" return transformedDataTemplates -- | Form Generation (Cobalt Version) convertToKioskForm :: CobaltWaterHaulingCompany -> Form convertToKioskForm waterHaulingCompany = Form cobaltEnvironmentalSolutions cobaltAddress cobaltLogo defaultPhone [createWaterHauler waterHaulingName] cobaltFormBody where waterHaulingName = _whcCompanyName $ waterHaulingCompany newtype UUID = UUID { _getUUID :: Text} deriving (Read,Eq,Show,IsString,ToJSON,FromJSON,Ord) data CobaltWaterHaulingCompany = CobaltWaterHaulingCompany { _whcFormId:: Maybe FormId , _whcCompanyName :: CompanyName , _whcGetUUID :: UUID } deriving (Eq,Ord) cobaltEnvironmentalSolutions :: Company cobaltEnvironmentalSolutions = Company "Cobalt Environmental Solutions LLC" [CompanyWidth $ WidthAttribute (12::Int) ] cobaltAddress:: Address cobaltAddress= Address "PO Box 130 Wilson, Oklahoma 73463\n886-849-5483\n" [AddressWidth $ WidthAttribute (12::Int)] cobaltLogo :: Logo cobaltLogo = Logo "" [LogoPath . 
PathAttribute $ "Cobalt.png"] createWaterHauler :: CompanyName -> Constant createWaterHauler whc = Constant (T.pack.show $ whc) [ ConstantAttributeType "'Water Hauling Company'" , ConstantAttributeIndexable $ IndexableAttribute True ] newtype FormId = FormId {_getFormId :: Integer} deriving (Read,Eq,Show,Num,ToJSON,FromJSON,Ord) cobaltFormBody :: [Row] cobaltFormBody = [ truckNumberRow , permitNumberRow , customerTicketNumberRow , leaseInfoRow , leaseOperatorRow , leaseNameRow , waterTypeAndAmountRow , dateRow , timeInRow , driverNameRow , signatureRow] where truckNumberRow = generateInputRowText "Truck #" permitNumberRow = generateInputRowText "Water Hauling Permit #" customerTicketNumberRow = generateInputRowText "Customer Ticket #" leaseInfoRow = generateLabelRow "Lease Information" leaseOperatorRow = leaseOperatorDropdownRow leaseNameRow = generateInputRowText "Name of Lease" waterTypeAndAmountRow = waterTypeRadioRow dateRow = generateInputRowDate "Date" timeInRow = generateInputRowTime "Time In" driverNameRow = generateInputRowText "Driver's Name" signatureRow = generateInputRowSignature "Driver Signature" waterTypeRadioRow :: Row waterTypeRadioRow = Row [waterTypeRadio] [] waterTypeRadio :: Item waterTypeRadio = Item [ItemRadio . generateRadio "Type of Water Hauled" $ options ] [] where options = [generateOption "Produced Water" ,generateOption "Pit Water" ,generateOption "Fresh Water" ,generateOption "Flowback Water" ] leaseOperatorDropdownRow :: Row leaseOperatorDropdownRow = Row [leaseOperatorItem] [] where leaseOperatorItem = Item [ItemDropdown leaseOperatorDropdown] [] dropdownOptions :: [Option] dropdownOptions = generateOption <$> leaseOperators leaseOperatorDropdown :: Dropdown leaseOperatorDropdown = Dropdown (Label "Lease Operator" []) dropdownOptions (Just fullDefaultInputText ) generateLabelRow :: Text -> Row generateLabelRow labelText = Row [generateLabelItem labelText] [] -- Input Text generateInputRowText :: Text -> Row generateInputRowText labelText = Row [generateInputItemText labelText] [] generateInputItemText :: Text -> Item generateInputItemText labelText = Item [ItemLabel . generateLabel $ labelText , ItemInput fullDefaultInputText] [] fullDefaultInputText :: Input fullDefaultInputText = Input fullDefaultInputTypeText fullDefaultInputAttributesList fullDefaultInputTypeText :: InputType fullDefaultInputTypeText = InputTypeText $ InputText (""::Text) -- Input Date generateInputRowDate :: Text -> Row generateInputRowDate labelDate = Row [generateInputItemDate labelDate] [] generateInputItemDate :: Text -> Item generateInputItemDate labelDate = Item [ItemLabel . generateLabel $ labelDate , ItemAutoInput . AutoInput $ fullDefaultInputDate] [] fullDefaultInputDate :: Input fullDefaultInputDate = Input fullDefaultInputTypeDate [InputType InputTypeAttributeDate] fullDefaultInputTypeDate :: InputType fullDefaultInputTypeDate = InputTypeDate $ (InputDate "") -- Input Time generateInputRowTime :: Text -> Row generateInputRowTime labelTime = Row [generateInputItemTime labelTime] [] generateInputItemTime :: Text -> Item generateInputItemTime labelTime = Item [ItemLabel . generateLabel $ labelTime , ItemAutoInput . 
AutoInput $ fullDefaultInputTime] [] fullDefaultInputTime :: Input fullDefaultInputTime = Input fullDefaultInputTypeTime [InputType InputTypeAttributeTime] fullDefaultInputTypeTime :: InputType fullDefaultInputTypeTime = InputTypeTime $ (InputTime "") -- Input Signature generateInputRowSignature :: Text -> Row generateInputRowSignature labelText = Row [generateInputItemSignature labelText] [] generateInputItemSignature :: Text -> Item generateInputItemSignature labelText = Item [ItemLabel . generateLabel $ labelText , ItemInput fullDefaultInputSignature] [] fullDefaultInputSignature :: Input fullDefaultInputSignature = Input fullDefaultInputTypeSignature [InputType InputTypeAttributeSignature] generateLabelItem :: Text -> Item generateLabelItem labelText = Item [ItemLabel . generateLabel $ labelText ] [] leaseOperators :: [Text] leaseOperators = ["XTO Energy","Continental Resources","Citation Oil and Gas","Other","Brady's Welding & Machine Shop","WFW Production","Mustang Fuel","SSB Production","LINN Energy","Keith F Walker","GLB","Mack Energy","Nubs","Ardmore Production","Dehart","Southern Oklahoma Production","Silver Creek","Brady Welding & Machine Shop","Coastal Plains","Thunder Oil & Gas","Atlas Pipeline","Cantrell Energy","Kingery Energy","Williford Resources","Mark Shidler","WFD Oil","Yale Oil","Star Oil & Co.","TEF","T&B Oil Co."] fullDefaultInputTypeSignature :: InputType fullDefaultInputTypeSignature = InputTypeSignature $ Signature "" fullDefaultInputAttributesList :: [InputAttribute] fullDefaultInputAttributesList = [tAttr, ixAttr] where ixAttr = InputIndexable $ IndexableAttribute True tAttr = InputType $ InputTypeAttributeText generateLabel :: Text -> Label generateLabel labelText = Label labelText [LabelWidth $ WidthAttribute (12::Int)] generateRadio :: Text -> [Option] -> Radio generateRadio labelText options = Radio (generateLabel labelText) [] options [fullDefaultOptionQualifier] -- | Radio fullDefaultOptionQualifier :: OptionQualifier fullDefaultOptionQualifier = OptionQualifier fullDefaultQualifierChoices [] fullDefaultQualifierChoices :: [QualifierChoices] fullDefaultQualifierChoices = [ QualifierLabel ( Label "Amount" []) , QualifierInput fullDefaultQualifierInput] fullDefaultQualifierInput :: Input fullDefaultQualifierInput = Input dit dia where dit = InputTypeDouble . 
InputDouble $ 0.0 dia = [tAttr, ixAttr,minAttr,maxAttr] minAttr = InputMinDouble $ MinAttributeDouble (0.0::Double) maxAttr = InputMaxDouble $ MaxAttributeDouble (150.0::Double) ixAttr = InputIndexable $ IndexableAttribute True tAttr = InputType $ InputTypeAttributeDouble generateOption :: Text -> Option generateOption optionText = Option optionText [] data CompanyName = BigStarTrucking | BulletEnergyServices | CandJTrucking | BigMacTankTrucks | BradyWeldingandMachineShop | KleenOilfieldServices | BandCBackhoeandTransports | ForsytheOilfield | HullsOilfield | SouthCentralOilfieldServices | TopOTexas | MitchellTankTruckServices | FluidServices | DavenportOilfieldServices | TestCompany | SoonerStar | NexStream | HullEnvironmentalServices | Arkhoma | ZeroSeven | HammTankAndTrucking deriving (Eq,Ord) instance Show CompanyName where show (BigStarTrucking) = "Big Star Trucking" show (BulletEnergyServices) = "Bullet Energy Services" show (CandJTrucking) = "C and J Trucking" show (BigMacTankTrucks) = "Big Mac Trucks" show (BradyWeldingandMachineShop) = "Bradly Welding and Machine Shop" show (KleenOilfieldServices) = "Kleen Oilfield Services" show (BandCBackhoeandTransports) = "B and C Backhoe and Transports" show (ForsytheOilfield ) = "Forsythe Oilfield" show (HullsOilfield) = "Hulls Oilfield" show (SouthCentralOilfieldServices) = "South Central Oilfield Services" show (TopOTexas) = "Top-O-Texas" show (MitchellTankTruckServices) = "Mitchell Tank Truck Services" show (FluidServices) = "Fluid Services" show (DavenportOilfieldServices) = "Davenport Oilfield Services" show (TestCompany ) = "Test Company" show (SoonerStar ) = "Sooner Star" show (NexStream ) = "NexStream" show (Arkhoma ) = "Arkhoma" show (HullEnvironmentalServices) = "Hull Environmental Services" show (ZeroSeven) = "07 Energy" show (HammTankAndTrucking) = "Hamm Tank and Trucking Service, LLC" exampleUUID :: UUID exampleUUID = "a2e3609e-154d-4e60-80e0-c77189098617" currentCobaltForms :: [CobaltWaterHaulingCompany] currentCobaltForms = [ CobaltWaterHaulingCompany (Just 0) BigStarTrucking exampleUUID , CobaltWaterHaulingCompany (Just 1) BulletEnergyServices exampleUUID , CobaltWaterHaulingCompany (Just 2) CandJTrucking exampleUUID , CobaltWaterHaulingCompany (Just 3) BigMacTankTrucks exampleUUID , CobaltWaterHaulingCompany (Just 4) BradyWeldingandMachineShop exampleUUID , CobaltWaterHaulingCompany (Just 5) KleenOilfieldServices exampleUUID , CobaltWaterHaulingCompany (Just 6) BandCBackhoeandTransports exampleUUID , CobaltWaterHaulingCompany (Just 7) ForsytheOilfield exampleUUID , CobaltWaterHaulingCompany (Just 8) HullsOilfield exampleUUID , CobaltWaterHaulingCompany (Just 9) SouthCentralOilfieldServices exampleUUID , CobaltWaterHaulingCompany (Just 10) TopOTexas exampleUUID , CobaltWaterHaulingCompany (Just 11) MitchellTankTruckServices exampleUUID , CobaltWaterHaulingCompany (Just 12) FluidServices exampleUUID , CobaltWaterHaulingCompany (Just 13) DavenportOilfieldServices exampleUUID , CobaltWaterHaulingCompany (Just 14) TestCompany exampleUUID , CobaltWaterHaulingCompany (Just 15) SoonerStar exampleUUID , CobaltWaterHaulingCompany (Just 16) NexStream exampleUUID , CobaltWaterHaulingCompany (Just 17) Arkhoma exampleUUID , CobaltWaterHaulingCompany (Just 18) HullEnvironmentalServices exampleUUID , CobaltWaterHaulingCompany (Just 19) ZeroSeven exampleUUID , CobaltWaterHaulingCompany (Just 20) HammTankAndTrucking exampleUUID]
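-- | A small, self-contained illustration of the lens update pattern used in
-- 'generateDataTemplatesWithData' above: a traversal composed from record
-- lenses rewrites one field inside every element of a list in one pass. The
-- record types here are invented for the example; only the lens operators
-- and 'makeLenses' are the real library API.
{-# LANGUAGE TemplateHaskell #-}
module LensSketch where

import Control.Lens

data Item = Item { _itemLabel :: String, _itemValue :: String } deriving Show
makeLenses ''Item

data Template = Template { _templateItems :: [Item] } deriving Show
makeLenses ''Template

-- | Overwrite every item value in every template, the same shape of update
-- applied to the generated data templates in the spec above.
fillValues :: String -> [Template] -> [Template]
fillValues v = traverse . templateItems . traverse . itemValue .~ v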
plow-technologies/cobalt-kiosk-data-template
test/Kiosk/Backend/Data/ReportTemplateSpec.hs
Haskell
bsd-3-clause
17,251
-- Copyright (c) 2017, Travis Bemann -- All rights reserved. -- -- Redistribution and use in source and binary forms, with or without -- modification, are permitted provided that the following conditions are met: -- -- o Redistributions of source code must retain the above copyright notice, this -- list of conditions and the following disclaimer. -- -- o Redistributions in binary form must reproduce the above copyright notice, -- this list of conditions and the following disclaimer in the documentation -- and/or other materials provided with the distribution. -- -- o Neither the name of the copyright holder nor the names of its -- contributors may be used to endorse or promote products derived from -- this software without specific prior written permission. -- -- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -- ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE -- LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -- CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -- SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -- INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -- CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -- ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -- POSSIBILITY OF SUCH DAMAGE. {-# LANGUAGE OverloadedStrings, OverloadedLabels, PatternSynonyms, BangPatterns #-} module Main (Main.main) where import Robots.Genetic.HunterKiller.Types import Robots.Genetic.HunterKiller.Utility import Robots.Genetic.HunterKiller.Intrinsics import Robots.Genetic.HunterKiller.Params import Robots.Genetic.HunterKiller.Load import Robots.Genetic.HunterKiller.Save import Robots.Genetic.HunterKiller.Combat import Robots.Genetic.HunterKiller.Render import Control.Concurrent (forkIO, forkOS, threadDelay) import Control.Concurrent.MVar (MVar, newEmptyMVar, putMVar, takeMVar) import Control.Concurrent.STM (STM, atomically) import Control.Concurrent.STM.TQueue (TQueue, newTQueueIO, writeTQueue, readTQueue, tryReadTQueue) import System.Exit (exitWith, exitFailure, ExitCode(..)) import qualified Data.Sequence as Seq import Data.Sequence ((><)) import qualified Data.Text as Text import Data.Text.IO (hPutStr, readFile) import System.IO (stderr, openFile, hClose, IOMode(..)) import System.Environment (getArgs, getProgName) import Control.Exception (catch, IOException, SomeException) import qualified GI.Gtk as Gtk import qualified GI.Gdk as Gdk import GI.Cairo.Structs.Context (Context(..)) import qualified Graphics.Rendering.Cairo as Cairo import Data.GI.Base import Control.Monad.Trans.Reader (ReaderT(..)) import Foreign.Ptr (castPtr) import Graphics.Rendering.Cairo.Types (Cairo(..)) import Graphics.Rendering.Cairo.Internal (Render(..)) import Control.Monad.IO.Class (liftIO) import Text.Printf (printf) import Data.Functor ((<$>)) import Text.Read (readMaybe) import Data.IORef (IORef, newIORef, readIORef, writeIORef) import System.Random (StdGen, newStdGen) import Data.Foldable (foldl', toList) import GI.GLib (idleAdd, pattern PRIORITY_DEFAULT, pattern PRIORITY_HIGH) import GI.Gdk.Objects.Window import qualified System.Clock as Clock import Prelude hiding (hPutStr, readFile) import Control.DeepSeq (NFData(..), deepseq) -- | The main action. 
main :: IO () main = do inputs <- getInputs case inputs of Right (exprs, params, savePath) -> setup exprs $ params { robotParamsAutoSavePath = savePath } Left errorText -> do hPutStr stderr errorText exitFailure -- | Get inputs. getInputs :: IO (Either Text.Text (Seq.Seq RobotExpr, RobotParams, FilePath)) getInputs = do args <- getArgs case args of [worldPath, worldCopiesText, paramsPath, savePath] -> do paramsText <- catch (Right <$> readFile paramsPath) (\e -> return . Left . Text.pack $ show (e :: IOException)) case paramsText of Right paramsText -> do worldText <- catch (Right <$> readFile worldPath) (\e -> return . Left . Text.pack $ show (e :: IOException)) case worldText of Right worldText -> let errorOrParams = loadParams paramsText in case errorOrParams of Right params -> let errorOrExprs = loadWorld specialConstEntries worldText in case errorOrExprs of Right exprs -> case readMaybe worldCopiesText of Just worldCopies -> if worldCopies >= 1 then return $ Right (foldl' (><) Seq.empty $ Seq.replicate worldCopies exprs, params, savePath) else return $ Left "number of copies must be greater than zero\n" Nothing -> return $ Left "invalid number of copies\n" Left errorText -> return . Left . Text.pack $ printf "%s: unable to load world: %s\n" worldPath errorText Left errorText -> return . Left . Text.pack $ printf "%s: unable to load params: %s\n" paramsPath errorText Left errorText -> return . Left . Text.pack $ printf "%s: %s\n" worldPath errorText Left errorText -> return . Left . Text.pack $ printf "%s: %s\n" paramsPath errorText _ -> do progName <- getProgName return . Left . Text.pack $ printf "Usage: %s WORLD-FILE COUNT PARAMS-FILE SAVE-FILE\n" progName -- | Set up the UI and prepare for running. setup :: Seq.Seq RobotExpr -> RobotParams -> IO () setup exprs params = do controlQueue <- newTQueueIO exitQueue <- newTQueueIO Gtk.init Nothing window <- Gtk.windowNew Gtk.WindowTypeToplevel Gtk.setWindowTitle window "Botwars" Gtk.onWidgetDestroy window $ do Gtk.mainQuit atomically $ writeTQueue controlQueue RobotExit vbox <- Gtk.boxNew Gtk.OrientationVertical 10 Gtk.boxSetHomogeneous vbox False canvas <- Gtk.drawingAreaNew Gtk.widgetSetSizeRequest canvas 920 920 worldRef <- newIORef Nothing Gtk.onWidgetDraw canvas $ \(Context fp) -> do withManagedPtr fp $ \p -> (`runReaderT` Cairo (castPtr p)) $ runRender $ do w <- liftIO $ fromIntegral <$> Gtk.widgetGetAllocatedWidth canvas h <- liftIO $ fromIntegral <$> Gtk.widgetGetAllocatedHeight canvas world <- liftIO $ readIORef worldRef case world of Just world -> drawWorld world w h Nothing -> return () return True Gtk.boxPackStart vbox canvas True True 0 buttonBox <- Gtk.buttonBoxNew Gtk.OrientationHorizontal Gtk.buttonBoxSetLayout buttonBox Gtk.ButtonBoxStyleCenter backwardButton <- Gtk.buttonNew stopButton <- Gtk.buttonNew startButton <- Gtk.buttonNew forwardButton <- Gtk.buttonNew saveButton <- Gtk.buttonNew Gtk.buttonSetLabel backwardButton "<<" Gtk.buttonSetLabel stopButton "Stop" Gtk.buttonSetLabel startButton "Start" Gtk.buttonSetLabel forwardButton ">>" Gtk.buttonSetLabel saveButton "Save" Gtk.onButtonClicked backwardButton $ atomically $ writeTQueue controlQueue RobotBackward Gtk.onButtonClicked stopButton $ atomically $ writeTQueue controlQueue RobotStop Gtk.onButtonClicked startButton $ atomically $ writeTQueue controlQueue RobotStart Gtk.onButtonClicked forwardButton $ atomically $ writeTQueue controlQueue RobotForward Gtk.onButtonClicked saveButton $ do fileChooser <- Gtk.fileChooserNativeNew (Just "Save As World") (Just window) 
Gtk.FileChooserActionSave Nothing Nothing result <- toEnum <$> fromIntegral <$> Gtk.nativeDialogRun fileChooser case result of Gtk.ResponseTypeAccept -> do filename <- Gtk.fileChooserGetFilename fileChooser case filename of Just filename -> do let !message = deepseq filename `seq` RobotSave filename atomically $ writeTQueue controlQueue message Nothing -> return () _ -> return () Gtk.boxPackStart buttonBox backwardButton False False 0 Gtk.boxPackStart buttonBox stopButton False False 0 Gtk.boxPackStart buttonBox startButton False False 0 Gtk.boxPackStart buttonBox forwardButton False False 0 Gtk.boxPackStart buttonBox saveButton False False 0 Gtk.boxPackEnd vbox buttonBox False False 0 Gtk.containerAdd window vbox gen <- newStdGen Gtk.widgetShowAll window forkOS Gtk.main forkIO $ do time <- Clock.getTime Clock.Monotonic let play = RobotPlay { robotPlayCyclesPerSecond = robotParamsMaxCyclesPerSecond params, robotPlayRunning = False, robotPlayReverse = False, robotPlayIndex = 0, robotPlayDoStep = RobotNoStep } mainLoop (initCont exprs params gen) canvas worldRef controlQueue exitQueue time play exitStatus <- atomically $ readTQueue exitQueue exitWith exitStatus -- | Execute the main loop of the genetically-programmed robot fighting arena. mainLoop :: RobotCont -> Gtk.DrawingArea -> IORef (Maybe RobotWorld) -> TQueue RobotControl -> TQueue ExitCode -> Clock.TimeSpec -> RobotPlay -> IO () mainLoop cont canvas worldRef controlQueue exitQueue nextTime play = do let params = robotContParams cont control <- atomically $ tryReadTQueue controlQueue case control of Just RobotExit -> atomically $ writeTQueue exitQueue ExitSuccess Just (RobotSave path) -> do case robotContWorld cont of Just world -> do message <- saveWorldToFile path world case message of Left errorText -> hPutStr stderr errorText Right () -> return () Nothing -> return () mainLoop cont canvas worldRef controlQueue exitQueue nextTime play Just control -> let play' = changePlay control play params in mainLoop cont canvas worldRef controlQueue exitQueue nextTime play' Nothing -> do let displayInfo = robotPlayRunning play || (robotPlayDoStep play /= RobotNoStep) (cont', world, play) <- nextState cont play writeIORef worldRef (world `seq` Just world) Gdk.threadsAddIdle PRIORITY_HIGH $ do window <- Gtk.widgetGetWindow canvas case window of Just window -> Gdk.windowInvalidateRect window Nothing True Nothing -> return () return False if displayInfo then do let robotDisplay = Text.concat (toList (fmap (\robot -> Text.pack $ printf "%d " (robotIndex robot)) (robotWorldRobots world))) shotDisplay = Text.concat (toList (fmap (\shot -> Text.pack $ printf "%d " (shotRobotIndex shot)) (robotWorldShots world))) putStr $ printf "Robots: %sShots: %s\n" robotDisplay shotDisplay else return () time <- Clock.getTime Clock.Monotonic let cyclesPerSecond = if robotPlayRunning play then robotPlayCyclesPerSecond play else robotParamsMaxCyclesPerSecond params maxDelay = Clock.fromNanoSecs . floor $ 1000000000.0 / cyclesPerSecond nextTime' = nextTime + maxDelay if time < nextTime' then threadDelay . fromIntegral $ (Clock.toNanoSecs (nextTime' - time)) `div` 1000 else return () let nextTime'' = if time - nextTime > (Clock.fromNanoSecs . floor $ 2000000000.0 / cyclesPerSecond) then time else nextTime' mainLoop cont' canvas worldRef controlQueue exitQueue nextTime'' play -- | Change the playback state. 
changePlay :: RobotControl -> RobotPlay -> RobotParams -> RobotPlay changePlay RobotStart play _ = play { robotPlayRunning = True } changePlay RobotStop play _ = play { robotPlayRunning = False } changePlay RobotForward play params = if robotPlayRunning play then if robotPlayReverse play then let newCyclesPerSecond = robotPlayCyclesPerSecond play / 2.0 in if newCyclesPerSecond >= robotParamsMaxCyclesPerSecond params / 16.0 then play { robotPlayCyclesPerSecond = newCyclesPerSecond } else play { robotPlayReverse = False, robotPlayCyclesPerSecond = robotParamsMaxCyclesPerSecond params / 16.0 } else let newCyclesPerSecond = robotPlayCyclesPerSecond play * 2.0 in if newCyclesPerSecond <= robotParamsMaxCyclesPerSecond params then play { robotPlayCyclesPerSecond = newCyclesPerSecond } else play { robotPlayCyclesPerSecond = robotParamsMaxCyclesPerSecond params } else play { robotPlayDoStep = RobotStepForward } changePlay RobotBackward play params = if robotPlayRunning play then if robotPlayReverse play then let newCyclesPerSecond = robotPlayCyclesPerSecond play * 2.0 in if newCyclesPerSecond <= robotParamsMaxCyclesPerSecond params then play { robotPlayCyclesPerSecond = newCyclesPerSecond } else play { robotPlayCyclesPerSecond = robotParamsMaxCyclesPerSecond params } else let newCyclesPerSecond = robotPlayCyclesPerSecond play / 2.0 in if newCyclesPerSecond >= robotParamsMaxCyclesPerSecond params / 16.0 then play { robotPlayCyclesPerSecond = newCyclesPerSecond } else play { robotPlayReverse = True, robotPlayCyclesPerSecond = robotParamsMaxCyclesPerSecond params / 16.0 } else play { robotPlayDoStep = RobotStepBackward } changePlay _ _ _ = error "impossible" -- | Get a new continuity, world, and play control state. nextState :: RobotCont -> RobotPlay -> IO (RobotCont, RobotWorld, RobotPlay) nextState cont play = if robotPlayRunning play then if not $ robotPlayReverse play then if robotPlayIndex play >= -1 then do let (event, cont') = executeCycle cont play' = play { robotPlayIndex = 0 } world <- case event of RobotWorldCycle world -> return world RobotRoundDone world autoSave -> do doRoundDone world autoSave $ robotContParams cont return world return (cont', world, play') else let index = robotPlayIndex play + 1 prevWorlds = robotContPrevWorlds cont world = case Seq.lookup (Seq.length prevWorlds + index) prevWorlds of Just world -> world Nothing -> error "impossible" play' = play { robotPlayIndex = index } in return (cont, world, play') else let prevWorlds = robotContPrevWorlds cont in if robotPlayIndex play >= -(Seq.length prevWorlds - 1) then let index = robotPlayIndex play - 1 world = case Seq.lookup (Seq.length prevWorlds + index) prevWorlds of Just world -> world Nothing -> error "impossible" play' = play { robotPlayIndex = index } in return (cont, world, play') else let play' = play { robotPlayRunning = False, robotPlayReverse = False } world = case Seq.lookup (Seq.length prevWorlds + robotPlayIndex play') prevWorlds of Just world -> world Nothing -> error "impossible" in return (cont, world, play') else case robotPlayDoStep play of RobotStepForward -> if robotPlayIndex play >= -1 then do let (event, cont') = executeCycle cont play' = play { robotPlayIndex = 0, robotPlayDoStep = RobotNoStep } world <- case event of RobotWorldCycle world -> return world RobotRoundDone world autoSave -> do doRoundDone world autoSave $ robotContParams cont return world return (cont', world, play') else let index = robotPlayIndex play + 1 prevWorlds = robotContPrevWorlds cont world = case Seq.lookup (Seq.length 
prevWorlds + index) prevWorlds of Just world -> world Nothing -> error "impossible" play' = play { robotPlayIndex = index, robotPlayDoStep = RobotNoStep } in return (cont, world, play') RobotStepBackward -> let prevWorlds = robotContPrevWorlds cont in if robotPlayIndex play >= -(Seq.length prevWorlds - 1) then let index = robotPlayIndex play - 1 world = case Seq.lookup (Seq.length prevWorlds + index) prevWorlds of Just world -> world Nothing -> error "impossible" play' = play { robotPlayIndex = index, robotPlayDoStep = RobotNoStep } in return (cont, world, play') else let play' = play { robotPlayDoStep = RobotNoStep } in if robotPlayIndex play' < 0 then let world = case Seq.lookup (Seq.length prevWorlds + robotPlayIndex play') prevWorlds of Just world -> world Nothing -> error "impossible" in return (cont, world, play) else case robotContWorld cont of Just world -> return (cont, world, play') Nothing -> do let (event, cont') = executeCycle cont play'' = play' { robotPlayIndex = 0 } world <- case event of RobotWorldCycle world -> return world RobotRoundDone world autoSave -> do doRoundDone world autoSave $ robotContParams cont return world return (cont', world, play'') RobotNoStep -> if robotPlayIndex play >= 0 then case robotContWorld cont of Just world -> return (cont, world, play) Nothing -> do let (event, cont') = executeCycle cont world <- case event of RobotWorldCycle world -> return world RobotRoundDone world autoSave -> do doRoundDone world autoSave $ robotContParams cont return world return (cont', world, play) else do let prevWorlds = robotContPrevWorlds cont world = case Seq.lookup (Seq.length prevWorlds + robotPlayIndex play) prevWorlds of Just world -> world Nothing -> error "impossible" return (cont, world, play) -- | Handle round done. doRoundDone :: RobotWorld -> RobotAutoSave -> RobotParams -> IO () doRoundDone world autoSave params = do saveWorldToFile (robotParamsBackupSavePath params) world >> return () doAutoSave (robotParamsAutoSavePath params) autoSave -- | Do autosave if needed. doAutoSave :: FilePath -> RobotAutoSave -> IO () doAutoSave path autoSave = do let round = robotAutoSaveRound autoSave case robotAutoSaveWorld autoSave of Just world -> do message <- saveWorldToFile (printf "%s.%d" path round) world case message of Left errorText -> hPutStr stderr errorText Right () -> return () Nothing -> return () case robotAutoSaveRobot autoSave of Just robot -> do message <- saveRobotToFile (printf "%s.%d.individual" path round) robot case message of Left errorText -> hPutStr stderr errorText Right () -> return () Nothing -> return () -- | Save a world. saveWorldToFile :: FilePath -> RobotWorld -> IO (Either Text.Text ()) saveWorldToFile path world = do let worldText = saveWorld specialConstEntries . fmap robotExpr $ robotWorldRobots world saveFile <- catch (Right <$> openFile path WriteMode) (\e -> return . Left . Text.pack $ show (e :: IOException)) case saveFile of Right saveFile -> do hPutStr saveFile worldText hClose saveFile return $ Right () Left errorText -> do return $ Left errorText -- | Save a robot. saveRobotToFile :: FilePath -> Robot -> IO (Either Text.Text ()) saveRobotToFile path expr = do let worldText = saveWorld specialConstEntries . Seq.singleton $ robotExpr expr saveFile <- catch (Right <$> openFile path WriteMode) (\e -> return . Left . Text.pack $ show (e :: IOException)) case saveFile of Right saveFile -> do hPutStr saveFile worldText hClose saveFile return $ Right () Left errorText -> do return $ Left errorText
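-- | Stripped-down sketch of the control-queue pattern used by the UI above:
-- GTK callbacks only enqueue commands, and the simulation thread polls the
-- queue once per cycle with a non-blocking read. The Command type and names
-- are illustrative, not the real botwars types.
module ControlQueueSketch (Command (..), sendCommand, pollCommand) where

import Control.Concurrent.STM
  (TQueue, atomically, tryReadTQueue, writeTQueue)

data Command = Start | Stop | Quit deriving (Eq, Show)

-- | What a UI callback does: enqueue a command and return immediately.
sendCommand :: TQueue Command -> Command -> IO ()
sendCommand q = atomically . writeTQueue q

-- | What the simulation loop does each cycle: a non-blocking poll.
pollCommand :: TQueue Command -> IO (Maybe Command)
pollCommand = atomically . tryReadTQueue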
tabemann/botwars
src/Main.hs
Haskell
bsd-3-clause
22,547
----------------------------------------------------------------------------
-- |
-- Module      : Imported1
-- Copyright   : (c) Sergey Vinokurov 2015
-- License     : BSD3-style (see LICENSE)
-- Maintainer  : serg.foo@gmail.com
----------------------------------------------------------------------------

{-# LANGUAGE TypeOperators #-}

module Imported1 where

foo :: a -> a
foo x = x

bar :: a -> a
bar x = x

($$) :: a -> a -> a
x $$ _ = x

data (:$$:) a b = (:$$$:) a b
sergv/tags-server
test-data/0001module_with_imports/Imported1.hs
Haskell
bsd-3-clause
482
module OpenSet where

import Data.List hiding (union)
import Data.Set as Set

dup :: a -> (a, a)
dup x = (x, x)

-- | Enumerate the families of subsets that contain the empty set and the
-- whole set and are closed under pairwise union and intersection, i.e. the
-- topologies on the given finite set.
openSets :: Ord a => [a] -> [[[a]]]
openSets x = toList $ Set.map (toList . Set.map toList) $ Set.filter isOpen candidates
  where
    (e, u) = (fromList [], fromList x)
    compact = fromList [e, u]
    candidates = Set.map (union compact) $ powerSet (powerSet u Set.\\ compact)
    isOpen o = Set.fold (\a b -> p a && b) True ps
      where
        p (a, b) = uncurry (&&) (intersection a b `member` o, union a b `member` o)
        ps = Set.filter (uncurry (<)) $ uncurry cartesianProduct $ dup (o Set.\\ compact)

main :: IO ()
main = mapM_ go $ zip [1 ..] $ openSets [0, 1, 2]
  where
    go (i, ln) = putStrLn $ show i ++ " : " ++ show ln
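-- | Usage sketch (hypothetical check, not part of the module): assuming the
-- pairwise closure test above is sufficient on finite sets, the families
-- produced by 'openSets' are exactly the topologies on the input set, so for
-- a three-element set the expected count is 29 (OEIS A000798).
expectedCountForThree :: Bool
expectedCountForThree = length (openSets [0, 1, 2 :: Int]) == 29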
cutsea110/aop
src/OpenSet.hs
Haskell
bsd-3-clause
790
module Rules.Eq where import Derivation import Goal import Rules.Utils import Tactic import Term -- H >> (a = b in A) = (a' = b' in A') in U(i) -- H >> A = A' in U(i) -- H >> a = a' in A -- H >> b = b' in A' -- Uses: EQ_EQ eqEQ :: PrlTactic eqEQ (Goal ctx t) = case t of Eq (Eq m1 n1 a1) (Eq m2 n2 a2) (Uni i) -> return $ Result { resultGoals = [ Goal ctx (Eq a1 a2 (Uni i)) , Goal ctx (Eq m1 m2 a1) , Goal ctx (Eq n1 n2 a1) ] , resultEvidence = \d -> case d of [d1, d2, d3] -> EQ_EQ d1 d2 d3 _ -> error "Eq.EQ: Invalid evidence!" } _ -> fail "Eq.EQ does not apply." -- H >> tt = tt in (a = b in A) -- H >> a = b in A -- Uses: EQ_MEM_EQ eqMEMEQ :: PrlTactic eqMEMEQ (Goal ctx t) = case t of Eq TT TT (Eq m n a) -> return $ Result { resultGoals = [ Goal ctx (Eq m n a) ] , resultEvidence = \d -> case d of [d] -> EQ_MEM_EQ d _ -> error "Eq.MEMEQ: Invalid evidence!" } _ -> fail "Eq.MEMEQ does not apply." -- H >> a = b in A -- H >> b = a in A -- Uses: EQ_SYM eqSYM :: PrlTactic eqSYM (Goal ctx t) = case t of Eq m n a -> return $ Result { resultGoals = [ Goal ctx (Eq n m a) ] , resultEvidence = \d -> case d of [d] -> EQ_SYM d _ -> error "Eq.SYM: Invalid evidence!" } _ -> fail "Eq.SYM does not apply." -- H >> [a/x]C -- H, x : A >> C in U(i) -- H >> a = b in A -- H >> [b/x]C -- Uses: EQ_SUBST -- Note that first supplied term should be a = b in A and -- the second one should be C. eqSUBST :: Universe -> Term -> Term -> PrlTactic eqSUBST uni eq pat (Goal ctx t) = case eq of Eq m n a | subst m 0 pat == t -> return $ Result { resultGoals = [ Goal (a <:> ctx) (Eq pat pat (Uni uni)) , Goal ctx eq , Goal ctx (subst n 0 pat) ] , resultEvidence = \d -> case d of [d1, d2, d3] -> EQ_SUBST uni pat d1 d2 d3 _ -> error "Eq.SUBST: Invalid evidence!" } _ -> fail "Eq.SUBST does not apply."
thsutton/cha
lib/Rules/Eq.hs
Haskell
bsd-3-clause
2,365
{-# LANGUAGE FlexibleContexts #-} module Language.Lambda.Semantics.Named.BigStep.Tests ( bigStepTests ) where import Prelude hiding (fromInteger, toInteger) import Data.Char import Bound.Unwrap as BU import Test.Tasty import qualified Test.Tasty.QuickCheck as QC import qualified Test.Tasty.SmallCheck as SC import qualified Test.SmallCheck.Series as SC import Test.Tasty.HUnit import Language.Lambda.Syntax.Named.Exp import Language.Lambda.Semantics.Named.BigStep import Language.Lambda.Syntax.Named.Testdata renderFresh :: Printer String renderFresh (n,i) = n ++ show i normalOrder = mkNormalOrder renderFresh callByValue = mkCallByValue renderFresh callByName = mkCallByName renderFresh bigStepTests = testGroup "tests" [ identityTests , logicTests , arithmeticTests , letrecTests ] -- ----------------------------------------------------------------------------- identityTests = testGroup "IdentityTests" [ testCase "i_ 1" $ normalOrder i_ @=? i_ , testCase "i_ 2" $ normalOrder (i_ # i_) @=? i_ , testCase "i_ 3" $ normalOrder (i_ # k_) @=? k_ ] -- ----------------------------------------------------------------------------- logicTests = testGroup "LogicTests" [ if_Tests , not_Tests , and_Tests , or_Tests , imp_Tests , iff_Tests , logicLawsTests ] if_Tests = testGroup "if_Tests" [ testCase "if_ 1" $ one_ @=? normalOrder (if_ # tru_ # one_ # zro_) , testCase "if_ 2" $ zro_ @=? normalOrder (if_ # fls_ # one_ # zro_) ] not_Tests = testGroup "not_Tests" [ testCase "not_ 1" $ fls_ @=? normalOrder (not_ # tru_) , testCase "not_ 2" $ tru_ @=? normalOrder (not_ # fls_) ] and_Tests = testGroup "and_Tests" [ testCase "and_ 1" $ tru_ @=? normalOrder (and_ # tru_ # tru_) , testCase "and_ 2" $ fls_ @=? normalOrder (and_ # tru_ # fls_) , testCase "and_ 3" $ fls_ @=? normalOrder (and_ # fls_ # tru_) , testCase "and_4 " $ fls_ @=? normalOrder (and_ # fls_ # fls_) ] or_Tests = testGroup "or_Tests" [ testCase "or_ 1" $ tru_ @=? normalOrder (or_ # tru_ # tru_) , testCase "or_ 2" $ tru_ @=? normalOrder (or_ # tru_ # fls_) , testCase "or_ 3" $ tru_ @=? normalOrder (or_ # fls_ # tru_) , testCase "or_ 4" $ fls_ @=? normalOrder (or_ # fls_ # fls_) ] imp_Tests = testGroup "imp_Tests" [ testCase "imp_ 1" $ tru_ @=? normalOrder (imp_ # tru_ # tru_) , testCase "imp_ 2" $ fls_ @=? normalOrder (imp_ # tru_ # fls_) , testCase "imp_ 3" $ tru_ @=? normalOrder (imp_ # fls_ # tru_) , testCase "imp_ " $ tru_ @=? normalOrder (imp_ # fls_ # fls_) ] iff_Tests = testGroup "iff_Tests" [ testCase "iff_ 1" $ tru_ @=? normalOrder (iff_ # tru_ # tru_) , testCase "iff_ 2" $ fls_ @=? normalOrder (iff_ # tru_ # fls_) , testCase "iff_ 3" $ fls_ @=? normalOrder (iff_ # fls_ # tru_) , testCase "iff_ 4" $ tru_ @=? 
normalOrder (iff_ # fls_ # fls_) ] logicLawsTests = testGroup "Laws of Logic" [ deMorganTests ] deMorganTests = testGroup "De Morgan" [ SC.testProperty "De Morgan 1" $ \p q -> let p_ = fromBool p q_ = fromBool q in normalOrder (not_ # (and_ # p_ # q_)) == normalOrder (or_ # (not_ # p_) # (not_ # q_)) , SC.testProperty "De Morgan 2" $ \p q -> let p_ = fromBool p q_ = fromBool q in normalOrder (not_ # (or_ # p_ # q_)) == normalOrder (and_ # (not_ # p_) # (not_ # q_)) ] -- ----------------------------------------------------------------------------- arithmeticTests = testGroup "ArithmeticTests" [ iszro_Tests , scc_Tests , prd_Tests , scc_prd_RelationTests , pls_Tests , mlt_Tests , add_mlt_RelationTests , pow_Tests , leqnat_Tests , eqnat_Tests , fac_Tests ] iszro_Tests = testGroup "iszro_Tests" [ testCase "iszro_ 1" $ normalOrder (iszro_ # zro_) @=? tru_ , testCase "iszro_ 2" $ normalOrder (iszro_ # (scc_ # zro_)) @=? fls_ ] scc_Tests = testGroup "scc_Tests" [ testCase "scc_ 1" $ normalOrder (scc_ # zro_) @=? one_ , QC.testProperty "scc_ 2" $ QC.forAll (interval 0 250) $ \n -> normalOrder (scc_ # (unsafeFromInt n)) == unsafeFromInt (succ n) , scProp "scc_ 3" 100 $ \n -> let pos = SC.getNonNegative n :: Int in normalOrder (scc_ # (unsafeFromInt pos)) == (unsafeFromInt (succ pos)) ] prd_Tests = testGroup "prd_Tests" [ testCase "prd_ 1" $ normalOrder (prd_ # zro_) @=? zro_ , testCase "prd_ 2" $ normalOrder (prd_ # one_) @=? zro_ ] scc_prd_RelationTests = testGroup "scc_prd_RelationTests" [ QC.testProperty "scc_ prd_ 1 (inverse)" $ QC.forAll (interval 0 250) $ \n -> let cn = unsafeFromInt n in normalOrder (prd_ # (scc_ # cn)) == cn , scProp "scc_ prd_ 2 (inverse)" 50 $ \n -> let cn = fromPositive n in normalOrder (prd_ # (scc_ # cn)) == cn , scProp "scc_ prd_ 3 (inverse)" 50 $ \n -> let cn = fromPositive n in normalOrder (scc_ # (prd_ # cn)) == cn ] pls_Tests = testGroup "pls_Tests" [ scProp "pls_ 1 (`zro_` right neutral)" 50 $ \n -> let cn = fromPositive n in normalOrder (add_ # cn # zro_) == cn , scProp "pls_ 2 (`zro_` left neutral)" 50 $ \n -> let cn = fromPositive n in normalOrder (add_ # zro_ # cn) == cn , scProp "sub_ 1 (`zro_` right neutral)" 50 $ \n -> let cn = fromPositive n in normalOrder (sub_ # cn # zro_) == cn , scProp "add_ sub_ (inverse)" 30 $ \n -> let cn = fromPositive n in normalOrder (sub_ # cn # (add_ # cn # zro_)) == zro_ ] mlt_Tests = testGroup "mtl_Tests" [ scProp "mlt_ 1" 30 $ \n -> let cn = fromPositive n in normalOrder (mlt_ # cn # zro_) == zro_ , scProp "mlt_ 2" 30 $ \n -> let cn = fromPositive n in normalOrder (mlt_ # zro_ # cn) == zro_ ] add_mlt_RelationTests = testGroup "add_mlt_RelationTests" [ scProp "add_ mlt_ (distributivity)" 3 $ \n m o -> let cn = fromPositive n cm = fromPositive m co = fromPositive o in normalOrder (mlt_ # (add_ # cn # cm) # co) == normalOrder (add_ # (mlt_ # cn # co) # (mlt_ # cm # co)) ] pow_Tests = testGroup "pow_Tests" [ scProp "pow 1" 10 $ \n -> let cn = fromPositive n in normalOrder (pow_ # cn # zro_) == one_ , scProp "pow 2" 10 $ \n -> let cn = fromPositive n in normalOrder (pow_ # cn # one_ ) == cn , scProp "pow 3" 4 $ \n -> let cn = fromPositive n two_ = scc_ # one_ in normalOrder (pow_ # cn # two_ ) == normalOrder (mlt_ # cn # cn) ] leqnat_Tests = testGroup "leqnat_Tests" [ scProp "leqnat 1: reflexivity" 10 $ \n -> let cn = fromPositive n in normalOrder (leqnat_ # cn # cn ) == tru_ , scProp "leqnat 2: antisymmetry" 3 $ \n m -> let cn = fromPositive n cm = fromPositive m premis = and_ # (leqnat_ # cn # cm ) # (leqnat_ # cm # cn) 
conclusion = fromBool (cn == cm) in normalOrder (imp_ # premis # conclusion) == tru_ , scProp "leqnat 3: transitivity" 3 $ \n m o -> let cn = fromPositive n cm = fromPositive m co = fromPositive o premis = and_ # (leqnat_ # cn # cm ) # (leqnat_ # cm # co) conclusion = leqnat_ # cn # cm in normalOrder (imp_ # premis # conclusion) == tru_ ] eqnat_Tests = testGroup "eqnat_Tests" [ scProp "eqnat 1 (reflexivity)" 10 $ \n -> let cn = fromPositive n in normalOrder (eqnat_ # cn # cn ) == tru_ , scProp "eqnat 2 (symmetry)" 3 $ \n m -> let cn = fromPositive n cm = fromPositive m premis = eqnat_ # cn # cm conclusion = eqnat_ # cm # cn in normalOrder (imp_ # premis # conclusion) == tru_ , scProp "eqnat 3 (transitivity)" 3 $ \n m o -> let cn = fromPositive n cm = fromPositive m co = fromPositive o premis = and_ # (eqnat_ # cn # cm ) # (eqnat_ # cm # co) conclusion = eqnat_ # cn # cm in normalOrder (imp_ # premis # conclusion) == tru_ ] fac_Tests = testGroup "fac_Tests" [ testCase "fac 0 1" $ normalOrder (fac_ # zro_) @=? one_ , testCase "fac 1 1" $ normalOrder (fac_ # one_) @=? one_ , testCase "fac 2 2" $ normalOrder (fac_ # n2_) @=? (normalOrder n2_) , scProp "fac_ golden" 3 $ \n -> let cn = SC.getNonNegative n in normalOrder (fac_ # (unsafeFromInt cn)) == unsafeFromInt (fac cn) ] where fac x = if x == 0 then 1 else x * fac (pred x) -- ----------------------------------------------------------------------------- letrecTests = testGroup "Let Tests" [ testCase "Let 1 (identity)" $ let ltc = Let ("i", i_) (Var"i") in normalOrder ltc @=? i_ ] -- ----------------------------------------------------------------------------- -- ----------------------------------------------------------------------------- -- helper functions unsafeFromInt :: Int -> Exp String unsafeFromInt = maybe (error "unsafeFromInt") id . fromInt fromInt :: Int -> Maybe (Exp String) fromInt i | i < 0 = Nothing | otherwise = Just $ "f" ! "x" ! go i where go 0 = Var "x" go i = Var "f" # go (pred i) toInt :: Exp String -> Maybe Int toInt (Lam f (Lam x body)) = go body where go (Var x) = Just 0 go (Var f `App` arg) = succ <$> go arg go _ = Nothing toInteger _ = Nothing unsafeToInt :: Exp String -> Int unsafeToInt = maybe (error "unsafeToInt") id . toInt fromBool :: Bool -> Exp String fromBool True = tru_ fromBool False = fls_ toBool :: Exp String -> Maybe Bool toBool exp | exp == tru_ = Just True | exp == fls_ = Just False | otherwise = Nothing unsafeToBool :: Exp String -> Bool unsafeToBool = maybe (error "unsafeToBool") id . toBool fromChar :: Char -> Exp String fromChar = unsafeFromInt . ord toChar :: Exp String -> Maybe Char toChar = fmap chr . toInteger unsafeToChar :: Exp String -> Char unsafeToChar = maybe (error "unsafeToChar") id . toChar -- ----------------------------------------------------------------------------- scProp :: SC.Testable IO a => String -> SC.Depth -> a -> TestTree scProp s d = SC.testProperty s . SC.changeDepth (const d) fromNonNegative :: SC.NonNegative Int -> Exp String fromNonNegative = unsafeFromInt . SC.getNonNegative fromPositive :: SC.Positive Int -> Exp String fromPositive = unsafeFromInt . SC.getPositive interval :: (Enum a, Num a) => a -> a -> QC.Gen a interval l u = QC.oneof . fmap return $ [l .. u]
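-- | Side note (illustrative only, not used by the tests): the Church-numeral
-- helpers above have a direct analogue on native Haskell functions, which is
-- a convenient way to sanity-check the intended encode/decode round trip
-- without building an Exp syntax tree.
churchFromInt :: Int -> (a -> a) -> a -> a
churchFromInt n f x = iterate f x !! max 0 n

churchToInt :: ((Int -> Int) -> Int -> Int) -> Int
churchToInt c = c (+ 1) 0

-- Round trip: churchToInt (churchFromInt n) == n for every n >= 0.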
julmue/UntypedLambda
test/Language/Lambda/Semantics/Named/BigStep/Tests.hs
Haskell
bsd-3-clause
11,361
-------------------------------------------------------------------------------- -- | -- Module : Sequence.Location -- Copyright : (c) [2009..2010] Trevor L. McDonell -- License : BSD -- -- Locate sequences fragments from index keys -- -------------------------------------------------------------------------------- module Sequence.Location (SKey, lookup) where import Mass import Sequence.Fragment import Util.Misc import Prelude hiding (lookup) import Numeric.Search.Range import qualified Data.ByteString.Lazy as L import qualified Data.Vector.Generic as G -------------------------------------------------------------------------------- -- Database Search -------------------------------------------------------------------------------- type SKey = Int -- -- Find the last index in the ordered array whose value is less than or equal to -- the given search element. Binary search, O(log n). -- searchVector :: (Ord a, G.Vector v a) => v a -> a -> Maybe Int searchVector vec x = searchFromTo (\i -> vec G.! i > x) 0 (G.length vec - 1) -- -- Locate a particular sequence in the database -- lookup :: SequenceDB -> SKey -> Maybe Fragment lookup db k = do -- Index of the sequence this fragment derives from -- seqIdx <- searchVector (G.tail (dbFragSeg db)) (fromIntegral k) -- Extract the supporting information for the fragment -- let (res,c,n) = dbFrag db G.! k [a,b] = G.toList $ G.slice seqIdx 2 (dbIonSeg db) hdr = dbHeader db G.! seqIdx aa = G.toList $ G.slice (fromIntegral c) (fromIntegral (n-c+1)) (dbIon db) ca = if c > a then dbIon db G.! (fromIntegral c-1) else c2w '-' na = if n < b-1 then dbIon db G.! (fromIntegral n+1) else c2w '-' return $ Fragment (res + massH + massH2O) hdr (L.pack $ [ca,c2w '.'] ++ aa ++ [c2w '.',na])
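-- | Self-contained sketch of the predicate-style binary search that
-- 'searchVector' relies on (the real code uses searchFromTo from the
-- binary-search package): find the smallest index in [lo, hi] at which a
-- monotone predicate holds. Illustrative only; O(log n) like the original.
searchFromTo' :: (Int -> Bool) -> Int -> Int -> Maybe Int
searchFromTo' p lo hi
  | lo > hi   = Nothing
  | p hi      = Just (go lo hi)
  | otherwise = Nothing
  where
    -- invariant: the predicate holds at 'high'
    go low high
      | low >= high = high
      | p mid       = go low mid
      | otherwise   = go (mid + 1) high
      where mid = (low + high) `div` 2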
tmcdonell/hfx
src/haskell/Sequence/Location.hs
Haskell
bsd-3-clause
1,869
module Playground01 where import Data.List simple :: t -> t simple x = x calcChange :: (Num t, Ord t) => t -> t -> t calcChange owed given = if change > 0 then change else 0 where change = given - owed doublePlusTwo :: Num t => t -> t doublePlusTwo x = doubleX + 2 where doubleX = x * 2 yorga :: Integer yorga = 9 doubleMe :: Num a => a -> a doubleMe x = x * 2 -- (\x -> x) "hi" body :: Ord t => t -> t -> t body sumSquare squareSum = if sumSquare > squareSum then sumSquare else squareSum body' :: Ord a => a -> a -> a body' = (\sumSquare squareSum -> if sumSquare > squareSum then sumSquare else squareSum) body'' :: Num a => a -> a -> a body'' = (\x y -> x + y) body''' :: Num a => a -> a -> a body''' x y = x + y sumSquareOrSquareSum :: (Ord a, Num a) => a -> a -> a sumSquareOrSquareSum x y = body' (x^2 + y^2) ((x+y)^2) sumSquareOrSquareSum' :: (Num t, Ord t) => t -> t -> t sumSquareOrSquareSum' x y = let sumSquare = (x^2 + y^2) squareSum = ((x+y)^2) in if sumSquare > squareSum then sumSquare else squareSum overwrite :: Num t => t1 -> t overwrite x = let x = 2 in let x = 3 in let x = 4 in x k :: Integer k = 10 add :: Integer -> Integer add y = y + k add' :: Num a => a -> a add' y = (\k -> y + k) 3 add'' :: Num a => t -> a add'' y = (\y -> (\k -> y + k ) 1 ) 2 --Q2.2 counter :: Num a => t -> a counter x = let x = x + 1 in let x = x + 1 in x counter' :: Num a => a -> a counter' x = (\x -> x + 1) x counter'' :: Num a => a -> a counter'' x = (\x -> (\x -> x + 1) x + 1 ) x --listing 3.3 ifEven :: Integral t => (t -> t) -> t -> t ifEven myFunction x = if even x then myFunction x else x inc :: Num a => a -> a inc n = n + 1 double :: Num a => a -> a double n = n * 2 square :: Num a => a -> a square n = n ^ 2 ifEvenInc :: Integral t => t -> t ifEvenInc n = ifEven inc n ifEvenDouble :: Integral t => t -> t ifEvenDouble n = ifEven double n ifEvenSquare :: Integral t => t -> t ifEvenSquare n = ifEven square n --esempio con la lambda ifEvenLambaDouble :: Integral k => k -> k ifEvenLambaDouble k' = ifEven (\x -> x * 2) k' --QUICK CHECK 3.1 Write a lambda function for cubing x and pass it to ifEven ifEvenLambaCube :: Integral a => a -> a ifEvenLambaCube k' = ifEven (\x -> x ^ 3) k' --listing 3.4 names :: [([Char], [Char])] names = [("Ian", "Curtis"), ("Bernard","Sumner"), ("Peter", "Hook"), ("Stephen","Morris")] -- Listing 3.4 names sortedNames :: [(String, String)] sortedNames = sort names -- lastNamesComparator :: Ord(a,a) => a -> a -> Ordering lastNamesComparator :: Ord a1 => (a2, a1) -> (a, a1) -> Ordering --lastNamesComparator :: Ord a1 => ((a2, a1), (a, a1)) -> Ordering lastNamesComparator name1 name2 = let lastName1 = snd name1 in let lastName2 = snd name2 in if lastName1 > lastName2 then GT else if lastName1 < lastName2 then LT else EQ -- Listing 3.5 compareLastNames compareLastNames :: Ord a1 => (a2, a1) -> (a, a1) -> Ordering compareLastNames name1 name2 = if lastName1 > lastName2 then GT else if lastName1 < lastName2 then LT else EQ where lastName1 = snd name1 lastName2 = snd name2 -- esempio -- compareLastNames ((\n -> head n) names) ((\n -> head n) names) -- uso -- sortBy compareLastNames names -- QUICK CHECK 3.2 In compareLastNames -- you didn’t handle the case of having two last names that are the same -- but with different first names. Modify the compareLastNamesfunction to compare first names and use it to fix -- compareLastNames. 
compareLastNames' :: Ord a => ([a], [a]) -> ([a], [a]) -> Ordering compareLastNames' name1 name2 = if complexName1 > complexName2 then GT else if complexName1 < complexName2 then LT else EQ where lastName1 = snd name1 lastName2 = snd name2 firstName1 = fst name1 firstName2 = fst name2 complexName1 = lastName1 ++ firstName1 complexName2 = lastName2 ++ firstName2
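-- An alternative to compareLastNames' above, for reference: Ordering has a
-- Semigroup/Monoid instance, so two comparisons can be chained with (<>);
-- the first-name comparison is consulted only when the last names are EQ.
-- ((<>) on Ordering is in the Prelude on GHC >= 8.4; otherwise import
-- Data.Monoid.)
compareLastNames'' :: (Ord a, Ord b) => (a, b) -> (a, b) -> Ordering
compareLastNames'' name1 name2 =
  compare (snd name1) (snd name2) <> compare (fst name1) (fst name2)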
stefanocerruti/haskell-primer-alpha
src/Playground01.hs
Haskell
bsd-3-clause
5,208
-- |
-- Module      : Network.Machine.Protocol.SMTP
-- Copyright   : Lodvær 2015
-- License     : BSD3
--
-- Maintainer  : Lodvær <lodvaer@gmail.com>
-- Stability   : provisional
-- Portability : unknown
--
-- SMTP machines.
module Network.Machine.Protocol.SMTP where

-- TODO
lodvaer/machines-network
src/Network/Machine/Protocol/SMTP.hs
Haskell
bsd-3-clause
285
module Paths_simple ( version, getBinDir, getLibDir, getDataDir, getLibexecDir, getDataFileName, getSysconfDir ) where import qualified Control.Exception as Exception import Data.Version (Version(..)) import System.Environment (getEnv) import Prelude catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a catchIO = Exception.catch version :: Version version = Version [0,1,0,0] [] bindir, libdir, datadir, libexecdir, sysconfdir :: FilePath bindir = "/home/chuck/Documents/simple/.stack-work/install/x86_64-linux/lts-6.17/7.10.3/bin" libdir = "/home/chuck/Documents/simple/.stack-work/install/x86_64-linux/lts-6.17/7.10.3/lib/x86_64-linux-ghc-7.10.3/simple-0.1.0.0-3KtLGDpFPvjB6VnwWh4q2E" datadir = "/home/chuck/Documents/simple/.stack-work/install/x86_64-linux/lts-6.17/7.10.3/share/x86_64-linux-ghc-7.10.3/simple-0.1.0.0" libexecdir = "/home/chuck/Documents/simple/.stack-work/install/x86_64-linux/lts-6.17/7.10.3/libexec" sysconfdir = "/home/chuck/Documents/simple/.stack-work/install/x86_64-linux/lts-6.17/7.10.3/etc" getBinDir, getLibDir, getDataDir, getLibexecDir, getSysconfDir :: IO FilePath getBinDir = catchIO (getEnv "simple_bindir") (\_ -> return bindir) getLibDir = catchIO (getEnv "simple_libdir") (\_ -> return libdir) getDataDir = catchIO (getEnv "simple_datadir") (\_ -> return datadir) getLibexecDir = catchIO (getEnv "simple_libexecdir") (\_ -> return libexecdir) getSysconfDir = catchIO (getEnv "simple_sysconfdir") (\_ -> return sysconfdir) getDataFileName :: FilePath -> IO FilePath getDataFileName name = do dir <- getDataDir return (dir ++ "/" ++ name)
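-- | Usage sketch (illustrative only; this module is normally generated by
-- Cabal/Stack rather than written by hand): resolve a bundled data file at
-- runtime, honouring the simple_datadir environment override above.
readBundled :: FilePath -> IO String
readBundled name = getDataFileName name >>= readFile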
Chuck-Aguilar/haskell-opencv-work
.stack-work/dist/x86_64-linux/Cabal-1.22.5.0/build/autogen/Paths_simple.hs
Haskell
bsd-3-clause
1,617
{-# LANGUAGE ForeignFunctionInterface #-} module Network.UV ( Loop , defaultLoop , createLoop , run ) where import Foreign.C import Foreign.Ptr import Network.UV.TCP import Network.UV.Internal.UV foreign import ccall unsafe "uv_loop_new" c_uv_loop_new :: IO (Ptr a) foreign import ccall unsafe "uv_default_loop" c_uv_default_loop :: IO (Ptr a) foreign import ccall unsafe "uv_run" c_uv_run :: Ptr a -> IO CInt -- | Get the default loop. -- -- This function always returns the same loop. defaultLoop :: IO Loop defaultLoop = do ptr <- c_uv_default_loop return $ Loop ptr -- | Create a new loop. createLoop :: IO Loop createLoop = do ptr <- c_uv_loop_new return $ Loop ptr -- | Run the given loop. run :: Loop -> IO () run (Loop ptr) = c_uv_run ptr >> return ()
aardvarrk/hlibuv
src/Network/UV.hs
Haskell
bsd-3-clause
835
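A minimal usage sketch for the hlibuv wrapper above, using only the exports shown (defaultLoop and run); what an empty loop actually does when run is up to libuv itself, so treat this as illustrative:

main :: IO ()
main = do
    loop <- defaultLoop   -- grab libuv's shared default loop
    run loop              -- run the loop until it has no active work left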
-- | Allows the abstract syntax tree to be compiled into C++ template -- metaprogram. module Compiler.Compile ( -- * Module compiling compileModule , compileTopLevel -- * Top level entities compiling , compileType , compileDataDef , compileValDef -- * Expression compiling , compileExpr ) where import Data.Char import Data.List import Compiler.AST import Utility -- | Left opening brace surrounded by newlines. lbrace :: String lbrace = "\n{\n" -- | Right closing brace surrounded by newlines. rbrace :: String rbrace = "\n};\n" -- | Print a @struct@ given its name and contents. -- -- >>> putStrLn $ struct "s" "int x;" -- struct s -- { -- int x; -- }; struct :: String -- ^ Name of the @struct@. -> String -- ^ Content of the @struct@. -> String struct name content = concat ["struct ", name, lbrace, content, rbrace] -- | Print a @template@ given its name, contents and the (only) template -- argument. -- -- >>> putStrLn $ template "x" "s" "int y;" -- template <typename x> -- struct s -- { -- int y; -- }; template :: String -- ^ Name of the template argument. -> String -- ^ Name of the @struct@. -> String -- ^ Content of the @struct@. -> String template arg name content = "template <typename " ++ arg ++ ">\n" ++ struct name content -- | Print a @template@ @struct@ forward declaration. -- -- >>> putStrLn $ fwdTemplate "s" -- template <typename> -- struct s; fwdTemplate :: String -- ^ Name of the @struct@. -> String fwdTemplate name = "template <typename>\nstruct " ++ name ++ ";\n" -- | Print a @typedef@ which identifies type expression @what@ with @type@. -- -- >>> putStrLn $ typedef "int" -- typedef int type; typedef :: String -- ^ Type expression. -> String typedef what = "typedef " ++ what ++ " " ++ ty ++ ";" -- | Print a type expression extracting inner @type@ from another type. -- -- >>> putStrLn $ innerType "vector" -- typename vector::type innerType :: String -- ^ Type expression. -> String innerType what = "typename " ++ what ++ "::" ++ ty -- | Print a nested hierarchy of @struct@ures used for lambda abstraction. -- -- >>> putStrLn $ innerApply "x" "s" "int x;" -- struct s -- { -- struct type -- { -- template <typename x> -- struct apply -- { -- int x; -- }; -- }; -- }; innerApply :: String -- ^ Name of the template argument. -> String -- ^ Name of the @struct@. -> String -- ^ Content of the @struct@. -> String innerApply arg name = struct name . struct ty . template arg apply -- | Print a list of declarations. -- -- This is just a name-flavored 'concat'. decls :: [String] -- ^ List of declarations. -> String decls = concat -- | 'String' constant for inner @template@ used for lambda abstractions. apply :: String apply = "apply" -- | 'String' constant for inner @typedef@s. ty :: String ty = "type" -- | 'String' constant for the @dummy@ type. dummy :: String dummy = "__dummy" -- | Compile whole module. -- -- Compiles all top level entities contained in module. compileModule :: Module -> String compileModule (Module m) = intercalate sep $ map compileTopLevel m where sep = "\n\n" -- | Compile a top level declaration. compileTopLevel :: TopLevel -> String compileTopLevel tl = case tl of Data dd -> compileDataDef dd Value vd -> compileValDef vd Type _ -> "" -- Types are erased. Assume _ -> "" -- So are type assumptions. -- | Compile a type signature. -- -- Since type signatures have no correspondence in the template C++ code, -- no C++ code is produced. compileType :: TypeSig -> String compileType _ = "" -- | Compile a data definition. 
-- -- Note that since this language doesn't allow pattern matching, this -- function will automatically define an appropriate eliminator for the -- data type. compileDataDef :: DataDef -> String compileDataDef (DataDef (TyCon tyConName _) variants) = decls [ intercalate sep $ zipWith defineCtor variants [0 ..] , defineElim variants ] where sep = "\n\n" localArg n = "__ctor_arg" ++ show n localStruct n = "__ctor_local" ++ show n ctorStruct = "__ctor_top_local" elimStruct = "__elim_top_local" primDataStruct = "__data" applyAlt = "apply_alt" -- Compile a single data constructor. defineCtor :: Variant -- ^ Data constructor. -> Int -- ^ Numeric suffix for @struct@s. -> String defineCtor (DataCon cname ts) n = struct cname . decls $ [ go 0 ctorStruct [] ts , typedef $ innerType ctorStruct ] where go :: Int -> String -> [String] -> [Type] -> String go _ name args [] = struct name . typedef . concat $ [ primDataStruct , "<" , show n , ", " , dummy , concatMap ((", " ++) . innerType) (reverse args) , ">" ] go u name args (_:rest) = innerApply localA name . decls $ [ go (u + 1) localS (localA:args) rest , typedef $ innerType localS ] where localA = localArg u localS = localStruct u -- Compile an eliminator for the whole data type. defineElim :: [Variant] -> String defineElim vs = struct (firstToLower tyConName) . decls $ [ go 0 elimStruct [] vs , typedef $ innerType elimStruct ] where firstToLower [] = [] firstToLower (c:cs) = toLower c:cs go :: Int -- ^ Numeric suffix for @struct@s. -> String -- ^ Outer @struct@ name. -> [String] -- ^ Names of eliminator arguments. -> [Variant] -- ^ Data constructors. -> String go _ name args [] = struct name . struct ty . decls . intersperse "\n" $ [ fwdTemplate applyAlt ] ++ zipWith3 handleCase vs (reverse args) [0 ..] ++ [ template typeArg apply . typedef . innerType . concat $ [ applyAlt , "<" , innerType typeArg , ">" ] ] where typeArg = "__type_arg" go u name args (_:rest) = innerApply localA name . decls $ [ go (u + 1) localS (localA:args) rest , typedef $ innerType localS ] where localA = localArg u localS = localStruct u -- Compile a @template@ specialization which deconstructs @n@-th -- constructor and applies the corresponding elimination function to -- all its fields. handleCase :: Variant -- ^ 'Variant' to be compiled. -> String -- ^ Argument name. -> Int -- ^ Argument position. -> String handleCase (DataCon _ ts) arg n = concat [ "template <typename " , dummy , concatMap (", typename " ++) args , ">\nstruct " , applyAlt , "<" , primDataStruct , "<" , show n , ", " , dummy , concatMap (", " ++) args , "> >" , lbrace , decls $ wrapFields ++ [ compileExpr localS . foldl1 App . map Var $ arg:map (extra ++) args , typedef $ innerType localS ] , rbrace ] where -- Names of all constructor fields. args = zipWith (const $ (fieldA ++) . show) ts [0 :: Int ..] -- Create a wrapper @struct@ures so the data type can -- contain the values directly rather than just the -- expression names. -- -- This would otherwise lead to all kinds of problems with -- expressions not being interchangeable even though their -- values are. wrapFields = map wrapStruct args where wrapStruct name = struct (extra ++ name) $ typedef name fieldA = "__field_arg" localS = "__local_case" -- Prefix for the wrapped structures. extra = "__extra" -- | Compile a value definition. compileValDef :: ValueDef -> String compileValDef (ValueDef name expr) = struct name . decls $ [ compileExpr defStruct expr , typedef . 
innerType $ defStruct ] where defStruct = "__def" -- | Compile and expression given a name of @struct@ it should be declared in. compileExpr :: String -- ^ Name of the @struct@. -> Expr -> String compileExpr = go (0 :: Int) where localStruct n = "__local" ++ show n leftStruct n = "__left" ++ show n rightStruct n = "__right" ++ show n go :: Int -- ^ Numeric suffix for @struct@s. -> String -- ^ Outer @struct@ name. -> Expr -- ^ Expression to be compiled. -> String go _ name (Var v) = struct name . typedef . innerType $ v go u name (Lam x expr) = innerApply x name . decls $ [ go (u + 1) local expr , typedef $ innerType local ] where local = localStruct u go u name (App e1 e2) = struct name . decls $ [ go (u + 1) left e1 , go (u + 1) right e2 , typedef . concat $ [ "typename " , left , "::type::template apply<" , right , ">::type" ] ] where left = leftStruct u right = rightStruct u go u name (Let dec expr) = struct name . decls $ map compileValDef dec ++ [ go (u + 1) local expr , typedef $ innerType local ] where local = localStruct u go u name (SetType expr _) = go u name expr go _ name (NumLit n) = struct name . typedef $ "Int<" ++ show n ++ ">" go _ name (BoolLit b) = struct name . typedef $ "Bool<" ++ uncap (show b) ++ ">" -- Fixed point operator is transformed into language primitive -- "fix" and a lambda abstraction. go u name (Fix x expr) = go u name (App (Var "fix") (Lam x expr))
vituscze/norri
src/Compiler/Compile.hs
Haskell
bsd-3-clause
10,491
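An illustrative call into the compiler above. The constructor shapes (ValueDef, Lam and Var taking plain strings) are inferred from how Compiler.Compile pattern-matches them, so this is a sketch against assumed Compiler.AST definitions rather than a verified example:

-- Compile idFn = \x -> x into its C++ template-metaprogram encoding.
idFnCpp :: String
idFnCpp = compileValDef (ValueDef "idFn" (Lam "x" (Var "x")))
-- Produces, roughly: struct idFn { ... a nested struct whose template apply
-- binds x and whose inner typedef unwraps typename x::type ... };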
{-# LANGUAGE DeriveGeneric , OverloadedStrings #-} module Lib where import Control.Monad import Data.Aeson import qualified Data.ByteString.Lazy.Char8 as BL import qualified Data.Text as T import qualified Data.Text.IO as T import Data.Yaml import GHC.Generics import Network.Nats import System.Exit import System.IO import Build import Debug.Trace data ServerConfig = ServerConfig { natsConf :: NatsConf } deriving( Eq, Generic, Show ) instance FromJSON ServerConfig instance ToJSON ServerConfig data NatsConf = NatsConf { natsHost :: String , natsPort :: Int } deriving( Eq, Generic, Show ) instance FromJSON NatsConf instance ToJSON NatsConf -- | Read config from the yaml file at the given path. -- @ -- readConfig "examples/config.yaml" -- @ readConfig :: FilePath -> IO (Either String ServerConfig) readConfig filePath = do eitherConf <- decodeFileEither filePath case eitherConf of Left ex -> return . Left $ prettyPrintParseException ex Right conf -> return $ Right conf -- | Spin up the server. launch :: FilePath -> IO () launch fp = do eitherConf <- readConfig fp case eitherConf of Left err -> putStrLn err Right conf -> launch' conf launch' :: ServerConfig -> IO () launch' conf = do let addr = buildAddr conf nats <- trace addr $ connect addr sid <- subscribe nats "build" Nothing $ \_ _ msg _ -> buildCb msg loop nats where buildAddr :: ServerConfig -> String buildAddr conf = let nconf = natsConf conf in "nats://" ++ (natsHost nconf) ++ ":" ++ (show $ natsPort nconf) loop :: Nats -> IO () loop nats = do str <- getLine if str == "exit" then exitWith ExitSuccess else do -- Allow manual testing for now publish nats "build" $ BL.pack str loop nats buildCb :: BL.ByteString -> IO () buildCb msg = case eitherDecode msg of Left err -> error $ show err -- FIXME: Remove this! Right buildSpec -> runBuild buildSpec >> return () -- FIXME: What!!?? Why!?
wayofthepie/cspawn
src/Lib.hs
Haskell
bsd-3-clause
2,119
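A sketch of the configuration shape readConfig expects. Because the FromJSON/ToJSON instances are derived generically, the YAML keys mirror the record field names; the host and port used here are made-up placeholder values:

sampleConfig :: ServerConfig
sampleConfig = ServerConfig { natsConf = NatsConf { natsHost = "127.0.0.1", natsPort = 4222 } }
-- Data.Yaml.encode sampleConfig yields, roughly:
--   natsConf:
--     natsHost: 127.0.0.1
--     natsPort: 4222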
{-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TupleSections #-} {-# LANGUAGE TypeFamilies #-} module Retcon.Network.Server where import Control.Applicative import Control.Concurrent import Control.Concurrent.Async import Control.Error.Util () import qualified Control.Exception as E import Control.Lens hiding (Context, coerce) import Control.Monad import Control.Monad.Catch import Control.Monad.Error.Class import Control.Monad.Reader import Control.Monad.Trans.Except import qualified Data.Aeson.Diff as D import Data.Binary import qualified Data.ByteString as BS hiding (unpack) import qualified Data.ByteString.Char8 as BS (unpack) import Data.ByteString.Lazy (fromStrict, toStrict) import qualified Data.ByteString.Lazy as LBS import Data.Coerce import Data.Either import qualified Data.List as L import Data.List.NonEmpty hiding (filter, length, map) import qualified Data.Map as M import Data.Maybe import Data.Monoid import Data.String import qualified Data.Text as T import qualified Data.Text.Encoding as T import Data.Traversable () import System.Log.Logger import qualified System.Metrics as Ekg import qualified System.Remote.Monitoring as Ekg import System.ZMQ4 import Retcon.Configuration import Retcon.DataSource (runDSMonad) import qualified Retcon.DataSource as DS import Retcon.Diff import Retcon.Document import Retcon.Identifier import Retcon.Monad import Retcon.Network.Ekg import Retcon.Network.Protocol import Retcon.Store import Retcon.Store.PostgreSQL type ErrorMsg = String -------------------------------------------------------------------------------- -- * Server data ServerState = ServerState { _serverContext :: Context -- ^ ZMQ context , _serverSocket :: Socket Rep -- ^ ZMQ socket , _serverConfig :: Configuration -- ^ retcond config , _serverStore :: PGStore -- ^ Internal data store, shared between all server threads , _serverEkgServer :: Ekg.Server -- ^ Ekg server } makeLenses ''ServerState -- | Name of server component for logging. logName :: String logName = "Retcon.Server" -- | Spawn a thread serving the retcon API and a number of worker threads -- to process requests accepted by that server. -- spawnServer :: Configuration -> Int -> IO () spawnServer cfg n = do _ <- bracket start stop $ \state -> do -- Spawn a server implementing the protocol and some workers api <- spawnServerAPI state peasants <- spawnServerWorkers state -- Ensures workers die if the API server does. mapM_ (link2 api) peasants -- Wait for all of the API server or worker threads to finish. mapM_ wait (api:peasants) return () where spawnServerAPI :: ServerState -> IO (Async ()) spawnServerAPI = async . flip runProtocol protocol spawnServerWorkers :: ServerState -> IO [Async ()] spawnServerWorkers state = replicateM n (async $ worker (_serverStore state) cfg) start :: IO ServerState start = do noticeM logName "Starting Server" -- Setup ekg ekgStore <- Ekg.newStore initialiseMeters ekgStore cfg ekgServer <- Ekg.forkServerWith ekgStore "localhost" 8888 let (zmq_conn, _, pg_conn) = configServer cfg ctx <- context sock <- socket ctx Rep bind sock zmq_conn db <- initBackend (PGOpts pg_conn) return $ ServerState ctx sock cfg db ekgServer stop :: ServerState -> IO () stop state = do closeBackend $ state ^. serverStore close $ state ^. serverSocket term $ state ^. serverContext killThread $ Ekg.serverThreadId $ state ^. 
serverEkgServer noticeM logName "Stopped Server" -------------------------------------------------------------------------------- -- * Protocol Implementation -- | A monad which wraps up some state, some error handling, and some IO to -- implement the server side of retcond. newtype Protocol a = Proto { unProtocol :: ExceptT APIError (ReaderT ServerState IO) a } deriving (Applicative, Functor, Monad, MonadError APIError, MonadIO, MonadReader ServerState) instance MonadThrow Protocol where throwM = liftIO . E.throwIO instance MonadCatch Protocol where (Proto (ExceptT m)) `catch` f = Proto . ExceptT $ m `catch` (runExceptT . unProtocol . f) -- | Execute a 'Protocol' action. runProtocol :: ServerState -> Protocol a -> IO a runProtocol s act = do res <- flip runReaderT s . runExceptT . unProtocol $ act case res of Left e -> throwM e Right a -> return a -- | Server protocol handler. protocol :: Protocol () protocol = loop where loop = do sock <- _serverSocket <$> ask -- Read a response from the client. cmd <- liftIO $ receiveMulti sock -- Decode and process it. (status, resp) <- case cmd of [hdr, req] -> catchAndInject . join $ dispatch <$> (toEnum <$> decodeStrict hdr) <*> pure (fromStrict req) _ -> throwError InvalidNumberOfMessageParts -- Send the response to the client. liftIO . sendMulti sock . fromList $ [encodeStrict status, resp] -- Play it again, Sam. loop dispatch :: SomeHeader -> LBS.ByteString -> Protocol (Bool, BS.ByteString) dispatch (SomeHeader hdr) body = (True,) <$> case hdr of HeaderConflicted -> encodeStrict <$> listConflicts (decode body) HeaderResolve -> encodeStrict <$> resolveConflict (decode body) HeaderChange -> encodeStrict <$> notify (decode body) InvalidHeader -> return . encodeStrict $ InvalidResponse -- Catch exceptions and inject them into the monad as errors. -- -- TODO: Chain together the catching and handling of difference Exception -- types and return more specific errors, if available. catchAndInject :: Protocol (Bool, BS.ByteString) -> Protocol (Bool, BS.ByteString) catchAndInject act = catchError (catch act injectSomeException) reportAPIError where injectSomeException :: (MonadIO m, MonadError APIError m) => SomeException -> m a injectSomeException e = do liftIO . errorM logName . fromString $ "Intercepted error to forward to client: " <> show e throwError UnknownServerError -- Handle an error in executing operations by sending it back to the client. reportAPIError :: APIError -> Protocol (Bool, BS.ByteString) reportAPIError e = do liftIO . errorM logName . fromString $ "Could not process message: " <> show e return (False, toStrict . encode . fromEnum $ e) -- | Process a request for unresolved conflicts. listConflicts :: RequestConflicted -> Protocol ResponseConflicted listConflicts RequestConflicted = do liftIO $ infoM logName "Listing conflicts" conflicts <- liftIO . lookupConflicts =<< view serverStore return $ ResponseConflictedSerialised $ fmap mkRespItem conflicts where mkRespItem ConflictResp{..} = ResponseConflictedSerialisedItem _conflictRawDoc _conflictRawDiff (coerce _conflictDiffID) (coerce _conflictRawOps) -- | Process and resolve a conflict. resolveConflict :: RequestResolve -> Protocol ResponseResolve resolveConflict (RequestResolve diffID opIDs) = do store <- view serverStore liftIO . infoM logName $ "Resolving conflict: " <> show diffID new <- composeNewDiff store liftIO $ addWork store (WorkApplyPatch diffID $ new ^. 
patchDiff) return ResponseResolve where composeNewDiff store = do things <- liftIO $ lookupDiffConflicts store opIDs return $ Patch Unamed $ D.Patch $ map _ops things -- | Notification of a change to be queued for processing. notify :: RequestChange -> Protocol ResponseChange notify (RequestChange nt) = do let ent_name = nt ^. notificationEntity src_name = nt ^. notificationSource fid = nt ^. notificationForeignID cfg <- _serverConfig <$> ask store <- _serverStore <$> ask liftIO . infoM logName . T.unpack $ "Received change notification for: " <> ename ent_name <> "." <> sname src_name <> "/" <> fid let m_ds = do Entity{..} <- M.lookup ent_name (configEntities cfg) M.lookup src_name entitySources case m_ds of Nothing -> do liftIO . errorM logName . T.unpack $ "Unknown entity or source: " <> ename ent_name <> "." <> sname src_name throwError UnknownKeyError Just _ -> liftIO . addWork store . WorkNotify $ ForeignKey ent_name src_name fid return ResponseChange -------------------------------------------------------------------------------- -- * Asynchronous Server Workers -- | Get a work item from the work queue, apply a constant backoff if there is -- nothing in the queue. -- -- This function must not getWorkBackoff :: Store store => store -> IO (WorkItemID, WorkItem) getWorkBackoff store = do work <- getWork store case work of Nothing -> threadDelay 50000 >> getWorkBackoff store Just x -> return x -- | A worker for the retcond server. -- These workers cannot die, they simply log any errors and keep going. -- worker :: Store store => store -> Configuration -> IO () worker store cfg = go where -- Get a work item from the queue, mark it as busy and try to complete it. -- If all goes well, mark the work as finished when done, otherwise signal -- it as free. -- go = do bracketOnError getIt ungetIt completeIt go getIt = liftIO $ getWorkBackoff store ungetIt = ungetWork store . fst completeIt (work_id, item) = do case item of WorkNotify fk -> do liftIO . debugM logName $ "Processing a notifcation: " <> show fk processNotification store cfg fk WorkApplyPatch did a_diff -> do liftIO . debugM logName $ "Processing a diff: " <> show did processDiff store cfg did a_diff completeWork store work_id -- notifications processNotification :: Store store => store -> Configuration -> ForeignKey -> IO () processNotification store cfg fk@(ForeignKey{..}) = do let x = do e <- M.lookup fkEntity (configEntities cfg) d <- M.lookup fkSource (entitySources e) return (e, d) case x of Nothing -> liftIO . criticalM logName $ "Unknown key in workqueue: " <> show fk Just (entity, source) -> do ik <- liftIO $ lookupInternalKey store fk doc <- runDSMonad $ DS.readDocument source fk let allSources = M.elems (entitySources entity) sources = L.delete source allSources liftIO $ case (ik, doc) of (Nothing, Left e) -> notifyProblem (RetconUnknown $ show e) (Nothing, Right d) -> notifyCreate store sources fk d (Just i, Left _) -> notifyDelete store sources i (Just i, Right _) -> notifyUpdate store allSources i (entityPolicy entity) -- | Creates a new internal document to reflect a new foreign change. Update -- all given data sources of the change. -- -- Caller is responsible for: ensuring the datasources exclude the one from -- which the event originates. 
-- notifyCreate :: Store store => store -> [DataSource] -> ForeignKey -> Document -> IO () notifyCreate store datasources fk@(ForeignKey{..}) doc@(Document{..}) = do infoM logName $ "CREATE: " <> show fk -- Create an internal key associated with the new document ik <- createInternalKey store fkEntity recordForeignKey store ik fk -- Create an initial document recordInitialDocument store ik doc -- Update other sources in the entity forM_ datasources (createDoc ik) -- Update ekg incCreates fkEntity where createDoc ik ds = do x <- runDSMonad $ DS.createDocument ds $ graftDoc doc ds case x of Left e -> errorM logName ("notifyCreate: " <> show e) Right f -> recordForeignKey store ik f graftDoc Document{..} DataSource{..} = Document sourceEntity sourceName _documentContent -- | Deletes internal document to reflect the foreign change. Update -- all given data sources of the change. -- -- Caller is responsible for: ensuring the datasources exclude the one from -- which the event originates. -- notifyDelete :: Store store => store -> [DataSource] -> InternalKey -> IO () notifyDelete store datasources ik = do infoM logName $ "DELETE: " <> show ik forM_ datasources deleteDoc -- Update ekg incDeletes $ ikEntity ik where deleteDoc ds = do f <- lookupForeignKey store (sourceName ds) ik case f of Nothing -> do warningM logName $ "notifyDelete: unable to find foreign key for internal ID " <> show ik <> "." Just fk -> do -- Delete the document hushBoth $ runDSMonad $ DS.deleteDocument ds fk -- Delete known foreign key void $ deleteForeignKey store fk -- Delete internal bookkeeping void $ deleteInitialDocument store ik deleteInternalKey store ik -- | Updates internal document to reflect the foreign change. Update -- all given data sources of the change. -- notifyUpdate :: Store store => store -> [DataSource] -> InternalKey -> MergePolicy -> IO () notifyUpdate store datasources ik policy = do infoM logName $ "UPDATE: " <> show ik -- Fetch documents from all data sources. docs <- mapM (getDocument store ik) datasources let (_missing, valid) = partitionEithers docs -- Load (or calculate) the initial document. initial <- lookupInitialDocument store ik >>= maybe (calculate valid) return -- Extract and merge patches. let (merged, rejects) = mergeAll policy $ map (diff policy initial) valid if null rejects then debugM logName $ "No rejected changes processing " <> show ik else infoM logName $ "Rejected " <> show (length rejects) <> " changes in " <> show ik if (null rejects) && (mempty == view patchDiff merged) then debugM logName $ "Empty diff for " <> show ik <> ", skipping." else do -- Record changes in history. did <- recordDiffs store ik (merged, rejects) infoM logName $ "Recorded diff " <> show did <> " against " <> show ik -- Update and save the documents. let docs' = map (patch policy merged . either (const initial) id) docs failures <- lefts <$> mapM (setDocument store ik) (L.zip datasources docs') mapM_ (\e -> errorM logName $ "setDocument error: " <> e) failures -- Update initial document. let initial' = patch policy merged initial recordInitialDocument store ik initial' -- Update ekg incUpdates $ ikEntity ik where calculate :: [Document] -> IO Document calculate docs = do infoM logName $ "No initial document for " <> show ik <> "." return . either (const $ emptyDocument (ikEntity ik) "<initial>") id $ calculateInitialDocument docs -- | Logs a problem with the notification. notifyProblem :: RetconError -> IO () notifyProblem = errorM logName . (<>) "notifyProblem: " . 
show -- | Apply a 'Patch' to resolve the conflicts on a previous update. -- -- TODO(thsutton) We need to check the diff hasn't already been resolved. processDiff :: (Store store, MonadIO m, Functor m) => store -> Configuration -> DiffID -> D.Patch -> m () processDiff store cfg diffID resolveDiff = do res <- runExceptT act case res of Left e -> liftIO . errorM logName $ e Right () -> return () where act = do liftIO . infoM logName $ "Resolving errors in diff " <> show diffID conflict <- getConflict let en = EntityName . T.decodeUtf8 $ conflict ^. diffEntity ik = InternalKey en $ conflict ^. diffKey resolvePatch = Patch Unamed resolveDiff (policy, srcs) <- getSources en -- 0. Load and update the initial document. initial <- liftIO $ fromMaybe (emptyDocument en "<initial>") <$> lookupInitialDocument store ik let initial' = patch policy resolvePatch initial -- 1. Apply the patch to all sources. forM_ srcs $ \src -> liftIO $ do doc <- either (const initial') id <$> getDocument store ik src res <- setDocument store ik (src, patch policy resolvePatch doc) case res of Right _ -> return () Left e -> errorM logName $ "setDocument error: " <> e -- 2. Record the updated initial document. liftIO $ recordInitialDocument store ik initial' -- 3. Mark the conflicted patch as resolved. let resolveKeys = getKeys (D.patchOperations resolveDiff) conflictKeys = getKeys $ conflict ^. diffConflicts liftIO $ if resolveKeys == conflictKeys then do infoM logName $ "Mark as resolved diff " <> show diffID resolveDiffs store diffID else do infoM logName $ "Reduce diff " <> show diffID reduceDiff store diffID resolveKeys getKeys = L.nub . L.sort . map D.changePath getConflict = do conf <- liftIO $ lookupDiff store diffID case conf of Nothing -> throwError $ "Cannot resolve diff " <> show diffID <> " because it doesn't exist." Just v -> return v getSources en = do let things = do e <- M.lookup en (configEntities cfg) return (entityPolicy e, map snd . M.toList . entitySources $ e) case things of Nothing -> throwError $ "Cannot resolve diff " <> show diffID <> " because there are no " <> "sources for " <> show en <> "." Just x -> return x -------------------------------------------------------------------------------- -- * Data source functions -- | Get the 'Document' corresponding to an 'InternalKey' from a 'DataSource'. getDocument :: Store store => store -> InternalKey -> DataSource -> IO (Either ErrorMsg Document) getDocument store ik ds = do f <- lookupForeignKey store (sourceName ds) ik case f of Nothing -> return (Left $ "getDocument: No foreign key found for internal ID " <> show ik <> ".") Just fk -> fmap (over _Left show) . DS.runDSMonad $ DS.readDocument ds fk -- | Set the 'Document' in the given 'DataSource' corresponding to an 'InternalKey'. setDocument :: Store store => store -> InternalKey -> (DataSource, Document) -> IO (Either ErrorMsg ForeignKey) setDocument store ik (ds, doc) = do f <- lookupForeignKey store (sourceName ds) ik case f of Nothing -> return (Left $ "setDocument: No foreign key found for internal ID " <> show ik <> ".") Just fk -> fmap (over _Left show) . DS.runDSMonad $ DS.updateDocument ds fk doc -- | Merge a sequence of 'Patch'es by applying a 'MergePolicy'. 
-- mergeAll :: MergePolicy -> [Patch] -> (Patch, [RejectedOp]) mergeAll pol = foldr (\p1 (p2, r) -> (r <>) <$> merge pol p1 p2) (emptyPatch, mempty) extractDiff :: Document -> Document -> Patch extractDiff = diff ignoreConflicts -------------------------------------------------------------------------------- -- * Utility functions -- | Decode a serializable value from a strict 'ByteString'. -- -- If the bytestring cannot be decoded, a 'DecodeError' is thrown. decodeStrict :: (MonadIO m, MonadError APIError m, Binary a) => BS.ByteString -> m a decodeStrict bs = case decodeOrFail . fromStrict $ bs of Left _ -> do liftIO . warningM logName . BS.unpack $ "Decode failure for input: " <> bs throwError DecodeError Right (_, _, x) -> return x -- | Encode a serialisable value into a strict 'ByteString'. encodeStrict :: (Binary a) => a -> BS.ByteString encodeStrict = toStrict . encode -- | Silences both errors (via logging) and results. -- hushBoth :: Show a => IO (Either a b) -> IO () hushBoth act = act >>= \x -> case x of Left e -> errorM logName (show e) Right _ -> return ()
anchor/retcon
lib/Retcon/Network/Server.hs
Haskell
bsd-3-clause
21,317
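An illustrative round trip through the framing helpers defined above. encodeStrict and decodeStrict are what the server uses to (de)serialise ZeroMQ message parts, and any type with a Binary instance will do:

roundTrip :: (MonadIO m, MonadError APIError m) => Int -> m Int
roundTrip = decodeStrict . encodeStrict
-- decodeStrict throws DecodeError into the monad when the bytes are not a
-- valid encoding, which is how malformed client messages get reported.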
-- File created: 2009-07-21 13:19:42 module Main (main, runOne) where import Prelude hiding (catch) import Control.Exception (catch) import Data.IORef (newIORef, readIORef) import System.Environment (getArgs) import Haschoo.Running (runRepl, runFile, run, RunError) import Haschoo.Stdlib (toplevelContext) import Haschoo.Utils (void, errPrint) main :: IO () main = do ctx <- toplevelContext args <- getArgs if null args then runRepl ctx else do initCtx <- mapM readIORef ctx mapM_ (\f -> do ctx' <- mapM newIORef initCtx runFile ctx' f `catch` \e -> errPrint (e :: RunError)) args -- For GHCi use runOne :: String -> IO () runOne s = toplevelContext >>= \ctx -> void (run ctx "runOne" s)
Deewiant/haschoo
Haschoo/Main.hs
Haskell
bsd-3-clause
821
{- (c) The University of Glasgow 2006 (c) The University of Glasgow, 1997-2006 Buffers for scanning string input stored in external arrays. -} {-# LANGUAGE BangPatterns, CPP, MagicHash, UnboxedTuples #-} {-# OPTIONS_GHC -O2 #-} -- We always optimise this, otherwise performance of a non-optimised -- compiler is severely affected module StringBuffer ( StringBuffer(..), -- non-abstract for vs\/HaskellService -- * Creation\/destruction hGetStringBuffer, hGetStringBufferBlock, appendStringBuffers, stringToStringBuffer, -- * Inspection nextChar, currentChar, prevChar, atEnd, -- * Moving and comparison stepOn, offsetBytes, byteDiff, atLine, -- * Conversion lexemeToString, lexemeToFastString, decodePrevNChars, -- * Parsing integers parseUnsignedInteger, ) where #include "HsVersions.h" import GhcPrelude import Encoding import FastString import FastFunctions import Outputable import Util import Data.Maybe import Control.Exception import System.IO import System.IO.Unsafe ( unsafePerformIO ) import GHC.IO.Encoding.UTF8 ( mkUTF8 ) import GHC.IO.Encoding.Failure ( CodingFailureMode(IgnoreCodingFailure) ) import GHC.Exts import Foreign -- ----------------------------------------------------------------------------- -- The StringBuffer type -- |A StringBuffer is an internal pointer to a sized chunk of bytes. -- The bytes are intended to be *immutable*. There are pure -- operations to read the contents of a StringBuffer. -- -- A StringBuffer may have a finalizer, depending on how it was -- obtained. -- data StringBuffer = StringBuffer { buf :: {-# UNPACK #-} !(ForeignPtr Word8), len :: {-# UNPACK #-} !Int, -- length cur :: {-# UNPACK #-} !Int -- current pos } -- The buffer is assumed to be UTF-8 encoded, and furthermore -- we add three '\0' bytes to the end as sentinels so that the -- decoder doesn't have to check for overflow at every single byte -- of a multibyte sequence. instance Show StringBuffer where showsPrec _ s = showString "<stringbuffer(" . shows (len s) . showString "," . shows (cur s) . showString ")>" -- ----------------------------------------------------------------------------- -- Creation / Destruction -- | Read a file into a 'StringBuffer'. The resulting buffer is automatically -- managed by the garbage collector. hGetStringBuffer :: FilePath -> IO StringBuffer hGetStringBuffer fname = do h <- openBinaryFile fname ReadMode size_i <- hFileSize h offset_i <- skipBOM h size_i 0 -- offset is 0 initially let size = fromIntegral $ size_i - offset_i buf <- mallocForeignPtrArray (size+3) withForeignPtr buf $ \ptr -> do r <- if size == 0 then return 0 else hGetBuf h ptr size hClose h if (r /= size) then ioError (userError "short read of file") else newUTF8StringBuffer buf ptr size hGetStringBufferBlock :: Handle -> Int -> IO StringBuffer hGetStringBufferBlock handle wanted = do size_i <- hFileSize handle offset_i <- hTell handle >>= skipBOM handle size_i let size = min wanted (fromIntegral $ size_i-offset_i) buf <- mallocForeignPtrArray (size+3) withForeignPtr buf $ \ptr -> do r <- if size == 0 then return 0 else hGetBuf handle ptr size if r /= size then ioError (userError $ "short read of file: "++show(r,size,size_i,handle)) else newUTF8StringBuffer buf ptr size -- | Skip the byte-order mark if there is one (see #1744 and #6016), -- and return the new position of the handle in bytes. -- -- This is better than treating #FEFF as whitespace, -- because that would mess up layout. We don't have a concept -- of zero-width whitespace in Haskell: all whitespace codepoints -- have a width of one column. 
skipBOM :: Handle -> Integer -> Integer -> IO Integer skipBOM h size offset = -- Only skip BOM at the beginning of a file. if size > 0 && offset == 0 then do -- Validate assumption that handle is in binary mode. ASSERTM( hGetEncoding h >>= return . isNothing ) -- Temporarily select utf8 encoding with error ignoring, -- to make `hLookAhead` and `hGetChar` return full Unicode characters. bracket_ (hSetEncoding h safeEncoding) (hSetBinaryMode h True) $ do c <- hLookAhead h if c == '\xfeff' then hGetChar h >> hTell h else return offset else return offset where safeEncoding = mkUTF8 IgnoreCodingFailure newUTF8StringBuffer :: ForeignPtr Word8 -> Ptr Word8 -> Int -> IO StringBuffer newUTF8StringBuffer buf ptr size = do pokeArray (ptr `plusPtr` size :: Ptr Word8) [0,0,0] -- sentinels for UTF-8 decoding return $ StringBuffer buf size 0 appendStringBuffers :: StringBuffer -> StringBuffer -> IO StringBuffer appendStringBuffers sb1 sb2 = do newBuf <- mallocForeignPtrArray (size+3) withForeignPtr newBuf $ \ptr -> withForeignPtr (buf sb1) $ \sb1Ptr -> withForeignPtr (buf sb2) $ \sb2Ptr -> do copyArray ptr (sb1Ptr `advancePtr` cur sb1) sb1_len copyArray (ptr `advancePtr` sb1_len) (sb2Ptr `advancePtr` cur sb2) sb2_len pokeArray (ptr `advancePtr` size) [0,0,0] return (StringBuffer newBuf size 0) where sb1_len = calcLen sb1 sb2_len = calcLen sb2 calcLen sb = len sb - cur sb size = sb1_len + sb2_len -- | Encode a 'String' into a 'StringBuffer' as UTF-8. The resulting buffer -- is automatically managed by the garbage collector. stringToStringBuffer :: String -> StringBuffer stringToStringBuffer str = unsafePerformIO $ do let size = utf8EncodedLength str buf <- mallocForeignPtrArray (size+3) withForeignPtr buf $ \ptr -> do utf8EncodeString ptr str pokeArray (ptr `plusPtr` size :: Ptr Word8) [0,0,0] -- sentinels for UTF-8 decoding return (StringBuffer buf size 0) -- ----------------------------------------------------------------------------- -- Grab a character -- | Return the first UTF-8 character of a nonempty 'StringBuffer' and as well -- the remaining portion (analogous to 'Data.List.uncons'). __Warning:__ The -- behavior is undefined if the 'StringBuffer' is empty. The result shares -- the same buffer as the original. Similar to 'utf8DecodeChar', if the -- character cannot be decoded as UTF-8, '\0' is returned. {-# INLINE nextChar #-} nextChar :: StringBuffer -> (Char,StringBuffer) nextChar (StringBuffer buf len (I# cur#)) = -- Getting our fingers dirty a little here, but this is performance-critical inlinePerformIO $ do withForeignPtr buf $ \(Ptr a#) -> do case utf8DecodeChar# (a# `plusAddr#` cur#) of (# c#, nBytes# #) -> let cur' = I# (cur# +# nBytes#) in return (C# c#, StringBuffer buf len cur') -- | Return the first UTF-8 character of a nonempty 'StringBuffer' (analogous -- to 'Data.List.head'). __Warning:__ The behavior is undefined if the -- 'StringBuffer' is empty. Similar to 'utf8DecodeChar', if the character -- cannot be decoded as UTF-8, '\0' is returned. currentChar :: StringBuffer -> Char currentChar = fst . nextChar prevChar :: StringBuffer -> Char -> Char prevChar (StringBuffer _ _ 0) deflt = deflt prevChar (StringBuffer buf _ cur) _ = inlinePerformIO $ do withForeignPtr buf $ \p -> do p' <- utf8PrevChar (p `plusPtr` cur) return (fst (utf8DecodeChar p')) -- ----------------------------------------------------------------------------- -- Moving -- | Return a 'StringBuffer' with the first UTF-8 character removed (analogous -- to 'Data.List.tail'). 
__Warning:__ The behavior is undefined if the -- 'StringBuffer' is empty. The result shares the same buffer as the -- original. stepOn :: StringBuffer -> StringBuffer stepOn s = snd (nextChar s) -- | Return a 'StringBuffer' with the first @n@ bytes removed. __Warning:__ -- If there aren't enough characters, the returned 'StringBuffer' will be -- invalid and any use of it may lead to undefined behavior. The result -- shares the same buffer as the original. offsetBytes :: Int -- ^ @n@, the number of bytes -> StringBuffer -> StringBuffer offsetBytes i s = s { cur = cur s + i } -- | Compute the difference in offset between two 'StringBuffer's that share -- the same buffer. __Warning:__ The behavior is undefined if the -- 'StringBuffer's use separate buffers. byteDiff :: StringBuffer -> StringBuffer -> Int byteDiff s1 s2 = cur s2 - cur s1 -- | Check whether a 'StringBuffer' is empty (analogous to 'Data.List.null'). atEnd :: StringBuffer -> Bool atEnd (StringBuffer _ l c) = l == c -- | Computes a 'StringBuffer' which points to the first character of the -- wanted line. Lines begin at 1. atLine :: Int -> StringBuffer -> Maybe StringBuffer atLine line sb@(StringBuffer buf len _) = inlinePerformIO $ withForeignPtr buf $ \p -> do p' <- skipToLine line len p if p' == nullPtr then return Nothing else let delta = p' `minusPtr` p in return $ Just (sb { cur = delta , len = len - delta }) skipToLine :: Int -> Int -> Ptr Word8 -> IO (Ptr Word8) skipToLine !line !len !op0 = go 1 op0 where !opend = op0 `plusPtr` len go !i_line !op | op >= opend = pure nullPtr | i_line == line = pure op | otherwise = do w <- peek op :: IO Word8 case w of 10 -> go (i_line + 1) (plusPtr op 1) 13 -> do -- this is safe because a 'StringBuffer' is -- guaranteed to have 3 bytes sentinel values. w' <- peek (plusPtr op 1) :: IO Word8 case w' of 10 -> go (i_line + 1) (plusPtr op 2) _ -> go (i_line + 1) (plusPtr op 1) _ -> go i_line (plusPtr op 1) -- ----------------------------------------------------------------------------- -- Conversion -- | Decode the first @n@ bytes of a 'StringBuffer' as UTF-8 into a 'String'. -- Similar to 'utf8DecodeChar', if the character cannot be decoded as UTF-8, -- they will be replaced with '\0'. lexemeToString :: StringBuffer -> Int -- ^ @n@, the number of bytes -> String lexemeToString _ 0 = "" lexemeToString (StringBuffer buf _ cur) bytes = utf8DecodeStringLazy buf cur bytes lexemeToFastString :: StringBuffer -> Int -- ^ @n@, the number of bytes -> FastString lexemeToFastString _ 0 = nilFS lexemeToFastString (StringBuffer buf _ cur) len = inlinePerformIO $ withForeignPtr buf $ \ptr -> return $! mkFastStringBytes (ptr `plusPtr` cur) len -- | Return the previous @n@ characters (or fewer if we are less than @n@ -- characters into the buffer. decodePrevNChars :: Int -> StringBuffer -> String decodePrevNChars n (StringBuffer buf _ cur) = inlinePerformIO $ withForeignPtr buf $ \p0 -> go p0 n "" (p0 `plusPtr` (cur - 1)) where go :: Ptr Word8 -> Int -> String -> Ptr Word8 -> IO String go buf0 n acc p | n == 0 || buf0 >= p = return acc go buf0 n acc p = do p' <- utf8PrevChar p let (c,_) = utf8DecodeChar p' go buf0 (n - 1) (c:acc) p' -- ----------------------------------------------------------------------------- -- Parsing integer strings in various bases parseUnsignedInteger :: StringBuffer -> Int -> Integer -> (Char->Int) -> Integer parseUnsignedInteger (StringBuffer buf _ cur) len radix char_to_int = inlinePerformIO $ withForeignPtr buf $ \ptr -> return $! 
let go i x | i == len = x | otherwise = case fst (utf8DecodeChar (ptr `plusPtr` (cur + i))) of char -> go (i + 1) (x * radix + toInteger (char_to_int char)) in go 0 0
ezyang/ghc
compiler/utils/StringBuffer.hs
Haskell
bsd-3-clause
12,130
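An illustrative consumer of the StringBuffer API documented above (not part of GHC): it walks a buffer to the end with atEnd and nextChar, decoding one UTF-8 character per step:

bufferToString :: StringBuffer -> String
bufferToString sb
  | atEnd sb  = []
  | otherwise = let (c, sb') = nextChar sb in c : bufferToString sb'
-- e.g. bufferToString (stringToStringBuffer "où est ß?") == "où est ß?"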
module Network.EasyBitcoin.Internal.CurveConstants where -- SECP256k1 curve parameters pairG :: (Integer, Integer) pairG = ( 0x79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798 , 0X483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8 ) curveP :: Integer curveP = 0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f curveN :: Integer curveN = 0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 integerB :: Integer integerB = 7 integerA :: Integer integerA = 0
vwwv/easy-bitcoin
Network/EasyBitcoin/Internal/CurveConstants.hs
Haskell
bsd-3-clause
573
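A sanity-check sketch (not part of the file above): the generator pairG should satisfy the secp256k1 short-Weierstrass equation y^2 = x^3 + a*x + b modulo the field prime, with a and b given by integerA and integerB:

generatorOnCurve :: Bool
generatorOnCurve =
    let (x, y) = pairG
    in (y * y - (x * x * x + integerA * x + integerB)) `mod` curveP == 0
-- evaluates to True for the constants above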
{-# LANGUAGE EmptyDataDecls #-} module TypeLevel.Number.Nat.Types ( I , O , Z ) where -- | One bit. data I n -- | Zero bit. data O n -- | Bit stream terminator. data Z
Shimuuar/type-level-numbers
TypeLevel/Number/Nat/Types.hs
Haskell
bsd-3-clause
271
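A hypothetical illustration of how naturals could be spelled with these constructors, assuming a least-significant-bit-first binary encoding terminated by Z; the actual convention is fixed in the package's Nat module, so the aliases below are illustrative only:

type One  = I Z          -- binary 1
type Two  = O (I Z)      -- binary 10, least significant bit outermost
type Five = I (O (I Z))  -- binary 101, least significant bit outermost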
factors :: Int -> [Int] factors n = [x | x <- [1..n], mod n x == 0] prime :: Int -> Bool prime x = factors x == [1, x] main :: IO () main = print . sum $ take 1000 [x | x <- 2 : [3, 5..], prime x]
nikai3d/ce-challenges
easy/sum_prime.hs
Haskell
bsd-3-clause
201
{-# LANGUAGE OverloadedStrings #-} module Hakyll.Web.Urls.Relativize.Tests ( tests ) where import Test.Framework import Test.HUnit hiding (Test) import Hakyll.Web.Urls.Relativize import TestSuite.Util tests :: [Test] tests = fromAssertions "relativizeUrls" [ "<a href=\"../foo\">bar</a>" @=? relativizeUrls ".." "<a href=\"/foo\">bar</a>" , "<img src=\"../../images/lolcat.png\"></img>" @=? relativizeUrls "../.." "<img src=\"/images/lolcat.png\" />" , "<a href=\"http://haskell.org\">Haskell</a>" @=? relativizeUrls "../.." "<a href=\"http://haskell.org\">Haskell</a>" , "<a href=\"http://haskell.org\">Haskell</a>" @=? relativizeUrls "../.." "<a href=\"http://haskell.org\">Haskell</a>" , "<script src=\"//ajax.googleapis.com/jquery.min.js\"></script>" @=? relativizeUrls "../.." "<script src=\"//ajax.googleapis.com/jquery.min.js\"></script>" ]
sol/hakyll
tests/Hakyll/Web/Urls/Relativize/Tests.hs
Haskell
bsd-3-clause
935
{-# LANGUAGE GADTs, DataKinds, KindSignatures #-} import Data.Map.Strict (Map) data Protocol = Protocol { pEndpoints :: [Endpoint] , pVersion :: String } data Endpoint = Endpoint { eUrl :: String , eTransmissions :: Map Method Transmission } data Method = GET | POST | OPTIONS | HEAD | PUT | DELETE -- | A Transmission is a bundle consisting of a request and a response. data Transmission = Transmission { tRequest :: Request , tResponse :: Response } data Request = Request { rqFields :: [Field] } data Response = Response { reFields :: [Field] } data Field = Field { fName :: String , fLabel :: String , fDescription :: String , fType :: FieldType , fValidators :: [Validator] } data FieldType = FTInt | FTString | FTBool | FTDouble | FTList FieldType | FTMap FieldType FieldType -- I'm not sure about this one. We'll see if it's needed. -- | The validator type, I'm not at all sure about if this is a good idea. data Validator = Required Bool -- ^ Is this field required? True by default. | NullAllowed Bool -- ^ Do we get away with not supplying this field? False by default. | LowerBound Int -- ^ Lower bound in length for strings and lists, and by value for numbers. | UpperBound Int -- ^ Upper bound in length for strings and lists, and by value for numbers. | Chain Validator Validator -- ^ Combine two validators, both must pass for this validator to pass. -- | Datakind for token validity data Token where ValidToken :: Token InvalidToken :: Token deriving Show -- | Side effects that we can get from tokens data TokenSideEffect :: Token -> * where Produce :: TokenSideEffect ValidToken Require :: TokenSideEffect ValidToken Consume :: TokenSideEffect InvalidToken login :: TokenSideEffect ValidToken login = Produce updateUsername :: String -> TokenSideEffect ValidToken -> TokenSideEffect ValidToken updateUsername = undefined logout :: TokenSideEffect ValidToken -> TokenSideEffect InvalidToken logout = const Consume transaction = logout $ updateUsername "Bob" login
MaximilianAlgehed/Haspec
src/Lang/Lang.hs
Haskell
bsd-3-clause
2,310
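An illustrative extension showing what the indexed TokenSideEffect type buys: sessions that respect token validity type-check, while misuse is rejected at compile time:

-- A valid session: log in, rename, then log out (consuming the token).
validSession :: TokenSideEffect InvalidToken
validSession = logout (updateUsername "Alice" login)

-- By contrast, something like
--   updateUsername "Alice" (logout login)
-- does not type-check, because logout yields TokenSideEffect InvalidToken
-- while updateUsername demands a TokenSideEffect ValidToken.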
{-# LANGUAGE CPP #-} {-# LANGUAGE Rank2Types #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE MultiParamTypeClasses #-} #ifdef TRUSTWORTHY {-# LANGUAGE Trustworthy #-} #endif ------------------------------------------------------------------------------- -- | -- Module : Data.Vector.Lens -- Copyright : (C) 2012 Edward Kmett -- License : BSD-style (see the file LICENSE) -- Maintainer : Edward Kmett <ekmett@gmail.com> -- Stability : provisional -- Portability : non-portable -- -- This module provides lenses and traversals for working with generic vectors. ------------------------------------------------------------------------------- module Data.Vector.Lens ( toVectorOf -- * Isomorphisms , vector , reversed , forced -- * Lenses , _head , _tail , _last , _init , sliced -- * Traversal of individual indices , ordinals ) where import Control.Applicative import Control.Lens import Data.Vector as Vector hiding (zip, filter, indexed) import Prelude hiding ((++), length, null, head, tail, init, last, map, reverse) import Data.List (nub) import Data.Monoid -- | A 'Traversal' reading and writing to the 'head' of a 'Vector' -- -- >>> Vector.fromList [1,2,3] ^? _head -- Just 1 -- -- >>> Vector.empty ^? _head -- Nothing -- -- >>> Vector.fromList "abc" & _head .~ 'Q' -- fromList "Qbc" _head :: Traversal' (Vector a) a _head f v | null v = pure v | otherwise = f (unsafeHead v) <&> \a -> v // [(0,a)] {-# INLINE _head #-} -- | A 'Traversal' reading and writing to the 'last' element of a 'Vector' -- -- >>> Vector.fromList "abcde" ^? _last -- Just 'e' -- -- >>> Vector.empty ^? _last -- Nothing -- -- >>> Vector.fromList "abcde" & _last .~ 'Q' -- fromList "abcdQ" _last :: Traversal' (Vector a) a _last f v | null v = pure v | otherwise = f (unsafeLast v) <&> \a -> v // [(length v - 1, a)] {-# INLINE _last #-} -- | A 'Traversal' reading and writing to the 'tail' of a 'Vector' -- -- >>> Vector.fromList "abcde" ^? _tail -- Just (fromList "bcde") -- -- >>> Vector.empty ^? _tail -- Nothing -- -- >>> _tail .~ Vector.fromList [3,4,5] $ Vector.fromList [1,2] -- fromList [1,3,4,5] _tail :: Traversal' (Vector a) (Vector a) _tail f v | null v = pure v | otherwise = f (unsafeTail v) <&> cons (unsafeHead v) {-# INLINE _tail #-} -- | A 'Traversal' reading and replacing all but the a 'last' element of a 'Vector' -- -- >>> Vector.fromList [1,2,3,4] ^? _init -- Just (fromList [1,2,3]) -- -- >>> Vector.empty ^? _init -- Nothing -- -- >>> Vector.fromList "abcdef" & _init.mapped %~ succ -- fromList "bcdeff" _init :: Traversal' (Vector a) (Vector a) _init f v | null v = pure v | otherwise = f (unsafeInit v) <&> (`snoc` unsafeLast v) {-# INLINE _init #-} -- | @sliced i n@ provides a lens that edits the @n@ elements starting at index @i@ from a lens. -- -- This is only a valid lens if you do not change the length of the resulting 'Vector'. -- -- Attempting to return a longer or shorter vector will result in violations of the 'Lens' laws. -- -- >>> Vector.fromList [1..10] ^. sliced 2 5 -- fromList [3,4,5,6,7] -- -- >>> Vector.fromList [1..10] & sliced 2 5 . mapped .~ 0 -- fromList [1,2,0,0,0,0,0,8,9,10] sliced :: Int -- ^ @i@ starting index -> Int -- ^ @n@ length -> Lens' (Vector a) (Vector a) sliced i n f v = f (slice i n v) <&> \ v0 -> v // zip [i..i+n-1] (toList v0) {-# INLINE sliced #-} -- | Similar to 'toListOf', but returning a 'Vector'. 
-- -- >>> toVectorOf both (8,15) -- fromList [8,15] toVectorOf :: Getting (Endo [a]) s t a b -> s -> Vector a toVectorOf l s = fromList (toListOf l s) {-# INLINE toVectorOf #-} -- | Convert a list to a 'Vector' (or back) -- -- >>> [1,2,3] ^. vector -- fromList [1,2,3] -- -- >>> [1,2,3] ^. vector . from vector -- [1,2,3] -- -- >>> Vector.fromList [0,8,15] ^. from vector . vector -- fromList [0,8,15] vector :: Iso [a] [b] (Vector a) (Vector b) vector = iso fromList toList {-# INLINE vector #-} -- | Convert a 'Vector' to a version with all the elements in the reverse order -- -- >>> Vector.fromList [1,2,3] ^. reversed -- fromList [3,2,1] reversed :: Iso (Vector a) (Vector b) (Vector a) (Vector b) reversed = iso reverse reverse {-# INLINE reversed #-} -- | Convert a 'Vector' to a version that doesn't retain any extra memory. forced :: Iso (Vector a) (Vector b) (Vector a) (Vector b) forced = iso force force {-# INLINE forced #-} -- | This 'Traversal' will ignore any duplicates in the supplied list of indices. -- -- >>> toListOf (ordinals [1,3,2,5,9,10]) $ Vector.fromList [2,4..40] -- [4,8,6,12,20,22] ordinals :: [Int] -> IndexedTraversal' Int (Vector a) a ordinals is f v = fmap (v //) $ traverse (\i -> (,) i <$> indexed f i (v ! i)) $ nub $ filter (\i -> 0 <= i && i < l) is where l = length v {-# INLINE ordinals #-}
np/lens
src/Data/Vector/Lens.hs
Haskell
bsd-3-clause
4,796
{-# LANGUAGE OverloadedStrings #-} module Block ( blockSpecs ) where import Test.Hspec import Data.Text (Text) import Data.Conduit import qualified Data.Conduit.List as CL import Text.Markdown (def, MarkdownSettings(..)) import Text.Markdown.Block import Data.Functor.Identity (runIdentity) checkWith :: MarkdownSettings -> Text -> [Block Text] -> Expectation checkWith ms md blocks = runIdentity (yield md $$ toBlocks ms =$ CL.consume) `shouldBe` blocks check :: Text -> [Block Text] -> Expectation check = checkWith def blockSpecs :: Spec blockSpecs = do describe "tilde code" $ do it "simple" $ check "~~~haskell\nfoo\n\nbar\n~~~" [BlockCode (Just "haskell") "foo\n\nbar"] it "no lang" $ check "~~~\nfoo\n\nbar\n~~~" [BlockCode Nothing "foo\n\nbar"] it "no close" $ check "~~~\nfoo\n\nbar\n" [BlockPara " ~~~\nfoo", BlockPara "bar"] describe "list" $ do it "simple unordered" $ check "* foo\n\n* bar\n\n*\t\tqux" [ BlockList Unordered (Right [BlockPara "foo"]) , BlockList Unordered (Right [BlockPara "bar"]) , BlockList Unordered (Right [BlockPara "qux"]) ] it "simple ordered" $ check "1. foo\n\n3. bar\n\n17.\t\tqux" [ BlockList Ordered (Right [BlockPara "foo"]) , BlockList Ordered (Right [BlockPara "bar"]) , BlockList Ordered (Right [BlockPara "qux"]) ] it "nested" $ check "* foo\n* \n 1. bar\n 2. baz" [ BlockList Unordered (Left "foo") , BlockList Unordered (Right [ BlockList Ordered $ Left "bar" , BlockList Ordered $ Left "baz" ]) ] it "with blank" $ check "* foo\n\n bar\n\n* baz" [ BlockList Unordered $ Right [ BlockPara "foo" , BlockPara "bar" ] , BlockList Unordered $ Right [ BlockPara "baz" ] ] it "without whitespace" $ check "*foo\n\n1.bar" [ BlockPara "*foo" , BlockPara "1.bar" ] describe "blockquote" $ do it "simple" $ check "> foo\n>\n> * bar" [ BlockQuote [ BlockPara "foo" , BlockList Unordered $ Left "bar" ] ] it "blank" $ check "> foo\n\n> * bar" [ BlockQuote [BlockPara "foo"] , BlockQuote [BlockList Unordered $ Left "bar"] ] it "require blank before blockquote" $ check "foo\n> bar" [ BlockPara "foo\n> bar" ] it "no blank before blockquote" $ checkWith def { msBlankBeforeBlockquote = False } "foo\n> bar" [ BlockPara "foo", BlockQuote [BlockPara "bar"]] describe "indented code" $ do it "simple" $ check " foo\n bar\n" [ BlockCode Nothing "foo\nbar" ] it "blank" $ check " foo\n\n bar\n" [ BlockCode Nothing "foo\n\nbar" ] it "extra space" $ check " foo\n\n bar\n" [ BlockCode Nothing "foo\n\n bar" ] describe "html" $ do it "simple" $ check "<p>Hello world!</p>" [ BlockHtml "<p>Hello world!</p>" ] it "multiline" $ check "<p>Hello world!\n</p>" [ BlockHtml "<p>Hello world!\n</p>" ]
thefalconfeat/markdown
test/Block.hs
Haskell
bsd-3-clause
3,651
z = (x, y) where x = [ n * n | n <- [1..] , odd n , isPrime n ] y = [ n * n | n <- [1..] , even n , isPrime (n `div` 2) ]
itchyny/vim-haskell-indent
test/list/comprehension_multi_line2.out.hs
Haskell
mit
216
{-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE OverloadedStrings #-} {-# OPTIONS_GHC -fno-warn-missing-fields #-} {-# OPTIONS_GHC -fno-warn-missing-signatures #-} {-# OPTIONS_GHC -fno-warn-name-shadowing #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} {-# OPTIONS_GHC -fno-warn-unused-matches #-} ----------------------------------------------------------------- -- Autogenerated by Thrift -- -- -- DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -- @generated ----------------------------------------------------------------- module TestService_Iface where import Prelude ( Bool(..), Enum, Float, IO, Double, String, Maybe(..), Eq, Show, Ord, concat, error, fromIntegral, fromEnum, length, map, maybe, not, null, otherwise, return, show, toEnum, enumFromTo, Bounded, minBound, maxBound, (.), (&&), (||), (==), (++), ($), (-), (>>=), (>>)) import Control.Applicative (ZipList(..), (<*>)) import Control.Exception import Control.Monad ( liftM, ap, when ) import Data.ByteString.Lazy (ByteString) import Data.Functor ( (<$>) ) import Data.Hashable import Data.Int import Data.Maybe (catMaybes) import Data.Text.Lazy ( Text ) import Data.Text.Lazy.Encoding ( decodeUtf8, encodeUtf8 ) import qualified Data.Text.Lazy as T import Data.Typeable ( Typeable ) import qualified Data.HashMap.Strict as Map import qualified Data.HashSet as Set import qualified Data.Vector as Vector import Test.QuickCheck.Arbitrary ( Arbitrary(..) ) import Test.QuickCheck ( elements ) import Thrift hiding (ProtocolExnType(..)) import qualified Thrift (ProtocolExnType(..)) import Thrift.Types import Thrift.Arbitraries import Module_Types class TestService_Iface a where init :: a -> Int64 -> Int64 -> Int64 -> Int64 -> Int64 -> Int64 -> Int64 -> Int64 -> Int64 -> Int64 -> Int64 -> IO Int64
chjp2046/fbthrift
thrift/compiler/test/fixtures/service-fuzzer/gen-hs/TestService_Iface.hs
Haskell
apache-2.0
1,942
{-# Language RankNTypes #-} {-# Language TypeOperators #-} {-# Language BangPatterns #-} module IO where -- import Data.Array import Prelude hiding (traverse) import Data.Vector.Unboxed hiding (force) import qualified Data.Vector.Unboxed as V import Data.Array.Repa import Types import System.CPUTime import Data.Array.Repa.IO.DevIL import Data.Word import qualified Data.Array.Repa as R import Debug.Trace import System.Directory import Control.Exception import Control.DeepSeq import Data.Array.Repa.Repr.ForeignPtr import System.IO.Error hiding (catch) import Data.Array.Accelerate.IO import qualified Data.Array.Accelerate as A import Text.Printf import qualified Codec.Picture as Codec import Control.DeepSeq import Data.Time.Clock printDiff :: UTCTime -> UTCTime -> IO () printDiff start end = do let s = show (diffUTCTime end start) putStrLn (Prelude.init s) -- drops the "s" from the end -- | time monadic computation. printTimeIO :: IO a -> IO a printTimeIO action = do start <- getCurrentTime a <- action end <- getCurrentTime printDiff start end return a -- see about `seq` in the following benchmark -- https://github.com/AccelerateHS/accelerate/issues/208 -- | time evaluation of pure computation, in picoseconds. printTime :: a -> IO a printTime f = do start <- getCurrentTime end <- seq f getCurrentTime printDiff start end return f printTimeDeep :: (NFData a) => a -> IO a printTimeDeep f = do start <- getCurrentTime end <- deepseq f getCurrentTime printDiff start end return f writeVectorImage :: String -> VectorImage -> IO () writeVectorImage fname vecImg = Codec.writePng fname img where img = Codec.generateImage (\x y -> (fromIntegral $ (V.!) (pixels vecImg) ((width vecImg)*y + x))::Word8) (width vecImg) (height vecImg) readImgAsVector :: String -> IO VectorImage readImgAsVector fname = do (Right !img) <- Codec.readImage fname case img of Codec.ImageRGB8 rgbImg -> do let Codec.Image !imgWidth !imgHeight _ = rgbImg positions = Prelude.concatMap (\h -> Prelude.map (\w -> (w,h)) [0..imgWidth-1]) [0..imgHeight-1] !vec = fromList (Prelude.map (\(x,y) -> let (Codec.PixelRGB8 r g b) = Codec.pixelAt rgbImg x y in rgbToGreyPixel r g b) positions) img = deepseq vec (VectorImage vec imgWidth imgHeight) return img _ -> error "readImgAsVector: unsupported image type." rgbToGreyPixel :: Word8 -> Word8 -> Word8 -> Int rgbToGreyPixel r g b = ceiling $ (0.21::Double) * fromIntegral r + 0.71 * fromIntegral g + 0.07 * fromIntegral b readImgAsAccelerateArray :: String -> IO (A.Acc AccelerateImage) readImgAsAccelerateArray fname = do arr <- readImgAsManifestRepaArray fname let accImg = seq (A.use (fromRepa arr)) (A.use (fromRepa arr)) return (accImg) readImgAsRepaArray :: String -> IO RepaImage readImgAsRepaArray fname = do arr <- readImgAsManifestRepaArray fname return (delay arr) readImgAsManifestRepaArray :: String -> IO (Array A (Z :. Int :. Int) Int) readImgAsManifestRepaArray fname = do !img <- readImg return img where readImg = runIL $ do (RGB a) <- readImage fname img <- computeP $ traverse a (\(Z :. x :. y :. _) -> (Z :. x :. 
y)) luminosity :: IL (Array A DIM2 Int) return img writeAccelerateImg :: String -> AccelerateImage -> IO () writeAccelerateImg fname img = do repaImage <- copyP (toRepa img) writeRepaImg fname repaImage -- writeRepaImg :: String -> RepaImageComputed -> IO () writeRepaImg fname img = do removeIfExists fname im <- demote img runIL $ do writeImage fname (Grey im) removeIfExists :: FilePath -> IO () removeIfExists fileName = removeFile fileName `catch` handleExists where handleExists e | isDoesNotExistError e = return () | otherwise = throwIO e luminosity :: (DIM3 -> Word8) -> DIM2 -> Int luminosity f (Z :. i :. j) = round $ (0.21::Double) * r + 0.71 * g + 0.07 * b where r = fromIntegral $ f (Z :. i :. j :. 0) g = fromIntegral $ f (Z :. i :. j :. 1) b = fromIntegral $ f (Z :. i :. j :. 2) demote :: Monad m => Array U DIM2 Int -> m (Array F DIM2 Word8) demote arr = computeP $ R.map ffs arr where {-# INLINE ffs #-} ffs :: Int -> Word8 ffs x = fromIntegral (x :: Int) {-# NOINLINE demote #-}
robstewart57/small-image-processing-dsl-implementations
haskell/small-image-processing-dsl/src/IO.hs
Haskell
bsd-3-clause
4,421
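The distinction between printTime and printTimeDeep above matters because seq only forces a value to weak head normal form. A minimal, self-contained sketch of that pitfall using only the time and deepseq packages (timeWHNF and timeNF are illustrative names, not part of the module above):

import Control.DeepSeq (NFData, deepseq)
import Data.Time.Clock (diffUTCTime, getCurrentTime)

-- Times evaluation to weak head normal form only: for a lazy list this
-- forces just the first cons cell, so the measured time is near zero.
timeWHNF :: a -> IO a
timeWHNF x = do
  start <- getCurrentTime
  end   <- x `seq` getCurrentTime
  print (diffUTCTime end start)
  return x

-- Times evaluation to full normal form, which is what printTimeDeep relies on.
timeNF :: NFData a => a -> IO a
timeNF x = do
  start <- getCurrentTime
  end   <- x `deepseq` getCurrentTime
  print (diffUTCTime end start)
  return x

main :: IO ()
main = do
  let xs = map (* 2) [1 .. 1000000 :: Int]
  _ <- timeWHNF xs   -- misleadingly fast: the list is never fully built
  _ <- timeNF xs     -- measures the actual work
  return ()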
{-# LANGUAGE ConstraintKinds #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE FlexibleContexts #-} -- | Clean a project. module Stack.Clean (clean ,CleanOpts(..) ,StackCleanException(..) ) where import Control.Exception (Exception) import Control.Monad.Catch (throwM) import Data.Foldable (forM_) import Data.List ((\\),intercalate) import qualified Data.Map.Strict as Map import Data.Maybe (mapMaybe) import Data.Typeable (Typeable) import Path (Path, Abs, Dir) import Path.IO (ignoringAbsence, removeDirRecur) import Stack.Build.Source (getLocalPackageViews) import Stack.Build.Target (LocalPackageView(..)) import Stack.Config (getLocalPackages) import Stack.Constants (distDirFromDir, workDirFromDir) import Stack.Types.PackageName import Stack.Types.Config import Stack.Types.StackT -- | Deletes build artifacts in the current project. -- -- Throws 'StackCleanException'. clean :: (StackM env m, HasEnvConfig env) => CleanOpts -> m () clean cleanOpts = do dirs <- dirsToDelete cleanOpts forM_ dirs (ignoringAbsence . removeDirRecur) dirsToDelete :: (StackM env m, HasEnvConfig env) => CleanOpts -> m [Path Abs Dir] dirsToDelete cleanOpts = do packages <- getLocalPackages case cleanOpts of CleanShallow [] -> -- Filter out packages listed as extra-deps mapM distDirFromDir . Map.keys . Map.filter (== False) $ packages CleanShallow targets -> do localPkgViews <- getLocalPackageViews let localPkgNames = Map.keys localPkgViews getPkgDir pkgName = fmap (lpvRoot . fst) (Map.lookup pkgName localPkgViews) case targets \\ localPkgNames of [] -> mapM distDirFromDir (mapMaybe getPkgDir targets) xs -> throwM (NonLocalPackages xs) CleanFull -> do pkgWorkDirs <- mapM workDirFromDir (Map.keys packages) projectWorkDir <- getProjectWorkDir return (projectWorkDir : pkgWorkDirs) -- | Options for @stack clean@. data CleanOpts = CleanShallow [PackageName] -- ^ Delete the "dist directories" as defined in 'Stack.Constants.distRelativeDir' -- for the given local packages. If no packages are given, all project packages -- should be cleaned. | CleanFull -- ^ Delete all work directories in the project. -- | Exceptions during cleanup. newtype StackCleanException = NonLocalPackages [PackageName] deriving (Typeable) instance Show StackCleanException where show (NonLocalPackages pkgs) = "The following packages are not part of this project: " ++ intercalate ", " (map show pkgs) instance Exception StackCleanException
mrkkrp/stack
src/Stack/Clean.hs
Haskell
bsd-3-clause
2,851
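The non-local-package check in dirsToDelete boils down to a list difference: any requested target that is not among the local package names triggers NonLocalPackages. A standalone sketch of that check, with illustrative names rather than Stack's own types:

import Data.List ((\\), intercalate)

-- Either an error message naming the unknown targets, or the validated targets.
checkTargets :: [String] -> [String] -> Either String [String]
checkTargets localPkgNames targets =
  case targets \\ localPkgNames of
    [] -> Right targets
    xs -> Left ("The following packages are not part of this project: "
                ++ intercalate ", " xs)

main :: IO ()
main = do
  print (checkTargets ["server", "client"] ["client"])
  print (checkTargets ["server", "client"] ["client", "frontend"])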
{- (c) Rahul Muttineni 2016-2017 (c) The University of Glasgow 2006 (c) The GRASP/AQUA Project, Glasgow University, 1992-1998 Loading interface files -} {-# LANGUAGE CPP #-} {-# OPTIONS_GHC -fno-warn-orphans #-} module ETA.Iface.LoadIface ( -- Importing one thing tcLookupImported_maybe, importDecl, checkWiredInTyCon, ifCheckWiredInThing, -- RnM/TcM functions loadModuleInterface, loadModuleInterfaces, loadSrcInterface, loadSrcInterface_maybe, loadInterfaceForName, loadInterfaceForModule, -- IfM functions loadInterface, loadWiredInHomeIface, loadSysInterface, loadUserInterface, loadPluginInterface, findAndReadIface, readIface, -- Used when reading the module's old interface loadDecls, -- Should move to TcIface and be renamed initExternalPackageState, ifaceStats, pprModIface, showIface ) where import {-# SOURCE #-} ETA.Iface.TcIface( tcIfaceDecl, tcIfaceRules, tcIfaceInst, tcIfaceFamInst, tcIfaceVectInfo, tcIfaceAnnotations ) import ETA.Main.DynFlags import ETA.Iface.IfaceSyn import ETA.Iface.IfaceEnv import ETA.Main.HscTypes import ETA.BasicTypes.BasicTypes hiding (SuccessFlag(..)) import ETA.TypeCheck.TcRnMonad import ETA.Main.Constants import ETA.Prelude.PrelNames import ETA.Prelude.PrelInfo import ETA.Prelude.PrimOp ( allThePrimOps, primOpFixity, primOpOcc ) import ETA.BasicTypes.MkId ( seqId ) import ETA.Specialise.Rules import ETA.Types.TyCon import ETA.Main.Annotations import ETA.Types.InstEnv import ETA.Types.FamInstEnv import ETA.BasicTypes.Name import ETA.BasicTypes.NameEnv import ETA.BasicTypes.Avail import ETA.BasicTypes.Module import ETA.Utils.Maybes import ETA.Main.ErrUtils import ETA.Main.Finder import ETA.Utils.UniqFM import ETA.BasicTypes.SrcLoc import ETA.Utils.Outputable import qualified ETA.Utils.Outputable as Outputable import ETA.Iface.BinIface import ETA.Utils.Panic import ETA.Utils.Util import ETA.Utils.FastString import ETA.Utils.Fingerprint import ETA.Main.Hooks import Control.Monad import Data.IORef import System.FilePath #include "HsVersions.h" {- ************************************************************************ * * * tcImportDecl is the key function for "faulting in" * * imported things * * ************************************************************************ The main idea is this. We are chugging along type-checking source code, and find a reference to GHC.Base.map. We call tcLookupGlobal, which doesn't find it in the EPS type envt. So it 1 loads GHC.Base.hi 2 gets the decl for GHC.Base.map 3 typechecks it via tcIfaceDecl 4 and adds it to the type env in the EPS Note that DURING STEP 4, we may find that map's type mentions a type constructor that also Notice that for imported things we read the current version from the EPS mutable variable. This is important in situations like ...$(e1)...$(e2)... where the code that e1 expands to might import some defns that also turn out to be needed by the code that e2 expands to. 
-} tcLookupImported_maybe :: Name -> TcM (MaybeErr MsgDoc TyThing) -- Returns (Failed err) if we can't find the interface file for the thing tcLookupImported_maybe name = do { hsc_env <- getTopEnv ; mb_thing <- liftIO (lookupTypeHscEnv hsc_env name) ; case mb_thing of Just thing -> return (Succeeded thing) Nothing -> tcImportDecl_maybe name } tcImportDecl_maybe :: Name -> TcM (MaybeErr MsgDoc TyThing) -- Entry point for *source-code* uses of importDecl tcImportDecl_maybe name | Just thing <- wiredInNameTyThing_maybe name = do { when (needWiredInHomeIface thing) (initIfaceTcRn (loadWiredInHomeIface name)) -- See Note [Loading instances for wired-in things] ; return (Succeeded thing) } | otherwise = initIfaceTcRn (importDecl name) importDecl :: Name -> IfM lcl (MaybeErr MsgDoc TyThing) -- Get the TyThing for this Name from an interface file -- It's not a wired-in thing -- the caller caught that importDecl name = ASSERT( not (isWiredInName name) ) do { traceIf nd_doc -- Load the interface, which should populate the PTE ; mb_iface <- ASSERT2( isExternalName name, ppr name ) loadInterface nd_doc (nameModule name) ImportBySystem ; case mb_iface of { Failed err_msg -> return (Failed err_msg) ; Succeeded _ -> do -- Now look it up again; this time we should find it { eps <- getEps ; case lookupTypeEnv (eps_PTE eps) name of Just thing -> return (Succeeded thing) Nothing -> return (Failed not_found_msg) }}} where nd_doc = ptext (sLit "Need decl for") <+> ppr name not_found_msg = hang (ptext (sLit "Can't find interface-file declaration for") <+> pprNameSpace (occNameSpace (nameOccName name)) <+> ppr name) 2 (vcat [ptext (sLit "Probable cause: bug in .hi-boot file, or inconsistent .hi file"), ptext (sLit "Use -ddump-if-trace to get an idea of which file caused the error")]) {- ************************************************************************ * * Checks for wired-in things * * ************************************************************************ Note [Loading instances for wired-in things] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We need to make sure that we have at least *read* the interface files for any module with an instance decl or RULE that we might want. * If the instance decl is an orphan, we have a whole separate mechanism (loadOrphanModules) * If the instance decl is not an orphan, then the act of looking at the TyCon or Class will force in the defining module for the TyCon/Class, and hence the instance decl * BUT, if the TyCon is a wired-in TyCon, we don't really need its interface; but we must make sure we read its interface in case it has instances or rules. That is what LoadIface.loadWiredInHomeInterface does. It's called from TcIface.{tcImportDecl, checkWiredInTyCon, ifCheckWiredInThing} * HOWEVER, only do this for TyCons. There are no wired-in Classes. There are some wired-in Ids, but we don't want to load their interfaces. For example, Control.Exception.Base.recSelError is wired in, but that module is compiled late in the base library, and we don't want to force it to load before it's been compiled! All of this is done by the type checker. The renamer plays no role. (It used to, but no longer.) -} checkWiredInTyCon :: TyCon -> TcM () -- Ensure that the home module of the TyCon (and hence its instances) -- are loaded. See Note [Loading instances for wired-in things] -- It might not be a wired-in tycon (see the calls in TcUnify), -- in which case this is a no-op. 
checkWiredInTyCon tc | not (isWiredInName tc_name) = return () | otherwise = do { mod <- getModule ; ASSERT( isExternalName tc_name ) when (mod /= nameModule tc_name) (initIfaceTcRn (loadWiredInHomeIface tc_name)) -- Don't look for (non-existent) Float.hi when -- compiling Float.lhs, which mentions Float of course -- A bit yukky to call initIfaceTcRn here } where tc_name = tyConName tc ifCheckWiredInThing :: TyThing -> IfL () -- Even though we are in an interface file, we want to make -- sure the instances of a wired-in thing are loaded (imagine f :: Double -> Double) -- Ditto want to ensure that RULES are loaded too -- See Note [Loading instances for wired-in things] ifCheckWiredInThing thing = do { mod <- getIfModule -- Check whether we are typechecking the interface for this -- very module. E.g when compiling the base library in --make mode -- we may typecheck GHC.Base.hi. At that point, GHC.Base is not in -- the HPT, so without the test we'll demand-load it into the PIT! -- C.f. the same test in checkWiredInTyCon above ; let name = getName thing ; ASSERT2( isExternalName name, ppr name ) when (needWiredInHomeIface thing && mod /= nameModule name) (loadWiredInHomeIface name) } needWiredInHomeIface :: TyThing -> Bool -- Only for TyCons; see Note [Loading instances for wired-in things] needWiredInHomeIface (ATyCon {}) = True needWiredInHomeIface _ = False {- ************************************************************************ * * loadSrcInterface, loadOrphanModules, loadInterfaceForName These three are called from TcM-land * * ************************************************************************ -} -- Note [Un-ambiguous multiple interfaces] -- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -- When a user writes an import statement, this usually causes a *single* -- interface file to be loaded. However, the game is different when -- signatures are being imported. Suppose in packages p and q we have -- signatures: -- -- module A where -- foo :: Int -- -- module A where -- bar :: Int -- -- If both packages are exposed and I am importing A, I should see a -- "unified" signature: -- -- module A where -- foo :: Int -- bar :: Int -- -- The way we achieve this is having the module lookup for A load and return -- multiple interface files, which we will then process as if there were -- "multiple" imports: -- -- import "p" A -- import "q" A -- -- Doing so does not cause any ambiguity, because any overlapping identifiers -- are guaranteed to have the same name if the backing implementations of the -- two signatures are the same (a condition which is checked by 'Packages'.) -- | Load the interface corresponding to an @import@ directive in -- source code. On a failure, fail in the monad with an error message. -- See Note [Un-ambiguous multiple interfaces] for why the return type -- is @[ModIface]@ loadSrcInterface :: SDoc -> ModuleName -> IsBootInterface -- {-# SOURCE #-} ? -> Maybe FastString -- "package", if any -> RnM [ModIface] loadSrcInterface doc mod want_boot maybe_pkg = do { res <- loadSrcInterface_maybe doc mod want_boot maybe_pkg ; case res of Failed err -> failWithTc err Succeeded ifaces -> return ifaces } -- | Like 'loadSrcInterface', but returns a 'MaybeErr'. See also -- Note [Un-ambiguous multiple interfaces] loadSrcInterface_maybe :: SDoc -> ModuleName -> IsBootInterface -- {-# SOURCE #-} ? -> Maybe FastString -- "package", if any -> RnM (MaybeErr MsgDoc [ModIface]) loadSrcInterface_maybe doc mod want_boot maybe_pkg -- We must first find which Module this import refers to. 
This involves -- calling the Finder, which as a side effect will search the filesystem -- and create a ModLocation. If successful, loadIface will read the -- interface; it will call the Finder again, but the ModLocation will be -- cached from the first search. = do { hsc_env <- getTopEnv -- ToDo: findImportedModule should return a list of interfaces ; res <- liftIO $ findImportedModule hsc_env mod maybe_pkg ; case res of Found _ mod -> fmap (fmap (:[])) . initIfaceTcRn $ loadInterface doc mod (ImportByUser want_boot) err -> return (Failed (cannotFindInterface (hsc_dflags hsc_env) mod err)) } -- | Load interface directly for a fully qualified 'Module'. (This is a fairly -- rare operation, but in particular it is used to load orphan modules -- in order to pull their instances into the global package table and to -- handle some operations in GHCi). loadModuleInterface :: SDoc -> Module -> TcM ModIface loadModuleInterface doc mod = initIfaceTcRn (loadSysInterface doc mod) -- | Load interfaces for a collection of modules. loadModuleInterfaces :: SDoc -> [Module] -> TcM () loadModuleInterfaces doc mods | null mods = return () | otherwise = initIfaceTcRn (mapM_ load mods) where load mod = loadSysInterface (doc <+> parens (ppr mod)) mod -- | Loads the interface for a given Name. -- Should only be called for an imported name; -- otherwise loadSysInterface may not find the interface loadInterfaceForName :: SDoc -> Name -> TcRn ModIface loadInterfaceForName doc name = do { when debugIsOn $ -- Check pre-condition do { this_mod <- getModule ; MASSERT2( not (nameIsLocalOrFrom this_mod name), ppr name <+> parens doc ) } ; ASSERT2( isExternalName name, ppr name ) initIfaceTcRn $ loadSysInterface doc (nameModule name) } -- | Loads the interface for a given Module. loadInterfaceForModule :: SDoc -> Module -> TcRn ModIface loadInterfaceForModule doc m = do -- Should not be called with this module when debugIsOn $ do this_mod <- getModule MASSERT2( this_mod /= m, ppr m <+> parens doc ) initIfaceTcRn $ loadSysInterface doc m {- ********************************************************* * * loadInterface The main function to load an interface for an imported module, and put it in the External Package State * * ********************************************************* -} -- | An 'IfM' function to load the home interface for a wired-in thing, -- so that we're sure that we see its instance declarations and rules -- See Note [Loading instances for wired-in things] in TcIface loadWiredInHomeIface :: Name -> IfM lcl () loadWiredInHomeIface name = ASSERT( isWiredInName name ) do _ <- loadSysInterface doc (nameModule name); return () where doc = ptext (sLit "Need home interface for wired-in thing") <+> ppr name ------------------ -- | Loads a system interface and throws an exception if it fails loadSysInterface :: SDoc -> Module -> IfM lcl ModIface loadSysInterface doc mod_name = loadInterfaceWithException doc mod_name ImportBySystem ------------------ -- | Loads a user interface and throws an exception if it fails. 
The first parameter indicates -- whether we should import the boot variant of the module loadUserInterface :: Bool -> SDoc -> Module -> IfM lcl ModIface loadUserInterface is_boot doc mod_name = loadInterfaceWithException doc mod_name (ImportByUser is_boot) loadPluginInterface :: SDoc -> Module -> IfM lcl ModIface loadPluginInterface doc mod_name = loadInterfaceWithException doc mod_name ImportByPlugin ------------------ -- | A wrapper for 'loadInterface' that throws an exception if it fails loadInterfaceWithException :: SDoc -> Module -> WhereFrom -> IfM lcl ModIface loadInterfaceWithException doc mod_name where_from = do { mb_iface <- loadInterface doc mod_name where_from ; dflags <- getDynFlags ; case mb_iface of Failed err -> liftIO $ throwGhcExceptionIO (ProgramError (showSDoc dflags err)) Succeeded iface -> return iface } ------------------ loadInterface :: SDoc -> Module -> WhereFrom -> IfM lcl (MaybeErr MsgDoc ModIface) -- loadInterface looks in both the HPT and PIT for the required interface -- If not found, it loads it, and puts it in the PIT (always). -- If it can't find a suitable interface file, we -- a) modify the PackageIfaceTable to have an empty entry -- (to avoid repeated complaints) -- b) return (Left message) -- -- It's not necessarily an error for there not to be an interface -- file -- perhaps the module has changed, and that interface -- is no longer used loadInterface doc_str mod from = do { -- Read the state (eps,hpt) <- getEpsAndHpt ; traceIf (text "Considering whether to load" <+> ppr mod <+> ppr from) -- Check whether we have the interface already ; dflags <- getDynFlags ; case lookupIfaceByModule dflags hpt (eps_PIT eps) mod of { Just iface -> return (Succeeded iface) ; -- Already loaded -- The (src_imp == mi_boot iface) test checks that the already-loaded -- interface isn't a boot iface. This can conceivably happen, -- if an earlier import had a before we got to real imports. I think. _ -> do { -- READ THE MODULE IN ; read_result <- case (wantHiBootFile dflags eps mod from) of Failed err -> return (Failed err) Succeeded hi_boot_file -> findAndReadIface doc_str mod hi_boot_file ; case read_result of { Failed err -> do { let fake_iface = emptyModIface mod ; updateEps_ $ \eps -> eps { eps_PIT = extendModuleEnv (eps_PIT eps) (mi_module fake_iface) fake_iface } -- Not found, so add an empty iface to -- the EPS map so that we don't look again ; return (Failed err) } ; -- Found and parsed! -- We used to have a sanity check here that looked for: -- * System importing .. -- * a home package module .. -- * that we know nothing about (mb_dep == Nothing)! -- -- But this is no longer valid because thNameToGhcName allows users to -- cause the system to load arbitrary interfaces (by supplying an appropriate -- Template Haskell original-name). Succeeded (iface, file_path) -> let loc_doc = text file_path in initIfaceLcl mod loc_doc $ do -- Load the new ModIface into the External Package State -- Even home-package interfaces loaded by loadInterface -- (which only happens in OneShot mode; in Batch/Interactive -- mode, home-package modules are loaded one by one into the HPT) -- are put in the EPS. -- -- The main thing is to add the ModIface to the PIT, but -- we also take the -- IfaceDecls, IfaceClsInst, IfaceFamInst, IfaceRules, IfaceVectInfo -- out of the ModIface and put them into the big EPS pools -- NB: *first* we do loadDecl, so that the provenance of all the locally-defined --- names is done correctly (notably, whether this is an .hi file or .hi-boot file). 
-- If we do loadExport first the wrong info gets into the cache (unless we -- explicitly tag each export which seems a bit of a bore) ; ignore_prags <- goptM Opt_IgnoreInterfacePragmas ; new_eps_decls <- loadDecls ignore_prags (mi_decls iface) ; new_eps_insts <- mapM tcIfaceInst (mi_insts iface) ; new_eps_fam_insts <- mapM tcIfaceFamInst (mi_fam_insts iface) ; new_eps_rules <- tcIfaceRules ignore_prags (mi_rules iface) ; new_eps_anns <- tcIfaceAnnotations (mi_anns iface) ; new_eps_vect_info <- tcIfaceVectInfo mod (mkNameEnv new_eps_decls) (mi_vect_info iface) ; let { final_iface = iface { mi_decls = panic "No mi_decls in PIT", mi_insts = panic "No mi_insts in PIT", mi_fam_insts = panic "No mi_fam_insts in PIT", mi_rules = panic "No mi_rules in PIT", mi_anns = panic "No mi_anns in PIT" } } ; updateEps_ $ \ eps -> if elemModuleEnv mod (eps_PIT eps) then eps else case from of -- See Note [Care with plugin imports] ImportByPlugin -> eps { eps_PIT = extendModuleEnv (eps_PIT eps) mod final_iface, eps_PTE = addDeclsToPTE (eps_PTE eps) new_eps_decls} _ -> eps { eps_PIT = extendModuleEnv (eps_PIT eps) mod final_iface, eps_PTE = addDeclsToPTE (eps_PTE eps) new_eps_decls, eps_rule_base = extendRuleBaseList (eps_rule_base eps) new_eps_rules, eps_inst_env = extendInstEnvList (eps_inst_env eps) new_eps_insts, eps_fam_inst_env = extendFamInstEnvList (eps_fam_inst_env eps) new_eps_fam_insts, eps_vect_info = plusVectInfo (eps_vect_info eps) new_eps_vect_info, eps_ann_env = extendAnnEnvList (eps_ann_env eps) new_eps_anns, eps_mod_fam_inst_env = let fam_inst_env = extendFamInstEnvList emptyFamInstEnv new_eps_fam_insts in extendModuleEnv (eps_mod_fam_inst_env eps) mod fam_inst_env, eps_stats = addEpsInStats (eps_stats eps) (length new_eps_decls) (length new_eps_insts) (length new_eps_rules) } ; return (Succeeded final_iface) }}}} wantHiBootFile :: DynFlags -> ExternalPackageState -> Module -> WhereFrom -> MaybeErr MsgDoc IsBootInterface -- Figure out whether we want Foo.hi or Foo.hi-boot wantHiBootFile dflags eps mod from = case from of ImportByUser usr_boot | usr_boot && not this_package -> Failed (badSourceImport mod) | otherwise -> Succeeded usr_boot ImportByPlugin -> Succeeded False ImportBySystem | not this_package -- If the module to be imported is not from this package -> Succeeded False -- don't look it up in eps_is_boot, because that is keyed -- on the ModuleName of *home-package* modules only. -- We never import boot modules from other packages! | otherwise -> case lookupUFM (eps_is_boot eps) (moduleName mod) of Just (_, is_boot) -> Succeeded is_boot Nothing -> Succeeded False -- The boot-ness of the requested interface, -- based on the dependencies in directly-imported modules where this_package = thisPackage dflags == moduleUnitId mod badSourceImport :: Module -> SDoc badSourceImport mod = hang (ptext (sLit "You cannot {-# SOURCE #-} import a module from another package")) 2 (ptext (sLit "but") <+> quotes (ppr mod) <+> ptext (sLit "is from package") <+> quotes (ppr (moduleUnitId mod))) {- Note [Care with plugin imports] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ When dynamically loading a plugin (via loadPluginInterface) we populate the same External Package State (EPS), even though plugin modules are to link with the compiler itself, and not with the compiled program. That's fine: mostly the EPS is just a cache for the interace files on disk. But it's NOT ok for the RULES or instance environment. 
We do not want to fire a RULE from the plugin on the code we are compiling, otherwise the code we are compiling will have a reference to a RHS of the rule that exists only in the compiler! This actually happened to Daniel, via a RULE arising from a specialisation of (^) in the plugin. Solution: when loading plugins, do not extend the rule and instance environments. We are only interested in the type environment, so that we can check that the plugin exports a function with the type that the compiler expects. -} ----------------------------------------------------- -- Loading type/class/value decls -- We pass the full Module name here, replete with -- its package info, so that we can build a Name for -- each binder with the right package info in it -- All subsequent lookups, including crucially lookups during typechecking -- the declaration itself, will find the fully-glorious Name -- -- We handle ATs specially. They are not main declarations, but also not -- implicit things (in particular, adding them to `implicitTyThings' would mess -- things up in the renaming/type checking of source programs). ----------------------------------------------------- addDeclsToPTE :: PackageTypeEnv -> [(Name,TyThing)] -> PackageTypeEnv addDeclsToPTE pte things = extendNameEnvList pte things loadDecls :: Bool -> [(Fingerprint, IfaceDecl)] -> IfL [(Name,TyThing)] loadDecls ignore_prags ver_decls = do { mod <- getIfModule ; thingss <- mapM (loadDecl ignore_prags mod) ver_decls ; return (concat thingss) } loadDecl :: Bool -- Don't load pragmas into the decl pool -> Module -> (Fingerprint, IfaceDecl) -> IfL [(Name,TyThing)] -- The list can be poked eagerly, but the -- TyThings are forkM'd thunks loadDecl ignore_prags mod (_version, decl) = do { -- Populate the name cache with final versions of all -- the names associated with the decl main_name <- lookupOrig mod (ifName decl) -- Typecheck the thing, lazily -- NB. Firstly, the laziness is there in case we never need the -- declaration (in one-shot mode), and secondly it is there so that -- we don't look up the occurrence of a name before calling mk_new_bndr -- on the binder. This is important because we must get the right name -- which includes its nameParent. ; thing <- forkM doc $ do { bumpDeclStats main_name ; tcIfaceDecl ignore_prags decl } -- Populate the type environment with the implicitTyThings too. -- -- Note [Tricky iface loop] -- ~~~~~~~~~~~~~~~~~~~~~~~~ -- Summary: The delicate point here is that 'mini-env' must be -- buildable from 'thing' without demanding any of the things -- 'forkM'd by tcIfaceDecl. -- -- In more detail: Consider the example -- data T a = MkT { x :: T a } -- The implicitTyThings of T are: [ <datacon MkT>, <selector x>] -- (plus their workers, wrappers, coercions etc etc) -- -- We want to return an environment -- [ "MkT" -> <datacon MkT>, "x" -> <selector x>, ... ] -- (where the "MkT" is the *Name* associated with MkT, etc.) -- -- We do this by mapping the implicit_names to the associated -- TyThings. By the invariant on ifaceDeclImplicitBndrs and -- implicitTyThings, we can use getOccName on the implicit -- TyThings to make this association: each Name's OccName should -- be the OccName of exactly one implicitTyThing. So the key is -- to define a "mini-env" -- -- [ 'MkT' -> <datacon MkT>, 'x' -> <selector x>, ... ] -- where the 'MkT' here is the *OccName* associated with MkT. 
-- -- However, there is a subtlety: due to how type checking needs -- to be staged, we can't poke on the forkM'd thunks inside the -- implicitTyThings while building this mini-env. -- If we poke these thunks too early, two problems could happen: -- (1) When processing mutually recursive modules across -- hs-boot boundaries, poking too early will do the -- type-checking before the recursive knot has been tied, -- so things will be type-checked in the wrong -- environment, and necessary variables won't be in -- scope. -- -- (2) Looking up one OccName in the mini_env will cause -- others to be looked up, which might cause that -- original one to be looked up again, and hence loop. -- -- The code below works because of the following invariant: -- getOccName on a TyThing does not force the suspended type -- checks in order to extract the name. For example, we don't -- poke on the "T a" type of <selector x> on the way to -- extracting <selector x>'s OccName. Of course, there is no -- reason in principle why getting the OccName should force the -- thunks, but this means we need to be careful in -- implicitTyThings and its helper functions. -- -- All a bit too finely-balanced for my liking. -- This mini-env and lookup function mediates between the --'Name's n and the map from 'OccName's to the implicit TyThings ; let mini_env = mkOccEnv [(getOccName t, t) | t <- implicitTyThings thing] lookup n = case lookupOccEnv mini_env (getOccName n) of Just thing -> thing Nothing -> pprPanic "loadDecl" (ppr main_name <+> ppr n $$ ppr (decl)) ; implicit_names <- mapM (lookupOrig mod) (ifaceDeclImplicitBndrs decl) -- ; traceIf (text "Loading decl for " <> ppr main_name $$ ppr implicit_names) ; return $ (main_name, thing) : -- uses the invariant that implicit_names and -- implicitTyThings are bijective [(n, lookup n) | n <- implicit_names] } where doc = ptext (sLit "Declaration for") <+> ppr (ifName decl) bumpDeclStats :: Name -> IfL () -- Record that one more declaration has actually been used bumpDeclStats name = do { traceIf (text "Loading decl for" <+> ppr name) ; updateEps_ (\eps -> let stats = eps_stats eps in eps { eps_stats = stats { n_decls_out = n_decls_out stats + 1 } }) } {- ********************************************************* * * \subsection{Reading an interface file} * * ********************************************************* Note [Home module load error] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ If the sought-for interface is in the current package (as determined by -package-name flag) then it jolly well should already be in the HPT because we process home-package modules in dependency order. (Except in one-shot mode; see notes with hsc_HPT decl in HscTypes). It is possible (though hard) to get this error through user behaviour. * Suppose package P (modules P1, P2) depends on package Q (modules Q1, Q2, with Q2 importing Q1) * We compile both packages. * Now we edit package Q so that it somehow depends on P * Now recompile Q with --make (without recompiling P). * Then Q1 imports, say, P1, which in turn depends on Q2. So Q2 is a home-package module which is not yet in the HPT! Disaster. This actually happened with P=base, Q=ghc-prim, via the AMP warnings. See Trac #8320. 
-} findAndReadIface :: SDoc -> Module -> IsBootInterface -- True <=> Look for a .hi-boot file -- False <=> Look for .hi file -> TcRnIf gbl lcl (MaybeErr MsgDoc (ModIface, FilePath)) -- Nothing <=> file not found, or unreadable, or illegible -- Just x <=> successfully found and parsed -- It *doesn't* add an error to the monad, because -- sometimes it's ok to fail... see notes with loadInterface findAndReadIface doc_str mod hi_boot_file = do traceIf (sep [hsep [ptext (sLit "Reading"), if hi_boot_file then ptext (sLit "[boot]") else Outputable.empty, ptext (sLit "interface for"), ppr mod <> semi], nest 4 (ptext (sLit "reason:") <+> doc_str)]) -- Check for GHC.Prim, and return its static interface if mod == gHC_PRIM then do iface <- getHooked ghcPrimIfaceHook ghcPrimIface return (Succeeded (iface, "<built in interface for GHC.Prim>")) else do dflags <- getDynFlags -- Look for the file hsc_env <- getTopEnv mb_found <- liftIO (findExactModule hsc_env mod) case mb_found of Found loc mod -> do -- Found file, so read it let file_path = addBootSuffix_maybe hi_boot_file (ml_hi_file loc) -- See Note [Home module load error] if thisPackage dflags == moduleUnitId mod && not (isOneShot (ghcMode dflags)) then return (Failed (homeModError mod loc)) else do r <- read_file file_path checkBuildDynamicToo r return r err -> do traceIf (ptext (sLit "...not found")) dflags <- getDynFlags return (Failed (cannotFindInterface dflags (moduleName mod) err)) where read_file file_path = do traceIf (ptext (sLit "readIFace") <+> text file_path) read_result <- readIface mod file_path case read_result of Failed err -> return (Failed (badIfaceFile file_path err)) Succeeded iface | mi_module iface /= mod -> return (Failed (wrongIfaceModErr iface mod file_path)) | otherwise -> return (Succeeded (iface, file_path)) -- Don't forget to fill in the package name... checkBuildDynamicToo (Succeeded (iface, filePath)) = do dflags <- getDynFlags whenGeneratingDynamicToo dflags $ withDoDynamicToo $ do let ref = canGenerateDynamicToo dflags dynFilePath = addBootSuffix_maybe hi_boot_file $ replaceExtension filePath (dynHiSuf dflags) r <- read_file dynFilePath case r of Succeeded (dynIface, _) | mi_mod_hash iface == mi_mod_hash dynIface -> return () | otherwise -> do traceIf (text "Dynamic hash doesn't match") liftIO $ writeIORef ref False Failed err -> do traceIf (text "Failed to load dynamic interface file:" $$ err) liftIO $ writeIORef ref False checkBuildDynamicToo _ = return () -- @readIface@ tries just the one file. 
readIface :: Module -> FilePath -> TcRnIf gbl lcl (MaybeErr MsgDoc ModIface) -- Failed err <=> file not found, or unreadable, or illegible -- Succeeded iface <=> successfully found and parsed readIface wanted_mod file_path = do { res <- tryMostM $ readBinIface CheckHiWay QuietBinIFaceReading file_path ; case res of Right iface | wanted_mod == actual_mod -> return (Succeeded iface) | otherwise -> return (Failed err) where actual_mod = mi_module iface err = hiModuleNameMismatchWarn wanted_mod actual_mod Left exn -> return (Failed (text (showException exn))) } {- ********************************************************* * * Wired-in interface for GHC.Prim * * ********************************************************* -} initExternalPackageState :: ExternalPackageState initExternalPackageState = EPS { eps_is_boot = emptyUFM, eps_PIT = emptyPackageIfaceTable, eps_PTE = emptyTypeEnv, eps_inst_env = emptyInstEnv, eps_fam_inst_env = emptyFamInstEnv, eps_rule_base = mkRuleBase builtinRules, -- Initialise the EPS rule pool with the built-in rules eps_mod_fam_inst_env = emptyModuleEnv, eps_vect_info = noVectInfo, eps_ann_env = emptyAnnEnv, eps_stats = EpsStats { n_ifaces_in = 0, n_decls_in = 0, n_decls_out = 0 , n_insts_in = 0, n_insts_out = 0 , n_rules_in = length builtinRules, n_rules_out = 0 } } {- ********************************************************* * * Wired-in interface for GHC.Prim * * ********************************************************* -} ghcPrimIface :: ModIface ghcPrimIface = (emptyModIface gHC_PRIM) { mi_exports = ghcPrimExports, mi_decls = [], mi_fixities = fixities, mi_fix_fn = mkIfaceFixCache fixities } where fixities = (getOccName seqId, Fixity 0 InfixR) -- seq is infixr 0 : mapMaybe mkFixity allThePrimOps mkFixity op = (,) (primOpOcc op) <$> primOpFixity op {- ********************************************************* * * \subsection{Statistics} * * ********************************************************* -} ifaceStats :: ExternalPackageState -> SDoc ifaceStats eps = hcat [text "Renamer stats: ", msg] where stats = eps_stats eps msg = vcat [int (n_ifaces_in stats) <+> text "interfaces read", hsep [ int (n_decls_out stats), text "type/class/variable imported, out of", int (n_decls_in stats), text "read"], hsep [ int (n_insts_out stats), text "instance decls imported, out of", int (n_insts_in stats), text "read"], hsep [ int (n_rules_out stats), text "rule decls imported, out of", int (n_rules_in stats), text "read"] ] {- ************************************************************************ * * Printing interfaces * * ************************************************************************ -} -- | Read binary interface, and print it out showIface :: HscEnv -> FilePath -> IO () showIface hsc_env filename = do -- skip the hi way check; we don't want to worry about profiled vs. -- non-profiled interfaces, for example. 
iface <- initTcRnIf 's' hsc_env () () $ readBinIface IgnoreHiWay TraceBinIFaceReading filename let dflags = hsc_dflags hsc_env log_action dflags dflags SevDump noSrcSpan defaultDumpStyle (pprModIface iface) pprModIface :: ModIface -> SDoc -- Show a ModIface pprModIface iface = vcat [ ptext (sLit "interface") <+> ppr (mi_module iface) <+> pp_boot <+> (if mi_orphan iface then ptext (sLit "[orphan module]") else Outputable.empty) <+> (if mi_finsts iface then ptext (sLit "[family instance module]") else Outputable.empty) <+> (if mi_hpc iface then ptext (sLit "[hpc]") else Outputable.empty) <+> integer hiVersion , nest 2 (text "interface hash:" <+> ppr (mi_iface_hash iface)) , nest 2 (text "ABI hash:" <+> ppr (mi_mod_hash iface)) , nest 2 (text "export-list hash:" <+> ppr (mi_exp_hash iface)) , nest 2 (text "orphan hash:" <+> ppr (mi_orphan_hash iface)) , nest 2 (text "flag hash:" <+> ppr (mi_flag_hash iface)) , nest 2 (text "sig of:" <+> ppr (mi_sig_of iface)) , nest 2 (text "used TH splices:" <+> ppr (mi_used_th iface)) , nest 2 (ptext (sLit "where")) , ptext (sLit "exports:") , nest 2 (vcat (map pprExport (mi_exports iface))) , pprDeps (mi_deps iface) , vcat (map pprUsage (mi_usages iface)) , vcat (map pprIfaceAnnotation (mi_anns iface)) , pprFixities (mi_fixities iface) , vcat [ppr ver $$ nest 2 (ppr decl) | (ver,decl) <- mi_decls iface] , vcat (map ppr (mi_insts iface)) , vcat (map ppr (mi_fam_insts iface)) , vcat (map ppr (mi_rules iface)) , pprVectInfo (mi_vect_info iface) , ppr (mi_warns iface) , pprTrustInfo (mi_trust iface) , pprTrustPkg (mi_trust_pkg iface) ] where pp_boot | mi_boot iface = ptext (sLit "[boot]") | otherwise = Outputable.empty {- When printing export lists, we print like this: Avail f f AvailTC C [C, x, y] C(x,y) AvailTC C [x, y] C!(x,y) -- Exporting x, y but not C -} pprExport :: IfaceExport -> SDoc pprExport (Avail n) = ppr n pprExport (AvailTC _ []) = Outputable.empty pprExport (AvailTC n (n':ns)) | n==n' = ppr n <> pp_export ns | otherwise = ppr n <> char '|' <> pp_export (n':ns) where pp_export [] = Outputable.empty pp_export names = braces (hsep (map ppr names)) pprUsage :: Usage -> SDoc pprUsage usage@UsagePackageModule{} = pprUsageImport usage usg_mod pprUsage usage@UsageHomeModule{} = pprUsageImport usage usg_mod_name $$ nest 2 ( maybe Outputable.empty (\v -> text "exports: " <> ppr v) (usg_exports usage) $$ vcat [ ppr n <+> ppr v | (n,v) <- usg_entities usage ] ) pprUsage usage@UsageFile{} = hsep [ptext (sLit "addDependentFile"), doubleQuotes (text (usg_file_path usage))] pprUsageImport :: Outputable a => Usage -> (Usage -> a) -> SDoc pprUsageImport usage usg_mod' = hsep [ptext (sLit "import"), safe, ppr (usg_mod' usage), ppr (usg_mod_hash usage)] where safe | usg_safe usage = ptext $ sLit "safe" | otherwise = ptext $ sLit " -/ " pprDeps :: Dependencies -> SDoc pprDeps (Deps { dep_mods = mods, dep_pkgs = pkgs, dep_orphs = orphs, dep_finsts = finsts }) = vcat [ptext (sLit "module dependencies:") <+> fsep (map ppr_mod mods), ptext (sLit "package dependencies:") <+> fsep (map ppr_pkg pkgs), ptext (sLit "orphans:") <+> fsep (map ppr orphs), ptext (sLit "family instance modules:") <+> fsep (map ppr finsts) ] where ppr_mod (mod_name, boot) = ppr mod_name <+> ppr_boot boot ppr_pkg (pkg,trust_req) = ppr pkg <> (if trust_req then text "*" else Outputable.empty) ppr_boot True = text "[boot]" ppr_boot False = Outputable.empty pprFixities :: [(OccName, Fixity)] -> SDoc pprFixities [] = Outputable.empty pprFixities fixes = ptext (sLit "fixities") <+> pprWithCommas pprFix 
fixes where pprFix (occ,fix) = ppr fix <+> ppr occ pprVectInfo :: IfaceVectInfo -> SDoc pprVectInfo (IfaceVectInfo { ifaceVectInfoVar = vars , ifaceVectInfoTyCon = tycons , ifaceVectInfoTyConReuse = tyconsReuse , ifaceVectInfoParallelVars = parallelVars , ifaceVectInfoParallelTyCons = parallelTyCons }) = vcat [ ptext (sLit "vectorised variables:") <+> hsep (map ppr vars) , ptext (sLit "vectorised tycons:") <+> hsep (map ppr tycons) , ptext (sLit "vectorised reused tycons:") <+> hsep (map ppr tyconsReuse) , ptext (sLit "parallel variables:") <+> hsep (map ppr parallelVars) , ptext (sLit "parallel tycons:") <+> hsep (map ppr parallelTyCons) ] pprTrustInfo :: IfaceTrustInfo -> SDoc pprTrustInfo trust = ptext (sLit "trusted:") <+> ppr trust pprTrustPkg :: Bool -> SDoc pprTrustPkg tpkg = ptext (sLit "require own pkg trusted:") <+> ppr tpkg instance Outputable Warnings where ppr = pprWarns pprWarns :: Warnings -> SDoc pprWarns NoWarnings = Outputable.empty pprWarns (WarnAll txt) = ptext (sLit "Warn all") <+> ppr txt pprWarns (WarnSome prs) = ptext (sLit "Warnings") <+> vcat (map pprWarning prs) where pprWarning (name, txt) = ppr name <+> ppr txt pprIfaceAnnotation :: IfaceAnnotation -> SDoc pprIfaceAnnotation (IfaceAnnotation { ifAnnotatedTarget = target, ifAnnotatedValue = serialized }) = ppr target <+> ptext (sLit "annotated by") <+> ppr serialized {- ********************************************************* * * \subsection{Errors} * * ********************************************************* -} badIfaceFile :: String -> SDoc -> SDoc badIfaceFile file err = vcat [ptext (sLit "Bad interface file:") <+> text file, nest 4 err] hiModuleNameMismatchWarn :: Module -> Module -> MsgDoc hiModuleNameMismatchWarn requested_mod read_mod = -- ToDo: This will fail to have enough qualification when the package IDs -- are the same withPprStyle (mkUserStyle alwaysQualify AllTheWay) $ -- we want the Modules below to be qualified with package names, -- so reset the PrintUnqualified setting. hsep [ ptext (sLit "Something is amiss; requested module ") , ppr requested_mod , ptext (sLit "differs from name found in the interface file") , ppr read_mod ] wrongIfaceModErr :: ModIface -> Module -> String -> SDoc wrongIfaceModErr iface mod_name file_path = sep [ptext (sLit "Interface file") <+> iface_file, ptext (sLit "contains module") <+> quotes (ppr (mi_module iface)) <> comma, ptext (sLit "but we were expecting module") <+> quotes (ppr mod_name), sep [ptext (sLit "Probable cause: the source code which generated"), nest 2 iface_file, ptext (sLit "has an incompatible module name") ] ] where iface_file = doubleQuotes (text file_path) homeModError :: Module -> ModLocation -> SDoc -- See Note [Home module load error] homeModError mod location = ptext (sLit "attempting to use module ") <> quotes (ppr mod) <> (case ml_hs_file location of Just file -> space <> parens (text file) Nothing -> Outputable.empty) <+> ptext (sLit "which is not loaded")
pparkkin/eta
compiler/ETA/Iface/LoadIface.hs
Haskell
bsd-3-clause
47,743
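One detail worth calling out from loadInterface above is that a failed lookup still installs an empty entry in the package interface table, so the same module is never searched for twice. A minimal standalone sketch of that negative-caching discipline, using an IORef and Data.Map with made-up names rather than the EPS machinery itself:

import Data.IORef (IORef, modifyIORef', newIORef, readIORef)
import qualified Data.Map.Strict as Map

-- Left err marks a key we already failed to load; Right v is a loaded value.
type Cache v = IORef (Map.Map String (Either String v))

loadCached :: Cache v -> (String -> IO (Either String v)) -> String -> IO (Either String v)
loadCached ref loader key = do
  cache <- readIORef ref
  case Map.lookup key cache of
    Just hit -> return hit                    -- hit: success or remembered failure
    Nothing  -> do
      res <- loader key                       -- the expensive read/parse step
      modifyIORef' ref (Map.insert key res)   -- record failures too, to avoid retrying
      return res

main :: IO ()
main = do
  ref <- newIORef Map.empty
  let loader m = do putStrLn ("loading " ++ m)
                    return (if m == "Missing" then Left "not found" else Right m)
  _ <- loadCached ref loader "Missing"
  _ <- loadCached ref loader "Missing"   -- second call is answered from the cache
  return ()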
{-# LANGUAGE OverloadedStrings #-}

import Text.XML.Expat.Tree
import Control.Monad
import Data.Text (Text)
import qualified Data.Text as T
import Data.ByteString (ByteString)
import qualified Data.ByteString.Lazy as L
import Data.Maybe

main :: IO ()
main = do
    bs <- L.readFile "ROADS.xml"
    let Element _ _ chs = parseThrowing defaultParseOptions bs :: UNode Text
    forM_ chs $ \ch -> do
        case ch of
            elt@(Element "shape" _ _) -> do
                putStrLn $ T.unpack $ fromMaybe "" $ getAttribute elt "FULL_NAME"
            _ -> return ()
sol/hexpat
test/test2.hs
Haskell
bsd-3-clause
570
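The same traversal works without a file on disk. A self-contained variant of the test above that parses an inline document instead of ROADS.xml (the XML snippet and attribute values are made up for illustration):

{-# LANGUAGE OverloadedStrings #-}

import Text.XML.Expat.Tree
import Control.Monad (forM_)
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.ByteString.Lazy.Char8 as L8
import Data.Maybe (fromMaybe)

main :: IO ()
main = do
    let doc = L8.pack "<roads><shape FULL_NAME=\"MAIN ST\"/><shape FULL_NAME=\"ELM AVE\"/></roads>"
        Element _ _ chs = parseThrowing defaultParseOptions doc :: UNode Text
    forM_ chs $ \ch ->
        case ch of
            elt@(Element "shape" _ _) ->
                putStrLn $ T.unpack $ fromMaybe "" $ getAttribute elt "FULL_NAME"
            _ -> return ()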
{-# LANGUAGE NoRebindableSyntax #-} -- | This file contains the template haskell code for deriving SubHask class instances from Base instances. -- All of the standard instances are created in "SubHask.Compatibility.Base". -- This module is exported so that you can easily make instances for your own types without any extra work. -- To do this, just put the line -- -- > deriveAll -- -- at the bottom of your file. -- Any types in scope that do not already have SubHask instances will have them created automatically. -- -- FIXME: -- Most classes aren't implemented yet. -- I don't want to go through the work until their definitions stabilize somewhat. module SubHask.TemplateHaskell.Base where import qualified Prelude as Base import qualified Control.Applicative as Base import qualified Control.Monad as Base import Language.Haskell.TH import System.IO import SubHask.Category import SubHask.Algebra import SubHask.Monad import SubHask.Internal.Prelude import Debug.Trace -------------------------------------------------------------------------------- -- We need these instances to get anything done type instance Logic Name = Bool instance Eq_ Name where (==) = (Base.==) type instance Logic Dec = Bool instance Eq_ Dec where (==) = (Base.==) type instance Logic Type = Bool instance Eq_ Type where (==) = (Base.==) -------------------------------------------------------------------------------- -- generic helper functions -- | Derives instances for all data types in scope. -- This is the only function you should need to use. -- The other functions are exported only for debugging purposes if this function should fail. deriveAll :: Q [Dec] deriveAll = Base.liftM concat $ Base.mapM go [ (''Base.Eq, mkPreludeEq) , (''Base.Functor, mkPreludeFunctor) , (''Base.Applicative,mkPreludeApplicative) , (''Base.Monad,mkPreludeMonad) ] where go (n,f) = forAllInScope n f -- | Constructs an instance using the given function for everything in scope. forAllInScope :: Name -> (Cxt -> Q Type -> Q [Dec]) -> Q [Dec] forAllInScope preludename f = do info <- reify preludename case info of ClassI _ xs -> Base.liftM concat $ Base.sequence $ map mgo $ Base.filter fgo xs where mgo (InstanceD ctx (AppT _ t) _) = f ctx (Base.return t) fgo (InstanceD _ (AppT _ t) _ ) = not elem '>' $ show t -- | This is an internal helper function. -- It prevents us from defining two instances for the same class/type pair. runIfNotInstance :: Name -> Type -> Q [Dec] -> Q [Dec] runIfNotInstance n t q = do inst <- alreadyInstance n t if inst then trace ("skipping instance: "++show n++" / "++show t) $ Base.return [] else trace ("deriving instance: "++show n++" / "++show t) $ q where alreadyInstance :: Name -> Type -> Q Bool alreadyInstance n t = do info <- reify n Base.return $ case info of ClassI _ xs -> or $ map (genericTypeEq t.rmInstanceD) xs -- FIXME: -- This function was introduced to fix a name capture problem where `Eq a` and `Eq b` are not recognized as the same type. -- The current solution is not correct, but works for some cases. 
genericTypeEq (AppT s1 t1) (AppT s2 t2) = genericTypeEq s1 s2 && genericTypeEq t1 t2 genericTypeEq (ConT n1) (ConT n2) = n1==n2 genericTypeEq (VarT _) (VarT _) = true genericTypeEq (SigT _ _) (SigT _ _) = true genericTypeEq (TupleT n1) (TupleT n2) = n1==n2 genericTypeEq ArrowT ArrowT = true genericTypeEq ListT ListT = true genericTypeEq _ _ = false rmInstanceD (InstanceD _ (AppT _ t) _) = t -------------------------------------------------------------------------------- -- comparison hierarchy -- | Create an "Eq" instance from a "Prelude.Eq" instance. mkPreludeEq :: Cxt -> Q Type -> Q [Dec] mkPreludeEq ctx qt = do t <- qt runIfNotInstance ''Eq_ t $ Base.return [ TySynInstD ( mkName "Logic" ) ( TySynEqn [ t ] ( ConT $ mkName "Bool" ) ) , InstanceD ctx ( AppT ( ConT $ mkName "Eq_" ) t ) [ FunD ( mkName "==" ) [ Clause [] (NormalB $ VarE $ mkName "Base.==") [] ] ] ] -------------------------------------------------------------------------------- -- monad hierarchy -- | Create a "Functor" instance from a "Prelude.Functor" instance. mkPreludeFunctor :: Cxt -> Q Type -> Q [Dec] mkPreludeFunctor ctx qt = do t <- qt runIfNotInstance ''Functor t $ Base.return [ InstanceD ctx ( AppT ( AppT ( ConT $ mkName "Functor" ) ( ConT $ mkName "Hask" ) ) t ) [ FunD ( mkName "fmap" ) [ Clause [] (NormalB $ VarE $ mkName "Base.fmap") [] ] ] ] -- | Create an "Applicative" instance from a "Prelude.Applicative" instance. mkPreludeApplicative :: Cxt -> Q Type -> Q [Dec] mkPreludeApplicative cxt qt = do t <- qt runIfNotInstance ''Applicative t $ Base.return [ InstanceD cxt ( AppT ( AppT ( ConT $ mkName "Applicative" ) ( ConT $ mkName "Hask" ) ) t ) [ FunD ( mkName "pure" ) [ Clause [] (NormalB $ VarE $ mkName "Base.pure") [] ] , FunD ( mkName "<*>" ) [ Clause [] (NormalB $ VarE $ mkName "Base.<*>") [] ] ] ] -- | Create a "Monad" instance from a "Prelude.Monad" instance. -- -- FIXME: -- Monad transformers still require their parameter monad to be an instance of "Prelude.Monad". mkPreludeMonad :: Cxt -> Q Type -> Q [Dec] mkPreludeMonad cxt qt = do t <- qt -- can't call -- > runIfNotInstance ''Monad t $ -- due to lack of TH support for type families trace ("deriving instance: Monad / "++show t) $ if cannotDeriveMonad t then Base.return [] else Base.return [ InstanceD cxt ( AppT ( ConT $ mkName "Then" ) t ) [ FunD ( mkName ">>" ) [ Clause [] (NormalB $ VarE $ mkName "Base.>>") [] ] ] , InstanceD -- ( ClassP ''Functor [ ConT ''Hask , t ] : cxt ) ( AppT (AppT (ConT ''Functor) (ConT ''Hask)) t : cxt ) ( AppT ( AppT ( ConT $ mkName "Monad" ) ( ConT $ mkName "Hask" ) ) t ) [ FunD ( mkName "return_" ) [ Clause [] (NormalB $ VarE $ mkName "Base.return") [] ] , FunD ( mkName "join" ) [ Clause [] (NormalB $ VarE $ mkName "Base.join" ) [] ] , FunD ( mkName ">>=" ) [ Clause [] (NormalB $ VarE $ mkName "Base.>>=" ) [] ] , FunD ( mkName ">=>" ) [ Clause [] (NormalB $ VarE $ mkName "Base.>=>" ) [] ] , FunD ( mkName "=<<" ) [ Clause [] (NormalB $ VarE $ mkName "Base.=<<" ) [] ] , FunD ( mkName "<=<" ) [ Clause [] (NormalB $ VarE $ mkName "Base.<=<" ) [] ] ] ] where -- | This helper function "filters out" monads for which we can't automatically derive an implementation. -- This failure can be due to missing Functor instances or weird type errors. 
cannotDeriveMonad t = elem (show $ getName t) badmonad where getName :: Type -> Name getName t = case t of (ConT t) -> t ListT -> mkName "[]" (SigT t _) -> getName t (AppT (ConT t) _) -> t (AppT (AppT (ConT t) _) _) -> t (AppT (AppT (AppT (ConT t) _) _) _) -> t (AppT (AppT (AppT (AppT (ConT t) _) _) _) _) -> t (AppT (AppT (AppT (AppT (AppT (ConT t) _) _) _) _) _) -> t (AppT (AppT (AppT (AppT (AppT (AppT (ConT t) _) _) _) _) _) _) -> t t -> error ("cannotDeriveMonad error="++show t) badmonad = [ "Text.ParserCombinators.ReadBase.P" , "Control.Monad.ST.Lazy.Imp.ST" , "Data.Proxy.Proxy" ]
abailly/subhask
src/SubHask/TemplateHaskell/Base.hs
Haskell
bsd-3-clause
8,576
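As a point of reference, the instance that mkPreludeEq above generates for a concrete type is the same one a user would write by hand. A sketch of the intended workflow, assuming the top-level SubHask module re-exports Eq_, Logic and Bool, and assuming a user-defined type MyT with a Prelude Eq instance (both are assumptions for illustration, not taken from this file):

{-# LANGUAGE TemplateHaskell #-}
module MyTypes where

import qualified Prelude as Base
import SubHask                           -- assumed to re-export Eq_, Logic, Bool
import SubHask.TemplateHaskell.Base

data MyT = A | B deriving (Base.Eq)

-- Hand-written equivalent of the splice mkPreludeEq produces for MyT:
--
--   type instance Logic MyT = Bool
--   instance Eq_ MyT where (==) = (Base.==)
--
-- In practice all missing instances for types in scope are generated at once,
-- as described in the module header above:
deriveAll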
{-# LANGUAGE BangPatterns, CPP, RecordWildCards, GADTs #-} module CmmLayoutStack ( cmmLayoutStack, setInfoTableStackMap ) where import GhcPrelude hiding ((<*>)) import StgCmmUtils ( callerSaveVolatileRegs ) -- XXX layering violation import StgCmmForeign ( saveThreadState, loadThreadState ) -- XXX layering violation import BasicTypes import Cmm import CmmInfo import BlockId import CLabel import CmmUtils import MkGraph import ForeignCall import CmmLive import CmmProcPoint import SMRep import Hoopl.Block import Hoopl.Collections import Hoopl.Dataflow import Hoopl.Graph import Hoopl.Label import UniqSupply import StgCmmUtils ( newTemp ) import Maybes import UniqFM import Util import DynFlags import FastString import Outputable hiding ( isEmpty ) import qualified Data.Set as Set import Control.Monad.Fix import Data.Array as Array import Data.Bits import Data.List (nub) #include "HsVersions.h" {- Note [Stack Layout] The job of this pass is to - replace references to abstract stack Areas with fixed offsets from Sp. - replace the CmmHighStackMark constant used in the stack check with the maximum stack usage of the proc. - save any variables that are live across a call, and reload them as necessary. Before stack allocation, local variables remain live across native calls (CmmCall{ cmm_cont = Just _ }), and after stack allocation local variables are clobbered by native calls. We want to do stack allocation so that as far as possible - stack use is minimized, and - unnecessary stack saves and loads are avoided. The algorithm we use is a variant of linear-scan register allocation, where the stack is our register file. We proceed in two passes, see Note [Two pass approach] for why they are not easy to merge into one. Pass 1: - First, we do a liveness analysis, which annotates every block with the variables live on entry to the block. - We traverse blocks in reverse postorder DFS; that is, we visit at least one predecessor of a block before the block itself. The stack layout flowing from the predecessor of the block will determine the stack layout on entry to the block. - We maintain a data structure Map Label StackMap which describes the contents of the stack and the stack pointer on entry to each block that is a successor of a block that we have visited. - For each block we visit: - Look up the StackMap for this block. - If this block is a proc point (or a call continuation, if we aren't splitting proc points), we need to reload all the live variables from the stack - but this is done in Pass 2, which calculates more precise liveness information (see description of Pass 2). - Walk forwards through the instructions: - At an assignment x = Sp[loc] - Record the fact that Sp[loc] contains x, so that we won't need to save x if it ever needs to be spilled. - At an assignment x = E - If x was previously on the stack, it isn't any more - At the last node, if it is a call or a jump to a proc point - Lay out the stack frame for the call (see setupStackFrame) - emit instructions to save all the live variables - Remember the StackMaps for all the successors - emit an instruction to adjust Sp - If the last node is a branch, then the current StackMap is the StackMap for the successors. - Manifest Sp: replace references to stack areas in this block with real Sp offsets. We cannot do this until we have laid out the stack area for the successors above. In this phase we also eliminate redundant stores to the stack; see elimStackStores. 
- There is one important gotcha: sometimes we'll encounter a control transfer to a block that we've already processed (a join point), and in that case we might need to rearrange the stack to match what the block is expecting. (exactly the same as in linear-scan register allocation, except here we have the luxury of an infinite supply of temporary variables). - Finally, we update the magic CmmHighStackMark constant with the stack usage of the function, and eliminate the whole stack check if there was no stack use. (in fact this is done as part of the main traversal, by feeding the high-water-mark output back in as an input. I hate cyclic programming, but it's just too convenient sometimes.) There are plenty of tricky details: update frames, proc points, return addresses, foreign calls, and some ad-hoc optimisations that are convenient to do here and effective in common cases. Comments in the code below explain these. Pass 2: - Calculate live registers, but taking into account that nothing is live at the entry to a proc point. - At each proc point and call continuation insert reloads of live registers from the stack (they were saved by Pass 1). Note [Two pass approach] The main reason for Pass 2 is being able to insert only the reloads that are needed and the fact that the two passes need different liveness information. Let's consider an example: ..... \ / D <- proc point / \ E F \ / G <- proc point | X Pass 1 needs liveness assuming that local variables are preserved across calls. This is important because it needs to save any local registers to the stack (e.g., if register a is used in block X, it must be saved before any native call). However, for Pass 2, where we want to reload registers from stack (in a proc point), this is overly conservative and would lead us to generate reloads in D for things used in X, even though we're going to generate reloads in G anyway (since it's also a proc point). So Pass 2 calculates liveness knowing that nothing is live at the entry to a proc point. This means that in D we only need to reload things used in E or F. This can be quite important, for an extreme example see testcase for #3294. Merging the two passes is not trivial - Pass 2 is a backward rewrite and Pass 1 is a forward one. Furthermore, Pass 1 is creating code that uses local registers (saving them before a call), which the liveness analysis for Pass 2 must see to be correct. -} -- All stack locations are expressed as positive byte offsets from the -- "base", which is defined to be the address above the return address -- on the stack on entry to this CmmProc. -- -- Lower addresses have higher StackLocs. -- type StackLoc = ByteOff {- A StackMap describes the stack at any given point. At a continuation it has a particular layout, like this: | | <- base |-------------| | ret0 | <- base + 8 |-------------| . upd frame . <- base + sm_ret_off |-------------| | | . vars . . (live/dead) . | | <- base + sm_sp - sm_args |-------------| | ret1 | . ret vals . <- base + sm_sp (<--- Sp points here) |-------------| Why do we include the final return address (ret0) in our stack map? I have absolutely no idea, but it seems to be done that way consistently in the rest of the code generator, so I played along here. --SDM Note that we will be constructing an info table for the continuation (ret1), which needs to describe the stack down to, but not including, the update frame (or ret0, if there is no update frame). 
-} data StackMap = StackMap { sm_sp :: StackLoc -- ^ the offset of Sp relative to the base on entry -- to this block. , sm_args :: ByteOff -- ^ the number of bytes of arguments in the area for this block -- Defn: the offset of young(L) relative to the base is given by -- (sm_sp - sm_args) of the StackMap for block L. , sm_ret_off :: ByteOff -- ^ Number of words of stack that we do not describe with an info -- table, because it contains an update frame. , sm_regs :: UniqFM (LocalReg,StackLoc) -- ^ regs on the stack } instance Outputable StackMap where ppr StackMap{..} = text "Sp = " <> int sm_sp $$ text "sm_args = " <> int sm_args $$ text "sm_ret_off = " <> int sm_ret_off $$ text "sm_regs = " <> pprUFM sm_regs ppr cmmLayoutStack :: DynFlags -> ProcPointSet -> ByteOff -> CmmGraph -> UniqSM (CmmGraph, LabelMap StackMap) cmmLayoutStack dflags procpoints entry_args graph@(CmmGraph { g_entry = entry }) = do -- We need liveness info. Dead assignments are removed later -- by the sinking pass. let liveness = cmmLocalLiveness dflags graph blocks = postorderDfs graph (final_stackmaps, _final_high_sp, new_blocks) <- mfix $ \ ~(rec_stackmaps, rec_high_sp, _new_blocks) -> layout dflags procpoints liveness entry entry_args rec_stackmaps rec_high_sp blocks blocks_with_reloads <- insertReloadsAsNeeded dflags procpoints final_stackmaps entry new_blocks new_blocks' <- mapM (lowerSafeForeignCall dflags) blocks_with_reloads return (ofBlockList entry new_blocks', final_stackmaps) -- ----------------------------------------------------------------------------- -- Pass 1 -- ----------------------------------------------------------------------------- layout :: DynFlags -> LabelSet -- proc points -> LabelMap CmmLocalLive -- liveness -> BlockId -- entry -> ByteOff -- stack args on entry -> LabelMap StackMap -- [final] stack maps -> ByteOff -- [final] Sp high water mark -> [CmmBlock] -- [in] blocks -> UniqSM ( LabelMap StackMap -- [out] stack maps , ByteOff -- [out] Sp high water mark , [CmmBlock] -- [out] new blocks ) layout dflags procpoints liveness entry entry_args final_stackmaps final_sp_high blocks = go blocks init_stackmap entry_args [] where (updfr, cont_info) = collectContInfo blocks init_stackmap = mapSingleton entry StackMap{ sm_sp = entry_args , sm_args = entry_args , sm_ret_off = updfr , sm_regs = emptyUFM } go [] acc_stackmaps acc_hwm acc_blocks = return (acc_stackmaps, acc_hwm, acc_blocks) go (b0 : bs) acc_stackmaps acc_hwm acc_blocks = do let (entry0@(CmmEntry entry_lbl tscope), middle0, last0) = blockSplit b0 let stack0@StackMap { sm_sp = sp0 } = mapFindWithDefault (pprPanic "no stack map for" (ppr entry_lbl)) entry_lbl acc_stackmaps -- (a) Update the stack map to include the effects of -- assignments in this block let stack1 = foldBlockNodesF (procMiddle acc_stackmaps) middle0 stack0 -- (b) Look at the last node and if we are making a call or -- jumping to a proc point, we must save the live -- variables, adjust Sp, and construct the StackMaps for -- each of the successor blocks. See handleLastNode for -- details. (middle1, sp_off, last1, fixup_blocks, out) <- handleLastNode dflags procpoints liveness cont_info acc_stackmaps stack1 tscope middle0 last0 -- (c) Manifest Sp: run over the nodes in the block and replace -- CmmStackSlot with CmmLoad from Sp with a concrete offset. 
-- -- our block: -- middle0 -- the original middle nodes -- middle1 -- live variable saves from handleLastNode -- Sp = Sp + sp_off -- Sp adjustment goes here -- last1 -- the last node -- let middle_pre = blockToList $ foldl blockSnoc middle0 middle1 let final_blocks = manifestSp dflags final_stackmaps stack0 sp0 final_sp_high entry0 middle_pre sp_off last1 fixup_blocks let acc_stackmaps' = mapUnion acc_stackmaps out -- If this block jumps to the GC, then we do not take its -- stack usage into account for the high-water mark. -- Otherwise, if the only stack usage is in the stack-check -- failure block itself, we will do a redundant stack -- check. The stack has a buffer designed to accommodate -- the largest amount of stack needed for calling the GC. -- this_sp_hwm | isGcJump last0 = 0 | otherwise = sp0 - sp_off hwm' = maximum (acc_hwm : this_sp_hwm : map sm_sp (mapElems out)) go bs acc_stackmaps' hwm' (final_blocks ++ acc_blocks) -- ----------------------------------------------------------------------------- -- Not foolproof, but GCFun is the culprit we most want to catch isGcJump :: CmmNode O C -> Bool isGcJump (CmmCall { cml_target = CmmReg (CmmGlobal l) }) = l == GCFun || l == GCEnter1 isGcJump _something_else = False -- ----------------------------------------------------------------------------- -- This doesn't seem right somehow. We need to find out whether this -- proc will push some update frame material at some point, so that we -- can avoid using that area of the stack for spilling. The -- updfr_space field of the CmmProc *should* tell us, but it doesn't -- (I think maybe it gets filled in later when we do proc-point -- splitting). -- -- So we'll just take the max of all the cml_ret_offs. This could be -- unnecessarily pessimistic, but probably not in the code we -- generate. collectContInfo :: [CmmBlock] -> (ByteOff, LabelMap ByteOff) collectContInfo blocks = (maximum ret_offs, mapFromList (catMaybes mb_argss)) where (mb_argss, ret_offs) = mapAndUnzip get_cont blocks get_cont :: Block CmmNode x C -> (Maybe (Label, ByteOff), ByteOff) get_cont b = case lastNode b of CmmCall { cml_cont = Just l, .. } -> (Just (l, cml_ret_args), cml_ret_off) CmmForeignCall { .. } -> (Just (succ, ret_args), ret_off) _other -> (Nothing, 0) -- ----------------------------------------------------------------------------- -- Updating the StackMap from middle nodes -- Look for loads from stack slots, and update the StackMap. This is -- purely for optimisation reasons, so that we can avoid saving a -- variable back to a different stack slot if it is already on the -- stack. -- -- This happens a lot: for example when function arguments are passed -- on the stack and need to be immediately saved across a call, we -- want to just leave them where they are on the stack. 
-- procMiddle :: LabelMap StackMap -> CmmNode e x -> StackMap -> StackMap procMiddle stackmaps node sm = case node of CmmAssign (CmmLocal r) (CmmLoad (CmmStackSlot area off) _) -> sm { sm_regs = addToUFM (sm_regs sm) r (r,loc) } where loc = getStackLoc area off stackmaps CmmAssign (CmmLocal r) _other -> sm { sm_regs = delFromUFM (sm_regs sm) r } _other -> sm getStackLoc :: Area -> ByteOff -> LabelMap StackMap -> StackLoc getStackLoc Old n _ = n getStackLoc (Young l) n stackmaps = case mapLookup l stackmaps of Nothing -> pprPanic "getStackLoc" (ppr l) Just sm -> sm_sp sm - sm_args sm + n -- ----------------------------------------------------------------------------- -- Handling stack allocation for a last node -- We take a single last node and turn it into: -- -- C1 (some statements) -- Sp = Sp + N -- C2 (some more statements) -- call f() -- the actual last node -- -- plus possibly some more blocks (we may have to add some fixup code -- between the last node and the continuation). -- -- C1: is the code for saving the variables across this last node onto -- the stack, if the continuation is a call or jumps to a proc point. -- -- C2: if the last node is a safe foreign call, we have to inject some -- extra code that goes *after* the Sp adjustment. handleLastNode :: DynFlags -> ProcPointSet -> LabelMap CmmLocalLive -> LabelMap ByteOff -> LabelMap StackMap -> StackMap -> CmmTickScope -> Block CmmNode O O -> CmmNode O C -> UniqSM ( [CmmNode O O] -- nodes to go *before* the Sp adjustment , ByteOff -- amount to adjust Sp , CmmNode O C -- new last node , [CmmBlock] -- new blocks , LabelMap StackMap -- stackmaps for the continuations ) handleLastNode dflags procpoints liveness cont_info stackmaps stack0@StackMap { sm_sp = sp0 } tscp middle last = case last of -- At each return / tail call, -- adjust Sp to point to the last argument pushed, which -- is cml_args, after popping any other junk from the stack. CmmCall{ cml_cont = Nothing, .. } -> do let sp_off = sp0 - cml_args return ([], sp_off, last, [], mapEmpty) -- At each CmmCall with a continuation: CmmCall{ cml_cont = Just cont_lbl, .. } -> return $ lastCall cont_lbl cml_args cml_ret_args cml_ret_off CmmForeignCall{ succ = cont_lbl, .. } -> do return $ lastCall cont_lbl (wORD_SIZE dflags) ret_args ret_off -- one word of args: the return address CmmBranch {} -> handleBranches CmmCondBranch {} -> handleBranches CmmSwitch {} -> handleBranches where -- Calls and ForeignCalls are handled the same way: lastCall :: BlockId -> ByteOff -> ByteOff -> ByteOff -> ( [CmmNode O O] , ByteOff , CmmNode O C , [CmmBlock] , LabelMap StackMap ) lastCall lbl cml_args cml_ret_args cml_ret_off = ( assignments , spOffsetForCall sp0 cont_stack cml_args , last , [] -- no new blocks , mapSingleton lbl cont_stack ) where (assignments, cont_stack) = prepareStack lbl cml_ret_args cml_ret_off prepareStack lbl cml_ret_args cml_ret_off | Just cont_stack <- mapLookup lbl stackmaps -- If we have already seen this continuation before, then -- we just have to make the stack look the same: = (fixupStack stack0 cont_stack, cont_stack) -- Otherwise, we have to allocate the stack frame | otherwise = (save_assignments, new_cont_stack) where (new_cont_stack, save_assignments) = setupStackFrame dflags lbl liveness cml_ret_off cml_ret_args stack0 -- For other last nodes (branches), if any of the targets is a -- proc point, we have to set up the stack to match what the proc -- point is expecting. 
-- handleBranches :: UniqSM ( [CmmNode O O] , ByteOff , CmmNode O C , [CmmBlock] , LabelMap StackMap ) handleBranches -- Note [diamond proc point] | Just l <- futureContinuation middle , (nub $ filter (`setMember` procpoints) $ successors last) == [l] = do let cont_args = mapFindWithDefault 0 l cont_info (assigs, cont_stack) = prepareStack l cont_args (sm_ret_off stack0) out = mapFromList [ (l', cont_stack) | l' <- successors last ] return ( assigs , spOffsetForCall sp0 cont_stack (wORD_SIZE dflags) , last , [] , out) | otherwise = do pps <- mapM handleBranch (successors last) let lbl_map :: LabelMap Label lbl_map = mapFromList [ (l,tmp) | (l,tmp,_,_) <- pps ] fix_lbl l = mapFindWithDefault l l lbl_map return ( [] , 0 , mapSuccessors fix_lbl last , concat [ blk | (_,_,_,blk) <- pps ] , mapFromList [ (l, sm) | (l,_,sm,_) <- pps ] ) -- For each successor of this block handleBranch :: BlockId -> UniqSM (BlockId, BlockId, StackMap, [CmmBlock]) handleBranch l -- (a) if the successor already has a stackmap, we need to -- shuffle the current stack to make it look the same. -- We have to insert a new block to make this happen. | Just stack2 <- mapLookup l stackmaps = do let assigs = fixupStack stack0 stack2 (tmp_lbl, block) <- makeFixupBlock dflags sp0 l stack2 tscp assigs return (l, tmp_lbl, stack2, block) -- (b) if the successor is a proc point, save everything -- on the stack. | l `setMember` procpoints = do let cont_args = mapFindWithDefault 0 l cont_info (stack2, assigs) = setupStackFrame dflags l liveness (sm_ret_off stack0) cont_args stack0 (tmp_lbl, block) <- makeFixupBlock dflags sp0 l stack2 tscp assigs return (l, tmp_lbl, stack2, block) -- (c) otherwise, the current StackMap is the StackMap for -- the continuation. But we must remember to remove any -- variables from the StackMap that are *not* live at -- the destination, because this StackMap might be used -- by fixupStack if this is a join point. | otherwise = return (l, l, stack1, []) where live = mapFindWithDefault (panic "handleBranch") l liveness stack1 = stack0 { sm_regs = filterUFM is_live (sm_regs stack0) } is_live (r,_) = r `elemRegSet` live makeFixupBlock :: DynFlags -> ByteOff -> Label -> StackMap -> CmmTickScope -> [CmmNode O O] -> UniqSM (Label, [CmmBlock]) makeFixupBlock dflags sp0 l stack tscope assigs | null assigs && sp0 == sm_sp stack = return (l, []) | otherwise = do tmp_lbl <- newBlockId let sp_off = sp0 - sm_sp stack maybeAddUnwind block | debugLevel dflags > 0 = block `blockSnoc` CmmUnwind [(Sp, Just unwind_val)] | otherwise = block where unwind_val = cmmOffset dflags (CmmReg spReg) (sm_sp stack) block = blockJoin (CmmEntry tmp_lbl tscope) ( maybeAddSpAdj dflags sp_off $ maybeAddUnwind $ blockFromList assigs ) (CmmBranch l) return (tmp_lbl, [block]) -- Sp is currently pointing to current_sp, -- we want it to point to -- (sm_sp cont_stack - sm_args cont_stack + args) -- so the difference is -- sp0 - (sm_sp cont_stack - sm_args cont_stack + args) spOffsetForCall :: ByteOff -> StackMap -> ByteOff -> ByteOff spOffsetForCall current_sp cont_stack args = current_sp - (sm_sp cont_stack - sm_args cont_stack + args) -- | create a sequence of assignments to establish the new StackMap, -- given the old StackMap. 
fixupStack :: StackMap -> StackMap -> [CmmNode O O] fixupStack old_stack new_stack = concatMap move new_locs where old_map = sm_regs old_stack new_locs = stackSlotRegs new_stack move (r,n) | Just (_,m) <- lookupUFM old_map r, n == m = [] | otherwise = [CmmStore (CmmStackSlot Old n) (CmmReg (CmmLocal r))] setupStackFrame :: DynFlags -> BlockId -- label of continuation -> LabelMap CmmLocalLive -- liveness -> ByteOff -- updfr -> ByteOff -- bytes of return values on stack -> StackMap -- current StackMap -> (StackMap, [CmmNode O O]) setupStackFrame dflags lbl liveness updfr_off ret_args stack0 = (cont_stack, assignments) where -- get the set of LocalRegs live in the continuation live = mapFindWithDefault Set.empty lbl liveness -- the stack from the base to updfr_off is off-limits. -- our new stack frame contains: -- * saved live variables -- * the return address [young(C) + 8] -- * the args for the call, -- which are replaced by the return values at the return -- point. -- everything up to updfr_off is off-limits -- stack1 contains updfr_off, plus everything we need to save (stack1, assignments) = allocate dflags updfr_off live stack0 -- And the Sp at the continuation is: -- sm_sp stack1 + ret_args cont_stack = stack1{ sm_sp = sm_sp stack1 + ret_args , sm_args = ret_args , sm_ret_off = updfr_off } -- ----------------------------------------------------------------------------- -- Note [diamond proc point] -- -- This special case looks for the pattern we get from a typical -- tagged case expression: -- -- Sp[young(L1)] = L1 -- if (R1 & 7) != 0 goto L1 else goto L2 -- L2: -- call [R1] returns to L1 -- L1: live: {y} -- x = R1 -- -- If we let the generic case handle this, we get -- -- Sp[-16] = L1 -- if (R1 & 7) != 0 goto L1a else goto L2 -- L2: -- Sp[-8] = y -- Sp = Sp - 16 -- call [R1] returns to L1 -- L1a: -- Sp[-8] = y -- Sp = Sp - 16 -- goto L1 -- L1: -- x = R1 -- -- The code for saving the live vars is duplicated in each branch, and -- furthermore there is an extra jump in the fast path (assuming L1 is -- a proc point, which it probably is if there is a heap check). -- -- So to fix this we want to set up the stack frame before the -- conditional jump. How do we know when to do this, and when it is -- safe? The basic idea is, when we see the assignment -- -- Sp[young(L)] = L -- -- we know that -- * we are definitely heading for L -- * there can be no more reads from another stack area, because young(L) -- overlaps with it. -- -- We don't necessarily know that everything live at L is live now -- (some might be assigned between here and the jump to L). So we -- simplify and only do the optimisation when we see -- -- (1) a block containing an assignment of a return address L -- (2) ending in a branch where one (and only) continuation goes to L, -- and no other continuations go to proc points. -- -- then we allocate the stack frame for L at the end of the block, -- before the branch. -- -- We could generalise (2), but that would make it a bit more -- complicated to handle, and this currently catches the common case. 
futureContinuation :: Block CmmNode O O -> Maybe BlockId futureContinuation middle = foldBlockNodesB f middle Nothing where f :: CmmNode a b -> Maybe BlockId -> Maybe BlockId f (CmmStore (CmmStackSlot (Young l) _) (CmmLit (CmmBlock _))) _ = Just l f _ r = r -- ----------------------------------------------------------------------------- -- Saving live registers -- | Given a set of live registers and a StackMap, save all the registers -- on the stack and return the new StackMap and the assignments to do -- the saving. -- allocate :: DynFlags -> ByteOff -> LocalRegSet -> StackMap -> (StackMap, [CmmNode O O]) allocate dflags ret_off live stackmap@StackMap{ sm_sp = sp0 , sm_regs = regs0 } = -- we only have to save regs that are not already in a slot let to_save = filter (not . (`elemUFM` regs0)) (Set.elems live) regs1 = filterUFM (\(r,_) -> elemRegSet r live) regs0 in -- make a map of the stack let stack = reverse $ Array.elems $ accumArray (\_ x -> x) Empty (1, toWords dflags (max sp0 ret_off)) $ ret_words ++ live_words where ret_words = [ (x, Occupied) | x <- [ 1 .. toWords dflags ret_off] ] live_words = [ (toWords dflags x, Occupied) | (r,off) <- nonDetEltsUFM regs1, -- See Note [Unique Determinism and code generation] let w = localRegBytes dflags r, x <- [ off, off - wORD_SIZE dflags .. off - w + 1] ] in -- Pass over the stack: find slots to save all the new live variables, -- choosing the oldest slots first (hence a foldr). let save slot ([], stack, n, assigs, regs) -- no more regs to save = ([], slot:stack, plusW dflags n 1, assigs, regs) save slot (to_save, stack, n, assigs, regs) = case slot of Occupied -> (to_save, Occupied:stack, plusW dflags n 1, assigs, regs) Empty | Just (stack', r, to_save') <- select_save to_save (slot:stack) -> let assig = CmmStore (CmmStackSlot Old n') (CmmReg (CmmLocal r)) n' = plusW dflags n 1 in (to_save', stack', n', assig : assigs, (r,(r,n')):regs) | otherwise -> (to_save, slot:stack, plusW dflags n 1, assigs, regs) -- we should do better here: right now we'll fit the smallest first, -- but it would make more sense to fit the biggest first. select_save :: [LocalReg] -> [StackSlot] -> Maybe ([StackSlot], LocalReg, [LocalReg]) select_save regs stack = go regs [] where go [] _no_fit = Nothing go (r:rs) no_fit | Just rest <- dropEmpty words stack = Just (replicate words Occupied ++ rest, r, rs++no_fit) | otherwise = go rs (r:no_fit) where words = localRegWords dflags r -- fill in empty slots as much as possible (still_to_save, save_stack, n, save_assigs, save_regs) = foldr save (to_save, [], 0, [], []) stack -- push any remaining live vars on the stack (push_sp, push_assigs, push_regs) = foldr push (n, [], []) still_to_save where push r (n, assigs, regs) = (n', assig : assigs, (r,(r,n')) : regs) where n' = n + localRegBytes dflags r assig = CmmStore (CmmStackSlot Old n') (CmmReg (CmmLocal r)) trim_sp | not (null push_regs) = push_sp | otherwise = plusW dflags n (- length (takeWhile isEmpty save_stack)) final_regs = regs1 `addListToUFM` push_regs `addListToUFM` save_regs in -- XXX should be an assert if ( n /= max sp0 ret_off ) then pprPanic "allocate" (ppr n <+> ppr sp0 <+> ppr ret_off) else if (trim_sp .&. 
(wORD_SIZE dflags - 1)) /= 0 then pprPanic "allocate2" (ppr trim_sp <+> ppr final_regs <+> ppr push_sp) else ( stackmap { sm_regs = final_regs , sm_sp = trim_sp } , push_assigs ++ save_assigs ) -- ----------------------------------------------------------------------------- -- Manifesting Sp -- | Manifest Sp: turn all the CmmStackSlots into CmmLoads from Sp. The -- block looks like this: -- -- middle_pre -- the middle nodes -- Sp = Sp + sp_off -- Sp adjustment goes here -- last -- the last node -- -- And we have some extra blocks too (that don't contain Sp adjustments) -- -- The adjustment for middle_pre will be different from that for -- middle_post, because the Sp adjustment intervenes. -- manifestSp :: DynFlags -> LabelMap StackMap -- StackMaps for other blocks -> StackMap -- StackMap for this block -> ByteOff -- Sp on entry to the block -> ByteOff -- SpHigh -> CmmNode C O -- first node -> [CmmNode O O] -- middle -> ByteOff -- sp_off -> CmmNode O C -- last node -> [CmmBlock] -- new blocks -> [CmmBlock] -- final blocks with Sp manifest manifestSp dflags stackmaps stack0 sp0 sp_high first middle_pre sp_off last fixup_blocks = final_block : fixup_blocks' where area_off = getAreaOff stackmaps adj_pre_sp, adj_post_sp :: CmmNode e x -> CmmNode e x adj_pre_sp = mapExpDeep (areaToSp dflags sp0 sp_high area_off) adj_post_sp = mapExpDeep (areaToSp dflags (sp0 - sp_off) sp_high area_off) -- Add unwind pseudo-instruction at the beginning of each block to -- document Sp level for debugging add_initial_unwind block | debugLevel dflags > 0 = CmmUnwind [(Sp, Just sp_unwind)] `blockCons` block | otherwise = block where sp_unwind = CmmRegOff spReg (sp0 - wORD_SIZE dflags) -- Add unwind pseudo-instruction right before the Sp adjustment -- if there is one. add_adj_unwind block | debugLevel dflags > 0 , sp_off /= 0 = block `blockSnoc` CmmUnwind [(Sp, Just sp_unwind)] | otherwise = block where sp_unwind = CmmRegOff spReg (sp0 - wORD_SIZE dflags - sp_off) final_middle = maybeAddSpAdj dflags sp_off . add_adj_unwind . add_initial_unwind . blockFromList . map adj_pre_sp . elimStackStores stack0 stackmaps area_off $ middle_pre final_last = optStackCheck (adj_post_sp last) final_block = blockJoin first final_middle final_last fixup_blocks' = map (mapBlock3' (id, adj_post_sp, id)) fixup_blocks getAreaOff :: LabelMap StackMap -> (Area -> StackLoc) getAreaOff _ Old = 0 getAreaOff stackmaps (Young l) = case mapLookup l stackmaps of Just sm -> sm_sp sm - sm_args sm Nothing -> pprPanic "getAreaOff" (ppr l) maybeAddSpAdj :: DynFlags -> ByteOff -> Block CmmNode O O -> Block CmmNode O O maybeAddSpAdj _ 0 block = block maybeAddSpAdj dflags sp_off block = block `blockSnoc` adj where adj = CmmAssign spReg (cmmOffset dflags (CmmReg spReg) sp_off) {- Note [SP old/young offsets] Sp(L) is the Sp offset on entry to block L relative to the base of the OLD area. SpArgs(L) is the size of the young area for L, i.e. the number of arguments. 
- in block L, each reference to [old + N] turns into [Sp + Sp(L) - N] - in block L, each reference to [young(L') + N] turns into [Sp + Sp(L) - Sp(L') + SpArgs(L') - N] - be careful with the last node of each block: Sp has already been adjusted to be Sp + Sp(L) - Sp(L') -} areaToSp :: DynFlags -> ByteOff -> ByteOff -> (Area -> StackLoc) -> CmmExpr -> CmmExpr areaToSp dflags sp_old _sp_hwm area_off (CmmStackSlot area n) = cmmOffset dflags (CmmReg spReg) (sp_old - area_off area - n) -- Replace (CmmStackSlot area n) with an offset from Sp areaToSp dflags _ sp_hwm _ (CmmLit CmmHighStackMark) = mkIntExpr dflags sp_hwm -- Replace CmmHighStackMark with the number of bytes of stack used, -- the sp_hwm. See Note [Stack usage] in StgCmmHeap areaToSp dflags _ _ _ (CmmMachOp (MO_U_Lt _) args) | falseStackCheck args = zeroExpr dflags areaToSp dflags _ _ _ (CmmMachOp (MO_U_Ge _) args) | falseStackCheck args = mkIntExpr dflags 1 -- Replace a stack-overflow test that cannot fail with a no-op -- See Note [Always false stack check] areaToSp _ _ _ _ other = other -- | Determine whether a stack check cannot fail. falseStackCheck :: [CmmExpr] -> Bool falseStackCheck [ CmmMachOp (MO_Sub _) [ CmmRegOff (CmmGlobal Sp) x_off , CmmLit (CmmInt y_lit _)] , CmmReg (CmmGlobal SpLim)] = fromIntegral x_off >= y_lit falseStackCheck _ = False -- Note [Always false stack check] -- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -- We can optimise stack checks of the form -- -- if ((Sp + x) - y < SpLim) then .. else .. -- -- where are non-negative integer byte offsets. Since we know that -- SpLim <= Sp (remember the stack grows downwards), this test must -- yield False if (x >= y), so we can rewrite the comparison to False. -- A subsequent sinking pass will later drop the dead code. -- Optimising this away depends on knowing that SpLim <= Sp, so it is -- really the job of the stack layout algorithm, hence we do it now. -- -- The control flow optimiser may negate a conditional to increase -- the likelihood of a fallthrough if the branch is not taken. But -- not every conditional is inverted as the control flow optimiser -- places some requirements on the predecessors of both branch targets. -- So we better look for the inverted comparison too. optStackCheck :: CmmNode O C -> CmmNode O C optStackCheck n = -- Note [Always false stack check] case n of CmmCondBranch (CmmLit (CmmInt 0 _)) _true false _ -> CmmBranch false CmmCondBranch (CmmLit (CmmInt _ _)) true _false _ -> CmmBranch true other -> other -- ----------------------------------------------------------------------------- -- | Eliminate stores of the form -- -- Sp[area+n] = r -- -- when we know that r is already in the same slot as Sp[area+n]. We -- could do this in a later optimisation pass, but that would involve -- a separate analysis and we already have the information to hand -- here. It helps clean up some extra stack stores in common cases. -- -- Note that we may have to modify the StackMap as we walk through the -- code using procMiddle, since an assignment to a variable in the -- StackMap will invalidate its mapping there. 
-- elimStackStores :: StackMap -> LabelMap StackMap -> (Area -> ByteOff) -> [CmmNode O O] -> [CmmNode O O] elimStackStores stackmap stackmaps area_off nodes = go stackmap nodes where go _stackmap [] = [] go stackmap (n:ns) = case n of CmmStore (CmmStackSlot area m) (CmmReg (CmmLocal r)) | Just (_,off) <- lookupUFM (sm_regs stackmap) r , area_off area + m == off -> go stackmap ns _otherwise -> n : go (procMiddle stackmaps n stackmap) ns -- ----------------------------------------------------------------------------- -- Update info tables to include stack liveness setInfoTableStackMap :: DynFlags -> LabelMap StackMap -> CmmDecl -> CmmDecl setInfoTableStackMap dflags stackmaps (CmmProc top_info@TopInfo{..} l v g) = CmmProc top_info{ info_tbls = mapMapWithKey fix_info info_tbls } l v g where fix_info lbl info_tbl@CmmInfoTable{ cit_rep = StackRep _ } = info_tbl { cit_rep = StackRep (get_liveness lbl) } fix_info _ other = other get_liveness :: BlockId -> Liveness get_liveness lbl = case mapLookup lbl stackmaps of Nothing -> pprPanic "setInfoTableStackMap" (ppr lbl <+> ppr info_tbls) Just sm -> stackMapToLiveness dflags sm setInfoTableStackMap _ _ d = d stackMapToLiveness :: DynFlags -> StackMap -> Liveness stackMapToLiveness dflags StackMap{..} = reverse $ Array.elems $ accumArray (\_ x -> x) True (toWords dflags sm_ret_off + 1, toWords dflags (sm_sp - sm_args)) live_words where live_words = [ (toWords dflags off, False) | (r,off) <- nonDetEltsUFM sm_regs , isGcPtrType (localRegType r) ] -- See Note [Unique Determinism and code generation] -- ----------------------------------------------------------------------------- -- Pass 2 -- ----------------------------------------------------------------------------- insertReloadsAsNeeded :: DynFlags -> ProcPointSet -> LabelMap StackMap -> BlockId -> [CmmBlock] -> UniqSM [CmmBlock] insertReloadsAsNeeded dflags procpoints final_stackmaps entry blocks = do toBlockList . fst <$> rewriteCmmBwd liveLattice rewriteCC (ofBlockList entry blocks) mapEmpty where rewriteCC :: RewriteFun CmmLocalLive rewriteCC (BlockCC e_node middle0 x_node) fact_base0 = do let entry_label = entryLabel e_node stackmap = case mapLookup entry_label final_stackmaps of Just sm -> sm Nothing -> panic "insertReloadsAsNeeded: rewriteCC: stackmap" -- Merge the liveness from successor blocks and analyse the last -- node. joined = gen_kill dflags x_node $! joinOutFacts liveLattice x_node fact_base0 -- What is live at the start of middle0. live_at_middle0 = foldNodesBwdOO (gen_kill dflags) middle0 joined -- If this is a procpoint we need to add the reloads, but only if -- they're actually live. Furthermore, nothing is live at the entry -- to a proc point. (middle1, live_with_reloads) | entry_label `setMember` procpoints = let reloads = insertReloads dflags stackmap live_at_middle0 in (foldr blockCons middle0 reloads, emptyRegSet) | otherwise = (middle0, live_at_middle0) -- Final liveness for this block. 
!fact_base2 = mapSingleton entry_label live_with_reloads return (BlockCC e_node middle1 x_node, fact_base2) insertReloads :: DynFlags -> StackMap -> CmmLocalLive -> [CmmNode O O] insertReloads dflags stackmap live = [ CmmAssign (CmmLocal reg) -- This cmmOffset basically corresponds to manifesting -- @CmmStackSlot Old sp_off@, see Note [SP old/young offsets] (CmmLoad (cmmOffset dflags (CmmReg spReg) (sp_off - reg_off)) (localRegType reg)) | (reg, reg_off) <- stackSlotRegs stackmap , reg `elemRegSet` live ] where sp_off = sm_sp stackmap -- ----------------------------------------------------------------------------- -- Lowering safe foreign calls {- Note [Lower safe foreign calls] We start with Sp[young(L1)] = L1 ,----------------------- | r1 = foo(x,y,z) returns to L1 '----------------------- L1: R1 = r1 -- copyIn, inserted by mkSafeCall ... the stack layout algorithm will arrange to save and reload everything live across the call. Our job now is to expand the call so we get Sp[young(L1)] = L1 ,----------------------- | SAVE_THREAD_STATE() | token = suspendThread(BaseReg, interruptible) | r = foo(x,y,z) | BaseReg = resumeThread(token) | LOAD_THREAD_STATE() | R1 = r -- copyOut | jump Sp[0] '----------------------- L1: r = R1 -- copyIn, inserted by mkSafeCall ... Note the copyOut, which saves the results in the places that L1 is expecting them (see Note [safe foreign call convention]). Note also that safe foreign call is replace by an unsafe one in the Cmm graph. -} lowerSafeForeignCall :: DynFlags -> CmmBlock -> UniqSM CmmBlock lowerSafeForeignCall dflags block | (entry@(CmmEntry _ tscp), middle, CmmForeignCall { .. }) <- blockSplit block = do -- Both 'id' and 'new_base' are KindNonPtr because they're -- RTS-only objects and are not subject to garbage collection id <- newTemp (bWord dflags) new_base <- newTemp (cmmRegType dflags (CmmGlobal BaseReg)) let (caller_save, caller_load) = callerSaveVolatileRegs dflags save_state_code <- saveThreadState dflags load_state_code <- loadThreadState dflags let suspend = save_state_code <*> caller_save <*> mkMiddle (callSuspendThread dflags id intrbl) midCall = mkUnsafeCall tgt res args resume = mkMiddle (callResumeThread new_base id) <*> -- Assign the result to BaseReg: we -- might now have a different Capability! mkAssign (CmmGlobal BaseReg) (CmmReg (CmmLocal new_base)) <*> caller_load <*> load_state_code (_, regs, copyout) = copyOutOflow dflags NativeReturn Jump (Young succ) (map (CmmReg . CmmLocal) res) ret_off [] -- NB. after resumeThread returns, the top-of-stack probably contains -- the stack frame for succ, but it might not: if the current thread -- received an exception during the call, then the stack might be -- different. Hence we continue by jumping to the top stack frame, -- not by jumping to succ. 
jump = CmmCall { cml_target = entryCode dflags $ CmmLoad (CmmReg spReg) (bWord dflags) , cml_cont = Just succ , cml_args_regs = regs , cml_args = widthInBytes (wordWidth dflags) , cml_ret_args = ret_args , cml_ret_off = ret_off } graph' <- lgraphOfAGraph ( suspend <*> midCall <*> resume <*> copyout <*> mkLast jump, tscp) case toBlockList graph' of [one] -> let (_, middle', last) = blockSplit one in return (blockJoin entry (middle `blockAppend` middle') last) _ -> panic "lowerSafeForeignCall0" -- Block doesn't end in a safe foreign call: | otherwise = return block foreignLbl :: FastString -> CmmExpr foreignLbl name = CmmLit (CmmLabel (mkForeignLabel name Nothing ForeignLabelInExternalPackage IsFunction)) callSuspendThread :: DynFlags -> LocalReg -> Bool -> CmmNode O O callSuspendThread dflags id intrbl = CmmUnsafeForeignCall (ForeignTarget (foreignLbl (fsLit "suspendThread")) (ForeignConvention CCallConv [AddrHint, NoHint] [AddrHint] CmmMayReturn)) [id] [CmmReg (CmmGlobal BaseReg), mkIntExpr dflags (fromEnum intrbl)] callResumeThread :: LocalReg -> LocalReg -> CmmNode O O callResumeThread new_base id = CmmUnsafeForeignCall (ForeignTarget (foreignLbl (fsLit "resumeThread")) (ForeignConvention CCallConv [AddrHint] [AddrHint] CmmMayReturn)) [new_base] [CmmReg (CmmLocal id)] -- ----------------------------------------------------------------------------- plusW :: DynFlags -> ByteOff -> WordOff -> ByteOff plusW dflags b w = b + w * wORD_SIZE dflags data StackSlot = Occupied | Empty -- Occupied: a return address or part of an update frame instance Outputable StackSlot where ppr Occupied = text "XXX" ppr Empty = text "---" dropEmpty :: WordOff -> [StackSlot] -> Maybe [StackSlot] dropEmpty 0 ss = Just ss dropEmpty n (Empty : ss) = dropEmpty (n-1) ss dropEmpty _ _ = Nothing isEmpty :: StackSlot -> Bool isEmpty Empty = True isEmpty _ = False localRegBytes :: DynFlags -> LocalReg -> ByteOff localRegBytes dflags r = roundUpToWords dflags (widthInBytes (typeWidth (localRegType r))) localRegWords :: DynFlags -> LocalReg -> WordOff localRegWords dflags = toWords dflags . localRegBytes dflags toWords :: DynFlags -> ByteOff -> WordOff toWords dflags x = x `quot` wORD_SIZE dflags stackSlotRegs :: StackMap -> [(LocalReg, StackLoc)] stackSlotRegs sm = nonDetEltsUFM (sm_regs sm) -- See Note [Unique Determinism and code generation]
ezyang/ghc
compiler/cmm/CmmLayoutStack.hs
Haskell
bsd-3-clause
47,277
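The offset arithmetic in the file above (getStackLoc and spOffsetForCall) is compact but easy to misread. The following standalone sketch, which is not part of GHC, re-derives the same two formulas on a toy StackMap with concrete numbers; ByteOff is modelled as a plain Int and the example figures are invented purely for illustration.

-- Toy model, not GHC code: ByteOff is a plain Int here.
type ByteOff = Int

data StackMap = StackMap
  { sm_sp   :: ByteOff   -- offset of Sp relative to the base on entry
  , sm_args :: ByteOff   -- bytes of arguments in this block's young area
  }

-- Offset of slot n in the young area of continuation L, as in getStackLoc:
-- young(L) begins at sm_sp - sm_args of L's StackMap.
youngSlot :: StackMap -> ByteOff -> ByteOff
youngSlot sm n = sm_sp sm - sm_args sm + n

-- Sp adjustment before a call, mirroring spOffsetForCall above.
spOffsetForCall :: ByteOff -> StackMap -> ByteOff -> ByteOff
spOffsetForCall current_sp cont_stack args =
  current_sp - (sm_sp cont_stack - sm_args cont_stack + args)

main :: IO ()
main = do
  let cont = StackMap { sm_sp = 24, sm_args = 8 }
  print (youngSlot cont 0)           -- 16: where the young area starts
  print (spOffsetForCall 40 cont 8)  -- 16: 40 - (24 - 8 + 8)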
-- | -- Module : Network.TLS.X509 -- License : BSD-style -- Maintainer : Vincent Hanquez <vincent@snarc.org> -- Stability : experimental -- Portability : unknown -- -- X509 helpers -- module Network.TLS.X509 ( CertificateChain(..) , Certificate(..) , SignedCertificate , getCertificate , isNullCertificateChain , getCertificateChainLeaf , CertificateRejectReason(..) , CertificateUsage(..) , CertificateStore , ValidationCache , exceptionValidationCache , validateDefault , FailedReason , ServiceID , wrapCertificateChecks ) where import Data.X509 import Data.X509.Validation import Data.X509.CertificateStore isNullCertificateChain :: CertificateChain -> Bool isNullCertificateChain (CertificateChain l) = null l getCertificateChainLeaf :: CertificateChain -> SignedExact Certificate getCertificateChainLeaf (CertificateChain []) = error "empty certificate chain" getCertificateChainLeaf (CertificateChain (x:_)) = x -- | Certificate and Chain rejection reason data CertificateRejectReason = CertificateRejectExpired | CertificateRejectRevoked | CertificateRejectUnknownCA | CertificateRejectOther String deriving (Show,Eq) -- | Certificate Usage callback possible return values. data CertificateUsage = CertificateUsageAccept -- ^ usage of certificate accepted | CertificateUsageReject CertificateRejectReason -- ^ usage of certificate rejected deriving (Show,Eq) wrapCertificateChecks :: [FailedReason] -> CertificateUsage wrapCertificateChecks [] = CertificateUsageAccept wrapCertificateChecks l | Expired `elem` l = CertificateUsageReject $ CertificateRejectExpired | InFuture `elem` l = CertificateUsageReject $ CertificateRejectExpired | UnknownCA `elem` l = CertificateUsageReject $ CertificateRejectUnknownCA | otherwise = CertificateUsageReject $ CertificateRejectOther (show l)
beni55/hs-tls
core/Network/TLS/X509.hs
Haskell
bsd-3-clause
1,995
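As a usage illustration of the module above: wrapCertificateChecks collapses the x509-validation failure list into a single verdict, with expiry taking precedence over an unknown CA. The sketch below assumes Network.TLS.X509 and Data.X509.Validation are importable as shown and that FailedReason's constructors are in scope; it is illustrative only, not part of the package.

-- Hedged usage sketch; assumes the two modules below are exposed.
import Data.X509.Validation (FailedReason(..))
import Network.TLS.X509

main :: IO ()
main = do
  print (wrapCertificateChecks [])                    -- CertificateUsageAccept
  print (wrapCertificateChecks [Expired])             -- rejected as expired
  print (wrapCertificateChecks [UnknownCA, Expired])  -- Expired is checked first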
{-# LANGUAGE Trustworthy #-} {-# LANGUAGE NoImplicitPrelude #-} ----------------------------------------------------------------------------- -- | -- Module : Data.Ord -- Copyright : (c) The University of Glasgow 2005 -- License : BSD-style (see the file libraries/base/LICENSE) -- -- Maintainer : libraries@haskell.org -- Stability : stable -- Portability : portable -- -- Orderings -- ----------------------------------------------------------------------------- module Data.Ord ( Ord(..), Ordering(..), Down(..), comparing, ) where import GHC.Base import GHC.Show import GHC.Read -- | -- > comparing p x y = compare (p x) (p y) -- -- Useful combinator for use in conjunction with the @xxxBy@ family -- of functions from "Data.List", for example: -- -- > ... sortBy (comparing fst) ... comparing :: (Ord a) => (b -> a) -> b -> b -> Ordering comparing p x y = compare (p x) (p y) -- | The 'Down' type allows you to reverse sort order conveniently. A value of type -- @'Down' a@ contains a value of type @a@ (represented as @'Down' a@). -- If @a@ has an @'Ord'@ instance associated with it then comparing two -- values thus wrapped will give you the opposite of their normal sort order. -- This is particularly useful when sorting in generalised list comprehensions, -- as in: @then sortWith by 'Down' x@ -- -- Provides 'Show' and 'Read' instances (/since: 4.7.0.0/). -- -- /Since: 4.6.0.0/ newtype Down a = Down a deriving (Eq, Show, Read) instance Ord a => Ord (Down a) where compare (Down x) (Down y) = y `compare` x
frantisekfarka/ghc-dsi
libraries/base/Data/Ord.hs
Haskell
bsd-3-clause
1,568
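A quick usage sketch of what the module above enables: wrapping a sort key in Down flips the comparison, so comparing composed with Down gives a descending sort. Standalone example, not part of base.

import Data.List (sortBy)
import Data.Ord (Down(..), comparing)

main :: IO ()
main = do
  -- descending sort of plain values
  print (sortBy (comparing Down) [3, 1, 2 :: Int])                        -- [3,2,1]
  -- descending sort on the second component of each pair
  print (sortBy (comparing (Down . snd)) [("a",2),("b",9),("c",1 :: Int)])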
<?xml version="1.0" encoding="UTF-8"?> <!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd"> <helpset version="2.0" xml:lang="sl-SI"> <title>Passive Scan Rules | ZAP Extension</title> <maps> <homeID>top</homeID> <mapref location="map.jhm"/> </maps> <view> <name>TOC</name> <label>Contents</label> <type>org.zaproxy.zap.extension.help.ZapTocView</type> <data>toc.xml</data> </view> <view> <name>Index</name> <label>Index</label> <type>javax.help.IndexView</type> <data>index.xml</data> </view> <view> <name>Search</name> <label>Search</label> <type>javax.help.SearchView</type> <data engine="com.sun.java.help.search.DefaultSearchEngine"> JavaHelpSearch </data> </view> <view> <name>Favorites</name> <label>Favorites</label> <type>javax.help.FavoritesView</type> </view> </helpset>
ccgreen13/zap-extensions
src/org/zaproxy/zap/extension/pscanrules/resources/help_sl_SI/helpset_sl_SI.hs
Haskell
apache-2.0
980
{-# LANGUAGE RankNTypes, TypeInType #-} module T11640 where import Data.Kind data HEq :: forall k1. k1 -> forall k2. k2 -> Type where
ezyang/ghc
testsuite/tests/polykinds/T11640.hs
Haskell
bsd-3-clause
137
{- (c) The AQUA Project, Glasgow University, 1993-1998 \section[SimplMonad]{The simplifier Monad} -} {-# LANGUAGE CPP #-} module SimplEnv ( InId, InBind, InExpr, InAlt, InArg, InType, InBndr, InVar, OutId, OutTyVar, OutBind, OutExpr, OutAlt, OutArg, OutType, OutBndr, OutVar, InCoercion, OutCoercion, -- The simplifier mode setMode, getMode, updMode, -- Environments SimplEnv(..), StaticEnv, pprSimplEnv, -- Temp not abstract mkSimplEnv, extendIdSubst, SimplEnv.extendTvSubst, SimplEnv.extendCvSubst, zapSubstEnv, setSubstEnv, getInScope, setInScope, setInScopeSet, modifyInScope, addNewInScopeIds, getSimplRules, SimplSR(..), mkContEx, substId, lookupRecBndr, refineFromInScope, simplNonRecBndr, simplRecBndrs, simplBinder, simplBinders, substTy, substTyVar, getTvSubst, getCvSubst, substCo, substCoVar, -- Floats Floats, emptyFloats, isEmptyFloats, addNonRec, addFloats, extendFloats, wrapFloats, setFloats, zapFloats, addRecFloats, mapFloats, doFloatFromRhs, getFloatBinds ) where #include "HsVersions.h" import SimplMonad import CoreMonad ( SimplifierMode(..) ) import CoreSyn import CoreUtils import Var import VarEnv import VarSet import OrdList import Id import MkCore ( mkWildValBinder ) import TysWiredIn import qualified Type import Type hiding ( substTy, substTyVarBndr, substTyVar ) import qualified Coercion import Coercion hiding ( substCo, substTy, substCoVar, substCoVarBndr, substTyVarBndr ) import BasicTypes import MonadUtils import Outputable import FastString import Util import Data.List {- ************************************************************************ * * \subsection[Simplify-types]{Type declarations} * * ************************************************************************ -} type InBndr = CoreBndr type InVar = Var -- Not yet cloned type InId = Id -- Not yet cloned type InType = Type -- Ditto type InBind = CoreBind type InExpr = CoreExpr type InAlt = CoreAlt type InArg = CoreArg type InCoercion = Coercion type OutBndr = CoreBndr type OutVar = Var -- Cloned type OutId = Id -- Cloned type OutTyVar = TyVar -- Cloned type OutType = Type -- Cloned type OutCoercion = Coercion type OutBind = CoreBind type OutExpr = CoreExpr type OutAlt = CoreAlt type OutArg = CoreArg {- ************************************************************************ * * \subsubsection{The @SimplEnv@ type} * * ************************************************************************ -} data SimplEnv = SimplEnv { ----------- Static part of the environment ----------- -- Static in the sense of lexically scoped, -- wrt the original expression seMode :: SimplifierMode, -- The current substitution seTvSubst :: TvSubstEnv, -- InTyVar |--> OutType seCvSubst :: CvSubstEnv, -- InCoVar |--> OutCoercion seIdSubst :: SimplIdSubst, -- InId |--> OutExpr ----------- Dynamic part of the environment ----------- -- Dynamic in the sense of describing the setup where -- the expression finally ends up -- The current set of in-scope variables -- They are all OutVars, and all bound in this module seInScope :: InScopeSet, -- OutVars only -- Includes all variables bound by seFloats seFloats :: Floats -- See Note [Simplifier floats] } type StaticEnv = SimplEnv -- Just the static part is relevant pprSimplEnv :: SimplEnv -> SDoc -- Used for debugging; selective pprSimplEnv env = vcat [ptext (sLit "TvSubst:") <+> ppr (seTvSubst env), ptext (sLit "IdSubst:") <+> ppr (seIdSubst env), ptext (sLit "InScope:") <+> vcat (map ppr_one in_scope_vars) ] where in_scope_vars = varEnvElts (getInScopeVars (seInScope env)) ppr_one v | isId v = ppr v <+> 
ppr (idUnfolding v) | otherwise = ppr v type SimplIdSubst = IdEnv SimplSR -- IdId |--> OutExpr -- See Note [Extending the Subst] in CoreSubst data SimplSR = DoneEx OutExpr -- Completed term | DoneId OutId -- Completed term variable | ContEx TvSubstEnv -- A suspended substitution CvSubstEnv SimplIdSubst InExpr instance Outputable SimplSR where ppr (DoneEx e) = ptext (sLit "DoneEx") <+> ppr e ppr (DoneId v) = ptext (sLit "DoneId") <+> ppr v ppr (ContEx _tv _cv _id e) = vcat [ptext (sLit "ContEx") <+> ppr e {-, ppr (filter_env tv), ppr (filter_env id) -}] -- where -- fvs = exprFreeVars e -- filter_env env = filterVarEnv_Directly keep env -- keep uniq _ = uniq `elemUFM_Directly` fvs {- Note [SimplEnv invariants] ~~~~~~~~~~~~~~~~~~~~~~~~~~ seInScope: The in-scope part of Subst includes *all* in-scope TyVars and Ids The elements of the set may have better IdInfo than the occurrences of in-scope Ids, and (more important) they will have a correctly-substituted type. So we use a lookup in this set to replace occurrences The Ids in the InScopeSet are replete with their Rules, and as we gather info about the unfolding of an Id, we replace it in the in-scope set. The in-scope set is actually a mapping OutVar -> OutVar, and in case expressions we sometimes bind seIdSubst: The substitution is *apply-once* only, because InIds and OutIds can overlap. For example, we generally omit mappings a77 -> a77 from the substitution, when we decide not to clone a77, but it's quite legitimate to put the mapping in the substitution anyway. Furthermore, consider let x = case k of I# x77 -> ... in let y = case k of I# x77 -> ... in ... and suppose the body is strict in both x and y. Then the simplifier will pull the first (case k) to the top; so the second (case k) will cancel out, mapping x77 to, well, x77! But one is an in-Id and the other is an out-Id. Of course, the substitution *must* applied! Things in its domain simply aren't necessarily bound in the result. * substId adds a binding (DoneId new_id) to the substitution if the Id's unique has changed Note, though that the substitution isn't necessarily extended if the type of the Id changes. Why not? Because of the next point: * We *always, always* finish by looking up in the in-scope set any variable that doesn't get a DoneEx or DoneVar hit in the substitution. Reason: so that we never finish up with a "old" Id in the result. An old Id might point to an old unfolding and so on... which gives a space leak. [The DoneEx and DoneVar hits map to "new" stuff.] * It follows that substExpr must not do a no-op if the substitution is empty. substType is free to do so, however. * When we come to a let-binding (say) we generate new IdInfo, including an unfolding, attach it to the binder, and add this newly adorned binder to the in-scope set. So all subsequent occurrences of the binder will get mapped to the full-adorned binder, which is also the one put in the binding site. * The in-scope "set" usually maps x->x; we use it simply for its domain. But sometimes we have two in-scope Ids that are synomyms, and should map to the same target: x->x, y->x. Notably: case y of x { ... } That's why the "set" is actually a VarEnv Var -} mkSimplEnv :: SimplifierMode -> SimplEnv mkSimplEnv mode = SimplEnv { seMode = mode , seInScope = init_in_scope , seFloats = emptyFloats , seTvSubst = emptyVarEnv , seCvSubst = emptyVarEnv , seIdSubst = emptyVarEnv } -- The top level "enclosing CC" is "SUBSUMED". 
init_in_scope :: InScopeSet init_in_scope = mkInScopeSet (unitVarSet (mkWildValBinder unitTy)) -- See Note [WildCard binders] {- Note [WildCard binders] ~~~~~~~~~~~~~~~~~~~~~~~ The program to be simplified may have wild binders case e of wild { p -> ... } We want to *rename* them away, so that there are no occurrences of 'wild-id' (with wildCardKey). The easy way to do that is to start of with a representative Id in the in-scope set There can be be *occurrences* of wild-id. For example, MkCore.mkCoreApp transforms e (a /# b) --> case (a /# b) of wild { DEFAULT -> e wild } This is ok provided 'wild' isn't free in 'e', and that's the delicate thing. Generally, you want to run the simplifier to get rid of the wild-ids before doing much else. It's a very dark corner of GHC. Maybe it should be cleaned up. -} getMode :: SimplEnv -> SimplifierMode getMode env = seMode env setMode :: SimplifierMode -> SimplEnv -> SimplEnv setMode mode env = env { seMode = mode } updMode :: (SimplifierMode -> SimplifierMode) -> SimplEnv -> SimplEnv updMode upd env = env { seMode = upd (seMode env) } --------------------- extendIdSubst :: SimplEnv -> Id -> SimplSR -> SimplEnv extendIdSubst env@(SimplEnv {seIdSubst = subst}) var res = ASSERT2( isId var && not (isCoVar var), ppr var ) env {seIdSubst = extendVarEnv subst var res} extendTvSubst :: SimplEnv -> TyVar -> Type -> SimplEnv extendTvSubst env@(SimplEnv {seTvSubst = subst}) var res = env {seTvSubst = extendVarEnv subst var res} extendCvSubst :: SimplEnv -> CoVar -> Coercion -> SimplEnv extendCvSubst env@(SimplEnv {seCvSubst = subst}) var res = env {seCvSubst = extendVarEnv subst var res} --------------------- getInScope :: SimplEnv -> InScopeSet getInScope env = seInScope env setInScopeSet :: SimplEnv -> InScopeSet -> SimplEnv setInScopeSet env in_scope = env {seInScope = in_scope} setInScope :: SimplEnv -> SimplEnv -> SimplEnv -- Set the in-scope set, and *zap* the floats setInScope env env_with_scope = env { seInScope = seInScope env_with_scope, seFloats = emptyFloats } setFloats :: SimplEnv -> SimplEnv -> SimplEnv -- Set the in-scope set *and* the floats setFloats env env_with_floats = env { seInScope = seInScope env_with_floats, seFloats = seFloats env_with_floats } addNewInScopeIds :: SimplEnv -> [CoreBndr] -> SimplEnv -- The new Ids are guaranteed to be freshly allocated addNewInScopeIds env@(SimplEnv { seInScope = in_scope, seIdSubst = id_subst }) vs = env { seInScope = in_scope `extendInScopeSetList` vs, seIdSubst = id_subst `delVarEnvList` vs } -- Why delete? Consider -- let x = a*b in (x, \x -> x+3) -- We add [x |-> a*b] to the substitution, but we must -- _delete_ it from the substitution when going inside -- the (\x -> ...)! 
modifyInScope :: SimplEnv -> CoreBndr -> SimplEnv -- The variable should already be in scope, but -- replace the existing version with this new one -- which has more information modifyInScope env@(SimplEnv {seInScope = in_scope}) v = env {seInScope = extendInScopeSet in_scope v} --------------------- zapSubstEnv :: SimplEnv -> SimplEnv zapSubstEnv env = env {seTvSubst = emptyVarEnv, seCvSubst = emptyVarEnv, seIdSubst = emptyVarEnv} setSubstEnv :: SimplEnv -> TvSubstEnv -> CvSubstEnv -> SimplIdSubst -> SimplEnv setSubstEnv env tvs cvs ids = env { seTvSubst = tvs, seCvSubst = cvs, seIdSubst = ids } mkContEx :: SimplEnv -> InExpr -> SimplSR mkContEx (SimplEnv { seTvSubst = tvs, seCvSubst = cvs, seIdSubst = ids }) e = ContEx tvs cvs ids e {- ************************************************************************ * * \subsection{Floats} * * ************************************************************************ Note [Simplifier floats] ~~~~~~~~~~~~~~~~~~~~~~~~~ The Floats is a bunch of bindings, classified by a FloatFlag. * All of them satisfy the let/app invariant Examples NonRec x (y:ys) FltLifted Rec [(x,rhs)] FltLifted NonRec x* (p:q) FltOKSpec -- RHS is WHNF. Question: why not FltLifted? NonRec x# (y +# 3) FltOkSpec -- Unboxed, but ok-for-spec'n NonRec x* (f y) FltCareful -- Strict binding; might fail or diverge Can't happen: NonRec x# (a /# b) -- Might fail; does not satisfy let/app NonRec x# (f y) -- Might diverge; does not satisfy let/app -} data Floats = Floats (OrdList OutBind) FloatFlag -- See Note [Simplifier floats] data FloatFlag = FltLifted -- All bindings are lifted and lazy -- Hence ok to float to top level, or recursive | FltOkSpec -- All bindings are FltLifted *or* -- strict (perhaps because unlifted, -- perhaps because of a strict binder), -- *and* ok-for-speculation -- Hence ok to float out of the RHS -- of a lazy non-recursive let binding -- (but not to top level, or into a rec group) | FltCareful -- At least one binding is strict (or unlifted) -- and not guaranteed cheap -- Do not float these bindings out of a lazy let instance Outputable Floats where ppr (Floats binds ff) = ppr ff $$ ppr (fromOL binds) instance Outputable FloatFlag where ppr FltLifted = ptext (sLit "FltLifted") ppr FltOkSpec = ptext (sLit "FltOkSpec") ppr FltCareful = ptext (sLit "FltCareful") andFF :: FloatFlag -> FloatFlag -> FloatFlag andFF FltCareful _ = FltCareful andFF FltOkSpec FltCareful = FltCareful andFF FltOkSpec _ = FltOkSpec andFF FltLifted flt = flt doFloatFromRhs :: TopLevelFlag -> RecFlag -> Bool -> OutExpr -> SimplEnv -> Bool -- If you change this function look also at FloatIn.noFloatFromRhs doFloatFromRhs lvl rec str rhs (SimplEnv {seFloats = Floats fs ff}) = not (isNilOL fs) && want_to_float && can_float where want_to_float = isTopLevel lvl || exprIsCheap rhs || exprIsExpandable rhs -- See Note [Float when cheap or expandable] can_float = case ff of FltLifted -> True FltOkSpec -> isNotTopLevel lvl && isNonRec rec FltCareful -> isNotTopLevel lvl && isNonRec rec && str {- Note [Float when cheap or expandable] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We want to float a let from a let if the residual RHS is a) cheap, such as (\x. blah) b) expandable, such as (f b) if f is CONLIKE But there are - cheap things that are not expandable (eg \x. expensive) - expandable things that are not cheap (eg (f b) where b is CONLIKE) so we must take the 'or' of the two. 
-} emptyFloats :: Floats emptyFloats = Floats nilOL FltLifted unitFloat :: OutBind -> Floats -- This key function constructs a singleton float with the right form unitFloat bind = Floats (unitOL bind) (flag bind) where flag (Rec {}) = FltLifted flag (NonRec bndr rhs) | not (isStrictId bndr) = FltLifted | exprOkForSpeculation rhs = FltOkSpec -- Unlifted, and lifted but ok-for-spec (eg HNF) | otherwise = ASSERT2( not (isUnLiftedType (idType bndr)), ppr bndr ) FltCareful -- Unlifted binders can only be let-bound if exprOkForSpeculation holds addNonRec :: SimplEnv -> OutId -> OutExpr -> SimplEnv -- Add a non-recursive binding and extend the in-scope set -- The latter is important; the binder may already be in the -- in-scope set (although it might also have been created with newId) -- but it may now have more IdInfo addNonRec env id rhs = id `seq` -- This seq forces the Id, and hence its IdInfo, -- and hence any inner substitutions env { seFloats = seFloats env `addFlts` unitFloat (NonRec id rhs), seInScope = extendInScopeSet (seInScope env) id } extendFloats :: SimplEnv -> OutBind -> SimplEnv -- Add these bindings to the floats, and extend the in-scope env too extendFloats env bind = env { seFloats = seFloats env `addFlts` unitFloat bind, seInScope = extendInScopeSetList (seInScope env) bndrs } where bndrs = bindersOf bind addFloats :: SimplEnv -> SimplEnv -> SimplEnv -- Add the floats for env2 to env1; -- *plus* the in-scope set for env2, which is bigger -- than that for env1 addFloats env1 env2 = env1 {seFloats = seFloats env1 `addFlts` seFloats env2, seInScope = seInScope env2 } addFlts :: Floats -> Floats -> Floats addFlts (Floats bs1 l1) (Floats bs2 l2) = Floats (bs1 `appOL` bs2) (l1 `andFF` l2) zapFloats :: SimplEnv -> SimplEnv zapFloats env = env { seFloats = emptyFloats } addRecFloats :: SimplEnv -> SimplEnv -> SimplEnv -- Flattens the floats from env2 into a single Rec group, -- prepends the floats from env1, and puts the result back in env2 -- This is all very specific to the way recursive bindings are -- handled; see Simplify.simplRecBind addRecFloats env1 env2@(SimplEnv {seFloats = Floats bs ff}) = ASSERT2( case ff of { FltLifted -> True; _ -> False }, ppr (fromOL bs) ) env2 {seFloats = seFloats env1 `addFlts` unitFloat (Rec (flattenBinds (fromOL bs)))} wrapFloats :: SimplEnv -> OutExpr -> OutExpr -- Wrap the floats around the expression; they should all -- satisfy the let/app invariant, so mkLets should do the job just fine wrapFloats (SimplEnv {seFloats = Floats bs _}) body = foldrOL Let body bs getFloatBinds :: SimplEnv -> [CoreBind] getFloatBinds (SimplEnv {seFloats = Floats bs _}) = fromOL bs isEmptyFloats :: SimplEnv -> Bool isEmptyFloats (SimplEnv {seFloats = Floats bs _}) = isNilOL bs mapFloats :: SimplEnv -> ((Id,CoreExpr) -> (Id,CoreExpr)) -> SimplEnv mapFloats env@SimplEnv { seFloats = Floats fs ff } fun = env { seFloats = Floats (mapOL app fs) ff } where app (NonRec b e) = case fun (b,e) of (b',e') -> NonRec b' e' app (Rec bs) = Rec (map fun bs) {- ************************************************************************ * * Substitution of Vars * * ************************************************************************ Note [Global Ids in the substitution] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We look up even a global (eg imported) Id in the substitution. Consider case X.g_34 of b { (a,b) -> ... case X.g_34 of { (p,q) -> ...} ... 
} The binder-swap in the occurrence analyser will add a binding for a LocalId version of g (with the same unique though): case X.g_34 of b { (a,b) -> let g_34 = b in ... case X.g_34 of { (p,q) -> ...} ... } So we want to look up the inner X.g_34 in the substitution, where we'll find that it has been substituted by b. (Or conceivably cloned.) -} substId :: SimplEnv -> InId -> SimplSR -- Returns DoneEx only on a non-Var expression substId (SimplEnv { seInScope = in_scope, seIdSubst = ids }) v = case lookupVarEnv ids v of -- Note [Global Ids in the substitution] Nothing -> DoneId (refineFromInScope in_scope v) Just (DoneId v) -> DoneId (refineFromInScope in_scope v) Just (DoneEx (Var v)) -> DoneId (refineFromInScope in_scope v) Just res -> res -- DoneEx non-var, or ContEx -- Get the most up-to-date thing from the in-scope set -- Even though it isn't in the substitution, it may be in -- the in-scope set with better IdInfo refineFromInScope :: InScopeSet -> Var -> Var refineFromInScope in_scope v | isLocalId v = case lookupInScope in_scope v of Just v' -> v' Nothing -> WARN( True, ppr v ) v -- This is an error! | otherwise = v lookupRecBndr :: SimplEnv -> InId -> OutId -- Look up an Id which has been put into the envt by simplRecBndrs, -- but where we have not yet done its RHS lookupRecBndr (SimplEnv { seInScope = in_scope, seIdSubst = ids }) v = case lookupVarEnv ids v of Just (DoneId v) -> v Just _ -> pprPanic "lookupRecBndr" (ppr v) Nothing -> refineFromInScope in_scope v {- ************************************************************************ * * \section{Substituting an Id binder} * * ************************************************************************ These functions are in the monad only so that they can be made strict via seq. -} simplBinders :: SimplEnv -> [InBndr] -> SimplM (SimplEnv, [OutBndr]) simplBinders env bndrs = mapAccumLM simplBinder env bndrs ------------- simplBinder :: SimplEnv -> InBndr -> SimplM (SimplEnv, OutBndr) -- Used for lambda and case-bound variables -- Clone Id if necessary, substitute type -- Return with IdInfo already substituted, but (fragile) occurrence info zapped -- The substitution is extended only if the variable is cloned, because -- we *don't* need to use it to track occurrence info. 
simplBinder env bndr | isTyVar bndr = do { let (env', tv) = substTyVarBndr env bndr ; seqTyVar tv `seq` return (env', tv) } | otherwise = do { let (env', id) = substIdBndr env bndr ; seqId id `seq` return (env', id) } --------------- simplNonRecBndr :: SimplEnv -> InBndr -> SimplM (SimplEnv, OutBndr) -- A non-recursive let binder simplNonRecBndr env id = do { let (env1, id1) = substIdBndr env id ; seqId id1 `seq` return (env1, id1) } --------------- simplRecBndrs :: SimplEnv -> [InBndr] -> SimplM SimplEnv -- Recursive let binders simplRecBndrs env@(SimplEnv {}) ids = do { let (env1, ids1) = mapAccumL substIdBndr env ids ; seqIds ids1 `seq` return env1 } --------------- substIdBndr :: SimplEnv -> InBndr -> (SimplEnv, OutBndr) -- Might be a coercion variable substIdBndr env bndr | isCoVar bndr = substCoVarBndr env bndr | otherwise = substNonCoVarIdBndr env bndr --------------- substNonCoVarIdBndr :: SimplEnv -> InBndr -- Env and binder to transform -> (SimplEnv, OutBndr) -- Clone Id if necessary, substitute its type -- Return an Id with its -- * Type substituted -- * UnfoldingInfo, Rules, WorkerInfo zapped -- * Fragile OccInfo (only) zapped: Note [Robust OccInfo] -- * Robust info, retained especially arity and demand info, -- so that they are available to occurrences that occur in an -- earlier binding of a letrec -- -- For the robust info, see Note [Arity robustness] -- -- Augment the substitution if the unique changed -- Extend the in-scope set with the new Id -- -- Similar to CoreSubst.substIdBndr, except that -- the type of id_subst differs -- all fragile info is zapped substNonCoVarIdBndr env@(SimplEnv { seInScope = in_scope, seIdSubst = id_subst }) old_id = ASSERT2( not (isCoVar old_id), ppr old_id ) (env { seInScope = in_scope `extendInScopeSet` new_id, seIdSubst = new_subst }, new_id) where id1 = uniqAway in_scope old_id id2 = substIdType env id1 new_id = zapFragileIdInfo id2 -- Zaps rules, worker-info, unfolding -- and fragile OccInfo -- Extend the substitution if the unique has changed, -- or there's some useful occurrence information -- See the notes with substTyVarBndr for the delSubstEnv new_subst | new_id /= old_id = extendVarEnv id_subst old_id (DoneId new_id) | otherwise = delVarEnv id_subst old_id ------------------------------------ seqTyVar :: TyVar -> () seqTyVar b = b `seq` () seqId :: Id -> () seqId id = seqType (idType id) `seq` idInfo id `seq` () seqIds :: [Id] -> () seqIds [] = () seqIds (id:ids) = seqId id `seq` seqIds ids {- Note [Arity robustness] ~~~~~~~~~~~~~~~~~~~~~~~ We *do* transfer the arity from from the in_id of a let binding to the out_id. This is important, so that the arity of an Id is visible in its own RHS. For example: f = \x. ....g (\y. f y).... We can eta-reduce the arg to g, because f is a value. But that needs to be visible. This interacts with the 'state hack' too: f :: Bool -> IO Int f = \x. case x of True -> f y False -> \s -> ... Can we eta-expand f? Only if we see that f has arity 1, and then we take advantage of the 'state hack' on the result of (f y) :: State# -> (State#, Int) to expand the arity one more. There is a disadvantage though. Making the arity visible in the RHS allows us to eta-reduce f = \x -> f x to f = f which technically is not sound. This is very much a corner case, so I'm not worried about it. Another idea is to ensure that f's arity never decreases; its arity started as 1, and we should never eta-reduce below that. 
Note [Robust OccInfo] ~~~~~~~~~~~~~~~~~~~~~ It's important that we *do* retain the loop-breaker OccInfo, because that's what stops the Id getting inlined infinitely, in the body of the letrec. -} {- ************************************************************************ * * Impedence matching to type substitution * * ************************************************************************ -} getTvSubst :: SimplEnv -> TvSubst getTvSubst (SimplEnv { seInScope = in_scope, seTvSubst = tv_env }) = mkTvSubst in_scope tv_env getCvSubst :: SimplEnv -> CvSubst getCvSubst (SimplEnv { seInScope = in_scope, seTvSubst = tv_env, seCvSubst = cv_env }) = CvSubst in_scope tv_env cv_env substTy :: SimplEnv -> Type -> Type substTy env ty = Type.substTy (getTvSubst env) ty substTyVar :: SimplEnv -> TyVar -> Type substTyVar env tv = Type.substTyVar (getTvSubst env) tv substTyVarBndr :: SimplEnv -> TyVar -> (SimplEnv, TyVar) substTyVarBndr env tv = case Type.substTyVarBndr (getTvSubst env) tv of (TvSubst in_scope' tv_env', tv') -> (env { seInScope = in_scope', seTvSubst = tv_env' }, tv') substCoVar :: SimplEnv -> CoVar -> Coercion substCoVar env tv = Coercion.substCoVar (getCvSubst env) tv substCoVarBndr :: SimplEnv -> CoVar -> (SimplEnv, CoVar) substCoVarBndr env cv = case Coercion.substCoVarBndr (getCvSubst env) cv of (CvSubst in_scope' tv_env' cv_env', cv') -> (env { seInScope = in_scope', seTvSubst = tv_env', seCvSubst = cv_env' }, cv') substCo :: SimplEnv -> Coercion -> Coercion substCo env co = Coercion.substCo (getCvSubst env) co ------------------ substIdType :: SimplEnv -> Id -> Id substIdType (SimplEnv { seInScope = in_scope, seTvSubst = tv_env }) id | isEmptyVarEnv tv_env || isEmptyVarSet (tyVarsOfType old_ty) = id | otherwise = Id.setIdType id (Type.substTy (TvSubst in_scope tv_env) old_ty) -- The tyVarsOfType is cheaper than it looks -- because we cache the free tyvars of the type -- in a Note in the id's type itself where old_ty = idType id
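-- A self-contained illustration (plain association lists, not GHC's VarEnv or
-- TvSubst machinery) of the cheap-exit pattern substIdType uses above: do no
-- work when there is nothing to substitute or the target mentions none of the
-- mapped variables.  `applyIfNeeded` is an illustrative name, not a GHC function.
applyIfNeeded :: [(String, String)] -> [String] -> [String]
applyIfNeeded sub vars
  | null sub || not (any (\v -> any ((== v) . fst) sub) vars) = vars
  | otherwise = map (\v -> maybe v id (lookup v sub)) vars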
urbanslug/ghc
compiler/simplCore/SimplEnv.hs
Haskell
bsd-3-clause
27,858
{-# LANGUAGE DeriveDataTypeable #-}
module UnitTests.Options ( OptionShowSolverLog(..)
                         , OptionMtimeChangeDelay(..)
                         , extraOptions )
       where

import Data.Proxy
import Data.Typeable

import Test.Tasty.Options

{-------------------------------------------------------------------------------
  Test options
-------------------------------------------------------------------------------}

extraOptions :: [OptionDescription]
extraOptions =
  [ Option (Proxy :: Proxy OptionShowSolverLog)
  , Option (Proxy :: Proxy OptionMtimeChangeDelay)
  ]

newtype OptionShowSolverLog = OptionShowSolverLog Bool
  deriving Typeable

instance IsOption OptionShowSolverLog where
  defaultValue   = OptionShowSolverLog False
  parseValue     = fmap OptionShowSolverLog . safeRead
  optionName     = return "show-solver-log"
  optionHelp     = return "Show full log from the solver"
  optionCLParser = flagCLParser Nothing (OptionShowSolverLog True)

newtype OptionMtimeChangeDelay = OptionMtimeChangeDelay Int
  deriving Typeable

instance IsOption OptionMtimeChangeDelay where
  defaultValue   = OptionMtimeChangeDelay 0
  parseValue     = fmap OptionMtimeChangeDelay . safeRead
  optionName     = return "mtime-change-delay"
  optionHelp     = return $ "How long to wait before attempting to detect "
                         ++ "file modification, in microseconds"
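-- A minimal sketch (not cabal's actual test driver) of how options declared
-- this way are typically wired into a tasty main and read back inside the test
-- tree; the driver below and its test names are hypothetical.
import Test.Tasty (TestTree, askOption, defaultIngredients, defaultMainWithIngredients, testGroup)
import Test.Tasty.Ingredients.Basic (includingOptions)

main :: IO ()
main = defaultMainWithIngredients
         (includingOptions extraOptions : defaultIngredients)
         tests

tests :: TestTree
tests = askOption $ \(OptionMtimeChangeDelay delay) ->
          testGroup ("tests run with an mtime delay of " ++ show delay ++ " us") []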
mydaum/cabal
cabal-install/tests/UnitTests/Options.hs
Haskell
bsd-3-clause
1,399
{-# LANGUAGE TypeFamilies, LiberalTypeSynonyms #-} -- ^ crucial for exercising the code paths to be -- tested here module ShouldCompile where type family Element c :: * f :: x -> Element x f x = undefined
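-- Illustrative client code (not part of the GHC test itself): an open-family
-- instance gives `f` a concrete result type at that index.
type instance Element [a] = a

example :: Char
example = f "abc"   -- type-checks via Element [Char] ~ Char; evaluating it
                    -- would hit f's undefined body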
forked-upstream-packages-for-ghcjs/ghc
testsuite/tests/indexed-types/should_compile/Simple19.hs
Haskell
bsd-3-clause
264
module Annfail12 where -- Testing errors hidden in annotations {-# ANN f (error "You were meant to see this error!" :: Int) #-} f x = x
wxwxwwxxx/ghc
testsuite/tests/annotations/should_fail/annfail12.hs
Haskell
bsd-3-clause
137
{-# LANGUAGE FlexibleInstances #-} module Text.Pin ( Pin(..), tag, empty, simple, isSelf, fromName ) where import Control.Applicative hiding ( many, (<|>), empty, optional ) import Control.Name import Data.Either import Data.List hiding ( find ) import Data.Set ( Set, fromList ) import Data.String.Utils ( strip ) import Text.ParserCombinators.Parsec import Text.ParserCombinators.TagWiki import Text.Printf import Text.Utils import qualified Control.Modifier as Mods import qualified Data.Set as Set import qualified Text.Tag as Tag -- A reference to another file and/or event data Pin = Pin { categories :: Set String , qualifiers :: Set Pin , text :: String -- The original text, for display } tag :: Pin -> String -- The normalized text, for checking equality tag = slugify . text empty :: Pin empty = simple "" simple :: String -> Pin simple = Pin Set.empty Set.empty isSelf :: Pin -> Bool isSelf = (== "") . text fromName :: Name -> Pin fromName = simple . namePart instance Eq Pin where x == y = tag x == tag y && categories x == categories y && qualifiers x == qualifiers y instance Ord Pin where x <= y = tag x <= tag y && categories x <= categories y && qualifiers x <= qualifiers y instance Parseable Pin where parser = do (names, mods) <- partitionEithers <$> many pinPart -- We need at least one name. -- If we failed to pick up a name in pinPart, pick one up now. -- This will probably cause an error, but at least it will be -- the 'right' "no name" error. name <- if null names then Tag.tag else pure $ unwords names pure Pin{ text = name , categories = fromList $ Mods.categories mods , qualifiers = fromList $ Mods.qualifiers mods } where pinPart = try (Left <$> Tag.tag) <|> (Right <$> Mods.catOrQual) <?> "text, category, or qualifier" instance Show Pin where show (Pin cs qs t) = printf "%s%s%s" (strip t) cstr qstr where cstr = if null cs' then "" else " #" ++ intercalate " #" cs' qstr = if null qs' then "" else " (" ++ intercalate ") (" qs' ++ ")" cs' = Set.toList cs qs' = map show $ Set.toList qs
Soares/tagwiki
src/Text/Pin.hs
Haskell
mit
2,306
{-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE BangPatterns #-} module Data.Frame.Types ( Val(..), Type(..), subsumes, subsume, like, typeVal, lub ) where import Data.Data -- import Data.DateTime import Data.Text (Text, pack) import Data.Frame.Internal (Default(..)) import Control.DeepSeq (NFData(..)) ------------------------------------------------------------------------------- -- Types ------------------------------------------------------------------------------- -- Columns types have a subsumption rule which dictates when we upcast the type of the values in column. If we -- have a column of Int values with a single String element in the middle of the data then then we upcast to -- String. If the user specifes (Maybe a) type for the column then the column treats mismatched values as -- missing values. -- -- a <: a -- a <: b |- Maybe a <: Maybe b -- Double <: String -- Bool <: String -- Datetime <: String -- Int <: Double subsumes :: Type -> Type -> Bool subsumes (MT a) b = subsumes a b subsumes ST DT = True subsumes ST BT = True subsumes ST IT = True subsumes ST TT = True subsumes DT IT = True subsumes _ Any = True subsumes a b = a == b subsume :: Type -> Val -> Val subsume ST v = case v of D x -> S (pack $ show x) I x -> S (pack $ show x) S x -> S x B True -> S (pack "true") B False -> S (pack "false") -- T x -> S (pack $ show x) {-M x -> error "maybe case"-} subsume DT v = case v of D x -> D x I x -> D (fromIntegral x) {-M x -> error "maybe case"-} subsume IT v = case v of I x -> I x M x -> error "maybe case" subsume BT v = case v of B x -> B x subsume (MT IT) v = case v of I x -> I x _ -> NA subsume (MT DT) v = case v of D x -> D x I x -> D (fromIntegral x) _ -> NA subsume (MT ST) v = case v of S x -> S x _ -> NA subsume (MT BT) v = case v of B x -> B x _ -> NA subsume (MT Any) v = NA like :: Val -> Val -> Bool like (D _) (D _) = True like (I _) (I _) = True like (S _) (S _) = True like (B _) (B _) = True -- like (T _) (T _) = True like (M (Just a)) (M (Just b)) = like a b like (M (Just _)) (M Nothing) = True like (M (Nothing)) (M (Just _)) = True like (M (Nothing)) (M Nothing) = True like _ _ = False data Type = DT | IT | ST | BT | MT Type | TT | Any deriving (Eq, Show, Ord) -- Heterogeneous value data Val = D {-# UNPACK #-} !Double | I {-# UNPACK #-} !Int | S {-# UNPACK #-} !Text | B !Bool | M !(Maybe Val) -- | T !DateTime | NA deriving (Eq, Show, Ord, Data, Typeable) instance NFData Val where rnf (D _) = () rnf (I _) = () rnf (S _) = () rnf (B a) = rnf a rnf (M a) = rnf a -- rnf (T a) = rnf a rnf NA = () typeVal :: Val -> Type typeVal (D _) = DT typeVal (I _) = IT typeVal (S _) = ST typeVal (B _) = BT -- typeVal (T _) = TT typeVal (M (Just t)) = MT (typeVal t) typeVal (M Nothing) = Any typeVal NA = Any -- lub [I 3, D 2.3] -> DT -- lub [I 3, D 2.3, S "a"] -> ST lub :: [Val] -> Either String Type lub vals = go Nothing vals where go (Just lub) [] = Right lub go Nothing (NA:xs) = goNa Nothing xs -- first value is a NA go Nothing (x:xs) = go (Just (typeVal x)) xs go (Just lub) (x:xs) | typeVal x == Any = goNa (Just (maybeT lub)) xs -- we hit a NA midstream | lub == typeVal x = go (Just lub) xs | lub `subsumes` typeVal x = go (Just lub) xs | typeVal x `subsumes` lub = go (Just (typeVal x)) xs | otherwise = Left $ "No subsumption: " ++ (show lub) ++ " ~ " ++ (show $ typeVal x) goNa Nothing (x:xs) = goNa (Just (typeVal x)) xs goNa (Just lub) [] = Right lub goNa (Just lub) (x:xs) | lub == typeVal x = goNa (Just lub) xs | lub `subsumes` typeVal x = goNa (Just lub) xs | maybeT (typeVal x) 
`subsumes` lub = goNa (Just (maybeT (typeVal x))) xs | otherwise = goNa (Just lub) xs -- missing case maybeT :: Type -> Type maybeT (MT a) = MT a maybeT a = (MT a)
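-- Worked examples of the subsumption and lub rules documented above; these
-- bindings are illustrative additions, not part of the original module.
lubIntDouble :: Either String Type
lubIntDouble = lub [I 3, D 2.3]                 -- Right DT   (Int <: Double)

lubWithString :: Either String Type
lubWithString = lub [I 3, D 2.3, S (pack "a")]  -- Right ST   (Double <: String)

lubWithMissing :: Either String Type
lubWithMissing = lub [I 3, NA]                  -- Right (MT IT): an NA forces a Maybe column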
houshuang/frame
src/Data/Frame/Types.hs
Haskell
mit
3,994
module TypeClient where -- Can't refer to modules named Type -- Should rename the module import Type g :: Int g = Type.id 3
antalsz/hs-to-coq
examples/base-tests/TypeClient.hs
Haskell
mit
126
{-# LANGUAGE DeriveGeneric #-} module SimpleDecree ( IntegerOperation(..) ) where -- local imports import Control.Consensus.Paxos -- external imports import qualified Data.Serialize as C import GHC.Generics -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- data IntegerOperation = SetValue Integer | GetValue Integer | AddDelta Integer | SubtractDelta Integer | MultiplyFactor Integer | DivideByFactor Integer deriving (Generic, Eq, Show) instance C.Serialize IntegerOperation instance Decreeable IntegerOperation
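-- A small round-trip check for the generically derived Serialize instance
-- above; `roundTrips` is an illustrative helper, not part of the original test.
roundTrips :: IntegerOperation -> Bool
roundTrips op = C.decode (C.encode op) == Right op

-- e.g. roundTrips (AddDelta 5) and roundTrips (SetValue 42) should both be True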
hargettp/paxos
tests/SimpleDecree.hs
Haskell
mit
654
module Type( Type(..) )where import Definition data Type = Polymorphism | TypeAlias Identifier Identifier | TypeArray Type Int | Type Identifier Int deriving (Eq) instance Show Type where show Polymorphism = "?Type?" show (Type n i) = n ++ replicate i '*'
sqd/haskell-C89-interpreter
Type.hs
Haskell
mit
267
----------------------------------------------------------- -- | -- module: MXNet.Core.Base.Internal.TH.Symbol -- copyright: (c) 2016 Tao He -- license: MIT -- maintainer: sighingnow@gmail.com -- -- Functions about Symbol that generated by template haskell. -- {-# OPTIONS_GHC -Wno-missing-signatures #-} {-# OPTIONS_GHC -Wno-redundant-constraints #-} {-# OPTIONS_GHC -Wno-unused-local-binds #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE KindSignatures #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TypeApplications #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} module MXNet.Core.Base.Internal.TH.Symbol where import Data.Proxy import MXNet.Core.Base.HMap import MXNet.Core.Base.Internal import MXNet.Core.Base.Internal.TH (registerSymbolOps) import MXNet.Core.NNVM.Internal (nnGetOpHandle, nnSymbolCompose) import Prelude hiding (sin, sinh, cos, cosh, tan, tanh, min, max, round, floor, abs, sum, sqrt, log, exp, flip, concat, repeat, reverse) -- | Register symbol operators. $(registerSymbolOps)
sighingnow/mxnet-haskell
mxnet/src/MXNet/Core/Base/Internal/TH/Symbol.hs
Haskell
mit
1,228
module Y2017.M02.D20.Solution where import Control.Arrow ((&&&)) import Data.Map (Map) import qualified Data.Map as Map import Data.Monoid import Data.Ratio -- below imports available via 1HaskellADay git repository import Control.Logic.Frege (adjoin) import qualified Data.Bag as Bag import Data.Percentage import Rosalind.Types import Rosalind.GCContent import Rosalind.Scan.FASTA {-- Is there a better way? In above Rosalind.GCContent import we have this function: gcContent :: DNAStrand -> Percentage gcContent strand = let gcs = getSum (sumGC (Map.toList (Bag.fromList strand))) in P (uncurry (%) $ adjoin fromIntegral (gcs, length strand)) The thing is, okay, it works, but how does it work? 1. It iterates through the DNA string to get the GC-content 2. It iterates through the DNA string, again, to get the length. Whenever I see the length-function I have a little annoying voice saying: You could do better. Some cases I do have to call the length function, but in some cases, I do not. This is one of those cases. rewrite gcContent so that the DNA string is iterated only once. That is to say, somewhere (else) the length of this list (String) is already recorded. Find that record and use it to compute the GC-content signature of a string. --} gcContent' :: DNAStrand -> Percentage gcContent' dna = -- our nucleotide-totals is as follows: let nukes = Map.toList (Bag.fromList dna) -- well, the totals of all the nucleotides IS the length of the list: len = sum (map snd nukes) -- the length of nukes is 4 or less, dna can have thousands of nucleotides -- And now, along with the old definition, we have everything we need in P (uncurry (%) $ adjoin (fromIntegral . getSum) (sumGC nukes, len)) -- Now, using Rosalind/rosy_strands.txt verify that gcContent' == gcContent {-- >>> fmap (map (ident &&& gcContent . strand)) $ readFASTA "Rosalind/rosy_strands.txt" [("Rosalind_6404",53.75%),("Rosalind_5959",53.57%),("Rosalind_0808",60.91%)] >>> fmap (map (ident &&& gcContent' . strand)) $ readFASTA "Rosalind/rosy_strands.txt" [("Rosalind_6404",53.75%),("Rosalind_5959",53.57%),("Rosalind_0808",60.91%)] --} -- moving gcContent' definition to Rosalind.GCContent.gcContent
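-- The same trick in miniature, independent of the Rosalind types: build the
-- frequency map once and recover the length from the counts, so the input is
-- traversed only once.  (`freqAndLen` is an illustrative name, not something
-- defined elsewhere in this repository.)
freqAndLen :: String -> (Map Char Int, Int)
freqAndLen s = (freqs, sum (Map.elems freqs))
  where freqs = Map.fromListWith (+) [ (c, 1) | c <- s ]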
geophf/1HaskellADay
exercises/HAD/Y2017/M02/D20/Solution.hs
Haskell
mit
2,231
{-# LANGUAGE FlexibleContexts #-} module Rx.Notification where import Rx.Observable.Types getValue :: Notification v -> Maybe v getValue (OnNext v) = Just v getValue _ = Nothing hasThrowable :: Notification v -> Bool hasThrowable (OnError _) = True hasThrowable _ = False accept :: Notification v -> Observer v -> IO () accept notification (Observer observerFn) = observerFn notification
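-- An illustrative consumer of `accept` (the pattern match above shows that
-- `Observer` wraps a function of type `Notification v -> IO ()`);
-- `printingObserver` is a hypothetical example, not part of the library.
printingObserver :: Show v => Observer v
printingObserver = Observer $ \n -> case n of
  OnNext v  -> putStrLn ("next: " ++ show v)
  OnError _ -> putStrLn "error notification"
  _         -> putStrLn "other notification"

-- e.g. `accept (OnNext (42 :: Int)) printingObserver` prints "next: 42"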
roman/Haskell-Reactive-Extensions
rx-core/src/Rx/Notification.hs
Haskell
mit
394
{-# LANGUAGE RecordWildCards #-} import Data.Foldable (for_) import Test.Hspec (Spec, describe, it, shouldBe) import Test.Hspec.Runner (configFastFail, defaultConfig, hspecWith) import Base (Error(..), rebase) main :: IO () main = hspecWith defaultConfig {configFastFail = True} specs specs :: Spec specs = describe "rebase" $ for_ cases test where test Case{..} = it description assertion where assertion = expression `shouldBe` outputDigits expression = rebase inputBase outputBase inputDigits data Case = Case { description :: String , inputBase :: Integer , inputDigits :: [Integer] , outputBase :: Integer , outputDigits :: Either (Error Integer) [Integer] } cases :: [Case] cases = [ Case { description = "single bit one to decimal" , inputBase = 2 , inputDigits = [1] , outputBase = 10 , outputDigits = Right [1] } , Case { description = "binary to single decimal" , inputBase = 2 , inputDigits = [1, 0, 1] , outputBase = 10 , outputDigits = Right [5] } , Case { description = "single decimal to binary" , inputBase = 10 , inputDigits = [5] , outputBase = 2 , outputDigits = Right [1, 0, 1] } , Case { description = "binary to multiple decimal" , inputBase = 2 , inputDigits = [1, 0, 1, 0, 1, 0] , outputBase = 10 , outputDigits = Right [4, 2] } , Case { description = "decimal to binary" , inputBase = 10 , inputDigits = [4, 2] , outputBase = 2 , outputDigits = Right [1, 0, 1, 0, 1, 0] } , Case { description = "trinary to hexadecimal" , inputBase = 3 , inputDigits = [1, 1, 2, 0] , outputBase = 16 , outputDigits = Right [2, 10] } , Case { description = "hexadecimal to trinary" , inputBase = 16 , inputDigits = [2, 10] , outputBase = 3 , outputDigits = Right [1, 1, 2, 0] } , Case { description = "15-bit integer" , inputBase = 97 , inputDigits = [3, 46, 60] , outputBase = 73 , outputDigits = Right [6, 10, 45] } -- The following three cases are [0] in all-your-base.json. -- Here we use [] to represent the lack of digits, i.e., zero. 
, Case { description = "empty list" , inputBase = 2 , inputDigits = [] , outputBase = 10 , outputDigits = Right [] } , Case { description = "single zero" , inputBase = 10 , inputDigits = [0] , outputBase = 2 , outputDigits = Right [] } , Case { description = "multiple zeros" , inputBase = 10 , inputDigits = [0, 0, 0] , outputBase = 2 , outputDigits = Right [] } , Case { description = "leading zeros" , inputBase = 7 , inputDigits = [0, 6, 0] , outputBase = 10 , outputDigits = Right [4, 2] } , Case { description = "input base is one" , inputBase = 1 , inputDigits = [0] , outputBase = 10 , outputDigits = Left InvalidInputBase } , Case { description = "input base is zero" , inputBase = 0 , inputDigits = [] , outputBase = 10 , outputDigits = Left InvalidInputBase } , Case { description = "input base is negative" , inputBase = -2 , inputDigits = [1] , outputBase = 10 , outputDigits = Left InvalidInputBase } , Case { description = "negative digit" , inputBase = 2 , inputDigits = [1, -1, 1, 0, 1, 0] , outputBase = 10 , outputDigits = Left (InvalidDigit (-1)) } , Case { description = "invalid positive digit" , inputBase = 2 , inputDigits = [1, 2, 1, 0, 1, 0] , outputBase = 10 , outputDigits = Left (InvalidDigit 2) } , Case { description = "output base is one" , inputBase = 2 , inputDigits = [1, 0, 1, 0, 1, 0] , outputBase = 1 , outputDigits = Left InvalidOutputBase } , Case { description = "output base is zero" , inputBase = 10 , inputDigits = [7] , outputBase = 0 , outputDigits = Left InvalidOutputBase } , Case { description = "output base is negative" , inputBase = 2 , inputDigits = [1] , outputBase = -7 , outputDigits = Left InvalidOutputBase } , Case { description = "both bases are negative" , inputBase = -2 , inputDigits = [1] , outputBase = -7 -- debatable: This could be Left InvalidOutputBase as well. , outputDigits = Left InvalidInputBase } ] -- a13cfc6a039b8e5effac4ecc3ceea56d3f8fa807
exercism/xhaskell
exercises/practice/all-your-base/test/Tests.hs
Haskell
mit
5,933
import Test.HUnit import Q33 test1 = TestCase (assertEqual "tryCoPrime 0 15 should be Right Zero is not supported." (Right "Zero is not supported") (tryCoPrime 0 15)) test2 = TestCase (assertEqual "tryCoPrime 15 0 should be Right Zero is not supported." (Right "Zero is not supported") (tryCoPrime 15 0 )) test3 = TestCase (assertEqual "tryCoPrime 12 32 should be Left False ." (Left False ) (tryCoPrime 12 32)) test4 = TestCase (assertEqual "tryCoPrime 31 12 should be Left True ." (Left True ) (tryCoPrime 31 12)) main = runTestTT $ TestList [test1,test2,test3,test4]
cshung/MiscLab
Haskell99/q33.test.hs
Haskell
mit
644
module Ch15.MadLibs where import Data.Monoid type Verb = String type Adjective = String type Adverb = String type Noun = String type Exclamation = String madlib :: Exclamation -> Adverb -> Noun -> Adjective -> String madlib e adv noun adj = e <> "! he said " <> adv <> " as he jumped into his car " <> noun <> " and drove off with his " <> adj <> " wife." madlibBetter :: Exclamation -> Adverb -> Noun -> Adjective -> String madlibBetter e adv noun adj = mconcat [ e, "! he said " , adv, " as he jumped into his car " , noun, " and drove off with his " , adj, " wife." ]
andrewMacmurray/haskell-book-solutions
src/ch15/MadLibs.hs
Haskell
mit
606
module Slack where import ClassyPrelude import Control.Monad (mfilter) import Control.Monad.Except (ExceptT(ExceptT)) import Control.Lens (Getter, Prism', prism', view, to) import Control.Lens.TH (makeLenses, makePrisms) import Data.Aeson ((.=), Value(Number, String), Object, FromJSON(parseJSON), ToJSON(toJSON), object) import qualified Data.Aeson.BetterErrors as ABE import qualified Data.Aeson.BetterErrors.Internal as ABEI import qualified Data.HashMap.Strict as HM import Data.Proxy (Proxy(Proxy)) import Data.Scientific (toBoundedInteger) import Data.Text (splitOn) import Data.Time.Clock.POSIX (posixSecondsToUTCTime) import TextShow (TextShow(showb)) import TextShow.TH (deriveTextShow) import TextShowOrphans () data TS = TS { _tsTime :: Word32, _tsUnique :: Word32 } deriving (Eq, Ord, Read, Show) asTS :: ABE.Parse Text TS asTS = ABE.asText >>= either ABE.throwCustomError pure . ts ts :: Text -> Either Text TS ts t = case splitOn "." t of [readMay -> Just time, readMay -> Just unique] -> Right $ TS time unique other -> Left $ "couldn't parse as a time.unique pair, got parts: " <> tshow other unTS :: TS -> Text unTS (TS t u) = tshow t <> "." <> tshow u tsToUTCTime :: TS -> UTCTime tsToUTCTime (TS t _) = posixSecondsToUTCTime (fromIntegral t) instance FromJSON TS where parseJSON = ABE.toAesonParser id asTS instance ToJSON TS where toJSON = toJSON . unTS makeLenses ''TS deriveTextShow ''TS newtype Time = Time { unTime :: Word32 } deriving (Eq, Ord, Read, Show) asTime :: ABE.Parse Text Time asTime = ABEI.ParseT . ReaderT $ \ (ABEI.ParseReader path value) -> ExceptT . Identity $ case value of String s | Just t <- readMay s -> Right (Time t) String s -> Left (ABEI.BadSchema (toList path) . ABEI.CustomError $ "tried to parse " <> tshow s <> " as time but couldn't") Number s | Just w32 <- toBoundedInteger s -> Right (Time w32) Number s -> Left . ABEI.BadSchema (toList path) . ABEI.CustomError $ "out of bound unix time " <> tshow s other -> Left . ABEI.BadSchema (toList path) . ABEI.CustomError $ "expected a time as string or number not " <> tshow other instance ToJSON Time where toJSON = toJSON . unTime makePrisms ''Time deriveTextShow ''Time newtype ID a = ID { unID :: Text } deriving (Eq, Ord, Read, Show) asID :: ABE.Parse Text (ID a) asID = ID <$> ABE.asText instance FromJSON (ID a) where parseJSON = ABE.toAesonParser id asID instance ToJSON (ID a) where toJSON = String . unID makePrisms ''ID deriveTextShow ''ID idedName :: Getter s Text -> Getter s (ID k) -> (s -> Text) idedName name ident s = view name s ++ " <" ++ view (ident . 
to unID) s ++ ">" data Response a = ResponseNotOk !Text | ResponseOk a data RtmStartRequest = RtmStartRequest { rtmStartToken :: Text } data RtmStartRp = RtmStartRp { _rtmStartUrl :: Text , _rtmStartSelf :: Self , _rtmStartTeam :: Team , _rtmStartUsers :: [User] , _rtmStartChannels :: [Channel] , _rtmStartGroups :: [Group] , _rtmStartIMs :: [IM] , _rtmStartBots :: [Bot] } testRtmStartRp :: RtmStartRp testRtmStartRp = RtmStartRp { _rtmStartUrl = "url" , _rtmStartSelf = Self (ID "UEMBOT") "Embot" mempty (Time 0) PresenceActive , _rtmStartTeam = Team (ID "TTEAM") "Team" Nothing "domain" Nothing False mempty , _rtmStartUsers = [] , _rtmStartChannels = [] , _rtmStartGroups = [] , _rtmStartIMs = [] , _rtmStartBots = [] } data Self = Self { _selfID :: ID User , _selfName :: Text , _selfPrefs :: Object , _selfCreated :: Time , _selfManualPresence :: Presence } data Presence = PresenceActive | PresenceAway data Team = Team { _teamID :: ID Team , _teamName :: Text , _teamEmailDomain :: Maybe Text , _teamDomain :: Text , _teamMsgEditWindowMins :: Maybe Int , _teamOverStorageLimit :: Bool , _teamPrefs :: Object } data User = User { _userID :: ID User , _userName :: Text , _userRealName :: Maybe Text , _userDeleted :: Bool , _userColor :: Maybe Text , _userTz :: Maybe Tz , _userProfile :: Profile , _userIsAdmin :: Bool , _userIsOwner :: Bool , _userIsPrimaryOwner :: Bool , _userIsRestricted :: Bool , _userIsUltraRestricted :: Bool , _userHas2fa :: Bool , _userTwoFactorType :: Maybe Text , _userHasFiles :: Bool , _userPresence :: Maybe Presence } data Tz = Tz { _tz :: Text , _tzLabel :: Text , _tzOffset :: Int } data Profile = Profile { _profileFirstName :: Maybe Text , _profileLastName :: Maybe Text , _profileRealName :: Maybe Text , _profileRealNameNormalized :: Maybe Text , _profileEmail :: Maybe Text , _profileSkype :: Maybe Text , _profilePhone :: Maybe Text , _profileImages :: IntMap Text } data Channel = Channel { _channelID :: ID Channel , _channelName :: Text , _channelCreated :: Time , _channelCreator :: ID User , _channelIsArchived :: Bool , _channelIsGeneral :: Bool , _channelMembers :: [ID User] , _channelTopic :: Maybe (SlackTracked Text) , _channelPurpose :: Maybe (SlackTracked Text) , _channelIsMember :: Bool , _channelLastRead :: Maybe TS , _channelLatest :: Maybe Message , _channelUnreadCount :: Maybe Int } data Group = Group { _groupID :: ID Group , _groupName :: Text , _groupCreated :: Time , _groupCreator :: ID User , _groupIsArchived :: Bool , _groupMembers :: [ID User] , _groupTopic :: Maybe (SlackTracked Text) , _groupPurpose :: Maybe (SlackTracked Text) , _groupIsOpen :: Bool , _groupLastRead :: Maybe TS , _groupLatest :: Maybe Message , _groupUnreadCount :: Maybe Int } data IM = IM { _imID :: ID IM , _imUser :: ID User , _imCreated :: Time , _imIsUserDeleted :: Bool , _imIsOpen :: Bool , _imLastRead :: Maybe TS , _imLatest :: Maybe Message , _imUnreadCount :: Maybe Int } data Bot = Bot { _botID :: ID Bot , _botName :: Text , _botIcons :: HM.HashMap Text Text } data Chat data Message = Message { _messageChat :: Maybe (ID Chat) , _messageUser :: Maybe (ID User) , _messageSubtype :: Maybe MessageSubtype , _messageText :: Maybe Text , _messageTS :: TS , _messageEdited :: Maybe MessageEdited , _messageDeletedTS :: Maybe TS , _messageEventTS :: Maybe TS , _messageHidden :: Bool , _messageAttachments :: [Attachment] , _messageInviter :: Maybe (ID User) , _messageIsStarred :: Bool , _messagePinnedTo :: [ID Channel] , _messageReactions :: [MessageReaction] } testMessage :: ID Chat -> ID 
User -> Text -> Message testMessage chat from text = Message { _messageChat = Just chat , _messageUser = Just from , _messageSubtype = Nothing , _messageText = Just text , _messageTS = TS 0 0 , _messageEdited = Nothing , _messageDeletedTS = Nothing , _messageEventTS = Nothing , _messageHidden = False , _messageAttachments = [] , _messageInviter = Nothing , _messageIsStarred = False , _messagePinnedTo = [] , _messageReactions = [] } data MessageSubtype = BotMS | MeMS | ChangedMS | DeletedMS | ChannelJoinMS | ChannelLeaveMS | ChannelTopicMS | ChannelPurposeMS | ChannelNameMS | ChannelArchiveMS | ChannelUnarchiveMS | GroupJoinMS | GroupLeaveMS | GroupTopicMS | GroupPurposeMS | GroupNameMS | GroupArchiveMS | GroupUnarchiveMS | FileShareMS | FileCommentMS | FileMentionMS | PinnedItemMS | ReminderAddMS | ReminderDeleteMS | BotAddMS data MessageEdited = MessageEdited { _messageEditedUser :: ID User , _messageEditedTS :: TS } data MessageReaction = MessageReaction { _messageReactionName :: Text , _messageReactionCount :: Int , _messageReactionUsers :: [ID User] } data Attachment = Attachment { _attachmentFallback :: Maybe Text , _attachmentColor :: Maybe Text , _attachmentPretext :: Maybe Text , _attachmentAuthorName :: Maybe Text , _attachmentAuthorLink :: Maybe Text , _attachmentAuthorIcon :: Maybe Text , _attachmentTitle :: Maybe Text , _attachmentTitleLink :: Maybe Text , _attachmentText :: Maybe Text , _attachmentFields :: [AttachmentField] , _attachmentFromUrl :: Maybe Text , _attachmentThumbUrl :: Maybe Text , _attachmentThumbWidth :: Maybe Int , _attachmentThumbHeight :: Maybe Int , _attachmentId :: Int } data AttachmentField = AttachmentField { _fieldTitle :: Text , _fieldValue :: Text , _fieldShort :: Bool } data SlackTracked a = SlackTracked { _trackedValue :: a , _trackedCreator :: ID User , _trackedLastSet :: Time } data File = File { _fileID :: ID File , _fileCreated :: Time , _fileTimestamp :: Time , _fileName :: Text , _fileTitle :: Text , _fileMimeType :: Text , _fileFileType :: Text , _filePrettyType :: Text , _fileUser :: ID User , _fileMode :: FileMode , _fileEditable :: Bool , _fileIsExternal :: Bool , _fileExternalType :: Text , _fileSize :: Word64 , _fileURL :: Text , _fileURLDownload :: Text , _fileURLPrivate :: Text , _fileURLPrivateDownload :: Text , _fileThumb :: IntMap Text , _filePermalink :: Text , _fileEditLink :: Text , _filePreview :: Text , _filePreviewHighlight :: Text , _fileLines :: Int , _fileLinesMore :: Int , _fileIsPublic :: Bool , _filePublicURLShared :: Bool , _fileChannels :: [ID Channel] , _fileGroups :: [ID Group] , _fileIMs :: [ID IM] , _fileInitialComment :: Maybe Message , _fileNumStars :: Int , _fileIsStarred :: Bool } data FileMode = FileHosted | FileExternal | FileSnippet | FilePost data FileComment = FileComment { _fileCommentID :: ID FileComment , _fileCommentTimestamp :: Time , _fileCommentUser :: ID User , _fileCommentComment :: Text } data RtmEvent = RtmHello | RtmReplyOk Word64 (Maybe TS) (Maybe Text) | RtmReplyNotOk Word64 Int32 Text | RtmMessage Message | RtmChannelMarked (ChatMarked Channel) | RtmChannelCreated Channel | RtmChannelJoined Channel | RtmChannelLeft (ID Channel) | RtmChannelDeleted (ID Channel) | RtmChannelRenamed (ChatRenamed Channel) | RtmChannelArchive (ChatUser Channel) | RtmChannelUnarchive (ChatUser Channel) | RtmChannelHistoryChanged (ChatHistoryChanged Channel) | RtmIMCreated IMCreated | RtmIMOpen (ChatUser IM) | RtmIMClose (ChatUser IM) | RtmIMMarked (ChatMarked IM) | RtmIMHistoryChanged (ChatHistoryChanged IM) | 
RtmGroupJoined Group | RtmGroupLeft (ID Group) | RtmGroupOpen (ChatUser Group) | RtmGroupClose (ChatUser Group) | RtmGroupArchive (ID Group) | RtmGroupUnarchive (ID Group) | RtmGroupRename (ChatRenamed Group) | RtmGroupMarked (ChatMarked Group) | RtmGroupHistoryChanged (ChatHistoryChanged Group) | RtmFileCreated File | RtmFileShared File | RtmFileUnshared File | RtmFilePublic File | RtmFilePrivate (ID File) | RtmFileChange File | RtmFileDeleted FileDeleted | RtmFileCommentAdded FileCommentUpdated | RtmFileCommentEdited FileCommentUpdated | RtmFileCommentDeleted FileCommentDeleted | RtmPresenceChange PresenceChange | RtmManualPresenceChange Presence | RtmPrefChange PrefChange | RtmUserChange User | RtmUserTyping (ChatUser Chat) | RtmTeamJoin User | RtmStarAdded Star | RtmStarRemoved Star | RtmEmojiChanged TS | RtmCommandsChanged TS | RtmTeamPrefChange PrefChange | RtmTeamRename Text | RtmTeamDomainChange TeamDomainChange | RtmEmailDomainChanged EmailDomainChanged | RtmBotAdded Bot | RtmBotChanged Bot | RtmAccountsChanged data ChatMarked a = ChatMarked { _chatMarkedChannel :: ID a , _chatMarkedTS :: TS } data ChatUser a = ChatUser { _chatUserUser :: ID User , _chatUserChannelID :: ID a } data ChatRenamed a = ChatRenamed { _chatRenamedChannelID :: ID a , _chatRenamedName :: Text } data ChatHistoryChanged a = ChatHistoryChanged { _chatHistoryChangedLatest :: Text , _chatHistoryChangedTS :: TS , _chatHistoryChangedEventTS :: TS } data IMCreated = IMCreated { _imCreatedUser :: ID User , _imCreatedChannel :: IM } data FileDeleted = FileDeleted { _fileDeletedFileID :: ID File , _fileDeletedEventTS :: TS } data FileCommentUpdated = FileCommentUpdated { _fileCommentUpdatedFile :: File , _fileCommentUpdatedComment :: FileComment } data FileCommentDeleted = FileCommentDeleted { _fileCommentDeletedFile :: File , _fileCommentDeletedComment :: ID FileComment } data PresenceChange = PresenceChange { _presenceChangeUser :: ID User , _presenceChangePresence :: Presence } data PrefChange = PrefChange { _prefChangeName :: Text , _prefChangeValue :: Value } data Star = Star { _starUser :: Text , _starItem :: StarItem , _starEventTS :: TS } data StarItem = StarItemMessage Message | StarItemFile File | StarItemFileComment File FileComment | StarItemChannel (ID Channel) | StarItemIM (ID IM) | StarItemGroup (ID Group) data TeamDomainChange = TeamDomainChange { _teamDomainChangeUrl :: Text , _teamDomainChangeDomain :: Text } data EmailDomainChanged = EmailDomainChanged { _emailDomainChangedEmailDomain :: Text , _emailDomainChangedEventTS :: TS } data RtmSendMessage = RtmSendMessage { _sendMessageSeqnum :: Word64 , _sendMessageChat :: ID Chat , _sendMessageText :: Text } class SlackTyped a where isTypedID :: Proxy a -> ID b -> Bool instance SlackTyped Channel where isTypedID _ = isPrefixOf "C" . unID instance SlackTyped File where isTypedID _ (ID t) = "F" `isPrefixOf` t && not ("Fc" `isPrefixOf` t) instance SlackTyped FileComment where isTypedID _ (ID t) = "Fc" `isPrefixOf` t instance SlackTyped Group where isTypedID _ = isPrefixOf "G" . unID instance SlackTyped Chat where isTypedID _ i = isTypedID (Proxy :: Proxy Channel) i || isTypedID (Proxy :: Proxy IM) i || isTypedID (Proxy :: Proxy Group) i instance SlackTyped IM where isTypedID _ = isPrefixOf "D" . unID instance SlackTyped User where isTypedID _ = isPrefixOf "U" . unID typedID :: forall a. SlackTyped a => Prism' (ID Chat) (ID a) typedID = prism' (\ (ID t) -> ID t) asTypedID asTypedID :: forall a b. 
SlackTyped b => ID a -> Maybe (ID b) asTypedID i = if isTypedID (Proxy :: Proxy b) i then Just (ID . unID $ i) else Nothing asChannelID :: ID Chat -> Maybe (ID Channel) asChannelID = asTypedID asGroupID :: ID Chat -> Maybe (ID Group) asGroupID = asTypedID asIMID :: ID Chat -> Maybe (ID IM) asIMID = asTypedID deriving instance Eq RtmStartRequest deriving instance Eq RtmStartRp deriving instance Eq Self deriving instance Eq Team deriving instance Eq User deriving instance Eq Tz deriving instance Eq Profile deriving instance Eq Chat deriving instance Eq Channel deriving instance Eq Group deriving instance Eq IM deriving instance Eq Bot deriving instance Eq MessageSubtype deriving instance Enum MessageSubtype deriving instance Ord MessageSubtype deriving instance Bounded MessageSubtype deriving instance Eq MessageReaction deriving instance Eq Message deriving instance Eq MessageEdited deriving instance Eq Attachment deriving instance Eq AttachmentField deriving instance Eq a => Eq (SlackTracked a) deriving instance Eq FileMode deriving instance Eq File deriving instance Eq FileComment deriving instance Eq RtmEvent deriving instance Eq a => Eq (ChatMarked a) deriving instance Eq a => Eq (ChatUser a) deriving instance Eq a => Eq (ChatRenamed a) deriving instance Eq a => Eq (ChatHistoryChanged a) deriving instance Eq IMCreated deriving instance Eq FileDeleted deriving instance Eq FileCommentUpdated deriving instance Eq FileCommentDeleted deriving instance Eq Presence deriving instance Eq PresenceChange deriving instance Eq PrefChange deriving instance Eq Star deriving instance Eq StarItem deriving instance Eq TeamDomainChange deriving instance Eq EmailDomainChanged deriving instance Eq RtmSendMessage makeLenses ''RtmStartRequest makeLenses ''RtmStartRp makeLenses ''Self makeLenses ''Team makeLenses ''User makeLenses ''Tz makeLenses ''Profile makeLenses ''Channel makeLenses ''Group makeLenses ''IM makeLenses ''Bot makeLenses ''MessageReaction makeLenses ''Message makeLenses ''MessageEdited makeLenses ''Attachment makeLenses ''AttachmentField makeLenses ''SlackTracked makeLenses ''File makeLenses ''FileComment makePrisms ''RtmEvent makeLenses ''ChatMarked makeLenses ''ChatUser makeLenses ''ChatRenamed makeLenses ''ChatHistoryChanged makeLenses ''IMCreated makeLenses ''FileDeleted makeLenses ''FileCommentUpdated makeLenses ''FileCommentDeleted makeLenses ''PresenceChange makeLenses ''PrefChange makeLenses ''Star makePrisms ''StarItem makeLenses ''TeamDomainChange makeLenses ''EmailDomainChanged makeLenses ''RtmSendMessage instance TextShow Chat where showb _ = "Chat" deriveTextShow ''RtmStartRequest deriveTextShow ''RtmStartRp deriveTextShow ''Self deriveTextShow ''Presence deriveTextShow ''Team deriveTextShow ''User deriveTextShow ''Tz deriveTextShow ''Profile deriveTextShow ''Channel deriveTextShow ''Group deriveTextShow ''IM deriveTextShow ''Bot deriveTextShow ''Message deriveTextShow ''MessageSubtype deriveTextShow ''MessageEdited deriveTextShow ''MessageReaction deriveTextShow ''Attachment deriveTextShow ''AttachmentField deriveTextShow ''SlackTracked deriveTextShow ''File deriveTextShow ''FileMode deriveTextShow ''FileComment deriveTextShow ''RtmEvent deriveTextShow ''ChatMarked deriveTextShow ''ChatUser deriveTextShow ''ChatRenamed deriveTextShow ''ChatHistoryChanged deriveTextShow ''IMCreated deriveTextShow ''FileDeleted deriveTextShow ''FileCommentUpdated deriveTextShow ''FileCommentDeleted deriveTextShow ''PresenceChange deriveTextShow ''PrefChange deriveTextShow ''Star deriveTextShow 
''StarItem deriveTextShow ''TeamDomainChange deriveTextShow ''EmailDomainChanged deriveTextShow ''RtmSendMessage instance ToJSON RtmStartRequest where toJSON (RtmStartRequest { .. }) = object [ ("token", toJSON rtmStartToken) ] asResponse :: ABE.Parse Text a -> ABE.Parse Text (Response a) asResponse parseInner = ABE.key "ok" ABE.asBool >>= \ case True -> ResponseOk <$> parseInner False -> ResponseNotOk <$> ABE.keyOrDefault "error" "unknown error" ABE.asText asRtmStartRp :: ABE.Parse Text RtmStartRp asRtmStartRp = RtmStartRp <$> ABE.key "url" ABE.asText <*> ABE.key "self" asSelf <*> ABE.key "team" asTeam <*> ABE.key "users" (ABE.eachInArray asUser) <*> ABE.key "channels" (ABE.eachInArray asChannel) <*> ABE.key "groups" (ABE.eachInArray asGroup) <*> ABE.key "ims" (ABE.eachInArray asIM) <*> ABE.key "bots" (ABE.eachInArray asBot) asSelf :: ABE.Parse Text Self asSelf = Self <$> ABE.key "id" asID <*> ABE.key "name" ABE.asText <*> ABE.key "prefs" ABE.asObject <*> ABE.key "created" asTime <*> ABE.key "manual_presence" asPresence asPresence :: ABE.Parse Text Presence asPresence = ABE.asText >>= \ case "active" -> pure PresenceActive "away" -> pure PresenceAway other -> ABE.throwCustomError $ "unknown presence value " <> other asTeam :: ABE.Parse Text Team asTeam = Team <$> ABE.key "id" asID <*> ABE.key "name" ABE.asText <*> (mfilter (not . null) <$> ABE.keyMay "email_domain" ABE.asText) <*> ABE.key "domain" ABE.asText <*> (mfilter (not . (==) (-1)) <$> ABE.keyMay "msg_edit_window_mins" ABE.asIntegral) <*> ABE.key "over_storage_limit" ABE.asBool <*> ABE.key "prefs" ABE.asObject asUser :: ABE.Parse Text User asUser = User <$> ABE.key "id" asID <*> ABE.key "name" ABE.asText <*> ABE.keyMay "real_name" ABE.asText <*> ABE.key "deleted" ABE.asBool <*> ABE.keyMay "color" ABE.asText <*> ( ( (,,) <$> (join <$> ABE.keyMay "tz" (ABE.perhaps ABE.asText)) <*> ABE.keyMay "tz_label" ABE.asText <*> ABE.keyMay "tz_offset" ABE.asIntegral ) >>= \ (tzMay, labelMay, offsMay) -> pure $ Tz <$> tzMay <*> labelMay <*> offsMay ) <*> ABE.key "profile" asProfile <*> ABE.keyOrDefault "is_admin" False ABE.asBool <*> ABE.keyOrDefault "is_owner" False ABE.asBool <*> ABE.keyOrDefault "is_primary_owner" False ABE.asBool <*> ABE.keyOrDefault "is_restricted" False ABE.asBool <*> ABE.keyOrDefault "is_ultra_restricted" False ABE.asBool <*> ABE.keyOrDefault "has_2fa" False ABE.asBool <*> ABE.keyMay "two_factor_type" ABE.asText <*> ABE.keyOrDefault "has_files" False ABE.asBool <*> ABE.keyMay "presence" asPresence asProfile :: ABE.Parse Text Profile asProfile = Profile <$> ABE.keyMay "first_name" ABE.asText <*> ABE.keyMay "last_name" ABE.asText <*> ABE.keyMay "real_name" ABE.asText <*> ABE.keyMay "real_name_normalized" ABE.asText <*> ABE.keyMay "email" ABE.asText <*> ABE.keyMay "skype" ABE.asText <*> ABE.keyMay "phone" ABE.asText <*> asThumbs asThumbs :: ABE.Parse Text (IntMap Text) asThumbs = mapFromList . 
catMaybes <$> mapM (\ n -> map (n, ) <$> ABE.keyMay ("image_" <> tshow n) ABE.asText) [24 :: Int, 32, 48, 72, 192, 512] asChannel :: ABE.Parse Text Channel asChannel = Channel <$> ABE.key "id" asID <*> ABE.key "name" ABE.asText <*> ABE.key "created" asTime <*> ABE.key "creator" asID <*> ABE.key "is_archived" ABE.asBool <*> ABE.keyOrDefault "is_general" False ABE.asBool <*> ABE.keyOrDefault "members" [] (ABE.eachInArray asID) <*> ABE.keyMay "topic" (asSlackTracked ABE.asText) <*> ABE.keyMay "purpose" (asSlackTracked ABE.asText) <*> ABE.keyOrDefault "is_member" False ABE.asBool <*> ABE.keyMay "last_read" asTS <*> ABE.keyMay "latest" asMessage <*> ABE.keyMay "unread_count" ABE.asIntegral asGroup :: ABE.Parse Text Group asGroup = Group <$> ABE.key "id" asID <*> ABE.key "name" ABE.asText <*> ABE.key "created" asTime <*> ABE.key "creator" asID <*> ABE.key "is_archived" ABE.asBool <*> ABE.keyOrDefault "members" [] (ABE.eachInArray asID) <*> ABE.keyMay "topic" (asSlackTracked ABE.asText) <*> ABE.keyMay "purpose" (asSlackTracked ABE.asText) <*> ABE.keyOrDefault "is_open" False ABE.asBool <*> ABE.keyMay "last_read" asTS <*> ABE.keyMay "latest" asMessage <*> ABE.keyMay "unread_count" ABE.asIntegral asIM :: ABE.Parse Text IM asIM = IM <$> ABE.key "id" asID <*> ABE.key "user" asID <*> ABE.key "created" asTime <*> ABE.keyOrDefault "is_user_deleted" False ABE.asBool <*> ABE.keyOrDefault "is_open" False ABE.asBool <*> ABE.keyMay "last_read" asTS <*> ABE.keyMay "latest" asMessage <*> ABE.keyMay "unread_count" ABE.asIntegral asBot :: ABE.Parse Text Bot asBot = Bot <$> ABE.key "id" asID <*> ABE.key "name" ABE.asText <*> ABE.keyOrDefault "icons" mempty (mapFromList <$> ABE.eachInObject ABE.asText) asSlackTracked :: ABE.Parse Text a -> ABE.Parse Text (SlackTracked a) asSlackTracked parseValue = SlackTracked <$> ABE.key "value" parseValue <*> ABE.key "creator" asID <*> ABE.key "last_set" asTime asMessage :: ABE.Parse Text Message asMessage = Message <$> ABE.keyMay "channel" asID <*> ABE.keyMay "user" asID <*> ABE.keyMay "subtype" asMessageSubtype <*> ABE.keyMay "text" ABE.asText <*> ABE.key "ts" asTS <*> ABE.keyMay "edited" asMessageEdited <*> ABE.keyMay "deleted_ts" asTS <*> ABE.keyMay "event_is" asTS <*> ABE.keyOrDefault "hidden" False ABE.asBool <*> ABE.keyOrDefault "attachments" [] (ABE.eachInArray asAttachment) <*> ABE.keyMay "inviter" asID <*> ABE.keyOrDefault "is_starred" False ABE.asBool <*> ABE.keyOrDefault "pinned_to" [] (ABE.eachInArray asID) <*> ABE.keyOrDefault "reactions" [] (ABE.eachInArray asMessageReaction) asMessageSubtype :: ABE.Parse Text MessageSubtype asMessageSubtype = ABE.asText >>= either ABE.throwCustomError pure . 
messageSubtypeFromText messageSubtypeFromText :: Text -> Either Text MessageSubtype messageSubtypeFromText = \ case "bot_message" -> Right BotMS "me_message" -> Right MeMS "message_changed" -> Right ChangedMS "message_deleted" -> Right DeletedMS "channel_join" -> Right ChannelJoinMS "channel_leave" -> Right ChannelLeaveMS "channel_topic" -> Right ChannelTopicMS "channel_purpose" -> Right ChannelPurposeMS "channel_name" -> Right ChannelNameMS "channel_archive" -> Right ChannelArchiveMS "channel_unarchive" -> Right ChannelUnarchiveMS "group_join" -> Right GroupJoinMS "group_leave" -> Right GroupLeaveMS "group_topic" -> Right GroupTopicMS "group_purpose" -> Right GroupPurposeMS "group_name" -> Right GroupNameMS "group_archive" -> Right GroupArchiveMS "group_unarchive" -> Right GroupUnarchiveMS "file_share" -> Right FileShareMS "file_comment" -> Right FileCommentMS "file_mention" -> Right FileMentionMS "pinned_item" -> Right PinnedItemMS "reminder_add" -> Right ReminderAddMS "reminder_delete" -> Right ReminderDeleteMS "bot_add" -> Right BotAddMS other -> Left $ "unknown message subtype " <> other messageSubtypeToText :: MessageSubtype -> Text messageSubtypeToText = \ case BotMS -> "bot_message" MeMS -> "me_message" ChangedMS -> "message_changed" DeletedMS -> "message_deleted" ChannelJoinMS -> "channel_join" ChannelLeaveMS -> "channel_leave" ChannelTopicMS -> "channel_topic" ChannelPurposeMS -> "channel_purpose" ChannelNameMS -> "channel_name" ChannelArchiveMS -> "channel_archive" ChannelUnarchiveMS -> "channel_unarchive" GroupJoinMS -> "group_join" GroupLeaveMS -> "group_leave" GroupTopicMS -> "group_topic" GroupPurposeMS -> "group_purpose" GroupNameMS -> "group_name" GroupArchiveMS -> "group_archive" GroupUnarchiveMS -> "group_unarchive" FileShareMS -> "file_share" FileCommentMS -> "file_comment" FileMentionMS -> "file_mention" PinnedItemMS -> "pinned_item" ReminderAddMS -> "reminder_add" ReminderDeleteMS -> "reminder_delete" BotAddMS -> "bot_add" instance FromJSON MessageSubtype where parseJSON = ABE.toAesonParser id asMessageSubtype instance ToJSON MessageSubtype where toJSON = toJSON . messageSubtypeToText asMessageEdited :: ABE.Parse Text MessageEdited asMessageEdited = MessageEdited <$> ABE.key "user" asID <*> ABE.key "ts" asTS asMessageReaction :: ABE.Parse Text MessageReaction asMessageReaction = MessageReaction <$> ABE.key "name" ABE.asText <*> ABE.key "count" ABE.asIntegral <*> ABE.key "users" (ABE.eachInArray asID) asAttachment :: ABE.Parse Text Attachment asAttachment = Attachment <$> ABE.keyMay "fallback" ABE.asText <*> ABE.keyMay "color" ABE.asText <*> ABE.keyMay "pretext" ABE.asText <*> ABE.keyMay "author_name" ABE.asText <*> ABE.keyMay "author_link" ABE.asText <*> ABE.keyMay "author_icon" ABE.asText <*> ABE.keyMay "title" ABE.asText <*> ABE.keyMay "title_link" ABE.asText <*> ABE.keyMay "text" ABE.asText <*> ABE.keyOrDefault "fields" [] (ABE.eachInArray asAttachmentField) <*> ABE.keyMay "from_url" ABE.asText <*> ABE.keyMay "thumb_url" ABE.asText <*> ABE.keyMay "thumb_width" ABE.asIntegral <*> ABE.keyMay "thumb_height" ABE.asIntegral <*> ABE.keyOrDefault "id" 1 ABE.asIntegral -- FIXME? this defaulting is a lie! 
asAttachmentField :: ABE.Parse Text AttachmentField asAttachmentField = AttachmentField <$> ABE.key "title" ABE.asText <*> ABE.key "value" ABE.asText <*> ABE.key "short" ABE.asBool asFile :: ABE.Parse Text File asFile = File <$> ABE.key "id" asID <*> ABE.key "created" asTime <*> ABE.key "timestamp" asTime <*> ABE.key "name" ABE.asText <*> ABE.key "title" ABE.asText <*> ABE.key "mimetype" ABE.asText <*> ABE.key "filetype" ABE.asText <*> ABE.key "pretty_type" ABE.asText <*> ABE.key "user" asID <*> ABE.key "mode" asFileMode <*> ABE.key "editable" ABE.asBool <*> ABE.key "is_external" ABE.asBool <*> ABE.key "external_type" ABE.asText <*> ABE.key "size" ABE.asIntegral <*> ABE.key "url" ABE.asText <*> ABE.key "url_download" ABE.asText <*> ABE.key "url_private" ABE.asText <*> ABE.key "url_private_download" ABE.asText <*> asThumbs <*> ABE.key "permalink" ABE.asText <*> ABE.key "edit_link" ABE.asText <*> ABE.key "preview" ABE.asText <*> ABE.key "preview_highlight" ABE.asText <*> ABE.key "lines" ABE.asIntegral <*> ABE.key "lines_more" ABE.asIntegral <*> ABE.key "is_public" ABE.asBool <*> ABE.key "public_url_shared" ABE.asBool <*> ABE.keyOrDefault "channels" [] (ABE.eachInArray asID) <*> ABE.keyOrDefault "groups" [] (ABE.eachInArray asID) <*> ABE.keyOrDefault "ims" [] (ABE.eachInArray asID) <*> ABE.keyMay "initial_comment" asMessage <*> ABE.keyOrDefault "num_starts" 0 ABE.asIntegral <*> ABE.keyOrDefault "is_starred" False ABE.asBool asFileMode :: ABE.Parse Text FileMode asFileMode = ABE.asText >>= \ case "hosted" -> pure FileHosted "external" -> pure FileExternal "snippet" -> pure FileSnippet "post" -> pure FilePost other -> ABE.throwCustomError $ "unknown file mode " <> other asFileComment :: ABE.Parse Text FileComment asFileComment = FileComment <$> ABE.key "id" asID <*> ABE.key "timestamp" asTime <*> ABE.key "user" asID <*> ABE.key "comment" ABE.asText asRtmEvent :: ABE.Parse Text RtmEvent asRtmEvent = ABE.keyMay "reply_to" ABE.asIntegral >>= \ case Just seqnum -> ABE.key "ok" ABE.asBool >>= \ case True -> RtmReplyOk seqnum <$> ABE.keyMay "ts" asTS <*> ABE.keyMay "text" ABE.asText False -> ABE.key "error" ( RtmReplyNotOk seqnum <$> ABE.key "code" ABE.asIntegral <*> ABE.key "msg" ABE.asText ) Nothing -> ABE.key "type" ABE.asText >>= \ case "hello" -> pure RtmHello "message" -> RtmMessage <$> asMessage "channel_marked" -> RtmChannelMarked <$> asChatMarked "channel_created" -> RtmChannelCreated <$> ABE.key "channel" asChannel "channel_joined" -> RtmChannelJoined <$> ABE.key "channel" asChannel "channel_left" -> RtmChannelLeft <$> ABE.key "channel" asID "channel_deleted" -> RtmChannelDeleted <$> ABE.key "channel" asID "channel_rename" -> RtmChannelRenamed <$> ABE.key "channel" asChatRenamed "channel_archive" -> RtmChannelArchive <$> asChatUser "channel_unarchive" -> RtmChannelUnarchive <$> asChatUser "channel_history_changed" -> RtmChannelHistoryChanged <$> asChatHistoryChanged "im_created" -> RtmIMCreated <$> asIMCreated "im_open" -> RtmIMOpen <$> asChatUser "im_close" -> RtmIMClose <$> asChatUser "im_marked" -> RtmIMMarked <$> asChatMarked "im_history_changed" -> RtmIMHistoryChanged <$> asChatHistoryChanged "group_joined" -> RtmGroupJoined <$> ABE.key "channel" asGroup "group_left" -> RtmGroupLeft <$> ABE.key "channel" asID "group_open" -> RtmGroupOpen <$> asChatUser "group_close" -> RtmGroupClose <$> asChatUser "group_archive" -> RtmGroupArchive <$> ABE.key "channel" asID "group_unarchive" -> RtmGroupUnarchive <$> ABE.key "channel" asID "group_rename" -> RtmGroupRename <$> ABE.key "channel" 
asChatRenamed "group_marked" -> RtmGroupMarked <$> asChatMarked "group_history_changed" -> RtmGroupHistoryChanged <$> asChatHistoryChanged "file_created" -> RtmFileCreated <$> ABE.key "file" asFile "file_shared" -> RtmFileShared <$> ABE.key "file" asFile "file_unshared" -> RtmFileUnshared <$> ABE.key "file" asFile "file_public" -> RtmFilePublic <$> ABE.key "file" asFile "file_private" -> RtmFilePrivate <$> ABE.key "file" asID "file_change" -> RtmFileChange <$> ABE.key "file" asFile "file_deleted" -> RtmFileDeleted <$> asFileDeleted "file_comment_added" -> RtmFileCommentAdded <$> asFileCommentUpdated "file_comment_edited" -> RtmFileCommentEdited <$> asFileCommentUpdated "file_comment_deleted" -> RtmFileCommentDeleted <$> asFileCommentDeleted "presence_change" -> RtmPresenceChange <$> asPresenceChange "manual_presence_change" -> RtmManualPresenceChange <$> ABE.key "presence" asPresence "user_typing" -> RtmUserTyping <$> asChatUser "pref_change" -> RtmPrefChange <$> asPrefChange "user_change" -> RtmUserChange <$> ABE.key "user" asUser "team_join" -> RtmTeamJoin <$> ABE.key "user" asUser "star_added" -> RtmStarAdded <$> asStar "star_removed" -> RtmStarRemoved <$> asStar "emoji_changed" -> RtmEmojiChanged <$> ABE.key "event_ts" asTS "commands_changed" -> RtmCommandsChanged <$> ABE.key "event_ts" asTS "team_pref_change" -> RtmTeamPrefChange <$> asPrefChange "team_rename" -> RtmTeamRename <$> ABE.key "name" ABE.asText "team_domain_change" -> RtmTeamDomainChange <$> asTeamDomainChange "email_domain_changed" -> RtmEmailDomainChanged <$> asEmailDomainChanged "bot_added" -> RtmBotAdded <$> ABE.key "bot" asBot "bot_changed" -> RtmBotChanged <$> ABE.key "bot" asBot "accounts_changed" -> pure RtmAccountsChanged other -> ABE.throwCustomError $ "unknown RTM event type " <> other asChatMarked :: ABE.Parse Text (ChatMarked a) asChatMarked = ChatMarked <$> ABE.key "channel" asID <*> ABE.key "ts" asTS asChatUser :: ABE.Parse Text (ChatUser a) asChatUser = ChatUser <$> ABE.key "channel" asID <*> ABE.key "user" asID asChatRenamed :: ABE.Parse Text (ChatRenamed a) asChatRenamed = ChatRenamed <$> ABE.key "id" asID <*> ABE.key "name" ABE.asText asChatHistoryChanged :: ABE.Parse Text (ChatHistoryChanged a) asChatHistoryChanged = ChatHistoryChanged <$> ABE.key "latest" ABE.asText <*> ABE.key "ts" asTS <*> ABE.key "event_ts" asTS asIMCreated :: ABE.Parse Text IMCreated asIMCreated = IMCreated <$> ABE.key "user" asID <*> ABE.key "channel" asIM asFileDeleted :: ABE.Parse Text FileDeleted asFileDeleted = FileDeleted <$> ABE.key "file_id" asID <*> ABE.key "event_ts" asTS asFileCommentUpdated :: ABE.Parse Text FileCommentUpdated asFileCommentUpdated = FileCommentUpdated <$> ABE.key "file" asFile <*> ABE.key "comment" asFileComment asFileCommentDeleted :: ABE.Parse Text FileCommentDeleted asFileCommentDeleted = FileCommentDeleted <$> ABE.key "file" asFile <*> ABE.key "comment" asID asPresenceChange :: ABE.Parse Text PresenceChange asPresenceChange = PresenceChange <$> ABE.key "user" asID <*> ABE.key "presence" asPresence asPrefChange :: ABE.Parse Text PrefChange asPrefChange = PrefChange <$> ABE.key "name" ABE.asText <*> ABE.key "value" (ABEI.withValue Right) asStar :: ABE.Parse Text Star asStar = Star <$> ABE.key "user" ABE.asText <*> ABE.key "item" asStarItem <*> ABE.key "event_ts" asTS asStarItem :: ABE.Parse Text StarItem asStarItem = ABE.key "type" ABE.asText >>= \ case "message" -> StarItemMessage <$> ABE.key "message" asMessage "file" -> StarItemFile <$> ABE.key "file" asFile "file_comment" -> StarItemFileComment 
<$> ABE.key "file" asFile <*> ABE.key "comment" asFileComment "channel" -> StarItemChannel <$> ABE.key "channel" asID "im" -> StarItemIM <$> ABE.key "im" asID "group" -> StarItemGroup <$> ABE.key "group" asID other -> ABE.throwCustomError $ "unknown starrable item type " <> other asTeamDomainChange :: ABE.Parse Text TeamDomainChange asTeamDomainChange = TeamDomainChange <$> ABE.key "url" ABE.asText <*> ABE.key "domain" ABE.asText asEmailDomainChanged :: ABE.Parse Text EmailDomainChanged asEmailDomainChanged = EmailDomainChanged <$> ABE.key "email_domain" ABE.asText <*> ABE.key "event_ts" asTS instance ToJSON RtmSendMessage where toJSON (RtmSendMessage seqnum chat message) = object [ "type" .= ("message" :: Text) , "id" .= seqnum , "channel" .= chat , "text" .= message ]
Dridus/alexandria
server/Slack.hs
Haskell
mit
37,892
{-# LANGUAGE TypeApplications, ScopedTypeVariables, LambdaCase, ViewPatterns, RecordWildCards, AllowAmbiguousTypes, GADTs, TypeFamilies, KindSignatures, DataKinds, TemplateHaskell #-} module PrintModGuts ( -- *Pieces of 'ModGuts' for printing ModGutsInfo(..), fullInfo, summaryInfo, -- *Printing 'ModGuts' formatModGuts, printModGuts, -- *Type hackery ModGutsInfoType, KnownInfo(..), SomeKnownModGutsInfo(..), known, unknown ) where import qualified Language.Haskell.TH as TH import Data.Generics (Proxy(..)) import Data.Bifunctor import Data.Foldable import Data.Traversable import Data.Function import Data.Maybe import Data.List (sortBy) import GHC import Avail (AvailInfo()) import PatSyn (PatSyn()) import InstEnv (InstEnv(), instEnvElts, is_orphan) import FamInstEnv (FamInstEnv(), famInstEnvElts) import GhcPlugins import PprCore import Text.PrettyPrint.Util -- These aren't in the order they show up in 'ModGuts', necessarily; they're in -- the most useful order for printing. data ModGutsInfo = MGI_Module | MGI_Location | MGI_Exports | MGI_InPackageImports | MGI_PackageDeps | MGI_OrphanInstanceDeps | MGI_TypeFamilyInstanceDeps | MGI_UsedFiles | MGI_UsedTemplateHaskell | MGI_Environment | MGI_Fixities | MGI_TypeConstructors | MGI_TypeClassInstances | MGI_TypeFamilyInstances | MGI_PatternSynonyms | MGI_Rules | MGI_ForeignStubs | MGI_Warnings | MGI_Annotations | MGI_HpcInfo | MGI_Breakpoints | MGI_VectorizationPragmas | MGI_VectorizedDeclarations | MGI_TypeClassInstanceEnvironment | MGI_TypeFamilyInstanceEnvironment | MGI_SafeHaskell | MGI_NeedToTrustSelfPkg | MGI_Contents deriving (Eq, Ord, Enum, Bounded, Show, Read) type family ModGutsInfoType (info :: ModGutsInfo) :: * where ModGutsInfoType 'MGI_Module = (Module, HscSource) ModGutsInfoType 'MGI_Location = SrcSpan ModGutsInfoType 'MGI_Exports = [AvailInfo] ModGutsInfoType 'MGI_InPackageImports = [(ModuleName, IsBootInterface)] ModGutsInfoType 'MGI_PackageDeps = [(UnitId, Bool)] ModGutsInfoType 'MGI_OrphanInstanceDeps = [Module] ModGutsInfoType 'MGI_TypeFamilyInstanceDeps = [Module] ModGutsInfoType 'MGI_UsedFiles = [Usage] ModGutsInfoType 'MGI_UsedTemplateHaskell = Bool ModGutsInfoType 'MGI_Environment = GlobalRdrEnv ModGutsInfoType 'MGI_Fixities = FixityEnv ModGutsInfoType 'MGI_TypeConstructors = [TyCon] ModGutsInfoType 'MGI_TypeClassInstances = [ClsInst] ModGutsInfoType 'MGI_TypeFamilyInstances = [FamInst] ModGutsInfoType 'MGI_PatternSynonyms = [PatSyn] ModGutsInfoType 'MGI_Rules = [CoreRule] ModGutsInfoType 'MGI_ForeignStubs = ForeignStubs ModGutsInfoType 'MGI_Warnings = Warnings ModGutsInfoType 'MGI_Annotations = [Annotation] ModGutsInfoType 'MGI_HpcInfo = HpcInfo ModGutsInfoType 'MGI_Breakpoints = Maybe ModBreaks ModGutsInfoType 'MGI_VectorizationPragmas = [CoreVect] ModGutsInfoType 'MGI_VectorizedDeclarations = VectInfo ModGutsInfoType 'MGI_TypeClassInstanceEnvironment = InstEnv ModGutsInfoType 'MGI_TypeFamilyInstanceEnvironment = FamInstEnv ModGutsInfoType 'MGI_SafeHaskell = SafeHaskellMode ModGutsInfoType 'MGI_NeedToTrustSelfPkg = Bool ModGutsInfoType 'MGI_Contents = CoreProgram class KnownInfo (info :: ModGutsInfo) where knownInfo :: ModGutsInfo infoDescription :: String infoData :: ModGuts -> ModGutsInfoType info infoFormat :: ModGutsInfoType info -> SDoc data SomeKnownModGutsInfo where Known :: KnownInfo info => Proxy info -> SomeKnownModGutsInfo unknown :: SomeKnownModGutsInfo -> ModGutsInfo unknown (Known (Proxy :: Proxy info)) = knownInfo @info instance Eq SomeKnownModGutsInfo where (==) = (==) `on` unknown instance Ord 
SomeKnownModGutsInfo where compare = compare `on` unknown instance Show SomeKnownModGutsInfo where showsPrec p (unknown -> info) = showParen (p >= 11) $ showString "Known @'" . shows info . showString " Proxy" do sig <- TH.sigD (TH.mkName "known") [t|ModGutsInfo -> SomeKnownModGutsInfo|] TH.TyConI (TH.DataD _ _ _ _ cons _) <- TH.reify ''ModGutsInfo clauses <- fmap (TH.FunD $ TH.mkName "known") . for cons $ \case TH.NormalC info _ -> TH.clause [TH.conP info []] (TH.normalB [e|Known (Proxy :: Proxy $(TH.promotedT info))|]) [] _ -> fail "internal error: could not define `known'" pure [sig, clauses] instance KnownInfo 'MGI_Module where knownInfo = MGI_Module infoDescription = "Module" infoData = (,) <$> mg_module <*> mg_hsc_src infoFormat = \(mod, hscSrc) -> ppr mod <> case hscSrc of HsSrcFile -> empty HsBootFile -> space <> text "[boot interface]" HsigFile -> space <> text "[signature]" instance KnownInfo 'MGI_Location where knownInfo = MGI_Location infoDescription = "Source locations" infoData = mg_loc infoFormat = ppr instance KnownInfo 'MGI_Exports where knownInfo = MGI_Exports infoDescription = "Exports" infoData = mg_exports infoFormat = ppr instance KnownInfo 'MGI_InPackageImports where knownInfo = MGI_InPackageImports infoDescription = "In-package imports" infoData = dep_mods . mg_deps infoFormat = pprListWith . pprAnnotated $ text "[boot]" instance KnownInfo 'MGI_PackageDeps where knownInfo = MGI_PackageDeps infoDescription = "Required packages" infoData = dep_pkgs . mg_deps infoFormat = pprListWith . pprAnnotated $ text "[must be trusted]" instance KnownInfo 'MGI_OrphanInstanceDeps where knownInfo = MGI_OrphanInstanceDeps infoDescription = "Orphan instances in" infoData = dep_orphs . mg_deps infoFormat = ppr instance KnownInfo 'MGI_TypeFamilyInstanceDeps where knownInfo = MGI_TypeFamilyInstanceDeps infoDescription = "Type family instantiations in" infoData = dep_finsts . mg_deps infoFormat = ppr instance KnownInfo 'MGI_UsedFiles where knownInfo = MGI_UsedFiles infoDescription = "Used files" infoData = mg_usages infoFormat = pprListWith (text . getUsageName) . sortBy compareUsage . map withUsageName where (UsageFile{}, name1) `compareUsage` (UsageFile{}, name2) = name1 `compare` name2 (UsageHomeModule{}, name1) `compareUsage` (UsageHomeModule{}, name2) = name1 `compare` name2 (UsagePackageModule{}, name1) `compareUsage` (UsagePackageModule{}, name2) = name1 `compare` name2 (UsageFile{}, _) `compareUsage` _ = LT _ `compareUsage` (UsageFile{}, _) = GT (UsagePackageModule{}, _) `compareUsage` _ = GT _ `compareUsage` (UsagePackageModule{}, _) = LT usageName UsagePackageModule{..} = moduleNameString $ moduleName usg_mod -- TODO: include package? usageName UsageHomeModule{..} = moduleNameString usg_mod_name usageName UsageFile{..} = usg_file_path withUsageName u = (u, usageName u) getUsageName = snd instance KnownInfo 'MGI_UsedTemplateHaskell where knownInfo = MGI_UsedTemplateHaskell infoDescription = "Template Haskell" infoData = mg_used_th infoFormat = yesNo instance KnownInfo 'MGI_Environment where knownInfo = MGI_Environment infoDescription = "Environment" infoData = mg_rdr_env infoFormat = pprListWith element . sortBy (stableNameCmp `on` gre_name) . concat . 
occEnvElts where element GRE{..} = ppr gre_name <> label [ parent gre_par , nonlocal gre_lcl , imports gre_imp ] label mlabels = case catMaybes mlabels of [] -> empty labels -> space <> pprListWith id labels parent NoParent = Nothing parent (ParentIs parent) = Just $ text "parent:" <+> ppr parent parent (FldParent parent mlabel) = Just $ text "parent:" <+> ppr parent <> case mlabel of Just label -> text "." <> text (unpackFS label) Nothing -> empty parent PatternSynonym = Just $ text "pattern synonym" nonlocal True = Nothing nonlocal False = Just $ text "nonlocal" imports _ = Nothing -- TODO instance KnownInfo 'MGI_Fixities where knownInfo = MGI_Fixities infoDescription = "Fixities" infoData = mg_fix_env infoFormat = ppr . nameEnvElts instance KnownInfo 'MGI_TypeConstructors where knownInfo = MGI_TypeConstructors infoDescription = "Type constructors" infoData = mg_tcs infoFormat = ppr instance KnownInfo 'MGI_TypeClassInstances where knownInfo = MGI_TypeClassInstances infoDescription = "Instances" infoData = mg_insts infoFormat = pprListWith $ \inst -> pprInstanceHdr inst <> case is_orphan inst of IsOrphan -> space <> text "[orphan]" NotOrphan _ -> empty instance KnownInfo 'MGI_TypeFamilyInstances where knownInfo = MGI_TypeFamilyInstances infoDescription = "Open type family instantiations" infoData = mg_fam_insts infoFormat = ppr instance KnownInfo 'MGI_PatternSynonyms where knownInfo = MGI_PatternSynonyms infoDescription = "Pattern synonyms" infoData = mg_patsyns infoFormat = ppr instance KnownInfo 'MGI_Rules where knownInfo = MGI_Rules infoDescription = "Rewrite rules" infoData = mg_rules infoFormat = pprListWith $ \case Rule{..} -> doubleQuotes (ftext ru_name) <+> ppr ru_act BuiltinRule{..} -> doubleQuotes (ftext ru_name) <+> text "[builtin for" <+> ppr ru_fn <> text "]" instance KnownInfo 'MGI_ForeignStubs where knownInfo = MGI_ForeignStubs infoDescription = "Foreign stubs" infoData = mg_foreign infoFormat = \case NoStubs -> none ForeignStubs prototypes cStubs -> maybeEmpty none id $ labeled "Prototypes" prototypes $+$ labeled "C stubs" cStubs where none = text "None" labeled label = maybeEmpty empty $ hang (text label <> colon) (length label + 1) instance KnownInfo 'MGI_Warnings where knownInfo = MGI_Warnings infoDescription = "Warning annotations" infoData = mg_warns infoFormat = pprListWith warning . \case NoWarnings -> [] WarnAll txt -> [(text "Whole module", txt)] WarnSome txts -> map (first ppr) txts where warning (what,txt) = warningFor what txt <+> warningText txt warningFor what (WarningTxt _ _) = what <> colon warningFor what (DeprecatedTxt _ _) = what <> text ": [DEPRECATED]" warningText = fsep . map (ftext . sl_fs . unLoc) . 
\case WarningTxt _ lits -> lits DeprecatedTxt _ lits -> lits instance KnownInfo 'MGI_Annotations where knownInfo = MGI_Annotations infoDescription = "Annotations" infoData = mg_anns infoFormat = pprListWith $ \Annotation{..} -> let target = case ann_target of NamedTarget name -> ppr name ModuleTarget mod -> text "module" <+> ppr mod payload = case ann_value of Serialized ty _bytes -> text (show ty) in parens $ target <> comma <+> payload instance KnownInfo 'MGI_HpcInfo where knownInfo = MGI_HpcInfo infoDescription = "HPC" infoData = mg_hpc_info infoFormat = \case HpcInfo ticks _ -> text "Used;" <+> ppr ticks <+> text "tick" <> if ticks == 1 then empty else char 's' NoHpcInfo True -> text "Unused, but depended on" NoHpcInfo False -> text "Unused" instance KnownInfo 'MGI_Breakpoints where knownInfo = MGI_Breakpoints infoDescription = "Breakpoints" infoData = mg_modBreaks infoFormat = ppr . maybe [] (toList . modBreaks_locs) -- TODO: We could inclode the other information, but the location is by far -- the simplest and is probably one of the most useful things instance KnownInfo 'MGI_VectorizationPragmas where knownInfo = MGI_VectorizationPragmas infoDescription = "Vectorization pragmas" infoData = mg_vect_decls infoFormat = ppr instance KnownInfo 'MGI_VectorizedDeclarations where knownInfo = MGI_VectorizedDeclarations infoDescription = "Vectorized declarations" infoData = mg_vect_info infoFormat = ppr instance KnownInfo 'MGI_TypeClassInstanceEnvironment where knownInfo = MGI_TypeClassInstanceEnvironment infoDescription = "Type class instance environment" infoData = mg_inst_env infoFormat = ppr . instEnvElts instance KnownInfo 'MGI_TypeFamilyInstanceEnvironment where knownInfo = MGI_TypeFamilyInstanceEnvironment infoDescription = "Type family instance environment" infoData = mg_fam_inst_env infoFormat = ppr . famInstEnvElts instance KnownInfo 'MGI_SafeHaskell where knownInfo = MGI_SafeHaskell infoDescription = "Safe Haskell" infoData = mg_safe_haskell infoFormat = ppr instance KnownInfo 'MGI_NeedToTrustSelfPkg where knownInfo = MGI_NeedToTrustSelfPkg infoDescription = "Needs to trust its own package" infoData = mg_trust_pkg infoFormat = yesNo instance KnownInfo 'MGI_Contents where knownInfo = MGI_Contents infoDescription = "Contents" infoData = mg_binds infoFormat = pprCoreBindings -- TODO: Newline first? formatModGuts :: [ModGutsInfo] -> ModGuts -> SDoc formatModGuts infos guts = let format (known -> Known (Proxy :: Proxy info)) = text (infoDescription @info) <> colon <+> infoFormat @info (infoData @info guts) in foldr ($+$) empty $ map format infos printModGuts :: [ModGutsInfo] -> ModGuts -> CoreM () printModGuts = (putMsg .) . formatModGuts fullInfo :: [ModGutsInfo] fullInfo = [minBound..maxBound] summaryInfo :: [ModGutsInfo] summaryInfo = [MGI_Module, MGI_Exports, MGI_Contents]
antalsz/hs-to-coq
structural-isomorphism-plugin/src/PrintModGuts.hs
Haskell
mit
15,062
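The PrintModGuts module above pairs a promoted enum (ModGutsInfo) with a type family (ModGutsInfoType) and a class (KnownInfo), so each tag carries its field type, description, accessor, and formatter, and a Template Haskell splice derives the value-to-witness function `known`. Below is a hypothetical, GHC-API-free sketch of that same pattern, assuming nothing from the repository; the names Info, Person, and SomeInfo are invented for illustration only.

{-# LANGUAGE DataKinds, TypeFamilies, KindSignatures, AllowAmbiguousTypes,
             TypeApplications, ScopedTypeVariables, GADTs, ViewPatterns #-}
module Main where

import Data.Proxy (Proxy(..))

-- Value-level tags that will also be used at the type level (DataKinds).
data Info = Name | Age

data Person = Person { personName :: String, personAge :: Int }

-- Each tag determines the type of the field it selects.
type family InfoType (i :: Info) where
  InfoType 'Name = String
  InfoType 'Age  = Int

-- Each tag knows how to describe, extract and render its field.
class KnownInfo (i :: Info) where
  describe :: String
  extract  :: Person -> InfoType i
  render   :: InfoType i -> String

instance KnownInfo 'Name where
  describe = "Name"
  extract  = personName
  render   = id

instance KnownInfo 'Age where
  describe = "Age"
  extract  = personAge
  render   = show

-- Existential wrapper, mirroring SomeKnownModGutsInfo.
data SomeInfo where
  Known :: KnownInfo i => Proxy i -> SomeInfo

-- Mirror of `known`: map a value-level tag to its type-level witness.
known :: Info -> SomeInfo
known Name = Known (Proxy :: Proxy 'Name)
known Age  = Known (Proxy :: Proxy 'Age)

-- Mirror of formatModGuts, specialized to one record and one tag.
format :: Person -> Info -> String
format p (known -> Known (Proxy :: Proxy i)) =
  describe @i ++ ": " ++ render @i (extract @i p)

main :: IO ()
main = mapM_ (putStrLn . format (Person "Ada" 36)) [Name, Age]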
{-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE MultiParamTypeClasses #-} module Batch.Parser ( parse , command , Script , Command (..) , Expression (..) ) where import Batch.Definitions import Batch.Lexer import Control.Applicative import Control.Exception (assert) import Text.Parsec (ParseError, Parsec, ParsecT, Stream, (<?>)) import qualified Text.Parsec as Parsec import Text.Parsec.Pos (SourcePos) type Script = [Command] parse :: String -> Either ParseError Command parse source = lexx source >>= parseTokens parseTokens :: Tokens -> Either ParseError Command parseTokens = Parsec.parse script "(tokens)" script :: Parsec Tokens st Command script = Program <$> Parsec.manyTill command Parsec.eof block :: Parsec Tokens st [Command] block = parenthesizedCommands <|> fmap (:[]) command where parenthesizedCommands = tok OpenParen *> Parsec.manyTill command (tok CloseParen) command :: Parsec Tokens st Command command = (actual <|> comment) <?> "command" where actual = do c <- nextCommand pipe c <|> redirect c <|> return c nextCommand = Parsec.choice [ at , echo , label , goto , ifCommand , ver , find , typeCommand ] comment :: Parsec Tokens st Command comment = Comment <$> (tok DoubleColon *> stringTok) at :: Parsec Tokens st Command at = Quieted <$> (tok At *> command) label :: Parsec Tokens st Command label = Label <$> (tok Colon *> stringTok) goto :: Parsec Tokens st Command goto = Goto <$> (tok KeywordGoto *> stringTok) echo :: Parsec Tokens st Command echo = tok KeywordEcho *> (dot <|> msg <|> on <|> off) where msg = EchoMessage <$> stringTok dot = tok Dot *> return (EchoMessage "") on = tok KeywordOn *> return (EchoEnabled True) off = tok KeywordOff *> return (EchoEnabled False) ver :: Parsec Tokens st Command ver = tok KeywordVer *> return Ver find :: Parsec Tokens st Command find = (\f -> Find f []) <$> (tok KeywordFind *> stringTok) typeCommand :: Parsec Tokens st Command typeCommand = (\p -> Type [p]) <$> (tok KeywordType *> stringTok) ifCommand :: Parsec Tokens st Command ifCommand = tok KeywordIf *> (caseInsensitive <|> nots <|> normal) where caseInsensitive = notted string <|> string string = do item1 <- stringTok cmp <- stringComparison item2 <- stringTok consequent <- block alternative <- (tok KeywordElse *> block) <|> return [Noop] return $ If (parseComparison item1 cmp item2) consequent alternative stringComparison = Parsec.choice $ map tok [ DoubleEqual , CompareOpEqu , CompareOpNeq , CompareOpLss , CompareOpLeq , CompareOpGtr , CompareOpGeq ] parseComparison l DoubleEqual r = EqualsExpr (StringExpr l) (StringExpr r) nots = tok KeywordNot *> return Noop -- TODO normal = fileExist <|> string <|> defined <|> errorLevel <|> cmdExtVersion notted p = tok KeywordNot *> p -- TODO fileExist = do tok KeywordExist filepath <- stringTok consequent <- block alternative <- (tok KeywordElse *> block) <|> return [Noop] return $ If (Exist filepath) consequent alternative errorLevel = do tok KeywordErrorLevel n <- integerTok c <- command return (If (ErrorLevelExpr n) [c] [Noop]) defined = do tok KeywordDefined var <- stringTok c <- command return (If (DefinedExpr var) [c] [Noop]) cmdExtVersion = do tok KeywordCmdExtVersion n <- integerTok c <- command return (If (CmdExtVersionExpr n) [c] [Noop]) pipe :: Command -> Parsec Tokens st Command pipe c = PipeCommand c <$> (tok Pipe *> command) redirect :: Command -> Parsec Tokens st Command redirect c = Redirection c <$> (tok GreaterThan *> filepathTok) filepathTok :: (Stream s m Token) => ParsecT s u m String filepathTok = stringTok <|> (tok 
KeywordNul *> return "Nul") stringTok :: (Stream s m Token) => ParsecT s u m String stringTok = (extract <$> satisfy f) <?> "string" where f (StringTok _) = True f _ = False extract (StringTok s) = s extract _ = assert False undefined integerTok :: (Stream s m Token) => ParsecT s u m Integer integerTok = (extract <$> satisfy f) <?> "integer" where f (IntegerTok _) = True f _ = False extract (IntegerTok i) = i extract _ = assert False undefined tok :: (Stream s m Token) => Token -> ParsecT s u m Token tok t = satisfy (==t) <?> show t satisfy :: (Stream s m Token) => (Token -> Bool) -> ParsecT s u m Token satisfy f = Parsec.tokenPrim show (\pos c _cs -> updatePosToken pos c) (\c -> if f c then Just c else Nothing) updatePosToken :: SourcePos -> Token -> SourcePos updatePosToken pos _ = Parsec.incSourceColumn pos 1
danstiner/transpiler
src/Batch/Parser.hs
Haskell
mit
4,947
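Batch.Parser runs Parsec over a custom token stream rather than over characters: its `satisfy` is built on Parsec.tokenPrim, and `tok` matches a single token by equality. The following is a minimal, self-contained sketch of that technique, assuming only the parsec package; the Token type and the `atWord` parser are invented stand-ins for Batch.Lexer's tokens, not the project's real grammar.

module Main where

import Text.Parsec (Parsec, (<?>))
import qualified Text.Parsec as Parsec
import Text.Parsec.Pos (incSourceColumn)

-- Hypothetical token type standing in for Batch.Lexer's Tokens.
data Token = TAt | TWord String
  deriving (Show, Eq)

-- Consume one token satisfying the predicate; positions are advanced
-- by one column per token, as in the parser above.
satisfyTok :: (Token -> Bool) -> Parsec [Token] () Token
satisfyTok f = Parsec.tokenPrim show
                                (\pos _ _ -> incSourceColumn pos 1)
                                (\t -> if f t then Just t else Nothing)

-- Match a word token and return its payload.
word :: Parsec [Token] () String
word = (unwrap <$> satisfyTok isWord) <?> "word"
  where isWord (TWord _) = True
        isWord _         = False
        unwrap (TWord s) = s
        unwrap _         = error "unreachable"

-- Parse an at-sign followed by a word, i.e. the token form of "@echo".
atWord :: Parsec [Token] () String
atWord = satisfyTok (== TAt) *> word

main :: IO ()
main = print (Parsec.parse atWord "(tokens)" [TAt, TWord "echo"])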
{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE StrictData #-} {-# LANGUAGE TupleSections #-} -- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-appstream-imagebuilder-domainjoininfo.html module Stratosphere.ResourceProperties.AppStreamImageBuilderDomainJoinInfo where import Stratosphere.ResourceImports -- | Full data type definition for AppStreamImageBuilderDomainJoinInfo. See -- 'appStreamImageBuilderDomainJoinInfo' for a more convenient constructor. data AppStreamImageBuilderDomainJoinInfo = AppStreamImageBuilderDomainJoinInfo { _appStreamImageBuilderDomainJoinInfoDirectoryName :: Maybe (Val Text) , _appStreamImageBuilderDomainJoinInfoOrganizationalUnitDistinguishedName :: Maybe (Val Text) } deriving (Show, Eq) instance ToJSON AppStreamImageBuilderDomainJoinInfo where toJSON AppStreamImageBuilderDomainJoinInfo{..} = object $ catMaybes [ fmap (("DirectoryName",) . toJSON) _appStreamImageBuilderDomainJoinInfoDirectoryName , fmap (("OrganizationalUnitDistinguishedName",) . toJSON) _appStreamImageBuilderDomainJoinInfoOrganizationalUnitDistinguishedName ] -- | Constructor for 'AppStreamImageBuilderDomainJoinInfo' containing required -- fields as arguments. appStreamImageBuilderDomainJoinInfo :: AppStreamImageBuilderDomainJoinInfo appStreamImageBuilderDomainJoinInfo = AppStreamImageBuilderDomainJoinInfo { _appStreamImageBuilderDomainJoinInfoDirectoryName = Nothing , _appStreamImageBuilderDomainJoinInfoOrganizationalUnitDistinguishedName = Nothing } -- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-appstream-imagebuilder-domainjoininfo.html#cfn-appstream-imagebuilder-domainjoininfo-directoryname asibdjiDirectoryName :: Lens' AppStreamImageBuilderDomainJoinInfo (Maybe (Val Text)) asibdjiDirectoryName = lens _appStreamImageBuilderDomainJoinInfoDirectoryName (\s a -> s { _appStreamImageBuilderDomainJoinInfoDirectoryName = a }) -- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-appstream-imagebuilder-domainjoininfo.html#cfn-appstream-imagebuilder-domainjoininfo-organizationalunitdistinguishedname asibdjiOrganizationalUnitDistinguishedName :: Lens' AppStreamImageBuilderDomainJoinInfo (Maybe (Val Text)) asibdjiOrganizationalUnitDistinguishedName = lens _appStreamImageBuilderDomainJoinInfoOrganizationalUnitDistinguishedName (\s a -> s { _appStreamImageBuilderDomainJoinInfoOrganizationalUnitDistinguishedName = a })
frontrowed/stratosphere
library-gen/Stratosphere/ResourceProperties/AppStreamImageBuilderDomainJoinInfo.hs
Haskell
mit
2,522
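The ToJSON instance above uses the `object $ catMaybes [...]` idiom so that optional CloudFormation properties are omitted from the output rather than serialized as null. A small, self-contained sketch of that idiom, independent of stratosphere (the DomainJoinInfo type here is a hypothetical stand-in for the generated resource property type):

{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
module Main where

import Data.Aeson (ToJSON(..), object, encode)
import Data.Maybe (catMaybes)
import Data.Text (Text)
import qualified Data.ByteString.Lazy.Char8 as BL

-- Hypothetical stand-in for the generated resource property type.
data DomainJoinInfo = DomainJoinInfo
  { djiDirectoryName :: Maybe Text
  , djiOrgUnit       :: Maybe Text
  }

instance ToJSON DomainJoinInfo where
  -- Nothing fields disappear from the JSON object entirely.
  toJSON dji = object $ catMaybes
    [ fmap (("DirectoryName",) . toJSON) (djiDirectoryName dji)
    , fmap (("OrganizationalUnitDistinguishedName",) . toJSON) (djiOrgUnit dji)
    ]

main :: IO ()
main = BL.putStrLn . encode $ DomainJoinInfo (Just "corp.example.com") Nothing
-- prints {"DirectoryName":"corp.example.com"}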
{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE StrictData #-} {-# LANGUAGE TupleSections #-} -- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisanalyticsv2-applicationreferencedatasource-jsonmappingparameters.html module Stratosphere.ResourceProperties.KinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParameters where import Stratosphere.ResourceImports -- | Full data type definition for -- KinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParameters. -- See -- 'kinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParameters' -- for a more convenient constructor. data KinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParameters = KinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParameters { _kinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParametersRecordRowPath :: Val Text } deriving (Show, Eq) instance ToJSON KinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParameters where toJSON KinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParameters{..} = object $ catMaybes [ (Just . ("RecordRowPath",) . toJSON) _kinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParametersRecordRowPath ] -- | Constructor for -- 'KinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParameters' -- containing required fields as arguments. kinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParameters :: Val Text -- ^ 'kavardsjsonmpRecordRowPath' -> KinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParameters kinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParameters recordRowPatharg = KinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParameters { _kinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParametersRecordRowPath = recordRowPatharg } -- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisanalyticsv2-applicationreferencedatasource-jsonmappingparameters.html#cfn-kinesisanalyticsv2-applicationreferencedatasource-jsonmappingparameters-recordrowpath kavardsjsonmpRecordRowPath :: Lens' KinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParameters (Val Text) kavardsjsonmpRecordRowPath = lens _kinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParametersRecordRowPath (\s a -> s { _kinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParametersRecordRowPath = a })
frontrowed/stratosphere
library-gen/Stratosphere/ResourceProperties/KinesisAnalyticsV2ApplicationReferenceDataSourceJSONMappingParameters.hs
Haskell
mit
2,477
{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} module Custom.Codegen where import Data.Word import Data.String import Data.List import Data.Function import qualified Data.Map as Map import Control.Monad.State import Control.Applicative import LLVM.General.AST import LLVM.General.AST.Global import qualified LLVM.General.AST as AST import qualified LLVM.General.AST.Constant as C import qualified LLVM.General.AST.Attribute as A import qualified LLVM.General.AST.CallingConvention as CC import qualified LLVM.General.AST.IntegerPredicate as IP ------------------------------------------------------------------------------- -- Module Level ------------------------------------------------------------------------------- newtype LLVM a = LLVM { unLLVM :: State AST.Module a } deriving (Functor, Applicative, Monad, MonadState AST.Module ) runLLVM :: AST.Module -> LLVM a -> AST.Module runLLVM = flip (execState . unLLVM) emptyModule :: String -> AST.Module emptyModule label = defaultModule { moduleName = label } addDefn :: Definition -> LLVM () addDefn d = do defs <- gets moduleDefinitions modify $ \s -> s { moduleDefinitions = defs ++ [d] } define :: Type -> String -> [(Type, Name)] -> [BasicBlock] -> LLVM () define retty label argtys body = addDefn $ GlobalDefinition $ functionDefaults { name = Name label , parameters = ([Parameter ty nm [] | (ty, nm) <- argtys], False) , returnType = retty , basicBlocks = body } external :: Type -> String -> [(Type, Name)] -> [BasicBlock] -> LLVM () external retty label argtys body = addDefn $ GlobalDefinition $ functionDefaults { name = Name label , parameters = ([Parameter ty nm [] | (ty, nm) <- argtys], False) , returnType = retty , basicBlocks = body } --------------------------------------------------------------------------------- -- Types ------------------------------------------------------------------------------- int64 :: Type int64 = IntegerType 64 ------------------------------------------------------------------------------- -- Names ------------------------------------------------------------------------------- type Names = Map.Map String Int uniqueName :: String -> Names -> (String, Names) uniqueName nm ns = case Map.lookup nm ns of Nothing -> (nm, Map.insert nm 1 ns) Just ix -> (nm ++ show ix, Map.insert nm (ix+1) ns) instance IsString Name where fromString = Name . 
fromString ------------------------------------------------------------------------------- -- Codegen State ------------------------------------------------------------------------------- type SymbolTable = [(String, Operand)] data CodegenState = CodegenState { currentBlock :: Name -- Name of the active block to append to , blocks :: Map.Map Name BlockState -- Blocks for function , symtab :: SymbolTable -- Function scope symbol table , blockCount :: Int -- Count of basic blocks , count :: Word -- Count of unnamed instructions , names :: Names -- Name Supply } deriving Show data BlockState = BlockState { idx :: Int -- Block index , stack :: [Named Instruction] -- Stack of instructions , term :: Maybe (Named Terminator) -- Block terminator } deriving Show ------------------------------------------------------------------------------- -- Codegen Operations ------------------------------------------------------------------------------- newtype Codegen a = Codegen { runCodegen :: State CodegenState a } deriving (Functor, Applicative, Monad, MonadState CodegenState ) sortBlocks :: [(Name, BlockState)] -> [(Name, BlockState)] sortBlocks = sortBy (compare `on` (idx . snd)) createBlocks :: CodegenState -> [BasicBlock] createBlocks m = map makeBlock $ sortBlocks $ Map.toList (blocks m) makeBlock :: (Name, BlockState) -> BasicBlock makeBlock (l, (BlockState _ s t)) = BasicBlock l s (maketerm t) where maketerm (Just x) = x maketerm Nothing = error $ "Block has no terminator: " ++ (show l) entryBlockName :: String entryBlockName = "entry" emptyBlock :: Int -> BlockState emptyBlock i = BlockState i [] Nothing emptyCodegen :: CodegenState emptyCodegen = CodegenState (Name entryBlockName) Map.empty [] 1 0 Map.empty execCodegen :: Codegen a -> CodegenState execCodegen m = execState (runCodegen m) emptyCodegen fresh :: Codegen Word fresh = do i <- gets count modify $ \s -> s { count = 1 + i } return $ i + 1 instr :: Instruction -> Codegen (Operand) instr ins = do n <- fresh let ref = (UnName n) blk <- current let i = stack blk modifyBlock (blk { stack = i ++ [ref := ins] } ) return $ local ref terminator :: Named Terminator -> Codegen (Named Terminator) terminator trm = do blk <- current modifyBlock (blk { term = Just trm }) return trm ------------------------------------------------------------------------------- -- Block Stack ------------------------------------------------------------------------------- entry :: Codegen Name entry = gets currentBlock addBlock :: String -> Codegen Name addBlock bname = do bls <- gets blocks ix <- gets blockCount nms <- gets names let new = emptyBlock ix (qname, supply) = uniqueName bname nms modify $ \s -> s { blocks = Map.insert (Name qname) new bls , blockCount = ix + 1 , names = supply } return (Name qname) setBlock :: Name -> Codegen Name setBlock bname = do modify $ \s -> s { currentBlock = bname } return bname getBlock :: Codegen Name getBlock = gets currentBlock modifyBlock :: BlockState -> Codegen () modifyBlock new = do active <- gets currentBlock modify $ \s -> s { blocks = Map.insert active new (blocks s) } current :: Codegen BlockState current = do c <- gets currentBlock blks <- gets blocks case Map.lookup c blks of Just x -> return x Nothing -> error $ "No such block: " ++ show c ------------------------------------------------------------------------------- -- Symbol Table ------------------------------------------------------------------------------- assign :: String -> Operand -> Codegen () assign var x = do lcls <- gets symtab modify $ \s -> s { symtab = 
[(var, x)] ++ lcls } getvar :: String -> Codegen Operand getvar var = do syms <- gets symtab case lookup var syms of Just x -> return x Nothing -> error $ "Local variable not in scope: " ++ show var ------------------------------------------------------------------------------- -- References local :: Name -> Operand local = LocalReference global :: Name -> C.Constant global = C.GlobalReference externf :: Name -> Operand externf = ConstantOperand . C.GlobalReference -- Arithmetic and Constants iadd :: Operand -> Operand -> Codegen Operand iadd a b = instr $ Add False False a b [] isub :: Operand -> Operand -> Codegen Operand isub a b = instr $ Sub False False a b [] imul :: Operand -> Operand -> Codegen Operand imul a b = instr $ Mul True True a b [] idiv :: Operand -> Operand -> Codegen Operand idiv a b = instr $ SDiv False a b [] icmp :: IP.IntegerPredicate -> Operand -> Operand -> Codegen Operand icmp cond a b = instr $ ICmp cond a b [] {-- fadd :: Operand -> Operand -> Codegen Operand fadd a b = instr $ FAdd a b [] fsub :: Operand -> Operand -> Codegen Operand fsub a b = instr $ FSub a b [] fmul :: Operand -> Operand -> Codegen Operand fmul a b = instr $ FMul a b [] fdiv :: Operand -> Operand -> Codegen Operand fdiv a b = instr $ FDiv a b [] fcmp :: FP.FloatingPointPredicate -> Operand -> Operand -> Codegen Operand fcmp cond a b = instr $ FCmp cond a b [] --} cons :: C.Constant -> Operand cons = ConstantOperand {-- uitofp :: Type -> Operand -> Codegen Operand uitofp ty a = instr $ UIToFP a ty [] --} toArgs :: [Operand] -> [(Operand, [A.ParameterAttribute])] toArgs = map (\x -> (x, [])) -- Effects call :: Operand -> [Operand] -> Codegen Operand call fn args = instr $ Call False CC.C [] (Right fn) (toArgs args) [] [] alloca :: Type -> Codegen Operand alloca ty = instr $ Alloca ty Nothing 0 [] store :: Operand -> Operand -> Codegen Operand store ptr val = instr $ Store False ptr val Nothing 0 [] load :: Operand -> Codegen Operand load ptr = instr $ Load False ptr Nothing 0 [] -- Control Flow br :: Name -> Codegen (Named Terminator) br val = terminator $ Do $ Br val [] cbr :: Operand -> Name -> Name -> Codegen (Named Terminator) cbr cond tr fl = terminator $ Do $ CondBr cond tr fl [] ret :: Operand -> Codegen (Named Terminator) ret val = terminator $ Do $ Ret (Just val) []
eigengo/hwsexp
core/main/Custom/Codegen.hs
Haskell
apache-2.0
8,731
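Custom.Codegen threads a name supply through its state so that block labels stay unique (`uniqueName`) and instructions receive fresh unnamed ids (`fresh`). The sketch below isolates that name-supply idea in plain State with no llvm-general dependency; it is an illustration of the technique, not the module's actual code.

module Main where

import Control.Monad.State
import qualified Data.Map as Map

type Names = Map.Map String Int

-- Same idea as `uniqueName` above: reuse the base name the first time,
-- then append an increasing counter on later requests.
uniqueName :: String -> State Names String
uniqueName nm = state $ \ns ->
  case Map.lookup nm ns of
    Nothing -> (nm,            Map.insert nm 1 ns)
    Just ix -> (nm ++ show ix, Map.insert nm (ix + 1) ns)

main :: IO ()
main = print $ evalState (mapM uniqueName ["entry", "if", "if", "if"]) Map.empty
-- ["entry","if","if1","if2"]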
module Synthax.Lexer ( names , opNames , symbol , identifier , reserved , reservedOp , parens , integer , float , semiSep1 ) where import Prelude import Text.Parsec import qualified Text.Parsec.Token as Token import Text.Parsec.Language import Text.Parsec.String names :: [String] names = words "Source Code Module Gain Crossfade Filter Let" opNames :: [String] opNames = words "<<< >>>" lexer :: Token.TokenParser a lexer = Token.makeTokenParser emptyDef { Token.commentStart = "/*" , Token.commentEnd = "*/" , Token.commentLine = "#" , Token.identStart = letter , Token.identLetter = letter <|> char '_' , Token.reservedNames = names , Token.reservedOpNames = opNames } identifier :: Parser String identifier = Token.identifier lexer symbol :: String -> Parser String symbol = Token.symbol lexer reserved :: String -> Parser () reserved = Token.reserved lexer reservedOp :: String -> Parser () reservedOp = Token.reservedOp lexer parens :: Parser a -> Parser a parens = Token.parens lexer integer :: Parser Integer integer = Token.integer lexer float :: Parser Double float = Token.float lexer semiSep1 :: Parser a -> Parser [a] semiSep1 = Token.semiSep1 lexer
burz/sonada
Synthax/Lexer.hs
Haskell
apache-2.0
1,211
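Synthax.Lexer wraps Token.makeTokenParser and re-exports the derived sub-parsers. A hypothetical usage sketch follows, assuming only the parsec package; the `gain` parser and the "Gain (0.5)" syntax are invented for illustration and are not Synthax's actual grammar.

module Main where

import Text.Parsec (parse)
import Text.Parsec.String (Parser)
import Text.Parsec.Language (emptyDef)
import qualified Text.Parsec.Token as Token

-- A tiny lexer built the same way as the one above.
lexer :: Token.TokenParser ()
lexer = Token.makeTokenParser emptyDef { Token.reservedNames = ["Gain"] }

reserved :: String -> Parser ()
reserved = Token.reserved lexer

float :: Parser Double
float = Token.float lexer

parens :: Parser a -> Parser a
parens = Token.parens lexer

-- Parses e.g. "Gain (0.5)": a reserved word applied to a parenthesized float.
gain :: Parser Double
gain = reserved "Gain" *> parens float

main :: IO ()
main = print (parse gain "" "Gain (0.5)")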
-- Coin denominations.
v = [1, 5, 10, 50, 100, 500]

-- Weighted sum of counts and denominations.
acm [] [] = 0
acm (a:as) (b:bs) = (a*b) + acm as bs

-- 1 if the total value reaches 1000, otherwise 0.
ans i v = let a = acm i v in if a >= 1000 then 1 else 0

main = do
  l <- getLine
  let i = map read $ words l :: [Int]
      o = ans i v
  print o
a143753/AOJ
0296.hs
Haskell
apache-2.0
255
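For reference, `acm` above is just a dot product of the entered coin counts with the denomination list. A hypothetical zipWith formulation of the same check:

module Main where

-- Equivalent of acm/ans above: weighted sum of counts and denominations,
-- then a threshold test against 1000.
denoms :: [Int]
denoms = [1, 5, 10, 50, 100, 500]

reaches1000 :: [Int] -> Int
reaches1000 counts = if sum (zipWith (*) counts denoms) >= 1000 then 1 else 0

main :: IO ()
main = getLine >>= print . reaches1000 . map read . words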
{-# LANGUAGE FlexibleContexts, ScopedTypeVariables, CPP #-} {-| Utility functions. -} {- Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -} module Ganeti.Utils ( debug , debugFn , debugXy , sepSplit , findFirst , stdDev , if' , select , applyIf , commaJoin , ensureQuoted , tryRead , readMaybe , formatTable , printTable , parseUnit , parseUnitAssumeBinary , plural , niceSort , niceSortKey , exitIfBad , exitErr , exitWhen , exitUnless , logWarningIfBad , rStripSpace , newUUID , isUUID , getCurrentTime , getCurrentTimeUSec , clockTimeToString , clockTimeToCTime , clockTimeToUSec , cTimeToClockTime , diffClockTimes , chompPrefix , warn , wrap , trim , defaultHead , exitIfEmpty , splitEithers , recombineEithers , resolveAddr , monadicThe , setOwnerAndGroupFromNames , setOwnerWGroupR , formatOrdinal , tryAndLogIOError , withDefaultOnIOError , lockFile , FStat , nullFStat , getFStat , getFStatSafe , needsReload , watchFile , watchFileBy , safeRenameFile , FilePermissions(..) , ensurePermissions , ordNub , isSubsequenceOf , frequency ) where import Control.Concurrent import Control.Exception (try, bracket) import Control.Monad import Control.Monad.Error import Control.Monad.Fail (MonadFail) import qualified Data.Attoparsec.ByteString as A import qualified Data.ByteString.UTF8 as UTF8 import Data.Char (toUpper, isAlphaNum, isDigit, isSpace) import qualified Data.Either as E import Data.Function (on) import Data.IORef import Data.List import qualified Data.Map as M import Data.Maybe (fromMaybe) import qualified Data.Set as S import Foreign.C.Types (CTime(..)) import Numeric (showOct) import System.Directory (renameFile, createDirectoryIfMissing) import System.FilePath.Posix (takeDirectory) import System.INotify import System.Posix.Types import Debug.Trace import Network.Socket import Ganeti.BasicTypes import Ganeti.Compat import qualified Ganeti.ConstantUtils as ConstantUtils import Ganeti.Logging import Ganeti.Runtime import System.IO import System.Exit import System.Posix.Files import System.Posix.IO import System.Time (ClockTime(..), getClockTime, TimeDiff(..)) import qualified System.Time as STime -- * Debug functions -- | To be used only for debugging, breaks referential integrity. 
debug :: Show a => a -> a debug x = trace (show x) x -- | Displays a modified form of the second parameter before returning -- it. debugFn :: Show b => (a -> b) -> a -> a debugFn fn x = debug (fn x) `seq` x -- | Show the first parameter before returning the second one. debugXy :: Show a => a -> b -> b debugXy = seq . debug -- * Miscellaneous -- | Apply the function if condition holds, otherwise use default value. applyIf :: Bool -> (a -> a) -> a -> a applyIf b f x = if b then f x else x -- | Comma-join a string list. commaJoin :: [String] -> String commaJoin = intercalate "," -- | Split a list on a separator and return a list of lists. sepSplit :: Eq a => a -> [a] -> [[a]] sepSplit sep s | null s = [] | null xs = [x] | null ys = [x,[]] | otherwise = x:sepSplit sep ys where (x, xs) = break (== sep) s ys = drop 1 xs -- | Finds the first unused element in a set starting from a given base. findFirst :: (Ord a, Enum a) => a -> S.Set a -> a findFirst base xs = case S.splitMember base xs of (_, False, _) -> base (_, True, ys) -> fromMaybe (succ base) $ (fmap fst . find (uncurry (<)) . zip [succ base..] . S.toAscList $ ys) `mplus` fmap (succ . fst) (S.maxView ys) -- | Simple pluralize helper plural :: Int -> String -> String -> String plural 1 s _ = s plural _ _ p = p -- | Ensure a value is quoted if needed. ensureQuoted :: String -> String ensureQuoted v = if not (all (\c -> isAlphaNum c || c == '.') v) then '\'':v ++ "'" else v -- * Mathematical functions -- Simple and slow statistical functions, please replace with better -- versions -- | Standard deviation function. stdDev :: [Double] -> Double stdDev lst = -- first, calculate the list length and sum lst in a single step, -- for performance reasons let (ll', sx) = foldl' (\(rl, rs) e -> let rl' = rl + 1 rs' = rs + e in rl' `seq` rs' `seq` (rl', rs')) (0::Int, 0) lst ll = fromIntegral ll'::Double mv = sx / ll av = foldl' (\accu em -> let d = em - mv in accu + d * d) 0.0 lst in sqrt (av / ll) -- stddev -- * Logical functions -- Avoid syntactic sugar and enhance readability. These functions are proposed -- by some for inclusion in the Prelude, and at the moment they are present -- (with various definitions) in the utility-ht package. Some rationale and -- discussion is available at <http://www.haskell.org/haskellwiki/If-then-else> -- | \"if\" as a function, rather than as syntactic sugar. if' :: Bool -- ^ condition -> a -- ^ \"then\" result -> a -- ^ \"else\" result -> a -- ^ \"then\" or "else" result depending on the condition if' True x _ = x if' _ _ y = y -- * Parsing utility functions -- | Parse results from readsPrec. parseChoices :: MonadFail m => String -> String -> [(a, String)] -> m a parseChoices _ _ [(v, "")] = return v parseChoices name s [(_, e)] = fail $ name ++ ": leftover characters when parsing '" ++ s ++ "': '" ++ e ++ "'" parseChoices name s _ = fail $ name ++ ": cannot parse string '" ++ s ++ "'" -- | Safe 'read' function returning data encapsulated in a Result. tryRead :: (MonadFail m, Read a) => String -> String -> m a tryRead name s = parseChoices name s $ reads s -- | Parse a string using the 'Read' instance. -- Succeeds if there is exactly one valid result. -- -- /Backport from Text.Read introduced in base-4.6.0.0/ readMaybe :: Read a => String -> Maybe a readMaybe s = case reads s of [(a, "")] -> Just a _ -> Nothing -- | Format a table of strings to maintain consistent length. 
formatTable :: [[String]] -> [Bool] -> [[String]] formatTable vals numpos = let vtrans = transpose vals -- transpose, so that we work on rows -- rather than columns mlens = map (maximum . map length) vtrans expnd = map (\(flds, isnum, ml) -> map (\val -> let delta = ml - length val filler = replicate delta ' ' in if delta > 0 then if isnum then filler ++ val else val ++ filler else val ) flds ) (zip3 vtrans numpos mlens) in transpose expnd -- | Constructs a printable table from given header and rows printTable :: String -> [String] -> [[String]] -> [Bool] -> String printTable lp header rows isnum = unlines . map ((++) lp . (:) ' ' . unwords) $ formatTable (header:rows) isnum -- | Converts a unit (e.g. m or GB) into a scaling factor. parseUnitValue :: (MonadFail m) => Bool -> String -> m Rational parseUnitValue noDecimal unit -- binary conversions first | null unit = return 1 | unit == "m" || upper == "MIB" = return 1 | unit == "g" || upper == "GIB" = return kbBinary | unit == "t" || upper == "TIB" = return $ kbBinary * kbBinary -- SI conversions | unit == "M" || upper == "MB" = return mbFactor | unit == "G" || upper == "GB" = return $ mbFactor * kbDecimal | unit == "T" || upper == "TB" = return $ mbFactor * kbDecimal * kbDecimal | otherwise = fail $ "Unknown unit '" ++ unit ++ "'" where upper = map toUpper unit kbBinary = 1024 :: Rational kbDecimal = if noDecimal then kbBinary else 1000 decToBin = kbDecimal / kbBinary -- factor for 1K conversion mbFactor = decToBin * decToBin -- twice the factor for just 1K -- | Tries to extract number and scale from the given string. -- -- Input must be in the format NUMBER+ SPACE* [UNIT]. If no unit is -- specified, it defaults to MiB. Return value is always an integral -- value in MiB; if the first argument is True, all kilos are binary. parseUnitEx :: (MonadFail m, Integral a, Read a) => Bool -> String -> m a parseUnitEx noDecimal str = -- TODO: enhance this by splitting the unit parsing code out and -- accepting floating-point numbers case (reads str::[(Int, String)]) of [(v, suffix)] -> let unit = dropWhile (== ' ') suffix in do scaling <- parseUnitValue noDecimal unit return $ truncate (fromIntegral v * scaling) _ -> fail $ "Can't parse string '" ++ str ++ "'" -- | Tries to extract number and scale from the given string. -- -- Input must be in the format NUMBER+ SPACE* [UNIT]. If no unit is -- specified, it defaults to MiB. Return value is always an integral -- value in MiB. parseUnit :: (MonadFail m, Integral a, Read a) => String -> m a parseUnit = parseUnitEx False -- | Tries to extract a number and scale from a given string, taking -- all kilos to be binary. parseUnitAssumeBinary :: (MonadFail m, Integral a, Read a) => String -> m a parseUnitAssumeBinary = parseUnitEx True -- | Unwraps a 'Result', exiting the program if it is a 'Bad' value, -- otherwise returning the actual contained value. exitIfBad :: String -> Result a -> IO a exitIfBad msg (Bad s) = exitErr (msg ++ ": " ++ s) exitIfBad _ (Ok v) = return v -- | Exits immediately with an error message. exitErr :: String -> IO a exitErr errmsg = do hPutStrLn stderr $ "Error: " ++ errmsg exitWith (ExitFailure 1) -- | Exits with an error message if the given boolean condition if true. exitWhen :: Bool -> String -> IO () exitWhen True msg = exitErr msg exitWhen False _ = return () -- | Exits with an error message /unless/ the given boolean condition -- if true, the opposite of 'exitWhen'. 
exitUnless :: Bool -> String -> IO () exitUnless cond = exitWhen (not cond) -- | Unwraps a 'Result', logging a warning message and then returning a default -- value if it is a 'Bad' value, otherwise returning the actual contained value. logWarningIfBad :: String -> a -> Result a -> IO a logWarningIfBad msg defVal (Bad s) = do logWarning $ msg ++ ": " ++ s return defVal logWarningIfBad _ _ (Ok v) = return v -- | Try an IO interaction, log errors and unfold as a 'Result'. tryAndLogIOError :: IO a -> String -> (a -> Result b) -> IO (Result b) tryAndLogIOError io msg okfn = try io >>= either (\ e -> do let combinedmsg = msg ++ ": " ++ show (e :: IOError) logError combinedmsg return . Bad $ combinedmsg) (return . okfn) -- | Try an IO interaction and return a default value if the interaction -- throws an IOError. withDefaultOnIOError :: a -> IO a -> IO a withDefaultOnIOError a io = try io >>= either (\ (_ :: IOError) -> return a) return -- | Print a warning, but do not exit. warn :: String -> IO () warn = hPutStrLn stderr . (++) "Warning: " -- | Helper for 'niceSort'. Computes the key element for a given string. extractKey :: [Either Integer String] -- ^ Current (partial) key, reversed -> String -- ^ Remaining string -> ([Either Integer String], String) extractKey ek [] = (reverse ek, []) extractKey ek xs@(x:_) = let (span_fn, conv_fn) = if isDigit x then (isDigit, Left . read) else (not . isDigit, Right) (k, rest) = span span_fn xs in extractKey (conv_fn k:ek) rest {-| Sort a list of strings based on digit and non-digit groupings. Given a list of names @['a1', 'a10', 'a11', 'a2']@ this function will sort the list in the logical order @['a1', 'a2', 'a10', 'a11']@. The sort algorithm breaks each name in groups of either only-digits or no-digits, and sorts based on each group. Internally, this is not implemented via regexes (like the Python version), but via actual splitting of the string in sequences of either digits or everything else, and converting the digit sequences in /Left Integer/ and the non-digit ones in /Right String/, at which point sorting becomes trivial due to the built-in 'Either' ordering; we only need one extra step of dropping the key at the end. -} niceSort :: [String] -> [String] niceSort = niceSortKey id -- | Key-version of 'niceSort'. We use 'sortBy' and @compare `on` fst@ -- since we don't want to add an ordering constraint on the /a/ type, -- hence the need to only compare the first element of the /(key, a)/ -- tuple. niceSortKey :: (a -> String) -> [a] -> [a] niceSortKey keyfn = map snd . sortBy (compare `on` fst) . map (\s -> (fst . extractKey [] $ keyfn s, s)) -- | Strip space characthers (including newline). As this is -- expensive, should only be run on small strings. rStripSpace :: String -> String rStripSpace = reverse . dropWhile isSpace . reverse -- | Returns a random UUID. -- This is a Linux-specific method as it uses the /proc filesystem. newUUID :: IO String newUUID = do contents <- readFile ConstantUtils.randomUuidFile return $! rStripSpace $ take 128 contents -- | Parser that doesn't fail on a valid UUIDs (same as -- "Ganeti.Constants.uuidRegex"). uuidCheckParser :: A.Parser () uuidCheckParser = do -- Not using Attoparsec.Char8 because "all attempts to use characters -- above code point U+00FF will give wrong answers" and we don't -- want such things to be accepted as UUIDs. 
let lowerHex = A.satisfy (\c -> (48 <= c && c <= 57) || -- 0-9 (97 <= c && c <= 102)) -- a-f hx n = A.count n lowerHex d = A.word8 45 -- '-' void $ hx 8 >> d >> hx 4 >> d >> hx 4 >> d >> hx 4 >> d >> hx 12 -- | Checks if the string is a valid UUID as in "Ganeti.Constants.uuidRegex". isUUID :: String -> Bool isUUID = isRight . A.parseOnly (uuidCheckParser <* A.endOfInput) . UTF8.fromString -- | Returns the current time as an 'Integer' representing the number -- of seconds from the Unix epoch. getCurrentTime :: IO Integer getCurrentTime = do TOD ctime _ <- getClockTime return ctime -- | Returns the current time as an 'Integer' representing the number -- of microseconds from the Unix epoch (hence the need for 'Integer'). getCurrentTimeUSec :: IO Integer getCurrentTimeUSec = liftM clockTimeToUSec getClockTime -- | Convert a ClockTime into a (seconds-only) timestamp. clockTimeToString :: ClockTime -> String clockTimeToString (TOD t _) = show t -- | Convert a ClockTime into a (seconds-only) 'EpochTime' (AKA @time_t@). clockTimeToCTime :: ClockTime -> EpochTime clockTimeToCTime (TOD secs _) = fromInteger secs -- | Convert a ClockTime the number of microseconds since the epoch. clockTimeToUSec :: ClockTime -> Integer clockTimeToUSec (TOD ctime pico) = -- pico: 10^-12, micro: 10^-6, so we have to shift seconds left and -- picoseconds right ctime * 1000000 + pico `div` 1000000 -- | Convert a ClockTime into a (seconds-only) 'EpochTime' (AKA @time_t@). cTimeToClockTime :: EpochTime -> ClockTime cTimeToClockTime (CTime timet) = TOD (toInteger timet) 0 -- | A version of `diffClockTimes` that works around ghc bug #2519. diffClockTimes :: ClockTime -> ClockTime -> TimeDiff diffClockTimes t1 t2 = let delta = STime.diffClockTimes t1 t2 secondInPicoseconds = 1000000000000 in if tdPicosec delta < 0 then delta { tdSec = tdSec delta - 1 , tdPicosec = tdPicosec delta + secondInPicoseconds } else delta {-| Strip a prefix from a string, allowing the last character of the prefix (which is assumed to be a separator) to be absent from the string if the string terminates there. \>>> chompPrefix \"foo:bar:\" \"a:b:c\" Nothing \>>> chompPrefix \"foo:bar:\" \"foo:bar:baz\" Just \"baz\" \>>> chompPrefix \"foo:bar:\" \"foo:bar:\" Just \"\" \>>> chompPrefix \"foo:bar:\" \"foo:bar\" Just \"\" \>>> chompPrefix \"foo:bar:\" \"foo:barbaz\" Nothing -} chompPrefix :: String -> String -> Maybe String chompPrefix pfx str = if pfx `isPrefixOf` str || str == init pfx then Just $ drop (length pfx) str else Nothing -- | Breaks a string in lines with length \<= maxWidth. -- -- NOTE: The split is OK if: -- -- * It doesn't break a word, i.e. the next line begins with space -- (@isSpace . head $ rest@) or the current line ends with space -- (@null revExtra@); -- -- * It breaks a very big word that doesn't fit anyway (@null revLine@). wrap :: Int -- ^ maxWidth -> String -- ^ string that needs wrapping -> [String] -- ^ string \"broken\" in lines wrap maxWidth = filter (not . null) . map trim . wrap0 where wrap0 :: String -> [String] wrap0 text | length text <= maxWidth = [text] | isSplitOK = line : wrap0 rest | otherwise = line' : wrap0 rest' where (line, rest) = splitAt maxWidth text (revExtra, revLine) = break isSpace . reverse $ line (line', rest') = (reverse revLine, reverse revExtra ++ rest) isSplitOK = null revLine || null revExtra || startsWithSpace rest startsWithSpace (x:_) = isSpace x startsWithSpace _ = False -- | Removes surrounding whitespace. Should only be used in small -- strings. trim :: String -> String trim = reverse . 
dropWhile isSpace . reverse . dropWhile isSpace -- | A safer head version, with a default value. defaultHead :: a -> [a] -> a defaultHead def [] = def defaultHead _ (x:_) = x -- | A 'head' version in the I/O monad, for validating parameters -- without which we cannot continue. exitIfEmpty :: String -> [a] -> IO a exitIfEmpty _ (x:_) = return x exitIfEmpty s [] = exitErr s -- | Obtain the unique element of a list in an arbitrary monad. monadicThe :: (Eq a, MonadFail m) => String -> [a] -> m a monadicThe s [] = fail s monadicThe s (x:xs) | all (x ==) xs = return x | otherwise = fail s -- | Split an 'Either' list into two separate lists (containing the -- 'Left' and 'Right' elements, plus a \"trail\" list that allows -- recombination later. -- -- This is splitter; for recombination, look at 'recombineEithers'. -- The sum of \"left\" and \"right\" lists should be equal to the -- original list length, and the trail list should be the same length -- as well. The entries in the resulting lists are reversed in -- comparison with the original list. splitEithers :: [Either a b] -> ([a], [b], [Bool]) splitEithers = foldl' splitter ([], [], []) where splitter (l, r, t) e = case e of Left v -> (v:l, r, False:t) Right v -> (l, v:r, True:t) -- | Recombines two \"left\" and \"right\" lists using a \"trail\" -- list into a single 'Either' list. -- -- This is the counterpart to 'splitEithers'. It does the opposite -- transformation, and the output list will be the reverse of the -- input lists. Since 'splitEithers' also reverses the lists, calling -- these together will result in the original list. -- -- Mismatches in the structure of the lists (e.g. inconsistent -- lengths) are represented via 'Bad'; normally this function should -- not fail, if lists are passed as generated by 'splitEithers'. recombineEithers :: (Show a, Show b) => [a] -> [b] -> [Bool] -> Result [Either a b] recombineEithers lefts rights trail = foldM recombiner ([], lefts, rights) trail >>= checker where checker (eithers, [], []) = Ok eithers checker (_, lefts', rights') = Bad $ "Inconsistent results after recombination, l'=" ++ show lefts' ++ ", r'=" ++ show rights' recombiner (es, l:ls, rs) False = Ok (Left l:es, ls, rs) recombiner (es, ls, r:rs) True = Ok (Right r:es, ls, rs) recombiner (_, ls, rs) t = Bad $ "Inconsistent trail log: l=" ++ show ls ++ ", r=" ++ show rs ++ ",t=" ++ show t -- | Default hints for the resolver resolveAddrHints :: Maybe AddrInfo resolveAddrHints = Just defaultHints { addrFlags = [AI_NUMERICHOST, AI_NUMERICSERV] } -- | Resolves a numeric address. resolveAddr :: Int -> String -> IO (Result (Family, SockAddr)) resolveAddr port str = do resolved <- getAddrInfo resolveAddrHints (Just str) (Just (show port)) return $ case resolved of [] -> Bad "Invalid results from lookup?" best:_ -> Ok (addrFamily best, addrAddress best) -- | Set the owner and the group of a file (given as names, not numeric id). setOwnerAndGroupFromNames :: FilePath -> GanetiDaemon -> GanetiGroup -> IO () setOwnerAndGroupFromNames filename daemon dGroup = do -- TODO: it would be nice to rework this (or getEnts) so that runtimeEnts -- is read only once per daemon startup, and then cached for further usage. runtimeEnts <- runResultT getEnts ents <- exitIfBad "Can't find required user/groups" runtimeEnts -- note: we use directly ! as lookup failures shouldn't happen, due -- to the map construction let uid = reUserToUid ents M.! daemon let gid = reGroupToGid ents M.! 
dGroup setOwnerAndGroup filename uid gid -- | Resets permissions so that the owner can read/write and the group only -- read. All other permissions are cleared. setOwnerWGroupR :: FilePath -> IO () setOwnerWGroupR path = setFileMode path mode where mode = foldl unionFileModes nullFileMode [ownerReadMode, ownerWriteMode, groupReadMode] -- | Formats an integral number, appending a suffix. formatOrdinal :: (Integral a, Show a) => a -> String formatOrdinal num | num > 10 && num < 20 = suffix "th" | tens == 1 = suffix "st" | tens == 2 = suffix "nd" | tens == 3 = suffix "rd" | otherwise = suffix "th" where tens = num `mod` 10 suffix s = show num ++ s -- | Attempt, in a non-blocking way, to obtain a lock on a given file; report -- back success. -- Returns the file descriptor so that the lock can be released by closing lockFile :: FilePath -> IO (Result Fd) lockFile path = runResultT . liftIO $ do handle <- openFile path WriteMode fd <- handleToFd handle setLock fd (WriteLock, AbsoluteSeek, 0, 0) return fd -- | File stat identifier. type FStat = (EpochTime, FileID, FileOffset) -- | Null 'FStat' value. nullFStat :: FStat nullFStat = (-1, -1, -1) -- | Computes the file cache data from a FileStatus structure. buildFileStatus :: FileStatus -> FStat buildFileStatus ofs = let modt = modificationTime ofs inum = fileID ofs fsize = fileSize ofs in (modt, inum, fsize) -- | Wrapper over 'buildFileStatus'. This reads the data from the -- filesystem and then builds our cache structure. getFStat :: FilePath -> IO FStat getFStat p = liftM buildFileStatus (getFileStatus p) -- | Safe version of 'getFStat', that ignores IOErrors. getFStatSafe :: FilePath -> IO FStat getFStatSafe fpath = liftM (either (const nullFStat) id) ((try $ getFStat fpath) :: IO (Either IOError FStat)) -- | Check if the file needs reloading needsReload :: FStat -> FilePath -> IO (Maybe FStat) needsReload oldstat path = do newstat <- getFStat path return $ if newstat /= oldstat then Just newstat else Nothing -- | Until the given point in time (useconds since the epoch), wait -- for the output of a given method to change and return the new value; -- make use of the promise that the output only changes if the reference -- has a value different than the given one. watchFileEx :: (Eq b) => Integer -> b -> IORef b -> (a -> Bool) -> IO a -> IO a watchFileEx endtime base ref check read_fn = do current <- getCurrentTimeUSec if current > endtime then read_fn else do val <- readIORef ref if val /= base then do new <- read_fn if check new then return new else do logDebug "Observed change not relevant" threadDelay 100000 watchFileEx endtime val ref check read_fn else do threadDelay 100000 watchFileEx endtime base ref check read_fn -- | Within the given timeout (in seconds), wait for for the output -- of the given method to satisfy a given predicate and return the new value; -- make use of the promise that the method will only change its value, if -- the given file changes on disk. If the file does not exist on disk, return -- immediately. 
watchFileBy :: FilePath -> Int -> (a -> Bool) -> IO a -> IO a watchFileBy fpath timeout check read_fn = do current <- getCurrentTimeUSec let endtime = current + fromIntegral timeout * 1000000 fstat <- getFStatSafe fpath ref <- newIORef fstat bracket initINotify killINotify $ \inotify -> do let do_watch e = do logDebug $ "Notified of change in " ++ fpath ++ "; event: " ++ show e when (e == Ignored) (addWatch inotify [Modify, Delete] (toInotifyPath fpath) do_watch >> return ()) fstat' <- getFStatSafe fpath writeIORef ref fstat' _ <- addWatch inotify [Modify, Delete] (toInotifyPath fpath) do_watch newval <- read_fn if check newval then do logDebug $ "File " ++ fpath ++ " changed during setup of inotify" return newval else watchFileEx endtime fstat ref check read_fn -- | Within the given timeout (in seconds), wait for for the output -- of the given method to change and return the new value; make use of -- the promise that the method will only change its value, if -- the given file changes on disk. If the file does not exist on disk, return -- immediately. watchFile :: Eq a => FilePath -> Int -> a -> IO a -> IO a watchFile fpath timeout old = watchFileBy fpath timeout (/= old) -- | Type describing ownership and permissions of newly generated -- directories and files. All parameters are optional, with nothing -- meaning that the default value should be left untouched. data FilePermissions = FilePermissions { fpOwner :: Maybe GanetiDaemon , fpGroup :: Maybe GanetiGroup , fpPermissions :: FileMode } -- | Ensure that a given file or directory has the permissions, and -- possibly ownerships, as required. ensurePermissions :: FilePath -> FilePermissions -> IO (Result ()) ensurePermissions fpath perms = do -- Fetch the list of entities runtimeEnts <- runResultT getEnts ents <- exitIfBad "Can't determine user/group ids" runtimeEnts -- Get the existing file properties eitherFileStatus <- try $ getFileStatus fpath :: IO (Either IOError FileStatus) -- And see if any modifications are needed (flip $ either (return . Bad . show)) eitherFileStatus $ \fstat -> do ownertry <- case fpOwner perms of Nothing -> return $ Right () Just owner -> try $ do let ownerid = reUserToUid ents M.! owner unless (ownerid == fileOwner fstat) $ do logDebug $ "Changing owner of " ++ fpath ++ " to " ++ show owner setOwnerAndGroup fpath ownerid (-1) grouptry <- case fpGroup perms of Nothing -> return $ Right () Just grp -> try $ do let groupid = reGroupToGid ents M.! grp unless (groupid == fileGroup fstat) $ do logDebug $ "Changing group of " ++ fpath ++ " to " ++ show grp setOwnerAndGroup fpath (-1) groupid let fp = fpPermissions perms permtry <- if fileMode fstat == fp then return $ Right () else try $ do logInfo $ "Changing permissions of " ++ fpath ++ " to " ++ showOct fp "" setFileMode fpath fp let errors = E.lefts ([ownertry, grouptry, permtry] :: [Either IOError ()]) if null errors then return $ Ok () else return . Bad $ show errors -- | Safely rename a file, creating the target directory, if needed. safeRenameFile :: FilePermissions -> FilePath -> FilePath -> IO (Result ()) safeRenameFile perms from to = do directtry <- try $ renameFile from to case (directtry :: Either IOError ()) of Right () -> return $ Ok () Left _ -> do result <- try $ do let dir = takeDirectory to createDirectoryIfMissing True dir _ <- ensurePermissions dir perms renameFile from to return $ either (Bad . show) Ok (result :: Either IOError ()) -- | Removes duplicates, preserving order. 
ordNub :: (Ord a) => [a] -> [a] ordNub = let go _ [] = [] go s (x:xs) = if x `S.member` s then go s xs else x : go (S.insert x s) xs in go S.empty {-# ANN frequency "HLint: ignore Use alternative" #-} -- | Returns a list of tuples of elements and the number of times they occur -- in a list frequency :: Ord t => [t] -> [(Int, t)] frequency xs = map (\x -> (length x, head x)) . group . sort $ xs
ganeti/ganeti
src/Ganeti/Utils.hs
Haskell
bsd-2-clause
30,303
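Among the utilities above, `niceSort` orders names by splitting them into digit and non-digit runs, so "a2" sorts before "a10". A compact, self-contained sketch of that idea (not Ganeti's actual implementation, which uses a key-projection variant with reversed accumulation):

module Main where

import Data.Char (isDigit)
import Data.Function (on)
import Data.List (sortBy)

-- Digit runs become Left Integer and other runs Right String, so the
-- derived Either/list ordering yields the "natural" sort order.
key :: String -> [Either Integer String]
key [] = []
key s@(c:_)
  | isDigit c = let (d, rest) = span isDigit s in Left (read d) : key rest
  | otherwise = let (w, rest) = break isDigit s in Right w : key rest

niceSort :: [String] -> [String]
niceSort = sortBy (compare `on` key)

main :: IO ()
main = print (niceSort ["a10", "a1", "a2", "b1"])
-- ["a1","a2","a10","b1"]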
module Shader where import Graphics.GL import Control.Monad import Control.Monad.Trans import Foreign import Foreign.C.String import qualified Data.ByteString as BS import qualified Data.Text.Encoding as Text import qualified Data.Text.IO as Text import Linear import Data.Foldable newtype GLProgram = GLProgram { unGLProgram :: GLuint } newtype AttributeLocation = AttributeLocation { unAttributeLocation :: GLint } newtype UniformLocation = UniformLocation { unUniformLocation :: GLint } overPtr :: (MonadIO m, Storable a) => (Ptr a -> IO b) -> m a overPtr f = liftIO (alloca (\p -> f p >> peek p)) useProgram :: MonadIO m => GLProgram -> m () useProgram (GLProgram program) = glUseProgram (fromIntegral program) uniformM44 :: UniformLocation -> M44 GLfloat -> IO () uniformM44 uniform matrix = do let mvpUniformLoc = fromIntegral (unUniformLocation uniform) withArray (concatMap toList (transpose matrix)) (\matrixPtr -> glUniformMatrix4fv mvpUniformLoc 1 GL_FALSE matrixPtr) --------------- -- Load shaders --------------- createShaderProgram :: FilePath -> FilePath -> IO GLProgram createShaderProgram vertexShaderPath fragmentShaderPath = do vertexShader <- glCreateShader GL_VERTEX_SHADER compileShader vertexShaderPath vertexShader fragmentShader <- glCreateShader GL_FRAGMENT_SHADER compileShader fragmentShaderPath fragmentShader shaderProg <- glCreateProgram glAttachShader shaderProg vertexShader glAttachShader shaderProg fragmentShader glLinkProgram shaderProg linked <- overPtr (glGetProgramiv shaderProg GL_LINK_STATUS) when (linked == fromIntegral GL_FALSE) (do maxLength <- overPtr (glGetProgramiv shaderProg GL_INFO_LOG_LENGTH) logLines <- allocaArray (fromIntegral maxLength) (\p -> alloca (\lenP -> do glGetProgramInfoLog shaderProg maxLength lenP p len <- peek lenP peekCStringLen (p,fromIntegral len))) putStrLn logLines) return (GLProgram shaderProg) where compileShader path shader = do src <- Text.readFile path BS.useAsCString (Text.encodeUtf8 src) (\ptr -> withArray [ptr] (\srcs -> glShaderSource shader 1 srcs nullPtr)) glCompileShader shader when True (do maxLength <- overPtr (glGetShaderiv shader GL_INFO_LOG_LENGTH) logLines <- allocaArray (fromIntegral maxLength) (\p -> alloca (\lenP -> do glGetShaderInfoLog shader maxLength lenP p len <- peek lenP peekCStringLen (p,fromIntegral len))) when (length logLines > 0) (do putStrLn ("In " ++ path ++ ":") putStrLn logLines) ) getShaderAttribute :: GLProgram -> String -> IO AttributeLocation getShaderAttribute (GLProgram prog) attributeName = do location <- withCString attributeName $ \attributeNameCString -> glGetAttribLocation prog attributeNameCString when (location == -1) $ error $ "Coudn't bind attribute: " ++ attributeName return (AttributeLocation location) getShaderUniform :: GLProgram -> String -> IO UniformLocation getShaderUniform (GLProgram prog) uniformName = do location <- withCString uniformName $ \uniformNameCString -> glGetUniformLocation prog uniformNameCString when (location == -1) $ error $ "Coudn't bind uniform: " ++ uniformName return (UniformLocation location) glGetErrors :: IO () glGetErrors = do code <- glGetError case code of GL_NO_ERROR -> return () e -> do case e of GL_INVALID_ENUM -> putStrLn "* Invalid Enum" GL_INVALID_VALUE -> putStrLn "* Invalid Value" GL_INVALID_OPERATION -> putStrLn "* Invalid Operation" GL_INVALID_FRAMEBUFFER_OPERATION -> putStrLn "* Invalid Framebuffer Operation" GL_OUT_OF_MEMORY -> putStrLn "* OOM" GL_STACK_UNDERFLOW -> putStrLn "* Stack underflow" GL_STACK_OVERFLOW -> putStrLn "* Stack overflow" _ 
-> return () glGetErrors
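-- A hypothetical usage sketch appended for illustration (not part of the
-- original module): load a program, look up locations once, then bind the
-- program and upload an MVP matrix each frame. The shader paths and the
-- names "uMVP" / "aPosition" are assumptions, not names this module defines.
demoSetup :: IO (GLProgram, UniformLocation, AttributeLocation)
demoSetup = do
  prog <- createShaderProgram "shaders/demo.vert" "shaders/demo.frag"
  mvp  <- getShaderUniform prog "uMVP"
  pos  <- getShaderAttribute prog "aPosition"
  return (prog, mvp, pos)

demoDraw :: GLProgram -> UniformLocation -> M44 GLfloat -> IO ()
demoDraw prog mvp mvpMatrix = do
  useProgram prog
  uniformM44 mvp mvpMatrix  -- transposes on the CPU and passes GL_FALSE to GL
  glGetErrors               -- drain and report any pending GL errors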
lukexi/halive
demo/Shader.hs
Haskell
bsd-2-clause
4,741
module Blockchain.Data.Wire ( Message(..), Capability(..), obj2WireMessage, wireMessage2Obj ) where import Data.Functor import Data.List import Data.Word import Network.Haskoin.Crypto import Numeric import Text.PrettyPrint.ANSI.Leijen hiding ((<$>)) import qualified Blockchain.Colors as CL import Blockchain.Data.BlockDB import Blockchain.Data.DataDefs import Blockchain.Data.Peer import Blockchain.Data.RLP import Blockchain.Data.SignedTransaction import Blockchain.Format import Blockchain.SHA import Blockchain.Util import Debug.Trace data Capability = ETH Integer | SHH Integer deriving (Show) name2Cap::Integer->String->Capability name2Cap qqqq "eth" = ETH qqqq name2Cap qqqq "shh" = SHH qqqq name2Cap _ x = error $ "Unknown capability string: " ++ x {-capValue::Capability->String capValue ETH = "eth" capValue SHH = "shh"-} instance RLPSerializable Capability where rlpEncode (ETH qqqq) = RLPArray [rlpEncode "eth", rlpEncode qqqq] rlpEncode (SHH qqqq) = RLPArray [rlpEncode "shh", rlpEncode qqqq] rlpDecode (RLPArray [name, qqqq]) = name2Cap (rlpDecode qqqq) $ rlpDecode name rlpDecode x = error $ "wrong format given to rlpDecode for Capability: " ++ show (pretty x) data TerminationReason = DisconnectRequested | TCPSubSystemError | BreachOfProtocol | UselessPeer | TooManyPeers | AlreadyConnected | IncompatibleP2PProtocolVersion | NullNodeIdentityReceived | ClientQuitting | UnexpectedIdentity | ConnectedToSelf | PingTimeout | OtherSubprotocolReason deriving (Show) numberToTerminationReason::Integer->TerminationReason numberToTerminationReason 0x00 = DisconnectRequested numberToTerminationReason 0x01 = TCPSubSystemError numberToTerminationReason 0x02 = BreachOfProtocol numberToTerminationReason 0x03 = UselessPeer numberToTerminationReason 0x04 = TooManyPeers numberToTerminationReason 0x05 = AlreadyConnected numberToTerminationReason 0x06 = IncompatibleP2PProtocolVersion numberToTerminationReason 0x07 = NullNodeIdentityReceived numberToTerminationReason 0x08 = ClientQuitting numberToTerminationReason 0x09 = UnexpectedIdentity numberToTerminationReason 0x0a = ConnectedToSelf numberToTerminationReason 0x0b = PingTimeout numberToTerminationReason 0x0c = OtherSubprotocolReason numberToTerminationReason _ = error "numberToTerminationReasion called with unsupported number" terminationReasonToNumber::TerminationReason->Integer terminationReasonToNumber DisconnectRequested = 0x00 terminationReasonToNumber TCPSubSystemError = 0x01 terminationReasonToNumber BreachOfProtocol = 0x02 terminationReasonToNumber UselessPeer = 0x03 terminationReasonToNumber TooManyPeers = 0x04 terminationReasonToNumber AlreadyConnected = 0x05 terminationReasonToNumber IncompatibleP2PProtocolVersion = 0x06 terminationReasonToNumber NullNodeIdentityReceived = 0x07 terminationReasonToNumber ClientQuitting = 0x08 terminationReasonToNumber UnexpectedIdentity = 0x09 terminationReasonToNumber ConnectedToSelf = 0x0a terminationReasonToNumber PingTimeout = 0x0b terminationReasonToNumber OtherSubprotocolReason = 0x0c data Message = Hello { version::Int, clientId::String, capability::[Capability], port::Int, nodeId::Word512 } | Disconnect TerminationReason | Ping | Pong | GetPeers | Peers [Peer] | Status { protocolVersion::Int, networkID::String, totalDifficulty::Int, latestHash::SHA, genesisHash:: SHA } | QqqqStatus Int | Transactions [SignedTransaction] | GetBlocks [SHA] | Blocks [Block] | BlockHashes [SHA] | GetBlockHashes { parentSHAs::[SHA], numChildItems::Integer } | GetTransactions | NewBlockPacket Block Integer | PacketCount Integer | 
QqqqPacket | WhisperProtocolVersion Int deriving (Show) instance Format Message where format Hello{version=ver, clientId=c, capability=cap, port=p, nodeId=n} = CL.blue "Hello" ++ " version: " ++ show ver ++ "\n" ++ " cliendId: " ++ show c ++ "\n" ++ " capability: " ++ intercalate ", " (show <$> cap) ++ "\n" ++ " port: " ++ show p ++ "\n" ++ " nodeId: " ++ take 20 (padZeros 64 (showHex n "")) ++ "...." format (Disconnect reason) = CL.blue "Disconnect" ++ "(" ++ show reason ++ ")" format Ping = CL.blue "Ping" format Pong = CL.blue "Pong" format GetPeers = CL.blue "GetPeers" format (Peers peers) = CL.blue "Peers: " ++ intercalate ", " (format <$> peers) format Status{ protocolVersion=ver, networkID=nID, totalDifficulty=d, latestHash=lh, genesisHash=gh } = CL.blue "Status" ++ " protocolVersion: " ++ show ver ++ "\n" ++ " networkID: " ++ show nID ++ "\n" ++ " totalDifficulty: " ++ show d ++ "\n" ++ " latestHash: " ++ show (pretty lh) ++ "\n" ++ " genesisHash: " ++ show (pretty gh) format (QqqqStatus ver) = CL.blue "QqqqStatus " ++ " protocolVersion: " ++ show ver format (Transactions transactions) = CL.blue "Transactions:\n " ++ tab (intercalate "\n " (format <$> transactions)) --Short version format (BlockHashes shas) = CL.blue "BlockHashes " ++ "(" ++ show (length shas) ++ " new hashes)" --Long version {- format (BlockHashes shas) = CL.blue "BlockHashes:" ++ tab ("\n" ++ intercalate "\n " (show . pretty <$> shas))-} format (GetBlocks shas) = CL.blue "GetBlocks:" ++ tab ("\n" ++ intercalate "\n " (show . pretty <$> shas)) format (Blocks blocks) = CL.blue "Blocks:" ++ tab("\n" ++ intercalate "\n " (format <$> blocks)) format (GetBlockHashes pSHAs numChild) = CL.blue "GetBlockHashes" ++ " (max: " ++ show numChild ++ "):\n " ++ intercalate ",\n " (show . pretty <$> pSHAs) format (NewBlockPacket block d) = CL.blue "NewBlockPacket" ++ " (" ++ show d ++ ")" ++ tab ("\n" ++ format block) format (PacketCount c) = CL.blue "PacketCount:" ++ show c format QqqqPacket = CL.blue "QqqqPacket" format GetTransactions = CL.blue "GetTransactions" format (WhisperProtocolVersion ver) = CL.blue "WhisperProtocolVersion " ++ show ver obj2WireMessage::Word8->RLPObject->Message obj2WireMessage 0x0 (RLPArray [ver, cId, RLPArray cap, p, nId]) = Hello (fromInteger $ rlpDecode ver) (rlpDecode cId) (rlpDecode <$> cap) (fromInteger $ rlpDecode p) $ rlp2Word512 nId obj2WireMessage 0x1 (RLPArray [reason]) = Disconnect (numberToTerminationReason $ rlpDecode reason) obj2WireMessage 0x2 (RLPArray []) = Ping obj2WireMessage 0x2 (RLPArray [RLPArray []]) = Ping obj2WireMessage 0x3 (RLPArray []) = Pong obj2WireMessage 0x4 (RLPArray []) = GetPeers obj2WireMessage 0x5 (RLPArray peers) = Peers $ rlpDecode <$> peers obj2WireMessage 0x10 (RLPArray [ver, nID, d, lh, gh]) = Status { protocolVersion=fromInteger $ rlpDecode ver, networkID = rlpDecode nID, totalDifficulty = fromInteger $ rlpDecode d, latestHash=rlpDecode lh, genesisHash=rlpDecode gh } obj2WireMessage 0x10 (RLPArray [ver]) = QqqqStatus $ fromInteger $ rlpDecode ver obj2WireMessage 0x11 (RLPArray []) = GetTransactions obj2WireMessage 0x12 (RLPArray transactions) = Transactions $ rlpDecode <$> transactions obj2WireMessage 0x13 (RLPArray items) = GetBlockHashes (rlpDecode <$> init items) $ rlpDecode $ last items obj2WireMessage 0x14 (RLPArray items) = BlockHashes $ rlpDecode <$> items obj2WireMessage 0x15 (RLPArray items) = GetBlocks $ rlpDecode <$> items obj2WireMessage 0x16 (RLPArray blocks) = Blocks $ rlpDecode <$> blocks obj2WireMessage 0x17 (RLPArray [block, td]) = 
NewBlockPacket (rlpDecode block) (rlpDecode td) obj2WireMessage 0x18 (RLPArray [c]) = PacketCount $ rlpDecode c obj2WireMessage 0x19 (RLPArray []) = QqqqPacket obj2WireMessage 0x20 (RLPArray [ver]) = WhisperProtocolVersion $ fromInteger $ rlpDecode ver obj2WireMessage x y = error ("Missing case in obj2WireMessage: " ++ show x ++ ", " ++ show (pretty y)) wireMessage2Obj::Message->(Word8, RLPObject) wireMessage2Obj Hello { version = ver, clientId = cId, capability = cap, port = p, nodeId = nId } = (0x0, RLPArray [ rlpEncode $ toInteger ver, rlpEncode cId, RLPArray $ rlpEncode <$> cap, rlpEncode $ toInteger p, word5122RLP nId ]) wireMessage2Obj (Disconnect reason) = (0x1, RLPArray [rlpEncode $ terminationReasonToNumber reason]) wireMessage2Obj Ping = (0x2, RLPArray []) wireMessage2Obj Pong = (0x3, RLPArray []) wireMessage2Obj GetPeers = (0x4, RLPArray []) wireMessage2Obj (Peers peers) = (0x5, RLPArray $ (rlpEncode <$> peers)) wireMessage2Obj (Status ver nID d lh gh) = (0x10, RLPArray [rlpEncode $ toInteger ver, rlpEncode nID, rlpEncode $ toInteger d, rlpEncode lh, rlpEncode gh]) wireMessage2Obj (QqqqStatus ver) = (0x10, RLPArray [rlpEncode $ toInteger ver]) wireMessage2Obj GetTransactions = (0x11, RLPArray []) wireMessage2Obj (Transactions transactions) = (0x12, RLPArray (rlpEncode <$> transactions)) wireMessage2Obj (GetBlockHashes pSHAs numChildren) = (0x13, RLPArray $ (rlpEncode <$> pSHAs) ++ [rlpEncode numChildren]) wireMessage2Obj (BlockHashes shas) = (0x14, RLPArray (rlpEncode <$> shas)) wireMessage2Obj (GetBlocks shas) = (0x15, RLPArray (rlpEncode <$> shas)) wireMessage2Obj (Blocks blocks) = (0x16, RLPArray (rlpEncode <$> blocks)) wireMessage2Obj (NewBlockPacket block d) = (0x17, RLPArray [rlpEncode block, rlpEncode d]) wireMessage2Obj (PacketCount c) = (0x18, RLPArray [rlpEncode c]) wireMessage2Obj QqqqPacket = (0x19, RLPArray []) wireMessage2Obj (WhisperProtocolVersion ver) = (0x20, RLPArray [rlpEncode $ toInteger ver])
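-- Hypothetical round-trip check appended for illustration (not part of the
-- original module): a Message should survive encoding to its (opcode,
-- RLPObject) pair and decoding back. The name `roundTrip` is an assumption.
roundTrip :: Message -> Message
roundTrip msg = let (opcode, obj) = wireMessage2Obj msg in obj2WireMessage opcode obj
-- e.g. roundTrip Ping and roundTrip (PacketCount 42) are expected to return
-- the original constructors.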
kejace/ethereum-client-haskell
src/Blockchain/Data/Wire.hs
Haskell
bsd-3-clause
9,629
{-# LANGUAGE ScopedTypeVariables #-} -- | Provides common combinators for concurrency in Javascript. -- -- The emulated Javascript threading model provided by -- Sunroof is based on cooperative multithreading -- (since Javascript is not multithreaded). module Language.Sunroof.Concurrent ( loop , forkJS , threadDelay , yield ) where import Language.Sunroof.Types import Language.Sunroof.Classes import Language.Sunroof.JS.Number ( JSNumber ) import Language.Sunroof.JS.Browser ( window, setTimeout ) import Language.Sunroof.Utils -- ------------------------------------------------------------- -- General Concurrent Combinators. -- ------------------------------------------------------------- -- | @loop x f@ executes the function @f@ repeatedly. -- After each iteration the result value of the function -- is fed back as input of the next iteration. -- The initial value supplied for the first iteration is @x@. -- This loop will never terminate. loop :: (Sunroof a) => a -> (a -> JSB a) -> JSB () loop start m = do f <- fixJS $ \ f -> continuation $ \ a -> do a' <- m a yield -- pause after every iteration so other threads can run goto f a' goto f start -- and call the looping function -- | Fork off the given computation in a different thread. forkJS :: (SunroofThread t1) => JS t1 () -> JS t2 () forkJS m = do _ <- window # setTimeout (\() -> blockableJS m) 0 return () -- | Delay the execution of all instructions after this one by -- the given number of milliseconds. threadDelay :: JSNumber -> JSB () threadDelay n = callcc $ \ o -> do _ <- window # setTimeout (\x -> goto o x) n done -- | Give another thread time to execute. yield :: JSB () yield = threadDelay 0
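-- Hypothetical usage sketch appended for illustration (not part of the
-- original module): fork a background loop that counts up once per second.
-- Assumes JSNumber's Num instance for the literals; `tickEverySecond` is an
-- illustrative name.
tickEverySecond :: JSB ()
tickEverySecond = forkJS $ loop (0 :: JSNumber) $ \ n -> do
  threadDelay 1000  -- wait 1000 milliseconds
  return (n + 1)    -- feed the incremented counter into the next iteration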
ku-fpg/sunroof-compiler
Language/Sunroof/Concurrent.hs
Haskell
bsd-3-clause
1,754
module Graph where import qualified Data.Map as Map import qualified Data.Set as Set -- | The type of graph whose vertices are of type n. type Graph n = Map.Map n (Set.Set n) empty :: Graph n empty = Map.empty union :: (Eq n, Ord n) => Graph n -> Graph n -> Graph n union i1 i2 = Map.unionWith Set.union i1 i2 clique :: (Eq n, Ord n) => Set.Set n -> Graph n clique set = Map.fromSet (\x -> set `Set.difference` Set.singleton x) set vertices :: (Eq n, Ord n) => Graph n -> [n] vertices = Map.keys -- | The in-degree of the specified vertex in the graph. degree :: (Eq n, Ord n) => Graph n -> n -> Int degree i v = Set.size (i Map.! v) removeVertex :: (Eq n, Ord n) => Graph n -> n -> Graph n removeVertex intGr v = Map.map (Set.delete v) $ Map.delete v intGr -- | The set of sources (vertices whose in-degrees are 0). sources :: (Eq n, Ord n) => Graph n -> [n] sources gr = Set.toList $ Map.keysSet gr Set.\\ Map.foldl' Set.union Set.empty gr neighbors :: (Eq n, Ord n) => Graph n -> n -> [n] neighbors i v = Set.toList $ i Map.! v
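-- Hypothetical usage sketch appended for illustration (not part of the
-- original module): build a small graph from two overlapping cliques and
-- query it. The vertex labels are illustrative.
exampleGraph :: Graph Int
exampleGraph = clique (Set.fromList [1, 2, 3]) `union` clique (Set.fromList [3, 4])
-- degree exampleGraph 3 == 3  (its neighbours are 1, 2 and 4)
-- sources exampleGraph == []  (every vertex is some other vertex's neighbour)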
koba-e964/hayashii-mcc
Graph.hs
Haskell
bsd-3-clause
1,043
{-# OPTIONS_GHC -fno-warn-type-defaults #-} {-# LANGUAGE ScopedTypeVariables, ExistentialQuantification, RankNTypes, OverloadedStrings #-} ----------------------------------------------------------------------------- -- | -- Module : System.Taffybar.Widget.Workspaces -- Copyright : (c) Ivan A. Malison -- License : BSD3-style (see LICENSE) -- -- Maintainer : Ivan A. Malison -- Stability : unstable -- Portability : unportable ----------------------------------------------------------------------------- module System.Taffybar.Widget.Workspaces where import Control.Applicative import Control.Arrow ((&&&)) import Control.Concurrent import qualified Control.Concurrent.MVar as MV import Control.Exception.Enclosed (catchAny) import Control.Monad import Control.Monad.IO.Class import Control.Monad.Trans.Class import Control.Monad.Trans.Maybe import Control.Monad.Trans.Reader import Control.RateLimit import Data.Default (Default(..)) import qualified Data.Foldable as F import Data.GI.Base.ManagedPtr (unsafeCastTo) import Data.Int import Data.List (intersect, sortBy, (\\)) import qualified Data.Map as M import Data.Maybe import qualified Data.MultiMap as MM import Data.Ord import qualified Data.Set as Set import qualified Data.Text as T import Data.Time.Units import Data.Tuple.Select import Data.Tuple.Sequence import qualified GI.Gdk.Enums as Gdk import qualified GI.Gdk.Structs.EventScroll as Gdk import qualified GI.GdkPixbuf.Objects.Pixbuf as Gdk import qualified GI.Gtk as Gtk import Prelude import StatusNotifier.Tray (scalePixbufToSize) import System.Log.Logger import System.Taffybar.Context import System.Taffybar.Information.EWMHDesktopInfo import System.Taffybar.Information.SafeX11 import System.Taffybar.Information.X11DesktopInfo import System.Taffybar.Util import System.Taffybar.Widget.Generic.AutoSizeImage (autoSizeImage) import System.Taffybar.Widget.Util import System.Taffybar.WindowIcon import Text.Printf data WorkspaceState = Active | Visible | Hidden | Empty | Urgent deriving (Show, Eq) getCSSClass :: (Show s) => s -> T.Text getCSSClass = T.toLower . T.pack . show cssWorkspaceStates :: [T.Text] cssWorkspaceStates = map getCSSClass [Active, Visible, Hidden, Empty, Urgent] data WindowData = WindowData { windowId :: X11Window , windowTitle :: String , windowClass :: String , windowUrgent :: Bool , windowActive :: Bool , windowMinimized :: Bool } deriving (Show, Eq) data WidgetUpdate = WorkspaceUpdate Workspace | IconUpdate [X11Window] data Workspace = Workspace { workspaceIdx :: WorkspaceId , workspaceName :: String , workspaceState :: WorkspaceState , windows :: [WindowData] } deriving (Show, Eq) data WorkspacesContext = WorkspacesContext { controllersVar :: MV.MVar (M.Map WorkspaceId WWC) , workspacesVar :: MV.MVar (M.Map WorkspaceId Workspace) , workspacesWidget :: Gtk.Box , workspacesConfig :: WorkspacesConfig , taffyContext :: Context } type WorkspacesIO a = ReaderT WorkspacesContext IO a liftContext :: TaffyIO a -> WorkspacesIO a liftContext action = asks taffyContext >>= lift . 
runReaderT action liftX11Def :: a -> X11Property a -> WorkspacesIO a liftX11Def dflt prop = liftContext $ runX11Def dflt prop setWorkspaceWidgetStatusClass :: (MonadIO m, Gtk.IsWidget a) => Workspace -> a -> m () setWorkspaceWidgetStatusClass workspace widget = updateWidgetClasses widget [getCSSClass $ workspaceState workspace] cssWorkspaceStates updateWidgetClasses :: (Foldable t1, Foldable t, Gtk.IsWidget a, MonadIO m) => a -> t1 T.Text -> t T.Text -> m () updateWidgetClasses widget toAdd toRemove = do context <- Gtk.widgetGetStyleContext widget let hasClass = Gtk.styleContextHasClass context addIfMissing klass = hasClass klass >>= (`when` Gtk.styleContextAddClass context klass) . not removeIfPresent klass = unless (klass `elem` toAdd) $ hasClass klass >>= (`when` Gtk.styleContextRemoveClass context klass) mapM_ removeIfPresent toRemove mapM_ addIfMissing toAdd class WorkspaceWidgetController wc where getWidget :: wc -> WorkspacesIO Gtk.Widget updateWidget :: wc -> WidgetUpdate -> WorkspacesIO wc updateWidgetX11 :: wc -> WidgetUpdate -> WorkspacesIO wc updateWidgetX11 cont _ = return cont data WWC = forall a. WorkspaceWidgetController a => WWC a instance WorkspaceWidgetController WWC where getWidget (WWC wc) = getWidget wc updateWidget (WWC wc) update = WWC <$> updateWidget wc update updateWidgetX11 (WWC wc) update = WWC <$> updateWidgetX11 wc update type ControllerConstructor = Workspace -> WorkspacesIO WWC type ParentControllerConstructor = ControllerConstructor -> ControllerConstructor type WindowIconPixbufGetter = Int32 -> WindowData -> TaffyIO (Maybe Gdk.Pixbuf) data WorkspacesConfig = WorkspacesConfig { widgetBuilder :: ControllerConstructor , widgetGap :: Int , maxIcons :: Maybe Int , minIcons :: Int , getWindowIconPixbuf :: WindowIconPixbufGetter , labelSetter :: Workspace -> WorkspacesIO String , showWorkspaceFn :: Workspace -> Bool , borderWidth :: Int , updateEvents :: [String] , updateRateLimitMicroseconds :: Integer , iconSort :: [WindowData] -> WorkspacesIO [WindowData] , urgentWorkspaceState :: Bool } defaultWorkspacesConfig :: WorkspacesConfig defaultWorkspacesConfig = WorkspacesConfig { widgetBuilder = buildButtonController defaultBuildContentsController , widgetGap = 0 , maxIcons = Nothing , minIcons = 0 , getWindowIconPixbuf = defaultGetWindowIconPixbuf , labelSetter = return . workspaceName , showWorkspaceFn = const True , borderWidth = 2 , iconSort = sortWindowsByPosition , updateEvents = allEWMHProperties \\ [ewmhWMIcon] , updateRateLimitMicroseconds = 100000 , urgentWorkspaceState = False } instance Default WorkspacesConfig where def = defaultWorkspacesConfig hideEmpty :: Workspace -> Bool hideEmpty Workspace { workspaceState = Empty } = False hideEmpty _ = True wLog :: MonadIO m => Priority -> String -> m () wLog l s = liftIO $ logM "System.Taffybar.Widget.Workspaces" l s updateVar :: MV.MVar a -> (a -> WorkspacesIO a) -> WorkspacesIO a updateVar var modify = do ctx <- ask lift $ MV.modifyMVar var $ fmap (\a -> (a, a)) . flip runReaderT ctx . 
modify updateWorkspacesVar :: WorkspacesIO (M.Map WorkspaceId Workspace) updateWorkspacesVar = do workspacesRef <- asks workspacesVar updateVar workspacesRef buildWorkspaceData getWorkspaceToWindows :: [X11Window] -> X11Property (MM.MultiMap WorkspaceId X11Window) getWorkspaceToWindows = foldM (\theMap window -> MM.insert <$> getWorkspace window <*> pure window <*> pure theMap) MM.empty getWindowData :: Maybe X11Window -> [X11Window] -> X11Window -> X11Property WindowData getWindowData activeWindow urgentWindows window = do wTitle <- getWindowTitle window wClass <- getWindowClass window wMinimized <- getWindowMinimized window return WindowData { windowId = window , windowTitle = wTitle , windowClass = wClass , windowUrgent = window `elem` urgentWindows , windowActive = Just window == activeWindow , windowMinimized = wMinimized } buildWorkspaceData :: M.Map WorkspaceId Workspace -> WorkspacesIO (M.Map WorkspaceId Workspace) buildWorkspaceData _ = ask >>= \context -> liftX11Def M.empty $ do names <- getWorkspaceNames wins <- getWindows workspaceToWindows <- getWorkspaceToWindows wins urgentWindows <- filterM isWindowUrgent wins activeWindow <- getActiveWindow active:visible <- getVisibleWorkspaces let getWorkspaceState idx ws | idx == active = Active | idx `elem` visible = Visible | urgentWorkspaceState (workspacesConfig context) && not (null (ws `intersect` urgentWindows)) = Urgent | null ws = Empty | otherwise = Hidden foldM (\theMap (idx, name) -> do let ws = MM.lookup idx workspaceToWindows windowInfos <- mapM (getWindowData activeWindow urgentWindows) ws return $ M.insert idx Workspace { workspaceIdx = idx , workspaceName = name , workspaceState = getWorkspaceState idx ws , windows = windowInfos } theMap) M.empty names addWidgetsToTopLevel :: WorkspacesIO () addWidgetsToTopLevel = do WorkspacesContext { controllersVar = controllersRef , workspacesWidget = cont } <- ask controllersMap <- lift $ MV.readMVar controllersRef -- Elems returns elements in ascending order of their keys so this will always -- add the widgets in the correct order mapM_ addWidget $ M.elems controllersMap lift $ Gtk.widgetShowAll cont addWidget :: WWC -> WorkspacesIO () addWidget controller = do cont <- asks workspacesWidget workspaceWidget <- getWidget controller lift $ do -- XXX: This hbox exists to (hopefully) prevent the issue where workspace -- widgets appear out of order, in the switcher, by acting as an empty -- place holder when the actual widget is hidden. hbox <- Gtk.boxNew Gtk.OrientationHorizontal 0 void $ Gtk.widgetGetParent workspaceWidget >>= traverse (unsafeCastTo Gtk.Box) >>= traverse (flip Gtk.containerRemove workspaceWidget) Gtk.containerAdd hbox workspaceWidget Gtk.containerAdd cont hbox workspacesNew :: WorkspacesConfig -> TaffyIO Gtk.Widget workspacesNew cfg = ask >>= \tContext -> lift $ do cont <- Gtk.boxNew Gtk.OrientationHorizontal $ fromIntegral (widgetGap cfg) controllersRef <- MV.newMVar M.empty workspacesRef <- MV.newMVar M.empty let context = WorkspacesContext { controllersVar = controllersRef , workspacesVar = workspacesRef , workspacesWidget = cont , workspacesConfig = cfg , taffyContext = tContext } -- This will actually create all the widgets runReaderT updateAllWorkspaceWidgets context updateHandler <- onWorkspaceUpdate context iconHandler <- onIconsChanged context let doUpdate = lift . 
updateHandler handleConfigureEvents e@(ConfigureEvent {}) = doUpdate e handleConfigureEvents _ = return () (workspaceSubscription, iconSubscription, geometrySubscription) <- flip runReaderT tContext $ sequenceT ( subscribeToPropertyEvents (updateEvents cfg) $ doUpdate , subscribeToPropertyEvents [ewmhWMIcon] (lift . onIconChanged iconHandler) , subscribeToAll handleConfigureEvents ) let doUnsubscribe = flip runReaderT tContext $ mapM_ unsubscribe [ iconSubscription , workspaceSubscription , geometrySubscription ] _ <- Gtk.onWidgetUnrealize cont doUnsubscribe _ <- widgetSetClassGI cont "workspaces" Gtk.toWidget cont updateAllWorkspaceWidgets :: WorkspacesIO () updateAllWorkspaceWidgets = do wLog DEBUG "Updating workspace widgets" workspacesMap <- updateWorkspacesVar wLog DEBUG $ printf "Workspaces: %s" $ show workspacesMap wLog DEBUG "Adding and removing widgets" updateWorkspaceControllers let updateController' idx controller = maybe (return controller) (updateWidget controller . WorkspaceUpdate) $ M.lookup idx workspacesMap logUpdateController i = wLog DEBUG $ printf "Updating %s workspace widget" $ show i updateController i cont = logUpdateController i >> updateController' i cont wLog DEBUG "Done updating individual widget" doWidgetUpdate updateController wLog DEBUG "Showing and hiding controllers" setControllerWidgetVisibility setControllerWidgetVisibility :: WorkspacesIO () setControllerWidgetVisibility = do ctx@WorkspacesContext { workspacesVar = workspacesRef , controllersVar = controllersRef , workspacesConfig = cfg } <- ask lift $ do workspacesMap <- MV.readMVar workspacesRef controllersMap <- MV.readMVar controllersRef forM_ (M.elems workspacesMap) $ \ws -> let action = if showWorkspaceFn cfg ws then Gtk.widgetShow else Gtk.widgetHide in traverse (flip runReaderT ctx . getWidget) (M.lookup (workspaceIdx ws) controllersMap) >>= maybe (return ()) action doWidgetUpdate :: (WorkspaceId -> WWC -> WorkspacesIO WWC) -> WorkspacesIO () doWidgetUpdate updateController = do c@WorkspacesContext { controllersVar = controllersRef } <- ask lift $ MV.modifyMVar_ controllersRef $ \controllers -> do wLog DEBUG "Updating controllers ref" controllersList <- mapM (\(idx, controller) -> do newController <- runReaderT (updateController idx controller) c return (idx, newController)) $ M.toList controllers return $ M.fromList controllersList updateWorkspaceControllers :: WorkspacesIO () updateWorkspaceControllers = do WorkspacesContext { controllersVar = controllersRef , workspacesVar = workspacesRef , workspacesWidget = cont , workspacesConfig = cfg } <- ask workspacesMap <- lift $ MV.readMVar workspacesRef controllersMap <- lift $ MV.readMVar controllersRef let newWorkspacesSet = M.keysSet workspacesMap existingWorkspacesSet = M.keysSet controllersMap when (existingWorkspacesSet /= newWorkspacesSet) $ do let addWorkspaces = Set.difference newWorkspacesSet existingWorkspacesSet removeWorkspaces = Set.difference existingWorkspacesSet newWorkspacesSet builder = widgetBuilder cfg _ <- updateVar controllersRef $ \controllers -> do let oldRemoved = F.foldl (flip M.delete) controllers removeWorkspaces buildController idx = builder <$> M.lookup idx workspacesMap buildAndAddController theMap idx = maybe (return theMap) (>>= return . flip (M.insert idx) theMap) (buildController idx) foldM buildAndAddController oldRemoved $ Set.toList addWorkspaces -- Clear the container and repopulate it lift $ Gtk.containerForeach cont (Gtk.containerRemove cont) addWidgetsToTopLevel rateLimitFn :: forall req resp. 
WorkspacesContext -> (req -> IO resp) -> ResultsCombiner req resp -> IO (req -> IO resp) rateLimitFn context = let limit = (updateRateLimitMicroseconds $ workspacesConfig context) rate = fromMicroseconds limit :: Microsecond in generateRateLimitedFunction $ PerInvocation rate onWorkspaceUpdate :: WorkspacesContext -> IO (Event -> IO ()) onWorkspaceUpdate context = do rateLimited <- rateLimitFn context doUpdate combineRequests let withLog event = do case event of PropertyEvent _ _ _ _ _ atom _ _ -> wLog DEBUG $ printf "Event %s" $ show atom _ -> return () void $ forkIO $ rateLimited event return withLog where combineRequests _ b = Just (b, const ((), ())) doUpdate _ = postGUIASync $ runReaderT updateAllWorkspaceWidgets context onIconChanged :: (Set.Set X11Window -> IO ()) -> Event -> IO () onIconChanged handler event = case event of PropertyEvent { ev_window = wid } -> do wLog DEBUG $ printf "Icon changed event %s" $ show wid handler $ Set.singleton wid _ -> return () onIconsChanged :: WorkspacesContext -> IO (Set.Set X11Window -> IO ()) onIconsChanged context = rateLimitFn context onIconsChanged' combineRequests where combineRequests windows1 windows2 = Just (Set.union windows1 windows2, const ((), ())) onIconsChanged' wids = do wLog DEBUG $ printf "Icon update execute %s" $ show wids postGUIASync $ flip runReaderT context $ doWidgetUpdate (\idx c -> wLog DEBUG (printf "Updating %s icons." $ show idx) >> updateWidget c (IconUpdate $ Set.toList wids)) initializeWWC :: WorkspaceWidgetController a => a -> Workspace -> ReaderT WorkspacesContext IO WWC initializeWWC controller ws = WWC <$> updateWidget controller (WorkspaceUpdate ws) -- | A WrappingController can be used to wrap some child widget with another -- abitrary widget. data WrappingController = WrappingController { wrappedWidget :: Gtk.Widget , wrappedController :: WWC } instance WorkspaceWidgetController WrappingController where getWidget = lift . Gtk.toWidget . wrappedWidget updateWidget wc update = do updated <- updateWidget (wrappedController wc) update return wc { wrappedController = updated } data WorkspaceContentsController = WorkspaceContentsController { containerWidget :: Gtk.Widget , contentsControllers :: [WWC] } buildContentsController :: [ControllerConstructor] -> ControllerConstructor buildContentsController constructors ws = do controllers <- mapM ($ ws) constructors ctx <- ask tempController <- lift $ do cons <- Gtk.boxNew Gtk.OrientationHorizontal 0 mapM_ (flip runReaderT ctx . 
getWidget >=> Gtk.containerAdd cons) controllers outerBox <- Gtk.toWidget cons >>= buildPadBox _ <- widgetSetClassGI cons "contents" widget <- Gtk.toWidget outerBox return WorkspaceContentsController { containerWidget = widget , contentsControllers = controllers } initializeWWC tempController ws defaultBuildContentsController :: ControllerConstructor defaultBuildContentsController = buildContentsController [buildLabelController, buildIconController] bottomLeftAlignedBoxWrapper :: T.Text -> ControllerConstructor -> ControllerConstructor bottomLeftAlignedBoxWrapper boxClass constructor ws = do controller <- constructor ws widget <- getWidget controller ebox <- Gtk.eventBoxNew _ <- widgetSetClassGI ebox boxClass Gtk.widgetSetHalign ebox Gtk.AlignStart Gtk.widgetSetValign ebox Gtk.AlignEnd Gtk.containerAdd ebox widget wrapped <- Gtk.toWidget ebox let wrappingController = WrappingController { wrappedWidget = wrapped , wrappedController = controller } initializeWWC wrappingController ws buildLabelOverlayController :: ControllerConstructor buildLabelOverlayController = buildOverlayContentsController [buildIconController] [bottomLeftAlignedBoxWrapper "overlay-box" buildLabelController] buildOverlayContentsController :: [ControllerConstructor] -> [ControllerConstructor] -> ControllerConstructor buildOverlayContentsController mainConstructors overlayConstructors ws = do controllers <- mapM ($ ws) mainConstructors overlayControllers <- mapM ($ ws) overlayConstructors ctx <- ask tempController <- lift $ do mainContents <- Gtk.boxNew Gtk.OrientationHorizontal 0 mapM_ (flip runReaderT ctx . getWidget >=> Gtk.containerAdd mainContents) controllers outerBox <- Gtk.toWidget mainContents >>= buildPadBox _ <- widgetSetClassGI mainContents "contents" overlay <- Gtk.overlayNew Gtk.containerAdd overlay outerBox mapM_ (flip runReaderT ctx . getWidget >=> Gtk.overlayAddOverlay overlay) overlayControllers widget <- Gtk.toWidget overlay return WorkspaceContentsController { containerWidget = widget , contentsControllers = controllers ++ overlayControllers } initializeWWC tempController ws instance WorkspaceWidgetController WorkspaceContentsController where getWidget = return . containerWidget updateWidget cc update = do WorkspacesContext {} <- ask case update of WorkspaceUpdate newWorkspace -> lift $ setWorkspaceWidgetStatusClass newWorkspace $ containerWidget cc _ -> return () newControllers <- mapM (`updateWidget` update) $ contentsControllers cc return cc {contentsControllers = newControllers} updateWidgetX11 cc update = do newControllers <- mapM (`updateWidgetX11` update) $ contentsControllers cc return cc {contentsControllers = newControllers} newtype LabelController = LabelController { label :: Gtk.Label } buildLabelController :: ControllerConstructor buildLabelController ws = do tempController <- lift $ do lbl <- Gtk.labelNew Nothing _ <- widgetSetClassGI lbl "workspace-label" return LabelController { label = lbl } initializeWWC tempController ws instance WorkspaceWidgetController LabelController where getWidget = lift . Gtk.toWidget . 
label updateWidget lc (WorkspaceUpdate newWorkspace) = do WorkspacesContext { workspacesConfig = cfg } <- ask labelText <- labelSetter cfg newWorkspace lift $ do Gtk.labelSetMarkup (label lc) $ T.pack labelText setWorkspaceWidgetStatusClass newWorkspace $ label lc return lc updateWidget lc _ = return lc data IconWidget = IconWidget { iconContainer :: Gtk.EventBox , iconImage :: Gtk.Image , iconWindow :: MV.MVar (Maybe WindowData) , iconForceUpdate :: IO () } getPixbufForIconWidget :: Bool -> MV.MVar (Maybe WindowData) -> Int32 -> WorkspacesIO (Maybe Gdk.Pixbuf) getPixbufForIconWidget transparentOnNone dataVar size = do ctx <- ask let tContext = taffyContext ctx getPBFromData = getWindowIconPixbuf $ workspacesConfig ctx getPB' = runMaybeT $ MaybeT (lift $ MV.readMVar dataVar) >>= MaybeT . getPBFromData size getPB = if transparentOnNone then maybeTCombine getPB' (Just <$> pixBufFromColor size 0) else getPB' lift $ runReaderT getPB tContext buildIconWidget :: Bool -> Workspace -> WorkspacesIO IconWidget buildIconWidget transparentOnNone ws = do ctx <- ask lift $ do windowVar <- MV.newMVar Nothing img <- Gtk.imageNew refreshImage <- autoSizeImage img (flip runReaderT ctx . getPixbufForIconWidget transparentOnNone windowVar) Gtk.OrientationHorizontal ebox <- Gtk.eventBoxNew _ <- widgetSetClassGI img "window-icon" _ <- widgetSetClassGI ebox "window-icon-container" Gtk.containerAdd ebox img _ <- Gtk.onWidgetButtonPressEvent ebox $ const $ liftIO $ do info <- MV.readMVar windowVar case info of Just updatedInfo -> flip runReaderT ctx $ liftX11Def () $ focusWindow $ windowId updatedInfo _ -> liftIO $ void $ switch ctx (workspaceIdx ws) return True return IconWidget { iconContainer = ebox , iconImage = img , iconWindow = windowVar , iconForceUpdate = refreshImage } data IconController = IconController { iconsContainer :: Gtk.Box , iconImages :: [IconWidget] , iconWorkspace :: Workspace } buildIconController :: ControllerConstructor buildIconController ws = do tempController <- lift $ do hbox <- Gtk.boxNew Gtk.OrientationHorizontal 0 return IconController {iconsContainer = hbox, iconImages = [], iconWorkspace = ws} initializeWWC tempController ws instance WorkspaceWidgetController IconController where getWidget = lift . Gtk.toWidget . iconsContainer updateWidget ic (WorkspaceUpdate newWorkspace) = do newImages <- updateImages ic newWorkspace return ic { iconImages = newImages, iconWorkspace = newWorkspace } updateWidget ic (IconUpdate updatedIcons) = updateWindowIconsById ic updatedIcons >> return ic updateWindowIconsById :: IconController -> [X11Window] -> WorkspacesIO () updateWindowIconsById ic windowIds = mapM_ maybeUpdateWindowIcon $ iconImages ic where maybeUpdateWindowIcon widget = do info <- lift $ MV.readMVar $ iconWindow widget when (maybe False (flip elem windowIds . windowId) info) $ updateIconWidget ic widget info scaledWindowIconPixbufGetter :: WindowIconPixbufGetter -> WindowIconPixbufGetter scaledWindowIconPixbufGetter getter size = getter size >=> lift . 
traverse (scalePixbufToSize size Gtk.OrientationHorizontal) constantScaleWindowIconPixbufGetter :: Int32 -> WindowIconPixbufGetter -> WindowIconPixbufGetter constantScaleWindowIconPixbufGetter constantSize getter = const $ scaledWindowIconPixbufGetter getter constantSize handleIconGetterException :: WindowIconPixbufGetter -> WindowIconPixbufGetter handleIconGetterException getter = \size windowData -> catchAny (getter size windowData) $ \e -> do wLog WARNING $ printf "Failed to get window icon for %s: %s" (show windowData) (show e) return Nothing getWindowIconPixbufFromEWMH :: WindowIconPixbufGetter getWindowIconPixbufFromEWMH = handleIconGetterException $ \size windowData -> runX11Def Nothing (getIconPixBufFromEWMH size $ windowId windowData) getWindowIconPixbufFromClass :: WindowIconPixbufGetter getWindowIconPixbufFromClass = handleIconGetterException $ \size windowData -> lift $ getWindowIconFromClasses size (windowClass windowData) getWindowIconPixbufFromDesktopEntry :: WindowIconPixbufGetter getWindowIconPixbufFromDesktopEntry = handleIconGetterException $ \size windowData -> getWindowIconFromDesktopEntryByClasses size (windowClass windowData) getWindowIconPixbufFromChrome :: WindowIconPixbufGetter getWindowIconPixbufFromChrome _ windowData = getPixBufFromChromeData $ windowId windowData defaultGetWindowIconPixbuf :: WindowIconPixbufGetter defaultGetWindowIconPixbuf = scaledWindowIconPixbufGetter unscaledDefaultGetWindowIconPixbuf unscaledDefaultGetWindowIconPixbuf :: WindowIconPixbufGetter unscaledDefaultGetWindowIconPixbuf = getWindowIconPixbufFromDesktopEntry <|||> getWindowIconPixbufFromClass <|||> getWindowIconPixbufFromEWMH addCustomIconsToDefaultWithFallbackByPath :: (WindowData -> Maybe FilePath) -> FilePath -> WindowIconPixbufGetter addCustomIconsToDefaultWithFallbackByPath getCustomIconPath fallbackPath = addCustomIconsAndFallback getCustomIconPath (const $ lift $ getPixbufFromFilePath fallbackPath) unscaledDefaultGetWindowIconPixbuf addCustomIconsAndFallback :: (WindowData -> Maybe FilePath) -> (Int32 -> TaffyIO (Maybe Gdk.Pixbuf)) -> WindowIconPixbufGetter -> WindowIconPixbufGetter addCustomIconsAndFallback getCustomIconPath fallback defaultGetter = scaledWindowIconPixbufGetter $ getCustomIcon <|||> defaultGetter <|||> (\s _ -> fallback s) where getCustomIcon :: Int32 -> WindowData -> TaffyIO (Maybe Gdk.Pixbuf) getCustomIcon _ wdata = lift $ maybe (return Nothing) getPixbufFromFilePath $ getCustomIconPath wdata sortWindowsByPosition :: [WindowData] -> WorkspacesIO [WindowData] sortWindowsByPosition wins = do let getGeometryWorkspaces w = getDisplay >>= liftIO . (`safeGetGeometry` w) getGeometries = mapM (forkM return ((((sel2 &&& sel3) <$>) .) getGeometryWorkspaces) . 
windowId) wins windowGeometries <- liftX11Def [] getGeometries let getLeftPos wd = fromMaybe (999999999, 99999999) $ lookup (windowId wd) windowGeometries compareWindowData a b = compare (windowMinimized a, getLeftPos a) (windowMinimized b, getLeftPos b) return $ sortBy compareWindowData wins updateImages :: IconController -> Workspace -> WorkspacesIO [IconWidget] updateImages ic ws = do WorkspacesContext {workspacesConfig = cfg} <- ask sortedWindows <- iconSort cfg $ windows ws wLog DEBUG $ printf "Updating images for %s" (show ws) let updateIconWidget' getImageAction wdata = do iconWidget <- getImageAction _ <- updateIconWidget ic iconWidget wdata return iconWidget existingImages = map return $ iconImages ic buildAndAddIconWidget transparentOnNone = do iw <- buildIconWidget transparentOnNone ws lift $ Gtk.containerAdd (iconsContainer ic) $ iconContainer iw return iw infiniteImages = existingImages ++ replicate (minIcons cfg - length existingImages) (buildAndAddIconWidget True) ++ repeat (buildAndAddIconWidget False) windowCount = length $ windows ws maxNeeded = maybe windowCount (min windowCount) $ maxIcons cfg newImagesNeeded = length existingImages < max (minIcons cfg) maxNeeded -- XXX: Only one of the two things being zipped can be an infinite list, -- which is why this newImagesNeeded contortion is needed. imgSrcs = if newImagesNeeded then infiniteImages else existingImages getImgs = maybe imgSrcs (`take` imgSrcs) $ maxIcons cfg justWindows = map Just sortedWindows windowDatas = if newImagesNeeded then justWindows ++ replicate (minIcons cfg - length justWindows) Nothing else justWindows ++ repeat Nothing newImgs <- zipWithM updateIconWidget' getImgs windowDatas when newImagesNeeded $ lift $ Gtk.widgetShowAll $ iconsContainer ic return newImgs getWindowStatusString :: WindowData -> T.Text getWindowStatusString windowData = T.toLower $ T.pack $ case windowData of WindowData { windowMinimized = True } -> "minimized" WindowData { windowActive = True } -> show Active WindowData { windowUrgent = True } -> show Urgent _ -> "normal" possibleStatusStrings :: [T.Text] possibleStatusStrings = map (T.toLower . T.pack) [show Active, show Urgent, "minimized", "normal", "inactive"] updateIconWidget :: IconController -> IconWidget -> Maybe WindowData -> WorkspacesIO () updateIconWidget _ IconWidget { iconContainer = iconButton , iconWindow = windowRef , iconForceUpdate = updateIcon } windowData = do let statusString = maybe "inactive" getWindowStatusString windowData :: T.Text title = T.pack . 
windowTitle <$> windowData setIconWidgetProperties = updateWidgetClasses iconButton [statusString] possibleStatusStrings void $ updateVar windowRef $ const $ return windowData Gtk.widgetSetTooltipText iconButton title lift $ updateIcon >> setIconWidgetProperties data WorkspaceButtonController = WorkspaceButtonController { button :: Gtk.EventBox , buttonWorkspace :: Workspace , contentsController :: WWC } buildButtonController :: ParentControllerConstructor buildButtonController contentsBuilder workspace = do cc <- contentsBuilder workspace workspacesRef <- asks workspacesVar ctx <- ask widget <- getWidget cc lift $ do ebox <- Gtk.eventBoxNew Gtk.containerAdd ebox widget Gtk.eventBoxSetVisibleWindow ebox False _ <- Gtk.onWidgetScrollEvent ebox $ \scrollEvent -> do dir <- Gdk.getEventScrollDirection scrollEvent workspaces <- liftIO $ MV.readMVar workspacesRef let switchOne a = liftIO $ flip runReaderT ctx $ liftX11Def () (switchOneWorkspace a (length (M.toList workspaces) - 1)) >> return True case dir of Gdk.ScrollDirectionUp -> switchOne True Gdk.ScrollDirectionLeft -> switchOne True Gdk.ScrollDirectionDown -> switchOne False Gdk.ScrollDirectionRight -> switchOne False _ -> return False _ <- Gtk.onWidgetButtonPressEvent ebox $ const $ switch ctx $ workspaceIdx workspace return $ WWC WorkspaceButtonController { button = ebox, buttonWorkspace = workspace, contentsController = cc } switch :: (MonadIO m) => WorkspacesContext -> WorkspaceId -> m Bool switch ctx idx = do liftIO $ flip runReaderT ctx $ liftX11Def () $ switchToWorkspace idx return True instance WorkspaceWidgetController WorkspaceButtonController where getWidget wbc = lift $ Gtk.toWidget $ button wbc updateWidget wbc update = do newContents <- updateWidget (contentsController wbc) update return wbc { contentsController = newContents }
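-- Hypothetical configuration sketch appended for illustration (not part of
-- the original module): a workspaces widget that hides empty workspaces,
-- shows at most five window icons per workspace, and overlays the label on
-- top of the icons. The binding name `myWorkspaces` is an assumption.
myWorkspaces :: TaffyIO Gtk.Widget
myWorkspaces = workspacesNew defaultWorkspacesConfig
  { showWorkspaceFn = hideEmpty
  , maxIcons = Just 5
  , minIcons = 1
  , widgetBuilder = buildButtonController buildLabelOverlayController
  }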
teleshoes/taffybar
src/System/Taffybar/Widget/Workspaces.hs
Haskell
bsd-3-clause
32,155
{-# LANGUAGE MultiParamTypeClasses, TypeSynonymInstances, FlexibleInstances, BangPatterns, ConstraintKinds #-} module Numeric.DSDE.SDESolver where import Numeric.DSDE.SDE.GeometricBrownian import Numeric.DSDE.RNG import Numeric.DSDE.SDE import qualified System.Random.MWC as M -- | The Euler-Maruyama solving method. Order 1/2. data EulerMaruyama = EulerMaruyama -- | The Milstein solving method. Order 1. data Milstein = Milstein -- | Type class describing a method of solving SDE problems. -- Defined by the next value produced in a solving sequence. class SDESolver a where w_iplus1 :: (Monad m, SDE sde, RNGGen rng m p, Parameter p) => a -> sde p -> rng -> p -> p -> p -> m p solverName :: a -> String instance SDESolver EulerMaruyama where {-# INLINE w_iplus1 #-} {-# SPECIALIZE w_iplus1 :: EulerMaruyama -> GeometricBrownian Double -> M.GenIO -> Double -> Double -> Double -> IO Double #-} w_iplus1 _ !sde !rng !t_i !w_i !deltat = getRand rng >>= \rand -> return $ w_i + f sde t_i w_i * deltat + g sde t_i w_i * deltaB rand where deltaB r = sqrt deltat * r solverName _ = "Euler-Maruyama" instance SDESolver Milstein where w_iplus1 _ !sde !rng !t_i !w_i !deltat = getRand rng >>= \rand -> return $ w_i + f sde t_i w_i * deltat + g' * deltaB rand + g'/2 * partgoverparty sde t_i w_i * (deltaB rand^^(2 :: Integer) - deltat) where deltaB r = sqrt deltat * r g' = g sde t_i w_i solverName _ = "Milstein"
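-- Hypothetical driver sketch appended for illustration (not part of the
-- original module): step any solver over a fixed grid of n increments of
-- width deltat, starting from (t0, w0). Assumes the Parameter constraint
-- supplies the numeric operations already used by the instances above;
-- the name `solvePath` is an assumption.
solvePath :: (SDESolver solver, Monad m, SDE sde, RNGGen rng m p, Parameter p)
          => solver -> sde p -> rng -> Int -> p -> p -> p -> m p
solvePath solver sde rng n deltat t0 w0 = go (0 :: Int) t0 w0
  where
    go i t w
      | i >= n    = return w
      | otherwise = do
          w' <- w_iplus1 solver sde rng t w deltat
          go (i + 1) (t + deltat) w'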
davnils/sde-solver
src/Numeric/DSDE/SDESolver.hs
Haskell
bsd-3-clause
1,664