| code (string, 2-1.05M chars) | repo_name (string, 5-101 chars) | path (string, 4-991 chars) | language (3 classes) | license (5 classes) | size (int64, 2-1.05M) |
|---|---|---|---|---|---|
{-# LANGUAGE TypeSynonymInstances #-}
module Kwil.Lexer where
import Data.Char
import Data.List
import Data.Maybe (listToMaybe)
import Control.Monad.Trans.State.Lazy
import Control.Monad
data Token = LowerCaseID String
| UpperCaseID String
| Equals
| LBracket
| RBracket
| LParen
| RParen
| ClassDot -- ::
| QualifierDot -- .
| CaseSeparator
| LambdaSymbol
| ArrowSymbol
| Semicolon
| Number Integer
| If
| Then
| Else
| Data
| Case
| Of
| Otherwise
| EndOfFile deriving (Read, Show, Eq)
isIDChar c = isAlphaNum c || c == '_'
-- Reserved symbols and keywords; these cannot be used as names
reservedTokens = [(";", Semicolon),
("{", LBracket),
("}", RBracket),
("(", LParen),
(")", RParen),
("::", ClassDot),
(".", QualifierDot),
("=", Equals),
("|", CaseSeparator),
("\\", LambdaSymbol),
("->", ArrowSymbol),
("if", If),
("then", Then),
("else", Else),
("data", Data),
("case", Case),
("of", Of),
("otherwise", Otherwise)]
data KToken = KToken {
token :: Token,
location :: Location
}
instance Show KToken where
show (KToken token location) = show token
data Location = Loc {
filename :: String,
line :: Integer,
col :: Integer
}
moveRight steps loc = loc {col = col loc + toInteger steps}
-- when newlining, we return to the start of the line
moveDown steps loc = loc {col=1, line=line loc + toInteger steps}
-- utility function, drop but return count as well
dropCount f l = let (dropped, kept) = span f l in (length dropped, kept)
data ReaderData = ReaderData String Location
data InputReader a = Reader { runReader :: ReaderData -> (a, ReaderData) }
instance Functor InputReader where
fmap f reader = Reader $ \rData ->
let (result,state) = runReader reader rData in
(f result, state)
instance Applicative InputReader where
pure a = Reader $ \rData -> (a, rData)
rF <*> rA = Reader $ \rData ->
let (f, newState) = runReader rF rData
(a, newState') = runReader rA newState in (f a, newState')
instance Monad InputReader where
(>>=) (Reader firstOp) nextOpDecider = Reader $ \rData ->
-- run through the reader, and use it to find the next state
let (result, newState) = firstOp rData
nextOp = nextOpDecider result in
runReader nextOp newState
-- peek at the next character (if any) without consuming any input
peek = Reader $ \rData@(ReaderData input loc) -> (listToMaybe input, rData)
-- take one character and move the cursor location
takeOne :: InputReader Char
takeOne = Reader $ \(ReaderData (i:is) loc) ->
if i == '\n' then
let newLoc = moveDown 1 loc in (i, ReaderData is newLoc)
else
let newLoc = moveRight 1 loc in (i, ReaderData is newLoc)
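-- e.g. (illustrative): runReader takeOne (ReaderData "ab" (Loc "f.kw" 1 1))
-- consumes 'a' and leaves ReaderData "b" (Loc "f.kw" 1 2)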
-- just get the current location
curLoc :: InputReader Location
curLoc = Reader $ \rData@(ReaderData _ loc) -> (loc, rData)
-- just get the current input
curInput :: InputReader String
curInput = Reader $ \rData@(ReaderData input pos) -> (input, rData)
-- full token helper: take the location and pair it with an AST token
aToken :: Token -> Location -> InputReader KToken
aToken token loc = return $ KToken token loc
-- take N characters
takeN = flip replicateM takeOne
-- takeWhile for our reader: take characters that match our predicate
takeWhileR :: (Char -> Bool) -> InputReader String
takeWhileR predicate = do
mTop <- peek
case mTop of
Just top ->
if predicate top then do
takeOne -- take the top character off
rest <- takeWhileR predicate
return $ top:rest
else
return []
Nothing -> return []
parseFile :: String -> String -> [KToken]
parseFile input fname =
let initialLocation = Loc fname 1 1
rData = ReaderData input initialLocation
-- run the reader itself
(tokens, endState) = runReader parseKTokens rData in
tokens
parseKTokens :: InputReader [KToken]
parseKTokens = do
token <- parseKTokenM
case token of
KToken EndOfFile loc -> return [token]
_ -> do
tokens <- parseKTokens
return (token:tokens)
parseKTokenM :: InputReader KToken
parseKTokenM = do
maybeFirstChar <- peek
loc <- curLoc
case maybeFirstChar of
Nothing -> aToken EndOfFile loc
Just _ -> parseReserved
parseReserved :: InputReader KToken
parseReserved = do
input <- curInput
-- check for each reserved token whether the token representation
-- is a prefix of the input
case find (flip isPrefixOf input . fst) reservedTokens of
Just (tokenRepr, astToken) -> do
tokenLoc <- curLoc
takeN (length tokenRepr) -- pop the matched characters
aToken astToken tokenLoc
Nothing -> parseIDs
parseIDs = do
(Just fstChar) <- peek -- we've previously checked for non-emptiness
loc <- curLoc
if isLower fstChar then do
lowerID <- takeWhileR isIDChar
aToken (LowerCaseID lowerID) loc
else if isUpper fstChar then do
upperID <- takeWhileR isIDChar
aToken (UpperCaseID upperID) loc
else if isDigit fstChar then do
digits <- takeWhileR isDigit
aToken (Number (read digits)) loc
else if isSpace fstChar then do
parseSpaces
parseKTokenM
else error $ "Lexer token error: unexpected character <<" ++ (fstChar:">>")
parseSpaces = takeWhileR isSpace
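-- Illustrative end-to-end sketch (the source snippet and file name are made up):
--
-- >>> map token (parseFile "if x then 1 else 2" "example.kw")
-- [If,LowerCaseID "x",Then,Number 1,Else,Number 2,EndOfFile]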
| rtpg/kwil | Kwil/Lexer.hs | Haskell | bsd-3-clause | 5,667 |
{-# LANGUAGE
FlexibleInstances
, TypeSynonymInstances
#-}
module Data.String.ToString (ToString (..)) where
import Data.CaseInsensitive (CI, foldedCase)
import qualified Data.ByteString.Lazy.UTF8 as LBU
import qualified Data.ByteString.UTF8 as SBU
import qualified Data.Text as ST
import qualified Data.Text.Lazy as LT
class ToString a where
toString :: a -> String
instance ToString String where
toString = id
instance ToString SBU.ByteString where
toString = SBU.toString
instance ToString LBU.ByteString where
toString = LBU.toString
instance ToString ST.Text where
toString = ST.unpack
instance ToString LT.Text where
toString = LT.unpack
instance ToString s => ToString (CI s) where
toString = toString . foldedCase
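-- Illustrative usage sketch (not part of the original module):
--
-- >>> toString (ST.pack "hello")
-- "hello"
-- >>> toString (SBU.fromString "héllo")
-- "héllo"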
| silkapp/tostring | src/Data/String/ToString.hs | Haskell | bsd-3-clause | 783 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Duration.UK.Corpus
( corpus
, negativeCorpus
) where
import Prelude
import Data.String
import Duckling.Duration.Types
import Duckling.Locale
import Duckling.Resolve
import Duckling.Testing.Types
import Duckling.TimeGrain.Types (Grain(..))
context :: Context
context = testContext {locale = makeLocale UK Nothing}
corpus :: Corpus
corpus = (context, testOptions, allExamples)
negativeCorpus :: NegativeCorpus
negativeCorpus = (context, testOptions, examples)
where
examples =
[ "в дні"
, "секретар"
, "хвилини"
]
allExamples :: [Example]
allExamples = concat
[ examples (DurationData 1 Second)
[ "1 секунда"
, "одна сек"
]
, examples (DurationData 2 Minute)
[ "2 хв"
, "дві хвилини"
]
, examples (DurationData 30 Day)
[ "30 днів"
]
, examples (DurationData 7 Week)
[ "сім тижнів"
]
, examples (DurationData 1 Month)
[ "1 місяць"
]
, examples (DurationData 2 Year)
[ "2 роки"
]
, examples (DurationData 30 Minute)
[ "півгодини"
, "1/2 години"
]
, examples (DurationData 12 Hour)
[ "пів дня"
]
, examples (DurationData 90 Minute)
[ "півтори години"
]
, examples (DurationData 27 Month)
[ "2 роки і 3 місяці"
, "2 роки, 3 місяці"
]
, examples (DurationData 31719604 Second)
[ "1 рік, 2 дня, 3 години і 4 секунди"
]
]
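-- Worked check for the last figure above, assuming a 365-day year:
--   1 year + 2 days + 3 hours + 4 seconds
--     = 365*86400 + 2*86400 + 3*3600 + 4
--     = 31536000 + 172800 + 10800 + 4
--     = 31719604 seconds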
| facebookincubator/duckling | Duckling/Duration/UK/Corpus.hs | Haskell | bsd-3-clause | 2,019 |
-- JavaScript Contract Compiler
module Main where
import System.Console.GetOpt
import System.Environment
import System.Directory
import System.FilePath
import System.Exit
import Control.Monad
import BrownPLT.JavaScript.Contracts
import Paths_JsContracts -- created by Cabal
import BrownPLT.JavaScript.Parser (parseJavaScriptFromFile)
import Data.List
data Flag
= Help
| Release
| Debug
| Namespace String
| Interface String
| NoExport
deriving (Eq,Ord,Show)
options :: [ OptDescr Flag ]
options =
[ Option ['h'] ["help"] (NoArg Help)
"display this help message"
, Option ['r'] ["release"] (NoArg Release)
"encapsulate, ignoring all contracts"
, Option ['d'] ["debug"] (NoArg Debug)
"enable contracts and encapsulate (default)"
, Option ['n'] ["namespace"] (ReqArg Namespace "NAMESPACE")
"exports names to the namespace"
, Option [] ["no-export"] (NoArg NoExport)
"do not export names to the global object"
, Option ['i'] ["interface"] (ReqArg Interface "PATH")
"path to the interface; uses module.jsi by default"
]
usage = usageInfo
"Usage: jscc [options] module.js\nOptions:\n" options
main = do
args <- getArgs
dataDir <- getDataDir
let (opts', nonOpts, errors) = getOpt Permute options args
let opts = sort opts'
unless (null errors) $ do
mapM_ putStrLn errors
fail "jscc terminated"
checkHelp opts
(isDebugMode, opts) <- getDebugMode opts
(namespace, opts) <- getNamespace opts
(ifacePath, opts) <- getInterfacePath opts nonOpts
(isExport, opts) <- getExportGlobals opts
when (not $ null opts) $ do
putStrLn $ "spurious arguments: " ++ (show opts)
fail "jscc terminated"
case nonOpts of
[implPath] -> do
checkFile implPath
rawImpl <- readFile implPath
let boilerplatePath = dataDir </> "contracts.js"
rawBoilerplate <- readFile boilerplatePath
interface <- parseInterface ifacePath
let result = if isDebugMode
then compileFormatted rawImpl implPath rawBoilerplate
isExport interface
else compileRelease rawImpl implPath rawBoilerplate
isExport interface namespace
putStrLn result
return ()
_ -> do
putStrLn "expected a single filename.js"
fail "jscc terminated"
checkFile path = do
exists <- doesFileExist path
unless exists $ do
putStrLn $ "could not find the file: " ++ path
exitFailure
getDebugMode (Release:rest) = return (False,rest)
getDebugMode (Debug:rest) = return (True,rest)
getDebugMode rest = return (True,rest)
getNamespace ((Namespace s):rest) = return (Just s, rest)
getNamespace rest = return (Nothing,rest)
checkHelp (Help:_) = do
putStrLn usage
exitSuccess
checkHelp _ = return ()
getExportGlobals (NoExport:rest) = return (False, rest)
getExportGlobals rest = return (True, rest)
getInterfacePath :: [Flag] -> [String] -> IO (FilePath,[Flag])
getInterfacePath (Interface path:rest) _ = do
checkFile path
return (path,rest)
getInterfacePath rest (implPath:_) = do
let path = addExtension (dropExtension implPath) "jsi"
checkFile path
return (path,rest)
getInterfacePath _ [] = do
putStrLn "Invalid arguments (use -h for help)"
exitFailure
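-- Illustrative invocations (sketch only; the file names are made up):
--
--   jscc widgets.js                  -- debug mode (default), reads widgets.jsi
--   jscc -r -n Widgets widgets.js    -- release mode, export under namespace "Widgets"
--   jscc -i api.jsi --no-export widgets.js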
| brownplt/javascript-contracts | src/Jscc.hs | Haskell | bsd-3-clause | 3,294 |
{-|
Module : Numeric.ER.ShowHTML
Description : Misc facilities for HTML rendering.
Copyright : (c) Michal Konecny
License : BSD3
Maintainer : mikkonecny@gmail.com
Stability : experimental
Portability : portable
-}
module Numeric.ER.ShowHTML where
import qualified Text.Html as H
import Text.Regex
{-|
Render HTML in a way that can be inlined in
JavaScript strings etc.
-}
showHTML ::
(H.HTML t) =>
t -> String
showHTML v =
escapeNewLines $
renderHtmlNoHeader $
H.toHtml v
where
-- stripHeader s =
-- (splitRegex (mkRegex "-->") s) !! 1
escapeNewLines s =
(subRegex (mkRegex "([^\\])$") s "\\1\\\\")
abovesTable attrs cells =
H.table H.! attrs H.<< (H.aboves $ map (H.td H.<<) cells)
besidesTable attrs cells =
H.table H.! attrs H.<< (H.aboves [H.besides $ map (H.td H.<<) cells])
renderHtmlNoHeader :: H.Html -> String
renderHtmlNoHeader theHtml =
foldr (.) id (map (H.renderHtml' 0)
(H.getHtmlElements theHtml)) "\n"
toHtmlDefault :: (Show a) => a -> H.Html
toHtmlDefault = H.toHtml . show
instance (H.HTML a) => H.HTML (Maybe a) where
toHtml Nothing = H.toHtml $ "[Nothing]"
toHtml (Just a) = H.toHtml a
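-- Illustrative usage sketch (not part of the original module):
--
-- >>> showHTML (toHtmlDefault (Just (3 :: Int)))
-- renders the Show output "Just 3" as HTML with line ends escaped, so the
-- result can be inlined into a JavaScript string literal.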
| michalkonecny/polypaver | src/Numeric/ER/ShowHTML.hs | Haskell | bsd-3-clause | 1,285 |
module BuildParseTests (test) where
import Blaze.ByteString.Builder (Builder, toByteString)
import Data.ByteString (ByteString)
import Data.Attoparsec.ByteString (Parser, parseOnly, endOfInput)
import Test.Framework (Test, testGroup)
import Test.Framework.Providers.QuickCheck2 (testProperty)
import Network.SPDY.Frames
import Network.SPDY.Internal.Deserialize
import Network.SPDY.Internal.Serialize
import Instances ()
test :: Test
test = testGroup "Build-parse tests"
[ testProperty "Raw frame header" prop_buildParseRawFrameHeader
, testProperty "DataLength" prop_buildParseDataLength
, testProperty "StreamID" prop_buildParseStreamID
, testProperty "HeaderCount" prop_buildParseHeaderCount
, testProperty "HeaderName" prop_buildParseHeaderName
, testProperty "HeaderValue" prop_buildParseHeaderValue
, testProperty "Priority" prop_buildParsePriority
, testProperty "Slot" prop_buildParseSlot
, testProperty "TerminationStatus" prop_buildParseTerminationStatus
, testProperty "SettingID" prop_buildParseSettingID
, testProperty "SettingValue" prop_buildParseSettingValue
, testProperty "SettingIDAndFlags" prop_buildParseSettingIDAndFlags
, testProperty "PingID" prop_buildParsePingID
, testProperty "GoAwayStatus" prop_buildParseGoAwayStatus
, testProperty "DeltaWindowSize" prop_buildParseDeltaWindowSize
, testProperty "Slot16" prop_buildParseSlot16
, testProperty "Proof" prop_buildParseProof
, testProperty "Certificate" prop_buildParseCertificate ]
prop_buildParseRawFrameHeader :: RawFrameHeader -> Bool
prop_buildParseRawFrameHeader = prop_buildParse rawHeaderBuilder parseFrameHeader
prop_buildParseDataLength :: DataLength -> Bool
prop_buildParseDataLength = prop_buildParse toBuilder parseDataLength
prop_buildParseStreamID :: StreamID -> Bool
prop_buildParseStreamID = prop_buildParse toBuilder parseStreamID
prop_buildParsePriority :: Priority -> Bool
prop_buildParsePriority = prop_buildParse toBuilder parsePriority
prop_buildParseSlot :: Slot -> Bool
prop_buildParseSlot = prop_buildParse toBuilder parseSlot
prop_buildParseHeaderCount :: HeaderCount -> Bool
prop_buildParseHeaderCount = prop_buildParse toBuilder parseHeaderCount
prop_buildParseHeaderName :: HeaderName -> Bool
prop_buildParseHeaderName = prop_buildParse toBuilder parseHeaderName
prop_buildParseHeaderValue :: HeaderValue -> Bool
prop_buildParseHeaderValue = prop_buildParse toBuilder parseHeaderValue
prop_buildParseTerminationStatus :: TerminationStatus -> Bool
prop_buildParseTerminationStatus = prop_buildParse toBuilder parseTerminationStatus
prop_buildParseSettingID :: SettingID -> Bool
prop_buildParseSettingID = prop_buildParse toBuilder parseSettingID
prop_buildParseSettingValue :: SettingValue -> Bool
prop_buildParseSettingValue = prop_buildParse toBuilder parseSettingValue
prop_buildParseSettingIDAndFlags :: SettingIDAndFlags -> Bool
prop_buildParseSettingIDAndFlags = prop_buildParse toBuilder parseSettingIDAndFlags
prop_buildParsePingID :: PingID -> Bool
prop_buildParsePingID = prop_buildParse toBuilder parsePingID
prop_buildParseGoAwayStatus :: GoAwayStatus -> Bool
prop_buildParseGoAwayStatus = prop_buildParse toBuilder parseGoAwayStatus
prop_buildParseDeltaWindowSize :: DeltaWindowSize -> Bool
prop_buildParseDeltaWindowSize = prop_buildParse toBuilder parseDeltaWindowSize
prop_buildParseSlot16 :: Slot16 -> Bool
prop_buildParseSlot16 = prop_buildParse toBuilder parseSlot16
prop_buildParseProof :: Proof -> Bool
prop_buildParseProof = prop_buildParse toBuilder parseProof
prop_buildParseCertificate :: Certificate -> Bool
prop_buildParseCertificate = prop_buildParse toBuilder parseCertificate
prop_buildParse :: Eq a => (a -> Builder) -> Parser a -> a -> Bool
prop_buildParse builderFor parser =
prop_serializeParse (toByteString . builderFor) (parseOnly parser')
where parser' = do r <- parser; endOfInput; return r
prop_serializeParse :: Eq a => (a -> ByteString) -> (ByteString -> Either String a) -> a -> Bool
prop_serializeParse serialize parse x =
either (const False) (x ==) $ parse $ serialize x
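-- Minimal runner sketch (assumes this module is wired into a test executable;
-- Test.Framework.defaultMain is the usual entry point):
--
-- main :: IO ()
-- main = Test.Framework.defaultMain [test]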
| kcharter/spdy-base | test/BuildParseTests.hs | Haskell | bsd-3-clause | 4,180 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE PartialTypeSignatures #-}
module Language.CodeGen.LLVM where
import Prelude hiding (init)
import Control.Monad.Except (runExceptT)
import Control.Monad.State
import Data.Foldable(toList)
import Data.Sequence (Seq,(<|),(|>),(><))
import qualified Data.Sequence as S
import LLVM.General.AST (Module,Definition,BasicBlock,Named,Instruction)
import qualified LLVM.General.AST as L
import qualified LLVM.General.Context as L -- withContext, used in compile'
import qualified LLVM.General.Module as L  -- withModuleFromAST, moduleLLVMAssembly
import qualified LLVM.General.AST.Linkage as Linkage
import qualified LLVM.General.AST.Visibility as Visibility
import qualified LLVM.General.AST.CallingConvention as CallingConvention
import qualified LLVM.General.AST.Type as Type
import qualified LLVM.General.AST.Constant as Constant
import qualified LLVM.General.AST.Float as Float
import Language.SimpleExpression (SimpleExpr)
import qualified Language.SimpleExpression as Simple
import Language.Expression (Expr)
import qualified Language.Expression as Full
import Language.Function
data Two = One | Two
instance Show Two where
show One = "1"
show Two = "2"
data GroundExpr a where
Var :: Seq Two -> GroundExpr Double
Const :: Double -> GroundExpr Double
App :: GroundExpr (a -> b) -> GroundExpr a -> GroundExpr b
Fun :: Fun (a -> b) -> GroundExpr (a -> b)
deriving instance Show (GroundExpr a)
data Assignment where
Assignment :: Seq Two -> Maybe Double -> GroundExpr Double -> Assignment
deriving instance Show Assignment
type Assignments = Seq Assignment
compile :: SynArrow SimpleExpr Function b Double -> Module
compile f = case optimize f of
LoopD i (Arr (Function g)) -> modul "test" $ assignments (inj (Simple.Const 0) i) $ g Full.Var
compile' :: SynArrow SimpleExpr Function b Double -> IO (Either String String)
compile' s = L.withContext $ \ctx ->
runExceptT $ L.withModuleFromAST ctx (compile s) $ \m ->
L.moduleLLVMAssembly m
modul :: String -> Assignments -> Module
modul n ass = L.defaultModule
{ L.moduleName = n
, L.moduleDefinitions = return $ mainFunction ass
}
mainFunction :: Assignments -> Definition
mainFunction ass =
L.GlobalDefinition $ L.Function Linkage.Internal
Visibility.Default
Nothing
CallingConvention.Fast
[]
(Type.IntegerType 32)
(L.Name "main")
([],False)
[]
Nothing
Nothing
0
Nothing
Nothing
$ mainBody ass
mainBody :: Assignments -> [BasicBlock]
mainBody as =
let cgs = execState (genInstructions as) empty
in [ L.BasicBlock
(L.Name "init")
(toList $ initBlock cgs)
(L.Do (L.Br (L.Name "loop") []))
, L.BasicBlock
(L.Name "loop")
(toList $ loopBlock cgs)
(L.Do (L.Br (L.Name "loop") []))
]
genInstructions :: Seq Assignment -> Gen ()
genInstructions as = do
forM_ as $ \(Assignment to init e) -> do
case init of
Just c -> initInst $ initName to L.:= L.Add False False (L.ConstantOperand (Constant.Float (Float.Double 0))) (L.ConstantOperand (Constant.Float (Float.Double c))) []
Nothing -> initInst $ initName to L.:= L.Add False False (L.ConstantOperand (Constant.Float (Float.Double 0))) (L.ConstantOperand (Constant.Float (Float.Double 0))) []
lastName <- loopExpr e
loopInst $ loopName to L.:= L.Add False False (L.ConstantOperand (Constant.Float (Float.Double 0))) (L.LocalReference Type.double lastName) []
where
loopExpr :: GroundExpr Double -> Gen L.Name
loopExpr expr = case expr of
Var n -> do
let lx = loopName n
ix = initName n
loopInst $ lx L.:= L.Phi Type.double [ (L.LocalReference Type.double ix, L.Name "init")
, (L.LocalReference Type.double lx, L.Name "loop")
] []
return lx
Const c -> do
x <- fresh
loopInst $ x L.:= L.Add False False (L.ConstantOperand (Constant.Float (Float.Double c))) (L.ConstantOperand (Constant.Float (Float.Double c))) []
return x
GFun2 Add e1 e2 -> app2 L.Add e1 e2
GFun2 Mult e1 e2 -> app2 L.Mul e1 e2
GFun2 Sub e1 e2 -> app2 L.Sub e1 e2
app2 :: Op2 -> GroundExpr Double -> GroundExpr Double -> Gen L.Name
app2 f e1 e2 = do
x1 <- loopExpr e1
x2 <- loopExpr e2
x3 <- fresh
loopInst $ x3 L.:= f False False (L.LocalReference Type.double x1) (L.LocalReference Type.double x2) []
return x3
type Op2 = Bool -> Bool -> L.Operand -> L.Operand -> L.InstructionMetadata -> Instruction
initName :: Seq Two -> L.Name
initName ids = L.Name $ 'i' : (concat (toList (fmap show ids)))
loopName :: Seq Two -> L.Name
loopName ids = L.Name $ 'l' : (concat (toList (fmap show ids)))
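-- e.g. initName (S.fromList [One, Two]) == L.Name "i12"
--      loopName (S.fromList [One, Two]) == L.Name "l12"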
data CodeGenState = CodeGenState
{ initBlock :: Seq (Named Instruction)
, loopBlock :: Seq (Named Instruction)
, nextVariable :: Word
}
empty :: CodeGenState
empty = CodeGenState S.empty S.empty 0
type Gen = State CodeGenState
initInst :: Named Instruction -> Gen ()
initInst inst = modify $ \cgs -> cgs {initBlock = initBlock cgs |> inst}
loopInst :: Named Instruction -> Gen ()
loopInst inst = modify $ \cgs -> cgs {loopBlock = loopBlock cgs |> inst}
fresh :: Gen L.Name
fresh = state (\cgs -> let i = nextVariable cgs in (L.UnName i, cgs {nextVariable = i+1}))
assignments :: SimpleExpr e -> Expr e -> Seq Assignment
assignments prod0 = go S.empty prod0 . Full.optimizeExpr
where
go :: Seq Two -> SimpleExpr e -> Expr e -> Seq Assignment
go to prod expr = case (prod,expr) of
(Simple.Inj p1 p2,Full.Inj e1 e2) ->
go (to |> One) p1 e1 >< go (to |> Two) p2 e2
(Simple.Const c,e1) -> ass to (Just c) e1
(Simple.Unit,e1) -> ass to Nothing e1
_ -> error "cannot happen"
ass to p expr = case lowerExpr expr of
Just e -> return $ Assignment to p e
Nothing -> error "Expr is not ground."
pattern Fun1 f e1 = Full.App (Full.Fun f) e1
pattern GFun1 f e1 = App (Fun f) e1
pattern Fun2 f e1 e2 = Full.App (Full.App (Full.Fun f) e1) e2
pattern GFun2 f e1 e2 = App (App (Fun f) e1) e2
lowerExpr :: Expr a -> Maybe (GroundExpr Double)
lowerExpr = go S.empty
where
go :: Seq Two -> Expr a -> Maybe (GroundExpr Double)
go addr expr = case expr of
Full.Proj1 e1 -> go (One <| addr) e1
Full.Proj2 e1 -> go (Two <| addr) e1
Full.Var -> Just $ Var addr
Full.Const d -> Just $ Const d
Fun2 Add e1 e2 -> GFun2 Add <$> go addr e1 <*> go addr e2
Fun2 Mult e1 e2 -> GFun2 Mult <$> go addr e1 <*> go addr e2
Fun2 Sub e1 e2 -> GFun2 Sub <$> go addr e1 <*> go addr e2
Fun2 Div e1 e2 -> GFun2 Div <$> go addr e1 <*> go addr e2
Fun1 Abs e1 -> GFun1 Abs <$> go addr e1
Fun1 Signum e1 -> GFun1 Signum <$> go addr e1
Fun1 Sin e1 -> GFun1 Sin <$> go addr e1
Fun1 Cos e1 -> GFun1 Cos <$> go addr e1
Full.Fun _ -> Nothing
Full.Inj _ _ -> Nothing
Full.App _ _ -> Nothing
| svenkeidel/hsynth | src/Language/CodeGen/LLVM.hs | Haskell | bsd-3-clause | 7,098 |
{-# OPTIONS_GHC -w #-}
module Parser where
import AST
import Lexer
-- parser produced by Happy Version 1.18.10
data HappyAbsSyn t4 t5 t6 t7 t8 t9 t10 t11 t12 t13 t14 t15 t16 t17 t18
= HappyTerminal (Token)
| HappyErrorToken Int
| HappyAbsSyn4 t4
| HappyAbsSyn5 t5
| HappyAbsSyn6 t6
| HappyAbsSyn7 t7
| HappyAbsSyn8 t8
| HappyAbsSyn9 t9
| HappyAbsSyn10 t10
| HappyAbsSyn11 t11
| HappyAbsSyn12 t12
| HappyAbsSyn13 t13
| HappyAbsSyn14 t14
| HappyAbsSyn15 t15
| HappyAbsSyn16 t16
| HappyAbsSyn17 t17
| HappyAbsSyn18 t18
action_0 (19) = happyShift action_3
action_0 (4) = happyGoto action_4
action_0 (5) = happyGoto action_2
action_0 _ = happyFail
action_1 (19) = happyShift action_3
action_1 (5) = happyGoto action_2
action_1 _ = happyFail
action_2 (19) = happyShift action_9
action_2 (6) = happyGoto action_8
action_2 _ = happyReduce_4
action_3 (22) = happyShift action_6
action_3 (59) = happyShift action_7
action_3 (18) = happyGoto action_5
action_3 _ = happyFail
action_4 (61) = happyAccept
action_4 _ = happyFail
action_5 (57) = happyShift action_11
action_5 _ = happyFail
action_6 _ = happyReduce_55
action_7 _ = happyReduce_54
action_8 _ = happyReduce_1
action_9 (22) = happyShift action_6
action_9 (59) = happyShift action_7
action_9 (18) = happyGoto action_10
action_9 _ = happyFail
action_10 (57) = happyShift action_13
action_10 _ = happyFail
action_11 (20) = happyShift action_12
action_11 _ = happyFail
action_12 (21) = happyShift action_15
action_12 _ = happyFail
action_13 (7) = happyGoto action_14
action_13 _ = happyReduce_5
action_14 (20) = happyShift action_20
action_14 (22) = happyShift action_6
action_14 (33) = happyShift action_21
action_14 (34) = happyShift action_22
action_14 (59) = happyShift action_7
action_14 (8) = happyGoto action_17
action_14 (11) = happyGoto action_18
action_14 (18) = happyGoto action_19
action_14 _ = happyReduce_8
action_15 (31) = happyShift action_16
action_15 _ = happyFail
action_16 (22) = happyShift action_27
action_16 _ = happyFail
action_17 (58) = happyShift action_26
action_17 _ = happyFail
action_18 (22) = happyShift action_6
action_18 (59) = happyShift action_7
action_18 (18) = happyGoto action_25
action_18 _ = happyFail
action_19 _ = happyReduce_16
action_20 (22) = happyShift action_6
action_20 (33) = happyShift action_21
action_20 (34) = happyShift action_22
action_20 (59) = happyShift action_7
action_20 (11) = happyGoto action_24
action_20 (18) = happyGoto action_19
action_20 _ = happyFail
action_21 (55) = happyShift action_23
action_21 _ = happyReduce_15
action_22 _ = happyReduce_14
action_23 (56) = happyShift action_32
action_23 _ = happyFail
action_24 (22) = happyShift action_6
action_24 (59) = happyShift action_7
action_24 (18) = happyGoto action_31
action_24 _ = happyFail
action_25 (52) = happyShift action_30
action_25 _ = happyFail
action_26 (19) = happyShift action_9
action_26 (6) = happyGoto action_29
action_26 _ = happyReduce_4
action_27 (53) = happyShift action_28
action_27 _ = happyFail
action_28 (32) = happyShift action_34
action_28 _ = happyFail
action_29 _ = happyReduce_3
action_30 _ = happyReduce_6
action_31 (53) = happyShift action_33
action_31 _ = happyFail
action_32 _ = happyReduce_13
action_33 (22) = happyShift action_6
action_33 (33) = happyShift action_21
action_33 (34) = happyShift action_22
action_33 (59) = happyShift action_7
action_33 (9) = happyGoto action_36
action_33 (11) = happyGoto action_37
action_33 (18) = happyGoto action_19
action_33 _ = happyReduce_10
action_34 (55) = happyShift action_35
action_34 _ = happyFail
action_35 (56) = happyShift action_40
action_35 _ = happyFail
action_36 (54) = happyShift action_39
action_36 _ = happyFail
action_37 (22) = happyShift action_6
action_37 (59) = happyShift action_7
action_37 (18) = happyGoto action_38
action_37 _ = happyFail
action_38 (50) = happyShift action_44
action_38 (10) = happyGoto action_43
action_38 _ = happyReduce_12
action_39 (57) = happyShift action_42
action_39 _ = happyFail
action_40 (22) = happyShift action_6
action_40 (59) = happyShift action_7
action_40 (18) = happyGoto action_41
action_40 _ = happyFail
action_41 (54) = happyShift action_47
action_41 _ = happyFail
action_42 (7) = happyGoto action_46
action_42 _ = happyReduce_5
action_43 _ = happyReduce_9
action_44 (22) = happyShift action_6
action_44 (33) = happyShift action_21
action_44 (34) = happyShift action_22
action_44 (59) = happyShift action_7
action_44 (11) = happyGoto action_45
action_44 (18) = happyGoto action_19
action_44 _ = happyFail
action_45 (22) = happyShift action_6
action_45 (59) = happyShift action_7
action_45 (18) = happyGoto action_56
action_45 _ = happyFail
action_46 (22) = happyShift action_6
action_46 (25) = happyShift action_52
action_46 (27) = happyShift action_53
action_46 (28) = happyShift action_54
action_46 (33) = happyShift action_21
action_46 (34) = happyShift action_22
action_46 (57) = happyShift action_55
action_46 (59) = happyShift action_7
action_46 (11) = happyGoto action_18
action_46 (12) = happyGoto action_49
action_46 (13) = happyGoto action_50
action_46 (18) = happyGoto action_51
action_46 _ = happyReduce_25
action_47 (57) = happyShift action_48
action_47 _ = happyFail
action_48 (7) = happyGoto action_67
action_48 _ = happyReduce_5
action_49 (22) = happyShift action_6
action_49 (25) = happyShift action_52
action_49 (27) = happyShift action_53
action_49 (28) = happyShift action_54
action_49 (57) = happyShift action_55
action_49 (59) = happyShift action_7
action_49 (12) = happyGoto action_49
action_49 (13) = happyGoto action_66
action_49 (18) = happyGoto action_59
action_49 _ = happyReduce_25
action_50 (24) = happyShift action_65
action_50 _ = happyFail
action_51 (49) = happyShift action_63
action_51 (55) = happyShift action_64
action_51 _ = happyReduce_16
action_52 (53) = happyShift action_62
action_52 _ = happyFail
action_53 (53) = happyShift action_61
action_53 _ = happyFail
action_54 (53) = happyShift action_60
action_54 _ = happyFail
action_55 (22) = happyShift action_6
action_55 (25) = happyShift action_52
action_55 (27) = happyShift action_53
action_55 (28) = happyShift action_54
action_55 (57) = happyShift action_55
action_55 (59) = happyShift action_7
action_55 (12) = happyGoto action_49
action_55 (13) = happyGoto action_58
action_55 (18) = happyGoto action_59
action_55 _ = happyReduce_25
action_56 (50) = happyShift action_44
action_56 (10) = happyGoto action_57
action_56 _ = happyReduce_12
action_57 _ = happyReduce_11
action_58 (58) = happyShift action_84
action_58 _ = happyFail
action_59 (49) = happyShift action_63
action_59 (55) = happyShift action_64
action_59 _ = happyFail
action_60 (22) = happyShift action_6
action_60 (23) = happyShift action_72
action_60 (30) = happyShift action_73
action_60 (35) = happyShift action_74
action_60 (36) = happyShift action_75
action_60 (37) = happyShift action_76
action_60 (53) = happyShift action_77
action_60 (59) = happyShift action_7
action_60 (60) = happyShift action_78
action_60 (14) = happyGoto action_83
action_60 (15) = happyGoto action_70
action_60 (18) = happyGoto action_71
action_60 _ = happyFail
action_61 (22) = happyShift action_6
action_61 (23) = happyShift action_72
action_61 (30) = happyShift action_73
action_61 (35) = happyShift action_74
action_61 (36) = happyShift action_75
action_61 (37) = happyShift action_76
action_61 (53) = happyShift action_77
action_61 (59) = happyShift action_7
action_61 (60) = happyShift action_78
action_61 (14) = happyGoto action_82
action_61 (15) = happyGoto action_70
action_61 (18) = happyGoto action_71
action_61 _ = happyFail
action_62 (22) = happyShift action_6
action_62 (23) = happyShift action_72
action_62 (30) = happyShift action_73
action_62 (35) = happyShift action_74
action_62 (36) = happyShift action_75
action_62 (37) = happyShift action_76
action_62 (53) = happyShift action_77
action_62 (59) = happyShift action_7
action_62 (60) = happyShift action_78
action_62 (14) = happyGoto action_81
action_62 (15) = happyGoto action_70
action_62 (18) = happyGoto action_71
action_62 _ = happyFail
action_63 (22) = happyShift action_6
action_63 (23) = happyShift action_72
action_63 (30) = happyShift action_73
action_63 (35) = happyShift action_74
action_63 (36) = happyShift action_75
action_63 (37) = happyShift action_76
action_63 (53) = happyShift action_77
action_63 (59) = happyShift action_7
action_63 (60) = happyShift action_78
action_63 (14) = happyGoto action_80
action_63 (15) = happyGoto action_70
action_63 (18) = happyGoto action_71
action_63 _ = happyFail
action_64 (22) = happyShift action_6
action_64 (23) = happyShift action_72
action_64 (30) = happyShift action_73
action_64 (35) = happyShift action_74
action_64 (36) = happyShift action_75
action_64 (37) = happyShift action_76
action_64 (53) = happyShift action_77
action_64 (59) = happyShift action_7
action_64 (60) = happyShift action_78
action_64 (14) = happyGoto action_79
action_64 (15) = happyGoto action_70
action_64 (18) = happyGoto action_71
action_64 _ = happyFail
action_65 (22) = happyShift action_6
action_65 (23) = happyShift action_72
action_65 (30) = happyShift action_73
action_65 (35) = happyShift action_74
action_65 (36) = happyShift action_75
action_65 (37) = happyShift action_76
action_65 (53) = happyShift action_77
action_65 (59) = happyShift action_7
action_65 (60) = happyShift action_78
action_65 (14) = happyGoto action_69
action_65 (15) = happyGoto action_70
action_65 (18) = happyGoto action_71
action_65 _ = happyFail
action_66 _ = happyReduce_24
action_67 (22) = happyShift action_6
action_67 (25) = happyShift action_52
action_67 (27) = happyShift action_53
action_67 (28) = happyShift action_54
action_67 (33) = happyShift action_21
action_67 (34) = happyShift action_22
action_67 (57) = happyShift action_55
action_67 (59) = happyShift action_7
action_67 (11) = happyGoto action_18
action_67 (12) = happyGoto action_49
action_67 (13) = happyGoto action_68
action_67 (18) = happyGoto action_51
action_67 _ = happyReduce_25
action_68 (58) = happyShift action_108
action_68 _ = happyFail
action_69 (38) = happyShift action_85
action_69 (39) = happyShift action_86
action_69 (40) = happyShift action_87
action_69 (41) = happyShift action_88
action_69 (42) = happyShift action_89
action_69 (43) = happyShift action_90
action_69 (44) = happyShift action_91
action_69 (45) = happyShift action_92
action_69 (46) = happyShift action_93
action_69 (47) = happyShift action_94
action_69 (48) = happyShift action_95
action_69 (52) = happyShift action_107
action_69 _ = happyFail
action_70 (51) = happyShift action_105
action_70 (55) = happyShift action_106
action_70 _ = happyReduce_41
action_71 _ = happyReduce_43
action_72 (22) = happyShift action_6
action_72 (33) = happyShift action_104
action_72 (59) = happyShift action_7
action_72 (18) = happyGoto action_103
action_72 _ = happyFail
action_73 _ = happyReduce_44
action_74 _ = happyReduce_38
action_75 _ = happyReduce_39
action_76 (22) = happyShift action_6
action_76 (23) = happyShift action_72
action_76 (30) = happyShift action_73
action_76 (35) = happyShift action_74
action_76 (36) = happyShift action_75
action_76 (37) = happyShift action_76
action_76 (53) = happyShift action_77
action_76 (59) = happyShift action_7
action_76 (60) = happyShift action_78
action_76 (14) = happyGoto action_102
action_76 (15) = happyGoto action_70
action_76 (18) = happyGoto action_71
action_76 _ = happyFail
action_77 (22) = happyShift action_6
action_77 (23) = happyShift action_72
action_77 (30) = happyShift action_73
action_77 (35) = happyShift action_74
action_77 (36) = happyShift action_75
action_77 (37) = happyShift action_76
action_77 (53) = happyShift action_77
action_77 (59) = happyShift action_7
action_77 (60) = happyShift action_78
action_77 (14) = happyGoto action_101
action_77 (15) = happyGoto action_70
action_77 (18) = happyGoto action_71
action_77 _ = happyFail
action_78 _ = happyReduce_37
action_79 (38) = happyShift action_85
action_79 (39) = happyShift action_86
action_79 (40) = happyShift action_87
action_79 (41) = happyShift action_88
action_79 (42) = happyShift action_89
action_79 (43) = happyShift action_90
action_79 (44) = happyShift action_91
action_79 (45) = happyShift action_92
action_79 (46) = happyShift action_93
action_79 (47) = happyShift action_94
action_79 (48) = happyShift action_95
action_79 (56) = happyShift action_100
action_79 _ = happyFail
action_80 (38) = happyShift action_85
action_80 (39) = happyShift action_86
action_80 (40) = happyShift action_87
action_80 (41) = happyShift action_88
action_80 (42) = happyShift action_89
action_80 (43) = happyShift action_90
action_80 (44) = happyShift action_91
action_80 (45) = happyShift action_92
action_80 (46) = happyShift action_93
action_80 (47) = happyShift action_94
action_80 (48) = happyShift action_95
action_80 (52) = happyShift action_99
action_80 _ = happyFail
action_81 (38) = happyShift action_85
action_81 (39) = happyShift action_86
action_81 (40) = happyShift action_87
action_81 (41) = happyShift action_88
action_81 (42) = happyShift action_89
action_81 (43) = happyShift action_90
action_81 (44) = happyShift action_91
action_81 (45) = happyShift action_92
action_81 (46) = happyShift action_93
action_81 (47) = happyShift action_94
action_81 (48) = happyShift action_95
action_81 (54) = happyShift action_98
action_81 _ = happyFail
action_82 (38) = happyShift action_85
action_82 (39) = happyShift action_86
action_82 (40) = happyShift action_87
action_82 (41) = happyShift action_88
action_82 (42) = happyShift action_89
action_82 (43) = happyShift action_90
action_82 (44) = happyShift action_91
action_82 (45) = happyShift action_92
action_82 (46) = happyShift action_93
action_82 (47) = happyShift action_94
action_82 (48) = happyShift action_95
action_82 (54) = happyShift action_97
action_82 _ = happyFail
action_83 (38) = happyShift action_85
action_83 (39) = happyShift action_86
action_83 (40) = happyShift action_87
action_83 (41) = happyShift action_88
action_83 (42) = happyShift action_89
action_83 (43) = happyShift action_90
action_83 (44) = happyShift action_91
action_83 (45) = happyShift action_92
action_83 (46) = happyShift action_93
action_83 (47) = happyShift action_94
action_83 (48) = happyShift action_95
action_83 (54) = happyShift action_96
action_83 _ = happyFail
action_84 _ = happyReduce_17
action_85 (22) = happyShift action_6
action_85 (23) = happyShift action_72
action_85 (30) = happyShift action_73
action_85 (35) = happyShift action_74
action_85 (36) = happyShift action_75
action_85 (37) = happyShift action_76
action_85 (53) = happyShift action_77
action_85 (59) = happyShift action_7
action_85 (60) = happyShift action_78
action_85 (14) = happyGoto action_131
action_85 (15) = happyGoto action_70
action_85 (18) = happyGoto action_71
action_85 _ = happyFail
action_86 (22) = happyShift action_6
action_86 (23) = happyShift action_72
action_86 (30) = happyShift action_73
action_86 (35) = happyShift action_74
action_86 (36) = happyShift action_75
action_86 (37) = happyShift action_76
action_86 (53) = happyShift action_77
action_86 (59) = happyShift action_7
action_86 (60) = happyShift action_78
action_86 (14) = happyGoto action_130
action_86 (15) = happyGoto action_70
action_86 (18) = happyGoto action_71
action_86 _ = happyFail
action_87 (22) = happyShift action_6
action_87 (23) = happyShift action_72
action_87 (30) = happyShift action_73
action_87 (35) = happyShift action_74
action_87 (36) = happyShift action_75
action_87 (37) = happyShift action_76
action_87 (53) = happyShift action_77
action_87 (59) = happyShift action_7
action_87 (60) = happyShift action_78
action_87 (14) = happyGoto action_129
action_87 (15) = happyGoto action_70
action_87 (18) = happyGoto action_71
action_87 _ = happyFail
action_88 (22) = happyShift action_6
action_88 (23) = happyShift action_72
action_88 (30) = happyShift action_73
action_88 (35) = happyShift action_74
action_88 (36) = happyShift action_75
action_88 (37) = happyShift action_76
action_88 (53) = happyShift action_77
action_88 (59) = happyShift action_7
action_88 (60) = happyShift action_78
action_88 (14) = happyGoto action_128
action_88 (15) = happyGoto action_70
action_88 (18) = happyGoto action_71
action_88 _ = happyFail
action_89 (22) = happyShift action_6
action_89 (23) = happyShift action_72
action_89 (30) = happyShift action_73
action_89 (35) = happyShift action_74
action_89 (36) = happyShift action_75
action_89 (37) = happyShift action_76
action_89 (53) = happyShift action_77
action_89 (59) = happyShift action_7
action_89 (60) = happyShift action_78
action_89 (14) = happyGoto action_127
action_89 (15) = happyGoto action_70
action_89 (18) = happyGoto action_71
action_89 _ = happyFail
action_90 (22) = happyShift action_6
action_90 (23) = happyShift action_72
action_90 (30) = happyShift action_73
action_90 (35) = happyShift action_74
action_90 (36) = happyShift action_75
action_90 (37) = happyShift action_76
action_90 (53) = happyShift action_77
action_90 (59) = happyShift action_7
action_90 (60) = happyShift action_78
action_90 (14) = happyGoto action_126
action_90 (15) = happyGoto action_70
action_90 (18) = happyGoto action_71
action_90 _ = happyFail
action_91 (22) = happyShift action_6
action_91 (23) = happyShift action_72
action_91 (30) = happyShift action_73
action_91 (35) = happyShift action_74
action_91 (36) = happyShift action_75
action_91 (37) = happyShift action_76
action_91 (53) = happyShift action_77
action_91 (59) = happyShift action_7
action_91 (60) = happyShift action_78
action_91 (14) = happyGoto action_125
action_91 (15) = happyGoto action_70
action_91 (18) = happyGoto action_71
action_91 _ = happyFail
action_92 (22) = happyShift action_6
action_92 (23) = happyShift action_72
action_92 (30) = happyShift action_73
action_92 (35) = happyShift action_74
action_92 (36) = happyShift action_75
action_92 (37) = happyShift action_76
action_92 (53) = happyShift action_77
action_92 (59) = happyShift action_7
action_92 (60) = happyShift action_78
action_92 (14) = happyGoto action_124
action_92 (15) = happyGoto action_70
action_92 (18) = happyGoto action_71
action_92 _ = happyFail
action_93 (22) = happyShift action_6
action_93 (23) = happyShift action_72
action_93 (30) = happyShift action_73
action_93 (35) = happyShift action_74
action_93 (36) = happyShift action_75
action_93 (37) = happyShift action_76
action_93 (53) = happyShift action_77
action_93 (59) = happyShift action_7
action_93 (60) = happyShift action_78
action_93 (14) = happyGoto action_123
action_93 (15) = happyGoto action_70
action_93 (18) = happyGoto action_71
action_93 _ = happyFail
action_94 (22) = happyShift action_6
action_94 (23) = happyShift action_72
action_94 (30) = happyShift action_73
action_94 (35) = happyShift action_74
action_94 (36) = happyShift action_75
action_94 (37) = happyShift action_76
action_94 (53) = happyShift action_77
action_94 (59) = happyShift action_7
action_94 (60) = happyShift action_78
action_94 (14) = happyGoto action_122
action_94 (15) = happyGoto action_70
action_94 (18) = happyGoto action_71
action_94 _ = happyFail
action_95 (22) = happyShift action_6
action_95 (23) = happyShift action_72
action_95 (30) = happyShift action_73
action_95 (35) = happyShift action_74
action_95 (36) = happyShift action_75
action_95 (37) = happyShift action_76
action_95 (53) = happyShift action_77
action_95 (59) = happyShift action_7
action_95 (60) = happyShift action_78
action_95 (14) = happyGoto action_121
action_95 (15) = happyGoto action_70
action_95 (18) = happyGoto action_71
action_95 _ = happyFail
action_96 (52) = happyShift action_120
action_96 _ = happyFail
action_97 (22) = happyShift action_6
action_97 (25) = happyShift action_52
action_97 (27) = happyShift action_53
action_97 (28) = happyShift action_54
action_97 (57) = happyShift action_55
action_97 (59) = happyShift action_7
action_97 (12) = happyGoto action_119
action_97 (18) = happyGoto action_59
action_97 _ = happyFail
action_98 (22) = happyShift action_6
action_98 (25) = happyShift action_52
action_98 (27) = happyShift action_53
action_98 (28) = happyShift action_54
action_98 (57) = happyShift action_55
action_98 (59) = happyShift action_7
action_98 (12) = happyGoto action_118
action_98 (18) = happyGoto action_59
action_98 _ = happyFail
action_99 _ = happyReduce_22
action_100 (49) = happyShift action_117
action_100 _ = happyFail
action_101 (38) = happyShift action_85
action_101 (39) = happyShift action_86
action_101 (40) = happyShift action_87
action_101 (41) = happyShift action_88
action_101 (42) = happyShift action_89
action_101 (43) = happyShift action_90
action_101 (44) = happyShift action_91
action_101 (45) = happyShift action_92
action_101 (46) = happyShift action_93
action_101 (47) = happyShift action_94
action_101 (48) = happyShift action_95
action_101 (54) = happyShift action_116
action_101 _ = happyFail
action_102 _ = happyReduce_40
action_103 (53) = happyShift action_115
action_103 _ = happyFail
action_104 (55) = happyShift action_114
action_104 _ = happyFail
action_105 (22) = happyShift action_6
action_105 (29) = happyShift action_113
action_105 (59) = happyShift action_7
action_105 (18) = happyGoto action_112
action_105 _ = happyFail
action_106 (22) = happyShift action_6
action_106 (23) = happyShift action_72
action_106 (30) = happyShift action_73
action_106 (35) = happyShift action_74
action_106 (36) = happyShift action_75
action_106 (37) = happyShift action_76
action_106 (53) = happyShift action_77
action_106 (59) = happyShift action_7
action_106 (60) = happyShift action_78
action_106 (14) = happyGoto action_111
action_106 (15) = happyGoto action_70
action_106 (18) = happyGoto action_71
action_106 _ = happyFail
action_107 (58) = happyShift action_110
action_107 _ = happyFail
action_108 (58) = happyShift action_109
action_108 _ = happyFail
action_109 _ = happyReduce_2
action_110 (20) = happyShift action_20
action_110 (8) = happyGoto action_138
action_110 _ = happyReduce_8
action_111 (38) = happyShift action_85
action_111 (39) = happyShift action_86
action_111 (40) = happyShift action_87
action_111 (41) = happyShift action_88
action_111 (42) = happyShift action_89
action_111 (43) = happyShift action_90
action_111 (44) = happyShift action_91
action_111 (45) = happyShift action_92
action_111 (46) = happyShift action_93
action_111 (47) = happyShift action_94
action_111 (48) = happyShift action_95
action_111 (56) = happyShift action_137
action_111 _ = happyFail
action_112 (53) = happyShift action_136
action_112 _ = happyFail
action_113 _ = happyReduce_48
action_114 (22) = happyShift action_6
action_114 (23) = happyShift action_72
action_114 (30) = happyShift action_73
action_114 (35) = happyShift action_74
action_114 (36) = happyShift action_75
action_114 (37) = happyShift action_76
action_114 (53) = happyShift action_77
action_114 (59) = happyShift action_7
action_114 (60) = happyShift action_78
action_114 (14) = happyGoto action_135
action_114 (15) = happyGoto action_70
action_114 (18) = happyGoto action_71
action_114 _ = happyFail
action_115 (54) = happyShift action_134
action_115 _ = happyFail
action_116 _ = happyReduce_42
action_117 (22) = happyShift action_6
action_117 (23) = happyShift action_72
action_117 (30) = happyShift action_73
action_117 (35) = happyShift action_74
action_117 (36) = happyShift action_75
action_117 (37) = happyShift action_76
action_117 (53) = happyShift action_77
action_117 (59) = happyShift action_7
action_117 (60) = happyShift action_78
action_117 (14) = happyGoto action_133
action_117 (15) = happyGoto action_70
action_117 (18) = happyGoto action_71
action_117 _ = happyFail
action_118 (26) = happyShift action_132
action_118 _ = happyReduce_19
action_119 _ = happyReduce_20
action_120 _ = happyReduce_21
action_121 _ = happyReduce_30
action_122 (48) = happyShift action_95
action_122 _ = happyReduce_29
action_123 (48) = happyShift action_95
action_123 _ = happyReduce_28
action_124 (46) = happyShift action_93
action_124 (47) = happyShift action_94
action_124 (48) = happyShift action_95
action_124 _ = happyReduce_34
action_125 (45) = happyShift action_92
action_125 (46) = happyShift action_93
action_125 (47) = happyShift action_94
action_125 (48) = happyShift action_95
action_125 _ = happyReduce_33
action_126 (43) = happyFail
action_126 (44) = happyShift action_91
action_126 (45) = happyShift action_92
action_126 (46) = happyShift action_93
action_126 (47) = happyShift action_94
action_126 (48) = happyShift action_95
action_126 _ = happyReduce_36
action_127 (42) = happyFail
action_127 (43) = happyShift action_90
action_127 (44) = happyShift action_91
action_127 (45) = happyShift action_92
action_127 (46) = happyShift action_93
action_127 (47) = happyShift action_94
action_127 (48) = happyShift action_95
action_127 _ = happyReduce_35
action_128 (41) = happyFail
action_128 (42) = happyShift action_89
action_128 (43) = happyShift action_90
action_128 (44) = happyShift action_91
action_128 (45) = happyShift action_92
action_128 (46) = happyShift action_93
action_128 (47) = happyShift action_94
action_128 (48) = happyShift action_95
action_128 _ = happyReduce_32
action_129 (40) = happyFail
action_129 (41) = happyShift action_88
action_129 (42) = happyShift action_89
action_129 (43) = happyShift action_90
action_129 (44) = happyShift action_91
action_129 (45) = happyShift action_92
action_129 (46) = happyShift action_93
action_129 (47) = happyShift action_94
action_129 (48) = happyShift action_95
action_129 _ = happyReduce_31
action_130 (38) = happyShift action_85
action_130 (40) = happyShift action_87
action_130 (41) = happyShift action_88
action_130 (42) = happyShift action_89
action_130 (43) = happyShift action_90
action_130 (44) = happyShift action_91
action_130 (45) = happyShift action_92
action_130 (46) = happyShift action_93
action_130 (47) = happyShift action_94
action_130 (48) = happyShift action_95
action_130 _ = happyReduce_27
action_131 (40) = happyShift action_87
action_131 (41) = happyShift action_88
action_131 (42) = happyShift action_89
action_131 (43) = happyShift action_90
action_131 (44) = happyShift action_91
action_131 (45) = happyShift action_92
action_131 (46) = happyShift action_93
action_131 (47) = happyShift action_94
action_131 (48) = happyShift action_95
action_131 _ = happyReduce_26
action_132 (22) = happyShift action_6
action_132 (25) = happyShift action_52
action_132 (27) = happyShift action_53
action_132 (28) = happyShift action_54
action_132 (57) = happyShift action_55
action_132 (59) = happyShift action_7
action_132 (12) = happyGoto action_143
action_132 (18) = happyGoto action_59
action_132 _ = happyFail
action_133 (38) = happyShift action_85
action_133 (39) = happyShift action_86
action_133 (40) = happyShift action_87
action_133 (41) = happyShift action_88
action_133 (42) = happyShift action_89
action_133 (43) = happyShift action_90
action_133 (44) = happyShift action_91
action_133 (45) = happyShift action_92
action_133 (46) = happyShift action_93
action_133 (47) = happyShift action_94
action_133 (48) = happyShift action_95
action_133 (52) = happyShift action_142
action_133 _ = happyFail
action_134 _ = happyReduce_46
action_135 (38) = happyShift action_85
action_135 (39) = happyShift action_86
action_135 (40) = happyShift action_87
action_135 (41) = happyShift action_88
action_135 (42) = happyShift action_89
action_135 (43) = happyShift action_90
action_135 (44) = happyShift action_91
action_135 (45) = happyShift action_92
action_135 (46) = happyShift action_93
action_135 (47) = happyShift action_94
action_135 (48) = happyShift action_95
action_135 (56) = happyShift action_141
action_135 _ = happyFail
action_136 (22) = happyShift action_6
action_136 (23) = happyShift action_72
action_136 (30) = happyShift action_73
action_136 (35) = happyShift action_74
action_136 (36) = happyShift action_75
action_136 (37) = happyShift action_76
action_136 (53) = happyShift action_77
action_136 (59) = happyShift action_7
action_136 (60) = happyShift action_78
action_136 (14) = happyGoto action_139
action_136 (15) = happyGoto action_70
action_136 (16) = happyGoto action_140
action_136 (18) = happyGoto action_71
action_136 _ = happyReduce_51
action_137 _ = happyReduce_47
action_138 _ = happyReduce_7
action_139 (38) = happyShift action_85
action_139 (39) = happyShift action_86
action_139 (40) = happyShift action_87
action_139 (41) = happyShift action_88
action_139 (42) = happyShift action_89
action_139 (43) = happyShift action_90
action_139 (44) = happyShift action_91
action_139 (45) = happyShift action_92
action_139 (46) = happyShift action_93
action_139 (47) = happyShift action_94
action_139 (48) = happyShift action_95
action_139 (50) = happyShift action_146
action_139 (17) = happyGoto action_145
action_139 _ = happyReduce_53
action_140 (54) = happyShift action_144
action_140 _ = happyFail
action_141 _ = happyReduce_45
action_142 _ = happyReduce_23
action_143 _ = happyReduce_18
action_144 _ = happyReduce_49
action_145 _ = happyReduce_50
action_146 (22) = happyShift action_6
action_146 (23) = happyShift action_72
action_146 (30) = happyShift action_73
action_146 (35) = happyShift action_74
action_146 (36) = happyShift action_75
action_146 (37) = happyShift action_76
action_146 (53) = happyShift action_77
action_146 (59) = happyShift action_7
action_146 (60) = happyShift action_78
action_146 (14) = happyGoto action_147
action_146 (15) = happyGoto action_70
action_146 (18) = happyGoto action_71
action_146 _ = happyFail
action_147 (38) = happyShift action_85
action_147 (39) = happyShift action_86
action_147 (40) = happyShift action_87
action_147 (41) = happyShift action_88
action_147 (42) = happyShift action_89
action_147 (43) = happyShift action_90
action_147 (44) = happyShift action_91
action_147 (45) = happyShift action_92
action_147 (46) = happyShift action_93
action_147 (47) = happyShift action_94
action_147 (48) = happyShift action_95
action_147 (50) = happyShift action_146
action_147 (17) = happyGoto action_148
action_147 _ = happyReduce_53
action_148 _ = happyReduce_52
happyReduce_1 = happySpecReduce_2 4 happyReduction_1
happyReduction_1 (HappyAbsSyn6 happy_var_2)
(HappyAbsSyn5 happy_var_1)
= HappyAbsSyn4
(Fix . AProgram $ fixMap (happy_var_1 : happy_var_2)
)
happyReduction_1 _ _ = notHappyAtAll
happyReduce_2 = happyReduce 18 5 happyReduction_2
happyReduction_2 (_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn13 happy_var_16) `HappyStk`
(HappyAbsSyn7 happy_var_15) `HappyStk`
_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn18 happy_var_12) `HappyStk`
_ `HappyStk`
_ `HappyStk`
_ `HappyStk`
_ `HappyStk`
_ `HappyStk`
_ `HappyStk`
_ `HappyStk`
_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn18 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn5
(AClass happy_var_2 [] [Fix $ mainMethod happy_var_12 happy_var_15 happy_var_16]
) `HappyStk` happyRest
happyReduce_3 = happyReduce 7 6 happyReduction_3
happyReduction_3 ((HappyAbsSyn6 happy_var_7) `HappyStk`
_ `HappyStk`
(HappyAbsSyn8 happy_var_5) `HappyStk`
(HappyAbsSyn7 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn18 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn6
(AClass happy_var_2 (fixMap happy_var_4) (fixMap happy_var_5) : happy_var_7
) `HappyStk` happyRest
happyReduce_4 = happySpecReduce_0 6 happyReduction_4
happyReduction_4 = HappyAbsSyn6
([]
)
happyReduce_5 = happySpecReduce_0 7 happyReduction_5
happyReduction_5 = HappyAbsSyn7
([]
)
happyReduce_6 = happyReduce 4 7 happyReduction_6
happyReduction_6 (_ `HappyStk`
(HappyAbsSyn18 happy_var_3) `HappyStk`
(HappyAbsSyn11 happy_var_2) `HappyStk`
(HappyAbsSyn7 happy_var_1) `HappyStk`
happyRest)
= HappyAbsSyn7
(happy_var_1 ++ [AVar happy_var_2 happy_var_3]
) `HappyStk` happyRest
happyReduce_7 = happyReduce 14 8 happyReduction_7
happyReduction_7 ((HappyAbsSyn8 happy_var_14) `HappyStk`
_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn14 happy_var_11) `HappyStk`
_ `HappyStk`
(HappyAbsSyn13 happy_var_9) `HappyStk`
(HappyAbsSyn7 happy_var_8) `HappyStk`
_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn9 happy_var_5) `HappyStk`
_ `HappyStk`
(HappyAbsSyn18 happy_var_3) `HappyStk`
(HappyAbsSyn11 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn8
(AMethod happy_var_2 happy_var_3 (fixMap happy_var_5) (fixMap happy_var_8) (fixMap happy_var_9) (Fix happy_var_11) : happy_var_14
) `HappyStk` happyRest
happyReduce_8 = happySpecReduce_0 8 happyReduction_8
happyReduction_8 = HappyAbsSyn8
([]
)
happyReduce_9 = happySpecReduce_3 9 happyReduction_9
happyReduction_9 (HappyAbsSyn10 happy_var_3)
(HappyAbsSyn18 happy_var_2)
(HappyAbsSyn11 happy_var_1)
= HappyAbsSyn9
(AVar happy_var_1 happy_var_2 : happy_var_3
)
happyReduction_9 _ _ _ = notHappyAtAll
happyReduce_10 = happySpecReduce_0 9 happyReduction_10
happyReduction_10 = HappyAbsSyn9
([]
)
happyReduce_11 = happyReduce 4 10 happyReduction_11
happyReduction_11 ((HappyAbsSyn10 happy_var_4) `HappyStk`
(HappyAbsSyn18 happy_var_3) `HappyStk`
(HappyAbsSyn11 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn10
(AVar happy_var_2 happy_var_3 : happy_var_4
) `HappyStk` happyRest
happyReduce_12 = happySpecReduce_0 10 happyReduction_12
happyReduction_12 = HappyAbsSyn10
([]
)
happyReduce_13 = happySpecReduce_3 11 happyReduction_13
happyReduction_13 _
_
_
= HappyAbsSyn11
(TypeIntegerArray
)
happyReduce_14 = happySpecReduce_1 11 happyReduction_14
happyReduction_14 _
= HappyAbsSyn11
(TypeBoolean
)
happyReduce_15 = happySpecReduce_1 11 happyReduction_15
happyReduction_15 _
= HappyAbsSyn11
(TypeInteger
)
happyReduce_16 = happySpecReduce_1 11 happyReduction_16
happyReduction_16 (HappyAbsSyn18 happy_var_1)
= HappyAbsSyn11
(TypeAppDefined happy_var_1
)
happyReduction_16 _ = notHappyAtAll
happyReduce_17 = happySpecReduce_3 12 happyReduction_17
happyReduction_17 _
(HappyAbsSyn13 happy_var_2)
_
= HappyAbsSyn12
(AStatScope (fixMap happy_var_2)
)
happyReduction_17 _ _ _ = notHappyAtAll
happyReduce_18 = happyReduce 7 12 happyReduction_18
happyReduction_18 ((HappyAbsSyn12 happy_var_7) `HappyStk`
_ `HappyStk`
(HappyAbsSyn12 happy_var_5) `HappyStk`
_ `HappyStk`
(HappyAbsSyn14 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn12
(AIf (Fix happy_var_3) (Fix happy_var_5) (Fix happy_var_7)
) `HappyStk` happyRest
happyReduce_19 = happyReduce 5 12 happyReduction_19
happyReduction_19 ((HappyAbsSyn12 happy_var_5) `HappyStk`
_ `HappyStk`
(HappyAbsSyn14 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn12
(AIf (Fix happy_var_3) (Fix happy_var_5) (Fix $ AStatScope [])
) `HappyStk` happyRest
happyReduce_20 = happyReduce 5 12 happyReduction_20
happyReduction_20 ((HappyAbsSyn12 happy_var_5) `HappyStk`
_ `HappyStk`
(HappyAbsSyn14 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn12
(AWhile (Fix happy_var_3) (Fix happy_var_5)
) `HappyStk` happyRest
happyReduce_21 = happyReduce 5 12 happyReduction_21
happyReduction_21 (_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn14 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn12
(APrint (Fix happy_var_3)
) `HappyStk` happyRest
happyReduce_22 = happyReduce 4 12 happyReduction_22
happyReduction_22 (_ `HappyStk`
(HappyAbsSyn14 happy_var_3) `HappyStk`
_ `HappyStk`
(HappyAbsSyn18 happy_var_1) `HappyStk`
happyRest)
= HappyAbsSyn12
(AAssignment (Fix $ AExprIdentifier happy_var_1) (Fix happy_var_3)
) `HappyStk` happyRest
happyReduce_23 = happyReduce 7 12 happyReduction_23
happyReduction_23 (_ `HappyStk`
(HappyAbsSyn14 happy_var_6) `HappyStk`
_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn14 happy_var_3) `HappyStk`
_ `HappyStk`
(HappyAbsSyn18 happy_var_1) `HappyStk`
happyRest)
= HappyAbsSyn12
(AIndexedAssignment (Fix $ AExprIdentifier happy_var_1) (Fix happy_var_3) (Fix happy_var_6)
) `HappyStk` happyRest
happyReduce_24 = happySpecReduce_2 13 happyReduction_24
happyReduction_24 (HappyAbsSyn13 happy_var_2)
(HappyAbsSyn12 happy_var_1)
= HappyAbsSyn13
(happy_var_1 : happy_var_2
)
happyReduction_24 _ _ = notHappyAtAll
happyReduce_25 = happySpecReduce_0 13 happyReduction_25
happyReduction_25 = HappyAbsSyn13
([]
)
happyReduce_26 = happySpecReduce_3 14 happyReduction_26
happyReduction_26 (HappyAbsSyn14 happy_var_3)
_
(HappyAbsSyn14 happy_var_1)
= HappyAbsSyn14
(AExprOp OperandLogicalAnd (Fix happy_var_1) (Fix happy_var_3)
)
happyReduction_26 _ _ _ = notHappyAtAll
happyReduce_27 = happySpecReduce_3 14 happyReduction_27
happyReduction_27 (HappyAbsSyn14 happy_var_3)
_
(HappyAbsSyn14 happy_var_1)
= HappyAbsSyn14
(AExprOp OperandLogicalOr (Fix happy_var_1) (Fix happy_var_3)
)
happyReduction_27 _ _ _ = notHappyAtAll
happyReduce_28 = happySpecReduce_3 14 happyReduction_28
happyReduction_28 (HappyAbsSyn14 happy_var_3)
_
(HappyAbsSyn14 happy_var_1)
= HappyAbsSyn14
(AExprOp OperandPlus (Fix happy_var_1) (Fix happy_var_3)
)
happyReduction_28 _ _ _ = notHappyAtAll
happyReduce_29 = happySpecReduce_3 14 happyReduction_29
happyReduction_29 (HappyAbsSyn14 happy_var_3)
_
(HappyAbsSyn14 happy_var_1)
= HappyAbsSyn14
(AExprOp OperandMinus (Fix happy_var_1) (Fix happy_var_3)
)
happyReduction_29 _ _ _ = notHappyAtAll
happyReduce_30 = happySpecReduce_3 14 happyReduction_30
happyReduction_30 (HappyAbsSyn14 happy_var_3)
_
(HappyAbsSyn14 happy_var_1)
= HappyAbsSyn14
(AExprOp OperandMult (Fix happy_var_1) (Fix happy_var_3)
)
happyReduction_30 _ _ _ = notHappyAtAll
happyReduce_31 = happySpecReduce_3 14 happyReduction_31
happyReduction_31 (HappyAbsSyn14 happy_var_3)
_
(HappyAbsSyn14 happy_var_1)
= HappyAbsSyn14
(AExprOp OperandLess (Fix happy_var_1) (Fix happy_var_3)
)
happyReduction_31 _ _ _ = notHappyAtAll
happyReduce_32 = happySpecReduce_3 14 happyReduction_32
happyReduction_32 (HappyAbsSyn14 happy_var_3)
_
(HappyAbsSyn14 happy_var_1)
= HappyAbsSyn14
(AExprOp OperandLess (Fix happy_var_3) (Fix happy_var_1)
)
happyReduction_32 _ _ _ = notHappyAtAll
happyReduce_33 = happySpecReduce_3 14 happyReduction_33
happyReduction_33 (HappyAbsSyn14 happy_var_3)
_
(HappyAbsSyn14 happy_var_1)
= HappyAbsSyn14
(AExprOp OperandEqual (Fix happy_var_1) (Fix happy_var_3)
)
happyReduction_33 _ _ _ = notHappyAtAll
happyReduce_34 = happySpecReduce_3 14 happyReduction_34
happyReduction_34 (HappyAbsSyn14 happy_var_3)
_
(HappyAbsSyn14 happy_var_1)
= HappyAbsSyn14
(AExprNegation (Fix $ AExprOp OperandEqual (Fix happy_var_1) (Fix happy_var_3))
)
happyReduction_34 _ _ _ = notHappyAtAll
happyReduce_35 = happySpecReduce_3 14 happyReduction_35
happyReduction_35 (HappyAbsSyn14 happy_var_3)
_
(HappyAbsSyn14 happy_var_1)
= HappyAbsSyn14
(AExprOp OperandLessEqual (Fix happy_var_1) (Fix happy_var_3)
)
happyReduction_35 _ _ _ = notHappyAtAll
happyReduce_36 = happySpecReduce_3 14 happyReduction_36
happyReduction_36 (HappyAbsSyn14 happy_var_3)
_
(HappyAbsSyn14 happy_var_1)
= HappyAbsSyn14
(AExprOp OperandLessEqual (Fix happy_var_3) (Fix happy_var_1)
)
happyReduction_36 _ _ _ = notHappyAtAll
happyReduce_37 = happySpecReduce_1 14 happyReduction_37
happyReduction_37 (HappyTerminal (TIntLiteral happy_var_1))
= HappyAbsSyn14
(AExprInt happy_var_1
)
happyReduction_37 _ = notHappyAtAll
happyReduce_38 = happySpecReduce_1 14 happyReduction_38
happyReduction_38 _
= HappyAbsSyn14
(AExprTrue
)
happyReduce_39 = happySpecReduce_1 14 happyReduction_39
happyReduction_39 _
= HappyAbsSyn14
(AExprFalse
)
happyReduce_40 = happySpecReduce_2 14 happyReduction_40
happyReduction_40 (HappyAbsSyn14 happy_var_2)
_
= HappyAbsSyn14
(AExprNegation (Fix happy_var_2)
)
happyReduction_40 _ _ = notHappyAtAll
happyReduce_41 = happySpecReduce_1 14 happyReduction_41
happyReduction_41 (HappyAbsSyn15 happy_var_1)
= HappyAbsSyn14
(happy_var_1
)
happyReduction_41 _ = notHappyAtAll
happyReduce_42 = happySpecReduce_3 15 happyReduction_42
happyReduction_42 _
(HappyAbsSyn14 happy_var_2)
_
= HappyAbsSyn15
(happy_var_2
)
happyReduction_42 _ _ _ = notHappyAtAll
happyReduce_43 = happySpecReduce_1 15 happyReduction_43
happyReduction_43 (HappyAbsSyn18 happy_var_1)
= HappyAbsSyn15
(AExprIdentifier happy_var_1
)
happyReduction_43 _ = notHappyAtAll
happyReduce_44 = happySpecReduce_1 15 happyReduction_44
happyReduction_44 _
= HappyAbsSyn15
(AExprThis
)
happyReduce_45 = happyReduce 5 15 happyReduction_45
happyReduction_45 (_ `HappyStk`
(HappyAbsSyn14 happy_var_4) `HappyStk`
_ `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn15
(AExprIntArray (Fix happy_var_4)
) `HappyStk` happyRest
happyReduce_46 = happyReduce 4 15 happyReduction_46
happyReduction_46 (_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn18 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn15
(AExprNewObject happy_var_2
) `HappyStk` happyRest
happyReduce_47 = happyReduce 4 15 happyReduction_47
happyReduction_47 (_ `HappyStk`
(HappyAbsSyn14 happy_var_3) `HappyStk`
_ `HappyStk`
(HappyAbsSyn15 happy_var_1) `HappyStk`
happyRest)
= HappyAbsSyn15
(AExprList (Fix happy_var_1) (Fix happy_var_3)
) `HappyStk` happyRest
happyReduce_48 = happySpecReduce_3 15 happyReduction_48
happyReduction_48 _
_
(HappyAbsSyn15 happy_var_1)
= HappyAbsSyn15
(AExprLength (Fix happy_var_1)
)
happyReduction_48 _ _ _ = notHappyAtAll
happyReduce_49 = happyReduce 6 15 happyReduction_49
happyReduction_49 (_ `HappyStk`
(HappyAbsSyn16 happy_var_5) `HappyStk`
_ `HappyStk`
(HappyAbsSyn18 happy_var_3) `HappyStk`
_ `HappyStk`
(HappyAbsSyn15 happy_var_1) `HappyStk`
happyRest)
= HappyAbsSyn15
(AExprInvocation (Fix happy_var_1) happy_var_3 (fixMap happy_var_5)
) `HappyStk` happyRest
happyReduce_50 = happySpecReduce_2 16 happyReduction_50
happyReduction_50 (HappyAbsSyn17 happy_var_2)
(HappyAbsSyn14 happy_var_1)
= HappyAbsSyn16
(happy_var_1 : happy_var_2
)
happyReduction_50 _ _ = notHappyAtAll
happyReduce_51 = happySpecReduce_0 16 happyReduction_51
happyReduction_51 = HappyAbsSyn16
([]
)
happyReduce_52 = happySpecReduce_3 17 happyReduction_52
happyReduction_52 (HappyAbsSyn17 happy_var_3)
(HappyAbsSyn14 happy_var_2)
_
= HappyAbsSyn17
(happy_var_2 : happy_var_3
)
happyReduction_52 _ _ _ = notHappyAtAll
happyReduce_53 = happySpecReduce_0 17 happyReduction_53
happyReduction_53 = HappyAbsSyn17
([]
)
happyReduce_54 = happySpecReduce_1 18 happyReduction_54
happyReduction_54 (HappyTerminal (TIdLiteral happy_var_1))
= HappyAbsSyn18
(happy_var_1
)
happyReduction_54 _ = notHappyAtAll
happyReduce_55 = happySpecReduce_1 18 happyReduction_55
happyReduction_55 _
= HappyAbsSyn18
("main"
)
happyNewToken action sts stk [] =
action 61 61 notHappyAtAll (HappyState action) sts stk []
happyNewToken action sts stk (tk:tks) =
let cont i = action i i tk (HappyState action) sts stk tks in
case tk of {
TClass -> cont 19;
TPublic -> cont 20;
TStatic -> cont 21;
TMain -> cont 22;
TNew -> cont 23;
TReturn -> cont 24;
TIf -> cont 25;
TElse -> cont 26;
TWhile -> cont 27;
TPrint -> cont 28;
TLength -> cont 29;
TThis -> cont 30;
TVoid -> cont 31;
TString -> cont 32;
TInt -> cont 33;
TBoolean -> cont 34;
TTrue -> cont 35;
TFalse -> cont 36;
TNegation -> cont 37;
TLogicAnd -> cont 38;
TLogicOr -> cont 39;
TCompareLess -> cont 40;
TCompareGreater -> cont 41;
TCompareLessEqual -> cont 42;
TCompareGreaterEqual -> cont 43;
TCompareEqual -> cont 44;
TCompareNotEqual -> cont 45;
TAdd -> cont 46;
TSub -> cont 47;
TMul -> cont 48;
TAssignment -> cont 49;
TComma -> cont 50;
TDot -> cont 51;
TSemiColon -> cont 52;
TLeftParen -> cont 53;
TRightParen -> cont 54;
TLeftBracket -> cont 55;
TRightBracket -> cont 56;
TLeftBrace -> cont 57;
TRightBrace -> cont 58;
TIdLiteral happy_dollar_dollar -> cont 59;
TIntLiteral happy_dollar_dollar -> cont 60;
_ -> happyError' (tk:tks)
}
happyError_ 61 tk tks = happyError' tks
happyError_ _ tk tks = happyError' (tk:tks)
newtype HappyIdentity a = HappyIdentity a
happyIdentity = HappyIdentity
happyRunIdentity (HappyIdentity a) = a
instance Monad HappyIdentity where
return = HappyIdentity
(HappyIdentity p) >>= q = q p
happyThen :: () => HappyIdentity a -> (a -> HappyIdentity b) -> HappyIdentity b
happyThen = (>>=)
happyReturn :: () => a -> HappyIdentity a
happyReturn = (return)
happyThen1 m k tks = (>>=) m (\a -> k a tks)
happyReturn1 :: () => a -> b -> HappyIdentity a
happyReturn1 = \a tks -> (return) a
happyError' :: () => [(Token)] -> HappyIdentity a
happyError' = HappyIdentity . parserError
parseMiniJava tks = happyRunIdentity happySomeParser where
happySomeParser = happyThen (happyParse action_0 tks) (\x -> case x of {HappyAbsSyn4 z -> happyReturn z; _other -> notHappyAtAll })
happySeq = happyDontSeq
mainMethod arg vars code = AMethod TypeVoid "main" [Fix $ AVar TypeVoid arg] (fixMap vars) (fixMap code) (Fix AExprVoid)
fixMap = map Fix
parserError :: [Token] -> a
parserError (t1:t2:t3:t4:t5:_) = error $ "Parse error, next tokens:" ++ renderTokens [t1, t2, t3, t4, t5]
parserError tokens = error $ "Parse error @eof:" ++ renderTokens tokens
renderTokens = concatMap ((" " ++) . show)
{-# LINE 1 "templates/GenericTemplate.hs" #-}
{-# LINE 1 "templates/GenericTemplate.hs" #-}
{-# LINE 1 "<command-line>" #-}
{-# LINE 1 "templates/GenericTemplate.hs" #-}
-- Id: GenericTemplate.hs,v 1.26 2005/01/14 14:47:22 simonmar Exp
{-# LINE 30 "templates/GenericTemplate.hs" #-}
{-# LINE 51 "templates/GenericTemplate.hs" #-}
{-# LINE 61 "templates/GenericTemplate.hs" #-}
{-# LINE 70 "templates/GenericTemplate.hs" #-}
infixr 9 `HappyStk`
data HappyStk a = HappyStk a (HappyStk a)
-----------------------------------------------------------------------------
-- starting the parse
happyParse start_state = happyNewToken start_state notHappyAtAll notHappyAtAll
-----------------------------------------------------------------------------
-- Accepting the parse
-- If the current token is (1), it means we've just accepted a partial
-- parse (a %partial parser). We must ignore the saved token on the top of
-- the stack in this case.
happyAccept (1) tk st sts (_ `HappyStk` ans `HappyStk` _) =
happyReturn1 ans
happyAccept j tk st sts (HappyStk ans _) =
(happyReturn1 ans)
-----------------------------------------------------------------------------
-- Arrays only: do the next action
{-# LINE 148 "templates/GenericTemplate.hs" #-}
-----------------------------------------------------------------------------
-- HappyState data type (not arrays)
newtype HappyState b c = HappyState
(Int -> -- token number
Int -> -- token number (yes, again)
b -> -- token semantic value
HappyState b c -> -- current state
[HappyState b c] -> -- state stack
c)
-----------------------------------------------------------------------------
-- Shifting a token
happyShift new_state (1) tk st sts stk@(x `HappyStk` _) =
let (i) = (case x of { HappyErrorToken (i) -> i }) in
-- trace "shifting the error token" $
new_state i i tk (HappyState (new_state)) ((st):(sts)) (stk)
happyShift new_state i tk st sts stk =
happyNewToken new_state ((st):(sts)) ((HappyTerminal (tk))`HappyStk`stk)
-- happyReduce is specialised for the common cases.
happySpecReduce_0 i fn (1) tk st sts stk
= happyFail (1) tk st sts stk
happySpecReduce_0 nt fn j tk st@((HappyState (action))) sts stk
= action nt j tk st ((st):(sts)) (fn `HappyStk` stk)
happySpecReduce_1 i fn (1) tk st sts stk
= happyFail (1) tk st sts stk
happySpecReduce_1 nt fn j tk _ sts@(((st@(HappyState (action))):(_))) (v1`HappyStk`stk')
= let r = fn v1 in
happySeq r (action nt j tk st sts (r `HappyStk` stk'))
happySpecReduce_2 i fn (1) tk st sts stk
= happyFail (1) tk st sts stk
happySpecReduce_2 nt fn j tk _ ((_):(sts@(((st@(HappyState (action))):(_))))) (v1`HappyStk`v2`HappyStk`stk')
= let r = fn v1 v2 in
happySeq r (action nt j tk st sts (r `HappyStk` stk'))
happySpecReduce_3 i fn (1) tk st sts stk
= happyFail (1) tk st sts stk
happySpecReduce_3 nt fn j tk _ ((_):(((_):(sts@(((st@(HappyState (action))):(_))))))) (v1`HappyStk`v2`HappyStk`v3`HappyStk`stk')
= let r = fn v1 v2 v3 in
happySeq r (action nt j tk st sts (r `HappyStk` stk'))
happyReduce k i fn (1) tk st sts stk
= happyFail (1) tk st sts stk
happyReduce k nt fn j tk st sts stk
= case happyDrop (k - ((1) :: Int)) sts of
sts1@(((st1@(HappyState (action))):(_))) ->
let r = fn stk in -- it doesn't hurt to always seq here...
happyDoSeq r (action nt j tk st1 sts1 r)
happyMonadReduce k nt fn (1) tk st sts stk
= happyFail (1) tk st sts stk
happyMonadReduce k nt fn j tk st sts stk =
happyThen1 (fn stk tk) (\r -> action nt j tk st1 sts1 (r `HappyStk` drop_stk))
where (sts1@(((st1@(HappyState (action))):(_)))) = happyDrop k ((st):(sts))
drop_stk = happyDropStk k stk
happyMonad2Reduce k nt fn (1) tk st sts stk
= happyFail (1) tk st sts stk
happyMonad2Reduce k nt fn j tk st sts stk =
happyThen1 (fn stk tk) (\r -> happyNewToken new_state sts1 (r `HappyStk` drop_stk))
where (sts1@(((st1@(HappyState (action))):(_)))) = happyDrop k ((st):(sts))
drop_stk = happyDropStk k stk
new_state = action
happyDrop (0) l = l
happyDrop n ((_):(t)) = happyDrop (n - ((1) :: Int)) t
happyDropStk (0) l = l
happyDropStk n (x `HappyStk` xs) = happyDropStk (n - ((1)::Int)) xs
-----------------------------------------------------------------------------
-- Moving to a new state after a reduction
{-# LINE 246 "templates/GenericTemplate.hs" #-}
happyGoto action j tk st = action j j tk (HappyState action)
-----------------------------------------------------------------------------
-- Error recovery ((1) is the error token)
-- parse error if we are in recovery and we fail again
happyFail (1) tk old_st _ stk@(x `HappyStk` _) =
let (i) = (case x of { HappyErrorToken (i) -> i }) in
-- trace "failing" $
happyError_ i tk
{- We don't need state discarding for our restricted implementation of
"error". In fact, it can cause some bogus parses, so I've disabled it
for now --SDM
-- discard a state
happyFail (1) tk old_st (((HappyState (action))):(sts))
(saved_tok `HappyStk` _ `HappyStk` stk) =
-- trace ("discarding state, depth " ++ show (length stk)) $
action (1) (1) tk (HappyState (action)) sts ((saved_tok`HappyStk`stk))
-}
-- Enter error recovery: generate an error token,
-- save the old token and carry on.
happyFail i tk (HappyState (action)) sts stk =
-- trace "entering error recovery" $
action (1) (1) tk (HappyState (action)) sts ( (HappyErrorToken (i)) `HappyStk` stk)
-- Internal happy errors:
notHappyAtAll :: a
notHappyAtAll = error "Internal Happy error\n"
-----------------------------------------------------------------------------
-- Hack to get the typechecker to accept our action functions
-----------------------------------------------------------------------------
-- Seq-ing. If the --strict flag is given, then Happy emits
-- happySeq = happyDoSeq
-- otherwise it emits
-- happySeq = happyDontSeq
happyDoSeq, happyDontSeq :: a -> b -> b
happyDoSeq a b = a `seq` b
happyDontSeq a b = b
-----------------------------------------------------------------------------
-- Don't inline any functions from the template. GHC has a nasty habit
-- of deciding to inline happyGoto everywhere, which increases the size of
-- the generated parser quite a bit.
{-# LINE 312 "templates/GenericTemplate.hs" #-}
{-# NOINLINE happyShift #-}
{-# NOINLINE happySpecReduce_0 #-}
{-# NOINLINE happySpecReduce_1 #-}
{-# NOINLINE happySpecReduce_2 #-}
{-# NOINLINE happySpecReduce_3 #-}
{-# NOINLINE happyReduce #-}
{-# NOINLINE happyMonadReduce #-}
{-# NOINLINE happyGoto #-}
{-# NOINLINE happyFail #-}
-- end of Happy Template.
|
davnils/minijava-compiler
|
src/Parser.hs
|
Haskell
|
bsd-3-clause
| 52,493
|
{-# LANGUAGE OverloadedStrings #-}
module MateVMRuntime.Utilities where
import Data.Word
import qualified Data.Map as M
import qualified Data.ByteString.Lazy as B
import Data.List
import JVM.ClassFile
import Data.IORef
import System.IO.Unsafe
import MateVMRuntime.Types
import MateVMRuntime.NativeSizes
import System.CPUTime
import Text.Printf
import MateVMRuntime.Debug
buildMethodID :: Class Direct -> Word16 -> MethodInfo
buildMethodID cls idx = MethodInfo (ntName nt) rc (ntSignature nt)
where
(rc, nt) = case constsPool cls M.! idx of
(CMethod rc' nt') -> (rc', nt')
(CIfaceMethod rc' nt') -> (rc', nt')
_ -> error "buildMethodID: something wrong. abort."
buildFieldOffset :: Class Direct -> Word16 -> (B.ByteString, B.ByteString)
buildFieldOffset cls idx = (rc, ntName fnt)
where (CField rc fnt) = constsPool cls M.! idx
buildClassID :: Class Direct -> Word16 -> B.ByteString
buildClassID cls idx = cl
where (CClass cl) = constsPool cls M.! idx
methodNameTypeByIdx :: Class Direct -> Word16 -> NameType (Method Direct)
methodNameTypeByIdx cls idx = case constsPool cls M.! idx of
(CMethod _ nt') -> nt'
(CIfaceMethod _ nt') -> nt'
_ -> error "methodGetArgsCount: something wrong. abort."
methodGetArgsCount :: NameType (Method Direct) -> NativeWord
methodGetArgsCount nt = genericLength args
where (MethodSignature args _) = ntSignature nt
lookupMethodWithSig :: B.ByteString -> MethodSignature -> Class Direct -> Maybe (Method Direct)
lookupMethodWithSig name sig cls =
find (\x -> methodName x == name && methodSignature x == sig) $ classMethods cls
checkNothing :: String -> Maybe a -> a
checkNothing m Nothing = error m
checkNothing _ (Just v) = v
compileTime :: IORef Integer
{-# NOINLINE compileTime #-}
compileTime = unsafePerformIO $ newIORef 0
-- measure time, from http://www.haskell.org/haskellwiki/Timing_computations
time :: String -> IO t -> IO t
time desc a = do
start <- getCPUTime
v <- a
end <- getCPUTime
let diff = end - start
if (isPrefixOf "compile" desc)
then do
ct <- readIORef compileTime
writeIORef compileTime $ ct + diff
else do
printfTime $ printf "%s: %0.6f\n" desc (((fromIntegral diff) / (10^12)) :: Double)
return v
printCompileTime :: IO ()
printCompileTime = do
ct <- readIORef compileTime
printfTime $ printf "compiletime: %0.6f\n" ((fromIntegral ct) / (10^12) :: Double)
|
LouisJenkinsCS/Minimal-JVM
|
MateVMRuntime/Utilities.hs
|
Haskell
|
bsd-3-clause
| 2,414
|
{-# LANGUAGE LambdaCase #-}
module ShaderRick where
import Graphics.GL.Pal
import Data.IORef
import Control.Monad.Trans
shaderRecompiler :: MonadIO m => FilePath -> FilePath -> (Program -> IO r) -> m (IO (r, String))
shaderRecompiler vertShaderPath fragShaderPath makeResult = liftIO $ do
(shader, anyError) <- createShaderProgram' vertShaderPath fragShaderPath
result <- makeResult shader
resultRef <- newIORef (result, anyError)
lookForChange <- watchFiles [vertShaderPath, fragShaderPath]
return $ do
lookForChange >>= \case
Nothing -> return ()
Just _ -> do
(newShader, newError) <- createShaderProgram' vertShaderPath fragShaderPath
goodResult <- if null newError
then makeResult newShader
else fst <$> readIORef resultRef
writeIORef resultRef (goodResult, newError)
readIORef resultRef
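-- Flow note (comment only): the IO action returned by 'shaderRecompiler' is
-- meant to be polled repeatedly (e.g. once per frame). While the watched files
-- are unchanged it simply returns the cached (result, error) pair; when either
-- file changes it recompiles and rebuilds the result, keeping the previous
-- result whenever the new shader fails to compile.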
|
lukexi/tinyrick
|
src/ShaderRick.hs
|
Haskell
|
bsd-3-clause
| 888
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Main where
import Control.DeepSeq
import Criterion.Main
import Data.Binary
import Data.ByteString.Lazy (ByteString)
import Data.Group
import Data.Maybe
import MCL.Curves.Fp254BNb
main :: IO ()
main = defaultMain
[ bgroup "Fp"
[ benchFpArith fp_a fp_b
, bench "hash_to" $ nf hashToFp "54o6vyua2984v357b35n63"
, bgroup "from_integer"
[ bench "small" $ nf mkFp small_integer
, bench "large" $ nf mkFp large_integer
]
, bench "eq" $ nf (uncurry (==)) (fp_a, fp_b)
, bench "to_integer" $ nf fromFp fp_a
, bench "is_zero" $ nf fp_isZero fp_a
, bench "sqrt" $ nf fp_squareRoot fp_a
, bench "show" $ nf show fp_a
, benchBinary fp_a
]
, bgroup "Fp2"
[ benchFpArith fp2_a fp2_b
, bench "from_base" $ nf (uncurry mkFp2) (fp_a, fp_b)
, bench "eq" $ nf (uncurry (==)) (fp2_a, fp2_b)
, bench "is_zero" $ nf fp_isZero fp_a
, bench "sqrt" $ nf fp2_squareRoot fp2_a
, bench "show" $ nf show fp2_a
, benchBinary fp2_a
]
, bgroup "Fp12"
[ benchFpArith fp12_a fp12_b
, bench "eq" $ nf (uncurry (==)) (fp12_a, fp12_b)
, bench "is_zero" $ nf fp12_isZero fp12_a
, bench "show" $ nf show fp12_a
, benchBinary fp12_a
]
, bgroup "Fr"
[ benchFpArith fr_a fr_b
, bench "hash_to" $ nf hashToFr "6mn8o8rmn634wcxq354x31"
, bgroup "from_integer"
[ bench "small" $ nf mkFr small_integer
, bench "large" $ nf mkFr large_integer
]
, bench "eq" $ nf (uncurry (==)) (fr_a, fr_b)
, bench "to_integer" $ nf fromFr fr_a
, bench "is_zero" $ nf fr_isZero fr_a
, bench "show" $ nf show fr_a
, benchBinary fr_a
]
, bgroup "G1"
[ benchGroupArith g1_powFr g1_p g1_q
, bench "construct" $ nf (uncurry mkG1) (g1_p_x, g1_p_y)
, bench "map_to" $ nf mapToG1 fp_a
, bench "eq" $ nf (uncurry (==)) (g1_p, g1_q)
, bench "is_zero" $ nf g1_isZero g1_p
, bench "affine" $ nf g1_affineCoords g1_p
, bench "show" $ nf show g1_p
, benchBinary g1_p
]
, bgroup "G2"
[ benchGroupArith g2_powFr g2_p g2_q
, bench "construct" $ nf (uncurry mkG2) (g2_p_x, g2_p_y)
, bench "map_to" $ nf mapToG2 fp2_a
, bench "eq" $ nf (uncurry (==)) (g2_p, g2_q)
, bench "is_zero" $ nf g2_isZero g2_p
, bench "affine" $ nf g2_affineCoords g2_p
, bench "show" $ nf show g2_p
, benchBinary g2_p
]
, bgroup "GT"
[ bench "pow" $ nf (uncurry pow) (gt_a, large_integer)
, bench "pow_native" $ nf (uncurry gt_powFr) (gt_a, large_integer_fr)
]
, bgroup "pairing"
[ bench "compute1" $ nf (uncurry pairing) (g1_p, g2_q)
, bench "compute2" $ nf (uncurry pairing) (g1_q, g2_p)
]
]
----------------------------------------
benchFpArith :: (Fractional a, NFData a) => a -> a -> Benchmark
benchFpArith a b = bgroup "arith"
[ bench "add" $ nf (uncurry (+)) (a, b)
, bench "subtract" $ nf (uncurry (-)) (a, b)
, bench "multiply" $ nf (uncurry (*)) (a, b)
, bench "negate" $ nf negate a
, bench "invert" $ nf recip a
]
benchGroupArith :: (Group g, NFData g) => (g -> Fr -> g) -> g -> g -> Benchmark
benchGroupArith fpow p q = bgroup "arith"
[ bench "add" $ nf (uncurry mappend) (p, q)
, bench "invert" $ nf invert p
, bgroup "mul"
[ bench "small" $ nf (uncurry pow) (p, small_integer)
, bench "large" $ nf (uncurry pow) (p, large_integer)
, bench "native" $ nf (uncurry fpow) (p, large_integer_fr)
]
]
benchBinary :: forall a. (Binary a, NFData a) => a -> Benchmark
benchBinary a = bgroup "binary"
[ bench "put" $ nf encode a
, bench "get" $ nf (decode :: ByteString -> a) (encode a)
]
----------------------------------------
fr_a, fr_b :: Fr
fr_a = hashToFr "a"
fr_b = hashToFr "b"
fp_a, fp_b :: Fp
fp_a = hashToFp "a"
fp_b = hashToFp "b"
fp2_a, fp2_b :: Fp2
fp2_a = mkFp2 (hashToFp "a") (hashToFp "b")
fp2_b = mkFp2 (hashToFp "c") (hashToFp "d")
fp12_a, fp12_b :: Fp12
fp12_a = mkFp12 (mkFp2 (hashToFp "a") (hashToFp "b"))
(mkFp2 (hashToFp "c") (hashToFp "d"))
(mkFp2 (hashToFp "e") (hashToFp "f"))
(mkFp2 (hashToFp "g") (hashToFp "h"))
(mkFp2 (hashToFp "i") (hashToFp "j"))
(mkFp2 (hashToFp "k") (hashToFp "l"))
fp12_b = mkFp12 (mkFp2 (hashToFp "m") (hashToFp "n"))
(mkFp2 (hashToFp "o") (hashToFp "p"))
(mkFp2 (hashToFp "q") (hashToFp "r"))
(mkFp2 (hashToFp "s") (hashToFp "t"))
(mkFp2 (hashToFp "u") (hashToFp "v"))
(mkFp2 (hashToFp "w") (hashToFp "x"))
----------------------------------------
g1_p, g1_q :: G1
g1_p = mapToG1 fp_a
g1_q = mapToG1 fp_b
g1_p_x, g1_p_y :: Fp
(g1_p_x, g1_p_y) = fromJust $ g1_affineCoords g1_p
----------------------------------------
g2_p, g2_q :: G2
g2_p = mapToG2 fp2_a
g2_q = mapToG2 fp2_b
g2_p_x, g2_p_y :: Fp2
(g2_p_x, g2_p_y) = fromJust $ g2_affineCoords g2_p
gt_a :: GT
gt_a = pairing g1_p g2_q
----------------------------------------
small_integer, large_integer :: Integer
small_integer = 42
large_integer = fr_modulus `quot` 2
large_integer_fr :: Fr
large_integer_fr = mkFr large_integer
|
arybczak/haskell-mcl
|
benchmark/Main.hs
|
Haskell
|
bsd-3-clause
| 5,452
|
{-# LANGUAGE NoMonomorphismRestriction #-}
-------------------------------------------------------------------------------------
-- |
-- Copyright : (c) Hans Hoglund 2012
--
-- License : BSD-style
--
-- Maintainer : hans@hanshoglund.se
-- Stability : experimental
-- Portability : portable
--
-- Provides overloaded pitch literals.
--
-------------------------------------------------------------------------------------
module Music.Pitch.Literal.Pitch (
-- * IsPitch class
IsPitch(..),
-- * Literal values
-- ** Four octaves up
cs'''', ds'''', es'''', fs'''', gs'''', as'''', bs'''',
c'''' , d'''' , e'''' , f'''' , g'''' , a'''' , b'''' ,
cb'''', db'''', eb'''', fb'''', gb'''', ab'''', bb'''',
-- ** Three octaves up
cs''', ds''', es''', fs''', gs''', as''', bs''',
c''' , d''' , e''' , f''' , g''' , a''' , b''' ,
cb''', db''', eb''', fb''', gb''', ab''', bb''',
-- ** Two octaves up
cs'', ds'', es'', fs'', gs'', as'', bs'',
c'' , d'' , e'' , f'' , g'' , a'' , b'' ,
cb'', db'', eb'', fb'', gb'', ab'', bb'',
-- ** One octave up
cs' , ds' , es' , fs' , gs' , as' , bs' ,
c' , d' , e' , f' , g' , a' , b' ,
cb' , db' , eb' , fb' , gb' , ab' , bb' ,
-- ** Standard octave
cs , ds , es , fs , gs , as , bs ,
c , d , e , f , g , a , b ,
cb , db , eb , fb , gb , ab , bb ,
-- ** One octave down
cs_ , ds_ , es_ , fs_ , gs_ , as_ , bs_ ,
c_ , d_ , e_ , f_ , g_ , a_ , b_ ,
cb_ , db_ , eb_ , fb_ , gb_ , ab_ , bb_ ,
-- ** Two octaves down
cs__, ds__, es__, fs__, gs__, as__, bs__,
c__ , d__ , e__ , f__ , g__ , a__ , b__ ,
cb__, db__, eb__, fb__, gb__, ab__, bb__,
-- ** Three octaves down
cs___, ds___, es___, fs___, gs___, as___, bs___,
c___ , d___ , e___ , f___ , g___ , a___ , b___ ,
cb___, db___, eb___, fb___, gb___, ab___, bb___,
-- ** Four octaves down
cs____, ds____, es____, fs____, gs____, as____, bs____,
c____ , d____ , e____ , f____ , g____ , a____ , b____ ,
cb____, db____, eb____, fb____, gb____, ab____, bb____,
) where
import Control.Applicative
import Data.Fixed
import Data.Int
import Data.Ratio
import Data.Semigroup
import Data.Word
import Data.AffineSpace ((.-.))
import Music.Pitch.Common.Types
-- Pitch literal, defined as @(class, alteration, octave)@, where
--
-- * @class@ is a pitch class number in @[0..6]@, starting from C.
--
-- * @alteration@ is the number of semitones, i.e. 0 is natural, 1 for sharp, 2 for double sharp, -1 for flat and -2 for double flat.
-- Alteration is in 'Maybe' because some pitch representations distinguish between explicit and implicit accidentals, i.e. a diatonic
-- pitch type may assume @(0,Nothing,...)@ to mean C sharp rather than C.
--
-- * @octave@ is the octave number in scientific pitch notation minus 4.
--
-- Middle C is represented by the pitch literal @(0, Nothing, 0)@.
--
-- newtype PitchL = PitchL { getPitchL :: (Int, Maybe Double, Int) }
-- deriving (Eq, Show, Ord)
class IsPitch a where
fromPitch :: Pitch -> a
instance IsPitch a => IsPitch (Maybe a) where
fromPitch = pure . fromPitch
instance IsPitch a => IsPitch (First a) where
fromPitch = pure . fromPitch
instance IsPitch a => IsPitch (Last a) where
fromPitch = pure . fromPitch
instance IsPitch a => IsPitch [a] where
fromPitch = pure . fromPitch
instance (Monoid b, IsPitch a) => IsPitch (b, a) where
fromPitch = pure . fromPitch
-- TODO clean by inlining this whole thing or similar
viaPitchL :: (Int, Int, Int) -> Pitch
viaPitchL (pc, sem, oct) = Pitch $ mkInterval' sem (oct * 7 + pc)
where
mkInterval' diff diatonic = Interval (diatonicToChromatic (fromIntegral diatonic) + fromIntegral diff, fromIntegral diatonic)
diatonicToChromatic :: DiatonicSteps -> ChromaticSteps
diatonicToChromatic d = fromIntegral $ (octaves*12) + go restDia
where
-- restDia is always in [0..6]
(octaves, restDia) = fromIntegral d `divMod` 7
go = ([0,2,4,5,7,9,11] !!)
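-- Worked example (comment only): 'cs' below is viaPitchL (0, 1, 0), i.e. pitch
-- class C raised one semitone in the standard octave, which builds
-- Interval (1, 0) (one chromatic step, zero diatonic steps) above middle C:
-- C sharp. Likewise 'd' is viaPitchL (1, 0, 0) = Interval (2, 1), a major
-- second above middle C.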
cs'''' = fromPitch $ viaPitchL (0, 1, 4)
ds'''' = fromPitch $ viaPitchL (1, 1, 4)
es'''' = fromPitch $ viaPitchL (2, 1, 4)
fs'''' = fromPitch $ viaPitchL (3, 1, 4)
gs'''' = fromPitch $ viaPitchL (4, 1, 4)
as'''' = fromPitch $ viaPitchL (5, 1, 4)
bs'''' = fromPitch $ viaPitchL (6, 1, 4)
c'''' = fromPitch $ viaPitchL (0, 0, 4)
d'''' = fromPitch $ viaPitchL (1, 0, 4)
e'''' = fromPitch $ viaPitchL (2, 0, 4)
f'''' = fromPitch $ viaPitchL (3, 0, 4)
g'''' = fromPitch $ viaPitchL (4, 0, 4)
a'''' = fromPitch $ viaPitchL (5, 0, 4)
b'''' = fromPitch $ viaPitchL (6, 0, 4)
cb'''' = fromPitch $ viaPitchL (0, (-1), 4)
db'''' = fromPitch $ viaPitchL (1, (-1), 4)
eb'''' = fromPitch $ viaPitchL (2, (-1), 4)
fb'''' = fromPitch $ viaPitchL (3, (-1), 4)
gb'''' = fromPitch $ viaPitchL (4, (-1), 4)
ab'''' = fromPitch $ viaPitchL (5, (-1), 4)
bb'''' = fromPitch $ viaPitchL (6, (-1), 4)
cs''' = fromPitch $ viaPitchL (0, 1, 3)
ds''' = fromPitch $ viaPitchL (1, 1, 3)
es''' = fromPitch $ viaPitchL (2, 1, 3)
fs''' = fromPitch $ viaPitchL (3, 1, 3)
gs''' = fromPitch $ viaPitchL (4, 1, 3)
as''' = fromPitch $ viaPitchL (5, 1, 3)
bs''' = fromPitch $ viaPitchL (6, 1, 3)
c''' = fromPitch $ viaPitchL (0, 0, 3)
d''' = fromPitch $ viaPitchL (1, 0, 3)
e''' = fromPitch $ viaPitchL (2, 0, 3)
f''' = fromPitch $ viaPitchL (3, 0, 3)
g''' = fromPitch $ viaPitchL (4, 0, 3)
a''' = fromPitch $ viaPitchL (5, 0, 3)
b''' = fromPitch $ viaPitchL (6, 0, 3)
cb''' = fromPitch $ viaPitchL (0, (-1), 3)
db''' = fromPitch $ viaPitchL (1, (-1), 3)
eb''' = fromPitch $ viaPitchL (2, (-1), 3)
fb''' = fromPitch $ viaPitchL (3, (-1), 3)
gb''' = fromPitch $ viaPitchL (4, (-1), 3)
ab''' = fromPitch $ viaPitchL (5, (-1), 3)
bb''' = fromPitch $ viaPitchL (6, (-1), 3)
cs'' = fromPitch $ viaPitchL (0, 1, 2)
ds'' = fromPitch $ viaPitchL (1, 1, 2)
es'' = fromPitch $ viaPitchL (2, 1, 2)
fs'' = fromPitch $ viaPitchL (3, 1, 2)
gs'' = fromPitch $ viaPitchL (4, 1, 2)
as'' = fromPitch $ viaPitchL (5, 1, 2)
bs'' = fromPitch $ viaPitchL (6, 1, 2)
c'' = fromPitch $ viaPitchL (0, 0, 2)
d'' = fromPitch $ viaPitchL (1, 0, 2)
e'' = fromPitch $ viaPitchL (2, 0, 2)
f'' = fromPitch $ viaPitchL (3, 0, 2)
g'' = fromPitch $ viaPitchL (4, 0, 2)
a'' = fromPitch $ viaPitchL (5, 0, 2)
b'' = fromPitch $ viaPitchL (6, 0, 2)
cb'' = fromPitch $ viaPitchL (0, (-1), 2)
db'' = fromPitch $ viaPitchL (1, (-1), 2)
eb'' = fromPitch $ viaPitchL (2, (-1), 2)
fb'' = fromPitch $ viaPitchL (3, (-1), 2)
gb'' = fromPitch $ viaPitchL (4, (-1), 2)
ab'' = fromPitch $ viaPitchL (5, (-1), 2)
bb'' = fromPitch $ viaPitchL (6, (-1), 2)
cs' = fromPitch $ viaPitchL (0, 1, 1)
ds' = fromPitch $ viaPitchL (1, 1, 1)
es' = fromPitch $ viaPitchL (2, 1, 1)
fs' = fromPitch $ viaPitchL (3, 1, 1)
gs' = fromPitch $ viaPitchL (4, 1, 1)
as' = fromPitch $ viaPitchL (5, 1, 1)
bs' = fromPitch $ viaPitchL (6, 1, 1)
c' = fromPitch $ viaPitchL (0, 0, 1)
d' = fromPitch $ viaPitchL (1, 0, 1)
e' = fromPitch $ viaPitchL (2, 0, 1)
f' = fromPitch $ viaPitchL (3, 0, 1)
g' = fromPitch $ viaPitchL (4, 0, 1)
a' = fromPitch $ viaPitchL (5, 0, 1)
b' = fromPitch $ viaPitchL (6, 0, 1)
cb' = fromPitch $ viaPitchL (0, (-1), 1)
db' = fromPitch $ viaPitchL (1, (-1), 1)
eb' = fromPitch $ viaPitchL (2, (-1), 1)
fb' = fromPitch $ viaPitchL (3, (-1), 1)
gb' = fromPitch $ viaPitchL (4, (-1), 1)
ab' = fromPitch $ viaPitchL (5, (-1), 1)
bb' = fromPitch $ viaPitchL (6, (-1), 1)
cs = fromPitch $ viaPitchL (0, 1, 0)
ds = fromPitch $ viaPitchL (1, 1, 0)
es = fromPitch $ viaPitchL (2, 1, 0)
fs = fromPitch $ viaPitchL (3, 1, 0)
gs = fromPitch $ viaPitchL (4, 1, 0)
as = fromPitch $ viaPitchL (5, 1, 0)
bs = fromPitch $ viaPitchL (6, 1, 0)
c = fromPitch $ viaPitchL (0, 0, 0)
d = fromPitch $ viaPitchL (1, 0, 0)
e = fromPitch $ viaPitchL (2, 0, 0)
f = fromPitch $ viaPitchL (3, 0, 0)
g = fromPitch $ viaPitchL (4, 0, 0)
a = fromPitch $ viaPitchL (5, 0, 0)
b = fromPitch $ viaPitchL (6, 0, 0)
cb = fromPitch $ viaPitchL (0, (-1), 0)
db = fromPitch $ viaPitchL (1, (-1), 0)
eb = fromPitch $ viaPitchL (2, (-1), 0)
fb = fromPitch $ viaPitchL (3, (-1), 0)
gb = fromPitch $ viaPitchL (4, (-1), 0)
ab = fromPitch $ viaPitchL (5, (-1), 0)
bb = fromPitch $ viaPitchL (6, (-1), 0)
cs_ = fromPitch $ viaPitchL (0, 1, -1)
ds_ = fromPitch $ viaPitchL (1, 1, -1)
es_ = fromPitch $ viaPitchL (2, 1, -1)
fs_ = fromPitch $ viaPitchL (3, 1, -1)
gs_ = fromPitch $ viaPitchL (4, 1, -1)
as_ = fromPitch $ viaPitchL (5, 1, -1)
bs_ = fromPitch $ viaPitchL (6, 1, -1)
c_ = fromPitch $ viaPitchL (0, 0, -1)
d_ = fromPitch $ viaPitchL (1, 0, -1)
e_ = fromPitch $ viaPitchL (2, 0, -1)
f_ = fromPitch $ viaPitchL (3, 0, -1)
g_ = fromPitch $ viaPitchL (4, 0, -1)
a_ = fromPitch $ viaPitchL (5, 0, -1)
b_ = fromPitch $ viaPitchL (6, 0, -1)
cb_ = fromPitch $ viaPitchL (0, (-1), -1)
db_ = fromPitch $ viaPitchL (1, (-1), -1)
eb_ = fromPitch $ viaPitchL (2, (-1), -1)
fb_ = fromPitch $ viaPitchL (3, (-1), -1)
gb_ = fromPitch $ viaPitchL (4, (-1), -1)
ab_ = fromPitch $ viaPitchL (5, (-1), -1)
bb_ = fromPitch $ viaPitchL (6, (-1), -1)
cs__ = fromPitch $ viaPitchL (0, 1, -2)
ds__ = fromPitch $ viaPitchL (1, 1, -2)
es__ = fromPitch $ viaPitchL (2, 1, -2)
fs__ = fromPitch $ viaPitchL (3, 1, -2)
gs__ = fromPitch $ viaPitchL (4, 1, -2)
as__ = fromPitch $ viaPitchL (5, 1, -2)
bs__ = fromPitch $ viaPitchL (6, 1, -2)
c__ = fromPitch $ viaPitchL (0, 0, -2)
d__ = fromPitch $ viaPitchL (1, 0, -2)
e__ = fromPitch $ viaPitchL (2, 0, -2)
f__ = fromPitch $ viaPitchL (3, 0, -2)
g__ = fromPitch $ viaPitchL (4, 0, -2)
a__ = fromPitch $ viaPitchL (5, 0, -2)
b__ = fromPitch $ viaPitchL (6, 0, -2)
cb__ = fromPitch $ viaPitchL (0, (-1), -2)
db__ = fromPitch $ viaPitchL (1, (-1), -2)
eb__ = fromPitch $ viaPitchL (2, (-1), -2)
fb__ = fromPitch $ viaPitchL (3, (-1), -2)
gb__ = fromPitch $ viaPitchL (4, (-1), -2)
ab__ = fromPitch $ viaPitchL (5, (-1), -2)
bb__ = fromPitch $ viaPitchL (6, (-1), -2)
cs___ = fromPitch $ viaPitchL (0, 1, -3)
ds___ = fromPitch $ viaPitchL (1, 1, -3)
es___ = fromPitch $ viaPitchL (2, 1, -3)
fs___ = fromPitch $ viaPitchL (3, 1, -3)
gs___ = fromPitch $ viaPitchL (4, 1, -3)
as___ = fromPitch $ viaPitchL (5, 1, -3)
bs___ = fromPitch $ viaPitchL (6, 1, -3)
c___ = fromPitch $ viaPitchL (0, 0, -3)
d___ = fromPitch $ viaPitchL (1, 0, -3)
e___ = fromPitch $ viaPitchL (2, 0, -3)
f___ = fromPitch $ viaPitchL (3, 0, -3)
g___ = fromPitch $ viaPitchL (4, 0, -3)
a___ = fromPitch $ viaPitchL (5, 0, -3)
b___ = fromPitch $ viaPitchL (6, 0, -3)
cb___ = fromPitch $ viaPitchL (0, (-1), -3)
db___ = fromPitch $ viaPitchL (1, (-1), -3)
eb___ = fromPitch $ viaPitchL (2, (-1), -3)
fb___ = fromPitch $ viaPitchL (3, (-1), -3)
gb___ = fromPitch $ viaPitchL (4, (-1), -3)
ab___ = fromPitch $ viaPitchL (5, (-1), -3)
bb___ = fromPitch $ viaPitchL (6, (-1), -3)
cs____ = fromPitch $ viaPitchL (0, 1, -4)
ds____ = fromPitch $ viaPitchL (1, 1, -4)
es____ = fromPitch $ viaPitchL (2, 1, -4)
fs____ = fromPitch $ viaPitchL (3, 1, -4)
gs____ = fromPitch $ viaPitchL (4, 1, -4)
as____ = fromPitch $ viaPitchL (5, 1, -4)
bs____ = fromPitch $ viaPitchL (6, 1, -4)
c____ = fromPitch $ viaPitchL (0, 0, -4)
d____ = fromPitch $ viaPitchL (1, 0, -4)
e____ = fromPitch $ viaPitchL (2, 0, -4)
f____ = fromPitch $ viaPitchL (3, 0, -4)
g____ = fromPitch $ viaPitchL (4, 0, -4)
a____ = fromPitch $ viaPitchL (5, 0, -4)
b____ = fromPitch $ viaPitchL (6, 0, -4)
cb____ = fromPitch $ viaPitchL (0, (-1), -4)
db____ = fromPitch $ viaPitchL (1, (-1), -4)
eb____ = fromPitch $ viaPitchL (2, (-1), -4)
fb____ = fromPitch $ viaPitchL (3, (-1), -4)
gb____ = fromPitch $ viaPitchL (4, (-1), -4)
ab____ = fromPitch $ viaPitchL (5, (-1), -4)
bb____ = fromPitch $ viaPitchL (6, (-1), -4)
|
music-suite/music-pitch
|
src/Music/Pitch/Literal/Pitch.hs
|
Haskell
|
bsd-3-clause
| 12,962
|
module Main where
import Control.Monad (when)
import Data.Maybe (fromMaybe)
import System.Environment (getArgs)
import Text.Printf (printf)
import qualified System.Console.GetOpt as GetOpt
import Parser
import Tokenizer
-- Options parsing
data Options = Options {
optInput :: Maybe FilePath,
optVerbose :: Bool }
deriving Show
defaultOptions :: Options
defaultOptions = Options {
optInput = Nothing,
optVerbose = False }
options :: [GetOpt.OptDescr (Options -> Options)]
options = [
GetOpt.Option ['v'] ["verbose"]
(GetOpt.NoArg (\opts -> opts { optVerbose = True }))
"chatty output on stderr",
GetOpt.Option ['i'] ["input-file"]
(GetOpt.OptArg ((\f opts -> opts { optInput = Just f }) . fromMaybe "input") "FILE")
"input FILE" ]
compilerOpts :: [String] -> IO (Options, [String])
compilerOpts argv =
case GetOpt.getOpt GetOpt.Permute options argv of
(o,n,[] ) -> return (foldl (flip id) defaultOptions o, n)
(_,_,errs) -> ioError (userError (concat errs ++ GetOpt.usageInfo header options))
where header = "Usage: ic [OPTION...] files..."
-- Actual program
main :: IO ()
main = do
args <- getArgs
(opts, leftover) <- compilerOpts args
case leftover of
[] -> return ()
_ -> error $ "Unknown args " ++ show leftover
content <- case optInput opts of
Nothing -> getContents
Just _ -> error "Unimplemented. Use stdin."
when (optVerbose opts) $ putStrLn "Tokenizing..."
tokenized <- case tokenize "(unknown)" content of
Left te -> error $ show te
Right tokenized_ -> return tokenized_
when (optVerbose opts) $ do
putStrLn "Tokenized:"
mapM_ putStr [ printf "\t%s\n" (show toks) | toks <- tokenized ]
putStrLn ""
when (optVerbose opts) $ putStrLn "Parsing..."
parsed <- case parseOCM tokenized of
Left pe -> error $ show pe
Right parsed_ -> return parsed_
when (optVerbose opts) $ do
putStrLn "Parsed:"
putStrLn $ show parsed
|
christianlavoie/origami-computational-model
|
src/Main.hs
|
Haskell
|
bsd-3-clause
| 2,056
|
-- |
module VK.App (app
, module Exp) where
import React.Flux
import System.IO.Unsafe (unsafePerformIO)
import VK.App.Actions
import VK.App.Store
import VK.App.Types as Exp
import VK.App.Views
import VK.DOM.Router as Exp
app :: App ParentRouter
app =
App {appName = "VK"
, appState = store
, appView = appView_
, appInitAction = AppInit
, appRouter = Just $ storeRouter appDispatcher
}
where
appDispatcher action = unsafePerformIO $ do
st <- getStoreData store
return $ dispatch st action
|
eryx67/vk-api-example
|
src/VK/App.hs
|
Haskell
|
bsd-3-clause
| 635
|
{-# LANGUAGE TypeFamilies, BangPatterns, TypeOperators, FlexibleContexts, FlexibleInstances, ScopedTypeVariables #-}
module Main where
import Data.NeuralNetwork hiding (cost')
import Data.NeuralNetwork.Backend.BLASHS
import qualified Data.Vector as V
import qualified Data.Vector.Storable as SV
import Data.List(foldl',partition,maximumBy)
import Data.IORef
import Text.Printf (printf)
import Control.Monad
import Control.Monad.Except
import System.Environment
import Text.PrettyPrint.Free hiding (flatten)
import System.IO (hFlush, stdout)
import System.IO.Unsafe
import Parser
main = do x <- runExceptT $ compile byBLASHSf (In2D 28 28,
Convolution 2 7 3 :&: MaxPooling 2 :&:
Convolution 4 5 2 :&: MaxPooling 2 :&:
Reshape2DAs1D :&:
FullConnect 512 :&: FullConnect 32 :&:
FullConnect 10 :&: HNil,
MeanSquaredError)
case x of
Left _ -> putStrLn "Error."
Right cnn -> do
loop cnn 5
-- debug cnn
where
loop cnn cnt = do
cnn <- dotrain cnn cnt
dotest cnn
putStr "Continue? (number):"
hFlush stdout
str <- getLine
let next = (reads :: ReadS Int) str
when (not $ null next) (loop cnn (fst $ head next))
debug :: (ModelCst n e, Inp n ~ PImage, Out n ~ PLabel, Run n ~ IO)
=> (n,e) -> IO ()
debug nn = do
a0:a1:_ <- getArgs
let cycle = read a0 :: Int
rate = read a1 :: Float
putStrLn "Load training data."
dataset <- trainingData >>= mapM preprocess . uncurry zip
testset <- testData >>= mapM preprocess . take 10 . uncurry zip
cnt <- newIORef 0 :: IO (IORef Int)
let dispAndInc = do
i <- readIORef cnt
writeIORef cnt (i+1)
putStrLn ("Iteration " ++ show i)
nn <- iterateM (cycle `div` checkpoint) nn $ \nn1 -> do
nn1 <- iterateM checkpoint nn1 $ (dispAndInc >>) . online rate dataset
putStrLn "[test]..."
smalltest testset nn1
return nn1
nn <- iterateM (cycle `mod` checkpoint) nn $ (dispAndInc >>) . online rate dataset
putStrLn "[final test]..."
smalltest testset nn
where
checkpoint = 2
smalltest it (nn,_) = do
flip mapM_ it $ \(ds,ev) -> do
pv <- forward nn ds
prettyResult pv >>= putStrLn . ("+" ++ )
prettyResult ev >>= putStrLn . ("*" ++ )
dotrain :: (ModelCst n e, Inp n ~ PImage, Out n ~ PLabel, Run n ~ IO)
=> (n,e)-> Int -> IO (n,e)
dotrain nn mcnt = do
putStrLn "Load training data."
dataset <- trainingData >>= mapM preprocess . uncurry zip
putStrLn "Load test data."
putStrLn "Learning."
cnt <- newIORef 0 :: IO (IORef Int)
let dispAndInc = do
i <- readIORef cnt
writeIORef cnt (i+1)
putStrLn ("Iteration " ++ show i)
iterateM mcnt nn ((dispAndInc >>) . online 0.001 dataset)
dotest :: (ModelCst n e, Inp n ~ PImage, Out n ~ PLabel, Run n ~ IO)
=> (n,e) -> IO ()
dotest !(nn,_) = do
testset <- testData >>= mapM preprocess . uncurry zip
putStrLn "Start test"
result <- mapM ((>>= postprocess) . forward nn . fst) testset
expect <- mapM (postprocess . snd) testset
let (co,wr) = partition (uncurry (==)) $ zip result expect
putStrLn $ printf "correct: %d, wrong: %d" (length co) (length wr)
putStrLn $ "First 10 tests:"
flip mapM_ (take 10 testset) $ \(ds,ev) -> do
pv <- forward nn ds
prettyResult pv >>= putStrLn . ("+" ++ )
prettyResult ev >>= putStrLn . ("*" ++ )
online :: (ModelCst n e, Inp n ~ PImage, Out n ~ PLabel, Run n ~ IO)
=> Float -> [(Inp n, Out n)] -> (n,e) -> IO (n,e)
online rate ds !nn = walk ds nn
where
walk [] !nn = return nn
walk (d:ds) !nn = do !nn <- learn nn d rate
walk ds nn
iterateM :: (MonadIO m) => Int -> a -> (a -> m a) -> m a
iterateM n x f = go 0 x
where
go i x = if i == n
then
return x
else do
x <- f x
go (i+1) x
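-- Comment only: 'iterateM n x f' applies the monadic step 'f' to 'x' exactly
-- 'n' times, so iterateM 3 x f behaves like f x >>= f >>= f.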
type PImage = V.Vector (DenseMatrix Float)
type PLabel = DenseVector Float
preprocess :: (Image, Label) -> IO (PImage, PLabel)
preprocess (img,lbl) = do
i <- SV.unsafeThaw img
l <- SV.unsafeThaw lbl
return (V.singleton $ DenseMatrix 28 28 i, DenseVector l)
postprocess :: PLabel -> IO Int
postprocess v = do
a <- denseVectorToVector v
return $ V.maxIndex a
prettyResult a = do
v <- postprocess a
return $ showPretty $ text (printf "%02d:" v) <+> pretty a
where
showPretty x = displayS (renderPretty 0.4 500 x) ""
instance Pretty (DenseVector Float) where
pretty vec = let a = unsafePerformIO (denseVectorToVector vec)
in encloseSep langle rangle comma $ map (text . printf "%.04f") (V.toList a)
|
pierric/neural-network
|
Backend-blashs/Example/MNIST/Main.hs
|
Haskell
|
bsd-3-clause
| 5,095
|
module Opaleye.Internal.Tag where
-- | Tag is for use as a source of unique IDs in QueryArr
newtype Tag = UnsafeTag Int deriving (Read, Show)
start :: Tag
start = UnsafeTag 1
next :: Tag -> Tag
next = UnsafeTag . (+1) . unsafeUnTag
unsafeUnTag :: Tag -> Int
unsafeUnTag (UnsafeTag i) = i
tagWith :: Tag -> String -> String
tagWith t s = s ++ "_" ++ show (unsafeUnTag t)
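-- Illustrative usage (not part of the original module; 'demoAliases' is a
-- hypothetical name): threading a Tag to derive unique aliases.
demoAliases :: (String, String)
demoAliases = (tagWith start "col", tagWith (next start) "col")
-- evaluates to ("col_1", "col_2")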
|
WraithM/haskell-opaleye
|
src/Opaleye/Internal/Tag.hs
|
Haskell
|
bsd-3-clause
| 375
|
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
module Mars.Command.Load (Load (..)) where
import Data.Aeson as Aeson
import Data.String.Conv
import Data.Text (Text)
import Data.Typeable
import GHC.Generics
import Mars.Command
import System.IO (hPutStrLn, stderr)
import Test.QuickCheck
import Mars.Renderable
newtype Load = Load Text
deriving (Generic, Show, Eq, Typeable)
newtype LoadResult = LoadFile Text
instance Command Load LoadResult where
evalCommand _ (Load filename) = LoadFile filename
instance Action LoadResult where
execCommand s (LoadFile filename) = do
c <- readFile . toS $ filename
(loadResult . decode . toS) c
where
loadResult Nothing = printErr "Could not parse"
loadResult (Just j) = reportResult . fromJSON $ j
reportResult (Aeson.Error err) = printErr err
reportResult (Aeson.Success state) = pure state
printErr err = s <$ hPutStrLn stderr ("Invalid saved state: " <> err)
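-- Flow note (comment only): the saved file is read, 'decode'd into a JSON
-- 'Value' and converted with 'fromJSON'; on a parse or conversion failure a
-- message is written to stderr and the previous state 's' is returned
-- unchanged.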
instance Renderable Load where
render (Load f) = "load \"" <> f <> "\""
instance Arbitrary Load where
arbitrary = Load <$> arbString
arbString :: Gen Text
arbString =
toS
<$> listOf
(elements (['A' .. 'Z'] <> ['a' .. 'z']))
`suchThat` (not . null)
|
lorcanmcdonald/mars
|
src/Mars/Command/Load.hs
|
Haskell
|
bsd-3-clause
| 1,281
|
{-# LANGUAGE NoMonomorphismRestriction, ExtendedDefaultRules#-}
module DocTest.Flat.Endian where
import qualified DocTest
import Test.Tasty(TestTree,testGroup)
import Flat.Endian
import Numeric (showHex)
tests :: IO TestTree
tests = testGroup "Flat.Endian" <$> sequence [ DocTest.test "src/Data/Flat/Endian.hs:36" ["True"] (DocTest.asPrint( toBE64 0xF0F1F2F3F4F5F6F7 == if isBigEndian then 0xF0F1F2F3F4F5F6F7 else 0xF7F6F5F4F3F2F1F0 )), DocTest.test "src/Data/Flat/Endian.hs:49" ["True"] (DocTest.asPrint( toBE32 0xF0F1F2F3 == if isBigEndian then 0xF0F1F2F3 else 0xF3F2F1F0 )), DocTest.test "src/Data/Flat/Endian.hs:62" ["True"] (DocTest.asPrint( toBE16 0xF0F1 == if isBigEndian then 0xF0F1 else 0xF1F0 ))]
|
tittoassini/flat
|
test/DocTest/Data/Flat/Endian.hs
|
Haskell
|
bsd-3-clause
| 712
|
module Air.Data.Default where
import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.Lazy.Char8 as L
-- BEGIN
-- copy from data.default
import Data.Ratio
import qualified Data.Set as S
import qualified Data.Map as M
import Data.Int (Int8, Int16, Int32, Int64)
import Data.Word (Word8, Word16, Word32, Word64)
import Data.Time (Day(..), TimeOfDay, midnight, UTCTime(..), DiffTime, secondsToDiffTime)
import qualified Data.Text as ST
import qualified Data.Text.Lazy as LT
-- | A class for types with a default value.
class Default a where
-- | The default value for this type.
def :: a
instance Default () where def = ()
instance Default (S.Set v) where def = S.empty
instance Default (M.Map k v) where def = M.empty
instance Default Int where def = 0
instance Default Integer where def = 0
instance Default Float where def = 0
instance Default Double where def = 0
instance (Integral a) => Default (Ratio a) where def = 0
instance Default (Maybe a) where def = Nothing
instance Default [a] where def = []
instance (Default r) => Default (e -> r) where def _ = def
instance (Default a) => Default (IO a) where def = return def
instance (Default a, Default b) => Default (a, b) where
def = (def, def)
-- END
instance Default B.ByteString where
def = B.empty
instance Default L.ByteString where
def = L.empty
instance Default ST.Text where
def = ST.empty
instance Default LT.Text where
def = LT.empty
instance Default Int8 where def = 0
instance Default Int16 where def = 0
instance Default Int32 where def = 0
instance Default Int64 where def = 0
instance Default Word8 where def = 0
instance Default Word16 where def = 0
instance Default Word32 where def = 0
instance Default Word64 where def = 0
instance Default Bool where def = False
instance (Default a, Default b, Default c) => Default (a, b, c) where
def = (def, def, def)
instance (Default a, Default b, Default c, Default d) => Default (a, b, c, d) where
def = (def, def, def, def)
instance (Default a, Default b, Default c, Default d, Default e) => Default (a, b, c, d, e) where
def = (def, def, def, def, def)
instance Default Day where
def = ModifiedJulianDay def
instance Default DiffTime where
def = secondsToDiffTime def
instance Default UTCTime where
def = UTCTime def def
instance Default TimeOfDay where
def = midnight
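-- Illustrative values (comment only), following from the instances above:
--   def :: Int           = 0
--   def :: Maybe Char    = Nothing
--   def :: (Int, Bool)   = (0, False)
--   def :: M.Map Int Int = M.empty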
|
nfjinjing/air
|
src/Air/Data/Default.hs
|
Haskell
|
bsd-3-clause
| 2,370
|
{-# LANGUAGE ForeignFunctionInterface, OverloadedStrings, CPP #-}
module IOSMain where
import Graphics.UI.SDL as SDL
import HXSDL
foreign export ccall "haskell_main" main :: IO ()
main =
withInit [InitVideo] $
withWindow "Hello World!" (Position 100 100) (Size 640 480) [WindowShown] $ \win ->
withRenderer win (Device (-1)) [Accelerated, PresentVSync] $ \ren -> do mainLoop ren
|
EDeijl/HXSDL
|
src/iOSMain.hs
|
Haskell
|
mit
| 387
|
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -fno-warn-deprecations #-}
module Network.Wai.Handler.Warp.Request (
recvRequest
, headerLines
) where
import Control.Applicative
import qualified Control.Concurrent as Conc (yield)
import Control.Exception (throwIO)
import Data.Array ((!))
import Data.ByteString (ByteString)
import qualified Data.ByteString as S
import qualified Data.ByteString.Unsafe as SU
import qualified Data.CaseInsensitive as CI
import qualified Data.IORef as I
import Data.Monoid (mempty)
import qualified Network.HTTP.Types as H
import Network.Socket (SockAddr)
import Network.Wai
import Network.Wai.Handler.Warp.Conduit
import Network.Wai.Handler.Warp.Header
import Network.Wai.Handler.Warp.ReadInt
import Network.Wai.Handler.Warp.RequestHeader
import Network.Wai.Handler.Warp.Settings (Settings, settingsNoParsePath)
import qualified Network.Wai.Handler.Warp.Timeout as Timeout
import Network.Wai.Handler.Warp.Types
import Network.Wai.Internal
import Prelude hiding (lines)
import Control.Monad (when)
----------------------------------------------------------------
-- FIXME come up with good values here
maxTotalHeaderLength :: Int
maxTotalHeaderLength = 50 * 1024
----------------------------------------------------------------
-- | Receiving an HTTP request from 'Connection' and parsing its header
-- to create 'Request'.
recvRequest :: Settings
-> Connection
-> InternalInfo
-> SockAddr -- ^ Peer's address.
-> Source -- ^ Where HTTP request comes from.
-> IO (Request
,IndexedHeader) -- ^
-- 'Request' passed to 'Application',
-- 'IndexedHeader' of HTTP request for internal use,
recvRequest settings conn ii addr src = do
hdrlines <- headerLines src
(method, unparsedPath, path, query, httpversion, hdr) <- parseHeaderLines hdrlines
let idxhdr = indexRequestHeader hdr
expect = idxhdr ! idxExpect
cl = idxhdr ! idxContentLength
te = idxhdr ! idxTransferEncoding
handleExpect conn httpversion expect
(rbody, bodyLength) <- bodyAndSource src cl te
rbody' <- timeoutBody th rbody
let req = Request {
requestMethod = method
, httpVersion = httpversion
, pathInfo = H.decodePathSegments path
, rawPathInfo = if settingsNoParsePath settings then unparsedPath else path
, rawQueryString = query
, queryString = H.parseQuery query
, requestHeaders = hdr
, isSecure = False
, remoteHost = addr
, requestBody = rbody'
, vault = mempty
, requestBodyLength = bodyLength
, requestHeaderHost = idxhdr ! idxHost
, requestHeaderRange = idxhdr ! idxRange
}
return (req, idxhdr)
where
th = threadHandle ii
----------------------------------------------------------------
headerLines :: Source -> IO [ByteString]
headerLines src = do
bs <- readSource src
if S.null bs
then throwIO $ NotEnoughLines []
else push src (THStatus 0 id id) bs
----------------------------------------------------------------
handleExpect :: Connection
-> H.HttpVersion
-> Maybe HeaderValue
-> IO ()
handleExpect conn ver (Just "100-continue") = do
connSendAll conn continue
Conc.yield
where
continue
| ver == H.http11 = "HTTP/1.1 100 Continue\r\n\r\n"
| otherwise = "HTTP/1.0 100 Continue\r\n\r\n"
handleExpect _ _ _ = return ()
----------------------------------------------------------------
bodyAndSource :: Source
-> Maybe HeaderValue -- ^ content length
-> Maybe HeaderValue -- ^ transfer-encoding
-> IO (IO ByteString
,RequestBodyLength
)
bodyAndSource src cl te
| chunked = do
csrc <- mkCSource src
return (readCSource csrc, ChunkedBody)
| otherwise = do
isrc <- mkISource src len
return (readISource isrc, bodyLen)
where
len = toLength cl
bodyLen = KnownLength $ fromIntegral len
chunked = isChunked te
toLength :: Maybe HeaderValue -> Int
toLength Nothing = 0
toLength (Just bs) = readInt bs
isChunked :: Maybe HeaderValue -> Bool
isChunked (Just bs) = CI.foldCase bs == "chunked"
isChunked _ = False
----------------------------------------------------------------
timeoutBody :: Timeout.Handle -> IO ByteString -> IO (IO ByteString)
timeoutBody timeoutHandle rbody = do
isFirstRef <- I.newIORef True
return $ do
isFirst <- I.readIORef isFirstRef
when isFirst $
-- Timeout handling was paused after receiving the full request
-- headers. Now we need to resume it to avoid a slowloris
-- attack during request body sending.
Timeout.resume timeoutHandle
bs <- rbody
-- As soon as we finish receiving the request body, whether
-- because the application is not interested in more bytes, or
-- because there is no more data available, pause the timeout
-- handler again.
when (S.null bs) (Timeout.pause timeoutHandle)
return bs
----------------------------------------------------------------
type BSEndo = ByteString -> ByteString
type BSEndoList = [ByteString] -> [ByteString]
data THStatus = THStatus
{-# UNPACK #-} !Int -- running total byte count
BSEndoList -- previously parsed lines
BSEndo -- bytestrings to be prepended
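-- Comment only: 'push' below scans each chunk for LF bytes (10), folding
-- complete header lines into the 'BSEndoList' and carrying any partial line
-- forward through the 'BSEndo' prepend, while the running byte count is
-- checked against 'maxTotalHeaderLength' ('OverLargeHeader' otherwise).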
----------------------------------------------------------------
{- FIXME
close :: Sink ByteString IO a
close = throwIO IncompleteHeaders
-}
push :: Source -> THStatus -> ByteString -> IO [ByteString]
push src (THStatus len lines prepend) bs'
-- Too many bytes
| len > maxTotalHeaderLength = throwIO OverLargeHeader
| otherwise = push' mnl
where
bs = prepend bs'
bsLen = S.length bs
mnl = do
nl <- S.elemIndex 10 bs
-- check if there are two more bytes in the bs
-- if so, see if the second of those is a horizontal space
if bsLen > nl + 1 then
let c = S.index bs (nl + 1)
b = case nl of
0 -> True
1 -> S.index bs 0 == 13
_ -> False
in Just (nl, not b && (c == 32 || c == 9))
else
Just (nl, False)
{-# INLINE push' #-}
push' :: Maybe (Int, Bool) -> IO [ByteString]
-- No newline found in this chunk. Add it to the prepend,
-- update the length, and continue processing.
push' Nothing = do
bs <- readSource' src
when (S.null bs) $ throwIO IncompleteHeaders
push src status bs
where
len' = len + bsLen
prepend' = S.append bs
status = THStatus len' lines prepend'
-- Found a newline, but next line continues as a multiline header
push' (Just (end, True)) = push src status rest
where
rest = S.drop (end + 1) bs
prepend' = S.append (SU.unsafeTake (checkCR bs end) bs)
len' = len + end
status = THStatus len' lines prepend'
-- Found a newline at position end.
push' (Just (end, False))
-- leftover
| S.null line = do
when (start < bsLen) $ leftoverSource src (SU.unsafeDrop start bs)
return (lines [])
-- more headers
| otherwise = let len' = len + start
lines' = lines . (line:)
status = THStatus len' lines' id
in if start < bsLen then
-- more bytes in this chunk, push again
let bs'' = SU.unsafeDrop start bs
in push src status bs''
else do
-- no more bytes in this chunk, ask for more
bs <- readSource' src
when (S.null bs) $ throwIO IncompleteHeaders
push src status bs
where
start = end + 1 -- start of next chunk
line = SU.unsafeTake (checkCR bs end) bs
{-# INLINE checkCR #-}
checkCR :: ByteString -> Int -> Int
checkCR bs pos = if 13 == S.index bs p then p else pos -- 13 is CR
where
!p = pos - 1
|
beni55/wai
|
warp/Network/Wai/Handler/Warp/Request.hs
|
Haskell
|
mit
| 8,515
|
{-# OPTIONS_GHC -fglasgow-exts #-}
-----------------------------------------------------------------------------
-- |
-- Module : Control.Morphism.Futu
-- Copyright : (C) 2008 Edward Kmett
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : Edward Kmett <ekmett@gmail.com>
-- Stability : experimental
-- Portability : non-portable (rank-2 polymorphism)
--
-- Traditional operators, included here to show how to roll your own
----------------------------------------------------------------------------
module Control.Morphism.Futu
( futu, g_futu
, postpro_futu, g_postpro_futu
, distFutu
) where
import Control.Functor.Algebra
import Control.Functor.Extras
import Control.Functor.Fix
import Control.Monad.Free
import Control.Morphism.Ana
import Control.Morphism.Postpro
-- | Generalized from @futu :: Functor f => GCoalgebra f (Free f) a -> a -> FixF f@
futu :: (RunMonadFree f m) => GCoalgebra f m a -> a -> FixF f
futu = g_ana (distFutu id)
g_futu :: (Functor f, RunMonadFree h m) => Dist h f -> GCoalgebra f m a -> a -> FixF f
g_futu k = g_ana (distFutu k)
-- | A futumorphic postpromorphism
postpro_futu :: (RunMonadFree f m) => GCoalgebra f m a -> (f :~> f) -> a -> FixF f
postpro_futu = g_postpro (distFutu id)
-- | A generalized-futumorphic postpromorphism
g_postpro_futu :: (Functor f, RunMonadFree h m) => Dist h f -> GCoalgebra f m a -> (f :~> f) -> a -> FixF f
g_postpro_futu k = g_postpro (distFutu k)
-- | Turn a distributive law for a functor into a distributive law for the free monad of that functor.
-- This has been generalized to support generating distributive laws for a number of related free-monad-like
-- constructions such as the Codensity monad of the free monad of a functor.
distFutu :: (Functor f, RunMonadFree h m) => Dist h f -> Dist m f
distFutu k = cataFree (fmap return) (fmap inFree . k)
|
urska19/MFP---Samodejno-racunanje-dvosmernih-preslikav
|
Control/Morphism/Futu.hs
|
Haskell
|
apache-2.0
| 1,865
|
{-# LANGUAGE ForeignFunctionInterface #-}
module Main (module Main, module Arc4) where
import Control.Concurrent
import Control.Monad.Reader
-- import Data.Char
-- import Data.List
import Data.Word
import Network.Socket
import System.Console.GetOpt
import System.Environment
import System.Exit
import System.IO
import Text.Printf
import Arc4
import NetSim
import TM
import Target
import Data.IterIO
data Test = Test {
testStream :: [ThreadId] -> Targ -> Targ -> TM Bool
, testTargA :: TM Targ
, testTargB :: TM Targ
, testAtoB :: NetSim ()
, testBtoA :: NetSim ()
, testDescrip :: String
}
tests :: [Test]
tests =
[ Test twoWay spawnTarget spawnOrConnect inumNop inumNop
"Bi-directionally transfer data"
, Test pingPong spawnTarget spawnOrConnect inumNop inumNop
"Ping-pong short messages back and forth"
, Test pingPong spawnTarget spawnOrConnect excessive inumNop
"Ping-pong with test for excessive retransmissions"
, Test oneWay internalTarget spawnOrConnect inumNop inumNop
"Receiving data from reference implementation"
, Test flowControl spawnTarget spawnTarget inumNop inumNop
"Flow control when application doesn't read output"
, Test oneWay spawnTarget internalTarget inumNop inumNop
"Sending data to reference implementation"
, Test twoWay spawnTarget internalTarget inumNop inumNop
"Bi-directionally interoperating with reference"
, Test eofTest spawnTarget spawnOrConnect inumNop inumNop
"Test for proper end-of-file handling"
, Test twoWay spawnOrConnect spawnTarget (garbage 0.05) (garbage 0.05)
"Two-way transfer injecting 5% garbage packets"
, Test oneWay internalTarget spawnOrConnect (reorderer 0.02) inumNop
"Receiving from reference with 2% reordering"
, Test twoWay spawnTarget spawnOrConnect (duplicater 0.05) (duplicater 0.05)
"Two-way transfer with 5% packet duplication"
, Test twoWay spawnTarget spawnOrConnect
(badlength 0.02) (truncater 0.02)
"Two-way transfer with 2% of packets having bad length"
, Test oneWay spawnTarget spawnOrConnect (dropper 0.02) (dropper 0.02)
"One-way transfer with 2% packet loss"
, Test twoWay spawnTarget spawnOrConnect (corrupter 0.02) (corrupter 0.02)
"Two-way transfer with 2% packet corruption"
]
runTest :: Int -> Test -> TM Bool
runTest n test = do
liftIO $ putStr $ printf "TEST %2d: %-60s" n (testDescrip test ++ "...")
a <- testTargA test
b <- testTargB test
threads <- mapM forkTM [
tUSource a |$ testAtoB test .| tUDrain b
, tUSource b |$ testBtoA test .| tUDrain a ]
result <- testStream test threads a b
liftIO $ putStrLn $ if result then "passed" else "FAILED"
return result
data Options = Options{ optSeed :: String
, optDebug :: Bool
, optWin :: Word32
, optTimeout :: Int
, optQuiet :: Bool
, optList :: Bool
, optTest :: Maybe Int
, optGdb :: Bool
, optServer :: Bool
}
defaultOptions :: Options
defaultOptions = Options { optSeed = ""
, optDebug = False
, optWin = 1
, optTimeout = 1000
, optQuiet = True
, optList = False
, optTest = Nothing
, optGdb = False
, optServer = False
}
options :: [OptDescr (Options -> Options)]
options =
[ Option "" ["seed"]
(ReqArg (\s o -> o { optSeed = s }) "SEED")
"set random seed to a specific string"
, Option "d" ["debug"]
(NoArg (\o -> o { optDebug = True }))
"enable debugging support"
, Option "" ["gdb"]
(NoArg (\o -> o { optGdb = True }))
"print PID forked processes so you can attach with gdb"
, Option "v" ["verbose"]
(NoArg (\o -> o { optQuiet = False }))
"show reliable program stderr"
, Option "L" ["list"]
(NoArg (\o -> o { optList = True }))
"list available tests"
, Option "s" ["server"]
(NoArg (\o -> o { optServer = True }))
"test server mode"
, Option "w" ["window"]
(ReqArg (\s o -> o { optWin = read s}) "SIZE")
"specify window size"
, Option "T" ["test"]
(ReqArg (\t o -> o { optTest = Just $ read t}) "#")
"run just one test"
, Option "t" ["timeout"]
(ReqArg (\s o -> o { optTimeout = read s}) "msec")
"retransmission timeout"
]
doOpt :: IO (Options, [String])
doOpt = do
argv <- getArgs
case getOpt RequireOrder options argv of
(o,n,[]) -> return $ (foldl (flip ($)) defaultOptions o, n)
(_,_,errs) -> do
hPutStrLn stderr $ concat errs
usage
usage :: IO a
usage = do
prog <- getProgName
let header = "usage: " ++ prog ++ " [OPTIONS] reliable [reliable OPTIONS]\n"
hPutStrLn stderr $ usageInfo header options
exitFailure
rt :: Int -> TM ()
rt n | n <= 0 = return ()
| otherwise = do bool <- asks tcRnd >>= flip a4RandomBool 0.1
liftIO $ putStrLn $ show bool
rt (n-1)
showTests :: IO ()
showTests = do putStrLn "\nAvailable tests:\n"
st (1::Int) tests
putStrLn ""
where
st _ [] = return ()
st n (t:ts) = do _ <- hPrintf stdout " %2d. %s\n" n (testDescrip t)
st (n+1) ts
runTests :: Int -> [Test] -> TM (Int,Int)
runTests _ [] = return (0, 0)
runTests n (t:ts) = do
result <- runTest n t
(passed, completed) <- runTests (n + 1) ts
return (if result then passed + 1 else passed, completed + 1)
main :: IO ()
main = withSocketsDo $ do
(o, argv) <- doOpt
when (optList o) $ showTests >> exitSuccess
when (null argv) usage
r <- a4RandomNew $ optSeed o
let config' = TestConfig { tcTarget = argv
, tcDebug = optDebug o
, tcRnd = r
, tcWin = optWin o
, tcTimeout = optTimeout o
, tcQuiet = optQuiet o
, tcGdb = optGdb o
, tcServer = Nothing
}
config <- if optServer o
then do server <- runReaderT startServer config'
return config' { tcServer = Just server }
else return config'
hSetBuffering stdout NoBuffering
case optTest o of
_ | optList o -> showTests
Just n | n <= 0 || n > length tests -> showTests
Just n -> do
_ <- runReaderT (runTest n $ tests !! (n - 1)) config
return ()
Nothing -> do
(passed, completed) <- runReaderT (runTests 1 tests) config
putStrLn $ printf "SUMMARY: passed %d/%d" passed completed
|
TC1211/TCP
|
src/3a/tester-src/Examples/reliable/tester.hs
|
Haskell
|
apache-2.0
| 7,264
|
{-# LANGUAGE NoMonomorphismRestriction, FlexibleContexts #-}
-- | Filter for compressing the 'Response' body.
module Happstack.Server.Compression
( compressedResponseFilter
, compressedResponseFilter'
, compressWithFilter
, gzipFilter
, deflateFilter
, identityFilter
, starFilter
, standardEncodingHandlers
) where
import Happstack.Server.Internal.Compression ( compressedResponseFilter
, compressedResponseFilter'
, compressWithFilter
, gzipFilter
, deflateFilter
, identityFilter
, starFilter
, standardEncodingHandlers
)
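-- A hypothetical usage sketch (the route and response body are illustrative,
-- not part of this module): apply the filter before producing a response so
-- the body is gzip/deflate-compressed when the client's Accept-Encoding
-- header allows it.
--
-- > import Happstack.Server
-- > import Happstack.Server.Compression (compressedResponseFilter)
-- >
-- > main :: IO ()
-- > main = simpleHTTP nullConf $ do
-- >   _ <- compressedResponseFilter   -- negotiates the encoding, if any
-- >   ok (toResponse "hello, world")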
|
arybczak/happstack-server
|
src/Happstack/Server/Compression.hs
|
Haskell
|
bsd-3-clause
| 923
|
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.FetchUtils
-- Copyright : (c) David Himmelstrup 2005
-- Duncan Coutts 2011
-- License : BSD-like
--
-- Maintainer : cabal-devel@gmail.com
-- Stability : provisional
-- Portability : portable
--
-- Functions for fetching packages
-----------------------------------------------------------------------------
{-# LANGUAGE RecordWildCards #-}
module Distribution.Client.FetchUtils (
-- * fetching packages
fetchPackage,
isFetched,
checkFetched,
-- ** specifically for repo packages
fetchRepoTarball,
-- * fetching other things
downloadIndex,
) where
import Distribution.Client.Types
import Distribution.Client.HttpUtils
( downloadURI, isOldHackageURI, DownloadResult(..)
, HttpTransport(..), transportCheckHttps, remoteRepoCheckHttps )
import Distribution.Package
( PackageId, packageName, packageVersion )
import Distribution.Simple.Utils
( notice, info, setupMessage )
import Distribution.Text
( display )
import Distribution.Verbosity
( Verbosity )
import Data.Maybe
import System.Directory
( doesFileExist, createDirectoryIfMissing, getTemporaryDirectory )
import System.IO
( openTempFile, hClose )
import System.FilePath
( (</>), (<.>) )
import qualified System.FilePath.Posix as FilePath.Posix
( combine, joinPath )
import Network.URI
( URI(uriPath) )
-- ------------------------------------------------------------
-- * Actually fetch things
-- ------------------------------------------------------------
-- | Returns @True@ if the package has already been fetched
-- or does not need fetching.
--
isFetched :: PackageLocation (Maybe FilePath) -> IO Bool
isFetched loc = case loc of
LocalUnpackedPackage _dir -> return True
LocalTarballPackage _file -> return True
RemoteTarballPackage _uri local -> return (isJust local)
RepoTarballPackage repo pkgid _ -> doesFileExist (packageFile repo pkgid)
checkFetched :: PackageLocation (Maybe FilePath)
-> IO (Maybe (PackageLocation FilePath))
checkFetched loc = case loc of
LocalUnpackedPackage dir ->
return (Just $ LocalUnpackedPackage dir)
LocalTarballPackage file ->
return (Just $ LocalTarballPackage file)
RemoteTarballPackage uri (Just file) ->
return (Just $ RemoteTarballPackage uri file)
RepoTarballPackage repo pkgid (Just file) ->
return (Just $ RepoTarballPackage repo pkgid file)
RemoteTarballPackage _uri Nothing -> return Nothing
RepoTarballPackage repo pkgid Nothing -> do
let file = packageFile repo pkgid
exists <- doesFileExist file
if exists
then return (Just $ RepoTarballPackage repo pkgid file)
else return Nothing
-- | Fetch a package if we don't have it already.
--
fetchPackage :: HttpTransport
-> Verbosity
-> PackageLocation (Maybe FilePath)
-> IO (PackageLocation FilePath)
fetchPackage transport verbosity loc = case loc of
LocalUnpackedPackage dir ->
return (LocalUnpackedPackage dir)
LocalTarballPackage file ->
return (LocalTarballPackage file)
RemoteTarballPackage uri (Just file) ->
return (RemoteTarballPackage uri file)
RepoTarballPackage repo pkgid (Just file) ->
return (RepoTarballPackage repo pkgid file)
RemoteTarballPackage uri Nothing -> do
path <- downloadTarballPackage uri
return (RemoteTarballPackage uri path)
RepoTarballPackage repo pkgid Nothing -> do
local <- fetchRepoTarball transport verbosity repo pkgid
return (RepoTarballPackage repo pkgid local)
where
downloadTarballPackage uri = do
transportCheckHttps transport uri
notice verbosity ("Downloading " ++ show uri)
tmpdir <- getTemporaryDirectory
(path, hnd) <- openTempFile tmpdir "cabal-.tar.gz"
hClose hnd
_ <- downloadURI transport verbosity uri path
return path
-- | Fetch a repo package if we don't have it already.
--
fetchRepoTarball :: HttpTransport -> Verbosity -> Repo -> PackageId -> IO FilePath
fetchRepoTarball transport verbosity repo pkgid = do
fetched <- doesFileExist (packageFile repo pkgid)
if fetched
then do info verbosity $ display pkgid ++ " has already been downloaded."
return (packageFile repo pkgid)
else do setupMessage verbosity "Downloading" pkgid
downloadRepoPackage
where
downloadRepoPackage = case repo of
RepoLocal{..} -> return (packageFile repo pkgid)
RepoRemote{..} -> do
remoteRepoCheckHttps transport repoRemote
let uri = packageURI repoRemote pkgid
dir = packageDir repo pkgid
path = packageFile repo pkgid
createDirectoryIfMissing True dir
_ <- downloadURI transport verbosity uri path
return path
-- | Downloads an index file to [config-dir/packages/serv-id].
--
downloadIndex :: HttpTransport -> Verbosity -> RemoteRepo -> FilePath -> IO DownloadResult
downloadIndex transport verbosity remoteRepo cacheDir = do
remoteRepoCheckHttps transport remoteRepo
let uri = (remoteRepoURI remoteRepo) {
uriPath = uriPath (remoteRepoURI remoteRepo)
`FilePath.Posix.combine` "00-index.tar.gz"
}
path = cacheDir </> "00-index" <.> "tar.gz"
createDirectoryIfMissing True cacheDir
downloadURI transport verbosity uri path
-- ------------------------------------------------------------
-- * Path utilities
-- ------------------------------------------------------------
-- | Generate the full path to the locally cached copy of
-- the tarball for a given @PackageIdentifier@.
--
packageFile :: Repo -> PackageId -> FilePath
packageFile repo pkgid = packageDir repo pkgid
</> display pkgid
<.> "tar.gz"
-- | Generate the full path to the directory where the locally cached copy of
-- the tarball for a given @PackageIdentifier@ is stored.
--
packageDir :: Repo -> PackageId -> FilePath
packageDir repo pkgid = repoLocalDir repo
</> display (packageName pkgid)
</> display (packageVersion pkgid)
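-- An illustrative example (the paths are assumptions, not values computed
-- here): for a repo whose 'repoLocalDir' is
-- @~/.cabal/packages/hackage.haskell.org@ and the package id @foo-1.0@,
-- 'packageDir' yields @~/.cabal/packages/hackage.haskell.org/foo/1.0@ and
-- 'packageFile' yields
-- @~/.cabal/packages/hackage.haskell.org/foo/1.0/foo-1.0.tar.gz@.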
-- | Generate the URI of the tarball for a given package.
--
packageURI :: RemoteRepo -> PackageId -> URI
packageURI repo pkgid | isOldHackageURI (remoteRepoURI repo) =
(remoteRepoURI repo) {
uriPath = FilePath.Posix.joinPath
[uriPath (remoteRepoURI repo)
,display (packageName pkgid)
,display (packageVersion pkgid)
,display pkgid <.> "tar.gz"]
}
packageURI repo pkgid =
(remoteRepoURI repo) {
uriPath = FilePath.Posix.joinPath
[uriPath (remoteRepoURI repo)
,"package"
,display pkgid <.> "tar.gz"]
}
|
martinvlk/cabal
|
cabal-install/Distribution/Client/FetchUtils.hs
|
Haskell
|
bsd-3-clause
| 6,921
|
{-# LANGUAGE Haskell98 #-}
{-# LINE 1 "src/Data/BitUtil.hs" #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE Trustworthy #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.BitUtil
-- Copyright : (c) Clark Gaebel 2012
--                (c) Johan Tibell 2012
-- License : BSD-style
-- Maintainer : libraries@haskell.org
-- Stability : provisional
-- Portability : portable
-----------------------------------------------------------------------------
module Data.BitUtil
( highestBitMask
) where
-- On GHC, include MachDeps.h to get WORD_SIZE_IN_BITS macro.
import Data.Bits ((.|.), xor)
import GHC.Exts (Word(..), Int(..))
import GHC.Prim (uncheckedShiftRL#)
-- The highestBitMask implementation is based on
-- http://graphics.stanford.edu/~seander/bithacks.html#RoundUpPowerOf2
-- which has been put in the public domain.
-- | Return a word where only the highest bit is set.
highestBitMask :: Word -> Word
highestBitMask x1 = let x2 = x1 .|. x1 `shiftRL` 1
x3 = x2 .|. x2 `shiftRL` 2
x4 = x3 .|. x3 `shiftRL` 4
x5 = x4 .|. x4 `shiftRL` 8
x6 = x5 .|. x5 `shiftRL` 16
x7 = x6 .|. x6 `shiftRL` 32
in x7 `xor` (x7 `shiftRL` 1)
{-# INLINE highestBitMask #-}
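-- A small worked example: for 18 (binary 10010) the shift-or cascade fills in
-- every bit below the highest one, giving 31 (binary 11111), and the final
-- @xor@ with its right shift leaves only the top bit:
--
-- > highestBitMask 18 == 16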
-- Right and left logical shifts.
shiftRL :: Word -> Int -> Word
{--------------------------------------------------------------------
GHC: use unboxing to get @shiftRL@ inlined.
--------------------------------------------------------------------}
shiftRL (W# x) (I# i) = W# (uncheckedShiftRL# x i)
{-# INLINE shiftRL #-}
|
phischu/fragnix
|
tests/packages/scotty/Data.BitUtil.hs
|
Haskell
|
bsd-3-clause
| 2,131
|
{-# LANGUAGE ForeignFunctionInterface, JavaScriptFFI #-}
{- | Haskell-specific web worker API. The URL is expected to point to a script
that is the same as the caller, or at least a script that has been
produced by GHCJS and contains the same static values.
-}
module JavaScript.Web.Worker.Haskell ( HaskellWorker
, terminate
, call
) where
import qualified JavaScript.Web.Worker as W
data HaskellWorker = HaskellWorker W.Worker
create :: JSString -> IO HaskellWorker
create uri = fmap HaskellWorker (W.create uri)
{-# INLINE create #-}
-- fixme stop all waiters?
terminate :: HaskellWorker -> IO ()
terminate (HaskellWorker w) = W.terminate w
{-# INLINE terminate #-}
-- call :: SomethingSomething -> HaskellWorker -> IO a
call hw = undefined
{-# INLINE call #-}
|
tavisrudd/ghcjs-base
|
JavaScript/Web/Worker/Haskell.hs
|
Haskell
|
mit
| 895
|
-----------------------------------------------------------------------------
-- |
-- Copyright : (c) 2006-2014 Duncan Coutts
-- License : BSD-style
--
-- Maintainer : duncan@community.haskell.org
--
-- Compression and decompression of data streams in the gzip format.
--
-- The format is described in detail in RFC #1952:
-- <http://www.ietf.org/rfc/rfc1952.txt>
--
-- See also the zlib home page: <http://zlib.net/>
--
-----------------------------------------------------------------------------
module Codec.Compression.GZip (
-- | This module provides pure functions for compressing and decompressing
-- streams of data in the gzip format and represented by lazy 'ByteString's.
-- This makes it easy to use either in memory or with disk or network IO.
--
-- For example a simple gzip compression program is just:
--
-- > import qualified Data.ByteString.Lazy as ByteString
-- > import qualified Codec.Compression.GZip as GZip
-- >
-- > main = ByteString.interact GZip.compress
--
-- Or you could lazily read in and decompress a @.gz@ file using:
--
-- > content <- fmap GZip.decompress (readFile file)
--
-- * Simple compression and decompression
compress,
decompress,
-- * Extended api with control over compression parameters
compressWith,
decompressWith,
CompressParams(..), defaultCompressParams,
DecompressParams(..), defaultDecompressParams,
-- ** The compression parameter types
CompressionLevel(..),
defaultCompression,
noCompression,
bestSpeed,
bestCompression,
compressionLevel,
Method(..),
deflateMethod,
WindowBits(..),
defaultWindowBits,
windowBits,
MemoryLevel(..),
defaultMemoryLevel,
minMemoryLevel,
maxMemoryLevel,
memoryLevel,
CompressionStrategy(..),
defaultStrategy,
filteredStrategy,
huffmanOnlyStrategy,
) where
import Data.ByteString.Lazy (ByteString)
import qualified Codec.Compression.Zlib.Internal as Internal
import Codec.Compression.Zlib.Internal hiding (compress, decompress)
-- | Decompress a stream of data in the gzip format.
--
-- There are a number of errors that can occur. In each case an exception will
-- be thrown. The possible error conditions are:
--
-- * if the stream does not start with a valid gzip header
--
-- * if the compressed stream is corrupted
--
-- * if the compressed stream ends prematurely
--
-- Note that the decompression is performed /lazily/. Errors in the data stream
-- may not be detected until the end of the stream is demanded (since it is
-- only at the end that the final checksum can be checked). If this is
-- important to you, you must make sure to consume the whole decompressed
-- stream before doing any IO action that depends on it.
--
decompress :: ByteString -> ByteString
decompress = decompressWith defaultDecompressParams
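-- A minimal sketch of the advice above (the file name is an assumption):
-- force the whole decompressed stream before acting on it, so that a corrupt
-- stream or bad checksum is reported up front rather than mid-way through
-- lazy consumption.
--
-- > import qualified Data.ByteString.Lazy as BL
-- > import qualified Codec.Compression.GZip as GZip
-- > import Control.Exception (evaluate)
-- >
-- > main :: IO ()
-- > main = do
-- >   content <- GZip.decompress <$> BL.readFile "input.gz"
-- >   _ <- evaluate (BL.length content)  -- demands the full stream
-- >   BL.putStr content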
-- | Like 'decompress' but with the ability to specify various decompression
-- parameters. Typical usage:
--
-- > decompressWith defaultDecompressParams { ... }
--
decompressWith :: DecompressParams -> ByteString -> ByteString
decompressWith = Internal.decompress gzipFormat
-- | Compress a stream of data into the gzip format.
--
-- This uses the default compression parameters. In particular it uses the
-- default compression level which favours a higher compression ratio over
-- compression speed, though it does not use the maximum compression level.
--
-- Use 'compressWith' to adjust the compression level or other compression
-- parameters.
--
compress :: ByteString -> ByteString
compress = compressWith defaultCompressParams
-- | Like 'compress' but with the ability to specify various compression
-- parameters. Typical usage:
--
-- > compressWith defaultCompressParams { ... }
--
-- In particular you can set the compression level:
--
-- > compressWith defaultCompressParams { compressLevel = BestCompression }
--
compressWith :: CompressParams -> ByteString -> ByteString
compressWith = Internal.compress gzipFormat
|
CloudI/CloudI
|
src/api/haskell/external/zlib-0.6.2.1/Codec/Compression/GZip.hs
|
Haskell
|
mit
| 3,981
|
{-# LANGUAGE BangPatterns, CPP #-}
-- | File descriptor cache to avoid locks in the kernel.
module Network.Wai.Handler.Warp.FdCache (
withFdCache
, Fd
, Refresh
#ifndef WINDOWS
, openFile
, closeFile
, setFileCloseOnExec
#endif
) where
#ifndef WINDOWS
#if __GLASGOW_HASKELL__ < 709
import Control.Applicative ((<$>), (<*>))
#endif
import Control.Exception (bracket)
import Network.Wai.Handler.Warp.IORef
import Network.Wai.Handler.Warp.MultiMap
import Control.Reaper
import System.Posix.IO (openFd, OpenFileFlags(..), defaultFileFlags, OpenMode(ReadOnly), closeFd, FdOption(CloseOnExec), setFdOption)
#endif
import System.Posix.Types (Fd)
----------------------------------------------------------------
type Hash = Int
-- | An action to activate a Fd cache entry.
type Refresh = IO ()
getFdNothing :: Hash -> FilePath -> IO (Maybe Fd, Refresh)
getFdNothing _ _ = return (Nothing, return ())
----------------------------------------------------------------
-- | Creating 'MutableFdCache' and executing the action in the second
-- argument. The first argument is a cache duration in seconds.
withFdCache :: Int -> ((Hash -> FilePath -> IO (Maybe Fd, Refresh)) -> IO a) -> IO a
#ifdef WINDOWS
withFdCache _ action = action getFdNothing
#else
withFdCache 0 action = action getFdNothing
withFdCache duration action = bracket (initialize duration)
terminate
(\mfc -> action (getFd mfc))
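-- A hypothetical usage sketch ('serveFromFd' and 'fallbackWithoutCache' are
-- assumed helpers, and the hash is shown as a literal for brevity): look up a
-- cached descriptor, serve from it, then run the returned 'Refresh' action so
-- the reaper keeps the entry alive.
--
-- > withFdCache 10 $ \getter -> do
-- >   (mfd, fresh) <- getter 0 "/var/www/index.html"
-- >   case mfd of
-- >     Just fd -> serveFromFd fd >> fresh
-- >     Nothing -> fallbackWithoutCache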
----------------------------------------------------------------
data Status = Active | Inactive
newtype MutableStatus = MutableStatus (IORef Status)
status :: MutableStatus -> IO Status
status (MutableStatus ref) = readIORef ref
newActiveStatus :: IO MutableStatus
newActiveStatus = MutableStatus <$> newIORef Active
refresh :: MutableStatus -> Refresh
refresh (MutableStatus ref) = writeIORef ref Active
inactive :: MutableStatus -> IO ()
inactive (MutableStatus ref) = writeIORef ref Inactive
----------------------------------------------------------------
data FdEntry = FdEntry !FilePath !Fd !MutableStatus
openFile :: FilePath -> IO Fd
openFile path = do
fd <- openFd path ReadOnly Nothing defaultFileFlags{nonBlock=False}
setFileCloseOnExec fd
return fd
closeFile :: Fd -> IO ()
closeFile = closeFd
newFdEntry :: FilePath -> IO FdEntry
newFdEntry path = FdEntry path <$> openFile path <*> newActiveStatus
setFileCloseOnExec :: Fd -> IO ()
setFileCloseOnExec fd = setFdOption fd CloseOnExec True
----------------------------------------------------------------
type FdCache = MMap FdEntry
-- | Mutable Fd cacher.
newtype MutableFdCache = MutableFdCache (Reaper FdCache (Hash, FdEntry))
fdCache :: MutableFdCache -> IO FdCache
fdCache (MutableFdCache reaper) = reaperRead reaper
look :: MutableFdCache -> FilePath -> Hash -> IO (Maybe FdEntry)
look mfc path key = searchWith key check <$> fdCache mfc
where
check (FdEntry path' _ _) = path == path'
----------------------------------------------------------------
-- The first argument is a cache duration in seconds.
initialize :: Int -> IO MutableFdCache
initialize duration = MutableFdCache <$> mkReaper settings
where
settings = defaultReaperSettings {
reaperAction = clean
, reaperDelay = duration
, reaperCons = uncurry insert
, reaperNull = isEmpty
, reaperEmpty = empty
}
clean :: FdCache -> IO (FdCache -> FdCache)
clean old = do
new <- pruneWith old prune
return $ merge new
where
prune (FdEntry _ fd mst) = status mst >>= act
where
act Active = inactive mst >> return True
act Inactive = closeFd fd >> return False
----------------------------------------------------------------
terminate :: MutableFdCache -> IO ()
terminate (MutableFdCache reaper) = do
!t <- reaperStop reaper
mapM_ closeIt $ toList t
where
closeIt (FdEntry _ fd _) = closeFd fd
----------------------------------------------------------------
-- | Getting 'Fd' and 'Refresh' from the mutable Fd cacher.
getFd :: MutableFdCache -> Hash -> FilePath -> IO (Maybe Fd, Refresh)
getFd mfc@(MutableFdCache reaper) h path = look mfc path h >>= get
where
get Nothing = do
ent@(FdEntry _ fd mst) <- newFdEntry path
reaperAdd reaper (h, ent)
return (Just fd, refresh mst)
get (Just (FdEntry _ fd mst)) = do
refresh mst
return (Just fd, refresh mst)
#endif
|
utdemir/wai
|
warp/Network/Wai/Handler/Warp/FdCache.hs
|
Haskell
|
mit
| 4,467
|
{-# CFILES a.c #-}
foreign import ccall unsafe "foo" foo :: Int -> Int
main = print $ foo 6
|
dcreager/cabal
|
tests/systemTests/exeWithC/test.hs
|
Haskell
|
bsd-3-clause
| 93
|
module PatBindIn1 where
main :: Int
main = foo 3
foo :: Int -> Int
foo x
= (h + t) + (snd tup)
where
tup :: (Int, Int)
h :: Int
t :: Int
tup@(h, t) = head $ (zip [1 .. 10] [3 .. 15])
tup :: (Int, Int)
h :: Int
t :: Int
tup@(h, t) = head $ (zip [1 .. 10] [3 .. 15])
|
mpickering/HaRe
|
old/testing/demote/PatBindIn1AST.hs
|
Haskell
|
bsd-3-clause
| 310
|
-- From comment:76 in Trac #9858
-- This exploit still works in GHC 7.10.1.
-- By Shachaf Ben-Kiki, Ørjan Johansen and Nathan van Doorn
{-# LANGUAGE Safe #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE ImpredicativeTypes #-}
module T9858a where
import Data.Typeable
type E = (:~:)
type PX = Proxy (((),()) => ())
type PY = Proxy (() -> () -> ())
data family F p a b
newtype instance F a b PX = ID (a -> a)
newtype instance F a b PY = UC (a -> b)
{-# NOINLINE ecast #-}
ecast :: E p q -> f p -> f q
ecast Refl = id
supercast :: F a b PX -> F a b PY
supercast = case cast e of
Just e' -> ecast e'
where
e = Refl
e :: E PX PX
uc :: a -> b
uc = case supercast (ID id) of UC f -> f
|
urbanslug/ghc
|
testsuite/tests/typecheck/should_fail/T9858a.hs
|
Haskell
|
bsd-3-clause
| 733
|
module A (T,t) where
data T = T
t = T
instance Eq T where
t1 == t2 = True
|
hferreiro/replay
|
testsuite/tests/driver/recomp008/A1.hs
|
Haskell
|
bsd-3-clause
| 79
|
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE DataKinds #-}
module Control.Monad.Apiary.Filter.Capture
( path, fetch, fetch', anyPath, restPath
) where
import Control.Monad.Apiary.Internal (Filter, Filter', focus)
import Control.Monad.Apiary.Filter.Internal
(Doc(DocPath, DocFetch, DocAny, DocRest))
import GHC.TypeLits.Compat(KnownSymbol, symbolVal)
import Data.Proxy.Compat(Proxy(..))
import Data.Apiary.Param(Path, pathRep, readPathAs)
import Network.Routing.Dict(KV((:=)))
import qualified Network.Routing.Dict as Dict
import qualified Network.Routing as R
import qualified Data.Text as T
import Text.Blaze.Html(Html)
-- | Check that the first path segment matches and drill down. Since 0.11.0.
path :: Monad actM => T.Text -> Filter' exts actM m
path p = focus (DocPath p) Nothing (R.exact p)
-- | Capture the first path segment as a typed parameter and drill down. Since 0.11.0.
fetch' :: (k Dict.</ prms, KnownSymbol k, Path p, Monad actM) => proxy k -> proxy' p -> Maybe Html
-> Filter exts actM m prms (k ':= p ': prms)
fetch' k p h = focus (DocFetch (T.pack $ symbolVal k) (pathRep p) h) Nothing $ R.fetch k (readPathAs p)
fetch :: forall proxy k p exts prms actM m. (k Dict.</ prms, KnownSymbol k, Path p, Monad actM)
=> proxy (k ':= p) -> Maybe Html
-> Filter exts actM m prms (k ':= p ': prms)
fetch _ h = fetch' k p h
where
k = Proxy :: Proxy k
p = Proxy :: Proxy p
anyPath :: (Monad m, Monad actM) => Filter' exts actM m
anyPath = focus DocAny Nothing R.any
restPath :: (k Dict.</ prms, KnownSymbol k, Monad m, Monad actM)
=> proxy k -> Maybe Html
-> Filter exts actM m prms (k ':= [T.Text] ': prms)
restPath k h = focus (DocRest (T.pack $ symbolVal k) h) Nothing (R.rest k)
|
philopon/apiary
|
src/Control/Monad/Apiary/Filter/Capture.hs
|
Haskell
|
mit
| 1,884
|
{-# htermination readsPrec :: Int -> String -> [(Int,String)] #-}
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/full_haskell/Prelude_readsPrec_5.hs
|
Haskell
|
mit
| 66
|
{-# htermination (mapM :: (b -> Maybe a) -> (List b) -> Maybe (List a)) #-}
import qualified Prelude
data MyBool = MyTrue | MyFalse
data List a = Cons a (List a) | Nil
data Maybe a = Nothing | Just a ;
map :: (b -> a) -> (List b) -> (List a);
map f Nil = Nil;
map f (Cons x xs) = Cons (f x) (map f xs);
pt :: (c -> b) -> (a -> c) -> a -> b;
pt f g x = f (g x);
gtGtEsMaybe :: Maybe b -> (b -> Maybe c) -> Maybe c
gtGtEsMaybe (Just x) k = k x;
gtGtEsMaybe Nothing k = Nothing;
returnMaybe :: b -> Maybe b
returnMaybe = Just;
sequence0 x xs = returnMaybe (Cons x xs);
sequence1 cs x = gtGtEsMaybe (sequence cs) (sequence0 x);
sequence Nil = returnMaybe Nil;
sequence (Cons c cs) = gtGtEsMaybe c (sequence1 cs);
mapM f = pt sequence (map f);
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/basic_haskell/mapM_1.hs
|
Haskell
|
mit
| 783
|
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Kafka.IntegrationSpec
where
import Control.Concurrent.MVar (newEmptyMVar, putMVar, takeMVar)
import Control.Monad (forM, forM_)
import Control.Monad.Loops
import Data.Either
import Data.Map (fromList)
import Data.Monoid ((<>))
import Kafka.Consumer
import Kafka.Metadata
import Kafka.Producer
import Kafka.TestEnv
import Test.Hspec
import qualified Data.ByteString as BS
{- HLINT ignore "Redundant do" -}
spec :: Spec
spec = do
describe "Per-message commit" $ do
specWithProducer "Run producer" $ do
it "1. sends 2 messages to test topic" $ \prod -> do
res <- sendMessages (testMessages testTopic) prod
res `shouldBe` Right ()
specWithConsumer "Consumer with per-message commit" consumerProps $ do
it "2. should receive 2 messages" $ \k -> do
res <- receiveMessages k
length <$> res `shouldBe` Right 2
comRes <- forM res . mapM $ commitOffsetMessage OffsetCommit k
comRes `shouldBe` Right [Nothing, Nothing]
specWithProducer "Run producer again" $ do
it "3. sends 2 messages to test topic" $ \prod -> do
res <- sendMessages (testMessages testTopic) prod
res `shouldBe` Right ()
specWithConsumer "Consumer after per-message commit" consumerProps $ do
it "4. should receive 2 messages again" $ \k -> do
res <- receiveMessages k
comRes <- commitAllOffsets OffsetCommit k
length <$> res `shouldBe` Right 2
comRes `shouldBe` Nothing
describe "Store offsets" $ do
specWithProducer "Run producer" $ do
it "1. sends 2 messages to test topic" $ \prod -> do
res <- sendMessages (testMessages testTopic) prod
res `shouldBe` Right ()
specWithConsumer "Consumer with no auto store" consumerPropsNoStore $ do
it "2. should receive 2 messages without storing" $ \k -> do
res <- receiveMessages k
length <$> res `shouldBe` Right 2
comRes <- commitAllOffsets OffsetCommit k
comRes `shouldBe` Just (KafkaResponseError RdKafkaRespErrNoOffset)
specWithProducer "Run producer again" $ do
it "3. sends 2 messages to test topic" $ \prod -> do
res <- sendMessages (testMessages testTopic) prod
res `shouldBe` Right ()
specWithConsumer "Consumer after commit without store" consumerPropsNoStore $ do
it "4. should receive 4 messages and store them" $ \k -> do
res <- receiveMessages k
storeRes <- forM res . mapM $ storeOffsetMessage k
comRes <- commitAllOffsets OffsetCommit k
length <$> storeRes `shouldBe` Right 4
length <$> res `shouldBe` Right 4
comRes `shouldBe` Nothing
specWithProducer "Run producer again" $ do
it "5. sends 2 messages to test topic" $ \prod -> do
res <- sendMessages (testMessages testTopic) prod
res `shouldBe` Right ()
specWithConsumer "Consumer after commit with store" consumerPropsNoStore $ do
it "6. should receive 2 messages" $ \k -> do
res <- receiveMessages k
storeRes <- forM res $ mapM (storeOffsetMessage k)
comRes <- commitAllOffsets OffsetCommit k
length <$> res `shouldBe` Right 2
length <$> storeRes `shouldBe` Right 2
comRes `shouldBe` Nothing
specWithKafka "Part 3 - Consume after committing stored offsets" consumerPropsNoStore $ do
it "5. sends 2 messages to test topic" $ \(_, prod) -> do
res <- sendMessages (testMessages testTopic) prod
res `shouldBe` Right ()
it "6. should receive 2 messages" $ \(k, _) -> do
res <- receiveMessages k
storeRes <- forM res $ mapM (storeOffsetMessage k)
comRes <- commitAllOffsets OffsetCommit k
length <$> res `shouldBe` Right 2
length <$> storeRes `shouldBe` Right 2
comRes `shouldBe` Nothing
describe "Kafka.IntegrationSpec" $ do
specWithProducer "Run producer" $ do
it "sends messages to test topic" $ \prod -> do
res <- sendMessages (testMessages testTopic) prod
res `shouldBe` Right ()
it "sends messages with callback to test topic" $ \prod -> do
var <- newEmptyMVar
let
msg = ProducerRecord
{ prTopic = "callback-topic"
, prPartition = UnassignedPartition
, prKey = Nothing
, prValue = Just "test from producer"
}
res <- produceMessage' prod msg (putMVar var)
res `shouldBe` Right ()
callbackRes <- flushProducer prod *> takeMVar var
callbackRes `shouldSatisfy` \case
DeliverySuccess _ _ -> True
DeliveryFailure _ _ -> False
NoMessageError _ -> False
specWithConsumer "Run consumer with async polling" (consumerProps <> groupId (makeGroupId "async")) runConsumerSpec
specWithConsumer "Run consumer with sync polling" (consumerProps <> groupId (makeGroupId "sync") <> callbackPollMode CallbackPollModeSync) runConsumerSpec
describe "Kafka.Consumer.BatchSpec" $ do
specWithConsumer "Batch consumer" (consumerProps <> groupId "batch-consumer") $ do
it "should consume first batch" $ \k -> do
res <- pollMessageBatch k (Timeout 1000) (BatchSize 5)
length res `shouldBe` 5
forM_ res (`shouldSatisfy` isRight)
it "should consume second batch with not enough messages" $ \k -> do
res <- pollMessageBatch k (Timeout 1000) (BatchSize 50)
let res' = Prelude.filter (/= Left (KafkaResponseError RdKafkaRespErrPartitionEof)) res
length res' `shouldSatisfy` (< 50)
forM_ res' (`shouldSatisfy` isRight)
it "should consume empty batch when there are no messages" $ \k -> do
res <- pollMessageBatch k (Timeout 1000) (BatchSize 50)
length res `shouldBe` 0
----------------------------------------------------------------------------------------------------------------
data ReadState = Skip | Read
receiveMessages :: KafkaConsumer -> IO (Either KafkaError [ConsumerRecord (Maybe BS.ByteString) (Maybe BS.ByteString)])
receiveMessages kafka =
Right . rights <$> allMessages
where
allMessages =
unfoldrM (\s -> do
msg <- pollMessage kafka (Timeout 1000)
case (s, msg) of
(Skip, Left _) -> pure $ Just (msg, Skip)
(_, Right msg') -> pure $ Just (Right msg', Read)
(Read, _) -> pure Nothing
) Skip
testMessages :: TopicName -> [ProducerRecord]
testMessages t =
[ ProducerRecord t UnassignedPartition Nothing (Just "test from producer")
, ProducerRecord t UnassignedPartition (Just "key") (Just "test from producer (with key)")
]
sendMessages :: [ProducerRecord] -> KafkaProducer -> IO (Either KafkaError ())
sendMessages msgs prod =
Right <$> (forM_ msgs (produceMessage prod) >> flushProducer prod)
runConsumerSpec :: SpecWith KafkaConsumer
runConsumerSpec = do
it "should receive messages" $ \k -> do
res <- receiveMessages k
let msgsLen = either (const 0) length res
msgsLen `shouldSatisfy` (> 0)
let timestamps = crTimestamp <$> either (const []) id res
forM_ timestamps $ \ts ->
ts `shouldNotBe` NoTimestamp
comRes <- commitAllOffsets OffsetCommit k
comRes `shouldBe` Nothing
it "should get committed" $ \k -> do
res <- committed k (Timeout 1000) [(testTopic, PartitionId 0)]
res `shouldSatisfy` isRight
it "should get position" $ \k -> do
res <- position k [(testTopic, PartitionId 0)]
res `shouldSatisfy` isRight
it "should get watermark offsets" $ \k -> do
res <- sequence <$> watermarkOffsets k (Timeout 1000) testTopic
res `shouldSatisfy` isRight
length <$> res `shouldBe` (Right 1)
it "should return subscription" $ \k -> do
res <- subscription k
res `shouldSatisfy` isRight
length <$> res `shouldBe` Right 1
it "should return assignment" $ \k -> do
res <- assignment k
res `shouldSatisfy` isRight
res `shouldBe` Right (fromList [(testTopic, [PartitionId 0])])
it "should return all topics metadata" $ \k -> do
res <- allTopicsMetadata k (Timeout 1000)
res `shouldSatisfy` isRight
let filterUserTopics m = m { kmTopics = filter (\t -> topicType (tmTopicName t) == User) (kmTopics m) }
let res' = fmap filterUserTopics res
length . kmBrokers <$> res' `shouldBe` Right 1
let topicsLen = either (const 0) (length . kmTopics) res'
let hasTopic = either (const False) (any (\t -> tmTopicName t == testTopic) . kmTopics) res'
topicsLen `shouldSatisfy` (>0)
hasTopic `shouldBe` True
it "should return topic metadata" $ \k -> do
res <- topicMetadata k (Timeout 1000) testTopic
res `shouldSatisfy` isRight
length . kmBrokers <$> res `shouldBe` Right 1
length . kmTopics <$> res `shouldBe` Right 1
it "should describe all consumer groups" $ \k -> do
res <- allConsumerGroupsInfo k (Timeout 1000)
let groups = either (const []) (fmap giGroup) res
let prefixedGroups = filter isTestGroupId groups
let resLen = length prefixedGroups
resLen `shouldSatisfy` (>0)
-- fmap giGroup <$> res `shouldBe` Right [testGroupId]
it "should describe a given consumer group" $ \k -> do
res <- consumerGroupInfo k (Timeout 1000) testGroupId
fmap giGroup <$> res `shouldBe` Right [testGroupId]
it "should describe non-existent consumer group" $ \k -> do
res <- consumerGroupInfo k (Timeout 1000) "does-not-exist"
res `shouldBe` Right []
it "should read topic offsets for time" $ \k -> do
res <- topicOffsetsForTime k (Timeout 1000) (Millis 1904057189508) testTopic
res `shouldSatisfy` isRight
fmap tpOffset <$> res `shouldBe` Right [PartitionOffsetEnd]
it "should seek and return no error" $ \k -> do
res <- seek k (Timeout 1000) [TopicPartition testTopic (PartitionId 0) (PartitionOffset 1)]
res `shouldBe` Nothing
msg <- pollMessage k (Timeout 1000)
crOffset <$> msg `shouldBe` Right (Offset 1)
it "should seek to the beginning" $ \k -> do
res <- seek k (Timeout 1000) [TopicPartition testTopic (PartitionId 0) PartitionOffsetBeginning]
res `shouldBe` Nothing
msg <- pollMessage k (Timeout 1000)
crOffset <$> msg `shouldBe` Right (Offset 0)
it "should seek to the end" $ \k -> do
res <- seek k (Timeout 1000) [TopicPartition testTopic (PartitionId 0) PartitionOffsetEnd]
res `shouldBe` Nothing
msg <- pollMessage k (Timeout 1000)
crOffset <$> msg `shouldSatisfy` (\x ->
x == Left (KafkaResponseError RdKafkaRespErrPartitionEof)
|| x == Left (KafkaResponseError RdKafkaRespErrTimedOut))
it "should respect out-of-bound offsets (invalid offset)" $ \k -> do
res <- seek k (Timeout 1000) [TopicPartition testTopic (PartitionId 0) PartitionOffsetInvalid]
res `shouldBe` Nothing
msg <- pollMessage k (Timeout 1000)
crOffset <$> msg `shouldBe` Right (Offset 0)
it "should respect out-of-bound offsets (huge offset)" $ \k -> do
res <- seek k (Timeout 1000) [TopicPartition testTopic (PartitionId 0) (PartitionOffset 123456)]
res `shouldBe` Nothing
msg <- pollMessage k (Timeout 1000)
crOffset <$> msg `shouldBe` Right (Offset 0)
|
haskell-works/kafka-client
|
tests-it/Kafka/IntegrationSpec.hs
|
Haskell
|
mit
| 12,117
|
module Handler.AnalogOutSpec (spec) where
import TestImport
spec :: Spec
spec = withApp $ do
describe "getAnalogOutR" $ do
error "Spec not implemented: getAnalogOutR"
|
aufheben/lambda-arduino
|
test/Handler/AnalogOutSpec.hs
|
Haskell
|
mit
| 183
|
{-# htermination addToFM :: FiniteMap () b -> () -> b -> FiniteMap () b #-}
import FiniteMap
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/full_haskell/FiniteMap_addToFM_2.hs
|
Haskell
|
mit
| 94
|
-- ex3.1.hs
swapTriple :: (a,b,c) -> (b,c,a)
swapTriple (x,y,z) = (y,z,x)
duplicate :: a -> (a, a)
duplicate x = (x, x)
nothing :: a -> Maybe a
nothing _ = Nothing
index :: [a] -> [ (Int, a) ]
index [] = []
index [x] = [(0,x)]
index (x:xs) = let indexed@((n,_):_) = index xs
in (n+1,x):indexed
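-- For example, evaluating by hand: index "abc" == [(2,'a'),(1,'b'),(0,'c')],
-- i.e. the head of the result carries the largest index.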
maybeA :: [a] -> Char
maybeA [] = 'a'
|
hnfmr/beginning_haskell
|
ex3.1.hs
|
Haskell
|
mit
| 352
|
module Proteome.Grep.Parse where
import Chiasma.Data.Ident (generateIdent, identText)
import Data.Attoparsec.Text (parseOnly)
import qualified Data.Text as Text (strip, stripPrefix)
import Ribosome.Menu.Data.MenuItem (MenuItem(MenuItem))
import Text.Parser.Char (anyChar, char, noneOf)
import Text.Parser.Combinators (manyTill)
import Text.Parser.Token (TokenParsing, natural)
import Proteome.Data.GrepOutputLine (GrepOutputLine(GrepOutputLine))
import Proteome.Grep.Syntax (lineNumber)
grepParser ::
TokenParsing m =>
m GrepOutputLine
grepParser =
GrepOutputLine <$> path <*> (subtract 1 <$> number) <*> optional number <*> (toText <$> many anyChar)
where
path =
toText <$> manyTill (noneOf ":") (char ':')
number =
(fromInteger <$> natural) <* char ':'
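-- An illustrative parse (the input line is an assumption): feeding
-- "src/Foo.hs:12:3:bar baz" to 'grepParser' yields
-- @GrepOutputLine "src/Foo.hs" 11 (Just 3) "bar baz"@; the line number is
-- stored zero-based because of the @subtract 1@ above.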
formatGrepLine :: Text -> GrepOutputLine -> Text
formatGrepLine cwd (GrepOutputLine path line col text') =
relativePath <> " " <> lineNumber <> " " <> show line <> ":" <> show (fromMaybe 1 col) <> " " <> Text.strip text'
where
relativePath =
fromMaybe path (Text.stripPrefix (cwd <> "/") path)
parseGrepOutput ::
MonadRibo m =>
Text ->
Text ->
m (Maybe (MenuItem GrepOutputLine))
parseGrepOutput cwd =
item . parseOnly grepParser
where
item (Right a) = do
ident <- identText <$> generateIdent
return (Just (convert ident a))
item (Left err) =
Nothing <$ logDebug ("parsing grep output failed: " <> err)
convert _ file =
MenuItem file text' text'
where
text' =
formatGrepLine cwd file
|
tek/proteome
|
packages/proteome/lib/Proteome/Grep/Parse.hs
|
Haskell
|
mit
| 1,558
|
module Language.Dash.VM.VMSpec where
import Data.Word
import Language.Dash.Asm.Assembler
import Language.Dash.IR.Opcode
import Language.Dash.IR.Data
import Language.Dash.VM.DataEncoding
import Language.Dash.VM.VM
import Language.Dash.Limits
import Test.Hspec
import Test.QuickCheck
runProg :: [[Opcode]] -> IO Word32
runProg = runProgTbl []
runProgTbl :: [Word32] -> [[Opcode]] -> IO Word32
runProgTbl tbl prog = do
(value, _, _) <- execute asm tbl' []
return value
where
(asm, tbl', _) =
let encProg = map EncodedFunction prog in
let resultOrError = assembleWithEncodedConstTable encProg tbl (fromIntegral.constAddrToInt) [] in
case resultOrError of
Left err -> error $ show err -- TODO do this without an error
Right result -> result
spec :: Spec
spec = do
describe "Virtual Machine" $ do
it "loads a number into a register" $ do
let prog = [[ OpcLoadI 0 55,
OpcRet 0 ]]
result <- runProg prog
decodedResult <- decode result [] []
decodedResult `shouldBe` (VMNumber 55)
it "adds two numbers" $ do
let prog = [[ OpcLoadI 1 5,
OpcLoadI 2 32,
OpcAdd 0 1 2,
OpcRet 0 ]]
result <- runProg prog
decodedResult <- decode result [] []
decodedResult `shouldBe` (VMNumber 37)
it "moves a register" $ do
let prog = [[ OpcLoadI 2 37,
OpcMove 0 2,
OpcRet 0 ]]
result <- runProg prog
decodedResult <- decode result [] []
decodedResult `shouldBe` (VMNumber 37)
it "directly calls a function" $ do
let prog = [[ OpcLoadI 1 15,
OpcLoadI 2 23,
OpcAdd 4 1 2,
OpcLoadF 3 (mkFuncAddr 1),
OpcSetArg 0 4 0,
OpcAp 0 3 1,
OpcRet 0 ], [
OpcFunHeader 1,
OpcLoadI 1 100,
OpcAdd 2 0 1,
OpcRet 2]]
result <- runProg prog
decodedResult <- decode result [] []
decodedResult `shouldBe` (VMNumber 138)
it "calls a closure downwards" $ do
let prog = [[ OpcLoadF 2 (mkFuncAddr 2),
OpcLoadI 3 80,
OpcSetArg 0 3 0,
OpcPartAp 2 2 1,
OpcLoadF 1 (mkFuncAddr 1),
OpcSetArg 0 2 0,
OpcAp 0 1 1,
OpcRet 0 ], [
-- fun1
OpcFunHeader 2,
OpcLoadI 2 115,
OpcLoadI 3 23,
OpcAdd 2 2 3,
OpcSetArg 0 2 0,
OpcGenAp 2 0 1,
OpcRet 2 ], [
-- fun2
-- fun_header 1 1, -- (* 1 closed over value, 1 parameter *)
OpcFunHeader 2,
OpcSub 2 1 0,
OpcRet 2 ]]
result <- runProg prog
decodedResult <- decode result [] []
decodedResult `shouldBe` (VMNumber 58) -- 115 + 23 - 80
it "calls a closure upwards" $ do
let prog = [[ OpcLoadF 1 (mkFuncAddr 1),
OpcAp 1 1 0,
OpcLoadI 2 80,
OpcSetArg 0 2 0,
OpcGenAp 0 1 1,
OpcRet 0 ], [
-- fun 1
OpcFunHeader 1,
OpcLoadF 1 (mkFuncAddr 2),
OpcLoadI 2 24,
OpcSetArg 0 2 0,
OpcPartAp 0 1 1,
OpcRet 0 ], [
-- fun 2
OpcFunHeader 2,
OpcSub 2 1 0,
OpcRet 2 ]]
result <- runProg prog
decodedResult <- decode result [] []
decodedResult `shouldBe` (VMNumber 56) -- 80 - 24
it "modifies a closure" $ do
let prog = [[ OpcLoadF 1 (mkFuncAddr 1),
OpcAp 1 1 0,
OpcLoadI 2 80,
OpcSetArg 0 2 0,
OpcGenAp 0 1 1,
OpcRet 0 ], [
-- fun 1
OpcFunHeader 1,
OpcLoadF 1 (mkFuncAddr 2),
OpcLoadI 2 77,
OpcLoadI 3 55,
OpcSetArg 0 2 1,
OpcPartAp 0 1 2,
OpcLoadI 7 33,
OpcSetClVal 0 7 1,
OpcRet 0 ], [
-- fun 2
OpcFunHeader 3,
OpcSub 3 0 1,
OpcRet 3 ]]
result <- runProg prog
decodedResult <- decode result [] []
decodedResult `shouldBe` (VMNumber 44) -- 77 - 33
{-
it "applies a number tag to a value" $ do
let original = 44
let symbol = make_vm_value original vm_tag_number
(tag_of_vm_value symbol) vm_tag_number,
assert_equal (value_of_vm_value symbol) original
),
it "applies a symbol tag to a value" $ do
let original = 12
let symbol = make_vm_value original vm_tag_symbol
assert_equal (tag_of_vm_value symbol) vm_tag_symbol,
assert_equal (value_of_vm_value symbol) original
),
-}
it "loads a symbol into a register" $ do
let sym = mkSymId 12
let prog = [[ OpcLoadPS 0 sym,
OpcRet 0]]
(runProg prog) `shouldReturn` (encodePlainSymbol sym)
it "loads a compound symbol" $ do
let ctable = [ encodeNumber 1,
encodeCompoundSymbolHeader (mkSymId 5) 1,
encodeNumber 3
]
let prog = [[ OpcLoadCS 0 (mkConstAddr 1),
OpcRet 0 ]]
(runProgTbl ctable prog) `shouldReturn` (encodeCompoundSymbolRef $ mkConstAddr 1)
it "jumps forward" $ do
let prog = [[ OpcLoadI 0 66,
OpcJmp 1,
OpcRet 0,
OpcLoadI 0 70,
OpcRet 0 ]]
result <- runProg prog
decodedResult <- decode result [] []
decodedResult `shouldBe` (VMNumber 70)
it "jumps if condition true" $ do
let prog = [[ OpcLoadI 1 2, -- counter
OpcLoadI 2 5, -- target value
OpcLoadI 5 0, -- accumulator
OpcLoadI 3 1,
OpcEq 4 1 2,
OpcJmpTrue 4 3,
OpcAdd 5 5 1,
OpcAdd 1 1 3,
OpcJmp (-5),
OpcMove 0 5,
OpcRet 0 ]]
result <- runProg prog
decodedResult <- decode result [] []
-- result: 2 + 3 + 4 = 9
decodedResult `shouldBe` (VMNumber 9)
it "matches a number" $ do
let ctable = [ encodeMatchHeader 2,
encodeNumber 11,
encodeNumber 22 ]
let prog = [[ OpcLoadI 0 600,
OpcLoadI 1 22,
OpcLoadAddr 2 (mkConstAddr 0),
OpcMatch 1 2 0,
OpcJmp 1,
OpcJmp 2,
OpcLoadI 0 4,
OpcRet 0,
OpcLoadI 0 300,
OpcRet 0 ]]
result <- runProgTbl ctable prog
decodedResult <- decode result ctable []
decodedResult `shouldBe` (VMNumber 300)
it "matches a symbol" $ do
let ctable = [ encodeMatchHeader 2,
encodePlainSymbol (mkSymId 11),
encodePlainSymbol (mkSymId 22) ]
let prog = [[ OpcLoadI 0 600,
OpcLoadPS 1 (mkSymId 22),
OpcLoadAddr 2 (mkConstAddr 0),
OpcMatch 1 2 0,
OpcJmp 1,
OpcJmp 2,
OpcLoadI 0 4,
OpcRet 0,
OpcLoadI 0 300,
OpcRet 0 ]]
result <- runProgTbl ctable prog
decodedResult <- decode result ctable []
decodedResult `shouldBe` (VMNumber 300)
it "matches a data symbol" $ do
let ctable = [ encodeMatchHeader 2,
encodeCompoundSymbolRef (mkConstAddr 3),
encodeCompoundSymbolRef (mkConstAddr 6),
encodeCompoundSymbolHeader (mkSymId 1) 2,
encodeNumber 55,
encodeNumber 66,
encodeCompoundSymbolHeader (mkSymId 1) 2,
encodeNumber 55,
encodeNumber 77,
encodeCompoundSymbolHeader (mkSymId 1) 2,
encodeNumber 55,
encodeNumber 77 ]
let prog = [[ OpcLoadI 0 600,
OpcLoadCS 1 (mkConstAddr 9),
OpcLoadAddr 2 (mkConstAddr 0),
OpcMatch 1 2 0,
OpcJmp 1,
OpcJmp 2,
OpcLoadI 0 4,
OpcRet 0,
OpcLoadI 0 300,
OpcRet 0 ]]
result <- runProgTbl ctable prog
decodedResult <- decode result ctable []
decodedResult `shouldBe` (VMNumber 300)
it "binds a value in a match" $ do
let ctable = [ encodeMatchHeader 2,
encodeCompoundSymbolRef (mkConstAddr 3),
encodeCompoundSymbolRef (mkConstAddr 6),
encodeCompoundSymbolHeader (mkSymId 1) 2,
encodeNumber 55,
encodeNumber 66,
encodeCompoundSymbolHeader (mkSymId 1) 2,
encodeNumber 55,
encodeMatchVar 1,
encodeCompoundSymbolHeader (mkSymId 1) 2,
encodeNumber 55,
encodeNumber 77 ]
let prog = [[ OpcLoadI 0 600,
OpcLoadI 4 66,
OpcLoadCS 1 (mkConstAddr 9),
OpcLoadAddr 2 (mkConstAddr 0),
OpcMatch 1 2 3,
OpcJmp 1,
OpcJmp 2,
OpcLoadI 0 22,
OpcRet 0,
OpcMove 0 4, -- reg 4 contains match var 1 (see pattern in ctable)
OpcRet 0 ]]
result <- runProgTbl ctable prog
decodedResult <- decode result [] []
decodedResult `shouldBe` (VMNumber 77)
it "loads a symbol on the heap" $ do
let ctable = [ encodeCompoundSymbolHeader (mkSymId 1) 2,
encodeNumber 55,
encodeNumber 66,
encodeCompoundSymbolHeader (mkSymId 3) 2,
encodeNumber 33,
encodeNumber 44 ]
let prog = [[ OpcLoadCS 0 (mkConstAddr 0),
OpcLoadCS 1 (mkConstAddr 3),
OpcCopySym 0 1,
OpcRet 0 ]]
result <- runProgTbl ctable prog
let symNames = ["X", "A", "Y", "B"]
let decodeResult = decode result ctable symNames
decodeResult `shouldReturn` (VMSymbol "B" [VMNumber 33, VMNumber 44])
it "modifies a heap symbol" $ do
let ctable = [ encodeCompoundSymbolHeader (mkSymId 1) 2,
encodeNumber 55,
encodeNumber 66,
encodeCompoundSymbolHeader (mkSymId 3) 2,
encodeNumber 33,
encodeNumber 44 ]
let prog = [[ OpcLoadCS 0 (mkConstAddr 0),
OpcLoadCS 1 (mkConstAddr 3),
OpcCopySym 0 1,
OpcLoadPS 5 (mkSymId 6),
OpcSetSymField 0 5 1,
OpcRet 0 ]]
result <- runProgTbl ctable prog
let symNames = ["X", "A", "Y", "B", "Z", "W", "success"]
let decodeResult = decode result ctable symNames
decodeResult `shouldReturn` (VMSymbol "B" [VMNumber 33, VMSymbol "success" []])
it "loads a string into a register" $ do
let prog = [[ OpcLoadStr 0 (mkConstAddr 55),
OpcRet 0 ]]
(runProg prog) `shouldReturn` (encodeStringRef $ mkConstAddr 55)
it "determines the length of a string" $ do
let ctable = [ encodeStringHeader 5 2,
encodeStringChunk 'd' 'a' 's' 'h',
encodeStringChunk '!' '\0' '\0' '\0' ]
let prog = [[ OpcLoadStr 1 (mkConstAddr 0),
OpcStrLen 0 1,
OpcRet 0 ]]
result <- runProgTbl ctable prog
decodedResult <- decode result ctable []
decodedResult `shouldBe` (VMNumber 5)
it "creates a new string" $ do
let prog = [[ OpcLoadI 1 8,
OpcNewStr 0 1,
OpcRet 0 ]]
result <- runProg prog
decodedResult <- decode result [] []
decodedResult `shouldBe` (VMString "")
it "copies a string" $ do
let loop = (-6);
let end = 4;
let ctable = [ encodeStringHeader 5 2,
encodeStringChunk 'd' 'a' 's' 'h',
encodeStringChunk '!' '\0' '\0' '\0' ]
let prog = [[ OpcLoadStr 6 (mkConstAddr 0),
OpcStrLen 1 6,
OpcLoadI 2 0, -- index
OpcLoadI 5 1,
OpcNewStr 3 1,
-- loop:
OpcEq 7 2 1,
OpcJmpTrue 7 end,
OpcGetChar 4 6 2,
OpcPutChar 4 3 2,
OpcAdd 2 2 5,
OpcJmp loop,
-- end:
OpcMove 0 3,
OpcRet 0 ]]
result <- runProgTbl ctable prog
decodedResult <- decode result ctable []
decodedResult `shouldBe` (VMString "dash!")
it "looks up a value in a module" $ do
let ctable = [ encodeOpaqueSymbolHeader (mkSymId 10) 2
, encodePlainSymbol (mkSymId 0)
, encodePlainSymbol (mkSymId 5)
, encodeNumber 33
]
let prog = [[ OpcLoadOS 1 (mkConstAddr 0)
, OpcLoadPS 2 (mkSymId 5)
, OpcGetField 0 1 2
, OpcRet 0 ]]
result <- runProgTbl ctable prog
decodedResult <- decode result ctable []
decodedResult `shouldBe` (VMNumber 33)
|
arne-schroppe/dash
|
test/Language/Dash/VM/VMSpec.hs
|
Haskell
|
mit
| 14,363
|
-----------------------------------------------------------------------------
-- |
-- Module : Reader.Parser
-- License : MIT (see the LICENSE file)
-- Maintainer : Felix Klein (klein@react.uni-saarland.de)
--
-- Parsing module containing all necessary parsers.
--
-----------------------------------------------------------------------------
module Reader.Parser
( parse
) where
-----------------------------------------------------------------------------
import Data.Enum
( EnumDefinition(..)
)
import Data.Error
( Error
, parseError
)
import Reader.Error
( errEnumConflict
)
import Reader.Parser.Data
( Specification(..)
)
import Reader.Parser.Info
( infoParser
)
import Reader.Parser.Global
( globalParser
)
import Reader.Parser.Component
( componentParser
)
import Text.Parsec
( (<|>)
)
import qualified Text.Parsec as P
( parse
)
import Text.Parsec.String
( Parser
)
-----------------------------------------------------------------------------
-- | @parse str@ parses a specification from the string @str@.
parse
:: String -> Either Error Specification
parse str =
case P.parse specificationParser "Syntax Error" str of
Left err -> parseError err
Right x -> do
mapM_ checkEnum $ enumerations x
return x
-----------------------------------------------------------------------------
specificationParser
:: Parser Specification
specificationParser = do
(i,d,s,r,a) <- infoParser
(ps,vs,ms) <- globalParser <|> return ([],[],[])
(is,os,es,ss,rs,as,ns,gs) <- componentParser
return Specification
{ title = i
, description = d
, semantics = s
, target = r
, tags = a
, enumerations = ms
, parameters = ps
, definitions = vs
, inputs = is
, outputs = os
, initially = es
, preset = ss
, requirements = rs
, assumptions = as
, invariants = ns
, guarantees = gs
}
-----------------------------------------------------------------------------
checkEnum
:: EnumDefinition String -> Either Error ()
checkEnum e = case eDouble e of
Just ((m,p),(x,_),(y,_),f) -> errEnumConflict m x y (toStr (eSize e) f) p
Nothing -> return ()
where
toStr n f = map (toS . f) [0,1..n-1]
toS (Right ()) = '*'
toS (Left True) = '1'
toS (Left False) = '0'
-----------------------------------------------------------------------------
|
reactive-systems/syfco
|
src/lib/Reader/Parser.hs
|
Haskell
|
mit
| 2,459
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisanalytics-application-csvmappingparameters.html
module Stratosphere.ResourceProperties.KinesisAnalyticsApplicationCSVMappingParameters where
import Stratosphere.ResourceImports
-- | Full data type definition for
-- KinesisAnalyticsApplicationCSVMappingParameters. See
-- 'kinesisAnalyticsApplicationCSVMappingParameters' for a more convenient
-- constructor.
data KinesisAnalyticsApplicationCSVMappingParameters =
KinesisAnalyticsApplicationCSVMappingParameters
{ _kinesisAnalyticsApplicationCSVMappingParametersRecordColumnDelimiter :: Val Text
, _kinesisAnalyticsApplicationCSVMappingParametersRecordRowDelimiter :: Val Text
} deriving (Show, Eq)
instance ToJSON KinesisAnalyticsApplicationCSVMappingParameters where
toJSON KinesisAnalyticsApplicationCSVMappingParameters{..} =
object $
catMaybes
[ (Just . ("RecordColumnDelimiter",) . toJSON) _kinesisAnalyticsApplicationCSVMappingParametersRecordColumnDelimiter
, (Just . ("RecordRowDelimiter",) . toJSON) _kinesisAnalyticsApplicationCSVMappingParametersRecordRowDelimiter
]
-- | Constructor for 'KinesisAnalyticsApplicationCSVMappingParameters'
-- containing required fields as arguments.
kinesisAnalyticsApplicationCSVMappingParameters
:: Val Text -- ^ 'kaacsvmpRecordColumnDelimiter'
-> Val Text -- ^ 'kaacsvmpRecordRowDelimiter'
-> KinesisAnalyticsApplicationCSVMappingParameters
kinesisAnalyticsApplicationCSVMappingParameters recordColumnDelimiterarg recordRowDelimiterarg =
KinesisAnalyticsApplicationCSVMappingParameters
{ _kinesisAnalyticsApplicationCSVMappingParametersRecordColumnDelimiter = recordColumnDelimiterarg
, _kinesisAnalyticsApplicationCSVMappingParametersRecordRowDelimiter = recordRowDelimiterarg
}
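-- A hypothetical usage sketch (the delimiter values are assumptions, and the
-- 'Literal' constructor of 'Val' is assumed for plain text values): build the
-- mapping parameters for a comma-separated, newline-terminated record format.
--
-- > csvParams :: KinesisAnalyticsApplicationCSVMappingParameters
-- > csvParams = kinesisAnalyticsApplicationCSVMappingParameters
-- >   (Literal ",")
-- >   (Literal "\n")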
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisanalytics-application-csvmappingparameters.html#cfn-kinesisanalytics-application-csvmappingparameters-recordcolumndelimiter
kaacsvmpRecordColumnDelimiter :: Lens' KinesisAnalyticsApplicationCSVMappingParameters (Val Text)
kaacsvmpRecordColumnDelimiter = lens _kinesisAnalyticsApplicationCSVMappingParametersRecordColumnDelimiter (\s a -> s { _kinesisAnalyticsApplicationCSVMappingParametersRecordColumnDelimiter = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisanalytics-application-csvmappingparameters.html#cfn-kinesisanalytics-application-csvmappingparameters-recordrowdelimiter
kaacsvmpRecordRowDelimiter :: Lens' KinesisAnalyticsApplicationCSVMappingParameters (Val Text)
kaacsvmpRecordRowDelimiter = lens _kinesisAnalyticsApplicationCSVMappingParametersRecordRowDelimiter (\s a -> s { _kinesisAnalyticsApplicationCSVMappingParametersRecordRowDelimiter = a })
|
frontrowed/stratosphere
|
library-gen/Stratosphere/ResourceProperties/KinesisAnalyticsApplicationCSVMappingParameters.hs
|
Haskell
|
mit
| 2,947
|
type Fname = String
type Var = String
data Program = Prog [Fundef] Exp deriving Show
data Fundef = Fun String [String] Exp deriving Show
data Exp = I Int | V Var | B Bool | Nil | Fname String | App Exp Exp deriving Show
type Code = [Instn]
data Instn =
PUSH Int
| PUSHINT Int
| PUSHGLOBAL String
| PUSHBOOL Bool
| PUSHNIL
| POP Int
| EVAL
| UNWIND
| MKAP
| UPDATE Int
| RETURN
| LABEL String
| JUMP String
| JFALSE String
| ADD
| SUB
| MUL
| DIV
| CONS
| HEAD
| TAIL
| IF
| EQU
| GLOBSTART String Int
| PRINT
| STOP
instance Show Instn where
show (PUSH i) = "PUSH " ++ show i ++ "\n"
show (PUSHINT i) = "PUSHINT " ++ show i ++ "\n"
show (PUSHGLOBAL str) = "PUSHGLOBAL " ++ show str ++ "\n"
show (PUSHBOOL b) = "PUSHBOOL " ++ show b ++ "\n"
show PUSHNIL = "PUSHNIL " ++ "\n"
show (POP i) = "POP " ++ show i ++ "\n"
show EVAL = "EVAL" ++ "\n"
show UNWIND = "UNWIND" ++ "\n"
show MKAP = "MKAP" ++ "\n"
show RETURN = "RETURN" ++ "\n"
show (UPDATE i) = "UPDATE " ++ show i ++ "\n"
show (LABEL str) = "LABEL " ++ show str ++ "\n"
show (JUMP str) = "JUMP " ++ show str ++ "\n"
show (JFALSE str) = "JFALSE " ++ show str ++ "\n"
show ADD = "ADD" ++ "\n"
show SUB = "SUB" ++ "\n"
show MUL = "MUL" ++ "\n"
show DIV = "DIV" ++ "\n"
show CONS = "CONS" ++ "\n"
show HEAD = "HEAD" ++ "\n"
show TAIL = "TAIL" ++ "\n"
show IF = "IF" ++ "\n"
show EQU = "EQU" ++ "\n"
show (GLOBSTART str i) = "\n GLOBSTART " ++ show str ++ " " ++ show i ++ "\n"
show PRINT = "PRINT" ++ "\n"
show STOP = "STOP" ++ "\n"
start (Prog fdefList term) = (foldr func g fdefList)
where
g = LABEL "MAIN":(expr term (\x -> 999) 999) (EVAL:PRINT:STOP:hardCoded) --here the 999 are arbitrary
hardCoded = concat (map (getCode) builtins)
func (Fun fname argList fdef) codeTillNow = GLOBSTART fname n:(body fdef r n codeTillNow)
where
n = length argList
tupleList = foldr (\x acc -> (x, (length acc)+1):acc) [] argList
r ident = snd$head (filter (\(v,x) -> v==ident) tupleList)
body fbody r d codeTillNow= expr fbody r d ((UPDATE (d+1)):POP d:UNWIND:codeTillNow)
builtins = [("+",ADD), ("-",SUB), ("*",MUL), ("/",DIV), ("cons",CONS), ("head",HEAD), ("car",HEAD), ("cdr",TAIL), ("==",EQU), ("null",HEAD), ("if",IF)]
expr (I x) r d codeTillNow = PUSHINT x:codeTillNow
expr (B x) r d codeTillNow = PUSHBOOL x:codeTillNow
expr (Fname fname) _ _ codeTillNow = PUSHGLOBAL fname:codeTillNow
expr (V v) r d codeTillNow = PUSH (d - (r v)):codeTillNow
expr (App (x) (y)) r d codeTillNow = expr (y) r d (expr (x) r (d+1) (MKAP:codeTillNow))
expr (Nil) r d codeTillNow = PUSHNIL:codeTillNow
getCode (mini,gcode)
|(mini `elem` ["+","-","*","/","=="]) = [GLOBSTART mini 2, PUSH 1, EVAL, PUSH 1, EVAL, gcode, UPDATE 3]
|(mini=="cons") = [GLOBSTART mini 1, gcode, UPDATE 1, RETURN]
|(mini `elem` ["head","car","cdr","null"]) = [GLOBSTART mini 1, EVAL, gcode, EVAL, UPDATE 1, UNWIND]
|(mini=="if") = [GLOBSTART mini 3, PUSH 0, EVAL, JFALSE "l1", PUSH 1, JUMP "l2", LABEL "l1", PUSH 2, LABEL "l2", EVAL, UPDATE 4, POP 3, UNWIND]
|otherwise = []
gencpgm :: Program -> Code
gencpgm p = start p
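-- Worked example (a sketch, not part of the original file): compiling the
-- closed expression (1 + 2) with no user-defined functions,
--
--   gencpgm (Prog [] (App (App (Fname "+") (I 1)) (I 2)))
--
-- produces MAIN code of roughly
--
--   LABEL "MAIN", PUSHINT 2, PUSHINT 1, PUSHGLOBAL "+", MKAP, MKAP,
--   EVAL, PRINT, STOP
--
-- followed by the GLOBSTART blocks that 'getCode' emits for the built-ins.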
|
sushantmahajan/programs
|
haskell/assign2.hs
|
Haskell
|
cc0-1.0
| 3,308
|
{-# LANGUAGE BangPatterns #-}
module Ylang.Primitive
(
addBin,
andBin,
orBin,
xorBin,
notUnary,
adds,
ands,
ors,
xors,
nots
) where
import Ylang.Display
import Ylang.Value
type BinOp a = a -> a -> Either String a
type Variadic a = [a] -> Either String a
variadic :: BinOp Val -> Variadic Val
variadic _ [x] = Right x
variadic f (x1:x2:[]) = f x1 x2
variadic f (x1:x2:xs) = case (f x1 x2) of
Right x -> variadic f (x:xs)
Left e -> Left e
variadicHalt :: BinOp Val -> Val -> Variadic Val
variadicHalt _ _ [x] = Right x
variadicHalt f _ (x1:x2:[]) = f x1 x2
variadicHalt f t (x1:x2:xs) = case (f x1 x2) of
Right x
| x == t -> Right t
| otherwise -> variadicHalt f t (x:xs)
Left e -> Left e
undefinedFound :: Either String Val
undefinedFound = Left "Undefined"
unknownImplError :: String -> Val -> Either String Val
unknownImplError fn x = Left $
"Undefined Implement " ++ fn ++ " for " ++ toString (getType x) ++ " type"
typeNotMatch :: Either String Val
typeNotMatch = Left "Type Not Match"
-- |
-- (+ <ylang-value> <ylang-value>)
addBin :: BinOp Val
addBin x y = case (x, y) of
-- Numbers
(ValIntn i, ValIntn j) -> Right $ ValIntn (i + j)
(ValFlon i, ValFlon j) -> Right $ ValFlon (i + j)
(ValRatn i, ValRatn j) -> Right $ ValRatn (i + j)
(ValIntn i, ValFlon j) -> Right $ ValRatn (fromInteger i + toRational j)
(ValIntn i, ValRatn j) -> Right $ ValRatn (fromInteger i + j)
(ValFlon i, ValRatn j) -> Right $ ValRatn (toRational i + j)
(ValFlon _, ValIntn _) -> addBin y x
(ValRatn _, ValIntn _) -> addBin y x
(ValRatn _, ValFlon _) -> addBin y x
(ValChr a, ValChr b) -> Right $ ValStr (a:b:[])
(ValChr a, ValStr b) -> Right $ ValStr (a:b)
(ValStr a, ValChr b) -> Right $ ValStr (a ++ [b])
(ValStr a, ValStr b) -> Right $ ValStr (a ++ b)
(ValBotm, _) -> undefinedFound
(_, ValBotm) -> undefinedFound
(_, _)
| getType x == getType y -> unknownImplError "(+)" x
| otherwise -> typeNotMatch
adds :: Variadic Val
adds = variadic addBin
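-- Informal examples (an addition; assuming the constructors behave as the
-- cases above suggest):
--
--   adds [ValIntn 1, ValIntn 2, ValIntn 3]   evaluates to   Right (ValIntn 6)
--   adds [ValIntn 1, ValBool True]           evaluates to   Left "Type Not Match"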
andBin :: BinOp Val
andBin x y = case (x, y) of
(ValBool i, ValBool j) -> Right $ ValBool (i && j)
(ValBotm, _) -> undefinedFound
(_, ValBotm) -> undefinedFound
(_, _)
| getType x == getType y -> unknownImplError "(&)" x
| otherwise -> typeNotMatch
ands :: Variadic Val
ands = variadicHalt andBin $ ValBool False
orBin :: BinOp Val
orBin x y = case (x, y) of
(ValBool i, ValBool j) -> Right $ ValBool (i || j)
(ValBotm, _) -> undefinedFound
(_, ValBotm) -> undefinedFound
(_, _)
| getType x == getType y -> unknownImplError "(|)" x
| otherwise -> typeNotMatch
ors :: Variadic Val
ors = variadicHalt orBin $ ValBool True
xorBin :: BinOp Val
xorBin x y = case (x, y) of
(ValBool True, ValBool False) -> Right $ ValBool True
(ValBool False, ValBool True) -> Right $ ValBool True
(ValBool _, ValBool _) -> Right $ ValBool False
(ValBotm, _) -> undefinedFound
(_, ValBotm) -> undefinedFound
(_, _)
| getType x == getType y -> unknownImplError "(^)" x
| otherwise -> typeNotMatch
xors :: Variadic Val
xors = variadic xorBin
notUnary :: Val -> Either String Val
notUnary (ValBool i) = Right $ ValBool (not i)
notUnary ValBotm = undefinedFound
notUnary _ = typeNotMatch
nots :: Variadic Val
nots (e:[]) = notUnary e
nots (_:_) = Left "Too Many Parameters"
|
VoQn/ylang
|
Ylang/Primitive.hs
|
Haskell
|
apache-2.0
| 3,350
|
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE BangPatterns #-}
module Lib.LiftedIO
(print, putStrLn, newIORef, modifyIORef', readIORef,
writeIORef, IOREF.IORef)
where
------------------------------------------------------------------------------------
import qualified Prelude as P
import Control.Monad.State (MonadIO)
import Control.Monad.IO.Class (liftIO)
import Prelude ((.), Show, ($))
import qualified Data.IORef as IOREF
------------------------------------------------------------------------------------
print :: (Show a, MonadIO m) => a -> m ()
print = liftIO . P.print
putStrLn :: MonadIO m => P.String -> m ()
putStrLn = liftIO . P.putStrLn
newIORef :: MonadIO m => a -> m (IOREF.IORef a)
newIORef = liftIO . IOREF.newIORef
modifyIORef' :: MonadIO m => IOREF.IORef a -> (a -> a) -> m ()
modifyIORef' x y = liftIO $ IOREF.modifyIORef' x y
readIORef :: MonadIO m => IOREF.IORef a -> m a
readIORef = liftIO . IOREF.readIORef
writeIORef :: MonadIO m => IOREF.IORef a -> a -> m ()
writeIORef x !y = liftIO $ IOREF.writeIORef x y
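-- A minimal usage sketch (an addition, not part of the original module): the
-- lifted helpers let plain IORef code run in any 'MonadIO' stack, e.g.
--
--   counterDemo :: MonadIO m => m ()
--   counterDemo = do
--     ref <- newIORef (0 :: P.Int)
--     modifyIORef' ref (P.+ 1)
--     readIORef ref >>= print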
|
kernelim/gitomail
|
src/Lib/LiftedIO.hs
|
Haskell
|
apache-2.0
| 1,159
|
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Risk where
import Control.Monad.Random
import Control.Monad
import Data.List (sort, foldr)
import Data.Ratio
------------------------------------------------------------
-- Die values
newtype DieValue = DV { unDV :: Int }
deriving (Eq, Ord, Show, Num)
first :: (a -> b) -> (a, c) -> (b, c)
first f (a, c) = (f a, c)
instance Random DieValue where
random = first DV . randomR (1,6)
randomR (low,hi) = first DV . randomR (max 1 (unDV low), min 6 (unDV hi))
die :: Rand StdGen DieValue
die = getRandom
------------------------------------------------------------
-- Risk
type Army = Int
data Battlefield = Battlefield { attackers :: Army, defenders :: Army }
deriving Show
-- Exercise #2:
battle :: Battlefield -> Rand StdGen Battlefield
battle bf = do
atkDice <- roll attackForce
defDice <- roll defenseForce
let lineUp = zip (sort atkDice) (sort defDice)
winners = map (uncurry (>)) lineUp
defenseLost = length $ filter id winners
offenseLost = length $ filter not winners
return (Battlefield (atk - offenseLost) (def - defenseLost))
where
atk = attackers bf
def = defenders bf
attackForce = if atk > 3 then 3 else min (atk - 1) 3
defenseForce = if def > 2 then 2 else def
roll n = replicateM n die
-- Exercise #3:
invade :: Battlefield -> Rand StdGen Battlefield
invade bf = do
newBf <- battle bf
if attackers newBf > 1 && defenders newBf > 0
then invade newBf
else return newBf
-- Exercise #4:
successProb :: Battlefield -> Rand StdGen Double
successProb bf = do
battlefields <- replicateM 1000 (invade bf)
let wins = foldr (\b acc -> acc + if (0 == defenders b) then 1 else 0) 0 battlefields
battles = length battlefields
return $ (wins / fromIntegral battles)
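-- A small usage sketch (an addition, not from the original exercise file):
-- estimate the attacker's chance of taking a 10-vs-10 battlefield by running
-- the Monte Carlo estimate in IO via 'evalRandIO'.
exampleSuccessProb :: IO Double
exampleSuccessProb = evalRandIO (successProb (Battlefield 10 10))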
|
parsonsmatt/cis194
|
hw12/Risk.hs
|
Haskell
|
apache-2.0
| 1,882
|
{-
Copyright 2016, Dominic Orchard, Andrew Rice, Mistral Contrastin, Matthew Danish
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE ImplicitParams #-}
{-# LANGUAGE TupleSections #-}
module Camfort.Specification.Stencils.CheckBackend
(
-- * Classes
SynToAst(..)
-- * Errors
, SynToAstError
, regionNotInScope
-- * Helpers
, checkOffsetsAgainstSpec
) where
import Algebra.Lattice (joins1)
import Control.Arrow (second)
import Data.Function (on)
import Data.Int (Int64)
import Data.List (sort)
import qualified Data.Set as S
import qualified Camfort.Helpers.Vec as V
import qualified Camfort.Specification.Stencils.Consistency as C
import Camfort.Specification.Stencils.Model
import qualified Camfort.Specification.Stencils.Parser.Types as SYN
import Camfort.Specification.Stencils.Syntax
data SynToAstError = RegionNotInScope String
deriving (Eq)
regionNotInScope :: String -> SynToAstError
regionNotInScope = RegionNotInScope
instance Show SynToAstError where
show (RegionNotInScope r) = "Error: region " ++ r ++ " is not in scope."
-- Class for functions converting from the parser's concrete
-- syntax to the AST representation of the Syntax module
class SynToAst s t | s -> t where
synToAst :: (?renv :: RegionEnv) => s -> Either SynToAstError t
-- Top-level conversion of declarations
instance SynToAst SYN.Specification (Either RegionDecl SpecDecl) where
synToAst (SYN.SpecDec spec vars) = do
spec' <- synToAst spec
return $ Right (vars, spec')
synToAst (SYN.RegionDec rvar region) = do
spec' <- synToAst region
return $ Left (rvar, spec')
-- Convert temporal or spatial specifications
instance SynToAst SYN.SpecInner Specification where
synToAst (SYN.SpecInner spec isStencil) = do
spec' <- synToAst spec
return $ Specification spec' isStencil
instance SynToAst (Multiplicity (Approximation SYN.Region)) (Multiplicity (Approximation Spatial)) where
synToAst (Once a) = fmap Once . synToAst $ a
synToAst (Mult a) = fmap Mult . synToAst $ a
instance SynToAst (Approximation SYN.Region) (Approximation Spatial) where
synToAst (Exact s) = fmap (Exact . Spatial) . synToAst $ s
synToAst (Bound s1 s2) = (Bound `on` (fmap Spatial)) <$> synToAst s1 <*> synToAst s2
instance SynToAst (Maybe SYN.Region) (Maybe RegionSum) where
synToAst Nothing = pure Nothing
synToAst (Just r) = fmap Just . synToAst $ r
-- Convert region definitions into the DNF-form used internally
instance SynToAst SYN.Region RegionSum where
synToAst = dnf
-- Convert a grammar syntax to Disjunctive Normal Form AST
dnf :: (?renv :: RegionEnv) => SYN.Region -> Either SynToAstError RegionSum
dnf (SYN.RegionConst rconst) = pure . Sum $ [Product [rconst]]
-- Distributive law
dnf (SYN.And r1 r2) = do
r1' <- dnf r1
r2' <- dnf r2
return $ Sum $ unSum r1' >>= (\(Product ps1) ->
unSum r2' >>= (\(Product ps2) ->
return $ Product $ ps1 ++ ps2))
-- Coalesce sums
dnf (SYN.Or r1 r2) = do
r1' <- dnf r1
r2' <- dnf r2
return $ Sum $ unSum r1' ++ unSum r2'
-- Region conversion
dnf (SYN.Var v) =
case lookup v ?renv of
Nothing -> Left (RegionNotInScope v)
Just rs -> return rs
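-- Worked example (informal, an addition): with ?renv in scope and region
-- constants a, b, c,
--
--   dnf (SYN.And (SYN.Or a b) c)
--
-- distributes to the sum of products a*c + b*c, i.e.
--
--   Sum [Product [a', c'], Product [b', c']]
--
-- where a', b', c' are the converted constants.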
-- *** Other Helpers
checkOffsetsAgainstSpec :: [(Variable, Multiplicity [[Int]])]
-> [(Variable, Specification)]
-> Bool
checkOffsetsAgainstSpec offsetMaps specMaps =
variablesConsistent && all specConsistent specToVecList
where
variablesConsistent =
let vs1 = sort . fmap fst $ offsetMaps
vs2 = sort . fmap fst $ specMaps
in vs1 == vs2
specConsistent spec =
case spec of
(spec', Once (V.VL vs)) -> spec' `C.consistent` (Once . toUNF) vs == C.Consistent
(spec', Mult (V.VL vs)) -> spec' `C.consistent` (Mult . toUNF) vs == C.Consistent
toUNF :: [ V.Vec n Int64 ] -> UnionNF n Offsets
toUNF = joins1 . map (return . fmap intToSubscript)
-- This function generates the special offsets subspace, subscript,
-- which either has exactly one element or is the whole set.
intToSubscript :: Int64 -> Offsets
intToSubscript i
| fromIntegral i == absoluteRep = SetOfIntegers
| otherwise = Offsets . S.singleton $ i
-- Convert list of list of indices into vectors and wrap them around
-- existential so that we don't have to prove they are all of the same
-- size.
specToVecList :: [ (Specification, Multiplicity (V.VecList Int64)) ]
specToVecList = map (second (fmap V.fromLists)) specToIxs
specToIxs :: [ (Specification, Multiplicity [ [ Int64 ] ]) ]
specToIxs = pairWithFst specMaps (map (second toInt64) offsetMaps)
toInt64 :: Multiplicity [ [ Int ] ] -> Multiplicity [ [ Int64 ] ]
toInt64 = fmap (map (map fromIntegral))
-- Given two maps for each key in the first map generate a set of
-- tuples matching the (val,val') where val and val' are corresponding
-- values from each set.
pairWithFst :: Eq a => [ (a, b) ] -> [ (a, c) ] -> [ (b, c) ]
pairWithFst [] _ = []
pairWithFst ((key, val):xs) ys =
map ((val,) . snd) (filter ((key ==) . fst) ys) ++ pairWithFst xs ys
-- Local variables:
-- mode: haskell
-- haskell-program-name: "cabal repl"
-- End:
|
dorchard/camfort
|
src/Camfort/Specification/Stencils/CheckBackend.hs
|
Haskell
|
apache-2.0
| 5,966
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedLists #-}
{-# LANGUAGE PartialTypeSignatures #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# OPTIONS_GHC -fno-warn-partial-type-signatures #-}
module BasicSpec where
import LoadTestCallbacks()
import Test.Hspec
import Test.Hspec.QuickCheck
import Test.QuickCheck.Monadic
import Test.QuickCheck.Property (rejected)
import Bio.Motions.Types
import Bio.Motions.Common
import Bio.Motions.Representation.Class
import Bio.Motions.Representation.Chain.Internal
import Bio.Motions.Representation.Chain.Slow
import Bio.Motions.Callback.Class
import Bio.Motions.Callback.StandardScore
import Bio.Motions.Callback.GyrationRadius
import Bio.Motions.Callback.Parser.TH
import Bio.Motions.Representation.Dump
import Bio.Motions.Utils.Random
import Control.Monad
import Control.Monad.Trans
import Control.Lens
import Data.Maybe
import Data.MonoTraversable
import Data.Proxy
import Linear
shouldAlmostBe :: (Fractional a, Ord a, Show a) => a -> a -> Expectation
x `shouldAlmostBe` y = abs (x - y) `shouldSatisfy` (< 1e-7)
instance MonadRandom m => MonadRandom (PropertyM m) where
type Random (PropertyM m) = Random m
getRandom = lift getRandom
getRandomR = lift . getRandomR
testRepr :: _ => proxy repr -> Int -> Spec
testRepr (_ :: _ repr) maxMoveQd = before (loadDump dump freezePredicate :: IO repr) $ do
context "when redumping" $
beforeWith makeDump testRedump
context "when inspecting the data"
testInspect
context "when computing callbacks"
testCallbacks
context "when generating a move"
testGenerateMove
beforeWith (performMove beadMove) $
context "after making a bead move" $ do
testAfterBeadMove
beforeWith (performMove binderMove) $
context "after making a binder move"
testAfterBinderMove
where
beads = sum . map length $ dumpIndexedChains dump
binders = length $ dumpBinders dump
freezePredicate b = b ^. beadChain == 0
dump = Dump
{ dumpBinders =
[ BinderInfo (V3 0 1 2) bi0
, BinderInfo (V3 0 1 3) bi0
, BinderInfo (V3 5 5 5) bi1
]
, dumpChains =
[ [ DumpBeadInfo (V3 0 1 1) ev0
, DumpBeadInfo (V3 5 6 6) ev1
, DumpBeadInfo (V3 5 5 6) ev0
]
, [ DumpBeadInfo (V3 0 0 2) ev0
, DumpBeadInfo (V3 5 4 5) ev1
]
, [ DumpBeadInfo (V3 7 7 7) ev0
, DumpBeadInfo (V3 7 8 8) ev0
]
]
}
[bi0, bi1] = map BinderType [0, 1]
(ev0, ev1) = ([1, 0], [0, 1000])
complexFunctionResult = 45117.35291086203
beadMove = Move (V3 5 6 6) (V3 0 0 (-1))
binderMove = Move (V3 0 1 2) (V3 1 0 0)
updatedChain = [ BeadInfo (V3 0 1 1) ev0 0 0 0
, BeadInfo (V3 5 6 5) ev1 1 0 1
, BeadInfo (V3 5 5 6) ev0 2 0 2
]
updatedChains = updatedChain : tail (dumpIndexedChains dump)
updatedBinders = [ BinderInfo (V3 1 1 2) bi0
, BinderInfo (V3 0 1 3) bi0
, BinderInfo (V3 5 5 5) bi1
]
testRedump :: SpecWith Dump
testRedump = do
it "yields the same chains" $ \dump' ->
dumpChains dump' `shouldBe` dumpChains dump
it "yields the same binders" $ \dump' ->
dumpBinders dump' `shouldMatchList` dumpBinders dump
testInspect :: SpecWith repr
testInspect = do
it "yields the same number of chains" $
getNumberOfChains >=> (`shouldBe` length (dumpChains dump))
it "yields the same binders" $ \repr -> do
binders <- getBinders repr (pure . otoList)
binders `shouldBe` dumpBinders dump
it "yields the same beads" $ \repr -> do
beads <- forM [0..length (dumpChains dump) - 1] $
\idx -> getChain repr idx (pure . otoList)
beads `shouldBe` dumpIndexedChains dump
context "when using getAtomAt" $ do
it "returns binders" $ \repr ->
forM_ (dumpBinders dump) $ \binder -> do
atom <- getAtomAt (binder ^. position) repr
atom `shouldBe` Just (asAtom binder)
it "returns beads" $ \repr ->
forM_ (concat $ dumpIndexedChains dump) $ \bead -> do
atom <- getAtomAt (bead ^. position) repr
atom `shouldBe` Just (asAtom bead)
it "returns Nothing" $ \repr -> do
atom <- getAtomAt (V3 0 0 0) repr
atom `shouldBe` Nothing
testCallbacks :: SpecWith repr
testCallbacks = do
it "has the correct score" $ \repr -> do
score :: StandardScore <- runCallback repr
score `shouldBe` 1002
it "has the correct score after a bead move" $ \repr -> do
score :: StandardScore <- updateCallback repr 1002 beadMove
score `shouldBe` 2002
it "has the correct score after a binder move" $ \repr -> do
score :: StandardScore <- updateCallback repr 1002 binderMove
score `shouldBe` 1000
it "has the correct gyration radii" $ \repr -> do
GyrationRadius [c1, c2, c3] <- runCallback repr
c1 `shouldAlmostBe` 5.92809748
c2 `shouldAlmostBe` 7.07106781
c3 `shouldAlmostBe` 1.41421356
it "has the same gyration radii afer a binder move" $ \repr -> do
oldRadii :: GyrationRadius <- runCallback repr
newRadii :: GyrationRadius <- updateCallback repr oldRadii $ Move (V3 0 1 2) (V3 1 0 0)
oldRadii `shouldBe` newRadii
it "has the correct gyradion radii afer a bead move" $ \repr -> do
oldRadii :: GyrationRadius <- runCallback repr
GyrationRadius [c1, c2, c3] <- updateCallback repr oldRadii $ Move (V3 0 1 1) (V3 0 0 (-1))
c1 `shouldAlmostBe` 6.34952763
c2 `shouldAlmostBe` 7.07106781
c3 `shouldAlmostBe` 1.41421356
context "when computing the template haskell callbacks" $ do
it "has the correct sum42-beads" $ \repr -> do
res :: THCallback "sum42-beads" <- runCallback repr
res `shouldBe` THCallback (42 * beads)
it "has the correct prod2-all" $ \repr -> do
res :: THCallback "prod2-all" <- runCallback repr
res `shouldBe` THCallback (2 ^ (beads + binders))
it "has the correct list42-binders" $ \repr -> do
res :: THCallback "list42-binders" <- runCallback repr
res `shouldBe` THCallback (replicate binders 42)
it "has the correct prod-binders-beads" $ \repr -> do
res :: THCallback "prod-binders-beads" <- runCallback repr
res `shouldBe` THCallback (binders * beads)
it "has the correct list-11" $ \repr -> do
res :: THCallback "list-11" <- runCallback repr
res `shouldBe` THCallback [sqrt 2, 1]
it "has the correct sum-11" $ \repr -> do
res :: THCallback "sum-11" <- runCallback repr
res `shouldBe` THCallback (1 + sqrt 2)
it "has the correct pairs-dist<2" $ \repr -> do
res :: THCallback "pairs-dist<2" <- runCallback repr
res `shouldBe` THCallback 22
it "has the correct complex-function" $ \repr -> do
res :: THCallback "complex-function" <- runCallback repr
res `shouldBe` complexFunctionResult
it "has the correct count-lamins" $ \repr -> do
res :: THCallback "count-lamins" <- runCallback repr
res `shouldBe` THCallback 2
it "has the correct score" $ \repr -> do
res :: THCallback "score" <- runCallback repr
res `shouldBe` THCallback 1002
testAfterBeadMove :: SpecWith repr
testAfterBeadMove = do
it "reports the old location to be empty" $ \repr -> do
matom <- getAtomAt (V3 5 6 6) repr
matom `shouldBe` Nothing
it "reports the new location to contain the bead" $ \repr -> do
matom <- getAtomAt (V3 5 6 5) repr
matom `shouldBe` Just (asAtom $ BeadInfo (V3 5 6 5) ev1 1 0 1)
it "reports the updated chain" $ \repr -> do
chain <- getChain repr 0 $ pure . otoList
chain `shouldBe` updatedChain
it "reports the binders to be unchanged" $ \repr -> do
binders <- getBinders repr $ pure . otoList
binders `shouldMatchList` dumpBinders dump
context "when dumping" $ beforeWith makeDump $ do
it "reports the updated chain" $ \dump' ->
dumpIndexedChains dump' `shouldBe` updatedChains
it "reports the binders to be unchanged" $ \dump' ->
dumpBinders dump' `shouldMatchList` dumpBinders dump
context "when updating callbacks" $ do
it "has the correct sum-11" $ \repr -> do
res :: THCallback "sum-11" <- updateCallback repr (THCallback (1 + sqrt 2)) beadMove
corrRes <- runCallback repr
res `shouldAlmostBe` corrRes
it "has the correct pairs-dist<2" $ \repr -> do
res :: THCallback "pairs-dist<2" <- updateCallback repr (THCallback 22) beadMove
corrRes <- runCallback repr
res `shouldBe` corrRes
it "has the correct complex-function" $ \repr -> do
res <- updateCallback repr complexFunctionResult beadMove
corrRes <- runCallback repr
res `shouldAlmostBe` corrRes
it "has the correct count-lamins" $ \repr -> do
res :: THCallback "count-lamins" <- updateCallback repr (THCallback 2) beadMove
res `shouldBe` THCallback 2
it "has the correct score" $ \repr -> do
res :: THCallback "score" <- updateCallback repr (THCallback 1002) beadMove
res `shouldBe` THCallback 2002
context "when generating a move"
testGenerateMove
testAfterBinderMove :: SpecWith repr
testAfterBinderMove = do
it "reports the old location to be empty" $ \repr -> do
matom <- getAtomAt (V3 0 1 2) repr
matom `shouldBe` Nothing
it "reports the new location to contain the binder" $ \repr -> do
matom <- getAtomAt (V3 1 1 2) repr
matom `shouldBe` Just (asAtom $ BinderInfo (V3 1 1 2) bi0)
it "reports the updated binders" $ \repr -> do
binders <- getBinders repr $ pure . otoList
binders `shouldMatchList` updatedBinders
context "when dumping" $ beforeWith makeDump $ do
it "reports the beads to be unchanged" $ \dump' ->
dumpIndexedChains dump' `shouldBe` updatedChains
it "reports the updated binders" $ \dump' ->
dumpBinders dump' `shouldMatchList` updatedBinders
context "when generating a move"
testGenerateMove
testGenerateMove :: SpecWith repr
testGenerateMove = modifyMaxSuccess (const 1000) $ do
it "moves an existing atoms" $ \repr -> monadicIO $ do
MoveFromTo from _ <- genMove repr
atom <- getAtomAt from repr
assert $ isJust atom
it "moves an atom into an unoccupied position" $ \repr -> monadicIO $ do
MoveFromTo _ to <- genMove repr
atom <- getAtomAt to repr
assert $ isNothing atom
it "performs only moves with the correct length" $ \repr -> monadicIO $ do
Move _ diff <- genMove repr
assert $ quadrance diff `elem` ([1..maxMoveQd] :: [_])
context "when generating many moves" $
beforeWith prepareMoves $ do
it "fails reasonably rarely" $ \(_, moves) ->
length moves `shouldSatisfy` (> 100)
beforeWith getAtoms $ do
it "moves binders sufficiently often" $ \atoms ->
length [x | Just (Binder x) <- atoms] `shouldSatisfy` (> 50)
it "moves beads sufficiently often" $ \atoms ->
length [x | Just (Bead x) <- atoms] `shouldSatisfy` (> 50)
it "does not move any lamins or frozen beads" $ \repr -> monadicIO $ do
MoveFromTo from _ <- genMove repr
Just atom <- getAtomAt from repr
case atom ^. located of
BinderSig binder -> assert $ binder ^. binderType /= laminType
BeadSig bead -> assert . not . freezePredicate $ bead ^. beadSignature
where
prepareMoves repr = (repr,) . catMaybes <$> replicateM 1000 (runWithRandom $ generateMove repr)
getAtoms (repr, moves) = forM moves $ flip getAtomAt repr . moveFrom
genMove repr = lift (runWithRandom $ generateMove repr) >>= maybe (stop rejected) pure
spec :: Spec
spec = do
context "the pure chain representation" $
testRepr (Proxy :: Proxy PureChainRepresentation) 2
context "the IO chain representation" $
testRepr (Proxy :: Proxy IOChainRepresentation) 2
context "the Slow chain representation" $
testRepr (Proxy :: Proxy (SlowChainRepresentation 4 4)) 4
|
Motions/motions
|
test/BasicSpec.hs
|
Haskell
|
apache-2.0
| 13,670
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE TypeFamilies #-}
module Web.Twitter.Conduit.Request (
HasParam,
APIRequest (..),
) where
import Data.Aeson
import GHC.TypeLits (Symbol)
import Network.HTTP.Client.MultipartFormData
import qualified Network.HTTP.Types as HT
import Web.Twitter.Conduit.Request.Internal
-- $setup
-- >>> :set -XOverloadedStrings -XDataKinds -XTypeOperators
-- >>> import Control.Lens
-- >>> import Web.Twitter.Conduit.Parameters
-- >>> type SampleId = Integer
-- >>> type SampleApi = '["count" ':= Integer, "max_id" ':= Integer]
-- >>> let sampleApiRequest :: APIRequest SampleApi [SampleId]; sampleApiRequest = APIRequest "GET" "https://api.twitter.com/sample/api.json" []
-- | API request. You should use specific builder functions instead of building this directly.
--
-- For example, suppose there were a @SampleApi@ type and a builder function named @sampleApiRequest@:
--
-- @
-- type SampleId = 'Integer'
-- sampleApiRequest :: 'APIRequest' SampleApi [SampleId]
-- sampleApiRequest = 'APIRequest' \"GET\" \"https:\/\/api.twitter.com\/sample\/api.json\" []
-- type SampleApi = '[ "count" ':= Integer
-- , "max_id" ':= Integer
-- ]
--
-- @
--
-- We can obtain request params from @'APIRequest' SampleApi [SampleId]@ :
--
-- >>> sampleApiRequest ^. params
-- []
--
-- The first type parameter of the 'APIRequest' lists the parameters the request supports.
-- For example, @sampleApiRequest@ supports two @Integer@ parameters, namely "count" and "max_id".
-- You can update those parameters via the label lenses (@#count@ and @#max_id@ respectively):
--
-- >>> (sampleApiRequest & #count ?~ 100 & #max_id ?~ 1234567890) ^. params
-- [("max_id",PVInteger {unPVInteger = 1234567890}),("count",PVInteger {unPVInteger = 100})]
-- >>> (sampleApiRequest & #count ?~ 100 & #max_id ?~ 1234567890 & #count .~ Nothing) ^. params
-- [("max_id",PVInteger {unPVInteger = 1234567890})]
data APIRequest (supports :: [Param Symbol *]) responseType
= APIRequest
{ _method :: HT.Method
, _url :: String
, _params :: APIQuery
}
| APIRequestMultipart
{ _method :: HT.Method
, _url :: String
, _params :: APIQuery
, _part :: [Part]
}
| APIRequestJSON
{ _method :: HT.Method
, _url :: String
, _params :: APIQuery
, _body :: Value
}
instance Parameters (APIRequest supports responseType) where
type SupportParameters (APIRequest supports responseType) = supports
params f (APIRequest m u pa) = APIRequest m u <$> f pa
params f (APIRequestMultipart m u pa prt) =
(\p -> APIRequestMultipart m u p prt) <$> f pa
params f (APIRequestJSON m u pa body) = (\p -> APIRequestJSON m u p body) <$> f pa
instance Show (APIRequest apiName responseType) where
show (APIRequest m u p) = "APIRequest " ++ show m ++ " " ++ show u ++ " " ++ show (makeSimpleQuery p)
show (APIRequestMultipart m u p _) = "APIRequestMultipart " ++ show m ++ " " ++ show u ++ " " ++ show (makeSimpleQuery p)
show (APIRequestJSON m u p _) = "APIRequestJSON " ++ show m ++ " " ++ show u ++ " " ++ show (makeSimpleQuery p)
|
himura/twitter-conduit
|
src/Web/Twitter/Conduit/Request.hs
|
Haskell
|
bsd-2-clause
| 3,255
|
{-# LANGUAGE FlexibleInstances #-}
module Util.UnixDiff where
import Data.Algorithm.Diff
import qualified Data.Algorithm.DiffOutput as O
import Language.Clojure.AST
data GroupDiffAction = OMod LineRange LineRange
| OIns LineRange Int
| ODel LineRange Int
deriving (Show)
data DiffAction = Copy (Int, Int)
| Ins Int
| Del Int
deriving Eq
preprocess :: String -> String -> [DiffAction]
preprocess s1 s2 = map processDiff (diff s1 s2)
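-- A small worked example (an addition): diffing "a\nb" against "a\nc" keeps
-- line 1 and replaces line 2, so
--
--   preprocess "a\nb" "a\nc"
--
-- yields roughly [Copy (1,1), Del 2, Ins 2], rendered as "1 % 1", "2-" and
-- "2+" by the Show instance below.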
preprocessGrouped :: String -> String -> [GroupDiffAction]
preprocessGrouped s1 s2 = map processGroupedDiff (groupedDiff s1 s2)
diff :: String -> String -> [Diff (String, Int)]
diff s1 s2 = getDiffBy eqIgnoringLines (withLineN s1) (withLineN s2)
groupedDiff :: String -> String -> [O.DiffOperation O.LineRange]
groupedDiff f1 f2 = O.diffToLineRanges $ getGroupedDiff (lines f1) (lines f2)
withLineN :: String -> [(String, Int)]
withLineN s = zip (lines s) [1..]
eqIgnoringLines s1 s2 = fst s1 == fst s2
ppPDiff :: [DiffAction] -> String
ppPDiff = foldl (\d a -> d ++ "\n" ++ show a) ""
processDiff :: Diff (String, Int) -> DiffAction
processDiff (Both (_, i1) (_, i2)) = (Copy (i1, i2))
processDiff (First (_, i)) = (Del i)
processDiff (Second (_, i)) = (Ins i)
processGroupedDiff :: O.DiffOperation O.LineRange -> GroupDiffAction
processGroupedDiff (O.Change srcR dstR) = OMod (extractLineRange srcR) (extractLineRange dstR)
processGroupedDiff (O.Addition lr line) = OIns (extractLineRange lr) line
processGroupedDiff (O.Deletion lr line) = ODel (extractLineRange lr) line
extractLineRange :: O.LineRange -> LineRange
extractLineRange lr = Range start end
where
(start, end) = O.lrNumbers lr
instance Show DiffAction where
show (Copy (i1, i2)) = show i1 ++ " % " ++ show i2
show (Ins i) = show i ++ "+"
show (Del i) = show i ++ "-"
|
nazrhom/vcs-clojure
|
src/Util/UnixDiff.hs
|
Haskell
|
bsd-3-clause
| 1,872
|
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
module MVC.EventHandler where
import Control.Lens
import Control.Monad
import Control.Monad.Reader (MonadReader (..))
import Control.Monad.State (MonadState (..))
import Control.Monad.Trans.Reader (ReaderT)
import qualified Control.Monad.Trans.Reader as R
import Control.Monad.Trans.State.Strict (State, StateT)
import qualified Control.Monad.Trans.State.Strict as S
import Pipes
import MVC.Event (EitherSomeEvent,Event,Msg(..),SomeEvent(..),toEitherSomeEvent)
-----------------------------------------------------------------------------
type ModelP a b s = Pipe a b (State s)
newtype EventHandlerP a b s r =
EventHandlerP (StateT (EventHandler a b s) (ModelP a b s) r)
deriving (Functor,Applicative,Monad,MonadState (EventHandler a b s))
newtype SomeEventHandlerP a b s r =
SomeEventHandlerP (StateT (SomeEventHandler a b s) (ModelP a b s) r)
deriving (Functor,Applicative,Monad,MonadState (SomeEventHandler a b s))
newtype HandleEvent v r =
HandleEvent (ReaderT (Int,AppStateAPI v) (StateT v (State (AppState v))) r)
deriving (Functor,Applicative,Monad,MonadReader (Int,AppStateAPI v))
type HandleEventResult a b v = HandleEvent v [Either a b]
class HandlesEvent v where
type AppState v :: *
type EventIn v :: *
type EventOut v :: *
data AppStateAPI v :: *
handleEvent :: v -> EventIn v -> HandleEventResult (EventIn v) (EventOut v) v
data SomeEventHandler :: * -> * -> * -> * where
SomeEventHandler :: (HandlesEvent v, AppState v ~ s, EventIn v ~ a, EventOut v ~ b) =>
{ _ehId :: Int
, _ehAPI :: AppStateAPI v
, _ehEventIn :: a' -> Maybe a
, _ehEventOut :: Either a b -> Either a' b'
, _ehEventHandler :: v
} -> SomeEventHandler a' b' s
ehId :: Lens' (SomeEventHandler a b v) Int
ehId f (SomeEventHandler i a ein eout s) = (\i' -> SomeEventHandler i' a ein eout s) <$> f i
newtype EventHandler a b s =
EventHandler { _eventHandlers :: [SomeEventHandler a b s] }
deriving (Monoid)
mkEventHandler :: SomeEventHandler a b s -> EventHandler a b s
mkEventHandler = EventHandler . (:[])
eventHandlers :: Lens' (EventHandler a b s) [SomeEventHandler a b s]
eventHandlers f (EventHandler h) = (\h' -> EventHandler h') <$> f h
initialiseEventHandler :: EventHandler a b s -> EventHandler a b s
initialiseEventHandler = over eventHandlers (zipWith (set ehId) [1..])
-----------------------------------------------------------------------------
runRecursiveEventHandler :: EventHandler a b s -> ModelP a b s ()
runRecursiveEventHandler = flip runEventHandlerP recursiveEventHandlerP
runEventHandlerP :: EventHandler a b s -> EventHandlerP a b s r -> ModelP a b s r
runEventHandlerP eventhandler (EventHandlerP eventHandlerP) = S.evalStateT eventHandlerP eventhandler
recursiveEventHandlerP :: EventHandlerP a b s ()
recursiveEventHandlerP = forever $ EventHandlerP (lift await) >>= go
where
go e = do
r <- forEventHandlers $ do
eventHandler <- getEventHandlerP
appState <- getAppStateP
let (r,appSvc',appState') = runEventHandler eventHandler appState e
putEventHandlerP appSvc'
putAppStateP appState'
return r
mapM_ (either go releaseP) r
runEventHandler :: SomeEventHandler a b s -> s -> a -> ([Either a b],SomeEventHandler a b s,s)
runEventHandler eventHandler@SomeEventHandler{..} appstate event =
maybe ignore process (_ehEventIn event)
where
ignore = ([],eventHandler,appstate)
process event' =
let
(HandleEvent handleEvent') = handleEvent _ehEventHandler event'
((events,eventHandler'),appstate') = S.runState (S.runStateT (R.runReaderT handleEvent' (_ehId,_ehAPI)) _ehEventHandler) appstate
in
(map _ehEventOut events,(SomeEventHandler _ehId _ehAPI _ehEventIn _ehEventOut eventHandler'),appstate')
-----------------------------------------------------------------------------
forEventHandlers :: (Monoid r) => SomeEventHandlerP a b s r -> EventHandlerP a b s r
forEventHandlers (SomeEventHandlerP handler) = EventHandlerP $ zoom (eventHandlers . traverse) handler
releaseP :: b -> EventHandlerP a b s ()
releaseP = EventHandlerP . lift . yield
getEventHandlerP :: SomeEventHandlerP a b s (SomeEventHandler a b s)
getEventHandlerP = SomeEventHandlerP S.get
putEventHandlerP :: SomeEventHandler a b s -> SomeEventHandlerP a b s ()
putEventHandlerP = SomeEventHandlerP . S.put
getAppStateP :: SomeEventHandlerP a b s s
getAppStateP = SomeEventHandlerP $ lift $ lift S.get
putAppStateP :: s -> SomeEventHandlerP a b s ()
putAppStateP = SomeEventHandlerP . lift . lift . S.put
-----------------------------------------------------------------------------
getEventHandlerId :: HandleEvent a Int
getEventHandlerId = HandleEvent (R.asks fst)
getAppStateAPI :: HandleEvent a (AppStateAPI a)
getAppStateAPI = HandleEvent (R.asks snd)
getEventHandler :: HandlesEvent a => HandleEvent a a
getEventHandler = HandleEvent $ lift $ S.get
putEventHandler :: HandlesEvent a => a -> HandleEvent a ()
putEventHandler = HandleEvent . lift . S.put
getAppState :: HandleEvent a (AppState a)
getAppState = HandleEvent $ lift $ lift $ S.get
putAppState :: AppState a -> HandleEvent a ()
putAppState = HandleEvent . lift . lift . S.put
getsAppState :: (AppState a -> r) -> HandleEvent a r
getsAppState f = liftM f getAppState
modifyAppState :: (AppState a -> AppState a) -> HandleEvent a ()
modifyAppState = HandleEvent . lift . lift . S.modify
modifyAppState' :: (AppState a -> (r,AppState a)) -> HandleEvent a r
modifyAppState' f = do
s <- getAppState
let (r,s') = f s
putAppState s'
return r
modifyAppState'' :: (AppState a -> Maybe (r,AppState a)) -> HandleEvent a (Maybe r)
modifyAppState'' f = do
s <- getAppState
maybe (return Nothing) (\(r,s') -> putAppState s' >> return (Just r)) (f s)
withAppState :: (AppState a -> HandleEvent a r) -> HandleEvent a r
withAppState f = getAppState >>= f
noEvents :: HandleEvent a [b]
noEvents = return []
propagate :: b -> Either b c
propagate = Left
propagate' :: Event b => b -> EitherSomeEvent
propagate' = propagate . SomeEvent
release :: c -> Either b c
release = Right
release' :: Event c => c -> EitherSomeEvent
release' = release . SomeEvent
-----------------------------------------------------------------------------
data LogEventHandler s = LogEventHandler
instance HandlesEvent (LogEventHandler s) where
type AppState (LogEventHandler s) = s
type EventIn (LogEventHandler s) = String
type EventOut (LogEventHandler s) = Msg
data AppStateAPI (LogEventHandler s) = LogEventHandlerAPI
handleEvent _ = return . (:[]) . release . Msg
newLogEventHandler :: (a -> Maybe String) -> (Either String Msg -> Either a b) -> EventHandler a b s
newLogEventHandler ein eout = EventHandler [SomeEventHandler 0 LogEventHandlerAPI ein eout LogEventHandler]
newLogEventHandler' :: EventHandler SomeEvent SomeEvent s
newLogEventHandler' = newLogEventHandler (Just . show) toEitherSomeEvent
|
cmahon/mvc-service
|
library/MVC/EventHandler.hs
|
Haskell
|
bsd-3-clause
| 7,244
|
-----------------------------------------------------------------------------
-- |
-- Module : Main
-- Description : Copies all files with the specified filename beginning
-- from the first directory to the second directory
-- Copyright : (c) Artem Tsushko, 2015
-- License : BSD3
-- Maintainer : artemtsushko@gmail.com
-- Stability : provisional
-- Portability : POSIX
--
-- Copies all files with the specified filename beginning
-- from the first directory to the second directory
-- /Usage/
-- Provide the input directory and the output directory as command line arguments.
-- Then, when prompted, enter the beginning of the filenames to copy.
-----------------------------------------------------------------------------
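-- Example session (illustrative only; the program name, paths and sizes are
-- hypothetical):
--
--   $ matching-files-copier ~/incoming ~/archive
--   enter file beginning: report-
--   Copied 10240 bytes.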
module Main (
main
) where
import System.Directory
import System.Environment
import System.FilePath.Posix (combine)
import System.IO
import System.IO.Error
import System.Posix
import Control.Monad
import Data.List
main :: IO ()
main = copyMatchingFiles `catchIOError` handleError
{- | Asks the user for a filename beginning and then copies all files matching
this pattern from the first directory to the second one. The directory
paths are provided as command line arguments.
-}
copyMatchingFiles :: IO ()
copyMatchingFiles = do
(inputDir:outputDir:_) <- mapM canonicalizePath =<< getArgs
if inputDir /= outputDir
then do
beginning <- putStr "enter file beginning: " >> hFlush stdout >> getLine
filesToCopy <- filterM (doesFileExist . combine inputDir)
. filter (beginning `isPrefixOf`)
=<< getDirectoryContents inputDir
sizes <- forM filesToCopy (\filename -> do
let inputFile = combine inputDir filename
outputFile = combine outputDir filename
copyFile inputFile outputFile
getFileSize inputFile )
putStrLn $ "Copied " ++ show (sum sizes) ++ " bytes."
else
putStrLn "Both directories are the same"
-- | Takes a path to file and returns the file's size in bytes
getFileSize :: String -> IO FileOffset
getFileSize path = do
stat <- getFileStatus path
return (fileSize stat)
{- | Handles errors in such cases:
* User didn't pass at least 2 command line arguments
* The specified paths don't point to existing directories
-}
handleError :: IOError -> IO ()
handleError e
| isUserError e = do -- triggered if the user didn't pass at least 2 arguments
progName <- getProgName
putStrLn $ "Usage: " ++ progName ++ " " ++ "<inputDirPath> <outputDirPath>"
| isDoesNotExistError e = case ioeGetFileName e of
Just path -> putStrLn $ "Directory does not exist at: " ++ path
Nothing -> putStrLn "File or directory does not exist"
| otherwise = ioError e
|
artemtsushko/matching-files-copier
|
src/Main.hs
|
Haskell
|
bsd-3-clause
| 2,799
|
module MultiVersion where
import Data.List
import qualified Data.Map as Map
import Data.Version
import Distribution.Simple.PackageIndex (allPackages)
import Distribution.Package (PackageIdentifier(..), PackageName(..))
import Distribution.InstalledPackageInfo (InstalledPackageInfo(..))
import Config
import InstalledPackages
main :: Config -> [String] -> IO ()
main config _args = do
pkgs <- getAllPackages config
putStr
. unlines
. concatMap (\(p,vs) -> map (\v -> p ++ "-" ++ showVersion v) vs)
. Map.toList
. Map.filter (not . null)
. fmap removeMaximum
. Map.fromListWith (++)
. map (\(x,y) -> (x,[y]))
. map toPkgTuple
$ allPackages pkgs
toPkgTuple :: InstalledPackageInfo -> (String, Version)
toPkgTuple pkg = (name, currentVersion)
where
pkgId = sourcePackageId pkg
name = unPackageName (pkgName pkgId)
currentVersion = pkgVersion pkgId
removeMaximum :: [Version] -> [Version]
removeMaximum xs = delete (maximum xs) xs
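-- Worked example (informal, an addition): if "foo" is installed at versions
-- 1.0, 1.2 and 2.0, 'removeMaximum' keeps [1.0, 1.2], so the report lists
-- "foo-1.0" and "foo-1.2" -- the redundant, non-latest installs.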
|
glguy/GhcPkgUtils
|
MultiVersion.hs
|
Haskell
|
bsd-3-clause
| 990
|
{-# LANGUAGE OverloadedStrings #-}
module Spec.Helpers where
import Base
import Models (true, false)
import Util (toSqlKey)
fromBool :: Bool -> TValueId
fromBool True = true
fromBool _ = false
(==.) :: Int64 -> Bool -> Formula PropertyId
(==.) p v = Atom (toSqlKey p) v
(&&.) :: Formula a -> Formula a -> Formula a
(&&.) f (And sf) = And (f:sf)
(&&.) (And sf) f = And (f:sf)
(&&.) f1 f2 = And [f1, f2]
(||.) :: Formula a -> Formula a -> Formula a
(||.) f (Or sf) = Or (f:sf)
(||.) (Or sf) f = Or (f:sf)
(||.) f1 f2 = Or [f1, f2]
(~.) :: (Int64, Int64) -> Bool -> Trait
(~.) (sid,pid) tf = Trait (toSqlKey sid) (toSqlKey pid) (fromBool tf) "" False
(=>.) :: Formula a -> Formula a -> Implication a
(=>.) a c = Implication a c ""
(|=) :: SpaceId -> PropertyId -> Trait
(|=) s p = Trait s p true "" False
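-- A usage sketch (an addition; the numeric keys are hypothetical): a formula
-- stating that property 1 holds and property 2 fails, plus an implication
-- built from it.
exampleFormula :: Formula PropertyId
exampleFormula = (1 ==. True) &&. (2 ==. False)
exampleImplication :: Implication PropertyId
exampleImplication = exampleFormula =>. (3 ==. True)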
|
jamesdabbs/pi-base-2
|
test/Spec/Helpers.hs
|
Haskell
|
bsd-3-clause
| 813
|
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Package
-- Copyright : Isaac Jones 2003-2004
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- Defines a package identifier along with a parser and pretty printer for it.
-- 'PackageIdentifier's consist of a name and an exact version. It also defines
-- a 'Dependency' data type. A dependency is a package name and a version
-- range, like @\"foo >= 1.2 && < 2\"@.
{- All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Isaac Jones nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -}
module Distribution.Package (
-- * Package ids
PackageName(..),
PackageIdentifier(..),
PackageId,
-- * Package dependencies
Dependency(..),
thisPackageVersion,
notThisPackageVersion,
-- * Package classes
Package(..), packageName, packageVersion,
PackageFixedDeps(..),
) where
import Distribution.Version
( Version(..), VersionRange, anyVersion, thisVersion, notThisVersion )
import Distribution.Text (Text(..))
import qualified Distribution.Compat.ReadP as Parse
import Distribution.Compat.ReadP ((<++))
import qualified Text.PrettyPrint as Disp
import Text.PrettyPrint ((<>), (<+>))
import qualified Data.Char as Char ( isDigit, isAlphaNum )
import Data.List ( intersperse )
newtype PackageName = PackageName String
deriving (Read, Show, Eq, Ord)
instance Text PackageName where
disp (PackageName n) = Disp.text n
parse = do
ns <- Parse.sepBy1 component (Parse.char '-')
return (PackageName (concat (intersperse "-" ns)))
where
component = do
cs <- Parse.munch1 Char.isAlphaNum
if all Char.isDigit cs then Parse.pfail else return cs
-- each component must contain an alphabetic character, to avoid
-- ambiguity in identifiers like foo-1 (the 1 is the version number).
-- | Type alias so we can use the shorter name PackageId.
type PackageId = PackageIdentifier
-- | The name and version of a package.
data PackageIdentifier
= PackageIdentifier {
pkgName :: PackageName, -- ^The name of this package, eg. foo
pkgVersion :: Version -- ^the version of this package, eg 1.2
}
deriving (Read, Show, Eq, Ord)
instance Text PackageIdentifier where
disp (PackageIdentifier n v) = case v of
Version [] _ -> disp n -- if no version, don't show version.
_ -> disp n <> Disp.char '-' <> disp v
parse = do
n <- parse
v <- (Parse.char '-' >> parse) <++ return (Version [] [])
return (PackageIdentifier n v)
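-- Informal example (an addition): @disp (PackageIdentifier (PackageName "foo")
-- (Version [1,2] []))@ renders as @foo-1.2@, and parsing @"foo-1.2"@ with the
-- 'Text' instance above recovers the same identifier.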
-- ------------------------------------------------------------
-- * Package dependencies
-- ------------------------------------------------------------
data Dependency = Dependency PackageName VersionRange
deriving (Read, Show, Eq)
instance Text Dependency where
disp (Dependency name ver) =
disp name <+> disp ver
parse = do name <- parse
Parse.skipSpaces
ver <- parse <++ return anyVersion
Parse.skipSpaces
return (Dependency name ver)
thisPackageVersion :: PackageIdentifier -> Dependency
thisPackageVersion (PackageIdentifier n v) =
Dependency n (thisVersion v)
notThisPackageVersion :: PackageIdentifier -> Dependency
notThisPackageVersion (PackageIdentifier n v) =
Dependency n (notThisVersion v)
-- | Class of things that can be identified by a 'PackageIdentifier'
--
-- Types in this class are all notions of a package. This allows us to have
-- different types for the different phases that packages go though, from
-- simple name\/id, package description, configured or installed packages.
--
class Package pkg where
packageId :: pkg -> PackageIdentifier
packageName :: Package pkg => pkg -> PackageName
packageName = pkgName . packageId
packageVersion :: Package pkg => pkg -> Version
packageVersion = pkgVersion . packageId
instance Package PackageIdentifier where
packageId = id
-- | Subclass of packages that have specific versioned dependencies.
--
-- So for example a not-yet-configured package has dependencies on version
-- ranges, not specific versions. A configured or an already installed package
-- depends on exact versions. Some operations or data structures (like
-- dependency graphs) only make sense on this subclass of package types.
--
class Package pkg => PackageFixedDeps pkg where
depends :: pkg -> [PackageIdentifier]
|
dcreager/cabal
|
Distribution/Package.hs
|
Haskell
|
bsd-3-clause
| 5,898
|
{-# LANGUAGE OverloadedStrings #-}
module Data.IHO.S52.SVG.Renderer (renderVectorInstructions) where
import Data.Text (Text)
import Data.Monoid
import Data.Either
import Data.Map (Map)
import qualified Data.Map as Map
import Text.Blaze.Svg (Svg)
import Text.Blaze.Internal ((!))
import qualified Text.Blaze.Internal as SVG
import qualified Text.Blaze.Svg11 as SVG
import qualified Text.Blaze.Svg11.Attributes as A
import Control.Monad.RWS
import Data.IHO.S52.Types
import Data.IHO.S52.SVG.Helper
import Data.Int
data RendererConfig =
RendererConfig {
penFactor :: Integer,
lookupColour :: Char -> Text
}
renderConfig :: Map Char Text -> RendererConfig
renderConfig ctbl = RendererConfig {
penFactor = 30,
lookupColour =
\c -> if (c == '@') then "none"
else maybe (error $ "lookupColour: undefined colour: " ++ show c) id $
Map.lookup c ctbl
}
type PolygonBuffer = (Vector2, [Either VectorInstruction (Int16, Int16, Int16)])
data RendererState =
RendererState {
config :: RendererConfig,
penPos :: Vector2,
penWidth :: Integer,
penColour :: Text,
inPolygon :: Bool,
fillTrans :: Float,
lineBuffer :: [VectorInstruction],
polygonBuffers :: [PolygonBuffer]
}
type RenderAction = RWS VectorInstruction Svg RendererState
renderState :: RendererConfig -> RendererState
renderState cfg = RendererState {
config = cfg,
penPos = (0,0),
penWidth = penFactor cfg,
penColour = "none",
inPolygon = False,
fillTrans = 1,
lineBuffer = mempty,
polygonBuffers = mempty
}
renderVectorInstructions :: Map Char Text -> [VectorInstruction] -> Svg
renderVectorInstructions cmap is =
let cfg = renderConfig cmap
s0 = renderState cfg
in SVG.g $ renderVectorInstructions' s0 is
renderVectorInstructions' :: RendererState -> [VectorInstruction] -> Svg
renderVectorInstructions' s0 [] =
snd $ evalRWS renderLineBuffer undefined s0
renderVectorInstructions' s0 (i:is) =
let (s1, w) = execRWS renderVectorInstruction i s0
in w `mappend` (renderVectorInstructions' s1 is)
renderVectorInstruction :: RenderAction ()
renderVectorInstruction = ask >>= evalVectorInstruction
evalVectorInstruction :: VectorInstruction -> RenderAction ()
evalVectorInstruction (SetPenColour c) = do
renderLineBuffer
setPenColour c
evalVectorInstruction (SetPenWidth w) = do
renderLineBuffer
setPenWidth w
evalVectorInstruction (SetPenTransparency t) = setFillTrans t
evalVectorInstruction i@(PenUp v) = do
addBufferInstruction i
modify (\s -> s { penPos = v } )
evalVectorInstruction i@(PenDraw v) = do
addBufferInstruction i
modify (\s -> s { penPos = v } )
evalVectorInstruction i@(Circle r) = do
st <- get
if (inPolygon st)
then addBufferInstruction i
else do renderLineBuffer
let (x, y) = penPos st
strokeWA = A.strokeWidth . SVG.toValue . toInteger . penWidth $ st
classA = A.class_ . SVG.textValue . mconcat $ ["s52 fill_none stroke_"
, penColour st]
tell $ svgCircle r x y ! classA ! strokeWA
evalVectorInstruction (PolygonMode EnterPolygonMode) = do
st <- get
if (inPolygon $ st)
then fail "eval EntePolygonMode: must be in line mode"
else createNewPolygonBuffer
evalVectorInstruction (PolygonMode SubPolygon) = createNewPolygonBuffer
evalVectorInstruction (PolygonMode PolygonDone) = do
st <- get
let v = fst . last . polygonBuffers $ st
put st { penPos = v }
evalVectorInstruction OutlinePolygon =
renderPolygonBuffers False
evalVectorInstruction FillPolygon =
renderPolygonBuffers True
evalVectorInstruction (SymbolCall sy o) = do
(x,y) <- fmap penPos get
tell $ useSymbol x y sy -- TODO: handle orientation o
createNewPolygonBuffer :: RenderAction ()
createNewPolygonBuffer = do
st <- get
let newPBuffers = (penPos st, mempty) : (polygonBuffers st)
put st { polygonBuffers = newPBuffers
, inPolygon = True
}
addBufferInstruction :: VectorInstruction -> RenderAction ()
addBufferInstruction i = do
st <- get
if (inPolygon st)
then let pbuffers = polygonBuffers st
(v0, pb0) = head pbuffers
_i = case i of
(Circle r) -> let (x, y) = penPos st in Right (r, x, y)
inst -> Left inst
newPBuffers = (v0, pb0 ++ [_i]) : (drop 1 pbuffers)
in modify (\s -> s { polygonBuffers = newPBuffers})
else let newLBuffer = (lineBuffer st) ++ [i]
in modify (\s -> s { lineBuffer = newLBuffer})
setPenColour :: Char -> RenderAction ()
setPenColour c =
modify (\st -> st { penColour = (lookupColour . config $ st) c } )
setPenWidth :: Integral i => i -> RenderAction ()
setPenWidth w =
modify (\st -> st { penWidth = toInteger w * (penFactor . config $ st) } )
setFillTrans :: (Show i, Integral i) => i -> RenderAction ()
setFillTrans t
| t < 0 || t > 4 = fail $ "setFillTrans: expected 0 <= t <= 4, got t=" ++ show t
| otherwise = modify (\st -> st { fillTrans = 1.0 - 0.25 * fromIntegral t } )
renderPathCmd :: VectorInstruction -> SVG.Path
renderPathCmd (PenUp (x, y)) = SVG.m x y
renderPathCmd (PenDraw (x, y)) = SVG.l x y
renderPathCmd _c = fail $ "undefined Path Command: " ++ show _c
-- | renders and clears the (non empty) Line Buffer
renderLineBuffer :: RenderAction ()
renderLineBuffer = do
st <- get
let lb = lineBuffer st
case lb of
[] -> return ()
_ ->
let _is = map renderPathCmd lb
pathA = A.d . SVG.mkPath $ sequence _is >> return ()
classA = A.class_ . SVG.textValue . mconcat $ ["s52 fill_none stroke_", penColour st]
strokeWA = A.strokeWidth . SVG.toValue . toInteger . penWidth $ st
in do tell $ SVG.path ! classA ! strokeWA ! pathA
put st { lineBuffer = mempty }
return ()
-- | renders and clear the Polygon Buffers
renderPolygonBuffers :: Bool -> RenderAction ()
renderPolygonBuffers fill = do
st <- get
let pBuffers = polygonBuffers st
_ <- sequence $ map (renderPolygonBuffer fill) pBuffers
let v = fst . last . polygonBuffers $ st
put st { penPos = v , polygonBuffers = mempty, inPolygon = False }
renderPolygonBuffer :: Bool -> PolygonBuffer -> RenderAction ()
renderPolygonBuffer fill (p0, xs) =
let _cs = rights xs
_is = (map renderPathCmd ((PenUp p0) : (lefts xs))) ++ [SVG.z]
pathA = A.d . SVG.mkPath $ sequence _is >> return ()
in do
st <- get
let classA = A.class_ . SVG.textValue . mconcat $
if (fill)
then ["s52 stroke_none fill_", penColour st]
else ["s52 fill_none stroke_", penColour st]
fillOA = A.fillOpacity . SVG.toValue . fillTrans $ st
strokeWA = A.strokeWidth . SVG.toValue . toInteger . penWidth $ st
if (fill)
then do tell $ SVG.path ! fillOA ! classA ! pathA
_ <- sequence $ map (\(r, cx, cy) ->
tell $ svgCircle r cx cy ! classA ! fillOA) _cs
return ()
else do tell $ SVG.path ! classA ! strokeWA ! pathA
_ <- sequence $ map (\(r, cx, cy) ->
tell $ svgCircle r cx cy ! classA) _cs
return ()
|
alios/iho-presentation
|
Data/IHO/S52/SVG/Renderer.hs
|
Haskell
|
bsd-3-clause
| 7,278
|
{-
PickSquare.hs (adapted from picksquare.c which is (c) Silicon Graphics, Inc.)
Copyright (c) Sven Panne 2003 <sven_panne@yahoo.com>
This file is part of HOpenGL and distributed under a BSD-style license
See the file libraries/GLUT/LICENSE
Use of multiple names and picking are demonstrated. A 3x3 grid of squares is
drawn. When the left mouse button is pressed, all squares under the cursor
position have their color changed.
-}
import Data.Array ( Array, listArray, (!) )
import Data.IORef ( IORef, newIORef, readIORef, modifyIORef )
import System.Exit ( exitWith, ExitCode(ExitSuccess) )
import Graphics.UI.GLUT
type Board = Array (GLint,GLint) (IORef Int)
-- Clear color value for every square on the board
myInit :: IO Board
myInit = do
clearColor $= Color4 0 0 0 0
refs <- sequence . replicate 9 . newIORef $ 0
return $ listArray ((0,0),(2,2)) refs
-- The nine squares are drawn. Each square is given two names: one for the row
-- and the other for the column on the grid. The color of each square is
-- determined by its position on the grid, and the value in the board array.
-- Note: In contrast to the original example, we always give names to
-- squares, regardless of the render mode. This simplifies the code a bit and
-- is even suggested by the Red Book.
drawSquares :: Board -> IO ()
drawSquares board =
flip mapM_ [ 0 .. 2 ] $ \i -> do
loadName (Name (fromIntegral i))
flip mapM_ [ 0 .. 2 ] $ \j ->
withName (Name (fromIntegral j)) $ do
val <- readIORef (board ! (i,j))
-- resolve overloading, not needed in "real" programs
let color3f = color :: Color3 GLfloat -> IO ()
color3f (Color3 (fromIntegral i / 3.0)
(fromIntegral j / 3.0)
(fromIntegral val / 3.0))
rect (Vertex2 i j) (Vertex2 (i + 1) (j + 1))
-- processHits prints the hit records and updates the board array.
processHits :: Maybe [HitRecord] -> Board -> IO ()
processHits Nothing _ = putStrLn "selection buffer overflow"
processHits (Just hitRecords) board = do
putStrLn ("hits = " ++ show (length hitRecords))
mapM_ (\(HitRecord z1 z2 names) -> do
putStrLn (" number of names for this hit = " ++ show (length names))
putStr (" z1 is " ++ show z1)
putStrLn ("; z2 is " ++ show z2)
putStr " names are"
sequence_ [ putStr (" " ++ show n) | Name n <- names ]
putChar '\n'
let [i, j] = [ fromIntegral n | Name n <- names ]
modifyIORef (board ! (i,j)) (\x -> (x + 1) `mod` 3))
hitRecords
-- pickSquares sets up selection mode, name stack, and projection matrix for
-- picking. Then the objects are drawn.
bufSize :: GLsizei
bufSize = 512
pickSquares :: Board -> KeyboardMouseCallback
pickSquares board (MouseButton LeftButton) Down _ (Position x y) = do
vp@(_, (Size _ height)) <- get viewport
(_, maybeHitRecords) <- getHitRecords bufSize $
withName (Name 0) $ do
matrixMode $= Projection
preservingMatrix $ do
loadIdentity
-- create 5x5 pixel picking region near cursor location
pickMatrix (fromIntegral x, fromIntegral height - fromIntegral y) (5, 5) vp
ortho2D 0 3 0 3
drawSquares board
flush
processHits maybeHitRecords board
postRedisplay Nothing
pickSquares _ (Char '\27') Down _ _ = exitWith ExitSuccess
pickSquares _ _ _ _ _ = return ()
display :: Board -> DisplayCallback
display board = do
clear [ ColorBuffer ]
drawSquares board
flush
reshape :: ReshapeCallback
reshape size = do
viewport $= (Position 0 0, size)
matrixMode $= Projection
loadIdentity
ortho2D 0 3 0 3
matrixMode $= Modelview 0
loadIdentity
-- Main Loop
main :: IO ()
main = do
(progName, _args) <- getArgsAndInitialize
initialDisplayMode $= [ SingleBuffered, RGBMode ]
initialWindowSize $= Size 100 100
initialWindowPosition $= Position 100 100
createWindow progName
board <- myInit
reshapeCallback $= Just reshape
displayCallback $= display board
keyboardMouseCallback $= Just (pickSquares board)
mainLoop
|
OS2World/DEV-UTIL-HUGS
|
demos/GLUT/examples/RedBook/PickSquare.hs
|
Haskell
|
bsd-3-clause
| 4,192
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DataKinds #-}
{- |
Module : Text.Garnett.Completers.BashCompleter
Copyright : Copyright (C) 2014 Julian K. Arni
License : BSD3
Maintainer : Julian K. Arni <jkarni@gmail.com>
Stability : alpha
Convert a GarnettFile to a bash completion script.
-}
module Text.Garnett.Completers.BashCompleter where
import Data.List hiding (group)
import Control.Lens
import Control.Arrow ((&&&))
import Control.Monad.Free
import qualified Data.Map as Map
import qualified Data.Text as T
import Text.Garnett.Definition
import Text.Garnett.Completers.ShellDSL
--------------------------------------------------------------------------
-- BashWriter
--
-- In broad outlines, we do the following: For each subparser, create a
-- bash function that can handle completion for that parser. Then create
-- another function that picks the right subparser based on the completion
-- so far, and that can also complete the subparser names themselves. Add a
-- few predefined helper bash functions.
--------------------------------------------------------------------------
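-- Illustrative sketch (not part of the original module): for a hypothetical
-- program "prog" with a subparser "build" taking -v/--verbose, the per-parser
-- completion function generated by 'eachParser' is expected to look roughly
-- like:
--
-- > _build() {
-- >     local cur prev words
-- >     COMPREPLY=()
-- >     _get_comp_words_by_ref cur prev words
-- >     case "cur" in
-- >         -*) COMPREPLY=( $( compgen -W ' -v --verbose' -- $cur )) ;;
-- >     esac
-- > }
--
-- with 'delegator' producing a top-level "_prog" function that dispatches on
-- the subparser name. The exact rendering depends on ShellDSL.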
data Bash
instance GarnettWriter Bash where
fmt _ = Fmt "bash"
fromGarnett = toDoc . allBash
--------------------------------------------------------------------------
-- Generate appropriate code
--
--------------------------------------------------------------------------
lkupBash :: Map.Map Fmt a -> Maybe a
lkupBash = lkup (undefined::Bash)
-- | Create a completion function for a given parser.
eachParser :: GParser -> Free ShellF ()
eachParser gp = do
let shorts = foldr (\a b -> b ++ " -" ++ return a) "" $ getShortCompls gp
let longs = " --" ++ (intercalate " --" $ map T.unpack $ getLongCompls gp)
let compgen = shorts ++ longs
define ('_':(T.unpack $ gp ^. parserName)) $ do
stmt "local cur prev words"
stmt "COMPREPLY=()"
stmt "_get_comp_words_by_ref cur prev words"
caseStmt "cur" [( "-*", stmt ("COMPREPLY=( $( compgen -W '"
++ compgen ++ "' -- $cur ))"))
]
-- | A function that checks what subparser we're in, and delegates
-- completion to the corresponding bash function.
delegator :: GarnettFile -> Free ShellF ()
delegator gf = do
let myName = '_':(T.unpack $ gf ^. progName)
let pNames = fmap T.unpack $ gf ^. mainParser . subparsers ^.. folded . parserName
comment $ "Handles completion on subparser names, and delegates "
comment $ "completion to the subparser completion functions."
define myName $ do
stmt "local cur prev words"
stmt "COMPREPLY=()"
stmt "_get_comp_words_by_ref cur prev words"
ifStmt (stmt "${words[@]} -eq 2")
(stmt $ "COMPREPLY=( $( compgen -W '" ++ unwords pNames ++ "' -- $cur ))")
(caseStmt "words[1]" $ fmap (id &&& \x -> stmt $ '_':x ++ "()")
pNames)
stmt "return 0"
-- | The complete bash script.
allBash :: GarnettFile -> Free ShellF ()
allBash gf = do
comment $ "Completion script for " ++ (T.unpack $ gf ^. progName)
comment $ "Generated with Garnett"
mapM_ eachParser (gf ^. mainParser . subparsers)
delegator gf
-- stmt "complete -F
|
jkarni/Garnett
|
src/Text/Garnett/Completers/BashCompleter.hs
|
Haskell
|
bsd-3-clause
| 3,256
|
{-# LANGUAGE MultiParamTypeClasses #-}
module Data.Binary.Indexed where
import Data.Binary
import Data.Binary.Get
import Data.Binary.Put
import Data.ByteString.Lazy
class HBinary phi h where
hput :: phi ix -> h ix -> Put
hget :: phi ix -> Get (h ix)
hdecode :: HBinary phi h => phi ix -> ByteString -> h ix
hdecode phi = runGet (hget phi)
hencode :: HBinary phi h => phi ix -> h ix -> ByteString
hencode phi = runPut . hput phi
|
sebastiaanvisser/islay
|
src/Data/Binary/Indexed.hs
|
Haskell
|
bsd-3-clause
| 438
|
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveTraversable #-}
module Rawr.Note where
import GHC.Generics
import Text.Printf
import Data.Aeson
import qualified Data.List as DL
import Data.Maybe
import qualified Data.ByteString.Lazy.Char8 as DB8
data Note = Func String
| Note String
| Call String
| CallEdge String String
| EndFunc
deriving (Generic, Show, Eq, ToJSON)
isFunc (Func _) = True
isFunc _ = False
isCallEdge (CallEdge _ _) = True
isCallEdge _ = False
isEndFunc EndFunc = True
isEndFunc _ = False
isNote (Note _) = True
isNote _ = False
-- what is this for?
notesToString :: [Note] -> String
notesToString notes = DB8.unpack $ encode $ map notesToString' notes
replaceChar oldChar newChar cs = [if c == oldChar then newChar else c | c <- cs]
notesToString' :: Note -> String
notesToString' (Func name) =
let template = "{ \"function\" : \"%s\""
in printf template name
notesToString' EndFunc = "}"
notesToString' (Note note) = note
example = [ Func "f1"
, Note "asdf"
, Note "zxcv"
, Func "f2"
, Note "c"
, EndFunc
, Note "d"
, Func "f3"
, Func "f4"
, EndFunc
, EndFunc
, Note "e"
, EndFunc
]
callCollect [] = []
callCollect (Func caller : EndFunc : rest) = (Call caller : rest)
callCollect (Func caller1 : Func caller2 : rest) = callCollect (Func caller1 : (callCollect (Func caller2 : rest)))
callCollect (Func caller : Call callee : rest) = (CallEdge caller callee) : callCollect (Func caller : rest)
callCollect (Func caller : ce@(CallEdge _ _) : rest) = ce : callCollect (Func caller : rest)
callCollect (EndFunc : rest) = callCollect rest
callCollect xs = [EndFunc]
dropNotes xs = filter (not . isNote) xs
dotGraph :: [Note] -> String
dotGraph notes = let template = unlines [ "digraph {"
, "rankdir=\"LR\";"
, " %s "
, "}"
]
callEdges = DL.nub $ filter isCallEdge (callCollect (dropNotes notes))
nodeTxt = concat [printf "%s -> %s;\n" x y | (CallEdge x y) <- callEdges]
fixedTxt = replaceChar ':' ' '
$ replaceChar '\'' '_'
$ replaceChar '/' '_'
$ replaceChar '.' '_' nodeTxt
in printf template fixedTxt
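-- Illustrative note (not part of the original module): running the pipeline on
-- 'example' above, 'callCollect . dropNotes' is expected to produce the call
-- edges f1 -> f2, f3 -> f4 and f1 -> f3, so 'dotGraph example' should render
-- roughly:
--
-- > digraph {
-- > rankdir="LR";
-- > f1 -> f2;
-- > f3 -> f4;
-- > f1 -> f3;
-- > }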
|
drhodes/jade2hdl
|
rawr/src/Rawr/Note.hs
|
Haskell
|
bsd-3-clause
| 2,631
|
{-# OPTIONS_GHC -F -pgmF htfpp #-}
module VectorTests.Vector3 where
import Geometry.Space
import Test.Framework
import VectorTests.VectorGenerators ()
prop_vectorProductAnticommutativity :: Vector3 Double -> Vector3 Double -> Bool
prop_vectorProductAnticommutativity a b = cross a b == neg (cross b a)
prop_vectorProductZeroAngle :: Vector3 Double -> Bool
prop_vectorProductZeroAngle a = cross a a == Vector3 0 0 0
prop_vectorProductLength :: Vector3 Double -> Vector3 Double -> Bool
prop_vectorProductLength a b = normL2Squared (cross a b) <= normL2Squared a * normL2Squared b
|
achirkin/fgeom
|
test/VectorTests/Vector3.hs
|
Haskell
|
bsd-3-clause
| 600
|
module OTPSpec (main, spec) where
import Test.Hspec
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "someFunction" $ do
it "should work fine" $ do
True `shouldBe` False
|
pbogdan/otp-auth
|
test/OTPSpec.hs
|
Haskell
|
bsd-3-clause
| 200
|
----------------------------------------------------------------------------
-- |
-- Module : H
-- Copyright : (c) Sergey Vinokurov 2016
-- License : BSD3-style (see LICENSE)
-- Maintainer : serg.foo@gmail.com
-- Created : Sunday, 23 October 2016
----------------------------------------------------------------------------
module H where
import G (foo, bar)
data FooTyp = Foo Int
data BarTyp = Bar Double
data BazTyp = Baz String
|
sergv/tags-server
|
test-data/0007resolvable_import_cycle/H.hs
|
Haskell
|
bsd-3-clause
| 459
|
{-# LANGUAGE OverloadedStrings #-}
import Cataskell.Game
import Cataskell.Serialize
import Control.Monad.Random
import Control.Monad.State
import Control.Lens
import System.Random
import System.Environment (getArgs)
import Control.Exception (assert)
import Data.List (findIndex)
import Data.Aeson
import qualified Data.ByteString.Lazy as B
import System.IO (openBinaryFile, IOMode(WriteMode), hClose)
getToEnd :: Int -> Int -> Int -> (Int, [Game], Bool)
getToEnd maxIter maxSeed seed
= let (initialGame, r') = runRand (newGame ["1","2","3"]) (mkStdGen seed)
gs = map fst $ iterate (\(x, r) -> runRand (execStateT randomActGoodInitial x) r) (initialGame, r')
endsAt = findIndex ((== End) . view phase) gs
in if seed < maxSeed
then maybe (getToEnd maxIter maxSeed (seed+1)) (\end -> (seed, take (end+1) gs, True)) endsAt
else (seed, take maxIter gs, False)
main :: IO ()
main = do
args <- getArgs
let (iters, maxSeed) = case args of
[] -> (1000, 1000)
[i] -> (read i, 1000)
(i:s:_) -> (read i, read s)
let (seed, allGs, success) = getToEnd iters maxSeed 0
print (seed, success)
let gs' = encode allGs
h <- openBinaryFile "games.js" WriteMode
B.hPut h "var data="
B.hPut h gs'
B.hPut h ";"
hClose h
|
corajr/cataskell
|
cataskell-example/Main.hs
|
Haskell
|
bsd-3-clause
| 1,372
|
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
-- | Database schema versioning and Migration
--
--
module NejlaCommon.Persistence.Migration
( sql
, sqlFile
, migrate
, M
, SchemaVersion
, Migration(..)
-- * Helper functions
, schemaEmptyP
-- ** Re-exports
, gitHash
, P.rawExecute
, P.PersistValue(..)
, P.Single(..)
-- * Internal functions
, setupMetaSchema
, currentSchemaVersion
, registerMigration
) where
import Control.Monad.Logger
import Control.Monad.Reader
import qualified Data.List as List
import Data.Maybe ( fromMaybe )
import Data.Text ( Text )
import qualified Database.Persist.Sql as P
import Development.GitRev
import NejlaCommon.Persistence.Util ( sql, sqlFile )
import System.Exit ( exitFailure )
type M a = ReaderT P.SqlBackend (LoggingT IO) a
type SchemaVersion = Text
-- | Check if a schema is empty (i.e. hasn't been initialized)
-- Postgres-specific
schemaEmptyP :: Text -- ^ Name of the schema
-> M Bool
schemaEmptyP schema = do
res <- P.rawSql [sql|
SELECT relname
FROM pg_class c
INNER JOIN pg_namespace s
ON s.oid = c.relnamespace
WHERE s.nspname=?
|]
[ P.PersistText schema ] :: M [P.Single Text]
return $ List.null res
-- | Setup the metadata schema "_meta" and register an empty migration as a
-- starting point
setupMetaSchema :: M ()
setupMetaSchema = schemaEmptyP "_meta" >>= \case
-- DB versioning not initialized
True -> do
$logInfo "Schema versioning not found. Initializing now."
P.rawExecute $(sqlFile "src/NejlaCommon/Persistence/sql/initialize_versioning.sql")
[]
-- Schema versioning already installed
False -> return ()
-- | Query the current schema version
currentSchemaVersion :: M (Maybe SchemaVersion)
currentSchemaVersion = do
P.rawSql [sql|
SELECT _meta.schema_version();
|]
[] >>= \case
[ Nothing ] -> return Nothing
[ Just (P.Single (P.PersistText i)) ] -> return (Just i)
[ Just (P.Single P.PersistNull) ] -> return Nothing
_ -> error "currentSchemaVersion: wrong number of results"
-- | Register a migration. Shouldn't be used manually
registerMigration
:: Text -- ^ Program revision (e.g. git revision)
-> Maybe SchemaVersion -- ^ Expected schema version before migration
-> SchemaVersion -- ^ Schema version after the migration
-> Text -- ^ Description of the migration changes
-> M ()
registerMigration revision expect to description = do
_ <- P.rawSql [sql| SELECT _meta.add_migration(?, ?, ?, ?);
|]
[ maybe P.PersistNull P.PersistText expect
, P.PersistText to
, P.PersistText description
, P.PersistText revision
] :: M [P.Single P.PersistValue]
return ()
-- | Run a migration
runMigration :: Text -- ^ Program revision
-> Migration
-> M ()
runMigration revision Migration{..} = do
$logInfo $ "Migrating database schema from " <> fromMaybe "<None>" expect
<> " to " <> to <> " (" <> description <> ")"
script
registerMigration revision expect to description
data Migration =
Migration
{ expect :: Maybe SchemaVersion
-- ^ Expected schema version before the migration (Nothing if no migrations exist)
, to :: SchemaVersion
-- ^ Schema version after the migration
, description :: Text
-- ^ Description of the migration
, script :: M ()
}
findMigration :: Text -> Maybe SchemaVersion -> [Migration] -> M ()
findMigration _r (Just v) [ Migration{..} ]
| v == to =
$logInfo $ "Already in schema version " <> v <> "; nothing to do."
-- Already in final schema version
findMigration revision v ms@(Migration{..} : mss)
| v == expect = runMigrations revision v ms
| otherwise = findMigration revision v mss
findMigration _r v _ = do
$logError $ "Unknown schema version " <> fromMaybe "<None>" v
liftIO exitFailure
runMigrations :: Text -> Maybe SchemaVersion -> [Migration] -> M ()
runMigrations _ v [] = do
$logInfo $ "Finished migrations. Final schema: " <> fromMaybe "<None>" v
return ()
runMigrations revision v (m@Migration{..} : ms)
| v == expect = do
runMigration revision m >> runMigrations revision (Just to) ms
| otherwise = do
$logError $
"runMigrations: Unknown schema version " <> fromMaybe "<None>" v
liftIO exitFailure
-- | Finds the current schema version and runs all migrations linearly starting
-- from that version.
--
-- Takes a list of migrations. Each migration should leave the schema in the
-- version the next migration expects (that is, the @to@-field of migration @n@
-- should match the @expect@-field of migration @n+1@)
--
-- The first time this function runs it sets up a metadata schema "_meta" that
-- logs the migrations that have been run in the past. The initial schema
-- version before any migrations are registered is the empty string "", so the
-- first migration should expect this schema.
migrate :: Text -- ^ Program revision (e.g. $(gitHash) from gitrev)
-> [Migration]
-> M ()
migrate _ [] = do
$logError "List of migrations is empty, can't migrate"
liftIO exitFailure
migrate revision migrations = do
setupMetaSchema
sv <- currentSchemaVersion
findMigration revision sv migrations
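-- Illustrative usage sketch (not part of the original module); the table,
-- schema versions and SQL below are hypothetical:
--
-- > exampleMigrations :: [Migration]
-- > exampleMigrations =
-- >   [ Migration { expect      = Just ""  -- first migration expects the empty schema version
-- >               , to          = "1"
-- >               , description = "create users table"
-- >               , script      = P.rawExecute
-- >                   "CREATE TABLE users (id serial PRIMARY KEY);" []
-- >               }
-- >   ]
-- >
-- > runAllMigrations :: M ()
-- > runAllMigrations = migrate $(gitHash) exampleMigrations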
|
nejla/nejla-common
|
src/NejlaCommon/Persistence/Migration.hs
|
Haskell
|
bsd-3-clause
| 5,718
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Stack.Init
( initProject
, InitOpts (..)
) where
import Control.Exception (assert)
import Control.Exception.Enclosed (catchAny)
import Control.Monad
import Control.Monad.Catch (MonadMask, throwM)
import Control.Monad.IO.Class
import Control.Monad.Logger
import Control.Monad.Reader (MonadReader, asks)
import Control.Monad.Trans.Control (MonadBaseControl)
import qualified Data.ByteString.Builder as B
import qualified Data.ByteString.Char8 as BC
import qualified Data.ByteString.Lazy as L
import qualified Data.Foldable as F
import Data.Function (on)
import qualified Data.HashMap.Strict as HM
import qualified Data.IntMap as IntMap
import Data.List (intersect, maximumBy)
import Data.List.Extra (nubOrd)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (fromJust)
import Data.Monoid
import qualified Data.Text as T
import qualified Data.Yaml as Yaml
import qualified Distribution.PackageDescription as C
import Network.HTTP.Client.Conduit (HasHttpManager)
import Path
import Path.IO
import Stack.BuildPlan
import Stack.Config (getSnapshots,
makeConcreteResolver)
import Stack.Constants
import Stack.Solver
import Stack.Types
import Stack.Types.Internal (HasLogLevel, HasReExec,
HasTerminal)
import qualified System.FilePath as FP
-- | Generate stack.yaml
initProject
:: ( MonadBaseControl IO m, MonadIO m, MonadLogger m, MonadMask m
, MonadReader env m, HasConfig env , HasGHCVariant env
, HasHttpManager env , HasLogLevel env , HasReExec env
, HasTerminal env)
=> Path Abs Dir
-> InitOpts
-> Maybe AbstractResolver
-> m ()
initProject currDir initOpts mresolver = do
let dest = currDir </> stackDotYaml
reldest <- toFilePath `liftM` makeRelativeToCurrentDir dest
exists <- doesFileExist dest
when (not (forceOverwrite initOpts) && exists) $ do
error ("Stack configuration file " <> reldest <>
" exists, use 'stack solver' to fix the existing config file or \
\'--force' to overwrite it.")
dirs <- mapM (resolveDir' . T.unpack) (searchDirs initOpts)
let noPkgMsg = "In order to init, you should have an existing .cabal \
\file. Please try \"stack new\" instead."
find = findCabalFiles (includeSubDirs initOpts)
dirs' = if null dirs then [currDir] else dirs
cabalfps <- liftM concat $ mapM find dirs'
(bundle, dupPkgs) <- cabalPackagesCheck cabalfps noPkgMsg Nothing
(r, flags, extraDeps, rbundle) <- getDefaultResolver dest initOpts
mresolver bundle
let ignored = Map.difference bundle rbundle
dupPkgMsg
| (dupPkgs /= []) =
"Warning: Some packages were found to have names conflicting \
\with others and have been commented out in the \
\packages section.\n"
| otherwise = ""
missingPkgMsg
| (Map.size ignored > 0) =
"Warning: Some packages were found to be incompatible with \
\the resolver and have been left commented out in the \
\packages section.\n"
| otherwise = ""
extraDepMsg
| (Map.size extraDeps > 0) =
"Warning: Specified resolver could not satisfy all \
\dependencies. Some external packages have been added \
\as dependencies.\n"
| otherwise = ""
makeUserMsg msgs =
let msg = concat msgs
in if msg /= "" then
msg <> "You can suppress this message by removing it from \
\stack.yaml\n"
else ""
userMsg = makeUserMsg [dupPkgMsg, missingPkgMsg, extraDepMsg]
gpds = Map.elems $ fmap snd rbundle
p = Project
{ projectUserMsg = if userMsg == "" then Nothing else Just userMsg
, projectPackages = pkgs
, projectExtraDeps = extraDeps
, projectFlags = removeSrcPkgDefaultFlags gpds flags
, projectResolver = r
, projectCompiler = Nothing
, projectExtraPackageDBs = []
}
makeRelDir dir =
case stripDir currDir dir of
Nothing
| currDir == dir -> "."
| otherwise -> assert False $ toFilePath dir
Just rel -> toFilePath rel
makeRel = fmap toFilePath . makeRelativeToCurrentDir
pkgs = map toPkg $ Map.elems (fmap (parent . fst) rbundle)
toPkg dir = PackageEntry
{ peValidWanted = Nothing
, peExtraDepMaybe = Nothing
, peLocation = PLFilePath $ makeRelDir dir
, peSubdirs = []
}
indent t = T.unlines $ fmap (" " <>) (T.lines t)
$logInfo $ "Initialising configuration using resolver: " <> resolverName r
$logInfo $ "Total number of user packages considered: "
<> (T.pack $ show $ (Map.size bundle + length dupPkgs))
when (dupPkgs /= []) $ do
$logWarn $ "Warning! Ignoring "
<> (T.pack $ show $ length dupPkgs)
<> " duplicate packages:"
rels <- mapM makeRel dupPkgs
$logWarn $ indent $ showItems rels
when (Map.size ignored > 0) $ do
$logWarn $ "Warning! Ignoring "
<> (T.pack $ show $ Map.size ignored)
<> " packages due to dependency conflicts:"
rels <- mapM makeRel (Map.elems (fmap fst ignored))
$logWarn $ indent $ showItems $ rels
when (Map.size extraDeps > 0) $ do
$logWarn $ "Warning! " <> (T.pack $ show $ Map.size extraDeps)
<> " external dependencies were added."
$logInfo $
(if exists then "Overwriting existing configuration file: "
else "Writing configuration to file: ")
<> T.pack reldest
liftIO $ L.writeFile (toFilePath dest)
$ B.toLazyByteString
$ renderStackYaml p
(Map.elems $ fmap (makeRelDir . parent . fst) ignored)
(map (makeRelDir . parent) dupPkgs)
$logInfo "All done."
-- | Render a stack.yaml file with comments, see:
-- https://github.com/commercialhaskell/stack/issues/226
renderStackYaml :: Project -> [FilePath] -> [FilePath] -> B.Builder
renderStackYaml p ignoredPackages dupPackages =
case Yaml.toJSON p of
Yaml.Object o -> renderObject o
_ -> assert False $ B.byteString $ Yaml.encode p
where
renderObject o =
B.byteString "# This file was automatically generated by stack init\n" <>
B.byteString "# For more information, see: http://docs.haskellstack.org/en/stable/yaml_configuration.html\n\n" <>
F.foldMap (goComment o) comments <>
goOthers (o `HM.difference` HM.fromList comments) <>
B.byteString
"# Control whether we use the GHC we find on the path\n\
\# system-ghc: true\n\n\
\# Require a specific version of stack, using version ranges\n\
\# require-stack-version: -any # Default\n\
\# require-stack-version: >= 1.0.0\n\n\
\# Override the architecture used by stack, especially useful on Windows\n\
\# arch: i386\n\
\# arch: x86_64\n\n\
\# Extra directories used by stack for building\n\
\# extra-include-dirs: [/path/to/dir]\n\
\# extra-lib-dirs: [/path/to/dir]\n\n\
\# Allow a newer minor version of GHC than the snapshot specifies\n\
\# compiler-check: newer-minor\n"
comments =
[ ("user-message", "A message to be displayed to the user. Used when autogenerated config ignored some packages or added extra deps.")
, ("resolver", "Specifies the GHC version and set of packages available (e.g., lts-3.5, nightly-2015-09-21, ghc-7.10.2)")
, ("packages", "Local packages, usually specified by relative directory name")
, ("extra-deps", "Packages to be pulled from upstream that are not in the resolver (e.g., acme-missiles-0.3)")
, ("flags", "Override default flag values for local packages and extra-deps")
, ("extra-package-dbs", "Extra package databases containing global packages")
]
commentedPackages =
let ignoredComment = "# The following packages have been ignored \
\due to incompatibility with the resolver compiler or \
\dependency conflicts with other packages"
dupComment = "# The following packages have been ignored due \
\to package name conflict with other packages"
in commentPackages ignoredComment ignoredPackages
<> commentPackages dupComment dupPackages
commentPackages comment pkgs
| pkgs /= [] =
B.byteString (BC.pack $ comment ++ "\n")
<> (B.byteString $ BC.pack $ concat
$ (map (\x -> "#- " ++ x ++ "\n") pkgs) ++ ["\n"])
| otherwise = ""
goComment o (name, comment) =
case HM.lookup name o of
Nothing -> assert (name == "user-message") mempty
Just v ->
B.byteString "# " <>
B.byteString comment <>
B.byteString "\n" <>
B.byteString (Yaml.encode $ Yaml.object [(name, v)]) <>
if (name == "packages") then commentedPackages else "" <>
B.byteString "\n"
goOthers o
| HM.null o = mempty
| otherwise = assert False $ B.byteString $ Yaml.encode o
getSnapshots' :: (MonadIO m, MonadMask m, MonadReader env m, HasConfig env, HasHttpManager env, MonadLogger m, MonadBaseControl IO m)
=> m Snapshots
getSnapshots' =
getSnapshots `catchAny` \e -> do
$logError $
"Unable to download snapshot list, and therefore could " <>
"not generate a stack.yaml file automatically"
$logError $
"This sometimes happens due to missing Certificate Authorities " <>
"on your system. For more information, see:"
$logError ""
$logError " https://github.com/commercialhaskell/stack/issues/234"
$logError ""
$logError "You can try again, or create your stack.yaml file by hand. See:"
$logError ""
$logError " http://docs.haskellstack.org/en/stable/yaml_configuration.html"
$logError ""
$logError $ "Exception was: " <> T.pack (show e)
error ""
-- | Get the default resolver value
getDefaultResolver
:: ( MonadBaseControl IO m, MonadIO m, MonadLogger m, MonadMask m
, MonadReader env m, HasConfig env , HasGHCVariant env
, HasHttpManager env , HasLogLevel env , HasReExec env
, HasTerminal env)
=> Path Abs File -- ^ stack.yaml
-> InitOpts
-> Maybe AbstractResolver
-> Map PackageName (Path Abs File, C.GenericPackageDescription)
-- ^ Src package name: cabal dir, cabal package description
-> m ( Resolver
, Map PackageName (Map FlagName Bool)
, Map PackageName Version
, Map PackageName (Path Abs File, C.GenericPackageDescription))
-- ^ ( Resolver
-- , Flags for src packages and extra deps
-- , Extra dependencies
-- , Src packages actually considered)
getDefaultResolver stackYaml initOpts mresolver bundle =
maybe selectSnapResolver makeConcreteResolver mresolver
>>= getWorkingResolverPlan stackYaml initOpts bundle
where
-- TODO support selecting best across regular and custom snapshots
selectSnapResolver = do
let gpds = Map.elems (fmap snd bundle)
snaps <- getSnapshots' >>= getRecommendedSnapshots
(s, r) <- selectBestSnapshot gpds snaps
case r of
BuildPlanCheckFail {} | not (omitPackages initOpts)
-> throwM (NoMatchingSnapshot snaps)
_ -> return $ ResolverSnapshot s
getWorkingResolverPlan
:: ( MonadBaseControl IO m, MonadIO m, MonadLogger m, MonadMask m
, MonadReader env m, HasConfig env , HasGHCVariant env
, HasHttpManager env , HasLogLevel env , HasReExec env
, HasTerminal env)
=> Path Abs File -- ^ stack.yaml
-> InitOpts
-> Map PackageName (Path Abs File, C.GenericPackageDescription)
-- ^ Src package name: cabal dir, cabal package description
-> Resolver
-> m ( Resolver
, Map PackageName (Map FlagName Bool)
, Map PackageName Version
, Map PackageName (Path Abs File, C.GenericPackageDescription))
-- ^ ( Resolver
-- , Flags for src packages and extra deps
-- , Extra dependencies
-- , Src packages actually considered)
getWorkingResolverPlan stackYaml initOpts bundle resolver = do
$logInfo $ "Selected resolver: " <> resolverName resolver
go bundle
where
go info = do
eres <- checkBundleResolver stackYaml initOpts info resolver
-- if some packages failed try again using the rest
case eres of
Right (f, edeps)-> return (resolver, f, edeps, info)
Left ignored
| Map.null available -> do
$logWarn "*** Could not find a working plan for any of \
\the user packages.\nProceeding to create a \
\config anyway."
return (resolver, Map.empty, Map.empty, Map.empty)
| otherwise -> do
when ((Map.size available) == (Map.size info)) $
error "Bug: No packages to ignore"
if length ignored > 1 then do
$logWarn "*** Ignoring packages:"
$logWarn $ indent $ showItems ignored
else
$logWarn $ "*** Ignoring package: "
<> (T.pack $ packageNameString (head ignored))
go available
where
indent t = T.unlines $ fmap (" " <>) (T.lines t)
isAvailable k _ = not (k `elem` ignored)
available = Map.filterWithKey isAvailable info
checkBundleResolver
:: ( MonadBaseControl IO m, MonadIO m, MonadLogger m, MonadMask m
, MonadReader env m, HasConfig env , HasGHCVariant env
, HasHttpManager env , HasLogLevel env , HasReExec env
, HasTerminal env)
=> Path Abs File -- ^ stack.yaml
-> InitOpts
-> Map PackageName (Path Abs File, C.GenericPackageDescription)
-- ^ Src package name: cabal dir, cabal package description
-> Resolver
-> m (Either [PackageName] ( Map PackageName (Map FlagName Bool)
, Map PackageName Version))
checkBundleResolver stackYaml initOpts bundle resolver = do
result <- checkResolverSpec gpds Nothing resolver
case result of
BuildPlanCheckOk f -> return $ Right (f, Map.empty)
BuildPlanCheckPartial f _
| needSolver resolver initOpts -> do
$logWarn $ "*** Resolver " <> resolverName resolver
<> " will need external packages: "
$logWarn $ indent $ T.pack $ show result
solve f
| otherwise -> throwM $ ResolverPartial resolver (show result)
BuildPlanCheckFail _ e _
| (omitPackages initOpts) -> do
$logWarn $ "*** Resolver compiler mismatch: "
<> resolverName resolver
$logWarn $ indent $ T.pack $ show result
let failed = Map.unions (Map.elems (fmap deNeededBy e))
return $ Left (Map.keys failed)
| otherwise -> throwM $ ResolverMismatch resolver (show result)
where
indent t = T.unlines $ fmap (" " <>) (T.lines t)
gpds = Map.elems (fmap snd bundle)
solve flags = do
let cabalDirs = map parent (Map.elems (fmap fst bundle))
srcConstraints = mergeConstraints (gpdPackages gpds) flags
eresult <- solveResolverSpec stackYaml cabalDirs
(resolver, srcConstraints, Map.empty)
case eresult of
Right (src, ext) ->
return $ Right (fmap snd (Map.union src ext), fmap fst ext)
Left packages
| omitPackages initOpts, srcpkgs /= []-> do
pkg <- findOneIndependent srcpkgs flags
return $ Left [pkg]
| otherwise -> throwM (SolverGiveUp giveUpMsg)
where srcpkgs = intersect (Map.keys bundle) packages
    -- Among a list of packages, find one on which none of the other packages
    -- depend. Such a package is a good candidate to be removed from the list
    -- when there is a conflict in dependencies among this set of packages.
findOneIndependent packages flags = do
platform <- asks (configPlatform . getConfig)
(compiler, _) <- getResolverConstraints stackYaml resolver
let getGpd pkg = snd (fromJust (Map.lookup pkg bundle))
getFlags pkg = fromJust (Map.lookup pkg flags)
deps pkg = gpdPackageDeps (getGpd pkg) compiler platform
(getFlags pkg)
allDeps = concat $ map (Map.keys . deps) packages
isIndependent pkg = not $ pkg `elem` allDeps
-- prefer to reject packages in deeper directories
path pkg = fst (fromJust (Map.lookup pkg bundle))
pathlen = length . FP.splitPath . toFilePath . path
maxPathlen = maximumBy (compare `on` pathlen)
return $ maxPathlen (filter isIndependent packages)
giveUpMsg = concat
        [ "    - Use '--omit-packages' to exclude conflicting package(s).\n"
, " - Tweak the generated "
, toFilePath stackDotYaml <> " and then run 'stack solver':\n"
, " - Add any missing remote packages.\n"
, " - Add extra dependencies to guide solver.\n"
, " - Update external packages with 'stack update' and try again.\n"
]
needSolver _ (InitOpts {useSolver = True}) = True
needSolver (ResolverCompiler _) _ = True
needSolver _ _ = False
getRecommendedSnapshots :: (MonadIO m, MonadMask m, MonadReader env m, HasConfig env, HasHttpManager env, HasGHCVariant env, MonadLogger m, MonadBaseControl IO m)
=> Snapshots
-> m [SnapName]
getRecommendedSnapshots snapshots = do
-- in order - Latest LTS, Latest Nightly, all LTS most recent first
return $ nubOrd $ concat
[ map (uncurry LTS)
(take 1 $ reverse $ IntMap.toList $ snapshotsLts snapshots)
, [Nightly $ snapshotsNightly snapshots]
, map (uncurry LTS)
(drop 1 $ reverse $ IntMap.toList $ snapshotsLts snapshots)
]
data InitOpts = InitOpts
{ searchDirs :: ![T.Text]
-- ^ List of sub directories to search for .cabal files
, useSolver :: Bool
-- ^ Use solver to determine required external dependencies
, omitPackages :: Bool
-- ^ Exclude conflicting or incompatible user packages
, forceOverwrite :: Bool
-- ^ Overwrite existing stack.yaml
, includeSubDirs :: Bool
-- ^ If True, include all .cabal files found in any sub directories
}
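-- Illustrative sketch (not part of the original module): a plausible set of
-- options for initialising in the current directory only, searching sub
-- directories for .cabal files and refusing to overwrite an existing
-- stack.yaml. The field values are hypothetical defaults.
--
-- > exampleInitOpts :: InitOpts
-- > exampleInitOpts = InitOpts
-- >     { searchDirs     = []      -- empty list means the current directory
-- >     , useSolver      = False
-- >     , omitPackages   = False
-- >     , forceOverwrite = False
-- >     , includeSubDirs = True
-- >     }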
|
harendra-kumar/stack
|
src/Stack/Init.hs
|
Haskell
|
bsd-3-clause
| 20,472
|
{-# LANGUAGE OverloadedStrings #-}
module Dhall.Test.Schemas where
import Data.Text (Text)
import Dhall.Parser (Header (..))
import Prelude hiding (FilePath)
import Test.Tasty (TestTree)
import Turtle (FilePath)
import qualified Data.Text as Text
import qualified Data.Text.IO as Text.IO
import qualified Dhall.Core as Core
import qualified Dhall.Parser as Parser
import qualified Dhall.Pretty as Pretty
import qualified Dhall.Schemas as Schemas
import qualified Dhall.Test.Util as Test.Util
import qualified Prettyprinter as Doc
import qualified Prettyprinter.Render.Text as Doc.Render.Text
import qualified Test.Tasty as Tasty
import qualified Test.Tasty.HUnit as Tasty.HUnit
import qualified Turtle
schemasDirectory :: FilePath
schemasDirectory = "./tests/schemas"
getTests :: IO TestTree
getTests = do
schemasTests <- Test.Util.discover (Turtle.chars <* "A.dhall") schemaTest (Turtle.lstree schemasDirectory)
return (Tasty.testGroup "schemas tests" [ schemasTests ])
format :: Header -> Core.Expr Parser.Src Core.Import -> Text
format (Header header) expr =
let doc = Doc.pretty header
<> Pretty.prettyCharacterSet Pretty.Unicode expr
<> "\n"
docStream = Pretty.layout doc
in
Doc.Render.Text.renderStrict docStream
schemaTest :: Text -> TestTree
schemaTest prefix =
Tasty.HUnit.testCase (Text.unpack prefix) $ do
let inputFile = Text.unpack (prefix <> "A.dhall")
let outputFile = Text.unpack (prefix <> "B.dhall")
inputText <- Text.IO.readFile inputFile
(header, parsedInput) <- Core.throws (Parser.exprAndHeaderFromText mempty inputText)
parsedSchema <- Core.throws (Parser.exprFromText mempty (Test.Util.toDhallPath (prefix <> "Schema.dhall")))
actualExpression <- Schemas.rewriteWithSchemas parsedSchema parsedInput
let actualText = format header actualExpression
expectedText <- Text.IO.readFile outputFile
let message = "The rewritten expression did not match the expected output"
Tasty.HUnit.assertEqual message expectedText actualText
|
Gabriel439/Haskell-Dhall-Library
|
dhall/tests/Dhall/Test/Schemas.hs
|
Haskell
|
bsd-3-clause
| 2,250
|
{-|
Module : Grammar.Size
Description : Definition of the Size datatype.
Copyright : (c) Davide Mancusi, 2017
License : BSD3
Maintainer : arekfu@gmail.com
Stability : experimental
Portability : POSIX
This module exports the 'Size' data type.
-}
module Grammar.Size
( Size
, mean
) where
-- system imports
import Data.Foldable
-- | A type alias for sized random expansion
type Size = Double
mean :: (Foldable t, Fractional a) => t a -> a
mean xs = let meanAcc (tot, n) x = (tot+x, n+1)
(total, len) = foldl' meanAcc (0, 0::Int) xs
in total / fromIntegral len
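-- An illustrative check (not part of the original module): 'mean' folds once
-- over the container, accumulating the running total and element count.
--
-- >>> mean [1.0, 2.0, 3.0 :: Double]
-- 2.0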
|
arekfu/grammar-haskell
|
src/Grammar/Size.hs
|
Haskell
|
bsd-3-clause
| 603
|
{-# LANGUAGE OverloadedStrings, TemplateHaskell #-}
module Web.Slack.Types.Topic where
import Data.Aeson
import Data.Text (Text)
import Control.Applicative
import Control.Lens.TH
import Prelude
type Purpose = Topic
data Topic = Topic
{ _topicValue :: Text
, _topicCreator :: Text
, _topicLastSet :: Int
} deriving (Show)
makeLenses ''Topic
instance FromJSON Topic where
parseJSON = withObject "topic" (\o ->
Topic <$> o .: "value" <*> o .: "creator" <*> o .: "last_set")
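-- Illustrative note (not part of the original module): the instance above
-- expects JSON of roughly this shape (the field values are hypothetical):
--
-- > { "value": "general discussion", "creator": "U024BE7LH", "last_set": 1369677212 }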
|
madjar/slack-api
|
src/Web/Slack/Types/Topic.hs
|
Haskell
|
mit
| 540
|
module Server where
import Control.Exception (bracket, finally, handleJust, tryJust)
import Control.Monad (guard)
import Data.IORef (IORef, newIORef, readIORef, writeIORef)
import GHC.IO.Exception (IOErrorType(ResourceVanished))
import Network (PortID(UnixSocket), Socket, accept, listenOn, sClose)
import System.Directory (removeFile)
import System.Exit (ExitCode(ExitSuccess))
import System.IO (Handle, hClose, hFlush, hGetLine, hPutStrLn)
import System.IO.Error (ioeGetErrorType, isAlreadyInUseError, isDoesNotExistError)
import CommandLoop (newCommandLoopState, Config, updateConfig, startCommandLoop)
import Types (ClientDirective(..), Command, CommandExtra(..), ServerDirective(..))
import Util (readMaybe)
createListenSocket :: FilePath -> IO Socket
createListenSocket socketPath = do
r <- tryJust (guard . isAlreadyInUseError) $ listenOn (UnixSocket socketPath)
case r of
Right socket -> return socket
Left _ -> do
removeFile socketPath
listenOn (UnixSocket socketPath)
startServer :: FilePath -> Maybe Socket -> CommandExtra -> IO ()
startServer socketPath mbSock cmdExtra = do
case mbSock of
Nothing -> bracket (createListenSocket socketPath) cleanup go
Just sock -> (go sock) `finally` (cleanup sock)
where
cleanup :: Socket -> IO ()
cleanup sock = do
sClose sock
removeSocketFile
go :: Socket -> IO ()
go sock = do
state <- newCommandLoopState
currentClient <- newIORef Nothing
configRef <- newIORef Nothing
config <- updateConfig Nothing cmdExtra
startCommandLoop state (clientSend currentClient) (getNextCommand currentClient sock configRef) config Nothing
removeSocketFile :: IO ()
removeSocketFile = do
-- Ignore possible error if socket file does not exist
_ <- tryJust (guard . isDoesNotExistError) $ removeFile socketPath
return ()
clientSend :: IORef (Maybe Handle) -> ClientDirective -> IO ()
clientSend currentClient clientDirective = do
mbH <- readIORef currentClient
case mbH of
Just h -> ignoreEPipe $ do
hPutStrLn h (show clientDirective)
hFlush h
Nothing -> return ()
where
-- EPIPE means that the client is no longer there.
ignoreEPipe = handleJust (guard . isEPipe) (const $ return ())
isEPipe = (==ResourceVanished) . ioeGetErrorType
getNextCommand :: IORef (Maybe Handle) -> Socket -> IORef (Maybe Config) -> IO (Maybe (Command, Config))
getNextCommand currentClient sock config = do
checkCurrent <- readIORef currentClient
case checkCurrent of
Just h -> hClose h
Nothing -> return ()
(h, _, _) <- accept sock
writeIORef currentClient (Just h)
msg <- hGetLine h -- TODO catch exception
let serverDirective = readMaybe msg
case serverDirective of
Nothing -> do
clientSend currentClient $ ClientUnexpectedError $
"The client sent an invalid message to the server: " ++ show msg
getNextCommand currentClient sock config
Just (SrvCommand cmd cmdExtra) -> do
curConfig <- readIORef config
config' <- updateConfig curConfig cmdExtra
writeIORef config (Just config')
return $ Just (cmd, config')
Just SrvStatus -> do
mapM_ (clientSend currentClient) $
[ ClientStdout "Server is running."
, ClientExit ExitSuccess
]
getNextCommand currentClient sock config
Just SrvExit -> do
mapM_ (clientSend currentClient) $
[ ClientStdout "Shutting down server."
, ClientExit ExitSuccess
]
-- Must close the handle here because we are exiting the loop so it
-- won't be closed in the code above
hClose h
return Nothing
|
pacak/hdevtools
|
src/Server.hs
|
Haskell
|
mit
| 3,921
|
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="zh-CN">
<title>Form Handler | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
veggiespam/zap-extensions
|
addOns/formhandler/src/main/javahelp/org/zaproxy/zap/extension/formhandler/resources/help_zh_CN/helpset_zh_CN.hs
|
Haskell
|
apache-2.0
| 974
|
module Main (main) where
import Network.Info
main = do
ns <- getNetworkInterfaces
mapM (putStrLn . showInterface) ns
showInterface :: NetworkInterface -> String
showInterface n = name n ++ "\n"
++ "IPv4 Address: " ++ show (ipv4 n) ++ "\n"
++ "IPv6 Address: " ++ show (ipv6 n) ++ "\n"
++ "MAC Address: " ++ show (mac n) ++ "\n"
|
olorin/network-info
|
test/src/Main.hs
|
Haskell
|
bsd-3-clause
| 397
|
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE FlexibleContexts #-}
#if __GLASGOW_HASKELL__ >= 706
{-# LANGUAGE PolyKinds #-}
#endif
-----------------------------------------------------------------------------
-- |
-- Module : GHC.Generics.Lens
-- Copyright : (C) 2012-16 Edward Kmett
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Edward Kmett <ekmett@gmail.com>
-- Stability : experimental
-- Portability : GHC
--
-- Note: @GHC.Generics@ exports a number of names that collide with @Control.Lens@.
--
-- You can use hiding or imports to mitigate this to an extent, and the
-- following imports represent a fair compromise for user code:
--
-- > import Control.Lens hiding (Rep)
-- > import GHC.Generics hiding (from, to)
--
-- You can use 'generic' to replace 'GHC.Generics.from' and 'GHC.Generics.to' from @GHC.Generics@,
-- and probably won't be explicitly referencing 'Control.Lens.Representable.Rep' from @Control.Lens@
-- in code that uses generics.
--
-- This module provides compatibility with older GHC versions by using the
-- <http://hackage.haskell.org/package/generic-deriving generic-deriving>
-- package.
----------------------------------------------------------------------------
module GHC.Generics.Lens
(
generic
, generic1
, _V1
, _U1
, _Par1
, _Rec1
, _K1
, _M1
, _L1
, _R1
, _UAddr
, _UChar
, _UDouble
, _UFloat
, _UInt
, _UWord
) where
import Control.Lens
import GHC.Exts (Char(..), Double(..), Float(..),
Int(..), Ptr(..), Word(..))
import qualified GHC.Generics as Generic
import GHC.Generics hiding (from, to)
#if !(MIN_VERSION_base(4,9,0))
import Generics.Deriving.Base hiding (from, to)
#endif
-- $setup
-- >>> :set -XNoOverloadedStrings
-- | Convert from the data type to its representation (or back)
--
-- >>> "hello"^.generic.from generic :: String
-- "hello"
generic :: Generic a => Iso' a (Rep a b)
generic = iso Generic.from Generic.to
{-# INLINE generic #-}
-- | Convert from the data type to its representation (or back)
generic1 :: Generic1 f => Iso (f a) (f b) (Rep1 f a) (Rep1 f b)
generic1 = iso from1 to1
{-# INLINE generic1 #-}
_V1 :: Over p f (V1 s) (V1 t) a b
_V1 _ = absurd where
absurd !_a = undefined
{-# INLINE _V1 #-}
_U1 :: Iso (U1 p) (U1 q) () ()
_U1 = iso (const ()) (const U1)
{-# INLINE _U1 #-}
_Par1 :: Iso (Par1 p) (Par1 q) p q
_Par1 = iso unPar1 Par1
{-# INLINE _Par1 #-}
_Rec1 :: Iso (Rec1 f p) (Rec1 g q) (f p) (g q)
_Rec1 = iso unRec1 Rec1
{-# INLINE _Rec1 #-}
_K1 :: Iso (K1 i c p) (K1 j d q) c d
_K1 = iso unK1 K1
{-# INLINE _K1 #-}
_M1 :: Iso (M1 i c f p) (M1 j d g q) (f p) (g q)
_M1 = iso unM1 M1
{-# INLINE _M1 #-}
_L1 :: Prism' ((f :+: g) a) (f a)
_L1 = prism remitter reviewer
where
remitter = L1
reviewer (L1 l) = Right l
reviewer x = Left x
{-# INLINE _L1 #-}
-- | You can access fields of `data (f :*: g) p` by using its `Field1` and `Field2` instances
_R1 :: Prism' ((f :+: g) a) (g a)
_R1 = prism remitter reviewer
where
remitter = R1
reviewer (R1 l) = Right l
reviewer x = Left x
{-# INLINE _R1 #-}
_UAddr :: Iso (UAddr p) (UAddr q) (Ptr c) (Ptr d)
_UAddr = iso remitter reviewer
where
remitter (UAddr a) = Ptr a
reviewer (Ptr a) = UAddr a
{-# INLINE _UAddr #-}
_UChar :: Iso (UChar p) (UChar q) Char Char
_UChar = iso remitter reviewer
where
remitter (UChar c) = C# c
reviewer (C# c) = UChar c
{-# INLINE _UChar #-}
_UDouble :: Iso (UDouble p) (UDouble q) Double Double
_UDouble = iso remitter reviewer
where
remitter (UDouble d) = D# d
reviewer (D# d) = UDouble d
{-# INLINE _UDouble #-}
_UFloat :: Iso (UFloat p) (UFloat q) Float Float
_UFloat = iso remitter reviewer
where
remitter (UFloat f) = F# f
reviewer (F# f) = UFloat f
{-# INLINE _UFloat #-}
_UInt :: Iso (UInt p) (UInt q) Int Int
_UInt = iso remitter reviewer
where
remitter (UInt i) = I# i
reviewer (I# i) = UInt i
{-# INLINE _UInt #-}
_UWord :: Iso (UWord p) (UWord q) Word Word
_UWord = iso remitter reviewer
where
remitter (UWord w) = W# w
reviewer (W# w) = UWord w
{-# INLINE _UWord #-}
|
ddssff/lens
|
src/GHC/Generics/Lens.hs
|
Haskell
|
bsd-3-clause
| 4,250
|
-- |
-- Module : React
-- Copyright : (C) 2014-15 Joel Burget
-- License : MIT
-- Maintainer : Joel Burget <joelburget@gmail.com>
-- Stability : experimental
-- Portability : non-portable
--
-- Usage:
--
-- This tutorial assumes only a basic understanding of React, the DOM, and
-- browser events. I recommend at least skimming the [official React
-- tutorial](https://facebook.github.io/react/docs/tutorial.html).
--
-- Let's start with a basic example:
--
-- @
-- page_ :: ReactNode Void
-- page_ =
-- let cls = smartClass
-- { name = "page"
--
-- -- initially the input is empty
-- , initialState = ""
--
-- -- always transition to the input's new value
-- , transition = \(_, value) -> (value, Nothing)
--
-- , renderFn = \_ str -> div_ [ class_ "container" ] $ do
-- input_ [ value_ str, onChange (Just . value . target) ]
-- }
-- in classLeaf cls ()
--
-- main :: IO ()
-- main = do
-- Just doc <- currentDocument
-- Just elem <- documentGetElementById doc ("elem" :: JSString)
-- render page_ elem
-- @
--
-- In this example we defined a React class with 'Text' state, but taking only
-- @()@ as a prop. It's possible to use anything for props and state --
-- numbers, JSON, even React classes.
--
-- In the example the input always contains the state from the class, and the
-- state is updated on every input change event -- effectively, every
-- keystroke.
module React
(
-- * Classes
ReactClass()
, ClassConfig(..)
, ClassCtx
, smartClass
, dumbClass
-- * Rendering
, render
, debugRender
-- * React Nodes
, ReactNode
-- * Events
, module React.Events
-- * Local
, module React.Local
-- XXX(joel)
, AttrOrHandler()
-- TODO - create React.Internal module for these?
-- * Attributes
, module React.Attrs
-- * Creating Elements
, module React.Elements
-- * JS Interop
, ImportedClass
-- * PropTypes
, PropRequired(IsRequired, IsntRequired)
, PropType(..)
, PropTypable
) where
-- TODO
-- restricted monads
-- store elem in monad
-- escaping / dangerouslySetInnerHTML
import React.Class
import React.Imports
import React.Local
import React.Render
import React.Types
import React.Attrs
import React.Elements
import React.Events
import React.Rebindable
import React.PropTypes
|
joelburget/react-haskell
|
src/React.hs
|
Haskell
|
mit
| 2,455
|
module ListImplicitUsed where
import Data.List
foo = sort
|
serokell/importify
|
test/test-data/base@basic/30-ListImplicitUsed.hs
|
Haskell
|
mit
| 70
|
{-
******************************************************************************
* H M T C *
* *
* Module: Name *
* Purpose: Representation of names *
* Authors: Henrik Nilsson *
* *
* Copyright (c) Henrik Nilsson, 2006 - 2012 *
* *
******************************************************************************
-}
-- | Representation of names. Types, variables, procedures, operators ...
module Name where
type Name = String
|
jbracker/supermonad-plugin
|
examples/monad/hmtc/original/Name.hs
|
Haskell
|
bsd-3-clause
| 910
|
-- | Data.Graph is sorely lacking in several ways. This just tries to fill in
-- some holes and provide a more convenient interface.
{-# LANGUAGE RecursiveDo #-}
module Util.Graph(
Graph(),
Util.Graph.components,
Util.Graph.dff,
Util.Graph.reachable,
Util.Graph.scc,
Util.Graph.topSort,
cyclicNodes,
findLoopBreakers,
fromGraph,
fromScc,
getBindGroups,
groupOverlapping,
mapGraph,
newGraph',
newGraph,
newGraphReachable,
reachableFrom,
restitchGraph,
sccForest,
easySCC,
sccGroups,
toDag,
transitiveClosure,
transitiveReduction
) where
import Control.Monad
import Control.Monad.ST
import Data.Array.IArray
import Data.Array.ST hiding(unsafeFreeze)
import Data.Array.Unsafe (unsafeFreeze)
import Data.Graph hiding(Graph)
import Data.Maybe
import Data.STRef
import GenUtil
import Data.List(sort,sortBy,group,delete)
import Util.UnionFindST
import qualified Data.Graph as G
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Data.Traversable as A
data Graph n = Graph G.Graph (Table n)
instance Show n => Show (Graph n) where
showsPrec n g = showsPrec n (Util.Graph.scc g)
-- simple scc interface
easySCC :: Ord name => [node] -> (node -> name) -> (node -> [name]) -> [[node]]
easySCC ns fn fd = map f $ stronglyConnComp [ (n, fn n, fd n) | n <- ns] where
f (AcyclicSCC x) = [x]
f (CyclicSCC xs) = xs
fromGraph :: Graph n -> [(n,[n])]
fromGraph (Graph g lv) = [ (lv!v,map (lv!) vs) | (v,vs) <- assocs g ]
newGraph :: Ord k => [n] -> (n -> k) -> (n -> [k]) -> (Graph n)
newGraph ns a b = snd $ newGraph' ns a b
newGraphReachable :: Ord k => [n] -> (n -> k) -> (n -> [k]) -> ([k] -> [n],Graph n)
newGraphReachable ns fn fd = (rable,ng) where
(vmap,ng) = newGraph' ns fn fd
rable ks = Util.Graph.reachable ng [ v | Just v <- map (flip Map.lookup vmap) ks ]
reachableFrom :: Ord k => (n -> k) -> (n -> [k]) -> [n] -> [k] -> [n]
reachableFrom fn fd ns = fst $ newGraphReachable ns fn fd
-- | Build a graph from a list of nodes uniquely identified by keys,
-- with a list of keys of nodes this node should have edges to.
-- The out-list may contain keys that don't correspond to
-- nodes of the graph; they are ignored.
newGraph' :: Ord k => [n] -> (n -> k) -> (n -> [k]) -> (Map.Map k Vertex,Graph n)
newGraph' ns fn fd = (kmap,Graph graph nr) where
nr = listArray bounds0 ns
max_v = length ns - 1
bounds0 = (0,max_v) :: (Vertex, Vertex)
kmap = Map.fromList [ (fn n,i) | (i,n) <- zip [0 ..] ns ]
graph = listArray bounds0 [mapMaybe (flip Map.lookup kmap) (snub $ fd n) | n <- ns]
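-- Illustrative sketch (not part of the original module): building a small
-- dependency graph keyed by name and reading back its strongly connected
-- components. The node type and the names are hypothetical.
--
-- > exampleSccs :: [Either (String, [String]) [(String, [String])]]
-- > exampleSccs = Util.Graph.scc g
-- >   where
-- >     nodes = [("a", ["b"]), ("b", ["a"]), ("c", ["a"])]
-- >     g     = newGraph nodes fst snd
-- >     -- "a" and "b" form a cycle, so they come back as one Right group;
-- >     -- "c" is acyclic and comes back as a Left node.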
fromScc (Left n) = [n]
fromScc (Right n) = n
-- | Determine a set of loopbreakers subject to a fitness function.
-- Loopbreakers have a minimum of their incoming edges ignored.
findLoopBreakers
:: (n -> Int) -- ^ fitness function, greater numbers mean more likely to be a loopbreaker
-> (n -> Bool) -- ^ whether a node is suitable at all for a choice as loopbreaker
-> Graph n -- ^ the graph
-> ([n],[n]) -- ^ (loop breakers,dependency ordered nodes after loopbreaking)
findLoopBreakers func ex (Graph g ln) = ans where
scc = G.scc g
ans = f g scc [] [] where
f g (Node v []:sccs) fs lb
| v `elem` g ! v = let ng = (fmap (Data.List.delete v) g) in f ng (G.scc ng) [] (v:lb)
| otherwise = f g sccs (v:fs) lb
f g (n:_) fs lb = f ng (G.scc ng) [] (mv:lb) where
mv = case sortBy (\ a b -> compare (snd b) (snd a)) [ (v,func (ln!v)) | v <- ns, ex (ln!v) ] of
((mv,_):_) -> mv
[] -> error "findLoopBreakers: no valid loopbreakers"
ns = dec n []
ng = fmap (Data.List.delete mv) g
f _ [] xs lb = (map ((ln!) . head) (group $ sort lb),reverse $ map (ln!) xs)
dec (Node v ts) vs = v:foldr dec vs ts
reachable :: Graph n -> [Vertex] -> [n]
reachable (Graph g ln) vs = map (ln!) $ snub $ concatMap (G.reachable g) vs
sccGroups :: Graph n -> [[n]]
sccGroups g = map fromScc (Util.Graph.scc g)
scc :: Graph n -> [Either n [n]]
scc (Graph g ln) = map decode forest where
forest = G.scc g
decode (Node v [])
| v `elem` g ! v = Right [ln!v]
| otherwise = Left (ln!v)
decode other = Right (dec other [])
dec (Node v ts) vs = ln!v:foldr dec vs ts
sccForest :: Graph n -> Forest n
sccForest (Graph g ln) = map (fmap (ln!)) forest where
forest = G.scc g
dff :: Graph n -> Forest n
dff (Graph g ln) = map (fmap (ln!)) forest where
forest = G.dff g
components :: Graph n -> [[n]]
components (Graph g ln) = map decode forest where
forest = G.components g
decode n = dec n []
dec (Node v ts) vs = ln!v:foldr dec vs ts
topSort :: Graph n -> [n]
topSort (Graph g ln) = map (ln!) $ G.topSort g
cyclicNodes :: Graph n -> [n]
cyclicNodes g = concat [ xs | Right xs <- Util.Graph.scc g]
toDag :: Graph n -> Graph [n]
toDag (Graph g lv) = Graph g' ns' where
ns' = listArray (0,max_v) [ map (lv!) ns | ns <- nss ]
g' = listArray (0,max_v) [ snub [ v | n <- ns, v <- g!n ] | ns <- nss ]
max_v = length nss - 1
nss = map (flip f []) (G.scc g) where
f (Node v ts) rs = v:foldr f rs ts
type AdjacencyMatrix s = STArray s (Vertex,Vertex) Bool
type IAdjacencyMatrix = Array (Vertex,Vertex) Bool
transitiveClosureAM :: AdjacencyMatrix s -> ST s ()
transitiveClosureAM arr = do
bnds@(_,(max_v,_)) <- getBounds arr
forM_ [0 .. max_v] $ \k -> do
forM_ (range bnds) $ \ (i,j) -> do
dij <- readArray arr (i,j)
dik <- readArray arr (i,k)
dkj <- readArray arr (k,j)
writeArray arr (i,j) (dij || (dik && dkj))
transitiveReductionAM :: AdjacencyMatrix s -> ST s ()
transitiveReductionAM arr = do
bnds@(_,(max_v,_)) <- getBounds arr
transitiveClosureAM arr
(farr :: IAdjacencyMatrix) <- freeze arr
forM_ [0 .. max_v] $ \k -> do
forM_ (range bnds) $ \ (i,j) -> do
if farr!(k,i) && farr!(i,j) then
writeArray arr (k,j) False
else return ()
toAdjacencyMatrix :: G.Graph -> ST s (AdjacencyMatrix s)
toAdjacencyMatrix g = do
let (0,max_v) = bounds g
arr <- newArray ((0,0),(max_v,max_v)) False :: ST s (STArray s (Vertex,Vertex) Bool)
sequence_ [ writeArray arr (v,u) True | (v,vs) <- assocs g, u <- vs ]
return arr
fromAdjacencyMatrix :: AdjacencyMatrix s -> ST s G.Graph
fromAdjacencyMatrix arr = do
bnds@(_,(max_v,_)) <- getBounds arr
rs <- getAssocs arr
let rs' = [ x | (x,True) <- rs ]
return (listArray (0,max_v) [ [ v | (n',v) <- rs', n == n' ] | n <- [ 0 .. max_v] ])
transitiveClosure :: Graph n -> Graph n
transitiveClosure (Graph g ns) = let g' = runST (tc g) in (Graph g' ns) where
tc g = do
a <- toAdjacencyMatrix g
transitiveClosureAM a
fromAdjacencyMatrix a
transitiveReduction :: Graph n -> Graph n
transitiveReduction (Graph g ns) = let g' = runST (tc g) in (Graph g' ns) where
tc g = do
a <- toAdjacencyMatrix g
transitiveReductionAM a
fromAdjacencyMatrix a
instance Functor Graph where
fmap f (Graph g n) = Graph g (fmap f n)
--mapT :: (Vertex -> a -> b) -> Table a -> Table b
--mapT f t = listArray (bounds t) [ (f v (t!v)) | v <- indices t ]
restitchGraph :: Ord k => (n -> k) -> (n -> [k]) -> Graph n -> Graph n
restitchGraph fn fd (Graph g nr) = Graph g' nr where
kmap = Map.fromList [ (fn n,i) | (i,n) <- assocs nr ]
g' = listArray (bounds g) [mapMaybe (flip Map.lookup kmap) (snub $ fd n) | n <- elems nr]
mapGraph :: forall a b . (a -> [b] -> b) -> Graph a -> Graph b
mapGraph f (Graph gr nr) = runST $ do
mnr <- thaw nr :: ST s (STArray s Vertex a)
mnr <- mapArray Left mnr
let g i = readArray mnr i >>= \v -> case v of
Right m -> return m
Left l -> mdo
writeArray mnr i (Right r)
rs <- mapM g (gr!i)
let r = f l rs
return r
mapM_ g (range $ bounds nr)
mnr <- mapArray fromRight mnr
mnr <- unsafeFreeze mnr
return (Graph gr mnr)
-- this uses a very efficient union-find algorithm.
groupOverlapping :: Ord b => (a -> [b]) -> [a] -> (Map.Map b Int,[(Int,[a])])
groupOverlapping fn xs = runUF $ do
es <- forM xs $ \x -> new_ x
mref <- liftST (newSTRef Map.empty)
forM_ es $ \e -> do
let bs = fn $ fromElement e
cmap <- liftST $ readSTRef mref
sequence_ [ union_ x e | b <- bs, Just x <- [Map.lookup b cmap]]
liftST $ modifySTRef mref (Map.union (Map.fromList [ (x,e) | x <- bs ]))
cmap <- liftST $ readSTRef mref
cmap' <- A.mapM getUnique cmap
es <- (Set.toList . Set.fromList) `liftM` mapM find es
es' <- forM es $ \e -> do
u <- getUnique e
es <- getElements e
return (u,map fromElement es)
return (cmap',es')
-- Given a list of nodes, a function to convert nodes to a list of their names,
-- and a function to convert nodes to a list of names on which the node is
-- dependent, getBindGroups will return a list of bind groups generated from the
-- list of nodes given. Nodes with matching keys are placed in the same binding
-- group.
getBindGroups :: Ord name =>
[node] -> -- List of nodes
(node -> [name]) -> -- Function to convert nodes to unique names
(node -> [name]) -> -- Function to return dependencies of this node
[Either [node] [[node]]] -- binding groups, collecting nodes with overlapping names together
getBindGroups ns fn fd = ans where
(mp,ns') = (groupOverlapping fn ns)
ans = map f $ stronglyConnComp
[(xs,Just n,map (`Map.lookup` mp) (concatMap fd xs)) | (n,xs) <- ns']
f (AcyclicSCC x) = Left x
f (CyclicSCC xs) = Right xs
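-- Illustrative sketch (not part of the original module): mutually recursive
-- definitions end up in one cyclic binding group, while independent ones get
-- their own acyclic groups. The node type below is hypothetical.
--
-- > exampleGroups :: [Either [(String, [String])] [[(String, [String])]]]
-- > exampleGroups = getBindGroups defs (\(n, _) -> [n]) snd
-- >   where
-- >     defs = [("f", ["g"]), ("g", ["f"]), ("h", [])]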
|
hvr/jhc
|
src/Util/Graph.hs
|
Haskell
|
mit
| 9,869
|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="sr-CS">
<title>Directory List v2.3 LC</title>
<maps>
<homeID>directorylistv2_3_lc</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
thc202/zap-extensions
|
addOns/directorylistv2_3_lc/src/main/javahelp/help_sr_CS/helpset_sr_CS.hs
|
Haskell
|
apache-2.0
| 984
|
{-|
This modules defines the 'QueryArr' arrow, which is an arrow that represents
selecting data from a database, and composing multiple queries together.
-}
module Opaleye.SQLite.QueryArr (QueryArr, Query) where
import Opaleye.SQLite.Internal.QueryArr (QueryArr, Query)
|
bergmark/haskell-opaleye
|
opaleye-sqlite/src/Opaleye/SQLite/QueryArr.hs
|
Haskell
|
bsd-3-clause
| 283
|
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP, NoImplicitPrelude #-}
-----------------------------------------------------------------------------
-- |
-- Module : Text.Read.Lex
-- Copyright : (c) The University of Glasgow 2002
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : provisional
-- Portability : non-portable (uses Text.ParserCombinators.ReadP)
--
-- The cut-down Haskell lexer, used by Text.Read
--
-----------------------------------------------------------------------------
module Text.Read.Lex
-- lexing types
( Lexeme(..) -- :: *; Show, Eq
, Lexeme'(..)
, numberToInteger, numberToRangedRational
-- lexer
, lex -- :: ReadP Lexeme Skips leading spaces
, lex' -- :: ReadP Lexeme Skips leading spaces
, hsLex -- :: ReadP String
, lexChar -- :: ReadP Char Reads just one char, with H98 escapes
, readIntP -- :: Num a => a -> (Char -> Bool) -> (Char -> Int) -> ReadP a
, readOctP -- :: Num a => ReadP a
, readDecP -- :: Num a => ReadP a
, readHexP -- :: Num a => ReadP a
)
where
import Text.ParserCombinators.ReadP
#ifdef __GLASGOW_HASKELL__
import GHC.Base
import GHC.Num( Num(..), Integer )
import GHC.Show( Show(..) )
#ifndef __HADDOCK__
import {-# SOURCE #-} GHC.Unicode ( isSpace, isAlpha, isAlphaNum )
#endif
import GHC.Real( Integral, Rational, (%), fromIntegral,
toInteger, (^), infinity, notANumber )
import GHC.List
import GHC.Enum( maxBound )
#else
import Prelude hiding ( lex )
import Data.Char( chr, ord, isSpace, isAlpha, isAlphaNum )
import Data.Ratio( Ratio, (%) )
#endif
#ifdef __HUGS__
import Hugs.Prelude( Ratio(..) )
#endif
import Data.Maybe
import Control.Monad
-- -----------------------------------------------------------------------------
-- Lexing types
-- ^ Haskell lexemes.
data Lexeme
= Char Char -- ^ Character literal
| String String -- ^ String literal, with escapes interpreted
| Punc String -- ^ Punctuation or reserved symbol, e.g. @(@, @::@
| Ident String -- ^ Haskell identifier, e.g. @foo@, @Baz@
| Symbol String -- ^ Haskell symbol, e.g. @>>@, @:%@
| Int Integer -- ^ Integer literal
| Rat Rational -- ^ Floating point literal
| EOF
deriving (Eq, Show)
data Lexeme' = Ident' String
| Punc' String
| Symbol' String
| Number Number
deriving (Eq, Show)
data Number = MkNumber Int -- Base
Digits -- Integral part
| MkDecimal Digits -- Integral part
(Maybe Digits) -- Fractional part
(Maybe Integer) -- Exponent
deriving (Eq, Show)
numberToInteger :: Number -> Maybe Integer
numberToInteger (MkNumber base iPart) = Just (val (fromIntegral base) 0 iPart)
numberToInteger (MkDecimal iPart Nothing Nothing) = Just (val 10 0 iPart)
numberToInteger _ = Nothing
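-- Illustrative examples (not from the original source; they just exercise the
-- definitions above):
--   numberToInteger (MkNumber 16 [1,15])                 == Just 31   -- 0x1F
--   numberToInteger (MkDecimal [4,2] Nothing Nothing)    == Just 42
--   numberToInteger (MkDecimal [4,2] (Just [5]) Nothing) == Nothing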
numberToRangedRational :: (Int, Int) -> Number
-> Maybe Rational -- Nothing = Inf
numberToRangedRational (neg, pos) n@(MkDecimal iPart mFPart (Just exp))
= let mFirstDigit = case dropWhile (0 ==) iPart of
iPart'@(_ : _) -> Just (length iPart')
[] -> case mFPart of
Nothing -> Nothing
Just fPart ->
case span (0 ==) fPart of
(_, []) -> Nothing
(zeroes, _) ->
Just (negate (length zeroes))
in case mFirstDigit of
Nothing -> Just 0
Just firstDigit ->
let firstDigit' = firstDigit + fromInteger exp
in if firstDigit' > (pos + 3)
then Nothing
else if firstDigit' < (neg - 3)
then Just 0
else Just (numberToRational n)
numberToRangedRational _ n = Just (numberToRational n)
numberToRational :: Number -> Rational
numberToRational (MkNumber base iPart) = val (fromIntegral base) 0 iPart % 1
numberToRational (MkDecimal iPart mFPart mExp)
= let i = val 10 0 iPart
in case (mFPart, mExp) of
(Nothing, Nothing) -> i % 1
(Nothing, Just exp)
| exp >= 0 -> (i * (10 ^ exp)) % 1
| otherwise -> i % (10 ^ (- exp))
(Just fPart, Nothing) -> fracExp 0 i fPart
(Just fPart, Just exp) -> fracExp exp i fPart
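-- A quick sanity check of the conversion (illustrative, not part of the
-- original module):
--   numberToRational (MkNumber 16 [1,15])                 == 31 % 1
--   numberToRational (MkDecimal [4] (Just [2,5]) Nothing) == 17 % 4   -- 4.25
--   numberToRational (MkDecimal [4] (Just [3]) (Just 2))  == 430 % 1  -- 4.3e2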
-- -----------------------------------------------------------------------------
-- Lexing
lex :: ReadP Lexeme
lex = skipSpaces >> lexToken
lex' :: ReadP Lexeme'
lex' = skipSpaces >> lexToken'
hsLex :: ReadP String
-- ^ Haskell lexer: returns the lexed string, rather than the lexeme
hsLex = do skipSpaces
(s,_) <- gather lexToken
return s
lexToken :: ReadP Lexeme
lexToken = lexEOF +++
lexLitChar +++
lexString +++
lexPunc +++
lexSymbol +++
lexId +++
lexNumber
lexToken' :: ReadP Lexeme'
lexToken' = lexSymbol' +++
lexId' +++
fmap Number lexNumber'
-- ----------------------------------------------------------------------
-- End of file
lexEOF :: ReadP Lexeme
lexEOF = do s <- look
guard (null s)
return EOF
-- ---------------------------------------------------------------------------
-- Single character lexemes
lexPunc :: ReadP Lexeme
lexPunc =
do c <- satisfy isPuncChar
return (Punc [c])
where
isPuncChar c = c `elem` ",;()[]{}`"
-- ----------------------------------------------------------------------
-- Symbols
lexSymbol :: ReadP Lexeme
lexSymbol =
do s <- munch1 isSymbolChar
if s `elem` reserved_ops then
return (Punc s) -- Reserved-ops count as punctuation
else
return (Symbol s)
where
isSymbolChar c = c `elem` "!@#$%&*+./<=>?\\^|:-~"
reserved_ops = ["..", "::", "=", "\\", "|", "<-", "->", "@", "~", "=>"]
lexSymbol' :: ReadP Lexeme'
lexSymbol' =
do s <- munch1 isSymbolChar
if s `elem` reserved_ops then
return (Punc' s) -- Reserved-ops count as punctuation
else
return (Symbol' s)
where
isSymbolChar c = c `elem` "!@#$%&*+./<=>?\\^|:-~"
reserved_ops = ["..", "::", "=", "\\", "|", "<-", "->", "@", "~", "=>"]
-- ----------------------------------------------------------------------
-- identifiers
lexId :: ReadP Lexeme
lexId = lex_nan <++ lex_id
where
-- NaN and Infinity look like identifiers, so
-- we parse them first.
lex_nan = (string "NaN" >> return (Rat notANumber)) +++
(string "Infinity" >> return (Rat infinity))
lex_id = do c <- satisfy isIdsChar
s <- munch isIdfChar
return (Ident (c:s))
-- Identifiers can start with a '_'
isIdsChar c = isAlpha c || c == '_'
isIdfChar c = isAlphaNum c || c `elem` "_'"
lexId' :: ReadP Lexeme'
lexId' = do c <- satisfy isIdsChar
s <- munch isIdfChar
return (Ident' (c:s))
where
-- Identifiers can start with a '_'
isIdsChar c = isAlpha c || c == '_'
isIdfChar c = isAlphaNum c || c `elem` "_'"
#ifndef __GLASGOW_HASKELL__
infinity, notANumber :: Rational
infinity = 1 :% 0
notANumber = 0 :% 0
#endif
-- ---------------------------------------------------------------------------
-- Lexing character literals
lexLitChar :: ReadP Lexeme
lexLitChar =
do _ <- char '\''
(c,esc) <- lexCharE
guard (esc || c /= '\'') -- Eliminate '' possibility
_ <- char '\''
return (Char c)
lexChar :: ReadP Char
lexChar = do { (c,_) <- lexCharE; return c }
lexCharE :: ReadP (Char, Bool) -- "escaped or not"?
lexCharE =
do c1 <- get
if c1 == '\\'
then do c2 <- lexEsc; return (c2, True)
else do return (c1, False)
where
lexEsc =
lexEscChar
+++ lexNumeric
+++ lexCntrlChar
+++ lexAscii
lexEscChar =
do c <- get
case c of
'a' -> return '\a'
'b' -> return '\b'
'f' -> return '\f'
'n' -> return '\n'
'r' -> return '\r'
't' -> return '\t'
'v' -> return '\v'
'\\' -> return '\\'
'\"' -> return '\"'
'\'' -> return '\''
_ -> pfail
lexNumeric =
do base <- lexBaseChar <++ return 10
n <- lexInteger base
guard (n <= toInteger (ord maxBound))
return (chr (fromInteger n))
lexCntrlChar =
do _ <- char '^'
c <- get
case c of
'@' -> return '\^@'
'A' -> return '\^A'
'B' -> return '\^B'
'C' -> return '\^C'
'D' -> return '\^D'
'E' -> return '\^E'
'F' -> return '\^F'
'G' -> return '\^G'
'H' -> return '\^H'
'I' -> return '\^I'
'J' -> return '\^J'
'K' -> return '\^K'
'L' -> return '\^L'
'M' -> return '\^M'
'N' -> return '\^N'
'O' -> return '\^O'
'P' -> return '\^P'
'Q' -> return '\^Q'
'R' -> return '\^R'
'S' -> return '\^S'
'T' -> return '\^T'
'U' -> return '\^U'
'V' -> return '\^V'
'W' -> return '\^W'
'X' -> return '\^X'
'Y' -> return '\^Y'
'Z' -> return '\^Z'
'[' -> return '\^['
'\\' -> return '\^\'
']' -> return '\^]'
'^' -> return '\^^'
'_' -> return '\^_'
_ -> pfail
lexAscii =
do choice
[ (string "SOH" >> return '\SOH') <++
(string "SO" >> return '\SO')
-- \SO and \SOH need maximal-munch treatment
-- See the Haskell report Sect 2.6
, string "NUL" >> return '\NUL'
, string "STX" >> return '\STX'
, string "ETX" >> return '\ETX'
, string "EOT" >> return '\EOT'
, string "ENQ" >> return '\ENQ'
, string "ACK" >> return '\ACK'
, string "BEL" >> return '\BEL'
, string "BS" >> return '\BS'
, string "HT" >> return '\HT'
, string "LF" >> return '\LF'
, string "VT" >> return '\VT'
, string "FF" >> return '\FF'
, string "CR" >> return '\CR'
, string "SI" >> return '\SI'
, string "DLE" >> return '\DLE'
, string "DC1" >> return '\DC1'
, string "DC2" >> return '\DC2'
, string "DC3" >> return '\DC3'
, string "DC4" >> return '\DC4'
, string "NAK" >> return '\NAK'
, string "SYN" >> return '\SYN'
, string "ETB" >> return '\ETB'
, string "CAN" >> return '\CAN'
, string "EM" >> return '\EM'
, string "SUB" >> return '\SUB'
, string "ESC" >> return '\ESC'
, string "FS" >> return '\FS'
, string "GS" >> return '\GS'
, string "RS" >> return '\RS'
, string "US" >> return '\US'
, string "SP" >> return '\SP'
, string "DEL" >> return '\DEL'
]
-- ---------------------------------------------------------------------------
-- string literal
lexString :: ReadP Lexeme
lexString =
do _ <- char '"'
body id
where
body f =
do (c,esc) <- lexStrItem
if c /= '"' || esc
then body (f.(c:))
else let s = f "" in
return (String s)
lexStrItem = (lexEmpty >> lexStrItem)
+++ lexCharE
lexEmpty =
do _ <- char '\\'
c <- get
case c of
'&' -> do return ()
_ | isSpace c -> do skipSpaces; _ <- char '\\'; return ()
_ -> do pfail
-- ---------------------------------------------------------------------------
-- Lexing numbers
type Base = Int
type Digits = [Int]
lexNumber :: ReadP Lexeme
lexNumber
= lexHexOct <++ -- First try for hex or octal 0x, 0o etc
-- If that fails, try for a decimal number
lexDecNumber -- Start with ordinary digits
lexNumber' :: ReadP Number
lexNumber'
= lexHexOct' <++ -- First try for hex or octal 0x, 0o etc
-- If that fails, try for a decimal number
lexDecNumber'
lexHexOct :: ReadP Lexeme
lexHexOct
= do _ <- char '0'
base <- lexBaseChar
digits <- lexDigits base
return (Int (val (fromIntegral base) 0 digits))
lexHexOct' :: ReadP Number
lexHexOct'
= do _ <- char '0'
base <- lexBaseChar
digits <- lexDigits base
return (MkNumber base digits)
lexBaseChar :: ReadP Int
-- Lex a single character indicating the base; fail if not there
lexBaseChar = do { c <- get;
case c of
'o' -> return 8
'O' -> return 8
'x' -> return 16
'X' -> return 16
_ -> pfail }
lexDecNumber :: ReadP Lexeme
lexDecNumber =
do xs <- lexDigits 10
mFrac <- lexFrac <++ return Nothing
mExp <- lexExp <++ return Nothing
return (value xs mFrac mExp)
where
value xs mFrac mExp = valueFracExp (val 10 0 xs) mFrac mExp
valueFracExp :: Integer -> Maybe Digits -> Maybe Integer
-> Lexeme
valueFracExp a Nothing Nothing
= Int a -- 43
valueFracExp a Nothing (Just exp)
| exp >= 0 = Int (a * (10 ^ exp)) -- 43e7
| otherwise = Rat (a % (10 ^ (-exp))) -- 43e-7
valueFracExp a (Just fs) mExp -- 4.3[e2]
= Rat (fracExp (fromMaybe 0 mExp) a fs)
-- Be a bit more efficient in calculating the Rational.
-- Instead of calculating the fractional part alone, then
-- adding the integral part and finally multiplying with
-- 10 ^ exp if an exponent was given, do it all at once.
lexDecNumber' :: ReadP Number
lexDecNumber' =
do xs <- lexDigits 10
mFrac <- lexFrac <++ return Nothing
mExp <- lexExp <++ return Nothing
return (MkDecimal xs mFrac mExp)
lexFrac :: ReadP (Maybe Digits)
-- Read the fractional part; fail if it doesn't
-- start ".d" where d is a digit
lexFrac = do _ <- char '.'
fraction <- lexDigits 10
return (Just fraction)
lexExp :: ReadP (Maybe Integer)
lexExp = do _ <- char 'e' +++ char 'E'
exp <- signedExp +++ lexInteger 10
return (Just exp)
where
signedExp
= do c <- char '-' +++ char '+'
n <- lexInteger 10
return (if c == '-' then -n else n)
lexDigits :: Int -> ReadP Digits
-- Lex a non-empty sequence of digits in specified base
lexDigits base =
do s <- look
xs <- scan s id
guard (not (null xs))
return xs
where
scan (c:cs) f = case valDig base c of
Just n -> do _ <- get; scan cs (f.(n:))
Nothing -> do return (f [])
scan [] f = do return (f [])
lexInteger :: Base -> ReadP Integer
lexInteger base =
do xs <- lexDigits base
return (val (fromIntegral base) 0 xs)
val :: Num a => a -> a -> Digits -> a
-- val base y [d1,..,dn] = y ++ [d1,..,dn], as it were
val _ y [] = y
val base y (x:xs) = y' `seq` val base y' xs
where
y' = y * base + fromIntegral x
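-- For instance (illustrative):
--   val 10 0 [1,2,3]   == 123
--   val 2  0 [1,0,1,1] == 11
--   val 16 1 [15]      == 31   -- an existing prefix 1 followed by 0xF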
-- Calculate a Rational from the exponent [of 10 to multiply with],
-- the integral part of the mantissa and the digits of the fractional
-- part. Leaving the calculation of the power of 10 until the end,
-- when we know the effective exponent, saves multiplications.
-- More importantly, this way we need at most one gcd instead of three.
--
-- frac was never used with anything but Integer and base 10, so
-- those are hardcoded now (trivial to change if necessary).
fracExp :: Integer -> Integer -> Digits -> Rational
fracExp exp mant []
| exp < 0 = mant % (10 ^ (-exp))
| otherwise = fromInteger (mant * 10 ^ exp)
fracExp exp mant (d:ds) = exp' `seq` mant' `seq` fracExp exp' mant' ds
where
exp' = exp - 1
mant' = mant * 10 + fromIntegral d
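-- Worked examples (illustrative):
--   fracExp 0 4 [3]   == 43 % 10   -- 4.3
--   fracExp 2 4 [3]   == 430 % 1   -- 4.3e2
--   fracExp 0 4 [2,5] == 17 % 4    -- 4.25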
valDig :: (Eq a, Num a) => a -> Char -> Maybe Int
valDig 8 c
| '0' <= c && c <= '7' = Just (ord c - ord '0')
| otherwise = Nothing
valDig 10 c = valDecDig c
valDig 16 c
| '0' <= c && c <= '9' = Just (ord c - ord '0')
| 'a' <= c && c <= 'f' = Just (ord c - ord 'a' + 10)
| 'A' <= c && c <= 'F' = Just (ord c - ord 'A' + 10)
| otherwise = Nothing
valDig _ _ = error "valDig: Bad base"
valDecDig :: Char -> Maybe Int
valDecDig c
| '0' <= c && c <= '9' = Just (ord c - ord '0')
| otherwise = Nothing
-- ----------------------------------------------------------------------
-- other numeric lexing functions
readIntP :: Num a => a -> (Char -> Bool) -> (Char -> Int) -> ReadP a
readIntP base isDigit valDigit =
do s <- munch1 isDigit
return (val base 0 (map valDigit s))
readIntP' :: (Eq a, Num a) => a -> ReadP a
readIntP' base = readIntP base isDigit valDigit
where
isDigit c = maybe False (const True) (valDig base c)
valDigit c = maybe 0 id (valDig base c)
readOctP, readDecP, readHexP :: (Eq a, Num a) => ReadP a
readOctP = readIntP' 8
readDecP = readIntP' 10
readHexP = readIntP' 16
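-- Example usage via ReadP (illustrative; readP_to_S comes from
-- Text.ParserCombinators.ReadP, results shown at type Int):
--   readP_to_S readHexP "ff!" == [(255, "!")]
--   readP_to_S readOctP "17x" == [(15, "x")]
--   readP_to_S readDecP "042" == [(42, "")]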
|
mightymoose/liquidhaskell
|
benchmarks/base-4.5.1.0/Text/Read/Lex.hs
|
Haskell
|
bsd-3-clause
| 17,364
|
{-# LANGUAGE CPP #-}
module Examples.Hello where
import Options.Applicative
#if __GLASGOW_HASKELL__ <= 702
import Data.Monoid
(<>) :: Monoid a => a -> a -> a
(<>) = mappend
#endif
data Sample = Sample
{ hello :: String
, quiet :: Bool }
deriving Show
sample :: Parser Sample
sample = Sample
<$> strOption
( long "hello"
<> metavar "TARGET"
<> help "Target for the greeting" )
<*> switch
( long "quiet"
<> help "Whether to be quiet" )
greet :: Sample -> IO ()
greet (Sample h False) = putStrLn $ "Hello, " ++ h
greet _ = return ()
main :: IO ()
main = execParser opts >>= greet
opts :: ParserInfo Sample
opts = info (sample <**> helper)
( fullDesc
<> progDesc "Print a greeting for TARGET"
<> header "hello - a test for optparse-applicative" )
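-- Illustrative invocations (assuming the built executable is called "hello"):
--   $ hello --hello world          -- prints "Hello, world"
--   $ hello --hello world --quiet  -- prints nothing
--   $ hello --help                 -- shows the generated usage text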
|
begriffs/optparse-applicative
|
tests/Examples/Hello.hs
|
Haskell
|
bsd-3-clause
| 812
|
module PatternMatch7 where
g = (\(y:ys) -> (case y of
p | p == 45 -> 12
_ -> 52))
f x = (\(p:ps) -> (case p of
l | x == 45 -> 12
_ -> 52))
|
kmate/HaRe
|
old/testing/foldDef/PatternMatch7_TokOut.hs
|
Haskell
|
bsd-3-clause
| 205
|
module Main where
main :: IO ()
main = putStrLn "This is foo from has-exe-foo-too"
|
AndreasPK/stack
|
test/integration/tests/1198-multiple-exes-with-same-name/files/has-exe-foo-too/app/Main.hs
|
Haskell
|
bsd-3-clause
| 84
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
#if !MIN_VERSION_base(4,8,0)
{-# LANGUAGE DeriveDataTypeable #-}
import Control.Applicative ((<$>), (<*>), (*>))
#endif
import Control.DeepSeq
import Criterion.Main
import qualified Data.ByteString as BS
import Data.Int
import qualified Data.IntMap.Strict as IntMap
import qualified Data.IntSet as IntSet
import qualified Data.Map.Strict as Map
import qualified Data.Set as Set
import Data.Store
import Data.Typeable
import qualified Data.Vector as V
import qualified Data.Vector.Storable as SV
import Data.Word
import GHC.Generics
-- TODO: add packer
#if COMPARISON_BENCH
import qualified Data.Binary as Binary
import qualified Data.Serialize as Cereal
import qualified Data.ByteString.Lazy as BL
import Data.Vector.Serialize ()
#endif
data SomeData = SomeData !Int64 !Word8 !Double
deriving (Eq, Show, Generic, Typeable)
instance NFData SomeData where
rnf x = x `seq` ()
instance Store SomeData
#if COMPARISON_BENCH
instance Cereal.Serialize SomeData
instance Binary.Binary SomeData
#endif
main :: IO ()
main = do
#if SMALL_BENCH
let is = 0::Int
sds = SomeData 1 1 1
smallprods = (SmallProduct 0 1 2 3)
smallmanualprods = (SmallProductManual 0 1 2 3)
sss = [SS1 1, SS2 2, SS3 3, SS4 4]
ssms = [SSM1 1, SSM2 2, SSM3 3, SSM4 4]
nestedTuples = ((1,2),(3,4)) :: ((Int,Int),(Int,Int))
#else
let is = V.enumFromTo 1 100 :: V.Vector Int
sds = (\i -> SomeData i (fromIntegral i) (fromIntegral i))
<$> V.enumFromTo 1 100
smallprods = (\ i -> SmallProduct i (i+1) (i+2) (i+3))
<$> V.enumFromTo 1 100
smallmanualprods = (\ i -> SmallProductManual i (i+1) (i+2) (i+3))
<$> V.enumFromTo 1 100
sss = (\i -> case i `mod` 4 of
0 -> SS1 (fromIntegral i)
1 -> SS2 (fromIntegral i)
2 -> SS3 (fromIntegral i)
3 -> SS4 (fromIntegral i)
_ -> error "This does not compute."
) <$> V.enumFromTo 1 (100 :: Int)
ssms = (\i -> case i `mod` 4 of
0 -> SSM1 (fromIntegral i)
1 -> SSM2 (fromIntegral i)
2 -> SSM3 (fromIntegral i)
3 -> SSM4 (fromIntegral i)
_ -> error "This does not compute."
) <$> V.enumFromTo 1 (100 :: Int)
nestedTuples = (\i -> ((i,i+1),(i+2,i+3))) <$> V.enumFromTo (1::Int) 100
ints = [1..100] :: [Int]
pairs = map (\x -> (x, x)) ints
strings = show <$> ints
intsSet = Set.fromDistinctAscList ints
intSet = IntSet.fromDistinctAscList ints
intsMap = Map.fromDistinctAscList pairs
intMap = IntMap.fromDistinctAscList pairs
stringsSet = Set.fromList strings
stringsMap = Map.fromList (zip strings ints)
#endif
defaultMain
[ bgroup "encode"
[ benchEncode is
#if !SMALL_BENCH
, benchEncode' "1kb storable" (SV.fromList ([1..256] :: [Int32]))
, benchEncode' "10kb storable" (SV.fromList ([1..(256 * 10)] :: [Int32]))
, benchEncode' "1kb normal" (V.fromList ([1..256] :: [Int32]))
, benchEncode' "10kb normal" (V.fromList ([1..(256 * 10)] :: [Int32]))
, benchEncode intsSet
, benchEncode intSet
, benchEncode intsMap
, benchEncode intMap
, benchEncode stringsSet
, benchEncode stringsMap
#endif
, benchEncode smallprods
, benchEncode smallmanualprods
, benchEncode sss
, benchEncode ssms
, benchEncode nestedTuples
, benchEncode sds
]
, bgroup "decode"
[ benchDecode is
#if !SMALL_BENCH
, benchDecode' "1kb storable" (SV.fromList ([1..256] :: [Int32]))
, benchDecode' "10kb storable" (SV.fromList ([1..(256 * 10)] :: [Int32]))
, benchDecode' "1kb normal" (V.fromList ([1..256] :: [Int32]))
, benchDecode' "10kb normal" (V.fromList ([1..(256 * 10)] :: [Int32]))
, benchDecode intsSet
, benchDecode intSet
, benchDecode intsMap
, benchDecode intMap
, benchDecode stringsSet
, benchDecode stringsMap
#endif
, benchDecode smallprods
, benchDecode smallmanualprods
, benchDecode sss
, benchDecode ssms
, benchDecode nestedTuples
, benchDecode sds
]
]
type Ctx a =
( Store a, Typeable a, NFData a
#if COMPARISON_BENCH
, Binary.Binary a
, Cereal.Serialize a
#endif
)
benchEncode :: Ctx a => a -> Benchmark
benchEncode = benchEncode' ""
benchEncode' :: Ctx a => String -> a -> Benchmark
benchEncode' msg x0 =
env (return x0) $ \x ->
let label = msg ++ " (" ++ show (typeOf x0) ++ ")"
benchStore name = bench name (nf encode x) in
#if COMPARISON_BENCH
bgroup label
[ benchStore "store"
, bench "cereal" (nf Cereal.encode x)
, bench "binary" (nf Binary.encode x)
]
#else
benchStore label
#endif
benchDecode :: Ctx a => a -> Benchmark
benchDecode = benchDecode' ""
-- TODO: comparison bench for decode
benchDecode' :: forall a. Ctx a => String -> a -> Benchmark
#if COMPARISON_BENCH
benchDecode' prefix x0 =
bgroup label
[ env (return (encode x0)) $ \x -> bench "store" (nf (decodeEx :: BS.ByteString -> a) x)
, env (return (Cereal.encode x0)) $ \x -> bench "cereal" (nf ((ensureRight . Cereal.decode) :: BS.ByteString -> a) x)
, env (return (Binary.encode x0)) $ \x -> bench "binary" (nf (Binary.decode :: BL.ByteString -> a) x)
]
where
label = prefix ++ " (" ++ show (typeOf x0) ++ ")"
ensureRight (Left x) = error "left!"
ensureRight (Right x) = x
#else
benchDecode' prefix x0 =
env (return (encode x0)) $ \x ->
bench (prefix ++ " (" ++ show (typeOf x0) ++ ")") (nf (decodeEx :: BS.ByteString -> a) x)
#endif
------------------------------------------------------------------------
-- Serialized datatypes
data SmallProduct = SmallProduct Int32 Int32 Int32 Int32
deriving (Generic, Show, Typeable)
instance NFData SmallProduct
instance Store SmallProduct
data SmallProductManual = SmallProductManual Int32 Int32 Int32 Int32
deriving (Generic, Show, Typeable)
instance NFData SmallProductManual
instance Store SmallProductManual where
size = ConstSize 16
peek = SmallProductManual <$> peek <*> peek <*> peek <*> peek
poke (SmallProductManual a b c d) = poke a *> poke b *> poke c *> poke d
data SmallSum
= SS1 Int8
| SS2 Int32
| SS3 Int64
| SS4 Word32
deriving (Generic, Show, Typeable)
instance NFData SmallSum
instance Store SmallSum
data SmallSumManual
= SSM1 Int8
| SSM2 Int32
| SSM3 Int64
| SSM4 Word32
deriving (Generic, Show, Typeable)
instance NFData SmallSumManual
instance Store SmallSumManual where
size = VarSize $ \x -> 1 + case x of
SSM1{} -> 1
SSM2{} -> 4
SSM3{} -> 8
SSM4{} -> 4
peek = do
tag <- peek
case tag :: Word8 of
0 -> SSM1 <$> peek
1 -> SSM2 <$> peek
2 -> SSM3 <$> peek
3 -> SSM4 <$> peek
_ -> fail "Invalid tag"
poke (SSM1 x) = poke (0 :: Word8) >> poke x
poke (SSM2 x) = poke (1 :: Word8) >> poke x
poke (SSM3 x) = poke (2 :: Word8) >> poke x
poke (SSM4 x) = poke (3 :: Word8) >> poke x
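-- For orientation (illustrative): with the VarSize above, an encoded value
-- occupies a 1-byte tag plus its payload, e.g. SSM1 5 takes 2 bytes and
-- SSM3 5 takes 9 bytes.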
-- TODO: add TH generation of the above, and add LargeSum / LargeProduct cases
#if COMPARISON_BENCH
instance Binary.Binary SmallProduct
instance Binary.Binary SmallSum
instance Cereal.Serialize SmallProduct
instance Cereal.Serialize SmallSum
instance Binary.Binary SmallProductManual where
get = SmallProductManual <$> Binary.get <*> Binary.get <*> Binary.get <*> Binary.get
put (SmallProductManual a b c d) = Binary.put a *> Binary.put b *> Binary.put c *> Binary.put d
instance Binary.Binary SmallSumManual where
get = do
tag <- Binary.get
case tag :: Word8 of
0 -> SSM1 <$> Binary.get
1 -> SSM2 <$> Binary.get
2 -> SSM3 <$> Binary.get
3 -> SSM4 <$> Binary.get
_ -> fail "Invalid tag"
put (SSM1 x) = Binary.put (0 :: Word8) *> Binary.put x
put (SSM2 x) = Binary.put (1 :: Word8) *> Binary.put x
put (SSM3 x) = Binary.put (2 :: Word8) *> Binary.put x
put (SSM4 x) = Binary.put (3 :: Word8) *> Binary.put x
instance Cereal.Serialize SmallProductManual where
get = SmallProductManual <$> Cereal.get <*> Cereal.get <*> Cereal.get <*> Cereal.get
put (SmallProductManual a b c d) = Cereal.put a *> Cereal.put b *> Cereal.put c *> Cereal.put d
instance Cereal.Serialize SmallSumManual where
get = do
tag <- Cereal.get
case tag :: Word8 of
0 -> SSM1 <$> Cereal.get
1 -> SSM2 <$> Cereal.get
2 -> SSM3 <$> Cereal.get
3 -> SSM4 <$> Cereal.get
_ -> fail "Invalid tag"
put (SSM1 x) = Cereal.put (0 :: Word8) *> Cereal.put x
put (SSM2 x) = Cereal.put (1 :: Word8) *> Cereal.put x
put (SSM3 x) = Cereal.put (2 :: Word8) *> Cereal.put x
put (SSM4 x) = Cereal.put (3 :: Word8) *> Cereal.put x
#endif
|
fpco/store
|
bench/Bench.hs
|
Haskell
|
mit
| 9,639
|
module Game.Render.Core.Error
( initLogging
, logGL
)
where
import System.Log.Logger
import Control.Monad
import System.Log.Handler.Simple
import System.Log.Handler (setFormatter)
import System.Log.Formatter
import qualified Graphics.Rendering.OpenGL as GL
initLogging :: IO ()
initLogging = do
h <- fileHandler "opengl.log" ERROR >>= \lh -> return $
setFormatter lh (simpleLogFormatter "[$time : $loggername : $prio] $msg")
updateGlobalLogger "OpenGL" (addHandler h >> setLevel DEBUG)
--updateGlobalLogger "OpenGL" (setLevel DEBUG)
logGL :: String -> IO ()
logGL msg = do
errors <- GL.get GL.errors
unless (null errors) $
errorM "OpenGL" $ msg ++ " with: " ++ show errors
|
mfpi/q-inqu
|
Game/Render/Core/Error.hs
|
Haskell
|
mit
| 730
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE PackageImports #-}
module Blocks where
import Control.Concurrent.STM (TQueue, atomically, newTQueueIO, tryReadTQueue, writeTQueue)
import Control.Applicative
import Control.Monad
import Control.Lens ((+~), (^.), contains)
import Data.Foldable (foldMap, traverse_)
import Data.Vinyl
import Data.Set (Set)
import Data.Vinyl.Universe ((:::), SField(..))
import Graphics.GLUtil
import qualified Graphics.UI.GLFW as GLFW
import Graphics.GLUtil.Camera2D
import Graphics.Rendering.OpenGL
import "GLFW-b" Graphics.UI.GLFW as GLFW
import Graphics.VinylGL
import Data.Vector.Storable (fromList)
import Linear (V1(..), V2(..), _x, M33)
import System.FilePath ((</>))
import FRP.Elerea.Simple
import Data.List (transpose)
import Data.Maybe
import Control.Concurrent (threadDelay)
import Control.Monad.RWS.Strict (RWST, ask, asks, evalRWST, get, liftIO, modify, put)
import Data.Bool.Extras
import System.Random
import Graphics.Renderer.FontAtlas
import Data.Char
import Window
--------------------------------------------------------------------------------
-- Utils
--------------------------------------------------------------------------------
toDigits :: Int -> [Int]
toDigits x = let d = x `div` 10
m = x `mod` 10
in if d == 0
then [m]
else toDigits d ++ [m]
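-- e.g. (illustrative): toDigits 1230 == [1,2,3,0], toDigits 7 == [7]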
--------------------------------------------------------------------------------
-- Board
--------------------------------------------------------------------------------
type BlockID = Int
noBlockID = 7 :: BlockID
blockIDI = 6 :: BlockID
blockIDO = 5 :: BlockID
blockIDS = 4 :: BlockID
blockIDZ = 3 :: BlockID
blockIDT = 2 :: BlockID
blockIDJ = 1 :: BlockID
blockIDL = 0 :: BlockID
chooseBlock :: IO BlockID
chooseBlock = do
bid <- getStdRandom (randomR (0, 6))
return bid
-- The board starts with noBlockID in every cell
type Row = [BlockID]
type Board = [Row]
printBoard :: Board -> IO ()
printBoard = mapM_ (\row -> printRow row >> putStr "\n")
where printRow = mapM_ (\v -> putStr (show v) >> putStr " ")
emptyRow :: Row
emptyRow = replicate 10 noBlockID
emptyBoard ::Board
emptyBoard = replicate 20 emptyRow
applyBlock :: Board -> Block -> Board
applyBlock = error ""
rowFull :: Row -> Bool
rowFull = all isBlock
isBlock :: BlockID -> Bool
isBlock = not . noBlock
noBlock :: BlockID -> Bool
noBlock = (noBlockID==)
-- Checks for full rows and removes them, prepending an empty row at the top for each one removed.
-- Returns the updated board and the number of lines deleted.
updateRows :: Board -> (Board, Int)
updateRows board = (replicate n emptyRow ++ b, n)
where (b, n) = foldr (\row (board, n) -> if rowFull row
then (board, n+1)
else (row : board, n) ) ([], 0) board
-- Adopt the original Nintendo scoring system:
--   Number of lines:  1                 2                  3                  4
--   Points awarded:   40 * (level + 1)  100 * (level + 1)  300 * (level + 1)  1200 * (level + 1)
--
-- Returns the number of points given the number of rows and level
calPoints :: Int -> Int -> Int
calPoints lines level = ([40, 100, 300, 1200] !! (lines-1)) * (level + 1)
levelUp :: Int -> Int
levelUp linesCompleted = bool (bool (1 + ((linesCompleted - 1) `div` 10)) 10 (linesCompleted > 90))
1 (linesCompleted == 0)
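-- Illustrative values (not in the original source):
--   calPoints 4 0 == 1200   -- a tetris on level 0
--   calPoints 1 2 == 120
--   levelUp 0     == 1
--   levelUp 25    == 3
--   levelUp 95    == 10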
type Block = [[BlockID]]
{-
Since we know the size of each of our blocks, we could define them using dependently
typed matrices and well-typed transforms. That would mean a bit more work in the
apply-to-board functions, since blocks have different sizes.
We can come back to this later...
-}
--------------
pivit :: Pos
pivit = (2,1)
blockI0 = [ [noBlockID, noBlockID, noBlockID, noBlockID],
[blockIDI, blockIDI, blockIDI, blockIDI],
[noBlockID, noBlockID, noBlockID, noBlockID],
[noBlockID, noBlockID, noBlockID, noBlockID] ] :: Block
blockI1 = [ [noBlockID, noBlockID, blockIDI, noBlockID],
[noBlockID, noBlockID, blockIDI, noBlockID],
[noBlockID, noBlockID, blockIDI, noBlockID],
[noBlockID, noBlockID, blockIDI, noBlockID] ] :: Block
blockJ0 = [ [ noBlockID, noBlockID, noBlockID, noBlockID],
[ noBlockID, blockIDJ, blockIDJ, blockIDJ],
[ noBlockID, noBlockID, noBlockID, blockIDJ],
[ noBlockID, noBlockID, noBlockID, noBlockID] ] :: Block
blockJ1 = [ [ noBlockID, noBlockID, blockIDJ, blockIDJ],
[ noBlockID, noBlockID, blockIDJ, noBlockID],
[ noBlockID, noBlockID, blockIDJ, noBlockID],
[ noBlockID, noBlockID, noBlockID, noBlockID] ] :: Block
blockJ2 = [ [ noBlockID, blockIDJ, noBlockID, noBlockID],
[ noBlockID, blockIDJ, blockIDJ, blockIDJ],
[ noBlockID, noBlockID, noBlockID, noBlockID],
[ noBlockID, noBlockID, noBlockID, noBlockID] ] :: Block
blockJ3 = [ [ noBlockID, noBlockID, blockIDJ, noBlockID],
[ noBlockID, noBlockID, blockIDJ, noBlockID],
[ noBlockID, blockIDJ, blockIDJ, noBlockID],
[ noBlockID, noBlockID, noBlockID, noBlockID] ] :: Block
blockL0 = [ [ noBlockID, noBlockID, noBlockID, noBlockID],
[ noBlockID, blockIDL, blockIDL, blockIDL],
[ noBlockID, blockIDL, noBlockID, noBlockID],
[ noBlockID, noBlockID, noBlockID, noBlockID] ] :: Block
blockL1 = [ [ noBlockID, noBlockID, blockIDL, noBlockID],
[ noBlockID, noBlockID, blockIDL, noBlockID],
[ noBlockID, noBlockID, blockIDL, blockIDL],
[ noBlockID, noBlockID, noBlockID, noBlockID] ] :: Block
blockL2 = [ [ noBlockID, noBlockID, noBlockID, blockIDL],
[ noBlockID, blockIDL, blockIDL, blockIDL],
[ noBlockID, noBlockID, noBlockID, noBlockID],
[ noBlockID, noBlockID, noBlockID, noBlockID] ] :: Block
blockL3 = [ [ noBlockID, blockIDL, blockIDL, noBlockID],
[ noBlockID, noBlockID, blockIDL, noBlockID],
[ noBlockID, noBlockID, blockIDL, noBlockID],
[ noBlockID, noBlockID, noBlockID, noBlockID] ] :: Block
blockO0 = [ [ noBlockID, noBlockID, noBlockID, noBlockID],
[ noBlockID, blockIDO, blockIDO, noBlockID],
[ noBlockID, blockIDO, blockIDO, noBlockID],
[ noBlockID, noBlockID, noBlockID, noBlockID] ] :: Block
blockS0 = [ [ noBlockID, noBlockID, noBlockID, noBlockID],
[ noBlockID, noBlockID, blockIDS, blockIDS],
[ noBlockID, blockIDS, blockIDS, noBlockID],
[ noBlockID, noBlockID, noBlockID, noBlockID] ] :: Block
blockS1 = [ [ noBlockID, noBlockID, blockIDS, noBlockID],
[ noBlockID, noBlockID, blockIDS, blockIDS],
[ noBlockID, noBlockID, noBlockID, blockIDS],
[ noBlockID, noBlockID, noBlockID, noBlockID] ] :: Block
blockT0 = [ [ noBlockID, noBlockID, noBlockID, noBlockID],
[ noBlockID, blockIDT, blockIDT, blockIDT],
[ noBlockID, noBlockID, blockIDT, noBlockID],
[ noBlockID, noBlockID, noBlockID, noBlockID] ] :: Block
blockT1 = [ [ noBlockID, noBlockID, blockIDT, noBlockID],
[ noBlockID, noBlockID, blockIDT, blockIDT],
[ noBlockID, noBlockID, blockIDT, noBlockID],
[ noBlockID, noBlockID, noBlockID, noBlockID] ] :: Block
blockT2 = [ [ noBlockID, noBlockID, blockIDT, noBlockID],
[ noBlockID, blockIDT, blockIDT, blockIDT],
[ noBlockID, noBlockID, noBlockID, noBlockID],
[ noBlockID, noBlockID, noBlockID, noBlockID] ] :: Block
blockT3 = [ [ noBlockID, noBlockID, blockIDT, noBlockID],
[ noBlockID, blockIDT, blockIDT, noBlockID],
[ noBlockID, noBlockID, blockIDT, noBlockID],
[ noBlockID, noBlockID, noBlockID, noBlockID] ] :: Block
blockZ0 = [ [ noBlockID, noBlockID, noBlockID, noBlockID],
[ noBlockID, blockIDZ, blockIDZ, noBlockID ],
[ noBlockID, noBlockID, blockIDZ, blockIDZ ],
[ noBlockID, noBlockID, noBlockID, noBlockID] ] :: Block
blockZ1 = [ [ noBlockID, noBlockID, noBlockID, blockIDZ],
[ noBlockID, noBlockID, blockIDZ, blockIDZ ],
[ noBlockID, noBlockID, blockIDZ, noBlockID ],
[ noBlockID, noBlockID, noBlockID, noBlockID] ] :: Block
-- The next two functions calculate the number of empty columns (left, right) and empty rows (top, bottom) bordering a block.
leftRight :: Block -> (Int, Int)
leftRight b = (minimum $ map (length . takeWhile noBlock . take 2) b,
minimum $ map (length . dropWhile (not . noBlock) . drop 3) b)
upDown :: Block -> (Int, Int)
upDown = (\b -> (length $ takeWhile (==True) b,
length $ dropWhile (==False) $ dropWhile (==True) b)) . map (all noBlock)
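-- For example (illustrative), for blockI0 defined above:
--   leftRight blockI0 == (0,0)   -- the row of I-cells spans the full width
--   upDown    blockI0 == (1,2)   -- one empty row above, two below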
isBlockOnBoard :: Pos -> Block -> Bool
isBlockOnBoard (x,y) b = let (l,r) = leftRight b
(u,d) = upDown b
(px,py) = pivit
in (y - (1 - u) >= 0) && (y + (2 - d) <= 19) &&
(x - (2 - l) >= 0) && (x + (1 - r) <= 9)
-- Precompute all blocks and their corresponding rotations
blocks :: [ [Block] ]
blocks = [ [ blockL0, blockL1, blockL2, blockL3 ],
[ blockJ0, blockJ1, blockJ2, blockJ3 ],
[ blockT0, blockT1, blockT2, blockT3 ],
[ blockZ0, blockZ1, blockZ0, blockZ1 ],
[ blockS0, blockS1, blockS0, blockS1 ],
[ blockO0, blockO0, blockO0, blockO0 ],
[ blockI0, blockI1, blockI0, blockI1 ] ]
rotatedBlock :: BlockID -> Int -> Block
rotatedBlock blockID rotation = (blocks !! blockID) !! rotation
-- Place a block on an empty board at position (x,y).
-- If the block cannot be placed at the specified position, fail (return Nothing).
placeBlockEmptyBoard' :: Pos -> BlockID -> Int -> Maybe Board
placeBlockEmptyBoard' p@(x,y) id row =
let (l,r) = leftRight block
block = rotatedBlock id row
(u,d) = upDown block
b = replicate ((y - 1) + u) emptyRow
a = replicate ((19 - y - 2) + d) emptyRow
bs = map (\row -> replicate ((x - 2) + l) noBlockID ++
take ((4-l) - r) (drop l row) ++
replicate ((9 - x - 1) + r) noBlockID) (drop u (take (4 - d) block))
in if isBlockOnBoard p block
then Just (b ++ bs ++ a)
else Nothing
data Update = UPlace | UReplace
deriving (Eq, Show)
-- Combine two boards.
-- The idea is that we have a board with a single (i.e. the current) block placed,
-- and we simply try to map that board over the current playing board.
-- Returns a merged board if the current piece can be placed correctly; otherwise
-- fails to construct a board.
-- There are two modes: place and replace.
overlayBoard :: Update -> Maybe Board -> Maybe Board -> Maybe Board
overlayBoard replace (Just b) (Just b') = zipWithM (zipWithM (f replace)) b b'
where
f UPlace v v' = if noBlock v && not (noBlock v')
then Just v'
else if isBlock v'
then Nothing
else Just v
f UReplace v v' = if isBlock v'
then Just noBlockID
else Just v
overlayBoard _ _ _ = Nothing
data Move = MoveL | MoveR | MoveD
deriving (Show, Eq)
type Pos = (Int, Int)
inBoard :: Pos -> Bool
inBoard (x,y) = (0 <= x && x < 10) && (0 <= y && y < 20)
initialPosition :: BlockID -> Pos
initialPosition id = (5,0)
iterationDelay :: Int -> Double
iterationDelay level = (11.0 - (fromIntegral level)) * 0.05
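-- e.g. (illustrative): iterationDelay 1 == 0.5 and iterationDelay 10 == 0.05,
-- so higher levels drop the piece faster.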
block = SField :: SField ("block" ::: BlockID) -- current block
nblock = SField :: SField ("nblock" ::: BlockID) -- next block
rotation = SField :: SField ("rotation" ::: Int) -- current rotation
pos = SField :: SField ("position" ::: Pos) -- current position
board = SField :: SField ("board" ::: Board) -- current board
score = SField :: SField ("score" ::: Int) -- current score
nlines = SField :: SField ("nlines" ::: Int) -- number of complete lines
idelay = SField :: SField ("idelay" ::: Double) -- delay between drop
ticks = SField :: SField ("ticks" ::: Double) -- ticks since start
frames = SField :: SField ("frames" ::: Double) -- number of frames processed
type World' = ["block" ::: BlockID,
"nblock" ::: BlockID,
"rotation" ::: Int,
"position" ::: Pos,
"board" ::: Board,
"score" ::: Int,
"nlines" ::: Int,
"idelay" ::: Double,
"ticks" ::: Double,
"frames" ::: Double]
type World = PlainFieldRec World'
mkWorld :: BlockID ->
BlockID ->
Int ->
Pos ->
Board ->
Int ->
Int ->
Double ->
Double ->
Double ->
World
mkWorld id nid r p b s l d t f =
block =: id <+> nblock =: nid <+> rotation =: r <+> pos =: p <+>
board =: b <+> score =: s <+> nlines =: l <+>
idelay =: d <+> ticks =: t <+> frames =: f
initialWorld :: BlockID -> BlockID -> World
initialWorld bid nbid = mkWorld bid nbid 0 (initialPosition bid)
(fromJust $ overlayBoard UPlace (Just emptyBoard) $
placeBlockEmptyBoard' (initialPosition bid) bid 0) 0 0
(iterationDelay 1) 0.0 0.0
--------------------------------------------------------------------------------
-- Graphics Stuff
--------------------------------------------------------------------------------
type Point2D = V2 GLfloat
type UV = Point2D
type VPos = "vertexCoord" ::: Point2D
type Tex = "texCoord" ::: Point2D
vpos :: SField VPos
vpos = SField
tex :: SField Tex
tex = SField
tex_width = (1 / 8) :: GLfloat
tex_height = 64 :: GLfloat
calcTex offset =
[[V2 (offset * tex_width) 1, V2 (offset') 1, V2 (offset * tex_width) 0],
[V2 (offset * tex_width) 0, V2 (offset') 0, V2 (offset') 1]]
where
offset' = tex_width + offset * tex_width
noBlockTex = calcTex 7 :: [[UV]]
blockTexI = calcTex 6 :: [[UV]]
blockTexO = calcTex 5 :: [[UV]]
blockTexS = calcTex 4 :: [[UV]]
blockTexZ = calcTex 3 :: [[UV]]
blockTexT = calcTex 2 :: [[UV]]
blockTexJ = calcTex 1 :: [[UV]]
blockTexL = calcTex 0 :: [[UV]]
blockTex = [ blockTexI, blockTexO, blockTexS, blockTexZ,
blockTexT, blockTexJ, blockTexL, noBlockTex]
-- this should move to a library as it can be part of the 2D engine
square :: GLfloat -> GLfloat -> [[Point2D]]
square x y = [[V2 (x * cell_width) (y * cell_height + cell_height),
V2 (x * cell_width + cell_width) (y * cell_height + cell_height),
V2 (x * cell_width) (y * cell_height)],
[V2 (x * cell_width) (y * cell_height),
V2 (x * cell_width + cell_width) (y * cell_height),
V2 (x * cell_width + cell_width) (y * cell_height + cell_height)
]
]
where
cell_width :: GLfloat
cell_width = 1.0 / 16
cell_height :: GLfloat
cell_height = 1.0 / 20
createScore :: Int -> [Char]
createScore s = let digits = take 5 $ toDigits s
in (take (5 - (length digits)) ['0','0','0','0','0']) ++
(map (\c -> toEnum $ c + 48) digits)
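-- e.g. (illustrative): createScore 123 == "00123", createScore 0 == "00000"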
-- As we only want to print the minimum number of digits, we
-- generate verts least-significant digit first and then simply draw the required
-- number of triangles. The score is limited to 5 digits!
scoreText :: GLfloat ->
GLfloat ->
CharInfo ->
[Char] ->
IO (BufferedVertices [VPos,Tex])
scoreText x y offsets digits = bufferVertices $ scoreText' x y offsets digits
scoreText' :: GLfloat ->
GLfloat ->
CharInfo ->
[Char] ->
[PlainFieldRec [VPos,Tex]]
scoreText' x y offsets (d1:d2:d3:d4:d5:_) = vt
where (o1,o1',h1) = charToOffsetWidthHeight offsets d1
(o2,o2',h2) = charToOffsetWidthHeight offsets d2
(o3,o3',h3) = charToOffsetWidthHeight offsets d3
(o4,o4',h4) = charToOffsetWidthHeight offsets d4
(o5,o5',h5) = charToOffsetWidthHeight offsets d5
vt :: [PlainFieldRec [VPos,Tex]]
vt = concat $ concat ps
f (sq, t) = zipWith (zipWith (\pt uv -> vpos =: pt <+> tex =: uv)) sq t
ps = map f [
(square x y, [[V2 o1 0, V2 o1' 0, V2 o1 h1],
[V2 o1 h1, V2 o1' h1, V2 o1' 0]]),
(square (x+1) y, [[V2 o2 0, V2 o2' 0, V2 o2 h2],
               [V2 o2 h2, V2 o2' h2, V2 o2' 0]]),
(square (x+2) y, [[V2 o3 0, V2 o3' 0, V2 o3 h3],
               [V2 o3 h3, V2 o3' h3, V2 o3' 0]]),
(square (x+3) y, [[V2 o4 0, V2 o4' 0, V2 o4 h4],
               [V2 o4 h4, V2 o4' h4, V2 o4' 0]]),
(square (x+4) y, [[V2 o5 0, V2 o5' 0, V2 o5 h5],
[V2 o5 h5, V2 o5' h5, V2 o5' 0]]) ]
-- Generate the triangles for the board; this is done just once.
-- If we used a SoA VAO, we could upload this once and never again.
graphicsBoard :: [[Point2D]]
graphicsBoard = concat $ concat $ b
where b = map (\y -> map (\x -> square x y) [0..9])
[19,18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0]
initialGraphicsBoard :: Board -> [PlainFieldRec [VPos,Tex]]
initialGraphicsBoard =
concat . zipWith (zipWith (\pt uv -> vpos =: pt <+> tex =: uv)) graphicsBoard . boardToTex
--boardToTex :: Board -> [[ [[UV]] ]]
boardToTex = concat . map (foldr (\a b -> (blockTex!!a) ++ b) [])
boardVerts :: Board -> IO (BufferedVertices [VPos,Tex])
boardVerts = bufferVertices . initialGraphicsBoard
type GLInfo = PlainFieldRec '["cam" ::: M33 GLfloat]
loadTextures :: [FilePath] -> IO [TextureObject]
loadTextures = fmap (either error id . sequence) . mapM aux
where aux f = do img <- readTexture ("resources" </> f)
traverse_ (const texFilter) img
return img
texFilter = do textureFilter Texture2D $= ((Nearest, Nothing), Nearest)
texture2DWrap $= (Repeated, ClampToEdge)
isPress :: GLFW.KeyState -> Bool
isPress GLFW.KeyState'Pressed = True
isPress GLFW.KeyState'Repeating = True
isPress _ = False
isKeyPressed :: TQueue EventKey -> IO (Bool, Bool, Bool, Bool, Bool)
isKeyPressed kq = do
k <- atomically $ tryReadTQueue kq
maybe (return (False, False, False, False, False))
(\(EventKey win k scancode ks mk) ->
if isPress ks
then case k of
Key'Left -> return (True, False, False, False, False)
Key'Right -> return (False, True, False, False, False)
Key'Up -> return (False, False, True, False, False)
Key'Down -> return (False, False, False, True, False)
Key'Q -> return (False, False, False, False, True)
Key'Escape -> return (False, False, False, False, True)
_ -> return (False, False, False, False, False)
else return (False, False, False, False, False)) k
calPos :: Bool -> Bool -> Pos -> Pos
calPos l r (x,y) = let x' = bool x (x-1) l
in (bool x' (x'+1) r, y)
calRot :: Int -> Bool -> Int
calRot r = bool r ((r + 1) `mod` 4)
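-- Illustrative behaviour of the input helpers above:
--   calPos True False (5,3) == (4,3)   -- left pressed: move one column left
--   calPos False True (5,3) == (6,3)   -- right pressed: move one column right
--   calRot 3 True           == 0       -- rotation wraps around modulo 4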
{- Game logic is pretty simple:
1. Check if quit, if so then clean up and exit
2. If game is not game over
1. Check if Left, Right, or Rotate key's pressed, if so
See if movement is valid (include move down with gravity) and update board if so
Otherwise, check to see if can move down and update board if so
Otherwise, introduce new piece
2. Check if any rows have been completed and remove and shuffle down, adding new rows at top, if necessary
3. Generate updated verts and texture and upload to GL
3. Renderer frame (this is not done in play function)
4. Continue (loop) to next frame
-}
-- TODO: add drop-key support
-- Returns Nothing on QUIT (and, for the moment, on game over)
play :: BufferedVertices [VPos, Tex] -> UI -> World -> IO (Maybe World)
play verts ui world = do
(l,r,u,d, q) <- isKeyPressed (keys ui) --isKeyPressed' ui
let d = (world ^. rLens idelay) - (timeStep ui)
yadd = bool 0 1 (d <= 0)
if q -- QUIT
then return Nothing
else do let (x,y) = world ^. rLens pos
(x',y') = calPos l r (x,y)
y'' = y'+ yadd
rot = world ^. rLens rotation
rot' = calRot rot u
bid = world ^. rLens block
nbid = world ^. rLens nblock
-- try and place updated piece (including any user movement)
b = overlayBoard UReplace (Just $ world ^. rLens board) $
placeBlockEmptyBoard' (x, y) bid rot
pb = placeBlockEmptyBoard' (x',y'') bid rot'
ub = overlayBoard UPlace b pb
nd = bool d (iterationDelay (levelUp (world ^. rLens nlines))) (d <= 0)
if isJust ub -- can piece be placed, including user movement
then do reloadVertices verts (fromList $ initialGraphicsBoard $ fromJust ub)
return $ Just $ mkWorld bid nbid rot' (x',y'') (fromJust ub)
(world ^. rLens score) (world ^. rLens nlines)
nd
(world ^. rLens ticks) ((world ^. rLens frames) + 1)
else do let pb' = placeBlockEmptyBoard' (x, y + yadd) bid rot
ub' = overlayBoard UPlace b pb'
if isJust ub'
then do reloadVertices verts (fromList $ initialGraphicsBoard $ fromJust ub')
return $ Just $ mkWorld bid nbid rot' (x, y + yadd) (fromJust ub')
(world ^. rLens score) (world ^. rLens nlines)
nd
(world ^. rLens ticks) ((world ^. rLens frames) + 1)
else do let (upb, nls) = updateRows $ world ^. rLens board
s = world ^. rLens score
nl = (world ^. rLens nlines) + nls
s' <- if nls > 0
then do let l = levelUp nl
ss = (s + calPoints nls l)
return ss
else return s
nbid' <- chooseBlock
let (nx,ny) = initialPosition nbid
npb = placeBlockEmptyBoard' (nx,ny) nbid 0
nb = overlayBoard UPlace (Just $ upb) npb
if isJust nb
then do reloadVertices verts (fromList $ initialGraphicsBoard $ fromJust nb)
return $ Just $ mkWorld nbid nbid' 0 (nx,ny) (fromJust nb)
s' nl
nd
(world ^. rLens ticks) ((world ^. rLens frames) + 1)
else return Nothing -- Gameover
renderer :: World -> IO (GLInfo -> World -> UI -> IO (Maybe World))
renderer iworld = do
ts <- simpleShaderProgram ("shaders"</>"text.vert") ("shaders"</>"text.frag")
s <- simpleShaderProgram ("shaders"</>"piece.vert") ("shaders"</>"piece.frag")
[blocks] <- loadTextures ["blocks.png"]
putStrLn "Loaded shaders"
setUniforms s (texSampler =: 0)
nbid <- chooseBlock
verts <- boardVerts (iworld ^. rLens board)
indices <- bufferIndices [0..(2 * 10 * 20 * 3)]
vao <- makeVAO $ do
enableVertices' s verts
bindVertices verts
bindBuffer ElementArrayBuffer $= Just indices
(chars, offsets) <- createAtlas ("resources"</>"ArcadeClassic.ttf") 48 1
setUniforms ts (texSampler =: 1)
tverts <- scoreText 11 18 offsets $ createScore 0
tindices <- bufferIndices [0 .. 2 * 3 * 5]
tvao <- makeVAO $ do
enableVertices' ts tverts
bindVertices tverts
bindBuffer ElementArrayBuffer $= Just tindices
return $ \i world ui -> do
w <- play verts ui world
if isJust w
then do
currentProgram $= Just (program s)
setUniforms s i
withVAO vao . withTextures2D [blocks] $ drawIndexedTris (2 * 10 * 20)
currentProgram $= Just (program ts)
setUniforms ts i
-- TODO: we should really only upload new score verts when it changes
-- this needs to be moved into play
reloadVertices tverts
(fromList $ scoreText' 11 18 offsets $ createScore $
((fromJust w) ^. rLens score))
withVAO tvao . withTextures2D [chars] $ drawIndexedTris (2*5)
return w
else return w
where
texSampler = SField :: SField ("tex" ::: GLint)
loop :: IO UI -> World -> IO ()
loop tick world = do
clearColor $= Color4 0.00 0.1 0.1 1
r <- Blocks.renderer world
go camera2D world r
where go :: Camera GLfloat -> World -> (GLInfo -> World -> UI -> IO (Maybe World)) -> IO ()
go c world draw = do
ui <- tick
clear [ColorBuffer, DepthBuffer]
let mCam = camMatrix c
info = SField =: mCam
cells = [0,0,0]
world' <- draw info world ui
if isNothing world'
then return ()
else do --let world'' = (ticks `rPut` (((fromJust world') ^. rLens ticks) + timeStep ui) ) (fromJust world')
--fps = (world'' ^. rLens frames) / ((world'' ^. rLens ticks))
--print ("FPS: " ++ show fps)
swapBuffers (window ui) >> go c (fromJust world') draw
main :: IO ()
main = do
let width = 580
height = 960
tick <- initGL "ABC or Another Blocks Clone" width height
bid <- chooseBlock
nbid <- chooseBlock
loop tick $ initialWorld bid nbid
return ()
|
bgaster/blocks
|
Blocks.hs
|
Haskell
|
mit
| 26,497
|
module Game ( Game
, Unfinished
, Position
, Coordinate
, start
, move
, isFinished
, bounds
, openPositions
, marks
) where
import Game.Internal
|
amar47shah/NICTA-TicTacToe
|
src/Game.hs
|
Haskell
|
mit
| 267
|
-- |
-- Module : Main
-- Description :
-- Copyright : (c) Jonatan H Sundqvist, 2015
-- License : MIT
-- Maintainer : Jonatan H Sundqvist
-- Stability : experimental|stable
-- Portability : POSIX (not sure)
--
-- Created December 21 2015
-- TODO | -
-- -
-- SPEC | -
-- -
--------------------------------------------------------------------------------------------------------------------------------------------
-- GHC Pragmas
--------------------------------------------------------------------------------------------------------------------------------------------
--------------------------------------------------------------------------------------------------------------------------------------------
-- API
--------------------------------------------------------------------------------------------------------------------------------------------
module Main where
--------------------------------------------------------------------------------------------------------------------------------------------
-- We'll need these
--------------------------------------------------------------------------------------------------------------------------------------------
import System.Environment (getArgs)
import Control.Concurrent
import qualified Elrond.Core as Core
import qualified Elrond.Server as Server
import qualified Elrond.Client as Client
--------------------------------------------------------------------------------------------------------------------------------------------
-- Entry point
--------------------------------------------------------------------------------------------------------------------------------------------
-- |
main :: IO ()
main = do
putStrLn "Elvish nonsense and dwarvish tenacity."
args <- getArgs
case take 1 args of
["server"] -> Server.start
["client"] -> Client.start
_ -> putStrLn "Put up your umbrellas, folks. Shit just hit the fan."
threadDelay $ round (5.0 * 10^6)
putStrLn "Shutting down..."
error "TODO: Figure out a better way to stop the server."
|
SwiftsNamesake/Elrond
|
app/Main.hs
|
Haskell
|
mit
| 2,092
|
{-# LANGUAGE QuasiQuotes #-}
module TestLedgerProcess (ledgerProcessSpec) where
import Test.Hspec (Spec, describe, it, shouldReturn)
import Data.String.Interpolate (i)
import Data.String.Interpolate.Util (unindent)
import Text.RE.Replace
import Text.RE.TDFA.String
import qualified Data.Expenses.Ledger.Process as LP
{-|
Removes @version=".*"@, @id=".*"@, @ref=".*"@
attributes from the output of the @ledger xml@ command,
since these are non-deterministically generated.
|-}
stripUnstableAttributes :: String -> String
stripUnstableAttributes xml =
let
mtch = xml *=~ [re| (id|ref|version)="[^"]*">|]
in
replaceAll ">" mtch
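-- e.g. (illustrative):
--   stripUnstableAttributes "<ledger version=\"3.1.1\">" == "<ledger>"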
-- | Replace CRLF with LF.
normalizeNewlines :: String -> String
normalizeNewlines s =
let
mtch = s *=~ [re|\r\n|]
in
replaceAll "\n" mtch
ledgerProcessSpec :: Spec
ledgerProcessSpec =
describe "Data.Expenses.Ledger.Process" $
describe "xmlOfString" $
it "should return digits for simple input" $
stripUnstableAttributes . normalizeNewlines <$> LP.xmlOfString simpleLedgerJournal
`shouldReturn` simpleLedgerXml
where
simpleLedgerJournal = unindent [i|
bucket Assets:SGD
2018/01/01 food
Expenses:Food 5 SGD
|]
simpleLedgerXml = unindent [i|
<?xml version="1.0" encoding="utf-8"?>
<ledger>
<commodities>
<commodity flags="S">
<symbol>SGD</symbol>
</commodity>
</commodities>
<accounts>
<account>
<name/>
<fullname/>
<account-total>
<amount>
<commodity flags="S">
<symbol>SGD</symbol>
</commodity>
<quantity>0</quantity>
</amount>
</account-total>
<account>
<name>Assets</name>
<fullname>Assets</fullname>
<account-total>
<amount>
<commodity flags="S">
<symbol>SGD</symbol>
</commodity>
<quantity>-5</quantity>
</amount>
</account-total>
<account>
<name>SGD</name>
<fullname>Assets:SGD</fullname>
<account-amount>
<amount>
<commodity flags="S">
<symbol>SGD</symbol>
</commodity>
<quantity>-5</quantity>
</amount>
</account-amount>
<account-total>
<amount>
<commodity flags="S">
<symbol>SGD</symbol>
</commodity>
<quantity>-5</quantity>
</amount>
</account-total>
</account>
</account>
<account>
<name>Expenses</name>
<fullname>Expenses</fullname>
<account-total>
<amount>
<commodity flags="S">
<symbol>SGD</symbol>
</commodity>
<quantity>5</quantity>
</amount>
</account-total>
<account>
<name>Food</name>
<fullname>Expenses:Food</fullname>
<account-amount>
<amount>
<commodity flags="S">
<symbol>SGD</symbol>
</commodity>
<quantity>5</quantity>
</amount>
</account-amount>
<account-total>
<amount>
<commodity flags="S">
<symbol>SGD</symbol>
</commodity>
<quantity>5</quantity>
</amount>
</account-total>
</account>
</account>
</account>
</accounts>
<transactions>
<transaction>
<date>2018/01/01</date>
<payee>food</payee>
<postings>
<posting>
<account>
<name>Expenses:Food</name>
</account>
<post-amount>
<amount>
<commodity flags="S">
<symbol>SGD</symbol>
</commodity>
<quantity>5</quantity>
</amount>
</post-amount>
<total>
<amount>
<commodity flags="S">
<symbol>SGD</symbol>
</commodity>
<quantity>5</quantity>
</amount>
</total>
</posting>
<posting generated="true">
<account>
<name>Assets:SGD</name>
</account>
<post-amount>
<amount>
<commodity flags="S">
<symbol>SGD</symbol>
</commodity>
<quantity>-5</quantity>
</amount>
</post-amount>
<total>
<amount>
<commodity flags="S">
<symbol>SGD</symbol>
</commodity>
<quantity>0</quantity>
</amount>
</total>
</posting>
</postings>
</transaction>
</transactions>
</ledger>
|]
|
rgoulter/expenses-csv-utils
|
test/TestLedgerProcess.hs
|
Haskell
|
mit
| 5,361
|
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE ViewPatterns #-}
module Data.Neural.FeedForward where
import Control.Applicative
import Control.DeepSeq
import Data.Bifunctor
import Data.List
import Data.Reflection
import Data.Neural.Types
import Data.Neural.Utility
import Data.Proxy
import GHC.TypeLits
import Linear
import Linear.V
import Numeric.AD.Rank1.Forward
import System.Random
import Text.Printf
import qualified Data.Binary as B
import qualified Data.List as P
import qualified Data.Vector as V
data Network :: Nat -> [Nat] -> Nat -> *
-> * where
NetOL :: !(FLayer i o a) -> Network i '[] o a
NetIL :: KnownNat j => !(FLayer i j a) -> !(Network j hs o a) -> Network i (j ': hs) o a
infixr 5 `NetIL`
data SomeNet :: * -> * where
SomeNet :: (KnownNat i, KnownNat o) => Network i hs o a -> SomeNet a
data OpaqueNet :: Nat -> Nat -> * -> * where
OpaqueNet :: (KnownNat i, KnownNat o) => Network i hs o a -> OpaqueNet i o a
runNetwork :: forall i hs o a. (KnownNat i, Num a) => (a -> a) -> (a -> a) -> Network i hs o a -> V i a -> V o a
runNetwork f g = go
where
go :: forall i' hs' o'. KnownNat i' => Network i' hs' o' a -> V i' a -> V o' a
go n v = case n of
NetOL l -> g <$> runFLayer l v
NetIL l n' -> go n' (f <$> runFLayer l v)
{-# INLINE runNetwork #-}
trainSample :: forall i o a hs. (KnownNat i, KnownNat o, Num a)
=> a -> (Forward a -> Forward a) -> (Forward a -> Forward a)
-> V i a -> V o a
-> Network i hs o a
-> Network i hs o a
trainSample step f g x0 y n0 = snd $ go x0 n0
where
-- x: input
-- y: target
-- d: x * w
-- o: f d
go :: forall j hs'. KnownNat j => V j a -> Network j hs' o a -> (V j a, Network j hs' o a)
go x n =
case n of
NetOL l@(FLayer ln) ->
let d :: V o a
d = runFLayer l x
delta :: V o a
ln' :: V o (Node j a)
(delta, ln') = unzipV $ liftA3 (adjustOutput xb) ln y d
-- drop contrib from bias term
deltaws :: V j a
-- deltaws = delta *! (nodeWeights <$> ln')
deltaws = delta *! (nodeWeights <$> ln)
l' :: FLayer j o a
l' = FLayer ln'
in (deltaws, NetOL l')
NetIL l@(FLayer ln :: FLayer j k a) (n' :: Network k ks o a) ->
let d :: V k a
d = runFLayer l x
o :: V k a
o = fst . diff' f <$> d
deltaos :: V k a
n'' :: Network k ks o a
(deltaos, n'') = go o n'
delta :: V k a
ln' :: V k (Node j a)
(delta, ln') = unzipV $ liftA3 (adjustHidden xb) ln deltaos d
deltaws :: V j a
-- deltaws = delta *! (nodeWeights <$> ln')
deltaws = delta *! (nodeWeights <$> ln)
l' :: FLayer j k a
l' = FLayer ln'
in (deltaws, l' `NetIL` n'')
where
xb = Node 1 x
-- {-# INLINE go #-}
-- per neuron/node traversal
-- every neuron has a delta
adjustOutput :: KnownNat j => Node j a -> Node j a -> a -> a -> (a, Node j a)
adjustOutput xb node y' d = (delta, adjustWeights delta xb node)
where
delta = let (o, o') = diff' g d
in (o - y') * o'
-- delta = (f d - y) * f' d
{-# INLINE adjustOutput #-}
-- delta = d - y
adjustHidden :: KnownNat j => Node j a -> Node j a -> a -> a -> (a, Node j a)
adjustHidden xb node deltao d = (delta, adjustWeights delta xb node)
where
-- instead of (o - target), use deltao, weighted average of errors
delta = deltao * diff f d
{-# INLINE adjustHidden #-}
-- delta = deltao
-- per weight traversal
adjustWeights :: KnownNat j => a -> Node j a -> Node j a -> Node j a
adjustWeights delta = liftA2 (\w n -> n - step * delta * w)
{-# INLINE adjustWeights #-}
{-# INLINE trainSample #-}
networkHeatmap :: (KnownNat i, Num a) => (a -> a) -> (a -> a) -> Network i hs o a -> V i a -> [[a]]
networkHeatmap f g n v =
vToList v : case n of
NetOL l -> [vToList (g <$> runFLayer l v)]
NetIL l n' -> networkHeatmap f g n' $ f <$> runFLayer l v
where
vToList = V.toList . toVector
drawHeatmap :: KnownNat i => (Double -> Double) -> (Double -> Double) -> Network i hs o Double -> V i Double -> String
drawHeatmap f g n = unlines
. map (intercalate "\t")
. P.transpose
. map (padLists ' ')
. padLists ""
. map (padLists ' ' . map (printf "% .3f"))
. networkHeatmap f g n
where
padLists :: forall a. a -> [[a]] -> [[a]]
padLists p xss = flip map xss $ \xs ->
let d = (maxlen - length xs) `div` 2
in take maxlen $ replicate d p ++ xs ++ repeat p
where
maxlen = maximum (map length xss)
drawNetwork :: forall i hs o. Dim i => Network i hs o Double -> String
drawNetwork = unlines
. map (intercalate "\t")
. P.transpose
. map (padLists ' ')
. padLists ""
. map (intercalate [""])
. doublePad ""
. ([]:)
. (replicate (reflectDim (Proxy :: Proxy i)) ["o"] :)
. addDot
. (map . map . map) (printf "% .3f")
. networkToList
where
addDot :: [[[String]]] -> [[[String]]]
addDot = concatMap $ \xs -> [xs, replicate (length xs) ["o"]]
-- bracketize :: String -> String
-- bracketize str = '[' : str ++ "]"
padLists :: forall a. a -> [[a]] -> [[a]]
padLists p xss = flip map xss $ \xs ->
let d = (maxlen - length xs) `div` 2
in take maxlen $ replicate d p ++ xs ++ repeat p
where
maxlen = maximum (map length xss)
doublePad :: forall a. a -> [[[a]]] -> [[[a]]]
doublePad p xsss = flip (map . map) xsss $ \xs ->
let d = (maxlen - length xs) `div` 2
in take maxlen $ replicate d p ++ xs ++ repeat p
where
maxlen = maximum (concatMap (map length) xsss)
nodeToList :: forall j a. Node j a -> [a]
nodeToList (Node b (V w)) = b : V.toList w
layerToList :: forall i' o' a. FLayer i' o' a -> [[a]]
layerToList (FLayer (V l)) = nodeToList <$> V.toList l
networkToList :: forall i' hs' o' a. Network i' hs' o' a -> [[[a]]]
networkToList n' = case n' of
NetOL l -> [layerToList l]
NetIL l n'' -> layerToList l : networkToList n''
randomNetwork :: (RandomGen g, Random (Network i hs o a), Num a)
=> g
-> (Network i hs o a, g)
randomNetwork g = (first . fmap) (subtract 1 . (*2)) $ random g
randomNetworkIO :: (Random (Network i hs o a), Num a) => IO (Network i hs o a)
randomNetworkIO = fmap (subtract 1 . (*2)) <$> randomIO
networkStructure :: forall i hs o a. (KnownNat i, KnownNat o) => Network i hs o a -> (Int, [Int], Int)
networkStructure (NetOL _) = (reflectDim (Proxy :: Proxy i), [], reflectDim (Proxy :: Proxy o))
networkStructure (NetIL _ n') = (reflectDim (Proxy :: Proxy i), j : hs, o)
where
(j, hs, o) = networkStructure n'
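-- For instance (illustrative): for a value of type Network 2 '[3, 4] 1 Double,
-- networkStructure returns (2, [3, 4], 1).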
-- induceOutput :: forall i hs o a. (KnownNat i, KnownNat o, Floating a, Ord a) => a -> a -> (a, a) -> (a -> a) -> Network i hs o a -> V o a -> V i a -> V i a
-- induceOutput nudge step (mn,mx) f n y x0@(V x0v) = V . fst $ foldl' g (x0v, errFrom x0) [0..V.length x0v - 1]
-- where
-- errFrom = qd y . runNetwork f n
-- g (x, e) i = let x' = V.modify (\v -> VM.write v i . clamp . (+ nudge) =<< VM.read v i) x
-- e' = errFrom (V x')
-- x'' = V.modify (\v -> VM.write v i . clamp . subtract (nudge*step/e') =<< VM.read v i) x
-- e'' = errFrom (V x'')
-- in (x'', e'')
-- clamp = min mx . max mn
-- | Boilerplate instances
instance Functor (Network i hs o) where
fmap f n = case n of
NetOL l -> NetOL (fmap f l)
NetIL l n' -> fmap f l `NetIL` fmap f n'
{-# INLINE fmap #-}
instance (KnownNat i, KnownNat o) => Applicative (Network i '[] o) where
pure = NetOL . pure
{-# INLINE pure #-}
NetOL f <*> NetOL x = NetOL (f <*> x)
{-# INLINE (<*>) #-}
instance (KnownNat i, KnownNat j, Applicative (Network j hs o)) => Applicative (Network i (j ': hs) o) where
pure x = pure x `NetIL` pure x
{-# INLINE pure #-}
NetIL fi fr <*> NetIL xi xr = NetIL (fi <*> xi) (fr <*> xr)
{-# INLINE (<*>) #-}
instance (KnownNat i, KnownNat o, Random a) => Random (Network i '[] o a) where
random = first NetOL . random
randomR (NetOL rmn, NetOL rmx) = first NetOL . randomR (rmn, rmx)
instance (KnownNat i, KnownNat j, Random a, Random (Network j hs o a)) => Random (Network i (j ': hs) o a) where
random g = let (l, g') = random g
in first (l `NetIL`) (random g')
randomR (NetIL lmn nmn, NetIL lmx nmx) g =
let (l , g') = randomR (lmn, lmx) g
in first (l `NetIL`) (randomR (nmn, nmx) g')
instance (KnownNat i, KnownNat o, B.Binary a) => B.Binary (Network i '[] o a) where
put (NetOL l) = B.put l
get = NetOL <$> B.get
-- instance (KnownNat i, KnownNat o, KnownNat j, B.Binary a, B.Binary (Network j hs o a)) => B.Binary (Network i (j ': hs) o a) where
instance (KnownNat i, KnownNat j, B.Binary a, B.Binary (Network j hs o a)) => B.Binary (Network i (j ': hs) o a) where
put (NetIL l n') = B.put l *> B.put n'
get = NetIL <$> B.get <*> B.get
instance NFData a => NFData (Network i hs o a) where
rnf (NetOL (force -> !_)) = ()
rnf (NetIL (force -> !_) (force -> !_)) = ()
deriving instance Show a => Show (Network i hs o a)
deriving instance Foldable (Network i hs o)
deriving instance Traversable (Network i hs o)
deriving instance Show a => Show (SomeNet a)
deriving instance Functor SomeNet
deriving instance Foldable SomeNet
deriving instance Traversable SomeNet
instance B.Binary a => B.Binary (SomeNet a) where
put sn = case sn of
SomeNet (n :: Network i hs o a) -> do
B.put $ natVal (Proxy :: Proxy i)
B.put $ natVal (Proxy :: Proxy o)
B.put $ OpaqueNet n
get = do
i <- B.get
o <- B.get
reifyNat i $ \(Proxy :: Proxy i) ->
reifyNat o $ \(Proxy :: Proxy o) -> do
oqn <- B.get :: B.Get (OpaqueNet i o a)
return $ case oqn of
OpaqueNet n -> SomeNet n
deriving instance Show a => Show (OpaqueNet i o a)
deriving instance Functor (OpaqueNet i o)
deriving instance Foldable (OpaqueNet i o)
deriving instance Traversable (OpaqueNet i o)
instance (KnownNat i, KnownNat o, B.Binary a) => B.Binary (OpaqueNet i o a) where
put oqn = case oqn of
OpaqueNet n -> do
case n of
NetOL l -> do
B.put True
B.put l
NetIL (l :: FLayer i j a) (n' :: Network j js o a) -> do
B.put False
B.put $ natVal (Proxy :: Proxy j)
B.put l
B.put (OpaqueNet n')
get = do
isOL <- B.get
if isOL
then do
OpaqueNet . NetOL <$> B.get
else do
j <- B.get
reifyNat j $ \(Proxy :: Proxy j) -> do
l <- B.get :: B.Get (FLayer i j a)
nqo <- B.get :: B.Get (OpaqueNet j o a)
return $ case nqo of
OpaqueNet n -> OpaqueNet $ l `NetIL` n
asOpaqueNet :: SomeNet a
-> (forall i o. (KnownNat i, KnownNat o) => OpaqueNet i o a -> r)
-> r
asOpaqueNet sn f = case sn of
SomeNet n -> f (OpaqueNet n)
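-- A minimal usage sketch (illustration only; 'netShape' is not part of the
-- original module): 'asOpaqueNet' reintroduces the hidden input and output
-- sizes as 'KnownNat' constraints, so a network deserialized as a 'SomeNet'
-- can still be asked for its layer structure.
netShape :: SomeNet Double -> (Int, [Int], Int)
netShape sn = asOpaqueNet sn $ \oqn -> case oqn of
  OpaqueNet n -> networkStructure n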
|
mstksg/neural
|
src/Data/Neural/FeedForward.hs
|
Haskell
|
mit
| 12,834
|
module Typing.Expr (i_expr) where
import Typing.Constraint
import Typing.Env
import Typing.Substitution
import Typing.Subtyping
import Typing.TypeError
import Typing.Types
import Typing.Util
import Absyn.Base
import Absyn.Meta
import qualified Absyn.Untyped as U
import qualified Absyn.Typed as T
import Util.Error
import Control.Monad (when, zipWithM)
import Data.List (union)
i_expr :: U.Expr -> Tc (T.Expr, Type)
i_expr (meta :< Literal lit) =
return (meta :< Literal lit, i_lit lit)
i_expr (meta :< Ident [i]) = do
ty <- lookupValue i
return (meta :< Ident [i], ty)
-- TODO: Clear this up - should be handled by the renamer now
i_expr (_ :< Ident (_:_)) = undefined
i_expr (_ :< Ident []) = undefined
i_expr (meta :< ParenthesizedExpr expr) = do
(expr', ty) <- i_expr expr
return (meta :< ParenthesizedExpr expr', ty)
i_expr (meta :< BinOp _ _ lhs op rhs) = do
tyOp@(Fun gen _ _) <- lookupValue op
(lhs', lhsTy) <- i_expr lhs
(rhs', rhsTy) <- i_expr rhs
(retType', typeArgs) <- inferTyArgs [lhsTy, rhsTy] tyOp
constraintArgs <- inferConstraintArgs gen [] typeArgs
return (meta :< BinOp constraintArgs [] lhs' (op, tyOp) rhs', retType')
i_expr (meta :< Match expr cases) = do
(expr', ty) <- i_expr expr
(cases', casesTy) <- unzip <$> mapM (i_case ty) cases
let retTy = case casesTy of
[] -> void
x:xs -> foldl (\/) x xs
return (meta :< Match expr' cases', retTy)
i_expr (meta :< Call fn constraintArgs types []) =
i_expr (meta :< Call fn constraintArgs types [emptySpanFromPos (spanEnd meta) :< VoidExpr])
where
emptySpanFromPos pos = SourceSpan { spanStart = pos, spanEnd = pos }
i_expr (meta :< Call fn _ types args) = do
(fn', tyFn) <- i_expr fn
let tyFn' = normalizeFnType tyFn
(tyFn''@(Fun gen _ retType), skippedVars) <- adjustFnType (null types) args tyFn'
(retType', args', typeArgs) <-
case (tyFn'', types) of
(Fun (_:_) params _, []) -> do
(_, argsTy) <- unzip <$> mapM i_expr args
(retType, typeArgs) <- inferTyArgs argsTy tyFn''
let s = zipSubst (map fst gen) typeArgs
let params' = map (applySubst s) params
args' <- zipWithM instSubtype args params'
return (retType, args', typeArgs)
(Fun gen params _, _) -> do
types' <- mapM resolveType types
let s = zipSubst (map fst gen) types'
let params' = map (applySubst s) params
args' <- zipWithM instSubtype args params'
return (applySubst s retType, args', types')
_ -> undefined
constraintArgs <- inferConstraintArgs gen skippedVars typeArgs
return (meta :< Call fn' constraintArgs types args', retType')
i_expr (meta :< Record fields) = do
  (exprs, types) <- unzip <$> mapM (i_expr . snd) fields
let labels = map fst fields
let fieldsTy = zip labels types
let recordTy = Rec fieldsTy
let record = Record (zip labels exprs)
return (meta :< record, recordTy)
i_expr (meta :< FieldAccess expr field) = do
(expr', ty) <- i_expr expr
let
aux :: Type -> [(String, Type)] -> Tc (T.Expr, Type)
aux ty r = case lookup field r of
Nothing -> throwError $ UnknownField ty field
Just t -> return (meta :< FieldAccess expr' field, t)
case ty of
Rec r -> aux ty r
Cls _ -> do
vars <- lookupInstanceVars ty
aux ty vars
_ -> throwError . GenericError $ "Expected a record, but found value of type " ++ show ty
i_expr (meta :< If ifCond ifBody elseBody) = do
(ifCond', ty) <- i_expr ifCond
ty <:! bool
(ifBody', ifTy) <- i_body ifBody
(elseBody', elseTy) <- i_body elseBody
return (meta :< If ifCond' ifBody' elseBody', ifTy \/ elseTy)
i_expr (meta :< List _ items) = do
(items', itemsTy) <- unzip <$> mapM i_expr items
(ty, itemTy) <- case itemsTy of
[] -> do
nilTy <- lookupValue "Nil"
return (nilTy, Bot)
x:xs ->
let ty = foldl (\/) x xs
in return (list ty, ty)
return (meta :< List (Just itemTy) items', ty)
i_expr (meta :< FnExpr fn) = do
(fn', ty) <- i_fn fn
return $ (meta :< FnExpr fn', ty)
i_expr (meta :< Negate _ expr) = do
(expr', ty) <- i_expr expr
intf <- lookupInterface "Std.Number"
constrArgs <- boundsCheck ty intf
return (meta :< Negate constrArgs expr', ty)
-- Expressions generated during type checking
i_expr (meta :< VoidExpr) = return (meta :< VoidExpr, void)
i_expr (_ :< TypeCall {}) = undefined
instSubtype :: U.Expr -> Type -> Tc T.Expr
instSubtype arg@(meta :< _) ty = do
(arg', argTy) <- i_expr arg
arg'' <- case (argTy, ty) of
(Fun gen@(_:_) _ _, Fun [] _ _) -> do
typeArgs <- inferTyAbs argTy ty
constraintArgs <- inferConstraintArgs gen [] typeArgs
return $ meta :< TypeCall arg' constraintArgs
_ -> do
argTy <:! ty
return arg'
return arg''
inferConstraintArgs :: [BoundVar] -> [BoundVar] -> [Type] -> Tc [ConstraintArg]
inferConstraintArgs gen skippedVars typeArgs = do
concat <$> zipWithM findConstrArgs gen typeArgs'
where
typeArgs' = zipWith findHoles gen typeArgs
findHoles var ty =
if var `elem` skippedVars
then mkHole var
else ty
findConstrArgs (_, []) tyArg = do
return [CAType tyArg]
findConstrArgs (_, bounds) tyArg = do
concat <$> mapM (boundsCheck tyArg) bounds
-- Checks if `t1` implements the interface in `t2`
-- returns a list with a single ConstraintArg indicating
-- how to satisfy the bounds. See `ConstraintArg` for
-- an explanation on the kinds of ConstraintArgs
boundsCheck :: Type -> Intf -> Tc [ConstraintArg]
boundsCheck t1 t2@(Intf name _ _) = do
args <- boundsCheck' t1 t2
if null args
then throwError $ MissingImplementation name t1
else return args
boundsCheck' :: Type -> Intf -> Tc [ConstraintArg]
boundsCheck' v@(Var _ bounds) intf = do
return $ if intf `elem` bounds
then [CABound v intf]
else []
boundsCheck' (TyApp ty args) intf@(Intf name _ _) = do
implementations <- lookupImplementations name
case lookup ty implementations of
Nothing -> return []
Just vars -> do
let aux arg (_, bounds) =
concat <$> mapM (boundsCheck arg) bounds
args <- concat <$> zipWithM aux args vars
return [CAPoly ty intf args]
boundsCheck' (Forall params ty) intf =
boundsCheck' (params \\ ty) intf
boundsCheck' (TyAbs params ty) intf =
boundsCheck' (params \\ ty) intf
boundsCheck' Bot intf =
return [CABound Bot intf]
boundsCheck' ty intf@(Intf name _ _) = do
implementations <- lookupImplementations name
case lookup ty implementations of
Just [] -> return [CABound ty intf]
_ -> return []
normalizeFnType :: Type -> Type
normalizeFnType (Fun gen params (Fun [] params' retTy)) =
normalizeFnType (Fun gen (params ++ params') retTy)
normalizeFnType ty = ty
adjustFnType :: Bool -> [a] -> Type -> Tc (Type, [BoundVar])
adjustFnType allowHoles args fn@(Fun gen params retType) = do
let lArgs = length args
case compare lArgs (length params) of
EQ -> return (fn, [])
LT ->
let headArgs = take lArgs params
tailArgs = drop lArgs params
skippedGen = filter aux gen
aux (v, _) = allowHoles && v `elem` (fv $ Fun [] tailArgs retType) && v `notElem` (foldl union [] $ map fv headArgs)
in return (Fun gen headArgs $ Fun skippedGen tailArgs retType, skippedGen)
GT -> throwError ArityMismatch
adjustFnType _ _ ty = throwError . GenericError $ "Expected a function, found " ++ show ty
i_lit :: Literal -> Type
i_lit (Integer _) = int
i_lit (Float _) = float
i_lit (Char _) = char
i_lit (String _) = string
i_case :: Type -> U.Case -> Tc (T.Case, Type)
i_case ty (meta :< Case pattern caseBody) = do
m <- startMarker
pattern' <- c_pattern ty pattern
endMarker m
(caseBody', ty) <- i_body caseBody
clearMarker m
return (meta :< Case pattern' caseBody', ty)
c_pattern :: Type -> U.Pattern -> Tc T.Pattern
c_pattern _ (meta :< PatDefault) =
return $ meta :< PatDefault
c_pattern ty (meta :< PatLiteral l) = do
let litTy = i_lit l
litTy <:! ty
return $ meta :< PatLiteral l
c_pattern ty (meta :< PatVar v) = do
insertValue v ty
return $ meta :< PatVar v
c_pattern ty@(Rec tyFields) (meta :< PatRecord fields) = do
fields' <- mapM aux fields
return $ meta :< PatRecord fields'
where
aux (key, pat) = do
case lookup key tyFields of
Just ty -> do
pat' <- c_pattern ty pat
return (key, pat')
Nothing ->
throwError . GenericError $ "Matching against field `" ++ key ++ "`, which is not included in the type of the value being matched, `" ++ show ty ++ "`"
c_pattern ty (_ :< PatRecord _) = do
throwError . GenericError $ "Using a record pattern, but value being matched has type `" ++ show ty ++ "`"
c_pattern ty (meta :< PatList pats rest) = do
itemTy <- getItemTy ty
pats' <- mapM (c_pattern itemTy) pats
rest' <- case rest of
NoRest -> return NoRest
DiscardRest -> return DiscardRest
NamedRest n -> do
insertValue n ty
return (NamedRest n)
return $ meta :< PatList pats' rest'
where
getItemTy (Forall _ (TyApp (Con "List") _)) =
return Top
getItemTy (TyApp (Con "List") [ty]) =
return ty
getItemTy _ =
throwError . GenericError $ "Using a list pattern, but value being matched has type `" ++ show ty ++ "`"
c_pattern ty (meta :< PatCtor name vars) = do
ctorTy <- lookupValue name
let (fnTy, params, retTy) = case ctorTy of
fn@(Fun [] params retTy) -> (fn, params, retTy)
fn@(Fun gen params retTy) -> (fn, params, Forall (map fst gen) retTy)
t -> (Fun [] [] t, [], t)
when (length vars /= length params) (throwError ArityMismatch)
retTy <:! ty
let substs = case (retTy, ty) of
(Forall gen _, TyApp _ args) -> zipSubst gen args
_ -> emptySubst
let params' = map (applySubst substs) params
vars' <- zipWithM c_pattern params' vars
return $ meta :< PatCtor (name, fnTy) vars'
|
tadeuzagallo/verve-lang
|
src/Typing/Expr.hs
|
Haskell
|
mit
| 10,404
|
{-# LANGUAGE Arrows #-}
module Game.Client.Objects.Network where
import FRP.Yampa as Yampa
import FRP.Yampa.Geometry
import Graphics.UI.SDL as SDL
import Graphics.UI.SDL.Events as SDL.Events
import Graphics.UI.SDL.Keysym as SDL.Keysym
import Game.Shared.Types
import Game.Shared.Networking
import Game.Shared.Object
import Game.Shared.Arrows
import Game.Client.Objects.Towers
import Game.Client.Objects.Input
import Game.Client.Objects.Minions
import Game.Client.Objects.Base
import Game.Client.Components.BasicComponents
import Game.Client.Components.Projectiles
import Game.Client.Object
import Game.Client.Resources
import Game.Client.Input
import Game.Client.Graphics
import Game.Client.Networking
-- |Manager that handles object creation events, generating
-- the game object and submitting a spawn request for it
netManager :: Object
netManager = proc objInput -> do
-- Connection events
connSuccEvent <- connectionSuccess -< oiNetwork objInput
connFailedEvent <- connectionFailed -< oiNetwork objInput
-- Object creation
newPlayerObjectEvent <- createObject Player -< oiNetwork objInput
newPlayerProjectile1ObjectEvent <- createObject (PlayerProjectile 1) -< oiNetwork objInput
newPlayerProjectile2ObjectEvent <- createObject (PlayerProjectile 2) -< oiNetwork objInput
newTurretObjectEvent <- createObject Turret -< oiNetwork objInput
newTurretProjectileObjectEvent <- createObject TurretProjectile -< oiNetwork objInput
newMinionObjectEvent <- createObject Minion -< oiNetwork objInput
newMinionProjectileObjectEvent <- createObject MinionProjectile -< oiNetwork objInput
newNexusObjectEvent <- createObject Nexus -< oiNetwork objInput
-- Return state
returnA -< (defaultObjOutput objInput) {
ooSpawnRequests = foldl (mergeBy (++)) noEvent [connSuccEvent `tag` [playerObject],
newPlayerObjectEvent `tagUsing` map networkPlayer,
newPlayerProjectile1ObjectEvent `tagUsing` map networkPlayerProjectile,
newPlayerProjectile2ObjectEvent `tagUsing` map networkPlayerProjectile,
newTurretObjectEvent `tagUsing` map turretObject,
newTurretProjectileObjectEvent `tagUsing` map towerProjectile,
newMinionObjectEvent `tagUsing` map minionObject,
newMinionProjectileObjectEvent `tagUsing` map minionProjectile,
newNexusObjectEvent `tagUsing` map nexusObject]
}
-- |Game object that draws the map background
mapBackground :: Object
mapBackground = proc objInput -> do
-- Return state
returnA -< (defaultObjOutput objInput) {
ooGraphic = draw backgroundImage (Mask Nothing 0 0),
ooGraphicLayer = GameLayer 0,
ooGameObject = (defaultGameObject objInput) {
goPos = zeroVector,
goSize = vector2 3072 3072
}
}
-- |Game object for a player not managed by this client
networkPlayer :: GameObject -- ^The game object representation of this player at the time this object was constructed
-> Object
networkPlayer obj = proc objInput -> do
-- Components
basicComponent <- basicObject obj -< BasicObjectInput {
boiNetwork = oiNetwork objInput
}
statsComponent <- objectStats obj -< ObjectStatsInput {
osiNetwork = oiNetwork objInput
}
let stats = osoStats statsComponent
position = booPosition basicComponent
-- Components
healthbarComponent <- healthbarDisplay 1.5 (vector2 (-6) (-12)) 44 6 -< HealthbarDisplayInput {
hdiHealthChangedEvent = osoHealthChanged statsComponent,
hdiObjectPosition = position,
hdiCurrentHealth = stHealth stats,
hdiCurrentMaxHealth = stMaxHealth stats
}
-- Return state
let (x, y) = vectorRoundedComponents position
team = goTeam obj
returnA -< (defaultObjOutput objInput) {
ooKillRequest = booObjectDestroyed basicComponent,
ooGraphic = drawAll [draw (playerImage team) (Mask Nothing x y),
hdoHealthbarGraphic healthbarComponent],
ooGraphicLayer = GameLayer 10,
ooGameObject = obj {
goPos = position,
goStats = Just stats
}
}
-- |Projectile fired by a networked player
networkPlayerProjectile :: GameObject -- ^Game object representation at creation time of this object
-> Object
networkPlayerProjectile obj = proc objInput -> do
-- Components
basicComponent <- basicObject obj -< BasicObjectInput {
boiNetwork = oiNetwork objInput
}
statsComponent <- objectStats obj -< ObjectStatsInput {
osiNetwork = oiNetwork objInput
}
let stats = osoStats statsComponent
rec projectileComponent <- directionalProjectile (enemyTeam (goTeam obj)) obj 5 -< DirectionalProjectileInput {
dpiAllCollisions = oiCollidingWith objInput,
dpiAllObjects = oiAllObjects objInput,
dpiSpeed = fromIntegral (stSpeed stats),
dpiCurrPos = position
}
position <- (^+^ (goPos obj)) ^<< integral -< dpoMoveDelta projectileComponent
-- Return state
let (x, y) = vectorRoundedComponents position
returnA -< (defaultObjOutput objInput) {
ooKillRequest = lMerge (booObjectDestroyed basicComponent) (dpoHitTargetEvent projectileComponent),
ooGraphic = draw turretProjectileImage (Mask Nothing x y),
ooGraphicLayer = GameLayer 6,
ooGameObject = obj {
goPos = position
}
}
|
Mattiemus/LaneWars
|
Game/Client/Objects/Network.hs
|
Haskell
|
mit
| 6,036
|
{-# LANGUAGE TemplateHaskell #-}
module SoOSiM.Components.Thread.Types where
import Control.Lens
import Control.Concurrent.STM.TQueue
import SoOSiM.Components.Common
import SoOSiM.Components.ResourceDescriptor
import SoOSiM.Components.SoOSApplicationGraph
data ThreadState = Blocked | Waiting | Executing | Killed
deriving Eq
data Deadline = Infinity | Exact Int
deriving (Eq,Show)
instance Ord Deadline where
compare Infinity Infinity = EQ
compare (Exact i) Infinity = LT
compare Infinity (Exact i) = GT
compare (Exact i) (Exact j) = compare i j
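-- Illustrative comparisons (added for exposition; not in the original source):
-- 'Infinity' compares greater than any finite deadline, so taking the minimum
-- of a list of deadlines picks the earliest finite one.
--
-- >>> compare (Exact 5) Infinity
-- LT
-- >>> minimum [Infinity, Exact 10, Exact 3]
-- Exact 3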
data Thread
= Thread
{ -- | The thread unique id
_threadId :: ThreadId
-- | number of incoming \"ports\", each in-port has an id from 0 to (n_in - 1)
, _n_in :: Int
    -- | number of outgoing \"ports\", each out-port has an id from 0 to (n_out - 1)
, _n_out :: Int
-- | incoming ports: ntokens per port
, _in_ports :: [TQueue (Int,Int)]
-- | outgoing links
--
-- contains the pair (thread_dest_id, in_port_id) of the destination threads
, _out_ports :: [(ThreadId,TQueue (Int,Int))]
-- | Number of (simulation) cycles needed to complete one instance of the thread
, _exec_cycles :: Int
-- | resource requirements
, _rr :: ResourceDescriptor
    -- | The current execution state: 'Blocked', 'Waiting', 'Executing', or 'Killed'
, _execution_state :: ThreadState
-- | The id of the resource where this thread is executing
, _res_id :: ResourceId
    -- | The SimTime at which the current instance was activated.
    -- This is updated by Scheduler.wake_up_threads when the thread is
    -- moved from blocked to ready.
    -- It can be used to sort ready threads in FIFO order.
, _activation_time :: Int
, _program :: [AppCommand]
, _localMem :: (Int,Int)
, _relativeDeadlineOut :: Deadline
, _relativeDeadlineIn :: Deadline
}
instance Show Thread where
show = show . _threadId
makeLenses ''Thread
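-- Usage sketch (illustration only; not part of the original module):
-- 'makeLenses' derives one lens per underscore-prefixed record field, dropping
-- the underscore, so client code can use the usual Control.Lens combinators:
--
-- > view threadId t
-- > set execution_state Executing t
-- > over activation_time (+ 1) t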
|
christiaanb/SoOSiM-components
|
src/SoOSiM/Components/Thread/Types.hs
|
Haskell
|
mit
| 1,994
|
-------------------------------------------------------------------------------
-- |
-- Module : System.Hardware.Arduino.SamplePrograms.SevenSegment
-- Copyright : (c) Levent Erkok
-- License : BSD3
-- Maintainer : erkokl@gmail.com
-- Stability : experimental
--
-- Control a single seven-segment display, echoing user's key presses
-- on it verbatim. We use a shift-register to reduce the number of
-- pins we need on the Arduino to control the display.
-------------------------------------------------------------------------------
module System.Hardware.Arduino.SamplePrograms.SevenSegment where
import Control.Monad (forever)
import Control.Monad.Trans (liftIO)
import Data.Bits (testBit)
import Data.Word (Word8)
import System.IO (hSetBuffering, stdin, BufferMode(NoBuffering))
import System.Hardware.Arduino
import System.Hardware.Arduino.Parts.ShiftRegisters
import System.Hardware.Arduino.Parts.SevenSegmentCodes
-- | Connections for the Texas Instruments 74HC595 shift-register. Datasheet: <http://www.ti.com/lit/ds/symlink/sn74hc595.pdf>.
-- In our circuit, we merely use pins 8 thru 12 on the Arduino to control the 'serial', 'nEnable', 'rClock', 'sClock', and 'nClear'
-- lines, respectively. Since we do not need to read the output of the shift-register, we leave the 'mbBits' field unconnected.
sr :: SR_74HC595
sr = SR_74HC595 { serial = digital 8
, nEnable = digital 9
, rClock = digital 10
, sClock = digital 11
, nClear = digital 12
, mbBits = Nothing
}
-- | Seven-segment display demo. For each key-press, we display an equivalent pattern
-- on the connected 7-segment display. Note that most characters are not mappable, so
-- we use approximations if available. We use a shift-register to reduce the pin
-- requirements on the Arduino, setting the bits serially.
--
-- Parts:
--
-- * The seven-segment digit we use is a common-cathode single-digit display, such as
-- TDSG5150 (<http://www.vishay.com/docs/83126/83126.pdf>), or Microvity's IS121,
-- but almost any such digit would do. Just pay attention to the line-connections,
--     and do not forget the limiting resistors: 220 ohms should do nicely.
--
-- * The shift-register is Texas-Instruments 74HC595: <http://www.ti.com/lit/ds/symlink/sn74hc595.pdf>.
-- Make sure to connect the register output lines to the seven-segment displays with the corresponding
-- letters. That is, shift-registers @Q_A@ (Chip-pin 15) should connect to segment @A@; @Q_B@ (Chip-pin 1)
-- to segment @B@, and so on. We do not use the shift-register @Q_H'@ (Chip-pin 9) in this design.
--
-- <<http://github.com/LeventErkok/hArduino/raw/master/System/Hardware/Arduino/SamplePrograms/Schematics/SevenSegment.png>>
sevenSegment :: IO ()
sevenSegment = withArduino False "/dev/cu.usbmodemfd131" $ do
initialize sr
liftIO $ do hSetBuffering stdin NoBuffering
putStrLn "Seven-Segment-Display demo."
putStrLn "For each key-press, we will try to display it as a 7-segment character."
putStrLn "If there is no good mapping (which is common), we'll just display a dot."
putStrLn ""
putStrLn "Press-keys to be shown on the display, Ctrl-C to quit.."
forever repl
where pushWord w = do mapM_ (push sr) [w `testBit` i | i <- [0..7]]
store sr
repl = do c <- liftIO getChar
case char2SS c of
Just w -> pushWord w
Nothing -> pushWord (0x01::Word8) -- the dot, which also nicely covers the '.'
|
aufheben/lambda-arduino
|
packages/hArduino-0.9/System/Hardware/Arduino/SamplePrograms/SevenSegment.hs
|
Haskell
|
mit
| 3,812
|
module Graphics.Rendering.OpenGL.Extensions where
import Data.List as List
import Graphics.Rendering.OpenGL as OpenGL
originFrustum :: Rational -> Rational -> Rational -> Rational -> IO()
originFrustum = \width height near far -> do
let x_radius = ((/) width 2)
let y_radius = ((/) height 2)
let bounds = [(-) 0 x_radius, (+) 0 x_radius, (-) 0 y_radius, (+) 0 y_radius, near, far]
let [left, right, bottom, top, neard, fard] = (List.map fromRational bounds)
(OpenGL.frustum left right bottom top neard fard)
centeredOrtho :: Rational -> Rational -> Rational -> Rational -> Rational -> Rational -> IO()
centeredOrtho = \x y z width height depth -> do
let x_radius = ((/) width 2)
let y_radius = ((/) height 2)
let z_radius = ((/) depth 2)
let bounds = [(-) x x_radius, (+) x x_radius, (-) y y_radius, (+) y y_radius, (-) z z_radius, (+) z z_radius]
let [x0, x1, y0, y1, z0, z1] = (List.map fromRational bounds)
(OpenGL.ortho x0 x1 y0 y1 z0 z1)
originOrtho = (centeredOrtho 0 0 0)
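-- Hypothetical usage sketch (not part of the original module): load a centered
-- orthographic projection covering an 800x600x100 viewing volume around the
-- origin. 'matrixMode', '($=)', 'Projection' and 'loadIdentity' are the stock
-- OpenGL package API; only 'setupProjection' itself is invented here.
setupProjection :: IO ()
setupProjection = do
  OpenGL.matrixMode OpenGL.$= OpenGL.Projection
  OpenGL.loadIdentity
  originOrtho 800 600 100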
|
stevedonnelly/haskell
|
code/Graphics/Rendering/OpenGL/Extensions.hs
|
Haskell
|
mit
| 1,028
|
#!/usr/bin/env runhaskell
main = putStrLn "Hello World!"
|
pwittchen/learning-haskell
|
tasks/00_hello_world.hs
|
Haskell
|
apache-2.0
| 57
|
{-# LANGUAGE TemplateHaskell #-}
module Hack3.Lens where
import Hack3
import Control.Lens
makeLenses ''Env
makeLenses ''Response
|
nfjinjing/hack3-lens
|
src/Hack3/Lens.hs
|
Haskell
|
apache-2.0
| 133
|
{-# LANGUAGE CPP, TupleSections, BangPatterns, LambdaCase #-}
{-# OPTIONS_GHC -Wwarn #-}
-----------------------------------------------------------------------------
-- |
-- Module : Haddock.Interface.Create
-- Copyright : (c) Simon Marlow 2003-2006,
-- David Waern 2006-2009,
-- Mateusz Kowalczyk 2013
-- License : BSD-like
--
-- Maintainer : haddock@projects.haskell.org
-- Stability : experimental
-- Portability : portable
-----------------------------------------------------------------------------
module Haddock.Interface.Create (createInterface) where
import Documentation.Haddock.Doc (metaDocAppend)
import Haddock.Types
import Haddock.Options
import Haddock.GhcUtils
import Haddock.Utils
import Haddock.Convert
import Haddock.Interface.LexParseRn
import qualified Data.Map as M
import Data.Map (Map)
import Data.List
import Data.Maybe
import Data.Monoid
import Data.Ord
import Control.Applicative
import Control.Arrow (second)
import Control.DeepSeq
import Control.Monad
import Data.Function (on)
import qualified Data.Foldable as F
import qualified Packages
import qualified Module
import qualified SrcLoc
import GHC
import HscTypes
import Name
import Bag
import RdrName
import TcRnTypes
import FastString (concatFS)
import qualified Outputable as O
-- | Use a 'TypecheckedModule' to produce an 'Interface'.
-- To do this, we need access to already processed modules in the topological
-- sort. That's what's in the 'IfaceMap'.
createInterface :: TypecheckedModule -> [Flag] -> IfaceMap -> InstIfaceMap -> ErrMsgGhc Interface
createInterface tm flags modMap instIfaceMap = do
let ms = pm_mod_summary . tm_parsed_module $ tm
mi = moduleInfo tm
L _ hsm = parsedSource tm
!safety = modInfoSafe mi
mdl = ms_mod ms
dflags = ms_hspp_opts ms
!instances = modInfoInstances mi
!fam_instances = md_fam_insts md
!exportedNames = modInfoExports mi
(TcGblEnv {tcg_rdr_env = gre, tcg_warns = warnings}, md) = tm_internals_ tm
-- The renamed source should always be available to us, but it's best
-- to be on the safe side.
(group_, mayExports, mayDocHeader) <-
case renamedSource tm of
Nothing -> do
liftErrMsg $ tell [ "Warning: Renamed source is not available." ]
return (emptyRnGroup, Nothing, Nothing)
Just (x, _, y, z) -> return (x, y, z)
opts0 <- liftErrMsg $ mkDocOpts (haddockOptions dflags) flags mdl
let opts
| Flag_IgnoreAllExports `elem` flags = OptIgnoreExports : opts0
| otherwise = opts0
(!info, mbDoc) <- liftErrMsg $ processModuleHeader dflags gre safety mayDocHeader
let declsWithDocs = topDecls group_
fixMap = mkFixMap group_
(decls, _) = unzip declsWithDocs
localInsts = filter (nameIsLocalOrFrom mdl) $ map getName instances
++ map getName fam_instances
-- Locations of all TH splices
splices = [ l | L l (SpliceD _) <- hsmodDecls hsm ]
maps@(!docMap, !argMap, !subMap, !declMap, _) =
mkMaps dflags gre localInsts declsWithDocs
let exports0 = fmap (reverse . map unLoc) mayExports
exports
| OptIgnoreExports `elem` opts = Nothing
| otherwise = exports0
warningMap = mkWarningMap dflags warnings gre exportedNames
let allWarnings = M.unions (warningMap : map ifaceWarningMap (M.elems modMap))
exportItems <- mkExportItems modMap mdl allWarnings gre exportedNames decls
maps fixMap splices exports instIfaceMap dflags
let !visibleNames = mkVisibleNames maps exportItems opts
-- Measure haddock documentation coverage.
let prunedExportItems0 = pruneExportItems exportItems
!haddockable = 1 + length exportItems -- module + exports
!haddocked = (if isJust mbDoc then 1 else 0) + length prunedExportItems0
!coverage = (haddockable, haddocked)
-- Prune the export list to just those declarations that have
-- documentation, if the 'prune' option is on.
let prunedExportItems'
| OptPrune `elem` opts = prunedExportItems0
| otherwise = exportItems
!prunedExportItems = seqList prunedExportItems' `seq` prunedExportItems'
let !aliases =
mkAliasMap dflags $ tm_renamed_source tm
modWarn = moduleWarning dflags gre warnings
return $! Interface {
ifaceMod = mdl
, ifaceOrigFilename = msHsFilePath ms
, ifaceInfo = info
, ifaceDoc = Documentation mbDoc modWarn
, ifaceRnDoc = Documentation Nothing Nothing
, ifaceOptions = opts
, ifaceDocMap = docMap
, ifaceArgMap = argMap
, ifaceRnDocMap = M.empty
, ifaceRnArgMap = M.empty
, ifaceExportItems = prunedExportItems
, ifaceRnExportItems = []
, ifaceExports = exportedNames
, ifaceVisibleExports = visibleNames
, ifaceDeclMap = declMap
, ifaceSubMap = subMap
, ifaceFixMap = fixMap
, ifaceModuleAliases = aliases
, ifaceInstances = instances
, ifaceFamInstances = fam_instances
, ifaceHaddockCoverage = coverage
, ifaceWarningMap = warningMap
}
mkAliasMap :: DynFlags -> Maybe RenamedSource -> M.Map Module ModuleName
mkAliasMap dflags mRenamedSource =
case mRenamedSource of
Nothing -> M.empty
Just (_,impDecls,_,_) ->
M.fromList $
mapMaybe (\(SrcLoc.L _ impDecl) -> do
alias <- ideclAs impDecl
return $
(lookupModuleDyn dflags
(fmap Module.fsToPackageKey $
fmap snd $ ideclPkgQual impDecl)
(case ideclName impDecl of SrcLoc.L _ name -> name),
alias))
impDecls
-- similar to GHC.lookupModule
lookupModuleDyn ::
DynFlags -> Maybe PackageKey -> ModuleName -> Module
lookupModuleDyn _ (Just pkgId) mdlName =
Module.mkModule pkgId mdlName
lookupModuleDyn dflags Nothing mdlName =
case Packages.lookupModuleInAllPackages dflags mdlName of
(m,_):_ -> m
[] -> Module.mkModule Module.mainPackageKey mdlName
-------------------------------------------------------------------------------
-- Warnings
-------------------------------------------------------------------------------
mkWarningMap :: DynFlags -> Warnings -> GlobalRdrEnv -> [Name] -> WarningMap
mkWarningMap dflags warnings gre exps = case warnings of
NoWarnings -> M.empty
WarnAll _ -> M.empty
WarnSome ws ->
let ws' = [ (n, w) | (occ, w) <- ws, elt <- lookupGlobalRdrEnv gre occ
, let n = gre_name elt, n `elem` exps ]
in M.fromList $ map (second $ parseWarning dflags gre) ws'
moduleWarning :: DynFlags -> GlobalRdrEnv -> Warnings -> Maybe (Doc Name)
moduleWarning _ _ NoWarnings = Nothing
moduleWarning _ _ (WarnSome _) = Nothing
moduleWarning dflags gre (WarnAll w) = Just $ parseWarning dflags gre w
parseWarning :: DynFlags -> GlobalRdrEnv -> WarningTxt -> Doc Name
parseWarning dflags gre w = force $ case w of
DeprecatedTxt _ msg -> format "Deprecated: " (concatFS $ map (snd . unLoc) msg)
WarningTxt _ msg -> format "Warning: " (concatFS $ map (snd . unLoc) msg)
where
format x xs = DocWarning . DocParagraph . DocAppend (DocString x)
. processDocString dflags gre $ HsDocString xs
-------------------------------------------------------------------------------
-- Doc options
--
-- Haddock options that are embedded in the source file
-------------------------------------------------------------------------------
mkDocOpts :: Maybe String -> [Flag] -> Module -> ErrMsgM [DocOption]
mkDocOpts mbOpts flags mdl = do
opts <- case mbOpts of
Just opts -> case words $ replace ',' ' ' opts of
[] -> tell ["No option supplied to DOC_OPTION/doc_option"] >> return []
xs -> liftM catMaybes (mapM parseOption xs)
Nothing -> return []
hm <- if Flag_HideModule (moduleString mdl) `elem` flags
then return $ OptHide : opts
else return opts
if Flag_ShowExtensions (moduleString mdl) `elem` flags
then return $ OptShowExtensions : hm
else return hm
parseOption :: String -> ErrMsgM (Maybe DocOption)
parseOption "hide" = return (Just OptHide)
parseOption "prune" = return (Just OptPrune)
parseOption "ignore-exports" = return (Just OptIgnoreExports)
parseOption "not-home" = return (Just OptNotHome)
parseOption "show-extensions" = return (Just OptShowExtensions)
parseOption other = tell ["Unrecognised option: " ++ other] >> return Nothing
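-- For reference, these option strings typically come from an OPTIONS_HADDOCK
-- pragma in the documented source file (illustrative example, not code from
-- this module):
--
-- > {-# OPTIONS_HADDOCK hide, prune #-}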
--------------------------------------------------------------------------------
-- Maps
--------------------------------------------------------------------------------
type Maps = (DocMap Name, ArgMap Name, SubMap, DeclMap, InstMap)
-- | Create 'Maps' by looping through the declarations. For each declaration,
-- find its names, its subordinates, and its doc strings. Process doc strings
-- into 'Doc's.
mkMaps :: DynFlags
-> GlobalRdrEnv
-> [Name]
-> [(LHsDecl Name, [HsDocString])]
-> Maps
mkMaps dflags gre instances decls =
let (a, b, c, d) = unzip4 $ map mappings decls
in (f' $ map (nubBy ((==) `on` fst)) a , f b, f c, f d, instanceMap)
where
f :: (Ord a, Monoid b) => [[(a, b)]] -> Map a b
f = M.fromListWith (<>) . concat
f' :: [[(Name, MDoc Name)]] -> Map Name (MDoc Name)
f' = M.fromListWith metaDocAppend . concat
mappings :: (LHsDecl Name, [HsDocString])
-> ( [(Name, MDoc Name)]
, [(Name, Map Int (MDoc Name))]
, [(Name, [Name])]
, [(Name, [LHsDecl Name])]
)
mappings (ldecl, docStrs) =
let L l decl = ldecl
declDoc :: [HsDocString] -> Map Int HsDocString
-> (Maybe (MDoc Name), Map Int (MDoc Name))
declDoc strs m =
let doc' = processDocStrings dflags gre strs
m' = M.map (processDocStringParas dflags gre) m
in (doc', m')
(doc, args) = declDoc docStrs (typeDocs decl)
subs :: [(Name, [HsDocString], Map Int HsDocString)]
subs = subordinates instanceMap decl
(subDocs, subArgs) = unzip $ map (\(_, strs, m) -> declDoc strs m) subs
ns = names l decl
subNs = [ n | (n, _, _) <- subs ]
dm = [ (n, d) | (n, Just d) <- zip ns (repeat doc) ++ zip subNs subDocs ]
am = [ (n, args) | n <- ns ] ++ zip subNs subArgs
sm = [ (n, subNs) | n <- ns ]
cm = [ (n, [ldecl]) | n <- ns ++ subNs ]
in seqList ns `seq`
seqList subNs `seq`
doc `seq`
seqList subDocs `seq`
seqList subArgs `seq`
(dm, am, sm, cm)
instanceMap :: Map SrcSpan Name
instanceMap = M.fromList [ (getSrcSpan n, n) | n <- instances ]
names :: SrcSpan -> HsDecl Name -> [Name]
names l (InstD d) = maybeToList (M.lookup loc instanceMap) -- See note [2].
where loc = case d of
TyFamInstD _ -> l -- The CoAx's loc is the whole line, but only for TFs
_ -> getInstLoc d
names _ decl = getMainDeclBinder decl
-- Note [2]:
------------
-- We relate ClsInsts to InstDecls using the SrcSpans buried inside them.
-- That should work for normal user-written instances (from looking at GHC
-- sources). We can assume that commented instances are user-written.
-- This lets us relate Names (from ClsInsts) to comments (associated
-- with InstDecls).
--------------------------------------------------------------------------------
-- Declarations
--------------------------------------------------------------------------------
-- | Get all subordinate declarations inside a declaration, and their docs.
subordinates :: InstMap -> HsDecl Name -> [(Name, [HsDocString], Map Int HsDocString)]
subordinates instMap decl = case decl of
InstD (ClsInstD d) -> do
DataFamInstDecl { dfid_tycon = L l _
, dfid_defn = def } <- unLoc <$> cid_datafam_insts d
[ (n, [], M.empty) | Just n <- [M.lookup l instMap] ] ++ dataSubs def
InstD (DataFamInstD d) -> dataSubs (dfid_defn d)
TyClD d | isClassDecl d -> classSubs d
| isDataDecl d -> dataSubs (tcdDataDefn d)
_ -> []
where
classSubs dd = [ (name, doc, typeDocs d) | (L _ d, doc) <- classDecls dd
, name <- getMainDeclBinder d, not (isValD d)
]
dataSubs dd = constrs ++ fields
where
cons = map unL $ (dd_cons dd)
constrs = [ (unL cname, maybeToList $ fmap unL $ con_doc c, M.empty)
| c <- cons, cname <- con_names c ]
fields = [ (unL n, maybeToList $ fmap unL doc, M.empty)
| RecCon flds <- map con_details cons
, L _ (ConDeclField ns _ doc) <- (unLoc flds)
, n <- ns ]
-- | Extract function argument docs from inside types.
typeDocs :: HsDecl Name -> Map Int HsDocString
typeDocs d =
let docs = go 0 in
case d of
SigD (TypeSig _ ty _) -> docs (unLoc ty)
SigD (PatSynSig _ _ req prov ty) ->
let allTys = ty : concat [ unLoc req, unLoc prov ]
in F.foldMap (docs . unLoc) allTys
ForD (ForeignImport _ ty _ _) -> docs (unLoc ty)
TyClD (SynDecl { tcdRhs = ty }) -> docs (unLoc ty)
_ -> M.empty
where
go n (HsForAllTy _ _ _ _ ty) = go n (unLoc ty)
go n (HsFunTy (L _ (HsDocTy _ (L _ x))) (L _ ty)) = M.insert n x $ go (n+1) ty
go n (HsFunTy _ ty) = go (n+1) (unLoc ty)
go n (HsDocTy _ (L _ doc)) = M.singleton n doc
go _ _ = M.empty
-- | All the sub declarations of a class (that we handle), ordered by
-- source location, with documentation attached if it exists.
classDecls :: TyClDecl Name -> [(LHsDecl Name, [HsDocString])]
classDecls class_ = filterDecls . collectDocs . sortByLoc $ decls
where
decls = docs ++ defs ++ sigs ++ ats
docs = mkDecls tcdDocs DocD class_
defs = mkDecls (bagToList . tcdMeths) ValD class_
sigs = mkDecls tcdSigs SigD class_
ats = mkDecls tcdATs (TyClD . FamDecl) class_
-- | The top-level declarations of a module that we care about,
-- ordered by source location, with documentation attached if it exists.
topDecls :: HsGroup Name -> [(LHsDecl Name, [HsDocString])]
topDecls = filterClasses . filterDecls . collectDocs . sortByLoc . ungroup
-- | Extract a map of fixity declarations only
mkFixMap :: HsGroup Name -> FixMap
mkFixMap group_ = M.fromList [ (n,f)
| L _ (FixitySig ns f) <- hs_fixds group_,
L _ n <- ns ]
-- | Take all declarations except pragmas, infix decls, rules from an 'HsGroup'.
ungroup :: HsGroup Name -> [LHsDecl Name]
ungroup group_ =
mkDecls (tyClGroupConcat . hs_tyclds) TyClD group_ ++
mkDecls hs_derivds DerivD group_ ++
mkDecls hs_defds DefD group_ ++
mkDecls hs_fords ForD group_ ++
mkDecls hs_docs DocD group_ ++
mkDecls hs_instds InstD group_ ++
mkDecls (typesigs . hs_valds) SigD group_ ++
mkDecls (valbinds . hs_valds) ValD group_
where
typesigs (ValBindsOut _ sigs) = filter isVanillaLSig sigs
typesigs _ = error "expected ValBindsOut"
valbinds (ValBindsOut binds _) = concatMap bagToList . snd . unzip $ binds
valbinds _ = error "expected ValBindsOut"
-- | Take a field of declarations from a data structure and create HsDecls
-- using the given constructor
mkDecls :: (a -> [Located b]) -> (b -> c) -> a -> [Located c]
mkDecls field con struct = [ L loc (con decl) | L loc decl <- field struct ]
-- | Sort by source location
sortByLoc :: [Located a] -> [Located a]
sortByLoc = sortBy (comparing getLoc)
--------------------------------------------------------------------------------
-- Filtering of declarations
--
-- We filter out declarations that we don't intend to handle later.
--------------------------------------------------------------------------------
-- | Filter out declarations that we don't handle in Haddock
filterDecls :: [(LHsDecl a, doc)] -> [(LHsDecl a, doc)]
filterDecls = filter (isHandled . unL . fst)
where
isHandled (ForD (ForeignImport {})) = True
isHandled (TyClD {}) = True
isHandled (InstD {}) = True
isHandled (SigD d) = isVanillaLSig (reL d)
isHandled (ValD _) = True
-- we keep doc declarations to be able to get at named docs
isHandled (DocD _) = True
isHandled _ = False
-- | Go through all class declarations and filter their sub-declarations
filterClasses :: [(LHsDecl a, doc)] -> [(LHsDecl a, doc)]
filterClasses decls = [ if isClassD d then (L loc (filterClass d), doc) else x
| x@(L loc d, doc) <- decls ]
where
filterClass (TyClD c) =
TyClD $ c { tcdSigs = filter (liftA2 (||) isVanillaLSig isMinimalLSig) $ tcdSigs c }
filterClass _ = error "expected TyClD"
--------------------------------------------------------------------------------
-- Collect docs
--
-- To be able to attach the right Haddock comment to the right declaration,
-- we sort the declarations by their SrcLoc and "collect" the docs for each
-- declaration.
--------------------------------------------------------------------------------
-- | Collect docs and attach them to the right declarations.
collectDocs :: [LHsDecl a] -> [(LHsDecl a, [HsDocString])]
collectDocs = go Nothing []
where
go Nothing _ [] = []
go (Just prev) docs [] = finished prev docs []
go prev docs (L _ (DocD (DocCommentNext str)) : ds)
| Nothing <- prev = go Nothing (str:docs) ds
| Just decl <- prev = finished decl docs (go Nothing [str] ds)
go prev docs (L _ (DocD (DocCommentPrev str)) : ds) = go prev (str:docs) ds
go Nothing docs (d:ds) = go (Just d) docs ds
go (Just prev) docs (d:ds) = finished prev docs (go (Just d) [] ds)
finished decl docs rest = (decl, reverse docs) : rest
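-- For intuition (illustrative only; not from the original source): given a
-- declaration list corresponding to
--
-- > -- | Next-style comment, queued until the following declaration arrives.
-- > data T = T
-- > -- ^ Prev-style comment, appended to the declaration just seen.
--
-- 'collectDocs' pairs the single @data T@ declaration with both doc strings,
-- in source order.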
-- | Build the list of items that will become the documentation, from the
-- export list. At this point, the list of ExportItems is in terms of
-- original names.
--
-- We create the export items even if the module is hidden, since they
-- might be useful when creating the export items for other modules.
mkExportItems
:: IfaceMap
-> Module -- this module
-> WarningMap
-> GlobalRdrEnv
-> [Name] -- exported names (orig)
-> [LHsDecl Name]
-> Maps
-> FixMap
-> [SrcSpan] -- splice locations
-> Maybe [IE Name]
-> InstIfaceMap
-> DynFlags
-> ErrMsgGhc [ExportItem Name]
mkExportItems
modMap thisMod warnings gre exportedNames decls
maps@(docMap, argMap, subMap, declMap, instMap) fixMap splices optExports instIfaceMap dflags =
case optExports of
Nothing -> fullModuleContents dflags warnings gre maps fixMap splices decls
Just exports -> liftM concat $ mapM lookupExport exports
where
lookupExport (IEVar (L _ x)) = declWith x
lookupExport (IEThingAbs (L _ t)) = declWith t
lookupExport (IEThingAll (L _ t)) = declWith t
lookupExport (IEThingWith (L _ t) _) = declWith t
lookupExport (IEModuleContents (L _ m)) =
moduleExports thisMod m dflags warnings gre exportedNames decls modMap instIfaceMap maps fixMap splices
lookupExport (IEGroup lev docStr) = return $
return . ExportGroup lev "" $ processDocString dflags gre docStr
lookupExport (IEDoc docStr) = return $
return . ExportDoc $ processDocStringParas dflags gre docStr
lookupExport (IEDocNamed str) = liftErrMsg $
findNamedDoc str [ unL d | d <- decls ] >>= return . \case
Nothing -> []
Just doc -> return . ExportDoc $ processDocStringParas dflags gre doc
declWith :: Name -> ErrMsgGhc [ ExportItem Name ]
declWith t =
case findDecl t of
([L l (ValD _)], (doc, _)) -> do
-- Top-level binding without type signature
export <- hiValExportItem dflags t doc (l `elem` splices) $ M.lookup t fixMap
return [export]
(ds, docs_) | decl : _ <- filter (not . isValD . unLoc) ds ->
let declNames = getMainDeclBinder (unL decl)
in case () of
_
-- temp hack: we filter out separately exported ATs, since we haven't decided how
             -- to handle them yet. We should really give a warning message also, and filter the
-- name out in mkVisibleNames...
| t `elem` declATs (unL decl) -> return []
-- We should not show a subordinate by itself if any of its
-- parents is also exported. See note [1].
| t `notElem` declNames,
Just p <- find isExported (parents t $ unL decl) ->
do liftErrMsg $ tell [
"Warning: " ++ moduleString thisMod ++ ": " ++
pretty dflags (nameOccName t) ++ " is exported separately but " ++
"will be documented under " ++ pretty dflags (nameOccName p) ++
". Consider exporting it together with its parent(s)" ++
" for code clarity." ]
return []
-- normal case
| otherwise -> case decl of
-- A single signature might refer to many names, but we
-- create an export item for a single name only. So we
-- modify the signature to contain only that single name.
L loc (SigD sig) ->
-- fromJust is safe since we already checked in guards
-- that 't' is a name declared in this declaration.
let newDecl = L loc . SigD . fromJust $ filterSigNames (== t) sig
in return [ mkExportDecl t newDecl docs_ ]
L loc (TyClD cl@ClassDecl{}) -> do
mdef <- liftGhcToErrMsgGhc $ minimalDef t
let sig = maybeToList $ fmap (noLoc . MinimalSig mempty . fmap noLoc) mdef
return [ mkExportDecl t
(L loc $ TyClD cl { tcdSigs = sig ++ tcdSigs cl }) docs_ ]
_ -> return [ mkExportDecl t decl docs_ ]
-- Declaration from another package
([], _) -> do
mayDecl <- hiDecl dflags t
case mayDecl of
Nothing -> return [ ExportNoDecl t [] ]
Just decl ->
-- We try to get the subs and docs
-- from the installed .haddock file for that package.
case M.lookup (nameModule t) instIfaceMap of
Nothing -> do
liftErrMsg $ tell
["Warning: Couldn't find .haddock for export " ++ pretty dflags t]
let subs_ = [ (n, noDocForDecl) | (n, _, _) <- subordinates instMap (unLoc decl) ]
return [ mkExportDecl t decl (noDocForDecl, subs_) ]
Just iface ->
return [ mkExportDecl t decl (lookupDocs t warnings (instDocMap iface) (instArgMap iface) (instSubMap iface)) ]
_ -> return []
mkExportDecl :: Name -> LHsDecl Name -> (DocForDecl Name, [(Name, DocForDecl Name)]) -> ExportItem Name
mkExportDecl name decl (doc, subs) = decl'
where
decl' = ExportDecl (restrictTo sub_names (extractDecl name mdl decl)) doc subs' [] fixities False
mdl = nameModule name
subs' = filter (isExported . fst) subs
sub_names = map fst subs'
fixities = [ (n, f) | n <- name:sub_names, Just f <- [M.lookup n fixMap] ]
isExported = (`elem` exportedNames)
findDecl :: Name -> ([LHsDecl Name], (DocForDecl Name, [(Name, DocForDecl Name)]))
findDecl n
| m == thisMod, Just ds <- M.lookup n declMap =
(ds, lookupDocs n warnings docMap argMap subMap)
| Just iface <- M.lookup m modMap, Just ds <- M.lookup n (ifaceDeclMap iface) =
(ds, lookupDocs n warnings (ifaceDocMap iface) (ifaceArgMap iface) (ifaceSubMap iface))
| otherwise = ([], (noDocForDecl, []))
where
m = nameModule n
hiDecl :: DynFlags -> Name -> ErrMsgGhc (Maybe (LHsDecl Name))
hiDecl dflags t = do
mayTyThing <- liftGhcToErrMsgGhc $ lookupName t
case mayTyThing of
Nothing -> do
liftErrMsg $ tell ["Warning: Not found in environment: " ++ pretty dflags t]
return Nothing
Just x -> case tyThingToLHsDecl x of
Left m -> liftErrMsg (tell [bugWarn m]) >> return Nothing
Right (m, t') -> liftErrMsg (tell $ map bugWarn m)
>> return (Just $ noLoc t')
where
warnLine x = O.text "haddock-bug:" O.<+> O.text x O.<>
O.comma O.<+> O.quotes (O.ppr t) O.<+>
O.text "-- Please report this on Haddock issue tracker!"
bugWarn = O.showSDoc dflags . warnLine
hiValExportItem :: DynFlags -> Name -> DocForDecl Name -> Bool -> Maybe Fixity -> ErrMsgGhc (ExportItem Name)
hiValExportItem dflags name doc splice fixity = do
mayDecl <- hiDecl dflags name
case mayDecl of
Nothing -> return (ExportNoDecl name [])
Just decl -> return (ExportDecl decl doc [] [] fixities splice)
where
fixities = case fixity of
Just f -> [(name, f)]
Nothing -> []
-- | Lookup docs for a declaration from maps.
lookupDocs :: Name -> WarningMap -> DocMap Name -> ArgMap Name -> SubMap
-> (DocForDecl Name, [(Name, DocForDecl Name)])
lookupDocs n warnings docMap argMap subMap =
let lookupArgDoc x = M.findWithDefault M.empty x argMap in
let doc = (lookupDoc n, lookupArgDoc n) in
let subs = M.findWithDefault [] n subMap in
let subDocs = [ (s, (lookupDoc s, lookupArgDoc s)) | s <- subs ] in
(doc, subDocs)
where
lookupDoc name = Documentation (M.lookup name docMap) (M.lookup name warnings)
-- | Return all export items produced by an exported module. That is, we're
-- interested in the exports produced by \"module B\" in such a scenario:
--
-- > module A (module B) where
-- > import B (...) hiding (...)
--
-- There are three different cases to consider:
--
-- 1) B is hidden, in which case we return all its exports that are in scope in A.
-- 2) B is visible, but not all its exports are in scope in A, in which case we
-- only return those that are.
-- 3) B is visible and all its exports are in scope, in which case we return
-- a single 'ExportModule' item.
moduleExports :: Module -- ^ Module A
-> ModuleName -- ^ The real name of B, the exported module
-> DynFlags -- ^ The flags used when typechecking A
-> WarningMap
-> GlobalRdrEnv -- ^ The renaming environment used for A
-> [Name] -- ^ All the exports of A
-> [LHsDecl Name] -- ^ All the declarations in A
-> IfaceMap -- ^ Already created interfaces
-> InstIfaceMap -- ^ Interfaces in other packages
-> Maps
-> FixMap
-> [SrcSpan] -- ^ Locations of all TH splices
-> ErrMsgGhc [ExportItem Name] -- ^ Resulting export items
moduleExports thisMod expMod dflags warnings gre _exports decls ifaceMap instIfaceMap maps fixMap splices
| m == thisMod = fullModuleContents dflags warnings gre maps fixMap splices decls
| otherwise =
case M.lookup m ifaceMap of
Just iface
| OptHide `elem` ifaceOptions iface -> return (ifaceExportItems iface)
| otherwise -> return [ ExportModule m ]
Nothing -> -- We have to try to find it in the installed interfaces
-- (external packages).
case M.lookup expMod (M.mapKeys moduleName instIfaceMap) of
Just iface -> return [ ExportModule (instMod iface) ]
Nothing -> do
liftErrMsg $
tell ["Warning: " ++ pretty dflags thisMod ++ ": Could not find " ++
"documentation for exported module: " ++ pretty dflags expMod]
return []
where
m = mkModule packageKey expMod
packageKey = modulePackageKey thisMod
-- Note [1]:
------------
-- It is unnecessary to document a subordinate by itself at the top level if
-- any of its parents is also documented. Furthermore, if the subordinate is a
-- record field or a class method, documenting it under its parent
-- indicates its special status.
--
-- A user might expect that it should show up separately, so we issue a
-- warning. It's a fine opportunity to also tell the user she might want to
-- export the subordinate through the parent export item for clarity.
--
-- The code removes top-level subordinates also when the parent is exported
-- through a 'module' export. I think that is fine.
--
-- (For more information, see Trac #69)
fullModuleContents :: DynFlags -> WarningMap -> GlobalRdrEnv -> Maps -> FixMap -> [SrcSpan]
-> [LHsDecl Name] -> ErrMsgGhc [ExportItem Name]
fullModuleContents dflags warnings gre (docMap, argMap, subMap, declMap, instMap) fixMap splices decls =
liftM catMaybes $ mapM mkExportItem (expandSig decls)
where
-- A type signature can have multiple names, like:
-- foo, bar :: Types..
--
-- We go through the list of declarations and expand type signatures, so
-- that every type signature has exactly one name!
expandSig :: [LHsDecl name] -> [LHsDecl name]
expandSig = foldr f []
where
f :: LHsDecl name -> [LHsDecl name] -> [LHsDecl name]
f (L l (SigD (TypeSig names t nwcs))) xs = foldr (\n acc -> L l (SigD (TypeSig [n] t nwcs)) : acc) xs names
f (L l (SigD (GenericSig names t))) xs = foldr (\n acc -> L l (SigD (GenericSig [n] t)) : acc) xs names
f x xs = x : xs
mkExportItem :: LHsDecl Name -> ErrMsgGhc (Maybe (ExportItem Name))
mkExportItem (L _ (DocD (DocGroup lev docStr))) = do
return . Just . ExportGroup lev "" $ processDocString dflags gre docStr
mkExportItem (L _ (DocD (DocCommentNamed _ docStr))) = do
return . Just . ExportDoc $ processDocStringParas dflags gre docStr
mkExportItem (L l (ValD d))
| name:_ <- collectHsBindBinders d, Just [L _ (ValD _)] <- M.lookup name declMap =
-- Top-level binding without type signature.
let (doc, _) = lookupDocs name warnings docMap argMap subMap in
fmap Just (hiValExportItem dflags name doc (l `elem` splices) $ M.lookup name fixMap)
| otherwise = return Nothing
mkExportItem decl@(L l (InstD d))
| Just name <- M.lookup (getInstLoc d) instMap =
let (doc, subs) = lookupDocs name warnings docMap argMap subMap in
return $ Just (ExportDecl decl doc subs [] (fixities name subs) (l `elem` splices))
mkExportItem (L l (TyClD cl@ClassDecl{ tcdLName = L _ name, tcdSigs = sigs })) = do
mdef <- liftGhcToErrMsgGhc $ minimalDef name
let sig = maybeToList $ fmap (noLoc . MinimalSig mempty . fmap noLoc) mdef
expDecl (L l (TyClD cl { tcdSigs = sig ++ sigs })) l name
mkExportItem decl@(L l d)
| name:_ <- getMainDeclBinder d = expDecl decl l name
| otherwise = return Nothing
fixities name subs = [ (n,f) | n <- name : map fst subs
, Just f <- [M.lookup n fixMap] ]
expDecl decl l name = return $ Just (ExportDecl decl doc subs [] (fixities name subs) (l `elem` splices))
where (doc, subs) = lookupDocs name warnings docMap argMap subMap
-- | Sometimes the declaration we want to export is not the "main" declaration:
-- it might be an individual record selector or a class method. In these
-- cases we have to extract the required declaration (and somehow cobble
-- together a type signature for it...).
extractDecl :: Name -> Module -> LHsDecl Name -> LHsDecl Name
extractDecl name mdl decl
| name `elem` getMainDeclBinder (unLoc decl) = decl
| otherwise =
case unLoc decl of
TyClD d@ClassDecl {} ->
let matches = [ sig | sig <- tcdSigs d, name `elem` sigName sig,
isVanillaLSig sig ] -- TODO: document fixity
in case matches of
[s0] -> let (n, tyvar_names) = (tcdName d, getTyVars d)
L pos sig = extractClassDecl n tyvar_names s0
in L pos (SigD sig)
_ -> error "internal: extractDecl (ClassDecl)"
TyClD d@DataDecl {} ->
let (n, tyvar_names) = (tcdName d, map toTypeNoLoc $ getTyVars d)
in SigD <$> extractRecSel name mdl n tyvar_names (dd_cons (tcdDataDefn d))
InstD (DataFamInstD DataFamInstDecl { dfid_tycon = L _ n
, dfid_pats = HsWB { hswb_cts = tys }
, dfid_defn = defn }) ->
SigD <$> extractRecSel name mdl n tys (dd_cons defn)
InstD (ClsInstD ClsInstDecl { cid_datafam_insts = insts }) ->
let matches = [ d | L _ d <- insts
, L _ ConDecl { con_details = RecCon rec } <- dd_cons (dfid_defn d)
, ConDeclField { cd_fld_names = ns } <- map unLoc (unLoc rec)
, L _ n <- ns
, n == name
]
in case matches of
[d0] -> extractDecl name mdl (noLoc . InstD $ DataFamInstD d0)
_ -> error "internal: extractDecl (ClsInstD)"
_ -> error "internal: extractDecl"
where
getTyVars = hsLTyVarLocNames . tyClDeclTyVars
toTypeNoLoc :: Located Name -> LHsType Name
toTypeNoLoc = noLoc . HsTyVar . unLoc
extractClassDecl :: Name -> [Located Name] -> LSig Name -> LSig Name
extractClassDecl c tvs0 (L pos (TypeSig lname ltype _)) = case ltype of
L _ (HsForAllTy expl _ tvs (L _ preds) ty) ->
L pos (TypeSig lname (noLoc (HsForAllTy expl Nothing tvs (lctxt preds) ty)) [])
_ -> L pos (TypeSig lname (noLoc (HsForAllTy Implicit Nothing emptyHsQTvs (lctxt []) ltype)) [])
where
lctxt = noLoc . ctxt
ctxt preds = nlHsTyConApp c (map toTypeNoLoc tvs0) : preds
extractClassDecl _ _ _ = error "extractClassDecl: unexpected decl"
extractRecSel :: Name -> Module -> Name -> [LHsType Name] -> [LConDecl Name]
-> LSig Name
extractRecSel _ _ _ _ [] = error "extractRecSel: selector not found"
extractRecSel nm mdl t tvs (L _ con : rest) =
case con_details con of
RecCon (L _ fields) | ((n,L _ (ConDeclField _nn ty _)) : _) <- matching_fields fields ->
L (getLoc n) (TypeSig [noLoc nm] (noLoc (HsFunTy data_ty (getBangType ty))) [])
_ -> extractRecSel nm mdl t tvs rest
where
matching_fields flds = [ (n,f) | f@(L _ (ConDeclField ns _ _)) <- flds, n <- ns, unLoc n == nm ]
data_ty
| ResTyGADT _ ty <- con_res con = ty
| otherwise = foldl' (\x y -> noLoc (HsAppTy x y)) (noLoc (HsTyVar t)) tvs
-- | Keep export items with docs.
pruneExportItems :: [ExportItem Name] -> [ExportItem Name]
pruneExportItems = filter hasDoc
where
hasDoc (ExportDecl{expItemMbDoc = (Documentation d _, _)}) = isJust d
hasDoc _ = True
mkVisibleNames :: Maps -> [ExportItem Name] -> [DocOption] -> [Name]
mkVisibleNames (_, _, _, _, instMap) exports opts
| OptHide `elem` opts = []
| otherwise = let ns = concatMap exportName exports
in seqList ns `seq` ns
where
exportName e@ExportDecl {} = name ++ subs
where subs = map fst (expItemSubDocs e)
name = case unLoc $ expItemDecl e of
InstD d -> maybeToList $ M.lookup (getInstLoc d) instMap
decl -> getMainDeclBinder decl
exportName ExportNoDecl {} = [] -- we don't count these as visible, since
-- we don't want links to go to them.
exportName _ = []
seqList :: [a] -> ()
seqList [] = ()
seqList (x : xs) = x `seq` seqList xs
-- | Find a stand-alone documentation comment by its name.
findNamedDoc :: String -> [HsDecl Name] -> ErrMsgM (Maybe HsDocString)
findNamedDoc name = search
where
search [] = do
tell ["Cannot find documentation for: $" ++ name]
return Nothing
search (DocD (DocCommentNamed name' doc) : rest)
| name == name' = return (Just doc)
| otherwise = search rest
search (_other_decl : rest) = search rest
|
mrBliss/haddock
|
haddock-api/src/Haddock/Interface/Create.hs
|
Haskell
|
bsd-2-clause
| 36,249