code
stringlengths 5
1.03M
| repo_name
stringlengths 5
90
| path
stringlengths 4
158
| license
stringclasses 15
values | size
int64 5
1.03M
| n_ast_errors
int64 0
53.9k
| ast_max_depth
int64 2
4.17k
| n_whitespaces
int64 0
365k
| n_ast_nodes
int64 3
317k
| n_ast_terminals
int64 1
171k
| n_ast_nonterminals
int64 1
146k
| loc
int64 -1
37.3k
| cycloplexity
int64 -1
1.31k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module Main where
import Control.Concurrent
import Control.Concurrent.Supervised
import Control.Monad
import Control.Monad.Base
-- | Root worker: spawns two copies of 'worker2', then kills the
-- second one before it has a chance to finish.
worker1 :: (MonadSupervisor m) => m ()
worker1 = do
    liftBase $ threadDelay 1000000
    void $ spawn worker2
    victim <- spawn worker2
    liftBase $ do
        -- Supervised threads are ordinary Haskell threads, so the
        -- regular 'killThread' works on them.
        killThread victim
        putStrLn "worker1 done."
-- | Leaf worker: sleeps for one second and reports completion.
worker2 :: (MonadBase IO m) => m ()
worker2 = liftBase $ do
    threadDelay 1000000
    putStrLn "worker2 done."
-- | Run the root worker under a supervisor, wait until every
-- supervised thread has terminated, then wait for a keypress.
main :: IO ()
main = do
    runSupervisorT $ do
        void $ spawn worker1
        waitTill NoRunningThreads
    putStrLn "All threads terminated. Press any key to exit."
    void getChar
|
schernichkin/supervised-concurrency
|
samples/supervisor-events/Main.hs
|
bsd-3-clause
| 844
| 0
| 11
| 225
| 197
| 96
| 101
| 26
| 1
|
module Main where
import Lib
-- | Thin executable wrapper: all real work lives in 'Lib.mainEntry'.
main :: IO ()
main = mainEntry
|
paradoxix/haskellplayground
|
app/Main.hs
|
bsd-3-clause
| 62
| 0
| 6
| 14
| 22
| 13
| 9
| 4
| 1
|
module CLaSH.Driver.PrepareBinding
( prepareBinding
)
where
-- External Modules
import Data.Map (Map)
-- GHC API
import qualified CoreSyn
-- Internal Modules
import CLaSH.CoreHW (coreToCoreHW)
import CLaSH.Desugar (desugar)
import CLaSH.Driver.Types
import CLaSH.Util.CoreHW (Var,Term)
-- | Prepare a top-level binder for the rest of the pipeline:
-- desugar the global bindings with respect to the binder, then
-- translate the result to CoreHW bindings.
prepareBinding ::
  Map CoreSyn.CoreBndr CoreSyn.CoreExpr -- ^ All global bindings.
  -> CoreSyn.CoreBndr                   -- ^ The binder to prepare.
  -> DriverSession (Map Var Term)
prepareBinding globalBindings bndr = do
  globalBindings' <- desugar globalBindings bndr
  -- The final action's result is the result of the function; no
  -- need for an intermediate bind followed by 'return'.
  coreToCoreHW globalBindings' bndr
|
christiaanb/clash-tryout
|
src/CLaSH/Driver/PrepareBinding.hs
|
bsd-3-clause
| 620
| 0
| 9
| 126
| 144
| 79
| 65
| 16
| 1
|
--------------------------------------------------------------------------------
-- |
-- Module : Spectrum.DTA
-- Copyright : (c) [2009..2010] Trevor L. McDonell
-- License : BSD
--
-- Parse a DTA LC-MS/MS results file.
--
-- The file format is very simple. The first line contains the singly protonated
-- peptide mass (MH+) and the peptide charge state as a pair of space separated
-- values. Subsequent lines contain space separated pairs of fragment ion m/z
-- ratio and intensity values. Note that the precursor peptide mass is
-- independent of the charge state.
--
-- The filename usually used to identify the dataset, and each file contains
-- only a single MS/MS sample set.
--
--------------------------------------------------------------------------------
module Spectrum.DTA (readDTA) where
import Mass
import Util.Parsec
import Spectrum.Data
import Data.Vector.Unboxed (fromList)
import Data.ByteString.Lazy (empty)
import Text.ParserCombinators.Parsec
--------------------------------------------------------------------------------
-- DTA File Parser/Lexer
--------------------------------------------------------------------------------
--
-- The DTA file contains at least one line, each of which is terminated by an
-- end-of-line character (eol)
--
-- | A DTA file: a sequence of lines, each containing a pair of
-- space-separated values and terminated by an end-of-line marker.
dtaFile :: RealFrac a => Parser [(a,a)]
dtaFile = readF2 `endBy` eol
--
-- Encase the values read from the DTA file into a data structure
--
-- | Encase the values read from the DTA file into an 'MS2Data'
-- value.  The first pair is the header line: the singly protonated
-- peptide mass (MH+) and the charge state, which must be a whole
-- number.
mkSpec :: FilePath -> [(Float,Float)] -> Either String MS2Data
mkSpec name [] = Left ("Error parsing file: " ++ show name ++ "\nempty spectrum")
mkSpec name ((m,c):ss)
    | wholeNumber c = Right (MS2Data empty precursor c (fromList ss))
    | otherwise     = Left ("Error parsing file: " ++ show name ++ "\ninvalid peptide charge state\nexpecting integer")
  where
    wholeNumber x = fromInteger (truncate x) == x
    -- Precursor mass/charge ratio from the MH+ mass and charge state.
    precursor = (m + (c-1) * massH) / c
--------------------------------------------------------------------------------
-- File I/O
--------------------------------------------------------------------------------
--
-- Read the given file and return either an error or the MS/MS data.
--
-- | Read the given file and return either an error description or
-- the parsed MS/MS data.
readDTA :: FilePath -> IO (Either String MS2Data)
readDTA name = interpret <$> parseFromFile dtaFile name
  where
    interpret (Left err)   = Left ("Error parsing file: " ++ show err)
    interpret (Right rows) = mkSpec name rows
|
tmcdonell/hfx
|
src/haskell/Spectrum/DTA.hs
|
bsd-3-clause
| 2,359
| 0
| 15
| 402
| 392
| 219
| 173
| 22
| 2
|
{-# LANGUAGE ExistentialQuantification #-}
module FFmpeg.Config where
-- import Control.Monad
-- import Debug
import FFmpeg.Probe
import System.Environment
import System.FilePath
-- | An encoder configuration: knows how to derive the ffmpeg
-- argument list and the output file extension from probed input
-- metadata.
class Config a where
    makeArgs :: a -> Probe -> [String]
    makeExt :: a -> String
    defaultCfg :: a
    -- | Assemble the complete ffmpeg command line for one input
    -- file, with progress written to @outfile.tmp@.
    fullArgs :: a -> Probe -> FilePath -> IO [String]
    fullArgs a probe outdir = do
        let infile  = fpath probe
            outfile = replaceExtension (replaceDirectory infile outdir) (makeExt a)
        return $ concat
            [ ["-i", infile]
            , ["-n"]
            , ["-nostdin"]
            , ["-v", "error"]
            , ["-progress", outfile ++ ".tmp"]
            , makeArgs a probe
            , [outfile]
            ]

-- | A configuration whose concrete type has been erased.
data LoadedCfg = forall a. (Config a) => LoadedCfg a

instance Config LoadedCfg where
    -- NOTE(review): deliberately partial -- a 'LoadedCfg' is only
    -- ever obtained by wrapping an existing config, never as a
    -- default; confirm no caller evaluates this.
    defaultCfg = undefined
    makeArgs (LoadedCfg a) probe = makeArgs a probe
    makeExt (LoadedCfg a) = makeExt a
|
YLiLarry/compress-video
|
src/FFmpeg/Config.hs
|
bsd-3-clause
| 978
| 14
| 9
| 317
| 269
| 149
| 120
| 25
| 0
|
{-# LANGUAGE FlexibleContexts #-}
module Day6 where
import qualified Data.Map as Map
import Data.Map (Map)
import Data.Tuple
import Data.List
import Data.Ord
-- | Frequency table: column position (1-indexed) -> character -> count.
type ECMap = Map Int (Map Char Int)
-- | Fold one message line into the frequency table: for each column
-- (1-indexed), bump the count of the character appearing there.
-- (Signature is the expansion of the @ECMap@ alias.)
iter :: Map Int (Map Char Int) -> String -> Map Int (Map Char Int)
iter m = foldl' go m . zip [1..]
  where
    -- Merging a singleton {c: 1} into the existing inner map either
    -- starts c's count at one or adds one to it -- this replaces the
    -- explicit lookup/insert/update dance, and foldl' keeps the
    -- accumulator strict.
    go r (k, c) = Map.insertWith (Map.unionWith (+)) k (Map.singleton c 1) r
-- | Recover the message: for every column, take the character with
-- the highest count (ties broken towards the larger character, as
-- with the original descending sort on (count, char) pairs).
decode :: ECMap -> String
decode = Map.elems . Map.map mostFrequent
  where
    mostFrequent freqs =
      snd $ maximum [ (n, ch) | (ch, n) <- Map.assocs freqs ]
-- | Variant decoding: for every column, take the character with the
-- lowest count (ties broken towards the smaller character, matching
-- the original ascending sort on (count, char) pairs).
decode' :: ECMap -> String
decode' = Map.elems . Map.map leastFrequent
  where
    leastFrequent freqs =
      snd $ minimum [ (n, ch) | (ch, n) <- Map.assocs freqs ]
-- | Decode the repeated message (one repetition per input line) by
-- majority vote per column.  Type signatures were missing; foldl'
-- avoids building a chain of thunks over the input lines.
errorCorrected :: String -> String
errorCorrected = decode . foldl' iter Map.empty . lines

-- | As 'errorCorrected', but the least frequent character wins.
errorCorrected' :: String -> String
errorCorrected' = decode' . foldl' iter Map.empty . lines
|
cl04/advent2016
|
src/Day6.hs
|
bsd-3-clause
| 811
| 0
| 16
| 205
| 339
| 177
| 162
| 19
| 2
|
module Style (
module Style.Types
, module Style.Color
, styleNode
) where
import Types
import DOM
import Style.Color
import Style.Parser
import Style.Types
import Control.Lens hiding (children)
import qualified Data.Map as M
-- | Recursively attach style information to a DOM tree.  An element
-- node's style is parsed from its @style@ attribute (falling back to
-- 'newStyle' when absent); non-element nodes get 'newStyle'.
styleNode :: DomNode -> StyledNode
styleNode node = StyledNode kind style (styleNode <$> node ^. children)
  where
    kind = node ^. nodeType
    style = case kind of
        Element _ attrs -> maybe newStyle parseStyle (M.lookup "style" attrs)
        _               -> newStyle
|
forestbelton/orb
|
src/Style.hs
|
bsd-3-clause
| 545
| 0
| 12
| 159
| 151
| 84
| 67
| 17
| 2
|
{-# LANGUAGE GeneralizedNewtypeDeriving, FlexibleContexts #-}
-- | The type checker checks whether the program is type-consistent.
-- Whether type annotations are already present is irrelevant, but if
-- they are, the type checker will signal an error if they are wrong.
-- The program does not need to have any particular properties for the
-- type checker to function; in particular it does not need unique
-- names.
module Language.Futhark.TypeChecker
( checkProg
, TypeError(..)
, Scope(..)
)
where
import Control.Applicative
import Control.Monad.Except
import Control.Monad.Reader
import Control.Monad.Writer
import Control.Monad.State
import Control.Monad.RWS
import Data.Array
import Data.List
import Data.Loc
import Data.Maybe
import Data.Either
import qualified Data.HashMap.Strict as HM
import qualified Data.HashSet as HS
import Prelude
import Language.Futhark
import Language.Futhark.Renamer
(tagProg', untagPattern)
import Futhark.FreshNames hiding (newName)
import qualified Futhark.FreshNames
-- | Information about an error during type checking. The 'Show'
-- instance for this type produces a human-readable description.
data TypeError =
    TypeError SrcLoc String
    -- ^ A general error happened at the given position and
    -- for the given reason.
  | UnifyError SrcLoc (TypeBase Rank NoInfo ()) SrcLoc (TypeBase Rank NoInfo ())
    -- ^ Types of two expressions failed to unify.
  | UnexpectedType SrcLoc
    (TypeBase Rank NoInfo ()) [TypeBase Rank NoInfo ()]
    -- ^ Expression of type was not one of the expected
    -- types.
  | ReturnTypeError SrcLoc Name (TypeBase Rank NoInfo ()) (TypeBase Rank NoInfo ())
    -- ^ The body of a function definition has a different
    -- type than its declaration.
  | DupDefinitionError Name SrcLoc SrcLoc
    -- ^ Two functions have been defined with the same name.
  | DupParamError Name Name SrcLoc
    -- ^ Two function parameters share the same name.
  | DupPatternError Name SrcLoc SrcLoc
    -- ^ Two pattern variables share the same name.
  | InvalidPatternError (PatternBase NoInfo Name)
    (TypeBase Rank NoInfo ()) (Maybe String) SrcLoc
    -- ^ The pattern is not compatible with the type or is otherwise
    -- inconsistent.
  | UnknownVariableError Name SrcLoc
    -- ^ Unknown variable of the given name referenced at the given spot.
  | UnknownFunctionError QualName SrcLoc
    -- ^ Unknown function of the given name called at the given spot.
  | ParameterMismatch (Maybe QualName) SrcLoc
    (Either Int [TypeBase Rank NoInfo ()]) [TypeBase Rank NoInfo ()]
    -- ^ A function (possibly anonymous) was called with
    -- invalid arguments. The third argument is either the
    -- number of parameters, or the specific types of
    -- parameters accepted (sometimes, only the former can
    -- be determined).
  | UseAfterConsume Name SrcLoc SrcLoc
    -- ^ A variable was attempted used after being
    -- consumed. The last location is the point of
    -- consumption.
  | IndexingError Int Int SrcLoc
    -- ^ Too many indices provided. The first integer is
    -- the number of dimensions in the array being
    -- indexed.
  | BadAnnotation SrcLoc String
    (TypeBase Rank NoInfo ()) (TypeBase Rank NoInfo ())
    -- ^ One of the type annotations fails to match with the
    -- derived type. The string is a description of the
    -- role of the type. The last type is the new derivation.
  | BadTupleAnnotation SrcLoc String
    [Maybe (TypeBase Rank NoInfo ())] [TypeBase Rank NoInfo ()]
    -- ^ One of the tuple type annotations fails to
    -- match with the derived type. The string is a
    -- description of the role of the type. The last
    -- type is the elements of the new derivation.
  | CurriedConsumption QualName SrcLoc
    -- ^ A function is being curried with an argument to be consumed.
  | BadLetWithValue SrcLoc
    -- ^ The new value for an array slice in let-with is aliased to the source.
  | ReturnAliased Name Name SrcLoc
    -- ^ The unique return value of the function aliases
    -- one of the function parameters.
  | UniqueReturnAliased Name SrcLoc
    -- ^ A unique element of the tuple returned by the
    -- function aliases some other element of the tuple.
  | NotAnArray SrcLoc (ExpBase CompTypeBase Name) (TypeBase Rank NoInfo ())
    -- ^ An array was expected, but the expression has the
    -- given non-array type.
  | PermutationError SrcLoc [Int] Int (Maybe Name)
    -- ^ The permutation is not valid.
  | DimensionNotInteger SrcLoc Name
    -- ^ A dimension annotation was a non-integer variable.
  | CyclicalTypeDefinition SrcLoc Name
    -- ^ Type alias has been defined cyclically.
  | UndefinedAlias SrcLoc Name
    -- ^ Type alias is referenced, but not defined
  | DupTypeAlias SrcLoc Name
    -- ^ Type alias has been defined twice
  | DupSigError SrcLoc Name
    -- ^ Signature has been defined twice
  | InvalidUniqueness SrcLoc (TypeBase Rank NoInfo ())
    -- ^ Uniqueness attribute applied to non-array.
  | UndefinedQualName SrcLoc QualName
    -- ^ Undefined longname
  | InvalidField SrcLoc Type String
    -- ^ Attempt to access a field that the value's type does not have.
-- | Human-readable rendering of type errors.  All messages render
-- source locations with 'locStr' for consistency.
instance Show TypeError where
  show (TypeError pos msg) =
    "Type error at " ++ locStr pos ++ ":\n" ++ msg
  show (UnifyError e1loc t1 e2loc t2) =
    "Cannot unify type " ++ pretty t1 ++
    " of expression at " ++ locStr e1loc ++
    "\nwith type " ++ pretty t2 ++
    " of expression at " ++ locStr e2loc
  show (UnexpectedType loc _ []) =
    "Type of expression at " ++ locStr loc ++
    "cannot have any type - possibly a bug in the type checker."
  show (UnexpectedType loc t ts) =
    "Type of expression at " ++ locStr loc ++ " must be one of " ++
    intercalate ", " (map pretty ts) ++ ", but is " ++
    pretty t ++ "."
  show (ReturnTypeError pos fname rettype bodytype) =
    "Declaration of function " ++ nameToString fname ++ " at " ++ locStr pos ++
    " declares return type " ++ pretty rettype ++ ", but body has type " ++
    pretty bodytype
  show (DupDefinitionError name pos1 pos2) =
    "Duplicate definition of function " ++ nameToString name ++ ". Defined at " ++
    locStr pos1 ++ " and " ++ locStr pos2 ++ "."
  show (DupParamError funname paramname pos) =
    "Parameter " ++ pretty paramname ++
    " mentioned multiple times in argument list of function " ++
    nameToString funname ++ " at " ++ locStr pos ++ "."
  show (DupPatternError name pos1 pos2) =
    "Variable " ++ pretty name ++ " bound twice in tuple pattern; at " ++
    locStr pos1 ++ " and " ++ locStr pos2 ++ "."
  show (InvalidPatternError pat t desc loc) =
    "Pattern " ++ pretty pat ++
    " cannot match value of type " ++ pretty t ++ " at " ++ locStr loc ++ end
    where end = case desc of Nothing -> "."
                             Just desc' -> ":\n" ++ desc'
  show (UnknownVariableError name pos) =
    "Unknown variable " ++ pretty name ++ " referenced at " ++ locStr pos ++ "."
  show (UnknownFunctionError fname pos) =
    "Unknown function " ++ longnameToString fname ++ " called at " ++ locStr pos ++ "."
  show (ParameterMismatch fname pos expected got) =
    "In call of " ++ fname' ++ " at position " ++ locStr pos ++ ":\n" ++
    "expecting " ++ show nexpected ++ " argument(s) of type(s) " ++
    expected' ++ ", but got " ++ show ngot ++
    " arguments of types " ++ intercalate ", " (map pretty got) ++ "."
    where (nexpected, expected') =
            case expected of
              Left i -> (i, "(polymorphic)")
              Right ts -> (length ts, intercalate ", " $ map pretty ts)
          ngot = length got
          fname' = maybe "anonymous function" (("function "++) . longnameToString) fname
  show (UseAfterConsume name rloc wloc) =
    "Variable " ++ pretty name ++ " used at " ++ locStr rloc ++
    ", but it was consumed at " ++ locStr wloc ++ ". (Possibly through aliasing)"
  show (IndexingError dims got pos) =
    show got ++ " indices given at " ++ locStr pos ++
    ", but type of indexee has " ++ show dims ++ " dimension(s)."
  show (BadAnnotation loc desc expected got) =
    "Annotation of \"" ++ desc ++ "\" type of expression at " ++
    locStr loc ++ " is " ++ pretty expected ++
    ", but derived to be " ++ pretty got ++ "."
  show (BadTupleAnnotation loc desc expected got) =
    "Annotation of \"" ++ desc ++ "\" type of expression at " ++
    locStr loc ++ " is a tuple {" ++
    intercalate ", " (map (maybe "(unspecified)" pretty) expected) ++
    "}, but derived to be " ++ prettyTuple got ++ "."
  show (CurriedConsumption fname loc) =
    "Function " ++ longnameToString fname ++
    " curried over a consuming parameter at " ++ locStr loc ++ "."
  show (BadLetWithValue loc) =
    "New value for elements in let-with shares data with source array at " ++
    locStr loc ++ ". This is illegal, as it prevents in-place modification."
  show (ReturnAliased fname name loc) =
    "Unique return value of function " ++ nameToString fname ++ " at " ++
    locStr loc ++ " is aliased to " ++ pretty name ++ ", which is not consumed."
  show (UniqueReturnAliased fname loc) =
    "A unique tuple element of return value of function " ++
    nameToString fname ++ " at " ++ locStr loc ++
    " is aliased to some other tuple component."
  show (NotAnArray loc _ t) =
    "The expression at " ++ locStr loc ++
    " is expected to be an array, but is " ++ pretty t ++ "."
  show (PermutationError loc perm rank name) =
    "The permutation (" ++ intercalate ", " (map show perm) ++
    ") is not valid for array " ++ name' ++ "of rank " ++ show rank ++ " at " ++
    locStr loc ++ "."
    where name' = maybe "" ((++" ") . pretty) name
  show (DimensionNotInteger loc name) =
    "Dimension declaration " ++ pretty name ++ " at " ++ locStr loc ++
    " should be an integer."
  show (CyclicalTypeDefinition loc name) =
    "Type alias " ++ pretty name ++ " at " ++ locStr loc ++
    " is cyclically defined."
  show (UndefinedAlias loc name) =
    "Type alias '" ++ nameToString name ++ "' referenced at line " ++ locStr loc
    ++ ", but not defined."
  show (DupTypeAlias loc name) =
    -- Was 'show loc': every other message renders locations with
    -- 'locStr', so do the same here.
    "Type alias '" ++ nameToString name ++ "' defined twice at line " ++ locStr loc
  show (DupSigError loc name) =
    "Duplicate definition of type '" ++ nameToString name ++ "' at line " ++ locStr loc
  show (InvalidUniqueness loc t) =
    "Attempt to declare unique non-array " ++ pretty t ++ " at " ++ locStr loc ++ "."
  show (UndefinedQualName loc longname) =
    "Attempt to use undefined " ++ show longname ++ " at " ++ locStr loc ++ "."
  show (InvalidField loc t field) =
    "Attempt to access field '" ++ field ++ "' of value of type " ++
    pretty t ++ " at " ++ locStr loc ++ "."
-- | A function binding: the function's qualified name, its return
-- type, and the list of its parameter types.
type FunBinding = (QualName, StructTypeBase VName, [StructTypeBase VName])

-- | The definition bound to a type alias.
type TypeBinding = TypeBase ShapeDecl NoInfo VName

-- | What we know about a variable: either its type, or the location
-- at which it was consumed (after which it may no longer be used).
data Binding = Bound Type
             | WasConsumed SrcLoc

-- | A single use of a variable: a consumption or a mere observation,
-- tagged with where it happened.
data Usage = Consumed SrcLoc
           | Observed SrcLoc
           deriving (Eq, Ord, Show)

-- | The sets of variables observed and consumed by some piece of
-- code, together with that code's location.
data Occurence = Occurence { observed :: Names VName
                           , consumed :: Names VName
                           , location :: SrcLoc
                           }
               deriving (Eq, Show)
-- | An occurence is located where it was recorded.
instance Located Occurence where
  locOf occ = locOf $ location occ

-- | An occurence that merely observes the given names.
observation :: Names VName -> SrcLoc -> Occurence
observation obs loc = Occurence obs HS.empty loc

-- | An occurence that consumes the given names.
consumption :: Names VName -> SrcLoc -> Occurence
consumption cons loc = Occurence HS.empty cons loc

-- | Does this occurence neither observe nor consume anything?
nullOccurence :: Occurence -> Bool
nullOccurence occ = all HS.null [observed occ, consumed occ]
-- | A list of occurences, in program order.
type Occurences = [Occurence]

-- | For each variable, every usage recorded for it.
type UsageMap = HM.HashMap VName [Usage]

-- | Flatten a list of occurences into per-variable usage lists.
usageMap :: Occurences -> UsageMap
usageMap = foldl' comb HM.empty  -- foldl' : keep the accumulator strict
  where comb m (Occurence obs cons loc) =
          let m' = HS.foldl' (ins $ Observed loc) m obs
          in HS.foldl' (ins $ Consumed loc) m' cons
        ins v m k = HM.insertWith (++) k [v] m
-- | Combine two usages of the same variable.  Two observations are
-- compatible; any combination involving a consumption is a
-- use-after-consume error.
combineOccurences :: VName -> Usage -> Usage -> Either TypeError Usage
combineOccurences _ (Observed loc) (Observed _) = Right $ Observed loc
combineOccurences name (Consumed wloc) (Observed rloc) =
  Left $ UseAfterConsume (baseName name) rloc wloc
combineOccurences name (Observed rloc) (Consumed wloc) =
  Left $ UseAfterConsume (baseName name) rloc wloc
combineOccurences name (Consumed loc1) (Consumed loc2) =
  -- Report the later location as the offending use and the earlier
  -- one as the original consumption.
  Left $ UseAfterConsume (baseName name) (max loc1 loc2) (min loc1 loc2)

-- | Fail if any variable is used after it has been consumed.
checkOccurences :: Occurences -> Either TypeError ()
checkOccurences = void . HM.traverseWithKey comb . usageMap
  where comb _ [] = Right ()
        comb name (u:us) = foldM_ (combineOccurences name) u us
-- | Every name consumed by any occurence in the list.
allConsumed :: Occurences -> Names VName
allConsumed occs = HS.unions $ map consumed occs

-- | Combine occurences of two parts executed in sequence: names the
-- second part consumes no longer count as observed by the first.
seqOccurences :: Occurences -> Occurences -> Occurences
seqOccurences before after =
  filter (not . nullOccurence) (map dropObserved before ++ after)
  where consumedLater = allConsumed after
        dropObserved occ =
          occ { observed = observed occ `HS.difference` consumedLater }

-- | Combine occurences of two alternative parts (only one of which
-- will run): the second part's consumptions are removed from both
-- the observed and consumed sets of the first.
altOccurences :: Occurences -> Occurences -> Occurences
altOccurences before after =
  filter (not . nullOccurence) (map scrub before ++ after)
  where consumedLater = allConsumed after
        scrub occ =
          occ { consumed = consumed occ `HS.difference` consumedLater
              , observed = observed occ `HS.difference` consumedLater }
-- | A pair of a variable table and a function table. Type checking
-- happens with access to this environment. The function table is
-- only initialised at the very beginning, but the variable table will
-- be extended during type-checking when let-expressions are
-- encountered.
data Scope = Scope { envVtable :: HM.HashMap VName Binding
                     -- ^ Types (or consumption state) of in-scope variables.
                   , envFtable :: HM.HashMap Name FunBinding
                     -- ^ Signatures of in-scope functions.
                   , envTAtable :: HM.HashMap Name TypeBinding
                     -- ^ In-scope type aliases.
                   , envModTable :: HM.HashMap Name Scope
                     -- ^ Scopes of in-scope modules.
                   , envBreadcrumb :: QualName
                     -- ^ Qualified path of the module being checked.
                   }

-- | The scope at the start of checking: only the built-in functions,
-- at the top of the module hierarchy.
initialScope :: Scope
initialScope = Scope HM.empty
                     initialFtable
                     HM.empty
                     HM.empty
                     ([] , nameFromString "")

-- | The type checker runs in this monad. The 'Either' monad is used
-- for error handling.
newtype TypeM a = TypeM (RWST
                         Scope       -- Reader
                         Occurences  -- Writer
                         VNameSource -- State
                         (Except TypeError) -- Inner monad
                         a)
  deriving (Monad, Functor, Applicative,
            MonadReader Scope,
            MonadWriter Occurences,
            MonadState VNameSource,
            MonadError TypeError)

-- | Run a checking action with the given scope and name source,
-- returning its result and the final name source.  The accumulated
-- occurences are discarded.
runTypeM :: Scope -> VNameSource -> TypeM a
         -> Either TypeError (a, VNameSource)
runTypeM env src (TypeM m) = do
  (x, src', _) <- runExcept $ runRWST m env src
  return (x, src')
-- | Signal a type error.
bad :: TypeError -> TypeM a
bad = throwError

-- | Draw a fresh name based on the given one.
newName :: VName -> TypeM VName
newName v = do
  source <- get
  let (v', source') = Futhark.FreshNames.newName source v
  put source'
  return v'

-- | A fresh name with tag zero, derived from a base name.
newID :: Name -> TypeM VName
newID s = newName $ ID (s, 0)

-- | A fresh name derived from a raw string.
newIDFromString :: String -> TypeM VName
newIDFromString = newID . nameFromString

-- | A fresh identifier with the given base name, type and location.
newIdent :: String -> Type -> SrcLoc -> TypeM Ident
newIdent s t loc = do
  name <- newID $ nameFromString s
  return $ Ident name (Info t) loc

-- | A fresh parameter with the given base name, type and location.
newParam :: String -> Type -> SrcLoc -> TypeM (ParamBase NoInfo VName)
newParam s t loc = do
  name <- newIDFromString s
  return $ Param name (TypeDecl (contractTypeBase t) NoInfo) loc

-- | Fail with the error on 'Left', otherwise return the value.
liftEither :: Either TypeError a -> TypeM a
liftEither = either bad return
-- | Record a set of occurences in the checker's 'Writer' output.
occur :: Occurences -> TypeM ()
occur = tell

-- | Proclaim that we have made read-only use of the given variable.
-- No-op unless the variable is array-typed.
observe :: Ident -> TypeM ()
observe (Ident nm (Info t) loc)
  | primType t = return ()
  | otherwise = let als = nm `HS.insert` aliases t
                in occur [observation als loc]

-- | Proclaim that we have written to the given variable.
consume :: SrcLoc -> Names VName -> TypeM ()
consume loc als = occur [consumption als loc]

-- | Proclaim that we have written to the given variable, and mark
-- accesses to it and all of its aliases as invalid inside the given
-- computation.
consuming :: Ident -> TypeM a -> TypeM a
consuming (Ident name (Info t) loc) m = do
  consume loc $ aliases t
  local consume' m
  where consume' env =
          env { envVtable = HM.insert name (WasConsumed loc) $ envVtable env }

-- | Run a computation, returning its result together with the
-- occurences it produced, while removing those occurences from the
-- 'Writer' output (via 'pass').
collectOccurences :: TypeM a -> TypeM (a, Occurences)
collectOccurences m = pass $ do
  (x, dataflow) <- listen m
  return ((x, dataflow), const mempty)

-- | Fail if the occurences contain a use-after-consume violation.
maybeCheckOccurences :: Occurences -> TypeM ()
maybeCheckOccurences = liftEither . checkOccurences

-- | Run two computations as alternatives (e.g. the two branches of a
-- conditional): each side's occurences are checked independently and
-- then merged with 'altOccurences'.
alternative :: TypeM a -> TypeM b -> TypeM (a,b)
alternative m1 m2 = pass $ do
  (x, occurs1) <- listen m1
  (y, occurs2) <- listen m2
  maybeCheckOccurences occurs1
  maybeCheckOccurences occurs2
  let usage = occurs1 `altOccurences` occurs2
  return ((x, y), const usage)

-- | Make all bindings nonunique.
noUnique :: TypeM a -> TypeM a
noUnique = local (\env -> env { envVtable = HM.map f $ envVtable env})
  where f (Bound t) = Bound $ t `setUniqueness` Nonunique
        f (WasConsumed loc) = WasConsumed loc

-- | Run a computation with the given identifiers bound, checking
-- afterwards that each was used correctly within its scope.
binding :: [Ident] -> TypeM a -> TypeM a
binding bnds = check . local (`bindVars` bnds)
  where bindVars :: Scope -> [Ident] -> Scope
        bindVars = foldl bindVar
        bindVar :: Scope -> Ident -> Scope
        bindVar env (Ident name (Info tp) _) =
          let inedges = HS.toList $ aliases tp
              update (Bound tp')
              -- If 'name' is tuple-typed, don't alias the components
              -- to 'name', because tuples have no identity beyond
              -- their components.
                | Tuple _ <- tp = Bound tp'
                | otherwise = Bound (tp' `addAliases` HS.insert name)
              update b = b
          in env { envVtable = HM.insert name (Bound tp) $
                               adjustSeveral update inedges $
                               envVtable env }
        adjustSeveral f = flip $ foldl $ flip $ HM.adjust f
        -- Check whether the bound variables have been used correctly
        -- within their scope.
        check m = do
          (a, usages) <- collectBindingsOccurences m
          maybeCheckOccurences usages
          return a
        -- Collect and remove all occurences in @bnds@. This relies
        -- on the fact that no variables shadow any other.
        collectBindingsOccurences m = pass $ do
          (x, usage) <- listen m
          let (relevant, rest) = split usage
          return ((x, relevant), const rest)
          where split = unzip .
                        map (\occ ->
                             let (obs1, obs2) = divide $ observed occ
                                 (con1, con2) = divide $ consumed occ
                             in (occ { observed = obs1, consumed = con1 },
                                 occ { observed = obs2, consumed = con2 }))
                names = HS.fromList $ map identName bnds
                divide s = (s `HS.intersection` names, s `HS.difference` names)

-- | Run a computation with the given function parameters bound,
-- also bringing their shape annotations into scope.
bindingParams :: [Parameter] -> TypeM a -> TypeM a
bindingParams params m =
  -- We need to bind both the identifiers themselves, as well as any
  -- presently non-bound shape annotations.
  binding (map fromParam params) $
  -- Figure out the not already bound shape annotations.
  binding (concat [ mapMaybe (inspectDim $ srclocOf param) $
                    nestedDims' $ paramDeclaredType param
                  | param <- params])
  m
  where inspectDim _ AnyDim =
          Nothing
        inspectDim _ (ConstDim _) =
          Nothing
        inspectDim loc (NamedDim name) =
          -- Named dimensions are bound as 32-bit signed integers.
          Just $ Ident name (Info $ Prim $ Signed Int32) loc
-- | Look up the type of a variable, failing if it is unknown or if
-- it has already been consumed.
lookupVar :: VName -> SrcLoc -> TypeM Type
lookupVar name pos = do
  entry <- asks $ HM.lookup name . envVtable
  case entry of
    Just (Bound t)          -> return t
    Just (WasConsumed wloc) -> bad $ UseAfterConsume (baseName name) pos wloc
    Nothing                 -> bad $ UnknownVariableError (baseName name) pos
-- | @t1 `unifyTypes` t2@ attempts to unify @t1@ and @t2@. If
-- unification cannot happen, 'Nothing' is returned, otherwise a type
-- that combines the aliasing of @t1@ and @t2@ is returned. The
-- uniqueness of the resulting type will be the least of the
-- uniqueness of @t1@ and @t2@.
unifyTypes :: Monoid (as vn) =>
              TypeBase Rank as vn
           -> TypeBase Rank as vn
           -> Maybe (TypeBase Rank as vn)
unifyTypes (Prim t1) (Prim t2)
  | t1 == t2 = Just $ Prim t1
  | otherwise = Nothing
unifyTypes (Array at1) (Array at2) =
  Array <$> unifyArrayTypes at1 at2
unifyTypes (Tuple ts1) (Tuple ts2)
  | length ts1 == length ts2 =
    Tuple <$> zipWithM unifyTypes ts1 ts2
unifyTypes _ _ = Nothing

-- | Unify two array types; fails unless the ranks (and, for
-- primitive arrays, the element types) match.  Aliases are merged
-- and the result gets the lesser uniqueness of the two.
unifyArrayTypes :: Monoid (as vn) =>
                   ArrayTypeBase Rank as vn
                -> ArrayTypeBase Rank as vn
                -> Maybe (ArrayTypeBase Rank as vn)
unifyArrayTypes (PrimArray bt1 shape1 u1 als1) (PrimArray bt2 shape2 u2 als2)
  | shapeRank shape1 == shapeRank shape2, bt1 == bt2 =
    Just $ PrimArray bt1 shape1 (u1 <> u2) (als1 <> als2)
unifyArrayTypes (TupleArray et1 shape1 u1) (TupleArray et2 shape2 u2)
  | shapeRank shape1 == shapeRank shape2 =
    TupleArray <$> zipWithM unifyTupleArrayElemTypes et1 et2 <*>
    pure shape1 <*> pure (u1 <> u2)
unifyArrayTypes _ _ =
  Nothing

-- | Unify two element types of a tuple array, recursing through
-- nested arrays and tuples.
unifyTupleArrayElemTypes :: Monoid (as vn) =>
                            TupleArrayElemTypeBase Rank as vn
                         -> TupleArrayElemTypeBase Rank as vn
                         -> Maybe (TupleArrayElemTypeBase Rank as vn)
unifyTupleArrayElemTypes (PrimArrayElem bt1 als1 u1) (PrimArrayElem bt2 als2 u2)
  | bt1 == bt2 = Just $ PrimArrayElem bt1 (als1 <> als2) (u1 <> u2)
  | otherwise = Nothing
unifyTupleArrayElemTypes (ArrayArrayElem at1) (ArrayArrayElem at2) =
  ArrayArrayElem <$> unifyArrayTypes at1 at2
unifyTupleArrayElemTypes (TupleArrayElem ts1) (TupleArrayElem ts2) =
  TupleArrayElem <$> zipWithM unifyTupleArrayElemTypes ts1 ts2
unifyTupleArrayElemTypes _ _ =
  Nothing
-- | Determine if two types are identical, ignoring uniqueness.
-- Causes a 'TypeError' if they fail to match, and otherwise returns
-- one of them.
unifyExpTypes :: Exp -> Exp -> TypeM Type
unifyExpTypes e1 e2 =
  -- Reuse the where-bound types rather than recomputing 'typeOf'.
  maybe (bad $ UnifyError
         (srclocOf e1) (toStructural t1)
         (srclocOf e2) (toStructural t2)) return $
  unifyTypes t1 t2
  where t1 = typeOf e1
        t2 = typeOf e2

-- | All signed integer types.
anySignedType :: [Type]
anySignedType = map (Prim . Signed) [minBound .. maxBound]

-- | All unsigned integer types.
anyUnsignedType :: [Type]
anyUnsignedType = map (Prim . Unsigned) [minBound .. maxBound]

-- | All integer types, signed and unsigned.
anyIntType :: [Type]
anyIntType = anySignedType ++ anyUnsignedType

-- | All floating-point types.
anyFloatType :: [Type]
anyFloatType = map (Prim . FloatType) [minBound .. maxBound]

-- | All numeric types.
anyNumberType :: [Type]
anyNumberType = anyIntType ++ anyFloatType

-- | @require ts e@ causes a 'TypeError' if @typeOf e@ does not unify
-- with one of the types in @ts@. Otherwise, simply returns @e@.
-- This function is very useful in 'checkExp'.
require :: [Type] -> Exp -> TypeM Exp
require ts e
  | any (typeOf e `similarTo`) ts = return e
  | otherwise = bad $ UnexpectedType (srclocOf e)
                (toStructural $ typeOf e) $
                map toStructural ts
-- | Split the leading run of function/type declarations off the
-- front of a declaration list.
--
-- NOTE(review): the accumulator prepends, so the first component is
-- returned in *reverse* source order -- confirm that callers do not
-- rely on declaration order.
chompDecs :: [DecBase NoInfo VName]
          -> ([FunOrTypeDecBase NoInfo VName], [DecBase NoInfo VName])
chompDecs decs = f ([], decs)
  where f (foo , FunOrTypeDec dec : xs ) = f (dec:foo , xs)
        f (foo , bar) = (foo, bar)
-- | Extend the current scope with the given function and type
-- declarations, expanding type aliases in function signatures.
buildScopeFromDecs :: [FunOrTypeDecBase NoInfo VName]
                   -> TypeM Scope
buildScopeFromDecs [] = ask
buildScopeFromDecs decs = do
  scope <- ask
  scope' <- buildTAtable scope
  buildFtable scope'
  where
    -- To build the ftable we loop through the list of function
    -- definitions. In addition to the normal ftable information
    -- (name, return type, argument types), we also keep track of
    -- position information, in order to report both locations of
    -- duplicate function definitions. The position information is
    -- removed at the end.
    buildFtable scope = do
      ftable' <- HM.map rmLoc <$>
                 foldM (expandFun scope) (HM.map addLoc $ envFtable scope) (mapMaybe (isFun . FunOrTypeDec) decs)
      return $ scope {envFtable = ftable'}
    -- Extend the type-alias table from the type declarations.
    buildTAtable = typeAliasTableFromProg (mapMaybe (isType . FunOrTypeDec) decs)
    -- Add one function to the table, expanding type aliases in its
    -- return and parameter types via the current scope.
    expandFun scope fntable (FunDef _ (name,_) (TypeDecl ret NoInfo) args _ pos) = do
      let argtypes = map paramDeclaredType args
          (prefixes, _) = envBreadcrumb scope
          look tname tloc =
            maybe (throwError $ UndefinedQualName tloc tname) return $
            typeFromScope tname scope
      ret' <- expandType look ret
      argtypes' <- mapM (expandType look) argtypes
      return $ HM.insert name ( (prefixes, name) , ret' , argtypes' , pos) fntable
    rmLoc (longname, ret,args,_) = (longname, ret, args)
    addLoc (longname, t, ts) = (longname, t, ts, noLoc)

-- | Type check a program containing arbitrary no information,
-- yielding either a type error or a program with complete type
-- information.
checkProg :: UncheckedProg -> Either TypeError (Prog, VNameSource)
checkProg prog = do
  checkedProg <- runTypeM initialScope src $ Prog <$> checkProg' (progDecs prog')
  return $ flattenProgFunctions checkedProg
  where
    (prog', src) = tagProg' blankNameSource prog

-- | Check a list of top-level declarations, after verifying that
-- there are no duplicate definitions among them.
checkProg' :: [DecBase NoInfo VName] -> TypeM [DecBase Info VName]
checkProg' decs = do
  checkForDuplicateDecs decs
  (_, decs') <- checkDecs decs
  return decs'
-- | Fail with 'DupDefinitionError' if any two declarations of the
-- same category (function, type, signature, module) share a name.
checkForDuplicateDecs :: [DecBase NoInfo VName] -> TypeM ()
checkForDuplicateDecs =
  foldM_ f mempty
  where
    -- Record @name@ under category @cat@, failing if it is already
    -- present.  (This replaces four copies of the same case.)
    seen cat name loc known =
      case HM.lookup (name, cat) known of
        Just loc' -> bad $ DupDefinitionError name loc loc'
        Nothing   -> return $ HM.insert (name, cat) loc known
    f known (FunOrTypeDec (FunDec (FunDef _ (name,_) _ _ _ loc))) =
      seen "function" name loc known
    f known (FunOrTypeDec (TypeDec (TypeDef name _ loc))) =
      seen "type" name loc known
    f known (SigDec (SigDef name _ loc)) =
      seen "signature" name loc known
    f known (ModDec (ModDef name _ loc)) =
      seen "module" name loc known
-- | Check a module definition, returning the scope built from its
-- declarations together with the checked definition.
checkMod :: ModDefBase NoInfo VName -> TypeM (Scope , ModDefBase Info VName)
checkMod (ModDef name decs loc) =
  local (`addBreadcrumb` name) $ do
    checkForDuplicateDecs decs
    (scope, decs') <- checkDecs decs
    return (scope, ModDef name decs' loc)

-- | Check a list of declarations, threading the scope built from
-- earlier declarations through the checking of later ones.
checkDecs :: [DecBase NoInfo VName] -> TypeM (Scope, [DecBase Info VName])
checkDecs (ModDec modd:rest) = do
  (modscope, modd') <- checkMod modd
  local (addModule modscope) $
    do
      (scope, rest') <- checkDecs rest
      return (scope, ModDec modd' : rest' )
-- Signatures introduce no bindings here, so they are skipped.
checkDecs (SigDec _:rest) = checkDecs rest
checkDecs [] = do
  scope <- ask
  return (scope, [])
checkDecs decs = do
  let (funOrTypeDecs, rest) = chompDecs decs
  scopeFromFunOrTypeDecs <- buildScopeFromDecs funOrTypeDecs
  local (const scopeFromFunOrTypeDecs) $ do
    checkedeDecs <- checkFunOrTypeDec funOrTypeDecs
    (scope, rest') <- checkDecs rest
    return (scope , checkedeDecs ++ rest')

-- | Check a run of function/type declarations.  Type declarations
-- were already handled while building the scope, so only function
-- definitions contribute output.
checkFunOrTypeDec :: [FunOrTypeDecBase NoInfo VName] -> TypeM [DecBase Info VName]
checkFunOrTypeDec (FunDec fundef:decs) = do
  fundef' <- checkFun fundef
  decs' <- checkFunOrTypeDec decs
  return $ FunOrTypeDec (FunDec fundef') : decs'
checkFunOrTypeDec (TypeDec _:decs) = checkFunOrTypeDec decs
checkFunOrTypeDec [] = return []
-- | A function table containing only the built-in functions.
initialFtable :: HM.HashMap Name FunBinding
initialFtable = HM.fromList
  [ (name, (([], name), Prim t, map Prim ts))
  | (name, (t, ts)) <- HM.toList builtInFunctions ]
-- | Type-check a single function definition: check the declared
-- return type and the parameters, check the body with the parameters
-- in scope, and verify that the body's type is a subtype of the
-- declared return type.  Also enforces the aliasing restrictions on
-- unique return values (see 'checkReturnAlias' below).
checkFun :: FunDefBase NoInfo VName -> TypeM FunDef
checkFun (FunDef entry fullname@(fname,_) rettype params body loc) = do
  rettype' <- checkTypeDecl rettype
  let rettype_structural = toStructural $ unInfo $ expandedType rettype'
  params' <- checkParams
  body' <- bindingParams params' $ do
    checkRetType loc $ unInfo $ expandedType rettype'
    checkExp body
  checkReturnAlias rettype_structural params' $ typeOf body'
  if toStructural (typeOf body') `subtypeOf` rettype_structural then
    return $ FunDef entry fullname rettype' params' body' loc
  else bad $ ReturnTypeError loc fname rettype_structural $ toStructural $ typeOf body'
  where
    -- Check all parameters for duplicate names, then validate their
    -- shape annotations.
    checkParams = do
      -- First find all normal parameters (checking for duplicates).
      params1' <- foldM checkNormParams [] params
      -- Then check shape annotations (where duplicates are OK, as
      -- long as it's not a duplicate of a normal parameter.)
      mapM_ checkDimDecls params1'
      return $ reverse params1'
    checkNormParams knownparams param
      | paramName param `elem` map paramName knownparams =
        bad $ DupParamError fname (baseName $ paramName param) loc
      | otherwise = do
        -- For now, the expanded type is the same as the declared type.
        param' <- checkParam param
        return $ param' : knownparams
    -- A dimension declaration may not reuse the name of a normal
    -- parameter.
    checkDimDecls param
      | Just name <- find (`elem` map paramName params) boundDims =
        bad $ DupParamError fname (baseName name) loc
      | otherwise =
        return ()
      where boundDims = mapMaybe boundDim $ nestedDims $ paramType param
            boundDim (NamedDim name) = Just name
            boundDim _ = Nothing
    -- Fail if any of the given names is a non-unique parameter.
    notAliasingParam params' names =
      forM_ params' $ \p ->
        when (not (unique $ paramType p) &&
              paramName p `HS.member` names) $
          bad $ ReturnAliased fname (baseName $ paramName p) loc
    -- | Check that unique return values do not alias a
    -- non-consumed parameter.
    checkReturnAlias rettp params' =
      foldM_ (checkReturnAlias' params') HS.empty . returnAliasing rettp
    -- Unique return components may not alias anything already
    -- returned, nor a non-unique parameter.
    checkReturnAlias' params' seen (Unique, names)
      | any (`HS.member` HS.map snd seen) $ HS.toList names =
        bad $ UniqueReturnAliased fname loc
      | otherwise = do
        notAliasingParam params' names
        return $ seen `HS.union` tag Unique names
    checkReturnAlias' _ seen (Nonunique, names)
      | any (`HS.member` seen) $ HS.toList $ tag Unique names =
        bad $ UniqueReturnAliased fname loc
      | otherwise = return $ seen `HS.union` tag Nonunique names
    -- Pair every name with a uniqueness tag.
    tag u = HS.map $ \name -> (u, name)
    -- Pair up the uniqueness of each return component with the
    -- aliases of the corresponding body component.
    returnAliasing (Tuple ets1) (Tuple ets2) =
      concat $ zipWith returnAliasing ets1 ets2
    returnAliasing expected got = [(uniqueness expected, aliases got)]
-- | Type-check an expression, annotating it (and its subexpressions)
-- with types.  Checking also records occurrences (observations and
-- consumptions of variables) used by the uniqueness checker.
checkExp :: ExpBase NoInfo VName
         -> TypeM Exp
checkExp (Literal val pos) =
  Literal <$> checkLiteral pos val <*> pure pos
checkExp (TupLit es loc) =
  TupLit <$> mapM checkExp es <*> pure loc
-- An array literal must be non-empty, and all elements must unify to
-- a single common type.
checkExp (ArrayLit es _ loc) = do
  es' <- mapM checkExp es
  -- Find the universal type of the array arguments.
  et <- case es' of
          [] -> bad $ TypeError loc "Empty array literal"
          e:es'' ->
            let check elemt eleme
                  | Just elemt' <- elemt `unifyTypes` typeOf eleme =
                    return elemt'
                  | otherwise =
                    bad $ TypeError loc $ pretty eleme ++ " is not of expected type " ++ pretty elemt ++ "."
            in foldM check (typeOf e) es''
  return $ ArrayLit es' (Info et) loc
checkExp (Empty decl loc) =
  Empty <$> checkTypeDecl decl <*> pure loc
checkExp (BinOp op e1 e2 NoInfo pos) = checkBinOp op e1 e2 pos
-- Unary operators: each requires its operand to have a suitable
-- primitive type.
checkExp (UnOp Not e pos) = do
  e' <- require [Prim Bool] =<< checkExp e
  return $ UnOp Not e' pos
checkExp (UnOp Complement e loc) = do
  e' <- require anyIntType =<< checkExp e
  return $ UnOp Complement e' loc
checkExp (UnOp Negate e loc) = do
  e' <- require anyNumberType =<< checkExp e
  return $ UnOp Negate e' loc
checkExp (UnOp Abs e loc) = do
  e' <- require anyNumberType =<< checkExp e
  return $ UnOp Abs e' loc
checkExp (UnOp Signum e loc) = do
  e' <- require anyIntType =<< checkExp e
  return $ UnOp Signum e' loc
checkExp (UnOp (ToFloat t) e loc) = do
  e' <- require anyNumberType =<< checkExp e
  return $ UnOp (ToFloat t) e' loc
checkExp (UnOp (ToSigned t) e loc) = do
  e' <- require anyNumberType =<< checkExp e
  return $ UnOp (ToSigned t) e' loc
checkExp (UnOp (ToUnsigned t) e loc) = do
  e' <- require anyNumberType =<< checkExp e
  return $ UnOp (ToUnsigned t) e' loc
-- The two branches of a conditional are checked as alternatives;
-- their occurrences are collected and anything consumed in either
-- branch is removed from the aliases of the result type.
checkExp (If e1 e2 e3 _ pos) = do
  e1' <- require [Prim Bool] =<< checkExp e1
  ((e2', e3'), dflow) <- collectOccurences $ checkExp e2 `alternative` checkExp e3
  tell dflow
  brancht <- unifyExpTypes e2' e3'
  let t' = addAliases brancht
          (`HS.difference` allConsumed dflow)
  return $ If e1' e2' e3' (Info t') pos
-- A variable use is recorded as an observation of that variable.
checkExp (Var ident) = do
  ident' <- checkIdent ident
  observe ident'
  return $ Var ident'
-- Function application: look the function up in scope, check the
-- arguments, and compute the return type from the parameter diets
-- and argument types.  'checkFuncall' validates arity/subtyping and
-- records the consumption implied by the diets.
checkExp (Apply fname args _ loc) = do
  bnd <- asks (funFromScope fname)
  case bnd of
    Nothing -> bad $ UnknownFunctionError fname loc
    Just (longname, ftype, paramtypes) -> do
      (args', argflows) <- unzip <$> mapM (\(arg,_) -> (checkArg arg)) args
      let rettype' = returnType (removeShapeAnnotations ftype)
                     (map diet paramtypes) (map typeOf args')
      checkFuncall (Just fname) loc paramtypes ftype argflows
      return $ Apply longname (zip args' $ map diet paramtypes) (Info rettype') loc
-- let-binding: check the bound expression, bind the pattern to its
-- type, then check the body in the extended scope.
checkExp (LetPat pat e body pos) = do
  (e', dataflow) <- collectOccurences $ checkExp e
  (scope, pat') <- checkBinding pat (typeOf e') dataflow
  scope $ do
    body' <- checkExp body
    return $ LetPat pat' e' body' pos
-- let-with (in-place update): the source must be unique, the new
-- value may not alias the source, and the source is marked consumed
-- while the body is checked.
checkExp (LetWith d@(Ident dest _ destpos) src idxes ve body pos) = do
  src' <- checkIdent src
  idxes' <- mapM (require [Prim $ Signed Int32] <=< checkExp) idxes
  let destt' = unInfo (identType src') `setAliases` HS.empty
      dest' = Ident dest (Info destt') destpos
  unless (unique $ unInfo $ identType src') $
    bad $ TypeError pos $ "Source '" ++ pretty (baseName $ identName src) ++
    "' has type " ++ pretty (unInfo $ identType src') ++ ", which is not unique"
  case peelArray (length idxes) (unInfo $ identType src') of
    Nothing -> bad $ IndexingError
               (arrayRank $ unInfo $ identType src') (length idxes) (srclocOf src)
    Just elemt ->
      sequentially (require [elemt] =<< checkExp ve) $ \ve' _ -> do
        when (identName src `HS.member` aliases (typeOf ve')) $
          bad $ BadLetWithValue pos
        (scope, _) <- checkBinding (Id d) destt' mempty
        body' <- consuming src' $ scope $ checkExp body
        return $ LetWith dest' src' idxes' ve' body' pos
-- Indexing: may not index deeper than the rank of the array.
checkExp (Index e idxes pos) = do
  e' <- checkExp e
  let vt = typeOf e'
  when (arrayRank vt < length idxes) $
    bad $ IndexingError (arrayRank vt) (length idxes) pos
  idxes' <- mapM (require [Prim $ Signed Int32] <=< checkExp) idxes
  return $ Index e' idxes' pos
-- Tuple projection: the index must be within the tuple's arity.
checkExp (TupleIndex e i NoInfo loc) = do
  e' <- checkExp e
  case typeOf e' of
    Tuple ts | t:_ <- drop i ts -> return $ TupleIndex e' i (Info t) loc
    _ -> bad $ InvalidField loc (typeOf e') (show i)
checkExp (Iota e pos) = do
  e' <- require [Prim $ Signed Int32] =<< checkExp e
  return $ Iota e' pos
-- 'size i e' is only valid when e is an array with more than i
-- dimensions.
checkExp (Size i e pos) = do
  e' <- checkExp e
  case typeOf e' of
    Array {}
      | i >= 0 && i < arrayRank (typeOf e') ->
        return $ Size i e' pos
      | otherwise ->
        bad $ TypeError pos $ "Type " ++ pretty (typeOf e') ++ " has no dimension " ++ show i ++ "."
    _        -> bad $ TypeError pos "Argument to size must be array."
checkExp (Replicate countexp valexp pos) = do
  countexp' <- require [Prim $ Signed Int32] =<< checkExp countexp
  valexp' <- checkExp valexp
  return $ Replicate countexp' valexp' pos
checkExp (Reshape shapeexps arrexp pos) = do
  shapeexps' <- mapM (require [Prim $ Signed Int32] <=< checkExp) shapeexps
  arrexp' <- checkExp arrexp
  return (Reshape shapeexps' arrexp' pos)
-- The given permutation must be a permutation of [0..rank-1].
checkExp (Rearrange perm arrexp pos) = do
  arrexp' <- checkExp arrexp
  let rank = arrayRank $ typeOf arrexp'
  when (length perm /= rank || sort perm /= [0..rank-1]) $
    bad $ PermutationError pos perm rank name
  return $ Rearrange perm arrexp' pos
  where name = case arrexp of Var v -> Just $ baseName $ identName v
                              _     -> Nothing
checkExp (Transpose arrexp pos) = do
  arrexp' <- checkExp arrexp
  when (arrayRank (typeOf arrexp') /= 2) $
    bad $ TypeError pos "Argument to transpose is not two-dimensional array."
  return $ Transpose arrexp' pos
-- Rotation along dimension d requires the array to have more than d
-- dimensions.
checkExp (Rotate d offexp arrexp loc) = do
  arrexp' <- checkExp arrexp
  offexp' <- require [Prim $ Signed Int32] =<< checkExp offexp
  let rank = arrayRank (typeOf arrexp')
  when (rank <= d) $
    bad $ TypeError loc $ "Attempting to rotate dimension " ++ show d ++
    " of array " ++ pretty arrexp ++
    " which has only " ++ show rank ++ " dimensions."
  return $ Rotate d offexp' arrexp' loc
-- All zipped expressions must be arrays.
checkExp (Zip arrexps loc) = do
  arrexps' <- mapM (checkExp . fst) arrexps
  arrts <- forM arrexps' $ \arrexp -> do
    let arrt = typeOf arrexp
    when (arrayRank arrt < 1) $
      bad $ TypeError (srclocOf arrexp) $
      "Type of expression is not array, but " ++ pretty arrt ++ "."
    return arrt
  return $ Zip (zip arrexps' $ map Info arrts) loc
-- Unzipping an array of tuples yields one array per tuple component;
-- each component array inherits the shape of the original and at
-- least its uniqueness.
checkExp (Unzip e _ pos) = do
  e' <- checkExp e
  case typeOf e' of
    Array (TupleArray ets shape u) ->
      let componentType et =
            let et' = tupleArrayElemToType et
                u' = max u $ tupleArrayElemUniqueness et
            in arrayOf et' shape u'
      in return $ Unzip e' (map (Info . componentType) ets) pos
    t ->
      bad $ TypeError pos $
      "Argument to unzip is not an array of tuples, but " ++
      pretty t ++ "."
checkExp (Unsafe e loc) =
  Unsafe <$> checkExp e <*> pure loc
-- SOACs: in each case the array argument is checked with
-- 'checkSOACArrayArg', which peels off one array dimension to obtain
-- the element type passed to the lambda.
checkExp (Map fun arrexp pos) = do
  (arrexp', arg) <- checkSOACArrayArg arrexp
  fun' <- checkLambda fun [arg]
  return (Map fun' arrexp' pos)
-- For reduce, both the initial value and the array elements must be
-- subtypes of the reduction operator's return type.
checkExp (Reduce comm fun startexp arrexp pos) = do
  (startexp', startarg) <- checkArg startexp
  (arrexp', arrarg) <- checkSOACArrayArg arrexp
  fun' <- checkLambda fun [startarg, arrarg]
  let redtype = lambdaReturnType fun'
  unless (typeOf startexp' `subtypeOf` redtype) $
    bad $ TypeError pos $ "Initial value is of type " ++ pretty (typeOf startexp') ++ ", but reduce function returns type " ++ pretty redtype ++ "."
  unless (argType arrarg `subtypeOf` redtype) $
    bad $ TypeError pos $ "Array element value is of type " ++ pretty (argType arrarg) ++ ", but reduce function returns type " ++ pretty redtype ++ "."
  return $ Reduce comm fun' startexp' arrexp' pos
-- Scan is checked like reduce.
checkExp (Scan fun startexp arrexp pos) = do
  (startexp', startarg) <- checkArg startexp
  (arrexp', arrarg@(inrowt, _, _)) <- checkSOACArrayArg arrexp
  fun' <- checkLambda fun [startarg, arrarg]
  let scantype = lambdaReturnType fun'
  unless (typeOf startexp' `subtypeOf` scantype) $
    bad $ TypeError pos $ "Initial value is of type " ++ pretty (typeOf startexp') ++ ", but scan function returns type " ++ pretty scantype ++ "."
  unless (inrowt `subtypeOf` scantype) $
    bad $ TypeError pos $ "Array element value is of type " ++ pretty inrowt ++ ", but scan function returns type " ++ pretty scantype ++ "."
  return $ Scan fun' startexp' arrexp' pos
-- The filter predicate sees a non-unique element (it may not consume
-- it) and must return bool.
checkExp (Filter fun arrexp pos) = do
  (arrexp', (rowelemt, argflow, argloc)) <- checkSOACArrayArg arrexp
  let nonunique_arg = (rowelemt `setUniqueness` Nonunique,
                       argflow, argloc)
  fun' <- checkLambda fun [nonunique_arg]
  when (lambdaReturnType fun' /= Prim Bool) $
    bad $ TypeError pos "Filter function does not return bool."
  return $ Filter fun' arrexp' pos
-- Partition is filter with several predicates.
checkExp (Partition funs arrexp pos) = do
  (arrexp', (rowelemt, argflow, argloc)) <- checkSOACArrayArg arrexp
  let nonunique_arg = (rowelemt `setUniqueness` Nonunique,
                       argflow, argloc)
  funs' <- forM funs $ \fun -> do
    fun' <- checkLambda fun [nonunique_arg]
    when (lambdaReturnType fun' /= Prim Bool) $
      bad $ TypeError (srclocOf fun') "Partition function does not return bool."
    return fun'
  return $ Partition funs' arrexp' pos
-- | Type-check a stream SOAC.  The lambda must be an anonymous
-- function (the second equation rejects anything else).  We check the
-- input array and, for sequential/reduction-like forms, the
-- accumulator; check the lambda both with the real arguments and with
-- fake, non-aliasing arguments to detect uses of the input array's
-- aliases inside the lambda; and enforce the shape restrictions on
-- the lambda's chunked array parameter and result arrays.
checkExp (Stream form lam@(AnonymFun lam_ps _ (TypeDecl lam_rtp NoInfo) _) arr pos) = do
  lam_ps' <- mapM checkParam lam_ps
  let isArrayType arrtp =
        case arrtp of
          Array _ -> True
          _       -> False
  let isArrayType' arrtp =
        case arrtp of
          UserUnique t _ -> isArrayType' t
          UserArray{}    -> True
          _              -> False
  let lit_int0 = Literal (PrimValue $ SignedValue $ Int32Value 0) pos
  [(_, intarg),(arr',arrarg)] <- mapM checkArg [lit_int0, arr]
  -- arr must have an array type
  unless (isArrayType $ typeOf arr') $
    bad $ TypeError pos $ "Stream with input array of non-array type " ++ pretty (typeOf arr') ++ "."
  -- typecheck stream's lambdas
  (form', macctup) <-
    case form of
      MapLike o -> return (MapLike o, Nothing)
      RedLike o comm lam0 acc -> do
        (acc',accarg) <- checkArg acc
        lam0' <- checkLambda lam0 [accarg, accarg]
        let redtype = lambdaReturnType lam0'
        unless (typeOf acc' `subtypeOf` redtype) $
            bad $ TypeError pos $ "Stream's reduce fun: Initial value is of type " ++
                  pretty (typeOf acc') ++ ", but reduce fun returns type "++pretty redtype++"."
        return (RedLike o comm lam0' acc', Just(acc',accarg))
      Sequential acc -> do
        (acc',accarg) <- checkArg acc
        return (Sequential acc', Just(acc',accarg))
  -- (i) properly check the lambda on its parameter and
  --(ii) make some fake arguments, which do not alias `arr', and
  --     check that aliases of `arr' are not used inside lam.
  let fakearg = (typeOf arr' `setAliases` HS.empty, mempty, srclocOf pos)
      (aas,faas) = case macctup of
                     Nothing        -> ([intarg, arrarg],         [intarg, fakearg])
                     Just(_,accarg) -> ([intarg, accarg, arrarg], [intarg, accarg, fakearg])
  lam' <- checkLambda lam aas
  (_, dflow) <- collectOccurences $ checkLambda lam faas
  let arr_aliasses = HS.toList $ aliases $ typeOf arr'
  let usages = usageMap dflow
  when (any (`HM.member` usages) arr_aliasses) $
     bad $ TypeError pos "Stream with input array used inside lambda."
  -- check that the result type of lambda matches the accumulator part
  _ <- case macctup of
        Just (acc',_) ->
          case lambdaReturnType lam' of
            Tuple (acctp:_) ->
              unless (typeOf acc' `subtypeOf` removeShapeAnnotations acctp) $
              bad $ TypeError pos ("Stream with accumulator-type mismatch"++
                                   " or result arrays of non-array type.")
            rtp' -> unless (typeOf acc' `subtypeOf` removeShapeAnnotations rtp') $
                    bad $ TypeError pos "Stream with accumulator-type mismatch."
        Nothing -> return ()
  -- check outerdim of Lambda's streamed-in array params are NOT specified,
  -- and that return type inner dimens are all specified but not as other
  -- lambda parameters!
  (chunk,lam_arr_tp) <- case macctup of
        -- With an accumulator, the lambda takes the chunk size, the
        -- accumulator and the chunked array.
        Just _ -> case lam_ps' of
                    [ch,_,arrpar] -> return (paramName ch,
                                             paramType arrpar)
                    _ -> bad $ TypeError pos "Stream's lambda should have three args."
        -- Without an accumulator, it takes only the chunk size and
        -- the chunked array.
        Nothing -> case lam_ps' of
                    [ch, arrpar] -> return (paramName ch,
                                            paramType arrpar)
                    _ -> bad $ TypeError pos "Stream's lambda should have two args."
  let outer_dims = arrayDims lam_arr_tp
  -- NOTE(review): assumes lam_arr_tp is an array type so that
  -- arrayDims is non-empty; 'head' would fail otherwise.
  _ <- case head outer_dims of
        AnyDim     -> return ()
        NamedDim _ -> return ()
        ConstDim _ -> bad $ TypeError pos ("Stream: outer dimension of stream should NOT"++
                                           " be specified since it is " ++ pretty chunk ++ " by default.")
  _ <- case lam_rtp of
        UserTuple res_tps _ -> do
          let res_arr_tps = tail res_tps
          if all isArrayType' res_arr_tps
          then do let lam_params = HS.fromList $ map paramName lam_ps'
                      arr_iner_dims = concatMap (tail . arrayDims') res_arr_tps
                      boundDim (NamedDim name) = return $ Just name
                      boundDim (ConstDim  _  ) = return Nothing
                      boundDim _               =
                        bad $ TypeError pos $ "Stream's lambda: inner dimensions of the"++
                                              " streamed-result arrays MUST be specified!"
                  rtp_iner_syms <- catMaybes <$> mapM boundDim arr_iner_dims
                  case find (`HS.member` lam_params) rtp_iner_syms of
                    Just name -> bad $ TypeError pos $
                                  "Stream's lambda: " ++ pretty (baseName name) ++
                                  " cannot specify a variant inner result shape"
                    _ -> return ()
          else bad $ TypeError pos "Stream with result arrays of non-array type."
        _ -> return () -- means that no array is streamed out!
  -- finally return type-checked stream!
  return $ Stream form' lam' arr' pos
-- A stream whose lambda is not an anonymous function is rejected.
checkExp (Stream _ _ _ pos) =
  bad $ TypeError pos "Stream with lambda NOT an anonymous function!!!!"
-- Splitting along dimension i requires the array to have more than i
-- dimensions.
checkExp (Split i splitexps arrexp loc) = do
  splitexps' <- mapM (require [Prim $ Signed Int32] <=< checkExp) splitexps
  arrexp' <- checkExp arrexp
  let t = typeOf arrexp'
  when (arrayRank t <= i) $
    bad $ TypeError loc $ "Cannot split array " ++ pretty arrexp'
    ++ " of type " ++ pretty t
    ++ " across dimension " ++ pretty i ++ "."
  return $ Split i splitexps' arrexp' loc
-- All concatenated arrays must have the type of the first one, and
-- enough dimensions to be concatenated along dimension i.
checkExp (Concat i arr1exp arr2exps loc) = do
  arr1exp' <- checkExp arr1exp
  arr2exps' <- mapM (require [typeOf arr1exp'] <=< checkExp) arr2exps
  mapM_ ofProperRank arr2exps'
  return $ Concat i arr1exp' arr2exps' loc
  where ofProperRank e
          | arrayRank t <= i =
              bad $ TypeError loc $ "Cannot concat array " ++ pretty e
              ++ " of type " ++ pretty t
              ++ " across dimension " ++ pretty i ++ "."
          | otherwise = return ()
          where t = typeOf e
checkExp (Copy e pos) = do
  e' <- checkExp e
  return $ Copy e' pos
-- | A do-loop.  The merge pattern is checked against the initial
-- merge expression, the loop body is checked (with uniqueness
-- restrictions suspended via 'noUnique'), merge parameters consumed
-- by the body are re-marked unique, and a number of aliasing
-- restrictions on the loop result are enforced before the let-body is
-- checked with the merge pattern in scope.
checkExp (DoLoop mergepat mergeexp form loopbody letbody loc) = do
  -- First we do a basic check of the loop body to figure out which of
  -- the merge parameters are being consumed. For this, we first need
  -- to check the merge pattern, which requires the (initial) merge
  -- expression.
  ((mergeexp', bindExtra), mergeflow) <-
    collectOccurences $ do
      mergeexp' <- checkExp mergeexp
      return $
        case form of
          For _ _ (Ident loopvar _ _) _ ->
            let iparam = Ident loopvar (Info $ Prim $ Signed Int32) loc
            in (mergeexp', [iparam])
          While _ ->
            (mergeexp', [])
  -- Check the loop body.
  (firstscope, mergepat') <- checkBinding mergepat (typeOf mergeexp') mempty
  ((form', loopbody'), bodyflow) <-
    noUnique $ firstscope $ binding bindExtra $ collectOccurences $
    case form of
      For dir lboundexp (Ident loopvar _ loopvarloc) uboundexp -> do
        lboundexp' <- require [Prim $ Signed Int32] =<< checkExp lboundexp
        uboundexp' <- require [Prim $ Signed Int32] =<< checkExp uboundexp
        loopbody' <- checkExp loopbody
        return (For dir lboundexp' (Ident loopvar (Info $ Prim $ Signed Int32) loopvarloc) uboundexp',
                loopbody')
      While condexp -> do
        -- The condition is evaluated before each iteration, so its
        -- occurrences are sequenced before the body's.
        (condexp', condflow) <-
          collectOccurences $ require [Prim Bool] =<< checkExp condexp
        (loopbody', bodyflow) <-
          collectOccurences $ checkExp loopbody
        occur $ condflow `seqOccurences` bodyflow
        return (While condexp',
                loopbody')
  let consumed_merge = patNameSet mergepat' `HS.intersection`
                       allConsumed bodyflow
      uniquePat (Wildcard (Info t) wloc) =
        Wildcard (Info $ t `setUniqueness` Nonunique) wloc
      uniquePat (Id (Ident name (Info t) iloc))
        | name `HS.member` consumed_merge =
            Id $ Ident name (Info $ t `setUniqueness` Unique `setAliases` mempty) iloc
        | otherwise =
            let t' = case t of Tuple{} -> t
                               _       -> t `setUniqueness` Nonunique
            in Id $ Ident name (Info t') iloc
      uniquePat (TuplePattern pats ploc) =
        TuplePattern (map uniquePat pats) ploc
      -- Make the pattern unique where needed.
      mergepat'' = uniquePat mergepat'
  -- Now check that the loop returned the right type.
  unless (typeOf loopbody' `subtypeOf` patternType mergepat'') $
    bad $ UnexpectedType (srclocOf loopbody')
          (toStructural $ typeOf loopbody')
          [toStructural $ patternType mergepat'']
  -- Check that the new values of consumed merge parameters do not
  -- alias something bound outside the loop, AND that anything
  -- returned for a unique merge parameter does not alias anything
  -- else returned.
  bound_outside <- asks $ HS.fromList . HM.keys . envVtable
  let checkMergeReturn (Id ident) t
        | unique $ unInfo $ identType ident,
          v:_ <- HS.toList $ aliases t `HS.intersection` bound_outside =
            lift $ bad $ TypeError loc $ "Loop return value corresponding to merge parameter " ++
            pretty (identName ident) ++ " aliases " ++ pretty v ++ "."
        | otherwise = do
            (cons,obs) <- get
            unless (HS.null $ aliases t `HS.intersection` cons) $
              lift $ bad $ TypeError loc $ "Loop return value for merge parameter " ++
              pretty (identName ident) ++ " aliases other consumed merge parameter."
            when (unique (unInfo $ identType ident) &&
                  not (HS.null (aliases t `HS.intersection` (cons<>obs)))) $
              lift $ bad $ TypeError loc $ "Loop return value for consuming merge parameter " ++
              pretty (identName ident) ++ " aliases previously returned value." ++ show (aliases t, cons, obs)
            if unique (unInfo $ identType ident)
              then put (cons<>aliases t, obs)
              else put (cons, obs<>aliases t)
      checkMergeReturn (TuplePattern pats _) (Tuple ts) =
        zipWithM_ checkMergeReturn pats ts
      checkMergeReturn _ _ =
        return ()
  evalStateT (checkMergeReturn mergepat'' $ typeOf loopbody') (mempty, mempty)
  -- Unique merge parameters consume the corresponding component of
  -- the initial merge expression.
  let consumeMerge (Id (Ident _ (Info pt) ploc)) mt
        | unique pt = consume ploc $ aliases mt
      consumeMerge (TuplePattern pats _) (Tuple ts) =
        zipWithM_ consumeMerge pats ts
      consumeMerge _ _ =
        return ()
  ((), merge_consume) <-
    collectOccurences $ consumeMerge mergepat'' $ typeOf mergeexp'
  occur $ mergeflow `seqOccurences` merge_consume
  binding (patIdents mergepat'') $ do
    letbody' <- checkExp letbody
    return $ DoLoop mergepat'' mergeexp'
                    form'
                    loopbody' letbody' loc
-- | A write (scatter) expression.  The index array must consist of
-- signed 32-bit integers, the value array(s) must match the I/O
-- array(s) element-wise in primitive type and rank, and the I/O
-- arrays must all be unique (they are consumed).
checkExp (Write is vs as pos) = do
  is' <- checkExp is
  vs' <- checkExp vs
  (as', aflows) <- unzip <$> mapM (collectOccurences . checkExp) as
  checkWriteIndexes $ typeOf is'
  let ats = map typeOf as'
  let avbad = bad $ TypeError pos "Write value arrays and I/O arrays do not have the same type"
  case as' of
    [a] -> void $ unifyExpTypes vs' a
    _ -> case typeOf vs' of
      Array (TupleArray primElems (Rank rankP) _) ->
        forM_ (zip ats primElems) $ \(at, p) -> case (at, p) of
          (Array (PrimArray ptA (Rank rankA) _ _),
           PrimArrayElem ptP _ _) ->
            unless (rankP == rankA && ptP == ptA) avbad
          _ -> avbad
      Tuple primElems ->
        forM_ (zip ats primElems) $ \(at, p) -> case (at, p) of
          (Array (PrimArray ptA (Rank rankA) _ _),
           Array (PrimArray ptP (Rank rankP) _ _)) ->
            unless (rankP == rankA && ptP == ptA) avbad
          _ -> avbad
      _ -> avbad
  -- The destination arrays are consumed.
  if all unique ats
    then forM_ (zip aflows ats) $ \(aflow, at) ->
           occur $ aflow `seqOccurences` [consumption (aliases at) pos]
    else bad $ TypeError pos $ "Write sources '" ++
         intercalate ", " (map pretty as') ++
         "' have types " ++ intercalate ", " (map pretty ats) ++
         ", which are not all unique."
  return (Write is' vs' as' pos)
  -- FIXME: This code is a bit messy.
  where checkWriteIndexes it = case it of
          Array (PrimArray (Signed Int32) (Rank 1) _uniqueness _annotations) ->
            return ()
          Array (TupleArray exps (Rank 1) _uniqueness) ->
            forM_ exps $ \e -> case e of
              PrimArrayElem (Signed Int32) _ _ ->
                return ()
              _ -> widxbad
          Tuple exps ->
            forM_ exps $ \e -> case e of
              Array (PrimArray (Signed Int32) (Rank 1) _ _) ->
                return ()
              _ -> widxbad
          _ -> widxbad
        widxbad = bad $ TypeError pos "the indexes array of write must consist only of signed 32-bit ints"
-- | Like 'checkArg', but additionally peels one array dimension off
-- the argument's type, so the result is the element ('row') type seen
-- by a SOAC's lambda.  Fails if the argument is not an array.
checkSOACArrayArg :: ExpBase NoInfo VName
                  -> TypeM (Exp, Arg)
checkSOACArrayArg e = do
  (e', (argt, dflow, argloc)) <- checkArg e
  case peelArray 1 argt of
    Just rowt -> return (e', (rowt, dflow, argloc))
    Nothing   -> bad $ TypeError argloc "SOAC argument is not an array"
-- | Check that a literal value is internally consistent; in
-- particular, every element of an array literal must have exactly the
-- row type recorded in the literal.
checkLiteral :: SrcLoc -> Value -> TypeM Value
checkLiteral _ (PrimValue pv) = return $ PrimValue pv
checkLiteral loc (TupValue vs) =
  TupValue <$> mapM (checkLiteral loc) vs
checkLiteral loc (ArrayValue arr rowt) = do
  vs <- mapM (checkLiteral loc) (elems arr)
  case find ((/=rowt) . removeNames . valueType) vs of
    Just offender ->
      bad $ TypeError loc $ pretty offender ++ " is not of expected type " ++ pretty rowt ++ "."
    Nothing -> return ()
  return $ ArrayValue (listArray (bounds arr) vs) rowt
-- | Annotate an identifier with the type recorded for it in the
-- environment; fails if the name is not in scope.
checkIdent :: IdentBase NoInfo VName -> TypeM Ident
checkIdent (Ident name _ loc) =
  Ident name <$> (Info <$> lookupVar name loc) <*> pure loc
-- | Check a parameter by checking its declared type.
checkParam :: ParamBase NoInfo VName
           -> TypeM (ParamBase Info VName)
checkParam (Param name decl loc) = do
  decl' <- checkTypeDecl decl
  return $ Param name decl' loc
-- | Dispatch a binary operator to the appropriate checker:
-- arithmetic and bitwise operators are checked polymorphically over
-- the permitted operand types ('checkPolyBinOp', result type is the
-- operand type), while comparisons go through 'checkRelOp' (result
-- type is bool).
checkBinOp :: BinOp -> ExpBase NoInfo VName -> ExpBase NoInfo VName -> SrcLoc
           -> TypeM Exp
checkBinOp Plus e1 e2 pos = checkPolyBinOp Plus anyNumberType e1 e2 pos
checkBinOp Minus e1 e2 pos = checkPolyBinOp Minus anyNumberType e1 e2 pos
checkBinOp Pow e1 e2 pos = checkPolyBinOp Pow anyNumberType e1 e2 pos
checkBinOp Times e1 e2 pos = checkPolyBinOp Times anyNumberType e1 e2 pos
checkBinOp Divide e1 e2 pos = checkPolyBinOp Divide anyNumberType e1 e2 pos
checkBinOp Mod e1 e2 pos = checkPolyBinOp Mod anyIntType e1 e2 pos
checkBinOp Quot e1 e2 pos = checkPolyBinOp Quot anyIntType e1 e2 pos
checkBinOp Rem e1 e2 pos = checkPolyBinOp Rem anyIntType e1 e2 pos
checkBinOp ShiftR e1 e2 pos = checkPolyBinOp ShiftR anyIntType e1 e2 pos
checkBinOp ZShiftR e1 e2 pos = checkPolyBinOp ZShiftR anyIntType e1 e2 pos
checkBinOp ShiftL e1 e2 pos = checkPolyBinOp ShiftL anyIntType e1 e2 pos
checkBinOp Band e1 e2 pos = checkPolyBinOp Band anyIntType e1 e2 pos
checkBinOp Xor e1 e2 pos = checkPolyBinOp Xor anyIntType e1 e2 pos
checkBinOp Bor e1 e2 pos = checkPolyBinOp Bor anyIntType e1 e2 pos
checkBinOp LogAnd e1 e2 pos = checkPolyBinOp LogAnd [Prim Bool] e1 e2 pos
checkBinOp LogOr e1 e2 pos = checkPolyBinOp LogOr [Prim Bool] e1 e2 pos
checkBinOp Equal e1 e2 pos = checkRelOp Equal anyNumberType e1 e2 pos
checkBinOp NotEqual e1 e2 pos = checkRelOp NotEqual anyNumberType e1 e2 pos
checkBinOp Less e1 e2 pos = checkRelOp Less anyNumberType e1 e2 pos
checkBinOp Leq e1 e2 pos = checkRelOp Leq anyNumberType e1 e2 pos
checkBinOp Greater e1 e2 pos = checkRelOp Greater anyNumberType e1 e2 pos
checkBinOp Geq e1 e2 pos = checkRelOp Geq anyNumberType e1 e2 pos
-- | Check a comparison operator: both operands must be drawn from the
-- given list of permitted types and must unify with each other; the
-- result type is always bool.
checkRelOp :: BinOp -> [Type]
           -> ExpBase NoInfo VName -> ExpBase NoInfo VName -> SrcLoc
           -> TypeM Exp
checkRelOp op legaltypes x y loc = do
  x' <- checkExp x >>= require legaltypes
  y' <- checkExp y >>= require legaltypes
  void $ unifyExpTypes x' y'
  return $ BinOp op x' y' (Info $ Prim Bool) loc
-- | Check an overloaded binary operator: both operands must be drawn
-- from the given list of permitted types; the result type is the
-- unification of the two operand types.
checkPolyBinOp :: BinOp -> [Type]
               -> ExpBase NoInfo VName -> ExpBase NoInfo VName -> SrcLoc
               -> TypeM Exp
checkPolyBinOp op legaltypes x y loc = do
  x' <- checkExp x >>= require legaltypes
  y' <- checkExp y >>= require legaltypes
  unified <- unifyExpTypes x' y'
  return $ BinOp op x' y' (Info unified) loc
-- | Run two checking actions one after the other, giving the second
-- access to the result and occurrences of the first, and record the
-- combined occurrences as sequential composition.
sequentially :: TypeM a -> (a -> Occurences -> TypeM b) -> TypeM b
sequentially firstAction andThen = do
  (x, firstOccs) <- collectOccurences firstAction
  (y, thenOccs)  <- collectOccurences $ andThen x firstOccs
  occur $ firstOccs `seqOccurences` thenOccs
  return y
-- | Check a pattern against the type of the expression bound to it.
-- Returns the annotated pattern together with a scope transformer
-- that brings the pattern's names into scope (after replaying the
-- given dataflow), to be wrapped around the checking of the body.
checkBinding :: PatternBase NoInfo VName -> Type -> Occurences
             -> TypeM (TypeM a -> TypeM a, Pattern)
checkBinding pat et dflow = do
  (pat', idds) <-
    runStateT (checkBinding' pat et) []
  return (\m -> sequentially (tell dflow) (const . const $ binding idds m), pat')
  where -- Walk the pattern and the type together, collecting the
        -- bound identifiers in the state.
        checkBinding' (Id (Ident name NoInfo pos)) t = do
          let t' = typeOf $ Var $ Ident name (Info t) pos
          add $ Ident name (Info t') pos
          return $ Id $ Ident name (Info t') pos
        checkBinding' (TuplePattern pats pos) (Tuple ts)
          | length pats == length ts = do
          pats' <- zipWithM checkBinding' pats ts
          return $ TuplePattern pats' pos
        checkBinding' (Wildcard NoInfo loc) t =
          return $ Wildcard (Info t) loc
        -- Shape mismatch between pattern and type.
        checkBinding' _ _ =
          lift $ bad $ InvalidPatternError
                 (untagPattern errpat) (toStructural et)
                 Nothing $ srclocOf pat
        -- Record a bound identifier, rejecting duplicates within the
        -- same pattern.
        add ident = do
          bnd <- gets $ find (==ident)
          case bnd of
            Nothing -> modify (ident:)
            Just (Ident name _ pos2) ->
              lift $ bad $ DupPatternError (baseName name) (srclocOf ident) pos2
        -- A pattern with known type box (NoInfo) for error messages.
        errpat = rmTypes pat
        rmTypes (Id (Ident name _ pos)) = Id $ Ident name NoInfo pos
        rmTypes (TuplePattern pats pos) = TuplePattern (map rmTypes pats) pos
        rmTypes (Wildcard _ loc) = Wildcard NoInfo loc
-- | True if arguments of the given types may be passed to parameters
-- of the given declared types: the arities must match, and each
-- argument must be a subtype of its parameter (comparing structural
-- types).
validApply :: [StructTypeBase VName] -> [Type] -> Bool
validApply expected got =
  length expected == length got &&
  all (uncurry subtypeOf)
      (zip (map toStructural got) (map toStructural expected))
-- | A checked argument to a function or lambda: its type, the
-- occurrences produced while checking it, and its source location.
type Arg = (Type, Occurences, SrcLoc)

-- | The type component of an 'Arg'.
argType :: Arg -> Type
argType (t, _, _) = t
-- | Check an expression in argument position, capturing the
-- occurrences it produces (instead of recording them immediately) so
-- the caller can sequence them with the parameter diets.
checkArg :: ExpBase NoInfo VName -> TypeM (Exp, Arg)
checkArg argexp = do
  (argexp', occs) <- collectOccurences $ checkExp argexp
  return (argexp', (typeOf argexp', occs, srclocOf argexp'))
-- | Validate a function call: the argument types must be valid for
-- the parameter types ('validApply'), and each argument's occurrences
-- are sequenced with the consumption/observation implied by the
-- corresponding parameter's diet.  Note that the fourth (return type)
-- argument is currently unused.
checkFuncall :: Maybe QualName -> SrcLoc
             -> [StructType] -> StructType -> [Arg]
             -> TypeM ()
checkFuncall fname loc paramtypes _ args = do
  let argts = map argType args
  unless (validApply paramtypes argts) $
    bad $ ParameterMismatch fname loc
          (Right $ map toStructural paramtypes) (map toStructural argts)
  forM_ (zip (map diet paramtypes) args) $ \(d, (t, dflow, argloc)) -> do
    maybeCheckOccurences dflow
    let occurs = consumeArg argloc t d
    occur $ dflow `seqOccurences` occurs
-- | The occurrences produced by passing a value of the given type to
-- a parameter with the given diet: a tuple diet distributes over a
-- tuple type, a consuming parameter consumes the value's aliases, and
-- anything else merely observes them.
consumeArg :: SrcLoc -> Type -> Diet -> [Occurence]
consumeArg argloc (Tuple ets) (TupleDiet ds) =
  concat [ consumeArg argloc et d | (et, d) <- zip ets ds ]
consumeArg argloc t d =
  case d of
    Consume -> [consumption (aliases t) argloc]
    _       -> [observation (aliases t) argloc]
-- | Type-check a lambda applied to arguments of the given types.  For
-- anonymous functions this reuses 'checkFun' on a synthetic named
-- function; when an N-parameter lambda is applied to a single N-tuple
-- argument, a shim is generated that unpacks the tuple.  Curried
-- functions and operator sections are handled similarly by building
-- synthetic anonymous functions.
checkLambda :: LambdaBase NoInfo VName -> [Arg]
            -> TypeM Lambda
checkLambda (AnonymFun params body ret pos) args = do
  params' <- mapM checkParam params
  case () of
    _ | length params == length args -> do
          FunDef _ _ ret' params'' body' _ <-
            noUnique $ checkFun (FunDef False (nameFromString "<anonymous>", blankLongname) ret params body pos)
          checkFuncall Nothing pos (map paramType params') (unInfo $ expandedType ret') args
          return $ AnonymFun params'' body' ret' pos
      | [(Tuple ets, _, _)] <- args,
        validApply (map paramType params') ets -> do
          -- The function expects N parameters, but the argument is a
          -- single N-tuple whose types match the parameters.
          -- Generate a shim to make it fit.
          FunDef _ _ ret' _ body' _ <-
            noUnique $ checkFun (FunDef False (nameFromString "<anonymous>", blankLongname ) ret params body pos)
          tupident <- newIdent "tup_shim"
                      (Tuple $ map (fromStruct .
                                    removeShapeAnnotations .
                                    paramType) params')
                      pos
          let untype ident = ident { identType = NoInfo }
              paramtype = UserTuple (map paramDeclaredType params) pos
              tupparam = Param (identName tupident) (TypeDecl paramtype NoInfo) $
                         srclocOf tupident
              tupfun = AnonymFun [tupparam] tuplet ret pos
              tuplet = LetPat (TuplePattern (map (Id . untype . fromParam) params') pos)
                       (Var $ untype tupident) body pos
          _ <- checkLambda tupfun args
          return $ AnonymFun params' body' ret' pos
      | otherwise -> bad $ TypeError pos $ "Anonymous function defined with " ++ show (length params') ++ " parameters, but expected to take " ++ show (length args) ++ " arguments."
-- A curried function: look up the function, then build and check an
-- anonymous function that applies it to the curried arguments plus
-- fresh parameters for the remaining ones.
checkLambda (CurryFun fname curryargexps _ pos) args = do
  (curryargexps', curryargs) <- unzip <$> mapM checkArg curryargexps
  bnd <- asks (funFromScope fname)
  case bnd of
    Nothing -> bad $ UnknownFunctionError fname pos
    Just (longname, rt, paramtypes) -> do
      let rettype' = fromStruct $ removeShapeAnnotations rt
          paramtypes' = map (fromStruct . removeShapeAnnotations) paramtypes
      case () of
        _ | [(Tuple ets, _, _)] <- args,
            validApply paramtypes ets -> do
              -- Same shimming as in the case for anonymous functions.
              let mkparam i t = newIdent ("param_" ++ show i) t pos
              params <- zipWithM mkparam [(0::Int)..] paramtypes'
              paramname <- newIDFromString "x"
              let paramtype = Tuple paramtypes
                  paramtype' = contractTypeBase paramtype
                  tupparam = Param paramname (TypeDecl paramtype' NoInfo) pos
                  tuplet = LetPat (TuplePattern (map (Id . untype) params) pos)
                           (Var $ Ident paramname NoInfo pos) body pos
                  tupfun = AnonymFun [tupparam] tuplet
                           (TypeDecl (contractTypeBase rt) NoInfo) pos
                  body = Apply fname [(Var $ untype param, diet paramt) |
                                      (param, paramt) <- zip params paramtypes']
                         NoInfo pos
              void $ checkLambda tupfun args
              return $ CurryFun longname curryargexps' (Info rettype') pos
          | otherwise -> do
              -- A curried argument may not be consumed by the
              -- function.
              case find (unique . snd) $ zip curryargexps paramtypes of
                Just (e, _) -> bad $ CurriedConsumption fname $ srclocOf e
                _           -> return ()
              let mkparam i t = newParam ("param_" ++ show i) t pos
                  asIdent p = Ident (paramName p) NoInfo $ srclocOf p
              params <- zipWithM mkparam [(0::Int)..] $
                        drop (length curryargs) paramtypes'
              let fun = AnonymFun params body
                        (TypeDecl (contractTypeBase rt) NoInfo) pos
                  body = Apply fname (zip (curryargexps++map (Var . asIdent) params) $
                                      map diet paramtypes)
                         NoInfo pos
              void $ checkLambda fun args
              return $ CurryFun longname curryargexps' (Info rettype') pos
  where untype ident = ident { identType = NoInfo }
-- A unary operator section: check the operator applied to a fresh
-- variable of the argument's type.
checkLambda (UnOpFun unop NoInfo NoInfo loc) [arg] = do
  var <- newIdent "x" (argType arg) loc
  binding [var] $ do
    e <- checkExp (UnOp unop (Var var { identType = NoInfo }) loc)
    return $ UnOpFun unop (Info (argType arg)) (Info (typeOf e)) loc
checkLambda (UnOpFun unop NoInfo NoInfo loc) args =
  bad $ ParameterMismatch (Just $ nameToQualName $ nameFromString $ pretty unop) loc (Left 1) $
  map (toStructural . argType) args
checkLambda (BinOpFun op NoInfo NoInfo NoInfo loc) args =
  checkPolyLambdaOp op [] args loc
-- Binary operator sections with one operand fixed.
checkLambda (CurryBinOpLeft binop x _ _ loc) [arg] =
  checkCurryBinOp CurryBinOpLeft binop x loc arg
checkLambda (CurryBinOpLeft binop _ _ _ loc) args =
  bad $ ParameterMismatch (Just $ nameToQualName $ nameFromString $ pretty binop) loc (Left 1) $
  map (toStructural . argType) args
checkLambda (CurryBinOpRight binop x _ _ loc) [arg] =
  checkCurryBinOp CurryBinOpRight binop x loc arg
checkLambda (CurryBinOpRight binop _ _ _ loc) args =
  bad $ ParameterMismatch (Just $ nameToQualName $ nameFromString $ pretty binop) loc (Left 1) $
  map (toStructural . argType) args
-- | Check a binary operator section with one operand fixed: the
-- fixed expression is checked, then the operator is checked applied
-- to fresh variables standing for both operands.  The first argument
-- is the constructor ('CurryBinOpLeft' or 'CurryBinOpRight') used to
-- rebuild the section.
checkCurryBinOp :: (BinOp -> Exp -> Info Type -> Info (CompTypeBase VName) -> SrcLoc -> b)
                -> BinOp -> ExpBase NoInfo VName -> SrcLoc -> Arg -> TypeM b
checkCurryBinOp f binop x loc arg = do
  x' <- checkExp x
  y <- newIdent "y" (argType arg) loc
  xvar <- newIdent "x" (typeOf x') loc
  binding [y, xvar] $ do
    e <- checkExp (BinOp binop (Var $ untype y) (Var $ untype xvar) NoInfo loc)
    return $ f binop x' (Info $ argType arg) (Info $ typeOf e) loc
  where untype (Ident name _ varloc) = Ident name NoInfo varloc
-- | Check an (possibly partially applied) binary operator used as a
-- lambda.  The curried expressions plus the arguments must provide
-- exactly two operands of equal type (a single pair also works, for
-- autoshimming); the operator is then checked at that type to
-- determine the result type.
checkPolyLambdaOp :: BinOp -> [ExpBase NoInfo VName] -> [Arg]
                  -> SrcLoc -> TypeM Lambda
checkPolyLambdaOp op curryargexps args pos = do
  curryargexpts <- map typeOf <$> mapM checkExp curryargexps
  let argts = [ argt | (argt, _, _) <- args ]
  tp <- case curryargexpts ++ argts of
          [t1, t2] | t1 == t2 -> return t1
          [Tuple [t1,t2]] | t1 == t2 -> return t1 -- For autoshimming.
          l -> bad $ ParameterMismatch (Just fname) pos (Left 2) $ map toStructural l
  xname <- newIDFromString "x"
  yname <- newIDFromString "y"
  let xident t = Ident xname t pos
      yident t = Ident yname t pos
  -- Depending on how many operands are curried, zero, one or two
  -- fresh parameters are needed.
  (x,y,params) <- case curryargexps of
                    [] -> return (Var $ xident NoInfo,
                                  Var $ yident NoInfo,
                                  [xident $ Info tp, yident $ Info tp])
                    [e] -> return (e,
                                   Var $ yident NoInfo,
                                   [yident $ Info tp])
                    (e1:e2:_) -> return (e1, e2, [])
  rettype <- typeOf <$> binding params (checkBinOp op x y pos)
  return $ BinOpFun op (Info tp) (Info tp) (Info rettype) pos
  where fname = nameToQualName $ nameFromString $ pretty op
-- | Verify that a declared return type is well-formed: every dimension
-- declaration it contains is checked via 'checkArrayType'.
checkRetType :: SrcLoc -> StructType -> TypeM ()
checkRetType loc t = case t of
  Tuple ts -> mapM_ (checkRetType loc) ts
  Prim _   -> return ()
  Array at -> checkArrayType loc at
-- | Check the dimension declarations of an array type; tuple arrays
-- additionally have each element type checked.
checkArrayType :: SrcLoc
               -> DeclArrayTypeBase VName
               -> TypeM ()
checkArrayType loc arr = case arr of
  PrimArray _ ds _ _  -> checkDims ds
  TupleArray cts ds _ -> checkDims ds >> mapM_ (checkTupleArrayElem loc) cts
  where checkDims = mapM_ (checkDim loc) . shapeDims
-- | Check the dimension declarations nested inside a tuple-array element.
checkTupleArrayElem :: SrcLoc
                    -> DeclTupleArrayElemTypeBase VName
                    -> TypeM ()
checkTupleArrayElem loc el = case el of
  PrimArrayElem{}    -> return ()
  ArrayArrayElem at  -> checkArrayType loc at
  TupleArrayElem cts -> mapM_ (checkTupleArrayElem loc) cts
-- | A dimension declaration is valid when it is anonymous, a constant,
-- or a name bound to a signed 32-bit integer.
checkDim :: SrcLoc -> DimDecl VName -> TypeM ()
checkDim loc dim = case dim of
  AnyDim        -> return ()
  ConstDim _    -> return ()
  NamedDim name -> do
    t <- lookupVar name loc
    case t of
      Prim (Signed Int32) -> return ()
      _ -> bad $ DimensionNotInteger loc $ baseName name
-- | The (checked) type of a pattern.
patternType :: Pattern -> Type
patternType p = case p of
  Wildcard (Info t) _ -> t
  Id ident            -> unInfo $ identType ident
  TuplePattern pats _ -> Tuple $ map patternType pats
-- | Expand a user-written type into a structural type.  @look@ resolves
-- type-alias names; alias-related errors (undefined, cyclic) originate
-- from it.
expandType :: (Applicative m, MonadError TypeError m) =>
              (QualName -> SrcLoc -> m (StructTypeBase VName))
           -> UserType VName
           -> m (StructTypeBase VName)
expandType look (UserTypeAlias longname loc) =
  look longname loc
expandType _ (UserPrim prim _) =
  return $ Prim prim
expandType look (UserTuple ts _) =
  Tuple <$> mapM (expandType look) ts
expandType look (UserArray t d _) = do
  t' <- expandType look t
  return $ arrayOf t' (ShapeDecl [d]) Nonunique
-- Uniqueness annotations are only meaningful on arrays; anything else
-- is rejected.
expandType look (UserUnique t loc) = do
  t' <- expandType look t
  case t' of
    Array{} -> return $ t' `setUniqueness` Unique
    _ -> throwError $ InvalidUniqueness loc $ toStructural t'
-- | Check a type declaration by expanding it against the type aliases
-- currently in scope; fails with 'UndefinedQualName' for unknown aliases.
checkTypeDecl :: TypeDeclBase NoInfo VName -> TypeM (TypeDeclBase Info VName)
checkTypeDecl (TypeDecl t NoInfo) =
  TypeDecl t . Info <$> expandType look t
  where look longname loc = do
          types <- asks (typeFromScope longname)
          case types of
            Nothing -> throwError $ UndefinedQualName loc longname
            Just namet -> return namet
-- Creating the initial type alias table is done by maintaining a
-- table of the type aliases we have processed (initialised to empty),
-- then initialising every type alias. This ensures we do not waste
-- time processing any alias more than once. The monadic structure is
-- a Reader and a State on top of an Either.
-- Monad for building the alias table: a Reader tracking the set of
-- aliases currently being expanded (for cycle detection) on top of a
-- State holding the scope built so far.
type TypeAliasTableM =
  ReaderT (HS.HashSet QualName) (StateT Scope TypeM)
-- | Process every type definition of the program into the scope's alias
-- table, detecting duplicates and cyclic definitions.
typeAliasTableFromProg :: [TypeDefBase NoInfo VName]
                       -> Scope
                       -> TypeM Scope
typeAliasTableFromProg defs scope = do
  checkForDuplicateTypes defs
  execStateT (runReaderT (mapM_ process defs) mempty) scope
  where
    findDefByName name = find ((==name) . typeAlias) defs
    -- Expand one definition and record the result in the scope, so it
    -- is never processed twice.
    process :: TypeDefBase NoInfo VName
            -> TypeAliasTableM (StructTypeBase VName)
    process (TypeDef name (TypeDecl ut NoInfo) _) = do
      t <- expandType typeOfName ut
      modify $ addType name t
      return t
    -- Resolve a name during expansion: already-processed aliases come
    -- from the scope; a name in the in-progress set is cyclic; a known
    -- but unprocessed definition is processed on demand.
    typeOfName :: QualName -> SrcLoc
               -> TypeAliasTableM (StructTypeBase VName)
    typeOfName (prefixes, name) loc = do
      inside <- ask
      known <- get
      case typeFromScope (prefixes, name) known of
        Just t -> return t
        Nothing
          | (prefixes, name) `HS.member` inside ->
              throwError $ CyclicalTypeDefinition loc name
          | Just def <- findDefByName name ->
              local (HS.insert (prefixes, name)) $ process def
          | otherwise ->
              throwError $ UndefinedAlias loc name
-- | Extend the breadcrumb trail of a scope with a module name.
addBreadcrumb :: Scope -> Name -> Scope
addBreadcrumb scope name =
  scope { envBreadcrumb = (fst (envBreadcrumb scope) ++ [name], name) }

-- | Register a (checked) module scope in the module table of a scope.
addModule :: Scope -> Scope -> Scope
addModule modd scope =
  scope { envModTable = HM.insert (moduleName modd) modd (envModTable scope) }

-- | The name of a module scope, i.e. the last breadcrumb component.
moduleName :: Scope -> Name
moduleName = snd . envBreadcrumb
-- | Follow a chain of module names down from a scope; 'Nothing' if any
-- component is not a known module.
envFromScope :: [Name] -> Scope -> Maybe Scope
envFromScope [] scope = Just scope
envFromScope (n:ns) scope =
  HM.lookup n (envModTable scope) >>= envFromScope ns

-- | Look up a type alias through a qualified name.
typeFromScope :: QualName -> Scope -> Maybe TypeBinding
typeFromScope (prefixes, name) scope =
  envFromScope prefixes scope >>= HM.lookup name . envTAtable

-- | Bind a type alias in the alias table of a scope.
addType :: Name -> StructTypeBase VName -> Scope -> Scope
addType name tp scope =
  scope { envTAtable = HM.insert name tp (envTAtable scope) }

-- | Look up a function binding through a qualified name.
funFromScope :: QualName -> Scope -> Maybe FunBinding
funFromScope (prefixes, name) scope =
  envFromScope prefixes scope >>= HM.lookup name . envFtable
-- | Fail with 'DupTypeAlias' if any alias name occurs more than once;
-- implemented as a strict fold over the set of names seen so far.
checkForDuplicateTypes :: [TypeDefBase NoInfo VName] -> TypeM ()
checkForDuplicateTypes = foldM_ check mempty
  where check seen def
          | name `HS.member` seen =
            throwError $ DupTypeAlias (srclocOf def) name
          | otherwise =
            return $ name `HS.insert` seen
          where name = typeAlias def
-- | Lift every function out of its module nesting, qualifying each
-- function name with the path of modules it was defined under.
flattenProgFunctions :: (Prog, a) -> (Prog, a)
flattenProgFunctions (prog, a) = (Prog funs, a)
  where
    root       = ([], nameFromString "")
    decs       = progDecs prog
    topFuns    = map (giveLongname root) (mapMaybe isFun decs)
    moduleFuns = concatMap (flattenModule root) (mapMaybe isMod decs)
    funs       = map (FunOrTypeDec . FunDec) (topFuns ++ moduleFuns)

-- | Flatten a module under the given qualified prefix, extended with
-- this module's own name.
flattenModule :: QualName -> ModDefBase f vn -> [FunDefBase f vn]
flattenModule longName modd =
  flattenModule' (appendNameToQualName (modName modd) longName) modd

-- | Collect the module's own functions (qualified) followed by the
-- functions of all nested modules.
flattenModule' :: QualName -> ModDefBase f vn -> [FunDefBase f vn]
flattenModule' longname moddefbase =
  map (giveLongname longname) localFuns
    ++ concatMap (flattenModule longname) subMods
  where
    decls     = modDecls moddefbase
    localFuns = mapMaybe isFun decls
    subMods   = mapMaybe isMod decls

-- | Append one more component to the prefix of a qualified name.
appendNameToQualName :: Name -> QualName -> QualName
appendNameToQualName name (prefixes, final) = (prefixes ++ [name], final)

-- | Rewrite a function's name so that it carries its module prefix.
giveLongname :: QualName -> FunDefBase f vn -> FunDefBase f vn
giveLongname (prefixes, _) fundef =
  fundef { funDefName = (qualified, realizedName) }
  where
    (funname, realizedName) = funDefName fundef
    qualified = longnameToName (prefixes, funname)
|
mrakgr/futhark
|
src/Language/Futhark/TypeChecker.hs
|
bsd-3-clause
| 74,352
| 0
| 29
| 20,348
| 23,422
| 11,493
| 11,929
| 1,418
| 47
|
-- #hide
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.GL.Face
-- Copyright : (c) Sven Panne 2002-2005
-- License : BSD-style (see the file libraries/OpenGL/LICENSE)
--
-- Maintainer : sven.panne@aedion.de
-- Stability : provisional
-- Portability : portable
--
-- This is a purely internal module for (un-)marshaling Face.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.GL.Face (
Face(..), marshalFace, unmarshalFace
) where
import Graphics.Rendering.OpenGL.GL.BasicTypes ( GLenum )
--------------------------------------------------------------------------------
data Face =
     Front
   | Back
   | FrontAndBack
   deriving ( Eq, Ord, Show )

-- | Map a 'Face' to its GL enumerant.
marshalFace :: Face -> GLenum
marshalFace Front        = 0x404
marshalFace Back         = 0x405
marshalFace FrontAndBack = 0x408

-- | Inverse of 'marshalFace'; calls 'error' on unknown enumerants.
unmarshalFace :: GLenum -> Face
unmarshalFace x = case x of
   0x404 -> Front
   0x405 -> Back
   0x408 -> FrontAndBack
   _     -> error ("unmarshalFace: illegal value " ++ show x)
|
FranklinChen/hugs98-plus-Sep2006
|
packages/OpenGL/Graphics/Rendering/OpenGL/GL/Face.hs
|
bsd-3-clause
| 1,133
| 0
| 9
| 199
| 194
| 112
| 82
| 19
| 3
|
module Logic.LG where
import qualified Logic.Link as Link
import Logic.Formula
import Logic.ProofStructure
-- Formula language of Lambek Grishin calculus (LG); see Moortgat & Moot 2012, p. 1-2
-- | Formula language of LG: one atomic constructor plus the six binary
-- connectives (three fusion, three fission).
data Formula a = Atom a | -- NP, S, etc.
                 (:*:) (Formula a) (Formula a) | -- ⊗ (Prod)
                 (:\\) (Formula a) (Formula a) | -- \ (LDiv)
                 (://) (Formula a) (Formula a) | -- / (RDiv)
                 (:+:) (Formula a) (Formula a) | -- ⊕ (Sum)
                 (:-\) (Formula a) (Formula a) | -- ⃠ (LSub)
                 (:-/) (Formula a) (Formula a) -- ⊘ (RSub)
                 deriving (Eq, Ord)
-- Pretty-printer using ASCII-bracket renderings of the connectives.
-- NOTE(review): the spacing around the operator strings is uneven
-- (" /", " [x] ", "\\ ", ...); kept as-is since output text is behavior.
instance (Show a) => Show (Formula a) where
    show (Atom a) = show a
    show (a :// b) = "("++(show a) ++ " /" ++ (show b) ++ ")" -- /
    show (a :*: b) = "("++(show a) ++ " [x] " ++ (show b) ++ ")" -- ⊗
    show (a :\\ b) = "("++(show a) ++ "\\ " ++ (show b) ++ ")" -- \
    show (a :-/ b) = "("++(show a) ++ " [/]" ++ (show b) ++ ")" -- ⊘
    show (a :+: b) = "("++(show a) ++ " [+] " ++ (show b) ++ ")" -- ⊕
    show (a :-\ b) = "("++(show a) ++ "[\\] " ++ (show b) ++ ")" -- ⃠
instance (Eq a) => Formulae (Formula a) where
    -- Rules for creating Links from complex formulae. See also M&M 2012, p6
    --- Hypothesis
    -- unfoldHypothesis :: (Eq a) => Formula a -> (Link.LinkType, [Formula a], [Formula a], Formula a)
    ---- Fusion connectives (Hypothesis)
    unfoldHypothesis (a :// b) = (Link.Tensor, [a :// b, b], [a], (0,0)) -- L/
    unfoldHypothesis (a :*: b) = (Link.CoTensor, [a :*: b], [a, b], (0,0)) -- L⊗
    unfoldHypothesis (b :\\ a) = (Link.Tensor, [b :\\ a, b], [a], (0,0)) -- L\
    ---- Fission connectives (Hypothesis)
    unfoldHypothesis (a :-/ b) = (Link.CoTensor, [a :-/ b, b], [a], (0,0)) -- L⊘
    unfoldHypothesis (a :+: b) = (Link.Tensor, [a :+: b], [a, b], (0,0)) -- L⊕
    -- BUG FIX: the premises previously rebuilt the formula with (:\\)
    -- (left division) instead of the matched connective (:-\) (left
    -- subtraction); compare the R⃠ case of unfoldConclusion below.
    unfoldHypothesis (b :-\ a) = (Link.CoTensor, [b, b :-\ a], [a], (0,1)) -- L⃠
    -- We can't unfold atomic formulae
    unfoldHypothesis (Atom a) = (Link.NoLink, [Atom a], [], (0,0))
    ---
    --- Conclusion
    -- unfoldConclusion :: (Eq a) => (Link.LinkType, Formula a, Formula a, Formula a)
    ---- Fusion connectives (Conclusion)
    unfoldConclusion (a :// b) = (Link.CoTensor, [a], [a :// b, b], (1,0)) -- R/
    unfoldConclusion (a :*: b) = (Link.Tensor, [a, b], [a :*: b], (1,0)) -- R⊗
    unfoldConclusion (b :\\ a) = (Link.CoTensor, [a], [b, b :\\ a], (1,1)) -- R\
    ---- Fission connectives (Conclusion)
    unfoldConclusion (a :-/ b) = (Link.Tensor, [a], [a :-/ b, b], (1,0)) -- R⊘
    unfoldConclusion (a :+: b) = (Link.CoTensor, [a, b], [a :+: b], (1,0)) -- R⊕
    unfoldConclusion (b :-\ a) = (Link.Tensor, [a], [b, b :-\ a], (1,1)) -- R⃠
    -- We can't unfold atomic formulae
    unfoldConclusion (Atom a) = (Link.NoLink, [], [Atom a], (1,0))
--- Unfold examples
--unfoldExamples = [
-- unfoldHypothesis ((Atom "A") :// (Atom "B")), -- L/
-- unfoldHypothesis ((Atom "A") :*: (Atom "B")), -- L⊗
-- unfoldHypothesis ((Atom "B") :\\ (Atom "A")), -- L\
-- unfoldHypothesis ((Atom "A") :-/ (Atom "B")), -- L⊘
-- unfoldHypothesis ((Atom "A") :+: (Atom "B")), -- L⊕
-- unfoldHypothesis ((Atom "B") :-\ (Atom "A")), -- L⃠
-- unfoldConclusion ((Atom "A") :// (Atom "B")), -- R/
-- unfoldConclusion ((Atom "A") :*: (Atom "B")), -- R⊗
-- unfoldConclusion ((Atom "B") :\\ (Atom "A")), -- R\
-- unfoldConclusion ((Atom "A") :-/ (Atom "B")), -- R⊘
-- unfoldConclusion ((Atom "A") :+: (Atom "B")), -- R⊕
-- unfoldConclusion ((Atom "B") :-\ (Atom "A"))] -- R⃠
--
|
digitalheir/net-prove
|
src/Logic/LG.hs
|
bsd-3-clause
| 3,466
| 9
| 31
| 747
| 1,216
| 713
| 503
| 35
| 0
|
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Control.Exception.Base (displayException)
import qualified Data.Text as T
import Data.Time
import Data.Version (showVersion)
import Formatting
import Options.Applicative
import Paths_datenverbrauch (version)
import Protolude hiding ((%))
import Text.Printf
import Args
import Persist
import PublishTariff
import QueryTariff
import Types
-- | Parse the command line and dispatch to 'run'.
main :: IO ()
main = execParser opts >>= run
  where opts = info (helper <*> appArgs)
          ( fullDesc
            <> progDesc (toS $ T.unlines
                          ["run it to show the current usage, use '--pub-xxx' switch to publish the values."
                          ,"use $ts$ for the current timestamp in ms epoch and $value$ for the current value in the url."
                          ])
            <> header "datenverbrauch - query internet access data usage")
-- | Either print the version, or perform the tariff query and hand the
-- result to 'evalRes' under the given 'AppConfig'.
run :: AppArgs -> IO ()
run ShowVersion = printf "version: %s\n" (showVersion version)
run (Run ac) = do
  unless (acQuiet ac) printHeader
  res <- runExceptT $ runReaderT queryTariff ac
  runReaderT (evalRes res) ac
  where printHeader = do
          -- startup banner with the local date/time
          tz <- getCurrentTimeZone
          ts <- utcToLocalTime tz <$> getCurrentTime
          let date = formatTime defaultTimeLocale "%d.%m.%0Y %R" ts
          printf "Startup - date: %s - version: %s\n" date (showVersion version)
-- | eval the run result
--
-- >>> -- all fine
-- >>> :set -XOverloadedStrings
-- >>> let t = Tariff (Balance 10) (Usage 500 230 270) 21
-- >>> let at = AvailableThreshold Nothing Nothing
-- >>> let bt = BalanceThreshold Nothing Nothing
-- >>> let cfg = AppConfig False (ProviderLogin "" "") Nothing [] at bt "http://provider.url.com"
-- >>> runReaderT (evalRes (Right t)) cfg
-- --------------------
-- Balance: 10.00 €
-- --------------------
-- Quota: 500 MB
-- Used: 230 MB
-- Available: 270 MB
-- --------------------
-- Days left: 21
-- --------------------
--
--
-- >>> -- available warning threshold
-- >>> let t = Tariff (Balance 10) (Usage 500 230 270) 21
-- >>> let at = AvailableThreshold (Just 280) Nothing
-- >>> let bt = BalanceThreshold Nothing Nothing
-- >>> let cfg = AppConfig False (ProviderLogin "" "") Nothing [] at bt "http://provider.url.com"
-- >>> runReaderT (evalRes (Right t)) cfg
-- --------------------
-- Balance: 10.00 €
-- --------------------
-- Quota: 500 MB
-- Used: 230 MB
-- Available: 270 MB
-- --------------------
-- Days left: 21
-- --------------------
-- available below warning threshold!
-- *** Exception: ExitFailure 1
-- | Evaluate the outcome of the tariff query: print a report, optionally
-- persist and publish the values, and terminate with a non-zero exit
-- code when a threshold is violated (1 = warning, 2 = critical / usage
-- not available).  Fixes the "critial" typo in the log messages.
evalRes :: Either AppError Tariff -> ReaderT AppConfig IO ()
-- handle successful result
evalRes (Right tariff@(Tariff balance usage daysLeft)) = do
  beQuiet <- asks acQuiet
  --
  -- build / print the report
  --
  let sep = T.replicate 20 "-"
  logger $ unlines' [
        sep
      , balanceReport balance
      , sep
      , usageReport usage
      , sep
      , sformat ("Days left: " % (left 8 ' ' %. int)) daysLeft
      , sep
      ]
  --
  -- persist the values when a path is given
  --
  maybePersistPath <- asks acPersistPath
  let persistAndLogResult path = do
        res <- persist tariff path
        case res of
          (Right n) -> unless beQuiet . putText $ "values persisted in file: " <> T.pack n
          (Left e) -> putStrLn $ "unable to persist values: " <> displayException e
  liftIO $ mapM_ persistAndLogResult maybePersistPath
  --
  -- publish the values
  --
  -- * when no usage is available, publish zeros
  --   and terminate with exit code 2
  --
  case usage of
    UsageNotAvailable -> do
      logger "publish zeros for usage"
      publishTariff (tariff { tUsage = Usage 0 0 0 })
      lift $ exitWith (ExitFailure 2)
    _ -> publishTariff tariff
  --
  -- log warnings / terminate with a failure exit code when a value is
  -- below its threshold (critical checks first, then warnings)
  --
  availableBelowCritical <- isBelowCritical usage
  when availableBelowCritical $ do
    logger "available below critical threshold!"
    lift $ exitWith (ExitFailure 2)
  availableBelowWarning <- isBelowWarning usage
  when availableBelowWarning $ do
    logger "available below warning threshold!"
    lift $ exitWith (ExitFailure 1)
  balanceBelowCritical <- isBelowCritical balance
  when balanceBelowCritical $ do
    logger "balance below critical threshold!"
    lift $ exitWith (ExitFailure 2)
  balanceBelowWarning <- isBelowWarning balance
  when balanceBelowWarning $ do
    logger "balance below warning threshold!"
    lift $ exitWith (ExitFailure 1)
-- handle errors: report and exit with code 2
evalRes (Left e) =
  lift $ do putText $ "ERROR: " <> show e
            exitWith $ ExitFailure 2
-- | Build the report line for the balance.
--
balanceReport :: Balance -> Text
balanceReport (Balance b) = sformat ("Balance:" % (left 9 ' ' %. fixed 2) % " €") b
-- NOTE(review): fall-through presumably matches a "not available"
-- constructor of 'Balance' — confirm in Types.
balanceReport _ = "Balance not available\n"
-- | Build the report lines for the usage.
--
usageReport :: Usage -> Text
usageReport (Usage q u a) = sformat ("Quota: " % (left 5 ' ' %. int) % " MB" %
                                     "\nUsed: " % (left 5 ' ' %. int) % " MB" %
                                     "\nAvailable: " % (left 5 ' ' %. int) % " MB" ) q u a
-- Matches 'UsageNotAvailable' (see 'evalRes').
usageReport _ = "Usage not available - quota exhausted?"
-- | Log to stdout unless the quiet flag was passed.
--
logger :: Text -> ReaderT AppConfig IO ()
logger msg = do
  isQuiet <- asks acQuiet
  unless isQuiet $ lift $ putText msg

-- | Like 'unlines' from the prelude, but without the trailing newline.
-- (Partial on the empty list, since 'T.init' fails on "".)
--
unlines' :: [Text] -> Text
unlines' xs = T.init (T.unlines xs)
|
section77/datenverbrauch
|
src/Main.hs
|
bsd-3-clause
| 6,114
| 0
| 18
| 1,897
| 1,155
| 590
| 565
| 94
| 3
|
{- source: http://www.haskell.org/haskellwiki/Introduction#Quicksort_in_Haskell -}
module ArtGallery.Sort.QuickSort.NaiveOneLine (
qsort
) where
qsort (p:xs) = qsort [x | x<-xs, x<p] ++ [p] ++ qsort [x | x<-xs, x>=p]
|
adarqui/ArtGallery
|
Haskell/src/ArtGallery/Sort/QuickSort/NaiveOneLine.hs
|
bsd-3-clause
| 220
| 0
| 10
| 29
| 84
| 46
| 38
| 3
| 1
|
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE NoImplicitPrelude #-}
{- |
'Internal' is big so compiling it takes time;
thus we make experiments in 'Unstable'
and move them to 'Internal' after we are satisfied.
-}
module Unstable
(
-- * Sequencing by stream-building monad
Comonad(..)
, mkid
, unid
, Build
, buildl
, buildrl
, bsamno
, brate
, bcont
, blint
-- * Partial processing and resuming
, runBuild
, Bst
, Iso
, isorev
, bimapCon
, liftCont
, lowerCont
-- * Specifying duration
, bsam
, bsec
, btick
, bbeat
, bto
, btos
-- * Writing output
, blwrite
, blwrite_
, blwritemap
, blwritezip
-- * Sequencing
-- ** Newtype-continuation sequencing
, clmap
, clzip
, clpass
, crlpass
, crlpasss
, crlthrus
, crlzip
, crlzips
-- ** Lambda-continuation sequencing
, rlcid
, rlczip
)
where
import qualified Control.Monad.Trans.Cont as Mc
import qualified Control.Monad.Trans.State.Strict as Ms
import Control.Monad ((>=>))
import Control.Monad.Trans.Cont
import Data.Functor.Identity
import Sound.Class
import Sound.InfList
import Sound.Int
import Sound.Time
{-
newtype Cont r a = MkCont { _runCont :: (a -> r) -> r }
instance Functor (Cont r) where fmap f x = MkCont (\ c -> _runCont x (c . f))
instance Applicative (Cont r) where
pure x = MkCont (\ c -> c x)
(<*>) ff fx = MkCont (\ c -> _runCont ff (\ f -> _runCont fx (\ x -> c (f x))))
instance Monad (Cont r) where
return x = MkCont (\ c -> c x)
(>>=) m k = MkCont (\ c -> _runCont m (\ x -> _runCont (k x) c))
instance (Num a) => Num (Cont r a) where
(+) = liftA2 (+)
(*) = liftA2 (*)
(-) = liftA2 (-)
negate = fmap negate
abs = fmap abs
signum = fmap signum
fromInteger = pure . fromInteger
runCont :: Cont r a -> (a -> r) -> r
runCont (MkCont f) c = f c
{- |
@
runContId x = 'runCont' x id
@
-}
runContId :: Cont a a -> a
runContId x = runCont x id
-}
-- Continuation-style sequencing over rate-tagged streams (RL): consume
-- @n@ samples, then hand the remainder to the continuation @c@.
rlcid :: Int -> RL a -> (RL a -> RL a) -> RL a
rlcid n x c =
    rated r (lcid n (unrated x) (unrated . c . rated r))
    where
        r = rateOf x
-- Zip two rated streams for @n@ samples; the result carries the
-- combined (`ror`) rate of the inputs.
rlczip :: Int -> (a -> b -> c) -> RL a -> RL b -> (RL a -> RL b -> RL c) -> RL c
rlczip n f x y c =
    rated r (lczip n f (unrated x) (unrated y) (\ a b -> unrated (c (rated r a) (rated r b))))
    where
        r = rateOf x `ror` rateOf y
-- 'rlczip' with the continuation reified as a 'Cont'.
crlzip :: Int -> (a -> b -> c) -> RL a -> RL b -> Cont (RL c) (RL a, RL b)
crlzip n f x y = cont (\ c -> rlczip n f x y (curry c))
-- 'crlzip' with the duration given in seconds, converted to samples via
-- the combined rate (same formula as 'bsec').
crlzips :: T -> (a -> b -> c) -> RL a -> RL b -> Cont (RL c) (RL a, RL b)
crlzips t f x y = crlzip n f x y
    where
        r = rateOf x `ror` rateOf y
        n = round (t * fromIntegral (_unRate r))
-- Map @f@ over the first @n@ samples, continuation-style.
clmap :: Int -> (a -> b) -> L a -> Cont (L b) (L a)
clmap n f x = cont (lcmap n f x)
clzip :: Int -> (a -> b -> c) -> L a -> L b -> Cont (L c) (L a, L b)
-- clzip n f x y = cont (\ c -> lczip n f x y (\ xt yt -> c (xt, yt)))
clzip n f x y = cont (\ c -> lczip n f x y (curry c))
{- |
@
clpass n = 'clmap' n 'id'
@

Pass @n@ samples through unchanged; the continuation gets the rest.
-}
clpass :: Int -> L a -> Cont (L a) (L a)
clpass n x = cont (lcid n x)
-- Rated variant of 'clpass'.
crlpass :: Int -> RL a -> Cont (RL a) (RL a)
crlpass n x = cont (rlcid n x)
-- 'crlpass' with the duration given in seconds.
crlpasss :: T -> RL a -> Cont (RL a) (RL a)
crlpasss t x = crlpass n x
    where
        r = rateOf x
        n = round (t * fromIntegral (_unRate r))
-- | This is 'crlpasss' but without three same letters in a row.
crlthrus :: T -> RL a -> Cont (RL a) (RL a)
crlthrus = crlpasss
-- | Wrap a value in 'Identity'.
mkid :: a -> Identity a
mkid = Identity

-- | Unwrap an 'Identity' value (same as 'runIdentity').
unid :: Identity a -> a
unid = runIdentity
-- | Comonad: the dual of 'Monad'.  'coreturn' extracts a value and
-- 'cojoin' duplicates the context; 'cojoin' and 'cobind' are defined in
-- terms of each other, so instances supply 'coreturn' plus one of them.
class (Functor w) => Comonad w where
    coreturn :: w a -> a
    cojoin :: w a -> w (w a)
    cojoin = cobind id
    cobind :: (w a -> b) -> w a -> w b
    cobind f = fmap f . cojoin
infixl 1 `cobind`
-- | 'Identity' is trivially a comonad: unwrap and wrap.
instance Comonad Identity where { coreturn = unid; cojoin = mkid; }
-- | An isomorphism between @a@ and @b@: a pair of mutually inverse
-- functions.
data Iso a b = MkIso { _isoright :: a -> b, _isoleft :: b -> a }

-- | Flip the direction of an isomorphism.
isorev :: Iso a b -> Iso b a
isorev i = MkIso (_isoleft i) (_isoright i)

-- | Transport a continuation-shaped value along an isomorphism of its
-- result type.
bimapCon :: Iso r s -> ((a -> r) -> r) -> ((a -> s) -> s)
bimapCon i f cs = _isoright i (f (_isoleft i . cs))
-- | Lift a plain continuation into one whose result lives in @m@, using
-- @return@/'coreturn' as the isomorphism witnesses.
liftCont :: (Monad m, Comonad m) => ((a -> r) -> r) -> ((a -> m r) -> m r)
liftCont = bimapCon iso
    where
        iso = MkIso return coreturn
-- | Inverse of 'liftCont'.
lowerCont :: (Monad m, Comonad m) => ((a -> m r) -> m r) -> ((a -> r) -> r)
lowerCont = bimapCon iso
    where
        iso = MkIso coreturn return
-- | The stream-building monad: builder state ('Bst') threaded through a
-- final-result continuation over 'Identity'.
newtype Build r a
    = MkBuild { _runBuild :: Ms.StateT Bst (Mc.ContT r Identity) a }
-- | Builder state.
data Bst
    = MkBst
    {
        -- Sample rate (see 'buildl': samples per second).
        _bstrate :: !(Rate Int)
        -- Ticks per beat.
        , _bstTickPerBeat :: !(Rate Int)
        -- Current sample number.
        , _bstsamno :: !Int
        -- Current tick number.
        , _bsttick :: !Int
        -- Stream mapping tick numbers to sample numbers.
        , _bstticksam :: L (SampleNumber Int)
    }
    deriving (Show)
-- The instances simply delegate to the underlying StateT/ContT stack.
instance Functor (Build r) where fmap f = MkBuild . fmap f . _runBuild
instance Applicative (Build r) where { pure = MkBuild . pure; (<*>) ff fx = MkBuild (_runBuild ff <*> _runBuild fx); }
instance Monad (Build r) where { return = MkBuild . return; (>>=) m k = MkBuild (_runBuild m >>= _runBuild . k); }
-- Pointwise numeric instances, so numeric literals work inside 'Build'.
instance (Num a) => Num (Build r a) where
    (+) = liftA2 (+)
    (*) = liftA2 (*)
    (-) = liftA2 (-)
    negate = fmap negate
    abs = fmap abs
    signum = fmap signum
    fromInteger = pure . fromInteger
instance (Fractional a) => Fractional (Build r a) where
    (/) = liftA2 (/)
    recip = fmap recip
    fromRational = pure . fromRational
-- Modify the builder state.
bmodify_ :: (Bst -> Bst) -> Build r ()
bmodify_ f = MkBuild $ Ms.modify f
-- Advance the tick counter until the next tick's sample number lies
-- beyond the current sample number.
bstupdatetick_ :: Bst -> Bst
bstupdatetick_ s =
    if nextTickSampleNumber > physamno
        then s
        else bstupdatetick_ s'
    where
        tick = _bsttick s
        physamno = _bstsamno s
        MkL _ (MkL nextTickSampleNumber ticksam') = _bstticksam s
        s' = s
            {
                _bsttick = succ tick
                , _bstticksam = ticksam'
            }
-- Advance the current sample number by @n@ (must be >= 0) and
-- resynchronise the tick counter.
badvsam_ :: Int -> Build r ()
badvsam_ n =
    if n >= 0
        then bmodify_ $ \ s -> bstupdatetick_ s { _bstsamno = _bstsamno s + n }
        else error "badvsam_: trying to go back in time"
-- Project a value out of the builder state.
basks_ :: (Bst -> a) -> Build r a
basks_ f = MkBuild (Ms.gets f)
-- | Run a builder with an explicit initial state and final continuation.
runBuild :: Build r a -> Bst -> ((a, Bst) -> Identity r) -> Identity r
runBuild x s c = Mc.runContT (Ms.runStateT (_runBuild x) s) c
-- Run a builder to completion, discarding the final state.
buildI_ :: Bst -> Build a a -> a
buildI_ s x = unid (Mc.runContT (Ms.evalStateT (_runBuild x) s) return)
-- | Build a stream given the sample rate, tick resolution and a
-- tick-number-to-sample-number lookup stream.
buildl :: SamplePerSecond (Rate Int) -> TickPerBeat (Rate Int) -> L (SampleNumber Int) -> Build (L a) (L a) -> L a
buildl r tpb logtophy x = buildI_ (MkBst r tpb 0 0 logtophy) x
-- | 'buildl' with the result tagged with its rate.
buildrl :: SamplePerSecond (Rate Int) -> TickPerBeat (Rate Int) -> L (SampleNumber Int) -> Build (L a) (L a) -> RL a
buildrl r tpb ltp x = rated r $ buildl r tpb ltp x
-- | Get the current sample number.
bsamno :: Build r Int
bsamno = basks_ _bstsamno
-- | Get the sample rate.
brate :: Build r (Rate Int)
brate = basks_ _bstrate
-- | Lift a continuation.
bcont :: ((a -> r) -> r) -> Build r a
bcont x =
    MkBuild . Ms.StateT $ \ s ->
        Mc.ContT (\ c -> return (x (\ a -> coreturn (c (a, s)))))
{- |
@blwrite n x@ writes the first @n@ samples of @x@ to the output stream.
This returns @'ldrop' n x@.
-}
blwrite :: Build (L a) Int -> L a -> Build (L a) (L a)
blwrite bn x = bn >>= \ n -> badvsam_ n >> bcont (lcid n x)
-- | This is like 'blwrite' but discards the stream tail.
blwrite_ :: Build (L a) Int -> L a -> Build (L a) ()
blwrite_ bn x = blwrite bn x >> return ()
{- |
@blwritemap n f x@ writes the first @n@ samples of @'lmap' f x@ to the output stream.
This returns @'ldrop' n x@.
-}
blwritemap :: Build (L b) Int -> (a -> b) -> L a -> Build (L b) (L a)
blwritemap bn f x = bn >>= \ n -> badvsam_ n >> bcont (lcmap n f x)
{- |
@blwritezip n f x y@ writes the first @n@ samples of @'lzip' f x y@ to the output stream.
This returns @('ldrop' n x, 'ldrop' n y)@.
-}
blwritezip :: Build (L c) Int -> (a -> b -> c) -> L a -> L b -> Build (L c) (L a, L b)
blwritezip bn f x y = bn >>= \ n -> badvsam_ n >> bcont (lczip n f x y . curry)
-- | Integrate a stream using the time step derived from the current rate.
blint :: (Fractional a) => L a -> Build r (L a)
blint x = fmap (\ r -> sint (ratedt r) x) brate
-- | Literal constants do not need this due to @('Num' a) => 'Num' ('Build' r a)@.
bsam :: Int -> Build r Int
bsam = pure
-- | Convert number of seconds to number of samples.
bsec :: T -> Build r Int
bsec t = fmap (round . (* t) . fromIntegral . _unRate) brate
-- | Convert number of ticks into number of samples, via the
-- tick-to-sample lookup stream in the state.
btick :: Int -> Build r Int
btick t =
    basks_ $ \ s ->
        let
            lookupStream = _bstticksam s
            currentSample = _bstsamno s
            targetSample = head (ldrop t lookupStream)
        in
            targetSample - currentSample
-- | Convert a number of beats to a number of samples via ticks-per-beat.
bbeat :: Rational -> Build r Int
bbeat n = do
    tpb <- basks_ (_unRate . _bstTickPerBeat)
    btick (round (n * fromIntegral tpb))
{- |
@bto n@ computes the number of samples that we must write
to make the current sample number be @n@.
See 'bsamno' for the current sample number.
-}
bto :: Int -> Build r Int
bto n = fmap (\ s -> n - s) bsamno
-- | 'bto' with the target given in seconds.
btos :: T -> Build r Int
btos = bsec >=> bto
|
edom/sound
|
src/Unstable.hs
|
bsd-3-clause
| 9,094
| 0
| 18
| 2,570
| 3,392
| 1,763
| 1,629
| 203
| 2
|
-- Example of Multiple Type Stack on ST Monad
{-# LANGUAGE ExistentialQuantification #-}
module MultiTypeStack where
import Control.Monad (liftM)
import Control.Monad.ST (ST)
import Control.Monad.ST (runST)
import Data.STRef (STRef)
import Data.STRef (modifySTRef)
import Data.STRef (newSTRef)
import Data.STRef (readSTRef)
import Data.STRef (writeSTRef)
-- | A mutable variable in the 'ST' monad.
type Var s a = STRef s a
-- | A variable paired with a stack of saved values.
data VarInfo s a = VarInfo { var :: Var s a
                           , stack :: STRef s [a] }
-- | 'VarInfo' with its element type hidden (existential); only 'Show'
-- remains usable on the stored values.
data AnyVarInfo s = forall a. (Show a) => AnyVarInfo (VarInfo s a)
-- | Registry of all variables created through '_new'.
type VarsInfo s = STRef s [AnyVarInfo s]
-- | An empty registry.
newVarsInfo :: ST s (VarsInfo s)
newVarsInfo = newSTRef []
-- | Render the current value of a registered variable (debug helper).
sShow :: AnyVarInfo s -> ST s String
sShow (AnyVarInfo vi) = liftM show $ readSTRef (var vi)
-- | Create a variable with an empty stack and register it.
-- Note: the registry is built by prepending, so it lists the variables
-- in reverse creation order.
_new :: Show a => VarsInfo s -> a -> ST s (Var s a)
_new vsi a = do
  v <- newSTRef a
  s <- newSTRef []
  modifySTRef vsi $ \vis -> AnyVarInfo (VarInfo v s):vis
  return v
-- | Read the current value of a variable.
get :: Var s a -> ST s a
get ref = readSTRef ref

-- | Overwrite the current value of a variable.
set :: Var s a -> a -> ST s ()
set ref val = writeSTRef ref val
-- | Save the current value of every registered variable onto its stack.
_push :: VarsInfo s -> ST s ()
_push vsi = readSTRef vsi >>= mapM_ pushOne
  where
    pushOne :: AnyVarInfo s -> ST s ()
    pushOne (AnyVarInfo (VarInfo v s)) = do
      val <- readSTRef v
      modifySTRef s $ \vals -> val:vals

-- | Restore every registered variable from the top of its stack.
-- Partial: the pattern match fails if a stack is empty (pop without a
-- matching push).
_pop :: VarsInfo s -> ST s ()
_pop vsi = readSTRef vsi >>= mapM_ popOne
  where
    popOne :: AnyVarInfo s -> ST s ()
    popOne (AnyVarInfo (VarInfo v s)) = do
      (top:rest) <- readSTRef s
      writeSTRef v top
      writeSTRef s rest
{-|
Round-trip demonstration: '_push' saves all variables, '_pop' restores
them; the two snapshots show values after modification and after restore.

>>> testStack
(123,True,LT,["GT","False","456"],["LT","True","123"])
-}
testStack :: (Int, Bool, Ordering, [String], [String])
testStack = runST $ do
  -- setup
  vsi <- newVarsInfo
  let new a = _new vsi a
  let push = _push vsi
  let pop = _pop vsi
  -- create variables with stack and initial values
  vi <- new 123
  vb <- new True
  vo <- new LT
  -- push all variables
  push
  -- modify variables
  set vi 456
  set vb False
  set vo GT
  -- (for debug) snapshot; registry is in reverse creation order
  vis <- readSTRef vsi
  ls <- mapM sShow vis
  -- pop all variables
  pop
  -- get results (the original, pre-modification values)
  i <- get vi
  b <- get vb
  o <- get vo
  -- (for debug) snapshot after the pop restored the originals
  vis' <- readSTRef vsi
  ls' <- mapM sShow vis'
  return (i, b, o, ls, ls')
|
notae/haskell-exercise
|
pack/MultiTypeStack.hs
|
bsd-3-clause
| 2,187
| 0
| 12
| 565
| 903
| 446
| 457
| 68
| 1
|
{-# LANGUAGE EmptyDataDecls #-}
module HtmlCanvas where
import FFI
import Prelude
-- | Marker class for values that can be targets of DOM events.
class Eventable a
-- | Attach a handler to the global @window@ object (note: NOT to a
-- particular element); the 'Bool' selects capture-phase delivery.
addEventListener :: String -> Fay () -> Bool -> Fay ()
addEventListener = ffi "window['addEventListener'](%1,%2,%3)"
--------------------------------------------------------------------------------
-- | Mini DOM
-- TODO: Check if Cinder installed
--------------------------------------------------------------------------------
-- data Element
-- instance Eventable Element
-- Image
-- | An HTML image handle.
data Image
instance Eventable Image
-- | Create a fresh, unloaded image object.
newImage :: Fay Image
newImage = ffi "new Image()"
-- | Read the image's current @src@ URL.
-- BUG FIX: the FFI string was @%1['src]@ — the missing closing quote
-- made the generated JavaScript invalid.
src :: Image -> Fay String
src = ffi "%1['src']"
-- | Set the image's @src@ URL.
setSrc :: String -> Image -> Fay ()
setSrc = ffi "%2['src'] = %1"
-- Canvas
-- | An HTML @<canvas>@ element handle.
data Canvas
instance Eventable Canvas
-- | Get an element by its ID.
-- NOTE(review): the result is blindly typed as 'Canvas' — there is no
-- runtime check that the element really is a canvas.
getElementById :: String -> Fay Canvas
getElementById = ffi "document['getElementById'](%1)"
-- | Obtain a drawing context (e.g. @\"2d\"@) from a canvas.
getContext :: String -> Canvas -> Fay Context
getContext = ffi "%2['getContext'](%1)"
--------------------------------------------------------------------------------
-- CanvasRenderingContext2D
-- | A canvas context.
data Context
--- Attributes
-- Each attribute is exposed as a getter (Context -> Fay a, FFI "%1['name']")
-- and a setter (a -> Context -> Fay (), FFI "%2['name']=%1"); the names
-- mirror the JavaScript CanvasRenderingContext2D properties.
-- fillStyle
fillStyle :: Context -> Fay String
fillStyle = ffi "%1['fillStyle']"
setFillStyle :: String -> Context -> Fay ()
setFillStyle = ffi "%2['fillStyle']=%1"
-- font
font :: Context -> Fay String
font = ffi "%1['font']"
setFont :: String -> Context -> Fay ()
setFont = ffi "%2['font']=%1"
-- globalAlpha
globalAlpha :: Context -> Fay Double
globalAlpha = ffi "%1['globalAlpha']"
setGlobalAlpha :: Double -> Context -> Fay ()
setGlobalAlpha = ffi "%2['globalAlpha']=%1"
-- globalCompositeOperation
globalCompositeOperation :: Context -> Fay String
globalCompositeOperation = ffi "%1['globalCompositeOperation']"
setGlobalCompositeOperation :: String -> Context -> Fay ()
setGlobalCompositeOperation = ffi "%2['globalCompositeOperation']=%1"
-- lineCap
lineCap :: Context -> Fay String
lineCap = ffi "%1['lineCap']"
setLineCap :: String -> Context -> Fay ()
setLineCap = ffi "%2['lineCap']=%1"
-- lineDashOffset
lineDashOffset :: Context -> Fay Double
lineDashOffset = ffi "%1['lineDashOffset']"
setLineDashOffset :: Double -> Context -> Fay ()
setLineDashOffset = ffi "%2['lineDashOffset']=%1"
-- lineJoin
lineJoin :: Context -> Fay String
lineJoin = ffi "%1['lineJoin']"
setLineJoin :: String -> Context -> Fay ()
setLineJoin = ffi "%2['lineJoin']=%1"
-- lineWidth
lineWidth :: Context -> Fay Double
lineWidth = ffi "%1['lineWidth']"
setLineWidth :: Double -> Context -> Fay ()
setLineWidth = ffi "%2['lineWidth']=%1"
-- miterLimit
miterLimit :: Context -> Fay Double
miterLimit = ffi "%1['miterLimit']"
setMiterLimit :: Double -> Context -> Fay ()
setMiterLimit = ffi "%2['miterLimit']=%1"
-- shadowBlur
shadowBlur :: Context -> Fay Double
shadowBlur = ffi "%1['shadowBlur']"
setShadowBlur :: Double -> Context -> Fay ()
setShadowBlur = ffi "%2['shadowBlur']=%1"
-- shadowColor
shadowColor :: Context -> Fay String
shadowColor = ffi "%1['shadowColor']"
setShadowColor :: String -> Context -> Fay ()
setShadowColor = ffi "%2['shadowColor']=%1"
-- shadowOffsetX
shadowOffsetX :: Context -> Fay Double
shadowOffsetX = ffi "%1['shadowOffsetX']"
setShadowOffsetX :: Double -> Context -> Fay ()
setShadowOffsetX = ffi "%2['shadowOffsetX']=%1"
-- shadowOffsetY
shadowOffsetY :: Context -> Fay Double
shadowOffsetY = ffi "%1['shadowOffsetY']"
setShadowOffsetY :: Double -> Context -> Fay ()
setShadowOffsetY = ffi "%2['shadowOffsetY']=%1"
-- strokeStyle
strokeStyle :: Context -> Fay String
strokeStyle = ffi "%1['strokeStyle']"
setStrokeStyle :: String -> Context -> Fay ()
setStrokeStyle = ffi "%2['strokeStyle']=%1"
-- textAlign
textAlign :: Context -> Fay String
textAlign = ffi "%1['textAlign']"
setTextAlign :: String -> Context -> Fay ()
setTextAlign = ffi "%2['textAlign']=%1"
-- textBaseline
textBaseline :: Context -> Fay String
textBaseline = ffi "%1['textBaseline']"
setTextBaseline :: String -> Context -> Fay ()
setTextBaseline = ffi "%2['textBaseline']=%1"
--- Methods
-- In each binding the last FFI placeholder is the Context receiver.
-- last is optional
arc :: Double -> Double -> Double -> Double -> Double -> Bool -> Context -> Fay ()
arc = ffi "%7['arc'](%1,%2,%3,%4,%5,%6)"
arcTo :: Double -> Double -> Double -> Double -> Double -> Context -> Fay ()
arcTo = ffi "%6['arcTo'](%1,%2,%3,%4,%5)"
beginPath :: Context -> Fay ()
beginPath = ffi "%1['beginPath']()"
bezierCurveTo :: Double -> Double -> Double -> Double -> Double -> Double -> Context -> Fay ()
bezierCurveTo = ffi "%7['bezierCurveTo'](%1,%2,%3,%4,%5,%6)"
clearRect :: Double -> Double -> Double -> Double -> Context -> Fay ()
clearRect = ffi "%5['clearRect'](%1,%2,%3,%4)"
clip :: Context -> Fay ()
clip = ffi "%1['clip']()"
closePath :: Context -> Fay ()
closePath = ffi "%1['closePath']()"
-- TODO: createImageData()
-- TODO: createLinearGradient()
-- should return nsIDOMCanvasGradient
-- createLinearGradient :: Context -> Double -> Double -> Double -> Double -> Fay ()
-- createLinearGradient = ffi "%1['createLinearGradient'](%2,%3,%4,%5)"
-- TODO: createPattern()
-- TODO: createLinearGradient()
-- should return nsIDOMCanvasGradient
-- createRadialGradient :: Context -> Double -> Double -> Double -> Double -> Double -> Double -> Fay ()
-- createRadialGradient = ffi "%1['createRadialGradient'](%2,%3,%4,%5,%6,%7)"
-- TODO: drawCustomFocusRing()
drawImage :: Image -> Double -> Double -> Context -> Fay ()
drawImage = ffi "%4['drawImage'](%1,%2,%3)"
-- | @drawImage(img, dx, dy, dWidth, dHeight)@ — draw an image scaled to
-- the given size.
-- BUG FIX: the FFI string used @%7@ as the receiver although the type
-- only has six parameters, so the Context was passed as a JS argument
-- and the call was made on @undefined@.
drawImageSize :: Image -> Double -> Double -> Double -> Double -> Context -> Fay ()
drawImageSize = ffi "%6['drawImage'](%1,%2,%3,%4,%5)"
-- drawImage(image, sx, sy, sWidth, sHeight, dx, dy, dWidth, dHeight):
-- draw a sub-rectangle of the source image into a destination rectangle.
drawImageSpecific :: Image -> Double -> Double -> Double -> Double -> Double -> Double -> Double -> Double -> Context -> Fay ()
drawImageSpecific = ffi "%10['drawImage'](%1,%2,%3,%4,%5,%6,%7,%8,%9)"
-- TODO: drawSystemFocusRing()
fill :: Context -> Fay ()
fill = ffi "%1['fill']()"
fillRect :: Double -> Double -> Double -> Double -> Context -> Fay ()
fillRect = ffi "%5['fillRect'](%1,%2,%3,%4)"
-- last is optional
fillText :: String -> Double -> Double -> Double -> Context -> Fay ()
fillText = ffi "%5['fillText'](%1,%2,%3,%4)"
-- TODO: getImageData()
-- TODO: getLineDash()
isPointInPath :: Double -> Double -> Context -> Fay Bool
isPointInPath = ffi "%3['isPointInPath'](%1,%2)"
isPointInStroke :: Double -> Double -> Context -> Fay Bool
isPointInStroke = ffi "%3['isPointInStroke'](%1,%2)"
lineTo :: Double -> Double -> Context -> Fay ()
lineTo = ffi "%3['lineTo'](%1,%2)"
-- TODO: measureText()
moveTo :: Double -> Double -> Context -> Fay ()
moveTo = ffi "%3['moveTo'](%1,%2)"
-- TODO: putImageData()
quadraticCurveTo :: Double -> Double -> Double -> Double -> Context -> Fay ()
quadraticCurveTo = ffi "%5['quadraticCurveTo'](%1,%2,%3,%4)"
rect :: Double -> Double -> Double -> Double -> Context -> Fay ()
rect = ffi "%5['rect'](%1,%2,%3,%4)"
restore :: Context -> Fay ()
restore = ffi "%1['restore']()"
rotate :: Double -> Context -> Fay ()
rotate = ffi "%2['rotate'](%1)"
save :: Context -> Fay ()
save = ffi "%1['save']()"
scale :: Double -> Double -> Context -> Fay ()
scale = ffi "%3['scale'](%1,%2)"
scrollPathIntoView :: Context -> Fay ()
scrollPathIntoView = ffi "%1['scrollPathIntoView']()"
-- TODO: setLineDash()
setTransform :: Double -> Double -> Double -> Double -> Double -> Double -> Context -> Fay ()
setTransform = ffi "%7['setTransform'](%1,%2,%3,%4,%5,%6)"
stroke :: Context -> Fay ()
stroke = ffi "%1['stroke']()"
strokeRect :: Double -> Double -> Double -> Double -> Context -> Fay ()
strokeRect = ffi "%5['strokeRect'](%1,%2,%3,%4)"
-- TODO: strokeText()
transform :: Double -> Double -> Double -> Double -> Double -> Double -> Context -> Fay ()
transform = ffi "%7['transform'](%1,%2,%3,%4,%5,%6)"
translate :: Double -> Double -> Context -> Fay ()
translate = ffi "%3['translate'](%1,%2)"
|
svaiter/fay-canvas
|
src/HtmlCanvas.hs
|
bsd-3-clause
| 7,846
| 0
| 16
| 1,181
| 1,934
| 994
| 940
| -1
| -1
|
{-# LANGUAGE TemplateHaskell #-}
module AWS.EC2.Types.Subnets
( CreateSubnetRequest(..)
, Subnet(..)
, SubnetState
) where
import AWS.EC2.Types.Common (ResourceTag)
import AWS.Lib.FromText
-- | Parameters for the EC2 CreateSubnet call: target VPC, the subnet's
-- CIDR block and an optional availability zone.
data CreateSubnetRequest = CreateSubnetRequest
    { createSubnetRequestVpcId :: Text
    , createSubnetRequestCidrBlock :: AddrRange IPv4
    , createSubnetRequestAvailabilityZone :: Maybe Text
    }
  deriving (Show, Read, Eq)
-- | An EC2 subnet as reported by the API. The Maybe fields are
-- attributes the API may omit.
data Subnet = Subnet
    { subnetId :: Text
    , subnetState :: SubnetState
    , subnetVpcId :: Text
    , subnetCidrBlock :: AddrRange IPv4
    , subnetAvailableIpAddressCount :: Int
    , subnetAvailabilityZone :: Text
    , subnetDefaultForAz :: Maybe Bool
    , subnetMapPublicIpOnLaunch :: Maybe Bool
    , subnetTagSet :: [ResourceTag]
    }
  deriving (Show, Read, Eq)
-- | Lifecycle state of a subnet.
data SubnetState
    = SubnetStatePending
    | SubnetStateAvailable
  deriving (Show, Read, Eq)
-- Generates the FromText instance mapping "pending"/"available" onto the
-- constructors above (in declaration order).
deriveFromText "SubnetState" ["pending", "available"]
|
IanConnolly/aws-sdk-fork
|
AWS/EC2/Types/Subnets.hs
|
bsd-3-clause
| 969
| 0
| 9
| 196
| 223
| 134
| 89
| 28
| 0
|
module Problem42 where
import Data.Char
import Data.List.Split
-- | Project Euler 42: read the word list and report how many words have a
-- triangular letter value.
main :: IO ()
main =
    readFile "txt/42.txt"
        >>= print . length . filter isTriangular . map value . wordsBy (not . isLetter)
-- | Alphabetical value of a word: A=1 .. Z=26 summed over its letters,
-- ignoring the double quotes that surround each word in the input file.
value :: String -> Int
value word = sum [1 + ord c - ord 'A' | c <- word, c /= '\"']
-- | Is n a triangular number t_x = x(x+1)/2?
-- Solving x^2 + x - 2n = 0 gives x = (sqrt(1+8n) - 1) / 2; n is triangular
-- exactly when the floored root reproduces n.
isTriangular :: Int -> Bool
isTriangular n = root * (root + 1) `div` 2 == n
  where
    root = floor (((sqrt (fromIntegral (1 + 8 * n)) :: Double) - 1) / 2)
|
adityagupta1089/Project-Euler-Haskell
|
src/problems/Problem42.hs
|
bsd-3-clause
| 515
| 0
| 16
| 127
| 218
| 115
| 103
| 12
| 1
|
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE BangPatterns #-}
module Main where
import GHC.Float
import Graphics.Gloss
import Graphics.Gloss.Interface.Pure.Simulate
import Graphics.Gloss.Interface.IO.Simulate
import Graphics.Gloss.Data.Color
import Graphics.Gloss.Data.ViewPort
import Debug.Trace
import Control.Monad.Fix
import Control.Monad
import Control.Concurrent
import Foreign.ForeignPtr
import System.CPUTime
import Control.Parallel
import Control.Parallel.Strategies
import Data.Map
import qualified Sound.File.Sndfile as Snd
import qualified Sound.File.Sndfile.Buffer.Vector as B
import Control.Monad.Trans.State.Strict
import Control.Monad.IO.Class
import Control.Monad
import Sound.Analysis.Spectrum
import Sound.Analysis.SpectralFlux
import Sound.Analysis.Onset
import Sound.Filters.FreqFilters
import Sound.Filters.FluxFilters
import Sound.Data.PCM
import Sound.Data.Stream
import Sound.Data.Freq
import Sound.Data.Flux
import qualified Sound.Data.Onset as O
import qualified Sound.SFML.Mixer as M
import qualified Data.Sequence as S
import qualified Data.Foldable as F
--(ToDraw, ToBeDrawn)
-- | Simulation time in miliseconds since playback started.
type Time = Float
-- | State threaded through the gloss simulation loop: current playhead
-- time, the audio mixer handle, the onsets still waiting to fire, the
-- pictures rendered so far, and the viewport used to draw them.
data DrawableOnsets = DrawableOnsets { time :: !Float,
                                       mix :: M.Mixer,
                                       onsets :: !O.OnsetData,
                                       pics :: ![Picture],
                                       vp :: !ViewPort}
--RUN WITH OPTIONS
-- +RTS -N2 -s -qg2 -A1024k
-- | Load the wav file, run the spectral-flux onset analysis over it, and
-- print the per-band onset counts. The gloss animation / mixer start-up
-- below is currently commented out.
main = do
    --bs holds our BufferS
    (info, Just (bs :: B.Buffer Double)) <- Snd.readFile musicPath
    --Convert to an unboxed array
    let b = sterilize bs
    --Grab all of the sound file
    let ct = (truncate $ (fromIntegral $ Snd.frames info)/2048.0)
    let i = StreamInfo 1024 44100 2
    let b' = sampleRawPCM 0 ct (i, b)
    let s = pcmStream b'
    let freqs = freqStream s
    --let !osStream = onsetStream 32 (lowpass $ fluxStream freqs 24)
    let !osStream = onsetStream 32 (fluxStream freqs 24)
    let bins = [86, 172..]
    let freqBins = [43, 86..]
    let mix = M.initMixer
    let inWin = InWindow "Gloss" (500,500) (1,1)
    print $ show (fmap S.length (O.bands osStream))
    --mix' <- M.withMixer mix $ setupMusic
    --M.withMixer mix' $ M.startChannel "testchan"
    --simulateIO inWin white 60 (makeModel mix' osStream) drawWorld stepWorld
-- | Mixer action registering the music file on channel "testchan".
setupMusic = do
    get
    M.addChannel musicPath "testchan" 100.0
-- | Path of the audio file analysed and played back.
musicPath = "./downtheroad.wav"
-- | Initial simulation state at time 0 with nothing drawn yet.
makeModel :: M.Mixer -> O.OnsetData -> DrawableOnsets
makeModel mixer !os = DrawableOnsets 0.0 mixer os [] viewPortInit
{-# INLINE updateStream #-}
-- | Advance the simulation to time t': onsets whose timestamp has passed
-- are moved out of the pending queues and rendered into 'pics'; the rest
-- stay queued for later frames.
updateStream :: Float -> DrawableOnsets -> DrawableOnsets
updateStream !t' !dos = dos'
    where (drawable, remBands) = S.foldrWithIndex frameSrch (S.empty, S.singleton S.empty) osBands
          ps = (S.foldrWithIndex makeDrawable [] drawable)
          remaining = O.OnsetData i remBands
          os = onsets dos
          i = O.info os
          osBands = O.bands os
          oldPics = pics dos
          ps' = (oldPics++ps)
          numPics = length ps'
          -- Cap retained pictures at 300 to bound memory and draw cost.
          culledPics | numPics > 300 = ps
                     | otherwise = ps'
          -- Pop at most the head onset of each band: emit it when its time
          -- has come, otherwise keep the band untouched.
          frameSrch :: Int -> S.Seq O.Onset -> (S.Seq (Maybe O.Onset), S.Seq (S.Seq O.Onset)) -> (S.Seq (Maybe O.Onset), S.Seq (S.Seq O.Onset))
          frameSrch !i !e acc@(!toDraw, !left)
                | elem' == Nothing = (Nothing S.<| toDraw, e S.<| left)
                | t' > (frameToTime elem') = (elem' S.<| toDraw, left' S.<| left)
                | otherwise = (Nothing S.<| toDraw, e S.<| left)
                where (!elemS, !left') = S.splitAt 1 e
                      !elem' | S.length elemS >= 1 = Just $ S.index elemS 0
                             | otherwise = Nothing
          -- Convert an onset's FFT frame index to miliseconds.
          frameToTime (Just !f) = fromIntegral (O.frame f)*( (fromIntegral $ sampleRate i)/(fromIntegral $ fftWindow i))*2.0
          dos' = dos{time=t',onsets=remaining, pics=culledPics}
{-# INLINE makeDrawable #-}
-- | Render one fired onset as a coloured square: the band index fixes the
-- x position and colour, the frame number the y position, the onset power
-- the square's size.
makeDrawable :: Int -> Maybe O.Onset -> [Picture] -> [Picture]
makeDrawable i (Just !(O.Onset f p)) acc = (translate (fromIntegral i*2) (f'*2) $ (color (colorWheel !! i)(rectangleSolid p' p')) ): acc
    where p' = double2Float (p*0.005)
          f' = (fromIntegral f)
makeDrawable _ Nothing acc = acc --color (colorWheel !! i) (rectangleSolid 1 1)
-- | Infinite colour palette indexed by band number.
colorWheel = cycle [red, green, blue, yellow, cyan, magenta, rose, violet, azure, aquamarine, chartreuse, orange]
-- | Render callback for simulateIO: draw the accumulated pictures through
-- the current viewport.
drawWorld :: DrawableOnsets -> IO Picture
drawWorld (DrawableOnsets t m os ps view) = return $ applyViewPortToPicture view $ pictures ps
-- | Debug helper: extract band n of every frame as (index, power) pairs
-- suitable for plotting.
toPlot :: S.Seq (S.Seq O.Onset) -> Int -> [(Double, Double)]
toPlot fs n = snd $ F.foldl step (0.0, []) fs'
    where fs' = fmap (flip S.index $ n) fs
          --step :: (Int, [(Int, Double)]) -> Double -> (Int, [(Int, Double)])
          step (i, es) e = (i+1.0, es++[(i, O.power e)])
{-# INLINE stepWorld #-}
-- | Simulation step: sync the animation clock to the mixer playhead
-- (converted to miliseconds), scroll the viewport, and fire due onsets.
stepWorld :: ViewPort -> Float -> DrawableOnsets -> IO DrawableOnsets
stepWorld vp t' dos@(DrawableOnsets t m os ps view) = do
    playhead <- evalStateT (M.playheadPosition "testchan") m
    let chans = M.channels m
    let dur = M.duration $ chans ! "testchan"
    --traceIO("duration: "++show dur++" pos: "++show playhead)
    let vp' = view{viewPortTranslate=(-30, -((t/43.0)-38)),viewPortScale=10}
    return $ updateStream (playhead*1000) dos{vp=vp'}
-- Keep a tick that is already initialised; on the first call (tick still
-- 0.0) adopt the new value, logging it via Debug.Trace.
checkInit oldTick newTick
  | oldTick /= 0.0 = oldTick
  | otherwise      = trace ("starting value: " ++ show newTick) newTick
|
Smurf/Tempoist
|
Examples/OnsetAnimation.hs
|
bsd-3-clause
| 5,591
| 0
| 17
| 1,425
| 1,760
| 943
| 817
| 117
| 1
|
-- |A SExpr is an S-expression in the style of Rivest's Canonical
-- S-expressions. Atoms may be of any type, but String and
-- ByteString have special support. Similarly, lists may be of any
-- type, but haskell lists have special support.
-- Rivest's implementation of S-expressions is unusual in supporting
-- MIME type hints for each atom. See http:\/\/people.csail.mit.edu\/rivest\/Sexp.txt.
-- This capability is provided by the 'Hinted' type, which wraps atom types
-- and provides the necessary extension to the S-expression syntax.
--
-- The "Text.SExpr.Convert" module defines a modular parsing and
-- serializing framework based on two type classes, 'Atom' and 'List'.
-- The special support for [], String and ByteString is implemented via these
-- classes and may be extended to other types by declaring appropriate instances.
module Text.SExpr
( -- * Basics
SExpr, Hinted,
Atom(..), Raw, Simple,
List(..),
atom,
isAtom,
unAtom,
fromAtom,
list,
isList,
unList,
fromList,
-- * Hinted Atoms
hinted,
unhinted,
fromHinted,
hint,
maybeHint,
hintWithDefault,
dropHint,
mapHint,
defaultHint,
-- * Character predicates to support encoding
isTokenChar,isInitialTokenChar,isQuoteableChar,
-- * Transformations
matchSExpr,
foldSExpr,
lmap,
-- * String printers
canonicalString,
basicString,
advancedString,
-- * ShowS printers
canonical,
-- * Doc pretty printers
basic,
advanced,
-- * Put binary printers
putCanonical,
-- * Parsers
readSExpr,
readSExprString,
readCanonicalSExprString,
advancedSExpr,
canonicalSExpr
) where
import Text.SExpr.Type
import Text.SExpr.Convert
|
mokus0/s-expression
|
src/Text/SExpr.hs
|
bsd-3-clause
| 2,089
| 0
| 5
| 730
| 173
| 123
| 50
| 40
| 0
|
{-# LANGUAGE DeriveGeneric #-}
module Abstract.Interfaces.Wrapper (
HostDataWrapper (..)
) where
import GHC.Generics
-- | Pairs a payload of any type with a host identifier string, so
-- consumers can tell which host the data belongs to.
data HostDataWrapper a = HostDataWrapper {
  -- 'h' is the host identifier; 'd' is the wrapped payload.
  h :: String,
  d :: a
} deriving (Show, Read, Generic)
|
adarqui/Abstract-Interfaces
|
src/Abstract/Interfaces/Wrapper.hs
|
bsd-3-clause
| 218
| 0
| 8
| 35
| 59
| 37
| 22
| 8
| 0
|
--------------------------------------------------------------------------------
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE TemplateHaskell #-}
module Lib where
--------------------------------------------------------------------------------
import WhatMorphism.Annotations
import WhatMorphism.TemplateHaskell
--------------------------------------------------------------------------------
-- | A hand-rolled cons list used to exercise the what-morphism pass.
data List a
    = Cons a (List a)
    | Empty
    deriving (Show)
--------------------------------------------------------------------------------
-- Derive the fold (catamorphism) and build for 'List' and register them
-- with the plugin through the ANN pragma.
$(deriveFold ''List "foldList")
$(deriveBuild ''List "buildList")
{-# ANN type List (RegisterFoldBuild "foldList" "buildList") #-}
--------------------------------------------------------------------------------
-- | A leaf-valued binary tree used to exercise the what-morphism pass.
data Tree a
    = Leaf a
    | Node (Tree a) (Tree a)
    deriving (Show)
--------------------------------------------------------------------------------
-- Same derivation and registration for 'Tree'.
$(deriveFold ''Tree "foldTree")
$(deriveBuild ''Tree "buildTree")
{-# ANN type Tree (RegisterFoldBuild "foldTree" "buildTree") #-}
|
jaspervdj/thesis
|
what-morphism-examples/src/Lib.hs
|
bsd-3-clause
| 1,069
| 0
| 8
| 134
| 144
| 79
| 65
| 19
| 0
|
module Graphics.UI.SDL.Timer (
-- * Timer Support
addTimer,
delay,
getPerformanceCounter,
getPerformanceFrequency,
getTicks,
removeTimer
) where
import Data.Word
import Foreign.C.Types
import Foreign.Ptr
import Graphics.UI.SDL.Types
-- | Schedule a callback after the given number of milliseconds (SDL_AddTimer).
foreign import ccall "SDL.h SDL_AddTimer" addTimer :: Word32 -> TimerCallback -> Ptr () -> IO TimerID
-- | Block for at least the given number of milliseconds (SDL_Delay).
foreign import ccall "SDL.h SDL_Delay" delay :: Word32 -> IO ()
-- | Current value of the high-resolution counter (SDL_GetPerformanceCounter).
foreign import ccall "SDL.h SDL_GetPerformanceCounter" getPerformanceCounter :: IO Word64
-- | Counts per second of the high-resolution counter (SDL_GetPerformanceFrequency).
foreign import ccall "SDL.h SDL_GetPerformanceFrequency" getPerformanceFrequency :: IO Word64
-- | Milliseconds elapsed since SDL initialisation (SDL_GetTicks).
foreign import ccall "SDL.h SDL_GetTicks" getTicks :: IO Word32
-- | Cancel a timer created with 'addTimer'; returns success (SDL_RemoveTimer).
foreign import ccall "SDL.h SDL_RemoveTimer" removeTimer :: TimerID -> IO Bool
|
ekmett/sdl2
|
Graphics/UI/SDL/Timer.hs
|
bsd-3-clause
| 735
| 12
| 10
| 97
| 181
| 102
| 79
| 17
| 0
|
{-# LANGUAGE MultiParamTypeClasses #-}
module Generic.Control.Alternative where
import Prelude ()
import Generic.Control.Applicative
-- | Generic counterpart of Alternative: a monoid on applicative functors
-- 'f', lifted through the wrapper 'j'. 'empty' is the identity of '<|>'.
class Applicative j f => Alternative j f where
  empty :: j (f a)
  (<|>) :: j (f a) -> j (f a) -> j (f a)
|
sebastiaanvisser/AwesomePrelude
|
src/Generic/Control/Alternative.hs
|
bsd-3-clause
| 243
| 4
| 7
| 46
| 92
| 51
| 41
| 7
| 0
|
{-# LANGUAGE OverloadedLists #-}
module AccelerationStructure where
import Control.Monad.IO.Class
import Control.Monad.Trans.Resource
import Data.Bits
import Data.Coerce ( coerce )
import Data.Vector ( Vector )
import Foreign.Storable ( Storable(poke, sizeOf) )
import HasVulkan
import MonadVulkan
import Scene
import UnliftIO.Foreign ( castPtr )
import Vulkan.CStruct
import Vulkan.CStruct.Extends
import Vulkan.Core10
import Vulkan.Core12.Promoted_From_VK_KHR_buffer_device_address
import Vulkan.Extensions.VK_KHR_acceleration_structure
import Vulkan.Utils.QueueAssignment
import Vulkan.Zero
import VulkanMemoryAllocator ( AllocationCreateInfo
( requiredFlags
, usage
)
, MemoryUsage
( MEMORY_USAGE_GPU_ONLY
)
)
----------------------------------------------------------------
-- TLAS
----------------------------------------------------------------
-- | Build the top level acceleration structure: one instance of the
-- scene's bottom level structure at the identity transform.
createTLAS :: SceneBuffers -> V (ReleaseKey, AccelerationStructureKHR)
createTLAS sceneBuffers = do
  --
  -- Create the bottom level accelerationStructures
  --
  (_blasReleaseKey, blas) <- createBLAS sceneBuffers
  blasAddress <- getAccelerationStructureDeviceAddressKHR' zero
    { accelerationStructure = blas
    }
  -- A single instance, identity transform, visible to all ray masks,
  -- with back-face culling disabled.
  let identity = TransformMatrixKHR (1, 0, 0, 0) (0, 1, 0, 0) (0, 0, 1, 0)
      inst :: AccelerationStructureInstanceKHR
      inst = zero
        { transform = identity
        , instanceCustomIndex = 0
        , mask = complement 0
        , instanceShaderBindingTableRecordOffset = 0
        , flags = GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR
        , accelerationStructureReference = coerce blasAddress
        }
  --
  -- Create the buffer for the top level instances
  --
  let numInstances = 1
      instanceDescsSize =
        numInstances * cStructSize @AccelerationStructureInstanceKHR
  (_instBufferReleaseKey, (instBuffer, instBufferAllocation, _)) <- withBuffer'
    zero
      { usage =
        BUFFER_USAGE_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR
          .|. BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT
      , size = fromIntegral instanceDescsSize
      }
    -- TODO: Make this GPU only and transfer to it
    zero
      { requiredFlags = MEMORY_PROPERTY_HOST_VISIBLE_BIT
          .|. MEMORY_PROPERTY_HOST_COHERENT_BIT
      }
  nameObject' instBuffer "TLAS instances"
  instBufferDeviceAddress <- getBufferDeviceAddress' zero
    { buffer = instBuffer
    }
  --
  -- populate the instance buffer
  --
  (instMapKey, instMapPtr) <- withMappedMemory' instBufferAllocation
  liftIO $ poke (castPtr @_ @AccelerationStructureInstanceKHR instMapPtr) inst
  release instMapKey
  let buildGeometries =
        [ zero
            { geometryType = GEOMETRY_TYPE_INSTANCES_KHR
            , geometry = Instances AccelerationStructureGeometryInstancesDataKHR
                { arrayOfPointers = False
                , data' = DeviceAddressConst instBufferDeviceAddress
                }
            , flags = GEOMETRY_OPAQUE_BIT_KHR
            }
        ]
      buildInfo = zero { type' = ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR
                       , mode = BUILD_ACCELERATION_STRUCTURE_MODE_BUILD_KHR -- ignored but used later
                       , srcAccelerationStructure = NULL_HANDLE -- ignored
                       , dstAccelerationStructure = NULL_HANDLE -- ignored
                       , geometries = buildGeometries
                       , scratchData = zero
                       }
      maxPrimitiveCounts = [1]
      rangeInfos = [zero { primitiveCount = 1, primitiveOffset = 0 }]
  sizes <- getAccelerationStructureBuildSizesKHR'
    ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR
    buildInfo
    maxPrimitiveCounts
  (_tlasBufferKey, tlasKey, tlas) <- buildAccelerationStructure buildInfo
                                                                rangeInfos
                                                                sizes
  nameObject' tlas "TLAS"
  pure (tlasKey, tlas)
-- | Allocate backing and scratch buffers, create the acceleration
-- structure handle, and run the build commands for it on the device.
-- Returns the release keys for the structure and its backing buffer.
buildAccelerationStructure
  :: AccelerationStructureBuildGeometryInfoKHR
  -> Vector AccelerationStructureBuildRangeInfoKHR
  -> AccelerationStructureBuildSizesInfoKHR
  -> V (ReleaseKey, ReleaseKey, AccelerationStructureKHR)
buildAccelerationStructure geom ranges sizes = do
  --
  -- Allocate the buffer to hold the acceleration structure
  --
  let bufferSize = accelerationStructureSize sizes
  (asBufferKey, (asBuffer, _, _)) <- withBuffer'
    zero { size = bufferSize
         , usage = BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR
         }
    zero { usage = MEMORY_USAGE_GPU_ONLY }
  --
  -- Allocate scratch space for building
  --
  (scratchBufferKey, (scratchBuffer, _, _)) <- withBuffer'
    zero { size = buildScratchSize sizes
         , usage = BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT
         }
    zero { usage = MEMORY_USAGE_GPU_ONLY }
  scratchBufferDeviceAddress <- getBufferDeviceAddress' zero
    { buffer = scratchBuffer
    }
  -- NOTE(review): type' is hard-coded to TOP_LEVEL here although this
  -- helper is also used for the BLAS in 'createBLAS'; it should probably
  -- mirror the type in 'geom' -- confirm against the
  -- VkAccelerationStructureCreateInfoKHR valid-usage rules.
  let asci = zero { type' = ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR
                  , buffer = asBuffer
                  , offset = 0
                  , size = bufferSize
                  }
  (asKey, as) <- withAccelerationStructureKHR' asci
  oneShotComputeCommands $ do
    cmdBuildAccelerationStructuresKHR'
      [ geom { dstAccelerationStructure = as
             , scratchData = DeviceAddress scratchBufferDeviceAddress
             }
      ]
      [ranges]
  -- Scratch is only needed during the build; release it eagerly.
  release scratchBufferKey
  pure (asKey, asBufferKey, as)
--
-- Create the bottom level acceleration structure
--
-- | Build the bottom level acceleration structure over the scene's AABB
-- geometry.
createBLAS :: SceneBuffers -> V (ReleaseKey, AccelerationStructureKHR)
createBLAS sceneBuffers = do
  (sceneGeom, sceneOffsets) <- sceneGeometry sceneBuffers
  let buildInfo = zero { type' = ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR
                       , mode = BUILD_ACCELERATION_STRUCTURE_MODE_BUILD_KHR -- ignored but used later
                       , srcAccelerationStructure = NULL_HANDLE -- ignored
                       , dstAccelerationStructure = NULL_HANDLE -- ignored
                       , geometries = [sceneGeom]
                       , scratchData = zero
                       }
      maxPrimitiveCounts = [sceneSize sceneBuffers]
  sizes <- getAccelerationStructureBuildSizesKHR'
    ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR
    buildInfo
    maxPrimitiveCounts
  (_blasBufferKey, blasKey, blas) <- buildAccelerationStructure buildInfo
                                                                sceneOffsets
                                                                sizes
  nameObject' blas "BLAS"
  pure (blasKey, blas)
-- | Describe the scene's AABB buffer as opaque acceleration-structure
-- geometry, along with the matching build range info.
sceneGeometry
  :: SceneBuffers
  -> V
       ( AccelerationStructureGeometryKHR
       , Vector AccelerationStructureBuildRangeInfoKHR
       )
sceneGeometry SceneBuffers {..} = do
  boxAddr <- getBufferDeviceAddress' zero { buffer = sceneAabbs }
  -- Boxes are laid out contiguously, one AabbPositionsKHR per primitive.
  let boxData = AccelerationStructureGeometryAabbsDataKHR
        { data' = DeviceAddressConst boxAddr
        , stride = fromIntegral (sizeOf (undefined :: AabbPositionsKHR))
        }
      geom :: AccelerationStructureGeometryKHR
      geom = zero { geometryType = GEOMETRY_TYPE_AABBS_KHR
                  , flags = GEOMETRY_OPAQUE_BIT_KHR
                  , geometry = Aabbs boxData
                  }
  let offsetInfo = [zero { primitiveCount = sceneSize, primitiveOffset = 0 }]
  pure (geom, offsetInfo)
----------------------------------------------------------------
-- Utils
----------------------------------------------------------------
-- TODO: use compute queue here
-- | Record the given commands into a fresh primary command buffer, submit
-- them on the graphics queue and block (up to one second) for completion.
oneShotComputeCommands :: CmdT V () -> V ()
oneShotComputeCommands cmds = do
  --
  -- Create command buffers
  --
  graphicsQueue <- getGraphicsQueue
  QueueFamilyIndex graphicsQueueFamilyIndex <- getGraphicsQueueFamilyIndex
  (poolKey, commandPool) <- withCommandPool' zero
    { queueFamilyIndex = graphicsQueueFamilyIndex
    }
  ~[commandBuffer] <- allocateCommandBuffers' zero
    { commandPool = commandPool
    , level = COMMAND_BUFFER_LEVEL_PRIMARY
    , commandBufferCount = 1
    }
  --
  -- Record and kick off the build commands
  --
  useCommandBuffer' commandBuffer
                    zero { flags = COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT }
                    cmds
  (fenceKey, fence) <- withFence' zero
  queueSubmit
    graphicsQueue
    [SomeStruct zero { commandBuffers = [commandBufferHandle commandBuffer] }]
    fence
  let oneSecond = 1e9
  waitForFencesSafe' [fence] True oneSecond >>= \case
    SUCCESS -> pure ()
    TIMEOUT -> error "Timed out running one shot commands"
    _ -> error "Unhandled exit code in oneShotComputeCommands"
  release fenceKey
  release poolKey
|
expipiplus1/vulkan
|
examples/rays/AccelerationStructure.hs
|
bsd-3-clause
| 9,457
| 0
| 16
| 3,069
| 1,529
| 854
| 675
| -1
| -1
|
module SourceNames where
import HsName(Id,HsName(..),hsUnQual,ModuleName(..),moduleName)
import SrcLoc1(SrcLoc,loc0,srcFile)
import qualified SrcLoc1 as L
import PrettyPrint
import HasBaseName
import UniqueNames
import QualNames
import SpecialNames
import Char(isUpper)
import Data.Generics
-- | A name of type i paired with its source location. Eq, Ord, Show and
-- Read all ignore the location, so two SNs naming the same thing compare
-- equal regardless of where they occur.
data SN i = SN i SrcLoc deriving (Data, Typeable)
instance Show i => Show (SN i) where
  showsPrec p (SN i _) = showsPrec p i
instance Read i => Read (SN i) where
  -- Parsed names get the dummy location loc0.
  readsPrec p s = [(SN i loc0,r)|(i,r)<-readsPrec p s]
instance Unique (SN HsName) where
  unique m (SN n p) = G m (hsUnQual n) (srcLoc p)
instance Unique (SN Id) where
  unique m (SN n p) = G m n (srcLoc p)
--srcName (PN n _) = SN n loc0 -- a temporary hack (I hope)
-- | Attach a name's own source location to its base name.
srcName n = SN (getBaseName n) (L.srcLoc n)
-- | Wrap a bare name with the dummy location loc0.
fakeSN n = SN n loc0
instance HasBaseName (SN i) i where getBaseName (SN i _) = i
instance Eq i => Eq (SN i) where SN n1 _==SN n2 _ = n1==n2
instance Ord i => Ord (SN i) where compare (SN n1 _) (SN n2 _) = compare n1 n2
instance Functor SN where fmap f (SN i o) = SN (f i) o
-- Qualification operates on the wrapped name; the location is carried
-- through unchanged by fmap.
instance QualNames qn m n => QualNames (SN qn) m (SN n) where
    getQualifier = getQualifier . getBaseName
    getQualified = fmap getQualified
    mkUnqual = fmap mkUnqual
    mkQual m = fmap (mkQual m)
-- Special names are built as location-expecting functions here: each one
-- still needs a SrcLoc to become a full SN.
instance HasSpecialNames i => HasSpecialNames (SrcLoc->SN i) where
  list_tycon_name = SN list_tycon_name
  fun_tycon_name = SN fun_tycon_name
  char_tycon_name = SN char_tycon_name
  tuple_tycon_name = SN . tuple_tycon_name
instance IsSpecialName i => IsSpecialName (SN i) where
  is_list_tycon_name (SN i _) = is_list_tycon_name i
  is_fun_tycon_name (SN i _) = is_fun_tycon_name i
  is_char_tycon_name (SN i _) = is_char_tycon_name i
  is_tuple_tycon_name n (SN i _) = is_tuple_tycon_name n i
---
-- Pretty printing shows the location only in debug mode, as <<loc>>.
instance Printable i => Printable (SN i) where
  ppi (SN n p) = ppi n<>ppIfDebug ("<<"<>p<>">>")
  wrap (SN n p) = wrap n<>ppIfDebug ("<<"<>p<>">>")
  -- positions ends up outside parenthesis...
instance PrintableOp i => PrintableOp (SN i) where
  isOp (SN n p) = isOp n
  ppiOp (SN n p) = ppiOp n<>ppIfDebug ("<<"<>p<>">>")
-- | Interpret a located HsName as a module name. Unqualified names are
-- resolved relative to the file they occur in; a qualified name whose
-- last component starts with an upper-case letter is treated as a
-- hierarchical module name, anything else is rejected via 'fail'.
-- NOTE(review): a Qual name with an empty final component falls through
-- all patterns here -- confirm callers never produce one.
hsName2modName (SN hs loc) =
  case hs of
    UnQual m -> return (SN (moduleName (srcFile loc) m) loc)
    Qual m n@(c:_) ->
      if isUpper c
      then return (SN (PlainModule (mn m++"."++n)) loc)
      else fail "Invalid hierarchical module name"
  where mn (PlainModule s) = s
        mn (MainModule _) = "Main"
|
forste/haReFork
|
tools/base/parse2/SourceNames.hs
|
bsd-3-clause
| 2,505
| 2
| 17
| 594
| 1,078
| 543
| 535
| -1
| -1
|
-- |A lowest common denominator interface to the Win32 and MacOSX MIDI bindings, MacOSX part.
module System.MIDI.MacOSX
( module System.MIDI.Base
, Source
, Destination
, Connection
, enumerateSources
, enumerateDestinations
, MIDIHasName
, getName
, getModel
, getManufacturer
, openSource
, openDestination
, close
, send
, sendSysEx
, start
, stop
, getNextEvent
, getEvents
, currentTime
) where
import System.MIDI.Base
import Control.Monad
import Control.Concurrent.MVar
import Control.Concurrent.STM (atomically)
import Control.Concurrent.STM.TChan
import Data.List
import Foreign hiding (unsafePerformIO)
import Foreign.StablePtr
import System.IO.Unsafe
import System.MacOSX.CoreFoundation
import System.MacOSX.CoreAudio
import System.MacOSX.CoreMIDI
-- |Gets all the events from the buffer.
-- Repeatedly polls 'getNextEvent' until the buffer is exhausted.
getEvents :: Connection -> IO [MidiEvent]
getEvents conn = drain
  where
    drain = do
      next <- getNextEvent conn
      case next of
        Nothing -> return []
        Just ev -> fmap (ev :) drain
-- |Gets the next event from a buffered connection.
-- Fails (via 'fail') when the connection was opened with a user callback
-- instead of the internal buffer.
getNextEvent :: Connection -> IO (Maybe MidiEvent)
getNextEvent conn = case cn_fifo_cb conn of
  Right _ -> fail "this is not a buffered connection"
  Left chan ->
    -- One atomic transaction instead of the previous isEmptyTChan /
    -- readTChan pair: with two separate transactions a concurrent reader
    -- draining the channel in between would make readTChan block
    -- indefinitely.
    atomically $ tryReadTChan chan
-- Readable aliases for the raw CoreMIDI reference types.
type Client = MIDIClientRef
type Device = MIDIDeviceRef
type Port = MIDIPortRef
-- |The opaque data type representing a MIDI connection
data Connection = Connection
  { cn_isInput :: Bool
  , cn_port :: MIDIPortRef
  , cn_endpoint :: MIDIEndpointRef
  , cn_time :: MVar UInt64 -- measured in nanosecs
  , cn_alive :: MVar Bool
  -- Either the internal event buffer (Left) or a user callback (Right);
  -- the two receive modes are mutually exclusive.
  , cn_fifo_cb :: Either (TChan MidiEvent) ClientCallback
  , cn_midiproc :: FunPtr (MIDIReadProc () ())
  , cn_mydata :: StablePtr (MVar Connection)
  }
----- automatic client creation

-- NOINLINE is essential for the top-level 'unsafePerformIO' idiom: without
-- it GHC is free to inline 'client', creating a separate MVar at each use
-- site instead of the single shared, lazily-filled cache that 'getClient'
-- relies on.
{-# NOINLINE client #-}
client = unsafePerformIO $ newEmptyMVar :: MVar Client
{-
#ifdef __GLASGOW_HASKELL__
clientFinalizer :: IO ()
clientFinalizer = do
  c <- readMVar client
  disposeClient c
#endif
-}
-- | Return the process-wide MIDI client, creating it on first use.
-- NOTE(review): the isEmptyMVar/putMVar sequence is not atomic, so two
-- threads racing through first use could both create a client -- confirm
-- this is only ever called from one thread.
getClient :: IO MIDIClientRef
getClient = do
  b <- isEmptyMVar client
  if b
    then do
      x <- newClient "HaskellMidi"
      putMVar client x
{-
#ifdef __GLASGOW_HASKELL__
      addMVarFinalizer client clientFinalizer -- uh-oh, that's not a good idea (not in the present form)
#endif
-}
      return x
    else readMVar client
-- |Returns the time elapsed since the last `start` call, in milisecs.
currentTime :: Connection -> IO Word32
currentTime conn = do
  now <- audioGetCurrentTimeInNanos
  started <- readMVar (cn_time conn)
  return (nanoToMili (now - started))
-- Truncating conversion from nanoseconds to miliseconds.
nanoToMili :: UInt64 -> Word32
nanoToMili nanos = fromIntegral (nanos `div` 1000000)
-- | Turn a raw (timestamp, bytes) short message into a MidiEvent whose
-- timestamp is miliseconds relative to the connection's start time t0.
convertShortMessage :: UInt64 -> (MIDITimeStamp,[Word8]) -> IO MidiEvent
convertShortMessage t0 (ts',bytes) = do
  ts <- audioConvertHostTimeToNanos ts'
  return $ MidiEvent (nanoToMili $ ts-t0) (translateShortMessage $ decodeShortMessage bytes)
-- | The CoreMIDI read callback: recover the Connection from the stable
-- pointer, convert incoming packets to MidiEvents, and hand them to the
-- buffer or user callback.
myMIDIReadProc :: Ptr MIDIPacket -> Ptr () -> Ptr () -> IO ()
myMIDIReadProc packets myptr _ = do
  let stabptr = castPtrToStablePtr myptr :: StablePtr (MVar Connection)
  mv <- deRefStablePtr stabptr :: IO (MVar Connection)
  mconn <- tryTakeMVar mv -- we are also "blocking" (handling) further callbacks this way
  case mconn of
    Nothing -> return ()
    Just conn -> do
      time0 <- readMVar (cn_time conn)
      list1 <- depackMIDIPacketList packets
      -- Split short (channel) messages from SysEx packets.
      let (normal,sysex') = partition (\(_,bytes) -> isShortMessage bytes) list1
      sysexs <- forM sysex' $ \(ts',bytes) -> do
        ts <- audioConvertHostTimeToNanos ts'
        -- tail drops the leading 0xF0 status byte.
        return $ MidiEvent (nanoToMili $ ts-time0) (SysEx $ tail bytes)
      normals <- mapM (convertShortMessage time0) normal
      let events = sysexs ++ normals
      case (cn_fifo_cb conn) of
        Left chan -> atomically $ mapM_ (writeTChan chan) events
        Right call -> mapM_ call events
      putMVar mv conn -- do not forget to put it back!
-- |Opens a MIDI Source.
-- There are two possibilities to receive MIDI messages. The user can either supply a callback function,
-- or get the messages from an asynchronous buffer. However, mixing the two approaches is not allowed.
openSource :: Source -> Maybe ClientCallback -> IO Connection
openSource src@(Source endpoint) mcallback = do
  client <- getClient
  -- The MVar is handed to the C callback through a StablePtr; the
  -- Connection is written into it once fully assembled below.
  myData <- newEmptyMVar :: IO (MVar Connection)
  sp <- newStablePtr myData
  the_callback <- mkMIDIReadProc myMIDIReadProc
  time <- newEmptyMVar
  alive <- newMVar True
  fifo_cb <- case mcallback of
    Just cb -> return $ Right cb
    Nothing -> liftM Left $ atomically newTChan
  inport <- newInputPort client "Input Port" the_callback (castStablePtrToPtr sp)
  let conn = Connection True inport endpoint time alive fifo_cb the_callback sp
  putMVar myData conn
  return conn
-- |Opens a MIDI Destination.
openDestination :: Destination -> IO Connection
openDestination dst@(Destination endpoint) = do
  client <- getClient
  outport <- newOutputPort client "Output Port"
  alive <- newMVar True
  time <- newEmptyMVar
  -- Output connections have no receive machinery, so the fifo/callback,
  -- read proc and stable pointer fields are left undefined; nothing may
  -- ever force them on an output connection.
  let conn = Connection False outport endpoint time alive undefined undefined undefined
  return conn
-- Internal: deliver a raw short message; only valid on destinations.
sendShortMessage :: Connection -> ShortMessage -> IO ()
sendShortMessage conn msg = case cn_isInput conn of
  True -> fail "sending short messages to midi sources is not supported"
  False -> midiSend (cn_port conn) (Destination $ cn_endpoint conn) msg
-- |Sends a short message. The connection must be a `Destination`.
send :: Connection -> MidiMessage -> IO ()
send conn msg = sendShortMessage conn (untranslateShortMessage msg)
-- |Sends a system exclusive message. You shouldn't include the starting \/ trailing bytes 0xF0 and 0xF7.
sendSysEx :: Connection -> [Word8] -> IO ()
sendSysEx conn dat = midiSendSysEx (cn_endpoint conn) dat
-- |Starts a connection. This is required for receiving MIDI messages, and also for starting the clock.
-- The emptiness of cn_time doubles as the started/stopped flag.
start :: Connection -> IO ()
start conn = do
  b <- isEmptyMVar (cn_time conn)
  if b
    then do
      hosttime <- audioGetCurrentTimeInNanos
      putMVar (cn_time conn) hosttime
      case cn_isInput conn of
        True -> connectToSource (cn_port conn) (Source $ cn_endpoint conn) nullPtr
        False -> return ()
    else putStrLn "warning: you shouldn't call start twice"
-- |Stops a connection. Disconnects input sources and clears the clock.
stop :: Connection -> IO ()
stop conn = do
  b <- isEmptyMVar (cn_time conn)
  if not b
    then do
      takeMVar (cn_time conn)
      case cn_isInput conn of
        True -> disconnectFromSource (cn_port conn) (Source $ cn_endpoint conn)
        False -> return ()
    else putStrLn "warning: you shouldn't call stop twice"
-- |Closes a MIDI Connection
-- Stops a still-running input connection first, then disposes the port
-- and frees the FFI resources.
close conn = do
  when (cn_isInput conn) $ do
    b <- isEmptyMVar (cn_time conn)
    when (not b) (stop conn)
  disposePort (cn_port conn)
  cleanup conn
-- called by "close"; not exposed.
-- Frees the callback FunPtr and StablePtr that only input connections own.
cleanup :: Connection -> IO ()
cleanup conn = case (cn_isInput conn) of
  True -> do
    freeHaskellFunPtr (cn_midiproc conn)
    freeStablePtr (cn_mydata conn)
  False -> return ()
|
sixohsix/hmidi
|
System/MIDI/MacOSX.hs
|
bsd-3-clause
| 7,172
| 0
| 21
| 1,558
| 1,923
| 946
| 977
| 168
| 3
|
module HPage.Test.Server where
import Data.Char
import GHC.IO
import Control.Monad.Error
import Test.QuickCheck
import Test.Runner.Driver
import Test.Runner
import qualified HPage.Control as HP
import qualified HPage.Server as HPS
import qualified Language.Haskell.Interpreter as Hint
import qualified Language.Haskell.Interpreter.Server as HS
import System.Directory
import System.FilePath
import Paths_hpage
-- | A module name generated for the tests; Show yields the bare string.
newtype ModuleName = MN {mnString :: String}
    deriving (Eq)
instance Show ModuleName where
    show = mnString
instance Arbitrary ModuleName where
    arbitrary = elements $ map (MN . ("Test" ++)) ["A", "Be", "Cee", "Diego", "Epsilon", "Foxtrot"]
-- | The name of a module known to exist in the test environment.
newtype KnownModuleName = KMN {kmnString :: String}
    deriving (Eq, Show)
instance Arbitrary KnownModuleName where
    arbitrary = elements $ map KMN ["Data.List", "Control.Monad", "System.Directory", "Control.Monad.Loops"]
-- | A (possibly applied) type/class name used in interpretation tests.
newtype ClassName = CN {cnString :: String}
    deriving (Eq, Show)
instance Arbitrary ClassName where
    arbitrary = elements $ map CN ["HPage", "IO", "IO a", "Int", "String"]
instance Arbitrary HP.Extension where
    arbitrary = elements HP.availableExtensions
-- | A set of language extensions together with a source file that needs
-- exactly those extensions to compile.
data WorkingExtension = WEX {wexExts :: [HP.Extension],
                             wexModule :: String}
    deriving (Eq, Show)
instance Arbitrary WorkingExtension where
    arbitrary = elements [wexTypeSynonymInstances, wexOverlappingInstances, wexFlexibleInstances]
wexTypeSynonymInstances :: WorkingExtension
wexTypeSynonymInstances = WEX [HP.TypeSynonymInstances] "TypeSynonymInstances.hs"
wexOverlappingInstances :: WorkingExtension
wexOverlappingInstances = WEX [HP.TypeSynonymInstances,
                               HP.OverlappingInstances] "OverlappingInstances.hs"
wexFlexibleInstances :: WorkingExtension
wexFlexibleInstances = WEX [HP.FlexibleInstances] "FlexibleInstances.hs"
-- | Run an action, reporting True exactly when it raised an error in the
-- underlying MonadError.
shouldFail :: (MonadError e m) => m a -> m Bool
shouldFail action = catchError (action >> return False) (const (return True))
-- | Scratch directory created by 'main' for generated module files and
-- removed again once the tests finish.
testDir :: FilePath
testDir = "TestFiles"
-- | Start one HPage server and one hint interpreter server, run every
-- property against them, and clean up the scratch directory afterwards.
main :: IO ()
main =
    do
        createDirectoryIfMissing True testDir
        hps <- HPS.start
        hs <- HS.start
        _ <- runTests [("Editing", runWithQuickCheck $ prop_setget_text hps)
                      , ("New Page", runWithQuickCheck $ prop_new_page hps)
                      , ("Open Page", runWithQuickCheck $ prop_open_page hps)
                      , ("Open Page Failing", runWithQuickCheck $ prop_open_page_fail hps)
                      , ("Set/Get Page", runWithQuickCheck $ prop_setget_page hps)
                      , ("Set Page Index Failing", runWithQuickCheck $ prop_set_page_index_fail hps)
                      , ("Save Page", runWithQuickCheck $ prop_save_page hps)
                      , ("Save Page Failing", runWithQuickCheck $ prop_save_page_fail hps)
                      , ("Save Page As...", runWithQuickCheck $ prop_save_page_as hps)
                      , ("Close Page", runWithQuickCheck $ prop_close_page hps)
                      , ("Named Expressions", runWithQuickCheck $ prop_let_fail hps hs)
                      , ("Generally Failing", runWithQuickCheck $ prop_fail hps hs)
                      , ("Load Modules", runWithQuickCheck $ prop_load_module hps hs)
                      , ("Import Modules", runWithQuickCheck $ prop_import_module hps)
                      , ("Reload Modules", runWithQuickCheck $ prop_reload_modules hps hs)
                      , ("Get Loaded Modules", runWithQuickCheck $ prop_get_loaded_modules hps hs)
                      , ("Describe Module", runWithQuickCheck $ prop_get_module_exports hps hs)
                      , ("Get Available Extensions", runWithQuickCheck prop_get_available_extensions)
                      , ("Set Extensions", runWithQuickCheck $ prop_get_set_extensions hps)
                      , ("Working Extensions", runWithQuickCheck $ prop_working_extensions hps)
                      , ("Set Invalid Extensions", runWithQuickCheck $ prop_get_set_extension_fail hps)
                      , ("Set Src Directories", runWithQuickCheck $ prop_get_set_source_dirs hps)
                      , ("Working Src Directories", runWithQuickCheck $ prop_working_source_dirs hps)
                      , ("Get/Set GHC Options", runWithQuickCheck $ prop_get_set_ghc_opts hps)
                      , ("Get/Set Invalid GHC Options", runWithQuickCheck $ prop_get_set_ghc_opts_fail hps)
                      , ("Working GHC Options", runWithQuickCheck $ prop_working_ghc_opts hps)]
        removeDirectoryRecursive testDir
-- Errors are compared by their rendering; good enough for test equality.
instance Eq (Hint.InterpreterError) where
    a == b = show a == show b
-- | Evaluating a misspelt function must produce the same error through
-- HPage as through a direct hint interpreter.
prop_fail :: HPS.ServerHandle -> HS.ServerHandle -> String -> Bool
prop_fail hps hs txt' =
    unsafePerformIO $ do
        let txt = show $ length txt'
        -- "lenggth" is intentionally misspelt so evaluation fails.
        let expr = "lenggth \"" ++ txt ++ "\""
        Left hpsr <- HPS.runIn hps $ HP.setPageText expr 0 >> HP.interpret
        Left hsr <- HS.runIn hs $ Hint.eval expr
        return $ hsr == hpsr
-- | Importing a module twice succeeds both times and it stays listed
-- among the imported modules.
prop_import_module :: HPS.ServerHandle -> KnownModuleName -> Bool
prop_import_module hps kmn =
    unsafePerformIO $ do
        let mn = kmnString kmn
        HPS.runIn hps $ do
            r1 <- HP.importModules [mn]
            r2 <- HP.getImportedModules
            r3 <- HP.importModules [mn]
            r4 <- HP.getImportedModules
            return $ r1 == (Right ()) &&
                     r3 == (Right ()) &&
                     mn `elem` r2 &&
                     mn `elem` r4
-- | HPage's description of a module's exports must agree with hint's,
-- pairwise over functions, classes and data declarations.
prop_get_module_exports :: HPS.ServerHandle -> HS.ServerHandle -> KnownModuleName -> Bool
prop_get_module_exports hps hs kmn =
    unsafePerformIO $ do
        let mn = kmnString kmn
        Right hpsr <- HPS.runIn hps $ HP.importModules [mn] >> HP.getModuleExports mn
        Right hsr <- HS.runIn hs $ Hint.setImports ["Prelude", mn] >> Hint.getModuleExports mn
        -- liftDebugIO (hpsr, hsr)
        return $ all match $ zip hpsr hsr
    where match ((HP.MEFun fn _), (Hint.Fun fn2)) = fn == fn2
          match ((HP.MEClass cn cfs), (Hint.Class cn2 cfs2)) = cn == cn2 && all match (zip cfs (map Hint.Fun cfs2))
          match ((HP.MEData dn dcs), (Hint.Data dn2 dcs2)) = dn == dn2 && all match (zip dcs (map Hint.Fun dcs2))
          match _ = False
-- | Loading a module first by file path and then by dotted module name must
-- behave identically in HPage and in a raw hint session: same value for
-- @v@ after each load, and the same set of loaded modules.
-- The generated name \"Test\" is skipped because it would clash with the
-- fixed @test.hs@ page written below.
prop_load_module :: HPS.ServerHandle -> HS.ServerHandle -> ModuleName -> Bool
prop_load_module hps hs mn =
    (show mn == "Test") || (
    unsafePerformIO $ do
        let mname = testDir ++ "." ++ show mn
        let ftxt = "module " ++ mname ++ " where v = 32"
        let f2txt = "v = \"" ++ show mn ++ "\""
        hpsr <- HPS.runIn hps $ do
            -- Save TestFiles/Test...hs
            _ <- HP.setPageText ftxt 0
            HP.savePageAs $ testDir </> (show mn) ++ ".hs"
            -- Save TestFiles/test.hs
            _ <- HP.setPageText f2txt 0
            HP.savePageAs $ testDir </> "test.hs"
            -- Load TestFiles/test.hs by path
            _ <- HP.loadModules [testDir </> "test.hs"]
            _ <- HP.setPageText "v" 0
            Right fv <- HP.interpret
            fm <- HP.getLoadedModules >>= return . mN
            -- Load TestFiles/Test...hs by name
            _ <- HP.loadModules [mname]
            _ <- HP.setPageText "v" 0
            -- NOTE(review): the result of this first interpret is bound but
            -- never used; the value is re-interpreted on the next line.
            r <- HP.interpret
            Right sv <- HP.interpret
            sm <- HP.getLoadedModules >>= return . mN
            return (HP.intValue fv, HP.intValue sv, fm, sm)
        hsr <- HS.runIn hs $ do
            Hint.loadModules [testDir </> "test.hs"]
            Hint.getLoadedModules >>= Hint.setTopLevelModules
            fv <- Hint.eval "v"
            fm <- Hint.getLoadedModules
            Hint.loadModules [mname]
            Hint.getLoadedModules >>= Hint.setTopLevelModules
            sv <- Hint.eval "v"
            sm <- Hint.getLoadedModules
            return (fv, sv, Right fm, Right sm)
        -- liftDebugIO (hpsr, hsr)
        return $ Right hpsr == hsr )
    -- Project loaded-module descriptions down to plain names for comparison
    -- with hint's getLoadedModules.
    where mN (Right mns) = Right $ map HP.modName mns
          mN (Left err) = Left err
-- | After saving, loading and reloading a module, interpreting @test@ in
-- HPage must give the same value as evaluating it in a raw hint session.
-- NOTE(review): the @Right ... <-@ binds are partial; a failure crashes the
-- property instead of returning False.
prop_reload_modules :: HPS.ServerHandle -> HS.ServerHandle -> String -> Bool
prop_reload_modules hps hs txt' =
    unsafePerformIO $ do
        let txt = show $ length txt'
        let expr = "test = show \"" ++ txt ++ "\""
        Right hpsr <- HPS.runIn hps $ do
            _ <- HP.setPageText expr 0
            HP.savePageAs $ testDir </> "test.hs"
            _ <- HP.setPageText "test" 0
            _ <- HP.loadModules [testDir </> "test.hs"]
            _ <- HP.reloadModules
            HP.interpret
        Right hsr <- HS.runIn hs $ do
            Hint.loadModules [testDir </> "test.hs"]
            Hint.getLoadedModules >>= Hint.setTopLevelModules
            Hint.eval "test"
        return $ HP.intValue hpsr == hsr
-- | getLoadedModules must report the same modules as hint after loading
-- three generated files: a plain script, a module with an import, and the
-- imported module itself.
prop_get_loaded_modules :: HPS.ServerHandle -> HS.ServerHandle -> ModuleName -> Bool
prop_get_loaded_modules hps hs mn =
    unsafePerformIO $ do
        -- Three sources: a bare binding, a module importing ...3, and ...3.
        let expr1 = "ytest = show \"" ++ show mn ++ "\""
        let expr2 = "module " ++ testDir ++ ".XX" ++ show mn ++ "2 where import " ++ testDir ++ ".XX" ++ show mn ++ "3; xtest = show \"" ++ show mn ++ "\""
        let expr3 = "module " ++ testDir ++ ".XX" ++ show mn ++ "3 where xfact = (1,2,3)"
        let mnf1 = testDir </> "XX" ++ (show mn) ++ "1.hs"
        let mnf2 = testDir </> "XX" ++ (show mn) ++ "2.hs"
        let mnf3 = testDir </> "XX" ++ (show mn) ++ "3.hs"
        -- Write all three files through the HPage server.
        HPS.runIn hps $ do
            _ <- HP.setPageText expr1 0
            HP.savePageAs mnf1
            _ <- HP.setPageText expr2 0
            HP.savePageAs mnf2
            _ <- HP.setPageText expr3 0
            HP.savePageAs mnf3
        -- Load each file in both servers and compare the loaded-module sets.
        hpsr1 <- HPS.runIn hps $ HP.loadModules [mnf1] >> HP.getLoadedModules
        hsr1 <- HS.runIn hs $ Hint.loadModules [mnf1] >> Hint.getLoadedModules
        hpsr2 <- HPS.runIn hps $ HP.loadModules [mnf2] >> HP.getLoadedModules
        hsr2 <- HS.runIn hs $ Hint.loadModules [mnf2] >> Hint.getLoadedModules
        hpsr3 <- HPS.runIn hps $ HP.loadModules [mnf3] >> HP.getLoadedModules
        hsr3 <- HS.runIn hs $ Hint.loadModules [mnf3] >> Hint.getLoadedModules
        --liftDebugIO [(hpsr1, hpsr2, hpsr3), (hsr1, hsr2, hsr3)]
        return $ (mN hpsr1, mN hpsr2, mN hpsr3) == (hsr1, hsr2, hsr3)
    -- Project HPage module descriptions down to plain names.
    where mN (Right mns) = Right $ map HP.modName mns
          mN (Left err) = Left err
-- | Setting the page text and reading it straight back yields exactly the
-- text that was set.
prop_setget_text :: HPS.ServerHandle -> String -> Bool
prop_setget_text hps txt' =
    unsafePerformIO $ HPS.runIn hps $
        do let expected = show (length txt')
           _ <- HP.setPageText expected 0
           actual <- HP.getPageText
           return (actual == expected)
-- | Exercises the undo/redo history of a page: records snapshots b0..b5
-- while editing, then checks that six undos walk backwards through them,
-- six redos walk forwards, and that a fresh edit after undos truncates the
-- redo branch.
-- NOTE(review): @txt@ is computed but never used.
-- NOTE(review): @after@ has 6 elements (mapM over [0..5]) but is compared
-- against a 7-element list — confirm the expected-history lists are right.
prop_undoredo :: HPS.ServerHandle -> String -> Bool
prop_undoredo hps txt' =
    unsafePerformIO $ HPS.runIn hps $ do
        let txt = show $ length txt'
        HP.addPage
        b0 <- HP.getPageText
        _ <- HP.setPageText b0 0
        b1 <- HP.getPageText
        addExpr "xx"
        b2 <- HP.getPageText
        addExpr "yy"
        b3 <- HP.getPageText
        _ <- HP.setPageText b3 6    -- same text, cursor moved: still a history entry?
        b4 <- HP.getPageText
        addExpr "zz"
        b5 <- HP.getPageText
        let to5 = [0..5] :: [Int]
        -- Walk the history backwards, then forwards again.
        after <- mapM (\_ -> HP.undo >> HP.getPageText) to5
        redo <- mapM (\_ -> HP.redo >> HP.getPageText) to5
        -- Fresh edits after undos must discard the redo branch.
        _ <- HP.setPageText "" 0 >> HP.setPageText "cc" 0 >> HP.setPageText "" 0
        c0 <- addExpr "zz" >> HP.getPageText
        c1 <- HP.undo >> HP.getPageText
        c2 <- HP.undo >> HP.getPageText
        c3 <- HP.setPageText "ww" 0 >> HP.getPageText
        c4 <- HP.redo >> HP.getPageText   -- nothing to redo: text unchanged
        let result = ([b4, b3, b2, b1, b0, "", ""] == after) &&
                     ([b1, b2, b3, b4, b5, b5, b5] == redo) &&
                     ([c0, c1, c2, c3, c4] == ["zz", "", "cc", "ww", "ww"])
        if not result
            then
                do
                    -- liftDebugIO [b5, b4, b3, b2, b1, b0, "", ""]
                    -- liftDebugIO after
                    -- liftDebugIO [b1, b2, b3, b4, b5, b5, b5, b5]
                    -- liftDebugIO redo
                    -- liftDebugIO ["zz", "", "cc", "ww", "ww"]
                    -- liftDebugIO [c0, c1, c2, c3, c4]
                    return False
            else
                return True
-- | Each addPage must grow the page count by one, select the new page
-- (index 0) and start it empty.  @i'@ is clamped into [1,10] to keep the
-- test fast.
prop_new_page :: HPS.ServerHandle -> Int -> Bool
prop_new_page hps i' =
    unsafePerformIO $ HPS.runIn hps $ do
        let i = if abs i' > 10 then 10 else abs i' + 1
        _ <- HP.setPageText "" 0
        HP.closeAllPages
        -- Baseline: the state right after closing everything.
        pc0 <- HP.getPageCount
        pi0 <- HP.getPageIndex
        pt0 <- HP.getPageText
        pss <- (flip mapM) [1..i] $ \x -> do
            HP.addPage
            psc <- HP.getPageCount
            psi <- HP.getPageIndex
            pst <- HP.getPageText
            -- Mark the previous page so later iterations read fresh text.
            HP.setPageIndex $ psc - 1
            _ <- HP.setPageText ("old "++ show x) 0
            return (x, psc, psi, pst)
        let results = (0,pc0,pi0,pt0):pss
        -- liftDebugIO results
        -- k-th step: count k+1, new page selected at index 0, empty text.
        return $ all (\(k, kc, ki, kt) ->
                        kc == k+1 &&
                        ki == 0 &&
                        kt == "") $ results
-- | Opening an existing file shows its contents; opening a missing file
-- must fail (checked with 'shouldFail').
prop_open_page, prop_open_page_fail :: HPS.ServerHandle -> String -> Bool
prop_open_page hps file' =
    unsafePerformIO $ HPS.runIn hps $ do
        -- Derive a file name and contents from the arbitrary input length.
        let file = "f-" ++ (show $ length file')
        let path = testDir </> "Test" ++ file
        Hint.liftIO $ writeFile path file
        HP.closeAllPages
        HP.openPage path
        liftM (file ==) HP.getPageText
prop_open_page_fail hps file' =
    unsafePerformIO $ HPS.runIn hps $ do
        let file = "f-" ++ (show $ length file')
        -- This path is never written, so openPage must fail.
        let path = testDir </> "NO-Test" ++ file
        HP.closeAllPages
        shouldFail $ HP.openPage path
-- | Pages keep their text while switching between them by index; setting
-- an out-of-range index must fail.
prop_setget_page, prop_set_page_index_fail :: HPS.ServerHandle -> Int -> Bool
prop_setget_page hps i' =
    unsafePerformIO $ HPS.runIn hps $ do
        let i = if abs i' > 10 then 10 else abs i' + 1
        _ <- HP.setPageText "" 0
        HP.closeAllPages
        _ <- HP.setPageText "0" 0
        -- Create pages numbered 1..i (each new page is selected on add).
        forM_ [1..i] $ \x ->
            do
                HP.addPage
                HP.setPageText (show x) 0
        pc <- HP.getPageCount
        -- Visit every page and check its index and stored text.
        pss <- (flip mapM) [0..i] $ \x -> do
            HP.setPageIndex (i-x)
            psi <- HP.getPageIndex
            pst <- HP.getPageText
            _ <- HP.setPageText ("old "++ show x) 0
            return (x, psi, pst)
        -- liftDebugIO pss
        return . ((pc == i+1) &&) $ all (\(k, ki, kt) ->
                                           ki == (i-k) &&
                                           kt == show k) $ pss
prop_set_page_index_fail hps i' =
    unsafePerformIO $ HPS.runIn hps $ do
        let i = if abs i' > 10 then 10 else abs i' + 1
        HP.closeAllPages
        -- i-1 additional pages -> valid indices are 0..i-1, so i is out of range.
        replicateM_ (i-1) HP.addPage
        shouldFail $ HP.setPageIndex i
-- | savePage writes the current page back to the file it was opened from;
-- saving with no open file fails; savePageAs binds a page to a new path.
-- NOTE(review): @file'@ is unused in prop_save_page_fail — it only makes
-- the property's shape match the other two for the runner.
prop_save_page, prop_save_page_fail, prop_save_page_as :: HPS.ServerHandle -> String -> Bool
prop_save_page hps file' =
    unsafePerformIO $ HPS.runIn hps $ do
        let file = "f-" ++ (show $ length file')
        let path = testDir </> "Test" ++ file
        Hint.liftIO $ writeFile path file
        HP.closeAllPages
        HP.openPage path
        HP.savePage                 -- no-op save: contents unchanged
        p1 <- HP.getPageText
        HP.openPage path
        p2 <- HP.getPageText
        addExpr file                -- modify, save, reread via the page
        HP.savePage
        p3 <- HP.getPageText
        return $ p1 == file &&
                 p2 == file &&
                 p3 == (file ++ "\n\n" ++ file)
prop_save_page_fail hps file' =
    unsafePerformIO $ HPS.runIn hps $ do
        HP.closeAllPages
        -- A freshly-reset page has no associated file, so savePage must fail.
        shouldFail $ HP.savePage
prop_save_page_as hps file' =
    unsafePerformIO $ HPS.runIn hps $ do
        let file = "f-" ++ (show $ length file')
        let path = testDir </> "Test" ++ file
        HP.closeAllPages
        _ <- HP.setPageText file 0
        HP.savePageAs path
        HP.openPage path
        p0 <- HP.getPageText
        HP.savePage                 -- page is now bound to path: plain save works
        HP.openPage path
        p1 <- HP.getPageText
        return $ p0 == file &&
                 p1 == file
-- | Closing pages one at a time: closing the page at index k must select
-- index k-1, and closing the very last page leaves a single empty page.
prop_close_page :: HPS.ServerHandle -> Int -> Bool
prop_close_page hps i' =
    unsafePerformIO $ HPS.runIn hps $ do
        let i = if abs i' > 10 then 10 else abs i' + 1
        HP.closeAllPages
        _ <- HP.setPageText (show i) 0
        -- Build pages so that the page at index x holds text (show x).
        forM_ [1..i] $ \x ->
            do
                HP.addPage
                HP.setPageText (show (i-x)) 0
        pcb <- HP.getPageCount
        -- Close from the highest index down, sampling state before/after.
        pss <- (flip mapM) [i,i-1..1] $ \x -> do
            HP.setPageIndex x
            pbi <- HP.getPageIndex
            pbt <- HP.getPageText
            HP.closePage
            pai <- HP.getPageIndex
            pat <- HP.getPageText
            return (x, pbi, pbt, pai, pat)
        pca <- HP.getPageCount
        pia <- HP.getPageIndex
        pta <- HP.getPageText
        -- Closing the only remaining page resets it to a single empty page.
        HP.closePage
        pcf <- HP.getPageCount
        pif <- HP.getPageIndex
        ptf <- HP.getPageText
        --liftDebugIO (pcb, (pca, pia, pta), (pcf, pif, ptf), pss)
        --let ff = \(k, _, _, _, _) -> (k, k, show k, k-1, show (k-1))
        --liftDebugIO (i+1, (1, 0, "0"), (1, 0, ""), map ff pss)
        return . ((pcb == i+1 &&
                   pca == 1 && pia == 0 && pta == "0" &&
                   pcf == 1 && pif == 0 && ptf == "") &&) $
            all (\(k, kbi, kbt, kai, kat) ->
                   kbi == k &&
                   kbt == show k &&
                   kai == k-1 &&
                   kat == show (k-1) ) $ pss
-- | After growing the workspace to @i@ pages, closeAllPages must leave
-- exactly one empty page selected at index 0.  @i'@ is clamped into
-- [1,10] to keep the test fast.
--
-- Tidied: the @if not result then return False else return True@ tail is
-- the identity on @result@, so it is returned directly; page creation uses
-- 'replicateM_' for consistency with prop_set_page_index_fail above.
prop_close_all_pages :: HPS.ServerHandle -> Int -> Bool
prop_close_all_pages hps i' =
    unsafePerformIO $ HPS.runIn hps $ do
        let i = if abs i' > 10 then 10 else abs i' + 1
        HP.closeAllPages
        c0 <- HP.getPageCount        -- a closed workspace still holds one page
        _ <- HP.setPageText "not empty" 0
        replicateM_ (i-1) HP.addPage -- grow to i pages in total
        c1 <- HP.getPageCount
        HP.setPageIndex $ c1 - 1
        HP.closeAllPages
        c2 <- HP.getPageCount
        i2 <- HP.getPageIndex
        t2 <- HP.getPageText
        -- Expected: one page before adding, i pages after adding, and a
        -- single empty page selected at index 0 once everything is closed.
        return $ (c0, c1, c2, i2, t2) == (1, i, 1, 0, "")
-- | A page whose bindings transitively use an undefined name (\"lenggth\"
-- is a deliberate typo) must fail with the same interpreter error a raw
-- hint session produces for that name.
-- NOTE(review): the @Left ... <-@ binds are partial; unexpected success
-- crashes the property instead of returning False.
prop_let_fail :: HPS.ServerHandle -> HS.ServerHandle -> String -> Bool
prop_let_fail hps hs txt' =
    unsafePerformIO $ do
        let txt = show $ length txt'
        let expr = "testL x = lenggth x"
        Left hpsr <- HPS.runIn hps $ do
            HP.addPage
            addExpr expr
            addExpr $ "test2L = 2 * (testL \"" ++ txt ++ "\")"
            addExpr "test2L / 2"
            HP.interpret
        Left hsr <- HS.runIn hs $ Hint.eval "lenggth \"\""
        return $ hsr == hpsr
-- | HPage must advertise exactly the language extensions hint knows about.
-- The String argument is ignored; it only makes the property runnable by
-- the QuickCheck harness.
prop_get_available_extensions :: String -> Bool
prop_get_available_extensions _ = HP.availableExtensions == Hint.availableExtensions
-- | The language-extension list round-trips through set/get and can be
-- reset to empty.
prop_get_set_extensions :: HPS.ServerHandle -> [HP.Extension] -> Bool
prop_get_set_extensions hps exs =
    unsafePerformIO $ HPS.runIn hps $ do
        _ <- HP.setLanguageExtensions []
        exs0 <- HP.getLanguageExtensions
        _ <- HP.setLanguageExtensions exs
        exs1 <- HP.getLanguageExtensions
        _ <- HP.setLanguageExtensions []
        exs2 <- HP.getLanguageExtensions
        return $ (exs0 == Right []) && (exs1 == Right exs) && (exs2 == Right [])
-- | A module that needs a particular extension must fail to load with no
-- extensions enabled and succeed once the required ones are set.
prop_working_extensions :: HPS.ServerHandle -> WorkingExtension -> Bool
prop_working_extensions hps (WEX es m) =
    unsafePerformIO $ HPS.runIn hps $ do
        -- Bundled test module that requires the extensions in es.
        path <- Hint.liftIO . getDataFileName $ "res" </> "test" </> m
        _ <- HP.setLanguageExtensions []
        before <- HP.loadModules [path]
        _ <- HP.setLanguageExtensions es
        after <- HP.loadModules [path]
        let failed = case before of
                         Left _ -> True
                         _ -> False
        return $ failed && (after == Right ())
-- | Setting an unknown language extension must be rejected, i.e. the call
-- returns a 'Left'.
prop_get_set_extension_fail :: HPS.ServerHandle -> String -> Bool
prop_get_set_extension_fail hps s =
    unsafePerformIO $ HPS.runIn hps $
        liftM (either (const True) (const False)) $
            HP.setLanguageExtensions [HP.UnknownExtension s]
-- | The source-directory list round-trips through set/get and can be
-- reset to empty.
prop_get_set_source_dirs :: HPS.ServerHandle -> [FilePath] -> Bool
prop_get_set_source_dirs hps sds =
    unsafePerformIO $ HPS.runIn hps $ do
        _ <- HP.setSourceDirs []
        sds0 <- HP.getSourceDirs
        _ <- HP.setSourceDirs sds
        sds1 <- HP.getSourceDirs
        _ <- HP.setSourceDirs []
        sds2 <- HP.getSourceDirs
        return $ (sds0 == []) && (sds1 == sds) && (sds2 == [])
-- | Loading a module by bare name must fail without source dirs and
-- succeed once the test directory is in the search path (listed twice to
-- check duplicates are harmless).
prop_working_source_dirs :: HPS.ServerHandle -> ModuleName -> Bool
prop_working_source_dirs hps (MN file) =
    unsafePerformIO $ HPS.runIn hps $ do
        let path = testDir </> file ++ ".hs"
        _ <- HP.setPageText ("module " ++ file ++ " where t = 1") 0
        HP.savePageAs path
        _ <- HP.setSourceDirs []
        before <- HP.loadModules [file]
        _ <- HP.setSourceDirs [testDir, testDir]
        after <- HP.loadModules [file]
        let failed = case before of
                         Left _ -> True
                         _ -> False
        -- liftDebugIO (before, after, failed)
        return $ failed && (after == Right ())
-- | GHC options accumulate: setting \"-i...\" appends it to the existing
-- options, and setting the empty string leaves them unchanged.
prop_get_set_ghc_opts :: HPS.ServerHandle -> String -> Bool
prop_get_set_ghc_opts hps ops =
    unsafePerformIO $ HPS.runIn hps $ do
        ops0 <- HP.getGhcOpts
        _ <- HP.setGhcOpts $ "-i" ++ ops
        ops1 <- HP.getGhcOpts
        _ <- HP.setGhcOpts ""
        ops2 <- HP.getGhcOpts
        -- liftDebugIO (ops, ops0, ops1, ops2)
        return $ (ops1 == (ops0 ++ " -i" ++ ops)) && (ops2 == ops1)
-- | Setting a string that is not a valid GHC option (a generated class
-- name) must be rejected with a 'Left'.
prop_get_set_ghc_opts_fail :: HPS.ServerHandle -> ClassName -> Bool
prop_get_set_ghc_opts_fail hps (CN ops) =
    unsafePerformIO $ HPS.runIn hps $ do
        res <- HP.setGhcOpts ops
        case res of
            Left _ -> return True
            _ -> return False
-- | Loading a module by bare name must fail with no search path and
-- succeed after adding the test directory via the \"-i\" GHC option
-- (the option-based twin of prop_working_source_dirs).
prop_working_ghc_opts :: HPS.ServerHandle -> ModuleName -> Bool
prop_working_ghc_opts hps (MN file) =
    unsafePerformIO $ HPS.runIn hps $ do
        let path = testDir </> file ++ ".hs"
        _ <- HP.setPageText ("module " ++ file ++ " where t = 1") 0
        HP.savePageAs path
        _ <- HP.setSourceDirs []
        before <- HP.loadModules [file]
        _ <- HP.setGhcOpts $ "-i" ++ testDir
        after <- HP.loadModules [file]
        let failed = case before of
                         Left _ -> True
                         _ -> False
        -- liftDebugIO (before, after, failed)
        return $ failed && (after == Right ())
-- | Append expression @e@ to the current page, separated from the existing
-- text by a blank line, leaving the cursor at the end of the new text.
addExpr :: String -> HP.HPage ()
addExpr e =
    do current <- HP.getPageText
       let extended = current ++ "\n\n" ++ e
       _ <- HP.setPageText extended (length extended)
       return ()
|
elbrujohalcon/hPage
|
src/HPage/Test/Server.hs
|
bsd-3-clause
| 34,468
| 0
| 26
| 19,219
| 7,038
| 3,455
| 3,583
| 481
| 4
|
-- | Implementation of statistics gathering and processing utilities.
module Pos.Infra.Statistics
( module Pos.Infra.Statistics.Ekg
, module Pos.Infra.Statistics.Statsd
) where
import Pos.Infra.Statistics.Ekg
import Pos.Infra.Statistics.Statsd
|
input-output-hk/pos-haskell-prototype
|
infra/src/Pos/Infra/Statistics.hs
|
mit
| 287
| 0
| 5
| 66
| 40
| 29
| 11
| 5
| 0
|
{-# LANGUAGE ViewPatterns #-}
module Control.Monad.MWCSpec (spec) where
import Test.Hspec
import Test.Hspec.QuickCheck
import qualified Data.Text as T
import Control.Monad.MWC
import Control.Monad (forM_)
-- | Properties for the MWC-based ASCII generators: single bytes are valid
-- characters, and each text-generator implementation produces text of the
-- requested length containing only valid characters.
spec :: Spec
spec = do
  prop "valid byte" $ do
    w <- runMWCReaderT uniformAsciiByte
    -- Convert the Word8 to Char before checking membership in the alphabet.
    shouldSatisfy (toEnum $ fromEnum w) isValid
  -- The same two properties are run against every implementation.
  let impls =
        [ ("standard", uniformAsciiText)
        , ("simple", uniformAsciiTextSimple)
        , ("complex64", uniformAsciiTextComplex64)
        ]
  forM_ impls $ \(name, f) -> describe name $ do
    -- (abs -> len) is a view pattern: lengths are forced non-negative.
    prop "correct length" $ \(abs -> len) -> do
      t <- runMWCReaderT $ f len
      T.length t `shouldBe` len
    prop "correct content" $ \(abs -> len) -> do
      t <- runMWCReaderT $ f len
      shouldSatisfy t (T.all isValid)
  where
    -- Allowed alphabet: alphanumerics plus '-' and '_' (URL-safe style).
    isValid c =
        ('A' <= c && c <= 'Z') ||
        ('a' <= c && c <= 'z') ||
        ('0' <= c && c <= '9') ||
        c == '-' ||
        c == '_'
|
bitemyapp/snoy-extra
|
test/Control/Monad/MWCSpec.hs
|
mit
| 1,011
| 0
| 19
| 327
| 341
| 179
| 162
| 29
| 1
|
-- ErrorList {{{
-- | A value that is either a non-empty sequence of successes or a
-- collection of accumulated errors.
-- NB: this module is written against the project's own (non-standard)
-- prelude, where 'Monoid' provides 'null' and '(++)', and the
-- Unit/Functor/Product/Applicative/Bind tower replaces the usual classes.
data ErrorList e a =
    ErrorListFailure [e]
  | ErrorListSuccess a [a]
instance Monoid (ErrorList e a) where
  -- Identity is the empty failure; append favours successes and merges
  -- error lists only when both sides failed.
  null = ErrorListFailure []
  ErrorListFailure e1 ++ ErrorListFailure e2 = ErrorListFailure $ e1 ++ e2
  ErrorListSuccess x xs ++ ErrorListSuccess y ys = ErrorListSuccess x (xs ++ (y:ys))
  ErrorListFailure _ ++ ys = ys
  xs ++ ErrorListFailure _ = xs
instance Functorial Monoid (ErrorList e) where functorial = W
instance Unit (ErrorList e) where
  -- A single success with no further elements.
  unit x = ErrorListSuccess x []
instance Functor (ErrorList e) where
  map = mmap
instance Product (ErrorList e) where
  (<*>) = mpair
instance Applicative (ErrorList e) where
  (<@>) = mapply
instance Bind (ErrorList e) where
  -- Bind applies the continuation to every success in order and appends
  -- the results; a failure short-circuits.
  ErrorListFailure e >>= _ = ErrorListFailure e
  ErrorListSuccess x [] >>= k = k x
  ErrorListSuccess x (x':xs') >>= k = k x ++ (ErrorListSuccess x' xs' >>= k)
instance Monad (ErrorList e) where
instance MonadBot (ErrorList e) where
  mbot = null
instance MonadMonoid (ErrorList e) where
  (<++>) = (++)
-- instance CoIterable a (ErrorList e a) where
-- coiter _ i (ErrorListFailure _) = i
-- coiter f i (ErrorListSuccess x xs) = f x $ coiter f i xs
-- instance Buildable a (ErrorList e a) where
-- nil = ErrorListFailure []
-- cons x xs = ErrorListSuccess x $ toList xs
-- | Flatten a nested ErrorList (join at the representation level).
errorListConcat :: ErrorList e (ErrorList e a) -> ErrorList e a
errorListConcat (ErrorListFailure e) = ErrorListFailure e
errorListConcat (ErrorListSuccess x xs) = x ++ concat xs
-- | Split off the head element of each row: returns the row of heads and
-- the remaining rows, or the accumulated errors ('Inl') when a required
-- row is a failure.  Helper for 'errorListTranspose'.
errorListPluck :: ErrorList e a -> ErrorList e (ErrorList e a) -> [e] :+: (ErrorList e a, ErrorList e (ErrorList e a))
errorListPluck (ErrorListFailure e1) (ErrorListFailure e2) = Inl $ e1 ++ e2
errorListPluck (ErrorListFailure e1) (ErrorListSuccess _ _) = Inl e1
errorListPluck (ErrorListSuccess x xs) (ErrorListFailure _) = Inr (unit x, unit $ fromList xs)
errorListPluck (ErrorListSuccess x1 xs1) (ErrorListSuccess xs2 xss) = do
  (ys2, xss') <- errorListPluck xs2 $ fromList xss
  return (ErrorListSuccess x1 $ toList ys2, ErrorListSuccess (fromList xs1) $ toList xss')
-- | Matrix-style transpose of a list of lists; used below to commute two
-- stacked ErrorListT layers.
errorListTranspose :: ErrorList e (ErrorList e a) -> ErrorList e (ErrorList e a)
errorListTranspose (ErrorListFailure e) = unit $ ErrorListFailure e
errorListTranspose (ErrorListSuccess xs xss) =
  case errorListPluck xs $ fromList xss of
    Inl e -> ErrorListFailure e
    Inr (ys, xss') -> ErrorListSuccess ys $ toList $ errorListTranspose xss'
-- }}}
-- MonadErrorList {{{
-- | Monad transformer producing an 'ErrorList' of results in the base monad.
newtype ErrorListT e m a = ErrorListT { unErrorListT :: m (ErrorList e a) }
-- | Introduction: lift a computation into the ErrorListT layer.
class (Monad m) => MonadErrorListI e m where
  errorListI :: m ~> ErrorListT e m
-- | Elimination: collapse the ErrorListT layer back into the base monad.
class (Monad m) => MonadErrorListE e m where
  errorListE :: ErrorListT e m ~> m
class (MonadErrorListI e m, MonadErrorListE e m) => MonadErrorList e m where
-- | Abort with a single error wrapped in a one-error failure.
throwErrorList :: (MonadErrorListE e m) => e -> m a
throwErrorList = errorListE . ErrorListT . unit . ErrorListFailure . unit
-- }}}
-- ErrorListT {{{
-- | Commute two stacked ErrorListT layers by transposing the nested lists.
errorListCommute :: (Functor m) => ErrorListT e (ErrorListT e m) ~> ErrorListT e (ErrorListT e m)
errorListCommute aMM = ErrorListT $ ErrorListT $ errorListTranspose ^$ unErrorListT $ unErrorListT aMM
instance (Unit m) => Unit (ErrorListT e m) where
  unit = ErrorListT . unit . unit
instance (Functor m) => Functor (ErrorListT e m) where
  map f = ErrorListT . f ^^. unErrorListT
instance (Monad m, Functorial Monoid m) => Product (ErrorListT e m) where
  (<*>) = mpair
instance (Monad m, Functorial Monoid m) => Applicative (ErrorListT e m) where
  (<@>) = mapply
instance (Bind m, Functorial Monoid m) => Bind (ErrorListT e m) where
  -- Run the inner action, map the continuation over every success and
  -- concatenate the resulting lists in the base monad.
  (>>=) :: forall a b. ErrorListT e m a -> (a -> ErrorListT e m b) -> ErrorListT e m b
  aM >>= k = ErrorListT $ do
    xs <- unErrorListT aM
    unErrorListT $ concat $ k ^$ xs
instance (Monad m, Functorial Monoid m) => Monad (ErrorListT e m) where
instance FunctorUnit2 (ErrorListT e) where
  funit2 = ErrorListT .^ unit
instance FunctorJoin2 (ErrorListT e) where
  fjoin2 = ErrorListT . errorListConcat ^. unErrorListT . unErrorListT
instance FunctorFunctor2 (ErrorListT e) where
  ftMap f = ErrorListT . f . unErrorListT
instance (Functorial Monoid m) => Monoid (ErrorListT e m a) where
  -- The Monoid structure is inherited pointwise from the base monad's
  -- Monoid on (m (ErrorList e a)); 'with'/'functorial' discharges it.
  null =
    with (functorial :: W (Monoid (m (ErrorList e a)))) $
    ErrorListT null
  xs ++ ys =
    with (functorial :: W (Monoid (m (ErrorList e a)))) $
    ErrorListT $ unErrorListT xs ++ unErrorListT ys
instance (Functorial Monoid m) => Functorial Monoid (ErrorListT e m) where functorial = W
instance (Functorial Monoid m) => MonadBot (ErrorListT e m) where
  mbot = null
instance (Functorial Monoid m) => MonadAppend (ErrorListT e m) where
  (<++>) = (++)
instance (Monad m, Functorial Monoid m) => MonadErrorListI e (ErrorListT e m) where
  -- Lift then commute, so the new layer ends up outermost.
  errorListI :: ErrorListT e m ~> ErrorListT e (ErrorListT e m)
  errorListI = errorListCommute . ftUnit
instance (Monad m, Functorial Monoid m) => MonadErrorListE e (ErrorListT e m) where
  -- Commute then join the doubled layer away.
  errorListE :: ErrorListT e (ErrorListT e m) ~> ErrorListT e m
  errorListE = ftJoin . errorListCommute
instance (Monad m, Functorial Monoid m) => MonadErrorList e (ErrorListT e m) where
-- | Embed an ErrorT computation: a single error becomes a one-error
-- failure; a result becomes a singleton success.
errorToErrorList :: (Functor m) => ErrorT e m ~> ErrorListT e m
errorToErrorList aM = ErrorListT $ ff ^$ unErrorT aM
  where
    ff (Inl e) = ErrorListFailure [e]
    ff (Inr a) = ErrorListSuccess a []
-- this might not be right
errorListToError :: (Monad m, Functorial Monoid m) => ErrorListT e (ErrorListT e m) a -> ErrorT e (ErrorListT e m) a
errorListToError aM = ErrorT $ mconcat . ff *$ unErrorListT aM
  where
    -- Each error / success becomes its own branch in the underlying
    -- ErrorListT, recombined with mconcat.
    ff (ErrorListFailure e) = map (return . Inl) e
    ff (ErrorListSuccess x xs) = map (return . Inr) $ x:xs
instance (Monad m, Functorial Monoid m) => MonadErrorE e (ErrorListT e m) where
  errorE :: ErrorT e (ErrorListT e m) ~> ErrorListT e m
  errorE = errorListE . errorToErrorList
-- instance (Monad m, Functorial Monoid m) => MonadErrorI e (ErrorListT e m) where
-- errorI :: ErrorListT e m ~> ErrorT e (ErrorListT e m)
-- errorI = errorListToError . errorListI
-- instance (Monad m, Functorial Monoid m) => MonadError e (ErrorListT e m) where
-- }}}
-- State // ErrorList {{{
-- | Push the StateT layer underneath ErrorListT.  The outgoing state is
-- the monoidal 'concat' of the per-branch states, which is why a Monoid
-- instance on the state is required.
stateErrorListCommute :: (Functor m, Monoid s) => StateT s (ErrorListT e m) ~> ErrorListT e (StateT s m)
stateErrorListCommute aMM = ErrorListT $ StateT $ \ s -> ff ^$ unErrorListT $ unStateT s aMM
  where
    ff asL = (fst ^$ asL, concat $ snd ^$ asL)
-- | Push ErrorListT underneath StateT; every branch is paired with the
-- single resulting state.
errorListStateCommute :: (Functor m) => ErrorListT e (StateT s m) ~> StateT s (ErrorListT e m)
errorListStateCommute aMM = StateT $ \ s -> ErrorListT $ ff ^$ unStateT s $ unErrorListT aMM
  where
    ff (xs, s) = (,s) ^$ xs
instance (MonadErrorListI e m, Functorial Monoid m, Monoid s) => MonadErrorListI e (StateT s m) where
  errorListI :: StateT s m ~> ErrorListT e (StateT s m)
  errorListI = stateErrorListCommute . mtMap errorListI
instance (MonadErrorListE e m, Functorial Monoid m) => MonadErrorListE e (StateT s m) where
  errorListE :: ErrorListT e (StateT s m) ~> StateT s m
  errorListE = mtMap errorListE . errorListStateCommute
instance (MonadErrorList e m, Functorial Monoid m, Monoid s) => MonadErrorList e (StateT s m) where
instance (MonadStateI s m, Functorial Monoid m) => MonadStateI s (ErrorListT e m) where
  stateI :: ErrorListT e m ~> StateT s (ErrorListT e m)
  stateI = errorListStateCommute . ftMap stateI
instance (MonadStateE s m, Functorial Monoid m, Monoid s) => MonadStateE s (ErrorListT e m) where
  stateE :: StateT s (ErrorListT e m) ~> ErrorListT e m
  stateE = ftMap stateE . stateErrorListCommute
instance (MonadState s m, Functorial Monoid m, Monoid s) => MonadState s (ErrorListT e m) where
-- }}}
-- | Render as \"Failure\" with the error list or \"Success\" with the
-- full (head:tail) result list.
instance (Pretty e, Pretty a) => Pretty (ErrorList e a) where
  pretty (ErrorListFailure e) = app [con "Failure", pretty e]
  pretty (ErrorListSuccess x xs) = app [con "Success", pretty (x:xs)]
instance (Functorial Pretty m, Pretty e, Pretty a) => Pretty (ErrorListT e m a) where
  -- Prints the wrapped action transparently (the ErrorListT constructor
  -- is deliberately not shown; see the commented-out alternative).
  pretty (ErrorListT aM) =
    with (functorial :: W (Pretty (m (ErrorList e a)))) $
    -- app [con "ErrorListT", pretty aM]
    pretty aM
instance (Functorial Pretty m, Pretty e) => Functorial Pretty (ErrorListT e m) where
  functorial = W
|
davdar/maam
|
src/FP/NotUsed/ErrorList.hs
|
bsd-3-clause
| 8,095
| 69
| 19
| 1,555
| 3,112
| 1,558
| 1,554
| -1
| -1
|
{-# LANGUAGE CPP #-}
module TcFlatten(
FlattenMode(..),
flatten, flattenManyNom,
unflatten,
) where
#include "HsVersions.h"
import TcRnTypes
import TcType
import Type
import TcEvidence
import TyCon
import TypeRep
import Kind( isSubKind )
import Coercion ( tyConRolesX )
import Var
import VarEnv
import NameEnv
import Outputable
import TcSMonad as TcS
import DynFlags( DynFlags )
import Util
import Bag
import FastString
import Control.Monad
import MonadUtils ( zipWithAndUnzipM )
import GHC.Exts ( inline )
#if __GLASGOW_HASKELL__ < 709
import Control.Applicative ( Applicative(..), (<$>) )
#endif
{-
Note [The flattening story]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* A CFunEqCan is either of form
[G] <F xis> : F xis ~ fsk -- fsk is a FlatSkol
[W] x : F xis ~ fmv -- fmv is a unification variable,
-- but untouchable,
-- with MetaInfo = FlatMetaTv
where
x is the witness variable
fsk/fmv is a flatten skolem
xis are function-free
CFunEqCans are always [Wanted], or [Given], never [Derived]
fmv untouchable just means that in a CTyVarEq, say,
fmv ~ Int
we do NOT unify fmv.
* KEY INSIGHTS:
- A given flatten-skolem, fsk, is known a-priori to be equal to
F xis (the LHS), with <F xis> evidence
- A unification flatten-skolem, fmv, stands for the as-yet-unknown
type to which (F xis) will eventually reduce
* Inert set invariant: if F xis1 ~ fsk1, F xis2 ~ fsk2
then xis1 /= xis2
i.e. at most one CFunEqCan with a particular LHS
* Each canonical CFunEqCan x : F xis ~ fsk/fmv has its own
distinct evidence variable x and flatten-skolem fsk/fmv.
Why? We make a fresh fsk/fmv when the constraint is born;
and we never rewrite the RHS of a CFunEqCan.
* Function applications can occur in the RHS of a CTyEqCan. No reason
  not to allow this, and it reduces the amount of flattening that must occur.
* Flattening a type (F xis):
- If we are flattening in a Wanted/Derived constraint
then create new [W] x : F xis ~ fmv
else create new [G] x : F xis ~ fsk
with fresh evidence variable x and flatten-skolem fsk/fmv
- Add it to the work list
- Replace (F xis) with fsk/fmv in the type you are flattening
- You can also add the CFunEqCan to the "flat cache", which
simply keeps track of all the function applications you
have flattened.
- If (F xis) is in the cache already, just
use its fsk/fmv and evidence x, and emit nothing.
- No need to substitute in the flat-cache. It's not the end
of the world if we start with, say (F alpha ~ fmv1) and
    (F Int ~ fmv2) and then find alpha := Int. That will
simply give rise to fmv1 := fmv2 via [Interacting rule] below
* Canonicalising a CFunEqCan [G/W] x : F xis ~ fsk/fmv
- Flatten xis (to substitute any tyvars; there are already no functions)
cos :: xis ~ flat_xis
- New wanted x2 :: F flat_xis ~ fsk/fmv
- Add new wanted to flat cache
- Discharge x = F cos ; x2
* Unification flatten-skolems, fmv, ONLY get unified when either
a) The CFunEqCan takes a step, using an axiom
b) During un-flattening
They are never unified in any other form of equality.
For example [W] ffmv ~ Int is stuck; it does not unify with fmv.
* We *never* substitute in the RHS (i.e. the fsk/fmv) of a CFunEqCan.
That would destroy the invariant about the shape of a CFunEqCan,
and it would risk wanted/wanted interactions. The only way we
learn information about fsk is when the CFunEqCan takes a step.
However we *do* substitute in the LHS of a CFunEqCan (else it
would never get to fire!)
* [Interacting rule]
(inert) [W] x1 : F tys ~ fmv1
(work item) [W] x2 : F tys ~ fmv2
Just solve one from the other:
x2 := x1
fmv2 := fmv1
This just unites the two fsks into one.
Always solve given from wanted if poss.
* For top-level reductions, see Note [Top-level reductions for type functions]
in TcInteract
Why given-fsks, alone, doesn't work
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Could we get away with only flatten meta-tyvars, with no flatten-skolems? No.
[W] w : alpha ~ [F alpha Int]
---> flatten
w = ...w'...
[W] w' : alpha ~ [fsk]
[G] <F alpha Int> : F alpha Int ~ fsk
--> unify (no occurs check)
alpha := [fsk]
But since fsk = F alpha Int, this is really an occurs check error. If
that is all we know about alpha, we will succeed in constraint
solving, producing a program with an infinite type.
Even if we did finally get (g : fsk ~ Boo)l by solving (F alpha Int ~ fsk)
using axiom, zonking would not see it, so (x::alpha) sitting in the
tree will get zonked to an infinite type. (Zonking always only does
refl stuff.)
Why flatten-meta-vars, alone doesn't work
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Look at Simple13, with unification-fmvs only
[G] g : a ~ [F a]
---> Flatten given
g' = g;[x]
[G] g' : a ~ [fmv]
[W] x : F a ~ fmv
--> subst a in x
x = F g' ; x2
[W] x2 : F [fmv] ~ fmv
And now we have an evidence cycle between g' and x!
If we used a given instead (ie current story)
[G] g : a ~ [F a]
---> Flatten given
g' = g;[x]
[G] g' : a ~ [fsk]
[G] <F a> : F a ~ fsk
---> Substitute for a
[G] g' : a ~ [fsk]
[G] F (sym g'); <F a> : F [fsk] ~ fsk
Why is it right to treat fmv's differently to ordinary unification vars?
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
f :: forall a. a -> a -> Bool
g :: F Int -> F Int -> Bool
Consider
f (x:Int) (y:Bool)
This gives alpha~Int, alpha~Bool. There is an inconsistency,
but really only one error. SherLoc may tell you which location
is most likely, based on other occurrences of alpha.
Consider
g (x:Int) (y:Bool)
Here we get (F Int ~ Int, F Int ~ Bool), which flattens to
(fmv ~ Int, fmv ~ Bool)
But there are really TWO separate errors.
** We must not complain about Int~Bool. **
Moreover these two errors could arise in entirely unrelated parts of
the code. (In the alpha case, there must be *some* connection (eg
v:alpha in common envt).)
Note [Orientation of equalities with fmvs] and
Note [Unflattening can force the solver to iterate]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Here is a bad dilemma concerning flatten meta-vars (fmvs).
This example comes from IndTypesPerfMerge, T10226, T10009.
From the ambiguity check for
f :: (F a ~ a) => a
we get:
[G] F a ~ a
[W] F alpha ~ alpha, alpha ~ a
From Givens we get
[G] F a ~ fsk, fsk ~ a
Now if we flatten we get
[W] alpha ~ fmv, F alpha ~ fmv, alpha ~ a
Now, processing the first one first, choosing alpha := fmv
[W] F fmv ~ fmv, fmv ~ a
And now we are stuck. We must either *unify* fmv := a, or
use the fmv ~ a to rewrite F fmv ~ fmv, so we can make it
meet up with the given F a ~ blah.
Old solution: always put fmvs on the left, so we get
[W] fmv ~ alpha, F alpha ~ fmv, alpha ~ a
BUT this works badly for Trac #10340:
get :: MonadState s m => m s
instance MonadState s (State s) where ...
foo :: State Any Any
foo = get
For 'foo' we instantiate 'get' at types mm ss
[W] MonadState ss mm, [W] mm ss ~ State Any Any
Flatten, and decompose
        [W] MonadState ss mm, [W] Any ~ fmv, [W] mm ~ State fmv, [W] fmv ~ ss
Unify mm := State fmv:
[W] MonadState ss (State fmv), [W] Any ~ fmv, [W] fmv ~ ss
If we orient with (untouchable) fmv on the left we are now stuck:
alas, the instance does not match!! But if instead we orient with
(touchable) ss on the left, we unify ss:=fmv, to get
[W] MonadState fmv (State fmv), [W] Any ~ fmv
Now we can solve.
This is a real dilemma. CURRENT SOLUTION:
* Orient with touchable variables on the left. This is the
simple, uniform thing to do. So we would orient ss ~ fmv,
not the other way round.
* In the 'f' example, we get stuck with
F fmv ~ fmv, fmv ~ a
But during unflattening we will fail to dischargeFmv for the
   CFunEqCan F fmv ~ fmv, because fmv := F fmv would make an infinite
type. Instead we unify fmv:=a, AND record that we have done so.
If any such "non-CFunEqCan unifications" take place (in
unflatten_eq in TcFlatten.unflatten) iterate the entire process.
This is done by the 'go' loop in solveSimpleWanteds.
This story does not feel right but it's the best I can do; and the
iteration only happens in pretty obscure circumstances.
************************************************************************
* *
* Other notes (Oct 14)
     I have not revisited these, but I didn't want to discard them
* *
************************************************************************
Try: rewrite wanted with wanted only for fmvs (not all meta-tyvars)
But: fmv ~ alpha[0]
alpha[0] ~ fmv’
Now we don’t see that fmv ~ fmv’, which is a problem for injectivity detection.
Conclusion: rewrite wanteds with wanted for all untouchables.
skol ~ untch, must re-orient to untch ~ skol, so that we can use it to rewrite.
************************************************************************
* *
* Examples
Here is a long series of examples I had to work through
* *
************************************************************************
Simple20
~~~~~~~~
axiom F [a] = [F a]
[G] F [a] ~ a
-->
[G] fsk ~ a
[G] [F a] ~ fsk (nc)
-->
[G] F a ~ fsk2
[G] fsk ~ [fsk2]
[G] fsk ~ a
-->
[G] F a ~ fsk2
[G] a ~ [fsk2]
[G] fsk ~ a
-----------------------------------
----------------------------------------
indexed-types/should_compile/T44984
[W] H (F Bool) ~ H alpha
[W] alpha ~ F Bool
-->
F Bool ~ fmv0
H fmv0 ~ fmv1
H alpha ~ fmv2
fmv1 ~ fmv2
fmv0 ~ alpha
flatten
~~~~~~~
fmv0 := F Bool
fmv1 := H (F Bool)
fmv2 := H alpha
alpha := F Bool
plus
fmv1 ~ fmv2
But these two are equal under the above assumptions.
Solve by Refl.
--- under plan B, namely solve fmv1:=fmv2 eagerly ---
[W] H (F Bool) ~ H alpha
[W] alpha ~ F Bool
-->
F Bool ~ fmv0
H fmv0 ~ fmv1
H alpha ~ fmv2
fmv1 ~ fmv2
fmv0 ~ alpha
-->
F Bool ~ fmv0
H fmv0 ~ fmv1
H alpha ~ fmv2 fmv2 := fmv1
fmv0 ~ alpha
flatten
fmv0 := F Bool
fmv1 := H fmv0 = H (F Bool)
retain H alpha ~ fmv2
because fmv2 has been filled
alpha := F Bool
----------------------------
indexed-types/should_failt/T4179
after solving
[W] fmv_1 ~ fmv_2
[W] A3 (FCon x) ~ fmv_1 (CFunEqCan)
[W] A3 (x (aoa -> fmv_2)) ~ fmv_2 (CFunEqCan)
----------------------------------------
indexed-types/should_fail/T7729a
a) [W] BasePrimMonad (Rand m) ~ m1
b) [W] tt m1 ~ BasePrimMonad (Rand m)
---> process (b) first
BasePrimMonad (Ramd m) ~ fmv_atH
fmv_atH ~ tt m1
---> now process (a)
m1 ~ s_atH ~ tt m1 -- An obscure occurs check
----------------------------------------
typecheck/TcTypeNatSimple
Original constraint
[W] x + y ~ x + alpha (non-canonical)
==>
[W] x + y ~ fmv1 (CFunEqCan)
[W] x + alpha ~ fmv2 (CFuneqCan)
[W] fmv1 ~ fmv2 (CTyEqCan)
(sigh)
----------------------------------------
indexed-types/should_fail/GADTwrong1
[G] Const a ~ ()
==> flatten
[G] fsk ~ ()
work item: Const a ~ fsk
==> fire top rule
[G] fsk ~ ()
work item fsk ~ ()
Surely the work item should rewrite to () ~ ()? Well, maybe not;
it's a very special case. More generally, our givens look like
F a ~ Int, where (F a) is not reducible.
----------------------------------------
indexed_types/should_fail/T8227:
Why using a different can-rewrite rule in CFunEqCan heads
does not work.
Assuming NOT rewriting wanteds with wanteds
Inert: [W] fsk_aBh ~ fmv_aBk -> fmv_aBk
[W] fmv_aBk ~ fsk_aBh
[G] Scalar fsk_aBg ~ fsk_aBh
[G] V a ~ f_aBg
Worklist includes [W] Scalar fmv_aBi ~ fmv_aBk
  fmv_aBi, fmv_aBk are flatten unification variables
Work item: [W] V fsk_aBh ~ fmv_aBi
Note that the inert wanteds are cyclic, because we do not rewrite
wanteds with wanteds.
Then we go into a loop when normalise the work-item, because we
use rewriteOrSame on the argument of V.
Conclusion: Don't make canRewrite context specific; instead use
[W] a ~ ty to rewrite a wanted iff 'a' is a unification variable.
----------------------------------------
Here is a somewhat similar case:
type family G a :: *
blah :: (G a ~ Bool, Eq (G a)) => a -> a
blah = error "urk"
foo x = blah x
For foo we get
[W] Eq (G a), G a ~ Bool
Flattening
[W] G a ~ fmv, Eq fmv, fmv ~ Bool
We can't simplify away the Eq Bool unless we substitute for fmv.
Maybe that doesn't matter: we would still be left with unsolved
G a ~ Bool.
--------------------------
Trac #9318 has a very simple program leading to
[W] F Int ~ Int
[W] F Int ~ Bool
We don't want to get "Error Int~Bool". But if fmv's can rewrite
wanteds, we will
[W] fmv ~ Int
[W] fmv ~ Bool
--->
[W] Int ~ Bool
************************************************************************
* *
* FlattenEnv & FlatM
* The flattening environment & monad
* *
************************************************************************
-}
-- | Mutable stack of 'CFunEqCan's generated during flattening; the
-- contents are reversed onto the main work list when flattening finishes.
type FlatWorkListRef = TcRef [Ct]  -- See Note [The flattening work list]
-- | Read-only environment threaded through the flattener ('FlatM').
-- Carries the mode, the location/flavour/role taken from the constraint
-- being flattened, and the flattening work list.
data FlattenEnv
  = FE { fe_mode    :: FlattenMode
       , fe_loc     :: CtLoc          -- See Note [Flattener CtLoc]
       , fe_flavour :: CtFlavour
       , fe_eq_rel  :: EqRel          -- See Note [Flattener EqRels]
       , fe_work    :: FlatWorkListRef }  -- See Note [The flattening work list]
-- | How aggressively to flatten.
data FlattenMode  -- Postcondition for all three: inert wrt the type substitution
  = FM_FlattenAll          -- Postcondition: function-free
  | FM_SubstOnly           -- Apply the inert substitution only; do NOT lift out
                           -- type-family applications.
                           -- See Note [Flattening under a forall]

--  | FM_Avoid TcTyVar Bool  -- See Note [Lazy flattening]
--                           -- Postcondition:
--                           --  * tyvar is only mentioned in result under a rigid path
--                           --    e.g.   [a] is ok, but F a won't happen
--                           --  * If flat_top is True, top level is not a function application
--                           --   (but under type constructors is ok e.g. [F a])
-- | Build a 'FlattenEnv' for the given mode and work-list ref, taking the
-- location, flavour, and equality relation from the supplied evidence.
mkFlattenEnv :: FlattenMode -> CtEvidence -> FlatWorkListRef -> FlattenEnv
mkFlattenEnv fm ctev ref
  = FE { fe_work    = ref
       , fe_eq_rel  = ctEvEqRel ctev
       , fe_flavour = ctEvFlavour ctev
       , fe_loc     = ctEvLoc ctev
       , fe_mode    = fm }
-- | The 'FlatM' monad is a wrapper around 'TcS' with the following
-- extra capabilities: (1) it offers access to a 'FlattenEnv';
-- and (2) it maintains the flattening worklist.
-- See Note [The flattening work list].
--
-- Internally it is simply a reader over 'FlattenEnv' on top of 'TcS'.
newtype FlatM a
  = FlatM { runFlatM :: FlattenEnv -> TcS a }
-- Reader-style instances: the environment is passed unchanged to both
-- halves of a bind.
instance Monad FlatM where
  return x = FlatM (\_env -> return x)
  m >>= k  = FlatM $ \env -> runFlatM m env >>= \a -> runFlatM (k a) env
-- fmap is just liftM, written out explicitly.
instance Functor FlatM where
  fmap f m = m >>= \x -> return (f x)
-- (<*>) is just 'ap', written out explicitly.
instance Applicative FlatM where
  pure = return
  mf <*> mx = mf >>= \f -> mx >>= \x -> return (f x)
-- | Lift a 'TcS' action into 'FlatM', ignoring the environment.
liftTcS :: TcS a -> FlatM a
liftTcS thing_inside = FlatM (\_env -> thing_inside)
-- | Push a constraint onto the flattening work list.
-- See Note [The flattening work list]
emitFlatWork :: Ct -> FlatM ()
emitFlatWork ct = FlatM $ \env -> updTcRef (fe_work env) (\cts -> ct : cts)
runFlatten :: FlattenMode -> CtEvidence -> FlatM a -> TcS a
-- Run thing_inside (which does the flattening), and transfer all the
-- work it generates onto the main work list.
-- See Note [The flattening work list]
-- NB: the returned evidence is always the same as the original, but with
-- perhaps a new CtLoc
runFlatten mode ev thing_inside
  = do { flat_ref <- newTcRef []
       ; let fenv = mkFlattenEnv mode ev flat_ref
       ; result <- runFlatM thing_inside fenv
       ; generated <- readTcRef flat_ref
         -- The fe_work stack was built newest-first; reversing it onto
         -- wl_funeqs brings the earliest-emitted goal to the top, e.g.
         --   reverse [f3,f2,f1] ++ [w1,w2,w3] = [f1,f2,f3,w1,w2,w3]
       ; updWorkListTcS $ \wl ->
           wl { wl_funeqs = reverse generated ++ wl_funeqs wl }
       ; return result }
-- | Debug tracing inside 'FlatM'; delegates to 'traceTcS'.
traceFlat :: String -> SDoc -> FlatM ()
traceFlat herald doc = liftTcS (traceTcS herald doc)
-- | Project a field out of the ambient 'FlattenEnv'.
getFlatEnvField :: (FlattenEnv -> a) -> FlatM a
getFlatEnvField accessor = FlatM (return . accessor)
-- | The equality relation we are currently flattening with respect to.
getEqRel :: FlatM EqRel
getEqRel = FlatM (return . fe_eq_rel)
-- | The 'Role' corresponding to the current 'EqRel'.
getRole :: FlatM Role
getRole = do { eq_rel <- getEqRel
             ; return (eqRelRole eq_rel) }
-- | The flavour (Given/Wanted/Derived) of the constraint being flattened.
getFlavour :: FlatM CtFlavour
getFlavour = FlatM (return . fe_flavour)
-- | The flavour paired with the equality relation, as a 'CtFlavourRole'.
getFlavourRole :: FlatM CtFlavourRole
getFlavourRole = (,) <$> getFlavour <*> getEqRel
-- | The current 'FlattenMode'.
getMode :: FlatM FlattenMode
getMode = FlatM (return . fe_mode)
-- | The 'CtLoc' governing depth checks and new-evidence locations.
getLoc :: FlatM CtLoc
getLoc = FlatM (return . fe_loc)
-- | Fail (via 'checkReductionDepth') if the reduction-depth limit at the
-- current 'CtLoc' has been exceeded while reducing the given type.
checkStackDepth :: Type -> FlatM ()
checkStackDepth ty = getLoc >>= \loc -> liftTcS (checkReductionDepth loc ty)
-- | Change the 'EqRel' in a 'FlatM'.  Avoids rebuilding the environment
-- when the relation is already the requested one.
setEqRel :: EqRel -> FlatM a -> FlatM a
setEqRel new_eq_rel thing_inside = FlatM $ \env ->
  let env' | fe_eq_rel env == new_eq_rel = env
           | otherwise                   = env { fe_eq_rel = new_eq_rel }
  in runFlatM thing_inside env'
-- | Change the 'FlattenMode' in a 'FlattenEnv'.  Avoids rebuilding the
-- environment when the mode is already the requested one.
setMode :: FlattenMode -> FlatM a -> FlatM a
setMode new_mode thing_inside = FlatM $ \env ->
  runFlatM thing_inside $
    if sameMode new_mode (fe_mode env)
      then env
      else env { fe_mode = new_mode }
  where
    -- 'FlattenMode' has no Eq instance, so compare constructors by hand.
    sameMode FM_FlattenAll FM_FlattenAll = True
    sameMode FM_SubstOnly  FM_SubstOnly  = True
    -- sameMode (FM_Avoid tv1 b1) (FM_Avoid tv2 b2) = tv1 == tv2 && b1 == b2
    sameMode _             _             = False
-- | Run an action with the reduction depth in the 'CtLoc' bumped by one;
-- used when recursing after a successful type-family reduction.
bumpDepth :: FlatM a -> FlatM a
bumpDepth (FlatM thing_inside) = FlatM $ \env ->
  thing_inside (env { fe_loc = bumpCtLocDepth (fe_loc env) })
-- Flatten skolems
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
newFlattenSkolemFlatM :: TcType         -- F xis
                      -> FlatM (CtEvidence, TcTyVar)  -- [W] x:: F xis ~ fsk
-- Make a fresh flatten skolem for the given family application, using the
-- flavour and location from the ambient environment.
newFlattenSkolemFlatM ty
  = do { loc     <- getLoc
       ; flavour <- getFlavour
       ; liftTcS (newFlattenSkolem flavour loc ty) }
{-
Note [The flattening work list]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The "flattening work list", held in the fe_work field of FlattenEnv,
is a list of CFunEqCans generated during flattening. The key idea
is this. Consider flattening (Eq (F (G Int) (H Bool)):
* The flattener recursively calls itself on sub-terms before building
the main term, so it will encounter the terms in order
G Int
H Bool
F (G Int) (H Bool)
flattening to sub-goals
w1: G Int ~ fuv0
w2: H Bool ~ fuv1
w3: F fuv0 fuv1 ~ fuv2
* Processing w3 first is BAD, because we can't reduce it, so it'll
get put into the inert set, and later kicked out when w1, w2 are
solved. In Trac #9872 this led to inert sets containing hundreds
of suspended calls.
* So we want to process w1, w2 first.
* So you might think that we should just use a FIFO deque for the work-list,
so that putting adding goals in order w1,w2,w3 would mean we processed
w1 first.
* BUT suppose we have 'type instance G Int = H Char'. Then processing
w1 leads to a new goal
w4: H Char ~ fuv0
We do NOT want to put that on the far end of a deque! Instead we want
to put it at the *front* of the work-list so that we continue to work
on it.
So the work-list structure is this:
* The wl_funeqs (in TcS) is a LIFO stack; we push new goals (such as w4) on
top (extendWorkListFunEq), and take new work from the top
(selectWorkItem).
* When flattening, emitFlatWork pushes new flattening goals (like
w1,w2,w3) onto the flattening work list, fe_work, another
push-down stack.
* When we finish flattening, we *reverse* the fe_work stack
onto the wl_funeqs stack (which brings w1 to the top).
The function runFlatten initialises the fe_work stack, and reverses
it onto wl_fun_eqs at the end.
Note [Flattener EqRels]
~~~~~~~~~~~~~~~~~~~~~~~
When flattening, we need to know which equality relation -- nominal
or representation -- we should be respecting. The only difference is
that we rewrite variables by representational equalities when fe_eq_rel
is ReprEq, and that we unwrap newtypes when flattening w.r.t.
representational equality.
Note [Flattener CtLoc]
~~~~~~~~~~~~~~~~~~~~~~
The flattener does eager type-family reduction.
Type families might loop, and we
don't want GHC to do so. A natural solution is to have a bounded depth
to these processes. A central difficulty is that such a solution isn't
quite compositional. For example, say it takes F Int 10 steps to get to Bool.
How many steps does it take to get from F Int -> F Int to Bool -> Bool?
10? 20? What about getting from Const Char (F Int) to Char? 11? 1? Hard to
know and hard to track. So, we punt, essentially. We store a CtLoc in
the FlattenEnv and just update the environment when recurring. In the
TyConApp case, where there may be multiple type families to flatten,
we just copy the current CtLoc into each branch. If any branch hits the
stack limit, then the whole thing fails.
A consequence of this is that setting the stack limits appropriately
will be essentially impossible. So, the official recommendation if a
stack limit is hit is to disable the check entirely. Otherwise, there
will be baffling, unpredictable errors.
Note [Lazy flattening]
~~~~~~~~~~~~~~~~~~~~~~
The idea of FM_Avoid mode is to flatten less aggressively. If we have
a ~ [F Int]
there seems to be no great merit in lifting out (F Int). But if it was
a ~ [G a Int]
then we *do* want to lift it out, in case (G a Int) reduces to Bool, say,
which gets rid of the occurs-check problem. (For the flat_top Bool, see
comments above and at call sites.)
HOWEVER, the lazy flattening actually seems to make type inference go
*slower*, not faster. perf/compiler/T3064 is a case in point; it gets
*dramatically* worse with FM_Avoid. I think it may be because
floating the types out means we normalise them, and that often makes
them smaller and perhaps allows more re-use of previously solved
goals. But to be honest I'm not absolutely certain, so I am leaving
FM_Avoid in the code base. What I'm removing is the unique place
where it is *used*, namely in TcCanonical.canEqTyVar.
See also Note [Conservative unification check] in TcUnify, which gives
other examples where lazy flattening caused problems.
Bottom line: FM_Avoid is unused for now (Nov 14).
Note: T5321Fun got faster when I disabled FM_Avoid
T5837 did too, but it's pathological anyway
Note [Phantoms in the flattener]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we have
data Proxy p = Proxy
and we're flattening (Proxy ty) w.r.t. ReprEq. Then, we know that `ty`
is really irrelevant -- it will be ignored when solving for representational
equality later on. So, we omit flattening `ty` entirely. This may
violate the expectation of "xi"s for a bit, but the canonicaliser will
soon throw out the phantoms when decomposing a TyConApp. (Or, the
canonicaliser will emit an insoluble, in which case the unflattened version
yields a better error message anyway.)
-}
{- *********************************************************************
* *
* Externally callable flattening functions *
* *
* They are all wrapped in runFlatten, so their *
* flattening work gets put into the work list *
* *
********************************************************************* -}
-- | Flatten a single type; externally callable, hence wrapped in
-- 'runFlatten' so the generated work goes onto the main work list.
flatten :: FlattenMode -> CtEvidence -> TcType -> TcS (Xi, TcCoercion)
flatten mode ev ty = runFlatten mode ev (flatten_one ty)
-- | Flatten a bunch of types all at once, all at nominal role.
-- Externally callable, hence wrapped in 'runFlatten'.  In practice the
-- types are always the arguments of a saturated type family, so
-- ctEvFlavour ev = Nominal.
flattenManyNom :: CtEvidence -> [TcType] -> TcS ([Xi], [TcCoercion])
flattenManyNom ev tys = runFlatten FM_FlattenAll ev (flatten_many_nom tys)
{- *********************************************************************
* *
* The main flattening functions
* *
********************************************************************* -}
{- Note [Flattening]
~~~~~~~~~~~~~~~~~~~~
flatten ty ==> (xi, cc)
where
xi has no type functions, unless they appear under ForAlls
cc = Auxiliary given (equality) constraints constraining
the fresh type variables in xi. Evidence for these
is always the identity coercion, because internally the
fresh flattening skolem variables are actually identified
with the types they have been generated to stand in for.
Note that it is flatten's job to flatten *every type function it sees*.
flatten is only called on *arguments* to type functions, by canEqGiven.
Recall that in comments we use alpha[flat = ty] to represent a
flattening skolem variable alpha which has been generated to stand in
for ty.
----- Example of flattening a constraint: ------
flatten (List (F (G Int))) ==> (xi, cc)
where
xi = List alpha
cc = { G Int ~ beta[flat = G Int],
F beta ~ alpha[flat = F beta] }
Here
* alpha and beta are 'flattening skolem variables'.
* All the constraints in cc are 'given', and all their coercion terms
are the identity.
NB: Flattening Skolems only occur in canonical constraints, which
are never zonked, so we don't need to worry about zonking doing
accidental unflattening.
Note that we prefer to leave type synonyms unexpanded when possible,
so when the flattener encounters one, it first asks whether its
transitive expansion contains any type function applications. If so,
it expands the synonym and proceeds; if not, it simply returns the
unexpanded synonym.
Note [flatten_many performance]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In programs with lots of type-level evaluation, flatten_many becomes
part of a tight loop. For example, see test perf/compiler/T9872a, which
calls flatten_many a whopping 7,106,808 times. It is thus important
that flatten_many be efficient.
Performance testing showed that the current implementation is indeed
efficient. It's critically important that zipWithAndUnzipM be
specialized to TcS, and it's also quite helpful to actually `inline`
it. On test T9872a, here are the allocation stats (Dec 16, 2014):
* Unspecialized, uninlined: 8,472,613,440 bytes allocated in the heap
* Specialized, uninlined: 6,639,253,488 bytes allocated in the heap
* Specialized, inlined: 6,281,539,792 bytes allocated in the heap
To improve performance even further, flatten_many_nom is split off
from flatten_many, as nominal equality is the common case. This would
be natural to write using mapAndUnzipM, but even inlined, that function
is not as performant as a hand-written loop.
* mapAndUnzipM, inlined: 7,463,047,432 bytes allocated in the heap
* hand-written recursion: 5,848,602,848 bytes allocated in the heap
If you make any change here, pay close attention to the T9872{a,b,c} tests
and T5321Fun.
If we need to make this yet more performant, a possible way forward is to
duplicate the flattener code for the nominal case, and make that case
faster. This doesn't seem quite worth it, yet.
-}
flatten_many :: [Role] -> [Type] -> FlatM ([Xi], [TcCoercion])
-- Coercions :: Xi ~ Type, at roles given
-- See Note [flatten_many performance]: zipWithAndUnzipM must be
-- specialised to TcS and inlined here.
flatten_many roles tys
  = inline zipWithAndUnzipM flatten_at roles tys
  where
    flatten_at role ty = case role of
      Nominal          -> setEqRel NomEq  (flatten_one ty)
      Representational -> setEqRel ReprEq (flatten_one ty)
      Phantom          -> -- See Note [Phantoms in the flattener]:
                          -- the argument is irrelevant, so don't flatten it
                          return (ty, mkTcPhantomCo ty ty)
-- | Like 'flatten_many', but assumes that every role is nominal.
-- Hand-written recursion rather than mapAndUnzipM: see
-- Note [flatten_many performance].
flatten_many_nom :: [Type] -> FlatM ([Xi], [TcCoercion])
flatten_many_nom []       = return ([], [])
flatten_many_nom (t:ts)
  = do { (x,  c)  <- flatten_one t
       ; (xs, cs) <- flatten_many_nom ts
       ; return (x:xs, c:cs) }
------------------
flatten_one :: TcType -> FlatM (Xi, TcCoercion)
-- Flatten a type to get rid of type function applications, returning
-- the new type-function-free type, and a collection of new equality
-- constraints.  See Note [Flattening] for more detail.
--
-- Postcondition: Coercion :: Xi ~ TcType
-- The role on the result coercion matches the EqRel in the FlattenEnv

-- Literals are already flat.
flatten_one xi@(LitTy {})
  = do { role <- getRole
       ; return (xi, mkTcReflCo role xi) }

-- Type variables: apply the substitution (metavars + inerts); if the
-- variable maps to a type, recursively flatten that type.
flatten_one (TyVarTy tv)
  = do { mb_yes <- flatten_tyvar tv
       ; role <- getRole
       ; case mb_yes of
           Left tv' -> -- Done
                       do { traceFlat "flattenTyVar1" (ppr tv $$ ppr (tyVarKind tv'))
                          ; return (ty', mkTcReflCo role ty') }
                    where
                       ty' = mkTyVarTy tv'

           Right (ty1, co1)  -- Recurse
                    -> do { (ty2, co2) <- flatten_one ty1
                          ; traceFlat "flattenTyVar2" (ppr tv $$ ppr ty2)
                          ; return (ty2, co2 `mkTcTransCo` co1) } }

-- Applications: the role at which the argument must be flattened depends
-- on the head; under ReprEq a Phantom next-role means the argument can be
-- left alone (see Note [Phantoms in the flattener]).
flatten_one (AppTy ty1 ty2)
  = do { (xi1,co1) <- flatten_one ty1
       ; eq_rel <- getEqRel
       ; case (eq_rel, nextRole xi1) of
           (NomEq,  _)                -> flatten_rhs xi1 co1 NomEq
           (ReprEq, Nominal)          -> flatten_rhs xi1 co1 NomEq
           (ReprEq, Representational) -> flatten_rhs xi1 co1 ReprEq
           (ReprEq, Phantom)          ->
             return (mkAppTy xi1 ty2, co1 `mkTcAppCo` mkTcNomReflCo ty2) }
  where
    -- Flatten the argument at the requested role, then rebuild the
    -- application and the combined coercion.
    flatten_rhs xi1 co1 eq_rel2
      = do { (xi2,co2) <- setEqRel eq_rel2 $ flatten_one ty2
           ; traceFlat "flatten/appty"
                       (ppr ty1 $$ ppr ty2 $$ ppr xi1 $$
                        ppr co1 $$ ppr xi2)
           ; role1 <- getRole
           ; let role2 = eqRelRole eq_rel2
           ; return ( mkAppTy xi1 xi2
                    , mkTcTransAppCo role1 co1 xi1 ty1
                                     role2 co2 xi2 ty2
                                     role1 ) }  -- output should match fmode

-- Function arrows: flatten both sides at the ambient role.
flatten_one (FunTy ty1 ty2)
  = do { (xi1,co1) <- flatten_one ty1
       ; (xi2,co2) <- flatten_one ty2
       ; role <- getRole
       ; return (mkFunTy xi1 xi2, mkTcFunCo role co1 co2) }

flatten_one (TyConApp tc tys)
  -- Expand type synonyms that mention type families
  -- on the RHS; see Note [Flattening synonyms]
  | Just (tenv, rhs, tys') <- expandSynTyCon_maybe tc tys
  , let expanded_ty = mkAppTys (substTy (mkTopTvSubst tenv) rhs) tys'
  = do { mode <- getMode
       ; let used_tcs = tyConsOfType rhs
       ; case mode of
           FM_FlattenAll | anyNameEnv isTypeFamilyTyCon used_tcs
                         -> flatten_one expanded_ty
           _             -> flatten_ty_con_app tc tys }

  -- Otherwise, it's a type function application, and we have to
  -- flatten it away as well, and generate a new given equality constraint
  -- between the application and a newly generated flattening skolem variable.
  | isTypeFamilyTyCon tc
  = flatten_fam_app tc tys

  -- For * a normal data type application
  --     * data family application
  -- we just recursively flatten the arguments.
  | otherwise
  -- FM_Avoid stuff commented out; see Note [Lazy flattening]
  -- , let fmode' = case fmode of  -- Switch off the flat_top bit in FM_Avoid
  --                  FE { fe_mode = FM_Avoid tv _ }
  --                    -> fmode { fe_mode = FM_Avoid tv False }
  --                  _ -> fmode
  = flatten_ty_con_app tc tys

flatten_one ty@(ForAllTy {})
-- We allow for-alls when, but only when, no type function
-- applications inside the forall involve the bound type variables.
  = do { let (tvs, rho) = splitForAllTys ty
       ; (rho', co) <- setMode FM_SubstOnly $ flatten_one rho
                       -- Substitute only under a forall
                       -- See Note [Flattening under a forall]
       ; return (mkForAllTys tvs rho', foldr mkTcForAllCo co tvs) }
-- | Flatten the arguments of an ordinary (non-family) TyCon application
-- at the roles dictated by the ambient equality relation, and rebuild
-- the application together with its coercion.
flatten_ty_con_app :: TyCon -> [TcType] -> FlatM (Xi, TcCoercion)
flatten_ty_con_app tc tys
  = do { eq_rel <- getEqRel
       ; let role = eqRelRole eq_rel
       ; (xis, cos) <- if eq_rel == NomEq
                         then flatten_many_nom tys
                         else flatten_many (tyConRolesX role tc) tys
       ; return (mkTyConApp tc xis, mkTcTyConAppCo role tc cos) }
{-
Note [Flattening synonyms]
~~~~~~~~~~~~~~~~~~~~~~~~~~
Not expanding synonyms aggressively improves error messages, and
keeps types smaller. But we need to take care.
Suppose
type T a = a -> a
and we want to flatten the type (T (F a)). Then we can safely flatten
the (F a) to a skolem, and return (T fsk). We don't need to expand the
synonym. This works because TcTyConAppCo can deal with synonyms
(unlike TyConAppCo), see Note [TcCoercions] in TcEvidence.
But (Trac #8979) for
type T a = (F a, a) where F is a type function
we must expand the synonym in (say) T Int, to expose the type function
to the flattener.
Note [Flattening under a forall]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Under a forall, we
(a) MUST apply the inert substitution
(b) MUST NOT flatten type family applications
Hence FMSubstOnly.
For (a) consider c ~ a, a ~ T (forall b. (b, [c]))
If we don't apply the c~a substitution to the second constraint
we won't see the occurs-check error.
For (b) consider (a ~ forall b. F a b), we don't want to flatten
to (a ~ forall b.fsk, F a b ~ fsk)
because now the 'b' has escaped its scope. We'd have to flatten to
(a ~ forall b. fsk b, forall b. F a b ~ fsk b)
and we have not begun to think about how to make that work!
************************************************************************
* *
Flattening a type-family application
* *
************************************************************************
-}
flatten_fam_app, flatten_exact_fam_app, flatten_exact_fam_app_fully
  :: TyCon -> [TcType] -> FlatM (Xi, TcCoercion)
  --   flatten_fam_app             can be over-saturated
  --   flatten_exact_fam_app       is exactly saturated
  --   flatten_exact_fam_app_fully lifts out the application to top level
  -- Postcondition: Coercion :: Xi ~ F tys

-- Split off any over-saturating arguments, flatten the exactly-saturated
-- family application, then flatten the extra arguments as plain AppTys.
flatten_fam_app tc tys  -- Can be over-saturated
    = ASSERT2( tyConArity tc <= length tys
             , ppr tc $$ ppr (tyConArity tc) $$ ppr tys)
                 -- Type functions are saturated
                 -- The type function might be *over* saturated
                 -- in which case the remaining arguments should
                 -- be dealt with by AppTys
      do { let (tys1, tys_rest) = splitAt (tyConArity tc) tys
         ; (xi1, co1) <- flatten_exact_fam_app tc tys1
               -- co1 :: xi1 ~ F tys1

               -- all Nominal roles b/c the tycon is oversaturated
         ; (xis_rest, cos_rest) <- flatten_many (repeat Nominal) tys_rest
               -- cos_res :: xis_rest ~ tys_rest
         ; return ( mkAppTys xi1 xis_rest   -- NB mkAppTys: rhs_xi might not be a type variable
                                            --    cf Trac #5655
                  , mkTcAppCos co1 cos_rest -- (rhs_xi :: F xis) ; (F cos :: F xis ~ F tys)
                  ) }
-- Dispatch on the mode: FM_FlattenAll lifts the application out to top
-- level; FM_SubstOnly merely flattens the arguments in place.
flatten_exact_fam_app tc tys
  = do { mode <- getMode
       ; role <- getRole
       ; case mode of
           FM_FlattenAll -> flatten_exact_fam_app_fully tc tys

           FM_SubstOnly  -> do { (xis, cos) <- flatten_many roles tys
                               ; return ( mkTyConApp tc xis
                                        , mkTcTyConAppCo role tc cos ) }
             where
               -- These are always going to be Nominal for now,
               -- but not if #8177 is implemented
               roles = tyConRolesX role tc }

--      FM_Avoid tv flat_top ->
--        do { (xis, cos) <- flatten_many fmode roles tys
--           ; if flat_top || tv `elemVarSet` tyVarsOfTypes xis
--             then flatten_exact_fam_app_fully fmode tc tys
--             else return ( mkTyConApp tc xis
--                         , mkTcTyConAppCo (feRole fmode) tc cos ) }
-- The workhorse: try eager reduction (twice -- before and after
-- flattening the arguments), consult the flat-cache, and otherwise
-- allocate a fresh flatten skolem and emit a CFunEqCan for it.
flatten_exact_fam_app_fully tc tys
  -- See Note [Reduce type family applications eagerly]
  = try_to_reduce tc tys False id $
    do { -- First, flatten the arguments
         (xis, cos) <- setEqRel NomEq $ flatten_many_nom tys
       ; eq_rel <- getEqRel
       ; let role   = eqRelRole eq_rel
             ret_co = mkTcTyConAppCo role tc cos
              -- ret_co :: F xis ~ F tys

        -- Now, look in the cache
       ; mb_ct <- liftTcS $ lookupFlatCache tc xis
       ; flavour_role <- getFlavourRole
       ; case mb_ct of
           Just (co, rhs_ty, flav)  -- co :: F xis ~ fsk
             | (flav, NomEq) `canDischargeFR` flavour_role
             ->  -- Usable hit in the flat-cache
                 -- We certainly *can* use a Wanted for a Wanted
                do { traceFlat "flatten/flat-cache hit" $ (ppr tc <+> ppr xis $$ ppr rhs_ty)
                   ; (fsk_xi, fsk_co) <- flatten_one rhs_ty
                          -- The fsk may already have been unified, so flatten it
                          -- fsk_co :: fsk_xi ~ fsk
                   ; return (fsk_xi, fsk_co `mkTcTransCo`
                                     maybeTcSubCo eq_rel
                                                  (mkTcSymCo co) `mkTcTransCo`
                                     ret_co) }
                                    -- :: fsk_xi ~ F xis

           -- Try to reduce the family application right now
           -- See Note [Reduce type family applications eagerly]
           _ -> try_to_reduce tc xis True (`mkTcTransCo` ret_co) $
                do { let fam_ty = mkTyConApp tc xis
                   ; (ev, fsk) <- newFlattenSkolemFlatM fam_ty
                   ; let fsk_ty = mkTyVarTy fsk
                         co     = ctEvCoercion ev
                   ; liftTcS $ extendFlatCache tc xis (co, fsk_ty, ctEvFlavour ev)

                   -- The new constraint (F xis ~ fsk) is not necessarily inert
                   -- (e.g. the LHS may be a redex) so we must put it in the work list
                   ; let ct = CFunEqCan { cc_ev     = ev
                                        , cc_fun    = tc
                                        , cc_tyargs = xis
                                        , cc_fsk    = fsk }
                   ; emitFlatWork ct

                   ; traceFlat "flatten/flat-cache miss" $ (ppr fam_ty $$ ppr fsk)
                   ; return (fsk_ty, maybeTcSubCo eq_rel
                                                  (mkTcSymCo co)
                                     `mkTcTransCo` ret_co) }
        }
  where
    try_to_reduce :: TyCon   -- F, family tycon
                  -> [Type]  -- args, not necessarily flattened
                  -> Bool    -- add to the flat cache?
                  -> (   TcCoercion   -- :: xi ~ F args
                      -> TcCoercion ) -- what to return from outer function
                  -> FlatM (Xi, TcCoercion)   -- continuation upon failure
                  -> FlatM (Xi, TcCoercion)
    -- Attempt a one-step top-level reduction via matchFam; on success,
    -- recursively flatten the RHS (with bumped depth) and optionally
    -- record the fully-reduced result in the flat-cache.
    try_to_reduce tc tys cache update_co k
      = do { checkStackDepth (mkTyConApp tc tys)
           ; mb_match <- liftTcS $ matchFam tc tys
           ; case mb_match of
               Just (norm_co, norm_ty)
                 -> do { traceFlat "Eager T.F. reduction success" $
                         vcat [ppr tc, ppr tys, ppr norm_ty, ppr cache]
                       ; (xi, final_co) <- bumpDepth $ flatten_one norm_ty
                       ; let co = norm_co `mkTcTransCo` mkTcSymCo final_co
                       ; flavour <- getFlavour
                       ; when cache $
                         liftTcS $
                         extendFlatCache tc tys (co, xi, flavour)
                       ; return (xi, update_co $ mkTcSymCo co) }
               Nothing -> k }
{- Note [Reduce type family applications eagerly]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we come across a type-family application like (Append (Cons x Nil) t),
then, rather than flattening to a skolem etc, we may as well just reduce
it on the spot to (Cons x t). This saves a lot of intermediate steps.
Examples that are helped are tests T9872, and T5321Fun.
Performance testing indicates that it's best to try this *twice*, once
before flattening arguments and once after flattening arguments.
Adding the extra reduction attempt before flattening arguments cut
the allocation amounts for the T9872{a,b,c} tests by half.
An example of where the early reduction appears helpful:
type family Last x where
Last '[x] = x
Last (h ': t) = Last t
workitem: (x ~ Last '[1,2,3,4,5,6])
Flattening the argument never gets us anywhere, but trying to flatten
it at every step is quadratic in the length of the list. Reducing more
eagerly makes simplifying the right-hand type linear in its length.
Testing also indicated that the early reduction should *not* use the
flat-cache, but that the later reduction *should*. (Although the
effect was not large.) Hence the Bool argument to try_to_reduce. To
me (SLPJ) this seems odd; I get that eager reduction usually succeeds;
and if don't use the cache for eager reduction, we will miss most of
the opportunities for using it at all. More exploration would be good
here.
At the end, once we've got a flat rhs, we extend the flatten-cache to record
the result. Doing so can save lots of work when the same redex shows up more
than once. Note that we record the link from the redex all the way to its
*final* value, not just the single step reduction. Interestingly, using the
flat-cache for the first reduction resulted in an increase in allocations
of about 3% for the four T9872x tests. However, using the flat-cache in
the later reduction is a similar gain. I (Richard E) don't currently (Dec '14)
have any knowledge as to *why* these facts are true.
************************************************************************
* *
Flattening a type variable
* *
********************************************************************* -}
flatten_tyvar :: TcTyVar
              -> FlatM (Either TyVar (TcType, TcCoercion))
-- "Flattening" a type variable means to apply the substitution to it
-- Specifically, look up the tyvar in
--   * the internal MetaTyVar box
--   * the inerts
-- Return (Left tv') if it is not found, tv' has a properly zonked kind
--        (Right (ty, co)) if found, with co :: ty ~ tv

flatten_tyvar tv
  | not (isTcTyVar tv)             -- Happens when we flatten under a (forall a. ty)
  = flatten_tyvar3 tv
          -- So ty contains references to the non-TcTyVar a

  | otherwise
  = do { mb_ty <- liftTcS $ isFilledMetaTyVar_maybe tv
       ; role <- getRole
       ; case mb_ty of
           -- A filled metavariable: follow it (the caller will recurse).
           Just ty -> do { traceFlat "Following filled tyvar" (ppr tv <+> equals <+> ppr ty)
                         ; return (Right (ty, mkTcReflCo role ty)) } ;
           -- Not filled: consult the inert substitution.
           Nothing -> do { flavour_role <- getFlavourRole
                         ; flatten_tyvar2 tv flavour_role } }
flatten_tyvar2 :: TcTyVar -> CtFlavourRole
               -> FlatM (Either TyVar (TcType, TcCoercion))
-- Try in the inert equalities
-- See Definition [Applying a generalised substitution] in TcSMonad
-- See Note [Stability of flattening] in TcSMonad

flatten_tyvar2 tv flavour_role@(flavour, eq_rel)
  | Derived <- flavour  -- For derived equalities, consult the inert_model (only)
  = ASSERT( eq_rel == NomEq )  -- All derived equalities are nominal
    do { model <- liftTcS $ getInertModel
       ; case lookupVarEnv model tv of
           Just (CTyEqCan { cc_rhs = rhs })
             -> return (Right (rhs, pprPanic "flatten_tyvar2" (ppr tv $$ ppr rhs)))
                -- Evidence is irrelevant for Derived contexts
                -- (the panic coercion is never forced)
           _ -> flatten_tyvar3 tv }

  | otherwise  -- For non-derived equalities, consult the inert_eqs (only)
  = do { ieqs <- liftTcS $ getInertEqs
       ; case lookupVarEnv ieqs tv of
           Just (ct:_)   -- If the first doesn't work,
                         -- the subsequent ones won't either
             -- NOTE(review): the 'tv' bound in this pattern shadows the
             -- parameter 'tv'; presumably they coincide because the map is
             -- keyed on the constraint's own tyvar -- TODO confirm
             | CTyEqCan { cc_ev = ctev, cc_tyvar = tv, cc_rhs = rhs_ty } <- ct
             , ctEvFlavourRole ctev `eqCanRewriteFR` flavour_role
             ->  do { traceFlat "Following inert tyvar" (ppr tv <+> equals <+> ppr rhs_ty $$ ppr ctev)
                    ; let rewrite_co1 = mkTcSymCo (ctEvCoercion ctev)
                          rewrite_co = case (ctEvEqRel ctev, eq_rel) of
                            (ReprEq, _rel)  -> ASSERT( _rel == ReprEq )
                                    -- if this ASSERT fails, then
                                    -- eqCanRewriteFR answered incorrectly
                                               rewrite_co1
                            (NomEq, NomEq)  -> rewrite_co1
                            (NomEq, ReprEq) -> mkTcSubCo rewrite_co1

                    ; return (Right (rhs_ty, rewrite_co)) }
                    -- NB: if ct is Derived then fmode must be also, hence
                    -- we are not going to touch the returned coercion
                    -- so ctEvCoercion is fine.

           _other -> flatten_tyvar3 tv }
flatten_tyvar3 :: TcTyVar -> FlatM (Either TyVar a)
-- Always returns Left!  The variable itself is done; we just make sure
-- its kind is zonked (substituted only, never family-flattened).
flatten_tyvar3 tv
  = do { (zonked_kind, _kind_co)
           <- setMode FM_SubstOnly (flatten_one (tyVarKind tv))
       ; return (Left (setVarType tv zonked_kind)) }
{-
Note [An alternative story for the inert substitution]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
(This entire note is just background, left here in case we ever want
to return to the previous state of affairs)
We used (GHC 7.8) to have this story for the inert substitution inert_eqs
* 'a' is not in fvs(ty)
* They are *inert* in the weaker sense that there is no infinite chain of
(i1 `eqCanRewrite` i2), (i2 `eqCanRewrite` i3), etc
This means that flattening must be recursive, but it does allow
[G] a ~ [b]
[G] b ~ Maybe c
This avoids "saturating" the Givens, which can save a modest amount of work.
It is easy to implement, in TcInteract.kick_out, by only kicking out an inert
only if (a) the work item can rewrite the inert AND
(b) the inert cannot rewrite the work item
This is significantly harder to think about. It can save a LOT of work
in occurs-check cases, but we don't care about them much. Trac #5837
is an example; all the constraints here are Givens
[G] a ~ TF (a,Int)
-->
work TF (a,Int) ~ fsk
inert fsk ~ a
--->
work fsk ~ (TF a, TF Int)
inert fsk ~ a
--->
work a ~ (TF a, TF Int)
inert fsk ~ a
---> (attempting to flatten (TF a) so that it does not mention a
work TF a ~ fsk2
inert a ~ (fsk2, TF Int)
inert fsk ~ (fsk2, TF Int)
---> (substitute for a)
work TF (fsk2, TF Int) ~ fsk2
inert a ~ (fsk2, TF Int)
inert fsk ~ (fsk2, TF Int)
---> (top-level reduction, re-orient)
work fsk2 ~ (TF fsk2, TF Int)
inert a ~ (fsk2, TF Int)
inert fsk ~ (fsk2, TF Int)
---> (attempt to flatten (TF fsk2) to get rid of fsk2
work TF fsk2 ~ fsk3
work fsk2 ~ (fsk3, TF Int)
inert a ~ (fsk2, TF Int)
inert fsk ~ (fsk2, TF Int)
--->
work TF fsk2 ~ fsk3
inert fsk2 ~ (fsk3, TF Int)
inert a ~ ((fsk3, TF Int), TF Int)
inert fsk ~ ((fsk3, TF Int), TF Int)
Because the incoming given rewrites all the inert givens, we get more and
more duplication in the inert set. But this really only happens in pathological
cases, so we don't care.
************************************************************************
* *
Unflattening
* *
************************************************************************
An unflattening example:
[W] F a ~ alpha
flattens to
[W] F a ~ fmv (CFunEqCan)
[W] fmv ~ alpha (CTyEqCan)
We must solve both!
-}
-- | Restore the pre-flattening form of the simple constraints:
-- fill (or demote) the flattening meta-variables introduced during
-- flattening, unify what can be unified, drop reflexive equalities,
-- and zonk the result. The five numbered steps below must run in this
-- order; in particular the CFunEqCans are processed before the
-- CTyEqCans (see Note [Unflatten using funeqs first]).
unflatten :: Cts -> Cts -> TcS Cts
unflatten tv_eqs funeqs
= do { dflags <- getDynFlags
; tclvl <- getTcLevel
; traceTcS "Unflattening" $ braces $
vcat [ ptext (sLit "Funeqs =") <+> pprCts funeqs
, ptext (sLit "Tv eqs =") <+> pprCts tv_eqs ]
-- Step 1: unflatten the CFunEqCans, except if that causes an occurs check
-- Occurs check: consider [W] alpha ~ [F alpha]
-- ==> (flatten) [W] F alpha ~ fmv, [W] alpha ~ [fmv]
-- ==> (unify) [W] F [fmv] ~ fmv
-- See Note [Unflatten using funeqs first]
; funeqs <- foldrBagM (unflatten_funeq dflags) emptyCts funeqs
; traceTcS "Unflattening 1" $ braces (pprCts funeqs)
-- Step 2: unify the tv_eqs, if possible
; tv_eqs <- foldrBagM (unflatten_eq dflags tclvl) emptyCts tv_eqs
; traceTcS "Unflattening 2" $ braces (pprCts tv_eqs)
-- Step 3: fill any remaining fmvs with fresh unification variables
; funeqs <- mapBagM finalise_funeq funeqs
; traceTcS "Unflattening 3" $ braces (pprCts funeqs)
-- Step 4: remove any tv_eqs that look like ty ~ ty
; tv_eqs <- foldrBagM finalise_eq emptyCts tv_eqs
; let all_flat = tv_eqs `andCts` funeqs
; traceTcS "Unflattening done" $ braces (pprCts all_flat)
-- Step 5: zonk the result
-- Motivation: makes them nice and ready for the next step
-- (see TcInteract.solveSimpleWanteds)
; zonkSimples all_flat }
where
----------------
unflatten_funeq :: DynFlags -> Ct -> Cts -> TcS Cts
unflatten_funeq dflags ct@(CFunEqCan { cc_fun = tc, cc_tyargs = xis
, cc_fsk = fmv, cc_ev = ev }) rest
= do { -- fmv should be an un-filled flatten meta-tv;
-- we now fix its final value by filling it, being careful
-- to observe the occurs check. Zonking will eliminate it
-- altogether in due course
rhs' <- zonkTcType (mkTyConApp tc xis)
; case occurCheckExpand dflags fmv rhs' of
OC_OK rhs'' -- Normal case: fill the tyvar
-> do { setEvBindIfWanted ev
(EvCoercion (mkTcReflCo (ctEvRole ev) rhs''))
; unflattenFmv fmv rhs''
; return rest }
_ -> -- Occurs check: keep the constraint unsolved
return (ct `consCts` rest) }
unflatten_funeq _ other_ct _
= pprPanic "unflatten_funeq" (ppr other_ct)
----------------
finalise_funeq :: Ct -> TcS Ct
finalise_funeq (CFunEqCan { cc_fsk = fmv, cc_ev = ev })
= do { demoteUnfilledFmv fmv
; return (mkNonCanonical ev) }
finalise_funeq ct = pprPanic "finalise_funeq" (ppr ct)
----------------
unflatten_eq :: DynFlags -> TcLevel -> Ct -> Cts -> TcS Cts
unflatten_eq dflags tclvl ct@(CTyEqCan { cc_ev = ev, cc_tyvar = tv, cc_rhs = rhs }) rest
| isFmvTyVar tv -- Previously these fmvs were untouchable,
-- but now they are touchable
-- NB: unlike unflattenFmv, filling a fmv here does
-- bump the unification count; it is "improvement"
-- Note [Unflattening can force the solver to iterate]
= do { lhs_elim <- tryFill dflags tv rhs ev
; if lhs_elim then return rest else
do { rhs_elim <- try_fill dflags tclvl ev rhs (mkTyVarTy tv)
; if rhs_elim then return rest else
return (ct `consCts` rest) } }
| otherwise
= return (ct `consCts` rest)
unflatten_eq _ _ ct _ = pprPanic "unflatten_irred" (ppr ct)
----------------
finalise_eq :: Ct -> Cts -> TcS Cts
finalise_eq (CTyEqCan { cc_ev = ev, cc_tyvar = tv
, cc_rhs = rhs, cc_eq_rel = eq_rel }) rest
| isFmvTyVar tv
= do { ty1 <- zonkTcTyVar tv
; ty2 <- zonkTcType rhs
; let is_refl = ty1 `tcEqType` ty2
; if is_refl then do { setEvBindIfWanted ev
(EvCoercion $
mkTcReflCo (eqRelRole eq_rel) rhs)
; return rest }
else return (mkNonCanonical ev `consCts` rest) }
| otherwise
= return (mkNonCanonical ev `consCts` rest)
finalise_eq ct _ = pprPanic "finalise_irred" (ppr ct)
----------------
-- try_fill: orient (ty1 ~ ty2) as a binding of ty1's tyvar when ty1
-- is a touchable (or flatten-meta) tyvar of compatible kind, and
-- defer to tryFill; otherwise report no progress (False).
try_fill dflags tclvl ev ty1 ty2
| Just tv1 <- tcGetTyVar_maybe ty1
, isTouchableOrFmv tclvl tv1
, typeKind ty1 `isSubKind` tyVarKind tv1
= tryFill dflags tv1 ty2 ev
| otherwise
= return False
tryFill :: DynFlags -> TcTyVar -> TcType -> CtEvidence -> TcS Bool
-- (tryFill tv rhs ev) sees if 'tv' is an un-filled MetaTv
-- If so, and if tv does not appear in 'rhs', set tv := rhs
-- bind the evidence (which should be a CtWanted) to Refl<rhs>
-- and return True. Otherwise return False
-- Note: the occurs-check branch deliberately leaves the variable
-- unfilled and reports no progress; Givens never reach here (ASSERT2).
tryFill dflags tv rhs ev
= ASSERT2( not (isGiven ev), ppr ev )
do { is_filled <- isFilledMetaTyVar tv
; if is_filled then return False else
do { rhs' <- zonkTcType rhs
; case occurCheckExpand dflags tv rhs' of
OC_OK rhs'' -- Normal case: fill the tyvar
-> do { setEvBindIfWanted ev
(EvCoercion (mkTcReflCo (ctEvRole ev) rhs''))
; unifyTyVar tv rhs''
; return True }
_ -> -- Occurs check
return False } }
{-
Note [Unflatten using funeqs first]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
[W] G a ~ Int
[W] F (G a) ~ G a
do not want to end up with
[W] F Int ~ Int
because that might actually hold! Better to end up with the two above
unsolved constraints. The flat form will be
G a ~ fmv1 (CFunEqCan)
F fmv1 ~ fmv2 (CFunEqCan)
fmv1 ~ Int (CTyEqCan)
fmv1 ~ fmv2 (CTyEqCan)
Unflatten using the fun-eqs first.
-}
|
ghc-android/ghc
|
compiler/typecheck/TcFlatten.hs
|
bsd-3-clause
| 55,943
| 54
| 23
| 16,699
| 5,621
| 2,998
| 2,623
| -1
| -1
|
{-# Language PatternGuards #-}
module Blub
( blub
, foo
, bar
) where
import Ugah.Foo
import Control.Applicative
import Control.Monad
import Ugah.Blub
f :: Int -> Int
f = (+ 3)
-- NOTE(review): the bare token below is syntactically invalid Haskell;
-- this file lives under tests/goldenFiles, so it is presumably an
-- intentional fixture — confirm before "fixing" it.
ddd
|
jystic/hsimport
|
tests/goldenFiles/ModuleTest19.hs
|
bsd-3-clause
| 194
| 0
| 5
| 45
| 57
| 35
| 22
| -1
| -1
|
{-# LANGUAGE CPP #-}
module Web.Browser
( openBrowser
) where
#if defined(mingw32_HOST_OS)
import Web.Browser.Windows (openBrowserWindows)
#else
import Data.List (isInfixOf)
import System.Info (os)
import Web.Browser.Linux (openBrowserLinux)
import Web.Browser.OSX (openBrowserOSX)
#endif
-- | Open a URL in the user's preferred web browser, reporting whether
-- the attempt succeeded. The implementation is chosen per platform:
-- at compile time on Windows (CPP), at run time elsewhere via 'os'.
openBrowser :: String -> IO Bool
#if defined(mingw32_HOST_OS)
openBrowser = openBrowserWindows
#else
openBrowser
  | "linux" `isInfixOf` os = openBrowserLinux
  | "bsd" `isInfixOf` os = openBrowserLinux
  | "darwin" `isInfixOf` os = openBrowserOSX
  | otherwise = error "unsupported platform"
#endif
|
rightfold/open-browser
|
lib/Web/Browser.hs
|
bsd-3-clause
| 728
| 0
| 6
| 138
| 48
| 32
| 16
| 12
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module BitcoinCore.Transaction.Script where
import BitcoinCore.Transaction.Optcodes (OPCODE(..))
import BitcoinCore.Keys (PubKeyHash(..))
import General.Hash (Hash(..))
import Prelude hiding (concat, reverse, sequence)
import qualified Data.ByteString as BS
import Data.ByteString (ByteString)
import Data.ByteString.Base16 (encode)
import Data.Binary (Binary(..))
import Data.Binary.Put (Put, putWord8, putByteString)
import Data.Binary.Get (Get, getWord8, getByteString, isolate, bytesRead)
import Data.List (reverse)
import Test.QuickCheck.Arbitrary (Arbitrary(..))
import Test.QuickCheck.Gen (listOf, oneof, choose, vectorOf)
-- | A script: a flat list of components, interpreted left to right.
newtype Script = Script [ ScriptComponent ]
deriving (Eq, Show)
-- | One script element: either an opcode or a literal byte string push.
data ScriptComponent
= OP OPCODE
| Txt ByteString
deriving (Eq)
-- Show renders pushed bytes hex-encoded for readability.
instance Show ScriptComponent where
show (OP opcode) = "OP " ++ show opcode
show (Txt bs) = "Txt " ++ (show . encode) bs
-- Binary serialisation delegates to the helpers below.
instance Binary ScriptComponent where
put = putScriptComponent
get = getScriptComponent
-- | Standard pay-to-pubkey-hash (P2PKH) locking script:
-- OP_DUP OP_HASH160 <pubKeyHash> OP_EQUALVERIFY OP_CHECKSIG.
payToPubkeyHash :: PubKeyHash -> Script
payToPubkeyHash pubKeyHash = Script
[OP OP_DUP, OP OP_HASH160, (Txt . hash) pubKeyHash , OP OP_EQUALVERIFY, OP OP_CHECKSIG]
-- | Serialise every component of a script, in order.
putScript :: Script -> Put
putScript (Script script) =
mapM_ put script
-- | Serialise one component. Opcodes become their single code byte.
-- Pushes of fewer than 76 bytes are written as a length byte followed
-- by the raw bytes (byte values 1-75 are direct-push opcodes); longer
-- pushes would need OP_PUSHDATA1/2/4, which is not implemented yet.
putScriptComponent :: ScriptComponent -> Put
putScriptComponent (OP opcode) = putWord8 . fromIntegral . fromEnum $ opcode
putScriptComponent (Txt bs)
| BS.length bs < 76 = do
putWord8 . fromIntegral . BS.length $ bs
putByteString bs
| otherwise = error $ "Need to implement OP_PUSHDATA1, OP_PUSHDATA2, etc. BS: " ++ (show . encode) bs
-- | Decode a 'Script' occupying exactly @lengthBytes@ bytes of input.
-- Components are read one at a time until the byte budget is spent;
-- 'isolate' guarantees we neither under- nor over-consume the input.
getScript :: Int -> Get Script
getScript lengthBytes = isolate lengthBytes (fmap (Script . reverse) (loop []))
  where
    -- Accumulate components in reverse; stop once the budget is used up.
    loop parsed = do
      consumed <- bytesRead
      if consumed >= fromIntegral lengthBytes
        then return parsed
        else do
          component <- get
          loop (component : parsed)
-- | Decode one component: byte values 1-75 introduce a literal push of
-- that many bytes; any other value is decoded as an opcode.
getScriptComponent :: Get ScriptComponent
getScriptComponent = do
code <- fromIntegral <$> getWord8
if 0 < code && code < 76
then Txt <$> getByteString code
else return $ OP $ toEnum code
instance Arbitrary Script where
arbitrary = Script <$> listOf arbitrary
instance Arbitrary ScriptComponent where
-- Generated pushes stay in the 1-75 byte range that
-- putScriptComponent can actually serialise.
arbitrary = oneof [genTxt, genOp]
where
genTxt = do
txtLength <- choose (1, 75)
Txt . BS.pack <$> vectorOf txtLength arbitrary
genOp = OP <$> arbitrary
|
clample/lamdabtc
|
backend/src/BitcoinCore/Transaction/Script.hs
|
bsd-3-clause
| 2,466
| 0
| 14
| 484
| 766
| 412
| 354
| 64
| 2
|
import Data.Map as Map
import Control.Monad.State
-- | Tagless-final syntax for a small typed DSL: literals and pairs,
-- lambda / application / fixpoint, boolean connectives, arithmetic with
-- separate Int and Float variants, comparisons and a conditional.
class SLSymantics repr where
int :: Int -> repr Int
bool :: Bool -> repr Bool
float :: Float -> repr Float
tup2:: (repr a, repr b) -> repr (a, b)
-- higher-order constructs
lam :: (repr a -> repr b) -> repr (a -> b)
app :: repr (a -> b) -> repr a -> repr b
fix :: (repr a -> repr a) -> repr a
-- boolean operators
and :: repr Bool -> repr Bool -> repr Bool
or :: repr Bool -> repr Bool -> repr Bool
-- integer arithmetic
add :: repr Int -> repr Int -> repr Int
sub :: repr Int -> repr Int -> repr Int
mul :: repr Int -> repr Int -> repr Int
div :: repr Int -> repr Int -> repr Int
-- floating-point arithmetic
addf:: repr Float -> repr Float -> repr Float
subf:: repr Float -> repr Float -> repr Float
mulf:: repr Float -> repr Float -> repr Float
divf:: repr Float -> repr Float -> repr Float
mod :: repr Int -> repr Int -> repr Int
modf:: repr Float -> repr Float -> repr Float
sqrt_:: repr Float -> repr Float
-- comparisons and conditional
leq :: repr Int -> repr Int -> repr Bool
leqf:: repr Float -> repr Float -> repr Bool
if_ :: repr Bool -> repr a -> repr a -> repr a
-- | A named binder in the global definition map.
newtype Bind = Bind { name :: String } deriving (Eq, Ord, Show)
-- Register a definition under the given binder in the State map.
define f expr = modify $ insert f expr
-- | "Complex" square root encoded as a pair of Floats:
-- (sqrt x, 0) for non-negative x, and (0, sqrt (-x)) otherwise.
csqrt :: SLSymantics repr => State (Map Bind (repr (Float -> (Float, Float)))) ()
csqrt = define (Bind "csqrt") $ lam
(\x -> if_ (leqf (float 0.0) x)
(tup2 (sqrt_ x, float 0.0))
(tup2 (float 0.0, sqrt_ (mulf (float (-1.0)) x))))
-- | Run a definition-building action against the empty map.
sl :: State (Map Bind (repr a)) () -> Map Bind (repr a)
sl f = execState f empty
-- | The definition environment containing just csqrt.
quadroots :: SLSymantics repr => Map Bind (repr (Float -> (Float, Float)))
quadroots = sl $ do csqrt
|
staltz/gloc
|
sketch/sl.hs
|
bsd-3-clause
| 1,741
| 0
| 19
| 554
| 827
| 403
| 424
| 37
| 1
|
-----------------------------------------------------------------------------
-- |
-- Module : Control.Concurrent.QSemN
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : experimental
-- Portability : non-portable (concurrency)
--
-- Quantity semaphores in which each thread may wait for an arbitrary
-- \"amount\".
--
-----------------------------------------------------------------------------
module Control.Concurrent.QSemN
( -- * General Quantity Semaphores
QSemN, -- abstract
newQSemN, -- :: Int -> IO QSemN
waitQSemN, -- :: QSemN -> Int -> IO ()
signalQSemN -- :: QSemN -> Int -> IO ()
) where
import Prelude
import Control.Concurrent.MVar
import Data.Typeable
#include "Typeable.h"
-- |A 'QSemN' is a quantity semaphore, in which the available
-- \"quantity\" may be signalled or waited for in arbitrary amounts.
-- The MVar holds the currently free quantity together with a FIFO list
-- of blocked waiters, each paired with the amount it is waiting for.
newtype QSemN = QSemN (MVar (Int,[(Int,MVar ())]))
INSTANCE_TYPEABLE0(QSemN,qSemNTc,"QSemN")
-- |Create a 'QSemN' holding the supplied initial quantity and an
-- empty waiter queue.
newQSemN :: Int -> IO QSemN
newQSemN initial = fmap QSemN (newMVar (initial, []))
-- |Wait for the specified quantity to become available. If enough is
-- free the call returns at once; otherwise the caller is queued (FIFO)
-- and blocks until 'signalQSemN' wakes it.
waitQSemN :: QSemN -> Int -> IO ()
waitQSemN (QSemN sem) sz = do
(avail,blocked) <- takeMVar sem -- gain ex. access
if (avail - sz) >= 0 then
-- discharging 'sz' still leaves the semaphore
-- in an 'unblocked' state.
putMVar sem (avail-sz,blocked)
else do
-- Not enough quantity: enqueue a private MVar at the back of the
-- wait list and block on it until a signal fills it.
block <- newEmptyMVar
putMVar sem (avail, blocked++[(sz,block)])
takeMVar block
-- NOTE(review): if this thread is killed while blocked in the final
-- takeMVar, its entry stays in the wait list and a later signal will
-- still pay out its quantity to the dead waiter — confirm whether
-- callers depend on async-exception safety here.
-- |Signal that a given quantity is now available from the 'QSemN'.
signalQSemN :: QSemN -> Int -> IO ()
signalQSemN (QSemN sem) n = do
(avail,blocked) <- takeMVar sem
(avail',blocked') <- free (avail+n) blocked
putMVar sem (avail',blocked')
where
-- Walk the wait list in order, waking every waiter whose request
-- fits in the quantity accumulated so far; a waiter that does not
-- fit is kept in place, and smaller later waiters may still be woken.
free avail [] = return (avail,[])
free avail ((req,block):blocked)
| avail >= req = do
putMVar block ()
free (avail-req) blocked
| otherwise = do
(avail',blocked') <- free avail blocked
return (avail',(req,block):blocked')
|
FranklinChen/hugs98-plus-Sep2006
|
packages/base/Control/Concurrent/QSemN.hs
|
bsd-3-clause
| 2,191
| 10
| 14
| 441
| 518
| 284
| 234
| -1
| -1
|
module Reinforce.Algorithms.Double.Internal where
import Reinforce.Algorithms.Internal (RLParams)
class RLParams m r => DoubleTDLearning m s a r | m -> s a r where
choose :: s -> m a
update1 :: s -> a -> r -> m ()
update2 :: s -> a -> r -> m ()
actions1 :: s -> m [a]
actions2 :: s -> m [a]
value1 :: s -> a -> m r
value2 :: s -> a -> m r
|
stites/reinforce
|
reinforce-algorithms/src/Reinforce/Algorithms/Double/Internal.hs
|
bsd-3-clause
| 365
| 0
| 11
| 103
| 169
| 90
| 79
| -1
| -1
|
-- NOTE(review): 'f' and 'x' are unbound; given the hlint-refactor test
-- path, this one-liner is presumably an example exercising
-- redundant-bracket removal ((f x) x => f x x) — confirm before changing.
yes = (f x) x
|
mpickering/hlint-refactor
|
tests/examples/Bracket0.hs
|
bsd-3-clause
| 14
| 0
| 7
| 5
| 15
| 7
| 8
| 1
| 1
|
module LetIn3 where
--A definition can be lifted from a where or let into the surronding binding group.
--Lifting a definition widens the scope of the definition.
--In this example, lift 'sq' in 'sumSquares'
--This example aims to test lifting a definition from a let clause to a let clause,
-- 'sq' is the definition the lifting refactoring is expected to move
-- out of the inner let into the outer one.
sumSquares x y = let pow=2
in (let sq 0=0
sq z=z^pow
in sq x + sq y)
-- 'anotherFun' has its own, unrelated, where-bound 'sq'.
anotherFun 0 y = sq y
where sq x = x^2
|
kmate/HaRe
|
old/testing/liftOneLevel/LetIn3.hs
|
bsd-3-clause
| 503
| 0
| 12
| 182
| 95
| 48
| 47
| 7
| 2
|
module Test14 where
-- Pairs the let-bound value with the literal 45: f == (45, 45).
f = (let x = 45 in (x,45))
|
kmate/HaRe
|
old/testing/refacSlicing/Test14.hs
|
bsd-3-clause
| 48
| 0
| 9
| 12
| 29
| 17
| 12
| 2
| 1
|
module F10 where
-- f10f applies its argument at two different arities (h 1 2 and h 3),
-- exercising the arity analyser.
f10f = \h -> (h 1 2, h 3)
-- Curried addition.
f10g = \x -> \y -> x+y
f10h = f10f f10g
f10x1 = fst f10h -- = f10g 1 2 = 3
f10x2 = snd f10h -- = f10g 3, a partial application
f10 = f10x2 f10x1 -- = 3 + 3 = 6
|
hferreiro/replay
|
testsuite/tests/arityanal/f10.hs
|
bsd-3-clause
| 144
| 0
| 7
| 44
| 79
| 43
| 36
| 7
| 1
|
module Arbitrary.Program where
import qualified Numeric.Limp.Program as P
import Numeric.Limp.Rep
import Arbitrary.Var
import Arbitrary.Assignment
import Test.QuickCheck
import Control.Applicative
type Program' = P.Program ZVar RVar IntDouble
-- | A program paired with an assignment generated over the same
-- variable pool, so the assignment mentions the program's variables.
data ProgramAss = ProgramAss Program' Assignment'
deriving Show
instance Arbitrary ProgramAss where
arbitrary
= do a <- arbitrary
ProgramAss <$> program a <*> assignment a
instance Arbitrary Program' where
arbitrary = arbitrary >>= program
-- | Generate a random program over the given variables: a random
-- optimisation direction, a real-valued objective, constraints, and a
-- list of per-variable bounds.
program :: Vars -> Gen Program'
program vs
= do dir <- elements [P.Minimise, P.Maximise]
obj <- linearR vs
cons <- constraints vs
bnds <- listOf (bounds vs)
return $ P.program dir obj cons bnds
-- | A random real-valued linear function over a non-empty selection of
-- the variables. The coefficient list is infinite; 'zip' truncates it
-- to the length of the chosen variable list.
linearR :: Vars -> Gen (P.Linear ZVar RVar IntDouble P.KR)
linearR (Vars zs rs)
= do let vs = map Left zs ++ map Right rs
vs' <- listOf1 (elements vs)
cs' <- infiniteListOf arbitrary
summand <- arbitrary
return $ P.LR (vs' `zip` cs') summand
-- | As 'linearR', but integer-valued and over the integer variables only.
linearZ :: Vars -> Gen (P.Linear ZVar RVar IntDouble P.KZ)
linearZ (Vars zs _rs)
= do vs' <- listOf1 (elements zs)
cs' <- infiniteListOf arbitrary
summand <- arbitrary
return $ P.LZ (vs' `zip` cs') summand
-- | A random constraint. The (:&&) alternative recurses on both sides;
-- since only one of the eight alternatives recurses, the expected size
-- is finite and generation terminates with probability 1.
constraints :: Vars -> Gen (P.Constraint ZVar RVar IntDouble)
constraints vs
= oneof
[ (P.:==) <$> lR <*> lR
, (P.:<=) <$> lR <*> lR
, (P.:<) <$> lZ <*> lZ
, (P.:>=) <$> lR <*> lR
, (P.:>) <$> lZ <*> lZ
, P.Between <$> lR <*> lR <*> lR
, (P.:&&) <$> constraints vs <*> constraints vs
, return P.CTrue ]
where
lR = linearR vs
lZ = linearZ vs
-- | A random bound on a single integer or real variable.
bounds :: Vars -> Gen (P.Bounds ZVar RVar IntDouble)
bounds (Vars zs rs)
= oneof [bZ, bR]
where
bZ = do v <- elements zs
a <- arbitrary
b <- arbitrary
return $ P.BoundZ (a,v,b)
bR = do v <- elements rs
a <- arbitrary
b <- arbitrary
return $ P.BoundR (a,v,b)
|
amosr/limp
|
tests/Arbitrary/Program.hs
|
mit
| 1,985
| 0
| 11
| 559
| 766
| 387
| 379
| 60
| 1
|
module Renamer.Env
( Rn
, RnName(..)
, RnEnv
, runRn
, defaultEnv
, local
, insertLocalType
, insertLocalValue
, insertLocalInterface
, insertInternalType
, insertInternalValue
, ident
, renameType
, renameValue
, renameIdentValue
, renameInterface
, joinName
-- for imports
, findValuesAndTypes
) where
import Renamer.Error
import Lib.Registry
import Typing.TypeError (TypeError(ExtraneousImplementation, UnknownInterface))
import Util.Env
import Util.Error
import Util.Scope
import Data.List (intercalate)
import Data.Maybe (maybe)
-- Extra environment threaded through renaming.
data RenamerEnv = RenamerEnv
{ modName :: String -- ^ name of the module currently being renamed
} deriving (Show)
-- | A renamed interface: its renamed name plus a map from the
-- source-level method names to their renamed counterparts.
data RnIntf = RnIntf RnName (Field RdrName RnName)
deriving (Show)
instance Env RenamerEnv where
type KeyType RenamerEnv = RdrName
type ValueType RenamerEnv = RnName
type InterfaceType RenamerEnv = RnIntf
deleteBetween _ _ = id
-- Helper interface for the Rn monad
type Rn a = Scoped RenamerEnv a
type RnEnv = Scope RenamerEnv
-- | Run a renaming computation for the named module against the given
-- scope, first recording the module name in the environment.
runRn :: String -> Rn a -> RnEnv -> Result (RnEnv, a)
runRn modName rn state =
runScoped rn' state
where
rn' = do
updateEnv $ \e -> e { modName }
rn
-- | The initial scope: every builtin value, constructor and type from
-- the registry, registered under internal (unqualified) names.
defaultEnv :: RnEnv
defaultEnv =
createScope
values
types
[]
(RenamerEnv { modName = "" })
where
values = fromRegistry isValue ++ fromRegistry isCtor
types = fromRegistry isType
fromRegistry pred = mkBuiltin . name <$> filter pred registry
mkBuiltin n = ((Nothing, n), Internal n)
-- | A source-level ("reader") name: optional module qualifier + name.
type RdrName = (Maybe String, String)
-- | A renamed name: either owned by a defining module ('External') or
-- a builtin/internal name ('Internal').
data RnName
= External (String, String)
| Internal String
deriving (Show)
{-mkBuiltins :: [String] -> RenamerEnv-}
-- Helpers
-- | The fully qualified string form of a name in the current module.
local :: String -> Rn String
local name = do
mod <- thisMod
return $ joinName (mod, name)
-- | The unqualified reader-name form.
localRdr :: String -> Rn (Maybe String, String)
localRdr name =
return (Nothing, name)
-- | An 'External' name owned by the current module.
external :: String -> Rn RnName
external name = do
mod <- thisMod
return $ External (mod, name)
internal :: String -> Rn RnName
internal = return . Internal
-- | Build a key and a value from the same source name and insert the
-- pair using the supplied insertion function.
insertion :: (String -> Rn a) -> (String -> Rn b) -> (a -> b -> Rn ()) -> String -> Rn ()
insertion rdrName rnName insert name = do
key <- rdrName name
value <- rnName name
insert key value
-- Insert a locally defined (External) type / value.
insertLocalType :: String -> Rn ()
insertLocalType = insertion localRdr external insertType
insertLocalValue :: String -> Rn ()
insertLocalValue = insertion localRdr external insertValue
-- | Register an interface and all of its method names at once.
insertLocalInterface :: String -> [String] -> Rn ()
insertLocalInterface intf methods = do
intfRdr <- localRdr intf
intfRn <- external intf
methods' <- mapM mkMethod methods
insertInterface intfRdr (RnIntf intfRn methods')
where
mkMethod str = (,) <$> localRdr str <*> external str
-- Insert a builtin (Internal) type / value.
insertInternalType :: String -> Rn ()
insertInternalType = insertion localRdr internal insertType
insertInternalValue :: String -> Rn ()
insertInternalValue = insertion localRdr internal insertValue
-- | Split a dotted identifier path into (optional module, name), e.g.
-- @ident ["Data","List","map"] == (Just "Data.List", "map")@ and
-- @ident ["x"] == (Nothing, "x")@. The input must be non-empty; an
-- empty path is a programmer error and now fails with a descriptive
-- message instead of a bare 'undefined'.
ident :: [String] -> RdrName
ident [] = error "Renamer.Env.ident: empty identifier path"
ident [x] = (Nothing, x)
ident xs =
  -- xs has at least two elements here, so init/last are safe.
  let mod = intercalate "." $ init xs
      name = last xs
   in (Just mod, name)
-- | Flat string form of a renamed name.
rnName :: RnName -> String
rnName (Internal n) = n
rnName (External n) = joinName n
-- | Look up a reader name, throwing UnknownVariable when absent.
lookupRnName :: (RdrName -> Rn (Maybe RnName)) -> RdrName -> Rn RnName
lookupRnName lookup rdrName = do
maybeName <- lookup rdrName
maybe unknown return maybeName
where
-- TODO: better errors here according to the type of the variable
unknown = throwError $ UnknownVariable $ showRdrName rdrName
-- | As 'lookupRnName', but returning the flat string form.
lookupRdrName :: (RdrName -> Rn (Maybe RnName)) -> RdrName -> Rn String
lookupRdrName f = fmap rnName . lookupRnName f
-- Rename an unqualified type / value name.
renameType :: String -> Rn String
renameType name = lookupRdrName lookupType (Nothing, name)
renameValue :: String -> Rn String
renameValue name = lookupRdrName lookupValue (Nothing, name)
-- | Rename a (possibly dotted) identifier path to a value name.
renameIdentValue :: [String] -> Rn String
renameIdentValue = lookupRdrName lookupValue . ident
-- | Rename an interface, returning its renamed name together with a
-- function for renaming its methods (which throws
-- ExtraneousImplementation for methods not in the interface).
renameInterface :: String -> Rn (String, String -> Rn String)
renameInterface name = do
maybeIntf <- lookupInterface =<< localRdr name
RnIntf name methods <- maybe unknownInterface return maybeIntf
return (rnName name, renameIntfMethod methods)
where
unknownInterface = throwError $ UnknownInterface $ name
renameIntfMethod :: Field RdrName RnName -> String -> Rn String
renameIntfMethod methods method = do
method' <- localRdr method
case Prelude.lookup method' methods of
Nothing -> unknownIntfMethod
Just name -> return $ rnName name
where
unknownIntfMethod = throwError $ ExtraneousImplementation $ method
-- | Human-readable form of a reader name, qualified when a module
-- qualifier is present.
showRdrName :: RdrName -> String
showRdrName (Just qualifier, localName) = joinName (qualifier, localName)
showRdrName (Nothing, localName) = localName
-- | Render a qualified name as @module.name@.
joinName :: (String, String) -> String
joinName (qualifier, localName) = concat [qualifier, ".", localName]
-- HELPERS
-- | Name of the module currently being renamed.
thisMod :: Rn String
thisMod = getEnv modName
-- For imports
-- | Collect the values, types and interfaces that the named module
-- defines, as pairs of (unqualified, alias-qualified) reader names
-- suitable for insertion into the importing module's scope.
findValuesAndTypes :: String -> Maybe String -> RnEnv -> ([(RdrName, RdrName)], [(RdrName, RdrName)])
findValuesAndTypes modName modAlias env =
(rdrNames values, rdrNames types ++ rdrNames interfaces)
where
valuesResult = runScoped (filterValues filter) env
values = getValue valuesResult
typesResult = runScoped (filterTypes filter) env
types = getValue typesResult
interfacesResult = runScoped (filterInterfaces filterIntf) env
interfaces = getValue interfacesResult
-- Keep only names External to the requested module.
filter _ (External (m, _)) = m == modName
filter _ _ = False
filterIntf _ (RnIntf (External (m, _)) _) = m == modName
filterIntf _ _ = False
rdrNames :: [(RdrName, a)] -> [(RdrName, RdrName)]
rdrNames = map $ \((_, name), _) -> ((Nothing, name), (modAlias, name))
-- NOTE(review): partial — assumes runScoped always succeeds for these
-- pure filter queries; a Left result would hit 'undefined'. Confirm.
getValue (Right (_, a)) = a
getValue _ = undefined
|
tadeuzagallo/verve-lang
|
src/Renamer/Env.hs
|
mit
| 5,681
| 0
| 12
| 1,150
| 1,889
| 989
| 900
| -1
| -1
|
import Data.Numbers.Primes (isPrime, primes)
-- | All decimal rotations of a number, in order, ending with the
-- number itself. E.g. @rotations 197 == [971, 719, 197]@. Rotations
-- with a leading zero re-read as the shorter number (same as original).
rotations :: Integer -> [Integer]
rotations x = [ readInt (drop k digits ++ take k digits) | k <- [1 .. length digits] ]
  where
    digits = show x
    readInt str = read str :: Integer
-- | Count of circular primes below one million: primes all of whose
-- decimal rotations are also prime (Project Euler problem 35).
circularPrimes :: Int
circularPrimes = length $ filter f $ takeWhile (<1000000) primes
where f = all isPrime . rotations
-- circularPrimes == 55
|
samidarko/euler
|
problem035.hs
|
mit
| 447
| 3
| 12
| 137
| 212
| 110
| 102
| 11
| 2
|
module Main where
--------------------
-- Global Imports --
import Graphics.Rendering.OpenGL
import Graphics.UI.GLFW as GLFW
import Data.IORef
-------------------
-- Local Imports --
import Network
import Config
----------
-- Code --
-- | The callback for when the window should be closed: records the
-- request in the shared IORef (polled by the network loop) and returns
-- True to let GLFW proceed with the close.
makeWindowCloseCallback :: IORef Bool -> WindowCloseCallback
makeWindowCloseCallback closedRef = do
writeIORef closedRef True
return True
-- | The callback for when the window is resized: keeps the GL viewport
-- in sync with the new window size.
makeWindowSizeCallback :: WindowSizeCallback
makeWindowSizeCallback s = viewport $= (Position 0 0, s)
-- | The entry point to the program. Sets up GLFW and OpenGL state and
-- hands control to the Netwire network.
main :: IO ()
main = do
  initialize
  openWindow (Size glRenderWidth glRenderHeight) [DisplayRGBBits 8 8 8, DisplayAlphaBits 8, DisplayDepthBits 24] Window
  windowTitle $= "netwire-vinyl"

  -- Shared flag the close callback flips so the network loop can exit.
  closedRef <- newIORef False

  -- BUG FIX: the close callback was previously registered twice — an
  -- inline action followed by makeWindowCloseCallback — and the second
  -- registration overwrote the first, leaving it dead code. Register
  -- exactly once.
  windowCloseCallback $= makeWindowCloseCallback closedRef
  windowSizeCallback $= makeWindowSizeCallback

  -- Standard alpha blending for rendering.
  blend $= Enabled
  blendFunc $= (SrcAlpha, OneMinusSrcAlpha)

  runNetwork closedRef
  closeWindow
|
crockeo/netwire-vinyl
|
src/Main.hs
|
mit
| 1,205
| 0
| 10
| 202
| 243
| 123
| 120
| 27
| 1
|
-- -------------------------------------------------------------------------------------
-- Author: Sourabh S Joshi (cbrghostrider); Copyright - All rights reserved.
-- For email, run on linux (perl v5.8.5):
-- perl -e 'print pack "H*","736f75726162682e732e6a6f73686940676d61696c2e636f6d0a"'
-- -------------------------------------------------------------------------------------
--------------------------------------------------------------------------------------
-- Haskell Xonix: Author - Sourabh S Joshi
--------------------------------------------------------------------------------------
module XoPolyLine (evaluatePolyEdgeEvent) where
import XoData
import XoBoundingBox
import Data.List
-- a polyEvent will happen when the player either enters or exits a bounding box
-- the main flow is capped to simulate only one such event,
-- which considerably simplifies the making of polylines and the
-- disintegration/breaking-up of bounding boxes
-- the polyLine itself is stored as a list of points,
-- from the first break
-- to any subsequent turning points,
-- to the last break, which is immediately resolved
-- this is called when the player presses a direction key, so that a poly turn point can be added
-- beware that if the player presses this at the exact instant that we are on any polyLine, then we need to call
-- polyEventEnd and evaluate the whole simulation instead!
--
-- | Dispatch a polyline edge event: with no trace in progress the player
-- is leaving a bounding box (begin a new trace), otherwise the player is
-- re-entering one (finish the trace and resolve the cut).
evaluatePolyEdgeEvent :: XoGame -> XoEntity -> (XoShape, XoPoint3D) -> XoGame
evaluatePolyEdgeEvent g newPlayer intersect =
    if null ((points . shape . polyLine) g)
        then evaluatePolyEventBegin g newPlayer intersect
        else evaluatePolyEventEnd g newPlayer intersect
-- | Begin a new polyline trace at the point where the player leaves a
-- bounding-box edge.  The collision edge must be axis-parallel and wide
-- enough to be cut; the trace is seeded with just the collision point.
evaluatePolyEventBegin :: XoGame -> XoEntity -> (XoShape, XoPoint3D) -> XoGame
evaluatePolyEventBegin g newPlayer intersect@(shp@(XoLine p1@(px1, py1, pz1) p2@(px2, py2, pz2)), pc@(pcx, pcy, pcz))
  | (pcx /= px1 && pcy /= py1) = error "Polyline: Collision point isn't on the edge it is supposed to be on!"
  | (px1 == px2 || py1 == py2) = if (not (tooSmallCutEdge shp)) then g {polyLine = newPolyLine}
                                 else error "PolyLine: Found edge too small to be cut!"
  | otherwise = error "Polyline: beginning at non-x/y parallel edge!"
    where
      -- the new trace starts with only the collision point
      traceLine = XoTraceLine [pc]
      newPolyLine = (polyLine g) {shape = traceLine}
-- | Finish an in-progress polyline trace where the player re-enters a
-- bounding-box edge.  The collision point is appended to the trace, and
-- the cut is resolved depending on whether the trace starts and ends on
-- the same axis-parallel edge or on two different edges.
evaluatePolyEventEnd :: XoGame -> XoEntity -> (XoShape, XoPoint3D) -> XoGame
evaluatePolyEventEnd g newPlayer intersect@(shp@(XoLine p1@(px1, py1, pz1) p2@(px2, py2, pz2)), pc@(pcx, pcy, pcz))
  | (points.shape.polyLine) g == [] = error "Polyline: Cannot end polyLine that does not exist!"
  | (px1 == px2 && px1 == sx) || (py1 == py2 && py1 == sy) = evaluatePolyEndSameEdgesCut updatedG newPlayer intersect
  | otherwise = evaluatePolyEndDiffEdgesCut updatedG newPlayer intersect
    where
      polyLineRecord = polyLine g
      -- first point of the trace, i.e. where the cut began
      startLinePoint@(sx, sy, sz) = (head.points.shape) polyLineRecord
      -- close the trace by appending the collision point
      updatedPolyPoints = (points $ shape $ polyLineRecord) ++ [pc]
      updatedPolyShape = (shape polyLineRecord){points = updatedPolyPoints}
      updatedPolyRecord = polyLineRecord{shape = updatedPolyShape}
      updatedG = g{polyLine = updatedPolyRecord}
-- | Resolve a completed trace whose start and end lie on the SAME
-- axis-parallel edge of a polygon: the polygon is split into an inner
-- polygon (bounded by the left side of the trace) and the remaining main
-- polygon (bounded by the right side of the trace).
evaluatePolyEndSameEdgesCut :: XoGame -> XoEntity -> (XoShape, XoPoint3D) -> XoGame
evaluatePolyEndSameEdgesCut g newPlayer intersect@(shp@(XoLine p1@(px1, py1, pz1) p2@(px2, py2, pz2)), pc@(pcx, pcy, pcz)) = newGame
  where
    affectedPolygon = findAffectedPolygon g pc
    polyTraceOriginal = points $ shape $ polyLine g
    --find start and end points of original poly trace
    startPolyTrace@(spx, spy, spz) = head $ polyTraceOriginal
    endPolyTrace@(epx, epy, epz) = last $ polyTraceOriginal
    --find the edge where BB will be split into two BBs
    affectedEdge@(tl@(tx, ty, tz), hd@(hx, hy, hz)) = getAffectedEdgeFromPolygon affectedPolygon pc ((last.init) polyTraceOriginal)
    -- orientation test: compare travel direction along the edge with the
    -- relative position of the trace's endpoints
    isPolyTraceCCW
      | tx == hx = (ty < hy && spy > epy) || (ty > hy && spy < epy)
      | ty == hy = (tx < hx && spx > epx) || (tx > hx && spx < epx)
      | otherwise = error "PolyLine: Oopsie! messed up affected edge"
    polyTraceCCWOriented = if (isPolyTraceCCW) then polyTraceOriginal else reverse polyTraceOriginal
    -- find the inner and outer poly traces (polyWidth) apart and CCW oriented
    (leftPoly, rightPoly) = findCCWLeftRightPolyLines polyTraceCCWOriented affectedEdge affectedEdge
    -- FIXME, if start or end points of the leftPoly or the rightPoly are too close (<polyWidth) to the
    -- end point of the affected Edge, then make the end point of the poly the end pt of affected edge
    -- and also carry this delta manipulation in x or y over to one point before end pt of poly and
    -- one point after start point of poly
    -- note that in same edge cut case, you only need to do this for right poly
    -- but in different edge cut case, both new poly's qualify for this potential change
    -- end FIXME
    -- now splice these polyLine traces to get 2 BB from 1 (they may be empty which will be checked later)
    innerPolygon = affectedPolygon {shape = XoPolygon leftPoly}
    (prevPoints, (tl':laterPoints)) = break (\x -> x == tl) $ points $ shape $ affectedPolygon
    mainPolygon = affectedPolygon {shape = XoPolygon (prevPoints ++ (tl:rightPoly) ++ laterPoints) }
    -- then remove the original affected polygon, and replace it by these two
    newBBList = innerPolygon : mainPolygon : (delete affectedPolygon $ boundingBoxes g)
    --and remove this polyline which has been consumed
    newPolyShape = (shape $ polyLine g){points = []}
    newPolyLine = (polyLine g){shape = newPolyShape}
    newGame = g {boundingBoxes = newBBList, polyLine = newPolyLine}
-- | Resolve a completed trace whose start and end lie on two DIFFERENT
-- edges of a polygon: the polygon's point list is rotated so that the
-- entry edge's head comes first, then split at the exit edge's head into
-- two new polygons, each closed with one side of the trace.
evaluatePolyEndDiffEdgesCut :: XoGame -> XoEntity -> (XoShape, XoPoint3D) -> XoGame
evaluatePolyEndDiffEdgesCut g newPlayer intersect@(shp@(XoLine p1@(px1, py1, pz1) p2@(px2, py2, pz2)), pc@(pcx, pcy, pcz)) = newGame
  where
    affectedPolygon = findAffectedPolygon g pc
    polyTraceOriginal = points $ shape $ polyLine g
    startPolyTrace@(spx, spy, spz) = head $ polyTraceOriginal
    endPolyTrace@(epx, epy, epz) = last $ polyTraceOriginal
    --find the affected edges at entry and exit
    affEdgeEntry@(tlAffEntry@(tlAffEntx, tlAffEnty, tlAffEntz), hdAffEntry@(hdAffEntx, hdAffEnty, hdAffEntz))
      = getAffectedEdgeFromPolygon affectedPolygon startPolyTrace ((head.tail) polyTraceOriginal)
    affEdgeExit@(tlAffExit@(tlAffExx, tlAffExy, tlAffExz), hdAffExit@(hdAffExx, hdAffExy, hdAffExz))
      = getAffectedEdgeFromPolygon affectedPolygon endPolyTrace ((last.init) polyTraceOriginal)
    (leftPoly, rightPoly) = findCCWLeftRightPolyLines polyTraceOriginal affEdgeEntry affEdgeExit
    --FIXME: Keep in mind same FIXME as note above for SameEdgesCut version
    --end FIXME
    --now rearrange the original polygon points list, so that entry edge's head is listed first,
    --followed in CCW order by other points (including somewhere the exit edge's tail)
    --and ending obviously with the entry edge's tail
    --i.e. [entry edge head, ............., exit edge tail, exit edge head, ..........., entry edge tail]
    (ptsToMove, ptsToStart) = break (==hdAffEntry) (points $ shape affectedPolygon)
    rearrangedPolygon = ptsToStart ++ ptsToMove
    -- now get the points that go in the new polygon1 and polygon2 in CCW order also
    -- this is obtained by breaking on *head* of exit edge (draw pictures if you want to figure out why)
    (polygon1Pts, polygon2Pts) = break (==hdAffExit) rearrangedPolygon
    (polygon1AllPts, polygon2AllPts) = (polygon1Pts ++ rightPoly, polygon2Pts ++ leftPoly)
    polygon1 = affectedPolygon {shape = XoPolygon polygon1AllPts}
    polygon2 = affectedPolygon {shape = XoPolygon polygon2AllPts}
    -- then remove the original affected polygon, and replace it by these two
    newBBList = polygon1 : polygon2 : (delete affectedPolygon $ boundingBoxes g)
    --and remove this polyline which has been consumed
    newPolyShape = (shape $ polyLine g){points = []}
    newPolyLine = (polyLine g){shape = newPolyShape}
    newGame = g {boundingBoxes = newBBList, polyLine = newPolyLine}
-- identifies the bounding box the given point lies on, by checking if it lies on any of the edges of the BB
-- | Find the unique bounding box on whose boundary the given point lies.
-- It is an error for no polygon, or more than one, to contain the point.
findAffectedPolygon :: XoGame -> XoPoint3D -> XoEntity
findAffectedPolygon g intPt =
    case matches of
        []   -> error "PolyLine:Intersect point does not lie on any polygon!"
        [bb] -> bb
        _    -> error "PolyLine: Mutiple polygons contain intersect point!"
    where
        onBoundary bb = any (isPointOnEdge intPt) (getListOfBoundingBoxEdges bb)
        matches       = filter onBoundary (boundingBoxes g)
-- | Is the point on the (closed) axis-parallel segment?  The segment must
-- be vertical or horizontal; the point must share the fixed coordinate
-- and its free coordinate must lie between the two endpoints (inclusive).
-- (Between-ness here is equivalent to the original's "median of the three
-- sorted values equals the point's coordinate" test.)
isPointOnEdge :: XoPoint3D -> (XoPoint3D, XoPoint3D) -> Bool
isPointOnEdge ip@(ipx, ipy, ipz) ed@(p1@(p1x, p1y, p1z), p2@(p2x, p2y, p2z))
    | p1x == p2x && p1x == ipx = min p1y p2y <= ipy && ipy <= max p1y p2y
    | p1y == p2y && p1y == ipy = min p1x p2x <= ipx && ipx <= max p1x p2x
    | otherwise                = False
--find the edge of the polygon on which the poly start or end point lies
--checkpoint is the other point on the last segment of the poly (other than point of collision pc)
--we need checkpoint, in the case where intersection is at an edge (i.e. 2 edges)...
--we will pick the perpendicular edge
-- | Find the polygon edge on which the poly start/end point lies.  The
-- check point (the trace's other endpoint on its last segment) is used to
-- disambiguate when the point sits on a corner shared by two edges.
getAffectedEdgeFromPolygon :: XoEntity -> XoPoint3D -> XoPoint3D -> (XoPoint3D, XoPoint3D)
getAffectedEdgeFromPolygon checkPolygon pc checkPoint =
    case candidates of
        []       -> error "PolyLine: Oops! Too few affected edges."
        [e]      -> e
        [e1, e2] -> resolveTwoAffectedEdges pc checkPoint [(e1, True), (e2, True)]
        _        -> error "PolyLine: Oops! Too many (>2) affected edges."
    where
        candidates = filter (isPointOnEdge pc) (getListOfBoundingBoxEdges checkPolygon)
--pc is point of collision on edge
--cp is check point i.e. other point on the last segment of the poly
-- | Of two candidate edges meeting at the collision point, pick the one
-- perpendicular to the last segment of the polyline trace.
-- pc is the point of collision on the edge;
-- cp is the check point, i.e. the other point on the last poly segment.
resolveTwoAffectedEdges :: XoPoint3D -> XoPoint3D -> [((XoPoint3D, XoPoint3D), Bool)] -> (XoPoint3D, XoPoint3D)
resolveTwoAffectedEdges pc@(pcx, pcy, pcz) cp@(cpx, cpy, cpz) ( ((e1p1@(e1p1x, e1p1y, e1p1z), e1p2@(e1p2x, e1p2y, e1p2z)), _) : ((e2p1@(e2p1x, e2p1y, e2p1z), e2p2@(e2p2x, e2p2y, e2p2z)), _) :[])
    | (e1p1x == e1p2x) && (pcy == cpy) = (e1p1, e1p2) -- case where edge is parallel to y and poly is parallel to x
    | (e1p1y == e1p2y) && (pcx == cpx) = (e1p1, e1p2) -- case where edge is parallel to x and poly is parallel to y
    | (e2p1x == e2p2x) && (pcy == cpy) = (e2p1, e2p2) -- case where edge is parallel to y and poly is parallel to x
    | (e2p1y == e2p2y) && (pcx == cpx) = (e2p1, e2p2) -- case where edge is parallel to x and poly is parallel to y
    | otherwise = error "PolyLine: Cannot resolve 2 edge candidate case on poly completion!"
resolveTwoAffectedEdges _ _ _ = error "PolyLine: 2 edges expected!"
------------------------------------------------------------------------------------------
-- Utility functions
------------------------------------------------------------------------------------------
-- note that too small cut edges shouldn't arise, because we should be able to manipulate the
-- cut parts of a polygon (when cut previously by a polyLine) so that no super-thin edges remain
-- | True when an axis-parallel cut edge is thinner than 'polyLineWidth'
-- and therefore cannot be cut by a polyline.  Errors out on non
-- axis-parallel lines and on non-line shapes.
tooSmallCutEdge :: XoShape -> Bool
tooSmallCutEdge shp@(XoLine p1@(px1, py1, pz1) p2@(px2, py2, pz2))
    | (px1 == px2 && abs(py1 - py2) < polyLineWidth) = True -- super thin edge case
    | (py1 == py2 && abs(px1 - px2) < polyLineWidth) = True -- super thin edge case
    | (px1 /= px2 && py1 /= py2) = error "Cut edge is non x/y parallel"
    | otherwise = False
-- BUG FIX: this clause was previously misspelled "tooSmallCuttingEdge",
-- which silently defined a second, never-called function and left
-- tooSmallCutEdge partial (a crash) on non-line shapes.
tooSmallCutEdge _ = error "Found non-line shaped cutting edge"
-- this method, given a CCW polyline, splits it into two polylines,
-- one is to the left, and other to the right of the original polyline
-- the one to the left has its points arranged in CCW direction,
-- and the one to the right has its points arranged in a CCW direction also
-- this is so that we can easily splice these polylines to the appropriate polygons
-- | Split a CCW-oriented polyline into the two parallel traces lying half
-- a 'polyLineWidth' to its left and right.  The left trace keeps CCW
-- order; the right trace is reversed so both can be spliced directly into
-- the polygons they bound.  The entry and exit edges orient the first and
-- last displaced points.
findCCWLeftRightPolyLines :: [XoPoint3D] -> (XoPoint3D, XoPoint3D) -> (XoPoint3D, XoPoint3D) -> ([XoPoint3D], [XoPoint3D])
findCCWLeftRightPolyLines [] _ _ = error "PolyLine: Empty Polyline error!"
findCCWLeftRightPolyLines (p:[]) _ _ = error "PolyLine: Not enough poly line points error!"
findCCWLeftRightPolyLines pseq@(p1@(p1x, p1y, p1z):p2@(p2x, p2y, p2z):ps)
                          entry@(enttail@(enttx, entty, enttz), entryhead@(enthx, enthy, enthz))
                          eexit@(extail@(extx, exty, extz), exhead@(exhx, exhy, exhz))
    = (leftPoints, rightPoints)
    where
      headPoly@(hx, hy, hz) = head pseq
      lastPoly@(lx, ly, lz) = last pseq
      -- displaced (left, right) points for every interior trace point
      displacementPointsRaw = findCCWPolyLineDisplacementPoints pseq
      displacementPointsleftRight@(ls, rs) = (map fst displacementPointsRaw, map snd displacementPointsRaw)
      -- first displaced points, oriented by the entry edge's direction
      (startLeft, startRight)
        | enttx == enthx = if entty < enthy then ((hx, hy - t, hz), (hx, hy + t, hz))
                                            else ((hx, hy + t, hz), (hx, hy - t, hz))
        | entty == enthy = if enttx < enthx then ((hx - t, hy, hz), (hx + t, hy, hz))
                                            else ((hx + t, hy, hz), (hx - t, hy, hz))
        | otherwise = error "PolyLine: non x/y parallel coordinates!"
      -- last displaced points, oriented by the exit edge's direction
      (endLeft, endRight)
        | extx == exhx = if exty < exhy then ((lx, ly + t, lz), (lx, ly - t, lz))
                                        else ((lx, ly - t, lz), (lx, ly + t, lz))
        | exty == exhy = if extx < exhx then ((lx + t, ly, lz), (lx - t, ly, lz))
                                        else ((lx - t, ly, lz), (lx + t, ly, lz))
        | otherwise = error "PolyLine: non x/y parallel coordinates!"
      (leftPoints, rightPoints) = (startLeft:ls ++ [endLeft], reverse (startRight:rs ++ [endRight]))
      t = polyLineWidth `div` 2
--this method takes the polyLine points (in CCW direction) as input
--it disregards the very first and the very last point in the input
--it gives us a pair for every other polyline point
--this pair is a pair of the point to the (left, right) of the original polyLine point,
--if we walked the poly line in a CCW direction from start to end
--the left ones will be CCW when read in order
--the right ones will be CW when read in order
-- | For every interior point of a CCW polyline (the first and last points
-- are skipped), produce the pair of points (left, right) of the walker,
-- displaced diagonally by half the polyline width according to the turn
-- orientation at that point.
findCCWPolyLineDisplacementPoints :: [(XoPoint3D)] -> [(XoPoint3D, XoPoint3D)]
findCCWPolyLineDisplacementPoints (p1@(p1x, p1y, p1z):p2@(p2x, p2y, p2z):p3@(p3x, p3y, p3z):ps) = leftRightPair : findCCWPolyLineDisplacementPoints (p2:p3:ps)
    where
      leftRightPair
        | horRightLeftTurn = ((p2x - t , p2y + t, p2z), (p2x + t, p2y - t, p2z))
        | horRightRightTurn = ((p2x + t , p2y + t, p2z), (p2x - t, p2y - t, p2z))
        | horLeftRightTurn = ((p2x - t , p2y - t, p2z), (p2x + t, p2y + t, p2z))
        | horLeftLeftTurn = ((p2x + t , p2y - t, p2z), (p2x - t, p2y + t, p2z))
        | verUpRightTurn = ((p2x - t , p2y + t, p2z), (p2x + t, p2y - t, p2z))
        | verUpLeftTurn = ((p2x - t , p2y - t, p2z), (p2x + t, p2y + t, p2z))
        | verBottomLeftTurn = ((p2x + t , p2y + t, p2z), (p2x - t, p2y - t, p2z))
        | verBottomRightTurn = ((p2x + t , p2y - t, p2z), (p2x - t, p2y + t, p2z))
        | otherwise = error "PolyLine: Unknown turn orientation!"
      -- classify the turn at p2 by the travel direction p1->p2 and the
      -- side p3 falls on
      horRightLeftTurn = (p1y == p2y) && (p1x < p2x) && (p2y < p3y)
      horRightRightTurn = (p1y == p2y) && (p1x < p2x) && (p2y > p3y)
      horLeftRightTurn = (p1y == p2y) && (p1x > p2x) && (p2y < p3y)
      horLeftLeftTurn = (p1y == p2y) && (p1x > p2x) && (p2y > p3y)
      verUpRightTurn = (p1x == p2x) && (p1y < p2y) && (p2x < p3x)
      verUpLeftTurn = (p1x == p2x) && (p1y < p2y) && (p2x > p3x)
      verBottomLeftTurn = (p1x == p2x) && (p1y > p2y) && (p2x < p3x)
      verBottomRightTurn = (p1x == p2x) && (p1y > p2y) && (p2x > p3x)
      t = polyLineWidth `div` 2
-- fewer than three points: no interior points, hence no displacements
findCCWPolyLineDisplacementPoints _ = []
-- TODO: What to do in the following scenarios?
-- 1. Polyline trace is such that player is going parallel and along a BB edge?
-- *We might want to perturb the player such that it is just outside or inside the
-- polygon (preferably outside).
-- 2. A key is pressed, and it turns out that between the last player position,
-- and the key press, we had crossed into a BB. Do we have to do the simulation
-- by limiting the frameTime to entryTime, but still accomodate the turn later?
-- *I guess we can start polyLine, then simulate and get new game state
-- then start polyLine and turn it as desired, then simulate the new game state
-- and just display this last one. What if the first game state ends the game?
-- 3. polyline starts cutting through a very thin and long polygon, parallel to the long edge but in
-- the middle of the polygon thickness (very thin), and the outsides of the polyline fall
-- outside the polygon itself!?
-- *we should really be able to erase the polygon as the player moves!
-- 4. what if we are about to exit a polygon but turn at the last moment?
-- *record the last turn point as just enough away from the edge
|
cbrghostrider/Xonix
|
GamePlay/XoPolyLine.hs
|
mit
| 17,420
| 72
| 15
| 3,590
| 4,383
| 2,494
| 1,889
| 152
| 5
|
-- The MIT License (MIT)
--
-- Copyright (c) 2013 Jonathan McCluskey
--
-- Permission is hereby granted, free of charge, to any person obtaining a copy
-- of this software and associated documentation files (the "Software"), to deal
-- in the Software without restriction, including without limitation the rights
-- to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-- copies of the Software, and to permit persons to whom the Software is
-- furnished to do so, subject to the following conditions:
--
-- The above copyright notice and this permission notice shall be included in
-- all copies or substantial portions of the Software.
--
-- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-- LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-- OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-- THE SOFTWARE.
-- The agile project management program is intended to allow developers to have
-- a command-line interface to a text-based agile storage database. This database
-- can be initialized and stored along with the source and version controlled
-- along with the source
import System.Environment
import System.Directory
import System.IO
import System.IO.Error
import Data.List
--
-- The main function
--
main = argumentsParser `catch` exceptionHandler
--
-- Command-line argument parser
--
-- | Parse the command-line arguments: the first argument selects the
-- command, the rest are passed through as its options.  With no
-- arguments at all, print the usage text directly instead of relying on
-- a failed pattern match being caught as an IOError.
argumentsParser :: IO ()
argumentsParser = do
    args <- getArgs
    case args of
        (command:rest) -> handleCommand command rest
        []             -> usage
--
-- Handler for the first command-line argument
--
-- | Dispatch the first command-line argument to its command handler;
-- unknown commands print the usage text.
handleCommand :: String -> [String] -> IO ()
handleCommand "init"    _    = initialize
handleCommand "status"  _    = status
handleCommand "roadmap" args = roadmap args
handleCommand "feature" args = feature args
handleCommand "story"   args = story args
handleCommand "task"    args = task args
handleCommand _         _    = usage
--
-- Initializes the text-based agile storage database
--
-- | Create the .agile directory and its (initially empty) database
-- files, then report success.  appendFile with an empty string creates
-- each file without clobbering existing contents.
initialize :: IO ()
initialize = do
    createDirectoryIfMissing False ".agile"
    mapM_ (\file -> appendFile (".agile/" ++ file) "")
          ["roadmap", "features", "stories", "tasks"]
    putStrLn "Initialized."
--
-- Displays the status of the project
--
-- | Display the status of the project (not yet implemented).
status :: IO ()
status = putStrLn "Status does nothing yet."
--
-- Allows for adding, removing, and showing roadmap items
--
-- | Add, remove, and show roadmap items (not yet implemented).
roadmap :: [String] -> IO ()
roadmap _ = putStrLn "Roadmap does nothing yet."
--
-- Allows for adding, removing, and showing features
--
-- | Add, remove, and show features (not yet implemented).
feature :: [String] -> IO ()
feature _ = putStrLn "Feature does nothing yet."
--
-- Allows for adding, removing, and showing stories
--
-- | Add, remove, and show stories (not yet implemented).
story :: [String] -> IO ()
story _ = putStrLn "Story does nothing yet."
--
-- Allows for adding, removing, and showing tasks
--
-- | Dispatch the task subcommand: add, rm, or show.  Anything else
-- (including no subcommand) prints the usage text.
task :: [String] -> IO ()
task []         = usage
task (cmd:rest) = case cmd of
    "add"  -> taskAdd rest
    "rm"   -> taskRm rest
    "show" -> taskShow
    _      -> usage
--
-- Adds a task to the agile storage database
--
-- | Append a single task to the task database.  Anything other than
-- exactly one argument prints the usage text; previously there was no
-- catch-all clause, so two or more arguments crashed on a non-exhaustive
-- pattern match.
taskAdd :: [String] -> IO ()
taskAdd [todoItem] = do
    appendFile ".agile/tasks" (todoItem ++ "\n")
    putStrLn "Task added."
taskAdd _ = usage
--
-- Removes a task to the agile storage database
--
-- | Remove the task with the given zero-based index from the database by
-- rewriting the file through a temp file.  The index is now validated
-- with 'reads' and a bounds check (previously a non-numeric or
-- out-of-range argument crashed via 'read'/'!!' and leaked the temp
-- file); invalid input prints the usage text.  A catch-all clause covers
-- argument counts other than one.
taskRm :: [String] -> IO ()
taskRm [numberString] = do
    handle <- openFile ".agile/tasks" ReadMode
    (tempName, tempHandle) <- openTempFile "." "temp"
    contents <- hGetContents handle
    let todoTasks = lines contents
    case reads numberString of
        [(number, "")] | number >= 0 && number < length todoTasks -> do
            let newTodoItems = delete (todoTasks !! number) todoTasks
            hPutStr tempHandle $ unlines newTodoItems
            hClose handle
            hClose tempHandle
            removeFile ".agile/tasks"
            renameFile tempName ".agile/tasks"
            putStrLn "Task removed."
        _ -> do
            -- bad index: clean up the temp file instead of leaking it
            hClose handle
            hClose tempHandle
            removeFile tempName
            usage
taskRm _ = usage
--
-- Shows the tasks in the agile storage database
--
-- | Print every task in the database prefixed with its zero-based index.
taskShow :: IO ()
taskShow = do
    contents <- readFile ".agile/tasks"
    let numbered = zipWith (\n line -> show n ++ " - " ++ line) [0..] (lines contents)
    putStr (unlines numbered)
--
-- Exception Handler
--
-- | Fallback for any IOError raised while running a command: show the
-- usage text regardless of the specific error.
exceptionHandler :: IOError -> IO ()
exceptionHandler _ = usage
--
-- Displays the usage statement
--
-- | Print the command-line usage text, one line per entry.
usage :: IO ()
usage = mapM_ putStrLn
    [ "USAGE: agile command [options]"
    , " command:"
    , " init"
    , " status"
    , " roadmap"
    , " feature"
    , " story"
    , " task"
    , " options:"
    , " add [map=1,2,3] [date=YYYY/MM/DD] [phase=[new|planned|current|impeded|closed]] [title=\"Title\"] [body=\"Body\"] "
    , " update [number] [map=1,2,3] [map+=4] [map-=2] [date=YYYY/MM/DD] [phase=[new|planned|current|impeded|closed]] [title=\"Title\"] [body=\"Body\"] "
    , " rm [number]"
    , " show"
    , ""
    ]
|
gitjonathan/agile
|
agile.hs
|
mit
| 5,182
| 0
| 15
| 1,153
| 972
| 486
| 486
| 90
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module Common where
import Data.Int
import Data.Char
-- | Delta-encode a list: keep the first element and replace every later
-- element by its difference from its predecessor.  The empty list now
-- encodes to the empty list (the previous version crashed on [] via
-- 'head'/'tail').
deltaEncode :: [Int64] -> [Int64]
deltaEncode []           = []
deltaEncode whole@(x:xs) = x : zipWith (-) xs whole
-- | Invert the delta encoding by taking running sums of the deltas.
deltaDecode :: [Int64] -> [Int64]
deltaDecode []     = []
deltaDecode (x:xs) = scanl (+) x xs
-- | Normalise a search term: upper-case it, drop every character that is
-- not alphanumeric or whitespace, remove the words "NEAR" and "IN", and
-- finally strip the remaining spaces.
parseTerm' :: String -> String
parseTerm' = squash . dropKeywords . keepAlphaSpace
  where
    keepAlphaSpace = map toUpper . filter (\c -> isAlphaNum c || isSpace c)
    dropKeywords   = unwords . filter ("NEAR" /=) . filter ("IN" /=) . words
    squash         = filter isAlphaNum
-- | The first three characters of the normalised search term.
parseRootTerm :: String -> String
parseRootTerm term = take 3 (parseTerm' term)
|
ederoyd46/GeoIndex
|
src/Common.hs
|
mit
| 595
| 1
| 15
| 115
| 211
| 111
| 100
| 16
| 1
|
{-# LANGUAGE LambdaCase, GeneralizedNewtypeDeriving, FlexibleContexts #-}
module HsToCoq.Coq.Gallina.UseTypeInBinders (
useTypeInBinders, useTypeInBinders', UTIBError(..), UTIBIsTypeTooShort(..),
-- ** Monadic version that doesn't consolidate identical typed binders
useTypeInBindersM
) where
import Control.Lens
import Data.Bifunctor
import Data.Foldable
import Data.Maybe
import Data.List.NonEmpty (NonEmpty(..))
import HsToCoq.Util.Function
import Control.Monad.Except
import Control.Monad.State
import HsToCoq.Coq.Gallina
import HsToCoq.Coq.Gallina.Util
-- | Whether the type ran out of binder information before the binder
-- list was exhausted.
newtype UTIBIsTypeTooShort = UTIBIsTypeTooShort { utibIsTypeTooShort :: Bool }
                           deriving (Eq, Ord, Enum, Bounded, Show, Read)
-- | Ways a binder can disagree with the corresponding binder information
-- drawn from the type.
data UTIBError = UTIBMismatchedGeneralizability
               | UTIBMismatchedExplicitness
               | UTIBMismatchedBoth
               deriving (Eq, Ord, Enum, Bounded, Show, Read)
-- Module-local
-- Module-local: pop one binder's worth of information off the type held
-- in the state; 'Nothing' when the type has no binders left.
drain_binder :: MonadState Term m => m (Maybe BinderInfo)
drain_binder = do
  uncons <- gets unconsOneBinderFromType
  case uncons of
    Just (bi, t) -> put t >> pure (Just bi)
    Nothing      -> pure Nothing
-- Module-local
-- Module-local: report how a binder disagrees with the type's binder
-- information, if at all.
binder_match_errors :: Binder -> BinderInfo -> Maybe UTIBError
binder_match_errors b bi =
  case (badGeneralizability, badExplicitness) of
    (True,  True ) -> Just UTIBMismatchedBoth
    (True,  False) -> Just UTIBMismatchedGeneralizability
    (False, True ) -> Just UTIBMismatchedExplicitness
    (False, False) -> Nothing
  where
    badGeneralizability = binderGeneralizability b /= _biGeneralizability bi
    badExplicitness     = binderExplicitness b /= _biExplicitness bi
-- | Thread the binder information of the type (held in the state)
-- through the binder list, giving each binder the type's concrete type
-- where available.  Returns the rewritten binders and whether the type
-- ran out before the binders did; throws on generalizability or
-- explicitness mismatches.  Identical consecutive typed binders are NOT
-- consolidated here (see 'useTypeInBinders').
useTypeInBindersM :: (MonadError UTIBError m, MonadState Term m) => Binders -> m (Binders, UTIBIsTypeTooShort)
useTypeInBindersM (b :| bs) = drain_binder >>= \case
  Nothing -> pure (b :| bs, UTIBIsTypeTooShort True)
  Just bi@(BinderInfo g ei _ mtyp) -> do
    traverse_ throwError $ binder_match_errors b bi
    let newBinderNamed x = case mtyp of
                             Just typ -> Typed g ei (x :| []) typ
                             Nothing  -> mkBinder ei x -- Without a type, we can't be in the 'Generalizable' case
        newNamelessBinder = case mtyp of
                              Just typ -> Generalized ei typ
                              Nothing  -> error "INTERNAL ERROR: all generalized binders should have a concrete type"
                              -- We know that any 'Generalizable' 'Binder's have an actual type, not 'Nothing'
        continue b' mb'' = first (b' :|) <$> useTypeInBindersML (maybeToList mb'' ++ bs)
    case caseOneBinder b of
      Binder_ _ _ x _ mb' -> continue (newBinderNamed x) mb'
      Generalized_{}      -> continue newNamelessBinder Nothing
  where
    -- list-shaped worker so the recursion can continue on a possibly
    -- empty tail
    useTypeInBindersML []       = pure ([], UTIBIsTypeTooShort False)
    useTypeInBindersML (b : bs) = first toList <$> useTypeInBindersM (b :| bs)
-- | Push the binder information of a type into a binder list, returning
-- the leftover type, the rewritten (and consolidated) binders, and
-- whether the type was too short to cover every binder.
useTypeInBinders :: Term -> Binders -> Either UTIBError (Term, Binders, UTIBIsTypeTooShort)
useTypeInBinders ty bs = rearrange <$> runStateT (useTypeInBindersM bs) ty
  where rearrange ((bs', short), ty') = (ty', consolidateTypedBinders bs', short)
-- | Like 'useTypeInBinders', but discarding the too-short flag.
useTypeInBinders' :: Term -> Binders -> Either UTIBError (Term, Binders)
useTypeInBinders' ty bs = (\(ty', bs', _) -> (ty', bs')) <$> useTypeInBinders ty bs
|
antalsz/hs-to-coq
|
src/lib/HsToCoq/Coq/Gallina/UseTypeInBinders.hs
|
mit
| 3,257
| 0
| 20
| 740
| 863
| 454
| 409
| 54
| 6
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE DeriveFunctor #-}
module FVL.EvalAST
( Expr(..)
, RVal(..)
, valueTransform
, showCons
, evalTransform
) where
import FVL.Algebra
import qualified FVL.FAST as FAST
-- | The evaluator's expression language.  The two type parameters split
-- the recursive positions: @a@ marks positions that stay lazy/deferred
-- (branches, function bodies), @b@ marks positions folded over eagerly.
data Expr a b
    = CInt Integer
    | CBool Bool
    | CVar String
    | Add b b
    | Sub b b
    | Mul b b
    | Div b b
    | And b b
    | Or b b
    | Not b
    | Equal b b
    | Less b b
    | Empty
    | Cons b b
    | If b a a
    | Function String a
    | Appl b a
    | LetRec String String a a
    | Case b a String String a
    deriving Functor
-- | Flatten a Cons chain into the list of its cells; the final non-Cons
-- tail is kept as the last element.
showCons' :: LazyFix Expr -> [LazyFix Expr]
showCons' (Fx' (Cons x rest)) = x : showCons' rest
showCons' other               = [other]
-- | Render a Cons chain as "[a, b, c]".  Empty cells contribute nothing;
-- the "\b\b" seed emits two backspace characters to erase the trailing
-- ", " separator.  NOTE(review): this relies on the output device
-- interpreting backspaces -- confirm that is intended.
showCons :: LazyFix Expr -> String
showCons e = "[" ++ (foldr combine "\b\b" (showCons' e)) ++ "]"
    where combine (Fx' Empty) b = b
          combine a b = show a ++ ", " ++ b
-- | Pretty-print expressions with infix operator syntax.  Operands of
-- 'Not' and 'Appl' are parenthesised unless they are atoms (literals,
-- variables, or nested applications).
instance Show (LazyFix Expr) where
    show (Fx' (CInt n)) = show n
    show (Fx' (CBool b)) = show b
    show (Fx' (CVar s)) = s
    show (Fx' (x `Add` y)) = show x ++ " + " ++ show y
    show (Fx' (x `Sub` y)) = show x ++ " - " ++ show y
    show (Fx' (x `Mul` y)) = show x ++ " * " ++ show y
    show (Fx' (x `Div` y)) = show x ++ " / " ++ show y
    show (Fx' (x `And` y)) = show x ++ " && " ++ show y
    show (Fx' (x `Or` y)) = show x ++ " || " ++ show y
    show (Fx' (Not x)) = "!" ++ (case x of
        (Fx' (CBool b)) -> show b
        (Fx' (CVar s)) -> s
        _ -> "(" ++ show x ++ ")")
    show (Fx' (x `Equal` y)) = show x ++ " = " ++ show y
    show (Fx' (x `Less` y)) = show x ++ " < " ++ show y
    show (Fx' Empty) = "[]"
    show (Fx' (x `Cons` y)) = showCons . Fx' $ x `Cons` y
    show (Fx' (If p x y)) = "If " ++ show p ++ " Then " ++ show x ++ " Else " ++ show y
    show (Fx' (Function x p)) = "Function " ++ x ++ " -> " ++ show p
    show (Fx' (Appl f x)) = (case f of
        (Fx' (CInt n)) -> show n ++ " "
        (Fx' (CBool b)) -> show b ++ " "
        (Fx' (CVar s)) -> s ++ " "
        (Fx' (Appl _ _)) -> show f ++ " "
        _ -> "(" ++ show f ++ ") ") ++ (case x of
            (Fx' (CInt n)) -> show n
            (Fx' (CBool b)) -> show b
            (Fx' (CVar s)) -> s
            (Fx' (Appl _ _)) -> show x
            _ -> "(" ++ show x ++ ")")
    show (Fx' (LetRec f x p e))
        = "Let Rec " ++ f ++ " " ++ x ++ " = " ++ show p ++ " In " ++ show e
    show (Fx' (Case p x s t y)) = "Case " ++ show x ++ " Of [] -> " ++ show x
        ++ " | (" ++ s ++ ", " ++ t ++ ") -> " ++ show y
-- | Runtime values produced by evaluation: integers, booleans,
-- (unapplied) functions, and cons lists.
data RVal = RInt Integer
          | RBool Bool
          | RFunction String (LazyFix Expr)
          | REmpty
          | RCons RVal RVal
-- | Embed a runtime value back into the expression language.
valueTransform :: RVal -> LazyFix Expr
valueTransform v = case v of
    RInt n        -> Fx' (CInt n)
    RBool b       -> Fx' (CBool b)
    RFunction s p -> Fx' (Function s p)
    REmpty        -> Fx' Empty
    RCons x y     -> Fx' (Cons (valueTransform x) (valueTransform y))
-- | Values are shown by converting them to expressions first.
instance Show RVal where
    show = show . valueTransform
-- | Algebra mapping each source-AST constructor to the corresponding
-- evaluator-AST constructor (a purely structural, one-to-one renaming).
alg :: Algebra FAST.Expr (LazyFix Expr)
alg (FAST.CInt n) = Fx' $ CInt n
alg (FAST.CBool b) = Fx' $ CBool b
alg (FAST.CVar s) = Fx' $ CVar s
alg (FAST.Add x y) = Fx' $ Add x y
alg (FAST.Sub x y) = Fx' $ Sub x y
alg (FAST.Mul x y) = Fx' $ Mul x y
alg (FAST.Div x y) = Fx' $ Div x y
alg (FAST.And x y) = Fx' $ And x y
alg (FAST.Or x y) = Fx' $ Or x y
alg (FAST.Not x) = Fx' $ Not x
alg (FAST.Equal x y) = Fx' $ Equal x y
alg (FAST.Less x y) = Fx' $ Less x y
alg (FAST.Empty) = Fx' $ Empty
alg (FAST.Cons x y) = Fx' $ Cons x y
alg (FAST.If p x y) = Fx' $ If p x y
alg (FAST.Function s p) = Fx' $ Function s p
alg (FAST.Appl f x) = Fx' $ Appl f x
alg (FAST.LetRec f x p e) = Fx' $ LetRec f x p e
alg (FAST.Case p x s t y) = Fx' $ Case p x s t y
-- | Convert a source AST into the evaluator's AST by folding 'alg'.
evalTransform :: Fix FAST.Expr -> LazyFix Expr
evalTransform = cata alg
|
burz/Feval
|
FVL/EvalAST.hs
|
mit
| 3,798
| 0
| 15
| 1,217
| 2,059
| 1,040
| 1,019
| 108
| 2
|
-- lineage
-- By Gregory W. Schwartz
-- | Built-in
import qualified Data.ByteString.Lazy.Char8 as B
import qualified Data.Sequence as Seq
-- | Cabal
import Options.Applicative
import Data.Fasta.String
import Data.Aeson
-- | Local
import Types
import Utility
import Tree
-- Command line arguments
-- | Parsed command-line options (see 'options' for the flag spellings).
data Options = Options { input :: String          -- ^ input fasta file; "" reads stdin
                       , expandFlag :: Bool       -- ^ output the uncollapsed tree
                       , copyFlag :: Bool         -- ^ weight mutations by copy number
                       , aaFlag :: Bool           -- ^ sequences are amino acids, not DNA
                       , haskellFlag :: Bool      -- ^ print Show output instead of JSON
                       , inputCopyField :: Int    -- ^ 1-indexed header field holding copy number
                       , output :: String         -- ^ output file; "" writes stdout
                       }
-- Command line options
-- | optparse-applicative parser for 'Options'; the applicative chain
-- must stay in the same order as the 'Options' fields.
options :: Parser Options
options = Options
      <$> strOption
          ( long "input"
         <> short 'i'
         <> metavar "FILE"
         <> value ""
         <> help "The input fasta file, where the first entry is the root" )
      <*> switch
          ( long "expand-tree"
         <> short 'e'
         <> help "Whether to output the expanded tree with no collapsing of\
                 \ each mutation node" )
      <*> switch
          ( long "copy-number"
         <> short 'c'
         <> help "Whether to take copy number into account for the mutations" )
      <*> switch
          ( long "amino-acids"
         <> short 'a'
         <> help "Whether the sequences are DNA or proteins" )
      <*> switch
          ( long "haskell"
         <> short 'H'
         <> help "Whether to print the output as a haskell type" )
      <*> option auto
          ( long "input-copy-field"
         <> short 'C'
         <> value 1
         <> metavar "INT"
         <> help "The field (1 indexed) in the header\
                 \ which contains the copy number" )
      <*> strOption
          ( long "output"
         <> short 'o'
         <> metavar "FILE"
         <> value ""
         <> help "The output file containing the json tree" )
-- | Build the lineage tree from the fasta input (first entry is the
-- root), optionally collapse it, and write it out as JSON or as a
-- haskell 'show' dump, to stdout or to a file.
sharedTree :: Options -> IO ()
sharedTree opts = do
    contents <- if (null . input $ opts)
                    then getContents
                    else readFile . input $ opts

    let copyBool          = copyFlag opts
        copyIdx           = inputCopyField opts
        completeFastaList = parseFasta contents
        -- first fasta entry is the root of the tree
        root              = toEmptySuperFasta . head $ completeFastaList
        fastaList         = map ( assignMutations (aaFlag opts) root
                                . fastaToSuperFasta copyBool copyIdx)
                          . tail
                          $ completeFastaList
        tree              = createTree Nothing
                                       (Seq.fromList . superFastaSeq $ root)
                                       fastaList
        finalTree         = if expandFlag opts
                                then tree
                                else collapseTree [] tree

    -- Output results to stdin or file
    if (null . output $ opts)
        then
            if haskellFlag opts
                then putStrLn . show $ finalTree
                else B.putStrLn . encode $ finalTree
        else
            if haskellFlag opts
                then writeFile (output opts) . show $ finalTree
                else B.writeFile (output opts) . encode $ finalTree
-- | Parse the command line, then build and emit the lineage tree.
main :: IO ()
main = sharedTree =<< execParser opts
  where
    opts = info (helper <*> options)
        ( fullDesc
       <> progDesc "Create the lineage tree from a fasta file using shared mutations"
       <> header "lineage, Gregory W. Schwartz" )
|
GregorySchwartz/lineage
|
src/Main.hs
|
gpl-2.0
| 3,510
| 0
| 18
| 1,471
| 689
| 353
| 336
| 83
| 6
|
module Properties ( testProperties ) where
import Test.Framework
import Properties.CHK
import Properties.Statistics
import Properties.Types
-- | All property-test groups of the suite, one per module under test.
testProperties :: [Test]
testProperties = zipWith testGroup
    ["CHK", "stats", "types"]
    [chkTests, statsTests, typeTests]
|
waldheinz/ads
|
src/tests/Properties.hs
|
gpl-3.0
| 287
| 0
| 6
| 47
| 66
| 38
| 28
| 10
| 1
|
module Problem067 (answer) where
import qualified Problem018 as P18
-- | Read the triangle data file and solve it with the Problem 18 solver.
answer :: IO Int
answer = do
    triangle <- fmap readTriangle (readFile "./data/67.txt")
    return (P18.shortestPath 1 1 triangle)
-- | Parse whitespace-separated rows of integers, one row per input line.
readTriangle :: String -> [[Int]]
readTriangle = map (map read . words) . lines
|
geekingfrog/project-euler
|
Problem067.hs
|
gpl-3.0
| 313
| 0
| 10
| 55
| 110
| 57
| 53
| 9
| 1
|
{-# OPTIONS_GHC -F -pgmF htfpp #-}
module Y2015.Q.BTest where
import Control.Parallel.Strategies
import Test.Framework
import Y2015.Q.B
-- | Solve a whole raw problem file: drop the first (case-count) line, solve
-- the parsed cases in parallel (forced with rdeepseq), and label each answer
-- with its "Case #i: " prefix.
runsolve = unlines . showSolutions . parMap rdeepseq (show . solve) . parse . tail . lines
  where showSolutions = zipWith (++) ["Case #" ++ show i ++ ": " | i <- [1::Int ..]]
-- End-to-end check against the published sample input/output.
test_example :: IO ()
test_example = assertEqual example_solution $ runsolve example
-- Sample input from the problem statement (3 cases).
example = "3\n\
          \1\n\
          \3\n\
          \4\n\
          \1 2 1 2\n\
          \1\n\
          \4"
example_solution = "Case #1: 3\n\
                   \Case #2: 2\n\
                   \Case #3: 3\n"
-- Unit tests pinning 'solve' on small hand-checked cases.
test_case0 :: IO ()
test_case0 = assertEqual (Solution 1) $ solve (Problem [1])
test_case1 :: IO ()
test_case1 = assertEqual (Solution 6) $ solve (Problem [3,8,3,8])
test_case2 :: IO ()
test_case2 = assertEqual (Solution 5) $ solve (Problem [3,3,3,9])
test_case3 :: IO ()
test_case3 = assertEqual (Solution 99) $ solve (Problem [1..99])
test_case_max_small :: IO ()
test_case_max_small = assertEqual (Solution 9) $ solve (Problem $ replicate 6 9)
test_case_max_small2 :: IO ()
test_case_max_small2 = assertEqual (Solution 9) $ solve (Problem $ replicate 6 8 ++ [9])
test_case_max_small3 :: IO ()
test_case_max_small3 = assertEqual (Solution 5) $ solve (Problem [9,3])
test_case_max_small4 :: IO ()
test_case_max_small4 = assertEqual (Solution 6) $ solve (Problem [9,5])
test_case_max_small5 :: IO ()
test_case_max_small5 = assertEqual (Solution 7) $ solve (Problem [9,5,9])
test_case_max_small6 :: IO ()
test_case_max_small6 = assertEqual (Solution 8) $ solve (Problem [9,5,9,9])
-- Stress tests at the large-input bounds.
test_case_max_large :: IO ()
test_case_max_large = assertEqual (Solution 1000) $ solve (Problem $ replicate 1000 1000)
test_case_min_large :: IO ()
test_case_min_large = assertEqual (Solution 88) $ solve (Problem $ replicate 2 1000)
{-# ANN module ("hlint:ignore Use camelCase"::String) #-}
|
joranvar/GoogleCodeJam
|
Y2015/Q/BTest.hs
|
gpl-3.0
| 1,797
| 0
| 11
| 272
| 681
| 354
| 327
| 36
| 1
|
-----------------------------------------------------------------------------
-- |
-- Module : Numerical.PETSc.Internal.PutGet
-- Copyright : (c) Marco Zocca 2015
-- License : LGPL3
-- Maintainer : zocca . marco . gmail . com
-- Stability : experimental
--
-- | Mid-level interface: catching exceptions and hiding pointers in lexical
-- scope of `bracket`s
--
-----------------------------------------------------------------------------
module Numerical.PETSc.Internal.PutGet
( logViewStdout,
module X ) where
import Numerical.PETSc.Internal.PutGet.IS as X
import Numerical.PETSc.Internal.PutGet.SF as X
import Numerical.PETSc.Internal.PutGet.Vec as X
import Numerical.PETSc.Internal.PutGet.Mat as X
import Numerical.PETSc.Internal.PutGet.DM as X
import Numerical.PETSc.Internal.PutGet.KSP as X
import Numerical.PETSc.Internal.PutGet.PC as X
import Numerical.PETSc.Internal.PutGet.SNES as X
import Numerical.PETSc.Internal.PutGet.TS as X
import Numerical.PETSc.Internal.PutGet.TAO as X
import Numerical.PETSc.Internal.PutGet.Viewer as X
import Numerical.PETSc.Internal.PutGet.PetscMisc as X
import Numerical.PETSc.Internal.PutGet.EPS as X
import Numerical.PETSc.Internal.PutGet.SVD as X
import Numerical.PETSc.Internal.PutGet.SlepcMisc as X
import Numerical.PETSc.Internal.Types as X
-- | Start PETSc's default performance logging, then dump the collected log
-- through a stdout viewer on the given communicator.
-- NOTE(review): assumes 'petscLogDefaultBegin' is safe to call at this
-- point in the PETSc lifecycle -- confirm against the PETSc logging docs.
logViewStdout :: Comm -> IO ()
logViewStdout cc = do
  petscLogDefaultBegin
  withPetscViewStdout cc petscLogView
|
ocramz/petsc-hs
|
src/Numerical/PETSc/Internal/PutGet.hs
|
gpl-3.0
| 1,462
| 0
| 7
| 193
| 222
| 167
| 55
| 23
| 1
|
import Data.Char
import Data.List
import System.IO
import System.Environment
import Algebra
import Chemistry.XYZ
import System.Console.CmdArgs
import Ch_Geom2Molc_Opts
{- ############################################################## -}
{- "main" module, handling the IO and calling all other Functions -}
{- ############################################################## -}
-- | Read an XYZ geometry, sort its atoms element-wise, number the atoms
-- within each element group, and print Molcas-style
-- "ElementNumber  x y z angstrom" lines to stdout or to the output file.
main = do
  -- get input file, output file and list of atoms from command line arguments
  arguments <- cmdArgs ch_Geom2Molc_Opts
  -- bind input from command line to useful variables
  let inputFile = input arguments
      outputFile = output arguments
  -- reading the xyz file
  -- NOTE(review): hGetContents is lazy IO -- the input handle is never
  -- closed explicitly and stays open until the contents are fully forced.
  geomHandle <- openFile inputFile ReadMode
  geomRawContents <- hGetContents geomHandle
  let -- get the elements from the original file and the number of atoms
      moleculeElements = [getElement geomRawContents a | a <- [1..(nAtoms geomRawContents)]]
      -- NOTE(review): moleculeAtomNumber is bound but never used below.
      moleculeAtomNumber = nAtoms geomRawContents
      -- convert the atom coordinates to a list of float lists
      moleculeGeometryOrig = [getCoordFromAtom geomRawContents a | a <- [1..(nAtoms geomRawContents)]]
      -- zip the list and sort it elementwise
      moleculeZippedGeometrySorted = sort (zip moleculeElements moleculeGeometryOrig)
      -- now unzip again and manipulate the elemts to get a number
      moleculeSortedElements = fst (unzip moleculeZippedGeometrySorted)
      moleculeSortedCoordinates = snd (unzip moleculeZippedGeometrySorted)
      -- intermediately make sublists where same elements are grouped
      groupedSortedElements = group moleculeSortedElements
      -- count the length of each sublist
      numberSameElementAppears = [length (groupedSortedElements!!a) | a <- [0..(length groupedSortedElements - 1)]]
      -- use the number of each element to create a list that will be zipped with the elements
      numberingSameElementsList = [[1..(numberSameElementAppears!!a)] | a <- [0..(length groupedSortedElements - 1)]]
      -- zip together the grouped elements and their numbers in a few steps
      numberedElements_1 = zip groupedSortedElements [map show (numberingSameElementsList!!a) | a <- [0..(length groupedSortedElements -1)]]
      numberedElements_2 = [zip (fst (numberedElements_1!!a)) (snd (numberedElements_1!!a)) | a <- [0..(length numberedElements_1 - 1)]]
      numberedElements_3 = concat numberedElements_2
      -- zip numbered elements together with coordinates
      moleculeSortedGeometry = zip numberedElements_3 moleculeSortedCoordinates
  if (outputFile == "stdout")
    then do
      printOrderedXYZs stdout moleculeSortedGeometry
    else do
      outputHandle <- openFile outputFile WriteMode
      printOrderedXYZs outputHandle moleculeSortedGeometry
      hClose outputHandle
{- ############################## -}
{- Functions used in this program -}
{- ############################## -}
-- | Print one "ElementNumber    x    y    z angstrom" line per atom to the
-- given handle, inserting a blank line between runs of different elements.
-- Each entry must carry at least three coordinate components.
--
-- Rewritten without the partial (at!!0) lookup and the incomplete pattern
-- of the original, which crashed with an unhelpful pattern-match failure
-- when a coordinate list had fewer than three components.
printOrderedXYZs :: Handle -> [((String, String), [Float])] -> IO()
printOrderedXYZs _ [] = return ()
printOrderedXYZs file (((el, num), coords) : rest) = do
    hPutStrLn file (formatAtom el num coords)
    case rest of
      [] -> return ()
      (((el', _), _) : _)
        | el == el' -> printOrderedXYZs file rest
        | otherwise -> do
            -- blank line separates element groups, as in the original
            hPutStr file "\n"
            printOrderedXYZs file rest
  where
    -- one atom line; an explicit error beats a bare pattern-match crash
    formatAtom e n (x:y:z:_) =
        e ++ n ++ "    " ++ show x ++ "    " ++ show y ++ "    " ++ show z ++ " angstrom"
    formatAtom _ _ _ =
        error "printOrderedXYZs: atom needs at least 3 coordinates"
|
sheepforce/Haskell-Tools
|
ch_geom2molc/Ch_Geom2Molc.hs
|
gpl-3.0
| 3,394
| 6
| 17
| 651
| 764
| 401
| 363
| 44
| 2
|
module Handler.DownloadSpec (spec) where
import TestImport
-- | Placeholder suite for the download handler.  The 'error' call makes the
-- suite fail loudly until a real test for getDownloadR is written.
spec :: Spec
spec = withApp $ do
    describe "getDownloadR" $ do
        error "Spec not implemented: getDownloadR"
|
ackao/APRICoT
|
web/conference-management-system/test/Handler/DownloadSpec.hs
|
gpl-3.0
| 180
| 0
| 11
| 39
| 44
| 23
| 21
| 6
| 1
|
module HMbo.ManyBodyOperatorSpec (spec) where
import Test.Hspec
import Control.Exception (evaluate)
import HMbo
import Data.Maybe
import qualified Data.Vector.Unboxed as VU
import Data.Complex (conjugate, realPart)
import Data.Monoid
-- | Inner product @\<bra| op |ket\>@: apply the operator to the ket and
-- contract the result against the conjugated bra.
matrixElement :: Ket -> ManyBodyOperator -> Ket -> Amplitude
matrixElement bra op ket =
    VU.foldl1 (+) (VU.zipWith (*) (VU.map conjugate bra) (apply op ket))
-- | Standard basis vector of dimension @d@ with a single 1.0 at index @i@
-- (all-zero when @i@ lies outside @[0, d)@, exactly as before).
basisState :: Int -> Int -> Ket
basisState d i = VU.generate d component
  where
    component j
      | j == i    = 1.0
      | otherwise = 0.0
-- | Matrix element of @op@ between the @i@-th and @j@-th basis states.
aij :: ManyBodyOperator -> Int -> Int -> Amplitude
aij op i j = matrixElement bra op ket
  where
    dim = fromDim (getDim op)
    bra = basisState dim i
    ket = basisState dim j
-- | Absolute closeness of two amplitudes within tolerance @tol@.
isClose :: Double -> Amplitude -> Amplitude -> Bool
isClose tol x y = realPart (abs (x - y)) < tol
-- | Default numerical tolerance used by the closeness checks in this suite.
defTol :: Double
defTol = 1.0e-12
-- | An operator is Hermitian when every element equals the conjugate of its
-- transposed partner; checking the lower triangle suffices.
isHermitian :: Double -> ManyBodyOperator -> Bool
isHermitian tol op = all conjSymmetric lowerTriangle
  where
    n = fromDim (getDim op)
    lowerTriangle = [(i, j) | i <- [0 .. n - 1], j <- [0 .. i]]
    conjSymmetric (i, j) =
        isClose tol (aij op i j) (conjugate (aij op j i))
-- | True when every off-diagonal element is numerically zero.
isDiagonal :: Double -> ManyBodyOperator -> Bool
isDiagonal tol op = all offDiagonalZero indexPairs
  where
    n = fromDim (getDim op)
    indexPairs = [(i, j) | i <- [0 .. n - 1], j <- [0 .. n - 1]]
    offDiagonalZero (i, j) = i == j || isClose tol 0.0 (aij op i j)
-- | Element-wise comparison of two operators within tolerance @tol@.
-- Both operators are assumed to share the dimension of @a@.
--
-- Fix: the index ranges previously ran over @[0..d]@ inclusive -- one past
-- the last valid basis index @d - 1@ used by every other helper here.  The
-- extra comparisons were made against degenerate all-zero basis vectors
-- (trivially equal), so they wasted work and masked nothing; the range now
-- matches 'isHermitian' and 'isDiagonal'.
isCloseMat :: Double -> ManyBodyOperator -> ManyBodyOperator -> Bool
isCloseMat tol a b =
  and [isClose tol (aij a i j) (aij b i j)
      | i <- [0..(d - 1)], j <- [0..(d - 1)]]
  where
    d = fromDim $ getDim a
-- | Behavioural specification of the 'ManyBodyOperator' combinators,
-- exercised through the matrix-element helpers defined above.
spec :: Spec
spec = do
  describe "Null Matrix" $ do
    it "Can be built from a dimension." $
      getDim (zero (fromJust $ toDim 2)) `shouldBe` fromJust (toDim 2)
    it "Does not change dimension when scaled." $
      getDim (scale 3.7 $ zero (fromJust (toDim 2)))
        `shouldBe` fromJust (toDim 2)
  describe "Identity Matrix" $ do
    it "Can be built from a dimension." $
      getDim (eye (fromJust (toDim 2))) `shouldBe` fromJust (toDim 2)
    it "Does not change dimension when scaled." $
      getDim (scale 3.7 $ eye (fromJust (toDim 2)))
        `shouldBe` fromJust (toDim 2)
  describe "ketBra" $ do
    it "Can be constructed from valid indices." $ do
      ketBra (fromJust $ toDim 2) 0 0 1.0 `shouldSatisfy` isJust
      ketBra (fromJust $ toDim 2) 0 1 1.0 `shouldSatisfy` isJust
    it "Cannot be constructed from negative indices." $ do
      ketBra (fromJust $ toDim 2) (-1) 0 1.0 `shouldBe` Nothing
      ketBra (fromJust $ toDim 2) 0 (-1) 1.0 `shouldBe` Nothing
    it "Cannot be constructed from indices that are too large." $
      ketBra (fromJust (toDim 2)) 2 0 1.0 `shouldBe` Nothing
  describe "kron" $ do
    it "Multiplies dimensions." $
      getDim (
        kron (eye (fromJust (toDim 3)))
             (eye (fromJust $ toDim 3))) `shouldBe` fromJust (toDim 9)
    it "Results in ScaledId when both of the operators are identites." $
      kron
        (eye (fromJust (toDim 3)))
        (eye (fromJust (toDim 3)))
        `shouldBe` eye (fromJust (toDim 9))
    it
      "Produces Hermitian operators when applied to Hermitian operators." $
      do
        kron sigmaX sigmaY `shouldSatisfy` isHermitian defTol
        kron sigmaX (eye (fromJust $ toDim 4)) `shouldSatisfy`
          isHermitian defTol
    it
      "Produces a diagonal operator when supplied with diagonal operators." $
      do
        kron sigmaZ sigmaZ `shouldSatisfy` isDiagonal defTol
        kron (eye 3) sigmaZ `shouldSatisfy` isDiagonal defTol
        kron sigmaZ (eye 3) `shouldSatisfy` isDiagonal defTol
  describe "The Kronecker product Monoid" $ do
    it "Has an identity of dimension 1." $
      getDim (mempty :: ManyBodyOperator) `shouldBe` 1
    it "Gives the same result as kron." $
      sigmaX <> sigmaY `shouldSatisfy`
        isCloseMat defTol (sigmaX `kron` sigmaY)
  describe "add" $ do
    it "Results in error if dimensions don't match." $ do
      evaluate (add
        (eye (fromJust (toDim 2)))
        (eye (fromJust (toDim 3))))
        `shouldThrow` errorCall "add: Dimensions don't match."
    it "Adds matrix entries." $ do
      let op1 = sigmaZ `kron` (eye 2)
      let op2 = sigmaX `kron` sigmaY
      let theSum = op1 `add` op2
      aij theSum 0 1 `shouldSatisfy`
        isClose defTol ((aij op1 0 1) + (aij op2 0 1))
      aij theSum 2 1 `shouldSatisfy`
        isClose defTol ((aij op1 2 1) + (aij op2 2 1))
      aij theSum 0 0 `shouldSatisfy`
        isClose defTol ((aij op1 0 0) + (aij op2 0 0))
  describe "apply" $ do
    it "Returns nothing when dimensions don't match." $ do
      let v = VU.fromList [1.3, 3.4] :: Ket
      let d = fromJust $ toDim 3
      evaluate (apply (eye d) v) `shouldThrow`
        errorCall "nApply: operator dimension doesn't match vector dimensions"
  describe "Identity" $
    it "Returns vectors unchanged." $ do
      let v = VU.fromList [1.3, 3.4] :: Ket
      let d = fromJust $ toDim 2
      apply (eye d) v `shouldBe` v
  describe "Zero operator" $
    it "Returns 0." $ do
      let v = VU.fromList [1.3, 3.4] :: Ket
      let w = VU.replicate 2 0 :: Ket
      let d = fromJust $ toDim 2
      apply (zero d) v `shouldBe` w
  describe "transpose" $ do
    it "Returns the same vector when transposed with pivot 1." $ do
      let v = VU.fromList [1.3, 3.4] :: Ket
      transpose 1 v `shouldBe` v
    it "Returns the same vector when transposed with pivot (length v)." $ do
      let v = VU.fromList [1.3, 3.4] :: Ket
      transpose (VU.length v) v `shouldBe` v
    it "Works for a typical case" $ do
      let v = VU.fromList [1, 2, 3, 4] :: Ket
      let w = VU.fromList [1, 3, 2, 4] :: Ket
      transpose 2 v `shouldBe` w
  describe "sigmaZ" $ do
    it "Has no off-diagonal elements." $
      sigmaZ `shouldSatisfy` isDiagonal defTol
    it "Considers the 0 state as spin down." $ do
      aij sigmaZ 0 0 `shouldSatisfy` isClose defTol (-1.0)
    it "Considers the 1 state as spin up." $ do
      aij sigmaZ 1 1 `shouldSatisfy` isClose defTol (1.0)
  describe "sigmaX" $ do
    it "Has no diagonal elements." $ do
      aij sigmaX 0 0 `shouldSatisfy` isClose defTol 0.0
      aij sigmaX 1 1 `shouldSatisfy` isClose defTol 0.0
    it "Is Hermitian." $
      sigmaX `shouldSatisfy` isHermitian defTol
    it "Is equal to the sum of sigmaPlus and sigmaMinus" $
      sigmaX `shouldSatisfy`
        isCloseMat defTol (sigmaPlus `add` sigmaMinus)
  describe "sigmaY" $ do
    it "Has no diagonal elements." $ do
      aij sigmaY 0 0 `shouldSatisfy` isClose defTol 0.0
      aij sigmaY 1 1 `shouldSatisfy` isClose defTol 0.0
    it "Is Hermitian." $
      sigmaY `shouldSatisfy` isHermitian defTol
  describe "sigmaPlus" $ do
    it "Is equal to zero when applied twice." $ do
      let two = fromJust (toDim 2)
      (sigmaPlus `mul` sigmaPlus) `shouldSatisfy`
        isCloseMat defTol (zero two)
  describe "sigmaMinus" $ do
    it "Is equal to zero when applied twice." $ do
      let two = fromJust (toDim 2)
      (sigmaMinus `mul` sigmaMinus) `shouldSatisfy`
        isCloseMat defTol (zero two)
    it "Returns sigmaZ when commuted with sigmaMinus." $
      ((sigmaPlus `mul` sigmaMinus) `add`
        (scale (-1.0) $ sigmaMinus `mul` sigmaPlus)) `shouldSatisfy`
        isCloseMat defTol sigmaZ
  describe "numberOperator" $ do
    it "Is diagonal." $ do
      let d = fromJust $ toDim 3
      numberOperator d `shouldSatisfy` isDiagonal defTol
    it "Is Hermitian." $ do
      let d = fromJust $ toDim 3
      numberOperator d `shouldSatisfy` isHermitian defTol
    it "Is equal to a^dagger a." $ do
      let d = fromJust $ toDim 4
      numberOperator d `shouldSatisfy` isCloseMat defTol
        (creationOperator d `mul` annihilationOperator d)
|
d-meiser/hmbo
|
tests/HMbo/ManyBodyOperatorSpec.hs
|
gpl-3.0
| 7,690
| 0
| 22
| 2,065
| 2,810
| 1,381
| 1,429
| 179
| 1
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.ProximityBeacon.Types.Product
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.ProximityBeacon.Types.Product where
import Network.Google.Prelude
import Network.Google.ProximityBeacon.Types.Sum
-- | An object representing a latitude\/longitude pair. This is expressed as
-- a pair of doubles representing degrees latitude and degrees longitude.
-- Unless specified otherwise, this must conform to the
-- <http://www.unoosa.org/pdf/icg/2012/template/WGS_84.pdf WGS84 standard>.
-- Values must be within normalized ranges.
--
-- /See:/ 'latLng' smart constructor.
-- NOTE(review): auto-generated Gogol product type (see module header);
-- manual edits are normally overwritten by the code generator.
data LatLng =
  LatLng'
    { _llLatitude :: !(Maybe (Textual Double))
      -- ^ degrees latitude; valid range [-90.0, +90.0]
    , _llLongitude :: !(Maybe (Textual Double))
      -- ^ degrees longitude; valid range [-180.0, +180.0]
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'LatLng' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'llLatitude'
--
-- * 'llLongitude'
latLng
    :: LatLng
latLng = LatLng' {_llLatitude = Nothing, _llLongitude = Nothing}
-- | The latitude in degrees. It must be in the range [-90.0, +90.0].
llLatitude :: Lens' LatLng (Maybe Double)
llLatitude
  = lens _llLatitude (\ s a -> s{_llLatitude = a}) .
      mapping _Coerce
-- | The longitude in degrees. It must be in the range [-180.0, +180.0].
llLongitude :: Lens' LatLng (Maybe Double)
llLongitude
  = lens _llLongitude (\ s a -> s{_llLongitude = a}) .
      mapping _Coerce
instance FromJSON LatLng where
        parseJSON
          = withObject "LatLng"
              (\ o ->
                 LatLng' <$>
                   (o .:? "latitude") <*> (o .:? "longitude"))
instance ToJSON LatLng where
        toJSON LatLng'{..}
          = object
              (catMaybes
                 [("latitude" .=) <$> _llLatitude,
                  ("longitude" .=) <$> _llLongitude])
-- | A subset of attachment information served via the
-- \`beaconinfo.getforobserved\` method, used when your users encounter
-- your beacons.
--
-- /See:/ 'attachmentInfo' smart constructor.
-- NOTE(review): auto-generated Gogol product type (see module header);
-- manual edits are normally overwritten by the code generator.
data AttachmentInfo =
  AttachmentInfo'
    { _aiMaxDistanceMeters :: !(Maybe (Textual Double))
    , _aiData :: !(Maybe Bytes)
    , _aiNamespacedType :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'AttachmentInfo' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aiMaxDistanceMeters'
--
-- * 'aiData'
--
-- * 'aiNamespacedType'
attachmentInfo
    :: AttachmentInfo
attachmentInfo =
  AttachmentInfo'
    { _aiMaxDistanceMeters = Nothing
    , _aiData = Nothing
    , _aiNamespacedType = Nothing
    }
-- | The distance away from the beacon at which this attachment should be
-- delivered to a mobile app. Setting this to a value greater than zero
-- indicates that the app should behave as if the beacon is \"seen\" when
-- the mobile device is less than this distance away from the beacon.
-- Different attachments on the same beacon can have different max
-- distances. Note that even though this value is expressed with fractional
-- meter precision, real-world behavior is likley to be much less precise
-- than one meter, due to the nature of current Bluetooth radio technology.
-- Optional. When not set or zero, the attachment should be delivered at
-- the beacon\'s outer limit of detection.
aiMaxDistanceMeters :: Lens' AttachmentInfo (Maybe Double)
aiMaxDistanceMeters
  = lens _aiMaxDistanceMeters
      (\ s a -> s{_aiMaxDistanceMeters = a})
      . mapping _Coerce
-- | An opaque data container for client-provided data.
aiData :: Lens' AttachmentInfo (Maybe ByteString)
aiData
  = lens _aiData (\ s a -> s{_aiData = a}) .
      mapping _Bytes
-- | Specifies what kind of attachment this is. Tells a client how to
-- interpret the \`data\` field. Format is 'namespace\/type', for example
-- 'scrupulous-wombat-12345\/welcome-message'
aiNamespacedType :: Lens' AttachmentInfo (Maybe Text)
aiNamespacedType
  = lens _aiNamespacedType
      (\ s a -> s{_aiNamespacedType = a})
instance FromJSON AttachmentInfo where
        parseJSON
          = withObject "AttachmentInfo"
              (\ o ->
                 AttachmentInfo' <$>
                   (o .:? "maxDistanceMeters") <*> (o .:? "data") <*>
                     (o .:? "namespacedType"))
instance ToJSON AttachmentInfo where
        toJSON AttachmentInfo'{..}
          = object
              (catMaybes
                 [("maxDistanceMeters" .=) <$> _aiMaxDistanceMeters,
                  ("data" .=) <$> _aiData,
                  ("namespacedType" .=) <$> _aiNamespacedType])
-- | Properties of the beacon device, for example battery type or firmware
-- version. Optional.
--
-- /See:/ 'beaconProperties' smart constructor.
-- NOTE(review): auto-generated Gogol product type (see module header);
-- manual edits are normally overwritten by the code generator.
newtype BeaconProperties =
  BeaconProperties'
    { _bpAddtional :: HashMap Text Text
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'BeaconProperties' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'bpAddtional'
beaconProperties
    :: HashMap Text Text -- ^ 'bpAddtional'
    -> BeaconProperties
beaconProperties pBpAddtional_ =
  BeaconProperties' {_bpAddtional = _Coerce # pBpAddtional_}
-- | Key-value properties of the beacon (e.g. battery type or firmware
-- version, per the type description above).
bpAddtional :: Lens' BeaconProperties (HashMap Text Text)
bpAddtional
  = lens _bpAddtional (\ s a -> s{_bpAddtional = a}) .
      _Coerce
instance FromJSON BeaconProperties where
        parseJSON
          = withObject "BeaconProperties"
              (\ o -> BeaconProperties' <$> (parseJSONObject o))
instance ToJSON BeaconProperties where
        toJSON = toJSON . _bpAddtional
-- | A generic empty message that you can re-use to avoid defining duplicated
-- empty messages in your APIs. A typical example is to use it as the
-- request or the response type of an API method. For instance: service Foo
-- { rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); } The
-- JSON representation for \`Empty\` is empty JSON object \`{}\`.
--
-- /See:/ 'empty' smart constructor.
-- NOTE(review): auto-generated Gogol product type (see module header);
-- manual edits are normally overwritten by the code generator.
data Empty =
  Empty'
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'Empty' with the minimum fields required to make a request.
--
empty
    :: Empty
empty = Empty'
instance FromJSON Empty where
        parseJSON = withObject "Empty" (\ o -> pure Empty')
instance ToJSON Empty where
        toJSON = const emptyObject
-- | Response for a request to delete attachments.
--
-- /See:/ 'deleteAttachmentsResponse' smart constructor.
-- NOTE(review): auto-generated Gogol product type (see module header);
-- manual edits are normally overwritten by the code generator.
newtype DeleteAttachmentsResponse =
  DeleteAttachmentsResponse'
    { _darNumDeleted :: Maybe (Textual Int32)
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'DeleteAttachmentsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'darNumDeleted'
deleteAttachmentsResponse
    :: DeleteAttachmentsResponse
deleteAttachmentsResponse =
  DeleteAttachmentsResponse' {_darNumDeleted = Nothing}
-- | The number of attachments that were deleted.
darNumDeleted :: Lens' DeleteAttachmentsResponse (Maybe Int32)
darNumDeleted
  = lens _darNumDeleted
      (\ s a -> s{_darNumDeleted = a})
      . mapping _Coerce
instance FromJSON DeleteAttachmentsResponse where
        parseJSON
          = withObject "DeleteAttachmentsResponse"
              (\ o ->
                 DeleteAttachmentsResponse' <$> (o .:? "numDeleted"))
instance ToJSON DeleteAttachmentsResponse where
        toJSON DeleteAttachmentsResponse'{..}
          = object
              (catMaybes [("numDeleted" .=) <$> _darNumDeleted])
-- | Request for beacon and attachment information about beacons that a
-- mobile client has encountered \"in the wild\".
--
-- /See:/ 'getInfoForObservedBeaconsRequest' smart constructor.
-- NOTE(review): auto-generated Gogol product type (see module header);
-- manual edits are normally overwritten by the code generator.
data GetInfoForObservedBeaconsRequest =
  GetInfoForObservedBeaconsRequest'
    { _gifobrObservations :: !(Maybe [Observation])
    , _gifobrNamespacedTypes :: !(Maybe [Text])
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'GetInfoForObservedBeaconsRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gifobrObservations'
--
-- * 'gifobrNamespacedTypes'
getInfoForObservedBeaconsRequest
    :: GetInfoForObservedBeaconsRequest
getInfoForObservedBeaconsRequest =
  GetInfoForObservedBeaconsRequest'
    {_gifobrObservations = Nothing, _gifobrNamespacedTypes = Nothing}
-- | The beacons that the client has encountered. At least one must be given.
gifobrObservations :: Lens' GetInfoForObservedBeaconsRequest [Observation]
gifobrObservations
  = lens _gifobrObservations
      (\ s a -> s{_gifobrObservations = a})
      . _Default
      . _Coerce
-- | Specifies what kind of attachments to include in the response. When
-- given, the response will include only attachments of the given types.
-- When empty, no attachments will be returned. Must be in the format
-- 'namespace\/type'. Accepts \`*\` to specify all types in all namespaces
-- owned by the client. Optional.
gifobrNamespacedTypes :: Lens' GetInfoForObservedBeaconsRequest [Text]
gifobrNamespacedTypes
  = lens _gifobrNamespacedTypes
      (\ s a -> s{_gifobrNamespacedTypes = a})
      . _Default
      . _Coerce
instance FromJSON GetInfoForObservedBeaconsRequest
         where
        parseJSON
          = withObject "GetInfoForObservedBeaconsRequest"
              (\ o ->
                 GetInfoForObservedBeaconsRequest' <$>
                   (o .:? "observations" .!= mempty) <*>
                     (o .:? "namespacedTypes" .!= mempty))
instance ToJSON GetInfoForObservedBeaconsRequest
         where
        toJSON GetInfoForObservedBeaconsRequest'{..}
          = object
              (catMaybes
                 [("observations" .=) <$> _gifobrObservations,
                  ("namespacedTypes" .=) <$> _gifobrNamespacedTypes])
-- | An attachment namespace defines read and write access for all the
-- attachments created under it. Each namespace is globally unique, and
-- owned by one project which is the only project that can create
-- attachments under it.
--
-- /See:/ 'namespace' smart constructor.
-- NOTE(review): auto-generated Gogol product type (see module header);
-- manual edits are normally overwritten by the code generator.
data Namespace =
  Namespace'
    { _nServingVisibility :: !(Maybe NamespaceServingVisibility)
    , _nNamespaceName :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'Namespace' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'nServingVisibility'
--
-- * 'nNamespaceName'
namespace
    :: Namespace
namespace =
  Namespace' {_nServingVisibility = Nothing, _nNamespaceName = Nothing}
-- | Specifies what clients may receive attachments under this namespace via
-- \`beaconinfo.getforobserved\`.
nServingVisibility :: Lens' Namespace (Maybe NamespaceServingVisibility)
nServingVisibility
  = lens _nServingVisibility
      (\ s a -> s{_nServingVisibility = a})
-- | Resource name of this namespace. Namespaces names have the format:
-- 'namespaces\/namespace'.
nNamespaceName :: Lens' Namespace (Maybe Text)
nNamespaceName
  = lens _nNamespaceName
      (\ s a -> s{_nNamespaceName = a})
instance FromJSON Namespace where
        parseJSON
          = withObject "Namespace"
              (\ o ->
                 Namespace' <$>
                   (o .:? "servingVisibility") <*>
                     (o .:? "namespaceName"))
instance ToJSON Namespace where
        toJSON Namespace'{..}
          = object
              (catMaybes
                 [("servingVisibility" .=) <$> _nServingVisibility,
                  ("namespaceName" .=) <$> _nNamespaceName])
-- | Write-only registration parameters for beacons using Eddystone-EID
-- format. Two ways of securely registering an Eddystone-EID beacon with
-- the service are supported: 1. Perform an ECDH key exchange via this API,
-- including a previous call to \`GET \/v1beta1\/eidparams\`. In this case
-- the fields \`beacon_ecdh_public_key\` and \`service_ecdh_public_key\`
-- should be populated and \`beacon_identity_key\` should not be populated.
-- This method ensures that only the two parties in the ECDH key exchange
-- can compute the identity key, which becomes a secret between them. 2.
-- Derive or obtain the beacon\'s identity key via other secure means
-- (perhaps an ECDH key exchange between the beacon and a mobile device or
-- any other secure method), and then submit the resulting identity key to
-- the service. In this case \`beacon_identity_key\` field should be
-- populated, and neither of \`beacon_ecdh_public_key\` nor
-- \`service_ecdh_public_key\` fields should be. The security of this
-- method depends on how securely the parties involved (in particular the
-- bluetooth client) handle the identity key, and obviously on how securely
-- the identity key was generated. See [the Eddystone
-- specification](https:\/\/github.com\/google\/eddystone\/tree\/master\/eddystone-eid)
-- at GitHub.
--
-- /See:/ 'ephemeralIdRegistration' smart constructor.
-- NOTE(review): auto-generated Gogol product type (see module header);
-- manual edits are normally overwritten by the code generator.
data EphemeralIdRegistration =
  EphemeralIdRegistration'
    { _eirRotationPeriodExponent :: !(Maybe (Textual Word32))
    , _eirInitialClockValue :: !(Maybe (Textual Word64))
    , _eirBeaconIdentityKey :: !(Maybe Bytes)
    , _eirBeaconEcdhPublicKey :: !(Maybe Bytes)
    , _eirInitialEid :: !(Maybe Bytes)
    , _eirServiceEcdhPublicKey :: !(Maybe Bytes)
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'EphemeralIdRegistration' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'eirRotationPeriodExponent'
--
-- * 'eirInitialClockValue'
--
-- * 'eirBeaconIdentityKey'
--
-- * 'eirBeaconEcdhPublicKey'
--
-- * 'eirInitialEid'
--
-- * 'eirServiceEcdhPublicKey'
ephemeralIdRegistration
    :: EphemeralIdRegistration
ephemeralIdRegistration =
  EphemeralIdRegistration'
    { _eirRotationPeriodExponent = Nothing
    , _eirInitialClockValue = Nothing
    , _eirBeaconIdentityKey = Nothing
    , _eirBeaconEcdhPublicKey = Nothing
    , _eirInitialEid = Nothing
    , _eirServiceEcdhPublicKey = Nothing
    }
-- | Indicates the nominal period between each rotation of the beacon\'s
-- ephemeral ID. \"Nominal\" because the beacon should randomize the actual
-- interval. See [the spec at
-- github](https:\/\/github.com\/google\/eddystone\/tree\/master\/eddystone-eid)
-- for details. This value corresponds to a power-of-two scaler on the
-- beacon\'s clock: when the scaler value is K, the beacon will begin
-- broadcasting a new ephemeral ID on average every 2^K seconds.
eirRotationPeriodExponent :: Lens' EphemeralIdRegistration (Maybe Word32)
eirRotationPeriodExponent
  = lens _eirRotationPeriodExponent
      (\ s a -> s{_eirRotationPeriodExponent = a})
      . mapping _Coerce
-- | The initial clock value of the beacon. The beacon\'s clock must have
-- begun counting at this value immediately prior to transmitting this
-- value to the resolving service. Significant delay in transmitting this
-- value to the service risks registration or resolution failures. If a
-- value is not provided, the default is zero.
eirInitialClockValue :: Lens' EphemeralIdRegistration (Maybe Word64)
eirInitialClockValue
  = lens _eirInitialClockValue
      (\ s a -> s{_eirInitialClockValue = a})
      . mapping _Coerce
-- | The private key of the beacon. If this field is populated,
-- \`beacon_ecdh_public_key\` and \`service_ecdh_public_key\` must not be
-- populated.
eirBeaconIdentityKey :: Lens' EphemeralIdRegistration (Maybe ByteString)
eirBeaconIdentityKey
  = lens _eirBeaconIdentityKey
      (\ s a -> s{_eirBeaconIdentityKey = a})
      . mapping _Bytes
-- | The beacon\'s public key used for the Elliptic curve Diffie-Hellman key
-- exchange. When this field is populated, \`service_ecdh_public_key\` must
-- also be populated, and \`beacon_identity_key\` must not be.
eirBeaconEcdhPublicKey :: Lens' EphemeralIdRegistration (Maybe ByteString)
eirBeaconEcdhPublicKey
  = lens _eirBeaconEcdhPublicKey
      (\ s a -> s{_eirBeaconEcdhPublicKey = a})
      . mapping _Bytes
-- | An initial ephemeral ID calculated using the clock value submitted as
-- \`initial_clock_value\`, and the secret key generated by the
-- Diffie-Hellman key exchange using \`service_ecdh_public_key\` and
-- \`service_ecdh_public_key\`. This initial EID value will be used by the
-- service to confirm that the key exchange process was successful.
eirInitialEid :: Lens' EphemeralIdRegistration (Maybe ByteString)
eirInitialEid
  = lens _eirInitialEid
      (\ s a -> s{_eirInitialEid = a})
      . mapping _Bytes
-- | The service\'s public key used for the Elliptic curve Diffie-Hellman key
-- exchange. When this field is populated, \`beacon_ecdh_public_key\` must
-- also be populated, and \`beacon_identity_key\` must not be.
eirServiceEcdhPublicKey :: Lens' EphemeralIdRegistration (Maybe ByteString)
eirServiceEcdhPublicKey
  = lens _eirServiceEcdhPublicKey
      (\ s a -> s{_eirServiceEcdhPublicKey = a})
      . mapping _Bytes
instance FromJSON EphemeralIdRegistration where
        parseJSON
          = withObject "EphemeralIdRegistration"
              (\ o ->
                 EphemeralIdRegistration' <$>
                   (o .:? "rotationPeriodExponent") <*>
                     (o .:? "initialClockValue")
                     <*> (o .:? "beaconIdentityKey")
                     <*> (o .:? "beaconEcdhPublicKey")
                     <*> (o .:? "initialEid")
                     <*> (o .:? "serviceEcdhPublicKey"))
instance ToJSON EphemeralIdRegistration where
        toJSON EphemeralIdRegistration'{..}
          = object
              (catMaybes
                 [("rotationPeriodExponent" .=) <$>
                    _eirRotationPeriodExponent,
                  ("initialClockValue" .=) <$> _eirInitialClockValue,
                  ("beaconIdentityKey" .=) <$> _eirBeaconIdentityKey,
                  ("beaconEcdhPublicKey" .=) <$>
                    _eirBeaconEcdhPublicKey,
                  ("initialEid" .=) <$> _eirInitialEid,
                  ("serviceEcdhPublicKey" .=) <$>
                    _eirServiceEcdhPublicKey])
-- | Response to ListNamespacesRequest that contains all the project\'s
-- namespaces.
--
-- /See:/ 'listNamespacesResponse' smart constructor.
-- NOTE(review): auto-generated Gogol product type (see module header);
-- manual edits are normally overwritten by the code generator.
newtype ListNamespacesResponse =
  ListNamespacesResponse'
    { _lnrNamespaces :: Maybe [Namespace]
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ListNamespacesResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lnrNamespaces'
listNamespacesResponse
    :: ListNamespacesResponse
listNamespacesResponse = ListNamespacesResponse' {_lnrNamespaces = Nothing}
-- | The attachments that corresponded to the request params.
-- NOTE(review): the generated haddock above says "attachments" but the
-- field holds 'Namespace' values -- likely a generator copy/paste artifact.
lnrNamespaces :: Lens' ListNamespacesResponse [Namespace]
lnrNamespaces
  = lens _lnrNamespaces
      (\ s a -> s{_lnrNamespaces = a})
      . _Default
      . _Coerce
instance FromJSON ListNamespacesResponse where
        parseJSON
          = withObject "ListNamespacesResponse"
              (\ o ->
                 ListNamespacesResponse' <$>
                   (o .:? "namespaces" .!= mempty))
instance ToJSON ListNamespacesResponse where
        toJSON ListNamespacesResponse'{..}
          = object
              (catMaybes [("namespaces" .=) <$> _lnrNamespaces])
-- | Represents a whole or partial calendar date, e.g. a birthday.  The
-- time of day and time zone are either specified elsewhere or are not
-- significant.  The date is relative to the Proleptic Gregorian Calendar.
-- This can represent: * A full date, with non-zero year, month and day
-- values * A month and day value, with a zero year, e.g. an anniversary *
-- A year on its own, with zero month and day values * A year and month
-- value, with a zero day, e.g. a credit card expiration date Related types
-- are google.type.TimeOfDay and \`google.protobuf.Timestamp\`.
--
-- /See:/ 'date' smart constructor.
data Date = Date'
  { _dDay :: !(Maybe (Textual Int32))
  , _dYear :: !(Maybe (Textual Int32))
  , _dMonth :: !(Maybe (Textual Int32))
  } deriving (Eq, Show, Data, Typeable, Generic)

-- | Smart constructor for 'Date' with every field unset.  Use 'dDay',
-- 'dYear' and 'dMonth' to populate it.
date
    :: Date
date = Date' Nothing Nothing Nothing

-- | Day of month.  Must be from 1 to 31 and valid for the year and month,
-- or 0 if specifying a year by itself or a year and month where the day is
-- not significant.
dDay :: Lens' Date (Maybe Int32)
dDay =
  lens _dDay (\record v -> record {_dDay = v})
    . mapping _Coerce

-- | Year of date.  Must be from 1 to 9999, or 0 if specifying a date
-- without a year.
dYear :: Lens' Date (Maybe Int32)
dYear =
  lens _dYear (\record v -> record {_dYear = v})
    . mapping _Coerce

-- | Month of year.  Must be from 1 to 12, or 0 if specifying a year
-- without a month and day.
dMonth :: Lens' Date (Maybe Int32)
dMonth =
  lens _dMonth (\record v -> record {_dMonth = v})
    . mapping _Coerce

instance FromJSON Date where
  parseJSON =
    withObject "Date" $ \o ->
      Date'
        <$> (o .:? "day")
        <*> (o .:? "year")
        <*> (o .:? "month")

instance ToJSON Date where
  toJSON Date'{..} =
    object . catMaybes $
      [ ("day" .=) <$> _dDay
      , ("year" .=) <$> _dYear
      , ("month" .=) <$> _dMonth
      ]
-- | Details of a beacon device.
--
-- /See:/ 'beacon' smart constructor.
data Beacon = Beacon'
  { _beaLatLng :: !(Maybe LatLng)
  , _beaStatus :: !(Maybe BeaconStatus)
  , _beaBeaconName :: !(Maybe Text)
  , _beaEphemeralIdRegistration :: !(Maybe EphemeralIdRegistration)
  , _beaIndoorLevel :: !(Maybe IndoorLevel)
  , _beaExpectedStability :: !(Maybe BeaconExpectedStability)
  , _beaProvisioningKey :: !(Maybe Bytes)
  , _beaDescription :: !(Maybe Text)
  , _beaPlaceId :: !(Maybe Text)
  , _beaAdvertisedId :: !(Maybe AdvertisedId)
  , _beaProperties :: !(Maybe BeaconProperties)
  } deriving (Eq, Show, Data, Typeable, Generic)

-- | Smart constructor for 'Beacon'.  Every field starts out unset; use the
-- @bea*@ lenses ('beaLatLng', 'beaStatus', 'beaBeaconName',
-- 'beaEphemeralIdRegistration', 'beaIndoorLevel', 'beaExpectedStability',
-- 'beaProvisioningKey', 'beaDescription', 'beaPlaceId', 'beaAdvertisedId',
-- 'beaProperties') to populate it.
beacon
    :: Beacon
beacon = Beacon'
  { _beaLatLng = Nothing
  , _beaStatus = Nothing
  , _beaBeaconName = Nothing
  , _beaEphemeralIdRegistration = Nothing
  , _beaIndoorLevel = Nothing
  , _beaExpectedStability = Nothing
  , _beaProvisioningKey = Nothing
  , _beaDescription = Nothing
  , _beaPlaceId = Nothing
  , _beaAdvertisedId = Nothing
  , _beaProperties = Nothing
  }

-- | The location of the beacon, expressed as a latitude and longitude
-- pair.  This location is given when the beacon is registered or updated.
-- It does not necessarily indicate the actual current location of the
-- beacon.  Optional.
beaLatLng :: Lens' Beacon (Maybe LatLng)
beaLatLng = lens _beaLatLng (\record v -> record {_beaLatLng = v})

-- | Current status of the beacon.  Required.
beaStatus :: Lens' Beacon (Maybe BeaconStatus)
beaStatus = lens _beaStatus (\record v -> record {_beaStatus = v})

-- | Resource name of this beacon.  A beacon name has the format
-- \"beacons\/N!beaconId\" where the beaconId is the base16 ID broadcast by
-- the beacon and N is a code for the beacon\'s type.  Possible values are
-- \`3\` for Eddystone, \`1\` for iBeacon, or \`5\` for AltBeacon.  This
-- field must be left empty when registering.  After reading a beacon,
-- clients can use the name for future operations.
beaBeaconName :: Lens' Beacon (Maybe Text)
beaBeaconName =
  lens _beaBeaconName (\record v -> record {_beaBeaconName = v})

-- | Write-only registration parameters for beacons using Eddystone-EID
-- (remotely resolved ephemeral ID) format.  This information will not be
-- populated in API responses.  When submitting this data, the
-- \`advertised_id\` field must contain an ID of type Eddystone-UID.  Any
-- other ID type will result in an error.
beaEphemeralIdRegistration :: Lens' Beacon (Maybe EphemeralIdRegistration)
beaEphemeralIdRegistration =
  lens _beaEphemeralIdRegistration
    (\record v -> record {_beaEphemeralIdRegistration = v})

-- | The indoor level information for this beacon, if known.  As returned
-- by the Google Maps API.  Optional.
beaIndoorLevel :: Lens' Beacon (Maybe IndoorLevel)
beaIndoorLevel =
  lens _beaIndoorLevel (\record v -> record {_beaIndoorLevel = v})

-- | Expected location stability.  This is set when the beacon is
-- registered or updated, not automatically detected in any way.  Optional.
beaExpectedStability :: Lens' Beacon (Maybe BeaconExpectedStability)
beaExpectedStability =
  lens _beaExpectedStability
    (\record v -> record {_beaExpectedStability = v})

-- | Some beacons may require a user to provide an authorization key before
-- changing any of its configuration (e.g. broadcast frames, transmit
-- power).  This field provides a place to store and control access to that
-- key.  This field is populated in responses to \`GET
-- \/v1beta1\/beacons\/3!beaconId\` from users with write access to the
-- given beacon.  That is to say: If the user is authorized to write the
-- beacon\'s confidential data in the service, the service considers them
-- authorized to configure the beacon.  Note that this key grants nothing
-- on the service, only on the beacon itself.
beaProvisioningKey :: Lens' Beacon (Maybe ByteString)
beaProvisioningKey =
  lens _beaProvisioningKey
    (\record v -> record {_beaProvisioningKey = v})
    . mapping _Bytes

-- | Free text used to identify and describe the beacon.  Maximum length
-- 140 characters.  Optional.
beaDescription :: Lens' Beacon (Maybe Text)
beaDescription =
  lens _beaDescription (\record v -> record {_beaDescription = v})

-- | The [Google Places API](\/places\/place-id) Place ID of the place
-- where the beacon is deployed.  This is given when the beacon is
-- registered or updated, not automatically detected in any way.  Optional.
beaPlaceId :: Lens' Beacon (Maybe Text)
beaPlaceId = lens _beaPlaceId (\record v -> record {_beaPlaceId = v})

-- | The identifier of a beacon as advertised by it.  This field must be
-- populated when registering.  It may be empty when updating a beacon
-- record because it is ignored in updates.  When registering a beacon that
-- broadcasts Eddystone-EID, this field should contain a \"stable\"
-- Eddystone-UID that identifies the beacon and links it to its
-- attachments.  The stable Eddystone-UID is only used for administering
-- the beacon.
beaAdvertisedId :: Lens' Beacon (Maybe AdvertisedId)
beaAdvertisedId =
  lens _beaAdvertisedId (\record v -> record {_beaAdvertisedId = v})

-- | Properties of the beacon device, for example battery type or firmware
-- version.  Optional.
beaProperties :: Lens' Beacon (Maybe BeaconProperties)
beaProperties =
  lens _beaProperties (\record v -> record {_beaProperties = v})

instance FromJSON Beacon where
  parseJSON =
    withObject "Beacon" $ \o ->
      Beacon'
        <$> (o .:? "latLng")
        <*> (o .:? "status")
        <*> (o .:? "beaconName")
        <*> (o .:? "ephemeralIdRegistration")
        <*> (o .:? "indoorLevel")
        <*> (o .:? "expectedStability")
        <*> (o .:? "provisioningKey")
        <*> (o .:? "description")
        <*> (o .:? "placeId")
        <*> (o .:? "advertisedId")
        <*> (o .:? "properties")

instance ToJSON Beacon where
  toJSON Beacon'{..} =
    object . catMaybes $
      [ ("latLng" .=) <$> _beaLatLng
      , ("status" .=) <$> _beaStatus
      , ("beaconName" .=) <$> _beaBeaconName
      , ("ephemeralIdRegistration" .=) <$> _beaEphemeralIdRegistration
      , ("indoorLevel" .=) <$> _beaIndoorLevel
      , ("expectedStability" .=) <$> _beaExpectedStability
      , ("provisioningKey" .=) <$> _beaProvisioningKey
      , ("description" .=) <$> _beaDescription
      , ("placeId" .=) <$> _beaPlaceId
      , ("advertisedId" .=) <$> _beaAdvertisedId
      , ("properties" .=) <$> _beaProperties
      ]
-- | Diagnostics for a single beacon.
--
-- /See:/ 'diagnostics' smart constructor.
data Diagnostics = Diagnostics'
  { _dAlerts :: !(Maybe [Text])
  , _dBeaconName :: !(Maybe Text)
  , _dEstimatedLowBatteryDate :: !(Maybe Date)
  } deriving (Eq, Show, Data, Typeable, Generic)

-- | Smart constructor for 'Diagnostics' with every field unset.  Use
-- 'dAlerts', 'dBeaconName' and 'dEstimatedLowBatteryDate' to populate it.
diagnostics
    :: Diagnostics
diagnostics = Diagnostics' Nothing Nothing Nothing

-- | An unordered list of Alerts that the beacon has.
dAlerts :: Lens' Diagnostics [Text]
dAlerts =
  lens _dAlerts (\record v -> record {_dAlerts = v})
    . _Default
    . _Coerce

-- | Resource name of the beacon.  For Eddystone-EID beacons, this may be
-- the beacon\'s current EID, or the beacon\'s \"stable\" Eddystone-UID.
dBeaconName :: Lens' Diagnostics (Maybe Text)
dBeaconName =
  lens _dBeaconName (\record v -> record {_dBeaconName = v})

-- | The date when the battery is expected to be low.  If the value is
-- missing then there is no estimate for when the battery will be low.
-- This value is only an estimate, not an exact date.
dEstimatedLowBatteryDate :: Lens' Diagnostics (Maybe Date)
dEstimatedLowBatteryDate =
  lens _dEstimatedLowBatteryDate
    (\record v -> record {_dEstimatedLowBatteryDate = v})

instance FromJSON Diagnostics where
  parseJSON =
    withObject "Diagnostics" $ \o ->
      Diagnostics'
        <$> (o .:? "alerts" .!= mempty)
        <*> (o .:? "beaconName")
        <*> (o .:? "estimatedLowBatteryDate")

instance ToJSON Diagnostics where
  toJSON Diagnostics'{..} =
    object . catMaybes $
      [ ("alerts" .=) <$> _dAlerts
      , ("beaconName" .=) <$> _dBeaconName
      , ("estimatedLowBatteryDate" .=) <$> _dEstimatedLowBatteryDate
      ]
-- | Response to \`ListBeaconAttachments\` that contains the requested
-- attachments.
--
-- /See:/ 'listBeaconAttachmentsResponse' smart constructor.
newtype ListBeaconAttachmentsResponse = ListBeaconAttachmentsResponse'
  { _lbarAttachments :: Maybe [BeaconAttachment]
  } deriving (Eq, Show, Data, Typeable, Generic)

-- | Smart constructor for 'ListBeaconAttachmentsResponse'; the attachment
-- list starts out unset.  Use 'lbarAttachments' to populate it.
listBeaconAttachmentsResponse
    :: ListBeaconAttachmentsResponse
listBeaconAttachmentsResponse = ListBeaconAttachmentsResponse' Nothing

-- | The attachments that corresponded to the request params.
lbarAttachments :: Lens' ListBeaconAttachmentsResponse [BeaconAttachment]
lbarAttachments =
  lens _lbarAttachments (\record v -> record {_lbarAttachments = v})
    . _Default
    . _Coerce

-- Decodes the response; a missing \"attachments\" key yields an empty
-- list.
instance FromJSON ListBeaconAttachmentsResponse where
  parseJSON =
    withObject "ListBeaconAttachmentsResponse" $ \o ->
      fmap ListBeaconAttachmentsResponse' (o .:? "attachments" .!= mempty)

-- Encodes the response, omitting the \"attachments\" key when unset.
instance ToJSON ListBeaconAttachmentsResponse where
  toJSON (ListBeaconAttachmentsResponse' attachments) =
    object (catMaybes [("attachments" .=) <$> attachments])
-- | Indoor level, a human-readable string as returned by Google Maps APIs,
-- useful to indicate which floor of a building a beacon is located on.
--
-- /See:/ 'indoorLevel' smart constructor.
newtype IndoorLevel = IndoorLevel'
  { _ilName :: Maybe Text
  } deriving (Eq, Show, Data, Typeable, Generic)

-- | Smart constructor for 'IndoorLevel'; the level name starts out unset.
-- Use 'ilName' to populate it.
indoorLevel
    :: IndoorLevel
indoorLevel = IndoorLevel' Nothing

-- | The name of this level.
ilName :: Lens' IndoorLevel (Maybe Text)
ilName = lens _ilName (\record v -> record {_ilName = v})

-- Decodes an 'IndoorLevel'; the \"name\" key is optional.
instance FromJSON IndoorLevel where
  parseJSON =
    withObject "IndoorLevel" $ \o ->
      fmap IndoorLevel' (o .:? "name")

-- Encodes an 'IndoorLevel', omitting the \"name\" key when unset.
instance ToJSON IndoorLevel where
  toJSON (IndoorLevel' name) =
    object (catMaybes [("name" .=) <$> name])
-- | Information a client needs to provision and register beacons that
-- broadcast Eddystone-EID format beacon IDs, using Elliptic curve
-- Diffie-Hellman key exchange.  See [the Eddystone
-- specification](https:\/\/github.com\/google\/eddystone\/tree\/master\/eddystone-eid)
-- at GitHub.
--
-- /See:/ 'ephemeralIdRegistrationParams' smart constructor.
data EphemeralIdRegistrationParams = EphemeralIdRegistrationParams'
  { _eirpMinRotationPeriodExponent :: !(Maybe (Textual Word32))
  , _eirpMaxRotationPeriodExponent :: !(Maybe (Textual Word32))
  , _eirpServiceEcdhPublicKey :: !(Maybe Bytes)
  } deriving (Eq, Show, Data, Typeable, Generic)

-- | Smart constructor for 'EphemeralIdRegistrationParams' with every
-- field unset.  Use 'eirpMinRotationPeriodExponent',
-- 'eirpMaxRotationPeriodExponent' and 'eirpServiceEcdhPublicKey' to
-- populate it.
ephemeralIdRegistrationParams
    :: EphemeralIdRegistrationParams
ephemeralIdRegistrationParams =
  EphemeralIdRegistrationParams' Nothing Nothing Nothing

-- | Indicates the minimum rotation period supported by the service.  See
-- EddystoneEidRegistration.rotation_period_exponent
eirpMinRotationPeriodExponent :: Lens' EphemeralIdRegistrationParams (Maybe Word32)
eirpMinRotationPeriodExponent =
  lens _eirpMinRotationPeriodExponent
    (\record v -> record {_eirpMinRotationPeriodExponent = v})
    . mapping _Coerce

-- | Indicates the maximum rotation period supported by the service.  See
-- EddystoneEidRegistration.rotation_period_exponent
eirpMaxRotationPeriodExponent :: Lens' EphemeralIdRegistrationParams (Maybe Word32)
eirpMaxRotationPeriodExponent =
  lens _eirpMaxRotationPeriodExponent
    (\record v -> record {_eirpMaxRotationPeriodExponent = v})
    . mapping _Coerce

-- | The beacon service\'s public key for use by a beacon to derive its
-- Identity Key using Elliptic Curve Diffie-Hellman key exchange.
eirpServiceEcdhPublicKey :: Lens' EphemeralIdRegistrationParams (Maybe ByteString)
eirpServiceEcdhPublicKey =
  lens _eirpServiceEcdhPublicKey
    (\record v -> record {_eirpServiceEcdhPublicKey = v})
    . mapping _Bytes

instance FromJSON EphemeralIdRegistrationParams where
  parseJSON =
    withObject "EphemeralIdRegistrationParams" $ \o ->
      EphemeralIdRegistrationParams'
        <$> (o .:? "minRotationPeriodExponent")
        <*> (o .:? "maxRotationPeriodExponent")
        <*> (o .:? "serviceEcdhPublicKey")

instance ToJSON EphemeralIdRegistrationParams where
  toJSON EphemeralIdRegistrationParams'{..} =
    object . catMaybes $
      [ ("minRotationPeriodExponent" .=) <$> _eirpMinRotationPeriodExponent
      , ("maxRotationPeriodExponent" .=) <$> _eirpMaxRotationPeriodExponent
      , ("serviceEcdhPublicKey" .=) <$> _eirpServiceEcdhPublicKey
      ]
-- | A subset of beacon information served via the
-- \`beaconinfo.getforobserved\` method, which you call when users of your
-- app encounter your beacons.
--
-- /See:/ 'beaconInfo' smart constructor.
data BeaconInfo = BeaconInfo'
  { _biAttachments :: !(Maybe [AttachmentInfo])
  , _biBeaconName :: !(Maybe Text)
  , _biAdvertisedId :: !(Maybe AdvertisedId)
  } deriving (Eq, Show, Data, Typeable, Generic)

-- | Smart constructor for 'BeaconInfo' with every field unset.  Use
-- 'biAttachments', 'biBeaconName' and 'biAdvertisedId' to populate it.
beaconInfo
    :: BeaconInfo
beaconInfo = BeaconInfo' Nothing Nothing Nothing

-- | Attachments matching the type(s) requested.  May be empty if no
-- attachment types were requested.
biAttachments :: Lens' BeaconInfo [AttachmentInfo]
biAttachments =
  lens _biAttachments (\record v -> record {_biAttachments = v})
    . _Default
    . _Coerce

-- | The name under which the beacon is registered.
biBeaconName :: Lens' BeaconInfo (Maybe Text)
biBeaconName =
  lens _biBeaconName (\record v -> record {_biBeaconName = v})

-- | The ID advertised by the beacon.
biAdvertisedId :: Lens' BeaconInfo (Maybe AdvertisedId)
biAdvertisedId =
  lens _biAdvertisedId (\record v -> record {_biAdvertisedId = v})

instance FromJSON BeaconInfo where
  parseJSON =
    withObject "BeaconInfo" $ \o ->
      BeaconInfo'
        <$> (o .:? "attachments" .!= mempty)
        <*> (o .:? "beaconName")
        <*> (o .:? "advertisedId")

instance ToJSON BeaconInfo where
  toJSON BeaconInfo'{..} =
    object . catMaybes $
      [ ("attachments" .=) <$> _biAttachments
      , ("beaconName" .=) <$> _biBeaconName
      , ("advertisedId" .=) <$> _biAdvertisedId
      ]
-- | Represents one beacon observed once.
--
-- /See:/ 'observation' smart constructor.
data Observation = Observation'
  { _oTelemetry :: !(Maybe Bytes)
  , _oTimestampMs :: !(Maybe DateTime')
  , _oAdvertisedId :: !(Maybe AdvertisedId)
  } deriving (Eq, Show, Data, Typeable, Generic)

-- | Smart constructor for 'Observation' with every field unset.  Use
-- 'oTelemetry', 'oTimestampMs' and 'oAdvertisedId' to populate it.
observation
    :: Observation
observation = Observation' Nothing Nothing Nothing

-- | The array of telemetry bytes received from the beacon.  The server is
-- responsible for parsing it.  This field may frequently be empty, as
-- with a beacon that transmits telemetry only occasionally.
oTelemetry :: Lens' Observation (Maybe ByteString)
oTelemetry =
  lens _oTelemetry (\record v -> record {_oTelemetry = v})
    . mapping _Bytes

-- | Time when the beacon was observed.
oTimestampMs :: Lens' Observation (Maybe UTCTime)
oTimestampMs =
  lens _oTimestampMs (\record v -> record {_oTimestampMs = v})
    . mapping _DateTime

-- | The ID advertised by the beacon the client has encountered.  If the
-- submitted \`advertised_id\` type is Eddystone-EID, then the client must
-- be authorized to resolve the given beacon.  Otherwise no data will be
-- returned for that beacon.  Required.
oAdvertisedId :: Lens' Observation (Maybe AdvertisedId)
oAdvertisedId =
  lens _oAdvertisedId (\record v -> record {_oAdvertisedId = v})

instance FromJSON Observation where
  parseJSON =
    withObject "Observation" $ \o ->
      Observation'
        <$> (o .:? "telemetry")
        <*> (o .:? "timestampMs")
        <*> (o .:? "advertisedId")

instance ToJSON Observation where
  toJSON Observation'{..} =
    object . catMaybes $
      [ ("telemetry" .=) <$> _oTelemetry
      , ("timestampMs" .=) <$> _oTimestampMs
      , ("advertisedId" .=) <$> _oAdvertisedId
      ]
-- | Project-specific data associated with a beacon.
--
-- /See:/ 'beaconAttachment' smart constructor.
data BeaconAttachment = BeaconAttachment'
  { _baMaxDistanceMeters :: !(Maybe (Textual Double))
  , _baCreationTimeMs :: !(Maybe DateTime')
  , _baData :: !(Maybe Bytes)
  , _baAttachmentName :: !(Maybe Text)
  , _baNamespacedType :: !(Maybe Text)
  } deriving (Eq, Show, Data, Typeable, Generic)

-- | Smart constructor for 'BeaconAttachment' with every field unset.  Use
-- 'baMaxDistanceMeters', 'baCreationTimeMs', 'baData',
-- 'baAttachmentName' and 'baNamespacedType' to populate it.
beaconAttachment
    :: BeaconAttachment
beaconAttachment =
  BeaconAttachment' Nothing Nothing Nothing Nothing Nothing

-- | The distance away from the beacon at which this attachment should be
-- delivered to a mobile app.  Setting this to a value greater than zero
-- indicates that the app should behave as if the beacon is \"seen\" when
-- the mobile device is less than this distance away from the beacon.
-- Different attachments on the same beacon can have different max
-- distances.  Note that even though this value is expressed with
-- fractional meter precision, real-world behavior is likley to be much
-- less precise than one meter, due to the nature of current Bluetooth
-- radio technology.  Optional.  When not set or zero, the attachment
-- should be delivered at the beacon\'s outer limit of detection.
-- Negative values are invalid and return an error.
baMaxDistanceMeters :: Lens' BeaconAttachment (Maybe Double)
baMaxDistanceMeters =
  lens _baMaxDistanceMeters
    (\record v -> record {_baMaxDistanceMeters = v})
    . mapping _Coerce

-- | The UTC time when this attachment was created, in milliseconds since
-- the UNIX epoch.
baCreationTimeMs :: Lens' BeaconAttachment (Maybe UTCTime)
baCreationTimeMs =
  lens _baCreationTimeMs
    (\record v -> record {_baCreationTimeMs = v})
    . mapping _DateTime

-- | An opaque data container for client-provided data.  Must be
-- [base64](http:\/\/tools.ietf.org\/html\/rfc4648#section-4) encoded in
-- HTTP requests, and will be so encoded (with padding) in responses.
-- Required.
baData :: Lens' BeaconAttachment (Maybe ByteString)
baData =
  lens _baData (\record v -> record {_baData = v})
    . mapping _Bytes

-- | Resource name of this attachment.  Attachment names have the format:
-- 'beacons\/beacon_id\/attachments\/attachment_id'.  Leave this empty on
-- creation.
baAttachmentName :: Lens' BeaconAttachment (Maybe Text)
baAttachmentName =
  lens _baAttachmentName (\record v -> record {_baAttachmentName = v})

-- | Specifies what kind of attachment this is.  Tells a client how to
-- interpret the \`data\` field.  Format is 'namespace\/type'.  Namespace
-- provides type separation between clients.  Type describes the type of
-- \`data\`, for use by the client when parsing the \`data\` field.
-- Required.
baNamespacedType :: Lens' BeaconAttachment (Maybe Text)
baNamespacedType =
  lens _baNamespacedType (\record v -> record {_baNamespacedType = v})

instance FromJSON BeaconAttachment where
  parseJSON =
    withObject "BeaconAttachment" $ \o ->
      BeaconAttachment'
        <$> (o .:? "maxDistanceMeters")
        <*> (o .:? "creationTimeMs")
        <*> (o .:? "data")
        <*> (o .:? "attachmentName")
        <*> (o .:? "namespacedType")

instance ToJSON BeaconAttachment where
  toJSON BeaconAttachment'{..} =
    object . catMaybes $
      [ ("maxDistanceMeters" .=) <$> _baMaxDistanceMeters
      , ("creationTimeMs" .=) <$> _baCreationTimeMs
      , ("data" .=) <$> _baData
      , ("attachmentName" .=) <$> _baAttachmentName
      , ("namespacedType" .=) <$> _baNamespacedType
      ]
-- | Response that contains the requested diagnostics.
--
-- /See:/ 'listDiagnosticsResponse' smart constructor.
data ListDiagnosticsResponse = ListDiagnosticsResponse'
  { _ldrNextPageToken :: !(Maybe Text)
  , _ldrDiagnostics :: !(Maybe [Diagnostics])
  } deriving (Eq, Show, Data, Typeable, Generic)

-- | Smart constructor for 'ListDiagnosticsResponse' with both fields
-- unset.  Use 'ldrNextPageToken' and 'ldrDiagnostics' to populate it.
listDiagnosticsResponse
    :: ListDiagnosticsResponse
listDiagnosticsResponse = ListDiagnosticsResponse' Nothing Nothing

-- | Token that can be used for pagination.  Returned only if the request
-- matches more beacons than can be returned in this response.
ldrNextPageToken :: Lens' ListDiagnosticsResponse (Maybe Text)
ldrNextPageToken =
  lens _ldrNextPageToken (\record v -> record {_ldrNextPageToken = v})

-- | The diagnostics matching the given request.
ldrDiagnostics :: Lens' ListDiagnosticsResponse [Diagnostics]
ldrDiagnostics =
  lens _ldrDiagnostics (\record v -> record {_ldrDiagnostics = v})
    . _Default
    . _Coerce

instance FromJSON ListDiagnosticsResponse where
  parseJSON =
    withObject "ListDiagnosticsResponse" $ \o ->
      ListDiagnosticsResponse'
        <$> (o .:? "nextPageToken")
        <*> (o .:? "diagnostics" .!= mempty)

instance ToJSON ListDiagnosticsResponse where
  toJSON ListDiagnosticsResponse'{..} =
    object . catMaybes $
      [ ("nextPageToken" .=) <$> _ldrNextPageToken
      , ("diagnostics" .=) <$> _ldrDiagnostics
      ]
-- | Defines a unique identifier of a beacon as broadcast by the device.
--
-- /See:/ 'advertisedId' smart constructor.
data AdvertisedId = AdvertisedId'
  { _aiId :: !(Maybe Bytes)
  , _aiType :: !(Maybe AdvertisedIdType)
  } deriving (Eq, Show, Data, Typeable, Generic)

-- | Smart constructor for 'AdvertisedId' with both fields unset.  Use
-- 'aiId' and 'aiType' to populate it.
advertisedId
    :: AdvertisedId
advertisedId = AdvertisedId' Nothing Nothing

-- | The actual beacon identifier, as broadcast by the beacon hardware.
-- Must be [base64](http:\/\/tools.ietf.org\/html\/rfc4648#section-4)
-- encoded in HTTP requests, and will be so encoded (with padding) in
-- responses.  The base64 encoding should be of the binary byte-stream and
-- not any textual (such as hex) representation thereof.  Required.
aiId :: Lens' AdvertisedId (Maybe ByteString)
aiId =
  lens _aiId (\record v -> record {_aiId = v})
    . mapping _Bytes

-- | Specifies the identifier type.  Required.
aiType :: Lens' AdvertisedId (Maybe AdvertisedIdType)
aiType = lens _aiType (\record v -> record {_aiType = v})

instance FromJSON AdvertisedId where
  parseJSON =
    withObject "AdvertisedId" $ \o ->
      AdvertisedId'
        <$> (o .:? "id")
        <*> (o .:? "type")

instance ToJSON AdvertisedId where
  toJSON AdvertisedId'{..} =
    object . catMaybes $
      [ ("id" .=) <$> _aiId
      , ("type" .=) <$> _aiType
      ]
-- | Response that contains list beacon results and pagination help.
--
-- /See:/ 'listBeaconsResponse' smart constructor.
data ListBeaconsResponse = ListBeaconsResponse'
  { _lbrNextPageToken :: !(Maybe Text)
  , _lbrBeacons :: !(Maybe [Beacon])
  , _lbrTotalCount :: !(Maybe (Textual Int64))
  } deriving (Eq, Show, Data, Typeable, Generic)

-- | Smart constructor for 'ListBeaconsResponse' with every field unset.
-- Use 'lbrNextPageToken', 'lbrBeacons' and 'lbrTotalCount' to populate
-- it.
listBeaconsResponse
    :: ListBeaconsResponse
listBeaconsResponse = ListBeaconsResponse' Nothing Nothing Nothing

-- | An opaque pagination token that the client may provide in their next
-- request to retrieve the next page of results.
lbrNextPageToken :: Lens' ListBeaconsResponse (Maybe Text)
lbrNextPageToken =
  lens _lbrNextPageToken (\record v -> record {_lbrNextPageToken = v})

-- | The beacons that matched the search criteria.
lbrBeacons :: Lens' ListBeaconsResponse [Beacon]
lbrBeacons =
  lens _lbrBeacons (\record v -> record {_lbrBeacons = v})
    . _Default
    . _Coerce

-- | Estimate of the total number of beacons matched by the query.  Higher
-- values may be less accurate.
lbrTotalCount :: Lens' ListBeaconsResponse (Maybe Int64)
lbrTotalCount =
  lens _lbrTotalCount (\record v -> record {_lbrTotalCount = v})
    . mapping _Coerce

instance FromJSON ListBeaconsResponse where
  parseJSON =
    withObject "ListBeaconsResponse" $ \o ->
      ListBeaconsResponse'
        <$> (o .:? "nextPageToken")
        <*> (o .:? "beacons" .!= mempty)
        <*> (o .:? "totalCount")

instance ToJSON ListBeaconsResponse where
  toJSON ListBeaconsResponse'{..} =
    object . catMaybes $
      [ ("nextPageToken" .=) <$> _lbrNextPageToken
      , ("beacons" .=) <$> _lbrBeacons
      , ("totalCount" .=) <$> _lbrTotalCount
      ]
-- | Information about the requested beacons, optionally including
-- attachment data.
--
-- /See:/ 'getInfoForObservedBeaconsResponse' smart constructor.
newtype GetInfoForObservedBeaconsResponse = GetInfoForObservedBeaconsResponse'
  { _gifobrBeacons :: Maybe [BeaconInfo]
  } deriving (Eq, Show, Data, Typeable, Generic)

-- | Smart constructor for 'GetInfoForObservedBeaconsResponse'; the beacon
-- list starts out unset.  Use 'gifobrBeacons' to populate it.
getInfoForObservedBeaconsResponse
    :: GetInfoForObservedBeaconsResponse
getInfoForObservedBeaconsResponse =
  GetInfoForObservedBeaconsResponse' Nothing

-- | Public information about beacons.  May be empty if the request matched
-- no beacons.
gifobrBeacons :: Lens' GetInfoForObservedBeaconsResponse [BeaconInfo]
gifobrBeacons =
  lens _gifobrBeacons (\record v -> record {_gifobrBeacons = v})
    . _Default
    . _Coerce

-- Decodes the response; a missing \"beacons\" key yields an empty list.
instance FromJSON GetInfoForObservedBeaconsResponse where
  parseJSON =
    withObject "GetInfoForObservedBeaconsResponse" $ \o ->
      fmap GetInfoForObservedBeaconsResponse' (o .:? "beacons" .!= mempty)

-- Encodes the response, omitting the \"beacons\" key when unset.
instance ToJSON GetInfoForObservedBeaconsResponse where
  toJSON (GetInfoForObservedBeaconsResponse' beacons) =
    object (catMaybes [("beacons" .=) <$> beacons])
|
brendanhay/gogol
|
gogol-proximitybeacon/gen/Network/Google/ProximityBeacon/Types/Product.hs
|
mpl-2.0
| 52,070
| 0
| 21
| 11,760
| 8,053
| 4,655
| 3,398
| 914
| 1
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
#include "overlap.h"
module Opaleye.X.Table
( Table, table, tableWithSchema
, Properties, properties
)
where
-- anonymous-data ------------------------------------------------------------
import Data.Labeled (Labeled (Labeled))
-- base ----------------------------------------------------------------------
import Data.Proxy (Proxy (Proxy))
-- opaleye -------------------------------------------------------------------
import Opaleye.Column (Column)
import Opaleye.Table (TableColumns, optional, required)
import qualified Opaleye.Table as O (Table, table, tableWithSchema)
-- opaleye-x -----------------------------------------------------------------
import Opaleye.X.Internal
import Opaleye.X.TF ()
-- profunctors ---------------------------------------------------------------
import Data.Profunctor (Profunctor, dimap, lmap)
-- product-profunctors -------------------------------------------------------
import Data.Profunctor.Product (ProductProfunctor, (***!), empty)
import Data.Profunctor.Product.Default (Default, def)
-- types ---------------------------------------------------------------------
import Type.Meta (Known, Val, val)
------------------------------------------------------------------------------
type Table a = O.Table a (CollectOptional a)
------------------------------------------------------------------------------
table :: Properties a => ([String] -> String) -> String -> Table a
table mangler s = O.table s (properties mangler)
------------------------------------------------------------------------------
tableWithSchema :: Properties a
=> ([String] -> String) -> String -> String -> Table a
tableWithSchema mangler n s = O.tableWithSchema n s (properties mangler)
------------------------------------------------------------------------------
-- | Column-building profunctor: for each field it records how to build the
-- corresponding 'TableColumns' entry, given the label path accumulated so
-- far and the user-supplied name mangler.
newtype PropertiesPP a b =
    PropertiesPP ([String] -> ([String] -> String) -> TableColumns a b)
------------------------------------------------------------------------------
instance Profunctor PropertiesPP where
    -- Map over the wrapped 'TableColumns' pointwise.
    dimap l r (PropertiesPP p) = PropertiesPP $ \ns f -> dimap l r (p ns f)
------------------------------------------------------------------------------
instance ProductProfunctor PropertiesPP where
    empty = PropertiesPP $ \_ _ -> empty
    -- Products pass the same label path and mangler to both halves.
    PropertiesPP a ***! PropertiesPP b =
        PropertiesPP $ \ns f -> a ns f ***! b ns f
------------------------------------------------------------------------------
-- | A plain column is a required column, named by mangling its label path.
instance Default PropertiesPP (Column a) (Column a) where
    def = PropertiesPP $ \ns f -> required (f ns)
------------------------------------------------------------------------------
-- | An 'Option'-wrapped column maps to an optional column.
instance Default PropertiesPP (Option (Column a)) (Column a) where
    def = PropertiesPP $ \ns f -> lmap (\(Option a) -> a) (optional (f ns))
------------------------------------------------------------------------------
-- | Unwrap 'Optional' and defer to the instance for its 'Options' image.
instance (Options a o, Default PropertiesPP o b) =>
    Default PropertiesPP (Optional a) b
  where
    def = let PropertiesPP p = def in PropertiesPP $ \ns f ->
        lmap (\(Optional a) -> a) (p ns f)
------------------------------------------------------------------------------
-- | A 'Labeled' field appends its type-level label (rendered via 'ShowVal')
-- to the label path before delegating to the wrapped value's instance.
instance __OVERLAPS__
    (Default PropertiesPP (f a) (f b), Known s, ShowVal (Val s))
  =>
    Default PropertiesPP (Labeled f '(s, a)) (Labeled f '(s, b))
  where
    def = let PropertiesPP p = def in PropertiesPP $ \ns ->
        dimap unlabel Labeled . p (ns ++ [showVal (val (Proxy :: Proxy s))])
      where
        unlabel :: Labeled f '(s, a) -> f a
        unlabel (Labeled a) = a
------------------------------------------------------------------------------
-- | Render a (reflected) type-level label as a column-name component.
class ShowVal a where
    showVal :: a -> String
------------------------------------------------------------------------------
-- | 'String' labels are used verbatim.
instance ShowVal String where
    showVal = id
------------------------------------------------------------------------------
-- | Anything else falls back to its 'Show' rendering.
instance __OVERLAPPABLE__ Show a => ShowVal a where
    showVal = show
------------------------------------------------------------------------------
-- | Constraint: the column properties of @a@ can be derived generically.
type Properties a = Default PropertiesPP a (CollectOptional a)
------------------------------------------------------------------------------
-- | Derive the 'TableColumns' for @a@, starting from an empty label path.
properties :: Properties a
    => ([String] -> String) -> TableColumns a (CollectOptional a)
properties = let PropertiesPP p = def in p []
|
duairc/opaleye-x
|
src/Opaleye/X/Table.hs
|
mpl-2.0
| 4,769
| 0
| 19
| 736
| 1,068
| 586
| 482
| 64
| 1
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.IAP.Types.Sum
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.IAP.Types.Sum where
import Network.Google.Prelude hiding (Bytes)
-- | V1 error format.
data Xgafv
    = X1
      -- ^ @1@
      -- v1 error format
    | X2
      -- ^ @2@
      -- v2 error format
      deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable Xgafv
-- | Query-parameter decoding: only the literal strings @"1"@ and @"2"@ are
-- accepted; anything else yields a 'Left' parse error.
instance FromHttpApiData Xgafv where
    parseQueryParam = \case
        "1" -> Right X1
        "2" -> Right X2
        x -> Left ("Unable to parse Xgafv from: " <> x)
instance ToHttpApiData Xgafv where
    toQueryParam = \case
        X1 -> "1"
        X2 -> "2"
-- | JSON (de)serialization round-trips via the textual form above.
instance FromJSON Xgafv where
    parseJSON = parseJSONText "Xgafv"
instance ToJSON Xgafv where
    toJSON = toJSONText
|
brendanhay/gogol
|
gogol-iap/gen/Network/Google/IAP/Types/Sum.hs
|
mpl-2.0
| 1,209
| 0
| 11
| 292
| 197
| 114
| 83
| 26
| 0
|
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses, DeriveFunctor, DeriveTraversable, DeriveFoldable, NoMonomorphismRestriction, FlexibleContexts #-}
module HN.MilnerTools (instantiatedType, freshAtoms, MyStack, unifyM, runStack, subst, closureM, templateArgs, T(..), emptyClosureM, constantType, convertTv, convert, getReverseMap, revert, runApply, UTerm(..), IntVar(..)) where
import Data.Maybe
import qualified Data.Map as M
import qualified Data.Set as S
import Control.Unification (lookupVar, applyBindings, getFreeVars, freeVar)
import Control.Unification.IntVar (IntVar(..), runIntBindingT)
import Control.Monad.State
import Utils
import HN.TypeTools
import HN.Intermediate (Const (..))
import qualified SPL.Types as Old
import Unifier.Unifier
import Unifier.Restricted
-- freshAtoms is used in exactly one place: when computing the
-- Definition.loc.argAtoms attribute.
-- argument types are not generalized, thus S.empty
freshAtoms a counter = zipWith (\a i -> (a, (S.empty, tv i))) a [counter..]
-- | Instantiate a type scheme @(tu, t)@: every variable quantified in @tu@
-- is replaced by a fresh type variable numbered from @counter@ upwards.
-- Returns the next unused counter plus the instantiated, converted type.
instantiatedType counter (tu, t) = (counter + S.size tu, convert $ mapTypeTV (\a -> fromMaybe (Old.TV a) (M.lookup a substitutions)) t) where
    substitutions = M.fromDistinctAscList $ zipWith (\a b -> (a, tv b)) (S.toAscList tu) [counter..]
-- | Read the source-name-to-unifier-variable map threaded under the stack.
xget :: MyStack (M.Map String Int)
xget = lift get
-- | Replace the source-name-to-unifier-variable map.
xput :: M.Map String Int -> MyStack ()
xput = lift . put
-- | The inference stack: a unification environment over 'State' carrying the
-- map from source-level type-variable names to unifier variable numbers.
type MyStack a = WithEnv (State (M.Map String Int)) a
-- | Run a computation on an empty variable map, discarding all final state.
runStack x = fst $ fst $ flip runState (M.empty :: M.Map String Int) $ runIntBindingT x
-- | Translate an old-style type into a unification term, allocating unifier
-- variables for named type variables on demand (see 'convertTv').
convert (Old.T a) = return $ UTerm $ T a
convert (Old.TT a) = (UTerm . TT) <$> Prelude.mapM convert a
convert (Old.TD n a) = (UTerm . TD n) <$> Prelude.mapM convert a
convert a @ (Old.TV _) = convertTv a
-- | Look up the unifier variable for a named type variable, allocating and
-- recording a fresh one the first time the name is seen.
convertTv (Old.TV a) = do
    m <- xget
    fmap UVar $ maybe (xfreeVar a m) (return . IntVar) $ M.lookup a m where
        xfreeVar a m = do
            ii @ (IntVar i) <- freeVar
            xput (M.insert a i m)
            return ii
-- | Check that @x@ subsumes @y@, then export the resulting bindings.
subsumesM x y = runErrorT2 (subsumes x y) >> exportBindings
-- | Snapshot the unifier's bindings as a map from source-level type-variable
-- names to old-style types; unbound variables are dropped ('catMaybes').
exportBindings = do
    x <- fmap reverseMap xget
    xget >>= fmap (M.fromList . catMaybes) . mapM (fff x) . M.toList where
        fff x (tv, iv) = fmap (fmap (\ o -> (tv, revert o x))) $ lookupVar $ IntVar iv
-- | Translate a unification term back to an old-style type, resolving
-- unifier variables through the reverse name map @m@.
revert x m = mrevert x where
    mrevert (UTerm x) = f x
    mrevert (UVar (IntVar i)) = Old.TV $ tracedUncondLookup "Unification.revert" i m
    f (T x) = Old.T x
    f (TT x) = Old.TT $ map mrevert x
    f (TD s x) = Old.TD s $ map mrevert x
-- | Apply the current bindings, assuming the operation cannot fail
-- ('fromRight' is partial -- it errors if applyBindings reports a failure).
runApply = fmap fromRight . runErrorT2 . applyBindings
-- | Convert a term back to old-style form under the current reverse map.
revertM newTerm = fmap (revert newTerm . reverseMap) xget
-- | Round-trip an old-style type through the unifier (normalisation).
subst = convertAndBind >=> revertM
convertAndBind = convert >=> runApply
getReverseMap = fmap reverseMap xget
-- | Generalize: free variables of the function type that are not free in the
-- environment become the quantified variables of the resulting scheme.
closureM = liftM3M $ \convEnv args result -> do
    let convTau = UTerm $ TT $ args ++ [result]
    let varListToSet = fmap (S.fromList . map (\(IntVar x) -> x))
    tpv <- varListToSet $ getFreeVars convTau
    epv <- varListToSet $ Prelude.concat <$> mapM getFreeVars convEnv
    return (tpv S.\\ epv, convTau)
-- | A scheme with no quantified variables (monomorphic closure).
emptyClosureM tau = do
    convTau <- tau >>= runApply >>= revertM
    return (S.empty, convTau)
-- | For a call site of type @tau@, compute the concrete type instantiating
-- each generalized variable of the callee's scheme, in set order.
templateArgs tau (generalizedVars, generalizedT) = do
    inferredType <- convert generalizedT
    callSiteType <- tau >>= runApply
    subst2 <- subsumesM inferredType callSiteType
    let fs x = tracedUncondLookup "AG.TypeInference.fs" x subst2
    return $ map fs $ S.toList generalizedVars
-- | Unify a function type with the tuple of its argument types plus the
-- result type @beta@; returns the result type.
unifyM fnTau argTau beta = do
    args <- sequence argTau
    result <- beta
    fn <- fnTau
    runErrorT2 $ unify fn $ UTerm $ TT $ args ++ [result]
    return result
-- used when inferring the type of a constant (as its tau)
constantType x = return $ UTerm $ case x of
    ConstInt _ -> T "num"
    ConstString _ -> T "string"
|
ingvar-lynn/HNC
|
HN/MilnerTools.hs
|
lgpl-3.0
| 3,700
| 2
| 19
| 629
| 1,436
| 735
| 701
| 74
| 4
|
module MancalaAI (minimax) where
import MancalaBoard
-- | Simple heuristic: the given player's score minus the opponent's, where a
-- player's score is captured stones + stones on their side + a 1-point bonus
-- when it is currently their turn.  Positive means @player@ is ahead.
hueristic :: MancalaBoard -> Player -> Int
hueristic board player = (playerHuer board player) - (playerHuer board (nextPlayer player)) where
    playerHuer b p = numCaptured b p + sum (playerSide b p) + (isMyTurn b p)
    isMyTurn b p = if (getCurPlayer b) == p then 1 else 0
-- implementation taken from pseudocode on Wikipedia
-- | Depth-limited minimax.  At depth 0 the board is scored with 'hueristic';
-- otherwise the maximizing (True) / minimizing (False) side chooses over all
-- 'allowedMoves'.
--
-- NOTE(review): the recursive cases return @snd@ of the best pair -- the
-- chosen MOVE index -- not its heuristic value (@fst@).  One level up, those
-- move indices (not scores) are what get compared, so for depth >= 2 the
-- search appears to compare move numbers instead of evaluations.  Confirm
-- whether @fst@ was intended for recursive calls, with the move extracted
-- only at the top level.
minimax :: MancalaBoard -> Player -> Int -> Bool -> Int
minimax board aiPlayer 0 _ = hueristic board aiPlayer
minimax board aiPlayer depth True = snd (maximum heuristics) where
    heuristics = [(minimaxUtil (move board m), m) | m <- (allowedMoves board)]
    minimaxUtil b = minimax b aiPlayer (depth - 1) False
minimax board aiPlayer depth False = snd (minimum heuristics) where
    heuristics = [(minimaxUtil (move board m), m) | m <- (allowedMoves board)]
    minimaxUtil b = minimax b aiPlayer (depth - 1) True
|
kbrose/math
|
Mancala/MancalaAI.hs
|
unlicense
| 961
| 0
| 11
| 185
| 351
| 181
| 170
| 14
| 2
|
module Handler.Utils where
import Data.Text (pack, unpack)
import Import
-- | Convert a list of 'String' pairs into the corresponding 'Text' pairs.
toTextPairs :: [(String, String)] -> [(Text, Text)]
toTextPairs xs = [(pack a, pack b) | (a, b) <- xs]
-- | Read the required GET parameter with the given field name and return its
-- value as a 'String'.
getStringFromField :: Text -> GHandler sub App String
getStringFromField fieldName = fmap unpack (runInputGet (ireq textField fieldName))
|
thlorenz/WebToInk
|
webtoink/Handler/Utils.hs
|
bsd-2-clause
| 323
| 0
| 8
| 54
| 120
| 67
| 53
| 8
| 1
|
{-# LANGUAGE TypeFamilies #-}
module Language.Drasil.Code.Imperative.GOOL.Symantics (
-- Typeclasses
PackageSym(..), AuxiliarySym(..)
) where
import Language.Drasil (Expr)
import Database.Drasil (ChunkDB)
import Language.Drasil.Code.DataDesc (DataDesc)
import Language.Drasil.CodeSpec (Comments, Verbosity)
import GOOL.Drasil (ProgData, GOOLState)
import Text.PrettyPrint.HughesPJ (Doc)
-- | Final-tagless symbol for assembling a package: program data plus its
-- auxiliary (non-code) files.
class (AuxiliarySym repr) => PackageSym repr where
    type Package repr
    package :: ProgData -> [repr (Auxiliary repr)] ->
        repr (Package repr)
-- | Symbols for the auxiliary files accompanying generated code.
class AuxiliarySym repr where
    type Auxiliary repr
    type AuxHelper repr
    -- | Doxygen configuration file (comment detail set by 'Verbosity').
    doxConfig :: String -> GOOLState -> Verbosity -> repr (Auxiliary repr)
    -- | Sample input file built from a data description and example values.
    sampleInput :: ChunkDB -> DataDesc -> [Expr] -> repr (Auxiliary repr)
    optimizeDox :: repr (AuxHelper repr)
    -- | Makefile for the program; the '[Comments]' argument presumably
    -- controls documentation-related targets -- TODO confirm.
    makefile :: [Comments] -> GOOLState -> ProgData -> repr (Auxiliary repr)
    auxHelperDoc :: repr (AuxHelper repr) -> Doc
    -- | Wrap an already-rendered 'Doc' as an auxiliary file at a path.
    auxFromData :: FilePath -> Doc -> repr (Auxiliary repr)
|
JacquesCarette/literate-scientific-software
|
code/drasil-code/Language/Drasil/Code/Imperative/GOOL/Symantics.hs
|
bsd-2-clause
| 986
| 0
| 12
| 156
| 309
| 174
| 135
| 22
| 0
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -Wall #-}
{-| Nordom lets you import external expressions located either in local files or
hosted on network endpoints.
To import a local file as an expression, just insert the path to the file,
prepending a @./@ if the path is relative to the current directory. For
example, suppose we had the following three local files:
> -- id
> \(a : *) -> \(x : a) -> x
> -- Bool
> forall (Bool : *) -> forall (True : Bool) -> forall (False : Bool) -> Bool
> -- True
> \(Bool : *) -> \(True : Bool) -> \(False : Bool) -> True
You could then reference them within a Nordom expression using this syntax:
> ./id ./Bool ./True
... which would embed their expressions directly within the syntax tree:
> -- ... expands out to:
> (\(a : *) -> \(x : a) -> x)
> (forall (Bool : *) -> forall (True : Bool) -> forall (False : Bool) -> True)
> (\(Bool : *) -> \(True : Bool) -> \(False : Bool) -> True)
... and which normalizes to:
> λ(Bool : *) → λ(True : Bool) → λ(False : Bool) → True
Imported expressions may contain imports of their own, too, which will
continue to be resolved. However, Nordom will prevent cyclic imports. For
example, if you had these two files:
> -- foo
> ./bar
> -- bar
> ./foo
... Nordom would throw the following exception if you tried to import @foo@:
> morte:
> ⤷ ./foo
> ⤷ ./bar
> Cyclic import: ./foo
You can also import expressions hosted on network endpoints. Just use the
URL
> http://host[:port]/path
The compiler expects the downloaded expressions to be in the same format
as local files, specifically UTF8-encoded source code text.
For example, if our @id@ expression were hosted at @http://example.com/id@,
then we would embed the expression within our code using:
> http://example.com/id
You can also reuse directory names as expressions. If you provide a path
to a local or remote directory then the compiler will look for a file named
@\@@ within that directory and use that file to represent the directory.
-}
module Nordom.Import (
-- * Import
load
, Cycle(..)
, ReferentiallyOpaque(..)
, Imported(..)
) where
import Control.Exception (Exception, IOException, catch, onException, throwIO)
import Control.Monad (join)
import Control.Monad.IO.Class (MonadIO(..))
import Control.Monad.Trans.State.Strict (StateT)
import Data.Map.Strict (Map)
import Data.Monoid ((<>))
import Data.Text.Buildable (build)
import Data.Text.Lazy (Text)
import Data.Text.Lazy.Builder (Builder)
import Data.Traversable (traverse)
import Data.Typeable (Typeable)
import Filesystem.Path ((</>), FilePath)
import Filesystem as Filesystem
import Lens.Family (LensLike')
import Lens.Family.State.Strict (zoom)
import Nordom.Core (Expr, Path(..), X(..))
import Network.HTTP.Client (Manager)
import Prelude hiding (FilePath)
import qualified Control.Monad.Trans.State.Strict as State
import qualified Data.Foldable as Foldable
import qualified Data.List as List
import qualified Data.Map.Strict as Map
import qualified Data.Text.Lazy as Text
import qualified Data.Text.Lazy.Builder as Builder
import qualified Data.Text.Lazy.Encoding as Text
import qualified Nordom.Core as Nordom
import qualified Nordom.Parser as Nordom
import qualified Network.HTTP.Client as HTTP
import qualified Network.HTTP.Client.TLS as HTTP
import qualified Filesystem.Path.CurrentOS as Filesystem
-- | Render a lazy-text 'Builder' as a 'String'.
builderToString :: Builder -> String
builderToString builder = Text.unpack (Builder.toLazyText builder)
-- | An import failed because of a cycle in the import graph
newtype Cycle = Cycle
    { cyclicImport :: Path -- ^ The offending cyclic import
    }
    deriving (Typeable)
instance Exception Cycle
-- | User-facing rendering, e.g. @Cyclic import: ./foo@.
instance Show Cycle where
    show (Cycle path) = "Cyclic import: " ++ builderToString (build path)
{-| Nordom tries to ensure that all expressions hosted on network endpoints are
weakly referentially transparent, meaning roughly that any two clients will
compile the exact same result given the same URL.
    To be precise, a strong interpretation of referential transparency means that
if you compiled a URL you could replace the expression hosted at that URL
with the compiled result. Let's term this \"static linking\". Nordom (very
intentionally) does not satisfy this stronger interpretation of referential
transparency since \"statically linking\" an expression (i.e. permanently
resolving all imports) means that the expression will no longer update if
its dependencies change.
In general, either interpretation of referential transparency is not
enforceable in a networked context since one can easily violate referential
transparency with a custom DNS, but Nordom can still try to guard against
common unintentional violations. To do this, Nordom enforces that a
non-local import may not reference a local import.
Local imports are defined as:
* A file
* A URL with a host of @localhost@ or @127.0.0.1@
All other imports are defined to be non-local
-}
-- | Thrown when a non-local import references a local one (see the module
-- commentary on referential transparency above).
newtype ReferentiallyOpaque = ReferentiallyOpaque
    { opaqueImport :: Path -- ^ The offending opaque import
    } deriving (Typeable)
instance Exception ReferentiallyOpaque
instance Show ReferentiallyOpaque where
    show (ReferentiallyOpaque path) =
        "Referentially opaque import: " ++ builderToString (build path)
-- | Extend another exception with the current import stack
data Imported e = Imported
    { importStack :: [Path] -- ^ Imports resolved so far, in reverse order
    , nested :: e -- ^ The nested exception
    } deriving (Typeable)
instance Exception e => Exception (Imported e)
-- | Renders one "⤷ path" line per import leading up to the nested error.
instance Show e => Show (Imported e) where
    show (Imported paths e) =
        "\n"
        ++ unlines (map (\path -> "⤷ " ++ builderToString (build path)) paths')
        ++ show e
      where
        -- Canonicalize all paths, restore chronological order, and drop the
        -- first entry -- presumably the implicit root import; confirm.
        paths' = drop 1 (reverse (canonicalizeAll paths))
-- | Loader state threaded through import resolution.
data Status = Status
    { _stack :: [Path] -- ^ Imports currently being resolved (innermost first)
    , _cache :: Map Path (Expr X) -- ^ Fully resolved expressions, by canonical path
    , _manager :: Maybe Manager -- ^ HTTP manager, created lazily by 'needManager'
    }
canonicalizeAll :: [Path] -> [Path]
canonicalizeAll = map canonicalize . List.tails
-- Hand-rolled lenses for the fields of 'Status'.
stack :: Functor f => LensLike' f Status [Path]
stack k s = fmap (\x -> s { _stack = x }) (k (_stack s))
cache :: Functor f => LensLike' f Status (Map Path (Expr X))
cache k s = fmap (\x -> s { _cache = x }) (k (_cache s))
manager :: Functor f => LensLike' f Status (Maybe Manager)
manager k s = fmap (\x -> s { _manager = x }) (k (_manager s))
-- | Return the shared HTTP 'Manager', creating it (TLS settings, 1-second
-- response timeout) on first use and caching it in 'Status'.
needManager :: StateT Status IO Manager
needManager = do
    x <- zoom manager State.get
    case x of
        Just m -> return m
        Nothing -> do
            let settings = HTTP.tlsManagerSettings
                    { HTTP.managerResponseTimeout = Just 1000000 } -- 1 second
            m <- liftIO (HTTP.newManager settings)
            zoom manager (State.put (Just m))
            return m
{-| This function computes the current path by taking the last absolute path
(either an absolute `FilePath` or `URL`) and combining it with all following
relative paths
For example, if the file `./foo/bar` imports `./baz`, that will resolve to
`./foo/baz`. Relative imports are relative to a file's parent directory.
This also works for URLs, too.
This code is full of all sorts of edge cases so it wouldn't surprise me at
all if you find something broken in here. Most of the ugliness is due to:
* Handling paths ending with @/\@@ by stripping the @/\@@ suffix if and only
if you navigate to any downstream relative paths
* Removing spurious @.@s and @..@s from the path
    Also, there are way too many `reverse`s in the URL-handling code. For now I
    don't mind, but if we were to really do this correctly we'd store the URLs as
`Text` for O(1) access to the end of the string. The only reason we use
`String` at all is for consistency with the @http-client@ library.
-}
-- | Compute the current path: take the last absolute anchor ('File' or
-- 'URL') on the stack and resolve all following relative paths against it.
-- See the commentary immediately above for the edge cases handled here.
canonicalize :: [Path] -> Path
canonicalize [] = File "."
canonicalize (File file0:paths0) =
    if Filesystem.relative file0
    then go file0 paths0 -- relative: resolve against the rest of the stack
    else File (clean file0) -- absolute: this is the anchor, just normalize
  where
    go currPath [] = File (clean currPath)
    go currPath (URL url0:_ ) = combine prefix suffix
      where
        prefix = parentURL (removeAtFromURL url0)
        suffix = clean currPath
        -- `clean` will resolve internal @.@/@..@'s in @currPath@, but we still
        -- need to manually handle @.@/@..@'s at the beginning of the path
        combine url path = case Filesystem.stripPrefix ".." path of
            Just path' -> combine url' path'
              where
                url' = parentURL (removeAtFromURL url)
            Nothing -> case Filesystem.stripPrefix "." path of
                Just path' -> combine url path'
                Nothing ->
                    -- This `last` is safe because the lexer constraints all
                    -- URLs to be non-empty. I couldn't find a simple and safe
                    -- equivalent in the `text` API
                    case Text.last url of
                        '/' -> URL (url <> path')
                        _ -> URL (url <> "/" <> path')
                  where
                    path' = Text.fromStrict (case Filesystem.toText path of
                        Left txt -> txt
                        Right txt -> txt )
    go currPath (File file:paths) =
        if Filesystem.relative file
        then go file' paths
        else File (clean file')
      where
        file' = Filesystem.parent (removeAtFromFile file) </> currPath
-- A URL anchor wins outright: everything above it is ignored.
canonicalize (URL path:_) = URL path
-- | Keep everything up to and including the last @/@ of a URL.
parentURL :: Text -> Text
parentURL url = Text.dropWhileEnd (\c -> c /= '/') url

-- | Strip a trailing directory marker @/\@@, or a bare trailing @/@, from a
-- URL; any other URL is returned unchanged.
removeAtFromURL :: Text -> Text
removeAtFromURL url =
    if Text.pack "/@" `Text.isSuffixOf` url
        then Text.dropEnd 2 url
        else if Text.pack "/" `Text.isSuffixOf` url
            then Text.dropEnd 1 url
            else url
-- | Strip a trailing @\@@ filename (directory marker) from a file path.
removeAtFromFile :: FilePath -> FilePath
removeAtFromFile file =
    if Filesystem.filename file == "@"
    then Filesystem.parent file
    else file
-- | Remove all @.@'s and @..@'s in the path
clean :: FilePath -> FilePath
clean = strip . Filesystem.collapse
  where
    -- 'collapse' can leave a leading "." component; drop it too.
    strip p = case Filesystem.stripPrefix "." p of
        Nothing -> p
        Just p' -> p'
{-| Load a `Path` as a \"dynamic\" expression (without resolving any imports)
    This also returns the true final path (i.e. explicit "/@" at the end for
    directories)
-}
loadDynamic :: Path -> StateT Status IO (Expr Path)
loadDynamic p = do
    paths <- zoom stack State.get
    -- Fetch a URL; on an HTTP status error, retry once with "/@" appended
    -- (the URL may name a directory).  Failures are wrapped in 'Imported'
    -- so the user sees the import chain.
    let readURL url = do
            request <- liftIO (HTTP.parseUrl (Text.unpack url))
            m <- needManager
            let httpLbs' = do
                    HTTP.httpLbs request m `catch` (\e -> case e of
                        HTTP.StatusCodeException _ _ _ -> do
                            let request' = request
                                    { HTTP.path = HTTP.path request <> "/@" }
                            -- If the fallback fails, reuse the original
                            -- exception to avoid user confusion
                            HTTP.httpLbs request' m
                                `onException` throwIO (Imported paths e)
                        _ -> throwIO (Imported paths e) )
            response <- liftIO httpLbs'
            case Text.decodeUtf8' (HTTP.responseBody response) of
                Left err -> liftIO (throwIO (Imported paths err))
                Right txt -> return txt
    -- Read a file; if that fails (e.g. it is a directory), retry "<file>/@".
    let readFile' file = liftIO (do
            (do txt <- Filesystem.readTextFile file
                return (Text.fromStrict txt) ) `catch` (\e -> do
                -- Unfortunately, GHC throws an `InappropriateType`
                -- exception when trying to read a directory, but does not
                -- export the exception, so I must resort to a more
                -- heavy-handed `catch`
                let _ = e :: IOException
                -- If the fallback fails, reuse the original exception to
                -- avoid user confusion
                let file' = file </> "@"
                txt <- Filesystem.readTextFile file'
                    `onException` throwIO (Imported paths e)
                return (Text.fromStrict txt) ) )
    txt <- case canonicalize (p:paths) of
        File file -> readFile' file
        URL url -> readURL url
    let abort err = liftIO (throwIO (Imported (p:paths) err))
    case Nordom.exprFromText txt of
        Left err -> case canonicalize (p:paths) of
            URL url -> do
                -- Also try the fallback in case of a parse error, since the
                -- parse error might signify that this URL points to a directory
                -- list
                request <- liftIO (HTTP.parseUrl (Text.unpack url))
                let request' = request { HTTP.path = HTTP.path request <> "/@" }
                m <- needManager
                response <- liftIO
                    (HTTP.httpLbs request' m `onException` abort err)
                case Text.decodeUtf8' (HTTP.responseBody response) of
                    Left _ -> liftIO (abort err)
                    Right txt' -> case Nordom.exprFromText txt' of
                        Left _ -> liftIO (abort err)
                        Right expr -> return expr
            _ -> liftIO (abort err)
        Right expr -> return expr
-- | Load a `Path` as a \"static\" expression (with all imports resolved)
loadStatic :: Path -> StateT Status IO (Expr X)
loadStatic path = do
    paths <- zoom stack State.get
    -- A "local" import is a file, or a URL pointing at this machine.
    let local (URL url) = case HTTP.parseUrl (Text.unpack url) of
            Nothing -> False
            Just request -> case HTTP.host request of
                "127.0.0.1" -> True
                "localhost" -> True
                _ -> False
        local (File _) = True
    let parent = canonicalize paths
    let here = canonicalize (path:paths)
    -- Referential-transparency guard: a non-local import must not reference
    -- a local one (see the module commentary).
    if local here && not (local parent)
        then liftIO (throwIO (Imported paths (ReferentiallyOpaque path)))
        else return ()
    (expr, cached) <- if here `elem` canonicalizeAll paths
        then liftIO (throwIO (Imported paths (Cycle path)))
        else do
            m <- zoom cache State.get
            case Map.lookup here m of
                Just expr -> return (expr, True)
                Nothing -> do
                    expr' <- loadDynamic path
                    expr'' <- case traverse (\_ -> Nothing) expr' of
                        -- No imports left
                        Just expr -> do
                            zoom cache (State.put $! Map.insert here expr m)
                            return expr
                        -- Some imports left, so recurse
                        -- (note: only import-free expressions are inserted
                        -- into the cache; recursed results are not)
                        Nothing -> do
                            let paths' = path:paths
                            zoom stack (State.put paths')
                            expr'' <- fmap join (traverse loadStatic expr')
                            zoom stack (State.put paths)
                            return expr''
                    return (expr'', False)
    -- Type-check expressions here for two separate reasons:
    --
    -- * to verify that they are closed
    -- * to catch type errors as early in the import process as possible
    --
    -- There is no need to check expressions that have been cached, since they
    -- have already been checked
    if cached
        then return ()
        else case Nordom.typeOf expr of
            Left err -> liftIO (throwIO (Imported (path:paths) err))
            Right _ -> return ()
    return expr
{-| Resolve all imports within an expression
    By default the starting path is the current directory, but you can override
    the starting path with a file if you read in the expression from that file
-}
load
    :: Maybe Path
    -- ^ Starting path
    -> Expr Path
    -- ^ Expression to resolve
    -> IO (Expr X)
load here expr =
    State.evalStateT (fmap join (traverse loadStatic expr)) status
  where
    -- Seed the import stack with the starting path (if any); empty cache,
    -- no HTTP manager yet.
    status = Status (Foldable.toList here) Map.empty Nothing
|
Gabriel439/Haskell-Nordom-Library
|
src/Nordom/Import.hs
|
bsd-3-clause
| 16,481
| 43
| 33
| 5,161
| 2,802
| 1,516
| 1,286
| 224
| 11
|
module Main
( main -- :: IO ()
) where
import Test.Hspec
import Test.Hspec.HUnit()
import Test.QuickCheck()
-- | Test-suite entry point: two trivial smoke-test groups.
main :: IO ()
main = hspec $ do
    describe "test" $
        it "does something dumb" $ 1 == (1 :: Int)
    describe "test2" $
        it "does something else" $ 2 == (2 :: Int)
|
thoughtpolice/hs-leveldb
|
tests/Properties.hs
|
bsd-3-clause
| 293
| 0
| 13
| 77
| 111
| 59
| 52
| 11
| 1
|
module VSimR.Timeline (
advance
, commit
) where
import Control.Applicative
import Control.Monad
import Control.Monad.Trans
import VSimR.Ptr
import VSimR.Time
import VSimR.Signal
import VSimR.Waveform
import VSimR.Process
-- | Returns the time of the next event, as well as signals to be changed
--
-- TODO: take waitable processes into account
--
-- NOTE(review): the bind @(Change t' _, w') <- ...@ pattern-matches on the
-- 'Change' constructor; if a waveform's current event is ever not a 'Change'
-- the match fails in the monad -- confirm that cannot happen here.
next_event :: (MonadIO m) => [Ptr (Signal s)] -> m ([(Ptr (Signal s),Waveform)], Time)
next_event ss = foldM cmp ([],time_max) ss where
    -- Fold state: (signals whose next event is at the earliest time found so
    -- far, that time).  A strictly earlier event replaces the list; a tie
    -- extends it; a later event leaves the state untouched.
    cmp o@(l,t) r = do
        (Change t' _, w') <- event `liftM` wcurr `liftM` deref r
        case compare t' t of
            LT -> return ([(r,w')], t')
            EQ -> return ((r,w'):l, t)
            _ -> return o
-- | Calculates next event's time
--
-- Every signal due at that time has its new waveform written back, and the
-- processes attached to those signals are collected for re-evaluation.
advance :: (MonadIO m) => [Ptr (Signal s)] -> m (Time, [Ptr (Process s)])
advance ss = do
    (cs,t) <- next_event ss
    ps <- forM cs $ \(r,w) -> do
        s <- deref r
        write r (chwave w s)
        return (proc s)
    return (t, concat ps)
-- | Invalidate signal assignments
--
-- TODO: monitor multiple assignments, implement resolvers
--
-- NOTE(review): the 'Time' argument @t@ is currently unused in the body.
commit :: (MonadIO m) => Time -> [Assignment s] -> m ()
commit t as = do
    forM_ as $ \(Assignment r pw) -> do
        (Signal' w o c p) <- deref r
        write r (Signal' (unPW w pw) o c p)
|
ierton/vsim
|
src/VSimR/Timeline.hs
|
bsd-3-clause
| 1,344
| 0
| 15
| 390
| 535
| 282
| 253
| 32
| 3
|
module Code29_Plan2 where
import Code28
-- | Delegates to 'boxall1' (defined in "Code28").
boxall :: [[a]] -> [a]
boxall = boxall1
-- | The code for @n@ with initial offset 0 (see 'code').
jcode :: Int -> [Int]
jcode n = code (0,n)
-- | Add @k@ to the elements at even positions (0, 2, 4, ...) of the list,
-- leaving elements at odd positions unchanged.
bumpBy :: Int -> [Int] -> [Int]
bumpBy k = go
  where
    go [] = []
    go [x] = [x + k]
    go (x:y:rest) = (x + k) : y : go rest
-- | The descending run @n-1, n-2, ..., 1@ with every other element bumped
-- by @k@ (starting with the first).
bumpDn :: (Int, Int) -> [Int]
bumpDn (k, n) = bumpBy k (reverse [1 .. n - 1])
-- | Build the code by boxing together the bumped descending runs of every
-- (offset, size) pair produced by 'pairs'.
code :: (Int, Int) -> [Int]
code = boxall . map bumpDn . pairs
-- | The (offset, size) pairs for sizes 2 up to @n@, in ascending size order;
-- the offset passed down to size @n-1@ is @k+1@ when @n@ is odd, else 1.
pairs :: (Int,Int) -> [(Int,Int)]
pairs (k, n)
    | n == 1 = []
    | otherwise = pairs (k', n - 1) ++ [(k, n)]
  where
    k' = if odd n then k + 1 else 1
|
sampou-org/pfad
|
Code/Code29_Plan2.hs
|
bsd-3-clause
| 591
| 0
| 8
| 200
| 343
| 194
| 149
| 18
| 2
|
{-|
Module : Idris.WhoCalls
Description : Find function callers and callees.
Copyright :
License : BSD3
Maintainer : The Idris Community.
-}
module Idris.WhoCalls (whoCalls, callsWho) where
import Idris.AbsSyntax
import Idris.Core.CaseTree
import Idris.Core.Evaluate
import Idris.Core.TT
import Data.List (nub)
-- | Does the (global) name @n@ occur in a term?  Bound variables never count.
occurs :: Name -> Term -> Bool
occurs n (P Bound _ _) = False
occurs n (P _ n' _) = n == n'
occurs n (Bind _ b sc) = occursBinder n b || occurs n sc
occurs n (App _ t1 t2) = occurs n t1 || occurs n t2
occurs n (Proj t _) = occurs n t
occurs n _ = False
-- | All (global) names referenced by a term, with duplicates; bound
-- variables are excluded.
names :: Term -> [Name]
names (P Bound _ _) = []
names (P _ n _) = [n]
names (Bind _ b sc) = namesBinder b ++ names sc
names (App _ t1 t2) = names t1 ++ names t2
names (Proj t _) = names t
names _ = []
-- | 'occurs' lifted over a binder: checks the binder's type, plus the bound
-- value for (n)let binders.
occursBinder :: Name -> Binder Term -> Bool
occursBinder n (Let rc ty val) = occurs n ty || occurs n val
occursBinder n (NLet ty val) = occurs n ty || occurs n val
occursBinder n b = occurs n (binderTy b)
-- | 'names' lifted over a binder.
namesBinder :: Binder Term -> [Name]
namesBinder (Let rc ty val) = names ty ++ names val
namesBinder (NLet ty val) = names ty ++ names val
namesBinder b = names (binderTy b)
-- | 'occurs' lifted over a case tree.
occursSC :: Name -> SC -> Bool
occursSC n (Case _ _ alts) = any (occursCaseAlt n) alts
occursSC n (ProjCase t alts) = occurs n t || any (occursCaseAlt n) alts
occursSC n (STerm t) = occurs n t
occursSC n _ = False
-- | 'names' lifted over a case tree.
namesSC :: SC -> [Name]
namesSC (Case _ _ alts) = concatMap namesCaseAlt alts
namesSC (ProjCase t alts) = names t ++ concatMap namesCaseAlt alts
namesSC (STerm t) = names t
namesSC _ = []
-- | 'occurs' lifted over a case alternative; constructor/function names
-- being matched count as occurrences too.
occursCaseAlt :: Name -> CaseAlt -> Bool
occursCaseAlt n (ConCase n' _ _ sc) = n == n' || occursSC n sc
occursCaseAlt n (FnCase n' _ sc) = n == n' || occursSC n sc
occursCaseAlt n (ConstCase _ sc) = occursSC n sc
occursCaseAlt n (SucCase _ sc) = occursSC n sc
occursCaseAlt n (DefaultCase sc) = occursSC n sc
-- | 'names' lifted over a case alternative.
namesCaseAlt :: CaseAlt -> [Name]
namesCaseAlt (ConCase n' _ _ sc) = n' : namesSC sc
namesCaseAlt (FnCase n' _ sc) = n' : namesSC sc
namesCaseAlt (ConstCase _ sc) = namesSC sc
namesCaseAlt (SucCase _ sc) = namesSC sc
namesCaseAlt (DefaultCase sc) = namesSC sc
-- | Does name @n@ occur in a definition (type, body, or its compile-time
-- case tree)?
occursDef :: Name -> Def -> Bool
occursDef n (Function ty tm) = occurs n ty || occurs n tm
occursDef n (TyDecl _ ty) = occurs n ty
occursDef n (Operator ty _ _) = occurs n ty
occursDef n (CaseOp _ ty _ _ _ defs) = occurs n ty || occursSC n (snd (cases_compiletime defs))
-- | All names referenced by a definition.
namesDef :: Def -> [Name]
namesDef (Function ty tm) = names ty ++ names tm
namesDef (TyDecl _ ty) = names ty
namesDef (Operator ty _ _) = names ty
namesDef (CaseOp _ ty _ _ _ defs) = names ty ++ namesSC (snd (cases_compiletime defs))
-- | All definitions (other than @n@ itself) whose type or body mentions @n@.
findOccurs :: Name -> Idris [Name]
findOccurs n = do ctxt <- getContext
                  -- A definition calls a function if the function is in the type or RHS of the definition
                  let defs = (map fst . filter (\(n', def) -> n /= n' && occursDef n def) . ctxtAlist) ctxt
                  -- NOTE(review): the original comment here trailed off
                  -- ("A datatype calls its"); datatype-specific call
                  -- tracking appears unimplemented.
                  return defs
-- | For every definition matching the (possibly ambiguous) name @n@, the
-- deduplicated list of definitions that reference it.
whoCalls :: Name -> Idris [(Name, [Name])]
whoCalls n = do ctxt <- getContext
                let names = lookupNames n ctxt
                    find nm = do ns <- findOccurs nm
                                 return (nm, nub ns)
                mapM find names
-- | For every definition matching @n@, the deduplicated list of names its
-- definition references.
callsWho :: Name -> Idris [(Name, [Name])]
callsWho n = do ctxt <- getContext
                let defs = lookupNameDef n ctxt
                return $ map (\ (n, def) -> (n, nub $ namesDef def)) defs
|
uuhan/Idris-dev
|
src/Idris/WhoCalls.hs
|
bsd-3-clause
| 3,470
| 0
| 18
| 867
| 1,552
| 765
| 787
| 74
| 1
|
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TemplateHaskell #-}
module Juno.Consensus.Handle.Revolution
(handle)
where
import Control.Lens
import Control.Monad.Reader
import Control.Monad.State
import Control.Monad.Writer
import Data.Map (Map)
import qualified Data.Map as Map
import Juno.Consensus.Handle.Types
import Juno.Util.Util (debug, getRevSigOrInvariantError)
import qualified Juno.Types as JT
-- | Read-only snapshot of the Raft state consulted while handling a
-- 'Revolution' message (see 'handle' for how it is populated).
data RevolutionEnv = RevolutionEnv {
    _lazyVote :: Maybe (Term, NodeID, LogIndex) -- Handler
  , _currentLeader :: Maybe NodeID -- Client,Handler,Role
  , _replayMap :: Map (NodeID, Signature) (Maybe CommandResult) -- Handler
  }
makeLenses ''RevolutionEnv
-- | Outcome of 'handleRevolution'; 'handle' maps each constructor to the
-- corresponding state updates.
data RevolutionOut =
    UnknownNode |
    RevolutionCalledOnNonLeader |
    IgnoreLeader
      { _deleteReplayMapEntry :: (NodeID, Signature) } |
    IgnoreLeaderAndClearLazyVote
      { _deleteReplayMapEntry :: (NodeID, Signature) }
-- | Decide how to react to a revolution request.  It is honoured only when
-- it is fresh (not already in the replay map) and names the current leader;
-- the leader is then to be ignored, additionally clearing our lazy vote when
-- that vote was cast for the same leader.
--
-- NOTE(review): a request already present in the replay map is reported as
-- 'UnknownNode', which reads oddly for a replayed message -- confirm intent.
handleRevolution :: (MonadReader RevolutionEnv m, MonadWriter [String] m) => Revolution -> m RevolutionOut
handleRevolution rev@Revolution{..} = do
    currentLeader' <- view currentLeader
    replayMap' <- view replayMap
    let revSig = getRevSigOrInvariantError "handleRevolution" rev
    if Map.notMember (_revClientId, revSig) replayMap'
        then
            case currentLeader' of
                Just l | l == _revLeaderId -> do
                    -- clear our lazy vote if it was for this leader
                    lazyVote' <- view lazyVote
                    case lazyVote' of
                        Just (_, lvid, _) | lvid == _revLeaderId -> return $ IgnoreLeaderAndClearLazyVote (_revClientId, revSig)
                        _ -> return $ IgnoreLeader (_revClientId, revSig)
                _ -> return RevolutionCalledOnNonLeader
        else return UnknownNode
-- | Run 'handleRevolution' against the live Raft state, emit its debug log,
-- and apply the resulting state updates.
handle :: Monad m => Revolution -> JT.Raft m ()
handle msg = do
    s <- get
    (out,l) <- runReaderT (runWriterT (handleRevolution msg)) $
        RevolutionEnv
            (JT._lazyVote s)
            (JT._currentLeader s)
            (JT._replayMap s)
    mapM_ debug l
    case out of
        UnknownNode -> return ()
        RevolutionCalledOnNonLeader -> return ()
        IgnoreLeader{..} -> do
            -- Record the request in the replay map and start ignoring the leader.
            JT.replayMap %= Map.insert _deleteReplayMapEntry Nothing
            JT.ignoreLeader .= True
        IgnoreLeaderAndClearLazyVote{..} -> do
            JT.replayMap %= Map.insert _deleteReplayMapEntry Nothing
            JT.lazyVote .= Nothing
            JT.ignoreLeader .= True
|
haroldcarr/juno
|
src/Juno/Consensus/Handle/Revolution.hs
|
bsd-3-clause
| 2,386
| 0
| 20
| 508
| 645
| 339
| 306
| 59
| 4
|
{-# LANGUAGE ConstraintKinds, FlexibleContexts, RankNTypes, OverloadedStrings #-}
{-# LANGUAGE UndecidableInstances, ScopedTypeVariables, AllowAmbiguousTypes #-}
-----------------------------------------------------------------------------------------------------
---------------------------------------------------------------------------------------------------
-- |
-- | Module : Compile string patterns into Attoparsec parsers
-- | Author : Xiao Ling
-- | Date : 9/7/2016
-- |
---------------------------------------------------------------------------------------------------
---------------------------------------------------------------------------------------------------
module PatternCompiler (
Pattern
, Token (..)
, PInput (S, Star)
, compile
, compile'
, token
, tokenizer
, (<**)
, name
) where
import Data.List.Split
import Data.Attoparsec.Text
import Data.Text (Text, unpack, pack)
import Control.Applicative
import Parsers
{-----------------------------------------------------------------------------
Tokenization and Parsing
------------------------------------------------------------------------------}
-- | Lexical tokens of the pattern language.
data Token = Word String    -- ^ a literal word
           | Hole           -- ^ @*@ -- a slot filled by pattern input
           | Slash
           | Comma          -- ^ a mandatory comma
           | OptComma       -- ^ @(,)@ -- an optional comma
           | Or Token Token -- ^ alternatives, from @a/b@ syntax
    deriving (Eq, Show)
-- * Input into Pattern
data PInput = S String -- * Pattern match some Text
            | Star     -- * Pattern match any string of alphabetical symbol
            | Nil      -- * trivial PInput so that
                       -- * compile "fillod" Nil Nil = word "fillod"
    deriving (Eq, Show)
-- * A pattern of form `R * *` relates two
-- * strings `*` with some relation `R`
type Pattern = PInput -> PInput -> Parser Text
{-----------------------------------------------------------------------------
    Top level function
------------------------------------------------------------------------------}
-- * given an expression, output pattern
-- | Compile a pattern expression (e.g. @"better * than *"@) into a binary
-- 'Pattern' by tokenizing the string and compiling the token stream.
compile :: String -> Pattern
compile = compiler . tokenizer
-- * compile a string into a parser, the pattern described by the string
-- * does not have any `Hole`s in it
-- * Note: compile' xs = word xs
-- *       so that compile' "*" = pzero, it is the identity pattern
-- (both 'Hole' slots are filled with the trivial input 'Nil')
compile' :: String -> Parser Text
compile' xs = compile xs Nil Nil
{-----------------------------------------------------------------------------
Tokenizer
------------------------------------------------------------------------------}
-- * maps a string to some set of tokens
-- | Split a pattern expression on spaces, re-separate any trailing commas
-- (see 'recoverComma'), and map each fragment to a 'Token'.
tokenizer :: String -> [Token]
-- concatMap/map replace the `concat . fmap`/`fmap` chain (same behavior,
-- standard list idiom).
tokenizer = map token . concatMap recoverComma . splitOn " "
-- * `token`ize a string
-- * note if `token` sees a string `xs` it does not recognize,
-- * it just outputs a `Word xs`
-- * TODO: quick and dirty here, consider doing something real
token :: String -> Token
token "*"   = Hole
token ","   = Comma
token "(,)" = OptComma
token xs    = case splitOn "/" xs of
    -- NOTE(review): `splitOn` always returns a non-empty list, so the
    -- fallback branch below is unreachable in practice.
    y:ys -> Word (stripParens y) `catOr` ys
    _    -> Word $ stripParens xs
-- | Fold the remaining @/@-separated alternatives onto a seed token with
-- 'Or'; each alternative has its parentheses stripped and is wrapped in
-- 'Word'.  Note the foldr places the seed innermost, e.g.
-- @catOr t [a,b] = (t `Or` Word b) `Or` Word a@.
catOr :: Token -> [String] -> Token
catOr t = foldr (\x ts -> ts `Or` Word (stripParens x)) t
{-----------------------------------------------------------------------------
    Compiler
------------------------------------------------------------------------------}
-- | Compile a token stream into a binary 'Pattern'.  The two eventual
-- 'PInput' arguments are pushed onto a stack (in order) and consumed by
-- 'fill' as it walks the tokens, plugging each 'Hole'.
-- Note: by construction this fails with the identity parser under (<+>).
compiler :: [Token] -> Pattern
compiler ts u v = fill [u, v] ts
-- * Given stack of strings `w:ws` as eventual input to the
-- * binary pattern, and list of tokens `t:ts`,
-- * create a `Parser Text` by `fill`ing in all the `Hole`s
-- * If the stack is empty before tokens are, then all remaining
-- * `Hole` tokens are mapped to parser `star`
-- NOTE: clause order matters — the Hole/stack clause must be tried first;
-- once the stack is exhausted, Holes fall through to the last clause and
-- are rendered by `toP Hole = star`.
fill :: [PInput] -> [Token] -> Parser Text
fill (i:is) (Hole:ts) = case i of
    S w  -> word w <+> fill is ts   -- plug the hole with a literal word
    Star -> star   <+> fill is ts   -- plug with the any-word parser
    Nil  -> pzero  <+> fill is ts   -- trivial input: hole contributes nothing
fill _ []      = pzero              -- no tokens left: terminate the chain
fill is (t:ts) = toP t <+> fill is ts
-- * convert token to parser, note `Hole` is sent to `star` which accept
-- * any string of alphabetical symbols
toP :: Token -> Parser Text
toP (Word xs)  = word xs
toP Hole       = star
toP Slash      = pzero        -- Slash never survives tokenization on its own
toP OptComma   = comma        -- optional-comma parser from Parsers
toP Comma      = word ","
toP (Or t1 t2) = toP t1 <|> toP t2
{-----------------------------------------------------------------------------
    Utils
------------------------------------------------------------------------------}
-- | Detach a trailing comma from a word so it becomes its own fragment:
-- @recoverComma "foo," == ["foo", ","]@; any other word (or the empty
-- string) is passed through unchanged.
recoverComma :: String -> [String]
recoverComma xs = case reverse xs of
    []         -> []
    ',' : rest -> [reverse rest, ","]
    _          -> [xs]
-- | Aggressively drop every occurrence of @'('@ and @')'@ from a string.
stripParens :: String -> String
stripParens = filter (\c -> c /= '(' && c /= ')')
|
lingxiao/GoodGreatIntensity
|
lib/PatternCompiler.hs
|
bsd-3-clause
| 4,996
| 0
| 11
| 1,107
| 835
| 461
| 374
| 72
| 3
|
{-# LANGUAGE ImplicitParams, FlexibleContexts #-}
module Frontend.TypeValidate (validateTypeSpec,
validateTypeSpec2,
validateTypeDeps) where
import Control.Monad.Error
import Data.List
import TSLUtil
import Pos
import Frontend.NS
import Frontend.Spec
import Frontend.Type
import Frontend.TypeOps
import Frontend.ExprOps
import Frontend.ExprValidate
---------------------------------------------------------------------
-- Validate individual TypeSpec
---------------------------------------------------------------------
-- | First validation pass over a 'TypeSpec' in scope @sc@: checks field
-- name uniqueness and recursively validates component types.  Array
-- sizes are deliberately not checked here — see 'validateTypeSpec2'.
validateTypeSpec :: (?spec::Spec, MonadError String me) => Scope -> TypeSpec -> me ()
-- * Struct fields must have unique names and valid types
validateTypeSpec sc (StructSpec _ fs) = do
    uniqNames (\n -> "Field " ++ n ++ " declared multiple times ") fs
    _ <- mapM (validateTypeSpec sc . tspec) fs
    return ()
validateTypeSpec sc (ArraySpec _ t _)  = validateTypeSpec sc t
validateTypeSpec sc (VarArraySpec _ t) = validateTypeSpec sc t
validateTypeSpec sc (PtrSpec _ t)      = validateTypeSpec sc t
validateTypeSpec sc (SeqSpec _ t)      = validateTypeSpec sc t
-- * user-defined type names refer to valid types
validateTypeSpec sc (UserTypeSpec _ n) = do {checkTypeDecl sc n; return ()}
-- all remaining type specs (scalars etc.) are trivially valid
validateTypeSpec _ _ = return ()
-- Second pass: validate array sizes
-- | Second validation pass: array length expressions must be constant,
-- integer, and non-negative; nested sequences are rejected.  Runs after
-- 'validateTypeSpec' so component types are already known to be valid.
validateTypeSpec2 :: (?spec::Spec, MonadError String me) => Scope -> TypeSpec -> me ()
validateTypeSpec2 s (ArraySpec _ t l) = do
    -- bind the implicit scope/override params expected by validateExpr'
    let ?scope = s
        ?privoverride = False
    validateExpr' l
    assert (isConstExpr l) (pos l) $ "Array length must be a constant expression"
    assert (isInt $ exprType l) (pos l) $ "Array length must be an integer expression"
    assert (evalInt l >= 0) (pos l) $ "Array length must be non-negative"
    validateTypeSpec2 s t
validateTypeSpec2 s (VarArraySpec _ t) = validateTypeSpec2 s t
validateTypeSpec2 s (StructSpec _ fs) = do
    _ <- mapM (validateTypeSpec2 s . tspec) fs
    return ()
validateTypeSpec2 s (PtrSpec _ t) = validateTypeSpec2 s t
validateTypeSpec2 s (SeqSpec _ t) = do validateTypeSpec2 s t
                                       assert (isSequence (Type s t)) (pos t) $ "Sequence of sequences is not allowed. Possible solution: embed the nested sequence in a struct"
validateTypeSpec2 _ _ = return ()
---------------------------------------------------------------------
-- Check that the graph of dependencies among TypeDecl's is acyclic
---------------------------------------------------------------------
-- | Reject cyclic type aggregation: if the declaration dependency graph
-- contains a cycle, report it (with positions) as an error.
validateTypeDeps :: (?spec::Spec, MonadError String me) => me ()
validateTypeDeps =
    case grCycle tdeclGraph of
         Nothing -> return ()
         Just c  -> err (pos $ snd $ head c) $ "Cyclic type aggregation: " ++ (intercalate "->" $ map (show . snd) c)
|
termite2/tsl
|
Frontend/TypeValidate.hs
|
bsd-3-clause
| 2,797
| 0
| 13
| 565
| 763
| 382
| 381
| 47
| 2
|
{-# LANGUAGE DeriveGeneric #-}
module SizeOfSpec(spec) where
import Test.Hspec
import GHC.Generics(Generic)
import Pact.Types.SizeOf
-- Testing whether derived instance for empty constructors is 1 word
-- | Fixture types exercising the Generic-derived 'SizeOf':
-- 'A' has only nullary constructors, 'B' has fields of increasing arity,
-- and 'C' checks the parametric (constrained) instance.
data A = A1 | A2 deriving (Eq, Show, Generic)
data B = B1 Int | B2 Int Bool | B3 Int Bool A deriving (Eq, Show, Generic)
data C a = C a deriving (Eq, Show, Generic)
instance SizeOf A
instance SizeOf B
instance SizeOf a => SizeOf (C a)
spec :: Spec
-- | Checks the cost model of the Generic-derived 'SizeOf' instances:
-- nullary constructors cost one word; constructors with fields cost the
-- sum of the field sizes plus a per-arity constructor cost.
spec = describe "SizeOf generics conform to specification" $ do
  it "Costs only one word for shared data types" $ do
    sizeOf A1 `shouldBe` wordSize
    sizeOf A2 `shouldBe` wordSize
  it "Costs the constructor size + 1 word per field" $ do
    sizeOf (B1 0) `shouldBe` (sizeOf (0::Int) + constructorCost 1)
    sizeOf (B2 0 True) `shouldBe` (sizeOf (0::Int) + sizeOf True + constructorCost 2)
    let b3Cost = sizeOf (0::Int) + sizeOf True + sizeOf A1 + constructorCost 3
    sizeOf (B3 0 True A1) `shouldBe` b3Cost
  it "Works with parametrically defined instances" $ do
    sizeOf (C (B1 0)) `shouldBe` (sizeOf (B1 0) + constructorCost 1)
|
kadena-io/pact
|
tests/SizeOfSpec.hs
|
bsd-3-clause
| 1,134
| 0
| 18
| 254
| 414
| 211
| 203
| 23
| 1
|
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE TemplateHaskell #-}
-- | Types related to parsing from 'Exp' to 'Prop' and 'Invariant'.
module Pact.Analyze.Parse.Types where
import Control.Applicative (Alternative)
import Control.Lens (makeLenses, (<&>))
import Control.Monad.Except (MonadError (throwError))
import Control.Monad.Reader (ReaderT)
import Control.Monad.State.Strict (StateT)
import qualified Data.HashMap.Strict as HM
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Text (Text)
import qualified Data.Text as T
import Prelude hiding (exp)
import Pact.Types.Lang (AtomExp (..),
Exp (EAtom, ELiteral, ESeparator),
ListDelimiter (..), ListExp (..),
Literal (LString), LiteralExp (..),
Separator (..), SeparatorExp (..))
import qualified Pact.Types.Lang as Pact
import Pact.Types.Typecheck (UserType)
import Pact.Types.Pretty
import Pact.Analyze.Feature hiding (Doc, Type, Var, ks, obj,
str)
import Pact.Analyze.Types
-- @PreProp@ stands between @Exp@ and @Prop@.
--
-- The conversion from @Exp@ is light, handled in @expToPreProp@.
data PreProp
  -- literals
  = PreIntegerLit Integer
  | PreStringLit  Text
  | PreDecimalLit Decimal
  | PreTimeLit    Time
  | PreBoolLit    Bool
  | PreListLit    [PreProp]
  -- identifiers
  | PreAbort
  | PreSuccess
  | PreGovPasses
  | PreResult
  -- In conversion from @Exp@ to @PreProp@ we maintain a distinction between
  -- bound and unbound variables. Bound (@PreVar@) variables are bound inside
  -- quantifiers. Unbound (@PreGlobalVar@) variables either refer to a property
  -- definition or a table.
  | PreVar       VarId Text
  | PreGlobalVar Text
  -- quantifiers (the 'VarId' identifies the bound variable)
  | PreForall VarId Text QType PreProp
  | PreExists VarId Text QType PreProp
  -- applications
  | PreApp Text [PreProp]                 -- named function/operator application
  | PreAt PreProp PreProp                 -- object projection (index, object)
  | PrePropRead PreProp PreProp PreProp   -- table read (table, row key, before/after)
  | PreLiteralObject (Map Text PreProp)   -- literal object, keyed by field name
  deriving (Eq, Show)
-- | Collect every unbound (global) variable name mentioned anywhere in a
-- 'PreProp', recursing through lists, applications, projections, reads,
-- literal objects, and quantifier bodies.
prePropGlobals :: PreProp -> Set Text
prePropGlobals prop = case prop of
  PreGlobalVar name     -> Set.singleton name
  PreListLit props      -> gatherAll props
  PreApp _ props        -> gatherAll props
  PreAt p1 p2           -> gatherAll [p1, p2]
  PrePropRead p1 p2 p3  -> gatherAll [p1, p2, p3]
  PreLiteralObject obj  -> gatherAll (Map.elems obj)
  PreForall _ _ _ body  -> prePropGlobals body
  PreExists _ _ _ body  -> prePropGlobals body
  _                     -> Set.empty
  where
    gatherAll = Set.unions . map prePropGlobals
-- | Render a 'PreProp' back to the concrete (lisp-like) property syntax;
-- applications and quantifiers are fully parenthesized.
instance Pretty PreProp where
  pretty = \case
    PreIntegerLit i   -> pretty i
    PreStringLit t    -> dquotes $ pretty t
    PreDecimalLit d   -> pretty d
    -- time literals reuse Pact's own literal rendering
    PreTimeLit t      -> pretty (Pact.LTime (toPact timeIso t))
    PreBoolLit True   -> "true"
    PreBoolLit False  -> "false"
    PreListLit lst    -> commaBrackets $ fmap pretty lst
    PreAbort          -> pretty STransactionAborts
    PreSuccess        -> pretty STransactionSucceeds
    PreGovPasses      -> pretty SGovernancePasses
    PreResult         -> pretty SFunctionResult
    PreVar _id name   -> pretty name
    PreGlobalVar name -> pretty name
    PreForall _vid name qty prop ->
      "(" <> pretty SUniversalQuantification <> " (" <> pretty name <> ":" <>
        pretty qty <> ") " <> pretty prop <> ")"
    PreExists _vid name qty prop ->
      "(" <> pretty SExistentialQuantification <> " (" <> pretty name <> ":" <>
        pretty qty <> ") " <> pretty prop <> ")"
    PreApp name applicands ->
      "(" <> pretty name <> " " <> hsep (map pretty applicands) <> ")"
    PreAt objIx obj ->
      "(" <> pretty SObjectProjection <> " " <> pretty objIx <> " " <>
        pretty obj <> ")"
    PrePropRead tn rk ba ->
      "(" <> pretty SPropRead <> " '" <> pretty tn <> " " <> pretty rk <> " " <>
        pretty ba <> ")"
    -- objects render as { k := v, ... }
    PreLiteralObject obj -> commaBraces $ Map.toList obj <&> \(k, v) ->
      pretty k <> " := " <> pretty v
-- | Throw a 'Text' message as a 'String' error.
throwErrorT :: MonadError String m => Text -> m a
throwErrorT = throwError . T.unpack
-- | Throw a pretty-printed 'Doc' as a 'String' error.
throwErrorD :: MonadError String m => Doc -> m a
throwErrorD = throwError . renderCompactString'
-- TODO(joel): add location info
-- | Throw an error message prefixed with the expression it occurred in.
throwErrorIn :: (MonadError String m, Pretty a) => a -> Doc -> m b
throwErrorIn exp msg = throwError $ renderCompactString' $
  "in " <> pretty exp <> ", " <> msg
-- | Map a quantifier keyword to its 'PreProp' constructor, if any.
textToQuantifier
  :: Text -> Maybe (VarId -> Text -> QType -> PreProp -> PreProp)
textToQuantifier = \case
  SUniversalQuantification   -> Just PreForall
  SExistentialQuantification -> Just PreExists
  _                          -> Nothing
-- | Column types of every table, used to type property reads.
type TableEnv = TableMap (ColumnMap EType)
-- | Environment for checking a parsed property.
data PropCheckEnv = PropCheckEnv
  { _varTys            :: Map VarId QType   -- ^ types of bound variables
  , _tableEnv          :: TableEnv          -- ^ table/column schema
  , _quantifiedTables  :: Set TableName     -- ^ tables bound by quantifiers
  , _quantifiedColumns :: Set ColumnName    -- ^ columns bound by quantifiers
  -- User-defined properties
  , _definedProps :: HM.HashMap Text (DefinedProperty PreProp)
  -- Vars bound within a user-defined property
  , _localVars :: HM.HashMap Text EProp
  }
-- | 'Either' wrapped to pick up an 'Alternative'/'MonadError' stack via
-- newtype deriving.
newtype EitherFail e a = EitherFail { _getEither :: Either e a }
  deriving (Show, Eq, Ord, Functor, Applicative, Alternative, Monad, MonadError e)
type ParseEnv = Map Text VarId
-- parsing allocates fresh VarIds in State; checking reads the env above
type PropParse = ReaderT ParseEnv (StateT VarId (Either String))
type PropCheck = ReaderT PropCheckEnv (EitherFail String)
type InvariantParse = ReaderT [(Pact.Arg UserType, VarId)] (Either String)
makeLenses ''PropCheckEnv
-- Pattern synonyms for the Pact 'Exp' shapes the parsers match on.
pattern ParenList :: [Exp t] -> Exp t
pattern ParenList elems <- Pact.EList (ListExp elems Parens _i)
pattern BraceList :: [Exp t] -> Exp t
pattern BraceList elems <- Pact.EList (ListExp elems Braces _i)
pattern SquareList :: [Exp t] -> Exp t
pattern SquareList elems <- Pact.EList (ListExp elems Brackets _i)
pattern EAtom' :: Text -> Exp t
pattern EAtom' name <- EAtom (AtomExp name [] False _i)
pattern ELiteral' :: Literal -> Exp t
pattern ELiteral' lit <- ELiteral (LiteralExp lit _i)
pattern EStrLiteral' :: Text -> Exp t
pattern EStrLiteral' lit <- ELiteral (LiteralExp (LString lit) _i)
pattern Colon' :: Exp t
pattern Colon' <- ESeparator (SeparatorExp Colon _i)
|
kadena-io/pact
|
src-tool/Pact/Analyze/Parse/Types.hs
|
bsd-3-clause
| 6,977
| 0
| 17
| 1,961
| 1,830
| 966
| 864
| 137
| 9
|
{-# LANGUAGE Haskell98 #-}
{-# LINE 1 "Data/Map.hs" #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE Safe #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Map
-- Copyright : (c) Daan Leijen 2002
-- (c) Andriy Palamarchuk 2008
-- License : BSD-style
-- Maintainer : libraries@haskell.org
-- Stability : provisional
-- Portability : portable
--
-- /Note:/ You should use "Data.Map.Strict" instead of this module if:
--
-- * You will eventually need all the values stored.
--
-- * The stored values don't represent large virtual data structures
-- to be lazily computed.
--
-- An efficient implementation of ordered maps from keys to values
-- (dictionaries).
--
-- These modules are intended to be imported qualified, to avoid name
-- clashes with Prelude functions, e.g.
--
-- > import qualified Data.Map as Map
--
-- The implementation of 'Map' is based on /size balanced/ binary trees (or
-- trees of /bounded balance/) as described by:
--
-- * Stephen Adams, \"/Efficient sets: a balancing act/\",
-- Journal of Functional Programming 3(4):553-562, October 1993,
-- <http://www.swiss.ai.mit.edu/~adams/BB/>.
--
-- * J. Nievergelt and E.M. Reingold,
-- \"/Binary search trees of bounded balance/\",
-- SIAM journal of computing 2(1), March 1973.
--
-- Note that the implementation is /left-biased/ -- the elements of a
-- first argument are always preferred to the second, for example in
-- 'union' or 'insert'.
--
-- /Warning/: The size of the map must not exceed @maxBound::Int@. Violation of
-- this condition is not detected and if the size limit is exceeded, its
-- behaviour is undefined.
--
-- Operation comments contain the operation time complexity in
-- the Big-O notation (<http://en.wikipedia.org/wiki/Big_O_notation>).
-----------------------------------------------------------------------------
module Data.Map
( module Data.Map.Lazy
, insertWith'
, insertWithKey'
, insertLookupWithKey'
, fold
, foldWithKey
) where
import Prelude hiding (foldr)
import Data.Map.Lazy
import qualified Data.Map.Strict as Strict
-- | /Deprecated./ As of version 0.5, replaced by 'Data.Map.Strict.insertWith'.
--
-- /O(log n)/. Same as 'insertWith', but the value being inserted to the map is
-- evaluated to WHNF beforehand.
--
-- For example, to update a counter:
--
-- > insertWith' (+) k 1 m
--
insertWith' :: Ord k => (a -> a -> a) -> k -> a -> Map k a -> Map k a
insertWith' = Strict.insertWith
{-# INLINABLE insertWith' #-}
-- | /Deprecated./ As of version 0.5, replaced by
-- 'Data.Map.Strict.insertWithKey'.
--
-- /O(log n)/. Same as 'insertWithKey', but the value being inserted to the map is
-- evaluated to WHNF beforehand.
insertWithKey' :: Ord k => (k -> a -> a -> a) -> k -> a -> Map k a -> Map k a
-- We do not reuse Data.Map.Strict.insertWithKey, because it is stricter -- it
-- forces evaluation of the given value.
-- NOTE(review): the note above contradicts the definition below, which
-- does reuse the strict version; the comment appears stale — confirm
-- intended strictness against upstream containers.
insertWithKey' = Strict.insertWithKey
{-# INLINABLE insertWithKey' #-}
-- | /Deprecated./ As of version 0.5, replaced by
-- 'Data.Map.Strict.insertLookupWithKey'.
--
-- /O(log n)/. Same as 'insertLookupWithKey', but the value being inserted to
-- the map is evaluated to WHNF beforehand.
insertLookupWithKey' :: Ord k => (k -> a -> a -> a) -> k -> a -> Map k a
                     -> (Maybe a, Map k a)
-- We do not reuse Data.Map.Strict.insertLookupWithKey, because it is stricter -- it
-- forces evaluation of the given value.
-- NOTE(review): stale comment, see insertWithKey' above — the strict
-- version is in fact reused here.
insertLookupWithKey' = Strict.insertLookupWithKey
{-# INLINABLE insertLookupWithKey' #-}
-- | /Deprecated./ As of version 0.5, replaced by 'foldr'.
--
-- /O(n)/. Fold the values in the map using the given right-associative
-- binary operator. This function is an equivalent of 'foldr' and is present
-- for compatibility only.
fold :: (a -> b -> b) -> b -> Map k a -> b
fold = foldr
{-# INLINE fold #-}
-- | /Deprecated./ As of version 0.4, replaced by 'foldrWithKey'.
--
-- /O(n)/. Fold the keys and values in the map using the given right-associative
-- binary operator. This function is an equivalent of 'foldrWithKey' and is present
-- for compatibility only.
foldWithKey :: (k -> a -> b -> b) -> b -> Map k a -> b
foldWithKey = foldrWithKey
{-# INLINE foldWithKey #-}
|
phischu/fragnix
|
benchmarks/containers/Data.Map.hs
|
bsd-3-clause
| 4,671
| 0
| 11
| 1,188
| 408
| 261
| 147
| 27
| 1
|
{-
Copyright (C) 2009 Gwern Branwen <gwern0@gmail.com> and
John MacFarlane <jgm@berkeley.edu>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
-- | Functions for creating Atom feeds for Gitit wikis and pages.
module Network.Gitit.Feed (FeedConfig(..), filestoreToXmlFeed) where
import Data.Time (UTCTime, formatTime, getCurrentTime, addUTCTime)
#if MIN_VERSION_time(1,5,0)
import Data.Time (defaultTimeLocale)
#else
import System.Locale (defaultTimeLocale)
#endif
import Data.Foldable as F (concatMap)
import Data.List (intercalate, sortBy, nub)
import Data.Maybe (fromMaybe)
import Data.Ord (comparing)
import Network.URI (isUnescapedInURI, escapeURIString)
import System.FilePath (dropExtension, takeExtension, (<.>))
import Data.FileStore.Generic (Diff(..), diff)
import Data.FileStore.Types (history, retrieve, Author(authorName), Change(..),
FileStore, Revision(..), TimeRange(..), RevisionId)
import Text.Atom.Feed (nullEntry, nullFeed, nullLink, nullPerson,
Date, Entry(..), Feed(..), Link(linkRel), Generator(..),
Person(personName), EntryContent(..), TextContent(TextString))
import Text.Atom.Feed.Export (xmlFeed)
import Text.XML.Light (ppTopElement, showContent, Content(..), Element(..), blank_element, QName(..), blank_name, CData(..), blank_cdata)
import Data.Version (showVersion)
import Paths_gitit (version)
-- | Static configuration for feed generation.
data FeedConfig = FeedConfig {
    fcTitle    :: String     -- ^ feed title
  , fcBaseUrl  :: String     -- ^ base URL of the wiki, without trailing slash
  , fcFeedDays :: Integer    -- ^ how many days of history to include
  } deriving (Read, Show)
-- | Atom generator element identifying gitit and its version.
gititGenerator :: Generator
gititGenerator = Generator {genURI = Just "http://github.com/jgm/gitit"
                           , genVersion = Just (showVersion version)
                           , genText = "gitit"}
-- | Build a feed for the whole wiki (or a single page) and render it as XML.
filestoreToXmlFeed :: FeedConfig -> FileStore -> Maybe FilePath -> IO String
filestoreToXmlFeed cfg f = fmap xmlFeedToString . generateFeed cfg gititGenerator f
xmlFeedToString :: Feed -> String
xmlFeedToString = ppTopElement . xmlFeed
-- | Assemble the Atom feed: recent revisions, their diffs, and the authors.
generateFeed :: FeedConfig -> Generator -> FileStore -> Maybe FilePath -> IO Feed
generateFeed cfg generator fs mbPath = do
  now <- getCurrentTime
  revs <- changeLog (fcFeedDays cfg) fs mbPath now
  diffs <- mapM (getDiffs fs) revs
  let home = fcBaseUrl cfg ++ "/"
      -- TODO: 'nub . sort' `persons` - but no Eq or Ord instances!
      persons = map authorToPerson $ nub $ sortBy (comparing authorName) $ map revAuthor revs
      basefeed = generateEmptyfeed generator (fcTitle cfg) home mbPath persons (formatFeedTime now)
      revisions = map (revisionToEntry home) (zip revs diffs)
  return basefeed {feedEntries = revisions}
-- | Get the last N days history.
-- Returns revisions newest-first; when a path is given, both the bare
-- path and its @.page@ variant are queried.
changeLog :: Integer -> FileStore -> Maybe FilePath -> UTCTime -> IO [Revision]
changeLog days a mbPath now' = do
  let files = F.concatMap (\f -> [f, f <.> "page"]) mbPath
  let startTime = addUTCTime (fromIntegral $ -60 * 60 * 24 * days) now'
  rs <- history a files TimeRange{timeFrom = Just startTime, timeTo = Just now'}
          (Just 200) -- hard limit of 200 to conserve resources
  return $ sortBy (flip $ comparing revDateTime) rs
-- | Compute, for one revision, a per-file diff against the immediately
-- preceding revision (if any).  Files ending in @.page@ have the
-- extension stripped and their diff kept; other files keep an empty diff.
getDiffs :: FileStore -> Revision -> IO [(FilePath, [Diff [String]])]
getDiffs fs Revision{ revId = to, revDateTime = rd, revChanges = rv } = do
  -- fetch up to two revisions ending at this one; the second (if present)
  -- is the parent to diff against
  revPair <- history fs [] (TimeRange Nothing $ Just rd) (Just 2)
  let from = if length revPair >= 2
                then Just $ revId $ revPair !! 1
                else Nothing
  diffs <- mapM (getDiff fs from (Just to)) rv
  return $ map filterPages $ zip (map getFP rv) diffs
  where getFP (Added fp) = fp
        getFP (Modified fp) = fp
        getFP (Deleted fp) = fp
        -- matches a reversed ".page" suffix ("egap." backwards); pages
        -- keep their diff (with the suffix dropped), other files get []
        filterPages (fp, d) = case (reverse fp) of
                                'e':'g':'a':'p':'.':x -> (reverse x, d)
                                _ -> (fp, [])
-- | Diff a single change: deletions show the old content as removed,
-- additions show the new content as added, modifications are a real diff.
getDiff :: FileStore -> Maybe RevisionId -> Maybe RevisionId -> Change -> IO [Diff [String]]
getDiff fs from _ (Deleted fp) = do
  contents <- retrieve fs fp from
  return [First $ lines contents]   -- whole file counted as removed lines
getDiff fs from to (Modified fp) = diff fs fp from to
getDiff fs _ to (Added fp) = do
  contents <- retrieve fs fp to
  return [Second $ lines contents]  -- whole file counted as added lines
-- | Build the feed skeleton (authors, generator, self link) with no entries.
generateEmptyfeed :: Generator -> String ->String ->Maybe String -> [Person] -> Date -> Feed
generateEmptyfeed generator title home mbPath authors now =
  baseNull {feedAuthors = authors,
            feedGenerator = Just generator,
            -- self link points at the feed endpoint for this path (if any)
            feedLinks = [ (nullLink $ home ++ "_feed/" ++ escape (fromMaybe "" mbPath))
                           {linkRel = Just (Left "self")}]
            }
  where baseNull = nullFeed home (TextString title) now
-- | Render one revision (plus its computed diffs) as an Atom entry whose
-- content is the HTML rendering of the diffs.
-- NOTE(review): `head rv` below is partial — a revision whose change list
-- is empty would crash here; confirm the filestore guarantees non-empty
-- revChanges.
revisionToEntry :: String -> (Revision, [(FilePath, [Diff [String]])]) -> Entry
revisionToEntry home (Revision{ revId = rid, revDateTime = rdt,
                                revAuthor = ra, revDescription = rd,
                                revChanges = rv}, diffs) =
  baseEntry{ entryContent = Just $ HTMLContent $ concat $ map showContent $ map diffFile diffs
           , entryAuthors = [authorToPerson ra], entryLinks = [ln] }
  where baseEntry = nullEntry url title (formatFeedTime rdt)
        url = home ++ escape (extract $ head rv) ++ "?revision=" ++ rid
        ln = (nullLink url) {linkRel = Just (Left "alternate")}
        -- title: first line of the commit description, then the changed files
        title = TextString $ (takeWhile ('\n' /=) rd) ++ " - " ++ (intercalate ", " $ map show rv)
-- | Render one file's diff as a @div@: an @h1@ holding the file path
-- followed by one @p@ element per rendered diff line.
diffFile :: (FilePath, [Diff [String]]) -> Content
diffFile (fp, d) =
  enTag "div" $ header : text
  where
    -- heading with the file path
    header = enTag1 "h1" $ enText fp
    -- one paragraph per diff line; concatMap replaces `concat $ map`
    text = map (enTag1 "p") $ concatMap diffLines d
-- | Render diff hunks: removed lines struck through (@s@), added lines
-- bold (@b@), unchanged lines as plain text.
diffLines :: Diff [String] -> [Content]
diffLines (First x) = map (enTag1 "s" . enText) x
diffLines (Second x) = map (enTag1 "b" . enText) x
diffLines (Both x _) = map enText x
-- | Wrap contents in an XML element with the given tag name.
enTag :: String -> [Content] -> Content
enTag tag content = Elem blank_element{ elName=blank_name{qName=tag}
                                      , elContent=content
                                      }
-- | 'enTag' for a single child.
enTag1 :: String -> Content -> Content
enTag1 tag content = enTag tag [content]
-- | Wrap a string as XML character data.
enText :: String -> Content
enText content = Text blank_cdata{cdData=content}
-- gitit is set up not to reveal registration emails
authorToPerson :: Author -> Person
authorToPerson ra = nullPerson {personName = authorName ra}
-- TODO: replace with Network.URI version of shortcut if it ever is added
-- | Percent-encode characters that are not allowed raw in a URI.
escape :: String -> String
escape = escapeURIString isUnescapedInURI
-- | Format a timestamp in the RFC 3339 form Atom expects (UTC "Z" suffix).
formatFeedTime :: UTCTime -> String
formatFeedTime = formatTime defaultTimeLocale "%FT%TZ"
-- TODO: this boilerplate can be removed by changing Data.FileStore.Types to say
-- data Change = Modified {extract :: FilePath} | Deleted {extract :: FilePath} | Added
-- {extract :: FilePath}
-- so then it would be just 'escape (extract $ head rv)' without the 4 line definition
-- | Pull the file path out of a 'Change', dropping a ".page" extension.
extract :: Change -> FilePath
extract x = dePage $ case x of {Modified n -> n; Deleted n -> n; Added n -> n}
  where dePage f = if takeExtension f == ".page" then dropExtension f else f
-- TODO: figure out how to create diff links in a non-broken manner
{-
diff :: String -> String -> Revision -> Link
diff home path' Revision{revId = rid} =
let n = nullLink (home ++ "_diff/" ++ escape path' ++ "?to=" ++ rid) -- ++ fromrev)
in n {linkRel = Just (Left "alternate")}
-}
|
cleichner/gitit
|
src/Network/Gitit/Feed.hs
|
gpl-2.0
| 7,895
| 0
| 16
| 1,792
| 2,219
| 1,200
| 1,019
| 113
| 5
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.SWF.RegisterDomain
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Registers a new domain.
--
-- Access Control
--
-- You can use IAM policies to control this action's access to Amazon SWF
-- resources as follows:
--
-- You cannot use an IAM policy to control domain access for this action. The
-- name of the domain being registered is available as the resource of this
-- action. Use an 'Action' element to allow or deny permission to call this action.
-- You cannot use an IAM policy to constrain this action's parameters. If the
-- caller does not have sufficient permissions to invoke the action, or the
-- parameter values fall outside the specified constraints, the action fails.
-- The associated event attribute's cause parameter will be set to
-- OPERATION_NOT_PERMITTED. For details and example IAM policies, see <http://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html Using IAMto Manage Access to Amazon SWF Workflows>.
--
-- <http://docs.aws.amazon.com/amazonswf/latest/apireference/API_RegisterDomain.html>
module Network.AWS.SWF.RegisterDomain
(
-- * Request
RegisterDomain
-- ** Request constructor
, registerDomain
-- ** Request lenses
, rdDescription
, rdName
, rdWorkflowExecutionRetentionPeriodInDays
-- * Response
, RegisterDomainResponse
-- ** Response constructor
, registerDomainResponse
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.SWF.Types
import qualified GHC.Exts
-- | Request payload for the SWF RegisterDomain call.
data RegisterDomain = RegisterDomain
    { _rdDescription                             :: Maybe Text  -- ^ optional free-text description
    , _rdName                                    :: Text        -- ^ domain name, unique per region
    , _rdWorkflowExecutionRetentionPeriodInDays  :: Text        -- ^ retention period in days (or "NONE")
    } deriving (Eq, Ord, Read, Show)
-- | 'RegisterDomain' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'rdDescription' @::@ 'Maybe' 'Text'
--
-- * 'rdName' @::@ 'Text'
--
-- * 'rdWorkflowExecutionRetentionPeriodInDays' @::@ 'Text'
--
-- The description defaults to 'Nothing'; set it via 'rdDescription'.
registerDomain :: Text -- ^ 'rdName'
               -> Text -- ^ 'rdWorkflowExecutionRetentionPeriodInDays'
               -> RegisterDomain
registerDomain p1 p2 = RegisterDomain
    { _rdName                                   = p1
    , _rdWorkflowExecutionRetentionPeriodInDays = p2
    , _rdDescription                            = Nothing
    }
-- | A text description of the domain.
rdDescription :: Lens' RegisterDomain (Maybe Text)
rdDescription = lens _rdDescription (\s a -> s { _rdDescription = a })
-- | Name of the domain to register. The name must be unique in the region that
-- the domain is registered in.
--
-- The specified string must not start or end with whitespace. It must not
-- contain a ':' (colon), '/' (slash), '|' (vertical bar), or any control characters
-- (\u0000-\u001f | \u007f - \u009f). Also, it must not contain the literal
-- string quotarnquot.
rdName :: Lens' RegisterDomain Text
rdName = lens _rdName (\s a -> s { _rdName = a })
-- | The duration (in days) that records and histories of workflow executions on
-- the domain should be kept by the service. After the retention period, the
-- workflow execution is not available in the results of visibility calls.
--
-- If you pass the value 'NONE' or '0' (zero), then the workflow execution history
-- will not be retained. As soon as the workflow execution completes, the
-- execution record and its history are deleted.
--
-- The maximum workflow execution retention period is 90 days. For more
-- information about Amazon SWF service limits, see: <http://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dg-limits.html Amazon SWF Service Limits>
-- in the /Amazon SWF Developer Guide/.
rdWorkflowExecutionRetentionPeriodInDays :: Lens' RegisterDomain Text
rdWorkflowExecutionRetentionPeriodInDays =
    lens _rdWorkflowExecutionRetentionPeriodInDays
        (\s a -> s { _rdWorkflowExecutionRetentionPeriodInDays = a })
-- | Empty response type — RegisterDomain returns no payload on success.
data RegisterDomainResponse = RegisterDomainResponse
    deriving (Eq, Ord, Read, Show, Generic)
-- | 'RegisterDomainResponse' constructor.
registerDomainResponse :: RegisterDomainResponse
registerDomainResponse = RegisterDomainResponse
-- Request wiring: SWF is a JSON-over-POST protocol, so the path is "/" and
-- the operation is selected by the request target, not the query string.
instance ToPath RegisterDomain where
    toPath = const "/"
instance ToQuery RegisterDomain where
    toQuery = const mempty
instance ToHeaders RegisterDomain
instance ToJSON RegisterDomain where
    toJSON RegisterDomain{..} = object
        [ "name"                                   .= _rdName
        , "description"                            .= _rdDescription
        , "workflowExecutionRetentionPeriodInDays" .= _rdWorkflowExecutionRetentionPeriodInDays
        ]
instance AWSRequest RegisterDomain where
    type Sv RegisterDomain = SWF
    type Rs RegisterDomain = RegisterDomainResponse
    request  = post "RegisterDomain"
    -- empty body: success is signalled purely by the HTTP status
    response = nullResponse RegisterDomainResponse
|
kim/amazonka
|
amazonka-swf/gen/Network/AWS/SWF/RegisterDomain.hs
|
mpl-2.0
| 5,858
| 0
| 9
| 1,284
| 522
| 327
| 195
| 63
| 1
|
{-# LANGUAGE TemplateHaskell, FlexibleInstances, TypeSynonymInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-| Unittests for ganeti-htools.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Test.Ganeti.BasicTypes (testBasicTypes) where
import Test.QuickCheck hiding (Result)
import Test.QuickCheck.Function
import Control.Monad
import Test.Ganeti.TestHelper
import Test.Ganeti.TestCommon
import Ganeti.BasicTypes
-- Since we actually want to test these, don't tell us not to use them :)
{-# ANN module "HLint: ignore Functor law" #-}
{-# ANN module "HLint: ignore Monad law, left identity" #-}
{-# ANN module "HLint: ignore Monad law, right identity" #-}
{-# ANN module "HLint: ignore Use >=>" #-}
{-# ANN module "HLint: ignore Use ." #-}
-- * Arbitrary instances
-- | Generate arbitrary 'Result' values: a failure ('Bad') or a success
-- ('Ok'), each wrapping an arbitrary payload, chosen with equal
-- probability.
instance (Arbitrary a) => Arbitrary (Result a) where
  arbitrary = oneof [ fmap Bad arbitrary
                    , fmap Ok arbitrary
                    ]
-- * Test cases
-- | Functor identity law: @fmap id == id@.
prop_functor_id :: Result Int -> Property
prop_functor_id result = fmap id result ==? result
-- | Functor composition law: @fmap (f . g) == fmap f . fmap g@.
prop_functor_composition :: Result Int
                         -> Fun Int Int -> Fun Int Int -> Property
prop_functor_composition result (Fun _ outer) (Fun _ inner) =
  fmap (outer . inner) result ==? fmap outer (fmap inner result)
-- | Applicative identity law: @pure id \<*\> v = v@.
prop_applicative_identity :: Result Int -> Property
prop_applicative_identity value = (pure id <*> value) ==? value
-- | Applicative composition law:
--
-- > pure (.) <*> u <*> v <*> w = u <*> (v <*> w)
prop_applicative_composition :: Result (Fun Int Int)
                             -> Result (Fun Int Int)
                             -> Result Int
                             -> Property
prop_applicative_composition wrappedU wrappedV w =
  let u = fmap apply wrappedU -- unwrap the functions from 'Fun'
      v = fmap apply wrappedV
  in (pure (.) <*> u <*> v <*> w) ==? (u <*> (v <*> w))
-- | Applicative homomorphism law: @pure f \<*\> pure x = pure (f x)@.
prop_applicative_homomorphism :: Fun Int Int -> Int -> Property
prop_applicative_homomorphism (Fun _ fn) n =
  ((pure fn <*> pure n) :: Result Int) ==? pure (fn n)
-- | Applicative interchange law: @u \<*\> pure y = pure ($ y) \<*\> u@.
prop_applicative_interchange :: Result (Fun Int Int)
                             -> Int -> Property
prop_applicative_interchange wrapped n =
  let u = fmap apply wrapped -- unwrap the function from 'Fun'
  in (u <*> pure n) ==? (pure ($ n) <*> u)
-- | Applicative\/functor correspondence: @fmap f x = pure f \<*\> x@.
prop_applicative_functor :: Fun Int Int -> Result Int -> Property
prop_applicative_functor (Fun _ fn) result =
  fmap fn result ==? (pure fn <*> result)
-- | Applicative\/monad correspondence:
--
-- > pure = return
--
-- > (<*>) = ap
prop_applicative_monad :: Int -> Result (Fun Int Int) -> Property
prop_applicative_monad n wrapped =
  let value = pure n :: Result Int
      func = fmap apply wrapped -- unwrap the function from 'Fun'
  in value ==? return n .&&. (func <*> value) ==? func `ap` value
-- | The three monad laws: left identity, right identity and
-- associativity of '>>='.
--
-- > return a >>= k == k a
--
-- > m >>= return == m
--
-- > m >>= (\x -> k x >>= h) == (m >>= k) >>= h
prop_monad_laws :: Int -> Result Int
                -> Fun Int (Result Int)
                -> Fun Int (Result Int)
                -> Property
prop_monad_laws a m (Fun _ k) (Fun _ h) =
  conjoin [leftIdentity, rightIdentity, associativity]
  where
    leftIdentity =
      counterexample "return a >>= k == k a" ((return a >>= k) ==? k a)
    rightIdentity =
      counterexample "m >>= return == m" ((m >>= return) ==? m)
    associativity =
      counterexample "m >>= (\\x -> k x >>= h) == (m >>= k) >>= h)"
                     ((m >>= (\x -> k x >>= h)) ==? ((m >>= k) >>= h))
-- | The MonadPlus zero laws:
--
-- > mzero >>= f = mzero
--
-- > v >> mzero = mzero
prop_monadplus_mzero :: Result Int -> Fun Int (Result Int) -> Property
prop_monadplus_mzero v (Fun _ f) = leftZero .&&. rightZero
  where
    leftZero =
      counterexample "mzero >>= f = mzero" ((mzero >>= f) ==? mzero)
    -- FIXME: since we have "many" mzeros, we can't test for equality,
    -- just that we got back a 'Bad' value; I'm not sure if this means
    -- our MonadPlus instance is not sound or not...
    rightZero =
      counterexample "v >> mzero = mzero" (isBad (v >> mzero))
-- Register all the properties above under the "BasicTypes" test group
-- (the test suite is generated by Template Haskell).
testSuite "BasicTypes"
          [ 'prop_functor_id
          , 'prop_functor_composition
          , 'prop_applicative_identity
          , 'prop_applicative_composition
          , 'prop_applicative_homomorphism
          , 'prop_applicative_interchange
          , 'prop_applicative_functor
          , 'prop_applicative_monad
          , 'prop_monad_laws
          , 'prop_monadplus_mzero
          ]
|
ganeti/ganeti
|
test/hs/Test/Ganeti/BasicTypes.hs
|
bsd-2-clause
| 5,815
| 0
| 15
| 1,311
| 1,017
| 549
| 468
| 76
| 1
|
module Data.HashPSQ.Tests
( tests
) where
import Prelude hiding (lookup)
import Test.Framework (Test)
import Test.Framework.Providers.HUnit (testCase)
import Test.Framework.Providers.QuickCheck2 (testProperty)
import Test.QuickCheck (Property, arbitrary,
forAll)
import Test.HUnit (Assertion, assert)
import Data.HashPSQ.Internal
import qualified Data.OrdPSQ as OrdPSQ
import Data.PSQ.Class.Gen
import Data.PSQ.Class.Util
--------------------------------------------------------------------------------
-- Index of tests
--------------------------------------------------------------------------------
-- | All unit tests and properties of this module, in the order they
-- are reported by the test driver.
tests :: [Test]
tests =
    [ testCase "showBucket" test_showBucket
    , testCase "toBucket" test_toBucket
    , testProperty "unsafeLookupIncreasePriority"
                    prop_unsafeLookupIncreasePriority
    , testProperty "unsafeInsertIncreasePriority"
                    prop_unsafeInsertIncreasePriority
    , testProperty "unsafeInsertIncreasePriorityView"
                    prop_unsafeInsertIncreasePriorityView
    ]
--------------------------------------------------------------------------------
-- Unit tests
--------------------------------------------------------------------------------
-- | Force the 'Show' instance of 'Bucket' and check that it produces a
-- non-empty rendering.  Uses 'null' rather than @length ... > 0@ so we
-- don't walk the entire rendered string just to compare its length.
test_showBucket :: Assertion
test_showBucket =
    assert $ not $ null $ coverShowInstance bucket
  where
    bucket :: Bucket Int Int Char
    bucket = B 1 'a' OrdPSQ.empty
-- | Placeholder for a bucket-construction test; currently a no-op that
-- always passes.
test_toBucket :: Assertion
test_toBucket =
    assert True
    -- TODO (jaspervdj)
    -- assert $ mkBucket (OrdPSQ.empty :: OrdPSQ.OrdPSQ Int Int Char)
--------------------------------------------------------------------------------
-- Properties
--------------------------------------------------------------------------------
-- | 'unsafeLookupIncreasePriority' returns the key's /old/ entry (if
-- present) and stores the strictly larger priority; the original queue
-- is left untouched and the updated queue stays valid.
prop_unsafeLookupIncreasePriority :: Property
prop_unsafeLookupIncreasePriority =
    forAll arbitraryPSQ $ \t ->
    forAll arbitrary $ \k ->
        -- Pick a priority one above the current one (or 0 when the key
        -- is absent) so the "increase" precondition holds.
        let newP = maybe 0 ((+ 1) . fst) (lookup k t)
            (mbPx, t') = unsafeLookupIncreasePriority k newP t
            -- Expected post-lookup: the old entry with its priority
            -- bumped by one (i.e. exactly newP).
            expect = case mbPx of
                Nothing -> Nothing
                Just (p, x) -> Just (p + 1, x)
        in valid (t' :: HashPSQ LousyHashedInt Int Char) &&
           lookup k t' == expect &&
           lookup k t == mbPx
-- | Inserting a key with a priority larger than any priority in the
-- queue keeps the queue valid and makes the key retrievable with
-- exactly that priority and value.
prop_unsafeInsertIncreasePriority :: Property
prop_unsafeInsertIncreasePriority =
    forAll arbitraryPSQ $ \psq ->
    forAll arbitrary $ \key ->
    forAll arbitrary $ \value ->
        let newPrio = largerThanMaxPrio psq
            psq' = unsafeInsertIncreasePriority key newPrio value psq
        in valid (psq' :: HashPSQ LousyHashedInt Int Char) &&
           lookup key psq' == Just (newPrio, value)
-- | Like 'prop_unsafeInsertIncreasePriority', but the view variant
-- must additionally report the replaced entry, which has to agree with
-- a lookup in the original queue.
prop_unsafeInsertIncreasePriorityView :: Property
prop_unsafeInsertIncreasePriorityView =
    forAll arbitraryPSQ $ \psq ->
    forAll arbitrary $ \key ->
    forAll arbitrary $ \value ->
        let newPrio = largerThanMaxPrio psq
            (replaced, psq') =
                unsafeInsertIncreasePriorityView key newPrio value psq
        in valid (psq' :: HashPSQ LousyHashedInt Int Char) &&
           lookup key psq' == Just (newPrio, value) &&
           lookup key psq == replaced
|
bttr/psqueues
|
tests/Data/HashPSQ/Tests.hs
|
bsd-3-clause
| 3,404
| 0
| 19
| 1,020
| 663
| 360
| 303
| 62
| 2
|
{-# LANGUAGE GADTs #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
module Network.Wai.Ghcjs.Internal where
import Control.Arrow
import Control.Exception
import Control.Monad
import Control.Monad.IO.Class
import Data.Char
import Data.Default ()
import Data.List
import Language.Haskell.TH
import Language.Haskell.TH.Lift
import Language.Haskell.TH.Syntax
import System.Directory
import System.Directory.Tree
import System.FilePath
import System.IO
-- | Specifies how to build the client application.
data BuildConfig = BuildConfig {
  mainFile :: FilePath
    -- ^ location of the main module
, customIndexFile :: Maybe FilePath
    -- ^ custom @index.html@ file
, sourceDirs :: [FilePath]
    -- ^ where to look for Haskell source files
    -- (see 'getSourceDirs', which defaults this to @["."]@)
, projectDir :: FilePath
    -- ^ where the client application resides. Both 'mainFile' and
    -- 'sourceDirs' are interpreted relative to 'projectDir'.
, projectExec :: Exec
    -- ^ which ghcjs package databases to use (see 'Exec')
, buildDir :: FilePath
    -- ^ where to store build results
    -- (resolved to an absolute path by 'prepareConfig')
} deriving (Eq, Show)
-- | Allows a 'BuildConfig' to be spliced into Template Haskell code.
instance Lift BuildConfig where
  lift (BuildConfig a b c d e f) = [|BuildConfig a b c d e f|]
-- | The configured source directories, falling back to the project
-- directory itself (@["."]@) when none were given.
getSourceDirs :: BuildConfig -> [FilePath]
getSourceDirs config
  | null dirs = ["."]
  | otherwise = dirs
  where
    dirs = sourceDirs config
-- | Create the per-environment build directory (if missing) and return
-- the configuration with 'buildDir' replaced by its absolute path.
prepareConfig :: String -> BuildConfig -> IO BuildConfig
prepareConfig environment config = do
  let relativeDir = buildDir config </> environment
  createDirectoryIfMissing True relativeDir
  absoluteDir <- canonicalizePath relativeDir
  return config { buildDir = absoluteDir }
-- | In case your client application needs dependencies that are
-- installed in a @cabal@ sandbox or through @stack@ you can specify
-- that with 'Exec'.  (Consumed by 'addExec', which prefixes build
-- commands accordingly.)
data Exec
  = Vanilla
    -- ^ no additional package databases are needed
  | Cabal
    -- ^ execute build commands prefixed with @cabal exec --@
  | Stack
    -- ^ execute build commands prefixed with @stack exec --@
  deriving (Eq, Show)
-- | Allows an 'Exec' to be spliced into Template Haskell code.
instance Lift Exec where
  lift Vanilla = [|Vanilla|]
  lift Cabal = [|Cabal|]
  lift Stack = [|Stack|]
-- | Prefix a build command according to the 'Exec' mode so that it
-- runs with the corresponding package databases in scope.
addExec :: Exec -> String -> String
addExec Vanilla command = command
addExec Cabal command = "cabal exec -- " ++ command
addExec Stack command = "stack exec -- " ++ command
-- | Recursively collect all Haskell source files (@.hs@) below the
-- given directories, skipping hidden files and directories (names
-- starting with a dot, except @.@ and @..@).  The result is
-- normalised, duplicate-free and sorted.
--
-- Note: a 'Failed' node in the directory tree aborts with 'error'.
findHaskellFiles :: MonadIO m => [FilePath] -> m [FilePath]
findHaskellFiles dirs = liftIO $
  -- Single composed post-processing step instead of a chain of
  -- '<$>'s; 'concatMap' replaces 'concat . map'.  The parameter is
  -- named 'dirs' so it no longer shadows the 'sourceDirs' record
  -- selector.
  sort . nub . map normalise . concatMap inner <$>
    mapM (readDirectoryWith (const (return ()))) dirs
  where
    -- Flatten one anchored directory tree into the paths of its
    -- non-hidden Haskell files, anchored at its root.
    inner :: AnchoredDirTree () -> [FilePath]
    inner (anchor :/ dirTree) = map (anchor </>) $ case dirTree of
      File name () ->
        if isHaskellFile name && not (isHidden name)
          then [name]
          else []
      Dir name children ->
        if not (isHidden name)
          then concatMap (inner . (name :/)) children
          else []
      Failed name err -> error $ show (name, err)
    isHaskellFile = (== ".hs") . takeExtension
    isHidden = \case
      "." -> False
      ".." -> False
      '.' : _ -> True
      _ -> False
-- | Run an 'IO' action with the current working directory set to the
-- given directory, restoring the previous working directory afterwards
-- (exception-safe via 'bracket').
inCurrentDirectory :: FilePath -> IO a -> IO a
inCurrentDirectory dir action =
    bracket enter restore (const action)
  where
    enter = do
      previous <- getCurrentDirectory
      setCurrentDirectory dir
      return previous
    restore previous = setCurrentDirectory previous
-- | Select between two values depending on the current
-- 'CompilationMode': the first for 'Development', the second for
-- 'Production'.
ifDevel :: a -> a -> CM a
ifDevel develValue prodValue = do
  mode <- readCompilationMode
  case mode of
    Development -> return develValue
    Production -> return prodValue
-- | Whether client code is compiled ahead of time ('Production') or on
-- the fly ('Development'); selected via 'compilationModeFile' on disk
-- (see 'readCompilationMode').
data CompilationMode
  = Production
  | Development
  deriving (Eq, Ord, Show)
-- | Name of the file (looked up relative to the current working
-- directory) that selects the 'CompilationMode'.
compilationModeFile :: FilePath
compilationModeFile = "ghcjs-compilation-mode"
-- | Read the current 'CompilationMode' from 'compilationModeFile',
-- creating the file with a commented default ("production") when it
-- does not exist yet.  The file is registered via 'AddDependentFile'
-- so Template Haskell recompiles when it changes; an unparsable file
-- aborts with an 'ErrorCall'.
readCompilationMode :: CM CompilationMode
readCompilationMode = do
  file <- IO $ do
    createIfMissing
    canonicalizePath compilationModeFile
  AddDependentFile file
  IO $ do
    contents <- readFile file
    case parse contents of
      Right m -> return m
      Left () -> throwIO $ ErrorCall
        ("invalid " ++ compilationModeFile ++ " file:\n" ++ contents)
  where
    -- Strip leading whitespace, drop '#' comment lines, and expect
    -- exactly one word overall: "development" or "production".
    parse :: String -> Either () CompilationMode
    parse =
      lines >>>
      map (dropWhile isSpace) >>>
      filter (not . ("#" `isPrefixOf`)) >>>
      concatMap words >>>
      (\ case
        ["development"] -> return Development
        ["production"] -> return Production
        _ -> Left ())
    -- Write the default mode file (selecting "production") when it is
    -- missing, and log that to stderr.
    createIfMissing :: IO ()
    createIfMissing = do
      exists <- doesFileExist compilationModeFile
      when (not exists) $ do
        writeFile compilationModeFile $ unlines $
          "# This file controls the compilation mode for the client code through ghcjs." :
          "" :
          "# In 'production' mode the client code will be compiled while compiling" :
          "# the server (through template haskell). The resulting assets (index.html" :
          "# and javascript files will be embedded into the executable. This allows" :
          "# to distribute the executable as a single file." :
          "production" :
          "" :
          "# In 'development' mode the javascript files will be compiled on the fly" :
          "# from the source files on http requests. Recompilation will be triggered" :
          "# by changes on disks to the source files." :
          "# development" :
          []
        hPutStrLn stderr $
          "INFO: writing default " ++ compilationModeFile
-- | A small free-monad-style DSL for computations that combine plain
-- 'IO' actions with Template Haskell's file-dependency registration;
-- interpreted into 'Q' by 'runCM'.
data CM a where
  (:>>=) :: CM a -> (a -> CM b) -> CM b
  IO :: IO a -> CM a
  AddDependentFile :: FilePath -> CM ()
-- | Map a pure function over the eventual result of a 'CM' action.
-- 'AddDependentFile' has no payload to map over, so it is sequenced
-- with a pure continuation instead.
instance Functor CM where
  fmap f = \ case
    a :>>= b -> a :>>= (\ x -> f <$> b x)
    IO action -> IO (fmap f action)
    AddDependentFile file ->
      AddDependentFile file :>>= (return . f)
-- | Apply a wrapped function to a wrapped argument by sequencing the
-- two computations (written with explicit ':>>=', which is exactly
-- what the do-notation desugars to via the 'Monad' instance).
instance Applicative CM where
  pure = IO . pure
  mf <*> mx = mf :>>= \ f -> mx :>>= \ x -> pure (f x)
-- | Sequencing is recorded syntactically with the ':>>=' constructor
-- and only executed when interpreted by 'runCM'.
instance Monad CM where
  (>>=) = (:>>=)
-- | Interpret a 'CM' computation in Template Haskell's 'Q' monad:
-- 'IO' actions run via 'runIO' and 'AddDependentFile' registers the
-- file with the compiler.
runCM :: CM a -> Q a
runCM (a :>>= b) = runCM a >>= runCM . b
runCM (IO action) = runIO action
runCM (AddDependentFile file) = addDependentFile file
|
soenkehahn/wai-shake
|
src/Network/Wai/Ghcjs/Internal.hs
|
bsd-3-clause
| 6,040
| 0
| 24
| 1,584
| 1,557
| 804
| 753
| 168
| 8
|
{-| This @lens@ tutorial targets Haskell beginners and assumes only basic
familiarity with Haskell. By the end of this tutorial you should:
* understand what problems the @lens@ library solves,
* know when it is appropriate to use the @lens@ library,
* be proficient in the most common @lens@ idioms,
* understand the drawbacks of using lenses, and:
* know where to look if you wish to learn more advanced tricks.
If you would like to follow along with these examples, just import this
module:
> $ ghci
> >>> import Control.Lens.Tutorial
-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveTraversable #-}
module Control.Lens.Tutorial (
-- * Motivation
-- $motivation
-- * Lenses
-- $lenses
-- * Accessor notation
-- $accessors
-- * First-class
-- $firstclass
-- * Traversals
-- $traversals
-- * Types
-- $types
-- * Drawbacks
-- $drawbacks
-- * Conclusion
-- $conclusion
) where
import Control.Applicative (Applicative)
import Control.Lens hiding (element)
import Data.Foldable (Foldable)
import Data.Monoid (Monoid)
-- $motivation
--
-- The simplest problem that the @lens@ library solves is updating deeply
-- nested records. Suppose you had the following nested Haskell data types:
--
-- > data Atom = Atom { _element :: String, _point :: Point }
-- >
-- > data Point = Point { _x :: Double, _y :: Double }
--
-- If you wanted to increase the @x@ coordinate of an `Atom` by one unit, you
-- would have to write something like this in Haskell:
--
-- > shiftAtomX :: Atom -> Atom
-- > shiftAtomX (Atom e (Point x y)) = Atom e (Point (x + 1) y)
--
-- This unpacking and repacking of data types grows increasingly difficult the
-- more fields you add to each data type or the more deeply nested your data
-- structures become.
--
-- The @lens@ library solves this problem by letting you instead write:
--
-- > -- atom.hs
-- >
-- > {-# LANGUAGE TemplateHaskell #-}
-- >
-- > import Control.Lens hiding (element)
-- >
-- > data Atom = Atom { _element :: String, _point :: Point } deriving (Show)
-- >
-- > data Point = Point { _x :: Double, _y :: Double } deriving (Show)
-- >
-- > makeLenses ''Atom
-- > makeLenses ''Point
-- >
-- > shiftAtomX :: Atom -> Atom
-- > shiftAtomX = over (point . x) (+ 1)
--
-- Let's convince ourselves that this works:
--
-- >>> let atom = Atom { _element = "C", _point = Point { _x = 1.0, _y = 2.0 } }
-- >>> shiftAtomX atom
-- Atom {_element = "C", _point = Point {_x = 2.0, _y = 2.0}}
--
-- The above solution does not change no matter how many fields we add to
-- @Atom@ or @Point@.
--
-- Now suppose that we added yet another data structure:
--
-- > data Molecule = Molecule { _atoms :: [Atom] } deriving (Show)
--
-- We could shift an entire @Molecule@ by writing:
--
-- > makeLenses ''Molecule
-- >
-- > shiftMoleculeX :: Molecule -> Molecule
-- > shiftMoleculeX = over (atoms . traverse . point . x) (+ 1)
--
-- Again, this works the way we expect:
--
-- >>> let atom1 = Atom { _element = "C", _point = Point { _x = 1.0, _y = 2.0 } }
-- >>> let atom2 = Atom { _element = "O", _point = Point { _x = 3.0, _y = 4.0 } }
-- >>> let molecule = Molecule { _atoms = [atom1, atom2] }
-- >>> shiftMoleculeX molecule -- Output formatted for clarity
-- Molecule {_atoms = [Atom {_element = "C", _point = Point {_x = 2.0, _y = 2.0}},Atom {_element = "O", _point = Point {_x = 4.0, _y = 4.0}}]}
--
-- ... or formatted for clarity:
--
-- > Molecule
-- > { _atoms =
-- > [ Atom { _element = "C", _point = Point { _x = 2.0, _y = 2.0 } }
-- >     , Atom { _element = "O", _point = Point { _x = 4.0, _y = 4.0 } }
-- > ]
-- > }
--
-- Many people stumble across lenses while trying to solve this common problem
-- of working with data structures with a large number of fields or deeply
-- nested values. These sorts of situations arise commonly in:
--
-- * games with complex and deeply nested state
--
-- * scientific data formats
--
-- * sensor or instrument output
--
-- * web APIs
--
-- * XML and JSON
--
-- * enterprise code where data structures can have tens, hundreds, or even
-- thousands of fields (true story!)
{- $lenses
You might have some basic questions like:
/Question:/ What is a lens?
/Answer:/ A lens is a first class getter and setter
We already saw how to use lenses to update values using `over`, but we can
also use lenses to retrieve values using `view`:
>>> let atom = Atom { _element = "C", _point = Point { _x = 1.0, _y = 2.0 } }
>>> view (point . x) atom
1.0
In other words, lenses package both \"get\" and \"set\" functionality into
a single value (the lens). You could pretend that a lens is a record
with two fields:
> data Lens a b = Lens
> { view :: a -> b
> , over :: (b -> b) -> (a -> a)
> }
That's not how lenses are actually implemented, but it's a useful
starting intuition.
/Question:/ What is the type of a lens?
/Answer:/ We used two lenses in the above @Atom@ example, with these types:
> point :: Lens' Atom Point
> x :: Lens' Point Double
The @point@ lens contains all the information we need to get or set the
@_point@ field of the @Atom@ type (which is a `Point`). Similarly, the @x@
lens contains all the information we need to get or set the @_x@ field of
the @Point@ data type (which is a `Double`).
The convention for the `Lens'` type parameters is:
> -- +-- Bigger type
> -- |
> -- v
> Lens' bigger smaller
> -- ^
> -- |
> -- +-- Smaller type within the bigger type
The actual definition of `Lens'` is:
> type Lens' a b = forall f . Functor f => (b -> f b) -> (a -> f a)
You might wonder how you can fit both getter and setter functionality in
a single value like this. The trick is that we get to pick what `Functor`
we specialize @f@ to and depending on which `Functor` we pick we get
different features.
For example, if you pick @(f = `Identity`)@:
> type ASetter' a b = (b -> Identity b) -> (a -> Identity a)
>
> -- ... equivalent to: (b -> b) -> (a -> a)
... you can build an `over`-like function.
Similarly, if you pick @(f = `Const` b)@:
    > type Getting b a b = (b -> Const b b) -> (a -> Const b a)
>
> -- ... equivalent to: (b -> b ) -> (a -> b )
>
> -- ... equivalent to: (a -> b )
... you can build a `view`-like function.
Those are not the only two `Functor`s we can pick. In fact, we can do a
lot more with lenses than just get and set values, but those are the two
most commonly used features.
/Question:/ How do I create lenses?
/Answer:/ You can either auto-generate them using Template Haskell or
create them by hand
In our @Atom@ example, we auto-generated the lenses using Template Haskell,
like this:
> makeLenses ''Atom
> makeLenses ''Point
This created four lenses of the following types:
> element :: Lens' Atom String
> point :: Lens' Atom Point
> x :: Lens' Point Double
> y :: Lens' Point Double
`makeLenses` creates one lens per field prefixed with an underscore. The
lens has the same name as the field without the underscore.
However, sometimes Template Haskell is not an option, so we can also use
the `lens` utility function to build lenses. This utility has type:
> lens :: (a -> b) -> (b -> a -> a) -> Lens' a b
The first argument is a \"getter\" (a way to extract a @\'b\'@ from an
@\'a\'@). The second argument is a \"setter\" (given a @b@, update an
@a@). The result is a `Lens'` built from the getter and setter. You would
use `lens` like this:
> point :: Lens' Atom Point
> point = lens _point (\newPoint atom -> atom { _point = newPoint })
You can even define lenses without incurring a dependency on the @lens@
library. Remember that lenses are just higher-order functions over
`Functor`s, so we could instead write:
> -- point :: Lens' Atom Point
> point :: Functor f => (Point -> f Point) -> Atom -> f Atom
> point k atom = fmap (\newPoint -> atom { _point = newPoint }) (k (_point atom))
This means that you can provide lenses for your library's types without
depending on the @lens@ library. All you need is the `fmap` function,
which is provided by the Haskell Prelude.
/Question:/ How do I combine lenses?
/Answer:/ You compose them, using function composition (Yes, really!)
You can think of the function composition operator as having this type:
> (.) :: Lens' a b -> Lens' b c -> Lens' a c
We can compose lenses using function composition because `Lens'` is a
type synonym for a higher-order function:
> type Lens' a b = forall f . Functor f => (b -> f b) -> (a -> f a)
So under the hood we are composing two higher-order functions to get back a
new higher-order function:
> (.) :: Functor f
> => ((b -> f b) -> (a -> f a))
> -> ((c -> f c) -> (b -> f b))
> -> ((c -> f c) -> (a -> f a))
In our original @Atom@ example, we composed the @point@ and @x@ lenses to
create a new composite lens:
> point :: Lens' Atom Point
> x :: Lens' Point Double
>
> point . x :: Lens' Atom Double
This composite lens lets us get or set the @x@ coordinate of an @Atom@.
We can use `over` and `view` on the composite `Lens'` and they will behave
exactly the way we expect:
> view (point . x) :: Atom -> Double
>
> over (point . x) :: (Double -> Double) -> (Atom -> Atom)
/Question:/ How do I consume lenses?
/Answer:/ Using `view`, `set` or `over`
Here are their types:
> view :: Lens' a b -> a -> b
>
> over :: Lens' a b -> (b -> b) -> a -> a
>
> set :: Lens' a b -> b -> a -> a
> set lens b = over lens (\_ -> b)
`view` and `over` are the two fundamental functions on lenses. `set` is
just a special case of `over`.
`view` and `over` are fundamental because they distribute over lens
composition:
> view (lens1 . lens2) = (view lens2) . (view lens1)
>
> view id = id
> over (lens1 . lens2) = (over lens1) . (over lens2)
>
> over id = id
/Question:/ What else do I need to know?
/Answer:/ That's pretty much it!
For 90% of use cases, you just:
    * Create lenses (using `makeLenses`, `lens` or plain-old `fmap`)
* Compose them (using (`.`))
* Consume them (using `view`, `set`, and `over`)
You could actually stop reading here if you are in a hurry since this
covers the overwhelmingly common use case for the library. On the other
hand, keep reading if you would like to learn additional tricks and
features.
-}
{- $accessors
You might be used to object-oriented languages where you could retrieve a
nested field using:
> atom.point.x
You can do almost the exact same thing using the @lens@ library, except
that the first dot will have a @^@ right before the dot:
>>> let atom = Atom { _element = "C", _point = Point { _x = 1.0, _y = 2.0 } }
>>> atom^.point.x
1.0
You can better understand why this works, by adding whitespace and
explicit parentheses:
> atom ^. (point . x)
This trick uses (`^.`), which is an infix operator equivalent to `view`:
> (^.) :: a -> Lens' a b -> b
> x ^. l = view l x
... and you just keep adding dots after that for each lens you compose.
This gives the appearance of object-oriented accessors if you omit the
whitespace around the operators.
-}
{- $firstclass
Lenses are \"first class\" values, meaning that you can manipulate them
using ordinary functional programming techniques. You can take them as
inputs, return them as outputs, or stick them in data structures. Anything
goes!
For example, suppose we don't want to define separate shift functions for
@Atom@s and @Molecule@s:
> shiftAtomX :: Atom -> Atom
> shiftAtomX = over (point . x) (+ 1)
> shiftMoleculeX :: Molecule -> Molecule
> shiftMoleculeX = over (atoms . traverse . point . x) (+ 1)
We can instead unify them into a single function by parametrizing the
shift function on the lens:
> shift lens = over lens (+ 1)
This lets us write:
> shift (point . x) :: Atom -> Atom
>
> shift (atoms . traverse . point . x) :: Molecule -> Molecule
Even better, we can define synonyms for our composite lenses:
> atomX :: Lens' Atom Double
> atomX = point . x
>
> -- We'll learn what `Traversal` means shortly
> moleculeX :: Traversal' Molecule Double
> moleculeX = atoms . traverse . point . x
Now we can write code almost identical to the original code:
> shift atomX :: Atom -> Atom
>
> shift moleculeX :: Molecule -> Molecule
... but we also get several other utilities for free:
> set atomX :: Double -> Atom -> Atom
>
> set moleculeX :: Double -> Molecule -> Molecule
>
> view atomX :: Atom -> Double
>
> -- We can't use `view` for `Traversal'`s. Read on to find out why
> toListOf moleculeX :: Molecule -> [Double]
That's much more reusable, but you might wonder what this `Traversal'` and
`toListOf` business is all about.
-}
-- $traversals
-- /Question:/ What is a traversal?
--
-- /Answer:/ A first class getter and setter for an arbitrary number of values
--
-- A traversal lets you get all the values it points to as a list and it also
-- lets you update or set all the values it points to. Think of a traversal
-- as a record with two fields:
--
-- > data Traversal' a b = Traversal'
-- > { toListOf :: a -> [b]
-- > , over :: (b -> b) -> (a -> a)
-- > }
--
-- That's not how traversals are actually implemented, but it's a useful
-- starting intuition.
--
-- We can still use `over` and `set` (a special case of `over`) with a
-- traversal, but we use `toListOf` instead of `view`.
--
-- /Question:/ What is the type of a traversal?
--
-- /Answer:/ We used one traversal in the above @Molecule@ example:
--
-- > moleculeX :: Traversal' Molecule Double
--
-- This `Traversal'` lets us get or set an arbitrary number of x coordinates,
-- each of which is a `Double`. There could be less than one x coordinate
-- (i.e. 0 coordinates) or more than one x coordinate. Contrast this with a
-- `Lens'` which can only get or set exactly one value.
--
-- Like `Lens'`, `Traversal'` is a type synonym for a higher-order function:
--
-- > type Traversal' a b = forall f . Applicative f => (b -> f b) -> (a -> f a)
-- >
-- > type Lens' a b = forall f . Functor f => (b -> f b) -> (a -> f a)
--
-- Notice that the only difference between a `Lens'` and a `Traversal'` is the
-- type class constraint. A `Lens'` has a `Functor` constraint and
-- `Traversal'` has an `Applicative` constraint. This means that any `Lens'`
-- is automatically also a valid `Traversal'` (since `Functor` is a superclass
-- of `Applicative`).
--
-- Since every `Lens'` is a `Traversal'`, all of our example lenses also
-- double as traversals:
--
-- > atoms :: Traversal' Molecule [Atom]
-- > element :: Traversal' Atom String
-- > point :: Traversal' Atom Point
-- > x :: Traversal' Point Double
-- > y :: Traversal' Point Double
--
-- We actually used yet another `Traversal'`, which was `traverse` (from
-- "Data.Traversable"):
--
-- > traverse :: Traversable t => Traversal' (t a) a
--
-- This works because the `Traversal'` type synonym expands out to:
--
-- > traverse :: (Applicative f, Traversable t) => (a -> f a) -> t a -> f (t a)
--
-- ... which is exactly the traditional type signature of `traverse`.
--
-- In our @Molecule@ example, we were using the special case where @t = []@:
--
-- > traverse :: Traversal' [a] a
--
-- In Haskell, you can derive `Functor`, `Data.Foldable.Foldable` and
-- `Traversable` for many data types using the @DeriveFoldable@ and
-- @DeriveTraversable@ extensions. This means that you can autogenerate a
-- valid `traverse` for these data types:
--
-- > {-# LANGUAGE DeriveFoldable #-}
-- > {-# LANGUAGE DeriveFunctor #-}
-- > {-# LANGUAGE DeriveTraversable #-}
-- >
-- > import Control.Lens
-- > import Data.Foldable
-- >
-- > data Pair a = Pair a a deriving (Functor, Foldable, Traversable)
--
-- We could then use `traverse` to navigate from `Pair` to its two children:
--
-- > traverse :: Traversal' (Pair a) a
-- >
-- > over traverse :: (a -> a) -> (Pair a -> Pair a)
-- >
-- > over traverse (+ 1) (Pair 3 4) = Pair 4 5
--
-- /Question:/ How do I create traversals?
--
-- /Answer:/ There are three main ways to create primitive traversals:
--
-- * `traverse` is a `Traversal'` that you get for any type that implements
-- `Traversable`
--
-- * Every `Lens'` will also type-check as a `Traversal'`
--
-- * You can use Template Haskell to generate `Traversal'`s using `makePrisms`
-- since every `Prism'` is also a `Traversal'` (not covered in this
-- tutorial)
--
-- /Question:/ How do I combine traversals?
--
-- /Answer:/ You compose them, using function composition
--
-- You can think of the function composition operator as having this type:
--
-- > (.) :: Traversal' a b -> Traversal' b c -> Traversal' a c
--
-- We can compose traversals using function composition because a
-- `Traversal'` is a type synonym for a higher-order function:
--
-- > type Traversal' a b = forall f . Applicative f => (b -> f b) -> (a -> f a)
--
-- So under the hood we are composing two functions to get back a new
-- function:
--
-- > (.) :: Applicative f
-- > => ((b -> f b) -> (a -> f a))
-- > -> ((c -> f c) -> (b -> f b))
-- > -> ((c -> f c) -> (a -> f a))
--
-- In our original @Molecule@ example, we composed four `Traversal'`s
-- together to create a new `Traversal'`:
--
-- > -- Remember that `atoms`, `point`, and `x` are also `Traversal'`s
-- > atoms :: Traversal' Molecule [Atom]
-- > traverse :: Traversal' [Atom] Atom
-- > point :: Traversal' Atom Point
-- > x :: Traversal' Point Double
-- >
-- > -- Now compose them
-- > atoms :: Traversal' Molecule [Atom]
-- > atoms . traverse :: Traversal' Molecule Atom
-- > atoms . traverse . point :: Traversal' Molecule Point
-- > atoms . traverse . point . x :: Traversal' Molecule Double
--
-- This composite traversal lets us get or set the @x@ coordinates of a
-- @Molecule@.
--
-- > over (atoms . traverse . point . x)
-- > :: (Double -> Double) -> (Molecule -> Molecule)
-- >
-- > toListOf (atoms . traverse . point . x)
-- > :: Molecule -> [Double]
--
-- /Question:/ How do I consume traversals?
--
-- /Answer:/ Using `toListOf`, `set` or `over`
--
-- Here are their types:
--
-- > toListOf :: Traversal' a b -> a -> [b]
-- >
-- > over :: Traversal' a b -> (b -> b) -> a -> a
-- >
-- > set :: Traversal' a b -> b -> a -> a
-- > set traversal b = over traversal (\_ -> b)
--
-- Note that `toListOf` distributes over traversal composition:
--
-- > toListOf (traversal1 . traversal2) = (toListOf traversal1) >=> (toListOf traversal2)
-- >
-- > toListOf id = return
--
-- If you prefer object-oriented syntax you can also use (`^..`), which is an
-- infix operator equivalent to `toListOf`:
--
-- >>> Pair 3 4 ^.. traverse
-- [3,4]
{- $types
You might wonder why you can use `over` on both a `Lens'` and a
`Traversal'` but you can only use `view` on a `Lens'`. We can see why by
studying the (simplified) type and implementation of `over`:
> over :: ((b -> Identity b) -> (a -> Identity b)) -> (b -> b) -> a -> a
> over setter f x = runIdentity (setter (\y -> Identity (f y)) x)
To follow the implementation, just step slowly through the types. Here
are the types of the arguments to `over`:
> setter :: (b -> Identity b) -> (a -> Identity b)
> f :: b -> b
> x :: a
... and here are the types of the sub-expressions on the right-hand side:
> \y -> Identity (f y) :: b -> Identity b
> setter (\y -> Identity (f y)) :: a -> Identity a
> setter (\y -> Identity (f y)) x :: Identity a
> runIdentity (setter (\y -> Identity (f y)) x) :: a
We can replace @setter@ with @point@ and replace @x@ with @atom@ to see
that this generates the correct code for updating an atom's point:
> over point f atom
>
> -- Definition of `over`
> = runIdentity (point (\y -> Identity (f y)) atom)
>
> -- Definition of `point`
> = runIdentity (fmap (\newPoint -> atom { _point = newPoint }) (Identity (f (_point atom)))
>
> -- fmap g (Identity y) = Identity (g y)
> = runIdentity (Identity (atom { _point = f (_point atom) }))
>
> -- runIdentity (Identity z) = z
> = atom { _point = f (_point atom) }
... which is exactly what we would have written by hand without lenses.
The reason `over` works for both `Lens'`es and `Traversal'`s is because
`Identity` implements both `Functor` and `Applicative`:
> instance Functor Identity where ...
> instance Applicative Identity where ...
So both the `Lens'` type and `Traversal'` type synonyms:
> type Traversal' a b = forall f . Applicative f => (b -> f b) -> (a -> f a)
>
> type Lens' a b = forall f . Functor f => (b -> f b) -> (a -> f a)
... can both be specialized to use `Identity` in place of @f@:
> (b -> Identity b) -> (a -> Identity a)
... making them valid arguments to `over`.
Now let's study the (simplified) type and implementation of `view`:
> view :: ((b -> Const b b) -> (a -> Const b a)) -> a -> b
> view getter x = getConst (getter Const x)
Again, we can walk slowly through the types of the arguments:
> getter :: (b -> Const b b) -> (a -> Const b a)
> x :: a
... and the types of the sub-expressions on the right-hand side:
> getter Const :: a -> Const b a
> getter Const x :: Const b a
> getConst (getter Const x) :: b
Let's see how this plays out for the @point@ lens:
> view point atom
>
> -- Definition of `view`
> = getConst (point Const atom)
>
> -- Definition of `point`
> = getConst (fmap (\newPoint -> atom { _point = newPoint }) (Const (_point atom)))
>
> -- fmap g (Const y) = Const y
> = getConst (Const (_point atom))
>
> -- getConst (Const z) = z
> = _point atom
... which is exactly what we would have written by hand without lenses.
`view` accepts `Lens'`es because `Const` implements `Functor`:
> instance Functor (Const b)
... so the `Lens'` type synonym:
> type Lens' a b = forall f . Functor f => (b -> f b) -> (a -> f a)
... can be specialized to use @(`Const` b)@ in place of @f@:
> (b -> Const b b) -> (a -> Const b b)
... making it a valid argument to `view`.
Interestingly, `Const` implements also `Applicative`, but with a
constraint:
> instance Monoid b => Applicative (Const b)
This implies that we *can* use `view` on a `Traversal'`, but only if the
value that we extract is a `Monoid`. Let's try this out:
>>> let atom1 = Atom { _element = "C", _point = Point { _x = 1.0, _y = 2.0 } }
>>> let atom2 = Atom { _element = "O", _point = Point { _x = 3.0, _y = 4.0 } }
>>> let molecule = Molecule { _atoms = [atom1, atom2] }
>>> view (atoms . traverse . element) molecule
"CO"
This works because our traversal's result is a `String`:
> atoms . traverse . element :: Traversal' Molecule String
... and `String` implements the `Data.Monoid.Monoid` interface. When you
try to extract multiple strings using `view` they get flattened together
into a single `String` using `Data.Monoid.mappend`.
If you try to extract the element from an empty molecule:
>>> view (atoms . traverse . element) (Molecule { _atoms = [] })
""
You get the empty string (i.e. `Data.Monoid.mempty`).
This is why the result of a `Traversal'` needs to be a `Data.Monoid.Monoid`
when using `view`. If the `Traversal'` points to more than one value you
need some way to combine them into a single value (using
`Data.Monoid.mappend`) and if the `Traversal'` points to less than one
value you need a default value to return (using `Data.Monoid.mempty`).
If you try to `view` a `Traversal'` that doesn't point to a
`Data.Monoid.Monoid`, you will get the following type error:
> >>> view (atoms . traverse . point . x) molecule
> No instance for (Data.Monoid.Monoid Double)
> arising from a use of `traverse'
> In the first argument of `(.)', namely `traverse'
> In the second argument of `(.)', namely `traverse . point . x'
> In the first argument of `view', namely
> `(atoms . traverse . point . x)'
The compiler complains that `Double` does not implement the
`Data.Monoid.Monoid` type class, so there is no sensible way to merge all
the x coordinates that our `Traversal'` points to. For these cases you
should use `toListOf` instead.
-}
{- $drawbacks
Lenses come with trade-offs, so you should use them wisely.
For example, lenses do not produce the best error messages. Unless you
understand how `Traversal'`s work you will probably not understand the
above error message.
Also, lenses increase the learning curve for new Haskell programmers, so
you should consider avoiding them in tutorial code targeting novice
Haskell programmers.
Lenses also add a level of boilerplate to all data types to auto-generate
lenses and increase compile times. So for small projects the overhead of
adding lenses may dwarf the benefits.
@lens@ is also a library with a large dependency tree, focused on being
\"batteries included\" and covering a large cross-section of the Haskell
ecosystem. Browsing the Hackage listing you will find support modules
ranging from "System.FilePath.Lens" to "Control.Parallel.Strategies.Lens",
and many more. If you need a more light-weight alternative you can use
the @lens-simple@ or @microlens@ library, each of which provides a
restricted subset of the @lens@ library with a much smaller dependency tree.
The ideal use case for the @lens@ library is a medium-to-large project with
rich and deeply nested types. In these large projects the benefits of using
lenses outweigh the costs.
-}
{- $conclusion
This tutorial covers an extremely small subset of this library. If you
would like to learn more, you can begin by skimming the example code in the
following modules:
* "Control.Lens.Getter"
* "Control.Lens.Setter"
* "Control.Lens.Traversal"
* "Control.Lens.Tuple"
* "Control.Lens.Lens"
* "Control.Lens.Review"
* "Control.Lens.Prism"
* "Control.Lens.Iso"
The documentation for these modules includes several examples to get you
started and help you build an intuition for more advanced tricks that were
not covered in this tutorial.
You can also study several long-form examples here:
<https://github.com/ekmett/lens/tree/master/examples>
If you prefer light-weight @lens@-compatible libraries, then check out
    @lens-simple@ or @microlens@:
* <http://hackage.haskell.org/package/microlens microlens>
* <http://hackage.haskell.org/package/lens-simple lens-simple>
If you would like a broader survey of lens features, then you can check
out these tutorials:
* <https://www.fpcomplete.com/school/to-infinity-and-beyond/pick-of-the-week/a-little-lens-starter-tutorial A little lens starter tutorial> - Introduces
Prisms, Isos and JSON functionality
* <http://www.haskellforall.com/2013/05/program-imperatively-using-haskell.html Program imperatively using Haskell lenses> - Illustrates lens support for stateful code
-}
-- | A chemical atom: an element symbol paired with its 2D position.
data Atom = Atom { _element :: String, _point :: Point } deriving (Show)
-- | A point in two-dimensional space.
data Point = Point { _x :: Double, _y :: Double } deriving (Show)
-- | A molecule is a collection of atoms.
data Molecule = Molecule { _atoms :: [Atom] } deriving (Show)
-- | Two values of the same type; the derived 'Traversable' instance
-- targets both elements.
data Pair a = Pair a a deriving (Functor, Foldable, Traversable)
-- Template Haskell: generate lenses ('element', 'point', 'x', 'y',
-- 'atoms') from the underscore-prefixed record fields above.
makeLenses ''Atom
makeLenses ''Point
makeLenses ''Molecule
-- | Increment the x coordinate of a single atom by one.
shiftAtomX :: Atom -> Atom
shiftAtomX atom = over (point . x) (+ 1) atom
-- | Increment the x coordinate of every atom in a molecule by one.
shiftMoleculeX :: Molecule -> Molecule
shiftMoleculeX molecule = over (atoms . traverse . point . x) (+ 1) molecule
-- | Increment by one every 'Double' targeted by the given setter.
shift :: ASetter' a Double -> a -> a
shift l = over l (\v -> v + 1)
|
ocramz/Haskell-Lens-Tutorial-Library
|
src/Control/Lens/Tutorial.hs
|
bsd-3-clause
| 28,724
| 0
| 9
| 7,145
| 574
| 449
| 125
| 22
| 1
|
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[RnPat]{Renaming of patterns}
Basically dependency analysis.
Handles @Match@, @GRHSs@, @HsExpr@, and @Qualifier@ datatypes. In
general, all of these functions return a renamed thing, and a set of
free variables.
-}
{-# LANGUAGE CPP, RankNTypes, ScopedTypeVariables #-}
module RnPat (-- main entry points
rnPat, rnPats, rnBindPat, rnPatAndThen,
NameMaker, applyNameMaker, -- a utility for making names:
localRecNameMaker, topRecNameMaker, -- sometimes we want to make local names,
-- sometimes we want to make top (qualified) names.
isTopRecNameMaker,
rnHsRecFields, HsRecFieldContext(..),
rnHsRecUpdFields,
-- CpsRn monad
CpsRn, liftCps,
-- Literals
rnLit, rnOverLit,
-- Pattern Error messages that are also used elsewhere
checkTupSize, patSigErr
) where
-- ENH: thin imports to only what is necessary for patterns
import {-# SOURCE #-} RnExpr ( rnLExpr )
import {-# SOURCE #-} RnSplice ( rnSplicePat )
#include "HsVersions.h"
import HsSyn
import TcRnMonad
import TcHsSyn ( hsOverLitName )
import RnEnv
import RnTypes
import PrelNames
import TyCon ( tyConName )
import ConLike
import Type ( TyThing(..) )
import Name
import NameSet
import RdrName
import BasicTypes
import Util
import ListSetOps ( removeDups )
import Outputable
import SrcLoc
import Literal ( inCharRange )
import TysWiredIn ( nilDataCon )
import DataCon
import qualified GHC.LanguageExtensions as LangExt
import Control.Monad ( when, liftM, ap )
import Data.Ratio
{-
*********************************************************
* *
The CpsRn Monad
* *
*********************************************************
Note [CpsRn monad]
~~~~~~~~~~~~~~~~~~
The CpsRn monad uses continuation-passing style to support this
style of programming:
do { ...
; ns <- bindNames rs
; ...blah... }
where rs::[RdrName], ns::[Name]
The idea is that '...blah...'
a) sees the bindings of ns
b) returns the free variables it mentions
so that bindNames can report unused ones
In particular,
mapM rnPatAndThen [p1, p2, p3]
has a *left-to-right* scoping: it makes the binders in
p1 scope over p2,p3.
-}
-- | The continuation-passing renamer monad: a computation producing a
-- @b@ is a function over its continuation, so later actions can see the
-- bindings made here while their free variables are still collected and
-- returned.  See Note [CpsRn monad].
newtype CpsRn b = CpsRn { unCpsRn :: forall r. (b -> RnM (r, FreeVars))
                                  -> RnM (r, FreeVars) }
        -- See Note [CpsRn monad]
-- Standard Functor/Applicative/Monad structure for the CPS encoding.
instance Functor CpsRn where
    fmap = liftM

instance Applicative CpsRn where
    pure x = CpsRn (\k -> k x)
    (<*>) = ap

instance Monad CpsRn where
  -- Sequencing threads the continuation through both computations
  (CpsRn m) >>= mk = CpsRn (\k -> m (\v -> unCpsRn (mk v) k))
-- | Leave the CPS world: run the computation with a trivial final
-- continuation that pairs the result with no free variables.
runCps :: CpsRn a -> RnM (a, FreeVars)
runCps (CpsRn m) = m final
  where
    final res = return (res, emptyFVs)
-- | Lift an ordinary renamer action into 'CpsRn'; it contributes no
-- bindings and no free variables of its own.
liftCps :: RnM a -> CpsRn a
liftCps rn_thing = CpsRn (rn_thing >>=)
-- | Lift a renamer action that also returns free variables; those are
-- unioned with the free variables of the rest of the computation.
liftCpsFV :: RnM (a, FreeVars) -> CpsRn a
liftCpsFV rn_thing = CpsRn $ \k ->
    do { (v,   fvs_here) <- rn_thing
       ; (res, fvs_rest) <- k v
       ; return (res, fvs_here `plusFV` fvs_rest) }
-- | Run a CPS renaming function on a located thing: set the source span
-- for error messages while renaming, and re-attach the same span to the
-- renamed result.
wrapSrcSpanCps :: (a -> CpsRn b) -> Located a -> CpsRn (Located b)
wrapSrcSpanCps fn (L loc a)
  = CpsRn $ \k ->
      setSrcSpan loc $
      unCpsRn (fn a) (\v -> k (L loc v))
-- | Look up a data constructor occurring in a pattern, and record the
-- occurrence as a use by adding it to the returned free variables.
lookupConCps :: Located RdrName -> CpsRn (Located Name)
lookupConCps con_rdr
  = CpsRn (\k -> do { con_name <- lookupLocatedOccRn con_rdr
                    ; (r, fvs) <- k con_name
                    ; return (r, addOneFV fvs (unLoc con_name)) })
    -- We add the constructor name to the free vars
    -- See Note [Patterns are uses]
{-
Note [Patterns are uses]
~~~~~~~~~~~~~~~~~~~~~~~~
Consider
module Foo( f, g ) where
data T = T1 | T2
f T1 = True
f T2 = False
g _ = T1
Arguably we should report T2 as unused, even though it appears in a
pattern, because it never occurs in a constructed position. See
Trac #7336.
However, implementing this in the face of pattern synonyms would be
less straightforward, since given two pattern synonyms
pattern P1 <- P2
pattern P2 <- ()
we need to observe the dependency between P1 and P2 so that type
checking can be done in the correct order (just like for value
bindings). Dependencies between bindings is analyzed in the renamer,
where we don't know yet whether P2 is a constructor or a pattern
synonym. So for now, we do report conid occurrences in patterns as
uses.
*********************************************************
* *
Name makers
* *
*********************************************************
Externally abstract type of name makers,
which is how you go from a RdrName to a Name
-}
-- | How to turn the 'RdrName's bound by a pattern into 'Name's; the
-- choice depends on where the pattern occurs (see "Name makers" above).
data NameMaker
  = LamMk       -- Lambdas
      Bool      -- True <=> report unused bindings
                --   (even if True, the warning only comes out
                --    if -Wunused-matches is on)

  | LetMk       -- Let bindings, incl top level
                -- Do *not* check for unused bindings
      TopLevelFlag
      MiniFixityEnv
-- | Name maker for top-level (qualified) binders, carrying the local
-- fixity declarations to bring into scope with them.
topRecNameMaker :: MiniFixityEnv -> NameMaker
topRecNameMaker = LetMk TopLevel
-- | Does this 'NameMaker' create top-level (qualified) names?
isTopRecNameMaker :: NameMaker -> Bool
isTopRecNameMaker nm = case nm of
  LetMk TopLevel _ -> True
  _                -> False
-- | Name maker for local recursive bindings (e.g. let/where groups).
localRecNameMaker :: MiniFixityEnv -> NameMaker
localRecNameMaker = LetMk NotTopLevel
-- | The 'NameMaker' for binders in a match (lambda, case alternative,
-- and so on).  Unused-binding warnings are suppressed in interactive
-- GHCi statements and in Template Haskell pattern quotes.
matchNameMaker :: HsMatchContext a -> NameMaker
matchNameMaker ctxt = LamMk (report_unused ctxt)
  where
    -- Do not report unused names in interactive contexts
    -- i.e. when you type 'x <- e' at the GHCi prompt
    report_unused (StmtCtxt GhciStmtCtxt) = False
    -- also, don't warn in pattern quotes, as there
    -- is no RHS where the variables can be used!
    report_unused ThPatQuote              = False
    report_unused _                       = True
-- | Rename a pattern type signature, scoping any type variables it
-- binds over the continuation.
rnHsSigCps :: LHsSigWcType RdrName -> CpsRn (LHsSigWcType Name)
rnHsSigCps sig = CpsRn (rnHsSigWcTypeScoped PatCtx sig)
-- | Make a fresh pattern binder, keeping the location of the original
-- 'RdrName' on the resulting 'Name'.
newPatLName :: NameMaker -> Located RdrName -> CpsRn (Located Name)
newPatLName name_maker rdr_name@(L loc _)
  = fmap (L loc) (newPatName name_maker rdr_name)
-- | Turn a pattern-bound 'RdrName' into a 'Name' and bind it over the
-- continuation.  The two equations correspond to the two 'NameMaker's:
-- lambda-style binders warn about unused matches and remove themselves
-- from the returned free variables; let-style binders also bring their
-- fixity declarations into scope.
newPatName :: NameMaker -> Located RdrName -> CpsRn Name
newPatName (LamMk report_unused) rdr_name
  = CpsRn (\ thing_inside ->
        do { name <- newLocalBndrRn rdr_name
           ; (res, fvs) <- bindLocalNames [name] (thing_inside name)
           -- Warn if the continuation never mentioned the binder
           ; when report_unused $ warnUnusedMatches [name] fvs
           ; return (res, name `delFV` fvs) })

newPatName (LetMk is_top fix_env) rdr_name
  = CpsRn (\ thing_inside ->
        do { name <- case is_top of
                       NotTopLevel -> newLocalBndrRn rdr_name
                       TopLevel    -> newTopSrcBinder rdr_name
           ; bindLocalNames [name] $       -- Do *not* use bindLocalNameFV here
                                           -- See Note [View pattern usage]
             addLocalFixities fix_env [name] $
             thing_inside name })
                  -- Note: the bindLocalNames is somewhat suspicious
                  -- because it binds a top-level name as a local name.
                  -- however, this binding seems to work, and it only exists for
                  -- the duration of the patterns and the continuation;
                  -- then the top-level name is added to the global env
                  -- before going on to the RHSes (see RnSource.hs).
{-
Note [View pattern usage]
~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
let (r, (r -> x)) = x in ...
Here the pattern binds 'r', and then uses it *only* in the view pattern.
We want to "see" this use, and in let-bindings we collect all uses and
report unused variables at the binding level. So we must use bindLocalNames
here, *not* bindLocalNameFV. Trac #3943.
*********************************************************
* *
External entry points
* *
*********************************************************
There are various entry points to renaming patterns, depending on
(1) whether the names created should be top-level names or local names
(2) whether the scope of the names is entirely given in a continuation
(e.g., in a case or lambda, but not in a let or at the top-level,
because of the way mutually recursive bindings are handled)
 (3) whether a type signature in the pattern can bind
lexically-scoped type variables (for unpacking existential
type vars in data constructors)
(4) whether we do duplicate and unused variable checking
(5) whether there are fixity declarations associated with the names
bound by the patterns that need to be brought into scope with them.
Rather than burdening the clients of this module with all of these choices,
we export the three points in this design space that we actually need:
-}
-- ----------- Entry point 1: rnPats -------------------
-- Binds local names; the scope of the bindings is entirely in the thing_inside
-- * allows type sigs to bind type vars
-- * local namemaker
-- * unused and duplicate checking
-- * no fixities
-- | Rename a list of patterns, binding their variables over
-- @thing_inside@, then check for duplicate and shadowed binders.
rnPats :: HsMatchContext Name -- for error messages
       -> [LPat RdrName]
       -> ([LPat Name] -> RnM (a, FreeVars))
       -> RnM (a, FreeVars)
rnPats ctxt pats thing_inside
  = do  { envs_before <- getRdrEnvs

          -- (1) rename the patterns, bringing into scope all of the term variables
          -- (2) then do the thing inside.
        ; unCpsRn (rnLPatsAndThen (matchNameMaker ctxt) pats) $ \ pats' -> do
        { -- Check for duplicated and shadowed names
          -- Must do this *after* renaming the patterns
          -- See Note [Collect binders only after renaming] in HsUtils
          -- Because we don't bind the vars all at once, we can't
          --   check incrementally for duplicates;
          -- Nor can we check incrementally for shadowing, else we'll
          --   complain *twice* about duplicates e.g. f (x,x) = ...
        ; addErrCtxt doc_pat $
          checkDupAndShadowedNames envs_before $
          collectPatsBinders pats'
        ; thing_inside pats' } }
  where
    doc_pat = text "In" <+> pprMatchContext ctxt
-- | Rename a single pattern; convenience wrapper around 'rnPats'.
rnPat :: HsMatchContext Name -- for error messages
      -> LPat RdrName
      -> (LPat Name -> RnM (a, FreeVars))
      -> RnM (a, FreeVars)     -- Variables bound by pattern do not
                               -- appear in the result FreeVars
rnPat ctxt pat thing_inside
  = rnPats ctxt [pat] with_single
  where
    -- 'rnPats' on a singleton list always yields a singleton list back
    with_single pats' = let [pat'] = pats' in thing_inside pat'
-- | Run a 'NameMaker' outside the CPS machinery, discarding the free
-- variables it collects.
applyNameMaker :: NameMaker -> Located RdrName -> RnM (Located Name)
applyNameMaker mk rdr = fmap fst (runCps (newPatLName mk rdr))
-- ----------- Entry point 2: rnBindPat -------------------
-- Binds local names; in a recursive scope that involves other bound vars
-- e.g let { (x, Just y) = e1; ... } in ...
-- * does NOT allows type sig to bind type vars
-- * local namemaker
-- * no unused and duplicate checking
-- * fixities might be coming in
-- | Rename a pattern on the left of a binding (entry point 2 above).
rnBindPat :: NameMaker
          -> LPat RdrName
          -> RnM (LPat Name, FreeVars)
-- Returned FreeVars are the free variables of the pattern,
-- of course excluding variables bound by this pattern
rnBindPat name_maker = runCps . rnLPatAndThen name_maker
{-
*********************************************************
* *
The main event
* *
*********************************************************
-}
-- ----------- Entry point 3: rnLPatAndThen -------------------
-- General version: parametrized by how you make new names
-- General version: parametrized by how you make new names
rnLPatsAndThen :: NameMaker -> [LPat RdrName] -> CpsRn [LPat Name]
rnLPatsAndThen mk pats = mapM (rnLPatAndThen mk) pats
  -- Despite the map, the monad ensures that each pattern binds
  -- variables that may be mentioned in subsequent patterns in the list
--------------------
-- The workhorse
-- | Rename one located pattern: set its source span, rename the
-- pattern inside, and re-attach the span to the result.
rnLPatAndThen :: NameMaker -> LPat RdrName -> CpsRn (LPat Name)
rnLPatAndThen nm = wrapSrcSpanCps (rnPatAndThen nm)
-- | The per-constructor workhorse: rename one pattern, dispatching on
-- its shape, and bind its variables over the continuation.
rnPatAndThen :: NameMaker -> Pat RdrName -> CpsRn (Pat Name)
rnPatAndThen _  (WildPat _)   = return (WildPat placeHolderType)
rnPatAndThen mk (ParPat pat)  = do { pat' <- rnLPatAndThen mk pat; return (ParPat pat') }
rnPatAndThen mk (LazyPat pat) = do { pat' <- rnLPatAndThen mk pat; return (LazyPat pat') }
rnPatAndThen mk (BangPat pat) = do { pat' <- rnLPatAndThen mk pat; return (BangPat pat') }
rnPatAndThen mk (VarPat (L l rdr)) = do { loc <- liftCps getSrcSpanM
                                        ; name <- newPatName mk (L loc rdr)
                                        ; return (VarPat (L l name)) }
     -- we need to bind pattern variables for view pattern expressions
     -- (e.g. in the pattern (x, x -> y) x needs to be bound in the rhs of the tuple)

rnPatAndThen mk (SigPatIn pat sig)
  -- When renaming a pattern type signature (e.g. f (a :: T) = ...), it is
  -- important to rename its type signature _before_ renaming the rest of the
  -- pattern, so that type variables are first bound by the _outermost_ pattern
  -- type signature they occur in. This keeps the type checker happy when
  -- pattern type signatures happen to be nested (#7827)
  --
  -- f ((Just (x :: a) :: Maybe a)
  -- ~~~~~~~~~~~~~~~~~~~~~~~~~~~^       `a' is first bound here
  --           ~~~~~~~~~~~~~~~^         the same `a' then used here
  = do { sig' <- rnHsSigCps sig
       ; pat' <- rnLPatAndThen mk pat
       ; return (SigPatIn pat' sig') }

rnPatAndThen mk (LitPat lit)
  -- With OverloadedStrings, a string literal pattern is renamed as an
  -- overloaded-literal pattern instead
  | HsString src s <- lit
  = do { ovlStr <- liftCps (xoptM LangExt.OverloadedStrings)
       ; if ovlStr
         then rnPatAndThen mk
                  (mkNPat (noLoc (mkHsIsString src s placeHolderType))
                          Nothing)
         else normal_lit }
  | otherwise = normal_lit
  where
    normal_lit = do { liftCps (rnLit lit); return (LitPat lit) }

rnPatAndThen _ (NPat (L l lit) mb_neg _eq _)
  = do { lit'    <- liftCpsFV $ rnOverLit lit
       ; mb_neg' <- liftCpsFV $ case mb_neg of
                      Nothing -> return (Nothing, emptyFVs)
                      Just _  -> do { (neg, fvs) <- lookupSyntaxName negateName
                                    ; return (Just neg, fvs) }
       ; eq' <- liftCpsFV $ lookupSyntaxName eqName
       ; return (NPat (L l lit') mb_neg' eq' placeHolderType) }

rnPatAndThen mk (NPlusKPat rdr (L l lit) _ _ _ _)
  = do { new_name <- newPatName mk rdr
       ; lit'  <- liftCpsFV $ rnOverLit lit
       ; minus <- liftCpsFV $ lookupSyntaxName minusName
       ; ge    <- liftCpsFV $ lookupSyntaxName geName
       ; return (NPlusKPat (L (nameSrcSpan new_name) new_name)
                           (L l lit') lit' ge minus placeHolderType) }
        -- The Report says that n+k patterns must be in Integral

rnPatAndThen mk (AsPat rdr pat)
  = do { new_name <- newPatLName mk rdr
       ; pat' <- rnLPatAndThen mk pat
       ; return (AsPat new_name pat') }

rnPatAndThen mk p@(ViewPat expr pat _ty)
  = do { liftCps $ do { vp_flag <- xoptM LangExt.ViewPatterns
                      ; checkErr vp_flag (badViewPat p) }
         -- Because of the way we're arranging the recursive calls,
         -- this will be in the right context
       ; expr' <- liftCpsFV $ rnLExpr expr
       ; pat' <- rnLPatAndThen mk pat
       -- Note: at this point the PreTcType in ty can only be a placeHolder
       -- ; return (ViewPat expr' pat' ty) }
       ; return (ViewPat expr' pat' placeHolderType) }

rnPatAndThen mk (ConPatIn con stuff)
   -- rnConPatAndThen takes care of reconstructing the pattern
   -- The pattern for the empty list needs to be replaced by an empty
   -- explicit list pattern when overloaded lists is turned on.
  = case unLoc con == nameRdrName (dataConName nilDataCon) of
      True  -> do { ol_flag <- liftCps $ xoptM LangExt.OverloadedLists
                  ; if ol_flag then rnPatAndThen mk (ListPat [] placeHolderType Nothing)
                               else rnConPatAndThen mk con stuff}
      False -> rnConPatAndThen mk con stuff

rnPatAndThen mk (ListPat pats _ _)
  = do { opt_OverloadedLists <- liftCps $ xoptM LangExt.OverloadedLists
       ; pats' <- rnLPatsAndThen mk pats
       ; case opt_OverloadedLists of
          -- With OverloadedLists the pattern also records the toList
          -- conversion for the typechecker
          True  -> do { (to_list_name,_) <- liftCps $ lookupSyntaxName toListName
                      ; return (ListPat pats' placeHolderType
                                        (Just (placeHolderType, to_list_name)))}
          False -> return (ListPat pats' placeHolderType Nothing) }

rnPatAndThen mk (PArrPat pats _)
  = do { pats' <- rnLPatsAndThen mk pats
       ; return (PArrPat pats' placeHolderType) }

rnPatAndThen mk (TuplePat pats boxed _)
  = do { liftCps $ checkTupSize (length pats)
       ; pats' <- rnLPatsAndThen mk pats
       ; return (TuplePat pats' boxed []) }

rnPatAndThen mk (SumPat pat alt arity _)
  = do { pat <- rnLPatAndThen mk pat
       ; return (SumPat pat alt arity PlaceHolder)
       }

-- If a splice has been run already, just rename the result.
rnPatAndThen mk (SplicePat (HsSpliced mfs (HsSplicedPat pat)))
  = SplicePat . HsSpliced mfs . HsSplicedPat <$> rnPatAndThen mk pat

rnPatAndThen mk (SplicePat splice)
  = do { eith <- liftCpsFV $ rnSplicePat splice
       ; case eith of   -- See Note [rnSplicePat] in RnSplice
           Left  not_yet_renamed -> rnPatAndThen mk not_yet_renamed
           Right already_renamed -> return already_renamed }

-- Any remaining pattern form reaching here is a renamer bug
rnPatAndThen _ pat = pprPanic "rnLPatAndThen" (ppr pat)
--------------------
-- | Rename a constructor pattern: look up the constructor, then rename
-- the argument patterns (prefix, infix, or record syntax).
rnConPatAndThen :: NameMaker
                -> Located RdrName    -- the constructor
                -> HsConPatDetails RdrName
                -> CpsRn (Pat Name)

rnConPatAndThen mk con (PrefixCon pats)
  = do  { con'  <- lookupConCps con
        ; pats' <- rnLPatsAndThen mk pats
        ; return (ConPatIn con' (PrefixCon pats')) }

rnConPatAndThen mk con (InfixCon pat1 pat2)
  = do  { con'  <- lookupConCps con
        ; pat1' <- rnLPatAndThen mk pat1
        ; pat2' <- rnLPatAndThen mk pat2
        -- Reassociate the pattern according to the operator's fixity
        ; fixity <- liftCps $ lookupFixityRn (unLoc con')
        ; liftCps $ mkConOpPatRn con' fixity pat1' pat2' }

rnConPatAndThen mk con (RecCon rpats)
  = do  { con'   <- lookupConCps con
        ; rpats' <- rnHsRecPatsAndThen mk con' rpats
        ; return (ConPatIn con' (RecCon rpats')) }
--------------------
-- | Rename the fields of a record pattern, filling in any fields
-- implied by @..@ via 'rnHsRecFields'.
rnHsRecPatsAndThen :: NameMaker
                   -> Located Name      -- Constructor
                   -> HsRecFields RdrName (LPat RdrName)
                   -> CpsRn (HsRecFields Name (LPat Name))
rnHsRecPatsAndThen mk (L _ con) hs_rec_fields@(HsRecFields { rec_dotdot = dd })
  = do { flds <- liftCpsFV $ rnHsRecFields (HsRecFieldPat con) mkVarPat
                                            hs_rec_fields
       ; flds' <- mapM rn_field (flds `zip` [1..])
       ; return (HsRecFields { rec_flds = flds', rec_dotdot = dd }) }
  where
    mkVarPat l n = VarPat (L l n)
    rn_field (L l fld, n') = do { arg' <- rnLPatAndThen (nested_mk dd mk n')
                                                        (hsRecFieldArg fld)
                                ; return (L l (fld { hsRecFieldArg = arg' })) }

    -- Suppress unused-match reporting for fields introduced by ".."
    nested_mk Nothing  mk                    _  = mk
    nested_mk (Just _) mk@(LetMk {})         _  = mk
    nested_mk (Just n) (LamMk report_unused) n' = LamMk (report_unused && (n' <= n))
{-
************************************************************************
* *
Record fields
* *
************************************************************************
-}
-- | Where a set of record fields occurs: a record construction, a
-- record pattern (both carrying the constructor), or a record update.
data HsRecFieldContext
  = HsRecFieldCon Name
  | HsRecFieldPat Name
  | HsRecFieldUpd
-- | Rename the left-hand sides of a set of record fields (construction
-- or pattern), expanding puns and @..@ wildcards.
rnHsRecFields
    :: forall arg.
       HsRecFieldContext
    -> (SrcSpan -> RdrName -> arg)
         -- When punning, use this to build a new field
    -> HsRecFields RdrName (Located arg)
    -> RnM ([LHsRecField Name (Located arg)], FreeVars)

-- This surprisingly complicated pass
--  a) looks up the field name (possibly using disambiguation)
--  b) fills in puns and dot-dot stuff
-- When we've finished, we've renamed the LHS, but not the RHS,
-- of each x=e binding
--
-- This is used for record construction and pattern-matching, but not updates.

rnHsRecFields ctxt mk_arg (HsRecFields { rec_flds = flds, rec_dotdot = dotdot })
  = do { pun_ok      <- xoptM LangExt.RecordPuns
       ; disambig_ok <- xoptM LangExt.DisambiguateRecordFields
       ; parent <- check_disambiguation disambig_ok mb_con
       ; flds1  <- mapM (rn_fld pun_ok parent) flds
       ; mapM_ (addErr . dupFieldErr ctxt) dup_flds
       ; dotdot_flds <- rn_dotdot dotdot mb_con flds1
       ; let all_flds | null dotdot_flds = flds1
                      | otherwise        = flds1 ++ dotdot_flds
       ; return (all_flds, mkFVs (getFieldIds all_flds)) }
  where
    mb_con = case ctxt of
                HsRecFieldCon con  | not (isUnboundName con) -> Just con
                HsRecFieldPat con  | not (isUnboundName con) -> Just con
                _ {- update or isUnboundName con -}          -> Nothing
           -- The unbound name test is because if the constructor
           -- isn't in scope the constructor lookup will add an error,
           -- but still return an unbound name.
           -- We don't want that to screw up the dot-dot fill-in stuff.

    doc = case mb_con of
            Nothing  -> text "constructor field name"
            Just con -> text "field of constructor" <+> quotes (ppr con)

    -- Rename one field: look up the selector and expand a pun into an
    -- explicit @field = field@ binding
    rn_fld :: Bool -> Maybe Name -> LHsRecField RdrName (Located arg)
           -> RnM (LHsRecField Name (Located arg))
    rn_fld pun_ok parent (L l (HsRecField { hsRecFieldLbl
                                              = L loc (FieldOcc (L ll lbl) _)
                                          , hsRecFieldArg = arg
                                          , hsRecPun      = pun }))
      = do { sel <- setSrcSpan loc $ lookupRecFieldOcc parent doc lbl
           ; arg' <- if pun
                     then do { checkErr pun_ok (badPun (L loc lbl))
                               -- Discard any module qualifier (#11662)
                             ; let arg_rdr = mkRdrUnqual (rdrNameOcc lbl)
                             ; return (L loc (mk_arg loc arg_rdr)) }
                     else return arg
           ; return (L l (HsRecField { hsRecFieldLbl
                                         = L loc (FieldOcc (L ll lbl) sel)
                                     , hsRecFieldArg = arg'
                                     , hsRecPun      = pun })) }

    rn_dotdot :: Maybe Int            -- See Note [DotDot fields] in HsPat
              -> Maybe Name           -- The constructor (Nothing for an
                                      --    out of scope constructor)
              -> [LHsRecField Name (Located arg)]   -- Explicit fields
              -> RnM [LHsRecField Name (Located arg)] -- Filled in .. fields
    rn_dotdot Nothing _mb_con _flds     -- No ".." at all
      = return []
    rn_dotdot (Just {}) Nothing _flds   -- Constructor out of scope
      = return []
    rn_dotdot (Just n) (Just con) flds  -- ".." on record construction / pat match
      = ASSERT( n == length flds )
        do { loc <- getSrcSpanM -- Rather approximate
           ; dd_flag <- xoptM LangExt.RecordWildCards
           ; checkErr dd_flag (needFlagDotDot ctxt)
           ; (rdr_env, lcl_env) <- getRdrEnvs
           ; con_fields <- lookupConstructorFields con
           ; when (null con_fields) (addErr (badDotDotCon con))
           ; let present_flds = map (occNameFS . rdrNameOcc) $ getFieldLbls flds

                   -- For constructor uses (but not patterns)
                   -- the arg should be in scope locally;
                   -- i.e. not top level or imported
                   -- Eg.  data R = R { x,y :: Int }
                   --      f x = R { .. }   -- Should expand to R {x=x}, not R{x=x,y=y}
                 arg_in_scope lbl = mkVarUnqual lbl `elemLocalRdrEnv` lcl_env

                 dot_dot_gres = [ (lbl, sel, head gres)
                                | fl <- con_fields
                                , let lbl = flLabel fl
                                , let sel = flSelector fl
                                , not (lbl `elem` present_flds)
                                , let gres = lookupGRE_Field_Name rdr_env sel lbl
                                , not (null gres)  -- Check selector is in scope
                                , case ctxt of
                                    HsRecFieldCon {} -> arg_in_scope lbl
                                    _other           -> True ]

           ; addUsedGREs (map thdOf3 dot_dot_gres)
           ; return [ L loc (HsRecField
                        { hsRecFieldLbl = L loc (FieldOcc (L loc arg_rdr) sel)
                        , hsRecFieldArg = L loc (mk_arg loc arg_rdr)
                        , hsRecPun      = False })
                    | (lbl, sel, _) <- dot_dot_gres
                    , let arg_rdr = mkVarUnqual lbl ] }

    check_disambiguation :: Bool -> Maybe Name -> RnM (Maybe Name)
    -- When disambiguation is on, return name of parent tycon.
    check_disambiguation disambig_ok mb_con
      | disambig_ok, Just con <- mb_con
      = do { env <- getGlobalRdrEnv; return (find_tycon env con) }
      | otherwise = return Nothing

    find_tycon :: GlobalRdrEnv -> Name {- DataCon -} -> Maybe Name {- TyCon -}
    -- Return the parent *type constructor* of the data constructor
    -- (that is, the parent of the data constructor),
    -- or 'Nothing' if it is a pattern synonym or not in scope.
    -- That's the parent to use for looking up record fields.
    find_tycon env con_name
      | Just (AConLike (RealDataCon dc)) <- wiredInNameTyThing_maybe con_name
      = Just (tyConName (dataConTyCon dc))
        -- Special case for [], which is built-in syntax
        -- and not in the GlobalRdrEnv (Trac #8448)

      | Just gre <- lookupGRE_Name env con_name
      = case gre_par gre of
          ParentIs p -> Just p
          _          -> Nothing  -- Can happen if the con_name
                                 -- is for a pattern synonym

      | otherwise = Nothing
        -- Data constructor not lexically in scope at all
        -- See Note [Disambiguation and Template Haskell]

    dup_flds :: [[RdrName]]
        -- Each list represents a RdrName that occurred more than once
        -- (the list contains all occurrences)
        -- Each list in dup_fields is non-empty
    (_, dup_flds) = removeDups compare (getFieldLbls flds)
{- Note [Disambiguation and Template Haskell]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider (Trac #12130)
module Foo where
import M
b = $(funny)
module M(funny) where
data T = MkT { x :: Int }
funny :: Q Exp
funny = [| MkT { x = 3 } |]
When we splice, neither T nor MkT are lexically in scope, so find_tycon will
fail. But there is no need for disambiguation anyway, so we just return Nothing
-}
-- | Rename the fields of a record update expression @e { f1 = e1, ... }@.
-- Reports duplicate field labels, punning without @NamedFieldPuns@, and the
-- empty update @e {}@.  With @DuplicateRecordFields@, field disambiguation
-- is deferred to the typechecker, so a field may rename to an 'Ambiguous'
-- occurrence carrying all candidate selectors.
rnHsRecUpdFields
    :: [LHsRecUpdField RdrName]
    -> RnM ([LHsRecUpdField Name], FreeVars)
rnHsRecUpdFields flds
  = do { pun_ok        <- xoptM LangExt.RecordPuns
       ; overload_ok   <- xoptM LangExt.DuplicateRecordFields
       ; (flds1, fvss) <- mapAndUnzipM (rn_fld pun_ok overload_ok) flds
       ; mapM_ (addErr . dupFieldErr HsRecFieldUpd) dup_flds

         -- Check for an empty record update  e {}
         -- NB: don't complain about e { .. }, because rn_dotdot has done that already
       ; when (null flds) $ addErr emptyUpdateErr

       ; return (flds1, plusFVs fvss) }
  where
    doc = text "constructor field name"

    -- Rename one update field; returns the renamed field plus its free vars.
    rn_fld :: Bool -> Bool -> LHsRecUpdField RdrName -> RnM (LHsRecUpdField Name, FreeVars)
    rn_fld pun_ok overload_ok (L l (HsRecField { hsRecFieldLbl = L loc f
                                               , hsRecFieldArg = arg
                                               , hsRecPun      = pun }))
      = do { let lbl = rdrNameAmbiguousFieldOcc f
           ; sel <- setSrcSpan loc $
                      -- Defer renaming of overloaded fields to the typechecker
                      -- See Note [Disambiguating record fields] in TcExpr
                      if overload_ok
                          then do { mb <- lookupGlobalOccRn_overloaded overload_ok lbl
                                  ; case mb of
                                      Nothing -> do { addErr (unknownSubordinateErr doc lbl)
                                                    ; return (Right []) }
                                      Just r  -> return r }
                          else fmap Left $ lookupGlobalOccRn lbl
             -- A punned field  e { f }  behaves like  e { f = f }
           ; arg' <- if pun
                     then do { checkErr pun_ok (badPun (L loc lbl))
                               -- Discard any module qualifier (#11662)
                             ; let arg_rdr = mkRdrUnqual (rdrNameOcc lbl)
                             ; return (L loc (HsVar (L loc arg_rdr))) }
                     else return arg
           ; (arg'', fvs) <- rnLExpr arg'
             -- An unambiguous selector contributes a free variable; a still-
             -- ambiguous one contributes none (resolved later in TcExpr).
           ; let fvs' = case sel of
                          Left sel_name -> fvs `addOneFV` sel_name
                          Right [FieldOcc _ sel_name] -> fvs `addOneFV` sel_name
                          Right _       -> fvs
                 lbl' = case sel of
                          Left sel_name ->
                                    L loc (Unambiguous (L loc lbl) sel_name)
                          Right [FieldOcc lbl sel_name] ->
                                    L loc (Unambiguous lbl sel_name)
                          Right _ -> L loc (Ambiguous   (L loc lbl) PlaceHolder)
           ; return (L l (HsRecField { hsRecFieldLbl = lbl'
                                     , hsRecFieldArg = arg''
                                     , hsRecPun      = pun }), fvs') }

    dup_flds :: [[RdrName]]
      -- Each list represents a RdrName that occurred more than once
      -- (the list contains all occurrences)
      -- Each list in dup_fields is non-empty
    (_, dup_flds) = removeDups compare (getFieldUpdLbls flds)
-- | The selector 'Name' of each record field, in order.
getFieldIds :: [LHsRecField Name arg] -> [Name]
getFieldIds = map (unLoc . hsRecFieldSel . unLoc)
-- | The textual 'RdrName' label of each record field, in order.
getFieldLbls :: [LHsRecField id arg] -> [RdrName]
getFieldLbls = map fieldLbl
  where
    fieldLbl = unLoc . rdrNameFieldOcc . unLoc . hsRecFieldLbl . unLoc
-- | The (possibly ambiguous) 'RdrName' label of each update field, in order.
getFieldUpdLbls :: [LHsRecUpdField id] -> [RdrName]
getFieldUpdLbls = map updLbl
  where
    updLbl = rdrNameAmbiguousFieldOcc . unLoc . hsRecFieldLbl . unLoc
-- | Error for @..@ used without the @RecordWildCards@ extension.
needFlagDotDot :: HsRecFieldContext -> SDoc
needFlagDotDot ctxt = vcat [text "Illegal `..' in record" <+> pprRFC ctxt,
                            text "Use RecordWildCards to permit this"]
-- | Error for @..@ applied to a constructor that has no record fields.
badDotDotCon :: Name -> SDoc
badDotDotCon con
  = vcat [ text "Illegal `..' notation for constructor" <+> quotes (ppr con)
         , nest 2 (text "The constructor has no labelled fields") ]
-- | Error for the empty record update @e {}@.
emptyUpdateErr :: SDoc
emptyUpdateErr = text "Empty record update"
-- | Error for a punned field @C { f }@ without the @NamedFieldPuns@ extension.
badPun :: Located RdrName -> SDoc
badPun fld = vcat [text "Illegal use of punning for field" <+> quotes (ppr fld),
                   text "Use NamedFieldPuns to permit this"]
-- | Error for a field label that appears more than once in a record
-- construction, pattern, or update.  Only the first occurrence is shown.
dupFieldErr :: HsRecFieldContext -> [RdrName] -> SDoc
dupFieldErr ctxt dups
  = hsep [text "duplicate field name",
          quotes (ppr (head dups)),
          text "in record", pprRFC ctxt]
-- | One-word description of a record-field context, used in error messages.
pprRFC :: HsRecFieldContext -> SDoc
pprRFC ctxt = case ctxt of
  HsRecFieldCon {} -> text "construction"
  HsRecFieldPat {} -> text "pattern"
  HsRecFieldUpd {} -> text "update"
{-
************************************************************************
* *
\subsubsection{Literals}
* *
************************************************************************
When literals occur we have to make sure
that the types and classes they involve
are made available.
-}
-- | Check literals during renaming.  Only character literals need a check:
-- the value must be a representable Unicode code point.
rnLit :: HsLit -> RnM ()
rnLit (HsChar _ c) = checkErr (inCharRange c) (bogusCharError c)
rnLit _ = return ()
-- | Turn a Fractional-looking literal which happens to be an integer into an
-- Integer-looking literal.  Used for the @NumDecimals@ extension, so that
-- e.g. @1.2e3@ can take any 'Num' type, not just 'Fractional' ones.
generalizeOverLitVal :: OverLitVal -> OverLitVal
generalizeOverLitVal (HsFractional (FL {fl_text=src,fl_value=val}))
    | denominator val == 1 = HsIntegral src (numerator val)
generalizeOverLitVal lit = lit
-- | Rename an overloaded literal: look up the witness function
-- ('fromInteger' \/ 'fromRational' \/ 'fromString', possibly rebound under
-- @RebindableSyntax@) and record whether it differs from the standard name.
rnOverLit :: HsOverLit t -> RnM (HsOverLit Name, FreeVars)
rnOverLit origLit
  = do  { opt_NumDecimals <- xoptM LangExt.NumDecimals
          -- With NumDecimals, integral-valued fractional literals become
          -- integral (see 'generalizeOverLitVal')
        ; let { lit@(OverLit {ol_val=val})
              | opt_NumDecimals = origLit {ol_val = generalizeOverLitVal (ol_val origLit)}
              | otherwise       = origLit
              }
        ; let std_name = hsOverLitName val
        ; (SyntaxExpr { syn_expr = from_thing_name }, fvs)
              <- lookupSyntaxName std_name
          -- Rebindable iff the looked-up witness is not the standard name
        ; let rebindable = case from_thing_name of
                                HsVar (L _ v) -> v /= std_name
                                _             -> panic "rnOverLit"
        ; return (lit { ol_witness = from_thing_name
                      , ol_rebindable = rebindable
                      , ol_type = placeHolderType }, fvs) }
{-
************************************************************************
* *
\subsubsection{Errors}
* *
************************************************************************
-}
-- | Error for a type signature in a pattern without @ScopedTypeVariables@.
patSigErr :: Outputable a => a -> SDoc
patSigErr ty
  =  (text "Illegal signature in pattern:" <+> ppr ty)
        $$ nest 4 (text "Use ScopedTypeVariables to permit it")
-- | Error for a character literal outside the representable Unicode range.
bogusCharError :: Char -> SDoc
bogusCharError c
  = text "character literal out of range: '\\" <> char c  <> char '\''
-- | Error for a view pattern used without the @ViewPatterns@ extension.
badViewPat :: Pat RdrName -> SDoc
badViewPat pat = vcat [text "Illegal view pattern: " <+> ppr pat,
                       text "Use ViewPatterns to enable view patterns"]
|
sgillespie/ghc
|
compiler/rename/RnPat.hs
|
bsd-3-clause
| 34,616
| 29
| 23
| 11,248
| 6,604
| 3,565
| 3,039
| -1
| -1
|
{-
Copyright 2015 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE NoImplicitPrelude #-}
-- | Re-export shim: exposes the whole of base's "GHC.IO.Encoding.Latin1"
-- under the same module name so this package can shadow base's module
-- hierarchy.  The @"base"@ package import avoids importing this module itself.
module GHC.IO.Encoding.Latin1 (module M) where
import "base" GHC.IO.Encoding.Latin1 as M
|
Ye-Yong-Chi/codeworld
|
codeworld-base/src/GHC/IO/Encoding/Latin1.hs
|
apache-2.0
| 759
| 0
| 4
| 136
| 27
| 21
| 6
| 4
| 0
|
{-# OPTIONS -XOverloadedStrings #-}
module ChannelSpec (main, spec) where
import Test.Hspec
import Network.AMQP
import Network.AMQP.Internal (channelID)
-- | Test entry point: run 'spec' with hspec's default runner.
main :: IO ()
main = hspec spec
-- | Channel lifecycle tests.
--
-- NOTE(review): these tests open real connections to an AMQP broker at
-- 127.0.0.1 with guest/guest credentials — they require a running broker
-- (e.g. RabbitMQ with defaults) and are not pure unit tests.
spec :: Spec
spec = do
    describe "openChannel" $ do
        context "with automatically allocated channel id" $ do
            it "opens a new channel with unique id" $ do
                conn <- openConnection "127.0.0.1" "/" "guest" "guest"
                -- Channel ids are expected to be allocated sequentially from 1
                ch1  <- openChannel conn
                ch2  <- openChannel conn
                ch3  <- openChannel conn

                channelID ch1 `shouldBe` 1
                channelID ch2 `shouldBe` 2
                channelID ch3 `shouldBe` 3

                closeConnection conn

    describe "closeChannel" $ do
        context "with an open channel" $ do
            it "closes the channel" $ do
                pending

    describe "qos" $ do
        context "with prefetchCount = 5" $ do
            it "sets prefetch count" $ do
                -- we won't demonstrate how basic.qos works in concert
                -- with acks here, it's more of a basic.consume functionality
                -- aspect
                conn <- openConnection "127.0.0.1" "/" "guest" "guest"
                ch   <- openChannel conn
                qos ch 0 5 False
                closeConnection conn
|
bitemyapp/amqp
|
test/ChannelSpec.hs
|
bsd-3-clause
| 1,341
| 0
| 27
| 502
| 295
| 134
| 161
| 31
| 1
|
-------------------------------------------------------------------------------
-- |
-- Module : System.Hardware.Haskino.SamplePrograms.Rewrite.TranslationTest
-- Copyright : (c) University of Kansas
-- License : BSD3
-- Stability : experimental
--
-- Translation Plugin Test
-------------------------------------------------------------------------------
module Main where
import System.Hardware.Haskino.SamplePrograms.Rewrite.TransTest
import System.Hardware.Haskino.SamplePrograms.Rewrite.TransLetTest
import System.Hardware.Haskino.SamplePrograms.Rewrite.TransFuncTest
import System.Hardware.Haskino.SamplePrograms.Rewrite.TransMultiTest2
import System.Hardware.Haskino.SamplePrograms.Rewrite.TransFuncTest
import System.Hardware.Haskino.SamplePrograms.Rewrite.TransFuncArgTest
import System.Hardware.Haskino.SamplePrograms.Rewrite.TransIfTest
import System.Hardware.Haskino.SamplePrograms.Rewrite.TransRecurTest
import System.Hardware.Haskino.SamplePrograms.Rewrite.TransRecurLetTest
-- | Run every translation-plugin test, one after another.
main :: IO ()
main = sequence_
    [ transTest
    , transLetTest
    , transFuncTest
    , transFuncArgTest
    , transMultiTest
    , transIfTest
    , transRecurTest
    , transRecurLetTest
    ]
|
ku-fpg/kansas-amber
|
tests/TransTests/TranslationTest.hs
|
bsd-3-clause
| 1,163
| 0
| 6
| 100
| 133
| 91
| 42
| 20
| 1
|
{-# LANGUAGE NamedFieldPuns, RecordWildCards, BangPatterns #-}
module Distribution.Server.Features.StaticFiles (
initStaticFilesFeature
) where
import Distribution.Server.Framework
import Distribution.Server.Framework.Templating
import Text.XHtml.Strict (Html, toHtml, anchor, (<<), (!), href, paragraph)
import Data.List hiding (find)
import System.FilePath
import System.Directory (getDirectoryContents)
-- | A feature to provide the top level files on the site (using templates)
-- and also serve the genuinely static files.
--
initStaticFilesFeature :: ServerEnv
                       -> IO (IO HackageFeature)
initStaticFilesFeature env@ServerEnv{serverTemplatesDir, serverTemplatesMode} = do
  -- Page templates
  templates <- loadTemplates serverTemplatesMode
                 [serverTemplatesDir]
                 ["index.html", "hackageErrorPage.txt", "hackageErrorPage.html"]

  -- Every *.html.st file in the templates dir becomes a top-level page
  staticFiles <- find (isSuffixOf ".html.st") serverTemplatesDir

  return $ do
    let feature = staticFilesFeature env templates staticFiles

    return feature
-- Simpler version of System.FilePath.Find (which requires unix-compat)
-- | List the entries of a directory that satisfy a predicate.
-- (Simpler substitute for System.FilePath.Find, which needs unix-compat.)
-- Note: includes the special entries "." and ".." if the predicate admits them.
find :: (FilePath -> Bool) -> FilePath -> IO [FilePath]
find p dirPath = fmap (filter p) (getDirectoryContents dirPath)
-- | Build the feature: top-level templated pages, the @/static@ directory,
-- a few fixed top-level files, and the txt/html error-page renderers.
staticFilesFeature :: ServerEnv -> Templates -> [FilePath] -> HackageFeature
staticFilesFeature ServerEnv{serverStaticDir, serverTemplatesMode}
                   templates staticFiles =
  (emptyHackageFeature "static-files") {
    featureResources =
      [ (resourceAt "/") {
            resourceGet  = [("", \_ -> serveStaticIndexTemplate)]
          }
        -- TODO: we currently cannot use /.. here because then we cannot use it for
        -- the legacy redirects feature.
--      , (resourceAt "/..") {
--            resourceGet  = [("", \_ -> serveStaticTemplates)]
--          }
      , (resourceAt "/static/..") {
            resourceGet  = [("", \_ -> serveStaticDirFiles)]
          }
      ] ++
      -- Fixed top-level files (e.g. favicon.ico), served with their MIME type
      [ (resourceAt ("/" ++ filename)) {
            resourceGet  = [("", \_ -> serveStaticToplevelFile mimetype filename)]
          }
      | (filename, mimetype) <- toplevelFiles ]
      ++
      -- One page per discovered template: /foo serves foo.html(.st)
      [ (resourceAt ("/" ++ dropExtension name)) {
            resourceGet  = [("", \_ -> serveStaticTemplate name)]
          }
      | name <- toplevelTemplates ]
  , featureState = []
  , featureErrHandlers = [("txt",  textErrorPage)
                         ,("html", htmlErrorPage)]
  , featureReloadFiles = reloadTemplates templates
  }
  where
    -- In design mode templates are edited live, so disable caching
    staticResourceCacheControls =
      case serverTemplatesMode of
        DesignMode -> [Public, maxAgeSeconds 0, NoCache]
        NormalMode -> [Public, maxAgeDays 1]

    serveStaticDirFiles :: ServerPartE Response
    serveStaticDirFiles = do
      cacheControlWithoutETag staticResourceCacheControls
      serveDirectory DisableBrowsing [] serverStaticDir

    serveStaticToplevelFile :: String -> FilePath -> ServerPartE Response
    serveStaticToplevelFile mimetype filename = do
      cacheControlWithoutETag staticResourceCacheControls
      serveFile (asContentType mimetype) (serverStaticDir </> filename)

    serveStaticTemplate :: String -> ServerPartE Response
    serveStaticTemplate = serveTemplate

--    serveStaticTemplates :: ServerPartE Response
--    serveStaticTemplates =
--      path $ \name -> do
--        nullDir
--        noTrailingSlash --TODO: redirect to non-slash version
--        serveTemplate (name ++ ".html")

    serveStaticIndexTemplate :: ServerPartE Response
    serveStaticIndexTemplate =
      serveTemplate "index.html"

    -- Render a named template with no substitutions; 404 (mzero) if missing
    serveTemplate :: String -> ServerPartE Response
    serveTemplate name = do
      mtemplate <- tryGetTemplate templates name
      case mtemplate of
        Nothing       -> mzero
        Just template -> do
          cacheControlWithoutETag staticResourceCacheControls
          ok $ toResponse $ template []

    textErrorPage (ErrorResponse errCode hdrs errTitle message) = do
      template <- getTemplate templates "hackageErrorPage.txt"
      let formattedMessage = messageToText message
          response = toResponse $ template
            [ "errorTitle"   $= errTitle
            , "errorMessage" $= formattedMessage
            ]
      return $ response {
        rsCode    = errCode,
        rsHeaders = addHeaders (rsHeaders response) hdrs
      }

    htmlErrorPage :: ErrorResponse -> ServerPartE Response
    htmlErrorPage (ErrorResponse errCode hdrs errTitle message) = do
      template <- getTemplate templates "hackageErrorPage.html"
      let formattedMessage = paragraph << errorToHtml message
          response = toResponse $ template
            [ "errorTitle"   $= errTitle
            , "errorMessage" $= formattedMessage
            ]
      return $ response {
        rsCode    = errCode,
        rsHeaders = addHeaders (rsHeaders response) hdrs
      }

    toplevelFiles = [("favicon.ico", "image/x-icon")]
    toplevelTemplates = map dropExtension staticFiles

    -- Append extra headers, preserving their given order
    addHeaders :: Headers -> [(String, String)] -> Headers
    addHeaders hdrs hdrs' = foldl' (\h (k,v) -> addHeader k v h) hdrs (reverse hdrs')
-- | Render an error message (a list of text/link spans) as HTML fragments.
errorToHtml :: [MessageSpan] -> [Html]
errorToHtml = map renderSpan
  where
    renderSpan (MText x)     = toHtml x
    renderSpan (MLink x url) = anchor ! [href url] << x
|
mpickering/hackage-server
|
Distribution/Server/Features/StaticFiles.hs
|
bsd-3-clause
| 5,375
| 0
| 16
| 1,337
| 1,196
| 642
| 554
| 96
| 3
|
module T16615
where
-- | Mutually recursive pair counting down to zero: for non-negative @i@,
-- @f i@ is 'True' iff @i@ is even, and @g i@ is 'True' iff @i@ is odd.
f :: Int -> Bool
f i
  | i == 0    = True
  | otherwise = g (pred i)

g :: Int -> Bool
g i
  | i == 0    = False
  | otherwise = f (pred i)
|
sdiehl/ghc
|
testsuite/tests/deSugar/should_compile/T16615.hs
|
bsd-3-clause
| 141
| 0
| 8
| 41
| 80
| 43
| 37
| 5
| 2
|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="az-AZ">
<title>AJAX Spider | ZAP Extensions</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>İndeks</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Axtar</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
kingthorin/zap-extensions
|
addOns/spiderAjax/src/main/javahelp/org/zaproxy/zap/extension/spiderAjax/resources/help_az_AZ/helpset_az_AZ.hs
|
apache-2.0
| 974
| 78
| 66
| 159
| 413
| 209
| 204
| -1
| -1
|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="hr-HR">
<title>Linux WebDrivers</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
thc202/zap-extensions
|
addOns/webdrivers/webdriverlinux/src/main/javahelp/org/zaproxy/zap/extension/webdriverlinux/resources/help_hr_HR/helpset_hr_HR.hs
|
apache-2.0
| 961
| 77
| 66
| 156
| 407
| 206
| 201
| -1
| -1
|
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE NoImplicitPrelude, ExistentialQuantification #-}
-----------------------------------------------------------------------------
-- |
-- Module : Control.Exception
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : experimental
-- Portability : non-portable (extended exceptions)
--
-- This module provides support for raising and catching both built-in
-- and user-defined exceptions.
--
-- In addition to exceptions thrown by 'IO' operations, exceptions may
-- be thrown by pure code (imprecise exceptions) or by external events
-- (asynchronous exceptions), but may only be caught in the 'IO' monad.
-- For more details, see:
--
-- * /A semantics for imprecise exceptions/, by Simon Peyton Jones,
-- Alastair Reid, Tony Hoare, Simon Marlow, Fergus Henderson,
-- in /PLDI'99/.
--
-- * /Asynchronous exceptions in Haskell/, by Simon Marlow, Simon Peyton
-- Jones, Andy Moran and John Reppy, in /PLDI'01/.
--
-- * /An Extensible Dynamically-Typed Hierarchy of Exceptions/,
-- by Simon Marlow, in /Haskell '06/.
--
-----------------------------------------------------------------------------
module Control.Exception (
-- * The Exception type
SomeException(..),
Exception(..), -- class
IOException, -- instance Eq, Ord, Show, Typeable, Exception
ArithException(..), -- instance Eq, Ord, Show, Typeable, Exception
ArrayException(..), -- instance Eq, Ord, Show, Typeable, Exception
AssertionFailed(..),
SomeAsyncException(..),
AsyncException(..), -- instance Eq, Ord, Show, Typeable, Exception
asyncExceptionToException, asyncExceptionFromException,
NonTermination(..),
NestedAtomically(..),
BlockedIndefinitelyOnMVar(..),
BlockedIndefinitelyOnSTM(..),
AllocationLimitExceeded(..),
CompactionFailed(..),
Deadlock(..),
NoMethodError(..),
PatternMatchFail(..),
RecConError(..),
RecSelError(..),
RecUpdError(..),
ErrorCall(..),
TypeError(..),
-- * Throwing exceptions
throw,
throwIO,
ioError,
throwTo,
-- * Catching Exceptions
-- $catching
-- ** Catching all exceptions
-- $catchall
-- ** The @catch@ functions
catch,
catches, Handler(..),
catchJust,
-- ** The @handle@ functions
handle,
handleJust,
-- ** The @try@ functions
try,
tryJust,
-- ** The @evaluate@ function
evaluate,
-- ** The @mapException@ function
mapException,
-- * Asynchronous Exceptions
-- $async
-- ** Asynchronous exception control
-- |The following functions allow a thread to control delivery of
-- asynchronous exceptions during a critical region.
mask,
mask_,
uninterruptibleMask,
uninterruptibleMask_,
MaskingState(..),
getMaskingState,
interruptible,
allowInterrupt,
-- *** Applying @mask@ to an exception handler
-- $block_handler
-- *** Interruptible operations
-- $interruptible
-- * Assertions
assert,
-- * Utilities
bracket,
bracket_,
bracketOnError,
finally,
onException,
) where
import Control.Exception.Base
import GHC.Base
import GHC.IO (interruptible)
-- | You need this when using 'catches'.  Existentially wraps a handler for
-- one specific exception type @e@, so handlers for different exception types
-- can live in one list.
data Handler a = forall e . Exception e => Handler (e -> IO a)

-- | @since 4.6.0.0
instance Functor Handler where
     fmap f (Handler h) = Handler (fmap f . h)
{- |
Sometimes you want to catch two different sorts of exception. You could
do something like
> f = expr `catch` \ (ex :: ArithException) -> handleArith ex
> `catch` \ (ex :: IOException) -> handleIO ex
However, there are a couple of problems with this approach. The first is
that having two exception handlers is inefficient. However, the more
serious issue is that the second exception handler will catch exceptions
in the first, e.g. in the example above, if @handleArith@ throws an
@IOException@ then the second exception handler will catch it.
Instead, we provide a function 'catches', which would be used thus:
> f = expr `catches` [Handler (\ (ex :: ArithException) -> handleArith ex),
> Handler (\ (ex :: IOException) -> handleIO ex)]
-}
catches :: IO a -> [Handler a] -> IO a
-- Handlers are tried left-to-right; an unmatched exception is rethrown.
catches io handlers = io `catch` catchesHandler handlers
catchesHandler :: [Handler a] -> SomeException -> IO a
catchesHandler handlers e = go handlers
  where
    -- Try each handler in order; the first whose exception type matches
    -- 'e' runs.  If none matches, rethrow the original exception.
    go []                 = throw e
    go (Handler h : rest) = case fromException e of
                              Just e' -> h e'
                              Nothing -> go rest
-- -----------------------------------------------------------------------------
-- Catching exceptions
{- $catching
There are several functions for catching and examining
exceptions; all of them may only be used from within the
'IO' monad.
Here's a rule of thumb for deciding which catch-style function to
use:
* If you want to do some cleanup in the event that an exception
is raised, use 'finally', 'bracket' or 'onException'.
* To recover after an exception and do something else, the best
choice is to use one of the 'try' family.
* ... unless you are recovering from an asynchronous exception, in which
case use 'catch' or 'catchJust'.
The difference between using 'try' and 'catch' for recovery is that in
'catch' the handler is inside an implicit 'mask' (see \"Asynchronous
Exceptions\") which is important when catching asynchronous
exceptions, but when catching other kinds of exception it is
unnecessary. Furthermore it is possible to accidentally stay inside
the implicit 'mask' by tail-calling rather than returning from the
handler, which is why we recommend using 'try' rather than 'catch' for
ordinary exception recovery.
A typical use of 'tryJust' for recovery looks like this:
> do r <- tryJust (guard . isDoesNotExistError) $ getEnv "HOME"
> case r of
> Left e -> ...
> Right home -> ...
-}
-- -----------------------------------------------------------------------------
-- Asynchronous exceptions
-- | When invoked inside 'mask', this function allows a masked
-- asynchronous exception to be raised, if one exists.  It is
-- equivalent to performing an interruptible operation (see
-- #interruptible), but does not involve any actual blocking.
--
-- When called outside 'mask', or inside 'uninterruptibleMask', this
-- function has no effect.
--
-- @since 4.4.0.0
allowInterrupt :: IO ()
allowInterrupt = interruptible (return ())
{- $async
#AsynchronousExceptions# Asynchronous exceptions are so-called because they arise due to
external influences, and can be raised at any point during execution.
'StackOverflow' and 'HeapOverflow' are two examples of
system-generated asynchronous exceptions.
The primary source of asynchronous exceptions, however, is
'throwTo':
> throwTo :: ThreadId -> Exception -> IO ()
'throwTo' (also 'Control.Concurrent.killThread') allows one
running thread to raise an arbitrary exception in another thread. The
exception is therefore asynchronous with respect to the target thread,
which could be doing anything at the time it receives the exception.
Great care should be taken with asynchronous exceptions; it is all too
easy to introduce race conditions by the over zealous use of
'throwTo'.
-}
{- $block_handler
There\'s an implied 'mask' around every exception handler in a call
to one of the 'catch' family of functions. This is because that is
what you want most of the time - it eliminates a common race condition
in starting an exception handler, because there may be no exception
handler on the stack to handle another exception if one arrives
immediately. If asynchronous exceptions are masked on entering the
handler, though, we have time to install a new exception handler
before being interrupted. If this weren\'t the default, one would have
to write something like
> mask $ \restore ->
> catch (restore (...))
> (\e -> handler)
If you need to unmask asynchronous exceptions again in the exception
handler, @restore@ can be used there too.
Note that 'try' and friends /do not/ have a similar default, because
there is no exception handler in this case. Don't use 'try' for
recovering from an asynchronous exception.
-}
{- $interruptible
#interruptible#
Some operations are /interruptible/, which means that they can receive
asynchronous exceptions even in the scope of a 'mask'. Any function
which may itself block is defined as interruptible; this includes
'Control.Concurrent.MVar.takeMVar'
(but not 'Control.Concurrent.MVar.tryTakeMVar'),
and most operations which perform
some I\/O with the outside world. The reason for having
interruptible operations is so that we can write things like
> mask $ \restore -> do
> a <- takeMVar m
> catch (restore (...))
> (\e -> ...)
if the 'Control.Concurrent.MVar.takeMVar' was not interruptible,
then this particular
combination could lead to deadlock, because the thread itself would be
blocked in a state where it can\'t receive any asynchronous exceptions.
With 'Control.Concurrent.MVar.takeMVar' interruptible, however, we can be
safe in the knowledge that the thread can receive exceptions right up
until the point when the 'Control.Concurrent.MVar.takeMVar' succeeds.
Similar arguments apply for other interruptible operations like
'System.IO.openFile'.
It is useful to think of 'mask' not as a way to completely prevent
asynchronous exceptions, but as a way to switch from asynchronous mode
to polling mode. The main difficulty with asynchronous
exceptions is that they normally can occur anywhere, but within a
'mask' an asynchronous exception is only raised by operations that are
interruptible (or call other interruptible operations). In many cases
these operations may themselves raise exceptions, such as I\/O errors,
so the caller will usually be prepared to handle exceptions arising from the
operation anyway. To perform an explicit poll for asynchronous exceptions
inside 'mask', use 'allowInterrupt'.
Sometimes it is too onerous to handle exceptions in the middle of a
critical piece of stateful code. There are three ways to handle this
kind of situation:
* Use STM. Since a transaction is always either completely executed
or not at all, transactions are a good way to maintain invariants
over state in the presence of asynchronous (and indeed synchronous)
exceptions.
* Use 'mask', and avoid interruptible operations. In order to do
this, we have to know which operations are interruptible. It is
impossible to know for any given library function whether it might
invoke an interruptible operation internally; so instead we give a
list of guaranteed-not-to-be-interruptible operations below.
* Use 'uninterruptibleMask'. This is generally not recommended,
unless you can guarantee that any interruptible operations invoked
during the scope of 'uninterruptibleMask' can only ever block for
a short time. Otherwise, 'uninterruptibleMask' is a good way to
make your program deadlock and be unresponsive to user interrupts.
The following operations are guaranteed not to be interruptible:
* operations on 'Data.IORef.IORef' from "Data.IORef"
* STM transactions that do not use 'GHC.Conc.retry'
* everything from the @Foreign@ modules
* everything from "Control.Exception" except for 'throwTo'
* 'Control.Concurrent.MVar.tryTakeMVar', 'Control.Concurrent.MVar.tryPutMVar',
'Control.Concurrent.MVar.isEmptyMVar'
* 'Control.Concurrent.MVar.takeMVar' if the 'Control.Concurrent.MVar.MVar' is
definitely full, and conversely 'Control.Concurrent.MVar.putMVar' if the
'Control.Concurrent.MVar.MVar' is definitely empty
* 'Control.Concurrent.MVar.newEmptyMVar', 'Control.Concurrent.MVar.newMVar'
* 'Control.Concurrent.forkIO', 'Control.Concurrent.myThreadId'
-}
{- $catchall
It is possible to catch all exceptions, by using the type 'SomeException':
> catch f (\e -> ... (e :: SomeException) ...)
HOWEVER, this is normally not what you want to do!
For example, suppose you want to read a file, but if it doesn't exist
then continue as if it contained \"\". You might be tempted to just
catch all exceptions and return \"\" in the handler. However, this has
all sorts of undesirable consequences. For example, if the user
presses control-C at just the right moment then the 'UserInterrupt'
exception will be caught, and the program will continue running under
the belief that the file contains \"\". Similarly, if another thread
tries to kill the thread reading the file then the 'ThreadKilled'
exception will be ignored.
Instead, you should only catch exactly the exceptions that you really
want. In this case, this would likely be more specific than even
\"any IO exception\"; a permissions error would likely also want to be
handled differently. Instead, you would probably want something like:
> e <- tryJust (guard . isDoesNotExistError) (readFile f)
> let str = either (const "") id e
There are occasions when you really do need to catch any sort of
exception. However, in most cases this is just so you can do some
cleaning up; you aren't actually interested in the exception itself.
For example, if you open a file then you want to close it again,
whether processing the file executes normally or throws an exception.
However, in these cases you can use functions like 'bracket', 'finally'
and 'onException', which never actually pass you the exception, but
just call the cleanup functions at the appropriate points.
But sometimes you really do need to catch any exception, and actually
see what the exception is. One example is at the very top-level of a
program, you may wish to catch any exception, print it to a logfile or
the screen, and then exit gracefully. For these cases, you can use
'catch' (or one of the other exception-catching functions) with the
'SomeException' type.
-}
|
sdiehl/ghc
|
libraries/base/Control/Exception.hs
|
bsd-3-clause
| 14,368
| 0
| 10
| 2,959
| 582
| 382
| 200
| 69
| 2
|
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
TcMatches: Typecheck some @Matches@
-}
{-# LANGUAGE CPP, RankNTypes #-}
module TcMatches ( tcMatchesFun, tcGRHS, tcGRHSsPat, tcMatchesCase, tcMatchLambda,
TcMatchCtxt(..), TcStmtChecker, TcExprStmtChecker, TcCmdStmtChecker,
tcStmts, tcStmtsAndThen, tcDoStmts, tcBody,
tcDoStmt, tcGuardStmt
) where
import {-# SOURCE #-} TcExpr( tcSyntaxOp, tcInferRhoNC, tcInferRho, tcCheckId,
tcMonoExpr, tcMonoExprNC, tcPolyExpr )
import HsSyn
import BasicTypes
import TcRnMonad
import TcEnv
import TcPat
import TcMType
import TcType
import TcBinds
import TcUnify
import Name
import TysWiredIn
import Id
import TyCon
import TysPrim
import TcEvidence
import Outputable
import Util
import SrcLoc
import FastString
-- Create chunkified tuple types for monad comprehensions
import MkCore
import Control.Monad
#include "HsVersions.h"
{-
************************************************************************
* *
\subsection{tcMatchesFun, tcMatchesCase}
* *
************************************************************************
@tcMatchesFun@ typechecks a @[Match]@ list which occurs in a
@FunMonoBind@. The second argument is the name of the function, which
is used in error messages. It checks that all the equations have the
same number of arguments before using @tcMatches@ to do the work.
Note [Polymorphic expected type for tcMatchesFun]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
tcMatchesFun may be given a *sigma* (polymorphic) type
so it must be prepared to use tcGen to skolemise it.
See Note [sig_tau may be polymorphic] in TcPat.
-}
-- | Typecheck the equations of a function binding against its expected
-- (possibly polymorphic) sigma type; see
-- Note [Polymorphic expected type for tcMatchesFun] above.
tcMatchesFun :: Name -> Bool
             -> MatchGroup Name (LHsExpr Name)
             -> TcSigmaType                  -- Expected type of function
             -> TcM (HsWrapper, MatchGroup TcId (LHsExpr TcId))
                                -- Returns type of body
tcMatchesFun fun_name inf matches exp_ty
  = do  {  -- Check that they all have the same no of arguments
           -- Location is in the monad, set the caller so that
           -- any inter-equation error messages get some vaguely
           -- sensible location.        Note: we have to do this odd
           -- ann-grabbing, because we don't always have annotations in
           -- hand when we call tcMatchesFun...
          traceTc "tcMatchesFun" (ppr fun_name $$ ppr exp_ty)
        ; checkArgs fun_name matches

          -- Skolemise the expected type, then split off the argument types
        ; (wrap_gen, (wrap_fun, group))
            <- tcGen (FunSigCtxt fun_name True) exp_ty $ \ _ exp_rho ->
                  -- Note [Polymorphic expected type for tcMatchesFun]
               matchFunTys herald arity exp_rho $ \ pat_tys rhs_ty ->
               tcMatches match_ctxt pat_tys rhs_ty matches
        ; return (wrap_gen <.> wrap_fun, group) }
  where
    arity = matchGroupArity matches
    herald = ptext (sLit "The equation(s) for")
             <+> quotes (ppr fun_name) <+> ptext (sLit "have")
    match_ctxt = MC { mc_what = FunRhs fun_name inf, mc_body = tcBody }
{-
@tcMatchesCase@ doesn't do the argument-count check because the
parser guarantees that each equation has exactly one argument.
-}
tcMatchesCase :: (Outputable (body Name)) =>
                TcMatchCtxt body                          -- Case context
             -> TcRhoType                                 -- Type of scrutinee
             -> MatchGroup Name (Located (body Name))     -- The case alternatives
             -> TcRhoType                                 -- Type of whole case expressions
             -> TcM (MatchGroup TcId (Located (body TcId))) -- Translated alternatives
tcMatchesCase ctxt scrut_ty matches res_ty
  | isEmptyMatchGroup matches   -- Allow empty case expressions
    -- EmptyCase: nothing to check; just record the scrutinee and result
    -- types in an alternatives-free group
  = return (MG { mg_alts = [], mg_arg_tys = [scrut_ty], mg_res_ty = res_ty, mg_origin = mg_origin matches })
  | otherwise
  = tcMatches ctxt [scrut_ty] res_ty matches
-- | Typecheck a lambda abstraction's single-alternative match group
-- against the expected (rho) type of the whole lambda.
tcMatchLambda :: MatchGroup Name (LHsExpr Name) -> TcRhoType
              -> TcM (HsWrapper, MatchGroup TcId (LHsExpr TcId))
tcMatchLambda match res_ty
  = matchFunTys herald n_pats res_ty  $ \ pat_tys rhs_ty ->
    tcMatches match_ctxt pat_tys rhs_ty match
  where
    n_pats = matchGroupArity match
    herald = sep [ ptext (sLit "The lambda expression")
                   <+> quotes (pprSetDepth (PartWay 1) $
                               pprMatches (LambdaExpr :: HsMatchContext Name) match),
                        -- The pprSetDepth makes the abstraction print briefly
                   ptext (sLit "has")]
    match_ctxt = MC { mc_what = LambdaExpr,
                      mc_body = tcBody }
-- @tcGRHSsPat@ typechecks @[GRHSs]@ that occur in a @PatMonoBind@.
-- @tcGRHSsPat@ typechecks @[GRHSs]@ that occur in a @PatMonoBind@.
tcGRHSsPat :: GRHSs Name (LHsExpr Name) -> TcRhoType
           -> TcM (GRHSs TcId (LHsExpr TcId))
-- Used for pattern bindings
tcGRHSsPat grhss res_ty = tcGRHSs match_ctxt grhss res_ty
  where
    -- RHS of a pattern binding; bodies are checked as plain expressions
    match_ctxt = MC { mc_what = PatBindRhs,
                      mc_body = tcBody }
matchFunTys
  :: SDoc       -- See Note [Herald for matchExpecteFunTys] in TcUnify
  -> Arity
  -> TcRhoType
  -> ([TcSigmaType] -> TcRhoType -> TcM a)
  -> TcM (HsWrapper, a)
-- Written in CPS style for historical reasons;
-- could probably be un-CPSd, like matchExpectedTyConApp
matchFunTys herald arity res_ty thing_inside
  = do  { (co, pat_tys, res_ty) <- matchExpectedFunTys herald arity res_ty
        ; res <- thing_inside pat_tys res_ty
          -- 'co' witnesses the decomposition of res_ty into a function
          -- type; wrap its symmetry so callers see the original type
        ; return (coToHsWrapper (mkTcSymCo co), res) }
{-
************************************************************************
* *
\subsection{tcMatch}
* *
************************************************************************
-}
tcMatches :: (Outputable (body Name)) => TcMatchCtxt body
          -> [TcSigmaType]      -- Expected pattern types
          -> TcRhoType          -- Expected result-type of the Match.
          -> MatchGroup Name (Located (body Name))
          -> TcM (MatchGroup TcId (Located (body TcId)))
data TcMatchCtxt body   -- c.f. TcStmtCtxt, also in this module
  = MC { mc_what :: HsMatchContext Name,  -- What kind of thing this is
         mc_body :: Located (body Name)         -- Type checker for a body of
                                                -- an alternative
                 -> TcRhoType
                 -> TcM (Located (body TcId)) }
-- Typecheck each alternative against the same pattern/result types,
-- then rebuild the group with the now-known types recorded in it.
tcMatches ctxt pat_tys rhs_ty (MG { mg_alts = matches, mg_origin = origin })
  = ASSERT( not (null matches) )        -- Ensure that rhs_ty is filled in
    do  { matches' <- mapM (tcMatch ctxt pat_tys rhs_ty) matches
        ; return (MG { mg_alts = matches', mg_arg_tys = pat_tys, mg_res_ty = rhs_ty, mg_origin = origin }) }
-------------
tcMatch :: (Outputable (body Name)) => TcMatchCtxt body
        -> [TcSigmaType]        -- Expected pattern types
        -> TcRhoType            -- Expected result-type of the Match.
        -> LMatch Name (Located (body Name))
        -> TcM (LMatch TcId (Located (body TcId)))
tcMatch ctxt pat_tys rhs_ty match
  = wrapLocM (tc_match ctxt pat_tys rhs_ty) match
  where
    -- Check the patterns, then (in their extended environment) the RHSs
    tc_match ctxt pat_tys rhs_ty match@(Match _ pats maybe_rhs_sig grhss)
      = add_match_ctxt match $
        do { (pats', grhss') <- tcPats (mc_what ctxt) pats pat_tys $
                                tc_grhss ctxt maybe_rhs_sig grhss rhs_ty
           ; return (Match Nothing pats' Nothing grhss') }
    tc_grhss ctxt Nothing grhss rhs_ty
      = tcGRHSs ctxt grhss rhs_ty       -- No result signature
    -- Result type sigs are no longer supported
    tc_grhss _ (Just {}) _ _
      = panic "tc_ghrss"        -- Rejected by renamer
    -- For (\x -> e), tcExpr has already said "In the expression \x->e"
    -- so we don't want to add "In the lambda abstraction \x->e"
    add_match_ctxt match thing_inside
      = case mc_what ctxt of
          LambdaExpr -> thing_inside
          m_ctxt     -> addErrCtxt (pprMatchInCtxt m_ctxt match) thing_inside
-------------
tcGRHSs :: TcMatchCtxt body -> GRHSs Name (Located (body Name)) -> TcRhoType
        -> TcM (GRHSs TcId (Located (body TcId)))
-- Notice that we pass in the full res_ty, so that we get
-- good inference from simple things like
--      f = \(x::forall a.a->a) -> <stuff>
-- We used to force it to be a monotype when there was more than one guard
-- but we don't need to do that any more
tcGRHSs ctxt (GRHSs grhss binds) res_ty
    -- The 'where' bindings scope over every guarded RHS, hence the
    -- tcLocalBinds wrapping the whole traversal
  = do  { (binds', grhss') <- tcLocalBinds binds $
                              mapM (wrapLocM (tcGRHS ctxt res_ty)) grhss
        ; return (GRHSs grhss' binds') }
-------------
tcGRHS :: TcMatchCtxt body -> TcRhoType -> GRHS Name (Located (body Name))
       -> TcM (GRHS TcId (Located (body TcId)))
tcGRHS ctxt res_ty (GRHS guards rhs)
    -- Check the guard statements first; binders they introduce scope
    -- over the body, which is checked by the context's body checker
  = do  { (guards', rhs') <- tcStmtsAndThen stmt_ctxt tcGuardStmt guards res_ty $
                             mc_body ctxt rhs
        ; return (GRHS guards' rhs') }
  where
    -- Guards are pattern-guard statements of the enclosing match kind
    stmt_ctxt  = PatGuard (mc_what ctxt)
{-
************************************************************************
* *
\subsection{@tcDoStmts@ typechecks a {\em list} of do statements}
* *
************************************************************************
-}
-- | Dispatch on the kind of comprehension/do-block and typecheck its
-- statements with the appropriate statement checker.
tcDoStmts :: HsStmtContext Name
          -> [LStmt Name (LHsExpr Name)]
          -> TcRhoType
          -> TcM (HsExpr TcId)          -- Returns a HsDo
tcDoStmts ListComp stmts res_ty
    -- List comprehension: result must be [elt_ty]; statements are
    -- checked against the element type
  = do  { (co, elt_ty) <- matchExpectedListTy res_ty
        ; let list_ty = mkListTy elt_ty
        ; stmts' <- tcStmts ListComp (tcLcStmt listTyCon) stmts elt_ty
        ; return $ mkHsWrapCo co (HsDo ListComp stmts' list_ty) }
tcDoStmts PArrComp stmts res_ty
    -- Parallel-array comprehension: same shape, with [: :] types
  = do  { (co, elt_ty) <- matchExpectedPArrTy res_ty
        ; let parr_ty = mkPArrTy elt_ty
        ; stmts' <- tcStmts PArrComp (tcLcStmt parrTyCon) stmts elt_ty
        ; return $ mkHsWrapCo co (HsDo PArrComp stmts' parr_ty) }
tcDoStmts DoExpr stmts res_ty
  = do  { stmts' <- tcStmts DoExpr tcDoStmt stmts res_ty
        ; return (HsDo DoExpr stmts' res_ty) }
tcDoStmts MDoExpr stmts res_ty
  = do  { stmts' <- tcStmts MDoExpr tcDoStmt stmts res_ty
        ; return (HsDo MDoExpr stmts' res_ty) }
tcDoStmts MonadComp stmts res_ty
  = do  { stmts' <- tcStmts MonadComp tcMcStmt stmts res_ty
        ; return (HsDo MonadComp stmts' res_ty) }
tcDoStmts ctxt _ _ = pprPanic "tcDoStmts" (pprStmtContext ctxt)
-- | Default body checker for match alternatives: check the expression
-- against the expected rho type.
tcBody :: LHsExpr Name -> TcRhoType -> TcM (LHsExpr TcId)
tcBody body res_ty
  = do  { traceTc "tcBody" (ppr res_ty)
        ; body' <- tcMonoExpr body res_ty
        ; return body'
        }
{-
************************************************************************
* *
\subsection{tcStmts}
* *
************************************************************************
-}
type TcExprStmtChecker = TcStmtChecker HsExpr
type TcCmdStmtChecker = TcStmtChecker HsCmd
type TcStmtChecker body
= forall thing. HsStmtContext Name
-> Stmt Name (Located (body Name))
-> TcRhoType -- Result type for comprehension
-> (TcRhoType -> TcM thing) -- Checker for what follows the stmt
-> TcM (Stmt TcId (Located (body TcId)), thing)
tcStmts :: (Outputable (body Name)) => HsStmtContext Name
        -> TcStmtChecker body   -- NB: higher-rank type
        -> [LStmt Name (Located (body Name))]
        -> TcRhoType
        -> TcM [LStmt TcId (Located (body TcId))]
-- Simple wrapper over tcStmtsAndThen with a trivial continuation
tcStmts ctxt stmt_chk stmts res_ty
  = do { (stmts', _) <- tcStmtsAndThen ctxt stmt_chk stmts res_ty $
                        const (return ())
       ; return stmts' }
tcStmtsAndThen :: (Outputable (body Name)) => HsStmtContext Name
               -> TcStmtChecker body    -- NB: higher-rank type
               -> [LStmt Name (Located (body Name))]
               -> TcRhoType
               -> (TcRhoType -> TcM thing)
               -> TcM ([LStmt TcId (Located (body TcId))], thing)
-- Note the higher-rank type.  stmt_chk is applied at different
-- types in the equations for tcStmts
tcStmtsAndThen _ _ [] res_ty thing_inside
  = do  { thing <- thing_inside res_ty
        ; return ([], thing) }
-- LetStmts are handled uniformly, regardless of context
tcStmtsAndThen ctxt stmt_chk (L loc (LetStmt binds) : stmts) res_ty thing_inside
  = do  { (binds', (stmts',thing)) <- tcLocalBinds binds $
              tcStmtsAndThen ctxt stmt_chk stmts res_ty thing_inside
        ; return (L loc (LetStmt binds') : stmts', thing) }
-- For the vanilla case, handle the location-setting part
tcStmtsAndThen ctxt stmt_chk (L loc stmt : stmts) res_ty thing_inside
  = do  { (stmt', (stmts', thing)) <-
                setSrcSpan loc                              $
                addErrCtxt (pprStmtInCtxt ctxt stmt)        $
                stmt_chk ctxt stmt res_ty                   $ \ res_ty' ->
                -- popErrCtxt: the per-stmt context must not wrap the
                -- checking of the remaining statements
                popErrCtxt                                  $
                tcStmtsAndThen ctxt stmt_chk stmts res_ty'  $
                thing_inside
        ; return (L loc stmt' : stmts', thing) }
---------------------------------------------------
-- Pattern guards
---------------------------------------------------
-- | Statement checker for pattern guards (boolean guards and
-- pattern-bind guards); no rebindable syntax here.
tcGuardStmt :: TcExprStmtChecker
tcGuardStmt _ (BodyStmt guard _ _ _) res_ty thing_inside
    -- A boolean guard: the expression must have type Bool
  = do  { guard' <- tcMonoExpr guard boolTy
        ; thing  <- thing_inside res_ty
        ; return (BodyStmt guard' noSyntaxExpr noSyntaxExpr boolTy, thing) }
tcGuardStmt ctxt (BindStmt pat rhs _ _) res_ty thing_inside
    -- pat <- rhs: infer the rhs type, check the pattern against it;
    -- binders introduced by the pattern scope over 'thing_inside'
  = do  { (rhs', rhs_ty) <- tcInferRhoNC rhs   -- Stmt has a context already
        ; (pat', thing)  <- tcPat (StmtCtxt ctxt) pat rhs_ty $
                            thing_inside res_ty
        ; return (BindStmt pat' rhs' noSyntaxExpr noSyntaxExpr, thing) }
tcGuardStmt _ stmt _ _
  = pprPanic "tcGuardStmt: unexpected Stmt" (ppr stmt)
---------------------------------------------------
-- List comprehensions and PArrays
-- (no rebindable syntax)
---------------------------------------------------
-- Dealt with separately, rather than by tcMcStmt, because
-- a) PArr isn't (yet) an instance of Monad, so the generality seems overkill
-- b) We have special desugaring rules for list comprehensions,
-- which avoid creating intermediate lists. They in turn
-- assume that the bind/return operations are the regular
-- polymorphic ones, and in particular don't have any
-- coercion matching stuff in them. It's hard to avoid the
-- potential for non-trivial coercions in tcMcStmt
tcLcStmt :: TyCon -- The list/Parray type constructor ([] or PArray)
-> TcExprStmtChecker
tcLcStmt _ _ (LastStmt body _) elt_ty thing_inside
= do { body' <- tcMonoExprNC body elt_ty
; thing <- thing_inside (panic "tcLcStmt: thing_inside")
; return (LastStmt body' noSyntaxExpr, thing) }
-- A generator, pat <- rhs
tcLcStmt m_tc ctxt (BindStmt pat rhs _ _) elt_ty thing_inside
= do { pat_ty <- newFlexiTyVarTy liftedTypeKind
; rhs' <- tcMonoExpr rhs (mkTyConApp m_tc [pat_ty])
; (pat', thing) <- tcPat (StmtCtxt ctxt) pat pat_ty $
thing_inside elt_ty
; return (BindStmt pat' rhs' noSyntaxExpr noSyntaxExpr, thing) }
-- A boolean guard
tcLcStmt _ _ (BodyStmt rhs _ _ _) elt_ty thing_inside
= do { rhs' <- tcMonoExpr rhs boolTy
; thing <- thing_inside elt_ty
; return (BodyStmt rhs' noSyntaxExpr noSyntaxExpr boolTy, thing) }
-- ParStmt: See notes with tcMcStmt
tcLcStmt m_tc ctxt (ParStmt bndr_stmts_s _ _) elt_ty thing_inside
= do { (pairs', thing) <- loop bndr_stmts_s
; return (ParStmt pairs' noSyntaxExpr noSyntaxExpr, thing) }
where
-- loop :: [([LStmt Name], [Name])] -> TcM ([([LStmt TcId], [TcId])], thing)
loop [] = do { thing <- thing_inside elt_ty
; return ([], thing) } -- matching in the branches
loop (ParStmtBlock stmts names _ : pairs)
= do { (stmts', (ids, pairs', thing))
<- tcStmtsAndThen ctxt (tcLcStmt m_tc) stmts elt_ty $ \ _elt_ty' ->
do { ids <- tcLookupLocalIds names
; (pairs', thing) <- loop pairs
; return (ids, pairs', thing) }
; return ( ParStmtBlock stmts' ids noSyntaxExpr : pairs', thing ) }
tcLcStmt m_tc ctxt (TransStmt { trS_form = form, trS_stmts = stmts
, trS_bndrs = bindersMap
, trS_by = by, trS_using = using }) elt_ty thing_inside
= do { let (bndr_names, n_bndr_names) = unzip bindersMap
unused_ty = pprPanic "tcLcStmt: inner ty" (ppr bindersMap)
-- The inner 'stmts' lack a LastStmt, so the element type
-- passed in to tcStmtsAndThen is never looked at
; (stmts', (bndr_ids, by'))
<- tcStmtsAndThen (TransStmtCtxt ctxt) (tcLcStmt m_tc) stmts unused_ty $ \_ -> do
{ by' <- case by of
Nothing -> return Nothing
Just e -> do { e_ty <- tcInferRho e; return (Just e_ty) }
; bndr_ids <- tcLookupLocalIds bndr_names
; return (bndr_ids, by') }
; let m_app ty = mkTyConApp m_tc [ty]
--------------- Typecheck the 'using' function -------------
-- using :: ((a,b,c)->t) -> m (a,b,c) -> m (a,b,c)m (ThenForm)
-- :: ((a,b,c)->t) -> m (a,b,c) -> m (m (a,b,c))) (GroupForm)
-- n_app :: Type -> Type -- Wraps a 'ty' into '[ty]' for GroupForm
; let n_app = case form of
ThenForm -> (\ty -> ty)
_ -> m_app
by_arrow :: Type -> Type -- Wraps 'ty' to '(a->t) -> ty' if the By is present
by_arrow = case by' of
Nothing -> \ty -> ty
Just (_,e_ty) -> \ty -> (alphaTy `mkFunTy` e_ty) `mkFunTy` ty
tup_ty = mkBigCoreVarTupTy bndr_ids
poly_arg_ty = m_app alphaTy
poly_res_ty = m_app (n_app alphaTy)
using_poly_ty = mkForAllTy alphaTyVar $ by_arrow $
poly_arg_ty `mkFunTy` poly_res_ty
; using' <- tcPolyExpr using using_poly_ty
; let final_using = fmap (HsWrap (WpTyApp tup_ty)) using'
-- 'stmts' returns a result of type (m1_ty tuple_ty),
-- typically something like [(Int,Bool,Int)]
-- We don't know what tuple_ty is yet, so we use a variable
; let mk_n_bndr :: Name -> TcId -> TcId
mk_n_bndr n_bndr_name bndr_id = mkLocalId n_bndr_name (n_app (idType bndr_id))
-- Ensure that every old binder of type `b` is linked up with its
-- new binder which should have type `n b`
-- See Note [GroupStmt binder map] in HsExpr
n_bndr_ids = zipWith mk_n_bndr n_bndr_names bndr_ids
bindersMap' = bndr_ids `zip` n_bndr_ids
-- Type check the thing in the environment with
-- these new binders and return the result
; thing <- tcExtendIdEnv n_bndr_ids (thing_inside elt_ty)
; return (emptyTransStmt { trS_stmts = stmts', trS_bndrs = bindersMap'
, trS_by = fmap fst by', trS_using = final_using
, trS_form = form }, thing) }
tcLcStmt _ _ stmt _ _
= pprPanic "tcLcStmt: unexpected Stmt" (ppr stmt)
---------------------------------------------------
-- Monad comprehensions
-- (supports rebindable syntax)
---------------------------------------------------
tcMcStmt :: TcExprStmtChecker
tcMcStmt _ (LastStmt body return_op) res_ty thing_inside
= do { a_ty <- newFlexiTyVarTy liftedTypeKind
; return_op' <- tcSyntaxOp MCompOrigin return_op
(a_ty `mkFunTy` res_ty)
; body' <- tcMonoExprNC body a_ty
; thing <- thing_inside (panic "tcMcStmt: thing_inside")
; return (LastStmt body' return_op', thing) }
-- Generators for monad comprehensions ( pat <- rhs )
--
-- [ body | q <- gen ] -> gen :: m a
-- q :: a
--
tcMcStmt ctxt (BindStmt pat rhs bind_op fail_op) res_ty thing_inside
= do { rhs_ty <- newFlexiTyVarTy liftedTypeKind
; pat_ty <- newFlexiTyVarTy liftedTypeKind
; new_res_ty <- newFlexiTyVarTy liftedTypeKind
-- (>>=) :: rhs_ty -> (pat_ty -> new_res_ty) -> res_ty
; bind_op' <- tcSyntaxOp MCompOrigin bind_op
(mkFunTys [rhs_ty, mkFunTy pat_ty new_res_ty] res_ty)
-- If (but only if) the pattern can fail, typecheck the 'fail' operator
; fail_op' <- if isIrrefutableHsPat pat
then return noSyntaxExpr
else tcSyntaxOp MCompOrigin fail_op (mkFunTy stringTy new_res_ty)
; rhs' <- tcMonoExprNC rhs rhs_ty
; (pat', thing) <- tcPat (StmtCtxt ctxt) pat pat_ty $
thing_inside new_res_ty
; return (BindStmt pat' rhs' bind_op' fail_op', thing) }
-- Boolean expressions.
--
-- [ body | stmts, expr ] -> expr :: m Bool
--
tcMcStmt _ (BodyStmt rhs then_op guard_op _) res_ty thing_inside
= do { -- Deal with rebindable syntax:
-- guard_op :: test_ty -> rhs_ty
-- then_op :: rhs_ty -> new_res_ty -> res_ty
-- Where test_ty is, for example, Bool
test_ty <- newFlexiTyVarTy liftedTypeKind
; rhs_ty <- newFlexiTyVarTy liftedTypeKind
; new_res_ty <- newFlexiTyVarTy liftedTypeKind
; rhs' <- tcMonoExpr rhs test_ty
; guard_op' <- tcSyntaxOp MCompOrigin guard_op
(mkFunTy test_ty rhs_ty)
; then_op' <- tcSyntaxOp MCompOrigin then_op
(mkFunTys [rhs_ty, new_res_ty] res_ty)
; thing <- thing_inside new_res_ty
; return (BodyStmt rhs' then_op' guard_op' rhs_ty, thing) }
-- Grouping statements
--
-- [ body | stmts, then group by e using f ]
-- -> e :: t
-- f :: forall a. (a -> t) -> m a -> m (m a)
-- [ body | stmts, then group using f ]
-- -> f :: forall a. m a -> m (m a)
-- We type [ body | (stmts, group by e using f), ... ]
-- f <optional by> [ (a,b,c) | stmts ] >>= \(a,b,c) -> ...body....
--
-- We type the functions as follows:
-- f <optional by> :: m1 (a,b,c) -> m2 (a,b,c) (ThenForm)
-- :: m1 (a,b,c) -> m2 (n (a,b,c)) (GroupForm)
-- (>>=) :: m2 (a,b,c) -> ((a,b,c) -> res) -> res (ThenForm)
-- :: m2 (n (a,b,c)) -> (n (a,b,c) -> res) -> res (GroupForm)
--
tcMcStmt ctxt (TransStmt { trS_stmts = stmts, trS_bndrs = bindersMap
, trS_by = by, trS_using = using, trS_form = form
, trS_ret = return_op, trS_bind = bind_op
, trS_fmap = fmap_op }) res_ty thing_inside
= do { let star_star_kind = liftedTypeKind `mkArrowKind` liftedTypeKind
; m1_ty <- newFlexiTyVarTy star_star_kind
; m2_ty <- newFlexiTyVarTy star_star_kind
; tup_ty <- newFlexiTyVarTy liftedTypeKind
; by_e_ty <- newFlexiTyVarTy liftedTypeKind -- The type of the 'by' expression (if any)
-- n_app :: Type -> Type -- Wraps a 'ty' into '(n ty)' for GroupForm
; n_app <- case form of
ThenForm -> return (\ty -> ty)
_ -> do { n_ty <- newFlexiTyVarTy star_star_kind
; return (n_ty `mkAppTy`) }
; let by_arrow :: Type -> Type
-- (by_arrow res) produces ((alpha->e_ty) -> res) ('by' present)
-- or res ('by' absent)
by_arrow = case by of
Nothing -> \res -> res
Just {} -> \res -> (alphaTy `mkFunTy` by_e_ty) `mkFunTy` res
poly_arg_ty = m1_ty `mkAppTy` alphaTy
using_arg_ty = m1_ty `mkAppTy` tup_ty
poly_res_ty = m2_ty `mkAppTy` n_app alphaTy
using_res_ty = m2_ty `mkAppTy` n_app tup_ty
using_poly_ty = mkForAllTy alphaTyVar $ by_arrow $
poly_arg_ty `mkFunTy` poly_res_ty
-- 'stmts' returns a result of type (m1_ty tuple_ty),
-- typically something like [(Int,Bool,Int)]
-- We don't know what tuple_ty is yet, so we use a variable
; let (bndr_names, n_bndr_names) = unzip bindersMap
; (stmts', (bndr_ids, by', return_op')) <-
tcStmtsAndThen (TransStmtCtxt ctxt) tcMcStmt stmts using_arg_ty $ \res_ty' -> do
{ by' <- case by of
Nothing -> return Nothing
Just e -> do { e' <- tcMonoExpr e by_e_ty; return (Just e') }
-- Find the Ids (and hence types) of all old binders
; bndr_ids <- tcLookupLocalIds bndr_names
-- 'return' is only used for the binders, so we know its type.
-- return :: (a,b,c,..) -> m (a,b,c,..)
; return_op' <- tcSyntaxOp MCompOrigin return_op $
(mkBigCoreVarTupTy bndr_ids) `mkFunTy` res_ty'
; return (bndr_ids, by', return_op') }
--------------- Typecheck the 'bind' function -------------
-- (>>=) :: m2 (n (a,b,c)) -> ( n (a,b,c) -> new_res_ty ) -> res_ty
; new_res_ty <- newFlexiTyVarTy liftedTypeKind
; bind_op' <- tcSyntaxOp MCompOrigin bind_op $
using_res_ty `mkFunTy` (n_app tup_ty `mkFunTy` new_res_ty)
`mkFunTy` res_ty
--------------- Typecheck the 'fmap' function -------------
; fmap_op' <- case form of
ThenForm -> return noSyntaxExpr
_ -> fmap unLoc . tcPolyExpr (noLoc fmap_op) $
mkForAllTy alphaTyVar $ mkForAllTy betaTyVar $
(alphaTy `mkFunTy` betaTy)
`mkFunTy` (n_app alphaTy)
`mkFunTy` (n_app betaTy)
--------------- Typecheck the 'using' function -------------
-- using :: ((a,b,c)->t) -> m1 (a,b,c) -> m2 (n (a,b,c))
; using' <- tcPolyExpr using using_poly_ty
; let final_using = fmap (HsWrap (WpTyApp tup_ty)) using'
--------------- Bulding the bindersMap ----------------
; let mk_n_bndr :: Name -> TcId -> TcId
mk_n_bndr n_bndr_name bndr_id = mkLocalId n_bndr_name (n_app (idType bndr_id))
-- Ensure that every old binder of type `b` is linked up with its
-- new binder which should have type `n b`
-- See Note [GroupStmt binder map] in HsExpr
n_bndr_ids = zipWith mk_n_bndr n_bndr_names bndr_ids
bindersMap' = bndr_ids `zip` n_bndr_ids
-- Type check the thing in the environment with
-- these new binders and return the result
; thing <- tcExtendIdEnv n_bndr_ids (thing_inside new_res_ty)
; return (TransStmt { trS_stmts = stmts', trS_bndrs = bindersMap'
, trS_by = by', trS_using = final_using
, trS_ret = return_op', trS_bind = bind_op'
, trS_fmap = fmap_op', trS_form = form }, thing) }
-- A parallel set of comprehensions
-- [ (g x, h x) | ... ; let g v = ...
-- | ... ; let h v = ... ]
--
-- It's possible that g,h are overloaded, so we need to feed the LIE from the
-- (g x, h x) up through both lots of bindings (so we get the bindLocalMethods).
-- Similarly if we had an existential pattern match:
--
-- data T = forall a. Show a => C a
--
-- [ (show x, show y) | ... ; C x <- ...
-- | ... ; C y <- ... ]
--
-- Then we need the LIE from (show x, show y) to be simplified against
-- the bindings for x and y.
--
-- It's difficult to do this in parallel, so we rely on the renamer to
-- ensure that g,h and x,y don't duplicate, and simply grow the environment.
-- So the binders of the first parallel group will be in scope in the second
-- group. But that's fine; there's no shadowing to worry about.
--
-- Note: The `mzip` function will get typechecked via:
--
-- ParStmt [st1::t1, st2::t2, st3::t3]
--
-- mzip :: m st1
-- -> (m st2 -> m st3 -> m (st2, st3)) -- recursive call
-- -> m (st1, (st2, st3))
--
tcMcStmt ctxt (ParStmt bndr_stmts_s mzip_op bind_op) res_ty thing_inside
= do { let star_star_kind = liftedTypeKind `mkArrowKind` liftedTypeKind
; m_ty <- newFlexiTyVarTy star_star_kind
; let mzip_ty = mkForAllTys [alphaTyVar, betaTyVar] $
(m_ty `mkAppTy` alphaTy)
`mkFunTy`
(m_ty `mkAppTy` betaTy)
`mkFunTy`
(m_ty `mkAppTy` mkBoxedTupleTy [alphaTy, betaTy])
; mzip_op' <- unLoc `fmap` tcPolyExpr (noLoc mzip_op) mzip_ty
; (blocks', thing) <- loop m_ty bndr_stmts_s
-- Typecheck bind:
; let tys = [ mkBigCoreVarTupTy bs | ParStmtBlock _ bs _ <- blocks']
tuple_ty = mk_tuple_ty tys
; bind_op' <- tcSyntaxOp MCompOrigin bind_op $
(m_ty `mkAppTy` tuple_ty)
`mkFunTy` (tuple_ty `mkFunTy` res_ty)
`mkFunTy` res_ty
; return (ParStmt blocks' mzip_op' bind_op', thing) }
where
mk_tuple_ty tys = foldr1 (\tn tm -> mkBoxedTupleTy [tn, tm]) tys
-- loop :: Type -- m_ty
-- -> [([LStmt Name], [Name])]
-- -> TcM ([([LStmt TcId], [TcId])], thing)
loop _ [] = do { thing <- thing_inside res_ty
; return ([], thing) } -- matching in the branches
loop m_ty (ParStmtBlock stmts names return_op : pairs)
= do { -- type dummy since we don't know all binder types yet
id_tys <- mapM (const (newFlexiTyVarTy liftedTypeKind)) names
; let m_tup_ty = m_ty `mkAppTy` mkBigCoreTupTy id_tys
; (stmts', (ids, return_op', pairs', thing))
<- tcStmtsAndThen ctxt tcMcStmt stmts m_tup_ty $ \m_tup_ty' ->
do { ids <- tcLookupLocalIds names
; let tup_ty = mkBigCoreVarTupTy ids
; return_op' <- tcSyntaxOp MCompOrigin return_op
(tup_ty `mkFunTy` m_tup_ty')
; (pairs', thing) <- loop m_ty pairs
; return (ids, return_op', pairs', thing) }
; return (ParStmtBlock stmts' ids return_op' : pairs', thing) }
tcMcStmt _ stmt _ _
= pprPanic "tcMcStmt: unexpected Stmt" (ppr stmt)
---------------------------------------------------
-- Do-notation
-- (supports rebindable syntax)
---------------------------------------------------
tcDoStmt :: TcExprStmtChecker
tcDoStmt _ (LastStmt body _) res_ty thing_inside
= do { body' <- tcMonoExprNC body res_ty
; thing <- thing_inside (panic "tcDoStmt: thing_inside")
; return (LastStmt body' noSyntaxExpr, thing) }
tcDoStmt ctxt (BindStmt pat rhs bind_op fail_op) res_ty thing_inside
= do { -- Deal with rebindable syntax:
-- (>>=) :: rhs_ty -> (pat_ty -> new_res_ty) -> res_ty
-- This level of generality is needed for using do-notation
-- in full generality; see Trac #1537
-- I'd like to put this *after* the tcSyntaxOp
-- (see Note [Treat rebindable syntax first], but that breaks
-- the rigidity info for GADTs. When we move to the new story
-- for GADTs, we can move this after tcSyntaxOp
rhs_ty <- newFlexiTyVarTy liftedTypeKind
; pat_ty <- newFlexiTyVarTy liftedTypeKind
; new_res_ty <- newFlexiTyVarTy liftedTypeKind
; bind_op' <- tcSyntaxOp DoOrigin bind_op
(mkFunTys [rhs_ty, mkFunTy pat_ty new_res_ty] res_ty)
-- If (but only if) the pattern can fail,
-- typecheck the 'fail' operator
; fail_op' <- if isIrrefutableHsPat pat
then return noSyntaxExpr
else tcSyntaxOp DoOrigin fail_op (mkFunTy stringTy new_res_ty)
; rhs' <- tcMonoExprNC rhs rhs_ty
; (pat', thing) <- tcPat (StmtCtxt ctxt) pat pat_ty $
thing_inside new_res_ty
; return (BindStmt pat' rhs' bind_op' fail_op', thing) }
tcDoStmt _ (BodyStmt rhs then_op _ _) res_ty thing_inside
= do { -- Deal with rebindable syntax;
-- (>>) :: rhs_ty -> new_res_ty -> res_ty
-- See also Note [Treat rebindable syntax first]
rhs_ty <- newFlexiTyVarTy liftedTypeKind
; new_res_ty <- newFlexiTyVarTy liftedTypeKind
; then_op' <- tcSyntaxOp DoOrigin then_op
(mkFunTys [rhs_ty, new_res_ty] res_ty)
; rhs' <- tcMonoExprNC rhs rhs_ty
; thing <- thing_inside new_res_ty
; return (BodyStmt rhs' then_op' noSyntaxExpr rhs_ty, thing) }
tcDoStmt ctxt (RecStmt { recS_stmts = stmts, recS_later_ids = later_names
, recS_rec_ids = rec_names, recS_ret_fn = ret_op
, recS_mfix_fn = mfix_op, recS_bind_fn = bind_op })
res_ty thing_inside
= do { let tup_names = rec_names ++ filterOut (`elem` rec_names) later_names
; tup_elt_tys <- newFlexiTyVarTys (length tup_names) liftedTypeKind
; let tup_ids = zipWith mkLocalId tup_names tup_elt_tys
tup_ty = mkBigCoreTupTy tup_elt_tys
; tcExtendIdEnv tup_ids $ do
{ stmts_ty <- newFlexiTyVarTy liftedTypeKind
; (stmts', (ret_op', tup_rets))
<- tcStmtsAndThen ctxt tcDoStmt stmts stmts_ty $ \ inner_res_ty ->
do { tup_rets <- zipWithM tcCheckId tup_names tup_elt_tys
-- Unify the types of the "final" Ids (which may
-- be polymorphic) with those of "knot-tied" Ids
; ret_op' <- tcSyntaxOp DoOrigin ret_op (mkFunTy tup_ty inner_res_ty)
; return (ret_op', tup_rets) }
; mfix_res_ty <- newFlexiTyVarTy liftedTypeKind
; mfix_op' <- tcSyntaxOp DoOrigin mfix_op
(mkFunTy (mkFunTy tup_ty stmts_ty) mfix_res_ty)
; new_res_ty <- newFlexiTyVarTy liftedTypeKind
; bind_op' <- tcSyntaxOp DoOrigin bind_op
(mkFunTys [mfix_res_ty, mkFunTy tup_ty new_res_ty] res_ty)
; thing <- thing_inside new_res_ty
; let rec_ids = takeList rec_names tup_ids
; later_ids <- tcLookupLocalIds later_names
; traceTc "tcdo" $ vcat [ppr rec_ids <+> ppr (map idType rec_ids),
ppr later_ids <+> ppr (map idType later_ids)]
; return (RecStmt { recS_stmts = stmts', recS_later_ids = later_ids
, recS_rec_ids = rec_ids, recS_ret_fn = ret_op'
, recS_mfix_fn = mfix_op', recS_bind_fn = bind_op'
, recS_later_rets = [], recS_rec_rets = tup_rets
, recS_ret_ty = stmts_ty }, thing)
}}
tcDoStmt _ stmt _ _
= pprPanic "tcDoStmt: unexpected Stmt" (ppr stmt)
{-
Note [Treat rebindable syntax first]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When typechecking
do { bar; ... } :: IO ()
we want to typecheck 'bar' in the knowledge that it should be an IO thing,
pushing info from the context into the RHS. To do this, we check the
rebindable syntax first, and push that information into (tcMonoExprNC rhs).
Otherwise the error shows up when checking the rebindable syntax, and
the expected/inferred stuff is back to front (see Trac #3613).
************************************************************************
* *
\subsection{Errors and contexts}
* *
************************************************************************
@sameNoOfArgs@ takes a @[RenamedMatch]@ and decides whether the same
number of args are used in each equation.
-}
-- | Check that every equation of a function binding has the same number
-- of argument patterns; report the first offending equation otherwise.
checkArgs :: Name -> MatchGroup Name body -> TcM ()
checkArgs _ (MG { mg_alts = [] })
    = return ()
checkArgs fun (MG { mg_alts = match1:matches })
    | null bad_matches
    = return ()
    | otherwise
    = failWithTc (vcat [ptext (sLit "Equations for") <+> quotes (ppr fun) <+>
                          ptext (sLit "have different numbers of arguments"),
                        nest 2 (ppr (getLoc match1)),
                        nest 2 (ppr (getLoc (head bad_matches)))])
  where
    -- Arity of the first equation; all others must agree with it
    n_args1 = args_in_match match1
    bad_matches = [m | m <- matches, args_in_match m /= n_args1]
    args_in_match :: LMatch Name body -> Int
    args_in_match (L _ (Match _ pats _ _)) = length pats
|
urbanslug/ghc
|
compiler/typecheck/TcMatches.hs
|
bsd-3-clause
| 37,847
| 2
| 20
| 12,259
| 7,469
| 3,956
| 3,513
| -1
| -1
|
-- Exercise the RealFrac operations: decompose a positive value with
-- properFraction, then apply the rounding family to a negative half.
main :: IO ()
main = do
  -- properFraction splits a value into its integral and fractional parts
  print (fst (properFraction 1.5))
  print (snd (properFraction 1.5))
  -- Rounding family on -1.5 (round uses round-half-to-even)
  print (truncate (-1.5))
  print (round (-1.5))
  print (ceiling (-1.5))
  print (floor (-1.5))
  return ()
|
beni55/ghcjs
|
test/fay/realFrac.hs
|
mit
| 189
| 0
| 10
| 47
| 102
| 46
| 56
| 8
| 1
|
-- | Umbrella module for the Redis @persistent@ backend: re-exports the
-- configuration, store implementation, and exception types so users
-- need only a single import.
module Database.Persist.Redis
    ( module Database.Persist.Redis.Config
    , module Database.Persist.Redis.Store
    , module Database.Persist.Redis.Exception
    ) where
import Database.Persist.Redis.Config
import Database.Persist.Redis.Store
import Database.Persist.Redis.Exception
|
paul-rouse/persistent
|
persistent-redis/Database/Persist/Redis.hs
|
mit
| 287
| 0
| 5
| 36
| 54
| 39
| 15
| 7
| 0
|
-- Typechecker regression test: superclass constraints (Eq2 is a
-- superclass of Ord2) and an instance whose context mentions both
-- classes.  The module must typecheck; the definitions are trivial.
module ShouldSucceed where
-- A user-defined equality class (independent of Prelude's Eq)
class Eq2 a where
 doubleeq :: a -> a -> Bool
-- Ord2 has Eq2 as a superclass
class (Eq2 a) => Ord2 a where
 lt :: a -> a -> Bool
instance Eq2 Int where
 doubleeq x y = True
instance Ord2 Int where
 lt x y = True
-- List instance requiring both Eq2 and Ord2 on the element type
instance (Eq2 a,Ord2 a) => Eq2 [a] where
 doubleeq xs ys = True
-- Forces instance resolution: x must be a numeric list here
f x y = doubleeq x [1]
|
forked-upstream-packages-for-ghcjs/ghc
|
testsuite/tests/typecheck/should_compile/tc058.hs
|
bsd-3-clause
| 302
| 0
| 8
| 78
| 147
| 76
| 71
| 12
| 1
|
{-# LANGUAGE TemplateHaskell #-}
-- | Test that ANN pragmas of every flavour (module, value/constructor,
-- and type annotations) can be generated from a Template Haskell splice.
module TestModuleTH where
import Language.Haskell.TH
$(do
     -- ANN pragma attached to the whole module
     modAnn <- pragAnnD ModuleAnnotation
                        (stringE "TH module annotation")
     [typ] <- [d| data TestTypeTH = TestTypeTH |]
     -- ANN pragmas attached to the constructor and the type of the same name
     conAnn <- pragAnnD (ValueAnnotation $ mkName "TestTypeTH")
                        (stringE "TH Constructor annotation")
     typAnn <- pragAnnD (TypeAnnotation $ mkName "TestTypeTH")
                        (stringE "TH Type annotation")
     -- ANN pragma attached to an ordinary value binding
     valAnn <- pragAnnD (ValueAnnotation $ mkName "testValueTH")
                        (stringE "TH Value annotation")
     [val] <- [d| testValueTH = (42 :: Int) |]
     return [modAnn, conAnn, typAnn, typ, valAnn, val] )
|
urbanslug/ghc
|
testsuite/tests/annotations/should_compile/th/TestModuleTH.hs
|
bsd-3-clause
| 711
| 0
| 13
| 210
| 172
| 90
| 82
| 15
| 0
|
-- Typechecker test: mutually recursive bindings whose types must be
-- inferred together (f and g call each other with swapped arguments,
-- so the inferred type is Bool -> Bool -> Bool for both).
module ShouldSucceed where
h x = f (f True x) x
f x y = if x then y else (g y x)
g y x = if x then y else (f x y)
|
urbanslug/ghc
|
testsuite/tests/typecheck/should_compile/tc027.hs
|
bsd-3-clause
| 116
| 0
| 7
| 37
| 77
| 41
| 36
| 4
| 2
|
{-# htermination (enumFromTup0 :: Tup0 -> (List Tup0)) #-}
import qualified Prelude
-- Minimal stand-ins for Prelude types, as required by the
-- termination-analysis benchmark format.
data MyBool = MyTrue | MyFalse
data List a = Cons a (List a) | Nil
data Tup0 = Tup0 ;
-- enumFrom for the unit-like type: its only value enumerates to itself
enumFromTup0 :: Tup0 -> (List Tup0)
enumFromTup0 Tup0 = Cons Tup0 Nil;
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/basic_haskell/enumFrom_1.hs
|
mit
| 251
| 0
| 8
| 55
| 74
| 42
| 32
| 6
| 1
|
-- | 'foldr1' is a variant of 'foldr' that
-- has no starting value argument,
-- and thus must be applied to non-empty lists.
foldr1 :: (a -> a -> a) -> [a] -> a
foldr1 f = go
  where
    -- The final element is the base of the fold; the singleton clause
    -- must come before the cons clause so it wins for one-element lists.
    go [x]    = x
    go (x:xs) = f x (go xs)
    go []     = errorEmptyList "foldr1"
|
iharh/fp-by-example
|
tex/src/foldr1_hof.hs
|
mit
| 262
| 0
| 8
| 64
| 89
| 47
| 42
| 4
| 1
|
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.HTMLMenuElement
(js_setCompact, setCompact, js_getCompact, getCompact,
HTMLMenuElement, castToHTMLMenuElement, gTypeHTMLMenuElement)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSRef(..), JSString, castRef)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSRef(..), FromJSRef(..))
import GHCJS.Marshal.Pure (PToJSRef(..), PFromJSRef(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.Enums
-- Raw FFI binding: assigns the 'compact' IDL attribute on the element
foreign import javascript unsafe "$1[\"compact\"] = $2;"
        js_setCompact :: JSRef HTMLMenuElement -> Bool -> IO ()
-- | Set the element's @compact@ attribute.
-- <https://developer.mozilla.org/en-US/docs/Web/API/HTMLMenuElement.compact Mozilla HTMLMenuElement.compact documentation>
setCompact :: (MonadIO m) => HTMLMenuElement -> Bool -> m ()
setCompact self val
  = liftIO (js_setCompact (unHTMLMenuElement self) val)
-- Raw FFI binding: reads 'compact', coercing the JS truthiness to 0/1
foreign import javascript unsafe "($1[\"compact\"] ? 1 : 0)"
        js_getCompact :: JSRef HTMLMenuElement -> IO Bool
-- | Read the element's @compact@ attribute.
-- <https://developer.mozilla.org/en-US/docs/Web/API/HTMLMenuElement.compact Mozilla HTMLMenuElement.compact documentation>
getCompact :: (MonadIO m) => HTMLMenuElement -> m Bool
getCompact self = liftIO (js_getCompact (unHTMLMenuElement self))
|
plow-technologies/ghcjs-dom
|
src/GHCJS/DOM/JSFFI/Generated/HTMLMenuElement.hs
|
mit
| 1,766
| 14
| 9
| 212
| 449
| 275
| 174
| 27
| 1
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.