code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE QuasiQuotes #-}
import LiquidHaskell
[lq| ack :: m:Nat -> n:Nat -> Nat / [m, n]|]
-- | The Ackermann function. Total for non-negative arguments; the
-- LiquidHaskell annotation above restricts both arguments to Nat and
-- proves termination via the lexicographic measure [m, n].
ack :: Int -> Int -> Int
ack m n
  | m == 0          = n + 1
  | m > 0 && n == 0 = ack (m - 1) 1
  | m > 0 && n > 0  = ack (m - 1) (ack m (n - 1))
  -- Previously the guards were non-exhaustive: a negative argument died
  -- with an opaque "Non-exhaustive guards" error. Fail with a clear message.
  | otherwise       = error "ack: negative argument"
| spinda/liquidhaskell | tests/gsoc15/unknown/pos/Ackermann.hs | bsd-3-clause | 247 | 2 | 10 | 88 | 142 | 69 | 73 | 8 | 1 |
-- |
-- Module : Language.SequentCore.Inspect
-- Description : Sequent Core information dumper
-- Maintainer : maurerl@cs.uoregon.edu
-- Stability : experimental
--
-- An optimizer plugin that reports specified information about the module from
-- a Sequent Core standpoint.
module Language.SequentCore.Inspect (
plugin
) where
import GhcPlugins ( Plugin(installCoreToDos), CommandLineOption
, defaultPlugin
, reinitializeGlobals
, CoreM, CoreToDo(CoreDoPluginPass)
, putMsg, errorMsg
, getDynFlags, ufCreationThreshold
)
import Language.SequentCore.Simpl.ExprSize
import Language.SequentCore.Syntax
import Language.SequentCore.Plugin
import Outputable
import Control.Monad (forM_)
-- | The plugin. A GHC plugin is a module that exports a value called @plugin@
-- with the type 'Plugin'.
plugin :: Plugin
plugin = defaultPlugin {
  installCoreToDos = install -- hook our pass installer into GHC's Core pipeline
}
-- | Splice the inspection pass into the Core-to-Core pipeline.
-- The command-line options are forwarded verbatim to the pass itself.
install :: [CommandLineOption] -> [CoreToDo] -> CoreM [CoreToDo]
install opts todos =
  do reinitializeGlobals -- plugin boilerplate required by the GHC plugin API
     -- This puts the dump pass at the beginning of the pipeline, before any
     -- optimization. To see the post-optimizer state, put 'newPass' at the back
     -- of the list instead.
     return $ newPass : todos
  where
    -- Wrap the inspection as a named plugin pass running over Sequent Core.
    newPass = CoreDoPluginPass "sequent-core-inspect" passFunc
    passFunc = sequentPass (inspectSequentCore opts)
-- | Settings accumulated from the plugin's command-line options.
data Options = Options { optShowSizes :: Bool, optUnrecognized :: [String] }

-- | Settings in effect before any option is parsed.
defaults :: Options
defaults = Options { optShowSizes = False, optUnrecognized = [] }

-- | Fold a single command-line token into the settings; anything we do
-- not recognise is collected so it can be reported later.
parseOption :: String -> Options -> Options
parseOption flag opts = case flag of
  "size" -> opts { optShowSizes = True }
  other  -> opts { optUnrecognized = other : optUnrecognized opts }
-- | The pass itself: report the requested information for every binding,
-- returning the module unchanged (this pass never transforms code).
inspectSequentCore :: [CommandLineOption] -> [SeqCoreBind] -> CoreM [SeqCoreBind]
inspectSequentCore rawOpts bs = do
  let opts = foldr parseOption defaults rawOpts
      unknownOpts = optUnrecognized opts
  if null unknownOpts
    then do
      -- Dump every binding; recursive groups are walked pair by pair.
      forM_ bs $ \bind -> case bind of
        NonRec pair -> showBind opts pair
        Rec pairs -> forM_ pairs (showBind opts)
    else do
      errorMsg $ text "Unrecognized option(s): " <+>
        sep (punctuate comma $ map text unknownOpts)
  return bs
-- | Print one binder, optionally followed by the size of its right-hand
-- side (enabled by the @size@ option).
showBind :: Options -> SeqCoreBindPair -> CoreM ()
showBind opts pair
  = do
    dflags <- getDynFlags
    let (x, rhs) = destBindPair pair
        idPart = ppr x
        -- Sizing is bounded by the unfolding-creation threshold.
        cap = ufCreationThreshold dflags
        sizePart | optShowSizes opts = ppr size
                 | otherwise = empty
        -- A pair's RHS is either a term or a continuation; size each accordingly.
        size = either (termSize dflags cap) (pKontSize dflags cap) rhs
    putMsg $ sep [ idPart, sizePart ]
  where
  -- (empty 'where' clause; harmless leftover)
| osa1/sequent-core | src/Language/SequentCore/Inspect.hs | bsd-3-clause | 2,720 | 0 | 17 | 664 | 604 | 326 | 278 | 53 | 3 |
{-# Language FlexibleContexts #-}
--------------------------------------------------------------------
-- |
-- Module: HTrade.Shared.Utils
--
-- Various utility functions shared between the projects.
module HTrade.Shared.Utils where
import qualified Control.Concurrent.Async as A
import Control.Concurrent (threadDelay)
import qualified Control.Exception as E
import Control.Monad.Base (liftBase, MonadBase)
import Data.Word (Word16, Word64)
import qualified Pipes as P
import HTrade.Shared.Types
-- | Default port used by the backend service.
backendPort :: Word16
backendPort = 1111 -- fixed default; not configurable at runtime
-- | Pipline component terminating upon receiving 'Data.Maybe.Nothing',
-- propagating uwrapped Just values.
-- | Pipe that forwards the payload of every 'Just' downstream and shuts
-- down as soon as a 'Nothing' arrives.
terminateD
  :: Monad m
  => P.Pipe (Maybe a) a m ()
terminateD = P.await >>= maybe (return ()) emit
  where
    emit x = P.yield x >> terminateD
-- | Apply a function if the supplied value isn't 'Data.Maybe.Nothing'.
-- | Run the monadic continuation only when a value is present; a missing
-- value short-circuits to @return Nothing@.
onJust
  :: Monad m
  => Maybe a
  -> (a -> m (Maybe b))
  -> m (Maybe b)
onJust mval f = maybe (return Nothing) f mval
-- | Evaluate an IO action and catch ANY exceptions in an either value.
tryAny :: IO a -> IO (Either E.SomeException a)
-- Runs the action on its own thread via 'A.withAsync' and collects either
-- its result or any exception it raised with 'A.waitCatch'.
tryAny action = A.withAsync action A.waitCatch
-- | Hoist synchronous exceptions into Maybe and let all other pass.
blockExceptions :: IO (Maybe a) -> IO (Maybe a)
blockExceptions = fmap filterExp . tryAny
  where
    -- Successful results pass through unchanged.
    filterExp (Right result) = result
    -- Only asynchronous exceptions (e.g. cancellation) are re-thrown;
    -- everything else is collapsed into Nothing.
    filterExp (Left e) = case (E.fromException e :: Maybe E.AsyncException) of
      Nothing -> Nothing -- synchronous exception occured, register.
      Just _ -> E.throw e -- asynchronous exception occured, re-throw.
-- | Convert seconds to microseconds.
-- | Convert a whole number of seconds to microseconds.
seconds
  :: Word64
  -> MicroSeconds
seconds s = s * 1000000
-- | Wait for specified amount of time (in microseconds).
delay
  :: MonadBase IO m
  => MicroSeconds
  -> m ()
-- NOTE(review): 'fromIntegral' narrows to Int for 'threadDelay'; a duration
-- beyond maxBound :: Int would wrap -- confirm callers stay in range.
delay = liftBase . threadDelay . fromIntegral
| davnils/distr-btc | src/HTrade/Shared/Utils.hs | bsd-3-clause | 2,060 | 0 | 12 | 482 | 475 | 256 | 219 | 43 | 3 |
{-# LANGUAGE BangPatterns #-}
module Math.ContinuedFraction (CF
,fromDouble
,toDouble
,fromDoubleP
,takeCF
)
where
import qualified Data.Vector as V
-- |Continued fraction datatype, storing coefficients in a 'Data.Vector'
-- Coefficients are stored in expansion order: [a0, a1, a2, ...].
newtype CF = CF { unCF :: V.Vector Integer }

instance Show CF where
  show = show . unCF -- shown as the underlying coefficient vector
-- | Remainders beyond this bound terminate the expansion: past it the
-- remaining precision of a 'Double' carries no information.
-- (Was previously missing a type signature and relied on defaulting.)
restLimit :: Double
restLimit = 2 ** 128

-- | Split a real into its integer part and the reciprocal of its
-- fractional part -- one step of the continued-fraction expansion.
splitNum :: Double -> (Integer, Double)
splitNum x = a `seq` b `seq` (a, b)
  where
    a = floor x :: Integer
    b = 1 / (x - fromInteger a)

-- | Unfold step: stop once the remainder exceeds 'restLimit', otherwise
-- emit the next coefficient and the remainder to continue from.
fracStep :: Double -> Maybe (Integer, Double)
fracStep x
  | x > restLimit = Nothing
  | otherwise     = Just (splitNum x)
-- |Computes the continued fraction of the given real number
fromDouble :: Double -> CF
fromDouble = CF . V.unfoldr fracStep

-- |Computes the first 'n' coefficients of the continued fraction associated to the real number
-- P stands for 'Partial'
fromDoubleP :: Int -> Double -> CF
fromDoubleP n = CF . V.unfoldrN n fracStep
-- |Computes the real number corresponding to the given continued fraction.
--
-- Evaluated right-to-left as v_k = a_k + 1/v_{k+1}, seeded with infinity so
-- the innermost 1/v term vanishes. The previous implementation computed
-- @acc + 1 / a_k@ -- a sum of coefficient reciprocals -- which does not
-- invert 'fromDouble' (e.g. coefficients [2,2], i.e. 2.5, evaluated to 1.0).
toDouble :: CF -> Double
toDouble (CF v)
  | V.null v  = 0 -- degenerate empty expansion, kept from the old behaviour
  | otherwise = V.foldr step (1 / 0) v
  where
    step a rest = fromInteger a + 1 / rest
-- |Returns a 'Data.Vector' with the first 'n' coefficients of the continued fraction
takeCF :: Int -> CF -> V.Vector Integer
takeCF n = V.take n . unCF
module DependencyInjection.Laces
( Inject()
, inject
, unInject
, Dependable()
, use
, Module()
, DependencyError(..)
, componentOrThrow
, componentMay
, componentWhy
)
where
import Control.Exception
import Data.Dynamic
import qualified Data.Map as M
import Data.Maybe
import Data.Monoid
-- | Wrapper marking a value to be provided as-is, rather than treated as
-- a factory function whose arguments should be resolved.
newtype Inject a = Inject a
  deriving Typeable

-- | Wrap a constant for registration with 'use'.
inject :: a -> Inject a
inject = Inject

-- | Unwrap an injected constant.
unInject :: Inject a -> a
unInject (Inject x) = x
-- | Recipe for building one component: the type it produces, the types it
-- needs as inputs, and a final coercion applied to the assembled result.
data Dependencies = Dependencies
  { dependenciesTarget :: TypeRep
  , dependenciesDependencies :: [TypeRep]
  , dependenciesFinalise :: Dynamic
  }

-- | Things usable as factories: injected constants and functions whose
-- arguments can themselves be resolved.
class Typeable a => Dependable a where
  depend :: a -> Dependencies

instance Typeable a => Dependable (Inject a) where
  depend a = Dependencies (typeRep a) [] (toDyn (unInject :: Inject a -> a))

instance (Typeable i, Dependable o) => Dependable (i -> o) where
  -- 'typeRep (Flip f)' picks out the function's argument type i.
  depend f = inner {dependenciesDependencies = typeRep (Flip f) : dependenciesDependencies inner} where
    -- Applying f to undefined reaches the codomain's Dependencies; this
    -- assumes 'depend' never forces the argument's value, which holds for
    -- the instances in this module.
    inner = (depend $ f undefined)

-- Swaps the last two type arguments, so 'typeRep' on @Flip (->) o i@
-- yields the representation of the argument type @i@.
newtype Flip a b c = Flip (a c b)
-- | Register one factory (an 'inject'ed constant or a function) as a
-- single-entry 'Module'; combine modules with '<>'.
use :: Dependable a => a -> Module
use factory = Module $ M.singleton dependenciesTarget [Injection (toDyn factory) dependenciesDependencies dependenciesFinalise] where
  -- RecordWildCards brings the three Dependencies fields into scope.
  Dependencies{..} = depend factory
-- | A collection of factories keyed by the type they produce. More than
-- one factory for a type becomes a 'DuplicateDependency' at resolution.
newtype Module = Module (M.Map TypeRep [Injection])

-- | One registered factory in resolvable form.
data Injection = Injection
  { injectionBase :: Dynamic           -- the factory value itself
  , injectionDependencies :: [TypeRep] -- argument types, in order
  , injectionFinalise :: Dynamic       -- coercion applied to the result
  }

-- | Ways component resolution can fail.
data DependencyError = MissingDependency TypeRep | DuplicateDependency TypeRep
  deriving (Eq, Show)

-- | Exception carrying all resolution errors; thrown by 'componentOrThrow'.
data DependencyException = DependencyException [DependencyError]
  deriving (Show, Typeable)

instance Exception DependencyException
-- Modules merge pointwise; factories for the same target type accumulate
-- (and are later reported as duplicates if actually requested).
-- NOTE(review): on GHC >= 8.4 Monoid requires a Semigroup superclass
-- instance; a @Semigroup Module@ instance will be needed when upgrading.
instance Monoid Module where
  mempty = Module mempty
  Module x `mappend` Module y = Module (M.unionWith (<>) x y)
-- | Resolve a component from the module, throwing a 'DependencyException'
-- (carrying every resolution error) if it cannot be built.
componentOrThrow :: Typeable a => Module -> a
componentOrThrow module' =
  either (throw . DependencyException) id (componentWhy module')
-- | Resolve a component from the module, discarding the error details.
componentMay :: Typeable a => Module -> Maybe a
componentMay = either (const Nothing) Just . componentWhy
-- | Resolve a component, reporting every missing or duplicated dependency
-- encountered while walking the dependency graph.
componentWhy :: forall a. Typeable a => Module -> Either [DependencyError] a
componentWhy (Module moduleMap) = ret where
  ret :: Either [DependencyError] a
  -- 'typeRep ret' uses the *requested* result type as the lookup key.
  ret = fmap unDynamic (dependency $ typeRep ret) where
    unDynamic :: Dynamic -> a
    unDynamic dyn = fromDyn dyn (bug ["wrong return type", showDynType dyn])
  -- Build the component for one type: look up its (unique) factory, then
  -- recursively resolve and apply each argument, finishing with the
  -- registered finaliser.
  dependency :: TypeRep -> Either [DependencyError] Dynamic
  dependency rep = case fromMaybe [] (M.lookup rep moduleMap) of
    [] -> Left [MissingDependency rep]
    [Injection{..}] -> fmap (apply injectionFinalise)
      $ foldr cons (Right injectionBase) (reverse injectionDependencies)
    _ -> Left [DuplicateDependency rep]
  -- Apply the partially-built factory to one more resolved argument,
  -- accumulating errors from both sides via '<+>'.
  cons :: TypeRep -> Either [DependencyError] Dynamic -> Either [DependencyError] Dynamic
  cons xRep fDyn = apply <$> fDyn <+> dependency xRep
  -- Dynamic application; a type mismatch here is an internal invariant
  -- violation, not a user error.
  apply :: Dynamic -> Dynamic -> Dynamic
  apply fDyn xDyn
    = fromMaybe
      (bug
        [ "incompatible types when applying function;"
        , showDynType fDyn
        , showDynType xDyn
        ]
      )
      (dynApply fDyn xDyn)
infixl 4 <+> -- same as <*>
-- | Error-accumulating application for 'Either': unlike the standard
-- 'Applicative' instance, failures on *both* sides are combined.
(<+>) :: Monoid e => Either e (a -> b) -> Either e a -> Either e b
ef <+> ex = case (ef, ex) of
  (Left e,  Left e') -> Left (e <> e')
  (Left e,  Right _) -> Left e
  (Right _, Left e') -> Left e'
  (Right f, Right x) -> Right (f x)
-- | Render the type stored in a 'Dynamic', parenthesised for error messages.
showDynType :: Dynamic -> String
showDynType dyn = concat ["(", show (dynTypeRep dyn), ")"]
-- | Abort with an internal-invariant-violation message. Only reachable
-- when this library itself is wrong, never from bad user input.
bug :: [String] -> a
bug details = error (unwords ("BUG in DependencyInjection.Laces:" : details))
| dave4420/dependency-injection-laces | src/DependencyInjection/Laces.hs | bsd-3-clause | 3,646 | 0 | 13 | 767 | 1,246 | 645 | 601 | -1 | -1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE OverloadedStrings #-}
-- |
-- #name_types#
-- GHC uses several kinds of name internally:
--
-- * 'OccName.OccName' represents names as strings with just a little more information:
-- the \"namespace\" that the name came from, e.g. the namespace of value, type constructors or
-- data constructors
--
-- * 'RdrName.RdrName': see "RdrName#name_types"
--
-- * 'Name.Name': see "Name#name_types"
--
-- * 'Id.Id': see "Id#name_types"
--
-- * 'Var.Var': see "Var#name_types"
module OccName (
-- * The 'NameSpace' type
NameSpace, -- Abstract
nameSpacesRelated,
-- ** Construction
-- $real_vs_source_data_constructors
tcName, clsName, tcClsName, dataName, varName,
tvName, srcDataName,
-- ** Pretty Printing
pprNameSpace, pprNonVarNameSpace, pprNameSpaceBrief,
-- * The 'OccName' type
OccName, -- Abstract, instance of Outputable
pprOccName,
-- ** Construction
mkOccName, mkOccNameFS,
mkVarOcc, mkVarOccFS,
mkDataOcc, mkDataOccFS,
mkTyVarOcc, mkTyVarOccFS,
mkTcOcc, mkTcOccFS,
mkClsOcc, mkClsOccFS,
mkDFunOcc,
setOccNameSpace,
demoteOccName,
HasOccName(..),
-- ** Derived 'OccName's
isDerivedOccName,
mkDataConWrapperOcc, mkWorkerOcc,
mkMatcherOcc, mkBuilderOcc,
mkDefaultMethodOcc, isDefaultMethodOcc, isTypeableBindOcc,
mkNewTyCoOcc, mkClassOpAuxOcc,
mkCon2TagOcc, mkTag2ConOcc, mkMaxTagOcc,
mkClassDataConOcc, mkDictOcc, mkIPOcc,
mkSpecOcc, mkForeignExportOcc, mkRepEqOcc,
mkGenR, mkGen1R,
mkDataTOcc, mkDataCOcc, mkDataConWorkerOcc,
mkSuperDictSelOcc, mkSuperDictAuxOcc,
mkLocalOcc, mkMethodOcc, mkInstTyTcOcc,
mkInstTyCoOcc, mkEqPredCoOcc,
mkRecFldSelOcc,
mkTyConRepOcc,
-- ** Deconstruction
occNameFS, occNameString, occNameSpace,
isVarOcc, isTvOcc, isTcOcc, isDataOcc, isDataSymOcc, isSymOcc, isValOcc,
parenSymOcc, startsWithUnderscore,
isTcClsNameSpace, isTvNameSpace, isDataConNameSpace, isVarNameSpace, isValNameSpace,
-- * The 'OccEnv' type
OccEnv, emptyOccEnv, unitOccEnv, extendOccEnv, mapOccEnv,
lookupOccEnv, mkOccEnv, mkOccEnv_C, extendOccEnvList, elemOccEnv,
occEnvElts, foldOccEnv, plusOccEnv, plusOccEnv_C, extendOccEnv_C,
extendOccEnv_Acc, filterOccEnv, delListFromOccEnv, delFromOccEnv,
alterOccEnv, pprOccEnv,
-- * The 'OccSet' type
OccSet, emptyOccSet, unitOccSet, mkOccSet, extendOccSet,
extendOccSetList,
unionOccSets, unionManyOccSets, minusOccSet, elemOccSet,
isEmptyOccSet, intersectOccSet, intersectsOccSet,
filterOccSet,
-- * Tidying up
TidyOccEnv, emptyTidyOccEnv, initTidyOccEnv,
tidyOccName, avoidClashesOccEnv, delTidyOccEnvList,
-- FsEnv
FastStringEnv, emptyFsEnv, lookupFsEnv, extendFsEnv, mkFsEnv
) where
import GhcPrelude
import Util
import Unique
import DynFlags
import UniqFM
import UniqSet
import FastString
import FastStringEnv
import Outputable
import Lexeme
import Binary
import Control.DeepSeq
import Data.Char
import Data.Data
{-
************************************************************************
* *
\subsection{Name space}
* *
************************************************************************
-}
-- | The namespaces an occurrence name can belong to. Haskell keeps values,
-- data constructors, type variables and type constructors/classes apart,
-- so the same string may legally name several different entities.
data NameSpace = VarName -- Variables, including "real" data constructors
               | DataName -- "Source" data constructors
               | TvName -- Type variables
               | TcClsName -- Type constructors and classes; Haskell has them
                           -- in the same name space for now.
               deriving( Eq, Ord )
-- Note [Data Constructors]
-- see also: Note [Data Constructor Naming] in DataCon.hs
--
-- $real_vs_source_data_constructors
-- There are two forms of data constructor:
--
-- [Source data constructors] The data constructors mentioned in Haskell source code
--
-- [Real data constructors] The data constructors of the representation type, which may not be the same as the source type
--
-- For example:
--
-- > data T = T !(Int, Int)
--
-- The source datacon has type @(Int, Int) -> T@
-- The real datacon has type @Int -> Int -> T@
--
-- GHC chooses a representation based on the strictness etc.
tcName, clsName, tcClsName :: NameSpace
dataName, srcDataName :: NameSpace
tvName, varName :: NameSpace

-- Though type constructors and classes are in the same name space now,
-- the NameSpace type is abstract, so we can easily separate them later
tcName = TcClsName -- Type constructors
clsName = TcClsName -- Classes
tcClsName = TcClsName -- Not sure which!

dataName = DataName
srcDataName = DataName -- Haskell-source data constructors should be
                       -- in the Data name space

tvName = TvName
varName = VarName

-- | Is this the namespace of data constructors?
isDataConNameSpace :: NameSpace -> Bool
isDataConNameSpace DataName = True
isDataConNameSpace _ = False

-- | Is this the shared namespace of type constructors and classes?
isTcClsNameSpace :: NameSpace -> Bool
isTcClsNameSpace TcClsName = True
isTcClsNameSpace _ = False

-- | Is this the namespace of type variables?
isTvNameSpace :: NameSpace -> Bool
isTvNameSpace TvName = True
isTvNameSpace _ = False

isVarNameSpace :: NameSpace -> Bool -- Variables or type variables, but not constructors
isVarNameSpace TvName = True
isVarNameSpace VarName = True
isVarNameSpace _ = False

-- | Is this a namespace of values, i.e. variables or data constructors?
isValNameSpace :: NameSpace -> Bool
isValNameSpace DataName = True
isValNameSpace VarName = True
isValNameSpace _ = False

-- | User-facing description of a namespace, for error messages.
pprNameSpace :: NameSpace -> SDoc
pprNameSpace DataName = text "data constructor"
pprNameSpace VarName = text "variable"
pprNameSpace TvName = text "type variable"
pprNameSpace TcClsName = text "type constructor or class"

-- | Like 'pprNameSpace', but prints nothing for the variable namespace.
pprNonVarNameSpace :: NameSpace -> SDoc
pprNonVarNameSpace VarName = empty
pprNonVarNameSpace ns = pprNameSpace ns

-- | One- or two-character namespace tag, used in debug output.
pprNameSpaceBrief :: NameSpace -> SDoc
pprNameSpaceBrief DataName = char 'd'
pprNameSpaceBrief VarName = char 'v'
pprNameSpaceBrief TvName = text "tv"
pprNameSpaceBrief TcClsName = text "tc"

-- demoteNameSpace lowers the NameSpace if possible. We can not know
-- in advance, since a TvName can appear in an HsTyVar.
-- See Note [Demotion] in GHC.Rename.Env
demoteNameSpace :: NameSpace -> Maybe NameSpace
demoteNameSpace VarName = Nothing
demoteNameSpace DataName = Nothing
demoteNameSpace TvName = Nothing
demoteNameSpace TcClsName = Just DataName
{-
************************************************************************
* *
\subsection[Name-pieces-datatypes]{The @OccName@ datatypes}
* *
************************************************************************
-}
-- | Occurrence Name
--
-- In this context that means:
-- "classified (i.e. as a type name, value name, etc) but not qualified
-- and not yet resolved"
-- An OccName pairs the bare string with the namespace it was classified
-- into; both fields are strict.
data OccName = OccName
  { occNameSpace :: !NameSpace
  , occNameFS :: !FastString
  }

-- Equality compares the string first, then the namespace.
instance Eq OccName where
  (OccName sp1 s1) == (OccName sp2 s2) = s1 == s2 && sp1 == sp2

instance Ord OccName where
  -- Compares lexicographically, *not* by Unique of the string
  compare (OccName sp1 s1) (OccName sp2 s2)
    = (s1 `compare` s2) `thenCmp` (sp1 `compare` sp2)

instance Data OccName where
  -- don't traverse?
  toConstr _ = abstractConstr "OccName"
  gunfold _ _ = error "gunfold"
  dataTypeOf _ = mkNoRepType "OccName"

instance HasOccName OccName where
  occName = id

instance NFData OccName where
  rnf x = x `seq` () -- fields are strict, so WHNF already is normal form
{-
************************************************************************
* *
\subsection{Printing}
* *
************************************************************************
-}
instance Outputable OccName where
  ppr = pprOccName

instance OutputableBndr OccName where
  pprBndr _ = ppr
  pprInfixOcc n = pprInfixVar (isSymOcc n) (ppr n)
  pprPrefixOcc n = pprPrefixVar (isSymOcc n) (ppr n)

-- | Print an 'OccName': z-encoded in code-generation style, otherwise the
-- plain string, plus a brief namespace tag in debug style.
pprOccName :: OccName -> SDoc
pprOccName (OccName sp occ)
  = getPprStyle $ \ sty ->
    if codeStyle sty
    then ztext (zEncodeFS occ)
    else pp_occ <> pp_debug sty
  where
    pp_debug sty | debugStyle sty = braces (pprNameSpaceBrief sp)
                 | otherwise = empty
    pp_occ = sdocWithDynFlags $ \dflags ->
             if gopt Opt_SuppressUniques dflags
             then text (strip_th_unique (unpackFS occ))
             else ftext occ
    -- See Note [Suppressing uniques in OccNames]
    strip_th_unique ('[' : c : _) | isAlphaNum c = []
    strip_th_unique (c : cs) = c : strip_th_unique cs
    strip_th_unique [] = []
{-
Note [Suppressing uniques in OccNames]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This is a hack to de-wobblify the OccNames that contain uniques from
Template Haskell that have been turned into a string in the OccName.
See Note [Unique OccNames from Template Haskell] in Convert.hs
************************************************************************
* *
\subsection{Construction}
* *
************************************************************************
-}
-- | Build an 'OccName' in the given namespace from a 'String'.
mkOccName :: NameSpace -> String -> OccName
mkOccName occ_sp str = OccName occ_sp (mkFastString str)

-- | Build an 'OccName' in the given namespace from a 'FastString'.
mkOccNameFS :: NameSpace -> FastString -> OccName
mkOccNameFS occ_sp fs = OccName occ_sp fs

-- Per-namespace shorthands, in String and FastString flavours.
mkVarOcc :: String -> OccName
mkVarOcc s = mkOccName varName s

mkVarOccFS :: FastString -> OccName
mkVarOccFS fs = mkOccNameFS varName fs

mkDataOcc :: String -> OccName
mkDataOcc = mkOccName dataName

mkDataOccFS :: FastString -> OccName
mkDataOccFS = mkOccNameFS dataName

mkTyVarOcc :: String -> OccName
mkTyVarOcc = mkOccName tvName

mkTyVarOccFS :: FastString -> OccName
mkTyVarOccFS fs = mkOccNameFS tvName fs

mkTcOcc :: String -> OccName
mkTcOcc = mkOccName tcName

mkTcOccFS :: FastString -> OccName
mkTcOccFS = mkOccNameFS tcName

mkClsOcc :: String -> OccName
mkClsOcc = mkOccName clsName

mkClsOccFS :: FastString -> OccName
mkClsOccFS = mkOccNameFS clsName

-- demoteOccName lowers the Namespace of OccName.
-- see Note [Demotion]
demoteOccName :: OccName -> Maybe OccName
demoteOccName (OccName space name) = do
  space' <- demoteNameSpace space
  return $ OccName space' name

-- Name spaces are related if there is a chance to mean the one when one writes
-- the other, i.e. variables <-> data constructors and type variables <-> type constructors
nameSpacesRelated :: NameSpace -> NameSpace -> Bool
nameSpacesRelated ns1 ns2 = ns1 == ns2 || otherNameSpace ns1 == ns2

-- | The "confusable" counterpart of a namespace (term level <-> type level).
otherNameSpace :: NameSpace -> NameSpace
otherNameSpace VarName = DataName
otherNameSpace DataName = VarName
otherNameSpace TvName = TcClsName
otherNameSpace TcClsName = TvName

{- | Other names in the compiler add additional information to an OccName.
This class provides a consistent way to access the underlying OccName. -}
class HasOccName name where
  occName :: name -> OccName
{-
************************************************************************
* *
Environments
* *
************************************************************************
OccEnvs are used mainly for the envts in ModIfaces.
Note [The Unique of an OccName]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
They are efficient, because FastStrings have unique Int# keys. We assume
this key is less than 2^24, and indeed FastStrings are allocated keys
sequentially starting at 0.
So we can make a Unique using
mkUnique ns key :: Unique
where 'ns' is a Char representing the name space. This in turn makes it
easy to build an OccEnv.
-}
instance Uniquable OccName where
  -- See Note [The Unique of an OccName]
  getUnique (OccName VarName fs) = mkVarOccUnique fs
  getUnique (OccName DataName fs) = mkDataOccUnique fs
  getUnique (OccName TvName fs) = mkTvOccUnique fs
  getUnique (OccName TcClsName fs) = mkTcOccUnique fs

-- | A map keyed on 'OccName', implemented as a thin wrapper over 'UniqFM'
-- (see Note [The Unique of an OccName]).
newtype OccEnv a = A (UniqFM a)
  deriving Data

emptyOccEnv :: OccEnv a
unitOccEnv :: OccName -> a -> OccEnv a
extendOccEnv :: OccEnv a -> OccName -> a -> OccEnv a
extendOccEnvList :: OccEnv a -> [(OccName, a)] -> OccEnv a
lookupOccEnv :: OccEnv a -> OccName -> Maybe a
mkOccEnv :: [(OccName,a)] -> OccEnv a
mkOccEnv_C :: (a -> a -> a) -> [(OccName,a)] -> OccEnv a
elemOccEnv :: OccName -> OccEnv a -> Bool
foldOccEnv :: (a -> b -> b) -> b -> OccEnv a -> b
occEnvElts :: OccEnv a -> [a]
extendOccEnv_C :: (a->a->a) -> OccEnv a -> OccName -> a -> OccEnv a
extendOccEnv_Acc :: (a->b->b) -> (a->b) -> OccEnv b -> OccName -> a -> OccEnv b
plusOccEnv :: OccEnv a -> OccEnv a -> OccEnv a
plusOccEnv_C :: (a->a->a) -> OccEnv a -> OccEnv a -> OccEnv a
mapOccEnv :: (a->b) -> OccEnv a -> OccEnv b
delFromOccEnv :: OccEnv a -> OccName -> OccEnv a
delListFromOccEnv :: OccEnv a -> [OccName] -> OccEnv a
filterOccEnv :: (elt -> Bool) -> OccEnv elt -> OccEnv elt
alterOccEnv :: (Maybe elt -> Maybe elt) -> OccEnv elt -> OccName -> OccEnv elt

-- Each operation simply unwraps and delegates to the UniqFM equivalent.
emptyOccEnv = A emptyUFM
unitOccEnv x y = A $ unitUFM x y
extendOccEnv (A x) y z = A $ addToUFM x y z
extendOccEnvList (A x) l = A $ addListToUFM x l
lookupOccEnv (A x) y = lookupUFM x y
mkOccEnv l = A $ listToUFM l
elemOccEnv x (A y) = elemUFM x y
foldOccEnv a b (A c) = foldUFM a b c
occEnvElts (A x) = eltsUFM x
plusOccEnv (A x) (A y) = A $ plusUFM x y
plusOccEnv_C f (A x) (A y) = A $ plusUFM_C f x y
extendOccEnv_C f (A x) y z = A $ addToUFM_C f x y z
extendOccEnv_Acc f g (A x) y z = A $ addToUFM_Acc f g x y z
mapOccEnv f (A x) = A $ mapUFM f x
mkOccEnv_C comb l = A $ addListToUFM_C comb emptyUFM l
delFromOccEnv (A x) y = A $ delFromUFM x y
delListFromOccEnv (A x) y = A $ delListFromUFM x y
filterOccEnv x (A y) = A $ filterUFM x y
alterOccEnv fn (A y) k = A $ alterUFM fn y k

instance Outputable a => Outputable (OccEnv a) where
  ppr x = pprOccEnv ppr x

pprOccEnv :: (a -> SDoc) -> OccEnv a -> SDoc
pprOccEnv ppr_elt (A env) = pprUniqFM ppr_elt env
-- | A set of 'OccName's, keyed on their 'Unique' (see 'Uniquable' above).
type OccSet = UniqSet OccName

emptyOccSet :: OccSet
unitOccSet :: OccName -> OccSet
mkOccSet :: [OccName] -> OccSet
extendOccSet :: OccSet -> OccName -> OccSet
extendOccSetList :: OccSet -> [OccName] -> OccSet
unionOccSets :: OccSet -> OccSet -> OccSet
unionManyOccSets :: [OccSet] -> OccSet
minusOccSet :: OccSet -> OccSet -> OccSet
elemOccSet :: OccName -> OccSet -> Bool
isEmptyOccSet :: OccSet -> Bool
intersectOccSet :: OccSet -> OccSet -> OccSet
intersectsOccSet :: OccSet -> OccSet -> Bool
filterOccSet :: (OccName -> Bool) -> OccSet -> OccSet

-- Straight renamings of the UniqSet operations.
emptyOccSet = emptyUniqSet
unitOccSet = unitUniqSet
mkOccSet = mkUniqSet
extendOccSet = addOneToUniqSet
extendOccSetList = addListToUniqSet
unionOccSets = unionUniqSets
unionManyOccSets = unionManyUniqSets
minusOccSet = minusUniqSet
elemOccSet = elementOfUniqSet
isEmptyOccSet = isEmptyUniqSet
intersectOccSet = intersectUniqSets
intersectsOccSet s1 s2 = not (isEmptyOccSet (s1 `intersectOccSet` s2))
filterOccSet = filterUniqSet
{-
************************************************************************
* *
\subsection{Predicates and taking them apart}
* *
************************************************************************
-}
-- | The bare string of an 'OccName', without its namespace.
occNameString :: OccName -> String
occNameString (OccName _ s) = unpackFS s

-- | Replace the namespace, keeping the string.
setOccNameSpace :: NameSpace -> OccName -> OccName
setOccNameSpace sp (OccName _ occ) = OccName sp occ

isVarOcc, isTvOcc, isTcOcc, isDataOcc :: OccName -> Bool

isVarOcc (OccName VarName _) = True
isVarOcc _ = False

isTvOcc (OccName TvName _) = True
isTvOcc _ = False

isTcOcc (OccName TcClsName _) = True
isTcOcc _ = False

-- | /Value/ 'OccNames's are those that are either in
-- the variable or data constructor namespaces
isValOcc :: OccName -> Bool
isValOcc (OccName VarName _) = True
isValOcc (OccName DataName _) = True
isValOcc _ = False

isDataOcc (OccName DataName _) = True
isDataOcc _ = False

-- | Test if the 'OccName' is a data constructor that starts with
-- a symbol (e.g. @:@, or @[]@)
isDataSymOcc :: OccName -> Bool
isDataSymOcc (OccName DataName s) = isLexConSym s
isDataSymOcc _ = False
-- Pretty inefficient!

-- | Test if the 'OccName' is that for any operator (whether
-- it is a data constructor or variable or whatever)
isSymOcc :: OccName -> Bool
isSymOcc (OccName DataName s) = isLexConSym s
isSymOcc (OccName TcClsName s) = isLexSym s
isSymOcc (OccName VarName s) = isLexSym s
isSymOcc (OccName TvName s) = isLexSym s
-- Pretty inefficient!

parenSymOcc :: OccName -> SDoc -> SDoc
-- ^ Wrap parens around an operator
parenSymOcc occ doc | isSymOcc occ = parens doc
                    | otherwise = doc

startsWithUnderscore :: OccName -> Bool
-- ^ Haskell 98 encourages compilers to suppress warnings about unused
-- names in a pattern if they start with @_@: this implements that test
startsWithUnderscore occ = headFS (occNameFS occ) == '_'
{-
************************************************************************
* *
\subsection{Making system names}
* *
************************************************************************
Here's our convention for splitting up the interface file name space:
d... dictionary identifiers
(local variables, so no name-clash worries)
All of these other OccNames contain a mixture of alphabetic
and symbolic characters, and hence cannot possibly clash with
a user-written type or function name
$f... Dict-fun identifiers (from inst decls)
$dmop Default method for 'op'
$pnC n'th superclass selector for class C
$wf Worker for function 'f'
$sf.. Specialised version of f
D:C Data constructor for dictionary for class C
NTCo:T Coercion connecting newtype T with its representation type
TFCo:R Coercion connecting a data family to its representation type R
In encoded form these appear as Zdfxxx etc
:... keywords (export:, letrec: etc.)
--- I THINK THIS IS WRONG!
This knowledge is encoded in the following functions.
@mk_deriv@ generates an @OccName@ from the prefix and a string.
NB: The string must already be encoded!
-}
-- | Build an 'OccName' derived from another 'OccName'.
--
-- Note that the pieces of the name are passed in as a @[FastString]@ so that
-- the whole name can be constructed with a single 'concatFS', minimizing
-- unnecessary intermediate allocations.
mk_deriv :: NameSpace
         -> FastString -- ^ A prefix which distinguishes one sort of
                       -- derived name from another
         -> [FastString] -- ^ The name we are deriving from in pieces which
                         -- will be concatenated.
         -> OccName
mk_deriv occ_sp sys_prefix str =
  mkOccNameFS occ_sp (concatFS $ sys_prefix : str)

isDerivedOccName :: OccName -> Bool
-- ^ Test for definitions internally generated by GHC. This predicate
-- is used to suppress printing of internal definitions in some debug prints
isDerivedOccName occ =
  case occNameString occ of
    '$':c:_ | isAlphaNum c -> True -- E.g. $wfoo
    c:':':_ | isAlphaNum c -> True -- E.g. N:blah newtype coercions
    _other -> False

-- | Does the name carry the @$dm@ prefix used for default methods?
isDefaultMethodOcc :: OccName -> Bool
isDefaultMethodOcc occ =
  case occNameString occ of
    '$':'d':'m':_ -> True
    _ -> False

-- | Is an 'OccName' one of a Typeable @TyCon@ or @Module@ binding?
-- This is needed as these bindings are renamed differently.
-- See Note [Grand plan for Typeable] in TcTypeable.
isTypeableBindOcc :: OccName -> Bool
isTypeableBindOcc occ =
  case occNameString occ of
    '$':'t':'c':_ -> True -- mkTyConRepOcc
    '$':'t':'r':_ -> True -- Module binding
    _ -> False
mkDataConWrapperOcc, mkWorkerOcc,
        mkMatcherOcc, mkBuilderOcc,
        mkDefaultMethodOcc,
        mkClassDataConOcc, mkDictOcc,
        mkIPOcc, mkSpecOcc, mkForeignExportOcc, mkRepEqOcc,
        mkGenR, mkGen1R,
        mkDataConWorkerOcc, mkNewTyCoOcc,
        mkInstTyCoOcc, mkEqPredCoOcc, mkClassOpAuxOcc,
        mkCon2TagOcc, mkTag2ConOcc, mkMaxTagOcc,
        mkTyConRepOcc
  :: OccName -> OccName

-- These derived variables have a prefix that no Haskell value could have
mkDataConWrapperOcc = mk_simple_deriv varName "$W"
mkWorkerOcc = mk_simple_deriv varName "$w"
mkMatcherOcc = mk_simple_deriv varName "$m"
mkBuilderOcc = mk_simple_deriv varName "$b"
mkDefaultMethodOcc = mk_simple_deriv varName "$dm"
mkClassOpAuxOcc = mk_simple_deriv varName "$c"
mkDictOcc = mk_simple_deriv varName "$d"
mkIPOcc = mk_simple_deriv varName "$i"
mkSpecOcc = mk_simple_deriv varName "$s"
mkForeignExportOcc = mk_simple_deriv varName "$f"
mkRepEqOcc = mk_simple_deriv tvName "$r" -- In RULES involving Coercible
mkClassDataConOcc = mk_simple_deriv dataName "C:" -- Data con for a class
mkNewTyCoOcc = mk_simple_deriv tcName "N:" -- Coercion for newtypes
mkInstTyCoOcc = mk_simple_deriv tcName "D:" -- Coercion for type functions
mkEqPredCoOcc = mk_simple_deriv tcName "$co"

-- Used in derived instances
mkCon2TagOcc = mk_simple_deriv varName "$con2tag_"
mkTag2ConOcc = mk_simple_deriv varName "$tag2con_"
mkMaxTagOcc = mk_simple_deriv varName "$maxtag_"

-- TyConRepName stuff; see Note [Grand plan for Typeable] in TcTypeable
mkTyConRepOcc occ = mk_simple_deriv varName prefix occ
  where
    -- Promoted data constructors get a tick in the prefix too.
    prefix | isDataOcc occ = "$tc'"
           | otherwise = "$tc"

-- Generic deriving mechanism
mkGenR = mk_simple_deriv tcName "Rep_"
mkGen1R = mk_simple_deriv tcName "Rep1_"

-- Overloaded record field selectors
mkRecFldSelOcc :: String -> OccName
mkRecFldSelOcc s = mk_deriv varName "$sel" [fsLit s]

-- | Derive by prepending a single prefix to the original name.
mk_simple_deriv :: NameSpace -> FastString -> OccName -> OccName
mk_simple_deriv sp px occ = mk_deriv sp px [occNameFS occ]

-- Data constructor workers are made by setting the name space
-- of the data constructor OccName (which should be a DataName)
-- to VarName
mkDataConWorkerOcc datacon_occ = setOccNameSpace varName datacon_occ

-- | Auxiliary superclass selector, e.g. @$cp3C@ for the 3rd superclass of C.
mkSuperDictAuxOcc :: Int -> OccName -> OccName
mkSuperDictAuxOcc index cls_tc_occ
  = mk_deriv varName "$cp" [fsLit $ show index, occNameFS cls_tc_occ]

mkSuperDictSelOcc :: Int -- ^ Index of superclass, e.g. 3
                  -> OccName -- ^ Class, e.g. @Ord@
                  -> OccName -- ^ Derived 'Occname', e.g. @$p3Ord@
mkSuperDictSelOcc index cls_tc_occ
  = mk_deriv varName "$p" [fsLit $ show index, occNameFS cls_tc_occ]

mkLocalOcc :: Unique -- ^ Unique to combine with the 'OccName'
           -> OccName -- ^ Local name, e.g. @sat@
           -> OccName -- ^ Nice unique version, e.g. @$L23sat@
mkLocalOcc uniq occ
  = mk_deriv varName "$L" [fsLit $ show uniq, occNameFS occ]
  -- The Unique might print with characters
  -- that need encoding (e.g. 'z'!)
-- The Unique might print with characters
-- that need encoding (e.g. 'z'!)
-- | Derive a name for the representation type constructor of a
-- @data@\/@newtype@ instance.
mkInstTyTcOcc :: String -- ^ Family name, e.g. @Map@
-> OccSet -- ^ avoid these Occs
-> OccName -- ^ @R:Map@
mkInstTyTcOcc str = chooseUniqueOcc tcName ('R' : ':' : str)
mkDFunOcc :: String -- ^ Typically the class and type glommed together e.g. @OrdMaybe@.
-- Only used in debug mode, for extra clarity
-> Bool -- ^ Is this a hs-boot instance DFun?
-> OccSet -- ^ avoid these Occs
-> OccName -- ^ E.g. @$f3OrdMaybe@
-- In hs-boot files we make dict funs like $fx7ClsTy, which get bound to the real
-- thing when we compile the mother module. Reason: we don't know exactly
-- what the mother module will call it.
mkDFunOcc info_str is_boot set
= chooseUniqueOcc VarName (prefix ++ info_str) set
where
prefix | is_boot = "$fx"
| otherwise = "$f"
mkDataTOcc, mkDataCOcc
:: OccName -- ^ TyCon or data con string
-> OccSet -- ^ avoid these Occs
-> OccName -- ^ E.g. @$f3OrdMaybe@
-- data T = MkT ... deriving( Data ) needs definitions for
-- $tT :: Data.Generics.Basics.DataType
-- $cMkT :: Data.Generics.Basics.Constr
mkDataTOcc occ = chooseUniqueOcc VarName ("$t" ++ occNameString occ)
mkDataCOcc occ = chooseUniqueOcc VarName ("$c" ++ occNameString occ)
{-
Sometimes we need to pick an OccName that has not already been used,
given a set of in-use OccNames.
-}
-- | Pick an 'OccName' based on the given string that is not already in the
-- set: try the bare name first, then append 0, 1, 2, ... until one is free.
chooseUniqueOcc :: NameSpace -> String -> OccSet -> OccName
chooseUniqueOcc ns str set = loop (mkOccName ns str) (0::Int)
  where
    loop occ n
      | occ `elemOccSet` set = loop (mkOccName ns (str ++ show n)) (n+1)
      | otherwise = occ
{-
We used to add a '$m' to indicate a method, but that gives rise to bad
error messages from the type checker when we print the function name or pattern
of an instance-decl binding. Why? Because the binding is zapped
to use the method name in place of the selector name.
(See TcClassDcl.tcMethodBind)
The way it is now, -ddump-xx output may look confusing, but
you can always say -dppr-debug to get the uniques.
However, we *do* have to zap the first character to be lower case,
because overloaded constructors (blarg) generate methods too.
And convert to VarName space
e.g. a call to constructor MkFoo where
data (Ord a) => Foo a = MkFoo a
If this is necessary, we do it by prefixing '$m'. These
guys never show up in error messages. What a hack.
-}
-- | Convert a selector name to the method namespace: names already in the
-- variable namespace are kept; anything else gets a @$m@ variable name.
mkMethodOcc :: OccName -> OccName
mkMethodOcc occ@(OccName VarName _) = occ
mkMethodOcc occ = mk_simple_deriv varName "$m" occ
{-
************************************************************************
* *
\subsection{Tidying them up}
* *
************************************************************************
Before we print chunks of code we like to rename it so that
we don't have to print lots of silly uniques in it. But we mustn't
accidentally introduce name clashes! So the idea is that we leave the
OccName alone unless it accidentally clashes with one that is already
in scope; if so, we tack on '1' at the end and try again, then '2', and
so on till we find a unique one.
There's a wrinkle for operators. Consider '>>='. We can't use '>>=1'
because that isn't a single lexeme. So we encode it to 'lle' and *then*
tack on the '1', if necessary.
Note [TidyOccEnv]
~~~~~~~~~~~~~~~~~
type TidyOccEnv = UniqFM Int
* Domain = The OccName's FastString. These FastStrings are "taken";
make sure that we don't re-use
* Int, n = A plausible starting point for new guesses
There is no guarantee that "FSn" is available;
you must look that up in the TidyOccEnv. But
it's a good place to start looking.
* When looking for a renaming for "foo2" we strip off the "2" and start
with "foo". Otherwise if we tidy twice we get silly names like foo23.
However, if it started with digits at the end, we always make a name
with digits at the end, rather than shortening "foo2" to just "foo",
even if "foo" is unused. Reasons:
- Plain "foo" might be used later
- We use trailing digits to subtly indicate a unification variable
in typechecker error message; see TypeRep.tidyTyVarBndr
We have to take care though! Consider a machine-generated module (#10370)
module Foo where
a1 = e1
a2 = e2
...
a2000 = e2000
Then "a1", "a2" etc are all marked taken. But now if we come across "a7" again,
we have to do a linear search to find a free one, "a2001". That might just be
acceptable once. But if we now come across "a8" again, we don't want to repeat
that search.
So we use the TidyOccEnv mapping for "a" (not "a7" or "a8") as our base for
starting the search; and we make sure to update the starting point for "a"
after we allocate a new one.
Note [Tidying multiple names at once]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
> :t (id,id,id)
Every id contributes a type variable to the type signature, and all of them are
"a". If we tidy them one by one, we get
(id,id,id) :: (a2 -> a2, a1 -> a1, a -> a)
which is a bit unfortunate, as it unfairly renames only two of them. What we
would like to see is
(id,id,id) :: (a3 -> a3, a2 -> a2, a1 -> a1)
To achieve this, the function avoidClashesOccEnv can be used to prepare the
TidyEnv, by “blocking” every name that occurs twice in the map. This way, none
of the "a"s will get the privilege of keeping this name, and all of them will
get a suitable number by tidyOccName.
This prepared TidyEnv can then be used with tidyOccName. See tidyTyCoVarBndrs
for an example where this is used.
This is #12382.
-}
-- | The set of in-scope OccName FastStrings, each mapped to a plausible
-- starting point for the next numeric suffix.  See Note [TidyOccEnv].
type TidyOccEnv = UniqFM Int -- The in-scope OccNames
                             -- See Note [TidyOccEnv]

emptyTidyOccEnv :: TidyOccEnv
emptyTidyOccEnv = emptyUFM

-- | Initialise with names to avoid!
initTidyOccEnv :: [OccName] -> TidyOccEnv
initTidyOccEnv = foldl' add emptyUFM
  where
    -- Mark each name as taken, with 1 as the first suffix to try.
    add env (OccName _ fs) = addToUFM env fs 1

-- | Remove the given strings from the set of taken names.
delTidyOccEnvList :: TidyOccEnv -> [FastString] -> TidyOccEnv
delTidyOccEnvList = delListFromUFM
-- see Note [Tidying multiple names at once]
-- | Block every FastString that occurs more than once in the given names
-- (and is not already taken in the env), so that a later tidy pass renames
-- all of its occurrences uniformly.
-- See Note [Tidying multiple names at once].
avoidClashesOccEnv :: TidyOccEnv -> [OccName] -> TidyOccEnv
avoidClashesOccEnv initEnv = walk initEnv emptyUFM
  where
    walk acc _ [] = acc
    walk acc seen (OccName _ fs : rest)
      | fs `elemUFM` acc  = walk acc seen rest                  -- already taken/blocked
      | fs `elemUFM` seen = walk (addToUFM acc fs 1) seen rest  -- second sighting: block it
      | otherwise         = walk acc (addToUFM seen fs ()) rest -- first sighting: remember
-- | Rename an OccName, if necessary, so that it does not clash with any
-- name already recorded in the 'TidyOccEnv'; returns the updated env.
-- See Note [TidyOccEnv] for the suffix-guessing scheme.
tidyOccName :: TidyOccEnv -> OccName -> (TidyOccEnv, OccName)
tidyOccName env occ@(OccName occ_sp fs)
  | not (fs `elemUFM` env)
  = -- Desired OccName is free, so use it,
    -- and record in 'env' that it's no longer available
    (addToUFM env fs 1, occ)

  | otherwise
  = case lookupUFM env base1 of
      Nothing -> (addToUFM env base1 2, OccName occ_sp base1)
      Just n  -> find 1 n
  where
    base :: String -- Drop trailing digits (see Note [TidyOccEnv])
    base  = dropWhileEndLE isDigit (unpackFS fs)
    base1 = mkFastString (base ++ "1")

    -- Search for an unused "base<n>"; both accumulators are strict to
    -- avoid thunk build-up over long searches.
    find !k !n
      = case lookupUFM env new_fs of
          Just {} -> find (k+1 :: Int) (n+k)
            -- By using n+k, the n argument to find goes
            --    1, add 1, add 2, add 3, etc which
            -- moves at quadratic speed through a dense patch

          Nothing -> (new_env, OccName occ_sp new_fs)
      where
        new_fs = mkFastString (base ++ show n)
        new_env = addToUFM (addToUFM env new_fs 1) base1 (n+1)
          -- Update: base1,  so that next time we'll start where we left off
          --         new_fs, so that we know it is taken
          -- If they are the same (n==1), the former wins
          -- See Note [TidyOccEnv]
{-
************************************************************************
* *
Binary instance
Here rather than in GHC.Iface.Binary because OccName is abstract
* *
************************************************************************
-}
-- | NameSpaces are serialised as a single tag byte.
instance Binary NameSpace where
    -- Redundant single-statement 'do' blocks removed; behaviour unchanged.
    put_ bh VarName   = putByte bh 0
    put_ bh DataName  = putByte bh 1
    put_ bh TvName    = putByte bh 2
    put_ bh TcClsName = putByte bh 3
    get bh = do
        h <- getByte bh
        case h of
          0 -> return VarName
          1 -> return DataName
          2 -> return TvName
          -- Any unrecognised tag decodes as TcClsName, matching the
          -- original decoder's catch-all case.
          _ -> return TcClsName
-- | An OccName is serialised as its namespace tag followed by its string.
instance Binary OccName where
    put_ bh (OccName sp s) = put_ bh sp >> put_ bh s
    get bh = do
        sp <- get bh
        s  <- get bh
        return (OccName sp s)
| sdiehl/ghc | compiler/basicTypes/OccName.hs | bsd-3-clause | 33,445 | 0 | 14 | 8,903 | 5,550 | 2,966 | 2,584 | 458 | 5 |
module HttpClient where
import Network.HTTP.Client
import Network.HTTP.Types.Status (statusCode)
import Network.HTTP.Types.Header
import qualified Data.ByteString.Char8 as Byte
import Data.ByteString.Lazy.Char8 as C
import Game
import MExp
import Data.Maybe
getUrl gameId player = "http://tictactoe.homedir.eu/game/" ++ gameId ++ "/player/" ++ player
-- | Start a game session: player \"1\" opens with an attack; any other
-- player id joins defensively.  (Redundant do/if scaffolding replaced
-- with guards; behaviour unchanged.)
play :: String -> String -> IO()
play gameId player
  | player == "1" = attack gameId
  | otherwise     = defend gameId
-- | Open the game as player 1 (\"x\" marks) by posting the first move.
attack :: String -> IO()
attack gameId = do
  let url = getUrl gameId "1"
  post url "x" []

-- | Join the game as player 2 (\"o\" marks), polling for the opponent first.
defend :: String -> IO()
defend gameId = do
  let url = getUrl gameId "2"
  get url "o" []
-- | Poll the server for the opponent's move list, then answer it via
-- 'post'.  The get/post pair recurses until 'winner' reports a result.
get :: String -> String -> [Move] -> IO()
get url player moves = do
  let checkWinner = winner moves
  if checkWinner == Nothing
    then
      do
        manager <- newManager defaultManagerSettings
        initialRequest <- parseUrlThrow $ url
        -- Request the board state in m-expression format.
        let request = initialRequest {
              method = Byte.pack "GET",
              requestHeaders = [(hContentType, Byte.pack "application/m-expr"),(hAccept, Byte.pack "application/m-expr")]
            }
        response <- httpLbs request manager
        let movesFromOponent = getMovesFromMExp (unpack $ responseBody response)
        -- The opponent's latest move is the last element of the list.
        -- NOTE(review): this head-pattern is partial; it crashes if the
        -- server ever returns an empty move list -- confirm that cannot
        -- happen for player 2's first poll.
        let (oponent:t) = Prelude.reverse movesFromOponent
        Prelude.putStrLn ("GET")
        Prelude.putStrLn ( show $ oponent )
        Prelude.putStrLn ("")
        post url player movesFromOponent
    else
      -- Game finished: report the outcome.
      Prelude.putStrLn ( getGameState checkWinner )
-- | Compute our next move with 'miniMax', send the full move list to the
-- server, then go back to polling via 'get'.  Recursion ends when
-- 'winner' reports a finished game.
--
-- Fixes: the response of the POST was bound to an unused name (now
-- explicitly discarded), and stray non-code text fused onto the final
-- line has been removed.
post :: String -> String -> [Move] -> IO()
post url player moves = do
  let checkWinner = winner moves
  if checkWinner == Nothing
    then do
      manager <- newManager defaultManagerSettings
      initialRequest <- parseUrlThrow url
      let playerMove = miniMax player moves
      let movesToSend = moves ++ [playerMove]
      let movesToSendExp = movesToMExp movesToSend
      Prelude.putStrLn ("POST")
      Prelude.putStrLn ( show $ playerMove )
      Prelude.putStrLn ("")
      let request = initialRequest
            { method = Byte.pack "POST"
            , requestHeaders = [ (hContentType, Byte.pack "application/m-expr")
                               , (hAccept, Byte.pack "application/m-expr") ]
            , requestBody = RequestBodyLBS $ C.pack (movesToSendExp)
            }
      -- The response body is not needed; the next 'get' refetches state.
      _ <- httpLbs request manager
      get url player movesToSend
    else
      Prelude.putStrLn ( getGameState checkWinner )
-- | These are the objects of the TPFS system: namely, files and
-- tags. This module provides the low-level functions for managing
-- file/tag info and their respective tables, as well as somewhat
-- higher-level functions for more sane access.
module System.TPFS.Objects (
-- * File objects
-- ** The file table
FileID,
isFileIDInUse,
lookupByFileID,
addToFileTable,
removeFromFileTable,
-- ** File information blocks
FileInfo(..),
getFileInfo,
putFileInfo,
createFileInfo,
removeFileInfo,
-- ** High-level access
-- * Tag objects
-- ** The tag table
TagHash,
lookupByTagHash,
addToTagTable,
removeFromTagTable,
-- ** Tag information blocks
TagInfo(..),
getTagInfo,
putTagInfo,
createTagInfo,
removeTagInfo
-- ** High-level access
) where
import qualified Data.ByteString.Lazy as B
import qualified Data.ByteString as BS
import Data.Word
import System.Random
import System.TPFS.Device
import System.TPFS.Filesystem
-- | File identification numbers are 128 bits long and are usually
-- randomly generated. As such, there is a 'Random' instance for
-- 'FileID's. Be careful when using this, however. It is possible
-- (though unlikely) for a collision to occur, so it is strongly
-- recommended to use 'isFileIDInUse' to ensure the 'FileID' is not
-- already in use when generating a 'FileID' in this manner.
-- Two strict 64-bit words forming the 128-bit identifier.
data FileID = FileID !Word64 !Word64
            deriving Eq

instance Random FileID where
  -- NOTE(review): 'randomR' is an unimplemented stub; calling it crashes.
  -- Ranged generation over the 128-bit space needs a real implementation.
  randomR = undefined
  -- Generate both halves independently, threading the generator through.
  random g = (FileID a b, g'') where (a, g' ) = random g
                                     (b, g'') = random g'
-- | Searches the file table to check if the given 'FileID' is
-- currently in use. This is useful when generating random 'FileID's.
isFileIDInUse :: Device m h
=> Filesystem m h
-> FileID -- ^ The 'FileID' to check.
-> m Bool -- ^ 'True' if the 'FileID' is
-- currently in use; 'False' if not.
isFileIDInUse = undefined
-- | Searches the file table for a given 'FileID' and returns the
-- address at which its 'FileInfo' can be found.
lookupByFileID :: Device m h
=> Filesystem m h
-> FileID -- ^ The ID of the file to look up.
-> m (Maybe Address) -- ^ 'Nothing' if the 'FileID' does not exist;
-- @'Just' 'Address'@ otherwise.
lookupByFileID = undefined
-- | Links a file information record to the file table so that it may
-- be discovered.
--
-- Note that a file information record (represented by 'FileInfo')
-- /must/ be linked to the file table in order for it to be found.
addToFileTable :: Device m h
=> Filesystem m h
-> (FileID, Address) -- ^ The ID of the file in
-- question, and the address of
-- the first block of its record.
-> m ()
addToFileTable = undefined
-- | Unlinks a file information table from the file table. This
-- prevents the file from being discovered.
--
-- It is possible for a file to be \'lost\' if it is not linked, so
-- take care when using this function manually.
removeFromFileTable :: Device m h
=> Filesystem m h
-> FileID -- ^ The file to be unlinked.
-> m ()
removeFromFileTable = undefined
-- | The 'FileInfo' structure attaches the blocks of a file to its
-- identification ('FileID' and tags) and other information vital for
-- reading the file (offset, length, lock).
data FileInfo = FileInfo { fileID :: FileID -- ^ The identification number of the file.
, firstBlock :: Address -- ^ The first block in the file.
, lastBlock :: Address -- ^ The last block in the file. Makes appending faster.
, fileOffset :: Word64 -- ^ Describes the offset of the content within the blocks'
-- data. This could potentially allow for quick prepending of
-- data to the file.
, fileLength :: Word64 -- ^ The apparent (not necessarily actual block-wise) byte
-- length of the file.
, isLocked :: Bool -- ^ Whether the file is currently locked for writing. If the
-- file is locked for writing, the implementation must raise
-- an error when attempting to open the file for writing.
, tagHashes :: [TagHash] -- ^ The hashes of each of the tags the file is attached to.
}
-- | Reads the file information from a file information record.
getFileInfo :: Device m h
=> Filesystem m h
-> Address -- ^ The first block of the file information record.
-> m FileInfo
getFileInfo = undefined
-- | Modifies the file information in a file information record. Does
-- not affect tag structures linked via 'tagHashes'.
putFileInfo :: Device m h
=> Filesystem m h
-> Address -- ^ The first block of the file information record to be modified.
-> FileInfo -- ^ The file information to place in the record.
-> m ()
putFileInfo = undefined
-- | Creates a new file information record. Won't link the file to the
-- file table, nor affect tag structures linked via 'tagHashes'.
createFileInfo :: Device m h
=> Filesystem m h
-> FileInfo -- ^ The initial contents of the file information record.
-> m Address -- ^ A pointer to the first block of the newly created record.
createFileInfo = undefined
-- | Frees the blocks used by a file information record. Does not free
-- blocks used by the file itself.
--
-- Note: 'removeFileInfo' won't automatically unlink the file from the
-- file table nor any tags linked to it. These operations should both
-- be done before calling 'removeFileInfo'.
removeFileInfo :: Device m h
=> Filesystem m h
-> Address -- ^ The address of the first block
-- in the file information record to
-- be removed.
-> m ()
removeFileInfo = undefined
-- | Identifies tags with a SHA-256 hash of their data.
data TagHash = TagHash !Word64 !Word64 !Word64 !Word64
deriving Eq
-- | Looks for a tag information record by its hash.
lookupByTagHash :: Device m h
=> Filesystem m h
-> TagHash -- ^ The hash of the tag to look up.
-> m (Maybe Address) -- ^ 'Just' the address of the first block of the tag
-- information record if found, or 'Nothing' if not found.
lookupByTagHash = undefined
-- | Links a tag information record to the tag table.
addToTagTable :: Device m h
=> Filesystem m h
-> (TagHash, Address) -- ^ The hash of the tag in question, and the
-- address of its information record.
-> m ()
addToTagTable = undefined
-- | Removes a tag information record from the tag table.
--
-- Note: There is really no good reason to do this, as tags are
-- usually managed automatically.
removeFromTagTable :: Device m h
=> Filesystem m h
-> TagHash
-> m ()
removeFromTagTable = undefined
-- | Describes a tag information record.
data TagInfo = TagInfo { tagHash :: TagHash -- ^ The hash of the 'tagData'.
, tagData :: BS.ByteString -- ^ The data (contents) of the tag. The length/contents
-- of a tag's data are not restricted. Note: This is a strict
-- ByteString, unlike the lazy ByteStrings that are often
-- used throughout this package.
, fileIDs :: [FileID] -- ^ The IDs of the files attached to the tag.
}
-- | Reads a 'TagInfo' structure from a tag information record on
-- disk.
getTagInfo :: Device m h
=> Filesystem m h
-> Address -- ^ The first block of the tag information record.
-> m TagInfo
getTagInfo = undefined
-- | Writes a 'TagInfo' structure to a tag information record on disk.
putTagInfo :: Device m h
=> Filesystem m h
-> Address -- ^ The first block of the tag information record to be modified.
-> TagInfo -- ^ The tag information to place in the record.
-> m ()
putTagInfo = undefined
-- | Creates a new tag information record. Won't link the tag to the
-- tag table, nor affect files linked via 'fileIDs'.
--
-- Note: A tag information record is fairly useless if not linked.
createTagInfo :: Device m h
=> Filesystem m h
-> TagInfo -- ^ The initial contents of the tag information record.
-> m Address -- ^ A pointer to the first block of the newly created record.
createTagInfo = undefined
-- | Frees the blocks used by a tag information record. This function
-- is mostly for internal use, as tag information records are usually
-- managed automatically.
--
-- Note: 'removeTagInfo' won't automatically unlink the tag from the
-- tag table nor any files linked to it. These operations should both
-- be done before calling 'removeTagInfo'.
removeTagInfo :: Device m h
=> Filesystem m h
-> Address -- ^ The address of the first block in
-- the tag information record to be
-- removed.
-> m ()
removeTagInfo = undefined
| devyn/TPFS | System/TPFS/Objects.hs | bsd-3-clause | 10,148 | 0 | 10 | 3,527 | 991 | 582 | 409 | 133 | 1 |
module Control.Distributed.Process.Internal.Containers where
-- | A minimal map-like container interface.  The functional dependency
-- @m -> k@ fixes the key type per container type.
class (Eq k, Functor m) => Map m k | m -> k where
  -- | The map with no entries.
  empty :: m a
  -- | Is the key present?
  member :: k -> m a -> Bool
  -- | Insert (or overwrite) a key/value pair.
  insert :: k -> a -> m a -> m a
  -- | Remove a key if present.
  delete :: k -> m a -> m a
  -- | Look up a key.  NOTE(review): no 'Maybe' in the result, so instances
  -- are presumably partial on missing keys -- confirm before relying on it.
  lookup :: k -> m a -> a
  -- | Keep only values satisfying the predicate.
  filter :: (a -> Bool) -> m a -> m a
  -- | Keep only entries whose key and value satisfy the predicate.
  filterWithKey :: (k -> a -> Bool) -> m a -> m a
| haskell-distributed/distributed-process-platform | src/Control/Distributed/Process/Platform/Internal/Containers.hs | bsd-3-clause | 375 | 0 | 10 | 140 | 179 | 92 | 87 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TypeFamilies #-}
module ObjectSpec where
import Control.Applicative hiding (empty)
import Control.Exception.Base
import Control.Lens
import Data.Dynamic
import Data.Functor.Compose
import GHC.Real (Ratio((:%)), (%))
import Test.Hspec
import Test.Hspec.QuickCheck (prop)
import Test.QuickCheck
import Test.QuickCheck.Function as Fun
import Data.Object.Dynamic
import Data.Object.Dynamic.Examples.PointParticle
import Data.Object.Dynamic.Presets
import Data.Object.Dynamic.Type
-- | A point particle backed by a dynamic object with exact ('Rational')
-- arithmetic, so property tests compare values without float error.
newtype Particle = Particle (Object Precise)
                 deriving (Typeable, Objective)

instance UseReal Particle where
  type UnderlyingReal Particle = Rational

-- Particles compare equal iff their (possibly absent) kinetic energies do.
instance Eq Particle where
  x == y = x ^? kineticEnergy == y ^? kineticEnergy

-- | Hspec selector matching only the 'StackOverflow' async exception.
stackOverflowException :: Selector AsyncException
stackOverflowException StackOverflow = True
stackOverflowException _ = False
spec :: Spec
spec = do
describe "Point particle library" $ do
prop "calculates the energy from mass and velocity" $ \m v ->
(fromMassVelocity m v :: Particle) ^? kineticEnergy /= Nothing
prop "calculates the energy from mass and momentum" $ \m v ->
(fromMassMomentum m v :: Particle) ^? kineticEnergy /= Nothing
it "avoids infinite loop even if neither velocity nor momentum is known" $
(empty & insert Mass 42 :: Particle) ^? kineticEnergy == Nothing
prop "reproduces the particle from mass and velocity" $ \m v ->
(m > 0) ==>
let p0 :: Particle
mp1,mp2 :: Maybe Particle
p0 = fromMassVelocity m v
mp1 = fromMassMomentum <$> p0^?mass <*> p0^?momentum
mp2 = (\p1 -> fromMassVelocity <$> p1^?mass <*> p1^?velocity) =<< mp1
in Just p0 == mp2
describe "Objects, as Traversal," $ do
prop "satisfies the first law : t pure ≡ pure" $ \m v ->
let p = fromMassVelocity m v in
mass pure p == (pure p :: Maybe Particle) &&
velocity pure p == (pure p :: Either () Particle) &&
momentum pure p == (pure p :: [Particle]) &&
kineticEnergy pure p == (pure p :: ([Particle], Particle))
prop "satisfies the second law : fmap (t f) . t g ≡ getCompose . t (Compose . fmap f . g)" $
\f' g' m v ->
let f :: Rational -> Maybe Rational
g :: Rational -> [Rational]
f = fmap toRatio . Fun.apply f' . fromRatio
g = fmap toRatio . Fun.apply g' . fromRatio
fromRatio :: Rational -> (Integer, Integer)
fromRatio (x:%y) = (x,y)
toRatio :: (Integer, Integer) -> Rational
toRatio (x,y) = x % (if y == 0 then 1 else y)
p :: Particle
p = fromMassVelocity m v
in
(fmap (mass f) . (mass g)) p ==
(getCompose . mass (Compose . fmap f . g) $ p) &&
(fmap (kineticEnergy f) . (kineticEnergy g)) p ==
(getCompose . kineticEnergy (Compose . fmap f . g) $ p)
prop "satisfies the second law for vector members, too" $
\f' g' m v ->
let f :: Vec Rational -> Maybe (Vec Rational)
g :: Vec Rational -> [Vec Rational]
f = fmap toRatio . Fun.apply f' . fromRatio
g = fmap toRatio . Fun.apply g' . fromRatio
fromRatio :: Vec Rational -> ((Integer, Integer), (Integer, Integer))
fromRatio (Vec (ax:%ay) (bx:%by)) = ((ax,ay), (bx,by))
toRatio :: ((Integer, Integer), (Integer, Integer)) -> Vec Rational
toRatio ((ax,ay), (bx,by)) = Vec (mk ax ay) (mk bx by)
mk x y = x % (if y == 0 then 1 else y)
p :: Particle
p = fromMassVelocity m v
in
(fmap (velocity f) . (velocity g)) p ==
(getCompose . velocity (Compose . fmap f . g) $ p) &&
(fmap (momentum f) . (momentum g)) p ==
(getCompose . momentum (Compose . fmap f . g) $ p)
| nushio3/dynamic-object | test/ObjectSpec.hs | bsd-3-clause | 4,081 | 0 | 40 | 1,129 | 1,345 | 715 | 630 | 88 | 3 |
--------------------------------------------------------------------------------
-- |
-- Module : TicTacToe
-- Copyright : (c) 2008-2010 Galois, Inc.
-- License : BSD3
--
-- Maintainer : John Launchbury <john@galois.com>
-- Stability :
-- Portability : concurrency
--
-- A test file for the Orc EDSL
--
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-- Use the correspondence between a magic square and a tictactoe board
-- 2 9 4
-- 7 5 3
-- 6 1 8
-- A win is any three numbers that add up to 15.
module TicTacToe(
module Orc
, Game(..)
, board
, example
, win
) where
import Orc
import Data.List
-----------------------
-- | A position: the magic-square cells held by each side.
data Game = Game {me,you :: [Int]} deriving Show

-- | The magic square underlying the board; any three cells summing to 15
-- correspond to a tic-tac-toe line.
board :: [[Int]]
board = [[2, 9, 4],
         [7, 5, 3],
         [6, 1, 8]]

-- Print the raw magic-square numbering (suppressed Orc output).
showboard = silent $ liftIO $ putStr $ grid $
            map (map show) board

-- A sample mid-game position, for experimentation.
example = Game {me = [7,8], you = [9,3]}

-- Swap the two players' holdings.
switch (Game xs ys) = Game ys xs

-- The empty starting position.
initial = Game [] []
-- | Render the position as a tic-tac-toe grid (X = me, O = you).
showGame :: Game -> Orc ()
showGame (Game xs ys)
  = putStrLine $ grid $ map (map (position xs ys)) board
-- | The mark shown for cell @n@: \"X\" if held by the X player, \"O\" if
-- held by the O player, otherwise a blank cell.  (Type signature added;
-- the final 'True' guard is now the idiomatic 'otherwise'.)
position :: [Int] -> [Int] -> Int -> String
position xs os n
  | n `elem` xs = "X"
  | n `elem` os = "O"
  | otherwise   = " "
-- | Render rows of cell strings as an ASCII grid.  Generalised from the
-- original pattern match, which was partial (it only accepted exactly
-- three rows of three cells); output is byte-identical for the 3x3 board
-- and now works for any rectangular layout of single-character cells.
grid :: [[String]] -> String
grid rows = concatMap (\r -> horiz ++ row r) rows ++ horiz ++ "\n"
  where
    -- Column count taken from the first row (0 for an empty grid).
    cols = case rows of
             (r:_) -> length r
             []    -> 0
    horiz = "+" ++ concat (replicate cols "---+") ++ "\n"
    row cells = "|" ++ concatMap (\c -> " " ++ c ++ " |") cells ++ "\n"
-- | Read the human player's move from stdin, re-prompting on occupied
-- cells or unparsable input.
getMove :: Game -> IO Game
getMove g@(Game xs ys) =
  do putStr "What's your play? "
     y <- fmap read getLine
     if clash (xs++ys) y
       then putStr "That position is taken...\n" >> getMove g
       else return (Game xs (y:ys))
  -- NOTE(review): 'read' is lazy, so a parse failure only surfaces when
  -- 'clash' forces y; this handler catches it and re-prompts.  The
  -- wildcard also swallows any other exception from the block -- confirm
  -- that is intended.
  `catch` \_ -> do
     putStr "I didn't understand that. Enter 1..9: "
     getMove g
----------------------
-- | Succeeds (as an Orc computation) iff some three of the given cells
-- sum to 15, i.e. form a line of the magic square.
win :: [Int] -> Orc ()
win xs = do
  ys <- powerset xs
  guard (length ys == 3)
  guard (sum ys == 15)

-- | All subsets of a list, offered as nondeterministic Orc choices.
powerset :: [a] -> Orc [a]
powerset xs = filterM (const $ list [False,True]) xs

-- Is the cell already occupied?
clash xs x = x `elem` xs
-- | The computer's move: show the board, then prefer any immediately
-- winning move; via '<?>' fall back to the first free cell otherwise.
nextMove :: Game -> Orc Game
nextMove g@(Game xs ys) = showGame g
  >> (do
        n <- list [1..9]
        guard $ not $ clash (xs++ys) n
        -- Keep only moves that complete a winning line for us.
        (win (n:xs) >> return (Game (n:xs) ys))
        -- <|> (win (n:ys) >> return (Game (n:xs) ys))
     ) <?> (do
        -- Fallback: any legal move at all.
        n <- list [1..9]
        guard $ not $ clash (xs++ys) n
        return (Game (n:xs) ys))
------------
-- | One computer turn: commit to a single candidate move ('cut'), then
-- either declare a win, a draw (board full), or hand over to the human.
myTurn :: Game -> Orc ()
myTurn g = do
  (Game xs ys) <- cut $ nextMove g
  (win xs >> putStrLine "I win!!")
    <?> (guard (length (xs++ys) == 9) >> putStrLine "It's a draw.")
    <?> yourTurn (Game xs ys)

-- | One human turn: read a move, then check for a win, a draw, or
-- continue with the computer's turn.
yourTurn :: Game -> Orc ()
yourTurn g = do
  showGame g
  (Game xs ys) <- liftIO (getMove g)
  (win ys >> putStrLine "You win...")
    <?> (guard (length (xs++ys) == 9) >> putStrLine "It's a draw.")
    <?> myTurn (Game xs ys)
| GaloisInc/orc | src/Examples/tictactoe.hs | bsd-3-clause | 2,749 | 0 | 16 | 749 | 1,113 | 582 | 531 | 74 | 2 |
-- |
-- Module : Main
-- Copyright : (c) 2018 Harendra Kumar
--
-- License : BSD3
-- Maintainer : streamly@composewell.com
import Control.DeepSeq (NFData)
import Data.Functor.Identity (Identity, runIdentity)
import System.Random (randomRIO)
import qualified NestedOps as Ops
import Streamly
import Gauge
-- | Benchmark an IO action over a random input.  The range (1,1) means
-- the value is always 1 -- the randomRIO call presumably just defeats
-- GHC's sharing/constant-folding between benchmark runs.
benchIO :: (NFData b) => String -> (Int -> IO b) -> Benchmark
benchIO name f = bench name $ nfIO $ randomRIO (1,1) >>= f

-- | Pure ('Identity') variant; currently unused, hence the underscore.
_benchId :: (NFData b) => String -> (Int -> Identity b) -> Benchmark
_benchId name f = bench name $ nf (\g -> runIdentity (g 1)) f
main :: IO ()
main =
-- TBD Study scaling with 10, 100, 1000 loop iterations
defaultMain
[ bgroup "serially"
[ benchIO "toNullAp" $ Ops.toNullAp serially
, benchIO "toNull" $ Ops.toNull serially
, benchIO "toNull3" $ Ops.toNull3 serially
, benchIO "toList" $ Ops.toList serially
-- , benchIO "toListSome" $ Ops.toListSome serially
, benchIO "filterAllOut" $ Ops.filterAllOut serially
, benchIO "filterAllIn" $ Ops.filterAllIn serially
, benchIO "filterSome" $ Ops.filterSome serially
, benchIO "breakAfterSome" $ Ops.breakAfterSome serially
]
, bgroup "wSerially"
[ benchIO "toNullAp" $ Ops.toNullAp wSerially
, benchIO "toNull" $ Ops.toNull wSerially
, benchIO "toNull3" $ Ops.toNull3 wSerially
, benchIO "toList" $ Ops.toList wSerially
-- , benchIO "toListSome" $ Ops.toListSome wSerially
, benchIO "filterAllOut" $ Ops.filterAllOut wSerially
, benchIO "filterAllIn" $ Ops.filterAllIn wSerially
, benchIO "filterSome" $ Ops.filterSome wSerially
, benchIO "breakAfterSome" $ Ops.breakAfterSome wSerially
]
, bgroup "aheadly"
[ benchIO "toNullAp" $ Ops.toNullAp aheadly
, benchIO "toNull" $ Ops.toNull aheadly
, benchIO "toNull3" $ Ops.toNull3 aheadly
, benchIO "toList" $ Ops.toList aheadly
-- , benchIO "toListSome" $ Ops.toListSome aheadly
, benchIO "filterAllOut" $ Ops.filterAllOut aheadly
, benchIO "filterAllIn" $ Ops.filterAllIn aheadly
, benchIO "filterSome" $ Ops.filterSome aheadly
, benchIO "breakAfterSome" $ Ops.breakAfterSome aheadly
]
, bgroup "asyncly"
[ benchIO "toNullAp" $ Ops.toNullAp asyncly
, benchIO "toNull" $ Ops.toNull asyncly
, benchIO "toNull3" $ Ops.toNull3 asyncly
, benchIO "toList" $ Ops.toList asyncly
-- , benchIO "toListSome" $ Ops.toListSome asyncly
, benchIO "filterAllOut" $ Ops.filterAllOut asyncly
, benchIO "filterAllIn" $ Ops.filterAllIn asyncly
, benchIO "filterSome" $ Ops.filterSome asyncly
, benchIO "breakAfterSome" $ Ops.breakAfterSome asyncly
]
, bgroup "wAsyncly"
[ benchIO "toNullAp" $ Ops.toNullAp wAsyncly
, benchIO "toNull" $ Ops.toNull wAsyncly
, benchIO "toNull3" $ Ops.toNull3 wAsyncly
, benchIO "toList" $ Ops.toList wAsyncly
-- , benchIO "toListSome" $ Ops.toListSome wAsyncly
, benchIO "filterAllOut" $ Ops.filterAllOut wAsyncly
, benchIO "filterAllIn" $ Ops.filterAllIn wAsyncly
, benchIO "filterSome" $ Ops.filterSome wAsyncly
, benchIO "breakAfterSome" $ Ops.breakAfterSome wAsyncly
]
, bgroup "parallely"
[ benchIO "toNullAp" $ Ops.toNullAp parallely
, benchIO "toNull" $ Ops.toNull parallely
, benchIO "toNull3" $ Ops.toNull3 parallely
, benchIO "toList" $ Ops.toList parallely
--, benchIO "toListSome" $ Ops.toListSome parallely
, benchIO "filterAllOut" $ Ops.filterAllOut parallely
, benchIO "filterAllIn" $ Ops.filterAllIn parallely
, benchIO "filterSome" $ Ops.filterSome parallely
, benchIO "breakAfterSome" $ Ops.breakAfterSome parallely
]
]
| harendra-kumar/asyncly | benchmark/Nested.hs | bsd-3-clause | 4,283 | 0 | 11 | 1,378 | 983 | 482 | 501 | 67 | 1 |
module Network.HTTP.AuthProxy.Claim
( CS.getKey, CS.Key
, headerName, assert, check, Options(..), defaultOptions, checkMiddleware
) where
import Control.Monad
import qualified Data.ByteString as BS
import Data.Maybe
import Network.HTTP.Types
import Network.Wai
import Text.Email.Validate
import qualified Web.ClientSession as CS
-- | The request header carrying the authenticated e-mail claim.
headerName :: HeaderName
headerName = "X-Authenticated-Email"

-- | Encrypt an e-mail address into an opaque claim token.
assert :: CS.Key -> EmailAddress -> IO BS.ByteString
assert key = CS.encryptIO key . toByteString

-- | Decrypt a claim token and re-validate it as an e-mail address;
-- 'Nothing' on bad ciphertext or a malformed address.
check :: CS.Key -> BS.ByteString -> Maybe EmailAddress
check key = CS.decrypt key >=> emailAddress
-- | Behaviour knobs for 'checkMiddleware'.
data Options =
  Options
  { requireAuthentication :: Bool                -- ^ Redirect requests without a valid claim?
  , requireEmailDomain :: Maybe BS.ByteString    -- ^ If set, only accept claims from this domain.
  } deriving (Eq, Ord, Show)

-- | Authentication required; any e-mail domain accepted.
defaultOptions :: Options
defaultOptions =
  Options
  { requireAuthentication = True
  , requireEmailDomain = Nothing
  }
-- | A zipper-ish view of one list element: the element itself (if found),
-- a difference-list of everything before it, and everything after it.
type ListElemView a = (Maybe a, ([a] -> [a], [a]))

-- | Locate the first element satisfying the predicate and split the list
-- around it.  If nothing matches, the view holds 'Nothing' and the whole
-- list sits in the prefix.
findView :: (a -> Bool) -> [a] -> ListElemView a
findView match = go id
  where
    go before [] = (Nothing, (before, []))
    go before (y : ys)
      | match y   = (Just y, (before, ys))
      | otherwise = go (before . (y :)) ys

-- | Replace (or clear) the focused element.
setElem :: Maybe a -> ListElemView a -> ListElemView a
setElem new (_, surroundings) = (new, surroundings)

-- | The focused element, if any.
getElem :: ListElemView a -> Maybe a
getElem (focus, _) = focus

-- | Rebuild the list from the view.
reassemble :: ListElemView a -> [a]
reassemble (focus, (before, after)) =
  before (maybe after (: after) focus)
-- | WAI middleware validating the encrypted claim header.  A valid (and,
-- if configured, domain-matching) claim is decrypted in place so the
-- wrapped app sees the plain address; otherwise the header is dropped
-- and, when authentication is required, the request is redirected (303)
-- to the login page.
checkMiddleware :: CS.Key -> Options -> Middleware
checkMiddleware key Options{..} app req respond =
  if allow
    then app req' respond
    else respond $ responseLBS status303 [(hLocation, "/auth/login")] ""
  where
    -- View of the claim header inside the request's header list.
    encHeaderView = findView ((== headerName) . fst) $ requestHeaders req
    encHeader = fmap snd $ getElem encHeaderView
    claimEmail = encHeader >>= check key
    -- Enforce the optional domain restriction.
    acceptedClaim = case (requireEmailDomain, claimEmail) of
      (Nothing, _) -> claimEmail
      (Just expectDomain, Just actualEmail)
        | domainPart actualEmail == expectDomain -> claimEmail
      _ -> Nothing
    -- Replace the encrypted header with the decrypted address (or drop it).
    decHeader = fmap ((headerName,) . toByteString) acceptedClaim
    headers' = reassemble $ setElem decHeader encHeaderView
    req' = req { requestHeaders = headers' }
    allow = not requireAuthentication || isJust acceptedClaim
| ktvoelker/auth-proxy | auth-claim/Network/HTTP/AuthProxy/Claim.hs | bsd-3-clause | 2,206 | 0 | 14 | 443 | 764 | 419 | 345 | -1 | -1 |
module Neural (NeuralNetwork, buildNeuralNetwork, buildRandomNeuralNetwork, calculateHighestOutputIndex, calculateOutputValues, mutate) where
import Data.List
import Data.List.Split
import Data.Ord
import System.Random
-- | All weights and signals are Doubles.
type Value = Double

-- | Mutation rate, layer sizes, and the weights.
data NeuralNetwork = NeuralNetwork Value [Int] [[[Value]]] deriving Show -- weight values grouped by vertice group, terminus, origin.
-- | Build a network whose weights are all zero.
buildStaticNeuralNetwork :: Value -> [Int] -> NeuralNetwork
buildStaticNeuralNetwork mutationRate nodeLayerSizes = buildNeuralNetwork mutationRate nodeLayerSizes []

-- | Build a network from a flat weight list.  A too-short list is
-- zero-padded (via one recursive step); the grouping ignores extras.
buildNeuralNetwork :: Value -> [Int] -> [Value] -> NeuralNetwork
buildNeuralNetwork mutationRate nodeLayerSizes weights
  | numWeights < numVertices = buildNeuralNetwork mutationRate nodeLayerSizes padWeights
  | otherwise = NeuralNetwork mutationRate nodeLayerSizes verticeGroups
  where -- Per layer, chunk that layer's weights into one list per terminus.
        verticeGroups = map (\(vtg,numOrigins) -> chunksOf numOrigins vtg) (zip groupWeightsByVerticeGroup originLayerSizes)
        groupWeightsByVerticeGroup = splitPlaces numVerticesByLayer weights
        numWeights = length weights
        numVertices = sum numVerticesByLayer
        -- Edges per layer: origins * termini for each adjacent layer pair.
        numVerticesByLayer = zipWith (*) nodeLayerSizes terminusLayerSizes
        terminusLayerSizes = drop 1 nodeLayerSizes
        -- NOTE(review): 'init' is partial; an empty layer list would crash.
        originLayerSizes = init nodeLayerSizes
        padWeights = weights ++ (take (numVertices - numWeights) $ repeat 0.0)
-- | Build a network with weights drawn uniformly from the given range.
buildRandomNeuralNetwork :: (Value,Value) -> [Int] -> IO NeuralNetwork
buildRandomNeuralNetwork verticeRange nodeLayerSizes = do
  g <- newStdGen
  -- NOTE(review): the (1.0,1.0) range makes the mutation rate always
  -- exactly 1.0 -- confirm whether a genuine random range was intended.
  let (mutationRate,g') = randomR (1.0,1.0) g
      randomVerticeValues = take numVertices $ randomRs verticeRange g'
        where numVertices = sum $ zipWith (*) nodeLayerSizes (drop 1 nodeLayerSizes)
  return $ buildNeuralNetwork mutationRate nodeLayerSizes randomVerticeValues
-- | Index of the largest output produced by the network for the given
-- inputs (the last such index on ties, per 'maximumBy').
calculateHighestOutputIndex :: [Value] -> NeuralNetwork -> Int
calculateHighestOutputIndex inputValues network =
  snd (maximumBy (comparing fst) (zip outputs [0 ..]))
  where outputs = calculateOutputValues inputValues network
-- | Feed the input vector forward through every layer of the network.
calculateOutputValues :: [Value] -> NeuralNetwork -> [Value]
calculateOutputValues inputs (NeuralNetwork _ _ verticeGroups) =
  foldl' calculateLayerValues inputs verticeGroups

-- | One forward step: each node's activation is the squashed weighted sum
-- of the previous layer's activations.
calculateLayerValues :: [Value] -> [[Value]] -> [Value]
calculateLayerValues previousLayer = map activate
  where
    activate weights = squash (sum (zipWith (*) previousLayer weights))
    -- Logistic sigmoid, written exactly as in the original source.
    squash x = 1 / (1 + ((exp 1) ** (negate x)))
-- | With probability equal to the network's mutation rate, replace one
-- randomly chosen weight with a fresh value drawn from the given range;
-- otherwise return the network unchanged.
mutate :: (Value,Value) -> NeuralNetwork -> IO NeuralNetwork
mutate verticeRange neural@(NeuralNetwork mutationRate nodeLayerSizes verticeGroups) = do
  g <- newStdGen
  let (chance,g') = randomR (0.0,1.0) g
  if chance <= mutationRate then mutateVertices g' else return neural
  where mutateVertices g = do
          return $ buildNeuralNetwork mutationRate nodeLayerSizes newValues
          where values = concat $ concat verticeGroups
                (verticeToReplace,g') = randomR (0,(length values) - 1) g
                -- NOTE(review): partial pattern below; a network with no
                -- weights at all would crash here -- confirm unreachable.
                (left,(_:right)) = splitAt verticeToReplace values
                (newValue,_) = randomR verticeRange g'
                newValues = left ++ [newValue] ++ right
| KenseiMaedhros/neural-net-visual-test | src/Neural.hs | bsd-3-clause | 3,363 | 0 | 15 | 660 | 908 | 477 | 431 | 50 | 2 |
module Graphics.Gnuplot.Execute where
import System.Exit (ExitCode, )
import System.Cmd (system, )
import Graphics.Gnuplot.Utility
(quote, semiColonConcat, )
-- | Echo the given script lines into a freshly spawned @gnuplot@ process
-- via the shell, returning its exit code.
simple ::
     [String] {-^ The lines of the gnuplot script to be piped into gnuplot -}
  -> [String] {-^ Options for gnuplot -}
  -> IO ExitCode
simple program options =
  let cmd =
        -- NOTE(review): the command is assembled by string concatenation;
        -- script text containing quotes or shell metacharacters can break
        -- out of the quoting -- 'quote' presumably covers common cases,
        -- but verify before passing untrusted input.
        "sh -c 'echo " ++ quote (semiColonConcat program) ++
        " | gnuplot " ++ unwords options ++ "'"
  in do --putStrLn cmd
        system cmd
{-
escape :: String -> String
escape ('\"':xs) = '\\' : '\"' : escape xs
escape (x:xs) = x : escape xs
escape [] = []
-}
| wavewave/gnuplot | execute/shell/Graphics/Gnuplot/Execute.hs | bsd-3-clause | 642 | 0 | 15 | 166 | 125 | 70 | 55 | 15 | 1 |
{-# LANGUAGE
ScopedTypeVariables, TupleSections, LambdaCase,
RankNTypes, ScopedTypeVariables #-}
module CBSD.Messages.SocketComm where
import CBSD.Messages.Types
import Network
import System.IO
import Control.Exception
import Control.Concurrent
import Data.Function
import Data.Aeson
import Text.Printf
import qualified Data.ByteString.Lazy as LB
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as CB
-- | Try grabbing ports until there's a unused one.
-- Starts at 2000 and walks upward, swallowing the 'IOException' a busy
-- port raises.
listenOnUnusedPort :: IO (PortNumber, Socket)
listenOnUnusedPort = tryFrom 2000
  where
    tryFrom port =
      catch (do sock <- listenOn (PortNumber port)
                return (port, sock))
            (\(_ :: IOException) -> tryFrom (port + 1))
-- Comm primitives (Note the StripEmptyContent wrapping!)
------------------------------------------------------------
-- | Read one line from the handle and decode it as JSON (unwrapping the
-- 'StripEmptyContent' envelope).  Calls 'error' on undecodable input.
getMessage :: forall a. FromJSON a => Handle -> IO a
getMessage handle = do
  raw <- B.hGetLine handle
  case decodeStrict raw of
    Just msg -> return (unStripEmptyContent msg)
    Nothing  -> error (printf "received invalid message: %s\n" (show raw))
-- | Encode a value (wrapped in 'StripEmptyContent') as one JSON line and
-- write it to the handle.
putMessage :: ToJSON a => Handle -> a -> IO ()
putMessage handle a =
  CB.hPutStrLn handle (LB.toStrict (encode (StripEmptyContent a)))
-- | Send request, then get response
request :: (ToJSON a, FromJSON b) => Handle -> a -> IO b
request handle a = putMessage handle a >> getMessage handle
-- | Get one request, build a response with the supplied action and send it.
-- If the action yields 'Nothing', 'error' is raised with the (re-encoded)
-- request for diagnosis.
respond ::
  (FromJSON a, ToJSON a, ToJSON b) => Handle -> (a -> IO (Maybe b)) -> IO ()
respond handle makeResponse = do
  req   <- getMessage handle
  mResp <- makeResponse req
  case mResp of
    Just resp -> putMessage handle resp
    Nothing   -> error (printf "received invalid request: %s\n" (show (encode req)))
-- | Register this component at the central coordinator and establish a
-- bidirectional connection.  The handshake is:
--
--   1. obtain the center's port, then retry a TCP connect to it (1 s
--      between attempts) until it succeeds;
--   2. open a fresh local listening port of our own;
--   3. send a CONNECT request advertising our name, game types and that
--      local port, and insist on an OK response ('error' otherwise);
--   4. accept the center's incoming connection on our port.
--
-- Both handles are line-buffered before being returned.
registerAtCenter ::
     IO PortNumber     -- ^ Port of center
  -> String            -- ^ Component name
  -> [GameType]
  -> ComponentType
  -> IO (
     PortNumber, Handle,  -- ^ Input port and handle
     PortNumber, Handle)  -- ^ Output port and handle
registerAtCenter getCenterOutPort name gameTypes componentType = do
  hSetBuffering stdout LineBuffering
  -- Get center port number
  printf "getting port number of center\n"
  centerOutPort <- getCenterOutPort
  printf "acquired port number: %s\n" (show centerOutPort)
  -- Connect to center
  -- NOTE(review): retries forever with no give-up bound — presumably
  -- intended so components may start before the center; confirm.
  hCenterOut <- fix $ \again -> do
    printf "trying to connect to center at port %s\n" (show centerOutPort)
    catch (connectTo "localhost" (PortNumber centerOutPort))
          (\(_ :: IOException) -> do
               printf "failed to connect\n"
               threadDelay 1000000
               again)
  hSetBuffering hCenterOut LineBuffering
  printf "connected\n"
  -- Accept center
  (centerInPort, centerInSock) <- listenOnUnusedPort
  printf "listening for center on port %s\n" (show centerInPort)
  putMessage hCenterOut (Req_CONNECT $
    ReqConnect gameTypes name componentType (fromIntegral centerInPort))
  printf "CONNECT request sent to center\n"
  resConnect <- getMessage hCenterOut
  -- Anything other than an OK CONNECT response is fatal.
  case resConnect of
    Res_CONNECT (ResConnect res _) -> case res of
      OK -> pure ()
      FAILURE -> error "received FAILURE code in CONNECT response from center\n"
    other -> error $ printf "expected CONNECT response, got %s\n" (show $ encode other)
  printf "CONNECT response OK\n"
  (hCenterIn, _, _) <- accept centerInSock
  hSetBuffering hCenterIn LineBuffering
  printf "accepted center\n"
  pure (centerInPort, hCenterIn, centerOutPort, hCenterOut)
| AndrasKovacs/elte-cbsd | src/CBSD/Messages/SocketComm.hs | bsd-3-clause | 3,654 | 0 | 17 | 781 | 895 | 452 | 443 | 80 | 3 |
module WriteATypeSignature where
-- chapter 5 "write a type signature" pg. 151
-- | Head of a list.  Partial: raises a pattern-match error on @[]@.
functionH :: [a] -> a
functionH list = case list of
  (firstElem : _) -> firstElem
-- | True iff the first argument is strictly greater than the second.
-- (@if c then True else False@ is just @c@.)
functionC :: (Ord a) => a -> a -> Bool
functionC x y = x > y
-- | Second component of a pair; identical to 'snd'.
functionS :: (a, b) -> b
functionS = snd
| brodyberg/Notes | ProjectRosalind.hsproj/LearnHaskell/lib/HaskellBook/WriteATypeSignature.hs | mit | 259 | 0 | 7 | 59 | 108 | 62 | 46 | 8 | 2 |
-- Copyright (c) 2015 Eric McCorkle. All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
--
-- 1. Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
--
-- 2. Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
-- 3. Neither the name of the author nor the names of any contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS''
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
-- TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-- PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS
-- OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
-- USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
-- OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-- SUCH DAMAGE.
module Main(main) where
import Test.HUnitPlus
import qualified Tests.Control as Control
import qualified Tests.Language as Language
-- All sub-suites gathered into one test suite run concurrently.
tests = [ Control.tests, Language.tests ]

testsuite = TestSuite { suiteName         = "UnitTests"
                      , suiteConcurrently = True
                      , suiteTests        = tests
                      , suiteOptions      = [] }

main :: IO ()
main = createMain [testsuite]
| emc2/saltlang | test/library/UnitTest.hs | bsd-3-clause | 1,919 | 0 | 7 | 343 | 128 | 91 | 37 | 9 | 1 |
-- |
-- TH.Lib contains lots of useful helper functions for
-- generating and manipulating Template Haskell terms
{-# LANGUAGE CPP #-}
module Language.Haskell.TH.Lib where
-- All of the exports from this module should
-- be "public" functions. The main module TH
-- re-exports them all.
import Language.Haskell.TH.Syntax hiding (Role, InjectivityAnn)
import qualified Language.Haskell.TH.Syntax as TH
import Control.Monad( liftM, liftM2 )
import Data.Word( Word8 )
----------------------------------------------------------
-- * Type synonyms
----------------------------------------------------------
type InfoQ = Q Info
type PatQ = Q Pat
type FieldPatQ = Q FieldPat
type ExpQ = Q Exp
type TExpQ a = Q (TExp a)
type DecQ = Q Dec
type DecsQ = Q [Dec]
type ConQ = Q Con
type TypeQ = Q Type
type TyLitQ = Q TyLit
type CxtQ = Q Cxt
type PredQ = Q Pred
type DerivClauseQ = Q DerivClause
type MatchQ = Q Match
type ClauseQ = Q Clause
type BodyQ = Q Body
type GuardQ = Q Guard
type StmtQ = Q Stmt
type RangeQ = Q Range
type SourceStrictnessQ = Q SourceStrictness
type SourceUnpackednessQ = Q SourceUnpackedness
type BangQ = Q Bang
type BangTypeQ = Q BangType
type VarBangTypeQ = Q VarBangType
type StrictTypeQ = Q StrictType
type VarStrictTypeQ = Q VarStrictType
type FieldExpQ = Q FieldExp
type RuleBndrQ = Q RuleBndr
type TySynEqnQ = Q TySynEqn
type PatSynDirQ = Q PatSynDir
type PatSynArgsQ = Q PatSynArgs
-- must be defined here for DsMeta to find it
type Role = TH.Role
type InjectivityAnn = TH.InjectivityAnn
----------------------------------------------------------
-- * Lowercase pattern syntax functions
----------------------------------------------------------
intPrimL :: Integer -> Lit
intPrimL = IntPrimL
wordPrimL :: Integer -> Lit
wordPrimL = WordPrimL
floatPrimL :: Rational -> Lit
floatPrimL = FloatPrimL
doublePrimL :: Rational -> Lit
doublePrimL = DoublePrimL
integerL :: Integer -> Lit
integerL = IntegerL
charL :: Char -> Lit
charL = CharL
charPrimL :: Char -> Lit
charPrimL = CharPrimL
stringL :: String -> Lit
stringL = StringL
stringPrimL :: [Word8] -> Lit
stringPrimL = StringPrimL
rationalL :: Rational -> Lit
rationalL = RationalL
litP :: Lit -> PatQ
litP l = return (LitP l)
varP :: Name -> PatQ
varP v = return (VarP v)
tupP :: [PatQ] -> PatQ
tupP ps = do { ps1 <- sequence ps; return (TupP ps1)}
unboxedTupP :: [PatQ] -> PatQ
unboxedTupP ps = do { ps1 <- sequence ps; return (UnboxedTupP ps1)}
unboxedSumP :: PatQ -> SumAlt -> SumArity -> PatQ
unboxedSumP p alt arity = do { p1 <- p; return (UnboxedSumP p1 alt arity) }
conP :: Name -> [PatQ] -> PatQ
conP n ps = do ps' <- sequence ps
return (ConP n ps')
infixP :: PatQ -> Name -> PatQ -> PatQ
infixP p1 n p2 = do p1' <- p1
p2' <- p2
return (InfixP p1' n p2')
uInfixP :: PatQ -> Name -> PatQ -> PatQ
uInfixP p1 n p2 = do p1' <- p1
p2' <- p2
return (UInfixP p1' n p2')
parensP :: PatQ -> PatQ
parensP p = do p' <- p
return (ParensP p')
tildeP :: PatQ -> PatQ
tildeP p = do p' <- p
return (TildeP p')
bangP :: PatQ -> PatQ
bangP p = do p' <- p
return (BangP p')
asP :: Name -> PatQ -> PatQ
asP n p = do p' <- p
return (AsP n p')
wildP :: PatQ
wildP = return WildP
recP :: Name -> [FieldPatQ] -> PatQ
recP n fps = do fps' <- sequence fps
return (RecP n fps')
listP :: [PatQ] -> PatQ
listP ps = do ps' <- sequence ps
return (ListP ps')
sigP :: PatQ -> TypeQ -> PatQ
sigP p t = do p' <- p
t' <- t
return (SigP p' t')
viewP :: ExpQ -> PatQ -> PatQ
viewP e p = do e' <- e
p' <- p
return (ViewP e' p')
fieldPat :: Name -> PatQ -> FieldPatQ
fieldPat n p = do p' <- p
return (n, p')
-------------------------------------------------------------------------------
-- * Stmt
bindS :: PatQ -> ExpQ -> StmtQ
bindS p e = liftM2 BindS p e
letS :: [DecQ] -> StmtQ
letS ds = do { ds1 <- sequence ds; return (LetS ds1) }
noBindS :: ExpQ -> StmtQ
noBindS e = do { e1 <- e; return (NoBindS e1) }
parS :: [[StmtQ]] -> StmtQ
parS sss = do { sss1 <- mapM sequence sss; return (ParS sss1) }
-------------------------------------------------------------------------------
-- * Range
fromR :: ExpQ -> RangeQ
fromR x = do { a <- x; return (FromR a) }
fromThenR :: ExpQ -> ExpQ -> RangeQ
fromThenR x y = do { a <- x; b <- y; return (FromThenR a b) }
fromToR :: ExpQ -> ExpQ -> RangeQ
fromToR x y = do { a <- x; b <- y; return (FromToR a b) }
fromThenToR :: ExpQ -> ExpQ -> ExpQ -> RangeQ
fromThenToR x y z = do { a <- x; b <- y; c <- z;
return (FromThenToR a b c) }
-------------------------------------------------------------------------------
-- * Body
normalB :: ExpQ -> BodyQ
normalB e = do { e1 <- e; return (NormalB e1) }
guardedB :: [Q (Guard,Exp)] -> BodyQ
guardedB ges = do { ges' <- sequence ges; return (GuardedB ges') }
-------------------------------------------------------------------------------
-- * Guard
normalG :: ExpQ -> GuardQ
normalG e = do { e1 <- e; return (NormalG e1) }
normalGE :: ExpQ -> ExpQ -> Q (Guard, Exp)
normalGE g e = do { g1 <- g; e1 <- e; return (NormalG g1, e1) }
patG :: [StmtQ] -> GuardQ
patG ss = do { ss' <- sequence ss; return (PatG ss') }
patGE :: [StmtQ] -> ExpQ -> Q (Guard, Exp)
patGE ss e = do { ss' <- sequence ss;
e' <- e;
return (PatG ss', e') }
-------------------------------------------------------------------------------
-- * Match and Clause
-- | Use with 'caseE'
match :: PatQ -> BodyQ -> [DecQ] -> MatchQ
match p rhs ds = do { p' <- p;
r' <- rhs;
ds' <- sequence ds;
return (Match p' r' ds') }
-- | Use with 'funD'
clause :: [PatQ] -> BodyQ -> [DecQ] -> ClauseQ
clause ps r ds = do { ps' <- sequence ps;
r' <- r;
ds' <- sequence ds;
return (Clause ps' r' ds') }
---------------------------------------------------------------------------
-- * Exp
-- | Dynamically binding a variable (unhygenic)
dyn :: String -> ExpQ
dyn s = return (VarE (mkName s))
varE :: Name -> ExpQ
varE s = return (VarE s)
conE :: Name -> ExpQ
conE s = return (ConE s)
litE :: Lit -> ExpQ
litE c = return (LitE c)
appE :: ExpQ -> ExpQ -> ExpQ
appE x y = do { a <- x; b <- y; return (AppE a b)}
appTypeE :: ExpQ -> TypeQ -> ExpQ
appTypeE x t = do { a <- x; s <- t; return (AppTypeE a s) }
parensE :: ExpQ -> ExpQ
parensE x = do { x' <- x; return (ParensE x') }
uInfixE :: ExpQ -> ExpQ -> ExpQ -> ExpQ
uInfixE x s y = do { x' <- x; s' <- s; y' <- y;
return (UInfixE x' s' y') }
infixE :: Maybe ExpQ -> ExpQ -> Maybe ExpQ -> ExpQ
infixE (Just x) s (Just y) = do { a <- x; s' <- s; b <- y;
return (InfixE (Just a) s' (Just b))}
infixE Nothing s (Just y) = do { s' <- s; b <- y;
return (InfixE Nothing s' (Just b))}
infixE (Just x) s Nothing = do { a <- x; s' <- s;
return (InfixE (Just a) s' Nothing)}
infixE Nothing s Nothing = do { s' <- s; return (InfixE Nothing s' Nothing) }
infixApp :: ExpQ -> ExpQ -> ExpQ -> ExpQ
infixApp x y z = infixE (Just x) y (Just z)
sectionL :: ExpQ -> ExpQ -> ExpQ
sectionL x y = infixE (Just x) y Nothing
sectionR :: ExpQ -> ExpQ -> ExpQ
sectionR x y = infixE Nothing x (Just y)
lamE :: [PatQ] -> ExpQ -> ExpQ
lamE ps e = do ps' <- sequence ps
e' <- e
return (LamE ps' e')
-- | Single-arg lambda
lam1E :: PatQ -> ExpQ -> ExpQ
lam1E p e = lamE [p] e
-- | A @\\case@ expression from its alternatives.
-- (@m >>= return . f@ is just @liftM f m@.)
lamCaseE :: [MatchQ] -> ExpQ
lamCaseE ms = liftM LamCaseE (sequence ms)
tupE :: [ExpQ] -> ExpQ
tupE es = do { es1 <- sequence es; return (TupE es1)}
unboxedTupE :: [ExpQ] -> ExpQ
unboxedTupE es = do { es1 <- sequence es; return (UnboxedTupE es1)}
unboxedSumE :: ExpQ -> SumAlt -> SumArity -> ExpQ
unboxedSumE e alt arity = do { e1 <- e; return (UnboxedSumE e1 alt arity) }
condE :: ExpQ -> ExpQ -> ExpQ -> ExpQ
condE x y z = do { a <- x; b <- y; c <- z; return (CondE a b c)}
-- | A multi-way @if@ expression from its guarded alternatives.
-- (@m >>= return . f@ is just @liftM f m@.)
multiIfE :: [Q (Guard, Exp)] -> ExpQ
multiIfE alts = liftM MultiIfE (sequence alts)
letE :: [DecQ] -> ExpQ -> ExpQ
letE ds e = do { ds2 <- sequence ds; e2 <- e; return (LetE ds2 e2) }
caseE :: ExpQ -> [MatchQ] -> ExpQ
caseE e ms = do { e1 <- e; ms1 <- sequence ms; return (CaseE e1 ms1) }
doE :: [StmtQ] -> ExpQ
doE ss = do { ss1 <- sequence ss; return (DoE ss1) }
compE :: [StmtQ] -> ExpQ
compE ss = do { ss1 <- sequence ss; return (CompE ss1) }
arithSeqE :: RangeQ -> ExpQ
arithSeqE r = do { r' <- r; return (ArithSeqE r') }
listE :: [ExpQ] -> ExpQ
listE es = do { es1 <- sequence es; return (ListE es1) }
sigE :: ExpQ -> TypeQ -> ExpQ
sigE e t = do { e1 <- e; t1 <- t; return (SigE e1 t1) }
recConE :: Name -> [Q (Name,Exp)] -> ExpQ
recConE c fs = do { flds <- sequence fs; return (RecConE c flds) }
recUpdE :: ExpQ -> [Q (Name,Exp)] -> ExpQ
recUpdE e fs = do { e1 <- e; flds <- sequence fs; return (RecUpdE e1 flds) }
stringE :: String -> ExpQ
stringE = litE . stringL
fieldExp :: Name -> ExpQ -> Q (Name, Exp)
fieldExp s e = do { e' <- e; return (s,e') }
-- | @staticE x = [| static x |]@
staticE :: ExpQ -> ExpQ
staticE = fmap StaticE
unboundVarE :: Name -> ExpQ
unboundVarE s = return (UnboundVarE s)
-- ** 'arithSeqE' Shortcuts
fromE :: ExpQ -> ExpQ
fromE x = do { a <- x; return (ArithSeqE (FromR a)) }
fromThenE :: ExpQ -> ExpQ -> ExpQ
fromThenE x y = do { a <- x; b <- y; return (ArithSeqE (FromThenR a b)) }
fromToE :: ExpQ -> ExpQ -> ExpQ
fromToE x y = do { a <- x; b <- y; return (ArithSeqE (FromToR a b)) }
fromThenToE :: ExpQ -> ExpQ -> ExpQ -> ExpQ
fromThenToE x y z = do { a <- x; b <- y; c <- z;
return (ArithSeqE (FromThenToR a b c)) }
-------------------------------------------------------------------------------
-- * Dec
valD :: PatQ -> BodyQ -> [DecQ] -> DecQ
valD p b ds =
do { p' <- p
; ds' <- sequence ds
; b' <- b
; return (ValD p' b' ds')
}
funD :: Name -> [ClauseQ] -> DecQ
funD nm cs =
do { cs1 <- sequence cs
; return (FunD nm cs1)
}
tySynD :: Name -> [TyVarBndr] -> TypeQ -> DecQ
tySynD tc tvs rhs = do { rhs1 <- rhs; return (TySynD tc tvs rhs1) }
dataD :: CxtQ -> Name -> [TyVarBndr] -> Maybe Kind -> [ConQ] -> [DerivClauseQ]
-> DecQ
dataD ctxt tc tvs ksig cons derivs =
do
ctxt1 <- ctxt
cons1 <- sequence cons
derivs1 <- sequence derivs
return (DataD ctxt1 tc tvs ksig cons1 derivs1)
newtypeD :: CxtQ -> Name -> [TyVarBndr] -> Maybe Kind -> ConQ -> [DerivClauseQ]
-> DecQ
newtypeD ctxt tc tvs ksig con derivs =
do
ctxt1 <- ctxt
con1 <- con
derivs1 <- sequence derivs
return (NewtypeD ctxt1 tc tvs ksig con1 derivs1)
classD :: CxtQ -> Name -> [TyVarBndr] -> [FunDep] -> [DecQ] -> DecQ
classD ctxt cls tvs fds decs =
do
decs1 <- sequence decs
ctxt1 <- ctxt
return $ ClassD ctxt1 cls tvs fds decs1
instanceD :: CxtQ -> TypeQ -> [DecQ] -> DecQ
instanceD = instanceWithOverlapD Nothing
instanceWithOverlapD :: Maybe Overlap -> CxtQ -> TypeQ -> [DecQ] -> DecQ
instanceWithOverlapD o ctxt ty decs =
do
ctxt1 <- ctxt
decs1 <- sequence decs
ty1 <- ty
return $ InstanceD o ctxt1 ty1 decs1
-- | A type-signature declaration.  (Dropped the redundant @$@ of the
-- original @liftM (SigD fun) $ ty@.)
sigD :: Name -> TypeQ -> DecQ
sigD fun ty = liftM (SigD fun) ty
forImpD :: Callconv -> Safety -> String -> Name -> TypeQ -> DecQ
forImpD cc s str n ty
= do ty' <- ty
return $ ForeignD (ImportF cc s str n ty')
infixLD :: Int -> Name -> DecQ
infixLD prec nm = return (InfixD (Fixity prec InfixL) nm)
infixRD :: Int -> Name -> DecQ
infixRD prec nm = return (InfixD (Fixity prec InfixR) nm)
infixND :: Int -> Name -> DecQ
infixND prec nm = return (InfixD (Fixity prec InfixN) nm)
pragInlD :: Name -> Inline -> RuleMatch -> Phases -> DecQ
pragInlD name inline rm phases
= return $ PragmaD $ InlineP name inline rm phases
pragSpecD :: Name -> TypeQ -> Phases -> DecQ
pragSpecD n ty phases
= do
ty1 <- ty
return $ PragmaD $ SpecialiseP n ty1 Nothing phases
pragSpecInlD :: Name -> TypeQ -> Inline -> Phases -> DecQ
pragSpecInlD n ty inline phases
= do
ty1 <- ty
return $ PragmaD $ SpecialiseP n ty1 (Just inline) phases
pragSpecInstD :: TypeQ -> DecQ
pragSpecInstD ty
= do
ty1 <- ty
return $ PragmaD $ SpecialiseInstP ty1
pragRuleD :: String -> [RuleBndrQ] -> ExpQ -> ExpQ -> Phases -> DecQ
pragRuleD n bndrs lhs rhs phases
= do
bndrs1 <- sequence bndrs
lhs1 <- lhs
rhs1 <- rhs
return $ PragmaD $ RuleP n bndrs1 lhs1 rhs1 phases
pragAnnD :: AnnTarget -> ExpQ -> DecQ
pragAnnD target expr
= do
exp1 <- expr
return $ PragmaD $ AnnP target exp1
pragLineD :: Int -> String -> DecQ
pragLineD line file = return $ PragmaD $ LineP line file
dataInstD :: CxtQ -> Name -> [TypeQ] -> Maybe Kind -> [ConQ] -> [DerivClauseQ]
-> DecQ
dataInstD ctxt tc tys ksig cons derivs =
do
ctxt1 <- ctxt
tys1 <- sequence tys
cons1 <- sequence cons
derivs1 <- sequence derivs
return (DataInstD ctxt1 tc tys1 ksig cons1 derivs1)
newtypeInstD :: CxtQ -> Name -> [TypeQ] -> Maybe Kind -> ConQ -> [DerivClauseQ]
-> DecQ
newtypeInstD ctxt tc tys ksig con derivs =
do
ctxt1 <- ctxt
tys1 <- sequence tys
con1 <- con
derivs1 <- sequence derivs
return (NewtypeInstD ctxt1 tc tys1 ksig con1 derivs1)
tySynInstD :: Name -> TySynEqnQ -> DecQ
tySynInstD tc eqn =
do
eqn1 <- eqn
return (TySynInstD tc eqn1)
dataFamilyD :: Name -> [TyVarBndr] -> Maybe Kind -> DecQ
dataFamilyD tc tvs kind
= return $ DataFamilyD tc tvs kind
openTypeFamilyD :: Name -> [TyVarBndr] -> FamilyResultSig
-> Maybe InjectivityAnn -> DecQ
openTypeFamilyD tc tvs res inj
= return $ OpenTypeFamilyD (TypeFamilyHead tc tvs res inj)
closedTypeFamilyD :: Name -> [TyVarBndr] -> FamilyResultSig
-> Maybe InjectivityAnn -> [TySynEqnQ] -> DecQ
closedTypeFamilyD tc tvs result injectivity eqns =
do eqns1 <- sequence eqns
return (ClosedTypeFamilyD (TypeFamilyHead tc tvs result injectivity) eqns1)
-- These were deprecated in GHC 8.0 with a plan to remove them in 8.2. If you
-- remove this check please also:
-- 1. remove deprecated functions
-- 2. remove CPP language extension from top of this module
-- 3. remove the FamFlavour data type from Syntax module
-- 4. make sure that all references to FamFlavour are gone from DsMeta,
-- Convert, TcSplice (follows from 3)
#if __GLASGOW_HASKELL__ >= 802
#error Remove deprecated familyNoKindD, familyKindD, closedTypeFamilyNoKindD and closedTypeFamilyKindD
#endif
{-# DEPRECATED familyNoKindD, familyKindD
"This function will be removed in the next stable release. Use openTypeFamilyD/dataFamilyD instead." #-}
familyNoKindD :: FamFlavour -> Name -> [TyVarBndr] -> DecQ
familyNoKindD flav tc tvs =
case flav of
TypeFam -> return $ OpenTypeFamilyD (TypeFamilyHead tc tvs NoSig Nothing)
DataFam -> return $ DataFamilyD tc tvs Nothing
familyKindD :: FamFlavour -> Name -> [TyVarBndr] -> Kind -> DecQ
familyKindD flav tc tvs k =
case flav of
TypeFam ->
return $ OpenTypeFamilyD (TypeFamilyHead tc tvs (KindSig k) Nothing)
DataFam -> return $ DataFamilyD tc tvs (Just k)
{-# DEPRECATED closedTypeFamilyNoKindD, closedTypeFamilyKindD
"This function will be removed in the next stable release. Use closedTypeFamilyD instead." #-}
closedTypeFamilyNoKindD :: Name -> [TyVarBndr] -> [TySynEqnQ] -> DecQ
closedTypeFamilyNoKindD tc tvs eqns =
do eqns1 <- sequence eqns
return (ClosedTypeFamilyD (TypeFamilyHead tc tvs NoSig Nothing) eqns1)
closedTypeFamilyKindD :: Name -> [TyVarBndr] -> Kind -> [TySynEqnQ] -> DecQ
closedTypeFamilyKindD tc tvs kind eqns =
do eqns1 <- sequence eqns
return (ClosedTypeFamilyD (TypeFamilyHead tc tvs (KindSig kind) Nothing)
eqns1)
roleAnnotD :: Name -> [Role] -> DecQ
roleAnnotD name roles = return $ RoleAnnotD name roles
standaloneDerivD :: CxtQ -> TypeQ -> DecQ
standaloneDerivD = standaloneDerivWithStrategyD Nothing
standaloneDerivWithStrategyD :: Maybe DerivStrategy -> CxtQ -> TypeQ -> DecQ
standaloneDerivWithStrategyD ds ctxtq tyq =
do
ctxt <- ctxtq
ty <- tyq
return $ StandaloneDerivD ds ctxt ty
defaultSigD :: Name -> TypeQ -> DecQ
defaultSigD n tyq =
do
ty <- tyq
return $ DefaultSigD n ty
-- | Pattern synonym declaration
patSynD :: Name -> PatSynArgsQ -> PatSynDirQ -> PatQ -> DecQ
patSynD name args dir pat = do
args' <- args
dir' <- dir
pat' <- pat
return (PatSynD name args' dir' pat')
-- | Pattern synonym type signature
patSynSigD :: Name -> TypeQ -> DecQ
patSynSigD nm ty =
do ty' <- ty
return $ PatSynSigD nm ty'
tySynEqn :: [TypeQ] -> TypeQ -> TySynEqnQ
tySynEqn lhs rhs =
do
lhs1 <- sequence lhs
rhs1 <- rhs
return (TySynEqn lhs1 rhs1)
cxt :: [PredQ] -> CxtQ
cxt = sequence
derivClause :: Maybe DerivStrategy -> [PredQ] -> DerivClauseQ
derivClause ds p = do p' <- cxt p
return $ DerivClause ds p'
normalC :: Name -> [BangTypeQ] -> ConQ
normalC con strtys = liftM (NormalC con) $ sequence strtys
recC :: Name -> [VarBangTypeQ] -> ConQ
recC con varstrtys = liftM (RecC con) $ sequence varstrtys
infixC :: Q (Bang, Type) -> Name -> Q (Bang, Type) -> ConQ
infixC st1 con st2 = do st1' <- st1
st2' <- st2
return $ InfixC st1' con st2'
forallC :: [TyVarBndr] -> CxtQ -> ConQ -> ConQ
forallC ns ctxt con = liftM2 (ForallC ns) ctxt con
gadtC :: [Name] -> [StrictTypeQ] -> TypeQ -> ConQ
gadtC cons strtys ty = liftM2 (GadtC cons) (sequence strtys) ty
recGadtC :: [Name] -> [VarStrictTypeQ] -> TypeQ -> ConQ
recGadtC cons varstrtys ty = liftM2 (RecGadtC cons) (sequence varstrtys) ty
-------------------------------------------------------------------------------
-- * Type
forallT :: [TyVarBndr] -> CxtQ -> TypeQ -> TypeQ
forallT tvars ctxt ty = do
ctxt1 <- ctxt
ty1 <- ty
return $ ForallT tvars ctxt1 ty1
varT :: Name -> TypeQ
varT = return . VarT
conT :: Name -> TypeQ
conT = return . ConT
infixT :: TypeQ -> Name -> TypeQ -> TypeQ
infixT t1 n t2 = do t1' <- t1
t2' <- t2
return (InfixT t1' n t2')
uInfixT :: TypeQ -> Name -> TypeQ -> TypeQ
uInfixT t1 n t2 = do t1' <- t1
t2' <- t2
return (UInfixT t1' n t2')
parensT :: TypeQ -> TypeQ
parensT t = do t' <- t
return (ParensT t')
appT :: TypeQ -> TypeQ -> TypeQ
appT t1 t2 = do
t1' <- t1
t2' <- t2
return $ AppT t1' t2'
arrowT :: TypeQ
arrowT = return ArrowT
listT :: TypeQ
listT = return ListT
-- | A type-level literal wrapped into a 'Type'.
litT :: TyLitQ -> TypeQ
litT = fmap LitT
tupleT :: Int -> TypeQ
tupleT i = return (TupleT i)
unboxedTupleT :: Int -> TypeQ
unboxedTupleT i = return (UnboxedTupleT i)
unboxedSumT :: SumArity -> TypeQ
unboxedSumT arity = return (UnboxedSumT arity)
sigT :: TypeQ -> Kind -> TypeQ
sigT t k
= do
t' <- t
return $ SigT t' k
equalityT :: TypeQ
equalityT = return EqualityT
wildCardT :: TypeQ
wildCardT = return WildCardT
{-# DEPRECATED classP "As of template-haskell-2.10, constraint predicates (Pred) are just types (Type), in keeping with ConstraintKinds. Please use 'conT' and 'appT'." #-}
classP :: Name -> [Q Type] -> Q Pred
classP cla tys
= do
tysl <- sequence tys
return (foldl AppT (ConT cla) tysl)
{-# DEPRECATED equalP "As of template-haskell-2.10, constraint predicates (Pred) are just types (Type), in keeping with ConstraintKinds. Please see 'equalityT'." #-}
equalP :: TypeQ -> TypeQ -> PredQ
equalP tleft tright
= do
tleft1 <- tleft
tright1 <- tright
eqT <- equalityT
return (foldl AppT eqT [tleft1, tright1])
promotedT :: Name -> TypeQ
promotedT = return . PromotedT
promotedTupleT :: Int -> TypeQ
promotedTupleT i = return (PromotedTupleT i)
promotedNilT :: TypeQ
promotedNilT = return PromotedNilT
promotedConsT :: TypeQ
promotedConsT = return PromotedConsT
noSourceUnpackedness, sourceNoUnpack, sourceUnpack :: SourceUnpackednessQ
noSourceUnpackedness = return NoSourceUnpackedness
sourceNoUnpack = return SourceNoUnpack
sourceUnpack = return SourceUnpack
noSourceStrictness, sourceLazy, sourceStrict :: SourceStrictnessQ
noSourceStrictness = return NoSourceStrictness
sourceLazy = return SourceLazy
sourceStrict = return SourceStrict
{-# DEPRECATED isStrict
["Use 'bang'. See https://ghc.haskell.org/trac/ghc/wiki/Migration/8.0. ",
"Example usage: 'bang noSourceUnpackedness sourceStrict'"] #-}
{-# DEPRECATED notStrict
["Use 'bang'. See https://ghc.haskell.org/trac/ghc/wiki/Migration/8.0. ",
"Example usage: 'bang noSourceUnpackedness noSourceStrictness'"] #-}
{-# DEPRECATED unpacked
["Use 'bang'. See https://ghc.haskell.org/trac/ghc/wiki/Migration/8.0. ",
"Example usage: 'bang sourceUnpack sourceStrict'"] #-}
isStrict, notStrict, unpacked :: Q Strict
isStrict = bang noSourceUnpackedness sourceStrict
notStrict = bang noSourceUnpackedness noSourceStrictness
unpacked = bang sourceUnpack sourceStrict
bang :: SourceUnpackednessQ -> SourceStrictnessQ -> BangQ
bang u s = do u' <- u
s' <- s
return (Bang u' s')
bangType :: BangQ -> TypeQ -> BangTypeQ
bangType = liftM2 (,)
varBangType :: Name -> BangTypeQ -> VarBangTypeQ
varBangType v bt = do (b, t) <- bt
return (v, b, t)
{-# DEPRECATED strictType
"As of @template-haskell-2.11.0.0@, 'StrictType' has been replaced by 'BangType'. Please use 'bangType' instead." #-}
strictType :: Q Strict -> TypeQ -> StrictTypeQ
strictType = bangType
{-# DEPRECATED varStrictType
"As of @template-haskell-2.11.0.0@, 'VarStrictType' has been replaced by 'VarBangType'. Please use 'varBangType' instead." #-}
varStrictType :: Name -> StrictTypeQ -> VarStrictTypeQ
varStrictType = varBangType
-- * Type Literals
-- | A type-level numeric literal; 'fail's on negative input since GHC has
-- no negative type-level naturals.
numTyLit :: Integer -> TyLitQ
numTyLit n
  | n >= 0    = return (NumTyLit n)
  | otherwise = fail ("Negative type-level number: " ++ show n)
strTyLit :: String -> TyLitQ
strTyLit s = return (StrTyLit s)
-------------------------------------------------------------------------------
-- * Kind
plainTV :: Name -> TyVarBndr
plainTV = PlainTV
kindedTV :: Name -> Kind -> TyVarBndr
kindedTV = KindedTV
varK :: Name -> Kind
varK = VarT
conK :: Name -> Kind
conK = ConT
tupleK :: Int -> Kind
tupleK = TupleT
arrowK :: Kind
arrowK = ArrowT
listK :: Kind
listK = ListT
appK :: Kind -> Kind -> Kind
appK = AppT
starK :: Kind
starK = StarT
constraintK :: Kind
constraintK = ConstraintT
-------------------------------------------------------------------------------
-- * Type family result
noSig :: FamilyResultSig
noSig = NoSig
kindSig :: Kind -> FamilyResultSig
kindSig = KindSig
tyVarSig :: TyVarBndr -> FamilyResultSig
tyVarSig = TyVarSig
-------------------------------------------------------------------------------
-- * Injectivity annotation
injectivityAnn :: Name -> [Name] -> InjectivityAnn
injectivityAnn = TH.InjectivityAnn
-------------------------------------------------------------------------------
-- * Role
nominalR, representationalR, phantomR, inferR :: Role
nominalR = NominalR
representationalR = RepresentationalR
phantomR = PhantomR
inferR = InferR
-------------------------------------------------------------------------------
-- * Callconv
cCall, stdCall, cApi, prim, javaScript :: Callconv
cCall = CCall
stdCall = StdCall
cApi = CApi
prim = Prim
javaScript = JavaScript
-------------------------------------------------------------------------------
-- * Safety
unsafe, safe, interruptible :: Safety
unsafe = Unsafe
safe = Safe
interruptible = Interruptible
-------------------------------------------------------------------------------
-- * FunDep
funDep :: [Name] -> [Name] -> FunDep
funDep = FunDep
-------------------------------------------------------------------------------
-- * FamFlavour
typeFam, dataFam :: FamFlavour
typeFam = TypeFam
dataFam = DataFam
-------------------------------------------------------------------------------
-- * RuleBndr
ruleVar :: Name -> RuleBndrQ
ruleVar = return . RuleVar
typedRuleVar :: Name -> TypeQ -> RuleBndrQ
typedRuleVar n ty = ty >>= return . TypedRuleVar n
-------------------------------------------------------------------------------
-- * AnnTarget
valueAnnotation :: Name -> AnnTarget
valueAnnotation = ValueAnnotation
typeAnnotation :: Name -> AnnTarget
typeAnnotation = TypeAnnotation
moduleAnnotation :: AnnTarget
moduleAnnotation = ModuleAnnotation
-------------------------------------------------------------------------------
-- * Pattern Synonyms (sub constructs)
unidir, implBidir :: PatSynDirQ
unidir = return Unidir
implBidir = return ImplBidir
explBidir :: [ClauseQ] -> PatSynDirQ
explBidir cls = do
cls' <- sequence cls
return (ExplBidir cls')
prefixPatSyn :: [Name] -> PatSynArgsQ
prefixPatSyn args = return $ PrefixPatSyn args
recordPatSyn :: [Name] -> PatSynArgsQ
recordPatSyn sels = return $ RecordPatSyn sels
infixPatSyn :: Name -> Name -> PatSynArgsQ
infixPatSyn arg1 arg2 = return $ InfixPatSyn arg1 arg2
--------------------------------------------------------------
-- * Useful helper function
-- | Left-nested application: @appsE [f, x, y]@ builds @(f \`appE\` x) \`appE\` y@.
-- Calling it on an empty list is a programmer error.
appsE :: [ExpQ] -> ExpQ
appsE []        = error "appsE []"
appsE (fn:args) = Data.List.foldl' appE fn args
-- | Return the Module at the place of splicing. Can be used as an
-- input for 'reifyModule'.
thisModule :: Q Module
thisModule = do
  loc <- location
  return (Module (mkPkgName (loc_package loc)) (mkModName (loc_module loc)))
| snoyberg/ghc | libraries/template-haskell/Language/Haskell/TH/Lib.hs | bsd-3-clause | 26,397 | 0 | 13 | 6,492 | 8,587 | 4,412 | 4,175 | -1 | -1 |
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# LANGUAGE OverloadedStrings, ForeignFunctionInterface, CPP, MagicHash,
GeneralizedNewtypeDeriving #-}
-- | JSString standard functions, to make them a more viable alternative to
-- the horribly inefficient standard Strings.
--
-- Many functions have linear time complexity due to JavaScript engines not
-- implementing slicing, etc. in constant time.
--
-- All functions are supported on both client and server, with the exception
-- of 'match', 'matches', 'regex' and 'replace', which are wrappers on top of
-- JavaScript's native regular expressions and thus only supported on the
-- client.
module Haste.JSString (
-- * Building JSStrings
empty, singleton, pack, cons, snoc, append, replicate,
-- * Deconstructing JSStrings
unpack, head, last, tail, drop, take, init, splitAt,
-- * Examining JSStrings
null, length, any, all,
-- * Modifying JSStrings
map, reverse, intercalate, foldl', foldr, concat, concatMap,
-- * Regular expressions (client-side only)
RegEx, match, matches, regex, replace
) where
import qualified Data.List
import Prelude hiding (foldr, concat, concatMap, reverse, map, all, any,
length, null, splitAt, init, take, drop, tail, head,
last, replicate)
import Data.String
import Haste.Prim
import Haste.Prim.Foreign
#ifdef __HASTE__
import GHC.Prim
import System.IO.Unsafe
{-# INLINE d2c #-}
d2c :: Double -> Char
d2c d = unsafeCoerce# d
_jss_singleton :: Char -> IO JSString
_jss_singleton = ffi "String.fromCharCode"
_jss_cons :: Char -> JSString -> IO JSString
_jss_cons = ffi "(function(c,s){return String.fromCharCode(c)+s;})"
_jss_snoc :: JSString -> Char -> IO JSString
_jss_snoc = ffi "(function(s,c){return s+String.fromCharCode(c);})"
_jss_append :: JSString -> JSString -> IO JSString
_jss_append = ffi "(function(a,b){return a+b;})"
_jss_len :: JSString -> IO Int
_jss_len = ffi "(function(s){return s.length;})"
_jss_index :: JSString -> Int -> IO Double
_jss_index = ffi "(function(s,i){return s.charCodeAt(i);})"
_jss_substr :: JSString -> Int -> IO JSString
_jss_substr = ffi "(function(s,x){return s.substr(x);})"
_jss_take :: Int -> JSString -> IO JSString
_jss_take = ffi "(function(n,s){return s.substr(0,n);})"
_jss_rev :: JSString -> IO JSString
_jss_rev = ffi "(function(s){return s.split('').reverse().join('');})"
_jss_re_match :: JSString -> RegEx -> IO Bool
_jss_re_match = ffi "(function(s,re){return s.search(re)>=0;})"
_jss_re_compile :: JSString -> JSString -> IO RegEx
_jss_re_compile = ffi "(function(re,fs){return new RegExp(re,fs);})"
_jss_re_replace :: JSString -> RegEx -> JSString -> IO JSString
_jss_re_replace = ffi "(function(s,re,rep){return s.replace(re,rep);})"
_jss_re_find :: RegEx -> JSString -> IO [JSString]
_jss_re_find = ffi "(function(re,s) {\
var a = s.match(re);\
return a ? a : [];})"
{-# INLINE _jss_map #-}
_jss_map :: (Char -> Char) -> JSString -> JSString
_jss_map f s = veryUnsafePerformIO $ cmap_js (_jss_singleton . f) s
{-# INLINE _jss_cmap #-}
_jss_cmap :: (Char -> JSString) -> JSString -> JSString
_jss_cmap f s = veryUnsafePerformIO $ cmap_js (return . f) s
cmap_js :: (Char -> IO JSString) -> JSString -> IO JSString
cmap_js = ffi "(function(f,s){\
var s2 = '';\
for(var i in s) {\
s2 += f(s.charCodeAt(i));\
}\
return s2;})"
{-# INLINE _jss_foldl #-}
_jss_foldl :: (ToAny a, FromAny a) => (a -> Char -> a) -> a -> JSString -> a
_jss_foldl f x s = fromOpaque . unsafePerformIO $ do
foldl_js (\a c -> toOpaque $ f (fromOpaque a) c) (toOpaque x) s
foldl_js :: (Opaque a -> Char -> Opaque a)
-> Opaque a
-> JSString
-> IO (Opaque a)
foldl_js = ffi "(function(f,x,s){\
for(var i in s) {\
x = f(x,s.charCodeAt(i));\
}\
return x;})"
{-# INLINE _jss_foldr #-}
_jss_foldr :: (ToAny a, FromAny a) => (Char -> a -> a) -> a -> JSString -> a
_jss_foldr f x s = fromOpaque . unsafePerformIO $ do
foldr_js (\c -> toOpaque . f c . fromOpaque) (toOpaque x) s
foldr_js :: (Char -> Opaque a -> Opaque a)
-> Opaque a
-> JSString
-> IO (Opaque a)
foldr_js = ffi "(function(f,x,s){\
for(var i = s.length-1; i >= 0; --i) {\
x = f(s.charCodeAt(i),x);\
}\
return x;})"
#else
{-# INLINE d2c #-}
d2c :: Char -> Char
d2c = id
_jss_singleton :: Char -> IO JSString
_jss_singleton c = return $ toJSStr [c]
_jss_cons :: Char -> JSString -> IO JSString
_jss_cons c s = return $ toJSStr (c : fromJSStr s)
_jss_snoc :: JSString -> Char -> IO JSString
_jss_snoc s c = return $ toJSStr (fromJSStr s ++ [c])
_jss_append :: JSString -> JSString -> IO JSString
_jss_append a b = return $ catJSStr "" [a, b]
_jss_len :: JSString -> IO Int
_jss_len s = return $ Data.List.length $ fromJSStr s
_jss_index :: JSString -> Int -> IO Char
_jss_index s n = return $ fromJSStr s !! n
_jss_substr :: JSString -> Int -> IO JSString
_jss_substr s n = return $ toJSStr $ Data.List.drop n $ fromJSStr s
_jss_take :: Int -> JSString -> IO JSString
_jss_take n = return . toJSStr . Data.List.take n . fromJSStr
_jss_map :: (Char -> Char) -> JSString -> JSString
_jss_map f = toJSStr . Data.List.map f . fromJSStr
_jss_cmap :: (Char -> JSString) -> JSString -> JSString
_jss_cmap f =
toJSStr . Data.List.concat . Data.List.map (fromJSStr . f) . fromJSStr
_jss_rev :: JSString -> IO JSString
_jss_rev = return . toJSStr . Data.List.reverse . fromJSStr
_jss_foldl :: (a -> Char -> a) -> a -> JSString -> a
_jss_foldl f x = Data.List.foldl' f x . fromJSStr
_jss_foldr :: (Char -> a -> a) -> a -> JSString -> a
_jss_foldr f x = Data.List.foldr f x . fromJSStr
_jss_re_compile :: JSString -> JSString -> IO RegEx
_jss_re_compile _ _ =
error "Regular expressions are only supported client-side!"
_jss_re_match :: JSString -> RegEx -> IO Bool
_jss_re_match _ _ =
error "Regular expressions are only supported client-side!"
_jss_re_replace :: JSString -> RegEx -> JSString -> IO JSString
_jss_re_replace _ _ _ =
error "Regular expressions are only supported client-side!"
_jss_re_find :: RegEx -> JSString -> IO [JSString]
_jss_re_find _ _ =
error "Regular expressions are only supported client-side!"
#endif
-- | A regular expression. May be used to match and replace JSStrings.
--   Wraps a raw JavaScript RegExp object.
newtype RegEx = RegEx JSAny
  deriving (ToAny, FromAny)
-- | String literals compile directly to regular expressions, with no
--   flags.  NOTE(review): compilation is only implemented client-side;
--   the server-side '_jss_re_compile' calls 'error' when forced.
instance IsString RegEx where
  fromString s = veryUnsafePerformIO $ _jss_re_compile (fromString s) ""
-- | O(1) The empty JSString.
empty :: JSString
empty = ""
-- | O(1) JSString consisting of a single character.
singleton :: Char -> JSString
singleton = veryUnsafePerformIO . _jss_singleton
-- | O(n) Convert a list of Char into a JSString.
pack :: [Char] -> JSString
pack = toJSStr
-- | O(n) Convert a JSString to a list of Char.
unpack :: JSString -> [Char]
unpack = fromJSStr
infixr 5 `cons`
-- | O(n) Prepend a character to a JSString.
cons :: Char -> JSString -> JSString
cons c s = veryUnsafePerformIO $ _jss_cons c s
infixl 5 `snoc`
-- | O(n) Append a character to a JSString.
snoc :: JSString -> Char -> JSString
snoc s c = veryUnsafePerformIO $ _jss_snoc s c
-- | O(n) Append two JSStrings.
append :: JSString -> JSString -> JSString
append a b = veryUnsafePerformIO $ _jss_append a b
-- | O(1) Extract the first element of a non-empty JSString.
--   Calls 'error' on the empty string.
head :: JSString -> Char
head s =
#ifdef __HASTE__
  -- Client side: charCodeAt out of range yields NaN, which is how the
  -- empty string is detected here.
  case veryUnsafePerformIO $ _jss_index s 0 of
    c | isNaN c -> error "Haste.JSString.head: empty JSString"
      | otherwise -> d2c c -- Double/Int/Char share representation.
#else
  Data.List.head $ fromJSStr s
#endif
-- | O(1) Extract the last element of a non-empty JSString.
--   Calls 'error' when the string is empty.
last :: JSString -> Char
last s =
  case veryUnsafePerformIO $ _jss_len s of
    -- Fixed: the error message previously blamed "head".
    0 -> error "Haste.JSString.last: empty JSString"
    n -> d2c (veryUnsafePerformIO $ _jss_index s (n-1))
-- | O(n) All elements but the first of a JSString. Returns an empty JSString
-- if the given JSString is empty.
tail :: JSString -> JSString
tail s = veryUnsafePerformIO $ _jss_substr s 1
-- | O(n) Drop 'n' elements from the given JSString.
drop :: Int -> JSString -> JSString
drop n s = veryUnsafePerformIO $ _jss_substr s (max 0 n)
-- | O(n) Take 'n' elements from the given JSString.
take :: Int -> JSString -> JSString
take n s = veryUnsafePerformIO $ _jss_take n s
-- | O(n) All elements but the last of a JSString. Returns an empty JSString
-- if the given JSString is empty.
init :: JSString -> JSString
init s = veryUnsafePerformIO $ _jss_take (veryUnsafePerformIO (_jss_len s)-1) s
-- | O(1) Test whether a JSString is empty.
null :: JSString -> Bool
null s = veryUnsafePerformIO (_jss_len s) == 0
-- | O(1) Get the length of a JSString as an Int.
length :: JSString -> Int
length = veryUnsafePerformIO . _jss_len
-- | O(n) Map a function over the given JSString.
map :: (Char -> Char) -> JSString -> JSString
map f s = _jss_map f s
-- | O(n) reverse a JSString.
reverse :: JSString -> JSString
reverse = veryUnsafePerformIO . _jss_rev
-- | O(n) Join a list of JSStrings, with a specified separator. Equivalent to
-- 'String.join'.
intercalate :: JSString -> [JSString] -> JSString
intercalate = catJSStr
-- | O(n) Left fold over a JSString.
foldl' :: (ToAny a, FromAny a) => (a -> Char -> a) -> a -> JSString -> a
foldl' = _jss_foldl
-- | O(n) Right fold over a JSString.
foldr :: (ToAny a, FromAny a) => (Char -> a -> a) -> a -> JSString -> a
foldr = _jss_foldr
-- | O(n) Concatenate a list of JSStrings.
concat :: [JSString] -> JSString
concat = catJSStr ""
-- | O(n) Map a function over a JSString, then concatenate the results.
-- Note that this function is actually faster than 'map' in most cases.
concatMap :: (Char -> JSString) -> JSString -> JSString
concatMap = _jss_cmap
-- | O(n) Determines whether any character in the string satisfies the given
--   predicate.  False for the empty string.
--   Note: unlike 'Data.List.any' this always scans the whole string,
--   since it is built on a strict left fold.
any :: (Char -> Bool) -> JSString -> Bool
any p = Haste.JSString.foldl' (\a x -> a || p x) False
-- | O(n) Determines whether all characters in the string satisfy the given
--   predicate.  Vacuously True for the empty string, matching
--   'Data.List.all'.
all :: (Char -> Bool) -> JSString -> Bool
-- Fixed: the fold must be seeded with True.  With the previous seed of
-- False, @False && p x@ could never become True again, so 'all'
-- returned False for every input, including the empty string.
all p = Haste.JSString.foldl' (\a x -> a && p x) True
-- | O(n) Create a JSString containing 'n' instances of a single character.
replicate :: Int -> Char -> JSString
replicate n c = Haste.JSString.pack $ Data.List.replicate n c
-- | O(n) Equivalent to (take n xs, drop n xs).
splitAt :: Int -> JSString -> (JSString, JSString)
splitAt n s = (Haste.JSString.take n s, Haste.JSString.drop n s)
-- | O(n) Determines whether the given JSString matches the given regular
-- expression or not.
matches :: JSString -> RegEx -> Bool
matches s re = veryUnsafePerformIO $ _jss_re_match s re
-- | O(n) Find all strings corresponding to the given regular expression.
match :: RegEx -> JSString -> [JSString]
match re s = veryUnsafePerformIO $ _jss_re_find re s
-- | O(n) Compile a regular expression and an (optionally empty) list of flags
-- into a 'RegEx' which can be used to match, replace, etc. on JSStrings.
--
-- The regular expression and flags are passed verbatim to the browser's
-- RegEx constructor, meaning that the syntax is the same as when using
-- regular expressions in raw JavaScript.
regex :: JSString -- ^ Regular expression.
-> JSString -- ^ Potential flags.
-> RegEx
regex re flags = veryUnsafePerformIO $ _jss_re_compile re flags
-- | O(n) String substitution using regular expressions.
replace :: JSString -- ^ String perform substitution on.
-> RegEx -- ^ Regular expression to match.
-> JSString -- ^ Replacement string.
-> JSString
replace s re rep = veryUnsafePerformIO $ _jss_re_replace s re rep
| nyson/haste-compiler | libraries/haste-lib/src/Haste/JSString.hs | bsd-3-clause | 11,643 | 288 | 14 | 2,241 | 2,939 | 1,609 | 1,330 | 133 | 2 |
module QuickCheckTesting where
import Test.QuickCheck
import Data.Ratio
-- | First element of the list, or 0 for the empty list.
someFun :: [Int] -> Int
someFun (x:_) = x
someFun []    = 0
-- | The symmetry transformation on sequences: mirror reversal.
symmetry :: [a] -> [a]
symmetry xs = reverse xs
-- | True when @xs@ is the mirror image of @ys@.
symRel :: Eq a => [a] -> [a] -> Bool
symRel xs ys = reverse ys == xs
-- test: any two symmetric sequences give the same result under someFun
-- | Unconditional form: compare @f@ on a list and on its mirror image.
propSym :: Eq b => ([a]->b) -> [a] -> Bool
propSym f xs = f xs == f (symmetry xs)
-- | Conditional form: (==>) filters random pairs down to symmetric
--   ones.  NOTE(review): almost no random pair satisfies 'symRel', so
--   most generated cases are discarded.
propSym' :: (Eq a, Eq b) => ([a]->b) -> [a] -> [a] -> Property
propSym' f xs ys = symRel xs ys ==> f xs == f ys
-- | Generator-based form: only symmetric pairs are ever produced, so
--   no test cases are wasted.
propSym'' :: (Arbitrary a, Show a, Eq b) =>
             ([a] -> b) -> Property
propSym'' f = forAll (genSymPairs arbitrary) $ \(xs, ys) ->
  f xs == f ys
-- | As 'propSym''', but additionally shrinks counterexamples.
propSym3 :: (Arbitrary a, Show a, Eq b) =>
            ([a] -> b) -> Property
propSym3 f = forAllShrink (genSymPairs arbitrary) shrinkSymPairs $ \(xs, ys) ->
  f xs == f ys
-- | Shrink the first component and re-mirror it, keeping the pair
--   symmetric by construction.  (The comprehension's @xs@ shadows the
--   pattern-bound @xs@.)
shrinkSymPairs :: Arbitrary a => ([a], [a]) -> [([a], [a])]
shrinkSymPairs (xs, _ys) = [ (xs, symmetry xs) | xs <- shrink xs ]
-- | Generate a pair satisfying 'symRel' by construction.
genSymPairs :: Gen a -> Gen ([a], [a])
genSymPairs g = do
  xs <- listOf g
  return (xs, symmetry xs)
-- | Run the conditional, generator, and shrinking variants on 'someFun'.
main = do
  quickCheck (propSym' someFun)
  quickCheck (propSym'' someFun)
  quickCheck (propSym3 someFun)
----------------------------------------------------------------
-- | Tests whether @ys@ is an affine image of @xs@.
affine :: [Rational] -> [Rational] -> Bool
-- exists a, b. map (\x -> a + b*x) xs == ys
affine [] [] = True
affine [_x] [_y] = True
-- NOTE(review): 'findCoeff' is unimplemented below, so this clause
-- (any pair with two or more elements, or mismatched short lists)
-- currently calls 'error'.
affine xs ys = and (zipWith (\x y -> a + b*x == y) xs ys)
  where (a, b) = findCoeff xs ys
-- | Recover the affine coefficients (a, b) relating two sequences.
--   Unimplemented: always raises an error.
findCoeff :: [Rational] -> [Rational] -> (Rational, Rational)
findCoeff xs ys = error "TODO"
----------------
-- | Two sequences are locally order-preserving when their adjacent-pair
-- comparison traces agree position by position.
localOrderPreserving :: Ord a => [a] -> [a] -> Bool
localOrderPreserving xs ys = localCompare xs == localCompare ys

-- Comparison trace: compare each element with its successor.
localCompare xs = zipWith compare xs (drop 1 xs)

-- | Count chunk-aligned occurrences of the fall-then-rise pattern
-- @[GT,LT]@ in the comparison trace of a sequence.
countSummits :: Ord a => [a] -> Int
countSummits = countMatches [GT, LT] . localCompare

-- Count matches of @pat@, advancing by the pattern length each step
-- (so overlapping occurrences are assumed not to exist).
countMatches :: Eq a => [a] -> [a] -> Int
countMatches pat = go
  where
    n = length pat
    go xs
      | length xs < n = 0
      | otherwise     = here + go rest
      where
        (start, rest) = splitAt n xs
        here          = if start == pat then 1 else 0
-- | Any function of the comparison trace must agree on locally
--   order-preserving inputs.
prop :: (Ord a, Eq b) => ([a]->b) -> [a] -> [a] -> Property
prop f xs ys = localOrderPreserving xs ys ==> f xs == f ys
-- | 'countSummits' is invariant under strictly increasing affine maps
--   (positive slope @b@ preserves all adjacent comparisons).
myProp :: [Int] -> Property
myProp xs = forAll arbitrary $ \a ->
            forAll arbitrary $ \(Positive b) ->
            -- collect b $
            countSummits xs == countSummits (map (\x -> a + b*x) xs)
-- | 'prop' specialised so it can be applied to 'countSummits'.
propMono :: ([Int]->Int) -> [Int] -> [Int] -> Property
propMono = prop
-- | Entry point for this section's checks.
test = do
  quickCheck myProp
  -- quickCheck (propMono countSummits) -- too rarely localOrderPreserving
----------------
{-
Notes: Constraint solving may come in handy for generation of test
cases satisfying certain invariants. See the work of Arnaud Gotlieb:
http://people.rennes.inria.fr/Arnaud.Gotlieb/
-}
| GRACeFUL-project/GRACe | examples/QuickCheckTesting.hs | bsd-3-clause | 2,903 | 0 | 16 | 713 | 1,188 | 624 | 564 | 59 | 2 |
{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies, FlexibleInstances,
UndecidableInstances, FlexibleContexts, EmptyDataDecls, ScopedTypeVariables,
TypeOperators, TypeSynonymInstances, TypeFamilies #-}
module Methods where
import Records
import References
{-
A method takes an @a@ and returns a reference to a @b@.
-}
type Method s a b = (a -> Reference s b)
{-
A record equipped with methods must be defined recursively so that it
does not give rise to an infinite type; indeed a record would risk
having the type:
  type MyObject = ... .* Method MyObject a b .* ...
which is not acceptable; for this reason we define:
  type MyObject k = ... .* Method k a b .* ...
where k will be instantiated to a suitable container:
  data RecMyObject = RecMyObject (MyObject RecMyObject)
-}
class Recursive s where
    -- | The recursive container for @s@.
    type Rec s :: *
    -- | Wrap a record into its container.
    cons :: s -> Rec s
    -- | Unwrap the container back to the record.
    elim :: Rec s -> s
{-
Since methods return the record's recursive container rather than the
record itself, we define a function that takes a method written in the
natural way (i.e. in terms of the actual record) and converts it to the
form we need.
-}
mk_method :: forall a b s . (Recursive s) => Method s a b -> Method (Rec s) a b
mk_method m =
  \(x :: a) ->
    let (Reference getter setter) = m x
    in
      -- Lift both halves of the reference through elim/cons so they
      -- operate on the recursive container Rec s instead of s.
      (Reference (\(rs :: Rec s) ->
                    let rs' = elim rs :: s
                        (v,rs'') = getter rs' :: (b,s)
                    in (v,cons rs'') :: (b,Rec s))
                 -- NOTE(review): the setter first threads the state
                 -- through the getter (discarding its value v) before
                 -- applying the underlying setter -- confirm this
                 -- sequencing is intended.
                 (\(rs :: Rec s) -> \(x :: b) ->
                    let rs' = elim rs :: s
                        (v,rs'') = getter rs' :: (b,s)
                        ((),rs''') = setter rs'' x :: ((),s)
                    in ((),cons rs''')))
{-
We provide a selection operator for methods: @r <<- n@ selects the
method stored in field @n@ of the record referred to by @r@, yielding a
function from the method's argument to a reference in the outer state.
-}
(<<-) :: forall a s b c n . (CNum n, Recursive a, HasField n (Method (Rec a) b c) a) => (Reference s a) -> n -> (b -> Reference s c)
(<<-) (Reference get set) n =
  \(x :: b) ->
    from_constant( Constant(\(s :: s)->
      -- Fetch the record, pull the method out of field n, run it on the
      -- record's recursive container, then write the (possibly updated)
      -- record back into the outer state.
      let (v,s') = get s :: (a,s)
          m = v .! n
          ry = m x :: Reference (Rec a) c
          (y,v') = getter ry (cons v) :: (c,Rec a)
          v'' = elim v'
          ((),s'') = set s' v''
      in (y,s'')))
| vs-team/Papers | Before Giuseppe's PhD/Monads/ObjectiveMonad/MonadicObjects/trunk/Src/Methods.hs | mit | 2,540 | 0 | 18 | 986 | 634 | 348 | 286 | 35 | 1 |
import Test.HUnit (Assertion, (@=?), runTestTT, Test(..), Counts(..))
import System.Exit (ExitCode(..), exitWith)
import Roman (numerals)
-- | Run a test action and exit with a non-zero status code when any
--   test failed or errored, so CI can detect failure.
exitProperly :: IO Counts -> IO ()
exitProperly m = do
  counts <- m
  exitWith $ if failures counts /= 0 || errors counts /= 0 then ExitFailure 1 else ExitSuccess
-- | Build a labelled HUnit test case from an assertion.
testCase :: String -> Assertion -> Test
testCase label assertion = TestLabel label (TestCase assertion)
main :: IO ()
main = exitProperly $ runTestTT $ TestList
[ TestList numeralsTests ]
numeralsTests :: [Test]
numeralsTests = map TestCase $
[ "I" @=? numerals 1
, "II" @=? numerals 2
, "III" @=? numerals 3
, "IV" @=? numerals 4
, "V" @=? numerals 5
, "VI" @=? numerals 6
, "IX" @=? numerals 9
, "XXVII" @=? numerals 27
, "XLVIII" @=? numerals 48
, "LIX" @=? numerals 59
, "XCIII" @=? numerals 93
, "CXLI" @=? numerals 141
, "CLXIII" @=? numerals 163
, "CDII" @=? numerals 402
, "DLXXV" @=? numerals 575
, "CMXI" @=? numerals 911
, "MXXIV" @=? numerals 1024
, "MMM" @=? numerals 3000
]
| pminten/xhaskell | roman-numerals/roman-numerals_test.hs | mit | 1,039 | 0 | 12 | 227 | 392 | 204 | 188 | 32 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.IAM.GetPolicy
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Retrieves information about the specified managed policy, including the
-- policy's default version and the total number of users, groups, and roles
-- that the policy is attached to. For a list of the specific users, groups, and
-- roles that the policy is attached to, use the 'ListEntitiesForPolicy' API. This
-- API returns metadata about the policy. To retrieve the policy document for a
-- specific version of the policy, use 'GetPolicyVersion'.
--
-- This API retrieves information about managed policies. To retrieve
-- information about an inline policy that is embedded with a user, group, or
-- role, use the 'GetUserPolicy', 'GetGroupPolicy', or 'GetRolePolicy' API.
--
-- For more information about policies, refer to <http://docs.aws.amazon.com/IAM/latest/UserGuide/policies-managed-vs-inline.html Managed Policies and InlinePolicies> in the /Using IAM/ guide.
--
-- <http://docs.aws.amazon.com/IAM/latest/APIReference/API_GetPolicy.html>
module Network.AWS.IAM.GetPolicy
(
-- * Request
GetPolicy
-- ** Request constructor
, getPolicy
-- ** Request lenses
, gpPolicyArn
-- * Response
, GetPolicyResponse
-- ** Response constructor
, getPolicyResponse
-- ** Response lenses
, gprPolicy
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.IAM.Types
import qualified GHC.Exts
newtype GetPolicy = GetPolicy
{ _gpPolicyArn :: Text
} deriving (Eq, Ord, Read, Show, Monoid, IsString)
-- | 'GetPolicy' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'gpPolicyArn' @::@ 'Text'
--
getPolicy :: Text -- ^ 'gpPolicyArn'
-> GetPolicy
getPolicy p1 = GetPolicy
{ _gpPolicyArn = p1
}
gpPolicyArn :: Lens' GetPolicy Text
gpPolicyArn = lens _gpPolicyArn (\s a -> s { _gpPolicyArn = a })
newtype GetPolicyResponse = GetPolicyResponse
{ _gprPolicy :: Maybe Policy
} deriving (Eq, Read, Show)
-- | 'GetPolicyResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'gprPolicy' @::@ 'Maybe' 'Policy'
--
getPolicyResponse :: GetPolicyResponse
getPolicyResponse = GetPolicyResponse
{ _gprPolicy = Nothing
}
-- | Information about the policy.
gprPolicy :: Lens' GetPolicyResponse (Maybe Policy)
gprPolicy = lens _gprPolicy (\s a -> s { _gprPolicy = a })
-- Wire format: all IAM query actions post to the service root.
instance ToPath GetPolicy where
    toPath = const "/"
-- | Serialise the request's single parameter into the query string.
instance ToQuery GetPolicy where
    toQuery GetPolicy{..} = mconcat
        [ "PolicyArn" =? _gpPolicyArn
        ]
instance ToHeaders GetPolicy
-- | Ties the request to the IAM service and its response type.
instance AWSRequest GetPolicy where
    type Sv GetPolicy = IAM
    type Rs GetPolicy = GetPolicyResponse
    request  = post "GetPolicy"
    response = xmlResponse
-- | Parse the optional Policy element out of the GetPolicyResult node.
instance FromXML GetPolicyResponse where
    parseXML = withElement "GetPolicyResult" $ \x -> GetPolicyResponse
        <$> x .@? "Policy"
| romanb/amazonka | amazonka-iam/gen/Network/AWS/IAM/GetPolicy.hs | mpl-2.0 | 3,899 | 0 | 9 | 834 | 432 | 268 | 164 | 53 | 1 |
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances, PatternGuards #-}
{- A high level language of tactic composition, for building
elaborators from a high level language into the core theory.
This is our interface to proof construction, rather than
ProofState, because this gives us a language to build derived
tactics out of the primitives.
-}
module Idris.Core.Elaborate(module Idris.Core.Elaborate,
module Idris.Core.ProofState) where
import Idris.Core.ProofState
import Idris.Core.ProofTerm(bound_in, getProofTerm, mkProofTerm, bound_in_term,
refocus)
import Idris.Core.TT
import Idris.Core.Evaluate
import Idris.Core.Typecheck
import Idris.Core.Unify
import Idris.Core.DeepSeq
import Control.DeepSeq
import Control.Monad.State.Strict
import Data.Char
import Data.List
import Debug.Trace
import Util.Pretty hiding (fill)
-- | Elaborator state: the proof state paired with user-defined
--   auxiliary state, the accumulated log (appended to by 'elog'), and
--   an optional saved snapshot used by 'saveState'/'loadState'.
data ElabState aux = ES (ProofState, aux) String (Maybe (ElabState aux))
  deriving Show
-- | Elaboration monad: state over the type-checking monad 'TC'.
type Elab' aux a = StateT (ElabState aux) TC a
-- | Elaboration with no auxiliary state.
type Elab a = Elab' () a
-- | Project the proof state out of an elaborator state.
proof :: ElabState aux -> ProofState
proof (ES (p, _) _ _) = p
-- Insert a 'proofSearchFail' error if necessary to shortcut any further
-- fruitless searching
-- | Run a sub-elaborator; on success commit its state, on failure wrap
--   the error in 'ProofSearchFail' so callers stop searching.
proofFail :: Elab' aux a -> Elab' aux a
proofFail e = do s <- get
                 case runStateT e s of
                      OK (a, s') -> do put s'
                                       return $! a
                      Error err -> lift $ Error (ProofSearchFail err)
explicit :: Name -> Elab' aux ()
explicit n = do ES (p, a) s m <- get
let p' = p { dontunify = n : dontunify p }
put (ES (p', a) s m)
-- Add a name that's okay to use in proof search (typically either because
-- it was given explicitly on the lhs, or intrduced as an explicit lambda
-- or let binding)
addPSname :: Name -> Elab' aux ()
addPSname n@(UN _)
= do ES (p, a) s m <- get
let p' = p { psnames = n : psnames p }
put (ES (p', a) s m)
addPSname _ = return () -- can only use user given names
getPSnames :: Elab' aux [Name]
getPSnames = do ES (p, a) s m <- get
return (psnames p)
saveState :: Elab' aux ()
saveState = do e@(ES p s _) <- get
put (ES p s (Just e))
loadState :: Elab' aux ()
loadState = do (ES p s e) <- get
case e of
Just st -> put st
_ -> lift $ Error . Msg $ "Nothing to undo"
-- | Derive a fresh machine-generated name from a hint, bumping the
--   proof state's name counter.  The +100/+500 offsets keep generated
--   indices clear of small hand-written MN indices; 99999 is treated as
--   a sentinel that gets an extra-large offset.
getNameFrom :: Name -> Elab' aux Name
getNameFrom n = do (ES (p, a) s e) <- get
                   let next = nextname p
                   let p' = p { nextname = next + 1 }
                   put (ES (p', a) s e)
                   let n' = case n of
                        UN x -> MN (next+100) x
                        MN i x -> if i == 99999
                                     then MN (next+500) x
                                     else MN (next+100) x
                        -- NOTE(review): this 's' shadows the log string
                        -- bound above and is unused here.
                        NS (UN x) s -> MN (next+100) x
                        _ -> n
                   return $! n'
setNextName :: Elab' aux ()
setNextName = do env <- get_env
ES (p, a) s e <- get
let pargs = map fst (getArgTys (ptype p))
initNextNameFrom (pargs ++ map fst env)
initNextNameFrom :: [Name] -> Elab' aux ()
initNextNameFrom ns = do ES (p, a) s e <- get
let n' = maxName (nextname p) ns
put (ES (p { nextname = n' }, a) s e)
where
maxName m ((MN i _) : xs) = maxName (max m i) xs
maxName m (_ : xs) = maxName m xs
maxName m [] = m + 1
-- | Run a sub-elaborator; on failure, tag the error with what was being
--   elaborated ('Elaborating thing n'), pushing the tag underneath any
--   'At' location and 'ProofSearchFail' wrappers.
errAt :: String -> Name -> Elab' aux a -> Elab' aux a
errAt thing n elab = do s <- get
                        case runStateT elab s of
                             OK (a, s') -> do put s'
                                              return $! a
                             Error e -> lift $ Error (rewriteErr e)
  where
    rewriteErr (At f e) = At f (rewriteErr e)
    rewriteErr (ProofSearchFail e) = ProofSearchFail (rewriteErr e)
    rewriteErr e = Elaborating thing n e
-- | Run a sub-elaborator, attaching source location @f@ to any error
--   that does not already carry one.  (The 'f' bound in the 'At'
--   patterns shadows the argument, so existing locations are kept.)
erun :: FC -> Elab' aux a -> Elab' aux a
erun f elab = do s <- get
                 case runStateT elab s of
                      OK (a, s') -> do put s'
                                       return $! a
                      Error (ProofSearchFail (At f e))
                          -> lift $ Error (ProofSearchFail (At f e))
                      Error (At f e) -> lift $ Error (At f e)
                      Error e -> lift $ Error (At f e)
runElab :: aux -> Elab' aux a -> ProofState -> TC (a, ElabState aux)
runElab a e ps = runStateT e (ES (ps, a) "" Nothing)
execElab :: aux -> Elab' aux a -> ProofState -> TC (ElabState aux)
execElab a e ps = execStateT e (ES (ps, a) "" Nothing)
initElaborator :: Name -> Context -> Ctxt TypeInfo -> Type -> ProofState
initElaborator = newProof
elaborate :: Context -> Ctxt TypeInfo -> Name -> Type -> aux -> Elab' aux a -> TC (a, String)
elaborate ctxt datatypes n ty d elab = do let ps = initElaborator n ctxt datatypes ty
(a, ES ps' str _) <- runElab d elab ps
return $! (a, str)
-- | Modify the auxiliary state
updateAux :: (aux -> aux) -> Elab' aux ()
updateAux f = do ES (ps, a) l p <- get
put (ES (ps, f a) l p)
-- | Get the auxiliary state
getAux :: Elab' aux aux
getAux = do ES (ps, a) _ _ <- get
return $! a
-- | Set whether to show the unifier log
unifyLog :: Bool -> Elab' aux ()
unifyLog log = do ES (ps, a) l p <- get
put (ES (ps { unifylog = log }, a) l p)
getUnifyLog :: Elab' aux Bool
getUnifyLog = do ES (ps, a) l p <- get
return (unifylog ps)
-- | Process a tactic within the current elaborator state
processTactic' :: Tactic -> Elab' aux ()
processTactic' t = do ES (p, a) logs prev <- get
(p', log) <- lift $ processTactic t p
put (ES (p', a) (logs ++ log) prev)
return $! ()
updatePS :: (ProofState -> ProofState) -> Elab' aux ()
updatePS f = do ES (ps, a) logs prev <- get
put $ ES (f ps, a) logs prev
now_elaborating :: FC -> Name -> Name -> Elab' aux ()
now_elaborating fc f a = updatePS (nowElaboratingPS fc f a)
done_elaborating_app :: Name -> Elab' aux ()
done_elaborating_app f = updatePS (doneElaboratingAppPS f)
done_elaborating_arg :: Name -> Name -> Elab' aux ()
done_elaborating_arg f a = updatePS (doneElaboratingArgPS f a)
elaborating_app :: Elab' aux [(FC, Name, Name)]
elaborating_app = do ES (ps, _) _ _ <- get
return $ map (\ (FailContext x y z) -> (x, y, z))
(while_elaborating ps)
-- Some handy gadgets for pulling out bits of state
-- | Get the global context
get_context :: Elab' aux Context
get_context = do ES p _ _ <- get
return $! (context (fst p))
-- | Update the context.
-- (should only be used for adding temporary definitions or all sorts of
-- stuff could go wrong)
set_context :: Context -> Elab' aux ()
set_context ctxt = do ES (p, a) logs prev <- get
put (ES (p { context = ctxt }, a) logs prev)
get_datatypes :: Elab' aux (Ctxt TypeInfo)
get_datatypes = do ES p _ _ <- get
return $! (datatypes (fst p))
set_datatypes :: Ctxt TypeInfo -> Elab' aux ()
set_datatypes ds = do ES (p, a) logs prev <- get
put (ES (p { datatypes = ds }, a) logs prev)
-- | get the proof term
get_term :: Elab' aux Term
get_term = do ES p _ _ <- get
return $! (getProofTerm (pterm (fst p)))
-- | modify the proof term
update_term :: (Term -> Term) -> Elab' aux ()
update_term f = do ES (p,a) logs prev <- get
let p' = p { pterm = mkProofTerm (f (getProofTerm (pterm p))) }
put (ES (p', a) logs prev)
-- | get the local context at the currently in focus hole
get_env :: Elab' aux Env
get_env = do ES p _ _ <- get
lift $ envAtFocus (fst p)
get_inj :: Elab' aux [Name]
get_inj = do ES p _ _ <- get
return $! (injective (fst p))
get_holes :: Elab' aux [Name]
get_holes = do ES p _ _ <- get
return $! (holes (fst p))
get_usedns :: Elab' aux [Name]
get_usedns = do ES p _ _ <- get
let bs = bound_in (pterm (fst p)) ++
bound_in_term (ptype (fst p))
let nouse = holes (fst p) ++ bs ++ dontunify (fst p) ++ usedns (fst p)
return $! nouse
get_probs :: Elab' aux Fails
get_probs = do ES p _ _ <- get
return $! (problems (fst p))
-- | Return recently solved names (that is, the names solved since the
-- last call to get_recents)
get_recents :: Elab' aux [Name]
get_recents = do ES (p, a) l prev <- get
put (ES (p { recents = [] }, a) l prev)
return (recents p)
-- | get the current goal type
goal :: Elab' aux Type
goal = do ES p _ _ <- get
b <- lift $ goalAtFocus (fst p)
return $! (binderTy b)
is_guess :: Elab' aux Bool
is_guess = do ES p _ _ <- get
b <- lift $ goalAtFocus (fst p)
case b of
Guess _ _ -> return True
_ -> return False
-- | Get the guess at the current hole, if there is one
get_guess :: Elab' aux Term
get_guess = do ES p _ _ <- get
b <- lift $ goalAtFocus (fst p)
case b of
Guess t v -> return $! v
_ -> fail "Not a guess"
-- | Typecheck locally
get_type :: Raw -> Elab' aux Type
get_type tm = do ctxt <- get_context
env <- get_env
(val, ty) <- lift $ check ctxt env tm
return $! (finalise ty)
get_type_val :: Raw -> Elab' aux (Term, Type)
get_type_val tm = do ctxt <- get_context
env <- get_env
(val, ty) <- lift $ check ctxt env tm
return $! (finalise val, finalise ty)
-- | get holes we've deferred for later definition
get_deferred :: Elab' aux [Name]
get_deferred = do ES p _ _ <- get
return $! (deferred (fst p))
-- | Check that the head term of a unification equation is injective;
--   fail with 'NotInjective' otherwise.  Constructors, constants,
--   types, and Pi binders count as injective heads, and injectivity is
--   looked up through the head of an application.
checkInjective :: (Term, Term, Term) -> Elab' aux ()
checkInjective (tm, l, r) = do ctxt <- get_context
                               if isInj ctxt tm then return $! ()
                                 else lift $ tfail (NotInjective tm l r)
  where isInj ctxt (P _ n _)
            | isConName n ctxt = True
        isInj ctxt (App _ f a) = isInj ctxt f
        isInj ctxt (Constant _) = True
        isInj ctxt (TType _) = True
        isInj ctxt (Bind _ (Pi _ _ _) sc) = True
        isInj ctxt _ = False
-- | get instance argument names
get_instances :: Elab' aux [Name]
get_instances = do ES p _ _ <- get
return $! (instances (fst p))
-- | get auto argument names
get_autos :: Elab' aux [(Name, [Name])]
get_autos = do ES p _ _ <- get
return $! (autos (fst p))
-- | given a desired hole name, return a unique hole name
unique_hole :: Name -> Elab' aux Name
unique_hole = unique_hole' False
-- | Worker: make @n@ unique with respect to current holes, names bound
--   in the proof term and type, non-unifiable names, and used names,
--   then advance the name counter past any MN index produced.
--   NOTE(review): the 'reusable' flag is currently ignored.
unique_hole' :: Bool -> Name -> Elab' aux Name
unique_hole' reusable n
      = do ES p _ _ <- get
           let bs = bound_in (pterm (fst p)) ++
                    bound_in_term (ptype (fst p))
           let nouse = holes (fst p) ++ bs ++ dontunify (fst p) ++ usedns (fst p)
           n' <- return $! uniqueNameCtxt (context (fst p)) n nouse
           ES (p, a) s u <- get
           -- Keep nextname ahead of any machine name we just minted so
           -- later fresh names cannot collide with it.
           case n' of
                MN i _ -> when (i >= nextname p) $
                             put (ES (p { nextname = i + 1 }, a) s u)
                _ -> return $! ()
           return $! n'
elog :: String -> Elab' aux ()
elog str = do ES p logs prev <- get
put (ES p (logs ++ str ++ "\n") prev)
getLog :: Elab' aux String
getLog = do ES p logs _ <- get
return $! logs
-- The primitives, from ProofState
attack :: Elab' aux ()
attack = processTactic' Attack
claim :: Name -> Raw -> Elab' aux ()
claim n t = processTactic' (Claim n t)
claimFn :: Name -> Name -> Raw -> Elab' aux ()
claimFn n bn t = processTactic' (ClaimFn n bn t)
unifyGoal :: Raw -> Elab' aux ()
unifyGoal t = processTactic' (UnifyGoal t)
exact :: Raw -> Elab' aux ()
exact t = processTactic' (Exact t)
fill :: Raw -> Elab' aux ()
fill t = processTactic' (Fill t)
match_fill :: Raw -> Elab' aux ()
match_fill t = processTactic' (MatchFill t)
prep_fill :: Name -> [Name] -> Elab' aux ()
prep_fill n ns = processTactic' (PrepFill n ns)
complete_fill :: Elab' aux ()
complete_fill = processTactic' CompleteFill
solve :: Elab' aux ()
solve = processTactic' Solve
start_unify :: Name -> Elab' aux ()
start_unify n = processTactic' (StartUnify n)
end_unify :: Elab' aux ()
end_unify = processTactic' EndUnify
-- Clear the list of variables not to unify, and try to solve them
unify_all :: Elab' aux ()
unify_all = processTactic' UnifyAll
regret :: Elab' aux ()
regret = processTactic' Regret
compute :: Elab' aux ()
compute = processTactic' Compute
computeLet :: Name -> Elab' aux ()
computeLet n = processTactic' (ComputeLet n)
simplify :: Elab' aux ()
simplify = processTactic' Simplify
hnf_compute :: Elab' aux ()
hnf_compute = processTactic' HNF_Compute
eval_in :: Raw -> Elab' aux ()
eval_in t = processTactic' (EvalIn t)
check_in :: Raw -> Elab' aux ()
check_in t = processTactic' (CheckIn t)
intro :: Maybe Name -> Elab' aux ()
intro n = processTactic' (Intro n)
introTy :: Raw -> Maybe Name -> Elab' aux ()
introTy ty n = processTactic' (IntroTy ty n)
forall :: Name -> Maybe ImplicitInfo -> Raw -> Elab' aux ()
forall n i t = processTactic' (Forall n i t)
letbind :: Name -> Raw -> Raw -> Elab' aux ()
letbind n t v = processTactic' (LetBind n t v)
expandLet :: Name -> Term -> Elab' aux ()
expandLet n v = processTactic' (ExpandLet n v)
rewrite :: Raw -> Elab' aux ()
rewrite tm = processTactic' (Rewrite tm)
induction :: Raw -> Elab' aux ()
induction tm = processTactic' (Induction tm)
casetac :: Raw -> Elab' aux ()
casetac tm = processTactic' (CaseTac tm)
equiv :: Raw -> Elab' aux ()
equiv tm = processTactic' (Equiv tm)
-- | Turn the current hole into a pattern variable with the provided
-- name, made unique if MN
patvar :: Name -> Elab' aux ()
patvar n@(SN _) = do apply (Var n) []; solve
patvar n = do env <- get_env
hs <- get_holes
if (n `elem` map fst env) then do apply (Var n) []; solve
else do n' <- case n of
UN _ -> return $! n
MN _ _ -> unique_hole n
NS _ _ -> return $! n
x -> return $! n
processTactic' (PatVar n')
-- | Turn the current hole into a pattern variable with the provided
-- name, but don't make MNs unique.
patvar' :: Name -> Elab' aux ()
patvar' n@(SN _) = do apply (Var n) [] ; solve
patvar' n = do env <- get_env
hs <- get_holes
if (n `elem` map fst env) then do apply (Var n) [] ; solve
else processTactic' (PatVar n)
patbind :: Name -> Elab' aux ()
patbind n = processTactic' (PatBind n)
focus :: Name -> Elab' aux ()
focus n = processTactic' (Focus n)
movelast :: Name -> Elab' aux ()
movelast n = processTactic' (MoveLast n)
-- | Mark the current hole as dotted, recording which names are bound
--   outside it (the environment at its first occurrence in the proof
--   term).  Does nothing when there are no holes.
dotterm :: Elab' aux ()
dotterm = do ES (p, a) s m <- get
             tm <- get_term
             case holes p of
                  [] -> return ()
                  (h : hs) ->
                     do let outer = findOuter h [] tm
                        let p' = p { dotted = (h, outer) : dotted p }
                        -- trace ("DOTTING " ++ show (h, outer) ++ "\n" ++
                        --        show tm) $
                        put $ ES (p', a) s m
  where
    -- Collect the binders in scope at occurrences of hole h; 'union'
    -- merges the environments from separate occurrences.
    findOuter h env (P _ n _) | h == n = env
    findOuter h env (Bind n b sc)
       = union (foB b)
               (findOuter h env (instantiate (P Bound n (binderTy b)) sc))
      -- Only a Guess's value extends the environment with the binder.
      where foB (Guess t v) = union (findOuter h env t) (findOuter h (n:env) v)
            foB (Let t v) = union (findOuter h env t) (findOuter h env v)
            foB b = findOuter h env (binderTy b)
    findOuter h env (App _ f a)
        = union (findOuter h env f) (findOuter h env a)
    findOuter h env _ = []
-- | Read back the dotted-hole annotations recorded by 'dotterm'.
get_dotterm :: Elab' aux [(Name, [Name])]
get_dotterm = do ES (p, a) s m <- get
                 return (dotted p)
-- | Set the zipper in the proof state to point at the current sub term
-- (This currently happens automatically, so this will have no effect...)
zipHere :: Elab' aux ()
zipHere = do ES (ps, a) s m <- get
             -- NOTE(review): 'head' is partial; this fails if there are
             -- no holes left. Callers presumably guarantee one exists.
             let pt' = refocus (Just (head (holes ps))) (pterm ps)
             put (ES (ps { pterm = pt' }, a) s m)
-- | Run the MatchProblems tactic with the given flag.
matchProblems :: Bool -> Elab' aux ()
matchProblems = processTactic' . MatchProblems

-- | Attempt to solve all outstanding unification problems.
unifyProblems :: Elab' aux ()
unifyProblems = processTactic' UnifyProblems

-- | Defer the current goal under a freshened name, abstracted over
-- the given names.
defer :: [Name] -> Name -> Elab' aux ()
defer ds n = unique_hole n >>= processTactic' . Defer ds

-- | Defer a goal with an explicitly given type.
deferType :: Name -> Raw -> [Name] -> Elab' aux ()
deferType n ty = processTactic' . DeferType n ty

-- | Run the Instance tactic on the named hole.
instanceArg :: Name -> Elab' aux ()
instanceArg = processTactic' . Instance

-- | Run the AutoArg tactic on the named hole.
autoArg :: Name -> Elab' aux ()
autoArg = processTactic' . AutoArg

-- | Mark the given name as injective for unification.
setinj :: Name -> Elab' aux ()
setinj = processTactic' . SetInjective

-- | Run the ProofState tactic.
proofstate :: Elab' aux ()
proofstate = processTactic' ProofState

-- | Reorder claims relative to the given name.
reorder_claims :: Name -> Elab' aux ()
reorder_claims = processTactic' . Reorder
-- | Finish the proof: run the QED tactic and return the completed
-- proof term.
qed :: Elab' aux Term
qed = do processTactic' QED
         ES (ps, _) _ _ <- get
         return $! getProofTerm (pterm ps)

-- | Undo the most recent proof step.
undo :: Elab' aux ()
undo = processTactic' Undo
-- | Prepare to apply a function by creating holes to be filled by the arguments
prepare_apply :: Raw -- ^ The operation being applied
              -> [Bool] -- ^ Whether arguments are implicit
              -> Elab' aux [(Name, Name)] -- ^ The names of the arguments and their holes to be filled with elaborated argument values
prepare_apply fn imps =
  do ty <- get_type fn
     ctxt <- get_context
     env <- get_env
     -- let claims = getArgs ty imps
     -- claims <- mkClaims (normalise ctxt env ty) imps []
     claims <- -- trace (show (fn, imps, ty, map fst env, normalise ctxt env (finalise ty))) $
               mkClaims (finalise ty)
                        (normalise ctxt env (finalise ty))
                        imps [] (map fst env)
     ES (p, a) s prev <- get
     -- reverse the claims we made so that args go left to right
     -- NOTE(review): partial pattern; assumes 'holes p' is non-empty here.
     let n = length (filter not imps)
     let (h : hs) = holes p
     put (ES (p { holes = h : (reverse (take n hs) ++ drop n hs) }, a) s prev)
     return $! claims
  where
    -- Walk the Pi-telescope of the function's type, creating one claim
    -- (hole) per argument; implicit arguments are moved to the back of
    -- the hole queue so they are solved last (typically by unification).
    mkClaims :: Type -- ^ The type of the operation being applied
             -> Type -- ^ Normalised version if we need it
             -> [Bool] -- ^ Whether the arguments are implicit
             -> [(Name, Name)] -- ^ Accumulator for produced claims
             -> [Name] -- ^ Hypotheses
             -> Elab' aux [(Name, Name)] -- ^ The names of the arguments and their holes, resp.
    mkClaims (Bind n' (Pi _ t_in _) sc) (Bind _ _ scn) (i : is) claims hs =
        do let t = rebind hs t_in
           n <- getNameFrom (mkMN n')
           -- when (null claims) (start_unify n)
           let sc' = instantiate (P Bound n t) sc
           env <- get_env
           claim n (forgetEnv (map fst env) t)
           when i (movelast n)
           mkClaims sc' scn is ((n', n) : claims) hs
    -- if we run out of arguments, we need the normalised version...
    mkClaims t tn@(Bind _ _ sc) (i : is) cs hs = mkClaims tn tn (i : is) cs hs
    mkClaims t _ [] claims _ = return $! (reverse claims)
    mkClaims _ _ _ _ _
        | Var n <- fn
            = do ctxt <- get_context
                 case lookupTy n ctxt of
                      [] -> lift $ tfail $ NoSuchVariable n
                      _ -> lift $ tfail $ TooManyArguments n
        | otherwise = fail $ "Too many arguments for " ++ show fn
    -- (A dead local helper 'doClaim' was removed here: it was never
    -- referenced by any equation above.)
    -- Derive a machine name for the claim from the Pi binder's name.
    mkMN n@(MN i _) = n
    mkMN n@(UN x) = MN 99999 x
    mkMN n@(SN s) = sMN 99999 (show s)
    mkMN (NS n xs) = NS (mkMN n) xs
    -- Rename binders that clash with hypotheses already in scope.
    rebind hs (Bind n t sc)
        | n `elem` hs = let n' = uniqueName n hs in
                            Bind n' (fmap (rebind hs) t) (rebind (n':hs) sc)
        | otherwise = Bind n (fmap (rebind hs) t) (rebind (n:hs) sc)
    rebind hs (App s f a) = App s (rebind hs f) (rebind hs a)
    rebind hs t = t
-- | Apply an operator, solving some arguments by unification or matching.
apply, match_apply :: Raw -- ^ The operator to apply
                   -> [(Bool, Int)] -- ^ For each argument, whether to
                                    -- attempt to solve it and the
                                    -- priority in which to do so
                   -> Elab' aux [(Name, Name)]
apply fn argSpec = apply' fill fn argSpec
match_apply fn argSpec = apply' match_fill fn argSpec
-- | The worker behind 'apply' and 'match_apply': create holes for the
-- arguments with 'prepare_apply', record which of them must not be
-- solved by unification, run the given filling tactic, and finish the
-- unification pass.
apply' :: (Raw -> Elab' aux ()) -> Raw -> [(Bool, Int)] -> Elab' aux [(Name, Name)]
apply' fillt fn imps =
    do args <- prepare_apply fn (map fst imps)
       -- _Don't_ solve the arguments we're specifying by hand.
       -- (remove from unified list before calling end_unify)
       hs <- get_holes
       ES (p, a) s prev <- get
       -- NOTE(review): 'head hs' is partial; assumes at least one hole.
       let dont = if null imps
                     then head hs : dontunify p
                     else getNonUnify (head hs : dontunify p) imps args
       let (n, hunis) = -- trace ("AVOID UNIFY: " ++ show (fn, dont)) $
                           unified p
       let unify = -- trace ("Not done " ++ show hs) $
                      dropGiven dont hunis hs
       let notunify = -- trace ("Not done " ++ show (hs, hunis)) $
                         keepGiven dont hunis hs
       put (ES (p { dontunify = dont, unified = (n, unify),
                    notunified = notunify ++ notunified p }, a) s prev)
       fillt (raw_apply fn (map (Var . snd) args))
       ulog <- getUnifyLog
       g <- goal
       traceWhen ulog ("Goal " ++ show g ++ " -- when elaborating " ++ show fn) $
         end_unify
       return $! (map (\(argName, argHole) -> (argName, updateUnify unify argHole)) args)
  where updateUnify us n = case lookup n us of
                                Just (P _ t _) -> t
                                _ -> n
        -- Collect the hole names of arguments marked for manual
        -- elaboration; these must not be solved by unification.
        getNonUnify acc [] _ = acc
        getNonUnify acc _ [] = acc
        getNonUnify acc ((i,_):is) ((a, t):as)
           | i = getNonUnify acc is as
           | otherwise = getNonUnify (t : acc) is as
        -- getNonUnify imps args = map fst (filter (not . snd) (zip (map snd args) (map fst imps)))
-- | Apply a function to a mixture of arguments: positions given as
-- @Just elab@ are elaborated by the supplied script; @Nothing@
-- positions are left for unification to solve.
apply2 :: Raw -> [Maybe (Elab' aux ())] -> Elab' aux ()
apply2 fn elabs =
    do args <- prepare_apply fn (map noElab elabs)
       fill (raw_apply fn (map (Var . snd) args))
       elabArgs (map snd args) elabs
       ES (p, a) s prev <- get
       let (n, hs) = unified p
       end_unify
       solve
  where elabArgs [] [] = return $! ()
        elabArgs (n:ns) (Just e:es) = do focus n; e
                                         elabArgs ns es
        elabArgs (n:ns) (_:es) = elabArgs ns es
        -- True for argument positions with no explicit elaborator; those
        -- are treated as implicit so unification solves them. (Renamed
        -- from 'isJust', which shadowed Data.Maybe.isJust with inverted
        -- semantics.)
        noElab (Just _) = False
        noElab _ = True
-- | Apply the named function, elaborating the arguments that come with
-- an explicit script (paired with a priority); arguments without a
-- script are left for unification.
apply_elab :: Name -> [Maybe (Int, Elab' aux ())] -> Elab' aux ()
apply_elab n args =
    do ty <- get_type (Var n)
       ctxt <- get_context
       env <- get_env
       claims <- doClaims (hnf ctxt env ty) args []
       prep_fill n (map fst claims)
       -- NOTE(review): 'eclaims' (claims sorted by priority) is computed
       -- but never used; 'elabClaims' below runs over the unsorted
       -- 'claims'. Possibly a latent bug -- confirm before changing.
       let eclaims = sortBy (\ (_, x) (_,y) -> priOrder x y) claims
       elabClaims [] False claims
       complete_fill
       end_unify
  where
    priOrder Nothing Nothing = EQ
    priOrder Nothing _ = LT
    priOrder _ Nothing = GT
    priOrder (Just (x, _)) (Just (y, _)) = compare x y
    -- Create one claim (hole) per Pi-bound argument of the type.
    doClaims (Bind n' (Pi _ t _) sc) (i : is) claims =
        do n <- unique_hole (mkMN n')
           when (null claims) (start_unify n)
           let sc' = instantiate (P Bound n t) sc
           claim n (forget t)
           case i of
                Nothing -> return $! ()
                Just _ -> -- don't solve by unification as there is an explicit value
                    do ES (p, a) s prev <- get
                       put (ES (p { dontunify = n : dontunify p }, a) s prev)
           doClaims sc' is ((n, i) : claims)
    doClaims t [] claims = return $! (reverse claims)
    doClaims _ _ _ = fail $ "Wrong number of arguments for " ++ show n
    -- Run the elaboration scripts; failed ones are retried once more
    -- while progress is still being made.
    elabClaims failed r []
        | null failed = return $! ()
        | otherwise = if r then elabClaims [] False failed
                           else return $! ()
    elabClaims failed r ((n, Nothing) : xs) = elabClaims failed r xs
    elabClaims failed r (e@(n, Just (_, elaboration)) : xs)
        | r = try (do ES p _ _ <- get
                      focus n; elaboration; elabClaims failed r xs)
                  (elabClaims (e : failed) r xs)
        | otherwise = do ES p _ _ <- get
                         focus n; elaboration; elabClaims failed r xs
    mkMN n@(MN _ _) = n
    mkMN n@(UN x) = MN 0 x
    mkMN (NS n ns) = NS (mkMN n) ns
-- If the goal is not a Pi-type, invent some names and make it a pi type
checkPiGoal :: Name -> Elab' aux ()
checkPiGoal n = do
    g <- goal
    case g of
      Bind _ (Pi _ _ _) _ -> return ()
      _ -> do
        -- Claim holes for the argument type, the return type, and a
        -- function of that shape, then fill the goal with the function.
        argTy <- getNameFrom (sMN 0 "pargTy")
        retTy <- getNameFrom (sMN 0 "pretTy")
        fh    <- getNameFrom (sMN 0 "pf")
        claim argTy RType
        claim retTy RType
        claim fh (RBind n (Pi Nothing (Var argTy) RType) (Var retTy))
        movelast argTy
        movelast retTy
        fill (Var fh)
        solve
        focus fh
-- | Elaborate a simple application: claim holes for the argument type,
-- return type and a function of that shape, elaborate the function and
-- argument into them, then complete the fill.
simple_app :: Bool -- ^ if True, unify the return type with the goal
                   -- (safe for simply typed, non-dependent applications)
           -> Elab' aux () -- ^ elaborate the function
           -> Elab' aux () -- ^ elaborate the argument
           -> String -- ^ NOTE(review): currently unused in the body
           -> Elab' aux ()
simple_app infer fun arg str =
    do a <- getNameFrom (sMN 0 "argTy")
       b <- getNameFrom (sMN 0 "retTy")
       f <- getNameFrom (sMN 0 "f")
       s <- getNameFrom (sMN 0 "s")
       claim a RType
       claim b RType
       claim f (RBind (sMN 0 "aX") (Pi Nothing (Var a) RType) (Var b))
       tm <- get_term
       start_unify s
       -- if 'infer' is set, we're assuming it's a simply typed application
       -- so safe to unify with the goal type (as there'll be no dependencies)
       when infer $ unifyGoal (Var b)
       hs <- get_holes
       claim s (Var a)
       prep_fill f [s]
       focus f; fun
       focus s; arg
       tm <- get_term
       ps <- get_probs
       ty <- goal
       hs <- get_holes
       complete_fill
       env <- get_env
       -- We don't need a and b in the hole queue any more since they were
       -- just for checking f, so move them to the end. If they never end up
       -- getting solved, we'll get an 'Incomplete term' error.
       hs <- get_holes
       when (a `elem` hs) $ do movelast a
       when (b `elem` hs) $ do movelast b
       end_unify
-- (A dead where-helper 'regretWith' was removed: it was never used.)
-- Abstract over an argument of unknown type, giving a name for the hole
-- which we'll fill with the argument type too.
arg :: Name -> Maybe ImplicitInfo -> Name -> Elab' aux ()
arg n i tyhole = do
    tyName <- unique_hole tyhole
    claim tyName RType
    movelast tyName
    forall n i (Var tyName)
-- try a tactic, if it adds any unification problem, return an error
no_errors :: Elab' aux () -> Maybe Err -> Elab' aux ()
no_errors tac err
    = do ps <- get_probs
         s <- get
         case err of
           Nothing -> tac
           Just e -> -- update the error, if there is one.
             case runStateT tac s of
               Error _ -> lift $ Error e
               OK (a, s') -> do put s'
                                return a
         unifyProblems
         ps' <- get_probs
         -- If a new problem appeared, report it (or the caller-supplied
         -- error, if given). 'ps'' is non-empty in the True branch since
         -- its length strictly exceeds that of 'ps'.
         if (length ps' > length ps) then
           case reverse ps' of
             ((x, y, _, env, inerr, while, _) : _) ->
               let (xp, yp) = getProvenance inerr
                   env' = map (\(x, b) -> (x, binderTy b)) env in
               lift $ tfail $
                 case err of
                   Nothing -> CantUnify False (x, xp) (y, yp) inerr env' 0
                   Just e -> e
           else return $! ()
-- Try a tactic, if it fails, try another
try :: Elab' aux a -> Elab' aux a -> Elab' aux a
try tac fallback = try' tac fallback False
-- | Run the first tactic; if it fails with an error accepted by the
-- predicate, run the second tactic from the original state instead.
handleError :: (Err -> Bool) -> Elab' aux a -> Elab' aux a -> Elab' aux a
handleError p t1 t2
   = do s <- get
        ps <- get_probs
        case runStateT t1 s of
             OK (v, s') -> do put s'
                              return $! v
             Error e1
               | p e1 ->
                   case runStateT t2 s of
                        OK (v, s') -> do put s'
                                         return $! v
                        Error e2 -> lift (tfail e2)
               | otherwise -> lift (tfail e1)
-- | Like 'try', but with control over whether proof-search failures
-- count as recoverable. The first tactic runs with new-problem
-- tracking ('prunStateT'); on a recoverable error the second tactic
-- runs from the original state.
try' :: Elab' aux a -> Elab' aux a -> Bool -> Elab' aux a
try' t1 t2 proofSearch
   = do s <- get
        ps <- get_probs
        ulog <- getUnifyLog
        ivs <- get_instances -- NOTE(review): unused binding
        case prunStateT 999999 False ps t1 s of
             OK ((v, _, _), s') -> do put s'
                                      return $! v
             Error e1 -> traceWhen ulog ("try failed " ++ show e1) $
                         if recoverableErr e1 then
                            do case runStateT t2 s of
                                 OK (v, s') -> do put s'; return $! v
                                 Error e2 -> lift (tfail e2)
                            else lift (tfail e1)
  where recoverableErr err@(CantUnify r x y _ _ _)
            = -- traceWhen r (show err) $
              r || proofSearch
        recoverableErr (CantSolveGoal _ _) = False
        recoverableErr (CantResolveAlts _) = proofSearch
        recoverableErr (ProofSearchFail (Msg _)) = True
        recoverableErr (ProofSearchFail _) = False
        recoverableErr (ElaboratingArg _ _ _ e) = recoverableErr e
        recoverableErr (At _ e) = recoverableErr e
        recoverableErr (ElabScriptDebug _ _ _) = False
        recoverableErr _ = True
-- | Run a tactic; on failure, pass the error to the handler and run
-- the tactic it produces from the original state.
tryCatch :: Elab' aux a -> (Err -> Elab' aux a) -> Elab' aux a
tryCatch body handler
   = do st <- get
        probs <- get_probs
        ulog <- getUnifyLog
        -- case prunStateT 999999 False probs body st of
        case runStateT body st of
             OK (v, st') -> do put st'
                               return $! v
             Error e -> traceWhen ulog ("tryCatch failed " ++ show e) $
                          case runStateT (handler e) st of
                               OK (v, st') -> do put st'
                                                 return $! v
                               Error e' -> lift (tfail e')
-- | 'try' the fallback only when the flag is set; otherwise run the
-- first tactic unconditionally.
tryWhen :: Bool -> Elab' aux a -> Elab' aux a -> Elab' aux a
tryWhen cond tac alt
  | cond      = try tac alt
  | otherwise = tac
-- Bool says whether it's okay to create new unification problems. If set
-- to False, then the whole tactic fails if there are any new problems
-- | Try each alternative, collecting successes (keeping only the
-- strictly best by number of new problems). Succeeds only when a
-- single candidate remains; several ties give 'CantResolveAlts' and
-- no success gives 'NoValidAlts'.
tryAll :: [(Elab' aux a, Name)] -> Elab' aux a
tryAll [(x, _)] = x
tryAll xs = tryAll' [] 999999 xs
  where
    cantResolve :: Elab' aux a
    cantResolve = lift $ tfail $ CantResolveAlts (map snd xs)
    noneValid :: Elab' aux a
    noneValid = lift $ tfail $ NoValidAlts (map snd xs)
    tryAll' :: [Elab' aux a] -> -- successes
            Int -> -- most problems
            [(Elab' aux a, Name)] -> -- still to try
            Elab' aux a
    tryAll' [res] pmax [] = res
    tryAll' (_:_) pmax [] = cantResolve
    tryAll' [] pmax [] = noneValid
    tryAll' cs pmax ((x, msg):xs)
        = do s <- get
             ps <- get_probs
             case prunStateT pmax True ps x s of
               OK ((v, newps, probs), s') ->
                 -- A strictly better candidate replaces all previous
                 -- ones; a tie is added alongside them.
                 do let cs' = if (newps < pmax)
                                 then [do put s'; return $! v]
                                 else (do put s'; return $! v) : cs
                    tryAll' cs' newps xs
               Error err -> do put s
                               tryAll' cs pmax xs
-- Run an elaborator, and fail if any problems are introduced
prunStateT
    :: Int -- ^ maximum tolerated number of new problems
    -> Bool -- ^ if True, new problems are acceptable up to the maximum
    -> [a] -- ^ the problems present before running the elaborator
    -> Control.Monad.State.Strict.StateT
         (ElabState t) TC t1
    -> ElabState t
    -> TC ((t1, Int, Idris.Core.Unify.Fails), ElabState t)
prunStateT pmax zok ps x s
    = case runStateT x s of
        OK (v, s'@(ES (p, _) _ _)) ->
          let newps = length (problems p) - length ps
              newpmax = if newps < 0 then 0 else newps in
          if (newpmax > pmax || (not zok && newps > 0)) -- length ps == 0 && newpmax > 0))
             then case reverse (problems p) of
                    -- NOTE(review): no [] alternative here; reaching this
                    -- branch with an empty problem list is a pattern-match
                    -- failure. It appears reachable only when newps > 0,
                    -- so 'problems p' should be non-empty.
                    ((_,_,_,_,e,_,_):_) -> Error e
             else OK ((v, newpmax, problems p), s')
        Error e -> Error e
-- | Abort elaboration with an 'ElabScriptDebug' error carrying the
-- message, the current proof term, and the goal/environment of every
-- remaining hole (state is saved/restored around the hole walk).
debugElaborator :: [ErrorReportPart] -> Elab' aux a
debugElaborator msg = do ps <- fmap proof get
                         saveState -- so we don't need to remember the hole order
                         hs <- get_holes
                         holeInfo <- mapM getHoleInfo hs
                         loadState
                         lift . Error $ ElabScriptDebug msg (getProofTerm (pterm ps)) holeInfo
  where getHoleInfo :: Name -> Elab' aux (Name, Type, [(Name, Binder Type)])
        getHoleInfo h = do focus h
                           g <- goal
                           env <- get_env
                           return (h, g, env)
-- | Render just the two terms of each unification failure, for debugging.
qshow :: Fails -> String
qshow = show . map (\ (x, y, _, _, _, _, _) -> (x, y))

-- | Render the error component of each problem, one per line.
dumpprobs = concatMap (\ (_, _, _, e) -> show e ++ "\n")
| uwap/Idris-dev | src/Idris/Core/Elaborate.hs | bsd-3-clause | 34,161 | 12 | 23 | 12,688 | 12,407 | 6,140 | 6,267 | 703 | 12 |
{-# LANGUAGE OverloadedStrings #-}
module Tests.Readers.Markdown (tests) where
import Text.Pandoc.Definition
import Test.Framework
import Tests.Helpers
import Tests.Arbitrary()
import Text.Pandoc.Builder
import qualified Data.Set as Set
-- import Text.Pandoc.Shared ( normalize )
import Text.Pandoc
import Text.Pandoc.Error
-- | Parse a markdown document with default reader options, turning
-- parse errors into exceptions via 'handleError'.
markdown :: String -> Pandoc
markdown s = handleError (readMarkdown def s)

-- | Like 'markdown', with smart punctuation enabled.
markdownSmart :: String -> Pandoc
markdownSmart s = handleError (readMarkdown def { readerSmart = True } s)

-- | Like 'markdown', with the compact_definition_lists extension added.
markdownCDL :: String -> Pandoc
markdownCDL s = handleError (readMarkdown opts s)
  where opts = def { readerExtensions =
                       Set.insert Ext_compact_definition_lists (readerExtensions def) }

-- | Like 'markdown', with GitHub-flavoured markdown extensions.
markdownGH :: String -> Pandoc
markdownGH s = handleError (readMarkdown def { readerExtensions = githubMarkdownExtensions } s)
infix 4 =:

-- | Build a named test case that runs the default 'markdown' parser.
(=:) :: ToString c
     => String -> (String, c) -> Test
name =: inputAndExpected = test markdown name inputAndExpected
-- | Check that a bare URI in running text parses to the given inlines
-- (with the autolink_bare_uris and raw_html extensions enabled).
testBareLink :: (String, Inlines) -> Test
testBareLink (input, expected) =
  test parser input (input, doc $ para expected)
  where parser = handleError . readMarkdown def
                   { readerExtensions =
                       Set.fromList [Ext_autolink_bare_uris, Ext_raw_html] }
-- | An inline link whose label and target are both the URI itself.
autolink :: String -> Inlines
autolink u = link u "" (str u)
-- | Pairs of input text and the inline structure a bare URI in it
-- should parse to (run via 'testBareLink').
bareLinkTests :: [(String, Inlines)]
bareLinkTests =
  [ ("http://google.com is a search engine.",
     autolink "http://google.com" <> " is a search engine.")
  , ("<a href=\"http://foo.bar.baz\">http://foo.bar.baz</a>",
     rawInline "html" "<a href=\"http://foo.bar.baz\">" <>
     "http://foo.bar.baz" <> rawInline "html" "</a>")
  , ("Try this query: http://google.com?search=fish&time=hour.",
     "Try this query: " <> autolink "http://google.com?search=fish&time=hour" <> ".")
  , ("HTTPS://GOOGLE.COM,",
     autolink "HTTPS://GOOGLE.COM" <> ",")
  , ("http://el.wikipedia.org/wiki/Τεχνολογία,",
     autolink "http://el.wikipedia.org/wiki/Τεχνολογία" <> ",")
  , ("doi:10.1000/182,",
     autolink "doi:10.1000/182" <> ",")
  , ("git://github.com/foo/bar.git,",
     autolink "git://github.com/foo/bar.git" <> ",")
  , ("file:///Users/joe/joe.txt, and",
     autolink "file:///Users/joe/joe.txt" <> ", and")
  , ("mailto:someone@somedomain.com.",
     autolink "mailto:someone@somedomain.com" <> ".")
  , ("Use http: this is not a link!",
     "Use http: this is not a link!")
  , ("(http://google.com).",
     "(" <> autolink "http://google.com" <> ").")
  , ("http://en.wikipedia.org/wiki/Sprite_(computer_graphics)",
     autolink "http://en.wikipedia.org/wiki/Sprite_(computer_graphics)")
  , ("http://en.wikipedia.org/wiki/Sprite_[computer_graphics]",
     link "http://en.wikipedia.org/wiki/Sprite_%5Bcomputer_graphics%5D" ""
       (str "http://en.wikipedia.org/wiki/Sprite_[computer_graphics]"))
  , ("http://en.wikipedia.org/wiki/Sprite_{computer_graphics}",
     link "http://en.wikipedia.org/wiki/Sprite_%7Bcomputer_graphics%7D" ""
       (str "http://en.wikipedia.org/wiki/Sprite_{computer_graphics}"))
  , ("http://example.com/Notification_Center-GitHub-20101108-140050.jpg",
     autolink "http://example.com/Notification_Center-GitHub-20101108-140050.jpg")
  , ("https://github.com/github/hubot/blob/master/scripts/cream.js#L20-20",
     autolink "https://github.com/github/hubot/blob/master/scripts/cream.js#L20-20")
  , ("http://www.rubyonrails.com",
     autolink "http://www.rubyonrails.com")
  , ("http://www.rubyonrails.com:80",
     autolink "http://www.rubyonrails.com:80")
  , ("http://www.rubyonrails.com/~minam",
     autolink "http://www.rubyonrails.com/~minam")
  , ("https://www.rubyonrails.com/~minam",
     autolink "https://www.rubyonrails.com/~minam")
  , ("http://www.rubyonrails.com/~minam/url%20with%20spaces",
     autolink "http://www.rubyonrails.com/~minam/url%20with%20spaces")
  , ("http://www.rubyonrails.com/foo.cgi?something=here",
     autolink "http://www.rubyonrails.com/foo.cgi?something=here")
  , ("http://www.rubyonrails.com/foo.cgi?something=here&and=here",
     autolink "http://www.rubyonrails.com/foo.cgi?something=here&and=here")
  , ("http://www.rubyonrails.com/contact;new",
     autolink "http://www.rubyonrails.com/contact;new")
  , ("http://www.rubyonrails.com/contact;new%20with%20spaces",
     autolink "http://www.rubyonrails.com/contact;new%20with%20spaces")
  , ("http://www.rubyonrails.com/contact;new?with=query&string=params",
     autolink "http://www.rubyonrails.com/contact;new?with=query&string=params")
  , ("http://www.rubyonrails.com/~minam/contact;new?with=query&string=params",
     autolink "http://www.rubyonrails.com/~minam/contact;new?with=query&string=params")
  , ("http://en.wikipedia.org/wiki/Wikipedia:Today%27s_featured_picture_%28animation%29/January_20%2C_2007",
     autolink "http://en.wikipedia.org/wiki/Wikipedia:Today%27s_featured_picture_%28animation%29/January_20%2C_2007")
  , ("http://www.mail-archive.com/rails@lists.rubyonrails.org/",
     autolink "http://www.mail-archive.com/rails@lists.rubyonrails.org/")
  , ("http://www.amazon.com/Testing-Equal-Sign-In-Path/ref=pd_bbs_sr_1?ie=UTF8&s=books&qid=1198861734&sr=8-1",
     autolink "http://www.amazon.com/Testing-Equal-Sign-In-Path/ref=pd_bbs_sr_1?ie=UTF8&s=books&qid=1198861734&sr=8-1")
  , ("http://en.wikipedia.org/wiki/Texas_hold%27em",
     autolink "http://en.wikipedia.org/wiki/Texas_hold%27em")
  , ("https://www.google.com/doku.php?id=gps:resource:scs:start",
     autolink "https://www.google.com/doku.php?id=gps:resource:scs:start")
  -- NOTE(review): duplicate of an earlier entry.
  , ("http://www.rubyonrails.com",
     autolink "http://www.rubyonrails.com")
  , ("http://manuals.ruby-on-rails.com/read/chapter.need_a-period/103#page281",
     autolink "http://manuals.ruby-on-rails.com/read/chapter.need_a-period/103#page281")
  , ("http://foo.example.com/controller/action?parm=value&p2=v2#anchor123",
     autolink "http://foo.example.com/controller/action?parm=value&p2=v2#anchor123")
  , ("http://foo.example.com:3000/controller/action",
     autolink "http://foo.example.com:3000/controller/action")
  , ("http://foo.example.com:3000/controller/action+pack",
     autolink "http://foo.example.com:3000/controller/action+pack")
  , ("http://business.timesonline.co.uk/article/0,,9065-2473189,00.html",
     autolink "http://business.timesonline.co.uk/article/0,,9065-2473189,00.html")
  , ("http://www.mail-archive.com/ruby-talk@ruby-lang.org/",
     autolink "http://www.mail-archive.com/ruby-talk@ruby-lang.org/")
  , ("https://example.org/?anchor=lala-",
     autolink "https://example.org/?anchor=lala-")
  , ("https://example.org/?anchor=-lala",
     autolink "https://example.org/?anchor=-lala")
  ]
{-
p_markdown_round_trip :: Block -> Bool
p_markdown_round_trip b = matches d' d''
where d' = normalize $ Pandoc (Meta [] [] []) [b]
d'' = normalize
$ readMarkdown def { readerSmart = True }
$ writeMarkdown def d'
matches (Pandoc _ [Plain []]) (Pandoc _ []) = True
matches (Pandoc _ [Para []]) (Pandoc _ []) = True
matches (Pandoc _ [Plain xs]) (Pandoc _ [Para xs']) = xs == xs'
matches x y = x == y
-}
-- | The full test tree for the Markdown reader, grouped by feature
-- (inline code, emphasis, raw HTML, links, headers, smart punctuation,
-- footnotes, literate Haskell, definition lists, lists, entities and
-- citations).
tests :: [Test]
tests = [ testGroup "inline code"
          [ "with attribute" =:
            "`document.write(\"Hello\");`{.javascript}"
            =?> para
                (codeWith ("",["javascript"],[]) "document.write(\"Hello\");")
          , "with attribute space" =:
            "`*` {.haskell .special x=\"7\"}"
            =?> para (codeWith ("",["haskell","special"],[("x","7")]) "*")
          ]
        , testGroup "emph and strong"
          [ "two strongs in emph" =:
             "***a**b **c**d*" =?> para (emph (strong (str "a") <> str "b" <> space
                                          <> strong (str "c") <> str "d"))
          , "emph and strong emph alternating" =:
            "*xxx* ***xxx*** xxx\n*xxx* ***xxx*** xxx"
            =?> para (emph "xxx" <> space <> strong (emph "xxx") <>
                      space <> "xxx" <> softbreak <>
                      emph "xxx" <> space <> strong (emph "xxx") <>
                      space <> "xxx")
          , "emph with spaced strong" =:
            "*x **xx** x*"
            =?> para (emph ("x" <> space <> strong "xx" <> space <> "x"))
          , "intraword underscore with opening underscore (#1121)" =:
            "_foot_ball_" =?> para (emph (text "foot_ball"))
          ]
        , testGroup "raw LaTeX"
          [ "in URL" =:
            "\\begin\n" =?> para (text "\\begin")
          ]
        , testGroup "raw HTML"
          [ "nesting (issue #1330)" =:
            "<del>test</del>" =?>
            rawBlock "html" "<del>" <> plain (str "test") <>
            rawBlock "html" "</del>"
          , "invalid tag (issue #1820" =:
            "</ div></.div>" =?>
            para (text "</ div></.div>")
          , "technically invalid comment" =:
            "<!-- pandoc --help -->" =?>
            rawBlock "html" "<!-- pandoc --help -->"
          , test markdownGH "issue 2469" $
            "<\n\na>" =?>
            para (text "<") <> para (text "a>")
          ]
        , testGroup "emoji"
          [ test markdownGH "emoji symbols" $
            ":smile: and :+1:" =?> para (text "😄 and 👍")
          ]
        , "unbalanced brackets" =:
            "[[[[[[[[[[[[[[[hi" =?> para (text "[[[[[[[[[[[[[[[hi")
        , testGroup "backslash escapes"
          [ "in URL" =:
            "[hi](/there\\))"
            =?> para (link "/there)" "" "hi")
          , "in title" =:
            "[hi](/there \"a\\\"a\")"
            =?> para (link "/there" "a\"a" "hi")
          , "in reference link title" =:
            "[hi]\n\n[hi]: /there (a\\)a)"
            =?> para (link "/there" "a)a" "hi")
          , "in reference link URL" =:
            "[hi]\n\n[hi]: /there\\.0"
            =?> para (link "/there.0" "" "hi")
          ]
        , testGroup "bare URIs"
          (map testBareLink bareLinkTests)
        , testGroup "autolinks"
          [ "with unicode dash following" =:
            "<http://foo.bar>\8212" =?> para (autolink "http://foo.bar" <>
                                         str "\8212")
          , "a partial URL (#2277)" =:
            "<www.boe.es/buscar/act.php?id=BOE-A-1996-8930#a66>" =?>
            para (text "<www.boe.es/buscar/act.php?id=BOE-A-1996-8930#a66>")
          ]
        , testGroup "links"
          [ "no autolink inside link" =:
            "[<https://example.org>](url)" =?>
            para (link "url" "" (text "<https://example.org>"))
          , "no inline link inside link" =:
            "[[a](url2)](url)" =?>
            para (link "url" "" (text "[a](url2)"))
          , "no bare URI inside link" =:
            "[https://example.org(](url)" =?>
            para (link "url" "" (text "https://example.org("))
          ]
        , testGroup "Headers"
          [ "blank line before header" =:
            "\n# Header\n"
            =?> headerWith ("header",[],[]) 1 "Header"
          , "bracketed text (#2062)" =:
            "# [hi]\n"
            =?> headerWith ("hi",[],[]) 1 "[hi]"
          , "ATX header without trailing #s" =:
            "# Foo bar\n\n" =?>
            headerWith ("foo-bar",[],[]) 1 "Foo bar"
          , "ATX header without trailing #s" =:
            "# Foo bar with # #" =?>
            headerWith ("foo-bar-with",[],[]) 1 "Foo bar with #"
          , "setext header" =:
            "Foo bar\n=\n\n Foo bar 2 \n=" =?>
            headerWith ("foo-bar",[],[]) 1 "Foo bar"
            <> headerWith ("foo-bar-2",[],[]) 1 "Foo bar 2"
          ]
        , testGroup "Implicit header references"
          [ "ATX header without trailing #s" =:
            "# Header\n[header]\n\n[header ]\n\n[ header]" =?>
            headerWith ("header",[],[]) 1 "Header"
            <> para (link "#header" "" (text "header"))
            <> para (link "#header" "" (text "header"))
            <> para (link "#header" "" (text "header"))
          , "ATX header with trailing #s" =:
            "# Foo bar #\n[foo bar]\n\n[foo bar ]\n\n[ foo bar]" =?>
            headerWith ("foo-bar",[],[]) 1 "Foo bar"
            <> para (link "#foo-bar" "" (text "foo bar"))
            <> para (link "#foo-bar" "" (text "foo bar"))
            <> para (link "#foo-bar" "" (text "foo bar"))
          , "setext header" =:
            " Header \n=\n\n[header]\n\n[header ]\n\n[ header]" =?>
            headerWith ("header",[],[]) 1 "Header"
            <> para (link "#header" "" (text "header"))
            <> para (link "#header" "" (text "header"))
            <> para (link "#header" "" (text "header"))
          ]
        , testGroup "smart punctuation"
          [ test markdownSmart "quote before ellipses"
            ("'...hi'"
             =?> para (singleQuoted "…hi"))
          , test markdownSmart "apostrophe before emph"
            ("D'oh! A l'*aide*!"
             =?> para ("D’oh! A l’" <> emph "aide" <> "!"))
          , test markdownSmart "apostrophe in French"
            ("À l'arrivée de la guerre, le thème de l'«impossibilité du socialisme»"
             =?> para "À l’arrivée de la guerre, le thème de l’«impossibilité du socialisme»")
          , test markdownSmart "apostrophe after math" $ -- issue #1909
            "The value of the $x$'s and the systems' condition." =?>
            para (text "The value of the " <> math "x" <> text "\8217s and the systems\8217 condition.")
          ]
        , testGroup "footnotes"
          [ "indent followed by newline and flush-left text" =:
            "[^1]\n\n[^1]: my note\n\n \nnot in note\n"
            =?> para (note (para "my note")) <> para "not in note"
          , "indent followed by newline and indented text" =:
            "[^1]\n\n[^1]: my note\n \n in note\n"
            =?> para (note (para "my note" <> para "in note"))
          , "recursive note" =:
            "[^1]\n\n[^1]: See [^1]\n"
            =?> para (note (para "See [^1]"))
          ]
        , testGroup "lhs"
          [ test (handleError . readMarkdown def{ readerExtensions = Set.insert
                     Ext_literate_haskell $ readerExtensions def })
            "inverse bird tracks and html" $
            "> a\n\n< b\n\n<div>\n"
            =?> codeBlockWith ("",["sourceCode","literate","haskell"],[]) "a"
                <>
                codeBlockWith ("",["sourceCode","haskell"],[]) "b"
                <>
                rawBlock "html" "<div>\n\n"
          ]
-- the round-trip properties frequently fail
--        , testGroup "round trip"
--          [ property "p_markdown_round_trip" p_markdown_round_trip
--          ]
        , testGroup "definition lists"
          [ "no blank space" =:
            "foo1\n  :  bar\n\nfoo2\n  :  bar2\n  :  bar3\n" =?>
            definitionList [ (text "foo1", [plain (text "bar")])
                           , (text "foo2", [plain (text "bar2"),
                                            plain (text "bar3")])
                           ]
          , "blank space before first def" =:
            "foo1\n\n  :  bar\n\nfoo2\n\n  :  bar2\n  :  bar3\n" =?>
            definitionList [ (text "foo1", [para (text "bar")])
                           , (text "foo2", [para (text "bar2"),
                                            plain (text "bar3")])
                           ]
          , "blank space before second def" =:
            "foo1\n  :  bar\n\nfoo2\n  :  bar2\n\n  :  bar3\n" =?>
            definitionList [ (text "foo1", [plain (text "bar")])
                           , (text "foo2", [plain (text "bar2"),
                                            para (text "bar3")])
                           ]
          , "laziness" =:
            "foo1\n  :  bar\nbaz\n  :  bar2\n" =?>
            definitionList [ (text "foo1", [plain (text "bar" <>
                                            softbreak <> text "baz"),
                                            plain (text "bar2")])
                           ]
          , "no blank space before first of two paragraphs" =:
            "foo1\n  :  bar\n\n      baz\n" =?>
            definitionList [ (text "foo1", [para (text "bar") <>
                                            para (text "baz")])
                           ]
          , "first line not indented" =:
            "foo\n: bar\n" =?>
            definitionList [ (text "foo", [plain (text "bar")]) ]
          , "list in definition" =:
            "foo\n: - bar\n" =?>
            definitionList [ (text "foo", [bulletList [plain (text "bar")]]) ]
          , "in div" =:
            "<div>foo\n: - bar\n</div>" =?>
            divWith nullAttr (definitionList
              [ (text "foo", [bulletList [plain (text "bar")]]) ])
          ]
        , testGroup "+compact_definition_lists"
          [ test markdownCDL "basic compact list" $
            "foo1\n:   bar\n    baz\nfoo2\n:   bar2\n" =?>
            definitionList [ (text "foo1", [plain (text "bar" <> softbreak <>
                                                   text "baz")])
                           , (text "foo2", [plain (text "bar2")])
                           ]
          ]
        , testGroup "lists"
          [ "issue #1154" =:
              " -  <div>\n    first div breaks\n    </div>\n\n    <button>if this button exists</button>\n\n    <div>\n    with this div too.\n    </div>\n"
              =?> bulletList [divWith nullAttr (para $ text "first div breaks") <>
                              rawBlock "html" "<button>" <>
                              plain (text "if this button exists") <>
                              rawBlock "html" "</button>" <>
                              divWith nullAttr (para $ text "with this div too.")]
          , test markdownGH "issue #1636" $
              unlines [ "* a"
                      , "* b"
                      , "* c"
                      , "    * d" ]
              =?>
              bulletList [ plain "a"
                         , plain "b"
                         , plain "c" <> bulletList [plain "d"] ]
          ]
        , testGroup "entities"
          [ "character references" =:
            "&lang; &ouml;" =?> para (text "\10216 ö")
          , "numeric" =:
            "&#44;&#x44;&#X44;" =?> para (text ",DD")
          , "in link title" =:
            "[link](/url \"title &lang; &ouml; &#44;\")" =?>
            para (link "/url" "title \10216 ö ," (text "link"))
          ]
        , testGroup "citations"
          [ "simple" =:
            "@item1" =?> para (cite [
               Citation{ citationId = "item1"
                       , citationPrefix = []
                       , citationSuffix = []
                       , citationMode = AuthorInText
                       , citationNoteNum = 0
                       , citationHash = 0
                       }
              ] "@item1")
          , "key starts with digit" =:
            "@1657:huyghens" =?> para (cite [
               Citation{ citationId = "1657:huyghens"
                       , citationPrefix = []
                       , citationSuffix = []
                       , citationMode = AuthorInText
                       , citationNoteNum = 0
                       , citationHash = 0
                       }
              ] "@1657:huyghens")
          ]
        , let citation = cite [Citation "cita" [] [] AuthorInText 0 0] (str "@cita")
          in testGroup "footnote/link following citation" -- issue #2083
          [ "footnote" =:
            unlines [ "@cita[^note]"
                    , ""
                    , "[^note]: note" ] =?>
            para (
              citation <> note (para $ str "note")
            )
          , "normal link" =:
            "@cita [link](http://www.com)" =?>
            para (
              citation <> space <> link "http://www.com" "" (str "link")
            )
          , "reference link" =:
            unlines [ "@cita [link][link]"
                    , ""
                    , "[link]: http://www.com" ] =?>
            para (
              citation <> space <> link "http://www.com" "" (str "link")
            )
          , "short reference link" =:
            unlines [ "@cita [link]"
                    , ""
                    , "[link]: http://www.com" ] =?>
            para (
              citation <> space <> link "http://www.com" "" (str "link")
            )
          , "implicit header link" =:
            unlines [ "# Header"
                    , "@cita [Header]" ] =?>
            headerWith ("header",[],[]) 1 (str "Header") <> para (
              citation <> space <> link "#header" "" (str "Header")
            )
          , "regular citation" =:
            "@cita [foo]" =?>
            para (
              cite [Citation "cita" [] [Str "foo"] AuthorInText 0 0]
                   (str "@cita" <> space <> str "[foo]")
            )
          ]
        ]
| janschulz/pandoc | tests/Tests/Readers/Markdown.hs | gpl-2.0 | 20,928 | 0 | 22 | 6,825 | 3,872 | 2,042 | 1,830 | 383 | 1 |
module MediaWiki.API.Query.Users.Import where
import MediaWiki.API.Utils
import MediaWiki.API.Query.Users
import Text.XML.Light.Types
import Text.XML.Light.Proc ( strContent )
import Control.Monad
import Data.Maybe
-- | Parse an API XML response string into a 'UsersResponse'.
stringXml :: String -> Either (String,[{-Error msg-}String]) UsersResponse
stringXml = parseDoc xml
-- | Extract the users query result from the root @api@ element.
xml :: Element -> Maybe UsersResponse
xml root = do
  guard (elName root == nsName "api")
  queryNode <- pNode "query" (children root)
  usersNode <- pNode "users" (children queryNode)
  let users = mapMaybe xmlPage (children usersNode)
  return emptyUsersResponse{usrUsers=users}
-- | Parse one @user@ element into a 'UsersInfo'; yields Nothing for
-- elements that are not @user@.
xmlPage :: Element -> Maybe UsersInfo
xmlPage e = do
  guard (elName e == nsName "user")
  let name    = pAttr "name" e
      edits   = pAttr "editcount" e >>= readMb
      regDate = pAttr "registration" e
      -- block info is reported only when both attributes are present
      block   = liftM2 (,) (pAttr "blockedby" e) (pAttr "blockreason" e)
      groups  = maybe [] (mapMaybe xmlG . children) (pNode "groups" (children e))
  return emptyUsersInfo
    { usName      = name
    , usEditCount = edits
    , usRegDate   = regDate
    , usBlock     = block
    , usGroups    = groups
    }
  where
    xmlG g = do
      guard (elName g == nsName "g")
      return (strContent g)
| neobrain/neobot | mediawiki/MediaWiki/API/Query/Users/Import.hs | bsd-3-clause | 1,281 | 2 | 16 | 319 | 509 | 255 | 254 | 36 | 2 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
module Stack.Options.Completion
( ghcOptsCompleter
, targetCompleter
, flagCompleter
, projectExeCompleter
) where
import Data.Char (isSpace)
import Data.List (isPrefixOf)
import qualified Data.Map as Map
import Data.Maybe
import qualified Data.Set as Set
import qualified Data.Text as T
import qualified Distribution.PackageDescription as C
import qualified Distribution.Types.UnqualComponentName as C
import Options.Applicative
import Options.Applicative.Builder.Extra
import Stack.Constants (ghcShowOptionsOutput)
import Stack.Options.GlobalParser (globalOptsFromMonoid)
import Stack.Runners
import Stack.Prelude
import Stack.Types.Config
import Stack.Types.NamedComponent
import Stack.Types.SourceMap
-- | Complete GHC options: match the word currently being typed against
-- the output of @ghc --show-options@, preserving any earlier words.
ghcOptsCompleter :: Completer
ghcOptsCompleter = mkCompleter $ \rawInput ->
    let input = unescapeBashArg rawInput
        (revCurrent, revLeading) = break isSpace (reverse input)
        current = reverse revCurrent
        leading = reverse revLeading
        completions = filter (current `isPrefixOf`) ghcShowOptionsOutput
    in return $
         if null current
           then []
           else map (leading ++) completions
-- TODO: Ideally this would pay attention to --stack-yaml, may require
-- changes to optparse-applicative.
-- | Build a 'Completer' that runs the given action inside a fully
-- loaded (silently logging) Stack environment, so completions can
-- inspect the project's build configuration.
buildConfigCompleter
    :: (String -> RIO EnvConfig [String])
    -> Completer
buildConfigCompleter inner = mkCompleter $ \inputRaw -> do
    let input = unescapeBashArg inputRaw
    case input of
        -- If it looks like a flag, skip this more costly completion.
        ('-': _) -> return []
        _ -> do
            go' <- globalOptsFromMonoid False mempty
            -- silence all logging while computing completions
            let go = go' { globalLogLevel = LevelOther "silent" }
            withRunnerGlobal go $ withConfig NoReexec $ withDefaultEnvConfig $ inner input
-- | Complete build targets: every @package:component@ pair of the local
-- project's wanted packages, filtered by the prefix typed so far.
targetCompleter :: Completer
targetCompleter = buildConfigCompleter $ \input -> do
  packages <- view $ buildConfigL.to (smwProject . bcSMWanted)
  comps <- for packages ppComponents
  pure
    $ filter (input `isPrefixOf`)
    $ concatMap allComponentNames
    $ Map.toList comps
  where
    -- Render each component of a package as "package:component".
    allComponentNames (name, comps) =
      map (T.unpack . renderPkgComponent . (name,)) (Set.toList comps)
-- | Complete Cabal flag arguments. Two candidate sets are produced:
-- wildcard flags ("*:flagname", applying to any package) and per-package
-- flags ("package:flagname"). A flag that is currently enabled (either by
-- its default or by a project-file override) is offered with a leading
-- '-' so completion toggles it off.
flagCompleter :: Completer
flagCompleter = buildConfigCompleter $ \input -> do
  bconfig <- view buildConfigL
  gpds <- for (smwProject $ bcSMWanted bconfig) ppGPD
  let wildcardFlags
        = nubOrd
        $ concatMap (\(name, gpd) ->
            map (\fl -> "*:" ++ flagString name fl) (C.genPackageFlags gpd))
        $ Map.toList gpds
      normalFlags
        = concatMap (\(name, gpd) ->
            map (\fl -> packageNameString name ++ ":" ++ flagString name fl)
                (C.genPackageFlags gpd))
        $ Map.toList gpds
      flagString name fl =
        let flname = C.unFlagName $ C.flagName fl
         in (if flagEnabled name fl then "-" else "") ++ flname
      prjFlags =
        case configProject (bcConfig bconfig) of
          PCProject (p, _) -> projectFlags p
          PCGlobalProject -> mempty
          PCNoProject _ -> mempty
      flagEnabled name fl =
        fromMaybe (C.flagDefault fl) $
        Map.lookup (C.flagName fl) $
        Map.findWithDefault Map.empty name prjFlags
  -- Offer wildcard candidates as soon as the user has typed '*'.
  return $ filter (input `isPrefixOf`) $
    case input of
      ('*' : ':' : _) -> wildcardFlags
      ('*' : _) -> wildcardFlags
      _ -> normalFlags
-- | Complete executable names declared by the project's local packages,
-- de-duplicated and filtered by the typed prefix.
projectExeCompleter :: Completer
projectExeCompleter = buildConfigCompleter $ \input -> do
  packages <- view $ buildConfigL.to (smwProject . bcSMWanted)
  -- Traversing the Map applies ppGPD to every package's value.
  gpds <- traverse ppGPD packages
  let exeNames gpd =
        [ C.unUnqualComponentName name
        | (name, _) <- C.condExecutables gpd
        ]
  pure
    $ filter (input `isPrefixOf`)
    $ nubOrd
    $ concatMap exeNames gpds
| juhp/stack | src/Stack/Options/Completion.hs | bsd-3-clause | 4,065 | 0 | 21 | 1,126 | 1,062 | 564 | 498 | 98 | 6 |
{-# LANGUAGE RankNTypes #-}
-- | Helper module for a GHC typechecker test ("should_fail" suite).
module T17213a where
-- | Takes a rank-2 polymorphic function, ignores it, and always fails
-- with the fixed message "ukr". Never meant to be evaluated.
foo :: (forall a. a->a)-> Int
foo x = error "ukr"
| sdiehl/ghc | testsuite/tests/typecheck/should_fail/T17213a.hs | bsd-3-clause | 100 | 0 | 8 | 19 | 37 | 21 | 16 | 4 | 1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<!-- JavaHelp helpset descriptor (Slovenian locale, sl-SI) for the ZAP
     Selenium add-on: declares the TOC, index, full-text search and
     favorites navigation views defined in the sibling map/toc/index files. -->
<helpset version="2.0" xml:lang="sl-SI">
<title>Selenium add-on</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/selenium/src/main/javahelp/org/zaproxy/zap/extension/selenium/resources/help_sl_SI/helpset_sl_SI.hs | apache-2.0 | 960 | 82 | 53 | 156 | 394 | 208 | 186 | -1 | -1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleContexts #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Text.Strict.Lens
-- Copyright : (C) 2012-2015 Edward Kmett
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Edward Kmett <ekmett@gmail.com>
-- Stability : experimental
-- Portability : non-portable
--
----------------------------------------------------------------------------
module Data.Text.Strict.Lens
( packed, unpacked
, builder
, text
, utf8
, _Text
) where
import Control.Lens.Type
import Control.Lens.Getter
import Control.Lens.Fold
import Control.Lens.Iso
import Control.Lens.Prism
import Control.Lens.Setter
import Control.Lens.Traversal
import Data.ByteString (ByteString)
import Data.Monoid
import Data.Text as Strict
import Data.Text.Encoding
import Data.Text.Lazy (toStrict)
import Data.Text.Lazy.Builder
-- $setup
-- >>> :set -XOverloadedStrings
-- >>> import Control.Lens
-- | Pack (or unpack) a 'String' into strict 'Text'.
--
-- >>> "hello"^.packed -- :: Text
-- "hello"
--
-- @
-- 'pack' x ≡ x '^.' 'packed'
-- 'unpack' x ≡ x '^.' 'from' 'packed'
-- 'packed' ≡ 'from' 'unpacked'
-- 'packed' ≡ 'iso' 'pack' 'unpack'
-- @
packed :: Iso' String Text
packed = iso Strict.pack Strict.unpack
{-# INLINE packed #-}

-- | Unpack (or pack) strict 'Text' into a 'String'.
--
-- >>> "hello"^.unpacked -- :: String
-- "hello"
--
-- Provided for notational convenience rather than out of great need; it
-- is simply the inverse of 'packed':
--
-- @
-- 'unpacked' ≡ 'from' 'packed'
-- 'unpacked' ≡ 'iso' 'unpack' 'pack'
-- @
unpacked :: Iso' Text String
unpacked = iso Strict.unpack Strict.pack
{-# INLINE unpacked #-}

-- | An alias for 'unpacked' that makes it more obvious how to use it
-- with the review operator ('#').
--
-- >>> _Text # "hello" -- :: Text
-- "hello"
_Text :: Iso' Text String
_Text = unpacked
{-# INLINE _Text #-}
-- | Convert between strict 'Text' and 'Builder'.
--
-- @
-- 'fromText' x ≡ x '^.' 'builder'
-- 'toStrict' ('toLazyText' x) ≡ x '^.' 'from' 'builder'
-- @
builder :: Iso' Text Builder
builder = iso fromText back
  where back = toStrict . toLazyText
{-# INLINE builder #-}
-- | Traverse the individual characters in strict 'Text'.
--
-- >>> anyOf text (=='o') "hello"
-- True
--
-- When the type is unambiguous, you can also use the more general 'each'.
--
-- @
-- 'text' ≡ 'unpacked' . 'traversed'
-- 'text' ≡ 'each'
-- @
--
-- Note that when just using this as a 'Setter', @'setting' 'Data.Text.map'@ can
-- be more efficient.
text :: IndexedTraversal' Int Text Char
text = unpacked . traversed
{-# INLINE [0] text #-}
-- The rewrite rules below replace generic traversals through 'text' with
-- the specialised strict-'Text' map/fold primitives. The phased
-- @INLINE [0]@ above keeps 'text' un-inlined long enough for them to fire.
{-# RULES
"strict text -> map" text = sets Strict.map :: ASetter' Text Char;
"strict text -> imap" text = isets imapStrict :: AnIndexedSetter' Int Text Char;
"strict text -> foldr" text = foldring Strict.foldr :: Getting (Endo r) Text Char;
"strict text -> ifoldr" text = ifoldring ifoldrStrict :: IndexedGetting Int (Endo r) Text Char;
 #-}
-- | Map over the characters of a strict 'Text' with access to each
-- character's position, threading the index strictly left-to-right.
imapStrict :: (Int -> Char -> Char) -> Text -> Text
imapStrict f = snd . Strict.mapAccumL step 0
  where step i c = i `seq` (i + 1, f i c)
{-# INLINE imapStrict #-}
-- | Right fold over a strict 'Text' that also supplies each character's
-- index; the index is forced at every step to avoid thunk build-up.
ifoldrStrict :: (Int -> Char -> a -> a) -> a -> Text -> a
ifoldrStrict f z t = Strict.foldr step (const z) t 0
  where step c g i = i `seq` f i c (g (i + 1))
{-# INLINE ifoldrStrict #-}
-- | Encode/Decode a strict 'Text' to/from a strict 'ByteString', via UTF-8.
-- Decoding is partial: bytes that are not valid UTF-8 fail to match the
-- prism (via 'decodeUtf8'', which returns 'Left' instead of throwing).
--
-- >>> utf8 # "☃"
-- "\226\152\131"
utf8 :: Prism' ByteString Text
utf8 = prism' encodeUtf8 decode
  where decode = either (const Nothing) Just . decodeUtf8'
{-# INLINE utf8 #-}
| danidiaz/lens | src/Data/Text/Strict/Lens.hs | bsd-3-clause | 3,646 | 0 | 13 | 658 | 502 | 317 | 185 | 50 | 1 |
import qualified Data.Vector as U
-- | Micro-benchmark: build a 100-element vector of 'True' and print the
-- conjunction of its elements (always "True").
main :: IO ()
main = print allTrue
  where allTrue = U.and (U.replicate 100 True)
| dolio/vector | old-testsuite/microsuite/and.hs | bsd-3-clause | 79 | 0 | 10 | 13 | 35 | 19 | 16 | 2 | 1 |
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances #-}
{-# LANGUAGE ExistentialQuantification #-}
{- | This module gives you a way to mount applications under sub-URIs.
For example:
> bugsApp, helpdeskApp, apiV1, apiV2, mainApp :: Application
>
> myApp :: Application
> myApp = mapUrls $
> mount "bugs" bugsApp
> <|> mount "helpdesk" helpdeskApp
> <|> mount "api"
> ( mount "v1" apiV1
> <|> mount "v2" apiV2
> )
> <|> mountRoot mainApp
-}
module Network.Wai.UrlMap (
UrlMap',
UrlMap,
mount',
mount,
mountRoot,
mapUrls
) where
import Control.Applicative
import Data.List
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.ByteString as B
import Network.HTTP.Types
import Network.Wai
-- | A URL prefix, as a list of decoded path segments.
type Path = [Text]
-- | An ordered list of (path-prefix, value) mappings; lookup ('try')
-- uses the first entry whose prefix matches.
newtype UrlMap' a = UrlMap' { unUrlMap :: [(Path, a)] }
-- | Map over the values while leaving every path prefix untouched.
instance Functor UrlMap' where
    fmap f (UrlMap' entries) = UrlMap' [(path, f value) | (path, value) <- entries]
-- NOTE(review): '<*>' keeps the paths of the *right* operand and applies
-- every function from the left operand to each value, discarding the left
-- operand's paths entirely. It mainly exists to satisfy 'Alternative''s
-- superclass constraint; confirm the Applicative laws hold before relying
-- on '<*>' directly.
instance Applicative UrlMap' where
    pure x = UrlMap' [([], x)]
    (UrlMap' xs) <*> (UrlMap' ys) = UrlMap' [ (p, f y) |
                                                (p, y) <- ys,
                                                f <- map snd xs ]
-- | '<|>' concatenates the two mapping lists, so entries from the left
-- operand take precedence during lookup.
instance Alternative UrlMap' where
    empty = UrlMap' []
    UrlMap' xs <|> UrlMap' ys = UrlMap' (xs ++ ys)
-- | A 'UrlMap'' whose values are WAI 'Application's.
type UrlMap = UrlMap' Application
-- | Mount an application under a given multi-segment path. The
-- 'ToApplication' class lets the second argument be either an
-- 'Network.Wai.Application' or a nested 'UrlMap'.
mount' :: ToApplication a => Path -> a -> UrlMap
mount' prefix app = UrlMap' [(prefix, toApplication app)]

-- | Like 'mount'', but for the common case of a single path segment.
mount :: ToApplication a => Text -> a -> UrlMap
mount segment = mount' [segment]

-- | Mount something at the root. Use this for the last application in
-- the block, to avoid 500 errors from none of the applications matching.
mountRoot :: ToApplication a => a -> UrlMap
mountRoot = mount' []
-- | Find the first mapping whose path is a prefix of the request's path,
-- returning the leftover path segments together with the mapped value.
-- Unlike the previous left fold, this recursion stops at the first match
-- instead of walking the whole list (and therefore also works on
-- arbitrarily long candidate lists).
try :: Eq a
    => [a] -- ^ Path info of request
    -> [([a], b)] -- ^ List of applications to match
    -> Maybe ([a], b)
try _ [] = Nothing
try xs ((prefix, y) : rest) =
    case stripPrefix prefix xs of
        Just leftover -> Just (leftover, y)
        Nothing -> try xs rest
-- | Things that can be turned into a WAI 'Application': applications
-- themselves, and nested 'UrlMap's.
class ToApplication a where
    toApplication :: a -> Application
-- | An 'Application' is trivially its own application.
instance ToApplication Application where
    toApplication = id
-- | Dispatch a request against the map: on the first prefix match, strip
-- the prefix from both 'pathInfo' and 'rawPathInfo' and delegate to the
-- mapped application; otherwise answer 404.
instance ToApplication UrlMap where
    toApplication urlMap req sendResponse =
        case try (pathInfo req) (unUrlMap urlMap) of
            Just (newPath, app) ->
                app (req { pathInfo = newPath
                         , rawPathInfo = makeRaw newPath
                         }) sendResponse
            Nothing ->
                sendResponse $ responseLBS
                    status404
                    [(hContentType, "text/plain")]
                    "Not found\n"
      where
        -- Re-render the stripped path as a raw "/seg1/seg2" byte string.
        makeRaw :: [Text] -> B.ByteString
        makeRaw = ("/" `B.append`) . T.encodeUtf8 . T.intercalate "/"
-- | Turn a finished 'UrlMap' into a WAI 'Application'; see the module
-- header for a usage example.
mapUrls :: UrlMap -> Application
mapUrls = toApplication
| AndrewRademacher/wai | wai-extra/Network/Wai/UrlMap.hs | mit | 3,240 | 0 | 13 | 946 | 767 | 425 | 342 | 63 | 2 |
-- A test to show that -XStaticPointers keeps generated CAFs alive.
{-# LANGUAGE StaticPointers #-}
module Main where
import GHC.StaticPtr
import Control.Concurrent
import Data.Maybe (fromJust)
import GHC.Fingerprint
import System.Mem
import System.Mem.Weak
import Unsafe.Coerce (unsafeCoerce)
-- An infinite CAF; the test checks that taking a static pointer to it
-- keeps it (and hence indexing into it) alive across garbage collections.
nats :: [Integer]
nats = [0 .. ]
-- The key of a 'StaticPtr' to some CAF.
nats_key :: StaticKey
nats_key = staticKey (static nats :: StaticPtr [Integer])
-- Force part of 'nats', run GCs, then look the CAF back up through its
-- static key and index further into it. If -XStaticPointers failed to
-- keep the CAF alive, the final deref would crash.
main = do
  let z = nats !! 400
  print z
  performGC
  addFinalizer z (putStrLn "finalizer z")
  print z
  performGC
  -- Give the GC/finalizer machinery time to run before the lookup.
  threadDelay 1000000
  Just p <- unsafeLookupStaticPtr nats_key
  -- unsafeCoerce is needed because the lookup loses the pointer's type.
  print (deRefStaticPtr (unsafeCoerce p) !! 800 :: Integer)
  -- Uncommenting the next line keeps 'nats' alive and would prevent a segfault
  -- if 'nats' were garbage collected.
  -- print (nats !! 900)
| wxwxwwxxx/ghc | testsuite/tests/rts/GcStaticPointers.hs | bsd-3-clause | 840 | 0 | 12 | 150 | 195 | 103 | 92 | 23 | 1 |
{-
From: Paul Sanders <psanders@srd.bt.co.uk>
To: partain
Subject: A puzzle for you
Date: Mon, 28 Oct 91 17:02:19 GMT
I'm struggling with the following code fragment at the moment:
-}
import Data.Array -- 1.3
import Data.Ix -- 1.3
-- | Fill successive rows of the array: each row label is paired with the
-- corresponding list of values from the third argument, one value per
-- column label. Stops as soon as either rows or row data run out.
conv_list :: (Ix a, Ix b) => [a] -> [b] -> [[c]] -> Array (a,b) c -> Array (a,b) c
conv_list rows cols rowData arr0 = go rows rowData arr0
  where go (r:rs) (vals:vss) acc = go rs vss (conv_elems r cols vals acc)
        go _ _ acc = acc

-- | Fill one row: walk column labels and values in lockstep (stopping at
-- the shorter list), updating the array one element at a time.
conv_elems :: (Ix a, Ix b) => a -> [b] -> [c] -> Array (a,b) c -> Array (a,b) c
conv_elems row (col:cs) (v:vs) arr = conv_elems row cs vs (arr // [((row,col), v)])
conv_elems _ _ _ arr = arr
-- A 3x3 array built incrementally (two nested "for loops") from
-- 'ar_list', starting from an array with no defined elements.
ar :: Array (Int, Int) Int
ar = conv_list [(1::Int)..(3::Int)] [(1::Int)..(3::Int)] ar_list init_ar
  where init_ar = array (((1::Int),(1::Int)),((3::Int),(3::Int))) []
ar_list :: [[Int]] -- WDP
-- Row-major source data for 'ar'.
ar_list = [[1,2,3],
           [6,7,8],
           [10,12,15]]
-- Print the fully-built array.
main = putStrLn (show ar)
{-
What it tries to do is turn a list of lists into a 2-d array in an incremental
fashion using 2 nested for-loops. It compiles okay on the prototype compiler
but gives a segmentation fault when it executes. I know I can define in the
array in one go (and I have done) but, for my piece of mind, I want to get this
way working properly.
Is it a bug in the prototype, or is there a glaringly obvious error in my code
which I've been too stupid to spot?
Hoping its the latter,
Paul.
-}
| urbanslug/ghc | testsuite/tests/programs/sanders_array/Main.hs | bsd-3-clause | 1,508 | 1 | 11 | 345 | 519 | 293 | 226 | 22 | 1 |
-- | Non-Futhark-specific utilities. If you find yourself writing
-- general functions on generic data structures, consider putting them
-- here.
--
-- Sometimes it is also preferable to copy a small function rather
-- than introducing a large dependency. In this case, make sure to
-- note where you got it from (and make sure that the license is
-- compatible).
module Futhark.Util
(mapAccumLM,
chunk,
chunks,
dropAt,
takeLast,
dropLast,
mapEither,
maybeNth,
splitFromEnd,
splitAt3,
focusNth,
unixEnvironment,
isEnvVarSet,
directoryContents,
zEncodeString
)
where
import Numeric
import Data.Char
import Data.List
import Data.Either
import Data.Maybe
import System.Environment
import System.IO.Unsafe
import System.Directory.Tree (readDirectoryWith, flattenDir,
DirTree(File), AnchoredDirTree(..))
-- | Like 'mapAccumL', but monadic: thread an accumulator through the
-- list from the left while collecting the per-element results.
mapAccumLM :: Monad m =>
              (acc -> x -> m (acc, y)) -> acc -> [x] -> m (acc, [y])
mapAccumLM step = go
  where
    go acc [] = return (acc, [])
    go acc (x:xs) =
      step acc x >>= \(acc', y) ->
      go acc' xs >>= \(acc'', ys) ->
      return (acc'', y : ys)
-- | @chunk n a@ splits @a@ into @n@-sized chunks. If the length of @a@
-- is not divisible by @n@, the last chunk has fewer than @n@ elements
-- (but is never empty). Diverges for @n <= 0@ on non-empty input.
chunk :: Int -> [a] -> [[a]]
chunk _ [] = []
chunk n xs = piece : chunk n rest
  where (piece, rest) = splitAt n xs
-- | @chunks ns a@ splits @a@ into chunks whose sizes are given by @ns@.
-- It must hold that @sum ns == length a@, otherwise the result may
-- contain too few chunks or drop trailing elements of @a@.
chunks :: [Int] -> [a] -> [[a]]
chunks [] _ = []
chunks (n:ns) xs = piece : chunks ns rest
  where (piece, rest) = splitAt n xs
-- | @dropAt i n@ drops the @n@ elements starting at index @i@.
dropAt :: Int -> Int -> [a] -> [a]
dropAt i n xs = prefix ++ suffix
  where
    prefix = take i xs
    suffix = drop (i + n) xs
-- | @takeLast n l@ takes the last @n@ elements of @l@ (all of @l@ when
-- @n >= length l@).
takeLast :: Int -> [a] -> [a]
takeLast n xs = drop (length xs - n) xs
-- | @dropLast n l@ drops the last @n@ elements of @l@ (the empty list
-- when @n >= length l@).
dropLast :: Int -> [a] -> [a]
dropLast n xs = take (length xs - n) xs
-- | A combination of 'map' and 'partitionEithers': classify every
-- element and collect the 'Left's and 'Right's separately.
mapEither :: (a -> Either b c) -> [a] -> ([b], [c])
mapEither f = partitionEithers . map f
-- | Return the list element at the given index, if the index is valid
-- (non-negative and within the list).
maybeNth :: Integral int => int -> [a] -> Maybe a
maybeNth i l
  | i < 0 = Nothing
  | otherwise =
      case genericDrop i l of
        v:_ -> Just v
        [] -> Nothing
-- | Like 'splitAt', but counting the split point from the end: the
-- second half of the result has @i@ elements.
splitFromEnd :: Int -> [a] -> ([a], [a])
splitFromEnd i xs = splitAt pivot xs
  where pivot = length xs - i
-- | Like 'splitAt', but producing three lists: the first @n@ elements,
-- the next @m@, and the remainder.
splitAt3 :: Int -> Int -> [a] -> ([a], [a], [a])
splitAt3 n m xs = (front, middle, back)
  where
    (front, rest) = splitAt n xs
    (middle, back) = splitAt m rest
-- | Return the list element at the given index, if the index is valid,
-- along with the elements before and after it.
focusNth :: Integral int => int -> [a] -> Maybe ([a], a, [a])
focusNth i xs =
  case genericSplitAt i xs of
    (bef, x:aft) -> Just (bef, x, aft)
    _ -> Nothing
{-# NOINLINE unixEnvironment #-}
-- | The Unix environment when the Futhark compiler started.
-- NOINLINE plus 'unsafePerformIO' make this a one-time snapshot, taken
-- the first time the CAF is forced; later environment changes are not
-- observed.
unixEnvironment :: [(String,String)]
unixEnvironment = unsafePerformIO getEnvironment
-- | Interpret an environment variable as a boolean switch: @"0"@ means
-- 'False', @"1"@ means 'True', and anything else — including the
-- variable being unset — yields the supplied default.
isEnvVarSet :: String -> Bool -> Bool
isEnvVarSet name default_val =
  case lookup name unixEnvironment of
    Just "0" -> False
    Just "1" -> True
    _ -> default_val
-- | Every non-directory file contained in a directory tree.
-- Reads the whole tree via 'readDirectoryWith' and keeps only the 'File'
-- entries of the flattened result.
directoryContents :: FilePath -> IO [FilePath]
directoryContents dir = do
  _ :/ tree <- readDirectoryWith return dir
  return $ mapMaybe isFile $ flattenDir tree
  where isFile (File _ path) = Just path
        isFile _ = Nothing
-- Z-encoding from https://ghc.haskell.org/trac/ghc/wiki/Commentary/Compiler/SymbolNames
--
-- Slightly simplified as we do not need it to deal with tuples and
-- the like.
--
-- (c) The University of Glasgow, 1997-2006
type UserString = String -- As the user typed it
type EncodedString = String -- Encoded form

-- | Z-encode a name so it only contains symbol-safe characters. The
-- first character is additionally escaped when it is a digit, so encoded
-- names never start with one (which would upset linkers).
zEncodeString :: UserString -> EncodedString
zEncodeString [] = []
zEncodeString (c:cs) = encodeDigitChar c ++ concatMap encodeChar cs
-- | True for characters that may appear unencoded in a z-encoded name:
-- ASCII letters, digits and the underscore. @'z'@ and @'Z'@ are excluded
-- because they introduce escape sequences.
unencodedChar :: Char -> Bool
unencodedChar c
  | c == 'z' || c == 'Z' = False
  | c == '_' = True
  | otherwise = isAsciiLower c || isAsciiUpper c || isDigit c
-- | Encode a character that starts a symbol. Digits must be escaped in
-- this position — otherwise names like 9pH-0.1 give linker errors —
-- while all other characters are encoded as usual.
encodeDigitChar :: Char -> EncodedString
encodeDigitChar c =
  if isDigit c
    then encodeAsUnicodeCharar c
    else encodeChar c
-- | Encode a single (non-leading) character. Characters accepted by
-- 'unencodedChar' (letters, digits and @'_'@) pass through unchanged;
-- everything else gets its fixed two-character code from the table below
-- or, failing that, a unicode escape via 'encodeAsUnicodeCharar'.
encodeChar :: Char -> EncodedString
encodeChar c
  | unencodedChar c = [c] -- Common case first
  | otherwise = fromMaybe (encodeAsUnicodeCharar c) (lookup c encodings)
  where
    -- NOTE: GHC's original table also mapped '_' to "zu", but here that
    -- entry was unreachable because 'unencodedChar' accepts '_'; the dead
    -- entry has been removed.
    encodings =
      [ -- Constructors
        ('(', "ZL") -- Needed for things like (,), and (->)
      , (')', "ZR") -- For symmetry with (
      , ('[', "ZM")
      , (']', "ZN")
      , (':', "ZC")
      , ('Z', "ZZ")
        -- Variables
      , ('z', "zz")
      , ('&', "za")
      , ('|', "zb")
      , ('^', "zc")
      , ('$', "zd")
      , ('=', "ze")
      , ('>', "zg")
      , ('#', "zh")
      , ('.', "zi")
      , ('<', "zl")
      , ('-', "zm")
      , ('!', "zn")
      , ('+', "zp")
      , ('\'', "zq")
      , ('\\', "zr")
      , ('/', "zs")
      , ('*', "zt")
      , ('%', "zv")
      ]
-- | Fallback encoding: @z@ followed by the character's code point in hex
-- and a terminating @U@. A @0@ is inserted when the hex string would
-- otherwise start with a letter, keeping the escape unambiguous.
encodeAsUnicodeCharar :: Char -> EncodedString
encodeAsUnicodeCharar c = 'z' : padded
  where
    hexStr = showHex (ord c) "U"
    padded
      | isDigit (head hexStr) = hexStr
      | otherwise = '0' : hexStr
| ihc/futhark | src/Futhark/Util.hs | isc | 6,102 | 0 | 11 | 1,498 | 1,633 | 872 | 761 | 128 | 3 |
-- Read all of stdin and print each line truncated at the first ':'.
main = getContents >>= putStr . main'
-- | For every input line, keep only the text before the first ':'.
main' :: String -> String
main' = unlines . map (takeWhile (/= ':')) . lines
| ryuichiueda/UspMagazineHaskell | Study1_Q1/q1_1.hs | mit | 126 | 0 | 9 | 28 | 56 | 29 | 27 | 3 | 1 |
{-# LANGUAGE FlexibleContexts, OverloadedStrings #-}
module Player.Cheats where
import Player.Types
import Universe
import Resources
import Player
import Data.Map.Strict
import Data.AdditiveGroup
import Reflex.Dom
import GHCJS.DOM.EventM (on)
import GHCJS.DOM.GlobalEventHandlers (keyPress)
-- | Listen for key presses on the whole document and, whenever the key
-- code equal to @fromEnum 'r'@ arrives, emit the 'addResources' cheat as
-- a player action.
cheatsHandler :: PlayerWidget t m x => m ()
cheatsHandler = do
  doc <- askDocument
  event <- wrapDomEvent doc (`on` keyPress) getKeyEvent
  tellPlayerAction $ const addResources <$> (ffilter (== (fromEnum 'r')) $ fromIntegral <$> event)
-- | Cheat action: grant the given player three of every resource.
-- NOTE(review): assumes the eight 'Resources' fields are all resource
-- counts — confirm against the 'Resources' definition.
addResources :: PlayerId -> Universe -> Either String Universe
addResources plId u = Right u { _players = adjust grant plId (_players u) }
  where
    grant plData =
      plData { _playerResources = _playerResources plData ^+^ bonus }
    bonus = Resources 3 3 3 3 3 3 3 3
| martin-kolinek/some-board-game | src/Player/Cheats.hs | mit | 896 | 0 | 14 | 147 | 256 | 138 | 118 | 21 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Day8 (day8, day8', run, Instruction(..), Screen, applyInstruction, parseInput) where
import Data.Array.Unboxed ((//), (!), UArray, bounds, elems, listArray, range)
import Text.Parsec ((<|>) , Parsec , ParseError)
import qualified Text.Parsec as P
-- | One display instruction: fill a rectangle of the given width and
-- height, or rotate a row/column by the given offset.
data Instruction = Rect Int Int | RotateRow Int Int | RotateColumn Int Int deriving (Eq, Ord, Show)
-- | The display, indexed by (x, y); 'True' means the pixel is lit.
type Screen = UArray (Int, Int) Bool
-- | Parse the puzzle input: one instruction per line.
parseInput :: String -> Either ParseError [Instruction]
parseInput = P.parse (P.sepEndBy1 parseInstruction P.endOfLine) ""
-- | Parse a single instruction. 'P.try' backtracks over the shared
-- "rotate " prefix of the row/column forms.
parseInstruction :: Parsec String () Instruction
parseInstruction = P.try parseRect <|> P.try parseRotateRow <|> parseRotateColumn
  where
    parseRect = Rect <$> (P.string "rect " *> number) <*> (P.char 'x' *> number)
    parseRotateRow = RotateRow <$> (P.string "rotate row y=" *> number) <*> (P.string " by " *> number)
    parseRotateColumn = RotateColumn <$> (P.string "rotate column x=" *> number) <*> (P.string " by " *> number)
    number = read <$> P.many1 P.digit
-- | Apply one display instruction. Rectangles light every pixel in the
-- w-by-h corner region; rotations shift a whole row or column, wrapping
-- around via modular arithmetic on the screen's bounds.
applyInstruction :: Screen -> Instruction -> Screen
applyInstruction s (Rect w h) =
  s // [(i, True) | i <- range ((0, 0), (w - 1, h - 1))]
applyInstruction s (RotateRow row n) =
  s // [(i, s ! source i) | i <- range ((0, row), (maxX, row))]
  where
    (_, (maxX, _)) = bounds s
    -- Each pixel takes the value that sat n positions to its left.
    source (px, py) = ((px - n) `mod` (maxX + 1), py)
applyInstruction s (RotateColumn col n) =
  s // [(i, s ! source i) | i <- range ((col, 0), (col, maxY))]
  where
    (_, (_, maxY)) = bounds s
    source (px, py) = (px, (py - n) `mod` (maxY + 1))
-- | The starting screen: 50x6, every pixel off.
empty :: Screen
empty = listArray ((0, 0), (49, 5)) (repeat False)
-- | Render the screen row by row: '#' for lit pixels, '.' for dark ones.
showScreen :: Screen -> [String]
showScreen s = [ [if s ! (x, y) then '#' else '.' | x <- [0 .. maxX]] | y <- [0 .. maxY] ]
  where
    (_, (maxX, maxY)) = bounds s
-- Final, top-level exports
-- | Part 1: the number of lit pixels after running every instruction
-- (-1 when the input fails to parse).
day8 :: String -> Int
day8 = either (const (-1)) (length . filter id . elems . foldl applyInstruction empty) . parseInput
-- | Part 2: the final screen (blank on parse error); render it with
-- 'showScreen' to read the displayed letters.
day8' :: String -> Screen
day8' = either (const empty) (foldl applyInstruction empty) . parseInput
-- Input
-- | Read the puzzle input file and print both answers, the second as a
-- rendered screen.
run :: IO ()
run = do
  putStrLn "Day 8 results: "
  input <- readFile "inputs/day8.txt"
  putStrLn $ "  " ++ show (day8 input)
  mapM_ (putStrLn . ("  " ++)) . showScreen . day8' $ input
| brianshourd/adventOfCode2016 | src/Day8.hs | mit | 2,228 | 0 | 13 | 454 | 1,012 | 563 | 449 | -1 | -1 |
{-# LANGUAGE RankNTypes, FlexibleContexts #-}
module FTag.Sort (sortByTags) where
import qualified Data.Text as T
import Data.List (sortBy)
import Data.Ord (comparing)
import Env
import FTag.Data
import FTag.DB
-- | Sort files by the values they carry for the given tags, compared
-- tag-by-tag in order, with each file's user-visible name appended by
-- 'addUserN' as the final tie-breaker.
sortByTags :: [TagN] -> [StorFileN] -> FTag [StorFileN]
sortByTags ts fs = sortByValues <$> (runInDB (mapM (getValues ts) fs) >>= mapM addUserN)
-- | Append the file's user-visible name (wrapped in 'Just') to its list
-- of sort keys, so names act as a final tie-breaker.
addUserN :: (StorFileN, [Maybe T.Text]) -> FTag (StorFileN, [Maybe T.Text])
addUserN (f, vs) = do
  uf <- toUser f
  return (f, vs ++ [Just (unU uf)])
-- | For one file, look up its value for each of the given tags within a
-- single DB action: 'Just' the value when the file is tagged with a
-- value, 'Nothing' otherwise.
getValues :: [TagN] -> StorFileN -> DBAction (StorFileN, [Maybe T.Text])
getValues ts f = wrap <$> mapM (\t -> toMaybe <$> isTagged t f) ts
  where toMaybe (FTTaggedWithVal v) = Just v
        toMaybe _ = Nothing
        wrap vs = (f, vs)
-- | Sort entries by their (lists of) values and return just the keys;
-- the underlying sort is stable.
sortByValues :: (Ord v) => [(a, [v])] -> [a]
sortByValues entries = [x | (x, _) <- sortBy (comparing snd) entries]
| chrys-h/ftag | src/FTag/Sort.hs | mit | 887 | 0 | 13 | 200 | 378 | 205 | 173 | 20 | 2 |
{- |
Module : IntermediateCode.hs
Description : Intermediate code generation
Maintainer : Philipp Borgers, Tilman Blumenbach, Lyudmila Vaseva, Sascha Zinke,
Maximilian Claus, Michal Ajchman, Nicolas Lehmann, Tudor Soroceanu
License : MIT
Stability : unstable
IntemediateCode.hs takes the output from the SemanticalAnalysis module
(which is a list of paths) and generates LLVM IR code.
It turns every path of the form (PathID, [Lexeme], PathID) into a basic block.
-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module IntermediateCode(process) where
-- imports --
import ErrorHandling as EH
import InterfaceDT as IDT
import FunctionDeclarations
import TypeDefinitions
import Control.Applicative
import Control.Monad.State
import Data.Char
import Data.List
import Data.Map hiding (filter, map)
import Data.Word
import LLVM.General.AST
import LLVM.General.AST.AddrSpace
import LLVM.General.AST.CallingConvention
import LLVM.General.AST.Constant as Constant
import LLVM.General.AST.Float
import LLVM.General.AST.Instruction as Instruction
import LLVM.General.AST.IntegerPredicate
import LLVM.General.AST.Linkage
import LLVM.General.AST.Operand
import qualified LLVM.General.AST.Global as Global
-- | State threaded through code generation of a single function: the
-- basic blocks emitted so far, a counter used to name otherwise unnamed
-- instructions, and a map from constant strings to bookkeeping data.
-- NOTE(review): the meaning of the (Int, Integer) pair is not visible
-- here — confirm against where 'localDict' is populated.
data CodegenState = CodegenState {
  blocks :: [BasicBlock],
  count :: Word, --Count of unnamed Instructions
  localDict :: Map String (Int, Integer)
}
-- | Per-function code generation monad: 'State' over 'CodegenState'.
newtype Codegen a = Codegen { runCodegen :: State CodegenState a }
  deriving (Functor, Applicative, Monad, MonadState CodegenState)
-- | State for module-level code generation: the constant-string
-- dictionary shared by all functions.
data GlobalCodegenState = GlobalCodegenState {
  dict :: Map String (Int, Integer)
}
-- | Module-level code generation monad over 'GlobalCodegenState'.
newtype GlobalCodegen a = GlobalCodegen { runGlobalCodegen :: State GlobalCodegenState a }
  deriving (Functor, Applicative, Monad, MonadState GlobalCodegenState)
-- | Run a module-level codegen action with the given constant dictionary.
execGlobalCodegen :: Map String (Int, Integer) -> GlobalCodegen a -> a
execGlobalCodegen d m = evalState (runGlobalCodegen m) $ GlobalCodegenState d
-- | Run a per-function codegen action starting with no blocks and a
-- zeroed instruction counter.
execCodegen :: Map String (Int, Integer) -> Codegen a -> a
execCodegen d m = evalState (runCodegen m) $ CodegenState [] 0 d
-- |Generate module from list of definitions.
-- Wraps the given LLVM definitions in a module named "rail-heaven".
generateModule :: [Definition] -> Module
generateModule definitions = defaultModule {
  moduleName = "rail-heaven",
  moduleDefinitions = definitions
}
-- |Generate a ret statement, returning a 32-bit Integer to the caller.
-- While we use 64-bit integers everywhere else, our "main" function
-- needs to return an "int" which usually is 32-bits even on 64-bit systems.
-- Emits an unnamed (Do) @ret i32@ terminator.
terminator :: Integer -- ^The 32-bit Integer to return.
  -> Named Terminator -- ^The return statement.
terminator ret = Do Ret {
  returnOperand = Just $ ConstantOperand $ Int 32 ret,
  metadata' = []
}
-- |Generate global byte array (from a constant string).
-- The global is a C-style string: the characters' byte values followed by
-- a terminating NUL, hence length @length s + 1@.
-- Partial: only defined for 'Constant' lexemes — callers must filter
-- first (see 'getAllCons').
createGlobalString :: Lexeme -> Global
createGlobalString (Constant s) = globalVariableDefaults {
  Global.type' = ArrayType {
    nArrayElements = fromInteger l,
    elementType = IntegerType {typeBits = 8}
  },
  Global.initializer = Just Array {
    memberType = IntegerType {typeBits = 8},
    memberValues = map trans s ++ [Int { integerBits = 8, integerValue = 0 }]
  }
}
  where
    l = toInteger $ 1 + length s
    trans c = Int { integerBits = 8, integerValue = toInteger $ ord c }
-- | Create constant strings/byte arrays for a module: one global byte
-- array per 'Constant' lexeme found anywhere in the ASTs.
generateConstants :: [AST] -> [Global]
generateConstants = map createGlobalString . getAllCons

-- | Get all 'Constant's in a module, path by path.
getAllCons :: [AST] -> [Lexeme]
getAllCons = concatMap consOfAst
  where
    consOfAst :: AST -> [Lexeme]
    consOfAst (_, paths) = concatMap consOfPath paths
    consOfPath :: (Int, [Lexeme], Int) -> [Lexeme]
    consOfPath (_, lexemes, _) = filter isConstant lexemes
    isConstant :: Lexeme -> Bool
    isConstant (Constant _) = True
    isConstant _ = False
--------------------------------------------------------------------------------
-- |Generate global variables for push and pop from and into variables.
-- The global is named after the variable and initialised with the
-- variable's own name as a NUL-terminated byte array.
-- NOTE(review): storing the name bytes as the initial value looks
-- surprising for a variable slot — confirm this is what the runtime
-- expects.
-- Partial: only defined for 'Pop' lexemes — callers filter first (see
-- 'generateVariables').
createGlobalVariable :: Lexeme -> Global
createGlobalVariable (Pop v) = globalVariableDefaults {
  Global.name = Name v,
  Global.type' = ArrayType {
    nArrayElements = fromInteger l,
    elementType = IntegerType {typeBits = 8}
  },
  Global.initializer = Just Array {
    memberType = IntegerType {typeBits = 8},
    memberValues = map trans v ++ [Int { integerBits = 8, integerValue = 0 }]
  }
}
  where
    l = toInteger $ 1 + length v
    trans c = Int { integerBits = 8, integerValue = toInteger $ ord c }
-- | Generate all global variable definitions for a module: one global
-- per distinct 'Pop' lexeme within each AST.
generateVariables :: [AST] -> [Global]
generateVariables = map createGlobalVariable . getAllVars
  where
    getAllVars :: [AST] -> [Lexeme]
    getAllVars = concatMap varsOfAst
    varsOfAst :: AST -> [Lexeme]
    -- nub removes duplicate Pops within one AST; O(n^2), but the number
    -- of variables per program is small.
    varsOfAst (_, paths) = nub $ concatMap varsOfPath paths
    varsOfPath :: (Int, [Lexeme], Int) -> [Lexeme]
    varsOfPath (_, lexemes, _) = filter isPop lexemes
    isPop :: Lexeme -> Bool
    isPop (Pop _) = True
    isPop _ = False
--------------------------------------------------------------------------------
-- |Generate an instruction for the 'u'nderflow check command.
generateInstruction :: (Lexeme, String) -> Codegen [Named Instruction]
generateInstruction (Underflow, funcName) =
return [Do LLVM.General.AST.Call {
isTailCall = False,
callingConvention = C,
returnAttributes = [],
function = Right $ ConstantOperand $ GlobalReference $ Name "underflow_check",
arguments = [],
functionAttributes = [],
metadata = []
}]
-- |Generate instructions for junctions.
generateInstruction (Junction label, funcName) = do
index <- fresh
index2 <- fresh
return [ UnName index := LLVM.General.AST.Call {
isTailCall = False,
callingConvention = C,
returnAttributes = [],
function = Right $ ConstantOperand $ GlobalReference $ Name "pop_bool",
arguments = [],
functionAttributes = [],
metadata = []
}, UnName index2 := LLVM.General.AST.ICmp {
LLVM.General.AST.iPredicate = LLVM.General.AST.IntegerPredicate.EQ,
LLVM.General.AST.operand0 = LocalReference (UnName index),
LLVM.General.AST.operand1 = ConstantOperand $ Int 64 0,
metadata = []
}]
-- |Generate instruction for pop into a variable
generateInstruction (Pop name, funcName) = do
index <- fresh
return [ UnName index := LLVM.General.AST.Call {
isTailCall = False,
callingConvention = C,
returnAttributes = [],
function = Right $ ConstantOperand $ GlobalReference $ Name "pop_into",
arguments = [ (LocalReference $ Name "table", []),
(ConstantOperand Constant.GetElementPtr {
Constant.inBounds = True,
Constant.address = Constant.GlobalReference (Name name),
Constant.indices = [
Int { integerBits = 8, integerValue = 0 },
Int { integerBits = 8, integerValue = 0 }
]}, [])],
functionAttributes = [],
metadata = []
}]
-- |Generate instruction for push from a variable
generateInstruction (Push name, funcName) = do
index <- fresh
return [ UnName index := LLVM.General.AST.Call {
isTailCall = False,
callingConvention = C,
returnAttributes = [],
function = Right $ ConstantOperand $ GlobalReference $ Name "push_from",
arguments = [(LocalReference $ Name "table", []),
(ConstantOperand Constant.GetElementPtr {
Constant.inBounds = True,
Constant.address = Constant.GlobalReference (Name name),
Constant.indices = [
Int { integerBits = 8, integerValue = 0 },
Int { integerBits = 8, integerValue = 0 }
]}, [])],
functionAttributes = [],
metadata = []
}]
-- |Generate instruction for push of a constant.
-- access to our push function definied in stack.ll??
-- http://llvm.org/docs/LangRef.html#call-instruction
generateInstruction (Constant value, funcName) = do
index <- fresh
dict <- gets localDict
return [UnName index := LLVM.General.AST.Call {
-- The optional tail and musttail markers indicate that the optimizers
--should perform tail call optimization.
isTailCall = False,
-- The optional "cconv" marker indicates which calling convention the call
-- should use. If none is specified, the call defaults to using C calling
-- conventions.
callingConvention = C,
-- The optional Parameter Attributes list for return values. Only 'zeroext',
-- 'signext', and 'inreg' attributes are valid here
returnAttributes = [],
-- actual function to call
function = Right $ ConstantOperand $ GlobalReference $ Name "push_string_cpy",
-- argument list whose types match the function signature argument types
-- and parameter attributes. All arguments must be of first class type. If
-- the function signature indicates the function accepts a variable number of
-- arguments, the extra arguments can be specified.
arguments = [
-- The 'getelementptr' instruction is used to get the address of a
-- subelement of an aggregate data structure. It performs address
-- calculation only and does not access memory.
-- http://llvm.org/docs/LangRef.html#getelementptr-instruction
(ConstantOperand Constant.GetElementPtr {
Constant.inBounds = True,
Constant.address = Constant.GlobalReference (UnName $ fromInteger $ snd $ dict ! value),
Constant.indices = [
Int { integerBits = 8, integerValue = 0 },
Int { integerBits = 8, integerValue = 0 }
]
}, [])
],
-- optional function attributes list. Only 'noreturn', 'nounwind',
-- 'readonly' and 'readnone' attributes are valid here.
functionAttributes = [],
metadata = []
}]
-- Every Lexeme is translated to one or more LLVM instructions by
-- generateInstruction; the caller flattens the per-lexeme lists (with concat)
-- into the instruction stream of a BasicBlock.

-- | Print the top of the stack to stdout via the runtime function @print@.
generateInstruction (Output, _) =
  return
    [ Do LLVM.General.AST.Call
        { isTailCall = False
        , callingConvention = C
        , returnAttributes = []
        , function = Right $ ConstantOperand $ GlobalReference $ Name "print"
        , arguments = []
        , functionAttributes = []
        , metadata = []
        } ]
-- | Abort the program via the runtime function @crash@ (flag argument 1).
generateInstruction (Boom, _) =
  return
    [ Do LLVM.General.AST.Call
        { isTailCall = False
        , callingConvention = C
        , returnAttributes = []
        , function = Right $ ConstantOperand $ GlobalReference $ Name "crash"
        , arguments = [(ConstantOperand $ Int 1 1, [])]
        , functionAttributes = []
        , metadata = []
        } ]
-- | Read input via the runtime function @input@.
generateInstruction (Input, _) =
  return
    [ Do LLVM.General.AST.Call
        { isTailCall = False
        , callingConvention = C
        , returnAttributes = []
        , function = Right $ ConstantOperand $ GlobalReference $ Name "input"
        , arguments = []
        , functionAttributes = []
        , metadata = []
        } ]
-- | Check for end of input via the runtime function @eof_check@.
generateInstruction (EOF, _) =
  return
    [ Do LLVM.General.AST.Call
        { isTailCall = False
        , callingConvention = C
        , returnAttributes = []
        , function = Right $ ConstantOperand $ GlobalReference $ Name "eof_check"
        , arguments = []
        , functionAttributes = []
        , metadata = []
        } ]
-- | Addition via the runtime function @add@.
generateInstruction (Add1, _) =
  return
    [ Do LLVM.General.AST.Call
        { isTailCall = False
        , callingConvention = C
        , returnAttributes = []
        , function = Right $ ConstantOperand $ GlobalReference $ Name "add"
        , arguments = []
        , functionAttributes = []
        , metadata = []
        } ]
-- | Remainder via the runtime function @rem@.
generateInstruction (Remainder, _) =
  return
    [ Do LLVM.General.AST.Call
        { isTailCall = False
        , callingConvention = C
        , returnAttributes = []
        , function = Right $ ConstantOperand $ GlobalReference $ Name "rem"
        , arguments = []
        , functionAttributes = []
        , metadata = []
        } ]
-- | Type inspection via the runtime function @type@.
generateInstruction (RType, _) =
  return
    [ Do LLVM.General.AST.Call
        { isTailCall = False
        , callingConvention = C
        , returnAttributes = []
        , function = Right $ ConstantOperand $ GlobalReference $ Name "type"
        , arguments = []
        , functionAttributes = []
        , metadata = []
        } ]
-- | Subtraction via the runtime function @sub@.
generateInstruction (Subtract, _) =
  return
    [ Do LLVM.General.AST.Call
        { isTailCall = False
        , callingConvention = C
        , returnAttributes = []
        , function = Right $ ConstantOperand $ GlobalReference $ Name "sub"
        , arguments = []
        , functionAttributes = []
        , metadata = []
        } ]
-- | Multiplication via the runtime function @mult@.
generateInstruction (Multiply, _) =
  return
    [ Do LLVM.General.AST.Call
        { isTailCall = False
        , callingConvention = C
        , returnAttributes = []
        , function = Right $ ConstantOperand $ GlobalReference $ Name "mult"
        , arguments = []
        , functionAttributes = []
        , metadata = []
        } ]
-- | Division via the runtime function @div@.
generateInstruction (Divide, _) =
  return
    [ Do LLVM.General.AST.Call
        { isTailCall = False
        , callingConvention = C
        , returnAttributes = []
        , function = Right $ ConstantOperand $ GlobalReference $ Name "div"
        , arguments = []
        , functionAttributes = []
        , metadata = []
        } ]
-- | String length via the runtime function @strlen@.
generateInstruction (Size, _) =
  return
    [ Do LLVM.General.AST.Call
        { isTailCall = False
        , callingConvention = C
        , returnAttributes = []
        , function = Right $ ConstantOperand $ GlobalReference $ Name "strlen"
        , arguments = []
        , functionAttributes = []
        , metadata = []
        } ]
-- | String append via the runtime function @strapp@.
generateInstruction (Append, _) =
  return
    [ Do LLVM.General.AST.Call
        { isTailCall = False
        , callingConvention = C
        , returnAttributes = []
        , function = Right $ ConstantOperand $ GlobalReference $ Name "strapp"
        , arguments = []
        , functionAttributes = []
        , metadata = []
        } ]
-- | String cut via the runtime function @strcut@.
generateInstruction (Cut, _) =
  return
    [ Do LLVM.General.AST.Call
        { isTailCall = False
        , callingConvention = C
        , returnAttributes = []
        , function = Right $ ConstantOperand $ GlobalReference $ Name "strcut"
        , arguments = []
        , functionAttributes = []
        , metadata = []
        } ]
-- | Equality test via the runtime function @equal@.
generateInstruction (Equal, _) =
  return
    [ Do LLVM.General.AST.Call
        { isTailCall = False
        , callingConvention = C
        , returnAttributes = []
        , function = Right $ ConstantOperand $ GlobalReference $ Name "equal"
        , arguments = []
        , functionAttributes = []
        , metadata = []
        } ]
-- | Greater-than test via the runtime function @greater@.
generateInstruction (Greater, _) =
  return
    [ Do LLVM.General.AST.Call
        { isTailCall = False
        , callingConvention = C
        , returnAttributes = []
        , function = Right $ ConstantOperand $ GlobalReference $ Name "greater"
        , arguments = []
        , functionAttributes = []
        , metadata = []
        } ]
-- | Generate the instruction for the start lexeme (runtime function @start@).
generateInstruction (Start, _) = do
  callId <- fresh
  -- NOTE(review): this second counter bump is never referenced afterwards;
  -- presumably it keeps the temporary numbering in sync -- TODO confirm.
  _skipped <- fresh
  return
    [ UnName callId := LLVM.General.AST.Call
        { isTailCall = False
        , callingConvention = C
        , returnAttributes = []
        , function = Right $ ConstantOperand $ GlobalReference $ Name "start"
        , arguments = []
        , functionAttributes = []
        , metadata = []
        } ]
-- |Generate instructions for lambda push.
--
-- Emits, in order: malloc a 24-byte chunk, cast it to a @struct.table@
-- pointer, initialise it, copy the current symbol table into it, allocate a
-- slot for the lambda's function pointer, store the pointer to the generated
-- function named "funcName!label", and finally push pointer plus table via
-- the runtime function @push_lambda@. The six 'fresh' calls reserve the
-- temporary numbers used below; their order is significant.
generateInstruction (Lambda label, funcName) = do
  index <- fresh
  index2 <- fresh
  index3 <- fresh
  index4 <- fresh
  index5 <- fresh
  index6 <- fresh
  -- %index = call malloc(24): raw storage for the lambda's own symbol table.
  return [ UnName index := LLVM.General.AST.Call {
    isTailCall = False,
    callingConvention = C,
    returnAttributes = [],
    function = Right $ ConstantOperand $ GlobalReference $ Name "malloc",
    arguments = [(ConstantOperand $ Int 64 24, [])],
    functionAttributes = [],
    metadata = []
  },
  -- %index2 = bitcast the raw pointer to %struct.table*.
  UnName index2 := LLVM.General.AST.BitCast {
    Instruction.operand0 = LocalReference $ UnName index,
    Instruction.type' = PointerType (NamedTypeReference $ Name "struct.table") (AddrSpace 0),
    metadata = []
  },
  -- Initialise the freshly allocated table.
  UnName index3 := LLVM.General.AST.Call {
    isTailCall = False,
    callingConvention = C,
    returnAttributes = [],
    function = Right $ ConstantOperand $ GlobalReference $ Name "initialise",
    arguments = [(LocalReference $ UnName index2, [])],
    functionAttributes = [],
    metadata = []
  },
  -- Copy the enclosing function's symbol table (%table) into the new one.
  UnName index4 := LLVM.General.AST.Call {
    isTailCall = False,
    callingConvention = C,
    returnAttributes = [],
    function = Right $ ConstantOperand $ GlobalReference $ Name "copy_symbol_table",
    arguments = [(LocalReference $ Name "table", []), (LocalReference $ UnName index2, [])],
    functionAttributes = [],
    metadata = []
  },
  -- %index5 = stack slot holding the lambda's function pointer.
  UnName index5 := LLVM.General.AST.Alloca {
    allocatedType = PointerType functionReturnLambda (AddrSpace 0),
    numElements = Nothing,
    alignment = 8,
    metadata = []
  },
  -- Store the address of the generated function "funcName!label".
  Do LLVM.General.AST.Store {
    volatile = False,
    Instruction.address = LocalReference $ UnName index5,
    value = ConstantOperand $ GlobalReference $ Name (funcName ++ "!" ++ show label),
    maybeAtomicity = Nothing,
    alignment = 8,
    metadata = []
  },
  -- Push the (function pointer, table) pair onto the runtime stack.
  UnName index6 := LLVM.General.AST.Call {
    isTailCall = False,
    callingConvention = C,
    returnAttributes = [],
    function = Right $ ConstantOperand $ GlobalReference $ Name "push_lambda",
    arguments = [(LocalReference $ UnName index5, []), (LocalReference $ UnName index2, [])],
    functionAttributes = [],
    metadata = []
  }]
-- | Generate the instruction for the finish lexeme (runtime function @finish@).
generateInstruction (Finish, _) =
  return
    [ Do LLVM.General.AST.Call
        { isTailCall = False
        , callingConvention = C
        , returnAttributes = []
        , function = Right $ ConstantOperand $ GlobalReference $ Name "finish"
        , arguments = []
        , functionAttributes = []
        , metadata = []
        } ]
-- |Generate instructions for an anonymous function call (empty call target):
-- pop a lambda from the stack, extract its function pointer and its captured
-- symbol table, then invoke the pointer with the table as argument.
-- The 'fresh' calls reserve the temporary numbers used below, in order.
generateInstruction (IDT.Call "", funcName) = do
  index <- fresh
  index2 <- fresh
  index6 <- fresh
  -- %index = the popped lambda value.
  return [ UnName index := LLVM.General.AST.Call {
    isTailCall = False,
    callingConvention = C,
    returnAttributes = [],
    function = Right $ ConstantOperand $ GlobalReference $ Name "pop_lambda",
    arguments = [],
    functionAttributes = [],
    metadata = []
  },
  -- %index2 = the lambda's function pointer.
  UnName index2 := LLVM.General.AST.Call {
    isTailCall = False,
    callingConvention = C,
    returnAttributes = [],
    function = Right $ ConstantOperand $ GlobalReference $ Name "get_lambda_pointer",
    arguments = [(LocalReference $ UnName index, [])],
    functionAttributes = [],
    metadata = []
  },
  -- %index6 = the lambda's captured symbol table.
  UnName index6 := LLVM.General.AST.Call {
    isTailCall = False,
    callingConvention = C,
    returnAttributes = [],
    function = Right $ ConstantOperand $ GlobalReference $ Name "get_lambda_table",
    arguments = [(LocalReference $ UnName index, [])],
    functionAttributes = [],
    metadata = []
  },
  -- Indirect call through the extracted pointer, passing the table.
  Do LLVM.General.AST.Call {
    isTailCall = False,
    callingConvention = C,
    returnAttributes = [],
    function = Right $ LocalReference $ UnName index2,
    arguments = [(LocalReference $ UnName index6, [])],
    functionAttributes = [],
    metadata = []
  }]
-- | Direct call to the function with the given (non-empty) name.
generateInstruction (IDT.Call functionName, _) =
  return
    [ Do LLVM.General.AST.Call
        { isTailCall = False
        , callingConvention = C
        , returnAttributes = []
        , function = Right $ ConstantOperand $ GlobalReference $ Name functionName
        , arguments = []
        , functionAttributes = []
        , metadata = []
        } ]
-- | Push an empty list via the runtime function @gen_list_push_nil@.
generateInstruction (Nil, _) =
  return
    [ Do LLVM.General.AST.Call
        { isTailCall = False
        , callingConvention = C
        , returnAttributes = []
        , function = Right $ ConstantOperand $ GlobalReference $ Name "gen_list_push_nil"
        , arguments = []
        , functionAttributes = []
        , metadata = []
        } ]
-- | List cons via the runtime function @gen_list_cons@.
generateInstruction (Cons, _) =
  return
    [ Do LLVM.General.AST.Call
        { isTailCall = False
        , callingConvention = C
        , returnAttributes = []
        , function = Right $ ConstantOperand $ GlobalReference $ Name "gen_list_cons"
        , arguments = []
        , functionAttributes = []
        , metadata = []
        } ]
-- | List breakup via the runtime function @gen_list_breakup@.
generateInstruction (Breakup, _) =
  return
    [ Do LLVM.General.AST.Call
        { isTailCall = False
        , callingConvention = C
        , returnAttributes = []
        , function = Right $ ConstantOperand $ GlobalReference $ Name "gen_list_breakup"
        , arguments = []
        , functionAttributes = []
        , metadata = []
        } ]
-- |Fallback for unhandled lexemes (generates no-op).
-- NOTE(review): the "no-op" is a dummy float addition (1.0 + 1.0) whose
-- result is never bound, i.e. an effect-free placeholder instruction.
generateInstruction _ = return [ Do $ Instruction.FAdd (ConstantOperand $ Float $ Single 1.0) (ConstantOperand $ Float $ Single 1.0) [] ]
-- |Appends the function name to the lexemes/instructions.
--
-- The function name is only relevant for the Lambda instruction, because we use
-- "functionName"!"jumpLable" as name for the Lambda functions.
--
-- (Rewritten from manual recursion -- which also carried a redundant
-- singleton clause -- to a simple 'map'; behaviour is unchanged.)
appendName :: [a] -> String -> [(a, String)]
appendName xs name = map (\x -> (x, name)) xs
-- |Generate the instructions making up one basic block.
--
-- A block whose jump label is 0 is closed with @terminator 0@; every other
-- block ends in an unconditional branch to its jump label, or -- when the
-- block contains a 'Junction' lexeme -- in a conditional branch that tests
-- the most recently generated temporary (@gets count@).
generateBasicBlock :: ((Int, [Lexeme], Int), String) -> Codegen BasicBlock
generateBasicBlock ((label, instructions, 0), name) = do
  generated <- mapM generateInstruction (appendName instructions name)
  return $ BasicBlock (Name $ "l_" ++ show label) (concat generated) $ terminator 0
generateBasicBlock ((label, instructions, jumpLabel), name) = do
  generated <- mapM generateInstruction (appendName instructions name)
  i <- gets count
  case filter isJunction instructions of
    [Junction junctionLabel] -> return $ BasicBlock (Name $ "l_" ++ show label) (concat generated) $ condbranch junctionLabel i
    [] -> return $ BasicBlock (Name $ "l_" ++ show label) (concat generated) branch
    -- Previously a non-exhaustive case: fail with a diagnostic instead of a
    -- bare pattern-match error when a block holds several junctions.
    _ -> error $ "generateBasicBlock: block " ++ show label ++ " contains more than one Junction"
  where
    isJunction (Junction _) = True
    isJunction _ = False
    -- Branch on the last temporary: true -> junction target, false -> jump label.
    condbranch junctionLabel i = Do CondBr {
      condition = LocalReference $ UnName i,
      trueDest = Name $ "l_" ++ show junctionLabel,
      falseDest = Name $ "l_" ++ show jumpLabel,
      metadata' = []
    }
    -- Unconditional fall-through to the jump label.
    branch = Do Br {
      dest = Name $ "l_" ++ show jumpLabel,
      metadata' = []
    }
-- | Generate every basic block of one function, tagging each lexeme group
-- with the function's name first.
generateBasicBlocks :: [(Int, [Lexeme], Int)] -> String -> Codegen [BasicBlock]
generateBasicBlocks lexemes name = mapM generateBasicBlock (lexemes `appendName` name)
-- |Generate a function definition from an AST.
--
-- Every function starts with an "entry" block that mallocs a fresh
-- @struct.table@, casts and initialises it, and then jumps to "l_1", the
-- first regular block. Functions whose name contains \"!\" are lambda
-- functions: they additionally receive the enclosing symbol table as
-- parameter @t@ and copy it into the new table before jumping.
--
-- (The two previously duplicated branches now share the entry-block
-- construction; only the parameter list and the copy step differ.)
generateFunction :: AST -> GlobalCodegen Definition
generateFunction (name, lexemes) = do
  constDict <- gets dict
  let isLambda = "!" `isInfixOf` name
      tablePtrType = PointerType (NamedTypeReference $ Name "struct.table") (AddrSpace 0)
      -- %table_alloc = call malloc(24): raw memory for the symbol table.
      allocTable = Name "table_alloc" := LLVM.General.AST.Call {
          isTailCall = False,
          callingConvention = C,
          returnAttributes = [],
          function = Right $ ConstantOperand $ GlobalReference $ Name "malloc",
          arguments = [(ConstantOperand $ Int 64 24, [])],
          functionAttributes = [],
          metadata = []
        }
      -- %table = bitcast %table_alloc to %struct.table*.
      castTable = Name "table" := LLVM.General.AST.BitCast {
          Instruction.operand0 = LocalReference $ Name "table_alloc",
          Instruction.type' = tablePtrType,
          metadata = []
        }
      initTable = Name "" := LLVM.General.AST.Call {
          isTailCall = False,
          callingConvention = C,
          returnAttributes = [],
          function = Right $ ConstantOperand $ GlobalReference $ Name "initialise",
          arguments = [(LocalReference $ Name "table", [])],
          functionAttributes = [],
          metadata = []
        }
      -- Lambda functions copy the caller's table (parameter %t) into %table.
      copyTable = Name "" := LLVM.General.AST.Call {
          isTailCall = False,
          callingConvention = C,
          returnAttributes = [],
          function = Right $ ConstantOperand $ GlobalReference $ Name "copy_symbol_table",
          arguments = [(LocalReference $ Name "t", []), (LocalReference $ Name "table", [])],
          functionAttributes = [],
          metadata = []
        }
      entryInstrs = [allocTable, castTable, initTable] ++ [copyTable | isLambda]
      entryBlock = BasicBlock (Name "entry") entryInstrs
                     (Do Br { dest = Name "l_1", metadata' = [] })
      base = Global.functionDefaults {
          Global.name = Name name,
          Global.returnType = IntegerType 32,
          Global.basicBlocks = entryBlock : execCodegen constDict (generateBasicBlocks lexemes name)
        }
  return $ GlobalDefinition $
    if isLambda
      then base { Global.parameters =
                    ( [ Parameter tablePtrType (Name "t") [] ], False ) }
      else base
-- | Reserve the next unnamed-temporary index by bumping the state counter;
-- returns the freshly reserved index.
fresh :: Codegen Word
fresh = do
  current <- gets count
  modify $ \s -> s { count = current + 1 }
  return (current + 1)
-- | Generate the list of LLVM definitions for a list of ASTs.
generateFunctions :: [AST] -> GlobalCodegen [Definition]
generateFunctions asts = mapM generateFunction asts
-- | Turn a global into an unnamed, internal, constant definition.
--
-- The constant receives a numeric name (e.g. @0) derived from @index@.
generateGlobalDefinition :: Integer -> Global -> Definition
generateGlobalDefinition index def =
  GlobalDefinition def
    { Global.name = UnName $ fromInteger index
    , Global.isConstant = True
    , Global.linkage = Internal
    , Global.hasUnnamedAddr = True
    }
-- | Turn a global holding a variable name into an internal constant
-- definition. Such globals are passed to backend functions like 'pop_into'.
--
-- NOTE(review): unlike 'generateGlobalDefinition' the index argument is
-- ignored, so the global keeps whatever name @def@ already carries --
-- TODO confirm this asymmetry is intentional.
generateGlobalDefinitionVar :: Integer -> Global -> Definition
generateGlobalDefinitionVar _index def =
  GlobalDefinition def
    { Global.isConstant = True
    , Global.linkage = Internal
    , Global.hasUnnamedAddr = True
    }
-- |Entry point into module: build the complete LLVM module for the
-- intermediate-code input. The module consists of the numbered string
-- constants, the variable-name globals, the fixed set of runtime type and
-- function declarations listed below, and finally the code generated from
-- the input's own functions.
process :: IDT.SemAna2InterCode -> IDT.InterCode2Backend
process (IDT.ISI input) = IDT.IIB $ generateModule $ constants ++ variables ++
  [
    stackElementTypeDef,
    structTable,
    lambdaElement,
    underflowCheck,
    FunctionDeclarations.print,
    crash,
    start,
    finish,
    inputFunc,
    eofCheck,
    pushStringCpy,
    pop,
    peek,
    add,
    sub,
    rem1,
    mul,
    div1,
    streq,
    strlen,
    strapp,
    strcut,
    popInt,
    equal,
    greater,
    popInto,
    pushFrom,
    popBool,
    initialiseSymbolTable,
    malloc,
    type1,
    copySymbolTable,
    pushLambda,
    getLambda,
    popLambda,
    getTable,
    listPushNil,
    listCons,
    listBreakup
  ] ++ codegen input
  where
    -- Numbered constant globals (@0, @1, ...) for the program's constants.
    constants = zipWith generateGlobalDefinition [0..] $ generateConstants input
    -- Globals carrying variable names for the backend (see 'pop_into').
    variables = zipWith generateGlobalDefinitionVar [0..] $ generateVariables input
    -- Maps each constant string to (its length, its global's index).
    constantPool = fromList $ zipWith createConstantPoolEntry [0..] $ getAllCons input
    createConstantPoolEntry index (Constant s) = (s, (length s, index))
    codegen input = execGlobalCodegen constantPool $ generateFunctions input
-- vim:ts=2 sw=2 et
| SWP-Ubau-SoSe2014-Haskell/SWPSoSe14 | src/RailCompiler/IntermediateCode.hs | mit | 28,804 | 0 | 26 | 6,419 | 7,402 | 4,245 | 3,157 | 615 | 3 |
import Control.Applicative
import Data.Monoid hiding (Sum, First)
import Test.QuickCheck hiding (Success)
import Test.QuickCheck.Checkers
import Test.QuickCheck.Classes
-- List Applicative Exercise
-- Implement the List Applicative. Writing a minimally complete Applicative
-- instance calls for writing the definitions of both pure and <*>. We’re going
-- to provide a hint as well. Use the checkers library to validate your
-- Applicative instance.
-- | Test lists for equality (fallibly) by comparing finite prefixes
-- of them. The depth of 10 used below is arbitrary; there may be
-- better choices than that.
instance Eq a => EqProp (List a) where
  -- Compare only the first 10 elements of each list.
  xs =-= ys = eq (takeList 10 xs) (takeList 10 ys)
-- | Take at most @n@ leading elements of a 'List'.
takeList :: Int -> List a -> List a
takeList n l = case l of
  Cons x rest | n > 0 -> Cons x (takeList (n - 1) rest)
  _ -> Nil
-- | A hand-rolled cons list, used throughout the Applicative exercises.
data List a = Nil
            | Cons a (List a)
            deriving (Eq, Show)
-- | Map a function over every element.
instance Functor List where
  fmap _ Nil = Nil
  fmap g (Cons x rest) = Cons (g x) (fmap g rest)
-- | Cartesian-style applicative: every function meets every value.
instance Applicative List where
  pure x = Cons x Nil
  Nil <*> _ = Nil
  _ <*> Nil = Nil
  fs <*> xs = flatMap (\g -> fmap g xs) fs
-- Expected result:
-- Prelude> let functions = Cons (+1) (Cons (*2) Nil)
-- Prelude> let values = Cons 1 (Cons 2 Nil)
-- Prelude> functions <*> values
-- Cons 2 (Cons 3 (Cons 2 (Cons 4 Nil)))
-- In case you get stuck, use the following functions and hints.
-- | Concatenate two 'List's.
append :: List a -> List a -> List a
append Nil ys = ys
append (Cons x rest) ys = Cons x (append rest ys)
-- | Right fold over a 'List'.
fold :: (a -> b -> b) -> b -> List a -> b
fold _ z Nil = z
fold step z (Cons x rest) = step x (fold step z rest)
-- | Flatten one level of nesting.
concat' :: List (List a) -> List a
concat' = fold append Nil
-- | Map then flatten, written in terms of concat' and fmap as hinted.
flatMap :: (a -> List b) -> List a -> List b
flatMap f = concat' . fmap f
-- Use the above and try using flatMap and fmap without explicitly
-- pattern-matching on Cons cells. You’ll still need to handle the Nil cases.
-- Applicative instances, unlike Functors, are not guaranteed to have a unique
-- implementation for a given datatype.
-- | Take a prefix of up to @n@ elements from a 'List'.
--
-- Fixed: the original had no @n <= 0@ guard, so it recursed past zero,
-- returning the whole finite list and diverging on infinite lists (it is
-- used with depth 3000 in ZipList''s EqProp instance). Now it stops once
-- @n@ is exhausted, mirroring 'takeList'.
take' :: Int -> List a -> List a
take' _ Nil = Nil
take' n (Cons a b)
  | n > 0 = Cons a (take' (n - 1) b)
  | otherwise = Nil
-- | A zipping list wrapper over 'List'.
newtype ZipList' a = ZipList' (List a)
  deriving (Eq, Show)
-- | Equality on (up to) the first 3000 elements.
instance Eq a => EqProp (ZipList' a) where
  ZipList' xs =-= ZipList' ys = take' 3000 xs `eq` take' 3000 ys
instance Functor ZipList' where
  fmap g (ZipList' xs) = ZipList' (fmap g xs)
instance Applicative ZipList' where
  -- pure builds a one-element list; the singleton clauses below recycle
  -- that lone element against the whole remainder of the other side,
  -- emulating the infinite repetition the ZipList laws require.
  pure a = ZipList' $ Cons a Nil
  (ZipList' Nil) <*> _ = ZipList' Nil
  _ <*> (ZipList' Nil) = ZipList' Nil
  -- A lone function: @pure f <*> xs@ uses the *List* applicative, where a
  -- singleton function list maps f over the rest of xs.
  ZipList' (Cons f Nil) <*> ZipList' (Cons x xs) = ZipList' $ Cons (f x) (pure f <*> xs)
  -- A lone value: each remaining function is applied to it, again via the
  -- List applicative.
  ZipList' (Cons f fs) <*> ZipList' (Cons x Nil) = ZipList' $ Cons (f x) (fs <*> pure x)
  -- General case: pairwise zip of functions and values.
  ZipList' (Cons f fs) <*> ZipList' (Cons x xs) = ZipList' $ Cons (f x) (fs <*> xs)
instance Arbitrary a => Arbitrary (List a) where
  arbitrary = genList
instance Arbitrary a => Arbitrary (ZipList' a) where
  arbitrary = genZipList
-- | Random 'List': at each step, 3:1 odds of another Cons versus Nil.
genList :: Arbitrary a => Gen (List a)
genList = do
  x <- arbitrary
  rest <- genList
  frequency [ (3, return (Cons x rest))
            , (1, return Nil) ]
-- | Random 'ZipList'': simply wrap a random 'List'.
genZipList :: Arbitrary a => Gen (ZipList' a)
genZipList = ZipList' <$> arbitrary
------
-- | An Either-like sum: 'First'' short-circuits, 'Second'' carries success.
data Sum' a b = First' a
              | Second' b
              deriving (Eq, Show)
-- | Like 'Sum'', but its Applicative (below) accumulates errors monoidally.
data Validation e a = Error e
                    | Success a
                    deriving (Eq, Show)
-- | Map over the 'Second'' side only.
instance Functor (Sum' a) where
  fmap g (Second' x) = Second' (g x)
  fmap _ (First' e) = First' e
-- | Either-like: the first 'First'' wins.
instance Applicative (Sum' a) where
  pure = Second'
  First' e <*> _ = First' e
  Second' _ <*> First' e = First' e
  Second' g <*> Second' x = Second' (g x)
instance (Arbitrary a, Arbitrary b) => Arbitrary (Sum' a b) where
  arbitrary = genSum
-- | Pick 'First'' or 'Second'' with equal probability.
genSum :: (Arbitrary a, Arbitrary b) => Gen (Sum' a b)
genSum = do
  l <- arbitrary
  r <- arbitrary
  elements [First' l, Second' r]
-- Functor behaves like Sum/Either: only the success side is mapped.
instance Functor (Validation e) where
  fmap g (Success x) = Success (g x)
  fmap _ (Error e) = Error e
-- Unlike Either: two failures are combined with the error monoid.
instance Monoid e => Applicative (Validation e) where
  pure = Success
  Error e <*> Error e' = Error (e <> e')
  Error e <*> Success _ = Error e
  Success _ <*> Error e' = Error e'
  Success g <*> Success x = Success (g x)
-- Your hint for this one is that you’re writing the following functions:

-- | '<*>' specialised to 'Sum'': apply only when both sides are 'Second''.
-- (Was 'undefined'; it is exactly the Applicative instance's '<*>'.)
applyIfBothSecond :: (Sum' e) (a -> b)
                  -> (Sum' e) a
                  -> (Sum' e) b
applyIfBothSecond = (<*>)
-- | '<*>' specialised to 'Validation': errors accumulate via 'mappend'.
-- (Was 'undefined'; it is exactly the Applicative instance's '<*>'.)
applyMappendError :: Monoid e =>
                     (Validation e) (a -> b)
                  -> (Validation e) a
                  -> (Validation e) b
applyMappendError = (<*>)
-- | Plain structural equality suffices for these finite sums.
instance (Eq e, Eq a) => EqProp (Validation e a) where
  (=-=) = eq
instance (Eq a, Eq b) => EqProp (Sum' a b) where
  (=-=) = eq
instance (Arbitrary e, Arbitrary a) => Arbitrary (Validation e a) where
  arbitrary = genValidation
-- | Pick 'Error' or 'Success' with equal probability.
genValidation :: (Arbitrary e, Arbitrary a) => Gen (Validation e a)
genValidation = do
  bad <- arbitrary
  good <- arbitrary
  elements [Error bad, Success good]
-- | Run the checkers applicative-law batteries for all three types.
main :: IO ()
main = do
  putStrLn "Testing applicative for sum"
  quickBatch (applicative (undefined :: Sum' String (String, String, String)))
  putStrLn "Testing applicative for validation"
  quickBatch (applicative (undefined :: Validation String (String, String, String)))
  putStrLn "Testing applicative for ZipList"
  quickBatch (applicative (undefined :: ZipList' (String, String, String)))
| diminishedprime/.org | reading-list/haskell_programming_from_first_principles/17_08.hs | mit | 5,818 | 75 | 13 | 1,485 | 2,152 | 1,087 | 1,065 | 127 | 1 |
{-# LANGUAGE FlexibleInstances,
UndecidableInstances,
TypeFamilies,
RankNTypes,
GADTs,
OverloadedStrings,
ScopedTypeVariables
#-}
-----------------------------------------------------------------------------
{- |
Module : Language.Javascript.JMacro
Copyright : (c) Gershom Bazerman, 2009
License : BSD 3 Clause
Maintainer : gershomb@gmail.com
Stability : experimental
Simple EDSL for lightweight (untyped) programmatic generation of Javascript.
-}
-----------------------------------------------------------------------------
module Compiler.JMacro.QQ (parseJM, parseJME) where
import Prelude hiding ((<*), tail, init, head, last, minimum, maximum, foldr1, foldl1, (!!), read)
import Control.Arrow (first)
import Control.Monad.State.Strict
import Data.Char (digitToInt, toLower, isAlpha)
import Data.List (isPrefixOf, sort)
import Data.Maybe (fromMaybe, isJust)
import qualified Data.Map as M
import qualified Data.Text as T
import Text.ParserCombinators.Parsec
import Text.ParserCombinators.Parsec.Expr
import qualified Text.ParserCombinators.Parsec.Token as P
import Text.ParserCombinators.Parsec.Language (javaStyle)
import Compiler.JMacro.Base
import Numeric (readHex)
{--------------------------------------------------------------------
Parsing
--------------------------------------------------------------------}
-- | All JMacro parsers run over plain 'Char' streams with no user state.
type JMParser a = CharParser () a
lexer :: P.TokenParser ()
symbol :: String -> JMParser String
parens, braces :: JMParser a -> JMParser a
dot, colon, semi, identifier, identifierWithBang :: JMParser String
whiteSpace :: JMParser ()
reserved, reservedOp :: String -> JMParser ()
commaSep, commaSep1 :: JMParser a -> JMParser [a]
-- Shared token parser built from the JavaScript-flavoured language def below.
lexer = P.makeTokenParser jsLang
-- | JavaScript-style language definition: C-style comments, @_$@-extended
-- identifiers, and the JMacro keyword/operator sets.
jsLang :: P.LanguageDef ()
jsLang = javaStyle {
           P.reservedNames = ["NaN", "Infinity", "var","return","if","else","while","for","in","break","continue","function","switch","case","default","fun","try","catch","finally","foreign","do","new","typeof","delete","instanceof","yield"],
           P.reservedOpNames = reservedOpNames,
           P.identLetter = alphaNum <|> oneOf "_$",
           P.identStart = letter <|> oneOf "_$",
           P.opStart = oneOf "|+-/*%<>&^.?=!~:@",
           P.opLetter = oneOf "|+-/*%<>&^.?=!~:@",
           P.commentLine = "//",
           P.commentStart = "/*",
           P.commentEnd = "*/",
           P.caseSensitive = True}
-- Variant identifier parser that also accepts a leading "!" marker.
identifierWithBang = P.identifier $ P.makeTokenParser $ jsLang {P.identStart = letter <|> oneOf "_$!"}
whiteSpace= P.whiteSpace lexer
symbol = P.symbol lexer
parens = P.parens lexer
braces = P.braces lexer
-- brackets = P.brackets lexer
dot = P.dot lexer
colon = P.colon lexer
semi = P.semi lexer
identifier= P.identifier lexer
reserved = P.reserved lexer
-- NOTE(review): "?" appears twice in this list; harmless but redundant.
reservedOpNames :: [String]
reservedOpNames = ["+=","-=","*=","/=","%=","<<=", ">>=", ">>>=", "&=", "^=", "|=", "--","*","/","+","-",".","%","?","=","==","!=","<",">","&&","||","&", "^", "|", "++","===","!==", ">=","<=","!", "~", "<<", ">>", ">>>", "new", "typeof", "void", "delete", "instanceof", "in", "yield", "?", ":"]
-- Parse one reserved operator with maximal munch: for a symbolic operator,
-- collect the suffixes of every longer reserved operator it prefixes and
-- require that none of them follows (so "<" does not match inside "<<=").
-- Alphabetic operators simply may not be followed by another letter.
reservedOp name
  | name `notElem` reservedOpNames = error ("reservedOp: not a reserved operator: " ++ name)
  | all isAlpha name = lexeme $ try $
        string name >>
        (notFollowedBy letter <?> ("end of " ++ show name))
  | otherwise =
      let t = [string (drop (length name) n) | n <- reservedOpNames, name `isPrefixOf` n, n /= name]
      in lexeme $ try (string name >> notFollowedBy (choice t))
commaSep1 = P.commaSep1 lexer
commaSep = P.commaSep lexer
lexeme :: JMParser a -> JMParser a
lexeme = P.lexeme lexer
-- | Run two actions in order, keeping the first result. (Prelude's '<*' is
-- hidden above; this local copy needs only a Monad constraint.)
(<*) :: Monad m => m b -> m a -> m b
x <* y = x >>= \r -> y >> return r
-- | Parse a string of JMacro statements into one block statement.
parseJM :: String -> Either ParseError JStat
parseJM input = BlockStat <$> runParser fullStats () "" input
  where fullStats = statblock <* eof
-- | Parse a single JMacro expression (leading whitespace is skipped).
parseJME :: String -> Either ParseError JExpr
parseJME input = runParser fullExpr () "" input
  where fullExpr = (whiteSpace >> expr) <* eof
-- function !foo or function foo or var !x or var x, with optional type
-- | Parse a variable/function declaration name, optionally prefixed with
-- \"!\". Rejects the reserved internal prefix @jmId_@ and the name @this@.
varidentdecl :: JMParser Ident
varidentdecl = do
  i <- identifierWithBang
  when ("jmId_" `isPrefixOf` i || "!jmId_" `isPrefixOf` i) $ fail "Illegal use of reserved jmId_ prefix in variable name."
  when (i=="this" || i=="!this") $ fail "Illegal attempt to name variable 'this'."
  return (TxtI $ T.pack i)
-- any other identifier decl
-- | Same checks as 'varidentdecl', but without the \"!\" marker.
identdecl :: JMParser Ident
identdecl = do
  i <- identifier
  when ("jmId_" `isPrefixOf` i) $ fail "Illegal use of reserved jmId_ prefix in variable name."
  when (i=="this") $ fail "Illegal attempt to name variable 'this'."
  return (TxtI (T.pack i))
-- | Strip a single leading \"!\" marker from an identifier, if present.
cleanIdent :: Ident -> Ident
cleanIdent (TxtI x) | Just rest <- T.stripPrefix "!" x = TxtI rest
cleanIdent other = other
{-
expr2ident :: JExpr -> Ident
expr2ident (ValExpr (JVar i)) = i
expr2ident e = error ("expr2ident: expected (ValExpr (JVar _)), got: " ++ show e)
-}
-- Handle varident decls for type annotations?
-- Patterns
-- | Destructuring patterns: as-patterns, cons, lists, objects, and plain
-- variables.
data PatternTree = PTAs Ident PatternTree
                 | PTCons PatternTree PatternTree
                 | PTList [PatternTree]
                 | PTObj [(String, PatternTree)]
                 | PTVar Ident
                   deriving Show
-- NOTE(review): only the bare-variable form (PTVar) is parsed here; the
-- richer constructors above are not produced by this parser -- TODO confirm
-- whether the full pattern grammar is intended.
patternTree :: JMParser PatternTree
patternTree = PTVar <$> varidentdecl
--either we have a function from any ident to the constituent parts
--OR the top level is named, and hence we have the top ident, plus decls for the constituent parts
-- 'go' walks the pattern tree, threading the access path (an expression
-- that selects the matched sub-value) and emitting a declare+assign pair
-- for every bound variable; "_" binds nothing.
patternBinding :: JMParser (Either (Ident -> [JStat]) (Ident, [JStat]))
patternBinding = do
  ptree <- patternTree
  let go path (PTAs asIdent pt) = [DeclStat asIdent, AssignStat (ValExpr (JVar (cleanIdent asIdent))) path] ++ go path pt
      go path (PTVar i)
          | i == (TxtI "_") = []
          | otherwise = [DeclStat i, AssignStat (ValExpr (JVar (cleanIdent i))) path]
      go path (PTList pts) = concatMap (uncurry go) $ zip (map addIntToPath [0..]) pts
          where addIntToPath i = IdxExpr path (ValExpr $ JInt i)
      go path (PTObj xs) = concatMap (uncurry go) $ map (first fixPath) xs
          where fixPath lbl = IdxExpr path (ValExpr $ JStr (T.pack lbl))
      go path (PTCons x xs) = concat [go (IdxExpr path (ValExpr $ JInt 0)) x,
                                      go (ApplExpr (SelExpr path (TxtI "slice")) [ValExpr $ JInt 1]) xs]
  case ptree of
    PTVar i -> return $ Right (i,[])
    PTAs i pt -> return $ Right (i, go (ValExpr $ JVar i) pt)
    _ -> return $ Left $ \i -> go (ValExpr $ JVar i) ptree
-- | Parse many pattern bindings; anonymous patterns are given synthetic
-- names "jmId_match_1", "jmId_match_2", ... and the resulting declaration
-- lists are concatenated.
patternBlocks :: JMParser ([Ident], [JStat])
patternBlocks = fmap concat . unzip . zipWith (\i efr -> either (\f -> (i, f i)) id efr) (map (TxtI . T.pack . ("jmId_match_" ++) . show) [(1::Int)..]) <$> many patternBinding
-- | Parse one @var@-style declaration with optional destructuring and an
-- optional "= expr" initialiser; anonymous patterns bind "jmId_match_var".
destructuringDecl :: JMParser [JStat]
destructuringDecl = do
    (i,patDecls) <- either (\f -> (matchVar, f matchVar)) id <$> patternBinding
    optAssignStat <- optionMaybe $ do
           reservedOp "="
           e <- expr
           return $ AssignStat (ValExpr (JVar (cleanIdent i))) e : patDecls
    return $ DeclStat i : fromMaybe [] optAssignStat
  where matchVar = TxtI "jmId_match_var"
-- | One or more statements, separated/terminated by optional semicolons.
statblock :: JMParser [JStat]
statblock = concat <$> (sepEndBy1 (whiteSpace >> statement) (semi <|> return ""))
-- | Like 'statblock', but also accepts an empty (whitespace-only) block.
statblock0 :: JMParser [JStat]
statblock0 = try statblock <|> (whiteSpace >> return [])
-- | Wrap a statement list into a single block statement.
l2s :: [JStat] -> JStat
l2s xs = BlockStat xs
-- | A statement, or an empty braces block (which yields no statements).
statementOrEmpty :: JMParser [JStat]
statementOrEmpty = try emptyStat <|> statement
  where emptyStat = braces (whiteSpace >> return [])
-- return either an expression or a statement
statement :: JMParser [JStat]
statement = declStat
<|> funDecl
<|> functionDecl
<|> returnStat
<|> labelStat
<|> ifStat
<|> whileStat
<|> switchStat
<|> forStat
<|> doWhileStat
<|> braces statblock
<|> assignOpStat
<|> tryStat
<|> applStat
<|> breakStat
<|> continueStat
<?> "statement"
where
declStat = do
reserved "var"
res <- concat <$> commaSep1 destructuringDecl
_ <- semi
return res
functionDecl = do
reserved "function"
i <- varidentdecl
(as,patDecls) <- fmap (\x -> (x,[])) (try $ parens (commaSep identdecl)) <|> patternBlocks
b' <- try (ReturnStat <$> braces expr) <|> (l2s <$> statement)
let b = BlockStat patDecls `mappend` b'
return $ [DeclStat i,
AssignStat (ValExpr $ JVar (cleanIdent i)) (ValExpr $ JFunc as b)]
funDecl = do
reserved "fun"
n <- identdecl
(as, patDecls) <- patternBlocks
b' <- try (ReturnStat <$> braces expr) <|> (l2s <$> statement) <|> (symbol "->" >> ReturnStat <$> expr)
let b = BlockStat patDecls `mappend` b'
return $ [DeclStat (addBang n),
AssignStat (ValExpr $ JVar n) (ValExpr $ JFunc as b)]
where addBang (TxtI x) = TxtI (T.pack "!!" `mappend` x)
returnStat =
reserved "return" >> (:[]) . ReturnStat <$> option (ValExpr $ JVar $ TxtI "undefined") expr
ifStat = do
reserved "if"
p <- parens expr
b <- l2s <$> statementOrEmpty
isElse <- (lookAhead (reserved "else") >> return True)
<|> return False
if isElse
then do
reserved "else"
return . IfStat p b . l2s <$> statementOrEmpty
else return $ [IfStat p b nullStat]
whileStat =
reserved "while" >> liftM2 (\e b -> [WhileStat False e (l2s b)])
(parens expr) statementOrEmpty
doWhileStat = reserved "do" >> liftM2 (\b e -> [WhileStat True e (l2s b)])
statementOrEmpty (reserved "while" *> parens expr)
switchStat = do
reserved "switch"
e <- parens $ expr
(l,d) <- braces (liftM2 (,) (many caseStat) (option [] dfltStat))
return $ [SwitchStat e l (l2s d)]
caseStat =
reserved "case" >> liftM2 (,) expr (char ':' >> l2s <$> statblock0)
tryStat = do
reserved "try"
s <- statement
isCatch <- (lookAhead (reserved "catch") >> return True)
<|> return False
(i,s1) <- if isCatch
then do
reserved "catch"
liftM2 (,) (parens identdecl) statement
else return $ (TxtI "", [])
isFinally <- (lookAhead (reserved "finally") >> return True)
<|> return False
s2 <- if isFinally
then reserved "finally" >> statement
else return $ []
return [TryStat (BlockStat s) i (BlockStat s1) (BlockStat s2)]
dfltStat =
reserved "default" >> char ':' >> whiteSpace >> statblock
-- | @for@ statement: @for each (... in ...)@, @for (... in ...)@,
-- or the classic three-clause C-style form.
forStat =
  reserved "for" >> ((reserved "each" >> inBlock True)
                     <|> try (inBlock False)
                     <|> simpleForStat)

-- | @for [each] ([var] i in e) s@ — prepends a 'DeclStat' when @var@ is
-- present; @isEach@ selects the for-each flavour of 'ForInStat'.
inBlock isEach = do
  char '(' >> whiteSpace
  dec <- isJust <$> optionMaybe (reserved "var")
  i <- identdecl
  reserved "in"
  e <- expr
  char ')' >> whiteSpace
  s <- l2s <$> statement
  return $ (if dec then (DeclStat i:) else id) [ForInStat isEach i e s]

-- | Classic @for (init; cond; step) body@, desugared to @init@ followed by
-- a while loop whose body ends with the step statements.
simpleForStat = do
  (before,after,p) <- parens threeStat
  jFor' before after p <$> statement
  where threeStat =
          liftM3 (,,) (option [] statement <* optional semi)
                      (optionMaybe expr <* semi)
                      (option [] statement)
        -- NOTE(review): the caller binds the tuple as (before,after,p) but
        -- positionally it is (init, cond, step); inside jFor' the names
        -- 'p' (condition) and 'after' (step) are the accurate ones.
        jFor' :: [JStat] -> Maybe JExpr -> [JStat] -> [JStat] -> [JStat]
        jFor' before p after bs = before ++ [WhileStat False (fromMaybe (jsv "true") p) b']
          where b' = BlockStat $ bs ++ after
-- | Assignment and compound-assignment statements (=, +=, <<=, ...).
-- The left-hand side must be a plain variable (not a reserved value or a
-- literal/application), a selection, or an index expression.
assignOpStat = do
  let rop x x' = reservedOp x >> return x'
  (e1, op) <- try $ liftM2 (,) dotExpr (
        rop "=" Nothing
    <|> rop "+=" (Just AddOp)
    <|> rop "-=" (Just SubOp)
    <|> rop "*=" (Just MulOp)
    <|> rop "/=" (Just DivOp)
    <|> rop "%=" (Just ModOp)
    <|> rop "<<=" (Just LeftShiftOp)
    <|> rop ">>=" (Just RightShiftOp)
    <|> rop ">>>=" (Just ZRightShiftOp)
    <|> rop "&=" (Just BAndOp)
    <|> rop "^=" (Just BXorOp)
    <|> rop "|=" (Just BOrOp)
    )
  let gofail = fail "Invalid assignment."
      badList = ["this", "true", "false", "undefined", "null"]
  -- Reject non-assignable left-hand sides.
  case e1 of
    ValExpr (JVar (TxtI s)) -> if s `elem` badList then gofail else return ()
    ApplExpr {} -> gofail
    ValExpr {} -> gofail
    _ -> return ()
  e2 <- expr
  -- A compound operator desugars to: lhs = lhs `op` rhs.
  return [AssignStat e1 $ maybe e2 (\o -> InfixExpr o e1 e2) op]

-- | An expression used in statement position (e.g. a function call).
applStat = expr2stat' =<< expr
-- fixme: don't handle ifstats
expr2stat' e = case expr2stat e of
  BlockStat [] -> pzero   -- not expressible as a statement: backtrack
  x -> return [x]
{-
expr2stat' :: JExpr -> JStat
expr2stat' (ApplExpr x y) = return $ (ApplStat x y)
expr2stat' (IfExpr x y z) = liftM2 (IfStat x) (expr2stat' y) (expr2stat' z)
expr2stat' (PostExpr s x) = return $ PostStat s x
expr2stat' (AntiExpr x) = return $ AntiStat x
expr2stat' _ = fail "Value expression used as statement"
-}
-- | @break [label]@.
breakStat = do
  reserved "break"
  l <- optionMaybe myIdent
  return [BreakStat (T.pack <$> l)]

-- | @continue [label]@.
continueStat = do
  reserved "continue"
  l <- optionMaybe myIdent
  return [ContinueStat (T.pack <$> l)]

-- | @label: stats@ — "default" is excluded so switch default-alternatives
-- are not misparsed as labels.
labelStat = do
  lbl <- try $ do
    l <- myIdent <* char ':'
    guard (l /= "default")
    return l
  s <- l2s <$> statblock0
  return [LabelStat (T.pack lbl) s]
expr :: JMParser JExpr
-- | Full expression parser: the operator-precedence grammar over
-- 'dotExpr', extended with right-nesting ternary @?:@ at the top.
expr = exprWithIf
  where
    exprWithIf = do
      e <- rawExpr
      addIf e <|> return e
    addIf e = do
      reservedOp "?"
      t <- exprWithIf
      _ <- colon
      el <- exprWithIf
      let ans = (IfExpr e t el)
      addIf ans <|> return ans
    rawExpr = buildExpressionParser table dotExpr <?> "expression"
    -- Precedence table, highest first; rows follow JavaScript precedence.
    table = [[pop "++" PreInc, pop "--" PreDec, poop "++" PostInc, poop "--" PostDec], -- fixme, yield
             [pop "~" BNotOp, pop "!" NotOp, negop, pop "+" PlusOp, pop "typeof" TypeofOp, pop "void" VoidOp, pop "delete" DeleteOp],
             [iop "*" MulOp, iop "/" DivOp, iop "%" ModOp],
             [iop "+" AddOp, iop "-" SubOp],
             [iop "<<" LeftShiftOp, iop ">>>" ZRightShiftOp, iop ">>" RightShiftOp],
             [iop "<=" LeOp, iop "<" LtOp, iop ">=" GeOp, iop ">" GtOp, iop "in" InOp, iop "instanceof" InstanceofOp],
             [iop "===" StrictEqOp, iop "!==" StrictNeqOp, iop "==" EqOp, iop "!=" NeqOp],
             [iop "&" BAndOp],
             [iop "^" BXorOp],
             [iop "|" BOrOp],
             [iop "&&" LAndOp],
             [iop "||" LOrOp]
            ]
    pop s s' = Prefix (reservedOp s >> return (UOpExpr s'))
    poop s s' = Postfix (reservedOp s >> return (UOpExpr s'))
    iop s s' = Infix (reservedOp s >> return (InfixExpr s')) AssocLeft
    -- Unary minus folds directly into numeric literals.
    negop = Prefix (reservedOp "-" >> return negexp)
    negexp (ValExpr (JDouble n)) = ValExpr (JDouble (-n))
    negexp (ValExpr (JInt n)) = ValExpr (JInt (-n))
    negexp x = UOpExpr NegOp x
dotExpr :: JMParser JExpr
-- | One or more juxtaposed simple expressions; juxtaposition is treated
-- as application (head applied to the remaining expressions).
dotExpr = do
  e <- many1 (lexeme dotExprOne) <?> "simple expression"
  case e of
    [e'] -> return e'
    (e':es) -> return $ ApplExpr e' es
    _ -> error "dotExpr: exprApp"  -- unreachable: many1 never yields []
dotExprOne :: JMParser JExpr
-- | A single postfix-expression: a value, parenthesised expression, or
-- @new@ expression, followed by any chain of @.sel@, @[idx]@, @(args)@.
dotExprOne = addNxt =<< valExpr <|> parens' expr <|> newExpr
  where
    -- Peek one character to decide whether a postfix chain continues.
    addNxt e = do
      nxt <- (Just <$> lookAhead anyChar <|> return Nothing)
      case nxt of
        Just '.' -> addNxt =<< (dot >> (SelExpr e <$> (ident' <|> numIdent)))
        Just '[' -> addNxt =<< (IdxExpr e <$> brackets' expr)
        Just '(' -> addNxt =<< (ApplExpr e <$> parens' (commaSep expr))
        _ -> return e
    numIdent = TxtI . T.pack <$> many1 digit
    newExpr = UOpExpr NewOp <$> (reservedOp "new" >> dotExpr)
    -- Literal values: numbers, strings, regexes, lists, hashes,
    -- function literals, and variables.
    valExpr = ValExpr <$> (num <|> str <|> try regex <|> list <|> hash <|> func <|> var) <?> "value"
      where num = lexeme $ either JInt JDouble <$> try natFloat
            str = lexeme $ JStr . T.pack <$> (myStringLiteral '"' <|> myStringLiteral '\'')
            regex = lexeme $ JRegEx . T.pack <$> regexLiteral
            --do
            -- s <- regexLiteral
            -- myStringLiteralNoBr '/'
            -- fixme: syntax check for regexp removed because it depends on regex-posix
            -- pure (JRegEx $ T.pack s)
            list = lexeme $ JList <$> brackets' (commaSep expr)
            hash = lexeme $ JHash . M.fromList <$> braces' (commaSep propPair)
            var = JVar <$> ident'
            -- \\args -> e, function(args){...}, or pattern-block form.
            func = lexeme $ do
              (symbol "\\" >> return ()) <|> reserved "function"
              (as,patDecls) <- fmap (\x -> (x,[])) (try $ parens (commaSep identdecl)) <|> patternBlocks
              b' <- (braces' statOrEblock <|> (symbol "->" >> (ReturnStat <$> expr)))
              return $ JFunc as (BlockStat patDecls `mappend` b')
            statOrEblock = try (ReturnStat <$> expr `folBy` '}') <|> (l2s <$> statblock)
    propPair = liftM2 (,) (T.pack <$> myIdent) (colon >> expr)
-- notFolBy a b = a <* notFollowedBy (char b)
-- | Succeed with @a@'s result only if the next character is @b@
-- (the character is looked at but not consumed).
folBy :: JMParser a -> Char -> JMParser a
folBy a b = a <* (lookAhead (char b) >>= const (return ()))
-- Parsers without Lexeme
-- | Bracketing parsers that do NOT skip whitespace after the closing
-- delimiter, unlike the token-parser versions.
braces', brackets', parens' {- , oxfordBraces -} :: JMParser a -> JMParser a
brackets' = around' '[' ']'
braces' = around' '{' '}'
parens' = around' '(' ')'
-- | Run a parser between two literal characters, skipping whitespace
-- after the opener and after the inner parser but not after the closer.
around' :: Char -> Char -> JMParser a -> JMParser a
around' a b x = lexeme (char a) >> (lexeme x <* char b)
myIdent :: JMParser String
-- | A loose identifier: alphanumerics plus punctuation, or any
-- single-quoted string.  Used for labels and hash keys.
myIdent = lexeme $ many1 (alphaNum <|> oneOf "_-!@#$%^&*()") <|> myStringLiteral '\''

ident' :: JMParser Ident
-- | A proper JavaScript identifier, rejecting reserved words and the
-- internal "jmId_" prefix (reserved for generated names).
ident' = lexeme $ do
    i <- identifier'
    when ("jmId_" `isPrefixOf` i) $ fail "Illegal use of reserved jmId_ prefix in variable name."
    return (TxtI $ T.pack i)
  where
    identifier' =
      try $
      do{ name <- ident''
        ; if isReservedName name
            then unexpected ("reserved word " ++ show name)
            else return name
        }
    ident''
      = do{ c <- P.identStart jsLang
          ; cs <- many (P.identLetter jsLang)
          ; return (c:cs)
          }
      <?> "identifier"
    isReservedName name
      = isReserved theReservedNames caseName
      where
        caseName | P.caseSensitive jsLang = name
                 | otherwise = map toLower name
    -- Linear scan over the sorted reserved-word list, stopping early
    -- once the candidates exceed the name.
    isReserved names name
      = scan names
      where
        scan [] = False
        scan (r:rs) = case (compare r name) of
                        LT -> scan rs
                        EQ -> True
                        GT -> False
    theReservedNames
      | P.caseSensitive jsLang = sortedNames
      | otherwise = map (map toLower) sortedNames
      where
        sortedNames = sort (P.reservedNames jsLang)
natFloat :: JMParser (Either Integer SaneDouble)
-- | Parse a numeric literal: Left for integers (decimal, 0x hex, 0o
-- octal), Right for floats, NaN (0/0) and Infinity (1/0).
natFloat = (char '0' >> zeroNumFloat) <|> try nan <|> try infinity
           <|> decimalFloat <?> "number"
  where
    nan = reserved "NaN" >> return (Right (0/0))
    infinity = reserved "Infinity" >> return (Right (1/0))
    -- After a leading '0': hex/octal, a float, a fraction, or plain 0.
    zeroNumFloat = (Left <$> (hexadecimal <|> octal))
                   <|> decimalFloat
                   <|> fractFloat 0
                   <|> return (Left 0)
    decimalFloat = do n <- decimal
                      option (Left n)(fractFloat n)
    fractFloat n = Right <$> fractExponent n
    -- Either ".digits[e..]" appended to n, or just an exponent on n.
    fractExponent n = (do fract <- fraction
                          expo <- option 1.0 exponent'
                          return ((fromInteger n + fract)*expo)
                      )
                      <|> ((fromInteger n *) <$> exponent')
    fraction = char '.' >> (foldr op 0.0 <$> many1 digit <?> "fraction")
      where
        op d f = (f + fromIntegral (digitToInt d))/10.0
    exponent' = do _ <- oneOf "eE"
                   power <$> decimal
      where
        power e | e < 0 = 1.0/power(-e)
                | otherwise = fromInteger (10^e)
    decimal = number 10 digit
    hexadecimal = oneOf "xX" >> number 16 hexDigit
    octal = oneOf "oO" >> number 8 octDigit
    number base baseDig = foldl (\x d -> base*x + toInteger (digitToInt d)) 0 <$> many1 baseDig
myStringLiteral :: Char -> JMParser String
-- | Parse a string literal delimited by @t@ (either quote character).
-- Backslash-escape pairs are collected verbatim and decoded afterwards
-- by 'decodeJson'.
myStringLiteral t = do
    _ <- char t
    x <- concat <$> many myChar
    _ <- char t
    decodeJson x
  where myChar = do
          c <- noneOf [t]
          case c of
            '\\' -> do
              -- Keep the escape pair intact; decodeJson interprets it.
              c2 <- anyChar
              return [c,c2]
            _ -> return [c]
-- Taken from json package by Sigbjorn Finne.
-- | Decode backslash escapes in a raw string body (the text between the
-- quotes, as collected by 'myStringLiteral').  Fails the parser on
-- control characters, out-of-range code points, and unknown or
-- truncated escape sequences.
--
-- Fix: the escape table now accepts @\\'@.  'myStringLiteral' also parses
-- single-quoted strings (see @str@ in 'dotExprOne') and hands the escaped
-- quote through as the pair @\\'@, which previously hit the catch-all
-- "invalid escape char" failure.
decodeJson :: String -> JMParser String
decodeJson x = parseIt [] x
  where
    -- Walk the input, accumulating decoded characters in reverse.
    parseIt rs cs =
      case cs of
        '\\' : c : ds -> esc rs c ds
        c : ds
         | c >= '\x20' && c <= '\xff' -> parseIt (c:rs) ds
         | c < '\x20'     -> fail $ "Illegal unescaped character in string: " ++ x
         | i <= 0x10ffff  -> parseIt (c:rs) ds
         | otherwise -> fail $ "Illegal unescaped character in string: " ++ x
          where
           i = (fromIntegral (fromEnum c) :: Integer)
        [] -> return $ reverse rs
    -- One escape sequence; 'cs' is the remaining input after the pair.
    esc rs c cs = case c of
      '\\' -> parseIt ('\\' : rs) cs
      '"'  -> parseIt ('"' : rs) cs
      '\'' -> parseIt ('\'' : rs) cs
      'n'  -> parseIt ('\n' : rs) cs
      'r'  -> parseIt ('\r' : rs) cs
      't'  -> parseIt ('\t' : rs) cs
      'f'  -> parseIt ('\f' : rs) cs
      'b'  -> parseIt ('\b' : rs) cs
      '/'  -> parseIt ('/' : rs) cs
      'u'  -> case cs of
                d1 : d2 : d3 : d4 : cs' ->
                  case readHex [d1,d2,d3,d4] of
                    [(n,"")] -> parseIt (toEnum n : rs) cs'
                    badHex -> fail $ "Unable to parse JSON String: invalid hex: " ++ show badHex
                _ -> fail $ "Unable to parse JSON String: invalid hex: " ++ cs
      _ -> fail $ "Unable to parse JSON String: invalid escape char: " ++ [c]
--tricky bit to deal with regex literals and comments / / -- if we hit // inside, then we fail, since that isn't ending the regex but introducing a comment, and thus the initial / could not have introduced a regex.
regexLiteral :: JMParser String
-- | Parse a /regex/ literal body.  An immediately following second '/'
-- after the closing one means this was really a // comment, so the whole
-- parse is abandoned with 'mzero' (the caller backtracks via 'try').
regexLiteral = do
    _ <- char '/'
    x <- concat <$> many myChar
    _ <- char '/'
    b <- option False (char '/' >> return True)
    if b
      then mzero
      else return x
  where myChar = do
          -- '/' and newline terminate; escape pairs are kept verbatim.
          c <- noneOf ['/','\n']
          case c of
            '\\' -> do
              c2 <- anyChar
              return [c,c2]
            _ -> return [c]
| ghcjs/ghcjs | src/Compiler/JMacro/QQ.hs | mit | 23,943 | 0 | 26 | 8,072 | 7,623 | 3,852 | 3,771 | 481 | 14 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE RankNTypes #-}
module Virtual (
vnode,
vnodeFull,
svg,
rect,
vtext,
vbutton,
renderSetup,
rerender,
documentBody,
diff,
patch,
TreeState(..),
makeTreeState,
Size(..),
Position(..),
HTML()
) where
import GHCJS.Foreign
import GHCJS.Types
import System.IO.Unsafe -- TODO This is, of course, bad.
-- Phantom tags for the JavaScript objects referenced through JSRef.
data VNode
data DOMNode
data Patch
data JSProperties

-- | A rendered DOM node paired with the virtual tree it was built from,
-- as needed to diff against the next render.
data TreeState =
  TreeState {_node :: JSRef DOMNode
            ,_tree :: HTML}

-- | A virtual-dom tree node.
newtype HTML =
  HTML {nodeRef :: JSRef VNode}

-- | A diff between two virtual trees, applicable via 'patch'.
newtype HTMLPatch =
  HTMLPatch {patchRef :: JSRef Patch}

-- | A bag of vnode properties (a plain JS object).
newtype Properties =
  Properties {propsRef :: JSRef JSProperties}
-- | Bundle a rendered DOM node with the virtual tree it came from.
makeTreeState :: JSRef DOMNode -> HTML -> TreeState
makeTreeState = TreeState
#ifndef HLINT
-- FFI bindings to the DOM and to the virtual-dom library (h, diff,
-- patch, createElement, svg).
-- NOTE(review): diff_/patch_ are imported "safe" while the rest are
-- "unsafe" — presumably because diff/patch may re-enter the runtime via
-- event handlers; confirm against GHCJS FFI rules.
foreign import javascript unsafe
  "document.body" documentBody :: IO (JSRef DOMNode)
foreign import javascript unsafe
  "$1.appendChild($2)" appendChild :: JSRef DOMNode -> JSRef DOMNode -> IO ()
foreign import javascript unsafe
  "h($1, $2, $3)" vnode_ :: JSString -> JSRef JSProperties -> JSArray VNode -> IO (JSRef VNode)
foreign import javascript unsafe
  "$r = h($1, {'ev-click': $2, 'key': $3}, $4)" vbutton_ :: JSString -> JSFun (JSRef a -> IO ()) -> JSString -> JSString -> IO (JSRef VNode)
foreign import javascript unsafe
  "h($1, $2)" vtext_ :: JSString -> JSString -> IO (JSRef VNode)
foreign import javascript unsafe
  "$r = createElement($1)" createDOMNode_ :: JSRef VNode -> IO (JSRef DOMNode)
foreign import javascript safe
  "diff($1, $2)" diff_ :: JSRef VNode -> JSRef VNode -> JSRef Patch
foreign import javascript safe
  "patch($1, $2)" patch_ :: JSRef DOMNode -> JSRef Patch -> IO (JSRef DOMNode)
foreign import javascript unsafe
  "$r = {};" noproperty_ :: IO (JSRef JSProperties)
foreign import javascript unsafe
  "$r = svg('svg', { width: $1, height: $2 }, $3);" svg_ :: JSString -> JSString -> JSArray VNode -> IO (JSRef VNode)
foreign import javascript unsafe
  "$r = svg('rect', { width: $1, height: $2, x: $3, y: $4 });" rect_ :: JSString -> JSString -> JSString -> JSString -> JSRef VNode
#endif
-- property :: String -> String -> Properties
-- property k v =
-- Properties $
-- property_ (toJSString k)
-- (toJSString v)
-- | A shared empty property object for vnodes with no attributes.
-- NOINLINE is required for a top-level unsafePerformIO CAF: without it
-- GHC may inline the call and allocate a fresh JS object at each use
-- site (see the System.IO.Unsafe documentation).
-- NOTE(review): this deliberately shares ONE mutable JS object across
-- all vnodes — fine as long as virtual-dom never mutates it; confirm.
{-# NOINLINE noProperty #-}
noProperty :: Properties
noProperty = Properties $ unsafePerformIO noproperty_
-- Width/height and x/y as raw attribute strings (e.g. "100", "50%").
data Size = Size String String deriving (Show,Eq)
data Position = Position String String deriving (Show,Eq)

-- | Build an @<svg>@ element of the given size with the given children.
-- unsafePerformIO wraps an FFI call that only allocates a fresh vnode.
svg :: Size -> [HTML] -> HTML
svg (Size w h) children =
  HTML $
  unsafePerformIO $
  do jsChildren <- toArray (fmap nodeRef children)
     svg_ (toJSString w)
          (toJSString h)
          jsChildren

-- | Build an SVG @<rect>@ with the given size and position.
rect :: Size -> Position -> HTML
rect (Size w h) (Position x y) =
  HTML $
  rect_ (toJSString w)
        (toJSString h)
        (toJSString x)
        (toJSString y)
-- | Build a vnode with an explicit tag, properties, and children.
vnodeFull :: String -> Properties -> [HTML] -> HTML
vnodeFull tag properties children =
  HTML $
  unsafePerformIO $
  do jsChildren <- toArray (fmap nodeRef children)
     vnode_ (toJSString tag)
            (propsRef properties)
            jsChildren

-- | Build a vnode with no properties.
vnode :: String -> [HTML] -> HTML
vnode tag children =
  HTML $
  unsafePerformIO $
  do jsChildren <- toArray (fmap nodeRef children)
     props <- noproperty_
     vnode_ (toJSString tag) props jsChildren

-- | Build a clickable vnode: @f@ is attached as the ev-click handler and
-- @s@ serves as both the virtual-dom key and the text content.
vbutton :: String -> (JSRef a -> IO ()) -> String -> HTML
vbutton tag f s =
  HTML $
  unsafePerformIO $
  do f' <- syncCallback1 AlwaysRetain True f
     vbutton_ (toJSString tag)
              f'
              (toJSString s)
              (toJSString s)

-- | Build a vnode containing only text.
vtext :: String -> String -> HTML
vtext tag text = HTML $ unsafePerformIO $ vtext_ (toJSString tag) (toJSString text)
-- | Materialise a virtual tree as a real DOM node.
createDOMNode :: HTML -> IO (JSRef DOMNode)
createDOMNode (HTML x) = createDOMNode_ x

-- | Apply a patch to a DOM node, returning the (possibly new) node.
patch :: JSRef DOMNode -> HTMLPatch -> IO (JSRef DOMNode)
patch n (HTMLPatch p) = patch_ n p

-- | Compute the patch turning the old virtual tree into the new one.
diff :: HTML -> HTML -> HTMLPatch
diff (HTML old) (HTML new) = HTMLPatch (diff_ old new)
-- | Re-render: build the new virtual tree from the state, diff it
-- against the previous tree, patch the live DOM node, and return the
-- updated 'TreeState' for the next cycle.
rerender :: (a -> HTML) -> a -> TreeState -> IO TreeState
rerender render x (TreeState {_node = oldNode, _tree = oldTree}) = do
  let newTree = render x
      patches = diff oldTree newTree
  newNode <- patch oldNode patches
  return (makeTreeState newNode newTree)

-- | Initial render: build the tree, create its DOM node, append it to
-- the document body, and return the 'TreeState' for future 'rerender's.
renderSetup :: (a -> HTML) -> a -> IO TreeState
renderSetup render x = do
  body <- documentBody
  let tree = render x
  node <- createDOMNode tree
  _ <- appendChild body node
  return $ makeTreeState node tree
| krisajenkins/BellRinger | src/Virtual.hs | epl-1.0 | 4,427 | 36 | 11 | 969 | 1,433 | 738 | 695 | -1 | -1 |
module PSequent where
import PrelSequent
import Sequent
-- parsers for formulae and sequents. AR 4/4/1999 -- 13/4
pSequent :: Parser Char Sequent
-- | Parse a sequent @antecedent => succedent@; the succedent list is
-- stored reversed.
pSequent = pContext ... jL "=>" +.. (pContext *** reverse)

-- | A comma-separated list of formulae, possibly empty.
pContext = pTList "," (pFormula 1) ||| succeed []
pFormula :: Int -> Parser Char Formula -- Int is precedence number
-- Level 3: atoms, negation, quantifiers, falsum, and parentheses.
pFormula 3 =
      pPred .... pArgList pTerm *** uncurry Predic
  ||| pTerm .... pPrInfix .... pTerm *** (\ (x,(p,y)) -> Predic p [x,y])
  ||| pScheme ... pArgList pTerm *** uncurry Scheme
  ||| lits "_|_" <<< Falsum
  ||| lits "~" +.. pJ (pFormula 3) *** Neg
  ||| lits "(" +.. jL "/A" +..
      pJ pVar ... lits ")" +.. pFormula 3 *** uncurry Univ
  ||| lits "(" +.. jL "/E" +..
      pJ pVar ... lits ")" +.. pFormula 3 *** uncurry Exist
  ||| pParenth (pFormula 1)
-- Level 2: conjunction and disjunction chains (right-folded).
pFormula 2 =
  pFormula 3 ...
  (jL "&" +.. pTList "&" (pFormula 3) *** (\b x -> Conj x (foldr1 Conj b))
   |||
   jL "v" +.. pTList "v" (pFormula 3) *** (\b x -> Disj x (foldr1 Disj b))
   |||
   succeed id)
  ***
  (\ (a,b) -> b a)
-- Level 1: implication (right-associative via recursion).
pFormula 1 =
  pFormula 2 ...
  (jL "->" +.. pFormula 1 *** flip Impl
   ||| succeed id)
  ***
  (\ (a,b) -> b a)
-- | A term: a simple term optionally followed by an infix function
-- symbol and a second term (desugared to a binary 'Apply').
pTerm =
  pTerm2 ....
  (pInfix .... pTerm2 *** (\ (f,y) x -> Apply f [x,y]) ||| succeed id)
  *** (\ (x,y) -> y x)

-- | A simple term: applied constant, variable, metavariable, or a
-- parenthesised term.
pTerm2 =
  pConst .... pArgList pTerm *** uncurry Apply
  +||
  pVar *** Var
  |||
  pMeta *** Meta
  |||
  pParenth pTerm
-- Lexical categories of the sequent language.
pScheme = longestOfSome pCapital -- 1+ capital letters
pPred = pCapital ... longestOfSome pSmall *** junct -- capit. with 1+ small
pConst = pSmall ... longestOfSome pSmall *** junct -- 2+ small letters
         |||
         longestOfSome pDigit -- or 1+ digits
pVar = pSmall ... longestOfMany (literal '\'') *** junct
-- small with 0+ primes
pMeta = literal '?' +.. pVar ---
-- | Infix predicate symbols: the builtins = < > # or a backslash-named
-- symbol.  NOTE(review): structurally identical to 'pInfix' apart from
-- the builtin word list — could share a helper.
pPrInfix =
  foldl1 (|||) (map lits (words "= < > #"))
  |||
  literal '\\' ... longestOfSome pLetter *** junct
-- | Infix function symbols: + - * or a backslash-named symbol.
pInfix =
  foldl1 (|||) (map lits (words "+ - *"))
  |||
  literal '\\' ... longestOfSome pLetter *** junct
junct (a,b) = a:b
-- Character classes and identifiers.
pSmall = satisfy (`elem` ['a'..'z'])
pCapital = satisfy (`elem` ['A'..'Z'])
pLetter = pCapital ||| pSmall
pDigit = satisfy (`elem` ['0'..'9'])
-- | A rule name: any run of non-whitespace characters.
pRuleIdent = longestOfSome (satisfy (not . (`elem` " \n\t")))
pGoalId :: Parser Char [Int]
-- | A goal path: each digit 1-9 becomes one list element.
pGoalId = longestOfSome (satisfy (`elem` ['1' .. '9']) *** read . (:[])) ---
| Tomoaki-Hashizaki/pesca | src/PSequent.hs | gpl-2.0 | 2,545 | 0 | 32 | 758 | 963 | 500 | 463 | -1 | -1 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE InstanceSigs #-}
{-# LANGUAGE RankNTypes #-}
module Png.PngContainer (PngImage(..), PngImageType(..)) where
import Codec.Picture.Metadata (Metadatas)
import Codec.Picture.Png (decodePngWithMetadata, encodePngWithMetadata)
import Codec.Picture.Png (PngSavable)
import Codec.Picture.Types (thawImage, unsafeThawImage, unsafeFreezeImage, freezeImage)
import Control.Monad.ST (ST)
import Control.Monad.Trans.Class (lift)
import Crypto.RandomMonad (randomElementsLength, RandomElementsListST(), RndST)
import Data.Array.ST (STArray())
import Data.Bits (Bits)
import Data.Word (Word32, Word8)
import Png.PixelStream (Pixel, getPixels)
import SteganographyContainer (SteganographyContainer(..), WritableSteganographyContainer(..), SteganographyContainerOptions(..))
import qualified Codec.Picture.Types as PT
import qualified Data.BitString as BiS
import qualified Data.ByteString.Lazy as LBS
import qualified Data.ByteString.Lazy as LBS
import qualified Png.ImageFileHandler as A
-- | A thawed (mutable) PNG image together with the pixel/crypto
-- bookkeeping needed to embed bits into it.
data WritablePngImage pixel s = (PT.Pixel pixel, PngSavable pixel, Bits (PT.PixelBaseComponent pixel)) => WritablePngImage (PT.MutableImage s pixel) (A.PixelInfo s)

instance WritableSteganographyContainer (WritablePngImage a) [A.CryptoPrimitive] where
  getPrimitives (WritablePngImage image info) = A.getCryptoPrimitives info
  writeBitsP (WritablePngImage image info) prim bits = lift $ A.writeBits_ prim info image bits

-- Slow PNG Handling
-- | A decoded (immutable) PNG plus its pixel info/metadata.
data PngImage s = PngImage PT.DynamicImage (A.PixelInfo s)
-- | Container options: PngImageSpawnerFast trades accuracy of the
-- crypto-state setup for speed (see 'createContainer').
data PngImageType = PngImageSpawner
                  | PngImageSpawnerFast
instance SteganographyContainerOptions PngImageType PngImage where
  -- Decode the PNG (with metadata) and build the crypto state; the
  -- Bool passed to createCryptoState selects the "fast" spawner mode.
  createContainer options imagedata = case decodePngWithMetadata (LBS.toStrict imagedata) of
    Right (dynamicImage, metadata) -> A.createCryptoState (case options of PngImageSpawnerFast -> True ; PngImageSpawner -> False) dynamicImage >>= \(element, otherthing) -> return $ Right $ PngImage dynamicImage (element, otherthing, metadata)
    Left _ -> return $ Left "Could not decode"
instance SteganographyContainer (PngImage) where
  readSalt (PngImage i info) count = A.readSalt info i count
  readBits (PngImage i info) count = A.readBits info i (fromIntegral count)
  -- Capacity is the number of addressable random elements.
  bitsAvailable (PngImage i (elements, _, _)) = randomElementsLength elements >>= return . fromIntegral
  -- Identical to withSteganographyContainer below except that it thaws
  -- and freezes without copying (unsafeThawImage/unsafeFreezeImage), so
  -- the caller must not reuse the original image afterwards.
  unsafeWithSteganographyContainer (PngImage image info@(_, _, metadata)) func = A.pngDynamicMap (\img -> do
    thawed <- unsafeThawImage img
    result <- func $ WritablePngImage thawed info
    case result of
      Left err -> return $ Left err
      Right _ -> do
        frozen <- unsafeFreezeImage thawed
        return $ Right $ encodePngWithMetadata metadata frozen) image
  -- Copying variant: thaw a mutable copy, let func write into it, then
  -- freeze and re-encode with the original metadata.
  withSteganographyContainer (PngImage image info@(_, _, metadata)) func = A.pngDynamicMap (\img -> do
    thawed <- thawImage img
    result <- func $ WritablePngImage thawed info
    case result of
      Left err -> return $ Left err
      Right _ -> do
        frozen <- freezeImage thawed
        return $ Right $ encodePngWithMetadata metadata frozen) image
| Ofenhed/Steganography | src/Png/PngContainer.hs | gpl-2.0 | 3,300 | 0 | 17 | 571 | 912 | 499 | 413 | 55 | 0 |
module Main where
import Expressions (Context,LispError)
import Parser (parse)
import Eval (eval)
import Std (initialCtx)
import System.IO
import System.IO.Error
import Control.Exception
import Control.Monad.State
import Control.Monad.Error
-- Run the REPL with the standard-library context; evaluation errors are
-- threaded through LispError (ErrorT).
main = runErrorT (evalStateT repl initialCtx)

repl :: StateT Context LispError ()
-- | Read-eval-print loop: prompt, read a line (EOF becomes "(quit)"),
-- parse and evaluate it, print the result, and recurse.  Parse/eval
-- errors are caught per-iteration so the loop keeps running.
repl = do
  liftIO $ putStr "lisp> "
  liftIO $ hFlush stdout
  x <- liftIO $ getLine `catch` eofHandler
  if (x /= "(quit)")
    then do expr <- parse x
            evaledExpr <- eval expr
            liftIO $ print evaledExpr
            repl
         -- catchError wraps the whole then-branch, restarting the loop
         -- after printing the error.
         `catchError` (\e -> do liftIO $ putStrLn e
                                repl)
    else do liftIO $ putStrLn ""
            return ()
eofHandler e = if isEOFError e then return "(quit)" else ioError e
| aksiazek/yali | src/Main.hs | gpl-2.0 | 878 | 1 | 16 | 307 | 257 | 134 | 123 | 26 | 2 |
module Test.AllTests
where
import Test.HUnit
import Data.List (isPrefixOf)
import qualified Cluedo.Model as Model
import qualified Cluedo.Utils as Utils
-- Card-count and card-category predicate tests.
testCardCount :: Assertion
testCardCount = 21 @=? Model.cardCount

testIsPieceCard_isPiece :: Assertion
testIsPieceCard_isPiece = (Model.isPieceCard Model.White) @? "white is a piece"

testIsPieceCard_isNotPiece :: Assertion
testIsPieceCard_isNotPiece =
    (not $ Model.isPieceCard Model.Bathroom)
    @?
    "bathroom is not a piece"

testIsWeaponCard_isWeapon :: Assertion
testIsWeaponCard_isWeapon =
    Model.isWeaponCard Model.Knife
    @?
    "knife is a weapon"

testIsWeaponCard_isNotWeapon :: Assertion
testIsWeaponCard_isNotWeapon =
    (not $ Model.isWeaponCard Model.Peacock)
    @?
    "peacock is not a weapon"

testIsRoomCard_isRoom :: Assertion
testIsRoomCard_isRoom = Model.isRoomCard Model.Garage @? "garage is a room"
testIsRoomCard_isNotRoom :: Assertion
-- | 'Model.Pipe' must not be classified as a room.
-- Fixed assertion message: it previously read "pie is not a room"
-- although the card under test is Pipe.
testIsRoomCard_isNotRoom =
    (not $ Model.isRoomCard Model.Pipe)
    @?
    "pipe is not a room"
-- parseCard round-trip tests.
testParseCard_isCard :: Assertion
testParseCard_isCard = Model.parseCard "Plum" @=? Just Model.Plum

testParseCard_notCard :: Assertion
testParseCard_notCard = Model.parseCard "NotCard" @=? Nothing
withUnknown l = zip l (repeat Model.Unknown)
-- A freshly created player has every card in Unknown status.
testPieces_fullPlayer :: Assertion
testPieces_fullPlayer =
    Model.pieces (Model.fullPlayer "test") @=? withUnknown Model.allPieces

testWeapons_fullPlayer :: Assertion
testWeapons_fullPlayer =
    Model.weapons (Model.fullPlayer "test") @=? withUnknown Model.allWeapons

testRooms_fullPlayer :: Assertion
testRooms_fullPlayer =
    Model.rooms (Model.fullPlayer "test") @=? withUnknown Model.allRooms

testGetCardStatus :: Assertion
testGetCardStatus =
    Model.Unknown @=? Model.getCardStatus
                          Model.Bathroom
                          (Model.fullPlayer "test")

-- Fixture: a player known to hold Peacock, everything else Unknown.
playerWithPeacockYes :: Model.Player
playerWithPeacockYes = Model.Player "test" $ withUnknown Model.allWeapons
                    ++ withUnknown Model.allRooms
                    ++ [ (Model.Scarlett, Model.Unknown)
                       , (Model.Mustard , Model.Unknown)
                       , (Model.White   , Model.Unknown)
                       , (Model.Green   , Model.Unknown)
                       , (Model.Peacock , Model.Yes    )
                       , (Model.Plum    , Model.Unknown)
                       ]

testGetPeacockStatus :: Assertion
testGetPeacockStatus =
    Model.Yes @=? Model.getCardStatus
                      Model.Peacock playerWithPeacockYes
-- setCardTuple / clearCardTuple only touch the tuple of the named card.
testSetCardTuple :: Assertion
testSetCardTuple =
    (Model.Plum, Model.Yes)
    @=?
    Model.setCardTuple Model.Plum (Model.Plum, Model.Unknown)

testSetCardTuple_notSetForDifferentCard :: Assertion
testSetCardTuple_notSetForDifferentCard =
    (Model.Plum, Model.Unknown)
    @=?
    Model.setCardTuple Model.Peacock (Model.Plum, Model.Unknown)

testClearCardTuple :: Assertion
testClearCardTuple =
    (Model.Plum, Model.No)
    @=?
    Model.clearCardTuple Model.Plum (Model.Plum, Model.Unknown)

testClearCardTuple_notClearForDifferentCard :: Assertion
testClearCardTuple_notClearForDifferentCard =
    (Model.Plum, Model.Unknown)
    @=?
    Model.clearCardTuple Model.Peacock (Model.Plum, Model.Unknown)

-- setCard/clearCard: the named player gets Yes/No; other players are
-- updated to the complementary status.
testSetCard_givenCardAndPlayerIsSet :: Assertion
testSetCard_givenCardAndPlayerIsSet =
    let result = Model.setCard
                     playerName
                     Model.Peacock
                     (Model.fullPlayer playerName)
    in
        Model.Yes @=? Model.getCardStatus Model.Peacock result
    where playerName = "player"

testSetCard_givenCardAndWrongPlayerIsSet :: Assertion
testSetCard_givenCardAndWrongPlayerIsSet =
    let result = Model.setCard
                     "differentPlayer"
                     Model.Peacock
                     (Model.fullPlayer "player")
    in
        Model.No @=? Model.getCardStatus Model.Peacock result

testClearCard_givenCardAndPlayerIsCleared :: Assertion
testClearCard_givenCardAndPlayerIsCleared =
    let result = Model.clearCard
                     playerName
                     Model.Peacock
                     (Model.fullPlayer playerName)
    in
        Model.No @=? Model.getCardStatus Model.Peacock result
    where playerName = "player"

testClearCard_givenCardAndWrongPlayerIsCleared :: Assertion
testClearCard_givenCardAndWrongPlayerIsCleared =
    let result = Model.clearCard
                     "differentPlayer"
                     Model.Peacock
                     (Model.fullPlayer "player")
    in
        Model.Unknown @=? Model.getCardStatus Model.Peacock result
-- parseCardReply / parseReply / printReply tests.
testParseCardReply_EmptyCard :: Assertion
testParseCardReply_EmptyCard =
    (Just Model.EmptyCard) @=? Model.parseCardReply "EmptyCard"

testParseCardReply_UnknownCard :: Assertion
testParseCardReply_UnknownCard =
    (Just Model.UnknownCard) @=? Model.parseCardReply "UnknownCard"

testParseCardReply_Peacock :: Assertion
testParseCardReply_Peacock =
    (Just $ Model.CardReply Model.Peacock) @=? Model.parseCardReply "Peacock"

testParseCardReply_InvalidCard :: Assertion
testParseCardReply_InvalidCard =
    Nothing @=? Model.parseCardReply "InvalidCard"

-- parseReply requires a known player name and a valid card token,
-- exactly two tokens long.
testParseReply_Valid :: Assertion
testParseReply_Valid =
    (Just $ Model.Reply "player1" (Model.CardReply Model.White))
    @=?
    (Model.parseReply ["player2", "player1"] ["player1", "White"])

testParseReply_UnknownPlayer :: Assertion
testParseReply_UnknownPlayer =
    Nothing
    @=?
    (Model.parseReply ["player2", "player1"] ["playerUnknown", "White"])

testParseReply_CardInvalid :: Assertion
testParseReply_CardInvalid =
    Nothing
    @=?
    (Model.parseReply ["player2", "player1"] ["player1", "CardInvalid"])

testParseReply_WrongTokenCount :: Assertion
testParseReply_WrongTokenCount =
    Nothing
    @=?
    (Model.parseReply ["player2", "player1"] [ "player1"
                                             , "White"
                                             , "additionalToken"])

testPrintReply :: Assertion
testPrintReply =
    "name\tEmptyCard"
    @=?
    (Model.printReply $ Model.Reply "name" Model.EmptyCard)
-- Log-entry discriminators and pretty-printing tests.
testIsTurnEntry_TurnEntry :: Assertion
testIsTurnEntry_TurnEntry =
    True @=? (Model.isTurnEntry $ Model.TurnEntry "" [] [])

testIsTurnEntry_NotTurnEntry :: Assertion
testIsTurnEntry_NotTurnEntry =
    False @=? (Model.isTurnEntry $ Model.Accusation "" [])

testIsAccusation_Accusation :: Assertion
testIsAccusation_Accusation =
    True @=? (Model.isAccusation $ Model.Accusation "" [])

testIsAccusation_NotAccusation :: Assertion
testIsAccusation_NotAccusation =
    False @=? (Model.isAccusation $ Model.TurnEntry "" [] [])

testPrintLogEntry_TurnEntry :: Assertion
testPrintLogEntry_TurnEntry =
    ("asker \n" ++
     " Peacock\n" ++
     " replier\tEmptyCard")
    @=?
    (Model.printLogEntry $
        Model.TurnEntry
            "asker"
            [Model.Peacock]
            [Model.Reply "replier" Model.EmptyCard])

testPrintLogEntry_Accusation :: Assertion
testPrintLogEntry_Accusation =
    ("accusation:\tsuggester \n" ++
     " Peacock")
    @=?
    (Model.printLogEntry $
        Model.Accusation
            "suggester"
            [Model.Peacock])
-- cardsShowedTo: which cards the named asker was shown, per the log.
testCardsShowedTo_NonExistentPlayer :: Assertion
testCardsShowedTo_NonExistentPlayer =
    []
    @=?
    (Model.cardsShowedTo
        "NonExistentPlayer"
        [ Model.TurnEntry
            "player1"
            [ Model.Peacock
            , Model.Wrench
            ]
            [ Model.Reply
                "player2"
                (Model.CardReply Model.Peacock)
            ]
        ]
    )

testCardsShowedTo_MePlayerIsAbsent :: Assertion
testCardsShowedTo_MePlayerIsAbsent =
    []
    @=?
    (Model.cardsShowedTo
        "player1"
        [ Model.TurnEntry
            "player1"
            [ Model.Peacock
            , Model.Wrench
            ]
            [ Model.Reply
                "player2"
                (Model.CardReply Model.Peacock)
            ]
        ]
    )

testCardsShowedTo_ListedPlayers :: Assertion
testCardsShowedTo_ListedPlayers =
    [Model.Peacock]
    @=?
    (Model.cardsShowedTo
        "player1"
        [ Model.TurnEntry
            "player1"
            [ Model.Peacock
            , Model.Wrench
            ]
            [ Model.Reply
                "me"
                (Model.CardReply Model.Peacock)
            , Model.Reply
                "player2"
                (Model.CardReply Model.Wrench)
            ]
        ]
    )
-- findSinglePlayerWithNonNegativeCardStatus: find the unique player
-- whose status for the card is Yes or Unknown; Nothing if not unique.
testFindPlayerPossiblyHasCard_hasCardYes :: Assertion
testFindPlayerPossiblyHasCard_hasCardYes =
    (Just ("player1", Model.Peacock))
    @=?
    (Model.findSinglePlayerWithNonNegativeCardStatus
        [ (Model.Player
            "player1"
            [ (Model.Peacock, Model.Yes)
            , (Model.White, Model.No)
            ])
        , (Model.Player
            "player2"
            [ (Model.Peacock, Model.No)
            , (Model.White, Model.No)
            ])
        ]
        Model.Peacock
    )

testFindPlayerPossiblyHasCard_hasCardUnknown :: Assertion
testFindPlayerPossiblyHasCard_hasCardUnknown =
    (Just ("player1", Model.Peacock))
    @=?
    (Model.findSinglePlayerWithNonNegativeCardStatus
        [ (Model.Player
            "player1"
            [ (Model.Peacock, Model.Unknown)
            , (Model.White, Model.No)
            ])
        , (Model.Player
            "player2"
            [ (Model.Peacock, Model.No)
            , (Model.White, Model.No)
            ])
        ]
        Model.Peacock
    )

testFindPlayerPossiblyHasCard_hasCardNothing :: Assertion
testFindPlayerPossiblyHasCard_hasCardNothing =
    Nothing
    @=?
    (Model.findSinglePlayerWithNonNegativeCardStatus
        [ (Model.Player
            "player1"
            [ (Model.Peacock, Model.Unknown)
            , (Model.White, Model.No)
            ])
        , (Model.Player
            "player2"
            [ (Model.Peacock, Model.Unknown)
            , (Model.White, Model.No)
            ])
        ]
        Model.Peacock
    )
-- generateCardCompletionList tests.  Note: the input line is passed
-- reversed (readline-style), hence the 'reverse' calls and the reversed
-- remainders such as " etihW" (= reverse "White ").
testGenerateCardCompletionList_EmptyStringAllCards :: Assertion
testGenerateCardCompletionList_EmptyStringAllCards =
    ("", Model.allCardsStrings)
    @=?
    (Utils.generateCardCompletionList 1 Model.allCards "")

testGenerateCardCompletionList_PeacockPrefix :: Assertion
testGenerateCardCompletionList_PeacockPrefix =
    ("", ["Peacock"])
    @=?
    (Utils.generateCardCompletionList 1 Model.allCards $ reverse "Pea")

testGenerateCardCompletionList_PeacockPrefixSecondToken :: Assertion
testGenerateCardCompletionList_PeacockPrefixSecondToken =
    (" etihW", ["Peacock"])
    @=?
    (Utils.generateCardCompletionList 2 Model.allCards $ reverse "White Pea")

testGenerateCardCompletionList_EndingSpaceNonMentionedCards :: Assertion
testGenerateCardCompletionList_EndingSpaceNonMentionedCards =
    (" etihW", (filter ("White" /=) Model.allCardsStrings))
    @=?
    (Utils.generateCardCompletionList 2 Model.allCards $ reverse "White ")

testGenerateCardCompletionList_OnlyFromAllowedCards :: Assertion
testGenerateCardCompletionList_OnlyFromAllowedCards =
    ("", ["White", "Wrench"])
    @=?
    (Utils.generateCardCompletionList 1 [Model.White, Model.Wrench] "")

testGenerateCardCompletionList_ListOfCardsIsFull :: Assertion
testGenerateCardCompletionList_ListOfCardsIsFull =
    (" etihW", [])
    @=?
    (Utils.generateCardCompletionList 1 Model.allCards $ reverse "White ")

testGenerateCardCompletionList_ListOfCardsHasSpace :: Assertion
testGenerateCardCompletionList_ListOfCardsHasSpace =
    (" etihW", ["Peacock"])
    @=?
    (Utils.generateCardCompletionList 2 Model.allCards $ reverse "White Pea")

testGenerateCardCompletionList_PartialNonRepeated :: Assertion
testGenerateCardCompletionList_PartialNonRepeated =
    (" etihW", [])
    @=?
    (Utils.generateCardCompletionList 2 Model.allCards $ reverse "White Whi")

testGenerateCardCompletionList_LastWordShouldBeCompleted :: Assertion
testGenerateCardCompletionList_LastWordShouldBeCompleted =
    (" etihW", ["Rope"])
    @=?
    (Utils.generateCardCompletionList 2 Model.allCards $ reverse "White Rope")

-- isUnique tests.
testIsUnique_allDifferent :: Assertion
testIsUnique_allDifferent =
    True @=? (Utils.isUnique ["A", "B", "C"])

testIsUnique_hasSimilar :: Assertion
testIsUnique_hasSimilar =
    False @=? (Utils.isUnique ["A", "B", "B"])
| VictorDenisov/cluedo | src/Test/AllTests.hs | gpl-2.0 | 13,208 | 0 | 14 | 3,851 | 2,507 | 1,365 | 1,142 | 319 | 1 |
{-# LANGUAGE DeriveGeneric #-}
{-# CFILES cbits/hfrequencyqueue_backend.cpp #-}
{-|
Module : FrequencyQueue.IO
Description : Provide the IO interface for FrequencyQueue
Copyright : (c) Andrea Bellandi 2014
License : GPL-3
Maintainer : bellaz89@gmai.com
Stability : experimental
Portability : POSIX
This module export the IO interface of FrequencyQueue.
-}
module FrequencyQueue.IO(
-- *Types
FrequencyQueue(),
-- *Functions
-- **Creation functions
newFrequencyQueue, cloneFrequencyQueue,
-- **Basic properties
length, probabilityUnit,
-- **Pop-push functions
pushBack, popBack, popBackMax, popBackMin, getRandom, getRandomPop,
-- **Iterative functions
mapWprobability, foldWprobability,
-- **Unsafe interface
popBackUnsafe, popBackMaxUnsafe, popBackMinUnsafe, getRandomUnsafe, getRandomPopUnsafe) where
import Prelude hiding (length)
import GHC.Generics
import Control.Monad(replicateM)
import Foreign.Concurrent(newForeignPtr)
import Foreign.Marshal.Utils(new)
import Foreign.Marshal.Alloc(free)
import Foreign.CStorable(CStorable, cAlignment, cSizeOf, cPoke, cPeek)
import Foreign.Storable(Storable, alignment, sizeOf, poke, peek)
import Foreign.ForeignPtr(ForeignPtr, withForeignPtr)
import Foreign.StablePtr(StablePtr, deRefStablePtr, freeStablePtr, newStablePtr)
import Foreign.Ptr(Ptr)
import Foreign.C.Types
type FrequencyQueue_ a = Ptr a
-- | FrequencyQueue the basic type of the Library
data FrequencyQueue a = FrequencyQueue{ queue :: ForeignPtr a}
data RandomElement a = RandomElement{ probability :: CUInt,
element :: StablePtr a}
deriving(Generic)
instance CStorable (StablePtr a) where
cAlignment = alignment
cSizeOf = sizeOf
cPoke = poke
cPeek = peek
instance CStorable (RandomElement a)
instance Storable (RandomElement a) where
alignment = cAlignment
sizeOf = cSizeOf
poke = cPoke
peek = cPeek
-- the foreign import shouldn't call functions that call-back the GHC runtime (clone_FrequencyQueue_priv_ and free_FrequencyQueue_priv_)
-- unsafely. Functions that are called unsafely should have constant or constant amortized time to not block the caller OS too much.
foreign import ccall unsafe new_FrequencyQueue_priv_ :: CUInt -> IO (FrequencyQueue_ a)
foreign import ccall clone_FrequencyQueue_priv_ :: (FrequencyQueue_ a) -> IO (FrequencyQueue_ a)
foreign import ccall unsafe length_priv_ :: (FrequencyQueue_ a) -> IO (CUInt)
foreign import ccall unsafe probability_unit_priv_ :: (FrequencyQueue_ a) -> IO (CUInt)
foreign import ccall unsafe push_back_priv_ :: (FrequencyQueue_ a) -> (Ptr (RandomElement a)) -> IO ()
foreign import ccall unsafe pop_back_priv_ :: (FrequencyQueue_ a) -> IO (Ptr (RandomElement a))
foreign import ccall pop_back_max_prob_priv_ :: (FrequencyQueue_ a) -> IO (Ptr (RandomElement a))
foreign import ccall pop_back_min_prob_priv_ :: (FrequencyQueue_ a) -> IO (Ptr (RandomElement a))
foreign import ccall get_random_priv_ :: (FrequencyQueue_ a) -> IO (Ptr (RandomElement a))
foreign import ccall get_random_pop_priv_ :: (FrequencyQueue_ a) -> IO (Ptr (RandomElement a))
foreign import ccall unsafe reset_iterator_priv_ :: (FrequencyQueue_ a) -> IO ()
foreign import ccall unsafe get_next_priv_ :: (FrequencyQueue_ a) -> IO (Ptr (RandomElement a))
foreign import ccall unsafe get_random_number_priv_ :: (FrequencyQueue_ a) -> IO (CUInt)
foreign import ccall free_FrequencyQueue_priv_ :: FrequencyQueue_ a -> IO ()
foreign import ccall unsafe free_RandomElement_priv_ :: (Ptr (RandomElement a)) -> IO ()
foreign export ccall freeStablePtr :: StablePtr a -> IO ()
foreign export ccall makeNewStableRef :: StablePtr a -> IO (StablePtr a)
-- | Dereference a stable pointer and wrap the referenced value in a fresh,
-- independent 'StablePtr' (both pointers must eventually be freed).
makeNewStableRef :: StablePtr a -> IO (StablePtr a)
makeNewStableRef ptr = newStablePtr =<< deRefStablePtr ptr
-- |Create a new FrequencyQueue with a seed
newFrequencyQueue :: Int -> IO (FrequencyQueue a)
newFrequencyQueue seed = do rawqueue <- new_FrequencyQueue_priv_ (fromIntegral seed)
queue_ <- newForeignPtr rawqueue (free_FrequencyQueue_priv_ rawqueue)
return (FrequencyQueue queue_)
-- |Make a clone of the FrequencyQueue Passed
cloneFrequencyQueue :: FrequencyQueue a -> IO (FrequencyQueue a)
cloneFrequencyQueue oldqueue = do rawqueue <- withForeignPtr (queue oldqueue) clone_FrequencyQueue_priv_
queue_ <- newForeignPtr rawqueue (free_FrequencyQueue_priv_ rawqueue)
return (FrequencyQueue queue_)
-- |Return the number of elements in the queue.
-- (Uses 'fmap' instead of the original redundant @>>= (return . f)@ chain.)
length :: FrequencyQueue a -> IO Int
length queue_ = fmap fromIntegral (withForeignPtr (queue queue_) length_priv_)
-- |Return the sum of all elements' probabilities passed to the queue.
probabilityUnit :: FrequencyQueue a -> IO Int
probabilityUnit queue_ = fmap fromIntegral (withForeignPtr (queue queue_) probability_unit_priv_)
-- |Push an element a in the queue with a corresponding relative probability
pushBack :: FrequencyQueue a -> a -> Int -> IO()
pushBack queue_ element_ probability_ = do stableElement_ <- newStablePtr element_
let cUIntProbability = (fromIntegral probability_)
let randomElement_ = RandomElement cUIntProbability stableElement_
allocatedElement_ <- new randomElement_
withForeignPtr (queue queue_) (\x -> push_back_priv_ x allocatedElement_)
free allocatedElement_
-- Safe accessors: each first checks the queue length (via 'makeSafePop')
-- and yields 'Nothing' on an empty queue instead of failing like the
-- *Unsafe primitives further below.
-- |Pop an element of the queue. Return Nothing if the queue is empty
popBack :: FrequencyQueue a -> IO (Maybe (a,Int))
popBack queue_ = makeSafePop queue_ popBackUnsafe
-- |Pop the element of the queue that have the biggest relative probability.
-- Return Nothing if the queue is empty
popBackMax :: FrequencyQueue a -> IO (Maybe (a,Int))
popBackMax queue_ = makeSafePop queue_ popBackMaxUnsafe
-- |Pop the element of the queue that have the smallest relative probability.
-- Return Nothing if the queue is empty
popBackMin :: FrequencyQueue a -> IO (Maybe (a,Int))
popBackMin queue_ = makeSafePop queue_ popBackMinUnsafe
-- |Return a random element from the queue using its relative probability.
-- (Judging by the underlying get_random_priv_ call, the element is
-- apparently not removed — contrast 'getRandomPop'; confirm in the C++ side.)
-- Return Nothing if the queue is empty
getRandom :: FrequencyQueue a -> IO (Maybe (a,Int))
getRandom queue_ = makeSafePop queue_ getRandomUnsafe
-- |Pop a random element from the queue using its relative probability.
-- Return Nothing if the queue is empty
getRandomPop :: FrequencyQueue a -> IO (Maybe (a,Int))
getRandomPop queue_ = makeSafePop queue_ getRandomPopUnsafe
-- |Return a new queue with the elements and relative probability mapped
-- by the function provided. The new queue is seeded from the source
-- queue's random number generator.
mapWprobability :: ((a, Int) -> (b, Int)) -> FrequencyQueue a -> IO (FrequencyQueue b)
mapWprobability fun queue_ = do
    rnd_number   <- withForeignPtr (queue queue_) get_random_number_priv_
    queue_length <- length queue_
    newqueue_    <- newFrequencyQueue (fromIntegral rnd_number)
    withForeignPtr (queue queue_) reset_iterator_priv_
    -- mapM_ discards the unit results; the original 'replicateM'
    -- needlessly accumulated a list of ()s.
    mapM_ (const (transformCopyQueue queue_ newqueue_)) [1 .. queue_length]
    return newqueue_
  where
    -- Read the next element from the source queue's iterator, apply the
    -- mapping function and push the result onto the destination queue.
    -- NOTE(review): the pointer returned by get_next_priv_ is peeked but
    -- never freed here — presumably it references queue-internal storage;
    -- confirm against the C++ backend.
    transformCopyQueue q1 q2 = do
        ptr_rawelement <- withForeignPtr (queue q1) get_next_priv_
        result         <- peek ptr_rawelement
        let probability_   = probability result
        let elementStable_ = element result
        element_ <- deRefStablePtr elementStable_
        let transformed_ = fun (element_, fromIntegral probability_)
        pushBack q2 (fst transformed_) (snd transformed_)
-- |Return a folded value made by an initial value b and a folding function
-- evaluated on the entire queue.
foldWprobability :: (b -> (a, Int) -> b) -> b -> FrequencyQueue a -> IO b
foldWprobability fold_fun b0 queue_ = do withForeignPtr (queue queue_) reset_iterator_priv_
queue_length <- length queue_
iterateOverFrequencyQueue queue_length b0
where
iterateOverFrequencyQueue 0 acc = return acc
iterateOverFrequencyQueue nitem acc = do ptr_rawelement <- withForeignPtr (queue queue_) get_next_priv_
result <- peek ptr_rawelement
let probability_ = probability result
let elementStable_ = element result
element_ <- deRefStablePtr elementStable_
let next_acc = fold_fun acc (element_, fromIntegral probability_)
iterateOverFrequencyQueue (nitem-1) next_acc
-- |Pop an element of the queue. Fail if empty
popBackUnsafe :: FrequencyQueue a -> IO (a, Int)
popBackUnsafe queue_ = do ptr_rawelement <- withForeignPtr (queue queue_) pop_back_priv_
deRefRawElementPtr ptr_rawelement
-- |Pop the element of the queue that have the biggest relative probability.
-- Fail if empty
popBackMaxUnsafe :: FrequencyQueue a -> IO (a, Int)
popBackMaxUnsafe queue_ = do ptr_rawelement <- withForeignPtr (queue queue_) pop_back_max_prob_priv_
deRefRawElementPtr ptr_rawelement
-- |Pop the element of the queue that have the smallest relative probability.
-- Fail if empty
popBackMinUnsafe :: FrequencyQueue a -> IO (a, Int)
popBackMinUnsafe queue_ = do ptr_rawelement <- withForeignPtr (queue queue_) pop_back_min_prob_priv_
deRefRawElementPtr ptr_rawelement
-- |Pop the element of the queue that have the smallest relative probability.
-- Fail if empty
getRandomUnsafe :: FrequencyQueue a -> IO (a, Int)
getRandomUnsafe queue_ = do ptr_rawelement <- withForeignPtr (queue queue_) get_random_priv_
result <- peek ptr_rawelement
let probability_ = probability result
let elementStable_ = element result
element_ <- deRefStablePtr elementStable_
return (element_, fromIntegral probability_)
-- |Pop a random element from the queue using its relative probability.
-- Fail if empty
getRandomPopUnsafe :: FrequencyQueue a -> IO (a, Int)
getRandomPopUnsafe queue_ = do ptr_rawelement <- withForeignPtr (queue queue_) get_random_pop_priv_
deRefRawElementPtr ptr_rawelement
deRefRawElementPtr :: Ptr (RandomElement a) -> IO (a, Int)
deRefRawElementPtr ptr_rawelement = do result <- peek ptr_rawelement
free_RandomElement_priv_ ptr_rawelement
let probability_ = probability result
let elementStable_ = element result
element_ <- deRefStablePtr elementStable_
freeStablePtr elementStable_
return (element_, fromIntegral probability_)
-- | Turn an *Unsafe accessor into a total one: yields 'Nothing' when the
-- queue is empty instead of failing.
-- NOTE(review): the emptiness check and the subsequent access are not
-- atomic; a queue shared between threads could still fail in between.
makeSafePop :: FrequencyQueue a -> (FrequencyQueue a -> IO (a, Int)) -> IO (Maybe (a,Int))
makeSafePop queue_ unsafefun = do
    qlength <- length queue_
    if qlength == 0
        then return Nothing
        -- 'fmap Just' replaces the redundant '>>= \x -> return (Just x)'
        else fmap Just (unsafefun queue_)
| Bellaz/HfrequencyList | src/FrequencyQueue/IO.hs | gpl-3.0 | 11,796 | 0 | 14 | 3,138 | 2,516 | 1,276 | 1,240 | 145 | 2 |
-- Challenge 1:
-- Can we turn the Maybe type constructor into a functor by defining:
--
-- fmap _ _ = Nothing
--
-- which ignores both of its arguments? (Hint: Check the functor laws.)
--
--
-- 1. The identity law is not satisfied because
--    fmap id (Just x) yields Nothing, which is not equal to Just x.
--
-- Similarly, the composition law would be violated if either the LHS or
-- the RHS were not Nothing.
--
| sujeet4github/MyLangUtils | CategoryTheory_BartoszMilewsky/PI_07_Functors/ex1_Maybe.hs | gpl-3.0 | 362 | 0 | 2 | 76 | 15 | 14 | 1 | 1 | 0 |
import Text.ParserCombinators.Parsec hiding (spaces)
import Control.Monad
data LispVal = Atom String
| List [LispVal]
| DottedList [LispVal] LispVal
| Number Integer
| String String
| Bool Bool
-- Parse one or more digits and read them as a numeric Lisp value.
parseNumber :: Parser LispVal
parseNumber = liftM (Number . read) (many1 digit)
| lifengsun/haskell-exercise | scheme/02/ex-1-1.a.hs | gpl-3.0 | 356 | 0 | 9 | 116 | 98 | 54 | 44 | 12 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
module QLogic.BoxWorld where
-- TODO add export list
import Data.Maybe
import Data.List
import Prelude hiding (and, concat
, foldl, foldl1, sequence)
import Data.IntSet (IntSet)
import qualified Data.IntSet as IS
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
-- import Data.Poset.ConcretePoset
import QLogic
import QLogic.Concrete
import QLogic.States
import QLogic.GeneralBoxes
import Data.Attoparsec.ByteString.Char8 hiding (take, space)
import QLogic.Utils
import Control.Applicative
-- |Point in the (generalized) phase space.
-- In classical physics, points of the phase space can be interpreted
-- as a pure states of the system. Consequently, each point specifies
-- exactly outcomes of observables of the classical system.
-- Here we use this interpretation encode points as a *Map* from *Char*
-- (symbol denoting observable) to *Int* (value of that observable).
newtype Point = Point (Map Char Int) deriving (Eq, Ord)
instance Show Point where
show (Point p) = concatMap (\(k, v) -> show k ++ show v) . Map.toList $ p
-- |Phase space is simply a set of points.
newtype PhaseSpace a = PhaseSpace (Set a) deriving (Eq, Ord, Show)
-- |Enumerate the points of a phase space as a list.
phasePoints :: PhaseSpace a -> [a]
phasePoints (PhaseSpace pts) = Set.toList pts
-- |Build a phase space of a classical system, given the list of observables.
--
-- Let us discuss this function for Two system.
-- The argument is of type Two [Observable],
-- i.e. we have list of observables for each component of the system.
-- Then
-- > combineWith phaseSpace1' == sequenceA . fmap phaseSpace1'
-- firstly converts list of observables to list of
-- phaseSpace points that are required to describe output of these observables
-- (the 'fmap phaseSpace1'' part), and then we construct the list of
-- phase space points from the lists for components ('sequenceA').
phaseSpace :: (System a, Ord (a Point)) => a [Observable] -> PhaseSpace (a Point)
phaseSpace = PhaseSpace . Set.fromList . combineWith phaseSpace1'
-- Build all phase-space points of a single subsystem: every way of
-- assigning one value from each observable's domain.
phaseSpace1' :: [Observable] -> [Point]
phaseSpace1' obs =
    map (Point . Map.fromList) (cart assignments)
  where
    -- one list of (observable name, value) pairs per observable
    assignments = [ [ (name o, k) | k <- domain o ] | o <- obs ]
    -- Cartesian product; unlike 'sequence', an empty input yields
    -- no points (not one empty point).
    cart []       = []
    cart [xs]     = map (: []) xs
    cart (xs:xss) = [ x : rest | x <- xs, rest <- cart xss ]
-- | True iff, at the given phase-space point, every box of @b@ observes
-- exactly the outcome it specifies ('and' over all subsystems).
askBox :: System s => s Box -> s Point -> Bool
askBox b point = and $ liftA2 askBox1 b point
  where
    -- a single box matches when the point assigns its observable the
    -- box's outcome; an observable missing from the point never matches
    askBox1 (Box a alpha) (Point p) = maybe False (== alpha) $ Map.lookup a p
-- | True iff at least one of the question's boxes holds at the point.
askQ :: System s => Question (s Box) -> s Point -> Bool
askQ q point = or $ fmap (`askBox` point) q
-- | The subset of phase-space points at which the question holds.
phaseSubset :: System s => PhaseSpace (s Point) -> Question (s Box) -> Set (s Point)
phaseSubset (PhaseSpace points) q = Set.filter (askQ q) points
boxWorldLogic :: (System s, Ord (s Point), Ord (s Box))
=> BoxModel s
-> Representation ConcreteInt IntSet (Question (s Box))
boxWorldLogic obs = Representation q2set set2q ql
where
ql = concreteIntSubOMP (booleanAlgebraInt space) (map q2set atomicQs)
phase = phaseSpace obs
atomicQs = boxAtoms obs
phasePack = packList . phasePoints $ phase
packSet = IS.fromList . map (toKey phasePack) . Set.toList
space = IS.fromList [0..length atomicQs - 1]
q2set = packSet . phaseSubset phase
invAtoms = rightInvMap atomicQs q2set
set2q q
| IS.null q = nullQ
| otherwise = collapse . sequenceA . map (invAtoms Map.!) . decompose ql $ q
where
collapse (Question as) = Question . concat $ as
boxWorldLogic' :: (System s, Ord (s Point), Ord (s Box))
=> BoxModel s
-> Representation (Concrete (s Point)) (Set (s Point)) (Question (s Box))
boxWorldLogic' obs = Representation q2set set2q ql
where
ql = concreteSubOMP (booleanAlgebra ps) (map q2set atomicQs)
phase@(PhaseSpace ps)= phaseSpace obs
atomicQs = boxAtoms obs
q2set = phaseSubset phase
invAtoms = rightInvMap atomicQs q2set
set2q q
| Set.null q = nullQ
| otherwise = collapse . sequenceA . map (invAtoms Map.!) . decompose ql $ q
where
collapse (Question as) = Question . concat $ as
-- | Propositions of the box model, generated by the specified function,
-- represented as a IntSet (much faster).
boxWorldPropositions :: (System s, Ord (s Point)) =>
BoxModel s -> (ConcreteInt -> [IntSet] -> ConcreteInt) -> ConcreteInt
boxWorldPropositions obs typ = typ (booleanAlgebraInt space) (map q2set atomicQs)
where
atomicQs = boxAtoms obs
space = IS.fromList [0..length atomicQs - 1]
phase = phaseSpace obs
q2set = packSet . phaseSubset phase
packSet = IS.fromList . map (toKey phasePack) . Set.toList
phasePack = packList . phasePoints $ phase
-- | Propositions of the box model, generated by the specified function,
-- represented by Set of Box model phase points.
boxWorldPropositions' :: (System s, Ord (s Point)) =>
BoxModel s -> (Concrete (s Point) -> [Set (s Point)] -> Concrete (s Point))
-> Concrete (s Point)
boxWorldPropositions' obs typ = typ (booleanAlgebra ps) (map q2set atomicQs)
where
phase@(PhaseSpace ps)= phaseSpace obs
atomicQs = boxAtoms obs
q2set = phaseSubset phase
-- | Build the map from f-images back to their preimages in @dom@.
-- When several elements share an image, the later one in the list wins —
-- the same semantics as repeated 'Map.insert'.
rightInvMap :: (Ord b) => [a] -> (a -> b) -> Map b a
rightInvMap dom f = Map.fromList [ (f x, x) | x <- dom ]
-- | Pick some element of @dom@ that @f@ maps to @y@.
-- Still fails when no such element exists, but now with a descriptive
-- error instead of the opaque 'fromJust' pattern-match failure.
rightInv :: (Eq b) => [a] -> (a -> b) -> b -> a
rightInv dom f y =
    fromMaybe (error "rightInv: no preimage of the requested value") $
        find (\x -> f x == y) dom
data Representation p a b = Representation { toRepr :: b -> a
, fromRepr :: a -> b
, logicRepr :: p }
instance (POrdStruct p a) => POrdStruct (Representation p a b) b where
elementsOf ql = map (fromRepr ql) . elementsOf . logicRepr $ ql
lessIn = liftRepr2 lessIn
supIn ql a b = fromRepr ql <$> liftRepr2 supIn ql a b
infIn ql a b = fromRepr ql <$> liftRepr2 infIn ql a b
instance (Eq b, QLogicStruct p a) => QLogicStruct (Representation p a b) b where
ocmplIn = liftRepr ocmplIn
orthoIn = liftRepr2 orthoIn
compatIn = liftRepr2 compatIn
zeroOf = liftRepr0 zeroOf
oneOf = liftRepr0 oneOf
-- subLogic iso els = Representation (toRepr iso) (fromRepr iso) sublogic
-- where
-- sublogic = subLogic (logicRepr iso) $ map (toRepr iso) els
cartesian :: (Ord a, Ord (s a), System s) => s (Set a) -> Set (s a)
cartesian = Set.fromList . sequenceA . fmap Set.toList
proj1 :: (System s, System s', Splitting s s', Ord (s a), Ord (s' a), Ord a)
=> Set (s a) -> Set (One a)
proj1 = Set.map (fst . split)
projs :: (System s, System s', Splitting s s', Ord (s a), Ord (s' a), Ord a)
=> Set (s a) -> [Set (One a)]
projs r = map (\s -> proj1 $ Set.map s r) shifts
stronglyDisjoint1 :: (System s, System s', Splitting s s', Ord (s a), Ord (s' a), Ord a)
=> Set (s a) -> Set (s a) -> Bool
stronglyDisjoint1 p q = disj a b || disj ar br
where
disj sa sb = Set.null (Set.intersection sa sb)
(a, ar) = psplit p
(b, br) = psplit q
psplit s = (Set.map (fst . split) s, Set.map (snd . split) s)
strongDisjoint :: (System s, System s', Splitting s s', Ord (s a), Ord (s' a), Ord a)
=> Set (s a) -> Set (s a) -> Bool
-- strongDisjoint p q = or $
-- zipWith (\a b -> Set.null $ Set.intersection a b)
-- (projs p) (projs q)
strongDisjoint p q = or $ zipWith disj (projs p) (projs q)
where
disj a b = a `Set.isSubsetOf` b || b `Set.isSubsetOf` a
|| Set.null (Set.intersection a b)
-- I kinf of don't understand that.
--
-- stateRepr :: Representation p a b -> State b f -> State a f
-- stateRepr r (State s) = State $ \q -> s . fromRepr $ q
--
-- But if we map atoms when we create the state,
-- then it is much more efficient (fromRepr is expensive)
--
-- There is some "hidden" structure, maybe it will give
-- some hints.
--
liftRepr2 f iso a b = f (logicRepr iso) (toRepr iso a) (toRepr iso b)
liftRepr f iso = fromRepr iso . f (logicRepr iso) . toRepr iso
liftRepr0 f iso = fromRepr iso $ f (logicRepr iso)
twoValuedStates :: ConcreteInt -> [State IntSet Int]
twoValuedStates ql = filter (isStateII' ql) . map (fromAtomicList ql . zip atoms) $ tuples [0, 1] n
where
atoms = atomsOf ql
n = length atoms
-- Printers and readers
parsePRState :: (System s, Ord (s Box)) => Parser [(Question (s Box), Double)]
parsePRState = parseValue `sepBy1` (char ',' >> skipSpace)
where
parseValue = do
q <- parseQ
char '='
v <- double
return (q, v)
-- readState2 :: String -> ConcreteInt -> (Question (Two Atomic) -> IntSet) -> State IntSet Double
-- readState2 str ql repr = case parseOnly (parseAtomicState repr) (pack str) of
-- Right qs -> fromAtomicList ql qs
-- Left err -> error $ "Error parsing" ++ err
-- parseAtomicState :: System a => (Question (a Atomic) -> IntSet) -> Parser [(IntSet, Double)]
-- parseAtomicState repr = parseValue' `sepBy1` (char ',' >> skipSpace)
-- where
-- parseValue' = do
-- q <- parseQ
-- char '='
-- v <- double
-- return (repr . Question $ q, v)
| ttylec/QLogic | src/QLogic/BoxWorld.hs | gpl-3.0 | 9,731 | 0 | 14 | 2,492 | 3,005 | 1,563 | 1,442 | 145 | 3 |
module Main where
import VSim.Runtime
-- Placeholder helper; its argument is ignored.
-- NOTE(review): appears unused within this file — confirm before removing.
stuff x = return ()
-- | Elaborate the simulation model: an unbounded integer type, a 0..5
-- ranged type, three signals (clk initialised to 33), two variables,
-- and one clk-sensitive process.
elab :: Elab IO ()
elab = do
    t_int <- alloc_unranged_type
    t_0_5 <- alloc_ranged_type (alloc_range (int 0) (int 5))
    s1 <- alloc_signal "s1" t_int defval
    s2 <- alloc_signal "s2" t_int defval
    clk <- alloc_signal "clk" t_int (assign $ int 33)
    v <- alloc_variable "v" t_0_5 (assign $ int 0)
    v2 <- alloc_variable "v2" t_0_5 (assign $ (pure v) .+. (int 0))
    -- Process sensitive to clk: each activation schedules clk <= clk + 1
    -- after a delay of 'us 1' (presumably one microsecond — confirm) and
    -- reports clk's textual image.
    proc1 <- alloc_process "main" [clk] $ do
        breakpoint
        -- (pure s1) .<=. (fs 5, assign (pure clk))
        (pure clk) .<=. (us 1, assign $ ((int 1) .+. (pure clk)))
        report $ (str "muhaha: clk = " ) .++. (t_image (pure clk) t_int)
        return ()
    return ()
-- Run the simulation of 'elab' with a maxBound time/step limit.
main = do
    sim maxBound elab
| grwlf/vsim | src_r/Test/User1.hs | gpl-3.0 | 795 | 0 | 17 | 242 | 317 | 151 | 166 | 20 | 1 |
import Test.QuickCheck
import Test.QuickCheck.Gen (oneof)
import Lib
-- | 1. 'Frue' and 'Fulse' with equal probability.
-- (The original wrapped a single expression in a redundant 'do'.)
foolGen :: Gen Fool
foolGen = oneof [return Frue, return Fulse]
-- | 2. Weighted generator: 1/3 chance of 'Frue', 2/3 chance of 'Fulse'.
foolGen' :: Gen Fool
foolGen' = frequency
  [ (1, return Frue)  -- weight 1 of 3
  , (2, return Fulse) -- weight 2 of 3
  ]
main :: IO ()
main = putStrLn "Test suite not yet implemented"
| nirvinm/Solving-Exercises-in-Haskell-Programming-From-First-Principles | Testing/generator/test/Spec.hs | gpl-3.0 | 392 | 2 | 10 | 87 | 125 | 62 | 63 | 12 | 1 |
{-# LANGUAGE ExtendedDefaultRules #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-type-defaults #-}
-- Module : Network.PagerDuty.REST.Services.EmailFilters
-- Copyright : (c) 2013-2015 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
-- | Email Filters are a set of rules that are applied to triggering email's body,
-- subject and from address. It only applies to generic_email kind of
-- Services. The way multiple filters are combined depends on the
-- email_filter_mode attribute of the service.
--
-- /See:/ <http://developer.pagerduty.com/documentation/rest/services/email_filters>
module Network.PagerDuty.REST.Services.EmailFilters
(
-- * Create Email Filter
createEmailFilter
-- * Update Email Filter
, updateEmailFilter
-- * Delete Email Filter
, deleteEmailFilter
-- * Types
, MatchMode (..)
, HasEmailFilterInfo (..)
, EmailFilterInfo
, EmailFilter
, efId
) where
import Control.Applicative hiding (empty)
import Control.Lens hiding ((.=))
import Data.Aeson
import Data.Default.Class
import Data.Monoid
import Data.Text (Text)
import Network.HTTP.Types
import Network.PagerDuty.Internal.TH
import Network.PagerDuty.Internal.Types
default (Path)
-- | Path to the email-filters collection of a given service:
-- @services\/:service_id\/email_filters@.
filters :: ServiceId -> Path
filters s = "services" % s % "email_filters"
data MatchMode
= Always
| Match
| NoMatch
deriving (Eq, Show)
deriveNullaryWith hyphenated ''MatchMode
-- FIXME: Tighten up this type! Make the regex required for match/no-match and
-- encode the conditional invariants.
data EmailFilterInfo = EmailFilterInfo
{ _efSubjectMode' :: Maybe MatchMode
, _efSubjectRegex' :: Maybe Text
, _efBodyMode' :: Maybe MatchMode
, _efBodyRegex' :: Maybe Text
, _efFromEmailMode' :: Maybe MatchMode
, _efFromEmailRegex' :: Maybe Text
} deriving (Eq, Show)
deriveRecord ''EmailFilterInfo
instance Default EmailFilterInfo where
def = EmailFilterInfo Nothing Nothing Nothing Nothing Nothing Nothing
instance QueryLike EmailFilterInfo where
toQuery = const []
class HasEmailFilterInfo a where
emailFilterInfo :: Lens' a EmailFilterInfo
-- | One of always, match, no-match, which, respectively, means to not
-- filter the email trigger by subject, filter it if the email subject
-- matches the given regex, or filter if it doesn't match the given regex.
--
-- /Default:/ always.
efSubjectMode :: Lens' a (Maybe MatchMode)
-- | The regex to be used when subject_mode is match or no-match.
-- It is a required parameter on such cases.
efSubjectRegex :: Lens' a (Maybe Text)
-- | One of always, match, no-match, which, respectively, means to not filter
-- the email trigger by body, filter it if the body email matches the given
-- regex, or filter if it doesn't match the given regex.
--
-- /Default:/ always.
efBodyMode :: Lens' a (Maybe MatchMode)
-- | The regex to be used when body_mode is match or no-match.
-- It is a required parameter on such cases.
efBodyRegex :: Lens' a (Maybe Text)
-- | One of always, match, no-match, which, respectively, means to not filter
-- the email trigger by its from address, filter it if the email from address
-- matches the given regex, or filter if it doesn't match the given regex.
--
-- /Default:/ always.
efFromEmailMode :: Lens' a (Maybe MatchMode)
-- | The regex to be used when from_email_mode is match or no-match.
-- It is a required parameter on such cases.
efFromEmailRegex :: Lens' a (Maybe Text)
efSubjectMode = emailFilterInfo.efSubjectMode'
efSubjectRegex = emailFilterInfo.efSubjectRegex'
efBodyMode = emailFilterInfo.efBodyMode'
efBodyRegex = emailFilterInfo.efBodyRegex'
efFromEmailMode = emailFilterInfo.efFromEmailMode'
efFromEmailRegex = emailFilterInfo.efFromEmailRegex'
instance (QueryLike a, ToJSON a, HasEmailFilterInfo a)
=> HasEmailFilterInfo (Request a s b) where
emailFilterInfo = upd.emailFilterInfo
instance HasEmailFilterInfo EmailFilterInfo where
emailFilterInfo = id
data EmailFilter = EmailFilter
{ _efId :: EmailFilterId
, _efInfo :: EmailFilterInfo
} deriving (Eq, Show)
instance FromJSON EmailFilter where
parseJSON = withObject "email_filter" $ \o ->
EmailFilter <$> parseJSON (Object o)
<*> o .: "id"
instance ToJSON EmailFilter where
toJSON e = Object (x <> y)
where
Object x = toJSON (_efInfo e)
Object y = object ["id" .= _efId e]
instance HasEmailFilterInfo EmailFilter where
emailFilterInfo = lens _efInfo (\e x -> e { _efInfo = x })
-- | The email filter ID.
makeLens "_efId" ''EmailFilter
-- | Create a new Email Filter for the specified service.
--
-- @POST \/services\/\:service_id\/email_filters@
--
-- /See:/ <http://developer.pagerduty.com/documentation/rest/services/email_filters/create>
-- Implementation note: each request starts from 'def' (all matchers
-- unset); callers refine the body through the 'HasEmailFilterInfo'
-- lenses defined above before sending.
createEmailFilter :: ServiceId -> Request EmailFilterInfo s EmailFilter
createEmailFilter s = mk def & meth .~ POST & path .~ filters s
-- | Update an existing Email Filter.
--
-- @PUT \/services\/\:service_id\/email_filters\/\:id@
--
-- /See:/ <http://developer.pagerduty.com/documentation/rest/services/email_filters/update>
updateEmailFilter :: ServiceId
                  -> EmailFilterId
                  -> Request EmailFilterInfo s Empty
updateEmailFilter s e = mk def & meth .~ PUT & path .~ filters s % e
-- | Delete an existing Email Filter.
--
-- @DELETE \/services\/\:service_id\/email_filters\/\:id@
--
-- /See:/ <http://developer.pagerduty.com/documentation/rest/services/email_filters/delete>
deleteEmailFilter :: ServiceId -> EmailFilterId -> Request Empty s Empty
deleteEmailFilter s e = empty & meth .~ DELETE & path .~ filters s % e
| brendanhay/pagerduty | src/Network/PagerDuty/REST/Services/EmailFilters.hs | mpl-2.0 | 6,417 | 0 | 13 | 1,482 | 929 | 524 | 405 | 87 | 1 |
{-# LANGUAGE DatatypeContexts #-}
data
( LooooooooooooooooooooongConstraint a
, LooooooooooooooooooooongConstraint b
) =>
MyRecord a b
= MyConstructor
{ foo1, foo2
:: loooooooooooooooooooooooooooooooong
-> loooooooooooooooooooooooooooooooong
, bar :: a
, bazz :: b
}
| lspitzner/brittany | data/Test51.hs | agpl-3.0 | 310 | 3 | 6 | 77 | 54 | 31 | 23 | 11 | 0 |
{-
Copyright (C) 2009 Andrejs Sisojevs <andrejs.sisojevs@nextmail.ru>
All rights reserved.
For license and copyright information, see the file COPYRIGHT
-}
--------------------------------------------------------------------------
--------------------------------------------------------------------------
-- | Here are declared 'LocalizableTemplate' (also called PCLT)
-- and 'LocalizedTemplate'.
-- Here by localization is meant localization in languages.
-- First (localizable template) is above languages,
-- while second (localized template) is a template version
-- in a concrete language.
module Text.PCLT.Template where
import qualified Data.ByteString.Lazy.UTF8.Unified as Lazy (ByteString)
import qualified Data.ByteString.Lazy.UTF8.Unified as B hiding (ByteString)
import Data.List
import qualified Data.Map as M
import Data.Map (Map, (!))
import Data.MyHelpers
import Data.Typeable
import Text.PCLT.Parser.AdvancedSepBy
import Text.PCLT.Parser.ParserInternals
import Text.PCLT.CommonTypes
import Text.PCLT.Config
import Text.PCLT.SDL
-- * Template pieces
-- | These are types of template pieces. They are made by 'ssm2ldtm'
-- from 'Text.PCLT.Parser.AdvancedSepBy.SeparatedSectorMarker'
data PCS_SpecificMarkings =
PlainText_LngTplM
| Parameter_LngTplM
| Composite_LngTplM
| Unsupported_LngTplM SeparatedSectorMarker
deriving (Eq, Show, Typeable)
-- Translate a parser sector marker into a template-piece marking.
-- Inner markers 0/1/2 denote plain text, a parameter placeholder and a
-- composite placeholder respectively; everything else is unsupported.
ssm2ldtm :: SeparatedSectorMarker -> PCS_SpecificMarkings
ssm2ldtm ssm =
  case ssm of
    Error_SSM _       -> Unsupported_LngTplM ssm
    InnerMarker_SSM 0 -> PlainText_LngTplM
    InnerMarker_SSM 1 -> Parameter_LngTplM
    InnerMarker_SSM 2 -> Composite_LngTplM
    _                 -> Unsupported_LngTplM ssm
-- | Template content.
type LngTpl_AbstractedString = [(PCS_SpecificMarkings, Lazy.ByteString, MarkedChunkLength)]
-- | Extract the names of all parameter placeholders occurring in a
-- template's content. Names are returned in reverse order of occurrence.
listOfParams :: LngTpl_AbstractedString -> [ParamName_LBS]
listOfParams pieces =
    reverse [ nm | (marker, nm, _) <- pieces, marker == Parameter_LngTplM ]
------------------------------------------------------------------------
-- * Parsing from a lazy ByteString to a localized template content
type ParserBadResult = String
-- | These errors are possible only if program is wrong.
data PCLT_ParserLowLevelFailure =
UnexpectedParserResult_PLLF_PCLT ParserBadResult
| BadMarker_PLLF_PCLT SeparatedSectorMarker Lazy.ByteString ChunkIndexInList_
deriving (Show, Typeable)
-- | The parsing uses parameters
-- 'Test.PCLT.Config.pcsParameterPlaceholderWrapper' and
-- 'Test.PCLT.Config.pcsCompositePlaceholderWrapper' of
-- 'Test.PCLT.Config.PCLT_InnerConfig'.
-- The list @[PCLT_CompositeKey]@ in the result is a list of composite keys
-- (template IDs, used by template as inclusions)
doTheParse :: PCLT_InnerConfig
-> Lazy.ByteString
-> ( [PCLT_ParserLowLevelFailure], Maybe ( LngTpl_AbstractedString, [PCLT_CompositeKey] ))
doTheParse pcsc_config str =
let parser = sepBySome
anyChar
standardMarkingStrategy
[ stringLBS $ pcsParameterPlaceholderWrapper pcsc_config
, stringLBS $ pcsCompositePlaceholderWrapper pcsc_config
]
in case parse parser str of
( IllegalInput , _ ) -> ([UnexpectedParserResult_PLLF_PCLT "IllegalInput"], Nothing)
( ReachedEOF , _ ) -> ([UnexpectedParserResult_PLLF_PCLT "ReachedEOF"] , Nothing)
( Success marked_chunks_list, _ ) ->
let _fixed_marked_chunks_list = standardMarkingStrategyFix_StripEmptyChunks marked_chunks_list
list_of_parser_errors = map (\ (ssm, s, idx) -> BadMarker_PLLF_PCLT ssm s idx) $ retrieveErrorsMarkingsList _fixed_marked_chunks_list
non_plain_markings_map = retrieveNonPlainMarkingsMap _fixed_marked_chunks_list
fixed_marked_chunks_list = map (\ (ssm, str, len) -> (ssm2ldtm ssm, str, len)) _fixed_marked_chunks_list
list_of_composites_keys = map B.unpack $ fst $ unzip $ getListOfMarkings non_plain_markings_map 2
in (list_of_parser_errors, Just (fixed_marked_chunks_list, list_of_composites_keys))
------------------------------------------------------------------------
-- * Localized template
type PCLT_CatalogMap = Map PCLT_ID LocalizableTemplate
type LngTpl_SubCompositesMap = PCLT_CatalogMap
data LocalizedTemplate =
LocalizedTemplate {
ldtAbstractedString :: LngTpl_AbstractedString
-- | Each composition tree is kept together with each
-- localization. This is done for speedup and is a source
-- of complexities, when forming a catalog and sustaining it's
-- data consistency. So it comes to this:
-- templates are purely-referenced by
--
-- * catalog ('PCLT_CatalogMap') and
--
-- * templates, that uses them as composites
-- ('LngTpl_SubCompositesMap').
--
-- By \"purely-reference\" here is meant, that templates are
-- formed only once, they have one instace in memory, but
-- are referenced twice - from composeds and from catalog map
, ldtSubcompositesMap :: LngTpl_SubCompositesMap
}
deriving (Show, Typeable)
-- * Text.PCLT.Config.pcsStrictOrient_ofParamsAndCmpsts_onDfltLngTplsSets
type DefaultLngTpl = LocalizedTemplate
type NondefaultLngTpl = LocalizedTemplate
-- | Check whether a non-default-language template uses the same sets of
-- parameters and subcomposites as the default-language template, subject
-- to the strict-orientation policy. See the description of
-- 'Text.PCLT.Config.StrictOrient_ofParamsAndCmpsts_onDfltLngTplsSets'.
compareStrictOrientationOnDefault :: PCLT_ID -> StrictOrient_ofParamsAndCmpsts_onDfltLngTplsSets -> NondefaultLngTpl -> DefaultLngTpl -> Bool
compareStrictOrientationOnDefault tpl_id so nondflt_ldt dflt_ldt =
let ( dflt_subcomps , dflt_params ) = (fst . unzip . M.toList . ldtSubcompositesMap, listOfParams . ldtAbstractedString) `apFrom2ple` dflt_ldt
(nondflt_subcomps , nondflt_params ) = (fst . unzip . M.toList . ldtSubcompositesMap, listOfParams . ldtAbstractedString) `apFrom2ple` nondflt_ldt
in _compareStrictOrientationOnDefault tpl_id so (nondflt_subcomps, nondflt_params) (dflt_subcomps, dflt_params)
-- | Core of the strict-orientation check. Given the (subcomposite IDs,
-- parameter names) of the non-default and default localizations, decides
-- whether they agree after applying the exclusion lists from the config.
--
-- The check passes only when the filtered default and non-default sets
-- are equal as sets: the union must have the same length as each side.
_compareStrictOrientationOnDefault :: PCLT_ID -> StrictOrient_ofParamsAndCmpsts_onDfltLngTplsSets -> ([PCLT_ID], [ParamName_LBS]) -> ([PCLT_ID], [ParamName_LBS]) -> Bool
_compareStrictOrientationOnDefault tpl_id so (nondflt_subcomps, nondflt_params) (dflt_subcomps, dflt_params) =
        -- crit is True when this template is subject to the strict check:
        -- either strict mode is on and the template is not excluded, or
        -- strict mode is off but the template is explicitly listed.
        let memb = elem tpl_id $ soExcludingInComposites so
            crit1 = soStrict_IsIt so && (not memb)
            crit2 = (not $ soStrict_IsIt so) && memb
            crit  = crit1 || crit2
            -- Composite exclusions: global ones plus those registered for
            -- this specific template ID.
            local_c_exclusions = soExcludingComposites so ++ (snd $ unzip $ filter
                                                                       (\ (_tpl_id, _) -> _tpl_id == tpl_id)
                                                                       (soExcludingCompComposites so)
                                                             )
            -- Parameter exclusions, packed to match ParamName_LBS.
            local_p_exclusions =
                    map
                        B.pack
                        (  soExcludingParameters so ++ (snd $ unzip $ filter
                                                                  (\ (_tpl_id, _) -> _tpl_id == tpl_id)
                                                                  (soExcludingCompParameters so)
                                                       ) )
            -- When the check applies, exclusions are removed (\\);
            -- otherwise the comparison is restricted to the exclusions.
            op :: Eq a => [a] -> [a] -> [a]
            op = case crit of
                     True  -> (\\)
                     False -> intersect
            (    so_dflt_subcomps ,    so_dflt_params ) = (    dflt_subcomps `op` local_c_exclusions,    dflt_params `op` local_p_exclusions)
            ( so_nondflt_subcomps , so_nondflt_params ) = ( nondflt_subcomps `op` local_c_exclusions, nondflt_params `op` local_p_exclusions)
            (   so_union_subcomps ,   so_union_params ) = ( so_dflt_subcomps `union` so_nondflt_subcomps, so_dflt_params `union` so_nondflt_params)
            (so_nondflt_subcomps_len, so_nondflt_params_len) = (length so_nondflt_subcomps, length so_nondflt_params)
            (   so_dflt_subcomps_len,    so_dflt_params_len) = (length so_dflt_subcomps, length so_dflt_params)
            (  so_union_subcomps_len,   so_union_params_len) = (length so_union_subcomps, length so_union_params)
         in so_union_subcomps_len ==    so_dflt_subcomps_len && so_union_params_len ==    so_dflt_params_len
         && so_union_subcomps_len == so_nondflt_subcomps_len && so_union_params_len == so_nondflt_params_len
------------------------------------------------------------------------
-- * Requirement for making a representation from template - SDL
-- | This is an extending wrapper around SDL. It is used for specification
-- of requirement for making representation from template. This specification
-- is attached to every localizable template in PCLT catalog
data PCLT_ShowDetalizationLevel =
          -- | Plain SDL, nominal. If SDL of representation reciever
          -- is less then that, then template cann't be used in representation
          -- generation.
          PCLT_SDL ShowDetalizationLevel
          -- | \"The requirement is the same as is specified
          -- for referenced template\".
        | PCLT_SDL_ToTemplateLink PCLT_ID
          -- | \"The requirement is the same as is specified a for referenced
          -- template, which is referenced by a @PCSI_PV@ value of referenced
          -- parameter (of current template)\".
        | PCLT_SDL_ToParamCompositeLink PCLT_ParamKey
          -- | In input data for catalog formation the given specification
          -- is errornous.
          -- If config's ("Text.PCLT.Config") parameters
          -- 'Text.PCLT.Config.pcsAllowEmptySDL_parseItByModusMargin' and/or
          -- 'Text.PCLT.Config.pcsAllowUnreadableSDL_parseIdByModusMargin' are
          -- positive, then instead of @PCLT_SDL_Errornous@ the parser
          -- ('str2PCLT_SDL') will use 'Text.PCLT.SDL.marginOfSDLModus' to set
          -- valid specification. When representation generator meets
          -- @PCLT_SDL_Errornous@ it won't use template, and return an error.
        | PCLT_SDL_Errornous PCLT_ErrornousSDL
        deriving (Eq, Show, Typeable)
-- | Alias used where the SDL is a requirement attached to a template.
type PCLT_RequiredShowDetalizationLevel = PCLT_ShowDetalizationLevel
-- | Alias used where the SDL is allocated to a receiver.
type PCLT_AllocatedShowDetalizationLevel = PCLT_ShowDetalizationLevel
-- | Raw templates (both localizeds, and localizables).
-- Input data for catalog formation. Used by @HasStaticRawPCLTs@ class
-- (declared in "Text.PCLT.HasStaticRawPCLTs")
--
-- For each template ID: a map of per-language raw template bodies,
-- together with the SDL requirement for the template.
data PCLT_RawCatalogData = PCLT_RawCatalogData (Map PCLT_ID (Map LanguageName Lazy.ByteString, PCLT_RequiredShowDetalizationLevel)) deriving (Show, Typeable)
-- | This is a bad result of parsing some 'String'
-- into 'PCLT_ShowDetalizationLevel'. The second argument is this bad input.
data PCLT_ErrornousSDL = UnreadableSDL_ESDL SDLModus String deriving (Eq, Show, Typeable)
-- | A constant currently set to 25. It is used in a parser 'str2PCLT_SDL':
-- if the input is errornous, this much symbols of input are saved
-- in 'UnreadableSDL_ESDL'. If input is bigger, then the saved trunc is
-- tailed with \"...\"
__const_esdl_rawinputshowsize_inShowAsPCSI :: Int
__const_esdl_rawinputshowsize_inShowAsPCSI = 25
-- | Parse 'String' into 'PCLT_ShowDetalizationLevel'. First of all parser
-- tries 'Text.PCLT.SDL.strict_str2sdl'. Then, if failed, parser uses following
-- config entries:
--
-- * 'Text.PCLT.Config.pcsParameterPlaceholderWrapper' -
-- if prefix and postfix of input is this (by default it is \"\@\@\|\"),
-- then it is parsed into 'PCLT_SDL_ToParamCompositeLink'
--
-- * 'Text.PCLT.Config.pcsCompositePlaceholderWrapper' -
-- if prefix and postfix of input is this (by default it is \"\#\#\|\"),
-- then it is parsed into 'PCLT_SDL_ToTemplateLink'
--
-- * 'Text.PCLT.Config.pcsAllowEmptySDL_parseItByModusMargin' -
-- if it is positive and input is empty, then it gets parsed into
-- (@PCLT_SDL $ 'Text.PCLT.SDL.marginOfSDLModus' modus@), where modus
-- is first argument; esle, if parameter is negative and input is empty,
-- it is parsed to 'PCLT_SDL_Errornous'
--
-- * 'Text.PCLT.Config.pcsAllowUnreadableSDL_parseIdByModusMargin' -
-- if it is positive and input is unparsable, then it gets parsed into
-- @'PCLT_SDL' $ 'Text.PCLT.SDL.marginOfSDLModus' modus@, where modus
-- is first argument; esle, if parameter is negative and input is unparsable,
-- it is parsed to 'PCLT_SDL_Errornous'
str2PCLT_SDL :: SDLModus -> String -> PCLT_InnerConfig -> PCLT_ShowDetalizationLevel
str2PCLT_SDL sdlm s cfg =
        let cmpst_phw_str = B.unpack $ pcsCompositePlaceholderWrapper cfg
            param_phw_str = B.unpack $ pcsParameterPlaceholderWrapper cfg
            -- Drop the wrapper from both ends of subj0 (wrapper length
            -- is assumed to be the same on both sides).
            stripOfWrapper subj0 wrp =
                let wrp_len   = length wrp
                    subj1     = drop wrp_len subj0
                    subj1_len = length subj1
                 in take (subj1_len - wrp_len) subj1
         in case strict_str2sdl s of
                Just n -> PCLT_SDL n
                Nothing ->
                   case isPrefixOf cmpst_phw_str s && isSuffixOf cmpst_phw_str s of
                       True -> PCLT_SDL_ToTemplateLink (stripOfWrapper s cmpst_phw_str)
                       False -> case isPrefixOf param_phw_str s && isSuffixOf param_phw_str s of
                           True -> PCLT_SDL_ToParamCompositeLink (stripOfWrapper s param_phw_str)
                           False ->
                                -- NOTE(review): cond2 does not require the
                                -- input to be nonempty, so an empty input is
                                -- also accepted when only the "unreadable"
                                -- allowance is positive — confirm intended.
                                let cond1 = pcsAllowEmptySDL_parseItByModusMargin cfg && null s
                                    cond2 = pcsAllowUnreadableSDL_parseIdByModusMargin cfg
                                    cond  = cond1 || cond2
                                 in case cond of
                                        True  -> PCLT_SDL $ marginOfSDLModus sdlm
                                        False -> PCLT_SDL_Errornous $ UnreadableSDL_ESDL sdlm $ truncLiterary s __const_esdl_rawinputshowsize_inShowAsPCSI
---------------------------------------------------------------------------------
-- * Localizable template
-- | A template together with all of its localizations and the SDL
-- requirement that gates its use in representation generation.
data LocalizableTemplate =
        LocalizableTemplate {
              pcltLocalizationsMap :: Map LanguageName LocalizedTemplate
                -- | If SDL of representation reciever
                -- is less then that, then template cann't be used in representation
                -- generation.
            , pcltRequiredSDL      :: PCLT_RequiredShowDetalizationLevel
        }
        deriving (Show, Typeable)
| Andrey-Sisoyev/haskell-PCLT | Text/PCLT/Template.hs | lgpl-2.1 | 15,195 | 0 | 23 | 4,035 | 1,931 | 1,106 | 825 | 137 | 6 |
-- | Reverse a list in O(n) using an accumulator.
--
-- The previous definition (@reverse' xs ++ [x]@) was O(n^2), because
-- each recursive step appended to the end of the already-reversed list.
reverse' :: [a] -> [a]
reverse' = go []
  where
    -- Invariant: go acc xs == reverse xs ++ acc
    go acc []     = acc
    go acc (x:xs) = go (x:acc) xs
| EricYT/Haskell | src/reverse.hs | apache-2.0 | 80 | 0 | 6 | 19 | 58 | 29 | 29 | 3 | 1 |
module Main where
import Options
import Harlson
-- | Entry point: parse the command-line options, then start the server
-- with them.
main :: IO ()
main = do
    opts <- progOpts
    runHarlson opts
| EchoTeam/harlson | Main.hs | bsd-2-clause | 96 | 0 | 6 | 19 | 29 | 17 | 12 | 5 | 1 |
{-# LANGUAGE TypeFamilies, QuasiQuotes, MultiParamTypeClasses,
TemplateHaskell, OverloadedStrings #-}
import Yesod
-- | The foundation type for this (stateless) Yesod site.
data MyWebSite = MyWebSite
-- Default Yesod settings are sufficient here.
instance Yesod MyWebSite
-- Template-Haskell route declaration: a single home route at "/".
mkYesod "MyWebSite" [parseRoutes|
/ HomeR GET
|]
-- | Home page handler: renders a static to-do list and attaches a
-- page-wide background style.
getHomeR = defaultLayout $ do
    [whamlet|
        <h2> Things To Do
        <ul>
            <li> Learn Haskell
            <li> Write a killer app
            <li> Create a startup
            <li> Go public
    |]
    toWidget [cassius|
        body
            background-color: #edf
    |]

-- | Run the site with Warp, taking the port from the environment.
-- (In the original source, this definition was fused onto the closing
-- quasiquote line, which is not valid layout.)
main = warpEnv MyWebSite
{-# OPTIONS_GHC -W #-}
module Main where
import Control.Monad (foldM)
import qualified Data.Maybe as Maybe
import Text.Blaze.Html.Renderer.String (renderHtml)
import qualified Data.ByteString.Lazy.Char8 as BS
import qualified System.Console.CmdArgs as CmdArgs
import System.Directory
import System.FilePath
import GHC.Conc
import Build.Dependencies (getSortedDependencies)
import qualified Generate.Html as Html
import qualified Metadata.Prelude as Prelude
import qualified Build.Utils as Utils
import qualified Build.Flags as Flag
import qualified Build.File as File
import qualified Elm.Internal.Paths as Path
main :: IO ()
-- | Use all available processor cores, then parse the command-line
-- flags and dispatch on them.
main = do setNumCapabilities =<< getNumProcessors
          compileArgs =<< CmdArgs.cmdArgs Flag.flags
-- | Compile every file named in the flags; print usage when none given.
compileArgs :: Flag.Flags -> IO ()
compileArgs flags =
    case Flag.files flags of
      [] -> putStrLn "Usage: elm [OPTIONS] [FILES]\nFor more help: elm --help"
      fs -> mapM_ (build flags) fs
-- | Compile one root file: resolve dependencies (unless --make is off),
-- build each module, concatenate the generated JavaScript, and write
-- either a .js file or a full .html page depending on the flags.
build :: Flag.Flags -> FilePath -> IO ()
build flags rootFile =
    do let noPrelude = Flag.no_prelude flags
       builtIns <- Prelude.interfaces noPrelude
       -- With --make, compile the whole sorted dependency graph;
       -- otherwise just the single root file.
       files <- if Flag.make flags
                  then getSortedDependencies (Flag.src_dir flags) builtIns rootFile
                  else return [rootFile]
       moduleName <- File.build flags builtIns files
       -- Concatenate the .elmo output of every compiled file.
       js <- foldM appendToOutput BS.empty files
       (extension, code) <-
           if Flag.only_js flags
             then do putStr "Generating JavaScript ... "
                     return ("js", js)
             else do putStr "Generating HTML ... "
                     return (makeHtml js moduleName)
       let targetFile = Utils.buildPath flags rootFile extension
       createDirectoryIfMissing True (takeDirectory targetFile)
       BS.writeFile targetFile code
       putStrLn "Done"
    where
      -- Prepend previously accumulated JS after this file's output.
      appendToOutput :: BS.ByteString -> FilePath -> IO BS.ByteString
      appendToOutput js filePath = do
        src <- BS.readFile (Utils.elmo flags filePath)
        return (BS.append src js)
      -- Script tags for extra user scripts plus the generated source.
      sources js = map Html.Link (Flag.scripts flags) ++ [ Html.Source js ]
      makeHtml js moduleName = ("html", BS.pack $ renderHtml html)
        where
          -- Use the flag-supplied runtime path, else the default.
          rtsPath = Maybe.fromMaybe Path.runtime (Flag.runtime flags)
          html = Html.generate rtsPath (takeBaseName rootFile) (sources js) moduleName ""
| deadfoxygrandpa/Elm | compiler/Compiler.hs | bsd-3-clause | 2,326 | 0 | 13 | 551 | 651 | 338 | 313 | 52 | 3 |
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
-------------------------------------------------------------------------------
-- |
-- Module : Yesod.Comments
-- Copyright : (c) Patrick Brisbin 2010
-- License : as-is
-- Maintainer : pbrisbin@gmail.com
-- Stability : unstable
-- Portability : unportable
--
-- A generic Comments interface for a Yesod application.
--
-------------------------------------------------------------------------------
module Yesod.Comments
( addComments
, module Yesod.Comments.Core
) where
import Yesod
import Yesod.Comments.Core
import Yesod.Comments.Utils
import Yesod.Comments.Form
import Yesod.Comments.View
-- | Render the comment thread with the given ID: loads the stored
-- comments, looks up the current user's details (for the reply form),
-- and embeds the listing plus the form in a @.yesod_comments@ div.
addComments :: (RenderMessage m FormMessage, YesodComments m) => ThreadId -> WidgetT m IO ()
addComments thread = do
    comments <- handlerToWidget $ csLoad commentStorage (Just thread)
    mudetails <- handlerToWidget $ currentUserDetails

    [whamlet|
        <div .yesod_comments>
            ^{showComments comments}
            ^{runForm thread mudetails}
    |]
| pbrisbin/yesod-comments | Yesod/Comments.hs | bsd-3-clause | 1,107 | 0 | 11 | 205 | 143 | 87 | 56 | 16 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeOperators #-}
module Search.Site where
import Servant
import Text.Blaze.Html5 hiding (map)
import Database.Persist.Postgresql
import Control.Monad (liftM)
import AppM
import Area.Types
import Blc.Types
import Search.Types
import Search.API
import Search.Views
-- | The search sub-site consists of the single 'search' handler.
searchSite :: ServerT SearchSite AppM
searchSite = search
-- | Search by exact name across areas and blcs. With no query parameter
-- the result list is empty; otherwise both tables are queried and the
-- hits are concatenated (areas first).
-- NOTE(review): @pack@ here presumably converts a persistent Entity into
-- the site's search-result type — confirm against Search.Types.
search :: Maybe String -> AppM Html
search mpname = do
  results <- case mpname of
    Nothing -> return []
    Just pname -> do
      areas <- liftM (map pack) $ runDb $ selectList [AreaName ==. pname] []
      blcs <- liftM (map pack) $ runDb $ selectList [BlcName ==. pname] []
      return (concat [areas, blcs])
  return (searchPage mpname results)
| hectorhon/autotrace2 | app/Search/Site.hs | bsd-3-clause | 755 | 0 | 18 | 138 | 245 | 128 | 117 | 25 | 2 |
module HSNTP.Util.Misc (udpQuery, runWithTO', runWithTO, Time, seconds, MayIO) where
import Control.Concurrent
import Control.Exception
import Control.Monad.Error
import Foreign
import Foreign.Ptr
import Network.Socket
import Prelude hiding (catch)
import HSNTP.Util.UDP
-- | Run an action with a fresh UDP socket and an @n@-byte buffer,
-- releasing both afterwards (also on exceptions).
--
-- Uses two nested 'bracket's instead of one combined acquire step: with
-- the original @liftM2 (,) newSock (mallocArray n)@, an exception thrown
-- by 'mallocArray' would leak the already-opened socket.
withUDPBuf :: Int -> ((Socket, BPtr) -> IO a) -> IO a
withUDPBuf n act =
    bracket newSock sClose $ \s ->
        bracket (mallocArray n) free $ \p ->
            act (s, p)
-- | IO computations that can fail with a String error message.
type MayIO = ErrorT String IO
-- | UDP port number.
type Port = Int
-- | Raw byte buffer pointer.
type BPtr = Ptr Word8
-- | A buffer pointer paired with its length in bytes.
type Bufi = (Ptr Word8,Int)
-- | Perform a single UDP request/response exchange with a timeout.
-- @put@ serializes the request into the buffer and returns its length;
-- @get@ parses the reply out of the buffer. The whole exchange runs
-- inside 'runWithTO', and a reply from a different address than the one
-- queried is rejected.
udpQuery :: String -> Port -> Int -> Time -> (Bufi -> MayIO Int) -> (Bufi -> MayIO a) -> MayIO a
udpQuery host port blen time put get = liftIO base >>= reith
    where base = runWithTO time (withUDPBuf blen (\x -> runErrorT (work x)))
          -- Re-throw the Either produced by runErrorT into MayIO.
          reith :: Either String a -> MayIO a
	  reith (Left e) = throwError e
	  reith (Right v)= return v
	  work (sock,ptr)= do len <- put (ptr,blen)
			      sa  <- liftIO $ sockAddr host port
			      liftIO $ sendBufTo sock ptr len sa
			      (len',sa') <- liftIO $ recvBufFrom sock ptr blen
                              -- Guard against spoofed/unrelated replies.
			      when (sa /= sa') $ throwError "Reply from wrong sockAddr"
			      get (ptr,len')
-- | Race an action against a timeout. Whichever of the worker thread
-- and the timer thread finishes first fills the MVar; the other is then
-- killed. Exceptions from the worker are converted to a Left result.
runWithTO :: Time -> IO (Either String b) -> IO (Either String b)
runWithTO (T t) co = do mv <- newEmptyMVar
                        c1 <- forkIO $ (co >>= putMVar mv) `catch` exc mv
                        c2 <- forkIO $ threadDelay t >> putMVar mv (Left "Timeout")
                        val<- takeMVar mv
                        killThread c1; killThread c2
                        return val
    where
      -- Report any exception from the worker as an error string.
      exc :: MVar (Either String b) -> SomeException -> IO ()
      exc mv = \e -> putMVar mv (Left (show e))
-- | Run an action, interrupting it with an asynchronous 'AssertionFailed'
-- exception once the timeout elapses.
--
-- The watchdog thread is started and stopped with 'bracket': in the
-- original code @killThread c@ was only reached on success, so if @co@
-- threw, the watchdog survived and could later deliver a stray timeout
-- exception to this thread while it was doing unrelated work.
runWithTO' :: Time -> IO t -> IO t
runWithTO' (T t) co = do
    mt <- myThreadId
    bracket
        (forkIO $ threadDelay t >> throwTo mt (AssertionFailed "timeout"))
        killThread
        (const co)
-- | A time interval, stored in microseconds (the unit 'threadDelay'
-- expects).
newtype Time = T Int

-- | Build a 'Time' from a whole number of seconds.
seconds :: Int -> Time
seconds secs = T (secs * 1000000)
| creswick/hsntp | HSNTP/Util/Misc.hs | bsd-3-clause | 2,190 | 0 | 14 | 780 | 821 | 412 | 409 | 47 | 2 |
import System.Environment (getEnv, getArgs, getProgName, getExecutablePath,
getEnvironment)
| hecrj/haskell-format | test/specs/import-multiline/input.hs | bsd-3-clause | 94 | 0 | 5 | 9 | 23 | 14 | 9 | 2 | 0 |
{-| The context that several of the Translatable typeclasses use
for compiling. It is used to generate new symbols for temporary
variables, store the mappings from encore variables to c variables
and to keep track of which class we're translating at the
moment. -}
module CodeGen.Context (
Context,
ExecContext(..),
new,
newWithForwarding,
substAdd,
substLkp,
substRem,
genNamedSym,
genSym,
getGlobalFunctionNames,
lookupFunction,
lookupField,
lookupMethod,
lookupCalledType,
setMtdCtx,
setFunCtx,
setClsCtx,
getExecCtx,
isAsyncForward
) where
import Identifiers
import Types
import AST.AST
import Control.Monad.State
import qualified CodeGen.ClassTable as Tbl
import qualified CCode.Main as C
import CodeGen.CCodeNames
-- | Counter used to generate unique temporary-variable names.
type NextSym = Int

type VarSubTable = [(Name, C.CCode C.Lval)] -- variable substitutions (for supporting, for instance, nested var decls)

-- | What is currently being translated; used for error context and
-- code-generation decisions.
data ExecContext =
      FunctionContext{fun :: Function}
    | MethodContext {mdecl :: MethodDecl}
    | ClosureContext {cls :: Expr} -- for checking closure in the future.
    | Empty
      deriving(Show)

-- | Compilation state: variable substitutions, a fresh-name counter,
-- the current execution context, the program table, and whether
-- forwarding is enabled.
data Context = Context {
  varSubTable :: VarSubTable,
  nextSym :: NextSym,
  execContext :: ExecContext,
  programTbl :: Tbl.ProgramTable,
  withForward :: Bool
}
-- | Accessor for the program table of a context.
programTable :: Context -> Tbl.ProgramTable
programTable Context{programTbl} = programTbl

-- | Build a fresh context (no execution context, counter at 0,
-- forwarding disabled).
new :: VarSubTable -> Tbl.ProgramTable -> Context
new subs table = Context {
   varSubTable = subs
  ,nextSym = 0
  ,execContext = Empty
  ,programTbl = table
  ,withForward = False
  }
-- | Like 'new', but with forwarding enabled ('withForward' = True).
-- Type signature added for consistency with 'new' (it was missing).
newWithForwarding :: VarSubTable -> Tbl.ProgramTable -> Context
newWithForwarding subs table = Context {
   varSubTable = subs
  ,nextSym = 0
  ,execContext = Empty
  ,programTbl = table
  ,withForward = True
  }
-- | Whether forwarding is enabled for this compilation.
isAsyncForward :: Context -> Bool
isAsyncForward Context{withForward} = withForward

-- | Generate a fresh C-level symbol based on @name@: primes are
-- rewritten (via 'fixPrimes') and a unique counter suffix is appended.
genNamedSym :: String -> State Context String
genNamedSym name = do
  let (_, name') = fixPrimes name
  c <- get
  case c of
    ctx@Context{nextSym} ->
        do put $ ctx{nextSym = nextSym + 1}
           return $ "_" ++ name' ++ "_" ++ show nextSym

-- | Generate an anonymous fresh symbol.
genSym :: State Context String
genSym = genNamedSym "tmp"

-- | Prepend a variable substitution (most recent binding shadows).
substAdd :: Context -> Name -> C.CCode C.Lval -> Context
substAdd ctx@Context{varSubTable} na lv = ctx{varSubTable = ((na,lv):varSubTable)}
-- | Remove the most recent substitution for a name (only the first
-- match is dropped, so an outer shadowed binding becomes visible again).
substRem :: Context -> Name -> Context
substRem ctx@Context{varSubTable = []} na = ctx
substRem ctx@Context{varSubTable = ((na, lv):s)} na'
     | na == na'  = ctx{varSubTable = s}
     | na /= na'  = substAdd (substRem ctx{varSubTable = s} na') na lv

-- | Look up the C lvalue for a (possibly qualified) name; qualified
-- names resolve only when their namespace is empty.
substLkp :: Context -> QualifiedName -> Maybe (C.CCode C.Lval)
substLkp ctx@Context{varSubTable} QName{qnspace = Nothing, qnlocal} = lookup qnlocal varSubTable
substLkp ctx@Context{varSubTable} QName{qnspace = Just ns, qnlocal}
  | isEmptyNamespace ns = lookup qnlocal varSubTable
  | otherwise = Nothing
-- | Replace the execution context wholesale.
setExecCtx :: Context -> ExecContext -> Context
setExecCtx ctx execContext = ctx{execContext}

-- | Enter a function context.
setFunCtx :: Context -> Function -> Context
setFunCtx ctx fun = ctx{execContext = FunctionContext{fun}}

-- | Enter a method context.
setMtdCtx :: Context -> MethodDecl -> Context
setMtdCtx ctx mdecl = ctx{execContext = MethodContext{mdecl}}

-- | Enter a closure context.
setClsCtx :: Context -> Expr -> Context
setClsCtx ctx cls = ctx{execContext = ClosureContext{cls}}

-- | Read the current execution context.
getExecCtx :: Context -> ExecContext
getExecCtx ctx@Context{execContext} = execContext

-- The lookups below simply delegate to the program table.
lookupField :: Type -> Name -> Context -> FieldDecl
lookupField ty f = Tbl.lookupField ty f . programTable

lookupMethod :: Type -> Name -> Context -> FunctionHeader
lookupMethod ty m = Tbl.lookupMethod ty m . programTable

lookupCalledType :: Type -> Name -> Context -> Type
lookupCalledType ty m = Tbl.lookupCalledType ty m . programTable

lookupFunction :: QualifiedName -> Context -> (C.CCode C.Name, FunctionHeader)
lookupFunction fname = Tbl.lookupFunction fname . programTable

getGlobalFunctionNames :: Context -> [QualifiedName]
getGlobalFunctionNames = Tbl.getGlobalFunctionNames . programTable
| parapluu/encore | src/back/CodeGen/Context.hs | bsd-3-clause | 3,929 | 0 | 15 | 709 | 1,155 | 638 | 517 | -1 | -1 |
{-# LANGUAGE ExistentialQuantification, ScopedTypeVariables, PatternGuards, FlexibleContexts, CPP #-}
module Simple
( Task (..), TaskChan
, startGHCiServer
, restartGHCiServer
, sendToServer
, catchError_fixed
, Interpreter, typeOf, kindOf
, InterpreterError (..), errMsg, interpret
, as, liftIO, parens
) where
import Logger
import Language.Haskell.Interpreter
import Control.Concurrent (forkIO)
import Control.Concurrent.MVar (MVar, newEmptyMVar, takeMVar, putMVar)
import Control.Concurrent.Chan (Chan, newChan, readChan, writeChan)
import Control.Exception (SomeException, catch)
import Control.Monad (when, forever)
import Control.Monad.Error (MonadError, catchError)
import Data.List (isPrefixOf)
#if !MIN_VERSION_base(4,6,0)
import Prelude hiding (catch)
#endif
-------------------------
-- | A unit of work for the interpreter: the file to load, an MVar to
-- deliver the (possibly failed) result into, and the interpreter action
-- itself. The result type is existentially hidden.
data Task
    = forall a. Task FilePath (MVar (Either InterpreterError a)) (Interpreter a)

-- | Channel of tasks for the server loop; 'Nothing' requests a restart.
newtype TaskChan
    = TC (Chan (Maybe Task))
---------------
-- | Fork a server loop that owns a (re-startable) GHCi interpreter and
-- serves 'Task's from the returned channel. If the interpreter dies,
-- the loop starts a fresh one.
startGHCiServer :: [String] -> Logger -> IO TaskChan
startGHCiServer paths{-searchpaths-} log = do
    ch <- newChan
    _ <- forkIO $ forever $ do
        logStrMsg 1 log "start interpreter"
        e <- runInterpreter (handleTask ch Nothing)
              `catch` \(e :: SomeException) ->
                  return $ Left $ UnknownError "GHCi server died."
        case e of
          Left e -> logStrMsg 0 log $ "stop interpreter: " ++ show e
          Right () -> return ()
    return $ TC ch
  where
    -- Blocks on the channel; Nothing stops this interpreter session.
    handleTask :: Chan (Maybe Task) -> Maybe FilePath -> Interpreter ()
    handleTask ch oldFn = do
        task <- lift $ readChan ch
        case task of
          Just task -> handleTask_ ch oldFn task
          Nothing -> liftIO $ logStrMsg 0 log "interpreter stopped intentionally"

    -- Runs one task; reloads modules only when the file changed since
    -- the previous task. Continues the loop unless the error was fatal.
    handleTask_ ch oldFn (Task fn repVar m) = do
        (cont, res) <- do
            when (oldFn /= Just fn) $ do
                reset
                set [searchPath := paths]
                loadModules [fn]
                setTopLevelModules ["Main"]
            x <- m
            return (True, Right x)
          `catchError_fixed` \er ->
            return (not $ fatal er, Left er)

        lift $ putMVar repVar res
        -- Remember the loaded file only on success, so a failed load is
        -- retried from scratch next time.
        when cont $ handleTask ch $ case res of
            Right _ -> Just fn
            Left _ -> Nothing
-- | Ask the server loop to discard its current interpreter session.
restartGHCiServer :: TaskChan -> IO ()
restartGHCiServer (TC ch) = writeChan ch Nothing

-- | Submit an interpreter action for a file and block until its result
-- arrives.
sendToServer :: TaskChan -> FilePath -> Interpreter a -> IO (Either InterpreterError a)
sendToServer (TC ch) fn m = do
    rep <- newEmptyMVar
    writeChan ch $ Just $ Task fn rep m
    takeMVar rep
-- | Decide whether an interpreter error must terminate the current
-- session. Compile failures and not-allowed errors are recoverable;
-- every other error is fatal.
fatal :: InterpreterError -> Bool
fatal err =
    case err of
        WontCompile _ -> False
        NotAllowed _  -> False
        _             -> True
-- | Like 'catchError', but first repairs a hint quirk: a compile error
-- that arrives wrapped as an 'UnknownError' with a "GHC returned a
-- result but said: [GhcError {errMsg = ..." payload is rebuilt into a
-- proper 'WontCompile' before the handler sees it.
catchError_fixed
    :: MonadError InterpreterError m
    => m a -> (InterpreterError -> m a) -> m a
m `catchError_fixed` f = m `catchError` (f . fixError)
  where
    fixError (UnknownError s)
        -- Recover the original message via 'reads'; if that fails, fall
        -- back to the raw payload string.
        | Just x <- dropPrefix "GHC returned a result but said: [GhcError {errMsg =" s
        = WontCompile [GhcError {errMsg = case reads x of ((y,_):_) -> y; _ -> s}]
    fixError x = x
-- | @dropPrefix p xs@ removes the prefix @p@ from @xs@, yielding the
-- remainder, or 'Nothing' when @p@ is not a prefix of @xs@
-- (same contract as 'Data.List.stripPrefix').
dropPrefix :: Eq a => [a] -> [a] -> Maybe [a]
dropPrefix [] ys = Just ys
dropPrefix (x:xs) (y:ys)
    | x == y = dropPrefix xs ys
dropPrefix _ _ = Nothing
| divipp/ActiveHs | Simple.hs | bsd-3-clause | 3,276 | 1 | 19 | 897 | 1,091 | 559 | 532 | 80 | 4 |
------------------------------------------------------------------------------
-- |
-- Maintainer : Joost Visser
-- Stability : experimental
-- Portability : portable
--
-- This module is part of 'Sdf2Haskell', a tool for generating Haskell
-- code from an SDF grammar. This module contains helper functions
-- for the conversion from Haskell to SDF.
------------------------------------------------------------------------------
module Sdf2HaskellUtils where
import SdfLib
import HaskellLib
import Data.ATerm.Lib (dehyphen)
------------------------------------------------------------------------------
-- | Type synonym for functions that create a Haskell declaration
-- from a type name, a constructor name, and a list of Sdf symbols.
-- | Type synonym for functions that create a Haskell declaration
-- from a type name, a constructor name, and a list of Sdf symbols.
type MkDecl
  = HsName -> String -> [Symbol] -> HsDecl

-- | Convert a context-free SDF production to a Haskell declaration.
-- Yields 'Nothing' for productions without a cons attribute, with a
-- non-sort result symbol, or marked reject/bracket.
production2decl :: MkDecl -> Production -> Maybe HsDecl
production2decl mkDecl prod
  = case (getConsAttr (getAttributes prod),
          sort2hsname (getSort prod),
          isRejectOrBracket prod)
      of
      (Just str,Just hsname,False)
         -> Just (mkDecl hsname str (getSyms prod))
      _ -> Nothing
-- | Convert an Sdf sort into a Haskell name
-- (hyphens removed; non-sorts yield 'Nothing').
sort2hsname :: Symbol -> Maybe HsName
sort2hsname (Sdf_sort str) = Just (HsIdent (dehyphen str))
sort2hsname _ = Nothing

-- | Convert an Sdf sort into a String
-- (hyphens removed; non-sorts yield 'Nothing').
sort2string :: Symbol -> Maybe String
sort2string (Sdf_sort str) = Just $ dehyphen str
sort2string _ = Nothing
------------------------------------------------------------------------------
-- * General utilities
-- | An infinite supply of fresh variable names: @"_0", "_1", "_2", ...@.
variables :: [String]
variables
  = [ '_' : show n | n <- [(0 :: Integer) ..] ]
-- | Remove the 'Nothing's and unwrap the 'Just's, preserving order
-- (equivalent to 'Data.Maybe.catMaybes').
justFilter :: [Maybe a] -> [a]
justFilter []             = []
justFilter (Just x : ms)  = x : justFilter ms
justFilter (Nothing : ms) = justFilter ms
-- | Split a string on a separator character. Runs of consecutive
-- separators are collapsed, a leading separator yields one empty first
-- chunk, and trailing separators produce no empty chunk.
splitBy :: Char -> String -> [String]
splitBy _ [] = []
splitBy sep str = chunk : splitBy sep (dropWhile (== sep) rest)
  where
    (chunk, rest) = break (== sep) str
-----------------------------------------------------------------------------
| jkoppel/Strafunski-Sdf2Haskell | generator/Sdf2HaskellUtils.hs | bsd-3-clause | 2,206 | 0 | 12 | 413 | 445 | 246 | 199 | 31 | 2 |
module TwentyFive where
import System.Random
import Data.List
--Generate a random permutation of the elements of a list.
-- NOTE(review): the StdGen seed is a hard-coded constant, so this
-- function is deterministic — the "random" permutation is always the
-- same one for a given input. It also materialises the full
-- permutations list to take its length, which grows factorially with
-- the input size — presumably only intended for short lists.
rndPermu :: [a] -> [a]
rndPermu lst = let permu = permutations lst
               in permu !! (fst (randomR (0,(length permu) -1) (mkStdGen 9127401298374)))
| michael-j-clark/hjs99 | src/21to30/TwentyFive.hs | bsd-3-clause | 272 | 0 | 15 | 49 | 92 | 50 | 42 | 6 | 1 |
-- #hide
-----------------------------------------------------------------------------
-- |
-- Module : Language.Haskell.ParseUtils
-- Copyright : (c) The GHC Team, 1997-2000
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : experimental
-- Portability : portable
--
-- Utilities for the Haskell parser.
--
-----------------------------------------------------------------------------
module Language.Haskell.ParseUtils (
splitTyConApp -- HsType -> P (HsName,[HsType])
, mkRecConstrOrUpdate -- HsExp -> [HsFieldUpdate] -> P HsExp
, checkPrec -- Integer -> P Int
, checkContext -- HsType -> P HsContext
, checkAssertion -- HsType -> P HsAsst
, checkDataHeader -- HsQualType -> P (HsContext,HsName,[HsName])
, checkClassHeader -- HsQualType -> P (HsContext,HsName,[HsName])
, checkInstHeader -- HsQualType -> P (HsContext,HsQName,[HsType])
, checkPattern -- HsExp -> P HsPat
, checkExpr -- HsExp -> P HsExp
, checkValDef -- SrcLoc -> HsExp -> HsRhs -> [HsDecl] -> P HsDecl
, checkClassBody -- [HsDecl] -> P [HsDecl]
, checkUnQual -- HsQName -> P HsName
, checkRevDecls -- [HsDecl] -> P [HsDecl]
) where
import Language.Haskell.Syntax
import Language.Haskell.ParseMonad
import Language.Haskell.Pretty
-- | Split a type application into its head type constructor and the
-- list of argument types; fails on anything that is not an unqualified
-- constructor applied to arguments.
splitTyConApp :: HsType -> P (HsName,[HsType])
splitTyConApp t0 = split t0 []
 where
	-- Accumulate argument types right-to-left while walking down
	-- the application spine.
	split :: HsType -> [HsType] -> P (HsName,[HsType])
	split (HsTyApp t u) ts = split t (u:ts)
	split (HsTyCon (UnQual t)) ts = return (t,ts)
	split _ _ = fail "Illegal data/newtype declaration"
-----------------------------------------------------------------------------
-- Various Syntactic Checks
-- | Re-interpret a parsed type as a class context: a tuple becomes a
-- list of assertions, anything else a singleton assertion.
checkContext :: HsType -> P HsContext
checkContext (HsTyTuple ts) =
	mapM checkAssertion ts
checkContext t = do
	c <- checkAssertion t
	return [c]

-- Changed for multi-parameter type classes
-- | Re-interpret a type as a single class assertion: the head must be
-- a type constructor; its arguments are collected in order.
checkAssertion :: HsType -> P HsAsst
checkAssertion = checkAssertion' []
	where	checkAssertion' ts (HsTyCon c) = return (c,ts)
		checkAssertion' ts (HsTyApp a t) = checkAssertion' (t:ts) a
		checkAssertion' _ _ = fail "Illegal class assertion"
-- | Validate a data/newtype header: context, type name, type variables.
checkDataHeader :: HsQualType -> P (HsContext,HsName,[HsName])
checkDataHeader (HsQualType cs t) = do
	(c,ts) <- checkSimple "data/newtype" t []
	return (cs,c,ts)

-- | Validate a class header: context, class name, type variables.
checkClassHeader :: HsQualType -> P (HsContext,HsName,[HsName])
checkClassHeader (HsQualType cs t) = do
	(c,ts) <- checkSimple "class" t []
	return (cs,c,ts)

-- | The head must be an unqualified constructor applied only to type
-- variables; @kw@ names the construct for the error message.
checkSimple :: String -> HsType -> [HsName] -> P ((HsName,[HsName]))
checkSimple kw (HsTyApp l (HsTyVar a)) xs = checkSimple kw l (a:xs)
checkSimple _kw (HsTyCon (UnQual t))   xs = return (t,xs)
checkSimple kw _ _ = fail ("Illegal " ++ kw ++ " declaration")

-- | Validate an instance header: context, class name, instance types.
checkInstHeader :: HsQualType -> P (HsContext,HsQName,[HsType])
checkInstHeader (HsQualType cs t) = do
	(c,ts) <- checkInsts t []
	return (cs,c,ts)

-- | Unlike 'checkSimple', instance heads may be applied to arbitrary
-- types, and the class name may be qualified.
checkInsts :: HsType -> [HsType] -> P ((HsQName,[HsType]))
checkInsts (HsTyApp l t) ts = checkInsts l (t:ts)
checkInsts (HsTyCon c)   ts = return (c,ts)
checkInsts _ _ = fail "Illegal instance declaration"
-----------------------------------------------------------------------------
-- Checking Patterns.
-- We parse patterns as expressions and check for valid patterns below,
-- converting the expression into a pattern at the same time.
-- | Convert an expression (parsed where a pattern is expected) into a
-- pattern, rejecting anything that is not valid pattern syntax.
checkPattern :: HsExp -> P HsPat
checkPattern e = checkPat e []

-- | Worker: the second argument accumulates already-converted argument
-- patterns of a constructor application.
checkPat :: HsExp -> [HsPat] -> P HsPat
checkPat (HsCon c) args = return (HsPApp c args)
checkPat (HsApp f x) args = do
	x <- checkPat x []
	checkPat f (x:args)
-- Only constructors may be applied to arguments; every other form is
-- valid only with an empty argument list.
checkPat e [] = case e of
	HsVar (UnQual x)   -> return (HsPVar x)
	HsLit l            -> return (HsPLit l)
	HsInfixApp l op r  -> do
			      l <- checkPat l []
			      r <- checkPat r []
			      case op of
				 HsQConOp c -> return (HsPInfixApp l c r)
				 _ -> patFail
	HsTuple es         -> do
			      ps <- mapM (\e -> checkPat e []) es
			      return (HsPTuple ps)
	HsList es	   -> do
			      ps <- mapM (\e -> checkPat e []) es
			      return (HsPList ps)
	HsParen e	   -> do
			      p <- checkPat e []
			      return (HsPParen p)
	HsAsPat n e	   -> do
			      p <- checkPat e []
			      return (HsPAsPat n p)
	HsWildCard	   -> return HsPWildCard
	HsIrrPat e	   -> do
			      p <- checkPat e []
			      return (HsPIrrPat p)
	HsRecConstr c fs   -> do
			      fs <- mapM checkPatField fs
			      return (HsPRec c fs)
	-- Negated literal pattern, e.g. (-1).
	HsNegApp (HsLit l) -> return (HsPNeg (HsPLit l))
	_ -> patFail

checkPat _ _ = patFail

-- | Convert a record-field update into a record-field pattern.
checkPatField :: HsFieldUpdate -> P HsPatField
checkPatField (HsFieldUpdate n e) = do
	p <- checkPat e []
	return (HsPFieldPat n p)

-- | Shared failure for all invalid pattern forms.
patFail :: P a
patFail = fail "Parse error in pattern"
-----------------------------------------------------------------------------
-- Check Expression Syntax
-- | Recursively validate an expression tree, rejecting forms that are
-- only legal as patterns (e.g. wildcards) wherever they occur.
checkExpr :: HsExp -> P HsExp
checkExpr e = case e of
	HsVar _			  -> return e
	HsCon _			  -> return e
	HsLit _			  -> return e
	HsInfixApp e1 op e2	  -> check2Exprs e1 e2 (flip HsInfixApp op)
	HsApp e1 e2		  -> check2Exprs e1 e2 HsApp
	HsNegApp e		  -> check1Expr e HsNegApp
	HsLambda loc ps e	  -> check1Expr e (HsLambda loc ps)
	HsLet bs e		  -> check1Expr e (HsLet bs)
	HsIf e1 e2 e3		  -> check3Exprs e1 e2 e3 HsIf
	HsCase e alts		  -> do
				     alts <- mapM checkAlt alts
				     e <- checkExpr e
				     return (HsCase e alts)
	HsDo stmts		  -> do
				     stmts <- mapM checkStmt stmts
				     return (HsDo stmts)
	HsTuple es		  -> checkManyExprs es HsTuple
	HsList es		  -> checkManyExprs es HsList
	HsParen e		  -> check1Expr e HsParen
	HsLeftSection e op	  -> check1Expr e (flip HsLeftSection op)
	HsRightSection op e	  -> check1Expr e (HsRightSection op)
	HsRecConstr c fields	  -> do
				     fields <- mapM checkField fields
				     return (HsRecConstr c fields)
	HsRecUpdate e fields	  -> do
				     fields <- mapM checkField fields
				     e <- checkExpr e
				     return (HsRecUpdate e fields)
	HsEnumFrom e		  -> check1Expr e HsEnumFrom
	HsEnumFromTo e1 e2	  -> check2Exprs e1 e2 HsEnumFromTo
	HsEnumFromThen e1 e2      -> check2Exprs e1 e2 HsEnumFromThen
	HsEnumFromThenTo e1 e2 e3 -> check3Exprs e1 e2 e3 HsEnumFromThenTo
	HsListComp e stmts        -> do
				     stmts <- mapM checkStmt stmts
				     e <- checkExpr e
				     return (HsListComp e stmts)
	HsExpTypeSig loc e ty     -> do
				     e <- checkExpr e
				     return (HsExpTypeSig loc e ty)
	_                         -> fail "Parse error in expression"

-- type signature for polymorphic recursion!!
-- | Check one subexpression, then rebuild with @f@.
check1Expr :: HsExp -> (HsExp -> a) -> P a
check1Expr e1 f = do
	e1 <- checkExpr e1
	return (f e1)

-- | Check two subexpressions, then rebuild with @f@.
check2Exprs :: HsExp -> HsExp -> (HsExp -> HsExp -> a) -> P a
check2Exprs e1 e2 f = do
	e1 <- checkExpr e1
	e2 <- checkExpr e2
	return (f e1 e2)

-- | Check three subexpressions, then rebuild with @f@.
check3Exprs :: HsExp -> HsExp -> HsExp -> (HsExp -> HsExp -> HsExp -> a) -> P a
check3Exprs e1 e2 e3 f = do
	e1 <- checkExpr e1
	e2 <- checkExpr e2
	e3 <- checkExpr e3
	return (f e1 e2 e3)

-- | Check a list of subexpressions, then rebuild with @f@.
checkManyExprs :: [HsExp] -> ([HsExp] -> a) -> P a
checkManyExprs es f = do
	es <- mapM checkExpr es
	return (f es)
-- | Validate the guarded right-hand sides of a case alternative.
checkAlt :: HsAlt -> P HsAlt
checkAlt (HsAlt loc p galts bs) = do
	galts <- checkGAlts galts
	return (HsAlt loc p galts bs)

-- | Validate either an unguarded or a list of guarded alternatives.
checkGAlts :: HsGuardedAlts -> P HsGuardedAlts
checkGAlts (HsUnGuardedAlt e) = check1Expr e HsUnGuardedAlt
checkGAlts (HsGuardedAlts galts) = do
	galts <- mapM checkGAlt galts
	return (HsGuardedAlts galts)

-- | Validate both the guard and the body of a guarded alternative.
checkGAlt :: HsGuardedAlt -> P HsGuardedAlt
checkGAlt (HsGuardedAlt loc e1 e2) = check2Exprs e1 e2 (HsGuardedAlt loc)

-- | Validate expressions inside a do/list-comprehension statement.
checkStmt :: HsStmt -> P HsStmt
checkStmt (HsGenerator loc p e) = check1Expr e (HsGenerator loc p)
checkStmt (HsQualifier e) = check1Expr e HsQualifier
checkStmt s@(HsLetStmt _) = return s

-- | Validate the expression inside a record-field update.
checkField :: HsFieldUpdate -> P HsFieldUpdate
checkField (HsFieldUpdate n e) = check1Expr e (HsFieldUpdate n)
-----------------------------------------------------------------------------
-- Check Equation Syntax
-- | Decide whether a binding is a function binding (lhs is a function
-- application or infix operator form) or a pattern binding, and build
-- the corresponding declaration.
checkValDef :: SrcLoc -> HsExp -> HsRhs -> [HsDecl] -> P HsDecl
checkValDef srcloc lhs rhs whereBinds =
    case isFunLhs lhs [] of
	 Just (f,es) -> do
			ps <- mapM checkPattern es
			return (HsFunBind [HsMatch srcloc f ps rhs whereBinds])
         Nothing     -> do
			lhs <- checkPattern lhs
			return (HsPatBind srcloc lhs rhs whereBinds)

-- A variable binding is parsed as an HsPatBind.
-- | Recognise a function lhs, collecting argument expressions; returns
-- the function name and its (still unconverted) argument list.
isFunLhs :: HsExp -> [HsExp] -> Maybe (HsName, [HsExp])
isFunLhs (HsInfixApp l (HsQVarOp (UnQual op)) r) es = Just (op, l:r:es)
isFunLhs (HsApp (HsVar (UnQual f)) e) es = Just (f, e:es)
-- Parenthesised and nested applications are unwrapped recursively.
isFunLhs (HsApp (HsParen f) e) es = isFunLhs f (e:es)
isFunLhs (HsApp f e) es = isFunLhs f (e:es)
isFunLhs _ _ = Nothing
-----------------------------------------------------------------------------
-- In a class or instance body, a pattern binding must be of a variable.
-- | In a class or instance body, every pattern binding must bind a
-- plain variable; reject anything else.
checkClassBody :: [HsDecl] -> P [HsDecl]
checkClassBody decls = do
	mapM_ checkMethodDef decls
	return decls

-- | Accept variable pattern binds and all non-pattern declarations.
checkMethodDef :: HsDecl -> P ()
checkMethodDef (HsPatBind _ (HsPVar _) _ _) = return ()
checkMethodDef (HsPatBind loc _ _ _) =
	fail "illegal method definition" `atSrcLoc` loc
checkMethodDef _ = return ()

-- | Require an unqualified, non-special name (used where the grammar
-- could not enforce this without conflicts).
checkUnQual :: HsQName -> P HsName
checkUnQual (Qual _ _) = fail "Illegal qualified name"
checkUnQual (UnQual n) = return n
checkUnQual (Special _) = fail "Illegal special name"
-----------------------------------------------------------------------------
-- Miscellaneous utilities
-- | Fixity precedences must lie in 0..9.
checkPrec :: Integer -> P Int
checkPrec i | 0 <= i && i <= 9 = return (fromInteger i)
checkPrec i | otherwise	       = fail ("Illegal precedence " ++ show i)

-- | A @C { .. }@ form is record construction when the head is a
-- constructor, a record update when fields are non-empty, and an error
-- otherwise (update with no fields).
mkRecConstrOrUpdate :: HsExp -> [HsFieldUpdate] -> P HsExp
mkRecConstrOrUpdate (HsCon c) fs       = return (HsRecConstr c fs)
mkRecConstrOrUpdate e         fs@(_:_) = return (HsRecUpdate e fs)
mkRecConstrOrUpdate _         _        = fail "Empty record update"
-----------------------------------------------------------------------------
-- Reverse a list of declarations, merging adjacent HsFunBinds of the
-- same name and checking that their arities match.
-- | Reverse a list of declarations (they are parsed in reverse order),
-- merging adjacent 'HsFunBind's for the same name into one binding and
-- checking that all their clauses have the same arity.
checkRevDecls :: [HsDecl] -> P [HsDecl]
checkRevDecls = mergeFunBinds []
    where
	mergeFunBinds revDs [] = return revDs
	mergeFunBinds revDs (HsFunBind ms1@(HsMatch _ name ps _ _:_):ds1) =
		mergeMatches ms1 ds1
	    where
		arity = length ps
		-- Consume consecutive HsFunBinds for the same name,
		-- prepending their clauses (restores source order).
		mergeMatches ms' (HsFunBind ms@(HsMatch loc name' ps' _ _:_):ds)
		    | name' == name =
			if length ps' /= arity
			then fail ("arity mismatch for '" ++ prettyPrint name ++ "'")
			     `atSrcLoc` loc
			else mergeMatches (ms++ms') ds
		mergeMatches ms' ds = mergeFunBinds (HsFunBind ms':revDs) ds
	mergeFunBinds revDs (d:ds) = mergeFunBinds (d:revDs) ds
| FranklinChen/hugs98-plus-Sep2006 | packages/haskell-src/Language/Haskell/ParseUtils.hs | bsd-3-clause | 10,751 | 73 | 16 | 2,274 | 3,708 | 1,823 | 1,885 | 226 | 25 |
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
module Shapes where
import Graphics.GL.Pal
import Types
-- | Load the shader programs and geometry for every object in the
-- gallery scene, returning them bundled as a 'Shapes' record.
galleryShapes :: IO (Shapes Uniforms)
galleryShapes = do
  -- The room: one large cube the viewer stands inside.
  roomProg  <- createShaderProgram "app/shaders/world/room.vert" "app/shaders/world/room.frag"
  roomGeo   <- cubeGeometry (V3 roomWidth roomHeight roomDepth) (V3 1 1 1)
  roomShape <- makeShape roomGeo roomProg

  -- Pedestals the sculptures sit on.
  pedestalProg  <- createShaderProgram "app/shaders/world/pedestal.vert" "app/shaders/world/pedestal.frag"
  pedestalGeo   <- cubeGeometry (V3 sculptureSize pedestalHeight sculptureSize) (V3 1 1 1)
  pedestalShape <- makeShape pedestalGeo pedestalProg

  -- Code holders reuse the pedestal shaders with a slimmer, taller cube.
  codeHolderProg  <- createShaderProgram "app/shaders/world/pedestal.vert" "app/shaders/world/pedestal.frag"
  codeHolderGeo   <- cubeGeometry (V3 (sculptureSize - 0.1) (3 * pedestalHeight) (sculptureSize - 0.1)) (V3 1 1 1)
  codeHolderShape <- makeShape codeHolderGeo codeHolderProg

  -- A small icosahedral marker for the light source.
  lightProg  <- createShaderProgram "app/shaders/world/light.vert" "app/shaders/world/light.frag"
  lightGeo   <- icosahedronGeometry 0.02 4
  lightShape <- makeShape lightGeo lightProg

  return Shapes { _shpRoom       = roomShape
                , _shpLight      = lightShape
                , _shpPedestal   = pedestalShape
                , _shpCodeHolder = codeHolderShape
                }
| vrtree/pedestal | app/Shapes.hs | bsd-3-clause | 1,594 | 0 | 13 | 374 | 309 | 153 | 156 | 26 | 1 |
-- | Get items from the DB.
module HN.Model.Items where
import HN.Types
import HN.Monads
import Snap.App
-- | Get items for a single content source, newest first, capped at the
-- given limit.  The two parameters are substituted positionally into
-- the WHERE and LIMIT placeholders.
getItemsBySource :: Source -> Int -> Model c s [DItem]
getItemsBySource source limit =
  query ["SELECT id,source,title,added,published,description,link"
        ,"FROM item"
        ,"WHERE source = ?"
        ,"ORDER BY published DESC"
        ,"LIMIT ?"]
        (source,limit)
-- | Get the most recent items across all sources, newest first.
-- Items whose publication time is in the future are excluded
-- (@published < NOW()@).
getItems :: Int -> Model c s [DItem]
getItems limit =
  query ["SELECT id,source,title,added,published,description,link"
        ,"FROM item"
        ,"WHERE published < NOW()"
        ,"ORDER BY published DESC"
        ,"LIMIT ?"]
        (Only limit)
-- | Get items published after the given epoch timestamp (but not in
-- the future), newest first, capped at the given limit.
getItemsAfter :: Int -> Int -> Model c s [DItem]
getItemsAfter sinceEpoch limit = query sql (sinceEpoch, limit)
  where
    sql = ["SELECT id,source,title,added,published,description,link"
          ,"FROM item"
          ,"WHERE published < NOW() and extract(epoch from published) > ?"
          ,"ORDER BY published DESC"
          ,"LIMIT ?"]
-- | Insert an item, if it doesn't already exist.
--
-- An item counts as a duplicate when a row with the same source, title
-- and link is already present; in that case nothing is inserted.
addItem :: Source -> NewItem -> Model c s ()
addItem source item = do
  found <- single ["SELECT true"
                  ,"FROM item"
                  ,"WHERE source = ?"
                  ,"AND title = ?"
                  ,"AND link = ?"]
                  (source
                  ,niTitle item
                  ,niLink item)
  case found :: Maybe Bool of
    Nothing -> void $
      exec ["INSERT INTO item"
           ,"(source,published,title,description,link)"
           ,"VALUES"
           ,"(?,?,?,?,?)"]
           (source
           ,niPublished item
           ,niTitle item
           ,niDescription item
           ,niLink item)
    Just{} -> return ()
| erantapaa/haskellnews | src/HN/Model/Items.hs | bsd-3-clause | 1,782 | 0 | 13 | 578 | 354 | 195 | 159 | 50 | 2 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[RnExpr]{Renaming of expressions}
Basically dependency analysis.
Handles @Match@, @GRHSs@, @HsExpr@, and @Qualifier@ datatypes. In
general, all of these functions return a renamed thing, and a set of
free variables.
-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE MultiWayIf #-}
module RnExpr (
rnLExpr, rnExpr, rnStmts
) where
#include "HsVersions.h"
import RnBinds ( rnLocalBindsAndThen, rnLocalValBindsLHS, rnLocalValBindsRHS,
rnMatchGroup, rnGRHS, makeMiniFixityEnv)
import HsSyn
import TcRnMonad
import Module ( getModule )
import RnEnv
import RnSplice ( rnBracket, rnSpliceExpr, checkThLocalName )
import RnTypes
import RnPat
import DynFlags
import PrelNames
import BasicTypes
import Name
import NameSet
import RdrName
import UniqSet
import Data.List
import Util
import ListSetOps ( removeDups )
import ErrUtils
import Outputable
import SrcLoc
import FastString
import Control.Monad
import TysWiredIn ( nilDataConName )
import qualified GHC.LanguageExtensions as LangExt
import Data.Ord
import Data.Array
{-
************************************************************************
* *
\subsubsection{Expressions}
* *
************************************************************************
-}
-- | Rename a list of expressions, returning the renamed expressions
-- together with the union of their free variables.
rnExprs :: [LHsExpr RdrName] -> RnM ([LHsExpr Name], FreeVars)
rnExprs exprs0 = go exprs0 emptyUniqSet
  where
    go [] fvs_acc = return ([], fvs_acc)
    go (e:es) fvs_acc
      = do { (e', fvs_e) <- rnLExpr e
           -- Force the accumulated free-var set at each step: it is
           -- typically small or empty (think long lists of literal
           -- constants), and forcing avoids a chain of plusFV thunks.
           ; let fvs_acc' = fvs_acc `plusFV` fvs_e
           ; (es', fvs) <- fvs_acc' `seq` go es fvs_acc'
           ; return (e':es', fvs) }
-- | Rename a located expression: apply 'rnExpr' under the location
-- wrapper, keeping the SrcSpan and pairing the result with its free
-- variables.
rnLExpr :: LHsExpr RdrName -> RnM (LHsExpr Name, FreeVars)
rnLExpr = wrapLocFstM rnExpr
rnExpr :: HsExpr RdrName -> RnM (HsExpr Name, FreeVars)
-- | Finish renaming an occurrence of an already-resolved variable.
-- For names defined in the module being compiled, this also runs the
-- Template Haskell stage check ('checkThLocalName').
finishHsVar :: Located Name -> RnM (HsExpr Name, FreeVars)
-- Separated from rnExpr because it's also used
-- when renaming infix expressions
finishHsVar (L l name)
 = do { this_mod <- getModule
      ; when (nameIsLocalOrFrom this_mod name) $
        checkThLocalName name
      ; return (HsVar (L l name), unitFV name) }
-- | Handle a variable occurrence that did not resolve to anything in
-- scope.  Unqualified names are turned into 'HsUnboundVar' "holes"
-- (underscore-prefixed names become true expression holes, others
-- out-of-scope holes) so the type checker can report them later;
-- qualified names are reported as errors immediately.
rnUnboundVar :: RdrName -> RnM (HsExpr Name, FreeVars)
rnUnboundVar v
 = do { if isUnqual v
        then -- Treat this as a "hole"
             -- Do not fail right now; instead, return HsUnboundVar
             -- and let the type checker report the error
             do { let occ = rdrNameOcc v
                ; uv <- if startsWithUnderscore occ
                        then return (TrueExprHole occ)
                        else OutOfScope occ <$> getGlobalRdrEnv
                ; return (HsUnboundVar uv, emptyFVs) }
        else -- Fail immediately (qualified name)
             do { n <- reportUnboundName v
                ; return (HsVar (noLoc n), emptyFVs) } }
rnExpr (HsVar (L l v))
= do { opt_DuplicateRecordFields <- xoptM LangExt.DuplicateRecordFields
; mb_name <- lookupOccRn_overloaded opt_DuplicateRecordFields v
; case mb_name of {
Nothing -> rnUnboundVar v ;
Just (Left name)
| name == nilDataConName -- Treat [] as an ExplicitList, so that
-- OverloadedLists works correctly
-> rnExpr (ExplicitList placeHolderType Nothing [])
| otherwise
-> finishHsVar (L l name) ;
Just (Right [f@(FieldOcc (L _ fn) s)]) ->
return (HsRecFld (ambiguousFieldOcc (FieldOcc (L l fn) s))
, unitFV (selectorFieldOcc f)) ;
Just (Right fs@(_:_:_)) -> return (HsRecFld (Ambiguous (L l v)
PlaceHolder)
, mkFVs (map selectorFieldOcc fs));
Just (Right []) -> error "runExpr/HsVar" } }
rnExpr (HsIPVar v)
= return (HsIPVar v, emptyFVs)
rnExpr (HsOverLabel v)
= return (HsOverLabel v, emptyFVs)
rnExpr (HsLit lit@(HsString src s))
= do { opt_OverloadedStrings <- xoptM LangExt.OverloadedStrings
; if opt_OverloadedStrings then
rnExpr (HsOverLit (mkHsIsString src s placeHolderType))
else do {
; rnLit lit
; return (HsLit lit, emptyFVs) } }
rnExpr (HsLit lit)
= do { rnLit lit
; return (HsLit lit, emptyFVs) }
rnExpr (HsOverLit lit)
= do { (lit', fvs) <- rnOverLit lit
; return (HsOverLit lit', fvs) }
rnExpr (HsApp fun arg)
= do { (fun',fvFun) <- rnLExpr fun
; (arg',fvArg) <- rnLExpr arg
; return (HsApp fun' arg', fvFun `plusFV` fvArg) }
rnExpr (HsAppType fun arg)
= do { (fun',fvFun) <- rnLExpr fun
; (arg',fvArg) <- rnHsWcType HsTypeCtx arg
; return (HsAppType fun' arg', fvFun `plusFV` fvArg) }
rnExpr (OpApp e1 op _ e2)
= do { (e1', fv_e1) <- rnLExpr e1
; (e2', fv_e2) <- rnLExpr e2
; (op', fv_op) <- rnLExpr op
-- Deal with fixity
-- When renaming code synthesised from "deriving" declarations
-- we used to avoid fixity stuff, but we can't easily tell any
-- more, so I've removed the test. Adding HsPars in TcGenDeriv
-- should prevent bad things happening.
; fixity <- case op' of
L _ (HsVar (L _ n)) -> lookupFixityRn n
L _ (HsRecFld f) -> lookupFieldFixityRn f
_ -> return (Fixity (show minPrecedence) minPrecedence InfixL)
-- c.f. lookupFixity for unbound
; final_e <- mkOpAppRn e1' op' fixity e2'
; return (final_e, fv_e1 `plusFV` fv_op `plusFV` fv_e2) }
rnExpr (NegApp e _)
= do { (e', fv_e) <- rnLExpr e
; (neg_name, fv_neg) <- lookupSyntaxName negateName
; final_e <- mkNegAppRn e' neg_name
; return (final_e, fv_e `plusFV` fv_neg) }
------------------------------------------
-- Template Haskell extensions
-- Don't ifdef-GHCI them because we want to fail gracefully
-- (not with an rnExpr crash) in a stage-1 compiler.
rnExpr e@(HsBracket br_body) = rnBracket e br_body
rnExpr (HsSpliceE splice) = rnSpliceExpr splice
---------------------------------------------
-- Sections
-- See Note [Parsing sections] in Parser.y
rnExpr (HsPar (L loc (section@(SectionL {}))))
= do { (section', fvs) <- rnSection section
; return (HsPar (L loc section'), fvs) }
rnExpr (HsPar (L loc (section@(SectionR {}))))
= do { (section', fvs) <- rnSection section
; return (HsPar (L loc section'), fvs) }
rnExpr (HsPar e)
= do { (e', fvs_e) <- rnLExpr e
; return (HsPar e', fvs_e) }
rnExpr expr@(SectionL {})
= do { addErr (sectionErr expr); rnSection expr }
rnExpr expr@(SectionR {})
= do { addErr (sectionErr expr); rnSection expr }
---------------------------------------------
rnExpr (HsCoreAnn src ann expr)
= do { (expr', fvs_expr) <- rnLExpr expr
; return (HsCoreAnn src ann expr', fvs_expr) }
rnExpr (HsSCC src lbl expr)
= do { (expr', fvs_expr) <- rnLExpr expr
; return (HsSCC src lbl expr', fvs_expr) }
rnExpr (HsTickPragma src info srcInfo expr)
= do { (expr', fvs_expr) <- rnLExpr expr
; return (HsTickPragma src info srcInfo expr', fvs_expr) }
rnExpr (HsLam matches)
= do { (matches', fvMatch) <- rnMatchGroup LambdaExpr rnLExpr matches
; return (HsLam matches', fvMatch) }
rnExpr (HsLamCase _arg matches)
= do { (matches', fvs_ms) <- rnMatchGroup CaseAlt rnLExpr matches
-- ; return (HsLamCase arg matches', fvs_ms) }
; return (HsLamCase placeHolderType matches', fvs_ms) }
rnExpr (HsCase expr matches)
= do { (new_expr, e_fvs) <- rnLExpr expr
; (new_matches, ms_fvs) <- rnMatchGroup CaseAlt rnLExpr matches
; return (HsCase new_expr new_matches, e_fvs `plusFV` ms_fvs) }
rnExpr (HsLet (L l binds) expr)
= rnLocalBindsAndThen binds $ \binds' _ -> do
{ (expr',fvExpr) <- rnLExpr expr
; return (HsLet (L l binds') expr', fvExpr) }
rnExpr (HsDo do_or_lc (L l stmts) _)
= do { ((stmts', _), fvs) <-
rnStmtsWithPostProcessing do_or_lc rnLExpr
postProcessStmtsForApplicativeDo stmts
(\ _ -> return ((), emptyFVs))
; return ( HsDo do_or_lc (L l stmts') placeHolderType, fvs ) }
rnExpr (ExplicitList _ _ exps)
= do { opt_OverloadedLists <- xoptM LangExt.OverloadedLists
; (exps', fvs) <- rnExprs exps
; if opt_OverloadedLists
then do {
; (from_list_n_name, fvs') <- lookupSyntaxName fromListNName
; return (ExplicitList placeHolderType (Just from_list_n_name) exps'
, fvs `plusFV` fvs') }
else
return (ExplicitList placeHolderType Nothing exps', fvs) }
rnExpr (ExplicitPArr _ exps)
= do { (exps', fvs) <- rnExprs exps
; return (ExplicitPArr placeHolderType exps', fvs) }
rnExpr (ExplicitTuple tup_args boxity)
= do { checkTupleSection tup_args
; checkTupSize (length tup_args)
; (tup_args', fvs) <- mapAndUnzipM rnTupArg tup_args
; return (ExplicitTuple tup_args' boxity, plusFVs fvs) }
where
rnTupArg (L l (Present e)) = do { (e',fvs) <- rnLExpr e
; return (L l (Present e'), fvs) }
rnTupArg (L l (Missing _)) = return (L l (Missing placeHolderType)
, emptyFVs)
rnExpr (RecordCon { rcon_con_name = con_id
, rcon_flds = rec_binds@(HsRecFields { rec_dotdot = dd }) })
= do { con_lname@(L _ con_name) <- lookupLocatedOccRn con_id
; (flds, fvs) <- rnHsRecFields (HsRecFieldCon con_name) mk_hs_var rec_binds
; (flds', fvss) <- mapAndUnzipM rn_field flds
; let rec_binds' = HsRecFields { rec_flds = flds', rec_dotdot = dd }
; return (RecordCon { rcon_con_name = con_lname, rcon_flds = rec_binds'
, rcon_con_expr = noPostTcExpr, rcon_con_like = PlaceHolder }
, fvs `plusFV` plusFVs fvss `addOneFV` con_name) }
where
mk_hs_var l n = HsVar (L l n)
rn_field (L l fld) = do { (arg', fvs) <- rnLExpr (hsRecFieldArg fld)
; return (L l (fld { hsRecFieldArg = arg' }), fvs) }
rnExpr (RecordUpd { rupd_expr = expr, rupd_flds = rbinds })
= do { (expr', fvExpr) <- rnLExpr expr
; (rbinds', fvRbinds) <- rnHsRecUpdFields rbinds
; return (RecordUpd { rupd_expr = expr', rupd_flds = rbinds'
, rupd_cons = PlaceHolder, rupd_in_tys = PlaceHolder
, rupd_out_tys = PlaceHolder, rupd_wrap = PlaceHolder }
, fvExpr `plusFV` fvRbinds) }
rnExpr (ExprWithTySig expr pty)
= do { (pty', fvTy) <- rnHsSigWcType ExprWithTySigCtx pty
; (expr', fvExpr) <- bindSigTyVarsFV (hsWcScopedTvs pty') $
rnLExpr expr
; return (ExprWithTySig expr' pty', fvExpr `plusFV` fvTy) }
rnExpr (HsIf _ p b1 b2)
= do { (p', fvP) <- rnLExpr p
; (b1', fvB1) <- rnLExpr b1
; (b2', fvB2) <- rnLExpr b2
; (mb_ite, fvITE) <- lookupIfThenElse
; return (HsIf mb_ite p' b1' b2', plusFVs [fvITE, fvP, fvB1, fvB2]) }
rnExpr (HsMultiIf _ty alts)
= do { (alts', fvs) <- mapFvRn (rnGRHS IfAlt rnLExpr) alts
-- ; return (HsMultiIf ty alts', fvs) }
; return (HsMultiIf placeHolderType alts', fvs) }
rnExpr (ArithSeq _ _ seq)
= do { opt_OverloadedLists <- xoptM LangExt.OverloadedLists
; (new_seq, fvs) <- rnArithSeq seq
; if opt_OverloadedLists
then do {
; (from_list_name, fvs') <- lookupSyntaxName fromListName
; return (ArithSeq noPostTcExpr (Just from_list_name) new_seq, fvs `plusFV` fvs') }
else
return (ArithSeq noPostTcExpr Nothing new_seq, fvs) }
rnExpr (PArrSeq _ seq)
= do { (new_seq, fvs) <- rnArithSeq seq
; return (PArrSeq noPostTcExpr new_seq, fvs) }
{-
These three are pattern syntax appearing in expressions.
Since all the symbols are reservedops we can simply reject them.
We return a (bogus) EWildPat in each case.
-}
rnExpr EWildPat = return (hsHoleExpr, emptyFVs) -- "_" is just a hole
rnExpr e@(EAsPat {}) =
patSynErr e (text "Did you mean to enable TypeApplications?")
rnExpr e@(EViewPat {}) = patSynErr e empty
rnExpr e@(ELazyPat {}) = patSynErr e empty
{-
************************************************************************
* *
Static values
* *
************************************************************************
For the static form we check that the free variables are all top-level
value bindings. This is done by checking that the name is external or
wired-in. See the Notes about the NameSorts in Name.hs.
-}
rnExpr e@(HsStatic expr) = do
target <- fmap hscTarget getDynFlags
case target of
-- SPT entries are expected to exist in object code so far, and this is
-- not the case in interpreted mode. See bug #9878.
HscInterpreted -> addErr $ sep
[ text "The static form is not supported in interpreted mode."
, text "Please use -fobject-code."
]
_ -> return ()
(expr',fvExpr) <- rnLExpr expr
stage <- getStage
case stage of
Brack _ _ -> return () -- Don't check names if we are inside brackets.
-- We don't want to reject cases like:
-- \e -> [| static $(e) |]
-- if $(e) turns out to produce a legal expression.
Splice _ -> addErr $ sep
[ text "static forms cannot be used in splices:"
, nest 2 $ ppr e
]
_ -> do
let isTopLevelName n = isExternalName n || isWiredInName n
case nameSetElems $ filterNameSet
(\n -> not (isTopLevelName n || isUnboundName n))
fvExpr of
[] -> return ()
fvNonGlobal -> addErr $ cat
[ text $ "Only identifiers of top-level bindings can "
++ "appear in the body of the static form:"
, nest 2 $ ppr e
, text "but the following identifiers were found instead:"
, nest 2 $ vcat $ map ppr fvNonGlobal
]
return (HsStatic expr', fvExpr)
{-
************************************************************************
* *
Arrow notation
* *
************************************************************************
-}
rnExpr (HsProc pat body)
= newArrowScope $
rnPat ProcExpr pat $ \ pat' -> do
{ (body',fvBody) <- rnCmdTop body
; return (HsProc pat' body', fvBody) }
-- Ideally, these would be done in parsing, but to keep parsing simple, we do it here.
rnExpr e@(HsArrApp {}) = arrowFail e
rnExpr e@(HsArrForm {}) = arrowFail e
rnExpr other = pprPanic "rnExpr: unexpected expression" (ppr other)
-- HsWrap
-- | The placeholder expression used when recovering from errors: an
-- unbound variable spelled \"_\", i.e. an expression hole for the type
-- checker to report.
hsHoleExpr :: HsExpr id
hsHoleExpr = HsUnboundVar (TrueExprHole (mkVarOcc "_"))
-- | Report an arrow command found where an ordinary expression was
-- expected, then recover with a hole so renaming can continue and
-- report further errors.
arrowFail :: HsExpr RdrName -> RnM (HsExpr Name, FreeVars)
arrowFail e
  = do { addErr (vcat [ text "Arrow command found where an expression was expected:"
                      , nest 2 (ppr e) ])
         -- Return a place-holder hole, so that we can carry on
         -- to report other errors
       ; return (hsHoleExpr, emptyFVs) }
----------------------
-- See Note [Parsing sections] in Parser.y
--
-- | Rename an operator section.  Besides renaming the operator and
-- operand, this checks (via 'checkSectionPrec') that the operand's
-- fixity is compatible with the section's operator.
rnSection :: HsExpr RdrName -> RnM (HsExpr Name, FreeVars)
rnSection section@(SectionR op expr)
  = do { (op', fvs_op) <- rnLExpr op
       ; (expr', fvs_expr) <- rnLExpr expr
       ; checkSectionPrec InfixR section op' expr'
       ; return (SectionR op' expr', fvs_op `plusFV` fvs_expr) }
rnSection section@(SectionL expr op)
  = do { (expr', fvs_expr) <- rnLExpr expr
       ; (op', fvs_op) <- rnLExpr op
       ; checkSectionPrec InfixL section op' expr'
       ; return (SectionL expr' op', fvs_op `plusFV` fvs_expr) }
-- Only left/right sections reach this function; anything else is a bug.
rnSection other = pprPanic "rnSection" (ppr other)
{-
************************************************************************
* *
Arrow commands
* *
************************************************************************
-}
-- | Rename the argument commands of an arrow operator application,
-- collecting the union of their free variables.
rnCmdArgs :: [LHsCmdTop RdrName] -> RnM ([LHsCmdTop Name], FreeVars)
rnCmdArgs args
  = do { (args', fvss) <- mapAndUnzipM rnCmdTop args
       ; return (args', plusFVs fvss) }
-- | Rename a top-level arrow command, and record which arrow-class
-- methods it needs (always arr, (>>>) and first, plus whatever
-- 'methodNamesCmd' finds in the body) so their rebindable-syntax
-- resolutions can be attached to the 'HsCmdTop'.
rnCmdTop :: LHsCmdTop RdrName -> RnM (LHsCmdTop Name, FreeVars)
rnCmdTop = wrapLocFstM rnCmdTop'
 where
  rnCmdTop' (HsCmdTop cmd _ _ _)
   = do { (cmd', fvCmd) <- rnLCmd cmd
        ; let cmd_names = [arrAName, composeAName, firstAName] ++
                          nameSetElems (methodNamesCmd (unLoc cmd'))
        -- Generate the rebindable syntax for the monad
        ; (cmd_names', cmd_fvs) <- lookupSyntaxNames cmd_names
        ; return (HsCmdTop cmd' placeHolderType placeHolderType
                  (cmd_names `zip` cmd_names'),
                  fvCmd `plusFV` cmd_fvs) }
-- | Rename a located arrow command (location-threading wrapper around
-- 'rnCmd').
rnLCmd :: LHsCmd RdrName -> RnM (LHsCmd Name, FreeVars)
rnLCmd = wrapLocFstM rnCmd
rnCmd :: HsCmd RdrName -> RnM (HsCmd Name, FreeVars)
rnCmd (HsCmdArrApp arrow arg _ ho rtl)
= do { (arrow',fvArrow) <- select_arrow_scope (rnLExpr arrow)
; (arg',fvArg) <- rnLExpr arg
; return (HsCmdArrApp arrow' arg' placeHolderType ho rtl,
fvArrow `plusFV` fvArg) }
where
select_arrow_scope tc = case ho of
HsHigherOrderApp -> tc
HsFirstOrderApp -> escapeArrowScope tc
-- See Note [Escaping the arrow scope] in TcRnTypes
-- Before renaming 'arrow', use the environment of the enclosing
-- proc for the (-<) case.
-- Local bindings, inside the enclosing proc, are not in scope
-- inside 'arrow'. In the higher-order case (-<<), they are.
-- infix form
rnCmd (HsCmdArrForm op (Just _) [arg1, arg2])
= do { (op',fv_op) <- escapeArrowScope (rnLExpr op)
; let L _ (HsVar (L _ op_name)) = op'
; (arg1',fv_arg1) <- rnCmdTop arg1
; (arg2',fv_arg2) <- rnCmdTop arg2
-- Deal with fixity
; fixity <- lookupFixityRn op_name
; final_e <- mkOpFormRn arg1' op' fixity arg2'
; return (final_e, fv_arg1 `plusFV` fv_op `plusFV` fv_arg2) }
rnCmd (HsCmdArrForm op fixity cmds)
= do { (op',fvOp) <- escapeArrowScope (rnLExpr op)
; (cmds',fvCmds) <- rnCmdArgs cmds
; return (HsCmdArrForm op' fixity cmds', fvOp `plusFV` fvCmds) }
rnCmd (HsCmdApp fun arg)
= do { (fun',fvFun) <- rnLCmd fun
; (arg',fvArg) <- rnLExpr arg
; return (HsCmdApp fun' arg', fvFun `plusFV` fvArg) }
rnCmd (HsCmdLam matches)
= do { (matches', fvMatch) <- rnMatchGroup LambdaExpr rnLCmd matches
; return (HsCmdLam matches', fvMatch) }
rnCmd (HsCmdPar e)
= do { (e', fvs_e) <- rnLCmd e
; return (HsCmdPar e', fvs_e) }
rnCmd (HsCmdCase expr matches)
= do { (new_expr, e_fvs) <- rnLExpr expr
; (new_matches, ms_fvs) <- rnMatchGroup CaseAlt rnLCmd matches
; return (HsCmdCase new_expr new_matches, e_fvs `plusFV` ms_fvs) }
rnCmd (HsCmdIf _ p b1 b2)
= do { (p', fvP) <- rnLExpr p
; (b1', fvB1) <- rnLCmd b1
; (b2', fvB2) <- rnLCmd b2
; (mb_ite, fvITE) <- lookupIfThenElse
; return (HsCmdIf mb_ite p' b1' b2', plusFVs [fvITE, fvP, fvB1, fvB2]) }
rnCmd (HsCmdLet (L l binds) cmd)
= rnLocalBindsAndThen binds $ \ binds' _ -> do
{ (cmd',fvExpr) <- rnLCmd cmd
; return (HsCmdLet (L l binds') cmd', fvExpr) }
rnCmd (HsCmdDo (L l stmts) _)
= do { ((stmts', _), fvs) <-
rnStmts ArrowExpr rnLCmd stmts (\ _ -> return ((), emptyFVs))
; return ( HsCmdDo (L l stmts') placeHolderType, fvs ) }
rnCmd cmd@(HsCmdWrap {}) = pprPanic "rnCmd" (ppr cmd)
---------------------------------------------------
-- | The set of arrow-class method names a command needs.  Represented
-- as 'FreeVars'; the only possible inhabitants are appAName,
-- choiceAName and loopAName.
type CmdNeeds = FreeVars

-- find what methods the Cmd needs (loop, choice, apply)
methodNamesLCmd :: LHsCmd Name -> CmdNeeds
methodNamesLCmd = methodNamesCmd . unLoc

methodNamesCmd :: HsCmd Name -> CmdNeeds
-- First-order application (-<) needs no extra method; higher-order
-- application (-<<) needs 'app'.
methodNamesCmd (HsCmdArrApp _arrow _arg _ HsFirstOrderApp _rtl)
  = emptyFVs
methodNamesCmd (HsCmdArrApp _arrow _arg _ HsHigherOrderApp _rtl)
  = unitFV appAName
methodNamesCmd (HsCmdArrForm {}) = emptyFVs
methodNamesCmd (HsCmdWrap _ cmd) = methodNamesCmd cmd
methodNamesCmd (HsCmdPar c) = methodNamesLCmd c
-- Branching constructs (if, case) additionally need ArrowChoice's '|||'.
methodNamesCmd (HsCmdIf _ _ c1 c2)
  = methodNamesLCmd c1 `plusFV` methodNamesLCmd c2 `addOneFV` choiceAName
methodNamesCmd (HsCmdLet _ c) = methodNamesLCmd c
methodNamesCmd (HsCmdDo (L _ stmts) _) = methodNamesStmts stmts
methodNamesCmd (HsCmdApp c _) = methodNamesLCmd c
methodNamesCmd (HsCmdLam match) = methodNamesMatch match
methodNamesCmd (HsCmdCase _ matches)
  = methodNamesMatch matches `addOneFV` choiceAName

--methodNamesCmd _ = emptyFVs
-- Other forms can't occur in commands, but it's not convenient
-- to error here so we just do what's convenient.
-- The type checker will complain later
---------------------------------------------------
methodNamesMatch :: MatchGroup Name (LHsCmd Name) -> FreeVars
methodNamesMatch (MG { mg_alts = L _ ms })
= plusFVs (map do_one ms)
where
do_one (L _ (Match _ _ _ grhss)) = methodNamesGRHSs grhss
-------------------------------------------------
-- gaw 2004
methodNamesGRHSs :: GRHSs Name (LHsCmd Name) -> FreeVars
methodNamesGRHSs (GRHSs grhss _) = plusFVs (map methodNamesGRHS grhss)
-------------------------------------------------
methodNamesGRHS :: Located (GRHS Name (LHsCmd Name)) -> CmdNeeds
methodNamesGRHS (L _ (GRHS _ rhs)) = methodNamesLCmd rhs
---------------------------------------------------
methodNamesStmts :: [Located (StmtLR Name Name (LHsCmd Name))] -> FreeVars
methodNamesStmts stmts = plusFVs (map methodNamesLStmt stmts)
---------------------------------------------------
methodNamesLStmt :: Located (StmtLR Name Name (LHsCmd Name)) -> FreeVars
methodNamesLStmt = methodNamesStmt . unLoc
methodNamesStmt :: StmtLR Name Name (LHsCmd Name) -> FreeVars
methodNamesStmt (LastStmt cmd _ _) = methodNamesLCmd cmd
methodNamesStmt (BodyStmt cmd _ _ _) = methodNamesLCmd cmd
methodNamesStmt (BindStmt _ cmd _ _ _) = methodNamesLCmd cmd
methodNamesStmt (RecStmt { recS_stmts = stmts }) =
methodNamesStmts stmts `addOneFV` loopAName
methodNamesStmt (LetStmt {}) = emptyFVs
methodNamesStmt (ParStmt {}) = emptyFVs
methodNamesStmt (TransStmt {}) = emptyFVs
methodNamesStmt ApplicativeStmt{} = emptyFVs
-- ParStmt and TransStmt can't occur in commands, but it's not
-- convenient to error here so we just do what's convenient
{-
************************************************************************
* *
Arithmetic sequences
* *
************************************************************************
-}
-- | Rename an arithmetic sequence specification (@[a..]@, @[a,b..]@,
-- @[a..c]@, @[a,b..c]@), renaming each bound expression and unioning
-- their free variables.
rnArithSeq :: ArithSeqInfo RdrName -> RnM (ArithSeqInfo Name, FreeVars)
rnArithSeq (From e1)
  = do { (e1', fvs1) <- rnLExpr e1
       ; return (From e1', fvs1) }
rnArithSeq (FromThen e1 e2)
  = do { (e1', fvs1) <- rnLExpr e1
       ; (e2', fvs2) <- rnLExpr e2
       ; return (FromThen e1' e2', fvs1 `plusFV` fvs2) }
rnArithSeq (FromTo e1 e2)
  = do { (e1', fvs1) <- rnLExpr e1
       ; (e2', fvs2) <- rnLExpr e2
       ; return (FromTo e1' e2', fvs1 `plusFV` fvs2) }
rnArithSeq (FromThenTo e1 e2 e3)
  = do { (e1', fvs1) <- rnLExpr e1
       ; (e2', fvs2) <- rnLExpr e2
       ; (e3', fvs3) <- rnLExpr e3
       ; return (FromThenTo e1' e2' e3',
                 plusFVs [fvs1, fvs2, fvs3]) }
{-
************************************************************************
* *
\subsubsection{@Stmt@s: in @do@ expressions}
* *
************************************************************************
-}
{-
Note [Deterministic ApplicativeDo and RecursiveDo desugaring]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Both ApplicativeDo and RecursiveDo need to create tuples not
present in the source text.
For ApplicativeDo we create:
(a,b,c) <- (\c b a -> (a,b,c)) <$>
For RecursiveDo we create:
mfix (\ ~(a,b,c) -> do ...; return (a',b',c'))
The order of the components in those tuples needs to be stable
across recompilations, otherwise they can get optimized differently
and we end up with incompatible binaries.
To get a stable order we use nameSetElemsStable.
See Note [Deterministic UniqFM] to learn more about nondeterminism.
-}
-- | Rename some Stmts, with no post-processing step.  This is
-- 'rnStmtsWithPostProcessing' applied to 'noPostProcessStmts', which
-- simply drops the per-statement FreeVars annotations.
rnStmts :: Outputable (body RdrName)
        => HsStmtContext Name
        -> (Located (body RdrName) -> RnM (Located (body Name), FreeVars))
           -- ^ How to rename the body of each statement (e.g. rnLExpr)
        -> [LStmt RdrName (Located (body RdrName))]
           -- ^ Statements
        -> ([Name] -> RnM (thing, FreeVars))
           -- ^ if these statements scope over something, this renames it
           -- and returns the result.
        -> RnM (([LStmt Name (Located (body Name))], thing), FreeVars)
rnStmts ctxt rnBody = rnStmtsWithPostProcessing ctxt rnBody noPostProcessStmts
-- | like 'rnStmts' but applies a post-processing step to the renamed
-- Stmts.  The statements come out of 'rnStmtsWithFreeVars' annotated
-- with their individual free-variable sets, which the post-processor
-- (e.g. the ApplicativeDo rearrangement) may consult before stripping.
rnStmtsWithPostProcessing
        :: Outputable (body RdrName)
        => HsStmtContext Name
        -> (Located (body RdrName) -> RnM (Located (body Name), FreeVars))
           -- ^ How to rename the body of each statement (e.g. rnLExpr)
        -> (HsStmtContext Name
              -> [(LStmt Name (Located (body Name)), FreeVars)]
              -> RnM ([LStmt Name (Located (body Name))], FreeVars))
           -- ^ postprocess the statements
        -> [LStmt RdrName (Located (body RdrName))]
           -- ^ Statements
        -> ([Name] -> RnM (thing, FreeVars))
           -- ^ if these statements scope over something, this renames it
           -- and returns the result.
        -> RnM (([LStmt Name (Located (body Name))], thing), FreeVars)
rnStmtsWithPostProcessing ctxt rnBody ppStmts stmts thing_inside
 = do { ((stmts', thing), fvs) <-
          rnStmtsWithFreeVars ctxt rnBody stmts thing_inside
      ; (pp_stmts, fvs') <- ppStmts ctxt stmts'
      ; return ((pp_stmts, thing), fvs `plusFV` fvs')
      }
-- | maybe rearrange statements according to the ApplicativeDo
-- transformation.  Only fires when -XApplicativeDo is enabled AND the
-- context is a genuine do-expression (not mdo, arrow-do or a
-- comprehension); otherwise it just strips the FreeVars annotations.
postProcessStmtsForApplicativeDo
  :: HsStmtContext Name
  -> [(ExprLStmt Name, FreeVars)]
  -> RnM ([ExprLStmt Name], FreeVars)
postProcessStmtsForApplicativeDo ctxt stmts
  = do { -- rearrange the statements using ApplicativeStmt if
         -- -XApplicativeDo is on. Also strip out the FreeVars attached
         -- to each Stmt body.
         ado_is_on <- xoptM LangExt.ApplicativeDo
       ; let is_do_expr | DoExpr <- ctxt = True
                        | otherwise = False
       ; if ado_is_on && is_do_expr
            then rearrangeForApplicativeDo ctxt stmts
            else noPostProcessStmts ctxt stmts }
-- | strip the FreeVars annotations from statements, performing no
-- rearrangement.  Returns an empty free-variable set for the
-- post-processing step itself.
noPostProcessStmts
  :: HsStmtContext Name
  -> [(LStmt Name (Located (body Name)), FreeVars)]
  -> RnM ([LStmt Name (Located (body Name))], FreeVars)
noPostProcessStmts _ stmts = return (map fst stmts, emptyNameSet)
rnStmtsWithFreeVars :: Outputable (body RdrName)
=> HsStmtContext Name
-> (Located (body RdrName) -> RnM (Located (body Name), FreeVars))
-> [LStmt RdrName (Located (body RdrName))]
-> ([Name] -> RnM (thing, FreeVars))
-> RnM ( ([(LStmt Name (Located (body Name)), FreeVars)], thing)
, FreeVars)
-- Each Stmt body is annotated with its FreeVars, so that
-- we can rearrange statements for ApplicativeDo.
--
-- Variables bound by the Stmts, and mentioned in thing_inside,
-- do not appear in the result FreeVars
rnStmtsWithFreeVars ctxt _ [] thing_inside
= do { checkEmptyStmts ctxt
; (thing, fvs) <- thing_inside []
; return (([], thing), fvs) }
rnStmtsWithFreeVars MDoExpr rnBody stmts thing_inside -- Deal with mdo
= -- Behave like do { rec { ...all but last... }; last }
do { ((stmts1, (stmts2, thing)), fvs)
<- rnStmt MDoExpr rnBody (noLoc $ mkRecStmt all_but_last) $ \ _ ->
do { last_stmt' <- checkLastStmt MDoExpr last_stmt
; rnStmt MDoExpr rnBody last_stmt' thing_inside }
; return (((stmts1 ++ stmts2), thing), fvs) }
where
Just (all_but_last, last_stmt) = snocView stmts
rnStmtsWithFreeVars ctxt rnBody (lstmt@(L loc _) : lstmts) thing_inside
| null lstmts
= setSrcSpan loc $
do { lstmt' <- checkLastStmt ctxt lstmt
; rnStmt ctxt rnBody lstmt' thing_inside }
| otherwise
= do { ((stmts1, (stmts2, thing)), fvs)
<- setSrcSpan loc $
do { checkStmt ctxt lstmt
; rnStmt ctxt rnBody lstmt $ \ bndrs1 ->
rnStmtsWithFreeVars ctxt rnBody lstmts $ \ bndrs2 ->
thing_inside (bndrs1 ++ bndrs2) }
; return (((stmts1 ++ stmts2), thing), fvs) }
----------------------
rnStmt :: Outputable (body RdrName)
=> HsStmtContext Name
-> (Located (body RdrName) -> RnM (Located (body Name), FreeVars))
-- ^ How to rename the body of the statement
-> LStmt RdrName (Located (body RdrName))
-- ^ The statement
-> ([Name] -> RnM (thing, FreeVars))
-- ^ Rename the stuff that this statement scopes over
-> RnM ( ([(LStmt Name (Located (body Name)), FreeVars)], thing)
, FreeVars)
-- Variables bound by the Stmt, and mentioned in thing_inside,
-- do not appear in the result FreeVars
rnStmt ctxt rnBody (L loc (LastStmt body noret _)) thing_inside
= do { (body', fv_expr) <- rnBody body
; (ret_op, fvs1) <- lookupStmtName ctxt returnMName
; (thing, fvs3) <- thing_inside []
; return (([(L loc (LastStmt body' noret ret_op), fv_expr)], thing),
fv_expr `plusFV` fvs1 `plusFV` fvs3) }
rnStmt ctxt rnBody (L loc (BodyStmt body _ _ _)) thing_inside
= do { (body', fv_expr) <- rnBody body
; (then_op, fvs1) <- lookupStmtName ctxt thenMName
; (guard_op, fvs2) <- if isListCompExpr ctxt
then lookupStmtName ctxt guardMName
else return (noSyntaxExpr, emptyFVs)
-- Only list/parr/monad comprehensions use 'guard'
-- Also for sub-stmts of same eg [ e | x<-xs, gd | blah ]
-- Here "gd" is a guard
; (thing, fvs3) <- thing_inside []
; return (([(L loc (BodyStmt body'
then_op guard_op placeHolderType), fv_expr)], thing),
fv_expr `plusFV` fvs1 `plusFV` fvs2 `plusFV` fvs3) }
rnStmt ctxt rnBody (L loc (BindStmt pat body _ _ _)) thing_inside
= do { (body', fv_expr) <- rnBody body
-- The binders do not scope over the expression
; (bind_op, fvs1) <- lookupStmtName ctxt bindMName
; xMonadFailEnabled <- fmap (xopt LangExt.MonadFailDesugaring) getDynFlags
; let failFunction | xMonadFailEnabled = failMName
| otherwise = failMName_preMFP
; (fail_op, fvs2) <- lookupSyntaxName failFunction
; rnPat (StmtCtxt ctxt) pat $ \ pat' -> do
{ (thing, fvs3) <- thing_inside (collectPatBinders pat')
; return (( [( L loc (BindStmt pat' body' bind_op fail_op PlaceHolder)
, fv_expr )]
, thing),
fv_expr `plusFV` fvs1 `plusFV` fvs2 `plusFV` fvs3) }}
-- fv_expr shouldn't really be filtered by the rnPatsAndThen
-- but it does not matter because the names are unique
rnStmt _ _ (L loc (LetStmt (L l binds))) thing_inside
= do { rnLocalBindsAndThen binds $ \binds' bind_fvs -> do
{ (thing, fvs) <- thing_inside (collectLocalBinders binds')
; return (([(L loc (LetStmt (L l binds')), bind_fvs)], thing), fvs) } }
rnStmt ctxt rnBody (L loc (RecStmt { recS_stmts = rec_stmts })) thing_inside
= do { (return_op, fvs1) <- lookupStmtName ctxt returnMName
; (mfix_op, fvs2) <- lookupStmtName ctxt mfixName
; (bind_op, fvs3) <- lookupStmtName ctxt bindMName
; let empty_rec_stmt = emptyRecStmtName { recS_ret_fn = return_op
, recS_mfix_fn = mfix_op
, recS_bind_fn = bind_op }
-- Step1: Bring all the binders of the mdo into scope
-- (Remember that this also removes the binders from the
-- finally-returned free-vars.)
-- And rename each individual stmt, making a
-- singleton segment. At this stage the FwdRefs field
-- isn't finished: it's empty for all except a BindStmt
-- for which it's the fwd refs within the bind itself
-- (This set may not be empty, because we're in a recursive
-- context.)
; rnRecStmtsAndThen rnBody rec_stmts $ \ segs -> do
{ let bndrs = nameSetElemsStable $
foldr (unionNameSet . (\(ds,_,_,_) -> ds))
emptyNameSet
segs
-- See Note [Deterministic ApplicativeDo and RecursiveDo desugaring]
; (thing, fvs_later) <- thing_inside bndrs
; let (rec_stmts', fvs) = segmentRecStmts loc ctxt empty_rec_stmt segs fvs_later
-- We aren't going to try to group RecStmts with
-- ApplicativeDo, so attaching empty FVs is fine.
; return ( ((zip rec_stmts' (repeat emptyNameSet)), thing)
, fvs `plusFV` fvs1 `plusFV` fvs2 `plusFV` fvs3) } }
rnStmt ctxt _ (L loc (ParStmt segs _ _ _)) thing_inside
= do { (mzip_op, fvs1) <- lookupStmtNamePoly ctxt mzipName
; (bind_op, fvs2) <- lookupStmtName ctxt bindMName
; (return_op, fvs3) <- lookupStmtName ctxt returnMName
; ((segs', thing), fvs4) <- rnParallelStmts (ParStmtCtxt ctxt) return_op segs thing_inside
; return ( ([(L loc (ParStmt segs' mzip_op bind_op placeHolderType), fvs4)], thing)
, fvs1 `plusFV` fvs2 `plusFV` fvs3 `plusFV` fvs4) }
rnStmt ctxt _ (L loc (TransStmt { trS_stmts = stmts, trS_by = by, trS_form = form
, trS_using = using })) thing_inside
= do { -- Rename the 'using' expression in the context before the transform is begun
(using', fvs1) <- rnLExpr using
-- Rename the stmts and the 'by' expression
-- Keep track of the variables mentioned in the 'by' expression
; ((stmts', (by', used_bndrs, thing)), fvs2)
<- rnStmts (TransStmtCtxt ctxt) rnLExpr stmts $ \ bndrs ->
do { (by', fvs_by) <- mapMaybeFvRn rnLExpr by
; (thing, fvs_thing) <- thing_inside bndrs
; let fvs = fvs_by `plusFV` fvs_thing
used_bndrs = filter (`elemNameSet` fvs) bndrs
-- The paper (Fig 5) has a bug here; we must treat any free variable
-- of the "thing inside", **or of the by-expression**, as used
; return ((by', used_bndrs, thing), fvs) }
-- Lookup `return`, `(>>=)` and `liftM` for monad comprehensions
; (return_op, fvs3) <- lookupStmtName ctxt returnMName
; (bind_op, fvs4) <- lookupStmtName ctxt bindMName
; (fmap_op, fvs5) <- case form of
ThenForm -> return (noExpr, emptyFVs)
_ -> lookupStmtNamePoly ctxt fmapName
; let all_fvs = fvs1 `plusFV` fvs2 `plusFV` fvs3
`plusFV` fvs4 `plusFV` fvs5
bndr_map = used_bndrs `zip` used_bndrs
-- See Note [TransStmt binder map] in HsExpr
; traceRn (text "rnStmt: implicitly rebound these used binders:" <+> ppr bndr_map)
; return (([(L loc (TransStmt { trS_stmts = stmts', trS_bndrs = bndr_map
, trS_by = by', trS_using = using', trS_form = form
, trS_ret = return_op, trS_bind = bind_op
, trS_bind_arg_ty = PlaceHolder
, trS_fmap = fmap_op }), fvs2)], thing), all_fvs) }
rnStmt _ _ (L _ ApplicativeStmt{}) _ =
panic "rnStmt: ApplicativeStmt"
-- | Rename the branches of a parallel comprehension ('ParStmt').
-- Each branch is renamed in a fresh copy of the original 'LocalRdrEnv'
-- so branches cannot capture each other's binders; the continuation is
-- finally run with the binders of /all/ branches in scope.
rnParallelStmts :: forall thing. HsStmtContext Name
                -> SyntaxExpr Name   -- ^ the @return@ operator recorded in each block
                -> [ParStmtBlock RdrName RdrName]
                -> ([Name] -> RnM (thing, FreeVars))
                -> RnM (([ParStmtBlock Name Name], thing), FreeVars)
-- Note [Renaming parallel Stmts]
rnParallelStmts ctxt return_op segs thing_inside
  = do { orig_lcl_env <- getLocalRdrEnv
       ; rn_segs orig_lcl_env [] segs }
  where
    rn_segs :: LocalRdrEnv
            -> [Name] -> [ParStmtBlock RdrName RdrName]
            -> RnM (([ParStmtBlock Name Name], thing), FreeVars)
    rn_segs _ bndrs_so_far []
      = do { -- All branches done: check for binders duplicated *across*
             -- branches (point (b) of Note [Renaming parallel Stmts])
             let (bndrs', dups) = removeDups cmpByOcc bndrs_so_far
           ; mapM_ dupErr dups
           ; (thing, fvs) <- bindLocalNames bndrs' (thing_inside bndrs')
           ; return (([], thing), fvs) }
    rn_segs env bndrs_so_far (ParStmtBlock stmts _ _ : segs)
      = do { ((stmts', (used_bndrs, segs', thing)), fvs)
                    <- rnStmts ctxt rnLExpr stmts $ \ bndrs ->
                       -- Reset the LocalRdrEnv so this branch's binders do
                       -- not scope over the remaining branches (point (c))
                       setLocalRdrEnv env $ do
                       { ((segs', thing), fvs) <- rn_segs env (bndrs ++ bndrs_so_far) segs
                       ; let used_bndrs = filter (`elemNameSet` fvs) bndrs
                       ; return ((used_bndrs, segs', thing), fvs) }
           ; let seg' = ParStmtBlock stmts' used_bndrs return_op
           ; return ((seg':segs', thing), fvs) }
    cmpByOcc n1 n2 = nameOccName n1 `compare` nameOccName n2
    dupErr vs = addErr (text "Duplicate binding in parallel list comprehension for:"
                    <+> quotes (ppr (head vs)))
-- | Look up the syntax operator a statement should use (e.g. @(>>=)@).
-- Like 'lookupSyntaxName', but honours @RebindableSyntax@ only in the
-- contexts where it applies (see 'rebindableContext').
lookupStmtName :: HsStmtContext Name -> Name -> RnM (SyntaxExpr Name, FreeVars)
lookupStmtName ctxt std_name
  = if rebindableContext ctxt
       then lookupSyntaxName std_name                    -- may be user-rebound
       else return (mkRnSyntaxExpr std_name, emptyFVs)   -- always the built-in one
-- | Like 'lookupStmtName', but returns a plain 'HsExpr' rather than a
-- 'SyntaxExpr', for operators used at polymorphic types (e.g. @mzip@,
-- @fmap@).  Falls back to the built-in name unless both the context
-- permits rebinding and @RebindableSyntax@ is actually switched on.
lookupStmtNamePoly :: HsStmtContext Name -> Name -> RnM (HsExpr Name, FreeVars)
lookupStmtNamePoly ctxt name
  | not (rebindableContext ctxt)
  = builtin
  | otherwise
  = do { rebindable_on <- xoptM LangExt.RebindableSyntax
       ; if not rebindable_on
            then builtin
            else do { fm <- lookupOccRn (nameRdrName name)
                    ; return (HsVar (noLoc fm), unitFV fm) } }
  where
    builtin = return (HsVar (noLoc name), emptyFVs)
-- | Is this a context where we respect RebindableSyntax?
-- ListComp/PArrComp are never rebindable, and neither is ArrowExpr,
-- which has its own desugarer in DsArrows.
rebindableContext :: HsStmtContext Name -> Bool
rebindableContext DoExpr              = True
rebindableContext MDoExpr             = True
rebindableContext MonadComp           = True
rebindableContext GhciStmtCtxt        = True   -- I suppose?
rebindableContext ListComp            = False
rebindableContext PArrComp            = False
rebindableContext ArrowExpr           = False
rebindableContext (PatGuard {})       = False
rebindableContext (ParStmtCtxt c)     = rebindableContext c   -- Look inside to
rebindableContext (TransStmtCtxt c)   = rebindableContext c   -- the parent context
{-
Note [Renaming parallel Stmts]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Renaming parallel statements is painful. Given, say
[ a+c | a <- as, bs <- bss
| c <- bs, a <- ds ]
Note that
(a) In order to report "Defined but not used" about 'bs', we must
rename each group of Stmts with a thing_inside whose FreeVars
include at least {a,c}
(b) We want to report that 'a' is illegally bound in both branches
(c) The 'bs' in the second group must obviously not be captured by
the binding in the first group
To satisfy (a) we nest the segments.
To satisfy (b) we check for duplicates just before thing_inside.
To satisfy (c) we reset the LocalRdrEnv each time.
************************************************************************
* *
\subsubsection{mdo expressions}
* *
************************************************************************
-}
type FwdRefs = NameSet   -- Forward references within a rec/mdo group
-- | One renamed statement (or group of statements) of a rec/mdo block,
-- annotated with its binders, its uses, and its forward references.
type Segment stmts = (Defs,
                      Uses,     -- May include defs
                      FwdRefs,  -- A subset of uses that are
                                --   (a) used before they are bound in this segment, or
                                --   (b) used here, and bound in subsequent segments
                      stmts)    -- Either Stmt or [Stmt]
-- wrapper that does both the left- and right-hand sides
--
-- Renames a group of recursive statements (rec/mdo): first the LHS
-- patterns (bringing their binders and fixities into scope), then the
-- RHSs, and finally runs the continuation on the resulting singleton
-- Segments.  Also emits unused-binding warnings for the group.
rnRecStmtsAndThen :: Outputable (body RdrName) =>
                     (Located (body RdrName)
                  -> RnM (Located (body Name), FreeVars))
                  -> [LStmt RdrName (Located (body RdrName))]
                  -- assumes that the FreeVars returned includes
                  -- the FreeVars of the Segments
                  -> ([Segment (LStmt Name (Located (body Name)))]
                  -> RnM (a, FreeVars))
                  -> RnM (a, FreeVars)
rnRecStmtsAndThen rnBody s cont
  = do { -- (A) Make the mini fixity env for all of the stmts
         fix_env <- makeMiniFixityEnv (collectRecStmtsFixities s)
         -- (B) Do the LHSes
       ; new_lhs_and_fv <- rn_rec_stmts_lhs fix_env s
         -- ...bring them and their fixities into scope
       ; let bound_names = collectLStmtsBinders (map fst new_lhs_and_fv)
             -- Fake uses of variables introduced implicitly (warning suppression, see #4404)
             implicit_uses = lStmtsImplicits (map fst new_lhs_and_fv)
       ; bindLocalNamesFV bound_names $
         addLocalFixities fix_env bound_names $ do
         -- (C) do the right-hand-sides and thing-inside
       { segs <- rn_rec_stmts rnBody bound_names new_lhs_and_fv
       ; (res, fvs) <- cont segs
         -- implicit_uses suppresses spurious unused-binder warnings
       ; warnUnusedLocalBinds bound_names (fvs `unionNameSet` implicit_uses)
       ; return (res, fvs) }}
-- | Get all the fixity declarations in any Let statement of the group,
-- so they can be put into the mini fixity environment for the rec block.
collectRecStmtsFixities :: [LStmtLR RdrName RdrName body] -> [LFixitySig RdrName]
collectRecStmtsFixities = foldr stmt_fixities []
  where
    -- Only unrenamed value-binding Lets can carry fixity signatures
    stmt_fixities (L _ (LetStmt (L _ (HsValBinds (ValBindsIn _ sigs))))) acc
      = foldr sig_fixity acc sigs
    stmt_fixities _ acc = acc

    sig_fixity (L loc (FixSig s)) acc = L loc s : acc
    sig_fixity _                  acc = acc
-- left-hand sides
--
-- Rename the binding pattern(s) of one statement inside a rec/mdo block,
-- leaving the right-hand side untouched (it is renamed later by
-- rn_rec_stmt, once all binders of the group are in scope).
rn_rec_stmt_lhs :: Outputable body => MiniFixityEnv
                -> LStmt RdrName body
                -- rename LHS, and return its FVs
                -- Warning: we will only need the FreeVars below in the case of a BindStmt,
                -- so we don't bother to compute it accurately in the other cases
                -> RnM [(LStmtLR Name RdrName body, FreeVars)]
rn_rec_stmt_lhs _ (L loc (BodyStmt body a b c))
  = return [(L loc (BodyStmt body a b c), emptyFVs)]
rn_rec_stmt_lhs _ (L loc (LastStmt body noret a))
  = return [(L loc (LastStmt body noret a), emptyFVs)]
rn_rec_stmt_lhs fix_env (L loc (BindStmt pat body a b t))
  = do
      -- should the ctxt be MDo instead?
      (pat', fv_pat) <- rnBindPat (localRecNameMaker fix_env) pat
      return [(L loc (BindStmt pat' body a b t),
               fv_pat)]
rn_rec_stmt_lhs _ (L _ (LetStmt (L _ binds@(HsIPBinds _))))
  = failWith (badIpBinds (text "an mdo expression") binds)
rn_rec_stmt_lhs fix_env (L loc (LetStmt (L l(HsValBinds binds))))
  = do (_bound_names, binds') <- rnLocalValBindsLHS fix_env binds
       return [(L loc (LetStmt (L l (HsValBinds binds'))),
                -- Warning: this is bogus; see function invariant
                emptyFVs
                )]
-- XXX Do we need to do something with the return and mfix names?
rn_rec_stmt_lhs fix_env (L _ (RecStmt { recS_stmts = stmts }))  -- Flatten Rec inside Rec
    = rn_rec_stmts_lhs fix_env stmts
rn_rec_stmt_lhs _ stmt@(L _ (ParStmt {}))       -- Syntactically illegal in mdo
  = pprPanic "rn_rec_stmt" (ppr stmt)
rn_rec_stmt_lhs _ stmt@(L _ (TransStmt {}))     -- Syntactically illegal in mdo
  = pprPanic "rn_rec_stmt" (ppr stmt)
rn_rec_stmt_lhs _ stmt@(L _ (ApplicativeStmt {})) -- Shouldn't appear yet
  = pprPanic "rn_rec_stmt" (ppr stmt)
rn_rec_stmt_lhs _ (L _ (LetStmt (L _ EmptyLocalBinds)))
  = panic "rn_rec_stmt LetStmt EmptyLocalBinds"
-- | Rename the left-hand sides of a whole group of rec statements,
-- then check the complete binder set for duplicates.
rn_rec_stmts_lhs :: Outputable body => MiniFixityEnv
                 -> [LStmt RdrName body]
                 -> RnM [(LStmtLR Name RdrName body, FreeVars)]
rn_rec_stmts_lhs fix_env stmts
  = do { renamed <- concatMapM (rn_rec_stmt_lhs fix_env) stmts
         -- Duplicate checking must happen here, over the whole group:
         -- the variables of the Stmts are not all bound in one go
         -- with bindLocatedLocals, so nothing else catches the dups.
       ; checkDupNames (collectLStmtsBinders (map fst renamed))
       ; return renamed }
-- right-hand-sides
--
-- NOTE(review): each returned Segment is (defs, uses, fwd-refs, stmt);
-- at this stage fwd-refs only covers refs *within* the stmt itself
-- (cross-segment fwd refs are added later by addFwdRefs).
rn_rec_stmt :: (Outputable (body RdrName)) =>
               (Located (body RdrName) -> RnM (Located (body Name), FreeVars))
            -> [Name]
            -> (LStmtLR Name RdrName (Located (body RdrName)), FreeVars)
            -> RnM [Segment (LStmt Name (Located (body Name)))]
        -- Rename a Stmt that is inside a RecStmt (or mdo)
        -- Assumes all binders are already in scope
        -- Turns each stmt into a singleton Stmt
rn_rec_stmt rnBody _ (L loc (LastStmt body noret _), _)
  = do  { (body', fv_expr) <- rnBody body
        ; (ret_op, fvs1)   <- lookupSyntaxName returnMName
        ; return [(emptyNameSet, fv_expr `plusFV` fvs1, emptyNameSet,
                   L loc (LastStmt body' noret ret_op))] }
rn_rec_stmt rnBody _ (L loc (BodyStmt body _ _ _), _)
  = do { (body', fvs) <- rnBody body
       ; (then_op, fvs1) <- lookupSyntaxName thenMName
       ; return [(emptyNameSet, fvs `plusFV` fvs1, emptyNameSet,
                  L loc (BodyStmt body' then_op noSyntaxExpr placeHolderType))] }
rn_rec_stmt rnBody _ (L loc (BindStmt pat' body _ _ _), fv_pat)
  = do { (body', fv_expr) <- rnBody body
       ; (bind_op, fvs1) <- lookupSyntaxName bindMName
         -- Pick the right 'fail' depending on whether MonadFail
         -- desugaring is in effect
       ; xMonadFailEnabled <- fmap (xopt LangExt.MonadFailDesugaring) getDynFlags
       ; let failFunction | xMonadFailEnabled = failMName
                          | otherwise         = failMName_preMFP
       ; (fail_op, fvs2) <- lookupSyntaxName failFunction
       ; let bndrs = mkNameSet (collectPatBinders pat')
             fvs   = fv_expr `plusFV` fv_pat `plusFV` fvs1 `plusFV` fvs2
         -- A bind's own fwd refs: binders of the pattern that its RHS uses
       ; return [(bndrs, fvs, bndrs `intersectNameSet` fvs,
                  L loc (BindStmt pat' body' bind_op fail_op PlaceHolder))] }
rn_rec_stmt _ _ (L _ (LetStmt (L _ binds@(HsIPBinds _))), _)
  = failWith (badIpBinds (text "an mdo expression") binds)
rn_rec_stmt _ all_bndrs (L loc (LetStmt (L l (HsValBinds binds'))), _)
  = do { (binds', du_binds) <- rnLocalValBindsRHS (mkNameSet all_bndrs) binds'
         -- fixities and unused are handled above in rnRecStmtsAndThen
       ; let fvs = allUses du_binds
       ; return [(duDefs du_binds, fvs, emptyNameSet,
                  L loc (LetStmt (L l (HsValBinds binds'))))] }
-- no RecStmt case because they get flattened above when doing the LHSes
rn_rec_stmt _ _ stmt@(L _ (RecStmt {}), _)
  = pprPanic "rn_rec_stmt: RecStmt" (ppr stmt)
rn_rec_stmt _ _ stmt@(L _ (ParStmt {}), _)       -- Syntactically illegal in mdo
  = pprPanic "rn_rec_stmt: ParStmt" (ppr stmt)
rn_rec_stmt _ _ stmt@(L _ (TransStmt {}), _)     -- Syntactically illegal in mdo
  = pprPanic "rn_rec_stmt: TransStmt" (ppr stmt)
rn_rec_stmt _ _ (L _ (LetStmt (L _ EmptyLocalBinds)), _)
  = panic "rn_rec_stmt: LetStmt EmptyLocalBinds"
rn_rec_stmt _ _ stmt@(L _ (ApplicativeStmt {}), _)
  = pprPanic "rn_rec_stmt: ApplicativeStmt" (ppr stmt)
-- | Rename the right-hand sides of every statement in the group,
-- producing one singleton Segment per statement.
rn_rec_stmts :: Outputable (body RdrName) =>
                (Located (body RdrName) -> RnM (Located (body Name), FreeVars))
             -> [Name]
             -> [(LStmtLR Name RdrName (Located (body RdrName)), FreeVars)]
             -> RnM [Segment (LStmt Name (Located (body Name)))]
rn_rec_stmts rnBody bndrs stmts
  = concatMapM (rn_rec_stmt rnBody bndrs) stmts
---------------------------------------------
-- Turn the renamed singleton segments of a rec/mdo block back into
-- statements.  For mdo we glom interdependent segments into minimal
-- RecStmts (see Note [Segmenting mdo]); for "do rec" we keep one big
-- RecStmt covering the whole group.
segmentRecStmts :: SrcSpan -> HsStmtContext Name
                -> Stmt Name body           -- empty RecStmt with SyntaxOps filled in
                -> [Segment (LStmt Name body)] -> FreeVars
                -> ([LStmt Name body], FreeVars)
segmentRecStmts loc ctxt empty_rec_stmt segs fvs_later
  | null segs
  = ([], fvs_later)
  | MDoExpr <- ctxt
  = segsToStmts empty_rec_stmt grouped_segs fvs_later
        -- Step 4: Turn the segments into Stmts
        --         Use RecStmt when and only when there are fwd refs
        --         Also gather up the uses from the end towards the
        --         start, so we can tell the RecStmt which things are
        --         used 'after' the RecStmt
  | otherwise
  = ([ L loc $
       empty_rec_stmt { recS_stmts = ss
                      , recS_later_ids = nameSetElemsStable
                                           (defs `intersectNameSet` fvs_later)
                      , recS_rec_ids   = nameSetElemsStable
                                           (defs `intersectNameSet` uses) }]
        -- See Note [Deterministic ApplicativeDo and RecursiveDo desugaring]
    , uses `plusFV` fvs_later)
  where
    (defs_s, uses_s, _, ss) = unzip4 segs
    defs = plusFVs defs_s
    uses = plusFVs uses_s
        -- Step 2: Fill in the fwd refs.
        --         The segments are all singletons, but their fwd-ref
        --         field mentions all the things used by the segment
        --         that are bound after their use
    segs_w_fwd_refs = addFwdRefs segs
        -- Step 3: Group together the segments to make bigger segments
        --         Invariant: in the result, no segment uses a variable
        --                    bound in a later segment
    grouped_segs = glomSegments ctxt segs_w_fwd_refs
----------------------------
-- | So far each segment's fwd-ref field only records forward refs
-- *within* its own Stmt (which happens for bind: x <- ...x...).
-- Walking from the last segment backwards, add refs to any variable
-- that is used here but defined by some *later* segment.
addFwdRefs :: [Segment a] -> [Segment a]
addFwdRefs = fst . foldr annotate ([], emptyNameSet)
  where
    annotate (defs, uses, fwds, stmts) (done, later_defs)
      = ((defs, uses, fwds', stmts) : done, defs `unionNameSet` later_defs)
      where
        -- anything used here and bound downstream is a forward ref
        fwds' = fwds `unionNameSet` (uses `intersectNameSet` later_defs)
{-
Note [Segmenting mdo]
~~~~~~~~~~~~~~~~~~~~~
NB. June 7 2012: We only glom segments that appear in an explicit mdo;
and leave those found in "do rec"'s intact. See
http://ghc.haskell.org/trac/ghc/ticket/4148 for the discussion
leading to this design choice. Hence the test in segmentRecStmts.
Note [Glomming segments]
~~~~~~~~~~~~~~~~~~~~~~~~
Glomming the singleton segments of an mdo into minimal recursive groups.
At first I thought this was just strongly connected components, but
there's an important constraint: the order of the stmts must not change.
Consider
mdo { x <- ...y...
p <- z
y <- ...x...
q <- x
z <- y
r <- x }
Here, the first stmt mentions 'y', which is bound in the third.
But that means that the innocent second stmt (p <- z) gets caught
up in the recursion. And that in turn means that the binding for
'z' has to be included... and so on.
Start at the tail { r <- x }
Now add the next one { z <- y ; r <- x }
Now add one more { q <- x ; z <- y ; r <- x }
Now one more... but this time we have to group a bunch into rec
{ rec { y <- ...x... ; q <- x ; z <- y } ; r <- x }
Now one more, which we can add on without a rec
{ p <- z ;
rec { y <- ...x... ; q <- x ; z <- y } ;
r <- x }
Finally we add the last one; since it mentions y we have to
glom it together with the first two groups
{ rec { x <- ...y...; p <- z ; y <- ...x... ;
q <- x ; z <- y } ;
r <- x }
-}
-- Glom the singleton segments of an mdo into minimal recursive groups,
-- preserving statement order: each output segment absorbs every earlier
-- segment whose definitions it (transitively) uses.
glomSegments :: HsStmtContext Name
             -> [Segment (LStmt Name body)]
             -> [Segment [LStmt Name body]] -- Each segment has a non-empty list of Stmts
        -- See Note [Glomming segments]
glomSegments _ [] = []
glomSegments ctxt ((defs,uses,fwds,stmt) : segs)
        -- Actually stmts will always be a singleton
  = (seg_defs, seg_uses, seg_fwds, seg_stmts) : others
  where
    segs' = glomSegments ctxt segs
    -- pull out of segs' everything this statement's uses depend on
    (extras, others) = grab uses segs'
    (ds, us, fs, ss) = unzip4 extras
    -- merge this statement with everything it grabbed
    seg_defs  = plusFVs ds `plusFV` defs
    seg_uses  = plusFVs us `plusFV` uses
    seg_fwds  = plusFVs fs `plusFV` fwds
    seg_stmts = stmt : concat ss
grab :: NameSet        -- The client
     -> [Segment a]
     -> ([Segment a],  -- Needed by the 'client'
         [Segment a])  -- Not needed by the client
-- The result is simply a split of the input: everything up to and
-- including the last segment whose defs the client uses goes in the
-- first component (order preserved), the rest in the second.
grab uses dus
  = (reverse needed, reverse not_needed)
  where
    -- span p == break (not . p), so scanning the reversed list with
    -- break finds the trailing run of segments the client doesn't need
    (not_needed, needed) = break is_needed (reverse dus)
    is_needed (defs, _, _, _) = defs `intersectsNameSet` uses
----------------------------------------------------
-- Turn each glommed segment into either its single statement (when it
-- is non-recursive) or a RecStmt wrapping its statements, threading the
-- later-used free variables from back to front.
segsToStmts :: Stmt Name body                  -- A RecStmt with the SyntaxOps filled in
            -> [Segment [LStmt Name body]]     -- Each Segment has a non-empty list of Stmts
            -> FreeVars                        -- Free vars used 'later'
            -> ([LStmt Name body], FreeVars)
segsToStmts _ [] fvs_later = ([], fvs_later)
segsToStmts empty_rec_stmt ((defs, uses, fwds, ss) : segs) fvs_later
  = ASSERT( not (null ss) )
    (new_stmt : later_stmts, later_uses `plusFV` uses)
  where
    (later_stmts, later_uses) = segsToStmts empty_rec_stmt segs fvs_later
    new_stmt | non_rec   = head ss              -- singleton, no fwd refs: keep as-is
             | otherwise = L (getLoc (head ss)) rec_stmt
    rec_stmt = empty_rec_stmt { recS_stmts     = ss
                              , recS_later_ids = nameSetElemsStable used_later
                              , recS_rec_ids   = nameSetElemsStable fwds }
        -- See Note [Deterministic ApplicativeDo and RecursiveDo desugaring]
    non_rec    = isSingleton ss && isEmptyNameSet fwds
    used_later = defs `intersectNameSet` later_uses
                 -- The ones needed after the RecStmt
{-
************************************************************************
* *
ApplicativeDo
* *
************************************************************************
Note [ApplicativeDo]
= Example =
For a sequence of statements
do
x <- A
y <- B x
z <- C
return (f x y z)
We want to transform this to
(\(x,y) z -> f x y z) <$> (do x <- A; y <- B x; return (x,y)) <*> C
It would be easy to notice that "y <- B x" and "z <- C" are
independent and do something like this:
do
x <- A
(y,z) <- (,) <$> B x <*> C
return (f x y z)
But this isn't enough! A and C were also independent, and this
transformation loses the ability to do A and C in parallel.
The algorithm works by first splitting the sequence of statements into
independent "segments", and a separate "tail" (the final statement). In
our example above, the segments would be
[ x <- A
, y <- B x ]
[ z <- C ]
and the tail is:
return (f x y z)
Then we take these segments and make an Applicative expression from them:
(\(x,y) z -> return (f x y z))
<$> do { x <- A; y <- B x; return (x,y) }
<*> C
Finally, we recursively apply the transformation to each segment, to
discover any nested parallelism.
= Syntax & spec =
expr ::= ... | do {stmt_1; ..; stmt_n} expr | ...
stmt ::= pat <- expr
| (arg_1 | ... | arg_n) -- applicative composition, n>=1
| ... -- other kinds of statement (e.g. let)
arg ::= pat <- expr
| {stmt_1; ..; stmt_n} {var_1..var_n}
(note that in the actual implementation, the expr in a do statement is
represented by a LastStmt as the final stmt, this is just a
representational issue and may change later.)
== Transformation to introduce applicative stmts ==
ado {} tail = tail
ado {pat <- expr} {return expr'} = (mkArg(pat <- expr)); return expr'
ado {one} tail = one : tail
ado stmts tail
| n == 1 = ado before (ado after tail)
where (before,after) = split(stmts_1)
| n > 1 = (mkArg(stmts_1) | ... | mkArg(stmts_n)); tail
where
{stmts_1 .. stmts_n} = segments(stmts)
segments(stmts) =
-- divide stmts into segments with no interdependencies
mkArg({pat <- expr}) = (pat <- expr)
mkArg({stmt_1; ...; stmt_n}) =
{stmt_1; ...; stmt_n} {vars(stmt_1) u .. u vars(stmt_n)}
split({stmt_1; ..; stmt_n) =
({stmt_1; ..; stmt_i}, {stmt_i+1; ..; stmt_n})
-- 1 <= i <= n
-- i is a good place to insert a bind
== Desugaring for do ==
dsDo {} expr = expr
dsDo {pat <- rhs; stmts} expr =
rhs >>= \pat -> dsDo stmts expr
dsDo {(arg_1 | ... | arg_n)} (return expr) =
(\argpat (arg_1) .. argpat(arg_n) -> expr)
<$> argexpr(arg_1)
<*> ...
<*> argexpr(arg_n)
dsDo {(arg_1 | ... | arg_n); stmts} expr =
join (\argpat (arg_1) .. argpat(arg_n) -> dsDo stmts expr)
<$> argexpr(arg_1)
<*> ...
<*> argexpr(arg_n)
-}
-- | The 'Name's of @return@ and @pure@. These may not be 'returnName' and
-- 'pureName' due to @RebindableSyntax@.
-- (Passed to 'stmtTreeToStmts' so the final statement's @return@/@pure@
-- can be recognised; see the @needJoin@ calls there.)
data MonadNames = MonadNames { return_name, pure_name :: Name }
-- | rearrange a list of statements using ApplicativeDoStmt. See
-- Note [ApplicativeDo].
rearrangeForApplicativeDo
  :: HsStmtContext Name
  -> [(ExprLStmt Name, FreeVars)]
  -> RnM ([ExprLStmt Name], FreeVars)
rearrangeForApplicativeDo _ [] = return ([], emptyNameSet)
rearrangeForApplicativeDo _ [(one,_)] = return ([one], emptyNameSet)
rearrangeForApplicativeDo ctxt stmts0 = do
  -- -foptimal-applicative-do selects the O(n^3) dynamic-programming
  -- algorithm over the O(n^2) heuristic
  optimal_ado <- goptM Opt_OptimalApplicativeDo
  let stmt_tree | optimal_ado = mkStmtTreeOptimal stmts
                | otherwise = mkStmtTreeHeuristic stmts
  return_name <- lookupSyntaxName' returnMName
  pure_name <- lookupSyntaxName' pureAName
  let monad_names = MonadNames { return_name = return_name
                               , pure_name = pure_name }
  stmtTreeToStmts monad_names ctxt stmt_tree [last] last_fvs
 where
  -- split off the final statement; the 'error' branch is unreachable
  -- because the [] and singleton cases are handled by the equations above
  (stmts,(last,last_fvs)) = findLast stmts0
  findLast [] = error "findLast"
  findLast [last] = ([],last)
  findLast (x:xs) = (x:rest,last) where (rest,last) = findLast xs
-- | A tree of statements using a mixture of applicative and bind constructs.
data StmtTree a
  = StmtTreeOne a
  | StmtTreeBind (StmtTree a) (StmtTree a)
  | StmtTreeApplicative [StmtTree a]

-- | The payloads of a 'StmtTree', left to right.
flattenStmtTree :: StmtTree a -> [a]
flattenStmtTree (StmtTreeOne a)          = [a]
flattenStmtTree (StmtTreeBind l r)       = flattenStmtTree l ++ flattenStmtTree r
flattenStmtTree (StmtTreeApplicative ts) = concatMap flattenStmtTree ts
type ExprStmtTree = StmtTree (ExprLStmt Name, FreeVars)  -- each leaf: a stmt plus its free vars
type Cost = Int   -- cost metric minimised by 'mkStmtTreeOptimal' (each leaf costs 1)
-- | Turn a sequence of statements into an ExprStmtTree using a
-- heuristic algorithm. /O(n^2)/
--
-- Independent segments become the arms of a StmtTreeApplicative;
-- each multi-statement segment is split at a heuristically chosen
-- point (splitSegment) and recursively decomposed.
mkStmtTreeHeuristic :: [(ExprLStmt Name, FreeVars)] -> ExprStmtTree
mkStmtTreeHeuristic [one] = StmtTreeOne one
mkStmtTreeHeuristic stmts =
  case segments stmts of
    [one] -> split one
    segs -> StmtTreeApplicative (map split segs)
 where
  split [one] = StmtTreeOne one
  split stmts =
    StmtTreeBind (mkStmtTreeHeuristic before) (mkStmtTreeHeuristic after)
    where (before, after) = splitSegment stmts
-- | Turn a sequence of statements into an ExprStmtTree optimally,
-- using dynamic programming. /O(n^3)/
--
-- arr ! (lo,hi) caches the optimal tree (and its cost) for the
-- subsequence [lo..hi]; the answer is arr ! (0,n).
mkStmtTreeOptimal :: [(ExprLStmt Name, FreeVars)] -> ExprStmtTree
mkStmtTreeOptimal stmts =
  ASSERT(not (null stmts)) -- the empty case is handled by the caller;
                           -- we don't support empty StmtTrees.
  fst (arr ! (0,n))
  where
    n = length stmts - 1
    stmt_arr = listArray (0,n) stmts

    -- lazy cache of optimal trees for subsequences of the input
    arr :: Array (Int,Int) (ExprStmtTree, Cost)
    arr = array ((0,0),(n,n))
             [ ((lo,hi), tree lo hi)
             | lo <- [0..n]
             , hi <- [lo..n] ]

    -- compute the optimal tree for the sequence [lo..hi]
    tree lo hi
      | hi == lo = (StmtTreeOne (stmt_arr ! lo), 1)
      | otherwise =
         case segments [ stmt_arr ! i | i <- [lo..hi] ] of
           [] -> panic "mkStmtTree"
           [_one] -> split lo hi
           segs -> (StmtTreeApplicative trees, maximum costs)
             where
               bounds = scanl (\(_,hi) a -> (hi+1, hi + length a)) (0,lo-1) segs
               (trees,costs) = unzip (map (uncurry split) (tail bounds))

    -- find the best place to split the segment [lo..hi]
    split :: Int -> Int -> (ExprStmtTree, Cost)
    split lo hi
      | hi == lo = (StmtTreeOne (stmt_arr ! lo), 1)
      | otherwise = (StmtTreeBind before after, c1+c2)
        where
         -- As per the paper, for a sequence s1...sn, we want to find
         -- the split with the minimum cost, where the cost is the
         -- sum of the cost of the left and right subsequences.
         --
         -- As an optimisation (also in the paper) if the cost of
         -- s1..s(n-1) is different from the cost of s2..sn, we know
         -- that the optimal solution is the lower of the two. Only
         -- in the case that these two have the same cost do we need
         -- to do the exhaustive search.
         --
         ((before,c1),(after,c2))
           | hi - lo == 1
           = ((StmtTreeOne (stmt_arr ! lo), 1),
              (StmtTreeOne (stmt_arr ! hi), 1))
           | left_cost < right_cost
           = ((left,left_cost), (StmtTreeOne (stmt_arr ! hi), 1))
           | left_cost > right_cost
             -- BUG FIX: this guard used to be "otherwise", which made
             -- the equal-cost exhaustive search in the next guard
             -- unreachable, contradicting the comment above.
           = ((StmtTreeOne (stmt_arr ! lo), 1), (right,right_cost))
           | otherwise = minimumBy (comparing cost) alternatives
           where
             (left, left_cost) = arr ! (lo,hi-1)
             (right, right_cost) = arr ! (lo+1,hi)
             cost ((_,c1),(_,c2)) = c1 + c2
             alternatives = [ (arr ! (lo,k), arr ! (k+1,hi))
                            | k <- [lo .. hi-1] ]
-- | Turn the ExprStmtTree back into a sequence of statements, using
-- ApplicativeStmt where necessary.
stmtTreeToStmts
  :: MonadNames
  -> HsStmtContext Name
  -> ExprStmtTree
  -> [ExprLStmt Name]             -- ^ the "tail"
  -> FreeVars                     -- ^ free variables of the tail
  -> RnM ( [ExprLStmt Name]       -- ( output statements,
         , FreeVars )             -- , things we needed

-- If we have a single bind, and we can do it without a join, transform
-- to an ApplicativeStmt. This corresponds to the rule
--   dsBlock [pat <- rhs] (return expr) = expr <$> rhs
-- In the spec, but we do it here rather than in the desugarer,
-- because we need the typechecker to typecheck the <$> form rather than
-- the bind form, which would give rise to a Monad constraint.
stmtTreeToStmts monad_names ctxt (StmtTreeOne (L _ (BindStmt pat rhs _ _ _),_))
                tail _tail_fvs
  | isIrrefutableHsPat pat, (False,tail') <- needJoin monad_names tail
  -- WARNING: isIrrefutableHsPat on (HsPat Name) doesn't have enough info
  --          to know which types have only one constructor. So only
  --          tuples come out as irrefutable; other single-constructor
  --          types, and newtypes, will not. See the code for
  --          isIrrefutableHsPat
  = mkApplicativeStmt ctxt [ApplicativeArgOne pat rhs] False tail'

-- Any other single statement: emit it unchanged, in front of the tail.
stmtTreeToStmts _monad_names _ctxt (StmtTreeOne (s,_)) tail _tail_fvs =
  return (s : tail, emptyNameSet)

-- A bind node: convert the 'after' subtree first (it becomes the tail
-- of the 'before' subtree's conversion).
stmtTreeToStmts monad_names ctxt (StmtTreeBind before after) tail tail_fvs = do
  (stmts1, fvs1) <- stmtTreeToStmts monad_names ctxt after tail tail_fvs
  let tail1_fvs = unionNameSets (tail_fvs : map snd (flattenStmtTree after))
  (stmts2, fvs2) <- stmtTreeToStmts monad_names ctxt before stmts1 tail1_fvs
  return (stmts2, fvs1 `plusFV` fvs2)

-- An applicative node: each subtree becomes one ApplicativeArg.
stmtTreeToStmts monad_names ctxt (StmtTreeApplicative trees) tail tail_fvs = do
   pairs <- mapM (stmtTreeArg ctxt tail_fvs) trees
   let (stmts', fvss) = unzip pairs
   let (need_join, tail') = needJoin monad_names tail
   (stmts, fvs) <- mkApplicativeStmt ctxt stmts' need_join tail'
   return (stmts, unionNameSets (fvs:fvss))
 where
   -- a single bind becomes a simple one-pattern arg...
   stmtTreeArg _ctxt _tail_fvs (StmtTreeOne (L _ (BindStmt pat exp _ _ _), _)) =
     return (ApplicativeArgOne pat exp, emptyFVs)
   -- ...anything else becomes a multi-statement arg returning a tuple of
   -- the binders that the tail actually needs
   stmtTreeArg ctxt tail_fvs tree = do
     let stmts = flattenStmtTree tree
         pvarset = mkNameSet (concatMap (collectStmtBinders.unLoc.fst) stmts)
                     `intersectNameSet` tail_fvs
         pvars = nameSetElemsStable pvarset
           -- See Note [Deterministic ApplicativeDo and RecursiveDo desugaring]
         pat = mkBigLHsVarPatTup pvars
         tup = mkBigLHsVarTup pvars
     (stmts',fvs2) <- stmtTreeToStmts monad_names ctxt tree [] pvarset
     (mb_ret, fvs1) <-
        -- no extra 'return' needed if the arg already ends in an
        -- ApplicativeStmt
        if | L _ ApplicativeStmt{} <- last stmts' ->
             return (unLoc tup, emptyNameSet)
           | otherwise -> do
             (ret,fvs) <- lookupStmtNamePoly ctxt returnMName
             return (HsApp (noLoc ret) tup, fvs)
     return ( ApplicativeArgMany stmts' mb_ret pat
            , fvs1 `plusFV` fvs2)
-- | Divide a sequence of statements into segments, where no segment
-- depends on any variables defined by a statement in another segment.
segments
  :: [(ExprLStmt Name, FreeVars)]
  -> [[(ExprLStmt Name, FreeVars)]]
segments stmts = map fst $ merge $ reverse $ map reverse $ walk (reverse stmts)
  where
    -- Every name bound anywhere in this statement sequence; used to
    -- restrict each statement's free variables to locally-bound names.
    allvars = mkNameSet (concatMap (collectStmtBinders.unLoc.fst) stmts)
    -- We would rather not have a segment that just has LetStmts in
    -- it, so combine those with an adjacent segment where possible.
    merge [] = []
    merge (seg : segs)
      = case rest of
          [] -> [(seg,all_lets)]
          ((s,s_lets):ss) | all_lets || s_lets
               -> (seg ++ s, all_lets && s_lets) : ss
          _otherwise -> (seg,all_lets) : rest
      where
        rest = merge segs
        all_lets = all (isLetStmt . fst) seg
    -- walk splits the statement sequence into segments, traversing
    -- the sequence from the back to the front, and keeping track of
    -- the set of free variables of the current segment.  Whenever
    -- this set of free variables is empty, we have a complete segment.
    walk :: [(ExprLStmt Name, FreeVars)] -> [[(ExprLStmt Name, FreeVars)]]
    walk [] = []
    walk ((stmt,fvs) : stmts) = ((stmt,fvs) : seg) : walk rest
      where (seg,rest) = chunter fvs' stmts
            (_, fvs') = stmtRefs stmt fvs
    -- Accumulate statements into the current chunk while the pending
    -- free-variable set is still non-empty.
    chunter _ [] = ([], [])
    chunter vars ((stmt,fvs) : rest)
      | not (isEmptyNameSet vars)
      = ((stmt,fvs) : chunk, rest')
      where (chunk,rest') = chunter vars' rest
            (pvars, evars) = stmtRefs stmt fvs
            vars' = (vars `minusNameSet` pvars) `unionNameSet` evars
    chunter _ rest = ([], rest)
    -- (binders of the statement, its free variables restricted to
    -- names bound in this sequence).  A let's own binders are removed
    -- from its free variables, since a let may be self-recursive.
    stmtRefs stmt fvs
      | isLetStmt stmt = (pvars, fvs' `minusNameSet` pvars)
      | otherwise      = (pvars, fvs')
      where fvs' = fvs `intersectNameSet` allvars
            pvars = mkNameSet (collectStmtBinders (unLoc stmt))
-- | Is this statement a @let@ binding?
isLetStmt :: LStmt a b -> Bool
isLetStmt stmt = case stmt of
  L _ LetStmt{} -> True
  _             -> False
-- | Find a "good" place to insert a bind in an indivisible segment.
-- This is the only place where we use heuristics.  The current
-- heuristic is to peel off the first group of independent statements
-- and put the bind after those.
splitSegment
  :: [(ExprLStmt Name, FreeVars)]
  -> ( [(ExprLStmt Name, FreeVars)]
     , [(ExprLStmt Name, FreeVars)] )
splitSegment [one,two] = ([one],[two])
  -- there is no choice when there are only two statements; this just saves
  -- some work in a common case.
splitSegment stmts
  | Just (lets,binds,rest) <- slurpIndependentStmts stmts
  -- Split after the leading lets if there are any; otherwise after
  -- the group of independent binds.
  = if not (null lets)
      then (lets, binds++rest)
      else (lets++binds, rest)
  | otherwise
  -- No independent prefix found: fall back to splitting off the
  -- first statement alone.
  = case stmts of
      (x:xs) -> ([x],xs)
      _other -> (stmts,[])
-- | Peel off a leading prefix of LetStmts and mutually-independent
-- BindStmts.  Returns @Nothing@ unless at least two independent binds
-- were found (otherwise there is nothing to parallelise).
slurpIndependentStmts
   :: [(LStmt Name (Located (body Name)), FreeVars)]
   -> Maybe ( [(LStmt Name (Located (body Name)), FreeVars)] -- LetStmts
            , [(LStmt Name (Located (body Name)), FreeVars)] -- BindStmts
            , [(LStmt Name (Located (body Name)), FreeVars)] )
slurpIndependentStmts stmts = go [] [] emptyNameSet stmts
 where
  -- If we encounter a BindStmt that doesn't depend on a previous BindStmt
  -- in this group, then add it to the group.
  go lets indep bndrs ((L loc (BindStmt pat body bind_op fail_op ty), fvs) : rest)
    | isEmptyNameSet (bndrs `intersectNameSet` fvs)
    = go lets ((L loc (BindStmt pat body bind_op fail_op ty), fvs) : indep)
         bndrs' rest
    where bndrs' = bndrs `unionNameSet` mkNameSet (collectPatBinders pat)
  -- If we encounter a LetStmt that doesn't depend on a BindStmt in this
  -- group, then move it to the beginning, so that it doesn't interfere with
  -- grouping more BindStmts.
  -- TODO: perhaps we shouldn't do this if there are any strict bindings,
  -- because we might be moving evaluation earlier.
  go lets indep bndrs ((L loc (LetStmt binds), fvs) : rest)
    | isEmptyNameSet (bndrs `intersectNameSet` fvs)
    = go ((L loc (LetStmt binds), fvs) : lets) indep bndrs rest
  -- Fewer than two independent binds: not worth it.
  go _ [] _ _ = Nothing
  go _ [_] _ _ = Nothing
  go lets indep _ stmts = Just (reverse lets, reverse indep, stmts)
-- | Build an ApplicativeStmt, and strip the "return" from the tail
-- if necessary.
--
-- For example, if we start with
--   do x <- E1; y <- E2; return (f x y)
-- then we get
--   do (E1[x] | E2[y]); f x y
--
-- the LastStmt in this case has the return removed, but we set the
-- flag on the LastStmt to indicate this, so that we can print out the
-- original statement correctly in error messages.  It is easier to do
-- it this way rather than try to ignore the return later in both the
-- typechecker and the desugarer (I tried it that way first!).
mkApplicativeStmt
  :: HsStmtContext Name
  -> [ApplicativeArg Name Name]         -- ^ The args
  -> Bool                               -- ^ True <=> need a join
  -> [ExprLStmt Name]                   -- ^ The body statements
  -> RnM ([ExprLStmt Name], FreeVars)
mkApplicativeStmt ctxt args need_join body_stmts
  = do { (fmap_op, fvs1) <- lookupStmtName ctxt fmapName
       ; (ap_op, fvs2) <- lookupStmtName ctxt apAName
       ; (mb_join, fvs3) <-
           if need_join then
             do { (join_op, fvs) <- lookupStmtName ctxt joinMName
                ; return (Just join_op, fvs) }
           else
             return (Nothing, emptyNameSet)
       -- The first argument is combined with <$> (fmap), every
       -- subsequent one with <*> (ap); hence (fmap_op : repeat ap_op).
       ; let applicative_stmt = noLoc $ ApplicativeStmt
               (zip (fmap_op : repeat ap_op) args)
               mb_join
               placeHolderType
       ; return ( applicative_stmt : body_stmts
                , fvs1 `plusFV` fvs2 `plusFV` fvs3) }
-- | Given the statements following an ApplicativeStmt, determine whether
-- we need a @join@ or not, and remove the @return@ if necessary.
needJoin :: MonadNames
         -> [ExprLStmt Name]
         -> (Bool, [ExprLStmt Name])
needJoin _monad_names [] = (False, [])  -- we're in an ApplicativeArg
needJoin monad_names [L loc (LastStmt e _ t)]
  -- A final "return e" can be stripped: no join needed; the True flag
  -- records the stripping so errors still show the original statement.
  | Just arg <- isReturnApp monad_names e =
      (False, [L loc (LastStmt arg True t)])
needJoin _monad_names stmts = (True, stmts)
-- | @Just e@, if the expression is @return e@ or @return $ e@,
-- otherwise @Nothing@.  @pure@ is treated the same as @return@.
isReturnApp :: MonadNames
            -> LHsExpr Name
            -> Maybe (LHsExpr Name)
isReturnApp monad_names (L _ (HsPar expr)) = isReturnApp monad_names expr
isReturnApp monad_names (L _ e) = case e of
  OpApp l op _ r | is_return l, is_dollar op -> Just r
  HsApp f arg    | is_return f               -> Just arg
  _otherwise -> Nothing
 where
  -- Look through parens and type applications to find a variable
  -- satisfying the predicate.
  is_var f (L _ (HsPar e)) = is_var f e
  is_var f (L _ (HsAppType e _)) = is_var f e
  is_var f (L _ (HsVar (L _ r))) = f r
  -- TODO: I don't know how to get this right for rebindable syntax
  is_var _ _ = False
  is_return = is_var (\n -> n == return_name monad_names
                         || n == pure_name monad_names)
  is_dollar = is_var (`hasKey` dollarIdKey)
{-
************************************************************************
* *
\subsubsection{Errors}
* *
************************************************************************
-}
-- | Report an error if an empty sequence of statements is illegal
-- in this context.
checkEmptyStmts :: HsStmtContext Name -> RnM ()
-- We've seen an empty sequence of Stmts... is that ok?
checkEmptyStmts ctxt
  = unless (okEmpty ctxt) (addErr (emptyErr ctxt))

-- | Pattern guards are the only context where an empty statement
-- sequence is permitted.
okEmpty :: HsStmtContext a -> Bool
okEmpty (PatGuard {}) = True
okEmpty _ = False

-- | Error message for an illegal empty statement group.
emptyErr :: HsStmtContext Name -> SDoc
emptyErr (ParStmtCtxt {}) = text "Empty statement group in parallel comprehension"
emptyErr (TransStmtCtxt {}) = text "Empty statement group preceding 'group' or 'then'"
emptyErr ctxt = text "Empty" <+> pprStmtContext ctxt
----------------------
checkLastStmt :: Outputable (body RdrName) => HsStmtContext Name
-> LStmt RdrName (Located (body RdrName))
-> RnM (LStmt RdrName (Located (body RdrName)))
checkLastStmt ctxt lstmt@(L loc stmt)
= case ctxt of
ListComp -> check_comp
MonadComp -> check_comp
PArrComp -> check_comp
ArrowExpr -> check_do
DoExpr -> check_do
MDoExpr -> check_do
_ -> check_other
where
check_do -- Expect BodyStmt, and change it to LastStmt
= case stmt of
BodyStmt e _ _ _ -> return (L loc (mkLastStmt e))
LastStmt {} -> return lstmt -- "Deriving" clauses may generate a
-- LastStmt directly (unlike the parser)
_ -> do { addErr (hang last_error 2 (ppr stmt)); return lstmt }
last_error = (text "The last statement in" <+> pprAStmtContext ctxt
<+> text "must be an expression")
check_comp -- Expect LastStmt; this should be enforced by the parser!
= case stmt of
LastStmt {} -> return lstmt
_ -> pprPanic "checkLastStmt" (ppr lstmt)
check_other -- Behave just as if this wasn't the last stmt
= do { checkStmt ctxt lstmt; return lstmt }
-- Checking when a particular Stmt is ok
-- | Check that a statement form is legal in its context, reporting an
-- error (possibly with an extension hint) if it is not.
checkStmt :: HsStmtContext Name
          -> LStmt RdrName (Located (body RdrName))
          -> RnM ()
checkStmt ctxt (L _ stmt)
  = do { dflags <- getDynFlags
       ; case okStmt dflags ctxt stmt of
           IsValid        -> return ()
           NotValid extra -> addErr (msg $$ extra) }
  where
    -- Use 'text' consistently, as in the rest of this module,
    -- instead of the older ptext/sLit combination.
    msg = sep [ text "Unexpected" <+> pprStmtCat stmt <+> text "statement"
              , text "in" <+> pprAStmtContext ctxt ]
-- | A short category name for a statement, used in error messages.
pprStmtCat :: Stmt a body -> SDoc
pprStmtCat stmt = case stmt of
  TransStmt {}       -> text "transform"
  LastStmt {}        -> text "return expression"
  BodyStmt {}        -> text "body"
  BindStmt {}        -> text "binding"
  LetStmt {}         -> text "let"
  RecStmt {}         -> text "rec"
  ParStmt {}         -> text "parallel"
  ApplicativeStmt {} -> panic "pprStmtCat: ApplicativeStmt"
------------
-- | An invalid result carrying no extra explanation.
emptyInvalid :: Validity  -- Payload is the empty document
emptyInvalid = NotValid Outputable.empty
okStmt, okDoStmt, okCompStmt, okParStmt, okPArrStmt
   :: DynFlags -> HsStmtContext Name
   -> Stmt RdrName (Located (body RdrName)) -> Validity
-- Return IsValid if OK, (NotValid extra) if not ok
-- The "extra" is an SDoc that is appended to a generic error message

-- | Dispatch on the statement context to the appropriate checker.
okStmt dflags ctxt stmt
  = case ctxt of
      PatGuard {}        -> okPatGuardStmt stmt
      ParStmtCtxt ctxt   -> okParStmt  dflags ctxt stmt
      DoExpr             -> okDoStmt   dflags ctxt stmt
      MDoExpr            -> okDoStmt   dflags ctxt stmt
      ArrowExpr          -> okDoStmt   dflags ctxt stmt
      GhciStmtCtxt       -> okDoStmt   dflags ctxt stmt
      ListComp           -> okCompStmt dflags ctxt stmt
      MonadComp          -> okCompStmt dflags ctxt stmt
      PArrComp           -> okPArrStmt dflags ctxt stmt
      TransStmtCtxt ctxt -> okStmt dflags ctxt stmt
-------------
-- | Statements allowed in a pattern guard.
okPatGuardStmt :: Stmt RdrName (Located (body RdrName)) -> Validity
okPatGuardStmt stmt
  = case stmt of
      BodyStmt {} -> IsValid
      BindStmt {} -> IsValid
      LetStmt {}  -> IsValid
      _           -> emptyInvalid
-------------
-- | A parallel-comprehension branch: implicit-parameter lets are
-- disallowed; everything else is checked in the underlying context.
okParStmt dflags ctxt stmt
  = case stmt of
      LetStmt (L _ (HsIPBinds {})) -> emptyInvalid
      _                            -> okStmt dflags ctxt stmt
----------------
-- | Statements allowed in do/mdo/arrow blocks; 'rec' needs
-- RecursiveDo except inside arrows.
okDoStmt dflags ctxt stmt
  = case stmt of
      RecStmt {}
        | LangExt.RecursiveDo `xopt` dflags -> IsValid
        | ArrowExpr <- ctxt -> IsValid    -- Arrows allows 'rec'
        | otherwise -> NotValid (text "Use RecursiveDo")
      BindStmt {} -> IsValid
      LetStmt {}  -> IsValid
      BodyStmt {} -> IsValid
      _           -> emptyInvalid
----------------
-- | Statements allowed in list/monad comprehensions; parallel and
-- transform statements are extension-gated.
okCompStmt dflags _ stmt
  = case stmt of
      BindStmt {} -> IsValid
      LetStmt {}  -> IsValid
      BodyStmt {} -> IsValid
      ParStmt {}
        | LangExt.ParallelListComp `xopt` dflags -> IsValid
        | otherwise -> NotValid (text "Use ParallelListComp")
      TransStmt {}
        | LangExt.TransformListComp `xopt` dflags -> IsValid
        | otherwise -> NotValid (text "Use TransformListComp")
      RecStmt {}  -> emptyInvalid
      LastStmt {} -> emptyInvalid  -- Should not happen (dealt with by checkLastStmt)
      ApplicativeStmt {} -> emptyInvalid
----------------
-- | Statements allowed in parallel-array comprehensions; like
-- comprehensions but transform statements are never allowed.
okPArrStmt dflags _ stmt
  = case stmt of
      BindStmt {} -> IsValid
      LetStmt {}  -> IsValid
      BodyStmt {} -> IsValid
      ParStmt {}
        | LangExt.ParallelListComp `xopt` dflags -> IsValid
        | otherwise -> NotValid (text "Use ParallelListComp")
      TransStmt {} -> emptyInvalid
      RecStmt {}   -> emptyInvalid
      LastStmt {}  -> emptyInvalid  -- Should not happen (dealt with by checkLastStmt)
      ApplicativeStmt {} -> emptyInvalid
---------
-- | Reject tuple sections (tuples with missing components) unless the
-- TupleSections extension is enabled.
checkTupleSection :: [LHsTupArg RdrName] -> RnM ()
checkTupleSection args
  = do { tuple_section <- xoptM LangExt.TupleSections
       ; checkErr (all tupArgPresent args || tuple_section) msg }
  where
    msg = text "Illegal tuple section: use TupleSections"
---------
-- | Error for an operator section used outside parentheses.
sectionErr :: HsExpr RdrName -> SDoc
sectionErr expr
  = hang (text "A section must be enclosed in parentheses")
       2 (text "thus:" <+> (parens (ppr expr)))

-- | Error for pattern syntax appearing in an expression context;
-- returns a wildcard so renaming can continue.
patSynErr :: HsExpr RdrName -> SDoc -> RnM (HsExpr Name, FreeVars)
patSynErr e explanation = do { addErr (sep [text "Pattern syntax in expression context:",
                                            nest 4 (ppr e)] $$
                                       explanation)
                             ; return (EWildPat, emptyFVs) }

-- | Error for implicit-parameter bindings in an illegal position.
badIpBinds :: Outputable a => SDoc -> a -> SDoc
badIpBinds what binds
  = hang (text "Implicit-parameter bindings illegal in" <+> what)
       2 (ppr binds)
| GaloisInc/halvm-ghc | compiler/rename/RnExpr.hs | bsd-3-clause | 80,871 | 107 | 22 | 23,340 | 20,164 | 10,706 | 9,458 | 1,183 | 15 |
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
--
-- Copyright (c) 2009-2011, ERICSSON AB
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
--
-- * Redistributions of source code must retain the above copyright notice,
-- this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
-- * Neither the name of the ERICSSON AB nor the names of its contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-- DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-- FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-- DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-- SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-- CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--
-- | Operations on matrices (doubly-nested parallel vectors). All operations in
-- this module assume rectangular matrices.
module Feldspar.Matrix where
import qualified Prelude as P
import Feldspar.Prelude
import Feldspar.Core
import Feldspar.Vector.Internal
-- | A matrix is a vector of row vectors.
type Matrix a = Vector2 a

-- | Type patch for matrices (specialisation of 'tVec2').
tMat :: Patch a a -> Patch (Matrix a) (Matrix a)
tMat = tVec2

-- | Converts a matrix to a core array.
freezeMatrix :: Type a => Matrix a -> Data [[a]]
freezeMatrix = freezeVector . map freezeVector

-- | Converts a core array to a matrix.
thawMatrix :: Type a => Data [[a]] -> Matrix a
thawMatrix = map thawVector . thawVector

-- | Converts a core array to a matrix. The first length argument is the number
-- of rows (outer vector), and the second argument is the number of columns
-- (inner vector).
thawMatrix' :: Type a => Length -> Length -> Data [[a]] -> Matrix a
thawMatrix' y x = map (thawVector' x) . thawVector' y

-- | Constructs a matrix. The elements are stored in a core array.
matrix :: Type a => [[a]] -> Matrix a
matrix = value
-- | Constructing a matrix from an index function.
--
-- @indexedMat m n ixf@:
--
-- * @m@ is the number of rows.
--
-- * @n@ is the number of columns.
--
-- * @ifx@ is a function mapping indexes to elements (first argument is row
-- index; second argument is column index).
indexedMat
    :: Data Length
    -> Data Length
    -> (Data Index -> Data Index -> Data a)
    -> Matrix a
-- Outer vector of rows, inner vector of columns.
indexedMat m n idx = indexed m $ \k -> indexed n $ \l -> idx k l
-- | Transpose of a matrix. Assumes that the number of rows is > 0.
transpose :: Type a => Matrix a -> Matrix a
transpose a = indexedMat (length $ head a) (length a) $ \y x -> a ! x ! y
-- TODO This assumes that (head a) can be used even if a is empty.
-- | Concatenates the rows of a matrix.
flatten :: Type a => Matrix a -> Vector (Data a)
flatten matr = Indexed (m*n) ixf Empty
  where
    m = length matr
    -- Guard against an empty matrix before taking head.
    n = (m==0) ? 0 $ length (head matr)
    -- Row-major indexing: element i lives at row (i div n), col (i mod n).
    ixf i = matr ! y ! x
      where
        y = i `div` n
        x = i `mod` n
-- | The diagonal vector of a square matrix. It happens to work if the number of
-- rows is less than the number of columns, but not the other way around (this
-- would require some overhead).
diagonal :: Type a => Matrix a -> Vector (Data a)
diagonal m = zipWith (!) m (0 ... (length m - 1))
-- | Distribute a binary operation over a vector on the right: the
-- first operand is fixed, the operation is mapped over the vector.
distributeL :: (a -> b -> c) -> a -> Vector b -> Vector c
distributeL f = map . f

-- | Distribute a binary operation over a vector on the left.
distributeR :: (a -> b -> c) -> Vector a -> b -> Vector c
distributeR = flip . distributeL . flip
-- | Overloaded multiplication between scalars, vectors and matrices.
class Mul a b
  where
    type Prod a b
    -- | General multiplication operator
    (***) :: a -> b -> Prod a b

instance Numeric a => Mul (Data a) (Data a)
  where
    type Prod (Data a) (Data a) = Data a
    (***) = (*)

-- Scalar * vector: scale every element.
instance Numeric a => Mul (Data a) (Vector1 a)
  where
    type Prod (Data a) (Vector1 a) = Vector1 a
    (***) = distributeL (***)

instance Numeric a => Mul (Vector1 a) (Data a)
  where
    type Prod (Vector1 a) (Data a) = Vector1 a
    (***) = distributeR (***)

-- Scalar * matrix: scale every row.
instance Numeric a => Mul (Data a) (Matrix a)
  where
    type Prod (Data a) (Matrix a) = Matrix a
    (***) = distributeL (***)

instance Numeric a => Mul (Matrix a) (Data a)
  where
    type Prod (Matrix a) (Data a) = Matrix a
    (***) = distributeR (***)

-- Vector * vector: scalar (dot) product.
instance Numeric a => Mul (Vector1 a) (Vector1 a)
  where
    type Prod (Vector1 a) (Vector1 a) = Data a
    (***) = scalarProd

-- Row vector * matrix: dot the vector with each column.
instance Numeric a => Mul (Vector1 a) (Matrix a)
  where
    type Prod (Vector1 a) (Matrix a) = (Vector1 a)
    vec *** mat = distributeL (***) vec (transpose mat)

-- Matrix * column vector: dot each row with the vector.
instance Numeric a => Mul (Matrix a) (Vector1 a)
  where
    type Prod (Matrix a) (Vector1 a) = (Vector1 a)
    (***) = distributeR (***)

-- Matrix * matrix: each row of the left is multiplied by the right.
instance Numeric a => Mul (Matrix a) (Matrix a)
  where
    type Prod (Matrix a) (Matrix a) = (Matrix a)
    (***) = distributeR (***)
-- | Matrix multiplication
mulMat :: Numeric a => Matrix a -> Matrix a -> Matrix a
mulMat = (***)

-- | Structures supporting element-wise binary operations over their
-- underlying scalars.
class Syntax a => ElemWise a
  where
    type Scalar a
    -- | Operator for general element-wise multiplication
    elemWise :: (Scalar a -> Scalar a -> Scalar a) -> a -> a -> a

-- Base case: a scalar is its own "structure".
instance Type a => ElemWise (Data a)
  where
    type Scalar (Data a) = Data a
    elemWise = id

-- Inductive case: apply element-wise recursively through vectors.
instance (ElemWise a, Syntax (Vector a)) => ElemWise (Vector a)
  where
    type Scalar (Vector a) = Scalar a
    elemWise = zipWith . elemWise

-- | Element-wise addition.
(.+) :: (ElemWise a, Num (Scalar a)) => a -> a -> a
(.+) = elemWise (+)

-- | Element-wise subtraction.
(.-) :: (ElemWise a, Num (Scalar a)) => a -> a -> a
(.-) = elemWise (-)

-- | Element-wise multiplication.
(.*) :: (ElemWise a, Num (Scalar a)) => a -> a -> a
(.*) = elemWise (*)
| rCEx/feldspar-lang-small | src/Feldspar/Matrix.hs | bsd-3-clause | 6,432 | 0 | 11 | 1,446 | 1,745 | 936 | 809 | 91 | 1 |
{-# LANGUAGE
FlexibleInstances
, GeneralizedNewtypeDeriving
, MultiParamTypeClasses
, StandaloneDeriving
, TypeOperators
#-}
module Control.Arrow.List where
import Control.Arrow
import Control.Arrow.Kleisli.Class
import Control.Arrow.List.Class
import Control.Arrow.ListLike.Class
import Control.Category
import Control.Monad.Identity
import Control.Monad.List
import Prelude hiding (id, (.))
-- * ListT arrow.

-- | Arrow wrapping a Kleisli arrow in the 'ListT' monad transformer:
-- a computation from @a@ to a monadic list of results @b@.
newtype ListTArrow m a b = ListTArrow { runListTArrow' :: Kleisli (ListT m) a b }
  deriving
    ( Category
    , Arrow
    , ArrowZero
    , ArrowPlus
    , ArrowApply
    , ArrowChoice
    )

-- Lift a monadic function into the arrow, producing a singleton list.
instance Monad m => ArrowKleisli m (ListTArrow m) where
  arrM a = ListTArrow (Kleisli (ListT . (liftM return . a)))

-- | Run a 'ListTArrow' on an input, collecting all results in the
-- underlying monad.
runListTArrow :: ListTArrow m a b -> a -> m [b]
runListTArrow a = runListT . runKleisli (runListTArrow' a)
-- * List arrow.

-- | Pure list arrow: 'ListTArrow' over the 'Identity' monad.
type ListArrow a b = ListTArrow Identity a b

-- | Run a pure 'ListArrow', collecting all results.
runListArrow :: ListArrow a b -> a -> [b]
runListArrow a = runIdentity . runListTArrow a

instance Monad m => ArrowList (ListTArrow m) where
  -- Lift a pure list-producing function.
  arrL a = ListTArrow (Kleisli (ListT . return . a))
  -- Apply a pure list transformation to an arrow's full result list.
  mapL f g = arrML (liftM f . runListTArrow g)

instance Monad m => ArrowListLike [] (ListTArrow m) where
  -- Emit each element of the input list as a separate result.
  embed = ListTArrow (Kleisli (ListT . return))
  -- Collect all results of an arrow into a single list result.
  observe f = ListTArrow . Kleisli $ \a -> ListT $
    return `liftM` runListT (runKleisli (runListTArrow' f) a)

-- * Embed a monadic function returning lists.

-- | Lift a monadic function returning a list into the arrow, emitting
-- each element as a separate result.
arrML :: (ArrowList arr, ArrowKleisli m arr) => (a -> m [b]) -> a `arr` b
arrML x = unlist . arrM x
| silkapp/arrow-list | src/Control/Arrow/List.hs | bsd-3-clause | 1,540 | 1 | 13 | 317 | 507 | 273 | 234 | 39 | 1 |
{-# LANGUAGE FlexibleInstances, FlexibleContexts, UndecidableInstances #-}
module QCUtils where
import Test.QuickCheck
import Test.QuickCheck.Arbitrary
import Test.QuickCheck.Gen
import Data.Iteratee
import Data.Iteratee.Iteratee
import qualified Data.Iteratee as I
import qualified Data.ListLike as LL
import Data.Functor.Identity
import Control.Applicative
import Control.Exception
-- Show instance

-- | Show an iteratee by running it to completion in the Identity
-- monad and showing its final value.
instance (Show a, LL.ListLike s el) => Show (Iteratee s Identity a) where
  show = (++) "<<Iteratee>> " . show . runIdentity . run

-- Arbitrary instances

-- Generate either an EOF (possibly carrying an exception) or a chunk.
instance Arbitrary c => Arbitrary (Stream c) where
  arbitrary = do
    err <- arbitrary
    xs <- arbitrary
    elements [EOF err, Chunk xs]

-- | Shorthand for 'toException'.
tE :: Exception e => e -> SomeException
tE = toException

-- Cover the common iteratee exception constructors.
instance Arbitrary SomeException where
  arbitrary = do
    str <- arbitrary
    off <- fromInteger <$> (arbitrary :: Gen Integer)
    elements [tE DivergentException, tE (SeekException off),
              tE EofException, iterStrExc str]
-- A small zoo of representative iteratees built from the library's
-- primitive combinators, for property testing.
instance (Num a, Ord a, Arbitrary a, Monad m) => Arbitrary (Iteratee [a] m [a]) where
  arbitrary = do
    -- n must be positive so I.drop consumes something.
    n <- suchThat arbitrary (>0)
    ns <- arbitrary
    elements [
      I.drop n >> stream2list
      ,I.drop n >> return ns
      ,I.break (< 5)
      ,I.heads ns >> stream2list
      ,I.peek >> stream2list
      ,I.peek >> return ns
      ,I.identity >> return []
      ,I.identity >> return ns
      ]
| iteloo/tsuru-sample | iteratee-0.8.9.6/tests/QCUtils.hs | bsd-3-clause | 1,477 | 0 | 12 | 373 | 464 | 245 | 219 | 40 | 1 |
{-# LANGUAGE TupleSections #-}
module One (one) where
import System.IO
import Control.Monad
import Control.Applicative
import Data.List
import Data.List.Split
import Data.Maybe
import Debug.Trace
-- | Compass bearing; the Enum order N, E, S, W is relied on by
-- 'right' and 'left'.
data Bearing = N | E | S | W deriving (Eq, Show, Enum)

-- Turn 90 degrees clockwise (wrapping W back to N).
right W = N
right a = succ a

-- Turn 90 degrees anticlockwise (wrapping N back to W).
left N = W
left a = pred a
-- | Day one: read comma-separated turn/step instructions from
-- data/one.txt and print the taxicab distance answers for both parts.
one = do
  input <- (splitOn ", " . filter (/= '\n')) <$> readFile "data/one.txt"
  --let input = ["R8", "R4", "R4", "R8"]
  -- Part A: fold over the instructions, tracking bearing and position.
  let (_, (x1, y1)) = foldl' (\(b, xy) (f, m) -> (f b, step (f b) xy m)) (N, (0, 0)) (parseInsn <$> input)
      result1 = abs x1 + abs y1
  putStrLn $ "part A: " ++ show result1
  -- Part B: the first position visited twice.
  let (x2, y2) = partB [] N (0, 0) (parseInsn <$> input)
      result2 = abs x2 + abs y2
  putStrLn $ "part B: " ++ show result2
-- | Parse one instruction such as @"R8"@ or @"L4"@ into a turn
-- function (to apply to the current bearing) and a step count.
-- Fails with a descriptive error on an empty instruction, instead of
-- the previous non-exhaustive-pattern crash.  Note: 'read' is still
-- partial on malformed digits, matching the original behavior.
parseInsn :: String -> ((Bearing -> Bearing), Integer)
parseInsn (d:n) = (if d == 'R' then right else left, read n)
parseInsn []    = error "parseInsn: empty instruction"
-- | Move the coordinate m units in the given compass bearing
-- (N increases y, E increases x, S decreases y, W decreases x).
step :: Bearing -> (Integer, Integer) -> Integer -> (Integer, Integer)
step bearing (x, y) m = case bearing of
  N -> (x, y + m)
  E -> (x + m, y)
  S -> (x, y - m)
  W -> (x - m, y)
-- Walk the instruction list, accumulating every coordinate visited
-- along each leg, and return the first coordinate visited twice.
-- NOTE(review): non-exhaustive if no position ever repeats (the
-- empty-list case is missing), and if two duplicates arise within one
-- leg only the first found by firstDupe is reported — acceptable for
-- this puzzle input, as the original author noted ("wrong if I could
-- get two dupes in one path").
partB s pb (x1, y1) ((f, m):is)
  | isJust d = fromJust d
  | otherwise = partB ns b (x2, y2) is
  where b = f pb
        (x2, y2) = step b (x1, y1) m
        -- All intermediate points of this leg, excluding the start
        -- point (tail/init) so corners are not double-counted.
        xys = case b of
          N -> tail $ (x2,) <$> [y1 .. y2]
          S -> init $ (x2,) <$> [y2 .. y1]
          E -> tail $ (,y2) <$> [x1 .. x2]
          W -> init $ (,y2) <$> [x2 .. x1]
        ns = s ++ xys
        d = firstDupe ns
-- | The first element that occurs again later in the list, or
-- Nothing when all elements are distinct.
firstDupe :: (Eq a) => [a] -> Maybe a
firstDupe []     = Nothing
firstDupe (x:xs)
  | x `elem` xs = Just x
  | otherwise   = firstDupe xs
| alicemaz/advent2016 | One.hs | bsd-3-clause | 1,766 | 0 | 16 | 540 | 836 | 459 | 377 | 44 | 4 |
module Main (main) where
import Test.Framework (Test, defaultMain,
testGroup)
import Test.Framework.Providers.QuickCheck2 (testProperty)
import BinaryTree as BinTree
import BinomialHeap as Binom
main :: IO ()
main = defaultMain tests

-- | All test groups: binary-tree invariants and binomial-heap
-- invariants, each checked by QuickCheck properties.
tests :: [Test]
tests = [
  testGroup "Binary Tree" [
    testProperty "is ordered" BinTree.prop_bintree_ordered
  , testProperty "is balanced" BinTree.prop_bintree_balanced
--  , testProperty "has minimum height" BinTree.prop_bintree_minimumheight
  ],
  testGroup "Binomial Heap" [
    testProperty "is a minimum heap" Binom.prop_heap_is_minimum
  , testProperty "has one tree per rank" Binom.prop_heap_one_tree_per_rank
  , testProperty "has correct number of trees" Binom.prop_heap_max_trees
  , testProperty "has correct heap size" Binom.prop_heap_size
  , testProperty "has correct tree sizes" Binom.prop_heap_tree_sizes
  , testProperty "accepts duplicates" Binom.prop_heap_accepts_dups
  , testProperty "sorts properly" Binom.prop_heap_sorts
  ]
 ]
| peterson/lets-tree | tests/Main.hs | bsd-3-clause | 1,283 | 0 | 9 | 431 | 185 | 103 | 82 | 21 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple
-- Copyright : Isaac Jones 2003-2005
-- License : BSD3
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- This is the command line front end to the Simple build system. When given
-- the parsed command-line args and package information, is able to perform
-- basic commands like configure, build, install, register, etc.
--
-- This module exports the main functions that Setup.hs scripts use. It
-- re-exports the 'UserHooks' type, the standard entry points like
-- 'defaultMain' and 'defaultMainWithHooks' and the predefined sets of
-- 'UserHooks' that custom @Setup.hs@ scripts can extend to add their own
-- behaviour.
--
-- This module isn't called \"Simple\" because it's simple. Far from
-- it. It's called \"Simple\" because it does complicated things to
-- simple software.
--
-- The original idea was that there could be different build systems that all
-- presented the same compatible command line interfaces. There is still a
-- "Distribution.Make" system but in practice no packages use it.
{-
Work around this warning:
libraries/Cabal/Distribution/Simple.hs:78:0:
Warning: In the use of `runTests'
(imported from Distribution.Simple.UserHooks):
Deprecated: "Please use the new testing interface instead!"
-}
{-# OPTIONS_GHC -fno-warn-deprecations #-}
module Distribution.Simple (
module Distribution.Package,
module Distribution.Version,
module Distribution.License,
module Distribution.Simple.Compiler,
module Language.Haskell.Extension,
-- * Simple interface
defaultMain, defaultMainNoRead, defaultMainArgs,
-- * Customization
UserHooks(..), Args,
defaultMainWithHooks, defaultMainWithHooksArgs,
-- ** Standard sets of hooks
simpleUserHooks,
autoconfUserHooks,
defaultUserHooks, emptyUserHooks,
-- ** Utils
defaultHookedPackageDesc
) where
-- local
import Distribution.Simple.Compiler hiding (Flag)
import Distribution.Simple.UserHooks
import Distribution.Package --must not specify imports, since we're exporting module.
import Distribution.PackageDescription
( PackageDescription(..), GenericPackageDescription, Executable(..)
, updatePackageDescription, hasLibs
, HookedBuildInfo, emptyHookedBuildInfo )
import Distribution.PackageDescription.Parse
( readPackageDescription, readHookedBuildInfo )
import Distribution.PackageDescription.Configuration
( flattenPackageDescription )
import Distribution.Simple.Program
( defaultProgramConfiguration, addKnownPrograms, builtinPrograms
, restoreProgramConfiguration, reconfigurePrograms )
import Distribution.Simple.PreProcess (knownSuffixHandlers, PPSuffixHandler)
import Distribution.Simple.Setup
import Distribution.Simple.Command
import Distribution.Simple.Build ( build, repl )
import Distribution.Simple.SrcDist ( sdist )
import Distribution.Simple.Register
( register, unregister )
import Distribution.Simple.Configure
( getPersistBuildConfig, maybeGetPersistBuildConfig
, writePersistBuildConfig, checkPersistBuildConfigOutdated
, configure, checkForeignDeps )
import Distribution.Simple.LocalBuildInfo ( LocalBuildInfo(..) )
import Distribution.Simple.Bench (bench)
import Distribution.Simple.BuildPaths ( srcPref)
import Distribution.Simple.Test (test)
import Distribution.Simple.Install (install)
import Distribution.Simple.Haddock (haddock, hscolour)
import Distribution.Simple.Utils
(die, notice, info, warn, setupMessage, chattyTry,
defaultPackageDesc, defaultHookedPackageDesc,
rawSystemExitWithEnv, cabalVersion, topHandler )
import Distribution.System
( OS(..), buildOS )
import Distribution.Verbosity
import Language.Haskell.Extension
import Distribution.Version
import Distribution.License
import Distribution.Text
( display )
-- Base
import System.Environment(getArgs, getProgName)
import System.Directory(removeFile, doesFileExist,
doesDirectoryExist, removeDirectoryRecursive)
import System.Exit (exitWith,ExitCode(..))
import System.IO.Error (isDoesNotExistError)
import Control.Exception (throwIO)
import Distribution.Compat.Environment (getEnvironment)
import Distribution.Compat.Exception (catchIO)
import Control.Monad (when)
import Data.List (intercalate, unionBy, nub, (\\))
-- | A simple implementation of @main@ for a Cabal setup script.
-- It reads the package description file using IO, and performs the
-- action specified on the command line.
defaultMain :: IO ()
defaultMain = getArgs >>= defaultMainHelper simpleUserHooks

-- | A version of 'defaultMain' that is passed the command line
-- arguments, rather than getting them from the environment.
defaultMainArgs :: [String] -> IO ()
defaultMainArgs = defaultMainHelper simpleUserHooks

-- | A customizable version of 'defaultMain'.
defaultMainWithHooks :: UserHooks -> IO ()
defaultMainWithHooks hooks = getArgs >>= defaultMainHelper hooks

-- | A customizable version of 'defaultMain' that also takes the command
-- line arguments.
defaultMainWithHooksArgs :: UserHooks -> [String] -> IO ()
defaultMainWithHooksArgs = defaultMainHelper

-- | Like 'defaultMain', but accepts the package description as input
-- rather than using IO to read it.
defaultMainNoRead :: GenericPackageDescription -> IO ()
defaultMainNoRead pkg_descr =
  getArgs >>=
  -- Override readDesc so the supplied description is used directly.
  defaultMainHelper simpleUserHooks { readDesc = return (Just pkg_descr) }
-- | Shared driver: parse the global command line, then dispatch to the
-- selected command's action (or print help/version/errors).
defaultMainHelper :: UserHooks -> Args -> IO ()
defaultMainHelper hooks args = topHandler $
  case commandsRun (globalCommand commands) commands args of
    CommandHelp   help                 -> printHelp help
    CommandList   opts                 -> printOptionsList opts
    CommandErrors errs                 -> printErrors errs
    CommandReadyToGo (flags, commandParse)  ->
      case commandParse of
        _ | fromFlag (globalVersion flags)        -> printVersion
          | fromFlag (globalNumericVersion flags) -> printNumericVersion
        CommandHelp     help           -> printHelp help
        CommandList     opts           -> printOptionsList opts
        CommandErrors   errs           -> printErrors errs
        CommandReadyToGo action        -> action

  where
    printHelp help = getProgName >>= putStr . help
    printOptionsList = putStr . unlines
    printErrors errs = do
      putStr (intercalate "\n" errs)
      exitWith (ExitFailure 1)
    printNumericVersion = putStrLn $ display cabalVersion
    printVersion        = putStrLn $ "Cabal library version "
                                  ++ display cabalVersion

    -- Programs known to the hooks extend the built-in program set.
    progs = addKnownPrograms (hookedPrograms hooks) defaultProgramConfiguration
    -- The table of supported commands, each paired with its hook-aware action.
    commands =
      [configureCommand progs `commandAddAction` \fs as ->
                              configureAction hooks fs as >> return ()
      ,buildCommand     progs `commandAddAction` buildAction        hooks
      ,replCommand      progs `commandAddAction` replAction         hooks
      ,installCommand         `commandAddAction` installAction      hooks
      ,copyCommand            `commandAddAction` copyAction         hooks
      ,haddockCommand         `commandAddAction` haddockAction      hooks
      ,cleanCommand           `commandAddAction` cleanAction        hooks
      ,sdistCommand           `commandAddAction` sdistAction        hooks
      ,hscolourCommand        `commandAddAction` hscolourAction     hooks
      ,registerCommand        `commandAddAction` registerAction     hooks
      ,unregisterCommand      `commandAddAction` unregisterAction   hooks
      ,testCommand            `commandAddAction` testAction         hooks
      ,benchmarkCommand       `commandAddAction` benchAction        hooks
      ]
-- | Combine the preprocessors in the given hooks with the
-- preprocessors built into cabal.  Hooked preprocessors take
-- precedence over built-in ones with the same suffix.
allSuffixHandlers :: UserHooks
                  -> [PPSuffixHandler]
allSuffixHandlers hooks
    = overridesPP (hookedPreProcessors hooks) knownSuffixHandlers
    where
      overridesPP :: [PPSuffixHandler] -> [PPSuffixHandler] -> [PPSuffixHandler]
      overridesPP = unionBy (\x y -> fst x == fst y)
-- | Run the configure step: pre-hook, read the package description,
-- call the configure hook, persist the resulting LocalBuildInfo to
-- disk, then run the post-hook.
configureAction :: UserHooks -> ConfigFlags -> Args -> IO LocalBuildInfo
configureAction hooks flags args = do
    let distPref = fromFlag $ configDistPref flags
    pbi <- preConf hooks args flags

    (mb_pd_file, pkg_descr0) <- confPkgDescr

    --    get_pkg_descr (configVerbosity flags')
    --let pkg_descr = updatePackageDescription pbi pkg_descr0
    let epkg_descr = (pkg_descr0, pbi)

    --(warns, ers) <- sanityCheckPackage pkg_descr
    --errorOut (configVerbosity flags') warns ers

    localbuildinfo0 <- confHook hooks epkg_descr flags

    -- remember the .cabal filename if we know it
    -- and all the extra command line args
    let localbuildinfo = localbuildinfo0 {
                           pkgDescrFile = mb_pd_file,
                           extraConfigArgs = args
                         }
    writePersistBuildConfig distPref localbuildinfo

    let pkg_descr = localPkgDescr localbuildinfo
    postConf hooks args flags pkg_descr localbuildinfo
    return localbuildinfo
  where
    verbosity = fromFlag (configVerbosity flags)
    -- Use the hook-supplied description if any; otherwise locate and
    -- parse the .cabal file, remembering its path.
    confPkgDescr :: IO (Maybe FilePath, GenericPackageDescription)
    confPkgDescr = do
      mdescr <- readDesc hooks
      case mdescr of
        Just descr -> return (Nothing, descr)
        Nothing -> do
          pdfile <- defaultPackageDesc verbosity
          descr  <- readPackageDescription verbosity pdfile
          return (Just pdfile, descr)
-- | Build step: re-apply any program path\/argument overrides given on
-- the command line, then run the standard pre\/main\/post build hooks.
buildAction :: UserHooks -> BuildFlags -> Args -> IO ()
buildAction hooks flags args = do
  let distPref = fromFlag $ buildDistPref flags
      verbosity = fromFlag $ buildVerbosity flags
  lbi <- getBuildConfig hooks verbosity distPref
  progs <- reconfigurePrograms verbosity
             (buildProgramPaths flags)
             (buildProgramArgs flags)
             (withPrograms lbi)
  -- 'buildArgs' is stashed into the flags so the build hook can see them.
  hookedAction preBuild buildHook postBuild
               (return lbi { withPrograms = progs })
               hooks flags { buildArgs = args } args
-- | Repl step: like 'buildAction' it reconfigures overridden programs,
-- but it drives 'replHook' directly (rather than via 'hookedAction') so
-- the repl arguments can be passed through to the hook.
replAction :: UserHooks -> ReplFlags -> Args -> IO ()
replAction hooks flags args = do
  let distPref = fromFlag $ replDistPref flags
      verbosity = fromFlag $ replVerbosity flags
  lbi <- getBuildConfig hooks verbosity distPref
  progs <- reconfigurePrograms verbosity
             (replProgramPaths flags)
             (replProgramArgs flags)
             (withPrograms lbi)
  pbi <- preRepl hooks args flags
  let lbi' = lbi { withPrograms = progs }
      pkg_descr0 = localPkgDescr lbi'
      pkg_descr = updatePackageDescription pbi pkg_descr0
  -- 'hooks' appears twice: once as the receiver and once as the
  -- UserHooks parameter the hook's signature expects.
  replHook hooks pkg_descr lbi' hooks flags args
  postRepl hooks args flags pkg_descr lbi'
-- | HsColour step: standard pre\/main\/post hook pipeline.
hscolourAction :: UserHooks -> HscolourFlags -> Args -> IO ()
hscolourAction hooks flags args
    = do let distPref = fromFlag $ hscolourDistPref flags
             verbosity = fromFlag $ hscolourVerbosity flags
         hookedAction preHscolour hscolourHook postHscolour
                      (getBuildConfig hooks verbosity distPref)
                      hooks flags args

-- | Haddock step: reconfigures any overridden programs first, then runs
-- the standard pre\/main\/post hook pipeline.
haddockAction :: UserHooks -> HaddockFlags -> Args -> IO ()
haddockAction hooks flags args = do
  let distPref = fromFlag $ haddockDistPref flags
      verbosity = fromFlag $ haddockVerbosity flags
  lbi <- getBuildConfig hooks verbosity distPref
  progs <- reconfigurePrograms verbosity
             (haddockProgramPaths flags)
             (haddockProgramArgs flags)
             (withPrograms lbi)
  hookedAction preHaddock haddockHook postHaddock
               (return lbi { withPrograms = progs })
               hooks flags args
-- | Clean step: reads the package description directly from disk rather
-- than requiring a configured package, since clean must work even when
-- the package was never (or unsuccessfully) configured.
cleanAction :: UserHooks -> CleanFlags -> Args -> IO ()
cleanAction hooks flags args = do
    pbi <- preClean hooks args flags
    pdfile <- defaultPackageDesc verbosity
    ppd <- readPackageDescription verbosity pdfile
    let pkg_descr0 = flattenPackageDescription ppd
    -- We don't sanity check for clean as an error
    -- here would prevent cleaning:
    --sanityCheckHookedBuildInfo pkg_descr0 pbi
    let pkg_descr = updatePackageDescription pbi pkg_descr0
    cleanHook hooks pkg_descr () hooks flags
    postClean hooks args flags pkg_descr ()
  where verbosity = fromFlag (cleanVerbosity flags)
-- | Copy step: standard pre\/main\/post hook pipeline.
copyAction :: UserHooks -> CopyFlags -> Args -> IO ()
copyAction hooks flags args
    = do let distPref = fromFlag $ copyDistPref flags
             verbosity = fromFlag $ copyVerbosity flags
         hookedAction preCopy copyHook postCopy
                      (getBuildConfig hooks verbosity distPref)
                      hooks flags args

-- | Install step: standard pre\/main\/post hook pipeline.
installAction :: UserHooks -> InstallFlags -> Args -> IO ()
installAction hooks flags args
    = do let distPref = fromFlag $ installDistPref flags
             verbosity = fromFlag $ installVerbosity flags
         hookedAction preInst instHook postInst
                      (getBuildConfig hooks verbosity distPref)
                      hooks flags args
-- | SDist step: only needs the package description; the build config is
-- optional ('maybeGetPersistBuildConfig') because sdist may run on an
-- unconfigured package.
sdistAction :: UserHooks -> SDistFlags -> Args -> IO ()
sdistAction hooks flags args = do
    let distPref = fromFlag $ sDistDistPref flags
    pbi <- preSDist hooks args flags
    mlbi <- maybeGetPersistBuildConfig distPref
    pdfile <- defaultPackageDesc verbosity
    ppd <- readPackageDescription verbosity pdfile
    let pkg_descr0 = flattenPackageDescription ppd
    sanityCheckHookedBuildInfo pkg_descr0 pbi
    let pkg_descr = updatePackageDescription pbi pkg_descr0
    sDistHook hooks pkg_descr mlbi hooks flags
    postSDist hooks args flags pkg_descr mlbi
  where verbosity = fromFlag (sDistVerbosity flags)
-- | Test step: first runs the legacy 'runTests' hook, then the new-style
-- pre\/main\/post test hooks.
testAction :: UserHooks -> TestFlags -> Args -> IO ()
testAction hooks flags args = do
    let distPref = fromFlag $ testDistPref flags
        verbosity = fromFlag $ testVerbosity flags
    localBuildInfo <- getBuildConfig hooks verbosity distPref
    let pkg_descr = localPkgDescr localBuildInfo
    -- It is safe to do 'runTests' before the new test handler because the
    -- default action is a no-op and if the package uses the old test interface
    -- the new handler will find no tests.
    runTests hooks args False pkg_descr localBuildInfo
    hookedActionWithArgs preTest testHook postTest
            (getBuildConfig hooks verbosity distPref)
            hooks flags args

-- | Benchmark step: pre\/main\/post pipeline; the main hook receives
-- the command-line args.
benchAction :: UserHooks -> BenchmarkFlags -> Args -> IO ()
benchAction hooks flags args = do
    let distPref = fromFlag $ benchmarkDistPref flags
        verbosity = fromFlag $ benchmarkVerbosity flags
    hookedActionWithArgs preBench benchHook postBench
            (getBuildConfig hooks verbosity distPref)
            hooks flags args

-- | Register step: standard pre\/main\/post hook pipeline.
registerAction :: UserHooks -> RegisterFlags -> Args -> IO ()
registerAction hooks flags args
    = do let distPref = fromFlag $ regDistPref flags
             verbosity = fromFlag $ regVerbosity flags
         hookedAction preReg regHook postReg
                      (getBuildConfig hooks verbosity distPref)
                      hooks flags args

-- | Unregister step: standard pre\/main\/post hook pipeline (shares the
-- 'RegisterFlags' type with 'registerAction').
unregisterAction :: UserHooks -> RegisterFlags -> Args -> IO ()
unregisterAction hooks flags args
    = do let distPref = fromFlag $ regDistPref flags
             verbosity = fromFlag $ regVerbosity flags
         hookedAction preUnreg unregHook postUnreg
                      (getBuildConfig hooks verbosity distPref)
                      hooks flags args
-- | Glue for the common pre\/main\/post hook pattern, for command hooks
-- whose main hook does not take the command-line args.
hookedAction :: (UserHooks -> Args -> flags -> IO HookedBuildInfo)
             -> (UserHooks -> PackageDescription -> LocalBuildInfo
                           -> UserHooks -> flags -> IO ())
             -> (UserHooks -> Args -> flags -> PackageDescription
                           -> LocalBuildInfo -> IO ())
             -> IO LocalBuildInfo
             -> UserHooks -> flags -> Args -> IO ()
hookedAction pre_hook cmd_hook =
  hookedActionWithArgs pre_hook (\h _ pd lbi uh flags -> cmd_hook h pd lbi uh flags)

-- | As 'hookedAction', but the main hook also receives the args.  Runs
-- the pre-hook, sanity-checks and merges its 'HookedBuildInfo' into the
-- package description, runs the main hook, then the post-hook.
hookedActionWithArgs :: (UserHooks -> Args -> flags -> IO HookedBuildInfo)
                     -> (UserHooks -> Args -> PackageDescription -> LocalBuildInfo
                                   -> UserHooks -> flags -> IO ())
                     -> (UserHooks -> Args -> flags -> PackageDescription
                                   -> LocalBuildInfo -> IO ())
                     -> IO LocalBuildInfo
                     -> UserHooks -> flags -> Args -> IO ()
hookedActionWithArgs pre_hook cmd_hook post_hook get_build_config hooks flags args = do
   pbi <- pre_hook hooks args flags
   localbuildinfo <- get_build_config
   let pkg_descr0 = localPkgDescr localbuildinfo
   --pkg_descr0 <- get_pkg_descr (get_verbose flags)
   sanityCheckHookedBuildInfo pkg_descr0 pbi
   let pkg_descr = updatePackageDescription pbi pkg_descr0
   -- TODO: should we write the modified package descr back to the
   -- localbuildinfo?
   cmd_hook hooks args pkg_descr localbuildinfo hooks flags
   post_hook hooks args flags pkg_descr localbuildinfo
-- | Reject a 'HookedBuildInfo' that mentions components the package
-- does not actually have: library info for a library-less package, or
-- executable info for executables not named in the package.
sanityCheckHookedBuildInfo :: PackageDescription -> HookedBuildInfo -> IO ()
sanityCheckHookedBuildInfo PackageDescription { library = Nothing } (Just _,_)
    = die $ "The buildinfo contains info for a library, "
         ++ "but the package does not have a library."
sanityCheckHookedBuildInfo pkg_descr (_, hookExes)
    | not (null nonExistant)
    -- 'head' is safe here: the guard ensures 'nonExistant' is non-empty.
    = die $ "The buildinfo contains info for an executable called '"
         ++ head nonExistant ++ "' but the package does not have a "
         ++ "executable with that name."
  where
    pkgExeNames  = nub (map exeName (executables pkg_descr))
    hookExeNames = nub (map fst hookExes)
    nonExistant  = hookExeNames \\ pkgExeNames
sanityCheckHookedBuildInfo _ _ = return ()
-- | Read the persisted 'LocalBuildInfo' from dist\/, restore the
-- unconfigured-program info (which is not serialized), and, if the
-- .cabal file has changed since the last configure, automatically
-- re-configure with the most recently used options.
getBuildConfig :: UserHooks -> Verbosity -> FilePath -> IO LocalBuildInfo
getBuildConfig hooks verbosity distPref = do
  lbi_wo_programs <- getPersistBuildConfig distPref
  -- Restore info about unconfigured programs, since it is not serialized
  let lbi = lbi_wo_programs {
        withPrograms = restoreProgramConfiguration
                       (builtinPrograms ++ hookedPrograms hooks)
                       (withPrograms lbi_wo_programs)
      }
  case pkgDescrFile lbi of
    Nothing -> return lbi
    Just pkg_descr_file -> do
      outdated <- checkPersistBuildConfigOutdated distPref pkg_descr_file
      if outdated
        then reconfigure pkg_descr_file lbi
        else return lbi
  where
    reconfigure :: FilePath -> LocalBuildInfo -> IO LocalBuildInfo
    reconfigure pkg_descr_file lbi = do
      notice verbosity $ pkg_descr_file ++ " has been changed. "
                      ++ "Re-configuring with most recently used options. "
                      ++ "If this fails, please run configure manually.\n"
      let cFlags = configFlags lbi
      let cFlags' = cFlags {
            -- Since the list of unconfigured programs is not serialized,
            -- restore it to the same value as normally used at the beginning
            -- of a configure run:
            configPrograms = restoreProgramConfiguration
                             (builtinPrograms ++ hookedPrograms hooks)
                             (configPrograms cFlags),
            -- Use the current, not saved verbosity level:
            configVerbosity = Flag verbosity
          }
      configureAction hooks cFlags' (extraConfigArgs lbi)
-- --------------------------------------------------------------------------
-- Cleaning
-- | Remove the whole dist\/ tree plus any 'extraTmpFiles' listed by the
-- package, optionally preserving the saved build configuration when
-- @--save-configure@ was given.
clean :: PackageDescription -> CleanFlags -> IO ()
clean pkg_descr flags = do
    let distPref = fromFlag $ cleanDistPref flags
    notice verbosity "cleaning..."
    maybeConfig <- if fromFlag (cleanSaveConf flags)
                     then maybeGetPersistBuildConfig distPref
                     else return Nothing
    -- remove the whole dist/ directory rather than tracking exactly what files
    -- we created in there.
    chattyTry "removing dist/" $ do
      exists <- doesDirectoryExist distPref
      when exists (removeDirectoryRecursive distPref)
    -- Any extra files the user wants to remove
    mapM_ removeFileOrDirectory (extraTmpFiles pkg_descr)
    -- If the user wanted to save the config, write it back
    maybe (return ()) (writePersistBuildConfig distPref) maybeConfig
  where
    -- Deletes a path whether it names a directory or a plain file;
    -- silently does nothing if it does not exist.
    removeFileOrDirectory :: FilePath -> IO ()
    removeFileOrDirectory fname = do
      isDir <- doesDirectoryExist fname
      isFile <- doesFileExist fname
      if isDir then removeDirectoryRecursive fname
        else when isFile $ removeFile fname
    verbosity = fromFlag (cleanVerbosity flags)
-- --------------------------------------------------------------------------
-- Default hooks
-- | Hooks that correspond to a plain instantiation of the
-- \"simple\" build system
simpleUserHooks :: UserHooks
simpleUserHooks =
    emptyUserHooks {
       confHook  = configure,
       postConf  = finalChecks,
       buildHook = defaultBuildHook,
       replHook  = defaultReplHook,
       copyHook  = \desc lbi _ f -> install desc lbi f, -- has correct 'copy' behavior with params
       testHook  = defaultTestHook,
       benchHook = defaultBenchHook,
       instHook  = defaultInstallHook,
       sDistHook = \p l h f -> sdist p l f srcPref (allSuffixHandlers h),
       cleanHook = \p _ _ f -> clean p f,
       hscolourHook = \p l h f -> hscolour p l (allSuffixHandlers h) f,
       haddockHook  = \p l h f -> haddock  p l (allSuffixHandlers h) f,
       regHook   = defaultRegHook,
       unregHook = \p l _ f -> unregister p l f
      }
  where
    -- Deliberately a no-op.  All arguments are underscored so -Wall
    -- stays quiet; the previously bound 'verbosity' was dead code whose
    -- only consumer is the commented-out foreign-dependency check below.
    -- TODO: Replace with a coursier check
    --   checkForeignDeps pkg_descr lbi (lessVerbose verbosity)
    --   where verbosity = fromFlag (configVerbosity flags)
    finalChecks _args _flags _pkg_descr _lbi = return ()
-- | Basic autoconf 'UserHooks':
--
-- * 'postConf' runs @.\/configure@, if present.
--
-- * the pre-hooks 'preBuild', 'preClean', 'preCopy', 'preInst',
-- 'preReg' and 'preUnreg' read additional build information from
-- /package/@.buildinfo@, if present.
--
-- Thus @configure@ can use local system information to generate
-- /package/@.buildinfo@ and possibly other files.
{-# DEPRECATED defaultUserHooks
    "Use simpleUserHooks or autoconfUserHooks, unless you need Cabal-1.2\n    compatibility in which case you must stick with defaultUserHooks" #-}
-- | Deprecated Cabal-1.2-compatible hooks: like 'autoconfUserHooks' but
-- only runs @./configure@ when the script actually exists, and warns at
-- configure time that the caller should migrate.
defaultUserHooks :: UserHooks
defaultUserHooks = autoconfUserHooks {
          confHook = \pkg flags -> do
                       let verbosity = fromFlag (configVerbosity flags)
                       warn verbosity
                         "defaultUserHooks in Setup script is deprecated."
                       confHook autoconfUserHooks pkg flags,
          postConf = oldCompatPostConf
    }
    -- This is the annoying old version that only runs configure if it exists.
    -- It's here for compatibility with existing Setup.hs scripts. See:
    -- https://github.com/haskell/cabal/issues/158
    where oldCompatPostConf args flags pkg_descr lbi
              = do let verbosity = fromFlag (configVerbosity flags)
                   noExtraFlags args
                   confExists <- doesFileExist "configure"
                   when confExists $
                       runConfigureScript verbosity
                           backwardsCompatHack flags lbi
                   pbi <- getHookedBuildInfo verbosity
                   sanityCheckHookedBuildInfo pkg_descr pbi
                   let pkg_descr' = updatePackageDescription pbi pkg_descr
                   postConf simpleUserHooks args flags pkg_descr' lbi
          backwardsCompatHack = True
-- | 'simpleUserHooks' extended for autoconf-style packages: 'postConf'
-- runs @./configure@ (failing if it is missing), and the pre-hooks read
-- extra build information from the /package/@.buildinfo@ file.
autoconfUserHooks :: UserHooks
autoconfUserHooks
    = simpleUserHooks
      {
       postConf    = defaultPostConf,
       preBuild    = \_ flags ->
                       -- not using 'readHook' here because 'build' takes
                       -- extra args
                       getHookedBuildInfo $ fromFlag $ buildVerbosity flags,
       preClean    = readHook cleanVerbosity,
       preCopy     = readHook copyVerbosity,
       preInst     = readHook installVerbosity,
       preHscolour = readHook hscolourVerbosity,
       preHaddock  = readHook haddockVerbosity,
       preReg      = readHook regVerbosity,
       preUnreg    = readHook regVerbosity
      }
    where defaultPostConf :: Args -> ConfigFlags -> PackageDescription -> LocalBuildInfo -> IO ()
          defaultPostConf args flags pkg_descr lbi
              = do let verbosity = fromFlag (configVerbosity flags)
                   noExtraFlags args
                   confExists <- doesFileExist "configure"
                   if confExists
                     then runConfigureScript verbosity
                            backwardsCompatHack flags lbi
                     else die "configure script not found."
                   pbi <- getHookedBuildInfo verbosity
                   sanityCheckHookedBuildInfo pkg_descr pbi
                   let pkg_descr' = updatePackageDescription pbi pkg_descr
                   postConf simpleUserHooks args flags pkg_descr' lbi
          backwardsCompatHack = False

-- | Build a pre-hook that checks there are no extra command-line args
-- and then reads the /package/@.buildinfo@ file at the verbosity
-- extracted from the command's flags.
readHook :: (a -> Flag Verbosity) -> Args -> a -> IO HookedBuildInfo
readHook get_verbosity a flags = do
    noExtraFlags a
    getHookedBuildInfo verbosity
  where
    verbosity = fromFlag (get_verbosity flags)
-- | Run @./configure@ via @sh@, passing the configured C compiler's
-- flags through the CFLAGS environment variable and a @--with-gcc@
-- argument.  On Windows, a missing @sh@ is reported with a message
-- about needing a Unix compatibility toolchain.
runConfigureScript :: Verbosity -> Bool -> ConfigFlags -> LocalBuildInfo
                   -> IO ()
runConfigureScript verbosity backwardsCompatHack flags lbi = do
  env <- getEnvironment
  let programConfig = withPrograms lbi
  (ccProg, ccFlags) <- configureCCompiler verbosity programConfig
  -- The C compiler's compilation and linker flags (e.g.
  -- "C compiler flags" and "Gcc Linker flags" from GHC) have already
  -- been merged into ccFlags, so we set both CFLAGS and LDFLAGS
  -- to ccFlags
  -- We don't try and tell configure which ld to use, as we don't have
  -- a way to pass its flags too
  let env' = appendToEnvironment ("CFLAGS", unwords ccFlags)
             env
      args' = args ++ ["--with-gcc=" ++ ccProg]
  handleNoWindowsSH $
    rawSystemExitWithEnv verbosity "sh" args' env'

  where
    args = "./configure" : configureArgs backwardsCompatHack flags

    -- Append to an existing value of the variable rather than clobbering it.
    appendToEnvironment (key, val) [] = [(key, val)]
    appendToEnvironment (key, val) (kv@(k, v) : rest)
     | key == k = (key, v ++ " " ++ val) : rest
     | otherwise = kv : appendToEnvironment (key, val) rest

    -- Only on Windows do we translate a does-not-exist failure from
    -- launching "sh" into the friendlier 'notFoundMsg'.
    handleNoWindowsSH action
      | buildOS /= Windows
      = action
      | otherwise
      = action
          `catchIO` \ioe -> if isDoesNotExistError ioe
                              then die notFoundMsg
                              else throwIO ioe

    notFoundMsg = "The package has a './configure' script. This requires a "
               ++ "Unix compatibility toolchain such as MinGW+MSYS or Cygwin."
-- | Read the /package/@.buildinfo@ file if one is present; otherwise
-- return the empty 'HookedBuildInfo'.
getHookedBuildInfo :: Verbosity -> IO HookedBuildInfo
getHookedBuildInfo verbosity = do
  maybe_infoFile <- defaultHookedPackageDesc
  case maybe_infoFile of
    Nothing -> return emptyHookedBuildInfo
    Just infoFile -> do
      info verbosity $ "Reading parameters from " ++ infoFile
      readHookedBuildInfo verbosity infoFile
-- | Default 'testHook': ignores the 'UserHooks' and runs the test suites.
defaultTestHook :: Args -> PackageDescription -> LocalBuildInfo
                -> UserHooks -> TestFlags -> IO ()
defaultTestHook args pkg_descr localbuildinfo _ flags =
    test args pkg_descr localbuildinfo flags

-- | Default 'benchHook': ignores the 'UserHooks' and runs the benchmarks.
defaultBenchHook :: Args -> PackageDescription -> LocalBuildInfo
                 -> UserHooks -> BenchmarkFlags -> IO ()
defaultBenchHook args pkg_descr localbuildinfo _ flags =
    bench args pkg_descr localbuildinfo flags
-- | Default 'instHook': copy the built artefacts into the install
-- location, then register the package if it contains a library.
defaultInstallHook :: PackageDescription -> LocalBuildInfo
                   -> UserHooks -> InstallFlags -> IO ()
defaultInstallHook pkg_descr localbuildinfo _ flags = do
  let copyFlags = defaultCopyFlags {
                      copyDistPref   = installDistPref flags,
                      copyDest       = toFlag NoCopyDest,
                      copyVerbosity  = installVerbosity flags
                  }
  install pkg_descr localbuildinfo copyFlags
  let registerFlags = defaultRegisterFlags {
                          regDistPref  = installDistPref flags,
                          regInPlace   = installInPlace flags,
                          regPackageDB = installPackageDB flags,
                          regVerbosity = installVerbosity flags
                      }
  when (hasLibs pkg_descr) $ register pkg_descr localbuildinfo registerFlags
-- | Default 'buildHook': build using the combined hook\/built-in
-- preprocessors.
defaultBuildHook :: PackageDescription -> LocalBuildInfo
        -> UserHooks -> BuildFlags -> IO ()
defaultBuildHook pkg_descr localbuildinfo hooks flags =
  build pkg_descr localbuildinfo flags (allSuffixHandlers hooks)

-- | Default 'replHook': start a repl using the combined preprocessors.
defaultReplHook :: PackageDescription -> LocalBuildInfo
        -> UserHooks -> ReplFlags -> [String] -> IO ()
defaultReplHook pkg_descr localbuildinfo hooks flags args =
  repl pkg_descr localbuildinfo flags (allSuffixHandlers hooks) args

-- | Default 'regHook': register only when the package has a library,
-- otherwise just report that there is nothing to register.
defaultRegHook :: PackageDescription -> LocalBuildInfo
        -> UserHooks -> RegisterFlags -> IO ()
defaultRegHook pkg_descr localbuildinfo _ flags =
    if hasLibs pkg_descr
    then register pkg_descr localbuildinfo flags
    else setupMessage verbosity
           "Package contains no library to register:" (packageId pkg_descr)
  where verbosity = fromFlag (regVerbosity flags)
| typelead/epm | Cabal/Distribution/Simple.hs | bsd-3-clause | 30,035 | 0 | 17 | 8,190 | 5,832 | 2,980 | 2,852 | 487 | 8 |
{-# LANGUAGE OverloadedStrings #-}
module Text.Digestive.Heist.Extras.Debug
( dfShowView
, dfPathList
) where
import Data.Map.Syntax ((##))
import Data.Text (Text)
import qualified Data.Text as T
import Heist.Interpreted
import Text.Digestive.Types (fromPath)
import Text.Digestive.View (View, debugViewPaths)
import qualified Text.XmlHtml as X
----------------------------------------------------------------------
-- Shows the current View
-- | Debug splice that renders the 'show' of the whole 'View' inside a
-- @<pre>@ element.
dfShowView :: Monad m => View Text -> Splice m
dfShowView view = return [X.Element "pre" [] [X.TextNode rendered]]
  where
    rendered = T.pack (show view)
----------------------------------------------------------------------
-- Provides a list of possible paths that are visible from the current view
-- | Debug splice that runs its children once per path visible from the
-- current view, binding each rendered path to a @path@ splice.
dfPathList :: Monad m => View Text -> Splice m
dfPathList view = mapSplices pathSplice (debugViewPaths view)
  where
    pathSplice p = runChildrenWith ("path" ## textSplice (fromPath p))
| cimmanon/digestive-functors-heist-extras | src/Text/Digestive/Heist/Extras/Debug.hs | bsd-3-clause | 883 | 4 | 11 | 117 | 223 | 126 | 97 | 15 | 1 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
module Reflex.Dom.Builder.Static where
import Blaze.ByteString.Builder.Html.Utf8
import Control.Lens hiding (element)
import Control.Monad.Exception
import Control.Monad.Identity
import Control.Monad.Ref
import Control.Monad.State.Strict
import Control.Monad.Trans.Control
import Data.ByteString (ByteString)
import Data.ByteString.Builder (toLazyByteString)
import qualified Data.ByteString.Lazy as BL
import Data.Default
import Data.Dependent.Sum (DSum (..))
import qualified Data.Map as Map
import Data.Monoid
import qualified Data.Set as Set
import Data.Text.Encoding
import GHC.Generics
import Reflex.Class
import Reflex.Dom.Builder.Class
import Reflex.Dom.Widget.Basic (applyMap)
import Reflex.Dynamic
import Reflex.Host.Class
import Reflex.PerformEvent.Base
import Reflex.PerformEvent.Class
import Reflex.PostBuild.Class
import Reflex.Spider
-- | A DOM builder for static rendering: instead of manipulating a real
-- DOM it accumulates rendered HTML fragments in a 'StateT'.
newtype StaticDomBuilderT t m a = StaticDomBuilderT
  { unStaticDomBuilderT :: StateT [Behavior t ByteString] m a -- Accumulated Html will be in reversed order
  }
  deriving (Functor, Applicative, Monad, MonadFix, MonadIO, MonadException, MonadAsyncException)
instance MonadTransControl (StaticDomBuilderT t) where
  type StT (StaticDomBuilderT t) a = StT (StateT [Behavior t ByteString]) a
  liftWith = defaultLiftWith StaticDomBuilderT unStaticDomBuilderT
  restoreT = defaultRestoreT StaticDomBuilderT

instance MonadTrans (StaticDomBuilderT t) where
  lift = StaticDomBuilderT . lift

-- | Run the builder, returning its result together with the accumulated
-- HTML (re-reversed into document order and concatenated).
runStaticDomBuilderT :: (Monad m, Reflex t) => StaticDomBuilderT t m a -> m (a, Behavior t ByteString)
runStaticDomBuilderT (StaticDomBuilderT a) = do
  (result, a') <- runStateT a []
  return (result, mconcat $ reverse a')
-- The following instances simply lift the corresponding operations of
-- the underlying monad through the 'StateT' wrapper.
instance MonadReflexCreateTrigger t m => MonadReflexCreateTrigger t (StaticDomBuilderT t m) where
  {-# INLINABLE newEventWithTrigger #-}
  newEventWithTrigger = lift . newEventWithTrigger
  {-# INLINABLE newFanEventWithTrigger #-}
  newFanEventWithTrigger f = lift $ newFanEventWithTrigger f

instance PerformEvent t m => PerformEvent t (StaticDomBuilderT t m) where
  type Performable (StaticDomBuilderT t m) = Performable m
  {-# INLINABLE performEvent_ #-}
  performEvent_ e = lift $ performEvent_ e
  {-# INLINABLE performEvent #-}
  performEvent e = lift $ performEvent e

instance MonadSample t m => MonadSample t (StaticDomBuilderT t m) where
  {-# INLINABLE sample #-}
  sample = lift . sample

instance MonadHold t m => MonadHold t (StaticDomBuilderT t m) where
  {-# INLINABLE hold #-}
  hold v0 v' = lift $ hold v0 v'
  {-# INLINABLE holdDyn #-}
  holdDyn v0 v' = lift $ holdDyn v0 v'
  {-# INLINABLE holdIncremental #-}
  holdIncremental v0 v' = lift $ holdIncremental v0 v'
instance (Reflex t, Monad m, MonadHold t (StateT [Behavior t ByteString] m)) => Deletable t (StaticDomBuilderT t m) where
  {-# INLINABLE deletable #-}
  deletable delete (StaticDomBuilderT a) = StaticDomBuilderT $ do
    (result, a') <- lift $ runStateT a []
    let html = mconcat $ reverse a'
    -- Once 'delete' fires, the region's accumulated HTML becomes empty.
    b <- hold html (mempty <$ delete)
    modify (join b:)
    return result

-- Static rendering never fires events, so all triggers are no-ops.
instance (Monad m, Ref m ~ Ref IO, Reflex t) => TriggerEvent t (StaticDomBuilderT t m) where
  {-# INLINABLE newTriggerEvent #-}
  newTriggerEvent = return (never, const $ return ())
  {-# INLINABLE newTriggerEventWithOnComplete #-}
  newTriggerEventWithOnComplete = return (never, \_ _ -> return ())
  {-# INLINABLE newEventWithLazyTriggerWithOnComplete #-}
  newEventWithLazyTriggerWithOnComplete _ = return never

instance MonadRef m => MonadRef (StaticDomBuilderT t m) where
  type Ref (StaticDomBuilderT t m) = Ref m
  newRef = lift . newRef
  readRef = lift . readRef
  writeRef r = lift . writeRef r

instance MonadAtomicRef m => MonadAtomicRef (StaticDomBuilderT t m) where
  atomicModifyRef r = lift . atomicModifyRef r
-- | Constraints required of the underlying monad for static DOM building.
type SupportsStaticDomBuilder t m = (Reflex t, MonadIO m, MonadHold t m, MonadFix m, PerformEvent t m, Performable m ~ m, MonadReflexCreateTrigger t m, Deletable t m, MonadRef m, Ref m ~ Ref IO)

-- | The 'DomSpace' tag for static rendering.
data StaticDomSpace

-- | Static documents never produce any events, so this type has no inhabitants
data StaticDomEvent (a :: k)

-- | Static documents don't process events, so all handlers are equivalent
data StaticDomHandler (a :: k) (b :: k) = StaticDomHandler

data StaticEventSpec (er :: EventTag -> *) = StaticEventSpec deriving (Generic)

instance Default (StaticEventSpec er)

-- All "raw" node types are (): there is no real DOM behind them.
instance DomSpace StaticDomSpace where
  type EventSpec StaticDomSpace = StaticEventSpec
  type RawTextNode StaticDomSpace = ()
  type RawElement StaticDomSpace = ()
  type RawFile StaticDomSpace = ()
  type RawInputElement StaticDomSpace = ()
  type RawTextAreaElement StaticDomSpace = ()
  type RawSelectElement StaticDomSpace = ()
  addEventSpecFlags _ _ _ _ = StaticEventSpec
-- The core instance: each builder primitive appends HTML text (as
-- 'Behavior's, so attribute/content changes are sampled at render time)
-- onto the reversed accumulator.
instance SupportsStaticDomBuilder t m => DomBuilder t (StaticDomBuilderT t m) where
  type DomBuilderSpace (StaticDomBuilderT t m) = StaticDomSpace
  {-# INLINABLE textNode #-}
  textNode (TextNodeConfig initialContents setContents) = StaticDomBuilderT $ do
    --TODO: Do not escape quotation marks; see https://stackoverflow.com/questions/25612166/what-characters-must-be-escaped-in-html-5
    let escape = BL.toStrict . toLazyByteString . fromHtmlEscapedText
    modify . (:) <=< hold (escape initialContents) $ fmap escape setContents
    return $ TextNode ()
  {-# INLINABLE element #-}
  element elementTag cfg child = do
    -- https://www.w3.org/TR/html-markup/syntax.html#syntax-elements
    let voidElements = Set.fromList ["area", "base", "br", "col", "command", "embed", "hr", "img", "input", "keygen", "link", "meta", "param", "source", "track", "wbr"]
    let toAttr (AttributeName _mns k) v = encodeUtf8 k <> "=\"" <> BL.toStrict (toLazyByteString $ fromHtmlEscapedText v) <> "\""
    -- Events never fire in static rendering; the fan's trigger is a no-op.
    es <- newFanEventWithTrigger $ \_ _ -> return (return ())
    StaticDomBuilderT $ do
      (result, innerHtml) <- lift $ runStaticDomBuilderT child
      attrs0 <- foldDyn applyMap (cfg ^. initialAttributes) (cfg ^. modifyAttributes)
      let attrs1 = ffor (current attrs0) $ mconcat . fmap (\(k, v) -> " " <> toAttr k v) . Map.toList
      let tagBS = encodeUtf8 elementTag
      -- Void elements are self-closing and discard any child HTML.
      if Set.member elementTag voidElements
        then modify $ (:) $ mconcat [constant ("<" <> tagBS), attrs1, constant " />"]
        else do
          let open = mconcat [constant ("<" <> tagBS <> " "), attrs1, constant ">"]
          let close = constant $ "</" <> tagBS <> ">"
          modify $ (:) $ mconcat [open, innerHtml, close]
      return (Element es (), result)
  {-# INLINABLE placeholder #-}
  placeholder (PlaceholderConfig toInsertAbove _delete) = StaticDomBuilderT $ do
    result <- lift $ performEvent (fmap runStaticDomBuilderT toInsertAbove)
    -- Each insertion prepends; re-reverse at sampling time for document order.
    acc <- foldDyn (:) [] (fmap snd result)
    modify $ (:) $ join $ mconcat . reverse <$> current acc
    return $ Placeholder (fmap fst result) never
  {-# INLINABLE inputElement #-}
  inputElement cfg = do
    (e, _result) <- element "input" (cfg ^. inputElementConfig_elementConfig) $ return ()
    let v0 = constDyn $ cfg ^. inputElementConfig_initialValue
    let c0 = constDyn $ cfg ^. inputElementConfig_initialChecked
    let hasFocus = constDyn False -- TODO should this be coming from initialAttributes
    return $ InputElement
      { _inputElement_value = v0
      , _inputElement_checked = c0
      , _inputElement_checkedChange = never
      , _inputElement_input = never
      , _inputElement_hasFocus = hasFocus
      , _inputElement_element = e
      , _inputElement_raw = ()
      , _inputElement_files = constDyn mempty
      }
  {-# INLINABLE textAreaElement #-}
  textAreaElement cfg = do
    --TODO: Support setValue event
    (e, _domElement) <- element "textarea" (cfg ^. textAreaElementConfig_elementConfig) $ return ()
    let v0 = constDyn $ cfg ^. textAreaElementConfig_initialValue
    let hasFocus = constDyn False -- TODO should this be coming from initialAttributes
    return $ TextAreaElement
      { _textAreaElement_value = v0
      , _textAreaElement_input = never
      , _textAreaElement_hasFocus = hasFocus
      , _textAreaElement_element = e
      , _textAreaElement_raw = ()
      }
  selectElement cfg child = do
    (e, result) <- element "select" (_selectElementConfig_elementConfig cfg) child
    v <- holdDyn (cfg ^. selectElementConfig_initialValue) (cfg ^. selectElementConfig_setValue)
    let wrapped = SelectElement
          { _selectElement_value = v
          , _selectElement_change = never
          , _selectElement_hasFocus = constDyn False --TODO: How do we make sure this is correct?
          , _selectElement_element = e
          , _selectElement_raw = ()
          }
    return (wrapped, result)
  placeRawElement () = return ()
  wrapRawElement () _ = return $ Element (EventSelector $ const never) ()
--TODO: Make this more abstract --TODO: Put the WithWebView underneath PerformEventT - I think this would perform better
type StaticWidget x = PostBuildT Spider (StaticDomBuilderT Spider (PerformEventT Spider (SpiderHost Global)))

-- | Render a static widget, returning its result and the final HTML
-- bytes.  The post-build event is fired once (if anything subscribed to
-- it) before the accumulated HTML behavior is sampled.
renderStatic :: StaticWidget x a -> IO (a, ByteString)
renderStatic w = do
  runSpiderHost $ do
    (postBuild, postBuildTriggerRef) <- newEventWithTriggerRef
    ((res, bs), FireCommand fire) <- hostPerformEventT $ runStaticDomBuilderT (runPostBuildT w postBuild)
    mPostBuildTrigger <- readRef postBuildTriggerRef
    forM_ mPostBuildTrigger $ \postBuildTrigger -> fire [postBuildTrigger :=> Identity ()] $ return ()
    bs' <- sample bs
    return (res, bs')
| manyoo/reflex-dom | src/Reflex/Dom/Builder/Static.hs | bsd-3-clause | 9,971 | 7 | 24 | 1,800 | 2,683 | 1,414 | 1,269 | -1 | -1 |
{-# LANGUAGE MultiWayIf #-}
-- |
-- Module : Language.SequentCore.Lint
-- Description : Type checker for Sequent Core
-- Maintainer : maurerl@cs.uoregon.edu
-- Stability : experimental
--
-- Provides a sanity check for Sequent Core transformations in the tradition of
-- CoreLint.
module Language.SequentCore.Lint (
lintCoreBindings, lintTerm, assertLintProgram
) where
import Language.SequentCore.Pretty
import Language.SequentCore.Syntax
import Language.SequentCore.WiredIn
import Coercion ( coercionKind, coercionType )
import Id
import Kind
import Literal
import Outputable
import Pair
import Type
import Util ( debugIsOn )
import VarEnv
import Control.Monad
import Data.List
{-
Note [Scope of continuation variables]
--------------------------------------
Terms should always produce a unique result to their given continuation
(i.e. evaluation context). It would be unfortunate if evaluating a term caused
control flow to jump someplace else entirely instead of returning a
result. Maintaining a certain discipline on the scope of continuation variables
prevents unwanted, anamolous jumps of control flow outside of the return path of
a term. More specifically: a term cannot have any reference to a free
continuation variable.
In addition to checking types, we also ensure that terms do not depend on any
continuation variables in their scope. This scoping check is implemented in lint
by maintaining two separate environments: one for continuation variables only,
and another for all other variables. Continuations and commands are type checked
with respect to both environments. When it comes time for checking terms, the
environment of continuation variables is dropped entirely. Going the other way,
checking a compute term introduces a new continuation variable environment for
checking its underlying command, which contains only the continuation
environment introduced by the compute abstraction itself. This restriction means
that every command inside of a compute term *must* exit out of the continuation
it names.
-}
-- | Lint computations: 'Left' carries an error document.
type LintM = Either SDoc
-- | Environment for ordinary (term-level) variables.
type TermEnv = TvSubst
-- | Environment for continuation variables only.
-- See Note [Scope of continuation variables].
type KontEnv = TvSubst
-- | Term env, continuation env, and the current return type.
type LintEnv = (TermEnv, KontEnv, OutType)
type OutType = Type
type OutVar = Var

termEnv :: LintEnv -> TermEnv
termEnv (env, _enk, _retTy) = env

kontEnv :: LintEnv -> KontEnv
kontEnv (_env, enk, _retTy) = enk

retTy :: LintEnv -> OutType
retTy (_env, _enk, retTy) = retTy

mkLintEnv :: TermEnv -> KontEnv -> OutType -> LintEnv
mkLintEnv env enk ty = (env, enk, ty)
emptyTermEnv :: TermEnv
emptyTermEnv = emptyTvSubst

-- | Bring a term binder into scope (the RHS is currently unused).
extendTermEnv :: TermEnv -> Var -> Term Var -> TermEnv
extendTermEnv ent bndr _term
  = extendTvInScope ent bndr -- FIXME Should substitute in type!

-- NOTE(review): the fold's pattern is partial — it assumes every pair
-- is a 'BindTerm'.  Callers appear to guard this (top level has no join
-- bindings; the Rec case checks 'all bindsTerm') — TODO confirm.
extendTermEnvList :: TermEnv -> [BindPair Var] -> TermEnv
extendTermEnvList ent pairs
  = foldr (\(BindTerm x rhs) ent -> extendTermEnv ent x rhs) ent pairs

-- | Extend the appropriate environment: terms go into the term env,
-- join points into the continuation env.
extendLintEnv :: LintEnv -> BindPair Var -> LintEnv
extendLintEnv (ent, enk, retTy) (BindTerm bndr _term)
  = (extendTvInScope ent bndr, enk, retTy)
extendLintEnv (ent, enk, retTy) (BindJoin bndr _pk)
  = (ent, extendTvInScope enk bndr, retTy)

extendLintEnvList :: LintEnv -> [BindPair Var] -> LintEnv
extendLintEnvList = foldr (flip extendLintEnv)
-- | Apply a function to the term-variable environment only, leaving the
-- continuation environment and return type untouched.
mapTermLintEnv :: (TermEnv -> TermEnv) -> LintEnv -> LintEnv
mapTermLintEnv f env = mkLintEnv ent' (kontEnv env) (retTy env)
  where
    ent' = f (termEnv env)
-- | Keep the 'Left' payload (here, the lint error), discarding any
-- 'Right' result.
eitherToMaybe :: Either a b -> Maybe a
eitherToMaybe = either Just (const Nothing)
-- | Lint an entire program; returns 'Just' an error document on failure.
lintCoreBindings :: [SeqCoreBind] -> Maybe SDoc
lintCoreBindings binds = eitherToMaybe $ foldM lintCoreTopBind initEnv binds
  where
    -- All top-level bindings are considered visible (see CoreLint.lintCoreBindings)
    initEnv = extendTermEnvList emptyTermEnv (flattenBinds binds)

-- | In debug builds, panic with @msg@ (plus the program and @extraDoc@)
-- if the program fails to lint; otherwise pass it through unchanged.
assertLintProgram :: String -> [SeqCoreBind] -> SDoc -> [SeqCoreBind]
assertLintProgram msg binds extraDoc
  | not debugIsOn = binds
  | otherwise
  = case lintCoreBindings binds of
      Nothing -> binds
      Just errs -> pprPanic msg (errs $$ pprTopLevelBinds binds $$ extraDoc)
-- | Lint a single term in the given type environment; returns 'Just'
-- an error document on failure.
lintTerm :: TvSubst -> SeqCoreTerm -> Maybe SDoc
lintTerm env = eitherToMaybe . lintCoreTerm env
-- | Lint a top-level binding and return the extended term environment.
-- Join-point bindings are rejected at top level.
lintCoreTopBind :: TermEnv -> SeqCoreBind -> LintM TermEnv
lintCoreTopBind ent (NonRec (BindTerm bndr rhs))
  = do
    termTy <- lintCoreTerm ent rhs
    bndr' <- lintBndr ent bndr
    checkRhsType bndr' (idType bndr') termTy
    -- Can't have top-level tyvars, so no need to substitute
    return $ extendTermEnv ent bndr' rhs
lintCoreTopBind ent (Rec pairs)
  | all bindsTerm pairs
  = do
    bndrs' <- mapM (lintBndr ent . binderOfPair) pairs
    let pairs' = zipWith setPairBinder pairs bndrs'
        -- All binders are in scope in every RHS of the Rec group.
        ent' = extendTermEnvList ent pairs
    forM_ pairs' $ \(BindTerm bndr rhs) -> do
      termTy <- lintCoreTerm ent' rhs
      checkRhsType bndr (idType bndr) termTy
    -- Can't have top-level tyvars, so no need to substitute
    return ent'
lintCoreTopBind _ bind
  = Left $ text "Continuation binding at top level:" <+> ppr bind
-- | Lint a (possibly recursive) local binding and return the environment
-- extended with its binders.
lintCoreBind :: LintEnv -> SeqCoreBind -> LintM LintEnv
lintCoreBind env (NonRec pair)
  = do
      bndr' <- lintBndr (termEnv env) (binderOfPair pair)
      let pair' = pair `setPairBinder` bndr'
      lintCoreBindPair env pair'
      return $ extendLintEnv env pair'
lintCoreBind env (Rec pairs)
  = do
      bndrs' <- mapM (lintBndr (termEnv env) . binderOfPair) pairs
      let pairs' = zipWith setPairBinder pairs bndrs'
          -- Recursive: RHSes are checked with all binders already in scope.
          env'   = extendLintEnvList env pairs'
      mapM_ (lintCoreBindPair env') pairs'
      return env'
-- | Lint a binder's type and return the binder carrying its substituted type.
lintBndr :: TermEnv -> SeqCoreBndr -> LintM OutVar
lintBndr ent bndr
  = do
      -- Could do more, but currently just checks for free tyvars
      bndrTy <- lintType (text "binder:" <+> ppr bndr) ent (idType bndr)
      return $ bndr `setIdType` bndrTy
{-
Note [Checking terms vs. continuations]
---------------------------------------
Checking a term can be done straightforwardly: As usual, we check that it has a
consistent type, and return that type if so. But in the face of polymorphism, we
can't do the same with continuations. Consider:
$ @ Int; $ 3; $ 4; ret p
What is this continuation's type? Supposing p has type Bool, the most general
type would be forall a. a -> a -> Bool, but it could also be forall a. Int -> a
-> Bool or forall a. a -> Int -> Bool or even conceivably forall a. Int -> Int
-> Bool. Fortunately, we always *expect* a continuation to have a particular
type: If it occurs in a command, it must have the same type as the term, and if
it's bound by a let, it must have the identifier's type.
Hence the asymmetry between lintCoreTerm and lintCoreKont, where the former
returns LintM Type and the latter takes an extra Type parameter but returns
LintM ().
-}
-- | Check a binding's right-hand side against its binder's type.
-- For a join binding, the argument binders are first checked against the
-- join id's continuation type and then brought into scope for the body.
lintCoreBindPair :: LintEnv -> SeqCoreBindPair -> LintM ()
lintCoreBindPair env (BindTerm bndr term)
  = do
      termTy <- lintCoreTerm (termEnv env) term
      checkRhsType bndr (idType bndr) termTy
lintCoreBindPair env (BindJoin bndr (Join xs comm))
  = do
      lintKontBndrTypes (termEnv env) bndr xs
      let (ent, _) = mapAccumL lintBindInTermEnv (termEnv env) xs
      lintCoreCommand (mkLintEnv ent (kontEnv env) (retTy env)) comm
-- | Check that a join point's argument binders match the argument types
-- encoded in its binder's continuation type, walking through
-- unboxed-existential types (which consume a type-variable binder) and
-- unboxed tuples of argument types.
lintKontBndrTypes :: TermEnv -> SeqCoreBndr -> [SeqCoreBndr] -> LintM ()
lintKontBndrTypes env bndr argBndrs
  = do
      bndrTy <- kontIdTyOrError env bndr
      go bndrTy argBndrs
  where
    go ty bndrs
      | isUbxExistsTy ty
      = case bndrs of
          [] -> Left $ text "not enough binders for existential:" <+> pprBndr LetBind bndr
                  $$ text "binders:" <+> sep (map (pprBndr LambdaBind) argBndrs)
          -- NB: this 'bndr' shadows the outer one; the head binder
          -- instantiates the existential's type variable.
          bndr:bndrs' -> go (applyUbxExists ty (substTy env (mkTyVarTy bndr))) bndrs'
      | isUnboxedTupleType ty
      , Just (_, argTys) <- splitTyConApp_maybe ty
      = goTup argTys bndrs
      | otherwise
      = complain
    goTup [] [] = return ()
    -- A trailing existential inside the tuple re-enters the existential case.
    goTup [lastArgTy] bndrs | isUbxExistsTy lastArgTy
      = go lastArgTy bndrs
    goTup (argTy:argTys) (bndr:bndrs) | argTy `eqType` substTy env (idType bndr)
      = goTup argTys bndrs
    goTup _ _ = complain
    complain
      = Left $ text "wrong binder types for continuation binder:" <+> pprBndr LetBind bndr
          $$ text "binders:" <+> sep (map (pprBndr LambdaBind) argBndrs)
-- | Lint a term and compute its (substituted) type.
-- See Note [Checking terms vs. continuations] for the term/continuation
-- asymmetry.
lintCoreTerm :: TermEnv -> SeqCoreTerm -> LintM OutType
lintCoreTerm env (Var x)
  -- Global ids are not tracked in the environment; trust their type.
  | not (isLocalId x)
  = return (idType x)
  | Just x' <- lookupInScope (getTvInScope env) x
  = if | not (substTy env (idType x) `eqType` idType x') ->
           Left $ text "variable" <+> pprBndr LetBind x <+> text "bound as"
             <+> pprBndr LetBind x'
       | isDeadBinder x' ->
           Left $ text "occurrence of dead id" <+> pprBndr LetBind x'
       | otherwise -> return $ idType x'
  | otherwise
  = Left $ text "not found in context:" <+> pprBndr LetBind x
lintCoreTerm env (Lam x body)
  = do
      let (env', x') = lintBindInTermEnv env x
      retTy <- lintCoreTerm env' body
      return $ mkPiType x' retTy
lintCoreTerm env (Compute ty comm)
  = do
      let ty' = substTy env ty
      -- A Compute opens a fresh continuation scope whose return type is ty'.
      lintCoreCommand (mkLintEnv env emptyTvSubst ty') comm
      return ty'
lintCoreTerm _env (Lit lit)
  = return $ literalType lit
lintCoreTerm env (Type ty)
  = return $ typeKind (substTy env ty)
lintCoreTerm env (Coercion co)
  = return $ substTy env (coercionType co)
-- | Bring a lambda-bound variable into the term environment: type variables
-- go through the substitution's binder handling; term variables have their
-- types substituted and are added to the in-scope set.
lintBindInTermEnv :: TermEnv -> Var -> (TermEnv, Var)
lintBindInTermEnv env x
  | isTyVar x
  = substTyVarBndr env x
  | otherwise
  = (env', x')
  where
    x'   = substTyInId env x
    env' = extendTvInScope env x'
-- | Check a command under the given environment, dispatching on its shape.
lintCoreCommand :: LintEnv -> SeqCoreCommand -> LintM ()
lintCoreCommand env command = case command of
  Let bind body        -> do
    env' <- lintCoreBind env bind
    lintCoreCommand env' body
  Eval term frames end -> lintCoreCut env term (frames, end)
  Jump args j          -> lintCoreJump env args j
-- | Check a term against its continuation: compute the term's type, then
-- check that the continuation consumes exactly that type.
lintCoreCut :: LintEnv -> SeqCoreTerm -> SeqCoreKont -> LintM ()
lintCoreCut env term kont
  = do
      ty <- lintCoreTerm (termEnv env) term
      lintCoreKont (text "in continuation of" <+> ppr term) env ty kont
-- | Check a jump to a join point: the join id must be in scope, live, and
-- have a type consistent with its binding; the argument list must then match
-- its continuation type (walking existentials and unboxed tuples, mirroring
-- 'lintKontBndrTypes').
lintCoreJump :: LintEnv -> [SeqCoreArg] -> JoinId -> LintM ()
lintCoreJump env args j
  | Just j' <- lookupInScope (getTvInScope (kontEnv env)) j
  = if | not (substTy (termEnv env) (idType j) `eqType` idType j') ->
           Left $ text "join variable" <+> pprBndr LetBind j <+> text "bound as"
             <+> pprBndr LetBind j'
       | isDeadBinder j' ->
           Left $ text "occurrence of dead id" <+> pprBndr LetBind j'
       | otherwise -> do
           ty <- kontIdTyOrError (kontEnv env) j
           go ty args
  | otherwise
  = Left $ text "not found in context:" <+> pprBndr LetBind j
  where
    -- The full argument list, kept for error messages after 'go' has
    -- consumed part of it.
    topArgs = args
    -- A type argument instantiates an unboxed existential.
    go ty (Type argTy : args)
      = case applyUbxExists_maybe ty (substTy (termEnv env) argTy) of
          Just ty' -> go ty' args
          Nothing  -> mkError (text "type of polymorphic jump")
                        (text "existential type") (ppr ty)
    go ty args
      | isUnboxedTupleType ty
      , Just (_, argTys) <- splitTyConApp_maybe ty
      = goTup 1 argTys args
    go _ _
      = complain
    goTup _ [] [] = return ()
    -- A trailing existential in the tuple re-enters the existential case.
    goTup _ [ty] args@(Type _ : _)
      | isUbxExistsTy ty
      = go ty args
    -- Check the n-th value argument against its expected (substituted) type.
    goTup n (argTy:argTys) (arg:args)
      = do
          void $ checkingType (speakNth n <+> text "argument:" <+> ppr arg $$
                               text "of jump to:" <+> pprBndr LetBind j)
                   (substTy (termEnv env) argTy) $
                   lintCoreTerm (termEnv env) arg
          goTup (n+1) argTys args
    goTup _ _ _
      = complain
    complain
      = Left $ text "bad parameterized continuation type in binder:" <+> pprBndr LetBind j
          $$ text "for args:" <+> ppr topArgs
-- | Check a continuation (frames plus end) against the type of the term it
-- consumes. See Note [Checking terms vs. continuations] for why the expected
-- type is passed in rather than computed.
lintCoreKont :: SDoc -> LintEnv -> OutType -> SeqCoreKont -> LintM ()
lintCoreKont desc env ty (frames, end)
  = do
      -- Thread the environment and type through each frame in order.
      (env', ty') <- foldM (uncurry (lintCoreFrame desc)) (env, ty) frames
      lintCoreEnd desc env' ty' end
-- | Check one continuation frame against the incoming type, producing the
-- environment and type seen by the rest of the continuation.
lintCoreFrame :: SDoc -> LintEnv -> OutType -> SeqCoreFrame -> LintM (LintEnv, OutType)
lintCoreFrame desc env ty (App (Type tyArg))
  | Just (tyVar, resTy) <- splitForAllTy_maybe ty
  = do
      let tyArg' = substTy (termEnv env) tyArg
      if typeKind tyArg' `isSubKind` idType tyVar
        then do
          -- Record the instantiation so later frames see the substituted type.
          let env'   = mapTermLintEnv (\ent -> extendTvSubst ent tyVar tyArg') env
              resTy' = substTyWith [tyVar] [tyArg'] resTy
          return (env', resTy')
        else mkError (desc <> colon <+> text "type argument" <+> ppr tyArg)
               (ppr (typeKind tyArg')) (ppr (idType tyVar))
  | otherwise
  = Left $ desc <> colon <+> text "not a forall type:" <+> ppr ty
lintCoreFrame desc env ty (App arg)
  | Just (argTy, resTy) <- splitFunTy_maybe (substTy (termEnv env) ty)
  = do
      void $ checkingType (desc <> colon <+> ppr arg) argTy $ lintCoreTerm (termEnv env) arg
      return (env, resTy)
  | otherwise
  = Left $ desc <> colon <+> text "not a function type:" <+> ppr ty
lintCoreFrame desc env ty (Cast co)
  = do
      -- The coercion's source type must match the incoming type; the frame
      -- then yields the coercion's target type.
      let Pair fromTy toTy = coercionKind co
          fromTy' = substTy (termEnv env) fromTy
          toTy'   = substTy (termEnv env) toTy
      void $ checkingType (desc <> colon <+> text "incoming type of" <+> ppr co) ty $ return fromTy'
      return (env, toTy')
lintCoreFrame _ env ty (Tick _)
  = return (env, ty)
-- | Check the end of a continuation: a 'Return' must match the enclosing
-- scope's return type; a 'Case' checks every alternative with the scrutinee
-- binder (and the alternative's binders) in scope, then checks the case
-- binder's type against the scrutinee's type.
lintCoreEnd :: SDoc -> LintEnv -> OutType -> SeqCoreEnd -> LintM ()
lintCoreEnd desc env ty Return
  = let expTy = retTy env
    in unless (expTy `eqType` ty) $
         mkError (desc <> colon <+> text "return type") (ppr expTy) (ppr ty)
lintCoreEnd desc env ty (Case bndr alts)
  = do
      let env' = mapTermLintEnv (\ent -> extendTvInScopeSubsted ent bndr) env
      forM_ alts $ \(Alt _ bndrs rhs) ->
        lintCoreCommand (mapTermLintEnv (\ent' -> extendTvInScopeListSubsted ent' bndrs) env') rhs
      void $ checkingType (desc <> colon <+> text "type of case binder" <+> ppr bndr) ty $
        return $ substTy (termEnv env) (idType bndr)
-- | Add a variable to the in-scope set after substituting in its type.
extendTvInScopeSubsted :: TvSubst -> Var -> TvSubst
extendTvInScopeSubsted tvs var
  = extendTvInScope tvs (substTyInId tvs var)

-- | Apply a type substitution to a variable's type.
substTyInId :: TvSubst -> Var -> Var
substTyInId tvs var = var `setIdType` substTy tvs (idType var)

-- | 'extendTvInScopeSubsted' folded over a list (added right-to-left).
extendTvInScopeListSubsted :: TvSubst -> [Var] -> TvSubst
extendTvInScopeListSubsted tvs vars
  = foldr (flip extendTvInScopeSubsted) tvs vars
-- | Substitute in a type and check that the result mentions only in-scope
-- type variables; on failure, report the offending variables.
lintType :: SDoc -> TermEnv -> Type -> LintM OutType
lintType desc ent ty
  | null unknown
  = return ty'
  | otherwise
  = Left $ desc <> colon <+> text "type:" <+> ppr ty $$
      text "Type variable" <> plural unknown <+> text "not found in scope:" <+>
      pprWithCommas ppr unknown
  where
    ty'     = substTy ent ty
    known   = getInScopeVars (getTvInScope ent)
    -- Free tyvars of the substituted type that are not in the in-scope set.
    unknown = varEnvElts $ tyVarsOfType ty' `minusVarEnv` known
-- | Fail with an expected/actual mismatch message.
mkError :: SDoc -> SDoc -> SDoc -> LintM a
mkError desc ex act = Left (desc $$ text "expected:" <+> ex
                                 $$ text "actual:" <+> act)
-- | Check a binding's right-hand side: its type must equal the binder's
-- type, and the binder's kind must satisfy 'isSubOpenTypeKind'.
checkRhsType :: Var -> OutType -> OutType -> LintM ()
checkRhsType bndr bndrTy rhsTy
  = do unless (bndrTy `eqType` rhsTy) $
         mkError (text "type of RHS of" <+> ppr bndr) (ppr bndrTy) (ppr rhsTy)
       let bndrKi = typeKind bndrTy
       unless (isSubOpenTypeKind bndrKi) $
         mkError (text "kind of RHS of" <+> ppr bndr) (ppr openTypeKind) (ppr bndrKi)
-- | Run a lint action and check its resulting type against an expectation,
-- returning the actual type on success.
checkingType :: SDoc -> OutType -> LintM OutType -> LintM OutType
checkingType desc ex go
  = do
      act <- go
      unless (ex `eqType` act) $ mkError desc (ppr ex) (ppr act)
      return act
-- | Extract the argument type from a join id's continuation type, failing
-- if its (substituted) type is not a continuation type.
kontIdTyOrError :: KontEnv -> JoinId -> LintM OutType
kontIdTyOrError env k
  = case isKontTy_maybe (substTy env (idType k)) of
      Just arg -> return arg
      _        -> Left (text "bad cont type:" <+> pprBndr LetBind k)
| lukemaurer/sequent-core | src/Language/SequentCore/Lint.hs | bsd-3-clause | 15,850 | 9 | 18 | 3,757 | 4,623 | 2,262 | 2,361 | 315 | 9 |
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE OverloadedStrings #-}
module Stanag.VsmAuthorisationResponse where
import Ivory.Language
import Stanag.Packing
import Util.Logger
-- | Statically allocated 32-bit memory area for the VSM authorisation
-- response, initialised to zero.
vsmAuthorisationResponseInstance :: MemArea (Stored Uint32)
vsmAuthorisationResponseInstance = area "vsmAuthorisationResponseInstance" (Just (ival 0))

-- Splice in the definitions generated from the external Ivory source file.
[ivoryFile|Stanag/VsmAuthorisationResponse.ivory|]
| GaloisInc/loi | Stanag/VsmAuthorisationResponse.hs | bsd-3-clause | 470 | 0 | 9 | 46 | 69 | 41 | 28 | 12 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- | A subsite which serves static content which is embedded at compile time.
--
-- At compile time, you supply a list of files, directories, processing functions (like javascript
-- minification), and even custom content generators. You can also specify the specific relative
-- locations within the static subsite where these resources should appear. The 'mkEmbeddedStatic'
-- function then computes the resources and embeds them directly into the executable at
-- compile time, so that the original files do not need to be distributed along with
-- the executable. The content is also compressed and hashed at compile time, so that
-- during runtime the compressed content can be sent directly on the wire with the appropriate
-- HTTP header. The precomputed hash is used for an ETag so the client does not redownload
-- the content multiple times. There is also a development mode which does not embed the
-- contents but recomputes it on every request. A simple example using an embedded static
-- subsite is
-- <https://github.com/yesodweb/yesod/blob/master/yesod-static/sample-embed.hs static-embed.hs>.
--
-- To add this to a scaffolded project, replace the code in @Settings/StaticFiles.hs@
-- with a call to 'mkEmbeddedStatic' with the list of all your generators, use the type
-- 'EmbeddedStatic' in your site datatype for @getStatic@, update the route for @/static@ to
-- use the type 'EmbeddedStatic', use 'embedStaticContent' for 'addStaticContent' in
-- @Foundation.hs@, use the routes generated by 'mkEmbeddedStatic' and exported by
-- @Settings/StaticFiles.hs@ to link to your static content, and finally update
-- @Application.hs@ use the variable binding created by 'mkEmbeddedStatic' which
-- contains the created 'EmbeddedStatic'.
--
-- It is recommended that you serve static resources from a separate domain to save time
-- on transmitting cookies. You can use 'urlParamRenderOverride' to do so, by redirecting
-- routes to this subsite to a different domain (but the same path) and then pointing the
-- alternative domain to this server. In addition, you might consider using a reverse
-- proxy like varnish or squid to cache the static content, but the embedded content in
-- this subsite is cached and served directly from memory so is already quite fast.
module Yesod.EmbeddedStatic (
-- * Subsite
EmbeddedStatic
, embeddedResourceR
, mkEmbeddedStatic
, embedStaticContent
-- * Generators
, module Yesod.EmbeddedStatic.Generators
) where
import Control.Applicative ((<$>))
import Data.IORef
import Data.Maybe (catMaybes)
import Language.Haskell.TH
import Network.HTTP.Types.Status (status404)
import Network.Wai (responseLBS, pathInfo)
import Network.Wai.Application.Static (staticApp)
import System.IO.Unsafe (unsafePerformIO)
import Yesod.Core
( HandlerT
, Yesod(..)
, YesodSubDispatch(..)
)
import Yesod.Core.Types
( YesodSubRunnerEnv(..)
, YesodRunnerEnv(..)
)
import qualified Data.ByteString.Lazy as BL
import qualified Data.Text as T
import qualified Data.HashMap.Strict as M
import qualified WaiAppStatic.Storage.Embedded as Static
import Yesod.EmbeddedStatic.Types
import Yesod.EmbeddedStatic.Internal
import Yesod.EmbeddedStatic.Generators
-- Haddock doesn't support associated types in instances yet so we can't
-- export EmbeddedResourceR directly.
-- | Construct a route to an embedded resource.
embeddedResourceR :: [T.Text]           -- ^ path pieces of the resource
                  -> [(T.Text, T.Text)] -- ^ presumably query-string parameters
                                        --   — confirm against 'EmbeddedResourceR'
                  -> Route EmbeddedStatic
embeddedResourceR = EmbeddedResourceR
-- | Dispatch subsite requests: @res/*@ paths go to the embedded content app,
-- @widget/*@ paths to the widget static app, and anything else gets a 404.
instance Yesod master => YesodSubDispatch EmbeddedStatic (HandlerT master IO) where
    yesodSubDispatch YesodSubRunnerEnv {..} req = resp
      where
        master = yreSite ysreParentEnv
        site = ysreGetSub master
        resp = case pathInfo req of
            ("res":_) -> stApp site req
            ("widget":_) -> staticApp (widgetSettings site) req
            -- The WAI application style: apply the respond continuation
            -- directly to a 404 response.
            _ -> ($ responseLBS status404 [] "Not Found")
-- | Create the haskell variable for the link to the entry
-- | Create the haskell variable for the link to the entry.
-- Entries without a Haskell name yield no declarations; otherwise we emit a
-- type signature and a value binding for the route.
mkRoute :: ComputedEntry -> Q [Dec]
mkRoute (ComputedEntry { cHaskellName = Nothing }) = return []
mkRoute (c@ComputedEntry { cHaskellName = Just name }) = do
    routeType <- [t| Route EmbeddedStatic |]
    -- 'cLink c' already produces the link expression; quoting it only to
    -- splice it back ([| $(cLink c) |]) was a redundant round-trip.
    link <- cLink c
    return [ SigD name routeType
           , ValD (VarP name) (NormalB link) []
           ]
-- | Creates an 'EmbeddedStatic' by running, at compile time, a list of generators.
-- Each generator produces a list of entries to embed into the executable.
--
-- This template haskell splice creates a variable binding holding the resulting
-- 'EmbeddedStatic' and in addition creates variable bindings for all the routes
-- produced by the generators. For example, if a directory called static has
-- the following contents:
--
-- * js/jquery.js
--
-- * css/bootstrap.css
--
-- * img/logo.png
--
-- then a call to
--
-- > #ifdef DEVELOPMENT
-- > #define DEV_BOOL True
-- > #else
-- > #define DEV_BOOL False
-- > #endif
-- > mkEmbeddedStatic DEV_BOOL "myStatic" [embedDir "static"]
--
-- will produce variables
--
-- > myStatic :: EmbeddedStatic
-- > js_jquery_js :: Route EmbeddedStatic
-- > css_bootstrap_css :: Route EmbeddedStatic
-- > img_logo_png :: Route EmbeddedStatic
mkEmbeddedStatic :: Bool -- ^ development?
                 -> String -- ^ variable name for the created 'EmbeddedStatic'
                 -> [Generator] -- ^ the generators (see "Yesod.EmbeddedStatic.Generators")
                 -> Q [Dec]
mkEmbeddedStatic dev esName gen = do
    entries <- concat <$> sequence gen
    -- Embed each entry at compile time: dev mode recomputes per request,
    -- prod mode bakes the content in.
    computed <- runIO $ mapM (if dev then devEmbed else prodEmbed) entries
    let settings = Static.mkSettings $ return $ map cStEntry computed
        devExtra = listE $ catMaybes $ map ebDevelExtraFiles entries
        -- Runtime cache shared via a CAF; hence the unsafePerformIO.
        ioRef = [| unsafePerformIO $ newIORef M.empty |]

    -- build the embedded static
    esType <- [t| EmbeddedStatic |]
    esCreate <- if dev
                  then [| EmbeddedStatic (develApp $settings $devExtra) $ioRef |]
                  else [| EmbeddedStatic (staticApp $! $settings) $ioRef |]
    let es = [ SigD (mkName esName) esType
             , ValD (VarP $ mkName esName) (NormalB esCreate) []
             ]

    -- One route binding per generated entry (see 'mkRoute').
    routes <- mapM mkRoute computed
    return $ es ++ concat routes
-- | Use this for 'addStaticContent' to have the widget static content be served by
-- the embedded static subsite. For example,
--
-- > import Yesod
-- > import Yesod.EmbeddedStatic
-- > import Text.Jasmine (minifym)
-- >
-- > data MySite = { ..., getStatic :: EmbeddedStatic, ... }
-- >
-- > mkYesod "MySite" [parseRoutes|
-- > ...
-- > /static StaticR EmbeddedStatic getStatic
-- > ...
-- > |]
-- >
-- > instance Yesod MySite where
-- > ...
-- > addStaticContent = embedStaticContent getStatic StaticR mini
-- > where mini = if development then Right else minifym
-- > ...
embedStaticContent :: Yesod site
                   => (site -> EmbeddedStatic) -- ^ How to retrieve the embedded static subsite from your site
                   -> (Route EmbeddedStatic -> Route site) -- ^ how to convert an embedded static route
                   -> (BL.ByteString -> Either a BL.ByteString) -- ^ javascript minifier
                   -> AddStaticContent site
-- Thin wrapper: the implementation lives in the Internal module.
embedStaticContent = staticContentHelper
| erikd/yesod | yesod-static/Yesod/EmbeddedStatic.hs | mit | 7,695 | 0 | 14 | 1,609 | 911 | 553 | 358 | 79 | 3 |
{-# LANGUAGE CPP #-}
{- |
Module : $Id$
Copyright : (c) Uni Bremen 2003-2005
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : provisional
Portability : non-portable (imports Logic.Logic)
The Main module of the Heterogeneous Tool Set.
It provides the main function to call (and not much more).
-}
-- for interactice purposes use Test.hs
module Main where
import System.Environment (getArgs)
import Control.Monad
import Driver.Options
import Driver.AnaLib
import Driver.ReadFn (showFileType)
import Driver.WriteFn
import Static.DevGraph
#ifdef UNI_PACKAGE
import GUI.ShowGraph
import GUI.ShowLogicGraph
#endif
#ifdef PROGRAMATICA
import Haskell.Haskell2DG
#endif
import Common.LibName
import Interfaces.DataTypes
import CMDL.ProcessScript
import CMDL.Interface (cmdlRunShell)
import CMDL.DataTypes
import PGIP.XMLparsing
import PGIP.XMLstate (isRemote)
#ifdef SERVER
import PGIP.Server
#endif
import Maude.Maude2DG (anaMaudeFile)
import LF.Twelf2DG (anaTwelfFile)
import OMDoc.Import (anaOMDocFile)
#ifdef HEXPAT
import HolLight.HolLight2DG (anaHolLightFile)
#endif
#ifdef HAXML
import Isabelle.Isa2DG (anaIsaFile, anaThyFile)
#endif
-- | Entry point: parse command-line options, then either run the server
-- (when compiled in), run interactively/remotely, print the logic graph,
-- or analyse each input file.
main :: IO ()
main =
  getArgs >>= hetcatsOpts >>= \ opts -> let imode = interactive opts in
#ifdef SERVER
  if serve opts then hetsServer opts else
#endif
  if isRemote opts || imode
  then cmdlRun opts >>= displayGraph "" opts . getMaybeLib . intState
  else do
    putIfVerbose opts 3 $ "Options: " ++ show opts
    case (infiles opts, outputLogicGraph opts) of
      -- No input files: only valid together with the logic-graph options.
      ([], lg) -> case guiType opts of
        UseGui ->
#ifdef UNI_PACKAGE
          showPlainLG
#else
          noUniPkg
#endif
        NoGui | lg -> writeLG opts
        _ -> hetsIOError "supply option -G or -g and/or file arguments"
      (fs, False) -> mapM_ (processFile opts) fs
      _ -> hetsIOError
        "option -G is illegal together with file arguments (use -g)"
-- | Failure action used when a GUI is requested but Hets was compiled
-- without the UNI_PACKAGE graph-display support.
noUniPkg :: IO ()
noUniPkg = fail $ "No graph display interface; \n"
  ++ "UNI_PACKAGE option has been "
  ++ "disabled during compilation of Hets"
-- | Analyse one input file, dispatching on its guessed input type, write any
-- requested output files, and finally display the development graph or drop
-- into the interactive shell.
processFile :: HetcatsOpts -> FilePath -> IO ()
processFile opts file =
  if fileType opts then showFileType opts file else do
    putIfVerbose opts 3 ("Processing input: " ++ file)
    let doExit = guiType opts == UseGui
    res <- case guess file (intype opts) of
#ifdef PROGRAMATICA
      HaskellIn -> putStr "this is HaskellIn" >> anaHaskellFile opts file
#endif
#ifdef HEXPAT
      HolLightIn -> anaHolLightFile opts file
#endif
#ifdef HAXML
      IsaIn -> anaIsaFile opts file
      ThyIn -> anaThyFile opts file
#endif
      PrfIn -> anaLibReadPrfs opts file
      -- A proof script may itself continue into an interactive session.
      ProofCommand -> do
        st <- cmdlProcessFile doExit opts file
        liftM (getMaybeLib . intState)
          $ (if interactive opts then cmdlRunShell else return) st
      MaudeIn -> anaMaudeFile opts file
      TwelfIn -> anaTwelfFile opts file
      OmdocIn -> anaOMDocFile opts file
      _ -> anaLib opts file
    case res of
      Just (ln, nEnv) ->
        writeSpecFiles opts file nEnv ln $ lookupDGraph ln nEnv
      _ -> return ()
    -- ProofCommand already ran the shell above; avoid starting it twice.
    if guess file (intype opts) /= ProofCommand && interactive opts
      then cmdlRun opts >> return ()
      else displayGraph file opts res
-- | Show the analysis result in the GUI if one was requested (and compiled
-- in); otherwise do nothing.
displayGraph :: FilePath -> HetcatsOpts -> Maybe (LibName, LibEnv) -> IO ()
displayGraph file opts res = case guiType opts of
  NoGui -> return ()
  UseGui ->
#ifdef UNI_PACKAGE
    showGraph file opts res
#else
    noUniPkg
#endif
| mariefarrell/Hets | hets.hs | gpl-2.0 | 3,602 | 0 | 21 | 860 | 869 | 449 | 420 | 66 | 10 |
module Specify.Constraint where
import Specify.Expression
import Autolib.TES.Identifier
import Autolib.ToDoc
import Autolib.Reader
import Text.ParserCombinators.Parsec
import Text.ParserCombinators.Parsec.Expr hiding ( Operator )
import Data.Typeable
-- | A specification: a brace-enclosed list of constraints.
data System = System [ Constraint ]
    deriving Typeable

-- | Render as "{ c1 c2 ... }", one constraint per line.
instance ToDoc System where
    toDoc ( System cs ) = braces $ vcat $ map toDoc cs

-- | Parse a brace-enclosed sequence of constraints.
instance Reader System where
    reader = my_braces $ do
        cs <- many reader
        return $ System cs

-- | A small example system, parsed from concrete syntax.
example :: System
example = read "{ forall x . f (x + 1) == 2 + f(x) ; }"
-- | A single constraint: universally quantified variables and a boolean body.
data Constraint = Constraint [ Identifier ] ( Expression Bool )
    deriving Typeable

instance ToDoc Constraint where
    -- No "forall" prefix when there are no bound variables.
    toDoc ( Constraint [] body ) = toDoc body <+> semi
    toDoc ( Constraint vs body ) =
        vcat [ hsep [ text "forall", hsep $ map toDoc vs , text "." ]
             , toDoc body , semi
             ]

instance Reader Constraint where
    reader = do
        -- The "forall vs ." prefix is optional; absent means no variables.
        vs <- option [] $ do
            my_reserved "forall"
            vs <- many ident
            my_symbol "."
            return vs
        body <- reader
        my_symbol ";"
        return $ Constraint vs body
| florianpilz/autotool | src/Specify/Constraint.hs | gpl-2.0 | 1,117 | 5 | 16 | 277 | 344 | 175 | 169 | 35 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.StorageGateway.DeleteBandwidthRateLimit
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | This operation deletes the bandwidth rate limits of a gateway. You can delete
-- either the upload and download bandwidth rate limit, or you can delete both.
-- If you delete only one of the limits, the other limit remains unchanged. To
-- specify which gateway to work with, use the Amazon Resource Name (ARN) of the
-- gateway in your request.
--
-- <http://docs.aws.amazon.com/storagegateway/latest/APIReference/API_DeleteBandwidthRateLimit.html>
module Network.AWS.StorageGateway.DeleteBandwidthRateLimit
(
-- * Request
DeleteBandwidthRateLimit
-- ** Request constructor
, deleteBandwidthRateLimit
-- ** Request lenses
, dbrl1BandwidthType
, dbrl1GatewayARN
-- * Response
, DeleteBandwidthRateLimitResponse
-- ** Response constructor
, deleteBandwidthRateLimitResponse
-- ** Response lenses
, dbrlr1GatewayARN
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.StorageGateway.Types
import qualified GHC.Exts
-- | Request parameters for the DeleteBandwidthRateLimit operation.
data DeleteBandwidthRateLimit = DeleteBandwidthRateLimit
    { _dbrl1BandwidthType :: Text -- ^ which rate limit to delete
    , _dbrl1GatewayARN    :: Text -- ^ ARN of the target gateway
    } deriving (Eq, Ord, Read, Show)
-- | 'DeleteBandwidthRateLimit' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dbrl1BandwidthType' @::@ 'Text'
--
-- * 'dbrl1GatewayARN' @::@ 'Text'
--
-- NB: the gateway ARN is the first argument, the bandwidth type the second.
deleteBandwidthRateLimit :: Text -- ^ 'dbrl1GatewayARN'
                         -> Text -- ^ 'dbrl1BandwidthType'
                         -> DeleteBandwidthRateLimit
deleteBandwidthRateLimit p1 p2 = DeleteBandwidthRateLimit
    { _dbrl1GatewayARN    = p1
    , _dbrl1BandwidthType = p2
    }
-- | Lens onto the bandwidth type field of the request.
dbrl1BandwidthType :: Lens' DeleteBandwidthRateLimit Text
dbrl1BandwidthType =
    lens _dbrl1BandwidthType (\s a -> s { _dbrl1BandwidthType = a })

-- | Lens onto the gateway ARN field of the request.
dbrl1GatewayARN :: Lens' DeleteBandwidthRateLimit Text
dbrl1GatewayARN = lens _dbrl1GatewayARN (\s a -> s { _dbrl1GatewayARN = a })
-- | Response payload: the gateway ARN, if the service returned one.
newtype DeleteBandwidthRateLimitResponse = DeleteBandwidthRateLimitResponse
    { _dbrlr1GatewayARN :: Maybe Text
    } deriving (Eq, Ord, Read, Show, Monoid)
-- | 'DeleteBandwidthRateLimitResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dbrlr1GatewayARN' @::@ 'Maybe' 'Text'
--
-- | Construct an empty response (no gateway ARN).
deleteBandwidthRateLimitResponse :: DeleteBandwidthRateLimitResponse
deleteBandwidthRateLimitResponse = DeleteBandwidthRateLimitResponse
    { _dbrlr1GatewayARN = Nothing
    }

-- | Lens onto the optional gateway ARN of the response.
dbrlr1GatewayARN :: Lens' DeleteBandwidthRateLimitResponse (Maybe Text)
dbrlr1GatewayARN = lens _dbrlr1GatewayARN (\s a -> s { _dbrlr1GatewayARN = a })
-- The request is a JSON POST to "/"; path, query and headers carry nothing.
instance ToPath DeleteBandwidthRateLimit where
    toPath = const "/"

instance ToQuery DeleteBandwidthRateLimit where
    toQuery = const mempty

instance ToHeaders DeleteBandwidthRateLimit

instance ToJSON DeleteBandwidthRateLimit where
    toJSON DeleteBandwidthRateLimit{..} = object
        [ "GatewayARN"    .= _dbrl1GatewayARN
        , "BandwidthType" .= _dbrl1BandwidthType
        ]

instance AWSRequest DeleteBandwidthRateLimit where
    type Sv DeleteBandwidthRateLimit = StorageGateway
    type Rs DeleteBandwidthRateLimit = DeleteBandwidthRateLimitResponse

    request  = post "DeleteBandwidthRateLimit"
    response = jsonResponse

instance FromJSON DeleteBandwidthRateLimitResponse where
    -- "GatewayARN" is optional in the response body.
    parseJSON = withObject "DeleteBandwidthRateLimitResponse" $ \o -> DeleteBandwidthRateLimitResponse
        <$> o .:? "GatewayARN"
| romanb/amazonka | amazonka-storagegateway/gen/Network/AWS/StorageGateway/DeleteBandwidthRateLimit.hs | mpl-2.0 | 4,526 | 0 | 9 | 898 | 522 | 316 | 206 | 64 | 1 |
-- | Generic stream manipulations
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE RankNTypes #-}
module System.IO.Streams.Combinators
( -- * Folds
inputFoldM
, outputFoldM
, fold
, foldM
, any
, all
, maximum
, minimum
-- * Unfolds
, unfoldM
-- * Maps
, map
, mapM
, mapM_
, mapMaybe
, contramap
, contramapM
, contramapM_
, contramapMaybe
-- * Filter
, filter
, filterM
, filterOutput
, filterOutputM
-- * Takes and drops
, give
, take
, drop
, ignore
-- * Zip and unzip
, zip
, zipWith
, zipWithM
, unzip
-- * Utility
, intersperse
, skipToEof
, ignoreEof
, atEndOfInput
, atEndOfOutput
) where
------------------------------------------------------------------------------
import Control.Concurrent.MVar (newMVar, withMVar)
import Control.Monad (liftM, void, when)
import Control.Monad.IO.Class (liftIO)
import Data.Int (Int64)
import Data.IORef (IORef, atomicModifyIORef, modifyIORef, newIORef, readIORef, writeIORef)
import Data.Maybe (isJust)
import Prelude hiding (all, any, drop, filter, map, mapM, mapM_, maximum, minimum, read, take, unzip, zip, zipWith)
------------------------------------------------------------------------------
import System.IO.Streams.Internal (InputStream (..), OutputStream (..), fromGenerator, makeInputStream, makeOutputStream, read, unRead, write, yield)
------------------------------------------------------------------------------
-- | A side-effecting fold over an 'OutputStream', as a stream transformer.
--
-- The IO action returned by 'outputFoldM' can be used to fetch and reset the updated
-- seed value. Example:
--
-- @
-- ghci> is <- Streams.'System.IO.Streams.List.fromList' [1, 2, 3::Int]
-- ghci> (os, getList) <- Streams.'System.IO.Streams.List.listOutputStream'
-- ghci> (os', getSeed) \<- Streams.'outputFoldM' (\\x y -> return (x+y)) 0 os
-- ghci> Streams.'System.IO.Streams.connect' is os'
-- ghci> getList
-- [1,2,3]
-- ghci> getSeed
-- 6
-- @
outputFoldM :: (a -> b -> IO a)           -- ^ fold function
            -> a                          -- ^ initial seed
            -> OutputStream b             -- ^ output stream
            -> IO (OutputStream b, IO a)  -- ^ returns a new stream as well as
                                          -- an IO action to fetch and reset
                                          -- the updated seed value.
outputFoldM step seed downstream = do
    accRef  <- newIORef seed
    wrapped <- makeOutputStream (consume accRef)
    return (wrapped, takeSeed accRef)
  where
    -- End-of-stream is forwarded untouched; each payload chunk is folded
    -- strictly into the accumulator before being passed downstream.
    consume _ Nothing = write Nothing downstream
    consume ref chunk@(Just x) = do
        !old <- readIORef ref
        !new <- step old x
        writeIORef ref new
        write chunk downstream
    -- Atomically hand back the accumulated value and restart from the seed.
    takeSeed ref = atomicModifyIORef ref $ \acc -> (seed, acc)
------------------------------------------------------------------------------
-- | A side-effecting fold over an 'InputStream', as a stream transformer.
--
-- The IO action returned by 'inputFoldM' can be used to fetch and reset the updated seed
-- value. Example:
--
-- @
-- ghci> is <- Streams.'System.IO.Streams.List.fromList' [1, 2, 3::Int]
-- ghci> (is', getSeed) \<- Streams.'inputFoldM' (\\x y -> return (x+y)) 0 is
-- ghci> Streams.'System.IO.Streams.List.toList' is'
-- [1,2,3]
-- ghci> getSeed
-- 6
-- @
inputFoldM :: (a -> b -> IO a)          -- ^ fold function
           -> a                         -- ^ initial seed
           -> InputStream b             -- ^ input stream
           -> IO (InputStream b, IO a)  -- ^ returns a new stream as well as an
                                        -- IO action to fetch and reset the
                                        -- updated seed value.
inputFoldM step seed upstream = do
    accRef  <- newIORef seed
    wrapped <- makeInputStream (produce accRef)
    return (wrapped, takeSeed accRef)
  where
    produce ref = read upstream >>= observe ref
    -- Fold each element strictly into the accumulator, then pass it through
    -- unchanged. End-of-stream is yielded as-is.
    observe _ Nothing = return Nothing
    observe ref item@(Just x) = do
        !old <- readIORef ref
        !new <- step old x
        writeIORef ref new
        return item
    -- Atomically hand back the accumulated value and restart from the seed.
    takeSeed ref = atomicModifyIORef ref $ \acc -> (seed, acc)
------------------------------------------------------------------------------
-- | A left fold over an input stream. The input stream is fully consumed. See
-- 'Prelude.foldl'.
--
-- Example:
--
-- @
-- ghci> Streams.'System.IO.Streams.fromList' [1..10] >>= Streams.'fold' (+) 0
-- 55
-- @
fold :: (s -> a -> s)   -- ^ fold function
     -> s               -- ^ initial seed
     -> InputStream a   -- ^ input stream
     -> IO s
fold step start input = loop start
  where
    -- Strict accumulator (bang pattern) so large streams don't build thunks.
    loop !acc = do
        next <- read input
        case next of
          Nothing -> return acc
          Just x  -> loop (step acc x)
------------------------------------------------------------------------------
-- | A side-effecting left fold over an input stream. The input stream is fully
-- consumed. See 'Prelude.foldl'.
--
-- Example:
--
-- @
-- ghci> Streams.'System.IO.Streams.fromList' [1..10] >>= Streams.'foldM' (\x y -> 'return' (x + y)) 0
-- 55
-- @
foldM :: (s -> a -> IO s)   -- ^ fold function
      -> s                  -- ^ initial seed
      -> InputStream a      -- ^ input stream
      -> IO s
foldM step start input = loop start
  where
    -- Strict accumulator; the step action runs once per element, in order.
    loop !acc = do
        next <- read input
        case next of
          Nothing -> return acc
          Just x  -> step acc x >>= loop
------------------------------------------------------------------------------
-- | @any predicate stream@ returns 'True' if any element in @stream@ matches
-- the predicate.
--
-- 'any' consumes as few elements as possible, ending consumption if an element
-- satisfies the predicate.
--
-- @
-- ghci> is <- Streams.'System.IO.Streams.List.fromList' [1, 2, 3]
-- ghci> Streams.'System.IO.Streams.Combinators.any' (> 0) is -- Consumes one element
-- True
-- ghci> Streams.'System.IO.Streams.read' is
-- Just 2
-- ghci> Streams.'System.IO.Streams.Combinators.any' even is -- Only 3 remains
-- False
-- @
any :: (a -> Bool) -> InputStream a -> IO Bool
any p input = loop
  where
    -- Short-circuits on the first satisfying element; consumes nothing more.
    loop = read input >>=
           maybe (return False)
                 (\x -> if p x then return True else loop)
------------------------------------------------------------------------------
-- | @all predicate stream@ returns 'True' if every element in @stream@ matches
-- the predicate.
--
-- 'all' consumes as few elements as possible, ending consumption if any element
-- fails the predicate.
--
-- @
-- ghci> is <- Streams.'System.IO.Streams.List.fromList' [1, 2, 3]
-- ghci> Streams.'System.IO.Streams.Combinators.all' (< 0) is -- Consumes one element
-- False
-- ghci> Streams.'System.IO.Streams.read' is
-- Just 2
-- ghci> Streams.'System.IO.Streams.Combinators.all' odd is -- Only 3 remains
-- True
-- @
all :: (a -> Bool) -> InputStream a -> IO Bool
all p input = loop
  where
    -- Short-circuits on the first failing element; consumes nothing more.
    loop = read input >>=
           maybe (return True)
                 (\x -> if p x then loop else return False)
------------------------------------------------------------------------------
-- | @maximum stream@ returns the greatest element in @stream@ or 'Nothing' if
-- the stream is empty.
--
-- 'maximum' consumes the entire stream.
--
-- @
-- ghci> is <- Streams.'System.IO.Streams.List.fromList' [1, 2, 3]
-- ghci> Streams.'System.IO.Streams.Combinators.maximum' is
-- 3
-- ghci> Streams.'System.IO.Streams.read' is -- The stream is now empty
-- Nothing
-- @
maximum :: (Ord a) => InputStream a -> IO (Maybe a)
maximum stream = read stream >>= maybe (return Nothing) loop
  where
    -- Track the best candidate seen so far; the whole stream is drained.
    loop best = read stream >>=
                maybe (return (Just best)) (loop . max best)
------------------------------------------------------------------------------
-- | @minimum stream@ returns the smallest element in @stream@, or 'Nothing'
-- if the stream is empty.
--
-- 'minimum' consumes the entire stream.
--
-- @
-- ghci> is <- Streams.'System.IO.Streams.List.fromList' [1, 2, 3]
-- ghci> Streams.'System.IO.Streams.Combinators.minimum' is
-- 1
-- ghci> Streams.'System.IO.Streams.read' is -- The stream is now empty
-- Nothing
-- @
minimum :: (Ord a) => InputStream a -> IO (Maybe a)
minimum stream = read stream >>= maybe (return Nothing) loop
  where
    -- Track the smallest candidate seen so far; the whole stream is drained.
    loop best = read stream >>=
                maybe (return (Just best)) (loop . min best)
------------------------------------------------------------------------------
-- | @unfoldM f seed@ builds an 'InputStream' from successively applying @f@ to
-- the @seed@ value, continuing if @f@ produces 'Just' and halting on
-- 'Nothing'.
--
-- @
-- ghci> is \<- Streams.'System.IO.Streams.Combinators.unfoldM' (\n -> return $ if n < 3 then Just (n, n + 1) else Nothing) 0
-- ghci> Streams.'System.IO.Streams.List.toList' is
-- [0,1,2]
-- @
unfoldM :: (b -> IO (Maybe (a, b))) -> b -> IO (InputStream a)
unfoldM step seed0 = fromGenerator (loop seed0)
  where
    -- Drive the generator until the step function signals 'Nothing'.
    loop seed = do
        next <- liftIO (step seed)
        case next of
          Nothing         -> return $! ()
          Just (a, seed') -> do
              yield a
              loop seed'
------------------------------------------------------------------------------
-- | Maps a pure function over an 'InputStream'.
--
-- @map f s@ passes all output from @s@ through the function @f@.
--
-- Satisfies the following laws:
--
-- @
-- Streams.'map' (g . f) === Streams.'map' f >=> Streams.'map' g
-- Streams.'map' 'id' === Streams.'makeInputStream' . Streams.'read'
-- @
map :: (a -> b) -> InputStream a -> IO (InputStream b)
-- Outer fmap lifts over IO, inner fmap over Maybe: EOF passes through.
map f input = makeInputStream (fmap (fmap f) (read input))
------------------------------------------------------------------------------
-- | Maps an impure function over an 'InputStream'.
--
-- @mapM f s@ passes all output from @s@ through the IO action @f@.
--
-- Satisfies the following laws:
--
-- @
-- Streams.'mapM' (f >=> g) === Streams.'mapM' f >=> Streams.'mapM' g
-- Streams.'mapM' 'return' === Streams.'makeInputStream' . Streams.'read'
-- @
--
mapM :: (a -> IO b) -> InputStream a -> IO (InputStream b)
mapM f input = makeInputStream produce
  where
    -- EOF short-circuits; otherwise run the action and rewrap in 'Just'.
    produce = read input >>= maybe (return Nothing) apply
    apply x = liftM Just (f x)
------------------------------------------------------------------------------
-- | Maps a side effect over an 'InputStream'.
--
-- @mapM_ f s@ produces a new input stream that passes all output from @s@
-- through the side-effecting IO action @f@.
--
-- Example:
--
-- @
-- ghci> Streams.'System.IO.Streams.fromList' [1,2,3] >>=
-- Streams.'mapM_' ('putStrLn' . 'show' . (*2)) >>=
-- Streams.'System.IO.Streams.toList'
-- 2
-- 4
-- 6
-- [1,2,3]
-- @
--
mapM_ :: (a -> IO b) -> InputStream a -> IO (InputStream a)
mapM_ f input = makeInputStream $ do
    item <- read input
    -- Run the side effect (result discarded), then pass the element through.
    case item of
      Nothing -> return $! ()
      Just x  -> void (f x)
    return item
------------------------------------------------------------------------------
-- | A version of map that discards elements
--
-- @mapMaybe f s@ passes all output from @s@ through the function @f@ and
-- discards elements for which @f s@ evaluates to 'Nothing'.
--
-- Example:
--
-- @
-- ghci> Streams.'System.IO.Streams.fromList' [Just 1, Nothing, Just 3] >>=
-- Streams.'mapMaybe' 'id' >>=
-- Streams.'System.IO.Streams.toList'
-- [1,3]
-- @
--
-- /Since: 1.2.1.0/
mapMaybe :: (a -> Maybe b) -> InputStream a -> IO (InputStream b)
mapMaybe f input = makeInputStream produce
  where
    produce = read input >>= maybe (return Nothing) sieve
    -- Skip elements mapped to 'Nothing' by recursing for the next one.
    sieve x = case f x of
                Nothing -> produce
                result  -> return result
------------------------------------------------------------------------------
-- | Contravariant counterpart to 'map'.
--
-- @contramap f s@ passes all input to @s@ through the function @f@.
--
-- Satisfies the following laws:
--
-- @
-- Streams.'contramap' (g . f) === Streams.'contramap' g >=> Streams.'contramap' f
-- Streams.'contramap' 'id' === 'return'
-- @
contramap :: (a -> b) -> OutputStream b -> IO (OutputStream a)
-- fmap over the Maybe chunk, so EOF ('Nothing') is forwarded unchanged.
contramap f out = makeOutputStream (\chunk -> write (fmap f chunk) out)
------------------------------------------------------------------------------
-- | Contravariant counterpart to 'mapM'.
--
-- @contramapM f s@ passes all input to @s@ through the IO action @f@
--
-- Satisfies the following laws:
--
-- @
-- Streams.'contramapM' (f >=> g) = Streams.'contramapM' g >=> Streams.'contramapM' f
-- Streams.'contramapM' 'return' = 'return'
-- @
contramapM :: (a -> IO b) -> OutputStream b -> IO (OutputStream a)
contramapM f out = makeOutputStream feed
  where
    feed chunk =
        case chunk of
          Nothing -> write Nothing out
          Just x  -> do
              -- Force the result before writing, as the original did.
              !y <- f x
              write (Just y) out
------------------------------------------------------------------------------
-- | Equivalent to 'mapM_' for output.
--
-- @contramapM_ f s@ passes all input to @s@ through the side-effecting IO
-- action @f@.
--
contramapM_ :: (a -> IO b) -> OutputStream a -> IO (OutputStream a)
contramapM_ f out = makeOutputStream feed
  where
    -- Run the side effect (result discarded), then forward the chunk —
    -- including EOF — untouched.
    feed chunk = do
        case chunk of
          Nothing -> return $! ()
          Just x  -> void (f x)
        write chunk out
------------------------------------------------------------------------------
-- | Contravariant counterpart to 'mapMaybe'.
--
-- @contramapMaybe f s@ passes all input to @s@ through the function @f@.
-- Discards all the elements for which @f@ returns 'Nothing'.
--
-- /Since: 1.2.1.0/
--
contramapMaybe :: (a -> Maybe b) -> OutputStream b -> IO (OutputStream a)
contramapMaybe f out = makeOutputStream feed
  where
    -- EOF is forwarded; payloads mapped to 'Nothing' are silently dropped.
    feed Nothing  = write Nothing out
    feed (Just a) = maybe (return ()) (\b -> write (Just b) out) (f a)
------------------------------------------------------------------------------
-- | Drives an 'InputStream' to end-of-stream, discarding all of the yielded
-- values.
skipToEof :: InputStream a -> IO ()
skipToEof input = loop
  where
    -- Pull and discard until the stream yields 'Nothing'.
    loop = do
        m <- read input
        case m of
          Nothing -> return $! ()
          Just _  -> loop
{-# INLINE skipToEof #-}
------------------------------------------------------------------------------
-- | Drops chunks from an input stream if they fail to match a given filter
-- predicate. See 'Prelude.filter'.
--
-- Items pushed back to the returned stream are propagated back upstream.
--
-- Example:
--
-- @
-- ghci> Streams.'System.IO.Streams.fromList' [\"the\", \"quick\", \"brown\", \"fox\"] >>=
-- Streams.'filterM' ('return' . (/= \"brown\")) >>= Streams.'System.IO.Streams.toList'
-- [\"the\",\"quick\",\"fox\"]
-- @
filterM :: (a -> IO Bool)
        -> InputStream a
        -> IO (InputStream a)
filterM p upstream = return $! InputStream produce pushback
  where
    produce = read upstream >>= maybe (return Nothing) test
    -- Keep the element when the predicate holds, otherwise recurse.
    test x = do
        keep <- p x
        if keep
          then return $! Just x
          else produce
    -- Pushback is propagated straight to the wrapped stream.
    pushback x = unRead x upstream
------------------------------------------------------------------------------
-- | Drops chunks from an input stream if they fail to match a given filter
-- predicate. See 'Prelude.filter'.
--
-- Items pushed back to the returned stream are propagated back upstream.
--
-- Example:
--
-- @
-- ghci> Streams.'System.IO.Streams.fromList' [\"the\", \"quick\", \"brown\", \"fox\"] >>=
-- Streams.'filter' (/= \"brown\") >>= Streams.'System.IO.Streams.toList'
-- [\"the\",\"quick\",\"fox\"]
-- @
filter :: (a -> Bool)
       -> InputStream a
       -> IO (InputStream a)
filter p upstream = return $! InputStream produce pushback
  where
    -- Pure predicate version of 'filterM': skip non-matching elements.
    produce = read upstream >>= maybe (return Nothing) test
    test x
      | p x       = return $! Just x
      | otherwise = produce
    -- Pushback is propagated straight to the wrapped stream.
    pushback x = unRead x upstream
------------------------------------------------------------------------------
-- | The function @intersperse v s@ wraps the 'OutputStream' @s@, creating a
-- new output stream that writes its input to @s@ interspersed with the
-- provided value @v@. See 'Data.List.intersperse'.
--
-- Example:
--
-- @
-- ghci> import Control.Monad ((>=>))
-- ghci> is <- Streams.'System.IO.Streams.List.fromList' [\"nom\", \"nom\", \"nom\"::'ByteString']
-- ghci> Streams.'System.IO.Streams.List.outputToList' (Streams.'intersperse' \"burp!\" >=> Streams.'System.IO.Streams.connect' is)
-- [\"nom\",\"burp!\",\"nom\",\"burp!\",\"nom\"]
-- @
intersperse :: a -> OutputStream a -> IO (OutputStream a)
intersperse sep out = do
    -- Flag records whether at least one element has already been written.
    started <- newIORef False
    makeOutputStream (feed started)
  where
    feed _ Nothing = write Nothing out
    feed startedRef chunk = do
        notFirst <- readIORef startedRef
        writeIORef startedRef True
        -- Emit the separator before every element except the first.
        when notFirst $ write (Just sep) out
        write chunk out
------------------------------------------------------------------------------
-- | Combines two input streams. Continues yielding elements from both input
-- streams until one of them finishes.
zip :: InputStream a -> InputStream b -> IO (InputStream (a, b))
zip isA isB = makeInputStream produce
  where
    produce = do
        ma <- read isA
        case ma of
          Nothing -> return Nothing
          Just a  -> do
              mb <- read isB
              case mb of
                -- Second stream ended first: give the unpaired element back.
                Nothing -> unRead a isA >> return Nothing
                Just b  -> return $! Just $! (a, b)
------------------------------------------------------------------------------
-- | Combines two input streams using the supplied function. Continues yielding
-- elements from both input streams until one of them finishes.
zipWith :: (a -> b -> c)
        -> InputStream a
        -> InputStream b
        -> IO (InputStream c)
zipWith f isA isB = makeInputStream produce
  where
    produce = do
        ma <- read isA
        case ma of
          Nothing -> return Nothing
          Just a  -> do
              mb <- read isB
              case mb of
                -- Second stream ended first: give the unpaired element back.
                Nothing -> unRead a isA >> return Nothing
                Just b  -> return $! Just $! f a b
------------------------------------------------------------------------------
-- | Combines two input streams using the supplied monadic function. Continues
-- yielding elements from both input streams until one of them finishes.
zipWithM :: (a -> b -> IO c)
         -> InputStream a
         -> InputStream b
         -> IO (InputStream c)
zipWithM f isA isB = makeInputStream produce
  where
    produce = do
        ma <- read isA
        case ma of
          Nothing -> return Nothing
          Just a  -> do
              mb <- read isB
              case mb of
                -- Second stream ended first: give the unpaired element back.
                Nothing -> unRead a isA >> return Nothing
                Just b  -> do
                    c <- f a b
                    return $! Just $! c
------------------------------------------------------------------------------
-- | Filters output to be sent to the given 'OutputStream' using a pure
-- function. See 'filter'.
--
-- Example:
--
-- @
-- ghci> import qualified "Data.ByteString.Char8" as S
-- ghci> os1 \<- Streams.'System.IO.Streams.stdout' >>= Streams.'System.IO.Streams.unlines
-- ghci> os2 \<- os1 >>= Streams.'contramap' (S.pack . show) >>= Streams.'filterOutput' even
-- ghci> Streams.'write' (Just 3) os2
-- ghci> Streams.'write' (Just 4) os2
-- 4
-- @
{- Note: The example is a lie, because unlines has weird behavior -}
filterOutput :: (a -> Bool) -> OutputStream a -> IO (OutputStream a)
filterOutput p downstream = makeOutputStream feed
  where
    -- EOF is always forwarded; payloads are forwarded only when they match.
    feed Nothing = write Nothing downstream
    feed m@(Just x)
      | p x       = write m downstream
      | otherwise = return ()
------------------------------------------------------------------------------
-- | Filters output to be sent to the given 'OutputStream' using a predicate
-- function in IO. See 'filterM'.
--
-- Example:
--
-- @
-- ghci> let check a = putStrLn ("Allow " ++ show a ++ "?") >> readLn :: IO Bool
-- ghci> import qualified Data.ByteString.Char8 as S
-- ghci> os1 <- Streams.'System.IO.Streams.unlines' Streams.'System.IO.Streams.stdout'
-- ghci> os2 \<- os1 >>= Streams.'contramap' (S.pack . show) >>= Streams.'filterOutputM' check
-- ghci> Streams.'System.IO.Streams.write' (Just 3) os2
-- Allow 3?
-- False\<Enter>
-- ghci> Streams.'System.IO.Streams.write' (Just 4) os2
-- Allow 4?
-- True\<Enter>
-- 4
-- @
filterOutputM :: (a -> IO Bool) -> OutputStream a -> IO (OutputStream a)
filterOutputM p downstream = makeOutputStream feed
  where
    -- EOF is always forwarded; a payload is forwarded only when the
    -- (side-effecting) predicate accepts it.
    feed Nothing = write Nothing downstream
    feed m@(Just x) = do
        keep <- p x
        if keep
          then write m downstream
          else return $! ()
------------------------------------------------------------------------------
-- | Takes apart a stream of pairs, producing a pair of input streams. Reading
-- from either of the produced streams will cause a pair of values to be pulled
-- from the original stream if necessary. Note that reading @n@ values from one
-- of the returned streams will cause @n@ values to be buffered at the other
-- stream.
--
-- Access to the original stream is thread safe, i.e. guarded by a lock.
unzip :: forall a b . InputStream (a, b) -> IO (InputStream a, InputStream b)
unzip os = do
    -- One lock guards all access to the source stream and both buffers.
    lock <- newMVar $! ()
    -- Each buffer is a difference list ([x] -> [x]) holding elements already
    -- pulled from the source but not yet consumed on that side.
    buf1 <- newIORef id
    buf2 <- newIORef id
    is1 <- makeInputStream $ src1 lock buf1 buf2
    is2 <- makeInputStream $ src2 lock buf1 buf2
    return (is1, is2)
  where
    -- Swap the pair so 'more' can serve either side with one code path.
    twist (a,b) = (b,a)
    -- Producer for the first stream: drain its own buffer first; only when
    -- empty, pull a fresh pair and stash the partner element in the other
    -- side's buffer.
    src1 lock aBuf bBuf = withMVar lock $ const $ do
        dl <- readIORef aBuf
        case dl [] of
          [] -> more os id bBuf
          (x:xs) -> writeIORef aBuf (xs++) >> (return $! Just x)
    -- Producer for the second stream, symmetric to 'src1' (note 'twist').
    src2 lock aBuf bBuf = withMVar lock $ const $ do
        dl <- readIORef bBuf
        case dl [] of
          [] -> more os twist aBuf
          (y:ys) -> writeIORef bBuf (ys++) >> (return $! Just y)
    -- Pull one pair from the source; 'proj' orients it so the first
    -- component is returned now and the second is appended to 'buf'.
    more :: forall a b x y .
            InputStream (a,b)
         -> ((a,b) -> (x,y))
         -> IORef ([y] -> [y])
         -> IO (Maybe x)
    more origs proj buf = read origs >>=
                          maybe (return Nothing)
                                (\x -> do
                                     let (a, b) = proj x
                                     -- Append to the difference list (O(1)).
                                     modifyIORef buf (. (b:))
                                     return $! Just a)
------------------------------------------------------------------------------
-- | Wraps an 'InputStream', producing a new 'InputStream' that will produce at
-- most @n@ items, subsequently yielding end-of-stream forever.
--
-- Items pushed back to the returned 'InputStream' will be propagated upstream,
-- modifying the count of taken items accordingly.
--
-- Example:
--
-- @
-- ghci> is <- Streams.'fromList' [1..9::Int]
-- ghci> is' <- Streams.'take' 1 is
-- ghci> Streams.'read' is'
-- Just 1
-- ghci> Streams.'read' is'
-- Nothing
-- ghci> Streams.'System.IO.Streams.peek' is
-- Just 2
-- ghci> Streams.'unRead' 11 is'
-- ghci> Streams.'System.IO.Streams.peek' is
-- Just 11
-- ghci> Streams.'System.IO.Streams.peek' is'
-- Just 11
-- ghci> Streams.'read' is'
-- Just 11
-- ghci> Streams.'read' is'
-- Nothing
-- ghci> Streams.'read' is
-- Just 2
-- ghci> Streams.'toList' is
-- [3,4,5,6,7,8,9]
-- @
--
take :: Int64 -> InputStream a -> IO (InputStream a)
take k0 input = do
    -- Mutable count of elements still allowed through.
    kref <- newIORef k0
    return $! InputStream (prod kref) (pb kref)
  where
    prod kref = do
        !k <- readIORef kref
        if k <= 0
          then return Nothing
          else do
              m <- read input
              -- Only a real element (not upstream EOF) consumes the budget.
              when (isJust m) $ modifyIORef kref $ \x -> x - 1
              return m
    -- Pushback goes upstream and refunds one unit of the budget, so the
    -- pushed-back element can be read again through this wrapper.
    pb kref !s = do
        unRead s input
        modifyIORef kref (+1)
------------------------------------------------------------------------------
-- | Wraps an 'InputStream', producing a new 'InputStream' that will drop the
-- first @n@ items produced by the wrapped stream. See 'Prelude.drop'.
--
-- Items pushed back to the returned 'InputStream' will be propagated upstream,
-- modifying the count of dropped items accordingly.
drop :: Int64 -> InputStream a -> IO (InputStream a)
drop k0 input = do
    -- Mutable count of elements still to be discarded.
    kref <- newIORef k0
    return $! InputStream (prod kref) (pb kref)
  where
    prod kref = do
        !k <- readIORef kref
        if k <= 0
          then getInput kref
          else discard kref
    -- Every successful read decrements the counter (it goes negative once
    -- the drop phase is over); pushback increments it, keeping the
    -- bookkeeping consistent with 'pb' below.
    getInput kref = do
        read input >>= maybe (return Nothing) (\c -> do
            modifyIORef kref (\x -> x - 1)
            return $! Just c)
    -- Throw the element away and try again until the counter is spent.
    discard kref = getInput kref >>= maybe (return Nothing) (const $ prod kref)
    pb kref s = do
        unRead s input
        modifyIORef kref (+1)
------------------------------------------------------------------------------
-- | Wraps an 'OutputStream', producing a new 'OutputStream' that will pass at
-- most @n@ items on to the wrapped stream, subsequently ignoring the rest of
-- the input.
--
give :: Int64 -> OutputStream a -> IO (OutputStream a)
give limit downstream = newIORef limit >>= makeOutputStream . feed
  where
    -- Note: as in the original, end-of-stream is swallowed here, not
    -- forwarded downstream.
    feed _ Nothing = return $! ()
    feed ref (Just x) = do
        !remaining <- readIORef ref
        when (remaining > 0) $ do
            writeIORef ref $! remaining - 1
            write (Just x) downstream
------------------------------------------------------------------------------
-- | Wraps an 'OutputStream', producing a new 'OutputStream' that will ignore
-- the first @n@ items received, subsequently passing the rest of the input on
-- to the wrapped stream.
--
ignore :: Int64 -> OutputStream a -> IO (OutputStream a)
ignore count downstream = newIORef count >>= makeOutputStream . feed
  where
    -- Note: as in the original, end-of-stream is swallowed here, not
    -- forwarded downstream.
    feed _ Nothing = return $! ()
    feed ref (Just x) = do
        !remaining <- readIORef ref
        if remaining > 0
          then writeIORef ref $! remaining - 1
          else write (Just x) downstream
------------------------------------------------------------------------------
-- | Wraps an 'OutputStream', ignoring any end-of-stream 'Nothing' values
-- written to the returned stream.
--
-- /Since: 1.0.1.0/
--
ignoreEof :: OutputStream a -> IO (OutputStream a)
ignoreEof out = return $ OutputStream feed
  where
    -- Drop 'Nothing' on the floor; everything else goes through.
    feed chunk = case chunk of
                   Nothing -> return $! ()
                   _       -> write chunk out
------------------------------------------------------------------------------
-- | Wraps an 'InputStream', running the specified action when the stream
-- yields end-of-file.
--
-- /Since: 1.0.2.0/
--
atEndOfInput :: IO b -> InputStream a -> IO (InputStream a)
atEndOfInput action input = return $! InputStream produce pushback
  where
    -- Run the callback (result discarded) every time upstream yields EOF.
    produce = do
        item <- read input
        case item of
          Nothing -> void action >> return Nothing
          Just x  -> return (Just x)
    pushback x = unRead x input
------------------------------------------------------------------------------
-- | Wraps an 'OutputStream', running the specified action when the stream
-- receives end-of-file.
--
-- /Since: 1.0.2.0/
--
atEndOfOutput :: IO b -> OutputStream a -> IO (OutputStream a)
atEndOfOutput action out = makeOutputStream feed
  where
    -- Forward EOF first, then run the callback (result discarded).
    feed Nothing = do
        write Nothing out
        void action
    feed chunk = write chunk out
| TomMD/io-streams | src/System/IO/Streams/Combinators.hs | bsd-3-clause | 26,753 | 0 | 18 | 6,528 | 5,143 | 2,716 | 2,427 | 334 | 3 |
{-# LANGUAGE DeriveDataTypeable, DeriveGeneric #-}
{-
Copyright (C) 2006-2016 John MacFarlane <jgm@berkeley.edu>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- |
Module : Text.Pandoc.Error
Copyright : Copyright (C) 2006-2016 John MacFarlane
License : GNU GPL, version 2 or above
Maintainer : John MacFarlane <jgm@berkeley.edu>
Stability : alpha
Portability : portable
This module provides a standard way to deal with possible errors encounted
during parsing.
-}
module Text.Pandoc.Error (PandocError(..), handleError) where
import Text.Parsec.Error
import Text.Parsec.Pos hiding (Line)
import Text.Pandoc.Compat.Except
import GHC.Generics (Generic)
import Data.Generics (Typeable)
import Control.Exception (Exception)
-- | The raw input text that a parser was processing when it failed.
type Input = String
data PandocError = -- | Generic parse failure
                   ParseFailure String
                   -- | Error thrown by a Parsec parser
                 | ParsecError Input ParseError
                 deriving (Show, Typeable, Generic)
-- | Allows 'PandocError' to be thrown/caught with 'Control.Exception'.
instance Exception PandocError
-- | Lets 'PandocError' be used with 'Control.Monad.Error' machinery;
-- bare string messages become 'ParseFailure'.
instance Error PandocError where
  strMsg = ParseFailure
-- | An unsafe method to handle `PandocError`s.
-- | An unsafe method to handle `PandocError`s: unwraps a 'Right' result and
-- calls 'error' with a diagnostic message on 'Left'. For 'ParsecError' the
-- message includes the offending source line and a caret under the column.
handleError :: Either PandocError a -> a
handleError (Right r) = r
handleError (Left err) =
  case err of
    ParseFailure string -> error string
    ParsecError input err' ->
        let errPos = errorPos err'
            errLine = sourceLine errPos
            errColumn = sourceColumn errPos
            -- Parsec positions are 1-based and may point one line past the
            -- end of the input (hence the appended ""). Guard the index so
            -- an out-of-range position degrades to an empty context line
            -- instead of making (!!) throw and masking the real error.
            inputLines = lines input ++ [""]
            theline = if errLine > 0 && errLine <= length inputLines
                        then inputLines !! (errLine - 1)
                        else ""
        in error $ "\nError at " ++ show err' ++ "\n" ++
                theline ++ "\n" ++ replicate (errColumn - 1) ' ' ++
                "^"
| janschulz/pandoc | src/Text/Pandoc/Error.hs | gpl-2.0 | 2,295 | 0 | 16 | 534 | 308 | 169 | 139 | 29 | 2 |
{-# LANGUAGE OverloadedStrings, ScopedTypeVariables #-}
module Tests.Readers.RST (tests) where
import Text.Pandoc.Definition
import Test.Framework
import Tests.Helpers
import Tests.Arbitrary()
import Text.Pandoc.Builder
import Text.Pandoc
import Text.Pandoc.Error
-- | Parse a reStructuredText string with standalone mode on, erroring out
-- (via 'handleError') on parse failure.
rst :: String -> Pandoc
rst = handleError . readRST def{ readerStandalone = True }
-- | @name =: (input, expected)@ builds a named test case that runs 'rst'
-- on the input and compares against the expected document.
infix 4 =:
(=:) :: ToString c
     => String -> (String, c) -> Test
(=:) = test rst
-- | The full RST reader test suite: line blocks, field lists, URLs,
-- code directives, literal blocks, and interpreted text roles.
tests :: [Test]
tests = [ "line block with blank line" =:
          "| a\n|\n| b" =?> para (str "a") <>
          para (str "\160b")
        -- Field lists: general syntax, document metadata, inline markup.
        , testGroup "field list"
          [ "general" =: unlines
             [ "para"
             , ""
             , ":Hostname: media08"
             , ":IP address: 10.0.0.19"
             , ":Size: 3ru"
             , ":Version: 1"
             , ":Indentation: Since the field marker may be quite long, the second"
             , " and subsequent lines of the field body do not have to line up"
             , " with the first line, but they must be indented relative to the"
             , " field name marker, and they must line up with each other."
             , ":Parameter i: integer"
             , ":Final: item"
             , " on two lines" ]
            =?> ( doc
                $ para "para" <>
                  definitionList [ (str "Hostname", [para "media08"])
                                 , (text "IP address", [para "10.0.0.19"])
                                 , (str "Size", [para "3ru"])
                                 , (str "Version", [para "1"])
                                 , (str "Indentation", [para "Since the field marker may be quite long, the second\nand subsequent lines of the field body do not have to line up\nwith the first line, but they must be indented relative to the\nfield name marker, and they must line up with each other."])
                                 , (text "Parameter i", [para "integer"])
                                 , (str "Final", [para "item\non two lines"])
                                 ])
          , "metadata" =: unlines
             [ "====="
             , "Title"
             , "====="
             , "--------"
             , "Subtitle"
             , "--------"
             , ""
             , ":Version: 1"
             ]
            =?> ( setMeta "version" (para "1")
                $ setMeta "title" ("Title" :: Inlines)
                $ setMeta "subtitle" ("Subtitle" :: Inlines)
                $ doc mempty )
          , "with inline markup" =: unlines
             [ ":*Date*: today"
             , ""
             , ".."
             , ""
             , ":*one*: emphasis"
             , ":two_: reference"
             , ":`three`_: another one"
             , ":``four``: literal"
             , ""
             , ".. _two: http://example.com"
             , ".. _three: http://example.org"
             ]
            =?> ( setMeta "date" (str "today")
                $ doc
                $ definitionList [ (emph "one", [para "emphasis"])
                                 , (link "http://example.com" "" "two", [para "reference"])
                                 , (link "http://example.org" "" "three", [para "another one"])
                                 , (code "four", [para "literal"])
                                 ])
          ]
        , "URLs with following punctuation" =:
          ("http://google.com, http://yahoo.com; http://foo.bar.baz.\n" ++
           "http://foo.bar/baz_(bam) (http://foo.bar)") =?>
          para (link "http://google.com" "" "http://google.com" <> ", " <>
                link "http://yahoo.com" "" "http://yahoo.com" <> "; " <>
                link "http://foo.bar.baz" "" "http://foo.bar.baz" <> ". " <>
                softbreak <>
                link "http://foo.bar/baz_(bam)" "" "http://foo.bar/baz_(bam)"
                <> " (" <> link "http://foo.bar" "" "http://foo.bar" <> ")")
        , "Reference names with special characters" =:
                   ("A-1-B_2_C:3:D+4+E.5.F_\n\n" ++
                    ".. _A-1-B_2_C:3:D+4+E.5.F: https://example.com\n") =?>
                   para (link "https://example.com" "" "A-1-B_2_C:3:D+4+E.5.F")
        -- Code directives, with and without an explicit start line.
        , "Code directive with class and number-lines" =: unlines
            [ ".. code::python"
            , "   :number-lines: 34"
            , "   :class: class1 class2 class3"
            , ""
            , "   def func(x):"
            , "     return y"
            ] =?>
            ( doc $ codeBlockWith
                  ( ""
                  , ["sourceCode", "python", "numberLines", "class1", "class2", "class3"]
                  , [ ("startFrom", "34") ]
                  )
                  "def func(x):\n  return y"
            )
        , "Code directive with number-lines, no line specified" =: unlines
            [ ".. code::python"
            , "   :number-lines: "
            , ""
            , "   def func(x):"
            , "     return y"
            ] =?>
            ( doc $ codeBlockWith
                  ( ""
                  , ["sourceCode", "python", "numberLines"]
                  , [ ("startFrom", "") ]
                  )
                  "def func(x):\n  return y"
            )
        , testGroup "literal / line / code blocks"
          [ "indented literal block" =: unlines
            [ "::"
            , ""
            , "  block quotes"
            , ""
            , "  can go on for many lines"
            , "but must stop here"]
            =?> (doc $
                 codeBlock "block quotes\n\ncan go on for many lines" <>
                 para "but must stop here")
          , "line block with 3 lines" =: "| a\n| b\n| c"
            =?> para ("a" <> linebreak <> "b" <> linebreak <> "c")
          , "quoted literal block using >" =: "::\n\n> quoted\n> block\n\nOrdinary paragraph"
            =?> codeBlock "> quoted\n> block" <> para "Ordinary paragraph"
          , "quoted literal block using | (not a line block)" =: "::\n\n| quoted\n| block\n\nOrdinary paragraph"
            =?> codeBlock "| quoted\n| block" <> para "Ordinary paragraph"
          , "class directive with single paragraph" =: ".. class:: special\n\nThis is a \"special\" paragraph."
            =?> divWith ("", ["special"], []) (para "This is a \"special\" paragraph.")
          , "class directive with two paragraphs" =: ".. class:: exceptional remarkable\n\n    First paragraph.\n\n    Second paragraph."
            =?> divWith ("", ["exceptional", "remarkable"], []) (para "First paragraph." <> para "Second paragraph.")
          , "class directive around literal block" =: ".. class:: classy\n\n::\n\n  a\n  b"
            =?> divWith ("", ["classy"], []) (codeBlock "a\nb")]
        -- Interpreted text roles: builtin, inherited, and custom roles.
        , testGroup "interpreted text roles"
          [ "literal role prefix" =: ":literal:`a`" =?> para (code "a")
          , "literal role postfix" =: "`a`:literal:" =?> para (code "a")
          , "literal text" =: "``text``" =?> para (code "text")
          , "code role" =: ":code:`a`" =?> para (codeWith ("", ["sourceCode"], []) "a")
          , "inherited code role" =: ".. role:: codeLike(code)\n\n:codeLike:`a`"
            =?> para (codeWith ("", ["codeLike", "sourceCode"], []) "a")
          , "custom code role with language field"
            =: ".. role:: lhs(code)\n    :language: haskell\n\n:lhs:`a`"
            =?> para (codeWith ("", ["lhs", "haskell","sourceCode"], []) "a")
          , "custom role with unspecified parent role"
            =: ".. role:: classy\n\n:classy:`text`"
            =?> para (spanWith ("", ["classy"], []) "text")
          , "role with recursive inheritance"
            =: ".. role:: haskell(code)\n.. role:: lhs(haskell)\n\n:lhs:`text`"
            =?> para (codeWith ("", ["lhs", "haskell", "sourceCode"], []) "text")
          , "unknown role" =: ":unknown:`text`" =?> para (str "text")
          ]
        ]
| janschulz/pandoc | tests/Tests/Readers/RST.hs | gpl-2.0 | 7,925 | 0 | 19 | 3,062 | 1,428 | 790 | 638 | 152 | 1 |
{-# LANGUAGE
TemplateHaskell,
MultiParamTypeClasses,
FlexibleInstances,
FlexibleContexts,
UndecidableInstances,
TypeOperators,
ScopedTypeVariables,
TypeSynonymInstances #-}
module DataTypes.Transform where
import Data.Comp
import Data.Comp.Derive
import DataTypes.Standard as S
import DataTypes.Comp
-- | Per-functor algebra translating the sugared compositional AST to the
-- standard 'PExpr' representation.
class TransSugar f where
    transSugarAlg :: Alg f PExpr
-- | Fold a whole term with the 'TransSugar' algebra.
transSugar :: (Functor f, TransSugar f) => Term f -> PExpr
transSugar = cata transSugarAlg
-- Derive the instance for coproducts (f :+: g) from the summand instances.
$(derive [liftSum] [''TransSugar])
instance TransSugar Value where
    transSugarAlg (VInt i) = PInt i
    transSugarAlg (VBool b) = PBool b
    transSugarAlg (VPair x y) = PPair x y
instance TransSugar Op where
    transSugarAlg (Plus x y) = PPlus x y
    transSugarAlg (Mult x y) = PMult x y
    transSugarAlg (If b x y) = PIf b x y
    transSugarAlg (Lt x y) = PLt x y
    transSugarAlg (And x y) = PAnd x y
    transSugarAlg (Not x) = PNot x
    -- Projection tags are mapped to their standard-AST counterparts.
    transSugarAlg (Proj p x) = PProj (ptrans p) x
        where ptrans ProjLeft = SProjLeft
              ptrans ProjRight = SProjRight
    transSugarAlg (Eq x y) = PEq x y
instance TransSugar Sugar where
    transSugarAlg (Neg x) = PNeg x
    transSugarAlg (Minus x y) = PMinus x y
    transSugarAlg (Gt x y) = PGt x y
    transSugarAlg (Or x y) = POr x y
    transSugarAlg (Impl x y) = PImpl x y
class TransCore f where
transCoreAlg :: Alg f OExpr
transCore :: (Functor f, TransCore f) => Term f -> OExpr
transCore = cata transCoreAlg
$(derive [liftSum] [''TransCore])
instance TransCore Value where
transCoreAlg (VInt i) = OInt i
transCoreAlg (VBool b) = OBool b
transCoreAlg (VPair x y) = OPair x y
instance TransCore Op where
transCoreAlg (Plus x y) = OPlus x y
transCoreAlg (Mult x y) = OMult x y
transCoreAlg (If b x y) = OIf b x y
transCoreAlg (Lt x y) = OLt x y
transCoreAlg (And x y) = OAnd x y
transCoreAlg (Not x) = ONot x
transCoreAlg (Proj p x) = OProj (ptrans p) x
where ptrans ProjLeft = SProjLeft
ptrans ProjRight = SProjRight
transCoreAlg (Eq x y) = OEq x y
class TransVal f where
transValAlg :: Alg f SExpr
transVal :: (Functor f, TransVal f) => Term f -> SExpr
transVal = cata transValAlg
$(derive [liftSum] [''TransVal])
instance TransVal Value where
transValAlg (VInt i) = SInt i
transValAlg (VBool b) = SBool b
transValAlg (VPair x y) = SPair x y
class TransType f where
transTypeAlg :: Alg f VType
transType :: (Functor f, TransType f) => Term f -> VType
transType = cata transTypeAlg
$(derive [liftSum] [''TransType])
instance TransType ValueT where
transTypeAlg TInt = VTInt
transTypeAlg TBool = VTBool
transTypeAlg (TPair x y) = VTPair x y | spacekitteh/compdata | benchmark/DataTypes/Transform.hs | bsd-3-clause | 2,702 | 0 | 9 | 652 | 1,069 | 524 | 545 | 78 | 1 |
{-# LANGUAGE MultiParamTypeClasses #-}
-- |This module provides an abstraction for a /migration store/, a
-- facility in which 'Migration's can be stored and from which they
-- can be loaded. This module also provides functions for taking
-- 'Migration's from a store and converting them into the appropriate
-- intermediate types for use with the rest of this library.
module Database.Schema.Migrations.Store
( MigrationStore(..)
, MapValidationError(..)
, StoreData(..)
, MigrationMap
-- * High-level Store API
, loadMigrations
, storeMigrations
, storeLookup
-- * Miscellaneous Functions
, depGraphFromMapping
, validateMigrationMap
, validateSingleMigration
)
where
import Data.Maybe ( isJust )
import Control.Monad ( mzero )
import Control.Applicative ( (<$>) )
import qualified Data.Map as Map
import Database.Schema.Migrations.Migration
( Migration(..)
)
import Database.Schema.Migrations.Dependencies
( DependencyGraph(..)
, mkDepGraph
, depsOf
)
-- |A mapping from migration name to 'Migration'. This is exported
-- for testing purposes, but you'll want to interface with this
-- through the encapsulating 'StoreData' type.
type MigrationMap = Map.Map String Migration
-- Everything loaded from a store: the migrations keyed by name, plus
-- the dependency graph computed from them by 'loadMigrations'.
data StoreData = StoreData { storeDataMapping :: MigrationMap
                             -- ^ All migrations in the store, keyed by name.
                           , storeDataGraph :: DependencyGraph Migration
                             -- ^ Dependency graph over those migrations.
                           }
-- |The type of migration storage facilities. A MigrationStore is a
-- facility in which new migrations can be created, and from which
-- existing migrations can be loaded.
data MigrationStore =
MigrationStore { loadMigration :: String -> IO (Either String Migration)
-- ^ Load a migration from the store.
, saveMigration :: Migration -> IO ()
-- ^ Save a migration to the store.
, getMigrations :: IO [String]
-- ^ Return a list of all available migrations'
-- names.
, fullMigrationName :: String -> IO String
-- ^ Return the full representation of a given
-- migration name; mostly for filesystem stores,
-- where the full representation includes the store
-- path.
}
-- |A type for types of validation errors for migration maps.
data MapValidationError = DependencyReferenceError String String
-- ^ A migration claims a dependency on a
-- migration that does not exist.
| DependencyGraphError String
-- ^ An error was encountered when
-- constructing the dependency graph for
-- this store.
| InvalidMigration String
-- ^ The specified migration is invalid.
deriving (Eq)
-- |Human-readable rendering of store validation errors.
instance Show MapValidationError where
    show (DependencyReferenceError from to) =
        concat [ "Migration ", show from
               , " references nonexistent dependency ", show to ]
    show (DependencyGraphError msg) =
        "There was an error constructing the dependency graph: " ++ msg
    show (InvalidMigration msg) =
        "There was an error loading a migration: " ++ msg
-- |Return every 'Migration' contained in the given 'StoreData'.
storeMigrations :: StoreData -> [Migration]
storeMigrations = Map.elems . storeDataMapping
-- |Find the 'Migration' with the given name in the specified
-- 'StoreData', if one is present.
storeLookup :: StoreData -> String -> Maybe Migration
storeLookup storeData name = Map.lookup name (storeDataMapping storeData)
-- |Load migrations from the specified 'MigrationStore', validate the
-- loaded migrations, and return errors or a 'StoreData' on
-- success.  Generally speaking, this will be the first thing you
-- should call once you have constructed a 'MigrationStore'.
loadMigrations :: MigrationStore -> IO (Either [MapValidationError] StoreData)
loadMigrations store = do
  migrations <- getMigrations store
  loadedWithErrors <- mapM (\name -> loadMigration store name) migrations
  -- NOTE: these let bindings are mutually recursive (laziness):
  -- 'loaded' is used in 'mMap' above the line where it is defined.
  -- 'sortResults' partitions Either results into (successes, errors),
  -- reversing both lists in the process.
  let mMap = Map.fromList $ [ (mId e, e) | e <- loaded ]
      validationErrors = validateMigrationMap mMap
      (loaded, loadErrors) = sortResults loadedWithErrors ([], [])
      allErrors = validationErrors ++ (InvalidMigration <$> loadErrors)
      sortResults [] v = v
      sortResults (Left e:rest) (ms, es) = sortResults rest (ms, e:es)
      sortResults (Right m:rest) (ms, es) = sortResults rest (m:ms, es)
  case null allErrors of
    False -> return $ Left allErrors
    True -> do
      -- Construct a dependency graph and, if that succeeds, return
      -- StoreData.
      case depGraphFromMapping mMap of
        Left e -> return $ Left [DependencyGraphError e]
        Right gr -> return $ Right StoreData { storeDataMapping = mMap
                                             , storeDataGraph = gr
                                             }
-- |Run 'validateSingleMigration' over every migration in the map and
-- collect all resulting validation errors.
validateMigrationMap :: MigrationMap -> [MapValidationError]
validateMigrationMap mMap =
    concatMap (validateSingleMigration mMap) (Map.elems mMap)
-- |Validate a single migration: emit a 'DependencyReferenceError' for
-- each declared dependency that is absent from the 'MigrationMap'.
validateSingleMigration :: MigrationMap -> Migration -> [MapValidationError]
validateSingleMigration mMap m =
    [ DependencyReferenceError (mId m) depId
    | depId <- depsOf m
    , not (Map.member depId mMap)
    ]
-- |Build a 'DependencyGraph' from the migrations in a 'MigrationMap';
-- Left if the graph cannot be constructed (e.g., due to a dependency
-- cycle), Right on success.  Most callers should use 'loadMigrations'
-- rather than calling this directly.
depGraphFromMapping :: MigrationMap -> Either String (DependencyGraph Migration)
depGraphFromMapping = mkDepGraph . Map.elems
| nathankot/dbmigrations | src/Database/Schema/Migrations/Store.hs | bsd-3-clause | 6,225 | 0 | 18 | 1,641 | 946 | 527 | 419 | 76 | 5 |
module Lexer where
import Text.Parsec
import Text.Parsec.String
import qualified Text.Parsec.Token as Tok
import Text.Parsec.Language (haskellStyle)
-- | Token parser for a small Haskell-like language: haskellStyle
-- lexing with our reserved names/operators and @#@ line comments.
lexer :: Tok.TokenParser ()
lexer = Tok.makeTokenParser style
  where ops = ["->","\\","+","*","-","="]
        names = ["True", "False"]
        style = haskellStyle {Tok.reservedOpNames = ops,
                              Tok.reservedNames = names,
                              Tok.commentLine = "#"}
-- | Parse a reserved keyword (one of 'names' above).
reserved :: String -> Parser ()
reserved = Tok.reserved lexer
-- | Parse a reserved operator (one of 'ops' above).
reservedOp :: String -> Parser ()
reservedOp = Tok.reservedOp lexer
-- | Parse an identifier, rejecting reserved names.
identifier :: Parser String
identifier = Tok.identifier lexer
-- | Run a parser between parentheses.
parens :: Parser a -> Parser a
parens = Tok.parens lexer
-- | Run a parser on a complete input: skip leading whitespace and
-- require end-of-input afterwards.
contents :: Parser a -> Parser a
contents p = do
  Tok.whiteSpace lexer
  r <- p
  eof
  return r
-- | Parse a natural number literal.
natural :: Parser Integer
natural = Tok.natural lexer
| yupferris/write-you-a-haskell | chapter5/stlc/Lexer.hs | mit | 863 | 0 | 8 | 203 | 280 | 151 | 129 | 28 | 1 |
{-
{- 3 lines of comments total! -}
-}
| korsakov/ohcount | test/src_dir/haskell3.hs | gpl-2.0 | 55 | 0 | 2 | 21 | 3 | 2 | 1 | 1 | 0 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ar-SA">
<title>Online Menu | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>بحث</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/onlineMenu/src/main/javahelp/org/zaproxy/zap/extension/onlineMenu/resources/help_ar_SA/helpset_ar_SA.hs | apache-2.0 | 972 | 80 | 65 | 159 | 411 | 208 | 203 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="fr-FR">
<title>Passive Scan Rules - Beta | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Indice</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Rechercher</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | kingthorin/zap-extensions | addOns/pscanrulesBeta/src/main/javahelp/org/zaproxy/zap/extension/pscanrulesBeta/resources/help_fr_FR/helpset_fr_FR.hs | apache-2.0 | 991 | 78 | 67 | 162 | 420 | 212 | 208 | -1 | -1 |
-- Simple and efficient printing combinators
module PPrint where
-- Fixities: '&' and '!' bind tighter than the vertical composition '!/'.
infixr 2 &,!
infixr 1 !/
-- Primitive output tokens: a pending separator space, a literal
-- string, a newline, and a relative indentation change.
data Output = Sep | Str String | Nl | Indent Int
-- A document is a difference list of 'Output' tokens, so composition
-- ('!') is O(1).
newtype Document = P ([Output] -> [Output])
-- Things that can be rendered as a 'Document'.  'prList' is a hook so
-- that 'String' (= [Char]) can be printed as one token.
class Printable a where
  pr :: a -> Document
  prList :: [a] -> Document
  prList = wpr
instance Printable Document where
  pr = id
-- Basic documents: empty, newline, separator space.
nil = P id
nl = P (Nl:)
sep = P (Sep:)
-- Indent a document by 2 (or n) columns relative to the current level.
indented x = indented' 2 x
indented' n x = P (Indent n:) ! x ! P (Indent (-n):)
-- Combine a list of printables: vertically, separated by spaces, or
-- directly adjacent; 'vmap'/'wmap'/'hmap' apply a function first.
vpr xs = foldr (!/) nil xs
wpr xs = prsep sep xs
hpr xs = foldr (!) nil xs
vmap f = foldr ((!/) . f) nil
wmap f xs = wpr (map f xs)
hmap f = foldr ((!) . f) nil
-- Binary composition: vertical ('!/'), adjacent ('!'), space-separated ('&').
x !/ y = x ! nl ! y
x ! y = comp (pr x) (pr y)
  where comp (P x) (P y) = P (x . y)
x & y = x ! sep ! y
-- Interleave separator s between (prsep) / before (prpre) elements.
prsep s [] = nil
prsep s (x:xs) = x ! prpre s xs
prpre s [] = nil
prpre s (x:xs) = s ! x ! prpre s xs
instance Printable Char where
  pr c = P (Str [c]:)
  prList s = P (Str s:)
instance Printable a => Printable [a] where
  pr = prList
instance Printable Int where
  pr x = pr (show (x `asTypeOf` 1))
{-
instance Printable a => Printable (Maybe a) where
  pr Nothing = nil
  pr (Just x) = pr x
-}
-- | Render a 'Printable' value to a string.  Indentation is emitted
-- as one tab per 8 columns plus spaces for the remainder.
pprint x = fmt0 0 (apply (pr x) [])
  where
    apply (P pr) = pr
    -- The printer is designed to avoid producing redundant spaces:
    -- + No indentation space on blank lines.
    -- + No trailing spaces at the end of lines.
    -- + No double spaces between items.
    -- It is a small state machine over the token list; n is the
    -- current indentation level in columns.
    -- fmt0: at the beginning of a line, before indentation has been made
    fmt0 n [] = []
    fmt0 n (Nl:os) = "\n"++fmt0 n os
    fmt0 n (Indent i:os) = fmt0 (n+i) os
    fmt0 n (Sep:os) = fmt0 n os
    fmt0 n (Str s:os) = space n++s++fmt n os
    space n = replicate (n `div` 8) '\t' ++ replicate (n `mod` 8) ' '
    -- fmt: in the middle of a line, after indentation and some text
    fmt n [] = []
    fmt n (o:os) =
      case o of
        Str s -> s++fmt n os
        Nl -> "\n"++fmt0 n os
        Indent i -> fmt (n+i) os
        Sep -> fmt1 n os
    -- fmt1: in the middle of a line, a space is to be inserted before next item
    fmt1 n [] = []
    fmt1 n (o:os) =
      case o of
        Str s -> ' ':s++fmt n os
        Nl -> "\n"++fmt0 n os
        Indent i -> fmt1 (n+i) os
        Sep -> fmt1 n os
| forste/haReFork | tools/base/parse2/LexerGen/PPrint.hs | bsd-3-clause | 2,166 | 2 | 15 | 628 | 1,018 | 523 | 495 | 59 | 13 |
{-# LANGUAGE BangPatterns, CPP, RecordWildCards #-}
-- |
-- Module : Data.Text.Internal.IO
-- Copyright : (c) 2009, 2010 Bryan O'Sullivan,
-- (c) 2009 Simon Marlow
-- License : BSD-style
-- Maintainer : bos@serpentine.com
-- Stability : experimental
-- Portability : GHC
--
-- /Warning/: this is an internal module, and does not have a stable
-- API or name. Functions in this module may not check or enforce
-- preconditions expected by public modules. Use at your own risk!
--
-- Low-level support for text I\/O.
module Data.Text.Internal.IO
(
hGetLineWith
, readChunk
) where
import qualified Control.Exception as E
import Data.IORef (readIORef, writeIORef)
import Data.Text (Text)
import Data.Text.Internal.Fusion (unstream)
import Data.Text.Internal.Fusion.Types (Step(..), Stream(..))
import Data.Text.Internal.Fusion.Size (exactSize, maxSize)
import Data.Text.Unsafe (inlinePerformIO)
import Foreign.Storable (peekElemOff)
import GHC.IO.Buffer (Buffer(..), CharBuffer, RawCharBuffer, bufferAdjustL,
bufferElems, charSize, isEmptyBuffer, readCharBuf,
withRawBuffer, writeCharBuf)
import GHC.IO.Handle.Internals (ioe_EOF, readTextDevice, wantReadableHandle_)
import GHC.IO.Handle.Types (Handle__(..), Newline(..))
import System.IO (Handle)
import System.IO.Error (isEOFError)
import qualified Data.Text as T
-- | Read a single line of input from a handle, constructing a list of
-- decoded chunks as we go. When we're done, transform them into the
-- destination type.
hGetLineWith :: ([Text] -> t) -> Handle -> IO t
hGetLineWith f h = wantReadableHandle_ "hGetLine" h go
  where
    go hh@Handle__{..} = readIORef haCharBuffer >>= fmap f . hGetLineLoop hh []
-- Accumulate decoded chunks (in reverse order) until a '\n' is found
-- or EOF is reached; ts is the accumulator of chunks read so far.
hGetLineLoop :: Handle__ -> [Text] -> CharBuffer -> IO [Text]
hGetLineLoop hh@Handle__{..} = go where
 go ts buf@Buffer{ bufL=r0, bufR=w, bufRaw=raw0 } = do
  -- Scan the buffer [r..w) for the next newline, if any.
  let findEOL raw r | r == w = return (False, w)
                    | otherwise = do
                        (c,r') <- readCharBuf raw r
                        if c == '\n'
                        then return (True, r)
                        else findEOL raw r'
  (eol, off) <- findEOL raw0 r0
  -- Decode the scanned region, applying CRLF translation if the
  -- handle requests it.
  (t,r') <- if haInputNL == CRLF
            then unpack_nl raw0 r0 off
            else do t <- unpack raw0 r0 off
                    return (t,off)
  if eol
    then do writeIORef haCharBuffer (bufferAdjustL (off+1) buf)
            return $ reverse (t:ts)
    else do
      let buf1 = bufferAdjustL r' buf
      maybe_buf <- maybeFillReadBuffer hh buf1
      case maybe_buf of
        -- Nothing indicates we caught an EOF, and we may have a
        -- partial line to return.
        Nothing -> do
          -- we reached EOF. There might be a lone \r left
          -- in the buffer, so check for that and
          -- append it to the line if necessary.
          let pre | isEmptyBuffer buf1 = T.empty
                  | otherwise = T.singleton '\r'
          writeIORef haCharBuffer buf1{ bufL=0, bufR=0 }
          let str = reverse . filter (not . T.null) $ pre:t:ts
          if null str
            then ioe_EOF
            else return str
        Just new_buf -> go (t:ts) new_buf
-- This function is lifted almost verbatim from GHC.IO.Handle.Text.
-- Refill the buffer from the underlying device; Nothing on EOF, other
-- I/O errors are rethrown.
maybeFillReadBuffer :: Handle__ -> CharBuffer -> IO (Maybe CharBuffer)
maybeFillReadBuffer handle_ buf
  = E.catch (Just `fmap` getSomeCharacters handle_ buf) $ \e ->
      if isEOFError e
      then return Nothing
      else ioError e
-- Decode the raw buffer region [r..w) into a strict Text.
-- Only valid when Char buffers are 4 bytes wide (checked below).
unpack :: RawCharBuffer -> Int -> Int -> IO Text
unpack !buf !r !w
  | charSize /= 4 = sizeError "unpack"
  | r >= w = return T.empty
  | otherwise = withRawBuffer buf go
  where
    go pbuf = return $! unstream (Stream next r (exactSize (w-r)))
      where
        next !i | i >= w = Done
                | otherwise = Yield (ix i) (i+1)
        ix i = inlinePerformIO $ peekElemOff pbuf i
-- Decode [r..w) like 'unpack', but translate CRLF to '\n'.  Also
-- returns the effective right edge: if the region ends in a lone
-- '\r', that byte is left for the next read (it may be half a CRLF).
unpack_nl :: RawCharBuffer -> Int -> Int -> IO (Text, Int)
unpack_nl !buf !r !w
  | charSize /= 4 = sizeError "unpack_nl"
  | r >= w = return (T.empty, 0)
  | otherwise = withRawBuffer buf $ go
  where
    go pbuf = do
      let !t = unstream (Stream next r (maxSize (w-r)))
          w' = w - 1
      return $ if ix w' == '\r'
               then (t,w')
               else (t,w)
      where
        next !i | i >= w = Done
                | c == '\r' = let i' = i + 1
                              in if i' < w
                                 then if ix i' == '\n'
                                      then Yield '\n' (i+2)
                                      else Yield '\n' i'
                                 else Done
                | otherwise = Yield c (i+1)
          where c = ix i
        ix i = inlinePerformIO $ peekElemOff pbuf i
-- This function is completely lifted from GHC.IO.Handle.Text.
-- Ensure the buffer contains at least one usable character, reading
-- from the device if necessary.
getSomeCharacters :: Handle__ -> CharBuffer -> IO CharBuffer
getSomeCharacters handle_@Handle__{..} buf@Buffer{..} =
  case bufferElems buf of
    -- buffer empty: read some more
    0 -> {-# SCC "readTextDevice" #-} readTextDevice handle_ buf
    -- if the buffer has a single '\r' in it and we're doing newline
    -- translation: read some more (we cannot tell yet whether it is
    -- half of a CRLF pair)
    1 | haInputNL == CRLF -> do
      (c,_) <- readCharBuf bufRaw bufL
      if c == '\r'
         then do -- shuffle the '\r' to the beginning. This is only safe
                 -- if we're about to call readTextDevice, otherwise it
                 -- would mess up flushCharBuffer.
                 -- See [note Buffer Flushing], GHC.IO.Handle.Types
                 _ <- writeCharBuf bufRaw 0 '\r'
                 let buf' = buf{ bufL=0, bufR=1 }
                 readTextDevice handle_ buf'
         else do
                 return buf
    -- buffer has some chars in it already: just return it
    _otherwise -> {-# SCC "otherwise" #-} return buf
-- | Read a single chunk of strict text from a buffer. Used by both
-- the strict and lazy implementations of hGetContents.
readChunk :: Handle__ -> CharBuffer -> IO Text
readChunk hh@Handle__{..} buf = do
  buf'@Buffer{..} <- getSomeCharacters hh buf
  -- Decode the available region, honouring the handle's newline mode;
  -- r is the index up to which we actually consumed the buffer.
  (t,r) <- if haInputNL == CRLF
           then unpack_nl bufRaw bufL bufR
           else do t <- unpack bufRaw bufL bufR
                   return (t,bufR)
  writeIORef haCharBuffer (bufferAdjustL r buf')
  return t
-- Internal invariant failure: raised when 'charSize' is not the
-- 4 bytes this decoder assumes.
sizeError :: String -> a
sizeError loc = error $ "Data.Text.IO." ++ loc ++ ": bad internal buffer size"
| beni55/text | Data/Text/Internal/IO.hs | bsd-2-clause | 6,351 | 0 | 27 | 1,899 | 1,740 | 908 | 832 | 115 | 6 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RankNTypes #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.Program.Find
-- Copyright : Duncan Coutts 2013
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- A somewhat extended notion of the normal program search path concept.
--
-- Usually when finding executables we just want to look in the usual places
-- using the OS's usual method for doing so. In Haskell the normal OS-specific
-- method is captured by 'findExecutable'. On all common OSs that makes use of
-- a @PATH@ environment variable, (though on Windows it is not just the @PATH@).
--
-- However it is sometimes useful to be able to look in additional locations
-- without having to change the process-global @PATH@ environment variable.
-- So we need an extension of the usual 'findExecutable' that can look in
-- additional locations, either before, after or instead of the normal OS
-- locations.
--
module Distribution.Simple.Program.Find (
-- * Program search path
ProgramSearchPath,
ProgramSearchPathEntry(..),
defaultProgramSearchPath,
findProgramOnSearchPath,
programSearchPathAsPATHVar,
getSystemSearchPath,
) where
import Prelude ()
import Distribution.Compat.Prelude
import Distribution.Verbosity
import Distribution.Simple.Utils
import Distribution.System
import Distribution.Compat.Environment
import qualified System.Directory as Directory
( findExecutable )
import System.FilePath as FilePath
( (</>), (<.>), splitSearchPath, searchPathSeparator, getSearchPath
, takeDirectory )
#if defined(mingw32_HOST_OS)
import qualified System.Win32 as Win32
#endif
-- | A search path to use when locating executables. This is analogous
-- to the unix @$PATH@ or win32 @%PATH%@ but with the ability to use
-- the system default method for finding executables ('findExecutable' which
-- on unix is simply looking on the @$PATH@ but on win32 is a bit more
-- complicated).
--
-- The default to use is @[ProgSearchPathDefault]@ but you can add extra dirs
-- either before, after or instead of the default, e.g. here we add an extra
-- dir to search after the usual ones.
--
-- > ['ProgramSearchPathDefault', 'ProgramSearchPathDir' dir]
--
type ProgramSearchPath = [ProgramSearchPathEntry]
data ProgramSearchPathEntry =
ProgramSearchPathDir FilePath -- ^ A specific dir
| ProgramSearchPathDefault -- ^ The system default
deriving (Eq, Generic)
instance Binary ProgramSearchPathEntry
-- | The search path used by default: just the operating system's own
-- lookup mechanism ('ProgramSearchPathDefault').
defaultProgramSearchPath :: ProgramSearchPath
defaultProgramSearchPath = [ProgramSearchPathDefault]
-- | Search for a program on the given search path.  On success,
-- returns the location found together with all the locations that
-- were tried and did NOT contain the program (useful for error
-- reporting); Nothing if the program was not found anywhere.
findProgramOnSearchPath :: Verbosity -> ProgramSearchPath
                        -> FilePath -> IO (Maybe (FilePath, [FilePath]))
findProgramOnSearchPath verbosity searchpath prog = do
  debug verbosity $ "Searching for " ++ prog ++ " in path."
  res <- tryPathElems [] searchpath
  case res of
    Nothing -> debug verbosity ("Cannot find " ++ prog ++ " on the path")
    Just (path, _) -> debug verbosity ("Found " ++ prog ++ " at "++ path)
  return res
  where
    -- Try each path entry in order, accumulating (in reverse) the
    -- candidate locations that did not contain the program.
    tryPathElems :: [[FilePath]] -> [ProgramSearchPathEntry]
                 -> IO (Maybe (FilePath, [FilePath]))
    tryPathElems _ [] = return Nothing
    tryPathElems tried (pe:pes) = do
      res <- tryPathElem pe
      case res of
        (Nothing, notfoundat) -> tryPathElems (notfoundat : tried) pes
        (Just foundat, notfoundat) -> return (Just (foundat, alltried))
          where
            alltried = concat (reverse (notfoundat : tried))
    -- Try a single entry, returning (result, locations checked in vain).
    tryPathElem :: ProgramSearchPathEntry -> NoCallStackIO (Maybe FilePath, [FilePath])
    tryPathElem (ProgramSearchPathDir dir) =
        findFirstExe [ dir </> prog <.> ext | ext <- exeExtensions ]
    -- On windows, getSystemSearchPath is not guaranteed 100% correct so we
    -- use findExecutable and then approximate the not-found-at locations.
    tryPathElem ProgramSearchPathDefault | buildOS == Windows = do
      mExe <- findExecutable prog
      syspath <- getSystemSearchPath
      case mExe of
        Nothing ->
          let notfoundat = [ dir </> prog | dir <- syspath ] in
          return (Nothing, notfoundat)
        Just foundat -> do
          let founddir = takeDirectory foundat
              notfoundat = [ dir </> prog
                           | dir <- takeWhile (/= founddir) syspath ]
          return (Just foundat, notfoundat)
    -- On other OSs we can just do the simple thing
    tryPathElem ProgramSearchPathDefault = do
      dirs <- getSystemSearchPath
      findFirstExe [ dir </> prog <.> ext | dir <- dirs, ext <- exeExtensions ]
    -- Return the first existing executable among the candidates, plus
    -- the candidates rejected before (or instead of) it.
    findFirstExe :: [FilePath] -> NoCallStackIO (Maybe FilePath, [FilePath])
    findFirstExe = go []
      where
        go fs' [] = return (Nothing, reverse fs')
        go fs' (f:fs) = do
          isExe <- doesExecutableExist f
          if isExe
            then return (Just f, reverse fs')
            else go (f:fs') fs
-- | Render a 'ProgramSearchPath' as the contents of a @$PATH@-style
-- environment variable.  Note that this is close but not perfect,
-- because on Windows the native search algorithm looks at more than
-- just the @%PATH%@.
programSearchPathAsPATHVar :: ProgramSearchPath -> NoCallStackIO String
programSearchPathAsPATHVar searchpath =
    fmap (intercalate [searchPathSeparator] . concat)
         (traverse entryDirs searchpath)
  where
    -- A concrete dir contributes itself; the system default
    -- contributes the directories of the current $PATH variable.
    entryDirs (ProgramSearchPathDir dir) = return [dir]
    entryDirs ProgramSearchPathDefault = do
      env <- getEnvironment
      return (maybe [] splitSearchPath (lookup "PATH" env))
-- | Get the system search path. On Unix systems this is just the @$PATH@ env
-- var, but on windows it's a bit more complicated.
--
-- The result is de-duplicated with 'nub' since the same directory can
-- appear from several sources.
getSystemSearchPath :: NoCallStackIO [FilePath]
getSystemSearchPath = fmap nub $ do
#if defined(mingw32_HOST_OS)
  -- Approximate the native Windows search order: the program's own
  -- directory, the current directory, system dirs, then %PATH%.
  processdir <- takeDirectory `fmap` Win32.getModuleFileName Win32.nullHANDLE
  currentdir <- Win32.getCurrentDirectory
  systemdir <- Win32.getSystemDirectory
  windowsdir <- Win32.getWindowsDirectory
  pathdirs <- FilePath.getSearchPath
  let path = processdir : currentdir
           : systemdir : windowsdir
           : pathdirs
  return path
#else
  FilePath.getSearchPath
#endif
#ifdef MIN_VERSION_directory
#if MIN_VERSION_directory(1,2,1)
#define HAVE_directory_121
#endif
#endif
-- Compatibility shim over 'Directory.findExecutable': with directory
-- >= 1.2.1 we can delegate directly; older versions need an extra
-- executability check.
findExecutable :: FilePath -> NoCallStackIO (Maybe FilePath)
#ifdef HAVE_directory_121
findExecutable = Directory.findExecutable
#else
findExecutable prog = do
  -- With directory < 1.2.1 'findExecutable' doesn't check that the path
  -- really refers to an executable.
  mExe <- Directory.findExecutable prog
  case mExe of
    Just exe -> do
      exeExists <- doesExecutableExist exe
      if exeExists
        then return mExe
        else return Nothing
    _ -> return mExe
#endif
| themoritz/cabal | Cabal/Distribution/Simple/Program/Find.hs | bsd-3-clause | 7,018 | 0 | 22 | 1,566 | 1,187 | 646 | 541 | 95 | 9 |
{-# LANGUAGE MagicHash, BangPatterns #-}
-- Produces a Lint error in GHC 8.0
module T11444 where
import GHC.Exts (reallyUnsafePtrEquality#, Int (..))
-- | Observational pointer equality: True when the two (forced, via the
-- bang patterns) arguments are the same heap object.
-- NOTE(review): 'reallyUnsafePtrEquality#' results can change under
-- GC/optimisation; suitable only as a conservative fast path.
ptrEq :: a -> a -> Bool
ptrEq !x !y = I# (reallyUnsafePtrEquality# x y) == 1
| ezyang/ghc | testsuite/tests/simplCore/should_compile/T11444.hs | bsd-3-clause | 230 | 0 | 8 | 41 | 63 | 35 | 28 | 5 | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.