code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE OverloadedStrings, RecordWildCards, ScopedTypeVariables #-}
module Properties (tests) where
import Data.Aeson (eitherDecode)
import Data.Aeson.Encode (encode, encodeToBuilder)
import Data.Aeson.Internal (IResult(..), formatError, ifromJSON, iparse)
import Data.Aeson.Parser (value)
import Data.Aeson.Types
import Data.ByteString.Builder (toLazyByteString)
import Data.Int (Int8)
import Data.Time (Day, LocalTime, UTCTime, ZonedTime, NominalDiffTime)
import Encoders
import Instances ()
import Test.Framework (Test, testGroup)
import Test.Framework.Providers.QuickCheck2 (testProperty)
import Test.QuickCheck (Arbitrary(..), Property, (===))
import Types
import qualified Data.Attoparsec.Lazy as L
import qualified Data.ByteString.Lazy.Char8 as L
import qualified Data.HashMap.Strict as H
import qualified Data.Map as Map
import qualified Data.Text as T
import qualified Data.Vector as V
-- | Property: encoding a non-finite 'Double' (the quotient @num/denom@
-- may be Infinity or NaN) must produce JSON @null@; a finite quotient
-- must survive an encode-then-read round trip exactly.
encodeDouble :: Double -> Double -> Property
encodeDouble num denom =
    if isInfinite ratio || isNaN ratio
      then encode ratio === "null"
      else (read . L.unpack . encode) ratio === ratio
  where
    ratio = num / denom
-- | Property: an 'Integer' encodes to exactly its decimal 'show' form.
encodeInteger :: Integer -> Property
encodeInteger n = encode n === L.pack (show n)
-- | Property: a (Template-Haskell generated) parser inverts the matching
-- serialiser, i.e. @parsejson (tojson x)@ succeeds and yields @x@.
toParseJSON :: (Arbitrary a, Eq a, Show a) =>
               (Value -> Parser a) -> (a -> Value) -> a -> Property
toParseJSON parsejson tojson x =
    case iparse parsejson (tojson x) of
      IError path msg -> failure "parse" (formatError path msg) x
      ISuccess y      -> x === y
-- | Encode a value to JSON text, parse the text back, and compare the
-- decoded result against the original using the supplied comparison.
-- Parse or conversion failures are reported as test failures.
roundTrip :: (FromJSON a, ToJSON a, Show a) =>
             (a -> a -> Property) -> a -> a -> Property
roundTrip eq _ i =
    case fmap ifromJSON (L.parse value (encode (toJSON i))) of
      L.Done _ (ISuccess v)      -> v `eq` i
      L.Done _ (IError path err) -> failure "fromJSON" (formatError path err) i
      L.Fail _ _ err             -> failure "parse" err i
-- | 'roundTrip' specialised to exact equality via '(===)'.
roundTripEq :: (Eq a, FromJSON a, ToJSON a, Show a) => a -> a -> Property
roundTripEq = roundTrip (===)
-- | Property: converting a value to a 'Value' and back via 'ifromJSON'
-- yields the original value.
toFromJSON :: (Arbitrary a, Eq a, FromJSON a, ToJSON a, Show a) => a -> Property
toFromJSON x =
    case ifromJSON (toJSON x) of
      ISuccess y      -> x === y
      IError path err -> failure "fromJSON" (formatError path err) x
-- | Check that 'modifyFailure' prepends the given prefix to a parser's
-- failure message.
modifyFailureProp :: String -> String -> Bool
modifyFailureProp orig added = result == Error (added ++ orig)
  where
    -- The annotation pins the parser's result type to ().
    parser :: () -> Parser ()
    parser _ = modifyFailure (added ++) (fail orig)
    result = parse parser ()
-- | Perform a bit-for-bit comparison of two encoding methods: encoding
-- via an intermediate 'Value' must produce the same bytes as direct
-- 'Encoding'.
sameAs :: (a -> Value) -> (a -> Encoding) -> a -> Property
sameAs toVal toEnc v = viaValue === viaEncoding
  where
    viaValue    = toLazyByteString (encodeToBuilder (toVal v))
    viaEncoding = toLazyByteString (fromEncoding (toEnc v))
-- | Behaves like 'sameAs', but compares decoded values to account for
-- HashMap-driven variation in JSON object key ordering.
sameAsV :: (a -> Value) -> (a -> Encoding) -> a -> Property
sameAsV toVal toEnc v = decoded === Right (toVal v)
  where
    decoded = eitherDecode (toLazyByteString (fromEncoding (toEnc v)))
-- | Six-way product mixing primitives, an approximate Double, a tuple
-- and unit; exercised in the ghcGenerics round-trip tests.
type P6 = Product6 Int Bool String (Approx Double) (Int, Approx Double) ()
-- | Four-way sum mixing numeric, time, text and container payloads;
-- exercised in the ghcGenerics round-trip tests.
type S4 = Sum4 Int8 ZonedTime T.Text (Map.Map String Int)
--------------------------------------------------------------------------------
-- Value properties
--------------------------------------------------------------------------------
-- | Does the 'Value' carry a JSON string?
isString :: Value -> Bool
isString v = case v of
    String _ -> True
    _        -> False

-- | Is the 'Value' a two-element array whose first element is a string?
is2ElemArray :: Value -> Bool
is2ElemArray v = case v of
    -- V.head is only evaluated when the length check has succeeded.
    Array a -> V.length a == 2 && isString (V.head a)
    _       -> False

-- | Is the 'Value' an object with both a \"tag\" and a \"contents\" key?
isTaggedObjectValue :: Value -> Bool
isTaggedObjectValue v = case v of
    Object o -> "tag" `H.member` o && "contents" `H.member` o
    _        -> False

-- | Is the 'Value' an object with a \"tag\" key?
isTaggedObject :: Value -> Bool
isTaggedObject v = case v of
    Object o -> "tag" `H.member` o
    _        -> False

-- | Is the 'Value' an object with exactly one field?
isObjectWithSingleField :: Value -> Bool
isObjectWithSingleField v = case v of
    Object o -> H.size o == 1
    _        -> False
--------------------------------------------------------------------------------
-- | Root property-test tree: direct encoding checks, encode/decode
-- round trips, to/fromJSON inverses, failure-message behaviour, and the
-- Template Haskell derived toJSON/toEncoding instances.
tests :: Test
tests = testGroup "properties" [
    testGroup "encode" [
      testProperty "encodeDouble" encodeDouble
    , testProperty "encodeInteger" encodeInteger
    ]
  , testGroup "roundTrip" [
      testProperty "Bool" $ roundTripEq True
    , testProperty "Double" $ roundTripEq (1 :: Approx Double)
    , testProperty "Int" $ roundTripEq (1 :: Int)
    , testProperty "Integer" $ roundTripEq (1 :: Integer)
    , testProperty "String" $ roundTripEq ("" :: String)
    , testProperty "Text" $ roundTripEq T.empty
    , testProperty "Foo" $ roundTripEq (undefined :: Foo)
    , testProperty "Day" $ roundTripEq (undefined :: Day)
    , testProperty "DotNetTime" $ roundTripEq (undefined :: DotNetTime)
    , testProperty "LocalTime" $ roundTripEq (undefined :: LocalTime)
    , testProperty "UTCTime" $ roundTripEq (undefined :: UTCTime)
    , testProperty "ZonedTime" $ roundTripEq (undefined :: ZonedTime)
    , testProperty "NominalDiffTime" $ roundTripEq (undefined :: NominalDiffTime)
    , testGroup "ghcGenerics" [
        testProperty "OneConstructor" $ roundTripEq OneConstructor
      , testProperty "Product2" $ roundTripEq (undefined :: Product2 Int Bool)
      , testProperty "Product6" $ roundTripEq (undefined :: P6)
      , testProperty "Sum4" $ roundTripEq (undefined :: S4)
      ]
    ]
  , testGroup "toFromJSON" [
      testProperty "Integer" (toFromJSON :: Integer -> Property)
    , testProperty "Double" (toFromJSON :: Double -> Property)
    , testProperty "Maybe Integer" (toFromJSON :: Maybe Integer -> Property)
    , testProperty "Either Integer Double" (toFromJSON :: Either Integer Double -> Property)
    , testProperty "Either Integer Integer" (toFromJSON :: Either Integer Integer -> Property)
    ]
  , testGroup "failure messages" [
      testProperty "modify failure" modifyFailureProp
    ]
  , testGroup "template-haskell" [
      testGroup "toJSON" [
        testGroup "Nullary" [
          testProperty "string" (isString . thNullaryToJSONString)
        , testProperty "2ElemArray" (is2ElemArray . thNullaryToJSON2ElemArray)
        , testProperty "TaggedObject" (isTaggedObjectValue . thNullaryToJSONTaggedObject)
        , testProperty "ObjectWithSingleField" (isObjectWithSingleField . thNullaryToJSONObjectWithSingleField)
        , testGroup "roundTrip" [
            testProperty "string" (toParseJSON thNullaryParseJSONString thNullaryToJSONString)
          , testProperty "2ElemArray" (toParseJSON thNullaryParseJSON2ElemArray thNullaryToJSON2ElemArray)
          , testProperty "TaggedObject" (toParseJSON thNullaryParseJSONTaggedObject thNullaryToJSONTaggedObject)
          , testProperty "ObjectWithSingleField" (toParseJSON thNullaryParseJSONObjectWithSingleField thNullaryToJSONObjectWithSingleField)
          ]
        ]
      , testGroup "SomeType" [
          testProperty "2ElemArray" (is2ElemArray . thSomeTypeToJSON2ElemArray)
        , testProperty "TaggedObject" (isTaggedObject . thSomeTypeToJSONTaggedObject)
        , testProperty "ObjectWithSingleField" (isObjectWithSingleField . thSomeTypeToJSONObjectWithSingleField)
        , testGroup "roundTrip" [
            testProperty "2ElemArray" (toParseJSON thSomeTypeParseJSON2ElemArray thSomeTypeToJSON2ElemArray)
          , testProperty "TaggedObject" (toParseJSON thSomeTypeParseJSONTaggedObject thSomeTypeToJSONTaggedObject)
          , testProperty "ObjectWithSingleField" (toParseJSON thSomeTypeParseJSONObjectWithSingleField thSomeTypeToJSONObjectWithSingleField)
          ]
        , testGroup "Approx" [
            testProperty "string" (isString . thApproxToJSONUnwrap)
          , testProperty "ObjectWithSingleField" (isObjectWithSingleField . thApproxToJSONDefault)
          , testGroup "roundTrip" [
              testProperty "string" (toParseJSON thApproxParseJSONUnwrap thApproxToJSONUnwrap)
            , testProperty "ObjectWithSingleField" (toParseJSON thApproxParseJSONDefault thApproxToJSONDefault)
            ]
          ]
        ]
      ]
    , testGroup "toEncoding" [
        testProperty "NullaryString" $
          thNullaryToJSONString `sameAs` thNullaryToEncodingString
      , testProperty "Nullary2ElemArray" $
          thNullaryToJSON2ElemArray `sameAs` thNullaryToEncoding2ElemArray
      , testProperty "NullaryTaggedObject" $
          thNullaryToJSONTaggedObject `sameAs` thNullaryToEncodingTaggedObject
      , testProperty "NullaryObjectWithSingleField" $
          thNullaryToJSONObjectWithSingleField `sameAs`
          thNullaryToEncodingObjectWithSingleField
      , testProperty "ApproxUnwrap" $
          thApproxToJSONUnwrap `sameAs` thApproxToEncodingUnwrap
      , testProperty "ApproxDefault" $
          thApproxToJSONDefault `sameAs` thApproxToEncodingDefault
      , testProperty "SomeType2ElemArray" $
          thSomeTypeToJSON2ElemArray `sameAsV` thSomeTypeToEncoding2ElemArray
      , testProperty "SomeTypeTaggedObject" $
          thSomeTypeToJSONTaggedObject `sameAsV` thSomeTypeToEncodingTaggedObject
      , testProperty "SomeTypeObjectWithSingleField" $
          thSomeTypeToJSONObjectWithSingleField `sameAsV`
          thSomeTypeToEncodingObjectWithSingleField
      ]
    ]
  ]
| abbradar/aeson | tests/Properties.hs | bsd-3-clause | 9,286 | 0 | 19 | 1,958 | 2,320 | 1,230 | 1,090 | 158 | 3 |
{- | Intermediate AST used for renaming. -}
module Core.Rename
( NQName(..)
, NQMeta
, NQType
, NQTypes
, NQPat
, NQExpr
, Desugared
, rename
, indexVals
) where
import Control.Monad
import Control.Arrow
import Data.Map (Map)
import qualified Data.Map as M
import qualified Data.IndexedSet as I
import Control.Lens
import Data.Default.Generics
import qualified Text.PrettyPrint.Leijen.Text as PP
import Core.Monad
import Core.Types
import Core.Pretty
import Core.Typecheck
-- | A name before renaming: either a user-written source name or a
-- compiler-generated temporary introduced by desugaring.
data NQName = NQName Name
            | NQTemp Int
            deriving (Show, Eq, Ord)

-- | Source names print verbatim; temporaries render as @var_N@.
instance Pretty NQName where
  pretty (NQName n) = pretty n
  pretty (NQTemp i) = "var_" PP.<> pretty i

-- | Temporaries are manufactured from an integer supply.
instance TempVar NQName where
  tempNum = NQTemp
-- | Metadata on a not-yet-renamed node: its source position plus any
-- type annotations attached to it.
type NQMeta = (Pos, [NQType])
-- | A source-level type, annotated with positions only.
type NQType = NType Pos
type NQTypes = NTypes Pos
-- | Patterns and expressions whose binders are 'NQName's and whose
-- variable/literal occurrences are plain 'Name's.
type NQPat = Pat NQName Name NQMeta
type NQExpr = Expr NQName Name NQMeta
-- | A desugared program, ready for renaming.
type Desugared = Program NQName Name Name Name Pos NQMeta
-- | Maps each data-constructor name to its arity.
type Constrs = Map Name Int
-- | Build an indexed container from (key, annotated value) pairs,
-- raising a compile error at the offending position when a key occurs
-- more than once.
indexVals :: ( Default f
             , Index f ~ k
             , IxValue f ~ Ann' Pos v
             , At f
             , MonadError CompError m
             ) => [(k, Ann' Pos v)] -> m f
indexVals = foldM step def
  where
    step acc (key, val@(Ann pos _)) = do
      when (has (ix key) acc) $ throwCError pos "Duplicate definition"
      return $ acc & at key .~ Just val
-- | Qualify a source name with a freshly drawn unique.
renameVar' :: Name -> Compiler QName
renameVar' name = QName name <$> uid
-- | Rename every type declaration: give each bound type variable a
-- fresh qualified name and check that constructor parameter types only
-- mention bound variables and declared type literals.
renameTypes :: NQTypes -> Compiler QTypes
renameTypes typs = typs & I.itraverseSet %%~ rn
  where rn (Ann pos (TyDeclD k vars constrs)) = do
          vars' <- mapM (\v -> (v, ) <$> renameVar' v) vars
          let subst = M.fromList vars'
          -- If the map is smaller than the variable list, some variable
          -- name was repeated in the declaration head.
          unless (length vars == M.size subst) $ throwCError pos "Duplicate type variable"
          -- NB: the first <$> maps over the (->) functor, composing
          -- (Ann pos) onto the partially applied TyDeclD constructor.
          Ann pos <$> TyDeclD k (map snd vars') <$> mapM (rnConstr subst) constrs
        rnConstr :: Map Name QName -> NTyCon Pos -> Compiler QTyCon
        rnConstr subst (Ann pos (TyCon name pars)) = Ann pos <$> TyCon name <$> mapM (rnPar subst) pars
        rnPar :: Map Name QName -> NQType -> Compiler QType
        rnPar subst (Ann pos par) = Ann pos <$> case par of
          TVar v -> case M.lookup v subst of
            Just nv -> return $ TVar nv
            Nothing -> throwCError pos "Undefined type variable"
          TLit l -> do
            -- Type literals must be declared in the type table.
            unless (l `I.member` typs) $ throwCError pos "Undefined type literal"
            return $ TLit l
          TFun -> return TFun
          TApp a b -> TApp <$> rnPar subst a <*> rnPar subst b
-- | Rename a free-standing type (e.g. an annotation): every type
-- variable gets a fresh qualified name, and type literals are checked
-- against the declared type table.
renameType :: NQTypes -> NQType -> Compiler QType
renameType typs = chk
  where chk (Ann pos par) = Ann pos <$> case par of
          -- Unlike 'renameTypes', variables here are not looked up in a
          -- substitution: each occurrence is qualified afresh.
          TVar v -> TVar <$> renameVar' v
          TLit l -> do
            unless (l `I.member` typs) $ throwCError pos "Undefined type literal"
            return $ TLit l
          TFun -> return TFun
          TApp a b -> TApp <$> chk a <*> chk b
-- | Collect every data constructor from the type table into a map from
-- constructor name to arity; duplicate names are rejected by
-- 'indexVals'.
buildConstrs :: NQTypes -> Compiler Constrs
buildConstrs types =
    fmap (view annval) <$> indexVals (concatMap getConstrs (I.toList types))
  where
    getConstrs (Ann _ (TyDecl _ _ constrs)) = map extr constrs
    extr (Ann pos (TyCon name vs)) = (name, Ann pos (length vs))
-- | Scope environment: maps in-scope unqualified names to their
-- qualified replacements.
type Renames = Map NQName QName

-- | Rename an expression: resolve every variable occurrence against the
-- enclosing binders, check constructor usage and arity, and rename the
-- types appearing in node annotations.
renameExpr :: Constrs -> NQTypes -> NQExpr -> Compiler QExpr
renameExpr constrs types = tr M.empty
  where rnAnn :: Pos -> [NQType] -> Compiler (a -> Ann' (Pos, [QType]) a)
        rnAnn pos anns = do
          anns' <- mapM (renameType types) anns
          return $ Ann (pos, anns')
        tr :: Renames -> NQExpr -> Compiler QExpr
        tr names (Ann (pos, anns) l) = rnAnn pos anns <*> case l of
          Lit n -> do
            unless (n `M.member` constrs) $ throwCError pos "Undefined constructor"
            return $ Lit n
          Var v -> case M.lookup v names of
            Nothing -> throwCError pos "Unresolved name"
            Just v' -> return $ Var v'
          Builtin n -> return $ Builtin n
          Int i -> return $ Int i
          Abs name e -> do
            -- The lambda binder shadows any outer binding of 'name'.
            v <- newVar name
            e' <- tr (M.insert name v names) e
            return $ Abs v e'
          App e x -> App <$> tr names e <*> tr names x
          Let es x -> do
            -- Let is recursive: all binders are brought into scope
            -- before any right-hand side (or the body) is renamed.
            ns <- M.fromList <$> mapM (\k -> (k, ) <$> newVar k) (M.keys es)
            let names' = M.union ns names
            es' <- M.fromList <$> mapM (\(k, e) -> (ns M.! k, ) <$> tr names' e) (M.toList es)
            x' <- tr names' x
            return $ Let es' x'
          Case e alts -> Case <$> tr names e <*> mapM (trAlt names) alts
        -- Source names are qualified; desugarer temporaries get a fresh
        -- generated name.
        newVar (NQName name) = renameVar' name
        newVar (NQTemp _) = genTemp
        trAlt :: Renames -> (NQPat, NQExpr) -> Compiler (QPat, QExpr)
        trAlt names (pat, e) = do
          (allNames, pat') <- trPat pat
          -- indexVals rejects a variable bound twice in one pattern.
          names' <- M.map (view annval) <$> indexVals allNames
          (pat', ) <$> tr (names' `M.union` names) e
        -- Returns the bound names (with positions) alongside the
        -- renamed pattern.
        trPat :: NQPat -> Compiler ([(NQName, Ann' Pos QName)], QPat)
        trPat (Ann (pos, anns) pat) = (second <$> rnAnn pos anns) <*> case pat of
          PVar name -> do
            v <- newVar name
            return ([(name, Ann pos v)], PVar v)
          PCon cname vars -> do
            case M.lookup cname constrs of
              Nothing -> throwCError pos "Undefined constructor"
              Just len -> unless (len == length vars) $ throwCError pos "Invalid number of arguments"
            (allNames, vars') <- unzip <$> mapM trPat vars
            return (concat allNames, PCon cname vars')
-- | Entry point: rename the type declarations, collect constructor
-- arities from them, then rename the program body.
rename :: Desugared -> Compiler Renamed
rename prog = do
  types'  <- renameTypes (progTypes prog)
  constrs <- buildConstrs (progTypes prog)
  expr'   <- renameExpr constrs (progTypes prog) (progExpr prog)
  return Program { progTypes = types'
                 , progExpr  = expr'
                 }
| abbradar/dnohs | src/Core/Rename.hs | bsd-3-clause | 5,855 | 0 | 20 | 1,881 | 2,247 | 1,112 | 1,135 | -1 | -1 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[RnNames]{Extracting imported and top-level names in scope}
-}
{-# LANGUAGE CPP, NondecreasingIndentation #-}
module RnNames (
rnImports, getLocalNonValBinders, newRecordSelector,
rnExports, extendGlobalRdrEnvRn,
gresFromAvails,
calculateAvails,
reportUnusedNames,
checkConName
) where
#include "HsVersions.h"
import DynFlags
import HsSyn
import TcEnv
import RnEnv
import RnHsDoc ( rnHsDoc )
import LoadIface ( loadSrcInterface )
import TcRnMonad
import PrelNames
import Module
import Name
import NameEnv
import NameSet
import Avail
import FieldLabel
import HscTypes
import RdrName
import RdrHsSyn ( setRdrNameSpace )
import Outputable
import Maybes
import SrcLoc
import BasicTypes ( TopLevelFlag(..), StringLiteral(..) )
import ErrUtils
import Util
import FastString
import FastStringEnv
import ListSetOps
import Id
import Type
import PatSyn
import qualified GHC.LanguageExtensions as LangExt
import Control.Monad
import Data.Either ( partitionEithers, isRight, rights )
-- import qualified Data.Foldable as Foldable
import Data.Map ( Map )
import qualified Data.Map as Map
import Data.Ord ( comparing )
import Data.List ( partition, (\\), find, sortBy )
-- import qualified Data.Set as Set
import System.FilePath ((</>))
import System.IO
{-
************************************************************************
* *
\subsection{rnImports}
* *
************************************************************************
Note [Tracking Trust Transitively]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we import a package as well as checking that the direct imports are safe
according to the rules outlined in the Note [HscMain . Safe Haskell Trust Check]
we must also check that these rules hold transitively for all dependent modules
and packages. Doing this without caching any trust information would be very
slow as we would need to touch all packages and interface files a module depends
on. To avoid this we make use of the property that if a modules Safe Haskell
mode changes, this triggers a recompilation from that module in the dependency
graph. So we can just worry mostly about direct imports.
There is one trust property that can change for a package though without
recompilation being triggered: package trust. So we must check that all
packages a module transitively depends on to be trusted are still trusted when
we are compiling this module (as due to recompilation avoidance some modules
below may not be considered trusted any more without recompilation being
triggered).
We handle this by augmenting the existing transitive list of packages a module M
depends on with a bool for each package that says if it must be trusted when the
module M is being checked for trust. This list of trust required packages for a
single import is gathered in the rnImportDecl function and stored in an
ImportAvails data structure. The union of these trust required packages for all
imports is done by the rnImports function using the combine function which calls
the plusImportAvails function that is a union operation for the ImportAvails
type. This gives us in an ImportAvails structure all packages required to be
trusted for the module we are currently compiling. Checking that these packages
are still trusted (and that direct imports are trusted) is done in
HscMain.checkSafeImports.
See the note below, [Trust Own Package] for a corner case in this method and
how its handled.
Note [Trust Own Package]
~~~~~~~~~~~~~~~~~~~~~~~~
There is a corner case of package trust checking that the usual transitive check
doesn't cover. (For how the usual check operates see the Note [Tracking Trust
Transitively] below). The case is when you import a -XSafe module M and M
imports a -XTrustworthy module N. If N resides in a different package than M,
then the usual check works as M will record a package dependency on N's package
and mark it as required to be trusted. If N resides in the same package as M
though, then importing M should require its own package be trusted due to N
(since M is -XSafe so doesn't create this requirement by itself). The usual
check fails as a module doesn't record a package dependency of its own package.
So instead we now have a bool field in a modules interface file that simply
states if the module requires its own package to be trusted. This field avoids
us having to load all interface files that the module depends on to see if one
is trustworthy.
Note [Trust Transitive Property]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
So there is an interesting design question in regards to transitive trust
checking. Say I have a module B compiled with -XSafe. B is dependent on a bunch
of modules and packages, some packages it requires to be trusted as its using
-XTrustworthy modules from them. Now if I have a module A that doesn't use safe
haskell at all and simply imports B, should A inherit all the trust
requirements from B? Should A now also require that a package p is trusted since
B required it?
We currently say no but saying yes also makes sense. The difference is, if a
module M that doesn't use Safe Haskell imports a module N that does, should all
the trusted package requirements be dropped since M didn't declare that it cares
about Safe Haskell (so -XSafe is more strongly associated with the module doing
the importing) or should it be done still since the author of the module N that
uses Safe Haskell said they cared (so -XSafe is more strongly associated with
the module that was compiled that used it).
Going with yes is a simpler semantics we think and harder for the user to stuff
up but it does mean that Safe Haskell will affect users who don't care about
Safe Haskell as they might grab a package from Cabal which uses safe haskell (say
network) and that packages imports -XTrustworthy modules from another package
(say bytestring), so requires that package is trusted. The user may now get
compilation errors in code that doesn't do anything with Safe Haskell simply
because they are using the network package. They will have to call 'ghc-pkg
trust network' to get everything working. Due to this invasive nature of going
with yes we have gone with no for now.
-}
-- | Process Import Decls.  See 'rnImportDecl' for a description of what
-- the return types represent.
-- Note: Do the non SOURCE ones first, so that we get a helpful warning
-- for SOURCE ones that are unnecessary
rnImports :: [LImportDecl RdrName]
          -> RnM ([LImportDecl Name], GlobalRdrEnv, ImportAvails, AnyHpcUsage)
rnImports imports = do
    this_mod <- getModule
    let (source, ordinary) = partition is_source_import imports
        is_source_import d = ideclSource (unLoc d)
    stuff1 <- mapAndReportM (rnImportDecl this_mod) ordinary
    stuff2 <- mapAndReportM (rnImportDecl this_mod) source
    -- Safe Haskell: See Note [Tracking Trust Transitively]
    let (decls, rdr_env, imp_avails, hpc_usage) = combine (stuff1 ++ stuff2)
    return (decls, rdr_env, imp_avails, hpc_usage)
  where
    -- Fold the per-import results together: union the reader
    -- environments and avails, OR the HPC-usage flags.
    combine :: [(LImportDecl Name, GlobalRdrEnv, ImportAvails, AnyHpcUsage)]
            -> ([LImportDecl Name], GlobalRdrEnv, ImportAvails, AnyHpcUsage)
    combine = foldr plus ([], emptyGlobalRdrEnv, emptyImportAvails, False)
    plus (decl, gbl_env1, imp_avails1, hpc_usage1)
         (decls, gbl_env2, imp_avails2, hpc_usage2)
      = ( decl : decls,
          gbl_env1 `plusGlobalRdrEnv` gbl_env2,
          imp_avails1 `plusImportAvails` imp_avails2,
          hpc_usage1 || hpc_usage2 )
-- | Given a located import declaration @decl@ from @this_mod@,
-- calculate the following pieces of information:
--
--  1. An updated 'LImportDecl', where all unresolved 'RdrName' in
--     the entity lists have been resolved into 'Name's,
--
--  2. A 'GlobalRdrEnv' representing the new identifiers that were
--     brought into scope (taking into account module qualification
--     and hiding),
--
--  3. 'ImportAvails' summarizing the identifiers that were imported
--     by this declaration, and
--
--  4. A boolean 'AnyHpcUsage' which is true if the imported module
--     used HPC.
rnImportDecl :: Module -> LImportDecl RdrName
             -> RnM (LImportDecl Name, GlobalRdrEnv, ImportAvails, AnyHpcUsage)
rnImportDecl this_mod
             (L loc decl@(ImportDecl { ideclName = loc_imp_mod_name, ideclPkgQual = mb_pkg
                                     , ideclSource = want_boot, ideclSafe = mod_safe
                                     , ideclQualified = qual_only, ideclImplicit = implicit
                                     , ideclAs = as_mod, ideclHiding = imp_details }))
  = setSrcSpan loc $ do

    -- A package-qualified import requires -XPackageImports.
    when (isJust mb_pkg) $ do
        pkg_imports <- xoptM LangExt.PackageImports
        when (not pkg_imports) $ addErr packageImportErr

    -- If there's an error in loadInterface, (e.g. interface
    -- file not found) we get lots of spurious errors from 'filterImports'
    let imp_mod_name = unLoc loc_imp_mod_name
        doc = ppr imp_mod_name <+> text "is directly imported"

    -- Check for self-import, which confuses the typechecker (Trac #9032)
    -- ghc --make rejects self-import cycles already, but batch-mode may not
    -- at least not until TcIface.tcHiBootIface, which is too late to avoid
    -- typechecker crashes.  (Indirect self imports are not caught until
    -- TcIface, see #10337 tracking how to make this error better.)
    --
    -- Originally, we also allowed 'import {-# SOURCE #-} M', but this
    -- caused bug #10182: in one-shot mode, we should never load an hs-boot
    -- file for the module we are compiling into the EPS.  In principle,
    -- it should be possible to support this mode of use, but we would have to
    -- extend Provenance to support a local definition in a qualified location.
    -- For now, we don't support it, but see #10336
    when (imp_mod_name == moduleName this_mod &&
          (case mb_pkg of  -- If we have import "<pkg>" M, then we should
                           -- check that "<pkg>" is "this" (which is magic)
                           -- or the name of this_mod's package.  Yurgh!
                           -- c.f. GHC.findModule, and Trac #9997
             Nothing -> True
             Just (StringLiteral _ pkg_fs) -> pkg_fs == fsLit "this" ||
                            fsToUnitId pkg_fs == moduleUnitId this_mod))
         (addErr (text "A module cannot import itself:" <+> ppr imp_mod_name))

    -- Check for a missing import list (Opt_WarnMissingImportList also
    -- checks for T(..) items but that is done in checkDodgyImport below)
    case imp_details of
        Just (False, _) -> return () -- Explicit import list
        _ | implicit   -> return () -- Do not bleat for implicit imports
          | qual_only  -> return ()
          | otherwise  -> whenWOptM Opt_WarnMissingImportList $
                          addWarn (Reason Opt_WarnMissingImportList)
                                  (missingImportListWarn imp_mod_name)

    iface <- loadSrcInterface doc imp_mod_name want_boot (fmap sl_fs mb_pkg)

    -- Compiler sanity check: if the import didn't say
    -- {-# SOURCE #-} we should not get a hi-boot file
    WARN( not want_boot && mi_boot iface, ppr imp_mod_name ) do

    -- Issue a user warning for a redundant {- SOURCE -} import
    -- NB that we arrange to read all the ordinary imports before
    -- any of the {- SOURCE -} imports.
    --
    -- in --make and GHCi, the compilation manager checks for this,
    -- and indeed we shouldn't do it here because the existence of
    -- the non-boot module depends on the compilation order, which
    -- is not deterministic.  The hs-boot test can show this up.
    dflags <- getDynFlags
    warnIf NoReason
           (want_boot && not (mi_boot iface) && isOneShot (ghcMode dflags))
           (warnRedundantSourceImport imp_mod_name)
    when (mod_safe && not (safeImportsOn dflags)) $
        addErr (text "safe import can't be used as Safe Haskell isn't on!"
                $+$ ptext (sLit $ "please enable Safe Haskell through either "
                                  ++ "Safe, Trustworthy or Unsafe"))

    let
        qual_mod_name = as_mod `orElse` imp_mod_name
        imp_spec  = ImpDeclSpec { is_mod = imp_mod_name, is_qual = qual_only,
                                  is_dloc = loc, is_as = qual_mod_name }

    -- filter the imports according to the import declaration
    (new_imp_details, gres) <- filterImports iface imp_spec imp_details

    -- for certain error messages, we'd like to know what could be imported
    -- here, if everything were imported
    potential_gres <- mkGlobalRdrEnv . snd <$> filterImports iface imp_spec Nothing

    let gbl_env = mkGlobalRdrEnv gres

        is_hiding | Just (True,_) <- imp_details = True
                  | otherwise                    = False

        -- should the import be safe?
        mod_safe' = mod_safe
                    || (not implicit && safeDirectImpsReq dflags)
                    || (implicit && safeImplicitImpsReq dflags)

    let imv = ImportedModsVal
            { imv_name        = qual_mod_name
            , imv_span        = loc
            , imv_is_safe     = mod_safe'
            , imv_is_hiding   = is_hiding
            , imv_all_exports = potential_gres
            , imv_qualified   = qual_only
            }

    let imports
          = (calculateAvails dflags iface mod_safe' want_boot)
                { imp_mods = unitModuleEnv (mi_module iface) [imv] }

    -- Complain if we import a deprecated module
    whenWOptM Opt_WarnWarningsDeprecations (
       case (mi_warns iface) of
          WarnAll txt -> addWarn (Reason Opt_WarnWarningsDeprecations)
                                 (moduleWarn imp_mod_name txt)
          _           -> return ()
     )

    let new_imp_decl = L loc (decl { ideclSafe = mod_safe'
                                   , ideclHiding = new_imp_details })

    return (new_imp_decl, gbl_env, imports, mi_hpc iface)
-- | Calculate the 'ImportAvails' induced by an import of a particular
-- interface, but without 'imp_mods' (which the caller fills in).
calculateAvails :: DynFlags
                -> ModIface
                -> IsSafeImport
                -> IsBootInterface
                -> ImportAvails
calculateAvails dflags iface mod_safe' want_boot =
  let imp_mod    = mi_module iface
      orph_iface = mi_orphan iface
      has_finsts = mi_finsts iface
      deps       = mi_deps iface
      trust      = getSafeMode $ mi_trust iface
      trust_pkg  = mi_trust_pkg iface

      -- If the module exports anything defined in this module, just
      -- ignore it.  Reason: otherwise it looks as if there are two
      -- local definition sites for the thing, and an error gets
      -- reported.  Easiest thing is just to filter them out up
      -- front.  This situation only arises if a module imports
      -- itself, or another module that imported it.  (Necessarily,
      -- this involves a loop.)
      --
      -- We do this *after* filterImports, so that if you say
      --      module A where
      --         import B( AType )
      --         type AType = ...
      --
      --      module B( AType ) where
      --         import {-# SOURCE #-} A( AType )
      --
      -- then you won't get a 'B does not export AType' message.

      -- Compute new transitive dependencies
      orphans | orph_iface = ASSERT( not (imp_mod `elem` dep_orphs deps) )
                             imp_mod : dep_orphs deps
              | otherwise  = dep_orphs deps

      finsts | has_finsts = ASSERT( not (imp_mod `elem` dep_finsts deps) )
                            imp_mod : dep_finsts deps
             | otherwise  = dep_finsts deps

      pkg = moduleUnitId (mi_module iface)

      -- Does this import mean we now require our own pkg
      -- to be trusted? See Note [Trust Own Package]
      ptrust = trust == Sf_Trustworthy || trust_pkg

      (dependent_mods, dependent_pkgs, pkg_trust_req)
         | pkg == thisPackage dflags =
            -- Imported module is from the home package
            -- Take its dependent modules and add imp_mod itself
            -- Take its dependent packages unchanged
            --
            -- NB: (dep_mods deps) might include a hi-boot file
            -- for the module being compiled, CM.  Do *not* filter
            -- this out (as we used to), because when we've
            -- finished dealing with the direct imports we want to
            -- know if any of them depended on CM.hi-boot, in
            -- which case we should do the hi-boot consistency
            -- check.  See LoadIface.loadHiBootInterface
            ((moduleName imp_mod,want_boot):dep_mods deps,dep_pkgs deps,ptrust)

         | otherwise =
            -- Imported module is from another package
            -- Dump the dependent modules
            -- Add the package imp_mod comes from to the dependent packages
            ASSERT2( not (pkg `elem` (map fst $ dep_pkgs deps))
                   , ppr pkg <+> ppr (dep_pkgs deps) )
            ([], (pkg, False) : dep_pkgs deps, False)

  in ImportAvails {
          imp_mods       = emptyModuleEnv, -- this gets filled in later
          imp_orphs      = orphans,
          imp_finsts     = finsts,
          imp_dep_mods   = mkModDeps dependent_mods,
          imp_dep_pkgs   = map fst $ dependent_pkgs,
          -- Add in the imported modules trusted package
          -- requirements.  ONLY do this though if we import the
          -- module as a safe import.
          -- See Note [Tracking Trust Transitively]
          -- and Note [Trust Transitive Property]
          imp_trust_pkgs = if mod_safe'
                               then map fst $ filter snd dependent_pkgs
                               else [],
          -- Do we require our own pkg to be trusted?
          -- See Note [Trust Own Package]
          imp_trust_own_pkg = pkg_trust_req
     }
-- | Message warning that a {-# SOURCE #-} pragma on an import of the
-- given module is unnecessary.
warnRedundantSourceImport :: ModuleName -> SDoc
warnRedundantSourceImport mod_name =
  hsep [ text "Unnecessary {-# SOURCE #-} in the import of module"
       , quotes (ppr mod_name) ]
{-
************************************************************************
* *
\subsection{importsFromLocalDecls}
* *
************************************************************************
From the top-level declarations of this module produce
* the lexical environment
* the ImportAvails
created by its bindings.
Note [Top-level Names in Template Haskell decl quotes]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
See also: Note [Interactively-bound Ids in GHCi] in HscTypes
Note [Looking up Exact RdrNames] in RnEnv
Consider a Template Haskell declaration quotation like this:
module M where
f x = h [d| f = 3 |]
When renaming the declarations inside [d| ...|], we treat the
top level binders specially in two ways
1. We give them an Internal Name, not (as usual) an External one.
This is done by RnEnv.newTopSrcBinder.
2. We make them *shadow* the outer bindings.
See Note [GlobalRdrEnv shadowing]
3. We find out whether we are inside a [d| ... |] by testing the TH
stage. This is a slight hack, because the stage field was really
meant for the type checker, and here we are not interested in the
fields of Brack, hence the error thunks in thRnBrack.
-}
-- | Extend the 'GlobalRdrEnv' and the fixity environment with a batch of
-- locally-defined names (given as 'AvailInfo's), reporting duplicate
-- definitions as errors.
extendGlobalRdrEnvRn :: [AvailInfo]
                     -> MiniFixityEnv
                     -> RnM (TcGblEnv, TcLclEnv)
-- Updates both the GlobalRdrEnv and the FixityEnv
-- We return a new TcLclEnv only because we might have to
-- delete some bindings from it;
-- see Note [Top-level Names in Template Haskell decl quotes]
extendGlobalRdrEnvRn avails new_fixities
  = do { (gbl_env, lcl_env) <- getEnvs
       ; stage <- getStage
       ; isGHCi <- getIsGHCi
       ; let rdr_env  = tcg_rdr_env gbl_env
             fix_env  = tcg_fix_env gbl_env
             th_bndrs = tcl_th_bndrs lcl_env
             th_lvl   = thLevel stage

             -- Delete new_occs from global and local envs
             -- If we are in a TemplateHaskell decl bracket,
             --    we are going to shadow them
             -- See Note [GlobalRdrEnv shadowing]
             inBracket = isBrackStage stage

             lcl_env_TH = lcl_env { tcl_rdr = delLocalRdrEnvList (tcl_rdr lcl_env) new_occs }
             -- See Note [GlobalRdrEnv shadowing]

             lcl_env2 | inBracket = lcl_env_TH
                      | otherwise = lcl_env

             -- Deal with shadowing: see Note [GlobalRdrEnv shadowing]
             -- Shadow both in GHCi (new bindings at the prompt hide old
             -- ones) and inside a TH declaration bracket.
             want_shadowing = isGHCi || inBracket

             rdr_env1 | want_shadowing = shadowNames rdr_env new_names
                      | otherwise      = rdr_env

             -- Record the Template Haskell level at which each new
             -- top-level binder was bound.
             lcl_env3 = lcl_env2 { tcl_th_bndrs = extendNameEnvList th_bndrs
                                                    [ (n, (TopLevel, th_lvl))
                                                    | n <- new_names ] }

       ; rdr_env2 <- foldlM add_gre rdr_env1 new_gres

       ; let fix_env' = foldl extend_fix_env fix_env new_gres
             gbl_env' = gbl_env { tcg_rdr_env = rdr_env2, tcg_fix_env = fix_env' }

       ; traceRn (text "extendGlobalRdrEnvRn 2" <+> (pprGlobalRdrEnv True rdr_env2))
       ; return (gbl_env', lcl_env3) }
  where
    new_names = concatMap availNames avails
    new_occs  = map nameOccName new_names

    -- If there is a fixity decl for the gre, add it to the fixity env
    extend_fix_env fix_env gre
      | Just (L _ fi) <- lookupFsEnv new_fixities (occNameFS occ)
      = extendNameEnv fix_env name (FixItem occ fi)
      | otherwise
      = fix_env
      where
        name = gre_name gre
        occ  = greOccName gre

    new_gres :: [GlobalRdrElt]  -- New LocalDef GREs, derived from avails
    new_gres = concatMap localGREsFromAvail avails

    add_gre :: GlobalRdrEnv -> GlobalRdrElt -> RnM GlobalRdrEnv
    -- Extend the GlobalRdrEnv with a LocalDef GRE
    -- If there is already a LocalDef GRE with the same OccName,
    --    report an error and discard the new GRE
    -- This establishes INVARIANT 1 of GlobalRdrEnvs
    add_gre env gre
      | not (null dups)    -- Same OccName defined twice
      = do { addDupDeclErr (gre : dups); return env }
      | otherwise
      = return (extendGlobalRdrEnv env gre)
      where
        name = gre_name gre
        occ  = nameOccName name
        dups = filter isLocalGRE (lookupGlobalRdrEnv env occ)
{- *********************************************************************
* *
@getLocalNonValBinders@ returns the names for an HsDecl
It's used for source code.
*** See Note [The Naming story] in HsDecls ****
* *
********************************************************************* -}
-- | Collect the top-level binders of a declaration group, excluding
-- ordinary value bindings, and bring them into the global environment.
getLocalNonValBinders :: MiniFixityEnv -> HsGroup RdrName
                      -> RnM ((TcGblEnv, TcLclEnv), NameSet)
-- Get all the top-level binders bound the group *except*
--    for value bindings, which are treated separately
-- Specifically we return AvailInfo for
--    * type decls (incl constructors and record selectors)
--    * class decls (including class ops)
--    * associated types
--    * foreign imports
--    * value signatures (in hs-boot files only)
getLocalNonValBinders fixity_env
     (HsGroup { hs_valds  = binds,
                hs_tyclds = tycl_decls,
                hs_fords  = foreign_decls })
  = do { -- Process all type/class decls *except* family instances
       ; let inst_decls = tycl_decls >>= group_instds
       ; overload_ok <- xoptM LangExt.DuplicateRecordFields
       ; (tc_avails, tc_fldss)
           <- fmap unzip $ mapM (new_tc overload_ok)
                                (tyClGroupTyClDecls tycl_decls)
       ; traceRn (text "getLocalNonValBinders 1" <+> ppr tc_avails)
       ; envs <- extendGlobalRdrEnvRn tc_avails fixity_env
       ; setEnvs envs $ do {
           -- Bring these things into scope first
           -- See Note [Looking up family names in family instances]

           -- Process all family instances
           -- to bring new data constructors into scope
       ; (nti_availss, nti_fldss) <- mapAndUnzipM (new_assoc overload_ok)
                                                  inst_decls

           -- Finish off with value binders:
           --    foreign decls and pattern synonyms for an ordinary module
           --    type sigs in case of a hs-boot file only
       ; is_boot <- tcIsHsBootOrSig
       ; let val_bndrs | is_boot   = hs_boot_sig_bndrs
                       | otherwise = for_hs_bndrs
       ; val_avails <- mapM new_simple val_bndrs

       ; let avails    = concat nti_availss ++ val_avails
             new_bndrs = availsToNameSetWithSelectors avails `unionNameSet`
                         availsToNameSetWithSelectors tc_avails
             flds      = concat nti_fldss ++ concat tc_fldss
       ; traceRn (text "getLocalNonValBinders 2" <+> ppr avails)
       ; (tcg_env, tcl_env) <- extendGlobalRdrEnvRn avails fixity_env

       -- Extend tcg_field_env with new fields (this used to be the
       -- work of extendRecordFieldEnv)
       ; let field_env = extendNameEnvList (tcg_field_env tcg_env) flds
             envs      = (tcg_env { tcg_field_env = field_env }, tcl_env)
       ; traceRn (text "getLocalNonValBinders 3" <+> vcat [ppr flds, ppr field_env])
       ; return (envs, new_bndrs) } }
  where
    ValBindsIn _val_binds val_sigs = binds

    for_hs_bndrs :: [Located RdrName]
    for_hs_bndrs = hsForeignDeclsBinders foreign_decls

    -- In a hs-boot file, the value binders come from the
    --  *signatures*, and there should be no foreign binders
    hs_boot_sig_bndrs = [ L decl_loc (unLoc n)
                        | L decl_loc (TypeSig ns _) <- val_sigs, n <- ns]

    -- the SrcSpan attached to the input should be the span of the
    -- declaration, not just the name
    new_simple :: Located RdrName -> RnM AvailInfo
    new_simple rdr_name = do { nm <- newTopSrcBinder rdr_name
                             ; return (avail nm) }

    -- Binders of one type/class declaration: the type constructor
    -- itself plus its subordinate names and record fields.
    new_tc :: Bool -> LTyClDecl RdrName
           -> RnM (AvailInfo, [(Name, [FieldLabel])])
    new_tc overload_ok tc_decl -- NOT for type/data instances
      = do { let (bndrs, flds) = hsLTyClDeclBinders tc_decl
           ; names@(main_name : sub_names) <- mapM newTopSrcBinder bndrs
           ; flds' <- mapM (newRecordSelector overload_ok sub_names) flds
           ; let fld_env = case unLoc tc_decl of
                   DataDecl { tcdDataDefn = d } -> mk_fld_env d names flds'
                   _                            -> []
           ; return (AvailTC main_name names flds', fld_env) }

    -- Calculate the mapping from constructor names to fields, which
    -- will go in tcg_field_env. It's convenient to do this here where
    -- we are working with a single datatype definition.
    mk_fld_env :: HsDataDefn RdrName -> [Name] -> [FieldLabel] -> [(Name, [FieldLabel])]
    mk_fld_env d names flds = concatMap find_con_flds (dd_cons d)
      where
        find_con_flds (L _ (ConDeclH98 { con_name = L _ rdr
                                       , con_details = RecCon cdflds }))
          = [( find_con_name rdr
             , concatMap find_con_decl_flds (unLoc cdflds) )]
        find_con_flds (L _ (ConDeclGADT
                              { con_names = rdrs
                              , con_type = (HsIB { hsib_body = res_ty})}))
          = map (\ (L _ rdr) -> ( find_con_name rdr
                                , concatMap find_con_decl_flds cdflds))
                rdrs
          where
            -- Dig the record fields (if any) out of the GADT result type
            (_tvs, _cxt, tau) = splitLHsSigmaTy res_ty
            cdflds = case tau of
                       L _ (HsFunTy
                              (L _ (HsAppsTy
                                      [L _ (HsAppPrefix (L _ (HsRecTy flds)))])) _) -> flds
                       L _ (HsFunTy (L _ (HsRecTy flds)) _) -> flds
                       _ -> []
        find_con_flds _ = []

        find_con_name rdr
          = expectJust "getLocalNonValBinders/find_con_name" $
            find (\ n -> nameOccName n == rdrNameOcc rdr) names
        find_con_decl_flds (L _ x)
          = map find_con_decl_fld (cd_fld_names x)
        find_con_decl_fld (L _ (FieldOcc (L _ rdr) _))
          = expectJust "getLocalNonValBinders/find_con_decl_fld" $
            find (\ fl -> flLabel fl == lbl) flds
          where lbl = occNameFS (rdrNameOcc rdr)

    -- Binders of one instance declaration (data/class instances only;
    -- type instances bind nothing new).
    new_assoc :: Bool -> LInstDecl RdrName
              -> RnM ([AvailInfo], [(Name, [FieldLabel])])
    new_assoc _ (L _ (TyFamInstD {})) = return ([], [])
      -- type instances don't bind new names

    new_assoc overload_ok (L _ (DataFamInstD d))
      = do { (avail, flds) <- new_di overload_ok Nothing d
           ; return ([avail], flds) }
    new_assoc overload_ok (L _ (ClsInstD (ClsInstDecl { cid_poly_ty = inst_ty
                                                      , cid_datafam_insts = adts })))
      | Just (L loc cls_rdr) <- getLHsInstDeclClass_maybe inst_ty
      = do { cls_nm <- setSrcSpan loc $ lookupGlobalOccRn cls_rdr
           ; (avails, fldss)
                <- mapAndUnzipM (new_loc_di overload_ok (Just cls_nm)) adts
           ; return (avails, concat fldss) }
      | otherwise
      = return ([], [])    -- Do not crash on ill-formed instances
                           -- Eg   instance !Show Int   Trac #3811c

    -- Binders of one data family instance: new constructors/fields,
    -- but NOT the family tycon itself (it was bound elsewhere).
    new_di :: Bool -> Maybe Name -> DataFamInstDecl RdrName
           -> RnM (AvailInfo, [(Name, [FieldLabel])])
    new_di overload_ok mb_cls ti_decl
      = do { main_name <- lookupFamInstName mb_cls (dfid_tycon ti_decl)
           ; let (bndrs, flds) = hsDataFamInstBinders ti_decl
           ; sub_names <- mapM newTopSrcBinder bndrs
           ; flds' <- mapM (newRecordSelector overload_ok sub_names) flds
           ; let avail   = AvailTC (unLoc main_name) sub_names flds'
                               -- main_name is not bound here!
                 fld_env = mk_fld_env (dfid_defn ti_decl) sub_names flds'
           ; return (avail, fld_env) }

    new_loc_di :: Bool -> Maybe Name -> LDataFamInstDecl RdrName
               -> RnM (AvailInfo, [(Name, [FieldLabel])])
    new_loc_di overload_ok mb_cls (L _ d) = new_di overload_ok mb_cls d
-- | Make the 'FieldLabel' (and bind its selector name) for one record
-- field.  The first constructor of the datatype supplies the OccName
-- used to build the (possibly mangled) selector occurrence.
newRecordSelector :: Bool -> [Name] -> LFieldOcc RdrName -> RnM FieldLabel
newRecordSelector _ [] _ = error "newRecordSelector: datatype has no constructors!"
newRecordSelector overload_ok (dc:_) (L loc (FieldOcc (L _ fld) _))
  = do { sel_name <- newTopSrcBinder (L loc sel_rdr)
       ; return (lbl { flSelector = sel_name }) }
  where
    lbl = mkFieldLabelOccs (occNameFS (rdrNameOcc fld)) (nameOccName dc) overload_ok

    -- Use an Exact RdrName as is to preserve the bindings
    -- of an already renamer-resolved field and its use
    -- sites.  This is needed to correctly support record
    -- selectors in Template Haskell.  See Note [Binders in
    -- Template Haskell] in Convert.hs and Note [Looking up
    -- Exact RdrNames] in RnEnv.hs.
    sel_rdr | isExact fld = fld
            | otherwise   = mkRdrUnqual (flSelector lbl)
{-
Note [Looking up family names in family instances]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
module M where
type family T a :: *
type instance M.T Int = Bool
We might think that we can simply use 'lookupOccRn' when processing the type
instance to look up 'M.T'. Alas, we can't! The type family declaration is in
the *same* HsGroup as the type instance declaration. Hence, as we are
currently collecting the binders declared in that HsGroup, these binders will
not have been added to the global environment yet.
Solution is simple: process the type family declarations first, extend
the environment, and then process the type instances.
************************************************************************
* *
\subsection{Filtering imports}
* *
************************************************************************
@filterImports@ takes the @ExportEnv@ telling what the imported module makes
available, and filters it through the import spec (if any).
Note [Dealing with imports]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
For import M( ies ), we take the mi_exports of M, and make
imp_occ_env :: OccEnv (Name, AvailInfo, Maybe Name)
One entry for each Name that M exports; the AvailInfo is the
AvailInfo exported from M that exports that Name.
The situation is made more complicated by associated types. E.g.
module M where
class C a where { data T a }
instance C Int where { data T Int = T1 | T2 }
instance C Bool where { data T Int = T3 }
Then M's export_avails are (recall the AvailTC invariant from Avails.hs)
C(C,T), T(T,T1,T2,T3)
Notice that T appears *twice*, once as a child and once as a parent. From
this list we construct a raw list including
T -> (T, T( T1, T2, T3 ), Nothing)
T -> (C, C( C, T ), Nothing)
and we combine these (in function 'combine' in 'imp_occ_env' in
'filterImports') to get
T -> (T, T(T,T1,T2,T3), Just C)
So the overall imp_occ_env is
C -> (C, C(C,T), Nothing)
T -> (T, T(T,T1,T2,T3), Just C)
T1 -> (T1, T(T,T1,T2,T3), Nothing) -- similarly T2,T3
If we say
import M( T(T1,T2) )
then we get *two* Avails: C(T), T(T1,T2)
Note that the imp_occ_env will have entries for data constructors too,
although we never look up data constructors.
-}
-- | Filter what an imported module provides through the import spec
-- (if any), producing the renamed import items and their GREs.
filterImports
    :: ModIface
    -> ImpDeclSpec                     -- The span for the entire import decl
    -> Maybe (Bool, Located [LIE RdrName])    -- Import spec; True => hiding
    -> RnM (Maybe (Bool, Located [LIE Name]), -- Import spec w/ Names
            [GlobalRdrElt])                   -- Same again, but in GRE form
filterImports iface decl_spec Nothing
  = return (Nothing, gresFromAvails (Just imp_spec) (mi_exports iface))
  where
    imp_spec = ImpSpec { is_decl = decl_spec, is_item = ImpAll }

filterImports iface decl_spec (Just (want_hiding, L l import_items))
  = do -- check for errors, convert RdrNames to Names
       items1 <- mapM lookup_lie import_items

       let items2 :: [(LIE Name, AvailInfo)]
           items2 = concat items1
              -- NB the AvailInfo may have duplicates, and several items
              --    for the same parent; e.g N(x) and N(y)

           names  = availsToNameSet (map snd items2)
           keep n = not (n `elemNameSet` names)
           pruned_avails = filterAvails keep all_avails
           hiding_spec = ImpSpec { is_decl = decl_spec, is_item = ImpAll }

           -- For 'hiding', everything exported except the named things
           -- is in scope; otherwise only the resolved items are.
           gres | want_hiding = gresFromAvails (Just hiding_spec) pruned_avails
                | otherwise   = concatMap (gresFromIE decl_spec) items2

       return (Just (want_hiding, L l (map fst items2)), gres)
  where
    all_avails = mi_exports iface

    -- See Note [Dealing with imports]
    imp_occ_env :: OccEnv (Name,        -- the name
                           AvailInfo,   -- the export item providing the name
                           Maybe Name)  -- the parent of associated types
    imp_occ_env = mkOccEnv_C combine [ (nameOccName n, (n, a, Nothing))
                                     | a <- all_avails, n <- availNames a]
      where
        -- See Note [Dealing with imports]
        -- 'combine' is only called for associated data types which appear
        -- twice in the all_avails. In the example, we combine
        --    T(T,T1,T2,T3) and C(C,T)  to give  (T, T(T,T1,T2,T3), Just C)
        -- NB: the AvailTC can have fields as well as data constructors (Trac #12127)
        combine (name1, a1@(AvailTC p1 _ _), mp1)
                (name2, a2@(AvailTC p2 _ _), mp2)
          = ASSERT( name1 == name2 && isNothing mp1 && isNothing mp2 )
            if p1 == name1 then (name1, a1, Just p2)
                           else (name1, a2, Just p1)
        combine x y = pprPanic "filterImports/combine" (ppr x $$ ppr y)

    -- Resolve one RdrName against the exporter's OccName environment.
    lookup_name :: RdrName -> IELookupM (Name, AvailInfo, Maybe Name)
    lookup_name rdr | isQual rdr              = failLookupWith (QualImportError rdr)
                    | Just succ <- mb_success = return succ
                    | otherwise               = failLookupWith BadImport
      where
        mb_success = lookupOccEnv imp_occ_env (rdrNameOcc rdr)

    -- Resolve one (located) import item, emitting any warnings;
    -- errors yield an empty result rather than aborting.
    lookup_lie :: LIE RdrName -> TcRn [(LIE Name, AvailInfo)]
    lookup_lie (L loc ieRdr)
      = do (stuff, warns) <- setSrcSpan loc $
                             liftM (fromMaybe ([],[])) $
                             run_lookup (lookup_ie ieRdr)
           mapM_ emit_warning warns
           return [ (L loc ie, avail) | (ie,avail) <- stuff ]
      where
        -- Warn when importing T(..) if T was exported abstractly
        emit_warning (DodgyImport n) = whenWOptM Opt_WarnDodgyImports $
          addWarn (Reason Opt_WarnDodgyImports) (dodgyImportWarn n)
        emit_warning MissingImportList = whenWOptM Opt_WarnMissingImportList $
          addWarn (Reason Opt_WarnMissingImportList) (missingImportListItem ieRdr)
        emit_warning BadImportW = whenWOptM Opt_WarnDodgyImports $
          addWarn (Reason Opt_WarnDodgyImports) (lookup_err_msg BadImport)

        run_lookup :: IELookupM a -> TcRn (Maybe a)
        run_lookup m = case m of
          Failed err  -> addErr (lookup_err_msg err) >> return Nothing
          Succeeded a -> return (Just a)

        lookup_err_msg err = case err of
          BadImport           -> badImportItemErr iface decl_spec ieRdr all_avails
          IllegalImport       -> illegalImportItemErr
          QualImportError rdr -> qualImportItemErr rdr

    -- For each import item, we convert its RdrNames to Names,
    -- and at the same time construct an AvailInfo corresponding
    -- to what is actually imported by this item.
    -- Returns Nothing on error.
    -- We return a list here, because in the case of an import
    -- item like C, if we are hiding, then C refers to *both* a
    -- type/class and a data constructor.  Moreover, when we import
    -- data constructors of an associated family, we need separate
    -- AvailInfos for the data constructors and the family (as they have
    -- different parents).  See Note [Dealing with imports]
    lookup_ie :: IE RdrName -> IELookupM ([(IE Name, AvailInfo)], [IELookupWarning])
    lookup_ie ie = handle_bad_import $ do
      case ie of
        IEVar (L l n) -> do
            (name, avail, _) <- lookup_name n
            return ([(IEVar (L l name), trimAvail avail name)], [])

        IEThingAll (L l tc) -> do
            (name, avail, mb_parent) <- lookup_name tc
            let warns = case avail of
                          Avail {}                     -- e.g. f(..)
                            -> [DodgyImport tc]

                          AvailTC _ subs fs
                            | null (drop 1 subs) && null fs -- e.g. T(..) where T is a synonym
                            -> [DodgyImport tc]

                            | not (is_qual decl_spec)  -- e.g. import M( T(..) )
                            -> [MissingImportList]

                            | otherwise
                            -> []

                renamed_ie = IEThingAll (L l name)
                sub_avails = case avail of
                               Avail {}              -> []
                               AvailTC name2 subs fs -> [(renamed_ie, AvailTC name2 (subs \\ [name]) fs)]
            case mb_parent of
              Nothing     -> return ([(renamed_ie, avail)], warns)
                             -- non-associated ty/cls
              Just parent -> return ((renamed_ie, AvailTC parent [name] []) : sub_avails, warns)
                             -- associated type

        IEThingAbs (L l tc)
            | want_hiding   -- hiding ( C )
                       -- Here the 'C' can be a data constructor
                       --  *or* a type/class, or even both
            -> let tc_name = lookup_name tc
                   dc_name = lookup_name (setRdrNameSpace tc srcDataName)
               in
               case catIELookupM [ tc_name, dc_name ] of
                 []    -> failLookupWith BadImport
                 names -> return ([mkIEThingAbs l name | name <- names], [])
            | otherwise
            -> do nameAvail <- lookup_name tc
                  return ([mkIEThingAbs l nameAvail], [])

        IEThingWith (L l rdr_tc) wc rdr_ns rdr_fs ->
          ASSERT2(null rdr_fs, ppr rdr_fs) do
           (name, AvailTC _ ns subflds, mb_parent) <- lookup_name rdr_tc

           -- Look up the children in the sub-names of the parent
           let subnames = case ns of   -- The tc is first in ns,
                            [] -> []   -- if it is there at all
                                       -- See the AvailTC Invariant in Avail.hs
                            (n1:ns1) | n1 == name -> ns1
                                     | otherwise  -> ns
           case lookupChildren (map Left subnames ++ map Right subflds) rdr_ns of
             Nothing -> failLookupWith BadImport
             Just (childnames, childflds) ->
               case mb_parent of
                 -- non-associated ty/cls
                 Nothing
                   -> return ([(IEThingWith (L l name) wc childnames childflds,
                               AvailTC name (name:map unLoc childnames) (map unLoc childflds))],
                              [])
                 -- associated ty
                 Just parent
                   -> return ([(IEThingWith (L l name) wc childnames childflds,
                                AvailTC name (map unLoc childnames) (map unLoc childflds)),
                               (IEThingWith (L l name) wc childnames childflds,
                                AvailTC parent [name] [])],
                              [])

        _other -> failLookupWith IllegalImport
        -- could be IEModuleContents, IEGroup, IEDoc, IEDocNamed
        -- all errors.

      where
        mkIEThingAbs l (n, av, Nothing    ) = (IEThingAbs (L l n),
                                               trimAvail av n)
        mkIEThingAbs l (n, _,  Just parent) = (IEThingAbs (L l n),
                                               AvailTC parent [n] [])

        -- A bad item under 'hiding' is only a warning, not an error
        handle_bad_import m = catchIELookup m $ \err -> case err of
          BadImport | want_hiding -> return ([], [BadImportW])
          _                       -> failLookupWith err
-- | Lookup monad for import items: either a result or an 'IELookupError'.
type IELookupM = MaybeErr IELookupError
-- | Warnings that can arise while resolving a single import item;
-- emitted by 'lookup_lie' in 'filterImports'.
data IELookupWarning
  = BadImportW          -- Bad item encountered under 'hiding' (warn, don't fail)
  | MissingImportList
  | DodgyImport RdrName
  -- NB. use the RdrName for reporting a "dodgy" import
-- | Ways a single import item can fail to resolve.
data IELookupError
  = QualImportError RdrName   -- Item was written with a qualified name
  | BadImport                 -- Name not exported by the module
  | IllegalImport             -- Item form not legal in an import list
-- | Abort an 'IELookupM' computation with the given error.
failLookupWith :: IELookupError -> IELookupM a
failLookupWith = Failed
-- | Run an 'IELookupM' action, passing any failure to the handler.
catchIELookup :: IELookupM a -> (IELookupError -> IELookupM a) -> IELookupM a
catchIELookup action handler =
  case action of
    Failed err  -> handler err
    Succeeded x -> return x
-- | Keep the results of the successful lookups, dropping the failures.
catIELookupM :: [IELookupM a] -> [a]
catIELookupM = foldr keep []
  where
    keep (Succeeded a) acc = a : acc
    keep (Failed _)    acc = acc
{-
************************************************************************
* *
\subsection{Import/Export Utils}
* *
************************************************************************
-}
-- | Union two 'AvailInfo's for the same entity, merging subordinate
-- names and fields.  Panics (in debug builds) if the two avails are
-- for different names.
plusAvail :: AvailInfo -> AvailInfo -> AvailInfo
plusAvail a1 a2
  | debugIsOn && availName a1 /= availName a2
  = pprPanic "RnEnv.plusAvail names differ" (hsep [ppr a1,ppr a2])
plusAvail a1@(Avail {})     (Avail {})        = a1
plusAvail (AvailTC _ [] []) a2@(AvailTC {})   = a2
plusAvail a1@(AvailTC {})   (AvailTC _ [] []) = a1
plusAvail (AvailTC n1 (s1:ss1) fs1) (AvailTC n2 (s2:ss2) fs2)
  = case (n1==s1, n2==s2) of  -- Maintain invariant the parent is first
      (True,True)   -> AvailTC n1 (s1 : (ss1 `unionLists` ss2))
                                  (fs1 `unionLists` fs2)
      (True,False)  -> AvailTC n1 (s1 : (ss1 `unionLists` (s2:ss2)))
                                  (fs1 `unionLists` fs2)
      (False,True)  -> AvailTC n1 (s2 : ((s1:ss1) `unionLists` ss2))
                                  (fs1 `unionLists` fs2)
      (False,False) -> AvailTC n1 ((s1:ss1) `unionLists` (s2:ss2))
                                  (fs1 `unionLists` fs2)
plusAvail (AvailTC n1 ss1 fs1) (AvailTC _ [] fs2)
  = AvailTC n1 ss1 (fs1 `unionLists` fs2)
plusAvail (AvailTC n1 [] fs1)  (AvailTC _ ss2 fs2)
  = AvailTC n1 ss2 (fs1 `unionLists` fs2)
plusAvail a1 a2 = pprPanic "RnEnv.plusAvail" (hsep [ppr a1,ppr a2])
-- | trims an 'AvailInfo' to keep only a single name
trimAvail :: AvailInfo -> Name -> AvailInfo
trimAvail (Avail b n)       _ = Avail b n
trimAvail (AvailTC n ns fs) m = case find ((== m) . flSelector) fs of
    Just x  -> AvailTC n [] [x]   -- 'm' names a field selector: keep just that field
    Nothing -> ASSERT( m `elem` ns ) AvailTC n [m] []
-- | Keep only the names satisfying the predicate, dropping any
-- 'AvailInfo' that becomes empty.
filterAvails :: (Name -> Bool) -> [AvailInfo] -> [AvailInfo]
filterAvails keep = foldr (filterAvail keep) []
-- | Filter one 'AvailInfo' by the predicate, consing the surviving
-- avail (if any) onto the accumulator.
filterAvail :: (Name -> Bool) -> AvailInfo -> [AvailInfo] -> [AvailInfo]
filterAvail keep avail acc =
  case avail of
    Avail _ n
      | keep n    -> avail : acc
      | otherwise -> acc
    AvailTC tc ns fs
      | null ns' && null fs' -> acc   -- nothing left of this avail
      | otherwise            -> AvailTC tc ns' fs' : acc
      where
        ns' = filter keep ns
        fs' = filter (keep . flSelector) fs
-- | Given an import\/export spec, construct the appropriate 'GlobalRdrElt's.
gresFromIE :: ImpDeclSpec -> (LIE Name, AvailInfo) -> [GlobalRdrElt]
gresFromIE decl_spec (L loc ie, avail)
  = gresFromAvail prov_fn avail
  where
    -- For T(..) only T itself counts as explicitly mentioned;
    -- its children were brought in by the "..".
    is_explicit = case ie of
                    IEThingAll (L _ name) -> \n -> n == name
                    _                     -> \_ -> True
    prov_fn name
      = Just (ImpSpec { is_decl = decl_spec, is_item = item_spec })
      where
        -- NB: the record field 'is_explicit' is set from the local
        -- function of the same name above
        item_spec = ImpSome { is_explicit = is_explicit name, is_iloc = loc }
{-
Note [Children for duplicate record fields]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider the module
{-# LANGUAGE DuplicateRecordFields #-}
module M (F(foo, MkFInt, MkFBool)) where
data family F a
data instance F Int = MkFInt { foo :: Int }
data instance F Bool = MkFBool { foo :: Bool }
The `foo` in the export list refers to *both* selectors! For this
reason, lookupChildren builds an environment that maps the FastString
to a list of items, rather than a single item.
-}
-- | Build a map from each parent 'Name' to the GREs whose 'gre_par'
-- points at it (ordinary parents and field parents alike).
mkChildEnv :: [GlobalRdrElt] -> NameEnv [GlobalRdrElt]
mkChildEnv gres = foldr insert emptyNameEnv gres
  where
    insert gre env =
      maybe env (\p -> extendNameEnv_Acc (:) singleton env p gre)
                (parent_of gre)

    -- GREs with no parent (including pattern synonyms) are not children
    parent_of gre = case gre_par gre of
      ParentIs  p   -> Just p
      FldParent p _ -> Just p
      NoParent      -> Nothing
      PatternSynonym -> Nothing
-- | Select the GREs that arose from pattern synonym declarations.
findPatSyns :: [GlobalRdrElt] -> [GlobalRdrElt]
findPatSyns gres = filter is_pat_syn gres
  where
    is_pat_syn (GRE { gre_par = PatternSynonym }) = True
    is_pat_syn _                                  = False
-- | Look up the children recorded for a parent 'Name'; empty list if none.
findChildren :: NameEnv [a] -> Name -> [a]
findChildren env n = lookupNameEnv env n `orElse` []
lookupChildren :: [Either Name FieldLabel] -> [Located RdrName]
               -> Maybe ([Located Name], [Located FieldLabel])
-- (lookupChildren all_kids rdr_items) maps each rdr_item to its
-- corresponding Name all_kids, if the former exists
-- The matching is done by FastString, not OccName, so that
--    Cls( meth, AssocTy )
-- will correctly find AssocTy among the all_kids of Cls, even though
-- the RdrName for AssocTy may have a (bogus) DataName namespace
-- (Really the rdr_items should be FastStrings in the first place.)
lookupChildren all_kids rdr_items
  = do xs <- mapM doOne rdr_items
       -- partitionEithers gives ([name], [[field]]); flatten the fields
       return (fmap concat (partitionEithers xs))
  where
    -- One RdrName may match several children when it names duplicate
    -- record fields, hence the list on the Right alternative.
    doOne (L l r) = case (lookupFsEnv kid_env . occNameFS . rdrNameOcc) r of
      Just [Left n]            -> Just (Left (L l n))
      Just rs | all isRight rs -> Just (Right (map (L l) (rights rs)))
      _                        -> Nothing

    -- See Note [Children for duplicate record fields]
    kid_env = extendFsEnvList_C (++) emptyFsEnv
                 [(either (occNameFS . nameOccName) flLabel x, [x]) | x <- all_kids]
-- | Split a list of GREs into ordinary names and record field labels.
classifyGREs :: [GlobalRdrElt] -> ([Name], [FieldLabel])
classifyGREs gres = partitionEithers (map classifyGRE gres)
-- | Decide whether a GRE stands for an ordinary 'Name' or a record
-- field, rebuilding the 'FieldLabel' from the parent information.
classifyGRE :: GlobalRdrElt -> Either Name FieldLabel
classifyGRE gre = case gre_par gre of
    -- FldParent's Maybe carries the (mangled) label when the field is
    -- overloaded (DuplicateRecordFields); the Bool below records that.
    FldParent _ Nothing    -> Right (FieldLabel (occNameFS (nameOccName n)) False n)
    FldParent _ (Just lbl) -> Right (FieldLabel lbl True n)
    _                      -> Left n
  where
    n = gre_name gre
-- | Combines 'AvailInfo's from the same family
-- 'avails' may have several items with the same availName
-- E.g  import Ix( Ix(..), index )
-- will give Ix(Ix,index,range) and Ix(index)
-- We want to combine these; addAvail does that
nubAvails :: [AvailInfo] -> [AvailInfo]
-- NOTE(review): this is a lazy foldl; a strict fold (foldl') would
-- avoid thunk build-up on long export lists -- confirm before changing.
nubAvails avails = nameEnvElts (foldl add emptyNameEnv avails)
  where
    add env avail = extendNameEnv_C plusAvail env (availName avail) avail
{-
************************************************************************
* *
\subsection{Export list processing}
* *
************************************************************************
Processing the export list.
You might think that we should record things that appear in the export
list as ``occurrences'' (using @addOccurrenceName@), but you'd be
wrong. We do check (here) that they are in scope, but there is no
need to slurp in their actual declaration (which is what
@addOccurrenceName@ forces).
Indeed, doing so would cause big trouble when compiling @PrelBase@, because
it re-exports @GHC@, which includes @takeMVar#@, whose type includes
@ConcBase.StateAndSynchVar#@, and so on...
Note [Exports of data families]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose you see (Trac #5306)
module M where
import X( F )
data instance F Int = FInt
What does M export? AvailTC F [FInt]
or AvailTC F [F,FInt]?
The former is strictly right because F isn't defined in this module.
But then you can never do an explicit import of M, thus
import M( F( FInt ) )
because F isn't exported by M. Nor can you import FInt alone from here
import M( FInt )
because we don't have syntax to support that. (It looks like an import of
the type FInt.)
At one point I implemented a compromise:
* When constructing exports with no export list, or with module M(
module M ), we add the parent to the exports as well.
* But not when you see module M( f ), even if f is a
class method with a parent.
* Nor when you see module M( module N ), with N /= M.
But the compromise seemed too much of a hack, so we backed it out.
You just have to use an explicit export list:
module M( F(..) ) where ...
-}
type ExportAccum        -- The type of the accumulating parameter of
                        -- the main worker function in rnExports
     = ([LIE Name],     -- Export items with Names
        ExportOccMap,   -- Tracks exported occurrence names
        [AvailInfo])    -- The accumulated exported stuff
                        --   Not nub'd!
-- | Starting accumulator for processing an export list.
emptyExportAccum :: ExportAccum
emptyExportAccum = ([], emptyOccEnv, [])
type ExportOccMap = OccEnv (Name, IE RdrName)
        -- Tracks what a particular exported OccName
        --   in an export list refers to, and which item
        --   it came from.  It's illegal to export two distinct things
        --   that have the same occurrence name
-- | Rename the export list (or synthesise one) and record the final
-- exports in the 'TcGblEnv'.
rnExports :: Bool       -- False => no 'module M(..) where' header at all
          -> Maybe (Located [LIE RdrName]) -- Nothing => no explicit export list
          -> TcGblEnv
          -> RnM (Maybe [LIE Name], TcGblEnv)
        -- Complains if two distinct exports have same OccName
        -- Warns about identical exports.
        -- Complains about exports items not in scope
rnExports explicit_mod exports
          tcg_env@(TcGblEnv { tcg_mod     = this_mod,
                              tcg_rdr_env = rdr_env,
                              tcg_imports = imports })
 = unsetWOptM Opt_WarnWarningsDeprecations $
       -- Do not report deprecations arising from the export
       -- list, to avoid bleating about re-exporting a deprecated
       -- thing (especially via 'module Foo' export item)
   do {
        -- If the module header is omitted altogether, then behave
        -- as if the user had written "module Main(main) where..."
        -- EXCEPT in interactive mode, when we behave as if he had
        -- written "module Main where ..."
        -- Reason: don't want to complain about 'main' not in scope
        --         in interactive mode
      ; dflags <- getDynFlags
      ; let real_exports
               | explicit_mod = exports
               | ghcLink dflags == LinkInMemory = Nothing
               | otherwise
                  = Just (noLoc [noLoc (IEVar (noLoc main_RDR_Unqual))])
                  -- ToDo: the 'noLoc' here is unhelpful if 'main'
                  --       turns out to be out of scope

      ; (rn_exports, avails) <- exports_from_avail real_exports rdr_env imports this_mod
      ; traceRn (ppr avails)
      ; let final_avails = nubAvails avails    -- Combine families
            final_ns     = availsToNameSetWithSelectors final_avails

      ; traceRn (text "rnExports: Exports:" <+> ppr final_avails)

      ; let new_tcg_env =
              (tcg_env { tcg_exports    = final_avails,
                         tcg_rn_exports = case tcg_rn_exports tcg_env of
                                            Nothing -> Nothing
                                            Just _  -> rn_exports,
                         tcg_dus = tcg_dus tcg_env `plusDU`
                                   usesOnly final_ns })
      ; return (rn_exports, new_tcg_env) }
exports_from_avail :: Maybe (Located [LIE RdrName])
                         -- Nothing => no explicit export list
                   -> GlobalRdrEnv
                   -> ImportAvails
                   -> Module
                   -> RnM (Maybe [LIE Name], [AvailInfo])
exports_from_avail Nothing rdr_env _imports _this_mod
   -- The same as (module M) where M is the current module name,
   -- so that's how we handle it, except we also export the data family
   -- when a data instance is exported.
  = let avails = [ fix_faminst $ availFromGRE gre
                 | gre <- globalRdrEnvElts rdr_env
                 , isLocalGRE gre ]
    in return (Nothing, avails)
  where
    -- #11164: when we define a data instance
    -- but not data family, re-export the family
    -- Even though we don't check whether this is actually a data family
    -- only data families can locally define subordinate things (`ns` here)
    -- without locally defining (and instead importing) the parent (`n`)
    fix_faminst (AvailTC n ns flds)
      | not (n `elem` ns)
      = AvailTC n (n:ns) flds

    fix_faminst avail = avail
exports_from_avail (Just (L _ rdr_items)) rdr_env imports this_mod
= do (ie_names, _, exports) <- foldlM do_litem emptyExportAccum rdr_items
return (Just ie_names, exports)
where
do_litem :: ExportAccum -> LIE RdrName -> RnM ExportAccum
do_litem acc lie = setSrcSpan (getLoc lie) (exports_from_item acc lie)
-- Maps a parent to its in-scope children
kids_env :: NameEnv [GlobalRdrElt]
kids_env = mkChildEnv (globalRdrEnvElts rdr_env)
pat_syns :: [GlobalRdrElt]
pat_syns = findPatSyns (globalRdrEnvElts rdr_env)
imported_modules = [ imv_name imv
| xs <- moduleEnvElts $ imp_mods imports, imv <- xs ]
exports_from_item :: ExportAccum -> LIE RdrName -> RnM ExportAccum
exports_from_item acc@(ie_names, occs, exports)
(L loc (IEModuleContents (L lm mod)))
| let earlier_mods = [ mod
| (L _ (IEModuleContents (L _ mod))) <- ie_names ]
, mod `elem` earlier_mods -- Duplicate export of M
= do { warnIf (Reason Opt_WarnDuplicateExports) True
(dupModuleExport mod) ;
return acc }
| otherwise
= do { let { exportValid = (mod `elem` imported_modules)
|| (moduleName this_mod == mod)
; gre_prs = pickGREsModExp mod (globalRdrEnvElts rdr_env)
; new_exports = map (availFromGRE . fst) gre_prs
; names = map (gre_name . fst) gre_prs
; all_gres = foldr (\(gre1,gre2) gres -> gre1 : gre2 : gres) [] gre_prs
}
; checkErr exportValid (moduleNotImported mod)
; warnIf (Reason Opt_WarnDodgyExports)
(exportValid && null gre_prs)
(nullModuleExport mod)
; traceRn (text "efa" <+> (ppr mod $$ ppr all_gres))
; addUsedGREs all_gres
; occs' <- check_occs (IEModuleContents (noLoc mod)) occs names
-- This check_occs not only finds conflicts
-- between this item and others, but also
-- internally within this item. That is, if
-- 'M.x' is in scope in several ways, we'll have
-- several members of mod_avails with the same
-- OccName.
; traceRn (vcat [ text "export mod" <+> ppr mod
, ppr new_exports ])
; return (L loc (IEModuleContents (L lm mod)) : ie_names,
occs', new_exports ++ exports) }
exports_from_item acc@(lie_names, occs, exports) (L loc ie)
| isDoc ie
= do new_ie <- lookup_doc_ie ie
return (L loc new_ie : lie_names, occs, exports)
| otherwise
= do (new_ie, avail) <- lookup_ie ie
if isUnboundName (ieName new_ie)
then return acc -- Avoid error cascade
else do
occs' <- check_occs ie occs (availNames avail)
return (L loc new_ie : lie_names, occs', avail : exports)
-------------
-- Rename a single (non-doc, non-module) export item, returning the
-- renamed item together with the AvailInfo it exports.
lookup_ie :: IE RdrName -> RnM (IE Name, AvailInfo)
lookup_ie (IEVar (L l rdr))
  = do (name, avail) <- lookupGreAvailRn rdr
       return (IEVar (L l name), avail)

lookup_ie (IEThingAbs (L l rdr))
  = do (name, avail) <- lookupGreAvailRn rdr
       return (IEThingAbs (L l name), avail)

-- T(..): export T together with all of its in-scope children
lookup_ie ie@(IEThingAll n)
  = do
       (n, avail, flds) <- lookup_ie_all ie n
       let name = unLoc n
       return (IEThingAll n, AvailTC name (name:avail) flds)

-- T(a,b,...): export T with the listed children; a wildcard in the
-- list additionally pulls in all children, as for T(..)
lookup_ie ie@(IEThingWith l wc sub_rdrs _)
  = do
       (lname, subs, avails, flds) <- lookup_ie_with ie l sub_rdrs
       (_, all_avail, all_flds) <-
         case wc of
           NoIEWildcard -> return (lname, [], [])
           IEWildcard _ -> lookup_ie_all ie l
       let name = unLoc lname
       return (IEThingWith lname wc subs [],
               AvailTC name (name : avails ++ all_avail)
                            (flds ++ all_flds))

lookup_ie _ = panic "lookup_ie"    -- Other cases covered earlier
lookup_ie_with :: IE RdrName -> Located RdrName -> [Located RdrName]
               -> RnM (Located Name, [Located Name], [Name], [FieldLabel])
-- Rename an export item T(a,b,c): look up the parent T, then match the
-- listed sub-entities against T's in-scope children (including any
-- bundled pattern synonyms).  Returns the located parent, the located
-- non-field children, their Names, and the field labels.
lookup_ie_with ie (L l rdr) sub_rdrs
  = do name <- lookupGlobalOccRn rdr
       let gres = findChildren kids_env name
           mchildren =
             lookupChildren (map classifyGRE (gres ++ pat_syns)) sub_rdrs
       addUsedKids rdr gres
       if isUnboundName name
          then return (L l name, [], [name], [])  -- Parent not in scope; error
                                                  -- already reported upstream
          else
            case mchildren of
              Nothing -> do
                -- Some listed child is not a child of the parent
                addErr (exportItemErr ie)
                return (L l name, [], [name], [])
              Just (non_flds, flds) -> do
                -- NB: the kids were already recorded as used by the
                -- addUsedKids call above; the previous code called it a
                -- second time here, redundantly recording the same GREs.
                return (L l name, non_flds
                       , map unLoc non_flds
                       , map unLoc flds)
lookup_ie_all :: IE RdrName -> Located RdrName
              -> RnM (Located Name, [Name], [FieldLabel])
-- Rename a T(..) export item: look up the parent and return all its
-- in-scope children, split into ordinary names and record fields.
-- Warns (or errors) when the parent has no children in scope.
lookup_ie_all ie (L l rdr) =
  do name <- lookupGlobalOccRn rdr
     let gres = findChildren kids_env name
         (non_flds, flds) = classifyGREs gres
     addUsedKids rdr gres
     warnDodgyExports <- woptM Opt_WarnDodgyExports
     when (null gres) $
       if isTyConName name
       then when warnDodgyExports $
              addWarn (Reason Opt_WarnDodgyExports)
                      (dodgyExportWarn name)
       else -- This occurs when you export T(..), but
            -- only import T abstractly, or T is a synonym.
            addErr (exportItemErr ie)
     return (L l name, non_flds, flds)
-------------
-- Rename the documentation-only export items.  These carry no entity
-- names, just Haddock payloads, so only the embedded docs need renaming.
lookup_doc_ie :: IE RdrName -> RnM (IE Name)
lookup_doc_ie (IEGroup lev doc) = do { rn_doc <- rnHsDoc doc
                                     ; return (IEGroup lev rn_doc) }
lookup_doc_ie (IEDoc doc)       = do { rn_doc <- rnHsDoc doc
                                     ; return (IEDoc rn_doc) }
lookup_doc_ie (IEDocNamed str)  = return (IEDocNamed str)
lookup_doc_ie _                 = panic "lookup_doc_ie"  -- Other cases covered earlier
-- In an export item M.T(A,B,C), we want to treat the uses of
-- A,B,C as if they were M.A, M.B, M.C
-- Happily pickGREs does just the right thing
addUsedKids :: RdrName -> [GlobalRdrElt] -> RnM ()
addUsedKids parent_rdr = addUsedGREs . pickGREs parent_rdr
-- Is this export item a documentation-only item?
isDoc :: IE RdrName -> Bool
isDoc ie = case ie of
  IEDoc _      -> True
  IEDocNamed _ -> True
  IEGroup _ _  -> True
  _            -> False
-------------------------------
-- Check that the entities exported by 'ie' do not clash (by OccName)
-- with entities exported by earlier items, extending the accumulated
-- ExportOccMap as we go.  Duplicate exports of the *same* entity only
-- warn; different entities with the same OccName are an error.
check_occs :: IE RdrName -> ExportOccMap -> [Name] -> RnM ExportOccMap
check_occs ie occs names    -- 'names' are the entities specified by 'ie'
  = foldlM check occs names
  where
    check occs name
      = case lookupOccEnv occs name_occ of
          Nothing -> return (extendOccEnv occs name_occ (name, ie))

          Just (name', ie')
            | name == name'    -- Duplicate export
            -- But we don't want to warn if the same thing is exported
            -- by two different module exports. See ticket #4478.
            -> do { warnIf (Reason Opt_WarnDuplicateExports)
                           (not (dupExport_ok name ie ie'))
                           (dupExportWarn name_occ ie ie')
                  ; return occs }

            | otherwise    -- Same occ name but different names: an error
            -> do { global_env <- getGlobalRdrEnv ;
                    addErr (exportClashErr global_env name' name ie' ie) ;
                    return occs }
      where
        name_occ = nameOccName name
dupExport_ok :: Name -> IE RdrName -> IE RdrName -> Bool
-- The Name is exported by both IEs. Is that ok?
-- "No"  iff the name is mentioned explicitly in both IEs
--       or one of the IEs mentions the name *alone*
-- "Yes" otherwise
--
-- Examples of "no":  module M( f, f )
--                    module M( fmap, Functor(..) )
--                    module M( module Data.List, head )
--
-- Example of "yes"
--    module M( module A, module B ) where
--        import A( f )
--        import B( f )
--
-- Example of "yes" (Trac #2436)
--    module M( C(..), T(..) ) where
--        class C a where { data T a }
--        instance C Int where { data T Int = TInt }
--
-- Example of "yes" (Trac #2436)
--    module Foo ( T ) where
--      data family T a
--    module Bar ( T(..), module Foo ) where
--        import Foo
--        data instance T Int = TInt
dupExport_ok n ie1 ie2
  = not ( mentioned_alone ie1 || mentioned_alone ie2
       || (mentioned_explicitly ie1 && mentioned_explicitly ie2) )
  where
    -- Does the item name this entity explicitly, as opposed to
    -- sweeping it up via 'module M' or a T(..) wildcard?
    mentioned_explicitly (IEModuleContents _) = False               -- module M
    mentioned_explicitly (IEThingAll r)
      = nameOccName n == rdrNameOcc (unLoc r)                       -- T(..)
    mentioned_explicitly _                    = True

    -- Does the item mention this entity all by itself?
    mentioned_alone (IEVar {})      = True
    mentioned_alone (IEThingAbs {}) = True
    mentioned_alone _               = False
{-
*********************************************************
* *
\subsection{Unused names}
* *
*********************************************************
-}
reportUnusedNames :: Maybe (Located [LIE RdrName])  -- Export list
                  -> TcGblEnv -> RnM ()
-- Entry point for the unused-entity warnings: unused import decls,
-- unused local top-level bindings, and missing type signatures.
reportUnusedNames _export_decls gbl_env
 = do { traceRn ((text "RUN") <+> (ppr (tcg_dus gbl_env)))
      ; warnUnusedImportDecls gbl_env
      ; warnUnusedTopBinds unused_locals
      ; warnMissingSignatures gbl_env }
  where
    used_names :: NameSet
    used_names = findUses (tcg_dus gbl_env) emptyNameSet
    -- NB: currently, if f x = g, we only treat 'g' as used if 'f' is used
    -- Hence findUses

    -- Collect the defined names from the in-scope environment
    defined_names :: [GlobalRdrElt]
    defined_names = globalRdrEnvElts (tcg_rdr_env gbl_env)

    -- Note that defined_and_used, defined_but_not_used
    -- are both [GRE]; that's why we need defined_and_used
    -- rather than just used_names
    _defined_and_used, defined_but_not_used :: [GlobalRdrElt]
    (_defined_and_used, defined_but_not_used)
        = partition (gre_is_used used_names) defined_names

    kids_env = mkChildEnv defined_names
    -- This is done in mkExports too; duplicated work

    gre_is_used :: NameSet -> GlobalRdrElt -> Bool
    gre_is_used used_names (GRE {gre_name = name})
        = name `elemNameSet` used_names
          || any (\ gre -> gre_name gre `elemNameSet` used_names) (findChildren kids_env name)
          -- A use of C implies a use of T,
          -- if C was brought into scope by T(..) or T(C)

    -- Filter out the ones that are
    --  (a) defined in this module, and
    --  (b) not defined by a 'deriving' clause
    -- The latter have an Internal Name, so we can filter them out easily
    unused_locals :: [GlobalRdrElt]
    unused_locals = filter is_unused_local defined_but_not_used

    is_unused_local :: GlobalRdrElt -> Bool
    is_unused_local gre = isLocalGRE gre && isExternalName (gre_name gre)
{-
*********************************************************
* *
\subsection{Unused imports}
* *
*********************************************************
This code finds which import declarations are unused. The
specification and implementation notes are here:
http://ghc.haskell.org/trac/ghc/wiki/Commentary/Compiler/UnusedImports
-}
type ImportDeclUsage
   = ( LImportDecl Name   -- The import declaration
     , [AvailInfo]        -- What *is* used (normalised)
     , [Name] )           -- What is imported but *not* used

-- | Warn about user-written import declarations (or parts of them)
-- that are unused, and dump minimal import lists if requested.
warnUnusedImportDecls :: TcGblEnv -> RnM ()
warnUnusedImportDecls gbl_env
  = do { uses <- readMutVar (tcg_used_gres gbl_env)
       ; let user_imports = filterOut (ideclImplicit . unLoc) (tcg_rn_imports gbl_env)
             -- This whole function deals only with *user* imports
             -- both for warning about unnecessary ones, and for
             -- deciding the minimal ones
             rdr_env = tcg_rdr_env gbl_env
             fld_env = mkFieldEnv rdr_env

       ; let usage :: [ImportDeclUsage]
             usage = findImportUsage user_imports uses

       ; traceRn (vcat [ text "Uses:" <+> ppr uses
                       , text "Import usage" <+> ppr usage])
       ; whenWOptM Opt_WarnUnusedImports $
           mapM_ (warnUnusedImport Opt_WarnUnusedImports fld_env) usage

       ; whenGOptM Opt_D_dump_minimal_imports $
           printMinimalImports usage }
-- | Warn the user about top level binders that lack type signatures.
warnMissingSignatures :: TcGblEnv -> RnM ()
warnMissingSignatures gbl_env
  = do { let exports  = availsToNameSet (tcg_exports gbl_env)
             sig_ns   = tcg_sigs gbl_env
               -- We use sig_ns to exclude top-level bindings that are generated by GHC
             binds    = collectHsBindsBinders $ tcg_binds gbl_env
             pat_syns = tcg_patsyns gbl_env

         -- Warn about missing signatures
         -- Do this only when we have a type to offer
       ; warn_missing_sigs  <- woptM Opt_WarnMissingSignatures
       ; warn_only_exported <- woptM Opt_WarnMissingExportedSignatures
       ; warn_pat_syns      <- woptM Opt_WarnMissingPatternSynonymSignatures

         -- NOTE(review): the first matching guard picks the single flag
         -- blamed in the warning, even if several flags are enabled
       ; let add_sig_warns
               | warn_only_exported = add_warns Opt_WarnMissingExportedSignatures
               | warn_missing_sigs  = add_warns Opt_WarnMissingSignatures
               | warn_pat_syns      = add_warns Opt_WarnMissingPatternSynonymSignatures
               | otherwise          = return ()

             add_warns flag
               = when warn_pat_syns
                      (mapM_ add_pat_syn_warn pat_syns) >>
                 when (warn_missing_sigs || warn_only_exported)
                      (mapM_ add_bind_warn binds)
               where
                 -- Pattern synonyms carry their type directly
                 add_pat_syn_warn p
                   = add_warn (patSynName p) (pprPatSynType p)

                 -- Ordinary binders: tidy the inferred type for display
                 add_bind_warn id
                   = do { env <- tcInitTidyEnv    -- Why not use emptyTidyEnv?
                        ; let name    = idName id
                              (_, ty) = tidyOpenType env (idType id)
                              ty_msg  = ppr ty
                        ; add_warn name ty_msg }

                 add_warn name ty_msg
                   = when (name `elemNameSet` sig_ns && export_check name)
                          (addWarnAt (Reason flag) (getSrcSpan name)
                                     (get_msg name ty_msg))

                 export_check name
                   = not warn_only_exported || name `elemNameSet` exports

                 get_msg name ty_msg
                   = sep [ text "Top-level binding with no type signature:",
                           nest 2 $ pprPrefixName name <+> dcolon <+> ty_msg ]

       ; add_sig_warns }
{-
Note [The ImportMap]
~~~~~~~~~~~~~~~~~~~~
The ImportMap is a short-lived intermediate data structure that records, for
each import declaration, what stuff brought into scope by that
declaration is actually used in the module.
The SrcLoc is the location of the END of a particular 'import'
declaration. Why *END*? Because we don't want to get confused
by the implicit Prelude import. Consider (Trac #7476) the module
import Foo( foo )
main = print foo
There is an implicit 'import Prelude(print)', and it gets a SrcSpan
of line 1:1 (just the point, not a span). If we use the *START* of
the SrcSpan to identify the import decl, we'll confuse the implicit
import Prelude with the explicit 'import Foo'. So we use the END.
It's just a cheap hack; we could equally well use the Span too.
The AvailInfos are the things imported from that decl (just a list,
not normalised).
-}
type ImportMap = Map SrcLoc [AvailInfo]  -- See [The ImportMap]

-- For each user import declaration, work out which of the things it
-- brings into scope are actually used, and which explicit import-list
-- items are unused.
findImportUsage :: [LImportDecl Name]
                -> [GlobalRdrElt]
                -> [ImportDeclUsage]
findImportUsage imports used_gres
  = map unused_decl imports
  where
    import_usage :: ImportMap
    import_usage
      = foldr extendImportMap Map.empty used_gres

    unused_decl decl@(L loc (ImportDecl { ideclHiding = imps }))
      = (decl, nubAvails used_avails, nameSetElemsStable unused_imps)
      where
        used_avails = Map.lookup (srcSpanEnd loc) import_usage `orElse` []
                      -- srcSpanEnd: see Note [The ImportMap]
        used_names   = availsToNameSetWithSelectors used_avails
        used_parents = mkNameSet [n | AvailTC n _ _ <- used_avails]

        unused_imps   -- Not trivial; see eg Trac #7454
          = case imps of
              Just (False, L _ imp_ies) ->
                 foldr (add_unused . unLoc) emptyNameSet imp_ies
              _other -> emptyNameSet -- No explicit import list => no unused-name list

        -- Accumulate the unused names from one import-list item
        add_unused :: IE Name -> NameSet -> NameSet
        add_unused (IEVar (L _ n))      acc = add_unused_name n acc
        add_unused (IEThingAbs (L _ n)) acc = add_unused_name n acc
        add_unused (IEThingAll (L _ n)) acc = add_unused_all n acc
        add_unused (IEThingWith (L _ p) wc ns fs) acc =
          add_wc_all (add_unused_with p xs acc)
          where xs = map unLoc ns ++ map (flSelector . unLoc) fs
                add_wc_all = case wc of
                               NoIEWildcard -> id
                               IEWildcard _ -> add_unused_all p
        add_unused _ acc = acc

        add_unused_name n acc
          | n `elemNameSet` used_names = acc
          | otherwise                  = acc `extendNameSet` n
        add_unused_all n acc
          | n `elemNameSet` used_names   = acc
          | n `elemNameSet` used_parents = acc
          | otherwise                    = acc `extendNameSet` n
        add_unused_with p ns acc
          | all (`elemNameSet` acc1) ns = add_unused_name p acc1
          | otherwise = acc1
          where
            acc1 = foldr add_unused_name acc ns
        -- If you use 'signum' from Num, then the user may well have
        -- imported Num(signum).  We don't want to complain that
        -- Num is not itself mentioned.  Hence the two cases in add_unused_with.
extendImportMap :: GlobalRdrElt -> ImportMap -> ImportMap
-- For each of a list of used GREs, find all the import decls that brought
-- it into scope; choose one of them (bestImport), and record
-- the RdrName in that import decl's entry in the ImportMap
extendImportMap gre imp_map
  = add_imp gre (bestImport (gre_imp gre)) imp_map
  where
    add_imp :: GlobalRdrElt -> ImportSpec -> ImportMap -> ImportMap
    add_imp gre (ImpSpec { is_decl = imp_decl_spec }) imp_map
      = Map.insertWith add decl_loc [avail] imp_map
      where
        add _ avails = avail : avails -- add is really just a specialised (++)
        decl_loc = srcSpanEnd (is_dloc imp_decl_spec)
                   -- For srcSpanEnd see Note [The ImportMap]
        avail    = availFromGRE gre
warnUnusedImport :: WarningFlag -> NameEnv (FieldLabelString, Name)
                 -> ImportDeclUsage -> RnM ()
-- Emit the appropriate warning for one import declaration, given what
-- was and was not used from it.
warnUnusedImport flag fld_env (L loc decl, used, unused)
  | Just (False,L _ []) <- ideclHiding decl
  = return ()            -- Do not warn for 'import M()'

  | Just (True, L _ hides) <- ideclHiding decl
  , not (null hides)
  , pRELUDE_NAME == unLoc (ideclName decl)
  = return ()            -- Note [Do not warn about Prelude hiding]

  | null used   = addWarnAt (Reason flag) loc msg1 -- Nothing used; drop entire decl
  | null unused = return ()             -- Everything imported is used; nop
  | otherwise   = addWarnAt (Reason flag) loc msg2 -- Some imports are unused
  where
    msg1 = vcat [pp_herald <+> quotes pp_mod <+> pp_not_used,
                 nest 2 (text "except perhaps to import instances from"
                          <+> quotes pp_mod),
                 text "To import instances alone, use:"
                          <+> text "import" <+> pp_mod <> parens Outputable.empty ]
    msg2 = sep [pp_herald <+> quotes sort_unused,
                text "from module" <+> quotes pp_mod <+> pp_not_used]
    pp_herald   = text "The" <+> pp_qual <+> text "import of"
    pp_qual
      | ideclQualified decl = text "qualified"
      | otherwise           = Outputable.empty
    pp_mod      = ppr (unLoc (ideclName decl))
    pp_not_used = text "is redundant"

    -- For a record-field selector, show its parent type alongside
    ppr_possible_field n = case lookupNameEnv fld_env n of
                             Just (fld, p) -> ppr p <> parens (ppr fld)
                             Nothing       -> ppr n

    -- Print unused names in a deterministic (lexicographic) order
    sort_unused = pprWithCommas ppr_possible_field $
                  sortBy (comparing nameOccName) unused
{-
Note [Do not warn about Prelude hiding]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We do not warn about
import Prelude hiding( x, y )
because even if nothing else from Prelude is used, it may be essential to hide
x,y to avoid name-shadowing warnings. Example (Trac #9061)
import Prelude hiding( log )
f x = log where log = ()
Note [Printing minimal imports]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
To print the minimal imports we walk over the user-supplied import
decls, and simply trim their import lists. NB that
* We do *not* change the 'qualified' or 'as' parts!
* We do not discard a decl altogether; we might need instances
from it. Instead we just trim to an empty import list
-}
printMinimalImports :: [ImportDeclUsage] -> RnM ()
-- See Note [Printing minimal imports]
-- Writes <ModuleName>.imports (into the dump dir, if set) containing
-- each user import with its list trimmed to what is actually used.
printMinimalImports imports_w_usage
  = do { imports' <- mapM mk_minimal imports_w_usage
       ; this_mod <- getModule
       ; dflags   <- getDynFlags
       ; liftIO $
         do { h <- openFile (mkFilename dflags this_mod) WriteMode
            ; printForUser dflags h neverQualify (vcat (map ppr imports')) }
              -- The neverQualify is important.  We are printing Names
              -- but they are in the context of an 'import' decl, and
              -- we never qualify things inside there
              -- E.g.   import Blag( f, b )
              -- not    import Blag( Blag.f, Blag.g )!
       }
  where
    mkFilename dflags this_mod
      | Just d <- dumpDir dflags = d </> basefn
      | otherwise                = basefn
      where
        basefn = moduleNameString (moduleName this_mod) ++ ".imports"

    -- Trim one import decl's list; a fully-used explicit list is kept as-is
    mk_minimal (L l decl, used, unused)
      | null unused
      , Just (False, _) <- ideclHiding decl
      = return (L l decl)
      | otherwise
      = do { let ImportDecl { ideclName    = L _ mod_name
                            , ideclSource  = is_boot
                            , ideclPkgQual = mb_pkg } = decl
           ; iface <- loadSrcInterface doc mod_name is_boot (fmap sl_fs mb_pkg)
           ; let lies = map (L l) (concatMap (to_ie iface) used)
           ; return (L l (decl { ideclHiding = Just (False, L l lies) })) }
      where
        doc = text "Compute minimal imports for" <+> ppr decl

    to_ie :: ModIface -> AvailInfo -> [IE Name]
    -- The main trick here is that if we're importing all the constructors
    -- we want to say "T(..)", but if we're importing only a subset we want
    -- to say "T(A,B,C)".  So we have to find out what the module exports.
    to_ie _ (Avail _ n)
       = [IEVar (noLoc n)]
    to_ie _ (AvailTC n [m] [])
       | n==m = [IEThingAbs (noLoc n)]
    to_ie iface (AvailTC n ns fs)
      = case [(xs,gs) | AvailTC x xs gs <- mi_exports iface
                      , x == n
                      , x `elem` xs    -- Note [Partial export]
                      ] of
          [xs] | all_used xs -> [IEThingAll (noLoc n)]
               | otherwise   -> [IEThingWith (noLoc n) NoIEWildcard
                                    (map noLoc (filter (/= n) ns))
                                    (map noLoc fs)]
                                -- Note [Overloaded field import]
          _other | all_non_overloaded fs
                             -> map (IEVar . noLoc) $ ns ++ map flSelector fs
                 | otherwise -> [IEThingWith (noLoc n) NoIEWildcard
                                    (map noLoc (filter (/= n) ns)) (map noLoc fs)]
      where
        fld_lbls = map flLabel fs

        all_used (avail_occs, avail_flds)
          = all (`elem` ns) avail_occs
            && all (`elem` fld_lbls) (map flLabel avail_flds)

        all_non_overloaded = all (not . flIsOverloaded)
{-
Note [Partial export]
~~~~~~~~~~~~~~~~~~~~~
Suppose we have
module A( op ) where
class C a where
op :: a -> a
module B where
import A
f = ..op...
Then the minimal import for module B is
import A( op )
not
import A( C( op ) )
which we would usually generate if C was exported from B. Hence
the (x `elem` xs) test when deciding what to generate.
Note [Overloaded field import]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
On the other hand, if we have
{-# LANGUAGE DuplicateRecordFields #-}
module A where
data T = MkT { foo :: Int }
module B where
import A
f = ...foo...
then the minimal import for module B must be
import A ( T(foo) )
because when DuplicateRecordFields is enabled, field selectors are
not in scope without their enclosing datatype.
************************************************************************
* *
\subsection{Errors}
* *
************************************************************************
-}
-- | Error for a qualified name occurring in an import item,
-- e.g. @import M ( N.f )@.
qualImportItemErr :: RdrName -> SDoc
qualImportItemErr rdr_name
  = hang (text "Illegal qualified name in import item:") 2 (ppr rdr_name)
-- | Generic "Module M does not export X" import error.
badImportItemErrStd :: ModIface -> ImpDeclSpec -> IE RdrName -> SDoc
badImportItemErrStd iface decl_spec ie
  = sep [ text "Module"
        , quotes (ppr (is_mod decl_spec))
        , boot_doc
        , text "does not export"
        , quotes (ppr ie) ]
  where
    boot_doc | mi_boot iface = text "(hi-boot interface)"
             | otherwise     = Outputable.empty
-- | Error for importing a data constructor on its own, suggesting the
-- two legal spellings via its parent type: @M( T(C) )@ or @M( T(..) )@.
badImportItemErrDataCon :: OccName -> ModIface -> ImpDeclSpec -> IE RdrName -> SDoc
badImportItemErrDataCon dataType_occ iface decl_spec ie
  = vcat [ text "In module"
             <+> quotes (ppr (is_mod decl_spec))
             <+> boot_doc <> colon
         , nest 2 $ quotes con
             <+> text "is a data constructor of"
             <+> quotes ty
         , text "To import it use"
         , nest 2 $ quotes (text "import")
             <+> ppr (is_mod decl_spec)
             <> spaced_parens (ty <> spaced_parens con)
         , text "or"
         , nest 2 $ quotes (text "import")
             <+> ppr (is_mod decl_spec)
             <> spaced_parens (ty <> text "(..)")
         ]
  where
    con_occ = rdrNameOcc $ ieName ie
    con     = parenSymOcc con_occ (ppr con_occ)
    ty      = parenSymOcc dataType_occ (ppr dataType_occ)
    boot_doc | mi_boot iface = text "(hi-boot interface)"
             | otherwise     = Outputable.empty
    spaced_parens d = parens (space <> d <> space)  -- T( f,g )
-- | Choose the most helpful "bad import item" message: the special
-- data-constructor message if the offending name is a data constructor
-- of some exported type, otherwise the standard one.
badImportItemErr :: ModIface -> ImpDeclSpec -> IE RdrName -> [AvailInfo] -> SDoc
badImportItemErr iface decl_spec ie avails
  | Just con <- find is_con_avail avails
  = badImportItemErrDataCon (nameOccName (availName con)) iface decl_spec ie
  | otherwise
  = badImportItemErrStd iface decl_spec ie
  where
    -- Does this avail contain a data constructor with the imported name?
    is_con_avail (AvailTC _ ns _) =
      maybe False isDataConName (find matches_import ns)
    is_con_avail _ = False

    matches_import n = imported_fs == occNameFS (nameOccName n)
    imported_fs      = occNameFS (rdrNameOcc (ieName ie))
-- | Error for a syntactically illegal import item.
illegalImportItemErr :: SDoc
illegalImportItemErr = text "Illegal import item"

-- | Warning for a dodgy T(..) import (T has no in-scope children).
dodgyImportWarn :: RdrName -> SDoc
dodgyImportWarn item = dodgyMsg (text "import") item

-- | Warning for a dodgy T(..) export (T has no in-scope children).
dodgyExportWarn :: Name -> SDoc
dodgyExportWarn item = dodgyMsg (text "export") item

-- | Shared message body for 'dodgyImportWarn' and 'dodgyExportWarn';
-- 'kind' is the word "import" or "export".
dodgyMsg :: (OutputableBndr n, HasOccName n) => SDoc -> n -> SDoc
dodgyMsg kind tc
  = sep [ text "The" <+> kind <+> ptext (sLit "item")
                     <+> quotes (ppr (IEThingAll (noLoc tc)))
                     <+> text "suggests that",
          quotes (ppr tc) <+> text "has (in-scope) constructors or class methods,",
          text "but it has none" ]

-- | Error for an export item whose listed children are not visible here.
exportItemErr :: IE RdrName -> SDoc
exportItemErr export_item
  = sep [ text "The export item" <+> quotes (ppr export_item),
          text "attempts to export constructors or class methods that are not visible here" ]
exportClashErr :: GlobalRdrEnv -> Name -> Name -> IE RdrName -> IE RdrName
               -> MsgDoc
-- Error: two export items export *different* entities with the same
-- OccName.  The two (item, name) pairs are printed in source order.
exportClashErr global_env name1 name2 ie1 ie2
  = vcat [ text "Conflicting exports for" <+> quotes (ppr occ) <> colon
         , ppr_export ie1' name1'
         , ppr_export ie2' name2' ]
  where
    occ = nameOccName name1
    ppr_export ie name = nest 3 (hang (quotes (ppr ie) <+> text "exports" <+>
                                       quotes (ppr name))
                                    2 (pprNameProvenance (get_gre name)))

    -- get_gre finds a GRE for the Name, so that we can show its provenance
    get_gre name
        = case lookupGRE_Name global_env name of
             Just gre -> gre
             Nothing  -> pprPanic "exportClashErr" (ppr name)
    get_loc name = greSrcSpan (get_gre name)
    -- Order the two clashing exports by source location
    (name1', ie1', name2', ie2') = if get_loc name1 < get_loc name2
                                   then (name1, ie1, name2, ie2)
                                   else (name2, ie2, name1, ie1)
addDupDeclErr :: [GlobalRdrElt] -> TcRn ()
-- Report multiple declarations of the same entity, attaching the error
-- to the location of the *later* declaration.
addDupDeclErr [] = panic "addDupDeclErr: empty list"
addDupDeclErr gres@(gre : _)
  = addErrAt (getSrcSpan (last names_by_loc)) msg
  where
    -- NB. print the OccName, not the Name, because the
    -- latter might not be in scope in the RdrEnv and so will
    -- be printed qualified.
    occ          = nameOccName (gre_name gre)
    names_by_loc = sortWith nameSrcLoc (map gre_name gres)
    msg = vcat [ text "Multiple declarations of" <+> quotes (ppr occ),
                 text "Declared at:" <+>
                   vcat (map (ppr . nameSrcLoc) names_by_loc) ]
-- | Warning: the same entity is exported by two different export items.
dupExportWarn :: OccName -> IE RdrName -> IE RdrName -> SDoc
dupExportWarn occ_name ie1 ie2
  = hsep [quotes (ppr occ_name),
          text "is exported by", quotes (ppr ie1),
          text "and",            quotes (ppr ie2)]

-- | Warning: 'module M' appears more than once in an export list.
dupModuleExport :: ModuleName -> SDoc
dupModuleExport mod
  = hsep [text "Duplicate",
          quotes (text "Module" <+> ppr mod),
          text "in export list"]

-- | Error: 'module M' exported but M is not imported.
moduleNotImported :: ModuleName -> SDoc
moduleNotImported mod
  = text "The export item `module" <+> ppr mod <>
    text "' is not imported"

-- | Warning: 'module M' in the export list exports nothing.
nullModuleExport :: ModuleName -> SDoc
nullModuleExport mod
  = text "The export item `module" <+> ppr mod <> ptext (sLit "' exports nothing")

-- | Warning: the named module lacks an explicit import list.
missingImportListWarn :: ModuleName -> SDoc
missingImportListWarn mod
  = text "The module" <+> quotes (ppr mod) <+> ptext (sLit "does not have an explicit import list")

-- | Warning: the given import item lacks an explicit sub-list.
missingImportListItem :: IE RdrName -> SDoc
missingImportListItem ie
  = text "The import item" <+> quotes (ppr ie) <+> ptext (sLit "does not have an explicit import list")

-- | Render a WARNING or DEPRECATED pragma attached to an imported module.
moduleWarn :: ModuleName -> WarningTxt -> SDoc
moduleWarn mod (WarningTxt _ txt)
  = sep [ text "Module" <+> quotes (ppr mod) <> ptext (sLit ":"),
          nest 2 (vcat (map (ppr . sl_fs . unLoc) txt)) ]
moduleWarn mod (DeprecatedTxt _ txt)
  = sep [ text "Module" <+> quotes (ppr mod)
                        <+> text "is deprecated:",
          nest 2 (vcat (map (ppr . sl_fs . unLoc) txt)) ]

-- | Error: package-qualified import without -XPackageImports enabled.
packageImportErr :: SDoc
packageImportErr
  = text "Package-qualified imports are not enabled; use PackageImports"

-- This data decl will parse OK
--      data T = a Int
-- treating "a" as the constructor.
-- It is really hard to make the parser spot this malformation.
-- So the renamer has to check that the constructor is legal
--
-- We can get an operator as the constructor, even in the prefix form:
--      data T = :% Int Int
-- from interface files, which always print in prefix form

-- | Check that a parsed constructor name really is a data-constructor
-- name (see malformed-declaration note above).
checkConName :: RdrName -> TcRn ()
checkConName name = checkErr (isRdrDataCon name) (badDataCon name)

-- | Error emitted by 'checkConName'.
badDataCon :: RdrName -> SDoc
badDataCon name
   = hsep [text "Illegal data constructor name", quotes (ppr name)]
| vTurbine/ghc | compiler/rename/RnNames.hs | bsd-3-clause | 88,959 | 3 | 29 | 28,057 | 16,638 | 8,597 | 8,041 | -1 | -1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeFamilies #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Dense.Generic
-- Copyright : (c) Christopher Chalmers
-- License : BSD3
--
-- Maintainer : Christopher Chalmers
-- Stability : provisional
-- Portability : non-portable
--
-- This module provides generic functions over multidimensional arrays.
-----------------------------------------------------------------------------
module Data.Dense.Generic
(
-- * Array types
Array
, Shape (..)
, BArray
, UArray
, SArray
, PArray
-- * Layout of an array
, HasLayout (..)
, Layout
-- ** Extracting size
, extent
, size
-- ** Folds over indexes
, indexes
, indexesFrom
, indexesBetween
-- * Underlying vector
, vector
-- ** Traversals
, values
, values'
, valuesBetween
-- * Construction
-- ** Flat arrays
, flat
, fromList
-- ** From lists
, fromListInto
, fromListInto_
-- ** From vectors
, fromVectorInto
, fromVectorInto_
-- ** Initialisation
, replicate
, generate
, linearGenerate
-- ** Monadic initialisation
, create
, createT
, replicateM
, generateM
, linearGenerateM
-- * Functions on arrays
-- ** Empty arrays
, empty
, null
-- ** Indexing
, (!)
, (!?)
, unsafeIndex
, linearIndex
, unsafeLinearIndex
-- *** Monadic indexing
, indexM
, unsafeIndexM
, linearIndexM
, unsafeLinearIndexM
-- ** Modifying arrays
-- ** Bulk updates
, (//)
-- ** Accumulations
, accum
-- ** Mapping
, map
, imap
-- * Zipping
-- ** Tuples
, Data.Dense.Generic.zip
, Data.Dense.Generic.zip3
-- ** Zip with function
, zipWith
, zipWith3
, izipWith
, izipWith3
-- ** Slices
-- *** Matrix
, ixRow
, rows
, ixColumn
, columns
-- *** 3D
, ixPlane
, planes
, flattenPlane
-- *** Ordinals
, unsafeOrdinals
-- * Mutable
, MArray
, M.BMArray
, M.UMArray
, M.SMArray
, M.PMArray
, thaw
, freeze
, unsafeThaw
, unsafeFreeze
-- * Delayed
, Delayed
-- ** Generating delayed
, delayed
, seqDelayed
, delay
, manifest
, seqManifest
, genDelayed
, indexDelayed
, affirm
, seqAffirm
-- * Focused
, Focused
-- ** Generating focused
, focusOn
, unfocus
, unfocused
, extendFocus
-- ** Focus location
, locale
, shiftFocus
-- ** Boundary
, Boundary (..)
, peekB
, peeksB
, peekRelativeB
-- * Fusion
-- ** Streams
, streamGenerate
, streamGenerateM
, streamIndexes
-- ** Bundles
, bundleGenerate
, bundleGenerateM
, bundleIndexes
) where
#if __GLASGOW_HASKELL__ <= 708
import Control.Applicative (Applicative, pure, (<*>))
import Data.Foldable (Foldable)
#endif
import Control.Comonad
import Control.Comonad.Store
import Control.Lens hiding (imap)
import Control.Monad (liftM)
import Control.Monad.Primitive
import Control.Monad.ST
import qualified Data.Foldable as F
import Data.Functor.Classes
import qualified Data.List as L
import Data.Maybe (fromMaybe)
import qualified Data.Traversable as T
import Data.Typeable
import qualified Data.Vector as B
import Data.Vector.Fusion.Bundle (MBundle)
import qualified Data.Vector.Fusion.Bundle as Bundle
import qualified Data.Vector.Fusion.Bundle.Monadic as MBundle
import Data.Vector.Fusion.Bundle.Size
import Data.Vector.Fusion.Stream.Monadic (Step (..), Stream (..))
import qualified Data.Vector.Fusion.Stream.Monadic as Stream
import Data.Vector.Generic (Vector)
import qualified Data.Vector.Generic as G
import qualified Data.Vector.Generic.Mutable as GM
import qualified Data.Vector.Primitive as P
import qualified Data.Vector.Storable as S
import qualified Data.Vector.Unboxed as U
import Linear hiding (vector)
import Data.Dense.Base
import Data.Dense.Index
import Data.Dense.Mutable (MArray (..))
import qualified Data.Dense.Mutable as M
import Prelude hiding (map, null, replicate,
zipWith, zipWith3)
-- Aliases -------------------------------------------------------------

-- | 'Boxed' array.
type BArray = Array B.Vector

-- | 'Data.Vector.Unboxed.Unbox'ed array.
type UArray = Array U.Vector

-- | 'Foreign.Storable.Storable' array.
type SArray = Array S.Vector

-- | 'Data.Primitive.Types.Prim' array.
type PArray = Array P.Vector
-- Lenses --------------------------------------------------------------

-- | Same as 'values' but restrictive in the vector type.  Useful when
--   type inference has nothing else to pin down the element vector.
values' :: (Shape f, Vector v a, Vector v b)
        => IndexedTraversal (f Int) (Array v f a) (Array v f b) a b
values' = values
{-# INLINE values' #-}

-- | Traverse over the 'values' between two indexes.  Built on
--   'unsafeOrdinals', so no bounds checking is performed on the range.
valuesBetween :: (Shape f, Vector v a) => f Int -> f Int -> IndexedTraversal' (f Int) (Array v f a) a
valuesBetween a b = unsafeOrdinals (toListOf (shapeIndexesFrom a) b)
{-# INLINE valuesBetween #-}
-- | 1D arrays are just vectors. You are free to change the length of
-- the vector when going 'over' this 'Iso' (unlike 'linear').
--
-- Note that 'V1' arrays are an instance of 'Vector' so you can use
-- any of the functions in "Data.Vector.Generic" on them without
-- needing to convert.
flat :: Vector w b => Iso (Array v V1 a) (Array w V1 b) (v a) (w b)
-- The reverse direction rebuilds the layout from the vector's length,
-- which is why the length may change freely under 'over'.
flat = iso (\(Array _ v) -> v) (\v -> Array (V1 $ G.length v) v)
{-# INLINE flat #-}

-- Constructing vectors ------------------------------------------------

-- | Construct a flat array from a list. (This is just 'G.fromList' from
-- 'Data.Vector.Generic'.)
fromList :: Vector v a => [a] -> Array v V1 a
fromList = G.fromList
{-# INLINE fromList #-}
-- | O(n) Convert the first @n@ elements of a list to an Array with the
-- given shape. Returns 'Nothing' if the list does not supply enough
-- elements to fill the whole shape.
fromListInto :: (Shape f, Vector v a) => Layout f -> [a] -> Maybe (Array v f a)
fromListInto l xs =
  let total = shapeSize l
      vec   = G.fromListN total xs
  in if G.length vec == total
       then Just (Array l vec)
       else Nothing
{-# INLINE fromListInto #-}
-- | O(n) Convert the first @n@ elements of a list to an Array with the
-- given shape. Throw an error if the list is not long enough.
fromListInto_ :: (Shape f, Vector v a) => Layout f -> [a] -> Array v f a
fromListInto_ l as = fromMaybe err $ fromListInto l as
  where
    -- Only reached when 'fromListInto' returns Nothing, i.e. the list
    -- was shorter than 'shapeSize l'.
    err = error $ "fromListInto_: shape " ++ showShape l ++ " is too large for list"
{-# INLINE fromListInto_ #-}
-- | Create an array from a 'vector' and a 'layout'. Return 'Nothing' if
-- the vector is not the right shape (its length differs from the
-- layout's 'shapeSize' in either direction).
fromVectorInto :: (Shape f, Vector v a) => Layout f -> v a -> Maybe (Array v f a)
fromVectorInto l v
  | shapeSize l == G.length v = Just $! Array l v
  | otherwise                 = Nothing
{-# INLINE fromVectorInto #-}
-- | Create an array from a 'vector' and a 'layout'. Throws an error if
-- the vector is not the right shape.
fromVectorInto_ :: (Shape f, Vector v a) => Layout f -> v a -> Array v f a
fromVectorInto_ l as = fromMaybe err $ fromVectorInto l as
  where
    -- 'fromVectorInto' rejects any size mismatch, not only an oversized
    -- shape, so the message reports a mismatch rather than "too large".
    err = error $ "fromVectorInto_: shape " ++ showShape l ++ " does not match the size of the vector"
{-# INLINE fromVectorInto_ #-}
-- | The empty 'Array' with a 'zero' shape and an empty backing vector.
empty :: (Vector v a, Additive f) => Array v f a
empty = Array zero G.empty
{-# INLINE empty #-}
-- | Test if the array is 'empty', i.e. every extent of its shape is
--   zero.
null :: Foldable f => Array v f a -> Bool
null (Array sh _) = not (F.any (/= 0) sh)
{-# INLINE null #-}
-- Indexing ------------------------------------------------------------

-- | Index an element of an array. Throws 'IndexOutOfBounds' if the
-- index is out of bounds.
(!) :: (Shape f, Vector v a) => Array v f a -> f Int -> a
(!) (Array l v) i = boundsCheck l i $ G.unsafeIndex v (shapeToIndex l i)
{-# INLINE (!) #-}

-- | Safe index of an element. Returns 'Nothing' instead of throwing
-- when the index is outside the layout.
(!?) :: (Shape f, Vector v a) => Array v f a -> f Int -> Maybe a
Array l v !? i
  | shapeInRange l i = Just $! G.unsafeIndex v (shapeToIndex l i)
  | otherwise        = Nothing
{-# INLINE (!?) #-}

-- | Index an element of an array without bounds checking.
unsafeIndex :: (Shape f, Vector v a) => Array v f a -> f Int -> a
unsafeIndex (Array l v) i = G.unsafeIndex v (shapeToIndex l i)
{-# INLINE unsafeIndex #-}

-- | Index an element of an array while ignoring its shape, indexing
-- straight into the underlying vector ('G.!' performs the bounds check).
linearIndex :: Vector v a => Array v f a -> Int -> a
linearIndex (Array _ v) i = v G.! i
{-# INLINE linearIndex #-}

-- | Index an element of an array while ignoring its shape, without
-- bounds checking.
unsafeLinearIndex :: Vector v a => Array v f a -> Int -> a
unsafeLinearIndex (Array _ v) i = G.unsafeIndex v i
{-# INLINE unsafeLinearIndex #-}
-- Monadic indexing ----------------------------------------------------

-- | /O(1)/ Indexing in a monad.
--
-- The monad allows operations to be strict in the vector when necessary.
-- Suppose vector copying is implemented like this:
--
-- > copy mv v = ... write mv i (v ! i) ...
--
-- For lazy vectors, @v ! i@ would not be evaluated which means that
-- @mv@ would unnecessarily retain a reference to @v@ in each element
-- written.
--
-- With 'indexM', copying can be implemented like this instead:
--
-- > copy mv v = ... do
-- >   x <- indexM v i
-- >   write mv i x
--
-- Here, no references to @v@ are retained because indexing (but /not/
-- the elements) is evaluated eagerly.
--
-- Throws an error if the index is out of range.
indexM :: (Shape f, Vector v a, Monad m) => Array v f a -> f Int -> m a
indexM (Array l v) i = boundsCheck l i $ G.unsafeIndexM v (shapeToIndex l i)
{-# INLINE indexM #-}

-- | /O(1)/ Indexing in a monad without bounds checks. See 'indexM' for an
-- explanation of why this is useful.
unsafeIndexM :: (Shape f, Vector v a, Monad m) => Array v f a -> f Int -> m a
unsafeIndexM (Array l v) i = G.unsafeIndexM v (shapeToIndex l i)
{-# INLINE unsafeIndexM #-}

-- | /O(1)/ Indexing in a monad. Throws an error if the index is out of
-- range.
linearIndexM :: (Shape f, Vector v a, Monad m) => Array v f a -> Int -> m a
-- The linear index is converted back to a shape index purely so the
-- standard 'boundsCheck' machinery can report an out-of-range error.
linearIndexM (Array l v) i = boundsCheck l (shapeFromIndex l i) $ G.unsafeIndexM v i
{-# INLINE linearIndexM #-}

-- | /O(1)/ Indexing in a monad without bounds checks. See 'indexM' for an
-- explanation of why this is useful.
unsafeLinearIndexM :: (Vector v a, Monad m) => Array v f a -> Int -> m a
unsafeLinearIndexM (Array _ v) = G.unsafeIndexM v
{-# INLINE unsafeLinearIndexM #-}
-- Initialisation ------------------------------------------------------

-- | Execute the monadic action and freeze the resulting array.
create :: Vector v a => (forall s. ST s (MArray (G.Mutable v) f s a)) -> Array v f a
-- The 'seq' forces the ST action to WHNF before 'runST' so the
-- computation is not retained as a thunk.
create m = m `seq` runST (m >>= unsafeFreeze)
{-# INLINE create #-}

-- | Execute the monadic action and freeze the resulting array.
--   Traversable variant of 'create' for actions producing several
--   mutable arrays.
createT
  :: (Vector v a, Traversable t)
  => (forall s . ST s (t (MArray (G.Mutable v) f s a)))
  -> t (Array v f a)
createT m = m `seq` runST (m >>= T.mapM unsafeFreeze)
{-# INLINE createT #-}
-- | O(n) Array of the given shape with the same value in each position.
--   Falls back to 'empty' when the shape has no elements.
replicate :: (Shape f, Vector v a) => f Int -> a -> Array v f a
replicate l a
  | n > 0     = Array l $ G.replicate n a
  | otherwise = empty
  where n = shapeSize l
{-# INLINE replicate #-}

-- | O(n) Construct an array of the given shape by applying the
-- function to each /linear/ index.
linearGenerate :: (Shape f, Vector v a) => Layout f -> (Int -> a) -> Array v f a
linearGenerate l f
  | n > 0     = Array l $ G.generate n f
  | otherwise = empty
  where n = shapeSize l
{-# INLINE linearGenerate #-}

-- | O(n) Construct an array of the given shape by applying the
-- function to each index.
generate :: (Shape f, Vector v a) => Layout f -> (f Int -> a) -> Array v f a
-- NOTE(review): no @n > 0@ guard here, unlike 'linearGenerate' —
-- presumably 'bundleGenerate' produces an empty bundle for empty
-- layouts; confirm for layouts with a single zero extent.
generate l f = Array l $ G.unstream (bundleGenerate l f)
{-# INLINE generate #-}

-- Monadic initialisation ----------------------------------------------

-- | O(n) Construct an array of the given shape by filling each position
-- with the monadic value.
replicateM :: (Monad m, Shape f, Vector v a) => Layout f -> m a -> m (Array v f a)
replicateM l a
  | n > 0     = Array l `liftM` G.replicateM n a
  | otherwise = return empty
  where n = shapeSize l
{-# INLINE replicateM #-}

-- | O(n) Construct an array of the given shape by applying the monadic
-- function to each index.
generateM :: (Monad m, Shape f, Vector v a) => Layout f -> (f Int -> m a) -> m (Array v f a)
generateM l f = Array l `liftM` unstreamM (bundleGenerateM l f)
{-# INLINE generateM #-}

-- | O(n) Construct an array of the given shape by applying the monadic
-- function to each /linear/ index.
linearGenerateM :: (Monad m, Shape f, Vector v a) => Layout f -> (Int -> m a) -> m (Array v f a)
linearGenerateM l f
  | n > 0     = Array l `liftM` G.generateM n f
  | otherwise = return empty
  where n = shapeSize l
{-# INLINE linearGenerateM #-}
-- Modifying -----------------------------------------------------------

-- | /O(n)/ Map a function over an array.  The layout is unchanged.
map :: (Vector v a, Vector v b) => (a -> b) -> Array v f a -> Array v f b
map f (Array l a) = Array l (G.map f a)
{-# INLINE map #-}

-- | /O(n)/ Apply a function to every element of a vector and its index.
imap :: (Shape f, Vector v a, Vector v b) => (f Int -> a -> b) -> Array v f a -> Array v f b
-- Fused: the element stream is zipped with a stream of shape indexes
-- rather than computing 'shapeFromIndex' per element.
imap f (Array l v) =
  Array l $ (G.unstream . Bundle.inplace (Stream.zipWith f (streamIndexes l)) id . G.stream) v
{-# INLINE imap #-}

-- Bulk updates --------------------------------------------------------

-- | For each pair (i,a) from the list, replace the array element at
-- position i by a.  Shape indexes are translated to linear indexes and
-- handed to 'G.//'.  NOTE(review): no @INLINE@ pragma, unlike the
-- neighbouring definitions — confirm whether that is intentional.
(//) :: (G.Vector v a, Shape f) => Array v f a -> [(f Int, a)] -> Array v f a
Array l v // xs = Array l $ v G.// over (each . _1) (shapeToIndex l) xs
-- Accumulation --------------------------------------------------------

-- | /O(m+n)/ For each pair @(i,b)@ from the list, replace the array element
-- @a@ at position @i@ by @f a b@.
--
accum :: (Shape f, Vector v a)
      => (a -> b -> a)  -- ^ accumulating function @f@
      -> Array v f a    -- ^ initial array
      -> [(f Int, b)]   -- ^ list of index/value pairs (of length @n@)
      -> Array v f a
accum f (Array l v) us = Array l $ G.accum f v (over (mapped . _1) (shapeToIndex l) us)
{-# INLINE accum #-}
------------------------------------------------------------------------
-- Streams
------------------------------------------------------------------------

-- Copied from Data.Vector.Generic because it isn't exported from there.
-- Generic fallback: materialise the bundle as a list, then unstream.
unstreamM :: (Monad m, Vector v a) => Bundle.MBundle m u a -> m (v a)
{-# INLINE [1] unstreamM #-}
unstreamM s = do
  xs <- MBundle.toList s
  return $ G.unstream $ Bundle.unsafeFromList (MBundle.size s) xs

-- Faster path for primitive monads: fill a mutable vector directly.
unstreamPrimM :: (PrimMonad m, Vector v a) => Bundle.MBundle m u a -> m (v a)
{-# INLINE [1] unstreamPrimM #-}
unstreamPrimM s = GM.munstream s >>= G.unsafeFreeze

-- FIXME: the next two functions are only necessary for the specialisations
unstreamPrimM_IO :: Vector v a => Bundle.MBundle IO u a -> IO (v a)
{-# INLINE unstreamPrimM_IO #-}
unstreamPrimM_IO = unstreamPrimM

unstreamPrimM_ST :: Vector v a => Bundle.MBundle (ST s) u a -> ST s (v a)
{-# INLINE unstreamPrimM_ST #-}
unstreamPrimM_ST = unstreamPrimM

-- Rewrite 'unstreamM' to the primitive path when the monad is known to
-- be IO or ST.
{-# RULES
"unstreamM[IO]" unstreamM = unstreamPrimM_IO
"unstreamM[ST]" unstreamM = unstreamPrimM_ST #-}
-- | Generate a stream from a 'Layout''s indices.
streamGenerate :: (Monad m, Shape f) => Layout f -> (f Int -> a) -> Stream m a
streamGenerate l f = streamGenerateM l (return . f)
{-# INLINE streamGenerate #-}

-- | Generate a stream from a 'Layout''s indices, with a monadic
-- generator.  The stream state is @Just i@ for the next index to
-- produce, or @Nothing@ when exhausted.
--
-- NOTE(review): emptiness is tested by comparing the whole layout to
-- 'zero'; a layout with a single zero extent (e.g. @V2 0 5@) is not
-- caught here — presumably 'shapeStep' terminates for it; confirm.
streamGenerateM :: (Monad m, Shape f) => Layout f -> (f Int -> m a) -> Stream m a
streamGenerateM l f = l `seq` Stream step (if eq1 l zero then Nothing else Just zero)
  where
    {-# INLINE [0] step #-}
    step (Just i) = do
      x <- f i
      return $ Yield x (shapeStep l i)
    step Nothing  = return Done
{-# INLINE [1] streamGenerateM #-}

-- | Stream a sub-layout of an 'Array'. The layout should be shapeInRange of
-- the array's layout, this is not checked.
unsafeStreamSub :: (Monad m, Shape f, G.Vector v a) => Layout f -> Array v f a -> Stream m a
unsafeStreamSub l2 (Array l1 v) = streamGenerateM l2 $ \x -> G.basicUnsafeIndexM v (shapeToIndex l1 x)
{-# INLINE unsafeStreamSub #-}

-- | Stream a sub-layout of an 'Array'.  The layout is first clipped to
-- the array's own layout, so this is safe.
streamSub :: (Monad m, Shape f, G.Vector v a) => Layout f -> Array v f a -> Stream m a
streamSub l2 arr@(Array l1 _) = unsafeStreamSub (shapeIntersect l1 l2) arr
{-# INLINE streamSub #-}

-- | Make a stream of the indexes of a 'Layout'.
streamIndexes :: (Monad m, Shape f) => Layout f -> Stream m (f Int)
streamIndexes l = Stream step (if eq1 l zero then Nothing else Just zero)
  where
    {-# INLINE [0] step #-}
    step (Just i) = return $ Yield i (shapeStep l i)
    step Nothing  = return Done
{-# INLINE [1] streamIndexes #-}
------------------------------------------------------------------------
-- Bundles
------------------------------------------------------------------------

-- | Generate a bundle from 'Layout' indices.
bundleGenerate :: (Monad m, Shape f) => Layout f -> (f Int -> a) -> MBundle m v a
bundleGenerate l f = bundleGenerateM l (return . f)
{-# INLINE bundleGenerate #-}

-- | Generate a bundle from 'Layout' indices with a monadic generator.
--   The bundle carries an 'Exact' size hint so unstreaming can allocate
--   precisely.
bundleGenerateM :: (Monad m, Shape f) => Layout f -> (f Int -> m a) -> MBundle m v a
bundleGenerateM l f = MBundle.fromStream (streamGenerateM l f) (Exact (shapeSize l))
{-# INLINE [1] bundleGenerateM #-}

-- | Generate a bundle of indexes for the given 'Layout'.
bundleIndexes :: (Monad m, Shape f) => Layout f -> MBundle m v (f Int)
bundleIndexes l = MBundle.fromStream (streamIndexes l) (Exact (shapeSize l))
{-# INLINE [1] bundleIndexes #-}
------------------------------------------------------------------------
-- Zipping
------------------------------------------------------------------------

-- Tuple zip -----------------------------------------------------------

-- | Zip two arrays element wise. If the arrays don't have the same
-- shape, the new array will be the intersection of the two shapes.
zip :: (Shape f, Vector v a, Vector v b, Vector v (a,b))
    => Array v f a
    -> Array v f b
    -> Array v f (a,b)
zip = zipWith (,)

-- | Zip three arrays element wise. If the arrays don't have the same
-- shape, the new array will be the intersection of the shapes.
zip3 :: (Shape f, Vector v a, Vector v b, Vector v c, Vector v (a,b,c))
     => Array v f a
     -> Array v f b
     -> Array v f c
     -> Array v f (a,b,c)
zip3 = zipWith3 (,,)

-- Zip with function ---------------------------------------------------

-- | Zip two arrays using the given function. If the arrays don't have
-- the same shape, the new array will be the intersection of the two
-- shapes.
zipWith :: (Shape f, Vector v a, Vector v b, Vector v c)
        => (a -> b -> c)
        -> Array v f a
        -> Array v f b
        -> Array v f c
zipWith f a1@(Array l1 v1) a2@(Array l2 v2)
  -- Fast path: identical layouts zip the backing vectors directly.
  | eq1 l1 l2 = Array l1 $ G.zipWith f v1 v2
  -- Otherwise stream each array restricted to the intersection.
  | otherwise = Array l' $ G.unstream $
      MBundle.fromStream (Stream.zipWith f (streamSub l' a1) (streamSub l' a2)) (Exact (shapeSize l'))
  where l' = shapeIntersect l1 l2
{-# INLINE zipWith #-}

-- | Zip three arrays using the given function. If the arrays don't
-- have the same shape, the new array will be the intersection of the
-- shapes.
zipWith3 :: (Shape f, Vector v a, Vector v b, Vector v c, Vector v d)
         => (a -> b -> c -> d)
         -> Array v f a
         -> Array v f b
         -> Array v f c
         -> Array v f d
zipWith3 f a1@(Array l1 v1) a2@(Array l2 v2) a3@(Array l3 v3)
  | eq1 l1 l2 &&
    eq1 l2 l3 = Array l1 $ G.zipWith3 f v1 v2 v3
  | otherwise = Array l' $ G.unstream $
      MBundle.fromStream (Stream.zipWith3 f (streamSub l' a1) (streamSub l' a2) (streamSub l' a3)) (Exact (shapeSize l'))
  where l' = shapeIntersect (shapeIntersect l1 l2) l3
{-# INLINE zipWith3 #-}

-- Indexed zipping -----------------------------------------------------

-- | Zip two arrays using the given function with access to the index.
-- If the arrays don't have the same shape, the new array will be the
-- intersection of the two shapes.
izipWith :: (Shape f, Vector v a, Vector v b, Vector v c)
         => (f Int -> a -> b -> c)
         -> Array v f a
         -> Array v f b
         -> Array v f c
izipWith f a1@(Array l1 v1) a2@(Array l2 v2)
  | eq1 l1 l2 = Array l1 $ G.unstream $ Bundle.zipWith3 f (bundleIndexes l1) (G.stream v1) (G.stream v2)
  | otherwise = Array l' $ G.unstream $
      MBundle.fromStream (Stream.zipWith3 f (streamIndexes l') (streamSub l' a1) (streamSub l' a2)) (Exact (shapeSize l'))
  where l' = shapeIntersect l1 l2
{-# INLINE izipWith #-}

-- | Zip three arrays using the given function with access to the index.
-- If the arrays don't have the same shape, the new array will be the
-- intersection of the shapes.
izipWith3 :: (Shape f, Vector v a, Vector v b, Vector v c, Vector v d)
          => (f Int -> a -> b -> c -> d)
          -> Array v f a
          -> Array v f b
          -> Array v f c
          -> Array v f d
izipWith3 f a1@(Array l1 v1) a2@(Array l2 v2) a3@(Array l3 v3)
  -- NOTE(review): only @l1 == l2@ is checked on the fast path; @l3@ is
  -- streamed at full length via 'G.stream' — confirm behaviour when
  -- @l3@ differs from @l1@.
  | eq1 l1 l2 = Array l1 $ G.unstream $ Bundle.zipWith4 f (bundleIndexes l1) (G.stream v1) (G.stream v2) (G.stream v3)
  | otherwise =
      Array l' $ G.unstream $ MBundle.fromStream
        (Stream.zipWith4 f (streamIndexes l') (streamSub l' a1) (streamSub l' a2) (streamSub l' a3)) (Exact (shapeSize l'))
  where l' = shapeIntersect (shapeIntersect l1 l2) l3
{-# INLINE izipWith3 #-}
------------------------------------------------------------------------
-- Slices
------------------------------------------------------------------------

-- $setup
-- >>> import Debug.SimpleReflect
-- >>> let m = fromListInto_ (V2 3 4) [a,b,c,d,e,f,g,h,i,j,k,l] :: BArray V2 Expr

-- | Indexed traversal over the rows of a matrix. Each row is an
-- efficient 'Data.Vector.Generic.slice' of the original vector.
--
-- >>> traverseOf_ rows print m
-- [a,b,c,d]
-- [e,f,g,h]
-- [i,j,k,l]
rows :: (Vector v a, Vector w b)
     => IndexedTraversal Int (Array v V2 a) (Array w V2 b) (v a) (w b)
-- Walks row @i@ starting at linear offset @a@, advancing by the row
-- length @y@ each step; the results are concatenated back together.
rows f (Array l@(V2 x y) v) = Array l . G.concat <$> go 0 0 where
  go i a | i >= x    = pure []
         | otherwise = (:) <$> indexed f i (G.slice a y v) <*> go (i+1) (a+y)
{-# INLINE rows #-}
-- | Affine traversal over a single row in a matrix.
--
-- >>> traverseOf_ rows print $ m & ixRow 1 . each *~ 2
-- [a,b,c,d]
-- [e * 2,f * 2,g * 2,h * 2]
-- [i,j,k,l]
--
-- The row vector should remain the same size to satisfy traversal
-- laws but give reasonable behaviour if the size differs:
--
-- >>> traverseOf_ rows print $ m & ixRow 1 .~ B.fromList [0,1]
-- [a,b,c,d]
-- [0,1,g,h]
-- [i,j,k,l]
--
-- >>> traverseOf_ rows print $ m & ixRow 1 .~ B.fromList [0..100]
-- [a,b,c,d]
-- [0,1,2,3]
-- [i,j,k,l]
ixRow :: Vector v a => Int -> IndexedTraversal' Int (Array v V2 a) (v a)
ixRow i f m@(Array (l@(V2 x y)) v)
  -- Fixed guard: the previous @y >= 0 && i < x@ never validated the row
  -- index itself, so a negative @i@ reached 'G.slice' with a negative
  -- offset.  Now mirrors 'ixColumn', while keeping the defensive
  -- @y >= 0@ check on the row length.
  | i >= 0 && i < x && y >= 0 = Array l . G.unsafeUpd v . L.zip [a..] . G.toList . G.take y <$> indexed f i (G.slice a y v)
  | otherwise                 = pure m
  where a = i * y   -- linear offset of the first element of row @i@
{-# INLINE ixRow #-}
-- | Indexed traversal over the columns of a matrix. Unlike 'rows', each
-- column is a new separate vector.
--
-- >>> traverseOf_ columns print m
-- [a,e,i]
-- [b,f,j]
-- [c,g,k]
-- [d,h,l]
--
-- >>> traverseOf_ rows print $ m & columns . indices odd . each .~ 0
-- [a,0,c,0]
-- [e,0,g,0]
-- [i,0,k,0]
--
-- The vectors should be the same size to be a valid traversal. If the
-- vectors are different sizes, the number of rows in the new array
-- will be the length of the smallest vector.
columns :: (Vector v a, Vector w b)
        => IndexedTraversal Int (Array v V2 a) (Array w V2 b) (v a) (w b)
-- Each column is materialised by 'getColumn'; the results are stitched
-- back into a matrix by 'transposeConcat'.
columns f m@(Array l@(V2 _ y) _) = transposeConcat l <$> go 0 where
  go j | j >= y    = pure []
       | otherwise = (:) <$> indexed f j (getColumn m j) <*> go (j+1)
{-# INLINE columns #-}
-- | Affine traversal over a single column in a matrix.
--
-- >>> traverseOf_ rows print $ m & ixColumn 2 . each +~ 1
-- [a,b,c + 1,d]
-- [e,f,g + 1,h]
-- [i,j,k + 1,l]
ixColumn :: Vector v a => Int -> IndexedTraversal' Int (Array v V2 a) (v a)
ixColumn j f m@(Array (l@(V2 _ y)) v)
  | j >= 0 && j < y = Array l . G.unsafeUpd v . L.zip js . G.toList . G.take y <$> indexed f j (getColumn m j)
  | otherwise       = pure m
  -- Linear indexes of column @j@: one entry per row, stride @y@.
  where js = [j, j + y .. ]
{-# INLINE ixColumn #-}

-- | Extract column @j@ as a fresh vector (elements are strided in the
--   backing vector, so they must be copied).  No bounds check on @j@.
getColumn :: Vector v a => Array v V2 a -> Int -> v a
getColumn (Array (V2 x y) v) j = G.generate x $ \i -> G.unsafeIndex v (i * y + j)
{-# INLINE getColumn #-}
-- | Rebuild a matrix from a list of column vectors (the inverse of
--   taking 'columns').  The new row count is the length of the
--   /shortest/ column, so ragged inputs are truncated.
--
--   NOTE(review): 'minimum' throws on an empty list — confirm callers
--   ('columns') can never supply @[]@ for a layout with @y > 0@.
transposeConcat :: Vector v a => V2 Int -> [v a] -> Array v V2 a
transposeConcat (V2 _ y) vs = Array (V2 x' y) $ G.create $ do
  mv <- GM.new (x'*y)
  iforM_ vs $ \j v ->
    F.for_ [0..x'-1] $ \i ->
      GM.write mv (i*y + j) (v G.! i)
  return mv
  where x' = minimum $ fmap G.length vs
{-# INLINE transposeConcat #-}
-- | Traversal over a single plane of a 3D array given a lens onto that
-- plane (like '_xy', '_yz', '_zx').
ixPlane :: Vector v a
        => ALens' (V3 Int) (V2 Int)
        -> Int
        -> IndexedTraversal' Int (Array v V3 a) (Array v V2 a)
ixPlane l32 i f a@(Array l v)
  | i < 0 || i >= k = pure a
  | otherwise       = Array l . (v G.//) . L.zip is . toListOf values
                        <$> indexed f i (getPlane l32 i a)
  where
    -- Linear indexes of every cell of plane @i@, in traversal order.
    is = toListOf (cloneLens l32 . shapeIndexes . to (\x -> shapeToIndex l $ pure i & l32 #~ x)) l
    -- Extent of the axis orthogonal to the plane.
    k  = F.sum $ l & l32 #~ 0

-- | Traversal over all planes of 3D array given a lens onto that plane
-- (like '_xy', '_yz', '_zx').
planes :: (Vector v a, Vector w b)
       => ALens' (V3 Int) (V2 Int)
       -> IndexedTraversal Int (Array v V3 a) (Array w V3 b) (Array v V2 a) (Array w V2 b)
planes l32 f a@(Array l _) = concatPlanes l l32 <$> go 0 where
  go i | i >= k    = pure []
       | otherwise = (:) <$> indexed f i (getPlane l32 i a) <*> go (i+1)
  k = F.sum $ l & l32 #~ 0
{-# INLINE planes #-}

-- | Rebuild a 3D array from a list of planes (inverse of 'planes').
concatPlanes :: Vector v a => V3 Int -> ALens' (V3 Int) (V2 Int) -> [Array v V2 a] -> Array v V3 a
concatPlanes l l32 as = create $ do
  arr <- M.new l
  iforM_ as $ \i m ->
    iforMOf_ values m $ \x a -> do
      let w = pure i & l32 #~ x
      M.write arr w a
  return arr

-- | Extract plane @i@ of a 3D array as a 2D array.
getPlane :: Vector v a => ALens' (V3 Int) (V2 Int) -> Int -> Array v V3 a -> Array v V2 a
getPlane l32 i a = generate (a ^# layout . l32) $ \x -> a ! (pure i & l32 #~ x)

-- | Flatten a plane by reducing a vector in the third dimension to a
-- single value.
flattenPlane :: (Vector v a, Vector w b)
             => ALens' (V3 Int) (V2 Int)
             -> (v a -> b)
             -> Array v V3 a
             -> Array w V2 b
flattenPlane l32 f a@(Array l _) = generate l' $ \x -> f (getVector x)
  where
    -- The vector of values along the orthogonal axis at position @x@.
    getVector x = G.generate n $ \i -> a ! (pure i & l32 #~ x)
    n  = F.sum $ l & l32 #~ 0
    l' = l ^# l32
{-# INLINE flattenPlane #-}
-- Ordinals ------------------------------------------------------------

-- | This 'Traversal' should not have any duplicates in the list of
-- indices.  No bounds checking is performed on the indexes.
unsafeOrdinals :: (Vector v a, Shape f) => [f Int] -> IndexedTraversal' (f Int) (Array v f a) a
unsafeOrdinals is f (Array l v) = Array l . (v G.//) <$> traverse g is
  where g x = let i = shapeToIndex l x in (,) i <$> indexed f x (G.unsafeIndex v i)
{-# INLINE [0] unsafeOrdinals #-}

-- | Setter-only specialisation of 'unsafeOrdinals' used by the rewrite
-- rules below to avoid building the traversal machinery.
setOrdinals :: (Indexable (f Int) p, Vector v a, Shape f) => [f Int] -> p a a -> Array v f a -> Array v f a
setOrdinals is f (Array l v) = Array l $ G.unsafeUpd v (fmap g is)
  where g x = let i = shapeToIndex l x in (,) i $ indexed f x (G.unsafeIndex v i)
{-# INLINE setOrdinals #-}

-- Fixed the typo'd rule label ("unsafeOrdinalts/isetOrdintals"); rule
-- names are diagnostic only, but they must be readable in -ddump-rules.
{-# RULES
"unsafeOrdinals/setOrdinals" forall (is :: [f Int]).
  unsafeOrdinals is = sets (setOrdinals is)
    :: Vector v a => ASetter' (Array v f a) a;
"unsafeOrdinals/isetOrdinals" forall (is :: [f Int]).
  unsafeOrdinals is = sets (setOrdinals is)
    :: Vector v a => AnIndexedSetter' (f Int) (Array v f a) a
 #-}
-- Mutable -------------------------------------------------------------

-- | O(n) Yield an immutable copy of the mutable array.
freeze :: (PrimMonad m, Vector v a)
       => MArray (G.Mutable v) f (PrimState m) a -> m (Array v f a)
freeze (MArray l mv) = Array l `liftM` G.freeze mv
{-# INLINE freeze #-}

-- | O(n) Yield a mutable copy of the immutable array.
thaw :: (PrimMonad m, Vector v a)
     => Array v f a -> m (MArray (G.Mutable v) f (PrimState m) a)
thaw (Array l v) = MArray l `liftM` G.thaw v
{-# INLINE thaw #-}
------------------------------------------------------------------------
-- Delayed
------------------------------------------------------------------------

-- | Isomorphism between an array and its delayed representation.
-- Conversion to the array is done in parallel.
delayed :: (Vector v a, Vector w b, Shape f, Shape g)
        => Iso (Array v f a) (Array w g b) (Delayed f a) (Delayed g b)
delayed = iso delay manifest
{-# INLINE delayed #-}

-- | Isomorphism between an array and its delayed representation.
-- Conversion to the array is done sequentially (via 'seqManifest').
seqDelayed :: (Vector v a, Vector w b, Shape f, Shape g)
           => Iso (Array v f a) (Array w g b) (Delayed f a) (Delayed g b)
seqDelayed = iso delay seqManifest
{-# INLINE seqDelayed #-}

-- | Sequential manifestation of a delayed array.
seqManifest :: (Vector v a, Shape f) => Delayed f a -> Array v f a
seqManifest (Delayed l f) = generate l f
{-# INLINE seqManifest #-}

-- | 'manifest' an array to a 'UArray' and delay again. See
-- "Data.Dense.Boxed" or "Data.Dense.Storable" to 'affirm' for other
-- types of arrays.
affirm :: (Shape f, U.Unbox a) => Delayed f a -> Delayed f a
-- The type annotation pins the intermediate representation to UArray.
affirm = delay . (manifest :: (U.Unbox a, Shape f) => Delayed f a -> UArray f a)
{-# INLINE affirm #-}

-- | 'seqManifest' an array to a 'UArray' and delay again. See
-- "Data.Dense.Boxed" or "Data.Dense.Storable" to 'affirm' for other
-- types of arrays.
seqAffirm :: (Shape f, U.Unbox a) => Delayed f a -> Delayed f a
seqAffirm = delay . (seqManifest :: (U.Unbox a, Shape f) => Delayed f a -> UArray f a)
{-# INLINE seqAffirm #-}
------------------------------------------------------------------------
-- Focused
------------------------------------------------------------------------

-- | Focus on a particular element of a delayed array.
focusOn :: f Int -> Delayed f a -> Focused f a
focusOn = Focused -- XXX do range checking
{-# INLINE focusOn #-}

-- | Discard the focus to retrieve the delayed array.
unfocus :: Focused f a -> Delayed f a
unfocus (Focused _ d) = d
{-# INLINE unfocus #-}

-- | Indexed lens onto the delayed array, indexed at the focus.
unfocused :: IndexedLens (f Int) (Focused f a) (Focused f b) (Delayed f a) (Delayed f b)
unfocused f (Focused x d) = Focused x <$> indexed f x d
{-# INLINE unfocused #-}

-- | Modify a 'Delayed' array by extracting a value from a 'Focused'
-- each point.  This is comonadic 'extend' with the focus discarded
-- afterwards.
extendFocus :: Shape f => (Focused f a -> b) -> Delayed f a -> Delayed f b
extendFocus f = unfocus . extend f . focusOn zero
{-# INLINE extendFocus #-}

-- | Lens onto the position of a 'ComonadStore'.
--
-- @
-- 'locale' :: 'Lens'' ('Focused' l a) (l 'Int')
-- @
locale :: ComonadStore s w => Lens' (w a) s
locale f w = (`seek` w) <$> f (pos w)
{-# INLINE locale #-}

-- | Focus on a neighbouring element, relative to the current focus.
--   Positions that step outside the layout wrap around on each
--   dimension (single wrap only: offsets are assumed within one extent).
shiftFocus :: Applicative f => f Int -> Focused f a -> Focused f a
shiftFocus dx (Focused x d@(Delayed l _)) = Focused x' d
  where
    x' = f <$> l <*> x <*> dx
    f k i di
      | i' < 0    = k + i'
      | i' >= k   = i' - k
      | otherwise = i'
      where i' = i + di
{-# INLINE shiftFocus #-}
-- Boundary conditions -------------------------------------------------

-- | The boundary condition used for indexing relative elements in a
-- 'Focused'.
data Boundary
  = Clamp  -- ^ clamp coordinates to the extent of the array
  | Mirror -- ^ mirror coordinates beyond the array extent
  | Wrap   -- ^ wrap coordinates around on each dimension
  deriving (Show, Read, Typeable)

-- Peeking -------------------------------------------------------------

-- | Index a focused using a 'Boundary' condition.
peekB :: Shape f => Boundary -> f Int -> Focused f a -> a
peekB = \b x -> peeksB b (const x)
{-# INLINE peekB #-}

-- | Index an element relative to the current focus using a 'Boundary'
-- condition.
peekRelativeB :: Shape f => Boundary -> f Int -> Focused f a -> a
peekRelativeB = \b i -> peeksB b (^+^ i)
{-# INLINE peekRelativeB #-}

-- | Index an element by applying a function the current position, using
-- a boundary condition.
peeksB :: Shape f => Boundary -> (f Int -> f Int) -> Focused f a -> a
peeksB = \case
  Clamp  -> clampPeeks
  Wrap   -> wrapPeeks
  Mirror -> mirrorPeeks
{-# INLINE peeksB #-}

-- After much testing, this seems to be the most reliable method to get
-- stencilSum to inline properly.

-- Wrap

wrapPeeks :: Shape f => (f Int -> f Int) -> Focused f a -> a
wrapPeeks f (Focused x (Delayed l ixF)) = ixF $! wrapIndex l (f x)
{-# INLINE wrapPeeks #-}

-- Per-dimension wrap.  Only a single wrap is performed, so indexes are
-- assumed to lie within one extent of the valid range.
wrapIndex :: Shape f => Layout f -> f Int -> f Int
wrapIndex !l !x = liftI2 f l x where
  f n i
    | i < 0     = n + i
    | i < n     = i
    | otherwise = i - n
{-# INLINE wrapIndex #-}

-- Clamp

clampPeeks :: Shape f => (f Int -> f Int) -> Focused f a -> a
clampPeeks f (Focused x (Delayed l ixF)) = ixF $! clampIndex l (f x)
{-# INLINE clampPeeks #-}

-- Per-dimension clamp into @[0, n-1]@.
clampIndex :: Shape f => Layout f -> f Int -> f Int
clampIndex !l !x = liftI2 f l x where
  f n i
    | i < 0     = 0
    | i >= n    = n - 1
    | otherwise = i
{-# INLINE clampIndex #-}

-- Mirror

mirrorPeeks :: Shape f => (f Int -> f Int) -> Focused f a -> a
mirrorPeeks f (Focused x (Delayed l ixF)) = ixF $! mirrorIndex l (f x)
{-# INLINE mirrorPeeks #-}

-- NOTE(review): for @i < 0@ this reflects about zero (@-i@), but for
-- @i >= n@ it returns @i - n@, which /wraps/ rather than mirrors —
-- confirm whether that asymmetry is intended.
mirrorIndex :: Shape f => Layout f -> f Int -> f Int
mirrorIndex !l !x = liftI2 f l x where
  f n i
    | i < 0     = - i
    | i < n     = i
    | otherwise = i - n
{-# INLINE mirrorIndex #-}
| cchalmers/dense | src/Data/Dense/Generic.hs | bsd-3-clause | 34,750 | 0 | 19 | 8,313 | 10,110 | 5,289 | 4,821 | -1 | -1 |
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
module Model where
--( migrateTables
--, TimeEntry(..)
--, TimeEntryEndedAt
--) where
import Data.Text
import Data.Time
import Database.Persist
import Database.Persist.TH (mkPersist, mkMigrate, persistLowerCase, share, sqlSettings)
-- | Template Haskell splice generating the @TimeEntry@ entity (record
--   type plus persistent machinery via 'mkPersist') together with the
--   @migrateTables@ migration ('mkMigrate') for the tables defined in
--   this quasiquote.
share [mkPersist sqlSettings, mkMigrate "migrateTables"] [persistLowerCase|
TimeEntry
  description Text
  startedAt UTCTime
  endedAt UTCTime Maybe
  deriving Show
|]
| timhabermaas/time-tracker | src/Model.hs | bsd-3-clause | 824 | 0 | 7 | 210 | 79 | 53 | 26 | 15 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module System.ProgressBar.ByteString(
mkByteStringProgressBar
, mkByteStringProgressWriter
, fileReadProgressBar
, fileReadProgressWriter
)
where
import Data.ByteString.Lazy(ByteString,hGetContents)
import Data.ByteString.Lazy.Progress
import Data.Text.Lazy(Text)
import qualified Data.Text.Lazy.IO as T
import Data.Time.Clock(getCurrentTime)
import System.IO(Handle,hSetBuffering,hPutChar,hPutStr,BufferMode(..))
import System.IO(openFile,hFileSize,IOMode(..))
import System.ProgressBar(Label, Progress(Progress), ProgressBarWidth(..),
Style(..), Timing(..))
import System.ProgressBar(defStyle, renderProgressBar)
-- | Shorthand for 'Integer', used in the signatures below.
type ℤ = Integer
-- |Track the progress of a ByteString as it is consumed by some computation.
-- This is the most general version in the library, and will render a progress
-- string and pass it to the given function. See other functions for interacting
-- with fixed-size files, the console, or generic Handles.
mkByteStringProgressBar :: ByteString      {- ^ The ByteString to track. -} ->
                           (Text -> IO ()) {- ^ Function to call on update.-}->
                           ℤ               {- ^ Progress bar width -} ->
                           ℤ               {- ^ The size of the ByteString -} ->
                           Label ()        {- ^ Prefixed label -} ->
                           Label ()        {- ^ Postfixed label -} ->
                           IO ByteString
mkByteStringProgressBar input tracker width size prefix postfix =
  do start <- getCurrentTime
     trackProgressWithChunkSize bestSize (updateFunction start) input
 where
  style = defStyle{ stylePrefix  = prefix
                  , stylePostfix = postfix
                  , styleWidth   = ConstantWidth (fromIntegral width) }
  -- Chunk size ~1% of the input, capped at 16K with a 4K step.
  -- NOTE(review): for @size < 100@ this yields a chunk size of 0 —
  -- confirm 'trackProgressWithChunkSize' copes with that.
  bestSize | size `div` 100 < 4096  = fromIntegral $ size `div` 100
           | size `div` 100 < 16384 = 4096
           | otherwise              = 16384
  -- Renders the bar against the wall-clock time since 'start' and
  -- hands the rendered Text to the caller's tracker.
  updateFunction start _ newAmt =
    do now <- getCurrentTime
       let progress = Progress (fromIntegral newAmt) (fromIntegral size) ()
           timing   = Timing start now
       tracker $ renderProgressBar style progress timing
-- |As mkByteStringProgressBar, but simply print the output to the given
-- Handle instead of using a callback.  Buffering on the Handle is turned
-- off so each redraw appears immediately.
mkByteStringProgressWriter :: ByteString {- ^ The ByteString to track. -} ->
                              Handle {- ^ Handle to write to -} ->
                              ℤ {- ^ Progress bar width -} ->
                              ℤ {- ^ The size of the ByteString -} ->
                              Label () {- ^ Prefixed label -} ->
                              Label () {- ^ Postfixed label -} ->
                              IO ByteString
mkByteStringProgressWriter input handle width size prefix postfix =
  do hSetBuffering handle NoBuffering
     mkByteStringProgressBar input emit width size prefix postfix
 where
  -- Redraw in place: carriage return first, then the rendered bar.
  emit line = do T.hPutStr handle "\r"
                 T.hPutStr handle line
-- |Track the loading of a file as it is consumed by some computation. The
-- use of this function should be essentially similar to ByteString's
-- readFile, but with a lot more arguments and side effects.
-- NOTE: relies on lazy I/O; the handle stays open until the returned
-- ByteString is fully consumed.
fileReadProgressBar :: FilePath {- ^ The file to load. -} ->
                       (Text -> IO ()) {- ^ Function to call on update. -} ->
                       ℤ {- ^ Progress bar width -} ->
                       Label () {- ^ Prefixed label -} ->
                       Label () {- ^ Postfixed label -} ->
                       IO ByteString
fileReadProgressBar path tracker width prefix postfix =
  do h        <- openFile path ReadMode
     total    <- hFileSize h
     contents <- hGetContents h
     mkByteStringProgressBar contents tracker width total prefix postfix
-- |As fileReadProgressBar, but simply write the progress bar to the given
-- Handle instead of calling a generic function.
-- NOTE: relies on lazy I/O; the input handle stays open until the
-- returned ByteString is fully consumed.
fileReadProgressWriter :: FilePath {- ^ The file to load. -} ->
                          Handle {- ^ Handle to write to -} ->
                          ℤ {- ^ Progress bar width -} ->
                          Label () {- ^ Prefixed label -} ->
                          Label () {- ^ Postfixed label -} ->
                          IO ByteString
fileReadProgressWriter path handle width prefix postfix =
  do h        <- openFile path ReadMode
     total    <- hFileSize h
     contents <- hGetContents h
     mkByteStringProgressWriter contents handle width total prefix postfix
| acw/bytestring-progress | System/ProgressBar/ByteString.hs | bsd-3-clause | 4,572 | 0 | 14 | 1,415 | 802 | 433 | 369 | 71 | 1 |
{-# LANGUAGE BangPatterns,RankNTypes,OverloadedStrings #-}
{-# LANGUAGE CPP, DeriveDataTypeable, FlexibleContexts,
GeneralizedNewtypeDeriving, MultiParamTypeClasses,
TemplateHaskell, TypeFamilies, RecordWildCards #-}
module Plow.Service.Alarm.Acid where
import Plow.Service.Alarm.Rules
import Plow.Service.Alarm.Types
import Data.Text hiding (head, last)
import Prelude hiding (head, last)
-- import Data.Time
import Data.Vector
import Control.Applicative
import Control.Monad.Reader (ask)
import Control.Monad.State ( get, put )
import Data.Acid ( AcidState, Query, Update
, makeAcidic, openLocalState )
import Data.Acid.Advanced ( query', update' )
import Data.Acid.Local ( createCheckpointAndClose )
import Data.SafeCopy ( base, deriveSafeCopy )
import Data.Data ( Data, Typeable )
{-| Various naming conventions for stuff here I will try to stick to:
* Controller -> looks at the current state and decides what should happen based apon it
* Handler -> takes an incoming state and some parameters and returns a new state
* Rxer -> recieves some piece of data from an outside source
* Sender -> sends a piece of data to an outside source.
* Store -> put a piece of data into the local storage
* Get -> get a piece of data from the local storage
|-}
-- | Placeholder contact used to seed the initial alarm state.
person1 :: Person
person1 = Person 555 "test@Test.com" 0
-- | Default call list holding a single copy of the placeholder contact.
testPeople :: DefaultPeople
testPeople = People (fromList [Person 555 "test@Test.com" 0])
-- | State a fresh alarm starts in: clear, not calling anyone.
initialAlarmState :: AlarmState
initialAlarmState = AlarmState Clear NotCalling More person1
-- | All-zero parameter block -- presumably placeholder thresholds to be
-- overridden before real use; TODO confirm.
defaultAlarmParameters :: AlarmParameters
defaultAlarmParameters = AlarmParameters 0 0 0 "test" testPeople
-- | Runner for alarm id 0, combining the defaults above.
defaultAlarmRunner :: AlarmRunner
defaultAlarmRunner = AlarmRunner (AlarmId 0) defaultAlarmParameters initialAlarmState
-- | Timer for alarm id 0 starting at tick 0.
initialAlarmTimer :: AlarmTimer
initialAlarmTimer = AlarmTimer (AlarmId 0 ) 0
{-|
timer incrementor, checker and reset |-}
-- | increment the timer by i, flexible alarm incrementor for
-- | time stallouts and resuming after crashes
incTimer :: Int -> Update AlarmTimer Int
incTimer i = do
  c@AlarmTimer{..} <- get
  let newTimer = timer + i
  -- Persist the incremented value.  Previously this event computed the
  -- new total and returned it but never called 'put', so the acid-state
  -- timer was never actually advanced.
  put c{ timer = newTimer }
  return newTimer
-- | Peek at the time in an alarm to update states
-- | Non-blocking guarantees
checkTimer :: Query AlarmTimer Int
checkTimer = fmap timer ask
-- | Reset the alarm timer back to zero, returning the new (zero) value.
resetTimer :: Update AlarmTimer Int
resetTimer = do
  c <- get
  -- Write the reset state back.  Previously this event read the state
  -- and returned 0 without ever calling 'put', so the stored timer was
  -- never actually reset.
  put c{ timer = 0 }
  return 0
{-| AlarmState,
* 'Alarm' Mutator, Checker
*
* 'Call' Mutator, Checker
* 'Person'
+ Incrementor
+ Reset
* 'Count' Mutator, Checker
|-}
-- | Report @a@ when 'checkAlarmState' accepts the transition, otherwise
-- report the state currently held.  NOTE(review): this event never calls
-- 'put', so the persisted AlarmRunner is unchanged either way -- confirm
-- that is intentional.
changeAlarmState :: AlarmState -> Update AlarmRunner AlarmState
changeAlarmState a = do
  c@AlarmRunner{..} <- get
  if checkAlarmState c a
    then return a
    else return alarmState
-- | Definition of Acidic events
$(makeAcidic ''AlarmRunner ['changeAlarmState])
$(makeAcidic ''AlarmTimer ['incTimer, 'checkTimer, 'resetTimer])
| smurphy8/alarm-service | src/Plow/Service/Alarm/Acid.hs | bsd-3-clause | 3,039 | 0 | 11 | 657 | 525 | 292 | 233 | -1 | -1 |
module Sized where
import {-# SOURCE #-} SizedDigit ()
-- | Things with a measurable integer size.
class Sized a where
  -- | The size of the value; unit is instance-defined.
  size :: a -> Int
| phischu/fragnix | tests/quick/InstancePropagation/Sized.hs | bsd-3-clause | 102 | 0 | 7 | 28 | 29 | 17 | 12 | 4 | 0 |
{-# LANGUAGE MultiParamTypeClasses, TypeOperators #-}
module Generic.Control.Category where
import Prelude ()
infixr 9 .
-- | A Category generalised over a context constructor @j@: identity and
-- associative composition on arrows between @j@-wrapped objects.
class Category j (~>) where
  -- | Identity arrow.
  id :: (j a ~> j a)
  -- | Arrow composition (right-to-left, like Prelude's '.').
  (.) :: (j b ~> j c) -> (j a ~> j b) -> (j a ~> j c)
| tomlokhorst/AwesomePrelude | src/Generic/Control/Category.hs | bsd-3-clause | 230 | 0 | 11 | 55 | 104 | 56 | 48 | 7 | 0 |
{-# LANGUAGE TemplateHaskell #-}
module Serialize where
import Data.Serialize
import DataTypes
import Data.DeriveTH
import Data.Derive.Serialize
-- Template Haskell: derive 'Serialize' instances for the two network
-- message types via the derive package.
$( derive makeSerialize ''NetworkDirectMessage )
$( derive makeSerialize ''NetworkMulticastMessage )
| Tener/sheep-transfer | lib/Serialize.hs | bsd-3-clause | 249 | 0 | 8 | 28 | 52 | 28 | 24 | 8 | 0 |
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Data.Aeson
import qualified Data.ByteString.Lazy as LBS
import Data.Default (def)
import Data.Functor (void)
import Data.Text.Encoding (decodeUtf8, encodeUtf8)
import Qi (withConfig)
import Qi.Config.AWS.ApiGw (ApiMethodEvent (..),
ApiVerb (Get, Post),
RequestBody (JsonBody, PlainTextBody))
import Qi.Config.AWS.S3 (S3Key (S3Key),
S3Object (S3Object))
import Qi.Config.Identifier (S3BucketId)
import Qi.Program.Config.Interface (ConfigProgram, api,
apiMethodLambda, apiResource,
s3Bucket)
import Qi.Program.Lambda.Interface (ApiLambdaProgram,
getS3ObjectContent,
putS3ObjectContent)
import Qi.Util (success)
-- Use the curl commands below to test-drive the two endpoints (substitute your unique api stage url first):
{-
export API="https://60yaf0cfej.execute-api.us-east-1.amazonaws.com/v1"
curl -v -X POST -H "Content-Type: application/json" -d "{\"property\": 3}" "$API/things"
curl -v -X GET "$API/things"
-}
-- | Deploy the stack: one S3 bucket, plus a "world" API whose "things"
-- resource exposes a POST (create) and a GET (view) lambda, both backed
-- by the bucket.
main :: IO ()
main = withConfig config
  where
    config :: ConfigProgram ()
    config = do
      bucketId <- s3Bucket "things"
      void $ api "world" >>= \apiId ->
        apiResource "things" apiId >>= \apiResourceId -> do
          apiMethodLambda
            "createThing"
            Post apiResourceId def
            (writeContentsLambda bucketId) def
          apiMethodLambda
            "viewThing"
            Get apiResourceId def
            (readContentsLambda bucketId) def
-- | Handle POST: encode the request body and store it as the bucket's
-- single object.  Only plain-text and JSON bodies are supported; any
-- other body shape aborts the lambda.
writeContentsLambda
  :: S3BucketId
  -> ApiLambdaProgram
writeContentsLambda bucketId ApiMethodEvent{_aeBody} = do
  let payload = case _aeBody of
        PlainTextBody t -> LBS.fromStrict (encodeUtf8 t)
        JsonBody v      -> encode v
        _               -> error "failed to encode request body"
  putS3ObjectContent (s3Object bucketId) payload
  success "successfully added content"
-- | Handle GET: fetch the bucket's single object and return its decoded
-- contents as a JSON string.
readContentsLambda
  :: S3BucketId
  -> ApiLambdaProgram
readContentsLambda bucketId _ = do
  body <- getS3ObjectContent $ s3Object bucketId
  success $ String $ decodeUtf8 $ LBS.toStrict body
-- Each bucket stores exactly one document, always under this fixed key.
s3Object = (`S3Object` s3Key)
s3Key = S3Key "thing.json"
| ababkin/qmuli | examples/apigw-lambda-s3/src/Main.hs | mit | 2,795 | 0 | 17 | 1,057 | 473 | 264 | 209 | 55 | 3 |
{-# OPTIONS_GHC -Wall -Wno-unticked-promoted-constructors -Wno-unused-imports -Wno-type-defaults -Wno-orphans -fconstraint-solver-iterations=0 #-}
--{-# OPTIONS_GHC -ddump-deriv #-} -- +
--{-# OPTIONS_GHC -ddump-rn #-} -- +
--{-# OPTIONS_GHC -ddump-tc-trace #-} -- +
--{-# OPTIONS_GHC -ddump-tc #-} -- -
--{-# OPTIONS_GHC -ddump-rule-firings #-} -- -
--{-# OPTIONS_GHC -ddump-ds #-} -- -
module Holotype
where
import qualified Codec.Picture as Juicy
import qualified Codec.Picture.Saving as Juicy
import qualified Control.Concurrent.STM as STM
import qualified Control.Monad.Ref
import qualified Data.ByteString.Lazy as B
import qualified Data.Map.Strict as M
import qualified Data.Sequence as Seq
import qualified Data.Set as Set
import qualified Data.Text as T
import qualified Data.Text.Zipper as T
import qualified Data.Time.Clock as Time
import qualified Data.TypeMap.Dynamic as TM
import qualified Data.IntUnique as U
import qualified GHC.Generics as GHC
import qualified Graphics.GL.Core33 as GL
import qualified Options.Applicative as Opt
import qualified Reflex.GLFW as GLFW
import qualified Text.Parser.Char as P
import qualified Text.Parser.Combinators as P
import qualified Text.Parser.Token as P
import qualified Text.Trifecta.Parser as P
import qualified Text.Trifecta.Result as P
import ExternalImports
-- Local imports
import Elsewhere
import qualified Graphics.Cairo as Cr
import Graphics.Flatland
import qualified Graphics.Flex as Flex
import Holo.Classes
import Holo.Instances
import Holo.Input
import Holo.Item
import Holo.Name
import Holo.Record
import Holo.Widget
import qualified Holo.Widget as Widget
import qualified Holo.Port as Port
import Holo.Port (Port(..), IdToken)
import qualified Holo.System as HOS
-- TEMPORARY
import Generics.SOP (Proxy, Top)
import qualified Generics.SOP as SOP
import qualified "GLFW-b" Graphics.UI.GLFW as GLFW
-- | For every port arriving on the event, allocate that port's next
-- render frame (side-effecting, hence 'performEvent') and pair it with
-- the port.
newPortFrame ∷ RGLFW t m ⇒ Event t VPort → m (Event t (VPort, Port.Frame))
newPortFrame portFrameE = performEvent $ portFrameE <&>
  \port@Port{..}→ do
    newFrame ← Port.portNextFrame port
    pure (port, newFrame)
defVocab ∷ Vocab i (Present i)
defVocab = Vocab
(TM.empty
<: (Proxy @Bool, Desig TextLine)
<: (Proxy @Bool, DesigDenot Switch)
<: (Proxy @Port.WaitVSync, Desig Switch)
<: (Proxy @Double, Desig TextLine)
<: (Proxy @DΠ, Desig TextLine)
<: (Proxy @Port.ScreenMode, Desig TextLine)
<: (Proxy @Float, Desig TextLine)
<: (Proxy @Int, Desig TextLine)
<: (Proxy @Integer, Desig TextLine)
<: (Proxy @Text, DesigDenot TextLine)
-- XXX: this is atrocious, but the suspicion is we have a generic solution : -
<: (Proxy @([(Cr.FontKey,(Either Cr.FontAlias [Cr.FontSpec]))])
, Desig TextLine)
-- <: (Proxy @(Port.ScreenDim (Di Int)), HoloName TextLine)
)
scene ∷ ∀ i t r m. (MonadW i t r m)
⇒ Input t
→ Dynamic t Port.Settings
→ Dynamic t Integer
→ Dynamic t Int
→ Dynamic t Double
→ m (Widget i Port.Settings, Widget.WH i)
scene input sttsD statsValD frameNoD fpsValueD =
let lbinds = listenerBindsParse "Scene" (inStore input)
[ ("Settings.sttsWaitVSync", "VSyncToggle")
, ("Settings.sttsScreenDim", "WinSize")
]
in runWidgetMLBinds @i lbinds $ mdo
fpsD ∷ Widget i Text
← dynPresent "fps" defVocab (T.pack ∘ printf "%3d fps" ∘ (floor ∷ Double → Integer) <$> fpsValueD)
statsD ∷ Widget i Text
← dynPresent "mem" defVocab $ statsValD <&>
\(mem)→ T.pack $ printf "mem: %d" mem
let rectDiD = (PUs <$>) ∘ join unsafe'di ∘ fromIntegral ∘ max 1 ∘ flip mod 200 <$> frameNoD
rectD ∷ Widget i (Di (Unit PU))
← liftPureDynamic "rect" Rect rectDiD
frameCountD ∷ Widget i Text
← dynPresent "nframes" defVocab $ T.pack ∘ printf "frame #%04d" <$> frameNoD
varlenTextD ∷ Widget i Text
← dynPresent "truefalse" defVocab $ T.pack ∘ printf "even: %s" ∘ show ∘ even <$> frameNoD
tupleWD ← present @i "tuple" defVocab
(unsafe'di 320 200 ∷ Di Int)
sttsWDCurr ∷ Widget i Port.Settings
← dynPresent "stts-ro" defVocab sttsD
sttsWDSeeded ∷ Widget i Port.Settings
← present "Settings" defVocab
Port.defaultSettings
longStaticTextD ← present @i "longstatictext" (desDen @Text TextLine) ("0....5...10...15...20...25...30...35...40...45...50...55...60...65...70...75...80...85...90...95..100" ∷ Text)
-- dimD ∷ Widget i (Double, Double)
-- ← liftW eV (X, (Labelled ("x", TextLine)
-- ,Labelled ("y", TextLine)))
-- (0,0)
-- The loop demo (currently incapacitated due to definition of mkTextEntryValidatedStyleD)
let fontNameStyle name = defSty (Proxy @TextLine) & tsFontKey .~ Cr.FK name
styleNameD ← mkTextEntryValidatedStyleD @i "stylename" styleB "defaultSans" $
(\x→ x ≡ "defaultMono" ∨ x ≡ "defaultSans")
styleD ← trackStyle $ fontNameStyle <$> (traceDynWith show $ wValD styleNameD)
let styleB = current styleD
--
(sttsWDSeeded ∷ Widget i Port.Settings,)
<$> vboxD @i
[ stripW frameCountD
, stripW sttsWDCurr
, stripW sttsWDSeeded
, stripW tupleWD
, stripW rectD
, stripW fpsD
, stripW longStaticTextD
, stripW statsD
, stripW varlenTextD
]
-- | Stack child widgets vertically: fold the children, merging their
-- subscription dynamics with (<>) and collecting their items into a
-- list, then finalise as a 'vbox' node under this widget's listener
-- bindings.  The 'trace' logs each child's element name as it is folded.
vboxD ∷ ∀ i t r m. (MonadW i t r m) ⇒ [WH i] → m (WH i)
vboxD chi = do
  lbs ← getLBinds @i
  let (subsD, chiD) = foldr (\(sae, s, hb) (ss, hbs)→
                               trace (printf "vboxD χ %s" (T.unpack $ aeltName sae))
                               ( zipDynWith (<>) s ss
                               , zipDynWith (:) hb hbs ))
                            (constDyn mempty, constDyn [])
                            chi
  finaliseNodeWH @i @[WH i] (lbsAE lbs, subsD, vbox <$> chiD)
-- • Could not deduce (SOP.HasDatatypeInfo (Dynamic * t (Blank * i)))
-- arising from a use of ‘finaliseNodeWH’
-- from the context: MonadW i t r m
-- bound by the type signature for:
-- vboxD :: forall i t r (m :: * -> *).
-- MonadW i t r m =>
-- [WH i] -> m (WH i)
-- at src/Holotype.hs:172:1-55
-- | Reflex-GLFW guest entry point: pins the concrete API/monad stack
-- via equality constraints and delegates to 'holotype'' inside a named
-- tracing scope.
holotype ∷ ∀ i t r m rm
         . ( Typeable t
           , RGLFW t m
           , rm ~ MonadWCtxReaderT t m
           , r ~ MonadWCtx t
           , i ~ API t r rm
           )
         ⇒ RGLFWGuest t m
holotype win evCtl windowFrameE inputE = runTracing "holotype" $
  holotype' @i win evCtl windowFrameE inputE
holotype' ∷ ∀ i t r m pm rpm
. ( Typeable t
, MonadW i t r m
, pm ~ Performable m
, rpm ~ MonadWCtxReaderT t pm
)
⇒ RGLFWGuest t m
holotype' win evCtl windowFrameE inputE = mdo
tr ← getTrace
Options{..} ← liftIO $ Opt.execParser $ Opt.info (parseOptions <**> Opt.helper)
( Opt.fullDesc
-- <> header "A simple holotype."
<> Opt.progDesc "A simple holotype.")
-- when oTrace $
-- liftIO $ setupTracer False
-- [(ALLOC, TOK, TRACE, 0),(FREE, TOK, TRACE, 0)
-- ,(SIZE, HOLO, TRACE, 0)
-- (ALLOC, TOK, TRACE, 0),(FREE, TOK, TRACE, 0)
-- ,(MISSALLOC, VIS, TRACE, 4),(REUSE, VIS, TRACE, 4),(REALLOC, VIS, TRACE, 4),(ALLOC, VIS, TRACE, 4),(FREE, VIS, TRACE, 4)
-- ,(ALLOC, TEX, TRACE, 8),(FREE, TEX, TRACE, 8)
-- ]
HOS.unbufferStdout
initE ← getPostBuild
winD ← holdDyn win $ win <$ initE
initWinDimV ← Port.portWindowSize win
liftIO $ GLFW.enableEvent evCtl GLFW.FramebufferSize
evRawE ∷ Event t Ev ← performEvent $ promoteEv <$> inputE
let (clickRawE, evE) = fanEither $ (\x→ if evMatch inputMaskClickRawAny x then Left x else Right x) <$> evRawE
-- Closing-the-circle issues:
-- 1. To even receive events, the switch needs to be subscribed to <F3> -- but its subscriptions are default.
-- 2. For #1 we need a way to express subscription customisation.
-- 3. Tearing the Settings apart again to form ESettings? ESettings looks way artificial now.
-- 4. Should Settings even be a single structure?
-- 5. EventBinding: Addresses (object names) to IdTokens.
-- 6. Event is routed -- then interpreted how? Currently event interpretation is hard-coded.
-- 7. So we need names (and types) for events -- SemanticEvent (akin to WorldEvent).
-- 8. Then we can express the problem: Named entities handling named events.
-- 9. SemanticEvents, EventBindings and Addresses seem to be only needed during decision of how to compile subscriptions.
-- 10. Objects declare SemanticEvents they can handle and their sub-Addresses.
sttsE ←
let Port.Settings{..} = Port.defaultSettings
in Port.ESettings
<$> pure (initE $> (sttsDΠ, sttsFontPreferences))
<*> pure (fforMaybe (leftmost [evE, Ev (WinSizeEv (Port.ScreenDim initWinDimV)) <$ initE])
(\case
Ev (WinSizeEv dim) → Just (sttsScreenMode, dim)
_ → Nothing))
<*> pure (leftmost [Port.WaitVSync True <$ initE])
sttsD ← accumMaybeDyn (flip const) Port.defaultSettings $ (fmap Port.portSettings) <$> updated maybePortD
maybePortD ← Port.portCreate winD sttsE
portFrameE ← newPortFrame $ fmapMaybe id $ fst <$> attachPromptlyDyn maybePortD windowFrameE
-- * Random data: stats
fpsValueD ← fpsCounterD $ snd <$> portFrameE
frameNoD ← count portFrameE
statsValD ← holdDyn 0 =<< performEvent (portFrameE <&> const HOS.gcKBytesUsed)
-- * SCENE
-- not a loop: subscriptionsD only used/sampled during inputE, which is independent
-- let inputEv = fforMaybe inputE
-- (\case x@(U GLFW.EventMouseButton{}) → Just $ Ev $ GLFWEv x; _ → Nothing)
semStoreV ← declSemStore "main"
[ ("VSyncToggle"
, "Toggle waiting for vertical synchronisation.")
, ("WinSize"
, "Window size change.")
]
let input = mkInput semStoreV evBindsD inputMux
bind = bindSem semStoreV
evBindsD = constDyn $ mempty
& bind "VSyncToggle" (inputMaskKeyPress GLFW.Key'F3 mempty)
& bind "WinSize" (glfwMask GLFW.eventMaskWindowSize)
inputMux ← routeEv evE clickedE subscriptionsD
(,) (Widget' (_,_,csttsWD,_))
((,,) _ae subscriptionsD sceneD)
← upgradeMonadW @i "Scene" input $ scene @i input sttsD statsValD frameNoD fpsValueD
-- * LAYOUT
-- needs port because of DPI and fonts
sceneQueriedE ← performEvent $ (\(s, (p, _f))→
runTracing' tr $
iSizeRequest @rpm p s) <$>
attachPromptlyDyn sceneD portFrameE
sceneQueriedD ← holdDyn mempty sceneQueriedE
let sceneLaidTreeD ∷ Dynamic t (Item Top PLayout)
sceneLaidTreeD = Flex.layout (Size $ fromPU <$> di 800 600) <$> sceneQueriedD
-- * RENDER
sceneDrawE = attachPromptlyDyn sceneLaidTreeD portFrameE
drawnPortE ← performEvent $ sceneDrawE <&>
\(tree, (,) port f@Port.Frame{..}) → runTracing' @t tr $ do
-- let ppItem = \case
-- x@Node{..} → "N: "<>Flex.ppItemArea x<>" ← "<>Flex.ppItemSize x<>" geoΔ: "<>Flex.ppdefGeoDiff (iGeo x)
-- x@Leaf{..} → "L: "<>Flex.ppItemArea x<>" ← "<>Flex.ppItemSize x<>" geoΔ: "<>Flex.ppdefGeoDiff (iGeo x)
-- Flex.dump ppItem tree
let leaves = treeLeaves tree
-- liftIO $ printf " leaves: %d\n" $ M.size leaves
Port.portGarbageCollectVisuals port leaves
tree' ← ensureTreeVisuals port tree
-- XXX: 'render' is called every frame for everything
renderTreeVisuals port tree'
showTreeVisuals f tree'
pure port
drawnPortD ← holdDyn Nothing $ Just <$> drawnPortE
-- * PICKING
let pickE = fmapMaybe id $ attachPromptlyDyn drawnPortD clickRawE <&> \case
(Nothing, _) → Nothing -- We may have no drawn picture yet.
(Just x, y) → Just (x, y)
clickedE ← mousePointId $ (id *** (\(Ev (GLFWEv (U x@GLFW.EventMouseButton{})))→ x)) <$> pickE
performEvent_ $ clickedE <&>
\(ClickEv{..})→ liftIO $ printf "pick=0x%x\n" (Port.tokenHash ceIdToken)
hold False (evMatch (inputMaskKeyPress' GLFW.Key'Escape)
<$> evE)
-- * Boring stuff
--
-- | Command-line options for the demo binary.
data Options where
  Options ∷
    { oTrace ∷ Bool   -- ^ Enable allocation tracing (debug aid).
    } → Options
-- | optparse-applicative parser matching 'Options'.
parseOptions ∷ Opt.Parser Options
parseOptions =
  Options
  <$> Opt.switch (Opt.long "trace" <> Opt.help "[DEBUG] Enable allocation tracing")
-- | On each mouse-button event, read the cursor position and pick the
-- item under it via the port; picks whose token hash is 0 (i.e. nothing
-- was hit) are filtered out.
mousePointId ∷ RGLFW t m ⇒ Event t (VPort, GLFW.Input 'GLFW.MouseButton) → m (Event t (Ev' ClickEvK))
mousePointId ev = (ffilter ((≢ 0) ∘ Port.tokenHash ∘ ceIdToken) <$>) <$>
  performEvent $ ev <&> \(port@Port{..}, e@(GLFW.EventMouseButton _ _ _ _)) → do
    (,) x y ← liftIO $ (GLFW.getCursorPos portWindow)
    ClickEv e <$> (Port.portPick port $ floor <$> po x y)
-- * Wijits and various stuffs
--
-- | Lift a dynamic of raw style settings into a dynamic 'Style',
-- stamping every revision with a monotonically increasing 'StyleGene'.
trackStyle ∷ (As a, RGLFW t m) ⇒ Dynamic t (Sty a) → m (Dynamic t (Style a))
trackStyle styD = do
  revD ← count $ updated styD
  pure $ zipDynWith Style styD (StyleGene ∘ fromIntegral <$> revD)
-- mkTextEntryStyleD ∷ RGLFW t m ⇒ InputEventMux t → Behavior t (Style Text) → Text → m (W t (Text, HoloBlank))
-- mkTextEntryStyleD mux styleB initialV = do
-- tokenV ← newId
-- let editE = select mux $ Const2 tokenV
-- valD ← liftDyn initialV editE
-- setupE ← getPostBuild
-- let holoE = attachWith (leafStyled tokenV) styleB $ leftmost [updated valD, initialV <$ setupE]
-- holdDyn (initialV, emptyHolo) (attachPromptlyDyn valD holoE)
-- <&> (,) editMaskKeys
mkTextEntryValidatedStyleD ∷ ∀ i t r m. MonadW i t r m ⇒ AElt → Behavior t (Style TextLine) → Text → (Text → Bool) → m (Result i Text)
mkTextEntryValidatedStyleD ae styleB initialV testF = do
unless (testF initialV) $
error $ "Initial value not accepted by test: " <> T.unpack initialV
-- (subD, textD) ← mkTextEntryStyleD mux styleB initialV
Widget' (_, subD, itemD, textD) ← widget @i @Text ae (desDen @Text TextLine) initialV
initial ← sample $ current textD
foldDyn (\new oldValid→
if testF new then new else oldValid)
initial (updated textD)
<&> Widget' ∘ (ae, subD,itemD,)
-- | Derive a frames-per-second dynamic from the frame event: timestamp
-- each frame, difference consecutive timestamps, average the last 300
-- deltas, and invert.
fpsCounterD ∷ RGLFW t m ⇒ Event t Port.Frame → m (Dynamic t Double)
fpsCounterD frameE = do
  momentE ← performEvent $ (\_ → HOS.fromSec <$> HOS.getTime) <$> frameE
  deltaD  ← (fst <$>) <$> foldDyn (\t (_, prev)→(t - prev, t)) (0, 0) momentE
  avgD    ← average 300 $ updated deltaD
  pure $ recip <$> avgD
instance SOP.Generic (V2 a)
instance SOP.HasDatatypeInfo (V2 a)
deriving instance Generic (Di a)
instance SOP.Generic (Di a)
instance SOP.HasDatatypeInfo (Di a)
deriving instance Generic (Port.ScreenDim (Di a))
instance SOP.Generic (Port.ScreenDim (Di a))
instance SOP.HasDatatypeInfo (Port.ScreenDim (Di a))
| deepfire/mood | src/Holotype.hs | agpl-3.0 | 16,571 | 83 | 22 | 5,100 | 3,877 | 2,059 | 1,818 | -1 | -1 |
{-# LANGUAGE FlexibleContexts #-}
module Main where
import Graphics.UI.WX
import Graphics.UI.WXCore
-- | Build a frame titled with the application directory, holding a
-- clickable greeting label and a button that toggles its own caption.
main :: IO ()
main = start $ do
  dir <- getApplicationDir
  f   <- frame [text := dir]
  lbl <- staticText f [text := "Hello world!", on click ::= goodbye]
  ok  <- button f [text := "Ok", on command ::= longlabel]
  set f [layout := column 5 [widget lbl, widget ok]]
  set f [clientSize := sz 300 300]
-- | Swap the button caption for a long one and re-fit; the next press
-- flips back via 'shortlabel'.
longlabel :: Button () -> IO ()
longlabel btn = do
  set btn [ text := "a really long label for a button"
          , on command ::= shortlabel ]
  refit btn
-- | Swap the button caption for a short one and re-fit; the next press
-- flips back via 'longlabel'.
shortlabel :: Button () -> IO ()
shortlabel btn = do
  set btn [ text := "short"
          , on command ::= longlabel ]
  refit btn
-- | Replace the greeting with a (two-line) farewell; clicking again
-- restores it via 'hello'.
goodbye :: StaticText () -> Point -> IO ()
goodbye st _pos = do
  set st [ text := "Goodbye world!\nAlas, I knew it well..."
         , on click ::= hello ]
  refit st
-- | Shrink the label back to a short greeting, using the minimal re-fit
-- variant; clicking again switches to 'goodbye'.
hello :: StaticText () -> Point -> IO ()
hello st _pos = do
  set st [ text := "Hi!"
         , on click ::= goodbye ]
  refitMinimal st
| jacekszymanski/wxHaskell | samples/test/Resize.hs | lgpl-2.1 | 1,069 | 0 | 13 | 347 | 414 | 199 | 215 | 33 | 1 |
module Numeric.Units.Dimensional.TF.Prelude
( module Numeric.Units.Dimensional.TF
, module Numeric.Units.Dimensional.TF.Quantities
, module Numeric.Units.Dimensional.TF.SIUnits
, module Numeric.NumType.TF
, module Prelude
) where
import Numeric.Units.Dimensional.TF hiding
( Dimensional (Dimensional)
)
import Numeric.Units.Dimensional.TF.Quantities
import Numeric.Units.Dimensional.TF.SIUnits
import Numeric.NumType.TF
( neg5, neg4, neg3, neg2, neg1, zero, pos1, pos2, pos3, pos4, pos5
) -- Used in exponents.
import Prelude hiding
( (+), (-), (*), (/), (^), (**)
, abs, negate, pi, exp, log, sqrt
, sin, cos, tan, asin, acos, atan, atan2
, sinh, cosh, tanh, asinh, acosh, atanh
, sum
) -- Hide definitions overridden by 'Numeric.Dimensional'.
| khalilfazal/information-units | Numeric/Units/Dimensional/TF/Prelude.hs | lgpl-2.1 | 816 | 2 | 6 | 161 | 220 | 157 | 63 | 18 | 0 |
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE MagicHash, UnboxedTuples, PatternGuards, ScopedTypeVariables, RankNTypes #-}
-- | Concurrent queue for single reader, single writer
module Control.Distributed.Process.Internal.CQueue
( CQueue
, BlockSpec(..)
, MatchOn(..)
, newCQueue
, enqueue
, enqueueSTM
, dequeue
, mkWeakCQueue
, queueSize
) where
import Prelude hiding (length, reverse)
import Control.Concurrent.STM
( atomically
, STM
, TChan
, TVar
, modifyTVar'
, tryReadTChan
, newTChan
, newTVarIO
, writeTChan
, readTChan
, readTVarIO
, orElse
, retry
)
import Control.Applicative ((<$>), (<*>))
import Control.Exception (mask_, onException)
import System.Timeout (timeout)
import Control.Distributed.Process.Internal.StrictMVar
( StrictMVar(StrictMVar)
, newMVar
, takeMVar
, putMVar
)
import Control.Distributed.Process.Internal.StrictList
( StrictList(..)
, append
)
import Data.Maybe (fromJust)
import Data.Traversable (traverse)
import GHC.MVar (MVar(MVar))
import GHC.IO (IO(IO))
import GHC.Prim (mkWeak#)
import GHC.Weak (Weak(Weak))
-- We use a TCHan rather than a Chan so that we have a non-blocking read
data CQueue a = CQueue (StrictMVar (StrictList a)) -- Arrived
(TChan a) -- Incoming
(TVar Int) -- Queue size
-- | Build an empty queue: no arrived messages, an empty incoming
-- channel, and a size counter of zero.
newCQueue :: IO (CQueue a)
newCQueue = do
  arrived  <- newMVar Nil
  incoming <- atomically newTChan
  size     <- newTVarIO 0
  return $ CQueue arrived incoming size
-- | Enqueue an element
--
-- Enqueue is strict.
enqueue :: CQueue a -> a -> IO ()
enqueue q !x = atomically $ enqueueSTM q x
-- | Variant of enqueue for use in the STM monad.  Writes the element to
-- the incoming channel and bumps the size counter in one transaction.
enqueueSTM :: CQueue a -> a -> STM ()
enqueueSTM (CQueue _arrived incoming size) !x =
  writeTChan incoming x >> modifyTVar' size succ
-- | How 'dequeue' behaves when no message matches immediately.
data BlockSpec =
    NonBlocking     -- ^ return Nothing straight away
  | Blocking        -- ^ wait indefinitely for a match
  | Timeout Int     -- ^ wait at most this long (passed to 'timeout')
-- | One way of matching: inspect a mailbox message, or read a channel.
data MatchOn m a
 = MatchMsg (m -> Maybe a)
 | MatchChan (STM a)
 deriving (Functor)
-- | Matches pre-grouped into maximal runs of message- vs channel-matchers.
type MatchChunks m a = [Either [m -> Maybe a] [STM a]]
-- | Group a match list into alternating runs of message-matchers
-- ('Left') and channel-matchers ('Right'), preserving original order.
chunkMatches :: [MatchOn m a] -> MatchChunks m a
chunkMatches [] = []
chunkMatches (MatchMsg m : ms) =
  let (run, rest) = spanMatchMsg ms
  in Left (m : run) : chunkMatches rest
chunkMatches (MatchChan r : ms) =
  let (run, rest) = spanMatchChan ms
  in Right (r : run) : chunkMatches rest
-- | Longest leading run of 'MatchMsg' matchers, plus the remainder
-- (which is empty or starts with a 'MatchChan').
spanMatchMsg :: [MatchOn m a] -> ([m -> Maybe a], [MatchOn m a])
spanMatchMsg [] = ([],[])
spanMatchMsg (m : ms)
  | MatchMsg msg <- m = (msg:msgs, rest)
  | otherwise = ([], m:ms)
  where !(msgs,rest) = spanMatchMsg ms
-- | Longest leading run of 'MatchChan' matchers, plus the remainder
-- (which is empty or starts with a 'MatchMsg').
spanMatchChan :: [MatchOn m a] -> ([STM a], [MatchOn m a])
spanMatchChan [] = ([],[])
spanMatchChan (m : ms)
  | MatchChan stm <- m = (stm:stms, rest)
  | otherwise = ([], m:ms)
  where !(stms,rest) = spanMatchChan ms
-- | Dequeue an element
--
-- The timeout (if any) is applied only to waiting for incoming messages, not
-- to checking messages that have already arrived
dequeue :: forall m a.
CQueue m -- ^ Queue
-> BlockSpec -- ^ Blocking behaviour
-> [MatchOn m a] -- ^ List of matches
-> IO (Maybe a) -- ^ 'Nothing' only on timeout
dequeue (CQueue arrived incoming size) blockSpec matchons = mask_ $ decrementJust $
case blockSpec of
Timeout n -> timeout n $ fmap fromJust run
_other ->
case chunks of
[Right ports] -> -- channels only, this is easy:
case blockSpec of
NonBlocking -> atomically $ waitChans ports (return Nothing)
_ -> atomically $ waitChans ports retry
-- no onException needed
_other -> run
where
-- Decrement counter is smth is returned from the queue,
-- this is safe to use as method is called under a mask
-- and there is no 'unmasked' operation inside
decrementJust f =
traverse (either return (\x -> decrement >> return x)) =<< f
decrement = atomically $ modifyTVar' size pred
chunks = chunkMatches matchons
run = do
arr <- takeMVar arrived
let grabNew xs = do
r <- atomically $ tryReadTChan incoming
case r of
Nothing -> return xs
Just x -> grabNew (Snoc xs x)
arr' <- grabNew arr
goCheck chunks arr'
waitChans ports on_block =
foldr orElse on_block (map (fmap (Just . Left)) ports)
--
-- First check the MatchChunks against the messages already in the
-- mailbox. For channel matches, we do a non-blocking check at
-- this point.
--
goCheck :: MatchChunks m a
-> StrictList m -- messages to check, in this order
-> IO (Maybe (Either a a))
goCheck [] old = goWait old
goCheck (Right ports : rest) old = do
r <- atomically $ waitChans ports (return Nothing) -- does not block
case r of
Just _ -> returnOld old r
Nothing -> goCheck rest old
goCheck (Left matches : rest) old = do
-- checkArrived might in principle take arbitrary time, so
-- we ought to call restore and use an exception handler. However,
-- the check is usually fast (just a comparison), and the overhead
-- of passing around restore and setting up exception handlers is
-- high. So just don't use expensive matchIfs!
case checkArrived matches old of
(old', Just r) -> returnOld old' (Just (Right r))
(old', Nothing) -> goCheck rest old'
-- use the result list, which is now left-biased
--
-- Construct an STM transaction that looks at the relevant channels
-- in the correct order.
--
mkSTM :: MatchChunks m a -> STM (Either m a)
mkSTM [] = retry
mkSTM (Left _ : rest)
= fmap Left (readTChan incoming) `orElse` mkSTM rest
mkSTM (Right ports : rest)
= foldr orElse (mkSTM rest) (map (fmap Right) ports)
waitIncoming :: IO (Maybe (Either m a))
waitIncoming = case blockSpec of
NonBlocking -> atomically $ fmap Just stm `orElse` return Nothing
_ -> atomically $ fmap Just stm
where
stm = mkSTM chunks
--
-- The initial pass didn't find a message, so now we go into blocking
-- mode.
--
-- Contents of 'arrived' from now on is (old ++ new), and
-- messages that arrive are snocced onto new.
--
goWait :: StrictList m -> IO (Maybe (Either a a))
goWait old = do
r <- waitIncoming `onException` putMVar arrived old
case r of
-- Nothing => non-blocking and no message
Nothing -> returnOld old Nothing
Just e -> case e of
--
-- Left => message arrived in the process mailbox. We now have to
-- run through the MatchChunks checking each one, because we might
-- have a situation where the first chunk fails to match and the
-- second chunk is a channel match and there *is* a message in the
-- channel. In that case the channel wins.
--
Left m -> goCheck1 chunks m old
--
-- Right => message arrived on a channel first
--
Right a -> returnOld old (Just (Left a))
--
-- A message arrived in the process inbox; check the MatchChunks for
-- a valid match.
--
goCheck1 :: MatchChunks m a
-> m -- single message to check
-> StrictList m -- old messages we have already checked
-> IO (Maybe (Either a a))
goCheck1 [] m old = goWait (Snoc old m)
goCheck1 (Right ports : rest) m old = do
r <- atomically $ waitChans ports (return Nothing) -- does not block
case r of
Nothing -> goCheck1 rest m old
Just _ -> returnOld (Snoc old m) r
goCheck1 (Left matches : rest) m old = do
case checkMatches matches m of
Nothing -> goCheck1 rest m old
Just p -> returnOld old (Just (Right p))
-- a common pattern for putting back the arrived queue at the end
returnOld :: StrictList m -> Maybe (Either a a) -> IO (Maybe (Either a a))
returnOld old r = do putMVar arrived old; return r
-- as a side-effect, this left-biases the list
checkArrived :: [m -> Maybe a] -> StrictList m -> (StrictList m, Maybe a)
checkArrived matches list = go list Nil
where
go Nil Nil = (Nil, Nothing)
go Nil r = go r Nil
go (Append xs ys) tl = go xs (append ys tl)
go (Snoc xs x) tl = go xs (Cons x tl)
go (Cons x xs) tl
| Just y <- checkMatches matches x = (append xs tl, Just y)
| otherwise = let !(rest,r) = go xs tl in (Cons x rest, r)
checkMatches :: [m -> Maybe a] -> m -> Maybe a
checkMatches [] _ = Nothing
checkMatches (m:ms) a = case m a of Nothing -> checkMatches ms a
Just b -> Just b
-- | Weak reference to a CQueue
--
-- Keys the weak pointer on the primitive MVar# inside the \'arrived\'
-- StrictMVar, so the finalizer runs once the queue is unreachable.
mkWeakCQueue :: CQueue a -> IO () -> IO (Weak (CQueue a))
mkWeakCQueue m@(CQueue (StrictMVar (MVar m#)) _ _) f = IO $ \s ->
  case mkWeak# m# m f s of (# s1, w #) -> (# s1, Weak w #)
-- | Read the queue's element counter (covers arrived and incoming).
queueSize :: CQueue a -> IO Int
queueSize (CQueue _arrived _incoming size) = readTVarIO size
| tweag/distributed-process | src/Control/Distributed/Process/Internal/CQueue.hs | bsd-3-clause | 9,244 | 0 | 19 | 2,795 | 2,673 | 1,387 | 1,286 | 179 | 24 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE NoDisambiguateRecordFields, NoRecordWildCards #-}
module Servant.Server.Auth.Token.Acid.Schema where
import Control.Monad.Reader
import Control.Monad.State
import Data.Acid
import Data.Aeson.WithField
import Data.Int
import Data.List (sortBy)
import Data.Map.Strict (Map)
import Data.Ord
import Data.SafeCopy
import Data.Text (Text)
import Data.Time
import Language.Haskell.TH
import Safe
import Servant.API.Auth.Token
import Servant.API.Auth.Token.Pagination
import Servant.Server.Auth.Token.Common
import Servant.Server.Auth.Token.Model(
UserImplId
, UserImpl(..)
, UserPermId
, UserPerm(..)
, AuthTokenId
, AuthToken(..)
, UserRestoreId
, UserRestore(..)
, UserSingleUseCodeId
, UserSingleUseCode(..)
, AuthUserGroupId
, AuthUserGroup(..)
, AuthUserGroupUsersId
, AuthUserGroupUsers(..)
, AuthUserGroupPermsId
, AuthUserGroupPerms(..)
)
import qualified Data.Map.Strict as M
import qualified Data.Foldable as F
-- | Holds all data for auth server in acid-state container.
--
-- The nine 'Map' fields act as tables; the trailing 'Int64' fields are
-- monotonically increasing id counters, one per table that allocates
-- fresh keys (see the @insert*@ updates generated by 'deriveQueries').
data Model = Model {
  -- | Holds users by id
  modelUsers :: !(Map UserImplId UserImpl)
  -- | Holds users by login (same content as 'modelUsers')
, modelUsersByLogin :: !(Map Login (WithId UserImplId UserImpl))
  -- | Holds 'UserPerm'
, modelUserPerms :: !(Map UserPermId UserPerm)
  -- | Holds 'AuthToken'
, modelAuthTokens :: !(Map AuthTokenId AuthToken)
  -- | Holds 'UserRestore'
, modelUserRestores :: !(Map UserRestoreId UserRestore)
  -- | Holds 'UserSingleUseCode'
, modelUserSingleUseCodes :: !(Map UserSingleUseCodeId UserSingleUseCode)
  -- | Holds 'AuthUserGroup'
, modelAuthUserGroups :: !(Map AuthUserGroupId AuthUserGroup)
  -- | Holds 'AuthUserGroupUsers'
, modelAuthUserGroupUsers :: !(Map AuthUserGroupUsersId AuthUserGroupUsers)
  -- | Holds 'AuthUserGroupPerms'
, modelAuthUserGroupPerms :: !(Map AuthUserGroupPermsId AuthUserGroupPerms)
  -- | Next fresh key for 'modelUsers'
, modelNextUserImplId :: !Int64
  -- | Next fresh key for 'modelUserPerms'
, modelNextUserPermId :: !Int64
  -- | Next fresh key for 'modelAuthTokens'
, modelNextAuthTokenId :: !Int64
  -- | Next fresh key for 'modelUserRestores'
, modelNextUserRestoreId :: !Int64
  -- | Next fresh key for 'modelUserSingleUseCodes'
, modelNextUserSingleUseCodeId :: !Int64
  -- | Next fresh key for 'modelAuthUserGroups'
, modelNextAuthUserGroupId :: !Int64
  -- | Next fresh key for 'modelAuthUserGroupUsers'
, modelNextAuthUserGroupUserId :: !Int64
  -- | Next fresh key for 'modelAuthUserGroupPerms'
, modelNextAuthUserGroupPermId :: !Int64
}
-- | Defines empty model for new database: every table empty, every id
-- counter starting at zero. Arguments follow the field order of the
-- 'Model' declaration (nine tables, then eight counters).
newModel :: Model
newModel =
  Model
    mempty -- modelUsers
    mempty -- modelUsersByLogin
    mempty -- modelUserPerms
    mempty -- modelAuthTokens
    mempty -- modelUserRestores
    mempty -- modelUserSingleUseCodes
    mempty -- modelAuthUserGroups
    mempty -- modelAuthUserGroupUsers
    mempty -- modelAuthUserGroupPerms
    0      -- modelNextUserImplId
    0      -- modelNextUserPermId
    0      -- modelNextAuthTokenId
    0      -- modelNextUserRestoreId
    0      -- modelNextUserSingleUseCodeId
    0      -- modelNextAuthUserGroupId
    0      -- modelNextAuthUserGroupUserId
    0      -- modelNextAuthUserGroupPermId
-- | The end user should implement this for his global type
class HasModelRead a where
  -- | Project the auth 'Model' out of the user's global state.
  askModel :: a -> Model

-- | The end user should implement this for his global type
class HasModelRead a => HasModelWrite a where
  -- | Store an updated auth 'Model' back into the user's global state.
  putModel :: a -> Model -> a
-- | List of queries of the backend. Can be used if you want additional queries alongside
-- with the auth ones.
--
-- Usage:
-- @
-- makeAcidic ''Model (acidQueries ++ [{- your queries herer-}])
-- @
--
-- Each name refers to one of the definitions generated by
-- 'deriveQueries'.
acidQueries :: [Name]
acidQueries = map mkName
  [ "getUserImpl"
  , "getUserImplByLogin"
  , "listUsersPaged"
  , "getUserImplPermissions"
  , "deleteUserPermissions"
  , "insertUserPerm"
  , "insertUserImpl"
  , "replaceUserImpl"
  , "deleteUserImpl"
  , "hasPerm"
  , "getFirstUserByPerm"
  , "selectUserImplGroups"
  , "clearUserImplGroups"
  , "insertAuthUserGroup"
  , "insertAuthUserGroupUsers"
  , "insertAuthUserGroupPerms"
  , "getAuthUserGroup"
  , "listAuthUserGroupPermissions"
  , "listAuthUserGroupUsers"
  , "replaceAuthUserGroup"
  , "clearAuthUserGroupUsers"
  , "clearAuthUserGroupPerms"
  , "deleteAuthUserGroup"
  , "listGroupsPaged"
  , "setAuthUserGroupName"
  , "setAuthUserGroupParent"
  , "insertSingleUseCode"
  , "setSingleUseCodeUsed"
  , "getUnusedCode"
  , "invalidatePermamentCodes"
  , "selectLastRestoreCode"
  , "insertUserRestore"
  , "findRestoreCode"
  , "replaceRestoreCode"
  , "findAuthToken"
  , "findAuthTokenByValue"
  , "insertAuthToken"
  , "replaceAuthToken"
  ]
-- | The end user should inline this TH in his code.
--
-- Runs acid-state's 'makeAcidic' for the given global state type with
-- the standard auth query set ('acidQueries').
makeModelAcidic :: Name -> DecsQ
makeModelAcidic stateName = makeAcidic stateName acidQueries
-- | The model trivially contains itself.
instance HasModelRead Model where
  askModel m = m

-- | Writing replaces the model wholesale.
instance HasModelWrite Model where
  putModel _ m = m
-- | Run a pure projection of the auth model inside an acid-state query.
asksM :: HasModelRead a => (Model -> b) -> Query a b
asksM f = do
  global <- ask
  return (f (askModel global))

-- | Strictly apply a pure transformation of the auth model inside an
-- acid-state update.
modifyM :: HasModelWrite a => (Model -> Model) -> Update a ()
modifyM f = modify' (\global -> putModel global (f (askModel global)))

-- | Fetch the auth model from the global state inside an update.
getM :: HasModelWrite a => Update a Model
getM = liftM askModel get

-- | Overwrite the auth model inside the global state.
putM :: HasModelWrite a => Model -> Update a ()
putM = modifyM . const
-- | Mixin queries to work with auth state.
--
-- Generates every Query/Update named in 'acidQueries' for the user's
-- global state type. The @$a@ antiquote below is spliced with the
-- given type name (see the @where@ clause), so all definitions are
-- written against 'HasModelRead' / 'HasModelWrite'.
deriveQueries :: Name -> DecsQ
deriveQueries globalStateName = [d|
  -- | Getting user from storage
  getUserImpl :: HasModelRead $a => UserImplId -> Query $a (Maybe UserImpl)
  getUserImpl i = M.lookup i <$> asksM modelUsers

  -- | Getting user from storage by login
  getUserImplByLogin :: HasModelRead $a => Login -> Query $a (Maybe (WithId UserImplId UserImpl))
  getUserImplByLogin l = M.lookup l <$> asksM modelUsersByLogin

  -- | Helper to get page from map (page of rows, plus total row count)
  getPagedList :: Ord i => Page -> PageSize -> Map i a -> ([WithId i a], Word)
  getPagedList p s m = (uncurry WithField <$> es, fromIntegral $ F.length m)
    where
      -- rows are ordered by key before slicing out the requested page
      es = take (fromIntegral s) . drop (fromIntegral $ p * s) . sortBy (comparing fst) . M.toList $ m

  -- | Get paged list of users and total count of users
  listUsersPaged :: HasModelRead $a => Page -> PageSize -> Query $a ([WithId UserImplId UserImpl], Word)
  listUsersPaged p s = getPagedList p s <$> asksM modelUsers

  -- | Get user permissions, ascending by tag
  getUserImplPermissions :: HasModelRead $a => UserImplId -> Query $a [WithId UserPermId UserPerm]
  getUserImplPermissions i = fmap (uncurry WithField) . M.toList . M.filter ((i ==) . userPermUser) <$> asksM modelUserPerms

  -- | Delete user permissions
  deleteUserPermissions :: HasModelWrite $a => UserImplId -> Update $a ()
  deleteUserPermissions i = modifyM $ \m -> m { modelUserPerms = f $ modelUserPerms m }
    where
      f m = m `M.difference` M.filter ((i ==) . userPermUser) m

  -- | Insertion of new user permission
  insertUserPerm :: HasModelWrite $a => UserPerm -> Update $a UserPermId
  insertUserPerm p = do
    m <- getM
    let
      i = toKey $ modelNextUserPermId m
      perms = M.insert i p . modelUserPerms $ m
      m' = m { modelUserPerms = perms, modelNextUserPermId = modelNextUserPermId m + 1 }
    -- force the new model before storing it, to avoid thunk build-up
    m' `seq` putM m'
    return i

  -- | Insertion of new user (kept in sync in both user tables)
  insertUserImpl :: HasModelWrite $a => UserImpl -> Update $a UserImplId
  insertUserImpl v = do
    m <- getM
    let
      i = toKey $ modelNextUserImplId m
      vals = M.insert i v . modelUsers $ m
      vals' = M.insert (userImplLogin v) (WithField i v) . modelUsersByLogin $ m
      m' = m { modelUsers = vals, modelUsersByLogin = vals', modelNextUserImplId = modelNextUserImplId m + 1 }
    m' `seq` putM m'
    return i

  -- | Replace user with new value (updates both user tables)
  replaceUserImpl :: HasModelWrite $a => UserImplId -> UserImpl -> Update $a ()
  replaceUserImpl i v = modifyM $ \m -> m {
      modelUsers = M.insert i v . modelUsers $ m
    , modelUsersByLogin = M.insert (userImplLogin v) (WithField i v) . modelUsersByLogin $ m
    }

  -- | Delete user by id (also removes the user's permissions)
  deleteUserImpl :: HasModelWrite $a => UserImplId -> Update $a ()
  deleteUserImpl i = do
    deleteUserPermissions i
    modifyM $ \m -> case M.lookup i . modelUsers $ m of
      Nothing -> m
      Just ui -> m {
          modelUsers = M.delete i . modelUsers $ m
        , modelUsersByLogin = M.delete (userImplLogin ui) . modelUsersByLogin $ m
        }

  -- | Check whether the user has particular permission
  hasPerm :: HasModelRead $a => UserImplId -> Permission -> Query $a Bool
  hasPerm i p = (> 0) . F.length . M.filter (\up -> userPermUser up == i && userPermPermission up == p) <$> asksM modelUserPerms

  -- | Get any user with given permission
  getFirstUserByPerm :: HasModelRead $a => Permission -> Query $a (Maybe (WithId UserImplId UserImpl))
  getFirstUserByPerm perm = do
    m <- asksM modelUserPerms
    case M.toList . M.filter (\p -> userPermPermission p == perm) $ m of
      [] -> return Nothing
      ((_, p) : _) -> fmap (WithField $ userPermUser p) <$> getUserImpl (userPermUser p)

  -- | Select user groups and sort them by ascending name
  selectUserImplGroups :: HasModelRead $a => UserImplId -> Query $a [WithId AuthUserGroupUsersId AuthUserGroupUsers]
  selectUserImplGroups i = fmap (uncurry WithField) . M.toList . M.filter ((i ==) . authUserGroupUsersUser) <$> asksM modelAuthUserGroupUsers

  -- | Remove user from all groups
  clearUserImplGroups :: HasModelWrite $a => UserImplId -> Update $a ()
  clearUserImplGroups i = modifyM $ \m -> m { modelAuthUserGroupUsers = f $ modelAuthUserGroupUsers m }
    where
      f m = m `M.difference` M.filter ((i ==) . authUserGroupUsersUser) m

  -- | Add new user group
  insertAuthUserGroup :: HasModelWrite $a => AuthUserGroup -> Update $a AuthUserGroupId
  insertAuthUserGroup v = do
    m <- getM
    let
      i = toKey $ modelNextAuthUserGroupId m
      vals = M.insert i v . modelAuthUserGroups $ m
      m' = m { modelAuthUserGroups = vals, modelNextAuthUserGroupId = modelNextAuthUserGroupId m + 1 }
    m' `seq` putM m'
    return i

  -- | Add user to given group
  insertAuthUserGroupUsers :: HasModelWrite $a => AuthUserGroupUsers -> Update $a AuthUserGroupUsersId
  insertAuthUserGroupUsers v = do
    m <- getM
    let
      i = toKey $ modelNextAuthUserGroupUserId m
      vals = M.insert i v . modelAuthUserGroupUsers $ m
      m' = m { modelAuthUserGroupUsers = vals, modelNextAuthUserGroupUserId = modelNextAuthUserGroupUserId m + 1 }
    m' `seq` putM m'
    return i

  -- | Add permission to given group
  insertAuthUserGroupPerms :: HasModelWrite $a => AuthUserGroupPerms -> Update $a AuthUserGroupPermsId
  insertAuthUserGroupPerms v = do
    m <- getM
    let
      i = toKey $ modelNextAuthUserGroupPermId m
      vals = M.insert i v . modelAuthUserGroupPerms $ m
      m' = m { modelAuthUserGroupPerms = vals, modelNextAuthUserGroupPermId = modelNextAuthUserGroupPermId m + 1 }
    m' `seq` putM m'
    return i

  -- | Find user group by id
  getAuthUserGroup :: HasModelRead $a => AuthUserGroupId -> Query $a (Maybe AuthUserGroup)
  getAuthUserGroup i = M.lookup i <$> asksM modelAuthUserGroups

  -- | Get list of permissions of given group
  listAuthUserGroupPermissions :: HasModelRead $a => AuthUserGroupId -> Query $a [WithId AuthUserGroupPermsId AuthUserGroupPerms]
  listAuthUserGroupPermissions i = fmap (uncurry WithField) . M.toList . M.filter ((i ==) . authUserGroupPermsGroup) <$> asksM modelAuthUserGroupPerms

  -- | Get list of all users of the group
  listAuthUserGroupUsers :: HasModelRead $a => AuthUserGroupId -> Query $a [WithId AuthUserGroupUsersId AuthUserGroupUsers]
  listAuthUserGroupUsers i = fmap (uncurry WithField) . M.toList . M.filter ((i ==) . authUserGroupUsersGroup) <$> asksM modelAuthUserGroupUsers

  -- | Replace record of user group
  replaceAuthUserGroup :: HasModelWrite $a => AuthUserGroupId -> AuthUserGroup -> Update $a ()
  replaceAuthUserGroup i v = modifyM $ \m -> m { modelAuthUserGroups = M.insert i v $ modelAuthUserGroups m }

  -- | Remove all users from group
  clearAuthUserGroupUsers :: HasModelWrite $a => AuthUserGroupId -> Update $a ()
  clearAuthUserGroupUsers i = modifyM $ \m -> m { modelAuthUserGroupUsers = f $ modelAuthUserGroupUsers m }
    where
      f m = m `M.difference` M.filter ((i ==) . authUserGroupUsersGroup) m

  -- | Remove all permissions from group
  clearAuthUserGroupPerms :: HasModelWrite $a => AuthUserGroupId -> Update $a ()
  clearAuthUserGroupPerms i = modifyM $ \m -> m { modelAuthUserGroupPerms = f $ modelAuthUserGroupPerms m }
    where
      f m = m `M.difference` M.filter ((i ==) . authUserGroupPermsGroup) m

  -- | Delete user group from storage (members and permissions first)
  deleteAuthUserGroup :: HasModelWrite $a => AuthUserGroupId -> Update $a ()
  deleteAuthUserGroup i = do
    clearAuthUserGroupUsers i
    clearAuthUserGroupPerms i
    modifyM $ \m -> m { modelAuthUserGroups = M.delete i $ modelAuthUserGroups m }

  -- | Get paged list of user groups with total count
  listGroupsPaged :: HasModelRead $a => Page -> PageSize -> Query $a ([WithId AuthUserGroupId AuthUserGroup], Word)
  listGroupsPaged p s = getPagedList p s <$> asksM modelAuthUserGroups

  -- | Set group name
  setAuthUserGroupName :: HasModelWrite $a => AuthUserGroupId -> Text -> Update $a ()
  setAuthUserGroupName i n = modifyM $ \m -> m { modelAuthUserGroups = M.adjust (\v -> v { authUserGroupName = n }) i $ modelAuthUserGroups m }

  -- | Set group parent
  setAuthUserGroupParent :: HasModelWrite $a => AuthUserGroupId -> Maybe AuthUserGroupId -> Update $a ()
  setAuthUserGroupParent i p = modifyM $ \m -> m { modelAuthUserGroups = M.adjust (\v -> v { authUserGroupParent = p }) i $ modelAuthUserGroups m }

  -- | Add new single use code
  insertSingleUseCode :: HasModelWrite $a => UserSingleUseCode -> Update $a UserSingleUseCodeId
  insertSingleUseCode v = do
    m <- getM
    let
      i = toKey $ modelNextUserSingleUseCodeId m
      vals = M.insert i v . modelUserSingleUseCodes $ m
      m' = m { modelUserSingleUseCodes = vals, modelNextUserSingleUseCodeId = modelNextUserSingleUseCodeId m + 1 }
    m' `seq` putM m'
    return i

  -- | Set usage time of the single use code
  setSingleUseCodeUsed :: HasModelWrite $a => UserSingleUseCodeId -> Maybe UTCTime -> Update $a ()
  setSingleUseCodeUsed i mt = modifyM $ \m -> m { modelUserSingleUseCodes = M.adjust (\v -> v { userSingleUseCodeUsed = mt }) i $ modelUserSingleUseCodes m }

  -- | Find unused code for the user and expiration time greater than the given time
  getUnusedCode :: HasModelRead $a => SingleUseCode -> UserImplId -> UTCTime -> Query $a (Maybe (WithId UserSingleUseCodeId UserSingleUseCode))
  getUnusedCode c i t = fmap (uncurry WithField) . headMay . sorting . M.toList . M.filter f <$> asksM modelUserSingleUseCodes
    where
      -- latest-expiring code wins
      sorting = sortBy (comparing $ Down . userSingleUseCodeExpire . snd)
      f usc =
        userSingleUseCodeValue usc == c
        && userSingleUseCodeUser usc == i
        && userSingleUseCodeUsed usc == Nothing
        -- Nothing expiry means a permanent code, which always qualifies
        && (userSingleUseCodeExpire usc == Nothing || userSingleUseCodeExpire usc >= Just t)

  -- | Invalidate all permament codes for user and set use time for them
  invalidatePermamentCodes :: HasModelWrite $a => UserImplId -> UTCTime -> Update $a ()
  invalidatePermamentCodes i t = modifyM $ \m -> m { modelUserSingleUseCodes = f $ modelUserSingleUseCodes m }
    where
      -- M.union is left-biased, so the invalidated copies replace the originals
      f m = (fmap invalidate . M.filter isPermament $ m) `M.union` m
      invalidate su = su { userSingleUseCodeUsed = Just t }
      isPermament usc =
        userSingleUseCodeUser usc == i
        && userSingleUseCodeUsed usc == Nothing
        && userSingleUseCodeExpire usc == Nothing

  -- | Select last valid restoration code by the given current time
  selectLastRestoreCode :: HasModelRead $a => UserImplId -> UTCTime -> Query $a (Maybe (WithId UserRestoreId UserRestore))
  selectLastRestoreCode i t = fmap (uncurry WithField) . headMay . sorting . M.toList . M.filter f <$> asksM modelUserRestores
    where
      sorting = sortBy (comparing $ Down . userRestoreExpire . snd)
      f ur = userRestoreUser ur == i && userRestoreExpire ur > t

  -- | Insert new restore code
  insertUserRestore :: HasModelWrite $a => UserRestore -> Update $a UserRestoreId
  insertUserRestore v = do
    m <- getM
    let
      i = toKey $ modelNextUserRestoreId m
      vals = M.insert i v . modelUserRestores $ m
      m' = m { modelUserRestores = vals, modelNextUserRestoreId = modelNextUserRestoreId m + 1 }
    m' `seq` putM m'
    return i

  -- | Find unexpired by the time restore code
  findRestoreCode :: HasModelRead $a => UserImplId -> RestoreCode -> UTCTime -> Query $a (Maybe (WithId UserRestoreId UserRestore))
  findRestoreCode i rc t = fmap (uncurry WithField) . headMay . sorting . M.toList . M.filter f <$> asksM modelUserRestores
    where
      sorting = sortBy (comparing $ Down . userRestoreExpire . snd)
      f ur = userRestoreUser ur == i && userRestoreValue ur == rc && userRestoreExpire ur > t

  -- | Replace restore code with new value
  replaceRestoreCode :: HasModelWrite $a => UserRestoreId -> UserRestore -> Update $a ()
  replaceRestoreCode i v = modifyM $ \m -> m { modelUserRestores = M.insert i v $ modelUserRestores m }

  -- | Find first non-expired by the time token for user
  findAuthToken :: HasModelRead $a => UserImplId -> UTCTime -> Query $a (Maybe (WithId AuthTokenId AuthToken))
  findAuthToken i t = fmap (uncurry WithField) . headMay . M.toList . M.filter f <$> asksM modelAuthTokens
    where
      f atok = authTokenUser atok == i && authTokenExpire atok > t

  -- | Find token by value
  findAuthTokenByValue :: HasModelRead $a => SimpleToken -> Query $a (Maybe (WithId AuthTokenId AuthToken))
  findAuthTokenByValue v = fmap (uncurry WithField) . headMay . M.toList . M.filter f <$> asksM modelAuthTokens
    where
      f atok = authTokenValue atok == v

  -- | Insert new token
  insertAuthToken :: HasModelWrite $a => AuthToken -> Update $a AuthTokenId
  insertAuthToken v = do
    m <- getM
    let
      i = toKey $ modelNextAuthTokenId m
      vals = M.insert i v . modelAuthTokens $ m
      m' = m { modelAuthTokens = vals, modelNextAuthTokenId = modelNextAuthTokenId m + 1 }
    m' `seq` putM m'
    return i

  -- | Replace auth token with new value
  replaceAuthToken :: HasModelWrite $a => AuthTokenId -> AuthToken -> Update $a ()
  replaceAuthToken i v = modifyM $ \m -> m { modelAuthTokens = M.insert i v $ modelAuthTokens m }
  |]
  where
  -- the user's state type, spliced in for every $a above
  a = conT globalStateName
-- Generate 'SafeCopy' instances (version 0, 'base' migration kind) for
-- every persisted entity and for the whole 'Model', as required by
-- acid-state's serialisation layer.
deriveSafeCopy 0 'base ''UserImplId
deriveSafeCopy 0 'base ''UserImpl
deriveSafeCopy 0 'base ''UserPermId
deriveSafeCopy 0 'base ''UserPerm
deriveSafeCopy 0 'base ''AuthTokenId
deriveSafeCopy 0 'base ''AuthToken
deriveSafeCopy 0 'base ''UserRestoreId
deriveSafeCopy 0 'base ''UserRestore
deriveSafeCopy 0 'base ''UserSingleUseCodeId
deriveSafeCopy 0 'base ''UserSingleUseCode
deriveSafeCopy 0 'base ''AuthUserGroupId
deriveSafeCopy 0 'base ''AuthUserGroup
deriveSafeCopy 0 'base ''AuthUserGroupUsersId
deriveSafeCopy 0 'base ''AuthUserGroupUsers
deriveSafeCopy 0 'base ''AuthUserGroupPermsId
deriveSafeCopy 0 'base ''AuthUserGroupPerms
deriveSafeCopy 0 'base ''Model
-- | Serialise a 'WithField' as its key followed by its value; the
-- phantom index @i@ carries no data and is not serialised.
instance (SafeCopy k, SafeCopy v) => SafeCopy (WithField i k v) where
  putCopy (WithField k v) = contain (safePut k >> safePut v)
  getCopy = contain $ do
    k <- safeGet
    v <- safeGet
    return (WithField k v)
| VyacheslavHashov/servant-auth-token | servant-auth-token-acid/src/Servant/Server/Auth/Token/Acid/Schema.hs | bsd-3-clause | 20,132 | 0 | 13 | 4,451 | 1,429 | 792 | 637 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
-- | This module provides data dependency resolution and
-- fault tolerance via /promises/ (known elsewhere as /futures/).
-- It's implemented in terms of the "Remote.Process" module.
module Remote.Task (
-- * Tasks and promises
TaskM, Promise, PromiseList(..),
runTask,
newPromise, newPromiseAt, newPromiseNear, newPromiseHere, newPromiseAtRole,
toPromise, toPromiseAt, toPromiseNear, toPromiseImm,
readPromise,
-- * MapReduce
MapReduce(..),
mapReduce,
-- * Useful auxilliaries
chunkify,
shuffle,
tsay,
tlogS,
Locality(..),
TaskException(..),
-- * Internals, not for general use
__remoteCallMetaData, serialEncodeA, serialDecodeA
) where
import Remote.Reg (putReg,getEntryByIdent,RemoteCallMetaData)
import Remote.Encoding (serialEncodePure,hGetPayload,hPutPayload,Payload,getPayloadContent,Serializable,serialDecode,serialEncode)
import Remote.Process (roundtripQuery, ServiceException(..), TransmitStatus(..),diffTime,getConfig,Config(..),matchProcessDown,terminate,nullPid,monitorProcess,TransmitException(..),MonitorAction(..),ptry,LogConfig(..),getLogConfig,setNodeLogConfig,nodeFromPid,LogLevel(..),LogTarget(..),logS,getLookup,say,LogSphere,NodeId,ProcessM,ProcessId,PayloadDisposition(..),getSelfPid,getSelfNode,matchUnknownThrow,receiveWait,receiveTimeout,roundtripResponse,roundtripResponseAsync,roundtripQueryImpl,match,makePayloadClosure,spawn,spawnLocal,spawnLocalAnd,setDaemonic,send,makeClosure)
import Remote.Closure (Closure(..))
import Remote.Peer (getPeers)
import Data.Dynamic (Dynamic, toDyn, fromDynamic)
import System.IO (withFile,IOMode(..))
import System.Directory (renameFile)
import Data.Binary (Binary,get,put,putWord8,getWord8)
import Control.Exception (SomeException,Exception,throw)
import Data.Typeable (Typeable)
import Control.Applicative (Applicative(..))
import Control.Monad (liftM,when)
import Control.Monad.Trans (liftIO)
import Control.Concurrent.MVar (MVar,modifyMVar,modifyMVar_,newMVar,newEmptyMVar,takeMVar,putMVar,readMVar,withMVar)
import qualified Data.Map as Map (Map,insert,lookup,empty,insertWith',toList)
import Data.List ((\\),union,nub,groupBy,sortBy,delete)
import Data.Time (UTCTime,getCurrentTime)
-- imports required for hashClosure; is there a lighter-weight of doing this?
import Data.Digest.Pure.MD5 (md5)
import Data.ByteString.Lazy.UTF8 (fromString)
import qualified Data.ByteString.Lazy as B (concat)
----------------------------------------------
-- * Promises and tasks
----------------------------------------------
-- | Identifier of a promise; allocated serially by the master
-- (see 'msNextId').
type PromiseId = Integer

-- | MD5 digest rendered as a string; used to name on-disk promise
-- files (see 'hashClosure').
type Hash = String

-- | A promise-chained list: each chunk carries one element plus a
-- promise of the remainder, terminated by 'PlNil'.
data PromiseList a = PlChunk a (Promise (PromiseList a))
                   | PlNil deriving Typeable
-- | Wire format: tag byte 0 = 'PlChunk' (element, then tail promise),
-- tag byte 1 = 'PlNil'.
instance (Serializable a) => Binary (PromiseList a) where
  put (PlChunk a p) = putWord8 0 >> put a >> put p
  put PlNil = putWord8 1
  get = do
    w <- getWord8
    case w of
      0 -> do
        a <- get
        p <- get
        return $ PlChunk a p
      1 -> return PlNil
      -- Previously a non-exhaustive case: a corrupt tag byte crashed
      -- with an irrefutable-pattern error instead of a decode failure.
      _ -> fail "PromiseList.get: invalid tag"
-- | The basic data type for expressing data dependence
-- in the 'TaskM' monad. A Promise represents a value that
-- may or may not have been computed yet; thus, it's like
-- a distributed thunk (in the sense of a non-strict unit
-- of evaluation). These are created by 'newPromise' and friends,
-- and the underlying value can be gotten with 'readPromise'.
--
-- 'PromiseBasic' names the nodeboss process that owns the value plus
-- the promise's id; 'PromiseImmediate' embeds an already-known value.
data Promise a = PromiseBasic { _psRedeemer :: ProcessId, _psId :: PromiseId }
               | PromiseImmediate a deriving Typeable
-- psRedeemer should maybe be wrapped in an IORef so that it can be updated in case of node failure
-- | Wire format: tag byte 0 = 'PromiseBasic' (redeemer pid, then id),
-- tag byte 1 = 'PromiseImmediate' (the embedded value).
instance (Serializable a) => Binary (Promise a) where
  put (PromiseBasic a b) = putWord8 0 >> put a >> put b
  put (PromiseImmediate a) = putWord8 1 >> put a
  get = do
    a <- getWord8
    case a of
      0 -> do
        b <- get
        c <- get
        return $ PromiseBasic b c
      1 -> do
        b <- get
        return $ PromiseImmediate b
      -- Previously a non-exhaustive case: a corrupt tag byte crashed
      -- with a pattern-match error instead of a decode failure.
      _ -> fail "Promise.get: invalid tag"
-- | Stores the data produced by a promise, in one of its
-- various forms. If it's currently in memory, we keep it
-- as a payload, to be decoded by its ultimate user (who
-- of course has the static type information), the time
-- it was last touched (so we know when to flush it),
-- and perhaps also a decoded version, so that it doesn't
-- need to be decoded repeatedly: this makes this go a lot
-- faster. If it's been flushed to disk, we keep track of
-- where, and if the promise didn't complete, but threw
-- an exception during its execution, we mark there here
-- as well: the exception will be propagated to
-- dependents.
data PromiseStorage = PromiseInMemory PromiseData UTCTime (Maybe Dynamic)
                    | PromiseOnDisk FilePath
                    | PromiseException String

-- | A promise's value in serialised form.
type PromiseData = Payload
{- UNUSED
type TimeStamp = UTCTime
-}
-- | Keeps track of what we know about currently running promises.
-- The closure and locality and provided by the initial call to
-- newPromise, the nodeboss is where it is currently running.
-- We need this info to deal with complaints.
data PromiseRecord = PromiseRecord ProcessId (Closure PromiseData) Locality

-- | State held by the master process while a task computation runs.
data MasterState = MasterState
  {
    -- | Promise IDs are allocated serially from here
    msNextId :: PromiseId,
    -- | All currently known nodes, with the role, node ID, and node boss. Updated asychronously by prober thread
    msNodes :: MVar [(String,NodeId,ProcessId)],
    -- | Given a nodeboss, which promises belong to it. Not sure what this is good for
    msAllocation :: Map.Map ProcessId [PromiseId],
    -- | Given a promise, what do we know about it. Include its nodeboss, its closure, and its locality preference
    msPromises :: Map.Map PromiseId PromiseRecord,
    -- | The locality preference of new worker tasks, if not specified otherwise
    msDefaultLocality :: Locality
  }
-- | Master message: allocate a new promise for the given closure, with
-- placement ('Locality') and 'Queueing' preferences.
data MmNewPromise = MmNewPromise (Closure Payload) Locality Queueing deriving (Typeable)

-- | Fields are serialised in declaration order.
instance Binary MmNewPromise where
  put (MmNewPromise clo loc que) = put clo >> put loc >> put que
  get = do
    clo <- get
    loc <- get
    que <- get
    return (MmNewPromise clo loc que)
-- | Master reply to 'MmNewPromise': either the nodeboss pid and fresh
-- promise id, or a failure marker.
data MmNewPromiseResponse = MmNewPromiseResponse ProcessId PromiseId
                          | MmNewPromiseResponseFail deriving (Typeable)

-- | Wire format: tag byte 0 = success (pid, then id), tag byte 1 = failure.
instance Binary MmNewPromiseResponse where
  put (MmNewPromiseResponse a b) =
    do putWord8 0
       put a
       put b
  put MmNewPromiseResponseFail = putWord8 1
  get = do
    a <- getWord8
    case a of
      0 -> do
        b <- get
        c <- get
        return $ MmNewPromiseResponse b c
      1 -> return MmNewPromiseResponseFail
      -- Previously a non-exhaustive case: a corrupt tag byte crashed
      -- with a pattern-match error instead of a decode failure.
      _ -> fail "MmNewPromiseResponse.get: invalid tag"
-- | Master message: request a status report.
data MmStatus = MmStatus deriving Typeable

-- | Carries no data; serialised as unit.
instance Binary MmStatus where
  put MmStatus = put ()
  get = return MmStatus

-- | Master reply to 'MmStatus': known nodes plus the per-nodeboss
-- promise allocation table.
data MmStatusResponse = MmStatusResponse [NodeId] (Map.Map ProcessId [PromiseId]) deriving Typeable

instance Binary MmStatusResponse where
  put (MmStatusResponse nids alloc) = put nids >> put alloc
  get = do
    nids <- get
    alloc <- get
    return (MmStatusResponse nids alloc)
-- | Master message: report that the given nodeboss failed to redeem
-- the given promise.
data MmComplain = MmComplain ProcessId PromiseId deriving (Typeable)

instance Binary MmComplain where
  put (MmComplain owner prid) = put owner >> put prid
  get = do
    owner <- get
    prid <- get
    return (MmComplain owner prid)

-- | Master reply to 'MmComplain': the pid to contact for the promise now.
data MmComplainResponse = MmComplainResponse ProcessId deriving (Typeable)

instance Binary MmComplainResponse where
  put (MmComplainResponse newOwner) = put newOwner
  get = do
    newOwner <- get
    return (MmComplainResponse newOwner)
-- | Notification that a new peer node has been discovered.
data TmNewPeer = TmNewPeer NodeId deriving (Typeable)

instance Binary TmNewPeer where
  put (TmNewPeer nid) = put nid
  get = do
    nid <- get
    return (TmNewPeer nid)
-- | Nodeboss message: start evaluating the given closure as the given
-- promise on this node.
data NmStart = NmStart PromiseId (Closure Payload) Queueing deriving (Typeable)

-- | Fields are serialised in declaration order.
instance Binary NmStart where
  put (NmStart prid clo que) = put prid >> put clo >> put que
  get = do
    prid <- get
    clo <- get
    que <- get
    return (NmStart prid clo que)

-- | Nodeboss reply to 'NmStart': whether the request was accepted.
data NmStartResponse = NmStartResponse Bool deriving (Typeable)

instance Binary NmStartResponse where
  put (NmStartResponse ok) = put ok
  get = do
    ok <- get
    return (NmStartResponse ok)
-- | Nodeboss message: request the value of the given promise.
data NmRedeem = NmRedeem PromiseId deriving (Typeable)

instance Binary NmRedeem where
  put (NmRedeem prid) = put prid
  get = do
    prid <- get
    return (NmRedeem prid)
-- | Nodeboss reply to 'NmRedeem': the serialised value, or "I don't
-- know this promise", or "the promise threw an exception".
data NmRedeemResponse = NmRedeemResponse Payload
                      | NmRedeemResponseUnknown
                      | NmRedeemResponseException deriving (Typeable)

-- | Wire format: tag byte 0 = value payload, 1 = unknown, 2 = exception.
instance Binary NmRedeemResponse where
  put (NmRedeemResponse a) = putWord8 0 >> put a
  put (NmRedeemResponseUnknown) = putWord8 1
  put (NmRedeemResponseException) = putWord8 2
  get = do
    a <- getWord8
    case a of
      0 -> do
        b <- get
        return $ NmRedeemResponse b
      1 -> return NmRedeemResponseUnknown
      2 -> return NmRedeemResponseException
      -- Previously a non-exhaustive case: a corrupt tag byte crashed
      -- with a pattern-match error instead of a decode failure.
      _ -> fail "NmRedeemResponse.get: invalid tag"
-- | Exception carrying a task-level error message; thrown via
-- 'taskError' and propagated to promise dependents.
data TaskException = TaskException String deriving (Show,Typeable)
instance Exception TaskException
-- | (Currently ignored.)
data Queueing = QuNone
              | QuExclusive
              | QuSmall deriving (Typeable,Ord,Eq)

-- | Queueing preference applied when none is given explicitly.
defaultQueueing :: Queueing
defaultQueueing = QuNone

-- | Wire format: one tag byte per constructor (0/1/2).
instance Binary Queueing where
  put QuNone = putWord8 0
  put QuExclusive = putWord8 1
  put QuSmall = putWord8 2
  get = do
    a <- getWord8
    case a of
      0 -> return QuNone
      1 -> return QuExclusive
      2 -> return QuSmall
      -- Previously a non-exhaustive case: a corrupt tag byte crashed
      -- with a pattern-match error instead of a decode failure.
      _ -> fail "Queueing.get: invalid tag"
-- | A specification of preference
-- of where a promise should be allocated,
-- among the nodes visible to the master.
data Locality = LcUnrestricted -- ^ The promise can be placed anywhere.
              | LcDefault -- ^ The default preference is applied, which is for nodes having a role of NODE of WORKER
              | LcByRole [String] -- ^ Nodes having the given roles will be preferred
              | LcByNode [NodeId] -- ^ The given nodes will be preferred

-- | Wire format: tag byte 0-3 per constructor; 'LcByRole' and
-- 'LcByNode' are followed by their list payload.
instance Binary Locality where
  put LcUnrestricted = putWord8 0
  put LcDefault = putWord8 1
  put (LcByRole a) = putWord8 2 >> put a
  put (LcByNode a) = putWord8 3 >> put a
  get = do
    a <- getWord8
    case a of
      0 -> return LcUnrestricted
      1 -> return LcDefault
      2 -> do
        r <- get
        return $ LcByRole r
      3 -> do
        r <- get
        return $ LcByNode r
      -- Previously a non-exhaustive case: a corrupt tag byte crashed
      -- with a pattern-match error instead of a decode failure.
      _ -> fail "Locality.get: invalid tag"
-- | Placement used for 'LcDefault': prefer nodes whose role is WORKER
-- or NODE.
defaultLocality :: Locality
defaultLocality = LcByRole ["WORKER","NODE"]
-- | Abort the current computation by throwing a 'TaskException' with
-- the given message.
taskError :: String -> a
taskError msg = throw (TaskException msg)
-- | Serialise a value inside 'TaskM'.
serialEncodeA :: (Serializable a) => a -> TaskM Payload
serialEncodeA x = liftTask (liftIO (serialEncode x))

-- | Deserialise a payload inside 'TaskM'; 'Nothing' on decode failure.
serialDecodeA :: (Serializable a) => Payload -> TaskM (Maybe a)
serialDecodeA pl = liftTask (liftIO (serialDecode pl))
-- | Have @monitor@ watch @monitee@, translating a 'ServiceException'
-- into a 'QteOther' status instead of letting it propagate.
monitorTask :: ProcessId -> ProcessId -> ProcessM TransmitStatus
monitorTask monitor monitee = do
  outcome <- ptry (monitorProcess monitor monitee MaMonitor)
  case outcome of
    Left (ServiceException msg) -> return (QteOther msg)
    Right _ -> return QteOK
-- | Send a request to a process and wait for its typed reply, with no
-- timeout (0) on the user payload channel.
roundtripImpl :: (Serializable a, Serializable b) => ProcessId -> a -> ProcessM (Either TransmitStatus b)
roundtripImpl pid payload = roundtripQueryImpl 0 PldUser pid payload id []
-- | Like 'roundtripImpl', but run inside 'TaskM': the first time we
-- talk to a given process we also start monitoring it, and remember
-- that in the task state's @tsMonitoring@ cache so the (relatively
-- expensive) 'monitorTask' call happens at most once per peer.
roundtrip :: (Serializable a, Serializable b) => ProcessId -> a -> TaskM (Either TransmitStatus b)
roundtrip apid dat =
  TaskM $ \ts ->
    case Map.lookup apid (tsMonitoring ts) of
      -- Never talked to this process before: establish monitoring
      -- first; only on success do the query and record the peer.
      Nothing -> do mypid <- getSelfPid
                    res0 <- monitorTask mypid apid
                    case res0 of
                      QteOK ->
                        do res <- roundtripImpl apid dat
                           return (ts {tsMonitoring=Map.insert apid () (tsMonitoring ts)},res)
                      _ -> return (ts,Left res0)
      -- Already monitored: just do the query.
      Just _ -> do res <- roundtripImpl apid dat
                   return (ts,res)
-- roundtrip a b = liftTask $ roundtripQueryUnsafe PldUser a b
-- | Spawn a local process and immediately mark it daemonic.
spawnDaemonic :: ProcessM () -> ProcessM ProcessId
spawnDaemonic body = spawnLocalAnd body setDaemonic
-- | Start a nodeboss on the given remote node, passing it the master's
-- pid via the registered 'runWorkerNode__impl' closure.
runWorkerNode :: ProcessId -> NodeId -> ProcessM ProcessId
runWorkerNode masterpid nid = do
  -- the string must match the registration in '__remoteCallMetaData'
  workerClo <- makeClosure "Remote.Task.runWorkerNode__impl" (masterpid) :: ProcessM (Closure (ProcessM ()))
  spawn nid workerClo
-- | Remote entry point spawned by 'runWorkerNode': decode the master's
-- pid from the payload and run the node manager under it.
runWorkerNode__impl :: Payload -> ProcessM ()
runWorkerNode__impl pl = do
  -- maybe it's good to have the node manager be daemonic, but prolly not. If so, the MASTERPID must be terminated when user-provided MASTERPROC ends
  setDaemonic
  decoded <- liftIO $ serialDecode pl
  case decoded of
    Just masterpid -> startNodeManager masterpid
    Nothing -> error "Failure to extract in rwn__impl"
-- | Remote implementation of the identity closure: hand back the
-- payload untouched.
passthrough__implPl :: Payload -> TaskM Payload
passthrough__implPl = return

-- | Wrap an already-known value in a closure that simply returns it.
passthrough__closure :: (Serializable a) => a -> Closure (TaskM a)
passthrough__closure x = Closure "Remote.Task.passthrough__impl" (serialEncodePure x)
-- | Register this module's remotely callable functions; the strings
-- must match those used by 'runWorkerNode' and 'passthrough__closure'.
__remoteCallMetaData :: RemoteCallMetaData
__remoteCallMetaData =
  putReg runWorkerNode__impl "Remote.Task.runWorkerNode__impl"
    . putReg passthrough__implPl "Remote.Task.passthrough__implPl"
-- | Refresh the last-touched timestamp of an in-memory promise (used
-- to decide when it may be flushed); other storage forms pass through.
updatePromiseInMemory :: PromiseStorage -> IO PromiseStorage
updatePromiseInMemory (PromiseInMemory p _ d) = do
  now <- getCurrentTime
  return (PromiseInMemory p now d)
updatePromiseInMemory other = return other

-- | Build an in-memory promise record stamped with the current time.
makePromiseInMemory :: PromiseData -> Maybe Dynamic -> IO PromiseStorage
makePromiseInMemory p dyn = do
  now <- getCurrentTime
  return (PromiseInMemory p now dyn)
-- | Point this node's logging at the master: if a master pid is given
-- and it lives on a different node, forward log output to that node;
-- otherwise (no master, or master is local) log to stdout.
forwardLogs :: Maybe ProcessId -> ProcessM ()
forwardLogs masterpid =
  do lc <- getLogConfig
     selfnid <- getSelfNode
     let newlc = lc {logTarget = case masterpid of
                                   Just mp
                                     | nodeFromPid mp /= selfnid -> LtForward $ nodeFromPid mp
                                   _ -> LtStdout}
       in setNodeLogConfig newlc
-- | Deterministic name for a closure: the MD5 digest of its function
-- name concatenated with its serialised argument payload. Used to name
-- on-disk promise cache files.
hashClosure :: Closure a -> Hash
hashClosure (Closure name payload) =
  show (md5 (B.concat [fromString name, getPayloadContent payload]))
-- | Bring a promise's value back into memory. If it was flushed to
-- disk, re-read the payload and replace the storage cell with a fresh
-- in-memory record; if it is already in memory, return it unchanged.
-- Returns 'Nothing' for exception-state promises or on read failure
-- (which is logged, not rethrown).
undiskify :: FilePath -> MVar PromiseStorage -> ProcessM (Maybe PromiseData)
undiskify fpIn mps =
  do wrap $ liftIO $ modifyMVar mps (\val ->
       case val of
         PromiseOnDisk fp ->
           do pl <- withFile fp ReadMode hGetPayload
              inmem <- makePromiseInMemory pl Nothing
              return (inmem,Just pl)
         PromiseInMemory payload _ _ -> return (val,Just payload)
         _ -> return (val,Nothing))
  where wrap a = do res <- ptry a
                    case res of
                      Left e -> do logS "TSK" LoCritical $ "Error reading promise from file "++fpIn++": "++show (e::IOError)
                                   return Nothing
                      Right r -> return r
-- | Flush an in-memory promise to disk once it has been idle for the
-- configured delay. Does nothing when the flush delay is 0 (flushing
-- disabled). Polls: waits one delay interval, then flushes only if the
-- value hasn't been touched since; otherwise loops. Writes go to a
-- @.tmp@ file first and are renamed into place, so readers never see a
-- partial file. When @reallywrite@ is False, only the storage cell is
-- switched to 'PromiseOnDisk' (the file is assumed present already).
diskify :: FilePath -> MVar PromiseStorage -> Bool -> ProcessM ()
diskify fp mps reallywrite =
  do cfg <- getConfig
     when (cfgPromiseFlushDelay cfg > 0)
       (handler (cfgPromiseFlushDelay cfg))
  where
    handler delay =
      do _ <- receiveTimeout delay []
         again <- wrap $ liftIO $ modifyMVar mps (\val ->
           case val of
             PromiseInMemory payload utc _ ->
               do now <- getCurrentTime
                  if diffTime now utc > delay
                    then do when reallywrite $
                              do liftIO $ withFile tmp WriteMode (\h -> hPutPayload h payload)
                                 renameFile tmp fp
                            return (PromiseOnDisk fp,False)
                    else return (val,True)
             _ -> return (val,False))
         when again
           (diskify fp mps reallywrite)
    tmp = fp ++ ".tmp"
    wrap a = do res <- ptry a
                case res of
                  Left z -> do logS "TSK" LoImportant $ "Error writing promise to disk on file "++fp++": "++show (z::IOError)
                               return False
                  Right v -> return v
-- | Spawn a local worker process that evaluates the given closure and
-- deposits the result into @mps@. The worker is linked to its nodeboss
-- and made daemonic before it starts. On success the value is stored
-- in memory (and scheduled for flushing to the promise cache file); if
-- the task throws, a 'PromiseException' is stored so dependents see
-- the failure, and the exception is re-thrown.
startNodeWorker :: ProcessId -> NodeBossState ->
                   MVar PromiseStorage -> Closure Payload -> ProcessM ()
startNodeWorker masterpid nbs mps clo@(Closure cloname cloarg) =
  do self <- getSelfPid
     _ <- spawnLocalAnd (starter self) (prefix self)
     return ()
  where
    -- runs in the new process before the task body: link to nodeboss,
    -- go daemonic
    prefix nodeboss =
      do self <- getSelfPid
         monitorProcess self nodeboss MaLink
         setDaemonic
    starter nodeboss = -- TODO try to do an undiskify here, if the promise is left over from a previous, failed run
      let initialState = TaskState {tsMaster=masterpid,tsNodeBoss=Just nodeboss,
                                    tsPromiseCache=nsPromiseCache nbs, tsRedeemerForwarding=nsRedeemerForwarding nbs,
                                    tsMonitoring=Map.empty}
          tasker = do tbl <- liftTask $ getLookup
                      case getEntryByIdent tbl cloname of
                        Just funval ->
                          do val <- funval cloarg
                             p <- liftTaskIO $ makePromiseInMemory val Nothing
                             liftTaskIO $ putMVar mps p
                             cfg <- liftTask $ getConfig
                             let cachefile = cfgPromisePrefix cfg++hashClosure clo
                             liftTask $ diskify cachefile mps True
                        Nothing -> taskError $ "Failed looking up "++cloname++" in closure table"
      in do res <- ptry $ runTaskM tasker initialState :: ProcessM (Either SomeException (TaskState,()))
            case res of
              Left ex -> liftIO (putMVar mps (PromiseException (show ex))) >> throw ex
              Right _ -> return ()
-- | Bookkeeping shared between a nodeboss and the workers it spawns.
data NodeBossState =
        NodeBossState
        {
          -- | Storage slot for each promise assigned to this node.
          nsPromiseCache :: MVar (Map.Map PromiseId (MVar PromiseStorage)),
          -- | Learned redirections: promise id -> process now hosting it.
          nsRedeemerForwarding :: MVar (Map.Map PromiseId ProcessId)
        }
-- | The nodeboss main loop for one master.  Handles: NmStart (allocate
-- a fresh promise slot and spawn a worker for it), NmRedeem (answer
-- redemption requests asynchronously, reloading from disk when
-- necessary), and shuts down when the owning master dies.
startNodeManager :: ProcessId -> ProcessM ()
startNodeManager masterpid =
  let
      handler :: NodeBossState -> ProcessM a
      handler state =
        let promisecache = nsPromiseCache state
            -- Create an empty slot, then start the worker that fills it.
            nmStart = roundtripResponse (\(NmStart promise clo _queueing) ->
              do promisestore <- liftIO $ newEmptyMVar
                 ret <- liftIO $ modifyMVar promisecache
                    (\pc -> let newpc = Map.insert promise promisestore pc
                             in return (newpc,True))
                 when (ret)
                      (startNodeWorker masterpid state promisestore clo)
                 return (NmStartResponse ret,state))
            -- Terminate this nodeboss when the master goes away.
            nmTermination = matchProcessDown masterpid $
                 do forwardLogs Nothing
                    logS "TSK" LoInformation $ "Terminating nodeboss after my master "++show masterpid++" is gone"
                    terminate
            -- Answer a redemption in a spawned process; readMVar may
            -- block until the worker has produced the value.
            nmRedeem = roundtripResponseAsync (\(NmRedeem promise) ans ->
                 let answerer = do pc <- liftIO $ readMVar promisecache
                                   case Map.lookup promise pc of
                                     Nothing -> ans NmRedeemResponseUnknown
                                     Just v -> do rv <- liftIO $ readMVar v -- possibly long wait
                                                  case rv of
                                                     PromiseInMemory rrv _ _ ->
                                                        do liftIO $ modifyMVar_ v (\_ -> updatePromiseInMemory rv)
                                                           ans (NmRedeemResponse rrv)
                                                     PromiseOnDisk fp -> do mpd <- undiskify fp v
                                                                            case mpd of
                                                                              Nothing ->
                                                                                 ans (NmRedeemResponseUnknown)
                                                                              Just a ->
                                                                                 ans (NmRedeemResponse a)
                                                                            diskify fp v False
                                                     PromiseException _ -> ans NmRedeemResponseException
                  in do _ <- spawnLocal answerer
                        return state) False
         in receiveWait [nmStart, nmRedeem, nmTermination, matchUnknownThrow] >>= handler
   in do forwardLogs $ Just masterpid
         mypid <- getSelfPid
         monitorProcess mypid masterpid MaMonitor
         logS "TSK" LoInformation $ "Starting a nodeboss owned by " ++ show masterpid
         pc <- liftIO $ newMVar Map.empty
         pf <- liftIO $ newMVar Map.empty
         let initState = NodeBossState {nsPromiseCache=pc,nsRedeemerForwarding=pf}
         handler initState
-- | Starts a new context for executing a 'TaskM' environment.
-- The node on which this function is run becomes a new master
-- in a Task application; as a result, the application should
-- only call this function once. The master will attempt to
-- control all nodes that it can find; if you are going to be
-- running more than one CH application on a single network,
-- be sure to give each application a different network
-- magic (via cfgNetworkMagic). The master TaskM environment
-- created by this function can then spawn other threads,
-- locally or remotely, using 'newPromise' and friends.
runTask :: TaskM a -> ProcessM a
runTask = startMaster
-- | Start the master machinery, run the user's 'TaskM' action with a
-- fresh 'TaskState' bound to this node's nodeboss, and block until the
-- action's result is handed back through an MVar.
startMaster :: TaskM a -> ProcessM a
startMaster proc =
  do mvmaster <- liftIO $ newEmptyMVar
     mvdone <- liftIO $ newEmptyMVar
     master <- runMaster (masterproc mvdone mvmaster)
     liftIO $ putMVar mvmaster master
     liftIO $ takeMVar mvdone
  where masterproc mvdone mvmaster nodeboss =
          do master <- liftIO $ takeMVar mvmaster
             pc <- liftIO $ newMVar Map.empty
             pf <- liftIO $ newMVar Map.empty
             let initialState = TaskState {tsMaster=master,tsNodeBoss=Just nodeboss,
                                           tsPromiseCache=pc, tsRedeemerForwarding=pf,
                                           tsMonitoring=Map.empty}
             res <- liftM snd $ runTaskM proc initialState
             liftIO $ putMVar mvdone res
{- UNUSED
type LocationSelector = MasterState -> ProcessM (NodeId,ProcessId)
-}
-- | Pick a node to host a new task, honoring the requested 'Locality'
-- when possible.  The node list is rotated round-robin; when a
-- role/node filter matches, the chosen node is moved to the back of
-- the list.  Returns Nothing only when no nodes are known at all.
selectLocation :: MasterState -> Locality -> ProcessM (Maybe (String,NodeId,ProcessId))
selectLocation ms locality =
  let nodes = msNodes ms
   in liftIO $ modifyMVar nodes
        (\n -> case n of
                  [] -> return (n,Nothing)
                  _ -> let dflt = (rotate n,Just $ head n)
                           filterify f = case filter f n of
                                            [] -> return dflt
                                            (a:_) -> return ((delete a n) ++ [a],Just a)
                        in case cond locality of
                             LcUnrestricted -> return dflt
                             LcDefault -> return dflt
                             LcByRole l -> filterify (\(r,_,_) -> r `elem` l)
                             LcByNode l -> filterify (\(_,r,_) -> r `elem` l))
  where rotate [] = []
        rotate (h:t) = t ++ [h]
        -- LcDefault defers to the master's configured default locality.
        cond l = case l of
                    LcDefault -> msDefaultLocality ms
                    _ -> l
-- | How many nodes the master currently knows about.
countLocations :: MasterState -> ProcessM Int
countLocations ms = liftIO $ withMVar (msNodes ms) (return . length)
-- | Flatten the peer registry into (role, node) pairs, one pair per
-- node, preserving the registry's ordering.
findPeers :: ProcessM [(String,NodeId)]
findPeers = liftM flatten getPeers
  where flatten peers = concatMap expand (Map.toList peers)
        expand (role,nids) = map (\nid -> (role,nid)) nids
-- | Send a message to a process, silently swallowing transmission
-- failures ('TransmitException').
sendSilent :: (Serializable a) => ProcessId -> a -> ProcessM ()
sendSilent pid msg =
  do outcome <- ptry $ send pid msg
     case outcome of
       Left (TransmitException _) -> return ()
       Right _ -> return ()
{- UNUSED
getStatus :: TaskM ()
getStatus =
do master <- getMaster
res <- roundtrip master MmStatus
case res of
Left _ -> return ()
Right (MmStatusResponse nodes promises) ->
let verboseNodes = intercalate ", " (map show nodes)
verbosePromises = intercalate "\n" $ map (\(nb,l) -> (show nb)++" -- "++intercalate "," (map show l)) (Map.toList promises)
in tsay $ "\nKnown nodes: " ++ verboseNodes ++ "\n\nNodebosses: " ++ verbosePromises
-}
-- | Start the master: probe the network for peers (spawning a nodeboss
-- on each newly-found node), run the master message loop (promise
-- allocation, complaints, status queries), start the caller's master
-- process linked to the local nodeboss, and keep re-probing the
-- network periodically in a daemonic prober process.
runMaster :: (ProcessId -> ProcessM ()) -> ProcessM ProcessId
runMaster masterproc =
  let
     -- One sweep of peer discovery: spawn nodebosses for new peers,
     -- drop nodes no longer reported, and notify the master loop of
     -- newly seen peers.
     probeOnce nodes seen masterpid =
          do recentlist <- findPeers -- TODO if a node fails to response to a probe even once, it's gone forever; be more flexible
             let newseen = seen `union` recentlist
             let topidlist = recentlist \\ seen
             let cleanOut n = filter (\(_,nid,_) -> nid `elem` (map snd recentlist)) n
             newlypidded <- mapM (\(role,nid) ->
                      do pid <- runWorkerNode masterpid nid
                         return (role,nid,pid)) topidlist
             (_newlist,totalseen) <- liftIO $ modifyMVar nodes (\oldlist ->
                  return ((cleanOut oldlist) ++ newlypidded,(recentlist,newseen)))
             let newlyadded = totalseen \\ seen
             mapM_ (\nid -> sendSilent masterpid (TmNewPeer nid)) (map snd newlyadded)
             return totalseen
     proberDelay = 10000000 -- how often do we check the network to see what nodes are available?
     prober nodes seen masterpid =
          do totalseen <- probeOnce nodes seen masterpid
             _ <- receiveTimeout proberDelay [matchUnknownThrow]
             prober nodes totalseen masterpid
     -- The master message loop; 'state' is threaded through each match.
     master state =
         let
             -- Try to start a closure on one node matching the locality.
             tryAlloc clo promiseid locality queueing =
                do ns <- selectLocation state locality
                   case ns of
                      Nothing -> do logS "TSK" LoCritical "Attempt to allocate a task, but no nodes found"
                                    return Nothing
                      Just (_,nid,nodeboss) ->
                         do res <- roundtripQuery PldUser nodeboss (NmStart promiseid clo queueing) -- roundtripQuery monitors and then unmonitors, which generates a lot of traffic; we probably don't need to do this
                            case res of
                               Left e ->
                                 do logS "TSK" LoImportant $ "Failed attempt to start "++show clo++" on " ++show nid ++": "++show e
                                    return Nothing
                               Right (NmStartResponse True) -> return $ Just nodeboss
                               _ -> do logS "TSK" LoImportant $ "Failed attempt to start "++show clo++" on " ++show nid
                                       return Nothing
             -- Allocate with the preferred locality first, then retry
             -- unrestricted up to one attempt per known node.
             basicAllocate clo promiseid locality queueing =
                 do count <- countLocations state
                    res1 <- tryAlloc clo promiseid locality queueing
                    case res1 of
                       Just _ -> return res1
                       Nothing -> -- TODO we should try all matching locations before moving on to Unrestricted
                          do res <- stubborn count $ tryAlloc clo promiseid LcUnrestricted queueing
                             case res of
                                Nothing -> do logS "TSK" LoCritical $ "Terminally failed to start "++show clo
                                              return res
                                _ -> return res
             statusMsg = roundtripResponse
                (\x -> case x of
                         MmStatus ->
                           do thenodes <- liftIO $ readMVar $ msNodes state
                              let knownNodes = map (\(_,n,_) -> n) thenodes
                                  proctree = msAllocation state
                              return (MmStatusResponse knownNodes proctree,state))
             -- A redeemer complains when it can't reach a promise's
             -- host; reallocate the promise and answer with its new home.
             complainMsg = roundtripResponse
                (\x -> case x of
                         MmComplain procid promid ->
                           case Map.lookup promid (msPromises state) of
                              Nothing -> return (MmComplainResponse nullPid,state) -- failure
                              Just (PromiseRecord curprocid curclo curlocality)
                                | curprocid /= procid -> return (MmComplainResponse curprocid,state)
                                | otherwise ->
                                    do res <- basicAllocate curclo promid curlocality defaultQueueing
                                       case res of
                                         Nothing -> return (MmComplainResponse nullPid,state) -- failure
                                         Just newprocid ->
                                            let newpromises = Map.insert promid (PromiseRecord newprocid curclo curlocality) (msPromises state)
                                             in return (MmComplainResponse newprocid,state {msPromises=newpromises}))
             promiseMsg = roundtripResponse
                (\x -> case x of
                         MmNewPromise clo locality queueing ->
                            do
                               let promiseid = msNextId state
                               res <- basicAllocate clo promiseid locality queueing
                               case res of
                                  Just nodeboss ->
                                     let newstate = state {msAllocation=newAllocation,msPromises=newPromises,msNextId=promiseid+1}
                                         newAllocation = Map.insertWith' (\a b -> nub $ a++b) nodeboss [promiseid] (msAllocation state)
                                         newPromises = Map.insert promiseid (PromiseRecord nodeboss clo locality) (msPromises state)
                                      in return (MmNewPromiseResponse nodeboss promiseid,newstate)
                                  Nothing ->
                                     return (MmNewPromiseResponseFail,state))
             simpleMsg = match
                (\x -> case x of
                         TmNewPeer nid -> do logS "TSK" LoInformation $ "Found new peer " ++show nid
                                             return state)
          in receiveWait [simpleMsg, promiseMsg, complainMsg,statusMsg] >>= master -- TODO matchUnknownThrow
   in do nodes <- liftIO $ newMVar []
         selfnode <- getSelfNode
         selfpid <- getSelfPid
         let initState = MasterState {msNextId=0, msAllocation=Map.empty, msPromises=Map.empty, msNodes=nodes, msDefaultLocality = defaultLocality}
         masterpid <- spawnDaemonic (master initState)
         seennodes <- probeOnce nodes [] masterpid
         let getByNid _ [] = Nothing
             getByNid nid ((_,n,nodeboss):xs) = if nid==n then Just nodeboss else getByNid nid xs
         -- The master's own node must be among the discovered peers so
         -- the user's master process has a local nodeboss to talk to.
         res <- liftIO $ withMVar nodes (\n -> return $ getByNid selfnode n)
         _ <- case res of
                 Nothing -> taskError "Can't find self: make sure cfgKnownHosts includes the master"
                 Just x -> spawnLocalAnd (masterproc x) (do myself <- getSelfPid
                                                            monitorProcess selfpid myself MaLinkError)
         _ <- spawnDaemonic (prober nodes seennodes masterpid)
         return masterpid
-- | Retry a possibly-failing action until it yields a 'Just', making at
-- most @n@ additional attempts; the result of the final attempt is
-- returned even if it is 'Nothing'.  Non-positive counts perform a
-- single attempt.  (Previously a negative @n@ fell through both the
-- @0@ pattern and the @n>0@ guard, causing a runtime pattern-match
-- failure.)
stubborn :: (Monad m) => Int -> m (Maybe a) -> m (Maybe a)
stubborn n a
   | n <= 0 = a
   | otherwise =
        do r <- a
           case r of
             Just _ -> return r
             Nothing -> stubborn (n-1) a
-- TODO: setDefaultLocality :: Locality -> TaskM ()
-- | Like 'newPromise', but creates a promise whose
-- value is already known. In other words, it puts
-- a given, already-calculated value in a promise.
-- Conceptually (but not syntactically, due to closures),
-- you can consider it like this:
--
-- > toPromise a = newPromise (return a)
toPromise :: (Serializable a) => a -> TaskM (Promise a)
toPromise = toPromiseAt LcDefault
-- | A variant of 'toPromise' that lets the user
-- express a locality preference, i.e. some information
-- about which node will become the owner of the
-- new promise. These preferences will not necessarily
-- be respected.
toPromiseAt :: (Serializable a) => Locality -> a -> TaskM (Promise a)
toPromiseAt locality a = newPromiseAt locality (passthrough__closure a)
-- | Similar to 'toPromiseAt' and 'newPromiseNear':
-- places the new promise on the node that owns the given promise.
-- NOTE(review): pattern coverage assumes 'Promise' has exactly the
-- PromiseImmediate and PromiseBasic constructors — confirm.
toPromiseNear :: (Serializable a,Serializable b) => Promise b -> a -> TaskM (Promise a)
toPromiseNear (PromiseImmediate _) = toPromise
-- TODO should I consult tsRedeemerForwarding here?
toPromiseNear (PromiseBasic prhost _prid) = toPromiseAt (LcByNode [nodeFromPid prhost])
-- | Creates an /immediate promise/, which is to say, a promise
-- in name only. Unlike a regular promise (created by 'toPromise'),
-- this kind of promise contains the value directly. The
-- advantage is that promise redemption is very fast, requiring
-- no network communication. The downside is that the
-- underlying data will be copied along with the promise.
-- Useful only for small data.
toPromiseImm :: (Serializable a) => a -> TaskM (Promise a)
toPromiseImm = return . PromiseImmediate
-- | Given a function (expressed here as a closure, see "Remote.Call")
-- that computes a value, returns a token identifying that value.
-- This token, a 'Promise' can be moved about even if the
-- value hasn't been computed yet. The computing function
-- will be started somewhere among the nodes visible to the
-- current master, preferring those nodes that correspond
-- to the 'defaultLocality'. Afterwards, attempts to
-- redeem the promise with 'readPromise' will contact the node
-- where the function is executing.
newPromise :: (Serializable a) => Closure (TaskM a) -> TaskM (Promise a)
newPromise = newPromiseAt LcDefault
-- | A variant of 'newPromise' that prefers to start
-- the computing function on the same node as the caller.
-- Useful if you plan to use the resulting value
-- locally.
newPromiseHere :: (Serializable a) => Closure (TaskM a) -> TaskM (Promise a)
newPromiseHere clo =
   do mynode <- liftTask $ getSelfNode
      newPromiseAt (LcByNode [mynode]) clo
-- | A variant of 'newPromise' that prefers to start
-- the computing function on the same node where some
-- other promise lives. The other promise is not
-- evaluated.
newPromiseNear :: (Serializable a, Serializable b) => Promise b -> Closure (TaskM a) -> TaskM (Promise a)
newPromiseNear (PromiseImmediate _) = newPromise
newPromiseNear (PromiseBasic prhost _prid) = newPromiseAt (LcByNode [nodeFromPid prhost])
-- | A variant of 'newPromise' that prefers to start
-- the computing functions on some set of nodes that
-- have a given role (assigned by the cfgRole configuration
-- option).
newPromiseAtRole :: (Serializable a) => String -> Closure (TaskM a) -> TaskM (Promise a)
newPromiseAtRole role clo = newPromiseAt (LcByRole [role]) clo
-- | A variant of 'newPromise' that lets the user
-- specify a 'Locality'. The other flavors of newPromise,
-- such as 'newPromiseAtRole', 'newPromiseNear', and
-- 'newPromiseHere' are just shorthand for a call to this function.
-- Asks the master to allocate the promise; any failure to spawn is a
-- terminal 'taskError'.
newPromiseAt :: (Serializable a) => Locality -> Closure (TaskM a) -> TaskM (Promise a)
newPromiseAt locality clo =
   let realclo = makePayloadClosure clo
    in case realclo of
         Just plclo -> do master <- getMaster
                          res <- roundtrip master (MmNewPromise plclo locality defaultQueueing)
                          case res of
                            Right (MmNewPromiseResponse pid prid) -> return $ PromiseBasic pid prid
                            Right (MmNewPromiseResponseFail) ->
                                taskError $ "Spawning of closure "++show clo++" by newPromise failed"
                            Left tms -> taskError $ "Spawning of closure "++show clo++" by newPromise resulted in "++show tms
         Nothing -> taskError $ "The specified closure, "++show clo++", can't produce payloads"
-- | Given a promise, gets the value that is being
-- calculated. If the calculation has finished,
-- the owning node will be contacted and the data
-- moved to the current node. If the calculation
-- has not finished, this function will block
-- until it has. If the calculation failed
-- by throwing an exception (e.g. divide by zero),
-- then this function will throw an exception as well
-- (a 'TaskException'). If the node owning the
-- promise is not accessible, the calculation
-- will be restarted.
readPromise :: (Serializable a) => Promise a -> TaskM a
readPromise (PromiseImmediate a) = return a
readPromise thepromise@(PromiseBasic prhost prid) =
   do mp <- lookupCachedPromise prid
      case mp of
        -- Not cached locally: ask the (possibly forwarded) owner.
        Nothing -> do fprhost <- liftM (maybe prhost id) $ lookupForwardedRedeemer prid
                      res <- roundtrip fprhost (NmRedeem prid)
                      case res of
                          Left e -> do tlogS "TSK" LoInformation $ "Complaining about promise " ++ show prid ++" on " ++show fprhost++" because of "++show e
                                       complain fprhost prid
                          Right NmRedeemResponseUnknown ->
                                    do tlogS "TSK" LoInformation $ "Complaining about promise " ++ show prid ++" on " ++show fprhost++" because allegedly unknown"
                                       complain fprhost prid
                          Right (NmRedeemResponse thedata) ->
                                 do extracted <- extractFromPayload thedata
                                    promiseinmem <- liftTaskIO $ makePromiseInMemory thedata (Just $ toDyn extracted)
                                    putPromiseInCache prid promiseinmem
                                    return extracted
                          Right NmRedeemResponseException ->
                                 taskError "Failed promise redemption" -- don't redeem, this is a terminal failure
        -- Cached locally: decode from the dynamic cache if possible,
        -- otherwise from the payload (or reload it from disk).
        Just mv -> do val <- liftTaskIO $ readMVar mv -- possible long wait here
                      case val of -- TODO this read/write MVars should be combined!
                        PromiseInMemory v _utc thedyn ->
                             case thedyn of
                                Just thedynvalue ->
                                  case fromDynamic thedynvalue of
                                     Nothing -> do liftTask $ logS "TSK" LoStandard "Insufficiently dynamic promise cache"
                                                   extractFromPayload v
                                     Just realval -> do updated <- liftTaskIO $ makePromiseInMemory v thedyn
                                                        putPromiseInCache prid updated
                                                        return realval
                                Nothing -> do extracted <- extractFromPayload v
                                              updated <- liftTaskIO $ makePromiseInMemory v (Just $ toDyn extracted)
                                              putPromiseInCache prid updated
                                              return extracted
                        PromiseException _ -> taskError $ "Redemption of promise failed"
                        PromiseOnDisk fp -> do mpd <- liftTask $ undiskify fp mv
                                               _ <- liftTask $ spawnLocal $ diskify fp mv False
                                               case mpd of
                                                  Just dat -> extractFromPayload dat
                                                  _ -> taskError "Promise extraction from disk failed"
  where extractFromPayload v = do out <- liftTaskIO $ serialDecode v
                                  case out of
                                    Just r -> return r
                                    Nothing -> taskError "Unexpected payload type"
        -- Ask the master for the promise's new home, record the
        -- forwarding, and retry redemption there.
        complain fprhost prid =
          do master <- getMaster
             response <- roundtrip master (MmComplain fprhost prid)
             case response of
                Left a -> taskError $ "Couldn't file complaint with master about " ++ show fprhost ++ " because " ++ show a
                Right (MmComplainResponse newhost)
                  | newhost == nullPid -> taskError $ "Couldn't file complaint with master about " ++ show fprhost
                  | otherwise -> do setForwardedRedeemer prid newhost
                                    readPromise thepromise
-- | Internal state threaded through every 'TaskM' computation.
data TaskState = TaskState
      {
         -- | The master process coordinating this application.
         tsMaster :: ProcessId,
         -- | The nodeboss supervising tasks on this node, if any.
         tsNodeBoss :: Maybe ProcessId,
         -- | Locally cached promise storage, keyed by promise id.
         tsPromiseCache :: MVar (Map.Map PromiseId (MVar PromiseStorage)),
         -- | Known promise relocations (learned after complaints).
         tsRedeemerForwarding :: MVar (Map.Map PromiseId ProcessId),
         tsMonitoring :: Map.Map ProcessId ()
      }
-- | A state-threading monad over 'ProcessM': each step receives the
-- current 'TaskState' and produces an updated state with its result.
data TaskM a = TaskM { runTaskM :: TaskState -> ProcessM (TaskState, a) } deriving (Typeable)
instance Monad TaskM where
    m >>= k = TaskM $ \ts -> do
                (ts',a) <- runTaskM m ts
                (ts'',a') <- runTaskM (k a) (ts')
                return (ts'',a')
    return x = TaskM $ \ts -> return $ (ts,x)
instance Functor TaskM where
    f `fmap` m =
      TaskM $ \ts ->
        runTaskM m ts >>= \(ts', x) ->
          return (ts', f x)
-- Applicative threads the state left-to-right, matching (>>=).
instance Applicative TaskM where
    mf <*> mx =
      TaskM $ \ts ->
        runTaskM mf ts >>= \(ts', f) ->
          runTaskM mx ts' >>= \(ts'', x) ->
            return (ts'', f x)
    pure = return
-- | Look up a recorded relocation for the given promise, if any.
lookupForwardedRedeemer :: PromiseId -> TaskM (Maybe ProcessId)
lookupForwardedRedeemer wanted = TaskM $ \ts ->
  do found <- liftIO $ withMVar (tsRedeemerForwarding ts)
                                (\table -> return (Map.lookup wanted table))
     return (ts, found)
-- | Record that redemptions of the given promise should henceforth be
-- directed at 'to'.
setForwardedRedeemer :: PromiseId -> ProcessId -> TaskM ()
setForwardedRedeemer from to = TaskM $ \ts ->
  liftIO $ modifyMVar (tsRedeemerForwarding ts)
                      (\table -> return (Map.insert from to table, (ts, ())))
-- | Fetch the local storage slot for a promise, if this node has one.
lookupCachedPromise :: PromiseId -> TaskM (Maybe (MVar PromiseStorage))
lookupCachedPromise prid = TaskM $ \ts ->
  do slot <- liftIO $ withMVar (tsPromiseCache ts)
                               (return . Map.lookup prid)
     return (ts, slot)
-- | Insert (or replace) a promise's cached storage, wrapping the given
-- value in a fresh MVar.
putPromiseInCache :: PromiseId -> PromiseStorage -> TaskM ()
putPromiseInCache prid ps = TaskM $ \ts ->
  do liftIO $ modifyMVar_ (tsPromiseCache ts) $ \cache ->
       do slot <- newMVar ps
          return (Map.insert prid slot cache)
     return (ts, ())
-- | The master process coordinating this Task application.
getMaster :: TaskM ProcessId
getMaster = TaskM $ \ts -> return (ts,tsMaster ts)
-- | Lift a 'ProcessM' action into 'TaskM', leaving the state untouched.
liftTask :: ProcessM a -> TaskM a
liftTask a = TaskM $ \ts -> a >>= (\x -> return (ts,x))
-- | Lift an 'IO' action into 'TaskM'.
liftTaskIO :: IO a -> TaskM a
liftTaskIO = liftTask . liftIO
-- | A Task-monadic version of 'Remote.Process.say'.
-- Puts text messages in the log.
tsay :: String -> TaskM ()
tsay a = liftTask $ say a
-- | Writes various kinds of messages to the
-- "Remote.Process" log.
tlogS :: LogSphere -> LogLevel -> String -> TaskM ()
tlogS a b c = liftTask $ logS a b c
----------------------------------------------
-- * MapReduce
----------------------------------------------
-- | A data structure that stores the important
-- user-provided functions that are the namesakes
-- of the MapReduce algorithm.
-- The number of mapper processes can be controlled
-- by the user by controlling the length of the string
-- returned by mtChunkify. The number of reducer
-- promises is controlled by the number of
-- values returned by shuffler.
-- The user must provide their own mapper and reducer.
-- For many cases, the default chunkifier ('chunkify')
-- and shuffler ('shuffle') are adequate.
data MapReduce rawinput input middle1 middle2 result
     = MapReduce
       {
         -- | Closure run (as a promise) for each input chunk.
         mtMapper :: input -> Closure (TaskM [middle1]),
         -- | Closure run (as a promise) for each shuffled group.
         mtReducer :: middle2 -> Closure (TaskM result),
         -- | Splits the raw input into per-mapper chunks.
         mtChunkify :: rawinput -> [input],
         -- | Regroups mapper outputs into per-reducer inputs.
         mtShuffle :: [middle1] -> [middle2]
       }
-- | A convenient way to provide the 'mtShuffle' function
-- as part of 'mapReduce': sorts key/value pairs by key and collects
-- the values belonging to each distinct key.
shuffle :: Ord a => [(a,b)] -> [(a,[b])]
shuffle pairs = [ (fst (head grp), map snd grp) | grp <- grouped ]
  where sorted  = sortBy (\(x,_) (y,_) -> compare x y) pairs
        grouped = groupBy (\(x,_) (y,_) -> x == y) sorted
-- | A convenient way to provide the 'mtChunkify' function
-- as part of 'mapReduce': splits a list into (at most) the requested
-- number of roughly equal-length chunks.  A non-positive chunk count
-- is a 'taskError'.
chunkify :: Int -> [a] -> [[a]]
chunkify numChunks items
  | numChunks <= 0 = taskError "Can't chunkify into less than one chunk"
  | otherwise = go items
  where
    chunkLen = ceiling ((fromIntegral (length items) / fromIntegral numChunks) :: Double)
    go [] = []
    go xs = let (chunk,rest) = splitAt chunkLen xs
             in chunk : go rest
-- | The MapReduce algorithm, implemented in a very
-- simple form on top of the Task layer. Its
-- use depends on these user-determined data types:
--
-- * rawinput -- The data given to the algorithm as a whole, split into
-- per-mapper chunks by 'mtChunkify'.
--
-- * input -- The data type given to each mapper.
--
-- * middle1 -- The output of the mapper. This may include some /key/ which is used by the shuffler to allocate data to reducers.
-- If you use the default shuffler, 'shuffle', this type must have the form @Ord a => (a,b)@.
--
-- * middle2 -- The output of the shuffler. The default shuffler emits a type in the form @Ord => (a,[b])@. Each middle2 output
-- by shuffler is given to a separate reducer.
--
-- * result -- The output of the reducer, upon being given a bunch of middles.
mapReduce :: (Serializable i,Serializable k,Serializable m,Serializable r) =>
             MapReduce ri i k m r -> ri -> TaskM [r]
mapReduce mr inputs =
  let chunks = (mtChunkify mr) inputs
   in do
        -- One promise per chunk; mappers run in parallel, then we
        -- block on each result in turn.
        pmapResult <- mapM (\chunk ->
             newPromise ((mtMapper mr) chunk) ) chunks
        mapResult <- mapM readPromise pmapResult
        let shuffled = (mtShuffle mr) (concat mapResult)
        -- One promise per shuffled group for the reducers.
        pres <- mapM (\mid2 ->
                newPromise ((mtReducer mr) mid2)) shuffled
        mapM readPromise pres
| jepst/CloudHaskell | Remote/Task.hs | bsd-3-clause | 47,455 | 652 | 38 | 16,405 | 9,212 | 5,066 | 4,146 | 699 | 12 |
{-# LANGUAGE RankNTypes #-}
{- |
Module : $Header$
License : GPLv2 or higher, see LICENSE.txt
Maintainer : nevrenato@gmail.com
Stability : provisional
Portability : portable
Description :
Parser for an hybridized arbitrary logic
-}
module TopHybrid.Parse_AS where
import Common.AnnoState
import Common.AS_Annotation
import Common.GlobalAnnotations (PrefixMap)
import Common.Token
import Data.Maybe
import qualified Data.Map as Map
import Text.ParserCombinators.Parsec
import Logic.Logic
import TopHybrid.AS_TopHybrid
import Control.Monad (liftM)
-- the top parser; parses an entire specification
-- | Reads the "baselogic" header, resolves the named logic with the
-- supplied lookup function, and parses the rest of the spec in it.
thBasic :: (String -> AnyLogic) -> AParser st Spc_Wrap
thBasic getLogic =
        do
        asKey "baselogic"
        logicName <- simpleId
        thSpec $ getLogic $ show logicName
-- | The underlying logic's basic-spec parser: prefers the combined
-- parser/printer table, falling back to 'parse_basic_spec'.
basicSpec :: (Syntax lid basic_spec s si sim) =>
             lid -> Maybe (PrefixMap -> AParser st basic_spec)
basicSpec l = maybe (parse_basic_spec l) (Just . fst)
              (parserAndPrinter Nothing l)
{- Parses the specification after knowing
the underlying logic -}
-- | Parses a @Basic_Spec { ... }@ block in the underlying logic,
-- followed by nominal/modality declarations and a list of annotated
-- hybrid formulas.
thSpec :: AnyLogic -> AParser st Spc_Wrap
thSpec (Logic l) =
        do
        asKey "Basic_Spec"
        asKey "{"
        s <- callParser l basicSpec Map.empty
        asKey "}"
        i <- many itemParser
        fs <- sepBy (annoFormParser l s) anSemiOrComma
        return $ Spc_Wrap l (Bspec i s) fs
{- Calls the underlying logic parser, only if exists. Otherwise
will throw out an error -}
callParser :: (Show a) => a -> (a -> Maybe x) -> x
callParser l f =
    case f l of
      Just parser -> parser
      Nothing -> error $ "Failed! No parser for logic " ++ show l
-- Parses the declaration of nominals and modalities:
-- either "modalities <ids>" or "nominals <ids>", where the ids are
-- separated by semicolons or commas.
itemParser :: AParser st TH_BASIC_ITEM
itemParser = modDecl <|> nomDecl
        where
        idList = sepBy simpleId anSemiOrComma
        modDecl = do
                  asKey "modalities"
                  fmap Simple_mod_decl idList
        nomDecl = do
                  asKey "nominals"
                  fmap Simple_nom_decl idList
-- Formula parser with annotations
annoFormParser :: (Logic l sub bs f s sm si mo sy rw pf) =>
                  l -> bs -> AParser st (Annoted Frm_Wrap)
annoFormParser l b = allAnnoParser $ formParser l b
-- Just parses the formula, and wraps it in Frm_Wrap
formParser :: (Logic l sub bs f s sm si mo sy rw pf) =>
              l -> bs -> AParser st Frm_Wrap
formParser l bs = liftM (Frm_Wrap l) $ topParser l bs
-- Parser of hybridization of hybridization of sentences:
-- reuses the logic and basic spec already stored in the Spc_Wrap.
formParser' :: Spc_Wrap -> AParser st Frm_Wrap
formParser' (Spc_Wrap l b _) = liftM (Frm_Wrap l) $ topParser l (und b)
{- Parser of sentences
The precedence order is left associative and when the priority
is defined is as follows : () > (not,@,[],<>) > /\ > \/ > (->,<->) -}
-- Layered chainl1 parsers: conjunction binds tightest of the binary
-- operators, then disjunction, then implication/bi-implication.
topParser :: (Logic l sub bs f s sm si mo sy rw pf) =>
             l -> bs -> AParser st (TH_FORMULA f)
topParser l bs = chainl1 fp1 impAndBiP
        where fp1 = chainl1 fp2 disjP
              fp2 = chainl1 (fParser l bs) conjP
{- BinaryOps parsers, the reason to separate them, is that so we can get a
precedence order -}
-- | Parses "/\" as conjunction.
conjP :: AParser st (TH_FORMULA f -> TH_FORMULA f -> TH_FORMULA f)
conjP = asKey "/\\" >> return Conjunction
-- | Parses "\/" as disjunction.
disjP :: AParser st (TH_FORMULA f -> TH_FORMULA f -> TH_FORMULA f)
disjP = asKey "\\/" >> return Disjunction
-- | Parses "=>" as implication and "<=>" as bi-implication.
impAndBiP :: AParser st (TH_FORMULA f -> TH_FORMULA f -> TH_FORMULA f)
impAndBiP = (asKey "=>" >> return Implication) <|>
            (asKey "<=>" >> return BiImplication)
-- ------------
-- Parser of sentences without the binary operators
-- Alternatives, in order: parenthesised formula, negation, @-satisfaction,
-- universal/existential binders, box, diamond (with a special
-- box-and-diamond form), the constants true/false, a bare nominal,
-- and an underlying-logic sentence in braces.
fParser :: (Logic l sub bs f s sm si mo sy rw pf) =>
           l -> bs -> AParser st (TH_FORMULA f)
fParser l bs =
        do
        asKey "("
        f <- topParser l bs
        asKey ")"
        return $ Par f
        <|>
        do
        asKey "not"
        f <- fParser l bs <|> topParser l bs
        return $ Neg f
        <|>
        do
        asKey "@"
        n <- simpleId
        f <- fParser l bs <|> topParser l bs
        return $ At n f
        <|>
        do
        asKey "!"
        n <- simpleId
        f <- fParser l bs
        return $ Uni n f
        <|>
        do
        asKey "?"
        n <- simpleId
        f <- fParser l bs
        return $ Exist n f
        <|>
        do
        asKey "["
        m <- simpleId
        asKey "]"
        f <- fParser l bs <|> topParser l bs
        return $ Box m f
        <|>
        -- NOTE(review): this branch consumes '<m>"' (note the trailing
        -- double quote in the key) and expands to diamond-and-box;
        -- confirm this is the intended concrete syntax.
        try (do
        asKey "<"
        m <- simpleId
        asKey ">\""
        f <- fParser l bs <|> topParser l bs
        return $ Par $ Conjunction (Dia m f) (Box m f))
        <|>
        do
        asKey "<"
        m <- simpleId
        asKey ">"
        f <- fParser l bs <|> topParser l bs
        return $ Dia m f
        <|>
        do
        asKey "true"
        return TrueA
        <|>
        do
        asKey "false"
        return FalseA
        <|>
        do
        n <- simpleId
        return $ Here n
        <|>
        do
        asKey "{"
        f <- callParser l parse_basic_sen bs
        asKey "}"
        return $ UnderLogic f
| keithodulaigh/Hets | TopHybrid/Parse_AS.hs | gpl-2.0 | 5,018 | 0 | 20 | 1,631 | 1,486 | 697 | 789 | 137 | 1 |
module Main where
import Graphics.Blank
import Data.Map (Map)
import qualified Data.Map as Map
import Debug.Trace
main = blankCanvas 3000 $ \ context -> loop context Map.empty X
-- | The two players' marks.
data XO = X | O
        deriving (Eq,Ord,Show)

-- | The player whose turn comes next.
swap :: XO -> XO
swap player =
  case player of
    X -> O
    O -> X
-- | One iteration of the game: redraw the whole board, wait for a
-- mouse click, and recurse — placing the current player's mark if the
-- click landed on an empty square, otherwise redrawing unchanged.
loop :: Context -> Map (Int,Int) XO -> XO -> IO ()
loop context board turn = do
        -- print board
        -- print turn
        -- Render: clear, centre the origin, draw the grid, then each mark.
        (width,height,sz) <- send context $ do
                (width,height) <- size
                clearRect (0,0,width,height)
                beginPath()
                let sz = min width height
                save()
                translate (width / 2,height / 2)
                sequence_ [ do bigLine (-sz * 0.45,n) (sz * 0.45,n)
                               bigLine (n,-sz * 0.45) (n,sz * 0.45)
                          | n <- [-sz * 0.15,sz * 0.15]
                          ]
                sequence_ [ do save()
                               translate (fromIntegral x * sz * 0.3,fromIntegral y * sz * 0.3)
                               case Map.lookup (x,y) board of
                                  Just X -> drawX (sz * 0.1)
                                  Just O -> drawO (sz * 0.1)
                                  Nothing -> return ()
                               restore()
                          | x <- [-1,0,1]
                          , y <- [-1,0,1]
                          ]
                restore()
                return (width,height,sz)
        -- Map a pixel position to a board square in {-1,0,1}^2, or
        -- Nothing when the click falls outside the grid.
        let pointToSq :: (Float,Float) -> Maybe (Int,Int)
            pointToSq (x,y) = do
                x' <- fd ((x - width / 2) / sz)
                y' <- fd ((y - height / 2) / sz)
                return (x',y')
            fd x =
                -- trace (show ("fx",x,r)) $
                if r `elem` [-1..1] then Just (signum r) else Nothing
               where r = round (x * 3.3333)
        event <- send context $ readEvent MouseDown
        -- print event
        case jsMouse event of
          -- if no mouse location, ignore, and redraw
          Nothing -> loop context board turn
          Just (x',y') -> case pointToSq (fromIntegral x',fromIntegral y') of
                            Nothing -> loop context board turn
                            Just pos -> case Map.lookup pos board of
                                          Nothing -> loop context
                                                          (Map.insert pos turn board)
                                                          (swap turn)
                                          -- already something here
                                          Just _ -> loop context board turn
-- Stroke colours for the three kinds of marks on the board.
xColor = "#ff0000"
oColor = "#00a000"
boardColor = "#000080"
-- | Draw an X centred on the origin; 'size' is half the diagonal extent.
drawX :: Float -> Canvas ()
drawX size = do
        strokeStyle xColor
        lineCap "butt"
        beginPath()
        moveTo(-size,-size)
        lineTo(size,size)
        lineWidth 10
        stroke()
        beginPath()
        moveTo(-size,size)
        lineTo(size,-size)
        lineWidth 10
        stroke()
-- | Draw an O of the given radius, centred on the origin.
drawO :: Float -> Canvas ()
drawO radius = do
        beginPath()
        arc(0, 0, radius, 0, 2 * pi, False)
        lineWidth 10
        strokeStyle oColor
        stroke()
-- | Draw one thick, round-capped board line between two points.
bigLine :: (Float,Float) -> (Float,Float) -> Canvas ()
bigLine (x,y) (x',y') = do
        beginPath()
        moveTo(x,y)
        lineTo(x',y')
        lineWidth 20
        strokeStyle boardColor
        lineCap "round"
        stroke()
| beni55/blank-canvas | examples/tictactoe/Main.hs | bsd-3-clause | 3,497 | 19 | 20 | 1,661 | 1,204 | 598 | 606 | 85 | 7 |
module HaddockDocs (
Section(..),
formatSections,
Para(..),
formatParas,
formatParasFragment,
Span(..),
formatSpans,
haddockSection,
) where
import Utils
import Data.List as List (lines, words, intersperse)
import Data.Char (isSpace)
-- | A titled documentation section: nesting level, title, body.
data Section = Section Int String [Para]
-- | Block-level documentation elements.
data Para =
    ParaText [Span] -- an ordinary word-wrapped paragraph
  | ParaCode [[Span]] -- a @...@ code block
  | ParaVerbatm [String] -- a > bird-track verbatum code block
  | ParaTitle String -- a title to a subsection eg an example
  | ParaDefItem [Span] [Span] -- a definition list item
  | ParaListItem [Span] -- a itemisted list item
-- | Inline elements within a paragraph.
data Span = SpanText String -- just simple text
          | SpanIdent String -- hyperlinked Identifiers
          | SpanModule String -- hyperlinked module
          | SpanEmphasis String -- emphasised text /.../
          | SpanMonospace [Span] -- monospace text @...@
          | SpanSpace Int -- Just a bunch of spaces
-- | SpanURL
-- | SpanImage
  deriving Show
-- | Render sections as haddock comments; a section's level becomes a
-- run of '*' in its heading, and empty sections are omitted.
formatSections :: [Section] -> Doc
formatSections =
    vsep
  . map (\(Section level title paras) ->
      if null paras
        then empty
        else comment <+> text (replicate level '*') <+> text title
          $$ comment
          $$ formatParas 77 paras)
-- | Render paragraphs as a comment block without the leading '|'
-- (for continuing an existing haddock comment), wrapped at 77 columns.
formatParasFragment :: [Para] -> Doc
formatParasFragment =
    haddockSection empty
  . concat
  . intersperse [empty]
  . map (formatPara 77)
-- | Render paragraphs as a fresh haddock comment ("-- | ..."),
-- word-wrapped at the given width, blank comment lines between paras.
formatParas :: Int -> [Para] -> Doc
formatParas width =
    haddockSection (char '|')
  . concat
  . intersperse [empty]
  . map (formatPara width)
-- | Render one paragraph to a list of comment-body lines, wrapped at
-- 'width' columns.  (Note: the ParaCode branch's 'lines' binding
-- shadows Prelude's 'lines'.)
formatPara :: Int -> Para -> [Doc]
formatPara width para = case para of
    ParaText spans -> formatSpans 3 width spans
    ParaCode lines -> [empty, char '@']
                   ++ map (hcat . map (text . formatSpan)) lines
                   ++ [char '@']
    ParaVerbatm code -> map (\line -> char '>' <+> text line) code
    ParaTitle title -> [char '*' <+> text title]
    -- Definition item: the bracketed term prefixes the first line, and
    -- continuation lines indent past it.
    ParaDefItem term def -> let term' = escape (concatMap formatSpan term)
                                indent = length term' + 6
                             in prependToFirstLine (brackets (text term'))
                                  (formatSpans indent width def)
    ParaListItem spans -> prependToFirstLine (char '*')
                            (formatSpans 5 width spans)
  where escape [] = []
        escape (']':cs) = '\\': ']' : escape cs --we must escape ] in def terms
        escape (c:cs) = c : escape cs
-- | Render spans to word-wrapped lines: merge adjacent text/space
-- spans, format to a flat string, then wrap at 'width' with the first
-- line indented by 'initialIndent'.
formatSpans :: Int -> Int -> [Span] -> [Doc]
formatSpans initialIndent width =
    map (hsep . map text)
  . wrapText initialIndent width
  . words
  . concatMap formatSpan
  . coalesceTextSpans
-- | Render a single inline span as haddock markup text; plain and
-- emphasised text have haddock's special characters escaped.
formatSpan :: Span -> String
formatSpan sp = case sp of
  SpanText text       -> escapeHaddockSpecialChars text
  SpanIdent text      -> "'" ++ text ++ "'"
  SpanModule text     -> "\"" ++ text ++ "\""
  SpanEmphasis text   -> "/" ++ escapeHaddockSpecialChars text ++ "/"
  SpanMonospace spans -> "@" ++ concatMap formatSpan spans ++ "@"
  SpanSpace len       -> replicate len ' '
-- | Merge each run of adjacent SpanText/SpanSpace spans into a single
-- SpanText (spaces become literal space characters).  The accumulator
-- holds the pending text pieces in reverse order; clause order is
-- significant.
coalesceTextSpans :: [Span] -> [Span]
coalesceTextSpans = coalesce []
  where coalesce texts (SpanText text : spans) = coalesce (text:texts) spans
        coalesce texts (SpanSpace len : spans) = coalesce (text':texts) spans
          where text' = replicate len ' '
        coalesce [] (span : spans) = span : coalesce [] spans
        coalesce texts (span : spans) = SpanText (concat (reverse texts))
                                      : span : coalesce [] spans
        coalesce [] [] = []
        coalesce texts [] = [SpanText (concat (reverse texts))]
-- | Backslash-escape the characters haddock treats as markup.
-- Apostrophes in common English contractions ('s, 't, 'll followed by
-- whitespace) are left unescaped, and a UTF-8 em dash becomes "--".
escapeHaddockSpecialChars :: String -> String
escapeHaddockSpecialChars = go
  where
    -- Clause order matters: the contraction cases must be tried before
    -- the generic quote-escaping case.
    go [] = []
    go ('\'':'s':c:rest) | isSpace c = '\'' : 's' : ' ' : go rest
    go ('\'':'t':c:rest) | isSpace c = '\'' : 't' : ' ' : go rest
    go ('\'':'l':'l':c:rest) | isSpace c = '\'' : 'l' : 'l' : c : go rest
    go (c:rest)
      | c `elem` "/`\"@<'" = '\\' : c : go rest
    go ('\226':'\128':'\148':rest) = '-' : '-' : go rest -- UTF8 for EM dash
    go (c:rest) = c : go rest
-------------------------------------------------------------------------------
-- Extra pretty printing bits
-------------------------------------------------------------------------------
-- | Attach a prefix Doc to the first line of a list of lines; an empty
-- list stays empty (the prefix is dropped).
prependToFirstLine :: Doc -> [Doc] -> [Doc]
prependToFirstLine start [] = []
prependToFirstLine start (line:lines) = (start <+> line) : lines
-- | Lay out lines as a comment block, with 'start' (e.g. @char '|'@)
-- prepended to the first line.
haddockSection :: Doc -> [Doc] -> Doc
haddockSection start = commentBlock . prependToFirstLine start
| thiagoarrais/gtk2hs | tools/apiGen/src/HaddockDocs.hs | lgpl-2.1 | 4,930 | 0 | 20 | 1,435 | 1,536 | 798 | 738 | 106 | 8 |
{-# LANGUAGE MagicHash #-}
import GHC.Exts
import GHC.Prim
-- | Regression test for GHC #15696: @dataToTag#@ must evaluate its
-- argument. The NOINLINE indirection @a = f@ used to defeat that, making
-- the first line print tag 0 instead of 1. Code must stay exactly as-is.
main :: IO ()
main = do
    print (I# (dataToTag# a)) -- used to print 0, should print 1
    print (I# (dataToTag# f)) -- used to print 1 correctly
  where
    {-# NOINLINE f #-}
    f = T2
    {-# NOINLINE a #-}
    a = f
data T = T1 | T2
| sdiehl/ghc | testsuite/tests/codeGen/should_run/T15696_2.hs | bsd-3-clause | 305 | 3 | 11 | 90 | 100 | 49 | 51 | 12 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TemplateHaskell #-}
module Web.Tombstone.Schema
( -- * User
usersTable
, runUsersQuery
, UserId'(..)
, User
, UserColumn
-- * Bounties
, bountiesTable
, BountyId'(..)
, Bounty
, BountyColumn
-- * Utilities
, printSql
) where
-------------------------------------------------------------------------------
import Data.Profunctor.Product.Default
import Data.Profunctor.Product.TH (makeAdaptorAndInstance)
import Database.PostgreSQL.Simple (Connection)
import GHC.Int
import Opaleye
-------------------------------------------------------------------------------
import Web.Tombstone.Types
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
-- User
-------------------------------------------------------------------------------
-- | User row shape, polymorphic in every column so the same record can
-- hold either Haskell values ('User') or Opaleye columns ('UserColumn').
data User' a b c d e f = User {
      userId :: a
    , userName :: b
    , userAvatarUrl :: c
    , userEmail :: d
    , userGithubLogin :: e
    , userHireable :: f
    } deriving (Show)
-- can't use newtype for now :(
data UserId' a = UserId a deriving (Show, Eq, Ord)
-- | Column-side user id (bigint).
type UserIdColumn = UserId' (Column PGInt8)
-- | User row as Opaleye columns; email is nullable in the database.
type UserColumn = User'
                  UserIdColumn
                  (Column PGText)
                  (Column PGText)
                  (Column (Nullable PGText))
                  (Column PGText)
                  (Column PGBool)
type UserId = UserId' Int64
-- | User row as Haskell values.
type User = User'
            UserId
            FullName
            URL
            (Maybe Email)
            GithubLogin
            Bool
-- Generate product-profunctor adaptors/instances for the records above.
$(makeAdaptorAndInstance "pUser" ''User')
$(makeAdaptorAndInstance "pUserId" ''UserId')
-- | Opaleye definition of the @users@ table; every column is required
-- on both the read and write side.
usersTable :: Table UserColumn UserColumn
usersTable = Table "users" (pUser User { userId = pUserId (UserId (required "id"))
                                       , userName = required "name"
                                       , userAvatarUrl = required "avatar_url"
                                       , userEmail = required "email"
                                       , userGithubLogin = required "github_login"
                                       , userHireable = required "hireable"
                                       })
--TODO: drop, just for testing
-- | Run a user query against a live connection.
runUsersQuery
    :: Connection
    -> Query UserColumn
    -> IO [User]
runUsersQuery = runQuery
-------------------------------------------------------------------------------
-- Bounties
-------------------------------------------------------------------------------
-- | Currency unit for bounty compensation (stored as text in the DB).
data Currency = USDCents
              deriving (Show, Eq, Ord)
-------------------------------------------------------------------------------
-- | How compensation is paid out.
data CompensationSchedule = Hourly
                          | Salary
                          deriving (Show, Eq)
-------------------------------------------------------------------------------
-- | Compensation triple, polymorphic so it can hold Haskell values or
-- Opaleye columns (see the type synonyms below).
data CompensationRequirements' a b c = CompensationRequirements {
      crSchedule :: a
    , crCurrency :: b
    , crMagnitude :: c
    } deriving (Show, Eq, Ord)
type CompensationRequirements =
  CompensationRequirements' CompensationSchedule Currency Int
type CompensationRequirementsColumn =
  CompensationRequirements' (Column PGText)
                            (Column PGText)
                            (Column PGInt8)
$(makeAdaptorAndInstance "pCompensationRequirements" ''CompensationRequirements')
-- | Bounty row shape, polymorphic per column (Haskell values vs Opaleye
-- columns), mirroring the 'User'' pattern above.
data Bounty' a b c d e = Bounty {
      bountyId :: a
    , bountyDescription :: b
    , bountyClaimed :: c
    , bountyCompensation :: d
    , bountyUserId :: e
    } deriving (Show)
-- can't use newtype for now :(
data BountyId' a = BountyId a deriving (Show, Eq, Ord)
type BountyIdColumn = BountyId' (Column PGInt8)
-- | Bounty row as Opaleye columns; compensation is an embedded record.
type BountyColumn = Bounty'
                    BountyIdColumn
                    (Column PGText)
                    (Column PGBool)
                    CompensationRequirementsColumn
                    UserIdColumn
type BountyId = BountyId' Int64
-- | Bounty row as Haskell values.
type Bounty = Bounty'
              BountyId
              BountyDescription
              Bool
              CompensationRequirements
              UserId
$(makeAdaptorAndInstance "pBounty" ''Bounty')
$(makeAdaptorAndInstance "pBountyId" ''BountyId')
-------------------------------------------------------------------------------
-- | Opaleye definition of the @bounties@ table; the compensation record
-- maps onto three flat @compensation_*@ columns via @ccols@.
bountiesTable :: Table BountyColumn BountyColumn
bountiesTable = Table "bounties" (pBounty Bounty { bountyId = pBountyId (BountyId (required "id"))
                                                 , bountyDescription = required "description"
                                                 , bountyClaimed = required "claimed"
                                                 , bountyCompensation = ccols
                                                 , bountyUserId = pUserId (UserId (required "user_id"))
                                                 })
  where
    ccols = pCompensationRequirements CompensationRequirements { crSchedule = required "compensation_schedule"
                                                               , crCurrency = required "compensation_currency"
                                                               , crMagnitude = required "compensation_magnitude"}
-------------------------------------------------------------------------------
-- Utilities
-------------------------------------------------------------------------------
-- | Print the SQL Opaleye would generate for a query (debug helper).
printSql :: Default Unpackspec a a => Query a -> IO ()
printSql = putStrLn . showSqlForPostgres
| ShadowBan/tombstone | src/Web/Tombstone/Schema.hs | mit | 5,775 | 0 | 14 | 1,876 | 942 | 536 | 406 | 113 | 1 |
{- |
Raw UDP Server used by Ames driver.
1. Opens a UDP socket and makes sure that it stays open.
- If can't open the port, wait and try again repeatedly.
- If there is an error reading to or writing from the open socket,
close it and open another, making sure, however, to reuse the
same port
NOTE: It's not clear what, if anything, closing and reopening
the socket does. We're keeping this behavior out of conservatism
until we understand it better.
2. Receives packets from the socket.
- When packets come in from the socket, they go into a bounded queue.
- If the queue is full, the packet is dropped.
- If the socket is closed, wait and try again repeatedly.
- `usRecv` gets the first packet from the queue.
3. Sends packets to the socket.
- Packets sent to `usSend` enter a bounded queue.
- If that queue is full, the packet is dropped.
- Packets are taken off the queue one at a time.
- If the socket is closed (or broken), the packet is dropped.
4. Runs until `usKill` is run, then all threads are killed and the
socket is closed.
-}
module Urbit.Vere.Ames.UDP
( UdpServ(..)
, fakeUdpServ
, realUdpServ
)
where
import Urbit.Prelude
import Urbit.Vere.Ports
import Network.Socket
import Control.Monad.STM (retry)
import Network.Socket.ByteString (recvFrom, sendTo)
import Urbit.Vere.Stat (AmesStat(..), bump)
-- Types -----------------------------------------------------------------------
-- | Handle on a running UDP server (real or fake).
data UdpServ = UdpServ
  { usSend :: SockAddr -> ByteString -> IO () -- ^ enqueue an outbound packet (dropped if the queue is full)
  , usRecv :: STM (PortNumber, HostAddress, ByteString) -- ^ dequeue the next inbound packet (retries while empty)
  , usKill :: IO () -- ^ stop all worker threads and close the socket
  }
-- Utils -----------------------------------------------------------------------
{- |
Writes to queue and returns `True` unless the queue is full, then do
nothing and return `False`.
-}
-- | Non-blocking write: returns 'True' on success, 'False' (discarding
-- the element) when the queue is already full.
tryWriteTBQueue :: TBQueue x -> x -> STM Bool
tryWriteTBQueue queue item = do
  full <- isFullTBQueue queue
  if full
    then pure False
    else do
      writeTBQueue queue item
      pure True
{- |
Open a UDP socket and bind it to a port
-}
-- | Open an IPv4 UDP socket and bind it to the given port and host
-- address, returning @Left err@ instead of throwing on failure.
--
-- Cleanup: the original wrapped each step in @io@ (a 'MonadIO' lift),
-- which is the identity inside a plain 'IO' action, and bound @()@
-- explicitly; both were redundant.
doBind :: PortNumber -> HostAddress -> IO (Either IOError Socket)
doBind por hos = tryIOError $ do
  sok <- socket AF_INET Datagram defaultProtocol
  bind sok (SockAddrInet por hos)
  pure sok
{- |
Open a UDP socket and bind it to a port.
If this fails, wait 250ms and repeat forever.
-}
-- | Open a UDP socket and bind it to a port, retrying forever with a
-- 250ms pause after each failure (e.g. port already in use).
forceBind :: HasLogFunc e => PortNumber -> HostAddress -> RIO e Socket
forceBind por hos = go
 where
  go = do
    logInfo (display ("AMES: UDP: Opening socket on port " <> tshow por))
    io (doBind por hos) >>= \case
      Right sk -> do
        logInfo (display ("AMES: UDP: Opened socket on port " <> tshow por))
        pure sk
      Left err -> do
        logInfo (display ("AMES: UDP: " <> tshow err))
        logInfo ("AMES: UDP: Failed to open UDP socket. Waiting")
        threadDelay 250_000
        go
{- |
Attempt to send a packet to a socket.
If it fails, return `False`. Otherwise, return `True`.
-}
-- | Send one packet, returning 'False' if the socket write failed (the
-- caller then treats the socket as broken).
sendPacket :: HasLogFunc e => ByteString -> SockAddr -> Socket -> RIO e Bool
sendPacket fullBytes adr sok = do
  logDebug $ displayShow ("AMES", "UDP", "Sending packet.")
  res <- io $ tryIOError $ go fullBytes
  case res of
    Left err -> do
      logError $ displayShow ("AMES", "UDP", "Failed to send packet", err)
      pure False
    Right () -> do
      logDebug $ displayShow ("AMES", "UDP", "Packet sent.")
      pure True
 where
  -- Loop until every byte has been handed to the kernel; sendTo may
  -- accept only a prefix of the buffer.
  go byt = do
    sent <- sendTo sok byt adr
    when (sent /= length byt) $ do
      go (drop sent byt)
{- |
Attempt to receive a packet from a socket.
- If an exception is throw, return `Left exn`.
- If it wasn't an IPv4 packet, return `Right Nothing`.
- Otherwise, return `Right (Just packet)`.
-}
-- | Receive one datagram (up to 4096 bytes). @Left err@ on socket
-- failure, @Right Nothing@ for non-IPv4 senders, otherwise the payload
-- plus the sender's port and address.
recvPacket
  :: HasLogFunc e
  => Socket
  -> RIO e (Either IOError (Maybe (ByteString, PortNumber, HostAddress)))
recvPacket sok = do
  io (tryIOError $ recvFrom sok 4096) <&> \case
    Left exn -> Left exn
    Right (b, SockAddrInet p a) -> Right (Just (b, p, a))
    Right (_, _ ) -> Right Nothing
-- Fake Server for No-Networking Mode ------------------------------------------
{- |
Fake UDP API for no-networking configurations.
-}
-- | No-networking stand-in: sends are discarded, receives block forever
-- (STM 'retry'), and kill is a no-op.
fakeUdpServ :: HasLogFunc e => RIO e UdpServ
fakeUdpServ = do
  logInfo $ displayShow ("AMES", "UDP", "\"Starting\" fake UDP server.")
  pure UdpServ { .. }
 where
  usSend = \_ _ -> pure ()
  usRecv = retry
  usKill = pure ()
-- Real Server -----------------------------------------------------------------
{- |
Real UDP server. See module-level docs.
-}
realUdpServ
  :: forall e
   . (HasLogFunc e, HasPortControlApi e)
  => PortNumber
  -> HostAddress
  -> AmesStat
  -> RIO e UdpServ
realUdpServ startPort hos sat = do
  logInfo $ displayShow ("AMES", "UDP", "Starting real UDP server.")
  env <- ask
  -- vSock: the currently-open socket (Nothing while (re)binding).
  -- vFail: one-slot signal carrying a socket that broke and must be
  -- closed and replaced by the opener thread.
  vSock <- newTVarIO Nothing
  vFail <- newEmptyTMVarIO
  qSend <- newTBQueueIO 100 -- TODO Tuning
  qRecv <- newTBQueueIO 100 -- TODO Tuning
  {-
    If reading or writing to a socket fails, unbind it and tell the
    socket-open thread to close it and open another.
    This is careful about edge-cases. In any of these cases, do nothing.
    - If vSock isn't set to the socket we used, do nothing.
    - If vFail is already set (another thread signaled failure already).
  -}
  let signalBrokenSocket :: Socket -> RIO e ()
      signalBrokenSocket sock = do
        logInfo $ displayShow ("AMES", "UDP"
                              , "Socket broken. Requesting new socket"
                              )
        atomically $ do
          mSock <- readTVar vSock
          mFail <- tryReadTMVar vFail
          when (mSock == Just sock && mFail == Nothing) $ do
            putTMVar vFail sock
            writeTVar vSock Nothing
      -- Push an inbound packet onto qRecv; drop it (and bump the
      -- queue-full stat) when the queue is saturated.
      enqueueRecvPacket :: PortNumber -> HostAddress -> ByteString -> RIO e ()
      enqueueRecvPacket p a b = do
        did <- atomically (tryWriteTBQueue qRecv (p, a, b))
        when (did == False) $ do
          bump (asUqf sat)
          logWarn $ displayShow $ ("AMES", "UDP",)
            "Dropping inbound packet because queue is full."
      enqueueSendPacket :: SockAddr -> ByteString -> RIO e ()
      enqueueSendPacket a b = do
        did <- atomically (tryWriteTBQueue qSend (a, b))
        when (did == False) $ do
          logWarn "AMES: UDP: Dropping outbound packet because queue is full."
  -- opener: bind a socket, publish it in vSock, then block until some
  -- thread reports it broken; close it and loop, reusing the same port.
  let opener por = do
        logInfo $ displayShow $ ("AMES", "UDP", "Trying to open socket, port",)
          por
        sk <- forceBind por hos
        sn <- io $ getSocketName sk
        sp <- io $ socketPort sk
        logInfo $ displayShow $ ("AMES", "UDP", "Got socket", sn, sp)
        let waitForRelease = do
              atomically (writeTVar vSock (Just sk))
              broken <- atomically (takeTMVar vFail)
              logWarn "AMES: UDP: Closing broken socket."
              io (close broken)
        case sn of
          (SockAddrInet boundPort _) ->
            -- When we're on IPv4, maybe port forward at the NAT.
            rwith (requestPortAccess $ fromIntegral boundPort) $
              \() -> waitForRelease
          _ -> waitForRelease
        opener sp
  tOpen <- async $ opener startPort
  tSend <- async $ forever $ join $ atomically $ do
    (adr, byt) <- readTBQueue qSend
    readTVar vSock <&> \case
      Nothing -> pure ()
      Just sk -> do
        okay <- sendPacket byt adr sk
        unless okay (signalBrokenSocket sk)
  tRecv <- async $ forever $ do
    atomically (readTVar vSock) >>= \case
      Nothing -> threadDelay 100_000
      Just sk -> do
        recvPacket sk >>= \case
          Left exn -> do
            bump (asUdf sat)
            logError "AMES: UDP: Failed to receive packet"
            signalBrokenSocket sk
          Right Nothing -> do
            bump (asUi6 sat)
            logError "AMES: UDP: Dropping non-ipv4 packet"
            pure ()
          Right (Just (b, p, a)) -> do
            logDebug "AMES: UDP: Received packet."
            bump (asUdp sat)
            enqueueRecvPacket p a b
  let shutdown = do
        logInfo "AMES: UDP: Shutting down. (killing threads)"
        cancel tOpen
        cancel tSend
        cancel tRecv
        logInfo "AMES: UDP: Shutting down. (closing socket)"
        io $ join $ atomically $ do
          res <- readTVar vSock <&> maybe (pure ()) close
          writeTVar vSock Nothing
          pure res
  pure $ UdpServ { usSend = \a b -> runRIO env (enqueueSendPacket a b)
                 , usRecv = readTBQueue qRecv
                 , usKill = runRIO env shutdown
                 }
| urbit/urbit | pkg/hs/urbit-king/lib/Urbit/Vere/Ames/UDP.hs | mit | 8,618 | 0 | 25 | 2,478 | 2,018 | 971 | 1,047 | -1 | -1 |
module Hopsu.Db where
import Hopsu.Types
-- Nothing here, rewrite
-- | Record a user action (stub implementation awaiting the DB rewrite).
logUser :: User -> String
logUser _ = "Log'd"

-- | Whether the user has operator privileges (stub: nobody is op).
isOp :: User -> Bool
isOp _ = False

-- | Look up the expansion for a shortened URL (stub: fixed project URL).
getUrl :: String -> String
getUrl _ = "https://www.github.com/anobi/hopsu"

-- | Store a new URL mapping (stub: always refuses).
addUrl :: String -> String
addUrl _ = "No."
| anobi/hopsu | src/Hopsu/Db.hs | mit | 273 | 0 | 5 | 51 | 80 | 44 | 36 | 10 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Network.Bitcoin.Api.Types.UnspentTxOut where
-- import Control.Applicative ((<$>), (<*>))
-- import Control.Lens.TH (makeLenses)
import Control.Monad (mzero)
import qualified Data.Base58String as B58S
import Data.Word (Word32, Word64)
import Data.Aeson
import Data.Aeson.Types
import qualified Data.Bitcoin.Types as BT
-- | One unspent transaction output as reported by bitcoind's
-- @listunspent@/@gettxout@ style responses.
data UnspentTxOut = UnspentTxOut {
      confs :: Integer -- number of confirmations
     ,amount :: BT.Btc -- output value in BTC
     ,addresses :: [B58S.Base58String] -- addresses from the scriptPubKey
    } deriving (Eq, Show)
instance FromJSON UnspentTxOut where
  parseJSON (Object o) =
    UnspentTxOut
      <$> o .: "confirmations"
      <*> o .: "value"
      -- addresses live nested under the "scriptPubKey" object
      <*> ( (o .: "scriptPubKey") >>= (.: "addresses") )
  parseJSON _ = mzero
| solatis/haskell-bitcoin-api | src/Network/Bitcoin/Api/Types/UnspentTxOut.hs | mit | 820 | 0 | 10 | 232 | 176 | 107 | 69 | 20 | 0 |
{-# LANGUAGE MultiParamTypeClasses #-}
import Control.Applicative
-- | The free monad over a functor @f@: either a pure value or one layer
-- of @f@ wrapping more computation.
data Free f a
  = Pure a
  | Free (f (Free f a))

-- Functor/Applicative instances are required by the AMP (GHC >= 7.10);
-- without them the Monad instance below no longer compiles.
instance Functor f => Functor (Free f) where
  fmap g (Pure a)  = Pure (g a)
  fmap g (Free fa) = Free (fmap (fmap g) fa)

instance Functor f => Applicative (Free f) where
  pure = Pure
  Pure g  <*> x = fmap g x
  Free fg <*> x = Free (fmap (<*> x) fg)

instance Functor f => Monad (Free f) where
  return a = Pure a
  Pure a >>= f = f a
  Free f >>= g = Free (fmap (>>= g) f)
-- | Monads that can absorb one layer of the functor @f@.
class Monad m => MonadFree f m where
  wrap :: f (m a) -> m a
-- | Lift a single functor action into any 'MonadFree' monad.
liftF :: (Functor f, MonadFree f m) => f a -> m a
liftF = wrap . fmap return
-- | Tear down a 'Free' structure by folding every 'Free' layer with the
-- given algebra; 'Pure' values are returned unchanged.
iter :: Functor f => (f a -> a) -> Free f a -> a
iter alg = go
  where
    go (Pure a)  = a
    go (Free fs) = alg (fmap go fs)
-- | Collapse a 'Free' computation over a monad back into that monad.
retract :: Monad f => Free f a -> f a
retract free = case free of
  Pure a  -> return a
  Free as -> as >>= retract
| riwsky/wiwinwlh | src/free_impl.hs | mit | 600 | 0 | 10 | 163 | 337 | 164 | 173 | 19 | 1 |
module Kantour.UtilsSpec where
import Test.Hspec
import Kantour.Utils
import Test.Hspec.QuickCheck
import Test.QuickCheck
import Control.Monad
{-# ANN module "HLint: ignore Redundant do" #-}
-- | Specs for the association-list helpers and 'stripR' from
-- "Kantour.Utils".
spec :: Spec
spec = do
  describe "alterAL" $ do
    let ex1 = [(1 :: Int, 10 :: Int)]
        ex2 = (2,20):(3,30):ex1
    specify "insertion behaviors" $ do
      insertAL 1 10 [] `shouldBe` ex1
      insertAL 1 10 [(1,20)] `shouldBe` ex1
      insertAL 5 50 ex2 `shouldBe` ex2 ++ [(5,50)]
      insertAL 3 0 ex2 `shouldBe` [(2,20),(3,0),(1,10)]
    specify "deletion behaviors" $ do
      deleteAL 1 [] `shouldBe` ([] `asTypeOf` ex1)
      deleteAL 1 ex1 `shouldBe` []
      deleteAL 3 ex2 `shouldBe` [(2,20),(1,10)]
    specify "toggle one element" $ do
      let toggle Nothing = Just 1
          toggle (Just _) = Nothing
      alterAL toggle 6 ex1 `shouldBe` [(1,10),(6,1)]
      alterAL toggle 3 ex2 `shouldBe` [(2,20),(1,10)]
  describe "stripR" $ do
    -- reference implementation: stripping the right is stripping the
    -- left of the reversed string
    let stripR' = reverse . stripL . reverse
    prop "general correctness" $
      \xs -> stripR xs === stripR' xs
    prop "dense whitespace" $ do
      let charGen = elements (['a'..'z'] ++ ['A'..'Z'])
          spaceGen = elements " \t\v\n\f\r"
          cGen = do
            which <- choose (0,9)
            if which < (3 :: Int)
              then charGen -- 30% chance of generating a-z A-Z
              else spaceGen -- 70% chance of generating whitespaces
      l <- choose (0,20)
      xs <- replicateM l cGen
      pure $ counterexample xs (stripR xs === stripR' xs)
| Javran/tuppence | test/Kantour/UtilsSpec.hs | mit | 1,702 | 0 | 20 | 596 | 615 | 327 | 288 | 41 | 3 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE CPP #-}
module Hpack.Render (
-- | /__NOTE:__/ This module is exposed to allow integration of Hpack into
-- other tools. It is not meant for general use by end users. The following
-- caveats apply:
--
-- * The API is undocumented, consult the source instead.
--
-- * The exposed types and functions primarily serve Hpack's own needs, not
-- that of a public API. Breaking changes can happen as Hpack evolves.
--
-- As an Hpack user you either want to use the @hpack@ executable or a build
-- tool that supports Hpack (e.g. @stack@ or @cabal2nix@).
renderPackage
, renderPackageWith
, defaultRenderSettings
, RenderSettings(..)
, Alignment(..)
, CommaStyle(..)
#ifdef TEST
, renderConditional
, renderDependencies
, renderLibraryFields
, renderExecutableFields
, renderFlag
, renderSourceRepository
, renderDirectories
, formatDescription
#endif
) where
import Control.Monad
import Data.Char
import Data.Maybe
import Data.List
import Data.Map.Lazy (Map)
import qualified Data.Map.Lazy as Map
import Hpack.Util
import Hpack.Config
import Hpack.Render.Hints
import Hpack.Render.Dsl
import Hpack.Syntax.Dependencies
-- | Render a package to cabal-file syntax, sniffing formatting
-- conventions (field order, alignment, comma style) from the previously
-- generated cabal file so regeneration produces minimal diffs.
renderPackage :: [String] -> Package -> String
renderPackage oldCabalFile = renderPackageWith settings alignment formattingHintsFieldOrder formattingHintsSectionsFieldOrder
  where
    FormattingHints{..} = sniffFormattingHints oldCabalFile
    -- fall back to a 16-column field alignment when none was sniffed
    alignment = fromMaybe 16 formattingHintsAlignment
    settings = formattingHintsRenderSettings
-- | Core renderer: produce the cabal file as the header fields followed
-- by one chunk per stanza (source-repository, custom-setup, flags,
-- libraries, executables, tests, benchmarks), separated by blank lines.
renderPackageWith :: RenderSettings -> Alignment -> [String] -> [(String, [String])] -> Package -> String
renderPackageWith settings headerFieldsAlignment existingFieldOrder sectionsFieldOrder Package{..} = intercalate "\n" (unlines header : chunks)
  where
    chunks :: [String]
    chunks = map unlines . filter (not . null) . map (render settings 0) $ sortStanzaFields sectionsFieldOrder stanzas
    header :: [String]
    header = concatMap (render settings {renderSettingsFieldAlignment = headerFieldsAlignment} 0) packageFields
    -- top-level fields, ordered to match the pre-existing cabal file and
    -- with user-supplied verbatim fields taking precedence
    packageFields :: [Element]
    packageFields = addVerbatim packageVerbatim . sortFieldsBy existingFieldOrder $
      headerFields ++ [
        Field "extra-source-files" (renderPaths packageExtraSourceFiles)
      , Field "extra-doc-files" (renderPaths packageExtraDocFiles)
      , Field "data-files" (renderPaths packageDataFiles)
      ] ++ maybe [] (return . Field "data-dir" . Literal) packageDataDir
    sourceRepository :: [Element]
    sourceRepository = maybe [] (return . renderSourceRepository) packageSourceRepository
    customSetup :: [Element]
    customSetup = maybe [] (return . renderCustomSetup) packageCustomSetup
    library :: [Element]
    library = maybe [] (return . renderLibrary) packageLibrary
    stanzas :: [Element]
    stanzas = concat [
        sourceRepository
      , customSetup
      , map renderFlag packageFlags
      , library
      , renderInternalLibraries packageInternalLibraries
      , renderExecutables packageExecutables
      , renderTests packageTests
      , renderBenchmarks packageBenchmarks
      ]
    -- 'mapMaybe' drops fields whose value is Nothing entirely
    headerFields :: [Element]
    headerFields = mapMaybe (\(name, value) -> Field name . Literal <$> value) $ [
        ("name", Just packageName)
      , ("version", Just packageVersion)
      , ("synopsis", packageSynopsis)
      , ("description", (formatDescription headerFieldsAlignment <$> packageDescription))
      , ("category", packageCategory)
      , ("stability", packageStability)
      , ("homepage", packageHomepage)
      , ("bug-reports", packageBugReports)
      , ("author", formatList packageAuthor)
      , ("maintainer", formatList packageMaintainer)
      , ("copyright", formatList packageCopyright)
      , ("license", packageLicense)
      , case packageLicenseFile of
          [file] -> ("license-file", Just file)
          files -> ("license-files", formatList files)
      , ("tested-with", packageTestedWith)
      , ("build-type", Just (show packageBuildType))
      ]
    -- comma-separated, continuation lines aligned with the field column
    formatList :: [String] -> Maybe String
    formatList xs = guard (not $ null xs) >> (Just $ intercalate separator xs)
      where
        separator = let Alignment n = headerFieldsAlignment in ",\n" ++ replicate n ' '
-- | Reorder the fields inside every stanza that has a known field order.
-- Stanzas whose name is absent from the association list, and elements
-- that are not stanzas at all, are passed through untouched.
sortStanzaFields :: [(String, [String])] -> [Element] -> [Element]
sortStanzaFields sectionsFieldOrder = map reorder
  where
    reorder element = case element of
      Stanza name fields
        | Just fieldOrder <- lookup name sectionsFieldOrder
        -> Stanza name (sortFieldsBy fieldOrder fields)
      _ -> element
-- | Lay out a package description for a cabal file.
--
-- Blank lines become a single @.@ (cabal's paragraph separator), and
-- every line after the first is indented so the text lines up with the
-- @description:@ field (or the requested alignment, whichever is wider).
formatDescription :: Alignment -> String -> String
formatDescription (Alignment alignment) description =
  case allLines of
    [] -> ""
    firstLine : rest -> intercalate "\n" (firstLine : map (pad ++) rest)
  where
    allLines = map dotIfBlank (lines description)
    width = max alignment (length ("description: " :: String))
    pad = replicate width ' '
    dotIfBlank l
      | all isSpace l = "."
      | otherwise = l
-- | Render the @source-repository head@ stanza (always git).
renderSourceRepository :: SourceRepository -> Element
renderSourceRepository SourceRepository{..} = Stanza "source-repository head" [
    Field "type" "git"
  , Field "location" (Literal sourceRepositoryUrl)
  , Field "subdir" (maybe "" Literal sourceRepositorySubdir)
  ]
-- | Render a @flag@ stanza; the optional description comes first.
renderFlag :: Flag -> Element
renderFlag Flag {..} = Stanza ("flag " ++ flagName) $ description ++ [
    Field "manual" (Literal $ show flagManual)
  , Field "default" (Literal $ show flagDefault)
  ]
  where
    description = maybe [] (return . Field "description" . Literal) flagDescription
-- | Render each named internal library as a @library NAME@ stanza.
renderInternalLibraries :: Map String (Section Library) -> [Element]
renderInternalLibraries = map renderInternalLibrary . Map.toList
renderInternalLibrary :: (String, Section Library) -> Element
renderInternalLibrary (name, sect) =
  Stanza ("library " ++ name) (renderLibrarySection sect)
-- | Render each executable as an @executable NAME@ stanza.
renderExecutables :: Map String (Section Executable) -> [Element]
renderExecutables = map renderExecutable . Map.toList
renderExecutable :: (String, Section Executable) -> Element
renderExecutable (name, sect@(sectionData -> Executable{..})) =
  Stanza ("executable " ++ name) (renderExecutableSection [] sect)
-- | Render each test suite; the mandatory @type@ field is prepended.
renderTests :: Map String (Section Executable) -> [Element]
renderTests = map renderTest . Map.toList
renderTest :: (String, Section Executable) -> Element
renderTest (name, sect) =
  Stanza ("test-suite " ++ name)
    (renderExecutableSection [Field "type" "exitcode-stdio-1.0"] sect)
-- | Render each benchmark; like tests, with a fixed @type@ field.
renderBenchmarks :: Map String (Section Executable) -> [Element]
renderBenchmarks = map renderBenchmark . Map.toList
renderBenchmark :: (String, Section Executable) -> Element
renderBenchmark (name, sect) =
  Stanza ("benchmark " ++ name)
    (renderExecutableSection [Field "type" "exitcode-stdio-1.0"] sect)
-- | Render an executable-like section, appending @default-language@.
renderExecutableSection :: [Element] -> Section Executable -> [Element]
renderExecutableSection extraFields = renderSection renderExecutableFields extraFields [defaultLanguage]
renderExecutableFields :: Executable -> [Element]
renderExecutableFields Executable{..} = mainIs ++ [otherModules, generatedModules]
  where
    mainIs = maybe [] (return . Field "main-is" . Literal) executableMain
    otherModules = renderOtherModules executableOtherModules
    generatedModules = renderGeneratedModules executableGeneratedModules
-- | Render the @custom-setup@ stanza with its @setup-depends@.
renderCustomSetup :: CustomSetup -> Element
renderCustomSetup CustomSetup{..} =
  Stanza "custom-setup" $ renderDependencies "setup-depends" customSetupDependencies
-- | Render the main (unnamed) @library@ stanza.
renderLibrary :: Section Library -> Element
renderLibrary sect = Stanza "library" $ renderLibrarySection sect
renderLibrarySection :: Section Library -> [Element]
renderLibrarySection = renderSection renderLibraryFields [] [defaultLanguage]
renderLibraryFields :: Library -> [Element]
renderLibraryFields Library{..} =
  maybe [] (return . renderExposed) libraryExposed ++ [
    renderExposedModules libraryExposedModules
  , renderOtherModules libraryOtherModules
  , renderGeneratedModules libraryGeneratedModules
  , renderReexportedModules libraryReexportedModules
  , renderSignatures librarySignatures
  ]
renderExposed :: Bool -> Element
renderExposed = Field "exposed" . Literal . show
-- | Render the fields common to every section kind, sandwiched between
-- section-specific prefix/suffix fields; verbatim entries override any
-- generated field of the same name (see 'addVerbatim').
renderSection :: (a -> [Element]) -> [Element] -> [Element] -> Section a -> [Element]
renderSection renderSectionData extraFieldsStart extraFieldsEnd Section{..} = addVerbatim sectionVerbatim $
     extraFieldsStart
  ++ renderSectionData sectionData ++ [
    renderDirectories "hs-source-dirs" sectionSourceDirs
  , renderDefaultExtensions sectionDefaultExtensions
  , renderOtherExtensions sectionOtherExtensions
  , renderGhcOptions sectionGhcOptions
  , renderGhcProfOptions sectionGhcProfOptions
  , renderGhcjsOptions sectionGhcjsOptions
  , renderCppOptions sectionCppOptions
  , renderCcOptions sectionCcOptions
  , renderCxxOptions sectionCxxOptions
  , renderDirectories "include-dirs" sectionIncludeDirs
  , Field "install-includes" (LineSeparatedList sectionInstallIncludes)
  , Field "c-sources" (renderPaths sectionCSources)
  , Field "cxx-sources" (renderPaths sectionCxxSources)
  , Field "js-sources" (renderPaths sectionJsSources)
  , renderDirectories "extra-lib-dirs" sectionExtraLibDirs
  , Field "extra-libraries" (LineSeparatedList sectionExtraLibraries)
  , renderDirectories "extra-frameworks-dirs" sectionExtraFrameworksDirs
  , Field "frameworks" (LineSeparatedList sectionFrameworks)
  , renderLdOptions sectionLdOptions
  , Field "pkgconfig-depends" (CommaSeparatedList sectionPkgConfigDependencies)
  ]
  ++ renderBuildTools sectionBuildTools sectionSystemBuildTools
  ++ renderDependencies "build-depends" sectionDependencies
  ++ maybe [] (return . renderBuildable) sectionBuildable
  ++ map (renderConditional renderSectionData) sectionConditionals
  ++ extraFieldsEnd
-- | Drop generated fields shadowed by verbatim entries, then append the
-- verbatim content itself.
addVerbatim :: [Verbatim] -> [Element] -> [Element]
addVerbatim verbatim fields = filterVerbatim verbatim fields ++ renderVerbatim verbatim
-- | Remove every generated 'Field' whose name a verbatim object defines.
filterVerbatim :: [Verbatim] -> [Element] -> [Element]
filterVerbatim verbatim = filter p
  where
    p :: Element -> Bool
    p = \ case
      Field name _ -> name `notElem` fields
      _ -> True
    fields = concatMap verbatimFieldNames verbatim
verbatimFieldNames :: Verbatim -> [String]
verbatimFieldNames verbatim = case verbatim of
  VerbatimLiteral _ -> []
  VerbatimObject o -> Map.keys o
renderVerbatim :: [Verbatim] -> [Element]
renderVerbatim = concatMap $ \ case
  VerbatimLiteral s -> [Verbatim s]
  VerbatimObject o -> renderVerbatimObject o
-- | Single-line values become plain fields; multi-line values become
-- line-separated lists.
renderVerbatimObject :: Map String VerbatimValue -> [Element]
renderVerbatimObject = map renderPair . Map.toList
  where
    renderPair (key, value) = case lines (verbatimValueToString value) of
      [x] -> Field key (Literal x)
      xs -> Field key (LineSeparatedList xs)
-- | Render an @if@ stanza, pairing it with an @else@ stanza when the
-- conditional has an alternative branch.
renderConditional :: (a -> [Element]) -> Conditional (Section a) -> Element
renderConditional renderSectionData (Conditional condition sect mElse) = case mElse of
  Nothing -> if_
  Just else_ -> Group if_ (Stanza "else" $ renderSection renderSectionData [] [] else_)
  where
    if_ = Stanza ("if " ++ condition) (renderSection renderSectionData [] [] sect)
defaultLanguage :: Element
defaultLanguage = Field "default-language" "Haskell2010"
-- | Render a directory list field; a bare @.@ is spelled @./.@ so cabal
-- does not misparse it.
renderDirectories :: String -> [String] -> Element
renderDirectories name = Field name . LineSeparatedList . replaceDots
  where
    replaceDots = map replaceDot
    replaceDot xs = case xs of
      "." -> "./."
      _ -> xs
renderExposedModules :: [String] -> Element
renderExposedModules = Field "exposed-modules" . LineSeparatedList
renderOtherModules :: [String] -> Element
renderOtherModules = Field "other-modules" . LineSeparatedList
renderGeneratedModules :: [String] -> Element
renderGeneratedModules = Field "autogen-modules" . LineSeparatedList
renderReexportedModules :: [String] -> Element
renderReexportedModules = Field "reexported-modules" . LineSeparatedList
renderSignatures :: [String] -> Element
renderSignatures = Field "signatures" . CommaSeparatedList
-- | Render a dependency field plus the @mixins@ collected from every
-- dependency's mixin list.
renderDependencies :: String -> Dependencies -> [Element]
renderDependencies name deps = [
    Field name (CommaSeparatedList renderedDeps)
  , Field "mixins" (CommaSeparatedList $ concat mixins)
  ]
  where
    (renderedDeps, mixins) = unzip . map renderDependency . Map.toList $ unDependencies deps
-- | One dependency becomes its @name VERSION@ string plus one mixin
-- entry (@name MIXIN@) per declared mixin.
renderDependency :: (String, DependencyInfo) -> (String, [String])
renderDependency (name, DependencyInfo mixins version) = (
    name ++ renderVersion version
  , [ name ++ " " ++ mixin | mixin <- mixins ]
  )
renderVersion :: DependencyVersion -> String
renderVersion (DependencyVersion _ c) = renderVersionConstraint c
renderVersionConstraint :: VersionConstraint -> String
renderVersionConstraint version = case version of
  AnyVersion -> ""
  VersionRange x -> " " ++ x
-- | Split tools between the legacy @build-tools@ field (well-known tools
-- and system tools) and the modern @build-tool-depends@ field.
renderBuildTools :: Map BuildTool DependencyVersion -> SystemBuildTools -> [Element]
renderBuildTools (map renderBuildTool . Map.toList -> xs) systemBuildTools = [
    Field "build-tools" (CommaSeparatedList $ [x | BuildTools x <- xs] ++ renderSystemBuildTools systemBuildTools)
  , Field "build-tool-depends" (CommaSeparatedList [x | BuildToolDepends x <- xs])
  ]
-- | Which cabal field a rendered tool entry belongs in.
data RenderBuildTool = BuildTools String | BuildToolDepends String
renderBuildTool :: (BuildTool, DependencyVersion) -> RenderBuildTool
renderBuildTool (buildTool, renderVersion -> version) = case buildTool of
  LocalBuildTool executable -> BuildTools (executable ++ version)
  BuildTool pkg executable
    -- tools cabal has always known by bare name stay in build-tools
    | pkg == executable && executable `elem` knownBuildTools -> BuildTools (executable ++ version)
    | otherwise -> BuildToolDepends (pkg ++ ":" ++ executable ++ version)
  where
    knownBuildTools :: [String]
    knownBuildTools = [
        "alex"
      , "c2hs"
      , "cpphs"
      , "greencard"
      , "haddock"
      , "happy"
      , "hsc2hs"
      , "hscolour"
      ]
renderSystemBuildTools :: SystemBuildTools -> [String]
renderSystemBuildTools = map renderSystemBuildTool . Map.toList . unSystemBuildTools
renderSystemBuildTool :: (String, VersionConstraint) -> String
renderSystemBuildTool (name, constraint) = name ++ renderVersionConstraint constraint
-- Simple word-list renderers for the various option fields.
renderGhcOptions :: [GhcOption] -> Element
renderGhcOptions = Field "ghc-options" . WordList
renderGhcProfOptions :: [GhcProfOption] -> Element
renderGhcProfOptions = Field "ghc-prof-options" . WordList
renderGhcjsOptions :: [GhcjsOption] -> Element
renderGhcjsOptions = Field "ghcjs-options" . WordList
renderCppOptions :: [CppOption] -> Element
renderCppOptions = Field "cpp-options" . WordList
renderCcOptions :: [CcOption] -> Element
renderCcOptions = Field "cc-options" . WordList
renderCxxOptions :: [CxxOption] -> Element
renderCxxOptions = Field "cxx-options" . WordList
renderLdOptions :: [LdOption] -> Element
renderLdOptions = Field "ld-options" . WordList
renderBuildable :: Bool -> Element
renderBuildable = Field "buildable" . Literal . show
renderDefaultExtensions :: [String] -> Element
renderDefaultExtensions = Field "default-extensions" . WordList
renderOtherExtensions :: [String] -> Element
renderOtherExtensions = Field "other-extensions" . WordList
-- | Render file paths one per line; paths containing whitespace or
-- commas are quoted (via 'show') so cabal parses them correctly.
renderPaths :: [Path] -> Value
renderPaths = LineSeparatedList . map renderPath
  where
    renderPath :: Path -> FilePath
    renderPath (Path path)
      | needsQuoting path = show path
      | otherwise = path
    needsQuoting :: FilePath -> Bool
    needsQuoting = any (\x -> isSpace x || x == ',')
| haskell-tinc/hpack | src/Hpack/Render.hs | mit | 15,580 | 0 | 14 | 2,663 | 4,041 | 2,134 | 1,907 | 293 | 3 |
module Test.TestTable
( dropAndRecreateTableDef,
dropTableDef,
dropTableDefSql,
dropTableNameSql,
)
where
import Orville.PostgreSQL.Connection (Connection)
import qualified Orville.PostgreSQL.Internal.Expr as Expr
import qualified Orville.PostgreSQL.Internal.RawSql as RawSql
import Orville.PostgreSQL.Internal.TableDefinition (TableDefinition, mkCreateTableExpr, tableName)
-- | Drop the table described by a 'TableDefinition' if it exists.
dropTableDef ::
  Connection ->
  TableDefinition key writeEntity readEntity ->
  IO ()
dropTableDef connection tableDef = do
  RawSql.executeVoid connection (dropTableDefSql tableDef)
-- | Drop (if present) and then recreate the table, giving tests a clean
-- slate.
dropAndRecreateTableDef ::
  Connection ->
  TableDefinition key writeEntity readEntity ->
  IO ()
dropAndRecreateTableDef connection tableDef = do
  dropTableDef connection tableDef
  RawSql.executeVoid connection (mkCreateTableExpr tableDef)
-- | SQL for dropping the table behind a 'TableDefinition'.
dropTableDefSql ::
  TableDefinition key writeEntity readEntity ->
  RawSql.RawSql
dropTableDefSql =
  dropTableNameExprSql . tableName
-- | SQL for dropping a table given only its (unqualified) name.
dropTableNameSql ::
  String ->
  RawSql.RawSql
dropTableNameSql =
  dropTableNameExprSql . Expr.qualifiedTableName Nothing . Expr.tableName
dropTableNameExprSql ::
  Expr.QualifiedTableName ->
  RawSql.RawSql
dropTableNameExprSql name =
  RawSql.fromString "DROP TABLE IF EXISTS " <> RawSql.toRawSql name
| flipstone/orville | orville-postgresql-libpq/test/Test/TestTable.hs | mit | 1,268 | 0 | 9 | 171 | 273 | 147 | 126 | 37 | 1 |
module Bot (bot) where
import Vindinium.Types
import StupidBot.Bot (stupidBot)
import DumbBot.Bot (dumbBot)
-- | The bot handed to the Vindinium runner; swap the commented line to
-- play 'stupidBot' instead of 'dumbBot'.
bot :: Bot
--bot = stupidBot
bot = dumbBot
| flyrry/phonypony | src/Bot.hs | mit | 153 | 0 | 5 | 23 | 45 | 28 | 17 | 6 | 1 |
{-# LANGUAGE DataKinds #-}
import Control.Category
import Control.Monad.Random
import Data.MyPrelude
import Data.Utils
import Graph
import Numeric.Neural
import qualified System.Console.ANSI as ANSI
import Prelude hiding ((.))
-- | Train a tiny neural network to approximate 'sin' on [0, 2*pi],
-- rendering an ASCII graph of the current approximation every 50
-- generations, and stopping once the max error drops below 0.1.
-- Uses a fixed RNG seed so runs are reproducible.
main :: IO ()
main = flip evalRandT (mkStdGen 739570) $ do
    -- training inputs: a dense grid over one full period
    let xs = [0, 0.01 .. 2 * pi]
    m <- modelR $ whiten sinModel xs
    -- pipeline: batches of 10 samples -> gradient descent (rate 0.5)
    -- -> periodic reporting -> termination check
    runEffect $
        simpleBatchP [(x, sin x) | x <- xs] 10
        >-> descentP m 1 (const 0.5)
        >-> reportTSP 50 report
        >-> consumeTSP check
  where
    -- a 1-4-1 network with tanh activations, squared-difference loss
    sinModel :: StdModel (Vector 1) (Vector 1) Double Double
    sinModel = mkStdModel
        (tanhLayer . (tanhLayer :: Layer 1 4))
        (\x -> Diff $ Identity . sqDiff (pure $ fromDouble x))
        pure
        vhead
    -- worst-case absolute error of the model over a coarse grid
    getError ts =
        let m = tsModel ts
        in maximum [abs (sin x - model m x) | x <- [0, 0.1 .. 2 * pi]]
    -- clear the terminal and draw the generation number plus a graph
    -- of the current approximation
    report ts = liftIO $ do
        ANSI.clearScreen
        ANSI.setSGR [ANSI.SetColor ANSI.Foreground ANSI.Vivid ANSI.Red]
        ANSI.setCursorPosition 0 0
        printf "Generation %d\n" (tsGeneration ts)
        ANSI.setSGR [ANSI.Reset]
        graph (model (tsModel ts)) 0 (2 * pi) 20 50
    -- stop training (Just ()) once the error is small enough
    check ts = return $ if getError ts < 0.1 then Just () else Nothing
| brunjlar/neural | examples/sin/sin.hs | mit | 1,328 | 0 | 15 | 441 | 485 | 246 | 239 | 35 | 2 |
module LearnParsers where
import Text.Trifecta
-- | A parser that always fails with the message "stop".
stop :: Parser a
stop = unexpected "stop"

-- | Parse the single character '1'.
one :: Parser Char
one = char '1'

-- | Parse the single character '2'.
two :: Parser Char
two = char '2'

-- | Parse '1' and then fail.
one' :: Parser Char
one' = one >> stop

-- | Parse '1' and require end of input afterwards.
oneEof :: Parser ()
oneEof = one >> eof

-- | Parse '1' then '2', yielding '2'.
oneTwo :: Parser Char
oneTwo = one >> two

-- | Parse "12" and then fail.
oneTwo' :: Parser ()
oneTwo' = oneTwo >> stop

-- | Parse "12" and require end of input afterwards.
oneTwoEof :: Parser ()
oneTwoEof = oneTwo >> eof
-- | Run a parser against the fixed input "123" and print the result.
testParse :: Show a => Parser a -> IO ()
testParse parser = print (parseString parser mempty "123")
-- | Print a label preceded by a blank line, to separate test output.
pNL :: String -> IO ()
pNL label = putStrLn ("\n" ++ label)
-- | Run every demo parser against "123", printing a label before each
-- result so the effect of 'stop' and 'eof' can be compared.
learnParsersMain :: IO ()
learnParsersMain = do
  pNL "stop:"
  testParse (stop :: Parser Char)
  pNL "one:"
  testParse one
  pNL "one':"
  testParse one'
  pNL "oneEof:"
  testParse oneEof
  pNL "oneTwo:"
  testParse oneTwo
  pNL "oneTwo':"
  testParse oneTwo'
  pNL "oneTwoEof:"
  -- Bug fix: this previously re-ran oneTwo' under the "oneTwoEof:"
  -- label; run the parser that matches the label (it was otherwise
  -- unused).
  testParse oneTwoEof
| JoshuaGross/haskell-learning-log | Code/Haskellbook/catchall/src/LearnParsers.hs | mit | 838 | 0 | 9 | 194 | 331 | 155 | 176 | 38 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
module Views.Components.ComicsList (comicsListView) where
import BasicPrelude
import Text.Blaze.Html5 (Html, toValue, toHtml, (!))
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as A
import Helpers.SPF (SPFHook(SPFLink))
import Models.Comic (Comic)
import qualified Models.Comic as C
import Routes (RouteUrl(ComicUrl))
-- | Render a list of comics as a Bootstrap "list-group" of links.
comicsListView :: [Comic] -> Html
comicsListView items =
  H.div ! A.class_ "list-group" $
    mapM_ comicsListItemView items
-- | Render one comic as a list-group item linking to its detail page.
-- The SPFLink class marks the anchor for SPF-style partial navigation.
comicsListItemView :: Comic -> Html
comicsListItemView comic =
  H.a ! A.href (toValue (ComicUrl (C.id comic)))
      ! A.class_ ("list-group-item " <> toValue SPFLink) $
    H.h4 ! A.class_ "list-group-item-heading" $ toHtml (C.title comic)
| nicolashery/example-marvel-haskell | Views/Components/ComicsList.hs | mit | 802 | 0 | 17 | 115 | 239 | 138 | 101 | 20 | 1 |
module Handler.Demo where
import Import
import Crud.Core
--Aform From Entity Demo
-- | Applicative form for creating or editing an 'Iglesia' record.
-- When an existing record is supplied its fields pre-populate the form.
iglesiaForm :: Maybe Iglesia -> AForm Handler Iglesia
iglesiaForm iglesia = Iglesia
  <$> areq textField "nombre" (iglesiaNombre <$> iglesia)
  <*> areq textField "direccion" (iglesiaDireccion <$> iglesia)
  <*> areq textField "telefono" (iglesiaTelefono <$> iglesia)
  <*> areq textField "ciudad" (iglesiaCiudad <$> iglesia)
  -- <*> areq textField "prueba" (iglesiaPrueba <$> iglesia)
--CRUD
--Create
-- Template Haskell splices (from Crud.Core) that generate the CRUD
-- handlers for the Iglesia entity, all backed by 'iglesiaForm' above;
-- "IglesiaListR" is the route redirected to after mutations.
getNewRoute "Iglesia" "iglesiaForm"
postNewRoute "Iglesia" "iglesiaForm" "IglesiaListR"
getEditRoute "Iglesia" "iglesiaForm"
postEditRoute "Iglesia" "iglesiaForm" "IglesiaListR"
deleteCrudRoute "Iglesia" "IglesiaListR"
listCrudRoute "Iglesia" "iglesiaNombre"
| jairoGilC/Yesod-CRUD-Generator | Handler/Demo.hs | mit | 831 | 0 | 11 | 178 | 167 | 81 | 86 | -1 | -1 |
module Handler.Output
( postOutputR
, getOutputR
) where
import Import hiding (Request)
import Data.List (genericLength)
import Yesod.WebSockets
-- | JSON request body for 'postOutputR': one chunk of command output.
data Request = Request
  { reqContent :: Text -- ^ raw output text to append to the history
  }

-- | Decoded from an object of the form @{"content": "..."}@.
instance FromJSON Request where
  parseJSON = withObject "Output.Request" $ \o -> Request
    <$> o .: "content"
-- | Accept a chunk of output for the command identified by @token@ and
-- append it (timestamped) to that command's history list.
-- Responds 404 (via 'get404') when no such command exists.
postOutputR :: Token -> Handler ()
postOutputR token = do
  now <- liftIO getCurrentTime
  req <- requireJsonBody
  let output = Output
        { outputContent = reqContent req
        , outputCreatedAt = now
        }
  unsafeRunStorage $ do
    -- ensure the command exists before recording output
    void $ (get404 token :: Storage Command)
    void $ rpush (History token) output
-- | Upgrade to a WebSocket and stream the command's output from the
-- beginning of its history.
getOutputR :: Token -> Handler ()
getOutputR token = webSockets $ outputStream token 0
-- | Stream history entries starting at index @start@ over the socket.
-- After each chunk it blocks on an acknowledgement from the client,
-- then recurses with an advanced offset while the command is still
-- running; otherwise it closes the socket.
outputStream :: Token -> Integer -> WebSocketsT Handler ()
outputStream token start = do
  outputs <- lift $ unsafeRunStorage $ lget (History token) start
  forM_ outputs $ \output -> do
    sendTextData $ outputContent output
    ack <- receiveData -- ensure someone's listening
    $(logDebug) $ "received acknowledgement " <> ack
  command <- lift $ unsafeRunStorage $ get404 token
  if (commandRunning command)
    -- poll again past the entries we just sent
    then outputStream token $ start + genericLength outputs
    else sendClose ("command no longer running" :: Text)
| wfleming/tee-io | src/Handler/Output.hs | mit | 1,345 | 0 | 14 | 346 | 388 | 194 | 194 | -1 | -1 |
--
-- Copyright (c) 2006 Don Stewart - http://www.cse.unsw.edu.au/~dons/
-- GPL version 2 or later (see http://www.gnu.org/copyleft/gpl.html)
--
import System.Environment
-- | 'main' runs the main program: haqify the first command-line
-- argument and print the result. Previously this used the partial
-- function 'head' and crashed when invoked with no arguments; now it
-- prints a usage message instead.
main :: IO ()
main = do
  args <- getArgs
  case args of
    (x:_) -> print (haqify x)
    []    -> putStrLn "usage: haq <string>"
haqify s = "Haq! " ++ s
| hskoans/haq | Haq.hs | mit | 286 | 0 | 7 | 49 | 50 | 28 | 22 | 4 | 1 |
module Haskell_Mini_Project
( tail'
, sum'
, distance
, sum''
, reverse'
, reverse''
, pack
) where
import Assist_Lib
-- 1 --
-- | Like 'tail' but total: the tail of an empty list is the empty list.
tail' :: [a] -> [a]
tail' list =
  case list of
    []         -> []
    (_ : rest) -> rest
-- 2 --
-- | Sum of a list of numbers, expressed as a right fold.
sum' :: (Num a) => [a] -> a
sum' = foldr (+) 0
-- 3 --
-- | Euclidean distance between two points with integral coordinates,
-- returned as a floating-point number.
distance :: (Integral a, Floating b) => (a, a) -> (a, a) -> b
distance (x1, y1) (x2, y2) =
  let dx = x1 - x2
      dy = y1 - y2
  in sqrt (fromIntegral (dx * dx + dy * dy))
-- 4 --
-- | Element-wise sum of two lists, truncated to the shorter one.
-- The hand-rolled recursion (with two explicit base cases) was exactly
-- 'zipWith' (+), so use the standard-library combinator directly.
sum'' :: (Num a) => [a] -> [a] -> [a]
sum'' = zipWith (+)
-- 5 --
-- | Reverse a list. The previous definition used 'last'/'init' with
-- list append, which is accidentally O(n^2); a left fold that conses
-- onto an accumulator reverses in O(n) and produces identical results.
reverse' :: [a] -> [a]
reverse' = foldl (flip (:)) []
-- | Reverse a list by naive structural recursion (appending the head
-- after the reversed tail). Added the missing type signature and made
-- the recursion self-contained instead of delegating the tail to
-- 'reverse'' (the result is identical). O(n^2), kept for illustration.
reverse'' :: [a] -> [a]
reverse'' [] = []
reverse'' (x:rest) = reverse'' rest ++ [x]
-- 6 --
-- | Keep only the data blocks whose flag is 'False' (i.e. valid ones).
pack :: [(a, Bool)] -> [(a, Bool)]
pack = filter (not . snd)
| pegurnee/2015-01-341 | projects/project4_mini_haskell/haskell_mini_project.hs | mit | 933 | 0 | 9 | 236 | 485 | 271 | 214 | 33 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-| Unittests for ganeti-htools.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Test.Ganeti.Ssconf (testSsconf) where
import Test.QuickCheck
import qualified Test.HUnit as HUnit
import Data.List
import qualified Data.Map as M
import Test.Ganeti.TestHelper
import Test.Ganeti.TestCommon
import qualified Ganeti.Ssconf as Ssconf
import qualified Ganeti.Types as Types
-- * Ssconf tests

-- Derive a QuickCheck generator for SSKey via Template Haskell.
$(genArbitrary ''Ssconf.SSKey)

-- | SSConf values are arbitrary maps wrapped in the SSConf newtype.
instance Arbitrary Ssconf.SSConf where
  arbitrary = fmap (Ssconf.SSConf . M.fromList) arbitrary

-- * Reading SSConf

-- | Every ssconf key's on-disk filename must start with the ssconf
-- file prefix.
prop_filename :: Ssconf.SSKey -> Property
prop_filename key =
  printTestCase "Key doesn't start with correct prefix" $
    Ssconf.sSFilePrefix `isPrefixOf` Ssconf.keyToFilename "" key
-- | Parsing a "node=bool" listing yields (name, capability) pairs.
caseParseNodesVmCapable :: HUnit.Assertion
caseParseNodesVmCapable = do
  let str = "node1.example.com=True\nnode2.example.com=False"
      result = Ssconf.parseNodesVmCapable str
      expected = return
        [ ("node1.example.com", True)
        , ("node2.example.com", False)
        ]
  HUnit.assertEqual "Mismatch in parsed and expected result" expected result

-- | Parsing a newline-separated hypervisor listing yields the
-- corresponding hypervisor type constructors.
caseParseHypervisorList :: HUnit.Assertion
caseParseHypervisorList = do
  let result = Ssconf.parseHypervisorList "kvm\nxen-pvm\nxen-hvm"
      expected = return [Types.Kvm, Types.XenPvm, Types.XenHvm]
  HUnit.assertEqual "Mismatch in parsed and expected result" expected result

-- | The user-shutdown flag parses "True"/"False" to the matching Bool.
caseParseEnabledUserShutdown :: HUnit.Assertion
caseParseEnabledUserShutdown = do
  let result1 = Ssconf.parseEnabledUserShutdown "True"
      result2 = Ssconf.parseEnabledUserShutdown "False"
  HUnit.assertEqual "Mismatch in parsed and expected result"
    (return True) result1
  HUnit.assertEqual "Mismatch in parsed and expected result"
    (return False) result2
-- * Creating and writing SSConf

-- | Verify that for SSConf we have readJSON . showJSON = Ok.
prop_ReadShow :: Ssconf.SSConf -> Property
prop_ReadShow = testSerialisation

-- Template Haskell splice collecting the above properties and HUnit
-- cases into the "Ssconf" test suite.
testSuite "Ssconf"
  [ 'prop_filename
  , 'caseParseNodesVmCapable
  , 'caseParseHypervisorList
  , 'caseParseEnabledUserShutdown
  , 'prop_ReadShow
  ]
| ribag/ganeti-experiments | test/hs/Test/Ganeti/Ssconf.hs | gpl-2.0 | 2,849 | 0 | 12 | 459 | 406 | 226 | 180 | 47 | 1 |
{-#LANGUAGE RecordWildCards #-}
module Exploration.UNF.API where
import Control.Monad.ST
import Control.Monad.State.Strict
import Data.List
import Data.Map (Map,fromList,empty)
import Data.Set (Set)
import Domain.Action
import Domain.Class
import Exploration.UNF.State
import Language.SimpleC.AST
import Language.SimpleC.Flow
import Prelude hiding (succ)
import Util.Generic
import qualified Data.HashTable.IO as H
import qualified Data.Map as MA
import qualified Data.Maybe as M
import qualified Data.Set as S
import qualified Debug.Trace as T
import qualified Model.GCS as GCS
-- | Debug-output hook, currently a no-op; re-enable the commented
-- 'putStrLn' definition below for verbose unfolder tracing.
-- (Added the previously missing type signature; every call site
-- passes a String.)
showMStr :: String -> IO ()
showMStr _ = return ()
-- showMStr = putStrLn
-- | Default values for various types
-- @ Bottom event: a very special event

-- | Event id of the artificial bottom event that roots every prefix.
botEID :: EventID
botEID = 0

-- The name for bottom (bottom thread id twice, plus an invalid symbol)
botName :: EventName
botName = (GCS.botID, GCS.botID, SymId (-1))

-- The initial bottom event: no predecessors, successors, conflicts,
-- disabled set or alternatives, only the given actions
botEvent :: act -> Event act
botEvent acts = Event botName acts [] [] [] [] []

-- | Default options: stateless and cutoffs off, widening threshold 10.
default_unf_opts :: UnfolderOpts
default_unf_opts = UnfOpts False False 10

-- | Initial statistics: empty prefix except for the bottom event.
default_unf_stats :: UnfolderStats
default_unf_stats =
  let nr_max_conf = 0
      nr_cutoffs = 0
      nr_evs_prefix = 1
      sum_size_max_conf = 0
      nr_evs_per_name = MA.singleton botName 1
      nr_warns = S.empty
  in UnfStats nr_max_conf nr_cutoffs nr_evs_prefix
       sum_size_max_conf nr_evs_per_name nr_warns
-- @ Initial state of the unfolder
-- Seeds the event table with the bottom event, records the system's
-- initial (control) state in the cutoff table and pushes bottom onto
-- the exploration stack.
i_unf_state :: Domain s a => Bool -> Bool -> Int -> GCS.System s a -> IO (UnfolderState s a)
i_unf_state stl cut wid syst = do
  evts <- H.new
  H.insert evts botEID $ botEvent $ GCS.gbac syst
  let initialState = GCS.gbst syst
      controlState = GCS.controlPart initialState
      stas = fromList [(controlState,[(initialState,0)])]
  -- stas <- H.new
  -- H.insert stas initialState botEID
  -- previous configuration is left undefined here and set later
  let pcnf = Conf undefined [] [] [] -- @NOTE: Check!
      stak = [botEID]
      cntr = 1
      opts = UnfOpts stl cut wid
  return $ UnfolderState syst evts pcnf stak cntr stas opts default_unf_stats
-- API
-- GETTERS
-- | Retrieves the event associated with the event id
-- The String argument names the caller and is used only to build the
-- error message when the id is not in the table (a fatal condition).
get_event :: Show act => String -> EventID -> Events act -> IO (Event act)
{-# INLINE get_event #-}
get_event s e events = do
  mv <- H.lookup events e
  case mv of
    Nothing -> do
      str <- showEvents events
      error $ s ++ "-get_event: " ++ show e ++ "\n" ++ str
    Just ev -> return ev
-- | Retrieves fields of an event: immediate sucessors, predecessors, etc.
-- get_pred,get_succ,... :: EventID -> Events act -> IO EventsID
-- Each getter fails hard (via get_event) if the id is not in the table.
get_pred e events = do
  ev@Event{..} <- get_event "getIPred(ecessors)" e events
  return pred
get_succ e events = do
  ev@Event{..} <- get_event "getISucc(essors)" e events
  return succ
get_icnf e events = do
  ev@Event{..} <- get_event "getICnfl(icts)" e events
  return icnf
get_disa e events = do
  ev@Event{..} <- get_event "getDisa(bled)" e events
  return disa
get_alte e events = do
  ev@Event{..} <- get_event "getAltrs(natives)" e events
  return alte
-- NOTE(review): the error labels below look copy-pasted from
-- get_alte; harmless, but misleading in error messages.
get_name e events = do
  ev@Event{..} <- get_event "getAltrs(natives)" e events
  return name
-- | Thread id is the first component of the event name.
get_tid e events = do
  ev@Event{..} <- get_event "getAltrs(natives)" e events
  return $ fst3 name
-- | Symbolic thread id is the third component of the event name.
get_tid_sym e events = do
  ev@Event{..} <- get_event "getAltrs(natives)" e events
  return $ trd3 name
-- SETTERS
-- | Insert (or overwrite) an event in the event table.
set_event :: EventID -> Event act -> Events act -> IO ()
set_event eID ev events = H.insert events eID ev

-- | delete an event from the event hashtable
-- Also unlinks it from its predecessors' successor lists and its
-- immediate conflicts, and recursively deletes its successors.
del_event :: Show act => EventID -> Events act -> IO ()
del_event e events = do
  check <- filterEvent e events
  if check
  then do
    ev@Event{..} <- get_event "deleteEvent" e events
    mapM_ (\e' -> del_succ e e' events) pred
    mapM_ (\e' -> del_icnf e e' events) icnf
    mapM_ (\e' -> del_event e' events) succ
    H.delete events e
  else return ()
-- Link/unlink helpers over the mutable event table; the first argument
-- is the event being (un)registered, the second the event whose record
-- is updated.
add_succ, del_succ, add_icnf, del_icnf :: Show act => EventID -> EventID -> Events act -> IO ()
-- | add e as a sucessor of e'
add_succ e e' events = -- trace ("add_succ: " ++ show e ++ " of " ++ show e') $
  do
    ev@Event{..} <- get_event "add_succ" e' events
    let succEv = e:succ
        ev' = ev{ succ = succEv }
    set_event e' ev' events
-- | delete e as a successor of e'
-- Silently a no-op when e' is no longer in the table.
del_succ e e' events = -- trace ("setSucc: " ++ show e ++ " of " ++ show e') $
  do
    mv <- H.lookup events e'
    case mv of
      Nothing -> return ()
      Just ev -> do
        let succEv = delete e $ succ ev
            ev' = ev{ succ = succEv }
        set_event e' ev' events
-- | add e as an immediate conflict of e'
add_icnf e e' events = -- trace ("add_icnf: " ++ show e ++ " of " ++ show e') $
  do
    ev@Event{..} <- get_event "add_icnf" e' events
    let icnfEv = e:icnf
        ev' = ev{ icnf = icnfEv }
    set_event e' ev' events
-- | delete e as an immediate conflict of e'
-- Also drops any alternative that mentions e.
del_icnf e e' events = -- trace ("del_icnf: " ++ show e ++ " of " ++ show e') $
  do
    ev@Event{..} <- get_event "del_icnf" e' events
    let icnfEv = delete e icnf
        altEv = filter (\v -> not $ elem e v) alte
        ev' = ev{ icnf = icnfEv, alte = altEv }
    set_event e' ev' events
-- | add e to the disabled set of ê
add_disa :: Show act => EventID -> EventID -> Events act -> IO ()
add_disa e ê events = -- trace ("add_disa: " ++ show e ++ " of " ++ show ê) $
  do
    ev@Event{..} <- get_event "add_disa" ê events
    let disaEv = e:disa
        ev' = ev{ disa = disaEv }
    set_event ê ev' events

-- | set de as the disabled set of e
set_disa :: Show act => EventID -> EventsID -> Events act -> IO ()
set_disa e de events = -- trace ("setDisa: " ++ show de ++ " of " ++ show e) $
  do
    ev@Event{..} <- get_event "set_disa" e events
    let ev' = ev{ disa = de }
    set_event e ev' events

-- | add v to the alternatives of e
-- Alternatives are deduplicated with nub.
add_alte :: Show act => EventID -> Alternative -> Events act -> IO ()
add_alte e v events = -- trace ("adding alternative " ++ show v ++ " of " ++ show e) $
  do
    ev@Event{..} <- get_event "add_alte" e events
    let altEv = nub $ v:alte
        ev' = ev{ alte = altEv }
    set_event e ev' events

-- | reset the alternatives of e
reset_alte :: Show act => EventID -> Events act -> IO ()
reset_alte e events = do
  ev@Event{..} <- get_event "reset_alte" e events
  let altEv = []
      ev' = ev{ alte = altEv }
  set_event e ev' events
-- | set conf as the previous configuration
set_pcnf :: Configuration st -> UnfolderOp st act ()
set_pcnf conf = do
  s@UnfolderState{..} <- get
  let ns = s{ pcnf = conf }
  put ns

-- Stack related operations
-- @ push
push :: EventID -> UnfolderOp st act ()
push e = do
  s@UnfolderState{..} <- get
  let nstack = e:stak
  put s{ stak = nstack }

-- @ pop
-- NOTE(review): uses the partial 'tail'; assumes the stack is
-- non-empty at every call site.
pop :: UnfolderOp st act ()
pop = do
  s@UnfolderState{..} <- get
  let nstack = tail stak
  put s{ stak = nstack }

-- @ freshCounter - updates the counter of events
-- Returns the current counter value and increments it in the state.
freshCounter :: UnfolderOp st act Counter
freshCounter = do
  s@UnfolderState{..} <- get
  let ec = cntr
      nec = ec + 1
  put s{ cntr = nec }
  return ec

-- | set (update) the cutoff table
set_cutoff_table :: States st -> UnfolderOp st act ()
set_cutoff_table cutoffs = do
  s@UnfolderState{..} <- get
  put s{ stas = cutoffs}
-- Update statistics of the unfolding exploration
inc_max_conf, inc_cutoffs, inc_evs_prefix, dec_evs_prefix :: UnfolderOp st act ()
-- | increment nr of maximal configurations
inc_max_conf = do
  s@UnfolderState{..} <- get
  let n_max_conf = nr_max_conf stats + 1
      stats' = stats { nr_max_conf = n_max_conf }
  put s{ stats = stats' }

-- | increment nr of cutoffs
inc_cutoffs = do
  s@UnfolderState{..} <- get
  let n_cutoffs = nr_cutoffs stats + 1
      stats' = stats { nr_cutoffs = n_cutoffs }
  put s{ stats = stats' }

-- | Apply a binary operator to the prefix-size counter (with 1 as the
-- second operand); shared body for inc/dec below.
op_evs_prefix :: (Counter -> Counter -> Counter) -> UnfolderOp st act ()
op_evs_prefix op = do
  s@UnfolderState{..} <- get
  let n_evs_prefix = op (nr_evs_prefix stats) 1
      stats' = stats { nr_evs_prefix = n_evs_prefix }
  put s{ stats = stats' }
-- | increment the size of U
inc_evs_prefix = op_evs_prefix (+)
-- | decrement the size of U
dec_evs_prefix = op_evs_prefix (-)

-- | add to the current size
inc_sum_size_max_conf :: UnfolderOp st act ()
inc_sum_size_max_conf = do
  s@UnfolderState{..} <- get
  let n_size_max_conf = sum_size_max_conf stats + (toInteger $ nr_evs_prefix stats)
      stats' = stats { sum_size_max_conf = n_size_max_conf }
  put s{ stats = stats' }

-- | increment the table of event names
inc_evs_per_name :: EventName -> UnfolderOp st act ()
inc_evs_per_name name = do
  s@UnfolderState{..} <- get
  let info = nr_evs_per_name stats
      n_evs_per_name =
        case MA.lookup name info of
          Nothing -> MA.insert name 1 info
          Just n  -> MA.insert name (n+1) info
      stats' = stats { nr_evs_per_name = n_evs_per_name }
  put s{ stats = stats' }

-- | add to the warning sets
add_warns :: Set Int -> UnfolderOp st act ()
add_warns warns = do
  s@UnfolderState{..} <- get
  let _nr_warns = S.union warns $ nr_warns stats
      stats' = stats { nr_warns = _nr_warns }
  put s{ stats = stats' }
{-
inc_widen_map :: EventName -> UnfolderOp st act ()
inc_widen_map ename = do
s@UnfolderState{..} <- get
let ewide' = case MA.lookup ename ewide of
Nothing -> MA.insert ename 1 ewide
Just n -> MA.insert ename (n+1) ewide
put s { ewide = ewide' }
set_widen_map :: Map NodeId Int -> UnfolderOp st act ()
set_widen_map wmap = do
s@UnfolderState{..} <- get
put s{ widen = wmap }
-}
-- | Utility functions
-- | Filters a list of events ids that are still in the prefix
filterEvents :: EventsID -> Events act -> IO EventsID
filterEvents es events = filterM (\e -> filterEvent e events) es
-- | Checks if an event id *e* is still in the prefix, i.e. whether it
-- is present in the event table. (Replaced the manual case analysis
-- with the unused binding by 'M.isJust' over the lookup result.)
filterEvent :: EventID -> Events act -> IO Bool
filterEvent e events = M.isJust <$> H.lookup events e
-- | Splits between events that are dependent and independent of
-- the event name and actions
-- Tail-recursively partitions es, accumulating into (dep, indep);
-- emits trace output through showMStr (a no-op unless enabled).
partition_dependent :: (Show act, Action act) => EventInfo act -> Events act -> (EventsID, EventsID) -> EventsID -> IO (EventsID, EventsID)
partition_dependent êinfo events (dep,indep) es = do
  case es of
    [] -> do
      showMStr "partition_dependent: end"
      return (dep,indep)
    (e:r) -> do
      ev@Event{..} <- get_event "partition_dependent" e events
      let is_dep = is_dependent êinfo (name,acts)
      showMStr $ "\t e = " ++ show e ++ ", result = " ++ show is_dep
      showMStr $ "\t name = " ++ show name
      showMStr $ "\t acts = " ++ show acts
      if is_dep
      then partition_dependent êinfo events (e:dep,indep) r
      else partition_dependent êinfo events (dep,e:indep) r

-- | Two events are independent when neither is bottom and their
-- (name, actions) pairs are not dependent.
is_independent :: (Show act, Action act) => EventID -> EventID -> Events act -> IO Bool
is_independent e1 e2 evts =
  if e1 == GCS.botID || e2 == GCS.botID
  then return False
  else do
    ev1 <- get_event "is_independent" e1 evts
    ev2 <- get_event "is_independent" e2 evts
    return $ not $ is_dependent (name ev1, acts ev1) (name ev2, acts ev2)

-- | Checks if two event names are dependent
-- This occurs if they are events of the same process
-- or their actions are interfering.
-- Of course, one can emulate events of the same process
-- in their actions (by for example considering Writes to
-- the PC variable) but this would be more expensive.
-- Additionally: any event is dependent on bottom, and a thread-create
-- action is dependent on the events of the created thread.
is_dependent :: (Show act, Action act) => EventInfo act -> EventInfo act -> Bool
is_dependent a@((pid,_,tid),acts) b@((pid',_,tid'),acts') =
  let c1 = pid == GCS.botID || pid' == GCS.botID
      c2 = pid == pid'
      c3 = interferes acts acts'
      c4 = isCreateOf (SymId pid) acts'
      c5 = isCreateOf (SymId pid') acts
      r = or [c1,c2,c3,c4,c5]
  in r
-- "UBER" EXPENSIVE OPERATIONS THAT SHOULD BE AVOIDED!
-- predecessors (local configuration) and sucessors of an event
predecessors, successors :: Show act => EventID -> Events act -> IO EventsID
{-# INLINABLE predecessors #-}
predecessors e events = do
preds <- predecessors' e events
return $ nub preds
where
predecessors' :: Show act => EventID -> Events act -> IO EventsID
predecessors' e events = do
ev@Event{..} <- get_event "predecessors" e events
foldM (\a e -> predecessors' e events >>= \r -> return $ a ++ r) pred pred
{-# INLINABLE successors #-}
successors e events = do
succs <- successors' e events
return $ nub succs
where
successors' :: Show act => EventID -> Events act -> IO EventsID
successors' e events = do
ev@Event{..} <- get_event "successors" e events
foldM (\a e -> successors' e events >>= \r -> return $ a ++ r) succ succ
-- | Retrieves all events of a configuration
get_evts_of_conf :: Show act => EventsID -> Events act -> IO EventsID
get_evts_of_conf maxevs events = do
preds <- mapM (\e -> predecessors e events) maxevs
return $ maxevs ++ (nub $ concat preds)
-- @OBSOLETE (TO BE REMOVED IN THE NEXT VERSION)
-- | restore previous disable set
-- Copies the disabled sets of the given snapshot into the current
-- event table (matching events by id).
set_previous_disa :: Show act => Events act -> UnfolderOp st act ()
set_previous_disa events = do
  s@UnfolderState{..} <- get
  kv <- lift $ H.toList events
  lift $ mapM_ (\(e,ev) -> get_event "setPDisa" e evts >>= \ev' ->
    let nev = ev' { disa = disa ev }
    in set_event e nev evts) kv
  return ()

-- | A set of events is a configuration when it is conflict-free and
-- causally closed.
is_configuration :: Show act => Events act -> EventsID -> IO Bool
is_configuration evts conf = do
  cnffree <- allM (\e -> get_icnf e evts >>= \es -> return $! null (es `intersect` conf)) conf
  causaclosed <- is_causally_closed evts conf conf
  return $! cnffree && causaclosed

-- | Check that every predecessor of each listed event is in conf.
is_causally_closed :: Show act => Events act -> EventsID -> EventsID -> IO Bool
is_causally_closed evts conf [] = return True
is_causally_closed evts conf (e:es) = do
  prede <- predecessors e evts
  if all (\e' -> e' `elem` conf) prede
  then is_causally_closed evts conf es
  else return False
-- | Find the (unique) predecessor of event e that has the given event
-- name, searching immediate predecessors first and recursing upwards
-- otherwise; returns botID at/past the bottom event. Errors out if no
-- candidate exists or if multiple distinct candidates are found.
predecessorWith :: Show act => EventID -> EventName -> Events act -> IO EventID
predecessorWith 0 p events = return GCS.botID
predecessorWith e p events = do
  pred <- get_pred e events
  epred <- filterM (\e -> get_event "predecessorWith" e events >>=
    \ev@Event{..} -> return $ name == p) pred
  if null epred
  then do
    res <- mapM (\e -> predecessorWith e p events) pred
    return $ filterResult res
  else return $ filterResult epred
  where
    -- collapse the candidate list: all non-bottom results must agree
    filterResult :: EventsID -> EventID
    filterResult es =
      if null es
      then error "predecessorWith: shouldn't happen"
      else let res = filter (/= 0) es
           in if null res
              then GCS.botID
              else if all (== (head res)) (tail res)
                   then head res
                   else error "predecessorWith: multiple possibilities"
| marcelosousa/poet | src/Exploration/UNF/API.hs | gpl-2.0 | 14,608 | 0 | 19 | 3,368 | 4,711 | 2,375 | 2,336 | 319 | 5 |
-- rm1x-template: make keymaps for Yamaha Rm1X
-- Copyright (C) 2017 karamellpelle@hotmail.com
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License along
-- with this program; if not, write to the Free Software Foundation, Inc.,
-- 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
--
module Main where
import Helpers
import Template
--------------------------------------------------------------------------------
--
-- | Print the command-line usage summary for all supported modes.
mainHelp :: IO ()
mainHelp = do
  putStrLn "Usage:"
  putStrLn "rm1x-template --help"
  putStrLn "rm1x-template --svg kit.txt [--output file|folder] [--force]"
  putStrLn "rm1x-template --svg-pages kit.txt [--output folder] [--force]"
  putStrLn "rm1x-template --book kit1.txt ... kitN.txt [--output file|folder] [--force]"
--------------------------------------------------------------------------------
-- main
-- | Entry point: dispatch to the SVG, SVG-pages or book mode based on
-- which command-line option was given; show help when asked for or
-- when no mode is present. Note that several modes may run in one
-- invocation if several options are passed.
main :: IO ()
main = do
  help <- getOption "help"
  svg <- getOption "svg"
  svgpages <- getOption "svg-pages"
  book <- getOption "book"
  when ( isJust help || (isNothing svg && isNothing svgpages && isNothing book) ) $ mainHelp
  -- mainSVG
  case svg of
    Just [] -> die "No kit input file."
    Just [path] -> mainSVG path
    Just _ -> die "Command takes only 1 argument."
    Nothing -> return ()
  -- mainSVGPages
  case svgpages of
    Just [] -> die "No kit input file."
    Just [path] -> mainSVGPages path
    Just _ -> die "Command takes only 1 argument."
    Nothing -> return ()
  -- mainBook
  case book of
    Just [] -> putStrLn "Warning: Creating empty book" >> mainBook []
    Just paths -> mainBook paths
    Nothing -> return ()
--------------------------------------------------------------------------------
--
-- | create svg image from kit
-- | create svg image from kit
-- The output path comes from --output: a folder yields
-- folder/<kit-base>.svg, a file path is used verbatim, and absent
-- --output defaults to <kit-base>.svg in the current directory.
mainSVG :: String -> IO ()
mainSVG path = do
  output <- getOption "output"
  path' <- case output of
    Just [path'] -> doesDirectoryExist path' >>= \exist -> if exist
      then return $ path' </> takeBaseName path <.> "svg"
      else return path'
    _ -> return $ takeBaseName path <.> "svg"
  -- make sure we have valid input and output
  assertExist path
  assertOverwrite path'
  -- make file
  makeSVG path path' >>= \res -> case res of
    Just (SVG name svg) -> putStrLn $ name ++ " (" ++ svg ++ ") written."
    Nothing -> putStrLn $ "Error: Could not make svg."
-- | create 3 svg images in A4 from kit
-- | create 3 svg images in A4 from kit
-- Writes <base>_page0.svg .. <base>_page2.svg into the --output
-- folder (or the current directory); a non-folder --output is fatal.
mainSVGPages :: String -> IO ()
mainSVGPages path = do
  output <- getOption "output"
  dir <- case output of
    Just [dir] -> doesDirectoryExist dir >>= \exist -> if exist
      then return dir
      else die $ dir ++ " is not a folder."
    _ -> return ""
  -- make sure we have valid input and output
  assertExist path
  forM_ ["_page0", "_page1", "_page2"] $ \end ->
    assertOverwrite $ dir </> (takeBaseName path ++ end) <.> "svg"
  -- make file
  makeSVGPages path dir >>= \res -> case res of
    Just (SVGPages name svg0 svg1 svg2) ->
      putStrLn $ name ++ " (" ++ svg0 ++ " / " ++ svg1 ++ " / " ++ svg2 ++ ") written."
    Nothing ->
      putStrLn $ "Error: Could not make svg pages."
-- | create a pdf from kits
-- | create a pdf from kits
-- A folder --output yields folder/YamahaRm1x.pdf, a file path is used
-- verbatim, and absent --output defaults to YamahaRm1x.pdf.
mainBook :: [String] -> IO ()
mainBook paths = do
  output <- getOption "output"
  path' <- case output of
    Just [path'] -> doesDirectoryExist path' >>= \exist -> if exist
      then return $ path' </> "YamahaRm1x" <.> "pdf"
      else return path'
    _ -> return $ "YamahaRm1x" <.> "pdf"
  -- make sure we have valid input and output
  forM_ paths assertExist
  assertOverwrite path'
  -- make file
  makeBook paths path' >>= \res -> case res of
    Just (Book name pdf) -> putStrLn $ name ++ " (" ++ pdf ++ ") written."
    Nothing -> putStrLn $ "Error: Could not make book."
--------------------------------------------------------------------------------
--
-- | make sure we have an actual input file
-- | Abort the program with an error message unless the given input
-- file exists.
assertExist :: FilePath -> IO ()
assertExist path = do
  fileExists <- doesFileExist path
  when (not fileExists) $
    die ("Error: File " ++ path ++ " does not exist!")
-- | make sure we don't overwrite unintentionally
-- | make sure we don't overwrite unintentionally
-- With --force the check is skipped; otherwise, if the target exists,
-- prompt the user and exit successfully on "no".
assertOverwrite :: FilePath -> IO ()
assertOverwrite path = do
  force <- getFlag "force"
  when (not force) $ do
    exist <- doesFileExist path
    when exist $ yesno ("Overwrite " ++ path ++ "? (y/n) ")
      (return ())
      (putStrLn "Cancelled." >> exitSuccess)
  where
    -- prompt until the answer starts with y/Y (run y) or n/N (run n)
    yesno str y n = do
      putStr str
      hFlush stdout
      getLine >>= \a -> case a of
        ('y':_) -> y
        ('Y':_) -> y
        ('n':_) -> n
        ('N':_) -> n
        _ -> yesno str y n
-- |
-- | Look up @--opt@ on the command line. Returns Nothing when absent,
-- otherwise Just the arguments following it up to the next @--@ option.
getOption :: String -> IO (Maybe [String])
getOption opt = do
  args <- getArgs
  let remaining = dropWhile (/= ("--" ++ opt)) args
  case remaining of
    [] -> return Nothing
    (_ : following) ->
      return (Just (takeWhile (not . isCommand) following))
  where
    -- an argument of the form "--x..." starts the next option
    isCommand ('-':'-':_:_) = True
    isCommand _ = False
-- |
-- | True when @--opt@ is present on the command line with no
-- arguments of its own (i.e. used as a bare flag).
getFlag :: String -> IO Bool
getFlag opt = do
  found <- getOption opt
  case found of
    Just [] -> return True
    _       -> return False
| karamellpelle/rm1x-template | source/Main.hs | gpl-2.0 | 6,370 | 0 | 19 | 2,177 | 1,428 | 697 | 731 | 107 | 9 |
module H14 where
-- | Duplicate every element of a list (99 problems, problem 14).
dupli :: [a] -> [a]
dupli = concatMap (\x -> [x, x])
| hsinhuang/codebase | h99/H14.hs | gpl-2.0 | 81 | 0 | 7 | 19 | 57 | 31 | 26 | 4 | 1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
module Ampersand.Output.ToJSON.Concepts
(Concepts,Segment)
where
import Ampersand.ADL1
import Ampersand.Output.ToJSON.JSONutils
import Data.List(nub)
import Data.Maybe
import qualified Data.Set as Set
-- | Wrapper for the full list of concepts emitted to the frontend.
data Concepts = Concepts [Concept] deriving (Generic, Show)

-- | JSON view of one concept: identity, type, generalization lattice
-- neighbours, affected conjuncts, interfaces and table placement.
data Concept = Concept
  { cptJSONid :: String
  , cptJSONlabel :: String
  , cptJSONtype :: String
  , cptJSONgeneralizations :: [String]
  , cptJSONspecializations :: [String]
  , cptJSONdirectGens :: [String]
  , cptJSONdirectSpecs :: [String]
  , cptJSONaffectedConjuncts :: [String]
  , cptJSONinterfaces :: [String]
  , cptJSONdefaultViewId :: Maybe String
  , cptJSONconceptTable :: TableCols
  , cptJSONlargestConcept :: String
  } deriving (Generic, Show)

-- | The database table a concept lives in, with its column names.
data TableCols = TableCols
  { tclJSONname :: String
  , tclJSONcols :: [String]
  } deriving (Generic, Show)

-- | JSON view of a view definition (label, default flag, optional
-- HTML template and its segments).
data View = View
  { vwJSONlabel :: String
  , vwJSONisDefault :: Bool
  , vwJSONhtmlTemplate :: Maybe String
  , vwJSONsegments :: [Segment]
  } deriving (Generic, Show)

-- | JSON view of one view segment; exactly one of the expression
-- fields or the text field is populated depending on the segment type.
data Segment = Segment
  { segJSONseqNr :: Integer
  , segJSONlabel :: Maybe String
  , segJSONsegType :: String
  , segJSONexpADL :: Maybe String
  , segJSONexpSQL :: Maybe String
  , segJSONtext :: Maybe String
  } deriving (Generic, Show)
-- All ToJSON instances delegate to the shared amp2Jason encoder,
-- which derives field names from the record selectors.
instance ToJSON Concept where
  toJSON = amp2Jason
instance ToJSON Concepts where
  toJSON = amp2Jason
instance ToJSON View where
  toJSON = amp2Jason
instance ToJSON Segment where
  toJSON = amp2Jason
instance ToJSON TableCols where
  toJSON = amp2Jason

-- | The full concept list comes from the user FSpec's concept set.
instance JSON MultiFSpecs Concepts where
  fromAmpersand multi _ = Concepts (map (fromAmpersand multi) (Set.elems $ concs fSpec))
    where fSpec = userFSpec multi
-- | Build the JSON record for a single concept from the user FSpec.
instance JSON A_Concept Concept where
  fromAmpersand multi cpt = Concept
    { cptJSONid = escapeIdentifier . name $ cpt
    , cptJSONlabel = name cpt
    , cptJSONtype = show . cptTType fSpec $ cpt
    , cptJSONgeneralizations = map (escapeIdentifier . name) . largerConcepts (vgens fSpec) $ cpt
    , cptJSONspecializations = map (escapeIdentifier . name) . smallerConcepts (vgens fSpec) $ cpt
      -- direct (one-step) generalizations/specializations from fsisa
    , cptJSONdirectGens = map (escapeIdentifier . name) $ nub [ g | (s,g) <- fsisa fSpec, s == cpt]
    , cptJSONdirectSpecs = map (escapeIdentifier . name) $ nub [ s | (s,g) <- fsisa fSpec, g == cpt]
    , cptJSONaffectedConjuncts = map rc_id . fromMaybe [] . lookup cpt . allConjsPerConcept $ fSpec
    , cptJSONinterfaces = map name . filter hasAsSourceCpt . interfaceS $ fSpec
    , cptJSONdefaultViewId = fmap name . getDefaultViewForConcept fSpec $ cpt
    , cptJSONconceptTable = fromAmpersand multi cpt
    , cptJSONlargestConcept = escapeIdentifier . name . largestConcept fSpec $ cpt
    }
    where
      fSpec = userFSpec multi
      -- an interface is listed when its source is this concept or any
      -- of its generalizations
      hasAsSourceCpt :: Interface -> Bool
      hasAsSourceCpt ifc = (source . objExpression . ifcObj) ifc `elem` cpts
      cpts = cpt : largerConcepts (vgens fSpec) cpt
-- | Locate the single table a concept's typology is stored in and
-- collect its column (attribute) names; any violation of the
-- one-table-per-typology invariant is a fatal internal error.
instance JSON A_Concept TableCols where
  fromAmpersand multi cpt = TableCols
    { tclJSONname = name cptTable
    , tclJSONcols = case nub . map fst $ cols of
        [t] -> if name t == name cptTable
               then map (attName . snd) cols
               else fatal $ "Table names should match: "++name t++" "++name cptTable++"."
        _ -> fatal "All concepts in a typology should be in exactly one table."
    }
    where
      fSpec = userFSpec multi
      -- (table, attribute) pairs for the concept and its generalizations
      cols = concatMap (lookupCpt fSpec) $ cpt : largerConcepts (vgens fSpec) cpt
      cptTable = case lookupCpt fSpec cpt of
        [(table,_)] -> table
        [] -> fatal ("Concept `"++name cpt++"` not found in a table.")
        _ -> fatal ("Concept `"++name cpt++"` found in multiple tables.")
-- | Convert a view definition, including its segments.
instance JSON ViewDef View where
  fromAmpersand multi vd = View
    { vwJSONlabel = name vd
    , vwJSONisDefault = vdIsDefault vd
    , vwJSONhtmlTemplate = fmap templateName . vdhtml $ vd
    , vwJSONsegments = map (fromAmpersand multi) . vdats $ vd
    }
    where templateName (ViewHtmlTemplateFile fn) = fn

-- | Convert one view segment; expression segments carry both their
-- ADL rendering and the generated SQL, text segments carry the text.
instance JSON ViewSegment Segment where
  fromAmpersand multi seg = Segment
    { segJSONseqNr = vsmSeqNr seg
    , segJSONlabel = vsmlabel seg
    , segJSONsegType = case vsmLoad seg of
        ViewExp{}  -> "Exp"
        ViewText{} -> "Text"
    , segJSONexpADL = case vsmLoad seg of
        ViewExp expr -> Just . showA $ expr
        _ -> Nothing
    , segJSONexpSQL = case vsmLoad seg of
        ViewExp expr -> Just $ sqlQuery fSpec expr
        _ -> Nothing
    , segJSONtext = case vsmLoad seg of
        ViewText str -> Just str
        _ -> Nothing
    }
    where
      fSpec = userFSpec multi
| AmpersandTarski/ampersand | src/Ampersand/Output/ToJSON/Concepts.hs | gpl-3.0 | 5,063 | 0 | 16 | 1,448 | 1,402 | 754 | 648 | 112 | 0 |
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances, FlexibleContexts, UndecidableInstances #-}
module Abstat.Common.AbstractState where
import qualified Data.Map.Strict as Map
import Test.QuickCheck
import Control.Monad(liftM,liftM2)
import Abstat.Interface.Lattice
import Abstat.Interface.State hiding (State)
import qualified Abstat.Interface.State as Generic
import Abstat.Interface.AbstractDomain
import Abstat.Interface.GaloisConnection
import Abstat.Common.Generator
-- | An abstract program state: a finite map from variable names to abstract
--   values, or 'Bottom' for the unreachable state. Invariant (maintained by
--   'normalizeState'): stored values are never 'top' (absent keys already
--   denote 'top', see 'load' below) and never 'bottom' (any 'bottom'
--   binding collapses the whole state to 'Bottom').
data State abstract
  = State (Map.Map String abstract)
  | Bottom
  deriving (Show,Eq)
-- | Canonicalize a state:
--
--   * bindings whose value is 'top' are dropped (an absent key already
--     means 'top'), and
--   * if any remaining binding is 'bottom', the whole state collapses to
--     'Bottom'.
normalizeState :: (Lattice abstract, Ord abstract) => State abstract -> State abstract
normalizeState Bottom = Bottom
normalizeState (State s)
    | any (== bottom) (Map.elems trimmed) = Bottom
    | otherwise                           = State trimmed
  where
    -- 'Map.filter' avoids the toList/filter/fromList round-trip of the
    -- previous implementation.
    trimmed = Map.filter (/= top) s
-- | 'State' as an implementation of the generic state interface.
--   Variables absent from the map read as 'top' (unconstrained); 'Bottom'
--   is the unreachable state, from which every load yields 'bottom'.
instance (Ord abstract, Lattice abstract) => Generic.State State abstract where
  empty = State Map.empty
  -- Storing into the unreachable state keeps it unreachable.
  store _ _ Bottom = Bottom
  store key val (State s) = normalizeState $ State $ Map.insert key val s
  load _ Bottom = bottom
  -- Absent keys are implicitly 'top'.
  load key (State s) = Map.findWithDefault top key s
  defined Bottom = []
  defined (State s) = Map.keys s
-- | Pointwise lattice on states. Since an absent key denotes 'top', 'join'
--   keeps only keys present on both sides (a key missing on either side
--   joins to 'top', i.e. is dropped), while 'meet' keeps keys from either
--   side.
instance (Ord abstract, Lattice abstract) => Lattice (State abstract) where
  join x Bottom = x
  join Bottom y = y
  join (State a) (State b) = normalizeState $
    State $ Map.intersectionWith join a b
  meet _ Bottom = Bottom
  meet Bottom _ = Bottom
  meet (State a) (State b) = normalizeState $
    State $ Map.unionWith meet a b
  -- The empty map constrains nothing, hence it is the top element.
  top = State Map.empty
  bottom = Bottom
instance (
  Show abstract,
  Ord abstract,
  Lattice (State abstract),
  AbstractDomain abstract
  ) => AbstractDomain (State abstract) where
  -- Widening an unreachable state against a reachable one is treated as a
  -- caller error (the 'Show' constraint exists to report it); widening is
  -- presumably only meant to be applied along increasing chains — TODO
  -- confirm against the fixpoint driver.
  widening Bottom (State b) = error $ "widening Bottom (State " ++ show b ++ ")"
  widening _ Bottom = Bottom
  widening (State a) (State b) = normalizeState $
    State $ Map.mergeWithKey
      -- Key present in both: widen pointwise.
      (\_ x y -> Just $ widening x y)
      -- Key only in the old state: its new value is implicitly 'top'.
      (Map.map (`widening` top))
      -- Key only in the new state: its old value was implicitly 'top'.
      (Map.map (top `widening`))
      a b
-- | Abstraction of a whole concrete state: abstract each defined variable
--   individually and normalize. Concretization is intentionally left
--   unimplemented.
instance (
  Ord abstract,
  GaloisConnection concrete abstract,
  Generic.State cState concrete,
  AbstractDomain abstract
  ) => GaloisConnection (cState concrete) (State abstract) where
  concretization = error "not implemented"
  singleAbstraction concreteState = normalizeState $
    State $ Map.fromList $ map
      (\key -> (key, singleAbstraction $ load key concreteState))
      (defined concreteState)
-- | Remove every key whose value equals the given target value.
--
--   Equivalent to the previous fold of 'Map.update' over all keys, but a
--   single 'Map.filter' pass instead of one update per key.
removeValues :: (Eq v, Ord k) => v -> Map.Map k v -> Map.Map k v
removeValues targetVal = Map.filter (/= targetVal)
instance (
  Ord abstract,
  Lattice abstract,
  Arbitrary abstract
  ) => Arbitrary (State abstract) where
  -- Generate a random binding list and normalize it, so generated states
  -- satisfy the same invariants as states produced by the analysis.
  -- (Applicative style replaces the older liftM/liftM2 combinators.)
  arbitrary =
    normalizeState . State . Map.fromList
      <$> listOf ((,) <$> genVar <*> arbitrary)
| fpoli/abstat | src/Abstat/Common/AbstractState.hs | gpl-3.0 | 3,240 | 0 | 12 | 839 | 1,035 | 544 | 491 | 82 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Calendar.Events.Watch
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Watch for changes to Events resources.
--
-- /See:/ <https://developers.google.com/google-apps/calendar/firstapp Calendar API Reference> for @calendar.events.watch@.
module Network.Google.Resource.Calendar.Events.Watch
(
-- * REST Resource
EventsWatchResource
-- * Creating a Request
, eventsWatch
, EventsWatch
-- * Request Lenses
, ewSyncToken
, ewCalendarId
, ewTimeMin
, ewOrderBy
, ewSingleEvents
, ewPrivateExtendedProperty
, ewShowDeleted
, ewPayload
, ewQ
, ewSharedExtendedProperty
, ewMaxAttendees
, ewICalUId
, ewUpdatedMin
, ewPageToken
, ewTimeZone
, ewShowHiddenInvitations
, ewMaxResults
, ewAlwaysIncludeEmail
, ewTimeMax
) where
import Network.Google.AppsCalendar.Types
import Network.Google.Prelude
-- | A resource alias for @calendar.events.watch@ method which the
-- 'EventsWatch' request conforms to.
type EventsWatchResource =
"calendar" :>
"v3" :>
"calendars" :>
Capture "calendarId" Text :>
"events" :>
"watch" :>
QueryParam "syncToken" Text :>
QueryParam "timeMin" DateTime' :>
QueryParam "orderBy" EventsWatchOrderBy :>
QueryParam "singleEvents" Bool :>
QueryParams "privateExtendedProperty" Text :>
QueryParam "showDeleted" Bool :>
QueryParam "q" Text :>
QueryParams "sharedExtendedProperty" Text :>
QueryParam "maxAttendees" (Textual Int32) :>
QueryParam "iCalUID" Text :>
QueryParam "updatedMin" DateTime' :>
QueryParam "pageToken" Text :>
QueryParam "timeZone" Text :>
QueryParam "showHiddenInvitations"
Bool
:>
QueryParam "maxResults"
(Textual Int32)
:>
QueryParam "alwaysIncludeEmail"
Bool
:>
QueryParam "timeMax" DateTime'
:>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] Channel :>
Post '[JSON] Channel
-- | Watch for changes to Events resources.
--
-- /See:/ 'eventsWatch' smart constructor.
data EventsWatch = EventsWatch'
    -- Each field corresponds to one query parameter of
    -- 'EventsWatchResource' ('_ewPayload' is the request body); see the
    -- lens haddocks below for the meaning of each field.
    { _ewSyncToken :: !(Maybe Text)
    , _ewCalendarId :: !Text
    , _ewTimeMin :: !(Maybe DateTime')
    , _ewOrderBy :: !(Maybe EventsWatchOrderBy)
    , _ewSingleEvents :: !(Maybe Bool)
    , _ewPrivateExtendedProperty :: !(Maybe [Text])
    , _ewShowDeleted :: !(Maybe Bool)
    , _ewPayload :: !Channel
    , _ewQ :: !(Maybe Text)
    , _ewSharedExtendedProperty :: !(Maybe [Text])
    , _ewMaxAttendees :: !(Maybe (Textual Int32))
    , _ewICalUId :: !(Maybe Text)
    , _ewUpdatedMin :: !(Maybe DateTime')
    , _ewPageToken :: !(Maybe Text)
    , _ewTimeZone :: !(Maybe Text)
    , _ewShowHiddenInvitations :: !(Maybe Bool)
    , _ewMaxResults :: !(Textual Int32)
    , _ewAlwaysIncludeEmail :: !(Maybe Bool)
    , _ewTimeMax :: !(Maybe DateTime')
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'EventsWatch' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ewSyncToken'
--
-- * 'ewCalendarId'
--
-- * 'ewTimeMin'
--
-- * 'ewOrderBy'
--
-- * 'ewSingleEvents'
--
-- * 'ewPrivateExtendedProperty'
--
-- * 'ewShowDeleted'
--
-- * 'ewPayload'
--
-- * 'ewQ'
--
-- * 'ewSharedExtendedProperty'
--
-- * 'ewMaxAttendees'
--
-- * 'ewICalUId'
--
-- * 'ewUpdatedMin'
--
-- * 'ewPageToken'
--
-- * 'ewTimeZone'
--
-- * 'ewShowHiddenInvitations'
--
-- * 'ewMaxResults'
--
-- * 'ewAlwaysIncludeEmail'
--
-- * 'ewTimeMax'
eventsWatch
    :: Text -- ^ 'ewCalendarId'
    -> Channel -- ^ 'ewPayload'
    -> EventsWatch
eventsWatch pEwCalendarId_ pEwPayload_ =
  EventsWatch'
    { _ewSyncToken = Nothing
    , _ewCalendarId = pEwCalendarId_
    , _ewTimeMin = Nothing
    , _ewOrderBy = Nothing
    , _ewSingleEvents = Nothing
    , _ewPrivateExtendedProperty = Nothing
    , _ewShowDeleted = Nothing
    , _ewPayload = pEwPayload_
    , _ewQ = Nothing
    , _ewSharedExtendedProperty = Nothing
    , _ewMaxAttendees = Nothing
    , _ewICalUId = Nothing
    , _ewUpdatedMin = Nothing
    , _ewPageToken = Nothing
    , _ewTimeZone = Nothing
    , _ewShowHiddenInvitations = Nothing
    -- 250 mirrors the API's documented default page size (see
    -- 'ewMaxResults'); unlike the Maybe fields it is always sent.
    , _ewMaxResults = 250
    , _ewAlwaysIncludeEmail = Nothing
    , _ewTimeMax = Nothing
    }
-- | Token obtained from the nextSyncToken field returned on the last page of
-- results from the previous list request. It makes the result of this list
-- request contain only entries that have changed since then. All events
-- deleted since the previous list request will always be in the result set
-- and it is not allowed to set showDeleted to False. There are several
-- query parameters that cannot be specified together with nextSyncToken to
-- ensure consistency of the client state. These are: - iCalUID - orderBy -
-- privateExtendedProperty - q - sharedExtendedProperty - timeMin - timeMax
-- - updatedMin If the syncToken expires, the server will respond with a
-- 410 GONE response code and the client should clear its storage and
-- perform a full synchronization without any syncToken. Learn more about
-- incremental synchronization. Optional. The default is to return all
-- entries.
ewSyncToken :: Lens' EventsWatch (Maybe Text)
ewSyncToken
= lens _ewSyncToken (\ s a -> s{_ewSyncToken = a})
-- | Calendar identifier. To retrieve calendar IDs call the calendarList.list
-- method. If you want to access the primary calendar of the currently
-- logged in user, use the \"primary\" keyword.
ewCalendarId :: Lens' EventsWatch Text
ewCalendarId
= lens _ewCalendarId (\ s a -> s{_ewCalendarId = a})
-- | Lower bound (inclusive) for an event\'s end time to filter by. Optional.
-- The default is not to filter by end time. Must be an RFC3339 timestamp
-- with mandatory time zone offset, e.g., 2011-06-03T10:00:00-07:00,
-- 2011-06-03T10:00:00Z. Milliseconds may be provided but will be ignored.
ewTimeMin :: Lens' EventsWatch (Maybe UTCTime)
ewTimeMin
= lens _ewTimeMin (\ s a -> s{_ewTimeMin = a}) .
mapping _DateTime
-- | The order of the events returned in the result. Optional. The default is
-- an unspecified, stable order.
ewOrderBy :: Lens' EventsWatch (Maybe EventsWatchOrderBy)
ewOrderBy
= lens _ewOrderBy (\ s a -> s{_ewOrderBy = a})
-- | Whether to expand recurring events into instances and only return single
-- one-off events and instances of recurring events, but not the underlying
-- recurring events themselves. Optional. The default is False.
ewSingleEvents :: Lens' EventsWatch (Maybe Bool)
ewSingleEvents
= lens _ewSingleEvents
(\ s a -> s{_ewSingleEvents = a})
-- | Extended properties constraint specified as propertyName=value. Matches
-- only private properties. This parameter might be repeated multiple times
-- to return events that match all given constraints.
ewPrivateExtendedProperty :: Lens' EventsWatch [Text]
ewPrivateExtendedProperty
= lens _ewPrivateExtendedProperty
(\ s a -> s{_ewPrivateExtendedProperty = a})
. _Default
. _Coerce
-- | Whether to include deleted events (with status equals \"cancelled\") in
-- the result. Cancelled instances of recurring events (but not the
-- underlying recurring event) will still be included if showDeleted and
-- singleEvents are both False. If showDeleted and singleEvents are both
-- True, only single instances of deleted events (but not the underlying
-- recurring events) are returned. Optional. The default is False.
ewShowDeleted :: Lens' EventsWatch (Maybe Bool)
ewShowDeleted
= lens _ewShowDeleted
(\ s a -> s{_ewShowDeleted = a})
-- | Multipart request metadata.
ewPayload :: Lens' EventsWatch Channel
ewPayload
= lens _ewPayload (\ s a -> s{_ewPayload = a})
-- | Free text search terms to find events that match these terms in any
-- field, except for extended properties. Optional.
ewQ :: Lens' EventsWatch (Maybe Text)
ewQ = lens _ewQ (\ s a -> s{_ewQ = a})
-- | Extended properties constraint specified as propertyName=value. Matches
-- only shared properties. This parameter might be repeated multiple times
-- to return events that match all given constraints.
ewSharedExtendedProperty :: Lens' EventsWatch [Text]
ewSharedExtendedProperty
= lens _ewSharedExtendedProperty
(\ s a -> s{_ewSharedExtendedProperty = a})
. _Default
. _Coerce
-- | The maximum number of attendees to include in the response. If there are
-- more than the specified number of attendees, only the participant is
-- returned. Optional.
ewMaxAttendees :: Lens' EventsWatch (Maybe Int32)
ewMaxAttendees
= lens _ewMaxAttendees
(\ s a -> s{_ewMaxAttendees = a})
. mapping _Coerce
-- | Specifies event ID in the iCalendar format to be included in the
-- response. Optional.
ewICalUId :: Lens' EventsWatch (Maybe Text)
ewICalUId
= lens _ewICalUId (\ s a -> s{_ewICalUId = a})
-- | Lower bound for an event\'s last modification time (as a RFC3339
-- timestamp) to filter by. When specified, entries deleted since this time
-- will always be included regardless of showDeleted. Optional. The default
-- is not to filter by last modification time.
ewUpdatedMin :: Lens' EventsWatch (Maybe UTCTime)
ewUpdatedMin
= lens _ewUpdatedMin (\ s a -> s{_ewUpdatedMin = a})
. mapping _DateTime
-- | Token specifying which result page to return. Optional.
ewPageToken :: Lens' EventsWatch (Maybe Text)
ewPageToken
= lens _ewPageToken (\ s a -> s{_ewPageToken = a})
-- | Time zone used in the response. Optional. The default is the time zone
-- of the calendar.
ewTimeZone :: Lens' EventsWatch (Maybe Text)
ewTimeZone
= lens _ewTimeZone (\ s a -> s{_ewTimeZone = a})
-- | Whether to include hidden invitations in the result. Optional. The
-- default is False.
ewShowHiddenInvitations :: Lens' EventsWatch (Maybe Bool)
ewShowHiddenInvitations
= lens _ewShowHiddenInvitations
(\ s a -> s{_ewShowHiddenInvitations = a})
-- | Maximum number of events returned on one result page. By default the
-- value is 250 events. The page size can never be larger than 2500 events.
-- Optional.
ewMaxResults :: Lens' EventsWatch Int32
ewMaxResults
= lens _ewMaxResults (\ s a -> s{_ewMaxResults = a})
. _Coerce
-- | Whether to always include a value in the email field for the organizer,
-- creator and attendees, even if no real email is available (i.e. a
-- generated, non-working value will be provided). The use of this option
-- is discouraged and should only be used by clients which cannot handle
-- the absence of an email address value in the mentioned places. Optional.
-- The default is False.
ewAlwaysIncludeEmail :: Lens' EventsWatch (Maybe Bool)
ewAlwaysIncludeEmail
= lens _ewAlwaysIncludeEmail
(\ s a -> s{_ewAlwaysIncludeEmail = a})
-- | Upper bound (exclusive) for an event\'s start time to filter by.
-- Optional. The default is not to filter by start time. Must be an RFC3339
-- timestamp with mandatory time zone offset, e.g.,
-- 2011-06-03T10:00:00-07:00, 2011-06-03T10:00:00Z. Milliseconds may be
-- provided but will be ignored.
ewTimeMax :: Lens' EventsWatch (Maybe UTCTime)
ewTimeMax
= lens _ewTimeMax (\ s a -> s{_ewTimeMax = a}) .
mapping _DateTime
instance GoogleRequest EventsWatch where
  type Rs EventsWatch = Channel
  type Scopes EventsWatch =
    '["https://www.googleapis.com/auth/calendar",
      "https://www.googleapis.com/auth/calendar.readonly"]
  -- The positional arguments below must stay in the exact order of the
  -- parameters declared by 'EventsWatchResource'; 'buildClient' derives the
  -- request shape from that type.
  requestClient EventsWatch'{..}
    = go _ewCalendarId _ewSyncToken _ewTimeMin _ewOrderBy
        _ewSingleEvents
        (_ewPrivateExtendedProperty ^. _Default)
        _ewShowDeleted
        _ewQ
        (_ewSharedExtendedProperty ^. _Default)
        _ewMaxAttendees
        _ewICalUId
        _ewUpdatedMin
        _ewPageToken
        _ewTimeZone
        _ewShowHiddenInvitations
        (Just _ewMaxResults)
        _ewAlwaysIncludeEmail
        _ewTimeMax
        (Just AltJSON)
        _ewPayload
        appsCalendarService
    where go
            = buildClient (Proxy :: Proxy EventsWatchResource)
                mempty
| rueshyna/gogol | gogol-apps-calendar/gen/Network/Google/Resource/Calendar/Events/Watch.hs | mpl-2.0 | 14,068 | 0 | 32 | 3,923 | 1,882 | 1,091 | 791 | 255 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.OSLogin.Types.Product
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.OSLogin.Types.Product where
import Network.Google.OSLogin.Types.Sum
import Network.Google.Prelude
-- | A map from SSH public key fingerprint to the associated key object.
--
-- /See:/ 'loginProFileSSHPublicKeys' smart constructor.
newtype LoginProFileSSHPublicKeys =
LoginProFileSSHPublicKeys'
{ _lpfspkAddtional :: HashMap Text SSHPublicKey
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'LoginProFileSSHPublicKeys' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lpfspkAddtional'
loginProFileSSHPublicKeys
:: HashMap Text SSHPublicKey -- ^ 'lpfspkAddtional'
-> LoginProFileSSHPublicKeys
loginProFileSSHPublicKeys pLpfspkAddtional_ =
LoginProFileSSHPublicKeys' {_lpfspkAddtional = _Coerce # pLpfspkAddtional_}
lpfspkAddtional :: Lens' LoginProFileSSHPublicKeys (HashMap Text SSHPublicKey)
lpfspkAddtional
= lens _lpfspkAddtional
(\ s a -> s{_lpfspkAddtional = a})
. _Coerce
instance FromJSON LoginProFileSSHPublicKeys where
parseJSON
= withObject "LoginProFileSSHPublicKeys"
(\ o ->
LoginProFileSSHPublicKeys' <$> (parseJSONObject o))
instance ToJSON LoginProFileSSHPublicKeys where
toJSON = toJSON . _lpfspkAddtional
-- | A generic empty message that you can re-use to avoid defining duplicated
-- empty messages in your APIs. A typical example is to use it as the
-- request or the response type of an API method. For instance: service Foo
-- { rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); } The
-- JSON representation for \`Empty\` is empty JSON object \`{}\`.
--
-- /See:/ 'empty' smart constructor.
data Empty =
  Empty'
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'Empty' with the minimum fields required to make a request.
--
empty
    :: Empty
empty = Empty'

-- Accepts any JSON object, discarding its contents.
instance FromJSON Empty where
  parseJSON = withObject "Empty" (\ o -> pure Empty')

-- Always serializes to the empty JSON object @{}@.
instance ToJSON Empty where
  toJSON = const emptyObject
-- | The user profile information used for logging in to a virtual machine on
-- Google Compute Engine.
--
-- /See:/ 'loginProFile' smart constructor.
data LoginProFile =
LoginProFile'
{ _lpfPosixAccounts :: !(Maybe [PosixAccount])
, _lpfSSHPublicKeys :: !(Maybe LoginProFileSSHPublicKeys)
, _lpfName :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'LoginProFile' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lpfPosixAccounts'
--
-- * 'lpfSSHPublicKeys'
--
-- * 'lpfName'
loginProFile
:: LoginProFile
loginProFile =
LoginProFile'
{ _lpfPosixAccounts = Nothing
, _lpfSSHPublicKeys = Nothing
, _lpfName = Nothing
}
-- | The list of POSIX accounts associated with the user.
lpfPosixAccounts :: Lens' LoginProFile [PosixAccount]
lpfPosixAccounts
= lens _lpfPosixAccounts
(\ s a -> s{_lpfPosixAccounts = a})
. _Default
. _Coerce
-- | A map from SSH public key fingerprint to the associated key object.
lpfSSHPublicKeys :: Lens' LoginProFile (Maybe LoginProFileSSHPublicKeys)
lpfSSHPublicKeys
= lens _lpfSSHPublicKeys
(\ s a -> s{_lpfSSHPublicKeys = a})
-- | Required. A unique user ID.
lpfName :: Lens' LoginProFile (Maybe Text)
lpfName = lens _lpfName (\ s a -> s{_lpfName = a})
instance FromJSON LoginProFile where
parseJSON
= withObject "LoginProFile"
(\ o ->
LoginProFile' <$>
(o .:? "posixAccounts" .!= mempty) <*>
(o .:? "sshPublicKeys")
<*> (o .:? "name"))
instance ToJSON LoginProFile where
toJSON LoginProFile'{..}
= object
(catMaybes
[("posixAccounts" .=) <$> _lpfPosixAccounts,
("sshPublicKeys" .=) <$> _lpfSSHPublicKeys,
("name" .=) <$> _lpfName])
-- | A response message for importing an SSH public key.
--
-- /See:/ 'importSSHPublicKeyResponse' smart constructor.
data ImportSSHPublicKeyResponse =
ImportSSHPublicKeyResponse'
{ _ispkrLoginProFile :: !(Maybe LoginProFile)
, _ispkrDetails :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ImportSSHPublicKeyResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ispkrLoginProFile'
--
-- * 'ispkrDetails'
importSSHPublicKeyResponse
:: ImportSSHPublicKeyResponse
importSSHPublicKeyResponse =
ImportSSHPublicKeyResponse'
{_ispkrLoginProFile = Nothing, _ispkrDetails = Nothing}
-- | The login profile information for the user.
ispkrLoginProFile :: Lens' ImportSSHPublicKeyResponse (Maybe LoginProFile)
ispkrLoginProFile
= lens _ispkrLoginProFile
(\ s a -> s{_ispkrLoginProFile = a})
-- | Detailed information about import results.
ispkrDetails :: Lens' ImportSSHPublicKeyResponse (Maybe Text)
ispkrDetails
= lens _ispkrDetails (\ s a -> s{_ispkrDetails = a})
instance FromJSON ImportSSHPublicKeyResponse where
parseJSON
= withObject "ImportSSHPublicKeyResponse"
(\ o ->
ImportSSHPublicKeyResponse' <$>
(o .:? "loginProfile") <*> (o .:? "details"))
instance ToJSON ImportSSHPublicKeyResponse where
toJSON ImportSSHPublicKeyResponse'{..}
= object
(catMaybes
[("loginProfile" .=) <$> _ispkrLoginProFile,
("details" .=) <$> _ispkrDetails])
-- | The SSH public key information associated with a Google account.
--
-- /See:/ 'sshPublicKey' smart constructor.
data SSHPublicKey =
SSHPublicKey'
{ _spkFingerprint :: !(Maybe Text)
, _spkKey :: !(Maybe Text)
, _spkName :: !(Maybe Text)
, _spkExpirationTimeUsec :: !(Maybe (Textual Int64))
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'SSHPublicKey' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'spkFingerprint'
--
-- * 'spkKey'
--
-- * 'spkName'
--
-- * 'spkExpirationTimeUsec'
sshPublicKey
:: SSHPublicKey
sshPublicKey =
SSHPublicKey'
{ _spkFingerprint = Nothing
, _spkKey = Nothing
, _spkName = Nothing
, _spkExpirationTimeUsec = Nothing
}
-- | Output only. The SHA-256 fingerprint of the SSH public key.
spkFingerprint :: Lens' SSHPublicKey (Maybe Text)
spkFingerprint
= lens _spkFingerprint
(\ s a -> s{_spkFingerprint = a})
-- | Public key text in SSH format, defined by RFC4253 section 6.6.
spkKey :: Lens' SSHPublicKey (Maybe Text)
spkKey = lens _spkKey (\ s a -> s{_spkKey = a})
-- | Output only. The canonical resource name.
spkName :: Lens' SSHPublicKey (Maybe Text)
spkName = lens _spkName (\ s a -> s{_spkName = a})
-- | An expiration time in microseconds since epoch.
spkExpirationTimeUsec :: Lens' SSHPublicKey (Maybe Int64)
spkExpirationTimeUsec
= lens _spkExpirationTimeUsec
(\ s a -> s{_spkExpirationTimeUsec = a})
. mapping _Coerce
instance FromJSON SSHPublicKey where
parseJSON
= withObject "SSHPublicKey"
(\ o ->
SSHPublicKey' <$>
(o .:? "fingerprint") <*> (o .:? "key") <*>
(o .:? "name")
<*> (o .:? "expirationTimeUsec"))
instance ToJSON SSHPublicKey where
toJSON SSHPublicKey'{..}
= object
(catMaybes
[("fingerprint" .=) <$> _spkFingerprint,
("key" .=) <$> _spkKey, ("name" .=) <$> _spkName,
("expirationTimeUsec" .=) <$>
_spkExpirationTimeUsec])
-- | The POSIX account information associated with a Google account.
--
-- /See:/ 'posixAccount' smart constructor.
data PosixAccount =
PosixAccount'
{ _paGecos :: !(Maybe Text)
, _paUid :: !(Maybe (Textual Int64))
, _paUsername :: !(Maybe Text)
, _paShell :: !(Maybe Text)
, _paPrimary :: !(Maybe Bool)
, _paAccountId :: !(Maybe Text)
, _paName :: !(Maybe Text)
, _paGid :: !(Maybe (Textual Int64))
, _paOperatingSystemType :: !(Maybe PosixAccountOperatingSystemType)
, _paSystemId :: !(Maybe Text)
, _paHomeDirectory :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'PosixAccount' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'paGecos'
--
-- * 'paUid'
--
-- * 'paUsername'
--
-- * 'paShell'
--
-- * 'paPrimary'
--
-- * 'paAccountId'
--
-- * 'paName'
--
-- * 'paGid'
--
-- * 'paOperatingSystemType'
--
-- * 'paSystemId'
--
-- * 'paHomeDirectory'
posixAccount
:: PosixAccount
posixAccount =
PosixAccount'
{ _paGecos = Nothing
, _paUid = Nothing
, _paUsername = Nothing
, _paShell = Nothing
, _paPrimary = Nothing
, _paAccountId = Nothing
, _paName = Nothing
, _paGid = Nothing
, _paOperatingSystemType = Nothing
, _paSystemId = Nothing
, _paHomeDirectory = Nothing
}
-- | The GECOS (user information) entry for this account.
paGecos :: Lens' PosixAccount (Maybe Text)
paGecos = lens _paGecos (\ s a -> s{_paGecos = a})
-- | The user ID.
paUid :: Lens' PosixAccount (Maybe Int64)
paUid
= lens _paUid (\ s a -> s{_paUid = a}) .
mapping _Coerce
-- | The username of the POSIX account.
paUsername :: Lens' PosixAccount (Maybe Text)
paUsername
= lens _paUsername (\ s a -> s{_paUsername = a})
-- | The path to the logic shell for this account.
paShell :: Lens' PosixAccount (Maybe Text)
paShell = lens _paShell (\ s a -> s{_paShell = a})
-- | Only one POSIX account can be marked as primary.
paPrimary :: Lens' PosixAccount (Maybe Bool)
paPrimary
= lens _paPrimary (\ s a -> s{_paPrimary = a})
-- | Output only. A POSIX account identifier.
paAccountId :: Lens' PosixAccount (Maybe Text)
paAccountId
= lens _paAccountId (\ s a -> s{_paAccountId = a})
-- | Output only. The canonical resource name.
paName :: Lens' PosixAccount (Maybe Text)
paName = lens _paName (\ s a -> s{_paName = a})
-- | The default group ID.
paGid :: Lens' PosixAccount (Maybe Int64)
paGid
= lens _paGid (\ s a -> s{_paGid = a}) .
mapping _Coerce
-- | The operating system type where this account applies.
paOperatingSystemType :: Lens' PosixAccount (Maybe PosixAccountOperatingSystemType)
paOperatingSystemType
= lens _paOperatingSystemType
(\ s a -> s{_paOperatingSystemType = a})
-- | System identifier for which account the username or uid applies to. By
-- default, the empty value is used.
paSystemId :: Lens' PosixAccount (Maybe Text)
paSystemId
= lens _paSystemId (\ s a -> s{_paSystemId = a})
-- | The path to the home directory for this account.
paHomeDirectory :: Lens' PosixAccount (Maybe Text)
paHomeDirectory
= lens _paHomeDirectory
(\ s a -> s{_paHomeDirectory = a})
-- | Decode a 'PosixAccount' from its wire form. Every field is optional
--   ('.:?'); the applicative chain must list the keys in the same order as
--   the 'PosixAccount'' constructor's fields.
instance FromJSON PosixAccount where
  parseJSON
    = withObject "PosixAccount"
        (\ o ->
           PosixAccount' <$>
             (o .:? "gecos") <*> (o .:? "uid") <*>
               (o .:? "username")
               <*> (o .:? "shell")
               <*> (o .:? "primary")
               <*> (o .:? "accountId")
               <*> (o .:? "name")
               <*> (o .:? "gid")
               <*> (o .:? "operatingSystemType")
               <*> (o .:? "systemId")
               <*> (o .:? "homeDirectory"))
-- | Encode a 'PosixAccount'; 'catMaybes' drops fields that are 'Nothing',
--   so the resulting object carries only populated attributes.
instance ToJSON PosixAccount where
  toJSON PosixAccount'{..}
    = object
        (catMaybes
           [("gecos" .=) <$> _paGecos, ("uid" .=) <$> _paUid,
            ("username" .=) <$> _paUsername,
            ("shell" .=) <$> _paShell,
            ("primary" .=) <$> _paPrimary,
            ("accountId" .=) <$> _paAccountId,
            ("name" .=) <$> _paName, ("gid" .=) <$> _paGid,
            ("operatingSystemType" .=) <$>
              _paOperatingSystemType,
            ("systemId" .=) <$> _paSystemId,
            ("homeDirectory" .=) <$> _paHomeDirectory])
| brendanhay/gogol | gogol-oslogin/gen/Network/Google/OSLogin/Types/Product.hs | mpl-2.0 | 12,945 | 0 | 21 | 3,196 | 2,560 | 1,474 | 1,086 | 286 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : CGI
-- Copyright : Copyright (c) 2005,2006 Minero Aoki
-- License : LGPL (see COPYING)
--
-- Maintainer: masahiro.sakai@gmail.com
-- Stability : experimental
-- Portability : non-portable
{-# LANGUAGE CPP #-}
--
-- $Id: CGI.hs,v 1.2 2006/05/14 17:29:22 aamine Exp $
--
-- Copyright (c) 2005,2006 Minero Aoki
--
-- This program is free software.
-- You can distribute/modify this program under the terms of
-- the GNU LGPL, Lesser General Public License version 2.1.
-- For details of the GNU LGPL, see the file "COPYING".
--
module CGI
(runCGI,
HTTPRequest, varExist, lookupVar, lookupVars,
HTTPResponse(..), textContentType) where
import URLEncoding
import Data.Maybe
import Control.Monad
import System.IO
import System.IO.Error
import System.Environment
-- | Drive a CGI program: read the request from the environment and stdin,
--   run the handler, and write the response (Content-Type header, blank
--   line, body) to stdout with explicit CRLF line endings.
runCGI :: (HTTPRequest -> IO HTTPResponse) -> IO ()
runCGI f = do hSetBinaryMode stdin True
              -- stdout stays in text mode: UTF-8 output, no newline
              -- translation (we write "\r\n" ourselves).
              hSetEncoding stdout utf8
              hSetNewlineMode stdout noNewlineTranslation
              input <- getContents   -- lazy read; consumed by the handler
              env <- cgiEnvs
              -- The original bound the whole response as 'res@(...)' but
              -- never used 'res'; bind the fields directly.
              HTTPResponse ctype body <- f (parseCGIRequest env input)
              putStr $ "Content-Type: " ++ ctype ++ "\r\n"
              putStr "\r\n"
              putStr body
-- | Collect the standard CGI meta-variables that are actually present in
--   the process environment, as (name, value) pairs. Unset variables are
--   silently skipped (the 'catchIOError' handler).
cgiEnvs :: IO [(String, String)]
cgiEnvs = catMaybes <$> mapM mGetEnvPair names
  where
    mGetEnvPair :: String -> IO (Maybe (String, String))
    mGetEnvPair name =
      catchIOError (Just . (,) name <$> getEnv name)
                   (const $ return Nothing)
    names = [ "SERVER_NAME", "SERVER_PORT",
              "SERVER_SOFTWARE", "SERVER_PROTOCOL",
              "GATEWAY_INTERFACE", "SCRIPT_NAME", "REQUEST_METHOD",
              "PATH_INFO", "PATH_TRANSLATED",
              "CONTENT_TYPE", "CONTENT_LENGTH", "QUERY_STRING",
              "HTTP_COOKIE", "HTTP_ACCEPT",
              "REMOTE_HOST", "REMOTE_ADDR", "REMOTE_USER",
              "AUTH_TYPE", "HTTPS" ]
data HTTPRequest = HTTPRequest { params :: [(String, String)] }
-- Dispatch on REQUEST_METHOD: GET parses the query string, POST parses the
-- request body, anything else yields an empty parameter list.
parseCGIRequest env input =
  case getenv "REQUEST_METHOD" env of
    "GET"  -> parseGET env
    "POST" -> parsePOST env input
    _      -> parseUnknown
-- Look up an environment variable, defaulting to the empty string.
getenv key env = fromMaybe "" (lookup key env)

parseGET env = HTTPRequest (parseQueryString (getenv "QUERY_STRING" env))

parsePOST _env input = HTTPRequest (parseQueryString input)

-- FIXME
parseUnknown = HTTPRequest []

parseQueryString = map splitKV . splitQueryString

-- Parameters may be separated by ';' or '&'.
splitQueryString = splitBy (\c -> c == ';' || c == '&')

-- Split "key=value" (or a bare "key") and URL-decode both halves.
splitKV kv =
  case break (== '=') kv of
    (k, '=' : v) -> (decodeWord k, decodeWord v)
    (k, "")      -> (decodeWord k, "")

decodeWord = urldecode . decodePlus

-- '+' encodes a space in query strings.
decodePlus = map (\c -> if c == '+' then ' ' else c)
-- | Split a string on characters matching the predicate; the separator
--   characters themselves are dropped. Adjacent separators yield empty
--   fields (e.g. @splitBy (== \'&\') "a&&b" == ["a","","b"]@), but a
--   trailing separator does not produce a trailing empty field.
splitBy :: (Char -> Bool) -> String -> [String]
splitBy _ [] = []
splitBy f str = word : splitBy f (drop 1 rest)
  where
    -- 'drop 1' skips the separator (if any) and replaces the original
    -- case expression with its unused binding.
    (word, rest) = break f str
-- | True when the request contains the given parameter at least once.
varExist :: String -> HTTPRequest -> Bool
varExist key req = isJust (lookupVar key req)

-- | The first value bound to the given parameter, if any.
lookupVar :: String -> HTTPRequest -> Maybe String
lookupVar key req = lookup key (params req)

-- | All values bound to the given parameter, in order of appearance.
lookupVars :: String -> HTTPRequest -> [String]
lookupVars key req = lookupAll key (params req)
-- | All values associated with the given key, in order of appearance.
lookupAll :: Eq a => a -> [(a,b)] -> [b]
lookupAll key pairs = [v | (k, v) <- pairs, k == key]
-- | A CGI response: the Content-Type header value and the response body
--   (written verbatim after the header block by 'runCGI').
data HTTPResponse = HTTPResponse {
    resContentType :: String,
    resBody :: String
  }
textContentType typ encoding = concat [typ, "; charset=\"", encoding, "\""]
| msakai/ptq | src/CGI.hs | lgpl-2.1 | 3,656 | 0 | 12 | 962 | 964 | 522 | 442 | 71 | 5 |
{-# LANGUAGE OverloadedStrings #-}
module Web.SpiraJira.Config (
JiraConfig(..),
parseConfig
) where
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy as LBS
import qualified Data.Configurator as Config
import Data.Configurator.Types (Config, Name)
-- | Connection settings for the JIRA instance: the credentials (kept as
--   strict ByteStrings, as required by the HTTP auth layer — TODO confirm
--   against the caller) and the service base URI.
data JiraConfig = JiraConfig {
  username :: BS.ByteString,
  password :: BS.ByteString,
  uri      :: String
  } deriving (Eq, Show)
-- | Load SpiraJira settings, merging the standard optional config
--   locations with the explicitly supplied path. Missing keys fall back to
--   placeholder strings rather than failing.
parseConfig :: FilePath -> IO JiraConfig
parseConfig path = do
  cfg <- Config.load
    [ Config.Optional "$(HOME)/.spirajira"
    , Config.Optional "../spirajira.cfg"
    , Config.Optional "spirajira.cfg"
    , Config.Optional path
    ]
  user <- Config.lookupDefault "Missing User" cfg "user"
  pass <- Config.lookupDefault "Missing Password" cfg "pass"
  base <- Config.lookupDefault "Missing URI" cfg "uri"
  return (JiraConfig (BS.pack user) (BS.pack pass) base)
| tobytripp/SpiraJira | src/Web/SpiraJira/Config.hs | lgpl-3.0 | 874 | 0 | 11 | 161 | 244 | 134 | 110 | 24 | 1 |
import Control.Arrow
import Control.Applicative
import Control.Monad.IO.Class
import Data.Either
import Data.PrettyPrint
import LinkGrammar.AST
import LinkGrammar.Parsec
import LinkGrammar.Process
import System.Environment
import Text.Printf
import Options.Applicative
-- | Parsed command-line options: the output file for the compiled ruleset
--   and the input grammar file.
data CliOptions = CliOptions {
    _outfile :: String
  , _infile :: String
  }
-- | optparse-applicative parser for 'CliOptions': a required -o\/--output
--   option and one positional GRAMMAR argument.
cliOptions :: Parser CliOptions
cliOptions = CliOptions <$>
             strOption (long "output"
                       <> metavar "OUTFILE"
                       <> short 'o'
                       <> help "Output file names")
             <*> argument str (metavar "GRAMMAR"
                              <> help "Input file")
-- processFile o f = parseLink <$> (CPP.runCpphs o f =<< readFile f)
-- | Parse the grammar file named on the command line and compile it into a
--   ruleset written to the requested output file; parse failures are
--   printed instead.
main :: IO ()
main = do
  opts <- execParser (info (helper <*> cliOptions) fullDesc)
  parsed <- parseLink <$> readFile (_infile opts)
  case parsed of
    Left err -> putStrLn err
    Right rules -> do
      makeRuleset (_outfile opts) rules
      --mapM (putStrLn . pretty) $ take 20 rules
      --mapM (putStrLn . drawTree . fmap show . _links) $ take 20 rules
      printf "Ok, imported %d rules\n" $ length rules
| k32/zenmaster | Main.hs | unlicense | 1,144 | 4 | 14 | 296 | 270 | 136 | 134 | 31 | 2 |
import AdventOfCode (readInputFile)
import Data.Char (isUpper)
import qualified Data.Map.Strict as Map
import qualified Data.Set as Set
-- A => BC increases size by 1
-- A => BRnCAr increases size by 3
-- A => BRnCYDAr increases size by 5
-- A => BRnCYDYEAr increases size by 7
-- So to count number of steps to go from e to the final molecule:
-- Count the elements in the final molecule, subtract 1 (we start from e).
-- Subtract 2 for every RnAr pair, subtract 2 for every Y
-- | Number of replacement steps needed to grow @e@ into molecule @m@.
-- Each ordinary production adds one element, so start from the element
-- count minus the (maxE - 1) elements the first step already produces;
-- every Rn/Ar pair and every Y arrives "for free" inside a grouped
-- production, discounting two steps apiece.
iterations :: [String] -> Int -> Int
iterations m maxE =
  length m - 2 * occurrences "Rn" - 2 * occurrences "Y" - (maxE - 1)
  where occurrences e = length [x | x <- m, x == e]
-- | All molecules reachable from @s@ by applying one replacement rule
-- from @m@ at any single position (results may contain duplicates).
nexts :: Map.Map String [String] -> [String] -> [String]
nexts m s =
  concatMap (\i -> spliceNexts m (splitAt i s)) [0 .. length s - 1]

-- | Given a molecule split as (prefix, element : suffix), substitute each
-- right-hand side registered for the element at the split point.
spliceNexts :: Map.Map String [String] -> ([String], [String]) -> [String]
spliceNexts _ (s, []) = error ("spliceNexts has no after for " ++ unwords s)
spliceNexts m (before, focus:after) =
  [concat before ++ replacement ++ concat after | replacement <- rhss]
  where rhss = Map.findWithDefault [] focus m
-- splitBy isUpper "ABCdeFGHiJ" == ["A", "B", "Cde", "F", "G", "Hi", "J"]
-- This could have been achieved by defining:
-- splitBy f = groupBy (\_ b -> (not . f) b)
-- However, I decided this was dangerous.
-- groupBy assumes its equality predicate is an equality.
-- However the provided function (\_ b -> f b)
-- would not be reflexive nor symmetric nor transitive.
-- So it happens to work now, but could be broken in the future,
-- as it breaks the contract of groupBy.
-- | Split a list into groups, starting a new group at every element
-- satisfying the predicate; each group begins with its delimiter element.
splitBy :: (a -> Bool) -> [a] -> [[a]]
splitBy _ [] = []
splitBy f (x:xs) =
  let (grouped, rest) = break f xs
  in (x : grouped) : splitBy f rest
-- | Parse a replacement rule of the form @\"A => B\"@ into a pair of the
-- left-hand element and a singleton right-hand-side list (shaped for
-- 'Map.fromListWith' aggregation).  Malformed lines are a program error.
parseRule :: String -> (String, [String])
parseRule line =
  case words line of
    [lhs, "=>", rhs] -> (lhs, [rhs])
    _                -> error ("bad rule: " ++ line)
-- | Read the puzzle input (rules, blank line, molecule), then print the
-- count of distinct single-step replacements (part 1) and the number of
-- steps to fabricate the molecule from @e@ (part 2).
main :: IO ()
main = do
  contents <- readInputFile
  let ls         = lines contents
      -- All but the final two lines (blank separator + molecule) are rules.
      ruleAssocs = map parseRule (init (init ls))
      rules      = Map.fromListWith (++) ruleAssocs
      molecule   = splitBy isUpper (last ls)
      -- Widest production from "e" fixes the size of the very first step.
      maxE       = maximum (map (length . splitBy isUpper) (rules Map.! "e"))
  print (Set.size (Set.fromList (nexts rules molecule)))
  print (iterations molecule maxE)
| petertseng/adventofcode-hs | bin/19_molecule_replacement.hs | apache-2.0 | 2,184 | 1 | 15 | 460 | 696 | 367 | 329 | 32 | 2 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QPalette.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:35
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Enums.Gui.QPalette (
ColorGroup, eInactive, eNColorGroups, eAll
, ColorRole, eWindowText, eButton, eMidlight, eDark, eMid, eBrightText, eButtonText, eBase, eShadow, eHighlight, eHighlightedText, eLink, eLinkVisited, eAlternateBase, eNColorRoles, eForeground, eBackground
)
where
import Foreign.C.Types
import Qtc.Classes.Base
import Qtc.ClassTypes.Core (QObject, TQObject, qObjectFromPtr)
import Qtc.Core.Base (Qcs, connectSlot, qtc_connectSlot_int, wrapSlotHandler_int)
import Qtc.Enums.Base
import Qtc.Enums.Classes.Core
-- Carrier type for the QPalette::ColorGroup C++ enum; the parameter is
-- always instantiated at Int, the raw enum representation.
data CColorGroup a = CColorGroup a
type ColorGroup = QEnum(CColorGroup Int)

-- Internal constructor: wrap a raw C enum value as a ColorGroup.
ieColorGroup :: Int -> ColorGroup
ieColorGroup x = QEnum (CColorGroup x)

-- Marshalling between the Haskell enum wrapper and raw C ints.
instance QEnumC (CColorGroup Int) where
  qEnum_toInt (QEnum (CColorGroup x)) = x
  qEnum_fromInt x = QEnum (CColorGroup x)
  withQEnumResult x
    = do
      ti <- x
      return $ qEnum_fromInt $ fromIntegral ti
  withQEnumListResult x
    = do
      til <- x
      return $ map qEnum_fromInt til

-- Connect an int-carrying Qt signal to a Haskell slot receiving the value
-- as a ColorGroup.  The generated wrapper frees its StablePtr/FunPtr pair
-- once the receiving object is observed to be destroyed.
instance Qcs (QObject c -> ColorGroup -> IO ()) where
  connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler
    = do
      funptr <- wrapSlotHandler_int slotHandlerWrapper_int
      stptr <- newStablePtr (Wrap _handler)
      withObjectPtr _qsig_obj $ \cobj_sig ->
        withCWString _qsig_nam $ \cstr_sig ->
          withObjectPtr _qslt_obj $ \cobj_slt ->
            withCWString _qslt_nam $ \cstr_slt ->
              qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr)
      return ()
    where
      slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO ()
      slotHandlerWrapper_int funptr stptr qobjptr cint
        = do qobj <- qObjectFromPtr qobjptr
             let hint = fromCInt cint
             if (objectIsNull qobj)
               then do when (stptr/=ptrNull)
                         (freeStablePtr (castPtrToStablePtr stptr))
                       when (funptr/=ptrNull)
                         (freeHaskellFunPtr (castPtrToFunPtr funptr))
               else _handler qobj (qEnum_fromInt hint)
             return ()

-- Enum members mirror the numeric values of QPalette::ColorGroup in C++.
instance QeActive ColorGroup where
  eActive
    = ieColorGroup $ 0

instance QeDisabled ColorGroup where
  eDisabled
    = ieColorGroup $ 1

eInactive :: ColorGroup
eInactive
  = ieColorGroup $ 2

eNColorGroups :: ColorGroup
eNColorGroups
  = ieColorGroup $ 3

instance QeCurrent ColorGroup where
  eCurrent
    = ieColorGroup $ 4

eAll :: ColorGroup
eAll
  = ieColorGroup $ 5

-- Normal is an alias for Active (both 0), matching the Qt headers.
instance QeNormal ColorGroup where
  eNormal
    = ieColorGroup $ 0
-- Carrier type for the QPalette::ColorRole C++ enum; the parameter is
-- always instantiated at Int, the raw enum representation.
data CColorRole a = CColorRole a
type ColorRole = QEnum(CColorRole Int)

-- Internal constructor: wrap a raw C enum value as a ColorRole.
ieColorRole :: Int -> ColorRole
ieColorRole x = QEnum (CColorRole x)

-- Marshalling between the Haskell enum wrapper and raw C ints.
instance QEnumC (CColorRole Int) where
  qEnum_toInt (QEnum (CColorRole x)) = x
  qEnum_fromInt x = QEnum (CColorRole x)
  withQEnumResult x
    = do
      ti <- x
      return $ qEnum_fromInt $ fromIntegral ti
  withQEnumListResult x
    = do
      til <- x
      return $ map qEnum_fromInt til

-- Connect an int-carrying Qt signal to a Haskell slot receiving the value
-- as a ColorRole.  The generated wrapper frees its StablePtr/FunPtr pair
-- once the receiving object is observed to be destroyed.
instance Qcs (QObject c -> ColorRole -> IO ()) where
  connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler
    = do
      funptr <- wrapSlotHandler_int slotHandlerWrapper_int
      stptr <- newStablePtr (Wrap _handler)
      withObjectPtr _qsig_obj $ \cobj_sig ->
        withCWString _qsig_nam $ \cstr_sig ->
          withObjectPtr _qslt_obj $ \cobj_slt ->
            withCWString _qslt_nam $ \cstr_slt ->
              qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr)
      return ()
    where
      slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO ()
      slotHandlerWrapper_int funptr stptr qobjptr cint
        = do qobj <- qObjectFromPtr qobjptr
             let hint = fromCInt cint
             if (objectIsNull qobj)
               then do when (stptr/=ptrNull)
                         (freeStablePtr (castPtrToStablePtr stptr))
                       when (funptr/=ptrNull)
                         (freeHaskellFunPtr (castPtrToFunPtr funptr))
               else _handler qobj (qEnum_fromInt hint)
             return ()

-- Enum members mirror the numeric values of QPalette::ColorRole in C++.
eWindowText :: ColorRole
eWindowText
  = ieColorRole $ 0

eButton :: ColorRole
eButton
  = ieColorRole $ 1

instance QeLight ColorRole where
  eLight
    = ieColorRole $ 2

eMidlight :: ColorRole
eMidlight
  = ieColorRole $ 3

eDark :: ColorRole
eDark
  = ieColorRole $ 4

eMid :: ColorRole
eMid
  = ieColorRole $ 5

instance QeText ColorRole where
  eText
    = ieColorRole $ 6

eBrightText :: ColorRole
eBrightText
  = ieColorRole $ 7

eButtonText :: ColorRole
eButtonText
  = ieColorRole $ 8

eBase :: ColorRole
eBase
  = ieColorRole $ 9

instance QeWindow ColorRole where
  eWindow
    = ieColorRole $ 10

eShadow :: ColorRole
eShadow
  = ieColorRole $ 11

eHighlight :: ColorRole
eHighlight
  = ieColorRole $ 12

eHighlightedText :: ColorRole
eHighlightedText
  = ieColorRole $ 13

eLink :: ColorRole
eLink
  = ieColorRole $ 14

eLinkVisited :: ColorRole
eLinkVisited
  = ieColorRole $ 15

eAlternateBase :: ColorRole
eAlternateBase
  = ieColorRole $ 16

instance QeNoRole ColorRole where
  eNoRole
    = ieColorRole $ 17

-- NColorRoles shares the value 17 with NoRole, as in the Qt headers.
eNColorRoles :: ColorRole
eNColorRoles
  = ieColorRole $ 17

-- Obsolete aliases retained by Qt: Foreground = WindowText (0),
-- Background = Window (10).
eForeground :: ColorRole
eForeground
  = ieColorRole $ 0

eBackground :: ColorRole
eBackground
  = ieColorRole $ 10
| keera-studios/hsQt | Qtc/Enums/Gui/QPalette.hs | bsd-2-clause | 5,569 | 0 | 18 | 1,213 | 1,522 | 786 | 736 | 168 | 1 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QIntValidator_h.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:21
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Gui.QIntValidator_h where
import Qtc.Enums.Base
import Qtc.Classes.Base
import Qtc.Classes.Qccs_h
import Qtc.Classes.Core_h
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
import Qtc.Classes.Gui_h
import Qtc.ClassTypes.Gui
import Foreign.Marshal.Array
-- Remove a previously installed user method (identified by event id) from
-- a QIntValidator; selector 0 addresses the plain (unit-returning) user
-- methods.  Returns True when a method was actually removed.
instance QunSetUserMethod (QIntValidator ()) where
  unSetUserMethod qobj evid
    = withBoolResult $
      withObjectPtr qobj $ \cobj_qobj ->
      qtc_QIntValidator_unSetUserMethod cobj_qobj (toCInt 0) (toCInt evid)

foreign import ccall "qtc_QIntValidator_unSetUserMethod" qtc_QIntValidator_unSetUserMethod :: Ptr (TQIntValidator a) -> CInt -> CInt -> IO (CBool)

instance QunSetUserMethod (QIntValidatorSc a) where
  unSetUserMethod qobj evid
    = withBoolResult $
      withObjectPtr qobj $ \cobj_qobj ->
      qtc_QIntValidator_unSetUserMethod cobj_qobj (toCInt 0) (toCInt evid)

-- Selector 1 addresses the QVariant-returning user methods.
instance QunSetUserMethodVariant (QIntValidator ()) where
  unSetUserMethodVariant qobj evid
    = withBoolResult $
      withObjectPtr qobj $ \cobj_qobj ->
      qtc_QIntValidator_unSetUserMethod cobj_qobj (toCInt 1) (toCInt evid)

instance QunSetUserMethodVariant (QIntValidatorSc a) where
  unSetUserMethodVariant qobj evid
    = withBoolResult $
      withObjectPtr qobj $ \cobj_qobj ->
      qtc_QIntValidator_unSetUserMethod cobj_qobj (toCInt 1) (toCInt evid)

-- Selector 2 addresses the QVariant-list-returning user methods.
instance QunSetUserMethodVariantList (QIntValidator ()) where
  unSetUserMethodVariantList qobj evid
    = withBoolResult $
      withObjectPtr qobj $ \cobj_qobj ->
      qtc_QIntValidator_unSetUserMethod cobj_qobj (toCInt 2) (toCInt evid)

instance QunSetUserMethodVariantList (QIntValidatorSc a) where
  unSetUserMethodVariantList qobj evid
    = withBoolResult $
      withObjectPtr qobj $ \cobj_qobj ->
      qtc_QIntValidator_unSetUserMethod cobj_qobj (toCInt 2) (toCInt evid)
-- Install a Haskell callback as a user method on a QIntValidator.  The
-- handler is wrapped in a C-callable FunPtr plus a StablePtr; the second
-- wrapper (_d) is the destructor the C++ side invokes to release both
-- pointers when the method is torn down.
instance QsetUserMethod (QIntValidator ()) (QIntValidator x0 -> IO ()) where
  setUserMethod _eobj _eid _handler
    = do
      funptr <- wrapSetUserMethod_QIntValidator setHandlerWrapper
      stptr <- newStablePtr (Wrap _handler)
      funptr_d <- wrapSetUserMethod_QIntValidator_d setHandlerWrapper_d
      withObjectPtr _eobj $ \cobj_eobj ->
        qtc_QIntValidator_setUserMethod cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
      return ()
    where
      setHandlerWrapper :: Ptr (TQIntValidator x0) -> IO ()
      setHandlerWrapper x0
        = do
          x0obj <- objectFromPtr_nf x0
          if (objectIsNull x0obj)
            then return ()
            else _handler x0obj
      setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
      setHandlerWrapper_d funptr stptr funptr_d
        = do when (stptr/=ptrNull)
               (freeStablePtr (castPtrToStablePtr stptr))
             when (funptr/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr))
             when (funptr_d/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr_d))
             return ()

foreign import ccall "qtc_QIntValidator_setUserMethod" qtc_QIntValidator_setUserMethod :: Ptr (TQIntValidator a) -> CInt -> Ptr (Ptr (TQIntValidator x0) -> IO ()) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetUserMethod_QIntValidator :: (Ptr (TQIntValidator x0) -> IO ()) -> IO (FunPtr (Ptr (TQIntValidator x0) -> IO ()))
foreign import ccall "wrapper" wrapSetUserMethod_QIntValidator_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))

-- Identical installation path for the subclass wrapper type.
instance QsetUserMethod (QIntValidatorSc a) (QIntValidator x0 -> IO ()) where
  setUserMethod _eobj _eid _handler
    = do
      funptr <- wrapSetUserMethod_QIntValidator setHandlerWrapper
      stptr <- newStablePtr (Wrap _handler)
      funptr_d <- wrapSetUserMethod_QIntValidator_d setHandlerWrapper_d
      withObjectPtr _eobj $ \cobj_eobj ->
        qtc_QIntValidator_setUserMethod cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
      return ()
    where
      setHandlerWrapper :: Ptr (TQIntValidator x0) -> IO ()
      setHandlerWrapper x0
        = do
          x0obj <- objectFromPtr_nf x0
          if (objectIsNull x0obj)
            then return ()
            else _handler x0obj
      setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
      setHandlerWrapper_d funptr stptr funptr_d
        = do when (stptr/=ptrNull)
               (freeStablePtr (castPtrToStablePtr stptr))
             when (funptr/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr))
             when (funptr_d/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr_d))
             return ()

-- QVariant-in/QVariant-out user methods.  If the receiving object has
-- been destroyed the wrapper returns the (null) object cast instead of
-- invoking the handler.
instance QsetUserMethod (QIntValidator ()) (QIntValidator x0 -> QVariant () -> IO (QVariant ())) where
  setUserMethod _eobj _eid _handler
    = do
      funptr <- wrapSetUserMethodVariant_QIntValidator setHandlerWrapper
      stptr <- newStablePtr (Wrap _handler)
      funptr_d <- wrapSetUserMethodVariant_QIntValidator_d setHandlerWrapper_d
      withObjectPtr _eobj $ \cobj_eobj ->
        qtc_QIntValidator_setUserMethodVariant cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
      return ()
    where
      setHandlerWrapper :: Ptr (TQIntValidator x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))
      setHandlerWrapper x0 x1
        = do
          x0obj <- objectFromPtr_nf x0
          x1obj <- objectFromPtr_nf x1
          rv <- if (objectIsNull x0obj)
                  then return $ objectCast x0obj
                  else _handler x0obj x1obj
          withObjectPtr rv $ \cobj_rv -> return cobj_rv
      setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
      setHandlerWrapper_d funptr stptr funptr_d
        = do when (stptr/=ptrNull)
               (freeStablePtr (castPtrToStablePtr stptr))
             when (funptr/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr))
             when (funptr_d/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr_d))
             return ()

foreign import ccall "qtc_QIntValidator_setUserMethodVariant" qtc_QIntValidator_setUserMethodVariant :: Ptr (TQIntValidator a) -> CInt -> Ptr (Ptr (TQIntValidator x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetUserMethodVariant_QIntValidator :: (Ptr (TQIntValidator x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))) -> IO (FunPtr (Ptr (TQIntValidator x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))))
foreign import ccall "wrapper" wrapSetUserMethodVariant_QIntValidator_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))

-- Identical QVariant path for the subclass wrapper type.
instance QsetUserMethod (QIntValidatorSc a) (QIntValidator x0 -> QVariant () -> IO (QVariant ())) where
  setUserMethod _eobj _eid _handler
    = do
      funptr <- wrapSetUserMethodVariant_QIntValidator setHandlerWrapper
      stptr <- newStablePtr (Wrap _handler)
      funptr_d <- wrapSetUserMethodVariant_QIntValidator_d setHandlerWrapper_d
      withObjectPtr _eobj $ \cobj_eobj ->
        qtc_QIntValidator_setUserMethodVariant cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
      return ()
    where
      setHandlerWrapper :: Ptr (TQIntValidator x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))
      setHandlerWrapper x0 x1
        = do
          x0obj <- objectFromPtr_nf x0
          x1obj <- objectFromPtr_nf x1
          rv <- if (objectIsNull x0obj)
                  then return $ objectCast x0obj
                  else _handler x0obj x1obj
          withObjectPtr rv $ \cobj_rv -> return cobj_rv
      setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
      setHandlerWrapper_d funptr stptr funptr_d
        = do when (stptr/=ptrNull)
               (freeStablePtr (castPtrToStablePtr stptr))
             when (funptr/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr))
             when (funptr_d/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr_d))
             return ()
-- Remove a named event-handler override; returns True when one existed.
instance QunSetHandler (QIntValidator ()) where
  unSetHandler qobj evid
    = withBoolResult $
      withObjectPtr qobj $ \cobj_qobj ->
      withCWString evid $ \cstr_evid ->
      qtc_QIntValidator_unSetHandler cobj_qobj cstr_evid

foreign import ccall "qtc_QIntValidator_unSetHandler" qtc_QIntValidator_unSetHandler :: Ptr (TQIntValidator a) -> CWString -> IO (CBool)

instance QunSetHandler (QIntValidatorSc a) where
  unSetHandler qobj evid
    = withBoolResult $
      withObjectPtr qobj $ \cobj_qobj ->
      withCWString evid $ \cstr_evid ->
      qtc_QIntValidator_unSetHandler cobj_qobj cstr_evid
-- Install a named handler taking two Ints (used for setRange overrides).
-- The wrapper marshals the C ints and skips the handler if the object has
-- already been destroyed; the _d wrapper releases the pointer pair.
instance QsetHandler (QIntValidator ()) (QIntValidator x0 -> Int -> Int -> IO ()) where
  setHandler _eobj _eid _handler
    = do
      funptr <- wrapSetHandler_QIntValidator1 setHandlerWrapper
      stptr <- newStablePtr (Wrap _handler)
      funptr_d <- wrapSetHandler_QIntValidator1_d setHandlerWrapper_d
      withObjectPtr _eobj $ \cobj_eobj ->
        withCWString _eid $ \cstr_eid ->
          qtc_QIntValidator_setHandler1 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
      return()
    where
      setHandlerWrapper :: Ptr (TQIntValidator x0) -> CInt -> CInt -> IO ()
      setHandlerWrapper x0 x1 x2
        = do x0obj <- qIntValidatorFromPtr x0
             let x1int = fromCInt x1
             let x2int = fromCInt x2
             if (objectIsNull x0obj)
               then return ()
               else _handler x0obj x1int x2int
      setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
      setHandlerWrapper_d funptr stptr funptr_d
        = do when (stptr/=ptrNull)
               (freeStablePtr (castPtrToStablePtr stptr))
             when (funptr/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr))
             when (funptr_d/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr_d))
             return ()

foreign import ccall "qtc_QIntValidator_setHandler1" qtc_QIntValidator_setHandler1 :: Ptr (TQIntValidator a) -> CWString -> Ptr (Ptr (TQIntValidator x0) -> CInt -> CInt -> IO ()) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QIntValidator1 :: (Ptr (TQIntValidator x0) -> CInt -> CInt -> IO ()) -> IO (FunPtr (Ptr (TQIntValidator x0) -> CInt -> CInt -> IO ()))
foreign import ccall "wrapper" wrapSetHandler_QIntValidator1_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))

-- Identical installation path for the subclass wrapper type.
instance QsetHandler (QIntValidatorSc a) (QIntValidator x0 -> Int -> Int -> IO ()) where
  setHandler _eobj _eid _handler
    = do
      funptr <- wrapSetHandler_QIntValidator1 setHandlerWrapper
      stptr <- newStablePtr (Wrap _handler)
      funptr_d <- wrapSetHandler_QIntValidator1_d setHandlerWrapper_d
      withObjectPtr _eobj $ \cobj_eobj ->
        withCWString _eid $ \cstr_eid ->
          qtc_QIntValidator_setHandler1 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
      return()
    where
      setHandlerWrapper :: Ptr (TQIntValidator x0) -> CInt -> CInt -> IO ()
      setHandlerWrapper x0 x1 x2
        = do x0obj <- qIntValidatorFromPtr x0
             let x1int = fromCInt x1
             let x2int = fromCInt x2
             if (objectIsNull x0obj)
               then return ()
               else _handler x0obj x1int x2int
      setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
      setHandlerWrapper_d funptr stptr funptr_d
        = do when (stptr/=ptrNull)
               (freeStablePtr (castPtrToStablePtr stptr))
             when (funptr/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr))
             when (funptr_d/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr_d))
             return ()
-- Virtual-method dispatch for QIntValidator::setRange(int, int).
instance QsetRange_h (QIntValidator ()) ((Int, Int)) where
  setRange_h x0 (x1, x2)
    = withObjectPtr x0 $ \cobj_x0 ->
      qtc_QIntValidator_setRange cobj_x0 (toCInt x1) (toCInt x2)

foreign import ccall "qtc_QIntValidator_setRange" qtc_QIntValidator_setRange :: Ptr (TQIntValidator a) -> CInt -> CInt -> IO ()

instance QsetRange_h (QIntValidatorSc a) ((Int, Int)) where
  setRange_h x0 (x1, x2)
    = withObjectPtr x0 $ \cobj_x0 ->
      qtc_QIntValidator_setRange cobj_x0 (toCInt x1) (toCInt x2)
-- Install a named handler for event(QEvent) overrides returning Bool.
-- A destroyed receiver makes the wrapper report False without invoking
-- the handler; the _d wrapper releases the pointer pair.
instance QsetHandler (QIntValidator ()) (QIntValidator x0 -> QEvent t1 -> IO (Bool)) where
  setHandler _eobj _eid _handler
    = do
      funptr <- wrapSetHandler_QIntValidator2 setHandlerWrapper
      stptr <- newStablePtr (Wrap _handler)
      funptr_d <- wrapSetHandler_QIntValidator2_d setHandlerWrapper_d
      withObjectPtr _eobj $ \cobj_eobj ->
        withCWString _eid $ \cstr_eid ->
          qtc_QIntValidator_setHandler2 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
      return()
    where
      setHandlerWrapper :: Ptr (TQIntValidator x0) -> Ptr (TQEvent t1) -> IO (CBool)
      setHandlerWrapper x0 x1
        = do x0obj <- qIntValidatorFromPtr x0
             x1obj <- objectFromPtr_nf x1
             let rv =
                   if (objectIsNull x0obj)
                     then return False
                     else _handler x0obj x1obj
             rvf <- rv
             return (toCBool rvf)
      setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
      setHandlerWrapper_d funptr stptr funptr_d
        = do when (stptr/=ptrNull)
               (freeStablePtr (castPtrToStablePtr stptr))
             when (funptr/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr))
             when (funptr_d/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr_d))
             return ()

foreign import ccall "qtc_QIntValidator_setHandler2" qtc_QIntValidator_setHandler2 :: Ptr (TQIntValidator a) -> CWString -> Ptr (Ptr (TQIntValidator x0) -> Ptr (TQEvent t1) -> IO (CBool)) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QIntValidator2 :: (Ptr (TQIntValidator x0) -> Ptr (TQEvent t1) -> IO (CBool)) -> IO (FunPtr (Ptr (TQIntValidator x0) -> Ptr (TQEvent t1) -> IO (CBool)))
foreign import ccall "wrapper" wrapSetHandler_QIntValidator2_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))

-- Identical installation path for the subclass wrapper type.
instance QsetHandler (QIntValidatorSc a) (QIntValidator x0 -> QEvent t1 -> IO (Bool)) where
  setHandler _eobj _eid _handler
    = do
      funptr <- wrapSetHandler_QIntValidator2 setHandlerWrapper
      stptr <- newStablePtr (Wrap _handler)
      funptr_d <- wrapSetHandler_QIntValidator2_d setHandlerWrapper_d
      withObjectPtr _eobj $ \cobj_eobj ->
        withCWString _eid $ \cstr_eid ->
          qtc_QIntValidator_setHandler2 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
      return()
    where
      setHandlerWrapper :: Ptr (TQIntValidator x0) -> Ptr (TQEvent t1) -> IO (CBool)
      setHandlerWrapper x0 x1
        = do x0obj <- qIntValidatorFromPtr x0
             x1obj <- objectFromPtr_nf x1
             let rv =
                   if (objectIsNull x0obj)
                     then return False
                     else _handler x0obj x1obj
             rvf <- rv
             return (toCBool rvf)
      setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
      setHandlerWrapper_d funptr stptr funptr_d
        = do when (stptr/=ptrNull)
               (freeStablePtr (castPtrToStablePtr stptr))
             when (funptr/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr))
             when (funptr_d/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr_d))
             return ()
-- Virtual-method dispatch for QIntValidator::event(QEvent*).
instance Qevent_h (QIntValidator ()) ((QEvent t1)) where
  event_h x0 (x1)
    = withBoolResult $
      withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QIntValidator_event cobj_x0 cobj_x1

foreign import ccall "qtc_QIntValidator_event" qtc_QIntValidator_event :: Ptr (TQIntValidator a) -> Ptr (TQEvent t1) -> IO CBool

instance Qevent_h (QIntValidatorSc a) ((QEvent t1)) where
  event_h x0 (x1)
    = withBoolResult $
      withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QIntValidator_event cobj_x0 cobj_x1
-- Install a named handler for eventFilter(QObject, QEvent) overrides
-- returning Bool.  A destroyed receiver makes the wrapper report False;
-- the _d wrapper releases the pointer pair.
instance QsetHandler (QIntValidator ()) (QIntValidator x0 -> QObject t1 -> QEvent t2 -> IO (Bool)) where
  setHandler _eobj _eid _handler
    = do
      funptr <- wrapSetHandler_QIntValidator3 setHandlerWrapper
      stptr <- newStablePtr (Wrap _handler)
      funptr_d <- wrapSetHandler_QIntValidator3_d setHandlerWrapper_d
      withObjectPtr _eobj $ \cobj_eobj ->
        withCWString _eid $ \cstr_eid ->
          qtc_QIntValidator_setHandler3 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
      return()
    where
      setHandlerWrapper :: Ptr (TQIntValidator x0) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO (CBool)
      setHandlerWrapper x0 x1 x2
        = do x0obj <- qIntValidatorFromPtr x0
             x1obj <- qObjectFromPtr x1
             x2obj <- objectFromPtr_nf x2
             let rv =
                   if (objectIsNull x0obj)
                     then return False
                     else _handler x0obj x1obj x2obj
             rvf <- rv
             return (toCBool rvf)
      setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
      setHandlerWrapper_d funptr stptr funptr_d
        = do when (stptr/=ptrNull)
               (freeStablePtr (castPtrToStablePtr stptr))
             when (funptr/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr))
             when (funptr_d/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr_d))
             return ()

foreign import ccall "qtc_QIntValidator_setHandler3" qtc_QIntValidator_setHandler3 :: Ptr (TQIntValidator a) -> CWString -> Ptr (Ptr (TQIntValidator x0) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO (CBool)) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QIntValidator3 :: (Ptr (TQIntValidator x0) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO (CBool)) -> IO (FunPtr (Ptr (TQIntValidator x0) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO (CBool)))
foreign import ccall "wrapper" wrapSetHandler_QIntValidator3_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))

-- Identical installation path for the subclass wrapper type.
instance QsetHandler (QIntValidatorSc a) (QIntValidator x0 -> QObject t1 -> QEvent t2 -> IO (Bool)) where
  setHandler _eobj _eid _handler
    = do
      funptr <- wrapSetHandler_QIntValidator3 setHandlerWrapper
      stptr <- newStablePtr (Wrap _handler)
      funptr_d <- wrapSetHandler_QIntValidator3_d setHandlerWrapper_d
      withObjectPtr _eobj $ \cobj_eobj ->
        withCWString _eid $ \cstr_eid ->
          qtc_QIntValidator_setHandler3 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
      return()
    where
      setHandlerWrapper :: Ptr (TQIntValidator x0) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO (CBool)
      setHandlerWrapper x0 x1 x2
        = do x0obj <- qIntValidatorFromPtr x0
             x1obj <- qObjectFromPtr x1
             x2obj <- objectFromPtr_nf x2
             let rv =
                   if (objectIsNull x0obj)
                     then return False
                     else _handler x0obj x1obj x2obj
             rvf <- rv
             return (toCBool rvf)
      setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
      setHandlerWrapper_d funptr stptr funptr_d
        = do when (stptr/=ptrNull)
               (freeStablePtr (castPtrToStablePtr stptr))
             when (funptr/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr))
             when (funptr_d/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr_d))
             return ()
-- Virtual-method dispatch for QIntValidator::eventFilter(QObject*, QEvent*).
instance QeventFilter_h (QIntValidator ()) ((QObject t1, QEvent t2)) where
  eventFilter_h x0 (x1, x2)
    = withBoolResult $
      withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      withObjectPtr x2 $ \cobj_x2 ->
      qtc_QIntValidator_eventFilter cobj_x0 cobj_x1 cobj_x2

foreign import ccall "qtc_QIntValidator_eventFilter" qtc_QIntValidator_eventFilter :: Ptr (TQIntValidator a) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO CBool

instance QeventFilter_h (QIntValidatorSc a) ((QObject t1, QEvent t2)) where
  eventFilter_h x0 (x1, x2)
    = withBoolResult $
      withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      withObjectPtr x2 $ \cobj_x2 ->
      qtc_QIntValidator_eventFilter cobj_x0 cobj_x1 cobj_x2
| uduki/hsQt | Qtc/Gui/QIntValidator_h.hs | bsd-2-clause | 20,654 | 0 | 18 | 4,648 | 6,737 | 3,209 | 3,528 | -1 | -1 |
{-# LANGUAGE PatternSynonyms #-}
{-|
Module: HaskHOL.Core.Kernel
Copyright: (c) Evan Austin 2015
LICENSE: BSD3
Maintainer: e.c.austin@gmail.com
Stability: unstable
Portability: unknown
This module exports the logical kernel of HaskHOL. It consists of:
* A safe view of HOL theorems for HaskHOL.
* The primitive inference rules of the system.
* The primitive, stateless theory extension functions.
For clarity, all of these items have been separated based on their influential
system: HOL Light, Stateless HOL, and HOL2P.
Note that, per the stateless approach, any stateful, but still primitive,
functions related to theorems or theory extension have been relocated to the
"HaskHOL.Core.State" module.
-}
module HaskHOL.Core.Kernel
( -- * A View of HOL Types, Terms, and Theorems
-- ** A Quick Note on Pattern Synonyms
-- $ViewPatterns
-- ** Destructors and Accessors for Theorems
HOLThm
, pattern Thm
, destThm
, hyp
, concl
-- * HOL Light Primitive Inference Rules
, primREFL
, primTRANS
, primMK_COMB
, primABS
, primBETA
, primASSUME
, primEQ_MP
, primDEDUCT_ANTISYM
, primINST_TYPE
, primINST_TYPE_FULL
, primINST
-- * HOL2P Primitive Inference Rules
, primTYABS
, primTYAPP2
, primTYAPP
, primTYBETA
-- * Stateless HOL Primitive Theory Extensions
, axiomThm
, newDefinedConst
, newDefinedTypeOp
-- * Primitive Re-Exports
, HOLPrimError(..)
, module HaskHOL.Core.Kernel.Types
, module HaskHOL.Core.Kernel.Terms
) where
import HaskHOL.Core.Lib
import HaskHOL.Core.Kernel.Prims
import HaskHOL.Core.Kernel.Types
import HaskHOL.Core.Kernel.Terms
import Data.Hashable
{-
Used to quickly make an equality between two terms we know to be of the same
type. Not exposed to the user.
-}
-- Build an equation between two terms already known to share a type;
-- the equality constant is specialised to the type of the left side.
safeMkEq :: HOLTerm -> HOLTerm -> HOLTerm
safeMkEq lhs rhs = CombIn (CombIn (tmEq (typeOf lhs)) lhs) rhs
{-
Unions two lists of terms, ordering the result modulo alpha-equivalence. Not
exposed to the user.
-}
-- Merge two alpha-ordered term lists, dropping alpha-equivalent
-- duplicates (a sorted-merge that keeps the left witness on ties).
termUnion :: [HOLTerm] -> [HOLTerm] -> [HOLTerm]
termUnion [] ys = ys
termUnion xs [] = xs
termUnion xs@(x:xs') ys@(y:ys') =
    case alphaOrder x y of
      LT -> x : termUnion xs' ys
      EQ -> x : termUnion xs' ys'
      GT -> y : termUnion xs ys'
{-
Removes a term from a term list, ordering the result modulo alpha-equivalence.
Not exposed to the user.
-}
-- Delete the first alpha-equivalent occurrence of a term from an
-- alpha-ordered list; the ordering lets us stop early once past it.
termRemove :: HOLTerm -> [HOLTerm] -> [HOLTerm]
termRemove _ [] = []
termRemove t whole@(x:rest) =
    case alphaOrder t x of
      GT -> x : termRemove t rest
      EQ -> rest
      _  -> whole
{-
Maps a function over a list of terms, termUnion-ing the result at each step.
Roughly equivalent to a composition of nub and map that orders the result
modulo alpha-equivalence. Not exposed to the user
-}
-- Map a function over a term list, termUnion-ing at each step so the
-- result is duplicate-free and ordered modulo alpha-equivalence.
termImage :: (HOLTerm -> HOLTerm) -> [HOLTerm] -> [HOLTerm]
termImage f = foldr (\x acc -> termUnion [f x] acc) []
-- Monadic 'termImage': effects run left-to-right, and results are folded
-- together with 'termUnion' to stay ordered and duplicate-free.
termImageM :: Monad m => (HOLTerm -> m HOLTerm) -> [HOLTerm] -> m [HOLTerm]
termImageM _ [] = return []
termImageM f (x:rest) = do
    x'    <- f x
    rest' <- termImageM f rest
    return (termUnion [x'] rest')
{-
HOL Light Theorem Primitives
-}
{-|
Destructs a theorem, returning its list of assumption terms and conclusion
term.
-}
destThm :: HOLThm -> ([HOLTerm], HOLTerm)
destThm (ThmIn assumptions conclusion) = (assumptions, conclusion)

-- | Accessor for the hypotheses, or assumption terms, of a theorem.
hyp :: HOLThm -> [HOLTerm]
hyp (ThmIn assumptions _) = assumptions

-- | Accessor for the conclusion term of a theorem.
concl :: HOLThm -> HOLTerm
concl (ThmIn _ conclusion) = conclusion
{-
HOL Light Primitive Inference Rules
-}
-- Basic Equality Rules
{-|@
t
-----------
|- t = t
@
Never fails.
-}
primREFL :: HOLTerm -> HOLThm
primREFL t = ThmIn [] (safeMkEq t t)
{-|@
A1 |- t1 = t2 A2 |- t2 = t3
-------------------------------
A1 U A2 |- t1 = t3
@
Fails with 'Left' in the following cases:
* The middle terms are not alpha-equivalent.
* One, or both, of the theorem conclusions is not an equation.
-}
primTRANS :: MonadThrow m => HOLThm -> HOLThm -> m HOLThm
primTRANS thm1@(ThmIn hs1 (CombIn eqHalf@(CombIn (TmEq _) _) mid1))
          (ThmIn hs2 (mid2 := rhs))
    -- Reuse the already-built "eq l" half of the first equation and
    -- reattach the right side of the second.
    | mid1 `aConv` mid2 =
        return . ThmIn (termUnion hs1 hs2) $ CombIn eqHalf rhs
    | otherwise =
        throwM $! HOLThmError thm1 "primTRANS: middle terms don't agree"
primTRANS thm1 _ = throwM $! HOLThmError thm1 "primTRANS: not both equations"
-- Basic Congruence Rules
{-|@
A1 |- f = g A2 |- x = y
---------------------------
A1 U A2 |- f x = g y
@
Fails with 'Left' in the following cases:
* One, or both, of the theorem conclusions is not an equation.
* The first theorem conclusion is not an equation of function terms.
* The types of the function terms and argument terms do not agree.
-}
primMK_COMB :: MonadThrow m => HOLThm -> HOLThm -> m HOLThm
primMK_COMB th@(ThmIn as1 (l1 := r1)) (ThmIn as2 (l2 := r2)) =
    -- The first equation must relate functions whose argument type agrees
    -- (up to alpha-equivalence of types) with the second equation's terms.
    case typeOf l1 of
      TyAppIn TyOpFun (ty:_:_)
          | typeOf l2 `tyAConv` ty ->
              let as' = termUnion as1 as2 in
              return . ThmIn as' $ safeMkEq (CombIn l1 l2) (CombIn r1 r2)
          | otherwise -> throwM $! HOLThmError th
                           "primMK_COMB: types do not agree"
      _ -> throwM $! HOLThmError th "primMK_COMB: not a function type"
primMK_COMB th _ = throwM $! HOLThmError th "primMK_COMB: not both equations"
{-|@
A |- t1 = t2
-------------------------------
A |- (\\ x . t1) = (\\ x . t2)
@
Fails with 'Left' in the following cases:
* The term to bind is free in the assumption list of the theorem.
* The conclusion of the theorem is not an equation.
-}
primABS :: MonadThrow m => HOLTerm -> HOLThm -> m HOLThm
primABS bv@VarIn{} thm@(ThmIn asl (lhs := rhs))
    -- Abstracting over a variable free in an assumption would be unsound.
    | any (varFreeIn bv) asl =
        throwM $! HOLThmError thm "primABS: variable is free in assumptions"
    | otherwise =
        return . ThmIn asl $ safeMkEq (AbsIn bv lhs) (AbsIn bv rhs)
primABS _ thm = throwM $! HOLThmError thm "primABS: not an equation"
-- Beta Reduction
{-|@
(\\ x . t) x
----------------------------
|- (\\ x . t) x = t
@
Fails with 'Left' in the following cases:
* The term is not a valid application.
* The reduction is not a trivial one, i.e. the argument term is not equivalent
to the bound variable.
-}
primBETA :: MonadThrow m => HOLTerm -> m HOLThm
primBETA tm@(CombIn (AbsIn bvar body) arg)
    -- Only the trivial redex (\x. t) x is reduced by the kernel.
    | arg == bvar = return . ThmIn [] $ safeMkEq tm body
    | otherwise =
        throwM $! HOLTermError tm "primBETA_PRIM: not a trivial beta reduction"
primBETA tm = throwM $! HOLTermError tm "primBETA_PRIM: not a valid application"
-- Deduction Rules
{-|@
t
-----------
t |- t
@
Fails with 'Left' if the term is not a proposition.
-}
-- | Assumption introduction: @t |- t@ for any boolean term @t@.
--
--   Fix: the failure message previously carried no reason (just
--   "primASSUME"); it now states the documented failure condition,
--   matching the style of the other primitives.
primASSUME :: MonadThrow m => HOLTerm -> m HOLThm
primASSUME tm
    | typeOf tm == tyBool = return $! ThmIn [tm] tm
    | otherwise = throwM $! HOLTermError tm "primASSUME: not a proposition"
{-|@
A1 |- t1 = t2 A2 |- t1
----------------------------
A1 U A2 |- t2
@
Fails with 'Left' in the following cases:
* The conclusion of the first theorem is not an equation.
* The conclusion term of the second theorem and the left hand side of the
equation are not alpha-equivalent.
-}
-- | Equality modus ponens: from @A1 |- t1 = t2@ and @A2 |- t1@ derives
--   @A1 U A2 |- t2@.  Alpha-equivalence of the left-hand side and the
--   second conclusion suffices.
primEQ_MP :: MonadThrow m => HOLThm -> HOLThm -> m HOLThm
primEQ_MP th@(ThmIn as1 (l := r)) (ThmIn as2 c)
    | l `aConv` c =
        let as' = termUnion as1 as2 in
          return $! ThmIn as' r
    | otherwise = throwM $! HOLThmError th "primEQ_MP: terms do not agree"
primEQ_MP th _ = throwM $! HOLThmError th "primEQ_MP: term is not an equation"
{-|@
A |- p B |- q
--------------------------------
(A - q) U (B - p) |- p \<=\> q
@
Never fails.
-}
-- | Deductive antisymmetry: from @A |- p@ and @B |- q@ derives
--   @(A - q) U (B - p) |- p \<=\> q@.  Total; never fails.
primDEDUCT_ANTISYM :: HOLThm -> HOLThm -> HOLThm
primDEDUCT_ANTISYM (ThmIn as p) (ThmIn bs q) =
    ThmIn hyps (safeMkEq p q)
  where
    -- Each side's conclusion is removed from the *other* side's
    -- assumptions before the union.
    hyps = termRemove q as `termUnion` termRemove p bs
-- Instantiation Rules
{-|@
[(ty1, tv1), ..., (tyn, tvn)] A |- t
----------------------------------------
A[ty1, ..., tyn/tv1, ..., tvn]
|- t[ty1, ..., tyn/tv1, ..., tvn]
@
Never fails.
-}
-- | Type instantiation: applies a type substitution environment to both
--   the assumptions and the conclusion of a theorem.  Total; never fails.
primINST_TYPE :: Inst a b => [(a, b)] -> HOLThm -> HOLThm
primINST_TYPE tyenv (ThmIn as t) = ThmIn (termImage go as) (go t)
  where go = inst tyenv
-- | Like 'primINST_TYPE', but instantiates via 'instFull' using a full
--   substitution triple.  Total; never fails.
primINST_TYPE_FULL :: SubstTrip -> HOLThm -> HOLThm
primINST_TYPE_FULL tyenv (ThmIn as t) = ThmIn (termImage go as) (go t)
  where go = instFull tyenv
{-|@
[(t1, x1), ..., (tn, xn)] A |- t
------------------------------------
A[t1, ..., tn/x1, ..., xn]
|- t[t1, ..., tn/x1, ..., xn]
@
Fails with 'Nothing' in the case where a bad substitution list is provided.
-}
-- | Term instantiation: applies a variable substitution environment to
--   both the assumptions and the conclusion of a theorem.  Fails when a
--   bad substitution list is provided (propagated from 'varSubst').
primINST :: MonadThrow m => HOLTermEnv -> HOLThm -> m HOLThm
primINST env (ThmIn as t) =
    -- Same effect order as before: assumptions first, then conclusion.
    ThmIn `fmap` termImageM instFun as >>= \mk -> mk `fmap` instFun t
  where
    instFun = varSubst env
{-
HOL2P Primitive Inference Rules
-}
-- Type Congruence rules
{-|@
A |- t1 = t2
-------------------------------
A |- (\\\\ x . t1) = (\\\\ x . t2)
@
Fails with 'Left' in the following cases:
* The type to bind is not a small type variable.
* The conclusion of the theorem is not an equation.
* The type to bind is free in the assumption list of the theorem.
* The type variable to bind is free in the conclusion of the theorem.
-}
-- | Type abstraction rule: from @A |- t1 = t2@ derives
--   @A |- (\\\\ x . t1) = (\\\\ x . t2)@ for a small type variable binder.
primTYABS :: MonadThrow m => HOLType -> HOLThm -> m HOLThm
-- TyVarIn True matches a *small* type variable only.
primTYABS tv@(TyVarIn True _) th@(ThmIn as (l := r))
    | tv `notElem` typeVarsInTerms as =
        -- The bound type variable may not occur in the type of any free
        -- term variable of the conclusion.
        let fvs = frees l `union` frees r in
          if any (\ x -> tv `elem` tyVars (typeOf x)) fvs
          then throwM $! HOLThmError th
                 "primTYABS: type variable is free in conclusion"
          else return . ThmIn as $ safeMkEq (TyAbsIn tv l) (TyAbsIn tv r)
    | otherwise = throwM $! HOLThmError th
                    "primTYABS: type variable is free in assumptions"
-- Small type variable, but the conclusion failed the (:=) pattern.
primTYABS (TyVarIn True _) th = throwM $! HOLThmError th
                                  "primTYABS: conclusion not an equation"
-- First argument was not a small type variable at all.
primTYABS tv _ = throwM $! HOLTypeError tv
                   "primTYABS: first argument not a small type variable"
{-|@
A |- t1 = t2
-------------------------------
A |- t1 [: ty1] = t2 [: ty2]
@
Fails with 'Left' in the following cases:
* The conclusion of the theorem is not an equation of terms of universal type.
* The type arguments are not alpha-equivalent.
* One, or both, of the type arguments is not small.
-}
-- | Type application rule: from @A |- t1 = t2@ (terms of universal type)
--   derives @A |- t1 [: ty1] = t2 [: ty2]@ for alpha-equivalent small
--   type arguments.
primTYAPP2 :: MonadThrow m => HOLType -> HOLType -> HOLThm -> m HOLThm
primTYAPP2 ty1 ty2 th@(ThmIn as (l := r))
    | ty1 `tyAConv` ty2 =
        case typeOf l of
            UTypeIn{}
                | not $ isSmall ty1 ->
                    throwM $! HOLTypeError ty1 "primTYAPP2: ty1 not small"
                | not $ isSmall ty2 ->
                    throwM $! HOLTypeError ty2 "primTYAPP2: ty2 not small"
                | otherwise ->
                    return . ThmIn as $
                      safeMkEq (TyCombIn l ty1) (TyCombIn r ty2)
            _ -> throwM $! HOLThmError th
                   "primTYAPP2: terms not of universal type"
    -- NOTE: the error is attributed to ty1 even though the mismatch
    -- involves both type arguments.
    | otherwise = throwM $! HOLTypeError ty1
                    "primTYAPP2: type arguments not alpha-convertible"
primTYAPP2 _ _ th = throwM $! HOLThmError th
                      "primTYAPP2: conclusion not an equation"
{-|@
A |- t1 = t2
----------------------------
A |- t1 [: ty] = t2 [: ty]
@
Fails with 'Nothing' if the conclusion of the theorem is not an equation.
Note that 'primTYAPP' is equivalent to 'primTYAPP2' when the same type is
applied to both sides, i.e.
@ primTYAPP ty === primTYAPP2 ty ty @
-}
-- | Type application with a single type: equivalent to
--   @primTYAPP2 ty ty@.  Fails when the conclusion is not an equation.
--
--   Fix: the failure message previously carried no reason (just
--   "primTYAPP"); it now states the documented failure condition,
--   matching 'primTYAPP2'.
primTYAPP :: MonadThrow m => HOLType -> HOLThm -> m HOLThm
primTYAPP ty thm@(ThmIn _ (_ := _)) = primTYAPP2 ty ty thm
primTYAPP _ th = throwM $! HOLThmError th
                   "primTYAPP: conclusion not an equation"
-- Type Beta Reduction
{-|@
(\\\\ ty . t[ty]) [: ty]
---------------------------------
|- (\\\\ ty . t[ty]) [: ty] = t
@
Fails with 'Left' in the following cases:
* The term is not a valid type application.
* The reduction is not a trivial one, i.e. the argument type is not equivalent
to the bound type variable.
-}
-- | Trivial type beta reduction: for a type redex @(\\\\ ty . t) [: ty]@
--   whose argument is exactly the bound type variable, derives
--   @|- (\\\\ ty . t) [: ty] = t@.
primTYBETA :: MonadThrow m => HOLTerm -> m HOLThm
primTYBETA tm@(TyCombIn (TyAbsIn tv bod) argt)
    | argt == tv =
        return . ThmIn [] $ safeMkEq tm bod
    | otherwise = throwM $! HOLTermError tm
                    "primTYBETA: not a trivial type beta reduction"
primTYBETA tm = throwM $! HOLTermError tm
                  "primTYBETA: not a valid type application"
{-
Stateless HOL Theory Extension Primitives
Note that the following primitives are in HaskHOL.Core.State as per
Stateless HOL:
axioms, newAxiom, newBasicDefinition, newBasicTypeDefinition
-}
{-|
Creates a new axiom theorem.
Note that, as discussed in the documentation for 'HOLThm', the introduction of
axioms is not tracked until the stateful layer of the system is introduced so
be careful using this function.
-}
axiomThm :: HOLTerm -> HOLThm
axiomThm tm = ThmIn [] tm -- no assumptions; axiom tracking happens later
{-|@
c = t
-----------
|- c = t
@
Creates a new defined constant given a term that equates a variable of the
desired constant name and type to its desired definition. The return value
is a pair of the new constant and its definitional theorem.
Note that internally the constant is tagged with its definitional term via the
@Defined@ 'ConstTag'.
Fails with 'Left' in the following cases:
* The provided term is not an equation.
* The provided term is not closed.
* There are free type variables present in the definition that are not also in
the desired type of the constant.
-}
-- | Introduces a defined constant from an equation @c = t@ between a
--   variable (naming/typing the new constant) and its closed definition.
--   Returns the new constant (tagged @Defined@ with the hash of its
--   definition) paired with its definitional theorem @|- c = t@.
--
--   Fix: corrected the typo "refelcted" -> "reflected" in the error
--   message for unaccounted type variables.
newDefinedConst :: MonadThrow m => HOLTerm -> m (HOLTerm, HOLThm)
newDefinedConst tm@(VarIn cname ty := r)
    | not $ freesIn [] r =
        throwM $! HOLTermError tm "newDefinedConst: not closed"
    -- Every type variable of the definition must appear in the type of
    -- the constant, otherwise instances would be ambiguous.
    | not $ typeVarsInTerm r `subset` tyVars ty =
        throwM $! HOLTermError tm
          "newDefinedConst: type vars not reflected in const"
    | otherwise =
        let c = ConstIn cname ty (DefinedIn $ hash r)
            dth = ThmIn [] $ safeMkEq c r in
          return (c, dth)
newDefinedConst tm = throwM $! HOLTermError tm
                       "newDefinedConst: not an equation"
{-|@
|- p x:rep
-----------------------------------------------------------------
(|- mk:rep->ty (dest:ty->rep a) = a, |- P r \<=\> dest(mk r) = r)
@
Creates a new defined type constant that is defined as an inhabited subset
of an existing type constant. The return value is a pentuple that
collectively provides a bijection between the new type and the old type.
The following four items are taken as input:
* The name of the new type constant - @ty@ in the above sequent.
* The name of the new term constant that will be used to make an instance of
the new type - @mk@ in the above sequent.
* The name of the new term constant that will be used to destruct an instance
of the new type - @dest@ in the above sequent.
* A theorem proving that the desired subset is non-empty. The conclusion of
this theorem must take the form @p x@ where @p@ is the predicate that
defines the subset and @x@ is a witness to inhabitation.
The following items are returned as part of the resultant pentuple:
* The new defined type operator. These type operators carry their name,
arity, and definitional theorem. The arity, in this case, is inferred from
the number of free type variables found in the predicate of the definitional
theorem.
* The new term constants, @mk@ and @dest@, as described above. Note that
constants constructed in this manner are tagged with special instances of
'ConstTag', @MkAbstract@ and @DestAbstract@ accordingly, that carry the
name, arity, and definitional theorem of their related type constant.
* The two theorems proving the bijection, as shown in the sequent above.
-}
-- | Introduces a defined type operator from an inhabitation theorem
--   @|- p x@ (theorem must have an empty assumption list).  Returns the
--   new type operator, the @mk@/@dest@ constants, and the two bijection
--   theorems described in the haddock block above.
newDefinedTypeOp :: MonadThrow m => Text -> Text -> Text -> HOLThm ->
                    m (TypeOp, HOLTerm, HOLTerm, HOLThm, HOLThm)
newDefinedTypeOp tyname absname repname th@(ThmIn [] c@(CombIn p x))
    | containsUType $ typeOf x = throwM $! HOLThmError th
        "newDefinedTypeOp: must not contain universal types"
    | not $ freesIn [] p = throwM $! HOLThmError th
        "newDefinedTypeOp: predicate is not closed"
    | otherwise =
        -- Arity is the number of (sorted) type variables of the predicate.
        let tys = sort (<=) $ typeVarsInTerm p
            arity = length tys
            -- Hash of the full conclusion tags the new operator/constants.
            hsh = hash c
            atyop = TyDefinedIn tyname arity hsh
            rty = typeOf x          -- representation (old) type
            aty = TyAppIn atyop tys -- abstract (new) type
            atm = VarIn "a" aty
            rtm = VarIn "r" rty
            absCon = ConstIn absname (TyAppIn tyOpFun [rty, aty]) $
                       MkAbstractIn tyname arity hsh
            repCon = ConstIn repname (TyAppIn tyOpFun [aty, rty]) $
                       DestAbstractIn tyname arity hsh
            c1 = CombIn absCon $ CombIn repCon atm -- mk (dest a)
            c2 = CombIn p rtm                      -- P r
            c3 = CombIn repCon $ CombIn absCon rtm -- dest (mk r)
        in
          return (atyop, absCon, repCon,
                  ThmIn [] $ safeMkEq c1 atm,
                  ThmIn [] . safeMkEq c2 $ safeMkEq c3 rtm)
newDefinedTypeOp _ _ _ th = throwM $! HOLThmError th
    "newDefinedTypeOp: poorly formed predicate"
-- Documentation copied from HaskHOL.Core.Prims
{-$ViewPatterns
The primitive data types of HaskHOL are implemented using pattern synonyms in
order to simulate private data types:
* Internal constructors are hidden to prevent manual construction of terms.
* Unidirectional pattern synonyms ('Thm', etc.) are exposed to enable
pattern matching.
-}
| ecaustin/haskhol-core | src/HaskHOL/Core/Kernel.hs | bsd-2-clause | 17,773 | 0 | 15 | 4,663 | 3,180 | 1,603 | 1,577 | 215 | 3 |
{-# LANGUAGE CPP, NoImplicitPrelude #-}
-- |
-- Module : Data.Text.ICU
-- Copyright : (c) 2010 Bryan O'Sullivan
--
-- License : BSD-style
-- Maintainer : bos@serpentine.com
-- Stability : experimental
-- Portability : GHC
--
-- Commonly used functions for Unicode, implemented as bindings to the
-- International Components for Unicode (ICU) libraries.
--
-- This module contains only the most commonly used types and
-- functions. Other modules in this package expose richer interfaces.
module Data.Text.ICU
(
-- * Data representation
-- $data
-- * Types
LocaleName(..)
-- * Boundary analysis
-- $break
, Breaker
, Break
, brkPrefix
, brkBreak
, brkSuffix
, brkStatus
, Line(..)
, Word(..)
, breakCharacter
, breakLine
, breakSentence
, breakWord
, breaks
, breaksRight
-- * Case mapping
, toCaseFold
, toLower
, toUpper
-- * Iteration
, CharIterator
, fromString
, fromText
, fromUtf8
-- * Normalization
, NormalizationMode(..)
, normalize
, quickCheck
, isNormalized
-- * String comparison
-- ** Normalization-sensitive string comparison
, CompareOption(..)
, compare
-- ** Locale-sensitive string collation
-- $collate
, Collator
, collator
, collatorWith
, collate
, collateIter
, sortKey
, uca
-- * Regular expressions
, MatchOption(..)
, ParseError(errError, errLine, errOffset)
, Match
, Regex
, Regular
-- ** Construction
, regex
, regex'
-- ** Inspection
, pattern
-- ** Searching
, find
, findAll
-- ** Match groups
-- $group
, groupCount
, unfold
, span
, group
, prefix
, suffix
-- * Spoof checking
-- $spoof
, Spoof
, SpoofParams(..)
, S.SpoofCheck(..)
, S.RestrictionLevel(..)
, S.SpoofCheckResult(..)
-- ** Construction
, spoof
, spoofWithParams
, spoofFromSource
, spoofFromSerialized
-- ** String checking
, areConfusable
, spoofCheck
, getSkeleton
-- ** Configuration
, getChecks
, getAllowedLocales
, getRestrictionLevel
-- ** Persistence
, serialize
) where
import Data.Text.ICU.Break.Pure
import Data.Text.ICU.Collate.Pure
import Data.Text.ICU.Internal
import Data.Text.ICU.Iterator
import Data.Text.ICU.Normalize
import Data.Text.ICU.Regex.Pure
import qualified Data.Text.ICU.Spoof as S
import Data.Text.ICU.Spoof.Pure
import Data.Text.ICU.Text
#if defined(__HADDOCK__)
import Data.Text.Foreign
import Data.Text (Text)
#endif
-- $data
--
-- The Haskell 'Text' type is implemented as an array in the Haskell
-- heap. This means that its location is not pinned; it may be copied
-- during a garbage collection pass. ICU, on the other hand, works
-- with strings that are allocated in the normal system heap and have
-- a fixed address.
--
-- To accommodate this need, these bindings use the functions from
-- "Data.Text.Foreign" to copy data between the Haskell heap and the
-- system heap. The copied strings are still managed automatically,
-- but the need to duplicate data does add some performance and memory
-- overhead.
-- $break
--
-- Text boundary analysis is the process of locating linguistic
-- boundaries while formatting and handling text. Examples of this
-- process include:
--
-- * Locating appropriate points to word-wrap text to fit within
-- specific margins while displaying or printing.
--
-- * Counting characters, words, sentences, or paragraphs.
--
-- * Making a list of the unique words in a document.
--
-- * Figuring out if a given range of text contains only whole words.
--
-- * Capitalizing the first letter of each word.
--
-- * Locating a particular unit of the text (For example, finding the
-- third word in the document).
--
-- The 'Breaker' type was designed to support these kinds of
-- tasks.
--
-- For the impure boundary analysis API (which is richer, but less
-- easy to use than the pure API), see the "Data.Text.ICU.Break"
-- module. The impure API supports some uses that may be less
-- efficient via the pure API, including:
--
-- * Locating the beginning of a word that the user has selected.
--
-- * Determining how far to move the text cursor when the user hits an
-- arrow key (Some characters require more than one position in the
-- text store and some characters in the text store do not display
-- at all).
-- $collate
--
-- For the impure collation API (which is richer, but less easy to
-- use than the pure API), see the "Data.Text.ICU.Collate"
-- module.
-- $group
--
-- Capturing groups are numbered starting from zero. Group zero is
-- always the entire matching text. Groups greater than zero contain
-- the text matching each capturing group in a regular expression.
-- $spoof
--
-- The 'Spoof' type performs security checks on visually confusable
-- (spoof) strings. For the impure spoof checking API (which is
-- richer, but less easy to use than the pure API), see the
-- "Data.Text.ICU.Spoof" module.
--
-- See <http://unicode.org/reports/tr36/ UTR #36> and
-- <http://unicode.org/reports/tr39/ UTS #39> for detailed information
-- about the underlying algorithms and databases used by these functions.
| bos/text-icu | Data/Text/ICU.hs | bsd-2-clause | 5,303 | 2 | 5 | 1,199 | 459 | 350 | 109 | 82 | 0 |
module Propellor.Property.Postfix where
import Propellor
import qualified Propellor.Property.Apt as Apt
-- | Ensures the postfix package is installed and its service is running.
installed :: Property
installed = Apt.serviceInstalledRunning "postfix"

-- | Configures postfix as a satellite system, which
-- relays all mail through a relay host, which defaults to smtp.domain.
--
-- The smarthost may refuse to relay mail on to other domains, without
-- further configuration/keys. But this should be enough to get cron job
-- mail flowing to a place where it will be seen.
satellite :: Property
satellite = setup `requires` installed
  where
    setup = trivial $ property "postfix satellite system" $ do
        hn <- asks hostName
        -- Preseed debconf so reconfiguration selects relay-only mode,
        -- with the host's own name as the mailname.
        ensureProperty $ Apt.reConfigure "postfix"
            [ ("postfix/main_mailer_type", "select", "Satellite system")
            , ("postfix/root_address", "string", "root")
            , ("postfix/destinations", "string", " ")
            , ("postfix/mailname", "string", hn)
            ]
| abailly/propellor-test2 | src/Propellor/Property/Postfix.hs | bsd-2-clause | 899 | 4 | 13 | 148 | 150 | 91 | 59 | 14 | 1 |
module Drasil.GlassBR.Assumptions (assumpGT, assumpGC, assumpES, assumpSV,
assumpGL, assumpBC, assumpRT, assumpLDFC, assumptionConstants,
assumptions) where
import Language.Drasil hiding (organization)
import qualified Drasil.DocLang.SRS as SRS (valsOfAuxCons)
import Utils.Drasil
import Data.Drasil.Concepts.Documentation as Doc (assumpDom, condition,
constant, practice, reference, scenario, system, value)
import Data.Drasil.Concepts.Math (calculation, surface, shape)
import Data.Drasil.Concepts.PhysicalProperties (materialProprty)
import Drasil.GlassBR.Concepts (beam, cantilever, edge, glaSlab, glass, glassBR,
lShareFac, plane, responseTy)
import Drasil.GlassBR.References (astm2009)
import Drasil.GlassBR.Unitals (constantK, constantLoadDur,
constantLoadSF, constantM, constantModElas, explosion, lateral, lDurFac,
loadDur)
-- All assumptions collected for the GlassBR SRS, in presentation order.
assumptions :: [ConceptInstance]
assumptions = [assumpGT, assumpGC, assumpES, assumpSV, assumpGL, assumpBC,
  assumpRT, assumpLDFC]

-- Auxiliary constants whose assumed values are listed in the SRS
-- "Values of Auxiliary Constants" section (referenced by assumpSV).
assumptionConstants :: [QDefinition]
assumptionConstants = [constantM, constantK, constantModElas,
  constantLoadDur, constantLoadSF]

assumpGT, assumpGC, assumpES, assumpSV, assumpGL, assumpBC, assumpRT, assumpLDFC :: ConceptInstance
-- Each assumption pairs a description sentence (defined below) with a
-- stable reference label in the assumption domain.
assumpGT = cic "assumpGT" glassTypeDesc "glassType" Doc.assumpDom
assumpGC = cic "assumpGC" glassConditionDesc "glassCondition" Doc.assumpDom
assumpES = cic "assumpES" explainScenarioDesc "explainScenario" Doc.assumpDom
assumpSV = cic "assumpSV" (standardValuesDesc loadDur) "standardValues" Doc.assumpDom
assumpGL = cic "assumpGL" glassLiteDesc "glassLite" Doc.assumpDom
assumpBC = cic "assumpBC" boundaryConditionsDesc "boundaryConditions" Doc.assumpDom
assumpRT = cic "assumpRT" responseTypeDesc "responseType" Doc.assumpDom
assumpLDFC = cic "assumpLDFC" (ldfConstantDesc lDurFac) "ldfConstant" Doc.assumpDom
-- Assumption text: the E1300-09a standard's applicability conditions.
glassTypeDesc :: Sentence
glassTypeDesc = foldlSent [S "The standard E1300-09a for",
  phrase calculation, S "applies only to", foldlList Comma Options $ map S ["monolithic",
  "laminated", "insulating"], S "glass constructions" `sOf` S "rectangular", phrase shape,
  S "with continuous", phrase lateral, S "support along",
  foldlList Comma Options (map S ["one", "two", "three", "four"]) +:+.
  plural edge, S "This", phrase practice +: S "assumes that",
  foldlEnumList Numb Parens SemiCol List $ map foldlSent_
  [[S "the supported glass", plural edge, S "for two, three" `sAnd`
  S "four-sided support", plural condition, S "are simply supported" `sAnd`
  S "free to slip in", phrase plane],
  [S "glass supported on two sides acts as a simply supported", phrase beam],
  [S "glass supported on one side acts as a", phrase cantilever]]]

-- Assumption text: surface/edge treatments excluded by the standard.
glassConditionDesc :: Sentence
glassConditionDesc = foldlSent [S "Following", makeCiteInfoS astm2009 (Page [1]) `sC`
  S "this", phrase practice, S "does not apply to any form of", foldlList Comma Options $ map S ["wired",
  "patterned", "etched", "sandblasted", "drilled", "notched", "grooved glass"], S "with",
  phrase surface `sAnd` S "edge treatments that alter the glass strength"]

-- Assumption text: only the external explosion scenario is modelled.
explainScenarioDesc :: Sentence
explainScenarioDesc = foldlSent [S "This", phrase system, S "only considers the external",
  phrase explosion, phrase scenario, S "for its", plural calculation]

-- Assumption text: standard values are assumed for the given quantity
-- and the first three assumption constants.
standardValuesDesc :: UnitaryChunk -> Sentence
standardValuesDesc mainIdea = foldlSent [S "The", plural value, S "provided in",
  makeRef2S $ SRS.valsOfAuxCons ([]::[Contents]) ([]::[Section]), S "are assumed for the", phrase mainIdea,
  sParen (ch mainIdea) `sC` S "and the", plural materialProprty `sOf`
  foldlList Comma List (map ch (take 3 assumptionConstants))]

-- Assumption text: single glass lite, so the load share factor is 1.
glassLiteDesc :: Sentence
glassLiteDesc = foldlSent [atStart glass, S "under consideration is assumed to be a single",
  S "lite; hence, the", phrase value `sOf` short lShareFac, S "is equal to 1 for all",
  plural calculation `sIn` short glassBR]

-- Assumption text: four-sided support boundary conditions.
boundaryConditionsDesc :: Sentence
boundaryConditionsDesc = foldlSent [S "Boundary", plural condition, S "for the",
  phrase glaSlab, S "are assumed to be 4-sided support for",
  plural calculation]

-- Assumption text: only the flexural response type is considered.
responseTypeDesc :: Sentence
responseTypeDesc = foldlSent [S "The", phrase responseTy, S "considered in",
  short glassBR, S "is flexural"]

-- Assumption text: the given quantity (the load duration factor) is a
-- constant, following the standard values assumption.
ldfConstantDesc :: QuantityDict -> Sentence
ldfConstantDesc mainConcept = foldlSent [S "With", phrase reference, S "to",
  makeRef2S assumpSV `sC` S "the", phrase value `sOf`
  phrase mainConcept, sParen (ch mainConcept), S "is a", phrase constant,
  S "in", short glassBR]
| JacquesCarette/literate-scientific-software | code/drasil-example/Drasil/GlassBR/Assumptions.hs | bsd-2-clause | 4,752 | 0 | 12 | 868 | 1,275 | 708 | 567 | 73 | 1 |
module Language.Drasil.Sentence.Extract(sdep, shortdep, lnames, lnames') where
import Data.List (nub)
import Language.Drasil.UID (UID)
import Language.Drasil.Sentence(Sentence(..), SentenceStyle(..))
import Language.Drasil.RefProg (Reference(Reference))
import Language.Drasil.Expr.Extract(names)
-- | Generic traverse of all positions that could lead to UIDs from sentences
-- | Collects UIDs from every sentence position that can carry one,
--   descending into concatenations, quotes, and expressions.  Only
--   symbol-styled chunks contribute their own UID directly.
getUIDs :: Sentence -> [UID]
getUIDs (Ch SymbolStyle a) = [a]
getUIDs (Ch ShortStyle _) = []
getUIDs (Ch TermStyle _) = []
getUIDs (Ch PluralTerm _) = []
getUIDs (Sy _) = []
getUIDs (S _) = []
getUIDs (P _) = []
getUIDs (Ref _) = []
getUIDs Percent = []
getUIDs ((:+:) a b) = getUIDs a ++ getUIDs b
getUIDs (Quote a) = getUIDs a
getUIDs (E a) = names a -- expressions are traversed via Expr.Extract
getUIDs EmptyS = []
-- | Generic traverse of all positions that could lead to UIDs from sentences
-- but don't go into expressions.
-- | Like 'getUIDs', but collects only abbreviation-styled (ShortStyle)
--   chunks and does not descend into expressions.
getUIDshort :: Sentence -> [UID]
getUIDshort (Ch ShortStyle a) = [a]
getUIDshort (Ch SymbolStyle _) = []
getUIDshort (Ch TermStyle _) = []
getUIDshort (Ch PluralTerm _) = []
getUIDshort (Sy _) = []
getUIDshort (S _) = []
getUIDshort Percent = []
getUIDshort (P _) = []
getUIDshort (Ref _) = []
getUIDshort ((:+:) a b) = getUIDshort a ++ getUIDshort b
getUIDshort (Quote a) = getUIDshort a
getUIDshort (E _) = [] -- deliberately skips expressions
getUIDshort EmptyS = []
-----------------------------------------------------------------------------
-- And now implement the exported traversals all in terms of the above
-- This is to collect UID who is printed out as a Symbol
-- | Deduplicated UIDs of chunks printed as symbols in a sentence.
sdep :: Sentence -> [UID]
sdep = nub . getUIDs
-- | Deduplicated UIDs of chunks printed as abbreviations in a sentence.
shortdep :: Sentence -> [UID]
shortdep = nub . getUIDshort
-- | Generic traverse of all positions that could lead to reference UID from sentences
-- | Collects reference UIDs ('Ref' nodes) from a sentence, descending
--   only into concatenations.
lnames :: Sentence -> [UID]
lnames (Ch _ _) = []
lnames (Sy _) = []
lnames (S _) = []
lnames Percent = []
lnames (P _) = []
lnames (Ref (Reference u _ _ _)) = [u] -- This should be fixed.
lnames ((:+:) a b) = lnames a ++ lnames b
lnames (Quote _) = [] -- quoted content is not searched for references
lnames (E _) = []
lnames EmptyS = []
-- | 'lnames' lifted over a list of sentences.
lnames' :: [Sentence] -> [UID]
lnames' = concatMap lnames
| JacquesCarette/literate-scientific-software | code/drasil-lang/Language/Drasil/Sentence/Extract.hs | bsd-2-clause | 2,390 | 0 | 9 | 647 | 814 | 434 | 380 | 51 | 1 |
-- | Helpers for testing
module Tests.Helpers (
-- * helpers
T(..)
, typeName
, eq
, eqC
, (=~)
-- * Generic QC tests
, monotonicallyIncreases
, monotonicallyIncreasesIEEE
-- * HUnit helpers
, testAssertion
, testEquality
) where
import Data.Complex
import Data.Typeable
import qualified Numeric.IEEE as IEEE
import qualified Test.HUnit as HU
import Test.Framework
import Test.Framework.Providers.HUnit
import Numeric.MathFunctions.Constants
----------------------------------------------------------------
-- Helpers
----------------------------------------------------------------
-- | Phantom typed value used to select right instance in QC tests
-- | Phantom-typed tag used to select the right instance in QC tests.
data T a = T

-- | Render the name of the type carried by a 'T' tag.
typeName :: Typeable a => T a -> String
typeName tag = show (typeOf (param tag))
  where
    -- Never evaluated; only its type is inspected.
    param :: T a -> a
    param _ = undefined
-- | Approximate equality for 'Double'. Doesn't work well for numbers
-- which are almost zero.
-- | Approximate equality for 'Double' with a relative tolerance.
--   Exact zero on both sides compares equal; otherwise the absolute
--   difference is bounded by @eps@ times the larger magnitude.  Doesn't
--   work well for numbers which are almost zero.
eq :: Double -- ^ Relative error
   -> Double -> Double -> Bool
eq eps a b =
    (a == 0 && b == 0) || abs (a - b) <= eps * max (abs a) (abs b)
-- | Approximate equality for 'Complex Double'
-- | Approximate equality for 'Complex Double': both the real and the
--   imaginary parts must agree within @eps@ times the larger magnitude.
eqC :: Double -- ^ Relative error
    -> Complex Double
    -> Complex Double
    -> Bool
eqC eps a@(ar :+ ai) b@(br :+ bi) =
    (a == 0 && b == 0) || (abs (ar - br) <= tol && abs (ai - bi) <= tol)
  where
    -- magnitude x == realPart (abs x) for Complex Double.
    tol = eps * max (magnitude a) (magnitude b)
-- | Approximately equal up to 1 ulp
-- | Approximate equality with relative tolerance 'm_epsilon' (one ulp).
(=~) :: Double -> Double -> Bool
(=~) = eq m_epsilon
----------------------------------------------------------------
-- Generic QC
----------------------------------------------------------------
-- Check that function is nondecreasing
-- | Checks that a function is nondecreasing: the value at the smaller
--   of the two sample points must not exceed the value at the larger.
monotonicallyIncreases :: (Ord a, Ord b) => (a -> b) -> a -> a -> Bool
monotonicallyIncreases f x1 x2 = f lo <= f hi
  where
    lo = min x1 x2
    hi = max x1 x2
-- Check that function is nondecreasing taking rounding errors into
-- account.
--
-- In fact funstion is allowed to decrease less than one ulp in order
-- to guard againist problems with excess precision. On x86 FPU works
-- with 80-bit numbers but doubles are 64-bit so rounding happens
-- whenever values are moved from registers to memory
-- | Nondecreasing check that tolerates a decrease of less than one ulp
--   (see the rationale in the comment block above).
monotonicallyIncreasesIEEE :: (Ord a, IEEE.IEEE b) => (a -> b) -> a -> a -> Bool
monotonicallyIncreasesIEEE f x1 x2 =
    y1 <= y2 || (y1 - y2) < y2 * IEEE.epsilon
  where
    y1 = f (min x1 x2)
    y2 = f (max x1 x2)
----------------------------------------------------------------
-- HUnit helpers
----------------------------------------------------------------
-- | Wraps a boolean check as a named HUnit test case.
testAssertion :: String -> Bool -> Test
testAssertion str cont = testCase str $ HU.assertBool str cont

-- | Wraps an equality check as a named HUnit test case; @a@ is the
--   expected value, @b@ the actual one (HUnit argument order).
testEquality :: (Show a, Eq a) => String -> a -> a -> Test
testEquality msg a b = testCase msg $ HU.assertEqual msg a b
| 00tau/statistics | tests/Tests/Helpers.hs | bsd-2-clause | 2,899 | 0 | 13 | 645 | 732 | 400 | 332 | 49 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
module WaiApp (fileCgiApp) where
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as BS
import Network.HTTP.Types (preconditionFailed412, movedPermanently301, urlDecode, badRequest400)
import Network.Wai (Application, responseLBS)
import Network.Wai.Internal
import Network.Wai.Application.Classic
import Program.Mighty
data Perhaps a = Found a | Redirect | Fail
-- | Dispatches an incoming request to the matching application (static
--   file, redirect, CGI, or reverse proxy) according to the route
--   database.  Paths containing a dot-file component are rejected with
--   400; an unmatched host yields 412; a mount point hit without its
--   trailing slash yields a 301 redirect.
fileCgiApp :: ClassicAppSpec -> FileAppSpec -> CgiAppSpec -> RevProxyAppSpec
           -> RouteDBRef -> Application
fileCgiApp cspec filespec cgispec revproxyspec rdr req respond
  | dotFile = do
      -- Hidden (dot) path components are never served.
      let st = badRequest400
      fastResponse respond st defaultHeader "Bad Request\r\n"
  | otherwise = do
      um <- readRouteDBRef rdr
      case mmp um of
          Fail -> do
              -- No route in the host's block matched the path.
              let st = preconditionFailed412
              fastResponse respond st defaultHeader "Precondition Failed\r\n"
          Redirect -> do
              let st = movedPermanently301
                  hdr = defaultHeader ++ redirectHeader req'
              fastResponse respond st hdr "Moved Permanently\r\n"
          Found (RouteFile src dst) ->
              fileApp cspec filespec (FileRoute src dst) req' respond
          Found (RouteRedirect src dst) ->
              redirectApp cspec (RedirectRoute src dst) req' respond
          Found (RouteCGI src dst) ->
              cgiApp cspec cgispec (CgiRoute src dst) req' respond
          Found (RouteRevProxy src dst dom prt) ->
              revProxyApp cspec revproxyspec
                  (RevProxyRoute src dst dom (naturalToInt prt)) req respond
  where
    (host, _) = hostPort req
    rawpath = rawPathInfo req
    -- Percent-decode once; the decoded path drives routing and is also
    -- written back into the request passed downstream.
    path = urlDecode False rawpath
    -- Dot-file check runs on the *raw* (undecoded) path.
    dotFile = BS.isPrefixOf "." rawpath || BS.isInfixOf "/." rawpath
    mmp um = case getBlock host um of
        Nothing -> Fail
        Just blk -> getRoute path blk
    fastResponse resp st hdr body = resp $ responseLBS st hdr body
    defaultHeader = [("Content-Type", "text/plain")]
    req' = req { rawPathInfo = path } -- FIXME
-- | Picks the first route block whose domain list matches the request
--   host; the wildcard domain @"*"@ matches any host.
getBlock :: ByteString -> RouteDB -> Maybe [Route]
getBlock _ [] = Nothing
getBlock key (Block doms maps : rest)
  | "*" `elem` doms || key `elem` doms = Just maps
  | otherwise                          = getBlock key rest
-- | Finds the first route whose source path prefixes the request path.
--   'Redirect' is returned when the path names a mount point but lacks
--   the trailing slash.
getRoute :: ByteString -> [Route] -> Perhaps Route
getRoute _ [] = Fail
getRoute key (m:ms)
  | src `isPrefixOf` key = Found m
  | src `isMountPointOf` key = Redirect
  | otherwise = getRoute key ms
  where
    src = routeSource m
-- | Extracts the source path every route variant carries first.
routeSource :: Route -> Src
routeSource (RouteFile src _) = src
routeSource (RouteRedirect src _) = src
routeSource (RouteCGI src _) = src
routeSource (RouteRevProxy src _ _ _) = src
-- | Does the route source path prefix the request path?
isPrefixOf :: Path -> ByteString -> Bool
isPrefixOf = BS.isPrefixOf
-- | True when the request path is exactly the route source minus its
--   trailing path separator (the mount-point redirect case).
isMountPointOf :: Path -> ByteString -> Bool
isMountPointOf src key =
    hasTrailingPathSeparator src
    && BS.length src == BS.length key + 1
    && key `BS.isPrefixOf` src
| kazu-yamamoto/mighttpd2 | src/WaiApp.hs | bsd-3-clause | 3,022 | 0 | 17 | 784 | 938 | 476 | 462 | 69 | 7 |
-- | Infinite list of @f 1, f 2, f 3, ...@.
gen :: (Double -> Double) -> [Double]
gen f = map f [1..]
-- | Infinite ascending list of pentagonal numbers: P(n) = n(3n-1)/2.
pentagonals :: [Double]
pentagonals = [n * (3 * n - 1) / 2 | n <- [1..]]
-- | Infinite ascending list of hexagonal numbers: H(n) = n(2n-1).
hexagonals :: [Double]
hexagonals = [n * (2 * n - 1) | n <- [1..]]
-- | Merge-style intersection of two ascending (possibly infinite) lists.
--
--   Fixes: (1) the signature was fixed to @[Integer]@, which made the
--   call site @common_elems pentagonals hexagonals@ (both @[Double]@)
--   ill-typed — it is now polymorphic over 'Ord'; (2) the base cases
--   returned the non-empty list, but the intersection with an empty
--   list is empty.  Neither base case is reachable for the infinite
--   inputs used in this program.
common_elems :: Ord a => [a] -> [a] -> [a]
common_elems [] _ = []
common_elems _ [] = []
common_elems (x:xs) (y:ys)
  | x == y = x : common_elems xs ys
  | x < y = common_elems xs (y:ys)
  | otherwise = common_elems (x:xs) ys
-- | Prints the numbers that are both pentagonal and hexagonal.
main :: IO ()
main = print (common_elems pentagonals hexagonals)
module Data.MultiProto.Protobuf.DecoderSpec
(main, spec) where
import Data.MultiProto.Protobuf.Decoder
import Data.MultiProto.Protobuf.Encoder
import Control.Applicative
import Data.Serialize
import Test.Hspec
import Test.Hspec.QuickCheck
import Test.QuickCheck
import qualified Data.ByteString as ByteString
main :: IO ()
main = hspec spec

-- | Round-trip properties between the protobuf encoder and decoder.
spec :: Spec
spec = do
  describe "varint" $ do
    -- Positive integers survive a varint encode/decode round trip.
    prop "decode . encode == id" $ do
      n <- arbitrary `suchThat` (> 0)
      return $ runGet varintInteger (runPut (putVarint n)) == Right (n :: Integer)
  describe "field" $ do
    -- A field label (tag number + wire type) round-trips.
    prop "decode . encode == id" $ do
      n <- arbitrary `suchThat` (> 0)
      t <- elements [Varint, Bit64, LengthDelimited, Bit32]
      return $ runGet field (runPut (putFieldLabel n t)) == Right (n, t)
  describe "putLengthDelimited" $ do
    -- Arbitrary byte strings round-trip through length-delimited encoding.
    prop "decode . encode == id" $ do
      b <- ByteString.pack <$> arbitrary
      return $ runGet parseLengthDelimited (runPut (putLengthDelimited b)) == Right b
| intolerable/multiproto | test/Data/MultiProto/Protobuf/DecoderSpec.hs | bsd-3-clause | 991 | 0 | 19 | 200 | 328 | 171 | 157 | 27 | 1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
module Duckling.TimeGrain.DE.Rules
( rules ) where
import Data.Text (Text)
import Prelude
import Data.String
import Duckling.Dimensions.Types
import qualified Duckling.TimeGrain.Types as TG
import Duckling.Types
-- | (rule name, matching regex, canonical grain) triples for German.
--   The regexes allow optional plural/case endings, e.g. "tage?n?"
--   matches "tag", "tage", and "tagen".
grains :: [(Text, String, TG.Grain)]
grains = [ ("second (grain)", "sekunden?", TG.Second)
         , ("minute (grain)", "minuten?", TG.Minute)
         , ("hour (grain)", "stunden?", TG.Hour)
         , ("day (grain)", "tage?n?", TG.Day)
         , ("week (grain)", "wochen?", TG.Week)
         , ("month (grain)", "monate?n?", TG.Month)
         , ("quarter (grain)", "quartale?", TG.Quarter)
         , ("year (grain)", "jahre?n?", TG.Year)
         ]
-- | One parser rule per entry of 'grains': the regex match produces a
--   'TimeGrain' token carrying the canonical grain.
rules :: [Rule]
rules = map toRule grains
  where
    toRule (name, regexPattern, grain) = Rule
        { name = name
        , pattern = [regex regexPattern]
        , prod = const (Just (Token TimeGrain grain))
        }
| rfranek/duckling | Duckling/TimeGrain/DE/Rules.hs | bsd-3-clause | 1,240 | 0 | 11 | 277 | 272 | 173 | 99 | 25 | 1 |
module Signal.Wavelet.List.CommonTest where
import Control.Arrow ((&&&))
import Test.HUnit (Assertion, (@=?))
import Test.QuickCheck (Property, forAll, sized, choose)
import Signal.Wavelet.List.Common
import Test.ArbitraryInstances (DwtInputList(..))
import Test.Data.Wavelet as DW
import Test.Utils ((=~), (@=~?))
-- | Applying the lattice structure to a signal must give the expected
-- output (approximate equality for doubles).
testLattice :: ((Double, Double), [Double], [Double]) -> Assertion
testLattice (op, signal, want) = want @=~? latticeSeq op signal

-- | Shared test vectors for 'testLattice'.
dataLattice :: [((Double, Double), [Double], [Double])]
dataLattice = DW.dataLattice
-- | Applying the same lattice operation twice is (approximately) the
-- identity on the signal.
propDoubleLatticeIdentity :: DwtInputList -> Bool
propDoubleLatticeIdentity (DwtInputList (angles, signal)) =
    latticeSeq op (latticeSeq op signal) =~ signal
  where
    op = (sin &&& cos) (head angles)
-- | 'extendFront' must prepend the expected samples for the requested length.
testExtendFront :: (Int, [Double], [Double]) -> Assertion
testExtendFront (len, signal, want) = want @=~? extendFront len signal

-- | Test vectors for 'testExtendFront'.
dataExtendFront :: [(Int, [Double], [Double])]
dataExtendFront = DW.dataExtendFront

-- | 'extendEnd' must append the expected samples for the requested length.
testExtendEnd :: (Int, [Double], [Double]) -> Assertion
testExtendEnd (len, signal, want) = want @=~? extendEnd len signal

-- | Test vectors for 'testExtendEnd'.
dataExtendEnd :: [(Int, [Double], [Double])]
dataExtendEnd = DW.dataExtendEnd
-- | Cyclic shift left must produce exactly the expected list.
testCsl :: ([Double], [Double]) -> Assertion
testCsl (signal, want) = want @=? csl signal

-- | Test vectors for 'testCsl'.
dataCsl :: [([Double], [Double])]
dataCsl = DW.dataCsl

-- | Cyclic shift right must produce exactly the expected list.
testCsr :: ([Double], [Double]) -> Assertion
testCsr (signal, want) = want @=? csr signal

-- | Test vectors for 'testCsr'.
dataCsr :: [([Double], [Double])]
dataCsr = DW.dataCsr
-- | N-fold cyclic shift left must produce exactly the expected list.
testCslN :: (Int, [Double], [Double]) -> Assertion
testCslN (count, signal, want) = want @=? cslN count signal

-- | Test vectors for 'testCslN'.
dataCslN :: [(Int, [Double], [Double])]
dataCslN = DW.dataCslN

-- | N-fold cyclic shift right must produce exactly the expected list.
testCsrN :: (Int, [Double], [Double]) -> Assertion
testCsrN (count, signal, want) = want @=? csrN count signal

-- | Test vectors for 'testCsrN'.
dataCsrN :: [(Int, [Double], [Double])]
dataCsrN = DW.dataCsrN
-- | Shifting right then left is the identity.
propIdentityShift1 :: [Double] -> Bool
propIdentityShift1 signal = csl (csr signal) == signal

-- | Shifting left then right is the identity.
propIdentityShift2 :: [Double] -> Bool
propIdentityShift2 signal = csr (csl signal) == signal
-- | For any shift count n >= 1, right-shifting n then left-shifting n
-- is the identity.
propIdentityShift3 :: [Double] -> Property
propIdentityShift3 signal =
    forAll shiftCount $ \n -> cslN n (csrN n signal) == signal
  where
    shiftCount = sized (\bound -> choose (1, bound))

-- | For any shift count n >= 1, left-shifting n then right-shifting n
-- is the identity.
propIdentityShift4 :: [Double] -> Property
propIdentityShift4 signal =
    forAll shiftCount $ \n -> csrN n (cslN n signal) == signal
  where
    shiftCount = sized (\bound -> choose (1, bound))
-- | Left-shifting by the full length of the list is the identity.
propIdentityShift5 :: [Double] -> Bool
propIdentityShift5 signal = cslN (length signal) signal == signal

-- | Right-shifting by the full length of the list is the identity.
propIdentityShift6 :: [Double] -> Bool
propIdentityShift6 signal = csrN (length signal) signal == signal
-- | Inverting twice is the identity.
propLatticeInverseInverse :: [Double] -> Bool
propLatticeInverseInverse signal = inv (inv signal) == signal

-- | Degrees -> radians -> degrees round-trips (approximately).
propDegRadInvertible :: [Double] -> Bool
propDegRadInvertible angles = toDeg (toRad angles) =~ angles

-- | Radians -> degrees -> radians round-trips (approximately).
propRadDegInvertible :: [Double] -> Bool
propRadDegInvertible angles = toRad (toDeg angles) =~ angles
| jstolarek/lattice-structure-hs | tests/Signal/Wavelet/List/CommonTest.hs | bsd-3-clause | 2,872 | 0 | 11 | 545 | 1,100 | 632 | 468 | 71 | 1 |
module Game.Poker.Hands
( Hand
, toHand, fromHand
, PokerHand(..)
, pokerHand
----
-- hint
, straightHint
, flushHint
, nOfKindHint
----
-- hand
, straightFlush
, fourOfAKind
, fullHouse
, flush
, straight
, threeOfAKind
, twoPair
, onePair
----
, DiscardList
, Deck
, getHand
, drawHand
, getDiscardList
, judgeVictory
) where
import Game.Poker.Cards
import Data.List
import Safe
import Control.Monad
import Control.Applicative
import Data.Char
-- | A five-card poker hand; the card list is kept sorted (see 'toHand').
newtype Hand = Hand { fromHand :: [Card] } deriving (Show, Eq, Ord)

-- | Smart constructor: succeeds only for exactly five cards, sorting
-- them on the way in.
toHand :: [Card] -> Maybe Hand
toHand cards
  | length cards == 5 = Just (Hand (sort cards))
  | otherwise         = Nothing
-- | Classify a hand: try each specialised detector (strongest first);
-- if none matches, it is 'HighCards' with the top card of the sorted hand.
pokerHand :: Hand -> (PokerHand, Card)
pokerHand h@(Hand cards) =
    case msum (map ($ h) detectors) of
      Just result -> result
      Nothing     -> (HighCards, last cards)
  where
    detectors :: [Hand -> Maybe (PokerHand, Card)]
    detectors =
      [ straightFlush
      , fourOfAKind
      , fullHouse
      , flush
      , straight
      , threeOfAKind
      , twoPair
      , onePair
      ]
-------
-- | Poker hand categories, listed from weakest ('HighCards') to
-- strongest ('StraightFlush') so the derived 'Ord'/'Enum' order matches
-- hand strength.
data PokerHand
  = HighCards
  | OnePair
  | TwoPair
  | ThreeOfAKind
  | Straight
  | Flush
  | FullHouse
  | FourOfAKind
  | StraightFlush
  deriving (Show, Read, Eq, Ord, Enum)
-------
-- Hint
-- | If the hand is a straight, return its highest card.  The run check
-- is tried on two orderings — by 'cardStrength' and by sorted
-- 'cardNumber' — presumably so the ace can sit at either end of a run;
-- confirm against Game.Poker.Cards.
straightHint :: Hand -> Maybe Card
straightHint (Hand cards) =
    highCardOfRun (extract cardStrength cards)
    `mplus`
    highCardOfRun (sort (extract cardNumber cards))
  where
    highCardOfRun :: [(Int, Card)] -> Maybe Card
    highCardOfRun pairs
      | consecutive (map fst pairs) = Just (snd (last pairs))
      | otherwise                   = Nothing
    -- Five consecutive values starting at the first element.
    consecutive :: [Int] -> Bool
    consecutive ns@(n:_) = ns == [n .. n + 4]
    consecutive _        = False
-- | If every card shares the suit of the first card, return the last
-- (highest, since 'toHand' keeps hands sorted) card; otherwise 'Nothing'.
flushHint :: Hand -> Maybe Card
flushHint (Hand (x:xs))
  | all ((cardSuit x ==) . cardSuit) xs = Just (last xs)
  | otherwise                           = Nothing
-- Defensive case: 'toHand' only builds five-card hands, but the original
-- pattern match was partial and would crash on an empty hand.
flushHint _ = Nothing
-- | Groups of exactly @n@ cards of equal number (pairs, triples, ...).
-- 'Nothing' when there is no such group.
nOfKindHint :: Int -> Hand -> Maybe [[Card]]
nOfKindHint n (Hand cards)
  | null groups = Nothing
  | otherwise   = Just groups
  where
    groups :: [[Card]]
    -- Adjacent equal numbers group together because the hand is sorted.
    groups = filter ((== n) . length)
           $ groupBy (\a b -> cardNumber a == cardNumber b) cards
-------
-- PokerHand
-- | A straight flush: both a straight and a flush; the stronger of the
-- two hint cards is reported.
straightFlush :: Hand -> Maybe (PokerHand, Card)
straightFlush h =
    (\s f -> (StraightFlush, max s f)) <$> straightHint h <*> flushHint h
-- | Four cards of the same number; reports the highest card of the group.
fourOfAKind :: Hand -> Maybe (PokerHand, Card)
fourOfAKind h =
    (\groups -> (FourOfAKind, last (concat groups))) <$> nOfKindHint 4 h
-- | A full house: a triple plus a pair; the representative card comes
-- from the triple.
fullHouse :: Hand -> Maybe (PokerHand, Card)
fullHouse h = do
  triple <- nOfKindHint 3 h
  _pair  <- nOfKindHint 2 h   -- only required to exist
  return (FullHouse, last (concat triple))
-- | A flush: all five cards share a suit.
flush :: Hand -> Maybe (PokerHand, Card)
flush h = (,) Flush <$> flushHint h

-- | A straight: five consecutive card values.
straight :: Hand -> Maybe (PokerHand, Card)
straight h = (,) Straight <$> straightHint h
-- | Three cards of the same number; reports the highest card of the group.
threeOfAKind :: Hand -> Maybe (PokerHand, Card)
threeOfAKind h =
    (\groups -> (ThreeOfAKind, last (concat groups))) <$> nOfKindHint 3 h
-- | Exactly two distinct pairs; reports the highest card among them.
twoPair :: Hand -> Maybe (PokerHand, Card)
twoPair h = do
  pairs <- nOfKindHint 2 h
  case pairs of
    [_, _] -> Just (TwoPair, last (concat pairs))
    _      -> Nothing
-- | At least one pair; reports the highest card among the pair groups.
onePair :: Hand -> Maybe (PokerHand, Card)
onePair h =
    (\groups -> (OnePair, last (concat groups))) <$> nOfKindHint 2 h
-----------
type DiscardList = [Card] -- ^ Cards the player chose to throw away (discard pile)
type Deck = [Card] -- ^ The draw pile
-- | Deal the top five cards of the deck as a hand, returning the rest
-- of the deck alongside.  Fails when fewer than five cards remain.
getHand :: Deck -> Maybe (Hand, Deck)
getHand deck =
  let (dealt, rest) = splitAt 5 deck
  in (\hand -> (hand, rest)) <$> toHand dealt
-- | Replace discarded cards: keep the cards of the hand that are not in
-- the discard list, refill up to five from the top of the deck, and
-- return the new hand together with the remaining deck.
drawHand :: Deck -> DiscardList -> Hand -> Maybe (Hand, Deck)
drawHand deck dis h = let
  -- cards the player keeps
  nl = filter (flip notElem dis) (fromHand h)
  -- deck after removing exactly the number of replacement cards drawn
  nr = drop (5 - length nl) deck
  in (,) <$> toHand (take 5 $ nl ++ deck) <*> Just nr
-- | Read a line of 1-based digit indexes from stdin and translate it
-- into the corresponding cards of the hand.  Yields 'Nothing' when the
-- input contains a non-digit or an index is out of range.
getDiscardList :: Hand -> IO (Maybe DiscardList)
getDiscardList h = do
  input <- getLine
  -- 'toIntList' validates the digits; 'selectByIndexes' resolves them.
  return (toIntList input >>= selectByIndexes (fromHand h))
-- | Compare two evaluated hands: first by hand category, then by the
-- strength of the representative card.
judgeVictory :: (PokerHand, Card) -> (PokerHand, Card) -> Ordering
judgeVictory left right = compare (key left) (key right)
  where
    key :: (PokerHand, Card) -> (PokerHand, Int)
    key (ph, c) = (ph, cardStrength c)
------
-- helper
-- | Decorate each element with its key: @extract f xs@ pairs @f x@ with @x@.
extract :: (b -> a) -> [b] -> [(a, b)]
extract f xs = zip (map f xs) xs
-- | Parse a string of decimal digits into the list of their values,
-- e.g. @\"123\" -> Just [1,2,3]@.  Any non-digit character fails the
-- whole parse; the empty string parses to @Just []@ (as before).
-- Uses 'digitToInt' instead of the partial 'read', and avoids the
-- original's shadowing of Prelude's 'reads'.
toIntList :: String -> Maybe [Int]
toIntList str
  | all isDigit str = Just (map digitToInt str)
  | otherwise       = Nothing
-- | Pick elements by 1-based index; 'Nothing' if any index is out of range.
selectByIndexes :: [a] -> [Int] -> Maybe [a]
selectByIndexes xs = mapM (\i -> atMay xs (i - 1))
| tokiwoousaka/draw-poker | src/Game/Poker/Hands.hs | bsd-3-clause | 4,459 | 1 | 13 | 1,140 | 1,772 | 945 | 827 | 147 | 3 |
{-# LANGUAGE
TemplateHaskell
, NamedFieldPuns
, CPP #-}
-- | Provides a quasiquoter for hexadecimal ByteString literals, with
-- placeholders that bind variables.
module Data.Hex.Quote
( -- * The quasiquoter
hex
-- * Helper functions
, parseHex
) where
import Control.Arrow
import Control.Applicative
import Control.Monad
import Data.Char
import Data.Word
import Data.Maybe
import Language.Haskell.TH
import Language.Haskell.TH.Syntax
import Language.Haskell.TH.Quote
import Text.Parsec hiding ( (<|>), many )
import Text.Parsec.Token
import Text.Parsec.String
import Text.Parsec.Language
import qualified Data.ByteString as B
import qualified Data.IntMap as IM
-- | Strip line comments: everything from \"--\" up to (but excluding)
-- the next newline is removed; the newline itself is kept.
dropComments :: String -> String
dropComments input = case input of
  '-':'-':rest -> dropComments (dropWhile (/= '\n') rest)
  c:rest       -> c : dropComments rest
  []           -> []
-- | Hex digit lookup table keyed by character code: @0-9@, @A-F@ and
-- @a-f@ map to their nibble values.
hexMap :: IM.IntMap Word8
hexMap = IM.fromList
    [ (ord c, v)
    | (cs, vs) <- [ (['0'..'9'], [0..9])
                  , (['A'..'F'], [10..15])
                  , (['a'..'f'], [10..15]) ]
    , (c, v) <- zip cs vs ]
-- | The hexadecimal parser used for @'hex'@ expressions.
--
-- Comments are stripped first; every character that is not a hex digit
-- is ignored; the surviving digits are combined in pairs (high nibble
-- first).  A trailing unpaired digit is silently dropped.
parseHex :: String -> [Word8]
parseHex = pairUp . mapMaybe digitValue . dropComments
  where
    -- Nothing for non-hex characters, so they are skipped entirely.
    digitValue c = IM.lookup (ord c) hexMap
    pairUp (hi:lo:rest) = (hi * 16 + lo) : pairUp rest
    pairUp _            = []
-- | Lift a byte list into a TH expression.  'Word8' cannot be lifted
-- directly here, but 'Int' lifts to a polymorphic numeric literal, so
-- the bytes are widened first.
liftBS :: [Word8] -> Q Exp
liftBS xs = lift (map fromIntegral xs :: [Int])
-- | Expression-position quasiquote body: parse the hex text and splice
-- a @'B.pack' [...]@ ByteString literal.
hexExp :: String -> Q Exp
hexExp xs = [| B.pack $(liftBS $ parseHex xs) |]
-- | One token of a @hex@ pattern: either literal bytes to match, or a
-- @\<name:size\>@ placeholder ('Nothing' size means \"rest\").
data Tok
  = Lit [Word8]
  | Take String (Maybe Integer)
  deriving (Show)
-- | Parse the body of a @hex@ pattern: runs of hex digits become 'Lit'
-- tokens and angle-bracketed @\<name:size\>@ groups become 'Take'
-- placeholders.
parseToks :: Parser [Tok]
parseToks = whiteSpace >> lexeme (many parseTok) <* eof where
  parseTok :: Parser Tok
  -- a placeholder in angle brackets, or a run of hex digits
  parseTok = (angles (Take <$> identifier <* symbol ":" <*> len))
         <|> ((Lit . parseHex) <$> lexeme (many1 hexDigit))
  -- placeholder size: the keyword "rest" (Nothing) or a byte count
  len = lexeme (
        (Nothing <$ symbol "rest")
    <|> (Just <$> decimal))
  TokenParser { whiteSpace, identifier, decimal,
                symbol, angles, lexeme }
      = makeTokenParser emptyDef
      { identStart = letter <|> char '_'
      , identLetter = alphaNum <|> oneOf "_'"
      , caseSensitive = True }
-- | Build (as a TH expression) an extractor of type
-- @ByteString -> Maybe [ByteString]@: it checks each literal run and
-- collects one ByteString per placeholder, failing on any mismatch or
-- on leftover input.
mkExtract :: [Tok] -> Q Exp
-- end of pattern: succeed only if the input is fully consumed
mkExtract [] = [| \x -> guard (B.null x) >> Just [] |]
-- literal run: the next n bytes must equal the literal exactly
mkExtract (Lit xs : ts) = let n = length xs in
  [| \x -> case B.splitAt n x of
       (y,z) | B.unpack y == $(liftBS xs) -> $(mkExtract ts) z
             | otherwise -> Nothing |]
-- fixed-size placeholder: capture exactly nn bytes
mkExtract (Take _ (Just n) : ts) = let nn = fromIntegral n in
  [| \x -> case B.splitAt nn x of
       (y,z) | B.length y == nn -> (y:) <$> $(mkExtract ts) z
             | otherwise -> Nothing |]
-- "rest" placeholder: capture everything that remains
mkExtract (Take _ Nothing : ts) = [| \x -> [x] <$ $(mkExtract ts) B.empty |]
-- | Build the view pattern @(extractor -> Just [v1, v2, ...])@, binding
-- one variable per named placeholder (\"_\" becomes a wildcard).
mkPat :: [Tok] -> Q Pat
mkPat ts = viewP (mkExtract ts) (conP 'Just [listP vars]) where
  mkV "_" = wildP
  mkV n = varP (mkName n)
  vars = [ mkV n | Take n _ <- ts ]
-- | Pattern-position quasiquote body: parse the placeholder pattern and
-- build the corresponding view pattern; a malformed pattern is a
-- compile-time error.
hexPat :: String -> Q Pat
hexPat xs =
    either (error . show) mkPat
        (parse parseToks "Data.Hex.Quote pattern" (dropComments xs))
{- |
As an expression, the @'hex'@ quasiquoter provides hexadecimal @'B.ByteString'@
literals:
>import Data.Hex.Quote
>import qualified Data.ByteString as B
>
>main = B.putStr [hex|
> 57 65 2c 20 61 6c 6f 6e 65 20 6f 6e 20 65 61 72
> 74 68 2c 20 63 61 6e 20 72 65 62 65 6c 20 61 67
> 61 69 6e 73 74 20 74 68 65 20 74 79 72 61 6e 6e
> 79 20 6f 66 20 74 68 65 20 73 65 6c 66 69 73 68
> 20 72 65 70 6c 69 63 61 74 6f 72 73 2e 0a |]
All characters other than @0123456789abcdefABCDEF@ are ignored, including
whitespace. Comments start with \"@--@\" and continue to end-of-line:
>code = [hex|
> 7e3a -- jle 0x3c
> 4889f5 -- mov rbp, rsi
> bb01000000 -- mov ebx, 0x1
> 488b7d08 |] -- mov rdi, [rbp+0x8]
When using @'hex'@ as a pattern, you can include placeholders of the form
@\<name:size\>@, where
* @name@ is a Haskell identifier, or the wildcard pattern \"@_@\"
* @size@ is the size of the field in bytes, or the word @rest@ to consume
the rest of the @'B.ByteString'@.
The named placeholders bind local variables of type @'B.ByteString'@. Here's
an example of pattern-matching an IPv4-over-Ethernet-II frame:
>import Data.Hex.Quote
>
>describe [hex|
> <src_mac:6> <dst_mac:6> 08 00 -- ethernet header
> 45 <_:1> <len:2> -- start of IP header
> <_:rest> -- discard remaining frame
> |] = (src_mac, dst_mac, len)
>
>describe _ = error "unknown frame"
Quasiquotes require the @QuasiQuotes@ extension. In pattern context, @'hex'@ also
requires the @ViewPatterns@ extension.
-}
hex :: QuasiQuoter
hex = QuasiQuoter
  { quoteExp = hexExp
  , quotePat = hexPat
    -- Type and declaration contexts are not supported; template-haskell
    -- >= 2.5 requires these fields to exist, so fail loudly if used.
#if MIN_VERSION_template_haskell(2,5,0)
  , quoteType = const (error "no type quote for Data.Hex.Quote")
  , quoteDec = const (error "no decl quote for Data.Hex.Quote")
#endif
  }
| kmcallister/hexquote | Data/Hex/Quote.hs | bsd-3-clause | 4,955 | 0 | 13 | 1,276 | 1,042 | 568 | 474 | 84 | 3 |
{-# LANGUAGE CPP, ConstraintKinds, DeriveDataTypeable, FlexibleContexts, MultiWayIf, NamedFieldPuns,
OverloadedStrings, PackageImports, RankNTypes, RecordWildCards, ScopedTypeVariables,
TemplateHaskell, TupleSections #-}
-- | Run commands in Docker containers
module Stack.Docker
(cleanup
,CleanupOpts(..)
,CleanupAction(..)
,dockerCleanupCmdName
,dockerCmdName
,dockerHelpOptName
,dockerPullCmdName
,entrypoint
,preventInContainer
,pull
,reexecWithOptionalContainer
,reset
,reExecArgName
,StackDockerException(..)
) where
import Control.Applicative
import Control.Monad
import Control.Monad.IO.Unlift
import Control.Monad.Logger (MonadLogger,logError,logInfo,logWarn)
import Control.Monad.Reader (MonadReader,runReaderT)
import Control.Monad.Writer (execWriter,runWriter,tell)
import qualified Crypto.Hash as Hash (Digest, MD5, hash)
import Data.Aeson.Extended (FromJSON(..),(.:),(.:?),(.!=),eitherDecode)
import Data.ByteString.Builder (stringUtf8,charUtf8,toLazyByteString)
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy.Char8 as LBS
import Data.Char (isSpace,toUpper,isAscii,isDigit)
import Data.Conduit.List (sinkNull)
import Data.List (dropWhileEnd,intercalate,isPrefixOf,isInfixOf,foldl')
import Data.List.Extra (trim, nubOrd)
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Data.Maybe
import Data.Ord (Down(..))
import Data.Streaming.Process (ProcessExitedUnsuccessfully(..))
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Data.Time (UTCTime,LocalTime(..),diffDays,utcToLocalTime,getZonedTime,ZonedTime(..))
import Data.Version (showVersion)
import GHC.Exts (sortWith)
import Path
import Path.Extra (toFilePathNoTrailingSep)
import Path.IO hiding (canonicalizePath)
import qualified Paths_stack as Meta
import Prelude -- Fix redundant import warnings
import Stack.Config (getInContainer)
import Stack.Constants
import Stack.Docker.GlobalDB
import Stack.Types.PackageIndex
import Stack.Types.Version
import Stack.Types.Config
import Stack.Types.Docker
import Stack.Types.Internal
import Stack.Types.StackT
import Stack.Setup (ensureDockerStackExe)
import System.Directory (canonicalizePath,getHomeDirectory)
import System.Environment (getEnv,getEnvironment,getProgName,getArgs,getExecutablePath)
import System.Exit (exitSuccess, exitWith, ExitCode(..))
import qualified System.FilePath as FP
import System.IO (stderr,stdin,stdout,hIsTerminalDevice, hClose)
import System.IO.Error (isDoesNotExistError)
import System.IO.Unsafe (unsafePerformIO)
import qualified System.PosixCompat.User as User
import qualified System.PosixCompat.Files as Files
import System.Process (CreateProcess(..), StdStream(..), waitForProcess)
import System.Process.PagerEditor (editByteString)
import System.Process.Read
import System.Process.Run
import Text.Printf (printf)
#ifndef WINDOWS
import Control.Concurrent (threadDelay)
import System.Posix.Signals
import qualified System.Posix.User as PosixUser
#endif
-- | If Docker is enabled, re-runs the currently running OS command in a Docker container.
-- Otherwise, runs the inner action.
--
-- This takes an optional release action which should be taken IFF control is
-- transferring away from the current process to the intra-container one. The main use
-- for this is releasing a lock. After launching reexecution, the host process becomes
-- nothing but an manager for the call into docker and thus may not hold the lock.
reexecWithOptionalContainer
    :: (StackM env m, HasConfig env)
    => Maybe (Path Abs Dir) -- ^ project root, if known
    -> Maybe (m ()) -- ^ action to run before (host only)
    -> IO () -- ^ the inner action
    -> Maybe (m ()) -- ^ action to run after (host only)
    -> Maybe (m ()) -- ^ release action before handing off to the container
    -> m ()
reexecWithOptionalContainer mprojectRoot =
    execWithOptionalContainer mprojectRoot getCmdArgs
  where
    getCmdArgs docker envOverride imageInfo isRemoteDocker = do
        -- Identity to assume inside the container (skipped for remote
        -- Docker unless explicitly requested via dockerSetUser).
        config <- view configL
        deUser <-
            if fromMaybe (not isRemoteDocker) (dockerSetUser docker)
                then liftIO $ do
                  duUid <- User.getEffectiveUserID
                  duGid <- User.getEffectiveGroupID
                  duGroups <- nubOrd <$> User.getGroups
                  duUmask <- Files.setFileCreationMask 0o022
                  -- Only way to get old umask seems to be to change it, so set it back afterward
                  _ <- Files.setFileCreationMask duUmask
                  return (Just DockerUser{..})
                else return Nothing
        args <-
            fmap
                (["--" ++ reExecArgName ++ "=" ++ showVersion Meta.version
                 ,"--" ++ dockerEntrypointArgName
                 ,show DockerEntrypoint{..}] ++)
                (liftIO getArgs)
        -- Decide how a usable stack executable gets into the container.
        case dockerStackExe (configDocker config) of
            Just DockerStackExeHost
              | configPlatform config == dockerContainerPlatform -> do
                  exePath <- liftIO getExecutablePath
                  cmdArgs args exePath
              | otherwise -> throwIO UnsupportedStackExeHostPlatformException
            Just DockerStackExeImage -> do
                progName <- liftIO getProgName
                return (FP.takeBaseName progName, args, [], [])
            Just (DockerStackExePath path) -> do
                exePath <- liftIO $ canonicalizePath (toFilePath path)
                cmdArgs args exePath
            Just DockerStackExeDownload -> exeDownload args
            Nothing
              | configPlatform config == dockerContainerPlatform -> do
                  -- Use the host executable if a cached probe (or a fresh
                  -- "--version" run inside the image) says it is compatible.
                  (exePath,exeTimestamp,misCompatible) <-
                      liftIO $
                      do exePath <- liftIO getExecutablePath
                         exeTimestamp <- resolveFile' exePath >>= getModificationTime
                         isKnown <-
                             liftIO $
                             getDockerImageExe
                                 config
                                 (iiId imageInfo)
                                 exePath
                                 exeTimestamp
                         return (exePath, exeTimestamp, isKnown)
                  case misCompatible of
                      Just True -> cmdArgs args exePath
                      Just False -> exeDownload args
                      Nothing -> do
                          e <-
                              try $
                              sinkProcessStderrStdout
                                  Nothing
                                  envOverride
                                  "docker"
                                  [ "run"
                                  , "-v"
                                  , exePath ++ ":" ++ "/tmp/stack"
                                  , iiId imageInfo
                                  , "/tmp/stack"
                                  , "--version"]
                                  sinkNull
                                  sinkNull
                          let compatible =
                                  case e of
                                      Left (ProcessExitedUnsuccessfully _ _) ->
                                          False
                                      Right _ -> True
                          liftIO $
                              setDockerImageExe
                                  config
                                  (iiId imageInfo)
                                  exePath
                                  exeTimestamp
                                  compatible
                          if compatible
                              then cmdArgs args exePath
                              else exeDownload args
            Nothing -> exeDownload args
    -- Download a stack binary built for the container platform.
    exeDownload args = do
        exePath <- ensureDockerStackExe dockerContainerPlatform
        cmdArgs args (toFilePath exePath)
    -- Bind-mount the chosen executable into the container's bin dir.
    cmdArgs args exePath = do
        let mountPath = hostBinDir FP.</> FP.takeBaseName exePath
        return (mountPath, args, [], [Mount exePath mountPath])
-- | If Docker is enabled, re-runs the OS command returned by the second argument in a
-- Docker container. Otherwise, runs the inner action.
--
-- This takes an optional release action just like `reexecWithOptionalContainer`.
execWithOptionalContainer
    :: (StackM env m, HasConfig env)
    => Maybe (Path Abs Dir) -- ^ project root, if known
    -> GetCmdArgs env m -- ^ computes (cmd, args, env, mounts) for the container
    -> Maybe (m ()) -- ^ action to run before (host only)
    -> IO () -- ^ the inner action
    -> Maybe (m ()) -- ^ action to run after (host only)
    -> Maybe (m ()) -- ^ release action before handing off to the container
    -> m ()
execWithOptionalContainer mprojectRoot getCmdArgs mbefore inner mafter mrelease =
  do config <- view configL
     inContainer <- getInContainer
     isReExec <- view reExecL
     -- Cases: host-only hooks requested while already in a container
     -- (error), already in a container (run inner directly), docker
     -- disabled (run inner on the host), otherwise launch the container.
     if | inContainer && not isReExec && (isJust mbefore || isJust mafter) ->
            throwIO OnlyOnHostException
        | inContainer ->
            liftIO (do inner
                       exitSuccess)
        | not (dockerEnable (configDocker config)) ->
            do fromMaybeAction mbefore
               liftIO inner
               fromMaybeAction mafter
               liftIO exitSuccess
        | otherwise ->
            do fromMaybeAction mrelease
               runContainerAndExit
                 getCmdArgs
                 mprojectRoot
                 (fromMaybeAction mbefore)
                 (fromMaybeAction mafter)
  where
    -- Run an optional hook, defaulting to a no-op.
    fromMaybeAction Nothing = return ()
    fromMaybeAction (Just hook) = hook
-- | Error if running in a container.
-- | Run the given action only on the host; throws 'OnlyOnHostException'
-- when we are already inside a Docker container.
preventInContainer :: MonadIO m => m () -> m ()
preventInContainer inner =
    getInContainer >>= \inside ->
        if inside
            then throwIO OnlyOnHostException
            else inner
-- | Run a command in a new Docker container, then exit the process.
runContainerAndExit :: (StackM env m, HasConfig env)
                    => GetCmdArgs env m
                    -> Maybe (Path Abs Dir) -- ^ Project root (maybe)
                    -> m () -- ^ Action to run before
                    -> m () -- ^ Action to run after
                    -> m ()
runContainerAndExit getCmdArgs
                    mprojectRoot
                    before
                    after =
  do config <- view configL
     let docker = configDocker config
     envOverride <- getEnvOverride (configPlatform config)
     checkDockerVersion envOverride docker
     (env,isStdinTerminal,isStderrTerminal,homeDir) <- liftIO $
       (,,,)
       <$> getEnvironment
       <*> hIsTerminalDevice stdin
       <*> hIsTerminalDevice stderr
       <*> (parseAbsDir =<< getHomeDirectory)
     isStdoutTerminal <- view terminalL
     let dockerHost = lookup "DOCKER_HOST" env
         dockerCertPath = lookup "DOCKER_CERT_PATH" env
         bamboo = lookup "bamboo_buildKey" env
         jenkins = lookup "JENKINS_HOME" env
         msshAuthSock = lookup "SSH_AUTH_SOCK" env
         muserEnv = lookup "USER" env
         isRemoteDocker = maybe False (isPrefixOf "tcp://") dockerHost
         image = dockerImage docker
     when (isRemoteDocker &&
           maybe False (isInfixOf "boot2docker") dockerCertPath)
          ($logWarn "Warning: Using boot2docker is NOT supported, and not likely to perform well.")
     -- Make sure the image exists locally, pulling it if configured to.
     maybeImageInfo <- inspect envOverride image
     imageInfo@Inspect{..} <- case maybeImageInfo of
       Just ii -> return ii
       Nothing
         | dockerAutoPull docker ->
             do pullImage envOverride docker image
                mii2 <- inspect envOverride image
                case mii2 of
                  Just ii2 -> return ii2
                  Nothing -> throwM (InspectFailedException image)
         | otherwise -> throwM (NotPulledException image)
     sandboxDir <- projectDockerSandboxDir projectRoot
     let ImageConfig {..} = iiConfig
         imageEnvVars = map (break (== '=')) icEnv
         platformVariant = show $ hashRepoName image
         stackRoot = configStackRoot config
         sandboxHomeDir = sandboxDir </> homeDirName
         isTerm = not (dockerDetach docker) &&
                  isStdinTerminal &&
                  isStdoutTerminal &&
                  isStderrTerminal
         keepStdinOpen = not (dockerDetach docker) &&
                         -- Workaround for https://github.com/docker/docker/issues/12319
                         -- This is fixed in Docker 1.9.1, but will leave the workaround
                         -- in place for now, for users who haven't upgraded yet.
                         (isTerm || (isNothing bamboo && isNothing jenkins))
     hostBinDirPath <- parseAbsDir hostBinDir
     newPathEnv <- augmentPath
                     [ hostBinDirPath
                     , sandboxHomeDir </> $(mkRelDir ".local/bin")]
                     (T.pack <$> lookupImageEnv "PATH" imageEnvVars)
     (cmnd,args,envVars,extraMount) <- getCmdArgs docker envOverride imageInfo isRemoteDocker
     pwd <- getCurrentDir
     liftIO
       (do updateDockerImageLastUsed config iiId (toFilePath projectRoot)
           mapM_ ensureDir [sandboxHomeDir, stackRoot])
     -- Since $HOME is now mounted in the same place in the container we can
     -- just symlink $HOME/.ssh to the right place for the stack docker user
     let sshDir = homeDir </> sshRelDir
     sshDirExists <- doesDirExist sshDir
     sshSandboxDirExists <-
       liftIO
         (Files.fileExist
            (toFilePathNoTrailingSep (sandboxHomeDir </> sshRelDir)))
     when (sshDirExists && not sshSandboxDirExists)
          (liftIO
             (Files.createSymbolicLink
                (toFilePathNoTrailingSep sshDir)
                (toFilePathNoTrailingSep (sandboxHomeDir </> sshRelDir))))
     -- Create (but do not yet start) the container, with host networking,
     -- environment, bind mounts, and working directory all set up.
     containerID <- (trim . decodeUtf8) <$> readDockerProcess
       envOverride
       (Just projectRoot)
       (concat
         [["create"
          ,"--net=host"
          ,"-e",inContainerEnvVar ++ "=1"
          ,"-e",stackRootEnvVar ++ "=" ++ toFilePathNoTrailingSep stackRoot
          ,"-e",platformVariantEnvVar ++ "=dk" ++ platformVariant
          ,"-e","HOME=" ++ toFilePathNoTrailingSep sandboxHomeDir
          ,"-e","PATH=" ++ T.unpack newPathEnv
          ,"-e","PWD=" ++ toFilePathNoTrailingSep pwd
          ,"-v",toFilePathNoTrailingSep homeDir ++ ":" ++ toFilePathNoTrailingSep homeDir
          ,"-v",toFilePathNoTrailingSep stackRoot ++ ":" ++ toFilePathNoTrailingSep stackRoot
          ,"-v",toFilePathNoTrailingSep projectRoot ++ ":" ++ toFilePathNoTrailingSep projectRoot
          ,"-v",toFilePathNoTrailingSep sandboxHomeDir ++ ":" ++ toFilePathNoTrailingSep sandboxHomeDir
          ,"-w",toFilePathNoTrailingSep pwd]
         ,case muserEnv of
            Nothing -> []
            Just userEnv -> ["-e","USER=" ++ userEnv]
         ,case msshAuthSock of
            Nothing -> []
            Just sshAuthSock ->
              ["-e","SSH_AUTH_SOCK=" ++ sshAuthSock
              ,"-v",sshAuthSock ++ ":" ++ sshAuthSock]
          -- Disable the deprecated entrypoint in FP Complete-generated images
         ,["--entrypoint=/usr/bin/env"
              | isJust (lookupImageEnv oldSandboxIdEnvVar imageEnvVars) &&
                (icEntrypoint == ["/usr/local/sbin/docker-entrypoint"] ||
                 icEntrypoint == ["/root/entrypoint.sh"])]
         ,concatMap (\(k,v) -> ["-e", k ++ "=" ++ v]) envVars
         ,concatMap mountArg (extraMount ++ dockerMount docker)
         ,concatMap (\nv -> ["-e", nv]) (dockerEnv docker)
         ,case dockerContainerName docker of
            Just name -> ["--name=" ++ name]
            Nothing -> []
         ,["-t" | isTerm]
         ,["-i" | keepStdinOpen]
         ,dockerRunArgs docker
         ,[image]
         ,[cmnd]
         ,args])
     before
#ifndef WINDOWS
     -- Forward signals received by the host process into the container.
     runInBase <- askRunIO
     oldHandlers <- forM [sigINT,sigABRT,sigHUP,sigPIPE,sigTERM,sigUSR1,sigUSR2] $ \sig -> do
       let sigHandler = runInBase $ do
             readProcessNull Nothing envOverride "docker"
               ["kill","--signal=" ++ show sig,containerID]
             when (sig `elem` [sigTERM,sigABRT]) $ do
               -- Give the container 30 seconds to exit gracefully, then send a sigKILL to force it
               liftIO $ threadDelay 30000000
               readProcessNull Nothing envOverride "docker" ["kill",containerID]
       oldHandler <- liftIO $ installHandler sig (Catch sigHandler) Nothing
       return (sig, oldHandler)
#endif
     -- Start the created container, attaching stdio unless detached;
     -- always clean up (remove container, restore handlers) afterward.
     let cmd = Cmd Nothing
                   "docker"
                   envOverride
                   (concat [["start"]
                           ,["-a" | not (dockerDetach docker)]
                           ,["-i" | keepStdinOpen]
                           ,[containerID]])
     e <- finally
       (try $ callProcess'
         (\cp -> cp { delegate_ctlc = False })
         cmd)
       (do unless (dockerPersist docker || dockerDetach docker) $
             catch
               (readProcessNull Nothing envOverride "docker" ["rm","-f",containerID])
               (\(_::ReadProcessException) -> return ())
#ifndef WINDOWS
           forM_ oldHandlers $ \(sig,oldHandler) ->
             liftIO $ installHandler sig oldHandler Nothing
#endif
       )
     case e of
       Left (ProcessExitedUnsuccessfully _ ec) -> liftIO (exitWith ec)
       Right () -> do after
                      liftIO exitSuccess
  where
    -- This is using a hash of the Docker repository (without tag or digest) to ensure
    -- binaries/libraries aren't shared between Docker and host (or incompatible Docker images)
    hashRepoName :: String -> Hash.Digest Hash.MD5
    hashRepoName = Hash.hash . BS.pack . takeWhile (\c -> c /= ':' && c /= '@')
    lookupImageEnv name vars =
      case lookup name vars of
        Just ('=':val) -> Just val
        _ -> Nothing
    mountArg (Mount host container) = ["-v",host ++ ":" ++ container]
    projectRoot = fromMaybeProjectRoot mprojectRoot
    sshRelDir = $(mkRelDir ".ssh/")
-- | Clean-up old docker images and containers.
cleanup :: (StackM env m, HasConfig env)
        => CleanupOpts -> m ()
cleanup opts =
  do config <- view configL
     let docker = configDocker config
     envOverride <- getEnvOverride (configPlatform config)
     checkDockerVersion envOverride docker
     let runDocker = readDockerProcess envOverride Nothing
     -- Snapshot images and containers in every state we care about.
     imagesOut <- runDocker ["images","--no-trunc","-f","dangling=false"]
     danglingImagesOut <- runDocker ["images","--no-trunc","-f","dangling=true"]
     runningContainersOut <- runDocker ["ps","-a","--no-trunc","-f","status=running"]
     restartingContainersOut <- runDocker ["ps","-a","--no-trunc","-f","status=restarting"]
     exitedContainersOut <- runDocker ["ps","-a","--no-trunc","-f","status=exited"]
     pausedContainersOut <- runDocker ["ps","-a","--no-trunc","-f","status=paused"]
     let imageRepos = parseImagesOut imagesOut
         danglingImageHashes = Map.keys (parseImagesOut danglingImagesOut)
         runningContainers = parseContainersOut runningContainersOut ++
                             parseContainersOut restartingContainersOut
         stoppedContainers = parseContainersOut exitedContainersOut ++
                             parseContainersOut pausedContainersOut
     inspectMap <- inspects envOverride
                            (Map.keys imageRepos ++
                             danglingImageHashes ++
                             map fst stoppedContainers ++
                             map fst runningContainers)
     (imagesLastUsed,curTime) <-
       liftIO ((,) <$> getDockerImagesLastUsed config
                   <*> getZonedTime)
     let planWriter = buildPlan curTime
                                imagesLastUsed
                                imageRepos
                                danglingImageHashes
                                stoppedContainers
                                runningContainers
                                inspectMap
         plan = toLazyByteString (execWriter planWriter)
     -- Present the plan per the chosen mode: edit interactively, apply
     -- as-is, or just print it (dry run applies an empty plan).
     plan' <- case dcAction opts of
                CleanupInteractive ->
                  liftIO (editByteString (intercalate "-" [stackProgName
                                                          ,dockerCmdName
                                                          ,dockerCleanupCmdName
                                                          ,"plan"])
                                         plan)
                CleanupImmediate -> return plan
                CleanupDryRun -> do liftIO (LBS.hPut stdout plan)
                                    return LBS.empty
     mapM_ (performPlanLine envOverride)
           (reverse (filter filterPlanLine (lines (LBS.unpack plan'))))
     allImageHashesOut <- runDocker ["images","-aq","--no-trunc"]
     liftIO (pruneDockerImagesLastUsed config (lines (decodeUtf8 allImageHashesOut)))
  where
    -- Keep only action lines (comments/kept entries start with whitespace).
    filterPlanLine line =
      case line of
        c:_ | isSpace c -> False
        _ -> True
    -- Execute one plan line of the form "R image <id>" / "R container <id>".
    performPlanLine envOverride line =
      case filter (not . null) (words (takeWhile (/= '#') line)) of
        [] -> return ()
        (c:_):t:v:_ ->
          do args <- if | toUpper c == 'R' && t == imageStr ->
                            do $logInfo (concatT ["Removing image: '",v,"'"])
                               return ["rmi",v]
                        | toUpper c == 'R' && t == containerStr ->
                            do $logInfo (concatT ["Removing container: '",v,"'"])
                               return ["rm","-f",v]
                        | otherwise -> throwM (InvalidCleanupCommandException line)
             e <- try (readDockerProcess envOverride Nothing args)
             case e of
               Left ex@ProcessFailed{} ->
                 $logError (concatT ["Could not remove: '",v,"': ", show ex])
               Left e' -> throwM e'
               Right _ -> return ()
        _ -> throwM (InvalidCleanupCommandException line)
    -- Parse `docker images` output into hash -> [repo:tag] entries.
    parseImagesOut = Map.fromListWith (++) . map parseImageRepo . drop 1 . lines . decodeUtf8
      where parseImageRepo :: String -> (String, [String])
            parseImageRepo line =
              case words line of
                repo:tag:hash:_
                  | repo == "<none>" -> (hash,[])
                  | tag == "<none>" -> (hash,[repo])
                  | otherwise -> (hash,[repo ++ ":" ++ tag])
                _ -> impureThrow (InvalidImagesOutputException line)
    -- Parse `docker ps` output into (hash, (image, name)) entries.
    parseContainersOut = map parseContainer . drop 1 . lines . decodeUtf8
      where parseContainer line =
              case words line of
                hash:image:rest -> (hash,(image,last rest))
                _ -> impureThrow (InvalidPSOutputException line)
    -- Emit the editable cleanup plan (as a Writer of Builder chunks).
    buildPlan curTime
              imagesLastUsed
              imageRepos
              danglingImageHashes
              stoppedContainers
              runningContainers
              inspectMap =
      do case dcAction opts of
           CleanupInteractive ->
             do buildStrLn
                  (concat
                     ["# STACK DOCKER CLEANUP PLAN"
                     ,"\n#"
                     ,"\n# When you leave the editor, the lines in this plan will be processed."
                     ,"\n#"
                     ,"\n# Lines that begin with 'R' denote an image or container that will be."
                     ,"\n# removed. You may change the first character to/from 'R' to remove/keep"
                     ,"\n# and image or container that would otherwise be kept/removed."
                     ,"\n#"
                     ,"\n# To cancel the cleanup, delete all lines in this file."
                     ,"\n#"
                     ,"\n# By default, the following images/containers will be removed:"
                     ,"\n#"])
                buildDefault dcRemoveKnownImagesLastUsedDaysAgo "Known images last used"
                buildDefault dcRemoveUnknownImagesCreatedDaysAgo "Unknown images created"
                buildDefault dcRemoveDanglingImagesCreatedDaysAgo "Dangling images created"
                buildDefault dcRemoveStoppedContainersCreatedDaysAgo "Stopped containers created"
                buildDefault dcRemoveRunningContainersCreatedDaysAgo "Running containers created"
                buildStrLn
                  (concat
                     ["#"
                     ,"\n# The default plan can be adjusted using command-line arguments."
                     ,"\n# Run '" ++ unwords [stackProgName, dockerCmdName, dockerCleanupCmdName] ++
                      " --help' for details."
                     ,"\n#"])
           _ -> buildStrLn
                  (unlines
                     ["# Lines that begin with 'R' denote an image or container that will be."
                     ,"# removed."])
         buildSection "KNOWN IMAGES (pulled/used by stack)"
                      imagesLastUsed
                      buildKnownImage
         buildSection "UNKNOWN IMAGES (not managed by stack)"
                      (sortCreated (Map.toList (foldl' (\m (h,_) -> Map.delete h m)
                                                       imageRepos
                                                       imagesLastUsed)))
                      buildUnknownImage
         buildSection "DANGLING IMAGES (no named references and not depended on by other images)"
                      (sortCreated (map (,()) danglingImageHashes))
                      buildDanglingImage
         buildSection "STOPPED CONTAINERS"
                      (sortCreated stoppedContainers)
                      (buildContainer (dcRemoveStoppedContainersCreatedDaysAgo opts))
         buildSection "RUNNING CONTAINERS"
                      (sortCreated runningContainers)
                      (buildContainer (dcRemoveRunningContainersCreatedDaysAgo opts))
      where
        buildDefault accessor description =
          case accessor opts of
            Just days -> buildStrLn ("# - " ++ description ++ " at least " ++ showDays days ++ ".")
            Nothing -> return ()
        -- Newest-first, dropping anything docker could not inspect.
        sortCreated =
          sortWith (\(_,_,x) -> Down x) .
          mapMaybe (\(h,r) ->
            case Map.lookup h inspectMap of
              Nothing -> Nothing
              Just ii -> Just (h,r,iiCreated ii))
        -- Only emit a section header when some item actually wrote output.
        buildSection sectionHead items itemBuilder =
          do let (anyWrote,b) = runWriter (forM items itemBuilder)
             when (or anyWrote) $
               do buildSectionHead sectionHead
                  tell b
        buildKnownImage (imageHash,lastUsedProjects) =
          case Map.lookup imageHash imageRepos of
            Just repos@(_:_) ->
              do case lastUsedProjects of
                   (l,_):_ -> forM_ repos (buildImageTime (dcRemoveKnownImagesLastUsedDaysAgo opts) l)
                   _ -> forM_ repos buildKeepImage
                 forM_ lastUsedProjects buildProject
                 buildInspect imageHash
                 return True
            _ -> return False
        buildUnknownImage (hash, repos, created) =
          case repos of
            [] -> return False
            _ -> do forM_ repos (buildImageTime (dcRemoveUnknownImagesCreatedDaysAgo opts) created)
                    buildInspect hash
                    return True
        buildDanglingImage (hash, (), created) =
          do buildImageTime (dcRemoveDanglingImagesCreatedDaysAgo opts) created hash
             buildInspect hash
             return True
        buildContainer removeAge (hash,(image,name),created) =
          do let disp = name ++ " (image: " ++ image ++ ")"
             buildTime containerStr removeAge created disp
             buildInspect hash
             return True
        buildProject (lastUsedTime, projectPath) =
          buildInfo ("Last used " ++
                     showDaysAgo lastUsedTime ++
                     " in " ++
                     projectPath)
        buildInspect hash =
          case Map.lookup hash inspectMap of
            Just Inspect{iiCreated,iiVirtualSize} ->
              buildInfo ("Created " ++
                         showDaysAgo iiCreated ++
                         maybe ""
                               (\s -> " (size: " ++
                                      printf "%g" (fromIntegral s / 1024.0 / 1024.0 :: Float) ++
                                      "M)")
                               iiVirtualSize)
            Nothing -> return ()
        showDays days =
          case days of
            0 -> "today"
            1 -> "yesterday"
            n -> show n ++ " days ago"
        showDaysAgo oldTime = showDays (daysAgo oldTime)
        -- Age in whole local-time days relative to the current zoned time.
        daysAgo oldTime =
          let ZonedTime (LocalTime today _) zone = curTime
              LocalTime oldDay _ = utcToLocalTime zone oldTime
          in diffDays today oldDay
        buildImageTime = buildTime imageStr
        -- 'R' marks an entry for removal when it is old enough.
        buildTime t removeAge time disp =
          case removeAge of
            Just d | daysAgo time >= d -> buildStrLn ("R " ++ t ++ " " ++ disp)
            _ -> buildKeep t disp
        buildKeep t d = buildStrLn (" " ++ t ++ " " ++ d)
        buildKeepImage = buildKeep imageStr
        buildSectionHead s = buildStrLn ("\n#\n# " ++ s ++ "\n#\n")
        buildInfo = buildStrLn . (" # " ++)
        buildStrLn l = do buildStr l
                          tell (charUtf8 '\n')
        buildStr = tell . stringUtf8
    imageStr = "image"
    containerStr = "container"
-- | Inspect a single Docker image or container.  Returns 'Nothing' when the
-- object does not exist; fails when @docker inspect@ unexpectedly reports
-- more than one result for a single query.
inspect :: (MonadUnliftIO m,MonadLogger m)
        => EnvOverride -> String -> m (Maybe Inspect)
inspect envOverride image = do
  resultMap <- inspects envOverride [image]
  case Map.elems resultMap of
    []  -> return Nothing
    [i] -> return (Just i)
    _   -> throwIO (InvalidInspectOutputException "expect a single result")
-- | Inspect multiple Docker images and/or containers.
-- Returns a map keyed by the inspected object's ID ('iiId').  A missing
-- image or container yields an empty map rather than an error.
inspects :: (MonadUnliftIO m, MonadLogger m)
         => EnvOverride -> [String] -> m (Map String Inspect)
inspects _ [] = return Map.empty
inspects envOverride images =
  do maybeInspectOut <-
       try (readDockerProcess envOverride Nothing ("inspect" : images))
     case maybeInspectOut of
       Right inspectOut ->
         -- filtering with 'isAscii' to workaround @docker inspect@ output containing invalid UTF-8
         case eitherDecode (LBS.pack (filter isAscii (decodeUtf8 inspectOut))) of
           Left msg -> throwIO (InvalidInspectOutputException msg)
           Right results -> return (Map.fromList (map (\r -> (iiId r,r)) results))
       -- docker reports a missing image/container via one of the known
       -- "No such ..." error prefixes; treat that as "no results".
       Left (ProcessFailed _ _ _ err)
         | any (`LBS.isPrefixOf` err) missingImagePrefixes -> return Map.empty
       -- any other process failure is re-thrown unchanged
       Left e -> throwIO e
  where missingImagePrefixes = ["Error: No such image", "Error: No such object:"]
-- | Pull the latest version of the configured Docker image from the
-- registry, first verifying that a suitable @docker@ client is installed.
pull :: (StackM env m, HasConfig env) => m ()
pull = do
  config <- view configL
  envOverride <- getEnvOverride (configPlatform config)
  let dockerOpts = configDocker config
  checkDockerVersion envOverride dockerOpts
  pullImage envOverride dockerOpts (dockerImage dockerOpts)
-- | Pull Docker image from registry.  Optionally performs a @docker login@
-- first, then runs @docker pull@ with its output redirected to stderr, and
-- throws 'PullFailedException' when the pull exits non-zero.
pullImage :: (MonadLogger m,MonadIO m,MonadThrow m)
          => EnvOverride -> DockerOpts -> String -> m ()
pullImage envOverride docker image =
  do $logInfo (concatT ["Pulling image from registry: '",image,"'"])
     -- The registry host passed to @docker login@ is the part of the image
     -- name before the first '/'.
     when (dockerRegistryLogin docker)
          (do $logInfo "You may need to log in."
              callProcess $ Cmd
                Nothing
                "docker"
                envOverride
                (concat
                   [["login"]
                   ,maybe [] (\n -> ["--username=" ++ n]) (dockerRegistryUsername docker)
                   ,maybe [] (\p -> ["--password=" ++ p]) (dockerRegistryPassword docker)
                   ,[takeWhile (/= '/') image]]))
     -- We redirect the stdout of the process to stderr so that the output
     -- of @docker pull@ will not interfere with the output of other
     -- commands when using --auto-docker-pull. See issue #2733.
     let stdoutToStderr cp = cp
           { std_out = UseHandle stderr
           , std_err = UseHandle stderr
           , std_in = CreatePipe
           }
     (Just hin, _, _, ph) <- createProcess' "pullImage" stdoutToStderr $
       Cmd Nothing "docker" envOverride ["pull",image]
     -- stdin is a pipe we close immediately so the child sees EOF.
     liftIO (hClose hin)
     ec <- liftIO (waitForProcess ph)
     case ec of
       ExitSuccess -> return ()
       ExitFailure _ -> throwIO (PullFailedException image)
-- | Check docker version (throws exception if incorrect).
-- Verifies that a @docker@ executable exists, then parses the version out of
-- @docker --version@ output and checks it against the minimum, prohibited,
-- and user-required version ranges.
checkDockerVersion
    :: (MonadUnliftIO m, MonadLogger m)
    => EnvOverride -> DockerOpts -> m ()
checkDockerVersion envOverride docker =
  do dockerExists <- doesExecutableExist envOverride "docker"
     unless dockerExists (throwIO DockerNotInstalledException)
     dockerVersionOut <- readDockerProcess envOverride Nothing ["--version"]
     -- The version number is expected as the third word of the output.
     case words (decodeUtf8 dockerVersionOut) of
       (_:_:v:_) ->
         case parseVersionFromString (stripVersion v) of
           Just v'
             | v' < minimumDockerVersion ->
               throwIO (DockerTooOldException minimumDockerVersion v')
             | v' `elem` prohibitedDockerVersions ->
               throwIO (DockerVersionProhibitedException prohibitedDockerVersions v')
             | not (v' `withinRange` dockerRequireDockerVersion docker) ->
               throwIO (BadDockerVersionException (dockerRequireDockerVersion docker) v')
             | otherwise ->
               return ()
           _ -> throwIO InvalidVersionOutputException
       _ -> throwIO InvalidVersionOutputException
  where minimumDockerVersion = $(mkVersion "1.6.0")
        prohibitedDockerVersions = []
        -- Drop trailing non-digits (e.g. a comma), then truncate at the
        -- first '-' to strip pre-release/build suffixes.
        stripVersion v = takeWhile (/= '-') (dropWhileEnd (not . isDigit) v)
-- | Remove the project's Docker sandbox, optionally preserving the sandboxed
-- home directory ('homeDirName').
reset :: (MonadIO m, MonadReader env m, HasConfig env)
      => Maybe (Path Abs Dir) -> Bool -> m ()
reset maybeProjectRoot keepHome = do
  sandboxDir <- projectDockerSandboxDir (fromMaybeProjectRoot maybeProjectRoot)
  -- Keep the bind-mounted home directory when requested.
  let keptDirs = if keepHome then [homeDirName] else []
  liftIO (removeDirectoryContents sandboxDir keptDirs [])
-- | The Docker container "entrypoint": special actions performed when first entering
-- a container, such as switching the UID/GID to the "outside-Docker" user's.
-- Guarded by 'entrypointMVar' so it runs at most once per process.
entrypoint :: (MonadUnliftIO m, MonadLogger m, MonadThrow m)
           => Config -> DockerEntrypoint -> m ()
entrypoint config@Config{..} DockerEntrypoint{..} =
  modifyMVar_ entrypointMVar $ \alreadyRan -> do
    -- Only run the entrypoint once
    unless alreadyRan $ do
      envOverride <- getEnvOverride configPlatform
      homeDir <- liftIO $ parseAbsDir =<< getEnv "HOME"
      -- Get the UserEntry for the 'stack' user in the image, if it exists
      estackUserEntry0 <- liftIO $ tryJust (guard . isDoesNotExistError) $
        User.getUserEntryForName stackUserName
      -- Switch UID/GID if needed, and update user's home directory
      case deUser of
        Nothing -> return ()
        -- UID 0: no user switch is performed
        Just (DockerUser 0 _ _ _) -> return ()
        Just du -> updateOrCreateStackUser envOverride estackUserEntry0 homeDir du
      case estackUserEntry0 of
        Left _ -> return ()
        Right ue -> do
          -- If the 'stack' user exists in the image, copy any build plans and package indices from
          -- its original home directory to the host's stack root, to avoid needing to download them
          origStackHomeDir <- liftIO $ parseAbsDir (User.homeDirectory ue)
          let origStackRoot = origStackHomeDir </> $(mkRelDir ("." ++ stackProgName))
          buildPlanDirExists <- doesDirExist (buildPlanDir origStackRoot)
          when buildPlanDirExists $ do
            (_, buildPlans) <- listDir (buildPlanDir origStackRoot)
            forM_ buildPlans $ \srcBuildPlan -> do
              let destBuildPlan = buildPlanDir configStackRoot </> filename srcBuildPlan
              exists <- doesFileExist destBuildPlan
              -- never overwrite a build plan already present on the host
              unless exists $ do
                ensureDir (parent destBuildPlan)
                copyFile srcBuildPlan destBuildPlan
          forM_ configPackageIndices $ \pkgIdx -> do
            -- resolve the index path relative to the image's original stack root
            msrcIndex <- flip runReaderT (config{configStackRoot = origStackRoot}) $ do
               srcIndex <- configPackageIndex (indexName pkgIdx)
               exists <- doesFileExist srcIndex
               return $ if exists
                 then Just srcIndex
                 else Nothing
            case msrcIndex of
              Nothing -> return ()
              Just srcIndex -> do
                -- ... and the destination path relative to the host's stack root
                flip runReaderT config $ do
                  destIndex <- configPackageIndex (indexName pkgIdx)
                  exists <- doesFileExist destIndex
                  unless exists $ do
                    ensureDir (parent destIndex)
                    copyFile srcIndex destIndex
    return True
  where
    updateOrCreateStackUser envOverride estackUserEntry homeDir DockerUser{..} = do
      case estackUserEntry of
        Left _ -> do
          -- If no 'stack' user in image, create one with correct UID/GID and home directory
          readProcessNull Nothing envOverride "groupadd"
            ["-o"
            ,"--gid",show duGid
            ,stackUserName]
          readProcessNull Nothing envOverride "useradd"
            ["-oN"
            ,"--uid",show duUid
            ,"--gid",show duGid
            ,"--home",toFilePathNoTrailingSep homeDir
            ,stackUserName]
        Right _ -> do
          -- If there is already a 'stack' user in the image, adjust its UID/GID and home directory
          readProcessNull Nothing envOverride "usermod"
            ["-o"
            ,"--uid",show duUid
            ,"--home",toFilePathNoTrailingSep homeDir
            ,stackUserName]
          readProcessNull Nothing envOverride "groupmod"
            ["-o"
            ,"--gid",show duGid
            ,stackUserName]
      -- mirror each of the outside user's supplementary groups inside the image
      forM_ duGroups $ \gid -> do
        readProcessNull Nothing envOverride "groupadd"
          ["-o"
          ,"--gid",show gid
          ,"group" ++ show gid]
      -- 'setuid' to the wanted UID and GID
      liftIO $ do
        User.setGroupID duGid
#ifndef WINDOWS
        PosixUser.setGroups duGroups
#endif
        User.setUserID duUid
        -- adopt the outside-Docker user's umask
        _ <- Files.setFileCreationMask duUmask
        return ()
    stackUserName = "stack"::String
-- | MVar used to ensure the Docker entrypoint is performed exactly once
entrypointMVar :: MVar Bool
{-# NOINLINE entrypointMVar #-}
-- NOINLINE is required with 'unsafePerformIO' so a single shared MVar is
-- created, rather than a fresh one per use site after inlining.
entrypointMVar = unsafePerformIO (newMVar False)
-- | Delete everything inside a directory while leaving the directory itself
-- in place.  This is used instead of 'FS.removeTree' to clear bind-mounted
-- directories, since removing the root of the bind-mount won't work.
removeDirectoryContents :: Path Abs Dir -- ^ Directory to remove contents of
                        -> [Path Rel Dir] -- ^ Top-level directory names to exclude from removal
                        -> [Path Rel File] -- ^ Top-level file names to exclude from removal
                        -> IO ()
removeDirectoryContents path excludeDirs excludeFiles = do
  exists <- doesDirExist path
  when exists $ do
    (subDirs, subFiles) <- listDir path
    forM_ subDirs $ \d ->
      unless (dirname d `elem` excludeDirs) (removeDirRecur d)
    forM_ subFiles $ \f ->
      unless (filename f `elem` excludeFiles) (removeFile f)
-- | Run @docker@ with the given arguments and produce a strict
-- 'BS.ByteString' from its stdout.  Throws a 'ReadProcessException'
-- exception if the process fails.  Logs process's stderr using @$logError@.
readDockerProcess
  :: (MonadUnliftIO m, MonadLogger m)
  => EnvOverride -> Maybe (Path Abs Dir) -> [String] -> m BS.ByteString
readDockerProcess envOverride mpwd args =
  readProcessStdout mpwd envOverride "docker" args
-- | Name of home directory within docker sandbox.
-- ('reset' can be told to preserve this directory when clearing the sandbox.)
homeDirName :: Path Rel Dir
homeDirName = $(mkRelDir "_home/")
-- | Directory where 'stack' executable is bind-mounted in Docker container
hostBinDir :: FilePath
hostBinDir = "/opt/host/bin"
-- | Decode a UTF-8 encoded 'BS.ByteString' into a 'String'.
decodeUtf8 :: BS.ByteString -> String
decodeUtf8 = T.unpack . T.decodeUtf8
-- | Concatenate 'String' fragments into a single 'Text' message for @$log*@.
concatT :: [String] -> Text
concatT pieces = T.pack (concat pieces)
-- | Extract the project root, failing with a friendly error when it is not
-- set.
fromMaybeProjectRoot :: Maybe (Path Abs Dir) -> Path Abs Dir
fromMaybeProjectRoot = maybe (impureThrow CannotDetermineProjectRootException) id
-- | Environment variable that contained the old sandbox ID.
-- Use of this variable is deprecated, and only used to detect old images.
oldSandboxIdEnvVar :: String
oldSandboxIdEnvVar = "DOCKER_SANDBOX_ID"
-- | Options for 'cleanup'.  Each @Maybe Integer@ field enables removal of
-- the corresponding objects when they are at least that many days old;
-- 'Nothing' leaves them untouched.
data CleanupOpts = CleanupOpts
  { dcAction :: !CleanupAction
  , dcRemoveKnownImagesLastUsedDaysAgo :: !(Maybe Integer)
  , dcRemoveUnknownImagesCreatedDaysAgo :: !(Maybe Integer)
  , dcRemoveDanglingImagesCreatedDaysAgo :: !(Maybe Integer)
  , dcRemoveStoppedContainersCreatedDaysAgo :: !(Maybe Integer)
  , dcRemoveRunningContainersCreatedDaysAgo :: !(Maybe Integer) }
  deriving (Show)
-- | Cleanup action: ask the user, act without asking, or only report what
-- would be done.
data CleanupAction = CleanupInteractive
                   | CleanupImmediate
                   | CleanupDryRun
  deriving (Show)
-- | Parsed result of @docker inspect@.
data Inspect = Inspect
  {iiConfig :: ImageConfig      -- ^ parsed @Config@ section
  ,iiCreated :: UTCTime         -- ^ creation time of the object
  ,iiId :: String               -- ^ image/container ID (map key in 'inspects')
  ,iiVirtualSize :: Maybe Integer -- ^ virtual size; treated as bytes by the cleanup report
  }
  deriving (Show)
-- | Parse one element of @docker inspect@ output.
instance FromJSON Inspect where
  parseJSON v =
    parseJSON v >>= \obj ->
      Inspect <$> obj .: "Config"
              <*> obj .: "Created"
              <*> obj .: "Id"
              <*> obj .:? "VirtualSize"
-- | Parsed @Config@ section of @docker inspect@ output.
data ImageConfig = ImageConfig
  {icEnv :: [String]        -- ^ environment variables baked into the image
  ,icEntrypoint :: [String] -- ^ entrypoint command and its arguments
  }
  deriving (Show)
-- | Parse the @Config@ section of @docker inspect@ output.  Both fields may
-- be missing or JSON @null@; either way they default to the empty list.
instance FromJSON ImageConfig where
  parseJSON v = do
    obj <- parseJSON v
    env <- fmap join (obj .:? "Env") .!= []
    entry <- fmap join (obj .:? "Entrypoint") .!= []
    return (ImageConfig env entry)
-- | Function to get command and arguments to run in Docker container.
-- Takes the Docker options, the environment override, the image's 'Inspect'
-- data, and a 'Bool' flag (semantics not visible here — see the call site),
-- and produces a tuple of executable path, arguments, extra environment
-- variable pairs, and extra 'Mount's.
type GetCmdArgs env m
   = (StackM env m, HasConfig env)
  => DockerOpts
  -> EnvOverride
  -> Inspect
  -> Bool
  -> m (FilePath,[String],[(String,String)],[Mount])
| martin-kolinek/stack | src/Stack/Docker.hs | bsd-3-clause | 42,702 | 0 | 40 | 14,268 | 9,454 | 4,793 | 4,661 | 844 | 22 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.EXT.DepthBoundsTest
-- Copyright : (c) Sven Panne 2015
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
-- The <https://www.opengl.org/registry/specs/EXT/depth_bounds_test.txt EXT_depth_bounds_test> extension.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.EXT.DepthBoundsTest (
-- * Enums
gl_DEPTH_BOUNDS_EXT,
gl_DEPTH_BOUNDS_TEST_EXT,
-- * Functions
glDepthBoundsEXT
) where
import Graphics.Rendering.OpenGL.Raw.Tokens
import Graphics.Rendering.OpenGL.Raw.Functions
| phaazon/OpenGLRaw | src/Graphics/Rendering/OpenGL/Raw/EXT/DepthBoundsTest.hs | bsd-3-clause | 774 | 0 | 4 | 91 | 52 | 42 | 10 | 6 | 0 |
{-# LANGUAGE
OverloadedStrings
#-}
module Network.WebDav.Server where
import Control.Monad
import Control.Monad.IO.Class
import Data.ByteString (ByteString)
import qualified Data.ByteString as ByteString
import qualified Data.ByteString.Char8 as Char8
import Data.List
import Data.Traversable
import Network.HTTP.Types.URI
import Network.Wai.Middleware.AddHeaders
import Network.Wai.Middleware.Servant.Options
import Servant
import System.Directory
import Text.XML.Light
import Network.WebDav.API
import Network.WebDav.Constants
import Network.WebDav.Properties
-- | WAI application implementing the WebDAV API.  Every response advertises
-- DAV compliance classes via the @Dav@ header, and OPTIONS preflight
-- requests are answered by @provideOptions@.
webDavServer :: Application
webDavServer =
  addHeaders [("Dav", "1, 2, ordered-collections")] $
  provideOptions webDavAPI $
  serve webDavAPI handlers
  where
    handlers =
      doMkCol
      :<|> doPropFind
      :<|> doGet
      :<|> doPut
      :<|> doDelete
      :<|> doMove
      :<|> doCopy
-- | MOVE: rename the file or directory at the request path to the path given
-- in the @Destination@ header.
doMove::[String]->Maybe String->Handler ()
doMove _ Nothing = error "Missing 'destination' header"
doMove urlPath (Just destination) = liftIO $ do
  sourcePath <- getSafePath urlPath
  destPath <- getSafePath2 destination
  renamePath sourcePath destPath
-- | COPY: copy the file at the request path to the path given in the
-- @Destination@ header.
-- NOTE(review): 'copyFile' only handles regular files; copying a collection
-- (directory) would fail — confirm whether directory COPY is required.
doCopy::[String]->Maybe String->Handler ()
doCopy _ Nothing = error "Missing 'destination' header"
doCopy urlPath (Just destination) = liftIO $ do
  sourcePath <- getSafePath urlPath
  destPath <- getSafePath2 destination
  copyFile sourcePath destPath
-- | PUT: write the request body to the file at the request path.
doPut::[String]->ByteString->Handler ()
doPut urlPath body = liftIO $ do
  target <- getSafePath urlPath
  ByteString.writeFile target body
-- | GET: return the contents of the file at the request path.
--
-- The contents are forced before the handler returns: Prelude's 'readFile'
-- is lazy, which would otherwise keep the file handle open and defer read
-- errors until after the response has started.
doGet::[String]->Handler String
doGet urlPath = liftIO $ do
  filePath <- getSafePath urlPath
  contents <- readFile filePath
  length contents `seq` return contents
-- | DELETE: remove the file or directory (recursively) at the request path.
doDelete::[String]->Handler ()
doDelete urlPath = liftIO $ do
  target <- getSafePath urlPath
  removePathForcibly target
-- | MKCOL: create a new collection (directory) at the request path.
doMkCol::[String]->Handler ()
doMkCol urlPath = liftIO $ do
  target <- getSafePath urlPath
  createDirectory target
-- | PROPFIND: return the requested properties for the object at the request
-- path and, when it is a directory, for its immediate children as well.
doPropFind::[String]->Element->Handler [PropResults]
doPropFind urlPath doc = do
  --TODO - check that the xml path element names are all correct....
  -- The requested property names are the element names two levels below the
  -- request body's root (i.e. the children of its <prop> child).
  let propNames = [qName $ elName x | Elem x <- concat $ map elContent $ [x | Elem x <- elContent doc]]
  let relPath = concat $ map ("/" ++) urlPath
  let fullPath=fileBase++relPath
  -- partial safety check, see notes below....
  when ("/../" `isInfixOf` fullPath) $ error "Invalid path"
  isDir <- liftIO $ doesDirectoryExist fullPath
  isFile <- liftIO $ doesFileExist fullPath
  -- Work out which (relative) paths to report on: the object itself, plus
  -- its direct children when it is a directory; 404 when it doesn't exist.
  files <-
    case (isDir, isFile) of
      (False, False) -> throwError err404
      (False, True) -> return [relPath]
      (True, False) -> do
        fileNames <- liftIO $ listDirectory fullPath
        return $ relPath:(map ((relPath ++ "/") ++) fileNames)
      (True, True) -> error $ "internal logic error, getObject called on object that is both file and dir: " ++ fullPath
  for files $
    liftIO . getPropResults propNames
-- | Map URL path segments to a local 'FilePath' under 'fileBase'.
--
-- Any path containing @/../@ is rejected so a request cannot escape the
-- WebDAV root.  Servant appears to disallow dot segments already, so this
-- check may never trigger; a more thorough approach would normalize the path
-- (removing dots and double slashes) and then verify it still falls under
-- the root, but no suitable pre-written normalization function was found.
getSafePath::[String]->IO FilePath
getSafePath urlPath = do
  let joined = concatMap ("/" ++) urlPath
  when ("/../" `isInfixOf` joined) $ error "Invalid path"
  return (fileBase ++ joined)
-- | Variant of 'getSafePath' for destination paths arriving in request
-- headers.  Header values are URL-encoded and are not vetted by servant's
-- routing, so the @/../@ check is much more important here.  This should
-- probably be thought through a bit more.
getSafePath2::String->IO FilePath
getSafePath2 urlPath = do
  let decoded = Char8.unpack (urlDecode False (Char8.pack urlPath))
      relativePath
        | webBase `isPrefixOf` decoded = drop (length webBase) decoded
        | otherwise = error "destination is not on this webdav server"
      fullPath = fileBase ++ relativePath
  when ("/../" `isInfixOf` fullPath) $ error "Invalid path"
  return fullPath
| jamshidh/webdavServer | src/Network/WebDav/Server.hs | bsd-3-clause | 4,202 | 0 | 18 | 808 | 1,028 | 535 | 493 | 84 | 4 |
module SampleInstances.FirstLastWord.DslInterpreter where
import PolyGraph.Common (OPair(..))
import qualified FreeDSL.GraphBuilder as DSL
import qualified SampleInstances.FirstLastWord as SentenceGraph
-- | Interpret a graph-building program as sentence text, starting from an
-- empty document.
interpretAsFirstLastWordDiGraph :: forall v edata . (Eq v, Show v, Show edata)
                             => DSL.GraphDSL v edata -> SentenceGraph.FLWordText
interpretAsFirstLastWordDiGraph program =
  interpretAsFirstLastWordDiModification program (SentenceGraph.FLWordText "")
-- | Run the graph-building program and append one line per collected edge
-- (@vertex1 edgeData vertex2@, each rendered via 'show') to the existing
-- sentence text.
interpretAsFirstLastWordDiModification :: forall v edata . (Eq v, Show v, Show edata)
                             => DSL.GraphDSL v edata -> SentenceGraph.FLWordText -> SentenceGraph.FLWordText
interpretAsFirstLastWordDiModification program graph =
  SentenceGraph.FLWordText (previousText ++ "\n" ++ unlines edgeLines)
  where
    (_, edgesWithData) = DSL.runDefaultInterpreter program ([], [])
    edgeLines = [show v1 ++ " " ++ show eData ++ " " ++ show v2 | (v1, v2, eData) <- edgesWithData]
    previousText = SentenceGraph.getFLWordTextText graph
| rpeszek/GraphPlay | play/SampleInstances/FirstLastWord/DslInterpreter.hs | bsd-3-clause | 1,114 | 0 | 17 | 247 | 292 | 159 | 133 | -1 | -1 |
module Zero.Sjcl.Hkdf
(
hkdf
, hkdfText
) where
import qualified Data.JSString as JSS
import Data.JSString (JSString)
import qualified Data.Text as T
import Data.Text (Text)
import Zero.Sjcl.BitArray (BitArray)
------------------------------------------------------------------------------
-- | Derive key material via SJCL's HKDF, with the input keying material
-- given as a 'BitArray'.
hkdf :: BitArray -> Int -> BitArray -> Text -> IO BitArray
hkdf ikm niter salt info =
  js_hkdf ikm niter salt (JSS.pack (T.unpack info))
-- | Like 'hkdf', but the input keying material is given as 'Text'.
hkdfText :: Text -> Int -> BitArray -> Text -> IO BitArray
hkdfText ikm niter salt info =
  js_hkdf_string (toJS ikm) niter salt (toJS info)
  where toJS = JSS.pack . T.unpack
------------------------------------------------------------------------------
-- FFI
------------------------------------------------------------------------------
-- Raw binding to @sjcl.misc.hkdf@ with the IKM as an SJCL bit array.
foreign import javascript unsafe "$r = sjcl.misc.hkdf($1,$2,$3,$4);"
  js_hkdf :: BitArray -> Int -> BitArray -> JSString -> IO BitArray
-- Raw binding to @sjcl.misc.hkdf@ with the IKM as a JavaScript string.
foreign import javascript unsafe "$r = sjcl.misc.hkdf($1,$2,$3,$4);"
  js_hkdf_string :: JSString -> Int -> BitArray -> JSString -> IO BitArray
| et4te/zero | src/Zero/Sjcl/Hkdf.hs | bsd-3-clause | 1,101 | 12 | 12 | 185 | 274 | 148 | 126 | 19 | 1 |
--
-- Because commandline options, yeah.
--
module ZeroOpts (Options(..), getOptions) where
import Control.Applicative
import Control.Monad
import Options.Applicative
-- | Parsed command-line options.
data Options = Options {
    o_http :: Int       -- ^ HTTP port to listen on
  , o_zmq :: String     -- ^ 0MQ endpoint workers connect to
  , o_timeout :: Float  -- ^ seconds to wait for a 0MQ reply
  } deriving (Show, Eq)
-- | Parse the process's command line into 'Options' (exits with a usage
-- message on parse failure or @--help@).
getOptions :: IO Options
getOptions = execParser options
-- | Full command-line parser, including @--help@ support and the program
-- header/description shown in usage output.
options :: ParserInfo Options
options = info (helper <*> opts) $
            fullDesc <> header "HTTP -> 0MQ bridge"
                     <> progDesc "Run a HTTP server and fulfill requests by distributing them over 0MQ."
  where
    opts = Options
      <$> readOption ( fields 10010 "http" "PORT" "HTTP port" )
      <*> strOption ( fields "tcp://127.0.0.1:10011" "zmq" "ZMQ_ENDPOINT"
                             "ZMQ endpoint for workers to connect to" )
      <*> readOption ( fields 10 "timeout" "SEC" "How long to wait for ZMQ reply" )
-- | Common modifiers shared by every option: default value (shown in the
-- help text), long flag name, metavariable, and help string.
fields :: Show a => a -> String -> String -> String -> Mod OptionFields a
fields defVal flagName meta helpText =
  value defVal
  <> showDefault
  <> long flagName
  <> metavar meta
  <> help helpText
-- | An option whose argument is parsed with 'Read'.  The parse fails (via
-- 'mzero') unless 'readsPrec' consumes the argument entirely.
-- NOTE(review): 'nullOption' and 'reader' belong to an older
-- optparse-applicative API; current versions use 'option' with
-- 'eitherReader' instead.
readOption :: Read a => Mod OptionFields a -> Parser a
readOption m = nullOption $ reader p <> m
  where p s = case readsPrec 0 s of
                [(n, "")] -> pure n
                _ -> mzero
| element-doo/ekade | code/haskell/src/ZeroOpts.hs | bsd-3-clause | 1,240 | 0 | 12 | 323 | 354 | 182 | 172 | 27 | 2 |
{-# LANGUAGE CPP #-}
module Distribution.Solver.Modular.Cycles (
detectCyclesPhase
) where
import Prelude hiding (cycle)
import Data.Graph (SCC)
import qualified Data.Graph as Gr
import qualified Data.Map as Map
import Distribution.Solver.Modular.Dependency
import Distribution.Solver.Modular.Tree
import qualified Distribution.Solver.Modular.ConflictSet as CS
import Distribution.Solver.Types.PackagePath
-- | Find and reject any solutions that are cyclic.
detectCyclesPhase :: Tree a -> Tree a
detectCyclesPhase = cata checkNode
  where
    -- Every node except 'DoneF' is rebuilt unchanged: cycle checking only
    -- happens once a complete solution has been found, which keeps the
    -- number of (rare) cycle checks to a minimum.
    checkNode :: TreeF a (Tree a) -> Tree a
    checkNode (PChoiceF qpn gr cs) = PChoice qpn gr cs
    checkNode (FChoiceF qfn gr w m cs) = FChoice qfn gr w m cs
    checkNode (SChoiceF qsn gr w cs) = SChoice qsn gr w cs
    checkNode (GoalChoiceF cs) = GoalChoice cs
    checkNode (FailF cs reason) = Fail cs reason
    checkNode (DoneF revDeps) =
      case findCycles revDeps of
        Nothing -> Done revDeps
        Just relSet -> Fail relSet CyclicDependencies
-- | Given the reverse dependency map from a 'Done' node in the tree, check
-- whether the solution is cyclic.  If it is, return the union of the
-- conflict sets of every package in the first cycle found — i.e. all
-- decisions that could potentially break the cycle.
findCycles :: RevDepMap -> Maybe (ConflictSet QPN)
findCycles revDeps =
  case cyclicGroups of
    [] -> Nothing
    (c:_) -> Just (CS.unions (map (varToConflictSet . P) c))
  where
    -- The cyclic strongly connected components are exactly the cycles.
    cyclicGroups :: [[QPN]]
    cyclicGroups = [vs | Gr.CyclicSCC vs <- sccs]
    sccs :: [SCC QPN]
    sccs = Gr.stronglyConnComp
             [(fr, fr, map snd to) | (fr, to) <- Map.toList revDeps]
| kolmodin/cabal | cabal-install/Distribution/Solver/Modular/Cycles.hs | bsd-3-clause | 1,914 | 0 | 12 | 460 | 511 | 280 | 231 | 34 | 7 |
-- | A pair of values of the same type.
data Twin a = Twin a a deriving Show
-- | Apply a function to both components of a 'Twin'.
mapTwin :: (a -> b) -> Twin a -> Twin b
mapTwin f (Twin l r) = Twin (f l) (f r)
-- | A value together with a repetition count.
data Rep a = Rep Int a deriving Show
-- | Expand a 'Rep' into the list it denotes (empty for counts <= 0).
toList :: Rep a -> [a]
toList (Rep count item) = [item | _ <- [1 .. count]]
-- | A labelled value with an optional annotation.
data Option a b = Single a | Option a b deriving Show
-- | Render for humans: the annotation, when present, goes in parentheses.
human :: Option String Int -> String
human (Single label) = label
human (Option label note) = concat [label, " (", show note, ")"]
-- | A home-grown pair type.
data Tuple a b = Tuple a b deriving Show
-- | First component of a 'Tuple'.
fstT :: Tuple a b -> a
fstT (Tuple a _) = a
-- | Second component of a 'Tuple'.
sndT :: Tuple a b -> b
sndT (Tuple _ b) = b
-- | Convert a built-in pair to a 'Tuple'.
toTuple :: (a, b) -> Tuple a b
toTuple (a, b) = Tuple a b
-- | Convert a 'Tuple' back to a built-in pair.
fromTuple :: Tuple a b -> (a, b)
fromTuple (Tuple a b) = (a, b)
-- | A home-grown 'Maybe'.
data MyMaybe a = MyJust a | MyNothing deriving Show
-- | Extract the value, falling back to a default when absent.
myFromMaybe :: a -> MyMaybe a -> a
myFromMaybe _ (MyJust v) = v
myFromMaybe fallback MyNothing = fallback
-- | Case analysis: apply the function when present, otherwise the default.
myMaybe :: b -> (a -> b) -> MyMaybe a -> b
myMaybe _ f (MyJust v) = f v
myMaybe fallback _ MyNothing = fallback
-- | A home-grown 'Either'.
data MyEither a b = MyLeft a | MyRight b deriving Show
-- | Case analysis on 'MyEither'.
myEither :: (a -> c) -> (b -> c) -> MyEither a b -> c
myEither onLeft _ (MyLeft v) = onLeft v
myEither _ onRight (MyRight v) = onRight v
-- | A home-grown list with an infix cons constructor.
data List a = Nil | a :~ List a deriving Show
-- | Map a function over every element of a 'List'.
mapL :: (a -> b) -> List a -> List b
mapL _ Nil = Nil
mapL f (x :~ rest) = f x :~ mapL f rest
| YoshikuniJujo/funpaala | samples/22_adt_poly_rec/adtPoly.hs | bsd-3-clause | 1,186 | 0 | 8 | 314 | 666 | 348 | 318 | 34 | 1 |
{- |
utilties for topic operations
-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ConstraintKinds #-}
module Api.Ops.Topic.Parts where
import Control.Applicative
import Control.Monad.Cont
import Control.Monad.Base (MonadBase)
import Control.Exception.Lifted (throwIO)
import qualified Database.Groundhog as Gh
import qualified Chatless.Model.User as Ur
import qualified Chatless.Model.Topic as Tp
import qualified Chatless.Model.TopicMember as Tm
import qualified Chatless.Model.Message as Msg
import qualified Data.Foldable as Fld
import Api.Ops.Base
import Api.Ops.Topic.Base
import Api.Ops.Topic.Message
-- | Throws a 'TopicOpFailed' ('NotPermitted') exception unless the user's
-- member mode — obtained via the supplied lookup function — allows the
-- operation under the topic's mode.
topicOpModeGuard :: (MonadBase IO m, Functor m) => (TopicOp -> Tp.Topic -> Ur.User -> m Tm.MemberMode) -> TopicOp -> Ur.User -> Tp.Topic -> m ()
topicOpModeGuard opGetMode op user topic = do
    memberMode <- opGetMode op topic user
    unless (topicOpAllowed op (Tp.topicMode topic) memberMode) $
        throwIO (TopicOpFailed op NotPermitted (Tp.getRefFromTopic topic))
-- | Throws an exception if the user is not a member of the topic or does
-- not have permission to perform the operation.
--
-- This is just 'topicOpModeGuard' using 'opGetEffectiveMode'.
topicOpPermitGuard :: (MonadBase IO m, Functor m, Gh.PersistBackend m) => TopicOp -> Ur.User -> Tp.Topic -> m ()
topicOpPermitGuard = topicOpModeGuard opGetEffectiveMode
-- | The caller's effective member mode: 'Tm.modeCreator' when the user
-- created the topic, otherwise the stored membership's mode.  Throws a
-- 'TopicOpFailed' with 'CallerNotMember' when no membership exists.
opGetEffectiveMode :: (MonadBase IO m, Functor m, Gh.PersistBackend m) => TopicOp -> Tp.Topic -> Ur.User -> m Tm.MemberMode
opGetEffectiveMode op topic user =
    if Tp.isUserCreator user topic
        then return Tm.modeCreator
        else opGetTargetMemberMode (const CallerNotMember) op topic (Ur.getRefFromUser user)
-- | Look up the mode of a topic member, throwing a 'TopicOpFailed' (with
-- the failure reason built from the target's ref) when the target is not a
-- member.
opGetTargetMemberMode :: (MonadBase IO m, Functor m, Gh.PersistBackend m) => (Ur.UserRef -> TopicOpFailureReason) -> TopicOp -> Tp.Topic -> Ur.UserRef -> m Tm.MemberMode
opGetTargetMemberMode fReason op topic targetRef = do
    maybeMember <- Gh.getBy (Tm.TargetMemberKey topicRef targetRef)
    Tm.memberMode <$> throwOrReturn notMemberErr maybeMember
    where
        topicRef = Tp.getRefFromTopic topic
        notMemberErr = TopicOpFailed op (fReason targetRef) topicRef
-- | Add a user to a topic, throwing a 'TopicOpFailed' with
-- 'TargetAlreadyMember' when a membership already exists; otherwise returns
-- the newly created member's mode.
opAddMember :: MonadMessages m => TopicOp -> (Tp.TopicMode -> Tm.MemberMode) -> (Tm.MemberMode -> Msg.MsgContent) -> Ur.User -> Tp.Topic -> Ur.UserRef -> m Tm.MemberMode
opAddMember op getMMode mkMsg caller topic targetRef = do
    res <- opInsertMember op getMMode mkMsg caller topic targetRef
    case res of
        Right newMode -> return newMode
        Left _ -> throwIO (TopicOpFailed op (TargetAlreadyMember targetRef) (Tp.getRefFromTopic topic))
-- | Like 'topicOpPermitGuard', but uses 'opGetEnsuredMode', which inserts a
-- membership for the caller (when one does not exist) before the check.
topicOpEnsuredPermitGuard :: MonadMessages m => TopicOp -> Ur.User -> Tp.Topic -> m ()
topicOpEnsuredPermitGuard = topicOpModeGuard opGetEnsuredMode
-- | Works like 'opGetEffectiveMode', except that when the user is not the
-- creator it calls 'opEnsureMember' — so a missing membership is created
-- rather than reported as an error.
opGetEnsuredMode :: MonadMessages m => TopicOp -> Tp.Topic -> Ur.User -> m Tm.MemberMode
opGetEnsuredMode op topic user =
    if Tp.isUserCreator user topic
        then return Tm.modeCreator
        else opEnsureMember op user topic
-- | Ensure the caller is a member of the topic, joining with
-- 'Tm.joinerMode' when necessary; returns the existing or newly created
-- member mode.
opEnsureMember :: MonadMessages m => TopicOp -> Ur.User -> Tp.Topic -> m Tm.MemberMode
opEnsureMember op caller topic = do
    res <- opInsertMember op Tm.joinerMode Msg.MsgUserJoined caller topic (Ur.getRefFromUser caller)
    return (either id id res)
-- | Attempts to add a member to the topic.  If the user is already a member,
-- fails with the existing membership's mode ('Left'); otherwise succeeds
-- with the created member's mode ('Right').  In the latter case it also
-- writes a message describing the new member into the topic.
opInsertMember :: MonadMessages m => TopicOp -> (Tp.TopicMode -> Tm.MemberMode) -> (Tm.MemberMode -> Msg.MsgContent) -> Ur.User -> Tp.Topic -> Ur.UserRef -> m (Either Tm.MemberMode Tm.MemberMode)
opInsertMember op getMMode mkMsg caller topic targetRef = Gh.getBy (Tm.TargetMemberKey tr targetRef) >>= maybe inner (return . Left . Tm.memberMode)
    where
        tr = Tp.getRefFromTopic topic
        -- the new member's mode is derived from the topic's current mode
        newMode = getMMode (Tp.topicMode topic)
        -- runs only when no membership record exists yet
        inner = do
            Gh.insert_ $ Tm.Member tr targetRef newMode
            opWriteMessage op caller topic $ mkMsg newMode
            return $ Right newMode
-- * constructing operations
-- | Common skeleton for topic operations: check the caller's permission,
-- determine whether anything actually changes, and if so apply the change
-- and record a message describing it in the topic.
operateTopic :: MonadMessages m
             => Ur.User -- ^ caller
             -> Tp.Topic -- ^ topic to operate in (lol)
             -> TopicOp -- ^ the operation that is being performed
             -> (v -> Msg.MsgContent) -- ^ what message to send for the change
             -> m (Maybe v) -- ^ if the value is being changed, should produce the new one
             -> (v -> m ()) -- ^ action to write the new value
             -> m ()
operateTopic caller topic op mkMessageBody changeCheck performUpdate = do
    topicOpPermitGuard op caller topic
    maybeNewVal <- changeCheck
    -- only act when there is actually a new value
    case maybeNewVal of
        Nothing -> return ()
        Just newVal -> do
            performUpdate newVal
            opWriteMessage op caller topic (mkMessageBody newVal)
-- | 'operateTopic' specialized to the case where the change test is a plain
-- 'Maybe' value rather than a monadic action.
operateTopicSimple :: MonadMessages m
                   => Ur.User
                   -> Tp.Topic
                   -> TopicOp
                   -> (v -> Msg.MsgContent)
                   -> Maybe v -- ^ simplified value change test
                   -> (v -> m ())
                   -> m ()
operateTopicSimple caller topic op mkMsg mNewVal =
    operateTopic caller topic op mkMsg (return mNewVal)
-- | 'operateTopic' specialized to comparing an old and a new value: the
-- update action (and its message) only run when the two values differ.
operateTopicIfChanged :: (Eq v, MonadMessages m)
                      => Ur.User
                      -> Tp.Topic
                      -> TopicOp
                      -> (v -> Msg.MsgContent) -- ^ message to send only after action is performed
                      -> v -- ^ old value
                      -> v -- ^ new value
                      -> m () -- ^ action to perform
                      -> m ()
operateTopicIfChanged caller topic op mkMessage old new action =
    operateTopicSimple caller topic op mkMessage (passNewIfChanged old new) (const action)
| raptros/chatless-hs | src/Api/Ops/Topic/Parts.hs | bsd-3-clause | 6,817 | 0 | 15 | 1,524 | 1,541 | 800 | 741 | 84 | 1 |
module Language.Interpreter.StdLib.Shapes
( addShapesStdLib
)
where
import Control.Monad.Except
import Gfx.Context ( drawShape )
import Language.Ast ( Value(Symbol, Null, Number) )
import Language.Interpreter.Types ( InterpreterProcess
, setBuiltIn
, withGfxCtx
)
-- | Register the @shape@ builtin with the interpreter.
addShapesStdLib :: InterpreterProcess ()
addShapesStdLib = setBuiltIn "shape" shape
-- | Builtin @shape@: expects a shape-name symbol plus three numeric
-- arguments; draws the shape and evaluates to 'Null'.  Any other argument
-- list is an error.
shape :: [Value] -> InterpreterProcess Value
shape [Symbol name, Number x, Number y, Number z] = do
  withGfxCtx $ \ctx -> drawShape ctx name x y z
  return Null
shape _ = throwError "Wrong number of arguments to shape function"
| rumblesan/proviz | src/Language/Interpreter/StdLib/Shapes.hs | bsd-3-clause | 830 | 0 | 12 | 322 | 179 | 100 | 79 | 15 | 2 |
module RBrun where
-- Grafische weergave van rood-zwart bomen. Werkt alleen voor *binaire* bomen.
--
-- Jan Kuper, 5 mei 2008
-- ============================================================================
import FPPrac.Events
import FPPrac.Graphics
import RBgraphics
import Prelude
-- ============= types ========================================================
-- RBnode c v ts: c=colour, v=value, ts=subtrees (RBnode comes from RBgraphics)
-- | Program state: the display mode flag plus the trees being shown.
data StateTp = StateTp { mode :: Bool      -- ^ toggled by the 'm' key (see 'doE')
                       , rbts :: [RbTreeG] -- ^ trees currently displayed
                       }
-- | Initial state: mode off, three copies of the example tree.
initstate = StateTp { mode = False
                    , rbts = [ exampleTree, exampleTree, exampleTree ]
                    }
-- | Install the event handler and draw the initial trees in a window titled
-- "RBrun".  The trailing 25 is presumably the frame/tick rate — confirm
-- against FPPrac.Events.
main = installEventHandler "RBrun" doE initstate (drawTrees m 200 ts) 25
  where
    StateTp { mode = m, rbts = ts} = initstate
-- ============= event handler ================================================
-- | Pressing @m@ toggles the display mode and redraws every tree; all other
-- input leaves the state unchanged and produces no output.
doE :: StateTp -> Input -> (StateTp, [Output])
doE s (KeyIn 'm') = (toggled, [ScreenClear, DrawPicture picture])
  where
    toggled = s {mode = not (mode s)}
    picture = drawTrees (mode toggled) 200 (rbts toggled)
doE s _ = (s, [])
-- ====== example tree ========================================================
-- Note: this tree is only meant to illustrate the graphical rendering;
-- it does *not* satisfy the red-black invariant.
exampleTree = RBnode black "9"
                [ RBnode red "99"
                    [ RBnode red "99"
                        [ RBnode black "9" []
                        , RBnode black "99" []
                        ]
                    , RBnode red "ii"
                        [ RBnode black "99" []
                        , RBnode black "9" []
                        ]
                    ]
                , RBnode red "k"
                    [ RBnode black "ll" [],
                      RBnode black "m"
                        [ RBnode red "nn"
                            [ RBnode red "q" [ RBnode black "nn" []
                                             , RBnode black "q" []
                                             ]
                            , RBnode red "r" []
                            ]
                        , RBnode red "pp"
                            [ RBnode black "r" [ RBnode red "nn" []
                                               , RBnode (dark $ dark white) "" []
                                               ]
                            , RBnode black "r" [ RBnode red "nn" []
                                               , RBnode (dark white) "" []
                                               ]
                            ]
                        ]
                    ]
                ]
| christiaanb/fpprac | examples/RBrun.hs | bsd-3-clause | 3,057 | 0 | 18 | 1,674 | 519 | 281 | 238 | 34 | 1 |
module WebServer.Server (runServer
, URLParams
, POSTParams
, Render
) where
import Network.Socket hiding (send, sendTo, recv, recvFrom)
import Network.Socket.ByteString
import qualified Data.ByteString.Lazy as B
-- | Extra header key\/value pairs attached to a response.
type HTTPParams = [(String, String)]
-- | Whether this response is the final rendering step.
type ISFinalView = Bool
-- | Numeric HTTP status code, e.g. 200 or 404.
type HTTPStatus = Int
-- | MIME type split as (type, subtype).
-- NOTE(review): not referenced anywhere in the code visible here.
type HTTPContentType = (String, String)
-- | Body, status, headers and final-view flag of a response.
-- NOTE(review): bare 'ByteString' is not in scope in this module (only
-- @Data.ByteString.Lazy@ qualified as @B@), and 'renderUnhandeledStatus'
-- builds a 'String' body for this first component -- one of the two must
-- be wrong; verify the intended body type.
type HTTPResponse = (IO ByteString, HTTPStatus, HTTPParams, ISFinalView)
-- | Standard reason phrase for an HTTP status code.
--
-- Covers the common 1xx-5xx codes; any code not listed now maps to
-- \"Unknown Status\".  (The original had no catch-all clause, so an
-- unlisted code -- which callers such as 'renderUnhandeledStatus' can
-- pass freely -- crashed with a pattern-match failure.)
httpStatusCode :: HTTPStatus -> String
httpStatusCode 100 = "Continue"
httpStatusCode 101 = "Switching Protocols"
httpStatusCode 102 = "Processing"
httpStatusCode 118 = "Connection timed out"
httpStatusCode 200 = "OK"
httpStatusCode 201 = "Created"
httpStatusCode 202 = "Accepted"
httpStatusCode 203 = "Non-Authoritative Information"
httpStatusCode 204 = "No Content"
httpStatusCode 205 = "Reset Content"
httpStatusCode 206 = "Partial Content"
httpStatusCode 207 = "Multi-Status"
httpStatusCode 210 = "Content Different"
httpStatusCode 226 = "IM Used"
httpStatusCode 300 = "Multiple Choices"
httpStatusCode 301 = "Moved Permanently"
httpStatusCode 302 = "Moved Temporarily"
httpStatusCode 303 = "See Other"
httpStatusCode 304 = "Not Modified"
httpStatusCode 305 = "Use Proxy"
httpStatusCode 307 = "Temporary Redirect"
httpStatusCode 310 = "Too many Redirects"
httpStatusCode 400 = "Bad Request"
httpStatusCode 401 = "Unauthorized"
httpStatusCode 402 = "Payment Required"
httpStatusCode 403 = "Forbidden"
httpStatusCode 404 = "Not Found"
httpStatusCode 405 = "Method Not Allowed"
httpStatusCode 406 = "Not Acceptable"
httpStatusCode 407 = "Proxy Authentication Required"
httpStatusCode 408 = "Request Time-out"
httpStatusCode 409 = "Conflict"
httpStatusCode 410 = "Gone"
httpStatusCode 411 = "Length Required"
httpStatusCode 412 = "Precondition Failed"
httpStatusCode 413 = "Request Entity Too Large"
httpStatusCode 414 = "Request URI Too Long"
httpStatusCode 415 = "Unsupported Media Type"
httpStatusCode 416 = "Requested Range Unsatisfiable"
httpStatusCode 417 = "Expectation failed"
httpStatusCode 418 = "I'm a teapot"
httpStatusCode 422 = "Unprocessable entity"
httpStatusCode 423 = "Locked"
httpStatusCode 424 = "Method failure"
httpStatusCode 425 = "Unordered Collection"
httpStatusCode 426 = "Retry With"
httpStatusCode 449 = "Blocked By Windows Parental Controls"
httpStatusCode 450 = "Unrecoverable Error"
httpStatusCode 499 = "Client has closed connection"
httpStatusCode 500 = "Internal Server Error"
httpStatusCode 501 = "Not Implemented"
httpStatusCode 502 = "Bad Gateway"
httpStatusCode 503 = "Service Unavailable"
httpStatusCode 504 = "Gateway Time-out"
httpStatusCode 505 = "HTTP Version Not Supported"
httpStatusCode 506 = "Variant Also Negociate"
httpStatusCode 507 = "Insufficient Storage"
httpStatusCode 508 = "Loop Detected"
httpStatusCode 509 = "Bandwidth Limit Exceeded"
httpStatusCode 510 = "Not Extended"
httpStatusCode _   = "Unknown Status"
-- | Build a minimal HTML error page for a status code that has no
-- dedicated handler: (body, status, no extra headers, final view).
-- NOTE(review): the body built here is a 'String', while 'HTTPResponse'
-- declares its first component as @IO ByteString@ -- this cannot
-- typecheck as written; confirm which type is intended.
renderUnhandeledStatus :: HTTPStatus -> HTTPResponse
renderUnhandeledStatus status = ("<!doctype html><html>" ++
                                 "<head>" ++
                                 "<meta charset=\"utf-8\"/>" ++
                                 "<title>" ++ statusText ++ "</title>" ++
                                 "</head>" ++
                                 "<body>" ++
                                 "<h1 style=\"text-align: center;\">" ++ statusText ++ "</h1>" ++
                                 "<hr />" ++
                                 "<p style=\"text-align: center;\">Flaskell</p>" ++
                                 "</body>" ++
                                 "</html>",
                                 status,
                                 [],
                                 True
                                )
    where
        -- e.g. "404 / Not Found"
        statusText = (show status) ++ " / " ++ (httpStatusCode status)
-- | Serve a single accepted connection: read up to 1024 bytes of request
-- data, render a response for it, send it back, then close the socket.
-- NOTE(review): the @Sock.@ qualifier has no matching qualified import in
-- this module, and 'renderReponse' / 'Route' are not defined in the code
-- visible here -- this module does not compile as shown; verify imports.
-- The @addr@ component of the pair is accepted but unused.
parseRequest :: (Socket, Sock.HostName) -> Route -> IO ()
parseRequest (handle, addr) routes = Sock.recv handle 1024 >>= renderReponse routes >>= Sock.send handle >>= close
    where
        -- close ignores the byte count returned by send
        close a = Sock.sClose handle
-- | Accept connections forever, handing each one to 'parseRequest'.
startConn :: Socket -> Route -> IO ()
startConn socket routes = forever $ Sock.accept socket >>= parseRequest routes
-- | Open a listening socket on the given port and start the accept loop.
-- 'withSocketsDo' is a no-op on POSIX but required for Windows sockets.
-- NOTE(review): 'listenOn' / the 'PortNumber' wrapper come from the old
-- @network@ package's Network module, which is not imported here -- confirm.
runServer :: PortNumber -> Route -> IO ()
runServer port routes = withSocketsDo $ do
    sock <- listenOn $ PortNumber port
    startConn sock routes
| davbaumgartner/flaskell | src/WebServer/Server.hs | bsd-3-clause | 3,935 | 30 | 19 | 668 | 930 | 484 | 446 | 98 | 1 |
{-
(c) The AQUA Project, Glasgow University, 1994-1998
\section[ErrsUtils]{Utilities for error reporting}
-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE BangPatterns #-}
module ErrUtils (
-- * Basic types
Validity(..), andValid, allValid, isValid, getInvalids,
Severity(..),
-- * Messages
ErrMsg, errMsgDoc,
ErrDoc, errDoc, errDocImportant, errDocContext, errDocSupplementary,
WarnMsg, MsgDoc,
Messages, ErrorMessages, WarningMessages,
unionMessages,
errMsgSpan, errMsgContext,
errorsFound, isEmptyMessages,
isWarnMsgFatal,
-- ** Formatting
pprMessageBag, pprErrMsgBagWithLoc,
pprLocErrMsg, printBagOfErrors,
formatErrDoc,
-- ** Construction
emptyMessages, mkLocMessage, mkLocMessageAnn, makeIntoWarning,
mkErrMsg, mkPlainErrMsg, mkErrDoc, mkLongErrMsg, mkWarnMsg,
mkPlainWarnMsg,
warnIsErrorMsg, mkLongWarnMsg,
-- * Utilities
doIfSet, doIfSet_dyn,
-- * Dump files
dumpIfSet, dumpIfSet_dyn, dumpIfSet_dyn_printer,
mkDumpDoc, dumpSDoc,
-- * Issuing messages during compilation
putMsg, printInfoForUser, printOutputForUser,
logInfo, logOutput,
errorMsg, warningMsg,
fatalErrorMsg, fatalErrorMsg', fatalErrorMsg'',
compilationProgressMsg,
showPass, withTiming,
debugTraceMsg,
ghcExit,
prettyPrintGhcErrors,
) where
#include "HsVersions.h"
import Bag
import Exception
import Outputable
import Panic
import SrcLoc
import DynFlags
import System.Directory
import System.Exit ( ExitCode(..), exitWith )
import System.FilePath ( takeDirectory, (</>) )
import Data.List
import qualified Data.Set as Set
import Data.IORef
import Data.Maybe ( fromMaybe )
import Data.Monoid ( mappend )
import Data.Ord
import Data.Time
import Control.Monad
import Control.Monad.IO.Class
import System.IO
import GHC.Conc ( getAllocationCounter )
import System.CPUTime
-------------------------
type MsgDoc = SDoc

-------------------------

-- | The outcome of a validity check.
data Validity
  = IsValid            -- ^ Everything is fine
  | NotValid MsgDoc    -- ^ A problem, and some indication of why

-- | Did the check succeed?
isValid :: Validity -> Bool
isValid (NotValid {}) = False
isValid IsValid       = True

-- | Keep the first failure; succeed only when both arguments succeed.
andValid :: Validity -> Validity -> Validity
andValid IsValid v = v
andValid v       _ = v

-- | If they aren't all valid, return the first failure.
allValid :: [Validity] -> Validity
allValid = foldr andValid IsValid

-- | The reasons carried by every 'NotValid' in the list.
getInvalids :: [Validity] -> [MsgDoc]
getInvalids vs = [d | NotValid d <- vs]
-- -----------------------------------------------------------------------------
-- Basic error messages: just render a message with a source location.
-- | All diagnostics collected so far: warnings and errors, kept apart.
type Messages = (WarningMessages, ErrorMessages)
type WarningMessages = Bag WarnMsg
type ErrorMessages = Bag ErrMsg
-- | Combine two collections pointwise; 'Bag' union is cheap and
-- order-insensitive.
unionMessages :: Messages -> Messages -> Messages
unionMessages (warns1, errs1) (warns2, errs2) =
  (warns1 `unionBags` warns2, errs1 `unionBags` errs2)
-- | A single diagnostic (error or warning) with its source location,
-- name-printing context and severity.
data ErrMsg = ErrMsg {
        errMsgSpan        :: SrcSpan,
        errMsgContext     :: PrintUnqualified,
        errMsgDoc         :: ErrDoc,
        -- | This has the same text as errDocImportant . errMsgDoc.
        errMsgShortString :: String,
        errMsgSeverity    :: Severity,
        errMsgReason      :: WarnReason
        }
        -- The SrcSpan is used for sorting errors into line-number order
-- | Categorise error msgs by their importance.  This is so each section can
-- be rendered visually distinct.  See Note [Error report] for where these come
-- from.
data ErrDoc = ErrDoc {
        -- | Primary error msg.
        errDocImportant :: [MsgDoc],
        -- | Context e.g. \"In the second argument of ...\".
        errDocContext :: [MsgDoc],
        -- | Supplementary information, e.g. \"Relevant bindings include ...\".
        errDocSupplementary :: [MsgDoc]
        }
-- | Plain-function alias for the 'ErrDoc' constructor.
errDoc :: [MsgDoc] -> [MsgDoc] -> [MsgDoc] -> ErrDoc
errDoc = ErrDoc
-- | Warnings use the same representation as errors; only the
-- 'Severity' differs.
type WarnMsg = ErrMsg
-- | How important a logged message is, which also controls how it is
-- rendered (see 'mkLocMessageAnn').
data Severity
  = SevOutput
  | SevFatal
  | SevInteractive
  | SevDump
    -- ^ Log messages intended for compiler developers
    -- No file/line/column stuff
  | SevInfo
    -- ^ Log messages intended for end users.
    -- No file/line/column stuff.
  | SevWarning
  | SevError
    -- ^ SevWarning and SevError are used for warnings and errors
    --   o The message has a file/line/column heading,
    --     plus "warning:" or "error:",
    --     added by mkLocMessage
    --   o Output is intended for end users
-- Debug-only instance: prints just the cached primary message text,
-- without location or context.
instance Show ErrMsg where
    show em = errMsgShortString em
-- | Lay out a bag of message docs vertically, separated by blank lines.
pprMessageBag :: Bag MsgDoc -> SDoc
pprMessageBag msgs = vcat (punctuate blankLine (bagToList msgs))
-- | 'mkLocMessageAnn' without an annotation.
mkLocMessage :: Severity -> SrcSpan -> MsgDoc -> MsgDoc
mkLocMessage = mkLocMessageAnn Nothing
mkLocMessageAnn :: Maybe String -> Severity -> SrcSpan -> MsgDoc -> MsgDoc
-- Always print the location, even if it is unhelpful. Error messages
-- are supposed to be in a standard format, and one without a location
-- would look strange. Better to say explicitly "<no location info>".
mkLocMessageAnn ann severity locn msg
    = sdocWithDynFlags $ \dflags ->
      let locn' = if gopt Opt_ErrorSpans dflags
                  then ppr locn
                  else ppr (srcSpanStart locn)
      in bold (hang (locn' <> colon <+> sevInfo <> optAnn) 4 msg)
  where
    -- Add prefixes, like  Foo.hs:34: warning:
    --                     <the warning message>
    -- NB: sevColor is referenced inside its own defining tuple; laziness
    -- makes this recursive binding well-defined.
    (sevInfo, sevColor) =
      case severity of
        SevWarning ->
          (coloured sevColor (text "warning:"), colBold `mappend` colMagentaFg)
        SevError ->
          (coloured sevColor (text "error:"), colBold `mappend` colRedFg)
        SevFatal ->
          (coloured sevColor (text "fatal:"), colBold `mappend` colRedFg)
        _ ->
          (empty, mempty)
    -- Add optional information
    optAnn = case ann of
      Nothing -> text ""
      Just i -> text " [" <> coloured sevColor (text i) <> text "]"
-- | Downgrade an error message to a warning carrying the given reason.
makeIntoWarning :: WarnReason -> ErrMsg -> ErrMsg
makeIntoWarning reason err = err
    { errMsgSeverity = SevWarning
    , errMsgReason = reason }
-- -----------------------------------------------------------------------------
-- Collecting up messages for later ordering and printing.
-- | Shared worker for all the constructors below; also caches the
-- rendered primary text in 'errMsgShortString'.
mk_err_msg :: DynFlags -> Severity -> SrcSpan -> PrintUnqualified -> ErrDoc -> ErrMsg
mk_err_msg dflags sev locn print_unqual doc
 = ErrMsg { errMsgSpan = locn
          , errMsgContext = print_unqual
          , errMsgDoc = doc
          , errMsgShortString = showSDoc dflags (vcat (errDocImportant doc))
          , errMsgSeverity = sev
          , errMsgReason = NoReason }
mkErrDoc :: DynFlags -> SrcSpan -> PrintUnqualified -> ErrDoc -> ErrMsg
mkErrDoc dflags = mk_err_msg dflags SevError
mkLongErrMsg, mkLongWarnMsg :: DynFlags -> SrcSpan -> PrintUnqualified -> MsgDoc -> MsgDoc -> ErrMsg
-- ^ A long (multi-line) error message
mkErrMsg, mkWarnMsg :: DynFlags -> SrcSpan -> PrintUnqualified -> MsgDoc -> ErrMsg
-- ^ A short (one-line) error message
mkPlainErrMsg, mkPlainWarnMsg :: DynFlags -> SrcSpan -> MsgDoc -> ErrMsg
-- ^ Variant that doesn't care about qualified/unqualified names
mkLongErrMsg dflags locn unqual msg extra = mk_err_msg dflags SevError locn unqual (ErrDoc [msg] [] [extra])
mkErrMsg dflags locn unqual msg = mk_err_msg dflags SevError locn unqual (ErrDoc [msg] [] [])
mkPlainErrMsg dflags locn msg = mk_err_msg dflags SevError locn alwaysQualify (ErrDoc [msg] [] [])
mkLongWarnMsg dflags locn unqual msg extra = mk_err_msg dflags SevWarning locn unqual (ErrDoc [msg] [] [extra])
mkWarnMsg dflags locn unqual msg = mk_err_msg dflags SevWarning locn unqual (ErrDoc [msg] [] [])
mkPlainWarnMsg dflags locn msg = mk_err_msg dflags SevWarning locn alwaysQualify (ErrDoc [msg] [] [])
----------------
emptyMessages :: Messages
emptyMessages = (emptyBag, emptyBag)
-- | True when there are neither warnings nor errors.
isEmptyMessages :: Messages -> Bool
isEmptyMessages (warns, errs) = isEmptyBag warns && isEmptyBag errs
-- | The error reported when @-Werror@ promotes a warning to a failure.
warnIsErrorMsg :: DynFlags -> ErrMsg
warnIsErrorMsg dflags
  = mkPlainErrMsg dflags noSrcSpan (text "\nFailing due to -Werror.")
-- | Only errors stop compilation; warnings are ignored here.
errorsFound :: DynFlags -> Messages -> Bool
errorsFound _dflags (_warns, errs) = not (isEmptyBag errs)
-- | Emit every message in the bag through the session's 'log_action',
-- sorted into source-location order first.
printBagOfErrors :: DynFlags -> Bag ErrMsg -> IO ()
printBagOfErrors dflags bag_of_errors
  = sequence_ [ let style = mkErrStyle dflags unqual
                in log_action dflags dflags reason sev s style (formatErrDoc dflags doc)
              | ErrMsg { errMsgSpan = s,
                         errMsgDoc = doc,
                         errMsgSeverity = sev,
                         errMsgReason = reason,
                         errMsgContext = unqual } <- sortMsgBag (Just dflags)
                                                               bag_of_errors ]
-- | Render the three sections of an 'ErrDoc'; when more than one section
-- is non-empty each gets a bullet so they read as separate items.
formatErrDoc :: DynFlags -> ErrDoc -> SDoc
formatErrDoc dflags (ErrDoc important context supplementary)
  = case msgs of
        [msg] -> vcat msg
        _ -> vcat $ map starred msgs
    where
    -- drop empty docs, then drop sections that became empty
    msgs = filter (not . null) $ map (filter (not . Outputable.isEmpty dflags))
        [important, context, supplementary]
    starred = (bullet<+>) . vcat
    bullet = text $ if DynFlags.useUnicode dflags then "•" else "*"
-- | Location-sorted rendering of every message in the bag.
pprErrMsgBagWithLoc :: Bag ErrMsg -> [SDoc]
pprErrMsgBagWithLoc bag = [ pprLocErrMsg item | item <- sortMsgBag Nothing bag ]
-- | Render one message with its location heading, using the message's
-- own name-printing context.
pprLocErrMsg :: ErrMsg -> SDoc
pprLocErrMsg (ErrMsg { errMsgSpan = s
                     , errMsgDoc = doc
                     , errMsgSeverity = sev
                     , errMsgContext = unqual })
  = sdocWithDynFlags $ \dflags ->
    withPprStyle (mkErrStyle dflags unqual) $
    mkLocMessage sev s (formatErrDoc dflags doc)
-- | Sort into source-location order; the order is reversed when the
-- (optional) 'DynFlags' ask for reversed errors.
sortMsgBag :: Maybe DynFlags -> Bag ErrMsg -> [ErrMsg]
sortMsgBag dflags = sortBy (maybeFlip $ comparing errMsgSpan) . bagToList
  where maybeFlip :: (a -> a -> b) -> (a -> a -> b)
        maybeFlip
         | fromMaybe False (fmap reverseErrors dflags) = flip
         | otherwise                                   = id
-- | Exit the process with the given code, logging a final error message
-- first when the code is non-zero.
ghcExit :: DynFlags -> Int -> IO ()
ghcExit dflags val
  | val == 0  = exitWith ExitSuccess
  | otherwise = do errorMsg dflags (text "\nCompilation had errors\n\n")
                   exitWith (ExitFailure val)
-- | Run the action only when the flag is set.
doIfSet :: Bool -> IO () -> IO ()
doIfSet flag action = when flag action

-- | Run the action only when the given 'GeneralFlag' is enabled in the
-- 'DynFlags'.
doIfSet_dyn :: DynFlags -> GeneralFlag -> IO () -> IO()
doIfSet_dyn dflags flag action = when (gopt flag dflags) action
-- -----------------------------------------------------------------------------
-- Dumping
-- | Log the document at 'SevDump' (framed by 'mkDumpDoc') when the
-- boolean flag is set; do nothing otherwise.
dumpIfSet :: DynFlags -> Bool -> String -> SDoc -> IO ()
dumpIfSet dflags flag hdr doc
  | not flag   = return ()
  | otherwise  = log_action dflags
                            dflags
                            NoReason
                            SevDump
                            noSrcSpan
                            defaultDumpStyle
                            (mkDumpDoc hdr doc)
-- | a wrapper around 'dumpSDoc'.
-- First check whether the dump flag is set
-- Do nothing if it is unset
dumpIfSet_dyn :: DynFlags -> DumpFlag -> String -> SDoc -> IO ()
dumpIfSet_dyn dflags flag hdr doc
  = when (dopt flag dflags) $ dumpSDoc dflags alwaysQualify flag hdr doc
-- | a wrapper around 'dumpSDoc'.
-- First check whether the dump flag is set
-- Do nothing if it is unset
--
-- Unlike 'dumpIfSet_dyn',
-- has a printer argument but no header argument
dumpIfSet_dyn_printer :: PrintUnqualified
                      -> DynFlags -> DumpFlag -> SDoc -> IO ()
dumpIfSet_dyn_printer printer dflags flag doc
  = when (dopt flag dflags) $ dumpSDoc dflags printer flag "" doc
-- | Frame a dump in a @==== hdr ====@ banner with surrounding blank lines.
mkDumpDoc :: String -> SDoc -> SDoc
mkDumpDoc hdr doc
   = vcat [blankLine,
           line <+> text hdr <+> line,
           doc,
           blankLine]
     where
        line = text (replicate 20 '=')
-- | Write out a dump.
-- If --dump-to-file is set then this goes to a file.
-- otherwise emit to stdout.
--
-- When @hdr@ is empty, we print in a more compact format (no separators and
-- blank lines)
--
-- The 'DumpFlag' is used only to choose the filename to use if @--dump-to-file@
-- is used; it is not used to decide whether to dump the output
dumpSDoc :: DynFlags -> PrintUnqualified -> DumpFlag -> String -> SDoc -> IO ()
dumpSDoc dflags print_unqual flag hdr doc
 = do let mFile = chooseDumpFile dflags flag
          dump_style = mkDumpStyle print_unqual
      case mFile of
            Just fileName
                 -> do
                      let gdref = generatedDumps dflags
                      gd <- readIORef gdref
                      -- Append on later dumps to a file already written
                      -- in this session; truncate on the first one.
                      let append = Set.member fileName gd
                          mode = if append then AppendMode else WriteMode
                      unless append $
                          writeIORef gdref (Set.insert fileName gd)
                      createDirectoryIfMissing True (takeDirectory fileName)
                      handle <- openFile fileName mode
                      -- We do not want the dump file to be affected by
                      -- environment variables, but instead to always use
                      -- UTF8. See:
                      -- https://ghc.haskell.org/trac/ghc/ticket/10762
                      hSetEncoding handle utf8
                      -- With a header, prepend a timestamp to the framed
                      -- dump; with no header, write the doc bare.
                      doc' <- if null hdr
                              then return doc
                              else do t <- getCurrentTime
                                      let d = text (show t)
                                              $$ blankLine
                                              $$ doc
                                      return $ mkDumpDoc hdr d
                      defaultLogActionHPrintDoc dflags handle doc' dump_style
                      hClose handle
            -- write the dump to stdout
            Nothing -> do
              let (doc', severity)
                    | null hdr  = (doc, SevOutput)
                    | otherwise = (mkDumpDoc hdr doc, SevDump)
              log_action dflags dflags NoReason severity noSrcSpan dump_style doc'
-- | Choose where to put a dump file based on DynFlags
--
-- | Decide where (if anywhere) a dump should go: a file is used when
-- @-ddump-to-file@ is on (or for the TH dec file, which always goes to a
-- file) and some prefix is available; otherwise 'Nothing' means stdout.
chooseDumpFile :: DynFlags -> DumpFlag -> Maybe FilePath
chooseDumpFile dflags flag
        | gopt Opt_DumpToFile dflags || flag == Opt_D_th_dec_file
        , Just prefix <- getPrefix
        = Just $ setDir (prefix ++ (beautifyDumpName flag))
        | otherwise
        = Nothing
        where getPrefix
                 -- dump file location is being forced
                 --      by the --ddump-file-prefix flag.
               | Just prefix <- dumpPrefixForce dflags
                  = Just prefix
                 -- dump file location chosen by DriverPipeline.runPipeline
               | Just prefix <- dumpPrefix dflags
                  = Just prefix
                 -- we haven't got a place to put a dump file.
               | otherwise
                  = Nothing
              setDir f = case dumpDir dflags of
                         Just d  -> d </> f
                         Nothing ->       f
-- | Build a nice file name from the name of a 'DumpFlag' constructor:
-- strip the @Opt_D_@ prefix and turn underscores into dashes.  The TH
-- declaration dump has a fixed name.
beautifyDumpName :: DumpFlag -> String
beautifyDumpName Opt_D_th_dec_file = "th.hs"
beautifyDumpName flag =
    case stripPrefix "Opt_D_" str of
        Just suffix -> [ if c == '_' then '-' else c | c <- suffix ]
        Nothing     -> panic ("Bad flag name: " ++ str)
  where
    str = show flag
-- -----------------------------------------------------------------------------
-- Outputting messages from the compiler
-- We want all messages to go through one place, so that we can
-- redirect them if necessary. For example, when GHC is used as a
-- library we might want to catch all messages that GHC tries to
-- output and do something else with them.
-- | Run the action only at (or above) the given verbosity level.
ifVerbose :: DynFlags -> Int -> IO () -> IO ()
ifVerbose dflags val act
  | verbosity dflags >= val = act
  | otherwise               = return ()
errorMsg :: DynFlags -> MsgDoc -> IO ()
errorMsg dflags msg
   = log_action dflags dflags NoReason SevError noSrcSpan (defaultErrStyle dflags) msg
warningMsg :: DynFlags -> MsgDoc -> IO ()
warningMsg dflags msg
   = log_action dflags dflags NoReason SevWarning noSrcSpan (defaultErrStyle dflags) msg
fatalErrorMsg :: DynFlags -> MsgDoc -> IO ()
fatalErrorMsg dflags msg = fatalErrorMsg' (log_action dflags) dflags msg
-- | As 'fatalErrorMsg' but with an explicit 'LogAction'.
fatalErrorMsg' :: LogAction -> DynFlags -> MsgDoc -> IO ()
fatalErrorMsg' la dflags msg =
    la dflags NoReason SevFatal noSrcSpan (defaultErrStyle dflags) msg
-- | Hand the message string to the given 'FatalMessager'.
fatalErrorMsg'' :: FatalMessager -> String -> IO ()
fatalErrorMsg'' fm msg = fm msg
-- | Progress message, shown only at verbosity >= 1.
compilationProgressMsg :: DynFlags -> String -> IO ()
compilationProgressMsg dflags msg
  = ifVerbose dflags 1 $
    logOutput dflags defaultUserStyle (text msg)
-- | Announce a compiler pass ("*** <name>:"), shown at verbosity >= 2.
showPass :: DynFlags -> String -> IO ()
showPass dflags what
  = ifVerbose dflags 2 $
    logInfo dflags defaultUserStyle (text "***" <+> text what <> colon)
-- | Time a compilation phase.
--
-- When timings are enabled (e.g. with the @-v2@ flag), the allocations
-- and CPU time used by the phase will be reported to stderr. Consider
-- a typical usage: @withTiming getDynFlags (text "simplify") force pass@.
-- When timings are enabled the following costs are included in the
-- produced accounting,
--
-- - The cost of executing @pass@ to a result @r@ in WHNF
-- - The cost of evaluating @force r@ to WHNF (e.g. @()@)
--
-- The choice of the @force@ function depends upon the amount of forcing
-- desired; the goal here is to ensure that the cost of evaluating the result
-- is, to the greatest extent possible, included in the accounting provided by
-- 'withTiming'. Often the pass already sufficiently forces its result during
-- construction; in this case @const ()@ is a reasonable choice.
-- In other cases, it is necessary to evaluate the result to normal form, in
-- which case something like @Control.DeepSeq.rnf@ is appropriate.
--
-- To avoid adversely affecting compiler performance when timings are not
-- requested, the result is only forced when timings are enabled.
withTiming :: MonadIO m
           => m DynFlags  -- ^ A means of getting a 'DynFlags' (often
                          -- 'getDynFlags' will work here)
           -> SDoc        -- ^ The name of the phase
           -> (a -> ())   -- ^ A function to force the result
                          -- (often either @const ()@ or 'rnf')
           -> m a         -- ^ The body of the phase to be timed
           -> m a
withTiming getDFlags what force_result action
  = do dflags <- getDFlags
       if verbosity dflags >= 2
          then do liftIO $ logInfo dflags defaultUserStyle
                         $ text "***" <+> what <> colon
                  alloc0 <- liftIO getAllocationCounter
                  start <- liftIO getCPUTime
                  -- bang pattern: the phase runs (to WHNF) inside the
                  -- timed region, then force_result finishes the job
                  !r <- action
                  () <- pure $ force_result r
                  end <- liftIO getCPUTime
                  alloc1 <- liftIO getAllocationCounter
                  -- recall that allocation counter counts down
                  let alloc = alloc0 - alloc1
                  liftIO $ logInfo dflags defaultUserStyle
                      (text "!!!" <+> what <> colon <+> text "finished in"
                       <+> doublePrec 2 (realToFrac (end - start) * 1e-9)
                       <+> text "milliseconds"
                       <> comma
                       <+> text "allocated"
                       <+> doublePrec 3 (realToFrac alloc / 1024 / 1024)
                       <+> text "megabytes")
                  pure r
          -- timings disabled: run the phase untouched (and unforced)
          else action
-- | Debug trace, shown only at (or above) the given verbosity level.
debugTraceMsg :: DynFlags -> Int -> MsgDoc -> IO ()
debugTraceMsg dflags val msg = ifVerbose dflags val $
                               logInfo dflags defaultDumpStyle msg
putMsg :: DynFlags -> MsgDoc -> IO ()
putMsg dflags msg = logInfo dflags defaultUserStyle msg
printInfoForUser :: DynFlags -> PrintUnqualified -> MsgDoc -> IO ()
printInfoForUser dflags print_unqual msg
  = logInfo dflags (mkUserStyle print_unqual AllTheWay) msg
printOutputForUser :: DynFlags -> PrintUnqualified -> MsgDoc -> IO ()
printOutputForUser dflags print_unqual msg
  = logOutput dflags (mkUserStyle print_unqual AllTheWay) msg
logInfo :: DynFlags -> PprStyle -> MsgDoc -> IO ()
logInfo dflags sty msg
  = log_action dflags dflags NoReason SevInfo noSrcSpan sty msg
logOutput :: DynFlags -> PprStyle -> MsgDoc -> IO ()
-- ^ Like 'logInfo' but with 'SevOutput' rather than 'SevInfo'
logOutput dflags sty msg
  = log_action dflags dflags NoReason SevOutput noSrcSpan sty msg
-- | Run the action, rendering any pretty-printable GHC exception
-- (panic\/sorry\/program error) through the pretty-printer; all other
-- exceptions are rethrown untouched.
prettyPrintGhcErrors :: ExceptionMonad m => DynFlags -> m a -> m a
prettyPrintGhcErrors dflags
    = ghandle $ \e -> case e of
                      PprPanic str doc ->
                          pprDebugAndThen dflags panic (text str) doc
                      PprSorry str doc ->
                          pprDebugAndThen dflags sorry (text str) doc
                      PprProgramError str doc ->
                          pprDebugAndThen dflags pgmError (text str) doc
                      _ ->
                          liftIO $ throwIO e
-- | Checks if given 'WarnMsg' is a fatal warning: either its specific
-- warning flag is marked fatal, or (for reasonless warnings) -Werror is on.
isWarnMsgFatal :: DynFlags -> WarnMsg -> Bool
isWarnMsgFatal dflags ErrMsg{errMsgReason = Reason wflag}
  = wopt_fatal wflag dflags
isWarnMsgFatal dflags _ = gopt Opt_WarnIsError dflags
| mettekou/ghc | compiler/main/ErrUtils.hs | bsd-3-clause | 21,656 | 0 | 24 | 6,512 | 4,631 | 2,427 | 2,204 | 365 | 6 |
-- | Wraps the expression submodules.
module Nix.Expr
( module Nix.Expr.Types
, module Nix.Expr.Types.Annotated
, module Nix.Expr.Shorthands
)
where
import Nix.Expr.Types
import Nix.Expr.Shorthands
import Nix.Expr.Types.Annotated
| jwiegley/hnix | src/Nix/Expr.hs | bsd-3-clause | 269 | 0 | 5 | 64 | 50 | 35 | 15 | 7 | 0 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE QuasiQuotes, TemplateHaskell, CPP, GADTs, TypeFamilies, OverloadedStrings, FlexibleContexts, EmptyDataDecls #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module MigrationColumnLengthTest where
import Database.Persist.TH
import qualified Data.Text as T
import Init
-- Defines the VaryingLengths entity; for SQL backends this also
-- generates the "migration" value checked by 'specs'.  field2 is
-- declared varchar(5) to exercise column-length handling.
#ifdef WITH_NOSQL
mkPersist persistSettings [persistUpperCase|
#else
share [mkPersist sqlSettings, mkMigrate "migration"] [persistLowerCase|
#endif
VaryingLengths
    field1 Int
    field2 T.Text sqltype=varchar(5)
|]
-- | The generated schema migration must be idempotent: producing the
-- migration again after it has run yields no further statements.
-- Nothing to check for NoSQL backends, which have no schema migration.
specs :: Spec
specs = describe "Migration" $ do
#ifdef WITH_NOSQL
  return ()
#else
  it "is idempotent" $ db $ do
      again <- getMigration migration
      liftIO $ again @?= []
#endif
| plow-technologies/persistent | persistent-test/src/MigrationColumnLengthTest.hs | mit | 768 | 0 | 9 | 119 | 66 | 42 | 24 | 14 | 1 |
import Control.Monad.State
newtype Supply s a = S (State [s] a)
{-- snippet unwrapS --}
-- | Unwrap the underlying 'State' computation of a 'Supply'.
unwrapS :: Supply s a -> State [s] a
unwrapS (S s) = s

-- Functor/Applicative instances are required for the Monad instance to
-- compile on GHC >= 7.10 (the Applicative-Monad Proposal); they simply
-- delegate to the wrapped State monad.
instance Functor (Supply s) where
    fmap f (S m) = S (fmap f m)

instance Applicative (Supply s) where
    pure = S . pure
    S f <*> S a = S (f <*> a)

instance Monad (Supply s) where
    s >>= m = S (unwrapS s >>= unwrapS . m)
    return = S . return
{-- /snippet unwrapS --}
| binesiyu/ifl | examples/ch15/AltSupply.hs | mit | 271 | 0 | 10 | 65 | 117 | 62 | 55 | 7 | 1 |
{-# LANGUAGE MultiParamTypeClasses, TypeSynonymInstances, FlexibleInstances #-}
{- |
Module : ./Comorphisms/CspCASL2Modal.hs
Copyright : (c) Till Mossakowski and Uni Bremen 2004
License : GPLv2 or higher, see LICENSE.txt
Maintainer : till@informatik.uni-bremen.de
Stability : provisional
Portability : non-portable (imports Logic.Logic)
The embedding comorphism from CspCASL to ModalCASL.
It keeps the CASL part and interprets the CspCASL LTS semantics as
Kripke structure
-}
module Comorphisms.CspCASL2Modal where
import Logic.Logic
import Logic.Comorphism
-- CASL
import CASL.Sign
import CASL.AS_Basic_CASL
import CASL.Morphism
-- CspCASL
import CspCASL.Logic_CspCASL
import CspCASL.SignCSP
import CspCASL.StatAnaCSP (CspBasicSpec)
import CspCASL.Morphism (CspCASLMorphism)
import CspCASL.SymbItems
import CspCASL.Symbol
-- ModalCASL
import Modal.Logic_Modal
import Modal.AS_Modal
import Modal.ModalSign
-- | The identity of the comorphism
data CspCASL2Modal = CspCASL2Modal deriving (Show)
instance Language CspCASL2Modal -- default definition is okay
instance Comorphism CspCASL2Modal
CspCASL () CspBasicSpec CspCASLSen CspSymbItems CspSymbMapItems
CspCASLSign CspCASLMorphism CspSymbol CspRawSymbol ()
Modal () M_BASIC_SPEC ModalFORMULA SYMB_ITEMS SYMB_MAP_ITEMS
MSign ModalMor Symbol RawSymbol () where
sourceLogic CspCASL2Modal = cspCASL
sourceSublogic CspCASL2Modal = ()
targetLogic CspCASL2Modal = Modal
mapSublogic CspCASL2Modal _ = Just ()
map_theory CspCASL2Modal = return . embedTheory mapSen emptyModalSign
map_morphism CspCASL2Modal = return . mapCASLMor emptyModalSign emptyMorExt
map_sentence CspCASL2Modal _ = return . mapSen
mapSen :: CspCASLSen -> ModalFORMULA
mapSen _ = trueForm
| spechub/Hets | Comorphisms/CspCASL2Modal.hs | gpl-2.0 | 1,790 | 0 | 7 | 285 | 280 | 152 | 128 | 32 | 1 |
module InfixDefaultAssoc where
-- NOTE(review): this is a compiler test fixture -- presumably it checks
-- the default (left) associativity applied to the infix chain, so the
-- expression must stay written as a chain; do not simplify it.
-- Parsed left-associatively: (3 + 4) + 4 = 11.
main :: Int
main = 3 + 4 + 4
| roberth/uu-helium | test/correct/InfixDefaultAssoc.hs | gpl-3.0 | 61 | 0 | 6 | 14 | 22 | 13 | 9 | 3 | 1 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="pl-PL">
	<!-- JavaHelp helpset descriptor (Polish locale) for the ZAP
	     "Context Alert Filters" add-on: declares the help map plus the
	     TOC, index, full-text search and favourites navigation views. -->
	<title>Context Alert Filters | ZAP Extension</title>
	<maps>
		<homeID>top</homeID>
		<mapref location="map.jhm"/>
	</maps>
	<view>
		<name>TOC</name>
		<label>Zawartość</label>
		<type>org.zaproxy.zap.extension.help.ZapTocView</type>
		<data>toc.xml</data>
	</view>
	<view>
		<name>Index</name>
		<label>Indeks</label>
		<type>javax.help.IndexView</type>
		<data>index.xml</data>
	</view>
	<view>
		<name>Search</name>
		<label>Szukaj</label>
		<type>javax.help.SearchView</type>
		<data engine="com.sun.java.help.search.DefaultSearchEngine">
			JavaHelpSearch
		</data>
	</view>
	<view>
		<name>Favorites</name>
		<label>Ulubione</label>
		<type>javax.help.FavoritesView</type>
	</view>
</helpset>
</helpset> | veggiespam/zap-extensions | addOns/alertFilters/src/main/javahelp/org/zaproxy/zap/extension/alertFilters/resources/help_pl_PL/helpset_pl_PL.hs | apache-2.0 | 986 | 80 | 66 | 161 | 422 | 213 | 209 | -1 | -1 |
-- | Clean out unneeded spill\/reload instructions.
--
-- Handling of join points
-- ~~~~~~~~~~~~~~~~~~~~~~~
--
-- B1: B2:
-- ... ...
-- RELOAD SLOT(0), %r1 RELOAD SLOT(0), %r1
-- ... A ... ... B ...
-- jump B3 jump B3
--
-- B3: ... C ...
-- RELOAD SLOT(0), %r1
-- ...
--
-- The Plan
-- ~~~~~~~~
-- As long as %r1 hasn't been written to in A, B or C then we don't need
-- the reload in B3.
--
-- What we really care about here is that on the entry to B3, %r1 will
-- always have the same value that is in SLOT(0) (ie, %r1 is _valid_)
--
-- This also works if the reloads in B1\/B2 were spills instead, because
-- spilling %r1 to a slot makes that slot have the same value as %r1.
--
module RegAlloc.Graph.SpillClean (
cleanSpills
) where
import RegAlloc.Liveness
import Instruction
import Reg
import BlockId
import Cmm
import UniqSet
import UniqFM
import Unique
import State
import Outputable
import Platform
import Data.List
import Data.Maybe
import Data.IntSet (IntSet)
import qualified Data.IntSet as IntSet
-- | The identification number of a spill slot.
-- A value is stored in a spill slot when we don't have a free
-- register to hold it.
type Slot = Int
-- | Clean out unneeded spill\/reloads from this top level thing.
cleanSpills
        :: Instruction instr
        => Platform
        -> LiveCmmDecl statics instr
        -> LiveCmmDecl statics instr
cleanSpills platform cmm
        -- iterate 'cleanSpin' to a fixed point, starting at pass 0 with
        -- fresh cleaner state
        = evalState (cleanSpin platform 0 cmm) initCleanS
-- | Do one pass of cleaning: a forward sweep (reloads), a backward sweep
-- (spills), then decide whether another pass could still make progress.
cleanSpin
        :: Instruction instr
        => Platform
        -> Int                          -- ^ Iteration number for the cleaner.
        -> LiveCmmDecl statics instr    -- ^ Liveness annotated code to clean.
        -> CleanM (LiveCmmDecl statics instr)
cleanSpin platform spinCount code
 = do
        -- Initialise count of cleaned spill and reload instructions.
        modify $ \s -> s
                { sCleanedSpillsAcc  = 0
                , sCleanedReloadsAcc = 0
                , sReloadedBy        = emptyUFM }
        code_forward  <- mapBlockTopM (cleanBlockForward platform) code
        code_backward <- cleanTopBackward code_forward
        -- During the cleaning of each block we collected information about
        -- what regs were valid across each jump. Based on this, work out
        -- whether it will be safe to erase reloads after join points for
        -- the next pass.
        collateJoinPoints
        -- Remember how many spill and reload instructions we cleaned in this pass.
        spills  <- gets sCleanedSpillsAcc
        reloads <- gets sCleanedReloadsAcc
        modify $ \s -> s
                { sCleanedCount = (spills, reloads) : sCleanedCount s }
        -- If nothing was cleaned in this pass or the last one
        -- then we're done and it's time to bail out.
        cleanedCount <- gets sCleanedCount
        if take 2 cleanedCount == [(0, 0), (0, 0)]
           then return code
           -- otherwise go around again
           else cleanSpin platform (spinCount + 1) code_backward
-------------------------------------------------------------------------------
-- | Clean out unneeded reload instructions,
-- while walking forward over the code.
cleanBlockForward
:: Instruction instr
=> Platform
-> LiveBasicBlock instr
-> CleanM (LiveBasicBlock instr)
cleanBlockForward platform (BasicBlock blockId instrs)
= do
-- See if we have a valid association for the entry to this block.
jumpValid <- gets sJumpValid
let assoc = case lookupUFM jumpValid blockId of
Just assoc -> assoc
Nothing -> emptyAssoc
instrs_reload <- cleanForward platform blockId assoc [] instrs
return $ BasicBlock blockId instrs_reload
-- | Clean out unneeded reload instructions.
--
-- Walking forwards across the code
-- On a reload, if we know a reg already has the same value as a slot
-- then we don't need to do the reload.
--
cleanForward
        :: Instruction instr
        => Platform
        -> BlockId                  -- ^ the block that we're currently in
        -> Assoc Store              -- ^ two store locations are associated if
                                    --   they have the same value
        -> [LiveInstr instr]        -- ^ acc
        -> [LiveInstr instr]        -- ^ instrs to clean (in backwards order)
        -> CleanM [LiveInstr instr] -- ^ cleaned instrs (in forward order)
cleanForward _ _ _ acc []
        = return acc
-- Rewrite live range joins via spill slots to just a spill and a reg-reg move
-- hopefully the spill will be also be cleaned in the next pass
cleanForward platform blockId assoc acc (li1 : li2 : instrs)
        | LiveInstr (SPILL reg1 slot1) _ <- li1
        , LiveInstr (RELOAD slot2 reg2) _ <- li2
        , slot1 == slot2
        = do
                -- the RELOAD is gone, so count it as cleaned
                modify $ \s -> s { sCleanedReloadsAcc = sCleanedReloadsAcc s + 1 }
                cleanForward platform blockId assoc acc
                        $ li1 : LiveInstr (mkRegRegMoveInstr platform reg1 reg2) Nothing
                        : instrs
cleanForward platform blockId assoc acc (li@(LiveInstr i1 _) : instrs)
        | Just (r1, r2) <- takeRegRegMoveInstr i1
        = if r1 == r2
                -- Erase any left over nop reg reg moves while we're here
                -- this will also catch any nop moves that the previous case
                -- happens to add.
                then cleanForward platform blockId assoc acc instrs
                -- If r1 has the same value as some slots and we copy r1 to r2,
                -- then r2 is now associated with those slots instead
                else do let assoc' = addAssoc (SReg r1) (SReg r2)
                                   $ delAssoc (SReg r2)
                                   $ assoc
                        cleanForward platform blockId assoc' (li : acc) instrs
cleanForward platform blockId assoc acc (li : instrs)
        -- Update association due to the spill.
        | LiveInstr (SPILL reg slot) _ <- li
        = let assoc' = addAssoc (SReg reg) (SSlot slot)
                     $ delAssoc (SSlot slot)
                     $ assoc
          in cleanForward platform blockId assoc' (li : acc) instrs
        -- Clean a reload instr.
        | LiveInstr (RELOAD{}) _ <- li
        = do (assoc', mli) <- cleanReload platform blockId assoc li
             case mli of
              Nothing -> cleanForward platform blockId assoc' acc
                         instrs
              Just li' -> cleanForward platform blockId assoc' (li' : acc)
                          instrs
        -- Remember the association over a jump.
        | LiveInstr instr _ <- li
        , targets <- jumpDestsOfInstr instr
        , not $ null targets
        = do mapM_ (accJumpValid assoc) targets
             cleanForward platform blockId assoc (li : acc) instrs
        -- Writing to a reg changes its value.
        | LiveInstr instr _ <- li
        , RU _ written <- regUsageOfInstr platform instr
        -- nub: a reg written more than once only needs invalidating once
        = let assoc' = foldr delAssoc assoc (map SReg $ nub written)
          in cleanForward platform blockId assoc' (li : acc) instrs
-- | Try and rewrite a reload instruction to something more pleasing
cleanReload
        :: Instruction instr
        => Platform
        -> BlockId
        -> Assoc Store
        -> LiveInstr instr
        -> CleanM (Assoc Store, Maybe (LiveInstr instr))
cleanReload platform blockId assoc li@(LiveInstr (RELOAD slot reg) _)
        -- If the reg we're reloading already has the same value as the slot
        -- then we can erase the instruction outright.
        | elemAssoc (SSlot slot) (SReg reg) assoc
        = do    modify $ \s -> s { sCleanedReloadsAcc = sCleanedReloadsAcc s + 1 }
                return (assoc, Nothing)
        -- If we can find another reg with the same value as this slot then
        -- do a move instead of a reload.
        | Just reg2 <- findRegOfSlot assoc slot
        = do    modify $ \s -> s { sCleanedReloadsAcc = sCleanedReloadsAcc s + 1 }
                let assoc' = addAssoc (SReg reg) (SReg reg2)
                           $ delAssoc (SReg reg)
                           $ assoc
                return ( assoc'
                       , Just $ LiveInstr (mkRegRegMoveInstr platform reg2 reg) Nothing)
        -- Gotta keep this instr.
        | otherwise
        = do    -- Update the association.
                let assoc'
                        = addAssoc (SReg reg) (SSlot slot)
                        -- doing the reload makes reg and slot the same value
                        $ delAssoc (SReg reg)
                        -- reg value changes on reload
                        $ assoc
                -- Remember that this block reloads from this slot.
                accBlockReloadsSlot blockId slot
                return (assoc', Just li)
-- Defensive: 'cleanForward' only ever passes RELOAD instructions here.
cleanReload _ _ _ _
        = panic "RegSpillClean.cleanReload: unhandled instr"
-------------------------------------------------------------------------------
-- | Clean out unneeded spill instructions,
--      while walking backwards over the code.
--
--      If there were no reloads from a slot between a spill and the last one
--      then the slot was never read and we don't need the spill.
--
--      SPILL   r0 -> s1
--      RELOAD  s1 -> r2
--      SPILL   r3 -> s1        <--- don't need this spill
--      SPILL   r4 -> s1
--      RELOAD  s1 -> r5
--
--      Maintain a set of
--              "slots which were spilled to but not reloaded from yet"
--
--      Walking backwards across the code:
--       a) On a reload from a slot, remove it from the set.
--
--       b) On a spill from a slot
--              If the slot is in set then we can erase the spill,
--                      because it won't be reloaded from until after the next
--                      spill.
--
--              otherwise
--                      keep the spill and add the slot to the set
--
--      TODO: This is mostly inter-block
--            we should really be updating the noReloads set as we cross
--            jumps also.
--
--      TODO: generate noReloads from liveSlotsOnEntry
--
cleanTopBackward
        :: Instruction instr
        => LiveCmmDecl statics instr
        -> CleanM (LiveCmmDecl statics instr)

cleanTopBackward cmm
 = case cmm of
        -- Static data contains no spill code; nothing to do.
        CmmData{}
         -> return cmm

        CmmProc info label live sccs
         | LiveInfo _ _ _ liveSlotsOnEntry <- info
         -> do  sccs'   <- mapM (mapSCCM (cleanBlockBackward liveSlotsOnEntry)) sccs
                return  $ CmmProc info label live sccs'


-- | Clean a single basic block, walking backwards over its instructions.
cleanBlockBackward
        :: Instruction instr
        => BlockMap IntSet              -- ^ slots live on entry to each block
        -> LiveBasicBlock instr
        -> CleanM (LiveBasicBlock instr)

cleanBlockBackward liveSlotsOnEntry (BasicBlock blockId instrs)
 = do   instrs_spill    <- cleanBackward liveSlotsOnEntry emptyUniqSet [] instrs
        return  $ BasicBlock blockId instrs_spill
cleanBackward
        :: Instruction instr
        => BlockMap IntSet      -- ^ Slots live on entry to each block
        -> UniqSet Int          -- ^ Slots that have been spilled, but not reloaded from
        -> [LiveInstr instr]    -- ^ acc
        -> [LiveInstr instr]    -- ^ Instrs to clean (in forwards order)
        -> CleanM [LiveInstr instr] -- ^ Cleaned instrs (in backwards order)

cleanBackward liveSlotsOnEntry noReloads acc lis
 = do   -- The (slot -> reloading blocks) map was accumulated during the
        -- forward pass; fetch it once and thread it through the worker.
        reloadedBy      <- gets sReloadedBy
        cleanBackward' liveSlotsOnEntry reloadedBy noReloads acc lis


-- | Worker for 'cleanBackward'; carries the reload map so it is not
--   re-read from the state at every instruction.
cleanBackward'
        :: Instruction instr
        => BlockMap IntSet
        -> UniqFM [BlockId]
        -> UniqSet Int
        -> [LiveInstr instr]
        -> [LiveInstr instr]
        -> State CleanS [LiveInstr instr]

cleanBackward' _ _ _ acc []
        = return acc

cleanBackward' liveSlotsOnEntry reloadedBy noReloads acc (li : instrs)

        -- If nothing ever reloads from this slot then we don't need the spill.
        | LiveInstr (SPILL _ slot) _    <- li
        , Nothing                       <- lookupUFM reloadedBy (SSlot slot)
        = do    modify $ \s -> s { sCleanedSpillsAcc = sCleanedSpillsAcc s + 1 }
                cleanBackward liveSlotsOnEntry noReloads acc instrs

        | LiveInstr (SPILL _ slot) _    <- li
        = if elementOfUniqSet slot noReloads

           -- We can erase this spill because the slot won't be read until
           -- after the next one
           then do
                modify $ \s -> s { sCleanedSpillsAcc = sCleanedSpillsAcc s + 1 }
                cleanBackward liveSlotsOnEntry noReloads acc instrs

           else do
                -- This slot is being spilled to, but we haven't seen any
                -- reloads yet.
                let noReloads'  = addOneToUniqSet noReloads slot
                cleanBackward liveSlotsOnEntry noReloads' (li : acc) instrs

        -- if we reload from a slot then it's no longer unused
        | LiveInstr (RELOAD slot _) _   <- li
        , noReloads'                    <- delOneFromUniqSet noReloads slot
        = cleanBackward liveSlotsOnEntry noReloads' (li : acc) instrs

        -- If a slot is live in a jump target then assume it's reloaded there.
        --
        -- TODO: A real dataflow analysis would do a better job here.
        --       If the target block _ever_ used the slot then we assume
        --       it always does, but if those reloads are cleaned the slot
        --       liveness map doesn't get updated.
        | LiveInstr instr _     <- li
        , targets               <- jumpDestsOfInstr instr
        = do
                let slotsReloadedByTargets
                        = IntSet.unions
                        $ catMaybes
                        $ map (flip lookupBlockMap liveSlotsOnEntry)
                        $ targets

                let noReloads'
                        = foldl' delOneFromUniqSet noReloads
                        $ IntSet.toList slotsReloadedByTargets

                cleanBackward liveSlotsOnEntry noReloads' (li : acc) instrs

        -- some other instruction
        -- NOTE(review): the preceding guard's pattern bindings cannot fail,
        -- so this alternative appears unreachable; with empty 'targets' the
        -- guard above behaves identically to this case anyway.
        | otherwise
        = cleanBackward liveSlotsOnEntry noReloads (li : acc) instrs
-- | Combine the associations from all the inward control flow edges.
--
collateJoinPoints :: CleanM ()
collateJoinPoints
 = modify $ \s -> s
        { sJumpValid    = mapUFM intersects (sJumpValidAcc s)
        , sJumpValidAcc = emptyUFM }

-- | Intersect a list of association tables: an association survives a
--   join point only if it holds on every in-edge.
intersects :: [Assoc Store] -> Assoc Store
intersects []           = emptyAssoc
intersects assocs       = foldl1' intersectAssoc assocs


-- | See if we have a reg with the same value as this slot in the
--   association table.
findRegOfSlot :: Assoc Store -> Int -> Maybe Reg
findRegOfSlot assoc slot
        | close                 <- closeAssoc (SSlot slot) assoc
        , Just (SReg reg)       <- find isStoreReg $ nonDetEltsUFM close
        -- See Note [Unique Determinism and code generation]
        = Just reg

        | otherwise
        = Nothing
-------------------------------------------------------------------------------
-- | Cleaner monad.
type CleanM
        = State CleanS

-- | Cleaner state.
data CleanS
        = CleanS
        { -- | Regs which are valid at the start of each block.
          sJumpValid            :: UniqFM (Assoc Store)

          -- | Collecting up what regs were valid across each jump.
          --    in the next pass we can collate these and write the results
          --    to sJumpValid.
        , sJumpValidAcc         :: UniqFM [Assoc Store]

          -- | Map of (slot -> blocks which reload from this slot)
          --    used to decide if whether slot spilled to will ever be
          --    reloaded from on this path.
        , sReloadedBy           :: UniqFM [BlockId]

          -- | Spills and reloads cleaned each pass (latest at front)
        , sCleanedCount         :: [(Int, Int)]

          -- | Spills and reloads that have been cleaned in this pass so far.
        , sCleanedSpillsAcc     :: Int
        , sCleanedReloadsAcc    :: Int }


-- | Construct the initial cleaner state.
initCleanS :: CleanS
initCleanS
        = CleanS
        { sJumpValid            = emptyUFM
        , sJumpValidAcc         = emptyUFM
        , sReloadedBy           = emptyUFM
        , sCleanedCount         = []
        , sCleanedSpillsAcc     = 0
        , sCleanedReloadsAcc    = 0 }


-- | Remember the associations before a jump.
accJumpValid :: Assoc Store -> BlockId -> CleanM ()
accJumpValid assocs target
 = modify $ \s -> s {
        sJumpValidAcc = addToUFM_C (++)
                                (sJumpValidAcc s)
                                target
                                [assocs] }

-- | Remember that this block reloads from this slot; consulted later by
--   the backward pass to decide whether a spill is ever read back.
accBlockReloadsSlot :: BlockId -> Slot -> CleanM ()
accBlockReloadsSlot blockId slot
 = modify $ \s -> s {
        sReloadedBy = addToUFM_C (++)
                                (sReloadedBy s)
                                (SSlot slot)
                                [blockId] }
-------------------------------------------------------------------------------
-- A store location can be a stack slot or a register
data Store
        = SSlot Int
        | SReg  Reg

-- | Check if this is a reg store.
isStoreReg :: Store -> Bool
isStoreReg ss
 = case ss of
        SSlot _ -> False
        SReg  _ -> True

-- Spill cleaning is only done once all virtuals have been allocated to
-- realRegs
instance Uniquable Store where
    getUnique (SReg r)
        | RegReal (RealRegSingle i)     <- r
        = mkRegSingleUnique i

        | RegReal (RealRegPair r1 r2)   <- r
        -- NOTE(review): the 65535 multiplier presumably keeps pair
        -- encodings disjoint from single-reg uniques -- confirm against
        -- the mkReg*Unique constructors.
        = mkRegPairUnique (r1 * 65535 + r2)

        | otherwise
        = error $ "RegSpillClean.getUnique: found virtual reg during spill clean,"
                ++ "only real regs expected."

    getUnique (SSlot i) = mkRegSubUnique i -- [SLPJ] I hope "SubUnique" is ok

instance Outputable Store where
        ppr (SSlot i)   = text "slot" <> int i
        ppr (SReg  r)   = ppr r
-------------------------------------------------------------------------------
-- Association graphs.
--      In the spill cleaner, two store locations are associated if they are
--      known to hold the same value.  The graph is undirected: each node
--      maps to the set of its neighbours.
--
type Assoc a    = UniqFM (UniqSet a)

-- | An empty association
emptyAssoc :: Assoc a
emptyAssoc      = emptyUFM


-- | Add an association between these two things.
--   The edge is recorded in both directions.
addAssoc :: Uniquable a
         => a -> a -> Assoc a -> Assoc a

addAssoc a b m
 = let  m1      = addToUFM_C unionUniqSets m  a (unitUniqSet b)
        m2      = addToUFM_C unionUniqSets m1 b (unitUniqSet a)
   in   m2


-- | Delete all associations to a node.
delAssoc :: (Uniquable a)
         => a -> Assoc a -> Assoc a

delAssoc a m
        | Just aSet     <- lookupUFM m a
        , m1            <- delFromUFM m a
        -- Also remove the reverse edges (x -> a) from every neighbour x.
        = nonDetFoldUFM (\x m -> delAssoc1 x a m) m1 aSet
          -- It's OK to use nonDetFoldUFM here because deletion is commutative

        | otherwise     = m


-- | Delete a single association edge (a -> b).
delAssoc1 :: Uniquable a
        => a -> a -> Assoc a -> Assoc a

delAssoc1 a b m
        | Just aSet     <- lookupUFM m a
        = addToUFM m a (delOneFromUniqSet aSet b)

        | otherwise     = m


-- | Check if these two things are associated.
--   Works on the transitive closure, not just direct edges.
elemAssoc :: (Uniquable a)
          => a -> a -> Assoc a -> Bool

elemAssoc a b m
        = elementOfUniqSet b (closeAssoc a m)


-- | Find the refl. trans. closure of the association from this point.
closeAssoc :: (Uniquable a)
        => a -> Assoc a -> UniqSet a

closeAssoc a assoc
 =      closeAssoc' assoc emptyUniqSet (unitUniqSet a)
 where
        -- Standard worklist traversal: 'visited' is the closure so far,
        -- 'toVisit' the frontier.
        closeAssoc' assoc visited toVisit
         = case nonDetEltsUFM toVisit of
             -- See Note [Unique Determinism and code generation]

                -- nothing else to visit, we're done
                []      -> visited

                (x:_)
                 -- we've already seen this node
                 |  elementOfUniqSet x visited
                 -> closeAssoc' assoc visited (delOneFromUniqSet toVisit x)

                 -- haven't seen this node before,
                 --     remember to visit all its neighbors
                 |  otherwise
                 -> let neighbors
                         = case lookupUFM assoc x of
                                Nothing         -> emptyUniqSet
                                Just set        -> set

                    in closeAssoc' assoc
                        (addOneToUniqSet visited x)
                        (unionUniqSets toVisit neighbors)

-- | Intersect two associations.
intersectAssoc :: Assoc a -> Assoc a -> Assoc a
intersectAssoc a b
        = intersectUFM_C (intersectUniqSets) a b
| snoyberg/ghc | compiler/nativeGen/RegAlloc/Graph/SpillClean.hs | bsd-3-clause | 21,081 | 0 | 18 | 7,538 | 3,813 | 1,943 | 1,870 | 324 | 3 |
-- | Entirely re-exports.
module Network.API.Builder
( module Network.API.Builder.API
, module Network.API.Builder.Builder
, module Network.API.Builder.Error
, module Network.API.Builder.Query
, module Network.API.Builder.Receive
, module Network.API.Builder.Routes
, module Network.API.Builder.Send ) where
import Network.API.Builder.API
import Network.API.Builder.Builder
import Network.API.Builder.Error
import Network.API.Builder.Query
import Network.API.Builder.Receive
import Network.API.Builder.Routes
import Network.API.Builder.Send
| eryx67/api-builder | src/Network/API/Builder.hs | bsd-3-clause | 556 | 0 | 5 | 58 | 115 | 84 | 31 | 15 | 0 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE PolyKinds #-}
-- |
-- Copyright : Anders Claesson 2015-2017
-- Maintainer : Anders Claesson <anders.claesson@gmail.com>
-- License : BSD-3
--
module HOPS.GF
( module HOPS.GF.Series
, module HOPS.GF.Transform
, module HOPS.Pretty
, Expr (..)
, Expr0 (..)
, Expr1 (..)
, Expr2 (..)
, Expr3 (..)
, PackedExpr (..)
, Name
, nameSupply
, packExpr
, vars
, anums
, insertVar
, aNumExpr
, tagExpr
-- Expand
, expand
-- Core
, Core (..)
, core
-- Eval
, Env (..)
, emptyEnv
, evalCoreS
, evalCore
-- Parse
, parseExpr
, parseExprErr
) where
import GHC.TypeLits
import Data.Proxy
import Data.Maybe
import Data.List
import Data.Ratio
import Data.Semigroup
import Data.Aeson (FromJSON (..), ToJSON(..), Value (..))
import Data.Text.Encoding (encodeUtf8, decodeUtf8)
import Data.Vector (Vector, (!?))
import qualified Data.Vector as V
import Data.Map.Lazy (Map)
import qualified Data.Map.Lazy as M
import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Char8 as B
import Data.Attoparsec.ByteString.Char8 hiding (take, takeWhile)
import qualified Data.Attoparsec.ByteString.Char8 as A
import Control.Monad
import Control.Monad.Trans.State
import Control.Applicative
import HOPS.Pretty
import HOPS.Utils.Parse
import HOPS.OEIS
import HOPS.GF.Series
import HOPS.GF.Transform
import qualified HOPS.GF.Rats as R
import qualified HOPS.GF.Const as C
-- | A compact `ByteString` representation of a `Prg`.
newtype PackedExpr = PackedExpr ByteString deriving (Eq, Show)

instance ToJSON PackedExpr where
    toJSON (PackedExpr bs) = String (decodeUtf8 bs)

instance FromJSON PackedExpr where
    parseJSON (String s) = pure $ PackedExpr (encodeUtf8 s)
    parseJSON _          = mzero  -- only JSON strings are accepted

-- | An environment holds a mapping from A-numbers to series, and a
-- mapping from names to series (assignments).
data Env (n :: Nat) = Env
    { aNumEnv :: Vector (Series n)  -- ^ indexed by A-number minus one
    , varEnv  :: Map Name (Series n)
    } deriving Show

-- | Variable name.
type Name = ByteString

-- | A substitution on variable names.
type Subs = Name -> Name
-- | A program: a single expression, a named binding, or a sequence.
data Expr
    = Singleton Expr0
    | ELet Name Expr0
    | ESeq Expr Expr
    deriving (Show, Eq)

-- | Additive level of the expression grammar.
data Expr0
    = EAdd Expr0 Expr0
    | ESub Expr0 Expr0
    | Expr1 Expr1
    deriving (Show, Eq)

-- | Multiplicative level.
data Expr1
    = EMul Expr1 Expr1
    | EDiv Expr1 Expr1
    | EBDP Expr1 Expr1      -- the @<>@ operator
    | EPtMul Expr1 Expr1    -- pointwise @.*@
    | EPtDiv Expr1 Expr1    -- pointwise @./@
    | Expr2 Expr2
    deriving (Show, Eq)

-- | Unary/postfix/binary-tight level.
data Expr2
    = ENeg Expr2
    | EPos Expr2
    | EFac Expr3            -- factorial @!@
    | EPow Expr3 Expr3      -- power @^@
    | EComp Expr3 Expr3     -- composition @\@@
    | ECoef Expr3 Expr3     -- coefficient extraction @?@
    | Expr3 Expr3
    deriving (Show, Eq)

-- | Atomic level.
data Expr3
    = EX
    | EDZ
    | EIndet
    | EA Int -- An A-number
    | ETag Int
    | EVar Name
    | ELit Integer
    | EApp Name [Expr0] -- A named transform
    | ESet Name [Expr] -- A named set of expressions
    | ERats R.Rats
    | Expr Expr
    deriving (Show, Eq)

-- | Coerce a program to the additive level, wrapping in parentheses
-- (the 'Expr' constructor) when it is not already a 'Singleton'.
to0 :: Expr -> Expr0
to0 (Singleton e) = e
to0 e = Expr1 (Expr2 (Expr3 (Expr e)))

-- | Coerce a program to the multiplicative level; like 'to0' but one
-- level down.
to1 :: Expr -> Expr1
to1 (Singleton (Expr1 e)) = e
to1 e = Expr2 (Expr3 (Expr e))

-- | Lift an atom all the way up to a full program.
from3 :: Expr3 -> Expr
from3 = Singleton . Expr1 . Expr2 . Expr3

-- | Build syntax trees with numeric notation; 'abs' and 'signum' map to
-- the named transforms @abs@ and @sgn@ ('negate' comes from the default
-- @0 - x@ definition).
instance Num Expr where
    e1 + e2 = Singleton $ EAdd (to0 e1) (to0 e2)
    e1 - e2 = Singleton $ ESub (to0 e1) (to0 e2)
    e1 * e2 = Singleton $ Expr1 $ EMul (to1 e1) (to1 e2)
    fromInteger = from3 . ELit
    abs = from3 . EApp "abs" . pure . to0
    signum = from3 . EApp "sgn" . pure . to0
-- | The core (desugared) form of an expression: every operator becomes
-- a named application ('App'), which simplifies evaluation.
data Core
    = App !Name ![Core]               -- ^ named transform/function application
    | X                               -- ^ the indeterminate
    | A {-# UNPACK #-} !Int           -- ^ an OEIS A-number
    | Tag {-# UNPACK #-} !Int         -- ^ an anonymous tagged sequence
    | Var {-# UNPACK #-} !Name        -- ^ a variable reference
    | Lit !Rat                        -- ^ a rational literal
    | Rats !R.Core                    -- ^ a literal sequence of rationals
    | Let {-# UNPACK #-} !Name !Core  -- ^ a name binding
    | Seq !Core !Core                 -- ^ sequential composition
    deriving (Show, Eq, Ord)

instance Pretty Core where
    pretty (App f es) = f <> paren (foldl' (<>) "" $ intersperse "," $ map pretty es)
    pretty X = "x"
    pretty (A i) = B.cons 'A' (pad 6 i)
    pretty (Tag i) = "TAG" <> pad 6 i
    pretty (Var s) = s
    -- Render integral rationals without the denominator.
    pretty (Lit t) = maybe (pretty t) pretty $ maybeInteger t
    pretty (Rats r) = pretty r
    pretty (Let s e) = s <> "=" <> pretty e
    pretty (Seq e e') = pretty e <> ";" <> pretty e'

-- | Arithmetic on core terms just builds named applications.
instance Num Core where
    (+) x y = App "add" [x,y]
    (-) x y = App "sub" [x,y]
    (*) x y = App "mul" [x,y]
    abs x = App "abs" [x]
    signum x = App "sgn" [x]
    fromInteger = Lit . fromInteger

instance Fractional Core where
    fromRational = Lit . fromRational
    (/) x y = App "div" [x,y]

instance Floating Core where
    pi = Lit pi
    exp x = App "exp" [x]
    log x = App "log" [x]
    sin x = App "sin" [x]
    cos x = App "cos" [x]
    asin x = App "arcsin" [x]
    acos x = App "arccos" [x]
    atan x = App "arctan" [x]
    sinh x = App "sinh" [x]
    cosh x = App "cosh" [x]
    asinh x = App "arsinh" [x]
    acosh x = App "arcosh" [x]
    atanh x = App "artanh" [x]

instance ToJSON Expr where
    toJSON = toJSON . decodeUtf8 . pretty

instance FromJSON Expr where
    parseJSON (String t) = fromMaybe mzero (return <$> parseExpr (encodeUtf8 t))
    parseJSON _ = mzero

-- | Sequence two programs, piping the result of the first into the
-- second: both are renamed apart using 'nameSupply', and the second
-- program's \"stdin\" variable is rebound to the final binding of the
-- first.
instance Semigroup Expr where
    p <> q = snd $ rename nameSupply (p2 `ESeq` q2)
      where
        (vs, p1) = normalForm nameSupply p
        (us, p2) = rename vs p1
        ( _, q1) = rename us q
        -- 'normalForm' guarantees the last statement of p2 is an ELet.
        ELet s _ = lastExpr p2
        q2 = subs [("stdin", s)] q1
-- Pretty-printing of the surface syntax.  Note: no precedence-driven
-- parenthesisation is needed here because explicit grouping is kept in
-- the tree via the 'Expr' constructor of 'Expr3'.
instance Pretty Expr where
    pretty (Singleton e) = pretty e
    pretty (ELet s e) = s <> "=" <> pretty e
    pretty (ESeq e1 e2) = pretty e1 <> ";" <> pretty e2

instance Pretty Expr0 where
    pretty (EAdd e1 e2) = pretty e1 <> "+" <> pretty e2
    pretty (ESub e1 e2) = pretty e1 <> "-" <> pretty e2
    pretty (Expr1 e) = pretty e

instance Pretty Expr1 where
    pretty (EMul e1 e2) = pretty e1 <> "*" <> pretty e2
    pretty (EDiv e1 e2) = pretty e1 <> "/" <> pretty e2
    pretty (EBDP e1 e2) = pretty e1 <> "<>" <> pretty e2
    pretty (EPtMul e1 e2) = pretty e1 <> ".*" <> pretty e2
    pretty (EPtDiv e1 e2) = pretty e1 <> "./" <> pretty e2
    pretty (Expr2 e) = pretty e

instance Pretty Expr2 where
    pretty (ENeg e) = "-" <> pretty e
    pretty (EPos e) = pretty e
    pretty (EFac e) = pretty e <> "!"
    pretty (EPow e1 e2) = pretty e1 <> "^" <> pretty e2
    pretty (EComp e1 e2) = pretty e1 <> "@" <> pretty e2
    pretty (ECoef e1 e2) = pretty e1 <> "?" <> pretty e2
    pretty (Expr3 e) = pretty e

instance Pretty Expr3 where
    pretty EX = "x"
    pretty EDZ = "DZ"
    pretty EIndet = "Indet"
    pretty (EA i) = B.cons 'A' (pad 6 i)
    pretty (ETag i) = "TAG" <> pad 6 i
    pretty (EVar s) = s
    pretty (ELit t) = pretty t
    pretty (EApp s es) = s <> paren (foldl' (<>) "" $ intersperse "," $ map pretty es)
    pretty (ESet s es) = s <> paren (foldl' (<>) "" $ intersperse "," $ map pretty es)
    pretty (ERats r) = pretty r
    pretty (Expr e) = paren $ pretty e
-- | @pad d n@ packs the integer @n@ into a `ByteString`, left-padding
-- with \'0\' so that the result has length at least @d@.  (The original
-- comment said the padding goes on the right, contradicting both the
-- code and the example below.)  If the decimal rendering of @n@ already
-- has @d@ or more characters it is returned unpadded ('B.replicate'
-- with a non-positive count yields the empty string).
--
-- > pad 6 123 = "000123"
--
pad :: Int -> Int -> ByteString
pad d n = B.replicate (d - B.length s) '0' <> s where s = B.pack (show n)
-- | A compact representation of an `Expr` as a wrapped `ByteString`.
packExpr :: Expr -> PackedExpr
packExpr = PackedExpr . pretty

-- | The list of variables in a program.
vars :: Core -> [Name]
vars = nub . varsCore

-- | The list of A-numbers in a program.
anums :: Core -> [Int]
anums = nub . anumsCore

-- Name substitution: 'subsExpr' and its numbered helpers push a renaming
-- function through every level of the AST, rewriting both binding sites
-- ('ELet') and references ('EVar').

subsExpr :: Subs -> Expr -> Expr
subsExpr f (Singleton e) = Singleton (subsExpr0 f e)
subsExpr f (ELet s e) = ELet (f s) (subsExpr0 f e)
subsExpr f (ESeq e1 e2) = ESeq (subsExpr f e1) (subsExpr f e2)

subsExpr0 :: Subs -> Expr0 -> Expr0
subsExpr0 f (EAdd e1 e2) = EAdd (subsExpr0 f e1) (subsExpr0 f e2)
subsExpr0 f (ESub e1 e2) = ESub (subsExpr0 f e1) (subsExpr0 f e2)
subsExpr0 f (Expr1 e) = Expr1 (subsExpr1 f e)

subsExpr1 :: Subs -> Expr1 -> Expr1
subsExpr1 f (EMul e1 e2) = EMul (subsExpr1 f e1) (subsExpr1 f e2)
subsExpr1 f (EDiv e1 e2) = EDiv (subsExpr1 f e1) (subsExpr1 f e2)
subsExpr1 f (EBDP e1 e2) = EBDP (subsExpr1 f e1) (subsExpr1 f e2)
subsExpr1 f (EPtMul e1 e2) = EPtMul (subsExpr1 f e1) (subsExpr1 f e2)
subsExpr1 f (EPtDiv e1 e2) = EPtDiv (subsExpr1 f e1) (subsExpr1 f e2)
subsExpr1 f (Expr2 e) = Expr2 (subsExpr2 f e)

subsExpr2 :: Subs -> Expr2 -> Expr2
subsExpr2 f (ENeg e) = ENeg (subsExpr2 f e)
subsExpr2 f (EPos e) = EPos (subsExpr2 f e)
subsExpr2 f (EFac e) = EFac (subsExpr3 f e)
subsExpr2 f (EPow e1 e2) = EPow (subsExpr3 f e1) (subsExpr3 f e2)
subsExpr2 f (EComp e1 e2) = EComp (subsExpr3 f e1) (subsExpr3 f e2)
subsExpr2 f (ECoef e1 e2) = ECoef (subsExpr3 f e1) (subsExpr3 f e2)
subsExpr2 f (Expr3 e) = Expr3 (subsExpr3 f e)

subsExpr3 :: Subs -> Expr3 -> Expr3
subsExpr3 f (EVar s) = EVar (f s)
-- Transform names ('EApp') are deliberately not renamed, only their
-- argument expressions.
subsExpr3 f (EApp s es) = EApp s (map (subsExpr0 f) es)
subsExpr3 f (Expr e) = Expr (subsExpr f e)
subsExpr3 _ e = e
-- | Apply a list of (old, new) name substitutions to an expression.
-- Names absent from the list are left unchanged.
subs :: [(Name, Name)] -> Expr -> Expr
subs assoc = subsExpr f
  where
    -- Build the lookup table once, instead of inside the lambda where it
    -- would be reconstructed on every renamed occurrence.
    d = M.fromList assoc
    f k = M.findWithDefault k k d
-- | Variables used in an expression, excluding the implicit \"stdin\"
-- binding.
vars' :: Expr -> [Name]
vars' prog = vars (core prog) \\ ["stdin"]
-- | An infinite supply of fresh variable names: a handful of short
-- one-letter names followed by @f0@, @f1@, @f2@, ...
nameSupply :: [Name]
nameSupply = shortNames ++ map numbered [0 :: Int ..]
  where
    shortNames = B.words "f g h p q r s t u v w"
    numbered i = B.pack ('f' : show i)
-- | The final statement of a sequence; an expression that is not a
-- sequence is its own last statement.
lastExpr :: Expr -> Expr
lastExpr expr =
    case expr of
      ESeq _ rest -> lastExpr rest
      other       -> other
-- | Rewrite an expression so that its final statement is an 'ELet'
-- binding, drawing a fresh name from the supply when needed; also
-- returns the remaining supply.
normalForm :: [Name] -> Expr -> ([Name], Expr)
normalForm vs e = nf e
  where
    -- The irrefutable 'u:us' is safe in practice: the supply is the
    -- infinite 'nameSupply', so removing the finitely many used names
    -- leaves it non-empty.
    nf (Singleton e0) = let u:us = vs \\ vars' e in (us, ELet u e0)
    nf e1@(ELet _ _) = (vs, e1)
    nf (ESeq e1 e2) = let (us, e3) = nf e2 in (us, ESeq e1 e3)

-- | Rename all variables of an expression using fresh names from the
-- supply; returns the unused remainder of the supply.
rename :: [Name] -> Expr -> ([Name], Expr)
rename vs p = (names, subs assoc p)
  where
    names = vs \\ map snd assoc
    assoc = zip (vars' p) vs

-- | Look up the series for an A-number (A-numbers index the environment
-- vector starting at 1).
lookupANum :: Int -> Env n -> Maybe (Series n)
lookupANum i env = aNumEnv env !? (i-1)

-- | Look up the series bound to a variable, if any.
lookupVar :: ByteString -> Env n -> Maybe (Series n)
lookupVar v env = M.lookup v (varEnv env)

-- | Insert a variable binding into the given environment.
insertVar :: ByteString -> Series n -> Env n -> Env n
insertVar v f (Env a vs) = Env a (M.insert v f vs)

-- | The program consisting of a single A-number.
aNumExpr :: Int -> Expr
aNumExpr m = Singleton $ Expr1 (Expr2 (Expr3 (EA m)))

-- | The program consisting of a single tag.
tagExpr :: Int -> Expr
tagExpr m = Singleton $ Expr1 (Expr2 (Expr3 (ETag m)))
--------------------------------------------------------------------------------
-- Expand phase
--------------------------------------------------------------------------------

-- | @polyList d j@: all coefficient lists of length @j@ whose absolute
-- values sum to at most @d@, enumerated with the leading coefficient
-- varying slowest from @-d@ to @d@.
polyList :: Int -> Int -> [[Int]]
polyList degree len
  | len == 0    = [[]]
  | degree == 0 = [replicate len 0]
  | otherwise   = do
      c    <- [negate degree .. degree]
      rest <- polyList (degree - abs c) (len - 1)
      pure (c : rest)

-- | Like 'polyList', but the leading coefficient is fixed to 1 (so the
-- results are usable as denominators).
polyList1 :: Int -> Int -> [[Int]]
polyList1 degree len = map (1 :) (polyList (degree - 1) (len - 1))
-- | Build a polynomial (as a literal rational-sequence expression) from
-- its integer coefficients; the final coefficient becomes the constant
-- tail of the sequence.
-- Note: 'init'/'last' are partial, but every caller below supplies a
-- non-empty coefficient list.
fromList :: [Int] -> Expr3
fromList cs = ERats (lift <$> init cs, R.Constant (lift (last cs)), R.Poly)
  where
    lift = C.Expr1 . C.Expr2 . C.Expr3 . C.ELit . fromIntegral

-- | Build the expression @e1/e2@, wrapped in parentheses.
divide :: Expr3 -> Expr3 -> Expr3
divide e1 e2 = Expr $ Singleton $ Expr1 $ EDiv (Expr2 (Expr3 e1)) (Expr2 (Expr3 e2))

-- | All nonzero polynomials whose coefficients come from @polyList d d@.
polys :: Int -> [Expr3]
polys d = [ fromList cs | cs <- polyList d d, any (/=0) cs ]

-- | Polynomials with leading coefficient 1 (candidate denominators).
polys1 :: Int -> [Expr3]
polys1 d = [ fromList cs | cs <- polyList1 d d ]

-- | All rational functions @p/q@ with @p@ from 'polys' and @q@ from
-- 'polys1'.
rationals :: Int -> [Expr3]
rationals d = divide <$> polys d <*> polys1 d
-- | The Calkin-Wilf sequence, which enumerates every positive rational
-- exactly once: term @k@ follows from term @k-1@ via Newman's
-- recurrence x' = 1 / (2*floor(x) - x + 1).
cw :: Int -> Rational
cw k
  | k == 0    = 1
  | otherwise = recip (2 * whole - prev + 1)
  where
    prev  = cwStream !! (k - 1)
    whole = fromInteger (truncate prev)

-- | The Calkin-Wilf sequence as an infinite list.
cwStream :: [Rational]
cwStream = [ cw k | k <- [0 ..] ]

-- | The first @2*(2^n - 1)@ terms of the sequence with each term
-- preceded by its negation.
cwFracs :: Int -> [Rational]
cwFracs n = take count (concatMap (\q -> [negate q, q]) cwStream)
  where
    count = 2 * (2 ^ n - 1)
-- | Candidate fractional constants: the first Calkin-Wilf rationals
-- (and their negations) rendered as division expressions.
fracs :: Int -> [Expr3]
fracs n = [ divide (ELit (numerator r)) (ELit (denominator r)) | r <- cwFracs n ]
-- | Expand a named set of expressions (@poly@, @rat@, @frac@, @oneof@)
-- into its members.  The numeric sets each require a single integer
-- literal argument.
lookupSet :: Name -> [Expr] -> [Expr3]
lookupSet "poly" [Singleton (Expr1 (Expr2 (Expr3 (ELit d))))] = polys (fromIntegral d)
lookupSet "poly" _ = error "'poly' expects an integer"
lookupSet "rat" [Singleton (Expr1 (Expr2 (Expr3 (ELit d))))] = rationals (fromIntegral d)
lookupSet "rat" _ = error "'rat' expects an integer"
lookupSet "frac" [Singleton (Expr1 (Expr2 (Expr3 (ELit d))))] = fracs (fromIntegral d)
lookupSet "frac" _ = error "'frac' expects an integer"
lookupSet "oneof" es = Expr <$> es
-- Previously 'undefined', which crashed with an uninformative
-- "Prelude.undefined"; report the offending name instead.
lookupSet s _ = error $ "lookupSet: unknown set constructor " ++ B.unpack s
-- | Expand a program into the list of concrete programs it denotes:
-- every named set (e.g. @poly(2)@) is replaced by each of its members
-- in turn, so the result is the cartesian product over all sets
-- occurring in the program.
expand :: Expr -> [Expr]
expand = expandExpr

expandExpr :: Expr -> [Expr]
expandExpr (Singleton e) = Singleton <$> expandExpr0 e
expandExpr (ELet s e) = ELet s <$> expandExpr0 e
expandExpr (ESeq e1 e2) = ESeq <$> expandExpr e1 <*> expandExpr e2

expandExpr0 :: Expr0 -> [Expr0]
expandExpr0 (EAdd e1 e2) = EAdd <$> expandExpr0 e1 <*> expandExpr0 e2
expandExpr0 (ESub e1 e2) = ESub <$> expandExpr0 e1 <*> expandExpr0 e2
expandExpr0 (Expr1 e) = Expr1 <$> expandExpr1 e

expandExpr1 :: Expr1 -> [Expr1]
expandExpr1 (EMul e1 e2) = EMul <$> expandExpr1 e1 <*> expandExpr1 e2
expandExpr1 (EDiv e1 e2) = EDiv <$> expandExpr1 e1 <*> expandExpr1 e2
expandExpr1 (EBDP e1 e2) = EBDP <$> expandExpr1 e1 <*> expandExpr1 e2
expandExpr1 (EPtMul e1 e2) = EPtMul <$> expandExpr1 e1 <*> expandExpr1 e2
expandExpr1 (EPtDiv e1 e2) = EPtDiv <$> expandExpr1 e1 <*> expandExpr1 e2
expandExpr1 (Expr2 e) = Expr2 <$> expandExpr2 e

expandExpr2 :: Expr2 -> [Expr2]
expandExpr2 (ENeg e) = ENeg <$> expandExpr2 e
expandExpr2 (EPos e) = EPos <$> expandExpr2 e
expandExpr2 (EFac e) = EFac <$> expandExpr3 e
expandExpr2 (EPow e1 e2) = EPow <$> expandExpr3 e1 <*> expandExpr3 e2
expandExpr2 (EComp e1 e2) = EComp <$> expandExpr3 e1 <*> expandExpr3 e2
expandExpr2 (ECoef e1 e2) = ECoef <$> expandExpr3 e1 <*> expandExpr3 e2
expandExpr2 (Expr3 e) = Expr3 <$> expandExpr3 e

expandExpr3 :: Expr3 -> [Expr3]
expandExpr3 EX = [EX]
expandExpr3 EDZ = [EDZ]
expandExpr3 EIndet = [EIndet]
expandExpr3 (EA i) = [EA i]
expandExpr3 (ETag i) = [ETag i]
expandExpr3 (EVar s) = [EVar s]
-- The only interesting case: a named set becomes its member list.
expandExpr3 (ESet s es) = lookupSet s es
expandExpr3 (ELit t) = [ELit $ fromInteger t]
expandExpr3 (EApp s es) = EApp s <$> sequence (expandExpr0 <$> es)
expandExpr3 (ERats r) = [ERats r]
expandExpr3 (Expr e) = Expr <$> expandExpr e
--------------------------------------------------------------------------------
-- Core
--------------------------------------------------------------------------------

-- | Translate a program to its core representation.  Named sets must
-- have been eliminated by 'expand' first; encountering 'ESet' here is
-- an internal error.
core :: Expr -> Core
core = coreExpr

coreExpr :: Expr -> Core
coreExpr (Singleton e) = coreExpr0 e
coreExpr (ELet s e) = Let s (coreExpr0 e)
coreExpr (ESeq e1 e2) = Seq (coreExpr e1) (coreExpr e2)

coreExpr0 :: Expr0 -> Core
coreExpr0 (EAdd e1 e2) = App "add" [coreExpr0 e1, coreExpr0 e2]
coreExpr0 (ESub e1 e2) = App "sub" [coreExpr0 e1, coreExpr0 e2]
coreExpr0 (Expr1 e) = coreExpr1 e

coreExpr1 :: Expr1 -> Core
coreExpr1 (EMul e1 e2) = App "mul" [coreExpr1 e1, coreExpr1 e2]
coreExpr1 (EDiv e1 e2) = App "div" [coreExpr1 e1, coreExpr1 e2]
coreExpr1 (EBDP e1 e2) = App "bdp" [coreExpr1 e1, coreExpr1 e2]
coreExpr1 (EPtMul e1 e2) = App "ptmul" [coreExpr1 e1, coreExpr1 e2]
coreExpr1 (EPtDiv e1 e2) = App "ptdiv" [coreExpr1 e1, coreExpr1 e2]
coreExpr1 (Expr2 e) = coreExpr2 e

coreExpr2 :: Expr2 -> Core
coreExpr2 (ENeg e) = App "neg" [coreExpr2 e]
-- Unary plus is the identity.
coreExpr2 (EPos e) = coreExpr2 e
coreExpr2 (EFac e) = App "fac" [coreExpr3 e]
coreExpr2 (EPow e1 e2) = App "pow" [coreExpr3 e1, coreExpr3 e2]
coreExpr2 (EComp e1 e2) = App "comp" [coreExpr3 e1, coreExpr3 e2]
coreExpr2 (ECoef e1 e2) = App "coef" [coreExpr3 e1, coreExpr3 e2]
coreExpr2 (Expr3 e) = coreExpr3 e

coreExpr3 :: Expr3 -> Core
coreExpr3 EX = X
coreExpr3 EDZ = Lit DZ
coreExpr3 EIndet = Lit Indet
coreExpr3 (EA i) = A i
coreExpr3 (ETag i) = Tag i
coreExpr3 (EVar s) = Var s
coreExpr3 (ESet _ _) = error "Internal error"
coreExpr3 (ELit t) = Lit $ fromInteger t
coreExpr3 (EApp s es) = App s (map coreExpr0 es)
coreExpr3 (ERats r) = Rats (R.core r)
coreExpr3 (Expr e) = coreExpr e
-- | All variable names occurring in a core term, in left-to-right order
-- and with duplicates kept ('vars' removes them).  'Let' contributes
-- the bound name as well as the names in its body.
varsCore :: Core -> [Name]
varsCore term =
    case term of
      App _ args -> concatMap varsCore args
      Var v      -> [v]
      Seq l r    -> varsCore l ++ varsCore r
      Let v body -> v : varsCore body
      _          -> []

-- | All A-numbers occurring in a core term, in left-to-right order and
-- with duplicates kept ('anums' removes them).
anumsCore :: Core -> [Int]
anumsCore term =
    case term of
      App _ args -> concatMap anumsCore args
      A i        -> [i]
      Seq l r    -> anumsCore l ++ anumsCore r
      Let _ body -> anumsCore body
      _          -> []
--------------------------------------------------------------------------------
-- Eval
--------------------------------------------------------------------------------

-- | An environment with no known A-numbers and no variable bindings.
emptyEnv :: Env n
emptyEnv = Env V.empty M.empty

-- | Evaluate a named application.  The name is first tried as a
-- built-in transform; failing that it is treated as a variable bound to
-- a series and (for a single argument) applied by composition.  Arity
-- mismatches evaluate to 'nil'.
evalName :: KnownNat n => Name -> Env n -> [Series n] -> Series n
evalName t env ss =
    case lookupTransform t of
      Nothing -> case ss of
                   [s] -> fromMaybe nil (lookupVar t env) `o` s
                   _ -> nil
      Just (Transform k f) -> if length ss == k then f ss else nil

-- | A single evaluation pass over a core term, threading the
-- environment through 'State'.  Unknown A-numbers and variables
-- evaluate to 'nil'.
evalCoreS1 :: KnownNat n => Core -> State (Env n) (Series n)
evalCoreS1 (App f es) = evalName f <$> get <*> mapM evalCoreS1 es
evalCoreS1 X = return $ polynomial (Proxy :: Proxy n) [0,1]
evalCoreS1 (A i) = fromMaybe nil . lookupANum i <$> get
evalCoreS1 (Tag _) = return nil  -- tags carry no series of their own
evalCoreS1 (Var v) = fromMaybe nil . lookupVar v <$> get
evalCoreS1 (Lit c) = return $ polynomial (Proxy :: Proxy n) [c]
evalCoreS1 (Rats r) = return $ R.evalCore r
evalCoreS1 (Let v e) = do
    -- Evaluate the right-hand side, then record the binding.
    (f, env) <- runState (evalCoreS1 e) <$> get
    put (insertVar v f env)
    return f
evalCoreS1 (Seq e e') = do
    -- The first program is run only for its environment effects;
    -- the value of the sequence is the value of the second program.
    (_, env) <- runState (evalCoreS1 e) <$> get
    let (f, env') = runState (evalCoreS1 e') env
    put env'
    return f

-- | Evaluate a core term by iterating 'evalCoreS1'.
-- NOTE(review): the pass count runs up to @precision f0@ where
-- @f0 = nil@ -- presumably so that self-referential definitions converge
-- one coefficient per pass; confirm against 'precision' in
-- "HOPS.GF.Series".
evalCoreS :: KnownNat n => Core -> State (Env n) (Series n)
evalCoreS c = go 1
  where
    f0 = nil
    go 0 = return f0
    go n = do
      (f, env) <- runState (evalCoreS1 c) <$> get
      put env
      if n == precision f0
        then return f
        else go (n+1)
-- | Evaluate a program in a given environment. E.g.
--
-- >>> evalCore (emptyEnv :: Env 4) [ log (1/(1-X)) ]
-- series (Proxy :: Proxy 4) [Val (0 % 1),Val (1 % 1),Val (1 % 2),Val (1 % 3)]
--
evalCore :: KnownNat n => Env n -> Core -> Series n
evalCore env c = evalState (evalCoreS c) env
--------------------------------------------------------------------------------
-- Parse
--------------------------------------------------------------------------------

-- | Parse a single assignment, @name=expr@.
assignment :: Parser (ByteString, Expr0)
assignment = (,) <$> var <*> (string "=" >> expr0)

-- | Parse a whole program: assignments and expressions chained with
-- \';\', with an optional trailing \';\'.
expr :: Parser Expr
expr = chainl1 (uncurry ELet <$> assignment <|> Singleton <$> expr0) (const ESeq <$> string ";")
    <* (string ";" <|> pure "")

-- Note: 'oneOf' below is from "HOPS.Utils.Parse"; it presumably tries
-- each space-separated alternative in turn -- confirm there.

-- | Additive level: left-associative \'+\' and \'-\'.
expr0 :: Parser Expr0
expr0 = chainl1 (Expr1 <$> expr1) (op <$> oneOf "+ -") <?> "expr0"
  where
    op "+" = EAdd
    op "-" = ESub
    op _ = error "internal error"  -- unreachable: oneOf only yields the above

-- | Multiplicative level: left-associative \'*\', \'/\', \'.*\', \'./\'
-- and \'<>\'.
expr1 :: Parser Expr1
expr1 = chainl1 (Expr2 <$> expr2) (op <$> oneOf ".* ./ * / <>") <?> "expr1"
  where
    op "*" = EMul
    op "/" = EDiv
    op ".*" = EPtMul
    op "./" = EPtDiv
    op "<>" = EBDP
    op _ = error "internal error"  -- unreachable

-- | Prefix sign, and the tight binary/postfix operators
-- \'^\', \'@\', \'?\' and \'!\'.
expr2 :: Parser Expr2
expr2
    = pm <$> oneOf "+ -" <*> expr2
  <|> (expr3 >>= \g ->
            EPow g <$> (string "^" *> expr3)
        <|> EComp g <$> (string "@" *> expr3)
        <|> ECoef g <$> (string "?" *> expr3)
        <|> pure (EFac g) <* string "!"
        <|> pure (Expr3 g))
  <?> "expr2"
  where
    pm "+" = EPos
    pm "-" = ENeg
    pm _ = error "internal error"  -- unreachable

-- | Atoms.  Alternative order matters: set names before general
-- transform names, and keywords before bare identifiers.
expr3 :: Parser Expr3
expr3
    = ESet <$> name' <*> parens (expr `sepBy` char ',')
  <|> EApp <$> name <*> parens (expr0 `sepBy` char ',')
  <|> ELit <$> decimal
  <|> const EDZ <$> string "DZ"
  <|> const EIndet <$> string "Indet"
  <|> EA <$> aNumInt
  <|> ETag <$> tag
  <|> EVar <$> var
  <|> const EX <$> string "x"
  <|> ERats <$> R.rats
  <|> Expr <$> parens expr
  <?> "expr3"
-- | Names that may not be used as variables: \"x\" and every built-in
-- transform name.
reserved :: [Name]
reserved = "x" : transforms

-- | Parser for the names of expression sets.
name' :: Parser Name
name' = string "poly" <|> string "rat" <|> string "frac" <|> string "oneof"

-- | Parser for identifiers: an ASCII letter run followed by letters,
-- digits or underscores.
name :: Parser Name
name = mappend <$> takeWhile1 isAlpha_ascii
               <*> A.takeWhile (\c -> isAlpha_ascii c || isDigit c || c == '_')

-- | Parser for variable names: any identifier that is not 'reserved'.
var :: Parser ByteString
var = name >>= \s -> if s `elem` reserved then mzero else return s

-- | Parse an expression.
-- Spaces and tabs are stripped first, then everything from the first
-- \'#\' onwards is discarded as a comment.
parseExpr :: ByteString -> Maybe Expr
parseExpr = parse_ (expr <* endOfInput) . B.takeWhile (/='#') . B.filter f
  where
    f '\t' = False
    f ' ' = False
    f _ = True

-- | Parse a program and possibly fail with an error.
parseExprErr :: ByteString -> Expr
parseExprErr = fromMaybe (error "error parsing program") . parseExpr
| akc/gfscript | HOPS/GF.hs | bsd-3-clause | 20,177 | 0 | 31 | 4,790 | 8,617 | 4,392 | 4,225 | 517 | 6 |
-- Copyright (c) 2016 Eric McCorkle. All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
--
-- 1. Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
--
-- 2. Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
-- 3. Neither the name of the author nor the names of any contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS''
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
-- TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-- PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS
-- OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
-- USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
-- OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-- SUCH DAMAGE.
{-# OPTIONS_GHC -funbox-strict-fields -Wall -Werror #-}
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances, UndecidableInstances #-}
module Control.Monad.Messages(
MonadMessages(..),
MessagesT,
Messages,
runMessagesT,
runMessages,
mapMessagesT,
putMessagesT,
putMessagesTNoContext,
putMessagesTXML,
putMessagesTXMLNoContext
) where
import Control.Applicative
import Control.Monad.Artifacts.Class
import Control.Monad.CommentBuffer.Class
import Control.Monad.Comments.Class
import Control.Monad.Cont
import Control.Monad.Except
import Control.Monad.Genpos.Class
import Control.Monad.Gensym.Class
import Control.Monad.GraphBuilder.Class
import Control.Monad.Journal
import Control.Monad.Keywords.Class
import Control.Monad.Loader.Class
import Control.Monad.Messages.Class
import Control.Monad.Positions.Class
import Control.Monad.Reader
import Control.Monad.ScopeBuilder.Class
import Control.Monad.SourceFiles.Class
import Control.Monad.SourceBuffer.Class
import Control.Monad.State
import Control.Monad.Symbols.Class
import Control.Monad.Writer
import System.IO
import qualified Data.Message as Message
import qualified Data.Position as Position
-- | Internal accumulator threaded through the 'WriterT' that backs
-- 'MessagesT': the collected messages together with the worst severity
-- reported so far.
data MessageState msgs =
  MessageState {
    -- | All reported messages.
    msMessages :: !msgs,
    -- | The highest severity seen so far.
    msSeverity :: !Message.Severity
  }
-- | Monad transformer that accumulates compiler messages alongside a
-- computation.  It is a thin wrapper over 'WriterT' with 'MessageState'
-- as the accumulated monoid.
newtype MessagesT msgs msg m a =
  MessagesT { unpackMessagesT :: (WriterT (MessageState msgs) m) a }

-- | 'MessagesT' specialized to an 'IO' base monad.
type Messages msgs msg a = MessagesT msgs msg IO a
-- | Run a 'MessagesT' computation in the underlying monad, yielding the
-- result together with the final accumulated 'MessageState'.
runMessagesT :: Monad m =>
                MessagesT msgs msg m a
             -> m (a, MessageState msgs)
runMessagesT = runWriterT . unpackMessagesT
-- | Execute the computation wrapped in a MessagesT monad transformer.
-- This is just 'runMessagesT' specialized to the 'IO' base monad.
runMessages :: Messages msgs msg a
            -> IO (a, MessageState msgs)
runMessages = runMessagesT
-- | Run a 'MessagesT' computation, print every collected message to the
-- given handle, and yield the result only if the worst severity that
-- was reported stays strictly below the given maximum.
putMessagesT :: (Message.Messages msg msgs, Message.MessagePosition pos msg,
                 Position.Position info pos, Position.PositionInfo info,
                 MonadSourceFiles m, MonadPositions m, MonadIO m) =>
                Handle
             -> Message.Severity
             -> MessagesT msgs msg m a
             -> m (Maybe a)
putMessagesT handle maxsev m =
  do
    (res, mstate) <- runMessagesT m
    Message.putMessages handle (msMessages mstate)
    return (if msSeverity mstate < maxsev then Just res else Nothing)
-- | Run a 'MessagesT' computation, print every collected message to the
-- given handle without context strings, and yield the result only if
-- the worst severity that was reported stays strictly below the given
-- maximum.
putMessagesTNoContext :: (Message.Messages msg msgs,
                          Message.MessagePosition pos msg,
                          Position.Position info pos,
                          Position.PositionInfo info,
                          MonadPositions m, MonadIO m) =>
                         Handle
                      -> Message.Severity
                      -> MessagesT msgs msg m a
                      -> m (Maybe a)
putMessagesTNoContext handle maxsev m =
  do
    (res, mstate) <- runMessagesT m
    Message.putMessagesNoContext handle (msMessages mstate)
    return (if msSeverity mstate < maxsev then Just res else Nothing)
-- | Run a 'MessagesT' computation, print every collected message as XML
-- to the given handle, and yield the result only if the worst severity
-- that was reported stays strictly below the given maximum.
putMessagesTXML :: (Message.Messages msg msgs, MonadSourceFiles m,
                    Message.MessagePosition pos msg, Position.Position info pos,
                    Position.PositionInfo info, MonadPositions m, MonadIO m) =>
                   Handle
                -> Message.Severity
                -> MessagesT msgs msg m a
                -> m (Maybe a)
putMessagesTXML handle maxsev m =
  do
    (res, mstate) <- runMessagesT m
    Message.putMessagesXML handle (msMessages mstate)
    return (if msSeverity mstate < maxsev then Just res else Nothing)
-- | Run a 'MessagesT' computation, print every collected message as XML
-- (without context strings) to the given handle, and yield the result
-- only if the worst severity that was reported stays strictly below the
-- given maximum.
putMessagesTXMLNoContext :: (Message.Messages msg msgs,
                             Message.MessagePosition pos msg,
                             Position.Position info pos,
                             Position.PositionInfo info,
                             MonadPositions m, MonadIO m) =>
                            Handle
                         -> Message.Severity
                         -> MessagesT msgs msg m a
                         -> m (Maybe a)
putMessagesTXMLNoContext handle maxsev m =
  do
    (res, mstate) <- runMessagesT m
    Message.putMessagesXMLNoContext handle (msMessages mstate)
    return (if msSeverity mstate < maxsev then Just res else Nothing)
-- | Transform the underlying computation (and possibly the message
-- collection type) of a 'MessagesT' with the given function.
mapMessagesT :: (Monad m, Monad n) =>
                (m (a, MessageState msgsa) -> n (b, MessageState msgsb)) ->
                MessagesT msgsa msg m a -> MessagesT msgsb msg n b
mapMessagesT f m = MessagesT (mapWriterT f (unpackMessagesT m))
-- | Record a single message in the underlying 'WriterT', seeding the
-- severity accumulator with the message's own severity.
message' :: (Message.Messages msg msgs, Monad m) => msg ->
            (WriterT (MessageState msgs) m) ()
message' msg = tell MessageState { msMessages = Message.singleton msg,
                                   msSeverity = Message.severity msg }
-- Combining two message states unions the message collections and
-- combines the severities with the 'Message.Severity' monoid.
instance Monoid msgs => Monoid (MessageState msgs) where
  mempty = MessageState { msMessages = mempty, msSeverity = mempty }
  mappend MessageState { msMessages = msgs1, msSeverity = s1 }
          MessageState { msMessages = msgs2, msSeverity = s2 } =
    MessageState { msMessages = msgs1 <> msgs2, msSeverity = s1 <> s2 }
-- Monad and Applicative simply delegate to the wrapped 'WriterT'
-- computation.
instance (Monoid msgs, Monad m) => Monad (MessagesT msgs msg m) where
  return = MessagesT . return
  s >>= f = MessagesT $ unpackMessagesT s >>= unpackMessagesT . f

instance (Monoid msgs, Monad m) => Applicative (MessagesT msgs msg m) where
  pure = return
  (<*>) = ap
-- Alternative delegates choice to the underlying monad's 'Alternative'.
instance (Monoid msgs, Monad m, Alternative m) =>
         Alternative (MessagesT msgs msg m) where
  empty = lift empty
  s1 <|> s2 = MessagesT (unpackMessagesT s1 <|> unpackMessagesT s2)
-- Lift 'fmap' through the wrapped 'WriterT' computation.
--
-- NB: the previous definition was @fmap = fmap@, which makes the method
-- refer to itself (the RHS resolves to this very instance), so any use
-- of 'fmap' or '<$>' on a 'MessagesT' value looped forever.  The
-- 'Functor m' constraint is required by the 'WriterT' Functor instance
-- we delegate to.
instance Functor m => Functor (MessagesT msgs msg m) where
  fmap f = MessagesT . fmap f . unpackMessagesT
-- IO actions and lifting are forwarded straight to the wrapped
-- 'WriterT'.
instance (Monoid msgs, MonadIO m) => MonadIO (MessagesT msgs msg m) where
  liftIO = MessagesT . liftIO

instance Monoid msgs => MonadTrans (MessagesT msgs msg) where
  lift = MessagesT . lift
-- The whole point of this transformer: 'MessagesT' itself provides the
-- 'MonadMessages' capability via the internal 'message'' helper.
instance (Message.Messages msg msgs, Message.Message msg,
          Monoid msgs, Monad m) =>
         MonadMessages msg (MessagesT msgs msg m) where
  message = MessagesT . message'
-- All the instances below are boilerplate: each class operation is
-- simply lifted through 'MessagesT' to the underlying monad.

instance (Monoid msgs, MonadArtifacts path m) =>
         MonadArtifacts path (MessagesT msgs msg m) where
  artifact path = lift . artifact path
  artifactBytestring path = lift . artifactBytestring path
  artifactLazyBytestring path = lift . artifactLazyBytestring path

instance (Monoid msgs, MonadCommentBuffer m) =>
         MonadCommentBuffer (MessagesT msgs msg m) where
  startComment = lift startComment
  appendComment = lift . appendComment
  finishComment = lift finishComment
  addComment = lift . addComment
  saveCommentsAsPreceeding = lift . saveCommentsAsPreceeding
  clearComments = lift clearComments

instance (Monoid msgs, MonadComments m) =>
         MonadComments (MessagesT msgs msg m) where
  preceedingComments = lift . preceedingComments

instance (Monoid msgs, MonadCont m) => MonadCont (MessagesT msgs msg m) where
  callCC f = MessagesT (callCC (\c -> unpackMessagesT (f (MessagesT . c))))

instance (Monoid msgs, MonadError e m) =>
         MonadError e (MessagesT msgs msg m) where
  throwError = lift . throwError
  m `catchError` h =
    MessagesT (unpackMessagesT m `catchError` (unpackMessagesT . h))

instance (Monoid msgs, MonadEdgeBuilder nodety m) =>
         MonadEdgeBuilder nodety (MessagesT msgs msg m) where
  addEdge src dst = lift . addEdge src dst

instance (Monoid msgs, MonadGenpos m) =>
         MonadGenpos (MessagesT msgs msg m) where
  point = lift . point
  filename = lift . filename

instance (Monoid msgs, MonadGensym m) =>
         MonadGensym (MessagesT msgs msg m) where
  symbol = lift . symbol
  unique = lift . unique

instance (Monoid msgs, Monoid w, MonadJournal w m) =>
         MonadJournal w (MessagesT msgs msg m) where
  journal = lift . journal
  history = lift history
  clear = lift clear

instance (Monoid msgs, MonadKeywords p t m) =>
         MonadKeywords p t (MessagesT msgs msg m) where
  mkKeyword p = lift . mkKeyword p

instance (Monoid msgs, MonadLoader path info m) =>
         MonadLoader path info (MessagesT msgs msg m) where
  load = lift . load

instance (Monoid msgs, MonadNodeBuilder nodety m) =>
         MonadNodeBuilder nodety (MessagesT msgs msg m) where
  addNode = lift . addNode

instance (Monoid msgs, MonadPositions m) =>
         MonadPositions (MessagesT msgs msg m) where
  pointInfo = lift . pointInfo
  fileInfo = lift . fileInfo

instance (Monoid msgs, MonadScopeStack m) =>
         MonadScopeStack (MessagesT msgs msg m) where
  enterScope = lift . enterScope
  finishScope = lift finishScope

instance (Monoid msgs, MonadScopeBuilder tmpscope m) =>
         MonadScopeBuilder tmpscope (MessagesT msgs msg m) where
  getScope = lift getScope
  setScope = lift . setScope

instance (Monoid msgs, MonadSourceFiles m) =>
         MonadSourceFiles (MessagesT msgs msg m) where
  sourceFile = lift . sourceFile

instance (Monoid msgs, MonadSourceBuffer m) =>
         MonadSourceBuffer (MessagesT msgs msg m) where
  linebreak = lift . linebreak
  startFile fname = lift . startFile fname
  finishFile = lift finishFile

instance (Monoid msgs, MonadState s m) =>
         MonadState s (MessagesT msgs msg m) where
  get = lift get
  put = lift . put

instance (Monoid msgs, MonadSymbols m) =>
         MonadSymbols (MessagesT msgs msg m) where
  nullSym = lift nullSym
  allNames = lift allNames
  allSyms = lift allSyms
  name = lift . name

instance (Monoid msgs, MonadReader r m) =>
         MonadReader r (MessagesT msgs msg m) where
  ask = lift ask
  local f = mapMessagesT (local f)
{-
instance (Monoid msgs, MonadWriter w m) =>
MonadWriter w (MessagesT msgs msg m) where
tell = lift . tell
listen = mapMessagesT listen
pass = mapMessagesT pass
-}
-- MonadPlus and MonadFix likewise delegate to the wrapped 'WriterT'.
instance (Monoid msgs, MonadPlus m) => MonadPlus (MessagesT msgs msg m) where
  mzero = lift mzero
  mplus s1 s2 = MessagesT (mplus (unpackMessagesT s1) (unpackMessagesT s2))

instance (Monoid msgs, MonadFix m) => MonadFix (MessagesT msgs msg m) where
  mfix f = MessagesT (mfix (unpackMessagesT . f))
| emc2/compiler-misc | src/Control/Monad/Messages.hs | bsd-3-clause | 12,649 | 0 | 15 | 3,028 | 3,051 | 1,628 | 1,423 | 239 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
module Text.Translate (translate) where
-- This module is just a wrapper to Text.Language.Translate
-- It exists for backward compatibility.
import Text.Language.Translate (translate)
| nfjinjing/translate | src/Text/Translate.hs | bsd-3-clause | 223 | 0 | 5 | 29 | 25 | 17 | 8 | 3 | 0 |
{-| Path-related helper functions.
-}
{-
Copyright (C) 2012, 2014 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.Path
( dataDir
, runDir
, logDir
, socketDir
, luxidMessageDir
, livelockDir
, livelockFile
, defaultQuerySocket
, defaultWConfdSocket
, defaultMetadSocket
, confdHmacKey
, clusterConfFile
, lockStatusFile
, tempResStatusFile
, watcherPauseFile
, nodedCertFile
, nodedClientCertFile
, queueDir
, jobQueueSerialFile
, jobQueueLockFile
, jobQueueDrainFile
, jobQueueArchiveSubDir
, instanceReasonDir
, getInstReasonFilename
, jqueueExecutorPy
, postHooksExecutorPy
, kvmPidDir
) where
import System.FilePath
import System.Posix.Env (getEnvDefault)
import AutoConf
-- | Append a path component to a path produced by an IO action.
pjoin :: IO String -> String -> IO String
pjoin a b = fmap (</> b) a
-- | Returns the root directory, which can be either the real root or
-- the virtual root.
getRootDir :: IO FilePath
getRootDir = getEnvDefault "GANETI_ROOTDIR" ""
-- | Prefixes a path with the current (possibly virtual) root directory.
--
-- NOTE: plain '(++)' is used rather than '(</>)'; the supplied paths
-- are expected to already start with a separator.
addNodePrefix :: FilePath -> IO FilePath
addNodePrefix path = fmap (++ path) getRootDir
-- | Directory for data.
dataDir :: IO FilePath
dataDir = addNodePrefix $ AutoConf.localstatedir </> "lib" </> "ganeti"
-- | Helper for building on top of dataDir (internal).
dataDirP :: FilePath -> IO FilePath
dataDirP = (dataDir `pjoin`)
-- | Directory for runtime files.
runDir :: IO FilePath
runDir = addNodePrefix $ AutoConf.localstatedir </> "run" </> "ganeti"
-- | Directory for log files.
logDir :: IO FilePath
logDir = addNodePrefix $ AutoConf.localstatedir </> "log" </> "ganeti"
-- | Directory for Unix sockets.
socketDir :: IO FilePath
socketDir = runDir `pjoin` "socket"
-- | Directory for the jobs' livelocks.
livelockDir :: IO FilePath
livelockDir = runDir `pjoin` "livelocks"
-- | Directory for luxid to write messages to running jobs, like
-- requests to change the priority.
luxidMessageDir :: IO FilePath
luxidMessageDir = runDir `pjoin` "luxidmessages"
-- | A helper for building a job's livelock file. It prepends
-- 'livelockDir' to a given filename.
livelockFile :: FilePath -> IO FilePath
livelockFile = pjoin livelockDir
-- | The default LUXI socket for queries.
defaultQuerySocket :: IO FilePath
defaultQuerySocket = socketDir `pjoin` "ganeti-query"
-- | The default WConfD socket for queries.
defaultWConfdSocket :: IO FilePath
defaultWConfdSocket = socketDir `pjoin` "ganeti-wconfd"
-- | The default MetaD socket for communication.
defaultMetadSocket :: IO FilePath
defaultMetadSocket = socketDir `pjoin` "ganeti-metad"
-- | Path to file containing confd's HMAC key.
confdHmacKey :: IO FilePath
confdHmacKey = dataDirP "hmac.key"
-- | Path to cluster configuration file.
clusterConfFile :: IO FilePath
clusterConfFile = dataDirP "config.data"
-- | Path to the file representing the lock status.
lockStatusFile :: IO FilePath
lockStatusFile = dataDirP "locks.data"
-- | Path to the file representing the temporary reservations status
-- ("tempres.data").  (The previous comment was copy-pasted from
-- 'lockStatusFile'.)
tempResStatusFile :: IO FilePath
tempResStatusFile = dataDirP "tempres.data"
-- | Path to the watcher pause file.
watcherPauseFile :: IO FilePath
watcherPauseFile = dataDirP "watcher.pause"
-- | Path to the noded certificate.
nodedCertFile :: IO FilePath
nodedCertFile = dataDirP "server.pem"
-- | Path to the noded client certificate.
nodedClientCertFile :: IO FilePath
nodedClientCertFile = dataDirP "client.pem"
-- | Job queue directory.
queueDir :: IO FilePath
queueDir = dataDirP "queue"
-- | Job queue serial file.
jobQueueSerialFile :: IO FilePath
jobQueueSerialFile = queueDir `pjoin` "serial"
-- | Job queue lock file
jobQueueLockFile :: IO FilePath
jobQueueLockFile = queueDir `pjoin` "lock"
-- | Job queue drain file
jobQueueDrainFile :: IO FilePath
jobQueueDrainFile = queueDir `pjoin` "drain"
-- | Job queue archive directory.
jobQueueArchiveSubDir :: FilePath
jobQueueArchiveSubDir = "archive"
-- | Directory containing the reason trails for the last change of status of
-- instances.
instanceReasonDir :: IO FilePath
instanceReasonDir = runDir `pjoin` "instance-reason"
-- | The path of the file containing the reason trail for an instance, given the
-- instance name.
getInstReasonFilename :: String -> IO FilePath
getInstReasonFilename = pjoin instanceReasonDir
-- | The path to the Python executable for starting jobs.
jqueueExecutorPy :: IO FilePath
jqueueExecutorPy = return $ versionedsharedir
</> "ganeti" </> "jqueue" </> "exec.py"
-- | The path to the Python executable for global post hooks of job which
-- process has disappeared.
postHooksExecutorPy :: IO FilePath
postHooksExecutorPy =
return $ versionedsharedir </> "ganeti" </> "jqueue" </> "post_hooks_exec.py"
-- | The path to the directory where kvm stores the pid files.
kvmPidDir :: IO FilePath
kvmPidDir = runDir `pjoin` ("kvm-hypervisor" </> "pid")
| leshchevds/ganeti | src/Ganeti/Path.hs | bsd-2-clause | 6,199 | 0 | 8 | 1,028 | 811 | 464 | 347 | 99 | 1 |
{-# LANGUAGE CPP #-}
module NameShape(
NameShape(..),
emptyNameShape,
mkNameShape,
extendNameShape,
nameShapeExports,
substNameShape,
) where
#include "HsVersions.h"
import Outputable
import HscTypes
import Module
import UniqFM
import Avail
import FieldLabel
import Name
import NameEnv
import TcRnMonad
import Util
import IfaceEnv
import Control.Monad
-- Note [NameShape]
-- ~~~~~~~~~~~~~~~~
-- When we write a declaration in a signature, e.g., data T, we
-- ascribe to it a *name variable*, e.g., {m.T}. This
-- name variable may be substituted with an actual original
-- name when the signature is implemented (or even if we
-- merge the signature with one which reexports this entity
-- from another module).
-- When we instantiate a signature m with a module M,
-- we also need to substitute over names. To do so, we must
-- compute the *name substitution* induced by the *exports*
-- of the module in question. A NameShape represents
-- such a name substitution for a single module instantiation.
-- The "shape" in the name comes from the fact that the computation
-- of a name substitution is essentially the *shaping pass* from
-- Backpack'14, but in a far more restricted form.
-- The name substitution for an export list is easy to explain. If we are
-- filling the module variable <m>, given an export N of the form
-- M.n or {m'.n} (where n is an OccName), the induced name
-- substitution is from {m.n} to N. So, for example, if we have
-- A=impl:B, and the exports of impl:B are impl:B.f and
-- impl:C.g, then our name substitution is {A.f} to impl:B.f
-- and {A.g} to impl:C.g
-- The 'NameShape' type is defined in TcRnTypes, because TcRnTypes
-- needs to refer to NameShape, and having TcRnTypes import
-- NameShape (even by SOURCE) would cause a large number of
-- modules to be pulled into the DynFlags cycle.
{-
data NameShape = NameShape {
ns_mod_name :: ModuleName,
ns_exports :: [AvailInfo],
ns_map :: OccEnv Name
}
-}
-- NB: substitution functions need 'HscEnv' since they need the name cache
-- to allocate new names if we change the 'Module' of a 'Name'
-- | Create an empty 'NameShape' (i.e., the renaming that
-- would occur with an implementing module with no exports)
-- for a specific hole @mod_name@.
emptyNameShape :: ModuleName -> NameShape
emptyNameShape mod_name = NameShape mod_name [] emptyOccEnv
-- | Create a 'NameShape' corresponding to an implementing
-- module for the hole @mod_name@ that exports a list of 'AvailInfo's.
mkNameShape :: ModuleName -> [AvailInfo] -> NameShape
mkNameShape mod_name as =
    NameShape mod_name as env
  where
    -- Index every exported name (parent and children) by its OccName.
    env = mkOccEnv [ (occName n, n) | a <- as, n <- availName a : availNames a ]
-- | Given an existing 'NameShape', merge it with a list of 'AvailInfo's
-- with Backpack style mix-in linking. This is used solely when merging
-- signatures together: we successively merge the exports of each
-- signature until we have the final, full exports of the merged signature.
--
-- What makes this operation nontrivial is what we are supposed to do when
-- we want to merge in an export for M.T when we already have an existing
-- export {H.T}. What should happen in this case is that {H.T} should be
-- unified with @M.T@: we've determined a more *precise* identity for the
-- export at 'OccName' @T@.
--
-- Note that we don't do unrestricted unification: only name holes from
-- @ns_mod_name ns@ are flexible. This is because we have a much more
-- restricted notion of shaping than in Backpack'14: we do shaping
-- *as* we do type-checking. Thus, once we shape a signature, its
-- exports are *final* and we're not allowed to refine them further,
extendNameShape :: HscEnv -> NameShape -> [AvailInfo] -> IO (Either SDoc NameShape)
extendNameShape hsc_env ns as =
    case uAvailInfos (ns_mod_name ns) (ns_exports ns) as of
        -- Unification of the two export lists failed; propagate the error.
        Left err -> return (Left err)
        Right nsubst -> do
            -- Apply the computed substitution to both export lists...
            as1 <- mapM (liftIO . substNameAvailInfo hsc_env nsubst) (ns_exports ns)
            as2 <- mapM (liftIO . substNameAvailInfo hsc_env nsubst) as
            -- ...and merge them into the new canonical export list.
            let new_avails = mergeAvails as1 as2
            return . Right $ ns {
                ns_exports = new_avails,
                -- TODO: stop repeatedly rebuilding the OccEnv
                ns_map = mkOccEnv $ do
                            a <- new_avails
                            n <- availName a : availNames a
                            return (occName n, n)
                }
-- | The export list associated with this 'NameShape' (i.e., what
-- the exports of an implementing module which induces this 'NameShape'
-- would be.)
nameShapeExports :: NameShape -> [AvailInfo]
nameShapeExports = ns_exports
-- | Given a 'Name', substitute it according to the 'NameShape' implied
-- substitution, i.e. map @{A.T}@ to @M.T@, if the implementing module
-- exports @M.T@.  Names from other modules, and names not exported by
-- the implementing module, are left alone.
substNameShape :: NameShape -> Name -> Name
substNameShape ns n
  | nameModule n /= ns_module ns = n
  | otherwise =
      case lookupOccEnv (ns_map ns) (occName n) of
        Just n' -> n'
        Nothing -> n
-- | The 'Module' of any 'Name's a 'NameShape' has action over.
ns_module :: NameShape -> Module
ns_module = mkHoleModule . ns_mod_name
{-
************************************************************************
* *
Name substitutions
* *
************************************************************************
-}
-- | Substitution on @{A.T}@. We enforce the invariant that the
-- 'nameModule' of keys of this map have 'moduleUnitId' @hole@
-- (meaning that if we have a hole substitution, the keys of the map
-- are never affected.) Alternately, this is ismorphic to
-- @Map ('ModuleName', 'OccName') 'Name'@.
type ShNameSubst = NameEnv Name
-- NB: In this module, we actually only ever construct 'ShNameSubst'
-- at a single 'ModuleName'. But 'ShNameSubst' is more convenient to
-- work with.
-- | Substitute names in a 'Name'; names missing from the substitution
-- are returned unchanged.
substName :: ShNameSubst -> Name -> Name
substName env n =
    case lookupNameEnv env n of
        Just n' -> n'
        Nothing -> n
-- | Substitute names in an 'AvailInfo'. This has special behavior
-- for type constructors, where it is sufficient to substitute the 'availName'
-- to induce a substitution on 'availNames'.
substNameAvailInfo :: HscEnv -> ShNameSubst -> AvailInfo -> IO AvailInfo
substNameAvailInfo _ env (Avail n) = return (Avail (substName env n))
substNameAvailInfo hsc_env env (AvailTC n ns fs) =
let mb_mod = fmap nameModule (lookupNameEnv env n)
in AvailTC (substName env n)
<$> mapM (initIfaceLoad hsc_env . setNameModule mb_mod) ns
<*> mapM (setNameFieldSelector hsc_env mb_mod) fs
-- | Set the 'Module' of a 'FieldSelector'.  A 'Nothing' module leaves
-- the field label untouched.
setNameFieldSelector :: HscEnv -> Maybe Module -> FieldLabel -> IO FieldLabel
setNameFieldSelector _ Nothing f = return f
setNameFieldSelector hsc_env mb_mod (FieldLabel l b sel) =
    fmap (FieldLabel l b) (initIfaceLoad hsc_env (setNameModule mb_mod sel))
{-
************************************************************************
* *
AvailInfo merging
* *
************************************************************************
-}
-- | Merges to 'AvailInfo' lists together, assuming the 'AvailInfo's have
-- already been unified ('uAvailInfos').
mergeAvails :: [AvailInfo] -> [AvailInfo] -> [AvailInfo]
mergeAvails as1 as2 =
let mkNE as = mkNameEnv [(availName a, a) | a <- as]
in nameEnvElts (plusNameEnv_C plusAvail (mkNE as1) (mkNE as2))
{-
************************************************************************
* *
AvailInfo unification
* *
************************************************************************
-}
-- | Unify two lists of 'AvailInfo's, given an existing substitution @subst@,
-- with only name holes from @flexi@ unifiable (all other name holes rigid.)
uAvailInfos :: ModuleName -> [AvailInfo] -> [AvailInfo] -> Either SDoc ShNameSubst
uAvailInfos flexi as1 as2 = -- pprTrace "uAvailInfos" (ppr as1 $$ ppr as2) $
let mkOE as = listToUFM $ do a <- as
n <- availNames a
return (nameOccName n, a)
in foldM (\subst (a1, a2) -> uAvailInfo flexi subst a1 a2) emptyNameEnv
(eltsUFM (intersectUFM_C (,) (mkOE as1) (mkOE as2)))
-- Edward: I have to say, this is pretty clever.
-- | Unify two 'AvailInfo's, given an existing substitution @subst@,
-- with only name holes from @flexi@ unifiable (all other name holes rigid.)
uAvailInfo :: ModuleName -> ShNameSubst -> AvailInfo -> AvailInfo
-> Either SDoc ShNameSubst
uAvailInfo flexi subst (Avail n1) (Avail n2) = uName flexi subst n1 n2
uAvailInfo flexi subst (AvailTC n1 _ _) (AvailTC n2 _ _) = uName flexi subst n1 n2
uAvailInfo _ _ a1 a2 = Left $ text "While merging export lists, could not combine"
<+> ppr a1 <+> text "with" <+> ppr a2
<+> parens (text "one is a type, the other is a plain identifier")
-- | Unify two 'Name's, given an existing substitution @subst@,
-- with only name holes from @flexi@ unifiable (all other name holes rigid.)
uName :: ModuleName -> ShNameSubst -> Name -> Name -> Either SDoc ShNameSubst
uName flexi subst n1 n2
    | n1 == n2 = Right subst
    | isFlexi n1 = uHoleName flexi subst n1 n2
    | isFlexi n2 = uHoleName flexi subst n2 n1
    | otherwise = Left (text "While merging export lists, could not unify"
                     <+> ppr n1 <+> text "with" <+> ppr n2 $$ extra)
  where
    -- A name is unifiable only if it is a hole in the module currently
    -- being shaped.
    isFlexi n = isHoleName n && moduleName (nameModule n) == flexi
    -- Extra hint printed when the clash involves name holes from some
    -- other signature.
    extra | isHoleName n1 || isHoleName n2
          = text "Neither name variable originates from the current signature."
          | otherwise
          = empty
-- | Unify a name @h@ which 'isHoleName' with another name, given an existing
-- substitution @subst@, with only name holes from @flexi@ unifiable (all
-- other name holes rigid.)  If @h@ is already substituted, we recursively
-- unify its substituted value with @n@ instead.
uHoleName :: ModuleName -> ShNameSubst -> Name {- hole name -} -> Name
          -> Either SDoc ShNameSubst
uHoleName flexi subst h n =
    ASSERT( isHoleName h )
    case lookupNameEnv subst h of
        Just n' -> uName flexi subst n' n
                -- Do a quick check if the other name is substituted.
        Nothing | Just n' <- lookupNameEnv subst n ->
                    ASSERT( isHoleName n ) uName flexi subst h n'
                | otherwise ->
                    Right (extendNameEnv subst h n)
| olsner/ghc | compiler/backpack/NameShape.hs | bsd-3-clause | 11,000 | 0 | 18 | 2,924 | 1,658 | 851 | 807 | -1 | -1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
Bag: an unordered collection with duplicates
-}
{-# LANGUAGE DeriveDataTypeable, ScopedTypeVariables #-}
module ETA.Utils.Bag (
Bag, -- abstract type
emptyBag, unitBag, unionBags, unionManyBags,
mapBag,
elemBag, lengthBag,
filterBag, partitionBag, partitionBagWith,
concatBag, foldBag, foldrBag, foldlBag,
isEmptyBag, isSingletonBag, consBag, snocBag, anyBag,
listToBag, bagToList,
foldrBagM, foldlBagM, mapBagM, mapBagM_,
flatMapBagM, flatMapBagPairM,
mapAndUnzipBagM, mapAccumBagLM
) where
import ETA.Utils.Outputable
import ETA.Utils.Util
import ETA.Utils.MonadUtils
import Data.Data
import Data.List ( partition )
infixr 3 `consBag`
infixl 3 `snocBag`
-- | An unordered collection which may contain duplicates.  The
-- representation is a tree with list leaves, so 'unionBags' is O(1);
-- the constructors are not exported, keeping the type abstract.
data Bag a
  = EmptyBag
  | UnitBag a
  | TwoBags (Bag a) (Bag a) -- INVARIANT: neither branch is empty
  | ListBag [a]             -- INVARIANT: the list is non-empty
  deriving Typeable
emptyBag :: Bag a
emptyBag = EmptyBag
unitBag :: a -> Bag a
unitBag = UnitBag
lengthBag :: Bag a -> Int
lengthBag EmptyBag = 0
lengthBag (UnitBag {}) = 1
lengthBag (TwoBags b1 b2) = lengthBag b1 + lengthBag b2
lengthBag (ListBag xs) = length xs
-- | Membership test: is the given element anywhere in the bag?
elemBag :: Eq a => a -> Bag a -> Bool
elemBag x = anyBag (x ==)
unionManyBags :: [Bag a] -> Bag a
unionManyBags xs = foldr unionBags EmptyBag xs
-- This one is a bit stricter! The bag will get completely evaluated.
unionBags :: Bag a -> Bag a -> Bag a
unionBags EmptyBag b = b
unionBags b EmptyBag = b
unionBags b1 b2 = TwoBags b1 b2
consBag :: a -> Bag a -> Bag a
snocBag :: Bag a -> a -> Bag a
consBag elt bag = (unitBag elt) `unionBags` bag
snocBag bag elt = bag `unionBags` (unitBag elt)
isEmptyBag :: Bag a -> Bool
isEmptyBag EmptyBag = True
isEmptyBag _ = False -- NB invariants
isSingletonBag :: Bag a -> Bool
isSingletonBag EmptyBag = False
isSingletonBag (UnitBag _) = True
isSingletonBag (TwoBags _ _) = False -- Neither is empty
isSingletonBag (ListBag xs) = isSingleton xs
-- | Keep only the elements satisfying the predicate.  The bag
-- invariants are preserved: 'unionBags' drops empty branches and
-- 'listToBag' maps the empty list to 'EmptyBag'.
filterBag :: (a -> Bool) -> Bag a -> Bag a
filterBag _ EmptyBag = EmptyBag
filterBag keep b@(UnitBag val)
  | keep val  = b
  | otherwise = EmptyBag
filterBag keep (TwoBags b1 b2) =
  filterBag keep b1 `unionBags` filterBag keep b2
filterBag keep (ListBag vs) = listToBag (filter keep vs)
anyBag :: (a -> Bool) -> Bag a -> Bool
anyBag _ EmptyBag = False
anyBag p (UnitBag v) = p v
anyBag p (TwoBags b1 b2) = anyBag p b1 || anyBag p b2
anyBag p (ListBag xs) = any p xs
concatBag :: Bag (Bag a) -> Bag a
concatBag EmptyBag = EmptyBag
concatBag (UnitBag b) = b
concatBag (TwoBags b1 b2) = concatBag b1 `unionBags` concatBag b2
concatBag (ListBag bs) = unionManyBags bs
partitionBag :: (a -> Bool) -> Bag a -> (Bag a {- Satisfy predictate -},
Bag a {- Don't -})
partitionBag _ EmptyBag = (EmptyBag, EmptyBag)
partitionBag pred b@(UnitBag val)
= if pred val then (b, EmptyBag) else (EmptyBag, b)
partitionBag pred (TwoBags b1 b2)
= (sat1 `unionBags` sat2, fail1 `unionBags` fail2)
where (sat1, fail1) = partitionBag pred b1
(sat2, fail2) = partitionBag pred b2
partitionBag pred (ListBag vs) = (listToBag sats, listToBag fails)
where (sats, fails) = partition pred vs
partitionBagWith :: (a -> Either b c) -> Bag a
-> (Bag b {- Left -},
Bag c {- Right -})
partitionBagWith _ EmptyBag = (EmptyBag, EmptyBag)
partitionBagWith pred (UnitBag val)
= case pred val of
Left a -> (UnitBag a, EmptyBag)
Right b -> (EmptyBag, UnitBag b)
partitionBagWith pred (TwoBags b1 b2)
= (sat1 `unionBags` sat2, fail1 `unionBags` fail2)
where (sat1, fail1) = partitionBagWith pred b1
(sat2, fail2) = partitionBagWith pred b2
partitionBagWith pred (ListBag vs) = (listToBag sats, listToBag fails)
where (sats, fails) = partitionWith pred vs
foldBag :: (r -> r -> r) -- Replace TwoBags with this; should be associative
-> (a -> r) -- Replace UnitBag with this
-> r -- Replace EmptyBag with this
-> Bag a
-> r
{- Standard definition
foldBag t u e EmptyBag = e
foldBag t u e (UnitBag x) = u x
foldBag t u e (TwoBags b1 b2) = (foldBag t u e b1) `t` (foldBag t u e b2)
foldBag t u e (ListBag xs) = foldr (t.u) e xs
-}
-- More tail-recursive definition, exploiting associativity of "t"
foldBag _ _ e EmptyBag = e
foldBag t u e (UnitBag x) = u x `t` e
foldBag t u e (TwoBags b1 b2) = foldBag t u (foldBag t u e b2) b1
foldBag t u e (ListBag xs) = foldr (t.u) e xs
foldrBag :: (a -> r -> r) -> r
-> Bag a
-> r
foldrBag _ z EmptyBag = z
foldrBag k z (UnitBag x) = k x z
foldrBag k z (TwoBags b1 b2) = foldrBag k (foldrBag k z b2) b1
foldrBag k z (ListBag xs) = foldr k z xs
foldlBag :: (r -> a -> r) -> r
-> Bag a
-> r
foldlBag _ z EmptyBag = z
foldlBag k z (UnitBag x) = k z x
foldlBag k z (TwoBags b1 b2) = foldlBag k (foldlBag k z b1) b2
foldlBag k z (ListBag xs) = foldl k z xs
foldrBagM :: (Monad m) => (a -> b -> m b) -> b -> Bag a -> m b
foldrBagM _ z EmptyBag = return z
foldrBagM k z (UnitBag x) = k x z
foldrBagM k z (TwoBags b1 b2) = do { z' <- foldrBagM k z b2; foldrBagM k z' b1 }
foldrBagM k z (ListBag xs) = foldrM k z xs
foldlBagM :: (Monad m) => (b -> a -> m b) -> b -> Bag a -> m b
foldlBagM _ z EmptyBag = return z
foldlBagM k z (UnitBag x) = k z x
foldlBagM k z (TwoBags b1 b2) = do { z' <- foldlBagM k z b1; foldlBagM k z' b2 }
foldlBagM k z (ListBag xs) = foldlM k z xs
mapBag :: (a -> b) -> Bag a -> Bag b
mapBag _ EmptyBag = EmptyBag
mapBag f (UnitBag x) = UnitBag (f x)
mapBag f (TwoBags b1 b2) = TwoBags (mapBag f b1) (mapBag f b2)
mapBag f (ListBag xs) = ListBag (map f xs)
mapBagM :: Monad m => (a -> m b) -> Bag a -> m (Bag b)
mapBagM _ EmptyBag = return EmptyBag
mapBagM f (UnitBag x) = do r <- f x
return (UnitBag r)
mapBagM f (TwoBags b1 b2) = do r1 <- mapBagM f b1
r2 <- mapBagM f b2
return (TwoBags r1 r2)
mapBagM f (ListBag xs) = do rs <- mapM f xs
return (ListBag rs)
mapBagM_ :: Monad m => (a -> m b) -> Bag a -> m ()
mapBagM_ _ EmptyBag = return ()
mapBagM_ f (UnitBag x) = f x >> return ()
mapBagM_ f (TwoBags b1 b2) = mapBagM_ f b1 >> mapBagM_ f b2
mapBagM_ f (ListBag xs) = mapM_ f xs
flatMapBagM :: Monad m => (a -> m (Bag b)) -> Bag a -> m (Bag b)
flatMapBagM _ EmptyBag = return EmptyBag
flatMapBagM f (UnitBag x) = f x
flatMapBagM f (TwoBags b1 b2) = do r1 <- flatMapBagM f b1
r2 <- flatMapBagM f b2
return (r1 `unionBags` r2)
flatMapBagM f (ListBag xs) = foldrM k EmptyBag xs
where
k x b2 = do { b1 <- f x; return (b1 `unionBags` b2) }
flatMapBagPairM :: Monad m => (a -> m (Bag b, Bag c)) -> Bag a -> m (Bag b, Bag c)
flatMapBagPairM _ EmptyBag = return (EmptyBag, EmptyBag)
flatMapBagPairM f (UnitBag x) = f x
flatMapBagPairM f (TwoBags b1 b2) = do (r1,s1) <- flatMapBagPairM f b1
(r2,s2) <- flatMapBagPairM f b2
return (r1 `unionBags` r2, s1 `unionBags` s2)
flatMapBagPairM f (ListBag xs) = foldrM k (EmptyBag, EmptyBag) xs
where
k x (r2,s2) = do { (r1,s1) <- f x
; return (r1 `unionBags` r2, s1 `unionBags` s2) }
-- | Monadic map producing pairs, split into two bags of the same shape.
mapAndUnzipBagM :: Monad m => (a -> m (b,c)) -> Bag a -> m (Bag b, Bag c)
mapAndUnzipBagM _ EmptyBag = return (EmptyBag, EmptyBag)
mapAndUnzipBagM f (UnitBag x) =
  f x >>= \(r, s) -> return (UnitBag r, UnitBag s)
mapAndUnzipBagM f (TwoBags b1 b2) =
  do { (r1, s1) <- mapAndUnzipBagM f b1
     ; (r2, s2) <- mapAndUnzipBagM f b2
     ; return (TwoBags r1 r2, TwoBags s1 s2) }
mapAndUnzipBagM f (ListBag xs) =
  mapM f xs >>= \pairs ->
  let (rs, ss) = unzip pairs
  in return (ListBag rs, ListBag ss)
-- | Monadic left-to-right map-and-accumulate over a 'Bag': the accumulator is
-- threaded through the traversal (the 'Bag' analogue of 'mapAccumLM').
mapAccumBagLM :: Monad m
            => (acc -> x -> m (acc, y)) -- ^ combining function
            -> acc -- ^ initial state
            -> Bag x -- ^ inputs
            -> m (acc, Bag y) -- ^ final state, outputs
mapAccumBagLM _ s EmptyBag = return (s, EmptyBag)
mapAccumBagLM f s (UnitBag x) = do { (s1, x1) <- f s x; return (s1, UnitBag x1) }
-- The state from the left sub-bag (s1) feeds the right sub-bag.
mapAccumBagLM f s (TwoBags b1 b2) = do { (s1, b1') <- mapAccumBagLM f s b1
                                       ; (s2, b2') <- mapAccumBagLM f s1 b2
                                       ; return (s2, TwoBags b1' b2') }
mapAccumBagLM f s (ListBag xs) = do { (s', xs') <- mapAccumLM f s xs
                                    ; return (s', ListBag xs') }
-- | Build a bag from a list; the empty list maps to 'EmptyBag'.
listToBag :: [a] -> Bag a
listToBag vs = case vs of
  [] -> EmptyBag
  _  -> ListBag vs

-- | Flatten a bag into the list of its elements.
bagToList :: Bag a -> [a]
bagToList = foldrBag (:) []
-- | Pretty-print a bag as a braced, comma-separated element list.
instance (Outputable a) => Outputable (Bag a) where
  ppr bag = braces (pprWithCommas ppr (bagToList bag))

-- | 'Bag' is treated as an abstract type by generic programming: it is
-- converted to/from a list for traversal, and cannot be rebuilt via 'gunfold'.
instance Data a => Data (Bag a) where
  gfoldl k z b = z listToBag `k` bagToList b -- traverse abstract type abstractly
  toConstr _ = abstractConstr $ "Bag("++show (typeOf (undefined::a))++")"
  gunfold _ _ = error "gunfold"
  dataTypeOf _ = mkNoRepType "Bag"
  dataCast1 x = gcast1 x
| alexander-at-github/eta | compiler/ETA/Utils/Bag.hs | bsd-3-clause | 9,862 | 0 | 11 | 3,067 | 3,888 | 1,980 | 1,908 | 204 | 2 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section{@Vars@: Variables}
-}
{-# LANGUAGE CPP, DeriveDataTypeable, MultiWayIf #-}
-- |
-- #name_types#
-- GHC uses several kinds of name internally:
--
-- * 'OccName.OccName': see "OccName#name_types"
--
-- * 'RdrName.RdrName': see "RdrName#name_types"
--
-- * 'Name.Name': see "Name#name_types"
--
-- * 'Id.Id': see "Id#name_types"
--
-- * 'Var.Var' is a synonym for the 'Id.Id' type but it may additionally
-- potentially contain type variables, which have a 'TyCoRep.Kind'
-- rather than a 'TyCoRep.Type' and only contain some extra
-- details during typechecking.
--
-- These 'Var.Var' names may either be global or local, see "Var#globalvslocal"
--
-- #globalvslocal#
-- Global 'Id's and 'Var's are those that are imported or correspond
-- to a data constructor, primitive operation, or record selectors.
-- Local 'Id's and 'Var's are those bound within an expression
-- (e.g. by a lambda) or at the top level of the module being compiled.
module Var (
-- * The main data type and synonyms
Var, CoVar, Id, DictId, DFunId, EvVar, EqVar, EvId, IpId,
TyVar, TypeVar, KindVar, TKVar, TyCoVar,
-- ** Taking 'Var's apart
varName, varUnique, varType,
-- ** Modifying 'Var's
setVarName, setVarUnique, setVarType, updateVarType,
updateVarTypeM,
-- ** Constructing, taking apart, modifying 'Id's
mkGlobalVar, mkLocalVar, mkExportedLocalVar, mkCoVar,
idInfo, idDetails,
lazySetIdInfo, setIdDetails, globaliseId,
setIdExported, setIdNotExported,
-- ** Predicates
isId, isTKVar, isTyVar, isTcTyVar,
isLocalVar, isLocalId, isCoVar, isTyCoVar,
isGlobalId, isExportedId,
mustHaveLocalBinding,
-- ** Constructing 'TyVar's
mkTyVar, mkTcTyVar,
-- ** Taking 'TyVar's apart
tyVarName, tyVarKind, tcTyVarDetails, setTcTyVarDetails,
-- ** Modifying 'TyVar's
setTyVarName, setTyVarUnique, setTyVarKind, updateTyVarKind,
updateTyVarKindM
) where
#include "HsVersions.h"
import {-# SOURCE #-} TyCoRep( Type, Kind )
import {-# SOURCE #-} TcType( TcTyVarDetails, pprTcTyVarDetails, vanillaSkolemTv )
import {-# SOURCE #-} IdInfo( IdDetails, IdInfo, coVarDetails, isCoVarDetails, vanillaIdInfo, pprIdDetails )
import Name hiding (varName)
import Unique
import Util
import DynFlags
import Outputable
import Data.Data
{-
************************************************************************
* *
Synonyms
* *
************************************************************************
-- These synonyms are here and not in Id because otherwise we need a very
-- large number of SOURCE imports of Id.hs :-(
-}
type Id = Var -- A term-level identifier
type TyVar = Var -- Type *or* kind variable (historical)
type TKVar = Var -- Type *or* kind variable (historical)
type TypeVar = Var -- Definitely a type variable
type KindVar = Var -- Definitely a kind variable
-- See Note [Kind and type variables]
-- See Note [Evidence: EvIds and CoVars]
type EvId = Id -- Term-level evidence: DictId, IpId, or EqVar
type EvVar = EvId -- ...historical name for EvId
type DFunId = Id -- A dictionary function
type DictId = EvId -- A dictionary variable
type IpId = EvId -- A term-level implicit parameter
type EqVar = EvId -- Boxed equality evidence
type CoVar = Id -- See Note [Evidence: EvIds and CoVars]
type TyCoVar = Id -- Type, kind, *or* coercion variable
{-
Note [Evidence: EvIds and CoVars]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* An EvId (evidence Id) is a term-level evidence variable
(dictionary, implicit parameter, or equality). Could be boxed or unboxed.
* DictId, IpId, and EqVar are synonyms when we know what kind of
evidence we are talking about. For example, an EqVar has type (t1 ~ t2).
Note [Kind and type variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Before kind polymorphism, TyVar was used to mean type variables. Now
they are used to mean kind *or* type variables. KindVar is used when we
know for sure that it is a kind variable. In future, we might want to
go over the whole compiler code to use:
- TKVar to mean kind or type variables
- TypeVar to mean type variables only
- KindVar to mean kind variables
************************************************************************
* *
\subsection{The main data type declarations}
* *
************************************************************************
Every @Var@ has a @Unique@, to uniquify it and for fast comparison, a
@Type@, and an @IdInfo@ (non-essential info about it, e.g.,
strictness). The essential info about different kinds of @Vars@ is
in its @VarDetails@.
-}
-- | Essentially a typed 'Name', that may also contain some additional information
-- about the 'Var' and its use sites.
data Var
  = TyVar {  -- Type and kind variables
             -- see Note [Kind and type variables]
        varName :: !Name,
        realUnique :: {-# UNPACK #-} !Int,
        -- ^ Key for fast comparison
        -- Identical to the Unique in the name,
        -- cached here for speed
        varType :: Kind -- ^ The type or kind of the 'Var' in question
 }

  | TcTyVar {  -- Used only during type inference
               -- Used for kind variables during
               -- inference, as well
        varName :: !Name,
        realUnique :: {-# UNPACK #-} !Int,
        varType :: Kind,
        tc_tv_details :: TcTyVarDetails
 }

  | Id {
        varName :: !Name,
        realUnique :: {-# UNPACK #-} !Int,
        varType :: Type,
        idScope :: IdScope,
        id_details :: IdDetails, -- Stable, doesn't change
        id_info :: IdInfo } -- Unstable, updated by simplifier
    deriving Typeable

-- | Whether an 'Id' is visible outside its defining module.
data IdScope -- See Note [GlobalId/LocalId]
  = GlobalId
  | LocalId ExportFlag

-- | Dead-code-elimination marker for top-level bindings;
-- see Note [ExportFlag on binders].
data ExportFlag -- See Note [ExportFlag on binders]
  = NotExported -- ^ Not exported: may be discarded as dead code.
  | Exported -- ^ Exported: kept alive
{- Note [ExportFlag on binders]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
An ExportFlag of "Exported" on a top-level binder says "keep this
binding alive; do not drop it as dead code". This transitively
keeps alive all the other top-level bindings that this binding refers
to. This property is persisted all the way down the pipeline, so that
the binding will be compiled all the way to object code, and its
symbols will appear in the linker symbol table.
However, note that this use of "exported" is quite different to the
export list on a Haskell module. Setting the ExportFlag on an Id does
/not/ mean that if you import the module (in Haskell source code) you
will see this Id. Of course, things that appear in the export list
of the source Haskell module do indeed have their ExportFlag set.
But many other things, such as dictionary functions, are kept alive
by having their ExportFlag set, even though they are not exported
in the source-code sense.
We should probably use a different term for ExportFlag, like
KeepAlive.
Note [GlobalId/LocalId]
~~~~~~~~~~~~~~~~~~~~~~~
A GlobalId is
* always a constant (top-level)
* imported, or data constructor, or primop, or record selector
* has a Unique that is globally unique across the whole
GHC invocation (a single invocation may compile multiple modules)
* never treated as a candidate by the free-variable finder;
it's a constant!
A LocalId is
* bound within an expression (lambda, case, local let(rec))
* or defined at top level in the module being compiled
* always treated as a candidate by the free-variable finder
After CoreTidy, top-level LocalIds are turned into GlobalIds
-}
-- | In debug styles a 'Var' prints with its kind and a provenance tag;
-- otherwise it prints as its 'Name' (plus any debug suffix from 'ppr_debug').
instance Outputable Var where
  ppr var = sdocWithDynFlags $ \dflags ->
            getPprStyle $ \ppr_style ->
            if | debugStyle ppr_style && (not (gopt Opt_SuppressVarKinds dflags))
                 -> parens (ppr (varName var) <+> ppr_debug var ppr_style <+>
                            dcolon <+> ppr (tyVarKind var))
               | otherwise
                 -> ppr (varName var) <> ppr_debug var ppr_style

-- | Extra bracketed tag shown only in dump/debug styles: which constructor
-- of 'Var' this is, and (for 'Id's) its scope and details.
ppr_debug :: Var -> PprStyle -> SDoc
ppr_debug (TyVar {}) sty
  | debugStyle sty = brackets (text "tv")
ppr_debug (TcTyVar {tc_tv_details = d}) sty
  | dumpStyle sty || debugStyle sty = brackets (pprTcTyVarDetails d)
ppr_debug (Id { idScope = s, id_details = d }) sty
  | debugStyle sty = brackets (ppr_id_scope s <> pprIdDetails d)
ppr_debug _ _ = empty
-- | Short debug tag for an 'IdScope': global, exported local, or local.
ppr_id_scope :: IdScope -> SDoc
ppr_id_scope scope = case scope of
  GlobalId            -> text "gid"
  LocalId Exported    -> text "lidx"
  LocalId NotExported -> text "lid"
instance NamedThing Var where
  getName = varName

instance Uniquable Var where
  getUnique = varUnique

-- | Equality is by the cached unique key only; names are ignored.
instance Eq Var where
    a == b = realUnique a == realUnique b
-- | Ordering is by the cached unique key, matching 'Eq'.  'compare' now goes
-- through 'realUnique' directly, like the four comparison operators, instead
-- of first boxing the key into a 'Unique' via 'varUnique' — 'Unique' ordering
-- is by its key, so the result is identical but allocation-free and
-- consistent with the rest of the instance.
instance Ord Var where
    a <= b = realUnique a <= realUnique b
    a < b = realUnique a < realUnique b
    a >= b = realUnique a >= realUnique b
    a > b = realUnique a > realUnique b
    a `compare` b = realUnique a `compare` realUnique b
-- | 'Var' is opaque to generic programming: it exposes no representation
-- and cannot be traversed or rebuilt generically.
instance Data Var where
  -- don't traverse?
  toConstr _ = abstractConstr "Var"
  gunfold _ _ = error "gunfold"
  dataTypeOf _ = mkNoRepType "Var"
-- | The 'Unique' of a 'Var', rebuilt from the cached key.
varUnique :: Var -> Unique
varUnique = mkUniqueGrimily . realUnique
-- | Overwrite a 'Var'\'s unique, keeping the cached key and the key stored in
-- the embedded 'Name' in sync.
setVarUnique :: Var -> Unique -> Var
setVarUnique var uniq = var { realUnique = getKey uniq, varName = name' }
  where
    name' = setNameUnique (varName var) uniq

-- | Overwrite a 'Var'\'s name; the cached key is refreshed from the new name.
setVarName :: Var -> Name -> Var
setVarName var new_name = var { realUnique = key', varName = new_name }
  where
    key' = getKey (getUnique new_name)
-- | Replace the type of an 'Id'.
setVarType :: Id -> Type -> Id
setVarType var ty = var { varType = ty }

-- | Apply a pure function to the type of an 'Id'.
updateVarType :: (Type -> Type) -> Id -> Id
updateVarType upd var = var { varType = upd (varType var) }

-- | Apply a monadic function to the type of an 'Id'.
updateVarTypeM :: Monad m => (Type -> m Type) -> Id -> m Id
updateVarTypeM upd var =
  upd (varType var) >>= \ty' -> return (var { varType = ty' })
{-
************************************************************************
* *
\subsection{Type and kind variables}
* *
************************************************************************
-}
-- | The 'Name' of a type/kind variable (synonym for 'varName').
tyVarName :: TyVar -> Name
tyVarName = varName

-- | The kind of a type variable (stored in the 'varType' field).
tyVarKind :: TyVar -> Kind
tyVarKind = varType

setTyVarUnique :: TyVar -> Unique -> TyVar
setTyVarUnique = setVarUnique

setTyVarName :: TyVar -> Name -> TyVar
setTyVarName = setVarName

-- | Replace the kind of a type variable.
setTyVarKind :: TyVar -> Kind -> TyVar
setTyVarKind tv k = tv {varType = k}

-- | Apply a pure function to the kind of a type variable.
updateTyVarKind :: (Kind -> Kind) -> TyVar -> TyVar
updateTyVarKind update tv = tv {varType = update (tyVarKind tv)}

-- | Apply a monadic function to the kind of a type variable.
updateTyVarKindM :: (Monad m) => (Kind -> m Kind) -> TyVar -> m TyVar
updateTyVarKindM update tv
  = do { k' <- update (tyVarKind tv)
       ; return $ tv {varType = k'} }
-- | Make an immutable type/kind variable; the unique key is cached from the name.
mkTyVar :: Name -> Kind -> TyVar
mkTyVar name kind = TyVar { varName    = name
                          , realUnique = getKey (nameUnique name)
                          , varType  = kind
                          }

-- | Make a type-checker type variable carrying its inference-time details.
mkTcTyVar :: Name -> Kind -> TcTyVarDetails -> TyVar
mkTcTyVar name kind details
  = -- NB: 'kind' may be a coercion kind; cf, 'TcMType.newMetaCoVar'
    TcTyVar { varName = name,
              realUnique = getKey (nameUnique name),
              varType = kind,
              tc_tv_details = details
      }
-- | The type-checker details of a 'TcTyVar'.  A plain 'TyVar' is treated as
-- a vanilla skolem; calling this on an 'Id' is a panic.
tcTyVarDetails :: TyVar -> TcTyVarDetails
tcTyVarDetails (TcTyVar { tc_tv_details = details }) = details
tcTyVarDetails (TyVar {}) = vanillaSkolemTv
tcTyVarDetails var = pprPanic "tcTyVarDetails" (ppr var <+> dcolon <+> ppr (tyVarKind var))

-- | Overwrite the type-checker details of a 'TcTyVar'.
setTcTyVarDetails :: TyVar -> TcTyVarDetails -> TyVar
setTcTyVarDetails tv details = tv { tc_tv_details = details }
{-
%************************************************************************
%* *
\subsection{Ids}
* *
************************************************************************
-}
-- | The (unstable) 'IdInfo' of an 'Id'; panics on type variables.
idInfo :: Id -> IdInfo
idInfo (Id { id_info = info }) = info
idInfo other = pprPanic "idInfo" (ppr other)

-- | The (stable) 'IdDetails' of an 'Id'; panics on type variables.
idDetails :: Id -> IdDetails
idDetails (Id { id_details = details }) = details
idDetails other = pprPanic "idDetails" (ppr other)

-- The next three have a 'Var' suffix even though they always build
-- Ids, because Id.hs uses 'mkGlobalId' etc with different types
-- | Make a global (top-level, imported/constructor-like) 'Id'.
mkGlobalVar :: IdDetails -> Name -> Type -> IdInfo -> Id
mkGlobalVar details name ty info
  = mk_id name ty GlobalId details info

-- | Make a local, not-exported 'Id'.
mkLocalVar :: IdDetails -> Name -> Type -> IdInfo -> Id
mkLocalVar details name ty info
  = mk_id name ty (LocalId NotExported) details info

mkCoVar :: Name -> Type -> CoVar
-- Coercion variables have no IdInfo
mkCoVar name ty = mk_id name ty (LocalId NotExported) coVarDetails vanillaIdInfo

-- | Exported 'Var's will not be removed as dead code
mkExportedLocalVar :: IdDetails -> Name -> Type -> IdInfo -> Id
mkExportedLocalVar details name ty info
  = mk_id name ty (LocalId Exported) details info

-- | Shared worker for the @mk*Var@ constructors above; caches the unique key.
mk_id :: Name -> Type -> IdScope -> IdDetails -> IdInfo -> Id
mk_id name ty scope details info
  = Id { varName = name,
         realUnique = getKey (nameUnique name),
         varType = ty,
         idScope = scope,
         id_details = details,
         id_info = info }
-------------------
-- | Replace the 'IdInfo'; the new info is not forced (record update is lazy).
lazySetIdInfo :: Id -> IdInfo -> Var
lazySetIdInfo id info = id { id_info = info }

-- | Replace the 'IdDetails'.
setIdDetails :: Id -> IdDetails -> Id
setIdDetails id details = id { id_details = details }

globaliseId :: Id -> Id
-- ^ If it's a local, make it global
globaliseId id = id { idScope = GlobalId }

setIdExported :: Id -> Id
-- ^ Exports the given local 'Id'. Can also be called on global 'Id's, such as data constructors
-- and class operations, which are born as global 'Id's and automatically exported
setIdExported id@(Id { idScope = LocalId {} }) = id { idScope = LocalId Exported }
setIdExported id@(Id { idScope = GlobalId }) = id
setIdExported tv = pprPanic "setIdExported" (ppr tv)

setIdNotExported :: Id -> Id
-- ^ We can only do this to LocalIds
setIdNotExported id = ASSERT( isLocalId id )
                      id { idScope = LocalId NotExported }
{-
************************************************************************
* *
\subsection{Predicates over variables}
* *
************************************************************************
-}
-- | True of type and kind variables (synonym for 'isTKVar').
isTyVar :: Var -> Bool
isTyVar = isTKVar -- Historical

isTKVar :: Var -> Bool -- True of both type and kind variables
isTKVar (TyVar {}) = True
isTKVar (TcTyVar {}) = True
isTKVar _ = False

-- | True only of type-checker ('TcTyVar') variables.
isTcTyVar :: Var -> Bool
isTcTyVar (TcTyVar {}) = True
isTcTyVar _ = False

-- | True of term-level identifiers (the 'Id' constructor).
isId :: Var -> Bool
isId (Id {}) = True
isId _ = False

-- | True of type, kind, or coercion variables.
isTyCoVar :: Var -> Bool
isTyCoVar v = isTyVar v || isCoVar v

-- | A coercion variable is an 'Id' whose details say so.
isCoVar :: Var -> Bool
isCoVar v = isId v && isCoVarDetails (id_details v)

isLocalId :: Var -> Bool
isLocalId (Id { idScope = LocalId _ }) = True
isLocalId _ = False

-- | 'isLocalVar' returns @True@ for type variables as well as local 'Id's
-- These are the variables that we need to pay attention to when finding free
-- variables, or doing dependency analysis.
isLocalVar :: Var -> Bool
isLocalVar v = not (isGlobalId v)

isGlobalId :: Var -> Bool
isGlobalId (Id { idScope = GlobalId }) = True
isGlobalId _ = False

-- | 'mustHaveLocalBinding' returns @True@ of 'Id's and 'TyVar's
-- that must have a binding in this module. The converse
-- is not quite right: there are some global 'Id's that must have
-- bindings, such as record selectors. But that doesn't matter,
-- because it's only used for assertions
mustHaveLocalBinding :: Var -> Bool
mustHaveLocalBinding var = isLocalVar var

-- | 'isExportedIdVar' means \"don't throw this away\"
isExportedId :: Var -> Bool
isExportedId (Id { idScope = GlobalId }) = True
isExportedId (Id { idScope = LocalId Exported}) = True
isExportedId _ = False
| mcschroeder/ghc | compiler/basicTypes/Var.hs | bsd-3-clause | 16,953 | 0 | 19 | 4,653 | 2,796 | 1,550 | 1,246 | 225 | 1 |
#!/usr/bin/runhaskell
import Distribution.Simple
main = defaultMainWithHooks defaultUserHooks
| SAdams601/ParRegexSearch | test/fst-0.9.0.1/Setup.hs | mit | 96 | 0 | 5 | 9 | 15 | 8 | 7 | 2 | 1 |
module Options.RecompilationChecking where
import Types
-- | Users-guide entry for the recompilation-checking flags.
recompilationCheckingOptions :: [Flag]
recompilationCheckingOptions =
  [ flag { flagName = "-fforce-recomp"
         , flagDescription =
           "Turn off recompilation checking. This is implied by any " ++
           "``-ddump-X`` option when compiling a single file " ++
           -- Fixed reST role target: the flag is ``-c``, not ``c`` —
           -- ``:ghc-flag:`c``` would fail to resolve in the users guide.
           "(i.e. when using :ghc-flag:`-c`)."
         , flagType = DynamicFlag
         , flagReverse = "-fno-force-recomp"
         }
  ]
| oldmanmike/ghc | utils/mkUserGuidePart/Options/RecompilationChecking.hs | bsd-3-clause | 475 | 0 | 9 | 128 | 59 | 37 | 22 | 11 | 1 |
-- GSoC 2015 - Haskell bindings for OpenCog.
{-# LANGUAGE GADTs #-}
{-# LANGUAGE DataKinds #-}
-- | Simple example on executing code in multiple threads.
-- Note, before compiling this code you need to install the package: 'random'.
-- Executing: stack install random
import OpenCog.AtomSpace (AtomSpace,insert,get,remove,AtomType(..),
debug,printAtom,(|>),(\>),newAtomSpace,(<:),
Atom(..),TruthVal(..),Gen(..),noTv,stv)
import Control.Monad.IO.Class (liftIO)
import Control.Concurrent (forkIO,threadDelay)
import System.Random (randomIO,randomRIO)
-- | Pick a uniformly random concept node named \"Concept1\" .. \"Concept<top>\".
randomConcept :: Int -> AtomSpace (Atom ConceptT)
randomConcept top = do
    idx <- liftIO (randomRIO (1, top))
    return (ConceptNode ("Concept" ++ show idx) noTv)
-- | Build a ListLink of 1..n random concepts, each drawn from 'randomConcept' m.
randomList :: Int -> Int -> AtomSpace (Atom ListT)
randomList n m = do
    len  <- liftIO (randomRIO (1, n))
    gens <- mapM (\_ -> fmap Gen (randomConcept m)) [1 .. len]
    return (ListLink gens)
-- | Create one AtomSpace and run 'loop' in 21 threads: 20 forked workers
-- plus the main thread itself.
main :: IO ()
main = do
    as1 <- newAtomSpace Nothing
    -- mapM_ instead of mapM: the ThreadIds are never used, so don't build
    -- (and then silently discard) the result list.
    mapM_ (\n -> forkIO $ as1 <: loop n) [1..20]
    as1 <: loop 21
-- | Worker body: repeatedly perform random insert/get/remove operations on
-- the shared AtomSpace, with random pauses between bursts.  Thread 1 also
-- dumps the AtomSpace every iteration after a 5-second delay.  Never returns.
loop :: Int -> AtomSpace ()
loop idNum = do
    liftIO $ putStrLn $ "Thread " ++ show idNum
    waitRandom
    -- Random single-concept churn: one remove, one insert, one lookup.
    concept1 <- randomConcept 6
    remove concept1
    concept2 <- randomConcept 6
    insert concept2
    concept3 <- randomConcept 6
    get concept3
    waitRandom
    -- Random list-link churn: lookup (printing the result), insert, remove.
    list1 <- randomList 3 6
    res <- get list1
    case res of
      Nothing -> liftIO $ putStrLn "Got: Nothing"
      Just l -> liftIO $ putStrLn "Got:" >> printAtom l
    list2 <- randomList 3 6
    insert list2
    list3 <- randomList 3 6
    remove list3
    if idNum == 1
      then do
        liftIO $ threadDelay 5000000
        liftIO $ putStrLn $ replicate 70 '#'
        debug
      else return ()
    loop idNum
  where
    -- Sleep between 0 and 0.1 seconds to interleave the threads.
    waitRandom :: AtomSpace ()
    waitRandom = do
      n <- liftIO $ randomRIO (0,100000)
      liftIO $ threadDelay n
| rodsol/atomspace | examples/haskell/example_multithreading.hs | agpl-3.0 | 1,986 | 0 | 13 | 560 | 652 | 325 | 327 | 53 | 3 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP, NoImplicitPrelude, NondecreasingIndentation,
RecordWildCards, ScopedTypeVariables #-}
{-# OPTIONS_GHC -Wno-name-shadowing #-}
module GHC.IO.Encoding.CodePage.API (
mkCodePageEncoding
) where
import Foreign.C
import Foreign.Ptr
import Foreign.Marshal
import Foreign.Storable
import Data.Bits
import Data.Either
import Data.Word
import GHC.Base
import GHC.List
import GHC.IO.Buffer
import GHC.IO.Encoding.Failure
import GHC.IO.Encoding.Types
import GHC.IO.Encoding.UTF16
import GHC.Num
import GHC.Show
import GHC.Real
import GHC.Windows
import GHC.ForeignPtr (castForeignPtr)
import System.Posix.Internals
-- | Master switch for the verbose tracing in this module.
c_DEBUG_DUMP :: Bool
c_DEBUG_DUMP = False

-- | Emit a trace line (via 'puts') when 'c_DEBUG_DUMP' is on; no-op otherwise.
debugIO :: String -> IO ()
debugIO s
  | c_DEBUG_DUMP = puts s
  | otherwise = return ()
#if defined(i386_HOST_ARCH)
# define WINDOWS_CCONV stdcall
#elif defined(x86_64_HOST_ARCH)
# define WINDOWS_CCONV ccall
#else
# error Unknown mingw32 arch
#endif
-- Win32 LPCSTR: a pointer to code-page-encoded bytes.
type LPCSTR = Ptr Word8

-- Fixed array sizes inside the Win32 CPINFO struct
-- (MAX_DEFAULTCHAR / MAX_LEADBYTES in winnls.h).
mAX_DEFAULTCHAR :: Int
mAX_DEFAULTCHAR = 2

mAX_LEADBYTES :: Int
mAX_LEADBYTES = 12
-- Don't really care about the contents of this, but we have to make sure the size is right
-- Haskell mirror of the Win32 CPINFO struct; the Storable instance below
-- pins down the exact memory layout.
data CPINFO = CPINFO {
    maxCharSize :: UINT,
    defaultChar :: [BYTE], -- ^ Always of length mAX_DEFAULTCHAR
    leadByte :: [BYTE] -- ^ Always of length mAX_LEADBYTES
  }
-- | @since 4.7.0.0
-- Marshals CPINFO as: one UINT (maxCharSize) followed by
-- mAX_DEFAULTCHAR + mAX_LEADBYTES bytes.  The repeated
-- @ptr <- return $ castPtr ...@ bindings deliberately shadow @ptr@ to step
-- through the struct field by field.
instance Storable CPINFO where
    sizeOf _ = sizeOf (undefined :: UINT) + (mAX_DEFAULTCHAR + mAX_LEADBYTES) * sizeOf (undefined :: BYTE)
    alignment _ = alignment (undefined :: CInt)
    peek ptr = do
      ptr <- return $ castPtr ptr
      a <- peek ptr
      -- Skip past the UINT, then read the two fixed-size byte arrays.
      ptr <- return $ castPtr $ advancePtr ptr 1
      b <- peekArray mAX_DEFAULTCHAR ptr
      c <- peekArray mAX_LEADBYTES (advancePtr ptr mAX_DEFAULTCHAR)
      return $ CPINFO a b c
    poke ptr val = do
      ptr <- return $ castPtr ptr
      poke ptr (maxCharSize val)
      ptr <- return $ castPtr $ advancePtr ptr 1
      -- pokeArray' enforces the documented fixed lengths of the two arrays.
      pokeArray' "CPINFO.defaultChar" mAX_DEFAULTCHAR ptr (defaultChar val)
      pokeArray' "CPINFO.leadByte" mAX_LEADBYTES (advancePtr ptr mAX_DEFAULTCHAR) (leadByte val)
-- | Like 'pokeArray' but insists the list has exactly the expected length,
-- aborting with a descriptive message otherwise.
pokeArray' :: Storable a => String -> Int -> Ptr a -> [a] -> IO ()
pokeArray' msg sz ptr xs
  | n /= sz   = errorWithoutStackTrace $ msg ++ ": expected " ++ show sz ++ " elements in list but got " ++ show n
  | otherwise = pokeArray ptr xs
  where
    n = length xs
foreign import WINDOWS_CCONV unsafe "windows.h GetCPInfo"
c_GetCPInfo :: UINT -- ^ CodePage
-> Ptr CPINFO -- ^ lpCPInfo
-> IO BOOL
foreign import WINDOWS_CCONV unsafe "windows.h MultiByteToWideChar"
c_MultiByteToWideChar :: UINT -- ^ CodePage
-> DWORD -- ^ dwFlags
-> LPCSTR -- ^ lpMultiByteStr
-> CInt -- ^ cbMultiByte
-> LPWSTR -- ^ lpWideCharStr
-> CInt -- ^ cchWideChar
-> IO CInt
foreign import WINDOWS_CCONV unsafe "windows.h WideCharToMultiByte"
c_WideCharToMultiByte :: UINT -- ^ CodePage
-> DWORD -- ^ dwFlags
-> LPWSTR -- ^ lpWideCharStr
-> CInt -- ^ cchWideChar
-> LPCSTR -- ^ lpMultiByteStr
-> CInt -- ^ cbMultiByte
-> LPCSTR -- ^ lpDefaultChar
-> LPBOOL -- ^ lpUsedDefaultChar
-> IO CInt
foreign import WINDOWS_CCONV unsafe "windows.h IsDBCSLeadByteEx"
c_IsDBCSLeadByteEx :: UINT -- ^ CodePage
-> BYTE -- ^ TestChar
-> IO BOOL
-- | Returns a slow but correct implementation of TextEncoding using the Win32 API.
--
-- This is useful for supporting DBCS text encoding on the console without having to statically link
-- in huge code tables into all of our executables, or just as a fallback mechanism if a new code page
-- is introduced that we don't know how to deal with ourselves yet.
-- | Build a 'TextEncoding' for the given Windows code page, with the given
-- failure-recovery mode for undecodable/unencodable input.
mkCodePageEncoding :: CodingFailureMode -> Word32 -> TextEncoding
mkCodePageEncoding cfm cp
  = TextEncoding {
      textEncodingName = "CP" ++ show cp,
      mkTextDecoder = newCP (recoverDecode cfm) cpDecode cp,
      mkTextEncoder = newCP (recoverEncode cfm) cpEncode cp
    }

-- | Shared constructor for the code-page encoder and decoder: validates the
-- code page via GetCPInfo (failing early like the IConv backend does) and
-- captures the page's maximum bytes-per-character for the coding function.
newCP :: (Buffer from -> Buffer to -> IO (Buffer from, Buffer to))
      -> (Word32 -> Int -> CodeBuffer from to)
      -> Word32
      -> IO (BufferCodec from to ())
newCP rec fn cp = do
  -- Fail early if the code page doesn't exist, to match the behaviour of the IConv TextEncoding
  max_char_size <- alloca $ \cpinfo_ptr -> do
    success <- c_GetCPInfo cp cpinfo_ptr
    when (not success) $ throwGetLastError ("GetCPInfo " ++ show cp)
    fmap (fromIntegral . maxCharSize) $ peek cpinfo_ptr
  debugIO $ "GetCPInfo " ++ show cp ++ " = " ++ show max_char_size
  return $ BufferCodec {
    encode = fn cp max_char_size,
    recover = rec,
    close = return (),
    -- Windows doesn't supply a way to save/restore the state and doesn't need one
    -- since it's a dumb string->string API rather than a clever streaming one.
    getState = return (),
    setState = const $ return ()
  }
-- Byte-level UTF-16 coders in the host's native endianness, selected at
-- compile time so they match the CWchar layout used by the Win32 API.
utf16_native_encode' :: EncodeBuffer
utf16_native_decode' :: DecodeBuffer
#ifdef WORDS_BIGENDIAN
utf16_native_encode' = utf16be_encode
utf16_native_decode' = utf16be_decode
#else
utf16_native_encode' = utf16le_encode
utf16_native_decode' = utf16le_decode
#endif
-- | Run a coding function and additionally report the number of input
-- elements actually consumed, normalising the odd empty-buffer convention of
-- the UTF-16 coders described below.
saner :: CodeBuffer from to
      -> Buffer from -> Buffer to
      -> IO (CodingProgress, Int, Buffer from, Buffer to)
saner code ibuf obuf = do
  (why, ibuf', obuf') <- code ibuf obuf
  -- Weird but true: the UTF16 codes have a special case (see the "done" functions)
  -- whereby if they entirely consume the input instead of returning an input buffer
  -- that is empty because bufL has reached bufR, they return a buffer that is empty
  -- because bufL = bufR = 0.
  --
  -- This is really very odd and confusing for our code that expects the difference
  -- between the old and new input buffer bufLs to indicate the number of elements
  -- that were consumed!
  --
  -- We fix it by explicitly extracting an integer which is the # of things consumed, like so:
  if isEmptyBuffer ibuf'
    then return (InputUnderflow, bufferElems ibuf, ibuf', obuf')
    else return (why, bufL ibuf' - bufL ibuf, ibuf', obuf')
-- | Reinterpret a CWchar buffer as a byte buffer: same storage, with all
-- offsets/sizes doubled (each CWchar here occupies two bytes).
byteView :: Buffer CWchar -> Buffer Word8
byteView (Buffer {..}) = Buffer { bufState = bufState, bufRaw = castForeignPtr bufRaw, bufSize = bufSize * 2, bufL = bufL * 2, bufR = bufR * 2 }

-- | Inverse of 'byteView': reinterpret a byte buffer as a CWchar buffer,
-- halving all offsets/sizes and panicking if any of them is odd.
cwcharView :: Buffer Word8 -> Buffer CWchar
cwcharView (Buffer {..}) = Buffer { bufState = bufState, bufRaw = castForeignPtr bufRaw, bufSize = half bufSize, bufL = half bufL, bufR = half bufR }
  where half x = case x `divMod` 2 of (y, 0) -> y
                                      _      -> errorWithoutStackTrace "cwcharView: utf16_(encode|decode) (wrote out|consumed) non multiple-of-2 number of bytes"
-- | Char -> CWchar UTF-16 encoding: runs the byte-level coder on a byte view
-- of the output buffer, then reinterprets the result back as CWchars.
utf16_native_encode :: CodeBuffer Char CWchar
utf16_native_encode ibuf obuf = do
  (why, ibuf, obuf) <- utf16_native_encode' ibuf (byteView obuf)
  return (why, ibuf, cwcharView obuf)

-- | CWchar -> Char UTF-16 decoding, the mirror image of 'utf16_native_encode'.
utf16_native_decode :: CodeBuffer CWchar Char
utf16_native_decode ibuf obuf = do
  (why, ibuf, obuf) <- utf16_native_decode' (byteView ibuf) obuf
  return (why, cwcharView ibuf, obuf)
-- | Decode code-page bytes to characters: bytes -> UTF-16 via
-- MultiByteToWideChar (through cpRecode), then UTF-16 -> Char natively.
-- @max_char_size@ is the code page's maximum bytes per character.
cpDecode :: Word32 -> Int -> DecodeBuffer
cpDecode cp max_char_size = \ibuf obuf -> do
#ifdef CHARBUF_UTF16
    let mbuf = obuf
#else
    -- FIXME: share the buffer between runs, even if the buffer is not the perfect size
    let sz = (bufferElems ibuf * 2) -- I guess in the worst case the input CP text consists of 1-byte sequences that map entirely to things outside the BMP and so require 2 UTF-16 chars
              `min` (bufferAvailable obuf * 2) -- In the best case, each pair of UTF-16 points becomes a single UTF-32 point
    mbuf <- newBuffer (2 * sz) sz WriteBuffer :: IO (Buffer CWchar)
#endif
    debugIO $ "cpDecode " ++ summaryBuffer ibuf ++ " " ++ summaryBuffer mbuf
    (why1, ibuf', mbuf') <- cpRecode try' is_valid_prefix max_char_size 1 0 1 ibuf mbuf
    debugIO $ "cpRecode (cpDecode) = " ++ show why1 ++ " " ++ summaryBuffer ibuf' ++ " " ++ summaryBuffer mbuf'
#ifdef CHARBUF_UTF16
    return (why1, ibuf', mbuf')
#else
    -- Convert as much UTF-16 as possible to UTF-32. Note that it's impossible for this to fail
    -- due to illegal characters since the output from Window's encoding function should be correct UTF-16.
    -- However, it's perfectly possible to run out of either output or input buffer.
    debugIO $ "utf16_native_decode " ++ summaryBuffer mbuf' ++ " " ++ summaryBuffer obuf
    (why2, target_utf16_count, mbuf', obuf) <- saner utf16_native_decode (mbuf' { bufState = ReadBuffer }) obuf
    debugIO $ "utf16_native_decode = " ++ show why2 ++ " " ++ summaryBuffer mbuf' ++ " " ++ summaryBuffer obuf
    case why2 of
      -- If we successfully translate all of the UTF-16 buffer, we need to know why we couldn't get any more
      -- UTF-16 out of the Windows API
      InputUnderflow | isEmptyBuffer mbuf' -> return (why1, ibuf', obuf)
                     | otherwise -> errorWithoutStackTrace "cpDecode: impossible underflown UTF-16 buffer"
      -- InvalidSequence should be impossible since mbuf' is output from Windows.
      InvalidSequence -> errorWithoutStackTrace "InvalidSequence on output of Windows API"
      -- If we run out of space in obuf, we need to ask for more output buffer space, while also returning
      -- the characters we have managed to consume so far.
      OutputUnderflow -> do
        -- We have an interesting problem here similar to the cpEncode case where we have to figure out how much
        -- of the byte buffer was consumed to reach as far as the last UTF-16 character we actually decoded to UTF-32 OK.
        --
        -- The minimum number of bytes it could take is half the number of UTF-16 chars we got on the output, since
        -- one byte could theoretically generate two UTF-16 characters.
        -- The common case (ASCII text) is that every byte in the input maps to a single UTF-16 character.
        -- In the worst case max_char_size bytes map to each UTF-16 character.
        byte_count <- bSearch "cpDecode" (cpRecode try' is_valid_prefix max_char_size 1 0 1) ibuf mbuf target_utf16_count (target_utf16_count `div` 2) target_utf16_count (target_utf16_count * max_char_size)
        return (OutputUnderflow, bufferRemove byte_count ibuf, obuf)
#endif
  where
    is_valid_prefix = c_IsDBCSLeadByteEx cp
    -- One shot at the Win32 conversion: Left True = need more output space,
    -- Left False = invalid input sequence, Right n = wrote n UTF-16 units.
    try' iptr icnt optr ocnt
      -- MultiByteToWideChar does surprising things if you have ocnt == 0
      | ocnt == 0 = return (Left True)
      | otherwise = do
          err <- c_MultiByteToWideChar (fromIntegral cp) 8 -- MB_ERR_INVALID_CHARS == 8: Fail if an invalid input character is encountered
                                       iptr (fromIntegral icnt) optr (fromIntegral ocnt)
          debugIO $ "MultiByteToWideChar " ++ show cp ++ " 8 " ++ show iptr ++ " " ++ show icnt ++ " " ++ show optr ++ " " ++ show ocnt ++ "\n = " ++ show err
          case err of
            -- 0 indicates that we did not succeed
            0 -> do
              err <- getLastError
              case err of
                -- 122 = ERROR_INSUFFICIENT_BUFFER, 1113 = ERROR_NO_UNICODE_TRANSLATION
                122 -> return (Left True)
                1113 -> return (Left False)
                _ -> failWith "MultiByteToWideChar" err
            wrote_chars -> return (Right (fromIntegral wrote_chars))

-- | Encode characters to code-page bytes: Char -> UTF-16 natively, then
-- UTF-16 -> bytes via WideCharToMultiByte (through cpRecode).
cpEncode :: Word32 -> Int -> EncodeBuffer
cpEncode cp _max_char_size = \ibuf obuf -> do
#ifdef CHARBUF_UTF16
    let mbuf' = ibuf
#else
    -- FIXME: share the buffer between runs, even though that means we can't size the buffer as we want.
    let sz = (bufferElems ibuf * 2) -- UTF-32 always uses 4 bytes. UTF-16 uses at most 4 bytes.
              `min` (bufferAvailable obuf * 2) -- In the best case, each pair of UTF-16 points fits into only 1 byte
    mbuf <- newBuffer (2 * sz) sz WriteBuffer
    -- Convert as much UTF-32 as possible to UTF-16. NB: this can't fail due to output underflow
    -- since we sized the output buffer correctly. However, it could fail due to an illegal character
    -- in the input if it encounters a lone surrogate. In this case, our recovery will be applied as normal.
    (why1, ibuf', mbuf') <- utf16_native_encode ibuf mbuf
#endif
    debugIO $ "\ncpEncode " ++ summaryBuffer mbuf' ++ " " ++ summaryBuffer obuf
    (why2, target_utf16_count, mbuf', obuf) <- saner (cpRecode try' is_valid_prefix 2 1 1 0) (mbuf' { bufState = ReadBuffer }) obuf
    debugIO $ "cpRecode (cpEncode) = " ++ show why2 ++ " " ++ summaryBuffer mbuf' ++ " " ++ summaryBuffer obuf
#ifdef CHARBUF_UTF16
    return (why2, mbuf', obuf)
#else
    case why2 of
      -- If we successfully translate all of the UTF-16 buffer, we need to know why
      -- we weren't able to get any more UTF-16 out of the UTF-32 buffer
      InputUnderflow | isEmptyBuffer mbuf' -> return (why1, ibuf', obuf)
                     | otherwise -> errorWithoutStackTrace "cpEncode: impossible underflown UTF-16 buffer"
      -- With OutputUnderflow/InvalidSequence we only care about the failings of the UTF-16->CP translation.
      -- Yes, InvalidSequence is possible even though mbuf' is guaranteed to be valid UTF-16, because
      -- the code page may not be able to represent the encoded Unicode codepoint.
      _ -> do
        -- Here is an interesting problem. If we have only managed to translate part of the mbuf'
        -- then we need to return an ibuf which has consumed exactly those bytes required to obtain
        -- that part of the mbuf'. To reconstruct this information, we binary search for the number of
        -- UTF-32 characters required to get the consumed count of UTF-16 characters:
        --
        -- When dealing with data from the BMP (the common case), consuming N UTF-16 characters will be the same as consuming N
        -- UTF-32 characters. We start our search there so that most binary searches will terminate in a single iteration.
        -- Furthermore, the absolute minimum number of UTF-32 characters this can correspond to is 1/2 the UTF-16 byte count
        -- (this will be realised when the input data is entirely not in the BMP).
        utf32_count <- bSearch "cpEncode" utf16_native_encode ibuf mbuf target_utf16_count (target_utf16_count `div` 2) target_utf16_count target_utf16_count
        return (why2, bufferRemove utf32_count ibuf, obuf)
#endif
  where
    -- Single characters should be mappable to bytes. If they aren't supported by the CP then we have an invalid input sequence.
    is_valid_prefix _ = return False
    -- One shot at the Win32 conversion; a "defaulted" substitution character
    -- also counts as an invalid sequence (Left False).
    try' iptr icnt optr ocnt
      -- WideCharToMultiByte does surprising things if you call it with ocnt == 0
      | ocnt == 0 = return (Left True)
      | otherwise = alloca $ \defaulted_ptr -> do
          poke defaulted_ptr False
          err <- c_WideCharToMultiByte (fromIntegral cp) 0 -- NB: the WC_ERR_INVALID_CHARS flag is useless: only has an effect with the UTF-8 code page
                                       iptr (fromIntegral icnt) optr (fromIntegral ocnt)
                                       nullPtr defaulted_ptr
          defaulted <- peek defaulted_ptr
          debugIO $ "WideCharToMultiByte " ++ show cp ++ " 0 " ++ show iptr ++ " " ++ show icnt ++ " " ++ show optr ++ " " ++ show ocnt ++ " NULL " ++ show defaulted_ptr ++ "\n = " ++ show err ++ ", " ++ show defaulted
          case err of
            -- 0 indicates that we did not succeed
            0 -> do
              err <- getLastError
              case err of
                122 -> return (Left True)
                1113 -> return (Left False)
                _ -> failWith "WideCharToMultiByte" err
            wrote_bytes | defaulted -> return (Left False)
                        | otherwise -> return (Right (fromIntegral wrote_bytes))
-- | Binary search for the number of elements of the @from@ buffer that must
-- be consumed so that recoding yields exactly the target number of elements
-- in the @to@ buffer.  Used by cpEncode/cpDecode to reconstruct how much raw
-- input corresponds to a partially-translated intermediate buffer.
bSearch :: String
-> CodeBuffer from to
-> Buffer from -> Buffer to -- From buffer (crucial data source) and to buffer (temporary storage only). To buffer must be empty (L=R).
-> Int -- Target size of to buffer
-> Int -> Int -> Int -- Binary search min, mid, max
-> IO Int -- Size of from buffer required to reach target size of to buffer
bSearch msg code ibuf mbuf target_to_elems = go
where
-- Invariant: the answer (if it exists) lies in [mn, mx]; md is the probe.
go mn md mx = do
-- NB: this loop repeatedly reencodes on top of mbuf using a varying fraction of ibuf. It doesn't
-- matter if we blast the contents of mbuf since we already consumed all of the contents we are going to use.
(_why, ibuf, mbuf) <- code (ibuf { bufR = bufL ibuf + md }) mbuf
debugIO $ "code (bSearch " ++ msg ++ ") " ++ show md ++ " = " ++ show _why ++ ", " ++ summaryBuffer ibuf ++ summaryBuffer mbuf
-- The normal case is to get InputUnderflow here, which indicates that coding basically
-- terminated normally.
--
-- However, InvalidSequence is also possible if we are being called from cpDecode if we
-- have just been unlucky enough to set md so that ibuf straddles a byte boundary.
-- In this case we have to be really careful, because we don't want to report that
-- "md" elements is the right number when in actual fact we could have had md-1 input
-- elements and still produced the same number of bufferElems in mbuf.
--
-- In fact, we have to worry about this possibility even if we get InputUnderflow
-- since that will report InputUnderflow rather than InvalidSequence if the buffer
-- ends in a valid lead byte. So the expedient thing to do is simply to check if
-- the input buffer was entirely consumed.
--
-- When called from cpDecode, OutputUnderflow is also possible.
--
-- Luckily if we have InvalidSequence/OutputUnderflow and we do not appear to have reached
-- the target, what we should do is the same as normal because the fraction of ibuf that our
-- first "code" coded successfully must be invalid-sequence-free, and ibuf will always
-- have been decoded as far as the first invalid sequence in it.
case bufferElems mbuf `compare` target_to_elems of
-- Coding n "from" chars from the input yields exactly as many "to" chars
-- as were consumed by the recode. All is peachy:
EQ -> debugIO ("bSearch = " ++ show solution) >> return solution
-- Subtract the leftover (unconsumed) input elements from the probe size.
where solution = md - bufferElems ibuf
-- If we encoded fewer "to" characters than the target number, try again with more "from" characters (and vice-versa)
LT -> go' (md+1) mx
GT -> go' mn (md-1)
-- Recurse with a fresh midpoint; crossing bounds means no probe size can
-- hit the target exactly, which violates the caller's contract.
go' mn mx | mn <= mx = go mn (mn + ((mx - mn) `div` 2)) mx
| otherwise = errorWithoutStackTrace $ "bSearch(" ++ msg ++ "): search crossed! " ++ show (summaryBuffer ibuf, summaryBuffer mbuf, target_to_elems, mn, mx)
-- | Build a 'CodeBuffer' from a Win32-style all-or-nothing recoding
-- primitive (the @try'@ argument): repeatedly attempt to translate a prefix
-- of the input, shrinking the attempted size on failure until either some
-- prefix translates cleanly or we can blame a definite error on the input.
--
-- The @iscale@/@oscale@ arguments are the log2 element sizes of the input
-- and output buffers: they are used below via 'shiftL' to convert buffer
-- indices into byte offsets for pointer arithmetic.
cpRecode :: forall from to. Storable from
=> (Ptr from -> Int -> Ptr to -> Int -> IO (Either Bool Int))
-> (from -> IO Bool)
-> Int -- ^ Maximum length of a complete translatable sequence in the input (e.g. 2 if the input is UTF-16, 1 if the input is a SBCS, 2 is the input is a DBCS). Must be at least 1.
-> Int -- ^ Minimum number of output elements per complete translatable sequence in the input (almost certainly 1)
-> Int -> Int
-> CodeBuffer from to
cpRecode try' is_valid_prefix max_i_size min_o_size iscale oscale = go
where
go :: CodeBuffer from to
go ibuf obuf | isEmptyBuffer ibuf = return (InputUnderflow, ibuf, obuf)
| bufferAvailable obuf < min_o_size = return (OutputUnderflow, ibuf, obuf)
-- First attempt: the largest input count that could possibly fit the output.
| otherwise = try (bufferElems ibuf `min` ((max_i_size * bufferAvailable obuf) `div` min_o_size)) seek_smaller
where
done why = return (why, ibuf, obuf)
-- Called when (try n) failed; longer_was_valid records whether some larger
-- attempt failed only due to output space (Left True) rather than bad input.
seek_smaller n longer_was_valid
-- In this case, we can't shrink any further via any method. Calling (try 0) wouldn't be right because that will always claim InputUnderflow...
| n <= 1 = if longer_was_valid
-- try m (where m >= n) was valid but we overflowed the output buffer with even a single input element
then done OutputUnderflow
-- there was no initial valid sequence in the input, but it might just be a truncated buffer - we need to check
else do byte <- withBuffer ibuf $ \ptr -> peekElemOff ptr (bufL ibuf)
valid_prefix <- is_valid_prefix byte
done (if valid_prefix && bufferElems ibuf < max_i_size then InputUnderflow else InvalidSequence)
-- If we're already looking at very small buffers, try every n down to 1, to ensure we spot as long a sequence as exists while avoiding trying 0.
-- Doing it this way ensures that we spot a single initial sequence of length <= max_i_size if any such exists.
| n < 2 * max_i_size = try (n - 1) (\pred_n pred_n_was_valid -> seek_smaller pred_n (longer_was_valid || pred_n_was_valid))
-- Otherwise, try a binary chop to try to either get the prefix before the invalid input, or shrink the output down so it fits
-- in the output buffer. After the chop, try to consume extra input elements to try to recover as much of the sequence as possible if we
-- end up chopping a multi-element input sequence into two parts.
--
-- Note that since max_i_size >= 1:
-- * (n `div` 2) >= 1, so we don't try 0
-- * ((n `div` 2) + (max_i_size - 1)) < n, so we don't get into a loop where (seek_smaller n) calls post_divide (n `div` 2) calls (seek_smaller n)
| let n' = n `div` 2 = try n' (post_divide n' longer_was_valid)
-- post_divide n' lwv n failed_due_to_space: after chopping to n', grow the
-- attempt one element at a time to recover a straddled multi-element sequence.
post_divide _ _ n True = seek_smaller n True
post_divide n' longer_was_valid n False | n < n' + max_i_size - 1 = try (n + 1) (post_divide n' longer_was_valid) -- There's still a chance..
| otherwise = seek_smaller n' longer_was_valid -- No amount of recovery could save us :(
-- Run the primitive on n input elements; on success loop via go to consume
-- the rest of the buffer, on failure hand control to the k_fail continuation.
try n k_fail = withBuffer ibuf $ \iptr -> withBuffer obuf $ \optr -> do
ei_err_wrote <- try' (iptr `plusPtr` (bufL ibuf `shiftL` iscale)) n
(optr `plusPtr` (bufR obuf `shiftL` oscale)) (bufferAvailable obuf)
debugIO $ "try " ++ show n ++ " = " ++ show ei_err_wrote
case ei_err_wrote of
-- ERROR_INSUFFICIENT_BUFFER: A supplied buffer size was not large enough, or it was incorrectly set to NULL.
Left True -> k_fail n True
-- ERROR_NO_UNICODE_TRANSLATION: Invalid Unicode was found in a string.
Left False -> k_fail n False
-- Must have interpreted all given bytes successfully
-- We need to iterate until we have consumed the complete contents of the buffer
Right wrote_elts -> go (bufferRemove n ibuf) (obuf { bufR = bufR obuf + wrote_elts })
| snoyberg/ghc | libraries/base/GHC/IO/Encoding/CodePage/API.hs | bsd-3-clause | 23,234 | 37 | 27 | 6,311 | 3,676 | 1,882 | 1,794 | -1 | -1 |
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- $Id: DriverPhases.hs,v 1.38 2005/05/17 11:01:59 simonmar Exp $
--
-- GHC Driver
--
-- (c) The University of Glasgow 2002
--
-----------------------------------------------------------------------------
module DriverPhases (
HscSource(..), isHsBootOrSig, hscSourceString,
Phase(..),
happensBefore, eqPhase, anyHsc, isStopLn,
startPhase,
phaseInputExt,
isHaskellishSuffix,
isHaskellSrcSuffix,
isObjectSuffix,
isCishSuffix,
isDynLibSuffix,
isHaskellUserSrcSuffix,
isHaskellSigSuffix,
isSourceSuffix,
isHaskellishFilename,
isHaskellSrcFilename,
isHaskellSigFilename,
isObjectFilename,
isCishFilename,
isDynLibFilename,
isHaskellUserSrcFilename,
isSourceFilename
) where
#include "HsVersions.h"
import {-# SOURCE #-} DynFlags
import Outputable
import Platform
import System.FilePath
import Binary
-----------------------------------------------------------------------------
-- Phases
{-
Phase of the | Suffix saying | Flag saying | (suffix of)
compilation system | ``start here''| ``stop after''| output file
literate pre-processor | .lhs | - | -
C pre-processor (opt.) | - | -E | -
Haskell compiler | .hs | -C, -S | .hc, .s
C compiler (opt.) | .hc or .c | -S | .s
assembler | .s or .S | -c | .o
linker | other | - | a.out
-}
-- Note [HscSource types]
-- ~~~~~~~~~~~~~~~~~~~~~~
-- There are three types of source file for Haskell code:
--
-- * HsSrcFile is an ordinary hs file which contains code,
--
-- * HsBootFile is an hs-boot file, which is used to break
-- recursive module imports (there will always be an
-- HsSrcFile associated with it), and
--
-- * HsigFile is an hsig file, which contains only type
-- signatures and is used to specify signatures for
-- modules.
--
-- Syntactically, hs-boot files and hsig files are quite similar: they
-- only include type signatures and must be associated with an
-- actual HsSrcFile. isHsBootOrSig allows us to abstract over code
-- which is indifferent to which. However, there are some important
-- differences, mostly owing to the fact that hsigs are proper
-- modules (you `import Sig` directly) whereas HsBootFiles are
-- temporary placeholders (you `import {-# SOURCE #-} Mod`).
-- When we finish compiling the true implementation of an hs-boot,
-- we replace the HomeModInfo with the real HsSrcFile. An HsigFile, on the
-- other hand, is never replaced (in particular, we *cannot* use the
-- HomeModInfo of the original HsSrcFile backing the signature, since it
-- will export too many symbols.)
--
-- Additionally, while HsSrcFile is the only Haskell file
-- which has *code*, we do generate .o files for HsigFile, because
-- this is how the recompilation checker figures out if a file
-- needs to be recompiled. These are fake object files which
-- should NOT be linked against.
-- | The kind of a Haskell source file: an ordinary module, an hs-boot
-- file, or an hsig signature file.  See Note [HscSource types] above.
data HscSource
= HsSrcFile | HsBootFile | HsigFile
deriving( Eq, Ord, Show )
-- Ord needed for the finite maps we build in CompManager
-- | Serialise an 'HscSource' as a single tag byte.
instance Binary HscSource where
    put_ bh src = putByte bh $ case src of
        HsSrcFile  -> 0
        HsBootFile -> 1
        HsigFile   -> 2
    get bh = do
        tag <- getByte bh
        return $ case tag of
            0 -> HsSrcFile
            1 -> HsBootFile
            -- Any other tag (in practice 2) decodes as HsigFile,
            -- matching the encoder above.
            _ -> HsigFile
-- | A short tag for user-visible messages: boot and signature files are
-- annotated, ordinary source files get the empty string.
hscSourceString :: HscSource -> String
hscSourceString src = case src of
    HsSrcFile  -> ""
    HsBootFile -> "[boot]"
    HsigFile   -> "[sig]"
-- See Note [isHsBootOrSig]
-- | True for the two "interface only" source kinds (hs-boot and hsig),
-- which share a lot of handling in the compiler.
isHsBootOrSig :: HscSource -> Bool
isHsBootOrSig src = src == HsBootFile || src == HsigFile
-- | A phase of the compilation pipeline.  The Haskellish phases carry the
-- 'HscSource' of the file being compiled; the rest are C-family, assembly,
-- LLVM and Cmm phases, plus the terminating pseudo-phase 'StopLn'.
data Phase
= Unlit HscSource
| Cpp HscSource
| HsPp HscSource
| Hsc HscSource
| Ccxx -- Compile C++
| Cc -- Compile C
| Cobjc -- Compile Objective-C
| Cobjcxx -- Compile Objective-C++
| HCc -- Haskellised C (as opposed to vanilla C) compilation
| Splitter -- Assembly file splitter (part of '-split-objs')
| SplitAs -- Assembler for split assembly files (part of '-split-objs')
| As Bool -- Assembler for regular assembly files (Bool: with-cpp)
| LlvmOpt -- Run LLVM opt tool over llvm assembly
| LlvmLlc -- LLVM bitcode to native assembly
| LlvmMangle -- Fix up TNTC by processing assembly produced by LLVM
| CmmCpp -- pre-process Cmm source
| Cmm -- parse & compile Cmm code
| MergeStub -- merge in the stub object file
-- The final phase is a pseudo-phase that tells the pipeline to stop.
-- There is no runPhase case for it.
| StopLn -- Stop, but linking will follow, so generate .o file
deriving (Eq, Show)
-- | Phases are rendered via their derived 'Show' representation.
instance Outputable Phase where
    ppr = text . show
-- | A wildcard 'Hsc' phase for contexts where the 'HscSource' field is
-- irrelevant.  NB: the field is bottom ('panic'), so consumers must never
-- force it — this is why 'eqPhase' below ignores the field.
anyHsc :: Phase
anyHsc = Hsc (panic "anyHsc")
-- | Is this the pseudo-phase that terminates the pipeline?
isStopLn :: Phase -> Bool
isStopLn phase = case phase of
    StopLn -> True
    _      -> False
eqPhase :: Phase -> Phase -> Bool
-- Equality of constructors, ignoring the HscSource field
-- (we cannot use the derived Eq: the HscSource field can be bottom,
-- see anyHsc above, and derived equality would force it)
eqPhase p1 p2 = case (p1, p2) of
    (Unlit _,    Unlit _)    -> True
    (Cpp _,      Cpp _)      -> True
    (HsPp _,     HsPp _)     -> True
    (Hsc _,      Hsc _)      -> True
    (Cc,         Cc)         -> True
    (Cobjc,      Cobjc)      -> True
    (HCc,        HCc)        -> True
    (Splitter,   Splitter)   -> True
    (SplitAs,    SplitAs)    -> True
    (As x,       As y)       -> x == y
    (LlvmOpt,    LlvmOpt)    -> True
    (LlvmLlc,    LlvmLlc)    -> True
    (LlvmMangle, LlvmMangle) -> True
    (CmmCpp,     CmmCpp)     -> True
    (Cmm,        Cmm)        -> True
    (MergeStub,  MergeStub)  -> True
    (StopLn,     StopLn)     -> True
    (Ccxx,       Ccxx)       -> True
    (Cobjcxx,    Cobjcxx)    -> True
    _                        -> False
{- Note [Partial ordering on phases]
We want to know which phases will occur before which others. This is used for
sanity checking, to ensure that the pipeline will stop at some point (see
DriverPipeline.runPipeline).
A < B iff A occurs before B in a normal compilation pipeline.
There is explicitly not a total ordering on phases, because in registerised
builds, the phase `HsC` doesn't happen before nor after any other phase.
Although we check that a normal user doesn't set the stop_phase to HsC through
use of -C with registerised builds (in Main.checkOptions), it is still
possible for a ghc-api user to do so. So be careful when using the function
happensBefore, and don't think that `not (a <= b)` implies `b < a`.
-}
-- | Does phase @p1@ occur strictly before @p2@ in a normal pipeline?
-- A partial order only — see Note [Partial ordering on phases].
happensBefore :: DynFlags -> Phase -> Phase -> Bool
happensBefore dflags p1 p2 = p1 `happensBefore'` p2
-- Walk the nextPhase chain from the left argument until we hit the
-- right argument (True) or StopLn (False); this always terminates.
where StopLn `happensBefore'` _ = False
x `happensBefore'` y = after_x `eqPhase` y
|| after_x `happensBefore'` y
where after_x = nextPhase dflags x
-- | The phase that follows @p@ in a normal pipeline.  Only an
-- approximation; used for the ordering check in 'happensBefore'.
nextPhase :: DynFlags -> Phase -> Phase
nextPhase dflags p
-- A conservative approximation to the next phase, used in happensBefore
= case p of
Unlit sf -> Cpp sf
Cpp sf -> HsPp sf
HsPp sf -> Hsc sf
Hsc _ -> maybeHCc
Splitter -> SplitAs
LlvmOpt -> LlvmLlc
LlvmLlc -> LlvmMangle
LlvmMangle -> As False
SplitAs -> MergeStub
As _ -> MergeStub
Ccxx -> As False
Cc -> As False
Cobjc -> As False
Cobjcxx -> As False
CmmCpp -> Cmm
Cmm -> maybeHCc
HCc -> As False
MergeStub -> StopLn
StopLn -> panic "nextPhase: nothing after StopLn"
-- Unregisterised builds go via Haskellised C; registerised ones
-- go straight to the assembler.
where maybeHCc = if platformUnregisterised (targetPlatform dflags)
then HCc
else As False
-- the first compilation phase for a given file is determined
-- by its suffix.
-- | Map a file suffix (without the leading dot) to the phase at which its
-- compilation starts.  Unknown suffixes map to 'StopLn': the file is
-- assumed to need no compilation.
startPhase :: String -> Phase
startPhase suffix = maybe StopLn id (lookup suffix suffixPhases)
  where
    suffixPhases =
        [ ("lhs",      Unlit HsSrcFile)
        , ("lhs-boot", Unlit HsBootFile)
        , ("lhsig",    Unlit HsigFile)
        , ("hs",       Cpp HsSrcFile)
        , ("hs-boot",  Cpp HsBootFile)
        , ("hsig",     Cpp HsigFile)
        , ("hscpp",    HsPp HsSrcFile)
        , ("hspp",     Hsc HsSrcFile)
        , ("hc",       HCc)
        , ("c",        Cc)
        , ("cpp",      Ccxx)
        , ("C",        Cc)
        , ("m",        Cobjc)
        , ("M",        Cobjcxx)
        , ("mm",       Cobjcxx)
        , ("cc",       Ccxx)
        , ("cxx",      Ccxx)
        , ("split_s",  Splitter)
        , ("s",        As False)
        , ("S",        As True)
        , ("ll",       LlvmOpt)
        , ("bc",       LlvmLlc)
        , ("lm_s",     LlvmMangle)
        , ("o",        StopLn)
        , ("cmm",      CmmCpp)
        , ("cmmcpp",   Cmm)
        ]
-- This is used to determine the extension for the output from the
-- current phase (if it generates a new file). The extension depends
-- on the next phase in the pipeline.
phaseInputExt :: Phase -> String
phaseInputExt phase = case phase of
    Unlit HsSrcFile  -> "lhs"
    Unlit HsBootFile -> "lhs-boot"
    Unlit HsigFile   -> "lhsig"
    Cpp _            -> "lpp"   -- intermediate only
    HsPp _           -> "hscpp" -- intermediate only
    -- NB: as things stand, the (Hsc _) alternative must not evaluate its
    -- field, because runPipeline uses the StopBefore phase to pick the
    -- output filename. That could be fixed, but watch out.
    Hsc _            -> "hspp"  -- intermediate only
    HCc              -> "hc"
    Ccxx             -> "cpp"
    Cobjc            -> "m"
    Cobjcxx          -> "mm"
    Cc               -> "c"
    Splitter         -> "split_s"
    As True          -> "S"
    As False         -> "s"
    LlvmOpt          -> "ll"
    LlvmLlc          -> "bc"
    LlvmMangle       -> "lm_s"
    SplitAs          -> "split_s"
    CmmCpp           -> "cmm"
    Cmm              -> "cmmcpp"
    MergeStub        -> "o"
    StopLn           -> "o"
-- | Suffix groups used by the is*Suffix predicates below.  Note the
-- subset chain: sig ⊂ user_src ⊂ src ⊂ haskellish.
haskellish_src_suffixes, haskellish_suffixes, cish_suffixes,
haskellish_user_src_suffixes, haskellish_sig_suffixes
:: [String]
-- When a file with an extension in the haskellish_src_suffixes group is
-- loaded in --make mode, its imports will be loaded too.
haskellish_src_suffixes = haskellish_user_src_suffixes ++
[ "hspp", "hscpp" ]
haskellish_suffixes = haskellish_src_suffixes ++
[ "hc", "cmm", "cmmcpp" ]
cish_suffixes = [ "c", "cpp", "C", "cc", "cxx", "s", "S", "ll", "bc", "lm_s", "m", "M", "mm" ]
-- Will not be deleted as temp files:
haskellish_user_src_suffixes =
haskellish_sig_suffixes ++ [ "hs", "lhs", "hs-boot", "lhs-boot" ]
haskellish_sig_suffixes = [ "hsig", "lhsig" ]
objish_suffixes :: Platform -> [String]
-- Use the appropriate suffix for the system on which
-- the GHC-compiled code will run
-- Windows additionally recognises the upper-case and .obj variants.
objish_suffixes platform = case platformOS platform of
OSMinGW32 -> [ "o", "O", "obj", "OBJ" ]
_ -> [ "o" ]
-- | Shared-library suffixes for the target OS; note that Darwin accepts
-- both its native "dylib" and the generic "so".
dynlib_suffixes :: Platform -> [String]
dynlib_suffixes platform = case platformOS platform of
OSMinGW32 -> ["dll", "DLL"]
OSDarwin -> ["dylib", "so"]
_ -> ["so"]
-- | Membership tests over the suffix groups defined above.  The argument
-- is a suffix without its leading dot.
isHaskellishSuffix, isHaskellSrcSuffix, isCishSuffix,
 isHaskellUserSrcSuffix, isHaskellSigSuffix
 :: String -> Bool
isHaskellishSuffix     = (`elem` haskellish_suffixes)
isHaskellSigSuffix     = (`elem` haskellish_sig_suffixes)
isHaskellSrcSuffix     = (`elem` haskellish_src_suffixes)
isCishSuffix           = (`elem` cish_suffixes)
isHaskellUserSrcSuffix = (`elem` haskellish_user_src_suffixes)
-- | Platform-dependent suffix tests for object files and dynamic libraries.
isObjectSuffix, isDynLibSuffix :: Platform -> String -> Bool
isObjectSuffix platform = (`elem` objish_suffixes platform)
isDynLibSuffix platform = (`elem` dynlib_suffixes platform)
-- | A source suffix is anything Haskellish or C-family-ish.
isSourceSuffix :: String -> Bool
isSourceSuffix ext = isHaskellishSuffix ext || isCishSuffix ext
-- | Filename variants of the suffix predicates above.
isHaskellishFilename, isHaskellSrcFilename, isCishFilename,
 isHaskellUserSrcFilename, isSourceFilename, isHaskellSigFilename
 :: FilePath -> Bool
-- 'takeExtension' yields ".foo", so we strip the leading dot before testing.
isHaskellishFilename     = isHaskellishSuffix     . drop 1 . takeExtension
isHaskellSrcFilename     = isHaskellSrcSuffix     . drop 1 . takeExtension
isCishFilename           = isCishSuffix           . drop 1 . takeExtension
isHaskellUserSrcFilename = isHaskellUserSrcSuffix . drop 1 . takeExtension
isSourceFilename         = isSourceSuffix         . drop 1 . takeExtension
isHaskellSigFilename     = isHaskellSigSuffix     . drop 1 . takeExtension
-- | Platform-dependent filename tests for object files and dynamic
-- libraries; the leading dot of 'takeExtension' is stripped as above.
isObjectFilename, isDynLibFilename :: Platform -> FilePath -> Bool
isObjectFilename platform = isObjectSuffix platform . drop 1 . takeExtension
isDynLibFilename platform = isDynLibSuffix platform . drop 1 . takeExtension
| mfine/ghc | compiler/main/DriverPhases.hs | bsd-3-clause | 13,429 | 0 | 11 | 3,671 | 2,255 | 1,229 | 1,026 | 225 | 20 |
{-# LANGUAGE CPP #-}
-- | PowerPC register information, obtained by instantiating the shared
-- machine-register template with the powerpc CPP macros below.
module CodeGen.Platform.PPC where
import GhcPrelude
-- Select the powerpc register set in the included template.
#define MACHREGS_NO_REGS 0
#define MACHREGS_powerpc 1
#include "../../../../includes/CodeGen.Platform.hs"
| ezyang/ghc | compiler/codeGen/CodeGen/Platform/PPC.hs | bsd-3-clause | 183 | 0 | 3 | 21 | 14 | 11 | 3 | 3 | 0 |
-- |
-- Various utilities for forcing Core structures
--
-- It can often be useful to force various parts of the AST. This module
-- provides a number of @seq@-like functions to accomplish this.
module CoreSeq (
-- * Utilities for forcing Core structures
seqExpr, seqExprs, seqUnfolding, seqRules,
megaSeqIdInfo, seqRuleInfo, seqBinds,
) where
import CoreSyn
import IdInfo
import Demand( seqDemand, seqStrictSig )
import BasicTypes( seqOccInfo )
import VarSet( seqDVarSet )
import Var( varType, tyVarKind )
import Type( seqType, isTyVar )
import Coercion( seqCo )
import Id( Id, idInfo )
-- | Evaluate all the fields of the 'IdInfo' that are generally demanded by the
-- compiler
megaSeqIdInfo :: IdInfo -> ()
megaSeqIdInfo info
= seqRuleInfo (ruleInfo info) `seq`
-- Omitting this improves runtimes a little, presumably because
-- some unfoldings are not calculated at all
-- seqUnfolding (unfoldingInfo info) `seq`
seqDemand (demandInfo info) `seq`
seqStrictSig (strictnessInfo info) `seq`
seqCaf (cafInfo info) `seq`
seqOneShot (oneShotInfo info) `seq`
seqOccInfo (occInfo info)
-- | Force a 'OneShotInfo' to WHNF.
seqOneShot :: OneShotInfo -> ()
seqOneShot info = seq info ()
-- | Force the rules and the free-variable set inside a 'RuleInfo'.
seqRuleInfo :: RuleInfo -> ()
seqRuleInfo (RuleInfo rules fvs) = seqRules rules `seq` seqDVarSet fvs
-- | Force a 'CafInfo' to WHNF.
seqCaf :: CafInfo -> ()
seqCaf info = seq info ()
-- | Force a list of Core rules: binders and right-hand sides (with their
-- argument templates) are forced deeply; builtin rules carry no Core to force.
seqRules :: [CoreRule] -> ()
seqRules [] = ()
seqRules (Rule { ru_bndrs = bndrs, ru_args = args, ru_rhs = rhs } : rules)
= seqBndrs bndrs `seq` seqExprs (rhs:args) `seq` seqRules rules
seqRules (BuiltinRule {} : rules) = seqRules rules
-- | Deeply force a Core expression: every sub-expression, binder,
-- type and coercion it contains.
seqExpr :: CoreExpr -> ()
seqExpr (Var v) = v `seq` ()
seqExpr (Lit lit) = lit `seq` ()
seqExpr (App f a) = seqExpr f `seq` seqExpr a
seqExpr (Lam b e) = seqBndr b `seq` seqExpr e
seqExpr (Let b e) = seqBind b `seq` seqExpr e
seqExpr (Case e b t as) = seqExpr e `seq` seqBndr b `seq` seqType t `seq` seqAlts as
seqExpr (Cast e co) = seqExpr e `seq` seqCo co
seqExpr (Tick n e) = seqTickish n `seq` seqExpr e
seqExpr (Type t) = seqType t
seqExpr (Coercion co) = seqCo co
-- | Deeply force every expression in the list (cf. 'seqBinds').
seqExprs :: [CoreExpr] -> ()
seqExprs = foldr (seq . seqExpr) ()
-- | Force the interesting payload of a tick: the cost centre of a
-- profiling note and the free variables of a breakpoint.
seqTickish :: Tickish Id -> ()
seqTickish ProfNote{ profNoteCC = cc } = cc `seq` ()
seqTickish HpcTick{} = ()
seqTickish Breakpoint{ breakpointFVs = ids } = seqBndrs ids
seqTickish SourceNote{} = ()
-- | Force a binder: for a type variable its kind, for a term variable
-- its type and (deeply) its 'IdInfo'.
seqBndr :: CoreBndr -> ()
seqBndr b | isTyVar b = seqType (tyVarKind b)
| otherwise = seqType (varType b) `seq`
megaSeqIdInfo (idInfo b)
-- | Force every binder in the list (cf. 'seqBinds').
seqBndrs :: [CoreBndr] -> ()
seqBndrs = foldr (seq . seqBndr) ()
-- | Deeply force a list of Core bindings.
seqBinds :: [Bind CoreBndr] -> ()
seqBinds bs = foldr (seq . seqBind) () bs
-- | Deeply force a single Core binding (non-recursive or recursive group).
seqBind :: Bind CoreBndr -> ()
seqBind (NonRec b e) = seqBndr b `seq` seqExpr e
seqBind (Rec prs) = seqPairs prs
-- | Force every binder/right-hand-side pair of a recursive group.
seqPairs :: [(CoreBndr, CoreExpr)] -> ()
seqPairs = foldr (\(bndr, rhs) rest -> seqBndr bndr `seq` seqExpr rhs `seq` rest) ()
-- | Force every case alternative: constructor tag, binders and right-hand side.
seqAlts :: [CoreAlt] -> ()
seqAlts [] = ()
seqAlts ((c,bs,e):alts) = c `seq` seqBndrs bs `seq` seqExpr e `seq` seqAlts alts
-- | Force a 'CoreUnfolding': its template expression, the cached flags
-- and the inlining guidance.  Other unfolding forms are left untouched.
seqUnfolding :: Unfolding -> ()
seqUnfolding (CoreUnfolding { uf_tmpl = e, uf_is_top = top,
uf_is_value = b1, uf_is_work_free = b2,
uf_expandable = b3, uf_is_conlike = b4,
uf_guidance = g})
= seqExpr e `seq` top `seq` b1 `seq` b2 `seq` b3 `seq` b4 `seq` seqGuidance g
seqUnfolding _ = ()
-- | Force an 'UnfIfGoodArgs' guidance, including the full list of
-- argument discounts (via 'sum'); other guidance forms carry nothing lazy
-- worth forcing here.
seqGuidance :: UnfoldingGuidance -> ()
seqGuidance (UnfIfGoodArgs ns n b) = n `seq` sum ns `seq` b `seq` ()
seqGuidance _ = ()
| tjakway/ghcjvm | compiler/coreSyn/CoreSeq.hs | bsd-3-clause | 3,698 | 0 | 12 | 911 | 1,395 | 758 | 637 | 78 | 1 |
{-# LANGUAGE PartialTypeSignatures #-}
-- | Compiler test exercising a type wildcard inside a data declaration.
-- NOTE(review): the wildcard in the field type below is intentional — this
-- appears to be a should-fail test for PartialTypeSignatures; do not "fix" it.
module WildcardInADT1 where
data Foo a = Foo (Either _ a)
| siddhanathan/ghc | testsuite/tests/partial-sigs/should_fail/WildcardInADT1.hs | bsd-3-clause | 98 | 0 | 8 | 16 | 23 | 14 | 9 | 3 | 0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.