code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
module Language.Haskell.Inspector (
hasComposition,
hasGuards,
hasIf,
hasConditional,
hasLambda,
hasDirectRecursion,
hasUsage,
hasComprehension,
hasBinding,
hasTypeDeclaration,
hasTypeSignature,
hasAnonymousVariable,
hasExpression,
hasDecl,
hasRhs,
isParseable,
Inspection,
GlobalInspection
) where
import Language.Haskell.Syntax
import Language.Haskell.Names (isName)
import Language.Haskell.Explorer
-- | An inspection checks a property of one binding's definition
-- inside a piece of code.
type Inspection = Binding -> Code -> Bool

-- | An inspection over a whole piece of code, not tied to a binding.
type GlobalInspection = Code -> Bool

-- | Inspection that tells whether a binding uses the composition operator '.'
-- in its definition
hasComposition :: Inspection
hasComposition = hasExpression isCompositionOp
  where isCompositionOp (O (HsQVarOp (UnQual (HsSymbol ".")))) = True
        isCompositionOp _ = False

-- | Inspection that tells whether a binding uses guards
-- in its definition
hasGuards :: Inspection
hasGuards = hasRhs isGuardedRhs
  where isGuardedRhs (HsGuardedRhss _) = True
        isGuardedRhs _ = False

-- | Inspection that tells whether a binding uses ifs
-- in its definition
hasIf :: Inspection
hasIf = hasExpression isIfExpr
  where isIfExpr (E (HsIf _ _ _)) = True
        isIfExpr _ = False

-- | Inspection that tells whether a binding uses ifs or guards
-- in its definition
hasConditional :: Inspection
hasConditional binding code = hasIf binding code || hasGuards binding code
-- | Inspection that tells whether a binding uses a lambda expression
-- in its definition
hasLambda :: Inspection
hasLambda = hasExpression isLambdaExpr
  where isLambdaExpr (E (HsLambda _ _ _)) = True
        isLambdaExpr _ = False

-- | Inspection that tells whether a binding calls itself
hasDirectRecursion :: Inspection
hasDirectRecursion binding = hasUsage binding binding

-- | Inspection that tells whether a binding uses the given target binding
-- in its definition
hasUsage :: String -> Inspection
hasUsage target = hasExpression mentionsTarget
  where mentionsTarget expr = expressionToBinding expr == Just target

-- | Inspection that tells whether a binding uses a list comprehension
-- in its definition
hasComprehension :: Inspection
hasComprehension = hasExpression isComprehension
  where isComprehension (E (HsListComp _ _)) = True
        isComprehension _ = False

-- | Inspection that tells whether a top level binding exists
hasBinding :: Inspection
hasBinding binding code = not (null (rhssOf binding code))
-- | Inspection that tells whether there is a type synonym declaration
-- whose name matches the given binding
hasTypeDeclaration :: Inspection
hasTypeDeclaration binding = hasDecl f
  where f (HsTypeDecl _ hsName _ _) = isName binding hsName
        f _ = False

-- | Inspection that tells whether there is a type signature for exactly
-- the given binding (signatures covering several names at once do not match)
hasTypeSignature :: Inspection
hasTypeSignature binding = hasDecl f
  where f (HsTypeSig _ [hsName] _) = isName binding hsName
        f _ = False

-- | Inspection that tells whether any clause of the binding has a
-- wildcard '_' among its top-level argument patterns (wildcards nested
-- inside other patterns are not detected)
hasAnonymousVariable :: Inspection
hasAnonymousVariable binding = any f . declsOf binding
  where f (HsFunBind hsMatches) = any (any (== HsPWildCard) . p) hsMatches
        f _ = False
        -- the argument patterns of one clause
        p (HsMatch _ _ params _ _) = params

-- | Lift a predicate on expressions into an 'Inspection'
hasExpression :: (Expression -> Bool) -> Inspection
hasExpression f binding = has f (expressionsOf binding)

-- | Lift a predicate on right-hand sides into an 'Inspection'
hasRhs :: (HsRhs -> Bool)-> Inspection
hasRhs f binding = has f (rhssOf binding)

-- | Tells whether the code parses at all
isParseable :: GlobalInspection
isParseable = not.null.parseDecls

-- | Lift a predicate on declarations into a 'GlobalInspection'
hasDecl :: (HsDecl -> Bool) -> GlobalInspection
hasDecl f = has f parseDecls
-- private
-- Does any element produced by g satisfy f?
has f g = any f . g
| flbulgarelli/hs-inspector | src/Language/Haskell/Inspector.hs | mit | 3,260 | 0 | 15 | 716 | 812 | 432 | 380 | 76 | 2 |
{-# LANGUAGE MultiParamTypeClasses, TypeSynonymInstances, FlexibleInstances #-}
{- |
Module : ./OWL2/Propositional2OWL2.hs
Description : Comorphism from Propostional Logic to OWL 2
Copyright : (c) Felix Gabriel Mance
License : GPLv2 or higher, see LICENSE.txt
Maintainer : f.mance@jacobs-university.de
Stability : provisional
Portability : non-portable (via Logic.Logic)
-}
module OWL2.Propositional2OWL2 where
import Common.ProofTree
import Logic.Logic
import Logic.Comorphism
import Common.AS_Annotation
import Common.Id
import Common.Result
import OWL2.AS
import OWL2.Keywords
import OWL2.MS
import OWL2.Translate
import qualified OWL2.Morphism as OWLMor
import qualified OWL2.ProfilesAndSublogics as OWLSub
import qualified OWL2.Sign as OWLSign
import qualified OWL2.Logic_OWL2 as OWLLogic
import qualified OWL2.Symbols as OWLSym
import qualified Propositional.Logic_Propositional as PLogic
import Propositional.AS_BASIC_Propositional
import qualified Propositional.Sublogic as PSL
import qualified Propositional.Sign as PSign
import qualified Propositional.Morphism as PMor
import qualified Propositional.Symbol as PSymbol
import qualified Data.Set as Set
-- | Singleton tag naming the comorphism from propositional logic to OWL 2
data Propositional2OWL2 = Propositional2OWL2 deriving Show
instance Language Propositional2OWL2
-- Wires the source logic (Propositional, with its sublogics, syntax,
-- signatures, morphisms and symbols) to the target logic (OWL 2) and
-- delegates the actual translation to the functions below.
instance Comorphism Propositional2OWL2
    PLogic.Propositional
    PSL.PropSL
    BASIC_SPEC
    FORMULA
    SYMB_ITEMS
    SYMB_MAP_ITEMS
    PSign.Sign
    PMor.Morphism
    PSymbol.Symbol
    PSymbol.Symbol
    ProofTree
    OWLLogic.OWL2
    OWLSub.ProfSub
    OntologyDocument
    Axiom
    OWLSym.SymbItems
    OWLSym.SymbMapItems
    OWLSign.Sign
    OWLMor.OWLMorphism
    Entity
    OWLSym.RawSymb
    ProofTree
    where
      sourceLogic Propositional2OWL2 = PLogic.Propositional
      sourceSublogic Propositional2OWL2 = PSL.top
      targetLogic Propositional2OWL2 = OWLLogic.OWL2
      mapSublogic Propositional2OWL2 = Just . mapSub -- TODO
      map_theory Propositional2OWL2 = mapTheory
      isInclusionComorphism Propositional2OWL2 = True
      has_model_expansion Propositional2OWL2 = True
-- | Wrap a class expression C in the axiom @owl:Thing SubClassOf C@,
-- asserting that C holds universally
mkOWLDeclaration :: ClassExpression -> Axiom
mkOWLDeclaration ex = PlainAxiom (ClassEntity $ Expression $ setPrefix "owl"
    $ mkQName thingS) $ ListFrameBit (Just SubClass) $ ExpressionBit [([], ex)]

-- | Turn a propositional token into an OWL qualified name
tokToQName :: Token -> QName
tokToQName = idToIRI . simpleIdToId

-- | Translate a propositional formula into an OWL class expression:
-- False/True map to owl:Nothing/owl:Thing, atoms to named classes,
-- negation to complement, and/or to intersection/union,
-- @a => b@ to @(not a) or b@, and equivalence to the conjunction of
-- both implications
mapFormula :: FORMULA -> ClassExpression
mapFormula f = case f of
    False_atom _ -> Expression $ mkQName nothingS
    True_atom _ -> Expression $ mkQName thingS
    Predication p -> Expression $ tokToQName p
    Negation nf _ -> ObjectComplementOf $ mapFormula nf
    Conjunction fl _ -> ObjectJunction IntersectionOf $ map mapFormula fl
    Disjunction fl _ -> ObjectJunction UnionOf $ map mapFormula fl
    Implication a b _ -> ObjectJunction UnionOf [ObjectComplementOf
        $ mapFormula a, mapFormula b]
    Equivalence a b _ -> ObjectJunction IntersectionOf $ map mapFormula
        [Implication a b nullRange, Implication b a nullRange]
-- | Declare every predicate of a Pred_item as a universally wrapped
-- named class
mapPredDecl :: PRED_ITEM -> [Axiom]
mapPredDecl (Pred_item il _) = map (mkOWLDeclaration . Expression
    . tokToQName) il

-- | Translate one annotated axiom, dropping its annotations
mapAxiomItems :: Annoted FORMULA -> Axiom
mapAxiomItems = mkOWLDeclaration . mapFormula . item

-- | Translate a basic item: either predicate declarations or axioms
mapBasicItems :: BASIC_ITEMS -> [Axiom]
mapBasicItems bi = case bi of
    Pred_decl p -> mapPredDecl p
    Axiom_items al -> map mapAxiomItems al

-- | Translate a whole basic spec by translating each item
mapBasicSpec :: BASIC_SPEC -> [Axiom]
mapBasicSpec (Basic_spec il) = concatMap (mapBasicItems . item) il

-- | The propositional symbols become OWL concept names
mapSign :: PSign.Sign -> OWLSign.Sign
mapSign ps = OWLSign.emptySign {OWLSign.concepts = Set.fromList
    $ map idToIRI $ Set.toList $ PSign.items ps}

-- | Translate a theory: map the signature and every named formula
mapTheory :: (PSign.Sign, [Named FORMULA])
    -> Result (OWLSign.Sign, [Named Axiom])
mapTheory (psig, fl) = return (mapSign psig, map
    (mapNamed $ mkOWLDeclaration . mapFormula) fl)

-- | Every propositional sublogic maps to the top OWL profile
mapSub :: PSL.PropSL -> OWLSub.ProfSub
mapSub _ = OWLSub.topS
| gnn/Hets | OWL2/Propositional2OWL2.hs | gpl-2.0 | 3,922 | 0 | 11 | 719 | 934 | 495 | 439 | 93 | 8 |
import Data.List
-- | Read the spiral's side length from stdin and print the diagonal sum.
main :: IO ()
main = do
  n <- readLn
  print (solve n)
-- | Sum of the numbers on both diagonals of a p-by-p number spiral
-- built outward from 1 (Project Euler 28); e.g. @solve 5 == 101@.
--
-- The value at 0-based position i of the spiral sequence [1..] is
-- simply i + 1, so we sum that directly.  (The previous version
-- looked each index up in [1..] with (!!), which made the whole sum
-- quadratic in the number of diagonal entries.)
solve :: Int -> Integer
solve p = sum [ fromIntegral (i + 1) | i <- indices p ]

-- The list [2,4,...(p-1)] represents the gaps between the spiral numbers
-- in every square.
-- These gaps each happen 4 times.
-- There are p squares, but the inner number is not included, so make that (p-1).
indices :: Int -> [Int]
indices p = (0:) $ scanl1 (+) $ concatMap (replicate 4) [2,4..(p-1)]
| NorfairKing/project-euler | 028/haskell/solution.hs | gpl-2.0 | 506 | 0 | 9 | 116 | 145 | 80 | 65 | 8 | 1 |
-- The fraction 49/98 is a curious fraction, as an inexperienced mathematician in attempting to simplify it may incorrectly believe that 49/98 = 4/8, which is correct, is obtained by cancelling the 9s.
--
-- We shall consider fractions like, 30/50 = 3/5, to be trivial examples.
--
-- There are exactly four non-trivial examples of this type of fraction, less than one in value, and containing two digits in the numerator and denominator.
--
-- If the product of these four fractions is given in its lowest common terms, find the value of the denominator.
import Data.Ratio
-- | Print the denominator of the product of the four curious fractions.
main :: IO ()
main = putStrLn (show answer)
-- | Denominator of the product, in lowest terms, of the four
-- non-trivial digit-cancelling fractions (Project Euler 33).
answer :: Integer
answer = denominator (product (map third matches))

-- Search all digit triples (a,b,c) in 1..9 for two-digit fractions
-- (10a+b)/(10b+c) whose value equals a/c, i.e. fractions where
-- "cancelling" the shared middle digit b happens to give the right
-- answer.  Trivial repetitions are excluded by a /= b and a /= c.
-- Each hit is reported as ((a,c), the two-digit pair, the ratio a%c).
matches :: [((Integer, Integer), (Integer, Integer), Ratio Integer)]
matches =
  [ ((a, c), twoDigitPair, value)
  | a <- [1 .. 9]
  , b <- [1 .. 9]
  , c <- [1 .. 9]
  , a /= b && a /= c
  , let value = a % c
  , let twoDigitPair = (10 * a + b, 10 * b + c)
  , uncurry (%) twoDigitPair == value
  ]

-- | Third component of a 3-tuple.
third :: (a,a,b) -> b
third (_, _, r) = r
| ciderpunx/project_euler_in_haskell | euler033.hs | gpl-2.0 | 1,404 | 0 | 12 | 388 | 269 | 157 | 112 | 17 | 1 |
module NLP.Summarizer.Arguments where
import Data.Text (Text)
-- | How much of the input to keep in the summary: presumably either a
-- percentage of the original or an absolute number of lines
-- (judged from the constructor names -- confirm against the summarizer).
data Ratio = RatioByPercent Int | RatioByLines Int
  deriving Show
-- | The inputs for one summarizer run.
data SummarizerArguments =
  SummarizerArguments { dictionaryLanguage :: Text -- ^ selects the dictionary to use
                      , inputString :: Text -- ^ the text to summarize
                      , ratio :: Ratio -- ^ desired size of the summary
                      }
  deriving Show
| rainbyte/summarizer | src/NLP/Summarizer/Arguments.hs | gpl-3.0 | 333 | 0 | 8 | 110 | 64 | 40 | 24 | 9 | 0 |
{-# LANGUAGE DeriveFoldable #-}
module A2List where
import A1Nat
-- | A hand-rolled cons list, isomorphic to [a].
data List a = Nil
            | a `Cons` List a
            deriving (Show, Eq, Ord, Foldable)
-- Cons associates to the right, like (:).
infixr 5 `Cons`
-- | Concatenation; O(length of the first argument).
append :: List a -> List a -> List a
Nil `append` ys = ys
(x `Cons` xs) `append` ys = x `Cons` (xs `append` ys)
-- | A hand-rolled Maybe.
data Perhaps a = Nope
               | Have a
               deriving (Show, Eq, Ord, Foldable)
-- | The first element, if there is one.
headList :: List a -> Perhaps a
headList Nil          = Nope
headList (h `Cons` _) = Have h

-- | Everything after the first element, if there is one.
tailList :: List a -> Perhaps (List a)
tailList Nil          = Nope
tailList (_ `Cons` t) = Have t

-- | The final element, if there is one.
lastList :: List a -> Perhaps a
lastList Nil             = Nope
lastList (x `Cons` Nil)  = Have x
lastList (_ `Cons` rest) = lastList rest

-- | Everything but the final element.
-- NOTE(review): a singleton yields Nope here, whereas Data.List.init
-- of a one-element list is [] -- possibly this was meant to be
-- Have Nil; behavior preserved as-is.
initList :: List a -> Perhaps (List a)
initList Nil            = Nope
initList (_ `Cons` Nil) = Nope
initList (x `Cons` xs)  = Have (dropLast x xs)
  where dropLast _ Nil           = Nil
        dropLast y (z `Cons` zs) = y `Cons` dropLast z zs

-- | Split off the head, if there is one.
unconsList :: List a -> Perhaps (a, List a)
unconsList Nil           = Nope
unconsList (x `Cons` xs) = Have (x, xs)

-- | True exactly for the empty list.
nullList :: List a -> Bool
nullList Nil = True
nullList _   = False
-- | Length as a Peano numeral.
lengthList :: List a -> Nat
lengthList Nil          = Z
lengthList (_ `Cons` t) = S Z `plus` lengthList t

-- | Sum of all elements; Z for the empty list.
sumList :: List Nat -> Nat
sumList Nil          = Z
sumList (n `Cons` t) = n `plus` sumList t

-- | Product of all elements; one (S Z) for the empty list.
productList :: List Nat -> Nat
productList Nil          = S Z
productList (n `Cons` t) = n `times` productList t

-- | Largest element, if the list is non-empty.
maximumNatList :: List Nat -> Perhaps Nat
maximumNatList Nil           = Nope
maximumNatList (x `Cons` xs) = Have (walk x xs)
  where walk best Nil           = best
        walk best (y `Cons` ys) = walk (maxNat best y) ys

-- | Smallest element, if the list is non-empty.
minimumNatList :: List Nat -> Perhaps Nat
minimumNatList Nil           = Nope
minimumNatList (x `Cons` xs) = Have (walk x xs)
  where walk best Nil           = best
        walk best (y `Cons` ys) = walk (minNat best y) ys
-- | The first n elements: everything if n exceeds the length,
-- nothing if n is not positive.
takeList :: Int -> List a -> List a
takeList _ Nil = Nil
takeList n (x `Cons` xs)
  | n < 1     = Nil
  | otherwise = x `Cons` takeList (n - 1) xs

-- | Everything except the first n elements.
dropList :: Int -> List a -> List a
dropList _ Nil = Nil
dropList n whole@(_ `Cons` rest)
  | n < 1     = whole
  | otherwise = dropList (n - 1) rest

-- | 'takeList' and 'dropList' at once.
splitList :: Int -> List a -> (List a, List a)
splitList n xs = (takeList n xs, dropList n xs)
-- | Reverse a list in a single pass.
--
-- Uses an accumulating parameter, making this O(n); the previous
-- version appended the head to the end of the reversed tail at every
-- step, which was O(n^2).
reverseList :: List a -> List a
reverseList = go Nil
  where
    go acc Nil           = acc
    go acc (x `Cons` xs) = go (x `Cons` acc) xs
-- | Insert the separator after every element.  Note this also places
-- a separator after the last element, unlike Data.List.intersperse.
intersperseList :: a -> List a -> List a
intersperseList _   Nil           = Nil
intersperseList sep (y `Cons` ys) = y `Cons` sep `Cons` intersperseList sep ys

-- | Flatten one level of nesting.
concatList :: List (List a) -> List a
concatList Nil              = Nil
concatList (xs `Cons` Nil)  = xs
concatList (xs `Cons` rest) = xs `append` concatList rest

-- | Flatten, placing the separator list between (and, per
-- 'intersperseList', after) the inner lists.
intercalateList :: List a -> List (List a) -> List a
intercalateList sep = concatList . intersperseList sep

-- | Pair up elements; the result is as long as the shorter input.
zipList :: List a -> List b -> List (a, b)
zipList Nil _   = Nil
zipList _   Nil = Nil
zipList (x `Cons` xs) (y `Cons` ys) = (x, y) `Cons` zipList xs ys

-- | Membership test.
elemList :: (Eq a) => a -> List a -> Bool
elemList _ Nil           = False
elemList e (y `Cons` ys) = e == y || elemList e ys
| alexander-b/thug-beginners | solutions/A2List.hs | gpl-3.0 | 3,230 | 0 | 9 | 1,022 | 1,484 | 783 | 701 | 84 | 2 |
{-# LANGUAGE QuasiQuotes #-}
import Codec.Rga.Parser
import Quote
import Control.Monad
import Data.List
import Data.String.Interpolation
import Triangulation
import Data.Binary
import Control.Applicative
import Data.Ord
import Util
import Codec.Compression.GZip
tetsp p = p . tNumberOfTetrahedra_ . snd
maxTets n = filter (tetsp (<= n))
main = do
dont $ go "/usr/share/regina-normal/examples/closed-or-census.rga" "ClosedOrCensus6" "closedOrCensus6" (maxTets 6)
go "/usr/share/regina-normal/examples/closed-or-census-large.rga" "ClosedOrCensus7To10" "closedOrCensus7To10" (filter (tetsp (> 6)))
dont $ go "/usr/share/regina-normal/examples/closed-nor-census.rga" "ClosedNorCensus8" "closedNorCensus8" (maxTets 8)
dont $ go "/usr/share/regina-normal/examples/closed-hyp-census.rga" "Bench.Triangulations" "trs"
(take 10 . filter (tetsp (liftM2 (&&) (>= 11) (<= 16))))
dont $ go "/usr/share/regina-normal/examples/closed-hyp-census.rga" "ClosedHypCensus" "ClosedHypCensus" id
go rga modname lowername filter_ = do
let fn = "/tmp/"++modname++".hs"
putStrLn fn
trs0 <- readRgaZip rga
let trs = filter_ trs0
it =
quotePrec 11
. compressWith defaultCompressParams { compressLevel = bestCompression }
$ encode trs
let
src =
[str|
module $modname$ where
import Triangulation
import Data.Binary
import qualified Data.ByteString.Lazy.Char8
import Codec.Compression.GZip(decompress)
$lowername$ :: [LabelledTriangulation]
$lowername$ = decode (decompress $it$)
-- vim: nowrap
|]
writeFile fn src
-- vim: wrap
| DanielSchuessler/hstri | import-census.hs | gpl-3.0 | 1,799 | 0 | 16 | 464 | 350 | 179 | 171 | 34 | 1 |
module Hive.Data.Queue
( Queue
, mkEmpty
, isEmpty
, insert
, peak
, size
, remove
)
where
-------------------------------------------------------------------------------
-- | A FIFO queue as the classic pair of stacks: elements leave from
-- the front list and are pushed onto the back list; when the front
-- runs dry, the back is reversed into it.  'insert' and (amortised)
-- 'peak' are O(1); 'size' and 'remove' are O(n).
data Queue a = Queue [a] [a]
-------------------------------------------------------------------------------
-- | The empty queue.
mkEmpty :: Queue a
mkEmpty = Queue [] []

-- | True iff the queue holds no elements.
isEmpty :: Queue a -> Bool
isEmpty (Queue [] []) = True
isEmpty _             = False

-- | Enqueue an element at the back.
insert :: a -> Queue a -> Queue a
insert e (Queue outL inL) = Queue outL (e:inL)

-- | Dequeue: the front element together with the remaining queue.
-- Calling 'peak' on an empty queue is an error.  (Previously this
-- looped forever on the empty queue: reversing the empty back list
-- reproduced the same empty state.)
peak :: Queue a -> (a, Queue a)
peak (Queue [] [])       = error "Hive.Data.Queue.peak: empty queue"
peak (Queue [] inL)      = peak (Queue (reverse inL) [])
peak (Queue (x:outL) inL) = (x, Queue outL inL)

-- | Number of elements; O(n).
size :: Queue a -> Int
size (Queue outL inL) = length outL + length inL

-- | Remove every occurrence of the given element.
remove :: (Eq a) => a -> Queue a -> Queue a
remove e (Queue outL inL) = Queue (filter' outL) (filter' inL)
  where
    filter' = filter (/= e)
module OSC.UDPSocket( udpSocket
, udpSocketWithHandler
, sendBytes
, sendEmpty
, UdpOscHandle ) where
import qualified Control.Monad as M (when)
import qualified Control.Concurrent as C (forkIO, forkFinally)
import qualified Network.Socket as S hiding (send, sendTo, recv, recvFrom)
import qualified Data.ByteString as B (ByteString, empty, null)
import qualified Data.ByteString.Lazy as L (ByteString, toStrict)
import qualified Data.ByteString.Char8 as BC (pack)
import qualified Network.Socket.ByteString as S
-- | A UDP endpoint: the open socket together with the address it was
-- resolved for.
data UdpOscHandle = UdpOscHandle S.Socket S.SockAddr
-- |
-- Open, but do not bind, a UDP Socket on the
-- default interface on /port/.
udpSocket :: String -> IO UdpOscHandle
udpSocket port = S.withSocketsDo $ do
  -- resolve a passive (wildcard) address for the given port
  ai <- S.getAddrInfo (Just (S.defaultHints {S.addrFlags = [S.AI_PASSIVE]}))
                      Nothing
                      (Just port)
  -- NOTE(review): 'head' is partial; getAddrInfo throws rather than
  -- returning [] in practice, but worth confirming.
  let addr = head ai
      saddr = S.addrAddress addr
  sock <- S.socket (S.addrFamily addr)
                   S.Datagram
                   S.defaultProtocol
  return $ UdpOscHandle sock saddr
-- |
-- Bind a UDP socket on the default interface,
-- i.e. 'udpSocket' followed by a bind to the resolved address.
bindUDPSocket :: String -> IO UdpOscHandle
bindUDPSocket port = S.withSocketsDo $ do
  uoh@(UdpOscHandle sock saddr) <- udpSocket port
  S.bind sock saddr
  return uoh
-- |
-- Receive loop for /sock/: each datagram is handed to /action/ on a
-- fresh thread; an empty datagram ends the loop, returning control to
-- the caller (which, in 'udpSocketWithHandler', closes the socket).
--
-- Fixed: the empty-datagram check was previously
-- @M.when (B.null msg) $ return ()@, which is a no-op, so the loop
-- never terminated and the socket was never closed -- even though
-- 'sendEmpty' exists precisely to send that shutdown datagram.
closeOnEmptyRecv :: (B.ByteString -> IO ()) -> S.Socket -> IO ()
closeOnEmptyRecv action sock = do
  (msg, _) <- S.recvFrom sock 2048
  if B.null msg
    then return ()
    else do
      _ <- C.forkIO (action msg)
      closeOnEmptyRecv action sock
-- |
-- General function executing ByteString actions
-- received on a UDP socket in a new thread.
udpSocketWithHandler :: String
                     -> (B.ByteString -> IO ())
                     -> IO UdpOscHandle
udpSocketWithHandler port action = do
  ush@(UdpOscHandle sock saddr) <- bindUDPSocket port
  -- run the receive loop on its own thread; whenever it finishes
  -- (or dies with an exception) the socket is closed
  C.forkFinally
    (closeOnEmptyRecv action sock)
    (\_ -> S.sClose sock)
  return ush
-- |
-- Send a Lazy ByteString to a socket; returns the byte count sent.
sendBytes :: L.ByteString -> UdpOscHandle -> IO Int
sendBytes b (UdpOscHandle s a) = S.sendTo s (L.toStrict b) a
-- |
-- Send a null packet to a socket.  An empty datagram is presumably
-- the shutdown signal for the receive loop in 'closeOnEmptyRecv'
-- (confirm against callers).
sendEmpty :: UdpOscHandle -> IO Int
sendEmpty (UdpOscHandle s a) = S.sendTo s B.empty a
| destroyhimmyrobots/osc.hs | OSC/UDPSocket.hs | gpl-3.0 | 2,305 | 47 | 14 | 496 | 612 | 352 | 260 | 48 | 1 |
module Hadolint.Process (run, RulesConfig (..)) where
import qualified Control.Foldl as Foldl
import qualified Data.IntMap.Strict as SMap
import qualified Data.Sequence as Seq
import qualified Data.Set as Set
import qualified Data.Text as Text
import qualified Hadolint.Ignore
import Hadolint.Rule (CheckFailure (..), Failures, Rule, RuleCode)
import qualified Hadolint.Rule as Rule
import qualified Hadolint.Rule.DL3000
import qualified Hadolint.Rule.DL3001
import qualified Hadolint.Rule.DL3002
import qualified Hadolint.Rule.DL3003
import qualified Hadolint.Rule.DL3004
import qualified Hadolint.Rule.DL3005
import qualified Hadolint.Rule.DL3006
import qualified Hadolint.Rule.DL3007
import qualified Hadolint.Rule.DL3008
import qualified Hadolint.Rule.DL3009
import qualified Hadolint.Rule.DL3010
import qualified Hadolint.Rule.DL3011
import qualified Hadolint.Rule.DL3012
import qualified Hadolint.Rule.DL3013
import qualified Hadolint.Rule.DL3014
import qualified Hadolint.Rule.DL3015
import qualified Hadolint.Rule.DL3016
import qualified Hadolint.Rule.DL3018
import qualified Hadolint.Rule.DL3019
import qualified Hadolint.Rule.DL3020
import qualified Hadolint.Rule.DL3021
import qualified Hadolint.Rule.DL3022
import qualified Hadolint.Rule.DL3023
import qualified Hadolint.Rule.DL3024
import qualified Hadolint.Rule.DL3025
import qualified Hadolint.Rule.DL3026
import qualified Hadolint.Rule.DL3027
import qualified Hadolint.Rule.DL3028
import qualified Hadolint.Rule.DL3029
import qualified Hadolint.Rule.DL3030
import qualified Hadolint.Rule.DL3032
import qualified Hadolint.Rule.DL3033
import qualified Hadolint.Rule.DL3034
import qualified Hadolint.Rule.DL3035
import qualified Hadolint.Rule.DL3036
import qualified Hadolint.Rule.DL3037
import qualified Hadolint.Rule.DL3038
import qualified Hadolint.Rule.DL3040
import qualified Hadolint.Rule.DL3041
import qualified Hadolint.Rule.DL3042
import qualified Hadolint.Rule.DL3043
import qualified Hadolint.Rule.DL3044
import qualified Hadolint.Rule.DL3045
import qualified Hadolint.Rule.DL3046
import qualified Hadolint.Rule.DL3047
import qualified Hadolint.Rule.DL3048
import qualified Hadolint.Rule.DL3049
import qualified Hadolint.Rule.DL3050
import qualified Hadolint.Rule.DL3051
import qualified Hadolint.Rule.DL3052
import qualified Hadolint.Rule.DL3053
import qualified Hadolint.Rule.DL3054
import qualified Hadolint.Rule.DL3055
import qualified Hadolint.Rule.DL3056
import qualified Hadolint.Rule.DL3057
import qualified Hadolint.Rule.DL3058
import qualified Hadolint.Rule.DL3059
import qualified Hadolint.Rule.DL3060
import qualified Hadolint.Rule.DL4000
import qualified Hadolint.Rule.DL4001
import qualified Hadolint.Rule.DL4003
import qualified Hadolint.Rule.DL4004
import qualified Hadolint.Rule.DL4005
import qualified Hadolint.Rule.DL4006
import qualified Hadolint.Rule.Shellcheck
import qualified Hadolint.Shell as Shell
import Language.Docker.Syntax
-- | Contains the required parameters for optional rules
data RulesConfig = RulesConfig
{ -- | The docker registries that are allowed in FROM
allowedRegistries :: Set.Set Registry,
labelSchema :: Rule.LabelSchema,
strictLabels :: Bool
}
deriving (Show, Eq)
instance Semigroup RulesConfig where
RulesConfig a1 a2 a3 <> RulesConfig b1 b2 b3 =
RulesConfig
(a1 <> b1)
(a2 <> b2)
(a3 || b3)
instance Monoid RulesConfig where
mempty = RulesConfig mempty mempty False
-- | Result of folding over a Dockerfile: the per-line ignore
-- directives found, plus every rule failure collected.
data AnalisisResult = AnalisisResult
  { -- | The set of ignored rules per line
    ignored :: SMap.IntMap (Set.Set RuleCode),
    -- | A set of failures collected for each rule
    failed :: Failures
  }
-- | Run all rules over the instructions, dropping any failure whose
-- rule code is ignored on the line where it occurred.
run :: RulesConfig -> [InstructionPos Text.Text] -> Failures
run config dockerfile = Seq.filter shouldKeep failed
  where
    AnalisisResult {..} = Foldl.fold (analyze config) dockerfile
    -- keep the failure unless the line has an ignore set AND the
    -- failing rule's code is a member of it (the Maybe do-block is
    -- Just True exactly in that case)
    shouldKeep CheckFailure {line, code} =
      Just True /= do
        ignoreList <- SMap.lookup line ignored
        return $ code `Set.member` ignoreList
-- | Fold that pairs the ignore-directive scan with the rule checks
-- (the plain rules plus the same rules applied inside ONBUILD).
analyze :: RulesConfig -> Foldl.Fold (InstructionPos Text.Text) AnalisisResult
analyze config =
  AnalisisResult
    <$> Hadolint.Ignore.ignored
    <*> Foldl.premap parseShell (failures config <> onBuildFailures config)
-- | Parse the shell fragments of an instruction so shell-aware rules
-- can inspect them.
parseShell :: InstructionPos Text.Text -> InstructionPos Shell.ParsedShell
parseShell = fmap Shell.parseShell
-- | Apply the full rule set to the instruction wrapped inside each
-- ONBUILD, by filtering for ONBUILD and unwrapping the inner
-- instruction first.
onBuildFailures :: RulesConfig -> Rule Shell.ParsedShell
onBuildFailures config =
  Foldl.prefilter
    isOnBuild
    (Foldl.premap unwrapOnbuild (failures config))
  where
    isOnBuild InstructionPos {instruction = OnBuild {}} = True
    isOnBuild _ = False
    unwrapOnbuild inst@InstructionPos {instruction = OnBuild i} = inst {instruction = i}
    unwrapOnbuild inst = inst
-- | The complete rule set, combined as one Rule via (<>); rules that
-- need configuration receive the relevant 'RulesConfig' fields.
failures :: RulesConfig -> Rule Shell.ParsedShell
failures RulesConfig {allowedRegistries, labelSchema, strictLabels} =
  Hadolint.Rule.DL3000.rule
    <> Hadolint.Rule.DL3001.rule
    <> Hadolint.Rule.DL3002.rule
    <> Hadolint.Rule.DL3003.rule
    <> Hadolint.Rule.DL3004.rule
    <> Hadolint.Rule.DL3005.rule
    <> Hadolint.Rule.DL3006.rule
    <> Hadolint.Rule.DL3007.rule
    <> Hadolint.Rule.DL3008.rule
    <> Hadolint.Rule.DL3009.rule
    <> Hadolint.Rule.DL3010.rule
    <> Hadolint.Rule.DL3011.rule
    <> Hadolint.Rule.DL3012.rule
    <> Hadolint.Rule.DL3013.rule
    <> Hadolint.Rule.DL3014.rule
    <> Hadolint.Rule.DL3015.rule
    <> Hadolint.Rule.DL3016.rule
    <> Hadolint.Rule.DL3018.rule
    <> Hadolint.Rule.DL3019.rule
    <> Hadolint.Rule.DL3020.rule
    <> Hadolint.Rule.DL3021.rule
    <> Hadolint.Rule.DL3022.rule
    <> Hadolint.Rule.DL3023.rule
    <> Hadolint.Rule.DL3024.rule
    <> Hadolint.Rule.DL3025.rule
    <> Hadolint.Rule.DL3026.rule allowedRegistries
    <> Hadolint.Rule.DL3027.rule
    <> Hadolint.Rule.DL3028.rule
    <> Hadolint.Rule.DL3029.rule
    <> Hadolint.Rule.DL3030.rule
    <> Hadolint.Rule.DL3032.rule
    <> Hadolint.Rule.DL3033.rule
    <> Hadolint.Rule.DL3034.rule
    <> Hadolint.Rule.DL3035.rule
    <> Hadolint.Rule.DL3036.rule
    <> Hadolint.Rule.DL3037.rule
    <> Hadolint.Rule.DL3038.rule
    <> Hadolint.Rule.DL3040.rule
    <> Hadolint.Rule.DL3041.rule
    <> Hadolint.Rule.DL3042.rule
    <> Hadolint.Rule.DL3043.rule
    <> Hadolint.Rule.DL3044.rule
    <> Hadolint.Rule.DL3045.rule
    <> Hadolint.Rule.DL3046.rule
    <> Hadolint.Rule.DL3047.rule
    <> Hadolint.Rule.DL3048.rule
    <> Hadolint.Rule.DL3049.rule labelSchema
    <> Hadolint.Rule.DL3050.rule labelSchema strictLabels
    <> Hadolint.Rule.DL3051.rule labelSchema
    <> Hadolint.Rule.DL3052.rule labelSchema
    <> Hadolint.Rule.DL3053.rule labelSchema
    <> Hadolint.Rule.DL3054.rule labelSchema
    <> Hadolint.Rule.DL3055.rule labelSchema
    <> Hadolint.Rule.DL3056.rule labelSchema
    <> Hadolint.Rule.DL3057.rule
    <> Hadolint.Rule.DL3058.rule labelSchema
    <> Hadolint.Rule.DL3059.rule
    <> Hadolint.Rule.DL3060.rule
    <> Hadolint.Rule.DL4000.rule
    <> Hadolint.Rule.DL4001.rule
    <> Hadolint.Rule.DL4003.rule
    <> Hadolint.Rule.DL4004.rule
    <> Hadolint.Rule.DL4005.rule
    <> Hadolint.Rule.DL4006.rule
    <> Hadolint.Rule.Shellcheck.rule
| lukasmartinelli/hadolint | src/Hadolint/Process.hs | gpl-3.0 | 7,110 | 0 | 69 | 1,024 | 1,636 | 1,006 | 630 | -1 | -1 |
module Main where
import Data.Char
import Data.List
import Data.Ord
-- | Decimal digits of a non-negative number, least-significant digit
-- first, as characters; 0 yields the empty string.
intToList :: (Integral a) => a -> [Char]
intToList 0 = []
intToList n = intToDigit (fromIntegral digit) : intToList rest
  where
    (rest, digit) = n `divMod` 10
-- | True when the string is exactly the digits 1..9 in ascending
-- order, i.e. a sorted 1-through-9 pandigital string.
isGood :: String -> Bool
isGood s = s == "123456789"
-- Search for 1-9 pandigital products a*b = c (Project Euler 32):
-- a has e+1 digits; b's digit count is chosen so a, b and c can have
-- nine digits between them.  Only the products c are returned, with
-- possible duplicates (callers 'nub' them).
--check :: Integer -> Integer -> Integer -> [(Integer , Integer, Integer)]
--check :: (Integral a)=> a -> a -> a -> [(a,a,a)]
check e = [ (c) | a<-[alower..aupper] , b<-[10^blower..10^bupper] , let c=a*b,
    (isGood $ list a b c)]
  where
    alower = 10^e
    aupper = 10^(e+1)-1
    ee = e+1
    blower = div (9-2*ee) 2
    bupper = (blower+1)
    -- the digits of all three numbers, combined and sorted
    list a b c= sort ((intToList a)++ (intToList b) ++ (intToList c))
-- | True when the list reads the same forwards and backwards.
isPalindrome :: (Eq a) => [a] -> Bool
isPalindrome xs = xs == reverse xs
-- | Binary digits of a non-negative number, least-significant bit
-- first, as characters; 0 yields the empty string.
toBinaryString :: Int -> [Char]
toBinaryString 0 = []
toBinaryString n = intToDigit bit : toBinaryString rest
  where
    (rest, bit) = n `divMod` 2
-- | True when n is palindromic both in decimal and in binary
-- (Project Euler 36).  Digit order does not matter for a palindrome
-- check, so the least-significant-first digit lists are fine.
goodN :: Integer -> Bool
goodN n = decimalPal && binaryPal
  where
    binaryPal  = isPalindrome (toBinaryString (fromInteger n))
    decimalPal = isPalindrome (intToList (fromInteger n))
-- | Numbers that are simultaneously triangular, pentagonal and
-- hexagonal, searching the first b terms of each sequence; each hit
-- is paired with its multiplicity across the three lists (always 3).
solve45 :: Int -> [( Int, Int ) ]
solve45 b =
  [ (rep, length grp)
  | grp@(rep:_) <- group (sort (take b tris ++ take b pents ++ take b hexes))
  , length grp == 3
  ]
  where
    tris  = [ n * (n + 1) `div` 2     | n <- [1 ..] ]
    pents = [ n * (3 * n - 1) `div` 2 | n <- [1 ..] ]
    hexes = [ n * (2 * n - 1)         | n <- [1 ..] ]
-- From a stream of consecutive values, keep the entries lying on the
-- diagonals of a number spiral: 'it' is the current gap between kept
-- entries and 'n' counts corners modulo 4; after every fourth corner
-- the gap grows by 2 (the next ring of the spiral).  See 'diagonals'
-- below for the intended invocation.
getDiagonals [] _ _ = []
getDiagonals (x:xs) it n = x : (getDiagonals (drop nit xs) nit nn)
  where
    nn = (mod (n+1) 4)
    nit = if (nn)==0 then (it+2) else it
-- | Primality by trial division up to (a Double approximation of)
-- the square root.
--
-- Fixed: every n < 2 -- including 0, 1 and all negatives -- was
-- previously reported prime, because the trial-division range was
-- empty for them.  Existing callers are unaffected: 'primesInf'
-- starts at 3 and the prime product in 'mainDontRemember' only gained
-- a harmless factor of 1.
isPrime :: Integer -> Bool
isPrime n
  | n < 2     = False
  | n == 2    = True
  | otherwise = null divisors
  where
    divisors = [ k | k <- [2 .. ceiling (sqrt (fromIntegral n))], n `mod` k == 0 ]
-- All odd primes (2 is deliberately skipped)
primesInf = [k | k<-[3,5..] , isPrime k]
-- The diagonal values of the number spiral centred on 1: 3,5,7,9,13,...
diagonals = getDiagonals [3..] 1 0
-- Fraction of primes among the diagonal entries of a spiral with side
-- length l (Project Euler 58); 2*l - 1 counts every diagonal cell,
-- centre included.
ratio l = (fromIntegral overlap )/((2*(fromIntegral l))-1)
  where
    diagonal = takeWhile (<=l^2) diagonals
    overlap = length [n | n<-diagonal , isPrime n]
-- Prints the diagonal prime ratio for a range of spiral side lengths
-- (exploration for Project Euler 58)
mainsprialprimes = do
  s<- getLine
  -- NOTE(review): the line read above is never used
  putStrLn $ show $ [(i,ratio i) | i<-[20001,20003..35001]]
--n/phi(n) minimize phi to get this ratio high. phi(n) is small when the number of
--non-coprime number is high. The # that shares divisors with more numbers smaller than itself
--is the one whose factorization includes the most primes, with product less than 10^6
-- Product of every n in [1..18] that isPrime accepts (Project Euler 69)
mainDontRemember = putStrLn $ show $ product [n | n<-[1..18], isPrime n]
-- Project Euler 80: sum of the first 100 decimal digits of sqrt n for
-- every non-square n in [2..99].  Digits come from 'mysqrt'; since
-- 'intToList' is least-significant-first, the reverse puts them in
-- most-significant-first order before taking 100.
-- NOTE(review): nhun's first parameter c is unused.
solve80 = sum [(nhun 1 n) | n<-[2..99], not (is_square n)]
  where
    nhun c n = sum $ map digitToInt $ take 100 $ reverse (intToList (mysqrt (5*n) 5 1 2000))
    is_square n = sq * sq == n
      where sq = floor $ sqrt $ (fromIntegral n::Double)
-- Digit-by-digit integer square root; appears to be the classic
-- "square root by subtraction" scheme (the result b encodes the
-- digits after 'limit' refinement steps) -- confirm against the
-- published algorithm before reuse.
mysqrt :: Integer -> Integer -> Integer -> Integer-> Integer
mysqrt a b i limit
  | i == limit = b
  | a >= b = (mysqrt (a-b) (b+10) (i+1) limit)
  | otherwise = (mysqrt(a*100) ((100*(div b 10)) +5) (i+1) limit)
-- Print the Euler 80 answer
main80 = putStrLn $ show $ solve80
-- Project Euler 40: product of the digits at positions 1, 10, 100,
-- ..., 10^6 of Champernowne's constant (digits of 1,2,3,... in order)
main40 = do
  let a=( concat [reverse .intToList $ n | n<-[1..]])
  putStrLn $ show $ foldr (\x acc -> acc*(digitToInt x)) 1 [a!!(-1+10^n) | n<-[0,1,2,3,4,5,6]]
-- Project Euler 36: sum of numbers below 10^6 palindromic in both
-- base 10 and base 2
main36 = putStrLn (show $ sum $ [n | n<-[0..10^6], goodN n])
-- Project Euler 32: sum of all distinct 1-9 pandigital products
main32 = putStrLn (show $sum $ nub $ concat [check i | i<-[0..3]])
-- Project Euler 99, evaluating a^b directly (very expensive): each
-- input line holds "base exponent"; returns the 1-based line number
-- with the greatest value.
solve99BruteForce ls =fst $ maximumBy (comparing snd) $ zip [1..] (map (\(a,b)-> a^b) pi)
  where
    ps = map (\a -> ( (words a)!!0,(words a)!!1 ) ) ls
    pi = map (\p -> (read (fst p) :: Integer , read (snd p):: Integer) ) ps
-- Same problem, comparing b * log a instead of a^b
solve99 ls = fst $ maximumBy (comparing snd) $ zip [1..] (map (\(a,b)-> (fromIntegral b)*(log( fromIntegral(a)))) pi)
  where
    ps = map (\a -> ( (words a)!!0,(words a)!!1 ) ) ls
    pi = map (\p -> (read (fst p) :: Integer , read (snd p):: Integer) ) ps
-- The 9 most significant decimal digits of n, least-significant-first
-- (as produced by intToList)
first9 n = intToList $ div n (10^((numDigits 10 n)-9))
-- The 9 least significant decimal digits of n, least-significant-first
last9 n = intToList $ mod n 1000000000
-- Number of base-b digits of n, via a divide-and-conquer integer log
numDigits b n = 1 + fst (ilog b n) where
  ilog b n
    | n < b = (0, n)
    | otherwise = let (e, r) = ilog (b*b) n
                  in if r < b then (2*e, r) else (2*e+1, r `div` b)
-- (high digits, low digits) of n; note intToList is
-- least-significant-first, so drop (len-9) yields the high end
firstLast n = (drop ((length list)-9) list ,take 9 list)
  where
    list = intToList n
-- True when the nine digits given are exactly 1..9
isPandigital ns = isGood $ sort ns
-- | A 2x2 integer matrix, stored row-major as a flat 4-tuple.
type FibMatr = (Integer,Integer,Integer,Integer)

-- | The Fibonacci step matrix [[1,1],[1,0]].
identityFibMatr :: FibMatr
identityFibMatr = (1, 1, 1, 0)

-- | Ordinary 2x2 matrix multiplication.
multiplyFibMatr :: FibMatr -> FibMatr -> FibMatr
multiplyFibMatr (a0, a1, a2, a3) (b0, b1, b2, b3) =
  ( a0 * b0 + a1 * b2
  , a0 * b1 + a1 * b3
  , a2 * b0 + a3 * b2
  , a2 * b1 + a3 * b3
  )

-- The step matrix raised to the n-th power by repeated squaring;
-- any n <= 1 yields the matrix itself.
getNthFibonacci_worker :: (Integral t) => t -> FibMatr
getNthFibonacci_worker n
  | n <= 1    = identityFibMatr
  | even n    = let half = getNthFibonacci_worker (div n 2)
                in multiplyFibMatr half half
  | otherwise = multiplyFibMatr identityFibMatr (getNthFibonacci_worker (n - 1))

-- | The bottom-right entry of the n-th power of the step matrix,
-- which is the (n-1)-th Fibonacci number (with F(1) = F(2) = 1).
getNthFibonacci :: (Integral t) => t -> Integer
getNthFibonacci n = bottomRight (getNthFibonacci_worker n)
  where
    bottomRight (_, _, _, el) = el
-- Project Euler 104: search upward from k for the first Fibonacci
-- index (per getNthFibonacci) whose number is 1-9 pandigital in both
-- its first and last nine digits; gives up past index 9000, returning
-- k unchanged.
solve104 k
  | k > 9000 = k
  | (isPandigital l) && (isPandigital f) = k
  | otherwise = solve104 (k+1)
  where
    p = getNthFibonacci k
    (f,l) = firstLast p
-- Run the Euler 104 search from index 1
main104 = do
  putStrLn (show $solve104 1)
-- Run Euler 99 against its data file
main99 = do
  s<- readFile "p099_base_exp.txt"
  putStrLn (show $solve99(lines s))
-- Scan a range of Fibonacci indices whose last nine digits are
-- pandigital, then report whether each one's first nine digits are too
main = do
  let l = [n |n <-[100000..200000], isPandigital (last9 (getNthFibonacci n)) ]
  putStrLn $ show $ map (\n -> isPandigital (first9 (getNthFibonacci n)) )l
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.ToolResults.Projects.Histories.Executions.Steps.GetPerfMetricsSummary
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves a PerfMetricsSummary. May return any of the following error
-- code(s): - NOT_FOUND - The specified PerfMetricsSummary does not exist
--
-- /See:/ <https://firebase.google.com/docs/test-lab/ Cloud Tool Results API Reference> for @toolresults.projects.histories.executions.steps.getPerfMetricsSummary@.
module Network.Google.Resource.ToolResults.Projects.Histories.Executions.Steps.GetPerfMetricsSummary
(
-- * REST Resource
ProjectsHistoriesExecutionsStepsGetPerfMetricsSummaryResource
-- * Creating a Request
, projectsHistoriesExecutionsStepsGetPerfMetricsSummary
, ProjectsHistoriesExecutionsStepsGetPerfMetricsSummary
-- * Request Lenses
, phesgpmsExecutionId
, phesgpmsStepId
, phesgpmsHistoryId
, phesgpmsProjectId
) where
import Network.Google.Prelude
import Network.Google.ToolResults.Types
-- | A resource alias for @toolresults.projects.histories.executions.steps.getPerfMetricsSummary@ method which the
-- 'ProjectsHistoriesExecutionsStepsGetPerfMetricsSummary' request conforms to.
--
-- NOTE: this module is auto-generated (see the module header); manual
-- edits will be lost on regeneration.
type ProjectsHistoriesExecutionsStepsGetPerfMetricsSummaryResource
     =
     "toolresults" :>
       "v1beta3" :>
         "projects" :>
           Capture "projectId" Text :>
             "histories" :>
               Capture "historyId" Text :>
                 "executions" :>
                   Capture "executionId" Text :>
                     "steps" :>
                       Capture "stepId" Text :>
                         "perfMetricsSummary" :>
                           QueryParam "alt" AltJSON :>
                             Get '[JSON] PerfMetricsSummary
-- | Retrieves a PerfMetricsSummary. May return any of the following error
-- code(s): - NOT_FOUND - The specified PerfMetricsSummary does not exist
--
-- /See:/ 'projectsHistoriesExecutionsStepsGetPerfMetricsSummary' smart constructor.
data ProjectsHistoriesExecutionsStepsGetPerfMetricsSummary =
ProjectsHistoriesExecutionsStepsGetPerfMetricsSummary'
{ _phesgpmsExecutionId :: !Text
, _phesgpmsStepId :: !Text
, _phesgpmsHistoryId :: !Text
, _phesgpmsProjectId :: !Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsHistoriesExecutionsStepsGetPerfMetricsSummary' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'phesgpmsExecutionId'
--
-- * 'phesgpmsStepId'
--
-- * 'phesgpmsHistoryId'
--
-- * 'phesgpmsProjectId'
projectsHistoriesExecutionsStepsGetPerfMetricsSummary
:: Text -- ^ 'phesgpmsExecutionId'
-> Text -- ^ 'phesgpmsStepId'
-> Text -- ^ 'phesgpmsHistoryId'
-> Text -- ^ 'phesgpmsProjectId'
-> ProjectsHistoriesExecutionsStepsGetPerfMetricsSummary
projectsHistoriesExecutionsStepsGetPerfMetricsSummary pPhesgpmsExecutionId_ pPhesgpmsStepId_ pPhesgpmsHistoryId_ pPhesgpmsProjectId_ =
ProjectsHistoriesExecutionsStepsGetPerfMetricsSummary'
{ _phesgpmsExecutionId = pPhesgpmsExecutionId_
, _phesgpmsStepId = pPhesgpmsStepId_
, _phesgpmsHistoryId = pPhesgpmsHistoryId_
, _phesgpmsProjectId = pPhesgpmsProjectId_
}
-- | A tool results execution ID.
phesgpmsExecutionId :: Lens' ProjectsHistoriesExecutionsStepsGetPerfMetricsSummary Text
phesgpmsExecutionId
= lens _phesgpmsExecutionId
(\ s a -> s{_phesgpmsExecutionId = a})
-- | A tool results step ID.
phesgpmsStepId :: Lens' ProjectsHistoriesExecutionsStepsGetPerfMetricsSummary Text
phesgpmsStepId
= lens _phesgpmsStepId
(\ s a -> s{_phesgpmsStepId = a})
-- | A tool results history ID.
phesgpmsHistoryId :: Lens' ProjectsHistoriesExecutionsStepsGetPerfMetricsSummary Text
phesgpmsHistoryId
= lens _phesgpmsHistoryId
(\ s a -> s{_phesgpmsHistoryId = a})
-- | The cloud project
phesgpmsProjectId :: Lens' ProjectsHistoriesExecutionsStepsGetPerfMetricsSummary Text
phesgpmsProjectId
= lens _phesgpmsProjectId
(\ s a -> s{_phesgpmsProjectId = a})
instance GoogleRequest
ProjectsHistoriesExecutionsStepsGetPerfMetricsSummary
where
type Rs
ProjectsHistoriesExecutionsStepsGetPerfMetricsSummary
= PerfMetricsSummary
type Scopes
ProjectsHistoriesExecutionsStepsGetPerfMetricsSummary
= '["https://www.googleapis.com/auth/cloud-platform"]
requestClient
ProjectsHistoriesExecutionsStepsGetPerfMetricsSummary'{..}
= go _phesgpmsProjectId _phesgpmsHistoryId
_phesgpmsExecutionId
_phesgpmsStepId
(Just AltJSON)
toolResultsService
where go
= buildClient
(Proxy ::
Proxy
ProjectsHistoriesExecutionsStepsGetPerfMetricsSummaryResource)
mempty
| brendanhay/gogol | gogol-toolresults/gen/Network/Google/Resource/ToolResults/Projects/Histories/Executions/Steps/GetPerfMetricsSummary.hs | mpl-2.0 | 5,690 | 0 | 19 | 1,253 | 548 | 326 | 222 | 101 | 1 |
import System.IO
main = do
todoItem <- getLine
appendFile "./Learn_You_a_Haskell_programs/todo.txt" (todoItem ++ "\n") | haroldcarr/learn-haskell-coq-ml-etc | haskell/book/2011-Learn_You_a_Haskell/appendtodo.hs | unlicense | 127 | 1 | 10 | 21 | 37 | 16 | 21 | 4 | 1 |
module Brainfuck.Pointer (Pointer) where
type Pointer = Int
| expede/brainfucker | src/Brainfuck/Pointer.hs | apache-2.0 | 61 | 0 | 4 | 9 | 17 | 11 | 6 | 2 | 0 |
module Log where
import Control.Applicative
import Control.Monad
-- Simplified version of State Monad
type Log = [String]
newtype Logger a = Logger { run :: (a, Log)}
-- data vs newtype?
instance (Show a) => Show (Logger a) where
show (Logger a) = show a
instance (Eq a) => Eq (Logger a) where
Logger (x,y) /= Logger (a,b) = (x /= a) || (y /= b)
-- Define an instance of Functor for Logger
-- | Functor action for 'Logger': transform the carried value while
-- leaving the accumulated log untouched.
logmap :: (a -> b) -> Logger a -> Logger b
logmap g (Logger (x, w)) = Logger (g x, w)
instance Functor Logger where
fmap = logmap
-- Define an instance of Applicative Functor for Logger
-- | Applicative application for 'Logger'.
--
-- The combined log lists the function's entries before the argument's.
-- This matches the ordering produced by the Monad instance, which is
-- required for the coherence law @('<*>') = 'ap'@ to hold. The previous
-- version appended the logs the other way around (@l ++ llf@), so
-- @mf '<*>' mx@ and @mf `ap` mx@ produced differently ordered logs.
lapp :: Logger (a -> b) -> Logger a -> Logger b
lapp lf lg =
  let (f, llf) = run lf
      (a, l) = run (logmap f lg)
  in Logger (a, llf ++ l)
instance Applicative Logger where
pure a = Logger (a,[])
(<*>) = lapp
-- Define the Logger Monad
-- class Applicative m => Monad m where
-- (>>=) :: m a -> (a -> m b) -> m b
-- >>= is usually the only thing you have to implement
-- Monadic Laws
-- Left identity:return a >>= f ≡ f a
-- f has the type (a -> m b) so it returns a monad
-- this means that the minimal context to return
-- is just applying f to a
-- Right identity: m >>= return≡ m
-- When we feed monadic values to functions by using >>=,
-- those functions take normal values and return monadic ones.
-- return is also one such function, if you consider its type.
-- Associativity: (m >>= f) >>= g ≡ m >>= (\x -> f x >>= g)
-- All monads are Applicative even though
-- it is not explicitly required, because of historical
-- haskell impl
instance Monad Logger where
return = pure
m >>= f = let (a, w) = run m
n = f a
(b, x) = run n
in Logger (b, w ++ x)
-- differently from State, Logger takes only one parameter, i.e.,
-- we can write a non-parametric monad implementation
-- Define a function that takes a number, add one and log the operation
logPlusOne :: (Num a) => a -> Logger a
logPlusOne a = Logger (a+1, ["Add One"])
-- Define a function that takes a number, doubles it and log the operation
logMultiplyTwo :: (Num a) => a -> Logger a
logMultiplyTwo a = Logger (a*2, ["Multiply Two"])
-- Using DO notation (gluing together monadic values)
-- let l = Logger (42, ["The Answer"])
-- Error: logMultiplyTwo l
-- Works: l >>= logMultiplyTwo
-- logOpsLambda = (\lg -> lg >>= logPlusOne >>= logMultiplyTwo )
-- Define a function that takes a logger, adds one, doubles the value
-- and logs all the operations
-- | Add one to, then double, the number held in a 'Logger', recording
-- both operations. Written as an explicit bind chain; the original
-- do-block desugars to exactly this (the trailing @return m2@ is
-- absorbed by the right-identity monad law).
logOps :: (Num a) => Logger a -> Logger a
logOps lg = lg >>= logPlusOne >>= logMultiplyTwo
-- Define a record function to record things in the log
record :: String -> Logger ()
record s = Logger ((), [s])
-- Binary tree
data Tree a = EmptyTree | Node a (Tree a) (Tree a) deriving (Read, Eq)
-- | Right fold over the binary tree in in-order traversal order:
-- the accumulator is threaded through the right subtree first, then
-- the node value, then the left subtree — so the leftmost element is
-- the outermost application, as with 'foldr' on the in-order list.
treeFoldr :: (b -> a -> a) -> a -> Tree b -> a
treeFoldr f acc EmptyTree = acc
treeFoldr f acc (Node b left right) = treeFoldr f (f b (treeFoldr f acc right)) left
-- We want to add logging to our tree, so define singletonM, treeInsertM and treeSumM
-- that logs the operations performed on the tree during execution
singletonM :: (Show a) => a -> Logger (Tree a)
singletonM x = do
record ("Created singleton " ++ show x) -- monad hides record >>
return (Node x EmptyTree EmptyTree) -- return the Logger of tree
-- let nums = [8,6,4,1,7,3,5]
-- :t foldM
-- let numsTree = foldM treeInsertM EmptyTree nums
-- (foldM because treeInsertM returns a monad instead of the standard data structure)
treeInsertM :: (Ord a, Show a) => Tree a -> a -> a -> Logger (Tree a)
treeInsertM EmptyTree x y = return EmptyTree
treeInsertM (Node a left right) x y
| x == a = do
l <- treeInsertM left x y
r <- treeInsertM right x y
record ("Inserted " ++ show y)
return (Node y l r)
| otherwise = do
l <- treeInsertM left x y
r <- treeInsertM right x y
return (Node a l r)
createTreeM :: (Integral a, Ord a, Show a) => a -> Logger (Tree a)
createTreeM 0 = singletonM 0
createTreeM n = do
l <- createTreeM (div n 2)
r <- createTreeM ((n - (div n 2))-1)
record ("add node " ++ show n)
return (Node n l r)
-- HOMEWORKS
-- Write a function that sums over the element of the tree (Hint: fold for a tree)
treeSumM :: (Show a, Num a) => Logger (Tree a) -> Logger a
treeSumM t = do
v <- fmap (treeFoldr (+) 0) t
record ("Summed " ++ show v)
return v
-- Define a function that logs if the tree is balanced
-- Hint: define an andM function to do "logical and"(&)
-- on your monad
-- | Logical conjunction of two logged booleans; the logs are
-- concatenated left to right by the Logger bind.
andM :: Logger Bool -> Logger Bool -> Logger Bool
andM ma mb = ma >>= \p -> mb >>= \q -> return (p && q)
-- | Check whether a tree is balanced, logging every decision.
-- The notion of balance used here is strict: a node counts as balanced
-- only when both children are empty or both are non-empty (recursively).
-- NOTE(review): this rejects trees where exactly one child is empty,
-- even though their height difference is only 1 — confirm this is the
-- intended definition for the homework.
treeBalancedM :: Tree a -> Logger Bool
treeBalancedM EmptyTree = do
  record "An empty tree is always balanced"
  return True
treeBalancedM (Node _ EmptyTree EmptyTree) = do
  record "A single node tree is always balanced"
  return True
treeBalancedM (Node _ EmptyTree _) = do
  record "Unbalanced!"
  return False
treeBalancedM (Node _ _ EmptyTree) = do
  record "Unbalanced!"
  return False
treeBalancedM (Node _ left right) = andM (treeBalancedM left) (treeBalancedM right)
logPlus1 :: (Num a) => a -> Logger a
logPlus1 a = Logger (a+1, ["+1"])
multiplyBy2 :: (Num a) => a -> Logger a
multiplyBy2 a = Logger (a*2, ["*2"])
main logger = do
v <- logger
p1 <- logPlus1 v
m2 <- multiplyBy2 p1
return m2
main2 logger =
logger >>= (\v -> logPlus1 v >>=
(\p1 -> multiplyBy2 p1 >>=
(\m2 -> return m2)))
| riccardotommasini/plp16 | haskell/prepared/ESE20181213/log.hs | apache-2.0 | 5,895 | 14 | 15 | 1,564 | 1,719 | 878 | 841 | -1 | -1 |
-- Let's put it all together
module Wholemeal where
-- apparently this is bad Haskell style
-- | Sum 7*x + 2 over the elements greater than 3, kept in the
-- explicitly recursive ("pointed") style that 'foobar'' improves on.
foobar :: [Integer] -> Integer
foobar [] = 0
foobar (x:xs) =
  let rest = foobar xs
  in if x > 3 then 7 * x + 2 + rest else rest
-- we can do the above, as a series of functions
-- to create a single partial function
-- | Wholemeal equivalent of 'foobar': filter, transform, then sum.
foobar' :: [Integer] -> Integer
foobar' = sum . map (\x -> 7 * x + 2) . filter (> 3)
| markmandel/cis194 | src/week2/lecture/Wholemeal.hs | apache-2.0 | 391 | 0 | 10 | 94 | 135 | 74 | 61 | 8 | 1 |
module Emulator.CPU.Instructions.Types where
import Emulator.CPU
import Emulator.Types
import Data.Int
import GHC.Read
data CPUMode = ARM | THUMB
deriving (Show, Read, Eq, Ord)
type ARM = 'ARM
type THUMB = 'THUMB
newtype SetCondition = SetCondition Bool
deriving (Show, Read, Eq, Ord)
data LoadStore = Load | Store
deriving (Show, Read, Eq, Ord)
data PrePost = Pre | Post
deriving (Show, Read, Eq, Ord)
data OffsetDirection = Up | Down
deriving (Show, Read, Eq, Ord)
data AddSub = Add | Subtract
deriving (Show, Read, Eq, Ord)
data LowHigh = Low | High
deriving (Show, Read, Eq, Ord)
data GranularitySize = Lower | Full
deriving (Show, Read, Eq, Ord)
data Granularity a where
Byte :: Granularity a
Word :: Granularity 'Full
HalfWord :: Granularity 'Lower
deriving instance Show (Granularity a)
deriving instance Eq (Granularity a)
deriving instance Ord (Granularity a)
instance Read (Granularity 'Full) where
readPrec = parens $ choose
[ ("Word", return Word)
, ("Byte", return Byte) ]
readList = readListDefault
readListPrec = readListPrecDefault
instance Read (Granularity 'Lower) where
readPrec = parens $ choose
[ ("HalfWord", return HalfWord)
, ("Byte", return Byte) ]
readList = readListDefault
readListPrec = readListPrecDefault
newtype Immediate = Immediate Bool
deriving (Show, Read, Eq, Ord)
data BaseSource = SP | PC
deriving (Show, Read, Eq, Ord)
newtype Link = Link Bool
deriving (Show, Read, Eq, Ord)
type WriteBack = Bool
type Signed = Bool
type Accumulate = Bool
type HighReg = Bool
type SignExtended = Bool
type StoreLR = Bool
type BranchOffset = Int32
type Offset = MWord
type Value = Int -- Value is used for signed values in instructions
type ForceUserMode = Bool
type RegisterList = [RegisterName]
| intolerable/GroupProject | src/Emulator/CPU/Instructions/Types.hs | bsd-2-clause | 1,794 | 0 | 10 | 346 | 626 | 355 | 271 | -1 | -1 |
module RomanNumerals where
-- | Convert a positive integer to its Roman numeral representation.
-- Greedy algorithm: repeatedly subtract the largest numeral value that
-- still fits, appending its symbol, until the number is exhausted.
-- Non-positive inputs yield the empty string, as before.
solution :: Integer -> String
solution = go numerals
  where
    -- Value/symbol pairs in descending order, including the
    -- subtractive forms (CM, CD, XC, XL, IX, IV).
    numerals :: [(Integer, String)]
    numerals =
      [ (1000, "M"), (900, "CM"), (500, "D"), (400, "CD")
      , (100, "C"), (90, "XC"), (50, "L"), (40, "XL")
      , (10, "X"), (9, "IX"), (5, "V"), (4, "IV"), (1, "I") ]

    go _ 0 = ""
    go [] _ = ""  -- reached only for non-positive input
    go ps@((v, sym):rest) n
      | n >= v    = sym ++ go ps (n - v)
      | otherwise = go rest n
| lisphacker/codewars | RomanNumerals.hs | bsd-2-clause | 636 | 0 | 9 | 212 | 402 | 195 | 207 | 17 | 1 |
module Drasil.Projectile.TMods (tMods, accelerationTM, velocityTM) where
import Language.Drasil
import Theory.Drasil (TheoryModel, tm)
import Data.Drasil.Quantities.Physics (acceleration, position, time, velocity)
import Drasil.Projectile.References (accelerationWiki, velocityWiki, hibbeler2004)
tMods :: [TheoryModel]
tMods = [accelerationTM, velocityTM]
accelerationTM :: TheoryModel
accelerationTM = tm (cw accelerationRC)
[qw acceleration, qw velocity, qw time] ([] :: [ConceptChunk]) [] [accelerationRel] []
[makeCite accelerationWiki, makeCiteInfo hibbeler2004 $ Page [7]] "acceleration" []
accelerationRC :: RelationConcept
accelerationRC = makeRC "accelerationRC" (cn' "acceleration") EmptyS accelerationRel
accelerationRel :: Relation
accelerationRel = sy acceleration $= deriv (sy velocity) time
----------
velocityTM :: TheoryModel
velocityTM = tm (cw velocityRC)
[qw velocity, qw position, qw time] ([] :: [ConceptChunk]) [] [velocityRel] []
[makeCite velocityWiki, makeCiteInfo hibbeler2004 $ Page [6]] "velocity" []
velocityRC :: RelationConcept
velocityRC = makeRC "velocityRC" (cn' "velocity") EmptyS velocityRel
velocityRel :: Relation
velocityRel = sy velocity $= deriv (sy position) time
| JacquesCarette/literate-scientific-software | code/drasil-example/Drasil/Projectile/TMods.hs | bsd-2-clause | 1,226 | 0 | 9 | 153 | 390 | 215 | 175 | 23 | 1 |
-- |
-- Thorn, Datatype Manipulation with Template Haskell.
module Data.Thorn (
-- * Functors
module Data.Thorn.Functor
-- * Folding and Unfolding
, module Data.Thorn.Fold
-- * Zippers
, module Data.Thorn.Zipper
-- * Basic
, module Data.Thorn.Basic
) where
import Data.Thorn.Functor
import Data.Thorn.Fold
import Data.Thorn.Zipper
import Data.Thorn.Basic
| Kinokkory/thorn | Data/Thorn.hs | bsd-3-clause | 386 | 0 | 5 | 78 | 66 | 47 | 19 | 9 | 0 |
module Flows
( Flow (..)
, FlowGroup
, User
, Host
, Port
, intersection
, null
, all
, isSubFlow
, isOverlapped
, make
, simple
, toMatch
, toMatch'
, flowSwitchMatch
, fromSwitchMatch
, fromMatch
) where
import Prelude hiding (null, all)
import Data.Word (Word16)
import Set (Set)
import qualified Set as Set
import Nettle.IPv4.IPAddress
import qualified Nettle.IPv4.IPAddress as IPAddress
import Nettle.OpenFlow.Match
import qualified Nettle.OpenFlow.Match as Match
import qualified Nettle.OpenFlow as OF
-- TODO(adf): We have lost the richness of the original FlowGroups (from
-- Hot-ICE paper)... which supports sets of users, sets of srcIP, etc. We
-- should return to supporting those by implementing Flows as *sets* of
-- Match rules with duplicated actions.
-- TODO(adf): Might also be able to return "union" operation to Flows at
-- that time.
-- TODO(adf): At one time, we used to consider parsing "Applications" which
-- expanded into sets of parts, or "Network" which expanded into a set of
-- IP addresses (maybe should be CIDR blocks?), or "Group" which was a set
-- of users.
type User = String
type Port = Word16 -- Transport port (UDP,TCP)
type Host = IPAddress
data Flow = Flow (Maybe User) (Maybe User)
(Maybe Port) (Maybe Port)
(Maybe Host) (Maybe Host)
deriving (Eq, Ord, Show)
data FlowGroup
= FlowMatch (Maybe OF.SwitchID) Match
| Empty
deriving (Ord, Eq, Show)
-- | Extract the OpenFlow match of a flow group, if it has one.
toMatch :: FlowGroup -> Maybe Match
toMatch fg = case fg of
  FlowMatch _ m -> Just m
  Empty         -> Nothing
-- | Helper function for unit testing: partial variant of 'toMatch'
-- that fails loudly on flow groups without a match.
toMatch' :: FlowGroup -> Match
toMatch' f = case toMatch f of
  Just m -> m
  -- the error message previously blamed 'toMatch'; name the real caller
  Nothing -> error "Flows.toMatch' invalid argument"
flowSwitchMatch :: FlowGroup -> Maybe (Maybe OF.SwitchID, OF.Match)
flowSwitchMatch (FlowMatch sw match) = Just (sw, match)
flowSwitchMatch _ = Nothing
fromSwitchMatch :: OF.SwitchID -> OF.Match -> FlowGroup
fromSwitchMatch sid m = FlowMatch (Just sid) m
fromMatch :: OF.Match -> FlowGroup
fromMatch m = FlowMatch Nothing m
-- TODO(adf): UNUSED -- For future, generalized version of flow groups
-- | Component-wise subset test on generalized flows. A 'Nothing'
-- field on the right is a wildcard, so it contains any value; two
-- concrete values are contained only when equal.
isSubFlow' :: Flow -> Flow -> Bool
isSubFlow' (Flow su du sp dp sh dh) (Flow su' du' sp' dp' sh' dh') =
  sub su su' && sub du du' && sub sp sp' &&
  sub dp dp' && sub sh sh' && sub dh dh'
  where
    sub _        Nothing  = True   -- wildcard contains anything
    sub (Just x) (Just y) = x == y
    sub Nothing  (Just _) = False  -- a wildcard is never inside a concrete value
all = FlowMatch Nothing matchAny
-- | Helper function for easy unit testing
simple :: Maybe Host -> Maybe Port -> Maybe Host -> Maybe Port -> FlowGroup
simple sh sp dh dp =
let mHost h = case h of
Nothing -> (IPAddress 0, 0)
Just ip -> (ip, maxPrefixLen)
in FlowMatch Nothing (matchAny { srcIPAddress = mHost sh,
dstIPAddress = mHost dh,
srcTransportPort = sp,
dstTransportPort = dp })
-- TODO(adf): UNUSED -- For future, generalized version which we don't know
-- how to actually use/create yet....
make :: Set User -> Set User -> Set Port -> Set Port -> Set Host -> Set Host
-> FlowGroup
make su du sp dp sh dh =
let port set = case Set.toList set of
Just [x] -> Just (Just x)
Nothing -> Just Nothing
otherwise -> Nothing
ip set = case Set.toList set of
Just [x] -> Just (x, maxPrefixLen)
Nothing -> Just (IPAddress 0,0)
otherwise -> Nothing
-- TODO(arjun): srcUser and dstUser are ignored
makeMatch _ _ sp dp sh dh = do
sp' <- port sp
dp' <- port dp
sh' <- ip dh
dh' <- ip dh
return $ matchAny { srcIPAddress = sh', dstIPAddress = dh',
srcTransportPort = sp', dstTransportPort = dp' }
in case makeMatch su du sp dp sh dh of
Just m -> FlowMatch Nothing m
Nothing -> error "flow group unsupported"
-- | Intersect two flow groups. Anything intersected with 'Empty' is
-- 'Empty'. For match-based groups, a missing switch acts as a
-- wildcard; two concrete switches must coincide, otherwise the
-- intersection is empty. The match parts are intersected with
-- 'Match.intersect', which yields 'Nothing' on disjoint matches.
intersection :: FlowGroup -> FlowGroup -> FlowGroup
intersection _ Empty = Empty
intersection Empty _ = Empty
intersection (FlowMatch sw1 m1) (FlowMatch sw2 m2) =
  let sw' = case (sw1, sw2) of
        (Just s, Just s') -> if s == s' then Just (Just s) else Nothing
        (Just s, Nothing) -> Just (Just s)
        (Nothing, Just s') -> Just (Just s')
        (Nothing, Nothing) -> Just Nothing
      m' = Match.intersect m1 m2
  in case (sw', m') of
       (Just s, Just m) -> FlowMatch s m
       -- NOTE(review): 'otherwise' here is just a wildcard pattern
       -- binding, not the Prelude guard.
       otherwise -> Empty
-- | True exactly for the empty flow group.
null :: FlowGroup -> Bool
null fg = case fg of
  Empty         -> True
  FlowMatch _ _ -> False
isOverlapped :: FlowGroup -> FlowGroup -> Bool
isOverlapped f1 f2 = not (null (intersection f1 f2))
-- | Decide containment of flow groups. 'Empty' is contained in
-- everything; nothing other than 'Empty' is contained in 'Empty'.
-- A match-based group is contained in another only when the switches
-- agree (a missing switch on the right is a wildcard) and the left
-- match is a subset of the right one.
isSubFlow :: FlowGroup -> FlowGroup -> Bool
isSubFlow Empty _ = True
isSubFlow _ Empty = False
isSubFlow (FlowMatch sw1 m1) (FlowMatch sw2 m2) =
  let swContained = case (sw1, sw2) of
        (_, Nothing) -> True -- Remember: Nothing is the wildcard here
        (Just s, Just s') -> s == s'
        (Nothing, Just _) -> False
  in swContained && Match.subset m1 m2
| brownsys/pane | src/Flows.hs | bsd-3-clause | 5,237 | 0 | 17 | 1,519 | 1,556 | 820 | 736 | 117 | 6 |
-- | This module provides common functionality for abstract states.
module Jat.PState.Fun
(
mkInstance
, mkAbsInstance
, mergeStates
, Correlation (..)
, mergeStates'
, mkGetField
, mkPutField
, pState2TRS
, instruction
, isTerminal
, isSimilar
{-, isBackJump-}
, isTarget
, maybePutField
, mapAnnotations
, mapValues
, mapValuesFS
, substitute
, rpaths
, rpathValue
, rcommonPrefix
, rmaxPrefix
, valueV
, typeV
, reachableV
, programLocation
, tyOf
, refKindOf
)
where
import qualified Jat.Constraints as PA
import Jat.PState.AbstrValue
import Jat.PState.Data
import Jat.PState.Frame
import Jat.PState.Object
import Jat.PState.IntDomain.Data
import Jat.PState.MemoryModel.Data
import Jat.PState.Heap
import Jat.PState.Step
import Jat.PState.AbstrDomain as AD
import Jat.JatM
import qualified Jinja.Program as P
import Jat.Utils.Pretty
import Data.Char (toLower)
import qualified Data.Array as A
import Data.List (inits)
import qualified Data.Map as M
import Data.Maybe (fromJust)
import Control.Monad.State
--import Debug.Trace
-- | Returns a (concrete) 'Object' of the given class.
-- Every field reported by 'P.hasFields' for the class is initialised
-- to the default value of its declared type.
mkInstance :: IntDomain i => P.Program -> P.ClassId -> Object i
mkInstance p cn = Instance cn (mkFt . initfds $ fds)
  where
    -- (field name, declaring class, field type) triples of class cn
    fds = P.hasFields p cn
    -- reorder to (class, field) keys and attach the type's default value
    initfds = map (\(lfn,lcn,ltp) -> (lcn,lfn,defaultValue ltp))
    -- fold the triples into a field table
    mkFt = foldl (flip $ curry3 updateFT) emptyFT
    curry3 f (a,b,c) = f a b c
-- | Constructs an instance with its field set to be abstract values.
-- Returns the heap (including) the new instance and the instance itself.
mkAbsInstance :: (Monad m, IntDomain i) => Heap i -> Address -> P.ClassId -> JatM m (Heap i, Object i)
mkAbsInstance hp adr cn = do
p <- getProgram
(hp1,ifds) <- initfds $ P.hasFields p cn
let obt = mkObt cn ifds
hp2 = updateH adr obt hp1
return (hp2,obt)
where
initfds = initfds' (return (hp,[]))
initfds' m [] = m
initfds' m ((ln1,cn1,tp1):fds) = do
(hp1,ifds) <- m
(hp2,v) <- defaultAbstrValue hp1 tp1
initfds' (return (hp2, (cn1,ln1,v):ifds)) fds
mkObt cn1 fds = Instance cn1 (mkFt fds)
mkFt = foldl (flip $ curry3 updateFT) emptyFT
curry3 f (a,b,c) = f a b c
defaultAbstrValue :: (IntDomain i) => Monad m => Heap i -> P.Type -> JatM m (Heap i, AbstrValue i)
defaultAbstrValue hp1 (P.BoolType) = do {v <- AD.top; return (hp1,BoolVal v)}
defaultAbstrValue hp1 (P.IntType) = do {v <- AD.top; return (hp1,IntVal v)}
defaultAbstrValue hp1 (P.RefType cn1) = return (hp2, RefVal r)
where (r, hp2) = insertHA (AbsVar cn1) hp1
defaultAbstrValue _ _ = error "Jat.PState.Fun.mkAbsInstance: unexpected type."
data Correlation = C Address Address deriving (Show,Eq,Ord)
correlation :: Address -> Address -> Correlation
correlation = C
data Corre i = Corr {unCorr :: M.Map Correlation Address, unHeap:: Heap i}
mergeStates :: (Monad m, MemoryModel a, IntDomain i) => P.Program -> PState i a -> PState i a -> a -> JatM m (PState i a)
mergeStates p s1 s2 a = fst `liftM` mergeStates' p s1 s2 a
-- | Given two states, builds a new states with maximal common paths and maximal common aliasing.
mergeStates' :: (Monad m, MemoryModel a, IntDomain i) => P.Program -> PState i a -> PState i a -> a -> JatM m (PState i a, M.Map Correlation Address)
mergeStates' _ st1 st2 _ | not (isSimilar st1 st2) = error "Jat.PState.Fun.mergeStates: non-similar states."
mergeStates' p (PState hp1 frms1 _) (PState hp2 frms2 _) ann = do
(st,frms3) <- wideningFs Corr{unCorr=M.empty, unHeap=emptyH} frms1 frms2
return (PState (unHeap st) frms3 ann, unCorr st)
where
wideningF st (Frame loc1 stk1 cn mn pc) (Frame loc2 stk2 _ _ _) = do
(st1,loc3) <- joinValM st loc1 loc2
(st2,stk3) <- joinValM st1 stk1 stk2
return (st2,Frame loc3 stk3 cn mn pc)
wideningFs st [] [] = return (st,[])
wideningFs st (f1:fs1) (f2:fs2) = do
(st1,f3) <- wideningF st f1 f2
(st2,fs3) <- wideningFs st1 fs1 fs2
return (st2,f3:fs3)
wideningFs _ _ _ = error "unexpected case"
--joinVal _ i j | trace (show (pretty i<> pretty j)) False = undefined
joinVal st (IntVal i) (IntVal j) = do
k <- i `AD.lub` j
return (st, IntVal k)
joinVal st (BoolVal a) (BoolVal b) = do
c <- a `AD.lub` b
return (st, BoolVal c)
joinVal st (RefVal q) (RefVal r) =
case correlation q r `M.lookup` unCorr st of
Just a -> return (st,RefVal a)
Nothing -> do
let (a,hp') = insertHA undefined (unHeap st)
cors = M.insert (correlation q r) a (unCorr st)
st' = st{unCorr=cors,unHeap = hp'}
st'' <- joinObject st' a (lookupH q hp1) (lookupH r hp2)
return (st'',RefVal a)
joinVal st Null Null = return (st,Null)
joinVal st Unit Null = return (st,Null)
joinVal st Null (IntVal _) = (,) st `liftM` IntVal `liftM` top
joinVal st Null (BoolVal _) = (,) st `liftM` BoolVal `liftM` top
joinVal st Null (RefVal r) = do
let (a,heapt) = insertHA (AbsVar . className $ lookupH r hp2) (unHeap st)
return (st{unHeap=heapt},RefVal a)
joinVal st (RefVal r) Null = do
let (a,heap') = insertHA (AbsVar . className $ lookupH r hp1) (unHeap st)
return (st{unHeap=heap'},RefVal a)
joinVal st Null Unit = return (st, Null)
joinVal st v Null = joinVal st Null v
joinVal st Unit (IntVal _) = (,) st `liftM` IntVal `liftM` top
joinVal st Unit (BoolVal _) = (,) st `liftM` BoolVal `liftM` top
joinVal st Unit (RefVal r) = joinVal st Null (RefVal r)
joinVal st (RefVal r) Unit = joinVal st (RefVal r) Null
joinVal st Unit Unit = return (st, Null)
joinVal st v Unit = joinVal st Unit v
joinVal _ _ _ = error "unexpected case."
joinObject st a (Instance cn ft) (Instance cn' ft') | cn == cn' = do
(st',vs) <- joinValM st (M.elems ft) (M.elems ft')
let ft'' = M.fromList $ zip (M.keys ft) vs
hp' = updateH a (Instance cn ft'') (unHeap st')
return st'{unHeap=hp'}
joinObject st a (Instance cn _) (Instance cn' _) = addAbsVar st a cn cn'
joinObject st a (AbsVar cn) (Instance cn' _) = addAbsVar st a cn cn'
joinObject st a (Instance cn _) (AbsVar cn') = addAbsVar st a cn cn'
joinObject st a (AbsVar cn) (AbsVar cn') = addAbsVar st a cn cn'
addAbsVar st a cn cn' = do
let dn = P.theLeastCommonSupClass p cn cn'
hp'= updateH a (AbsVar dn) (unHeap st)
return st{unHeap=hp'}
--joinValMS st [] [] = return (st,[])
--joinValMS st (l:ls) (l':ls') = do
--(st',l'') <- joinValM st l l'
--(d,e) <- joinValMS st' ls ls'
--return (d,l'':e)
--joinValMS _ _ _ = error "Jat.PState.Fun.mergeStates: unexpected case."
joinValM st [] [] = return (st,[])
joinValM st (v:vs) (v':vs') = do
(st',v'') <- joinVal st v v'
(d,e) <- joinValM st' vs vs'
return (d, v'':e)
joinValM _ _ _ = error "Jat.PState.Funl.mergeStates: unexpected case: illegal fieldtable"
mergeStates' _ _ _ _ = error "Jat.PState.Fun.mergeStates: unexpected case."
-- | Performs Getfield, assuming it is valid to perform.
mkGetField :: (MemoryModel a, IntDomain i) => PState i a -> P.ClassId -> P.FieldId -> PStep (PState i a)
mkGetField (PState _ (Frame _ (Null:_) _ _ _ :_) _) _ _ = topEvaluation $ EState NullPointerException
mkGetField (PState hp (Frame loc (RefVal adr:stk) cn1 mn pc :frms) us) cn2 fn =
case lookupH adr hp of
AbsVar _ -> error "Jat.PState.Fun.mkGetField: unexpected case."
Instance _ ft -> let stk' = lookupFT cn2 fn ft :stk
in topEvaluation (PState hp (Frame loc stk' cn1 mn (pc+1) :frms) us)
mkGetField _ _ _ = error "Jat.PState.Fun.mkGetField: unexpected case"
-- | Performs Putfield, assuming it is valid to perform.
mkPutField :: (Monad m, IntDomain i, MemoryModel a) => a -> PState i a -> P.ClassId -> P.FieldId -> JatM m (PStep (PState i a))
mkPutField us2 st@(PState hp (Frame loc fstk fcn mn pc :frms) us1) cn fn =
return $ case opstk $ frame st of
_ : Null : _ -> topEvaluation $ EState NullPointerException
v@(RefVal _) : RefVal o1 : _ -> topEvaluation $ mkPut v o1 us2
v : RefVal o1 : _ -> topEvaluation $ mkPut v o1 us1
_ -> error "Jat.PState.Fun.putField: unexpected case."
where
mkPut v o1 uso = case lookupH o1 hp of
AbsVar _ -> error "Jat.PState.Fun.mkPutField: unexpected case."
Instance cno fto ->
let (_:_:stk) = fstk
obt = Instance cno (updateFT cn fn v fto)
hp' = updateH o1 obt hp
in PState hp' (Frame loc stk fcn mn (pc+1):frms) uso
mkPutField _ _ _ _ = error "Jat.PState.Fun.mkPutField: unexpected case."
-- | Returns the cTRS representation of a state, given functions for checking cyclicity and joinability.
pState2TRS :: (Monad m, IntDomain i, Pretty a) =>
(Address -> Bool)
-> (Address -> Address -> Bool)
-> Maybe Address
-> Side
-> PState i a -> Int -> JatM m PA.PATerm
pState2TRS isSpecial maybeReaches m side (PState hp frms _) k = do
key <- nextVarIdx
return $ PA.ufun ('f':show k) $ evalState (mapM tval (concatMap elemsF frms)) key
{-return $ PA.ufun ('f':show k) $ evalState (return []) [key..]-}
where
fresh :: State Int Int
fresh = modify succ >> get
nullterm = PA.ufun "null" []
var cn = PA.uvar (map toLower cn)
tval :: AbstrDomain i b => AbstrValue i -> State Int PA.PATerm
tval Null = return nullterm
tval Unit = return nullterm
tval (RefVal r) = taddr r
tval (BoolVal b) = return $ atom b
tval (IntVal i) = return $ atom i
{-taddr r | trace (">> taddr" ++ show r ++ show st) False = undefined-}
taddr r = case m of
Just q -> taddrStar q r
Nothing -> taddr' r
taddr' r | isSpecial r = do
key <- fresh
let cn = className $ lookupH r hp
return $ var ('c':show side ++ showcn cn) key
taddr' r =
case lookupH r hp of
AbsVar cn -> return $ var (showcn cn) r
Instance cn ft -> PA.ufun (showcn cn) `liftM` mapM tval (elemsFT ft)
{-taddrStar q r | trace (">> taddrStar" ++ show (q,r,maybeReaches r q, st)) False = undefined-}
taddrStar q r
| maybeReaches r q = do
key <- fresh
let cn = className $ lookupH r hp
return $ var ('x':show side ++ showcn cn) key
| otherwise = taddr' r
showcn = show . pretty
pState2TRS _ _ _ _ (EState ex) _ = return $ PA.ufun (show ex) []
-- | Returns current instruction.
instruction :: P.Program -> PState i a -> P.Instruction
instruction p (PState _ (Frame _ _ cn mn pc :_) _) = P.instruction p cn mn pc
-- | A state is terminal when it has no frames left to execute, or
-- when it is an exceptional state.
isTerminal :: PState i a -> Bool
isTerminal st = case st of
  PState _ frms _ -> null frms
  EState _        -> True
-- | Checks if two states are similar.
-- Two states are similar if the class name, field name and program counter of
-- its frames correspond.
isSimilar :: PState i a -> PState i a -> Bool
isSimilar (PState _ frms1 _) (PState _ frms2 _) = isSimilarFS frms1 frms2
where
isSimilarFS (f1:fs1) (f2:fs2) = isSimilarF f1 f2 && isSimilarFS fs1 fs2
isSimilarFS [][] = True
isSimilarFS _ _ = False
isSimilarF (Frame loc1 stk1 cn1 mn1 pc1) (Frame loc2 stk2 cn2 mn2 pc2) =
cn1 == cn2 && mn1 == mn2 && pc1 == pc2
&& length loc1 == length loc2 && length stk1 == length stk2
isSimilar _ _ = False
{-isBackJump :: Monad m => PState i a -> JatM m Bool -}
{-isBackJump (PState _ (Frame _ _ cn mn pc:_) _) = getProgram >>= \p -> return $ P.isBackJump p cn mn pc-}
{-isBackJump _ = return False-}
-- | Checks if current state is target for a backjump.
isTarget :: Monad m => PState i a -> JatM m Bool
isTarget (PState _ (Frame _ _ cn mn pc:_) _) = do
p <- getProgram
return $ pc `elem` [ pc'+i | (pc', P.Goto i) <- A.assocs $ P.instructions p cn mn, i < 0]
isTarget _ = return False
-- | Checks the instruction of the current state is a PutField instruction.
maybePutField :: P.Program -> PState i a -> Maybe Address
maybePutField p (PState _ (Frame _ (_:RefVal q:_) cn mn pc:_) _) =
case P.instruction p cn mn pc of
P.PutField _ _ -> Just q
_ -> Nothing
maybePutField _ _ = Nothing
-- | Applies a function to the annotation of a non-exceptional state;
-- exceptional states pass through unchanged.
mapAnnotations :: MemoryModel a => (a -> a) -> PState i a -> PState i a
mapAnnotations g st = case st of
  PState hp frms ann -> PState hp frms (g ann)
  _                  -> st
-- | Maps a value function over the frames.
mapValuesFS :: (AbstrValue i -> AbstrValue i) -> [Frame i] -> [Frame i]
mapValuesFS f = map (mapValuesF f)
-- | Maps a value function over the state.
mapValues :: (AbstrValue i -> AbstrValue i) -> PState i a -> PState i a
mapValues f (PState hp frms ann) = PState (mapValuesH f hp) (map (mapValuesF f) frms) ann
mapValues _ st = st
-- | Substitutes a value in a state with another one.
-- Removes the entry in the heap if it the substituted value was a 'RefVal'.
substitute :: Eq i => AbstrValue i -> AbstrValue i -> PState i a -> PState i a
substitute v1@(RefVal adr1) v2 st =
let PState hp frms ann = mapValues (\v -> if v == v1 then v2 else v) st
in PState (deallocate adr1 hp) frms ann
substitute v1 v2 st = mapValues (\v -> if v == v1 then v2 else v) st
-- paths
-- TODO: computation should return (value,path) pairs.
-- | Returns rooted paths 'RPath' from a state.
rpaths :: PState i a -> [RPath]
rpaths (PState hp frms _) =
concatMap rpahts' $ zip [0..] frms
where
rpahts' (m, Frame loc stk _ _ _ ) =
let nloc = zip [0..] (elemsL loc)
nstk = zip [0..] (elemsS stk)
in concatMap (locpath m) nloc ++ concatMap (stkpath m) nstk
locpath m (n,v) = RPath (RLoc m n) `map` valpath v
stkpath m (n,v) = RPath (RStk m n) `map` valpath v
valpath (RefVal q) = paths q hp
valpath _ = [[]]
rpaths (EState _) = []
-- | Computes the value at the end of an 'RPath' in a state.
-- Resolves the root (local variable or stack slot of frame m), then, if
-- the root is a reference, follows the remaining path through the heap.
-- NOTE(review): the (!!) and lookups assume the frame/slot indices are in
-- range — paths are expected to come from 'rpaths' of the same state.
rpathValue :: (IntDomain i) => RPath -> PState i a -> AbstrValue i
rpathValue rpath st =
  let (val,path) =
        case rpath of
          RPath (RLoc m n) lpath -> (lookupL n . locals $ frames st !! m,lpath)
          RPath (RStk m n) spath -> (lookupS n . opstk $ frames st !! m,spath)
  in case val of
       RefVal q -> pathValue q path (heap st)
       _        -> val
-- | Checks whether two 'RPath's have the same root.
rcommonRoot :: RPath -> RPath -> Bool
rcommonRoot (RPath r1 _) (RPath r2 _) = r1 == r2
-- | Computes the common prefix of two paths if they share a root,
-- otherwise Nothing.
rcommonPrefix :: RPath -> RPath -> Maybe RPath
rcommonPrefix (RPath r1 ls1) (RPath r2 ls2)
  | r1 == r2  = Just $ RPath r1 (commonPrefix ls1 ls2)
  | otherwise = Nothing
-- | Computes the maximal prefix of an 'RPath' occurring in a list of
-- 'RPath's with the same root.  Candidate prefixes are tried longest
-- first; the empty path is the fallback when nothing matches.
rmaxPrefix :: RPath -> [RPath] -> RPath
rmaxPrefix path@(RPath r rls) pths =
  RPath r (findFirst (reverse $ inits rls) (filterRoot path pths))
  where
    -- Keep only the path components of candidates sharing our root.
    filterRoot path1 paths2 = [ ls | path2@(RPath _ ls) <- paths2, rcommonRoot path1 path2]
    findFirst (l:ls) lss
      | l `elem` lss = l
      | otherwise    = findFirst ls lss
    findFirst [] _ = []
-- | Returns the value addressed by a program variable (frame index plus
-- slot index).  Frames are reversed because 'P.Var' counts frames from
-- the bottom of the call stack, while 'PState' stores them top-first.
-- NOTE(review): (!!) is partial; indices are assumed valid for this state.
valueV :: P.Var -> PState i a -> AbstrValue i
valueV (P.StkVar i j) (PState _ frms _) = (reverse . opstk) (reverse frms !! i) !! j
valueV (P.LocVar i j) (PState _ frms _) = locals (reverse frms !! i) !! j
{-valueV (LocVar i j) (PState _ frms _) = case lookup i $ zip [0..] frms of-}
{-Nothing -> error $ "frms" ++ show (i, length frms)-}
{-Just frm -> case lookup j $ zip [0..] (locals frm) of-}
{-Nothing -> error $ "locals" ++ show (j,length (locals frm))-}
{-Just v -> v-}
-- | Returns the type of the value addressed by a program variable:
-- the class of the referenced heap object for references, otherwise the
-- intrinsic type of the scalar value.
typeV :: P.Var -> PState i a -> P.Type
typeV v st@(PState hp _ _) = case valueV v st of
  RefVal q -> P.RefType . className $ lookupH q hp
  w        -> fromJust $ typeOf w
-- | Computes the addresses reachable from a given program variable;
-- scalars reach nothing.
reachableV :: P.Var -> PState i a -> [Address]
reachableV var st = case valueV var st of
  RefVal r -> reachable r (heap st)
  _        -> []
-- | Returns the program locations (class, method, pc) of all frames of
-- a state, topmost frame first; exception states have none.
programLocation :: PState i a -> [(P.ClassId,P.MethodId,P.PC)]
programLocation (PState _ frms _) = [(cn,mn,pc) | Frame _ _ cn mn pc <- frms]
programLocation (EState _) = []
-- | Returns the type of an address, i.e. the class of the heap object.
tyOf :: PState i a -> Address -> P.Type
tyOf st q = P.RefType . className $ lookupH q (heap st)
-- | Returns the kind of an address.
-- NOTE(review): the case covers 'Instance' and 'AbsVar' only — confirm
-- these are the only heap-object shapes, otherwise this is partial.
refKindOf :: PState i a -> Address -> P.RefKind
refKindOf st q = case lookupH q (heap st) of
  Instance cn _ -> P.InstanceKind cn
  AbsVar cn     -> P.ClassVarKind cn
| ComputationWithBoundedResources/jat | src/Jat/PState/Fun.hs | bsd-3-clause | 17,186 | 0 | 18 | 4,578 | 6,421 | 3,251 | 3,170 | -1 | -1 |
import QuickCheck
import Test.QuickCheck
import HUnit
import Test.HUnit
main :: IO Counts
-- Runs five QuickCheck property groups over six sorting algorithms,
-- then a HUnit test list; returns the HUnit counts.
main = do
  --TEST 1
  --idempotency: applying a sort twice has the same result as applying it once
  quickCheck idempotent_quicksort
  quickCheck idempotent_bubblesort
  quickCheck idempotent_insertionsort
  quickCheck idempotent_mergesort
  quickCheck idempotent_selectionsort
  quickCheck idempotent_heapsort
  --TEST 2
  --the first element in a sorted list should always be the smallest element of the input list
  --filtering out empty lists (would reveal an error because of the definition of head and minimum)
  quickCheck sorted_list_minimum_quicksort
  quickCheck sorted_list_minimum_bubblesort
  quickCheck sorted_list_minimum_insertionsort
  quickCheck sorted_list_minimum_mergesort
  quickCheck sorted_list_minimum_selectionsort
  quickCheck sorted_list_minimum_heapsort
  --TEST 3
  --the last element in a sorted list should always be the largest element of the input list
  --filtering out empty lists (would reveal an error because of the definition of head and minimum)
  quickCheck sorted_list_maximum_quicksort
  quickCheck sorted_list_maximum_bubblesort
  quickCheck sorted_list_maximum_insertionsort
  quickCheck sorted_list_maximum_mergesort
  quickCheck sorted_list_maximum_selectionsort
  -- NOTE(review): "mmaximum" below looks like a typo for "maximum" —
  -- confirm against the property's defining module before renaming.
  quickCheck sorted_list_mmaximum_heapsort
  --TEST 4
  --each element should be smaller than, or equal to, its successor
  quickCheck compare_elements_quicksort
  quickCheck compare_elements_bubblesort
  quickCheck compare_elements_insertionsort
  quickCheck compare_elements_mergesort
  quickCheck compare_elements_selectionsort
  quickCheck compare_elements_heapsort
  --TEST 5
  --output is a permutation of the input
  --operator \\ is the difference function on lists, null tests whether the difference list is empty
  --is_permut xs ys = null (xs \\ ys) && null (ys \\ xs)
  quickCheck output_perm_of_input_quicksort
  quickCheck output_perm_of_input_bubblesort
  quickCheck output_perm_of_input_insertionsort
  quickCheck output_perm_of_input_mergesort
  quickCheck output_perm_of_input_selectionsort
  quickCheck output_perm_of_input_heapsort
  putStrLn "quicksortTest, bubblesortTest, insertionsortTest, insertTest, mergesortTest, mergeTest, selectionsortTest, heapsortTest, childrenTest,parentTest"
  runTestTT $ TestList [TestLabel "quicksortTest" quicksortTest,TestLabel "bubblesortTest" bubblesortTest,TestLabel "insertionsortTest" insertionsortTest, TestLabel "myInsertTest" myInsertTest,TestLabel "mergesortTest" mergesortTest,
                        TestLabel "mergeTest" mergeTest,TestLabel "selectionsortTest" selectionsortTest,TestLabel "heapsortTest" heapsortTest, TestLabel "childrenTest1" childrenTest1, TestLabel "childrenTest2" childrenTest2, TestLabel "parentTest" parentTest ]
| Regulareveryday/our_sorting_algos | test/MainTest.hs | bsd-3-clause | 2,824 | 1 | 10 | 398 | 331 | 141 | 190 | 39 | 1 |
{-# LANGUAGE TypeOperators, TypeFamilies #-}
module Control.Monad.Unpack.Class where
import Control.Applicative
import Control.Monad.Trans.Class
-- | Types that can serve as the (unpacked) environment of an
-- 'UnpackedReaderT'.  Each instance supplies its own data-family
-- representation of the reader transformer.
class Unpackable arg where
  -- | Reader monad transformer specialised to the environment type @arg@.
  data UnpackedReaderT arg :: (* -> *) -> * -> *
  -- | Run the reader by supplying the environment.
  runUnpackedReaderT :: UnpackedReaderT arg m a -> arg -> m a
  -- | Build a reader from a function of the environment.
  unpackedReaderT :: (arg -> m a) -> UnpackedReaderT arg m a
{-# INLINE ask #-}
-- | Retrieve the environment, analogous to 'Control.Monad.Reader.ask'.
ask :: (Monad m, Unpackable arg) => UnpackedReaderT arg m arg
ask = unpackedReaderT return
{-# INLINE local #-}
-- | Run a computation with a locally modified environment, analogous to
-- 'Control.Monad.Reader.local'.
local :: (Monad m, Unpackable arg) => (arg -> arg) -> UnpackedReaderT arg m a -> UnpackedReaderT arg m a
local f m = unpackedReaderT $ runUnpackedReaderT m . f
-- Lifting ignores the environment entirely.
instance Unpackable arg => MonadTrans (UnpackedReaderT arg) where
  {-# INLINE lift #-}
  lift m = unpackedReaderT $ \ _ -> m
-- Both actions of (>>=) receive the same environment, as in ReaderT.
instance (Unpackable arg, Monad m) => Monad (UnpackedReaderT arg m) where
  {-# INLINE return #-}
  {-# INLINE (>>=) #-}
  return x = lift $ return x
  m >>= k = unpackedReaderT $ \ arg ->
    do a <- runUnpackedReaderT m arg
       runUnpackedReaderT (k a) arg
instance (Unpackable arg, Functor f) => Functor (UnpackedReaderT arg f) where
  {-# INLINE fmap #-}
  fmap f m = unpackedReaderT $ \ arg -> fmap f (runUnpackedReaderT m arg)
-- (<*>) threads the same environment to both sides.
instance (Unpackable arg, Applicative f) => Applicative (UnpackedReaderT arg f) where
  {-# INLINE pure #-}
  {-# INLINE (<*>) #-}
  pure f = unpackedReaderT $ \ _ -> pure f
  f <*> x = unpackedReaderT $ \ arg -> runUnpackedReaderT f arg <*> runUnpackedReaderT x arg
module Main where
import Scheme
import System.Environment
-- | Read one expression from the command line, parse and evaluate it,
-- and print the result.  Prints a usage message instead of crashing
-- when no argument is supplied (the previous 'head' call was partial
-- and died with "Prelude.head: empty list").
main :: IO ()
main = do
  args <- getArgs
  case args of
    (expr:_) -> print . eval . readExpr $ expr
    []       -> putStrLn "usage: scheme EXPR"
| Parajao/Scheme | src/Main.hs | bsd-3-clause | 227 | 0 | 8 | 74 | 46 | 27 | 19 | 5 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Heist.Splices.Json (
bindJson
) where
------------------------------------------------------------------------------
import Control.Monad.Reader
import Data.Aeson
import qualified Data.ByteString.Char8 as S
import qualified Data.ByteString.Lazy.Char8 as L
import qualified Data.HashMap.Strict as Map
import Data.Map.Syntax
import Data.Maybe
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Vector as V
import Text.Blaze.Html5 ((!))
import qualified Text.Blaze.Html5 as B
import Text.Blaze.Renderer.XmlHtml
import Text.XmlHtml
------------------------------------------------------------------------------
import Heist.Interpreted.Internal
import Heist.Internal.Types.HeistState
------------------------------------------------------------------------------
------------
-- public --
------------
------------------------------------------------------------------------------
-- | This splice binds convenience tags for the given JSON (or
-- JSON-convertible) value and runs the tag's child nodes using the new
-- bindings.
--
-- /Tags bound when you pass in an object/
--
-- Tags bound for an object looking like this:
--
-- > { "k_1": v_1, ..., "k_N": v_N }
--
-- @\<value:{k_i}\>@ -- treats v_i as text
-- @\<snippet:{k_i}\>@ -- treats v_i as HTML
-- @\<with:{k_i}\>@ -- explodes v_i and runs its children
--
-- @\<value var=\"foo.bar.baz\"\/>@ -- walks the JSON tree to find
-- \"foo.bar.baz\", and interprets it as a string
-- @\<snippet var=\"foo.bar.baz\"\/\>@
-- @\<with var=\"foo.bar.baz\"\>...\<with\>@
--
-- /Tags bound when you pass in anything else/
--
-- @\<value\/\>@ -- the given JSON value, as a string
-- @\<snippet\/\>@ -- the given JSON value, parsed and spliced in as HTML
--
bindJson :: (ToJSON a, Monad n) => a -> Splice n
-- Normalise to a generic aeson 'Value' first, then explode it into splices.
bindJson = runReaderT explodeTag . toJSON
-------------
-- private --
-------------
------------------------------------------------------------------------------
-- | Render an error string as a @\<strong class="error"\>@ node list so
-- splice failures are visible in the generated page.
errorMessage :: String -> [Node]
errorMessage s = renderHtmlNodes $
                 B.strong ! B.customAttribute "class" "error" $
                 B.toHtml s
------------------------------------------------------------------------------
-- | Splice computation carrying the current JSON 'Value' as its
-- reader environment.
type JsonMonad n m a = ReaderT Value (HeistT n m) a
------------------------------------------------------------------------------
-- | Run a 'JsonMonad' action against the given JSON value.
withValue :: (Monad m) => Value -> JsonMonad n m a -> HeistT n m a
withValue = flip runReaderT
------------------------------------------------------------------------------
-- | Render a JSON boolean as the lowercase literal used in JSON text.
boolToText :: Bool -> Text
boolToText True  = T.pack "true"
boolToText False = T.pack "false"
------------------------------------------------------------------------------
-- | Render a JSON number (or any ToJSON value) as text by round-tripping
-- through aeson's 'encode' and decoding the UTF-8 bytes.
numToText :: ToJSON a => a -> Text
numToText = T.decodeUtf8 . S.concat . L.toChunks . encode
------------------------------------------------------------------------------
-- | Walk a dot-separated path expression (e.g. @foo.bar.0@) through a
-- JSON value: objects are indexed by key, arrays by decimal index.
-- Returns Nothing as soon as a component does not resolve.
findExpr :: Text -> Value -> Maybe Value
findExpr t = go (T.split (=='.') t)
  where
    go [] !value = Just value
    go (x:xs) !value = findIn value >>= go xs
      where
        findIn (Object obj) = Map.lookup x obj
        findIn (Array arr)  = tryReadIndex >>= \i -> arr V.!? i
        findIn _            = Nothing
        -- Safe numeric parse of the component; Nothing if not a number.
        tryReadIndex = fmap fst . listToMaybe . reads . T.unpack $ x
------------------------------------------------------------------------------
-- | Parse a text fragment as HTML nodes; a parse failure yields an
-- inline error node instead of aborting the splice.
asHtml :: Monad m => Text -> m [Node]
asHtml t =
  case (parseHTML "" $ T.encodeUtf8 t) of
    Left e -> return $ errorMessage $
              "Template error turning JSON into HTML: " ++ e
    Right d -> return $! docContent d
------------------------------------------------------------------------------
-- | Splice for @\<snippet\>@: interprets the current JSON scalar as an
-- HTML fragment.  Arrays and objects produce an inline error node.
snippetTag :: Monad m => JsonMonad n m [Node]
snippetTag = ask >>= snip
  where
    txt t = lift $ asHtml t
    snip Null       = txt ""
    snip (Bool b)   = txt $ boolToText b
    snip (Number n) = txt $ numToText n
    snip (String t) = txt t
    snip _ = lift $ do
      node <- getParamNode
      return $ errorMessage $ concat [
          "error processing tag <"
        , T.unpack $ fromMaybe "???" $ tagName node
        , ">: can't interpret JSON arrays or objects as HTML."
        ]
------------------------------------------------------------------------------
-- | Splice for @\<value\>@: renders the current JSON scalar as plain
-- text.  Arrays and objects produce an inline error node.
valueTag :: Monad m => JsonMonad n m [Node]
valueTag = ask >>= go
  where
    go Null       = txt ""
    go (Bool b)   = txt $ boolToText b
    go (Number n) = txt $ numToText n
    go (String t) = txt t
    go _ = lift $ do
      node <- getParamNode
      return $ errorMessage $ concat [
          "error processing tag <"
        , T.unpack $ fromMaybe "???" $ tagName node
        , ">: can't interpret JSON arrays or objects as text."
        ]
    txt t = return [TextNode t]
------------------------------------------------------------------------------
-- | Core splice: explodes the current JSON value.  Scalars bind
-- @value@/@snippet@, arrays splice each element in order, and objects
-- bind per-key @with:k@/@snippet:k@/@value:k@ tags plus the generic
-- var-attribute forms.
explodeTag :: forall n. (Monad n) => JsonMonad n n [Node]
explodeTag = ask >>= go
  where
    --------------------------------------------------------------------------
    go Null       = goText ""
    go (Bool b)   = goText $ boolToText b
    go (Number n) = goText $ numToText n
    go (String t) = goText t
    go (Array a)  = goArray a
    go (Object o) = goObject o
    --------------------------------------------------------------------------
    goText t = lift $ runChildrenWith $ do
      "value"   ## return [TextNode t]
      "snippet" ## asHtml t
    --------------------------------------------------------------------------
    -- Arrays: run the children once per element, concatenating results.
    -- A difference list avoids quadratic (++) in the fold.
    goArray :: V.Vector Value -> JsonMonad n n [Node]
    goArray a = do
      lift stopRecursion
      dl <- V.foldM f id a
      return $! dl []
      where
        f dl jsonValue = do
          tags <- go jsonValue
          return $! dl . (tags ++)
    --------------------------------------------------------------------------
    -- search the param node for attribute \"var=expr\", search the given JSON
    -- object for the expression, and if it's found run the JsonMonad action m
    -- using the restricted JSON object.
    varAttrTag :: Value -> (JsonMonad n n [Node]) -> Splice n
    varAttrTag v m = do
      node <- getParamNode
      maybe (noVar node) (hasVar node) $ getAttribute "var" node
      where
        noVar node = return $ errorMessage $
          concat [ "expression error: no var attribute in <"
                 , T.unpack $ fromMaybe "???" $ tagName node
                 , "> tag"
                 ]
        hasVar node expr = maybe (return $ errorMessage $
                                  concat [
                                    "expression error: can't find \""
                                  , T.unpack expr
                                  , "\" in JSON object (<"
                                  , T.unpack $ fromMaybe "???" $ tagName node
                                  , "> tag)"
                                  ])
                                 (runReaderT m)
                                 (findExpr expr v)
    --------------------------------------------------------------------------
    -- Bindings available on every object: the var-attribute tag forms.
    genericBindings :: JsonMonad n n (Splices (Splice n))
    genericBindings = ask >>= \v -> return $ do
      "with"    ## varAttrTag v explodeTag
      "snippet" ## varAttrTag v snippetTag
      "value"   ## varAttrTag v valueTag
    --------------------------------------------------------------------------
    goObject obj = do
      start <- genericBindings
      let bindings = Map.foldlWithKey' bindKvp start obj
      lift $ runChildrenWith bindings
    --------------------------------------------------------------------------
    -- One key/value pair contributes three suffixed tag bindings.
    bindKvp bindings k v =
      let newBindings = do
            T.append "with:" k    ## withValue v explodeTag
            T.append "snippet:" k ## withValue v snippetTag
            T.append "value:" k   ## withValue v valueTag
      in bindings >> newBindings
| snapframework/heist | src/Heist/Splices/Json.hs | bsd-3-clause | 8,551 | 0 | 16 | 2,592 | 1,727 | 900 | 827 | 126 | 6 |
-----------------------------------------------------------------------------
-- |
-- Module :
-- Copyright : (c) 2012 Boyun Tang
-- License : BSD-style
-- Maintainer : tangboyun@hotmail.com
-- Stability : experimental
-- Portability : ghc
--
--
--
-----------------------------------------------------------------------------
module Bio.Web.MicroTV4.Types
where
import Data.ByteString (ByteString)
-- | Gene-centric annotation record.
data GeneInfo = GI {
    ensgID :: !ByteString        -- ^ Ensembl gene identifier
  , geneName :: !ByteString
  , refSeqID :: ![ByteString]    -- ^ RefSeq identifiers for the gene
  , description :: !ByteString
  , keggPathways :: !(Maybe [ByteString])
  , chromosome :: !ByteString -- X,1,2,3,4 ...
  } deriving (Show)
-- | Raw miRNA record as parsed, before diseases are attached.
data MiRNA_impl = RI_impl {
    miID :: !ByteString
  , miAccession :: !ByteString
  , related :: !(Maybe ByteString)
  , miSeq :: !ByteString
  } deriving (Show)
-- | miRNA annotation, optionally with associated diseases.
data MiRNAInfo = RI {
    miRBaseID :: !ByteString
  , miRBaseAccession :: !ByteString
  , relatedNames :: !(Maybe ByteString)
  , miRNASeq :: !ByteString
  , relatedDiseases :: !(Maybe [Disease])
  } deriving (Show)
-- | A disease together with its literature references.
data Disease = Di {
    diseaseName :: !ByteString
  , references :: ![ByteString] -- pubmed id
  } deriving (Show)
-- | A predicted miRNA binding site on a 3'UTR.
data BindingSite = BS {
    bindingType :: !ByteString
  , posAtUTR3' :: !(Int,Int)               -- ^ site position within the 3'UTR
  , bindSeq :: !ByteString
  , seedPosAtUTR3' :: !(Int,Int)           -- ^ seed-match position within the 3'UTR
  , seedSeq :: !ByteString
  , score :: {-# UNPACK #-} !Double
  , conservation :: {-# UNPACK #-} !Int
  , posOnChromosome :: !PosOnCh
  } deriving (Show)
-- | Genomic coordinates: chromosome plus begin/end position lists.
data PosOnCh = P {
    chrome :: !ByteString
  , beg :: ![Int]
  , end :: ![Int]
  } deriving (Show)
-- | Three co-prediction flags (semantics defined by the web service).
data CoP = CoP !Bool
               !Bool
               !Bool
         deriving (Show)
-- | Raw miRNA/gene interaction record, before expansion.
data MR_impl = MR {
    ensg :: !ByteString
  , name :: !ByteString
  , mi :: !ByteString
  , miT :: {-# UNPACK #-} !Double
  , sig :: {-# UNPACK #-} !Double
  , pre :: {-# UNPACK #-} !Double
  , bs :: ![BindingSite]
  } deriving (Show)
-- | Full microT prediction record combining gene, miRNA and sites.
data MicroTRecord = MTR {
    geneInfo :: GeneInfo
  , miRNAInfo :: MiRNAInfo
  -- ensgID :: !ByteString
  -- ,geneName :: !ByteString
  -- ,miRNA :: !ByteString
  , miTGScore :: {-# UNPACK #-} !Double
  , signalNoiseRatio :: {-# UNPACK #-} !Double
  , precision :: {-# UNPACK #-} !Double
  , coPredictedInfo :: !CoP
  , bindingSites :: ![BindingSite]
  } deriving Show
| tangboyun/bio-web-api | Bio/Web/MicroTV4/Types.hs | bsd-3-clause | 2,362 | 0 | 12 | 606 | 542 | 322 | 220 | 134 | 0 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedLabels #-}
module YX.Type.RuntimeConfig
where
import Prelude (($!))
import Control.Applicative (pure)
import Control.Exception (bracketOnError)
import Control.Monad ((>>=), when)
import Data.Bool (Bool(False, True))
import Data.Function (($))
import Data.IORef (IORef, atomicModifyIORef', newIORef, readIORef)
import Data.Maybe (Maybe(Just, Nothing))
import Data.Version (Version)
import GHC.Generics (Generic)
import System.IO (FilePath, IO)
import Data.Default (def)
import Data.OverloadedRecords ((:::), R, get)
import Data.OverloadedRecords.TH (overloadedRecord)
import qualified Database.SQLite.Simple as SQLite (close, open)
import YX.Type.DbConnection (DbConnection(DbConnection))
import YX.Type.ConfigFile (EnvironmentName, ProjectConfig)
import qualified Paths_yx as YX (version)
-- | Process-wide runtime configuration, including a lazily opened,
-- cached database connection.
data RuntimeConfig = RuntimeConfig
    { _yxVersion :: Version               -- ^ version of this yx build
    , _yxInvokedAs :: FilePath            -- ^ name/path yx was invoked as
    , _yxExe :: FilePath                  -- ^ resolved path of the yx executable
    , _projectConfig :: Maybe ProjectConfig
    , _currentEnvironment :: Maybe EnvironmentName
    , _dbConnection :: !(IO DbConnection) -- ^ open-or-reuse the cached connection
    , _closeDbConnection :: !(IO ())      -- ^ close the cached connection, if any
    }
  deriving (Generic)

-- Generate overloaded-record accessors for the fields above.
overloadedRecord def ''RuntimeConfig
-- | Named parameters accepted by 'mkRuntimeConfig'.
type MkRuntimeConfigParams =
    '[ "database" ::: FilePath
     , "yxInvokedAs" ::: FilePath
     , "yxExe" ::: FilePath
     , "projectConfig" ::: Maybe ProjectConfig
     , "currentEnvironment" ::: Maybe EnvironmentName
     ]

-- | Build a 'RuntimeConfig'.  The database connection is not opened
-- here; a shared IORef cache is allocated so the connection is opened
-- on first use and closed at most once.
mkRuntimeConfig
    :: R MkRuntimeConfigParams opts
    => opts
    -> IO RuntimeConfig
mkRuntimeConfig opts = do
    dbConnRef <- newIORef Nothing
    pure $! mkRuntimeConfig'
        (mkGetDbConnection (get #database opts) dbConnRef)
        (mkCloseDbConnection dbConnRef)
  where
    mkRuntimeConfig' getConn closeConn = RuntimeConfig
        { _yxVersion = YX.version
        , _yxInvokedAs = get #yxInvokedAs opts
        , _yxExe = get #yxExe opts
        , _projectConfig = get #projectConfig opts
        , _currentEnvironment = get #currentEnvironment opts
        , _dbConnection = getConn
        , _closeDbConnection = closeConn
        }
-- | Return the cached connection, or open the database and publish the
-- new connection via the IORef.  Safe under concurrent callers: if two
-- threads open simultaneously, the loser closes its redundant handle.
mkGetDbConnection
    :: FilePath
    -> IORef (Maybe DbConnection)
    -> IO DbConnection
mkGetDbConnection !db !dbConnRef = readIORef dbConnRef >>= \case
    Just conn -> pure conn
    Nothing -> SQLite.open db `bracketOnError` SQLite.close $ \conn -> do
        -- We can not use 'bracket', that would close the connection right
        -- away and we need to keep it open as long as its needed.
        (isConflict, conn') <- atomicModifyIORef' dbConnRef $ \case
            existing@(Just conn') -> (existing, (True, conn'))
            -- Other thread has allocated the connection and updated
            -- the IORef first.
            Nothing ->
                let conn' = DbConnection $! conn
                in (Just $! conn', (False, conn'))
        -- Conflict means that we have opened the database simultaneously
        -- with another thread, and the other thread has won the race for
        -- publishing it via IORef. Hence the cleanup.
        when isConflict $ SQLite.close conn
        pure conn'
-- | Close and clear the cached database connection, if one is open.
-- The stored value is atomically swapped for 'Nothing' first, so at
-- most one caller performs the actual close.
mkCloseDbConnection :: IORef (Maybe DbConnection) -> IO ()
mkCloseDbConnection !dbConnRef = do
    mConn <- atomicModifyIORef' dbConnRef (\old -> (Nothing, old))
    case mConn of
        Just (DbConnection c) -> SQLite.close c
        Nothing               -> pure ()
-- | Obtain the (possibly cached) database connection from a record with
-- a @dbConnection@ field and pass it to the given action.
withDbConnection
    :: R '["dbConnection" ::: IO DbConnection] r
    => r
    -> (DbConnection -> IO a)
    -> IO a
withDbConnection r action = get #dbConnection r >>= action
| trskop/yx | src/YX/Type/RuntimeConfig.hs | bsd-3-clause | 4,033 | 5 | 28 | 988 | 929 | 521 | 408 | 96 | 5 |
{-# LANGUAGE LambdaCase #-}
module Main where
import Control.Monad ( filterM, foldM, join, unless )
import Control.Monad.IO.Class (liftIO)
import Data.List ( isPrefixOf, intercalate, sort )
import Data.Maybe ( fromMaybe )
import System.Directory ( getCurrentDirectory, findExecutable, listDirectory
, doesDirectoryExist )
import System.Environment ( lookupEnv )
import System.Exit ( ExitCode(ExitSuccess), exitFailure )
import System.FilePath ( (</>), pathSeparator, searchPathSeparator
, takeDirectory, takeFileName, isAbsolute )
import System.FilePath.Find ( always, find, extension, (==?) )
import System.IO ( hPutStrLn, stderr )
import System.Process ( readCreateProcessWithExitCode
, shell, CreateProcess(..) )
import Test.Tasty
( defaultMain, localOption, testGroup, mkTimeout, TestTree )
import Test.Tasty.HUnit ( testCase, (@=?), assertBool )
import Test.Tasty.ExpectedFailure ( ignoreTest )
-- | Reads from DISABLED_TESTS env var or disabled_tests.txt file
-- (preference is given to the former) and returns the list of tests.
-- Shell-style comments are removed, and test names are assumed to be
-- a single word without whitespace. The input can separate testnames
-- with any type of whitespace (space, tab, newline).
--
-- Returns the list of disabled test names.
getDisabledTestList :: FilePath -> IO [String]
getDisabledTestList testdir = do
  let dtfile = testdir </> "disabled_tests.txt"
  -- Environment variable takes precedence over the file.
  dset <- lookupEnv "DISABLED_TESTS" >>= \case
    Just d -> return d
    Nothing -> readFile dtfile
  -- Strip '#' comments per line, then split on any whitespace.
  let removeComment = takeWhile ((/=) '#')
      stripWhitespace = words
      processInput = join . map (stripWhitespace . removeComment) . lines
  return $ processInput dset
-- | Gets the list of tests (subdirectories) to run given the base
-- directory and the list of disabled tests
getTestList :: FilePath -> [String] -> IO [String]
getTestList testdir disabled = do
  f <- listDirectory testdir
  -- Keep entries named test*, drop disabled names, prefix with testdir,
  -- and finally keep only those that are actually directories.
  let isTest = filter (isPrefixOf "test")
      notDisabled = filter (not . flip elem disabled)
      inTestdir = fmap (testdir </>)
  filterM doesDirectoryExist $ inTestdir $ notDisabled $ isTest $ sort f
-- ----------------------------------------------------------------------=
-- * Environment variable handling.
--
-- Start with an initial set of variables and an asociated value (or
-- set of values with a separator), then override/update with any
-- environment variables set.
data EnvVarSpec = EV String String
                -- ^ single string value
                | EVp String Char [String]
                -- ^ accumulative path with separator

-- | Override or extend an environment-variable specification: a plain
-- 'EV' entry is replaced by the new value, an 'EVp' path entry has the
-- new value appended to its component list, and an unknown name is
-- appended as a new 'EV'.  Empty values are ignored everywhere — they
-- neither create variables nor add path components (previously an empty
-- value appended to an 'EVp' produced a spurious trailing separator,
-- e.g. a PATH ending in ':').
updEnvVars :: String -> String -> [EnvVarSpec] -> [EnvVarSpec]
updEnvVars n v [] = [EV n v | v /= ""]
updEnvVars n v (EV n' v' : evs) | n == n' = EV n (if v == "" then v' else v) : evs
updEnvVars n v (EVp n' s v' : evs)
  | n == n' = EVp n s (if v == "" then v' else v' <> [v]) : evs
updEnvVars n v (ev : evs) = ev : updEnvVars n v evs

-- | Flatten a specification into an environment assoc-list, joining
-- path components with their separator character.
envVarAssocList :: [EnvVarSpec] -> [(String, String)]
envVarAssocList = map envVarAssoc
  where
    envVarAssoc (EV n v)     = (n, v)
    envVarAssoc (EVp n s vs) = (n, intercalate [s] vs)
-- ----------------------------------------------------------------------
-- * Test Parameters
--
-- Determine all Environment Variables that should be set for the
-- tests, using the internal defaults plus overrides from the current
-- environment:
-- - SAW = saw invocation command (with jars specification)
-- - JAVA = path to java runtime
-- - HOME = directory to act as home when running tests
-- - PATH = PATH searchlist, supplemented with discovered elements
-- - JAVA_HOME = path to java installation
-- - TESTBASE = path to intTests directory
-- - SAW_JDK_JAR = path to rt.jar
--
-- These environment variables may already be set to supply default
-- locations for these components.
--
-- Also determine the list of JAR files to pass to the various tests:
-- - The rt.jar runtime library from the JAVA installation
-- - Any jars supplied with the jvm-verifier
-- - Any jars supplied by saw
--
-- Note that even if SAW is specified in the environment, this test runner will
-- augment those definitions with the discovered jar files and target path
-- specifications.
-- | Returns the environment variable assocList to use for running
-- each individual test
testParams :: FilePath -> (String -> IO ()) -> IO [(String, String)]
testParams intTestBase verbose = do
  here <- getCurrentDirectory
  let absTestBase = if isAbsolute intTestBase then intTestBase
                    else here </> intTestBase
  -- try to determine where the saw binary is in case there are other
  -- executables there (e.g. z3, etc.)
  sawExe <- findExecutable "saw" >>= \case
    Just e -> return e
    _ -> return "" -- may be supplied via env var
  verbose $ "Found saw: " <> sawExe
  -- Built-in defaults; environment variables below override/extend them.
  let eVars0 = [ EV "HOME" absTestBase
               , EVp "PATH" searchPathSeparator [takeDirectory sawExe]
               , EV "TESTBASE" absTestBase
               , EV "DIRSEP" [pathSeparator]
               , EV "CPSEP" [searchPathSeparator]
               -- The eval is used to protect the evaluation of the
               -- single-quoted arguments supplied below when run in a
               -- bash test.sh script.
               , EVp "SAW" ' ' ["eval", "saw"]
               ]
      addEnvVar evs e = do v <- lookupEnv e
                           return $ updEnvVars e (fromMaybe "" v) evs
  -- override eVars0 with any environment variables set in this process
  e1 <- foldM addEnvVar eVars0 [ "SAW", "PATH", "JAVAC", "JAVA_HOME", "SAW_JDK_JAR" ]
  -- Create a pathlist of jars for invoking saw
  let jarsDir = absTestBase </> "jars"
  let findJarsIn p = doesDirectoryExist p >>= \case
        True -> find always (extension ==? ".jar") p
        False -> return []
  verbose $ "Finding JARs in " <> jarsDir
  jars <- intercalate [searchPathSeparator] <$> findJarsIn jarsDir
  -- Set the SAW env var for the testing scripts to invoke saw with the JAR
  -- list, again allowing ENV override.
  let e3 = updEnvVars "SAW" (unwords [ "-j", "'" <> jars <> "'" ]) e1
  return $ envVarAssocList e3
-- ----------------------------------------------------------------------
-- | Generate a HUnit test for each test, executed by running the
-- test.sh file in that test directory. The first argument is the set
-- of environment variables to set when running the tests and the
-- second is the list of disabled tests.
genTests :: [(String,String)] -> [String] -> [String] -> [TestTree]
genTests envvars disabled = map mkTest
  where
    -- Disabled tests are still listed but marked ignored in the tree.
    preTest n = if takeFileName n `elem` disabled then ignoreTest else id
    mkTest n = preTest n $ testCase (takeFileName n) $ do
      -- Run the per-test shell script in the test's own directory with
      -- the prepared environment.
      let cmd = (shell "bash test.sh") { cwd = Just n, env = Just envvars }
      (r,o,e) <- liftIO $ readCreateProcessWithExitCode cmd ""
      -- Only echo stdout/stderr on failure to keep successful runs quiet.
      if r == ExitSuccess
        then return ()
        else putStrLn o >> hPutStrLn stderr e
      r @=? ExitSuccess
-- | Several of the tests use definitions from the cryptol-specs
-- repository, which should be present in deps/cryptol-specs.
-- Verify the existence of that repository if any of the tools here
-- need it.
check_cryptol_specs :: String -> [String] -> [String] -> TestTree
check_cryptol_specs testPath disabled tests = testCase "cryptol-specs Available" $
  -- Only these tests depend on the cryptol-specs subrepo; the check is
  -- skipped when none of them is enabled.
  let need_cryptol_spec = any (\t -> let tp = (testPath </> t)
                                     in tp `elem` tests && not (t `elem` disabled))
                          [ "test0001", "test0002"
                          , "test0006", "test0006_w4"
                          , "test0035_aes_consistent"
                          , "test_w4"
                          , "test_examples"
                          ]
      cspec_dir = takeDirectory testPath </> "deps" </> "cryptol-specs"
  in if need_cryptol_spec
       -- The Primitive subdirectory existing is taken as evidence the
       -- submodule is checked out.
       then do have_cs <- liftIO $ doesDirectoryExist $ cspec_dir </> "Primitive"
               unless (have_cs) $ liftIO $ do
                 hPutStrLn stderr "Tests require cryptol-specs as a checked-out subrepo:"
                 hPutStrLn stderr "  $ git submodule update --init deps/cryptol-specs"
               assertBool "Missing cryptol-specs" have_cs
       else return ()
main :: IO ()
main = do
  let base = "intTests"
  -- Run tests with VERBOSE=y environment variable for extra output.
  verbose <- lookupEnv "VERBOSE" >>= \case
    Just "y" -> return $ putStrLn
    _ -> return $ const (return ())
  -- The runner must be started from the directory containing intTests.
  found <- doesDirectoryExist base
  unless found $ do
    curwd <- getCurrentDirectory
    hPutStrLn stderr $ "FAILURE: cannot find test directory " <> base <> " from " <> curwd
    exitFailure
  dset <- getDisabledTestList base
  verbose $ "Disabled tests: " <> show dset
  testdirs' <- getTestList base [] -- no filtering here; they will be ignoreTest'd by genTests
  -- ENABLED_TESTS, when set, restricts the run to the named tests.
  testdirs <- fromMaybe testdirs' .
              (fmap (\et -> let path_ets = fmap (base </>) $ words et in
                            filter (flip elem path_ets) testdirs')) <$>
              lookupEnv "ENABLED_TESTS"
  envVars <- testParams base verbose
  verbose $ "ENV: " <> show envVars
  defaultMain $
    localOption (mkTimeout $ 500 * 1000 * 1000) $ -- 500 second timeout in usecs
    testGroup "intTests" $
    check_cryptol_specs base dset testdirs : (genTests envVars dset) testdirs
| GaloisInc/saw-script | intTests/IntegrationTest.hs | bsd-3-clause | 9,405 | 0 | 20 | 2,343 | 2,019 | 1,067 | 952 | 125 | 4 |
{- Directive to allow Text and String to be mixed -}
{-# LANGUAGE OverloadedStrings #-}
module Dropbox.Types ( DropboxSession (..)
, AccountInfo
, Quota
, Metadata
, Content
, SessionId
) where
import Web.Authenticate.OAuth (Credential, OAuth)
import Data.Aeson ((.:), (.:?), decode, eitherDecode, FromJSON(..), Value(..))
import Control.Applicative ((<$>), (<*>))
import Data.Attoparsec.Number (Number(..))
import Dropbox.Types.AccountInfo
import Dropbox.Types.Quota
import Dropbox.Types.Content
import Dropbox.Types.Metadata
type SessionId = String
data DropboxSession = DropboxSession { getSessionId :: SessionId
, getTemporaryToken :: Credential
, accessToken :: Maybe Credential
, getOAuth :: OAuth
, getAuthorizationUrl :: String
} deriving (Show, Read) | tinkhaven/haskell-dropbox-api | Dropbox/Types.hs | bsd-3-clause | 1,128 | 0 | 9 | 460 | 198 | 131 | 67 | 22 | 0 |
module Main where
import Lib
import VCard
main :: IO ()
main = someFunc
| illis/hbook | app/Main.hs | bsd-3-clause | 74 | 0 | 6 | 16 | 25 | 15 | 10 | 5 | 1 |
{-# LANGUAGE RecordWildCards, NamedFieldPuns, OverloadedStrings #-}
module Network.OAuth.Http.BrowserClient where
import Network.OAuth.Http.HttpClient
import Network.OAuth.Http.Request
import Network.OAuth.Http.Response
import qualified Network.HTTP as B
import Data.ByteString.Lazy.Char8 hiding (map, empty, head, drop, intercalate)
import Data.List (intercalate)
import Control.Arrow
import Network.URI
import Network.CGI
import Data.Maybe
import Control.Monad.Trans
import Data.List.Split
import Control.Applicative hiding (empty)
data BrowserClient = BrowserClient
mtdDic :: [(Method, B.RequestMethod)]
mtdDic = [(GET, B.GET), (POST, B.POST), (PUT, B.PUT), (DELETE, B.DELETE), (TRACE, B.TRACE), (CONNECT, B.CONNECT), (HEAD, B.HEAD)]
toOAReq :: B.Request ByteString -> Request
toOAReq B.Request{..} = ReqHttp{ version = Http11, ..}
where
ssl = uriScheme rqURI == "https"
method = fromMaybe GET $ lookup rqMethod $ map (snd&&&fst) mtdDic
pathComps = splitOn "/" $ uriPath rqURI
qString = fromList $ formDecode $ drop 1 $ uriQuery rqURI
reqHeaders = fromList $ map (show . B.hdrName &&& B.hdrValue) rqHeaders
reqPayload = rqBody
host = fromJust (uriRegName <$> uriAuthority rqURI)
port = fromJust (readWith 80 . drop 1 . uriPort <$> uriAuthority rqURI)
readWith :: Read a => a -> String -> a
readWith def = fst . head .(++[(def, "")]) . reads
fromOAReq :: Request -> B.Request ByteString
fromOAReq ReqHttp{..} = B.Request { rqURI, rqMethod, rqHeaders, rqBody = reqPayload }
where
rqMethod = fromMaybe (B.Custom "GET") $ lookup method mtdDic
rqURI :: URI
rqURI = let protocol = if ssl then "https//:" else "http://"
uriQuery = if (rqMethod /= B.GET || qString == empty)
then ""
else '?':formEncode (toList qString)
in fromJust $ parseURI $ protocol ++ host ++ ':':show port ++ intercalate "/" pathComps ++ uriQuery
rqHeaders = map (first B.HdrCustom >>> uncurry B.mkHeader) $ toList reqHeaders
rqBody = reqPayload
toOARsp :: B.Response ByteString -> Response
toOARsp B.Response{rspCode = (a,b,c), rspHeaders=hdrs, ..} = RspHttp{..}
where
status = a*100+b*10+c
reason = rspReason
rspHeaders = fromList $ map toPair hdrs
toPair :: B.Header -> (String, String)
toPair (B.Header name val) = (show name, val)
rspPayload = rspBody
instance HttpClient BrowserClient where
runClient _ req = liftIO $ do
either (Left . show) (Right . toOARsp) <$> B.simpleHTTP (fromOAReq req)
| konn/libkonn | src/Network/OAuth/Http/BrowserClient.hs | bsd-3-clause | 2,586 | 1 | 16 | 550 | 908 | 500 | 408 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ExtendedDefaultRules #-}
-- | We can't lens into DOM on the server-side, yet, so we've made some auxilliary functions
-- for creating @row_@ @<div>@ tags, and some class names for creating columns.
-- .
-- Use them like this:
-- .
-- > div_ [class_ $ mconcat [ row_
-- > , small_collapsed_
-- > , large_uncollapsed_ ]] $
-- > div_ [class_ $ mconcat [ columns_
-- > , small_ 6
-- > , medium_ 4
-- > , large_ 3
-- > , small_centered_
-- > , large_uncentered_
-- > , small_push_ 6
-- > , medium_pull_ 4
-- > , large_reset_order_ ]] $
-- > -- rest of DOM...
module Lucid.Foundation.Structure.Grid where
import Lucid.Base
import Lucid.Html5
import qualified Data.Text as T
import Data.Monoid
-- | The @row@ class, padded with spaces so classes can be 'mconcat'ed.
row_ :: T.Text
row_ = " row "
-- | The @small-collapsed@ class (padded for 'mconcat'-style joining).
small_collapsed_ :: T.Text
small_collapsed_ = " small-collapsed "
-- | The @medium-collapsed@ class (padded for 'mconcat'-style joining).
medium_collapsed_ :: T.Text
medium_collapsed_ = " medium-collapsed "
-- | The @large-collapsed@ class (padded for 'mconcat'-style joining).
large_collapsed_ :: T.Text
large_collapsed_ = " large-collapsed "
-- | The @medium-uncollapsed@ class (padded for 'mconcat'-style joining).
medium_uncollapsed_ :: T.Text
medium_uncollapsed_ = " medium-uncollapsed "
-- | The @large-uncollapsed@ class (padded for 'mconcat'-style joining).
large_uncollapsed_ :: T.Text
large_uncollapsed_ = " large-uncollapsed "
-- | The @columns@ class for a column inside a row.
columns_ :: T.Text
columns_ = " columns "
-- | Build a numbered Foundation class of the form @\" \<prefix\>-\<n\> \"@,
-- e.g. @numberedClass "small" 6 == " small-6 "@. Shared by the width and
-- offset helpers below (previously six copies of the same expression).
numberedClass :: String -> Int -> T.Text
numberedClass prefix n = T.pack (" " ++ prefix ++ "-" ++ show n ++ " ")
-- | @small-/n/@ column-width class.
small_ :: Int -> T.Text
small_ = numberedClass "small"
-- | @medium-/n/@ column-width class.
medium_ :: Int -> T.Text
medium_ = numberedClass "medium"
-- | @large-/n/@ column-width class.
large_ :: Int -> T.Text
large_ = numberedClass "large"
-- | @small-offset-/n/@ class.
small_offset_ :: Int -> T.Text
small_offset_ = numberedClass "small-offset"
-- | @medium-offset-/n/@ class.
medium_offset_ :: Int -> T.Text
medium_offset_ = numberedClass "medium-offset"
-- | @large-offset-/n/@ class.
large_offset_ :: Int -> T.Text
large_offset_ = numberedClass "large-offset"
-- | The @small-centered@ class (padded for 'mconcat'-style joining).
small_centered_ :: T.Text
small_centered_ = " small-centered "
-- | The @medium-centered@ class (padded for 'mconcat'-style joining).
medium_centered_ :: T.Text
medium_centered_ = " medium-centered "
-- | The @large-centered@ class (padded for 'mconcat'-style joining).
large_centered_ :: T.Text
large_centered_ = " large-centered "
-- | The @small-uncentered@ class (padded for 'mconcat'-style joining).
small_uncentered_ :: T.Text
small_uncentered_ = " small-uncentered "
-- | The @medium-uncentered@ class (padded for 'mconcat'-style joining).
medium_uncentered_ :: T.Text
medium_uncentered_ = " medium-uncentered "
-- | The @large-uncentered@ class (padded for 'mconcat'-style joining).
large_uncentered_ :: T.Text
large_uncentered_ = " large-uncentered "
-- | @small-push-/n/@ class (padded for 'mconcat'-style joining).
small_push_ :: Int -> T.Text
small_push_ n = T.pack (" small-push-" ++ show n ++ " ")
-- | @medium-push-/n/@ class.
medium_push_ :: Int -> T.Text
medium_push_ n = T.pack (" medium-push-" ++ show n ++ " ")
-- | @large-push-/n/@ class.
large_push_ :: Int -> T.Text
large_push_ n = T.pack (" large-push-" ++ show n ++ " ")
-- | @small-pull-/n/@ class.
small_pull_ :: Int -> T.Text
small_pull_ n = T.pack (" small-pull-" ++ show n ++ " ")
-- | @medium-pull-/n/@ class.
medium_pull_ :: Int -> T.Text
medium_pull_ n = T.pack (" medium-pull-" ++ show n ++ " ")
-- | @large-pull-/n/@ class.
large_pull_ :: Int -> T.Text
large_pull_ n = T.pack (" large-pull-" ++ show n ++ " ")
-- | The @small-reset-order@ class (padded for 'mconcat'-style joining).
small_reset_order_ :: T.Text
small_reset_order_ = " small-reset-order "
-- | The @medium-reset-order@ class (padded for 'mconcat'-style joining).
medium_reset_order_ :: T.Text
medium_reset_order_ = " medium-reset-order "
-- | The @large-reset-order@ class (padded for 'mconcat'-style joining).
large_reset_order_ :: T.Text
large_reset_order_ = " large-reset-order "
| athanclark/lucid-foundation | src/Lucid/Foundation/Structure/Grid.hs | bsd-3-clause | 3,125 | 0 | 9 | 784 | 711 | 393 | 318 | 63 | 1 |
{-# LANGUAGE GADTs #-}
-------------------------------------------------------------------------------
-- |
-- Module : Control.Monad.CC.Seq
-- Copyright : (c) R. Kent Dybvig, Simon L. Peyton Jones and Amr Sabry
-- License : MIT
--
-- Maintainer : Dan Doel
-- Stability : Experimental
-- Portability : Non-portable (generalized algebraic datatypes)
--
-- A monadic treatment of delimited continuations.
--
-- Adapted from the paper
-- /A Monadic Framework for Delimited Continuations/,
-- by R. Kent Dybvig, Simon Peyton Jones and Amr Sabry
-- (<http://www.cs.indiana.edu/~sabry/papers/monadicDC.pdf>)
--
-- This module implements the generalized sequence type used as a stack of
-- frames representation of the delimited continuations.
module Control.Monad.CC.Seq (
-- * Sequence datatype
Seq(..),
-- * Sub-sequences
SubSeq,
appendSubSeq,
pushSeq,
splitSeq,
inSeq,
) where
import Control.Monad.CC.Prompt
-- | This is a generalized sequence datatype, parameterized by three types:
-- seg : A constructor for segments of the sequence.
--
-- ans : the type resulting from applying all the segments of the sequence.
-- Also used as a region parameter.
--
-- a : The type expected as input to the sequence of segments.
data Seq seg ans a where
    -- | The empty sequence: its input type is the final answer type.
    EmptyS :: Seq seg ans ans
    -- | Push a prompt (delimiter) frame; the input type is unchanged.
    PushP :: Prompt ans a -> Seq seg ans a -> Seq seg ans a
    -- | Push an ordinary segment taking @a@ to @b@ in front of a
    -- sequence expecting @b@.
    PushSeg :: seg ans a b -> Seq seg ans b -> Seq seg ans a
-- | A type representing a sub-sequence, which may be appended to a sequence
-- of appropriate type. It represents a sequence that takes values of type
-- a to values of type b, and may be pushed onto a sequence that takes values
-- of type b to values of type ans.
type SubSeq seg ans a b = Seq seg ans b -> Seq seg ans a
-- | The null sub-sequence
-- (a 'SubSeq' is a function between sequences, so the unit is 'id').
emptySubSeq :: SubSeq seg ans a a
emptySubSeq = id
-- | Concatenate two subsequences
-- (function composition, by the same representation).
appendSubSeq :: SubSeq seg ans a b -> SubSeq seg ans b c -> SubSeq seg ans a c
appendSubSeq = (.)
-- | Push a sub-sequence onto the front of a sequence
-- (plain application of the sub-sequence to the tail).
pushSeq :: SubSeq seg ans a b -> Seq seg ans b -> Seq seg ans a
pushSeq = ($)
-- | Splits a sequence at the given prompt into a sub-sequence, and
-- the rest of the sequence
splitSeq :: Prompt ans b -> Seq seg ans a -> (SubSeq seg ans a b, Seq seg ans b)
-- Partial: callers must ensure the prompt is present ('inSeq' can check).
splitSeq _ EmptyS = error "Prompt was not found on the stack."
splitSeq p (PushP p' sk) =
    case eqPrompt p' p of
      -- Found the delimiter: nothing above it, so the sub-sequence is empty.
      EQU -> (emptySubSeq, sk)
      -- A different prompt: keep it in the captured sub-sequence and recurse.
      NEQ -> case splitSeq p sk of
               (subk, sk') -> (appendSubSeq (PushP p') subk, sk')
-- Ordinary segments are always captured into the sub-sequence.
splitSeq p (PushSeg seg sk) =
    case splitSeq p sk of
      (subk, sk') -> (appendSubSeq (PushSeg seg) subk, sk')
-- | Test whether the given prompt occurs anywhere in the sequence.
inSeq :: Prompt ans b -> Seq seg ans a -> Bool
inSeq _ EmptyS           = False
inSeq p (PushSeg _ rest) = inSeq p rest
inSeq p (PushP q rest)   =
    case eqPrompt q p of
      EQU -> True
      NEQ -> inSeq p rest
| vito/hummus | src/Control/Monad/CC/Seq.hs | bsd-3-clause | 2,930 | 0 | 14 | 713 | 601 | 331 | 270 | 37 | 2 |
{-# LANGUAGE TemplateHaskell #-}
module Codegen.TigerSymbolTable where
import Tiger.TigerLanguage
import qualified LLVM.General.AST as AST
import qualified LLVM.General.AST.Type as Type
import qualified LLVM.General.AST.Constant as Constant
import Control.Lens
import qualified Data.Map as Map
import Data.Map.Lens
-- Type table: Assuming there is only one scope of types
-- report error if one type shadows the other
-- | Type table: a stack of scopes mapping a type name to its LLVM type
-- (the comment above notes all types are expected to live in one scope).
type TypeTable = [Map.Map Symbol Type.Type]
-- symbol table
-- | Symbol table: a stack of scopes, innermost first — 'lookupST'
-- searches head-to-tail.
type SymbolTable = [Map.Map Symbol AST.Operand]
-- | Look a symbol up in the symbol table, searching the innermost scope
-- first and then the enclosing scopes. Calls 'error' (same message as
-- before) when the symbol is not bound in any scope.
--
-- Rewritten to pattern-match on the scope list directly: the old version
-- used partial 'head'/'tail' and bound the wildcard to the misleading
-- name @otherwise@ inside a @case@.
lookupST :: SymbolTable -> Symbol -> AST.Operand
lookupST [] s = error $ "Local variable not in scope: " ++ show s
lookupST (scope:outer) s =
  case scope ^. at s of
    Just op -> op
    Nothing -> lookupST outer s
-- Name Table
-- | Per-symbol counter table.
-- NOTE(review): presumably maps a base name to its next fresh-name
-- suffix — confirm against the code generator that consumes it.
type Names = Map.Map Symbol Int
-- | An empty name table.
emptyNames :: Names
emptyNames = Map.empty
-- TODO: add more base types.
-- NOTE(review): the two bindings below lack type signatures; they look
-- like association lists of (name, LLVM type) — add signatures once the
-- concrete 'Symbol' representation is confirmed.
baseVarType = [("int", Type.i32)]
baseFunType = []
-- | Initial symbol table: a single scope containing @str@, a global
-- reference of type @i8*@ named \"str\".
emptySymbolTable :: SymbolTable
emptySymbolTable = [Map.fromList [("str", AST.ConstantOperand $ Constant.GlobalReference (Type.ptr Type.i8) (AST.Name "str"))]]
-- | Initial type table: a single scope with the base type @int@ (i32).
initTypeTable :: TypeTable
initTypeTable = [Map.fromList [("int", Type.i32)]]
-- Function Table
-- | Function table: a stack of scopes from function symbol to LLVM type.
type FuncTable = [Map.Map Symbol Type.Type]
| lialan/TigerCompiler | app/Codegen/TigerSymbolTable.hs | bsd-3-clause | 1,262 | 0 | 13 | 224 | 352 | 203 | 149 | 27 | 3 |
{-# LANGUAGE QuasiQuotes #-}
module Init
( Init.createInstance
, Init.createDevice
, PhysicalDeviceInfo(..)
, createVMA
, createCommandPools
) where
import Control.Applicative
import Control.Monad.IO.Class
import Control.Monad.Trans.Maybe ( MaybeT(..) )
import Control.Monad.Trans.Resource
import qualified Data.ByteString as BS
import Data.Foldable ( for_
, traverse_
)
import qualified Data.Vector as V
import Data.Vector ( Vector )
import Data.Word
import GHC.IO.Exception ( IOErrorType(NoSuchThing)
, IOException(IOError)
)
import HasVulkan
import MonadVulkan ( Queues(..)
, RTInfo(..)
, checkCommands
)
import qualified SDL.Video as SDL
import qualified SDL.Video.Vulkan as SDL
import Say
import UnliftIO.Exception
import Vulkan.CStruct.Extends
import Vulkan.Core10 as Vk
hiding ( withBuffer
, withImage
)
import Vulkan.Core11 ( pattern API_VERSION_1_1 )
import Vulkan.Core12.Promoted_From_VK_KHR_buffer_device_address
import Vulkan.Core12.Promoted_From_VK_KHR_timeline_semaphore
( PhysicalDeviceTimelineSemaphoreFeatures(..)
)
import Vulkan.Dynamic ( DeviceCmds
( DeviceCmds
, pVkGetDeviceProcAddr
)
, InstanceCmds
( InstanceCmds
, pVkGetInstanceProcAddr
)
)
import Vulkan.Extensions.VK_KHR_acceleration_structure
import Vulkan.Extensions.VK_KHR_get_physical_device_properties2
import Vulkan.Extensions.VK_KHR_ray_tracing_pipeline
import Vulkan.Extensions.VK_KHR_surface
import Vulkan.Requirement
import Vulkan.Utils.Initialization
import Vulkan.Utils.QueueAssignment
import Vulkan.Utils.Requirements
import Vulkan.Utils.Requirements.TH ( reqs )
import Vulkan.Zero
import VulkanMemoryAllocator ( Allocator
, AllocatorCreateFlagBits(..)
, AllocatorCreateInfo(..)
, VulkanFunctions(..)
, vkGetInstanceProcAddr
, withAllocator
)
import Window
import Foreign.Ptr (castFunPtr)
-- | The Vulkan API version requested for the instance and handed on to
-- the VMA allocator in 'createVMA'.
myApiVersion :: Word32
myApiVersion = API_VERSION_1_1
----------------------------------------------------------------
-- Instance Creation
----------------------------------------------------------------
-- | Create an instance with a debug messenger
createInstance :: MonadResource m => SDL.Window -> m Instance
createInstance win = do
  -- Instance extensions SDL needs so it can later create a surface for
  -- this window.
  windowExtensions <-
    liftIO $ traverse BS.packCString =<< SDL.vkGetInstanceExtensions win
  let createInfo = zero
        { applicationInfo = Just zero { applicationName = Nothing
                                      , apiVersion = myApiVersion
                                      }
        }
      -- Require each extension by name; 'minBound' requests no minimum
      -- spec version for the extension.
      requirements =
        (\n -> RequireInstanceExtension Nothing n minBound)
          <$> ( KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME
              : windowExtensions
              )
  createInstanceFromRequirements requirements [] createInfo
----------------------------------------------------------------
-- Device creation
----------------------------------------------------------------
-- TODO: check VkPhysicalDeviceBufferDeviceAddressFeatures::bufferDeviceAddress.
-- | Pick a suitable physical device, create the logical device, its
-- queues and the window surface. The surface is created first because
-- device selection checks present support against it.
createDevice
  :: forall m
   . (MonadResource m)
  => Instance
  -> SDL.Window
  -> m
       ( PhysicalDevice
       , PhysicalDeviceInfo
       , Device
       , Queues (QueueFamilyIndex, Queue)
       , SurfaceKHR
       )
createDevice inst win = do
  (_ , surf) <- createSurface inst win
  -- Choose the highest-scoring device that satisfies 'physicalDeviceInfo';
  -- fail with an IOError when none qualifies.
  ((pdi, SomeStruct dci), phys) <-
    maybe (noSuchThing "Unable to find appropriate PhysicalDevice") pure
      =<< pickPhysicalDevice inst (physicalDeviceInfo surf) (pdiScore . fst)
  sayErr . ("Using device: " <>) =<< physicalDeviceName phys
  (_, dev) <- withDevice phys dci Nothing allocate
  -- Make sure every Vulkan command we rely on was actually loaded.
  requireCommands inst dev
  queues <- liftIO $ pdiGetQueues pdi dev
  pure (phys, pdi, dev, queues, surf)
-- | Device extensions and features this example needs — swapchain,
-- timeline semaphores, and the ray-tracing stack (incl. buffer device
-- address) — expressed with the requirements quasiquoter.
deviceRequirements :: [DeviceRequirement]
deviceRequirements = [reqs|
    VK_KHR_swapchain
    VK_KHR_timeline_semaphore
    PhysicalDeviceTimelineSemaphoreFeatures.timelineSemaphore
    -- Ray tracing
    1.2.162
    PhysicalDeviceRayTracingPipelineFeaturesKHR.rayTracingPipeline
    PhysicalDeviceAccelerationStructureFeaturesKHR.accelerationStructure
    PhysicalDeviceBufferDeviceAddressFeatures.bufferDeviceAddress
    VK_KHR_ray_tracing_pipeline
    VK_KHR_acceleration_structure
    VK_EXT_descriptor_indexing
    VK_KHR_buffer_device_address
    VK_KHR_deferred_host_operations
    VK_KHR_get_memory_requirements2
    VK_KHR_maintenance3
    VK_KHR_pipeline_library
  |]
----------------------------------------------------------------
-- Physical device tools
----------------------------------------------------------------
-- | Everything we remember about a candidate physical device.
data PhysicalDeviceInfo = PhysicalDeviceInfo
  { pdiTotalMemory :: Word64
    -- ^ Total size of all memory heaps; used by 'pdiScore' to rank devices.
  , pdiRTInfo :: RTInfo
    -- ^ The relevant information from PhysicalDeviceProperties2KHR
  , pdiQueueCreateInfos :: Vector (DeviceQueueCreateInfo '[])
    -- ^ Queue create infos produced by queue assignment.
  , pdiGetQueues :: Device -> IO (Queues (QueueFamilyIndex, Queue))
    -- ^ Fetch the actual queues once the logical device has been created.
  }
-- | Device ranking used by 'pickPhysicalDevice': more memory wins.
pdiScore :: PhysicalDeviceInfo -> Word64
pdiScore = pdiTotalMemory
-- | Check a physical device against our requirements and, when suitable,
-- gather the info needed to create the logical device. Returns 'Nothing'
-- when the device fails the requirements or queue assignment.
--
-- The record-wildcard names bound below ('pdiQueueCreateInfos',
-- 'pdiGetQueues', 'pdiRTInfo', 'pdiTotalMemory') are exactly the fields
-- of 'PhysicalDeviceInfo'.
physicalDeviceInfo
  :: MonadIO m
  => SurfaceKHR
  -> PhysicalDevice
  -> m (Maybe (PhysicalDeviceInfo, SomeStruct DeviceCreateInfo))
physicalDeviceInfo surf phys = runMaybeT $ do
  --
  -- Check device requirements
  --
  (mbDCI, rs, os) <- checkDeviceRequirements deviceRequirements [] phys zero
  -- Report any missing features
  traverse_ sayErrString (requirementReport rs os)
  -- Fail if we didn't meet requirements
  SomeStruct dciNoQueues <- maybe empty pure mbDCI
  --
  -- Assign queues
  --
  (pdiQueueCreateInfos, pdiGetQueues) <- MaybeT
    $ assignQueues phys (queueRequirements phys surf)
  let dci =
        dciNoQueues { queueCreateInfos = SomeStruct <$> pdiQueueCreateInfos }
  --
  -- Query properties
  --
  pdiRTInfo <- getDeviceRTProps phys
  --
  -- We'll use the amount of memory to pick the "best" device
  --
  pdiTotalMemory <- do
    heaps <- memoryHeaps <$> getPhysicalDeviceMemoryProperties phys
    pure $ sum ((size :: MemoryHeap -> DeviceSize) <$> heaps)
  pure (PhysicalDeviceInfo { .. }, SomeStruct dci)
-- | Requirements for a 'Queue' which has graphics suppor and can present to
-- the specified surface.
queueRequirements
  :: MonadIO m => PhysicalDevice -> SurfaceKHR -> Queues (QueueSpec m)
queueRequirements phys surf = Queues (QueueSpec 1 isGraphicsPresentQueue)
 where
  -- A family qualifies when it both supports graphics and can present
  -- to the given surface.
  isGraphicsPresentQueue queueFamilyIndex queueFamilyProperties =
    pure (isGraphicsQueueFamily queueFamilyProperties)
      <&&> isPresentQueueFamily phys surf queueFamilyIndex
----------------------------------------------------------------
-- Physical device tools
----------------------------------------------------------------
-- | Query the ray-tracing pipeline properties we need later: the shader
-- group handle size and base alignment.
getDeviceRTProps :: MonadIO m => PhysicalDevice -> m RTInfo
getDeviceRTProps phys = do
  props <- getPhysicalDeviceProperties2KHR phys
  -- Pull the ray-tracing properties struct out of the extension chain.
  let _ ::& PhysicalDeviceRayTracingPipelinePropertiesKHR {..} :& () = props
  pure RTInfo { rtiShaderGroupHandleSize = shaderGroupHandleSize
              , rtiShaderGroupBaseAlignment = shaderGroupBaseAlignment
              }
----------------------------------------------------------------
-- VulkanMemoryAllocator
----------------------------------------------------------------
-- | Create a VulkanMemoryAllocator tied to this instance/device pair,
-- with buffer-device-address support enabled.
createVMA
  :: MonadResource m => Instance -> PhysicalDevice -> Device -> m Allocator
createVMA inst phys dev =
  snd
    <$> withAllocator
          zero
            { flags = ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT
            , physicalDevice = physicalDeviceHandle phys
            , device = deviceHandle dev
            , instance' = instanceHandle inst
            , vulkanApiVersion = myApiVersion
            -- VMA needs raw loader function pointers; pull them out of
            -- the instance and device command tables and cast.
            , vulkanFunctions = Just $ case inst of
                Instance _ InstanceCmds {..} -> case dev of
                  Device _ DeviceCmds {..} -> zero
                    { vkGetInstanceProcAddr = castFunPtr pVkGetInstanceProcAddr
                    , vkGetDeviceProcAddr = castFunPtr pVkGetDeviceProcAddr
                    }
            }
          allocate
----------------------------------------------------------------
-- Command pools
----------------------------------------------------------------
-- | Create several command pools for a queue family
-- | Create several command pools for a queue family
-- (all pools target the same family; creation flags are 'zero').
createCommandPools
  :: MonadResource m
  => Device
  -> Int
     -- ^ Number of pools to create
  -> QueueFamilyIndex
     -- ^ Queue family for the pools
  -> m (Vector CommandPool)
createCommandPools dev n (QueueFamilyIndex queueFamilyIndex) = do
  let commandPoolCreateInfo :: CommandPoolCreateInfo
      commandPoolCreateInfo = zero { queueFamilyIndex = queueFamilyIndex }
  V.replicateM
    n
    ( snd
        <$> withCommandPool dev
                            commandPoolCreateInfo
                            noAllocationCallbacks
                            allocate
    )
----------------------------------------------------------------
-- Utils
----------------------------------------------------------------
-- | Verify every required Vulkan command was loaded; log each missing
-- function pointer and abort when any are absent.
requireCommands :: MonadIO f => Instance -> Device -> f ()
requireCommands inst dev =
  case checkCommands inst dev of
    []      -> pure ()
    missing -> do
      traverse_ (\n -> sayErr ("Failed to load function pointer for: " <> n))
                missing
      noSuchThing "Missing commands"
-- | Abort with an 'IOError' of type 'NoSuchThing' carrying the given
-- message (no handle, location, or errno attached).
noSuchThing :: MonadIO m => String -> m a
noSuchThing message =
  liftIO . throwIO $ IOError Nothing NoSuchThing "" message Nothing Nothing
-- | Lift '(&&)' over an 'Applicative'. Note both effects always run;
-- only the resulting 'Bool' is combined.
(<&&>) :: Applicative f => f Bool -> f Bool -> f Bool
fa <&&> fb = (&&) <$> fa <*> fb
| expipiplus1/vulkan | examples/rays/Init.hs | bsd-3-clause | 11,010 | 0 | 18 | 3,551 | 1,730 | 959 | 771 | -1 | -1 |
{-# OPTIONS #-}
-----------------------------------------------------------------------------
-- |
-- Module : Language.Python.Version2.Parser
-- Copyright : (c) 2009 Bernie Pope
-- License : BSD-style
-- Maintainer : bjpop@csse.unimelb.edu.au
-- Stability : experimental
-- Portability : ghc
--
-- A parser for Python version 2.x programs. Parsers are provided for
-- modules, statements, and expressions. The parsers produce comment tokens
-- in addition to the abstract syntax tree.
--
-- See:
--
-- * <http://www.python.org/doc/2.6/reference/index.html> for an overview of the language.
--
-- * <http://www.python.org/doc/2.6/reference/grammar.html> for the full grammar.
--
-- * <http://www.python.org/doc/2.6/reference/toplevel_components.html> for a description of
-- the various Python top-levels, which correspond to the parsers provided here.
-----------------------------------------------------------------------------
module Language.Python.Version2.Parser (
-- * Parsing modules
parseModule,
-- * Parsing statements
parseStmt,
-- * Parsing expressions
parseExpr) where
import Language.Python.Version2.Parser.Parser (parseFileInput, parseSingleInput, parseEval)
import Language.Python.Version2.Parser.Lexer (initStartCodeStack)
import Language.Python.Common.AST (ModuleSpan, StatementSpan, ExprSpan)
import Language.Python.Common.Token (Token)
import Language.Python.Common.SrcLocation (initialSrcLocation)
import Language.Python.Common.ParserMonad (execParser, execParserKeepComments, ParseError, initialState)
-- | Parse a whole Python source file. Return comments in addition to the parsed module.
parseModule :: String -- ^ The input stream (python module source code).
            -> String -- ^ The name of the python source (filename or input device).
            -> Either ParseError (ModuleSpan, [Token]) -- ^ An error or the abstract syntax tree (AST) of the python module and comment tokens.
parseModule input srcName = execParserKeepComments parseFileInput parserState
   where
   -- Start at the beginning of the named source with the lexer's initial
   -- start-code stack.
   parserState = initialState (initialSrcLocation srcName) input initStartCodeStack
-- | Parse one compound statement, or a sequence of simple statements. Generally used for interactive input, such as from the command line of an interpreter. Return comments in addition to the parsed statements.
parseStmt :: String -- ^ The input stream (python statement source code).
          -> String -- ^ The name of the python source (filename or input device).
          -> Either ParseError ([StatementSpan], [Token]) -- ^ An error or maybe the abstract syntax tree (AST) of zero or more python statements, plus comments.
parseStmt input srcName = execParserKeepComments parseSingleInput parserState
   where
   -- Same initial parser state construction as 'parseModule'.
   parserState = initialState (initialSrcLocation srcName) input initStartCodeStack
-- | Parse an expression. Generally used as input for the \'eval\' primitive. Return comments in addition to the parsed expression.
parseExpr :: String -- ^ The input stream (python statement source code).
          -> String -- ^ The name of the python source (filename or input device).
          -> Either ParseError (ExprSpan, [Token]) -- ^ An error or maybe the abstract syntax tree (AST) of the python expression, plus comment tokens.
parseExpr input srcName = execParserKeepComments parseEval parserState
   where
   -- Same initial parser state construction as 'parseModule'.
   parserState = initialState (initialSrcLocation srcName) input initStartCodeStack
| jml/language-python | src/Language/Python/Version2/Parser.hs | bsd-3-clause | 3,452 | 0 | 9 | 550 | 347 | 216 | 131 | 32 | 1 |
{-|
Module : Data.Filterable
Description : Generalization of filter function.
Copyright : (c) Anton Gushcha, 2015-2016
Oganyan Levon, 2016
License : BSD3
Maintainer : ncrashed@gmail.com
Stability : experimental
Portability : POSIX
Defines generic filter utilities for collections.
-}
module Data.Filterable(
Filterable(..)
, KeyHashMap(..)
, cutMaybes
) where
import Control.Monad (filterM)
import Data.Hashable
import Data.Maybe
import GHC.Exts
import qualified Data.Foldable as F
import qualified Data.HashMap.Strict as H
import qualified Data.Sequence as S
import qualified Data.Map.Strict as M
-- | Generic filter for collections
class Filterable f where
  -- | Specific constraint for instance
  -- (defaults to the empty constraint @()@ when none is needed).
  type FilterConstraint f o :: Constraint
  type FilterConstraint f o = ()
  -- | Test collection for emptiness
  fNull :: FilterConstraint f a => f a -> Bool
  -- | Filter function for collection
  -- (keeps exactly the elements satisfying the predicate).
  fFilter :: FilterConstraint f a => (a -> Bool) -> f a -> f a
  -- | Monad version of filter
  fFilterM :: (FilterConstraint f a, Monad m) => (a -> m Bool) -> f a -> m (f a)
-- | Lists delegate directly to the Prelude / 'Control.Monad' functions.
instance Filterable [] where
  fNull = null
  fFilter = filter
  fFilterM = filterM
  {-# INLINE fNull #-}
  {-# INLINE fFilter #-}
  {-# INLINE fFilterM #-}
-- | Sequences: 'fFilterM' folds from the left, appending kept elements,
-- so element order (and effect order) is preserved; the accumulator is
-- forced at each step with @($!)@.
instance Filterable S.Seq where
  fNull = S.null
  fFilter = S.filter
  fFilterM p = F.foldlM (\xs x -> do
    f <- p x
    return $! if f then xs S.|> x else xs) S.empty
  {-# INLINE fNull #-}
  {-# INLINE fFilter #-}
  {-# INLINE fFilterM #-}
-- | Wrapper around HashMap to Filterable instance over keys
newtype KeyHashMap v k = KeyHashMap { unKeyHashMap :: H.HashMap k v }
-- | Filtering a 'KeyHashMap' filters by /key/; the values are untouched.
instance Filterable (KeyHashMap v) where
  type FilterConstraint (KeyHashMap v) o = (Eq o, Hashable o)
  fNull = H.null . unKeyHashMap
  fFilter p (KeyHashMap m) = KeyHashMap $ H.filterWithKey (\k _ -> p k) m
  -- The monadic predicate is applied to each key; effect order follows
  -- 'H.foldlWithKey'' (hash maps have no specified key order).
  fFilterM p (KeyHashMap m) = fmap KeyHashMap $ H.foldlWithKey' (\mxs k x -> do
    xs <- mxs
    f <- p k
    return $! if f then H.insert k x xs else xs) (return H.empty) m
  {-# INLINE fNull #-}
  {-# INLINE fFilter #-}
  {-# INLINE fFilterM #-}
-- | Plain hash maps filter by /value/; keys are kept unchanged.
instance (Eq k, Hashable k) => Filterable (H.HashMap k) where
  fNull = H.null
  fFilter = H.filter
  -- Monadic predicate applied to each value, rebuilding the kept entries.
  fFilterM p = H.foldlWithKey' (\mxs k x -> do
    xs <- mxs
    f <- p x
    return $! if f then H.insert k x xs else xs) (return H.empty)
  {-# INLINE fNull #-}
  {-# INLINE fFilter #-}
  {-# INLINE fFilterM #-}
-- | Ordered maps filter by /value/; keys are kept unchanged and effects
-- run in ascending key order (per 'M.foldlWithKey'').
instance Ord k => Filterable (M.Map k) where
  fNull = M.null
  fFilter = M.filter
  fFilterM p = M.foldlWithKey' (\mxs k x -> do
    xs <- mxs
    f <- p x
    return $! if f then M.insert k x xs else xs) (return M.empty)
  {-# INLINE fNull #-}
  {-# INLINE fFilter #-}
  {-# INLINE fFilterM #-}
-- | Throw away Nothings from collection
-- ('fromJust' is safe here: after @fFilter isJust@ every remaining
-- element is a 'Just').
cutMaybes :: (Functor f, Filterable f, FilterConstraint f (Maybe a)) => f (Maybe a) -> f a
cutMaybes = fmap fromJust . fFilter isJust
| Teaspot-Studio/gore-and-ash | src/Data/Filterable.hs | bsd-3-clause | 2,902 | 0 | 15 | 660 | 860 | 463 | 397 | -1 | -1 |
class X where
x :: Int
<ESC>a{- |
Another Class
-}
| itchyny/vim-haskell-indent | test/comment/top_level_block_comment.in.hs | mit | 51 | 1 | 7 | 11 | 24 | 11 | 13 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.ElasticBeanstalk.DeleteConfigurationTemplate
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes the specified configuration template.
--
-- When you launch an environment using a configuration template, the
-- environment gets a copy of the template. You can delete or modify the
-- environment\'s copy of the template without affecting the running
-- environment.
--
-- /See:/ <http://docs.aws.amazon.com/elasticbeanstalk/latest/api/API_DeleteConfigurationTemplate.html AWS API Reference> for DeleteConfigurationTemplate.
module Network.AWS.ElasticBeanstalk.DeleteConfigurationTemplate
(
-- * Creating a Request
deleteConfigurationTemplate
, DeleteConfigurationTemplate
-- * Request Lenses
, dctApplicationName
, dctTemplateName
-- * Destructuring the Response
, deleteConfigurationTemplateResponse
, DeleteConfigurationTemplateResponse
) where
import Network.AWS.ElasticBeanstalk.Types
import Network.AWS.ElasticBeanstalk.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | This documentation target is not reported in the API reference.
--
-- /See:/ 'deleteConfigurationTemplate' smart constructor.
-- Both fields are required by the API, hence strict '!Text' fields with
-- no 'Maybe' wrapper. (Generated code; edit the generator, not this file.)
data DeleteConfigurationTemplate = DeleteConfigurationTemplate'
    { _dctApplicationName :: !Text
    , _dctTemplateName :: !Text
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DeleteConfigurationTemplate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dctApplicationName'
--
-- * 'dctTemplateName'
deleteConfigurationTemplate
    :: Text -- ^ 'dctApplicationName'
    -> Text -- ^ 'dctTemplateName'
    -> DeleteConfigurationTemplate
deleteConfigurationTemplate pApplicationName_ pTemplateName_ =
    DeleteConfigurationTemplate'
    { _dctApplicationName = pApplicationName_
    , _dctTemplateName = pTemplateName_
    }
-- | The name of the application to delete the configuration template from.
dctApplicationName :: Lens' DeleteConfigurationTemplate Text
dctApplicationName = lens _dctApplicationName (\ s a -> s{_dctApplicationName = a});
-- | The name of the configuration template to delete.
dctTemplateName :: Lens' DeleteConfigurationTemplate Text
dctTemplateName = lens _dctTemplateName (\ s a -> s{_dctTemplateName = a});
-- The request is sent as a form-encoded query POST ('postQuery') against
-- the Elastic Beanstalk endpoint; a successful call has an empty body,
-- hence 'receiveNull'.
instance AWSRequest DeleteConfigurationTemplate where
        type Rs DeleteConfigurationTemplate =
             DeleteConfigurationTemplateResponse
        request = postQuery elasticBeanstalk
        response
          = receiveNull DeleteConfigurationTemplateResponse'
instance ToHeaders DeleteConfigurationTemplate where
        toHeaders = const mempty
instance ToPath DeleteConfigurationTemplate where
        toPath = const "/"
-- All parameters travel in the query string for this protocol.
instance ToQuery DeleteConfigurationTemplate where
        toQuery DeleteConfigurationTemplate'{..}
          = mconcat
              ["Action" =:
                 ("DeleteConfigurationTemplate" :: ByteString),
               "Version" =: ("2010-12-01" :: ByteString),
               "ApplicationName" =: _dctApplicationName,
               "TemplateName" =: _dctTemplateName]
-- | /See:/ 'deleteConfigurationTemplateResponse' smart constructor.
data DeleteConfigurationTemplateResponse =
    DeleteConfigurationTemplateResponse'
    deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DeleteConfigurationTemplateResponse' with the minimum fields required to make a request.
--
deleteConfigurationTemplateResponse
    :: DeleteConfigurationTemplateResponse
deleteConfigurationTemplateResponse = DeleteConfigurationTemplateResponse'
| fmapfmapfmap/amazonka | amazonka-elasticbeanstalk/gen/Network/AWS/ElasticBeanstalk/DeleteConfigurationTemplate.hs | mpl-2.0 | 4,310 | 0 | 9 | 772 | 445 | 272 | 173 | 65 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.AutoScaling.DeleteNotificationConfiguration
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes the specified notification.
--
-- /See:/ <http://docs.aws.amazon.com/AutoScaling/latest/APIReference/API_DeleteNotificationConfiguration.html AWS API Reference> for DeleteNotificationConfiguration.
module Network.AWS.AutoScaling.DeleteNotificationConfiguration
(
-- * Creating a Request
deleteNotificationConfiguration
, DeleteNotificationConfiguration
-- * Request Lenses
, dncAutoScalingGroupName
, dncTopicARN
-- * Destructuring the Response
, deleteNotificationConfigurationResponse
, DeleteNotificationConfigurationResponse
) where
import Network.AWS.AutoScaling.Types
import Network.AWS.AutoScaling.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | /See:/ 'deleteNotificationConfiguration' smart constructor.
-- Both fields are required by the API, hence strict '!Text' fields with
-- no 'Maybe' wrapper. (Generated code; edit the generator, not this file.)
data DeleteNotificationConfiguration = DeleteNotificationConfiguration'
    { _dncAutoScalingGroupName :: !Text
    , _dncTopicARN :: !Text
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DeleteNotificationConfiguration' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dncAutoScalingGroupName'
--
-- * 'dncTopicARN'
deleteNotificationConfiguration
    :: Text -- ^ 'dncAutoScalingGroupName'
    -> Text -- ^ 'dncTopicARN'
    -> DeleteNotificationConfiguration
deleteNotificationConfiguration pAutoScalingGroupName_ pTopicARN_ =
    DeleteNotificationConfiguration'
    { _dncAutoScalingGroupName = pAutoScalingGroupName_
    , _dncTopicARN = pTopicARN_
    }
-- | The name of the Auto Scaling group.
dncAutoScalingGroupName :: Lens' DeleteNotificationConfiguration Text
dncAutoScalingGroupName = lens _dncAutoScalingGroupName (\ s a -> s{_dncAutoScalingGroupName = a});
-- | The Amazon Resource Name (ARN) of the Amazon Simple Notification Service
-- (SNS) topic.
dncTopicARN :: Lens' DeleteNotificationConfiguration Text
dncTopicARN = lens _dncTopicARN (\ s a -> s{_dncTopicARN = a});
-- Query-string POST against the Auto Scaling endpoint; the empty success
-- body is mapped to the unit-like response via 'receiveNull'.
instance AWSRequest DeleteNotificationConfiguration
         where
        type Rs DeleteNotificationConfiguration =
             DeleteNotificationConfigurationResponse
        request = postQuery autoScaling
        response
          = receiveNull
              DeleteNotificationConfigurationResponse'
instance ToHeaders DeleteNotificationConfiguration
         where
        toHeaders = const mempty
instance ToPath DeleteNotificationConfiguration where
        toPath = const "/"
-- All parameters travel in the query string for this protocol.
instance ToQuery DeleteNotificationConfiguration
         where
        toQuery DeleteNotificationConfiguration'{..}
          = mconcat
              ["Action" =:
                 ("DeleteNotificationConfiguration" :: ByteString),
               "Version" =: ("2011-01-01" :: ByteString),
               "AutoScalingGroupName" =: _dncAutoScalingGroupName,
               "TopicARN" =: _dncTopicARN]
-- | /See:/ 'deleteNotificationConfigurationResponse' smart constructor.
data DeleteNotificationConfigurationResponse =
    DeleteNotificationConfigurationResponse'
    deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DeleteNotificationConfigurationResponse' with the minimum fields required to make a request.
--
deleteNotificationConfigurationResponse
    :: DeleteNotificationConfigurationResponse
deleteNotificationConfigurationResponse =
    DeleteNotificationConfigurationResponse'
| fmapfmapfmap/amazonka | amazonka-autoscaling/gen/Network/AWS/AutoScaling/DeleteNotificationConfiguration.hs | mpl-2.0 | 4,189 | 0 | 9 | 777 | 439 | 266 | 173 | 67 | 1 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE NamedFieldPuns, RecordWildCards, BangPatterns,
StandaloneDeriving, GeneralizedNewtypeDeriving #-}
module Distribution.Server.Features.EditCabalFiles (
initEditCabalFilesFeature
, diffCabalRevisions
, Change(..)
) where
import Distribution.Server.Framework
import Distribution.Server.Framework.Templating
import Distribution.Server.Features.Users
import Distribution.Server.Features.Core
import Distribution.Server.Packages.Types
import Distribution.Server.Features.Upload
import Distribution.Package
import Distribution.Text (display)
import Distribution.PackageDescription
import Distribution.PackageDescription.Parse
(parsePackageDescription, sourceRepoFieldDescrs)
import Distribution.PackageDescription.Check
import Distribution.ParseUtils
( ParseResult(..), locatedErrorMsg, showPWarning )
import Distribution.Server.Util.Parse (unpackUTF8)
import Distribution.ParseUtils (FieldDescr(..))
import Distribution.Text (Text(..))
import Text.PrettyPrint as Doc
(nest, empty, isEmpty, (<+>), colon, (<>), text, vcat, ($+$), Doc)
import Text.StringTemplate (ToSElem(..))
import Data.List
import qualified Data.Char as Char
import Data.ByteString.Lazy (ByteString)
import qualified Data.Map as Map
import Control.Monad.Error (ErrorT, runErrorT)
import Control.Monad.Writer (MonadWriter(..), Writer, runWriter)
import Data.Time (getCurrentTime)
import qualified Data.ByteString.Lazy.Char8 as BS -- TODO: Verify that we don't need to worry about UTF8
-- | A feature to allow editing cabal files without uploading new tarballs.
--
-- | Load this feature's page templates up front, then return a
-- constructor that wires the feature together once its dependencies
-- (user, core, upload features) are available.
initEditCabalFilesFeature :: ServerEnv
                          -> IO (UserFeature
                              -> CoreFeature
                              -> UploadFeature
                              -> IO HackageFeature)
initEditCabalFilesFeature env@ServerEnv{ serverTemplatesDir,
                                         serverTemplatesMode } = do
    -- Page templates
    templates <- loadTemplates serverTemplatesMode
                   [serverTemplatesDir, serverTemplatesDir </> "EditCabalFile"]
                   ["cabalFileEditPage.html", "cabalFilePublished.html"]
    return $ \user core upload -> do
      let feature = editCabalFilesFeature env templates user core upload
      return feature
-- | The feature proper: a single resource at
-- @/package/:package/:cabal.cabal/edit@ with a GET handler (show the edit
-- form) and a POST handler (review or publish a cabal-file revision).
editCabalFilesFeature :: ServerEnv -> Templates
                      -> UserFeature -> CoreFeature -> UploadFeature
                      -> HackageFeature
editCabalFilesFeature _env templates
                      UserFeature{guardAuthorised}
                      CoreFeature{..}
                      UploadFeature{maintainersGroup, trusteesGroup} =
  (emptyHackageFeature "edit-cabal-files") {
    featureResources =
      [ editCabalFileResource
      ]
  , featureState = []
  , featureReloadFiles = reloadTemplates templates
  }
  where
    CoreResource{..} = coreResource
    editCabalFileResource =
      (resourceAt "/package/:package/:cabal.cabal/edit") {
        resourceDesc = [(GET, "Page to edit package metadata")
                       ,(POST, "Modify the package metadata")],
        resourceGet = [("html", serveEditCabalFileGet)],
        resourcePost = [("html", serveEditCabalFilePost)]
      }
    -- GET: render the package's current cabal file in the edit form,
    -- with the revision number already bumped for the next revision.
    serveEditCabalFileGet :: DynamicPath -> ServerPartE Response
    serveEditCabalFileGet dpath = do
      template <- getTemplate templates "cabalFileEditPage.html"
      pkg <- packageInPath dpath >>= lookupPackageId
      let pkgname = packageName pkg
          pkgid = packageId pkg
      -- check that the cabal name matches the package
      guard (lookup "cabal" dpath == Just (display pkgname))
      ok $ toResponse $ template
        [ "pkgid" $= pkgid
        , "cabalfile" $= insertRevisionField (1 + length (pkgDataOld pkg))
                                             (cabalFileByteString (pkgData pkg))
        ]
    -- POST: diff the submitted cabal file against the current one.
    -- "review" only reports the changes; "publish" additionally records a
    -- new revision when the diff is legal and non-empty.
    serveEditCabalFilePost :: DynamicPath -> ServerPartE Response
    serveEditCabalFilePost dpath = do
      template <- getTemplate templates "cabalFileEditPage.html"
      pkg <- packageInPath dpath >>= lookupPackageId
      let pkgname = packageName pkg
          pkgid = packageId pkg
      -- check that the cabal name matches the package
      guard (lookup "cabal" dpath == Just (display pkgname))
      -- Only package maintainers or Hackage trustees may edit.
      uid <- guardAuthorised [ InGroup (maintainersGroup pkgname)
                             , InGroup trusteesGroup ]
      let oldVersion = cabalFileByteString (pkgData pkg)
      newRevision <- getCabalFile
      shouldPublish <- getPublish
      case diffCabalRevisions pkgid oldVersion newRevision of
        Left errs ->
          responseTemplate template pkgid newRevision
                           shouldPublish [errs] []
        Right changes
          | shouldPublish && not (null changes) -> do
              template' <- getTemplate templates "cabalFilePublished.html"
              time <- liftIO getCurrentTime
              updateAddPackageRevision pkgid (CabalFileText newRevision)
                                       (time, uid)
              ok $ toResponse $ template'
                [ "pkgid" $= pkgid
                , "cabalfile" $= newRevision
                , "changes" $= changes
                ]
          | otherwise ->
              responseTemplate template pkgid newRevision
                               shouldPublish [] changes
      where
        getCabalFile = body (lookBS "cabalfile")
        -- Which submit button was pressed: "review" -> False, "publish" -> True.
        getPublish = body $ (look "review" >> return False) `mplus`
                            (look "publish" >> return True)
    -- Re-render the edit page with any errors and the computed change list.
    responseTemplate :: ([TemplateAttr] -> Template) -> PackageId
                     -> ByteString -> Bool -> [String] -> [Change]
                     -> ServerPartE Response
    responseTemplate template pkgid cabalFile publish errors changes =
      ok $ toResponse $ template
        [ "pkgid" $= pkgid
        , "cabalfile" $= cabalFile
        , "publish" $= publish
        , "errors" $= errors
        , "changes" $= changes
        ]
-- | Render a 'Change' for the template engine as a three-field map.
instance ToSElem Change where
    toSElem (Change whatField fromField toField) =
        toSElem fieldMap
      where
        fieldMap = Map.fromList
          [ ("what", whatField)
          , ("from", fromField)
          , ("to", toField)
          ]
-- | Monad for revision checking: failures short-circuit via 'ErrorT',
-- while accepted differences accumulate as a 'Change' log via 'Writer'.
newtype CheckM a = CheckM { unCheckM :: ErrorT String (Writer [Change]) a }

-- | Run a check: 'Left' carries the first failure message, 'Right' the
-- full list of logged changes.
runCheck :: CheckM () -> Either String [Change]
runCheck c = case runWriter . runErrorT . unCheckM $ c of
               (Left err, _ ) -> Left err
               (Right (), changes) -> Right changes

-- NOTE: pre-AMP style Monad instance; 'fail' reports a check failure in
-- the error channel rather than raising an exception.
instance Monad CheckM where
  return = CheckM . return
  CheckM m >>= f = CheckM (m >>= unCheckM . f)
  fail = CheckM . throwError

-- | One accepted difference between the two revisions.
data Change = Change String String String -- what, from, to
  deriving Show

-- | Record an accepted difference.
logChange :: Change -> CheckM ()
logChange change = CheckM (tell [change])

-- | A check compares the old and new value of one component.
type Check a = a -> a -> CheckM ()
-- | Compare an old and a proposed new cabal file revision for a package,
-- returning either an error message or the list of accepted changes.
diffCabalRevisions :: PackageId -> ByteString -> ByteString
                   -> Either String [Change]
diffCabalRevisions pkgid before after =
    runCheck (checkCabalFileRevision pkgid before after)
-- | Top-level revision check: both revisions must parse, the parsed
-- descriptions may differ only in permitted ways, and the new revision
-- must not introduce new parse warnings or package-check failures.
checkCabalFileRevision :: PackageId -> Check ByteString
checkCabalFileRevision pkgid old new = do
    (pkg, warns) <- parseCabalFile old
    (pkg', warns') <- parseCabalFile new
    checkGenericPackageDescription pkg pkg'
    checkParserWarnings warns warns'
    checkPackageChecks pkg pkg'
  where
    filename = display pkgid ++ ".cabal"

    parseCabalFile fileContent =
      case parsePackageDescription . unpackUTF8 $ fileContent of
        ParseFailed err -> fail (formatErrorMsg (locatedErrorMsg err))
        ParseOk warnings pkg -> return (pkg, warnings)

    formatErrorMsg (Nothing, msg) = msg
    formatErrorMsg (Just n, msg) = "Line " ++ show n ++ ": " ++ msg

    -- Only warnings new relative to the old revision are rejected.
    checkParserWarnings warns warns' =
      case warns' \\ warns of
        [] -> return ()
        newwarns -> fail $ "New parse warning: "
                           ++ unlines (map (showPWarning filename) newwarns)

    -- Likewise for 'checkPackage' results: only new failures count.
    checkPackageChecks pkg pkg' =
      let checks = checkPackage pkg Nothing
          checks' = checkPackage pkg' Nothing
       in case checks' \\ checks of
            [] -> return ()
            newchecks -> fail $ unlines (map explanation newchecks)
-- | Compare the two parsed packages component by component. Sections may
-- not be added or removed; their contents may only change as permitted by
-- the per-section checks.
checkGenericPackageDescription :: Check GenericPackageDescription
checkGenericPackageDescription
  (GenericPackageDescription descrA flagsA libsA exesA testsA benchsA)
  (GenericPackageDescription descrB flagsB libsB exesB testsB benchsB) = do
    checkPackageDescriptions descrA descrB
    checkSame "Sorry, cannot edit the package flags"
              flagsA flagsB
    checkMaybe "Cannot add or remove library sections"
      (checkCondTree checkLibrary) libsA libsB
    checkListAssoc "Cannot add or remove executable sections"
      (checkCondTree checkExecutable) exesA exesB
    checkListAssoc "Cannot add or remove test-suite sections"
      (checkCondTree checkTestSuite) testsA testsB
    checkListAssoc "Cannot add or remove benchmark sections"
      (checkCondTree checkBenchmark) benchsA benchsB
-- | Field-by-field policy for the top-level package description:
-- identifying and build-affecting fields must stay the same, while
-- purely informational fields may change (each change is logged).
checkPackageDescriptions :: Check PackageDescription
checkPackageDescriptions
  (PackageDescription
     packageIdA licenseA licenseFileA
     copyrightA maintainerA authorA stabilityA testedWithA homepageA
     pkgUrlA bugReportsA sourceReposA synopsisA descriptionA
     categoryA customFieldsA _buildDependsA specVersionA buildTypeA
     _libraryA _executablesA _testSuitesA _benchmarksA dataFilesA dataDirA
     extraSrcFilesA extraTmpFilesA extraDocFilesA)
  (PackageDescription
     packageIdB licenseB licenseFileB
     copyrightB maintainerB authorB stabilityB testedWithB homepageB
     pkgUrlB bugReportsB sourceReposB synopsisB descriptionB
     categoryB customFieldsB _buildDependsB specVersionB buildTypeB
     _libraryB _executablesB _testSuitesB _benchmarksB dataFilesB dataDirB
     extraSrcFilesB extraTmpFilesB extraDocFilesB)
  = do
    checkSame "Don't be silly! You can't change the package name!"
              (packageName packageIdA) (packageName packageIdB)
    checkSame "You can't change the package version!"
              (packageVersion packageIdA) (packageVersion packageIdB)
    checkSame "Cannot change the license"
              (licenseA, licenseFileA) (licenseB, licenseFileB)
    changesOk "copyright" id copyrightA copyrightB
    changesOk "maintainer" id maintainerA maintainerB
    changesOk "author" id authorA authorB
    checkSame "The stability field is unused, don't bother changing it."
              stabilityA stabilityB
    checkSame "The tested-with field is unused, don't bother changing it."
              testedWithA testedWithB
    changesOk "homepage" id homepageA homepageB
    checkSame "The package-url field is unused, don't bother changing it."
              pkgUrlA pkgUrlB
    changesOk "bug-reports" id bugReportsA bugReportsB
    changesOkList changesOk "source-repository" (show . ppSourceRepo)
                  sourceReposA sourceReposB
    changesOk "synopsis" id synopsisA synopsisB
    changesOk "description" id descriptionA descriptionB
    changesOk "category" id categoryA categoryB
    checkSame "Cannot change the Cabal spec version"
              specVersionA specVersionB
    checkSame "Cannot change the build-type"
              buildTypeA buildTypeB
    checkSame "Cannot change the data files"
              (dataFilesA, dataDirA) (dataFilesB, dataDirB)
    checkSame "Changing extra-tmp-files is a bit pointless at this stage"
              extraTmpFilesA extraTmpFilesB
    checkSame "Changing extra-source-files would not make sense!"
              extraSrcFilesA extraSrcFilesB
    checkSame "You can't change the extra-doc-files."
              extraDocFilesA extraDocFilesB
    -- Custom fields must match except for x-revision, which is validated
    -- separately by 'checkRevision'.
    checkSame "Cannot change custom/extension fields"
              (filter (\(f,_) -> f /= "x-revision") customFieldsA)
              (filter (\(f,_) -> f /= "x-revision") customFieldsB)
    checkRevision customFieldsA customFieldsB
-- | The new revision's @x-revision@ custom field must be exactly one more
-- than the old revision's (a missing or unreadable field counts as 0).
checkRevision :: Check [(String, String)]
checkRevision oldFields newFields =
    checkSame ("The new x-revision must be " ++ show required)
              actual required
  where
    required = revisionOf oldFields + 1
    actual   = revisionOf newFields

    revisionOf fields =
      case lookup "x-revision" fields of
        Just s | [(n,"")] <- reads s -> n :: Int
        _                            -> 0
-- | Compare conditional build trees: dependency constraints and the
-- payload are checked, and 'if' conditionals may be neither added,
-- removed, nor have their condition expressions altered.
checkCondTree :: Check a -> Check (CondTree ConfVar [Dependency] a)
checkCondTree checkElem
  (CondNode dataA constraintsA componentsA)
  (CondNode dataB constraintsB componentsB) = do
    checkDependencies constraintsA constraintsB
    checkList "Cannot add or remove 'if' conditionals"
              checkComponent componentsA componentsB
    checkElem dataA dataB
  where
    checkComponent (condA, ifPartA, thenPartA)
                   (condB, ifPartB, thenPartB) = do
      checkSame "Cannot change the 'if' condition expressions"
                condA condB
      checkCondTree checkElem ifPartA ifPartB
      checkMaybe "Cannot add or remove the 'else' part in conditionals"
                 (checkCondTree checkElem) thenPartA thenPartB
-- | Dependency lists must line up one-to-one; only the version
-- constraints of each dependency may change (see 'checkDependency').
--
-- Fix: the user-facing error message was garbled ("Cannot don't add or
-- remove dependencies, ...").
checkDependencies :: Check [Dependency]
checkDependencies =
    checkList "Cannot add or remove dependencies, \
              \just change the version constraints"
              checkDependency
-- | A dependency may only change its version range, never the package it
-- points at; range changes are logged rather than rejected.
checkDependency :: Check Dependency
checkDependency (Dependency nameA rangeA) (Dependency nameB rangeB)
  | nameA /= nameB = fail "Cannot change which packages are dependencies, \
                          \just their version constraints."
  | otherwise      = changesOk ("dependency on " ++ display nameA) display
                               rangeA rangeB
-- | Library sections: build info may change within the usual limits, but
-- the exposed-module list and exposure flag may not.
checkLibrary :: Check Library
checkLibrary (Library modulesA exposedA buildInfoA)
             (Library modulesB exposedB buildInfoB) = do
    checkSame "Cannot change the exposed modules" modulesA modulesB
    checkSame "Cannot change the package exposed status" exposedA exposedB
    checkBuildInfo buildInfoA buildInfoB

-- | Executable sections: the main module path may not change.
-- NOTE(review): the message says "build information" but the values
-- compared here are the executables' module paths — confirm the wording.
checkExecutable :: Check Executable
checkExecutable (Executable _nameA pathA buildInfoA)
                (Executable _nameB pathB buildInfoB) = do
    checkSame "Cannot change build information" pathA pathB
    checkBuildInfo buildInfoA buildInfoB

-- | Test-suite sections: the test-suite interface may not change.
checkTestSuite :: Check TestSuite
checkTestSuite (TestSuite _nameA interfaceA buildInfoA _enabledA)
               (TestSuite _nameB interfaceB buildInfoB _enabledB) = do
    checkSame "Cannot change test-suite type" interfaceA interfaceB
    checkBuildInfo buildInfoA buildInfoB

-- | Benchmark sections: the benchmark interface may not change.
checkBenchmark :: Check Benchmark
checkBenchmark (Benchmark _nameA interfaceA buildInfoA _enabledA)
               (Benchmark _nameB interfaceB buildInfoB _enabledB) = do
    checkSame "Cannot change benchmark type" interfaceA interfaceB
    checkBuildInfo buildInfoA buildInfoB
-- | Build info must be identical between revisions; the parenthetical
-- refers to the dependency constraints carried on the 'CondTree', which
-- 'checkCondTree' validates separately via 'checkDependencies'.
checkBuildInfo :: Check BuildInfo
checkBuildInfo =
    checkSame "Cannot change build information \
              \(just the dependency version constraints)"
-- | Accept any difference in this field, logging it (rendered with
-- @render@) when the values actually differ.
changesOk :: Eq a => String -> (a -> String) -> Check a
changesOk field render old new
  | old == new = return ()
  | otherwise  = logChange (Change field (render old) (render new))
-- | Lift an element-level "changes are OK" check over lists, logging
-- added or removed trailing elements.
changesOkList :: (String -> (a -> String) -> Check a)
              -> String -> (a -> String) -> Check [a]
changesOkList checkElem field render = walk
  where
    walk (x:xs) (y:ys) = checkElem field render x y >> walk xs ys
    walk (x:_)  []     = logChange (Change ("added " ++ field) (render x) "")
    walk []     (y:_)  = logChange (Change ("removed " ++ field) "" (render y))
    walk []     []     = return ()
-- | Fail with @msg@ unless the two values are equal.
checkSame :: Eq a => String -> Check a
checkSame msg x y | x == y = return ()
                  | otherwise = fail msg

-- | Zip two lists with an element check; fail with @msg@ when the lists
-- differ in length.
checkList :: String -> Check a -> Check [a]
checkList _ _ [] [] = return ()
checkList msg checkElem (x:xs) (y:ys) = checkElem x y
                                        >> checkList msg checkElem xs ys
checkList msg _ _ _ = fail msg

-- | As 'checkList' for association lists: keys must match pairwise
-- (same keys, same order).
checkListAssoc :: Eq b => String -> Check a -> Check [(b,a)]
checkListAssoc _ _ [] [] = return ()
checkListAssoc msg checkElem ((kx,x):xs) ((ky,y):ys)
  | kx == ky = checkElem x y
               >> checkListAssoc msg checkElem xs ys
  | otherwise = fail msg
checkListAssoc msg _ _ _ = fail msg

-- | Check two 'Maybe's: both absent is fine, both present get compared,
-- mixed presence fails with @msg@.
checkMaybe :: String -> Check a -> Check (Maybe a)
checkMaybe _ _ Nothing Nothing = return ()
checkMaybe _ check (Just x) (Just y) = check x y
checkMaybe msg _ _ _ = fail msg
--TODO: export from Cabal
-- | Pretty-print a source-repository stanza; the kind is rendered on the
-- header line, so the "kind" field descriptor is filtered out of the body.
ppSourceRepo :: SourceRepo -> Doc
ppSourceRepo repo =
    emptyLine $ text "source-repository" <+> disp (repoKind repo) $+$
    (nest 4 (ppFields sourceRepoFieldDescrs' repo))
  where
    sourceRepoFieldDescrs' =
      filter (\fd -> fieldName fd /= "kind") sourceRepoFieldDescrs

-- | Prepend a near-blank line (a single space) to a 'Doc'.
emptyLine :: Doc -> Doc
emptyLine d = text " " $+$ d

-- | Render each described field of @x@ as a @name: value@ line.
ppFields :: [FieldDescr a] -> a -> Doc
ppFields fields x =
    vcat [ ppField name (getter x)
         | FieldDescr name getter _ <- fields]

-- | Render one field; an empty field body produces no output at all.
ppField :: String -> Doc -> Doc
ppField name fielddoc | isEmpty fielddoc = Doc.empty
                      | otherwise = text name <> colon <+> fielddoc
-- | Set the @x-revision@ field in the raw cabal file text: for the first
-- revision a new field is inserted directly after the @version@ line,
-- otherwise the existing @x-revision@ line is replaced in place.
--
-- Works line-wise on the raw bytes so the rest of the file is preserved
-- exactly as the author wrote it.
insertRevisionField :: Int -> ByteString -> ByteString
insertRevisionField rev
  | rev == 1 = BS.unlines . insertAfterVersion . BS.lines
  | otherwise = BS.unlines . replaceRevision . BS.lines
  where
    replaceRevision [] = []
    replaceRevision (ln:lns)
      | isField (BS.pack "x-revision") ln
      = BS.pack ("x-revision: " ++ show rev) : lns
      | otherwise
      = ln : replaceRevision lns

    insertAfterVersion [] = []
    insertAfterVersion (ln:lns)
      | isField (BS.pack "version") ln
      = ln : BS.pack ("x-revision: " ++ show rev) : lns
      | otherwise
      = ln : insertAfterVersion lns

    -- A line is the field @nm@ when it starts with @nm@ (compared
    -- case-insensitively) followed by optional spaces/tabs and a colon.
    isField nm ln
      | BS.isPrefixOf nm (BS.map Char.toLower ln)
      , let (_, t) = BS.span (\c -> c == ' ' || c == '\t')
                             (BS.drop (BS.length nm) ln)
      , Just (':',_) <- BS.uncons t
      = True
      | otherwise = False
| haskell-infra/hackage-server | Distribution/Server/Features/EditCabalFiles.hs | bsd-3-clause | 18,260 | 0 | 18 | 4,938 | 4,537 | 2,280 | 2,257 | 365 | 5 |
{-# LANGUAGE CPP #-}
-- | Read and write JSMods.
module Haste.Module (writeModule, readModule) where
import Module (moduleNameSlashes, mkModuleName)
import qualified Data.ByteString.Lazy as B
import Control.Shell
import Control.Applicative
import Control.Monad (when, filterM)
import Data.JSTarget
import Data.Binary
import Data.List (isSuffixOf)
import qualified Data.ByteString.UTF8 as BS
import qualified Haste.JSLib as JSLib
import qualified System.IO as IO
-- | The file extension to use for modules; boot modules get a distinct
-- extension so they can live beside their normal counterparts.
jsmodExt :: Bool -> String
jsmodExt True  = "jsmod-boot"
jsmodExt False = "jsmod"
-- | Path of the jsmod file for @modname@ of package @pkgid@ under
-- @basepath@, e.g. @basepath\/pkg\/Foo\/Bar.jsmod@ (or @.jsmod-boot@).
moduleFilePath :: FilePath -> String -> String -> Bool -> FilePath
moduleFilePath basepath pkgid modname boot =
  flip addExtension (jsmodExt boot) $
    basepath </> pkgid </> (moduleNameSlashes $ mkModuleName modname)
-- | Write a module to file, with the extension specified in `fileExt`.
-- Assuming that fileExt = "jsmod", a module Foo.Bar is written to
-- @basepath/Foo/Bar.jsmod@.
--
-- If any directory in the path where the module is to be written doesn't
-- exist, it gets created.
--
-- Boot modules and "normal" modules get merged at this stage.
writeModule :: FilePath -> Module -> Bool -> IO ()
writeModule basepath m@(Module pkgid modname _ _) boot =
  fromRight "writeModule" . shell $ do
    mkdir True (takeDirectory path)
    -- If the companion half (non-boot when writing boot, and vice versa)
    -- already exists, merge the two and drop the redundant boot file.
    mcompanion <- readMod basepath pkgstr modstr (not boot)
    m' <- case mcompanion of
      Just companion -> do
        bootfileExists <- isFile bootpath
        when bootfileExists $ rm bootpath
        return $ merge' m companion
      _ -> do
        return m
    liftIO . B.writeFile path $ encode m'
  where
    pkgstr = BS.toString pkgid
    modstr = BS.toString modname
    path = moduleFilePath basepath pkgstr modstr boot
    bootpath = moduleFilePath basepath pkgstr modstr True
    -- The boot half is always merged into the normal half.
    merge' = if boot then merge else flip merge
-- | Read a module from file. Looks, in order, in the package's jslib
-- archive, then at the jsmod paths tried by 'readMod'; a boot companion,
-- if present, is merged in. Returns Nothing if the module is not found
-- in any location.
readModule :: FilePath -> String -> String -> IO (Maybe Module)
readModule basepath pkgid modname = fromRight "readModule" . shell $ do
  libfile <- (basepath </>) `fmap` jslibFileName basepath pkgid
  mmlib <- liftIO $ JSLib.readModule libfile modname
  mm <- readMod basepath pkgid modname False
  mmboot <- readMod basepath pkgid modname True
  case (mmlib, mm, mmboot) of
    (Just m, _, _)          -> return $ Just m
    (_, Just m, Nothing)    -> return $ Just m
    (_, Just m, Just mboot) -> return . Just $ merge mboot m
    _                       -> return Nothing
-- | Get the file name for a given package identifier.
jslibFileName :: FilePath -> String -> Shell FilePath
jslibFileName basepath pkgid
  | pkgid `elem` specials = do
      -- Special packages come without a version number: pick the first
      -- directory that agrees with the package id on their common prefix
      -- and take the first .jslib file inside it.
      dirs <- ls basepath
      case filter (and . zipWith (==) pkgid) dirs of
        (dir:_) -> do
          files <- ls (basepath </> dir)
          case filter (".jslib" `isSuffixOf`) files of
            (f:_) -> return $ dir </> f
            _ -> fail $ "Package " ++ pkgid ++ " has no jslib file!"
        _ -> do
          return stdname
  | otherwise = do
#if __GLASGOW_HASKELL__ < 709
      return stdname
#else
      -- On newer GHCs, look for a package directory ending in the
      -- package id and search it for the library file.
      dirs <- filter (pkgid `isSuffixOf`) . map (basepath </>) <$> ls basepath
      dirs' <- filterM isDirectory dirs
      case dirs' of
        ds | not (null ds) -> findLibFile ds
           | otherwise -> return stdname
#endif
  where
    findLibFile (d:ds) = do
      fs <- map (d </>) . filter (libfilesuffix `isSuffixOf`) <$> ls d
      fs' <- filterM isFile fs
      case fs' of
        (f:_) -> return f
        _ -> findLibFile ds
    findLibFile _ = do
      return stdname

    -- Use this for non-special packages
    stdname = pkgid </> "libHS" ++ pkgid <.> "jslib"

    -- These package ids are special and come without version number
    specials = ["ghc-prim", "base", "integer-gmp"]

    libfilesuffix = pkgid <.> "jslib"
-- | Read a single jsmod file, looking first relative to the current
-- directory and then under @basepath@; Nothing when neither exists.
readMod :: FilePath -> String -> String -> Bool -> Shell (Maybe Module)
readMod basepath pkgid modname boot = do
  x <- isFile path
  let path' = if x then path else syspath
  isF <- isFile path'
  if isF
    then do
      liftIO $ IO.withFile path' IO.ReadMode $ \h -> do
        m <- decode <$> (B.hGet h . fromInteger =<< IO.hFileSize h)
        -- Force the decoded module to WHNF before the handle closes.
        m `seq` return (Just m)
    else do
      return Nothing
  where
    path = moduleFilePath "." pkgid modname boot
    syspath = moduleFilePath basepath pkgid modname boot
-- | Run a shell computation, failing loudly (with the caller's name in
-- the message) when it returns 'Left'.
fromRight :: String -> IO (Either String b) -> IO b
fromRight caller action =
    action >>= either bail return
  where
    bail e = fail $ "shell expression failed in " ++ caller ++ ": " ++ e
| santolucito/haste-compiler | src/Haste/Module.hs | bsd-3-clause | 4,776 | 0 | 20 | 1,252 | 1,321 | 673 | 648 | 93 | 5 |
{-#LANGUAGE NoImplicitPrelude #-}
{-#LANGUAGE OverloadedStrings #-}
{-#LANGUAGE OverloadedLists #-}
{-#LANGUAGE LambdaCase #-}
{-#LANGUAGE ScopedTypeVariables #-}
{-#LANGUAGE FlexibleInstances #-}
{-#LANGUAGE FlexibleContexts #-}
{-#LANGUAGE MultiParamTypeClasses #-}
{-#LANGUAGE TemplateHaskell #-}
{-#LANGUAGE TypeApplications #-}
module Web.Sprinkles.Bake
where
import Web.Sprinkles.Prelude
import qualified Data.Text as Text
import Data.Text (Text)
import qualified Data.Set as Set
import Data.Set (Set)
import System.Directory (createDirectoryIfMissing)
import System.FilePath ( (</>), takeDirectory, replaceExtension )
import Control.Monad.State
import Control.Lens
import Control.Lens.TH (makeLenses)
import Text.Printf (printf)
import Network.HTTP.Types (Status (..), status200)
import Network.Wai.Test
import Network.Wai (Application, Request (..))
import qualified Network.Wai as Wai
import Web.Sprinkles.Serve (appFromProject)
import Web.Sprinkles.Project
import qualified Data.ByteString.Lazy as LBS
import qualified Data.ByteString as BS
import Data.Char (ord)
import Text.HTML.TagSoup (parseTags, Tag (..), Attribute)
import qualified Data.CSS.Syntax.Tokens as CSS
import Data.FileEmbed (embedStringFile)
-- | Default @.htaccess@ contents, embedded at compile time from
-- @embedded/.htaccess@.
defHtaccess :: ByteString
defHtaccess = $(embedStringFile "embedded/.htaccess")
-- | State threaded through a bake run.
data BakeState
    = BakeState
        { _bsTodo :: [FilePath]   -- ^ paths still to be baked
        , _bsDone :: Set FilePath -- ^ paths already baked
        , _bsBasedir :: FilePath  -- ^ output directory
        , _bsApp :: Application   -- ^ the WAI application being baked
        }
makeLenses ''BakeState

-- | Empty worklist, current directory as output, placeholder app.
defBakeState :: BakeState
defBakeState = BakeState [] Set.empty "." defaultApplication
-- | Placeholder WAI application used before a real one is installed:
-- always answers 200 with a plain-text "Hello, world!".
defaultApplication :: Application
defaultApplication rq respond =
    respond $
        Wai.responseLBS
            status200
            [("Content-type", "text/plain;charset=utf8")]
            "Hello, world!"
-- | The bake monad: 'BakeState' over IO.
type Bake = StateT BakeState IO

-- | Bake a whole project into @destDir@: write the .htaccess, bake the
-- 404 error page, then crawl the app starting from the entry points
-- (defaults plus any extras supplied by the caller).
bakeProject :: FilePath -> Project -> [FilePath] -> IO ()
bakeProject destDir project extraEntryPoints = do
    putStrLn @Text $ "Baking project into " <> pack destDir
    createDirectoryIfMissing True destDir
    let app = appFromProject project
    runBake destDir entryPoints app $ do
        bakeHtaccess
        bake404
        bakeApp
  where
    entryPoints =
        [ "/"
        , "/sitemap.xml"
        , "/favicon.ico"
        , "/robots.txt"
        ]
        ++ extraEntryPoints
-- | Run a 'Bake' action with the given output dir, initial worklist and
-- application.
runBake :: FilePath -> [FilePath] -> Application -> Bake a -> IO a
runBake baseDir entryPoints app a =
    evalStateT a $ defBakeState
        { _bsTodo = entryPoints
        , _bsBasedir = baseDir
        , _bsApp = app
        }
-- | Write the embedded default @.htaccess@ into the output directory.
bakeHtaccess :: Bake ()
bakeHtaccess = do
    basedir <- use bsBasedir
    liftIO $ writeFile (basedir </> ".htaccess") defHtaccess
-- | Work-queue loop: pop one path, bake it, mark it done, recurse until
-- the todo list is empty. Baking a page may push newly discovered links
-- onto the todo list, so the loop re-reads it each iteration.
bakeApp :: Bake ()
bakeApp = do
    use bsTodo >>= \case
        (current:rest) -> do
            bsTodo .= rest
            bakePath current
            bsDone %= Set.insert current
            bakeApp
        _ -> return ()
-- | Bake one path unless it was already baked; HTML results are written
-- as @path/index.html@, and only 200 responses are accepted.
bakePath :: FilePath -> Bake ()
bakePath fp = do
    done <- use bsDone
    unless (fp `Set.member` done) $
        bakePage CreateIndexHtml [200] fp (dropLeadingSlash fp)
-- | How an HTML response maps to an output filename: either replace the
-- extension with @.html@, or write it as @.../index.html@.
data HtmlMappingMode = MapHtmlDirect | CreateIndexHtml

-- | Bake the 404 error page: request a path that cannot exist and store
-- the app's 404 response under @_errors/404@.
bake404 :: Bake ()
bake404 = do
    bakePage MapHtmlDirect [404] nonsensicalPath "_errors/404"
  where
    nonsensicalPath = "/123087408972309872109873012984709218371209847123"
-- | Strip a single leading @/@ from a path, if present.
dropLeadingSlash :: FilePath -> FilePath
dropLeadingSlash ('/':rest) = rest
dropLeadingSlash path       = path
-- | Bake one page: run @fp@ through the app in a WAI test session; if
-- the status is one of @expectedStatuses@, write the body to @fn@
-- (mapped per content type) and queue linked local URLs (HTML href/src
-- attributes, CSS @url()@ references) for baking.
bakePage :: HtmlMappingMode -> [Int] -> FilePath -> FilePath -> Bake ()
bakePage htmlMode expectedStatuses fp fn = do
    app <- use bsApp
    basedir <- use bsBasedir
    let dstFile = basedir </> fn
        dstDir = takeDirectory dstFile
    let session = do
            let rq = setPath defaultRequest (fromString fp)
            request rq
    rp <- liftIO $ runSession session app
    let status = simpleStatus rp
    liftIO $ printf "GET %s %i %s\n" ("/" </> fp) (statusCode status) (decodeUtf8 $ statusMessage status)
    if statusCode status `elem` expectedStatuses
        then do
            -- Dispatch on the media type, ignoring any ;charset=... part.
            let ty = fromMaybe "application/octet-stream" $ lookup "content-type" (simpleHeaders rp)
                rawTy = BS.takeWhile (/= fromIntegral (ord ';')) ty
                rawTySplit = BS.split (fromIntegral . ord $ '/') rawTy
            liftIO $ printf "%s\n" (decodeUtf8 ty)
            let (linkUrls, dstDir', dstFile') = case rawTySplit of
                    ["text", "html"] ->
                        let body = LBS.toStrict $ simpleBody rp
                            soup = parseTags (decodeUtf8 body)
                            linkUrls = map (fp </>) . map Text.unpack $ extractLinkedUrls soup
                        in case htmlMode of
                            CreateIndexHtml ->
                                (linkUrls, dstFile, dstFile </> "index.html")
                            MapHtmlDirect ->
                                (linkUrls, dstDir, replaceExtension dstFile "html")
                    [_, "css"] ->
                        let body = decodeUtf8 . LBS.toStrict $ simpleBody rp
                            tokens = CSS.tokenize body
                            linkUrls = map (takeDirectory fp </>) . map Text.unpack $ extractCssUrls tokens
                        in (linkUrls, dstDir, dstFile)
                    _ ->
                        ([], dstDir, dstFile)
            liftIO $ do
                createDirectoryIfMissing True dstDir'
                LBS.writeFile dstFile' (simpleBody rp)
            -- Newly discovered local URLs go onto the bake queue.
            bsTodo <>= linkUrls
        else do
            liftIO $ putStrLn @String "skip"
-- | Collect the local URLs referenced by a parsed HTML document:
-- @href@ on anchors and link tags, @src@ on scripts and images.
extractLinkedUrls :: [Tag Text] -> [Text]
extractLinkedUrls = filter isLocalUrl . concatMap linksOf
  where
    linksOf (TagOpen "a" attrs)      = [u | ("href", u) <- attrs]
    linksOf (TagOpen "link" attrs)   = [u | ("href", u) <- attrs]
    linksOf (TagOpen "script" attrs) = [u | ("src", u) <- attrs]
    linksOf (TagOpen "img" attrs)    = [u | ("src", u) <- attrs]
    linksOf _                        = []
-- | A URL is local when it is not protocol-relative and not an absolute
-- http(s) URL.
isLocalUrl :: Text -> Bool
isLocalUrl url =
    not (protocolRelative || plainHttp || secureHttp)
  where
    hasPrefix p = Text.pack p `Text.isPrefixOf` url
    protocolRelative = hasPrefix "//"
    plainHttp        = hasPrefix "http://"
    secureHttp       = hasPrefix "https://"
-- | Collect the local URLs referenced from a tokenized CSS document:
-- url-tokens plus @url(...)@ function calls with a string argument.
extractCssUrls :: [CSS.Token] -> [Text]
extractCssUrls = filter isLocalUrl . collect
  where
    collect (CSS.Url url : rest) = url : collect rest
    collect (CSS.Function "url" : CSS.String url : rest) = url : collect rest
    collect (_ : rest) = collect rest
    collect [] = []
| tdammers/templar | src/Web/Sprinkles/Bake.hs | bsd-3-clause | 6,679 | 0 | 24 | 2,057 | 1,862 | 976 | 886 | 173 | 9 |
module Database.Persist.Redis.Internal
( toKey
, unKey
, mkEntity
, toKeyId
, toKeyText
, toInsertFields
, toB
) where
import qualified Data.ByteString as B
import qualified Data.ByteString.UTF8 as U
import Data.Text (Text, unpack)
import qualified Data.Text as T
import Database.Persist.Class
import Database.Persist.Types
import Database.Persist.Redis.Parser
-- | Redis hash field name for an entity field (its DB column name).
toLabel :: FieldDef -> B.ByteString
toLabel = U.fromString . unpack . unDBName . fieldDB

-- | Entity (table) name as 'Text'.
toEntityString :: PersistEntity val => val -> Text
toEntityString = unDBName . entityDB . entityDef . Just

-- | Entity (table) name as a UTF-8 'B.ByteString'.
toEntityName :: EntityDef -> B.ByteString
toEntityName = U.fromString . unpack . unDBName . entityDB
-- | Rebuild an 'Entity' from raw Redis hash fields; fails in the monad
-- when the stored values do not decode to the entity's fields.
mkEntity :: (Monad m, PersistEntity val) => Key val -> [(B.ByteString, B.ByteString)] -> m (Entity val)
mkEntity key fields = do
    let values = redisToPerisistValues fields
    let v = fromPersistValues values
    case v of
        Right body -> return $ Entity key body
        Left a -> fail (unpack a)
-- | Pair field definitions with field values as Redis label/value bytes.
-- Null-valued fields are skipped entirely; the walk stops as soon as
-- either list runs out.
zipAndConvert :: PersistField t => [FieldDef] -> [t] -> [(B.ByteString, B.ByteString)]
zipAndConvert (field:fields) (value:values)
    | converted == PersistNull = rest
    | otherwise                = (toLabel field, toValue converted) : rest
  where
    converted = toPersistValue value
    rest      = zipAndConvert fields values
zipAndConvert _ _ = []
-- | Create a list for create/update in Redis store.
-- Null-valued fields are omitted (see 'zipAndConvert').
toInsertFields :: PersistEntity val => val -> [(B.ByteString, B.ByteString)]
toInsertFields record = zipAndConvert entity fields
    where
        entity = entityFields $ entityDef $ Just record
        fields = toPersistFields record
-- Separator used between the entity name and the id in keys.
underscoreBs :: B.ByteString
underscoreBs = U.fromString "_"

-- | Make a key for given entity and id, e.g. @Person_42@.
toKeyText :: PersistEntity val => val -> Integer -> Text
toKeyText val k = toEntityString val `T.append` T.pack "_" `T.append` T.pack (show k)

-- | UTF-8 encode a 'Text'.
toB :: Text -> B.ByteString
toB = U.fromString . unpack

-- | Create a string key prefix for given entity, e.g. @Person_@.
toObjectPrefix :: PersistEntity val => val -> B.ByteString
toObjectPrefix val = B.append (toEntityName $ entityDef $ Just val) underscoreBs

idBs :: B.ByteString
idBs = U.fromString "id"

-- | Construct an id key (e.g. @Person_id@), that is incremented for access
toKeyId :: PersistEntity val => val -> B.ByteString
toKeyId val = B.append (toObjectPrefix val) idBs

-- | Raw bytes of a database key.
-- NOTE(review): uses partial 'head'; assumes keys always carry at least
-- one persist value — confirm against the key encoding.
unKey :: (PersistEntity val) => Key val -> B.ByteString
unKey = toValue . head . keyToValues
-- | Build a database key from its textual form, failing in the monad
-- when the text does not form a valid key.
toKey :: (Monad m, PersistEntity val) => Text -> m (Key val)
toKey txt =
    either (fail . unpack) return (keyFromValues [PersistText txt])
| creichert/persistent | persistent-redis/Database/Persist/Redis/Internal.hs | mit | 2,625 | 0 | 12 | 538 | 860 | 453 | 407 | 58 | 2 |
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances,
PatternGuards #-}
module Text.Pandoc.Readers.Docx.Reducible ( concatReduce
, (<+>)
)
where
import Text.Pandoc.Builder
import Data.List
import Data.Sequence (ViewR(..), ViewL(..), viewl, viewr)
import qualified Data.Sequence as Seq (null)
-- | A wrapper peeled off a value: a plain constructor wrapper (e.g. the
-- one behind 'emph'), a wrapper carrying attributes (e.g. 'spanWith'),
-- or no wrapper at all.
data Modifier a = Modifier (a -> a)
                | AttrModifier (Attr -> a -> a) Attr
                | NullModifier

-- | Containers whose outermost wrapper can be inspected, removed and
-- reapplied, so adjacent equally-wrapped content can be merged.
class (Eq a) => Modifiable a where
  modifier :: a -> Modifier a
  innards :: a -> a
  getL :: a -> (a, a)
  getR :: a -> (a, a)
  spaceOut :: a -> (a, a, a)
-- | Split off leading space, restacking the unwrapped modifiers around
-- the remainder (middle plus trailing part).
spaceOutL :: (Monoid a, Modifiable a) => a -> (a, a)
spaceOutL ms = (l, stack fs (m' <> r))
  where (l, m, r) = spaceOut ms
        (fs, m') = unstack m

-- | Split off trailing space, restacking the unwrapped modifiers around
-- the remainder (leading part plus middle).
spaceOutR :: (Monoid a, Modifiable a) => a -> (a, a)
spaceOutR ms = (stack fs (l <> m'), r)
  where (l, m, r) = spaceOut ms
        (fs, m') = unstack m
-- | Shown by applying the modifier to empty content (debugging aid).
instance (Monoid a, Show a) => Show (Modifier a) where
  show (Modifier f) = show $ f mempty
  show (AttrModifier f attr) = show $ f attr mempty
  show (NullModifier) = "NullModifier"

-- | Modifiers compare equal when they behave the same on 'mempty'.
instance (Monoid a, Eq a) => Eq (Modifier a) where
  (Modifier f) == (Modifier g) = (f mempty == g mempty)
  (AttrModifier f attr) == (AttrModifier g attr') = (f attr mempty == g attr' mempty)
  (NullModifier) == (NullModifier) = True
  _ == _ = False
instance Modifiable Inlines where
  -- Only a singleton sequence exposes its wrapper as a modifier.
  modifier ils = case viewl (unMany ils) of
    (x :< xs) | Seq.null xs -> case x of
      (Emph _) -> Modifier emph
      (Strong _) -> Modifier strong
      (SmallCaps _) -> Modifier smallcaps
      (Strikeout _) -> Modifier strikeout
      (Superscript _) -> Modifier superscript
      (Subscript _) -> Modifier subscript
      (Span attr _) -> AttrModifier spanWith attr
      _ -> NullModifier
    _ -> NullModifier

  -- Contents of a singleton wrapper; anything else returned unchanged.
  innards ils = case viewl (unMany ils) of
    (x :< xs) | Seq.null xs -> case x of
      (Emph lst) -> fromList lst
      (Strong lst) -> fromList lst
      (SmallCaps lst) -> fromList lst
      (Strikeout lst) -> fromList lst
      (Superscript lst) -> fromList lst
      (Subscript lst) -> fromList lst
      (Span _ lst) -> fromList lst
      _ -> ils
    _ -> ils

  -- Detach the first / last element of the sequence.
  getL ils = case viewl $ unMany ils of
    (s :< sq) -> (singleton s, Many sq)
    _ -> (mempty, ils)

  getR ils = case viewr $ unMany ils of
    (sq :> s) -> (Many sq, singleton s)
    _ -> (ils, mempty)

  -- Pull leading/trailing Space elements outside the wrapper stack.
  spaceOut ils =
    let (fs, ils') = unstack ils
        contents = unMany ils'
        left = case viewl contents of
                 (Space :< _) -> space
                 _ -> mempty
        right = case viewr contents of
                  (_ :> Space) -> space
                  _ -> mempty in
    (left, (stack fs $ trimInlines . Many $ contents), right)
instance Modifiable Blocks where
  -- Only block quotes are treated as an unwrappable modifier.
  modifier blks = case viewl (unMany blks) of
    (x :< xs) | Seq.null xs -> case x of
      (BlockQuote _) -> Modifier blockQuote
      -- (Div attr _) -> AttrModifier divWith attr
      _ -> NullModifier
    _ -> NullModifier

  innards blks = case viewl (unMany blks) of
    (x :< xs) | Seq.null xs -> case x of
      (BlockQuote lst) -> fromList lst
      -- (Div attr lst) -> fromList lst
      _ -> blks
    _ -> blks

  -- Blocks carry no leading/trailing spacing to pull out.
  spaceOut blks = (mempty, blks, mempty)

  getL ils = case viewl $ unMany ils of
    (s :< sq) -> (singleton s, Many sq)
    _ -> (mempty, ils)

  getR ils = case viewr $ unMany ils of
    (sq :> s) -> (Many sq, singleton s)
    _ -> (ils, mempty)
-- | Peel off all nested wrappers, outermost first, returning them along
-- with the fully unwrapped content.
unstack :: (Modifiable a) => a -> ([Modifier a], a)
unstack ms = case modifier ms of
  NullModifier -> ([], ms)
  _ -> (f : fs, ms') where
    f = modifier ms
    (fs, ms') = unstack $ innards ms

-- | Reapply wrappers (outermost first). Plain modifiers are skipped on
-- empty content; attribute modifiers are always applied.
stack :: (Monoid a, Modifiable a) => [Modifier a] -> a -> a
stack [] ms = ms
stack (NullModifier : fs) ms = stack fs ms
stack ((Modifier f) : fs) ms =
  if isEmpty ms
  then stack fs ms
  else f $ stack fs ms
stack ((AttrModifier f attr) : fs) ms = f attr $ stack fs ms
-- | True when the container holds nothing (equals 'mempty').
isEmpty :: (Monoid a, Eq a) => a -> Bool
isEmpty = (== mempty)
-- | Join two containers, merging just the adjacent boundary elements
-- (last of the left, first of the right) via 'combineSingleton'.
combine :: (Monoid a, Modifiable a, Eq a) => a -> a -> a
combine x y =
  let (xs', x') = getR x
      (y', ys') = getL y
  in
   xs' <> (combineSingleton x' y') <> ys'

isAttrModifier :: Modifier a -> Bool
isAttrModifier (AttrModifier _ _) = True
isAttrModifier _ = False
-- | Merge two boundary elements: wrappers common to both are hoisted out
-- and applied once around the recursive combination; with nothing shared,
-- the pieces are concatenated, preserving leftover attribute wrappers
-- even around empty content and pulling spacing outside the wrappers.
combineSingleton :: (Monoid a, Modifiable a, Eq a) => a -> a -> a
combineSingleton x y =
  let (xfs, xs) = unstack x
      (yfs, ys) = unstack y
      shared = xfs `intersect` yfs
      x_remaining = xfs \\ shared
      y_remaining = yfs \\ shared
      x_rem_attr = filter isAttrModifier x_remaining
      y_rem_attr = filter isAttrModifier y_remaining
  in
   case null shared of
     True | isEmpty xs && isEmpty ys ->
            stack (x_rem_attr ++ y_rem_attr) mempty
          | isEmpty xs ->
            let (sp, y') = spaceOutL y in
            (stack x_rem_attr mempty) <> sp <> y'
          | isEmpty ys ->
            let (x', sp) = spaceOutR x in
            x' <> sp <> (stack y_rem_attr mempty)
          | otherwise ->
            let (x', xsp) = spaceOutR x
                (ysp, y') = spaceOutL y
            in
             x' <> xsp <> ysp <> y'
     False -> stack shared $
              combine
              (stack x_remaining xs)
              (stack y_remaining ys)
-- | Operator alias for 'combine'.
(<+>) :: (Monoid a, Modifiable a, Eq a) => a -> a -> a
(<+>) = combine
-- | Reduce a list of containers by repeatedly merging adjacent boundary
-- elements. Uses a strict left fold ('foldl'', in scope from the
-- unqualified Data.List import) so long inputs do not build up a chain
-- of lazy 'combine' thunks.
concatReduce :: (Monoid a, Modifiable a) => [a] -> a
concatReduce xs = foldl' combine mempty xs
| alexvong1995/pandoc | src/Text/Pandoc/Readers/Docx/Reducible.hs | gpl-2.0 | 5,674 | 80 | 19 | 1,886 | 2,283 | 1,190 | 1,093 | 145 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module SqliteInit (
(@/=), (@==), (==@)
, asIO
, assertNotEqual
, assertNotEmpty
, assertEmpty
, isTravis
, BackendMonad
, runConn
, MonadIO
, persistSettings
, MkPersistSettings (..)
, db
, sqlite_database
, sqlite_database_file
, BackendKey(..)
, GenerateKey(..)
, RunDb
-- re-exports
, module Database.Persist
, module Test.Hspec
, module Test.HUnit
, liftIO
, mkPersist, mkMigrate, share, sqlSettings, persistLowerCase, persistUpperCase
, Int32, Int64
, Text
, module Control.Monad.Trans.Reader
, module Control.Monad
, module Database.Persist.Sql
, BS.ByteString
, SomeException
, TestFn(..)
, truncateTimeOfDay
, truncateToMicro
, truncateUTCTime
, arbText
, liftA2
, MonadFail
) where
import Init
( TestFn(..), truncateTimeOfDay, truncateUTCTime
, truncateToMicro, arbText, liftA2, GenerateKey(..)
, (@/=), (@==), (==@), MonadFail
, assertNotEqual, assertNotEmpty, assertEmpty, asIO
, isTravis, RunDb
)
-- re-exports
import Control.Exception (SomeException)
import Control.Monad (void, replicateM, liftM, when, forM_)
import Control.Monad.Trans.Reader
import Database.Persist.TH (mkPersist, mkMigrate, share, sqlSettings, persistLowerCase, persistUpperCase, MkPersistSettings(..))
import Test.Hspec
-- testing
import Test.HUnit ((@?=),(@=?), Assertion, assertFailure, assertBool)
import Control.Monad (unless, (>=>))
import Control.Monad.IO.Unlift (MonadUnliftIO)
import Control.Monad.Logger
import Control.Monad.Trans.Resource (ResourceT, runResourceT)
import qualified Data.ByteString as BS
import Data.Text (Text)
import System.Log.FastLogger (fromLogStr)
import Database.Persist
import Database.Persist.Sql
import Database.Persist.Sqlite
import Database.Persist.TH ()
-- Data types
import Control.Monad.IO.Class
import Data.Int (Int32, Int64)
-- | Flip to True to echo SQL log output while running tests locally.
_debugOn :: Bool
_debugOn = False

-- | TH settings for the test entity definitions (generic backends on).
persistSettings :: MkPersistSettings
persistSettings = sqlSettings { mpsGeneric = True }

type BackendMonad = SqlBackend

-- | On-disk database file used by the test suite.
sqlite_database_file :: Text
sqlite_database_file = "testdb.sqlite3"

sqlite_database :: SqliteConnectionInfo
sqlite_database = mkSqliteConnectionInfo sqlite_database_file
-- | Run an action against a fresh single-connection pool on the test
-- database. Log lines are printed only when '_debugOn' is set and the
-- suite is not running on Travis CI.
runConn :: MonadUnliftIO m => SqlPersistT (LoggingT m) t -> m ()
runConn f = do
    travis <- liftIO isTravis
    let debugPrint = not travis && _debugOn
    let printDebug = if debugPrint then print . fromLogStr else void . return
    flip runLoggingT (\_ _ _ s -> printDebug s) $ do
        _ <- withSqlitePoolInfo sqlite_database 1 $ runSqlPool f
        return ()
-- | Run a DB action as an HUnit 'Assertion'. The action is sequenced with
-- 'transactionUndo' so its changes are rolled back and tests do not leak
-- state into each other.
db :: SqlPersistT (LoggingT (ResourceT IO)) () -> Assertion
db actions = do
  runResourceT $ runConn $ actions >> transactionUndo
| creichert/persistent | persistent-sqlite/test/SqliteInit.hs | mit | 2,710 | 0 | 12 | 444 | 739 | 456 | 283 | 83 | 2 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="fil-PH">
<title>Regular Expression Tester</title>
<maps>
<homeID>regextester</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/regextester/src/main/javahelp/help_fil_PH/helpset_fil_PH.hs | apache-2.0 | 979 | 89 | 29 | 157 | 391 | 210 | 181 | -1 | -1 |
-- In this example, remove 'Tree(Leaf,Branch)' from the export. Note that this
-- refactoring does not handle sub entries in the export list and this may be
-- improved in the future.
module C3(sumTree, myFringe, SameOrNot(..)) where
-- | A binary tree carrying values only at the leaves.
data Tree a = Leaf a | Branch (Tree a) (Tree a)

-- | Sum of every leaf value in the tree.
sumTree :: (Num a) => Tree a -> a
sumTree t = case t of
  Leaf v     -> v
  Branch l r -> sumTree l + sumTree r

-- | Left-most fringe only: descends exclusively into left branches,
-- so exactly one leaf value is returned.
myFringe :: Tree a -> [a]
myFringe t = case t of
  Leaf v     -> [v]
  Branch l _ -> myFringe l
-- | Equality-style comparison expressed as a two-method class
-- (part of a HaRe export-list refactoring fixture).
class SameOrNot a where
   isSame :: a -> a -> Bool
   isNotSame :: a -> a -> Bool
-- | Int instance delegates directly to (==) and (/=).
instance SameOrNot Int where
   isSame a b = a == b
   isNotSame a b = a /= b
| SAdams601/HaRe | old/testing/rmFromExport/C3_TokOut.hs | bsd-3-clause | 668 | 0 | 8 | 154 | 234 | 124 | 110 | 14 | 1 |
module E4 where
--Any type/data constructor name declared in this module can be renamed.
--Any type variable can be renamed.
--Rename type Constructor 'BTree' to 'MyBTree'
data BTree a = Empty | T a (BTree a) (BTree a)
deriving Show
-- | Build a 'BTree' by inserting the list elements from right to left,
-- threading the monadic result of each 'insert' into the next.
buildtree :: (Monad m, Ord a) => [a] -> m (BTree a)
buildtree [] = return Empty
buildtree (x:xs) = do
  res1 <- buildtree xs
  res <- insert x res1
  return res
-- | Insertion stub used by the HaRe as-pattern refactoring fixture.
-- NOTE(review): the case patterns rebind 'val', shadowing the argument, so
-- the guard 'val == val' is always True; every input other than the two
-- explicit one/two-node shapes falls through unchanged. This looks
-- deliberate fixture content — left untouched.
insert :: (Monad m, Ord a) => a -> BTree a -> m (BTree a)
insert val v2 = do
  case v2 of
    T val Empty Empty
        | val == val -> return Empty
        | otherwise -> return (T val Empty (T val Empty Empty))
    T val (T val2 Empty Empty) Empty -> return Empty
    _ -> return v2
-- | Fixture driver.
-- NOTE(review): given 'insert''s fall-through behaviour, 'buildtree [3,1,2]'
-- appears to produce 'Empty', so the 'T val _ Empty' pattern match would
-- fail at runtime — presumably intentional for the refactoring test;
-- confirm before reusing this module elsewhere.
main :: IO ()
main = do
  n@(T val _ Empty) <- buildtree [3, 1, 2]
  if True
     then do putStrLn $ (show n)
     else do putStrLn $ (show (T val Empty n))
module Monad where
-- create :: Int -> (Ptr Word8 -> IO ()) -> IO ByteString
-- | Reduced LiquidHaskell test case: allocate and ignore the result.
create l = do
  fp <- mallocByteString l   -- fp intentionally unused
  return ()
-- | Opaque wrapper standing in for a foreign pointer in this test.
data P a = P a
-- | Stub allocator; never actually evaluated by the test.
mallocByteString :: a -> IO (P a)
mallocByteString l = undefined
| abakst/liquidhaskell | tests/pos/monad1.hs | bsd-3-clause | 220 | 0 | 8 | 57 | 67 | 34 | 33 | 7 | 1 |
module T14773b where
-- | GHC desugarer regression test: pattern bindings with guards.
-- For 'b' every guard fails ('False'), so forcing 'b' diverges at runtime;
-- for 'c' the second guard ('True') selects 'Just True', so c == True.
b :: Bool
(Just b) | False = Nothing
c :: Bool
(Just c) | False = Nothing
         | True = Just True
module GitHashObject where
import qualified Github.GitData.Blobs as Github
import Data.List( intercalate)
-- | Fetch a git blob by repository owner, repo name and SHA via the GitHub
-- API, then print either the error or the blob's content.
main = do
  possibleBlob <- Github.blob "mike-burns" "github" "1dc7b1f6e0c7bf1118f3b03195071dd6ea6db9b3"
  case possibleBlob of
       (Left error) -> putStrLn $ "Error: " ++ (show error)
       (Right blob) -> putStrLn $ Github.blobContent blob
| mavenraven/github | samples/GitData/Blobs/GitHashObject.hs | bsd-3-clause | 348 | 0 | 12 | 54 | 98 | 52 | 46 | 8 | 2 |
{-# LANGUAGE UnboxedTuples #-}
module Main where
-- GHC testsuite case for the "unarise" pass: nullary unboxed tuples as
-- arguments and nested unboxed-tuple fields. NOINLINE keeps the calls from
-- being simplified away before the pass under test runs. Expected output: "ok".
{-# NOINLINE f1 #-}
f1 :: (# #) -> (# #) -> String
f1 (# #) (# #) = "o"
{-# NOINLINE f2 #-}
f2 :: (# (# #), (# #) #) -> String
f2 (# (# #), (# #) #) = "k"
main :: IO ()
main = do
  let t = (# (# #), (# #) #)
  case t of
    (# t1, t2 #) -> putStrLn (f1 t1 t2 ++ f2 t)
| shlevy/ghc | testsuite/tests/unboxedsums/unarise.hs | bsd-3-clause | 327 | 0 | 13 | 99 | 122 | 65 | 57 | 13 | 1 |
-- | Settings are centralized, as much as possible, into this file. This
-- includes database connection settings, static file locations, etc.
-- In addition, you can configure a number of different aspects of Yesod
-- by overriding methods in the Yesod typeclass. That instance is
-- declared in the Foundation.hs file.
module Settings where
import Prelude
import Text.Shakespeare.Text (st)
import Language.Haskell.TH.Syntax
import Database.Persist.Postgresql (PostgresConf)
import Yesod.Default.Config
import Yesod.Default.Util
import Data.Text (Text)
import Data.Yaml
import Control.Applicative
import Settings.Development
import Data.Default (def)
import Text.Hamlet
-- | Which Persistent backend this site is using.
type PersistConf = PostgresConf
-- Static setting below. Changing these requires a recompile
-- | The location of static files on your system. This is a file system
-- path. The default value works properly with your scaffolded site.
staticDir :: FilePath
staticDir = "static"
-- | The base URL for your static files. As you can see by the default
-- value, this can simply be "static" appended to your application root.
-- A powerful optimization can be serving static files from a separate
-- domain name. This allows you to use a web server optimized for static
-- files, more easily set expires and cache values, and avoid possibly
-- costly transference of cookies on static files. For more information,
-- please see:
-- http://code.google.com/speed/page-speed/docs/request.html#ServeFromCookielessDomain
--
-- If you change the resource pattern for StaticR in Foundation.hs, you will
-- have to make a corresponding change here.
--
-- To see how this value is used, see urlRenderOverride in Foundation.hs
staticRoot :: AppConfig DefaultEnv x -> Text
staticRoot conf = [st|#{appRoot conf}/static|]
-- | Settings for 'widgetFile', such as which template languages to support and
-- default Hamlet settings.
--
-- For more information on modifying behavior, see:
--
-- https://github.com/yesodweb/yesod/wiki/Overriding-widgetFile
widgetFileSettings :: WidgetFileSettings
widgetFileSettings = def
{ wfsHamletSettings = defaultHamletSettings
{ hamletNewlines = AlwaysNewlines
}
}
-- The rest of this file contains settings which rarely need changing by a
-- user.
widgetFile :: String -> Q Exp
widgetFile = (if development then widgetFileReload
else widgetFileNoReload)
widgetFileSettings
-- | Deployment-specific settings loaded from the @extra@ section of the
-- Yesod configuration file.
data Extra = Extra
    { ldapurl :: String        -- ^ LDAP server URL
    , ldapdn :: String         -- ^ LDAP bind DN
    , ldappassword :: String   -- ^ LDAP bind password
    , smtpserver :: String     -- ^ SMTP host used for outgoing mail
    , emailfrom :: Text        -- ^ From-address for outgoing mail
    , emailuser :: String      -- ^ SMTP auth user
    , emailpassword :: String  -- ^ SMTP auth password
    } deriving Show
-- | Parse 'Extra' from the config YAML object; every key is required,
-- so a missing field fails the whole parse.
parseExtra :: DefaultEnv -> Object -> Parser Extra
parseExtra _ o = Extra
    <$> o .: "ldapurl"
    <*> o .: "ldapdn"
    <*> o .: "ldappassword"
    <*> o .: "smtpserver"
    <*> o .: "emailfrom"
    <*> o .: "emailuser"
    <*> o .: "emailpassword"
| dgonyeo/lambdollars | Settings.hs | mit | 2,966 | 0 | 18 | 558 | 353 | 220 | 133 | -1 | -1 |
-- | Athena.Translation.Rule.Strip module.
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE UnicodeSyntax #-}
module Athena.Translation.Rules.Strip
( inferSplit
, split
, unshunt
)
where
------------------------------------------------------------------------------
import Data.Proof
( ProofMap
, ProofTree
, ProofTreeGen ( Root, Leaf )
)
import Data.TSTP
import qualified Data.Map as Map
import Athena.Translation.Utils ( stdName )
import Athena.Utils.PrettyPrint
( (<+>)
, (<>)
, (<@>)
, Doc
, Pretty(pretty)
-- , braces
, colon
, comma
, comment
, dot
, empty
, encloseSep
, equals
, hashtag
, hypenline
, indent
, int
, lbracket
, line
, parens
, rbracket
, space
, vsep
)
------------------------------------------------------------------------------
-- | Normalise implications:
--   x ⇒ (y ⇒ z)  becomes  (x ∧ y) ⇒ z   (uncurrying), and
--   x ⇒ (y ∧ z)  becomes  (x ⇒ y) ∧ (x ⇒ z)  (distribution).
-- Any other formula is returned unchanged.
unshunt ∷ Formula → Formula
unshunt (BinOp x (:=>:) (BinOp y (:=>:) z)) =
  unshunt (BinOp (BinOp x (:&:) y) (:=>:) z)
unshunt (BinOp x (:=>:) (BinOp y (:&:) z)) =
  BinOp (unshunt (BinOp x (:=>:) y))
        (:&:) (unshunt (BinOp x (:=>:) z))
unshunt fm = fm
-- | Split a goal into a conjunction of implicational subgoals, threading
-- earlier conjuncts in as hypotheses of the later ones; each subgoal is
-- normalised with 'unshunt'. Negations are pushed through the binary
-- connectives, double negation is removed, and the literals $true/$false
-- are swapped under negation. Clause order matters: the catch-all 'fm'
-- case must stay last.
split ∷ Formula → Formula
split (BinOp φ₁ (:&:) φ₂) =
  BinOp
    (unshunt $ split φ₁)
    (:&:)
    (unshunt $ BinOp φ₁ (:=>:) (split φ₂))
-- φ₁ ∨ φ₂ is treated as ¬φ₁ ⇒ φ₂.
split (BinOp φ₁ (:|:) φ₂) =
  unshunt $ BinOp ((:~:) φ₁) (:=>:) (split φ₂)
split (BinOp φ₁ (:=>:) φ₂) =
  unshunt (BinOp φ₁ (:=>:) (split φ₂))
-- An equivalence splits into both implications.
split (BinOp φ₁ (:<=>:) φ₂) =
  BinOp
    (unshunt $ BinOp φ₁ (:=>:) (split φ₂))
    (:&:)
    (unshunt $ BinOp φ₂ (:=>:) (split φ₁))
split ((:~:) (BinOp φ₁ (:&:) φ₂)) =
  unshunt $ BinOp φ₁ (:=>:) (split ((:~:) φ₂))
split ((:~:) (BinOp φ₁ (:|:) φ₂)) =
  BinOp
    (unshunt $ split ((:~:) φ₁))
    (:&:)
    (unshunt $ BinOp ((:~:) φ₁) (:=>:) (split ((:~:) φ₂)))
split ((:~:) (BinOp φ₁ (:=>:) φ₂)) =
  BinOp
    (unshunt $ split φ₁)
    (:&:)
    (unshunt $ BinOp φ₁ (:=>:) (split ((:~:) φ₂)))
split ((:~:) (BinOp φ₁ (:<=>:) φ₂)) =
  BinOp
    (unshunt $ BinOp φ₁ (:=>:) (split ((:~:) φ₂)))
    (:&:)
    (unshunt $ BinOp ((:~:) φ₂) (:=>:) (split φ₁))
split ((:~:) ((:~:) φ₁)) = split φ₁
split ((:~:) (PredApp (AtomicWord "$false") [])) = PredApp (AtomicWord "$true") []
split ((:~:) (PredApp (AtomicWord "$true") [])) = PredApp (AtomicWord "$false") []
split fm = fm
-- | Walk the split formula in step with the list of pending goals,
-- emitting a proof-term 'Doc': each goal that matches a conjunct becomes a
-- fresh name @proofN@ (the counter is threaded through and returned), and
-- conjunctions that do not match directly are rebuilt with ∧-intro from
-- the proofs of their two halves.
proofSplit :: Formula → [Formula] → Int → (Doc, [Formula], Int)
proofSplit _ [] n = (empty, [], n)
proofSplit _ [_] n = (pretty . stdName $ "proof" ++ show n, [], n+1)
proofSplit φ@(BinOp ψ (:&:) γ) gs@(goal : goals) n
  | φ == goal = (pretty (stdName ("proof" ++ show n)), goals, n+1)
  | otherwise = (docφ, lgoals, ln)
  where
    -- Prove the left conjunct first, then the right against the goals
    -- that remain afterwards.
    (docψ, cgoals, cn) = proofSplit ψ gs n
    (docγ, lgoals, ln) = proofSplit γ cgoals cn
    docφ ∷ Doc
    docφ = parens (pretty "∧-intro" <> line
               <> indent 2 (docψ <> line <> docγ))
proofSplit φ gs@(goal : goals) n
  | φ == goal = (pretty (stdName $ "proof" ++ show n), goals, n+1)
  | otherwise = (empty,gs,n)
-- | Render the combined proof term for a split goal: special-cased for
-- zero, one and two subgoals, otherwise delegated to 'proofSplit' starting
-- the proof-name counter at 0.
inferSplit ∷ Formula → [Formula] → Doc
inferSplit _ [] = pretty '?'
inferSplit _ [_] = pretty "proof₀"
inferSplit _ [_,_] =
  parens $ pretty "∧-intro"
    <+> pretty "proof₀"
    <+> pretty "proof₁"
inferSplit φ sgoals = doc
  where
    -- NOTE(review): 'splitted' is never used below — the last clause
    -- recomputes 'split φ' directly. Candidate for cleanup.
    splitted ∷ Formula
    splitted = split φ
    doc ∷ Doc
    (doc, _, _) = proofSplit (split φ) sgoals 0
| jonaprieto/athena | src/Athena/Translation/Rules/Strip.hs | mit | 3,455 | 93 | 14 | 780 | 1,607 | 885 | 722 | 105 | 1 |
module Spec where
-- | Placeholder test-suite entry point; performs no work.
main :: IO ()
main = pure ()
{-# LANGUAGE CPP #-}
module GHCJS.DOM.SVGStyleElement (
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
module GHCJS.DOM.JSFFI.Generated.SVGStyleElement
#else
#endif
) where
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
import GHCJS.DOM.JSFFI.Generated.SVGStyleElement
#else
#endif
| plow-technologies/ghcjs-dom | src/GHCJS/DOM/SVGStyleElement.hs | mit | 358 | 0 | 5 | 33 | 33 | 26 | 7 | 4 | 0 |
-----------------------------------------------------------------------------
--
-- Module : CFrac
-- Copyright :
-- License : AllRightsReserved
--
-- Maintainer :
-- Stability :
-- Portability :
--
-- | Fun continued fractions: given the terms of the continued fraction, produces
-- a list of corresponding fractions.
--
-----------------------------------------------------------------------------
module CFrac (
cf2q
) where
-- a/b = x + y/R -> (a+b*x)/b = y/R -> b/y*(a+b*x) = R
-- a2/b2 = a1/(b1+
-- | Turn continued-fraction terms (x, y) into the list of successive
-- numerator/denominator pairs, seeded with (0, 1). For each term the next
-- denominator is y * (p + q * x) where (p, q) is the current pair.
cf2q :: Num a => [(a, a)] -> [(a, a)]
cf2q = go 0 1
  where
    go _ _ [] = []
    go p q ((x, y) : rest) = (p, q) : go q (y * (p + q * x)) rest
-- 5/12 = 0 + 1/(2 + 1/4)
--
-- putStrLn (show (cf2q [(5,1),(5,1),(5,1),(5,1),(5,1)]))
| equational/JL2012 | HaskellExamples/src/CFrac.hs | mit | 729 | 0 | 14 | 153 | 124 | 76 | 48 | 5 | 2 |
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses, ScopedTypeVariables, Rank2Types, FlexibleContexts, ExistentialQuantification, OverlappingInstances, FunctionalDependencies, UndecidableInstances #-}
module Perl.Sub
( G.getSubContext
, retSub
, Subable (..)
, sub
, subDo
, defSub
, die
) where
import Data.Array.IArray
import Control.Monad
import Control.Monad.Catch
import Control.Monad.Trans.Class
import Control.Monad.IO.Class
import Foreign.C.String
import Foreign.Ptr
import Perl.Type
import Perl.Monad
import Perl.SV
import Perl.SVArray
import qualified Perl.Internal.MonadGlue as G
retSub :: ToSVArray a => a -> Perl s ToSVArrayObj
retSub = return . ToSVArrayObj
class ToSVArray ret => Subable a ret | a -> ret where
subBody :: [SV] -> a -> Perl s ret
instance ToSVArray ret => Subable (Perl s ret) ret where
subBody _ body = PerlT $ \perl cv ->
unPerlT body perl cv
instance (FromSV a, ToSVArray ret) => Subable ([a] -> Perl s ret) ret where
subBody args lambda = do
a <- mapM fromSV args
subBody undefined (lambda a)
-- | Peel one argument off the Perl @\@_@ list: convert the head with
-- 'fromSV' (or fall back to 'fromSVNon' when the list is exhausted), apply
-- the curried body to it, and keep resolving the remaining parameters
-- against the rest of the list.
currySub :: (ToSVArray ret, FromSV a, Subable others ret) => [SV] -> (a -> others) -> Perl s ret
currySub args lambda = do
  (a, others) <- case args of
    [] -> do
      a' <- fromSVNon
      return (a', [])
    (a:as) -> do
      a' <- fromSV a
      return (a', as)
  subBody others (lambda a)
instance Subable others ret => Subable (SV -> others) ret where subBody = currySub
instance Subable others ret => Subable (Int -> others) ret where subBody = currySub
instance Subable others ret => Subable (Double -> others) ret where subBody = currySub
instance Subable others ret => Subable (String -> others) ret where subBody = currySub
instance Subable others ret => Subable (RefSV -> others) ret where subBody = currySub
instance Subable others ret => Subable (RefAV -> others) ret where subBody = currySub
instance Subable others ret => Subable (RefHV -> others) ret where subBody = currySub
instance Subable others ret => Subable (RefCV -> others) ret where subBody = currySub
instance ToSVArray ret => Subable (String -> Perl s ret) ret where subBody = currySub
subCommon :: (ToSVArray ret, Subable a ret) => a -> Perl s ret
subCommon body = do
args <- G.getSubArgs
subBody (elems args) body
sub :: (ToSVArray ret, MonadCatch m, MonadIO m, Subable a ret) => a -> PerlT s m RefCV
sub body = G.makeSub $ subCommon body
subDo :: ToSVArray ret => Perl s ret -> Perl s ret
subDo = id
-- | Install @body@ as a named Perl subroutine: the name is marshalled to a
-- C string and the common sub wrapper ('subCommon') is registered with the
-- interpreter via 'G.defineSub'.
defSub :: (ToSVArray ret, MonadCatch m, MonadIO m, Subable a ret) => String -> a -> PerlT s m ()
defSub name body = PerlT $ \perl cv -> do
  liftIO $ withCString name $ \cName ->
    unPerlT (G.defineSub cName $ subCommon body) perl cv
-- | Throw a Perl exception carrying @msg@ both as the Haskell payload and
-- as a mortal Perl SV, so the interpreter side can see it too.
die :: (MonadCatch m, MonadIO m) => String -> PerlT s m a
die msg = do
  errSV <- toSVMortal msg
  throwM $ PerlException msg errSV
| CindyLinz/Haskell-Perl | src/Perl/Sub.hs | mit | 2,867 | 0 | 15 | 565 | 1,079 | 557 | 522 | 67 | 2 |
{-# LANGUAGE TupleSections #-}
-- | Convert the concrete syntax into the syntax of cubical TT.
module Resolver where
import Control.Applicative
import Control.Monad
import Control.Monad.Reader
import Control.Monad.Except
import Control.Monad.Identity
import Data.Maybe
import Data.List
import Data.Map (Map,(!))
import qualified Data.Map as Map
import Exp.Abs
import CTT (Ter,Ident,Loc(..),mkApps,mkWheres)
import qualified CTT
import Connections (negFormula,andFormula,orFormula)
import qualified Connections as C
-- | Useful auxiliary functions
-- | Applicative cons: prepend an effectful head onto an effectful list.
(<:>) :: Applicative f => f a -> f [a] -> f [a]
(<:>) = liftA2 (:)
-- Un-something functions
-- | Extract the identifier from a 'Var' expression, if it is one.
unVar :: Exp -> Maybe Ident
unVar (Var (AIdent (_,x))) = Just x
unVar _ = Nothing
-- | Turn a where-clause into an equivalent let expression.
unWhere :: ExpWhere -> Exp
unWhere (Where e ds) = Let ds e
unWhere (NoWhere e) = e
-- Tail recursive form to transform a sequence of applications
-- App (App (App u v) ...) w into (u, [v, …, w])
-- (cleaner than the previous version of unApps)
unApps :: Exp -> [Exp] -> (Exp, [Exp])
unApps (App u v) ws = unApps u (v : ws)
unApps u ws = (u, ws)
-- Turns an expression of the form App (... (App id1 id2) ... idn)
-- into a list of idents
appsToIdents :: Exp -> Maybe [Ident]
appsToIdents = mapM unVar . uncurry (:) . flip unApps []
-- Transform a sequence of applications
-- (((u v1) .. vn) phi1) .. phim into (u,[v1,..,vn],[phi1,..,phim])
unAppsFormulas :: Exp -> [Formula]-> (Exp,[Exp],[Formula])
unAppsFormulas (AppFormula u phi) phis = unAppsFormulas u (phi:phis)
unAppsFormulas u phis = (x,xs,phis)
  where (x,xs) = unApps u []
-- Flatten a tele
-- | Expand a telescope so each identifier is paired with its type.
flattenTele :: [Tele] -> [(Ident,Exp)]
flattenTele tele =
  [ (unAIdent i,typ) | Tele id ids typ <- tele, i <- id:ids ]
-- Flatten a PTele
-- The parser stores the bound names of a PTele as a nested application, so
-- 'appsToIdents' must succeed; anything else is a malformed telescope.
flattenPTele :: [PTele] -> Resolver [(Ident,Exp)]
flattenPTele [] = return []
flattenPTele (PTele exp typ : xs) = case appsToIdents exp of
  Just ids -> do
    pt <- flattenPTele xs
    return $ map (,typ) ids ++ pt
  Nothing -> throwError "malformed ptele"
-------------------------------------------------------------------------------
-- | Resolver and environment
data SymKind = Variable | Constructor | PConstructor | Name
deriving (Eq,Show)
-- local environment for constructors
data Env = Env { envModule :: String,
variables :: [(Ident,SymKind)] }
deriving (Eq,Show)
type Resolver a = ReaderT Env (ExceptT String Identity) a
emptyEnv :: Env
emptyEnv = Env "" []
runResolver :: Resolver a -> Either String a
runResolver x = runIdentity $ runExceptT $ runReaderT x emptyEnv
updateModule :: String -> Env -> Env
updateModule mod e = e{envModule = mod}
insertIdent :: (Ident,SymKind) -> Env -> Env
insertIdent (n,var) e
| n == "_" = e
| otherwise = e{variables = (n,var) : variables e}
insertIdents :: [(Ident,SymKind)] -> Env -> Env
insertIdents = flip $ foldr insertIdent
insertName :: AIdent -> Env -> Env
insertName (AIdent (_,x)) = insertIdent (x,Name)
insertNames :: [AIdent] -> Env -> Env
insertNames = flip $ foldr insertName
insertVar :: Ident -> Env -> Env
insertVar x = insertIdent (x,Variable)
insertVars :: [Ident] -> Env -> Env
insertVars = flip $ foldr insertVar
insertAIdent :: AIdent -> Env -> Env
insertAIdent (AIdent (_,x)) = insertIdent (x,Variable)
insertAIdents :: [AIdent] -> Env -> Env
insertAIdents = flip $ foldr insertAIdent
getLoc :: (Int,Int) -> Resolver Loc
getLoc l = Loc <$> asks envModule <*> pure l
unAIdent :: AIdent -> Ident
unAIdent (AIdent (_,x)) = x
-- | Resolve an identifier that must denote an interval name; anything else
-- (variable, constructor, unknown) is reported with its source position
-- and module.
resolveName :: AIdent -> Resolver C.Name
resolveName (AIdent (l,x)) = do
  modName <- asks envModule
  vars <- asks variables
  case lookup x vars of
    Just Name -> return $ C.Name x
    _ -> throwError $ "Cannot resolve name " ++ x ++ " at position " ++
                      show l ++ " in module " ++ modName
-- | Resolve a term-level identifier to a core term: plain variables map to
-- 'CTT.Var', data constructors to a nullary 'CTT.Con'. Path constructors,
-- interval names and unknown identifiers all produce positioned errors.
resolveVar :: AIdent -> Resolver Ter
resolveVar (AIdent (l,x)) = do
  modName <- asks envModule
  vars <- asks variables
  case lookup x vars of
    Just Variable -> return $ CTT.Var x
    Just Constructor -> return $ CTT.Con x []
    Just PConstructor ->
      throwError $ "The path constructor " ++ x ++ " is used as a" ++
                   " variable at " ++ show l ++ " in " ++ modName ++
                   " (path constructors should have their type in" ++
                   " curly braces as first argument)"
    Just Name ->
      throwError $ "Name " ++ x ++ " used as a variable at position " ++
                   show l ++ " in module " ++ modName
    _ -> throwError $ "Cannot resolve variable " ++ x ++ " at position " ++
                      show l ++ " in module " ++ modName
lam :: (Ident,Exp) -> Resolver Ter -> Resolver Ter
lam (a,t) e = CTT.Lam a <$> resolveExp t <*> local (insertVar a) e
lams :: [(Ident,Exp)] -> Resolver Ter -> Resolver Ter
lams = flip $ foldr lam
plam :: AIdent -> Resolver Ter -> Resolver Ter
plam i e = CTT.PLam (C.Name (unAIdent i)) <$> local (insertName i) e
plams :: [AIdent] -> Resolver Ter -> Resolver Ter
plams [] _ = throwError "Empty plam abstraction"
plams xs e = foldr plam e xs
bind :: (Ter -> Ter) -> (Ident,Exp) -> Resolver Ter -> Resolver Ter
bind f (x,t) e = f <$> lam (x,t) e
binds :: (Ter -> Ter) -> [(Ident,Exp)] -> Resolver Ter -> Resolver Ter
binds f = flip $ foldr $ bind f
resolveApps :: Exp -> [Exp] -> Resolver Ter
resolveApps x xs = mkApps <$> resolveExp x <*> mapM resolveExp xs
resolveExp :: Exp -> Resolver Ter
resolveExp e = case e of
U -> return CTT.U
Var x -> resolveVar x
App t s -> resolveApps x xs
where (x,xs) = unApps t [s]
Sigma ptele b -> do
tele <- flattenPTele ptele
binds CTT.Sigma tele (resolveExp b)
Pi ptele b -> do
tele <- flattenPTele ptele
binds CTT.Pi tele (resolveExp b)
Fun a b -> bind CTT.Pi ("_",a) (resolveExp b)
Lam ptele t -> do
tele <- flattenPTele ptele
lams tele (resolveExp t)
Fst t -> CTT.Fst <$> resolveExp t
Snd t -> CTT.Snd <$> resolveExp t
Pair t0 ts -> do
e <- resolveExp t0
es <- mapM resolveExp ts
return $ foldr1 CTT.Pair (e:es)
Split t brs -> do
t' <- resolveExp t
brs' <- mapM resolveBranch brs
l@(Loc n (i,j)) <- getLoc (case brs of
OBranch (AIdent (l,_)) _ _:_ -> l
PBranch (AIdent (l,_)) _ _ _:_ -> l
_ -> (0,0))
return $ CTT.Split (n ++ "_L" ++ show i ++ "_C" ++ show j) l t' brs'
Let decls e -> do
(rdecls,names) <- resolveDecls decls
mkWheres rdecls <$> local (insertIdents names) (resolveExp e)
PLam is e -> plams is (resolveExp e)
Hole (HoleIdent (l,_)) -> CTT.Hole <$> getLoc l
AppFormula t phi ->
let (x,xs,phis) = unAppsFormulas e []
in case x of
PCon n a ->
CTT.PCon (unAIdent n) <$> resolveExp a <*> mapM resolveExp xs
<*> mapM resolveFormula phis
_ -> CTT.AppFormula <$> resolveExp t <*> resolveFormula phi
PathP a u v -> CTT.PathP <$> resolveExp a <*> resolveExp u <*> resolveExp v
Comp u v ts -> CTT.Comp <$> resolveExp u <*> resolveExp v <*> resolveSystem ts
Fill u v ts -> CTT.Fill <$> resolveExp u <*> resolveExp v <*> resolveSystem ts
Trans u v -> CTT.Comp <$> resolveExp u <*> resolveExp v <*> pure Map.empty
Glue u ts -> CTT.Glue <$> resolveExp u <*> resolveSystem ts
GlueElem u ts -> CTT.GlueElem <$> resolveExp u <*> resolveSystem ts
UnGlueElem u ts -> CTT.UnGlueElem <$> resolveExp u <*> resolveSystem ts
Id a u v -> CTT.Id <$> resolveExp a <*> resolveExp u <*> resolveExp v
IdPair u ts -> CTT.IdPair <$> resolveExp u <*> resolveSystem ts
IdJ a t c d x p -> CTT.IdJ <$> resolveExp a <*> resolveExp t <*> resolveExp c
<*> resolveExp d <*> resolveExp x <*> resolveExp p
_ -> do
modName <- asks envModule
throwError ("Could not resolve " ++ show e ++ " in module " ++ modName)
resolveWhere :: ExpWhere -> Resolver Ter
resolveWhere = resolveExp . unWhere
resolveSystem :: System -> Resolver (C.System Ter)
resolveSystem (System ts) = do
ts' <- sequence [ (,) <$> resolveFace alpha <*> resolveExp u
| Side alpha u <- ts ]
let alphas = map fst ts'
unless (nub alphas == alphas) $
throwError $ "system contains same face multiple times: " ++
C.showListSystem ts'
-- Note: the symbols in alpha are in scope in u, but they mean 0 or 1
return $ Map.fromList ts'
resolveFace :: [Face] -> Resolver C.Face
resolveFace alpha =
Map.fromList <$> sequence [ (,) <$> resolveName i <*> resolveDir d
| Face i d <- alpha ]
resolveDir :: Dir -> Resolver C.Dir
resolveDir Dir0 = return 0
resolveDir Dir1 = return 1
-- | Resolve an interval formula, mapping its names through 'resolveName'
-- and rebuilding connectives with the normalising smart constructors
-- ('negFormula', 'andFormula', 'orFormula') from the Connections module.
resolveFormula :: Formula -> Resolver C.Formula
resolveFormula (Dir d) = C.Dir <$> resolveDir d
resolveFormula (Atom i) = C.Atom <$> resolveName i
resolveFormula (Neg phi) = negFormula <$> resolveFormula phi
resolveFormula (Conj phi _ psi) =
  andFormula <$> resolveFormula phi <*> resolveFormula psi
resolveFormula (Disj phi psi) =
  orFormula <$> resolveFormula phi <*> resolveFormula psi
resolveBranch :: Branch -> Resolver CTT.Branch
resolveBranch (OBranch (AIdent (_,lbl)) args e) = do
re <- local (insertAIdents args) $ resolveWhere e
return $ CTT.OBranch lbl (map unAIdent args) re
resolveBranch (PBranch (AIdent (_,lbl)) args is e) = do
re <- local (insertNames is . insertAIdents args) $ resolveWhere e
let names = map (C.Name . unAIdent) is
return $ CTT.PBranch lbl (map unAIdent args) names re
resolveTele :: [(Ident,Exp)] -> Resolver CTT.Tele
resolveTele [] = return []
resolveTele ((i,d):t) =
((i,) <$> resolveExp d) <:> local (insertVar i) (resolveTele t)
resolveLabel :: [(Ident,SymKind)] -> Label -> Resolver CTT.Label
resolveLabel _ (OLabel n vdecl) =
CTT.OLabel (unAIdent n) <$> resolveTele (flattenTele vdecl)
resolveLabel cs (PLabel n vdecl is sys) = do
let tele' = flattenTele vdecl
ts = map fst tele'
names = map (C.Name . unAIdent) is
n' = unAIdent n
cs' = delete (n',PConstructor) cs
CTT.PLabel n' <$> resolveTele tele' <*> pure names
<*> local (insertNames is . insertIdents cs' . insertVars ts)
(resolveSystem sys)
-- Resolve a non-mutual declaration; returns resolver for type and
-- body separately
resolveNonMutualDecl :: Decl -> (Ident,Resolver CTT.Ter
,Resolver CTT.Ter,[(Ident,SymKind)])
resolveNonMutualDecl d = case d of
DeclDef (AIdent (_,f)) tele t body ->
let tele' = flattenTele tele
a = binds CTT.Pi tele' (resolveExp t)
d = lams tele' (local (insertVar f) $ resolveWhere body)
in (f,a,d,[(f,Variable)])
DeclData x tele sums -> resolveDeclData x tele sums null
DeclHData x tele sums ->
resolveDeclData x tele sums (const False) -- always pick HSum
DeclSplit (AIdent (l,f)) tele t brs ->
let tele' = flattenTele tele
vars = map fst tele'
a = binds CTT.Pi tele' (resolveExp t)
d = do
loc <- getLoc l
ty <- local (insertVars vars) $ resolveExp t
brs' <- local (insertVars (f:vars)) (mapM resolveBranch brs)
lams tele' (return $ CTT.Split f loc ty brs')
in (f,a,d,[(f,Variable)])
DeclUndef (AIdent (l,f)) tele t ->
let tele' = flattenTele tele
a = binds CTT.Pi tele' (resolveExp t)
d = CTT.Undef <$> getLoc l <*> a
in (f,a,d,[(f,Variable)])
-- Helper function to resolve data declarations. The predicate p is
-- used to decide if we should use Sum or HSum.
resolveDeclData :: AIdent -> [Tele] -> [Label] -> ([(Ident,SymKind)] -> Bool) ->
(Ident, Resolver Ter, Resolver Ter, [(Ident, SymKind)])
resolveDeclData (AIdent (l,f)) tele sums p =
let tele' = flattenTele tele
a = binds CTT.Pi tele' (return CTT.U)
cs = [ (unAIdent lbl,Constructor) | OLabel lbl _ <- sums ]
pcs = [ (unAIdent lbl,PConstructor) | PLabel lbl _ _ _ <- sums ]
sum = if p pcs then CTT.Sum else CTT.HSum
d = lams tele' $ local (insertVar f) $
sum <$> getLoc l <*> pure f
<*> mapM (resolveLabel (cs ++ pcs)) sums
in (f,a,d,(f,Variable):cs ++ pcs)
resolveRTele :: [Ident] -> [Resolver CTT.Ter] -> Resolver CTT.Tele
resolveRTele [] _ = return []
resolveRTele (i:is) (t:ts) = do
a <- t
as <- local (insertVar i) (resolveRTele is ts)
return ((i,a):as)
-- Best effort to find the location of a declaration. This implementation
-- returns the location of the first identifier it contains.
-- | Best effort to find the location of a declaration: returns the
-- position of the first identifier it contains, or the sentinel (-1, 0)
-- when the declaration carries no identifier at all.
findDeclLoc :: Decl -> Resolver Loc
findDeclLoc d = getLoc loc
  where loc = fromMaybe (-1, 0) $ mloc d
        mloc d = case d of
          DeclDef (AIdent (l, _)) _ _ _ -> Just l
          DeclData (AIdent (l, _)) _ _ -> Just l
          DeclHData (AIdent (l, _)) _ _ -> Just l
          DeclSplit (AIdent (l, _)) _ _ _ -> Just l
          DeclUndef (AIdent (l, _)) _ _ -> Just l
          -- A mutual block inherits the first locatable member's position.
          DeclMutual ds -> listToMaybe $ mapMaybe mloc ds
          DeclOpaque (AIdent (l, _)) -> Just l
          DeclTransparent (AIdent (l, _)) -> Just l
          DeclTransparentAll -> Nothing
-- Resolve a declaration
resolveDecl :: Decl -> Resolver (CTT.Decls,[(Ident,SymKind)])
resolveDecl d = case d of
DeclMutual decls -> do
let (fs,ts,bs,nss) = unzip4 $ map resolveNonMutualDecl decls
ns = concat nss -- TODO: some sanity checks? Duplicates!?
when (nub (map fst ns) /= concatMap (map fst) nss) $
throwError ("Duplicated constructor or ident: " ++ show nss)
as <- resolveRTele fs ts
-- The bodies know about all the names and constructors in the
-- mutual block
ds <- sequence $ map (local (insertIdents ns)) bs
let ads = zipWith (\ (x,y) z -> (x,(y,z))) as ds
l <- findDeclLoc d
return (CTT.MutualDecls l ads,ns)
DeclOpaque i -> do
resolveVar i
return (CTT.OpaqueDecl (unAIdent i), [])
DeclTransparent i -> do
resolveVar i
return (CTT.TransparentDecl (unAIdent i), [])
DeclTransparentAll -> return (CTT.TransparentAllDecl, [])
_ -> do let (f,typ,body,ns) = resolveNonMutualDecl d
l <- findDeclLoc d
a <- typ
d <- body
return (CTT.MutualDecls l [(f,(a,d))],ns)
-- | Resolve a declaration list left to right; names introduced by earlier
-- declarations are in scope for the later ones via 'insertIdents'.
resolveDecls :: [Decl] -> Resolver ([CTT.Decls],[(Ident,SymKind)])
resolveDecls [] = return ([],[])
resolveDecls (d:ds) = do
  (rtd,names) <- resolveDecl d
  (rds,names') <- local (insertIdents names) $ resolveDecls ds
  return (rtd : rds, names' ++ names)
-- | Resolve one module's declarations under its module name (used for
-- error positions).
resolveModule :: Module -> Resolver ([CTT.Decls],[(Ident,SymKind)])
resolveModule (Module (AIdent (_,n)) _ decls) =
  local (updateModule n) $ resolveDecls decls
-- | Resolve modules in order, with each module's exports in scope for the
-- modules that follow it.
resolveModules :: [Module] -> Resolver ([CTT.Decls],[(Ident,SymKind)])
resolveModules [] = return ([],[])
resolveModules (mod:mods) = do
  (rmod, names) <- resolveModule mod
  (rmods,names') <- local (insertIdents names) $ resolveModules mods
  return (rmod ++ rmods, names' ++ names)
| linuborj/cubicaltt | Resolver.hs | mit | 15,188 | 0 | 20 | 3,931 | 6,002 | 3,030 | 2,972 | 319 | 29 |
module Network.BitFunctor.Tests where
import Network.BitFunctor.Account (AccountId)
import Network.BitFunctor.Block (Block)
import Network.BitFunctor.Block.Arbitrary ()
import Network.BitFunctor.Crypto.Hash (Hash, Id)
import Network.BitFunctor.Crypto.Types (PublicKey, Signature)
import Network.BitFunctor.Transaction (Transaction)
import Network.BitFunctor.Transaction.Arbitrary()
import Data.Binary (Binary, encode, decode)
import Test.QuickCheck
import Test.Tasty (TestTree, testGroup)
import Test.Tasty.HUnit (testCase)
import Test.Tasty.QuickCheck (testProperty)
import Test.HUnit (Assertion, (@?=))
tests :: TestTree
tests = testGroup "Network.BitFunctor.Tests"
[ testCase "true" $
True @?= True
, testProperty "binary_decode(binary_encode(pubkey)) == pubkey" (prop_binary_encdec_inv :: PublicKey -> Bool)
, testProperty "binary_decode(binary_encode(sig)) == sig" (prop_binary_encdec_inv :: Signature -> Bool)
, testProperty "binary_decode(binary_encode(accountid)) == accountid" (prop_binary_encdec_inv :: AccountId -> Bool)
, testProperty "binary_decode(binary_encode(hash_id)) == hash_id" (prop_binary_encdec_inv :: Hash Id -> Bool)
, testProperty "binary_decode(binary_encode(tx)) == tx" (prop_binary_encdec_inv :: Transaction -> Bool)
, testProperty "binary_decode(binary_encode(block)) == block" (prop_binary_encdec_inv :: Block -> Bool)
, testProperty "h1 == h2 => binary_encode(h1) == binary_encode(h2)" (prop_binary_eq_over_enc :: Hash Id -> Hash Id -> Bool)
]
-- | Round-trip law: decoding an encoding yields the original value.
prop_binary_encdec_inv :: (Binary a, Eq a) => a -> Bool
prop_binary_encdec_inv obj = decode (encode obj) == obj

-- | Encoding respects equality: equal values must serialise identically.
prop_binary_eq_over_enc :: (Binary a, Eq a) => a -> a -> Bool
prop_binary_eq_over_enc o1 o2 = not (o1 == o2) || encode o1 == encode o2
| BitFunctor/bitfunctor | test/src/Network/BitFunctor/Tests.hs | mit | 1,755 | 0 | 11 | 216 | 437 | 251 | 186 | 29 | 1 |
{-# OPTIONS_GHC -Wall #-}
module Todo.Cli (run) where
import Data.List (sortBy, isInfixOf)
import Data.Char (toLower, isUpper)
import Data.Ord (comparing)
import System.IO
import qualified System.IO.Strict as S
import Options.Applicative
import Todo
-- | A task supplied on the command line (alias for signature readability).
type TaskArg = Task
-- | Reads the task repository: given a parser from raw 'String' contents
-- to tasks, produce the tasks (from a file or from stdin).
type RepoReader = (String -> [Task]) -> IO [Task]
-- | Writes the complete task list back to the repository (file or stdout).
type RepoWriter = [Task] -> IO ()
-- | The three sub-commands understood by the CLI.
data Command = CmdAdd TaskArg RepoReader RepoWriter
             | CmdList RepoReader
             | CmdQuery RepoReader [QueryArg] (Maybe SortArg)
-- | Filters that can be applied by the @query@ sub-command.
data QueryArg = QueryTodo -- [-C | --no-complete] means not complete
              | QueryDone -- [-c | --complete] means complete
              | QueryMinPriority (Maybe Priority) -- [-m MIN | --min=MIN] no priority is lowest priority
              | QueryMaxPriority (Maybe Priority) -- [-M MAX | --max=MAX] no priority is lowest priority
              | QueryContentLike Content -- [ --like=TEXT] find tasks with TEXT in their content
              deriving Show
-- | Sort orders selectable for query results.
data SortArg = SortDateAsc -- [ --sort-date-asc ]
             | SortDateDesc -- [ --sort-date-desc ]
             | SortPriorityAsc -- [ --sort-priority-asc ]
             | SortPriorityDesc -- [ --sort-priority-desc ]
             deriving Show
-- | Entry point: parse the command line and dispatch the chosen sub-command.
run :: IO ()
run = execParser (info (helper <*> parseCommand) idm) >>= dispatch
  where
    dispatch (CmdList reader) = do
      tasks <- reader readTasks
      putStr (showTasks tasks)
    dispatch (CmdAdd task reader writer) = do
      existing <- reader readTasks
      writer (task : existing)
    dispatch (CmdQuery reader filters order) = do
      tasks <- reader readTasks
      putStr . showTasks . sort order $ foldr query tasks filters
-- | Read tasks from a file, parsing its (strictly read) contents with the
-- supplied parser; strict reading keeps the handle usable until closed.
repoFileReader :: FilePath -> RepoReader
repoFileReader path parse =
  withFile path ReadMode $ \handle -> parse <$> S.hGetContents handle
-- | Read tasks from stdin (strict read, so input is fully consumed).
repoStdin :: RepoReader
repoStdin parse = fmap parse S.getContents
-- | Write the rendered task list to a file, replacing previous contents.
repoFileWriter :: FilePath -> RepoWriter
repoFileWriter path tasks =
  withFile path WriteMode $ \handle -> hPutStr handle (showTasks tasks)
-- | Write the rendered task list to stdout.
repoStdout :: RepoWriter
repoStdout tasks = putStr (showTasks tasks)
-- | Turn one query flag into a filter over the task list.
--
-- We deliberately avoid a catch-all pattern so that adding a new
-- 'QueryArg' constructor makes the compiler warn about the missing case.
query :: QueryArg -> [Task] -> [Task]
query q = filter keep
  where
    keep = case q of
      QueryTodo                 -> not . isComplete
      QueryDone                 -> isComplete
      QueryMinPriority (Just m) -> minPriority m
      QueryMinPriority Nothing  -> const True
      QueryMaxPriority (Just m) -> maxPriority m
      QueryMaxPriority Nothing  -> const True
      -- case-insensitive substring match on the task content
      QueryContentLike s        -> isInfixOf (lower s) . lower . content
    lower = map toLower
-- | Sort tasks according to the (optional) sort flag; with no flag the
-- original order is kept.
sort :: Maybe SortArg -> [Task] -> [Task]
sort Nothing  = id
sort (Just s) = sortBy (ordering s)
  where
    ordering :: SortArg -> Task -> Task -> Ordering
    ordering SortDateAsc      = comparing maybeDate
    ordering SortDateDesc     = flip (comparing maybeDate)
    ordering SortPriorityAsc  = comparing priority
    ordering SortPriorityDesc = flip (comparing priority)
-- | Top-level parser combining the three sub-commands.
parseCommand :: Parser Command
parseCommand = subparser (cmdAdd <> cmdList <> cmdQuery)
-- | @add@ sub-command: parse a TASK argument plus repository in/out options.
cmdAdd :: Mod CommandFields Command
cmdAdd =
  command "add" $
    info (CmdAdd <$> (readTask <$> taskInput) <*> repoReaderArg <*> repoWriterArg)
         (briefDesc <> progDesc "Add TASK to REPO")
  where
    taskInput = strArgument (metavar "TASK")
-- | @list@ sub-command: show every task from the repository.
cmdList :: Mod CommandFields Command
cmdList =
  command "list" $
    info (fmap CmdList repoReaderArg)
         (briefDesc <> progDesc "List all tasks from REPO (or stdin if missing)")
-- | Repository source: @-i/--input REPO@, falling back to stdin.
repoReaderArg :: Parser RepoReader
repoReaderArg = fromFile <|> pure repoStdin
  where
    fromFile =
      repoFileReader <$>
        strOption (short 'i' <> long "input" <> metavar "REPO" <> help "tasks source")
-- | Repository destination: @-o/--output REPO@, falling back to stdout.
repoWriterArg :: Parser RepoWriter
repoWriterArg = toFile <|> pure repoStdout
  where
    toFile =
      repoFileWriter <$>
        strOption (short 'o' <> long "output" <> metavar "REPO" <> help "tasks destination")
-- | @query@ sub-command: repository source, filters and optional sort order.
cmdQuery :: Mod CommandFields Command
cmdQuery =
  command "query" $
    info (CmdQuery <$> repoReaderArg <*> queryArgs <*> sortArg)
         (briefDesc <> progDesc "Query REPO according to specified filters")
-- | Zero or more filter flags, each occurrence contributing one 'QueryArg'.
queryArgs :: Parser [QueryArg]
queryArgs = many single
  where
    single = qDone <|> qTodo <|> qMinPriority <|> qMaxPriority <|> qContentLike
    qDone = flag' QueryDone (short 'c' <> long "complete" <> help "Complete tasks only")
    qTodo = flag' QueryTodo (short 'C' <> long "no-complete" <> help "Incomplete tasks only")
    qMinPriority =
      QueryMinPriority . bindPriority <$>
        priorityOption (short 'm' <> long "min-priority" <> metavar "MIN" <> help "tasks with priority at least MIN")
    qMaxPriority =
      QueryMaxPriority . bindPriority <$>
        priorityOption (short 'M' <> long "max-priority" <> metavar "MAX" <> help "tasks with priority at most MAX")
    qContentLike =
      QueryContentLike <$>
        strOption (short 'l' <> long "like" <> metavar "TEXT" <> help "task with content like TEXT")
-- | Validate an optional raw priority character into a 'Priority';
-- 'Nothing' propagates and invalid characters are rejected by 'mkPriority'.
bindPriority :: Maybe Char -> Maybe Priority
bindPriority = (mkPriority =<<)
-- | Option reader that accepts exactly one uppercase character.
priorityOption :: Mod OptionFields (Maybe Char) -> Parser (Maybe Char)
priorityOption = option readChar
  where
    readChar [c] | isUpper c = return (Just c)
    readChar _               = fail "Invalid option (single uppercase character required)"
-- | Optional sort flag; at most one of the four @--sort-PARAM-ORDER@ flags.
sortArg :: Parser (Maybe SortArg)
sortArg = optional (foldr1 (<|>) alternatives)
  where
    alternatives =
      [ build SortDateAsc      "date"     "asc"
      , build SortDateDesc     "date"     "desc"
      , build SortPriorityAsc  "priority" "asc"
      , build SortPriorityDesc "priority" "desc"
      ]
    -- One flag per constructor; the flag name and help text are derived
    -- from the parameter ("date"/"priority") and order ("asc"/"desc").
    build constr param order =
      flag' constr
            (long ("sort-" ++ param ++ "-" ++ order) <>
             help ("Sort tasks by " ++ param ++ " in " ++ order ++ "ending order"))
| nadirs/todohs | src/cli/Todo/Cli.hs | mit | 5,708 | 0 | 15 | 1,359 | 1,602 | 809 | 793 | 109 | 7 |
{-# LANGUAGE OverloadedStrings #-}
import Control.Concurrent
import Control.Monad
import System.IO
import System.Exit
import System.Environment
import Data.Time.Clock
import System.ZMQ4.Monadic
import qualified Data.ByteString as SB
import Text.Printf
-- | Throughput benchmark receiver: subscribe on <bind-to>, receive
-- <message-count> messages of <message-size> bytes and report throughput.
main :: IO ()
main = do
    args <- getArgs
    -- exactly three positional arguments are required
    when (length args /= 3) $ do
        hPutStrLn stderr usage
        exitFailure
    let bindTo = args !! 0
        size = read $ args !! 1
        count = read $ args !! 2
    runZMQ $ do
        s <- socket Sub
        subscribe s ""           -- empty prefix: receive everything
        bind s bindTo
        -- first message is received before the clock starts, so connection
        -- setup time is excluded from the measurement
        receive' s size
        start <- liftIO $ getCurrentTime
        loop s count size
        end <- liftIO $ getCurrentTime
        liftIO $ printStat start end size count
  where
    -- receive one message and sanity-check its length
    receive' s sz = do
        msg <- receive s
        when (SB.length msg /= sz) $
            error "message of incorrect size received"
    -- NOTE(review): the loop runs for c = count, count-1, .., 0 (stops when
    -- c < 0), i.e. count+1 receives after the initial one, while the
    -- throughput below divides by count — looks like an off-by-one; confirm
    -- against the sender's message count before changing.
    loop s c sz = unless (c < 0) $ do
        receive' s sz
        loop s (c - 1) sz
-- | Print message size, message count and mean throughput (in msg/s and
-- Mb/s) for @count@ messages of @size@ bytes between @start@ and @end@.
printStat :: UTCTime -> UTCTime -> Int -> Int -> IO ()
printStat start end size count = do
    printf "message size: %d [B]\n" size
    printf "message count: %d\n" count
    printf "mean throughput: %.3f [msg/s]\n" msgsPerSec
    printf "mean throughput: %.3f [Mb/s]\n" megabitsPerSec
  where
    elapsed :: Double
    elapsed = fromRational . toRational $ diffUTCTime end start
    msgsPerSec = fromIntegral count / elapsed
    -- bytes -> bits (*8), then scale to megabits
    megabitsPerSec = (msgsPerSec * fromIntegral size * 8) / 1000000
-- | Usage string printed to stderr on a bad invocation.
usage :: String
usage = unwords ["usage:", "local_thr", "<bind-to>", "<message-size>", "<message-count>"]
| twittner/zeromq-haskell | examples/perf/local_thr.hs | mit | 1,558 | 1 | 14 | 473 | 483 | 228 | 255 | 46 | 1 |
module Git.Repository where
import Foreign.ForeignPtr hiding (newForeignPtr)
import Foreign.Concurrent
import Foreign.Marshal.Alloc
import Foreign.C.String
import Foreign.Storable
import Bindings.Libgit2
import Control.Monad
import Git.Result
data Repository = Repository { repository_ptr::ForeignPtr C'git_repository }
-- | Open the git repository at the given path.
--
-- On success the returned 'Repository' owns the underlying
-- @git_repository@ pointer; @c'git_repository_free@ is attached as the
-- finalizer so the native handle is released when the 'Repository' is
-- garbage collected.
open::String -> Result Repository
open path = alloca $ \ptr_ptr -> withCString path $ \c'path ->
  c'git_repository_open ptr_ptr c'path `handle_git_return` wrap ptr_ptr
  where
    wrap ptr_ptr = do
      repo_ptr <- peek ptr_ptr
      -- fmap instead of liftM (same behaviour, modern idiom); the stray
      -- non-Haskell text that had been fused onto this line is removed
      fmap Repository $ repo_ptr `newForeignPtr` c'git_repository_free repo_ptr
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE OverloadedStrings #-}
-----------------------------------------------------------------------------
--
-- Module : IDE.TextEditor.GtkSourceView
-- Copyright : 2007-2013 Juergen Nicklisch-Franken, Hamish Mackenzie
-- License : GPL
--
-- Maintainer : maintainer@leksah.org
-- Stability : provisional
-- Portability :
--
-- |
--
-----------------------------------------------------------------------------
module IDE.TextEditor.GtkSourceView (
TextEditor(..)
, EditorBuffer(..)
, EditorView(..)
, EditorIter(..)
, EditorMark(..)
, EditorTag(..)
, EditorTagTable(..)
, GtkSourceView(..)
, newGtkBuffer
) where
import IDE.TextEditor.Class (TextEditor(..))
import IDE.Core.Types (colorHexString, Prefs(..), IDE(..), IDEM)
import Control.Monad.IO.Class (MonadIO(..))
import Graphics.UI.Gtk
(castToWidget, textViewGetIterAtLocation, focusOutEvent,
focusInEvent, modifiedChanged, textTagUnderline, textTagBackground,
textTagTableLookup, textTagTableAdd, textTagNew, textIterSetOffset,
textIterSetLineOffset, textIterSetLine, textIterGetBuffer,
textIterStartsWord, textIterStartsLine, textIterIsEnd,
textIterIsStart, textIterGetOffset, textIterGetLine,
textIterGetCharsInLine, textIterGetChar, textIterForwardWordEnd,
textIterForwardToLineEnd, textIterForwardSearch,
textIterForwardFindChar, textIterForwardChars, textIterForwardChar,
textIterEndsWord, textIterBackwardChars, textIterGetLineOffset,
textIterBackwardWordStart, textIterBackwardFindChar,
textIterBackwardChar, populatePopup, eventModifier,
keyReleaseEvent, leaveNotifyEvent, motionNotifyEvent,
keyPressEvent, focusOutEvent, textIterEqual, idleAdd,
bufferInsertText, buttonReleaseEvent, buttonPressEvent,
toggleOverwrite, endUserAction, widgetAddEvents, moveCursor,
scrolledWindowSetPolicy, textViewScrollToIter,
textViewScrollToMark, widgetGrabFocus, widgetGetParent,
castToScrolledWindow, textViewGetOverwrite,
textViewGetIterLocation, textViewBuffer,
textViewBufferToWindowCoords, textBufferSetModified,
textBufferSelectRange, textBufferRemoveTagByName,
textBufferPlaceCursor, textBufferPasteClipboard, widgetModifyFont,
containerAdd, scrolledWindowNew, textViewSetWrapMode,
textBufferMoveMark, textBufferInsert, textBufferHasSelection,
textBufferGetText, textBufferGetTagTable, textBufferGetStartIter,
textBufferGetSlice, textBufferGetSelectionBounds,
textBufferGetSelectionBound, textBufferGetModified,
textBufferGetLineCount, textBufferGetIterAtOffset,
textBufferGetIterAtMark, textBufferGetIterAtLine,
textBufferGetInsert, textBufferGetEndIter, textBufferEndUserAction,
textBufferDeleteSelection, textBufferDelete,
textBufferCutClipboard, textBufferCreateMark,
textBufferCopyClipboard, textBufferBeginUserAction,
textBufferApplyTagByName, TextTag, TextTagTable, TextMark,
textBufferSetText, textIterCopy, TextIter, Modifier(..),
FontDescription, fontDescriptionFromString, fontDescriptionNew,
fontDescriptionSetFamily, EventMask(..), after,
#ifdef MIN_VERSION_gtk3
widgetGetWindow
#else
widgetGetDrawWindow
#endif
)
import Data.Typeable (Typeable)
import Control.Applicative ((<$>))
import Graphics.UI.Gtk.SourceView
(sourceBufferCreateSourceMark, SourceMark,
sourceViewSetShowLineMarks,
sourceStyleSchemeManagerAppendSearchPath, sourceViewSetTabWidth,
sourceViewSetShowLineNumbers, sourceViewSetRightMarginPosition,
sourceViewSetShowRightMargin, sourceViewSetIndentWidth,
castToSourceBuffer, sourceViewSetDrawSpaces, sourceBufferUndo,
sourceBufferSetStyleScheme, sourceStyleSchemeManagerGetScheme,
sourceStyleSchemeManagerGetSchemeIds, sourceStyleSchemeManagerNew,
sourceBufferRedo, sourceViewSetSmartHomeEnd,
sourceViewSetAutoIndent, sourceViewSetIndentOnTab,
sourceViewSetInsertSpacesInsteadOfTabs,
sourceViewSetHighlightCurrentLine, sourceViewNewWithBuffer,
sourceBufferGetCanUndo, sourceBufferGetCanRedo, SourceView,
SourceBuffer, sourceBufferEndNotUndoableAction,
sourceBufferBeginNotUndoableAction, sourceBufferSetMaxUndoLevels,
sourceBufferNew, sourceBufferNewWithLanguage,
sourceLanguageManagerGuessLanguage,
sourceLanguageManagerSetSearchPath,
sourceLanguageManagerGetSearchPath, sourceLanguageManagerNew)
import System.FilePath ((</>))
import System.GIO (contentTypeGuess)
import IDE.Core.State (onIDE, reflectIDE, readIDE, getDataDir)
import Graphics.UI.Gtk.SourceView.Enums
(SourceDrawSpacesFlags(..), SourceSmartHomeEndType(..))
import Graphics.UI.Gtk.General.Enums
(PolicyType(..), TextWindowType(..), WrapMode(..))
import Control.Monad (when, forM_)
import Control.Monad.Reader.Class (MonadReader(..))
import Graphics.UI.Editor.Basics (Connection(..))
import Data.Maybe (maybeToList, fromJust)
import Data.IORef (writeIORef, readIORef, newIORef)
import System.Glib.MainLoop (priorityDefault, idleRemove)
import Data.Char (isSymbol, isAlphaNum)
import System.Glib.Signals (after, on)
import Control.Monad.Trans.Class (MonadTrans(..))
import System.Glib.Attributes (get, AttrOp(..), set)
import qualified Graphics.UI.Gtk as Gtk (endUserAction)
import IDE.Utils.GUIUtils (fontDescription)
import Data.Text (Text)
import qualified Data.Text as T (all, length, pack)
import Data.Monoid ((<>))
import Graphics.UI.Gtk.Multiline.TextBuffer
(textBufferDeleteMark, textBufferGetMark)
import Graphics.UI.Gtk.SourceView.SourceMarkAttributes
(queryTooltipText, sourceMarkAttributesSetIconName,
sourceMarkAttributesNew)
import Graphics.UI.Gtk.SourceView.SourceView
(sourceViewSetMarkAttributes)
import Graphics.UI.Gtk.Multiline.TextMark (toTextMark)
-- | Copy a GTK text iter, run the given IO action on the copy, and return
-- the (possibly mutated) copy; the input iter is left untouched.
transformGtkIter :: EditorIter GtkSourceView -> (TextIter -> IO a) -> IDEM (EditorIter GtkSourceView)
transformGtkIter (GtkIter iter) mutate = liftIO $ do
    copy <- textIterCopy iter
    _ <- mutate copy
    return (GtkIter copy)
-- | Like 'transformGtkIter', but the action reports success; 'Nothing' is
-- returned when the underlying GTK call did not find a match.
transformGtkIterMaybe :: EditorIter GtkSourceView -> (TextIter -> IO Bool) -> IDEM (Maybe (EditorIter GtkSourceView))
transformGtkIterMaybe (GtkIter iter) mutate = liftIO $ do
    copy <- textIterCopy iter
    matched <- mutate copy
    return $ if matched then Just (GtkIter copy) else Nothing
data GtkSourceView = GtkSourceView deriving( Typeable, Show )
-- | Create a new source buffer initialised with @contents@.
--
-- The highlighting language is guessed from the file name and content,
-- falling back to Haskell ("text/x-haskell").  The initial text load is
-- wrapped in a not-undoable action so undo cannot erase the file contents.
newGtkBuffer :: Maybe FilePath -> Text -> IDEM (EditorBuffer GtkSourceView)
newGtkBuffer mbFilename contents = liftIO $ do
    lm <- sourceLanguageManagerNew
    dataDir <- getDataDir
    -- make the bundled language specs visible to the language manager
    oldPath <- sourceLanguageManagerGetSearchPath lm
    sourceLanguageManagerSetSearchPath lm (Just $ (dataDir </> "language-specs") : oldPath)
    mbLang <- case mbFilename of
        Just filename -> do
            guess <- contentTypeGuess filename contents (T.length contents)
            sourceLanguageManagerGuessLanguage lm (Just filename) $
                case guess of
                    (True, _) -> Just "text/x-haskell"
                    (False, t) -> Just t
        Nothing -> sourceLanguageManagerGuessLanguage lm Nothing (Just ("text/x-haskell" :: Text))
    -- second chance: if the guess failed, try again with the default type
    mbLang2 <- case mbLang of
        Nothing -> sourceLanguageManagerGuessLanguage lm Nothing (Just ("text/x-haskell" :: Text))
        _ -> return mbLang
    buffer <- case mbLang2 of
        Just sLang -> sourceBufferNewWithLanguage sLang
        Nothing -> sourceBufferNew Nothing
    -- unlimited undo levels
    sourceBufferSetMaxUndoLevels buffer (-1)
    sourceBufferBeginNotUndoableAction buffer
    textBufferSetText buffer contents
    sourceBufferEndNotUndoableAction buffer
    return $ GtkBuffer buffer
-- | 'TextEditor' implementation backed by GtkSourceView.  Each operation
-- delegates to the corresponding gtk / gtksourceview call on the wrapped
-- buffer, view, mark, iter or tag.
--
-- Fix: 'atStart' previously fetched the buffer's *end* iter (copy-paste
-- from 'atEnd'); it now returns the start iter as its name promises.
instance TextEditor GtkSourceView where
    data EditorBuffer GtkSourceView = GtkBuffer SourceBuffer
    data EditorView GtkSourceView = GtkView SourceView
    data EditorMark GtkSourceView = GtkMark TextMark
    data EditorIter GtkSourceView = GtkIter TextIter
    data EditorTagTable GtkSourceView = GtkTagTable TextTagTable
    data EditorTag GtkSourceView = GtkTag TextTag

    -- Buffer
    newBuffer = newGtkBuffer
    applyTagByName (GtkBuffer sb) name (GtkIter first) (GtkIter last) = liftIO $
        textBufferApplyTagByName sb name first last
    beginNotUndoableAction (GtkBuffer sb) = liftIO $ sourceBufferBeginNotUndoableAction sb
    beginUserAction (GtkBuffer sb) = liftIO $ textBufferBeginUserAction sb
    canRedo (GtkBuffer sb) = liftIO $ sourceBufferGetCanRedo sb
    canUndo (GtkBuffer sb) = liftIO $ sourceBufferGetCanUndo sb
    copyClipboard (GtkBuffer sb) clipboard = liftIO $ textBufferCopyClipboard sb clipboard
    -- Creates a named source mark with an icon and tooltip in the gutter.
    createMark (GtkView sv) name (GtkIter i) icon tooltip = liftIO $ do
        attributes <- sourceMarkAttributesNew
        sourceMarkAttributesSetIconName attributes (Just icon)
        on attributes queryTooltipText $ \ mark -> return tooltip
        sourceViewSetMarkAttributes sv name (Just attributes) 1
        sb <- castToSourceBuffer <$> get sv textViewBuffer
        GtkMark . toTextMark <$> sourceBufferCreateSourceMark sb (Just name) name i
    cutClipboard (GtkBuffer sb) clipboard defaultEditable = liftIO $ textBufferCutClipboard sb clipboard defaultEditable
    delete (GtkBuffer sb) (GtkIter first) (GtkIter last) = liftIO $
        textBufferDelete sb first last
    deleteSelection (GtkBuffer sb) = liftIO $
        textBufferDeleteSelection sb True True >> return ()
    endNotUndoableAction (GtkBuffer sb) = liftIO $ sourceBufferEndNotUndoableAction sb
    endUserAction (GtkBuffer sb) = liftIO $ textBufferEndUserAction sb
    getEndIter (GtkBuffer sb) = liftIO $ GtkIter <$> textBufferGetEndIter sb
    getInsertMark (GtkBuffer sb) = liftIO $ GtkMark <$> textBufferGetInsert sb
    getIterAtLine (GtkBuffer sb) line = liftIO $ GtkIter <$> textBufferGetIterAtLine sb line
    getIterAtMark (GtkBuffer sb) (GtkMark m) = liftIO $ GtkIter <$> textBufferGetIterAtMark sb m
    getIterAtOffset (GtkBuffer sb) offset = liftIO $ GtkIter <$> textBufferGetIterAtOffset sb offset
    getLineCount (GtkBuffer sb) = liftIO $ textBufferGetLineCount sb
    getModified (GtkBuffer sb) = liftIO $ textBufferGetModified sb
    getSelectionBoundMark (GtkBuffer sb) = liftIO $ GtkMark <$> textBufferGetSelectionBound sb
    getSelectionBounds (GtkBuffer sb) = liftIO $ (\(a, b) -> (GtkIter a, GtkIter b)) <$>
        textBufferGetSelectionBounds sb
    getInsertIter (GtkBuffer sb) = liftIO $ GtkIter <$> do
        insertMark <- textBufferGetInsert sb
        textBufferGetIterAtMark sb insertMark
    getSlice (GtkBuffer sb) (GtkIter first) (GtkIter last) includeHidenChars = liftIO $
        textBufferGetSlice sb first last includeHidenChars
    getStartIter (GtkBuffer sb) = liftIO $ GtkIter <$> textBufferGetStartIter sb
    getTagTable (GtkBuffer sb) = liftIO $ GtkTagTable <$> textBufferGetTagTable sb
    getText (GtkBuffer sb) (GtkIter first) (GtkIter last) includeHidenChars = liftIO $
        textBufferGetText sb first last includeHidenChars
    hasSelection (GtkBuffer sb) = liftIO $ textBufferHasSelection sb
    insert (GtkBuffer sb) (GtkIter i) text = liftIO $ textBufferInsert sb i text
    -- Builds a source view over the buffer, wrapped in a scrolled window.
    newView (GtkBuffer sb) mbFontString = do
        prefs <- readIDE prefs
        fd <- fontDescription mbFontString
        liftIO $ GtkView <$> do
            sv <- sourceViewNewWithBuffer sb
            sourceViewSetHighlightCurrentLine sv True
            sourceViewSetInsertSpacesInsteadOfTabs sv True
            sourceViewSetIndentOnTab sv True
            sourceViewSetAutoIndent sv True
            sourceViewSetSmartHomeEnd sv SourceSmartHomeEndBefore
            sourceViewSetShowLineMarks sv True
            if wrapLines prefs
                then textViewSetWrapMode sv WrapWord
                else textViewSetWrapMode sv WrapNone
            sw <- scrolledWindowNew Nothing Nothing
            containerAdd sw sv
            widgetModifyFont sv (Just fd)
            return sv
    pasteClipboard (GtkBuffer sb) clipboard (GtkIter i) defaultEditable = liftIO $
        textBufferPasteClipboard sb clipboard i defaultEditable
    placeCursor (GtkBuffer sb) (GtkIter i) = liftIO $ textBufferPlaceCursor sb i
    redo (GtkBuffer sb) = liftIO $ sourceBufferRedo sb
    -- Removes the tag over the whole buffer and drops a same-named mark.
    removeTagByName (GtkBuffer sb) name = liftIO $ do
        first <- textBufferGetStartIter sb
        last <- textBufferGetEndIter sb
        textBufferRemoveTagByName sb name first last
        mbMark <- textBufferGetMark sb name
        case mbMark of
            Just mark -> textBufferDeleteMark sb mark
            Nothing   -> return ()
    selectRange (GtkBuffer sb) (GtkIter first) (GtkIter last) = liftIO $
        textBufferSelectRange sb first last
    setModified (GtkBuffer sb) modified = liftIO $ textBufferSetModified sb modified >> return ()
    -- Applies a style scheme, preferring the "-dark" variant if requested.
    setStyle preferDark (GtkBuffer sb) mbStyle = liftIO $ do
        case mbStyle of
            Nothing  -> return ()
            Just str -> do
                styleManager <- sourceStyleSchemeManagerNew
                dataDir <- getDataDir
                sourceStyleSchemeManagerAppendSearchPath styleManager $ dataDir </> "data/styles"
                ids <- sourceStyleSchemeManagerGetSchemeIds styleManager
                let preferedNames = if preferDark then [str<>"-dark", str] else [str]
                forM_ (take 1 $ filter (flip elem ids) preferedNames) $ \ name -> do
                    scheme <- sourceStyleSchemeManagerGetScheme styleManager name
                    sourceBufferSetStyleScheme sb (Just scheme)
    setText (GtkBuffer sb) text = liftIO $ textBufferSetText sb text
    undo (GtkBuffer sb) = liftIO $ sourceBufferUndo sb
    afterModifiedChanged (GtkBuffer sb) f = do
        ideR <- ask
        liftIO $ do
            id1 <- sb `after` modifiedChanged $ reflectIDE f ideR
            return [ConnectC id1]

    -- View
    bufferToWindowCoords (GtkView sv) point = liftIO $ textViewBufferToWindowCoords sv TextWindowWidget point
    drawTabs (GtkView sv) = liftIO $ sourceViewSetDrawSpaces sv [SourceDrawSpacesTab, SourceDrawSpacesSpace, SourceDrawSpacesTrailing]
    getBuffer (GtkView sv) = liftIO $ (GtkBuffer . castToSourceBuffer) <$> sv `get` textViewBuffer
#ifdef MIN_VERSION_gtk3
    getWindow (GtkView sv) = liftIO $ widgetGetWindow sv
#else
    getWindow (GtkView sv) = liftIO $ Just <$> widgetGetDrawWindow sv
#endif
    getIterAtLocation (GtkView sv) x y = liftIO $ GtkIter <$> textViewGetIterAtLocation sv x y
    getIterLocation (GtkView sv) (GtkIter i) = liftIO $ textViewGetIterLocation sv i
    getOverwrite (GtkView sv) = liftIO $ textViewGetOverwrite sv
    getScrolledWindow (GtkView sv) = liftIO $ fmap (castToScrolledWindow . fromJust) $ widgetGetParent sv
    getEditorWidget (GtkView sv) = return $ castToWidget sv
    grabFocus (GtkView sv) = liftIO $ widgetGrabFocus sv
    scrollToMark (GtkView sv) (GtkMark m) withMargin mbAlign = liftIO $ textViewScrollToMark sv m withMargin mbAlign
    scrollToIter (GtkView sv) (GtkIter i) withMargin mbAlign = liftIO $ textViewScrollToIter sv i withMargin mbAlign >> return ()
    setFont (GtkView sv) mbFontString = do
        fd <- fontDescription mbFontString
        liftIO $ widgetModifyFont sv (Just fd)
    setIndentWidth (GtkView sv) width = liftIO $ sourceViewSetIndentWidth sv width
    -- Word wrap disables the horizontal scrollbar; no-wrap restores it.
    setWrapMode v@(GtkView sv) wrapLines = do
        sw <- getScrolledWindow v
        if wrapLines
            then liftIO $ do
                textViewSetWrapMode sv WrapWord
                scrolledWindowSetPolicy sw PolicyNever PolicyAutomatic
            else liftIO $ do
                textViewSetWrapMode sv WrapNone
                scrolledWindowSetPolicy sw PolicyAutomatic PolicyAutomatic
    setRightMargin (GtkView sv) mbRightMargin = liftIO $ do
        case mbRightMargin of
            Just n -> do
                sourceViewSetShowRightMargin sv True
                sourceViewSetRightMarginPosition sv (fromIntegral n)
            Nothing -> sourceViewSetShowRightMargin sv False
    setShowLineNumbers (GtkView sv) show = liftIO $ sourceViewSetShowLineNumbers sv show
    setTabWidth (GtkView sv) width = liftIO $ sourceViewSetTabWidth sv width

    -- Events
    afterFocusIn (GtkView sv) f = do
        ideR <- ask
        liftIO $ do
            id1 <- sv `after` focusInEvent $ lift $ reflectIDE f ideR >> return False
            return [ConnectC id1]
    -- Fires on cursor moves from keyboard, mouse release or edit actions.
    afterMoveCursor v@(GtkView sv) f = do
        ideR <- ask
        (GtkBuffer sb) <- getBuffer v
        liftIO $ do
            id1 <- sv `after` moveCursor $ \_ _ _ -> reflectIDE f ideR
            sv `widgetAddEvents` [ButtonReleaseMask]
            id2 <- sv `on` buttonReleaseEvent $ lift $ reflectIDE f ideR >> return False
            id3 <- sb `after` Gtk.endUserAction $ reflectIDE f ideR
            return [ConnectC id1, ConnectC id2, ConnectC id3]
    afterToggleOverwrite (GtkView sv) f = do
        ideR <- ask
        liftIO $ do
            id1 <- sv `after` toggleOverwrite $ reflectIDE f ideR
            return [ConnectC id1]
    onButtonPress (GtkView sv) f = do
        id1 <- sv `onIDE` buttonPressEvent $ f
        return [ConnectC id1]
    onButtonRelease (GtkView sv) f = do
        id1 <- sv `onIDE` buttonReleaseEvent $ f
        return [ConnectC id1]
    -- Starts/cancels completion depending on what was typed and where.
    onCompletion v@(GtkView sv) start cancel = do
        ideR <- ask
        (GtkBuffer sb) <- getBuffer v
        liftIO $ do
            -- when multiple afterBufferInsertText are called quickly,
            -- we cancel previous idle action which was not processed,
            -- its handler is stored here.
            -- Paste operation is example of such sequential events (each word!).
            lastHandler <- newIORef Nothing
            id1 <- after sb bufferInsertText $ \iter text -> do
                mapM_ idleRemove =<< maybeToList <$> readIORef lastHandler
                writeIORef lastHandler =<< Just <$> do
                    (flip idleAdd) priorityDefault $ do
                        let isIdent a = isAlphaNum a || a == '\'' || a == '_' || a == '.'
                        let isOp a = isSymbol a || a == ':' || a == '\\' || a == '*' || a == '/' || a == '-'
                                                || a == '!' || a == '@' || a == '%' || a == '&' || a == '?'
                        if (T.all isIdent text) || (T.all isOp text)
                            then do
                                hasSel <- textBufferHasSelection sb
                                if not hasSel
                                    then do
                                        (iterC, _) <- textBufferGetSelectionBounds sb
                                        atC <- textIterEqual iter iterC
                                        -- only trigger when typing at the cursor
                                        when atC $ reflectIDE start ideR
                                        return False
                                    else do
                                        reflectIDE cancel ideR
                                        return False
                            else do
                                reflectIDE cancel ideR
                                return False
                return ()
            id2 <- sv `on` moveCursor $ \_ _ _ -> reflectIDE cancel ideR
            id3 <- sv `on` buttonPressEvent $ lift $ reflectIDE cancel ideR >> return False
            id4 <- sv `on` focusOutEvent $ lift $ reflectIDE cancel ideR >> return False
            return [ConnectC id1, ConnectC id2, ConnectC id3, ConnectC id4]
    onKeyPress (GtkView sv) f = do
        id1 <- sv `onIDE` keyPressEvent $ f
        return [ConnectC id1]
    onMotionNotify (GtkView sv) f = do
        id1 <- sv `onIDE` motionNotifyEvent $ f
        return [ConnectC id1]
    onLeaveNotify (GtkView sv) f = do
        id1 <- sv `onIDE` leaveNotifyEvent $ f
        return [ConnectC id1]
    onKeyRelease (GtkView sv) f = do
        id1 <- sv `onIDE` keyReleaseEvent $ f
        return [ConnectC id1]
    -- Ctrl+click triggers the lookup action.
    onLookupInfo (GtkView sv) f = do
        liftIO $ sv `widgetAddEvents` [ButtonReleaseMask]
        id1 <- sv `onIDE` buttonReleaseEvent $ do
            mod <- lift $ eventModifier
            case mod of
                [Control] -> f >> return True
                _ -> return False
        return [ConnectC id1]
    onMotionNotifyEvent (GtkView sv) handler = do
        liftIO $ widgetAddEvents sv [ButtonMotionMask, Button1MotionMask] -- TODO: this doesn't work yet event gets fired anyways: restrict event to being fired when left mouse button is pressed down
        id1 <- sv `onIDE` motionNotifyEvent $ handler --TODO this is potentially slowing leksah, a better event (if there was any) could be more efficient here
        liftIO $ widgetAddEvents sv [ButtonMotionMask, Button1MotionMask] -- TODO: this doesn't work yet event gets fired anyways: restrict event to being fired when left mouse button is pressed down
        return [ConnectC id1]
    onPopulatePopup (GtkView sv) f = do
        ideR <- ask
        liftIO $ do
            id1 <- sv `on` populatePopup $ \menu -> reflectIDE (f menu) ideR
            return [ConnectC id1]

    -- Iter
    backwardCharC i = transformGtkIter i textIterBackwardChar
    backwardFindCharC i pred mbLimit = transformGtkIterMaybe i $ \x ->
        textIterBackwardFindChar x pred $
            case mbLimit of
                Just (GtkIter limit) -> Just limit
                Nothing -> Nothing
    backwardWordStartC i = transformGtkIterMaybe i textIterBackwardWordStart
    backwardToLineStartC i = transformGtkIter i $ \new -> do
        n <- textIterGetLineOffset new
        textIterBackwardChars new n
        return ()
    endsWord (GtkIter i) = liftIO $ textIterEndsWord i
    forwardCharC i = transformGtkIter i textIterForwardChar
    forwardCharsC i n = transformGtkIter i $ flip textIterForwardChars n
    forwardFindCharC i pred mbLimit = transformGtkIterMaybe i $ \x ->
        textIterForwardFindChar x pred $
            case mbLimit of
                Just (GtkIter limit) -> Just limit
                Nothing -> Nothing
    forwardSearch (GtkIter i) str flags mbLimit = liftIO $ fmap (fmap (\(a, b) -> (GtkIter a, GtkIter b))) $
        textIterForwardSearch i str flags $
            case mbLimit of
                Just (GtkIter limit) -> Just limit
                Nothing -> Nothing
    forwardToLineEndC i = transformGtkIter i textIterForwardToLineEnd
    forwardWordEndC i = transformGtkIterMaybe i textIterForwardWordEnd
    getChar (GtkIter i) = liftIO $ textIterGetChar i
    getCharsInLine (GtkIter i) = liftIO $ textIterGetCharsInLine i
    getLine (GtkIter i) = liftIO $ textIterGetLine i
    getLineOffset (GtkIter i) = liftIO $ textIterGetLineOffset i
    getOffset (GtkIter i) = liftIO $ textIterGetOffset i
    isStart (GtkIter i) = liftIO $ textIterIsStart i
    isEnd (GtkIter i) = liftIO $ textIterIsEnd i
    iterEqual (GtkIter i1) (GtkIter i2) = liftIO $ textIterEqual i1 i2
    startsLine (GtkIter i) = liftIO $ textIterStartsLine i
    startsWord (GtkIter i) = liftIO $ textIterStartsWord i
    atEnd (GtkIter i) = liftIO $ GtkIter <$> do
        buffer <- textIterGetBuffer i
        textBufferGetEndIter buffer
    atLine i line = transformGtkIter i $ flip textIterSetLine line
    atLineOffset i column = transformGtkIter i $ flip textIterSetLineOffset column
    atOffset i offset = transformGtkIter i $ flip textIterSetOffset offset
    atStart (GtkIter i) = liftIO $ GtkIter <$> do
        buffer <- textIterGetBuffer i
        -- BUG FIX: previously this fetched the *end* iter (copy-paste
        -- from 'atEnd'); 'atStart' must return the buffer's start iter.
        textBufferGetStartIter buffer

    -- Tag Table
    newTag (GtkTagTable tt) name = liftIO $ GtkTag <$> do
        t <- textTagNew (Just name)
        textTagTableAdd tt t
        return t
    lookupTag (GtkTagTable tt) name = liftIO $ fmap GtkTag <$> textTagTableLookup tt name

    -- Tag
    background (GtkTag t) color = liftIO $ set t [textTagBackground := T.pack $ colorHexString color]
    underline (GtkTag t) value = liftIO $ set t [textTagUnderline := value]
| cessationoftime/leksah | src/IDE/TextEditor/GtkSourceView.hs | gpl-2.0 | 24,180 | 0 | 46 | 6,073 | 6,214 | 3,148 | 3,066 | 421 | 5 |
-- Copyright : 2012 Eric Kow
-- License : BSD3 (NB: GPL still applies due to GenI 0.20.x)
{-# LANGUAGE FlexibleInstances, TypeSynonymInstances #-}
{-# LANGUAGE StandaloneDeriving #-}
module NLP.GenI.Test.Show where
import NLP.GenI
import NLP.GenI.FeatureStructure
import NLP.GenI.GeniShow
import NLP.GenI.GeniVal
import NLP.GenI.LexicalSelection.Types
import NLP.GenI.Lexicon
import NLP.GenI.Morphology.Types
import NLP.GenI.Polarity.Internal
import NLP.GenI.Polarity.Types
import NLP.GenI.Semantics
import NLP.GenI.TreeSchema
-- Test-only Show instances: render core GenI values via the GenI concrete
-- syntax ('geniShow') instead of a derived representation.
-- NOTE: these are orphan instances, which is acceptable here because this
-- module is only used by the test suite.
instance Show GeniVal where
  show = geniShow
instance Show (AvPair GeniVal) where
  show = geniShow
instance Show LexEntry where
  show = geniShow
-- Plain derived instances for the remaining types (StandaloneDeriving).
deriving instance Show PathEqLhs
deriving instance Show NodePathEqLhs
deriving instance Show TopBottom
deriving instance Show (GNode SchemaVal)
deriving instance Show (AvPair SchemaVal)
deriving instance Show SchemaVal
deriving instance Show SchemaTree
deriving instance Show MorphOutput
deriving instance Show PolarityDetectionResult
deriving instance Show PolarityKey
deriving instance Show (Literal GeniVal)
| kowey/GenI | geni-test/NLP/GenI/Test/Show.hs | gpl-2.0 | 1,109 | 0 | 7 | 150 | 232 | 134 | 98 | 31 | 0 |
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances #-}
module System.Console.RemoteCLI.CommandState (
CommandState (..)
, Synopsis
, Help
, CommandHandlerEntry
, Printout
, MonadicCommandHandler
, PureCommandHandler
, lookupEntry
, lookupHandler
, localCommands
, remoteCommands
, empty
, fromList
) where
import System.Console.RemoteCLI.CommandLine (CommandLine (..)
, Scope (..)
, Value)
import Text.Printf (printf)
import qualified Data.Map.Strict as M
import Control.Monad (mplus)
-- | The "Printout" type for the CLI, i.e. the content that will be
-- displayed by the eval loop
type Printout = [String]
-- | Alias for a string shortly describing the intention for a command
type Synopsis = String
-- | Alias for the help information for the command
type Help = Printout
-- | A map from variable names to their values
type VariableMap = M.Map String Value
-- | A map from command names to their synopsis, help text and handler
type CommandHandlerMap = M.Map String (Synopsis, Help, PureCommandHandler)
-- | A command handler entry as it will look like when dumping the
-- contents of a command handler map
type CommandHandlerEntry = (String, (Synopsis, Help, PureCommandHandler))
-- | The state for the CLI
data CommandState = CommandState VariableMap -- Variables
                                 CommandHandlerMap -- Local commands
                                 CommandHandlerMap -- Remote command
                                 CommandHandlerMap -- Default scope
                  deriving (Eq, Show)
-- | A monadic function to take care of the 'dirty' aspects of command
-- execution. E.g. network communication
type MonadicCommandHandler = CommandState ->
                             IO (Either Printout (Printout, CommandState))
-- | A pure function to take care of the 'pure' aspects of command
-- execution. E.g. indata checking, state manipulation and preparation
-- of the monadic handler
type PureCommandHandler = CommandLine -> CommandState ->
                          Either Printout (Printout
                                          , CommandState
                                          , MonadicCommandHandler)
-- | Eq instance: functions cannot be compared, so all handlers are
-- considered equal — this exists only so 'CommandState' can derive Eq.
instance Eq PureCommandHandler where
  _ == _ = True
-- | Show instance: a constant placeholder, again only to support the
-- derived Show of 'CommandState'.
instance Show PureCommandHandler where
  show _ = "PureCommandHandler"
-- | Lookup a command entry by name.  Local commands shadow remote ones
-- with the same name; 'Nothing' when neither scope knows the command.
lookupEntry :: String -> CommandState -> Maybe CommandHandlerEntry
lookupEntry cmd (CommandState _ local remote _) =
  fmap withName (M.lookup cmd local `mplus` M.lookup cmd remote)
  where
    withName entry = (cmd, entry)
-- | Lookup the pure handler for the given command line, honouring the
-- command's scope: 'Local' searches the local commands, anything else the
-- default scope.  'Left' carries an error printout for unknown commands.
lookupHandler :: CommandLine -> CommandState ->
                 Either [String] PureCommandHandler
lookupHandler (CommandLine scope cmd _) (CommandState _ local _ deflt) =
  maybe notFound found (M.lookup cmd scopeMap)
  where
    scopeMap = case scope of
                 Local -> local
                 _     -> deflt
    notFound = Left [printf "Command \"%s\" not found" cmd]
    found (_, _, handler) = Right handler
-- | Dump the contents of the local commands. Sorted in ascending
-- order based on key
localCommands :: CommandState -> [CommandHandlerEntry]
localCommands (CommandState _ l _ _) = M.toAscList l
-- | Dump the contents of the remote commands. Sorted in ascending
-- order based on key
remoteCommands :: CommandState -> [CommandHandlerEntry]
remoteCommands (CommandState _ _ r _) = M.toAscList r
-- | Create the empty state: no variables and no commands in any scope
empty :: CommandState
empty = CommandState M.empty M.empty M.empty M.empty
-- | Create a map from list (ugly kind of module export :-( ) — re-exported
-- so callers need not depend on Data.Map directly
fromList :: [(String, a)] -> M.Map String a
fromList = M.fromList
| SneakingCat/rcli | src/System/Console/RemoteCLI/CommandState.hs | gpl-3.0 | 3,964 | 0 | 10 | 1,131 | 676 | 389 | 287 | 63 | 3 |
--
-- Exotic Model Utility Function
-- by Ian-Woo Kim (iankim@umich.edu)
--
-- created on 2011 Jan 12
--
-- based on arXiv:0911.3237
module HEP.Physics.TTBar.Model.Exotic where
import Prelude hiding (subtract)
import HEP.Util.Functions hiding (beta)
import Numeric.GSL.Integration
import HEP.Physics.TTBar.Model.Mandelstam
-- | Couplings and masses describing a colour-exotic scalar exchanged in
-- t-tbar production (based on arXiv:0911.3237).
data ColorExoticArg = SA {
    sa_c0 :: Double,   -- ^ colour factor for the interference term (see 'sumM2')
    sa_c2 :: Double,   -- ^ colour factor for the pure new-physics term
    sa_y :: Double,    -- ^ Yukawa coupling
    sa_mt :: Double,   -- ^ top quark mass (GeV)
    sa_mphi :: Double  -- ^ exotic scalar mass (GeV)
    }
{-
data SextetArg = SA {
sa_c0 :: Double,
sa_c2 :: Double,
sa_y :: Double,
sa_mt :: Double,
sa_mphi :: Double
} -}
-- | Benchmark parameter point for a colour-sextet scalar: unit Yukawa,
-- m_t = 174.3 GeV, m_phi = 600 GeV, colour factors from 'colorFactor'.
testSextetArg :: ColorExoticArg
testSextetArg = SA {
    sa_c0 = colorFactor Sextet Zero,
    sa_c2 = colorFactor Sextet Two,
    sa_y = 1.0,
    sa_mt = 174.3,
    sa_mphi = 600.0
    }
-- | Benchmark parameter point for a colour-triplet scalar: unit Yukawa,
-- m_t = 174.3 GeV, m_phi = 600 GeV, colour factors from 'colorFactor'.
testTripletArg :: ColorExoticArg
testTripletArg = SA {
    sa_c0 = colorFactor Triplet Zero,
    sa_c2 = colorFactor Triplet Two,
    sa_y = 1.0,
    sa_mt = 174.3,
    sa_mphi = 600.0
    }
-- | Colour representation of the exotic scalar (or none at all).
data ColorExoticType = NoExotic | Singlet | Octet | Triplet | Sextet
                     deriving (Show,Eq)
-- | Selects which colour coefficient 'colorFactor' returns: the c_0
-- (interference) or c_2 (pure new-physics) coefficient.
data CoeffType = Zero | Two
-- | Which pieces of the squared matrix element to include: Standard Model
-- only, new physics only, interference only, or their sum.
data PartCalculation = OnlySM | OnlyNP | OnlyInterfere | All
-- | A full model point: the exotic scalar's representation, the masses
-- and the couplings used by the cross-section functions below.
data ModelParameters = MP {
    exoticType :: ColorExoticType  -- ^ colour representation of the scalar
    , mTop :: Double               -- ^ top quark mass (GeV)
    , mPhi :: Double               -- ^ exotic scalar mass (GeV)
    , gStrong :: Double            -- ^ strong coupling g_s
    , yS :: Double                 -- ^ Yukawa coupling (combined in quadrature with 'yP')
    , yP :: Double                 -- ^ Yukawa coupling (combined in quadrature with 'yS')
    }
-- | Group-theory colour coefficient for each exotic representation;
-- 'Zero' selects the interference coefficient c_0, 'Two' the pure
-- new-physics coefficient c_2. (Sextet c_0 value follows the correction
-- in arXiv:1103.2757.)
colorFactor :: ColorExoticType -> CoeffType -> Double
colorFactor exotic Zero =
    case exotic of
        NoExotic -> 0.0
        Singlet  -> 4.0
        Octet    -> -2.0/3.0
        Triplet  -> 1.0
        Sextet   -> -1.0
colorFactor exotic Two =
    case exotic of
        NoExotic -> 0.0
        Singlet  -> 9.0
        Octet    -> 2.0
        Triplet  -> 3.0/4.0
        Sextet   -> 3.0/2.0
beta mt s = sqrt (1.0 - (4.0*mt^2 / s))
-- currently these results can be applied only to Triplet and Sextets .
-- | Spin/colour-summed squared matrix element for q qbar -> t tbar with a
-- u-channel exotic scalar, split into SM, interference and pure
-- new-physics pieces selected by @pcalc@.
-- NOTE(review): the @tphi@ argument is accepted but never used in the
-- body -- confirm whether a t-channel propagator term is missing or the
-- parameter is simply vestigial.
sumM2 pcalc c0 c2 gs y s tt ut mt tphi uphi =
    let onlysm = (16.0 * gs^(4 :: Int) ) / (s^(2 :: Int)) * ( ut^(2 :: Int) + tt^(2 :: Int) + 2.0*s*mt^(2 :: Int))
        onlyinter = 8.0 * c0 * gs^(2 :: Int) * y^(2 :: Int) * (s * mt^(2 :: Int) + ut^(2 :: Int) ) / ( s * uphi )
        onlynp = c2 * 4.0 * y^(4 :: Int) * ut^(2 :: Int) / (uphi^(2 :: Int) )
    in case pcalc of
        OnlySM -> onlysm
        OnlyNP -> onlynp
        OnlyInterfere -> onlyinter
        All -> onlysm + onlynp + onlyinter
dsigma s beta sumM2 = 0.25 * (1.0/9.0)* beta / (32.0*pi*s) * sumM2
-- | Squared matrix element expressed in terms of the scaled scattering
-- angle @bcosth = beta * cos(theta)@ instead of Mandelstam invariants;
-- the pieces are selected by @pcalc@ exactly as in 'sumM2'.
sumM2Wcosth pcalc c0 c2 gs y s mt uphi bcosth =
    let m = mt / sqrt s  -- dimensionless mass ratio m_t / sqrt(s)
        onlysm = (8.0*gs^(4 :: Int))*(1.0 + bcosth^(2 :: Int) + 4*m^(2 :: Int))
        onlyinter = 2.0*y^(2 :: Int)*gs^(2 :: Int)*c0*s*((1.0+bcosth)^(2 :: Int) + 4.0*m^(2 :: Int))/uphi
        onlynp = y^(4 :: Int)*c2*s^(2 :: Int)*((1.0+bcosth)^(2 :: Int))/(uphi^(2 :: Int))
    in case pcalc of
        OnlySM -> onlysm
        OnlyNP -> onlynp
        OnlyInterfere -> onlyinter
        All -> onlysm + onlynp + onlyinter
-- | Differential cross section dsigma/dcos(theta) evaluated from explicit
-- four-momenta of a 2 -> 2 momentum configuration.
dsigma_dcosth_from_mom :: ModelParameters -> PartCalculation
                       -> Two2TwoMomConf -> Double
dsigma_dcosth_from_mom param@(MP typ mt mphi gs ys yp) pcalc mc@(TTMC p1 p2 k1 k2) =
    let s = mandelstamS mc
        t = mandelstamT mc
        u = mandelstamU mc
        tt = t - mt^(2 :: Int)      -- t - m_t^2
        tphi = t - mphi^(2 :: Int)  -- t - m_phi^2 (passed to 'sumM2', which ignores it)
        ut = u - mt^(2 :: Int)      -- u - m_t^2
        uphi = u - mphi^(2 :: Int)  -- u - m_phi^2
        y = sqrt ( ys^(2 :: Int) + yp^(2 :: Int) )  -- combined Yukawa strength
        bet = beta mt s
        c0 = colorFactor typ Zero
        c2 = colorFactor typ Two
        summ2 = sumM2 pcalc c0 c2 gs y s tt ut mt tphi uphi
    in dsigma s bet summ2
-- | Calculate total cross section from a given cm energy
-- by numerically integrating dsigma/dcos(theta) over cos(theta) in
-- [-1, 1] using GSL's adaptive QAGS routine (tolerance 1e-6, at most
-- 1000 subintervals).
sigma_from_initial_mom :: ModelParameters
                       -> PartCalculation
                       -> Double -- ^ Center-of-Mass energy (not individual energy !)
                       -> Double -- ^ total cross section
sigma_from_initial_mom param@(MP typ mt mphi gs ys yp) pcalc cmenergy =
    let energy = cmenergy / 2.0  -- per-beam energy in the CM frame
        pfin = sqrt ( energy^(2 :: Int) - mt^(2 :: Int) )  -- final-state 3-momentum magnitude
        p1 = (energy, 0, 0, energy)
        p2 = (energy, 0, 0, -energy)
        -- Build back-to-back outgoing momenta for a given cos(theta) and
        -- evaluate the differential cross section there.
        ds_dcth costh = let sinth = sqrt ( 1.0 - costh^(2 :: Int) )
                            k1 = (energy, pfin*sinth , 0, pfin*costh)
                            k2 = (energy, -pfin*sinth, 0, -pfin*costh )
                        in dsigma_dcosth_from_mom param pcalc (TTMC p1 p2 k1 k2)
    in fst $ integrateQAGS 1e-6 1000 (ds_dcth) (-1.0) (1.0)
-- | Model point following arXiv:0911.3237 for the given representation
-- and scalar mass (m_t = 174.3 GeV, alpha_s = 0.118, purely scalar
-- Yukawa coupling).
-- NOTE(review): 'yS' is computed with the hard-coded 'Sextet', ignoring
-- the @typ@ argument -- confirm whether @yvalue typ mphi@ was intended
-- for triplet model points.
modelParameterFrom09113237 :: ColorExoticType
                           -> Double -- ^ mass of phi
                           -> ModelParameters
modelParameterFrom09113237 typ mphi = MP {
    exoticType = typ
    , mTop = 174.3
    , mPhi = mphi
    , gStrong = alphaStoGS 0.118  -- g_s derived from alpha_s = 0.118
    , yS = yvalue Sextet mphi
    , yP = 0.0
    }
-- | Benchmark Yukawa coupling as a linear function of the scalar mass
-- (values from arXiv:0911.3237); defined only for 'Triplet' and
-- 'Sextet', any other representation is a runtime error.
yvalue :: Fractional a => ColorExoticType -> a -> a
yvalue Triplet mphi = mphi / 228.57 + 1.8125
yvalue Sextet mphi = mphi / 257.0 + 1.28
yvalue _ _ = error "don't know what to do"
{-
modelParameterFrom09113237 typ mphi = MP {
exoticType = typ
, mTop = 174.3
, mPhi = mphi
, gStrong = alphaStoGS 0.118
, yS = yvalue Sextet mphi
, yP = 0.0
} -}
-- | Cross-section integrand for q qbar -> t tbar evaluated at a fixed
-- cos(theta); returns 0 below the t-tbar production threshold
-- (s <= 4 m_t^2). Despite the name, this is differential in cos(theta).
totalXSec_qq_exotic :: ColorExoticArg -> PartCalculation
                    -> (Double,Double,Double) -> Double
totalXSec_qq_exotic (SA c0 c2 y mt mphi) pcalc (alphaS, s, costh) =
    if s > 4.0*mt^(2::Int)
    then let gs = sqrt (4.0*pi*alphaS)  -- strong coupling from alpha_s
             bet = beta mt s
             bcosth = bet*costh
             ut = -s*(1+bcosth)/2                 -- u - m_t^2
             uphi = ut+mt^(2::Int)-mphi^(2::Int)  -- u - m_phi^2
             summ2 = sumM2Wcosth pcalc c0 c2 gs y s mt uphi bcosth
         in dsigma s bet summ2
    else 0.0
| wavewave/ttbar | lib/HEP/Physics/TTBar/Model/Exotic.hs | gpl-3.0 | 5,759 | 0 | 18 | 1,883 | 1,990 | 1,105 | 885 | 121 | 4 |
module Hython.AttributeDict
where
import Control.Monad.IO.Class (MonadIO)
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as HashMap
import Data.Text (Text)
import Hython.Ref
-- | A mapping from attribute names to mutable references of objects.
type AttributeDict obj = HashMap Text (Ref obj)
-- | The empty attribute dictionary.
empty :: AttributeDict obj
empty = HashMap.empty
-- | Insert a reference under the given attribute name, replacing any
-- existing entry for that name.
insertRef :: Text -> Ref obj -> AttributeDict obj -> AttributeDict obj
insertRef name ref dict = HashMap.insert name ref dict
-- | Look up an attribute and dereference it, yielding the stored object
-- when the attribute is present and 'Nothing' otherwise.
lookup :: (MonadIO m) => Text -> AttributeDict obj -> m (Maybe obj)
lookup attr dict = traverse readRef (lookupRef attr dict)
-- | Look up the reference stored under an attribute name, without
-- dereferencing it.
lookupRef :: Text -> AttributeDict obj -> Maybe (Ref obj)
lookupRef = HashMap.lookup
-- | Build a dictionary from a list of name/reference pairs.
fromList :: [(Text, Ref obj)] -> AttributeDict obj
fromList = HashMap.fromList
-- | A fresh, empty dictionary (same value as 'empty').
new :: AttributeDict obj
new = HashMap.empty
-- | Set an attribute's value: if a reference already exists under the
-- name it is overwritten in place (the dictionary is returned
-- unchanged); otherwise a new reference is allocated and inserted.
set :: (MonadIO m) => Text -> obj -> AttributeDict obj -> m (AttributeDict obj)
set attr obj dict = maybe insertNew overwrite (HashMap.lookup attr dict)
  where
    overwrite ref = writeRef ref obj >> return dict
    insertNew = do
        ref <- newRef obj
        return (HashMap.insert attr ref dict)
| mattgreen/hython | src/Hython/AttributeDict.hs | gpl-3.0 | 1,083 | 0 | 12 | 217 | 391 | 200 | 191 | 29 | 2 |
module Crepuscolo.Recognizer.Ruby
( recognizer
) where
import System.FilePath (takeExtension)
import qualified Crepuscolo.Recognize.DSL as DSL
-- | Marker type carrying the Ruby recognizer behaviour.
data Recognizer = Ruby deriving (Show)
-- | Recognition rules for Ruby source files.
instance DSL.Recognizer Recognizer where
    -- Delegates entirely to path-based recognition.
    recognize Ruby path =
        return (DSL.recognizePath Ruby path)

    -- Files with a ".rb" extension are classified as "ruby"; anything
    -- else is unrecognized.
    recognizePath Ruby path =
        case takeExtension path of
            ".rb" -> Just "ruby"
            _ -> Nothing

    -- NOTE(review): content-based recognition is an unimplemented stub
    -- ('undefined' crashes if evaluated) -- confirm it is never called.
    recognizeContent Ruby string =
        undefined
-- | The packaged Ruby recognizer, ready for registration with the DSL.
recognizer :: DSL.Recognizable
recognizer = DSL.recognizable Ruby
| meh/crepuscolo | src/Crepuscolo/Recognizer/Ruby.hs | gpl-3.0 | 556 | 0 | 9 | 139 | 137 | 74 | 63 | 16 | 1 |
{- |
Module : $Header$
Description : Module to deal with generic modular arithmetic functions
Copyright : (c) Michal Parusinski
License : GPLv3
Maintainer : mparusinski@gmail.com
Stability : experimental
Portability : portable
Generic helper functions for modular arithmetic over 'Integral' types.
-}
module ModularArithmetic.Standard where
-- | Euclidean division: quotient and remainder as a pair. This is
-- 'divMod', so the remainder takes the sign of the divisor -- for
-- negative dividends this differs from 'quotRem'.
euclideanDivision :: (Integral a) => a -> a -> (a,a)
euclideanDivision = divMod

-- | Factor out all powers of two: @removePowersOfTwo n == (m, k)@ with
-- @n == m * 2^k@ and @m@ odd (for positive @n@). Zero maps to @(0, 0)@.
removePowersOfTwo :: (Integral a) => a -> (a, a)
removePowersOfTwo 0 = (0, 0)
removePowersOfTwo n = removePowersOfTwoAccum n 0

-- | Worker loop for 'removePowersOfTwo'; @acc@ counts the factors of two
-- stripped so far. (Locals renamed so they no longer shadow the Prelude
-- functions 'quot' and 'rem'.)
removePowersOfTwoAccum :: (Integral a) => a -> a -> (a, a)
removePowersOfTwoAccum n acc
    | r == 1 = (n, acc)
    | otherwise = removePowersOfTwoAccum q (acc + 1)
  where
    (q, r) = euclideanDivision n 2
| mparusinski/Haskell-number-theory-library | ModularArithmetic/Standard.hs | gpl-3.0 | 670 | 0 | 8 | 136 | 133 | 71 | 62 | 10 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverlappingInstances #-}
{-# LANGUAGE OverloadedStrings #-}
-- Copyright (C) 2009-2012 John Millikin <john@john-millikin.com>
--
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program. If not, see <http://www.gnu.org/licenses/>.
-- | D-Bus clients are an abstraction over the lower-level messaging
-- system. When combined with an external daemon called the \"bus\", clients
-- can perform remote procedure calls to other clients on the bus.
--
-- Clients may also listen for or emit /signals/, which are asynchronous
-- broadcast notifications.
--
-- Example: connect to the session bus, and get a list of active names.
--
-- @
--{-\# LANGUAGE OverloadedStrings \#-}
--
--import Data.List (sort)
--import DBus
--import DBus.Client
--
--main = do
-- client <- 'connectSession'
-- //
-- \-- Request a list of connected clients from the bus
-- reply <- 'call_' client ('methodCall' \"\/org\/freedesktop\/DBus\" \"org.freedesktop.DBus\" \"ListNames\")
-- { 'methodCallDestination' = Just \"org.freedesktop.DBus\"
-- }
-- //
-- \-- org.freedesktop.DBus.ListNames() returns a single value, which is
-- \-- a list of names (here represented as [String])
-- let Just names = 'fromVariant' ('methodReturnBody' reply !! 0)
-- //
-- \-- Print each name on a line, sorted so reserved names are below
-- \-- temporary names.
-- mapM_ putStrLn (sort names)
-- @
--
module DBus.Client
(
-- * Clients
Client
-- * Connecting to a bus
, connect
, connectSystem
, connectSession
, connectStarter
, disconnect
-- * Sending method calls
, call
, call_
, callNoReply
-- * Receiving method calls
, export
, unexport
, Method
, method
, Reply
, replyReturn
, replyError
, throwError
-- ** Automatic method signatures
, AutoMethod
, autoMethod
-- * Signals
, SignalHandler
, addMatch
, removeMatch
, emit
, listen
-- ** Match rules
, MatchRule
, formatMatchRule
, matchAny
, matchSender
, matchDestination
, matchPath
, matchInterface
, matchMember
-- * Name reservation
, requestName
, releaseName
, RequestNameFlag
, nameAllowReplacement
, nameReplaceExisting
, nameDoNotQueue
, RequestNameReply(NamePrimaryOwner, NameInQueue, NameExists, NameAlreadyOwner)
, ReleaseNameReply(NameReleased, NameNonExistent, NameNotOwner)
-- * Client errors
, ClientError
, clientError
, clientErrorMessage
, clientErrorFatal
-- * Advanced connection options
, ClientOptions
, clientSocketOptions
, clientThreadRunner
, defaultClientOptions
, connectWith
) where
import Control.Concurrent
import Control.Exception (SomeException, throwIO)
import qualified Control.Exception
import Control.Monad (forever, forM_, when)
import Data.Bits ((.|.))
import Data.IORef
import Data.List (foldl', intercalate)
import qualified Data.Map
import Data.Map (Map)
import Data.Maybe (catMaybes, listToMaybe)
import Data.Typeable (Typeable)
import Data.Unique
import Data.Word (Word32)
import DBus
import qualified DBus.Introspection as I
import qualified DBus.Socket
import DBus.Transport (TransportOpen, SocketTransport)
-- | Error raised by client operations. 'clientErrorFatal' records whether
-- the underlying connection is unusable after the error.
data ClientError = ClientError
    { clientErrorMessage :: String
    , clientErrorFatal :: Bool
    }
    deriving (Eq, Show, Typeable)

instance Control.Exception.Exception ClientError

-- | Construct a fatal 'ClientError' with the given message; call sites
-- mark recoverable errors by overriding 'clientErrorFatal' via record
-- update.
clientError :: String -> ClientError
clientError msg = ClientError msg True
-- | An active client session to a message bus. Clients may send or receive
-- method calls, and listen for or emit signals.
data Client = Client
    { clientSocket :: DBus.Socket.Socket
    -- Calls awaiting replies, keyed by the serial of the outgoing message.
    , clientPendingCalls :: IORef (Map Serial (MVar (Either MethodError MethodReturn)))
    -- Signal callbacks registered via 'addMatch', keyed by a unique id.
    , clientSignalHandlers :: IORef (Map Unique SignalHandler)
    -- Objects exported via 'export', keyed by object path.
    , clientObjects :: IORef (Map ObjectPath ObjectInfo)
    -- The thread running 'mainLoop'; killed by 'disconnect'.
    , clientThreadID :: ThreadId
    }
-- | Tuning knobs for 'connectWith'; the type parameter @t@ is the
-- transport used by the underlying socket.
data ClientOptions t = ClientOptions
    {
    -- | Options for the underlying socket, for advanced use cases. See
    -- the "DBus.Socket" module.
      clientSocketOptions :: DBus.Socket.SocketOptions t

    -- | A function to run the client thread. The provided IO computation
    -- should be called repeatedly; each time it is called, it will process
    -- one incoming message.
    --
    -- The provided computation will throw a 'ClientError' if it fails to
    -- process an incoming message, or if the connection is lost.
    --
    -- The default implementation is 'forever'.
    , clientThreadRunner :: IO () -> IO ()
    }
-- | Raw handler invoked for an incoming message.
type Callback = (ReceivedMessage -> IO ())

-- | A match rule already rendered into the bus's textual format.
type FormattedMatchRule = String

-- | A registered signal callback: its unique id, the formatted rule that
-- was sent to the bus, a flag recording whether it is still registered,
-- and the user callback itself.
data SignalHandler = SignalHandler Unique FormattedMatchRule (IORef Bool) (Signal -> IO ())

-- | The outcome of a method handler: a successful return or an error.
data Reply
    = ReplyReturn [Variant]
    | ReplyError ErrorName [Variant]

-- | Reply to a method call with a successful return, containing the given body.
replyReturn :: [Variant] -> Reply
replyReturn = ReplyReturn

-- | Reply to a method call with an error, containing the given error name and
-- body.
--
-- Typically, the first item of the error body is a string with a message
-- describing the error.
replyError :: ErrorName -> [Variant] -> Reply
replyError = ReplyError

-- | An exported method: interface, member, input signature, output
-- signature, and the handler itself.
data Method = Method InterfaceName MemberName Signature Signature (MethodCall -> IO Reply)

-- | Exported-object bookkeeping: object -> interface -> member -> method.
type ObjectInfo = Map InterfaceName InterfaceInfo
type InterfaceInfo = Map MemberName MethodInfo
data MethodInfo = MethodInfo Signature Signature Callback
-- | Connect to the bus specified in the environment variable
-- @DBUS_SYSTEM_BUS_ADDRESS@, or to
-- @unix:path=\/var\/run\/dbus\/system_bus_socket@ if @DBUS_SYSTEM_BUS_ADDRESS@
-- is not set.
--
-- Throws a 'ClientError' if @DBUS_SYSTEM_BUS_ADDRESS@ contains an invalid
-- address, or if connecting to the bus failed.
connectSystem :: IO Client
connectSystem = maybe badAddress connect =<< getSystemAddress
  where
    badAddress = throwIO (clientError "connectSystem: DBUS_SYSTEM_BUS_ADDRESS is invalid.")
-- | Connect to the bus specified in the environment variable
-- @DBUS_SESSION_BUS_ADDRESS@, which must be set.
--
-- Throws a 'ClientError' if @DBUS_SESSION_BUS_ADDRESS@ is unset, contains an
-- invalid address, or if connecting to the bus failed.
connectSession :: IO Client
connectSession = maybe badAddress connect =<< getSessionAddress
  where
    badAddress = throwIO (clientError "connectSession: DBUS_SESSION_BUS_ADDRESS is missing or invalid.")
-- | Connect to the bus specified in the environment variable
-- @DBUS_STARTER_ADDRESS@, which must be set.
--
-- Throws a 'ClientError' if @DBUS_STARTER_ADDRESS@ is unset, contains an
-- invalid address, or if connecting to the bus failed.
connectStarter :: IO Client
connectStarter = maybe badAddress connect =<< getStarterAddress
  where
    badAddress = throwIO (clientError "connectStarter: DBUS_STARTER_ADDRESS is missing or invalid.")
-- | Connect to the bus at the specified address, using
-- 'defaultClientOptions'.
--
-- Throws a 'ClientError' on failure.
connect :: Address -> IO Client
connect = connectWith defaultClientOptions
-- | Connect to the bus at the specified address, with the given connection
-- options. Most users should use 'connect' instead.
--
-- Throws a 'ClientError' on failure.
connectWith :: TransportOpen t => ClientOptions t -> Address -> IO Client
connectWith opts addr = do
    sock <- DBus.Socket.openWith (clientSocketOptions opts) addr

    pendingCalls <- newIORef Data.Map.empty
    signalHandlers <- newIORef Data.Map.empty
    objects <- newIORef Data.Map.empty

    let threadRunner = clientThreadRunner opts

    -- The receive thread needs the 'Client', but the 'Client' records the
    -- thread id; the MVar ties this knot -- the forked thread blocks until
    -- the fully-constructed client is published below.
    clientMVar <- newEmptyMVar
    threadID <- forkIO $ do
        client <- readMVar clientMVar
        threadRunner (mainLoop client)

    let client = Client
            { clientSocket = sock
            , clientPendingCalls = pendingCalls
            , clientSignalHandlers = signalHandlers
            , clientObjects = objects
            , clientThreadID = threadID
            }
    putMVar clientMVar client

    -- Export the introspection root, then perform the Hello handshake
    -- that a message bus expects from a new connection.
    export client "/" [introspectRoot client]
    callNoReply client (methodCall dbusPath dbusInterface "Hello")
        { methodCallDestination = Just dbusName
        }

    return client
-- | Default client options. Uses the built-in Socket-based transport, which
-- supports the @tcp:@ and @unix:@ methods, and runs the receive loop
-- with 'forever'.
defaultClientOptions :: ClientOptions SocketTransport
defaultClientOptions = ClientOptions
    { clientSocketOptions = DBus.Socket.defaultSocketOptions
    , clientThreadRunner = forever
    }
-- | Stop a 'Client''s callback thread and close its underlying socket,
-- failing any calls still awaiting replies.
disconnect :: Client -> IO ()
disconnect client =
    killThread (clientThreadID client) >> disconnect' client
-- | Tear down client state without touching the receive thread: fail all
-- pending calls with a disconnect error, drop signal handlers and
-- exported objects, then close the socket.
disconnect' :: Client -> IO ()
disconnect' client = do
    -- Atomically take ownership of the whole pending-call table so each
    -- waiting caller is woken exactly once.
    pendingCalls <- atomicModifyIORef (clientPendingCalls client) (\p -> (Data.Map.empty, p))
    forM_ (Data.Map.toList pendingCalls) $ \(k, v) -> do
        putMVar v (Left (methodError k errorDisconnected))

    atomicWriteIORef (clientSignalHandlers client) Data.Map.empty
    atomicWriteIORef (clientObjects client) Data.Map.empty

    DBus.Socket.close (clientSocket client)
-- | One iteration of the client's receive loop: read a message from the
-- socket and dispatch it. A socket error tears the client down and is
-- re-thrown as a 'ClientError'.
mainLoop :: Client -> IO ()
mainLoop client = do
    let sock = clientSocket client

    received <- Control.Exception.try (DBus.Socket.receive sock)
    msg <- case received of
        Left err -> do
            disconnect' client
            throwIO (clientError (DBus.Socket.socketErrorMessage err))
        Right msg -> return msg

    dispatch client msg
-- | Route one received message to its consumer: method returns and errors
-- are matched to pending calls by serial, signals are fanned out to all
-- registered handlers, and incoming method calls are looked up in the
-- exported-object tree.
dispatch :: Client -> ReceivedMessage -> IO ()
dispatch client = go where
    go (ReceivedMethodReturn _ msg) = dispatchReply (methodReturnSerial msg) (Right msg)
    go (ReceivedMethodError _ msg) = dispatchReply (methodErrorSerial msg) (Left msg)
    go (ReceivedSignal _ msg) = do
        handlers <- readIORef (clientSignalHandlers client)
        -- Each signal handler runs in its own forked thread.
        forM_ (Data.Map.toAscList handlers) (\(_, SignalHandler _ _ _ h) -> forkIO (h msg) >> return ())
    go received@(ReceivedMethodCall serial msg) = do
        objects <- readIORef (clientObjects client)
        let sender = methodCallSender msg
        -- Method calls are served on a fresh thread; unknown methods get
        -- an error reply sent back to the caller.
        _ <- forkIO $ case findMethod objects msg of
            Right io -> io received
            Left errName -> send_ client
                (methodError serial errName)
                    { methodErrorDestination = sender
                    }
                (\_ -> return ())
        return ()
    go _ = return ()

    -- Atomically remove the pending call for this serial (if any exists)
    -- and wake the caller blocked in 'call'.
    dispatchReply serial result = do
        pending <- atomicModifyIORef
            (clientPendingCalls client)
            (\p -> case Data.Map.lookup serial p of
                Nothing -> (p, Nothing)
                Just mvar -> (Data.Map.delete serial p, Just mvar))
        case pending of
            Just mvar -> putMVar mvar result
            Nothing -> return ()
-- | Flags accepted by 'requestName'; combined into a bit mask by
-- 'encodeFlags' before being sent to the bus.
data RequestNameFlag
    = AllowReplacement
    | ReplaceExisting
    | DoNotQueue
    deriving (Eq, Show)

-- | Allow this client's reservation to be replaced, if another client
-- requests it with the 'nameReplaceExisting' flag.
--
-- If this client's reservation is replaced, this client will be added to the
-- wait queue unless the request also included the 'nameDoNotQueue' flag.
nameAllowReplacement :: RequestNameFlag
nameAllowReplacement = AllowReplacement

-- | If the name being requested is already reserved, attempt to replace it.
-- This only works if the current owner provided the 'nameAllowReplacement'
-- flag.
nameReplaceExisting :: RequestNameFlag
nameReplaceExisting = ReplaceExisting

-- | If the name is already in use, do not add this client to the queue, just
-- return an error.
nameDoNotQueue :: RequestNameFlag
nameDoNotQueue = DoNotQueue
-- | Result of 'requestName', decoded from the bus's numeric reply code.
data RequestNameReply
    -- | This client is now the primary owner of the requested name.
    = NamePrimaryOwner

    -- | The name was already reserved by another client, and replacement
    -- was either not attempted or not successful.
    | NameInQueue

    -- | The name was already reserved by another client, 'DoNotQueue'
    -- was set, and replacement was either not attempted or not
    -- successful.
    | NameExists

    -- | This client is already the primary owner of the requested name.
    | NameAlreadyOwner

    -- | Not exported; exists to generate a compiler warning if users
    -- case on the reply and forget to include a default case.
    | UnknownRequestNameReply Word32
    deriving (Eq, Show)

-- | Result of 'releaseName', decoded from the bus's numeric reply code.
data ReleaseNameReply
    -- | This client has released the provided name.
    = NameReleased

    -- | The provided name is not assigned to any client on the bus.
    | NameNonExistent

    -- | The provided name is not assigned to this client.
    | NameNotOwner

    -- | Not exported; exists to generate a compiler warning if users
    -- case on the reply and forget to include a default case.
    | UnknownReleaseNameReply Word32
    deriving (Eq, Show)
-- | Pack a list of name-request flags into the bit mask understood by
-- the bus ('AllowReplacement' = 0x1, 'ReplaceExisting' = 0x2,
-- 'DoNotQueue' = 0x4).
encodeFlags :: [RequestNameFlag] -> Word32
encodeFlags = foldl' (.|.) 0 . map flagValue
  where
    flagValue AllowReplacement = 0x1
    flagValue ReplaceExisting = 0x2
    flagValue DoNotQueue = 0x4
-- | Asks the message bus to assign the given name to this client. The bus
-- maintains a queue of possible owners, where the head of the queue is the
-- current (\"primary\") owner.
--
-- There are several uses for name reservation:
--
-- * Clients which export methods reserve a name so users and applications
--   can send them messages. For example, the GNOME Keyring reserves the name
--   @\"org.gnome.keyring\"@ on the user's session bus, and NetworkManager
--   reserves @\"org.freedesktop.NetworkManager\"@ on the system bus.
--
-- * When there are multiple implementations of a particular service, the
--   service standard will ususally include a generic bus name for the
--   service. This allows other clients to avoid depending on any particular
--   implementation's name. For example, both the GNOME Keyring and KDE
--   KWallet services request the @\"org.freedesktop.secrets\"@ name on the
--   user's session bus.
--
-- * A process with \"single instance\" behavior can use name assignment to
--   check whether the instance is already running, and invoke some method
--   on it (e.g. opening a new window).
--
-- Throws a 'ClientError' if the call failed.
requestName :: Client -> BusName -> [RequestNameFlag] -> IO RequestNameReply
requestName client name flags = do
    reply <- call_ client (methodCall dbusPath dbusInterface "RequestName")
        { methodCallDestination = Just dbusName
        , methodCallBody = [toVariant name, toVariant (encodeFlags flags)]
        }
    -- The reply body must contain a single Word32 status code; malformed
    -- replies raise non-fatal client errors.
    var <- case listToMaybe (methodReturnBody reply) of
        Just x -> return x
        Nothing -> throwIO (clientError "requestName: received empty response")
            { clientErrorFatal = False
            }
    code <- case fromVariant var of
        Just x -> return x
        Nothing -> throwIO (clientError ("requestName: received invalid response code " ++ showsPrec 11 var ""))
            { clientErrorFatal = False
            }
    -- Decode the numeric code; unknown codes are preserved verbatim.
    return $ case code :: Word32 of
        1 -> NamePrimaryOwner
        2 -> NameInQueue
        3 -> NameExists
        4 -> NameAlreadyOwner
        _ -> UnknownRequestNameReply code
-- | Release a name that this client previously requested. See 'requestName'
-- for an explanation of name reservation.
--
-- Throws a 'ClientError' if the call failed.
releaseName :: Client -> BusName -> IO ReleaseNameReply
releaseName client name = do
    reply <- call_ client (methodCall dbusPath dbusInterface "ReleaseName")
        { methodCallDestination = Just dbusName
        , methodCallBody = [toVariant name]
        }
    -- The reply body must contain a single Word32 status code; malformed
    -- replies raise non-fatal client errors.
    var <- case listToMaybe (methodReturnBody reply) of
        Just x -> return x
        Nothing -> throwIO (clientError "releaseName: received empty response")
            { clientErrorFatal = False
            }
    code <- case fromVariant var of
        Just x -> return x
        Nothing -> throwIO (clientError ("releaseName: received invalid response code " ++ showsPrec 11 var ""))
            { clientErrorFatal = False
            }
    -- Decode the numeric code; unknown codes are preserved verbatim.
    return $ case code :: Word32 of
        1 -> NameReleased
        2 -> NameNonExistent
        3 -> NameNotOwner
        _ -> UnknownReleaseNameReply code
-- | Serialize and transmit a message on the client's socket, passing the
-- assigned serial to the continuation @io@. Socket errors are re-thrown
-- as 'ClientError's, preserving the transport layer's fatality flag.
send_ :: Message msg => Client -> msg -> (Serial -> IO a) -> IO a
send_ client msg io = do
    result <- Control.Exception.try (DBus.Socket.send (clientSocket client) msg io)
    case result of
        Right x -> return x
        Left err -> throwIO (clientError (DBus.Socket.socketErrorMessage err))
            { clientErrorFatal = DBus.Socket.socketErrorFatal err
            }
-- | Send a method call to the bus, and wait for the response.
--
-- Throws a 'ClientError' if the method call couldn't be sent, or if the reply
-- couldn't be parsed.
call :: Client -> MethodCall -> IO (Either MethodError MethodReturn)
call client msg = do
    -- If ReplyExpected is False, this function would block indefinitely
    -- if the remote side honors it.
    let safeMsg = msg
            { methodCallReplyExpected = True
            }
    mvar <- newEmptyMVar
    let ref = clientPendingCalls client
    -- Register the pending call under the serial assigned during the
    -- send, so the dispatcher can route the reply back to us.
    serial <- send_ client safeMsg (\serial -> atomicModifyIORef ref (\p -> (Data.Map.insert serial mvar p, serial)))

    -- At this point, we wait for the reply to arrive. The user may cancel
    -- a pending call by sending this thread an exception via something
    -- like 'timeout'; in that case, we want to clean up the pending call.
    Control.Exception.onException
        (takeMVar mvar)
        (atomicModifyIORef_ ref (Data.Map.delete serial))
-- | Send a method call to the bus, and wait for the response.
--
-- Unsets the 'noReplyExpected' message flag before sending.
--
-- Throws a 'ClientError' if the method call couldn't sent, if the reply
-- couldn't be parsed, or if the reply was a 'MethodError'. The error is
-- marked fatal only when the failure was a disconnect.
call_ :: Client -> MethodCall -> IO MethodReturn
call_ client msg = do
    result <- call client msg
    case result of
        Left err -> throwIO (clientError ("Call failed: " ++ methodErrorMessage err))
            { clientErrorFatal = methodErrorName err == errorDisconnected
            }
        Right ret -> return ret
-- | Send a method call to the bus, and do not wait for a response.
--
-- Sets the 'noReplyExpected' message flag before sending.
--
-- Throws a 'ClientError' if the method call couldn't be sent.
callNoReply :: Client -> MethodCall -> IO ()
callNoReply client msg =
    -- The flag is always forced off so a well-behaved peer never sends a
    -- reply that nobody is waiting for.
    send_ client msg { methodCallReplyExpected = False } (const (return ()))
-- | Request that the bus forward signals matching the given rule to this
-- client, and process them in a callback.
--
-- A received signal might be processed by more than one callback at a time.
-- Callbacks each run in their own thread.
--
-- The returned 'SignalHandler' can be passed to 'removeMatch'
-- to stop handling this signal.
--
-- Throws a 'ClientError' if the match rule couldn't be added to the bus.
addMatch :: Client -> MatchRule -> (Signal -> IO ()) -> IO SignalHandler
addMatch client rule io = do
    -- Always constrain the rule to signals; other message types are
    -- handled elsewhere.
    let formatted = case formatMatchRule rule of
            "" -> "type='signal'"
            x -> "type='signal'," ++ x

    handlerId <- newUnique
    registered <- newIORef True
    -- The callback re-checks the rule locally, since the bus forwards
    -- every signal matching any of the client's registered rules.
    let handler = SignalHandler handlerId formatted registered (\msg -> when (checkMatchRule rule msg) (io msg))

    -- Register locally before telling the bus, so no forwarded signal is
    -- dropped in the window between the two steps.
    atomicModifyIORef (clientSignalHandlers client) (\hs -> (Data.Map.insert handlerId handler hs, ()))
    _ <- call_ client (methodCall dbusPath dbusInterface "AddMatch")
        { methodCallDestination = Just dbusName
        , methodCallBody = [toVariant formatted]
        }
    return handler
-- | Request that the bus stop forwarding signals for the given handler.
--
-- Safe to call more than once: the handler's registration flag ensures
-- the bus-side removal happens at most one time.
--
-- Throws a 'ClientError' if the match rule couldn't be removed from the bus.
removeMatch :: Client -> SignalHandler -> IO ()
removeMatch client (SignalHandler handlerId formatted registered _) = do
    -- Atomically claim the right to unregister; a second caller sees
    -- False and does nothing.
    shouldUnregister <- atomicModifyIORef registered (\wasRegistered -> (False, wasRegistered))
    when shouldUnregister $ do
        atomicModifyIORef (clientSignalHandlers client) (\hs -> (Data.Map.delete handlerId hs, ()))
        _ <- call_ client (methodCall dbusPath dbusInterface "RemoveMatch")
            { methodCallDestination = Just dbusName
            , methodCallBody = [toVariant formatted]
            }
        return ()
-- | Equivalent to 'addMatch', but does not return the added 'SignalHandler',
-- so the registration can never be removed.
listen :: Client -> MatchRule -> (Signal -> IO ()) -> IO ()
listen client rule io = addMatch client rule io >> return ()
{-# DEPRECATED listen "Prefer DBus.Client.addMatch in new code." #-}
-- | Emit the signal on the bus.
--
-- Throws a 'ClientError' if the signal message couldn't be sent.
emit :: Client -> Signal -> IO ()
emit client sig = send_ client sig (const (return ()))
-- | A match rule describes which signals a particular callback is interested
-- in. Use 'matchAny' to construct match rules. Every field is optional;
-- an unset field matches anything.
--
-- Example: a match rule which matches signals sent by the root object.
--
-- @
--matchFromRoot :: MatchRule
--matchFromRoot = 'matchAny' { 'matchPath' = Just \"/\" }
-- @
data MatchRule = MatchRule
    {
    -- | If set, only receives signals sent from the given bus name.
    --
    -- The standard D-Bus implementation from <http://dbus.freedesktop.org/>
    -- almost always sets signal senders to the unique name of the sending
    -- client. If 'matchSender' is a requested name like
    -- @\"com.example.Foo\"@, it will not match any signals.
    --
    -- The exception is for signals sent by the bus itself, which always
    -- have a sender of @\"org.freedesktop.DBus\"@.
      matchSender :: Maybe BusName

    -- | If set, only receives signals sent to the given bus name.
    , matchDestination :: Maybe BusName

    -- | If set, only receives signals sent with the given path.
    , matchPath :: Maybe ObjectPath

    -- | If set, only receives signals sent with the given interface name.
    , matchInterface :: Maybe InterfaceName

    -- | If set, only receives signals sent with the given member name.
    , matchMember :: Maybe MemberName
    }
-- | Shown in terms of the rule's textual form, rendered via
-- 'formatMatchRule'.
instance Show MatchRule where
    showsPrec d rule = showParen (d > 10) (showString "MatchRule " . shows (formatMatchRule rule))
-- | Convert a match rule into the textual format accepted by the bus:
-- a comma-separated list of @key='value'@ clauses, one per populated
-- field, in a fixed order.
formatMatchRule :: MatchRule -> String
formatMatchRule rule = intercalate "," (catMaybes clauses)
  where
    clauses =
        [ clause "sender" formatBusName (matchSender rule)
        , clause "destination" formatBusName (matchDestination rule)
        , clause "path" formatObjectPath (matchPath rule)
        , clause "interface" formatInterfaceName (matchInterface rule)
        , clause "member" formatMemberName (matchMember rule)
        ]

    clause :: String -> (a -> String) -> Maybe a -> Maybe String
    clause key render = fmap (\val -> key ++ "='" ++ render val ++ "'")
-- | Match any signal: every field is unset.
matchAny :: MatchRule
matchAny = MatchRule
    { matchSender = Nothing
    , matchDestination = Nothing
    , matchPath = Nothing
    , matchInterface = Nothing
    , matchMember = Nothing
    }
-- | Test whether a signal satisfies every populated field of a match
-- rule; fields left at 'Nothing' match anything.
checkMatchRule :: MatchRule -> Signal -> Bool
checkMatchRule rule msg = and
    [ maybe True (\x -> signalSender msg == Just x) (matchSender rule)
    , maybe True (\x -> signalDestination msg == Just x) (matchDestination rule)
    , maybe True (== signalPath msg) (matchPath rule)
    , maybe True (== signalInterface msg) (matchInterface rule)
    , maybe True (== signalMember msg) (matchMember rule)
    ]
-- | Internal exception used by 'throwError' to carry a D-Bus error name
-- and body out of a method handler; it is caught by 'method' and turned
-- into a 'ReplyError'.
data MethodExc = MethodExc ErrorName [Variant]
    deriving (Show, Eq, Typeable)

instance Control.Exception.Exception MethodExc
-- | Normally, any exceptions raised while executing a method will be
-- given the generic @\"org.freedesktop.DBus.Error.Failed\"@ name.
-- 'throwError' allows the programmer to specify an error name, and provide
-- additional information to the remote application. You may use this instead
-- of 'Control.Exception.throwIO' to abort a method call.
throwError :: ErrorName
           -> String -- ^ Error message
           -> [Variant] -- ^ Additional items of the error body
           -> IO a
throwError name message extra = Control.Exception.throwIO (MethodExc name (toVariant message : extra))
-- | Define a method handler, which will accept method calls with the given
-- interface and member name.
--
-- Note that the input and output parameter signatures are used for
-- introspection, but are not checked when executing a method.
--
-- See 'autoMethod' for an easier way to export functions with simple
-- parameter and return types.
method :: InterfaceName
       -> MemberName
       -> Signature -- ^ Input parameter signature
       -> Signature -- ^ Output parameter signature
       -> (MethodCall -> IO Reply)
       -> Method
method iface name inSig outSig io = Method iface name inSig outSig
    -- The inner catch converts a 'MethodExc' (from 'throwError') into an
    -- error reply with its own name; the outer catch turns any other
    -- exception into the generic "Failed" error with the exception text.
    (\msg -> Control.Exception.catch
        (Control.Exception.catch
            (io msg)
            (\(MethodExc name' vs') -> return (ReplyError name' vs')))
        (\exc -> return (ReplyError errorFailed
            [toVariant (show (exc :: SomeException))])))
-- | Export the given functions under the given 'ObjectPath' and
-- 'InterfaceName'.
--
-- Use 'autoMethod' to construct a 'Method' from a function that accepts and
-- returns simple types.
--
-- Use 'method' to construct a 'Method' from a function that handles parameter
-- conversion manually.
--
-- @
--ping :: MethodCall -> IO 'Reply'
--ping _ = replyReturn []
--
--sayHello :: String -> IO String
--sayHello name = return (\"Hello \" ++ name ++ \"!\")
--
--export client \"/hello_world\"
--    [ 'method' \"com.example.HelloWorld\" \"Ping\" ping
--    , 'autoMethod' \"com.example.HelloWorld\" \"Hello\" sayHello
--    ]
-- @
export :: Client -> ObjectPath -> [Method] -> IO ()
export client path methods = atomicModifyIORef (clientObjects client) addObject where
    addObject objs = (Data.Map.insert path info objs, ())

    -- Build the two-level map (interface -> member -> method), always
    -- including the default Introspect method.
    info = foldl' addMethod Data.Map.empty (defaultIntrospect : methods)
    addMethod m (Method iface name inSig outSig cb) = Data.Map.insertWith'
        Data.Map.union iface
        (Data.Map.fromList [(name, MethodInfo inSig outSig (wrapCB cb))]) m

    -- Adapt a user handler into a raw message callback that sends the
    -- return or error reply back to the original caller.
    wrapCB cb (ReceivedMethodCall serial msg) = do
        reply <- cb msg
        let sender = methodCallSender msg
        case reply of
            ReplyReturn vs -> send_ client (methodReturn serial)
                { methodReturnDestination = sender
                , methodReturnBody = vs
                } (\_ -> return ())
            ReplyError name vs -> send_ client (methodError serial name)
                { methodErrorDestination = sender
                , methodErrorBody = vs
                } (\_ -> return ())
    wrapCB _ _ = return ()

    -- Introspection data is computed on demand, so it reflects whatever
    -- is exported at the time of the Introspect call.
    defaultIntrospect = methodIntrospect $ do
        objects <- readIORef (clientObjects client)
        let Just obj = Data.Map.lookup path objects
        return (introspect path obj)
-- | Revokes the export of the given 'ObjectPath'. This will remove all
-- interfaces and methods associated with the path.
unexport :: Client -> ObjectPath -> IO ()
-- Delegates to this module's 'atomicModifyIORef_' helper instead of an
-- inline atomicModifyIORef with a hand-written (newMap, ()) pair; the
-- observable behaviour is unchanged.
unexport client path = atomicModifyIORef_ (clientObjects client) (Data.Map.delete path)
-- Resolve an incoming call to its callback: object path -> interface ->
-- member. When the call omits the interface name, every interface of the
-- object is searched and only an unambiguous (exactly one) match is
-- accepted; each failure maps to the appropriate well-known D-Bus error.
findMethod :: Map ObjectPath ObjectInfo -> MethodCall -> Either ErrorName Callback
findMethod objects msg = case Data.Map.lookup (methodCallPath msg) objects of
	Nothing -> Left errorUnknownObject
	Just obj -> case methodCallInterface msg of
		Nothing -> let
			members = do
				iface <- Data.Map.elems obj
				case Data.Map.lookup (methodCallMember msg) iface of
					Just member -> [member]
					Nothing -> []
			in case members of
				[MethodInfo _ _ io] -> Right io
				_ -> Left errorUnknownMethod
		Just ifaceName -> case Data.Map.lookup ifaceName obj of
			Nothing -> Left errorUnknownInterface
			Just iface -> case Data.Map.lookup (methodCallMember msg) iface of
				Just (MethodInfo _ _ io) -> Right io
				_ -> Left errorUnknownMethod
introspectRoot :: Client -> Method
introspectRoot client = methodIntrospect $ do
objects <- readIORef (clientObjects client)
let paths = filter (/= "/") (Data.Map.keys objects)
return (I.object "/")
{ I.objectInterfaces =
[ (I.interface interfaceIntrospectable)
{ I.interfaceMethods =
[ (I.method "Introspect")
{ I.methodArgs =
[ I.methodArg "" TypeString I.directionOut
]
}
]
}
]
, I.objectChildren = [I.object p | p <- paths]
}
methodIntrospect :: IO I.Object -> Method
methodIntrospect get = method interfaceIntrospectable "Introspect" "" "s" $
\msg -> case methodCallBody msg of
[] -> do
obj <- get
let Just xml = I.formatXML obj
return (replyReturn [toVariant xml])
_ -> return (replyError errorInvalidParameters [])
introspect :: ObjectPath -> ObjectInfo -> I.Object
introspect path obj = (I.object path) { I.objectInterfaces = interfaces } where
interfaces = map introspectIface (Data.Map.toList obj)
introspectIface (name, iface) = (I.interface name)
{ I.interfaceMethods = concatMap introspectMethod (Data.Map.toList iface)
}
args inSig outSig =
map (introspectArg I.directionIn) (signatureTypes inSig) ++
map (introspectArg I.directionOut) (signatureTypes outSig)
introspectMethod (name, MethodInfo inSig outSig _) =
[ (I.method name)
{ I.methodArgs = args inSig outSig
}
]
introspectArg dir t = I.methodArg "" t dir
-- | Used to automatically generate method signatures for introspection
-- documents. To support automatic signatures, a method's parameters and
-- return value must all be instances of 'IsValue'.
--
-- This class maps Haskell idioms to D-Bus; it is therefore unable to
-- generate some signatures. In particular, it does not support methods
-- which accept/return a single structure, or single-element structures.
-- It also cannot generate signatures for methods with parameters or return
-- values which are only instances of 'IsVariant'. For these cases, please
-- use 'DBus.Client.method'.
--
-- To match common Haskell use, if the return value is a tuple, it will be
-- converted to a list of return values.
class AutoMethod a where
funTypes :: a -> ([Type], [Type])
apply :: a -> [Variant] -> Maybe (IO [Variant])
instance AutoMethod (IO ()) where
funTypes _ = ([], [])
apply io [] = Just (io >> return [])
apply _ _ = Nothing
instance IsValue a => AutoMethod (IO a) where
funTypes io = cased where
cased = ([], case ioT io undefined of
(_, t) -> case t of
TypeStructure ts -> ts
_ -> [t])
ioT :: IsValue a => IO a -> a -> (a, Type)
ioT _ a = (a, typeOf a)
apply io [] = Just (do
var <- fmap toVariant io
case fromVariant var of
Just struct -> return (structureItems struct)
Nothing -> return [var])
apply _ _ = Nothing
instance (IsValue a, AutoMethod fn) => AutoMethod (a -> fn) where
funTypes fn = cased where
cased = case valueT undefined of
(a, t) -> case funTypes (fn a) of
(ts, ts') -> (t : ts, ts')
valueT :: IsValue a => a -> (a, Type)
valueT a = (a, typeOf a)
apply _ [] = Nothing
apply fn (v:vs) = case fromVariant v of
Just v' -> apply (fn v') vs
Nothing -> Nothing
-- | Prepare a Haskell function for export, automatically detecting the
-- function's type signature.
--
-- See 'AutoMethod' for details on the limitations of this function.
--
-- See 'method' for exporting functions with user-defined types.
autoMethod :: (AutoMethod fn) => InterfaceName -> MemberName -> fn -> Method
autoMethod iface name fun = DBus.Client.method iface name inSig outSig io where
	(typesIn, typesOut) = funTypes fun
	-- A parameter/return type that cannot be expressed as a D-Bus
	-- signature is a programming error, reported via 'invalid'.
	inSig = case signature typesIn of
		Just sig -> sig
		Nothing -> invalid "input"
	outSig = case signature typesOut of
		Just sig -> sig
		Nothing -> invalid "output"
	-- 'apply' yields Nothing when the call body does not match the
	-- function's parameters; that maps to an InvalidParameters error.
	io msg = case apply fun (methodCallBody msg) of
		Nothing -> return (ReplyError errorInvalidParameters [])
		Just io' -> fmap ReplyReturn io'
	invalid label = error (concat
		[ "Method "
		, formatInterfaceName iface
		, "."
		, formatMemberName name
		, " has an invalid "
		, label
		, " signature."])
errorFailed :: ErrorName
errorFailed = errorName_ "org.freedesktop.DBus.Error.Failed"
errorDisconnected :: ErrorName
errorDisconnected = errorName_ "org.freedesktop.DBus.Error.Disconnected"
errorUnknownObject :: ErrorName
errorUnknownObject = errorName_ "org.freedesktop.DBus.Error.UnknownObject"
errorUnknownInterface :: ErrorName
errorUnknownInterface = errorName_ "org.freedesktop.DBus.Error.UnknownInterface"
errorUnknownMethod :: ErrorName
errorUnknownMethod = errorName_ "org.freedesktop.DBus.Error.UnknownMethod"
errorInvalidParameters :: ErrorName
errorInvalidParameters = errorName_ "org.freedesktop.DBus.Error.InvalidParameters"
dbusName :: BusName
dbusName = busName_ "org.freedesktop.DBus"
dbusPath :: ObjectPath
dbusPath = objectPath_ "/org/freedesktop/DBus"
dbusInterface :: InterfaceName
dbusInterface = interfaceName_ "org.freedesktop.DBus"
interfaceIntrospectable :: InterfaceName
interfaceIntrospectable = interfaceName_ "org.freedesktop.DBus.Introspectable"
-- | Atomically apply a pure function to the contents of an 'IORef',
-- discarding any result.
atomicModifyIORef_ :: IORef a -> (a -> a) -> IO ()
atomicModifyIORef_ ref f = atomicModifyIORef ref step
	where step old = (f old, ())
#if !MIN_VERSION_base(4,6,0)
-- Compatibility shim: 'atomicWriteIORef' was added to base 4.6, so an
-- equivalent is provided via 'atomicModifyIORef' on older compilers.
atomicWriteIORef :: IORef a -> a -> IO ()
atomicWriteIORef ref x = atomicModifyIORef ref (\_ -> (x, ()))
#endif
| jotrk/haskell-dbus | lib/DBus/Client.hs | gpl-3.0 | 32,576 | 1,041 | 15 | 5,979 | 6,777 | 3,930 | 2,847 | 528 | 8 |
module Lab5 where
import System.Random
import Data.List
import Data.Maybe (fromJust)
import Data.Sequence (Seq,fromList)
import Lecture5
import Example
import Control.Applicative
-- Define Main --
main = do
putStrLn "===================="
putStrLn "Assignment 5 / Lab 5"
putStrLn "===================="
putStrLn "> Exercise 1"
exercise1
putStrLn "> Exercise 2"
-- exercise2
putStrLn "> Exercise 3"
exercise3
putStrLn "> Exercise 4"
exercise4
putStrLn "> Exercise 5"
exercise5
putStrLn "> Exercise 6"
-- exercise6
putStrLn "> Exercise 7"
-- exercise7
-- =============================================================================
-- Exercise 1 :: Time spent: +- 180 minutes
--
-- Basically modified the provided code to take the additional constraints into
-- account.
-- =============================================================================
exercise1 = do
-- Not neccesarily a efficient solution, but since we work together sharing
-- all the same base files (i.e. Example.hs and Lecture5.hs) I needed to find
-- a solution that didn't require me modifying Lecture5.hs (which would have
-- been alot easier)
solveAndShowNrc example
-- | The two row/column bands that form the extra NRC sub-blocks.
nrcBlock :: [[Int]]
nrcBlock = [[2..4],[6..8]]

-- | The NRC band containing the given coordinate, or [] when the
-- coordinate lies outside every band.
nrcBl :: Int -> [Int]
nrcBl coord = concat [band | band <- nrcBlock, coord `elem` band]
subBlock :: Sudoku -> (Row,Column) -> [Value]
subBlock s (r, c) = [ s (r',c')| r' <- nrcBl r ,c' <- nrcBl c]
nrcFreeInSubgrid :: Sudoku -> (Row,Column) -> [Value]
nrcFreeInSubgrid s (r,c) = freeInSeq (subBlock s (r,c))
freeNrc :: Sudoku -> (Row,Column) -> [Value]
freeNrc s (r, c) =
if r `elem` (concat nrcBlock) && c `elem` (concat nrcBlock) then
(nrcFreeInSubgrid s (r,c))
else values
nrcGridInjective :: Sudoku -> (Row,Column) -> Bool
nrcGridInjective s (r,c) = injective vs where
vs = filter (/= 0) (subBlock s (r,c))
nrcFreeAtPos :: Sudoku -> (Row,Column) -> [Value]
nrcFreeAtPos s (r,c) = (freeInRow s r)
`intersect` (freeInColumn s c)
`intersect` (freeInSubgrid s (r,c))
`intersect` (freeNrc s (r,c))
nrcConstraints :: Sudoku -> [Constraint]
nrcConstraints s = sortBy length3rd
[(r,c, nrcFreeAtPos s (r,c)) |
(r,c) <- openPositions s ]
nrcConsistent :: Sudoku -> Bool
nrcConsistent s = and $
[ rowInjective s r | r <- positions ]
++
[ colInjective s c | c <- positions ]
++
[ subgridInjective s (r,c) | r <- [1,4,7], c <- [1,4,7]]
++
[ nrcGridInjective s (r,c) | r <- [2, 6], c <- [2, 6]]
initNrcNode :: Grid -> [Node]
initNrcNode gr = let s = grid2sud gr in
if (not . nrcConsistent) s then []
else [(s, nrcConstraints s)]
nrcSolveNs :: [Node] -> [Node]
nrcSolveNs = search nrcSuccNode solved
nrcRsolveNs :: [Node] -> IO [Node]
nrcRsolveNs ns = rsearch nrcRsuccNode solved (return ns)
nrcSuccNode :: Node -> [Node]
nrcSuccNode (s,[]) = []
nrcSuccNode (s,p:ps) = nrcExtendNode (s,ps) p
nrcRsuccNode :: Node -> IO [Node]
nrcRsuccNode (s,cs) = do
xs <- getRandomCnstr cs
if null xs
then
return []
else
return (nrcExtendNode (s,cs\\xs) (head xs))
nrcExtendNode :: Node -> Constraint -> [Node]
nrcExtendNode (s,nrcConstraints) (r,c,vs) =
[(extend s ((r,c),v), sortBy length3rd $ nrcPrune (r,c,v) nrcConstraints) | v <- vs ]
-- | After placing value v at (r,c), remove v from the candidate list of
-- every constraint that shares a row, a column, a standard 3x3 block or
-- an NRC sub-block with (r,c); all other constraints pass unchanged.
nrcPrune :: (Row,Column,Value) -> [Constraint] -> [Constraint]
nrcPrune _ [] = []
nrcPrune (r,c,v) ((x,y,zs):rest)
  | r == x = (x,y,zs\\[v]) : nrcPrune (r,c,v) rest
  | c == y = (x,y,zs\\[v]) : nrcPrune (r,c,v) rest
  | sameblock (r,c) (x,y) = (x,y,zs\\[v]) : nrcPrune (r,c,v) rest
  | nrcSameblock (r,c) (x,y) = (x,y,zs\\[v]) : nrcPrune (r,c,v) rest
  | otherwise = (x,y,zs) : nrcPrune (r,c,v) rest
nrcSameblock :: (Row, Column) -> (Row,Column) -> Bool
-- Two cells constrain each other through the NRC layer only when both
-- coordinates actually lie inside an NRC band. 'nrcBl' returns [] for
-- coordinates outside [2..4] and [6..8]; without the null checks, two
-- cells that are each outside the NRC bands compared equal ([] == [])
-- and were wrongly treated as sharing a block, which made 'nrcPrune'
-- delete candidates from unrelated cells and could eliminate valid
-- solutions.
nrcSameblock (r,c) (x,y) =
  not (null (nrcBl r)) && not (null (nrcBl c)) &&
  nrcBl r == nrcBl x && nrcBl c == nrcBl y
nrcSolveShowNs :: [Node] -> IO[()]
nrcSolveShowNs = sequence . fmap showNode . nrcSolveNs
solveAndShowNrc :: Grid -> IO[()]
solveAndShowNrc gr = nrcSolveShowNs (initNrcNode gr)
--- SOLUTION
-- +-------+-------+-------+
-- | 4 7 8 | 3 9 2 | 6 1 5 |
-- | 6 1 9 | 7 5 8 | 3 2 4 |
-- | 2 3 5 | 4 1 6 | 9 7 8 |
-- +-------+-------+-------+
-- | 7 2 6 | 8 3 5 | 1 4 9 |
-- | 8 9 1 | 6 2 4 | 7 5 3 |
-- | 3 5 4 | 9 7 1 | 2 8 6 |
-- +-------+-------+-------+
-- | 5 6 7 | 2 8 9 | 4 3 1 |
-- | 9 8 3 | 1 4 7 | 5 6 2 |
-- | 1 4 2 | 5 6 3 | 8 9 7 |
-- +-------+-------+-------+
-- =============================================================================
-- Exercise 2 :: Time spent: +-
-- =============================================================================
exercise2 = do
print()
type Position = (Row,Column)
type Constrnt = [[Position]]
rowConstrnt = [[(r,c)| c <- values ] | r <- values ]
columnConstrnt = [[(r,c)| r <- values ] | c <- values ]
blockConstrnt = [[(r,c)| r <- b1, c <- b2 ] | b1 <- blocks, b2 <- blocks ]
freeAtPos' :: Sudoku -> Position -> Constrnt -> [Value]
freeAtPos' s (r,c) xs = let
ys = filter (elem (r,c)) xs
in
foldl1 intersect (map ((values \\) . map s) ys)
-- =============================================================================
-- Exercise 3 :: Time spent: +- 480 minutes
--
-- Added code that will randomly change coordinates in a grid to 0, modified
-- the testcode from Lab2 and run the 'null-coordinates' function x times. If
-- a Sudoku is found that ALSO has 1 solution, and it is NOT the same Sudoku as
-- the original (i.e. a 0 has been changed to 0) than the Sudoku is not minimal
-- =============================================================================
exercise3 = do
test 1 100 example testGrid
test :: Integer -> Integer -> Grid -> (Grid -> IO Grid) -> IO ()
test k n i f =
if k == n then
print (show n ++ " tests passed")
else do
x <- f i
if (solveAndCountNrc x) > 1 || (x == i) then
do test (k+1) n i f
else print ("failed after " ++ (show k) ++ " attempts. " ++ show (solveAndCountNrc x) ++ " solutions." ++ show x)
-- | Blank one random cell of the grid: pick a random row, then a random
-- index within it, and set that cell to 0.
-- NOTE(review): the column index is drawn from [0, length gr - 1], i.e.
-- the number of rows; this is only correct because the grid is square —
-- confirm if non-square grids are ever passed in.
testGrid :: Grid -> IO Grid
testGrid gr = do
  x <- randomRow gr
  n <- randomRIO (0, length gr -1)
  return $ replaceAtIndex (findRowIndex x gr) (replaceNth n x) gr
-- | Replace the element at position n (0-based) with the given row.
-- Partial, like the original: requires 0 <= n < length ls.
replaceAtIndex :: Int -> [Int] -> [[Int]] -> [[Int]]
replaceAtIndex n item ls =
  let (before, _ : after) = splitAt n ls
  in before ++ item : after
-- | Choose a uniformly random element of a non-empty list.
pick :: [a] -> IO a
pick xs = do
  i <- randomRIO (0, length xs - 1)
  return (xs !! i)
replaceNth :: Int -> [Int] -> [Int]
-- Zero out the element at index n (0-based), leaving the rest intact.
-- Unlike the original (which had no [] equation and crashed with a
-- pattern-match failure), an out-of-range index now returns the list
-- unchanged; behaviour for valid indices is identical.
replaceNth _ [] = []
replaceNth n (x:xs)
  | n == 0 = 0:xs
  | otherwise = x:replaceNth (n-1) xs
-- | Select a random row of the grid; simply 'pick' specialised to rows.
randomRow :: Grid -> IO [Int]
randomRow = pick
-- | Index of the given row within the grid. Partial (uses 'fromJust'),
-- so the row must be present — same contract as the original.
findRowIndex :: [Int] -> [[Int]] -> Int
findRowIndex r = fromJust . elemIndex r
-- | Apply f to the first element for which it yields Just, leaving the
-- remainder of the list untouched; identity when f never fires.
mapOnce :: (a -> Maybe a) -> [a] -> [a]
mapOnce _ []       = []
mapOnce f (x : xs)
  | Just y <- f x = y : xs
  | otherwise     = x : mapOnce f xs

-- | Blank out (set to 0) the first filled cell of a row, if any.
removeHint :: [Int] -> [Int]
removeHint = mapOnce blank
  where
    blank 0 = Nothing
    blank _ = Just 0
-- | Total number of hints (pre-filled cells) in a grid: the sum of the
-- per-row hint counts.
calculateHints :: Grid -> Int
calculateHints gr = sum $ map calculateHintsRow gr

-- | Number of hints in one row. A hint is any non-zero cell (0 marks an
-- empty cell); the previous implementation filtered with (> 1) and
-- therefore failed to count cells holding the value 1. The sum-of-ones
-- is also simplified to 'length'.
calculateHintsRow :: [Value] -> Int
calculateHintsRow r = length $ filter (/= 0) r
solveAndCountNrc :: Grid -> Int
solveAndCountNrc gr =
let
x = nrcSolveNs (initNrcNode gr)
in length x
-- =============================================================================
-- Exercise 4 :: Time spent: +- 180 minutes
--
-- Chose the following approach:
-- 1. define all blocks (by combining the blocks, row, and colums)
-- 2. calculate the powerset (all subsets, including itself and the empty set)
-- 3. filter the number of blocks, if we choose x, then the number of blocks
-- that are ignored is 9 - x
-- 4. use modified version of genProblem (that takes a grid as argument) to
-- calculate the posibilities given that the other blocks are already filled
-- (i.e. they are ignore and can be left empty)
-- =============================================================================
exercise4 = do
presetEmptyProblems emptyblocks2
presetEmptyProblems emptyblocks3
presetEmptyProblems emptyblocks4
presetEmptyProblems emptyblocks5
-- Block definitions: subsets of blocksALL keeping the given number of
-- blocks; the complement (9 - kept) are the blocks left empty.
emptyblocks2 = filter(\x -> length x == 7) (powerset blocksALL) -- length is 7, 2 blocks are empty
emptyblocks3 = filter(\x -> length x == 6) (powerset blocksALL) -- length is 6, 3 blocks are empty
emptyblocks4 = filter(\x -> length x == 5) (powerset blocksALL) -- length is 5, 4 blocks are empty
emptyblocks5 = filter(\x -> length x == 4) (powerset blocksALL) -- length is 4, 5 blocks are empty
-- Walk through the possibilities using presetEmptyProblem
presetEmptyProblems :: [[[(Row,Column)]]] -> IO ()
presetEmptyProblems [] = print $ "done"
presetEmptyProblems (x:xs) = do
presetEmptyProblem (concat x)
presetEmptyProblems xs
-- Generate a single problem with a preset
presetEmptyProblem :: [(Row,Column)] -> IO ()
presetEmptyProblem gr = do
[r] <- rsolveNs [emptyN]
showNode r
s <- presetGenProblem r gr
showNode s
-- modified genproblem to specify a node with filled coordinates
presetGenProblem :: Node -> [(Row, Column)] -> IO Node
presetGenProblem n gr = do
ys <- randomize xs
return (minimalize n ys)
where xs = gr
-- | Cartesian product of row and column coordinates.
combine :: [Row] -> [Column] -> [(Row,Column)]
combine rs cs = [(r, c) | r <- rs, c <- cs]
-- Rows of 3 blocks
blocksT,blocksM,blocksB :: [Int]
blocksT = blocks !! 0
blocksM = blocks !! 1
blocksB = blocks !! 2
-- Individual blocks
blockTT,blockTM,blockTB :: [(Row,Column)]
blockMT,blockMM,blockMB :: [(Row,Column)]
blockBT,blockBM,blockBB :: [(Row,Column)]
blockTT = combine blocksT blocksT
blockTM = combine blocksT blocksM
blockTB = combine blocksT blocksB
blockMT = combine blocksM blocksT
blockMM = combine blocksM blocksM
blockMB = combine blocksM blocksB
blockBT = combine blocksB blocksT
blockBM = combine blocksB blocksM
blockBB = combine blocksB blocksB
-- List of all blocks
blocksALL :: [[(Row,Column)]]
blocksALL = [blockTT,blockTM,blockTB,
blockMT,blockMM,blockMB,
blockBT,blockBM,blockBB]
-- | Power set: every subset of the input (2^n results), including the
-- empty set and the input itself. Fold formulation equivalent to the
-- classic recursive definition, producing subsets in the same order.
powerset :: [a] -> [[a]]
powerset = foldr addElem [[]]
  where addElem x subsets = subsets ++ map (x:) subsets
-- Modified version to count
solveAndCount :: Grid -> Int
solveAndCount gr =
let
x = solveNs (initNode gr)
in length x
-- Used this to prove it was actually possible
-- emptyblocks4 :: Grid
-- emptyblocks4 = [[1,3,4,0,0,0,0,0,0],
-- [8,6,5,0,0,0,0,0,0],
-- [2,7,9,0,0,0,0,0,0],
-- [5,2,6,3,4,7,0,0,0],
-- [7,9,1,8,2,6,0,0,0],
-- [3,4,8,5,9,1,2,0,0],
-- [4,5,7,0,0,0,6,3,1],
-- [6,8,3,0,0,0,9,7,2],
-- [9,1,2,0,0,0,8,5,4]]
-- =============================================================================
-- Exercise 5 :: Time spent: +- 150 minutes
--
-- Basically modified the provided generator to take the additional constraints
-- into account
-- =============================================================================
exercise5 = do
[r] <- nrcRsolveNs [emptyN]
showNode r
s <- nrcGenProblem r
showNode s
nrcEraseN :: Node -> (Row,Column) -> Node
nrcEraseN n (r,c) = (s, nrcConstraints s)
where s = eraseS (fst n) (r,c)
-- | Does the node admit exactly one solution under the NRC constraints?
nrcUniqueSol :: Node -> Bool
nrcUniqueSol node = exactlyOne (nrcSolveNs [node])
  where
    exactlyOne [_] = True
    exactlyOne _   = False
nrcMinimalize :: Node -> [(Row,Column)] -> Node
nrcMinimalize n [] = n
nrcMinimalize n ((r,c):rcs) | nrcUniqueSol n' = nrcMinimalize n' rcs
| otherwise = nrcMinimalize n rcs
where n' = nrcEraseN n (r,c)
nrcGenProblem :: Node -> IO Node
nrcGenProblem n = do
ys <- randomize xs
return (nrcMinimalize n ys)
where xs = filledPositions (fst n)
-- =============================================================================
-- =============================================================================
-- Exercise 6 :: Time spent: +-
exercise6 = do
print()
-- =============================================================================
-- Exercise 7 :: Time spent: +-
-- =============================================================================
exercise7 = do
print()
| vdweegen/UvA-Software_Testing | Lab5/Cas/Exercises.hs | gpl-3.0 | 12,144 | 0 | 17 | 2,476 | 3,935 | 2,132 | 1,803 | 234 | 3 |
import Control.Monad (replicateM)
import qualified Data.Map.Strict as M
import Data.Word
-- Record the position at which a value occurs in the position map.
seen x pos m = M.insert x pos m
-- Keep only the occurrences recorded at or after the given position.
prune cutoff m = M.filter (>= cutoff) m
-- Fold one (position, value) occurrence into the scan state (mg, m):
-- mg is the smallest gap found so far, m maps each value to the position
-- of its most recent occurrence.
--  * unseen value: just record its position;
--  * first gap overall: record it and prune entries older than the
--    previous occurrence;
--  * strictly smaller gap than the current best: recurse with mg reset
--    to Nothing so the new gap is recorded;
--  * otherwise only refresh the value's latest position.
enmap x p (mg, m)
  | M.notMember x m = (mg, seen x p m)
  | otherwise = case (mg, m M.! x, p - (m M.! x)) of
    (Nothing, p', g) -> (,) (Just g) $ seen x p $ prune p' m
    (Just g, p', g') -> if g' < g
      then enmap x p (Nothing, m)
      else (Just g, seen x p m)
-- Consume the (position, value) pairs, folding each into the state with
-- 'enmap'; short-circuits as soon as the smallest possible gap (1) has
-- been found.
minGap (g, _) [] = g
minGap (Just 1, _) _ = Just 1
minGap m ((p, x) : xs) = minGap (enmap x p m) xs
-- | Smallest distance between two equal elements of the list (positions
-- are 1-based), or Nothing when all elements are distinct.
minimumDistance :: [Word32] -> Maybe Int
minimumDistance = minGap (Nothing, M.empty) . zip [1..]
-- Render the answer: the gap when one exists, otherwise "-1".
showResult m = maybe "-1" show m
main = do
n <- readLn
xs <- fmap (take n . map read . words) getLine
putStrLn $ showResult $ minimumDistance xs
| itsbruce/hackerrank | alg/implementation/minDistance.hs | unlicense | 900 | 7 | 12 | 271 | 463 | 234 | 229 | 23 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Kubernetes.V1.SecurityContextConstraintsList where
import GHC.Generics
import Data.Text
import Kubernetes.Unversioned.ListMeta
import Kubernetes.V1.SecurityContextConstraints
import qualified Data.Aeson
-- |
data SecurityContextConstraintsList = SecurityContextConstraintsList
{ kind :: Maybe Text -- ^ Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#types-kinds
, apiVersion :: Maybe Text -- ^ APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#resources
, metadata :: Maybe ListMeta -- ^
, items :: [SecurityContextConstraints] -- ^
} deriving (Show, Eq, Generic)
instance Data.Aeson.FromJSON SecurityContextConstraintsList
instance Data.Aeson.ToJSON SecurityContextConstraintsList
| minhdoboi/deprecated-openshift-haskell-api | kubernetes/lib/Kubernetes/V1/SecurityContextConstraintsList.hs | apache-2.0 | 1,298 | 0 | 9 | 170 | 125 | 77 | 48 | 19 | 0 |
{-|
-}
module Image (
Image
) where
import VecMath (UVCoord)
import Raster (Color)
-- | An image takes a UVCoord and returns a Color.
--
-- Images are conceptually continuous.
type Image = UVCoord -> Color
| lancelet/approxier | src/lib/Image.hs | apache-2.0 | 217 | 0 | 5 | 48 | 39 | 26 | 13 | 5 | 0 |
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-|Functions utilizing Kleisli Arrows to merge, pipe & split input.
-}
module Control.Arrow.Utils (
-- * Lifting & composing functions
liftP,
(>>>),
(<<<),
idP,
-- * Splitting inputs
(<>),
(<>*),
-- ** Convenience functions
copy,
flattenP,
switch,
-- * Merging outputs
(><),
-- * Guards
guard,
(??),
failIf,
checkThat,
-- ** Guard with messages
failIfEither,
-- * Type synonyms for common pieces
Pipe,
-- ** Splitters
Splitter,
ManySplitter,
-- ** Mergers
Merger,
ManyMerger
) where
import Control.Monad(MonadPlus(..), liftM)
import Control.Monad.Error ((>=>))
import Control.Arrow
import Control.Monad.Trans.Either
-- |A basic (monadic) function.
type Pipe m a b = Kleisli m a b
-- |A function with two outputs.
type Splitter m a b c = Kleisli m a (b,c)
-- |A function with a list of outputs.
type ManySplitter m a b = Kleisli m a [b]
-- |A function with two inputs.
type Merger m a b c = Kleisli m (a,b) c
-- |A function with a list of inputs.
type ManyMerger m a b = Kleisli m [a] b
-- |Wraps a pure function into a monadic pipe.
liftP :: Monad m => (a -> b) -> Pipe m a b
liftP f = Kleisli (\x -> return (f x))
-- |The identity pipe: hands its input straight through.
idP :: Monad m => Pipe m a a
idP = Kleisli return
-- |Splits an input into two parts and gives each to a handler.
(<>) :: Monad m
=> Splitter m a b c
-> (Pipe m b d, Pipe m c e)
-> Splitter m a d e
(<>) f (g,h) = f >>> first g >>> second h
-- |Splits an input into a list and gives each list element to a handler.
(<>*) :: Monad m
=> ManySplitter m a b
-> Pipe m b c
-> ManySplitter m a c
(<>*) s p = Kleisli (runKleisli s >=> mapM (runKleisli p))
-- |Takes a splitter and merges its outputs.
(><) :: Monad m
=> Splitter m a b c
-> Merger m b c d
-> Pipe m a d
(><) = (>>>)
-- |Applies a pipe only if a certain condition is met; otherwise the
-- input is passed through unchanged (identity behaviour).
guard :: Monad m
      => (a -> Bool)
      -> Pipe m a a
      -> Pipe m a a
guard cond pipe = Kleisli step
  where step x | cond x    = runKleisli pipe x
               | otherwise = return x
-- |Synonym for 'guard'.
(??) :: Monad m
=> (a -> Bool)
-> Pipe m a a
-> Pipe m a a
(??) = guard
-- |Induces a global failure ('mzero') that causes everything down the
-- line to fail whenever the condition holds; otherwise runs the pipe.
failIf :: MonadPlus m => (a -> Bool) -> Pipe m a a -> Pipe m a a
failIf cond pipe = Kleisli step
  where step x | cond x    = mzero
               | otherwise = runKleisli pipe x
-- |A variant of @failIf@ which inserts an error message in case of
-- failure.
failIfEither :: Monad m =>
                (a -> Bool)
                -> String
                -> Pipe (EitherT String m) a a
                -> Pipe (EitherT String m) a a
-- When the predicate holds, short-circuit the whole pipeline with
-- 'Left err'; otherwise run the wrapped pipe unchanged.
failIfEither f err p = Kleisli $
   \x -> if f x then EitherT $ return $ Left err else runKleisli p x
-- |Inverse of 'failIf': only proceeds if a certain condition is
-- met and induces a global failure otherwise.
checkThat :: MonadPlus m => (a -> Bool) -> Pipe m a a -> Pipe m a a
checkThat f = failIf (not . f)
-- |Duplicates the input into a pair.
copy :: Monad m => Splitter m a a a
copy = Kleisli (\x -> return (x, x))
-- |Flattens the result of a Splitter 'a -> m (b, m c)' into
-- 'a -> m (b, c)' by running the embedded action and pairing its result
-- with the already-computed first component. This is useful if 'm c' is
-- some monadic auxiliary information that was previously added.
flattenP :: (Monad m)
         => Pipe m (b, m c) (b,c)
flattenP = Kleisli $ \(x, action) -> do
  y <- action
  return (x, y)
-- |Swaps the two components of a pair.
switch :: Monad m => Pipe m (a,b) (b,a)
switch = Kleisli (\(x, y) -> return (y, x))
| ombocomp/Renaming | Control/Arrow/Utils.hs | apache-2.0 | 3,594 | 0 | 11 | 964 | 1,087 | 613 | 474 | 79 | 2 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QTranslator.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:32
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Core.QTranslator (
QqTranslator(..)
,qTranslator_delete
,qTranslator_deleteLater
)
where
import Qth.ClassTypes.Core
import Qtc.Enums.Base
import Qtc.Classes.Base
import Qtc.Classes.Qccs
import Qtc.Classes.Core
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
instance QuserMethod (QTranslator ()) (()) (IO ()) where
userMethod qobj evid ()
= withObjectPtr qobj $ \cobj_qobj ->
qtc_QTranslator_userMethod cobj_qobj (toCInt evid)
foreign import ccall "qtc_QTranslator_userMethod" qtc_QTranslator_userMethod :: Ptr (TQTranslator a) -> CInt -> IO ()
instance QuserMethod (QTranslatorSc a) (()) (IO ()) where
userMethod qobj evid ()
= withObjectPtr qobj $ \cobj_qobj ->
qtc_QTranslator_userMethod cobj_qobj (toCInt evid)
instance QuserMethod (QTranslator ()) (QVariant ()) (IO (QVariant ())) where
userMethod qobj evid qvoj
= withObjectRefResult $
withObjectPtr qobj $ \cobj_qobj ->
withObjectPtr qvoj $ \cobj_qvoj ->
qtc_QTranslator_userMethodVariant cobj_qobj (toCInt evid) cobj_qvoj
foreign import ccall "qtc_QTranslator_userMethodVariant" qtc_QTranslator_userMethodVariant :: Ptr (TQTranslator a) -> CInt -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))
instance QuserMethod (QTranslatorSc a) (QVariant ()) (IO (QVariant ())) where
userMethod qobj evid qvoj
= withObjectRefResult $
withObjectPtr qobj $ \cobj_qobj ->
withObjectPtr qvoj $ \cobj_qvoj ->
qtc_QTranslator_userMethodVariant cobj_qobj (toCInt evid) cobj_qvoj
class QqTranslator x1 where
qTranslator :: x1 -> IO (QTranslator ())
instance QqTranslator (()) where
qTranslator ()
= withQTranslatorResult $
qtc_QTranslator
foreign import ccall "qtc_QTranslator" qtc_QTranslator :: IO (Ptr (TQTranslator ()))
instance QqTranslator ((QObject t1)) where
qTranslator (x1)
= withQTranslatorResult $
withObjectPtr x1 $ \cobj_x1 ->
qtc_QTranslator1 cobj_x1
foreign import ccall "qtc_QTranslator1" qtc_QTranslator1 :: Ptr (TQObject t1) -> IO (Ptr (TQTranslator ()))
instance QqisEmpty (QTranslator ()) (()) where
qisEmpty x0 ()
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QTranslator_isEmpty_h cobj_x0
foreign import ccall "qtc_QTranslator_isEmpty_h" qtc_QTranslator_isEmpty_h :: Ptr (TQTranslator a) -> IO CBool
instance QqisEmpty (QTranslatorSc a) (()) where
qisEmpty x0 ()
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QTranslator_isEmpty_h cobj_x0
instance Qload (QTranslator a) ((String)) (IO (Bool)) where
load x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QTranslator_load cobj_x0 cstr_x1
foreign import ccall "qtc_QTranslator_load" qtc_QTranslator_load :: Ptr (TQTranslator a) -> CWString -> IO CBool
instance Qload (QTranslator a) ((String, String)) (IO (Bool)) where
load x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
withCWString x2 $ \cstr_x2 ->
qtc_QTranslator_load1 cobj_x0 cstr_x1 cstr_x2
foreign import ccall "qtc_QTranslator_load1" qtc_QTranslator_load1 :: Ptr (TQTranslator a) -> CWString -> CWString -> IO CBool
instance Qload (QTranslator a) ((String, String, String)) (IO (Bool)) where
load x0 (x1, x2, x3)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
withCWString x2 $ \cstr_x2 ->
withCWString x3 $ \cstr_x3 ->
qtc_QTranslator_load2 cobj_x0 cstr_x1 cstr_x2 cstr_x3
foreign import ccall "qtc_QTranslator_load2" qtc_QTranslator_load2 :: Ptr (TQTranslator a) -> CWString -> CWString -> CWString -> IO CBool
instance Qload (QTranslator a) ((String, String, String, String)) (IO (Bool)) where
load x0 (x1, x2, x3, x4)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
withCWString x2 $ \cstr_x2 ->
withCWString x3 $ \cstr_x3 ->
withCWString x4 $ \cstr_x4 ->
qtc_QTranslator_load3 cobj_x0 cstr_x1 cstr_x2 cstr_x3 cstr_x4
foreign import ccall "qtc_QTranslator_load3" qtc_QTranslator_load3 :: Ptr (TQTranslator a) -> CWString -> CWString -> CWString -> CWString -> IO CBool
instance Qqtranslate (QTranslator a) ((String, String)) (IO (String)) where
qtranslate x0 (x1, x2)
= withStringResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
withCWString x2 $ \cstr_x2 ->
qtc_QTranslator_translate cobj_x0 cstr_x1 cstr_x2
foreign import ccall "qtc_QTranslator_translate" qtc_QTranslator_translate :: Ptr (TQTranslator a) -> CWString -> CWString -> IO (Ptr (TQString ()))
instance Qqtranslate (QTranslator a) ((String, String, String)) (IO (String)) where
qtranslate x0 (x1, x2, x3)
= withStringResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
withCWString x2 $ \cstr_x2 ->
withCWString x3 $ \cstr_x3 ->
qtc_QTranslator_translate1 cobj_x0 cstr_x1 cstr_x2 cstr_x3
foreign import ccall "qtc_QTranslator_translate1" qtc_QTranslator_translate1 :: Ptr (TQTranslator a) -> CWString -> CWString -> CWString -> IO (Ptr (TQString ()))
instance Qqtranslate (QTranslator ()) ((String, String, String, Int)) (IO (String)) where
qtranslate x0 (x1, x2, x3, x4)
= withStringResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
withCWString x2 $ \cstr_x2 ->
withCWString x3 $ \cstr_x3 ->
qtc_QTranslator_translate2_h cobj_x0 cstr_x1 cstr_x2 cstr_x3 (toCInt x4)
foreign import ccall "qtc_QTranslator_translate2_h" qtc_QTranslator_translate2_h :: Ptr (TQTranslator a) -> CWString -> CWString -> CWString -> CInt -> IO (Ptr (TQString ()))
instance Qqtranslate (QTranslatorSc a) ((String, String, String, Int)) (IO (String)) where
qtranslate x0 (x1, x2, x3, x4)
= withStringResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
withCWString x2 $ \cstr_x2 ->
withCWString x3 $ \cstr_x3 ->
qtc_QTranslator_translate2_h cobj_x0 cstr_x1 cstr_x2 cstr_x3 (toCInt x4)
qTranslator_delete :: QTranslator a -> IO ()
qTranslator_delete x0
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QTranslator_delete cobj_x0
foreign import ccall "qtc_QTranslator_delete" qtc_QTranslator_delete :: Ptr (TQTranslator a) -> IO ()
qTranslator_deleteLater :: QTranslator a -> IO ()
qTranslator_deleteLater x0
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QTranslator_deleteLater cobj_x0
foreign import ccall "qtc_QTranslator_deleteLater" qtc_QTranslator_deleteLater :: Ptr (TQTranslator a) -> IO ()
instance QchildEvent (QTranslator ()) ((QChildEvent t1)) where
childEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QTranslator_childEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QTranslator_childEvent" qtc_QTranslator_childEvent :: Ptr (TQTranslator a) -> Ptr (TQChildEvent t1) -> IO ()
instance QchildEvent (QTranslatorSc a) ((QChildEvent t1)) where
childEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QTranslator_childEvent cobj_x0 cobj_x1
instance QconnectNotify (QTranslator ()) ((String)) where
connectNotify x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QTranslator_connectNotify cobj_x0 cstr_x1
foreign import ccall "qtc_QTranslator_connectNotify" qtc_QTranslator_connectNotify :: Ptr (TQTranslator a) -> CWString -> IO ()
instance QconnectNotify (QTranslatorSc a) ((String)) where
connectNotify x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QTranslator_connectNotify cobj_x0 cstr_x1
instance QcustomEvent (QTranslator ()) ((QEvent t1)) where
customEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QTranslator_customEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QTranslator_customEvent" qtc_QTranslator_customEvent :: Ptr (TQTranslator a) -> Ptr (TQEvent t1) -> IO ()
instance QcustomEvent (QTranslatorSc a) ((QEvent t1)) where
customEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QTranslator_customEvent cobj_x0 cobj_x1
instance QdisconnectNotify (QTranslator ()) ((String)) where
disconnectNotify x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QTranslator_disconnectNotify cobj_x0 cstr_x1
foreign import ccall "qtc_QTranslator_disconnectNotify" qtc_QTranslator_disconnectNotify :: Ptr (TQTranslator a) -> CWString -> IO ()
instance QdisconnectNotify (QTranslatorSc a) ((String)) where
disconnectNotify x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QTranslator_disconnectNotify cobj_x0 cstr_x1
instance Qevent (QTranslator ()) ((QEvent t1)) where
event x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QTranslator_event_h cobj_x0 cobj_x1
foreign import ccall "qtc_QTranslator_event_h" qtc_QTranslator_event_h :: Ptr (TQTranslator a) -> Ptr (TQEvent t1) -> IO CBool
instance Qevent (QTranslatorSc a) ((QEvent t1)) where
event x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QTranslator_event_h cobj_x0 cobj_x1
instance QeventFilter (QTranslator ()) ((QObject t1, QEvent t2)) where
eventFilter x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QTranslator_eventFilter_h cobj_x0 cobj_x1 cobj_x2
foreign import ccall "qtc_QTranslator_eventFilter_h" qtc_QTranslator_eventFilter_h :: Ptr (TQTranslator a) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO CBool
instance QeventFilter (QTranslatorSc a) ((QObject t1, QEvent t2)) where
eventFilter x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QTranslator_eventFilter_h cobj_x0 cobj_x1 cobj_x2
instance Qreceivers (QTranslator ()) ((String)) where
receivers x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QTranslator_receivers cobj_x0 cstr_x1
foreign import ccall "qtc_QTranslator_receivers" qtc_QTranslator_receivers :: Ptr (TQTranslator a) -> CWString -> IO CInt
instance Qreceivers (QTranslatorSc a) ((String)) where
receivers x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QTranslator_receivers cobj_x0 cstr_x1
instance Qsender (QTranslator ()) (()) where
sender x0 ()
= withQObjectResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QTranslator_sender cobj_x0
foreign import ccall "qtc_QTranslator_sender" qtc_QTranslator_sender :: Ptr (TQTranslator a) -> IO (Ptr (TQObject ()))
instance Qsender (QTranslatorSc a) (()) where
sender x0 ()
= withQObjectResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QTranslator_sender cobj_x0
instance QtimerEvent (QTranslator ()) ((QTimerEvent t1)) where
timerEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QTranslator_timerEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QTranslator_timerEvent" qtc_QTranslator_timerEvent :: Ptr (TQTranslator a) -> Ptr (TQTimerEvent t1) -> IO ()
instance QtimerEvent (QTranslatorSc a) ((QTimerEvent t1)) where
timerEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QTranslator_timerEvent cobj_x0 cobj_x1
| uduki/hsQt | Qtc/Core/QTranslator.hs | bsd-2-clause | 11,810 | 0 | 16 | 2,023 | 3,785 | 1,939 | 1,846 | -1 | -1 |
import Data.Foldable
import Data.List
import Data.Sequence
-- | Produce @n@ successive left rotations of a sequence, each rendered
-- as an ordinary list.  Rotating zero times yields no results; the
-- sequence is assumed non-empty when @n > 0@.
rotate 0 _ = []
rotate n str = toList rotated : rotate (pred n) rotated
  where
    rotated = case viewl str of
                x :< xs -> xs |> x
-- Read one line and print all of its rotations, space separated.
tst = do
str <- getLine
putStrLn (intercalate " " $ rotate (Prelude.length str) (fromList str))
-- Read the number of test cases, then run 'tst' once per case.
main = do
tstr <- getLine
Prelude.mapM_ (const tst) [1 .. (read tstr)]
| pbl64k/HackerRank-Contests | 2014-06-20-FP/RotateString/rs.accepted.hs | bsd-2-clause | 397 | 0 | 13 | 105 | 184 | 93 | 91 | 13 | 1 |
module Eclair.Frontend
( module Eclair.Frontend.Base
, module Eclair.Frontend.Itf
) where
import Eclair.Frontend.Base
import Eclair.Frontend.Itf
| amiddelk/eclair | src/Eclair/Frontend.hs | bsd-3-clause | 152 | 0 | 5 | 21 | 34 | 23 | 11 | 5 | 0 |
------------------------------------------------------------------------
-- |
-- Module : Hyena.Config
-- Copyright : (c) Johan Tibell 2008
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : johan.tibell@gmail.com
-- Stability : experimental
-- Portability : portable
--
-- This module specifies the server configuration.
--
------------------------------------------------------------------------
module Hyena.Config
( Config(..),
configFromFlags,
defaultConfig
) where
import Control.Monad (when)
import Data.Monoid (Monoid(..))
import System.Console.GetOpt
import System.Directory (createDirectoryIfMissing, getCurrentDirectory)
import System.Environment (getArgs, getProgName)
import System.Exit (exitFailure)
import System.FilePath ((</>), dropFileName)
import System.IO (BufferMode(..), Handle, IOMode(..), hSetBuffering, openFile,
stderr)
-- ---------------------------------------------------------------------
-- Config type
-- | The server configuration.
data Config = Config
{ address :: String
-- ^ Address (hostname or IP) to bind to when listening for
-- connections.
, daemonize :: Bool
-- ^ Run in the background.
, debug :: Bool
-- ^ Print lots of debug information.
, logHandle :: Handle
-- ^ Where to dump log messages in daemon mode.
, port :: Int
-- ^ Port to bind to when listening for connections.
} deriving Show
-- | Converts a set of flags into a server configuration.  In daemon
-- mode the log file's directory is created (if missing) and the file
-- opened for appending; otherwise log output goes to stderr.  The log
-- handle is always line buffered.  Assumes every field of 'Flags' is
-- set (fromFlag is partial on NoFlag).
flagsToConfig :: Flags -> IO Config
flagsToConfig flags = do
when (flag flagDaemonize) $
createDirectoryIfMissing True $ dropFileName (flag flagLogFile)
logHandle' <- if flag flagDaemonize
then openFile (flag flagLogFile) AppendMode
else return stderr
hSetBuffering logHandle' LineBuffering
return Config
{ address = flag flagAddress
, daemonize = flag flagDaemonize
, debug = flag flagDebug
, logHandle = logHandle'
, port = flag flagPort
}
where flag field = fromFlag $ field flags
-- | Reads the server options from the command line.  Settings from
-- 'defaultConfig' are used for unspecified options.  Creates missing
-- directories as needed for the log file referred to by the @--log@
-- flag when in 'daemonize'd mode.  On a parse error the usage text is
-- printed and the process exits with failure.
configFromFlags :: IO Config
configFromFlags = do
argv <- getArgs
cwd <- getCurrentDirectory
progName <- getProgName
case parseArgs argv progName of
Left err -> putStr err >> exitFailure
Right flags -> flagsToConfig $ defaultFlags cwd `mappend` flags
-- | A set of default options most users should use.  Creates missing
-- directories as needed for the default log file when in 'daemonize'd
-- mode.
defaultConfig :: IO Config
defaultConfig = do
cwd <- getCurrentDirectory
flagsToConfig $ defaultFlags cwd
-- ---------------------------------------------------------------------
-- Flag type
-- | A Maybe-like wrapper for an optional command-line setting.
data Flag a = Flag a | NoFlag deriving Show
instance Functor Flag where
fmap f (Flag x) = Flag (f x)
fmap _ NoFlag = NoFlag
-- 'mappend' is right-biased: a later 'Flag' overrides an earlier one,
-- and 'NoFlag' is the identity.
instance Monoid (Flag a) where
mempty = NoFlag
_ `mappend` f@(Flag _) = f
f `mappend` NoFlag = f
-- | Extract the wrapped value.  Partial: calling this on 'NoFlag' is a
-- programming error (callers merge with 'defaultFlags' first).
fromFlag :: Flag a -> a
fromFlag (Flag x) = x
fromFlag NoFlag = error "fromFlag NoFlag"
-- ---------------------------------------------------------------------
-- Config flags
-- | One 'Flag' per configurable 'Config' field, so that parsed
-- command-line values can be merged over defaults monoidally.
data Flags = Flags
{ flagAddress :: Flag String
, flagDaemonize :: Flag Bool
, flagDebug :: Flag Bool
, flagLogFile :: Flag FilePath
, flagPort :: Flag Int
} deriving Show
-- | The built-in defaults; the log file lives under the given working
-- directory.
defaultFlags :: FilePath -> Flags
defaultFlags cwd =
-- NOTE: If we add a flag to change the working directory it has
-- to be taken into account here.
Flags { flagAddress = Flag "0.0.0.0"
, flagDaemonize = Flag False
, flagDebug = Flag False
, flagLogFile = Flag $ cwd </> "log/hyena.log"
, flagPort = Flag 3000
}
-- | All fields unset ('NoFlag').
emptyFlags :: Flags
emptyFlags = mempty
-- Field-wise monoid; by the 'Flag' instance, fields of the right
-- operand win when both are set.
instance Monoid Flags where
mempty = Flags
{ flagAddress = mempty
, flagDaemonize = mempty
, flagDebug = mempty
, flagLogFile = mempty
, flagPort = mempty
}
mappend a b = Flags
{ flagAddress = combine flagAddress
, flagDaemonize = combine flagDaemonize
, flagDebug = combine flagDebug
, flagLogFile = combine flagLogFile
, flagPort = combine flagPort
}
where combine field = field a `mappend` field b
-- ---------------------------------------------------------------------
-- Args parsing
-- | Converts a 'String' containing a port number to an integer.
-- Fails with an 'error' (carrying the offending input) when the string
-- is not exactly a decimal number.
flagToPort :: String -> Int
flagToPort str
  | [(port, "")] <- reads str = port
  | otherwise = error $ "--port: invalid port `" ++ str ++ "'"
-- | The command line options.  Each entry updates the corresponding
-- field of 'Flags'; values accumulate via 'mappend' (last one wins).
options :: [OptDescr (Flags -> Flags)]
options =
[Option "a" ["address"]
(reqArgFlag "ADDRESS" flagAddress
(\v flags -> flags {flagAddress = v}))
"bind to ADDRESS (hostname or IP) on localhost"
,Option "d" ["daemonize"]
(trueArg flagDaemonize (\v flags -> flags {flagDaemonize = v}))
"run in the background"
,Option "B" ["debug"]
(trueArg flagDebug (\v flags -> flags {flagDebug = v}))
"print lots of debug information"
,Option "l" ["log"]
(reqArgFlag "FILE" flagLogFile
(\v flags -> flags {flagLogFile = v}))
"dump log messages to FILE when daemonized"
,Option "p" ["port"]
(reqArg "PORT" (Flag . flagToPort)
flagPort (\v flags -> flags {flagPort = v}))
"bind to PORT on localhost"
]
-- | Parses the given command line arguments. Returns either the
-- parsed flags or a 'String' explaining the error on failure
-- (the collected GetOpt errors followed by the usage text).
parseArgs :: [String] -> String -> Either String Flags
parseArgs argv progName =
case getOpt Permute options argv of
(flags, _, []) -> Right $ foldl (flip id) emptyFlags flags
(_, _, errs) -> Left $ concat errs ++ usageInfo header options
where header = "Usage: " ++ progName ++ " [OPTION]..."
-- ---------------------------------------------------------------------
-- GetOpt helpers
-- | Build a required-argument option: the parsed value is mappended
-- onto the current field value (so the newer setting wins).
reqArg :: (Monoid a) =>
String -> (String -> a) -> (t -> a) -> (a -> t -> t1)
-> ArgDescr (t -> t1)
reqArg name mkFlag get set =
ReqArg (\v flags -> set (get flags `mappend` mkFlag v) flags) name
-- | Build a no-argument option that mappends a fixed value onto the
-- current field value.
noArg :: (Monoid a) => a -> (t -> a) -> (a -> t -> t1) -> ArgDescr (t -> t1)
noArg flag get set =
NoArg (\flags -> set (get flags `mappend` flag) flags)
-- | A no-argument option that sets a boolean flag to True.
trueArg :: (t -> Flag Bool) -> (Flag Bool -> t -> t1)
-> ArgDescr (t -> t1)
trueArg = noArg (Flag True)
-- | 'reqArg' specialised to plain string-valued flags.
reqArgFlag :: String -> (t -> Flag String) -> (Flag String -> t -> t1)
-> ArgDescr (t -> t1)
reqArgFlag name = reqArg name Flag
| tibbe/hyena | Hyena/Config.hs | bsd-3-clause | 7,273 | 0 | 12 | 2,036 | 1,661 | 916 | 745 | 133 | 2 |
{-# LANGUAGE ImplicitParams #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
-- | This module provides a strategy for certification with \CeTA\.
-- All proofs produced should be certifiable.
--
-- Example calls (assumes the standard tct-trs executable).
--
-- > tct-trs s "withCertification certify" $file
-- > tct-trs -a s --ceta -s certify $file > $file.xml
module Tct.Trs.Strategy.Certify
( certify
, certify'
, certifyDeclaration
) where
import Data.Foldable (toList)
import Tct.Core
import qualified Tct.Core.Data as T
import Tct.Trs.Data (TrsStrategy)
import qualified Tct.Trs.Data.Problem as Prob
import qualified Tct.Trs.Data.Rules as RS
import qualified Tct.Trs.Data.Signature as Sig
import Tct.Trs.Processor.Matrix.MI
import Tct.Trs.Processors hiding (matchbounds)
-- | Declaration for strategy "certify": exposes the combinator choice
-- (default 'Fastest') and the maximum degree (default 5) as optional
-- arguments.
certifyDeclaration :: T.Declaration ('[T.Argument 'Optional CombineWith, Argument 'Optional T.Nat] T.:-> TrsStrategy)
certifyDeclaration =
strategy
"certify"
( combineWithArg `optional` Fastest
, degreeArg `optional` 5)
certifyStrategy
-- | Default "certify" strategy (all optional arguments at defaults).
certify :: TrsStrategy
certify = T.deflFun certifyDeclaration
-- | > certify = certify' Fastest 5
certify' :: CombineWith -> Degree -> TrsStrategy
certify' = T.declFun certifyDeclaration
-- | Default strategy for certification with CeTA.  Dispatches on the
-- problem: innermost runtime complexity -> 'certifyRCI' (with usable
-- arguments), full runtime complexity -> 'certifyRC' (without), and
-- anything else -> 'certifyDC'.
certifyStrategy :: CombineWith -> Degree -> TrsStrategy
certifyStrategy cmb deg = withProblem k where
k prob
| isRC && isIn = let ?ua = UArgs in certifyRCI cmb deg
| isRC = let ?ua = NoUArgs in certifyRC cmb deg
| otherwise = certifyDC cmb deg
where
isRC = Prob.isRCProblem prob
isIn = Prob.isInnermostProblem prob
-- Match-bounds is only applied when all rules are left-linear.
matchbounds :: TrsStrategy
matchbounds = withProblem $ \prob ->
when (RS.isLeftLinear $ Prob.allComponents prob) (bounds PerSymbol Match .>>> empty)
-- | Shift rules with interpretations of degree l..u (l clamped to 0),
-- one degree at a time.
shifts :: (?ua :: UsableArgs) => Degree -> Degree -> TrsStrategy
shifts l u = chain [ tew (intes d) | d <- [(max 0 l) .. u] ]
-- | Candidate interpretations for a given degree, tried in parallel.
intes :: (?ua :: UsableArgs) => Degree -> TrsStrategy
intes 0 = px 0
intes 1 = mx 1 1 .<||> mx 2 1 .<||> ma 2 1
intes 2 = px 2 .<||> mx 2 2 .<||> mx 3 2 .<||> ma 3 2
intes 3 = px 3 .<||> mx 3 3 .<||> ma 3 3
intes n = mx n n
-- px: mixed polynomial; mx: matrix (triangular when deg < dim,
-- algebraic otherwise); ma: matrix with the max-automaton kind.
px :: (?ua :: UsableArgs) => Degree -> TrsStrategy
mx, ma :: (?ua :: UsableArgs) => Degree -> Degree -> TrsStrategy
px d = poly' (Mixed d) Restrict ?ua URules (Just selAny)
mx dim deg = matrix' dim deg (if deg < dim then Triangular else Algebraic) ?ua URules (Just selAny)
ma dim deg = T.processor MI
{ miKind = MaximalMatrix (MaxAutomaton $ if deg < dim then Just deg else Nothing)
, miDimension = dim
, miUArgs = ?ua
, miURules = URules
, miSelector = Just (selAny) }
-- | Combine sub-strategies either by racing them ('Fastest') or by
-- picking the best upper bound ('Best'); each must end in 'empty'.
combineWith :: CombineWith -> [TrsStrategy] -> TrsStrategy
combineWith Fastest sts = fastest [ st .>>> empty | st <- sts ]
combineWith Best sts = best cmpTimeUB [ st .>>> empty | st <- sts ]
-- | Certifiable strategy for (full) runtime complexity: race a quick
-- constant-bound attempt against match-bounds / interpretations of
-- increasing degree, introducing weak dependency pairs along the way.
certifyRC :: (?ua :: UsableArgs) => CombineWith -> Degree -> TrsStrategy
certifyRC cmb deg =
combineWith cmb
[ timeoutIn 8 trivialRC
, timeoutIn 8 matchbounds .<||> interpretations ]
where
trivialRC = shifts 0 0 .>>> dependencyPairs' WDP .>>> try usableRules .>>> shifts 0 0 .>>> empty
interpretations =
shifts 1 1
.>>! combineWith cmb
[ dependencyPairs' WDP .>>> try usableRules .>>> shifts 1 deg
, shifts 2 deg
, force (shifts 2 2)
.>>! combineWith cmb
[ dependencyPairs' WDP .>>> try usableRules .>>> shifts 1 deg
, shifts 3 deg ]
]
-- | Certifiable strategy for innermost runtime complexity.  Like
-- 'certifyRC' but may use dependency tuples, and only applies weak
-- dependency pairs when no rules were removed beforehand (the defined
-- symbols check below).
certifyRCI :: (?ua :: UsableArgs) => CombineWith -> Degree -> TrsStrategy
certifyRCI cmb deg =
withProblem $ \p ->
try innermostRuleRemoval
.>>! combineWith cmb
[ timeoutIn 20 trivialRCI
, timeoutIn 20 matchbounds .<||> interpretations p ]
where
trivialRCI = shifts 0 0 .>>> dependencyTuples .>>> try usableRules .>>> shifts 0 0
interpretations p =
shifts 1 1
.>>!
alternative
[ combineWith cmb
[ dt .>>> try usableRules .>>> shifts 1 deg
, wdp p .>>> try usableRules .>>> shifts 1 deg
, shifts 2 deg ]
, force (shifts 2 2)
.>>! combineWith cmb
[ dt .>>> try usableRules .>>> shifts 1 deg
, wdp p .>>> try usableRules .>>> shifts 1 deg
, shifts 3 deg ] ]
dt = dependencyTuples
-- WDP is only sound here if the defined symbols are unchanged
-- between the original problem p1 and the current problem p2.
wdp p1 = withProblem $ \p2 ->
if Sig.defineds (Prob.signature p1) == Sig.defineds (RS.signature (Prob.allComponents p2))
then dependencyPairs' WDP
else abort
-- | Certifiable strategy for derivational complexity: race
-- match-bounds, a dedicated SRS strategy, and two interpretation
-- pipelines ('mxf' fast, 'mxs' precise) up to the requested degree.
certifyDC :: CombineWith -> Degree -> TrsStrategy
certifyDC cmb degree =
try innermostRuleRemoval
.>>! combineWith cmb
[ matchbounds
, srs
, interpretations degree mxf
, interpretations degree mxs ]
where
-- A problem is a string rewrite system iff every symbol is unary.
isSRS prob = all (\sym -> Sig.arity sig sym == 1) (toList $ Sig.symbols sig) where sig = Prob.signature prob
whenSRS st = withProblem $ \prob -> when (isSRS prob) st
-- Run a sub-strategy with minismt configured for the given
-- input/output bit widths.
withMini :: Int -> Int -> TrsStrategy -> TrsStrategy
withMini ib ob = withKvPair ("solver", ["minismt", "-m", "-v2", "-neg", "-ib", show ib, "-ob", show ob])
mi dim kind =
T.processor MI
{ miKind = kind
, miDimension = dim
, miUArgs = NoUArgs
, miURules = NoURules
, miSelector = Just (selAny) }
mxu dim deg = mi dim $ MaximalMatrix (UpperTriangular $ Multiplicity $ if deg < dim then Just deg else Nothing)
mxl dim deg = mi dim $ MaximalMatrix (LowerTriangular $ Multiplicity $ if deg < dim then Just deg else Nothing)
mxa dim deg = mi dim $ MaximalMatrix (MaxAutomaton $ if deg < dim then Just deg else Nothing)
interpretations u st = chain [ tew (st n) | n <- [1 .. min u degree] ]
-- SRS strategy
-- try low dimension with high bits
-- try high dimension with low bits
srs = whenSRS $ fastest
[ withMini 8 10 (tew (mxu 1 1)) .>>> empty
, chain [ tew (withMini 1 2 $ mxf n) | n <- [1.. min 4 degree] ] .>>> empty ]
-- fast strategy
-- rule shifting using triangular and EDA with implicit bounds
mxf :: Int -> TrsStrategy
mxf 0 = mxu 1 1
mxf 1 = mxu 1 1
mxf 2 = mxu 2 2 .<||> mxl 2 2
mxf 3 = mxu 3 3 .<||> mxa 3 3 .<||> withMini 1 2 (mxu 3 3 .<||> mxa 3 3 )
mxf n = withMini 1 2 (mxu n n .<||> mxa n n )
-- precise strategy
-- strategy with increasing bound
mxs :: Int -> TrsStrategy
mxs 0 = mxu 1 1
mxs 1 = mxu 1 1 .<||> mxu 2 1 .<||> mxl 2 1 .<||> mxu 3 1 .<||> mxa 3 1
mxs 2 = mxu 2 2 .<||> mxl 2 2 .<||> mxu 3 2 .<||> mxa 3 2
mxs 3 = mxu 3 3 .<||> mxl 3 3 .<||> mxu 4 3 .<||> mxa 4 3 .<||> withMini 1 2 (mxu 4 3 .<||> mxa 4 3)
mxs n = withMini 1 2 $ mxu n n .<||> mxa n n
| ComputationWithBoundedResources/tct-trs | src/Tct/Trs/Strategy/Certify.hs | bsd-3-clause | 7,112 | 0 | 18 | 2,115 | 2,292 | 1,177 | 1,115 | -1 | -1 |
module Warnings where
import qualified Types as T
-- | Warnings produced by the checker.  The String payloads presumably
-- name the offending item (confirm against the producing code);
-- 'StmtExp' carries the expression that appears in statement position.
data Warning = Unencapsulated String
| Unprotected String
| MissingDocstring String
| StmtExp T.Exp
deriving (Eq, Ord, Show)
| bergmark/mmdoc | src/Warnings.hs | bsd-3-clause | 218 | 0 | 7 | 69 | 51 | 31 | 20 | 7 | 0 |
module Tone where
import qualified Scene
import qualified Data.Vect as Vect
import Data.Word (Word8)
-- | Illuminance values are plain Floats.
type Illuminance = Float
-- |Tone Reproduction functions operate on the buffer of collected radiances
type ToneReproduce = [Scene.ColorF] -> [(Word8, Word8, Word8)]
-- |Maximum output luminance of target device
targetLdmax :: Float
targetLdmax = 300
-- |Quick and dirty approximation to pixel illuminance given R,G,B
-- (weighted sum with weights 0.27/0.67/0.06).
pixIllum :: Scene.ColorF -> Illuminance
pixIllum (r, g, b) = 0.27 * r + 0.67 * g + 0.06 * b
-- |Utility function mapping functions over ColorFs
-- (applies f to each of the three channels).
colorMap :: (Float -> Float) -> Scene.ColorF -> Scene.ColorF
colorMap f (r, g, b) = (f r, f g, f b)
-- | Map an RGB triple from [0,1] onto byte values [0,255].  Any
-- component above 1 is clamped to 1 before scaling; the scaled value
-- is truncated, not rounded.
colorFToWords :: Scene.ColorF -> (Word8, Word8, Word8)
colorFToWords (r, g, b) = (quantize r, quantize g, quantize b)
  where
    quantize channel = truncate (min 1 channel * 255)
-- |Device model for a simple actual device with a maximum output of ldmax
-- and a gamma of 1 with standard sRGB color space.
-- Simply divides every channel by ldmax.
simpleDevice :: Illuminance -> Scene.ColorF -> Scene.ColorF
simpleDevice ldmax = colorMap (/ldmax)
-- | Ward's perceptual-based tone reproduction scale factor: a function
-- of the display's maximum luminance and the adaptation luminance.
wardScaleFactor :: Illuminance -> Illuminance -> Float
wardScaleFactor ldmax lwa = ratio ** 2.5
  where
    ratio = (1.219 + (ldmax / 2) ** 0.4) / (1.219 + lwa ** 0.4)
-- | Log-average of a list of values: exp of the arithmetic mean of the
-- logs.  'delta' is added to every element first so that zeros do not
-- send 'log' to negative infinity.
logAvg :: Float -> [Float] -> Float
logAvg delta values = exp (recip count * logTotal)
  where
    logTotal = sum [log (delta + v) | v <- values]
    count = fromIntegral (length values)
-- |Generates a tone reproduction operator for Ward's model given a
-- target ldmax.  The adaptation luminance is the log-average (with
-- delta 0.01) of the per-pixel illuminances; every radiance is scaled
-- by Ward's factor, mapped through the device model, then quantized.
wardTR :: Illuminance -> ToneReproduce
wardTR ldmax radiances = map ((colorFToWords).
(simpleDevice ldmax).
(colorMap (*sf))) radiances
where
sf = wardScaleFactor ldmax lwa
lwa = logAvg 0.01 (map pixIllum radiances)
-- |Generates a tone reproduction operator for Reinhard's model given
-- only a target ldmax, using the default zone (V -- a=0.18)
reinhardTR :: Illuminance -> ToneReproduce
reinhardTR = reinhardTR' 0.18 (logAvg 0.01)
-- |Generates a tone reproduction operator for Reinhard's model given
-- a zone constant, a key function, and a target ldmax.  Each pixel is
-- scaled by a/key, passed through the film-response curve v/(1+v),
-- rescaled by ldmax, mapped through the device model and quantized.
reinhardTR' :: Float -> ([Illuminance] -> Illuminance) -> Illuminance
-> ToneReproduce
reinhardTR' a keyfunc ldmax radiances
= map ((colorFToWords).(simpleDevice ldmax).(reinhard key a)) radiances
where
key = keyfunc (map pixIllum radiances)
reinhard key a = colorMap ((*ldmax).filmresponse.(*(a/key)))
filmresponse v = v / (1+v)
| notabotanist/fuzzy-octo-adventure | Tone.hs | bsd-3-clause | 2,755 | 6 | 13 | 550 | 705 | 396 | 309 | 38 | 1 |
{-# LANGUAGE RecordWildCards #-}
module Main where
import Synth.SGD
import qualified Settings as S
import Types.Filter
import Types.PrettyFilter
import Types.DSPNode
import Data.Tree
import qualified Data.Map.Strict as M
import System.Random
----------
--
-- Test the stochastic gradient descent implementation and its components
--
----------
-- | Run gradient descent twice on a trivial one-node filter: once over
-- a single parameter and once over all parameters.  Each run must
-- drive the final cost below 0.2, otherwise the test aborts via
-- 'error'.
main :: IO ()
main = do
rGen <- getStdGen
(_,c1,_) <- multiVarSGD
S.defaultOptions
simpleCost
rGen
M.empty
([head $ extractThetaUpdaters initFilter])
initFilter
(_,c2,_) <- multiVarSGD
S.defaultOptions
simpleCost2
rGen
M.empty
(extractThetaUpdaters initFilter)
initFilter
if c1 < 0.2
then putStrLn "PASSED GD in 1 dimension" >> return ()
else error "FAILED GD in 1 dimension"
if c2 < 0.2
then putStrLn "PASSED GD in 2 dimensions" >> return ()
else error "FAILED GD in 2 dimensions"
return ()
-- A single low-pass-filter node with both parameters set to 1.
initFilter = toInternalFilter $ Node_p $ LPF 1 1
-- | Cost in one dimension: absolute value of the root node's second
-- parameter, so the minimum is at parameter value 0.
simpleCost :: Filter -> IO Double
simpleCost f =
return $ abs $ snd $ head $ getParams $ nodeContent $ rootLabel f
-- | Cost over all dimensions: absolute value of the sum of all root
-- node parameters.
simpleCost2 :: Filter -> IO Double
simpleCost2 f =
return $ abs $ sum $ map snd $ getParams $ nodeContent $ rootLabel f
| aedanlombardo/HaskellPS | DSP-PBE/Tests/GradientDescent.hs | bsd-3-clause | 1,306 | 0 | 12 | 361 | 345 | 181 | 164 | 41 | 3 |
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
-----------------------------------------------------------------------------
-- |
-- Module : Numeric.Signal.Internal
-- Copyright : (c) Alexander Vivian Hugh McPhail 2010, 2014, 2015, 2016
-- License : BSD3
--
-- Maintainer : haskell.vivian.mcphail <at> gmail <dot> com
-- Stability : provisional
-- Portability : uses FFI
--
-- low-level interface
--
-----------------------------------------------------------------------------
module Numeric.Signal.Internal (
Convolvable(..),
Filterable(..),
freqz,
pwelch,
hilbert
) where
import Numeric.LinearAlgebra
import Numeric.LinearAlgebra.Devel
--import Numeric.LinearAlgebra.Linear
import qualified Numeric.GSL.Fourier as F
import Foreign
--import Data.Complex
import Foreign.C.Types
import Prelude hiding(filter)
import System.IO.Unsafe(unsafePerformIO)
-----------------------------------------------------------------------------
-- (#) chains arguments onto hmatrix's 'applyRaw' for FFI calls.
infixr 1 #
a # b = applyRaw a b
{-# INLINE (#) #-}
-----------------------------------------------------------------------------
-- Shorthand pointer types used in the foreign import signatures below.
type PD = Ptr Double
type PC = Ptr (Complex Double)
type PF = Ptr Float
-----------------------------------------------------------------------------
class Convolvable a where
-- | convolve two containers, output is the size of the second argument, no zero padding
convolve :: a -> a -> a
-----------------------------------------------------------------------------
-- | Element types that support the signal-processing primitives below;
-- instances exist for Double and Float, dispatching to the matching
-- C routines.
class (Storable a, Container Vector a, Num (Vector a)
, Convert a, Floating (Vector a), RealElement a
, Num a)
=> Filterable a where
-- | convert from Vector Double
fromDouble :: Vector Double -> Vector a
-- b ~ ComplexOf a, Container Vector b, Convert b) => Filterable a where
-- | filter a signal
filter_ :: Vector a -- ^ zero coefficients
-> Vector a -- ^ pole coefficients
-> Vector a -- ^ input signal
-> Vector a -- ^ output signal
-- | coefficients of a Hamming window
hamming_ :: Int -- ^ length
-> Vector a -- ^ the Hamming coeffficents
-- | the complex power : real $ v * (conj v)
complex_power_ :: Vector (Complex Double) -- ^ input
-> Vector a -- ^ output
-- | resample, take one sample every n samples in the original
downsample_ :: Int -> Vector a -> Vector a
-- | the difference between consecutive elements of a vector
deriv_ :: Vector a -> Vector a
-- | unwrap the phase of signal (input expected to be within (-pi,pi)
unwrap_ :: Vector a -> Vector a
-- | evaluate a real coefficient polynomial for complex arguments
polyEval_ :: Vector a -- ^ the real coefficients
-> Vector (Complex Double) -- ^ the points at which to be evaluated
-> Vector (Complex Double) -- ^ the values
-- | the cross covariance of two signals
cross_covariance_ :: Int -- ^ maximum delay
-> Vector a -- ^ time series
-> Vector a -- ^ time series
-> (a,a,Vector a) -- ^ (sd_x,sd_y,cross_covariance)
-- | the cumulative sum of a signal
cumulative_sum_ :: Vector a -- ^ time series
-> Vector a -- ^ result
-----------------------------------------------------------------------------
-- Convolution via the convolution theorem: multiply FFTs, then invert.
-- The C-based alternatives below are kept but currently unused.
instance Convolvable (Vector Double) where
convolve x y = fst $ fromComplex $ F.ifft $ (F.fft (complex x) * F.fft (complex y))
-- convolve = convolve_vector_double
convolve_vector_double c a = unsafePerformIO $ do
r <- createVector (size a)
(c # a # r # id) signal_vector_double_convolve #| "signalDoubleConvolve"
return r
foreign import ccall "signal-aux.h vector_double_convolve" signal_vector_double_convolve :: CInt -> PD -> CInt -> PD -> CInt -> PD -> IO CInt
-- Float version: promote to Double, convolve via FFT, demote.
instance Convolvable (Vector Float) where
convolve x y = single $ fst $ fromComplex $ F.ifft $ (F.fft (complex $ double x) * F.fft (complex $ double y))
-- convolve = convolve_vector_double
convolve_vector_float c a = unsafePerformIO $ do
r <- createVector (size a)
(c # a # r # id ) signal_vector_float_convolve #| "signalFloatConvolve"
return r
foreign import ccall "signal-aux.h vector_float_convolve" signal_vector_float_convolve :: CInt -> PF -> CInt -> PF -> CInt -> PF -> IO CInt
-----------------------------------------------------------------------------
instance Convolvable (Vector (Complex Double)) where
convolve x y = F.ifft $ (F.fft x * F.fft y)
-- convolve = convolve_vector_complex
convolve_vector_complex c a = unsafePerformIO $ do
r <- createVector (size a)
(c # a # r # id) signal_vector_complex_convolve #| "signalComplexConvolve"
return r
foreign import ccall "signal-aux.h vector_complex_convolve" signal_vector_complex_convolve :: CInt -> PC -> CInt -> PC -> CInt -> PC -> IO CInt
instance Convolvable (Vector (Complex Float)) where
convolve x y = single $ F.ifft $ (F.fft (double x) * F.fft (double y))
-----------------------------------------------------------------------------
-- Dispatch each Filterable method to its Double-specialised worker.
instance Filterable Double where
fromDouble = id
filter_ = filterD
hamming_ = hammingD
complex_power_ = complex_powerD
downsample_ = downsampleD
deriv_ = derivD
unwrap_ = unwrapD
polyEval_ = polyEval
cross_covariance_ = crossCovarianceD
cumulative_sum_ = cumSumD
-- Float instance: same workers' Float variants; polyEval_ promotes the
-- coefficients to Double first.
instance Filterable Float where
fromDouble = single
filter_ = filterF
hamming_ = hammingF
complex_power_ = complex_powerF
downsample_ = downsampleF
deriv_ = derivF
unwrap_ = unwrapF
polyEval_ c = polyEval (double c)
cross_covariance_ = crossCovarianceF
cumulative_sum_ = cumSumF
-----------------------------------------------------------------------------
-- | filters the signal
-- (allocates the output vector, then lets the C routine fill it;
-- wrapped in unsafePerformIO, so the C code must be pure).
filterD :: Vector Double -- ^ zero coefficients
-> Vector Double -- ^ pole coefficients
-> Vector Double -- ^ input signal
-> Vector Double -- ^ output signal
filterD l k v = unsafePerformIO $ do
r <- createVector (size v)
(l # k # v # r # id) signal_filter_double #| "signalFilter"
return r
foreign import ccall "signal-aux.h filter_double" signal_filter_double :: CInt -> PD -> CInt -> PD -> CInt -> PD -> CInt -> PD -> IO CInt
-- | filters the signal (Float variant of 'filterD')
filterF :: Vector Float -- ^ zero coefficients
-> Vector Float -- ^ pole coefficients
-> Vector Float -- ^ input signal
-> Vector Float -- ^ output signal
filterF l k v = unsafePerformIO $ do
r <- createVector (size v)
(l # k # v # r # id) signal_filter_float #| "signalFilter"
return r
foreign import ccall "signal-aux.h filter_float" signal_filter_float :: CInt -> PF -> CInt -> PF -> CInt -> PF -> CInt -> PF -> IO CInt
-----------------------------------------------------------------------------
-- | Hilbert transform with original vector as real value, transformed as imaginary
-- (the C routine updates the complex buffer in place).
hilbert :: Vector Double -> Vector (Complex Double)
hilbert v = unsafePerformIO $ do
let r = complex v
-- could use (complex v) to make a complex vector in haskell rather than C
(r # id) signal_hilbert #| "hilbert"
return r
foreign import ccall "signal-aux.h hilbert" signal_hilbert :: CInt -> PC -> IO CInt
-----------------------------------------------------------------------------
-- | Welch (1967) power spectrum density using periodogram/FFT method;
-- the result has (w `div` 2) + 1 bins.
pwelch :: Int -- ^ window size (multiple of 2)
-> Vector Double -- ^ input signal
-> Vector Double -- ^ power density
pwelch w v = unsafePerformIO $ do
let r = konst 0.0 ((w `div` 2) + 1)
(complex v # r # id) (signal_pwelch $ fromIntegral w) #| "pwelch"
return r
foreign import ccall "signal-aux.h pwelch" signal_pwelch :: CInt -> CInt -> PC -> CInt -> PD -> IO CInt
-----------------------------------------------------------------------------
-- | coefficients of a Hamming window
-- (length 1 is special-cased to a single 1.0).
hammingD :: Int -- ^ length
-> Vector Double -- ^ the Hamming coeffficents
hammingD l
| l == 1 = konst 1.0 1
| otherwise = unsafePerformIO $ do
r <- createVector l
(r # id) signal_hamming_double #| "Hamming"
return r
foreign import ccall "signal-aux.h hamming_double" signal_hamming_double :: CInt -> PD -> IO CInt
-- | coefficients of a Hamming window (Float variant)
hammingF :: Int -- ^ length
-> Vector Float -- ^ the Hamming coeffficents
hammingF l
| l == 1 = konst 1.0 1
| otherwise = unsafePerformIO $ do
r <- createVector l
(r # id) signal_hamming_float #| "Hamming"
return r
foreign import ccall "signal-aux.h hamming_float" signal_hamming_float :: CInt -> PF -> IO CInt
-----------------------------------------------------------------------------
-- | determine the frequency response of a filter
{-freqz :: (Filterable a, Storable a, Container Vector a, Convert a, RealElement a,
DoubleOf a ~ DoubleOf (RealOf b), RealElement c, c ~ DoubleOf a, c ~ DoubleOf (RealOf b),
b ~ Complex a, b ~ ComplexOf a, Convert b, Container Vector b,
Container Vector c, Convert c, b ~ ComplexOf a, b ~ ComplexOf c)
⇒ Vector a -- ^ zero coefficients
-> Vector a -- ^ pole coefficients
-> Vector a -- ^ points (between 0 and 2*pi)
-> Vector a -- ^ response
-}
-- freqz: evaluate numerator and denominator polynomials on the unit
-- circle (e^{iw}) and return the power of their quotient.
freqz :: (Filterable a, Complex Double ~ ComplexOf (DoubleOf a)
,Filterable (DoubleOf a)) =>
Vector a -- ^ zero coefficients
-> Vector a -- ^ pole coefficients
-> Vector a -- ^ points (between 0 and 2*pi)
-> Vector a -- ^ response
freqz b a w = let k = max (size b) (size a)
hb = polyEval_ (postpad b k) (exp (scale (0 :+ 1) ((complex $ double w))))
ha = polyEval_ (postpad a k) (exp (scale (0 :+ 1) ((complex $ double w))))
in complex_power_ (hb / ha)
-- Zero-pad a vector on the right up to length n (no-op if already long enough).
postpad v n = let d = size v
in if d < n then vjoin [v,(konst 0.0 (n-d))]
else v
-----------------------------------------------------------------------------
-- | evaluate a real coefficient polynomial for complex arguments
polyEval :: Vector Double -- ^ the real coefficients
-> Vector (Complex Double) -- ^ the points at which to be evaluated
-> Vector (Complex Double) -- ^ the values
polyEval c z = unsafePerformIO $ do
r <- createVector (size z)
(c # z # r # id) signal_real_poly_complex_eval #| "polyEval"
return r
foreign import ccall "signal-aux.h real_poly_complex_eval" signal_real_poly_complex_eval :: CInt -> PD -> CInt -> PC -> CInt -> PC -> IO CInt
-----------------------------------------------------------------------------
-- | the complex power : real $ v * (conj v)
complex_powerD :: Vector (Complex Double) -- ^ input
-> Vector Double -- ^ output
complex_powerD v = unsafePerformIO $ do
r <- createVector (size v)
(v # r # id) signal_complex_power_double #| "complex_power"
return r
foreign import ccall "signal-aux.h complex_power_double" signal_complex_power_double :: CInt -> PC -> CInt -> PD -> IO CInt
-- | the complex power : real $ v * (conj v) (Float-valued result)
complex_powerF :: Vector (Complex Double) -- ^ input
-> Vector Float -- ^ output
complex_powerF v = unsafePerformIO $ do
r <- createVector (size v)
(v # r # id) signal_complex_power_float #| "complex_power"
return r
foreign import ccall "signal-aux.h complex_power_float" signal_complex_power_float :: CInt -> PC -> CInt -> PF -> IO CInt
-----------------------------------------------------------------------------
-- | resample, take one sample every n samples in the original
-- (output length is size v `div` n)
downsampleD :: Int -> Vector Double -> Vector Double
downsampleD n v = unsafePerformIO $ do
r <- createVector (size v `div` n)
(v # r # id) (signal_downsample_double $ fromIntegral n) #| "downsample"
return r
foreign import ccall "signal-aux.h downsample_double" signal_downsample_double :: CInt -> CInt -> PD -> CInt -> PD -> IO CInt
-- | resample, take one sample every n samples in the original
downsampleF :: Int -> Vector Float -> Vector Float
downsampleF n v = unsafePerformIO $ do
r <- createVector (size v `div` n)
(v # r # id) (signal_downsample_float $ fromIntegral n) #| "downsample"
return r
foreign import ccall "signal-aux.h downsample_float" signal_downsample_float :: CInt -> CInt -> PF -> CInt -> PF -> IO CInt
-----------------------------------------------------------------------------
-- | the difference between consecutive elements of a vector
-- (output is one element shorter than the input)
derivD :: Vector Double -> Vector Double
derivD v = unsafePerformIO $ do
r <- createVector (size v - 1)
(v # r # id) (signal_diff_double) #| "diff"
return r
foreign import ccall "signal-aux.h vector_diff_double" signal_diff_double :: CInt -> PD -> CInt -> PD -> IO CInt
-- | the difference between consecutive elements of a vector
derivF :: Vector Float -> Vector Float
derivF v = unsafePerformIO $ do
r <- createVector (size v - 1)
(v # r # id) (signal_diff_float) #| "diff"
return r
foreign import ccall "signal-aux.h vector_diff_float" signal_diff_float :: CInt -> PF -> CInt -> PF -> IO CInt
-----------------------------------------------------------------------------
-- | unwrap the phase of signal (input expected to be within (-pi,pi)
unwrapD :: Vector Double -> Vector Double
unwrapD v = unsafePerformIO $ do
r <- createVector $ size v
(v # r # id) signal_unwrap_double #| "unwrap"
return r
foreign import ccall "signal-aux.h unwrap_double" signal_unwrap_double :: CInt -> PD -> CInt -> PD -> IO CInt
-- | unwrap the phase of signal (input expected to be within (-pi,pi)
unwrapF :: Vector Float -> Vector Float
unwrapF v = unsafePerformIO $ do
r <- createVector $ size v
(v # r # id) signal_unwrap_float #| "unwrap"
return r
foreign import ccall "signal-aux.h unwrap_float" signal_unwrap_float :: CInt -> PF -> CInt -> PF -> IO CInt
-----------------------------------------------------------------------------
-- | compute the cross covariance of two signals
-- (returns the two standard deviations written by the C routine via
-- the alloca'd out-parameters, plus a 2*l element covariance vector)
crossCovarianceD :: Int -> Vector Double -> Vector Double -> (Double,Double,Vector Double)
crossCovarianceD l x y = unsafePerformIO $ do
r <- createVector (2*l)
alloca $ \sx ->
alloca $ \sy -> do
(x # y # r # id) (signal_cross_covariance_double (fromIntegral l) sx sy) #| "cross_covariance"
sx' <- peek sx
sy' <- peek sy
return (sx',sy',r)
foreign import ccall "signal-aux.h cross_covariance_double"
signal_cross_covariance_double :: CInt -> PD -> PD -> CInt -> PD -> CInt
-> PD -> CInt -> PD -> IO CInt
-- | compute the cross covariance of two signals (Float variant)
crossCovarianceF :: Int -> Vector Float -> Vector Float -> (Float,Float,Vector Float)
crossCovarianceF l x y = unsafePerformIO $ do
r <- createVector (2*l)
alloca $ \sx ->
alloca $ \sy -> do
(x # y # r # id) (signal_cross_covariance_float (fromIntegral l) sx sy) #| "cross_covariance"
sx' <- peek sx
sy' <- peek sy
return (sx',sy',r)
foreign import ccall "signal-aux.h cross_covariance_float"
signal_cross_covariance_float :: CInt -> PF -> PF -> CInt -> PF -> CInt
-> PF -> CInt -> PF -> IO CInt
-----------------------------------------------------------------------------
-- | cumulative sum of a signal (C implementation)
cumSumD :: Vector Double -> Vector Double
cumSumD v = unsafePerformIO $ do
r <- createVector (size v)
(v # r # id) signal_cum_sum_double #| "cumSumD"
return r
-- | cumulative sum of a signal (Float variant)
cumSumF :: Vector Float -> Vector Float
cumSumF v = unsafePerformIO $ do
r <- createVector (size v)
(v # r # id) signal_cum_sum_float #| "cumSumF"
return r
foreign import ccall "signal-aux.h cum_sum_double"
signal_cum_sum_double :: CInt -> PD -> CInt -> PD -> IO CInt
foreign import ccall "signal-aux.h cum_sum_float"
signal_cum_sum_float :: CInt -> PF -> CInt -> PF -> IO CInt
-----------------------------------------------------------------------------
| amcphail/hsignal | lib/Numeric/Signal/Internal.hs | bsd-3-clause | 17,501 | 0 | 19 | 5,007 | 3,905 | 1,977 | 1,928 | 253 | 2 |
module Manager where
import Control.Monad.Error
import Control.Monad.Reader
import qualified System.Directory as Dir
import System.FilePath ((</>))
-- | The monad in which package-manager operations run: they may fail with a
-- String error message, read an 'Environment', and perform IO.
type Manager =
  ErrorT String (ReaderT Environment IO)
-- | Execute a 'Manager' action with the given environment, yielding either
-- an error message or the result.
run :: Environment -> Manager a -> IO (Either String a)
run env action =
  let withErrors = runErrorT action
  in runReaderT withErrors env
-- | Static configuration for all package-manager operations.
data Environment = Environment
  { catalog :: String           -- ^ base URL of the package catalog server
  , cacheDirectory :: FilePath  -- ^ local directory used as the package cache
  }
-- | The default environment: the hard-coded catalog server together with
-- the per-user cache directory (created on demand).
defaultEnvironment :: IO Environment
defaultEnvironment =
  fmap (Environment "http://45.55.164.161:8000") getCacheDirectory
-- | The per-user package cache directory (@~\/.elm\/package@ or the platform
-- equivalent), created if it does not yet exist.
getCacheDirectory :: IO FilePath
getCacheDirectory = do
  appDir <- Dir.getAppUserDataDirectory "elm"
  let packageDir = appDir </> "package"
  Dir.createDirectoryIfMissing True packageDir
  return packageDir
| rtfeldman/elm-package | src/Manager.hs | bsd-3-clause | 819 | 0 | 10 | 164 | 212 | 111 | 101 | 23 | 1 |
import Misc
import Formulas
import Workflows
import UMap
import Data.Set (Set,empty,elems,singleton,insert,toList,union,foldl,null)
import Data.List
-- Two concurrent workflows are equal if their lists of sequential workflows
-- are equal componentwise (NOT up to permutation; see TODO below).
instance Eq ParWf where
  Par ws1 == Par ws2 = ws1==ws2 --permut xs ys -- TODO: I'm not sure whether permut can cause problems because we rely to a certain degree on the ordering of workflows.
  -- This makes only a difference if (partly) identical workflows are running. Is this likely?
-- | Delay all initial actions of the sequential workflows at indexes i..t
-- that are mutually independent from the guarded actions ga and gb.
-- Preconditions (unchecked):
--   * i >= 0 and t < length ws
--   * every wj |= Skip
--   * no gj1, gj2 is delayed for i <= j <= t
indepIter :: ParWf -> GuardedAction -> GuardedAction -> Int -> Int -> ParWf
indepIter (Par ws) ga gb i t
  | i > t     = Par ws
  | otherwise = indepIter (Par (listUpdate ws i wNew)) ga gb (i+1) t
  where
    wi  = ws !! i
    gi1 = sel_g1 wi
    ai1 = sel_a1 wi
    wi1 = sel_w1 wi
    gi2 = sel_g2 wi
    ai2 = sel_a2 wi
    wi2 = sel_w2 wi
    -- Delay both initial branches only if they are independent from (ga,gb).
    wNew
      | indepActions ((gi1,ai1),(gi2,ai2)) (ga,gb)
        = Choice (Delayed (get gi1),ai1) wi1
                 (Delayed (get gi2),ai2) wi2
      | otherwise = wi
-- calculate preconditions of all concurrent workflows that are constructed by replacing the ith sequential
-- workflow wi by w1(wi) and w2(wi) respectively for all indexes up to t and save them in the given
-- lookup table, as a second result calculate the corresponding subformulas;
-- it is assumed that i>=0 and t<length ws;
-- it is assumed that for all gi1, gi2: gi1 is delayed iff gi2 is delayed
preIter :: ParWf -> Int -> Int -> UMap ParWf Formula -> (UMap ParWf Formula, Set Formula)
preIter (Par ws) i t lut =
  if i > t then (lut,Data.Set.empty)
  else let
         -- decompose the i-th sequential workflow into its two guarded branches
         wi  = ws!!i
         gi1 = sel_g1(wi)
         ai1 = sel_a1(wi)
         wi1 = sel_w1(wi)
         gi2 = sel_g2(wi)
         ai2 = sel_a2(wi)
         wi2 = sel_w2(wi)
         -- successor workflows after taking branch 1 resp. branch 2 of wi;
         -- independent initial actions of the earlier workflows are delayed
         -- TODO: This could be done more intelligently!
         par1 = unskip (indepIter (undelay (Par (listUpdate ws i wi1))) (gi1,ai1) (gi2,ai2) 0 (i-1))
         par2 = unskip (indepIter (undelay (Par (listUpdate ws i wi2))) (gi1,ai1) (gi2,ai2) 0 (i-1))
         -- recursively compute preconditions only for branches that can fire
         -- (guard not "false" and not delayed); otherwise the table entry is
         -- never looked up below
         lut1 = if gi1 == natom "false" || delayed gi1 then lut
                else pre par1 lut
         lut2 = if gi2 == natom "false" || delayed gi2 then lut1
                else pre par2 lut1
         -- the local verification condition for the i-th workflow;
         -- Atom "itrue" marks conditions that are trivially true and can be dropped
         loc_cond = if delayed gi1 && delayed gi2 then Atom "itrue" else
                    if gi1 == natom "false" && gi2 == natom "false" then Atom "false" else
                    if gi1 == natom "false" then if lut2!par2 == Atom "itrue" then Atom "itrue"
                                                 else And (get gi2) (Wp ai2 (lut2!par2)) else
                    if gi2 == natom "false" then if lut2!par1 == Atom "itrue" then Atom "itrue"
                                                 else And (get gi1) (Wp ai1 (lut2!par1)) else
                    if lut2!par1 == Atom "itrue" || lut2!par2 == Atom "itrue" then
                      if lut2!par1 == Atom "itrue" && lut2!par2 == Atom "itrue" then Atom "itrue" else -- gi1 --> wp(ai1,T) /\ g2 --> wp(ai2,T)
                                                                                                      -- /\ (gi1 \/ gi2) is implied by another path
                      if lut2!par1 == Atom "itrue" then Imp (get gi2) (Wp ai2 (lut2!par2)) else -- gi1 --> wp(ai1,T) /\ (gi1 \/ gi2) is implied by another path
                      Imp (get gi1) (Wp ai1 (lut2!par1)) -- gi2 --> wp(ai2,T) /\ (gi1 \/ gi2) is implied by another path
                    else
                      Andn
                        (Andn (Imp (get gi1) (Wp ai1 (lut2!par1)))
                              (Imp (get gi2) (Wp ai2 (lut2!par2))))
                        (Or (get gi1) (get gi2)) -- if there is some delayed workflow in par1 (or par2), we would actually not need to include the check (gi1 \/ gi2)
         -- continue with the remaining workflows and collect their conditions
         (lutn,fs) = preIter (Par ws) (i+1) t lut2
         fsn = if loc_cond == Atom "itrue" then fs else Data.Set.insert loc_cond fs
       in (lutn,fsn)
-- | Prioritise the actions of the i-th sequential workflow and extend the
-- look-up table with the precondition of the resulting concurrent workflow.
-- Preconditions (unchecked):
--   * 0 <= i < length ws
--   * every wj |= Skip
--   * gi1 and gi2 are not delayed
prioritise :: ParWf -> Int -> UMap ParWf Formula -> UMap ParWf Formula
prioritise (Par ws) i lut =
  let -- decompose the i-th sequential workflow into its two guarded branches
      wi  = ws!!i
      gi1 = sel_g1(wi)
      ai1 = sel_a1(wi)
      wi1 = sel_w1(wi)
      gi2 = sel_g2(wi)
      ai2 = sel_a2(wi)
      wi2 = sel_w2(wi)
      -- successor workflows after taking branch 1 resp. branch 2
      -- TODO: This could be done more intelligently!
      par1 = unskip (undelay (Par (listUpdate ws i wi1)))
      par2 = unskip (undelay (Par (listUpdate ws i wi2)))
      -- only compute the precondition of a branch whose guard can hold;
      -- a "false"-guarded branch's entry is never looked up below
      lut1 = if gi1 == natom "false" then lut else pre par1 lut
      -- BUG FIX: the original tested gi1 here, mirroring lut1.  With
      -- gi1 == natom "false" and gi2 /= natom "false", par2's entry was
      -- then never computed although 'form' looks up (lutn!par2).
      -- The second branch must be guarded by gi2, exactly as in 'preIter'.
      lutn = if gi2 == natom "false" then lut1 else pre par2 lut1
      form = if gi1 == natom "false" && gi2 == natom "false" then Atom "false" else
             if gi1 == natom "false" then And (get gi2) (Wp ai2 (lutn!par2)) else
             if gi2 == natom "false" then And (get gi1) (Wp ai1 (lutn!par1)) else
             Andn
               (Andn (Imp (get gi1) (Wp ai1 (lutn!par1)))
                     (Imp (get gi2) (Wp ai2 (lutn!par2))))
               (Or (get gi1) (get gi2))
  in def lutn (Par ws,form)
-- calculate the precondition of a concurrent workflow and save it in the given lookup table;
-- it is assumed that all wi |= Skip
pre :: ParWf -> UMap ParWf Formula -> UMap ParWf Formula
pre (Par []) lut = lut
pre (Par xs) lut =
  -- memoisation: if this workflow's precondition is already known, reuse it
  if lut ? (Par xs) then lut
  else
    let i = indep (Par xs) 0 ((length xs) -1) -- Find i such that ai1,ai2 are independent to ALL
                                              -- possible subsequent actions of the other workflows.
                                              -- Returns -1 if there is no such i
    in if i == -1 then let -- the standard case
                         (lutn,formset) = preIter (Par xs) 0 ((length xs) -1) lut
                         -- conjoin the collected subformulas; an empty set means
                         -- the precondition is trivially true
                         form = if Data.Set.null formset then Atom "itrue"
                                else foldr1 (Andn) (toList formset)
                       in def lutn ((Par xs),form)
       else prioritise (Par xs) i lut -- If ai1 and ai2 are independent from all guarded-action pairs of the other workflows,
                                      -- we can prioritise their execution over all other initial actions.
-- The main program ---------------------------------------------------------------------
-- Initial look-up table: the empty concurrent workflow has precondition I.
initLut = def UMap.empty (Par [],Atom "I")
-- Compute and print the precondition of the example workflow 'exParWfHuge'
-- (defined in the Workflows module).
main = putStr (show ((pre exParWfHuge initLut) ! exParWfHuge))
| Booster2/Booster2 | Workflow_Precond/impl_nondisjoint/Precond6.hs | bsd-3-clause | 7,517 | 41 | 23 | 2,949 | 1,997 | 1,051 | 946 | 89 | 14 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section{Tidying up Core}
-}
{-# LANGUAGE CPP #-}
module TidyPgm (
mkBootModDetailsTc, tidyProgram, globaliseAndTidyId
) where
#include "HsVersions.h"
import TcRnTypes
import DynFlags
import CoreSyn
import CoreUnfold
import CoreFVs
import CoreTidy
import CoreMonad
import CorePrep
import CoreUtils
import CoreLint
import Literal
import Rules
import PatSyn
import ConLike
import CoreArity ( exprArity, exprBotStrictness_maybe )
import VarEnv
import VarSet
import Var
import Id
import MkId ( mkDictSelRhs )
import IdInfo
import InstEnv
import FamInstEnv
import Type ( tidyTopType )
import Demand ( appIsBottom, isNopSig, isBottomingSig )
import BasicTypes
import Name hiding (varName)
import NameSet
import NameEnv
import Avail
import IfaceEnv
import TcEnv
import TcRnMonad
import DataCon
import TyCon
import Class
import Module
import Packages( isDllName )
import HscTypes
import Maybes
import UniqSupply
import ErrUtils (Severity(..))
import Outputable
import FastBool hiding ( fastOr )
import SrcLoc
import FastString
import qualified ErrUtils as Err
import Control.Monad
import Data.Function
import Data.List ( sortBy )
import Data.IORef ( atomicModifyIORef' )
{-
Constructing the TypeEnv, Instances, Rules, VectInfo from which the
ModIface is constructed, and which goes on to subsequent modules in
--make mode.
Most of the interface file is obtained simply by serialising the
TypeEnv. One important consequence is that if the *interface file*
has pragma info if and only if the final TypeEnv does. This is not so
important for *this* module, but it's essential for ghc --make:
subsequent compilations must not see (e.g.) the arity if the interface
file does not contain arity. If they do, they'll exploit the arity;
then the arity might change, but the iface file doesn't change =>
recompilation does not happen => disaster.
For data types, the final TypeEnv will have a TyThing for the TyCon,
plus one for each DataCon; the interface file will contain just one
data type declaration, but it is de-serialised back into a collection
of TyThings.
************************************************************************
* *
Plan A: simpleTidyPgm
* *
************************************************************************
Plan A: mkBootModDetails: omit pragmas, make interfaces small
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* Ignore the bindings
* Drop all WiredIn things from the TypeEnv
(we never want them in interface files)
* Retain all TyCons and Classes in the TypeEnv, to avoid
having to find which ones are mentioned in the
types of exported Ids
* Trim off the constructors of non-exported TyCons, both
from the TyCon and from the TypeEnv
* Drop non-exported Ids from the TypeEnv
* Tidy the types of the DFunIds of Instances,
make them into GlobalIds, (they already have External Names)
and add them to the TypeEnv
* Tidy the types of the (exported) Ids in the TypeEnv,
make them into GlobalIds (they already have External Names)
* Drop rules altogether
* Tidy the bindings, to ensure that the Caf and Arity
information is correct for each top-level binder; the
code generator needs it. And to ensure that local names have
distinct OccNames in case of object-file splitting
* If this an hsig file, drop the instances altogether too (they'll
get pulled in by the implicit module import.
-}
-- This is Plan A: make a small type env when typechecking only,
-- or when compiling a hs-boot file, or simply when not using -O
--
-- We don't look at the bindings at all -- there aren't any
-- for hs-boot files
-- | Plan A: build a small 'ModDetails' straight from the typechecker's
-- result, without looking at any bindings (there are none to look at for
-- hs-boot files, and none are needed when compiling without -O).
mkBootModDetailsTc :: HscEnv -> TcGblEnv -> IO ModDetails
mkBootModDetailsTc hsc_env
        TcGblEnv{ tcg_exports   = exports,
                  tcg_type_env  = type_env, -- just for the Ids
                  tcg_tcs       = tcs,
                  tcg_patsyns   = pat_syns,
                  tcg_insts     = insts,
                  tcg_fam_insts = fam_insts
                }
  = do  { let dflags = hsc_dflags hsc_env
        ; showPassIO dflags CoreTidy
          -- Globalise and tidy the DFunIds and the Ids inside pattern
          -- synonyms; they already have External Names
        ; let { insts'     = map (tidyClsInstDFun globaliseAndTidyId) insts
              ; pat_syns'  = map (tidyPatSynIds globaliseAndTidyId) pat_syns
              ; type_env1  = mkBootTypeEnv (availsToNameSet exports)
                                           (typeEnvIds type_env) tcs fam_insts
              ; type_env2  = extendTypeEnvWithPatSyns pat_syns' type_env1
              ; dfun_ids   = map instanceDFunId insts'
              ; type_env'  = extendTypeEnvWithIds type_env2 dfun_ids
              }
        ; return (ModDetails { md_types     = type_env'
                             , md_insts     = insts'
                             , md_fam_insts = fam_insts
                             , md_rules     = []
                             , md_anns      = []
                             , md_exports   = exports
                             , md_vect_info = noVectInfo
                             })
        }
  where
    mkBootTypeEnv :: NameSet -> [Id] -> [TyCon] -> [FamInst] -> TypeEnv
    mkBootTypeEnv exports ids tcs fam_insts
      = tidyTypeEnv True $
        typeEnvFromEntities final_ids tcs fam_insts
      where
        -- Find the LocalIds in the type env that are exported
        -- Make them into GlobalIds, and tidy their types
        --
        -- It's very important to remove the non-exported ones
        -- because we don't tidy the OccNames, and if we don't remove
        -- the non-exported ones we'll get many things with the
        -- same name in the interface file, giving chaos.
        --
        -- Do make sure that we keep Ids that are already Global.
        -- When typechecking an .hs-boot file, the Ids come through as
        -- GlobalIds.
        final_ids = [ if isLocalId id then globaliseAndTidyId id
                      else id
                    | id <- ids
                    , keep_it id ]

        -- default methods have their export flag set, but everything
        -- else doesn't (yet), because this is pre-desugaring, so we
        -- must test both.
        keep_it id = isExportedId id || idName id `elemNameSet` exports
-- | Takes a LocalId with an External Name and makes it into a GlobalId:
--   * unchanged Name (might be Internal or External)
--   * unchanged details
--   * VanillaIdInfo (makes a conservative assumption about Caf-hood)
globaliseAndTidyId :: Id -> Id
globaliseAndTidyId id = Id.setIdType (globaliseId id) (tidyTopType (idType id))
{-
************************************************************************
* *
Plan B: tidy bindings, make TypeEnv full of IdInfo
* *
************************************************************************
Plan B: include pragmas, make interfaces
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* Figure out which Ids are externally visible
* Tidy the bindings, externalising appropriate Ids
* Drop all Ids from the TypeEnv, and add all the External Ids from
the bindings. (This adds their IdInfo to the TypeEnv; and adds
floated-out Ids that weren't even in the TypeEnv before.)
Step 1: Figure out external Ids
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Note [Choosing external names]
See also the section "Interface stability" in the
RecompilationAvoidance commentary:
http://ghc.haskell.org/trac/ghc/wiki/Commentary/Compiler/RecompilationAvoidance
First we figure out which Ids are "external" Ids. An
"external" Id is one that is visible from outside the compilation
unit. These are
a) the user exported ones
b) ones mentioned in the unfoldings, workers,
rules of externally-visible ones ,
or vectorised versions of externally-visible ones
While figuring out which Ids are external, we pick a "tidy" OccName
for each one. That is, we make its OccName distinct from the other
external OccNames in this module, so that in interface files and
object code we can refer to it unambiguously by its OccName. The
OccName for each binder is prefixed by the name of the exported Id
that references it; e.g. if "f" references "x" in its unfolding, then
"x" is renamed to "f_x". This helps distinguish the different "x"s
from each other, and means that if "f" is later removed, things that
depend on the other "x"s will not need to be recompiled. Of course,
if there are multiple "f_x"s, then we have to disambiguate somehow; we
use "f_x0", "f_x1" etc.
As far as possible we should assign names in a deterministic fashion.
Each time this module is compiled with the same options, we should end
up with the same set of external names with the same types. That is,
the ABI hash in the interface should not change. This turns out to be
quite tricky, since the order of the bindings going into the tidy
phase is already non-deterministic, as it is based on the ordering of
Uniques, which are assigned unpredictably.
To name things in a stable way, we do a depth-first-search of the
bindings, starting from the exports sorted by name. This way, as long
as the bindings themselves are deterministic (they sometimes aren't!),
the order in which they are presented to the tidying phase does not
affect the names we assign.
Step 2: Tidy the program
~~~~~~~~~~~~~~~~~~~~~~~~
Next we traverse the bindings top to bottom. For each *top-level*
binder
1. Make it into a GlobalId; its IdDetails becomes VanillaGlobal,
reflecting the fact that from now on we regard it as a global,
not local, Id
2. Give it a system-wide Unique.
[Even non-exported things need system-wide Uniques because the
byte-code generator builds a single Name->BCO symbol table.]
We use the NameCache kept in the HscEnv as the
source of such system-wide uniques.
For external Ids, use the original-name cache in the NameCache
to ensure that the unique assigned is the same as the Id had
in any previous compilation run.
3. Rename top-level Ids according to the names we chose in step 1.
If it's an external Id, make it have a External Name, otherwise
make it have an Internal Name. This is used by the code generator
to decide whether to make the label externally visible
4. Give it its UTTERLY FINAL IdInfo; in ptic,
* its unfolding, if it should have one
* its arity, computed from the number of visible lambdas
* its CAF info, computed from what is free in its RHS
Finally, substitute these new top-level binders consistently
throughout, including in unfoldings. We also tidy binders in
RHSs, so that they print nicely in interfaces.
-}
-- | Plan B: tidy the bindings, externalise the appropriate Ids, and build
-- the 'CgGuts' for code generation together with a 'ModDetails' whose type
-- environment contains the IdInfo-rich external Ids.
--
-- NB: several of the let-bound names below (e.g. 'tidy_type_env' used by
-- 'tidy_cls_insts') are mutually dependent and rely on laziness; do not
-- reorder them.
tidyProgram :: HscEnv -> ModGuts -> IO (CgGuts, ModDetails)
tidyProgram hsc_env  (ModGuts { mg_module    = mod
                              , mg_exports   = exports
                              , mg_rdr_env   = rdr_env
                              , mg_tcs       = tcs
                              , mg_insts     = cls_insts
                              , mg_fam_insts = fam_insts
                              , mg_binds     = binds
                              , mg_patsyns   = patsyns
                              , mg_rules     = imp_rules
                              , mg_vect_info = vect_info
                              , mg_anns      = anns
                              , mg_deps      = deps
                              , mg_foreign   = foreign_stubs
                              , mg_hpc_info  = hpc_info
                              , mg_modBreaks = modBreaks
                              })

  = do  { let { dflags       = hsc_dflags hsc_env
              ; omit_prags   = gopt Opt_OmitInterfacePragmas dflags
              ; expose_all   = gopt Opt_ExposeAllUnfoldings dflags
              ; print_unqual = mkPrintUnqualified dflags rdr_env
              }
        ; showPassIO dflags CoreTidy

        ; let { type_env = typeEnvFromEntities [] tcs fam_insts

              ; implicit_binds
                  = concatMap getClassImplicitBinds (typeEnvClasses type_env) ++
                    concatMap getTyConImplicitBinds (typeEnvTyCons type_env)
              }

          -- Step 1: choose tidy, stable Names for the external Ids
          -- (see Note [Choosing external names])
        ; (unfold_env, tidy_occ_env)
              <- chooseExternalIds hsc_env mod omit_prags expose_all
                                   binds implicit_binds imp_rules (vectInfoVar vect_info)

        ; let { (trimmed_binds, trimmed_rules)
                  = findExternalRules omit_prags binds imp_rules unfold_env }

          -- Step 2: tidy the bindings themselves, giving each top-level
          -- binder its final Name and IdInfo
        ; (tidy_env, tidy_binds)
              <- tidyTopBinds hsc_env mod unfold_env tidy_occ_env trimmed_binds

        ; let { final_ids = [ id | id <- bindersOfBinds tidy_binds,
                                   isExternalName (idName id)]
              ; type_env1 = extendTypeEnvWithIds type_env final_ids

              ; tidy_cls_insts = map (tidyClsInstDFun (lookup_aux_id tidy_type_env)) cls_insts
                -- A DFunId will have a binding in tidy_binds, and so will now be in
                -- tidy_type_env, replete with IdInfo. Its name will be unchanged since
                -- it was born, but we want Global, IdInfo-rich (or not) DFunId in the
                -- tidy_cls_insts. Similarly the Ids inside a PatSyn.

              ; tidy_rules = tidyRules tidy_env trimmed_rules
                -- You might worry that the tidy_env contains IdInfo-rich stuff
                -- and indeed it does, but if omit_prags is on, ext_rules is
                -- empty

              ; tidy_vect_info = tidyVectInfo tidy_env vect_info

                -- Tidy the Ids inside each PatSyn, very similarly to DFunIds
                -- and then override the PatSyns in the type_env with the new tidy ones
                -- This is really the only reason we keep mg_patsyns at all; otherwise
                -- they could just stay in type_env
              ; tidy_patsyns = map (tidyPatSynIds (lookup_aux_id tidy_type_env)) patsyns
              ; type_env2    = extendTypeEnvWithPatSyns tidy_patsyns type_env1

              ; tidy_type_env = tidyTypeEnv omit_prags type_env2

                -- See Note [Injecting implicit bindings]
              ; all_tidy_binds = implicit_binds ++ tidy_binds

                -- get the TyCons to generate code for. Careful! We must use
                -- the untidied TypeEnv here, because we need
                --  (a) implicit TyCons arising from types and classes defined
                --      in this module
                --  (b) wired-in TyCons, which are normally removed from the
                --      TypeEnv we put in the ModDetails
                --  (c) Constructors even if they are not exported (the
                --      tidied TypeEnv has trimmed these away)
              ; alg_tycons = filter isAlgTyCon (typeEnvTyCons type_env)
              }

        ; endPassIO hsc_env print_unqual CoreTidy all_tidy_binds tidy_rules

          -- If the endPass didn't print the rules, but ddump-rules is
          -- on, print now
        ; unless (dopt Opt_D_dump_simpl dflags) $
            Err.dumpIfSet_dyn dflags Opt_D_dump_rules
              (showSDoc dflags (ppr CoreTidy <+> ptext (sLit "rules")))
              (pprRulesForUser tidy_rules)

          -- Print one-line size info
        ; let cs = coreBindsStats tidy_binds
        ; when (dopt Opt_D_dump_core_stats dflags)
               (log_action dflags dflags SevDump noSrcSpan defaultDumpStyle
                  (ptext (sLit "Tidy size (terms,types,coercions)")
                   <+> ppr (moduleName mod) <> colon
                   <+> int (cs_tm cs)
                   <+> int (cs_ty cs)
                   <+> int (cs_co cs) ))

        ; return (CgGuts { cg_module    = mod,
                           cg_tycons    = alg_tycons,
                           cg_binds     = all_tidy_binds,
                           cg_foreign   = foreign_stubs,
                           cg_dep_pkgs  = map fst $ dep_pkgs deps,
                           cg_hpc_info  = hpc_info,
                           cg_modBreaks = modBreaks },

                   ModDetails { md_types     = tidy_type_env,
                                md_rules     = tidy_rules,
                                md_insts     = tidy_cls_insts,
                                md_vect_info = tidy_vect_info,
                                md_fam_insts = fam_insts,
                                md_exports   = exports,
                                md_anns      = anns      -- are already tidy
                              })
        }
-- | Look up an auxiliary Id (e.g. a DFunId or a PatSyn builder/matcher)
-- in the tidied type environment.  It must be present; a miss is a
-- compiler bug, hence the panic.
lookup_aux_id :: TypeEnv -> Var -> Id
lookup_aux_id type_env id =
  case lookupTypeEnv type_env (idName id) of
    Just (AnId tidied_id) -> tidied_id
    _other                -> pprPanic "lookup_aux_id" (ppr id)
tidyTypeEnv :: Bool       -- Compiling without -O, so omit prags
            -> TypeEnv -> TypeEnv

-- The completed type environment is gotten from
--      a) the types and classes defined here (plus implicit things)
--      b) adding Ids with correct IdInfo, including unfoldings,
--         gotten from the bindings
-- From (b) we keep only those Ids with External names;
--          the CoreTidy pass makes sure these are all and only
--          the externally-accessible ones
-- This truncates the type environment to include only the
-- exported Ids and things needed from them, which saves space
--
-- See Note [Don't attempt to trim data types]

tidyTypeEnv omit_prags type_env
  | omit_prags = mapNameEnv trimThing no_wired_in   -- (2) trim if necessary
  | otherwise  = no_wired_in
  where
    -- (1) remove wired-in things (we never want them in interface files)
    no_wired_in = filterNameEnv (not . isWiredInName . getName) type_env
--------------------------
-- | Trim off inessential IdInfo, for boot files and no -O: non-implicit
-- Ids lose their pragma info; everything else passes through unchanged.
trimThing :: TyThing -> TyThing
trimThing thing =
  case thing of
    AnId id | not (isImplicitId id) -> AnId (id `setIdInfo` vanillaIdInfo)
    _other                          -> thing
-- | Add (or override) the given pattern synonyms in the type environment.
extendTypeEnvWithPatSyns :: [PatSyn] -> TypeEnv -> TypeEnv
extendTypeEnvWithPatSyns tidy_patsyns type_env =
  extendTypeEnvList type_env (map (AConLike . PatSynCon) tidy_patsyns)
-- | Tidy the vectorisation info, keeping only mappings whose Ids are both
-- external and exported; anything else would make the interface inconsistent.
tidyVectInfo :: TidyEnv -> VectInfo -> VectInfo
tidyVectInfo (_, var_env) info@(VectInfo { vectInfoVar          = vars
                                         , vectInfoParallelVars = parallelVars
                                         })
  = info { vectInfoVar          = tidy_vars
         , vectInfoParallelVars = tidy_parallelVars
         }
  where
    -- we only export mappings whose domain and co-domain is exported (otherwise, the iface is
    -- inconsistent)
    tidy_vars = mkVarEnv [ (tidy_var, (tidy_var, tidy_var_v))
                         | (var, var_v) <- varEnvElts vars
                         , let tidy_var   = lookup_var var
                               tidy_var_v = lookup_var var_v
                         , isExternalId tidy_var   && isExportedId tidy_var
                         , isExternalId tidy_var_v && isExportedId tidy_var_v
                         , isDataConWorkId var || not (isImplicitId var)
                         ]

    tidy_parallelVars = mkVarSet [ tidy_var
                                 | var <- varSetElems parallelVars
                                 , let tidy_var = lookup_var var
                                 , isExternalId tidy_var && isExportedId tidy_var
                                 ]

    -- map a Var to its tidied version, defaulting to itself if untouched
    lookup_var var = lookupWithDefaultVarEnv var_env var var

    -- We need to make sure that all names getting into the iface version of 'VectInfo' are
    -- external; otherwise, 'MkIface' will bomb out.
    isExternalId = isExternalName . idName
{-
Note [Don't attempt to trim data types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For some time GHC tried to avoid exporting the data constructors
of a data type if it wasn't strictly necessary to do so; see Trac #835.
But "strictly necessary" accumulated a longer and longer list
of exceptions, and finally I gave up the battle:
commit 9a20e540754fc2af74c2e7392f2786a81d8d5f11
Author: Simon Peyton Jones <simonpj@microsoft.com>
Date: Thu Dec 6 16:03:16 2012 +0000
Stop attempting to "trim" data types in interface files
Without -O, we previously tried to make interface files smaller
by not including the data constructors of data types. But
there are a lot of exceptions, notably when Template Haskell is
involved or, more recently, DataKinds.
However Trac #7445 shows that even without TemplateHaskell, using
the Data class and invoking Language.Haskell.TH.Quote.dataToExpQ
is enough to require us to expose the data constructors.
So I've given up on this "optimisation" -- it's probably not
important anyway. Now I'm simply not attempting to trim off
the data constructors. The gain in simplicity is worth the
modest cost in interface file growth, which is limited to the
bits reqd to describe those data constructors.
************************************************************************
* *
Implicit bindings
* *
************************************************************************
Note [Injecting implicit bindings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We inject the implicit bindings right at the end, in CoreTidy.
Some of these bindings, notably record selectors, are not
constructed in an optimised form. E.g. record selector for
data T = MkT { x :: {-# UNPACK #-} !Int }
Then the unfolding looks like
x = \t. case t of MkT x1 -> let x = I# x1 in x
This generates bad code unless it's first simplified a bit. That is
why CoreUnfold.mkImplicitUnfolding uses simpleExprOpt to do a bit of
optimisation first. (Only matters when the selector is used curried;
eg map x ys.) See Trac #2070.
[Oct 09: in fact, record selectors are no longer implicit Ids at all,
because we really do want to optimise them properly. They are treated
much like any other Id. But doing "light" optimisation on an implicit
Id still makes sense.]
At one time I tried injecting the implicit bindings *early*, at the
beginning of SimplCore. But that gave rise to real difficulty,
because GlobalIds are supposed to have *fixed* IdInfo, but the
simplifier and other core-to-core passes mess with IdInfo all the
time. The straw that broke the camels back was when a class selector
got the wrong arity -- ie the simplifier gave it arity 2, whereas
importing modules were expecting it to have arity 1 (Trac #2844).
It's much safer just to inject them right at the end, after tidying.
Oh: two other reasons for injecting them late:
- If implicit Ids are already in the bindings when we start TidyPgm,
we'd have to be careful not to treat them as external Ids (in
the sense of chooseExternalIds); else the Ids mentioned in *their*
RHSs will be treated as external and you get an interface file
saying a18 = <blah>
but nothing referring to a18 (because the implicit Id is the
one that does, and implicit Ids don't appear in interface files).
- More seriously, the tidied type-envt will include the implicit
Id replete with a18 in its unfolding; but we won't take account
of a18 when computing a fingerprint for the class; result chaos.
There is one sort of implicit binding that is injected still later,
namely those for data constructor workers. Reason (I think): it's
really just a code generation trick.... binding itself makes no sense.
See Note [Data constructor workers] in CorePrep.
-}
-- | Implicit bindings for a TyCon: one binding per data-constructor
-- wrapper (constructors without a wrapper are skipped).
getTyConImplicitBinds :: TyCon -> [CoreBind]
getTyConImplicitBinds tc =
  [ get_defn wrap_id
  | dc <- tyConDataCons tc
  , Just wrap_id <- [dataConWrapId_maybe dc] ]
-- | Implicit bindings for a class: one dictionary-selector binding per
-- selector Id, indexed by its position in the dictionary.
getClassImplicitBinds :: Class -> [CoreBind]
getClassImplicitBinds cls =
  zipWith mk_sel (classAllSelIds cls) [0..]
  where
    mk_sel op val_index = NonRec op (mkDictSelRhs cls val_index)
-- | Turn an implicit Id into a non-recursive binding whose RHS is the
-- Id's unfolding template.
get_defn :: Id -> CoreBind
get_defn id = NonRec id rhs
  where rhs = unfoldingTemplate (realIdUnfolding id)
{-
************************************************************************
* *
\subsection{Step 1: finding externals}
* *
************************************************************************
See Note [Choosing external names].
-}
type UnfoldEnv  = IdEnv (Name{-new name-}, Bool {-show unfolding-})
  -- Maps each top-level Id to its new Name (the Id is tidied in step 2)
  -- The Unique is unchanged.  If the new Name is external, it will be
  -- visible in the interface file.
  --
  -- Bool => expose unfolding or not.
chooseExternalIds :: HscEnv
                  -> Module
                  -> Bool -> Bool
                  -> [CoreBind]
                  -> [CoreBind]
                  -> [CoreRule]
                  -> VarEnv (Var, Var)
                  -> IO (UnfoldEnv, TidyOccEnv)
-- Step 1 from the notes above
chooseExternalIds hsc_env mod omit_prags expose_all binds implicit_binds imp_id_rules vect_vars
  = do { (unfold_env1,occ_env1) <- search init_work_list emptyVarEnv init_occ_env
       ; let internal_ids = filter (not . (`elemVarEnv` unfold_env1)) binders
       ; tidy_internal internal_ids unfold_env1 occ_env1 }
 where
  nc_var = hsc_NC hsc_env

  -- init_ext_ids is the initial list of Ids that should be
  -- externalised.  It serves as the starting point for finding a
  -- deterministic, tidy, renaming for all external Ids in this
  -- module.
  --
  -- It is sorted, so that it has a deterministic order (i.e. it's the
  -- same list every time this module is compiled), in contrast to the
  -- bindings, which are ordered non-deterministically.
  init_work_list = zip init_ext_ids init_ext_ids
  init_ext_ids   = sortBy (compare `on` getOccName) $
                   filter is_external binders

  -- An Id should be external if either (a) it is exported,
  -- (b) it appears in the RHS of a local rule for an imported Id, or
  -- (c) it is the vectorised version of an imported Id
  -- See Note [Which rules to expose]
  is_external id = isExportedId id || id `elemVarSet` rule_rhs_vars || id `elemVarSet` vect_var_vs
  rule_rhs_vars  = mapUnionVarSet ruleRhsFreeVars imp_id_rules
  vect_var_vs    = mkVarSet [var_v | (var, var_v) <- nameEnvElts vect_vars, isGlobalId var]

  binders          = bindersOfBinds binds
  implicit_binders = bindersOfBinds implicit_binds
  binder_set       = mkVarSet binders

  avoids   = [getOccName name | bndr <- binders ++ implicit_binders,
                                let name = idName bndr,
                                isExternalName name ]
        -- In computing our "avoids" list, we must include
        --      all implicit Ids
        --      all things with global names (assigned once and for
        --                                      all by the renamer)
        -- since their names are "taken".
        -- The type environment is a convenient source of such things.
        -- In particular, the set of binders doesn't include
        -- implicit Ids at this stage.

        -- We also make sure to avoid any exported binders.  Consider
        --      f{-u1-} = 1     -- Local decl
        --      ...
        --      f{-u2-} = 2     -- Exported decl
        --
        -- The second exported decl must 'get' the name 'f', so we
        -- have to put 'f' in the avoids list before we get to the first
        -- decl.  tidyTopId then does a no-op on exported binders.
  init_occ_env = initTidyOccEnv avoids

  search :: [(Id,Id)]    -- The work-list: (external id, referring id)
                         -- Make a tidy, external Name for the external id,
                         --   add it to the UnfoldEnv, and do the same for the
                         --   transitive closure of Ids it refers to
                         -- The referring id is used to generate a tidy
                         --- name for the external id
         -> UnfoldEnv    -- id -> (new Name, show_unfold)
         -> TidyOccEnv   -- occ env for choosing new Names
         -> IO (UnfoldEnv, TidyOccEnv)

  search [] unfold_env occ_env = return (unfold_env, occ_env)

  search ((idocc,referrer) : rest) unfold_env occ_env
    -- already assigned a Name on an earlier visit: nothing to do
    | idocc `elemVarEnv` unfold_env = search rest unfold_env occ_env
    | otherwise = do
      (occ_env', name') <- tidyTopName mod nc_var (Just referrer) occ_env idocc
      let
          (new_ids, show_unfold)
            | omit_prags = ([], False)
            | otherwise  = addExternal expose_all refined_id

          -- add vectorised version if any exists
          new_ids' = new_ids ++ maybeToList (fmap snd $ lookupVarEnv vect_vars idocc)

          -- 'idocc' is an *occurrence*, but we need to see the
          -- unfolding in the *definition*; so look up in binder_set
          refined_id = case lookupVarSet binder_set idocc of
                         Just id -> id
                         Nothing -> WARN( True, ppr idocc ) idocc

          unfold_env' = extendVarEnv unfold_env idocc (name',show_unfold)

          -- prefer an exported Id as the referrer for names it pulls in
          referrer' | isExportedId refined_id = refined_id
                    | otherwise               = referrer
      --
      search (zip new_ids' (repeat referrer') ++ rest) unfold_env' occ_env'

  tidy_internal :: [Id] -> UnfoldEnv -> TidyOccEnv
                -> IO (UnfoldEnv, TidyOccEnv)
  tidy_internal []       unfold_env occ_env = return (unfold_env,occ_env)
  tidy_internal (id:ids) unfold_env occ_env = do
      (occ_env', name') <- tidyTopName mod nc_var Nothing occ_env id
      let unfold_env' = extendVarEnv unfold_env id (name',False)
      tidy_internal ids unfold_env' occ_env'
-- | Decide whether to expose the unfolding of an externalised Id, and
-- return the (deterministically ordered) Ids its unfolding and rules
-- mention, which must therefore also be externalised.
addExternal :: Bool -> Id -> ([Id], Bool)
addExternal expose_all id = (new_needed_ids, show_unfold)
where
new_needed_ids = bndrFvsInOrder show_unfold id
idinfo = idInfo id
show_unfold = show_unfolding (unfoldingInfo idinfo)
never_active = isNeverActive (inlinePragmaActivation (inlinePragInfo idinfo))
loop_breaker = isStrongLoopBreaker (occInfo idinfo)
bottoming_fn = isBottomingSig (strictnessInfo idinfo)
-- Stuff to do with the Id's unfolding
-- We leave the unfolding there even if there is a worker
-- In GHCi the unfolding is used by importers
show_unfolding (CoreUnfolding { uf_src = src, uf_guidance = guidance })
= expose_all -- 'expose_all' says to expose all
-- unfoldings willy-nilly
|| isStableSource src -- Always expose things whose
-- source is an inline rule
|| not (bottoming_fn -- No need to inline bottom functions
|| never_active -- Or ones that say not to
|| loop_breaker -- Or that are loop breakers
|| neverUnfoldGuidance guidance)
show_unfolding (DFunUnfolding {}) = True
show_unfolding _ = False
{-
************************************************************************
* *
Deterministic free variables
* *
************************************************************************
We want a deterministic free-variable list. exprFreeVars gives us
a VarSet, which is in a non-deterministic order when converted to a
list. Hence, here we define a free-variable finder that returns
the free variables in the order that they are encountered.
See Note [Choosing external names]
-}
-- | Deterministic free Ids of a binder's unfolding and RULES.
-- See Note [Choosing external names] above.
bndrFvsInOrder :: Bool -> Id -> [Id]
bndrFvsInOrder show_unfold id
= run (dffvLetBndr show_unfold id)
-- Run a DFFV computation with empty scope and empty accumulator,
-- returning the accumulated free-variable list.
run :: DFFV () -> [Id]
run (DFFV m) = case m emptyVarSet (emptyVarSet, []) of
((_,ids),_) -> ids
-- | Deterministic-free-variable monad: a reader for the in-scope set
-- combined with state threading the (set, list) of free vars found so
-- far. The set gives O(log n) membership tests; the list preserves
-- encounter order.
newtype DFFV a
= DFFV (VarSet -- Envt: non-top-level things that are in scope
-- we don't want to record these as free vars
-> (VarSet, [Var]) -- Input State: (set, list) of free vars so far
-> ((VarSet,[Var]),a)) -- Output state
-- Standard boilerplate: Functor via the Monad instance.
instance Functor DFFV where
fmap = liftM
-- | Applicative for 'DFFV'. 'pure' is defined directly (rather than
-- as 'return') so this instance does not depend on the Monad
-- instance's 'return' method; '<*>' sequences via the monad.
instance Applicative DFFV where
  pure a = DFFV $ \_ st -> (st, a)
  (<*>) = ap
-- State-threading bind: run m, then feed its output state (but the
-- unchanged scope env) into the continuation.
instance Monad DFFV where
return a = DFFV $ \_ st -> (st, a)
(DFFV m) >>= k = DFFV $ \env st ->
case m env st of
(st',a) -> case k a of
DFFV f -> f env st'
-- | Run a computation with one extra binder in scope; in-scope
-- variables are never recorded as free.
extendScope :: Var -> DFFV a -> DFFV a
extendScope v (DFFV k) = DFFV $ \env -> k (extendVarSet env v)

-- | As 'extendScope', but for a whole list of binders at once.
extendScopeList :: [Var] -> DFFV a -> DFFV a
extendScopeList vs (DFFV k) = DFFV $ \env -> k (extendVarSetList env vs)
-- | Record a variable as free: only local Ids count, and only if they
-- are neither in scope nor already recorded. New vars are consed onto
-- the front of the accumulator list.
insert :: Var -> DFFV ()
insert v = DFFV $ \ env (set, ids) ->
let keep_me = isLocalId v &&
not (v `elemVarEnv` env) &&
not (v `elemVarSet` set)
in if keep_me
then ((extendVarSet set v, v:ids), ())
else ((set, ids), ())
-- | Walk an expression, inserting each free variable in encounter
-- order. NB: the Breakpoint equation must precede the wildcard Tick
-- equation, since breakpoint free vars must be collected.
dffvExpr :: CoreExpr -> DFFV ()
dffvExpr (Var v) = insert v
dffvExpr (App e1 e2) = dffvExpr e1 >> dffvExpr e2
dffvExpr (Lam v e) = extendScope v (dffvExpr e)
dffvExpr (Tick (Breakpoint _ ids) e) = mapM_ insert ids >> dffvExpr e
dffvExpr (Tick _other e) = dffvExpr e
dffvExpr (Cast e _) = dffvExpr e
dffvExpr (Let (NonRec x r) e) = dffvBind (x,r) >> extendScope x (dffvExpr e)
dffvExpr (Let (Rec prs) e) = extendScopeList (map fst prs) $
(mapM_ dffvBind prs >> dffvExpr e)
dffvExpr (Case e b _ as) = dffvExpr e >> extendScope b (mapM_ dffvAlt as)
dffvExpr _other = return ()
-- | Free variables of a case alternative: the pattern binders scope
-- over the alternative's right-hand side.
dffvAlt :: (t, [Var], CoreExpr) -> DFFV ()
dffvAlt (_, bndrs, rhs) = extendScopeList bndrs (dffvExpr rhs)

-- | Free variables of a (nested) binding pair: the binder's own
-- rules/unfolding plus the right-hand side.
dffvBind :: (Id, CoreExpr) -> DFFV ()
dffvBind (bndr, rhs)
  | isId bndr = dffvLetBndr False bndr >> dffvExpr rhs
  | otherwise = dffvExpr rhs
  -- Pass False because we are doing the RHS right here
  -- If you say True you'll get *exponential* behaviour!
dffvLetBndr :: Bool -> Id -> DFFV ()
-- Gather the free vars of the RULES and unfolding of a binder
-- We always get the free vars of a *stable* unfolding, but
-- for a *vanilla* one (InlineRhs), the flag controls what happens:
-- True <=> get fvs of even a *vanilla* unfolding
-- False <=> ignore an InlineRhs
-- For nested bindings (call from dffvBind) we always say "False" because
-- we are taking the fvs of the RHS anyway
-- For top-level bindings (call from addExternal, via bndrFvsInOrder)
-- we say "True" if we are exposing that unfolding
dffvLetBndr vanilla_unfold id
= do { go_unf (unfoldingInfo idinfo)
; mapM_ go_rule (specInfoRules (specInfo idinfo)) }
where
idinfo = idInfo id
-- Ordinary unfolding: walk its template, subject to the flag above
go_unf (CoreUnfolding { uf_tmpl = rhs, uf_src = src })
= case src of
InlineRhs | vanilla_unfold -> dffvExpr rhs
| otherwise -> return ()
_ -> dffvExpr rhs
-- DFun unfolding: the dfun binders scope over the argument exprs
go_unf (DFunUnfolding { df_bndrs = bndrs, df_args = args })
= extendScopeList bndrs $ mapM_ dffvExpr args
go_unf _ = return ()
-- Built-in rules have no Core RHS to walk
go_rule (BuiltinRule {}) = return ()
go_rule (Rule { ru_bndrs = bndrs, ru_rhs = rhs })
= extendScopeList bndrs (dffvExpr rhs)
{-
************************************************************************
* *
findExternalRules
* *
************************************************************************
Note [Finding external rules]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The complete rules are gotten by combining
a) local rules for imported Ids
b) rules embedded in the top-level Ids
There are two complications:
* Note [Which rules to expose]
* Note [Trimming auto-rules]
Note [Which rules to expose]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The function 'expose_rule' filters out rules that mention, on the LHS,
Ids that aren't externally visible; these rules can't fire in a client
module.
The externally-visible binders are computed (by chooseExternalIds)
assuming that all orphan rules are externalised (see init_ext_ids in
function 'search'). So in fact it's a bit conservative and we may
export more than we need. (It's a sort of mutual recursion.)
Note [Trimming auto-rules]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Second, with auto-specialisation we may specialise local or imported
dfuns or INLINE functions, and then later inline them. That may leave
behind something like
RULE "foo" forall d. f @ Int d = f_spec
where f is either local or imported, and there is no remaining
reference to f_spec except from the RULE.
Now that RULE *might* be useful to an importing module, but that is
purely speculative, and meanwhile the code is taking up space and
codegen time. So it seems better to drop the binding for f_spec if
the auto-generated rule is the *only* reason that it is being kept
alive.
(The RULE still might have been useful in the past; that is, it was
the right thing to have generated it in the first place. See Note
[Inline specialisations] in Specialise. But now it has served its
purpose, and can be discarded.)
So findExternalRules does this:
* Remove all bindings that are kept alive *only* by isAutoRule rules
(this is done in trim_binds)
* Remove all auto rules that mention bindings that have been removed
(this is done by filtering by keep_rule)
So if a binding is kept alive for some *other* reason (e.g. f_spec is
called in the final code), we keep the rule too.
I found that binary sizes jumped by 6-10% when I started to specialise
INLINE functions (again, Note [Inline specialisations] in Specialise).
Adding trimAutoRules removed all this bloat.
-}
-- | Combine local rules for imported Ids with rules attached to
-- top-level binders, dropping bindings kept alive only by auto rules.
-- See Note [Finding external rules], Note [Which rules to expose] and
-- Note [Trimming auto-rules] above.
findExternalRules :: Bool -- Omit pragmas
-> [CoreBind]
-> [CoreRule] -- Local rules for imported fns
-> UnfoldEnv -- Ids that are exported, so we need their rules
-> ([CoreBind], [CoreRule])
-- See Note [Finding external rules]
findExternalRules omit_prags binds imp_id_rules unfold_env
= (trimmed_binds, filter keep_rule all_rules)
where
imp_rules = filter expose_rule imp_id_rules
imp_user_rule_fvs = mapUnionVarSet user_rule_rhs_fvs imp_rules
-- Auto rules never keep a binding alive by themselves
user_rule_rhs_fvs rule | isAutoRule rule = emptyVarSet
| otherwise = ruleRhsFreeVars rule
(trimmed_binds, local_bndrs, _, all_rules) = trim_binds binds
keep_rule rule = ruleFreeVars rule `subVarSet` local_bndrs
-- Remove rules that make no sense, because they mention a
-- local binder (on LHS or RHS) that we have now discarded.
-- (NB: ruleFreeVars only includes LocalIds)
--
-- LHS: we have already filtered out rules that mention internal Ids
-- on LHS but that isn't enough because we might have by now
-- discarded a binding with an external Id. (How?
-- chooseExternalIds is a bit conservative.)
--
-- RHS: the auto rules that might mention a binder that has
-- been discarded; see Note [Trimming auto-rules]
expose_rule rule
| omit_prags = False
| otherwise = all is_external_id (varSetElems (ruleLhsFreeIds rule))
-- Don't expose a rule whose LHS mentions a locally-defined
-- Id that is completely internal (i.e. not visible to an
-- importing module). NB: ruleLhsFreeIds only returns LocalIds.
-- See Note [Which rules to expose]
is_external_id id = case lookupVarEnv unfold_env id of
Just (name, _) -> isExternalName name
Nothing -> False
trim_binds :: [CoreBind]
-> ( [CoreBind] -- Trimmed bindings
, VarSet -- Binders of those bindings
, VarSet -- Free vars of those bindings + rhs of user rules
-- (we don't bother to delete the binders)
, [CoreRule]) -- All rules, imported + from the bindings
-- This function removes unnecessary bindings, and gathers up rules from
-- the bindings we keep. See Note [Trimming auto-rules]
trim_binds [] -- Base case, start with imp_user_rule_fvs
= ([], emptyVarSet, imp_user_rule_fvs, imp_rules)
trim_binds (bind:binds)
| any needed bndrs -- Keep binding
= ( bind : binds', bndr_set', needed_fvs', local_rules ++ rules )
| otherwise -- Discard binding altogether
= stuff
where
-- Process later bindings first; 'stuff' is the result for the tail
stuff@(binds', bndr_set, needed_fvs, rules)
= trim_binds binds
needed bndr = isExportedId bndr || bndr `elemVarSet` needed_fvs
bndrs = bindersOf bind
rhss = rhssOfBind bind
bndr_set' = bndr_set `extendVarSetList` bndrs
needed_fvs' = needed_fvs `unionVarSet`
mapUnionVarSet idUnfoldingVars bndrs `unionVarSet`
-- Ignore type variables in the type of bndrs
mapUnionVarSet exprFreeVars rhss `unionVarSet`
mapUnionVarSet user_rule_rhs_fvs local_rules
-- In needed_fvs', we don't bother to delete binders from the fv set
local_rules = [ rule
| id <- bndrs
, is_external_id id -- Only collect rules for external Ids
, rule <- idCoreRules id
, expose_rule rule ] -- and ones that can fire in a client
{-
************************************************************************
* *
tidyTopName
* *
************************************************************************
This is where we set names to local/global based on whether they really are
externally visible (see comment at the top of this module). If the name
was previously local, we have to give it a unique occurrence name if
we intend to externalise it.
-}
-- | Choose the final Name for a top-level Id, making it local or
-- global according to whether it is externally visible (the Maybe Id
-- is the referrer: Just <=> externalise). Runs in IO so the NameCache
-- can be updated atomically.
tidyTopName :: Module -> IORef NameCache -> Maybe Id -> TidyOccEnv
-> Id -> IO (TidyOccEnv, Name)
tidyTopName mod nc_var maybe_ref occ_env id
| global && internal = return (occ_env, localiseName name)
| global && external = return (occ_env, name)
-- Global names are assumed to have been allocated by the renamer,
-- so they already have the "right" unique
-- And it's a system-wide unique too
-- Now we get to the real reason that all this is in the IO Monad:
-- we have to update the name cache in a nice atomic fashion
| local && internal = do { new_local_name <- atomicModifyIORef' nc_var mk_new_local
; return (occ_env', new_local_name) }
-- Even local, internal names must get a unique occurrence, because
-- if we do -split-objs we externalise the name later, in the code generator
--
-- Similarly, we must make sure it has a system-wide Unique, because
-- the byte-code generator builds a system-wide Name->BCO symbol table
| local && external = do { new_external_name <- atomicModifyIORef' nc_var mk_new_external
; return (occ_env', new_external_name) }
| otherwise = panic "tidyTopName"
where
name = idName id
external = isJust maybe_ref
global = isExternalName name
local = not global
internal = not external
loc = nameSrcSpan name
old_occ = nameOccName name
-- Derive the new occurrence name from the referrer, when one exists
new_occ | Just ref <- maybe_ref
, ref /= id
= mkOccName (occNameSpace old_occ) $
let
ref_str = occNameString (getOccName ref)
occ_str = occNameString old_occ
in
case occ_str of
'$':'w':_ -> occ_str
-- workers: the worker for a function already
-- includes the occname for its parent, so there's
-- no need to prepend the referrer.
_other | isSystemName name -> ref_str
| otherwise -> ref_str ++ '_' : occ_str
-- If this name was system-generated, then don't bother
-- to retain its OccName, just use the referrer. These
-- system-generated names will become "f1", "f2", etc. for
-- a referrer "f".
| otherwise = old_occ
(occ_env', occ') = tidyOccName occ_env new_occ
mk_new_local nc = (nc { nsUniqs = us }, mkInternalName uniq occ' loc)
where
(uniq, us) = takeUniqFromSupply (nsUniqs nc)
mk_new_external nc = allocateGlobalBinder nc mod occ' loc
-- If we want to externalise a currently-local name, check
-- whether we have already assigned a unique for it.
-- If so, use it; if not, extend the table.
-- All this is done by allocateGlobalBinder.
-- This is needed when *re*-compiling a module in GHCi; we must
-- use the same name for externally-visible things as we did before.
{-
************************************************************************
* *
\subsection{Step 2: top-level tidying}
* *
************************************************************************
-}
-- TopTidyEnv: when tidying we need to know
-- * nc_var: The NameCache, containing a unique supply and any pre-ordained Names.
-- These may have arisen because the
-- renamer read in an interface file mentioning M.$wf, say,
-- and assigned it unique r77. If, on this compilation, we've
-- invented an Id whose name is $wf (but with a different unique)
-- we want to rename it to have unique r77, so that we can do easy
-- comparisons with stuff from the interface file
--
-- * occ_env: The TidyOccEnv, which tells us which local occurrences
-- are 'used'
--
-- * subst_env: A Var->Var mapping that substitutes the new Var for the old
-- | Step 2: tidy every top-level binding, threading the TidyEnv left
-- to right across the program. IO is needed only to look up the
-- Integer conversion names.
tidyTopBinds :: HscEnv
-> Module
-> UnfoldEnv
-> TidyOccEnv
-> CoreProgram
-> IO (TidyEnv, CoreProgram)
tidyTopBinds hsc_env this_mod unfold_env init_occ_env binds
= do mkIntegerId <- lookupMkIntegerName dflags hsc_env
integerSDataCon <- lookupIntegerSDataConName dflags hsc_env
let cvt_integer = cvtLitInteger dflags mkIntegerId integerSDataCon
return $ tidy cvt_integer init_env binds
where
dflags = hsc_dflags hsc_env
init_env = (init_occ_env, emptyVarEnv)
this_pkg = thisPackage dflags
-- Fold over the bindings, accumulating the substitution env
tidy _ env [] = (env, [])
tidy cvt_integer env (b:bs)
= let (env1, b') = tidyTopBind dflags this_pkg this_mod
cvt_integer unfold_env env b
(env2, bs') = tidy cvt_integer env1 bs
in (env2, b':bs')
------------------------
-- | Tidy a single top-level binding: rename its binder(s) to the
-- Names chosen earlier (recorded in the UnfoldEnv) and compute the
-- binding's CafInfo.
--
-- NB: tidy_env2 is knot-tied — it is handed to tidyTopPair, which
-- must not look at it while the new binder is being built.
tidyTopBind :: DynFlags
            -> PackageKey
            -> Module
            -> (Integer -> CoreExpr)   -- Desugaring for Integer literals
            -> UnfoldEnv
            -> TidyEnv
            -> CoreBind
            -> (TidyEnv, CoreBind)
tidyTopBind dflags this_pkg this_mod cvt_integer unfold_env
            (occ_env, subst1) (NonRec bndr rhs)
  = (tidy_env2, NonRec bndr' rhs')
  where
    -- Use expectJust (matching the Rec case below) so a missing
    -- UnfoldEnv entry panics with a location instead of an anonymous
    -- irrefutable-pattern failure.
    (name', show_unfold) = expectJust "tidyTopBind" $
                           lookupVarEnv unfold_env bndr
    caf_info = hasCafRefs dflags this_pkg this_mod (subst1, cvt_integer) (idArity bndr) rhs
    (bndr', rhs') = tidyTopPair dflags show_unfold tidy_env2 caf_info name' (bndr, rhs)
    subst2 = extendVarEnv subst1 bndr bndr'
    tidy_env2 = (occ_env, subst2)

tidyTopBind dflags this_pkg this_mod cvt_integer unfold_env
            (occ_env, subst1) (Rec prs)
  = (tidy_env2, Rec prs')
  where
    prs' = [ tidyTopPair dflags show_unfold tidy_env2 caf_info name' (id, rhs)
           | (id, rhs) <- prs,
             let (name', show_unfold) =
                   expectJust "tidyTopBind" $ lookupVarEnv unfold_env id
           ]
    subst2 = extendVarEnvList subst1 (bndrs `zip` map fst prs')
    tidy_env2 = (occ_env, subst2)
    bndrs = map fst prs
    -- the CafInfo for a recursive group says whether *any* rhs in
    -- the group may refer indirectly to a CAF (because then, they all do).
    caf_info
      | or [ mayHaveCafRefs (hasCafRefs dflags this_pkg this_mod
                                        (subst1, cvt_integer)
                                        (idArity bndr) rhs)
           | (bndr, rhs) <- prs ] = MayHaveCafRefs
      | otherwise = NoCafRefs
-----------------------------------------------------------
tidyTopPair :: DynFlags
-> Bool -- show unfolding
-> TidyEnv -- The TidyEnv is used to tidy the IdInfo
-- It is knot-tied: don't look at it!
-> CafInfo
-> Name -- New name
-> (Id, CoreExpr) -- Binder and RHS before tidying
-> (Id, CoreExpr)
-- This function is the heart of Step 2
-- The rec_tidy_env is the one to use for the IdInfo
-- It's necessary because when we are dealing with a recursive
-- group, a variable late in the group might be mentioned
-- in the IdInfo of one early in the group
tidyTopPair dflags show_unfold rhs_tidy_env caf_info name' (bndr, rhs)
= (bndr1, rhs1)
where
-- Rebuild the binder as a GlobalId with the new name, a tidied
-- top-level type, and freshly computed IdInfo
bndr1 = mkGlobalId details name' ty' idinfo'
details = idDetails bndr -- Preserve the IdDetails
ty' = tidyTopType (idType bndr)
rhs1 = tidyExpr rhs_tidy_env rhs
idinfo' = tidyTopIdInfo dflags rhs_tidy_env name' rhs rhs1 (idInfo bndr)
show_unfold caf_info
-- tidyTopIdInfo creates the final IdInfo for top-level
-- binders. There are two delicate pieces:
--
-- * Arity. After CoreTidy, this arity must not change any more.
-- Indeed, CorePrep must eta expand where necessary to make
-- the manifest arity equal to the claimed arity.
--
-- * CAF info. This must also remain valid through to code generation.
-- We add the info here so that it propagates to all
-- occurrences of the binders in RHSs, and hence to occurrences in
-- unfoldings, which are inside Ids imported by GHCi. Ditto RULES.
-- CoreToStg makes use of this when constructing SRTs.
-- | Build the final IdInfo for a top-level binder (see the two
-- delicate pieces — arity and CAF info — documented just above).
tidyTopIdInfo :: DynFlags -> TidyEnv -> Name -> CoreExpr -> CoreExpr
-> IdInfo -> Bool -> CafInfo -> IdInfo
tidyTopIdInfo dflags rhs_tidy_env name orig_rhs tidy_rhs idinfo show_unfold caf_info
| not is_external -- For internal Ids (not externally visible)
= vanillaIdInfo -- we only need enough info for code generation
-- Arity and strictness info are enough;
-- c.f. CoreTidy.tidyLetBndr
`setCafInfo` caf_info
`setArityInfo` arity
`setStrictnessInfo` final_sig
| otherwise -- Externally-visible Ids get the whole lot
= vanillaIdInfo
`setCafInfo` caf_info
`setArityInfo` arity
`setStrictnessInfo` final_sig
`setOccInfo` robust_occ_info
`setInlinePragInfo` (inlinePragInfo idinfo)
`setUnfoldingInfo` unfold_info
-- NB: we throw away the Rules
-- They have already been extracted by findExternalRules
where
is_external = isExternalName name
--------- OccInfo ------------
robust_occ_info = zapFragileOcc (occInfo idinfo)
-- It's important to keep loop-breaker information
-- when we are doing -fexpose-all-unfoldings
--------- Strictness ------------
mb_bot_str = exprBotStrictness_maybe orig_rhs
sig = strictnessInfo idinfo
final_sig | not $ isNopSig sig
= WARN( _bottom_hidden sig , ppr name ) sig
-- try a cheap-and-cheerful bottom analyser
| Just (_, nsig) <- mb_bot_str = nsig
| otherwise = sig
_bottom_hidden id_sig = case mb_bot_str of
Nothing -> False
Just (arity, _) -> not (appIsBottom id_sig arity)
--------- Unfolding ------------
unf_info = unfoldingInfo idinfo
unfold_info | show_unfold = tidyUnfolding rhs_tidy_env unf_info unf_from_rhs
| otherwise = noUnfolding
unf_from_rhs = mkTopUnfolding dflags is_bot tidy_rhs
is_bot = isBottomingSig final_sig
-- NB: do *not* expose the worker if show_unfold is off,
-- because that means this thing is a loop breaker or
-- marked NOINLINE or something like that
-- This is important: if you expose the worker for a loop-breaker
-- then you can make the simplifier go into an infinite loop, because
-- in effect the unfolding is exposed. See Trac #1709
--
-- You might think that if show_unfold is False, then the thing should
-- not be w/w'd in the first place. But a legitimate reason is this:
-- the function returns bottom
-- In this case, show_unfold will be false (we don't expose unfoldings
-- for bottoming functions), but we might still have a worker/wrapper
-- split (see Note [Worker-wrapper for bottoming functions] in WorkWrap.hs
--------- Arity ------------
-- Usually the Id will have an accurate arity on it, because
-- the simplifier has just run, but not always.
-- One case I found was when the last thing the simplifier
-- did was to let-bind a non-atomic argument and then float
-- it to the top level. So it seems more robust just to
-- fix it here.
arity = exprArity orig_rhs
{-
************************************************************************
* *
\subsection{Figuring out CafInfo for an expression}
* *
************************************************************************
hasCafRefs decides whether a top-level closure can point into the dynamic heap.
We mark such things as `MayHaveCafRefs' because this information is
used to decide whether a particular closure needs to be referenced
in an SRT or not.
There are two reasons for setting MayHaveCafRefs:
a) The RHS is a CAF: a top-level updatable thunk.
b) The RHS refers to something that MayHaveCafRefs
Possible improvement: In an effort to keep the number of CAFs (and
hence the size of the SRTs) down, we could also look at the expression and
decide whether it requires a small bounded amount of heap, so we can ignore
it as a CAF. In these cases however, we would need to use an additional
CAF list to keep track of non-collectable CAFs.
Note [Disgusting computation of CafRefs]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We compute hasCafRefs here, because IdInfo is supposed to be finalised
after TidyPgm. But CorePrep does some transformations that affect CAF-hood.
So we have to *predict* the result here, which is revolting.
In particular CorePrep expands Integer literals. So in the prediction code
here we resort to applying the same expansion (cvt_integer). Ugh!
-}
type CafRefEnv = (VarEnv Id, Integer -> CoreExpr)
-- The env finds the Caf-ness of the Id
-- The Integer -> CoreExpr is the desugaring function for Integer literals
-- See Note [Disgusting computation of CafRefs]
-- | Predict whether a top-level RHS is (or refers to) a CAF.
-- See the module comments above and Note [Disgusting computation of
-- CafRefs].
hasCafRefs :: DynFlags -> PackageKey -> Module
-> CafRefEnv -> Arity -> CoreExpr
-> CafInfo
hasCafRefs dflags this_pkg this_mod p@(_,cvt_integer) arity expr
| is_caf || mentions_cafs = MayHaveCafRefs
| otherwise = NoCafRefs
where
mentions_cafs = isFastTrue (cafRefsE p expr)
is_dynamic_name = isDllName dflags this_pkg this_mod
-- A CAF is an updatable (zero-arity) RHS that is not static
is_caf = not (arity > 0 || rhsIsStatic (targetPlatform dflags) is_dynamic_name cvt_integer expr)
-- NB. we pass in the arity of the expression, which is expected
-- to be calculated by exprArity. This is because exprArity
-- knows how much eta expansion is going to be done by
-- CorePrep later on, and we don't want to duplicate that
-- knowledge in rhsIsStatic below.
-- | Does this expression mention anything that may have CAF refs?
-- A lazy 'or' over the syntax tree, via 'fastOr'.
cafRefsE :: CafRefEnv -> Expr a -> FastBool
cafRefsE p (Var id) = cafRefsV p id
cafRefsE p (Lit lit) = cafRefsL p lit
cafRefsE p (App f a) = fastOr (cafRefsE p f) (cafRefsE p) a
cafRefsE p (Lam _ e) = cafRefsE p e
cafRefsE p (Let b e) = fastOr (cafRefsEs p (rhssOfBind b)) (cafRefsE p) e
cafRefsE p (Case e _bndr _ alts) = fastOr (cafRefsE p e) (cafRefsEs p) (rhssOfAlts alts)
cafRefsE p (Tick _n e) = cafRefsE p e
cafRefsE p (Cast e _co) = cafRefsE p e
cafRefsE _ (Type _) = fastBool False
cafRefsE _ (Coercion _) = fastBool False
-- Lazy disjunction over a list of expressions
cafRefsEs :: CafRefEnv -> [Expr a] -> FastBool
cafRefsEs _ [] = fastBool False
cafRefsEs p (e:es) = fastOr (cafRefsE p e) (cafRefsEs p) es
cafRefsL :: CafRefEnv -> Literal -> FastBool
-- Don't forget that mk_integer id might have Caf refs!
-- We first need to convert the Integer into its final form, to
-- see whether mkInteger is used.
cafRefsL p@(_, cvt_integer) (LitInteger i _) = cafRefsE p (cvt_integer i)
cafRefsL _ _ = fastBool False
-- | CAF-ness of a variable: imported Ids carry their own CafInfo;
-- local Ids are looked up in the substitution env (their tidied
-- counterpart carries the computed CafInfo).
cafRefsV :: CafRefEnv -> Id -> FastBool
cafRefsV (subst, _) id
| not (isLocalId id) = fastBool (mayHaveCafRefs (idCafInfo id))
| Just id' <- lookupVarEnv subst id = fastBool (mayHaveCafRefs (idCafInfo id'))
| otherwise = fastBool False
fastOr :: FastBool -> (a -> FastBool) -> a -> FastBool
-- Lazy "or" over FastBool: the second operand (f x) is only
-- evaluated when the first is not already true.
fastOr a f x = if isFastTrue a then fastBool True else fastBool (isFastTrue (f x))
{-
------------------------------------------------------------------------------
-- Old, dead, type-trimming code
-------------------------------------------------------------------------------
We used to try to "trim off" the constructors of data types that are
not exported, to reduce the size of interface files, at least without
-O. But that is not always possible: see the old Note [When we can't
trim types] below for exceptions.
Then (Trac #7445) I realised that the TH problem arises for any data type
that we have deriving( Data ), because we can invoke
Language.Haskell.TH.Quote.dataToExpQ
to get a TH Exp representation of a value built from that data type.
You don't even need {-# LANGUAGE TemplateHaskell #-}.
At this point I give up. The pain of trimming constructors just
doesn't seem worth the gain. So I've dumped all the code, and am just
leaving it here at the end of the module in case something like this
is ever resurrected.
Note [When we can't trim types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The basic idea of type trimming is to export algebraic data types
abstractly (without their data constructors) when compiling without
-O, unless of course they are explicitly exported by the user.
We always export synonyms, because they can be mentioned in the type
of an exported Id. We could do a full dependency analysis starting
from the explicit exports, but that's quite painful, and not done for
now.
But there are some times we can't do that, indicated by the 'no_trim_types' flag.
First, Template Haskell. Consider (Trac #2386) this
module M(T, makeOne) where
data T = Yay String
makeOne = [| Yay "Yep" |]
Notice that T is exported abstractly, but makeOne effectively exports it too!
A module that splices in $(makeOne) will then look for a declaration of Yay,
so it'd better be there. Hence, brutally but simply, we switch off type
constructor trimming if TH is enabled in this module.
Second, data kinds. Consider (Trac #5912)
{-# LANGUAGE DataKinds #-}
module M() where
data UnaryTypeC a = UnaryDataC a
type Bug = 'UnaryDataC
We always export synonyms, so Bug is exposed, and that means that
UnaryTypeC must be too, even though it's not explicitly exported. In
effect, DataKinds means that we'd need to do a full dependency analysis
to see what data constructors are mentioned. But we don't do that yet.
In these two cases we just switch off type trimming altogether.
mustExposeTyCon :: Bool -- Type-trimming flag
-> NameSet -- Exports
-> TyCon -- The tycon
-> Bool -- Can its rep be hidden?
-- We are compiling without -O, and thus trying to write as little as
-- possible into the interface file. But we must expose the details of
-- any data types whose constructors or fields are exported
mustExposeTyCon no_trim_types exports tc
| no_trim_types -- See Note [When we can't trim types]
= True
| not (isAlgTyCon tc) -- Always expose synonyms (otherwise we'd have to
-- figure out whether it was mentioned in the type
-- of any other exported thing)
= True
| isEnumerationTyCon tc -- For an enumeration, exposing the constructors
= True -- won't lead to the need for further exposure
| isFamilyTyCon tc -- Open type family
= True
-- Below here we just have data/newtype decls or family instances
| null data_cons -- Ditto if there are no data constructors
= True -- (NB: empty data types do not count as enumerations
-- see Note [Enumeration types] in TyCon
| any exported_con data_cons -- Expose rep if any datacon or field is exported
= True
| isNewTyCon tc && isFFITy (snd (newTyConRhs tc))
= True -- Expose the rep for newtypes if the rep is an FFI type.
-- For a very annoying reason. 'Foreign import' is meant to
-- be able to look through newtypes transparently, but it
-- can only do that if it can "see" the newtype representation
| otherwise
= False
where
data_cons = tyConDataCons tc
exported_con con = any (`elemNameSet` exports)
(dataConName con : dataConFieldLabels con)
-}
| urbanslug/ghc | compiler/main/TidyPgm.hs | bsd-3-clause | 63,067 | 4 | 22 | 19,184 | 7,499 | 4,047 | 3,452 | 560 | 5 |
{-# LANGUAGE OverloadedStrings #-}
import Data.Aeson
import Data.Either (fromRight)
import qualified Data.HashMap.Strict as HashMap
import qualified Data.List.NonEmpty as NonEmpty
import Data.Maybe
import Data.String.Conv
import Data.Text (Text)
import qualified Data.Text as Text
import qualified Data.Vector as Vector
import Mars.Command
import Mars.Command.Cat
import Mars.Command.Cd
import Mars.Command.Ls
import Mars.Command.Pwd
import Mars.Parser
import Mars.Query
import Mars.Renderable
import Mars.Types
import Test.Tasty
import Test.Tasty.HUnit
import Test.Tasty.QuickCheck as QC
import Tests.Mars.Arbitraries ()
-- | Entry point: run the full tasty test tree.
main :: IO ()
main = defaultMain tests
-- | Top-level test tree grouping all test suites.
tests :: TestTree
tests =
testGroup
"Martian Tests"
[ unitTests,
stateTests,
queryProperties,
evaluationTests
]
-- | QuickCheck properties for command/query round-tripping and
-- navigation. (prop_query_parse / prop_move_up_shorten are defined
-- elsewhere in this module.)
queryProperties :: TestTree
queryProperties =
testGroup
"Query Tests"
[ testProperty "command parse unparse" prop_command_parse,
testProperty "query parse unparse" prop_query_parse,
testProperty "move up shortens" prop_move_up_shorten
]
-- | Assert that parsing the given input string yields exactly the
-- expected list of operations; the input doubles as the test name.
parseCase :: String -> [Operation] -> TestTree
parseCase s q = testCase s $ parser (toS s) @?= Right q
-- | Text wrapper with a constrained generator (see the Arbitrary
-- instance below), so properties get simple, non-empty input.
newtype WrappedText = WrappedText Text
deriving (Show)
-- Generates non-empty strings of ASCII letters only.
instance Arbitrary WrappedText where
arbitrary =
WrappedText . toS
<$> listOf
(elements (['A' .. 'Z'] <> ['a' .. 'z']))
`suchThat` (not . null)
-- | Properties about rendering/evaluation output.
evaluationTests :: TestTree
evaluationTests =
testGroup
"Evaluation"
[ testProperty
"colored text contains text"
prop_contains_colored_text
]
-- | Colouring text must not destroy the text itself: the original
-- text still occurs somewhere in the ANSI-coloured rendering.
-- (not . null) on breakOnAll avoids the O(n) length of the old
-- `length … /= 0` formulation.
prop_contains_colored_text :: ANSIColour -> WrappedText -> Bool
prop_contains_colored_text color (WrappedText text) =
  not . null . Text.breakOnAll text . ansiColor color $ text
-- | Unit tests for state transitions driven by cd/ls commands.
stateTests :: TestTree
stateTests =
testGroup
"State Updates"
[ testGroup
"cd"
[ test_cd_existing,
test_cd_nonexisting
],
testGroup "ls" [test_ls_top_level, test_ls_second_level]
]
-- | Build a fresh MarsState at the root location from a JSON literal.
-- Test fixture only: an undecodable literal is a bug in the test, so
-- fail loudly with a message that names this function and the input
-- (instead of fromJust's anonymous pattern-match error).
initMarsState :: Text -> MarsState
initMarsState t =
  MarsState
    { path = DefaultLocation,
      document =
        fromMaybe
          (error ("initMarsState: could not decode JSON: " <> toS t))
          (decode . toS $ t)
    }
-- | cd into a key that exists: the state's path becomes that query.
test_cd_existing :: TestTree
test_cd_existing =
testCase
"cd to existing object"
$ newPath @?= q
where
-- Irrefutable pattern: evalCommand for Cd is expected to yield
-- a ChangePath result here.
(ChangePath newPath) = evalCommand oldState (Cd q)
q = fromRight (error "parseQuery test_cd_existing") . parseQuery $ "a"
oldState = initMarsState "{\"a\": {}}"
-- | cd into a key that does NOT exist: the path must stay unchanged.
-- (Label fixed: it previously said "existing", a copy-paste of the
-- test above.)
test_cd_nonexisting :: TestTree
test_cd_nonexisting =
  testCase
    "cd to nonexisting object"
    $ newPath @?= path oldState
  where
    (ChangePath newPath) = evalCommand oldState (Cd q)
    q = fromRight (error "parseQuery test_cd_nonexisting") . parseQuery $ "b"
    oldState = initMarsState "{\"a\": {}}"
-- | ls at the root lists the document's top-level entries.
test_ls_top_level :: TestTree
test_ls_top_level =
testCase
"ls should print entries for top level"
$ result @?= DirectoryEntries [DirectoryEntry (ItemName "a") MarsBool]
where
result = evalCommand state (Ls q)
-- Empty string parses to the root query
q = fromRight (error "aef322") . parseQuery $ ""
state = initMarsState "{\"a\": true}"
-- | ls one level down lists the nested object's entries with their
-- Mars value types.
test_ls_second_level :: TestTree
test_ls_second_level =
testCase
"ls should print entries for second level"
$ result
@?= DirectoryEntries
[ DirectoryEntry (ItemName "ann") MarsBool,
DirectoryEntry (ItemName "barry") MarsNumber
]
where
result = evalCommand state (Ls q)
q = fromRight (error "aef322") . parseQuery $ "a"
state = initMarsState "{\"a\": {\"ann\": true, \"barry\": 1}}"
-- | Example-based unit tests: command parsing, query parsing, glob
-- matching and direct document query/modify operations.
unitTests :: TestTree
unitTests =
testGroup
"Unit Tests"
[ testGroup
"Parsing Commands"
[ parseCase "ls" [OpLs . Ls $ DefaultLocation],
parseCase "cat" [OpCat . Cat $ []],
parseCase "pwd" [OpPwd Pwd],
parseCase "cd" [OpCd . Cd $ DefaultLocation],
parseCase
"ls *"
[ OpLs . Ls $
( Query . NonEmpty.fromList $
[ Glob . NonEmpty.fromList $
[AnyCharMultiple]
]
)
],
parseCase
"ls b*"
[ OpLs . Ls
$ Query
. NonEmpty.fromList
$ [ Glob . NonEmpty.fromList $
[LiteralString "b", AnyCharMultiple]
]
]
],
testGroup
"Parsing Queries"
[ testCase "empty query" $
parseQuery "" @?= Right DefaultLocation
],
testGroup
"Glob Patterns"
[ testCase "exact match" $
globKeys
( HashMap.fromList
[ ("a", Number 1),
("b", Number 2)
]
)
(NonEmpty.fromList [LiteralString "a"])
@?= ["a"]
],
testGroup
"General"
[ testCase "Can query long array" $
queryDoc
(Query . NonEmpty.fromList $ [Glob (NonEmpty.fromList [LiteralString "3"])])
(Array . Vector.fromList $ ["1", "2", "3", "4"])
@?= ["4"],
testCase "Can query array" $
queryDoc
(Query . NonEmpty.fromList $ [Glob (NonEmpty.fromList [LiteralString "0"])])
(Array . Vector.singleton $ "1")
@?= ["1"],
testCase "Can query nested arrays" testNestedArray,
testCase "Can query nested objects" testNestedObject,
testCase "Modify document" $
modifyDoc
(Array (Vector.fromList [Number 1, Number 2, Number 3]))
(Query . NonEmpty.fromList $ [Glob (NonEmpty.fromList [LiteralString "2"])])
(Number 4)
@?= Array (Vector.fromList [Number 1, Number 2, Number 4]),
testCase "Can list items using wildcard" $
queryDoc
(Query . NonEmpty.fromList $ [Glob . NonEmpty.fromList $ [LiteralString "b", AnyCharMultiple]])
( Object . HashMap.fromList $
[ ("beer", Number 1),
("bear", Number 2),
("cart", Number 3)
]
)
@?= [Number 2, Number 1] -- TODO Ordering of keys in HashMap is not stable, test is brittle
]
]
-- | Querying the path "0/0" into a doubly-nested array should reach the
-- innermost element.
testNestedArray :: Assertion
testNestedArray = queryDoc q v @?= ["a"]
  where
    v = Array (Vector.fromList [Array (Vector.fromList ["a"])])
    -- Build the non-empty lists with ':|' instead of the partial
    -- 'NonEmpty.fromList', matching the style of 'testNestedObject'.
    q =
      Query $
        Glob (LiteralString "0" NonEmpty.:| [])
          NonEmpty.:| [Glob (LiteralString "0" NonEmpty.:| [])]
-- | Querying the path "a/b" into a nested object should reach the inner
-- string value.
testNestedObject :: Assertion
testNestedObject = queryDoc q v @?= [toJSON ("Test" :: Text)]
  where
    v =
      Object . HashMap.fromList $
        [ ("a", Object . HashMap.fromList $ [("b", "Test")])
        ]
    -- Use ':|' for the outer list too instead of the partial
    -- 'NonEmpty.fromList', so the whole query is built totally.
    q =
      Query $
        Glob (LiteralString "a" NonEmpty.:| [])
          NonEmpty.:| [Glob (LiteralString "b" NonEmpty.:| [])]
-- | Round-trip property: rendering a command and parsing the rendered
-- text must yield the same operation as the first parsed command.
prop_command_parse :: Operation -> Bool
prop_command_parse (OpCat c) =
  case parser (render c) of
    Right (OpCat x : _) -> x == c
    _ -> False
prop_command_parse (OpCd c) =
  case parser (render c) of
    Right (OpCd x : _) -> x == c
    _ -> False
prop_command_parse (OpLoad c) =
  case parser (render c) of
    Right (OpLoad x : _) -> x == c
    _ -> False
prop_command_parse (OpLs c) =
  case parser (render c) of
    Right (OpLs x : _) -> x == c
    _ -> False
prop_command_parse (OpPwd c) =
  case parser (render c) of
    Right (OpPwd x : _) -> x == c
    _ -> False
prop_command_parse (OpSave c) =
  case parser (render c) of
    Right (OpSave x : _) -> x == c
    _ -> False
prop_command_parse (OpSet c) =
  case parser (render c) of
    Right (OpSet x : _) -> x == c
    _ -> False
-- | Round-trip property: parsing a rendered query must succeed and give
-- back the original query.
prop_query_parse :: Query -> Bool
prop_query_parse q =
  either (const False) (== q) (parseQuery (render q))
-- | Whenever 'moveUp' succeeds, the resulting query is strictly shorter
-- than the original one.
prop_move_up_shorten :: Query -> Bool
prop_move_up_shorten q =
  maybe True (\shorter -> len shorter < len q) (moveUp q)
  where
    len DefaultLocation = 1
    len (Query segments) = length segments
| lorcanmcdonald/mars | src/Tests/Mars/Main.hs | bsd-3-clause | 8,051 | 0 | 19 | 2,420 | 2,358 | 1,237 | 1,121 | 221 | 8 |
{-# LANGUAGE OverloadedStrings #-}
module HipBot.Naggy.Session where
import Blaze.ByteString.Builder (toLazyByteString)
import Control.Lens
import Control.Monad
import Control.Monad.IO.Class
import Control.Monad.Trans
import Control.Monad.Trans.Maybe
import Data.Bifunctor
import qualified Data.ByteString.Lazy as LB
import Data.List (find)
import Data.Maybe
import qualified Data.Serialize as Serial
import qualified Data.Text.Encoding as T
import Network.Wai.Lens
import Web.ClientSession
import Web.Cookie
import Webcrank.Wai
import HipBot
import HipBot.Naggy.Types
-- | Store the registration's OAuth id and room id in an encrypted "sid"
-- session cookie on the response (valid for 24 hours, HTTP-only).
writeSession :: Registration -> NaggyCrank ()
writeSession reg = do
  k <- lift . view $ csKey
  -- NOTE(review): 'roomId' is unwrapped with 'fromJust'; this crashes if
  -- the registration carries no room id -- confirm callers guarantee it.
  let sess = (reg ^. oauthId . to T.encodeUtf8, reg ^. roomId . to fromJust)
  sess' <- liftIO . encryptIO k . Serial.encode $ sess
  putResponseHeader "Set-Cookie" . LB.toStrict . toLazyByteString . renderSetCookie $ def
    { setCookieName = "sid"
    , setCookieValue = sess'
    , setCookieMaxAge = Just 86400 -- 24 hours
    , setCookieHttpOnly = True
    }
-- | Read the session cookie, remember the result in the 'session' state
-- and report whether the request is authorized.
checkAuthorization :: NaggyCrank Authorized
checkAuthorization = do
  maybeSession <- readSession
  lift (assign session maybeSession)
  return (maybe (Unauthorized "Naggy") (const Authorized) maybeSession)
-- | Recover the session from the request's "sid" cookie, if present:
-- find the cookie, decrypt it with the client-session key and decode the
-- serialized payload.  Any missing piece yields 'Nothing'.
readSession :: NaggyCrank (Maybe Session)
readSession = runMaybeT $ do
  hdr <- MaybeT . preview $ request . headers . value "Cookie"
  let cs = parseCookies hdr
  c <- maybe mzero (return . snd) (find (("sid" ==) . fst) cs)
  k <- lift . lift . view $ csKey
  -- Decryption or deserialisation failure short-circuits to 'Nothing'.
  hoistMaybe .
    fmap (first T.decodeUtf8) .
    (eitherToMaybe . Serial.decode =<<) $
    decrypt k c
-- | Run an action with the current session; halt with 403 Forbidden when
-- no session is available.
withSession :: (Session -> HaltT NaggyCrank a) -> HaltT NaggyCrank a
withSession f = do
  maybeSession <- lift (lift (use session))
  maybe (halt forbidden403) f maybeSession
-- | Like 'withSession' for actions that do not look at the session value.
withSession_ :: HaltT NaggyCrank () -> HaltT NaggyCrank ()
withSession_ = withSession . const
-- | Lift a plain 'Maybe' into 'MaybeT' over any monad.
hoistMaybe :: Monad m => Maybe a -> MaybeT m a
hoistMaybe m = MaybeT (return m)
-- | Keep the 'Right' value of an 'Either', discarding any 'Left'.
eitherToMaybe :: Either a b -> Maybe b
eitherToMaybe (Left _) = Nothing
eitherToMaybe (Right b) = Just b
| purefn/naggy | src/HipBot/Naggy/Session.hs | bsd-3-clause | 1,992 | 0 | 14 | 354 | 669 | 351 | 318 | -1 | -1 |
module Problem76 where
import Data.Array
-- | Upper bound for the problem: partitions of 100.
lim :: Int
lim = 100
-- | Print the number of ways to write 'lim' as a sum of positive
-- integers, minus one (excluding the single-term "sum" 'lim' itself).
main :: IO ()
main = print (partitionsWith [1 .. lim] lim - 1)
-- | Memo table: @partitions ! (i, j)@ caches @partitionsWith [1 .. i] j@.
-- Only indices with @i, j >= 1@ are populated, although the declared
-- bounds start at (0, 0).
partitions :: Array (Int, Int) Integer
partitions = array
  ((0, 0), (lim, lim))
  [ ((i, j), partitionsWith [1 .. i] j) | j <- [1 .. lim], i <- [1 .. lim] ]
-- | Count the ways to write @n'@ as a sum of elements drawn from @xs@
-- (with repetition), reading sub-results from the 'partitions' table.
partitionsWith :: [Int] -> Int -> Integer
partitionsWith [] n'
  | n' < 0 = 0
  -- NOTE(review): for n' == 0 this reads @partitions ! (0, 0)@, which is
  -- outside the populated indices; apparently unreachable from 'main',
  -- since recursive calls always use parts >= 1 -- confirm.
  | otherwise = partitions ! (n', n')
-- First term counts parts equal to n'; the sum recurses on each part
-- strictly smaller than n' via the memo table.
partitionsWith xs n' = eq + (sum . map (\x -> partitions ! (x, n' - x)) $ xs')
  where
    eq = fromIntegral . length . filter (== n') $ xs
    xs' = filter (< n') xs
{-
1 -> (1) -> 0
2 -> 1+1, (2)-> 1
3 -> 1+1+1, 1+2, (3) -> 2
4 -> 1+1+1+1, 1+1+2, 2+2, 1+3, (4) -> 4
5 -> 1+1+1+1+1, 1+1+1+2, 1+2+2, 1+1+3, 2+3, 1+4, (5) -> 6
6 -> 1+1+1+1+1+1, 1+1+1+1+2, 1+1+2+2, 2+2+2, 1+1+1+3, 1+2+3, 3+3, 1+1+4, 2+4, 1+5, (6) -> 10
7 -> 1+1+1+1+1+1+1, 1+1+1+1+1+2, 1+1+1+2+2, 1+2+2+2, 1+1+1+1+3, 1+1+2+3, 2+2+3, 1+3+3, 1+1+1+4, 1+2+4, 3+4, 1+1+5, 2+5, 1+6, (7) -> 14
-}
| adityagupta1089/Project-Euler-Haskell | src/problems/Problem76.hs | bsd-3-clause | 973 | 0 | 14 | 227 | 299 | 167 | 132 | 17 | 1 |
{-# LANGUAGE ViewPatterns, ScopedTypeVariables #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeFamilies #-}
{-
Raise an error if you are bracketing an atom, or are enclosed by a
list bracket.
<TEST>
-- expression bracket reduction
yes = (f x) x -- @Suggestion f x x
no = f (x x)
yes = (foo) -- foo
yes = (foo bar) -- @Suggestion foo bar
yes = foo (bar) -- @Warning bar
yes = foo ((x x)) -- @Suggestion (x x)
yes = (f x) ||| y -- @Suggestion f x ||| y
yes = if (f x) then y else z -- @Suggestion if f x then y else z
yes = if x then (f y) else z -- @Suggestion if x then f y else z
yes = (a foo) :: Int -- @Suggestion a foo :: Int
yes = [(foo bar)] -- @Suggestion [foo bar]
yes = foo ((x y), z) -- @Suggestion (x y, z)
yes = C { f = (e h) } -- @Suggestion C {f = e h}
yes = \ x -> (x && x) -- @Suggestion \x -> x && x
no = \(x -> y) -> z
yes = (`foo` (bar baz)) -- @Suggestion (`foo` bar baz)
yes = f ((x)) -- @Warning x
main = do f; (print x) -- @Suggestion do f print x
yes = f (x) y -- @Warning x
no = f (+x) y
no = f ($ x) y
no = ($ x)
yes = (($ x)) -- @Warning ($ x)
no = ($ 1)
yes = (($ 1)) -- @Warning ($ 1)
no = (+5)
yes = ((+5)) -- @Warning (+5)
issue909 = case 0 of { _ | n <- (0 :: Int) -> n }
issue909 = foo (\((x :: z) -> y) -> 9 + x * 7)
issue909 = foo (\((x : z) -> y) -> 9 + x * 7) -- \(x : z -> y) -> 9 + x * 7
issue909 = let ((x:: y) -> z) = q in q
issue909 = do {((x :: y) -> z) <- e; return 1}
issue970 = (f x +) (g x) -- f x + (g x)
issue969 = (Just \x -> x || x) *> Just True
issue1179 = do(this is a test) -- do this is a test
issue1212 = $(Git.hash)
-- type bracket reduction
foo :: (Int -> Int) -> Int
foo :: (Maybe Int) -> a -- @Suggestion Maybe Int -> a
instance Named (DeclHead S)
data Foo = Foo {foo :: (Maybe Foo)} -- @Suggestion foo :: Maybe Foo
-- pattern bracket reduction
foo (x:xs) = 1
foo (True) = 1 -- @Warning True
foo ((True)) = 1 -- @Warning True
f x = case x of (Nothing) -> 1; _ -> 2 -- Nothing
-- dollar reduction tests
no = groupFsts . sortFst $ mr
yes = split "to" $ names -- split "to" names
yes = white $ keysymbol -- white keysymbol
yes = operator foo $ operator -- operator foo operator
no = operator foo $ operator bar
yes = return $ Record{a=b}
no = f $ [1,2..5] -- f [1,2..5]
-- $/bracket rotation tests
yes = (b $ c d) ++ e -- b (c d) ++ e
yes = (a b $ c d) ++ e -- a b (c d) ++ e
no = (f . g $ a) ++ e
no = quickCheck ((\h -> cySucc h == succ h) :: Hygiene -> Bool)
foo = (case x of y -> z; q -> w) :: Int
-- backup fixity resolution
main = do a += b . c; return $ a . b
-- <$> bracket tests
yes = (foo . bar x) <$> baz q -- foo . bar x <$> baz q
no = foo . bar x <$> baz q
-- annotations
main = 1; {-# ANN module ("HLint: ignore Use camelCase" :: String) #-}
main = 1; {-# ANN module (1 + (2)) #-} -- 2
-- special case from esqueleto, see #224
main = operate <$> (select $ from $ \user -> return $ user ^. UserEmail)
-- unknown fixity, see #426
bad x = x . (x +? x . x)
-- special case people don't like to warn on
special = foo $ f{x=1}
special = foo $ Rec{x=1}
special = foo (f{x=1})
loadCradleOnlyonce = skipManyTill anyMessage (message @PublishDiagnosticsNotification)
-- These used to require a bracket
$(pure [])
$(x)
-- People aren't a fan of the record constructors being secretly atomic
function (Ctor (Rec { field })) = Ctor (Rec {field = 1})
-- type splices are a bit special
no = f @($x)
-- template haskell is harder
issue1292 = [e| handleForeignCatch $ \ $(varP pylonExPtrVarName) -> $(quoteExp C.block modifiedStr) |]
</TEST>
-}
module Hint.Bracket(bracketHint) where
import Hint.Type(DeclHint,Idea(..),rawIdea,warn,suggest,Severity(..),toRefactSrcSpan,toSSA)
import Data.Data
import Data.List.Extra
import Data.Generics.Uniplate.DataOnly
import Refact.Types
import GHC.Hs
import GHC.Utils.Outputable
import GHC.Types.SrcLoc
import GHC.Util
import Language.Haskell.GhclibParserEx.GHC.Hs.Expr
import Language.Haskell.GhclibParserEx.GHC.Utils.Outputable
import Language.Haskell.GhclibParserEx.GHC.Hs.Pat
-- | Hint entry point: scan all expressions, types and patterns of a
-- declaration for redundant brackets, plus expressions for redundant
-- '$' uses, and record field declarations for bracketed field types.
bracketHint :: DeclHint
bracketHint _ _ x =
  concatMap (\x -> bracket prettyExpr isPartialAtom True x ++ dollar x) (childrenBi (descendBi splices $ descendBi annotations x) :: [LHsExpr GhcPs]) ++
  concatMap (bracket unsafePrettyPrint (\_ _ -> False) False) (childrenBi x :: [LHsType GhcPs]) ++
  concatMap (bracket unsafePrettyPrint (\_ _ -> False) False) (childrenBi x :: [LPat GhcPs]) ++
  concatMap fieldDecl (childrenBi x)
  where
    -- Brackets the roots of annotations are fine, so we strip them.
    annotations :: AnnDecl GhcPs -> AnnDecl GhcPs
    annotations = descendBi $ \x -> case (x :: LHsExpr GhcPs) of
      L _ (HsPar _ x) -> x
      x -> x

    -- Brackets at the root of splices used to be required, but now they aren't
    splices :: HsDecl GhcPs -> HsDecl GhcPs
    splices (SpliceD a x) = SpliceD a $ flip descendBi x $ \x -> case (x :: LHsExpr GhcPs) of
      L _ (HsPar _ x) -> x
      x -> x
    splices x = x

    -- If we find ourselves in the context of a section and we want to
    -- issue a warning that a child therein has unneccessary brackets,
    -- we'd rather report 'Found : (`Foo` (Bar Baz))' rather than 'Found :
    -- `Foo` (Bar Baz)'. If left to 'unsafePrettyPrint' we'd get the
    -- latter (in contrast to the HSE pretty printer). This patches things
    -- up.
    prettyExpr :: LHsExpr GhcPs -> String
    prettyExpr s@(L _ SectionL{}) = unsafePrettyPrint (noLocA (HsPar EpAnnNotUsed s) :: LHsExpr GhcPs)
    prettyExpr s@(L _ SectionR{}) = unsafePrettyPrint (noLocA (HsPar EpAnnNotUsed s) :: LHsExpr GhcPs)
    prettyExpr x = unsafePrettyPrint x
-- | Strip every level of enclosing parentheses.  'Just' the fully
-- stripped node when at least one set of parens was removed, 'Nothing'
-- when there were none to remove.
remParens' :: Brackets (LocatedA a) => LocatedA a -> Maybe (LocatedA a)
remParens' x = strip <$> remParen x
  where
    strip e =
      case remParen e of
        Nothing -> e
        Just e' -> strip e'
-- | Expressions that look atomic but must keep their brackets anyway
-- (splices, record constructions/updates).  The first argument is the
-- parent expression, when known.
isPartialAtom :: Maybe (LHsExpr GhcPs) -> LHsExpr GhcPs -> Bool
-- Might be '$x', which was really '$ x', but TH enabled misparsed it.
isPartialAtom _ (L _ (HsSpliceE _ (HsTypedSplice _ DollarSplice _ _) )) = True
isPartialAtom _ (L _ (HsSpliceE _ (HsUntypedSplice _ DollarSplice _ _) )) = True
-- Might be '$(x)' where the brackets are required in GHC 8.10 and below
isPartialAtom (Just (L _ HsSpliceE{})) _ = True
isPartialAtom _ x = isRecConstr x || isRecUpdate x
-- | Generic redundant-bracket scan over anything with a 'Brackets'
-- instance (expressions, types and patterns).  'pretty' renders found
-- fragments for the report, 'isPartialAtom' guards near-atoms that must
-- keep brackets, and 'root' says whether we start at a declaration root.
bracket :: forall a . (Data a, Outputable a, Brackets (LocatedA a)) => (LocatedA a -> String) -> (Maybe (LocatedA a) -> LocatedA a -> Bool) -> Bool -> LocatedA a -> [Idea]
bracket pretty isPartialAtom root = f Nothing
  where
    msg = "Redundant bracket"

    -- 'f' is a (generic) function over types in 'Brackets
    -- (expressions, patterns and types). Arguments are, 'f (Maybe
    -- (index, parent, gen)) child'.
    f :: (Data a, Outputable a, Brackets (LocatedA a)) => Maybe (Int, LocatedA a , LocatedA a -> LocatedA a) -> LocatedA a -> [Idea]
    -- No context. Removing parentheses from 'x' succeeds?
    f Nothing o@(remParens' -> Just x)
      -- If at the root, or 'x' is an atom, 'x' parens are redundant.
      | root || isAtom x
      , not $ isPartialAtom Nothing x =
        (if isAtom x then bracketError else bracketWarning) msg o x : g x
    -- In some context, removing parentheses from 'x' succeeds and 'x'
    -- is atomic?
    f (Just (_, p, _)) o@(remParens' -> Just x)
      | isAtom x
      , not $ isPartialAtom (Just p) x =
        bracketError msg o x : g x
    -- In some context, removing parentheses from 'x' succeeds. Does
    -- 'x' actually need bracketing in this context?
    f (Just (i, o, gen)) v@(remParens' -> Just x)
      | not $ needBracket i o x
      , not $ isPartialAtom (Just o) x
      , not $ any isSplicePat $ universeBi o -- over-appoximate ,see #1292
      = rawIdea Suggestion msg (getLocA v) (pretty o) (Just (pretty (gen x))) [] [r] : g x
      where
        typ = findType v
        r = Replace typ (toSSA v) [("x", toSSA x)] "x"
    -- Regardless of the context, there are no parentheses to remove
    -- from 'x'.
    f _ x = g x

    g :: (Data a, Outputable a, Brackets (LocatedA a)) => LocatedA a -> [Idea]
    -- Enumerate over all the immediate children of 'o' looking for
    -- redundant parentheses in each.
    g o = concat [f (Just (i, o, gen)) x | (i, (x, gen)) <- zipFrom 0 $ holes o]
-- | Suggestion-severity idea replacing bracketed fragment 'o' with its
-- unbracketed content 'x'.
bracketWarning msg o x =
  let rewrite = Replace (findType x) (toSSA o) [("x", toSSA x)] "x"
  in suggest msg (reLoc o) (reLoc x) [rewrite]
-- | Warning-severity idea replacing bracketed fragment 'o' with its
-- unbracketed content 'x'.
bracketError :: (Outputable a, Outputable b, Brackets (LocatedA b)) => String -> LocatedA a -> LocatedA b -> Idea
bracketError msg o x =
  let rewrite = Replace (findType x) (toSSA o) [("x", toSSA x)] "x"
  in warn msg (reLoc o) (reLoc x) [rewrite]
-- | Flag a redundant bracket in a record field declaration's type,
-- e.g. @foo :: (Maybe Foo)@.
fieldDecl :: LConDeclField GhcPs -> [Idea]
fieldDecl o@(L loc f@ConDeclField{cd_fld_type=v@(L l (HsParTy _ c))}) =
  let r = L loc (f{cd_fld_type=c}) :: LConDeclField GhcPs in
  [rawIdea Suggestion "Redundant bracket" (locA l)
    (showSDocUnsafe $ ppr_fld o) -- Note this custom printer!
    (Just (showSDocUnsafe $ ppr_fld r))
    []
    [Replace Type (toSSA v) [("x", toSSA c)] "x"]]
  where
    -- If we call 'unsafePrettyPrint' on a field decl, we won't like
    -- the output (e.g. "[foo, bar] :: T"). Here we use a custom
    -- printer to work around (snarfed from
    -- https://hackage.haskell.org/package/ghc-lib-parser-8.8.1/docs/src/HsTypes.html#pprConDeclFields).
    ppr_fld (L _ ConDeclField { cd_fld_names = ns, cd_fld_type = ty, cd_fld_doc = doc })
      = ppr_names ns <+> dcolon <+> ppr ty <+> ppr_mbDoc doc
    ppr_fld (L _ (XConDeclField x)) = ppr x

    ppr_names [n] = ppr n
    ppr_names ns = sep (punctuate comma (map ppr ns))
fieldDecl _ = []
-- This function relies heavily on fixities having been applied to the
-- raw parse tree.

-- | Ideas about redundant or misplaced '$' operators anywhere inside an
-- expression: redundant '$', brackets better moved past a '$', the
-- special '(v1 . v2) <$> v3' shape, and redundant left sections.
dollar :: LHsExpr GhcPs -> [Idea]
dollar = concatMap f . universe
  where
    f x = [ (suggest "Redundant $" (reLoc x) (reLoc y) [r]){ideaSpan = locA (getLoc d)} | L _ (OpApp _ a d b) <- [x], isDol d
          , let y = noLocA (HsApp EpAnnNotUsed a b) :: LHsExpr GhcPs
          , not $ needBracket 0 y a
          , not $ needBracket 1 y b
          , not $ isPartialAtom (Just x) b
          , let r = Replace Expr (toSSA x) [("a", toSSA a), ("b", toSSA b)] "a b"]
          ++
          [ suggest "Move brackets to avoid $" (reLoc x) (reLoc (t y)) [r]
          |(t, e@(L _ (HsPar _ (L _ (OpApp _ a1 op1 a2))))) <- splitInfix x
          , isDol op1
          , isVar a1 || isApp a1 || isPar a1, not $ isAtom a2
          , varToStr a1 /= "select" -- special case for esqueleto, see #224
          , let y = noLocA $ HsApp EpAnnNotUsed a1 (noLocA (HsPar EpAnnNotUsed a2))
          , let r = Replace Expr (toSSA e) [("a", toSSA a1), ("b", toSSA a2)] "a (b)" ]
          ++ -- Special case of (v1 . v2) <$> v3
          [ (suggest "Redundant bracket" (reLoc x) (reLoc y) [r]){ideaSpan = locA locPar}
          | L _ (OpApp _ (L locPar (HsPar _ o1@(L locNoPar (OpApp _ _ (isDot -> True) _)))) o2 v3) <- [x], varToStr o2 == "<$>"
          , let y = noLocA (OpApp EpAnnNotUsed o1 o2 v3) :: LHsExpr GhcPs
          , let r = Replace Expr (toRefactSrcSpan (locA locPar)) [("a", toRefactSrcSpan (locA locNoPar))] "a"]
          ++
          [ suggest "Redundant section" (reLoc x) (reLoc y) [r]
          | L _ (HsApp _ (L _ (HsPar _ (L _ (SectionL _ a b)))) c) <- [x]
          -- , error $ show (unsafePrettyPrint a, gshow b, unsafePrettyPrint c)
          , let y = noLocA $ OpApp EpAnnNotUsed a b c :: LHsExpr GhcPs
          , let r = Replace Expr (toSSA x) [("x", toSSA a), ("op", toSSA b), ("y", toSSA c)] "x op y"]
-- | The two ways of viewing an infix application as (rebuild, operand):
-- a context expecting a replacement right operand paired with the
-- original right operand, and the same for the left operand.
splitInfix :: LHsExpr GhcPs -> [(LHsExpr GhcPs -> LHsExpr GhcPs, LHsExpr GhcPs)]
splitInfix (L l (OpApp _ lhs op rhs)) =
  [ (\newRhs -> L l (OpApp EpAnnNotUsed lhs op newRhs), rhs)
  , (\newLhs -> L l (OpApp EpAnnNotUsed newLhs op rhs), lhs)
  ]
splitInfix _ = []
| ndmitchell/hlint | src/Hint/Bracket.hs | bsd-3-clause | 11,733 | 0 | 25 | 2,785 | 2,987 | 1,546 | 1,441 | 114 | 5 |
{-# LANGUAGE OverloadedStrings, ScopedTypeVariables, ViewPatterns #-}
{-# LANGUAGE DeriveGeneric #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module KrakenSpec (main, spec) where
import Control.Applicative
import Control.Concurrent.MVar
import Control.Exception
import Control.Monad
import Control.Monad.IO.Class
import Data.Aeson (Object, FromJSON, ToJSON)
import Data.Graph.Wrapper
import Data.List
import GHC.Generics
import System.Environment
import System.Exit
import System.IO
import System.IO.Silently
import System.IO.Temp
import Test.HUnit
import Test.Hspec
import Test.QuickCheck
import qualified System.Logging.Facade as Log
import Kraken hiding (catch, runAsMain)
import qualified Kraken
import Kraken.ActionMSpec (mockStateful)
-- | Test entry point: run the whole spec with hspec's default runner.
main :: IO ()
main = hspec spec
-- | Run a store exactly as the "test program" executable would.
runAsMain :: Store -> IO ()
runAsMain = Kraken.runAsMain "test program"
-- | Like 'runAsMain', but capture the 'ExitCode' thrown by the program
-- instead of letting it terminate the process.
runWithExitCode :: Store -> IO ExitCode
runWithExitCode store =
  (Kraken.runAsMain "test program" store >> return ExitSuccess)
    `catch` \ (code :: ExitCode) -> return code
-- | Shape of the custom configuration section used by the config tests;
-- JSON instances come from the 'Generic' representation.
newtype ExampleConfig = ExampleConfig { foo :: Int } deriving Generic
instance ToJSON ExampleConfig
instance FromJSON ExampleConfig
-- | Behavioural specification for kraken: store construction, the
-- check/run/list/dot sub-commands, configuration handling, monitors,
-- priorities, exclusion and failure modes.
spec :: Spec
spec = do
  describe "createStore" $ do
    it "does not store dependencies doubled" $ do
      let store = createStore $
            Target "t1" [] (return ()) Nothing :
            Target "t2" ["t1", "t1"] (return ()) Nothing :
            []
      edges (graphWithPriorities store) `shouldBe` [("t2", "t1")]

  describe "runAsMain" $ do
    -- Global option handling shared by every sub-command.
    describe "global --config file command line option" $ do
      it "reads kraken.conf.example successfully" $ do
        withArgs (words "check --config kraken.conf.example") $
          runAsMain (createStore [])

      it "allows to specify global options after the command" $ do
        withArgs (words "check --config kraken.conf.example") $ do
          runAsMain (createStore [])

      it "allows to specify a default config file (through the API)" $
        withSystemTempFile "kraken-tests" $ \ file handle -> do
          hClose handle
          readFile "kraken.conf.example" >>= writeFile file
          withArgs ["check"] $
            runAsMainWithCustomConfig "test program" file $
              \ (_ :: (FilePath, Object)) -> return (createStore [])

      it "reads the custom configuration section of the example file successfully" $ do
        withArgs (words "check --config kraken.conf.example") $
          runAsMainWithCustomConfig "test program" "kraken.conf.example" $ \ (_, config) -> do
            foo config `shouldBe` 42
            return $ createStore []

    describe "check command" $ do
      it "allows to perform static checks on the store" $ do
        let run = withArgs ["check"] $
              runWithExitCode $ createStore $
                Target "t1" ["t2"] (return ()) Nothing :
                []
        run `shouldThrow` (\ (e :: ErrorCall) -> show e == "target dependencies cannot be found: t2")

    describe "run command" $ do
      it "fails when given a non-existing target" $ do
        (output, exitCode) <- hCapture [stderr] $ withArgs (words "run foo") $
          runWithExitCode (createStore [])
        exitCode `shouldBe` ExitFailure 70
        output `shouldContain` "target not found: foo"

      it "allow to run monitors as targets" $ do
        result <- capture_ $ withArgs (words "run monitor") $ runAsMain $ createStore $
          Target "target" []
            (error "target error")
            (Just (Monitor "monitor" [] (const (liftIO $ putStrLn "monitor output")))) :
          []
        result `shouldContain` "monitor output"

      context "when run target cancels" $ do
        let store = createStore [Target "foo" [] (cancel "some error") Nothing]
            run = withArgs ["run", "foo"] (runWithExitCode store)
        it "writes error message to stderr twice (once during execution and once in a summary at the end)" $ do
          (output, exitCode) <- hCapture [stderr] run
          exitCode `shouldSatisfy` (/= ExitSuccess)
          output `shouldBe` unlines [
              "INFO: execution plan:"
            , "    foo"
            , "INFO: running target foo"
            , "ERROR: foo:"
            , "    some error"
            , "INFO: "
            , "FAILURE"
            , "-------"
            , "foo:"
            , "  some error"
            , ""
            ]
        it "exits with ExitFailure 70 (internal software error)" $ do
          hSilence [stderr] run `shouldReturn` ExitFailure 70

      context "when having dependencies" $ do
        let store mvar = createStore $
              Target "t1" [] (append mvar "t1") Nothing :
              Target "t2" ["t1"] (append mvar "t2") Nothing :
              []
            run mvar = withArgs (words "run t2") (runAsMain $ store mvar)
        it "runs the dependencies first" $ do
          mvar :: MVar [String] <- newMVar []
          (hSilence [stderr] (run mvar) >> readMVar mvar) `shouldReturn` ["t1", "t2"]
        it "doesn't run dependencies when given -x" $ do
          mvar :: MVar [String] <- newMVar []
          withArgs (words "run t2 -x") $ runAsMain $ store mvar
          readMVar mvar `shouldReturn` ["t2"]
        it "doesn't run dependencies from priority dependencies" $ do
          mvar :: MVar [String] <- newMVar []
          let store mvar = createStoreWithPriorities ["t3", "t2"] $
                Target "t1" [] (append mvar "t1") Nothing :
                Target "t2" ["t1"] (append mvar "t2") Nothing :
                Target "t3" ["t1"] (append mvar "t3") Nothing :
                []
          withArgs (words "run t2") $ runAsMain $ store mvar
          readMVar mvar `shouldReturn` ["t1", "t2"]

      context "when given multiple targets with dependencies" $ do
        let store = createStore $
              Target "A" ["C"] (Log.info "executing A") Nothing :
              Target "B" ["C"] (Log.info "executing B") Nothing :
              Target "C" [] (Log.info "executing C") Nothing :
              []
            run :: IO String
            run = hCapture_ [stderr] $ withArgs (words "run A B") $ runAsMain store
        it "does execute every given target" $ do
          result <- run
          result `shouldContain` "executing A"
          result `shouldContain` "executing B"
        it "does not run one target multiple times" $ do
          result <- lines <$> run
          result `shouldSatisfy` (\ lines ->
            length (filter (== "INFO: executing C") lines) == 1)

      context "when one of multiple targets fails" $ do
        let targetList errorAction =
              Target "A" [] errorAction Nothing :
              Target "B" [] (Log.info "executing B") Nothing :
              Target "C" ["A", "B"] (Log.info "executing C") Nothing :
              []
            store = createStore . targetList
        it "runs all targets that don't depend on failing targets" $ do
          (output, exitCode) <- hCapture [stderr] $ withArgs (words "run C") $
            runWithExitCode $ store (cancel "error from A")
          exitCode `shouldBe` ExitFailure 70
          -- ensure "A" is run first
          output `shouldContain` unlines (
            "execution plan:" :
            "    A" :
            "    B" :
            "    C" :
            [])
          output `shouldContain` "executing B"
        it "runs all targets that don't depend on failing targets, with priorities in place" $ do
          let store = createStoreWithPriorities ["A", "B", "C"] . targetList
          (output, exitCode) <- hCapture [stderr] $ withArgs (words "run C") $
            runWithExitCode $ store (cancel "error from A")
          exitCode `shouldBe` ExitFailure 70
          -- ensure "A" is run first
          output `shouldContain` unlines (
            "execution plan:" :
            "    A" :
            "    B" :
            "    C" :
            [])
          output `shouldContain` "executing B"
        it "does not run targets that depend on failed targets" $ do
          output <- hCapture_ [stderr] $ withArgs (words "run C") $
            runWithExitCode $ store (cancel "error from A")
          output `shouldSatisfy` (not . ("executing C" `isInfixOf`))
        it "runs all targets that don't depend on failing targets even in case of exceptions" $ do
          output <- hCapture_ [stderr] $ withArgs (words "run C") $
            runWithExitCode $ store (error "error from A")
          output `shouldContain` "executing B"
        it "fails immediately after the first failing target when --fail-fast is given" $ do
          (output, exitCode) <- hCapture [stderr] $ withArgs (words "run C --fail-fast") $
            runWithExitCode $ store (cancel "error from A")
          output `shouldContain` "error from A"
          output `shouldSatisfy` (not . ("executing B" `isInfixOf`))
          exitCode `shouldBe` ExitFailure 70

      context "when having monitors" $ do
        let store :: MVar [String] -> TargetM () -> Store
            store mvar monitor = createStore $
              Target "t1" [] (append mvar "t1") (Just (Monitor "m1" [] (const monitor))) :
              []
        it "doesn't execute target when monitor runs successfully" $ do
          mvar <- newMVar []
          let run = withArgs ["run", "t1"] (runAsMain (store mvar (append mvar "m1")))
          hSilence [stderr] run
          readMVar mvar `shouldReturn` ["m1"]
        context "when the first run of the monitor complains" $ do
          it "re-runs the monitor after running the target" $ do
            mvar <- newMVar []
            let run = withArgs (words "run t1")
                  (runWithExitCode (store mvar (append mvar "m1" >> triggerTarget "m1 complains")))
            exitCode <- hSilence [stderr] run
            exitCode `shouldSatisfy` (/= ExitSuccess)
            readMVar mvar `shouldReturn` ["m1", "t1", "m1"]
          it "raises an error when the second run of the monitor complains" $ do
            mvar <- newMVar []
            let run = withArgs ["run", "t1"] (runWithExitCode (store mvar (triggerTarget "m1 complains")))
            hSilence [stderr] run `shouldReturn` ExitFailure 70
          it "runs successfully if the monitor does not complain the second time" $ do
            mvar <- newMVar []
            monitor <- stateDummy $
              (append mvar "m1.1" >> triggerTarget "m1 complains") :
              append mvar "m1.2" :
              []
            let run = withArgs (words "run t1") $ runAsMain $ store mvar monitor
            hSilence [stderr] run -- should not throw any exceptions
            readMVar mvar `shouldReturn` ["m1.1", "t1", "m1.2"]
          it "does not output the error message of the first monitor run like a normal error message" $ do
            mvar <- newMVar []
            monitor <- stateDummy $
              (triggerTarget "foo") :
              (return ()) :
              []
            output <- hCapture_ [stderr] $ withArgs (words "run t1") $ runAsMain $ store mvar monitor
            output `shouldSatisfy` (not . (showError (Error Nothing "foo") `isInfixOf`))
            output `shouldSatisfy` (not . (showError (Error (Just "m1") "foo") `isInfixOf`))
          it "does not run the target if the first run of the monitor throws an Exception (instead of abort)" $ do
            mvar <- newMVar []
            _ <- hSilence [stdout, stderr] $
              withArgs (words "run t1") $ runWithExitCode (store mvar (error "m1 raises an Exception"))
            readMVar mvar `shouldReturn` []
        it "doesn't execute monitors when called with --omit-monitors" $ do
          mvar <- newMVar []
          let run = withArgs ["run", "--omit-monitors", "t1"] (runAsMain (store mvar (append mvar "m1")))
          hSilence [stderr] run
          readMVar mvar `shouldReturn` ["t1"]

      context "--retry-on-failure" $ do
        it "reruns failing targets" $ do
          mvar :: MVar [String] <- newMVar []
          failingOnce <- mockStateful $
            const (append mvar "failing" >> cancel "cancel") :
            const (append mvar "success") :
            []
          let store = createStore $
                Target "t1" [] (failingOnce ()) Nothing :
                []
              run = withArgs (words "run --retry-on-failure t1") (runWithExitCode store)
          run `shouldReturn` ExitFailure 70
          readMVar mvar `shouldReturn` ["failing", "success"]
        it "does not retry indefinitely" $ do
          let store = createStore $
                Target "t1" [] (cancel "error") Nothing :
                []
              run = withArgs (words "run --retry-on-failure t1") (runWithExitCode store)
          run `shouldReturn` ExitFailure 70

      context "-e --exclude with multiple targets" $ do
        let store = createStore $
              Target "A" [] (Log.info "executing A") Nothing :
              Target "B" ["A"] (Log.info "executing B") Nothing :
              Target "C" ["B"] (Log.info "executing C") Nothing :
              Target "D" ["B"] (Log.info "executing D") Nothing :
              Target "E" ["D"] (Log.info "executing E") Nothing :
              []
            run :: IO String
            run = hCapture_ [stderr] $ withArgs (words "run C E -e D -e A") $ runAsMain store
        it "does execute all dependencies targets not in excluded list" $ do
          result <- run
          result `shouldContain` "executing B"
          result `shouldContain` "executing C"
          result `shouldContain` "executing E"
        it "does not execute excluded targets" $ do
          result <- lines <$> run
          result `shouldSatisfy` (\r -> not $ any (`elem` r) ["executing A", "executing D"])

      context "using addPriorities" $ do
        let store mvar priorities = createStoreWithPriorities priorities $
              Target "a" [] (append mvar "a") Nothing :
              Target "b" ["a"] (append mvar "b") Nothing :
              Target "c" ["a"] (append mvar "c") Nothing :
              Target "d" ["b", "c"] (append mvar "d") Nothing :
              []
            run :: [TargetName] -> IO [String]
            run priorities = do
              mvar <- newMVar []
              withArgs (words "run d") $ runAsMain $
                (store mvar priorities)
              readMVar mvar
        it "honors supplied priorities" $ do
          run ["b", "c"] `shouldReturn` words "a b c d"
          run ["c", "b"] `shouldReturn` words "a c b d"
          run ["a", "b", "c"] `shouldReturn` words "a b c d"
        it "reports cycles in case of impossible priorities" $ do
          run ["b", "a"] `shouldThrow` (\ (ErrorCall message) -> "cycles" `isInfixOf` message)

    describe "list command" $ do
      it "lists available targets" $ do
        withArgs ["list"] $ do
          let store = createStore [
                  Target "foo" [] (return ()) Nothing
                , Target "bar" [] (return ()) Nothing
                , Target "baz" [] (return ()) Nothing
                ]
          capture_ (runAsMain store) `shouldReturn` (unlines . sort) ["foo", "bar", "baz"]

    describe "dot command" $ do
      it "includes all nodes in dot output" $ do
        property $ \ (nub -> nodes) -> do
          withArgs ["dot"] $ do
            let store = createStore
                  (map (\ name -> Target name [] (return ()) Nothing) nodes)
            output <- capture_ (runAsMain store)
            output `shouldSatisfy` \ output ->
              all (\ (TargetName node) -> node `isInfixOf` output) nodes
      it "produces output target graph in dot format" $ do
        withArgs ["dot"] $ do
          let store = createStore [
                  Target "foo" ["bar"] (return ()) Nothing
                , Target "bar" [] (return ()) Nothing
                ]
          (unwords . words <$> capture_ (runAsMain store)) `shouldReturn` unwords [
              "digraph targets {"
            , "rankdir = RL;"
            , "\"bar\""
            , "[shape = oval];"
            , "\"foo\""
            , "[shape = oval];"
            , "\"foo\" -> \"bar\" [color = \"black\"];"
            , "}"
            ]
      context "when used with multiple prefix options" $ do
        it "removes the longest matching prefix" $ do
          withArgs ["dot", "-p", "prefix", "-p", "prefixlong"] $ do
            let store = createStore $
                  Target "prefix_t1" [] (return ()) Nothing :
                  Target "prefixlong_t2" [] (return ()) Nothing :
                  []
            output <- capture_ (runAsMain store)
            when ("long" `isInfixOf` output) $
              assertFailure ("long is in output:\n" ++ output)
        it "doesn't strip but abbreviates prefixes" $ do
          let store = createStore $
                Target "foobar.1" [] (return ()) Nothing :
                Target "fuubar.2" [] (return ()) Nothing :
                []
          output <- withArgs (words "dot -p foobar. -p fuubar.") $ capture_ (runAsMain store)
          output `shouldContain` "\"fo.1\""
          output `shouldContain` "\"fu.2\""
-- | Build an action that, on each invocation, runs the next action from
-- the given script.  The original used a partial lambda pattern inside
-- 'modifyMVar', which failed with an opaque pattern-match error once the
-- script was exhausted; make that failure explicit and descriptive.
stateDummy :: MonadIO m => [m a] -> IO (m a)
stateDummy actions = do
  ref <- newMVar actions
  return $ do
    next <- liftIO $ modifyMVar ref $ \ queue -> case queue of
      (a : rest) -> return (rest, a)
      [] -> error "stateDummy: no more scripted actions"
    next
-- | Append one element to the end of the list held in an 'MVar'.
append :: MonadIO m => MVar [a] -> a -> m ()
append box x = liftIO (modifyMVar_ box (\ xs -> return (xs ++ [x])))
-- | Generate non-empty lower-case alphabetic target names.
-- NOTE(review): shrinking goes through 'show', whose output includes more
-- than the raw name for a derived instance -- confirm this is intended.
instance Arbitrary TargetName where
  arbitrary = TargetName <$> listOf1 (elements ['a' .. 'z'])
  shrink = map TargetName . shrink . show
| zalora/kraken | test/KrakenSpec.hs | bsd-3-clause | 17,751 | 2 | 71 | 5,939 | 5,080 | 2,496 | 2,584 | 353 | 1 |
module HaskellEditor.Gui.Util
where
import qualified HaskQuery
import Control.Concurrent.STM
import HaskellEditor.Types
import Data.Typeable
import Data.Proxy
-- | Wrap a widget under the given name and add it to the shared widget
-- registry in a single STM transaction.
insertNamedWidget :: Typeable a => TVar(Widgets) -> String -> a -> IO ()
insertNamedWidget widgetTVar name widget =
  atomically $ do
    modifyTVar widgetTVar $ \currentWidget ->
      currentWidget {_widgets = HaskQuery.insert (_widgets currentWidget) $ namedDynamic name widget}
-- | Merge every widget from the given 'Widgets' collection into the
-- shared registry in a single STM transaction.
insertWidgets :: TVar(Widgets) -> Widgets -> IO ()
insertWidgets widgetTVar widgets = do
  atomically $ do
    modifyTVar widgetTVar (\currentWidget -> currentWidget {_widgets = HaskQuery.insertInto (_widgets currentWidget) (HaskQuery.select (_widgets widgets))})
-- | Read the current widget registry snapshot as a query continuation.
getWidgets :: TVar(Widgets) -> HaskQuery.Cont (b -> IO b) Widgets
getWidgets widgetTVar = HaskQuery.executeM $ readTVarIO widgetTVar
-- | Select widgets by name, yielding only entries whose dynamic payload
-- matches the type named by the proxy.
selectWidget :: Typeable a => Widgets -> String -> Proxy a -> (HaskQuery.Cont (b -> IO b) a)
selectWidget widgets name expectedType = do
  entry <- HaskQuery.selectM (_widgets widgets)
  HaskQuery.filterM (_identifier entry == name)
  HaskQuery.selectDynamicWithTypeM expectedType (_content entry)
-- | Like 'selectWidget', with the name and type proxy taken from a
-- 'WidgetRef'.
selectWidgetRef :: Typeable a => Widgets -> WidgetRef a -> (HaskQuery.Cont (b -> IO b) a)
selectWidgetRef widgets widgetRef = selectWidget widgets (_identifier widgetRef) (_content widgetRef)
-- | Register a widget in the shared registry under the name carried by
-- its 'WidgetRef'.
insertWidget :: Typeable a => WidgetRef a -> a -> TVar(Widgets) -> IO ()
insertWidget widgetRef widget widgetTVar =
  atomically $
    modifyTVar widgetTVar $ \currentWidgets ->
      currentWidgets
        { _widgets =
            HaskQuery.insert
              (_widgets currentWidgets)
              (namedDynamic (_identifier widgetRef) widget)
        }
| stevechy/haskellEditor | src/HaskellEditor/Gui/Util.hs | bsd-3-clause | 1,800 | 0 | 19 | 326 | 578 | 285 | 293 | 30 | 1 |
{-|
Module : Network.HTTP.Client.Request.Modifiers
Description : Convenient monadic HTTP request modifiers
Copyright : (c) 2014 Sean Leather
License : BSD3
Maintainer : sean.leather@gmail.com
Stability : experimental
Each of the functions in this module is a monadic request modifier, using the
'ReqMod' type. Most of them do not have any side effects; however, the
consistent use of 'Monad' allows for easy chaining with bind ('>>=') or Kleisli
composition ('>=>').
== Example
The example that inspired this package is modifying the 'Request' from
'parseUrl':
@
'parseUrl' \"http:\/\/httpbin.org\/post\"
>>= 'setMethod' 'POST'
>>= 'setBodyLBS' "hello"
@
Suppose I want to reuse the URL post request but not the body. I can define a
function for just that part:
@
let httpbinPost :: 'MonadThrow' m => m 'Request'
httpbinPost = 'parseUrl' \"http:\/\/httpbin.org\/post\" >>= 'setMethod' 'POST'
@
Alternative formulations of the above, without using request modifiers, are:
@
'parseUrl' \"http:\/\/httpbin.org\/post\"
>>= \req -> return $ req
{ 'method' = 'renderStdMethod' 'POST'
, 'requestBody' = 'RequestBodyLBS' "hello"
}
@
and
@
let httpbinPost :: 'MonadThrow' m => m 'Request'
httpbinPost = do req <- 'parseUrl' \"http:\/\/httpbin.org\/post\"
return $ req { 'method' = 'renderStdMethod' 'POST' }
@
== Benefits
The main benefits of monadic request modifiers are:
* composability,
* conciseness, and
* allowing an arbitrary combination of 'Monad's.
== Naming Scheme
The naming scheme used for functions in this module is:
* @set@ - Set a value, overriding any existing value.
* @add@ - Append a value to the end of a list and do not override any existing
values.
* @BS@ - Use a strict 'BS.ByteString' as a parameter.
* @LBS@ - Use a lazy 'BS.ByteString' as a parameter.
-}
module Network.HTTP.Client.Request.Modifiers (
-- * Request Modifier Type
ReqMod
-- * URI/URL
, setUri
, setUriRelative
-- * Query String
, setQueryBS
, setQuery
, addQuery
, addQueryPair
-- * Method
, setMethodBS
, setMethod
-- * Headers
, setHeaders
, setHeader
, addHeaders
, addHeader
, setContentTypeHeader
, setAcceptHeader
-- * Body
, setBody
, setBodyBS
, setBodyLBS
, setUrlEncodedBody
-- * Convenient Combinations
, setSimpleRequestBS
, setSimpleRequestLBS
) where
--------------------------------------------------------------------------------
import Control.Monad
import Control.Monad.Catch (MonadThrow)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as LBS
import Network.HTTP.Client
import qualified Network.HTTP.Client.Internal as I
import Network.HTTP.Media.MediaType
import Network.HTTP.Types
import Network.HTTP.Types.QueryLike
import Network.URI (URI)
--------------------------------------------------------------------------------
-- | Request modifier, abbreviated
--
-- Since 0.1
type ReqMod m = Request -> m Request

--------------------------------------------------------------------------------

-- | Validate and set the request URI.
--
-- Since 0.1
setUri :: MonadThrow m => URI -> ReqMod m
setUri uri req = I.setUri req uri

-- | Extend the request URI with a relative URI.
--
-- Since 0.1
setUriRelative :: MonadThrow m => URI -> ReqMod m
setUriRelative uri req = I.setUriRelative req uri

--------------------------------------------------------------------------------
-- | Set the query string with a strict 'BS.ByteString'.
--
-- Since 0.1
setQueryBS :: Monad m => BS.ByteString -> ReqMod m
setQueryBS q req = return updated
  where updated = req { queryString = q }

-- | Set the query string with a rendered 'Query'.
--
-- Since 0.1
setQuery :: (Monad m, QueryLike q) => q -> ReqMod m
setQuery q = setQueryBS (renderQuery True (toQuery q))

-- | Add a rendered 'Query' to the end of the query string.
--
-- Since 0.1
addQuery :: (Monad m, QueryLike q) => q -> ReqMod m
addQuery extra req = setQuery combined req
  where combined = parseQuery (queryString req) ++ toQuery extra

-- | Add a single query key/value pair to the end of the query string.
--
-- Since 0.1
addQueryPair :: (Monad m, QueryKeyLike k, QueryValueLike v) => k -> v -> ReqMod m
addQueryPair k v = addQuery [pair]
  where pair = (toQueryKey k, toQueryValue v)
--------------------------------------------------------------------------------

-- | Set the method with a strict 'BS.ByteString'.
--
-- See "Network.HTTP.Types.Method" for the methods, e.g. 'methodGet' or
-- 'methodPost'.
--
-- Since 0.1
setMethodBS :: Monad m => Method -> ReqMod m
setMethodBS m req = return updated
  where updated = req { method = m }

-- | Set the method with a standard method, e.g. 'GET' or 'POST'.
--
-- Since 0.1
setMethod :: Monad m => StdMethod -> ReqMod m
setMethod m = setMethodBS (renderStdMethod m)
--------------------------------------------------------------------------------

-- | Set the request headers.
--
-- Since 0.1
setHeaders :: Monad m => RequestHeaders -> ReqMod m
setHeaders hs req = return updated
  where updated = req { requestHeaders = hs }

-- | Set the request header by name, removing any other headers with the same
-- name.
--
-- Since 0.1
setHeader :: Monad m => HeaderName -> BS.ByteString -> ReqMod m
setHeader n v req = setHeaders (others ++ [(n, v)]) req
  where others = [h | h@(name, _) <- requestHeaders req, name /= n]

-- | Add headers to the request.
--
-- Since 0.1
addHeaders :: Monad m => RequestHeaders -> ReqMod m
addHeaders extra req = setHeaders (requestHeaders req ++ extra) req

-- | Add a single header.
--
-- Since 0.1
addHeader :: Monad m => HeaderName -> BS.ByteString -> ReqMod m
addHeader n v = addHeaders [(n, v)]

-- | Set the @Content-Type@ header with a 'MediaType'.
--
-- Since 0.1
setContentTypeHeader :: Monad m => MediaType -> ReqMod m
setContentTypeHeader mt = setHeader hContentType (toByteString mt)

-- | Set the @Accept@ header with a 'MediaType'.
--
-- Since 0.1
setAcceptHeader :: Monad m => MediaType -> ReqMod m
setAcceptHeader mt = setHeader hAccept (toByteString mt)
--------------------------------------------------------------------------------

-- | Set the request body.
--
-- Since 0.1
--
-- Signatures in this group now use the module's 'ReqMod' alias for
-- consistency with every other modifier; the alias expands to exactly the
-- previous @Request -> m Request@ type, so callers are unaffected.
setBody :: Monad m => RequestBody -> ReqMod m
setBody b req = return $ req { requestBody = b }

-- | Set the request body with a strict 'BS.ByteString'.
--
-- Since 0.1
setBodyBS :: Monad m => BS.ByteString -> ReqMod m
setBodyBS = setBody . RequestBodyBS

-- | Set the request body with a lazy 'LBS.ByteString'.
--
-- Since 0.1
setBodyLBS :: Monad m => LBS.ByteString -> ReqMod m
setBodyLBS = setBody . RequestBodyLBS

-- | Set the request body with URL-encoded key/value pairs, the method to
-- @POST@, and the @Content-Type@ to @application/x-www-form-urlencoded@.
--
-- Since 0.1
setUrlEncodedBody :: Monad m => [(BS.ByteString, BS.ByteString)] -> ReqMod m
setUrlEncodedBody b = return . urlEncodedBody b
--------------------------------------------------------------------------------

-- | Set the method, @Content-Type@, and strict 'BS.ByteString' body.
--
-- Since 0.1
setSimpleRequestBS :: Monad m => StdMethod -> MediaType -> BS.ByteString -> ReqMod m
setSimpleRequestBS m c b req =
  setMethod m req >>= setContentTypeHeader c >>= setBodyBS b

-- | Set the method, @Content-Type@, and lazy 'LBS.ByteString' body.
--
-- Since 0.1
setSimpleRequestLBS :: Monad m => StdMethod -> MediaType -> LBS.ByteString -> ReqMod m
setSimpleRequestLBS m c b req =
  setMethod m req >>= setContentTypeHeader c >>= setBodyLBS b
| spl/http-client-request-modifiers | lib/Network/HTTP/Client/Request/Modifiers.hs | bsd-3-clause | 7,387 | 0 | 11 | 1,302 | 1,140 | 640 | 500 | 74 | 1 |
-- |The "Geometry" module represents the geometry of the game world.
-- A real Mars rover would likely use real 3D geometry and trigonometry
-- but the game world 'Plateau' is more like a chess board. The implementation
-- here models the game world rather than 3D space.
module Geometry
where
-- |A 'RoverPos' is position of a rover: its location and heading
data RoverPos = RoverPos {
    roverLocation :: Location,
    roverHeading :: Heading
  } deriving (Show,Eq,Ord)

-- |A 'Location' is an x,y position. Just a type synonym for a pair of 'Int's
type Location = (Int,Int)

-- |A 'MovementVector' is an offset to a 'Location'.
-- Used to represent the distance travelled in a single step.
type MovementVector = Location

-- |A 'Heading' represents the compass direction the rover is facing
data Heading = N -- ^North
             | E -- ^East
             | S -- ^South
             | W -- ^West
  deriving (Show,Eq,Ord,Bounded,Enum)

-- |A 'Rotation' represents a rotation direction.
-- (Note that 'Left' and 'Right' mean something completely different in Haskell.)
--
-- The per-constructor docs below were previously swapped; per 'rotate',
-- AntiClockwise is a left turn (N -> W) and Clockwise a right turn (N -> E).
data Rotation = AntiClockwise -- ^rotation left
              | Clockwise -- ^rotation right
  deriving (Show,Eq,Ord,Bounded,Enum)
-- |Given a 'Heading' and a 'Rotation', returns the new 'Heading'.
-- A single clockwise step is tabulated; an anticlockwise step is three
-- clockwise steps (the compass has four points).
rotate :: Rotation -> Heading -> Heading
rotate dir heading =
  case dir of
    Clockwise     -> clockwiseOf heading
    AntiClockwise -> clockwiseOf (clockwiseOf (clockwiseOf heading))
  where
    clockwiseOf N = E
    clockwiseOf E = S
    clockwiseOf S = W
    clockwiseOf W = N
-- |Returns the x and y movement components for the given 'Heading'.
-- East is positive x and North is positive y.
vectorFromDirection :: Heading -> MovementVector
vectorFromDirection heading =
  case heading of
    N -> ( 0, 1)
    W -> (-1, 0)
    S -> ( 0,-1)
    E -> ( 1, 0)

-- |Add a 'MovementVector' to a 'Location', returning the new 'Location'.
moveLocation :: MovementVector -> Location -> Location
moveLocation (dx,dy) (x,y) = (dx+x, dy+y)

-- |Move a 'Location' in the given 'Heading': look up the heading's step
-- vector, then offset the location by it.
moveInDirection :: Heading -> Location -> Location
moveInDirection heading = moveLocation (vectorFromDirection heading)
-- |Move a 'RoverPos' forwards
-- moveForwards :: RoverPos -> RoverPos
-- moveForwards (RoverPos p h) = RoverPos p' h
--   where
--     p' = moveInDirection h p

-- | Construct a 'RoverPos' from a 'Location' and a 'Heading'.
-- (Alias for the 'RoverPos' record constructor.)
mkRover :: Location -> Heading -> RoverPos
mkRover = RoverPos
| garethrowlands/marsrover | src/Geometry.hs | bsd-3-clause | 2,501 | 0 | 8 | 512 | 410 | 241 | 169 | 35 | 1 |
{-# LANGUAGE FlexibleContexts, GeneralizedNewtypeDeriving #-}
-----------------------------------------------------------------------------
-- |
-- Module : Xmobar.Parsers
-- Copyright : (c) Andrea Rossato
-- License : BSD-style (see LICENSE)
--
-- Maintainer : Jose A. Ortega Ruiz <jao@gnu.org>
-- Stability : unstable
-- Portability : unportable
--
-- Parsers needed for Xmobar, a text based status bar
--
-----------------------------------------------------------------------------
module Parsers
( parseString
, parseTemplate
, parseConfig
) where
import Config
import Runnable
import Commands
import qualified Data.Map as Map
import Text.ParserCombinators.Parsec
import Text.ParserCombinators.Parsec.Perm
-- | Runs the string parser; on a parse failure the whole input is returned
-- as a single segment in the default foreground colour, tagged with an
-- error message.
parseString :: Config -> String -> IO [(String, String)]
parseString conf input = return (either (const failure) concat parsed)
  where
    parsed  = parse (stringParser (fgColor conf)) "" input
    failure = [("Could not parse string: " ++ input, fgColor conf)]
-- | Gets the string and combines the needed parsers: alternating plain-text
-- and colour-markup segments, until end of input.
stringParser :: String -> Parser [[(String, String)]]
stringParser c = manyTill (textParser c <|> colorParser) eof

-- | Parses a maximal string without color markup, tagging it with the
-- colour @c@. A literal '<' is accepted only when it does NOT start the
-- "<fc=" open tag or the "</fc>" close tag; 'notFollowedBy'' keeps the '<'
-- itself while rejecting those two follow-ups.
textParser :: String -> Parser [(String, String)]
textParser c = do
  s <- many1 $
         noneOf "<" <|>
         ( try $ notFollowedBy' (char '<')
                 (string "fc=" <|> string "/fc>" ) )
  return [(s, c)]
-- | Wrapper for 'notFollowedBy' that returns the result of the first parser.
-- Also works around the issue that, at least in Parsec 3.0.0, 'notFollowedBy'
-- accepts only parsers with return type 'Char' (hence the @return '*'@).
notFollowedBy' :: Parser a -> Parser b -> Parser a
notFollowedBy' p e = do
  result <- p
  notFollowedBy (try (e >> return '*'))
  return result
-- | Parses a string wrapped in a colour specification:
-- @\<fc=COLOUR\>...\</fc\>@, recursing so colour regions may nest.
colorParser :: Parser [(String, String)]
colorParser = do
  colour <- between (string "<fc=") (string ">") colors
  pieces <- manyTill (textParser colour <|> colorParser) (try (string "</fc>"))
  return (concat pieces)

-- | Parses a colour specification (hex or named, possibly comma-separated).
colors :: Parser String
colors = many1 (char '#' <|> char ',' <|> alphaNum)
-- | Parses one segment of the output template string:
-- leading text, a command between separator characters, trailing text.
templateStringParser :: Config -> Parser (String,String,String)
templateStringParser conf = do
  before  <- allTillSep conf
  command <- templateCommandParser conf
  after   <- allTillSep conf
  return (command, before, after)

-- | Parses the command part of the template string, delimited on both
-- sides by the (first character of the) configured separator.
templateCommandParser :: Config -> Parser String
templateCommandParser conf = between sep sep (allTillSep conf)
  where sep = char (head (sepChar conf))

-- | Combines the template parsers: zero or more segments.
templateParser :: Config -> Parser [(String,String,String)]
templateParser conf = many (templateStringParser conf)
-- | Actually runs the template parsers, then resolves each parsed command
-- name against the configured commands via 'combine'.
parseTemplate :: Config -> String -> IO [(Runnable,String,String)]
parseTemplate conf s = return (combine conf table segments)
  where
    segments = either (const [("", s, "")]) id
                      (parse (templateParser conf) "" s)
    aliases  = map alias (commands conf)
    table    = Map.fromList (zip aliases (commands conf))
-- | Given a finite "Map" and a parsed template produce the resulting
-- output string: each command name is resolved to its 'Runnable', falling
-- back to a plain 'Com' (refresh rate 10) when the name is unknown.
combine :: Config -> Map.Map String Runnable -> [(String, String, String)] -> [(Runnable,String,String)]
combine _ m = map resolve
  where
    resolve (name, before, after) =
      (Map.findWithDefault (Run (Com name [] [] 10)) name m, before, after)

allTillSep :: Config -> Parser String
allTillSep conf = many (noneOf (sepChar conf))
-- | Strip @--@ line comments from the config text, while leaving @--@
-- intact inside double-quoted string literals (the boolean @m@ tracks the
-- inside-a-string state; backslash escapes are skipped so @\"@ does not
-- toggle it).
--
-- NOTE(review): each line is padded with five spaces before stripping and
-- the pad is dropped afterwards — presumably to protect the start-of-line
-- cases; confirm before changing.
stripComments :: String -> String
stripComments = unlines . map (drop 5 . strip False . (replicate 5 ' '++)) . lines
  where
    -- Outside a string, "--" kills the rest of the line; inside, keep it.
    strip m ('-':'-':xs) = if m then "--" ++ strip m xs else ""
    -- Consume escape sequences so an escaped quote doesn't flip the state.
    strip m ('\\':xss) = case xss of
                           '\\':xs -> '\\' : strip m xs
                           _ -> strip m $ drop 1 xss
    -- A bare quote toggles the inside-string state.
    strip m ('"':xs) = '"': strip (not m) xs
    strip m (x:xs) = x : strip m xs
    strip _ [] = []
-- | Parse the config, logging a list of fields that were missing and replaced
-- by the default definition.
--
-- The parser's user state is the list of field names not yet seen; 'field'
-- removes each name as it parses it, so whatever remains in the state at
-- 'eof' is the list of missing fields returned alongside the 'Config'.
parseConfig :: String -> Either ParseError (Config,[String])
parseConfig = runParser parseConf fields "Config" . stripComments
  where
    -- Top level: optional whitespace, "Config {", the permuted fields, EOF.
    parseConf = do
      many space
      sepEndSpc ["Config","{"]
      x <- perms
      eof
      s <- getState
      return (x,s)
    -- All fields are optional and may appear in any order; each pairs a
    -- default (from 'defaultConfig') with its parser via 'field'.
    perms = permute $ Config
            <$?> pFont <|?> pBgColor
            <|?> pFgColor <|?> pPosition
            <|?> pDock
            <|?> pBorder <|?> pBdColor
            <|?> pHideOnStart <|?> pLowerOnStart
            <|?> pPersistent <|?> pCommands
            <|?> pSepChar <|?> pAlignSep
            <|?> pTemplate
    -- Initial user state: every recognised field name.
    fields = [ "font", "bgColor", "fgColor", "sepChar", "alignSep"
             , "border", "borderColor" ,"template", "position", "dock"
             , "hideOnStart", "lowerOnStart", "persistent", "commands"
             ]
    pFont = strField font "font"
    pBgColor = strField bgColor "bgColor"
    pFgColor = strField fgColor "fgColor"
    pBdColor = strField borderColor "borderColor"
    pSepChar = strField sepChar "sepChar"
    pAlignSep = strField alignSep "alignSep"
    pTemplate = strField template "template"
    pPosition = field position "position" $ tillFieldEnd >>= read' "position"
    pDock = field dock "dock" $ tillFieldEnd >>= read' "dock"
    pHideOnStart = field hideOnStart "hideOnStart" $ tillFieldEnd >>= read' "hideOnStart"
    pLowerOnStart = field lowerOnStart "lowerOnStart" $ tillFieldEnd >>= read' "lowerOnStart"
    pPersistent = field persistent "persistent" $ tillFieldEnd >>= read' "persistent"
    pBorder = field border "border" $ tillFieldEnd >>= read' "border"
    pCommands = field commands "commands" $ readCommands
    -- "Static { ... }" position values contain '}' and ',', so they get
    -- special handling before the generic 'tillFieldEnd'.
    staticPos = do string "Static"
                   wrapSkip (string "{")
                   p <- many (noneOf "}")
                   wrapSkip (string "}")
                   string ","
                   return ("Static {" ++ p ++ "}")
    tillFieldEnd = staticPos <|> many (noneOf ",}\n\r")
    -- End of the commands list: "]" followed by either the closing "}" or
    -- a comma that does NOT introduce another "Run".
    commandsEnd = wrapSkip (string "]") >> (string "}" <|> notNextRun)
    notNextRun = do { string ","; notFollowedBy $ wrapSkip $ string "Run"; return ","}
    -- Re-append "]" (consumed by 'commandsEnd') before 'read'-ing the list.
    readCommands = manyTill anyChar (try commandsEnd) >>= read' commandsErr . flip (++) "]"
    strField e n = field e n . between (strDel "start" n) (strDel "end" n) . many $ noneOf "\"\n\r"
    strDel t n = char '"' <?> strErr t n
    strErr t n = "the " ++ t ++ " of the string field " ++ n ++ " - a double quote (\")."
    wrapSkip x = many space >> x >>= \r -> many space >> return r
    sepEndSpc = mapM_ (wrapSkip . try . string)
    fieldEnd = many $ space <|> oneOf ",}"
    -- Parse one "name = value" field: record the default, drop the name
    -- from the missing-fields state, then run the value parser @c@.
    field e n c = (,) (e defaultConfig) $
                  updateState (filter (/= n)) >> sepEndSpc [n,"="] >>
                  wrapSkip c >>= \r -> fieldEnd >> return r
    -- 'reads'-based parse with a field-specific failure message.
    read' d s = case reads s of
                  [(x, _)] -> return x
                  _ -> fail $ "error reading the " ++ d ++ " field: " ++ s
-- | Error text used when the @commands@ field of the config cannot be
-- 'read'. Fixes the "begining" typo in the user-facing message.
commandsErr :: String
commandsErr = "commands: this usually means that a command could not be parsed.\n" ++
              "The error could be located at the beginning of the command which follows the offending one."
| raboof/xmobar | src/Parsers.hs | bsd-3-clause | 7,668 | 0 | 21 | 2,264 | 2,150 | 1,101 | 1,049 | 131 | 7 |
{-# LANGUAGE FlexibleContexts #-}
module DimensionalVector
( Scalar
, Vector
, directionV
, mulSV
, divVS
, magV
, normaliseV
, normZV
, rotateV
, rotate90ccw
, dotV
, addV
, mapV
, (|+|)
, (*|)
, (|*)
, (|/)
, cross
, atan2V
) where
import qualified Prelude as P
import Numeric.Units.Dimensional.Prelude
import Numeric.Units.Dimensional
import Numeric.NumType(Pos2)
-- | A scalar quantity of dimension @d@, backed by 'Double'.
type Scalar d = Quantity d Double

-- | A 2D vector: a pair of scalars of the same dimension.
type Vector d = (Scalar d, Scalar d)

infixl 7 `mulSV`
infixl 6 `addV`

-- | Apply a function to both components of a pair.
-- (Signature added; it is the exact previously-inferred type, so all
-- existing uses — including dimension-changing maps — still check.)
mapV :: (a -> b) -> (a, a) -> (b, b)
mapV f (x, y) = (f x, f y)
-- | Dimensionless unit vector at the given angle: the x unit vector
-- rotated by @dir@.
directionV :: Scalar DOne -> Vector DOne
directionV dir = rotateV dir unitX
  where unitX = (1 *~ one, 0 *~ one)

-- | Multiply a vector by a scalar (scalar on the left).
mulSV :: Mul a b c => Scalar a -> Vector b -> Vector c
mulSV c v = mapV (c *) v

-- | Divide a vector by a scalar.
divVS :: Div a b c => Vector a -> Scalar b -> Vector c
divVS v d = mapV (/ d) v
-- | Euclidean magnitude of a vector.
magV :: (Mul d d sq, Root sq Pos2 d, Floating a) => (Quantity d a, Quantity d a) -> Quantity d a
magV (x, y) = sqrt $ x * x + y * y

-- | Scale a vector to unit length (yielding a dimensionless direction).
normaliseV :: (Mul d d sq, Root sq Pos2 d, Div d d one) => Vector d -> Vector one
normaliseV l = l `divVS` magV l

-- | Like 'normaliseV', but yields the zero vector for (near-)zero input
-- instead of dividing by zero.
-- NOTE(review): builds the raw threshold/zero values with the 'Dimensional'
-- constructor directly, bypassing unit-checked construction — confirm this
-- is intentional before changing.
normZV :: (Mul d d sq, Root sq Pos2 d, Div d d one) => Vector d -> Vector one
normZV v | magV v < Dimensional 1e-10 = (Dimensional 0,Dimensional 0)
         | otherwise = normaliseV v
-- | Rotate a vector anticlockwise by the given dimensionless angle.
rotateV :: Mul d DOne d => Scalar DOne -> Vector d -> Vector d
rotateV angle (vx, vy) = (vx * c - vy * s, vy * c + vx * s)
  where
    c = cos angle
    s = sin angle

-- | Rotate a vector by a quarter turn counterclockwise.
rotate90ccw :: Vector d -> Vector d
rotate90ccw (vx, vy) = (negate vy, vx)

-- | Dot product of two vectors.
dotV (ax, ay) (bx, by) = ax * bx + ay * by

-- | Component-wise vector addition.
addV :: Vector a -> Vector a -> Vector a
addV (ax, ay) (bx, by) = (ax + bx, ay + by)

-- | Angle of a vector, via 'atan2' of its y and x components.
atan2V (vx, vy) = atan2 vy vx
infixl 6 |+|
infixl 7 *|, |/, |*

-- | Vector addition operator; synonym for 'addV'.
(|+|) = addV

-- | Vector times scalar (scalar on the right).
(|*) :: Mul b a c => Vector b -> Scalar a -> Vector c
v |* c = mapV (\component -> component * c) v

-- | Scalar times vector (scalar on the left); synonym for 'mulSV'.
(*|) :: Mul a b c => Scalar a -> Vector b -> Vector c
s *| v = mulSV s v

-- | Vector divided by scalar; synonym for 'divVS'.
(|/) :: Div a b c => Vector a -> Scalar b -> Vector c
v |/ s = divVS v s

-- | 2D cross product (the z-component of the 3D cross product).
cross :: Mul a b c => Vector a -> Vector b -> Scalar c
cross (ax, ay) (bx, by) = ax * by - ay * bx
| Rotsor/wheeled-vehicle | DimensionalVector.hs | bsd-3-clause | 1,971 | 0 | 9 | 533 | 1,041 | 555 | 486 | 62 | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.