code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
module Raskell.Parser.Whitespace
( whitespace
, lexeme
) where
import Text.Parsec (many)
import Text.Parsec.String (Parser)
import Text.Parsec.Char
import Control.Monad (void)
import Control.Applicative ((<*))
-- | Skip any run of horizontal whitespace (spaces and tabs only;
-- newlines are left for the caller to handle).
whitespace :: Parser ()
whitespace = void (many (oneOf " \t"))
-- | Run a parser and then consume any trailing horizontal whitespace.
lexeme :: Parser a -> Parser a
lexeme = (<* whitespace)
| joshsz/raskell | src/Raskell/Parser/Whitespace.hs | mit | 334 | 0 | 6 | 51 | 117 | 67 | 50 | 12 | 1 |
module Checker where
import Syntax
import Data.Monoid hiding (Sum)
import Control.Monad
import qualified Data.List as List
-- | Runtime values: a literal, or a closure that captures the typing
-- 'Scope' it was built in.  ('Lit', 'Name', 'Type', 'Expr' come from
-- the Syntax module.)
data Value = Literal Lit
           | Closure Name Type Expr Scope
  deriving (Eq, Show)

-- | Typing environment: variable names mapped to their types.
type Scope = [(Name, Type)]
-- | Type-check a closed expression, elaborating it into a tree whose
-- nodes are annotated with their types.  'Left' carries the error
-- message.  ('Result', 'Elab', 'EIn' and 'out' are defined in the
-- Syntax module — assumed here; 'EIn' pairs a term with its type.)
check :: Expr -> Result (Elab Type)
check = check' []
  where
    check' :: Scope -> Expr -> Result (Elab Type)
    check' ctx expr = case out expr of
      -- A variable must be bound in the context.
      Var n -> maybe (Left ("free variable " <> n)) (Right . EIn (Var n)) (List.lookup n ctx)
      -- Literals are typed by their shape.
      Lit l -> Right $ EIn (Lit l) (case l of
        LInt _ -> TInt
        LBool _ -> TBool)
      Pair a b -> do
        f@(EIn _ fstTy) <- check' ctx a
        s@(EIn _ sndTy) <- check' ctx b
        pure (EIn (Pair f s) (TPair fstTy sndTy))
      -- Projections require the scrutinee to have a pair type.
      Fst a -> do
        p@(EIn tm ty) <- check' ctx a
        case ty of
          TPair ty1 _ -> pure (EIn (Fst p) ty1)
          _ -> Left $ "fst expected pair but got " <> show ty
      Snd a -> do
        p@(EIn tm ty) <- check' ctx a
        case ty of
          TPair _ ty2 -> pure (EIn (Snd p) ty2)
          _ -> Left $ "snd expected pair but got " <> show ty
      -- Injections must be annotated with a full sum type; the payload
      -- is checked against the corresponding component.
      InL l (TSum t1 t2) -> do
        lE@(EIn tm ty) <- check' ctx l
        unless (ty == t1) (Left ("expected " <> show t1 <> " but got " <> show ty))
        let t' = TSum ty t2 in pure (EIn (InL lE t') t')
      InL{} -> Left "left must be constructed with a sum type"
      InR r (TSum t1 t2) -> do
        rE@(EIn tm ty) <- check' ctx r
        unless (ty == t2) (Left ("expected " <> show t2 <> " but got " <> show ty))
        let t' = TSum t1 ty in pure (EIn (InR rE t') t')
      InR{} -> Left "right must be constructed with a sum type"
      -- case: both branches must be functions whose domains match the
      -- components of the scrutinee's sum type and whose codomains agree.
      Case e l r -> do
        leftE@(EIn _ fnTyL) <- check' ctx l
        rightE@(EIn _ fnTyR) <- check' ctx r
        caseE@(EIn tm scrutinee) <- check' ctx e
        case (fnTyL, fnTyR, scrutinee) of
          (TArr inTyL outTyL, TArr inTyR outTyR, TSum t1 t2)
            | outTyL == outTyR
            , inTyL == t1
            , inTyR == t2 -> Right (EIn (Case caseE leftE rightE) outTyL)
            | otherwise -> Left $ "nope " <> show scrutinee <> ". left is " <> show inTyL <> " -> " <> show outTyL
                                <> ". right is " <> show inTyR <> " -> " <> show outTyR <> "."
          (TArr inTyL outTyL, TArr inTyR outTyR, t) -> Left "don't know how to handle case with non-sum types"
          _ -> Left "both case patterns must be lambdas"
      -- Application: the argument type must equal the function's domain.
      App fn arg -> do
        fnE@(EIn fnTm fnTy) <- check' ctx fn
        argE@(EIn argTm argTy) <- check' ctx arg
        case fnTy of
          TArr inTy outTy | inTy == argTy -> Right (EIn (App fnE argE) outTy)
                          | otherwise -> Left $ "expected type " <> show inTy <> " but got " <> show argTy
          _ -> Left "expected a function type"
      -- Lambda: check the body with the parameter in scope.
      Lam n ty body -> do
        bodyE@(EIn _ retTy) <- check' ((n, ty) : ctx) body
        pure (EIn (Lam n ty bodyE) (TArr ty retTy))
-- Then, think about, study:
-- type inference
-- unification, hindley-milner (algorithm m, algorithm w)
-- Γ, _n_ : t |- Var n : t
-- Γ |- \ n : t -> e
-- -----------------
-- Γ, n : t |- e
| tclem/lilo | src/Checker.hs | mit | 3,159 | 0 | 26 | 1,090 | 1,287 | 624 | 663 | 64 | 18 |
{-# LANGUAGE DeriveDataTypeable #-}
{-
Copyright (C) 2012-2017 Kacper Bak, Jimmy Liang <http://gsd.uwaterloo.ca>
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
-}
{- | Command Line Arguments of the compiler.
See also <http://t3-necsis.cs.uwaterloo.ca:8091/ClaferTools/CommandLineArguments a model of the arguments in Clafer>, including constraints and examples.
-}
module Language.Clafer.ClaferArgs where
import System.Console.CmdArgs
import System.Console.CmdArgs.Explicit hiding (mode)
import Data.List
import Language.Clafer.SplitJoin
import Paths_clafer (version)
import Data.Version (showVersion)
import GetURL
-- | Type of output to be generated at the end of compilation
-- | Type of output to be generated at the end of compilation
data ClaferMode = Alloy | JSON | Clafer | Html | Graph | CVLGraph | Choco
  deriving (Eq, Show, Ord, Data, Typeable)

instance Default ClaferMode where
  def = Alloy

-- | Scope inference strategy
data ScopeStrategy = None | Simple
  deriving (Eq, Show, Data, Typeable)

instance Default ScopeStrategy where
  def = Simple

-- | All command-line options of the compiler.  Field names double as
-- cmdargs flag names (underscores become dashes on the command line).
data ClaferArgs = ClaferArgs {
  mode :: [ ClaferMode ],             -- output modes, possibly several
  console_output :: Bool,
  flatten_inheritance :: Bool,
  timeout_analysis :: Int,
  no_layout :: Bool,
  new_layout :: Bool,
  check_duplicates :: Bool,
  skip_resolver :: Bool,
  keep_unused :: Bool,
  no_stats :: Bool,
  validate :: Bool,
  tooldir :: FilePath,                -- where alloy4.2.jar etc. live
  alloy_mapping :: Bool,
  self_contained :: Bool,
  add_graph :: Bool,
  show_references :: Bool,
  add_comments :: Bool,
  ecore2clafer :: Bool,
  scope_strategy :: ScopeStrategy,
  afm :: Bool,
  meta_data :: Bool,
  file :: FilePath                    -- input model; "" means stdin
} deriving (Eq, Show, Data, Typeable)
-- | The cmdargs specification: annotates every 'ClaferArgs' field with
-- its help text and short flag name.  This is the record 'cmdArgs' and
-- 'cmdArgsMode' parse against.
clafer :: ClaferArgs
clafer = ClaferArgs {
  mode = [] &= help "Generated output type. Available CLAFERMODEs are: 'alloy' (default, Alloy 4.2); 'json' (intermediate representation of Clafer model); 'clafer' (analyzed and desugared clafer model); 'html' (original model in HTML); 'graph' (graphical representation written in DOT language); 'cvlgraph' (cvl notation representation written in DOT language); 'choco' (Choco constraint programming solver). Multiple modes can be specified at the same time, e.g., '-m alloy -m html'." &= name "m",
  console_output = def &= help "Output code on console." &= name "o",
  flatten_inheritance = def &= help "Flatten inheritance ('alloy' mode only)." &= name "i",
  timeout_analysis = def &= help "Timeout for analysis.",
  no_layout = def &= help "Don't resolve off-side rule layout." &= name "l",
  new_layout = def &= help "Use new fast layout resolver (experimental)." &= name "nl",
  check_duplicates = def &= help "Check duplicated clafer names in the entire model." &= name "c",
  skip_resolver = def &= help "Skip name resolution." &= name "f",
  keep_unused = def &= help "Keep uninstantated abstract clafers ('alloy' mode only)." &= name "k",
  no_stats = def &= help "Don't print statistics." &= name "s",
  validate = def &= help "Validate outputs of all modes. Uses '<tooldir>/alloy4.2.jar' for Alloy models, '<tooldir>/chocosolver.jar' for Alloy models, and Clafer translator for desugared Clafer models. Use '--tooldir' to override the default location ('.') of these tools." &= name "v",
  tooldir = "." &= typDir &= help "Specify the tools directory ('validate' only). Default: '.' (current directory).",
  alloy_mapping = def &= help "Generate mapping to Alloy source code ('alloy' mode only)." &= name "a",
  self_contained = def &= help "Generate a self-contained html document ('html' mode only).",
  add_graph = def &= help "Add a graph to the generated html model ('html' mode only). Requires the \"dot\" executable to be on the system path.",
  show_references = def &= help "Whether the links for references should be rendered. ('html' and 'graph' modes only)." &= name "sr",
  add_comments = def &= help "Include comments from the source file in the html output ('html' mode only).",
  ecore2clafer = def &= help "Translate an ECore model into Clafer.",
  scope_strategy = def &= help "Use scope computation strategy: none or simple (default)." &= name "ss",
  afm = def &= help "Throws an error if the cardinality of any of the clafers is above 1." &= name "check-afm",
  meta_data = def &= help "Generate a 'fully qualified name'-'least-partially-qualified name'-'unique ID' map ('.cfr-map'). In Alloy and Choco modes, generate the scopes map ('.cfr-scope').",
  file = def &= args &= typ "FILE"
} &= summary ("Clafer " ++ showVersion Paths_clafer.version) &= program "clafer"
-- | Merge two 'ClaferArgs' field by field.  @a1@ (command line) takes
-- precedence; a field falls back to @a2@ only when the @a1@ value is
-- still the 'Default' for its type.  'mode' always comes from @a1@;
-- 'console_output' and 'tooldir' have special-case rules below.
mergeArgs :: ClaferArgs -> ClaferArgs -> ClaferArgs
mergeArgs a1 a2 = ClaferArgs (mode a1) coMergeArg
    (mergeArg flatten_inheritance) (mergeArg timeout_analysis)
    (mergeArg no_layout) (mergeArg new_layout)
    (mergeArg check_duplicates) (mergeArg skip_resolver)
    (mergeArg keep_unused) (mergeArg no_stats)
    (mergeArg validate) toolMergeArg
    (mergeArg alloy_mapping) (mergeArg self_contained)
    (mergeArg add_graph) (mergeArg show_references)
    (mergeArg add_comments) (mergeArg ecore2clafer)
    (mergeArg scope_strategy) (mergeArg afm)
    (mergeArg meta_data) (mergeArg file)
  where
    -- Console output is on when either side asked for it, or when no
    -- input file was given (the model is then read from stdin).
    coMergeArg :: Bool
    coMergeArg = console_output a1 || console_output a2 || null (file a1)
    -- First non-empty tooldir wins; "/tools" is the last resort.
    toolMergeArg :: String
    toolMergeArg
      | tooldir a1 /= "" = tooldir a1
      | tooldir a2 /= "" = tooldir a2
      | otherwise        = "/tools"
    -- Generic fallback: keep a1's value unless it is still the default.
    mergeArg :: (Default a, Eq a) => (ClaferArgs -> a) -> a
    mergeArg f = let v = f a1 in if v /= def then v else f a2
-- | Parse command-line arguments, fetch the model text (file, URL or
-- stdin), fold in any "//# OPTIONS" line embedded in the model, and
-- default the mode to Alloy when none was specified.
mainArgs :: IO (ClaferArgs, String)
mainArgs = do
  argsFromCmd <- cmdArgs clafer
  model <- retrieveModelFromURL $ file argsFromCmd
  let argsWithOpts = argsWithOPTIONS argsFromCmd model
  -- Alloy should be the default mode but only if nothing else was specified
  -- cannot use [ Alloy ] as the default in the definition of `clafer :: ClaferArgs` since
  -- Alloy will always be a mode in addition to the other specified modes (it will become mandatory)
  let argsWithDef = if null $ mode argsWithOpts
                      then argsWithOpts{mode = [ Alloy ]}
                      else argsWithOpts
  return (argsWithDef, model)
-- | Fetch the model text named by @url@: stdin for the empty string,
-- a local file for @file://@ paths and bare paths, and 'getURL' for
-- @http://@ and @ftp://@ addresses.
retrieveModelFromURL :: String -> IO String
retrieveModelFromURL url
  | null url                               = getContents -- pre-module system behavior
  | Just path <- stripPrefix "file://" url = readFile path
  | "http://" `isPrefixOf` url             = getURL url
  | "ftp://" `isPrefixOf` url              = getURL url
  | otherwise                              = readFile url -- pre-module system behavior
-- | If the model's first line is a "//# OPTIONS ..." directive, parse
-- the rest of that line as extra command-line arguments and merge them
-- in (actual command-line arguments take precedence).  On any parse
-- failure the original arguments are returned unchanged.
argsWithOPTIONS :: ClaferArgs -> String -> ClaferArgs
argsWithOPTIONS args' model =
  if "//# OPTIONS " `isPrefixOf` model
    then either (const args') (mergeArgs args' . cmdArgsValue) $ -- merge wth command line arguments, which take precedence
           process (cmdArgsMode clafer) $        -- instantiate ClaferArgs record
           Language.Clafer.SplitJoin.splitArgs $ -- extract individual arguments
           drop 12 $                             -- strip "//# OPTIONS " (12 characters)
           takeWhile (/= '\n') model             -- get first line
    else args'
-- | Defaults used by API callers (as opposed to the cmdargs defaults
-- used on the command line): Alloy mode, console output on, simple
-- scope strategy, everything else off.
defaultClaferArgs :: ClaferArgs
defaultClaferArgs = ClaferArgs
  { mode = [ def ]
  , console_output = True
  , flatten_inheritance = False
  , timeout_analysis = 0
  , no_layout = False
  , new_layout = False
  , check_duplicates = False
  , skip_resolver = False
  , keep_unused = False
  , no_stats = False
  , validate = False
  , tooldir = "."
  , alloy_mapping = False
  , self_contained = False
  , add_graph = False
  , show_references = False
  , add_comments = False
  , ecore2clafer = False
  , scope_strategy = Simple
  , afm = False
  , meta_data = False
  , file = ""
  }
| juodaspaulius/clafer | src/Language/Clafer/ClaferArgs.hs | mit | 8,888 | 0 | 15 | 1,943 | 1,643 | 907 | 736 | 138 | 6 |
import Linear
import Linear.V
import Data.Vector
-- | The 2x2 identity matrix, built explicitly from row vectors.
-- NOTE(review): this shadows Linear's own 'identity', which the
-- unqualified @import Linear@ also brings into scope — confirm this
-- compiles without an ambiguous-occurrence error.
identity :: V 2 (V 2 Float)
identity = V (fromList [V (fromList [1, 0]), V (fromList [0, 1])])

-- | Multiplying the identity by itself yields the identity again.
calculation :: V 2 (V 2 Float)
calculation = identity !*! identity
-- Lucas numbers
-- http://www.codewars.com/kata/55a7de09273f6652b200002e/
module Codewars.Exercise.Lucas where
-- | n-th Lucas number, defined for negative indices as well via the
-- identity L(-n) = (-1)^n * L(n).  Computed from the Fibonacci numbers
-- using L(n) = F(n) + 2*F(n-1), with F extended to negative indices.
lucasnum :: Int -> Integer
lucasnum n = sign * (fibS m + 2 * fibS (m - 1))
  where
    m = abs n
    sign = if n < 0 then (-1) ^ m else 1
    -- Fibonacci memoised through a lazily built list.
    fib = (map fibS [0 ..] !!)
    fibS 0 = 0
    fibS 1 = 1
    fibS k
      | k > 0     = fib (k - 2) + fib (k - 1)
      | otherwise = (-1) ^ (k + 1) * fibS (abs k)
| gafiatulin/codewars | src/6 kyu/Lucas.hs | mit | 421 | 0 | 11 | 133 | 205 | 109 | 96 | 9 | 4 |
-- 'filter', applied to a predicate and a list,
-- returns the list of those elements that
-- satisfy the predicate, i.e.
-- > filter p xs = [ x | x <- xs, p x]
-- Keep elements satisfying the predicate; shadows Prelude's filter on
-- purpose (this file re-implements it for demonstration).
filter :: (a -> Bool) -> [a] -> [a]
filter _ [] = []
filter keep (x : rest)
  | keep x    = x : filter keep rest
  | otherwise = filter keep rest
import Data.List
import Test.QuickCheck
import Control.Monad
{-
Definim un tip de date pentru a reprezenta o rețea de mulțimi de puncte în plan:
-}
type Punct = (Int,Int)
data Multime = X
| Y
| DX Int Multime
| DY Int Multime
| U Multime Multime
{-
Planul este centrat în punctul (0,0). Prima coordonată a unui punct (coordonata x) reprezintă distanța pe orizontală de la origine iar a doua (coordonata y) reprezintă distanța pe verticală. Prin convenție, coordonatele x cresc spre dreapta, în timp ce coordonatele y cresc în sus.
Constructorul X reprezintă mulțimea punctelor de pe axa x, adică punctele care au coordonata y zero:
... (-2,0) (-1,0) (0,0) (1,0) (2,0) ...
Constructorul Y reprezintă mulțimea punctelor de pe axa y, adică punctele care au coordonata x zero:
... (0,-2) (0,-1) (0,0) (0,1) (0,2) ...
Constructorul DX dx p, unde dx este un întreg și p e o mulțime de puncte, reprezintă punctele p deplasate la dreapta cu dx. De exemplu, DX 2 Y are ca rezultat axa y deplasată cu două unități la dreapta:
... (2,-2) (2,-1) (2,0) (2,1) (2,2) ...
Observație 1: DX dx X și X denotă aceeași mulțime de puncte, deoarece prin deplasarea axei x pe orizontală se obține tot axa x.
Observație 2: Un punct (x,y) aparține lui DX dx p dacă și numai dacă punctul (x-dx,y) aparține lui p.
Constructorul DY dy p, unde dy este un întreg și p e o mulțime de puncte, reprezintă punctele p deplasate în sus cu dx. De exemplu, DY 3 X are ca rezultat axa X deplasată cu două unități în sus:
... (-2,3) (-1,3) (0,3) (1,3) (2,3) ...
Observație 1: DY dy Y și Y denotă aceeași mulțime de puncte, deoarece prin deplasarea axei y pe verticală se obține tot axa y.
Observație 2: Un punct (x,y) aparține lui DY dy p dacă și numai dacă punctul (x,y-dy) aparține lui p.
Constructorul U p q, unde p și q sunt mulțimi de puncte, reprezintă reuniunea punctelor din p și q. De exemplu,
U (U X Y) (U (DY 3 X) (DX 2 Y))
reprezintă mulțimea de puncte de forma
... (-2,0) (-1,0) (0,0) (1,0) (2,0) ...
... (0,-2) (0,-1) (0,0) (0,1) (0,2) ...
... (-2,3) (-1,3) (0,3) (1,3) (2,3) ...
... (2,-2) (2,-1) (2,0) (2,1) (2,2) ...
Cerințe:
Scrieți o funcție
-}
-- | Point membership: a point is on X when y == 0, on Y when x == 0;
-- shifted sets are tested by shifting the point back; unions by
-- membership in either operand.
apartine :: Punct -> Multime -> Bool
apartine (x, y) m = case m of
  X       -> y == 0
  Y       -> x == 0
  DX d m' -> apartine (x - d, y) m'
  DY d m' -> apartine (x, y - d) m'
  U a b   -> apartine (x, y) a || apartine (x, y) b
{-
care determină dacă un punct aparține unei mulțimi de puncte date. De exemplu:
apartine (3,0) X == True
apartine (0,1) Y == True
apartine (3,3) (DY 3 X) == True
apartine (2,1) (DX 2 Y) == True
apartine (3,0) (U X Y) == True
apartine (0,1) (U X Y) == True
apartine (3,3) (U (DY 3 X) (DX 2 Y)) == True
apartine (2,1) (U (DY 3 X) (DX 2 Y)) == True
apartine (3,0) (U (U X Y) (U (DX 2 Y) (DY 3 X))) == True
apartine (0,1) (U (U X Y) (U (DX 2 Y) (DY 3 X))) == True
apartine (3,3) (U (U X Y) (U (DX 2 Y) (DY 3 X))) == True
apartine (2,1) (U (U X Y) (U (DX 2 Y) (DY 3 X))) == True
apartine (1,1) X == False
apartine (1,1) Y == False
apartine (1,1) (DY 3 X) == False
apartine (1,1) (DX 2 Y) == False
apartine (1,1) (U X Y) == False
apartine (1,1) (U X Y) == False
apartine (1,1) (U (DY 3 X) (DX 2 Y)) == False
apartine (1,1) (U (DY 3 X) (DX 2 Y)) == False
apartine (1,1) (U (U X Y) (U (DX 2 Y) (DY 3 X))) == False
Scrieți o funcție
-}
-- | Count the occurrences of the X and Y axes in a set description
-- (every textual occurrence counts, even duplicates).
nrAxe :: Multime -> Int
nrAxe m = case m of
  U a b  -> nrAxe a + nrAxe b
  DX _ a -> nrAxe a
  DY _ a -> nrAxe a
  _      -> 1 -- X or Y
{-
care numără de câte ori apare X sau Y în descrierea unei mulțimi de puncte. Fiecare axă trebuie numărată câte o dată pentru fiecare apariție a sa. De exemplu:
nrAxe X == 1
nrAxe Y == 1
nrAxe (U X Y) == 2
nrAxe (U (DY 3 X) (DX 2 Y)) == 2
nrAxe (U (U X Y) (U (DX 2 Y) (DY 3 X))) == 4
nrAxe (U (U X Y) X) == 3
Se consideră următorul tip algebric de date:
-}
-- | Arithmetic expressions: constants, negation, sum and product.
data Expr = Const Int
          | Neg Expr
          | Expr :+: Expr
          | Expr :*: Expr
  deriving (Eq, Show)

-- | Operators as they appear in Polish (prefix) notation.
data Op = NEG | PLUS | TIMES
  deriving (Eq, Show)

-- | One token of the Polish form: a constant or an operator.
data Atom = AConst Int | AOp Op
  deriving (Eq, Show)

-- | A whole expression in Polish (prefix) notation, obtained by a
-- preorder walk of the expression tree.
type Polish = [Atom]
{-
Cerințe:
Să se scrie o funcție
-}
-- | Serialize an expression to Polish (prefix) form: each operator is
-- emitted before the serializations of its operands.
fp :: Expr -> Polish
fp expr = case expr of
  Const n -> [AConst n]
  Neg e   -> AOp NEG : fp e
  a :+: b -> AOp PLUS : fp a ++ fp b
  a :*: b -> AOp TIMES : fp a ++ fp b
{-
care asociază unei expresii aritmetice date scrierea ei în forma poloneză: o listă de Atomi, obținută prin parcurgerea în preordine a arborelui asociat expresiei (operațiile precedând reprezentărilor operanzilor).
Exemple:
* forma poloneză a expresiei 5 * 3 este * 5 3
fp (Const 5 :*: Const 3) == [AOp TIMES, AConst 5, AConst 3]
* forma poloneză a expresiei −(7 * 3) este - * 7 3
fp (Neg (Const 7 :*: Const 3)) == [AOp NEG, AOp TIMES, AConst 7, AConst 3]
* forma poloneză a expresiei (5 + −3) * 17 este * + 5 - 3 17
fp ((Const 5 :+: Neg (Const 3)) :*: Const 17) == [AOp TIMES, AOp PLUS, AConst 5, AOp NEG, AConst 3, AConst 17]
* forma poloneză a expresiei (15 + (7 * (2 + 1))) * 3 este * + 15 * 7 + 2 1 3
fp ((Const 15 :+: (Const 7 :*: (Const 2 :+: Const 1))) :*: Const 3) == [AOp TIMES, AOp PLUS, AConst 15, AOp TIMES, AConst 7, AOp PLUS, AConst 2, AConst 1, AConst 3]
Definiți o funcție
-}
-- | A stateful parser over a list of Polish-notation atoms: consumes a
-- prefix of the input and may fail with 'Nothing'.
newtype PolishParser a = PolishParser {apply :: Polish -> (Polish, Maybe a)}

instance Functor PolishParser where
  fmap f pa = PolishParser (\p -> let (p', ma) = apply pa p in (p', fmap f ma))

instance Applicative PolishParser where
  pure x = PolishParser (\p -> (p,Just x))
  -- NOTE(review): when pf fails, pa is still run and may consume input;
  -- only the Maybe result short-circuits.
  pf <*> pa = PolishParser (\ p ->
    let (p',mf) = apply pf p
        (p'',ma) = apply pa p'
    in (p'', mf <*> ma))

instance Monad PolishParser where
  -- On failure the ORIGINAL input @p@ (not @p'@) is returned, i.e.
  -- bind backtracks any tokens its left operand consumed.
  pa >>= k = PolishParser (\ p ->
    let (p', ma) = apply pa p
    in case ma of
      Nothing -> (p, Nothing)
      Just a -> apply (k a) p')

-- | Consume one atom; fails on empty input.
readAtom :: PolishParser Atom
readAtom = PolishParser (\p -> case p of [] -> (p,Nothing); (h:t) -> (t, Just h))
-- | Parse one expression from Polish form: a constant stands alone,
-- NEG is followed by one operand, PLUS/TIMES by two.
readExp :: PolishParser Expr
readExp = do
  atom <- readAtom
  case atom of
    AConst n  -> return (Const n)
    AOp NEG   -> do e <- readExp; return (Neg e)
    AOp PLUS  -> binary (:+:)
    AOp TIMES -> binary (:*:)
  where
    binary op = do
      lhs <- readExp
      rhs <- readExp
      return (lhs `op` rhs)

-- | Inverse of 'fp' (up to Just): rfp . fp = Just . id.
rfp :: Polish -> Maybe Expr
rfp atoms = snd (apply readExp atoms)
{-
astfel încât rfp . fp = Just . id
Introducem un tip de date ce reprezinta o colectie de puncte (o tabela).
-}
-- | A grid point.
type Point = (Integer, Integer)

-- | A collection of points built from filled rectangles (given by the
-- lower-left and upper-right corners, inclusive), unions, and set
-- differences.
data Points = Rectangle Point Point
            | Union Points Points
            | Difference Points Points
  deriving (Show)

-- | Random point collections for QuickCheck, shrinking the size budget
-- by half on each recursive constructor.
instance Arbitrary Points where
  arbitrary = sized points
    where
      points n | n <= 0 = rectangle
               | otherwise = oneof [ rectangle
                                   , liftM2 Union subpoints subpoints
                                   , liftM2 Difference subpoints subpoints
                                   ]
        where
          rectangle = liftM4 (\x0 y0 x1 y1 -> Rectangle (x0,y0) (x1,y1)) arbitrary arbitrary arbitrary arbitrary
          subpoints = points (n `div` 2)

-- | A point whose coordinates may be infinite.
type InfinitePoint = (InfiniteInteger, InfiniteInteger)

-- | Boolean-algebra view of a point set: rectangles closed under
-- intersection ('All'), union ('Any') and complement ('Comp').
data PointsBoolean = BRectangle InfinitePoint InfinitePoint
                   | All [PointsBoolean]
                   | Any [PointsBoolean]
                   | Comp PointsBoolean
                   -- | Empty = Any []
                   -- | Full = All []
  deriving (Show)

-- | Integers extended with positive and negative infinity.
data InfiniteInteger = Finite Integer | Inf | NInf
  deriving Eq
instance Show InfiniteInteger where
  show (Finite i) = show i
  show Inf = "infinity"
  show NInf = "-infinity"

-- | Total order with NInf as bottom and Inf as top.
instance Ord InfiniteInteger where
  Finite i <= Finite j = i <= j
  _ <= Inf = True
  x <= NInf = x == NInf
  NInf <= _ = True
  Inf <= x = x == Inf

-- | Partial Num instance: adding infinities of opposite sign and
-- multiplying infinity by zero are runtime errors.  NOTE(review): in
-- the clauses below @infinity@, @inf@ and @ninf@ are ordinary pattern
-- variables — clause ordering is what makes them match only the
-- infinite constructors.
instance Num InfiniteInteger where
  (Finite i) + (Finite j) = Finite (i + j)
  (Finite i) + infinity = infinity
  infinity + (Finite i) = infinity
  Inf + Inf = Inf
  NInf + NInf = NInf
  inf + ninf = error "cannot add inifinities of opposite sign"
  (Finite i) * (Finite j) = Finite (i * j)
  (Finite i) * infinity
    | i > 0 = infinity
    | i < 0 = negate infinity
    | i == 0 = error "cannot multiply inifinity with 0"
  infinity * (Finite i) = (Finite i) * infinity
  Inf * Inf = Inf
  NInf * NInf = Inf
  Inf * NInf = NInf
  NInf * Inf = NInf
  abs (Finite i) = Finite (abs i)
  abs Inf = Inf
  abs NInf = Inf
  signum (Finite i) = Finite (signum i)
  signum Inf = Finite 1
  signum NInf = Finite (-1)
  fromInteger = Finite
  negate (Finite i) = Finite (negate i)
  negate Inf = NInf
  negate NInf = Inf
{-
Tabela incepe cu punctul (0,0) stanga jos.
Constructorul Rectangle selecteaza toate punctele dintr-o forma rectangulara.
De pilda, Rectangle (0,0) (2,1) da colturile din stanga jos si dreapta sus ale unui dreptunghi si include punctele (0,0) ; (1,0) ;(2,0) ; (0,1) ; (1,1) ; (2,1)
Union combina doua colectii de puncte iar Difference contine acele puncte care sunt in prima colectie dar nu sunt in a doua.
Scrieti o functie perimeter care calculeaza perimetrul celui mai mic dreptunghi care cuprinde complet o colectie de puncte.
-}
-- | Translate a 'Points' tree into its boolean-algebra form:
-- union becomes 'Any', and difference becomes intersection with a
-- complement.
pointsBA :: Points -> PointsBoolean
pointsBA pts = case pts of
  Rectangle (x0, y0) (x1, y1) ->
    BRectangle (Finite x0, Finite y0) (Finite x1, Finite y1)
  Union a b      -> Any [pointsBA a, pointsBA b]
  Difference a b -> All [pointsBA a, Comp (pointsBA b)]
-- | Put a 'PointsBoolean' into disjunctive normal form:
-- @Any [All [rect, ...], ...]@.  The complement of a rectangle is
-- rewritten as the union of the four (possibly infinite) strips
-- surrounding it.
dnfP :: PointsBoolean -> PointsBoolean
dnfP m@(BRectangle _ _) = Any [All [m]]
dnfP (Any ms) = foldr agg (Any []) (map dnfP ms)
  where
    -- Any [All []] denotes the full plane: it absorbs a union.
    agg _ t@(Any [All []]) = t
    agg t@(Any [All []]) _ = t
    agg (Any xs) (Any r) = Any (xs ++ r)
    agg pb1 pb2 = error $ "Cannot agg1 " ++ show pb1 ++ " and " ++ show pb2
dnfP (Comp (Any ms)) = dnfP (All (map Comp ms)) -- De Morgan
dnfP (Comp (All ms)) = dnfP (Any (map Comp ms)) -- De Morgan
dnfP (Comp (Comp m)) = dnfP m                   -- double complement
dnfP (Comp (BRectangle (x0,y0) (x1,y1))) = dnfP $ Any[BRectangle (-Inf,-Inf) (x0,Inf), BRectangle (x0, y1) (Inf, Inf), BRectangle (x0,-Inf) (Inf, y0), BRectangle (x1,y0) (Inf, y1)]
dnfP (All ms) = foldr agg (Any [All []]) (map dnfP ms)
  where
    -- Any [] denotes the empty set: it annihilates an intersection.
    agg _ (Any []) = Any []
    agg (Any[]) _ = Any []
    -- Distribute intersection over the two disjunctions.
    agg (Any ds) (Any ds') = Any [All (cs ++ cs') | All cs <- ds, All cs' <- ds']
    agg pb1 pb2 = error $ "Cannot agg2 " ++ show pb1 ++ " and " ++ show pb2

-- | Collapse every conjunction of rectangles into a single rectangle
-- (by intersecting bounds) and drop the empty ones.  Expects DNF
-- input; other shapes hit incomplete patterns.
simplify :: PointsBoolean -> PointsBoolean
simplify (Any alls) = Any $ filter isRectangle $ map simplifyAll alls
  where
    simplifyAll (All ms) = foldr intersect (BRectangle (-Inf,-Inf) (Inf,Inf)) ms
    intersect (BRectangle (x0,y0) (x1,y1)) (BRectangle (x0', y0') (x1', y1')) =
      BRectangle (max x0 x0', max y0 y0') (min x1 x1', min y1 y1')
    -- A rectangle is non-empty iff its bounds are consistent.
    isRectangle (BRectangle (x0,y0) (x1,y1)) = x0 <= x1 && y0 <= y1

-- | Bounding rectangle of a simplified union of rectangles; the empty
-- set is mapped to the degenerate box at (0,0).
bBox :: PointsBoolean -> PointsBoolean
bBox m = BRectangle (minimum xs, minimum ys) (maximum xs, maximum ys)
  where
    points (Any rs) = foldr (\(BRectangle p0 p1) ps -> p0:p1:ps) [] rs
    ps = points m
    ps' = if null ps then [(0,0)] else ps
    xs = map fst ps'
    ys = map snd ps'

-- | Perimeter computed through the boolean-algebra pipeline.  The
-- final @Finite i@ pattern fails at runtime if the bounding box is
-- not finite.
bPerimeter :: Points -> Integer
bPerimeter m = i
  where
    BRectangle (x0,y0) (x1,y1) = bBox $ simplify $ dnfP $ pointsBA m
    Finite i = 2 * (x1 - x0 + y1 - y0)
-- | Enumerate the points of a collection by brute force — the
-- reference implementation against which 'bPerimeter' is tested.
-- Note: 'nub' makes Union O(n^2) in the number of points.
points :: Points -> [Point]
points (Rectangle (x0,y0) (x1,y1)) = [(x,y) | x <- [x0..x1], y <- [y0..y1]]
points (Union m1 m2) = nub (points m1 ++ points m2)
points (Difference m1 m2) = points m1 \\ points m2

-- | Minimal bounding rectangle; the empty collection yields the
-- degenerate box at (0,0).
box :: Points -> Points
box m = Rectangle (minimum xs, minimum ys) (maximum xs, maximum ys)
  where
    ps = points m
    ps' = if null ps then [(0,0)] else ps
    xs = map fst ps'
    ys = map snd ps'
-- | Perimeter of the minimal bounding rectangle of a point collection
-- (brute-force reference implementation).
perimeter :: Points -> Integer
perimeter m = 2 * (x1 - x0 + y1 - y0)
  where
    Rectangle (x0,y0) (x1,y1) = box m

-- | QuickCheck property: the brute-force 'perimeter' and the
-- boolean-algebra 'bPerimeter' must agree.  (Compares the Integers
-- directly instead of their 'show' strings, which is equivalent for
-- Integer but idiomatic.)
prop_perimeter :: Points -> Bool
prop_perimeter m = perimeter m == bPerimeter m
{-
Scrieti o functie distance care calculeaza distanta dintre doua colectii de puncte ca reprezentand distanta intre coltul dreapta-sus al dreptunghiului minimal care cuprinde prima colectie si coltul stanga-jos al dreptunghiului minimal care cuprinde cea de-a doua colectie.
-}
-- | Distance between the upper-right corner of the first collection's
-- bounding box and the lower-left corner of the second's.
distance :: Points -> Points -> Float
distance m m' = sqrt $ fromIntegral $ (x1-x0')^2 + (y1-y0')^2
  where
    Rectangle (x0,y0) (x1,y1) = box m
    Rectangle (x0',y0') (x1',y1') = box m'
{-
Introducem un tip de date ce reprezinta o expresie booleana formată din literali / variabile (Lit), negație (Not), conjuncție (And), disjuncție (Or) și implicație (:->:), in care conjuncțiile si disjuncțiile au un număr arbitrar de termeni.-}
-- | Boolean expressions over named literals, with n-ary conjunction
-- and disjunction and an implication connective.
data Exp = Lit String
         | Not Exp
         | And [Exp]
         | Or [Exp]
         | Exp :->: Exp
  deriving (Show)
{-
Un atom este fie un literal (Lit), fie negația unui literal. O expresie este în formă normală disjunctivă dacă este o disjuncție de conjuncții de atomi.
Să se scrie o funcție care dată fiind o expresie are ca rezultat forma normală disjunctivă a acelei expresii.
-}
-- | Disjunctive normal form: the result is always
-- @Or [And [atom, ...], ...]@ where an atom is a literal or a negated
-- literal.  Implication is eliminated via a -> b = not a \/ b;
-- negation is pushed inward by De Morgan; And is distributed over Or.
dnf :: Exp -> Exp
dnf m@(Lit _) = Or [And [m]]
dnf (Or ms) = foldr agg (Or []) (map dnf ms)
  where
    -- Or [And []] is "true": it absorbs a disjunction.
    agg _ t@(Or [And []]) = t
    agg t@(Or [And []]) _ = t
    agg (Or xs) (Or r) = Or (xs ++ r)
    agg pb1 pb2 = error $ "Cannot agg1 " ++ show pb1 ++ " and " ++ show pb2
dnf (Not (Or ms)) = dnf (And (map Not ms))  -- De Morgan
dnf (Not (And ms)) = dnf (Or (map Not ms))  -- De Morgan
dnf (Not (Not m)) = dnf m                   -- double negation
dnf a@(Not (Lit _)) = Or [And [a]]
dnf (Not (e1 :->: e2)) = dnf (And [e1, Not e2])
dnf (And ms) = foldr agg (Or [And []]) (map dnf ms)
  where
    -- Or [] is "false": it annihilates a conjunction.
    agg _ (Or []) = Or []
    agg (Or []) _ = Or []
    -- Distribute conjunction over the two disjunctions.
    agg (Or ds) (Or ds') = Or [And (cs ++ cs') | And cs <- ds, And cs' <- ds']
    agg pb1 pb2 = error $ "Cannot agg2 " ++ show pb1 ++ " and " ++ show pb2
dnf (e1 :->: e2) = dnf $ Or [Not e1, e2]
{-
dnf ((Lit "a" :->: Lit "b") :->: Lit "c")
= Or [And [Lit "a",Not (Lit "b")],And [Lit "c"]]
dnf (Lit "a" :->: (Lit "b" :->: Lit "c"))
= Or [And [Not (Lit "b")],And [Lit "c"],And [Not (Lit "a")]]
• a → b ≡ ¬a ∨ b
• ¬¬a = a
• ¬ /\ (a 1 , . . . , a n ) = \/(¬a 1 , . . . , ¬a n )
• ¬ \/ (a 1 , . . . , a n ) = /\(¬a 1 , . . . , ¬a n )
• /\ (a 1 , . . . , a i , /\(b 1 , . . . , b m ), a i+1 , . . . , a n ) = /\ (a 1 , . . . , a n , b 1 , . . . , b m )
• \/ (a 1 , . . . , a i , \/ (b 1 , . . . , b m ), a i+1 , . . . , a n ) = \/ (a 1 , . . . , a n , b 1 , . . . , b m )
• /\ (a 1 , . . . , a i , \/ (b 1 , . . . , b m ), a i+1 , . . . , a n ) = \/ (/\ (b 1 , a 1 , . . . , a n ), . . . , /\ (b m , . . . , a n ))
-}
| PavelClaudiuStefan/FMI | An_3_Semestru_1/ProgramareDeclarativa/Extra/Laborator/Modele test 2/Test2/test2-rezolvat.hs | cc0-1.0 | 14,659 | 0 | 16 | 3,790 | 4,118 | 2,129 | 1,989 | 200 | 7 |
import Data.Char

-- | Ask for first and last name, upper-case both, and print a
-- greeting.  Fixes the original greeting string, which had no space
-- after "hey" or before "how are you?" ("heyJOHN SMITHhow are you?").
main = do
  putStrLn "What's your first name?"
  firstName <- getLine
  putStrLn "What's your last name?"
  lastName <- getLine
  let bigFirstName = map toUpper firstName
      bigLastName = map toUpper lastName
  putStrLn $ "hey " ++ bigFirstName ++ " " ++ bigLastName ++ ", how are you?"
| softwaremechanic/Miscellaneous | Haskell/4.hs | gpl-2.0 | 305 | 0 | 11 | 65 | 84 | 38 | 46 | 9 | 1 |
import Graphics.UI.Gtk hiding (Settings)
import Graphics.Rendering.Cairo
import Data.Time.Clock.POSIX
import Data.Time
import System.Directory
import Control.Exception
import System.Locale (defaultTimeLocale)
import Data.IORef
import Control.Monad (when,forM)
import Data.List
import Text.Printf
import Data.Char (toUpper)
-- | Length of one measuring interval, in seconds.
iDuration = 30
-- | Length of one full result round, in seconds.
rDuration = 120
-- | Number of intervals that make up one round.
amountOfIntervals = rDuration `div` iDuration

-- | One stored typing result.
data Result = Result {
  rDate :: String,              -- timestamp, "0000-00-00 00:00:00" format
  rMrks, rRank, rErrs :: Int    -- score, rank, error count
} deriving (Read, Show)

-- | Results are identified by score and date only; rank and error
-- count are deliberately ignored.
instance Eq Result where
  (Result date1 mrks1 rnk1 errs1) == (Result date2 mrks2 rnk2 errs2) =
    mrks1 == mrks2 && date1 == date2

-- | Higher score sorts first; ties broken by earlier date.
instance Ord Result where
  compare = fasterFst

-- Comparator: descending by score, then ascending by date.
fasterFst (Result date1 mrks1 rnk1 errs1) (Result date2 mrks2 rnk2 errs2) =
  if mrks1 /= mrks2
    then mrks2 `compare` mrks1
    else date1 `compare` date2

-- | Placeholder result used to initialise views.
zeroResult = Result {
  rDate = "0000-00-00 00:00:00",
  rMrks = 0, rRank = 0, rErrs = 0 }

-- | Live timing information shown in the session view.
data Timing = Timing {
  sSession :: String, sTotal :: Int,
  sSecsLeft :: Int, sSpeed :: Double
} deriving Show

-- | Initial timing row: fresh session, full interval remaining.
zeroTiming = Timing {
  sSession = "00:00", sTotal = 0,
  sSecsLeft = iDuration, sSpeed = 0.0 }

-- | Statistics for one measuring interval.
data Interval = Interval {
  iNum, iMrks, iErrs :: Int
} deriving Show

-- | Placeholder interval; iNum = -1 marks "no interval yet".
zeroInterval = Interval {
  iNum = -1, iMrks = 0, iErrs = 0 }

-- | All GTK widgets and list models of the application window;
-- 'NotCreated' is the state before the GUI is built.
data GUI = NotCreated | GUI {
  gWindow :: Window,
  gErrorCanvas, gTimingCanvas, gHelperCanvas :: DrawingArea,
  gEntry :: Entry,
  gLabel1, gLabel2 :: Label,
  gStatusbar :: Label,
  gStyle :: Style,
  gModelR :: ListStore Result,
  gModelS :: ListStore Timing,
  gModelI :: ListStore Interval
}

-- | Outcome of comparing the typed text against the goal line.
data GameStatus = Error | Correct | Back | NotStarted
  deriving (Eq, Show)
-- | The whole mutable application state, threaded through via IORef.
data State = State {
  homeDirectory :: String,        -- ~/.hatupist config directory
  textLines :: [String],          -- goal lines, each with a trailing space
  startTime :: POSIXTime,
  oldStatus :: GameStatus,
  showTimingPict :: Bool,
  oldlen :: Int,                  -- length of the previously typed text
  lastShownIv :: Int,
  oLabelStrs :: [String],         -- the two goal-label strings currently shown
  total :: Int,
  lastLetter,nextLetter :: Char,
  speedNows :: [(POSIXTime, Int)],
  intervals :: [Interval],
  results :: [Result],
  sessionBest :: Result,
  settings :: Settings,
  gui :: GUI
  }

-- | State before any file or GUI work has happened.
initState = State {
  homeDirectory = "",
  textLines = [],
  startTime = fromIntegral 0 :: POSIXTime,
  oldStatus = NotStarted,
  showTimingPict = False,
  oLabelStrs = ["",""],
  oldlen = 0,
  total = 0,
  lastLetter = ' ', nextLetter = ' ',
  speedNows = [],
  lastShownIv = -1,
  intervals = [],
  results = [],
  sessionBest = zeroResult,
  settings = defaultSettings,
  gui = NotCreated
  }

-- | User-tunable settings, persisted via Read/Show.
data Settings = Settings {
  startLine :: Int,
  lineLen :: Int,
  textfile :: String,
  font :: String,
  useHelper :: Bool,              -- whether the keyboard-helper canvas is shown
  keyrow1,keyrow2,keyrow3 :: String
} deriving (Read, Show)

-- | Defaults used when no settings file can be read.  NOTE(review):
-- 'keyboard' is defined elsewhere in this file (not visible in this
-- chunk).
defaultSettings = Settings {
  startLine = 0,
  lineLen = 60,
  textfile = "morse.txt",
  font = "monospace 10",
  useHelper = True,
  keyrow1 = keyboard !! 0,
  keyrow2 = keyboard !! 1,
  keyrow3 = keyboard !! 2
  }

-- | A full-width dummy line of 'x' characters.
xxx = replicate (lineLen defaultSettings) 'x'
-- Short accessors for the three most common state fields.
s gs = settings gs
g gs = gui gs
r gs = results gs
-- | Load saved results; any failure yields the empty list.
resultsFromFile fname = do
  structFromFile fname readRs []

-- | Load saved settings; any failure yields 'defaultSettings'.
settingsFromFile fname = do
  structFromFile fname readSs defaultSettings

-- | Read and parse a value from @fname@, falling back to @zero@ on
-- any exception.  NOTE(review): 'readFile' is lazy, so read errors
-- surface when @pFunc@ forces the contents — that happens under the
-- second handler, so both failure modes appear to be covered.
structFromFile fname pFunc zero = do
  content <- readFile fname `catch`
    \(SomeException e) -> return ""
  result <- pFunc content `catch`
    \(SomeException e) -> return zero
  return result

-- Parse persisted values via their Read instances ('readIO' raises
-- on malformed input, which structFromFile then catches).
readRs :: String -> IO [Result]
readRs = readIO

readSs :: String -> IO Settings
readSs = readIO

-- File names (inside homeDirectory) for persisted state.
resultsFile = "results.txt"
settingsFile = "settings.txt"
-- | Locate/create ~/.hatupist, load settings and results from it,
-- seed the results view with the best saved result, and store
-- everything in the state ref.  NOTE(review): 'bestResult' is defined
-- elsewhere in this file (not visible in this chunk).
getStartupConfig gsRef = do
  gs <- readIORef gsRef
  -- directory
  homedir <- getHomeDirectory
  let dir = homedir ++ "/.hatupist"
  createDirectoryIfMissing False (dir)
  -- settings
  let rname = dir ++ "/" ++ settingsFile
  oldSettings <- settingsFromFile rname
  -- results (note: this `rname` shadows the settings one above)
  let rname = dir ++ "/" ++ resultsFile
  oldResults <- resultsFromFile rname
  listStoreSetValue (gModelR (g gs)) 0 (bestResult oldResults)
  -- other
  writeIORef gsRef gs {
    homeDirectory = dir,
    results = oldResults,
    settings = oldSettings
    }
  afterConfig gsRef

-- | Apply the loaded settings to the GUI.
afterConfig gsRef = do
  setHelperVisibility gsRef
  setFonts gsRef
-- | Read the practice text and re-wrap it into lines of the configured
-- length, each terminated by a single space.  NOTE(review): 'colLines',
-- 'collectWords' and 'renewLabels' are defined elsewhere in this file.
getLines gsRef = do
  gs <- readIORef gsRef
  originalText <- readFile (textfile (s gs))
  let textLinesss = colLines (collectWords (words (originalText))
                              (lineLen (s gs)))
      textLiness = map (++" ") textLinesss
  writeIORef gsRef gs {
    textLines = textLiness
    }
  renewLabels gsRef

-- | Persist results and settings to the config directory, then quit
-- the GTK main loop.
quitProgram gsRef = do
  gs <- readIORef gsRef
  -- results
  let rname = (homeDirectory gs) ++ "/" ++ resultsFile
  writeFile rname (show (r gs))
  -- settings (shadows the results `rname` above)
  let rname = (homeDirectory gs) ++ "/" ++ settingsFile
  writeFile rname (show (s gs))
  mainQuit

-- | Program entry point: build the GUI, load config and text, then
-- hand control to GTK.
main = do
  gsRef <- newIORef initState
  initGUI
  createGUI gsRef
  getStartupConfig gsRef
  getLines gsRef
  gs <- readIORef gsRef
  setStatusText "Voit aloittaa." (toGtkColor green) gs
  mainGUI
-- | Apply the configured font to the goal labels and the entry, and
-- widen the first label to the configured line length (+3 slack).
setFonts gsRef = do
  gs <- readIORef gsRef
  let gui = g gs
      newFont = font (s gs)
  srcfont <- fontDescriptionFromString newFont
  widgetModifyFont (gLabel1 gui) (Just srcfont)
  widgetModifyFont (gLabel2 gui) (Just srcfont)
  widgetModifyFont (gEntry gui) (Just srcfont)
  labelSetWidthChars (gLabel1 gui) (lineLen (s gs) + 3)

-- | Show or hide the keyboard-helper canvas per settings, then shrink
-- the window to fit.
setHelperVisibility gsRef = do
  gs <- readIORef gsRef
  case useHelper (s gs) of
    True -> widgetShow (gHelperCanvas (g gs))
    False -> widgetHide (gHelperCanvas (g gs))
  resize (gWindow (g gs))

-- | Resize a window to its current size request.
resize window = do
  Requisition w h <- widgetSizeRequest window
  windowResize window w h
-- | Classify the entry contents against the goal line.
-- 'Correct' when the whole input is a (growing) prefix of the goal,
-- 'Back' when it is a prefix but no longer than before (backspacing),
-- 'Error' otherwise.
getStatus :: String -> String -> Int -> GameStatus
getStatus written goal oldlen
    | a == b && c < d = Correct
    | a == b = Back
    | otherwise = Error
    where
        a = written
        b = commonPrefix written goal
        c = oldlen
        d = length written
-- | Longest shared prefix of two lists; empty when the heads differ
-- or either list is empty.
commonPrefix (a:as) (b:bs) =
    if a == b then a : commonPrefix as bs else []
commonPrefix _ _ = []
-- Initial rows, column titles (Finnish) and per-column render functions
-- for the three list-store tables: results (r), session (s), intervals (i).
rInitModel = replicate 3 zeroResult
rColTitles = ["Päiväys", "Tulos", "Sija", "Virheitä" ]
rColFuncs = [ rDate, rSpeed . rMrks, rShowRank, rErrorPros]
sInitModel = [zeroTiming]
sColTitles = ["Istunto", "Yhteensä", "Jäljellä", "Nopeus"]
sColFuncs = [ sSession, show . sTotal, show . sSecsLeft, f01 . sSpeed]
iInitModel = replicate amountOfIntervals zeroInterval
iColTitles = ["Alkoi", "Päättyi", "Nopeus", "Virheitä" ]
iColFuncs = [ iStarts . iNum, iEnds . iNum, iSpeed . iMrks, iErrorPros]
-- | Build the whole widget tree: menu bar, three stat tables, timing and
-- keyboard-helper canvases, the two goal-text labels, the typing entry
-- and the status bar.  All created handles are stored into the GUI
-- record of the global state.  Layout nesting (outermost first):
-- extrmVBox > outerHBox > outerVBox > middleHBox > innerVBox1/2.
createGUI gsRef = do
    gs <- readIORef gsRef
    window <- windowNew
    style <- widgetGetStyle window
    onDestroy window (quitProgram gsRef)
    extrmVBox <- vBoxNew False 0
    outerHBox <- hBoxNew False 0
    outerVBox <- vBoxNew False 0
    middleHBox <- hBoxNew False 0
    innerVBox1 <- vBoxNew False 0
    innerVBox2 <- vBoxNew False 0
    menubar <- createMenuBar menuBarDescr gsRef
    boxPackStart extrmVBox menubar PackNatural 0
    -- the three list-store backed tables
    rModel <- setupView rInitModel rColTitles rColFuncs innerVBox1
    sModel <- setupView sInitModel sColTitles sColFuncs innerVBox1
    iModel <- setupView iInitModel iColTitles iColFuncs innerVBox2
    -- small canvas flashing the rank colour after each interval
    timingCanvas <- drawingAreaNew
    widgetSetSizeRequest timingCanvas 120 3
    onExpose timingCanvas (
        drawTimingCanvas gsRef timingCanvas)
    boxPackStart innerVBox1 timingCanvas PackNatural 0
    -- on-screen keyboard helper
    keyrowsCanvas <- drawingAreaNew
    widgetSetSizeRequest keyrowsCanvas 275 73
    onExpose keyrowsCanvas (
        drawKeyrowsCanvas gsRef keyrowsCanvas)
    boxPackStart innerVBox2 keyrowsCanvas PackNatural 0
    boxPackStart middleHBox innerVBox1 PackNatural 0
    boxPackStart middleHBox innerVBox2 PackNatural 6
    boxPackStart outerVBox middleHBox PackNatural 3
    boxPackStart outerHBox outerVBox PackNatural 6
    boxPackStart extrmVBox outerHBox PackNatural 0
    set window [
        containerChild := extrmVBox ]
    -- striped canvas shown while the input contains an error
    errorCanvas <- drawingAreaNew
    widgetSetSizeRequest errorCanvas 300 40
    onExpose errorCanvas (
        drawErrorCanvas gsRef errorCanvas)
    boxPackStart outerVBox errorCanvas PackGrow 0
    -- current and next goal line
    label1 <- labelNew (Just xxx)
    miscSetAlignment label1 0 0
    boxPackStart outerVBox label1 PackNatural 0
    label2 <- labelNew (Just xxx)
    miscSetAlignment label2 0 0
    boxPackStart outerVBox label2 PackNatural 0
    -- the typing entry; every change re-evaluates game state
    entry <- entryNew
    entrySetHasFrame entry False
    boxPackStart outerVBox entry PackNatural 3
    onEditableChanged entry (
        whenEntryChanged gsRef)
    -- status bar is a label inside an event box (so its bg can be colored)
    statusbar <- labelNew Nothing
    miscSetAlignment statusbar 0 0
    miscSetPadding statusbar 6 0
    eventbox <- eventBoxNew
    containerAdd eventbox statusbar
    boxPackEnd extrmVBox eventbox PackNatural 0
    widgetShowAll window
    writeIORef gsRef gs {
        gui = GUI {
            gWindow = window,
            gTimingCanvas = timingCanvas, gErrorCanvas = errorCanvas,
            gHelperCanvas = keyrowsCanvas,
            gEntry = entry,
            gLabel1 = label1, gLabel2 = label2,
            gStatusbar = statusbar,
            gStyle = style,
            gModelR = rModel, gModelS = sModel, gModelI = iModel }}
-- | Scale a 0..1 channel value to GTK's 16-bit color range.
toWord x = round (x*65535.0)
-- | Build a GTK 'Color' from an (r,g,b) triple of 0..1 Doubles.
toGtkColor (r,g,b) = Color (toWord r) (toWord g) (toWord b)
toGtkColors xs = [toGtkColor x | x <- xs]
-- Palette used throughout the UI, as 0..1 RGB triples.
blue = (0.200, 0.400, 1.000)
green = (0.451, 0.824, 0.086)
red = (1.000, 0.200, 0.400)
yellow = (0.988, 0.914, 0.310)
black = (0.000, 0.000, 0.000)
gray = (0.502, 0.502, 0.502)
white = (1.000, 1.000, 1.000)
brkRed = (0.886, 0.031, 0.000)
-- | Update the status bar: an error prompt while in 'Error' state,
-- otherwise clear it (white resets the event-box background).
drawStatusText gsRef = do
    gs <- readIORef gsRef
    if (oldStatus gs) /= Error
        then setStatusText "" (toGtkColor white) gs
        else setStatusText "Korjaa virheet!" (toGtkColor red) gs
-- | Expose handler that intentionally draws nothing.
drawEmptyPicture canvas = do
    return True
-- | Expose handler for the error canvas: paint the striped warning
-- picture only while the current input state is 'Error'.
drawErrorCanvas gsRef widget _evt = do
    gs <- readIORef gsRef
    drawWin <- widgetGetDrawWindow widget
    (wInt,hInt) <- widgetGetSize widget
    let (w,h) = (intToDouble wInt, intToDouble hInt)
    if (oldStatus gs) /= Error
        then drawEmptyPicture widget
        else renderWithDrawable drawWin (drawErrorPicture w h)
    return True
-- | Fill a polygon with Cairo: start at the absolute point @(x,y)@,
-- follow the relative offsets in @points@, close the path and fill it
-- with the given 0..1 RGB color.
relPolygon (x,y) points (r,g,b) = do
    moveTo x y
    -- mapM_ rather than mapM: the per-segment results are not used
    mapM_ (\(dx,dy) -> relLineTo dx dy) points
    closePath
    setSourceRGB r g b
    fill
-- | Paint diagonal blue/red stripes across a w*h area: one slanted
-- polygon per step of width @r@, colors alternating via 'cycle'.
drawErrorPicture w h = do
    let c = h
        r = 15
    mapM
        ( \(x,y,points,color) -> relPolygon (x,y) points color)
        [(x,0,[((-c),h),(r,0),(c,(-h))],
          color) | (x,color) <- zip [0,r..w+c] (cycle [blue,red])]
    return True
-- | Timeout callback: request a redraw once, then cancel the timer
-- (returning False removes a GTK timeout).
onTimeToClear canvas = do
    widgetQueueDraw (canvas)
    return False
-- | Expose handler for the timing canvas: draw the rank-colored bar
-- only when 'showTimingPict' is set, and reset that flag afterwards
-- so the next expose clears the bar again.
drawTimingCanvas gsRef canvas _evt = do
    gs <- readIORef gsRef
    if (showTimingPict gs)
        then drawTimingPicture gs canvas
        else drawEmptyPicture canvas
    writeIORef gsRef gs { showTimingPict = False }
    return True
-- Interpolation ranges for the rank colour: hue and value targets, and
-- the source range of log2(rank) values (2^12.3 ~ maxRank 5000).
hueLimits = (0.00, 1.00)
valLimits = (1.00, 0.25)
limitsFrom = (0.00, 12.3)
-- | Map @pointFrom@ linearly from the @limitsFrom@ range onto the
-- @limitsTo@ range, clamping the input to its source range first.
ptAlong limitsTo limitsFrom pointFrom = lo' + spanTo * ratio
    where
        (lo', hi') = limitsTo
        (lo, hi)   = limitsFrom
        spanTo     = hi' - lo'
        clamped    = lo `max` pointFrom `min` hi
        ratio      = (clamped - lo) / (hi - lo)
-- | Fill the whole canvas with one solid colour.
drawTimingRect w h (r,g,b) = do
    rectangle 0 0 w h
    setSourceRGB r g b
    fill
-- | Colour the timing bar by the latest result's rank: hue and value are
-- interpolated along log2(rank), then a timer clears the bar after 1s.
drawTimingPicture gs canvas = do
    row <- listStoreGetValue (gModelR (g gs)) 2
    (wInt,hInt) <- widgetGetSize canvas
    drawWin <- widgetGetDrawWindow canvas
    let (w,h) = (intToDouble wInt, intToDouble hInt)
        rankD = intToDouble (rRank row)
        colorPoint = logBase 2.0 rankD
        sat = 0.40
        hue = ptAlong hueLimits limitsFrom colorPoint
        val = ptAlong valLimits limitsFrom colorPoint
        (r,g,b) = hsvToRgb (hue,sat,val)
    renderWithDrawable drawWin (drawTimingRect w h (r,g,b))
    timeoutAdd (onTimeToClear canvas) 1000
    return True
-- | Colour of the finger-area a key belongs to: index of the first
-- entry of 'qwertyAreas' containing the letter, looked up in
-- 'areaColors'; Nothing for keys outside every area.
qwertyColor letter =
    fmap (areaColors !!) (findIndex (letter `elem`) qwertyAreas)
-- | Letter at column @x@ of row @y@ on the qwerty layout, or a space
-- when the position falls outside the layout.
qwertyLetter x y
    | y >= length qwerty      = ' '
    | x >= length row         = ' '
    | otherwise               = row !! x
    where
        row = qwerty !! y
-- | Draw a square of side @r1@ at (x,y), filled or outlined.
drawRect x y r1 (r,g,b) filled = do
    rectangle x y r1 r1
    setSourceRGB r g b
    if filled then fill else stroke
-- | Render one letter at (x,y) with a small sans font.
paintLetter x y letter (r,g,b) = do
    fntDscr <- liftIO (fontDescriptionFromString "Sans 8")
    layout <- createLayout (letter)
    liftIO (layoutSetFontDescription layout (Just fntDscr))
    moveTo x y
    setSourceRGB r g b
    showLayout layout
-- | Draw one helper key at grid position (x,y): optional finger-area
-- halo, a filled highlight when the key is the next letter (gray when
-- the same letter repeats), the key outline and its letter.
drawKey x y letter selected dupl = do
    setLineWidth 1.0
    let co = qwertyColor(qwertyLetter x y)
        botColor = if selected then white else black
        selColor = if dupl then gray else black
    case co of
        Just c -> drawRect (intToDouble (zentr xx))
                           (intToDouble (zentr yy))
                           (intToDouble r2)
                           c True
        Nothing -> return ()
    when selected (drawRect (intToDouble xx)
                            (intToDouble yy)
                            (intToDouble r1)
                            selColor True)
    drawRect (intToDouble xx) (intToDouble yy) (intToDouble r1) botColor False
    paintLetter (intToDouble (xx+2)) (intToDouble (yy+2)) letter botColor
    where
        r1 = 18                       -- key size in pixels
        r2 = 21                       -- halo size in pixels
        zentr z = z - (r2-r1) `div` 2 -- center the halo behind the key
        deltaXs = [3,12,0]            -- per-row horizontal stagger
        margin = 5
        xx = x*r2+deltaXs!!y + margin
        yy = y*r2 + margin
-- | Expose handler for the keyboard helper: highlight the key of the
-- next expected letter, in gray when it is the same letter as last time
-- (so repeated letters are still visible as a change).
drawKeyrowsCanvas gsRef canvas _evt = do
    gs <- readIORef gsRef
    -- remember the letter we are about to highlight
    writeIORef gsRef gs {
        lastLetter = nextLetter gs
    }
    let c = nextLetter gs
        selected = [toUpper c]
        dupl = toUpper c == toUpper (lastLetter gs)
        keymap = [keyrow1 (s gs), keyrow2 (s gs), keyrow3 (s gs)]
        leK y = length (keymap !! y)
        keyK x y = [toUpper ((keymap !! y) !! x)]
    drawWin <- widgetGetDrawWindow canvas
    renderWithDrawable drawWin (do
        mapM
            ( \(x,y,k) -> drawKey x y k (k == selected) dupl)
            [(x,y,keyK x y)| y <- [0..2], x <- [0..(leK y)-1], keyK x y /= " "]
        return True)
-- | Build a non-focusable tree view over a fresh list store, with one
-- text column per (title, render-function) pair, pack it into @parent@
-- and return the store so rows can be updated later.
setupView initModel titles funcs parent = do
    model <- listStoreNew (initModel)
    view <- treeViewNewWithModel model
    mapM
        ( \(title, func) -> newcol view model title func )
        ( zip titles funcs )
    set view [ widgetCanFocus := False ]
    boxPackStart parent view PackNatural 3
    return model
    where
        -- one read-only text column rendered by @func@
        newcol view model title func = do
            renderer <- cellRendererTextNew
            col <- treeViewColumnNew
            cellLayoutPackStart col renderer True
            cellLayoutSetAttributes col renderer model (
                \row -> [ cellText := func row])
            treeViewColumnSetTitle col title
            treeViewAppendColumn view col
-- | Colour the status bar's event box: the theme background when the
-- message is empty, the given colour otherwise.
modify parent color text gs = do
    if (text == "")
        then do
            bg <- styleGetBackground (gStyle (g gs)) StateNormal
            widgetModifyBg parent StateNormal bg
        else widgetModifyBg parent StateNormal color
-- | Show @text@ on the status bar and tint its background accordingly.
setStatusText text color gs = do
    let label = gStatusbar (g gs)
    labelSetText label text
    parent <- widgetGetParent label
    case parent of
        Nothing -> print "No parent"
        Just parent -> modify parent color text gs
    return ()
-- | Menu layout: one (label, items) group; items are (stock id, action).
menuBarDescr =
    [("_Tiedosto",
      [("gtk-open", openFile),
       ("gtk-select-font", openFont),
       ("gtk-preferences", setPreferences),
       ("gtk-about", noop),
       ("gtk-quit", quitProgram)])
    ]
-- | Build the menu bar from 'menuBarDescr'; each item's action receives
-- the global state IORef when activated.
createMenuBar descr gsRef = do
    bar <- menuBarNew
    mapM_ (createMenu bar) descr
    return bar
    where
        createMenu bar (name,items) = do
            menu <- menuNew
            item <- menuItemNewWithLabelOrMnemonic name
            menuItemSetSubmenu item menu
            menuShellAppend bar item
            mapM_ (createMenuItem menu) items
        createMenuItem menu (stock,action) = do
            item <- imageMenuItemNewFromStock stock
            menuShellAppend menu item
            onActivateLeaf item (do action gsRef)
        -- '_' in the label marks a mnemonic accelerator
        menuItemNewWithLabelOrMnemonic name
            | elem '_' name = menuItemNewWithMnemonic name
            | otherwise = menuItemNewWithLabel name
-- | Menu action that does nothing (placeholder for "about").
noop gsRef = do
    return ()
-- | Default key layout used for the new-settings keyboard rows.
keyboard = qwerty
-- Finnish qwerty rows; the leading space on row 3 aligns its stagger.
qwerty = [
    "qwertyuiopå",
    "asdfghjklöä",
    " zxcvbnm,.-"]
-- Finger areas (keys typed by the same finger) and their helper colours;
-- index i of 'qwertyAreas' maps to index i of 'areaColors'.
qwertyAreas = ["rfvujm", "edcik,", "wsxol.", "qazpö-"]
areaColors = [blue, yellow, red, green]
-- | Column headers of the preferences table (Finnish: variable / value).
colTitle = ["Muuttuja", "Arvo"]
-- | Render a Bool in Finnish for the settings table.
showBool b = if b then "Kyllä" else "Ei"
-- | Rows of the preferences table: label plus current value as text.
-- The two ignored fields are, per the pattern, not user-editable here.
settingsTable (Settings a b _ _ c e f g) = [
    ["Aloitusrivi", show a],
    ["Rivinpituus (mrk)", show b],
    ["Näytä näppäimistö (K/E)",showBool c],
    ["Näppäimistön ylärivi", e],
    ["Näppäimistön keskirivi", f],
    ["Näppäimistön alarivi", g] ]
-- | Open the preferences dialog; on OK re-derive anything that depends
-- on the changed settings, on cancel restore the previous state.
setPreferences gsRef = do
    oldGs <- readIORef gsRef
    result <- preferencesDialog "Asetukset" oldGs gsRef
    case result of
        Just "OK" -> do
            newGs <- readIORef gsRef
            when ((lineLen (s oldGs)) /= (lineLen (s newGs))) (getLines gsRef)
            when ((startLine (s oldGs)) /= (startLine (s newGs))) (renewLabels gsRef)
            afterConfig gsRef
        otherwise -> do
            writeIORef gsRef oldGs
-- | Modal dialog with an editable settings table; Just "OK" only when
-- confirmed, Nothing on cancel/close.
preferencesDialog title gs gsRef = do
    dialog <- dialogNew
    set dialog [ windowTitle := title ]
    dialogAddButton dialog stockCancel ResponseCancel
    dialogAddButton dialog stockOk ResponseOk
    model <- listStoreNew (settingsTable (s gs))
    view <- treeViewNewWithModel model
    setupSettingsView gsRef view model
    upbox <- dialogGetUpper dialog
    boxPackStart upbox view PackNatural 10
    widgetShowAll upbox
    response <- dialogRun dialog
    widgetDestroy dialog
    case response of
        ResponseOk -> do
            return (Just "OK")
        ResponseCancel -> do
            return Nothing
        ResponseDeleteEvent -> do
            return Nothing
        _ -> return Nothing
-- | Two-column view over the settings table; only the value column
-- (i == 1) is editable, and edits are handled by 'onCellEdited'.
setupSettingsView gsRef view model = do
    mapM
        ( \(title, i) -> newcol view model title i )
        ( zip colTitle [0..] )
    where
        newcol view model title i = do
            renderer <- cellRendererTextNew
            col <- treeViewColumnNew
            cellLayoutPackStart col renderer True
            cellLayoutSetAttributes col renderer model (
                \row -> [ cellText := row !! i, cellTextEditable := (i==1) ])
            treeViewColumnSetTitle col title
            treeViewAppendColumn view col
            on renderer edited (onCellEdited gsRef model)
-- | Parse an Int in IO; throws (and is caught by callers) on bad input.
readInt :: String -> IO Int
readInt s = readIO s

readBool :: String -> IO Bool
readBool s = readIO s

-- | Finnish yes/no field: any value starting with 'K' ("Kyllä") is True.
-- Appending a space keeps 'head' total on empty input; comparison is on
-- the upper-cased first character, so "kyllä" also counts as yes.
readKBool :: String -> IO Bool
readKBool s = return (toUpper (head (s ++ " ")) == 'K')
-- | Value cell of settings-table row @i@ (column 1 is the value).
v lst i = (lst !! i) !! 1
-- | Rebuild a Settings record from the edited table rows; any parse
-- failure throws and the caller keeps the old settings.
tryFunc g lst = do
    a <- readInt (v lst 0)
    b <- readInt (v lst 1)
    c <- readKBool (v lst 2)
    return g {
        startLine = a,
        lineLen = b,
        useHelper = c,
        keyrow1 = v lst 3,
        keyrow2 = v lst 4,
        keyrow3 = v lst 5
    }
-- | Re-render every row of the settings table from @newS@ (normalises
-- the display after a partially invalid edit).
refreshSettingsTable model newS = do
    mapM
        ( \(i,newRow) -> listStoreSetValue model i newRow )
        ( zip [0..] (settingsTable newS) )
-- | Cell-edit callback: write the new text into the model, try to parse
-- the whole table into Settings, fall back to the old settings on error.
onCellEdited gsRef model path newText = do
    gs <- readIORef gsRef
    let i = head path
    [key,oldText] <- listStoreGetValue model i
    listStoreSetValue model i [key,newText]
    lst <- listStoreToList model
    newS <- tryFunc (s gs) lst `catch`
        \(SomeException e) -> return (s gs)
    refreshSettingsTable model newS
    writeIORef gsRef gs { settings = newS }
-- | Menu action: pick a new font, store it in settings and apply it.
openFont gsRef = do
    gs <- readIORef gsRef
    result <- chooseFont "Valitse kirjasin" (font (s gs))
    case result of
        Just newFont -> do
            writeIORef gsRef gs {
                settings = (s gs) {
                    font = newFont }}
            setFonts gsRef
        otherwise -> return ()
-- | Modal font chooser preloaded with the current font name;
-- Nothing on cancel/close.
chooseFont prompt oldFont = do
    dialog <- fontSelectionDialogNew prompt
    fontSelectionDialogSetFontName dialog oldFont
    widgetShow dialog
    response <- dialogRun dialog
    print response
    case response of
        ResponseOk -> do
            fn <- fontSelectionDialogGetFontName dialog
            widgetDestroy dialog
            return fn
        ResponseCancel -> do
            widgetDestroy dialog
            return Nothing
        ResponseDeleteEvent -> do
            widgetDestroy dialog
            return Nothing
        _ -> return Nothing
-- | Menu action: pick a new practice text, reset the start line and
-- reload the wrapped lines.
openFile gsRef = do
    gs <- readIORef gsRef
    result <- chooseFile "Valitse teksti"
    case result of
        Just newTextFile -> do
            writeIORef gsRef gs {
                settings = (s gs) {
                    textfile = newTextFile,
                    startLine = 0 }}
            getLines gsRef
        otherwise -> return ()
-- | Modal open-file chooser starting in the current directory;
-- Nothing on cancel/close.
chooseFile prompt = do
    dialog <- fileChooserDialogNew (Just prompt) Nothing
                                   FileChooserActionOpen
                                   [("gtk-cancel",ResponseCancel),
                                    ("gtk-open", ResponseAccept)]
    fileChooserSetCurrentFolder dialog "."
    widgetShow dialog
    response <- dialogRun dialog
    case response of
        ResponseAccept -> do
            fn <- fileChooserGetFilename dialog
            widgetDestroy dialog
            return fn
        ResponseCancel -> do
            widgetDestroy dialog
            return Nothing
        ResponseDeleteEvent -> do
            widgetDestroy dialog
            return Nothing
        _ -> return Nothing
-- | Rank column text for a result row.
rShowRank rR =
    showRank (rRank rR)
-- | Error-percentage column for a result row.
rErrorPros rR =
    f02p (errorPros (rErrs rR) (rMrks rR))
-- | Error-percentage column for an interval row.
iErrorPros iV =
    f02p (errorPros (iErrs iV) (iMrks iV))
-- | Errors as a percentage of all keystrokes (marks + errors);
-- 0.0 when nothing has been typed yet.
errorPros errs mrks
    | total == 0 = 0.0
    | otherwise = 100.0 * (intToDouble errs) / (intToDouble total)
    where
        total = mrks + errs
-- | Format with one decimal, e.g. @2.0@ -> @"2.0"@.
f01 :: Double -> String
f01 x = printf "%.1f" x

-- | Format as a percentage with two decimals, e.g. @12.5@ -> @"12.50%"@.
f02p :: Double -> String
f02p x = printf "%.2f%%" x
-- | Typing speed in marks per minute; the divisor is clamped to 1s
-- so a fresh session cannot divide by zero.
speed mrks t =
    (intToDouble mrks) * 60.0 / (max t 1.0)
-- | Speed column text over one interval length.
iSpeed mrks =
    f01 (speed mrks (intToDouble iDuration))
-- | Speed column text over one result length.
rSpeed mrks =
    f01 (speed mrks (intToDouble rDuration))
-- | mm:ss at which interval @n@ starts; clamped to 00:00 for n <= 0.
iStarts n
    | n <= 0 = "00:00"
    | otherwise = mmss (fromIntegral (n*iDuration) :: Double)
iEnds n = iStarts (n+1)
-- | Interval index containing second @t@.
iNumber t =
    floor t `div` iDuration
-- | Seconds remaining in the current interval at time @t@.
iLeft t =
    iDuration - (floor t `mod` iDuration)
-- | Render a number of seconds as zero-padded @mm:ss@.
mmss seconds = pad mins ++ ":" ++ pad secs
    where
        (mins, secs) = (floor seconds :: Integer) `divMod` 60
        pad = leadingZero . show

-- | Left-pad a numeral to at least two characters with '0'.
leadingZero s
    | length s >= 2 = s
    | otherwise     = '0' : s
-- | Seconds elapsed between two POSIX timestamps, as a Double.
secondsFrom startPt endPt = ptToDouble endPt - ptToDouble startPt

-- | Exact conversion of a 'POSIXTime' to Double via Rational.
ptToDouble :: POSIXTime -> Double
ptToDouble = fromRational . toRational

-- | Widen an Int to Double.
intToDouble :: Int -> Double
intToDouble = fromIntegral
-- | Character at position @n@ of @str@, with one trailing space so the
-- position just past the end still yields ' '.
nextChar n str = (str ++ " ") !! n

-- | Replace the first @n@ characters of @str@ with spaces.
blankStart n str = spaces ++ remainder
    where
        spaces    = replicate n ' '
        remainder = drop n str
-- | Put the current and next goal lines into the two labels, remember
-- them together with the first expected letter, and clear the entry.
renewLabels gsRef = do
    gs <- readIORef gsRef
    let labelStrs = labelStrings (startLine (settings gs)) (textLines gs)
    set (gLabel1 (g gs)) [ labelLabel := labelStrs !! 0 ]
    set (gLabel2 (g gs)) [ labelLabel := labelStrs !! 1 ]
    writeIORef gsRef gs {
        oLabelStrs = labelStrs,
        -- trailing space keeps 'head' total on an empty line
        nextLetter = head ((labelStrs !! 0) ++ " ")
    }
    entrySetText (gEntry (g gs)) ""
-- | The goal line at @startline@ and the following one, wrapping
-- around the end of the text with modular indexing.
labelStrings :: Int -> [String] -> [String]
labelStrings startline textLines =
    [ textLines !! (i `mod` count) | i <- [startline, startline + 1] ]
    where
        count = length textLines
-- | Intervals whose number lies in [iMin, iMax].
ivsBetween iMin iMax ivs =
    filter (\iv -> iMin <= (iNum iv) && (iNum iv) <= iMax) ivs
-- | Intervals numbered iMin or later.
ivsFrom iMin ivs =
    filter (\iv -> iMin <= (iNum iv)) ivs
-- | One interval per number in [iMin, iMax], filling gaps with zeros.
ivsAllBetween iMin iMax ivs =
    [ivExactly n ivs | n <- [iMin .. iMax]]
-- | The interval numbered @n@, or an empty placeholder for it.
ivExactly n ivs =
    case find (\iv -> n == (iNum iv)) ivs of
        Just x -> x
        Nothing -> zeroInterval { iNum = n }
-- Session-speed sampling: table refresh period (ms), the window over
-- which the running speed is computed (ms), and its length in samples.
tableRRefreshMs = 500
speedFromMs = 10000
speedCount = speedFromMs `div` tableRRefreshMs
-- | Elapsed seconds and typed marks across the sample window
-- (newest sample first in @speds@); (0.0, 0) when no samples yet.
difs speds =
    if null speds
        then (0.0, 0)
        else (secondsFrom (fst start) (fst end), (snd end) - (snd start))
    where
        start = last speds
        end = head speds
-- | Refresh the session row: push a fresh (time, total-marks) sample,
-- keep at most 'speedCount' older samples, and recompute the running
-- speed over that window.  Note the local 's' shadows the settings
-- accessor; here it is the (seconds, marks) pair from 'difs'.
renewTableS gs t = do
    pt <- getPOSIXTime
    let newGs = gs {
        speedNows = [(pt, (total gs))] ++ take speedCount (speedNows gs)
    }
    let s = difs (speedNows newGs)
    listStoreSetValue (gModelS (g gs)) 0 Timing {
        sSecsLeft = iLeft t,
        sSession = mmss t,
        sTotal = total gs,
        sSpeed = speed (snd s) (fst s)
    }
    return newGs
-- | Best (first) result of a rank-ordered list, or 'zeroResult' when
-- no result has been recorded yet.
bestResult [] = zeroResult
bestResult (best : _) = best
-- | Local timestamp for the results table, e.g. "2015-01-31 18:05:00".
timeFormatted :: ZonedTime -> String
timeFormatted = formatTime defaultTimeLocale "%Y-%m-%d %H:%M:%S"
-- | Stamp a result with its new rank, leaving all other fields intact
-- (record update instead of re-listing every field).
reRank1 (result, newRank) = result { rRank = newRank }

-- | Renumber a rank-ordered result list 1..n.
reRank rs = map reRank1 (zip rs [1..])
-- | Build a new result from the shown intervals, insert it into the
-- ordered all-time list (capped at 'maxRank'), renumber ranks, and
-- return the list plus the three rows to display
-- (all-time best, session best, latest).
addResult showIvs gs = do
    pt <- getPOSIXTime
    tz <- getCurrentTimeZone
    let newResult0 = zeroResult {
        rDate = timeFormatted (utcToZonedTime tz (posixSecondsToUTCTime pt)),
        rMrks = sum [iMrks g | g <- showIvs],
        rErrs = sum [iErrs g | g <- showIvs]
    }
    let newResult = newResult0 {
        rRank = tellRank newResult0 (results gs)
    }
    let
        newRs = take maxRank (insert newResult (results gs))
        new2Rs = reRank newRs
        newShownRs = [
            bestResult new2Rs,
            (sessionBest gs) `min` newResult,
            newResult ]
    return (new2Rs, newShownRs)
-- | Rank column text; ranks past the cap render as ">5000".
showRank rank =
    if rank > maxRank then ">" ++ show maxRank else show rank

-- | 1-based position a value would take in an ordered list: one past
-- the first element it is <=, or length+1 when it belongs at the end.
tellRank x xs =
    maybe (length xs + 1) (+ 1) (findIndex (x <=) xs)

-- | Maximum number of results kept (and largest displayable rank).
maxRank = 5000
-- | Write the shown results (best / session best / latest) into rows
-- 0..2 of the results list store.
renewTableR gs shownRs = do
    -- mapM_ rather than mapM + return (): the row writes are effects only
    mapM_
        (\(i, row) -> listStoreSetValue (gModelR (g gs)) i row)
        (zip [0..] shownRs)
-- | Refresh the interval table with the last 'amountOfIntervals'
-- completed intervals (newest at the bottom), fold them into a new
-- result, prune intervals that scrolled out, and return the updated
-- state together with the rows for the results table.
renewTableI gs iCur = do
    mapM
        (\(a,b) -> listStoreSetValue (gModelI (g gs)) (amountOfIntervals-a) b)
        (zip [1..] showIvs)
    (newRs, newShownRs) <- addResult showIvs gs
    return (gs {
        intervals = newIvs,
        lastShownIv = iCur,
        results = newRs,
        sessionBest = newShownRs !! 1
    }, newShownRs)
    where
        iMaxShow = iCur - 1                 -- latest completed interval
        infimum = iMaxShow - amountOfIntervals + 1
        iMinShow = max 0 infimum            -- oldest displayed interval
        iMinNeed = max 0 (infimum + 1)      -- oldest interval kept in state
        newIvs = ivsFrom iMinNeed (intervals gs)
        showIvs = reverse (ivsAllBetween iMinShow iMaxShow (intervals gs))
-- | Once per completed interval: refresh interval and result tables and
-- trigger the rank-coloured timing flash.
renewSeldomTables gs iCur = do
    (newGs, shownRs) <- renewTableI gs iCur
    renewTableR newGs shownRs
    widgetQueueDraw (gTimingCanvas (g gs))
    return newGs { showTimingPict = True }
-- | Refresh the fast session row on every tick; additionally refresh
-- the slower tables when the interval number has advanced (iCur >= 1
-- skips the still-running first interval).
renewTables gs t iCur = do
    newGs <- renewTableS gs t
    if (lastShownIv newGs /= iCur) && iCur >= 1
        then renewSeldomTables newGs iCur
        else return newGs
-- | Periodic timer (every 'tableRRefreshMs'): compute session time and
-- current interval, refresh tables; returning True keeps the timer alive.
onTimeout gsRef = do
    gs <- readIORef gsRef
    pt <- getPOSIXTime
    let t = secondsFrom (startTime gs) pt
        iCur = iNumber t
    newGs <- renewTables gs t iCur
    writeIORef gsRef newGs
    return True
-- | Entry change handler — the heart of the game loop.  Classifies the
-- input against the goal line, dispatches to the matching state handler,
-- blanks the already-typed prefix of the goal label, records the new
-- status and next expected letter, redraws the feedback canvases, and
-- advances to the next line when the whole line has been typed.
whenEntryChanged gsRef = do
    pt <- getPOSIXTime
    gs <- readIORef gsRef
    txt <- entryGetText (gEntry (g gs))
    let label1Str = head (oLabelStrs gs)
        status = getStatus txt label1Str (oldlen gs)
        -- pick the handler for this (new, old) status transition
        f = case (status,oldStatus gs) of
            (_,NotStarted) -> whenNotStarted status
            (Correct,_) -> whenCorrect txt
            (Error,Correct) -> whenNewError
            otherwise -> whenOther status (oldStatus gs)
        cprfix = length (commonPrefix txt label1Str)
    newGs <- f pt gsRef gs
    set (gLabel1 (g gs)) [
        labelLabel := blankStart cprfix label1Str]
    writeIORef gsRef newGs {
        oldStatus = status,
        oldlen = max cprfix (oldlen gs),
        nextLetter = nextChar cprfix label1Str
    }
    drawStatusText gsRef
    widgetQueueDraw (gErrorCanvas (g gs))
    widgetQueueDraw (gHelperCanvas (g gs))
    when (label1Str == txt) (advanceLine gsRef newGs)
    return ()
-- | First keypress of a session: start the refresh timer, set the
-- session start time, and record the keystroke in interval 0.
whenNotStarted status pt gsRef gs = do
    timeoutAdd (onTimeout gsRef) tableRRefreshMs
    return gs {
        total = if status == Correct then 1 else 0,
        startTime = pt,
        intervals = addTime
            status
            (iNumber 0.0)
            (intervals gs)
    }
-- | A correct keystroke: bump the session total and the current
-- interval's mark count.
whenCorrect txt pt gsRef gs = do
    return gs {
        total = (total gs) + 1,
        intervals = addTime
            Correct
            (iNumber (secondsFrom (startTime gs) pt))
            (intervals gs)
    }
-- | Transition Correct -> Error: charge one error to the current
-- interval (further keypresses while still in error are not counted).
whenNewError pt gsRef gs = do
    return gs {
        intervals = addTime
            Error
            (iNumber (secondsFrom (startTime gs) pt))
            (intervals gs)
    }
-- | All other transitions (e.g. backspacing) change nothing.
whenOther status oldStatus pt gsRef gs = do
    return gs
-- | Number of the newest interval, or -1 when none exist yet.
latestIvNum ivs = if null ivs then -1 else iNum (head ivs)
-- | Record one keystroke in interval @i@: start a fresh interval at the
-- head if @i@ is new, then bump its mark or error count depending on
-- @status@.  Newest interval is always the head of the list.
addTime status i intervals =
    [newHead] ++ tail newIvs
    where
        newHead = case status of
            Correct -> headIv { iMrks = (iMrks headIv) + 1 }
            Error -> headIv { iErrs = (iErrs headIv) + 1 }
        headIv = head newIvs
        newIvs = if null intervals || i /= latestIvNum intervals
            then [zeroInterval { iNum = i }] ++ intervals
            else intervals
-- | Move to the next goal line (wrapping), reset the typed-prefix
-- length, and refresh the labels.  Note: the @gs@ parameter is unused;
-- the state is re-read from the IORef (the local bind shadows it).
advanceLine gsRef gs = do
    gs <- readIORef gsRef
    let newStartLine = ((startLine (s gs)) + 1) `mod` (length (textLines gs))
    writeIORef gsRef gs {
        settings = (s gs) {
            startLine = newStartLine},
        oldlen = 0
    }
    renewLabels gsRef
    return ()
-- | Join each group of words into one space-separated line.
colLines = map unwords
-- | Partition a word list into groups that each fit on a line of
-- length @n@ (counting one separating space per word); a single word
-- longer than the line still gets its own group.
collectWords [] _ = []
collectWords ys n = chunk : collectWords rest n
    where
        taken         = length (untilLen ys 0 n)
        (chunk, rest) = splitAt taken ys

-- | Longest prefix of words whose widths (word + one space) stay
-- below @limit@; the @used == 0@ case always admits the first word.
untilLen [] _ _ = []
untilLen (w:ws) used limit
    | used + cost < limit || used == 0 = w : untilLen ws (used + cost) limit
    | otherwise                        = []
    where
        cost = length w + 1
module Machine.Class where
import Autolib.Set
import Autolib.Schichten
import Autolib.Reporter hiding ( output )
import Autolib.ToDoc
import Autolib.Size
import Machine.History
import Data.Typeable
import qualified Data.Set as S -- for priority queue
-- | A complete machine model: printable and sized machine @m@, ordered
-- printable data @dat@ and configurations @conf@, with input/output
-- conversion, a step relation, and history tracking.  The blanket
-- instance below makes 'Machine' a pure constraint synonym.
class ( ToDoc m, Size m
      , ToDoc dat, Ord dat
      , ToDoc conf, Ord conf
      , InOut m dat conf
      , Compute m conf
      , History conf
      , Typeable m, Typeable dat, Typeable conf
      ) => Machine m dat conf | m -> dat, m -> conf
instance ( ToDoc m, Size m
      , ToDoc dat, Ord dat
      , ToDoc conf, Ord conf
      , InOut m dat conf
      , Compute m conf
      , History conf
      , Typeable m, Typeable dat, Typeable conf
      ) => Machine m dat conf
--------------------------------------------------------------------
class In m dat conf | m -> dat, m -> conf where -- strong dependencies ??
    -- | Build the start configuration from the input data
    -- (a tuple of arguments), reporting any conversion errors.
    input_reporter :: ( Typeable m, Typeable dat, Typeable conf )
		   => m -> dat -> Reporter conf
class Ord conf => Compute m conf where
    -- | All direct successor configurations (nondeterministic).
    next :: m -> conf -> Set conf
    -- | Is this configuration accepting?
    accepting :: m -> conf -> Bool
    -- | Number of steps taken to reach this configuration.
    depth :: m -> conf -> Int
    -- | the search could use a priority queue ordered by weights
    -- (configurations with smaller weights are preferred)
    -- this function is only called once per item
    -- (for comparison in the queue, the value is cached)
    -- default implementation: weight == depth
    -- this leads to a breadth first search
    weight :: m -> conf -> Double
    weight m conf = fromIntegral $ depth m conf
-- | Possibly infinite list of all configurations reachable from @k@,
-- flattened from the breadth-first layers of 'schichten'.
nachfolger :: Compute m conf
	   => m -> conf -> [ conf ]
nachfolger a k = concatMap setToList (schichten (next a) k)
-- | possibly infinite list of reachable configurations,
-- search prefers smaller weights
weighted_nachfolger :: Compute m conf
	   => m -> conf -> [ conf ]
weighted_nachfolger a k = do
    -- pair each configuration with its (cached) weight for the queue
    let lift k = ( weight a k, k )
    -- best-first search: @done@ holds visited configurations, @todo@ is
    -- a Set used as a priority queue keyed on (weight, conf)
    let handle done todo = case S.minView todo of
	    Nothing -> []
	    Just ( (w, top) , rest ) ->
	        let done' = S.insert top done
		    succs = map lift
			  $ filter ( \ x -> not $ S.member x done' )
			  $ setToList
			  $ next a top
	        in  top : handle done' ( foldr S.insert rest succs )
    handle S.empty ( S.singleton $ lift k )
-- | Prefix of the reachable configurations.  NOTE(review): the @cut@
-- parameter is currently ignored (the depth-limited implementation is
-- commented out) — the full weighted search is returned instead.
nachfolger_cut :: Compute m conf
	   => Int -> m -> conf -> [ conf ]
nachfolger_cut cut a k =
    -- concat $ map setToList $ take cut $ schichten (next a) k
    weighted_nachfolger a k
class Out m dat conf | m -> dat, m -> conf where
    -- | Read the output data back out of a final configuration.
    output_reporter :: m -> conf -> Reporter dat
-- | Constraint synonym for machines with both input and output
-- conversion (blanket instance below).
class ( In m dat conf, Out m dat conf )
     => InOut m dat conf | m -> dat, m -> conf
instance ( In m dat conf, Out m dat conf )
     => InOut m dat conf
--------------------------------------------------------------------
-- | Data that can be built from a list of naturals (Gödel-style encoding).
class Encode dat where
      encode :: [ Integer ] -> dat
-- | Data that can be collapsed back to a single natural.
class Decode dat where
      decode :: dat -> Integer
-- | Constraint synonym: encodable and decodable (blanket instance).
class ( Encode dat, Decode dat ) => Numerical dat
instance ( Encode dat, Decode dat ) => Numerical dat
| florianpilz/autotool | src/Machine/Class.hs | gpl-2.0 | 3,251 | 5 | 19 | 931 | 942 | 496 | 446 | -1 | -1 |
module Ruab.Main (main) where
-- imports {{{1
import Ruab.Options (options)
import Ruab.Test (runTests)
import System.Environment (getArgs, getProgName)
import System.Exit (exitWith)
import System.IO (stderr, hPutStrLn)
import qualified Ruab.Frontend as F
main :: IO () -- {{{1
main = do
  argv <- getArgs
  case argv of
    -- "--test" runs the test suite with the remaining args
    ("--test":rest) -> runTests rest
    _ -> runDebugger argv
-- | Parse options (exiting with a message on failure) and start the
-- debugger frontend.
runDebugger :: [String] -> IO ()
runDebugger argv = do
  prg <- getProgName
  opt <- either exit return $ options prg argv
  F.run opt
  return ()
  where
    exit (ec, why) = hPutStrLn stderr why >> exitWith ec
-- | GHCi helper: run the test suite with extra args appended.
_tests :: String -> IO () -- {{{1
_tests args = runTests $ words $ "--hide-successes --plain -j 3 " ++ args
| copton/ocram | ruab/src/Ruab/Main.hs | gpl-2.0 | 711 | 0 | 11 | 144 | 263 | 138 | 125 | 22 | 2 |
{- |
Module : Primitive
Description : SVG primitives
Copyright : (c) Frédéric BISSON, 2015
License : GPL-3
Maintainer : zigazou@free.fr
Stability : experimental
Portability : POSIX
SVG primitives like a circle or rect for SVG generation.
-}
module Tank.SVG.Primitive (circle, rect, path, g, svg) where
import qualified Data.Text as T
import Text.XML.Generator
import Tank.SVG.Attribute
import Tank.SVG.Show
import Tank.Units
{-|
Generate an SVG circle element centered at the given coordinates.
-}
circle :: Coords -- ^ Center of the circle
       -> Double -- ^ Radius of the circle
       -> T.Text -- ^ Fill-in color in #RRGGBB format
       -> Xml Elem
circle c r fill = snode "circle" attrs []
    where (x, y) = toTuple c
          attrs  = [ "cx" =. x
                   , "cy" =. y
                   , "r" =. r
                   , "fill" =. fill
                   ]
{-|
Generate an SVG rectangle element from a corner and a size.
-}
rect :: Coords -- ^ Top-left corner coordinates
     -> Coords -- ^ Width and height
     -> T.Text -- ^ Fill-in color in #RRGGBB format
     -> Xml Elem
rect c d fill = snode "rect" attrs []
    where (x, y) = toTuple c
          (w, h) = toTuple d
          attrs  = [ "x" =. x
                   , "y" =. y
                   , "width" =. w
                   , "height" =. h
                   , "fill" =. fill
                   ]
{-|
Generate an SVG path.
-}
path :: T.Text -- ^ A string containing the path
     -> T.Text -- ^ Fill-in color in #RRGGBB format
     -> Xml Elem
path d fill = snode "path" [ "d" =. d, "fill" =. fill ] []
{-|
Group multiple elements and generate an SVG g element.
-}
g :: [Xml Attr] -- ^ List of attributes
  -> [Xml Elem] -- ^ List of children
  -> Xml Elem
g = snode "g"
{-|
Generate the top SVG element
-}
svg :: [Xml Attr] -- ^ List of attributes
    -> [Xml Elem] -- ^ List of children
    -> Xml Elem
svg = snode "svg"
{-
Copyright (C) 2015 Michael Dunsmuir
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
-}
{-# LANGUAGE TemplateHaskell,
ScopedTypeVariables,
OverloadedStrings #-}
-- | Types representing various GTFS data
module RoutequeryService.GTFS.Types (
WheelchairBoarding(..),
Agency(..),
Stop(..),
LocationType(..),
Route(..),
RouteType(..),
Trip(..),
TripDirection(..),
BikesAllowed(..),
StopTime(..),
GTFSTime(..),
PickupType(..),
DropoffType(..),
TimepointInfo(..)
) where
import Control.Applicative (empty)
import Data.Csv
import qualified Data.ByteString as BS
import qualified Data.Text as T
import Text.Read
import Data.List.Split (splitOn)
import RoutequeryService.GTFS.Types.TH
-- * Types
-- Common enums (Template Haskell generates the data type, FromField
-- instance mapping 0..n-1 to constructors, and Show/Eq as appropriate).
$(makeEnum "WheelchairBoarding"
  ["WheelchairUnknown", "WheelchairAvailable", "WheelchairNotAvailable"])
-- Agency — one record per transit agency in agency.txt.
$(makeGTFSType "Agency" [
  Optional "agency_id" ''T.Text,
  Required "agency_name" ''T.Text,
  Required "agency_url" ''T.Text,
  Required "agency_timezone" ''T.Text,
  Optional "agency_lang" ''T.Text,
  Optional "agency_phone" ''T.Text,
  Optional "agency_fare_url" ''T.Text])
-- Stop — one record per location in stops.txt.
$(makeEnum "LocationType" ["StopLocation", "StationLocation"])
$(makeGTFSType "Stop" [
  Required "stop_id" ''T.Text,
  Optional "stop_code" ''T.Text,
  Required "stop_name" ''T.Text,
  Optional "stop_desc" ''T.Text,
  Required "stop_lat" ''Double,
  Required "stop_lon" ''Double,
  Optional "zone_id" ''T.Text,
  Optional "stop_url" ''T.Text,
  Optional "location_type" ''LocationType,
  Optional "parent_station" ''T.Text,
  Optional "stop_timezone" ''T.Text,
  Optional "wheelchair_boarding" ''WheelchairBoarding])
-- Route — one record per route in routes.txt.
$(makeEnum "RouteType"
  ["LightRail", "Subway", "Rail", "Bus",
   "Ferry", "CableCar", "Gondola", "Funicular"])
$(makeGTFSType "Route" [
  Required "route_id" ''T.Text,
  Optional "agency_id" ''T.Text,
  Required "route_short_name" ''T.Text,
  Required "route_long_name" ''T.Text,
  Optional "route_desc" ''T.Text,
  Required "route_type" ''RouteType,
  Optional "route_url" ''T.Text,
  Optional "route_color" ''T.Text,
  Optional "route_text_color" ''T.Text ])
-- Trip — one record per trip in trips.txt.
$(makeEnum "TripDirection" ["ThisDirection", "ThatDirection"])
$(makeEnum "BikesAllowed" ["BikesUnknown", "BikesYes", "BikesNo"])
$(makeGTFSType "Trip" [
  Required "route_id" ''T.Text,
  Required "service_id" ''T.Text,
  Required "trip_id" ''T.Text,
  Optional "trip_headsign" ''T.Text,
  Optional "trip_short_name" ''T.Text,
  Optional "direction_id" ''TripDirection,
  Optional "block_id" ''T.Text,
  Optional "shape_id" ''T.Text,
  Optional "wheelchair_accessible" ''WheelchairBoarding,
  Optional "bikes_allowed" ''BikesAllowed])
-- StopTime
-- | Time-of-day as it appears in stop_times.txt.  Hours may exceed 23
-- for trips running past midnight per the GTFS spec — TODO confirm
-- callers rely on that.
data GTFSTime = GTFSTime {
  hour :: Int,
  minute :: Int,
  second :: Int } deriving (Eq, Ord, Show)
-- | Parse a GTFS @H:M:S@ CSV field.  Exactly three ':'-separated
-- numeric components are required; anything else fails the parser.
instance FromField GTFSTime where
  parseField f = case splitOn ":" f' of
      [hh, mm, ss] -> maybe empty return
          (GTFSTime <$> readMaybe hh <*> readMaybe mm <*> readMaybe ss)
      _ -> empty
    where
      -- widen each byte to a Char (digits and ':' are ASCII, so this
      -- Latin-1 style decoding is lossless for well-formed fields)
      f' = map (toEnum . fromEnum) $ BS.unpack f :: [Char]
-- Pickup/dropoff policies and timepoint precision for stop_times.txt.
$(makeEnum "PickupType" [
  "RegularPickup",
  "NoPickup",
  "PhoneAgencyPickup",
  "CoordinateWithDriverPickup"])
$(makeEnum "DropoffType" [
  "RegularDropoff",
  "NoDropoff",
  "PhoneAgencyDropoff",
  "CoordinateWithDriverDropoff"])
$(makeEnum "TimepointInfo" [
  "ApproximateTimes",
  "ExactTimes"])
-- StopTime — one record per scheduled stop in stop_times.txt.
$(makeGTFSType "StopTime" [
  Required "trip_id" ''T.Text,
  Optional "arrival_time" ''GTFSTime,
  Required "departure_time" ''GTFSTime,
  Required "stop_id" ''T.Text,
  Required "stop_sequence" ''Int,
  Optional "stop_headsign" ''T.Text,
  Optional "pickup_type" ''PickupType,
  Optional "dropoff_type" ''DropoffType,
  Optional "shape_dist_traveled" ''Double,
  Optional "timepoint" ''TimepointInfo])
| mdunsmuir/routequery-service | src/RoutequeryService/GTFS/Types.hs | gpl-3.0 | 4,695 | 0 | 12 | 903 | 1,173 | 624 | 549 | 117 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Estuary.Help.Crudo where
import Reflex
import Reflex.Dom
import Data.Text
import GHCJS.DOM.EventM
import Estuary.Widgets.Reflex
import Estuary.Widgets.Reflex
-- | Render the complete Crudo help panel: a short description followed by
-- one toggleable example/reference entry per language keyword.
-- NOTE(review): "cueva" has example and reference text defined below but
-- is not listed here -- confirm whether the omission is intentional.
crudoHelpFile :: MonadWidget t m => m ()
crudoHelpFile = divClass "languageHelpContainer" $ divClass "languageHelp" $ do
  about
  functionRef "trueno"
  functionRef "río"
  functionRef "cascada"
  functionRef "volcán"
  functionRef "rama"
  functionRef "viento"
  functionRef "eco"
  functionRef "oscuridad"
  functionRef "salvaje"
  functionRef "este"
  functionRef "oeste"
  return ()
-- | Short header describing the Crudo language, rendered as two
-- consecutive styled div elements.
about :: MonadWidget t m => m ()
about =
  mapM_ (divClass "about primary-color code-font" . text)
    [ "Crudo"
    , "A mini live coding esolang developed in Bogotá, Colombia."
    ]
-- | The example code snippet displayed for a Crudo keyword.
--
-- Fixed: the "cueva" example previously read " \"short\"" -- a copy-paste
-- of its 'referenceText' ("returns Dirt's \"short\" sample"); every other
-- entry echoes the keyword itself. Also added a total catch-all case so an
-- unknown keyword renders as empty text instead of crashing the help
-- widget with a missing-pattern error.
exampleText :: Text -> Text
exampleText "trueno" = " \"trueno\""
exampleText "río" = " \"río\""
exampleText "cascada" = " \"cascada\""
exampleText "volcán" = " \"volcán\""
exampleText "rama" = " \"rama\""
exampleText "viento" = " \"viento\""
exampleText "cueva" = " \"cueva\""
exampleText "eco" = " \"trueno\" eco 2"
exampleText "oscuridad" = " \"río\" oscuridad 2"
exampleText "salvaje" = " \"rama rama rama rama\" salvaje 3"
exampleText "este" = " \"viento\" este 8"
exampleText "oeste" = " \"cueva\" oeste 0.75"
exampleText _ = ""
-- | The reference description displayed for a Crudo keyword.
--
-- Fixed: added a total catch-all case so an unknown keyword renders as
-- empty text instead of crashing the help widget with a missing-pattern
-- error. NOTE(review): both "cascada" and "viento" map to Dirt's "wind"
-- sample -- confirm whether "cascada" should be a different sample.
referenceText :: Text -> Text
referenceText "trueno" = "returns Dirt's \"bd\" sample"
referenceText "río" = "returns Dirt's \"sn\" sample"
referenceText "cascada" = "returns Dirt's \"wind\" sample"
referenceText "volcán" = "returns Dirt's \"stomp\" sample"
referenceText "rama" = "returns Dirt's \"hh\" sample"
referenceText "viento" = "returns Dirt's \"wind\" sample"
referenceText "cueva" = "returns Dirt's \"short\" sample"
referenceText "eco" = "returns TidalCycles' palindrome"
referenceText "oscuridad" = "returns TidalCycles' slow"
referenceText "salvaje" = "returns TidalCycles' density"
referenceText "este" = "returns TidalCycles' fast"
referenceText "oeste" = "returns TidalCycles' trunc"
referenceText _ = ""
-- | Render one help entry for keyword @x@: a button that toggles between
-- the keyword's example snippet (shown initially) and its reference text.
functionRef :: MonadWidget t m => Text -> m ()
functionRef x = divClass "helpWrapper" $ do
  switchToReference <- buttonWithClass' x
  -- The two visibility flags are driven by the same click event, with
  -- opposite initial values, so exactly one panel is visible at a time.
  exampleVisible <- toggle True switchToReference
  referenceVisible <- toggle False switchToReference
  hideableWidget exampleVisible "exampleText primary-color code-font" $ text (exampleText x)
  hideableWidget referenceVisible "referenceText code-font" $ text (referenceText x)
  return ()
| d0kt0r0/estuary | client/src/Estuary/Help/Crudo.hs | gpl-3.0 | 2,495 | 0 | 11 | 356 | 538 | 253 | 285 | 61 | 1 |
-- Copyright (C) 2013 Michael Zuser mikezuser@gmail.com
--
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program. If not, see <http://www.gnu.org/licenses/>.
-- | Miscellaneous list functions
module KitchenSink.List
( -- * Combinatorial functions
hydra
, permutations
, choices
, nonEmptySubseqs
, sublists
, cartesianProduct
, partitions
-- * Miscellaneous utility
, nonempty
, filterByIndex
, filterWithIndex
, pinch
-- * Recursion schemes
-- ** Cascade
-- | A cascade is like a fold, but each step in the fold can produce more
-- elements that need to folded.
, cascadel, cascadel', cascadelM
, cascader, cascader', cascaderM
)where
import KitchenSink.Combinators
import Control.Arrow
import Control.Applicative
import Data.List hiding (permutations)
import Data.Foldable (Foldable)
import qualified Data.Foldable as F
----------------------------
-- Combinatorial functions
----------------------------

-- | A many-'head'ed beast: one copy of the input per element, each copy
-- with that element moved to the front (remaining order preserved).
hydra :: [a] -> [[a]]
hydra []       = []
hydra (x : xs) = (x : xs) : promoted
  where
    -- Each tail result gains @x@ just behind its promoted head.
    promoted = [ y : x : ys | y : ys <- hydra xs ]
-- | Like Data.List.'Data.List.permutations', but produces the results in a
-- more canonical order.
permutations :: [a] -> [[a]]
permutations [] = [[]]
-- Each element is promoted to the front via 'hydra', then the remainder
-- is permuted recursively; the pattern (y:ys) is total because 'hydra'
-- never yields an empty list for nonempty input.
permutations xs = [y:zs | (y:ys) <- hydra xs, zs <- permutations ys]
-- | All ways to choose /n/ elements (order preserved) from the list;
-- empty when /n/ exceeds the list length.
choices :: Int -> [a] -> [[a]]
choices n xs
  | n > length xs = []
  | otherwise     = go n xs
  where
    go 0 _        = [[]]
    go _ []       = []
    go k (y : ys) = map (y :) (go (k - 1) ys) ++ go k ys
-- | All non-empty subsequences of the list, in the same order as
-- Data.List's unexported @nonEmptySubsequences@.
nonEmptySubseqs :: [a] -> [[a]]
nonEmptySubseqs []       = []
nonEmptySubseqs (x : xs) =
    [x] : concatMap withAndWithout (nonEmptySubseqs xs)
  where
    -- Each tail subsequence appears both without and with @x@ prepended.
    withAndWithout ys = [ys, x : ys]
-- | All non-empty contiguous sublists of the list.
sublists :: [a] -> [[a]]
sublists xs = [ s | t <- tails xs, s <- drop 1 (inits t) ]
-- | Cartesian product of a list of lists: every way of picking one
-- element from each inner list, leftmost list varying slowest.
cartesianProduct :: [[a]] -> [[a]]
cartesianProduct = foldr pickFrom [[]]
  where
    pickFrom l acc = [ x : xs | x <- l, xs <- acc ]
-- | All two-way partitions of the list: every element lands in exactly
-- one side, with relative order preserved on both sides.
partitions :: [a] -> [([a], [a])]
partitions []       = [([], [])]
partitions (x : xs) = map intoFirst rest ++ map intoSecond rest
  where
    rest = partitions xs
    intoFirst  (as, bs) = (x : as, bs)
    intoSecond (as, bs) = (as, x : bs)
--------------------------
-- Miscellaneous utility
--------------------------

-- | 'Just' the list when it is non-empty, otherwise 'Nothing'.
--
-- Fixed: the previous definition (@null xs ? Just xs ?? Nothing@) had its
-- branches swapped -- it returned 'Just' for the /empty/ list, contradicting
-- its own documentation. (The @c ? t ?? f@ combinator is C-ternary style:
-- 'choices' above relies on the true-branch coming first.)
nonempty :: [a] -> Maybe [a]
nonempty [] = Nothing
nonempty xs = Just xs
-- | Keep the elements whose position (0-based) satisfies the predicate.
filterByIndex :: (Int -> Bool) -> [a] -> [a]
filterByIndex p = filterWithIndex (\i _ -> p i)
-- | Keep the elements for which the predicate holds on both the element's
-- 0-based position and its value.
filterWithIndex :: (Int -> a -> Bool) -> [a] -> [a]
filterWithIndex f xs = [ x | (i, x) <- zip [0 ..] xs, f i x ]
-- | Like 'zip', but also hands back whichever list's tail was left over
-- (tagged 'Left' for the first list, 'Right' for the second; two equal
-- length lists yield @Left []@).
pinch :: [a] -> [b] -> ([(a, b)], Either [a] [b])
pinch xs []             = ([], Left xs)
pinch [] ys             = ([], Right ys)
pinch (x : xs) (y : ys) =
    let (pairs, leftover) = pinch xs ys
    in  ((x, y) : pairs, leftover)
----------------------
-- Recursion schemes
----------------------

-- Each step of a cascade folds one element and may emit a further
-- collection of elements, which is immediately folded recursively with
-- the same function. (<$$>) comes from KitchenSink.Combinators --
-- presumably @fmap . fmap@, i.e. mapping under two functor layers; confirm.

-- | cascade left
cascadel :: Foldable f => (a -> b -> (a, f b)) -> a -> f b -> a
cascadel f = F.foldl $ uncurry (cascadel f) <$$> f
-- | cascade left strictly
-- NOTE(review): this recurses via the lazy 'cascadel', so strictness only
-- applies to the outermost fold -- was @uncurry (cascadel' f)@ intended?
cascadel' :: Foldable f => (a -> b -> (a, f b)) -> a -> f b -> a
cascadel' f = F.foldl' $ uncurry (cascadel f) <$$> f
-- | cascade left monadicly
cascadelM :: (Foldable f, Monad m) => (a -> b -> m (a, f b)) -> a -> f b -> m a
cascadelM f = F.foldlM $ (>>= uncurry (cascadelM f)) <$$> f
-- | cascade right
cascader :: Foldable f => (b -> a -> (a, f b)) -> a -> f b -> a
cascader f = F.foldr $ uncurry (cascader f) <$$> f
-- | cascade right strictly
-- NOTE(review): recurses via the lazy 'cascader' (see 'cascadel'' above).
cascader' :: Foldable f => (b -> a -> (a, f b)) -> a -> f b -> a
cascader' f = F.foldr' $ uncurry (cascader f) <$$> f
-- | cascade right monadicly
cascaderM :: (Foldable f, Monad m) => (b -> a -> m (a, f b)) -> a -> f b -> m a
cascaderM f = F.foldrM $ (>>= uncurry (cascaderM f)) <$$> f
| bacchanalia/KitchenSink | KitchenSink/List.hs | gpl-3.0 | 4,786 | 0 | 12 | 1,021 | 1,553 | 857 | 696 | 69 | 3 |
module Utility.Rotate
( rotate
, shuffle
, choose
, swap
) where
import System.Random
-- | Rotate a list left by @n@ positions; negative @n@ rotates right.
-- The rotation amount is taken modulo the list length, so any @n@ is
-- accepted.
--
-- Fixed: the old guard evaluated @n \`mod\` length xs@ even for the empty
-- list, raising a divide-by-zero for any @n /= 0@; the empty list is now
-- a no-op.
rotate :: Int -> [a] -> [a]
rotate _ [] = []
rotate n xs = drop k xs ++ take k xs
  -- `mod` with a positive divisor yields a result in [0, length xs),
  -- which also normalises negative rotations.
  where k = n `mod` length xs
-- | Uniformly shuffle a list, threading the random generator through and
-- returning it alongside the result.
shuffle :: RandomGen g => [a] -> g -> ([a], g)
shuffle [] g = ([], g)
-- Pick a random element by rotating it to the front, keep it, and
-- shuffle the rest. head/tail are safe here: xs is nonempty and rotate
-- preserves length, so xrs is nonempty.
-- NOTE(review): each step calls length and rotate, making this O(n^2) --
-- fine for small lists, confirm acceptable for intended use.
shuffle xs g = ((head xrs) : fst shf, snd shf)
  where
    shf = shuffle (tail xrs) (snd ng)
    xrs = rotate (fst ng) xs
    ng = randomR (0, (ln-1)) g
    ln = length xs
-- | Pick one element uniformly at random, returning it together with the
-- advanced generator. The list must be nonempty.
choose :: RandomGen g => [a] -> g -> (a, g)
choose xs g = (xs !! i, g')
  where
    (i, g') = randomR (0, length xs - 1) g
-- | Exchange the elements at positions @i@ and @j@, leaving all other
-- positions untouched. Out-of-range indices simply never match, so the
-- list is returned unchanged.
swap :: [a] -> Int -> Int -> [a]
swap xs i j = [ pick k x | (k, x) <- zip [0 ..] xs ]
  where
    pick k x
      | k == i    = xs !! j
      | k == j    = xs !! i
      | otherwise = x
{-
rotate :: [a] -> Int -> [a]
rotate xs n
| n >= 0 = drop n xs ++ take n xs
| n <= 0= reverse (drop m (reverse xs) ++ take m (reverse xs))
where m = abs n
shuffle :: [a] -> [a]
shuffle [] = []
shuffle xs = (head xrs) : (shuffle $ tail xrs)
where
xrs = rotate xs $ fst $ randomR (0, (ln-1)) (mkStdGen 0)
ln = length xs
shuffle' :: RandomGen g => [a] -> g -> ([a], g)
shuffle' [] g = ([], g)
shuffle' xs g =
head xrs : fst shf, snd shf
[ xr
| xr <- rotate xs $ fst $ ng
]
where
shf = shuffle' tail xrs snd ng
ng = randomR (0, (ln-1)) g
swap xs i j
| i == j = xs
| j < i = swap xs j i
| otherwise = take i xs ++ [xs !! j] ++ (take (j - i - 1) $ drop (i + 1) xs) ++ [xs !! i] ++ drop (j + 1) xs
-} | xaphiriron/maps | Utility/Rotate.hs | gpl-3.0 | 1,518 | 7 | 11 | 451 | 454 | 241 | 213 | 28 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
-- |
-- Module : Network.Google.FirebaseDynamicLinks
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Firebase Dynamic Links API enables third party developers to
-- programmatically create and manage Dynamic Links.
--
-- /See:/ <https://firebase.google.com/docs/dynamic-links/ Firebase Dynamic Links API Reference>
module Network.Google.FirebaseDynamicLinks
(
-- * Service Configuration
firebaseDynamicLinksService
-- * OAuth Scopes
, firebaseScope
-- * API Declaration
, FirebaseDynamicLinksAPI
-- * Resources
-- ** firebasedynamiclinks.shortLinks.create
, module Network.Google.Resource.FirebaseDynamicLinks.ShortLinks.Create
-- * Types
-- ** Suffix
, Suffix
, suffix
, sOption
-- ** DynamicLinkWarning
, DynamicLinkWarning
, dynamicLinkWarning
, dlwWarningCode
, dlwWarningMessage
-- ** CreateShortDynamicLinkRequest
, CreateShortDynamicLinkRequest
, createShortDynamicLinkRequest
, csdlrLongDynamicLink
, csdlrSuffix
, csdlrDynamicLinkInfo
-- ** SocialMetaTagInfo
, SocialMetaTagInfo
, socialMetaTagInfo
, smtiSocialImageLink
, smtiSocialDescription
, smtiSocialTitle
-- ** CreateShortDynamicLinkResponse
, CreateShortDynamicLinkResponse
, createShortDynamicLinkResponse
, csdlrPreviewLink
, csdlrWarning
, csdlrShortLink
-- ** IosInfo
, IosInfo
, iosInfo
, iiIosBundleId
, iiIosIPadBundleId
, iiIosAppStoreId
, iiIosIPadFallbackLink
, iiIosCustomScheme
, iiIosFallbackLink
-- ** DynamicLinkInfo
, DynamicLinkInfo
, dynamicLinkInfo
, dliSocialMetaTagInfo
, dliDynamicLinkDomain
, dliLink
, dliIosInfo
, dliAndroidInfo
, dliAnalyticsInfo
-- ** SuffixOption
, SuffixOption (..)
-- ** Xgafv
, Xgafv (..)
-- ** AndroidInfo
, AndroidInfo
, androidInfo
, aiAndroidMinPackageVersionCode
, aiAndroidFallbackLink
, aiAndroidLink
, aiAndroidPackageName
-- ** DynamicLinkWarningWarningCode
, DynamicLinkWarningWarningCode (..)
-- ** AnalyticsInfo
, AnalyticsInfo
, analyticsInfo
, aiItunesConnectAnalytics
, aiGooglePlayAnalytics
-- ** ITunesConnectAnalytics
, ITunesConnectAnalytics
, iTunesConnectAnalytics
, itcaAt
, itcaMt
, itcaPt
, itcaCt
-- ** GooglePlayAnalytics
, GooglePlayAnalytics
, googlePlayAnalytics
, gpaUtmContent
, gpaUtmMedium
, gpaUtmTerm
, gpaUtmCampaign
, gpaGclid
, gpaUtmSource
) where
import Network.Google.FirebaseDynamicLinks.Types
import Network.Google.Prelude
import Network.Google.Resource.FirebaseDynamicLinks.ShortLinks.Create
{- $resources
TODO
-}
-- | Represents the entirety of the methods and resources available for the Firebase Dynamic Links API service.
-- Currently the service exposes a single resource:
-- @firebasedynamiclinks.shortLinks.create@.
type FirebaseDynamicLinksAPI =
     ShortLinksCreateResource
| rueshyna/gogol | gogol-firebase-dynamiclinks/gen/Network/Google/FirebaseDynamicLinks.hs | mpl-2.0 | 3,357 | 0 | 5 | 779 | 302 | 218 | 84 | 81 | 0 |
{-# LANGUAGE GADTs #-}
module Gonimo.Database.Effects.Servant where
-- Little helpers integrating db functions with servant:
import Control.Exception (SomeException)
import Control.Monad ((<=<))
import Control.Monad.Freer
import Control.Monad.Freer.Exception (Exc (..))
import Database.Persist (Entity, Key, Unique)
import Gonimo.Database.Effects
import Gonimo.Server.Error
import Servant.Server
-- | Like 'get', but aborts the request with HTTP 404 when no row has the
-- given key.
get404 :: FullDbConstraint backend a r => Key a -> Eff r a
get404 = servantErrOnNothing err404 <=< get

-- | Like 'getBy', but aborts the request with HTTP 404 when no row matches
-- the unique constraint.
getBy404 :: FullDbConstraint backend a r => Unique a -> Eff r (Entity a)
getBy404 = servantErrOnNothing err404 <=< getBy

-- | Unwrap a 'Just' value, or throw the supplied servant error into the
-- effect stack on 'Nothing'.
servantErrOnNothing :: (Member (Exc SomeException) r) => ServantErr -> Maybe a -> Eff r a
servantErrOnNothing err Nothing = throwServant err
servantErrOnNothing _ (Just v) = return v
| charringer/gonimo-back | src/Gonimo/Database/Effects/Servant.hs | agpl-3.0 | 929 | 0 | 9 | 231 | 243 | 133 | 110 | -1 | -1 |
module ViperVM.Runtime.Scheduler where
import ViperVM.Platform.Platform
import ViperVM.VirtualPlatform.Task
import Control.Applicative ((<$>))
import Control.Concurrent.STM
-- | A scheduler binds a platform to a task-submission strategy; 'submit'
-- returns an event that is set when the task completes.
data Scheduler = Scheduler {
   platform :: Platform,
   submit :: Task -> IO TaskEvent
}

-- | One-shot completion flag backed by a transactional variable.
-- The TVar is only ever written from False to True (see 'setEvent').
newtype TaskEvent = TaskEvent (TVar Bool)

-- | Create an event in the "not yet completed" state.
initEvent :: IO TaskEvent
initEvent = TaskEvent <$> newTVarIO False

-- | Mark the event as completed.
setEvent :: TaskEvent -> IO ()
setEvent (TaskEvent ev) = atomically (writeTVar ev True)
| hsyl20/HViperVM | lib/ViperVM/Runtime/Scheduler.hs | lgpl-3.0 | 467 | 0 | 10 | 69 | 138 | 78 | 60 | 13 | 1 |
--
-- Minio Haskell SDK, (C) 2017 Minio, Inc.
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
--
import Test.Tasty
import Test.Tasty.QuickCheck as QC
import qualified Data.List as L
import Lib.Prelude
import Network.Minio.API.Test
import Network.Minio.PutObject
import Network.Minio.Utils.Test
import Network.Minio.XmlGenerator.Test
import Network.Minio.XmlParser.Test
-- | Tasty entry point for the whole test suite.
main :: IO ()
main = defaultMain tests

-- | Top-level tree: property tests plus unit tests.
tests :: TestTree
tests = testGroup "Tests" [properties, unitTests]

-- Only the QuickCheck group is active; the SmallCheck examples below are
-- kept for reference.
properties :: TestTree
properties = testGroup "Properties" [qcProps] -- [scProps]

-- scProps = testGroup "(checked by SmallCheck)"
--   [ SC.testProperty "sort == sort . reverse" $
--     \list -> sort (list :: [Int]) == sort (reverse list)
--   , SC.testProperty "Fermat's little theorem" $
--     \x -> ((x :: Integer)^7 - x) `mod` 7 == 0
--   -- the following property does not hold
--   , SC.testProperty "Fermat's last theorem" $
--     \x y z n ->
--       (n :: Integer) >= 3 SC.==> x^n + y^n /= (z^n :: Integer)
--   ]
-- | QuickCheck properties for the multipart upload helpers:
-- 'selectPartSizes' (splitting an object of size n into parts) and
-- 'selectCopyRanges' (splitting a byte range into copy-part ranges).
qcProps :: TestTree
qcProps = testGroup "(checked by QuickCheck)"
  [ QC.testProperty "selectPartSizes:" $
    \n -> let (pns, offs, sizes) = L.unzip3 (selectPartSizes n)
              -- check that pns increments from 1.
              isPNumsAscendingFrom1 = all (\(a, b) -> a == b) $ zip pns [1..]

              consPairs [] = []
              consPairs [_] = []
              consPairs (a:(b:c)) = (a, b):(consPairs (b:c))

              -- check `offs` is monotonically increasing.
              isOffsetsAsc = all (\(a, b) -> a < b) $ consPairs offs

              -- check sizes sums to n.
              isSumSizeOk = sum sizes == n

              -- check sizes are constant except last
              isSizesConstantExceptLast =
                all (\(a, b) -> a == b) (consPairs $ L.init sizes)

              -- check each part except last is at least minPartSize;
              -- last part may be 0 only if it is the only part.
              nparts = length sizes
              isMinPartSizeOk =
                if | nparts > 1 -> -- last part can be smaller but > 0
                       all (>= minPartSize) (take (nparts - 1) sizes) &&
                       all (\s -> s > 0) (drop (nparts - 1) sizes)
                   | nparts == 1 -> -- size may be 0 here.
                       maybe True (\x -> x >= 0 && x <= minPartSize) $
                       headMay sizes
                   | otherwise -> False

          -- Negative n is vacuously accepted; all invariants must hold
          -- otherwise.
          in n < 0 ||
             (isPNumsAscendingFrom1 && isOffsetsAsc && isSumSizeOk &&
              isSizesConstantExceptLast && isMinPartSizeOk)

  , QC.testProperty "selectCopyRanges:" $
    \(start, end) ->
      let (_, pairs) = L.unzip (selectCopyRanges (start, end))
          -- is last part's snd offset end?
          isLastPartOk = maybe False ((end ==) . snd) $ lastMay pairs
          -- is first part's fst offset start
          isFirstPartOk = maybe False ((start ==) . fst) $ headMay pairs

          -- each pair is >=64MiB except last, and all those parts
          -- have same size.
          initSizes = maybe [] (map (\(a, b) -> b - a + 1)) $ initMay pairs
          isPartSizesOk = all (>= minPartSize) initSizes &&
                          maybe True (\k -> all (== k) initSizes)
                          (headMay initSizes)

          -- returned offsets are contiguous.
          fsts = drop 1 $ map fst pairs
          snds = take (length pairs - 1) $ map snd pairs
          isContParts = length fsts == length snds &&
                        and (map (\(a, b) -> a == b + 1) $ zip fsts snds)

      -- Invalid ranges are vacuously accepted.
      in start < 0 || start > end ||
         (isLastPartOk && isFirstPartOk && isPartSizesOk && isContParts)
  ]
-- | Unit-test groups imported from the per-module test suites.
unitTests :: TestTree
unitTests = testGroup "Unit tests" [xmlGeneratorTests, xmlParserTests,
                                    bucketNameValidityTests,
                                    objectNameValidityTests,
                                    limitedMapConcurrentlyTests]
| donatello/minio-hs | test/Spec.hs | apache-2.0 | 4,560 | 0 | 21 | 1,508 | 954 | 532 | 422 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE RecursiveDo #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE UnicodeSyntax #-}
{-# LANGUAGE ViewPatterns #-}
import Control.Monad
import Control.Monad.IO.Class
import Data.IORef
import Data.List (sort)
import Data.Monoid
import Data.Sequence ((|>))
import qualified Data.Sequence as Seq
import qualified Data.Set as Set
import Data.UUID (UUID)
import System.IO.Unsafe
import qualified System.Log.Logger as Logger
import System.Log.Logger (Priority(DEBUG),rootLoggerName,setLevel,updateGlobalLogger)
import System.Log.Logger.TH
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.Framework.Providers.QuickCheck2
import Test.HUnit hiding (Path,Test)
import Test.QuickCheck.Arbitrary hiding ((><))
import Test.QuickCheck.Gen hiding (shuffle)
import Test.QuickCheck.Instances ()
import Test.QuickCheck.Modifiers
import LogicGrowsOnTrees.Checkpoint
import LogicGrowsOnTrees.Parallel.ExplorationMode
import LogicGrowsOnTrees.Path
import LogicGrowsOnTrees.Parallel.Common.Supervisor
import LogicGrowsOnTrees.Parallel.Common.Worker
import LogicGrowsOnTrees.Testing
import LogicGrowsOnTrees.Workload
-- TH splice generating DEBUG-level logging helpers for this module.
deriveLoggers "Logger" [DEBUG]

-- | Run the supervisor test suite; uncomment the logger line to get
-- verbose DEBUG output while debugging failures.
main :: IO ()
main = do
    -- updateGlobalLogger rootLoggerName (setLevel DEBUG)
    defaultMain [tests]
-- | Test suite for the work-stealing supervisor. Each case scripts a
-- sequence of supervisor events (workers joining/leaving, progress
-- updates, workload steals, results) against stubbed supervisor actions
-- and checks the resulting 'SupervisorOutcome'. The
-- @bad_test_supervisor_actions@ stubs fail on any unexpected callback;
-- the @addAccept*@ / @addAppend*@ helpers record callbacks into IORefs
-- for inspection.
tests :: Test
tests = testGroup "LogicGrowsOnTrees.Parallel.Common.Supervisor"
    [testCase "immediately abort" $ do
        SupervisorOutcome{..} ← runSupervisor AllMode bad_test_supervisor_actions (TestProgram abortSupervisor)
        supervisorTerminationReason @?= SupervisorAborted (Progress Unexplored ())
        supervisorRemainingWorkers @?= ([] :: [Int])
    ,testCase "failure" $ do
        SupervisorOutcome{..} ← runTestSupervisor AllMode bad_test_supervisor_actions (receiveWorkerFailure () "FAIL" :: ∀ α. SupervisorMonad (AllMode ()) () IO α)
        supervisorTerminationReason @?= SupervisorFailure mempty () "FAIL"
        supervisorRemainingWorkers @?= []
    -- Worker arrival/departure bookkeeping, with and without a workload buffer.
    ,testGroup "adding and removing workers"
        [testGroup "without workload buffer"
            [testCase "add one worker then abort" $ do
                (maybe_workload_ref,actions) ← addAcceptOneWorkloadAction bad_test_supervisor_actions
                SupervisorOutcome{..} ← runTestSupervisor AllMode actions $ do
                    enableSupervisorDebugMode
                    setWorkloadBufferSize 0
                    addWorker ()
                    abortSupervisor
                supervisorTerminationReason @?= SupervisorAborted (Progress Unexplored ())
                supervisorRemainingWorkers @?= [()]
                readIORef maybe_workload_ref >>= (@?= Just ((),entire_workload))
            ,testCase "add then remove one worker then abort" $ do
                (maybe_workload_ref,actions) ← addAcceptOneWorkloadAction bad_test_supervisor_actions
                SupervisorOutcome{..} ← runTestSupervisor AllMode actions $ do
                    enableSupervisorDebugMode
                    setWorkloadBufferSize 0
                    addWorker ()
                    removeWorker ()
                    abortSupervisor
                supervisorTerminationReason @?= SupervisorAborted (Progress Unexplored ())
                supervisorRemainingWorkers @?= []
                readIORef maybe_workload_ref >>= (@?= Just ((),entire_workload))
            ,testCase "add then remove then add one worker then abort" $ do
                (maybe_workload_ref,actions) ← addAcceptMultipleWorkloadsAction bad_test_supervisor_actions
                SupervisorOutcome{..} ← runTestSupervisor AllMode actions $ do
                    enableSupervisorDebugMode
                    setWorkloadBufferSize 0
                    addWorker 1
                    removeWorker 1
                    addWorker 2
                    abortSupervisor
                supervisorTerminationReason @?= SupervisorAborted (Progress Unexplored ())
                supervisorRemainingWorkers @?= [2::Int]
                readIORef maybe_workload_ref >>= (@?= [(1,entire_workload),(2,entire_workload)])
            ,testCase "add two workers then remove first worker then abort" $ do
                (maybe_workload_ref,actions1) ← addAcceptMultipleWorkloadsAction bad_test_supervisor_actions
                (broadcast_ids_list_ref,actions2) ← addAppendWorkloadStealBroadcastIdsAction actions1
                SupervisorOutcome{..} ← runTestSupervisor AllMode actions2 $ do
                    enableSupervisorDebugMode
                    setWorkloadBufferSize 0
                    addWorker 1
                    addWorker 2
                    removeWorker 1
                    abortSupervisor
                supervisorTerminationReason @?= SupervisorAborted (Progress Unexplored ())
                supervisorRemainingWorkers @?= [2::Int]
                readIORef maybe_workload_ref >>= (@?= [(1,entire_workload),(2,entire_workload)])
                readIORef broadcast_ids_list_ref >>= (@?= [[1]])
            ,testProperty "add then remove many workers then abort" $ do
                (NonEmpty worker_ids_to_add :: NonEmptyList UUID) ← arbitrary
                worker_ids_to_remove ←
                   (fmap concat
                    $
                    forM (tail worker_ids_to_add)
                    $
                    \worker_id → do
                        should_remove ← arbitrary
                        if should_remove
                            then return [worker_id]
                            else return []
                    ) >>= shuffle
                let worker_ids_left = Set.toAscList $ Set.fromList worker_ids_to_add `Set.difference` Set.fromList worker_ids_to_remove

                return . unsafePerformIO $ do
                    (maybe_workload_ref,actions_1) ← addAcceptOneWorkloadAction bad_test_supervisor_actions
                    (broadcast_ids_list_ref,actions_2) ← addAppendWorkloadStealBroadcastIdsAction actions_1
                    SupervisorOutcome{..} ← runTestSupervisor AllMode actions_2 $ do
                        enableSupervisorDebugMode
                        setWorkloadBufferSize 0
                        mapM_ addWorker worker_ids_to_add
                        mapM_ removeWorker worker_ids_to_remove
                        abortSupervisor
                    supervisorTerminationReason @?= SupervisorAborted (Progress Unexplored ())
                    sort supervisorRemainingWorkers @?= worker_ids_left
                    readIORef maybe_workload_ref >>= (@?= Just (head worker_ids_to_add,entire_workload))
                    readIORef broadcast_ids_list_ref >>= (@?= if (null . tail) worker_ids_to_add then [] else [[head worker_ids_to_add]])
                    return True
            ]
        ,testGroup "with workload buffer"
            [testCase "add one worker then abort" $ do
                (maybe_workload_ref,actions1) ← addAcceptOneWorkloadAction bad_test_supervisor_actions
                (broadcasts_ref,actions2) ← addAppendWorkloadStealBroadcastIdsAction actions1
                SupervisorOutcome{..} ← runTestSupervisor AllMode actions2 $ do
                    enableSupervisorDebugMode
                    addWorker ()
                    abortSupervisor
                supervisorTerminationReason @?= SupervisorAborted (Progress Unexplored ())
                supervisorRemainingWorkers @?= [()]
                readIORef maybe_workload_ref >>= (@?= Just ((),entire_workload))
                readIORef broadcasts_ref >>= (@?= [[()]])
            ,testCase "add then remove one worker then abort" $ do
                (maybe_workload_ref,actions1) ← addAcceptOneWorkloadAction bad_test_supervisor_actions
                (broadcasts_ref,actions2) ← addAppendWorkloadStealBroadcastIdsAction actions1
                SupervisorOutcome{..} ← runTestSupervisor AllMode actions2 $ do
                    enableSupervisorDebugMode
                    addWorker ()
                    removeWorker ()
                    abortSupervisor
                supervisorTerminationReason @?= SupervisorAborted (Progress Unexplored ())
                supervisorRemainingWorkers @?= []
                readIORef maybe_workload_ref >>= (@?= Just ((),entire_workload))
                readIORef broadcasts_ref >>= (@?= [[()]])
            ,testCase "add then remove then add one worker then abort" $ do
                (maybe_workload_ref,actions1) ← addAcceptMultipleWorkloadsAction bad_test_supervisor_actions
                (broadcasts_ref,actions2) ← addAppendWorkloadStealBroadcastIdsAction actions1
                SupervisorOutcome{..} ← runTestSupervisor AllMode actions2 $ do
                    enableSupervisorDebugMode
                    addWorker 1
                    removeWorker 1
                    addWorker 2
                    abortSupervisor
                supervisorTerminationReason @?= SupervisorAborted (Progress Unexplored ())
                supervisorRemainingWorkers @?= [2::Int]
                readIORef maybe_workload_ref >>= (@?= [(1,entire_workload),(2,entire_workload)])
                readIORef broadcasts_ref >>= (@?= [[1],[2]])
            ]
        ]
    -- Propagation of progress-update requests and responses.
    ,testGroup "progress updates"
        [testCase "request progress update when no workers present" $ do
            (maybe_progress_ref,actions) ← addReceiveCurrentProgressAction bad_test_supervisor_actions
            SupervisorOutcome{..} ← runTestSupervisor AllMode actions $ do
                enableSupervisorDebugMode
                setWorkloadBufferSize 0
                performGlobalProgressUpdate
                abortSupervisor
            supervisorTerminationReason @?= SupervisorAborted (Progress Unexplored ())
            supervisorRemainingWorkers @?= ([] :: [()])
            readIORef maybe_progress_ref >>= (@?= Just (Progress Unexplored ()))
        ,testProperty "request progress update when all active workers present leave" $ do
            number_of_active_workers ← choose (1,10 :: Int)
            number_of_inactive_workers ← choose (0,10)
            let active_workers = [0..number_of_active_workers-1]
                inactive_workers = [101..101+number_of_inactive_workers-1]
            return . unsafePerformIO $ do
                (maybe_progress_ref,actions1) ← addReceiveCurrentProgressAction bad_test_supervisor_actions
                (broadcast_ids_list_ref,actions2) ← addAppendProgressBroadcastIdsAction actions1
                (workload_steal_ids_ref,actions3) ← addSetWorkloadStealBroadcastIdsAction actions2
                let actions4 = ignoreAcceptWorkloadAction $ actions3
                let progress = Progress Unexplored (Sum (0::Int))
                SupervisorOutcome{..} ← runTestSupervisor AllMode actions4 $ do
                    setWorkloadBufferSize 0
                    addWorker 0
                    forM_ (zip [0..] (tail active_workers)) $ \(prefix_count,worker_id) → do
                        addWorker worker_id
                        [worker_to_steal_from] ← liftIO $ readIORef workload_steal_ids_ref
                        let remaining_workload = Workload (Seq.replicate (prefix_count+1) (ChoiceStep LeftBranch)) Unexplored
                        let stolen_workload = Workload (Seq.replicate (prefix_count) (ChoiceStep LeftBranch) |> (ChoiceStep RightBranch)) Unexplored
                        receiveStolenWorkload worker_to_steal_from $ Just (StolenWorkload (ProgressUpdate mempty remaining_workload) stolen_workload)
                    mapM_ addWorker inactive_workers
                    performGlobalProgressUpdate
                    mapM_ removeWorker active_workers
                    abortSupervisor
                supervisorTerminationReason @?= SupervisorAborted progress
                supervisorRemainingWorkers @?= inactive_workers
                readIORef broadcast_ids_list_ref >>= (@?= [active_workers])
                readIORef maybe_progress_ref >>= (@?= Just progress)
                return True
        ,testCase "request and receive Just progress update when one worker present" $ do
            (maybe_progress_ref,actions1) ← addReceiveCurrentProgressAction bad_test_supervisor_actions
            (broadcast_ids_list_ref,actions2) ← addAppendProgressBroadcastIdsAction actions1
            let actions3 = ignoreAcceptWorkloadAction actions2
            let progress = Progress (ChoicePoint Unexplored Unexplored) (Sum (1::Int))
            SupervisorOutcome{..} ← runTestSupervisor AllMode actions3 $ do
                enableSupervisorDebugMode
                setWorkloadBufferSize 0
                addWorker ()
                performGlobalProgressUpdate
                receiveProgressUpdate () $ ProgressUpdate progress entire_workload
                abortSupervisor
            supervisorTerminationReason @?= SupervisorAborted progress
            supervisorRemainingWorkers @?= [()]
            readIORef maybe_progress_ref >>= (@?= Just progress)
            readIORef broadcast_ids_list_ref >>= (@?= [[()]])
        ,testCase "request and receive progress update when active and inactive workers present" $ do
            (maybe_progress_ref,actions1) ← addReceiveCurrentProgressAction bad_test_supervisor_actions
            (broadcast_ids_list_ref,actions2) ← addAppendProgressBroadcastIdsAction actions1
            let actions3 = ignoreAcceptWorkloadAction . ignoreWorkloadStealAction $ actions2
            let progress = Progress (ChoicePoint Unexplored Unexplored) (Sum (1::Int))
            SupervisorOutcome{..} ← runTestSupervisor AllMode actions3 $ do
                enableSupervisorDebugMode
                setWorkloadBufferSize 0
                addWorker (1 :: Int)
                addWorker (2 :: Int)
                performGlobalProgressUpdate
                receiveProgressUpdate 1 $ ProgressUpdate progress entire_workload
                abortSupervisor
            supervisorTerminationReason @?= SupervisorAborted progress
            supervisorRemainingWorkers @?= [1,2]
            readIORef maybe_progress_ref >>= (@?= Just progress)
            readIORef broadcast_ids_list_ref >>= (@?= [[1]])
        ]
    -- Workload-stealing behaviour.
    ,testGroup "workload steals"
        [testCase "failure to steal from a worker leads to second attempt" $ do
            (broadcast_ids_list_ref,actions1) ← addAppendWorkloadStealBroadcastIdsAction bad_test_supervisor_actions
            let actions2 = ignoreAcceptWorkloadAction actions1
            SupervisorOutcome{..} ← runTestSupervisor AllMode actions2 $ do
                addWorker (1::Int)
                addWorker 2
                receiveStolenWorkload 1 Nothing
                abortSupervisor
            supervisorTerminationReason @?= SupervisorAborted (Progress Unexplored ())
            supervisorRemainingWorkers @?= [1,2]
            readIORef broadcast_ids_list_ref >>= (@?= [[1],[1]])
        ]
    ,testCase "starting from previous checkpoint" $ do
        (maybe_workload_ref,actions1) ← addAcceptOneWorkloadAction bad_test_supervisor_actions
        (broadcast_ids_list_ref,actions2) ← addAppendWorkloadStealBroadcastIdsAction actions1
        let checkpoint = ChoicePoint Unexplored Unexplored
            progress = Progress checkpoint (Sum (1::Int))
        SupervisorOutcome{..} ← runTestSupervisorStartingFrom AllMode progress actions2 $ do
            addWorker ()
            abortSupervisor
        supervisorTerminationReason @?= SupervisorAborted progress
        supervisorRemainingWorkers @?= [()]
        readIORef maybe_workload_ref >>= (@?= Just ((),(Workload Seq.empty checkpoint)))
        readIORef broadcast_ids_list_ref >>= (@?= [[()]])
    -- FirstMode: the supervisor terminates as soon as any result is found.
    ,testGroup "FirstMode" $
        [testGroup "single worker"
            [testCase "finishes with Explored" $ do
                SupervisorOutcome{..} ← runTestSupervisor FirstMode ignore_supervisor_actions $ do
                    enableSupervisorDebugMode
                    setWorkloadBufferSize 0
                    addWorker ()
                    receiveWorkerFinished () (Progress Explored Nothing)
                    error "Supervisor did not terminate"
                supervisorTerminationReason @?= SupervisorCompleted (Nothing :: Maybe (Progress ()))
            ,testCase "finishes with result" $ do
                SupervisorOutcome{..} ← runTestSupervisor FirstMode ignore_supervisor_actions $ do
                    enableSupervisorDebugMode
                    setWorkloadBufferSize 0
                    addWorker ()
                    receiveWorkerFinished () (Progress Explored (Just ()))
                    error "Supervisor did not terminate"
                supervisorTerminationReason @?= SupervisorCompleted (Just (Progress Explored ()))
            ]
        ,testGroup "two workers"
            [testCase "both finish with Explored" $ do
                SupervisorOutcome{..} ← runTestSupervisor FirstMode ignore_supervisor_actions $ do
                    enableSupervisorDebugMode
                    addWorker True
                    addWorker False
                    receiveStolenWorkload True . Just $
                        StolenWorkload
                            (ProgressUpdate
                                Unexplored
                                (Workload
                                    (Seq.singleton $ ChoiceStep LeftBranch)
                                    Unexplored
                                )
                            )
                            (Workload
                                (Seq.singleton $ ChoiceStep RightBranch)
                                Unexplored
                            )
                    receiveWorkerFinished True (Progress (ChoicePoint Explored Unexplored) Nothing)
                    receiveWorkerFinished False (Progress (ChoicePoint Unexplored Explored) Nothing)
                    error "Supervisor did not terminate"
                supervisorTerminationReason @?= SupervisorCompleted (Nothing :: Maybe (Progress ()))
            ,testCase "both finish with result" $ do
                SupervisorOutcome{..} ← runTestSupervisor FirstMode ignore_supervisor_actions $ do
                    enableSupervisorDebugMode
                    addWorker True
                    addWorker False
                    receiveStolenWorkload True . Just $
                        StolenWorkload
                            (ProgressUpdate
                                Unexplored
                                (Workload
                                    (Seq.singleton $ ChoiceStep LeftBranch)
                                    Unexplored
                                )
                            )
                            (Workload
                                (Seq.singleton $ ChoiceStep RightBranch)
                                Unexplored
                            )
                    receiveWorkerFinished False (Progress (ChoicePoint Explored Unexplored) (Just False))
                    receiveWorkerFinished True (Progress (ChoicePoint Unexplored Explored) (Just True))
                    error "Supervisor did not terminate"
                supervisorTerminationReason @?= SupervisorCompleted (Just (Progress (ChoicePoint Explored Unexplored) False))
            ]
        ]
    -- Worker-count listeners must track add/remove/finish events exactly.
    ,testCase "worker count listener" $ do
        count_1_ref ← newIORef (-1)
        count_2_ref ← newIORef (-1)
        _ ← runTestSupervisor AllMode ignore_supervisor_actions $ (do
            addWorkerCountListener $ writeIORef count_1_ref
            liftIO $ readIORef count_1_ref >>= (@?= 0)
            addWorker (0::Int)
            liftIO $ readIORef count_1_ref >>= (@?= 1)
            addWorkerCountListener $ writeIORef count_2_ref
            liftIO $ readIORef count_2_ref >>= (@?= 1)
            addWorker 1
            liftIO $ readIORef count_1_ref >>= (@?= 2)
            liftIO $ readIORef count_2_ref >>= (@?= 2)
            removeWorker 0
            liftIO $ readIORef count_1_ref >>= (@?= 1)
            liftIO $ readIORef count_2_ref >>= (@?= 1)
            addWorker 2
            liftIO $ readIORef count_1_ref >>= (@?= 2)
            liftIO $ readIORef count_2_ref >>= (@?= 2)
            receiveStolenWorkload 1 . Just $
                StolenWorkload
                    (ProgressUpdate
                        (Progress (ChoicePoint Unexplored Unexplored) ())
                        (Workload (Seq.fromList [ChoiceStep LeftBranch]) Unexplored)
                    )
                    (Workload (Seq.fromList [ChoiceStep RightBranch]) Unexplored)
            receiveWorkerFinishedAndRemoved 1 (Progress (ChoicePoint Explored Unexplored) ())
            liftIO $ readIORef count_1_ref >>= (@?= 1)
            liftIO $ readIORef count_2_ref >>= (@?= 1)
            abortSupervisor
         :: ∀ α. SupervisorMonad (AllMode ()) Int IO α)
        readIORef count_1_ref >>= (@?= 0)
        readIORef count_2_ref >>= (@?= 0)
        return ()
    ]
| gcross/LogicGrowsOnTrees | LogicGrowsOnTrees/tests/test-Supervisor.hs | bsd-2-clause | 21,352 | 0 | 31 | 6,902 | 4,404 | 2,139 | 2,265 | 357 | 3 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QSizePolicy.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:20
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Gui.QSizePolicy (
QqSizePolicy(..)
,QqSizePolicy_nf(..)
,controlType
,horizontalPolicy
,horizontalStretch
,setControlType
,setHeightForWidth
,setHorizontalPolicy
,setVerticalPolicy
,verticalPolicy
,verticalStretch
,qSizePolicy_delete
)
where
import Foreign.C.Types
import Qth.ClassTypes.Core
import Qtc.Enums.Base
import Qtc.Enums.Core.Qt
import Qtc.Enums.Gui.QSizePolicy
import Qtc.Classes.Base
import Qtc.Classes.Qccs
import Qtc.Classes.Core
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
import Qtc.Classes.Gui
import Qtc.ClassTypes.Gui
-- | Overloaded construction of a 'QSizePolicy'; each instance below binds a
-- different C++ constructor overload via a dedicated C entry point.
class QqSizePolicy x1 where
 qSizePolicy :: x1 -> IO (QSizePolicy ())
-- No-argument overload: calls the C wrapper with no parameters.
instance QqSizePolicy (()) where
 qSizePolicy ()
  = withQSizePolicyResult $
    qtc_QSizePolicy
foreign import ccall "qtc_QSizePolicy" qtc_QSizePolicy :: IO (Ptr (TQSizePolicy ()))
-- Overload taking an existing 'QSizePolicy'; the object pointer is unwrapped
-- with 'withObjectPtr' before crossing the FFI boundary.
instance QqSizePolicy ((QSizePolicy t1)) where
 qSizePolicy (x1)
  = withQSizePolicyResult $
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QSizePolicy1 cobj_x1
foreign import ccall "qtc_QSizePolicy1" qtc_QSizePolicy1 :: Ptr (TQSizePolicy t1) -> IO (Ptr (TQSizePolicy ()))
-- Overload taking two 'Policy' enums; enums are marshalled through
-- 'qEnum_toInt' and passed as CLong.
instance QqSizePolicy ((Policy, Policy)) where
 qSizePolicy (x1, x2)
  = withQSizePolicyResult $
    qtc_QSizePolicy2 (toCLong $ qEnum_toInt x1) (toCLong $ qEnum_toInt x2)
foreign import ccall "qtc_QSizePolicy2" qtc_QSizePolicy2 :: CLong -> CLong -> IO (Ptr (TQSizePolicy ()))
-- Overload taking two 'Policy' enums and a 'ControlType'; same CLong marshalling.
instance QqSizePolicy ((Policy, Policy, ControlType)) where
 qSizePolicy (x1, x2, x3)
  = withQSizePolicyResult $
    qtc_QSizePolicy3 (toCLong $ qEnum_toInt x1) (toCLong $ qEnum_toInt x2) (toCLong $ qEnum_toInt x3)
foreign import ccall "qtc_QSizePolicy3" qtc_QSizePolicy3 :: CLong -> CLong -> CLong -> IO (Ptr (TQSizePolicy ()))
-- | Variants of 'qSizePolicy' that wrap the returned C pointer with
-- 'withObjectRefResult' instead of 'withQSizePolicyResult'.
-- NOTE(review): "_nf" presumably means "no finalizer" (the returned object is
-- not garbage-collected on the Haskell side) -- confirm against qtHaskell docs.
class QqSizePolicy_nf x1 where
 qSizePolicy_nf :: x1 -> IO (QSizePolicy ())
-- Reuses the same C entry points as the finalizing variants above.
instance QqSizePolicy_nf (()) where
 qSizePolicy_nf ()
  = withObjectRefResult $
    qtc_QSizePolicy
instance QqSizePolicy_nf ((QSizePolicy t1)) where
 qSizePolicy_nf (x1)
  = withObjectRefResult $
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QSizePolicy1 cobj_x1
instance QqSizePolicy_nf ((Policy, Policy)) where
 qSizePolicy_nf (x1, x2)
  = withObjectRefResult $
    qtc_QSizePolicy2 (toCLong $ qEnum_toInt x1) (toCLong $ qEnum_toInt x2)
instance QqSizePolicy_nf ((Policy, Policy, ControlType)) where
 qSizePolicy_nf (x1, x2, x3)
  = withObjectRefResult $
    qtc_QSizePolicy3 (toCLong $ qEnum_toInt x1) (toCLong $ qEnum_toInt x2) (toCLong $ qEnum_toInt x3)
-- | Binding for QSizePolicy::controlType(); the CLong result is decoded to
-- the 'ControlType' enum via 'withQEnumResult'.
controlType :: QSizePolicy a -> (()) -> IO (ControlType)
controlType x0 ()
  = withQEnumResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QSizePolicy_controlType cobj_x0
foreign import ccall "qtc_QSizePolicy_controlType" qtc_QSizePolicy_controlType :: Ptr (TQSizePolicy a) -> IO CLong
-- Binding for QSizePolicy::expandingDirections(); result decoded as a flag
-- set via 'withQFlagsResult'.
instance QexpandingDirections (QSizePolicy a) (()) where
 expandingDirections x0 ()
  = withQFlagsResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QSizePolicy_expandingDirections cobj_x0
foreign import ccall "qtc_QSizePolicy_expandingDirections" qtc_QSizePolicy_expandingDirections :: Ptr (TQSizePolicy a) -> IO CLong
-- Binding for QSizePolicy::hasHeightForWidth(); CBool decoded to 'Bool'.
instance QhasHeightForWidth (QSizePolicy a) (()) where
 hasHeightForWidth x0 ()
  = withBoolResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QSizePolicy_hasHeightForWidth cobj_x0
foreign import ccall "qtc_QSizePolicy_hasHeightForWidth" qtc_QSizePolicy_hasHeightForWidth :: Ptr (TQSizePolicy a) -> IO CBool
-- Binding for QSizePolicy::horizontalPolicy(); CLong decoded to 'Policy'.
horizontalPolicy :: QSizePolicy a -> (()) -> IO (Policy)
horizontalPolicy x0 ()
  = withQEnumResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QSizePolicy_horizontalPolicy cobj_x0
foreign import ccall "qtc_QSizePolicy_horizontalPolicy" qtc_QSizePolicy_horizontalPolicy :: Ptr (TQSizePolicy a) -> IO CLong
-- Binding for QSizePolicy::horizontalStretch(); CInt converted to 'Int'.
horizontalStretch :: QSizePolicy a -> (()) -> IO (Int)
horizontalStretch x0 ()
  = withIntResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QSizePolicy_horizontalStretch cobj_x0
foreign import ccall "qtc_QSizePolicy_horizontalStretch" qtc_QSizePolicy_horizontalStretch :: Ptr (TQSizePolicy a) -> IO CInt
-- | Binding for QSizePolicy::setControlType(); the enum argument is passed
-- as CLong via 'qEnum_toInt'.
setControlType :: QSizePolicy a -> ((ControlType)) -> IO ()
setControlType x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QSizePolicy_setControlType cobj_x0 (toCLong $ qEnum_toInt x1)
foreign import ccall "qtc_QSizePolicy_setControlType" qtc_QSizePolicy_setControlType :: Ptr (TQSizePolicy a) -> CLong -> IO ()
-- Binding for QSizePolicy::setHeightForWidth(); 'Bool' marshalled via 'toCBool'.
setHeightForWidth :: QSizePolicy a -> ((Bool)) -> IO ()
setHeightForWidth x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QSizePolicy_setHeightForWidth cobj_x0 (toCBool x1)
foreign import ccall "qtc_QSizePolicy_setHeightForWidth" qtc_QSizePolicy_setHeightForWidth :: Ptr (TQSizePolicy a) -> CBool -> IO ()
-- Binding for QSizePolicy::setHorizontalPolicy().
setHorizontalPolicy :: QSizePolicy a -> ((Policy)) -> IO ()
setHorizontalPolicy x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QSizePolicy_setHorizontalPolicy cobj_x0 (toCLong $ qEnum_toInt x1)
foreign import ccall "qtc_QSizePolicy_setHorizontalPolicy" qtc_QSizePolicy_setHorizontalPolicy :: Ptr (TQSizePolicy a) -> CLong -> IO ()
-- Binding for QSizePolicy::setVerticalPolicy().
setVerticalPolicy :: QSizePolicy a -> ((Policy)) -> IO ()
setVerticalPolicy x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QSizePolicy_setVerticalPolicy cobj_x0 (toCLong $ qEnum_toInt x1)
foreign import ccall "qtc_QSizePolicy_setVerticalPolicy" qtc_QSizePolicy_setVerticalPolicy :: Ptr (TQSizePolicy a) -> CLong -> IO ()
-- Binding for QSizePolicy::transpose(); mutates the object in place, no result.
instance Qqtranspose (QSizePolicy a) (()) where
 qtranspose x0 ()
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QSizePolicy_transpose cobj_x0
foreign import ccall "qtc_QSizePolicy_transpose" qtc_QSizePolicy_transpose :: Ptr (TQSizePolicy a) -> IO ()
-- Binding for QSizePolicy::verticalPolicy().
verticalPolicy :: QSizePolicy a -> (()) -> IO (Policy)
verticalPolicy x0 ()
  = withQEnumResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QSizePolicy_verticalPolicy cobj_x0
foreign import ccall "qtc_QSizePolicy_verticalPolicy" qtc_QSizePolicy_verticalPolicy :: Ptr (TQSizePolicy a) -> IO CLong
-- Binding for QSizePolicy::verticalStretch().
verticalStretch :: QSizePolicy a -> (()) -> IO (Int)
verticalStretch x0 ()
  = withIntResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QSizePolicy_verticalStretch cobj_x0
foreign import ccall "qtc_QSizePolicy_verticalStretch" qtc_QSizePolicy_verticalStretch :: Ptr (TQSizePolicy a) -> IO CInt
-- | Destroys the underlying C++ object. Using the wrapper after this call is
-- undefined behaviour on the C++ side.
qSizePolicy_delete :: QSizePolicy a -> IO ()
qSizePolicy_delete x0
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QSizePolicy_delete cobj_x0
foreign import ccall "qtc_QSizePolicy_delete" qtc_QSizePolicy_delete :: Ptr (TQSizePolicy a) -> IO ()
| keera-studios/hsQt | Qtc/Gui/QSizePolicy.hs | bsd-2-clause | 6,800 | 0 | 14 | 1,025 | 1,900 | 988 | 912 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE RecordWildCards #-}
import System.Console.CmdArgs
import Control.Applicative ((<$>))
import Control.Monad (forM_)
import Data.List (intercalate)
import qualified Data.Text as T
import qualified Data.DAWG.Static as D
import qualified NLP.LexemeClustering as LC
---------------------------------------
-- Command line options
---------------------------------------
-- | Clustering options. Field meanings mirror the cmdargs help strings in
-- 'cluster' below.
data Cluster = Cluster
    { inputPath :: FilePath -- ^ Input file read by 'LC.readWords'
    , freqMin :: Double     -- ^ N-gram frequency threshold
    , nMax :: Int           -- ^ Maximum n-gram length taken on account
    , eps :: Bool           -- ^ Add epsilon to suffix set
    , normMut :: Bool       -- ^ Normalize mutual information
      -- | Kappa parameter of the clustering method.
    , kappa :: Double }
    deriving (Data, Typeable, Show)
-- | Command-line specification: default values plus cmdargs annotations.
-- NOTE: the @&=@ combinators are cmdargs' impure annotation mechanism, so the
-- exact shape of these expressions matters; keep them as written.
cluster :: Cluster
cluster = Cluster
    { inputPath = def &= argPos 0 &= typ "INPUT-FILE"
    , freqMin = 0.001 &= help "N-gram frequency threshold"
    , nMax = 8 &= help "Maximum n-gram length taken on account"
    , eps = False &= help "Add epsilon to suffix set"
    , normMut = False &= help "Normalize mutual information"
    , kappa = 0.01 &= help "Kappa parameter" }
    &= summary "Grouping morphologically related words"
    &= program "lexeme-clustering"
---------------------------------------
-- Main
---------------------------------------
-- | Parse the command line into a 'Cluster' record, then run the pipeline.
main :: IO ()
main = cmdArgs cluster >>= exec
exec :: Cluster -> IO ()
exec Cluster{..} = do
putStrLn "# Collecting words"
langDAWG <- D.weigh . D.fromLang . map T.unpack <$> LC.readWords inputPath
putStr "# Number of language DAWG states: " >> print (D.numStates langDAWG)
putStrLn "# Collecting suffixes"
let ngCfg = LC.NGramConf
{ LC.freqMin = freqMin
, LC.nMax = nMax
, LC.eps = eps }
sufDAWG = D.weigh $ D.fromLang
$ map (T.unpack . fst)
$ LC.ngrams ngCfg langDAWG
putStr "# Number of suffix DAWG states: " >> print (D.numStates sufDAWG)
putStrLn "# Suffix partitioning"
let sufDist = LC.mkSufDist langDAWG sufDAWG
cmCfg = LC.CMEnv
{ LC.baseDist = sufDist
, LC.normMut = normMut
, LC.kappa = kappa }
parMap <- LC.runCM cmCfg $ LC.partitionMap sufDAWG
putStrLn "# Clustering"
forM_ (LC.cluster langDAWG sufDAWG parMap) $ \xs -> do
putStrLn $ intercalate ", " xs
| kawu/lexeme-clustering | tools/lexeme-clustering.hs | bsd-2-clause | 2,374 | 0 | 15 | 650 | 580 | 309 | 271 | 54 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Common
import Web.Twitter.Conduit
import Data.Conduit
import qualified Data.Conduit.List as CL
import qualified Data.Map as M
import System.Environment
-- | For the screen name given on the command line, fetch follower and friend
-- id lists and print which friendships are one-sided and which are mutual.
main :: IO ()
main = do
    [screenName] <- getArgs
    twInfo <- getTWInfoFromEnv
    mgr <- newManager tlsManagerSettings
    let sn = ScreenNameParam screenName
    followerIds <- runConduit $ sourceWithCursor twInfo mgr (followersIds sn) .| CL.consume
    friendIds <- runConduit $ sourceWithCursor twInfo mgr (friendsIds sn) .| CL.consume
    -- Index the followers so membership tests on friends are cheap.
    let followerSet = M.fromList [(i, True) | i <- followerIds]
        oneSided = [i | i <- friendIds, M.notMember i followerSet]
        mutual = [i | i <- friendIds, M.member i followerSet]
    putStrLn "one sided:"
    print oneSided
    putStrLn "both following:"
    print mutual
| himura/twitter-conduit | sample/oslist.hs | bsd-2-clause | 813 | 0 | 14 | 175 | 260 | 131 | 129 | 23 | 1 |
{-# LANGUAGE RankNTypes, NamedFieldPuns, RecordWildCards, RecursiveDo #-}
module Distribution.Server.Features.Core (
CoreFeature(..),
CoreResource(..),
initCoreFeature,
-- * Change events
PackageChange(..),
isPackageChangeAny,
isPackageAdd,
isPackageDelete,
isPackageIndexChange,
-- * Misc other utils
packageExists,
packageIdExists,
) where
import Distribution.Server.Framework
import Distribution.Server.Features.Core.State
import Distribution.Server.Features.Core.Backup
import Distribution.Server.Features.Users
import Distribution.Server.Packages.Types
import Distribution.Server.Users.Types (UserId)
import Distribution.Server.Users.Users (userIdToName)
import qualified Distribution.Server.Packages.Index as Packages.Index
import qualified Codec.Compression.GZip as GZip
import Data.Digest.Pure.MD5 (md5)
import qualified Distribution.Server.Framework.ResponseContentTypes as Resource
import qualified Distribution.Server.Packages.PackageIndex as PackageIndex
import Distribution.Server.Packages.PackageIndex (PackageIndex)
import qualified Distribution.Server.Framework.BlobStorage as BlobStorage
import Data.Time.Clock (UTCTime, getCurrentTime)
import Data.Time.Format (formatTime)
import Data.Time.Locale.Compat (defaultTimeLocale)
import Data.Maybe (isJust)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.ByteString.Lazy (ByteString)
import qualified Data.ByteString.Lazy as BS
import Distribution.Text (display)
import Distribution.Package
import Distribution.Version (Version(..))
import Data.Aeson (Value(..))
import qualified Data.HashMap.Strict as HashMap
import qualified Data.Vector as Vec
import qualified Data.Text as Text
-- | The core feature, responsible for the main package index and all access
-- and modifications of it.
--
-- All packages must have a Cabal file, uploader, and upload time, and may have
-- a source tarball.
data CoreFeature = CoreFeature {
    -- | The core `HackageFeature`.
    coreFeatureInterface :: HackageFeature,
    -- | Core package resources and combinators.
    coreResource :: CoreResource,
    -- Queries
    -- | Retrieves the entire main package index.
    queryGetPackageIndex :: forall m. MonadIO m => m (PackageIndex PkgInfo),
    -- Update transactions
    -- | Adds a version of a package which did not previously exist in the
    -- index. This requires a Cabal file and context, and optionally a
    -- reference to a tarball blob, and does not do any consistency checking
    -- of these.
    --
    -- If a package was able to be newly added, runs a `PackageChangeAdd` hook
    -- when done and returns True.
    updateAddPackage :: forall m. MonadIO m => PackageId ->
                        CabalFileText -> UploadInfo ->
                        Maybe PkgTarball -> m Bool,
    -- | Deletes a version of an existing package, deleting the package if it
    -- was the last version.
    --
    -- If a package was found and deleted, runs a `PackageChangeDelete` hook
    -- when done and returns True.
    updateDeletePackage :: forall m. MonadIO m => PackageId -> m Bool,
    -- | Adds a new Cabal file for this package version, creating it if
    -- necessary. Previous Cabal files are kept around.
    --
    -- Runs either a `PackageChangeAdd` or `PackageChangeInfo` hook, depending
    -- on whether a package with the given version already existed.
    updateAddPackageRevision :: forall m. MonadIO m => PackageId ->
                                CabalFileText -> UploadInfo -> m (),
    -- | Sets the source tarball for an existing package version. References to
    -- previous tarballs, if any, are kept around.
    --
    -- If this package was found, runs a `PackageChangeInfo` hook when done and
    -- returns True.
    updateAddPackageTarball :: forall m. MonadIO m => PackageId ->
                               PkgTarball -> UploadInfo -> m Bool,
    -- | Sets the uploader of an existing package version.
    --
    -- If this package was found, runs a `PackageChangeInfo` hook when done and
    -- returns True.
    updateSetPackageUploader :: forall m. MonadIO m => PackageId -> UserId -> m Bool,
    -- | Sets the upload time of an existing package version.
    --
    -- If this package was found, runs a `PackageChangeInfo` hook when done and
    -- returns True.
    updateSetPackageUploadTime :: forall m. MonadIO m => PackageId -> UTCTime -> m Bool,
    -- | Set an entry in the 00-index.tar file.
    --
    -- The 00-index.tar file contains all the package entries, but it is an
    -- extensible format and we can add more stuff. E.g. version preferences
    -- or crypto signatures. This requires a file name, file contents, and
    -- modification time for the tar entry.
    --
    -- This runs a `PackageChangeIndexExtra` hook when done.
    updateArchiveIndexEntry :: forall m. MonadIO m => String -> (ByteString, UTCTime) -> m (),
    -- | Notification of package or index changes.
    -- (Fired by the update transactions above after the state change has
    -- been applied.)
    packageChangeHook :: Hook PackageChange (),
    -- | Notification of tarball downloads.
    packageDownloadHook :: Hook PackageId ()
  }
-- | The feature framework sees the core feature through its
-- 'coreFeatureInterface'.
instance IsHackageFeature CoreFeature where
    getFeatureInterface = coreFeatureInterface
-- | This is designed so that you can pattern match on just the kinds of
-- events you are interested in.
data PackageChange
    -- | A package was newly added with this `PkgInfo`.
    = PackageChangeAdd PkgInfo
    -- | A package was deleted, and this `PkgInfo` is no longer accessible in
    -- the package index.
    | PackageChangeDelete PkgInfo
    -- | A package was updated from the first `PkgInfo` to the second.
    | PackageChangeInfo PkgInfo PkgInfo
    -- | A file has changed in the package index tar not covered by any of the
    -- other change types.
    -- (Arguments: tar entry file name, file contents, modification time.)
    | PackageChangeIndexExtra String ByteString UTCTime
-- | Match any change to a package itself (add, delete, or update), for use
-- with `packageChangeHook` and `registerHookJust` when keeping other features
-- synchronized with the main package index.
--
-- Yields the affected `PackageId` together with the new `PkgInfo`, or
-- `Nothing` in the `PkgInfo` position when the package was deleted.
isPackageChangeAny :: PackageChange -> Maybe (PackageId, Maybe PkgInfo)
isPackageChangeAny change = case change of
    PackageChangeAdd newInfo      -> Just (packageId newInfo, Just newInfo)
    PackageChangeDelete oldInfo   -> Just (packageId oldInfo, Nothing)
    PackageChangeInfo _ newInfo   -> Just (packageId newInfo, Just newInfo)
    PackageChangeIndexExtra {}    -> Nothing
-- | Match only newly added packages, for use with `packageChangeHook` and
-- `registerHookJust`.
isPackageAdd :: PackageChange -> Maybe PkgInfo
isPackageAdd change = case change of
    PackageChangeAdd pkginfo -> Just pkginfo
    _                        -> Nothing
-- | Match only deleted packages, for use with `packageChangeHook` and
-- `registerHookJust`.
isPackageDelete :: PackageChange -> Maybe PkgInfo
isPackageDelete change = case change of
    PackageChangeDelete pkginfo -> Just pkginfo
    _                           -> Nothing
-- | Match every `PackageChange` (packages and index extras alike), for use
-- with `packageChangeHook` and `registerHookJust`.
isPackageIndexChange :: PackageChange -> Maybe ()
isPackageIndexChange = const (Just ())
{-
-- Other examples we may want later...
isPackageAddVersion :: Maybe PackageId,
isPackageDeleteVersion :: Maybe PackageId,
isPackageChangeCabalFile :: Maybe (PackageId, CabalFileText),
isPackageChangeCabalFileUploadInfo :: Maybe (PackageId, UploadInfo),
isPackageChangeTarball :: Maybe (PackageId, PkgTarball),
isPackageIndexExtraChange :: Maybe (String, ByteString, UTCTime)
-}
data CoreResource = CoreResource {
    -- | The collection of all packages.
    corePackagesPage :: Resource,
    -- | An individual package.
    corePackagePage :: Resource,
    -- | A Cabal file for a package version.
    coreCabalFile :: Resource,
    -- | A tarball for a package version.
    corePackageTarball :: Resource,
    -- Rendering resources.
    -- | URI for `corePackagesPage`, given a format (blank for none).
    indexPackageUri :: String -> String,
    -- | URI for `corePackagePage`, given a format and `PackageId`.
    corePackageIdUri :: String -> PackageId -> String,
    -- | URI for `corePackagePage`, given a format and `PackageName`.
    corePackageNameUri :: String -> PackageName -> String,
    -- | URI for `coreCabalFile`, given a PackageId.
    coreCabalUri :: PackageId -> String,
    -- | URI for `corePackageTarball`, given a PackageId.
    coreTarballUri :: PackageId -> String,
    -- | Find a PackageId or PackageName inside a path.
    -- (Fails with 'mzero' when the path component is absent or unparsable.)
    packageInPath :: forall m a. (MonadPlus m, FromReqURI a) => DynamicPath -> m a,
    -- | Find a tarball's PackageId from inside a path, doing some checking
    -- for consistency between the package and tarball.
    --
    -- TODO: This is a rather ad-hoc function. Do we really need it?
    packageTarballInPath :: forall m. MonadPlus m => DynamicPath -> m PackageId,
    -- | Check that a particular version of a package exists (guard fails if
    -- version is empty)
    guardValidPackageId :: PackageId -> ServerPartE (),
    -- | Check that a package exists.
    guardValidPackageName :: PackageName -> ServerPartE (),
    -- | Find a package in the package DB, failing if not found. This uses the
    -- highest version number of a package.
    --
    -- In the presence of deprecation or preferred versions,
    -- `withPackagePreferred` should generally be used instead for user-facing
    -- version resolution.
    lookupPackageName :: PackageName -> ServerPartE [PkgInfo],
    -- | Find a package version in the package DB, failing if not found. Behaves
    -- like `lookupPackageName` if the version is empty.
    lookupPackageId :: PackageId -> ServerPartE PkgInfo
  }
-- | Open the canonical package state and build the core feature once the
-- user feature is available.
initCoreFeature :: ServerEnv -> IO (UserFeature -> IO CoreFeature)
initCoreFeature env@ServerEnv{serverStateDir, serverCacheDelay,
                              serverVerbosity = verbosity} = do
    -- Canonical state
    packagesState <- packagesStateComponent verbosity serverStateDir
    -- Ephemeral state
    -- Additional files to put in the index tarball like preferred-versions
    extraMap <- newMemStateWHNF Map.empty
    -- Hooks
    packageChangeHook <- newHook
    packageDownloadHook <- newHook
    return $ \users -> do
      -- `rec` ties a knot: building the feature needs the index tarball
      -- cache, while the cache's refresh action (getIndexTarball) comes out
      -- of the feature constructor itself.
      rec let (feature, getIndexTarball)
                = coreFeature env users
                    packagesState extraMap indexTar
                    packageChangeHook packageDownloadHook
          -- Caches
          -- The index.tar.gz file
          indexTar <- newAsyncCacheNF getIndexTarball
                        defaultAsyncCachePolicy {
                          asyncCacheName = "index tarball",
                          asyncCacheUpdateDelay = serverCacheDelay,
                          asyncCacheSyncInit = False,
                          asyncCacheLogVerbosity = verbosity
                        }
      -- Keep the cached tarball fresh: any index-affecting change prods it.
      registerHookJust packageChangeHook isPackageIndexChange $ \_ ->
        prodAsyncCache indexTar
      return feature
-- | Open (or create) the acid-state database holding the main package index
-- and wrap it in the generic 'StateComponent' interface.
packagesStateComponent :: Verbosity -> FilePath -> IO (StateComponent AcidState PackagesState)
packagesStateComponent verbosity stateDir = do
  acidSt <- logTiming verbosity "Loaded PackagesState" $
              openLocalStateFrom (stateDir </> "db" </> "PackagesState") initialPackagesState
  return StateComponent {
      stateDesc    = "Main package database"
    , stateHandle  = acidSt
    , getState     = query acidSt GetPackagesState
    , putState     = update acidSt . ReplacePackagesState
    , backupState  = \_ -> indexToAllVersions
    , restoreState = packagesBackup
    , resetState   = packagesStateComponent verbosity
    }
coreFeature :: ServerEnv
            -> UserFeature
            -> StateComponent AcidState PackagesState
            -> MemState (Map String (ByteString, UTCTime))
            -> AsyncCache IndexTarball
            -> Hook PackageChange ()
            -> Hook PackageId ()
            -> ( CoreFeature
               , IO IndexTarball )
coreFeature ServerEnv{serverBlobStore = store} UserFeature{..}
            packagesState indexExtras cacheIndexTarball
            packageChangeHook packageDownloadHook
  = (CoreFeature{..}, getIndexTarball)
  where
    coreFeatureInterface = (emptyHackageFeature "core") {
        featureDesc = "Core functionality"
      , featureResources = [
            coreIndexTarball
          , corePackagesPage
          , corePackagePage
          , corePackageRedirect
          , corePackageTarball
          , coreCabalFile
          , coreCabalFileRevs
          , coreCabalFileRev
          ]
      , featureState = [abstractAcidStateComponent packagesState]
      , featureCaches = [
            CacheComponent {
              cacheDesc = "main package index tarball",
              getCacheMemSize = memSize <$> readAsyncCache cacheIndexTarball
            }
          , CacheComponent {
              cacheDesc = "package index extra files",
              getCacheMemSize = memSize <$> readMemState indexExtras
            }
          ]
      , featurePostInit = syncAsyncCache cacheIndexTarball
      }
    -- the rudimentary HTML resources are for when we don't want an additional HTML feature
    coreResource = CoreResource {..}
    coreIndexTarball = (resourceAt "/packages/index.tar.gz") {
        resourceDesc = [(GET, "tarball of package descriptions")]
      , resourceGet = [("tarball", servePackagesIndex)]
      }
    corePackagesPage = (resourceAt "/packages/.:format") {
        resourceDesc = [(GET, "List of all packages")]
      , resourceGet = [("json", servePackageList)]
      }
    corePackagePage = resourceAt "/package/:package.:format"
    corePackageRedirect = (resourceAt "/package/") {
        resourceDesc = [(GET, "Redirect to /packages/")]
      , resourceGet = [("", \_ -> seeOther "/packages/" $ toResponse ())]
      }
    corePackageTarball = (resourceAt "/package/:package/:tarball.tar.gz") {
        resourceDesc = [(GET, "Get package tarball")]
      , resourceGet = [("tarball", servePackageTarball)]
      }
    coreCabalFile = (resourceAt "/package/:package/:cabal.cabal") {
        resourceDesc = [(GET, "Get package .cabal file")]
      , resourceGet = [("cabal", serveCabalFile)]
      }
    coreCabalFileRevs = (resourceAt "/package/:package/revisions/.:format") {
        resourceDesc = [(GET, "List all package .cabal file revisions")]
      , resourceGet = [("json", serveCabalFileRevisionsList)]
      }
    coreCabalFileRev = (resourceAt "/package/:package/revision/:revision.:format") {
        resourceDesc = [(GET, "Get package .cabal file revision")]
      , resourceGet = [("cabal", serveCabalFileRevision)]
      }
    -- URI renderers for the resources above.
    indexPackageUri = \format ->
      renderResource corePackagesPage [format]
    corePackageIdUri = \format pkgid ->
      renderResource corePackagePage [display pkgid, format]
    corePackageNameUri = \format pkgname ->
      renderResource corePackagePage [display pkgname, format]
    coreCabalUri = \pkgid ->
      renderResource coreCabalFile [display pkgid, display (packageName pkgid)]
    coreTarballUri = \pkgid ->
      renderResource corePackageTarball [display pkgid, display pkgid]
    packageInPath dpath = maybe mzero return (lookup "package" dpath >>= fromReqURI)
    packageTarballInPath dpath = do
      PackageIdentifier name version <- packageInPath dpath
      case lookup "tarball" dpath >>= fromReqURI of
        Nothing -> mzero
        Just pkgid@(PackageIdentifier name' version') -> do
          -- rules:
          -- * the package name and tarball name must be the same
          -- * the tarball must specify a version
          -- * the package must either have no version or the same version as the tarball
          guard $ name == name' && version' /= Version [] [] && (version == version' || version == Version [] [])
          return pkgid
    guardValidPackageId pkgid = do
      guard (pkgVersion pkgid /= Version [] [])
      void $ lookupPackageId pkgid
    guardValidPackageName pkgname =
      void $ lookupPackageName pkgname
    -- Queries
    --
    queryGetPackageIndex :: MonadIO m => m (PackageIndex PkgInfo)
    queryGetPackageIndex = return . packageList =<< queryState packagesState GetPackagesState
    -- Update transactions
    --
    -- NOTE: every transaction below first commits the acid-state update and
    -- only then fires `packageChangeHook`, so listeners observe committed state.
    updateAddPackage :: MonadIO m => PackageId
                     -> CabalFileText -> UploadInfo
                     -> Maybe PkgTarball -> m Bool
    updateAddPackage pkgid cabalFile uploadinfo mtarball = do
      mpkginfo <- updateState packagesState
                    (AddPackage pkgid cabalFile uploadinfo mtarball)
      case mpkginfo of
        Nothing -> return False
        Just pkginfo -> do
          runHook_ packageChangeHook (PackageChangeAdd pkginfo)
          return True
    updateDeletePackage :: MonadIO m => PackageId -> m Bool
    updateDeletePackage pkgid = do
      mpkginfo <- updateState packagesState (DeletePackage pkgid)
      case mpkginfo of
        Nothing -> return False
        Just pkginfo -> do
          runHook_ packageChangeHook (PackageChangeDelete pkginfo)
          return True
    updateAddPackageRevision :: MonadIO m => PackageId -> CabalFileText -> UploadInfo -> m ()
    updateAddPackageRevision pkgid cabalfile uploadinfo = do
      (moldpkginfo, newpkginfo) <- updateState packagesState (AddPackageRevision pkgid cabalfile uploadinfo)
      case moldpkginfo of
        Nothing ->
          runHook_ packageChangeHook (PackageChangeAdd newpkginfo)
        Just oldpkginfo ->
          runHook_ packageChangeHook (PackageChangeInfo oldpkginfo newpkginfo)
    updateAddPackageTarball :: MonadIO m => PackageId -> PkgTarball -> UploadInfo -> m Bool
    updateAddPackageTarball pkgid tarball uploadinfo = do
      mpkginfo <- updateState packagesState (AddPackageTarball pkgid tarball uploadinfo)
      case mpkginfo of
        Nothing -> return False
        Just (oldpkginfo, newpkginfo) -> do
          runHook_ packageChangeHook (PackageChangeInfo oldpkginfo newpkginfo)
          return True
    -- NOTE(review): the next two bindings have no local type signatures;
    -- their types are pinned by the CoreFeature record fields they populate.
    updateSetPackageUploader pkgid userid = do
      mpkginfo <- updateState packagesState (SetPackageUploader pkgid userid)
      case mpkginfo of
        Nothing -> return False
        Just (oldpkginfo, newpkginfo) -> do
          runHook_ packageChangeHook (PackageChangeInfo oldpkginfo newpkginfo)
          return True
    updateSetPackageUploadTime pkgid time = do
      mpkginfo <- updateState packagesState (SetPackageUploadTime pkgid time)
      case mpkginfo of
        Nothing -> return False
        Just (oldpkginfo, newpkginfo) -> do
          runHook_ packageChangeHook (PackageChangeInfo oldpkginfo newpkginfo)
          return True
    updateArchiveIndexEntry :: MonadIO m => String -> (ByteString, UTCTime) -> m ()
    updateArchiveIndexEntry entryName entryDetails@(entryData, entryTime) = do
      modifyMemState indexExtras (Map.insert entryName entryDetails)
      runHook_ packageChangeHook (PackageChangeIndexExtra entryName entryData entryTime)
    -- Cache updates
    --
    -- Rebuild the gzipped 00-index.tar from the user db, the package index,
    -- and the extra entries; used as the refresh action of cacheIndexTarball.
    getIndexTarball :: IO IndexTarball
    getIndexTarball = do
      users <- queryGetUserDb -- note, changes here don't automatically propagate
      index <- queryGetPackageIndex
      extras <- readMemState indexExtras
      time <- getCurrentTime
      let indexTarball = GZip.compress (Packages.Index.write users extras index)
      return $! IndexTarball indexTarball (fromIntegral $ BS.length indexTarball)
                             (md5 indexTarball) time
    ------------------------------------------------------------------------------
    packageError :: [MessageSpan] -> ServerPartE a
    packageError = errNotFound "Package not found"
    lookupPackageName :: PackageName -> ServerPartE [PkgInfo]
    lookupPackageName pkgname = do
      pkgsIndex <- queryGetPackageIndex
      case PackageIndex.lookupPackageName pkgsIndex pkgname of
        [] -> packageError [MText "No such package in package index"]
        pkgs -> return pkgs
    lookupPackageId :: PackageId -> ServerPartE PkgInfo
    lookupPackageId (PackageIdentifier name (Version [] [])) = do
      pkgs <- lookupPackageName name
      -- pkgs is sorted by version number and non-empty
      return (last pkgs)
    lookupPackageId pkgid = do
      pkgsIndex <- queryGetPackageIndex
      case PackageIndex.lookupPackageId pkgsIndex pkgid of
        Just pkg -> return pkg
        _ -> packageError [MText $ "No such package version for " ++ display (packageName pkgid)]
    ------------------------------------------------------------------------
    servePackagesIndex :: DynamicPath -> ServerPartE Response
    servePackagesIndex _ = do
      tarball@(IndexTarball _ _ tarballmd5 _) <- readAsyncCache cacheIndexTarball
      cacheControl [Public, NoTransform, maxAgeMinutes 5]
                   (ETag (show tarballmd5))
      return (toResponse tarball)
    -- TODO: should we include more information here? description and
    -- category for instance (but they are not readily available as long
    -- as we don't keep the parsed cabal files in memory)
    servePackageList :: DynamicPath -> ServerPartE Response
    servePackageList _ = do
      pkgIndex <- queryGetPackageIndex
      let pkgs = PackageIndex.allPackagesByName pkgIndex
          -- NOTE(review): `head` assumes each group from allPackagesByName is
          -- non-empty -- confirm against PackageIndex.
          list = [display . pkgName . pkgInfoId $ pkg | pkg <- map head pkgs]
      -- We construct the JSON manually so that we control what it looks like;
      -- in particular, we use objects for the packages so that we can add
      -- additional fields later without (hopefully) breaking clients
      let json = flip map list $ \str ->
            Object . HashMap.fromList $ [
                (Text.pack "packageName", String (Text.pack str))
              ]
      return . toResponse $ Array (Vec.fromList json)
    -- result: tarball or not-found error
    servePackageTarball :: DynamicPath -> ServerPartE Response
    servePackageTarball dpath = do
      pkgid <- packageTarballInPath dpath
      guard (pkgVersion pkgid /= Version [] [])
      pkg <- lookupPackageId pkgid
      case pkgLatestTarball pkg of
        Nothing -> errNotFound "Tarball not found"
                     [MText "No tarball exists for this package version."]
        Just (tarball, (uploadtime,_uid)) -> do
          let blobId = pkgTarballGz tarball
          cacheControl [Public, NoTransform, maxAgeDays 30]
                       (BlobStorage.blobETag blobId)
          file <- liftIO $ BlobStorage.fetch store blobId
          runHook_ packageDownloadHook pkgid
          return $ toResponse $ Resource.PackageTarball file blobId uploadtime
    -- result: cabal file or not-found error
    serveCabalFile :: DynamicPath -> ServerPartE Response
    serveCabalFile dpath = do
      pkginfo <- packageInPath dpath >>= lookupPackageId
      -- check that the cabal name matches the package
      guard (lookup "cabal" dpath == Just (display $ packageName pkginfo))
      let (fileRev, (utime, _uid)) = pkgLatestRevision pkginfo
          cabalfile = Resource.CabalFile (cabalFileByteString fileRev) utime
      return $ toResponse cabalfile
    serveCabalFileRevisionsList :: DynamicPath -> ServerPartE Response
    serveCabalFileRevisionsList dpath = do
      pkginfo <- packageInPath dpath >>= lookupPackageId
      users <- queryGetUserDb
      let revisions = pkgMetadataRevisions pkginfo
          revisionToObj rev (_, (utime, uid)) =
            let uname = userIdToName users uid in
            Object $ HashMap.fromList
              [ (Text.pack "number", Number (fromIntegral rev))
              , (Text.pack "user", String (Text.pack (display uname)))
              , (Text.pack "time", String (Text.pack (formatTime defaultTimeLocale "%c" utime)))
              ]
          revisionsJson = Array $ Vec.imap revisionToObj revisions
      return (toResponse revisionsJson)
    serveCabalFileRevision :: DynamicPath -> ServerPartE Response
    serveCabalFileRevision dpath = do
      pkginfo <- packageInPath dpath >>= lookupPackageId
      let mrev = lookup "revision" dpath >>= fromReqURI
          revisions = pkgMetadataRevisions pkginfo
      case mrev >>= \rev -> revisions Vec.!? rev of
        Just (fileRev, (utime, _uid)) -> return $ toResponse cabalfile
          where
            cabalfile = Resource.CabalFile (cabalFileByteString fileRev) utime
        Nothing -> errNotFound "Package revision not found"
                     [MText "Cannot parse revision, or revision out of range."]
packageExists, packageIdExists :: (Package pkg, Package pkg') => PackageIndex pkg -> pkg' -> Bool
-- | Whether a package exists in the given package index.
packageExists pkgs pkg = not . null $ PackageIndex.lookupPackageName pkgs (packageName pkg)
-- | Whether a particular package version exists in the given package index.
-- (`isJust` replaces the `maybe False (const True)` anti-pattern.)
packageIdExists pkgs pkg = isJust $ PackageIndex.lookupPackageId pkgs (packageId pkg)
| chrisdotcode/hackage-server | Distribution/Server/Features/Core.hs | bsd-3-clause | 25,162 | 0 | 22 | 6,163 | 4,588 | 2,448 | 2,140 | 350 | 13 |
{-# LANGUAGE RankNTypes #-}
module Main where
import Control.Applicative
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as C8
import qualified Data.ByteString.Lazy as LB
import qualified Data.Map as Map
import Data.String
import qualified Data.Vector as V
import Data.Word ( Word32 )
import qualified System.IO as IO
import qualified Test.Tasty as T
import qualified Test.Tasty.HUnit as T
import qualified Test.Tasty.QuickCheck as T
import Prelude
import Data.Elf
-- | Parsing an empty ELF file must return a parse error ('Left'); a
-- successful parse of garbage input would be a bug in 'parseElf'.
testEmptyElf :: T.Assertion
testEmptyElf = IO.withBinaryFile "./tests/empty.elf" IO.ReadMode $ \h -> do
  fil <- B.hGetContents h
  case parseElf fil of
    Left _e -> return ()
    Right _a -> T.assertFailure "Empty ELF did not cause an exception."
-- | Round-trip property: parsing an ELF file and re-rendering it must
-- preserve the segment count, the symbol-table entry count, and the
-- interpreter path.
testIdentityTransform :: FilePath -> T.Assertion
testIdentityTransform fp = do
  bs <- B.readFile fp
  withElf bs $ \e -> do
    int0 <- elfInterpreter e
    withElf (LB.toStrict (renderElf e)) $ \e' -> do
      T.assertEqual "Segment Count" (length (elfSegments e)) (length (elfSegments e'))
      withElf bs $ \ehi -> do
        withElf (LB.toStrict (renderElf e)) $ \ehi' -> do
          -- NOTE(review): irrefutable single-element patterns — these crash
          -- if a test binary has zero or several symbol tables. Acceptable
          -- in test code but worth confirming the fixtures guarantee it.
          let [st1] = elfSymtab ehi
              [st2] = elfSymtab ehi'
          let cnt1 = V.length (elfSymbolTableEntries st1)
          let cnt2 = V.length (elfSymbolTableEntries st2)
          T.assertEqual "Symbol table sizes" cnt1 cnt2
      int1 <- elfInterpreter e'
      T.assertEqual "Interpreter" int0 int1
-- | Property: every (string, offset) pair produced by 'stringTable' must
-- point at an occurrence of exactly that string inside the rendered bytes.
stringTableConsistencyProp :: [AsciiString] -> Bool
stringTableConsistencyProp strings =
  let (rendered, offsetMap) = stringTable (map unwrapAsciiString strings)
  in all (checkStringTableEntry rendered) (Map.toList offsetMap)
-- | Check that the slice of @bytes@ starting at @off@ and as long as @str@
-- is byte-for-byte equal to @str@.
checkStringTableEntry :: C8.ByteString -> (B.ByteString, Word32) -> Bool
checkStringTableEntry bytes (str, off) =
  C8.take (B.length str) (C8.drop (fromIntegral off) bytes) == str
-- | Parse an ELF header from the given bytes and run the continuation on
-- it, regardless of whether the file is 32- or 64-bit (hence the rank-2
-- type). A parse failure becomes a test failure.
withElfHeaderInfo :: B.ByteString -> (forall w . ElfHeaderInfo w -> T.Assertion) -> T.Assertion
withElfHeaderInfo bs f =
  case parseElfHeaderInfo bs of
    Left e -> T.assertFailure ("Failed to parse elf header info: " ++ show e)
    Right (Elf32 ehi32) -> f ehi32
    Right (Elf64 ehi64) -> f ehi64
-- | Parse a full ELF file from the given bytes and run the continuation on
-- it for either word width. A parse failure becomes a test failure.
withElf :: B.ByteString -> (forall w . Elf w -> T.Assertion) -> T.Assertion
withElf bs f =
  case parseElf bs of
    Left e -> T.assertFailure ("Failed to parse elf file: " ++ show e)
    Right (Elf32 e32) -> f e32
    Right (Elf64 e64) -> f e64
-- | The full test suite: a negative parse test, two render round-trips on
-- checked-in fixtures, and the string-table QuickCheck property.
tests :: T.TestTree
tests = T.testGroup "ELF Tests"
  [ T.testCase "Empty ELF" testEmptyElf
  , T.testCase "Identity Transformation (simple static)" (testIdentityTransform "./tests/simple.static.elf")
  , T.testCase "Identity Transformation (simple)" (testIdentityTransform "./tests/simple.elf")
  , T.testProperty "stringTable consistency" stringTableConsistencyProp
  ]
-- | Run the suite with tasty's default command-line runner.
main :: IO ()
main = T.defaultMain tests
-- | A 'B.ByteString' guaranteed (by construction in 'T.Arbitrary') to
-- contain only ASCII letters — safe input for 'stringTable'.
newtype AsciiString = AsciiString { unwrapAsciiString :: B.ByteString }
  deriving (Show)
instance T.Arbitrary AsciiString where
  arbitrary = AsciiString . fromString <$> genAsciiString
-- | Generate a (possibly empty) string of ASCII letters.
genAsciiString :: T.Gen String
genAsciiString = T.listOf genAsciiChar
-- | Generate a single ASCII letter (no digits, punctuation, or NULs, so
-- generated strings cannot collide with the string table's terminators).
genAsciiChar :: T.Gen Char
genAsciiChar = T.elements (['a'..'z'] ++ ['A'..'Z'])
| GaloisInc/elf | tests/Test.hs | bsd-3-clause | 3,252 | 0 | 28 | 673 | 1,015 | 522 | 493 | 73 | 3 |
module Lib where
import Data.Text
import Database.PostgreSQL.Simple
import Database.PostgreSQL.Simple.SqlQQ
-- | Call the database function @login_role(user, pass)@ and return the
-- role name on success. Yields 'Nothing' when the credentials are rejected
-- (the function returns SQL NULL) or the result has an unexpected shape.
loginRole :: Connection -> Text -> Text -> IO (Maybe Text)
loginRole c user pass = do
  rows <- query c "select login_role(?,?)" (user, pass)
  case rows of
    [Only mrole] -> return mrole
    _            -> return Nothing
| begriffs/jwt-auth-basic | src/Lib.hs | bsd-3-clause | 327 | 0 | 14 | 62 | 118 | 62 | 56 | 10 | 2 |
module Bot.Source
( Source
, fromText
, toText
)
where
import Data.Text (Text)
import qualified Data.Text as Text
newtype Source = Source Text
-- | Build a 'Source' from raw text, dropping a single leading \'@\' if
-- present. Empty input is passed through unchanged.
fromText :: Text -> Source
fromText txt =
  Source $ case Text.uncons txt of
    Just ('@', rest) -> rest
    _                -> txt
-- | Extract the underlying (already '@'-stripped) text.
toText :: Source -> Text
toText (Source txt) = txt
| frublox/aichanbot | src/Bot/Source.hs | bsd-3-clause | 410 | 0 | 10 | 134 | 138 | 72 | 66 | 13 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module Main where
import Development.GitRev
import qualified Data.ByteString.Lazy.Char8 as BC
import qualified Data.Text.IO as T
import qualified Data.Text.Lazy.IO as TL
import System.IO
import System.Environment
import System.Exit
import DataFlow.Reader
import DataFlow.Core
import qualified DataFlow.Validation as V
import qualified DataFlow.DFD as DFD
import qualified DataFlow.SequenceDiagram as SEQ
import qualified DataFlow.Graphviz.Renderer as GVR
import qualified DataFlow.PlantUML.Renderer as PUR
import qualified DataFlow.Mustache.Renderer as MR
import qualified DataFlow.JSONGraphFormat.Renderer as JG
-- | Print the command-line usage summary to stderr.
usage :: IO ()
usage = hPutStrLn stderr $ unlines [
    "Usage: dataflow command args*",
    "",
    "Commands",
    "--------",
    "dfd SRC - outputs a DFD in the Graphviz DOT format",
    "seq SRC - outputs a sequence diagram in PlantUML format",
    "template TEMPLATE SRC - renders the TEMPLATE using data from SRC",
    "json SRC - outputs a sequence diagram in JSON Graph Format",
    " (http://jsongraphformat.info/)",
    "validate SRC - validates the input",
    "",
    "--version - display VCS information",
    "--help - display this help message",
    "",
    "All commands print to stdout"
  ]
-- | Collapse a list of errors into a single newline-terminated error
-- string, leaving successful results untouched.
showErrors :: Show s => Either [s] v -> Either String v
showErrors (Left errs) = Left (unlines (map show errs))
showErrors (Right v)   = Right v
-- | Read a diagram file and run the semantic validator over it. Returns
-- 'Left' with a printable message on either a parse error or any
-- validation errors.
readAndValidate :: FilePath -> IO (Either String Diagram)
readAndValidate path = do
  res <- readDiagramFile path
  case res of
    (Left err) -> return $ Left $ show err
    (Right d) -> return (showErrors $ V.validate d)
-- | Read, validate, and print SRC as a data flow diagram in Graphviz DOT
-- format on stdout; validation/parse errors are printed instead.
dfd :: FilePath -> IO ()
dfd path =
  readAndValidate path
    >>= either putStrLn (putStr . GVR.renderGraphviz . DFD.asDFD)
-- | Read, validate, and print SRC as a PlantUML sequence diagram on
-- stdout; validation/parse errors are printed instead.
seq' :: FilePath -> IO ()
seq' path =
  readAndValidate path
    >>= either putStrLn (putStr . PUR.renderPlantUML . SEQ.asSequenceDiagram)
-- | Render the mustache TEMPLATE with data from SRC. The template file is
-- read even when SRC fails to validate (matching the original effect
-- order); template rendering errors are printed via 'print'.
template :: FilePath -> FilePath -> IO ()
template tmplPath path = do
  parsed  <- readAndValidate path
  tmplStr <- readFile tmplPath
  case parsed of
    Left err -> putStrLn err
    Right d  -> either print T.putStr (MR.renderTemplate tmplStr path d)
-- | Read, validate, and print SRC in JSON Graph Format on stdout;
-- validation/parse errors are printed instead.
json :: FilePath -> IO ()
json path =
  readAndValidate path
    >>= either putStrLn (BC.putStrLn . JG.renderJSONGraph)
-- | Validate SRC, printing errors and producing no output on success.
validate :: FilePath -> IO ()
validate path =
  readAndValidate path >>= either putStrLn (\_ -> return ())
-- | Print VCS build information captured at compile time by the Template
-- Haskell splices from "Development.GitRev".
version :: IO ()
version = do
  putStrLn $ "Branch: " ++ $(gitBranch)
  putStrLn $ "Hash: " ++ $(gitHash)
-- | Entry point: dispatch on the command-line arguments. An unrecognised
-- invocation prints an error to stderr and exits with status 1.
main :: IO ()
main = do
  args <- getArgs
  case args of
    ["dfd", path] -> dfd path
    ["seq", path] -> seq' path
    ["template", tmplPath, path] -> template tmplPath path
    ["json", path] -> json path
    ["validate", path] -> validate path
    ["--version"] -> version
    ["--help"] -> usage
    _ -> do hPutStrLn stderr "Invalid command!\n\nRun with --help to see usage."
            exitWith $ ExitFailure 1
| sonyxperiadev/dataflow | cli/Main.hs | bsd-3-clause | 3,157 | 0 | 14 | 756 | 950 | 495 | 455 | 91 | 8 |
module MultilineRe (multilineRe) where
import Text.Regex.PCRE.Heavy
import Text.Regex.PCRE.Light (multiline, utf8)
import Language.Haskell.TH.Quote (QuasiQuoter)
-- | A quasiquoter for PCRE regexes compiled with the MULTILINE and UTF8
-- flags (so @^@/@$@ match at line boundaries and input is decoded as UTF-8).
multilineRe :: QuasiQuoter
multilineRe = mkRegexQQ [multiline, utf8]
| FranklinChen/twenty-four-days2015-of-hackage | app/MultilineRe.hs | bsd-3-clause | 233 | 0 | 6 | 23 | 62 | 40 | 22 | 6 | 1 |
{-# LANGUAGE Safe #-}
------------------------------------------------------------------------
-- |
-- Module : AI.Rete
-- Copyright : (c) 2015 Konrad Grzanek
-- License : BSD-style (see the file LICENSE)
-- Created : 2015-03-16
-- Maintainer : kongra@gmail.com
-- Stability : experimental
------------------------------------------------------------------------
module AI.Rete
(
-- * Abstraction
Rete
, emptyRete
-- * Adding Wmes
, addWme
, addWmeP
-- * Adding productions
, addProd
, addProdP
, c
, var
, Pred
, Action
-- * Variable value access
, val
, valE
, valM
-- * Strategies
, StepStrategy
, breadthFirst
, depthFirst
-- * Forward chaining, evaluation
, forwardChain
, exec
, execIO
, eval
-- * Predefined actions and tools
, acompose
, passAction
, traceAction
, traceMsgAction
)
where
import AI.Rete.Data
import AI.Rete.Flow
import AI.Rete.Net
import AI.Rete.State
| kongra/rete | AI/Rete.hs | bsd-3-clause | 1,053 | 0 | 4 | 306 | 121 | 86 | 35 | 31 | 0 |
{-# LANGUAGE FlexibleInstances, RecordWildCards, DeriveGeneric #-}
-- | Scored tests and Gradescope JSON output for Tasty
module Test.Tasty.GradeScope where
import Test.Tasty
import Test.Tasty.Providers
import Test.Tasty.Options
( IsOption(..)
, OptionSet(..)
, OptionDescription(..)
, lookupOption
, safeRead
)
import Test.Tasty.Ingredients
( Ingredient(..)
, composeReporters
)
import Test.Tasty.Runners
( Ingredient(TestReporter)
, Status(Done)
, StatusMap
, Time
, Traversal(..)
, TreeFold(..)
)
import qualified Test.Tasty.Runners as Runners
import Text.JSON hiding (Result)
import Control.Concurrent.STM
import Control.Concurrent.STM.TVar
import Control.Monad ((>=>), liftM)
import qualified Control.Monad.State as State
import Data.Functor.Const
import Data.Functor.Compose
import Data.Maybe
import Data.Monoid
import Data.IntMap (IntMap)
import Data.Proxy (Proxy(..))
import Data.Tagged (Tagged(..))
import Data.Typeable (Typeable)
import GHC.Generics (Generic)
import Text.ParserCombinators.ReadP
import qualified Data.IntMap as IntMap
-- | EXAMPLE OUTPUT
--
-- { "score": 44.0, // optional, but required if not on each test case below
-- "execution_time": 136, // optional, seconds
-- "output": "Text relevant to the entire submission", // optional
-- "visibility": "after_due_date" // Optional visibility setting
-- "tests": // Optional, but required if no top-level score
-- [
-- {
-- "score": 2.0, // optional, but required if not on top level submission
-- "max_score": 2.0, // optional
-- "name": "Your name here", // optional
-- "number": "1.1", // optional (will just be numbered in order of array if no number given)
-- "output": "Giant multiline string that will be placed in a <pre> tag and collapsed by default", // optional
-- "tags": ["tag1", "tag2", "tag3"] // optional
-- "visibility": "visible" // Optional visibility setting
-- },
-- // and more test cases...
-- ]
-- }
-- * Options
-- | All tasty options understood by the Gradescope reporter, registered
-- so tasty can parse them from the command line / option sets.
gradeOptions :: [OptionDescription]
gradeOptions = [ Option (Proxy :: Proxy (Maybe GSScoreFile))
               , Option (Proxy :: Proxy Weight)
               , Option (Proxy :: Proxy Visibility)
               , Option (Proxy :: Proxy NegScoring)
               , Option (Proxy :: Proxy TotalPoints)
               ]
-- | Total points for the suite: with negative scoring this is the maximum
-- possible score, otherwise a running-total seed added to the final score.
newtype TotalPoints = TotalPoints (Sum Int) deriving (Eq, Ord, Typeable)
instance IsOption TotalPoints where
  defaultValue = TotalPoints mempty
  -- Use the total 'safeRead' instead of the partial 'read': a malformed
  -- value now parses to 'Nothing' instead of crashing when forced. This
  -- also matches the 'Weight' instance below.
  parseValue = fmap (TotalPoints . Sum) . safeRead
  optionName = Tagged "TotalPoints"
  optionHelp = Tagged "Total Points: if neg scoring this is max possible for the suite; else running total"
-- | Whether to score negatively: start from the maximum ('TotalPoints')
-- and deduct points for failed tests.
newtype NegScoring = NegScoring Bool deriving (Eq, Ord, Typeable)
instance IsOption NegScoring where
  defaultValue = NegScoring False
  -- Use the total 'safeRead' instead of the partial 'read': a malformed
  -- value (anything but "True"/"False") now parses to 'Nothing' instead
  -- of crashing when forced.
  parseValue = fmap NegScoring . safeRead
  optionName = Tagged "NegScoring"
  optionHelp = Tagged "Negative Scoring (deduct points from max)"
-- | When Gradescope reveals a test's result to the student.
data Visibility = Hidden | AfterDue | AfterPub | Visible deriving (Eq, Ord, Typeable)
instance IsOption Visibility where
  defaultValue = Visible
  -- Previously 'undefined', which crashed if the option was ever set from
  -- the command line. Accept the same spellings the JSON encoding emits.
  parseValue "hidden"          = Just Hidden
  parseValue "after_due_date"  = Just AfterDue
  parseValue "after_published" = Just AfterPub
  parseValue "visible"         = Just Visible
  parseValue _                 = Nothing
  optionName = Tagged "Visibility"
  optionHelp = Tagged "Is this test visible?"
-- | How many points a single test is worth (default 1). Set per-subtree
-- with 'scored'.
newtype Weight = Weight Int deriving (Eq, Ord, Typeable)
instance IsOption Weight where
  defaultValue = Weight 1
  parseValue = fmap Weight . safeRead
  optionName = Tagged "Weight"
  optionHelp = Tagged "How many points a test is worth"
-- | Output path for the Gradescope JSON report (default @result.json@).
newtype GSScoreFile = GSScoreFile FilePath deriving (Eq, Ord, Typeable)
instance IsOption (Maybe GSScoreFile) where
  defaultValue = Just (GSScoreFile "result.json")
  parseValue = Just . Just . GSScoreFile
  optionName = Tagged "scores"
  optionHelp = Tagged "A file path to output scores, as a JSON file in GradeScope format"
-- ** Toggles
-- | Give every test in the subtree a point value of @n@.
scored :: Int -> TestTree -> TestTree
scored = localOption . Weight
-- | Set the Gradescope visibility for every test in the subtree.
visibility :: Visibility -> TestTree -> TestTree
visibility = localOption
-- * Reporter
-- | Report to the console as usual while also writing the Gradescope
-- JSON file ('composeReporters' runs both ingredients on one test run).
gsConsoleReporter :: Ingredient
gsConsoleReporter = Runners.consoleTestReporter `composeReporters` gradescopeReporter
-- | The Gradescope JSON reporter ingredient. Registers 'gradeOptions' and
-- writes the report to the path given by the @scores@ option.
gradescopeReporter :: Ingredient
gradescopeReporter = TestReporter gradeOptions runner
  where
    -- Runs in the Maybe monad: a pattern-match failure on the option
    -- lookup (option set to Nothing) disables this reporter entirely.
    runner opts tests = do
      GSScoreFile output <- lookupOption opts
      pure $ scoreTests output opts tests
-- | Fold the whole test tree into a 'ScoreSummary' (waiting for every
-- test to finish), then write the Gradescope JSON report to @outfile@.
-- The returned action yields 'True' iff no test failed.
scoreTests :: FilePath -> OptionSet -> TestTree -> (StatusMap -> IO (Time -> IO Bool))
scoreTests outfile opts tests = \testStatus -> do
  TotalPoints totalPoints <- pure (lookupOption opts)
  -- The traversal numbers tests via a StateT Int counter starting at 0,
  -- matching the indices tasty uses in the StatusMap.
  Const summary <- flip State.evalStateT 0 $ getCompose $ getTraversal $
    Runners.foldTestTree (foldScores testStatus) opts tests
  return $ \time -> do
    writeFile outfile $
      (encode (toJSObject [ ("execution_time" , showJSON (ceiling time :: Int))
                          , ("score" , showJSON (getSum (runningTotal summary <> totalPoints)))
                          , ("tests" , showJSON summary)
                          ]))
    return $ numFailures summary == mempty
-- * Internals
-- | Everything needed to render one test case in the Gradescope report.
data TestResult = TestResult
  { resultId :: Int -- ^ position of the test in the tree (tasty index)
  , resultName :: TestName -- ^ human-readable test name
  , resultWeight :: Int -- ^ points the test is worth when it passes
  , resultMetadata :: Result -- ^ tasty's final result for the test
  , resultVisible :: Visibility -- ^ when Gradescope shows this result
  }
-- | Serialise one test into a Gradescope @tests[]@ entry. Decoding is
-- unsupported ('readJSON' = 'undefined'); this type is write-only.
instance JSON TestResult where
  readJSON = undefined
  showJSON tr@TestResult{..} =
    JSObject $ toJSObject
      [ ("name" , showJSON resultName)
      , ("score" , showJSON score)
      , ("max_score" , showJSON resultWeight)
      , ("number" , showJSON resultId)
      , ("output" , showJSON (Runners.resultDescription resultMetadata))
      , ("visibility" , showJSON resultVisible)
      ]
    where
      -- full marks on success, zero otherwise
      score | resultSuccessful tr = resultWeight
            | otherwise = 0
-- | Did the underlying tasty result pass?
resultSuccessful :: TestResult -> Bool
resultSuccessful tr = Runners.resultSuccessful (resultMetadata tr)
-- | Gradescope's visibility spellings. Write-only, like 'TestResult'.
instance JSON Visibility where
  readJSON = undefined
  showJSON Hidden = showJSON "hidden"
  showJSON AfterDue = showJSON "after_due_date"
  showJSON AfterPub = showJSON "after_published"
  showJSON Visible = showJSON "visible"
-- | Accumulated scoring state over the whole test tree.
data ScoreSummary = ScoreSummary
  { individualTests :: [TestResult] -- ^ per-test results, in traversal order
  , runningTotal :: Sum Int -- ^ points earned (or deducted, if negative scoring)
  , numFailures :: Sum Int -- ^ count of failed tests
  } deriving Generic
instance JSON ScoreSummary where
  readJSON = undefined
  showJSON ss = showJSON (individualTests ss)
-- NOTE(review): defining 'mappend' directly without a Semigroup instance
-- only compiles on base < 4.11 (GHC < 8.4) — confirm the supported GHC
-- range, or add a Semigroup instance and define mappend = (<>).
instance Monoid ScoreSummary where
  mempty = ScoreSummary mempty mempty mempty
  (ScoreSummary ts1 tot1 f1) `mappend` (ScoreSummary ts2 tot2 f2) =
    ScoreSummary (ts1<>ts2) (tot1<>tot2) (f1<>f2)
-- | The applicative used to fold the tree: a StateT Int counter (test
-- numbering) over IO, accumulating a 'ScoreSummary' via Const's monoid.
type ScoreTraversal = Traversal (Compose (State.StateT Int IO) (Const ScoreSummary))
-- | Tree fold that scores leaves and passes groups straight through.
foldScores :: StatusMap -> TreeFold ScoreTraversal
foldScores statusMap = Runners.trivialFold
  { foldSingle = scoreSingleTest statusMap
  , foldGroup = scoreGroup
  }
-- | Score one leaf test: block until its status is 'Done', read the
-- per-test options (weight, visibility, scoring mode), and fold the
-- result into the running 'ScoreSummary'. The StateT counter assigns the
-- test its index into the 'StatusMap' and is bumped afterwards.
scoreSingleTest :: IsTest t
                => StatusMap -> OptionSet -> TestName -> t -> ScoreTraversal
scoreSingleTest statusMap options resultName _ = Traversal $ Compose $ do
  resultId <- State.get
  let Weight resultWeight = lookupOption options
      resultVisible = lookupOption options :: Visibility
      NegScoring ns = lookupOption options
  testResult <- State.lift $ do
    resultMetadata <- atomically . waitFinished $ statusMap IntMap.! resultId
    return TestResult{..}
  let
    failed = not (resultSuccessful testResult)
    summary = ScoreSummary [testResult] (scoreQ resultWeight failed ns) (countFail failed)
  Const summary <$ State.modify (+1)
  where
    -- scoreQ weight failed negScoring:
    -- negative scoring deducts on failure, positive scoring awards on
    -- success; the two remaining combinations contribute nothing
    -- (neg+pass: no deduction; pos+fail: no points).
    -- negative scoring, fail
    scoreQ w True True = Sum (-w)
    -- positive scoring, pass
    scoreQ w False False = Sum w
    scoreQ w _ _ = Sum 0
    countFail True = Sum 1
    countFail False = Sum 0
-- | Block (via STM 'retry') until a test's status becomes 'Done', then
-- return its final result.
waitFinished :: TVar Status -> STM Result
waitFinished var = do
  status <- readTVar var
  case status of
    Done res -> pure res
    _        -> retry
-- | Group structure does not affect scoring: pass the children through.
scoreGroup :: TestName -> ScoreTraversal -> ScoreTraversal
scoreGroup _groupName children = children
| PLSysSec/tasty-gradescope | src/Test/Tasty/GradeScope.hs | bsd-3-clause | 8,005 | 0 | 24 | 1,832 | 1,935 | 1,050 | 885 | 164 | 4 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Stack.Init
( initProject
, InitOpts (..)
) where
import Control.Applicative
import Control.Monad
import Control.Monad.IO.Unlift
import Control.Monad.Logger
import qualified Data.ByteString.Builder as B
import qualified Data.ByteString.Char8 as BC
import qualified Data.ByteString.Lazy as L
import qualified Data.Foldable as F
import Data.Function (on)
import qualified Data.HashMap.Strict as HM
import qualified Data.IntMap as IntMap
import Data.List (intercalate, intersect,
maximumBy)
import Data.List.NonEmpty (NonEmpty (..))
import qualified Data.List.NonEmpty as NonEmpty
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe
import Data.Monoid
import qualified Data.Text as T
import qualified Data.Yaml as Yaml
import qualified Distribution.PackageDescription as C
import qualified Distribution.Text as C
import qualified Distribution.Version as C
import Path
import Path.Extra (toFilePathNoTrailingSep)
import Path.IO
import qualified Paths_stack as Meta
import Stack.BuildPlan
import Stack.Config (getSnapshots,
makeConcreteResolver)
import Stack.Constants
import Stack.Snapshot (loadResolver)
import Stack.Solver
import Stack.Types.Build
import Stack.Types.BuildPlan
import Stack.Types.Config
import Stack.Types.FlagName
import Stack.Types.PackageIdentifier
import Stack.Types.PackageName
import Stack.Types.Resolver
import Stack.Types.StackT (StackM)
import Stack.Types.StringError
import Stack.Types.Version
import qualified System.FilePath as FP
-- | Generate stack.yaml for the project rooted at the given directory.
--
-- Finds .cabal\/package.yaml files under the search dirs (or the current
-- dir), picks a resolver (the given one, or the best matching snapshot),
-- computes flags\/extra-deps, and writes a commented stack.yaml. Throws
-- if the file already exists and @--force@ was not given.
initProject
    :: (StackM env m, HasConfig env, HasGHCVariant env)
    => WhichSolverCmd
    -> Path Abs Dir
    -> InitOpts
    -> Maybe AbstractResolver
    -> m ()
initProject whichCmd currDir initOpts mresolver = do
    let dest = currDir </> stackDotYaml
    reldest <- toFilePath `liftM` makeRelativeToCurrentDir dest
    exists <- doesFileExist dest
    when (not (forceOverwrite initOpts) && exists) $
        throwString
            ("Error: Stack configuration file " <> reldest <>
             " exists, use 'stack solver' to fix the existing config file or \
             \'--force' to overwrite it.")
    dirs <- mapM (resolveDir' . T.unpack) (searchDirs initOpts)
    let noPkgMsg = "In order to init, you should have an existing .cabal \
                   \file. Please try \"stack new\" instead."
        find = findCabalFiles (includeSubDirs initOpts)
        dirs' = if null dirs then [currDir] else dirs
    $logInfo "Looking for .cabal or package.yaml files to use to init the project."
    cabalfps <- liftM concat $ mapM find dirs'
    (bundle, dupPkgs) <- cabalPackagesCheck cabalfps noPkgMsg Nothing
    (sd, flags, extraDeps, rbundle) <- getDefaultResolver whichCmd dest initOpts
                                           mresolver bundle
    -- Kind of inefficient, since we've already parsed this value. But
    -- better to reparse in this one case than carry the unneeded data
    -- around everywhere in the codebase.
    resolver <- parseCustomLocation (Just (parent dest)) (void (sdResolver sd))
    -- 'ignored' = user packages dropped because the resolver can't build them.
    let ignored = Map.difference bundle rbundle
        dupPkgMsg
            | dupPkgs /= [] =
                "Warning (added by new or init): Some packages were found to \
                \have names conflicting with others and have been commented \
                \out in the packages section.\n"
            | otherwise = ""
        missingPkgMsg
            | Map.size ignored > 0 =
                "Warning (added by new or init): Some packages were found to \
                \be incompatible with the resolver and have been left commented \
                \out in the packages section.\n"
            | otherwise = ""
        extraDepMsg
            | Map.size extraDeps > 0 =
                "Warning (added by new or init): Specified resolver could not \
                \satisfy all dependencies. Some external packages have been \
                \added as dependencies.\n"
            | otherwise = ""
        makeUserMsg msgs =
            let msg = concat msgs
            in if msg /= "" then
                  msg <> "You can suppress this message by removing it from \
                         \stack.yaml\n"
               else ""
        userMsg = makeUserMsg [dupPkgMsg, missingPkgMsg, extraDepMsg]
        gpds = Map.elems $ fmap snd rbundle
        p = Project
            { projectUserMsg = if userMsg == "" then Nothing else Just userMsg
            , projectPackages = pkgs
            , projectDependencies = map
                (\(n, v) -> PLIndex $ PackageIdentifierRevision (PackageIdentifier n v) Nothing)
                (Map.toList extraDeps)
            , projectFlags = removeSrcPkgDefaultFlags gpds flags
            , projectResolver = resolver
            , projectCompiler = Nothing
            , projectExtraPackageDBs = []
            }
        -- Render a package dir relative to the project root ("." for the
        -- root itself); a dir outside the root is considered impossible.
        makeRelDir dir =
            case stripDir currDir dir of
                Nothing
                    | currDir == dir -> "."
                    | otherwise -> assert False $ toFilePathNoTrailingSep dir
                Just rel -> toFilePathNoTrailingSep rel
        makeRel = fmap toFilePath . makeRelativeToCurrentDir
        pkgs = map toPkg $ Map.elems (fmap (parent . fst) rbundle)
        toPkg dir = PLFilePath $ makeRelDir dir
        indent t = T.unlines $ fmap ("    " <>) (T.lines t)
    $logInfo $ "Initialising configuration using resolver: " <> sdResolverName sd
    $logInfo $ "Total number of user packages considered: "
               <> T.pack (show (Map.size bundle + length dupPkgs))
    when (dupPkgs /= []) $ do
        $logWarn $ "Warning! Ignoring "
                   <> T.pack (show $ length dupPkgs)
                   <> " duplicate packages:"
        rels <- mapM makeRel dupPkgs
        $logWarn $ indent $ showItems rels
    when (Map.size ignored > 0) $ do
        $logWarn $ "Warning! Ignoring "
                   <> T.pack (show $ Map.size ignored)
                   <> " packages due to dependency conflicts:"
        rels <- mapM makeRel (Map.elems (fmap fst ignored))
        $logWarn $ indent $ showItems rels
    when (Map.size extraDeps > 0) $ do
        $logWarn $ "Warning! " <> T.pack (show $ Map.size extraDeps)
                   <> " external dependencies were added."
    $logInfo $
        (if exists then "Overwriting existing configuration file: "
         else "Writing configuration to file: ")
        <> T.pack reldest
    liftIO $ L.writeFile (toFilePath dest)
           $ B.toLazyByteString
           $ renderStackYaml p
               (Map.elems $ fmap (makeRelDir . parent . fst) ignored)
               (map (makeRelDir . parent) dupPkgs)
    $logInfo "All done."
-- | Render a stack.yaml file with comments, see:
-- https://github.com/commercialhaskell/stack/issues/226
-- | Render a 'Project' to stack.yaml text: a help header, each known
-- section preceded by its help comment (with commented-out dummies for
-- absent optional sections), any remaining fields, then a help footer.
-- The two FilePath lists are ignored\/duplicate package dirs emitted as
-- commented-out entries under the packages section.
renderStackYaml :: Project -> [FilePath] -> [FilePath] -> B.Builder
renderStackYaml p ignoredPackages dupPackages =
    case Yaml.toJSON p of
        Yaml.Object o -> renderObject o
        -- A Project always serialises to an object; anything else is a bug.
        _ -> assert False $ B.byteString $ Yaml.encode p
  where
    renderObject o =
        B.byteString headerHelp
        <> B.byteString "\n\n"
        <> F.foldMap (goComment o) comments
        <> goOthers (o `HM.difference` HM.fromList comments)
        <> B.byteString footerHelp
    goComment o (name, comment) =
        case (convert <$> HM.lookup name o) <|> nonPresentValue name of
            Nothing -> assert (name == "user-message") mempty
            Just v ->
                B.byteString comment <>
                B.byteString "\n" <>
                v <>
                -- NOTE(review): if-then-else extends to the end of the
                -- expression, so the trailing "\n" is appended only in the
                -- else branch (i.e. not after the packages section).
                -- Confirm this layout is intended.
                if name == "packages" then commentedPackages else "" <>
                B.byteString "\n"
      where
        convert v = B.byteString (Yaml.encode $ Yaml.object [(name, v)])
        -- Some fields in stack.yaml are optional and may not be
        -- generated. For these, we provided commented out dummy
        -- values to go along with the comments.
        nonPresentValue "extra-deps" = Just "# extra-deps: []\n"
        nonPresentValue "flags" = Just "# flags: {}\n"
        nonPresentValue "extra-package-dbs" = Just "# extra-package-dbs: []\n"
        nonPresentValue _ = Nothing
    commentLine l | null l = "#"
                  | otherwise = "# " ++ l
    commentHelp = BC.pack . intercalate "\n" . map commentLine
    commentedPackages =
        let ignoredComment = commentHelp
                [ "The following packages have been ignored due to incompatibility with the"
                , "resolver compiler, dependency conflicts with other packages"
                , "or unsatisfied dependencies."
                ]
            dupComment = commentHelp
                [ "The following packages have been ignored due to package name conflict "
                , "with other packages."
                ]
        in commentPackages ignoredComment ignoredPackages
           <> commentPackages dupComment dupPackages
    commentPackages comment pkgs
        | pkgs /= [] =
            B.byteString comment
            <> B.byteString "\n"
            <> B.byteString (BC.pack $ concat
                 $ map (\x -> "#- " ++ x ++ "\n") pkgs ++ ["\n"])
        | otherwise = ""
    goOthers o
        | HM.null o = mempty
        | otherwise = assert False $ B.byteString $ Yaml.encode o
    -- Per Section Help
    comments =
        [ ("user-message" , userMsgHelp)
        , ("resolver" , resolverHelp)
        , ("packages" , packageHelp)
        , ("extra-deps" , "# Dependency packages to be pulled from upstream that are not in the resolver\n# (e.g., acme-missiles-0.3)")
        , ("flags" , "# Override default flag values for local packages and extra-deps")
        , ("extra-package-dbs", "# Extra package databases containing global packages")
        ]
    -- Help strings
    headerHelp = commentHelp
        [ "This file was automatically generated by 'stack init'"
        , ""
        , "Some commonly used options have been documented as comments in this file."
        , "For advanced use and comprehensive documentation of the format, please see:"
        , "https://docs.haskellstack.org/en/stable/yaml_configuration/"
        ]
    resolverHelp = commentHelp
        [ "Resolver to choose a 'specific' stackage snapshot or a compiler version."
        , "A snapshot resolver dictates the compiler version and the set of packages"
        , "to be used for project dependencies. For example:"
        , ""
        , "resolver: lts-3.5"
        , "resolver: nightly-2015-09-21"
        , "resolver: ghc-7.10.2"
        , "resolver: ghcjs-0.1.0_ghc-7.10.2"
        , "resolver:"
        , " name: custom-snapshot"
        , " location: \"./custom-snapshot.yaml\""
        ]
    userMsgHelp = commentHelp
        [ "A warning or info to be displayed to the user on config load." ]
    packageHelp = commentHelp
        [ "User packages to be built."
        , "Various formats can be used as shown in the example below."
        , ""
        , "packages:"
        , "- some-directory"
        , "- https://example.com/foo/bar/baz-0.0.2.tar.gz"
        , "- location:"
        , " git: https://github.com/commercialhaskell/stack.git"
        , " commit: e7b331f14bcffb8367cd58fbfc8b40ec7642100a"
        , "- location: https://github.com/commercialhaskell/stack/commit/e7b331f14bcffb8367cd58fbfc8b40ec7642100a"
        , " extra-dep: true"
        , " subdirs:"
        , " - auto-update"
        , " - wai"
        , ""
        , "A package marked 'extra-dep: true' will only be built if demanded by a"
        , "non-dependency (i.e. a user package), and its test suites and benchmarks"
        , "will not be run. This is useful for tweaking upstream packages."
        ]
    footerHelp =
        let major = toCabalVersion
                  $ toMajorVersion $ fromCabalVersion Meta.version
        in commentHelp
        [ "Control whether we use the GHC we find on the path"
        , "system-ghc: true"
        , ""
        , "Require a specific version of stack, using version ranges"
        , "require-stack-version: -any # Default"
        , "require-stack-version: \""
          ++ C.display (C.orLaterVersion major) ++ "\""
        , ""
        , "Override the architecture used by stack, especially useful on Windows"
        , "arch: i386"
        , "arch: x86_64"
        , ""
        , "Extra directories used by stack for building"
        , "extra-include-dirs: [/path/to/dir]"
        , "extra-lib-dirs: [/path/to/dir]"
        , ""
        , "Allow a newer minor version of GHC than the snapshot specifies"
        , "compiler-check: newer-minor"
        ]
-- | Fetch the snapshot list, logging a detailed troubleshooting message
-- and aborting (via 'errorString') if the download fails.
getSnapshots' :: (StackM env m, HasConfig env)
              => m Snapshots
getSnapshots' = do
    getSnapshots `catchAny` \e -> do
        $logError $
            "Unable to download snapshot list, and therefore could " <>
            "not generate a stack.yaml file automatically"
        $logError $
            "This sometimes happens due to missing Certificate Authorities " <>
            "on your system. For more information, see:"
        $logError ""
        $logError "    https://github.com/commercialhaskell/stack/issues/234"
        $logError ""
        $logError "You can try again, or create your stack.yaml file by hand. See:"
        $logError ""
        $logError "    http://docs.haskellstack.org/en/stable/yaml_configuration/"
        $logError ""
        $logError $ "Exception was: " <> T.pack (show e)
        errorString ""
-- | Get the default resolver value: use the explicitly requested resolver
-- when given, otherwise select the best-matching snapshot for the found
-- packages, then compute a working build plan against it.
getDefaultResolver
    :: (StackM env m, HasConfig env, HasGHCVariant env)
    => WhichSolverCmd
    -> Path Abs File -- ^ stack.yaml
    -> InitOpts
    -> Maybe AbstractResolver
    -> Map PackageName (Path Abs File, C.GenericPackageDescription)
       -- ^ Src package name: cabal dir, cabal package description
    -> m ( SnapshotDef
         , Map PackageName (Map FlagName Bool)
         , Map PackageName Version
         , Map PackageName (Path Abs File, C.GenericPackageDescription))
       -- ^ ( Resolver
       --   , Flags for src packages and extra deps
       --   , Extra dependencies
       --   , Src packages actually considered)
getDefaultResolver whichCmd stackYaml initOpts mresolver bundle = do
    sd <- maybe selectSnapResolver (makeConcreteResolver (Just root) >=> loadResolver) mresolver
    getWorkingResolverPlan whichCmd stackYaml initOpts bundle sd
  where
    root = parent stackYaml
    -- TODO support selecting best across regular and custom snapshots
    selectSnapResolver = do
        let gpds = Map.elems (fmap snd bundle)
        snaps <- fmap getRecommendedSnapshots getSnapshots'
        sds <- mapM (loadResolver . ResolverSnapshot) snaps
        (s, r) <- selectBestSnapshot (parent stackYaml) gpds sds
        case r of
            -- no snapshot can build the packages and we may not drop any:
            -- give up rather than produce a broken config
            BuildPlanCheckFail {} | not (omitPackages initOpts)
                    -> throwM (NoMatchingSnapshot whichCmd snaps)
            _ -> return s
-- | Iteratively compute a build plan for the chosen resolver: check the
-- bundle, and on failure drop the offending packages and retry with the
-- rest until a plan is found (or no user packages remain).
getWorkingResolverPlan
    :: (StackM env m, HasConfig env, HasGHCVariant env)
    => WhichSolverCmd
    -> Path Abs File -- ^ stack.yaml
    -> InitOpts
    -> Map PackageName (Path Abs File, C.GenericPackageDescription)
       -- ^ Src package name: cabal dir, cabal package description
    -> SnapshotDef
    -> m ( SnapshotDef
         , Map PackageName (Map FlagName Bool)
         , Map PackageName Version
         , Map PackageName (Path Abs File, C.GenericPackageDescription))
       -- ^ ( SnapshotDef
       --   , Flags for src packages and extra deps
       --   , Extra dependencies
       --   , Src packages actually considered)
getWorkingResolverPlan whichCmd stackYaml initOpts bundle sd = do
    $logInfo $ "Selected resolver: " <> sdResolverName sd
    go bundle
  where
    go info = do
        eres <- checkBundleResolver whichCmd stackYaml initOpts info sd
        -- if some packages failed try again using the rest
        case eres of
            Right (f, edeps)-> return (sd, f, edeps, info)
            Left ignored
                | Map.null available -> do
                    $logWarn "*** Could not find a working plan for any of \
                             \the user packages.\nProceeding to create a \
                             \config anyway."
                    return (sd, Map.empty, Map.empty, Map.empty)
                | otherwise -> do
                    -- 'ignored' non-empty here, otherwise 'available'
                    -- would equal 'info' and nothing would have changed
                    when (Map.size available == Map.size info) $
                        error "Bug: No packages to ignore"
                    if length ignored > 1 then do
                      $logWarn "*** Ignoring packages:"
                      $logWarn $ indent $ showItems ignored
                    else
                      $logWarn $ "*** Ignoring package: "
                                 <> T.pack (packageNameString (head ignored))
                    go available
                where
                  indent t = T.unlines $ fmap ("    " <>) (T.lines t)
                  isAvailable k _ = k `notElem` ignored
                  available = Map.filterWithKey isAvailable info
-- | Check whether the given source packages can be built with the given
-- resolver, possibly invoking the external solver for extra dependencies.
-- Returns @Right (flags, extraDeps)@ on success, or @Left pkgs@ with
-- packages to drop when @--omit-packages@ allows partial plans; otherwise
-- throws a descriptive error.
checkBundleResolver
    :: (StackM env m, HasConfig env, HasGHCVariant env)
    => WhichSolverCmd
    -> Path Abs File -- ^ stack.yaml
    -> InitOpts
    -> Map PackageName (Path Abs File, C.GenericPackageDescription)
       -- ^ Src package name: cabal dir, cabal package description
    -> SnapshotDef
    -> m (Either [PackageName] ( Map PackageName (Map FlagName Bool)
                               , Map PackageName Version))
checkBundleResolver whichCmd stackYaml initOpts bundle sd = do
    result <- checkSnapBuildPlan (parent stackYaml) gpds Nothing sd
    case result of
        BuildPlanCheckOk f -> return $ Right (f, Map.empty)
        -- plan exists but needs packages outside the snapshot
        BuildPlanCheckPartial f e
            | needSolver resolver initOpts -> do
                warnPartial result
                solve f
            | omitPackages initOpts -> do
                warnPartial result
                $logWarn "*** Omitting packages with unsatisfied dependencies"
                return $ Left $ failedUserPkgs e
            | otherwise -> throwM $ ResolverPartial whichCmd (sdResolverName sd) (show result)
        -- compiler mismatch: unusable unless we may drop packages
        BuildPlanCheckFail _ e _
            | omitPackages initOpts -> do
                $logWarn $ "*** Resolver compiler mismatch: "
                           <> sdResolverName sd
                $logWarn $ indent $ T.pack $ show result
                return $ Left $ failedUserPkgs e
            | otherwise -> throwM $ ResolverMismatch whichCmd (sdResolverName sd) (show result)
    where
      resolver = sdResolver sd
      indent t = T.unlines $ fmap ("    " <>) (T.lines t)
      warnPartial res = do
          $logWarn $ "*** Resolver " <> sdResolverName sd
                      <> " will need external packages: "
          $logWarn $ indent $ T.pack $ show res
      -- user packages whose dependency errors caused the failure
      failedUserPkgs e = Map.keys $ Map.unions (Map.elems (fmap deNeededBy e))
      gpds = Map.elems (fmap snd bundle)
      solve flags = do
          let cabalDirs = map parent (Map.elems (fmap fst bundle))
              srcConstraints = mergeConstraints (gpdPackages gpds) flags
          eresult <- solveResolverSpec stackYaml cabalDirs
                                       (sd, srcConstraints, Map.empty)
          case eresult of
              Right (src, ext) ->
                  return $ Right (fmap snd (Map.union src ext), fmap fst ext)
              Left packages
                  | omitPackages initOpts, srcpkgs /= []-> do
                      pkg <- findOneIndependent srcpkgs flags
                      return $ Left [pkg]
                  | otherwise -> throwM (SolverGiveUp giveUpMsg)
                  where srcpkgs = Map.keys bundle `intersect` packages
      -- among a list of packages find one on which none among the rest of the
      -- packages depend. This package is a good candidate to be removed from
      -- the list of packages when there is conflict in dependencies among this
      -- set of packages.
      findOneIndependent packages flags = do
          platform <- view platformL
          menv <- getMinimalEnvOverride
          (compiler, _) <- getResolverConstraints menv Nothing stackYaml sd
          -- 'fromJust' is safe: 'packages' is drawn from the keys of
          -- 'bundle' and 'flags' above
          let getGpd pkg = snd (fromJust (Map.lookup pkg bundle))
              getFlags pkg = fromJust (Map.lookup pkg flags)
              deps pkg = gpdPackageDeps (getGpd pkg) compiler platform
                                        (getFlags pkg)
              allDeps = concatMap (Map.keys . deps) packages
              isIndependent pkg = pkg `notElem` allDeps
              -- prefer to reject packages in deeper directories
              path pkg = fst (fromJust (Map.lookup pkg bundle))
              pathlen = length . FP.splitPath . toFilePath . path
              maxPathlen = maximumBy (compare `on` pathlen)
          return $ maxPathlen (filter isIndependent packages)
      giveUpMsg = concat
          [ "    - Use '--omit-packages to exclude conflicting package(s).\n"
          , "    - Tweak the generated "
          , toFilePath stackDotYaml <> " and then run 'stack solver':\n"
          , "        - Add any missing remote packages.\n"
          , "        - Add extra dependencies to guide solver.\n"
          , "    - Update external packages with 'stack update' and try again.\n"
          ]
      needSolver _ InitOpts {useSolver = True} = True
      needSolver (ResolverCompiler _) _ = True
      needSolver _ _ = False
-- | Snapshots to suggest, in order of preference: the most recent LTS
-- first, then the latest nightly, then older LTS releases (newest first).
getRecommendedSnapshots :: Snapshots -> NonEmpty SnapName
getRecommendedSnapshots snapshots =
    maybe (nightly :| []) withNightlySecond (NonEmpty.nonEmpty ltss)
  where
    -- Keep the newest LTS at the head, slot the nightly in right after it.
    withNightlySecond (newest :| older) = newest :| (nightly : older)
    ltss = map (uncurry LTS) (IntMap.toDescList $ snapshotsLts snapshots)
    nightly = Nightly (snapshotsNightly snapshots)
-- | Options controlling project initialisation (@stack init@).
data InitOpts = InitOpts
    { searchDirs :: ![T.Text]
    -- ^ List of sub directories to search for .cabal files
    , useSolver :: Bool
    -- ^ Use solver to determine required external dependencies
    , omitPackages :: Bool
    -- ^ Exclude conflicting or incompatible user packages
    , forceOverwrite :: Bool
    -- ^ Overwrite existing stack.yaml
    , includeSubDirs :: Bool
    -- ^ If True, include all .cabal files found in any sub directories
    }
| martin-kolinek/stack | src/Stack/Init.hs | bsd-3-clause | 23,469 | 0 | 21 | 7,961 | 4,604 | 2,342 | 2,262 | 431 | 7 |
module Internal.Texture
( ImageTexture (..)
, TextureAnimation (..)
, unsafeLoadTexture
, loadTexture
, freeTexture
) where
import Graphics.UI.SDL as SDL
import Graphics.UI.SDL.Image
import Graphics.Rendering.OpenGL
import System.IO.Unsafe
-- | An OpenGL texture object together with the usable fraction (0..1) of
-- the underlying texture in each direction; images are padded up to a
-- square power-of-two size on load (see 'loadTexture').
data ImageTexture = ImageTexture !TextureObject !GLfloat !GLfloat
    deriving (Eq)
-- | A sprite-sheet style animation: one texture subdivided into a grid,
-- with an ordered list of frames addressed by grid coordinates.
data TextureAnimation = TA
    { texture :: ImageTexture
    , textureDivs :: (Int, Int)  -- number of grid subdivisions; presumably (columns, rows) -- TODO confirm
    , frames :: [(Int, Int)]     -- grid coordinates of the frames, in playback order
    } deriving (Eq)
-- | Load a texture outside of IO via 'unsafePerformIO'.
--
-- Inherently unsafe: the load happens whenever the thunk is first forced,
-- and an OpenGL context must be current at that moment. The NOINLINE
-- pragma is required for 'unsafePerformIO' correctness — without it GHC
-- may inline the call and re-run the IO action (re-reading the file and
-- allocating a fresh texture object) at every use site.
{-# NOINLINE unsafeLoadTexture #-}
unsafeLoadTexture :: FilePath -> ImageTexture
unsafeLoadTexture fileName = unsafePerformIO $ do
    putStrLn "WARNING: Unsafe texture loading."
    loadTexture fileName
-- | Load an image file into an OpenGL texture.
--
-- The image is blitted onto a square power-of-two RGBA surface large
-- enough to hold it before upload; the returned 'ImageTexture' carries
-- the fraction of that square actually covered by the image per axis.
loadTexture :: FilePath -> IO ImageTexture
loadTexture fileName = do
    -- load to SDL Surface
    srcSurface <- load fileName
    let w = surfaceGetWidth srcSurface
        h = surfaceGetHeight srcSurface
    -- create pow2 SDL Surface
    let size = ceilingPow2 $ max w h
        sizeGL = fromIntegral size
    newSurface <- createRGBSurfaceEndian [] size size 32
    -- NOTE(review): these pattern binds crash with a pattern-match failure
    -- if SDL reports failure (False); consider handling that explicitly.
    True <- setAlpha srcSurface [] 128
    True <- blitSurface srcSurface Nothing newSurface Nothing
    freeSurface srcSurface
    pixels <- surfaceGetPixels newSurface
    let pixelData = PixelData RGBA UnsignedByte pixels
    -- gen OpenGL Texture
    [tex] <- genObjectNames 1
    textureBinding Texture2D $= Just tex
    textureFilter Texture2D $=! ((Linear', Nothing), Linear')
    texImage2D Nothing NoProxy 0 RGBA' (TextureSize2D sizeGL sizeGL) 0 pixelData
    return $ ImageTexture tex (w `fdiv` size) (h `fdiv` size)
-- | Delete the OpenGL texture objects backing the given textures.
--
-- Bug fix: the previous version bound the object list with the pattern
-- @let [objs] = map …@, which only matches a one-element input and
-- crashes with a pattern-match failure for any other length. Now all
-- texture objects are collected and deleted in a single call; an empty
-- input is a harmless no-op.
freeTexture :: [ImageTexture] -> IO ()
freeTexture texs =
    deleteObjectNames [obj | ImageTexture obj _ _ <- texs]
-- | Fractional division of two (possibly differently typed) 'Real'
-- values, converting both operands to the target type first.
fdiv a b = realToFrac a / realToFrac b
-- | Round up to the nearest power of two (minimum 1 = 2^0).
ceilingPow2 :: Integral a => a -> a
ceilingPow2 target = grow 1
  where
    -- Double until we reach or pass the target.
    grow p
      | p < target = grow (2 * p)
      | otherwise  = p
| c000/PaperPuppet | src/Internal/Texture.hs | bsd-3-clause | 1,732 | 0 | 14 | 322 | 564 | 291 | 273 | 51 | 1 |
{-# language CPP #-}
-- | = Name
--
-- VK_FUCHSIA_external_semaphore - device extension
--
-- == VK_FUCHSIA_external_semaphore
--
-- [__Name String__]
-- @VK_FUCHSIA_external_semaphore@
--
-- [__Extension Type__]
-- Device extension
--
-- [__Registered Extension Number__]
-- 366
--
-- [__Revision__]
-- 1
--
-- [__Extension and Version Dependencies__]
--
-- - Requires Vulkan 1.0
--
-- - Requires @VK_KHR_external_semaphore_capabilities@
--
-- - Requires @VK_KHR_external_semaphore@
--
-- [__Contact__]
--
-- - John Rosasco
-- <https://github.com/KhronosGroup/Vulkan-Docs/issues/new?body=[VK_FUCHSIA_external_semaphore] @rosasco%0A<<Here describe the issue or question you have about the VK_FUCHSIA_external_semaphore extension>> >
--
-- == Other Extension Metadata
--
-- [__Last Modified Date__]
-- 2021-03-08
--
-- [__IP Status__]
-- No known IP claims.
--
-- [__Contributors__]
--
-- - Craig Stout, Google
--
-- - John Bauman, Google
--
-- - John Rosasco, Google
--
-- == Description
--
-- An application using external memory may wish to synchronize access to
-- that memory using semaphores. This extension enables an application to
-- export semaphore payload to and import semaphore payload from Zircon
-- event handles.
--
-- == New Commands
--
-- - 'getSemaphoreZirconHandleFUCHSIA'
--
-- - 'importSemaphoreZirconHandleFUCHSIA'
--
-- == New Structures
--
-- - 'ImportSemaphoreZirconHandleInfoFUCHSIA'
--
-- - 'SemaphoreGetZirconHandleInfoFUCHSIA'
--
-- == New Enum Constants
--
-- - 'FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME'
--
-- - 'FUCHSIA_EXTERNAL_SEMAPHORE_SPEC_VERSION'
--
-- - Extending
-- 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.ExternalSemaphoreHandleTypeFlagBits':
--
-- - 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA'
--
-- - Extending 'Vulkan.Core10.Enums.StructureType.StructureType':
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_IMPORT_SEMAPHORE_ZIRCON_HANDLE_INFO_FUCHSIA'
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA'
--
-- == Issues
--
-- 1) Does the application need to close the Zircon event handle returned
-- by 'getSemaphoreZirconHandleFUCHSIA'?
--
-- __RESOLVED__: Yes, unless it is passed back in to a driver instance to
-- import the semaphore. A successful get call transfers ownership of the
-- Zircon event handle to the application, and a successful import
-- transfers it back to the driver. Destroying the original semaphore
-- object will not close the Zircon event handle nor remove its reference
-- to the underlying semaphore resource associated with it.
--
-- == Version History
--
-- - Revision 1, 2021-03-08 (John Rosasco)
--
-- - Initial revision
--
-- == See Also
--
-- 'ImportSemaphoreZirconHandleInfoFUCHSIA',
-- 'SemaphoreGetZirconHandleInfoFUCHSIA',
-- 'getSemaphoreZirconHandleFUCHSIA', 'importSemaphoreZirconHandleFUCHSIA'
--
-- == Document Notes
--
-- For more information, see the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#VK_FUCHSIA_external_semaphore Vulkan Specification>
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module Vulkan.Extensions.VK_FUCHSIA_external_semaphore ( getSemaphoreZirconHandleFUCHSIA
, importSemaphoreZirconHandleFUCHSIA
, ImportSemaphoreZirconHandleInfoFUCHSIA(..)
, SemaphoreGetZirconHandleInfoFUCHSIA(..)
, FUCHSIA_EXTERNAL_SEMAPHORE_SPEC_VERSION
, pattern FUCHSIA_EXTERNAL_SEMAPHORE_SPEC_VERSION
, FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME
, pattern FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME
, Zx_handle_t
) where
import Vulkan.Internal.Utils (traceAroundEvent)
import Control.Exception.Base (bracket)
import Control.Monad (unless)
import Control.Monad.IO.Class (liftIO)
import Foreign.Marshal.Alloc (allocaBytes)
import Foreign.Marshal.Alloc (callocBytes)
import Foreign.Marshal.Alloc (free)
import GHC.Base (when)
import GHC.IO (throwIO)
import GHC.Ptr (nullFunPtr)
import Foreign.Ptr (nullPtr)
import Foreign.Ptr (plusPtr)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Cont (evalContT)
import Vulkan.CStruct (FromCStruct)
import Vulkan.CStruct (FromCStruct(..))
import Vulkan.CStruct (ToCStruct)
import Vulkan.CStruct (ToCStruct(..))
import Vulkan.Zero (Zero(..))
import Control.Monad.IO.Class (MonadIO)
import Data.String (IsString)
import Data.Typeable (Typeable)
import Foreign.Storable (Storable)
import Foreign.Storable (Storable(peek))
import Foreign.Storable (Storable(poke))
import qualified Foreign.Storable (Storable(..))
import GHC.Generics (Generic)
import GHC.IO.Exception (IOErrorType(..))
import GHC.IO.Exception (IOException(..))
import Foreign.Ptr (FunPtr)
import Foreign.Ptr (Ptr)
import Data.Kind (Type)
import Control.Monad.Trans.Cont (ContT(..))
import Vulkan.NamedType ((:::))
import Vulkan.Core10.Handles (Device)
import Vulkan.Core10.Handles (Device(..))
import Vulkan.Core10.Handles (Device(Device))
import Vulkan.Dynamic (DeviceCmds(pVkGetSemaphoreZirconHandleFUCHSIA))
import Vulkan.Dynamic (DeviceCmds(pVkImportSemaphoreZirconHandleFUCHSIA))
import Vulkan.Core10.Handles (Device_T)
import Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits (ExternalSemaphoreHandleTypeFlagBits)
import Vulkan.Core10.Enums.Result (Result)
import Vulkan.Core10.Enums.Result (Result(..))
import Vulkan.Core10.Handles (Semaphore)
import Vulkan.Core11.Enums.SemaphoreImportFlagBits (SemaphoreImportFlags)
import Vulkan.Core10.Enums.StructureType (StructureType)
import Vulkan.Exception (VulkanException(..))
import Vulkan.Extensions.VK_FUCHSIA_imagepipe_surface (Zx_handle_t)
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_IMPORT_SEMAPHORE_ZIRCON_HANDLE_INFO_FUCHSIA))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA))
import Vulkan.Core10.Enums.Result (Result(SUCCESS))
import Vulkan.Extensions.VK_FUCHSIA_imagepipe_surface (Zx_handle_t)
-- Turn the dynamically loaded @vkGetSemaphoreZirconHandleFUCHSIA@ function
-- pointer into a callable Haskell function (a "dynamic" FFI wrapper).
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
  unsafe
#endif
  "dynamic" mkVkGetSemaphoreZirconHandleFUCHSIA
  :: FunPtr (Ptr Device_T -> Ptr SemaphoreGetZirconHandleInfoFUCHSIA -> Ptr Zx_handle_t -> IO Result) -> Ptr Device_T -> Ptr SemaphoreGetZirconHandleInfoFUCHSIA -> Ptr Zx_handle_t -> IO Result
-- | vkGetSemaphoreZirconHandleFUCHSIA - Get a Zircon event handle for a
-- semaphore
--
-- = Description
--
-- Each call to 'getSemaphoreZirconHandleFUCHSIA' /must/ create a Zircon
-- event handle and transfer ownership of it to the application. To avoid
-- leaking resources, the application /must/ release ownership of the
-- Zircon event handle when it is no longer needed.
--
-- Note
--
-- Ownership can be released in many ways. For example, the application can
-- call zx_handle_close() on the file descriptor, or transfer ownership
-- back to Vulkan by using the file descriptor to import a semaphore
-- payload.
--
-- Exporting a Zircon event handle from a semaphore /may/ have side effects
-- depending on the transference of the specified handle type, as described
-- in
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#synchronization-semaphores-importing Importing Semaphore State>.
--
-- == Return Codes
--
-- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-successcodes Success>]
--
-- - 'Vulkan.Core10.Enums.Result.SUCCESS'
--
-- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-errorcodes Failure>]
--
-- - 'Vulkan.Core10.Enums.Result.ERROR_TOO_MANY_OBJECTS'
--
-- - 'Vulkan.Core10.Enums.Result.ERROR_OUT_OF_HOST_MEMORY'
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_FUCHSIA_external_semaphore VK_FUCHSIA_external_semaphore>,
-- 'Vulkan.Core10.Handles.Device', 'SemaphoreGetZirconHandleInfoFUCHSIA'
getSemaphoreZirconHandleFUCHSIA :: forall io
. (MonadIO io)
=> -- | @device@ is the logical device that created the semaphore being
-- exported.
--
-- #VUID-vkGetSemaphoreZirconHandleFUCHSIA-device-parameter# @device@
-- /must/ be a valid 'Vulkan.Core10.Handles.Device' handle
Device
-> -- | @pGetZirconHandleInfo@ is a pointer to a
-- 'SemaphoreGetZirconHandleInfoFUCHSIA' structure containing parameters of
-- the export operation.
--
-- #VUID-vkGetSemaphoreZirconHandleFUCHSIA-pGetZirconHandleInfo-parameter#
-- @pGetZirconHandleInfo@ /must/ be a valid pointer to a valid
-- 'SemaphoreGetZirconHandleInfoFUCHSIA' structure
SemaphoreGetZirconHandleInfoFUCHSIA
-> io (("zirconHandle" ::: Zx_handle_t))
-- Implementation: look the function pointer up in the device's dispatch
-- table, marshal the info struct, call into Vulkan, and read the handle
-- back out of a temporary 4-byte buffer.
getSemaphoreZirconHandleFUCHSIA device getZirconHandleInfo = liftIO . evalContT $ do
  let vkGetSemaphoreZirconHandleFUCHSIAPtr = pVkGetSemaphoreZirconHandleFUCHSIA (case device of Device{deviceCmds} -> deviceCmds)
  -- Fail early with a clear error if the extension function was not loaded.
  lift $ unless (vkGetSemaphoreZirconHandleFUCHSIAPtr /= nullFunPtr) $
    throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkGetSemaphoreZirconHandleFUCHSIA is null" Nothing Nothing
  let vkGetSemaphoreZirconHandleFUCHSIA' = mkVkGetSemaphoreZirconHandleFUCHSIA vkGetSemaphoreZirconHandleFUCHSIAPtr
  pGetZirconHandleInfo <- ContT $ withCStruct (getZirconHandleInfo)
  -- 4-byte output buffer for the zx_handle_t; bracket guarantees it is freed.
  pPZirconHandle <- ContT $ bracket (callocBytes @Zx_handle_t 4) free
  r <- lift $ traceAroundEvent "vkGetSemaphoreZirconHandleFUCHSIA" (vkGetSemaphoreZirconHandleFUCHSIA' (deviceHandle (device)) pGetZirconHandleInfo (pPZirconHandle))
  -- Results below SUCCESS are error codes; surface them as exceptions.
  lift $ when (r < SUCCESS) (throwIO (VulkanException r))
  pZirconHandle <- lift $ peek @Zx_handle_t pPZirconHandle
  pure $ (pZirconHandle)
-- Turn the dynamically loaded @vkImportSemaphoreZirconHandleFUCHSIA@
-- function pointer into a callable Haskell function ("dynamic" wrapper).
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
  unsafe
#endif
  "dynamic" mkVkImportSemaphoreZirconHandleFUCHSIA
  :: FunPtr (Ptr Device_T -> Ptr ImportSemaphoreZirconHandleInfoFUCHSIA -> IO Result) -> Ptr Device_T -> Ptr ImportSemaphoreZirconHandleInfoFUCHSIA -> IO Result
-- | vkImportSemaphoreZirconHandleFUCHSIA - Import a semaphore from a Zircon
-- event handle
--
-- = Description
--
-- Importing a semaphore payload from a Zircon event handle transfers
-- ownership of the handle from the application to the Vulkan
-- implementation. The application /must/ not perform any operations on the
-- handle after a successful import.
--
-- Applications /can/ import the same semaphore payload into multiple
-- instances of Vulkan, into the same instance from which it was exported,
-- and multiple times into a given Vulkan instance.
--
-- == Return Codes
--
-- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-successcodes Success>]
--
-- - 'Vulkan.Core10.Enums.Result.SUCCESS'
--
-- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-errorcodes Failure>]
--
-- - 'Vulkan.Core10.Enums.Result.ERROR_OUT_OF_HOST_MEMORY'
--
-- - 'Vulkan.Core10.Enums.Result.ERROR_INVALID_EXTERNAL_HANDLE'
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_FUCHSIA_external_semaphore VK_FUCHSIA_external_semaphore>,
-- 'Vulkan.Core10.Handles.Device', 'ImportSemaphoreZirconHandleInfoFUCHSIA'
importSemaphoreZirconHandleFUCHSIA :: forall io
. (MonadIO io)
=> -- | @device@ is the logical device that created the semaphore.
--
-- #VUID-vkImportSemaphoreZirconHandleFUCHSIA-device-parameter# @device@
-- /must/ be a valid 'Vulkan.Core10.Handles.Device' handle
Device
-> -- | @pImportSemaphoreZirconHandleInfo@ is a pointer to a
-- 'ImportSemaphoreZirconHandleInfoFUCHSIA' structure specifying the
-- semaphore and import parameters.
--
-- #VUID-vkImportSemaphoreZirconHandleFUCHSIA-pImportSemaphoreZirconHandleInfo-parameter#
-- @pImportSemaphoreZirconHandleInfo@ /must/ be a valid pointer to a valid
-- 'ImportSemaphoreZirconHandleInfoFUCHSIA' structure
ImportSemaphoreZirconHandleInfoFUCHSIA
-> io ()
-- Implementation: look the function pointer up in the device's dispatch
-- table, marshal the info struct, and call into Vulkan; no result value
-- beyond the status code.
importSemaphoreZirconHandleFUCHSIA device importSemaphoreZirconHandleInfo = liftIO . evalContT $ do
  let vkImportSemaphoreZirconHandleFUCHSIAPtr = pVkImportSemaphoreZirconHandleFUCHSIA (case device of Device{deviceCmds} -> deviceCmds)
  -- Fail early with a clear error if the extension function was not loaded.
  lift $ unless (vkImportSemaphoreZirconHandleFUCHSIAPtr /= nullFunPtr) $
    throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkImportSemaphoreZirconHandleFUCHSIA is null" Nothing Nothing
  let vkImportSemaphoreZirconHandleFUCHSIA' = mkVkImportSemaphoreZirconHandleFUCHSIA vkImportSemaphoreZirconHandleFUCHSIAPtr
  pImportSemaphoreZirconHandleInfo <- ContT $ withCStruct (importSemaphoreZirconHandleInfo)
  r <- lift $ traceAroundEvent "vkImportSemaphoreZirconHandleFUCHSIA" (vkImportSemaphoreZirconHandleFUCHSIA' (deviceHandle (device)) pImportSemaphoreZirconHandleInfo)
  -- Results below SUCCESS are error codes; surface them as exceptions.
  lift $ when (r < SUCCESS) (throwIO (VulkanException r))
-- | VkImportSemaphoreZirconHandleInfoFUCHSIA - Structure specifying Zircon
-- event handle to import to a semaphore
--
-- = Description
--
-- The handle types supported by @handleType@ are:
--
-- +-------------------------------------------------------------------------------------------------------------------+------------------+---------------------+
-- | Handle Type | Transference | Permanence |
-- | | | Supported |
-- +===================================================================================================================+==================+=====================+
-- | 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA' | Reference | Temporary,Permanent |
-- +-------------------------------------------------------------------------------------------------------------------+------------------+---------------------+
--
-- Handle Types Supported by 'ImportSemaphoreZirconHandleInfoFUCHSIA'
--
-- == Valid Usage
--
-- - #VUID-VkImportSemaphoreZirconHandleInfoFUCHSIA-handleType-04765#
-- @handleType@ /must/ be a value included in the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#synchronization-semaphore-handletypes-fuchsia Handle Types Supported by >
-- table
--
-- - #VUID-VkImportSemaphoreZirconHandleInfoFUCHSIA-zirconHandle-04766#
-- @zirconHandle@ /must/ obey any requirements listed for @handleType@
-- in
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#external-semaphore-handle-types-compatibility external semaphore handle types compatibility>
--
-- - #VUID-VkImportSemaphoreZirconHandleInfoFUCHSIA-zirconHandle-04767#
-- @zirconHandle@ /must/ have @ZX_RIGHTS_BASIC@ and @ZX_RIGHTS_SIGNAL@
-- rights
--
-- - #VUID-VkImportSemaphoreZirconHandleInfoFUCHSIA-semaphoreType-04768#
-- The
-- 'Vulkan.Core12.Promoted_From_VK_KHR_timeline_semaphore.SemaphoreTypeCreateInfo'::@semaphoreType@
-- field /must/ not be
-- 'Vulkan.Core12.Enums.SemaphoreType.SEMAPHORE_TYPE_TIMELINE'
--
-- == Valid Usage (Implicit)
--
-- - #VUID-VkImportSemaphoreZirconHandleInfoFUCHSIA-sType-sType# @sType@
-- /must/ be
-- 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_IMPORT_SEMAPHORE_ZIRCON_HANDLE_INFO_FUCHSIA'
--
-- - #VUID-VkImportSemaphoreZirconHandleInfoFUCHSIA-pNext-pNext# @pNext@
-- /must/ be @NULL@
--
-- - #VUID-VkImportSemaphoreZirconHandleInfoFUCHSIA-semaphore-parameter#
-- @semaphore@ /must/ be a valid 'Vulkan.Core10.Handles.Semaphore'
-- handle
--
-- - #VUID-VkImportSemaphoreZirconHandleInfoFUCHSIA-flags-parameter#
-- @flags@ /must/ be a valid combination of
-- 'Vulkan.Core11.Enums.SemaphoreImportFlagBits.SemaphoreImportFlagBits'
-- values
--
-- - #VUID-VkImportSemaphoreZirconHandleInfoFUCHSIA-handleType-parameter#
-- @handleType@ /must/ be a valid
-- 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.ExternalSemaphoreHandleTypeFlagBits'
-- value
--
-- == Host Synchronization
--
-- - Host access to @semaphore@ /must/ be externally synchronized
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_FUCHSIA_external_semaphore VK_FUCHSIA_external_semaphore>,
-- 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.ExternalSemaphoreHandleTypeFlagBits',
-- 'Vulkan.Core10.Handles.Semaphore',
-- 'Vulkan.Core11.Enums.SemaphoreImportFlagBits.SemaphoreImportFlags',
-- 'Vulkan.Core10.Enums.StructureType.StructureType',
-- 'importSemaphoreZirconHandleFUCHSIA'
data ImportSemaphoreZirconHandleInfoFUCHSIA = ImportSemaphoreZirconHandleInfoFUCHSIA
  { -- | @semaphore@ is the semaphore into which the payload will be imported.
    semaphore :: Semaphore
  , -- | @flags@ is a bitmask of
    -- 'Vulkan.Core11.Enums.SemaphoreImportFlagBits.SemaphoreImportFlagBits'
    -- specifying additional parameters for the semaphore payload import
    -- operation.
    flags :: SemaphoreImportFlags
  , -- | @handleType@ is a
    -- 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.ExternalSemaphoreHandleTypeFlagBits'
    -- value specifying the type of @zirconHandle@.
    handleType :: ExternalSemaphoreHandleTypeFlagBits
  , -- | @zirconHandle@ is the external handle to import.
    zirconHandle :: Zx_handle_t
  }
  deriving (Typeable, Eq)
-- Marshalled to/from a 40-byte C struct (field offsets 16/24/28/32);
-- see the ToCStruct/FromCStruct instances below.
#if defined(GENERIC_INSTANCES)
deriving instance Generic (ImportSemaphoreZirconHandleInfoFUCHSIA)
#endif
deriving instance Show ImportSemaphoreZirconHandleInfoFUCHSIA
instance ToCStruct ImportSemaphoreZirconHandleInfoFUCHSIA where
  withCStruct x f = allocaBytes 40 $ \p -> pokeCStruct p x (f p)
  pokeCStruct p ImportSemaphoreZirconHandleInfoFUCHSIA{..} f = do
    -- Fixed Vulkan struct header: sType tag at offset 0, null pNext at 8.
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_IMPORT_SEMAPHORE_ZIRCON_HANDLE_INFO_FUCHSIA)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr Semaphore)) (semaphore)
    poke ((p `plusPtr` 24 :: Ptr SemaphoreImportFlags)) (flags)
    poke ((p `plusPtr` 28 :: Ptr ExternalSemaphoreHandleTypeFlagBits)) (handleType)
    poke ((p `plusPtr` 32 :: Ptr Zx_handle_t)) (zirconHandle)
    f
  cStructSize = 40
  cStructAlignment = 8
  -- Zero struct: header plus 'zero' for the non-optional fields; note the
  -- flags slot at offset 24 is intentionally not written here.
  pokeZeroCStruct p f = do
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_IMPORT_SEMAPHORE_ZIRCON_HANDLE_INFO_FUCHSIA)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr Semaphore)) (zero)
    poke ((p `plusPtr` 28 :: Ptr ExternalSemaphoreHandleTypeFlagBits)) (zero)
    poke ((p `plusPtr` 32 :: Ptr Zx_handle_t)) (zero)
    f
instance FromCStruct ImportSemaphoreZirconHandleInfoFUCHSIA where
  -- Reads the payload fields back at their C offsets; the sType/pNext
  -- header (offsets 0 and 8) is not represented on the Haskell side.
  peekCStruct p = do
    semaphore <- peek @Semaphore ((p `plusPtr` 16 :: Ptr Semaphore))
    flags <- peek @SemaphoreImportFlags ((p `plusPtr` 24 :: Ptr SemaphoreImportFlags))
    handleType <- peek @ExternalSemaphoreHandleTypeFlagBits ((p `plusPtr` 28 :: Ptr ExternalSemaphoreHandleTypeFlagBits))
    zirconHandle <- peek @Zx_handle_t ((p `plusPtr` 32 :: Ptr Zx_handle_t))
    pure $ ImportSemaphoreZirconHandleInfoFUCHSIA
             semaphore flags handleType zirconHandle
-- Storable defined in terms of the CStruct marshalling above.
instance Storable ImportSemaphoreZirconHandleInfoFUCHSIA where
  sizeOf ~_ = 40    -- matches cStructSize
  alignment ~_ = 8  -- matches cStructAlignment
  peek = peekCStruct
  poke ptr poked = pokeCStruct ptr poked (pure ())
-- Default value with every field set to its 'Zero' value; convenient as
-- a base for record updates.
instance Zero ImportSemaphoreZirconHandleInfoFUCHSIA where
  zero = ImportSemaphoreZirconHandleInfoFUCHSIA
           zero
           zero
           zero
           zero
-- | VkSemaphoreGetZirconHandleInfoFUCHSIA - Structure describing a Zircon
-- event handle semaphore export operation
--
-- = Description
--
-- The properties of the Zircon event handle returned depend on the value
-- of @handleType@. See
-- 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.ExternalSemaphoreHandleTypeFlagBits'
-- for a description of the properties of the defined external semaphore
-- handle types.
--
-- == Valid Usage
--
-- - #VUID-VkSemaphoreGetZirconHandleInfoFUCHSIA-handleType-04758#
-- @handleType@ /must/ have been included in
-- 'Vulkan.Core11.Promoted_From_VK_KHR_external_semaphore.ExportSemaphoreCreateInfo'::@handleTypes@
-- when @semaphore@’s current payload was created
--
-- - #VUID-VkSemaphoreGetZirconHandleInfoFUCHSIA-semaphore-04759#
-- @semaphore@ /must/ not currently have its payload replaced by an
-- imported payload as described below in
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#synchronization-semaphores-importing Importing Semaphore Payloads>
-- unless that imported payload’s handle type was included in
-- 'Vulkan.Core11.Promoted_From_VK_KHR_external_semaphore_capabilities.ExternalSemaphoreProperties'::@exportFromImportedHandleTypes@
-- for @handleType@
--
-- - #VUID-VkSemaphoreGetZirconHandleInfoFUCHSIA-handleType-04760# If
-- @handleType@ refers to a handle type with copy payload transference
-- semantics, as defined below in
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#synchronization-semaphores-importing Importing Semaphore Payloads>,
-- there /must/ be no queue waiting on @semaphore@
--
-- - #VUID-VkSemaphoreGetZirconHandleInfoFUCHSIA-handleType-04761# If
-- @handleType@ refers to a handle type with copy payload transference
-- semantics, @semaphore@ /must/ be signaled, or have an associated
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#synchronization-semaphores-signaling semaphore signal operation>
-- pending execution
--
-- - #VUID-VkSemaphoreGetZirconHandleInfoFUCHSIA-handleType-04762#
-- @handleType@ /must/ be defined as a Zircon event handle
--
-- - #VUID-VkSemaphoreGetZirconHandleInfoFUCHSIA-semaphore-04763#
-- @semaphore@ /must/ have been created with a
-- 'Vulkan.Core12.Enums.SemaphoreType.SemaphoreType' of
-- 'Vulkan.Core12.Enums.SemaphoreType.SEMAPHORE_TYPE_BINARY'
--
-- == Valid Usage (Implicit)
--
-- - #VUID-VkSemaphoreGetZirconHandleInfoFUCHSIA-sType-sType# @sType@
-- /must/ be
-- 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA'
--
-- - #VUID-VkSemaphoreGetZirconHandleInfoFUCHSIA-pNext-pNext# @pNext@
-- /must/ be @NULL@
--
-- - #VUID-VkSemaphoreGetZirconHandleInfoFUCHSIA-semaphore-parameter#
-- @semaphore@ /must/ be a valid 'Vulkan.Core10.Handles.Semaphore'
-- handle
--
-- - #VUID-VkSemaphoreGetZirconHandleInfoFUCHSIA-handleType-parameter#
-- @handleType@ /must/ be a valid
-- 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.ExternalSemaphoreHandleTypeFlagBits'
-- value
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_FUCHSIA_external_semaphore VK_FUCHSIA_external_semaphore>,
-- 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.ExternalSemaphoreHandleTypeFlagBits',
-- 'Vulkan.Core10.Handles.Semaphore',
-- 'Vulkan.Core10.Enums.StructureType.StructureType',
-- 'getSemaphoreZirconHandleFUCHSIA'
data SemaphoreGetZirconHandleInfoFUCHSIA = SemaphoreGetZirconHandleInfoFUCHSIA
  { -- | @semaphore@ is the semaphore from which state will be exported.
    semaphore :: Semaphore
  , -- | @handleType@ is a
    -- 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.ExternalSemaphoreHandleTypeFlagBits'
    -- value specifying the type of handle requested.
    handleType :: ExternalSemaphoreHandleTypeFlagBits
  }
  deriving (Typeable, Eq)
-- Marshalled to/from a 32-byte C struct (field offsets 16 and 24);
-- see the ToCStruct/FromCStruct instances below.
#if defined(GENERIC_INSTANCES)
deriving instance Generic (SemaphoreGetZirconHandleInfoFUCHSIA)
#endif
deriving instance Show SemaphoreGetZirconHandleInfoFUCHSIA
instance ToCStruct SemaphoreGetZirconHandleInfoFUCHSIA where
  withCStruct x f = allocaBytes 32 $ \p -> pokeCStruct p x (f p)
  pokeCStruct p SemaphoreGetZirconHandleInfoFUCHSIA{..} f = do
    -- Fixed Vulkan struct header: sType tag at offset 0, null pNext at 8.
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr Semaphore)) (semaphore)
    poke ((p `plusPtr` 24 :: Ptr ExternalSemaphoreHandleTypeFlagBits)) (handleType)
    f
  cStructSize = 32
  cStructAlignment = 8
  -- Zero struct: header plus 'zero' for both payload fields.
  pokeZeroCStruct p f = do
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr Semaphore)) (zero)
    poke ((p `plusPtr` 24 :: Ptr ExternalSemaphoreHandleTypeFlagBits)) (zero)
    f
instance FromCStruct SemaphoreGetZirconHandleInfoFUCHSIA where
  -- Reads the payload fields back at their C offsets; the sType/pNext
  -- header is not represented on the Haskell side.
  peekCStruct p = do
    semaphore <- peek @Semaphore ((p `plusPtr` 16 :: Ptr Semaphore))
    handleType <- peek @ExternalSemaphoreHandleTypeFlagBits ((p `plusPtr` 24 :: Ptr ExternalSemaphoreHandleTypeFlagBits))
    pure $ SemaphoreGetZirconHandleInfoFUCHSIA
             semaphore handleType
-- Storable defined in terms of the CStruct marshalling above.
instance Storable SemaphoreGetZirconHandleInfoFUCHSIA where
  sizeOf ~_ = 32    -- matches cStructSize
  alignment ~_ = 8  -- matches cStructAlignment
  peek = peekCStruct
  poke ptr poked = pokeCStruct ptr poked (pure ())
-- Default value with every field set to its 'Zero' value.
instance Zero SemaphoreGetZirconHandleInfoFUCHSIA where
  zero = SemaphoreGetZirconHandleInfoFUCHSIA
           zero
           zero
-- | Type-level spec version of the @VK_FUCHSIA_external_semaphore@ extension.
type FUCHSIA_EXTERNAL_SEMAPHORE_SPEC_VERSION = 1

-- | Value-level spec version, usable at any 'Integral' type.
pattern FUCHSIA_EXTERNAL_SEMAPHORE_SPEC_VERSION :: forall a . Integral a => a
pattern FUCHSIA_EXTERNAL_SEMAPHORE_SPEC_VERSION = 1

-- | Type-level extension name string.
type FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME = "VK_FUCHSIA_external_semaphore"

-- | Value-level extension name, usable at any 'IsString' type.
pattern FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME = "VK_FUCHSIA_external_semaphore"
| expipiplus1/vulkan | src/Vulkan/Extensions/VK_FUCHSIA_external_semaphore.hs | bsd-3-clause | 27,563 | 0 | 17 | 5,049 | 3,026 | 1,830 | 1,196 | -1 | -1 |
module Data.BEncode.Type
( BEncode(..)
) where
-- | The abstract syntax of bencoded data, parametrised over the string
-- representation used for string literals and dictionary keys.
data BEncode a = BInt Integer
               | BString a
               | BList [BEncode a]
               | BDict [(a, BEncode a)]
               deriving (Eq, Ord, Read, Show)

-- | Mapping touches every string position: string literals, and both the
-- keys and the values of dictionaries. Integers carry no strings.
instance Functor BEncode where
    fmap _ (BInt n)    = BInt n
    fmap f (BString s) = BString (f s)
    fmap f (BList es)  = BList (map (fmap f) es)
    fmap f (BDict kvs) = BDict [(f k, fmap f v) | (k, v) <- kvs]
| mat8913/bencode-unicode | Data/BEncode/Type.hs | bsd-3-clause | 424 | 0 | 11 | 136 | 214 | 114 | 100 | 12 | 0 |
-- --------------------------------------------------
-- Examples of Functional images
-- --------------------------------------------------
module FunctionalImages
where
import FunctionalImagesBase
import Data.Bits
-- | An infinite vertical strip of unit width, centred on the y axis.
vstrip :: Image Bool
vstrip (x, _) = abs x < 0.5
-- | An infinite chess board with unit squares.
checker :: Image Bool
checker (u, v) = even squareIndex
  where
    squareIndex = floor u + floor v
-- | A checker board warped into polar coordinates: the angle is scaled
-- so a full turn spans @n@ alternating slices per unit ring.
polarChecker :: Int -> Image Bool
polarChecker n = checker . scaleAngle . toPolar
  where
    scaleAngle (r, theta) = (r, theta * fromIntegral n / pi)
-- | Concentric rings of unit width around the origin, alternating on/off.
altRings :: Image Bool
altRings p = even (floor (dist0 p))
-- | A radial cosine wave, scaled into the range [0, 1].
wavDist :: Image Frac
wavDist point = (1 + cos (pi * radius)) / 2
  where
    radius = dist0 point
-- | Sierpinski triangle via a bitwise trick: a point is in the gasket
-- exactly when the set bits of |y| are a subset of those of |x|,
-- i.e. @x .|. y == x@ on the rounded absolute coordinates.
sierpinski :: Image Bool
sierpinski (x, y) = (ax .|. ay) == ax
  where
    ax = abs (round x :: Int)
    ay = abs (round y :: Int)
-- | Bilinear interpolation between black, red, blue and white placed at
-- the four corners of the unit square (via 'bilerpC').
bilerpBRBW :: Image Color
bilerpBRBW = bilerpC black red blue white
-- | Render a region as a two-colour image: points inside the region get
-- the first colour, points outside the second.
blackWhiteIm, blueYellowIm :: Region -> ImageC
blackWhiteIm reg = cond reg blackI whiteI
blueYellowIm reg = cond reg blueI yellowI
-- | Blend a black\/white polar checker (10 slices) with a blue\/yellow
-- Cartesian checker, weighted pointwise by the radial wave 'wavDist'.
coloredPolarChess :: ImageC
coloredPolarChess = lerpI wavDist(blackWhiteIm (polarChecker 10)) (blueYellowIm checker )
-- | Rings that blend smoothly between blue and yellow, weighted by the
-- radial wave 'wavDist'.
ybRings :: ImageC
ybRings = lerpI wavDist blueI yellowI
| hansroland/FunctionalImages | src/FunctionalImages.hs | bsd-3-clause | 1,358 | 0 | 11 | 261 | 387 | 205 | 182 | 27 | 1 |
-- | Generators in Haskell
--
-- We translate the in-order tree traversal example from an old article
-- Generators in Icon, Python, and Scheme, 2004.
--
-- <http://okmij.org/ftp/Scheme/enumerators-callcc.html#Generators>
--
-- using Haskell and delimited continuations rather than call/cc + mutation.
-- The code is shorter, and it even types.
-- To be honest, we actually translate the OCaml code generator.ml
--
-- In this code, we use a single global prompt (that is, ordinary shift0)
-- Generator2.hs shows the need for several prompts.
--
module Control.Generator1 where
import Control.CCExc
import Control.Monad.Trans (liftIO, lift)
import Control.Monad.ST -- for pure tests
import Data.STRef
{-
A sample program Python programmers seem to be proud of: an in-order
traversal of a tree:
>>>> # A recursive generator that generates Tree leaves in in-order.
>>> def inorder(t):
... if t:
... for x in inorder(t.left):
... yield x
... yield t.label
... for x in inorder(t.right):
... yield x
Given below is the complete implementation in Haskell.
-}
-- | Preliminaries: the tree type and a sample tree to traverse.
type Label = Int

-- | A binary tree with labelled interior nodes.
data Tree = Leaf | Node Label Tree Tree deriving Show

-- | Build a complete binary tree of the given depth. The root is
-- labelled 1 and node k has children 2k and 2k+1 (heap numbering).
make_full_tree :: Int -> Tree
make_full_tree depth = build 1 depth
  where
    build _ 0   = Leaf
    build lbl n = Node lbl (build (2 * lbl) (n - 1))
                           (build (2 * lbl + 1) (n - 1))

-- | A sample tree of depth 3, labels 1..7.
tree1 :: Tree
tree1 = make_full_tree 3
-- | In Python, `yield' is a keyword. In Haskell, it is a regular function.
-- Furthermore, it is a user-defined function, in one line of code.
-- To get generators there is no need to extend a language.
--
type P m a = PS (Res m a) -- the type of the single prompt (recursive)
-- | A suspended generator: given the consumer (the "for-loop body"),
-- resume the traversal, feeding the consumer each produced value.
newtype Res m a = Res ( (a -> CC (P m a) m ()) -> CC (P m a) m () )
-- Apply a suspension to a consumer.
outRes body (Res f) = f body
-- | Emit one value: capture the rest of the traversal as k, hand v to the
-- consumer b, then resume k and keep driving its result with the same b.
yield :: Monad m => a -> CC (P m a) m ()
yield v = shift0P ps (\k -> return . Res $ \b -> b v >> k () >>= outRes b)
-- | The enumerator: the for-loop essentially
-- Run the iterator under the generator prompt; a finished iterator
-- answers with an inert suspension (const $ return ()), and the resulting
-- chain of suspensions is driven by the given body.
enumerate iterator body =
    pushPrompt ps (iterator >> (return . Res . const $ return ())) >>=
    outRes body
-- | In-order traversal as a generator: left subtree, this node's label,
-- right subtree.  Compare with the Python version quoted above.
in_order :: (Monad m) => Tree -> CC (P m Label) m ()
in_order Leaf = return ()
in_order (Node label left right) =
  in_order left >> yield label >> in_order right
-- | Print out the result of the in-order traversal
test_io :: IO ()
test_io = runCC $ enumerate (in_order tree1) (liftIO . print)
-- 4 2 5 1 6 3 7
-- | Or return it as a pure list; the effects are encapsulated
-- (neither the ST mutation nor the control effects escape runST).
test_st :: [Label]
test_st = runST (do
  -- accumulate labels in reverse, then flip once at the end
  res <- newSTRef []
  let body v = modifySTRef res (v:)
  runCC $ enumerate (in_order tree1) (lift . body)
  readSTRef res >>= return . reverse)
-- [4,2,5,1,6,3,7]
| suhailshergill/liboleg | Control/Generator1.hs | bsd-3-clause | 2,818 | 2 | 13 | 650 | 640 | 340 | 300 | 34 | 2 |
{-# OPTIONS -fno-warn-incomplete-patterns -optc-DNON_POSIX_SOURCE #-}
-----------------------------------------------------------------------------
--
-- GHC Driver program
--
-- (c) The University of Glasgow 2005
--
-----------------------------------------------------------------------------
module Main (main) where
-- The official GHC API
import qualified GHC
import GHC ( -- DynFlags(..), HscTarget(..),
-- GhcMode(..), GhcLink(..),
Ghc, GhcMonad(..),
LoadHowMuch(..) )
import CmdLineParser
-- Implementations of the various modes (--show-iface, mkdependHS. etc.)
import LoadIface ( showIface )
import HscMain ( newHscEnv )
import DriverPipeline ( oneShot, compileFile )
import DriverMkDepend ( doMkDependHS )
#ifdef GHCI
import InteractiveUI ( interactiveUI, ghciWelcomeMsg )
#endif
-- Various other random stuff that we need
import Config
import HscTypes
import Packages ( dumpPackages )
import DriverPhases ( Phase(..), isSourceFilename, anyHsc,
startPhase, isHaskellSrcFilename )
import BasicTypes ( failed )
import StaticFlags
import StaticFlagParser
import DynFlags
import ErrUtils
import FastString
import Outputable
import SrcLoc
import Util
import Panic
import MonadUtils ( liftIO )
-- Imports for --abi-hash
import LoadIface ( loadUserInterface )
import Module ( mkModuleName )
import Finder ( findImportedModule, cannotFindInterface )
import TcRnMonad ( initIfaceCheck )
import Binary ( openBinMem, put_, fingerprintBinMem )
-- Standard Haskell libraries
import System.IO
import System.Environment
import System.Exit
import System.FilePath
import Control.Monad
import Data.Char
import Data.List
import Data.Maybe
-----------------------------------------------------------------------------
-- ToDo:
-- time commands when run with -v
-- user ways
-- Win32 support: proper signal handling
-- reading the package configuration file is too slow
-- -K<size>
-----------------------------------------------------------------------------
-- GHC's command-line interface
-- | Process entry point.  Peels off -B, static flags and mode flags, then
-- either answers a "pre-startup" query (version etc.) immediately or
-- starts a GHC session and hands over to 'main''.
main :: IO ()
main = do
   hSetBuffering stdout NoBuffering
   GHC.defaultErrorHandler defaultLogAction $ do
    -- 1. extract the -B flag from the args
    argv0 <- getArgs
    let (minusB_args, argv1) = partition ("-B" `isPrefixOf`) argv0
        mbMinusB | null minusB_args = Nothing
                 | otherwise = Just (drop 2 (last minusB_args))
    let argv1' = map (mkGeneralLocated "on the commandline") argv1
    (argv2, staticFlagWarnings) <- parseStaticFlags argv1'
    -- 2. Parse the "mode" flags (--make, --interactive etc.)
    (mode, argv3, modeFlagWarnings) <- parseModeFlags argv2
    let flagWarnings = staticFlagWarnings ++ modeFlagWarnings
    -- If all we want to do is something like showing the version number
    -- then do it now, before we start a GHC session etc. This makes
    -- getting basic information much more resilient.
    -- In particular, if we wait until later before giving the version
    -- number then bootstrapping gets confused, as it tries to find out
    -- what version of GHC it's using before package.conf exists, so
    -- starting the session fails.
    case mode of
        Left preStartupMode ->
            do case preStartupMode of
                   ShowSupportedExtensions -> showSupportedExtensions
                   ShowVersion             -> showVersion
                   ShowNumVersion          -> putStrLn cProjectVersion
                   Print str               -> putStrLn str
        Right postStartupMode ->
            -- start our GHC session
            GHC.runGhc mbMinusB $ do
            dflags <- GHC.getSessionDynFlags
            case postStartupMode of
                Left preLoadMode ->
                    liftIO $ do
                        case preLoadMode of
                            ShowInfo               -> showInfo dflags
                            ShowGhcUsage           -> showGhcUsage  dflags
                            ShowGhciUsage          -> showGhciUsage dflags
                            PrintWithDynFlags f    -> putStrLn (f dflags)
                Right postLoadMode ->
                    main' postLoadMode dflags argv3 flagWarnings
-- | The post-startup driver: derive GhcMode/HscTarget/GhcLink from the
-- mode flag, parse the remaining (dynamic) flags, split leftovers into
-- sources and objects, sanity-check, and dispatch on the load mode.
main' :: PostLoadMode -> DynFlags -> [Located String] -> [Located String]
      -> Ghc ()
main' postLoadMode dflags0 args flagWarnings = do
  -- set the default GhcMode, HscTarget and GhcLink.  The HscTarget
  -- can be further adjusted on a module by module basis, using only
  -- the -fvia-C and -fasm flags.  If the default HscTarget is not
  -- HscC or HscAsm, -fvia-C and -fasm have no effect.
  let dflt_target = hscTarget dflags0
      (mode, lang, link)
         = case postLoadMode of
               DoInteractive   -> (CompManager, HscInterpreted, LinkInMemory)
               DoEval _        -> (CompManager, HscInterpreted, LinkInMemory)
               DoMake          -> (CompManager, dflt_target,    LinkBinary)
               DoMkDependHS    -> (MkDepend,    dflt_target,    LinkBinary)
               DoAbiHash       -> (OneShot,     dflt_target,    LinkBinary)
               _               -> (OneShot,     dflt_target,    LinkBinary)
  let dflags1 = dflags0{ ghcMode   = mode,
                         hscTarget = lang,
                         ghcLink   = link,
                         -- leave out hscOutName for now
                         hscOutName = panic "Main.main:hscOutName not set",
                         verbosity = case postLoadMode of
                                         DoEval _ -> 0
                                         _other   -> 1
                        }
      -- turn on -fimplicit-import-qualified for GHCi now, so that it
      -- can be overridden from the command-line
      dflags1a | DoInteractive <- postLoadMode = imp_qual_enabled
               | DoEval _      <- postLoadMode = imp_qual_enabled
               | otherwise                     = dflags1
        where imp_qual_enabled = dflags1 `dopt_set` Opt_ImplicitImportQualified
  -- The rest of the arguments are "dynamic"
  -- Leftover ones are presumably files
  (dflags2, fileish_args, dynamicFlagWarnings) <- GHC.parseDynamicFlags dflags1a args
  let flagWarnings' = flagWarnings ++ dynamicFlagWarnings
  handleSourceError (\e -> do
       GHC.printException e
       liftIO $ exitWith (ExitFailure 1)) $ do
         liftIO $ handleFlagWarnings dflags2 flagWarnings'
  -- make sure we clean up after ourselves
  GHC.defaultCleanupHandler dflags2 $ do
  liftIO $ showBanner postLoadMode dflags2
  -- we've finished manipulating the DynFlags, update the session
  _ <- GHC.setSessionDynFlags dflags2
  dflags3 <- GHC.getSessionDynFlags
  hsc_env <- GHC.getSession
  let
     -- To simplify the handling of filepaths, we normalise all filepaths right
     -- away - e.g., for win32 platforms, backslashes are converted
     -- into forward slashes.
      normal_fileish_paths = map (normalise . unLoc) fileish_args
      (srcs, objs)         = partition_args normal_fileish_paths [] []
  -- Note: have v_Ld_inputs maintain the order in which 'objs' occurred on
  --       the command-line.
  liftIO $ mapM_ (consIORef v_Ld_inputs) (reverse objs)
  ---------------- Display configuration -----------
  when (verbosity dflags3 >= 4) $
        liftIO $ dumpPackages dflags3
  when (verbosity dflags3 >= 3) $ do
        liftIO $ hPutStrLn stderr ("Hsc static flags: " ++ unwords staticFlags)
  ---------------- Final sanity checking -----------
  liftIO $ checkOptions postLoadMode dflags3 srcs objs
  ---------------- Do the business -----------
  handleSourceError (\e -> do
       GHC.printException e
       liftIO $ exitWith (ExitFailure 1)) $ do
    case postLoadMode of
       ShowInterface f        -> liftIO $ doShowIface dflags3 f
       DoMake                 -> doMake srcs
       DoMkDependHS           -> doMkDependHS (map fst srcs)
       StopBefore p           -> liftIO (oneShot hsc_env p srcs)
       DoInteractive          -> interactiveUI srcs Nothing
       DoEval exprs           -> interactiveUI srcs $ Just $ reverse exprs
       DoAbiHash              -> abiHash srcs
  liftIO $ dumpFinalStats dflags3
#ifndef GHCI
-- | Stub used when GHC is built without the interpreter: any attempt to
-- enter interactive/eval mode is a command-line error.
interactiveUI :: b -> c -> Ghc ()
interactiveUI _ _ =
  ghcError (CmdLineError "not built for interactive use")
#endif
-- -----------------------------------------------------------------------------
-- Splitting arguments into source files and object files. This is where we
-- interpret the -x <suffix> option, and attach a (Maybe Phase) to each source
-- file indicating the phase specified by the -x option in force, if any.
-- | Split leftover command-line words into compilation-manager sources
-- and raw linker objects, honouring any "-x <suffix>" phase override in
-- force.  Each source is paired with the phase forced by -x, if any; both
-- result lists come back in command-line order.
partition_args :: [String] -> [(String, Maybe Phase)] -> [String]
               -> ([(String, Maybe Phase)], [String])
partition_args [] srcs objs = (reverse srcs, reverse objs)
partition_args ("-x":suff:args) srcs objs
  | "none" <- suff  = partition_args args srcs objs
  | StopLn <- phase = partition_args args srcs (affected ++ objs)
  | otherwise       = partition_args remaining (forced ++ srcs) objs
      where phase                 = startPhase suff
            (affected, remaining) = break (== "-x") args
            forced                = map (\src -> (src, Just phase)) affected
partition_args (arg:args) srcs objs
  | looks_like_an_input arg = partition_args args ((arg,Nothing):srcs) objs
  | otherwise               = partition_args args srcs (arg:objs)
{-
We split out the object files (.o, .dll) and add them
to v_Ld_inputs for use by the linker.
The following things should be considered compilation manager inputs:
- haskell source files (strings ending in .hs, .lhs or other
haskellish extension),
- module names (not forgetting hierarchical module names),
- and finally we consider everything not containing a '.' to be
a comp manager input, as shorthand for a .hs or .lhs filename.
Everything else is considered to be a linker object, and passed
straight through to the linker.
-}
-- | Heuristic for "compilation manager input": real Haskell source files,
-- (possibly hierarchical) module names, and anything containing no dot at
-- all (shorthand for a .hs/.lhs file).  Everything else is handed to the
-- linker unchanged.
looks_like_an_input :: String -> Bool
looks_like_an_input m
  | isSourceFilename m    = True
  | looksLikeModuleName m = True
  | otherwise             = '.' `notElem` m
-- -----------------------------------------------------------------------------
-- Option sanity checks
-- | Ensure sanity of options.
--
-- Throws 'UsageError' or 'CmdLineError' if not.
checkOptions :: PostLoadMode -> DynFlags -> [(String,Maybe Phase)] -> [String] -> IO ()
     -- Final sanity checking before kicking off a compilation (pipeline).
checkOptions mode dflags srcs objs = do
     -- Complain about any unknown flags
   let unknown_opts = [ f | (f@('-':_), _) <- srcs ]
   when (notNull unknown_opts) (unknownFlagsErr unknown_opts)
   -- RTS-only ways are meaningless under the bytecode interpreter
   when (notNull (filter isRTSWay (wayNames dflags))
         && isInterpretiveMode mode) $
        hPutStrLn stderr ("Warning: -debug, -threaded and -ticky are ignored by GHCi")
        -- -prof and --interactive are not a good combination
   when (notNull (filter (not . isRTSWay) (wayNames dflags))
         && isInterpretiveMode mode) $
      do ghcError (UsageError
                   "--interactive can't be used with -prof or -unreg.")
        -- -ohi sanity check
   if (isJust (outputHi dflags) &&
      (isCompManagerMode mode || srcs `lengthExceeds` 1))
        then ghcError (UsageError "-ohi can only be used when compiling a single source file")
        else do
        -- -o sanity checking
   if (srcs `lengthExceeds` 1 && isJust (outputFile dflags)
         && not (isLinkMode mode))
        then ghcError (UsageError "can't apply -o to multiple source files")
        else do
   -- warn about objects that can never be used because we won't link
   let not_linking = not (isLinkMode mode) || isNoLink (ghcLink dflags)
   when (not_linking && not (null objs)) $
        hPutStrLn stderr ("Warning: the following files would be used as linker inputs, but linking is not being done: " ++ unwords objs)
        -- Check that there are some input files
        -- (except in the interactive case)
   if null srcs && (null objs || not_linking) && needsInputsMode mode
        then ghcError (UsageError "no input files")
        else do
        -- Verify that output files point somewhere sensible.
        verifyOutputFiles dflags
-- Compiler output options
--
-- | Verify that the directory portions of the output-file options
-- (-o, -ohi) already exist; abort with a 'CmdLineError' otherwise.
-- The assumption is that these directories must exist by the time
-- 'verifyOutputFiles' is invoked.
-- (-odir is deliberately NOT checked: that directory is created on
-- demand if missing, see #2278.)
verifyOutputFiles :: DynFlags -> IO ()
verifyOutputFiles dflags = do
  checkDir "-o"   (outputFile dflags)
  checkDir "-ohi" (outputHi dflags)
 where
  -- Validate one optional output path; nothing to do when unset.
  -- Pattern matching replaces the previous isJust/fromJust pairing.
  checkDir _   Nothing   = return ()
  checkDir flg (Just fn) = do
    ok <- doesDirNameExist fn
    unless ok (nonExistentDir flg fn)
  nonExistentDir flg dir =
    ghcError (CmdLineError ("error: directory portion of " ++
                            show dir ++ " does not exist (used with " ++
                            show flg ++ " option.)"))
-----------------------------------------------------------------------------
-- GHC modes of operation
-- | The mode GHC runs in, layered by how much machinery each mode needs:
-- pre-startup modes need no session at all, pre-load modes need DynFlags,
-- post-load modes do real work.
type Mode = Either PreStartupMode PostStartupMode
type PostStartupMode = Either PreLoadMode PostLoadMode
-- | Modes answerable before any GHC session exists.
data PreStartupMode
  = ShowVersion             -- ghc -V/--version
  | ShowNumVersion          -- ghc --numeric-version
  | ShowSupportedExtensions -- ghc --supported-extensions
  | Print String            -- ghc --print-foo
showVersionMode, showNumVersionMode, showSupportedExtensionsMode :: Mode
showVersionMode             = mkPreStartupMode ShowVersion
showNumVersionMode          = mkPreStartupMode ShowNumVersion
showSupportedExtensionsMode = mkPreStartupMode ShowSupportedExtensions
mkPreStartupMode :: PreStartupMode -> Mode
mkPreStartupMode = Left
isShowVersionMode :: Mode -> Bool
isShowVersionMode (Left ShowVersion) = True
isShowVersionMode _ = False
isShowNumVersionMode :: Mode -> Bool
isShowNumVersionMode (Left ShowNumVersion) = True
isShowNumVersionMode _ = False
-- | Modes that need DynFlags but no compilation: usage and info queries.
data PreLoadMode
  = ShowGhcUsage                           -- ghc -?
  | ShowGhciUsage                          -- ghci -?
  | ShowInfo                               -- ghc --info
  | PrintWithDynFlags (DynFlags -> String) -- ghc --print-foo
showGhcUsageMode, showGhciUsageMode, showInfoMode :: Mode
showGhcUsageMode  = mkPreLoadMode ShowGhcUsage
showGhciUsageMode = mkPreLoadMode ShowGhciUsage
showInfoMode      = mkPreLoadMode ShowInfo
-- | @--print-foo@: look the key up in 'compilerInfo' once flags exist.
printSetting :: String -> Mode
printSetting k = mkPreLoadMode (PrintWithDynFlags f)
    where f dflags = fromMaybe (panic ("Setting not found: " ++ show k))
                   $ lookup k (compilerInfo dflags)
mkPreLoadMode :: PreLoadMode -> Mode
mkPreLoadMode = Right . Left
isShowGhcUsageMode :: Mode -> Bool
isShowGhcUsageMode (Right (Left ShowGhcUsage)) = True
isShowGhcUsageMode _ = False
isShowGhciUsageMode :: Mode -> Bool
isShowGhciUsageMode (Right (Left ShowGhciUsage)) = True
isShowGhciUsageMode _ = False
-- | Modes that do actual compiler work, dispatched in 'main''.
data PostLoadMode
  = ShowInterface FilePath  -- ghc --show-iface
  | DoMkDependHS            -- ghc -M
  | StopBefore Phase        -- ghc -E | -C | -S
                            -- StopBefore StopLn is the default
  | DoMake                  -- ghc --make
  | DoInteractive           -- ghc --interactive
  | DoEval [String]         -- ghc -e foo -e bar => DoEval ["bar", "foo"]
  | DoAbiHash               -- ghc --abi-hash
doMkDependHSMode, doMakeMode, doInteractiveMode, doAbiHashMode :: Mode
doMkDependHSMode = mkPostLoadMode DoMkDependHS
doMakeMode = mkPostLoadMode DoMake
doInteractiveMode = mkPostLoadMode DoInteractive
doAbiHashMode = mkPostLoadMode DoAbiHash
showInterfaceMode :: FilePath -> Mode
showInterfaceMode fp = mkPostLoadMode (ShowInterface fp)
stopBeforeMode :: Phase -> Mode
stopBeforeMode phase = mkPostLoadMode (StopBefore phase)
-- Note: a single -e; 'setMode' accumulates repeated -e flags.
doEvalMode :: String -> Mode
doEvalMode str = mkPostLoadMode (DoEval [str])
mkPostLoadMode :: PostLoadMode -> Mode
mkPostLoadMode = Right . Right
isDoInteractiveMode :: Mode -> Bool
isDoInteractiveMode (Right (Right DoInteractive)) = True
isDoInteractiveMode _ = False
isStopLnMode :: Mode -> Bool
isStopLnMode (Right (Right (StopBefore StopLn))) = True
isStopLnMode _ = False
isDoMakeMode :: Mode -> Bool
isDoMakeMode (Right (Right DoMake)) = True
isDoMakeMode _ = False
#ifdef GHCI
-- | True only for --interactive proper (not -e); used for the banner.
isInteractiveMode :: PostLoadMode -> Bool
isInteractiveMode DoInteractive = True
isInteractiveMode _             = False
#endif
-- isInterpretiveMode: byte-code compiler involved
isInterpretiveMode :: PostLoadMode -> Bool
isInterpretiveMode mode =
  case mode of
    DoInteractive -> True
    DoEval _      -> True
    _             -> False
-- | Modes that are pointless without at least one input file.
needsInputsMode :: PostLoadMode -> Bool
needsInputsMode mode =
  case mode of
    DoMkDependHS -> True
    StopBefore _ -> True
    DoMake       -> True
    _            -> False
-- True if we are going to attempt to link in this mode.
-- (we might not actually link, depending on the GhcLink flag)
isLinkMode :: PostLoadMode -> Bool
isLinkMode mode =
  case mode of
    StopBefore StopLn -> True
    DoMake            -> True
    DoInteractive     -> True
    DoEval _          -> True
    _                 -> False
-- | Modes driven by the compilation manager (--make, ghci, -e).
isCompManagerMode :: PostLoadMode -> Bool
isCompManagerMode mode =
  case mode of
    DoMake        -> True
    DoInteractive -> True
    DoEval _      -> True
    _             -> False
-- -----------------------------------------------------------------------------
-- Parsing the mode flag
-- | Pull the (at most one) mode flag out of the argument list, defaulting
-- to --make.  Returns the mode, the remaining flags (mode-flag-induced
-- additions first), and any warnings; unknown-mode errors abort here.
parseModeFlags :: [Located String]
               -> IO (Mode,
                      [Located String],
                      [Located String])
parseModeFlags args = do
  let ((leftover, errs1, warns), (mModeFlag, errs2, flags')) =
          runCmdLine (processArgs mode_flags args)
                     (Nothing, [], [])
      mode = case mModeFlag of
             Nothing     -> doMakeMode
             Just (m, _) -> m
      errs = errs1 ++ map (mkGeneralLocated "on the commandline") errs2
  when (not (null errs)) $ ghcError $ errorsToGhcException errs
  return (mode, flags' ++ leftover, warns)
-- | State threaded while parsing mode flags: the chosen mode (with the
-- flag text that selected it), accumulated errors, and extra flags.
type ModeM = CmdLineP (Maybe (Mode, String), [String], [Located String])
  -- mode flags sometimes give rise to new DynFlags (eg. -C, see below)
  -- so we collect the new ones and return them.
mode_flags :: [Flag ModeM]
mode_flags =
  [  ------- help / version ----------------------------------------------
    Flag "?"                     (PassFlag (setMode showGhcUsageMode))
  , Flag "-help"                 (PassFlag (setMode showGhcUsageMode))
  , Flag "V"                     (PassFlag (setMode showVersionMode))
  , Flag "-version"              (PassFlag (setMode showVersionMode))
  , Flag "-numeric-version"      (PassFlag (setMode showNumVersionMode))
  , Flag "-info"                 (PassFlag (setMode showInfoMode))
  , Flag "-supported-languages"  (PassFlag (setMode showSupportedExtensionsMode))
  , Flag "-supported-extensions" (PassFlag (setMode showSupportedExtensionsMode))
  ] ++
  -- Each "--print-<kebab-cased key>" flag queries one compilerInfo entry.
  [ Flag k'  (PassFlag (setMode (printSetting k)))
  | k <- ["Project version",
          "Booter version",
          "Stage",
          "Build platform",
          "Host platform",
          "Target platform",
          "Have interpreter",
          "Object splitting supported",
          "Have native code generator",
          "Support SMP",
          "Unregisterised",
          "Tables next to code",
          "RTS ways",
          "Leading underscore",
          "Debug on",
          "LibDir",
          "Global Package DB",
          "C compiler flags",
          "Gcc Linker flags",
          "Ld Linker flags"],
    let k' = "-print-" ++ map (replaceSpace . toLower) k
        replaceSpace ' ' = '-'
        replaceSpace c   = c
  ] ++
      ------- interfaces ----------------------------------------------------
  [ Flag "-show-iface"  (HasArg (\f -> setMode (showInterfaceMode f)
                                               "--show-iface"))
      ------- primary modes ------------------------------------------------
  , Flag "c"            (PassFlag (\f -> do setMode (stopBeforeMode StopLn) f
                                            addFlag "-no-link" f))
  , Flag "M"            (PassFlag (setMode doMkDependHSMode))
  , Flag "E"            (PassFlag (setMode (stopBeforeMode anyHsc)))
  , Flag "C"            (PassFlag (\f -> do setMode (stopBeforeMode HCc) f
                                            addFlag "-fvia-C" f))
  , Flag "S"            (PassFlag (setMode (stopBeforeMode As)))
  , Flag "-make"        (PassFlag (setMode doMakeMode))
  , Flag "-interactive" (PassFlag (setMode doInteractiveMode))
  , Flag "-abi-hash"    (PassFlag (setMode doAbiHashMode))
  , Flag "e"            (SepArg   (\s -> setMode (doEvalMode s) "-e"))
  ]
-- | Record a newly-seen mode flag, resolving conflicts with any mode
-- chosen earlier: -c/--make combine, --help + --interactive becomes the
-- ghci usage, help/version flags dominate, repeated -e accumulates, exact
-- repeats are ignored, and anything else is a flag-mismatch error.
setMode :: Mode -> String -> EwM ModeM ()
setMode newMode newFlag = liftEwM $ do
    (mModeFlag, errs, flags') <- getCmdLineState
    let (modeFlag', errs') =
            case mModeFlag of
            Nothing -> ((newMode, newFlag), errs)
            Just (oldMode, oldFlag) ->
                case (oldMode, newMode) of
                    -- -c/--make are allowed together, and mean --make -no-link
                    _ |  isStopLnMode oldMode && isDoMakeMode newMode
                      || isStopLnMode newMode && isDoMakeMode oldMode ->
                      ((doMakeMode, "--make"), [])
                    -- If we have both --help and --interactive then we
                    -- want showGhciUsage
                    _ | isShowGhcUsageMode oldMode &&
                        isDoInteractiveMode newMode ->
                            ((showGhciUsageMode, oldFlag), [])
                      | isShowGhcUsageMode newMode &&
                        isDoInteractiveMode oldMode ->
                            ((showGhciUsageMode, newFlag), [])
                    -- Otherwise, --help/--version/--numeric-version always win
                      | isDominantFlag oldMode -> ((oldMode, oldFlag), [])
                      | isDominantFlag newMode -> ((newMode, newFlag), [])
                    -- We need to accumulate eval flags like "-e foo -e bar"
                    (Right (Right (DoEval esOld)),
                     Right (Right (DoEval [eNew]))) ->
                        ((Right (Right (DoEval (eNew : esOld))), oldFlag),
                         errs)
                    -- Saying e.g. --interactive --interactive is OK
                    _ | oldFlag == newFlag -> ((oldMode, oldFlag), errs)
                    -- Otherwise, complain
                    _ -> let err = flagMismatchErr oldFlag newFlag
                         in ((oldMode, oldFlag), err : errs)
    putCmdLineState (Just modeFlag', errs', flags')
  where isDominantFlag f = isShowGhcUsageMode   f ||
                           isShowGhciUsageMode  f ||
                           isShowVersionMode    f ||
                           isShowNumVersionMode f
-- | Error text for two mutually exclusive mode flags on one command line.
flagMismatchErr :: String -> String -> String
flagMismatchErr oldFlag newFlag =
  concat ["cannot use `", oldFlag, "' with `", newFlag, "'"]
-- | Push an extra dynamic flag onto the parser state, recording which
-- mode flag caused it (shows up in error locations).
addFlag :: String -> String -> EwM ModeM ()
addFlag s flag = liftEwM $ do
  (mode, errs, fs) <- getCmdLineState
  let located = mkGeneralLocated ("addFlag by " ++ flag ++ " on the commandline") s
  putCmdLineState (mode, errs, located : fs)
-- ----------------------------------------------------------------------------
-- Run --make mode
-- | Implement --make: compile any non-Haskell inputs one-shot first,
-- queue their objects for the linker, then load all Haskell targets
-- through the compilation manager.  Exits non-zero on a failed load.
doMake :: [(String,Maybe Phase)] -> Ghc ()
doMake srcs  = do
    let (hs_srcs, non_hs_srcs) = partition haskellish srcs
        haskellish (f,Nothing) =
          looksLikeModuleName f || isHaskellSrcFilename f || '.' `notElem` f
        haskellish (_,Just phase) =
          phase `notElem` [As, Cc, Cobjc, Cobjcpp, CmmCpp, Cmm, StopLn]
    hsc_env <- GHC.getSession
    -- if we have no haskell sources from which to do a dependency
    -- analysis, then just do one-shot compilation and/or linking.
    -- This means that "ghc Foo.o Bar.o -o baz" links the program as
    -- we expect.
    if (null hs_srcs)
       then liftIO (oneShot hsc_env StopLn srcs)
       else do
    o_files <- mapM (\x -> liftIO $ compileFile hsc_env StopLn x)
                 non_hs_srcs
    liftIO $ mapM_ (consIORef v_Ld_inputs) (reverse o_files)
    targets <- mapM (uncurry GHC.guessTarget) hs_srcs
    GHC.setTargets targets
    ok_flag <- GHC.load LoadAllTargets
    when (failed ok_flag) (liftIO $ exitWith (ExitFailure 1))
    return ()
-- ---------------------------------------------------------------------------
-- --show-iface mode
-- | Implement --show-iface: spin up a minimal compiler environment and
-- pretty-print the given interface file.
doShowIface :: DynFlags -> FilePath -> IO ()
doShowIface dflags file = newHscEnv dflags >>= \env -> showIface env file
-- ---------------------------------------------------------------------------
-- Various banners and verbosity output.
-- | Print the GHCi welcome banner (interpreter builds only) and, at -v2
-- and above, the compiler's build configuration to stderr.
showBanner :: PostLoadMode -> DynFlags -> IO ()
showBanner _postLoadMode dflags = do
   let verb = verbosity dflags
#ifdef GHCI
   -- Show the GHCi banner
   when (isInteractiveMode _postLoadMode && verb >= 1) $ putStrLn ghciWelcomeMsg
#endif
   -- Display details of the configuration in verbose mode
   when (verb >= 2) $
    do hPutStr stderr "Glasgow Haskell Compiler, Version "
       hPutStr stderr cProjectVersion
       hPutStr stderr ", stage "
       hPutStr stderr cStage
       hPutStr stderr " booted by GHC version "
       hPutStrLn stderr cBooterVersion
-- We print out a Read-friendly string, but a prettier one than the
-- Show instance gives us
showInfo :: DynFlags -> IO ()
showInfo dflags =
  putStrLn (" [" ++ intercalate "\n ," (map show (compilerInfo dflags)) ++ "\n ]")
-- | One supported language/extension name per line.
showSupportedExtensions :: IO ()
showSupportedExtensions = mapM_ putStrLn supportedLanguagesAndExtensions
-- | Human-readable version line; cf. 'ShowNumVersion' for the bare number.
showVersion :: IO ()
showVersion = putStrLn (cProjectName ++ ", version " ++ cProjectVersion)
showGhcUsage :: DynFlags -> IO ()
showGhcUsage = showUsage False
showGhciUsage :: DynFlags -> IO ()
showGhciUsage = showUsage True
-- | Print the usage text for ghc (False) or ghci (True), expanding every
-- "$$" in the usage file to the running program's name.
showUsage :: Bool -> DynFlags -> IO ()
showUsage ghci dflags = do
  let usage_path | ghci      = ghciUsagePath dflags
                 | otherwise = ghcUsagePath dflags
  readFile usage_path >>= emit
  where
    emit []          = return ()
    emit ('$':'$':s) = putStr progName >> emit s
    emit (c:s)       = putChar c >> emit s
-- | End-of-run statistics hook; currently only FastString-table stats,
-- and only when -dfaststring-stats is on.
dumpFinalStats :: DynFlags -> IO ()
dumpFinalStats dflags =
  when (dopt Opt_D_faststring_stats dflags) $ dumpFastStringStats dflags
-- | Summarise the global FastString hash table: bucket count, total
-- entries, longest chain, and z-encoding percentages.
dumpFastStringStats :: DynFlags -> IO ()
dumpFastStringStats dflags = do
  buckets <- getFastStringTable
  let (entries, longest, is_z, has_z) = countFS 0 0 0 0 buckets
      msg = text "FastString stats:" $$
            nest 4 (vcat [text "size: " <+> int (length buckets),
                          text "entries: " <+> int entries,
                          text "longest chain: " <+> int longest,
                          text "z-encoded: " <+> (is_z `pcntOf` entries),
                          text "has z-encoding: " <+> (has_z `pcntOf` entries)
                         ])
        -- we usually get more "has z-encoding" than "z-encoded", because
        -- when we z-encode a string it might hash to the exact same string,
        -- which is not counted as "z-encoded".  Only strings whose
        -- Z-encoding is different from the original string are counted in
        -- the "z-encoded" total.
  putMsg dflags msg
  where
   x `pcntOf` y = int ((x * 100) `quot` y) <> char '%'
-- | Walk the hash-table buckets accumulating (total entries, longest
-- chain, z-encoded count, has-z-encoding count).
countFS :: Int -> Int -> Int -> Int -> [[FastString]] -> (Int, Int, Int, Int)
countFS entries longest is_z has_z [] = (entries, longest, is_z, has_z)
countFS entries longest is_z has_z (bucket:rest) =
  countFS (entries + n) (max n longest) (is_z + zs) (has_z + hzs) rest
  where
    n   = length bucket
    zs  = length (filter isZEncoded bucket)
    hzs = length (filter hasZEncoding bucket)
-- -----------------------------------------------------------------------------
-- ABI hash support
{-
ghc --abi-hash Data.Foo System.Bar
Generates a combined hash of the ABI for modules Data.Foo and
System.Bar. The modules must already be compiled, and appropriate -i
options may be necessary in order to find the .hi files.
This is used by Cabal for generating the InstalledPackageId for a
package. The InstalledPackageId must change when the visible ABI of
the package changes, so during registration Cabal calls ghc --abi-hash
to get a hash of the package's ABI.
-}
-- | Implement --abi-hash: resolve each named module, load its interface,
-- and print a combined fingerprint of the interface version plus every
-- module's ABI hash.  Fails with 'CmdLineError' if a module can't be found.
abiHash :: [(String, Maybe Phase)] -> Ghc ()
abiHash strs = do
  hsc_env <- getSession
  let dflags = hsc_dflags hsc_env
  liftIO $ do
  let find_it str = do
         let modname = mkModuleName str
         r <- findImportedModule hsc_env modname Nothing
         case r of
           Found _ m -> return m
           _error    -> ghcError $ CmdLineError $ showSDoc $
                          cannotFindInterface dflags modname r
  mods <- mapM find_it (map fst strs)
  let get_iface modl = loadUserInterface False (text "abiHash") modl
  ifaces <- initIfaceCheck hsc_env $ mapM get_iface mods
  bh <- openBinMem (3*1024) -- just less than a block
  put_ bh opt_HiVersion
    -- package hashes change when the compiler version changes (for now)
    -- see #5328
  mapM_ (put_ bh . mi_mod_hash) ifaces
  f <- fingerprintBinMem bh
  putStrLn (showSDoc (ppr f))
-- -----------------------------------------------------------------------------
-- Util
-- | Abort with a usage error listing every unrecognised flag.
unknownFlagsErr :: [String] -> a
unknownFlagsErr fs = ghcError . UsageError $ "unrecognised flags: " ++ unwords fs
| mcmaniac/ghc | ghc/Main.hs | bsd-3-clause | 29,902 | 0 | 27 | 8,294 | 6,414 | 3,314 | 3,100 | -1 | -1 |
{-# LANGUAGE ScopedTypeVariables, GADTs, RecordWildCards, TypeFamilies, TupleSections #-}
module Llvm.Pass.Visualization where
import Data.Maybe
import qualified Data.Set as Ds
import qualified Data.Map as Dm
import qualified Data.List as L
import qualified Compiler.Hoopl as H
import Compiler.Hoopl
import Llvm.Hir.Data
import Llvm.Hir.Composer
import Llvm.Hir.Cast
import Llvm.Hir.Internalization
import Llvm.Query.HirCxt
import Llvm.Query.Conversion
import Llvm.Query.Type
import Llvm.Hir.Print
import Control.Monad (liftM,foldM, mapM)
import Llvm.Hir.DataLayoutMetrics
{-
This pass inserts code to printout the operands and results of store
and and load instructions at runtime. It should be the last pass
before the native code generation, any tranformation running after
this will skew the printout. The pass currently support visualizing
nodes that are open on both ends. It can be exanded to supporting
nodes of other shapes if the needs arise.
-}
-- | Configuration of the visualization pass.
data VisualPlugin dlm a = VisualPlugin {
  dataLayoutMetrics :: dlm
  -- the prefix added before each visualization string, this might be
  -- needed to avoid naming collision.
  , visPrefix :: Maybe String
  -- the functions whose instructions should be visualized.
  -- If the set does not exist, all functions are visualized
  , includedFunctions :: Maybe (Ds.Set GlobalId)
  -- runtime helpers to declare in the output module (e.g. printers).
  , visFunctions :: [FunctionDeclare]
  -- fold one computational instruction into the set of strings to emit.
  , captureCinsts :: Cinst -> Ds.Set String -> Ds.Set String
  -- rewrite one open/open node into the instrumented node sequence, given
  -- the type environment and the internalized-string constants.
  , visNodeOO :: TypeEnv -> Dm.Map String Const -> (Node a) O O -> [(Node a) O O]
  }
-- | A ready-made plugin that captures store/load/getelementptr and
-- prepends a Comment node (carrying the rendered type or constant) in
-- front of each matched instruction.
-- NOTE(review): the outer case in visNodeOO appears to match only Cnode;
-- confirm it cannot receive other O O nodes (e.g. Comment), otherwise the
-- match is non-exhaustive at runtime.
sampleVisualPlugin :: DataLayoutMetrics dlm => dlm -> VisualPlugin dlm a
sampleVisualPlugin dlm =
  VisualPlugin { dataLayoutMetrics = dlm
               , visPrefix = Just ".visual_"
               , includedFunctions = Nothing
               , visFunctions = []
               , captureCinsts = \comp f -> case comp of
                 I_store{..} -> Ds.insert (render $ printIr comp) f
                 I_load{..} -> Ds.insert (render $ printIr comp) f
                 I_getelementptr{..} -> Ds.insert (render $ printIr comp) f
                 _ -> f
               , visNodeOO = \te mp node -> case node of
                 (Cnode cinst _) -> case Dm.lookup (render $ printIr cinst) mp of
                   Nothing -> [Comment $ Cstring $ render $ printIr (typeof te cinst), node]
                   Just x ->
                     case cinst of
                       I_store{..} -> [Comment $ Cstring $ render $ printIr x, node]
                       I_load{..} -> [Comment $ Cstring $ render $ printIr (typeof te cinst), node]
                       I_getelementptr{..} -> [Comment $ Cstring $ render $ printIr (typeof te cinst), node]
                       _ -> [node]
               }
-- | Dataflow fact: the set of rendered instructions seen so far.
type Visualized = Ds.Set String
emptyVisualized :: Visualized
emptyVisualized = Ds.empty
-- | Join is plain set union; report a change only when the union grows,
-- so Hoopl's fixpoint iteration terminates.
visLattice :: H.DataflowLattice Visualized
visLattice = H.DataflowLattice
  { H.fact_name = "Visualized Instructions"
  , H.fact_bot = emptyVisualized
  , H.fact_join = add
  }
  where add _ (H.OldFact old) (H.NewFact new) = (ch, j)
          where
            j = Ds.union old new
            ch = H.changeIf (j /= old)
-- | A backward analysis (no rewriting) that folds every computational
-- instruction into the 'Visualized' set via the supplied collector.
bwdScan :: H.FuelMonad m => (Cinst -> Ds.Set String -> Ds.Set String) -> H.BwdPass m (Node a) Visualized
bwdScan collectString =
  let bwdTran :: (Node a) e x -> H.Fact x Visualized -> Visualized
      -- terminator: merge the facts flowing back from all successors
      bwdTran n@(Tnode _ _) f = let bs = H.successors n
                                in foldl (\p l -> p `Ds.union` (fromMaybe emptyVisualized $ H.lookupFact l f))
                                   emptyVisualized bs
      -- labels, phis, metadata, comments, entries: pass the fact through
      bwdTran (Lnode _) f = f
      bwdTran (Pnode _ _) f = f
      bwdTran (Mnode _ _) f = f
      bwdTran (Comment _) f = f
      bwdTran (Enode _ _) f = f
      -- computational instruction: let the plugin decide what to record
      bwdTran n@(Cnode comp _) f = collectString comp f
  in H.BwdPass { H.bp_lattice = visLattice
               , H.bp_transfer = H.mkBTransfer bwdTran
               , H.bp_rewrite = H.noBwdRewrite
               }
-- | Run the backward scan over one function definition and return the
-- set of (rendered) instructions to visualize, read at the entry label.
-- Unused bindings are underscored to keep -Wall clean.
scanDefine :: (DataLayoutMetrics dlm, CheckpointMonad m, FuelMonad m) => VisualPlugin dlm a -> TlDefine a -> m Visualized
scanDefine visualPlugin (TlDefine _fn entry graph) =
  do { (_, facts, _) <- H.analyzeAndRewriteBwd (bwdScan (captureCinsts visualPlugin)) (H.JustC [entry]) graph H.mapEmpty
     ; return (fromMaybe emptyVisualized (H.lookupFact entry facts))
     }
-- | Union the per-definition scans over every function in the module;
-- non-definition toplevels contribute nothing.
scanModule :: (DataLayoutMetrics dlm, CheckpointMonad m, FuelMonad m) => VisualPlugin dlm a -> Module a -> m (Ds.Set String)
scanModule visPlugin (Module l) =
  foldM (\p x -> case x of
            ToplevelDefine def@(TlDefine fn _ _) ->
              do { fct <- scanDefine visPlugin def
                 ; return (Ds.union fct p)
                 }
            _ -> return p
        ) Ds.empty l
{- TODO: rewrite these block walkers with foldBlock -}
-- | Expand every middle node of a closed/closed block, keeping the entry
-- label and the terminator in place.
rwBlockCC :: (TypeEnv -> Dm.Map String Const -> (Node a) O O -> [(Node a) O O])
             -> TypeEnv -> Dm.Map String Const -> H.Block (Node a) C C -> H.Block (Node a) C C
rwBlockCC rwNodeOO te mp blk =
  let (entry, mids, exit) = blockSplit blk
      expanded = concatMap (rwNodeOO te mp) (blockToList mids)
  in blockJoin entry (blockFromList expanded) exit
-- | Expand every middle node of a closed/open block, keeping the entry.
rwBlockCO :: (TypeEnv -> Dm.Map String Const -> (Node a) O O -> [(Node a) O O])
             -> TypeEnv -> Dm.Map String Const -> H.Block (Node a) C O -> H.Block (Node a) C O
rwBlockCO rwNodeOO te mp blk =
  let (entry, mids) = blockSplitHead blk
      expanded = concatMap (rwNodeOO te mp) (blockToList mids)
  in blockJoinHead entry (blockFromList expanded)
-- | Expand every node of an open/open block.
rwBlockOO :: (TypeEnv -> Dm.Map String Const -> (Node a) O O -> [(Node a) O O])
             -> TypeEnv -> Dm.Map String Const -> H.Block (Node a) O O -> H.Block (Node a) O O
rwBlockOO rwNodeOO te mp blk =
  blockFromList (concatMap (rwNodeOO te mp) (blockToList blk))
-- | Expand every middle node of an open/closed block, keeping the
-- terminator in place.
rwBlockOC :: (TypeEnv -> Dm.Map String Const -> (Node a) O O -> [(Node a) O O])
             -> TypeEnv -> Dm.Map String Const -> H.Block (Node a) O C -> H.Block (Node a) O C
rwBlockOC rwNodeOO te mp blk =
  let (mids, exit) = blockSplitTail blk
      expanded = concatMap (rwNodeOO te mp) (blockToList mids)
  in blockJoinTail (blockFromList expanded) exit
-- | Dispatch over the block's shape to the matching walker above;
-- empty blocks are returned unchanged.
rwBlock :: (TypeEnv -> Dm.Map String Const -> (Node a) O O -> [(Node a) O O])
           -> TypeEnv -> Dm.Map String Const -> H.Block (Node a) e x -> H.Block (Node a) e x
rwBlock rwNodeOO te mp blk = case blk of
  BlockCO _ _ -> rwBlockCO rwNodeOO te mp blk
  BlockCC _ _ _ -> rwBlockCC rwNodeOO te mp blk
  BlockOC _ _ -> rwBlockOC rwNodeOO te mp blk
  BNil -> blk
  BMiddle _ -> rwBlockOO rwNodeOO te mp blk
  BCat _ _ -> rwBlockOO rwNodeOO te mp blk
  BSnoc _ _ -> rwBlockOO rwNodeOO te mp blk
  BCons _ _ -> rwBlockOO rwNodeOO te mp blk
-- | Rewrite every block of one function body with the node-level rewriter.
rwDefine :: (TypeEnv -> Dm.Map String Const -> (Node a) O O -> [(Node a) O O])
            -> TypeEnv -> Dm.Map String Const -> TlDefine a -> TlDefine a
rwDefine rwNodeOO te gmp (TlDefine fn entry graph) =
  TlDefine fn entry (mapGraphBlocks (rwBlock rwNodeOO te gmp) graph)
{- this is more correct, because we inherit the DataLayoutMetrics of the input module in the output module -}
-- | Rewrite a whole module: prepend the generated string globals, declare
-- the plugin's helper functions, and rewrite the body of every function
-- definition the plugin selects ('includedFunctions' of Nothing means all).
rwModule :: DataLayoutMetrics dlm => VisualPlugin dlm a -> Module a -> Ds.Set String -> Module a
rwModule visPlugin m@(Module l) duM =
  let (globals, duC) = stringnize duM  -- materialise the collected strings as globals
      irCxt = irCxtOfModule m
  in Module $ globals
     ++ (fmap (ToplevelDeclare . TlDeclare) (visFunctions visPlugin))
     ++ (fmap (\x -> case x of
                ToplevelDefine def@(TlDefine fn _ _) ->
                  -- only rewrite definitions selected by the plugin
                  if (maybe True (Ds.member (fi_fun_name fn)) (includedFunctions visPlugin))
                  then ToplevelDefine (rwDefine (visNodeOO visPlugin) (typeEnv $ globalCxt irCxt) duC def)
                  else x
                _ -> x
              ) l)
-- | Turn a set of string literals into top-level LLVM global definitions,
-- plus a map from each original string to the constant that now refers to
-- its global.  Globals are named with the \".visual_\" prefix.
stringnize :: Ds.Set String -> ([Toplevel a], Dm.Map String Const)
stringnize mp =
  let (kvs, tpl) = runSimpleLlvmGlobalGen ".visual_" 0
        (mapM (\c -> do { (DefAndRef _ (T _ c0)) <- internalize c
                        ; return (c, c0)
                        }) (Ds.toList mp))
  in (Dm.elems $ Dm.map llvmDef tpl, Dm.fromList kvs)
-- | Entry point of the pass: scan the module for the strings the plugin
-- wants to emit (run with infinite Hoopl fuel), then rewrite the module
-- with those strings installed as globals.
visualize :: DataLayoutMetrics dlm => VisualPlugin dlm a -> Module a -> Module a
visualize visPlugin m =
  let mp = runSimpleUniqueMonad $ runWithFuel H.infiniteFuel ((scanModule visPlugin m)::H.SimpleFuelMonad (Ds.Set String))
  in rwModule visPlugin m mp
| sanjoy/hLLVM | src/Llvm/Pass/Visualization.hs | bsd-3-clause | 8,625 | 0 | 21 | 2,572 | 2,994 | 1,518 | 1,476 | 142 | 8 |
module Main (main) where
import Control.Arrow
import Crypto.Random
import Data.Ratio
import Crypto.Ed25519.Pure
import Text.Read
import Data.Thyme.Clock
import System.IO
import System.FilePath
import qualified Data.Yaml as Y
import qualified Data.Set as Set
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Juno.Types
-- | Infinite stream of node identities on 127.0.0.1 starting at port
-- 10000; each successive node bumps the port by one and rebuilds the
-- matching TCP address string.
nodes :: [NodeID]
nodes = iterate (\n@(NodeID h p _) -> n {_port = p + 1, _fullAddr = "tcp://" ++ h ++ ":" ++ show (p+1)}) (NodeID "127.0.0.1" 10000 "tcp://127.0.0.1:10000")
-- | Generate @n@ Ed25519 key pairs from the given entropy source.
--
-- Returns @[]@ for any non-positive count.  (The previous version only
-- stopped at exactly 0, so a negative argument recursed forever.)
-- Aborts with 'error' if the underlying generator fails.
makeKeys :: CryptoRandomGen g => Int -> g -> [(PrivateKey,PublicKey)]
makeKeys n g
  | n <= 0    = []
  | otherwise = case generateKeyPair g of
      Left err -> error $ show err
      Right (priv, pub, g') -> (priv, pub) : makeKeys (n-1) g'
-- | Pair every generated key pair with a node identity, producing the
-- private-key and public-key maps used when building configurations.
keyMaps :: [(PrivateKey,PublicKey)] -> (Map NodeID PrivateKey, Map NodeID PublicKey)
keyMaps ls = (Map.fromList (zip nodes privs), Map.fromList (zip nodes pubs))
  where (privs, pubs) = unzip ls
-- | Interactively ask for cluster size, client count and follower-logging
-- flag, generate one key pair per node, then write a YAML config file per
-- cluster node and per client under @conf/@.
main :: IO ()
main = do
  putStrLn "Number of cluster nodes?"
  hFlush stdout
  mn <- fmap readMaybe getLine
  putStrLn "Number of client nodes?"
  hFlush stdout
  cn <- fmap readMaybe getLine
  putStrLn "Enable logging for Followers (True/False)?"
  hFlush stdout
  debugFollower <- fmap readMaybe getLine
  case (mn,cn,debugFollower) of
    (Just n,Just c,Just df)-> do
      g <- newGenIO :: IO SystemRandom
      -- one key pair per node: the first n are cluster nodes, the next c clients
      keyMaps' <- return $! keyMaps $ makeKeys (n+c) g
      clientIds <- return $ take c $ drop n nodes
      let isAClient nid _ = Set.member nid (Set.fromList clientIds)
      let isNotAClient nid _ = not $ Set.member nid (Set.fromList clientIds)
      -- split the (priv, pub) maps into the cluster and client partitions
      clusterKeyMaps <- return $ (Map.filterWithKey isNotAClient *** Map.filterWithKey isNotAClient) keyMaps'
      clientKeyMaps <- return $ (Map.filterWithKey isAClient *** Map.filterWithKey isAClient) keyMaps'
      clusterConfs <- return (createClusterConfig df clusterKeyMaps (snd clientKeyMaps) <$> take n nodes)
      clientConfs <- return (createClientConfig df (snd clusterKeyMaps) clientKeyMaps <$> clientIds)
      -- file names are keyed by each node's port number
      mapM_ (\c' -> Y.encodeFile ("conf" </> show (_port $ _nodeId c') ++ "-cluster.yaml") c') clusterConfs
      mapM_ (\c' -> Y.encodeFile ("conf" </> show (_port $ _nodeId c') ++ "-client.yaml") c') clientConfs
    _ -> putStrLn "Failed to read either input into a number, please try again"
-- | Build the configuration for one consensus (cluster) node: peers are
-- every other cluster node, and client keys include the cluster's own
-- public keys so nodes can sign API requests as clients.
createClusterConfig :: Bool -> (Map NodeID PrivateKey, Map NodeID PublicKey) -> Map NodeID PublicKey -> NodeID -> Config
createClusterConfig debugFollower (privMap, pubMap) clientPubMap nid = Config
  { _otherNodes = Set.delete nid $ Map.keysSet pubMap
  , _nodeId = nid
  , _publicKeys = pubMap
  , _clientPublicKeys = Map.union pubMap clientPubMap -- NOTE: [2016 04 26] all nodes are client (support API signing)
  , _myPrivateKey = privMap Map.! nid
  , _myPublicKey = pubMap Map.! nid
  , _electionTimeoutRange = (3000000,6000000)
  , _heartbeatTimeout = 1500000 -- seems like a while...
  , _batchTimeDelta = fromSeconds' (1%100) -- default to 10ms
  , _enableDebug = True
  , _clientTimeoutLimit = 50000
  , _dontDebugFollower = not debugFollower
  , _apiPort = 8000
  }
-- | Build the configuration for one client node: its peers are all cluster
-- nodes, and debugging is disabled (unlike cluster configs).
createClientConfig :: Bool -> Map NodeID PublicKey -> (Map NodeID PrivateKey, Map NodeID PublicKey) -> NodeID -> Config
createClientConfig debugFollower clusterPubMap (privMap, pubMap) nid = Config
  { _otherNodes = Map.keysSet clusterPubMap
  , _nodeId = nid
  , _publicKeys = clusterPubMap
  , _clientPublicKeys = pubMap
  , _myPrivateKey = privMap Map.! nid
  , _myPublicKey = pubMap Map.! nid
  , _electionTimeoutRange = (3000000,6000000)
  , _heartbeatTimeout = 1500000
  , _batchTimeDelta = fromSeconds' (1%100) -- default to 10ms
  , _enableDebug = False
  , _clientTimeoutLimit = 50000
  , _dontDebugFollower = not debugFollower
  , _apiPort = 8000
  }
| buckie/juno | executables/GenerateConfigFiles.hs | bsd-3-clause | 3,927 | 0 | 22 | 881 | 1,237 | 652 | 585 | 78 | 2 |
{-# LINE 1 "Data.Type.Bool.hs" #-}
{-# LANGUAGE Safe #-}
{-# LANGUAGE TypeFamilies, TypeOperators, DataKinds, NoImplicitPrelude,
PolyKinds #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Type.Bool
-- License : BSD-style (see the LICENSE file in the distribution)
--
-- Maintainer : libraries@haskell.org
-- Stability : experimental
-- Portability : not portable
--
-- Basic operations on type-level Booleans.
--
-- @since 4.7.0.0
-----------------------------------------------------------------------------
module Data.Type.Bool (
If, type (&&), type (||), Not
) where
import Data.Bool
-- This needs to be in base because (&&) is used in Data.Type.Equality.
-- The other functions do not need to be in base, but seemed to be appropriate
-- here.
-- | Type-level "If". @If True a b@ ==> @a@; @If False a b@ ==> @b@
--
-- Reduction is driven entirely by the condition: the family only reduces
-- once @cond@ is known to be 'True or 'False.
type family If cond tru fls where
  If 'True tru fls = tru
  If 'False tru fls = fls
-- | Type-level "and"
--
-- The equations matching a variable on either side (and the reflexive
-- final case) let GHC reduce as soon as either operand is known.
type family a && b where
  'False && a = 'False
  'True && a = a
  a && 'False = 'False
  a && 'True = a
  a && a = a
infixr 3 &&
-- | Type-level "or"
--
-- Mirrors '(&&)': extra equations allow reduction when only one operand
-- is known, plus a reflexive fallback.
type family a || b where
  'False || a = a
  'True || a = 'True
  a || 'False = a
  a || 'True = 'True
  a || a = a
infixr 2 ||
-- | Type-level "not"
--
-- Only reduces for a fully-known boolean argument.
type family Not a where
  Not 'False = 'True
  Not 'True = 'False
| phischu/fragnix | builtins/base/Data.Type.Bool.hs | bsd-3-clause | 1,455 | 0 | 7 | 371 | 277 | 164 | 113 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
module Graph.VC.Input where
-- $Id$
import Autolib.ToDoc
import Autolib.Reader
import Data.Typeable
import SAT.Types
-- | One exercise instance: a propositional formula together with the
-- size used when the result is displayed.
data Input = Input
          { formel :: Formel -- ^ the SAT formula (German: "Formel")
          , anzeige_groesse :: Int -- ^ display size ("Anzeigegroesse")
          }
     deriving ( Typeable )
$(derives [makeReader, makeToDoc] [''Input])
-- | Default example instance: a small 3-CNF formula displayed at size 6.
i0 :: Input
i0 = Input
    { anzeige_groesse = 6
    , formel = read "(x || y || z) && (! x || y || !z )"
    }
-- local variables:
-- mode: haskell
-- end:
| florianpilz/autotool | src/Graph/VC/Input.hs | gpl-2.0 | 476 | 7 | 9 | 123 | 117 | 71 | 46 | 14 | 1 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ar-SA">
<title>شكل معالج | انطلق امتداد</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>محتوى</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>الفهرس</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>بحث</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>المفضلة</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | veggiespam/zap-extensions | addOns/formhandler/src/main/javahelp/org/zaproxy/zap/extension/formhandler/resources/help_ar_SA/helpset_ar_SA.hs | apache-2.0 | 1,003 | 90 | 61 | 160 | 398 | 203 | 195 | -1 | -1 |
module HOCon where
import CLaSH.Prelude
-- | Wrap every element of an 8-element vector in 'Just'.
topEntity :: Vec 8 Int -> Vec 8 (Maybe Int)
topEntity xs = map Just xs
| christiaanb/clash-compiler | tests/shouldwork/Vector/HOCon.hs | bsd-2-clause | 107 | 0 | 8 | 21 | 42 | 22 | 20 | -1 | -1 |
{-# OPTIONS_HADDOCK hide #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.GL.DataType
-- Copyright : (c) Sven Panne 2002-2013
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
-- This is a purely internal module for (un-)marshaling DataType.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.GL.DataType (
DataType(..), marshalDataType, unmarshalDataType,
DataTypeType(..), marshalDataTypeType, unmarshalDataTypeType
) where
import Graphics.Rendering.OpenGL.Raw
--------------------------------------------------------------------------------
-- basically table 3.2 (pixel data type parameter) plus a few additions
-- | Component data types for pixel transfers and vertex arrays, covering
-- the core packed formats plus a few extension/deprecated extras (see the
-- trailing comments on individual constructors).
data DataType =
     UnsignedByte
   | Byte
   | UnsignedShort
   | Short
   | UnsignedInt
   | Int
   | HalfFloat
   | Float
   | UnsignedByte332
   | UnsignedByte233Rev
   | UnsignedShort565
   | UnsignedShort565Rev
   | UnsignedShort4444
   | UnsignedShort4444Rev
   | UnsignedShort5551
   | UnsignedShort1555Rev
   | UnsignedInt8888
   | UnsignedInt8888Rev
   | UnsignedInt1010102
   | UnsignedInt2101010Rev
   | UnsignedInt248
   | UnsignedInt10f11f11fRev
   | UnsignedInt5999Rev
   | Float32UnsignedInt248Rev
   | Bitmap            -- pixel data, deprecated in 3.1
   | UnsignedShort88   -- MESA_ycbcr_texture/APPLE_ycbcr_422
   | UnsignedShort88Rev -- MESA_ycbcr_texture/APPLE_ycbcr_422
   | Double            -- vertex arrays (EXT_vertex_array, now core)
   | TwoBytes          -- CallLists
   | ThreeBytes        -- CallLists
   | FourBytes         -- CallLists
   deriving ( Eq, Ord, Show )
-- | Map a 'DataType' to its raw GLenum token.  Total: every constructor
-- has an equation.  The two ycbcr cases use hard-coded tokens because the
-- APPLE_ycbcr_422 extension tokens are not exposed by the raw binding.
marshalDataType :: DataType -> GLenum
marshalDataType x = case x of
   UnsignedByte -> gl_UNSIGNED_BYTE
   Byte -> gl_BYTE
   UnsignedShort -> gl_UNSIGNED_SHORT
   Short -> gl_SHORT
   UnsignedInt -> gl_UNSIGNED_INT
   Int -> gl_INT
   HalfFloat -> gl_HALF_FLOAT
   Float -> gl_FLOAT
   UnsignedByte332 -> gl_UNSIGNED_BYTE_3_3_2
   UnsignedByte233Rev -> gl_UNSIGNED_BYTE_2_3_3_REV
   UnsignedShort565 -> gl_UNSIGNED_SHORT_5_6_5
   UnsignedShort565Rev -> gl_UNSIGNED_SHORT_5_6_5_REV
   UnsignedShort4444 -> gl_UNSIGNED_SHORT_4_4_4_4
   UnsignedShort4444Rev -> gl_UNSIGNED_SHORT_4_4_4_4_REV
   UnsignedShort5551 -> gl_UNSIGNED_SHORT_5_5_5_1
   UnsignedShort1555Rev -> gl_UNSIGNED_SHORT_1_5_5_5_REV
   UnsignedInt8888 -> gl_UNSIGNED_INT_8_8_8_8
   UnsignedInt8888Rev -> gl_UNSIGNED_INT_8_8_8_8_REV
   UnsignedInt1010102 -> gl_UNSIGNED_INT_10_10_10_2
   UnsignedInt2101010Rev -> gl_UNSIGNED_INT_2_10_10_10_REV
   UnsignedInt248 -> gl_UNSIGNED_INT_24_8
   UnsignedInt10f11f11fRev -> gl_UNSIGNED_INT_10F_11F_11F_REV
   UnsignedInt5999Rev -> gl_UNSIGNED_INT_5_9_9_9_REV
   Float32UnsignedInt248Rev -> gl_FLOAT_32_UNSIGNED_INT_24_8_REV
   Bitmap -> gl_BITMAP
   -- TODO: Use UNSIGNED_SHORT_8_8_APPLE from APPLE_ycbcr_422 extension
   UnsignedShort88 -> 0x85ba
   -- TODO: Use UNSIGNED_SHORT_8_8_REV_APPLE from APPLE_ycbcr_422 extension
   UnsignedShort88Rev -> 0x85bb
   Double -> gl_DOUBLE
   TwoBytes -> gl_2_BYTES
   ThreeBytes -> gl_3_BYTES
   FourBytes -> gl_4_BYTES
-- | Inverse of 'marshalDataType'.  Partial: calls 'error' on any token
-- that does not correspond to a 'DataType' constructor.
unmarshalDataType :: GLenum -> DataType
unmarshalDataType x
   | x == gl_UNSIGNED_BYTE = UnsignedByte
   | x == gl_BYTE = Byte
   | x == gl_UNSIGNED_SHORT = UnsignedShort
   | x == gl_SHORT = Short
   | x == gl_UNSIGNED_INT = UnsignedInt
   | x == gl_INT = Int
   | x == gl_HALF_FLOAT = HalfFloat
   | x == gl_FLOAT = Float
   | x == gl_UNSIGNED_BYTE_3_3_2 = UnsignedByte332
   | x == gl_UNSIGNED_BYTE_2_3_3_REV = UnsignedByte233Rev
   | x == gl_UNSIGNED_SHORT_5_6_5 = UnsignedShort565
   | x == gl_UNSIGNED_SHORT_5_6_5_REV = UnsignedShort565Rev
   | x == gl_UNSIGNED_SHORT_4_4_4_4 = UnsignedShort4444
   | x == gl_UNSIGNED_SHORT_4_4_4_4_REV = UnsignedShort4444Rev
   | x == gl_UNSIGNED_SHORT_5_5_5_1 = UnsignedShort5551
   | x == gl_UNSIGNED_SHORT_1_5_5_5_REV = UnsignedShort1555Rev
   | x == gl_UNSIGNED_INT_8_8_8_8 = UnsignedInt8888
   | x == gl_UNSIGNED_INT_8_8_8_8_REV = UnsignedInt8888Rev
   | x == gl_UNSIGNED_INT_10_10_10_2 = UnsignedInt1010102
   | x == gl_UNSIGNED_INT_2_10_10_10_REV = UnsignedInt2101010Rev
   | x == gl_UNSIGNED_INT_24_8 = UnsignedInt248
   | x == gl_UNSIGNED_INT_10F_11F_11F_REV = UnsignedInt10f11f11fRev
   | x == gl_UNSIGNED_INT_5_9_9_9_REV = UnsignedInt5999Rev
   | x == gl_FLOAT_32_UNSIGNED_INT_24_8_REV = Float32UnsignedInt248Rev
   | x == gl_BITMAP = Bitmap
   -- TODO: Use UNSIGNED_SHORT_8_8_APPLE from APPLE_ycbcr_422 extension
   | x == 0x85ba = UnsignedShort88
   -- TODO: Use UNSIGNED_SHORT_8_8_REV_APPLE from APPLE_ycbcr_422 extension
   | x == 0x85bb = UnsignedShort88Rev
   | x == gl_DOUBLE = Double
   | x == gl_2_BYTES = TwoBytes
   | x == gl_3_BYTES = ThreeBytes
   | x == gl_4_BYTES = FourBytes
   | otherwise = error ("unmarshalDataType: illegal value " ++ show x)
-- | Component type classifications (as returned by GL_*_TYPE-style
-- queries).  Note: no deriving clause in the original; instances are
-- intentionally absent.
data DataTypeType
   = TNone
   | TSignedNormalized
   | TUnsignedNormalized
   | TFloat
   | TInt
   | TUnsignedInt
-- | Map a 'DataTypeType' to its raw GLenum token.
marshalDataTypeType :: DataTypeType -> GLenum
marshalDataTypeType TNone = gl_NONE
marshalDataTypeType TSignedNormalized = gl_SIGNED_NORMALIZED
marshalDataTypeType TUnsignedNormalized = gl_UNSIGNED_NORMALIZED
marshalDataTypeType TFloat = gl_FLOAT
marshalDataTypeType TInt = gl_INT
marshalDataTypeType TUnsignedInt = gl_UNSIGNED_INT
-- | Inverse of 'marshalDataTypeType'; calls 'error' on unknown tokens.
unmarshalDataTypeType :: GLenum -> DataTypeType
unmarshalDataTypeType x =
   case lookup x known of
      Just t -> t
      Nothing -> error $ "unmarshalDataTypeType: illegal value " ++ show x
   where
      known = [ (gl_NONE, TNone)
              , (gl_SIGNED_NORMALIZED, TSignedNormalized)
              , (gl_UNSIGNED_NORMALIZED, TUnsignedNormalized)
              , (gl_FLOAT, TFloat)
              , (gl_INT, TInt)
              , (gl_UNSIGNED_INT, TUnsignedInt)
              ]
| IreneKnapp/direct-opengl | Graphics/Rendering/OpenGL/GL/DataType.hs | bsd-3-clause | 5,758 | 0 | 9 | 1,096 | 1,020 | 532 | 488 | 129 | 31 |
module MAAM.MonadStep where
import FP
-- | A monad @m@ whose semantics can be run as a transition over a
-- state-space functor @ς@: 'mstepγ' lifts one monadic step into a map on
-- the whole state space.  The fundep @m -> ς@ fixes ς per monad.
class MonadStep ς m | m -> ς where
  mstepγ :: (a -> m b) -> ς a -> ς b
-- | 'mstepγ' with an explicit proxy to pin down the monad @m@.
mstepγP :: (MonadStep ς m) => P m -> (a -> m b) -> ς a -> ς b
mstepγP P = mstepγ
-- Identity: the state space of ID is ID itself; stepping is just 'extend'.
instance MonadStep ID ID where
  mstepγ :: (a -> ID b) -> (ID a -> ID b)
  mstepγ = extend
-- State: pair the state-space of the underlying monad with the state 𝓈,
-- threading the state through the stepped function.
instance (MonadStep ς m, Functor m) => MonadStep (ς :.: (,) 𝓈) (StateT 𝓈 m) where
  mstepγ :: (a -> StateT 𝓈 m b) -> ((ς :.: (,) 𝓈) a -> (ς :.: (,) 𝓈) b)
  mstepγ f = onComposeIso $ mstepγ $ \ (s, a) -> unStateT (f a) s
-- AddStateT is a newtype over StateT, so its instance is derived.
deriving instance (MonadStep ς m, Functor m) => MonadStep (ς :.: (,) 𝓈1) (AddStateT 𝓈12 𝓈1 m)
-- Nondeterminism: step each alternative and join the results in the
-- underlying monad's lattice.
instance (MonadStep ς m, Functorial JoinLattice m) => MonadStep (ς :.: ListSet) (ListSetT m) where
  mstepγ :: forall a b. (a -> ListSetT m b) -> (ς :.: ListSet) a -> (ς :.: ListSet) b
  mstepγ f = onComposeIso $ mstepγ_m ff
    where
      -- monomorphised 'mstepγ' for the underlying monad m
      mstepγ_m :: forall a' b'. (a' -> m b') -> (ς a' -> ς b')
      mstepγ_m = mstepγ
      ff :: ListSet a -> m (ListSet b)
      ff = with (functorial :: W (JoinLattice (m (ListSet b)))) $
        joins . map (unListSetT . f)
-- Nondeterminism with top: like the ListSet instance, but a ⊤ alternative
-- set collapses to the underlying monad's 'top'.
instance (MonadStep ς m, Functorial JoinLattice m, Functorial Top m) => MonadStep (ς :.: ListSetWithTop) (ListSetWithTopT m) where
  mstepγ :: forall a b. (a -> ListSetWithTopT m b) -> (ς :.: ListSetWithTop) a -> (ς :.: ListSetWithTop) b
  mstepγ f = onComposeIso $ mstepγ_m ff
    where
      -- monomorphised 'mstepγ' for the underlying monad m
      mstepγ_m :: forall a' b'. (a' -> m b') -> (ς a' -> ς b')
      mstepγ_m = mstepγ
      ff :: ListSetWithTop a -> m (ListSetWithTop b)
      ff =
        with (functorial :: W (JoinLattice (m (ListSetWithTop b)))) $
        with (functorial :: W (Top (m (ListSetWithTop b)))) $
        listSetWithTopElim top joins . map (unListSetWithTopT . f)
-- | Push 'ID' inside a 'ListSet'.
instance Commute ID ListSet where
  commute :: ID (ListSet a) -> ListSet (ID a)
  commute = map ID . unID
-- | Distribute a state component over every alternative.
instance (JoinLattice 𝓈) => Commute ((,) 𝓈) ListSet where
  commute :: (𝓈, ListSet a) -> ListSet (𝓈, a)
  commute (s, xs) = map (s,) xs
-- | Composed functors commute when each layer does: commute the inner
-- layer pointwise, then the outer one.
instance (Commute t ListSet, Commute u ListSet, Functor t) => Commute (t :.: u) ListSet where
  commute :: (t :.: u) (ListSet a) -> ListSet ((t :.: u) a)
  commute = map Compose . commute . map commute . unCompose
-- | Wrapper that transports a 'MonadStep' instance along an isomorphism
-- ς1 ≅ ς2 of state spaces: step in ς2 and convert at the boundaries.
newtype IsoMonadStep ς1 ς2 m a = IsoMonadStep { runIsoMonadStep :: m a }
  deriving
    ( Unit, Functor, Product, Applicative, Bind, Monad
    , MonadBot, MonadPlus, MonadTop
    , MonadState s
    )
instance (MonadStep ς2 m, Isomorphism2 ς1 ς2) => MonadStep ς1 (IsoMonadStep ς1 ς2 m) where
  mstepγ :: (a -> IsoMonadStep ς1 ς2 m b) -> (ς1 a -> ς1 b)
  mstepγ f = isofrom2 . mstepγ (runIsoMonadStep . f) . isoto2
| FranklinChen/maam | src/MAAM/MonadStep.hs | bsd-3-clause | 2,744 | 78 | 16 | 643 | 1,290 | 667 | 623 | -1 | -1 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section{SetLevels}
***************************
Overview
***************************
1. We attach binding levels to Core bindings, in preparation for floating
outwards (@FloatOut@).
2. We also let-ify many expressions (notably case scrutinees), so they
   will have a fighting chance of being floated sensibly.
3. We clone the binders of any floatable let-binding, so that when it is
floated out it will be unique. (This used to be done by the simplifier
but the latter now only ensures that there's no shadowing; indeed, even
that may not be true.)
NOTE: this can't be done using the uniqAway idea, because the variable
must be unique in the whole program, not just its current scope,
because two variables in different scopes may float out to the
same top level place
NOTE: Very tiresomely, we must apply this substitution to
the rules stored inside a variable too.
We do *not* clone top-level bindings, because some of them must not change,
but we *do* clone bindings that are heading for the top level
4. In the expression
case x of wild { p -> ...wild... }
we substitute x for wild in the RHS of the case alternatives:
case x of wild { p -> ...x... }
This means that a sub-expression involving x is not "trapped" inside the RHS.
And it's not inconvenient because we already have a substitution.
Note that this is EXACTLY BACKWARDS from the what the simplifier does.
The simplifier tries to get rid of occurrences of x, in favour of wild,
in the hope that there will only be one remaining occurrence of x, namely
the scrutinee of the case, and we can inline it.
-}
{-# LANGUAGE CPP #-}
module Eta.SimplCore.SetLevels (
setLevels,
Level(..), tOP_LEVEL,
LevelledBind, LevelledExpr, LevelledBndr,
FloatSpec(..), floatSpecLevel,
incMinorLvl, ltMajLvl, ltLvl, isTopLvl
) where
#include "HsVersions.h"
import Eta.Core.CoreSyn
import Eta.SimplCore.CoreMonad ( FloatOutSwitches(..) )
import Eta.Core.CoreUtils ( exprType, exprOkForSpeculation, exprIsBottom )
import Eta.Core.CoreArity ( exprBotStrictness_maybe )
import Eta.Core.CoreFVs -- all of it
import Eta.Types.Coercion ( isCoVar )
import Eta.Core.CoreSubst ( Subst, emptySubst, substBndrs, substRecBndrs,
extendIdSubst, extendSubstWithVar, cloneBndrs,
cloneRecIdBndrs, substTy, substCo, substDVarSet )
import Eta.Core.MkCore ( sortQuantVars )
import Eta.BasicTypes.Id
import Eta.BasicTypes.IdInfo
import Eta.BasicTypes.Var
import Eta.BasicTypes.VarSet
import Eta.BasicTypes.VarEnv
import Eta.BasicTypes.Literal ( litIsTrivial )
import Eta.BasicTypes.Demand ( StrictSig )
import Eta.BasicTypes.Name ( getOccName, mkSystemVarName )
import Eta.BasicTypes.OccName ( occNameString )
import Eta.Types.Type ( isUnLiftedType, Type, mkPiTypes )
import Eta.BasicTypes.BasicTypes ( Arity, RecFlag(..) )
import Eta.BasicTypes.UniqSupply
import Eta.Utils.Util
import Eta.Utils.Outputable
import Eta.Utils.FastString
import Eta.Utils.UniqDFM ( nonDetFoldUDFM )
import Eta.Utils.FV
{-
************************************************************************
* *
\subsection{Level numbers}
* *
************************************************************************
-}
-- Core terms tagged with the 'FloatSpec' decision made for each binder.
type LevelledExpr = TaggedExpr FloatSpec
type LevelledBind = TaggedBind FloatSpec
type LevelledBndr = TaggedBndr FloatSpec
data Level = Level Int  -- Major level: number of enclosing value lambdas
                   Int  -- Minor level: number of big-lambda and/or case
                        -- expressions between here and the nearest
                        -- enclosing value lambda
data FloatSpec
  = FloatMe Level       -- Float to just inside the binding
                        --    tagged with this level
  | StayPut Level       -- Stay where it is; binding is
                        --    tagged with this level
-- | The level a binding will end up at, whether it floats or stays.
floatSpecLevel :: FloatSpec -> Level
floatSpecLevel (FloatMe l) = l
floatSpecLevel (StayPut l) = l
{-
The {\em level number} on a (type-)lambda-bound variable is the
nesting depth of the (type-)lambda which binds it. The outermost lambda
has level 1, so (Level 0 0) means that the variable is bound outside any lambda.
On an expression, it's the maximum level number of its free
(type-)variables. On a let(rec)-bound variable, it's the level of its
RHS. On a case-bound variable, it's the number of enclosing lambdas.
Top-level variables: level~0. Those bound on the RHS of a top-level
definition but ``before'' a lambda; e.g., the \tr{x} in (levels shown
as ``subscripts'')...
\begin{verbatim}
a_0 = let b_? = ... in
x_1 = ... b ... in ...
\end{verbatim}
The main function @lvlExpr@ carries a ``context level'' (@ctxt_lvl@).
That's meant to be the level number of the enclosing binder in the
final (floated) program. If the level number of a sub-expression is
less than that of the context, then it might be worth let-binding the
sub-expression so that it will indeed float.
If you can float to level @Level 0 0@ worth doing so because then your
allocation becomes static instead of dynamic. We always start with
context @Level 0 0@.
Note [FloatOut inside INLINE]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@InlineCtxt@ very similar to @Level 0 0@, but is used for one purpose:
to say "don't float anything out of here". That's exactly what we
want for the body of an INLINE, where we don't want to float anything
out at all. See notes with lvlMFE below.
But, check this out:
-- At one time I tried the effect of not float anything out of an InlineMe,
-- but it sometimes works badly. For example, consider PrelArr.done. It
-- has the form __inline (\d. e)
-- where e doesn't mention d. If we float this to
-- __inline (let x = e in \d. x)
-- things are bad. The inliner doesn't even inline it because it doesn't look
-- like a head-normal form. So it seems a lesser evil to let things float.
-- In SetLevels we do set the context to (Level 0 0) when we get to an InlineMe
-- which discourages floating out.
So the conclusion is: don't do any floating at all inside an InlineMe.
(In the above example, don't float the {x=e} out of the \d.)
One particular case is that of workers: we don't want to float the
call to the worker outside the wrapper, otherwise the worker might get
inlined into the floated expression, and an importing module won't see
the worker at all.
-}
-- | Floatable bindings print with an @F@ prefix before their target level.
instance Outputable FloatSpec where
  ppr (FloatMe l) = char 'F' <> ppr l
  ppr (StayPut l) = ppr l
-- | The outermost level: outside any value lambda.
tOP_LEVEL :: Level
tOP_LEVEL = Level 0 0
-- | Entering a value lambda: bump the major level and reset the minor one.
incMajorLvl :: Level -> Level
incMajorLvl (Level major _) = Level (major + 1) 0
-- | Entering a big lambda or case: bump only the minor level.
incMinorLvl :: Level -> Level
incMinorLvl (Level major minor) = Level major (minor+1)
-- | The lexicographically larger of two levels (major first, then minor).
maxLvl :: Level -> Level -> Level
maxLvl l1@(Level maj1 min1) l2@(Level maj2 min2)
  | maj1 > maj2 = l1
  | maj1 < maj2 = l2
  | min1 > min2 = l1
  | otherwise   = l2
-- | Strict lexicographic ordering on levels (major first, then minor).
ltLvl :: Level -> Level -> Bool
ltLvl (Level maj1 min1) (Level maj2 min2)
  | maj1 /= maj2 = maj1 < maj2
  | otherwise    = min1 < min2
-- | Tells if one level belongs to a different *lambda* level to another,
-- i.e. compares only the major (value-lambda) components.
ltMajLvl :: Level -> Level -> Bool
ltMajLvl (Level maj1 _) (Level maj2 _) = maj1 < maj2
-- | True exactly for 'tOP_LEVEL' (major and minor both zero).
isTopLvl :: Level -> Bool
isTopLvl (Level maj min) = maj == 0 && min == 0
-- | Levels print as @<major,minor>@.
instance Outputable Level where
  ppr (Level maj min) = hcat [ char '<', int maj, char ',', int min, char '>' ]
-- Component-wise equality (written by hand rather than derived).
instance Eq Level where
  (Level maj1 min1) == (Level maj2 min2) = maj1 == maj2 && min1 == min2
{-
************************************************************************
* *
\subsection{Main level-setting code}
* *
************************************************************************
-}
-- | Annotate every binding of the program with the 'Level' it should
-- float to.  The unique supply is used to clone binders that are going
-- to be floated.  Bindings are processed left to right, threading the
-- accumulated environment.
setLevels :: FloatOutSwitches
          -> CoreProgram
          -> UniqSupply
          -> [LevelledBind]
setLevels float_lams binds us
  = initLvl us (do_them init_env binds)
  where
    init_env = initialEnv float_lams
    do_them :: LevelEnv -> [CoreBind] -> LvlM [LevelledBind]
    do_them _ [] = return []
    do_them env (b:bs)
      = do { (lvld_bind, env') <- lvlTopBind env b
           ; lvld_binds <- do_them env' bs
           ; return (lvld_bind : lvld_binds) }
-- | Level one top-level binding.  Binders are tagged with 'tOP_LEVEL';
-- for recursive groups the binders are brought into scope before the
-- right-hand sides are levelled.
lvlTopBind :: LevelEnv -> Bind Id -> LvlM (LevelledBind, LevelEnv)
lvlTopBind env (NonRec bndr rhs)
  = do { rhs' <- lvlExpr env (freeVars rhs)
       ; let (env', [bndr']) = substAndLvlBndrs NonRecursive env tOP_LEVEL [bndr]
       ; return (NonRec bndr' rhs', env') }
lvlTopBind env (Rec pairs)
  = do let (bndrs,rhss) = unzip pairs
           (env', bndrs') = substAndLvlBndrs Recursive env tOP_LEVEL bndrs
       rhss' <- mapM (lvlExpr env' . freeVars) rhss
       return (Rec (bndrs' `zip` rhss'), env')
{-
************************************************************************
* *
\subsection{Setting expression levels}
* *
************************************************************************
Note [Floating over-saturated applications]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we see (f x y), and (f x) is a redex (ie f's arity is 1),
we call (f x) an "over-saturated application"
Should we float out an over-sat app, if can escape a value lambda?
It is sometimes very beneficial (-7% runtime -4% alloc over nofib -O2).
But we don't want to do it for class selectors, because the work saved
is minimal, and the extra local thunks allocated cost money.
Arguably we could float even class-op applications if they were going to
top level -- but then they must be applied to a constant dictionary and
will almost certainly be optimised away anyway.
-}
-- | Level an expression in the given environment.  Sub-expressions that
-- are maximal free expressions go via 'lvlMFE' so they may be let-bound
-- and floated; everything else recurses directly.
lvlExpr :: LevelEnv             -- Context
        -> CoreExprWithFVs      -- Input expression
        -> LvlM LevelledExpr    -- Result expression
{-
The @ctxt_lvl@ is, roughly, the level of the innermost enclosing
binder.  Here's an example
        v = \x -> ...\y -> let r = case (..x..) of
                                        ..x..
                           in ..
When looking at the rhs of @r@, @ctxt_lvl@ will be 1 because that's
the level of @r@, even though it's inside a level-2 @\y@.  It's
important that @ctxt_lvl@ is 1 and not 2 in @r@'s rhs, because we
don't want @lvlExpr@ to turn the scrutinee of the @case@ into an MFE
--- because it isn't a *maximal* free expression.
If there were another lambda in @r@'s rhs, it would get level-2 as well.
-}
lvlExpr env (_, AnnType ty)     = return (Type (substTy (le_subst env) ty))
lvlExpr env (_, AnnCoercion co) = return (Coercion (substCo (le_subst env) co))
lvlExpr env (_, AnnVar v)       = return (lookupVar env v)
lvlExpr _   (_, AnnLit lit)     = return (Lit lit)
lvlExpr env (_, AnnCast expr (_, co)) = do
    expr' <- lvlExpr env expr
    return (Cast expr' (substCo (le_subst env) co))
lvlExpr env (_, AnnTick tickish expr) = do
    expr' <- lvlExpr env expr
    return (Tick tickish expr')
lvlExpr env expr@(_, AnnApp _ _) = do
    let
      (fun, args) = collectAnnArgs expr
    --
    case fun of
      -- Over-saturated application of a known function: float the PAP
      -- separately from the extra arguments.
      (_, AnnVar f) | floatOverSat env   -- See Note [Floating over-saturated applications]
                    , arity > 0
                    , arity < n_val_args
                    , Nothing <- isClassOpId_maybe f ->
        do
         let (lapp, rargs) = left (n_val_args - arity) expr []
         rargs' <- mapM (lvlMFE False env) rargs
         lapp' <- lvlMFE False env lapp
         return (foldl App lapp' rargs')
        where
         n_val_args = count (isValArg . deAnnotate) args
         arity = idArity f
         -- separate out the PAP that we are floating from the extra
         -- arguments, by traversing the spine until we have collected
         -- (n_val_args - arity) value arguments.
         left 0 e rargs = (e, rargs)
         left n (_, AnnApp f a) rargs
            | isValArg (deAnnotate a) = left (n-1) f (a:rargs)
            | otherwise = left n f (a:rargs)
         left _ _ _ = panic "SetLevels.lvlExpr.left"
      -- No PAPs that we can float: just carry on with the
      -- arguments and the function.
      _otherwise -> do
         args' <- mapM (lvlMFE False env) args
         fun' <- lvlExpr env fun
         return (foldl App fun' args')
-- We don't split adjacent lambdas.  That is, given
--      \x y -> (x+1,y)
-- we don't float to give
--      \x -> let v = x+1 in \y -> (v,y)
-- Why not?  Because partial applications are fairly rare, and splitting
-- lambdas makes them more expensive.
lvlExpr env expr@(_, AnnLam {})
  = do { new_body <- lvlMFE True new_env body
       ; return (mkLams new_bndrs new_body) }
  where
    (bndrs, body) = collectAnnBndrs expr
    (env1, bndrs1) = substBndrsSL NonRecursive env bndrs
    (new_env, new_bndrs) = lvlLamBndrs env1 (le_ctxt_lvl env) bndrs1
        -- At one time we called a special version of collectBinders,
        -- which ignored coercions, because we don't want to split
        -- a lambda like this (\x -> coerce t (\s -> ...))
        -- This used to happen quite a bit in state-transformer programs,
        -- but not nearly so much now non-recursive newtypes are transparent.
        -- [See SetLevels rev 1.50 for a version with this approach.]
lvlExpr env (_, AnnLet bind body)
  = do { (bind', new_env) <- lvlBind env bind
       ; body' <- lvlExpr new_env body
           -- No point in going via lvlMFE here.  If the binding is alive
           -- (mentioned in body), and the whole let-expression doesn't
           -- float, then neither will the body
       ; return (Let bind' body') }
lvlExpr env (_, AnnCase scrut@(scrut_fvs,_) case_bndr ty alts)
  = do { scrut' <- lvlMFE True env scrut
       ; lvlCase env scrut_fvs scrut' case_bndr ty alts }
-------------------------------------------
-- | Level a case expression whose scrutinee has already been processed.
-- A single-alternative data case with an ok-for-speculation scrutinee is
-- floated to the level of the scrutinee's free variables (see
-- Note [Floating cases]); otherwise the case stays put.
lvlCase :: LevelEnv             -- Level of in-scope names/tyvars
        -> DVarSet              -- Free vars of input scrutinee
        -> LevelledExpr         -- Processed scrutinee
        -> Id -> Type           -- Case binder and result type
        -> [AnnAlt Id DVarSet]  -- Input alternatives
        -> LvlM LevelledExpr    -- Result expression
lvlCase env scrut_fvs scrut' case_bndr ty alts
  | [(con@(DataAlt {}), bs, body)] <- alts
  , exprOkForSpeculation scrut'   -- See Note [Check the output scrutinee for okForSpec]
  , not (isTopLvl dest_lvl)       -- Can't have top-level cases
  =     -- See Note [Floating cases]
        -- Always float the case if possible
        -- Unlike lets we don't insist that it escapes a value lambda
    do { (rhs_env, (case_bndr':bs')) <- cloneVars NonRecursive env dest_lvl (case_bndr:bs)
                   -- We don't need to use extendCaseBndrLvlEnv here
                   -- because we are floating the case outwards so
                   -- no need to do the binder-swap thing
       ; body' <- lvlMFE True rhs_env body
       ; let alt' = (con, [TB b (StayPut dest_lvl) | b <- bs'], body')
       ; return (Case scrut' (TB case_bndr' (FloatMe dest_lvl)) ty [alt']) }
  | otherwise     -- Stays put
  = do { let (alts_env1, [case_bndr']) = substAndLvlBndrs NonRecursive env incd_lvl [case_bndr]
             alts_env = extendCaseBndrEnv alts_env1 case_bndr scrut'
       ; alts' <- mapM (lvl_alt alts_env) alts
       ; return (Case scrut' case_bndr' ty alts') }
  where
      incd_lvl = incMinorLvl (le_ctxt_lvl env)
      dest_lvl = maxFvLevel (const True) env scrut_fvs
              -- Don't abstract over type variables, hence const True
      lvl_alt alts_env (con, bs, rhs)
        = do { rhs' <- lvlMFE True new_env rhs
             ; return (con, bs', rhs') }
        where
          (new_env, bs') = substAndLvlBndrs NonRecursive alts_env incd_lvl bs
{-
Note [Floating cases]
~~~~~~~~~~~~~~~~~~~~~
Consider this:
data T a = MkT !a
f :: T Int -> blah
f x vs = case x of { MkT y ->
let f vs = ...(case y of I# w -> e)...f..
in f vs
Here we can float the (case y ...) out , because y is sure
to be evaluated, to give
f x vs = case x of { MkT y ->
           case y of I# w ->
let f vs = ...(e)...f..
in f vs
That saves unboxing it every time round the loop. It's important in
some DPH stuff where we really want to avoid that repeated unboxing in
the inner loop.
Things to note
* We can't float a case to top level
* It's worth doing this float even if we don't float
the case outside a value lambda. Example
case x of {
MkT y -> (case y of I# w2 -> ..., case y of I# w2 -> ...)
If we floated the cases out we could eliminate one of them.
* We only do this with a single-alternative case
Note [Check the output scrutinee for okForSpec]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this:
case x of y {
A -> ....(case y of alts)....
}
Because of the binder-swap, the inner case will get substituted to
(case x of ..). So when testing whether the scrutinee is
okForSpeculation we must be careful to test the *result* scrutinee ('x'
in this case), not the *input* one 'y'. The latter *is* ok for
speculation here, but the former is not -- and indeed we can't float
the inner case out, at least not unless x is also evaluated at its
binding site.
That's why we apply exprOkForSpeculation to scrut' and not to scrut.
-}
-- | Level a maximal free expression.  Like 'lvlExpr', but when profitable
-- the expression is let-bound to a fresh 'newLvlVar' binder marked
-- 'FloatMe', so the binding itself can subsequently be floated outwards.
lvlMFE :: Bool -- True <=> strict context [body of case or let]
 -> LevelEnv -- Level of in-scope names/tyvars
 -> CoreExprWithFVs -- input expression
 -> LvlM LevelledExpr -- Result expression
-- lvlMFE is just like lvlExpr, except that it might let-bind
-- the expression, so that it can itself be floated.
lvlMFE _ env (_, AnnType ty)
 = return (Type (substTy (le_subst env) ty))
-- No point in floating out an expression wrapped in a coercion or note
-- If we do we'll transform lvl = e |> co
-- to lvl' = e; lvl = lvl' |> co
-- and then inline lvl. Better just to float out the payload.
lvlMFE strict_ctxt env (_, AnnTick t e)
 = do { e' <- lvlMFE strict_ctxt env e
 ; return (Tick t e') }
lvlMFE strict_ctxt env (_, AnnCast e (_, co))
 = do { e' <- lvlMFE strict_ctxt env e
 ; return (Cast e' (substCo (le_subst env) co)) }
-- Note [Case MFEs]
lvlMFE True env e@(_, AnnCase {})
 = lvlExpr env e -- Don't share cases
lvlMFE strict_ctxt env ann_expr@(fvs, _)
 | isUnLiftedType (exprType expr)
 -- Can't let-bind it; see Note [Unlifted MFEs]
 -- This includes coercions, which we don't want to float anyway
 -- NB: no need to substitute cos isUnLiftedType doesn't change
 || notWorthFloating ann_expr abs_vars
 || not float_me
 = -- Don't float it out
 lvlExpr env ann_expr
 | otherwise -- Float it out!
 = do { expr' <- lvlFloatRhs abs_vars dest_lvl env ann_expr
 ; var <- newLvlVar expr' is_bot
 ; return (Let (NonRec (TB var (FloatMe dest_lvl)) expr')
 (mkVarApps (Var var) abs_vars)) }
 where
 expr = deAnnotate ann_expr
 is_bot = exprIsBottom expr -- Note [Bottoming floats]
 dest_lvl = destLevel env fvs (isFunction ann_expr) is_bot
 abs_vars = abstractVars dest_lvl env fvs
 -- A decision to float entails let-binding this thing, and we only do
 -- that if we'll escape a value lambda, or will go to the top level.
 float_me = dest_lvl `ltMajLvl` (le_ctxt_lvl env) -- Escapes a value lambda
 -- OLD CODE: not (exprIsCheap expr) || isTopLvl dest_lvl
 -- see Note [Escaping a value lambda]
 || (isTopLvl dest_lvl -- Only float if we are going to the top level
 && floatConsts env -- and the floatConsts flag is on
 && not strict_ctxt) -- Don't float from a strict context
 -- We are keen to float something to the top level, even if it does not
 -- escape a lambda, because then it needs no allocation. But it's controlled
 -- by a flag, because doing this too early loses opportunities for RULES
 -- which (needless to say) are important in some nofib programs
 -- (gcd is an example).
 --
 -- Beware:
 -- concat = /\ a -> foldr ..a.. (++) []
 -- was getting turned into
 -- lvl = /\ a -> foldr ..a.. (++) []
 -- concat = /\ a -> lvl a
 -- which is pretty stupid. Hence the strict_ctxt test
 --
 -- Also a strict context includes unboxed values, and they
 -- can't be bound at top level
{-
Note [Unlifted MFEs]
~~~~~~~~~~~~~~~~~~~~
We don't float unlifted MFEs, which potentially loses big opportunities.
For example:
\x -> f (h y)
where h :: Int -> Int# is expensive. We'd like to float the (h y) outside
the \x, but we don't because it's unboxed. Possible solution: box it.
Note [Bottoming floats]
~~~~~~~~~~~~~~~~~~~~~~~
If we see
f = \x. g (error "urk")
we'd like to float the call to error, to get
lvl = error "urk"
f = \x. g lvl
Furthermore, we want to float a bottoming expression even if it has free
variables:
f = \x. g (let v = h x in error ("urk" ++ v))
Then we'd like to abstract over 'x' and float the whole arg of g:
lvl = \x. let v = h x in error ("urk" ++ v)
f = \x. g (lvl x)
See Maessen's paper 1999 "Bottom extraction: factoring error handling out
of functional programs" (unpublished I think).
When we do this, we set the strictness and arity of the new bottoming
Id, *immediately*, for two reasons:
* To prevent the abstracted thing being immediately inlined back in again
via preInlineUnconditionally. The latter has a test for bottoming Ids
to stop inlining them, so we'd better make sure it *is* a bottoming Id!
* So that it's properly exposed as such in the interface file, even if
this is all happening after strictness analysis.
Note [Bottoming floats: eta expansion] c.f Note [Bottoming floats]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Tiresomely, though, the simplifier has an invariant that the manifest
arity of the RHS should be the same as the arity; but we can't call
etaExpand during SetLevels because it works over a decorated form of
CoreExpr. So we do the eta expansion later, in FloatOut.
Note [Case MFEs]
~~~~~~~~~~~~~~~~
We don't float a case expression as an MFE from a strict context. Why not?
Because in doing so we share a tiny bit of computation (the switch) but
in exchange we build a thunk, which is bad. This case reduces allocation
by 7% in spectral/puzzle (a rather strange benchmark) and 1.2% in real/fem.
Doesn't change any other allocation at all.
-}
-- | Attach arity and strictness information to a freshly created
-- bottoming Id; a 'Nothing' annotation leaves the Id unchanged.
annotateBotStr :: Id -> Maybe (Arity, StrictSig) -> Id
-- See Note [Bottoming floats] for why we want to add
-- bottoming information right now
annotateBotStr id Nothing = id
annotateBotStr id (Just (arity, sig)) = id `setIdArity` arity
 `setIdStrictness` sig
-- | Decide whether floating would replace the expression with something
-- bigger than it is now (in which case floating is not worth it).
notWorthFloating :: CoreExprWithFVs -> [Var] -> Bool
-- Returns True if the expression would be replaced by
-- something bigger than it is now. For example:
-- abs_vars = tvars only: return True if e is trivial,
-- but False for anything bigger
-- abs_vars = [x] (an Id): return True for trivial, or an application (f x)
-- but False for (f x x)
--
-- One big goal is that floating should be idempotent. Eg if
-- we replace e with (lvl79 x y) and then run FloatOut again, don't want
-- to replace (lvl79 x y) with (lvl83 x y)!
notWorthFloating e abs_vars
 = go e (count isId abs_vars)
 where
 -- n counts how many value arguments the floated binding would take.
 go (_, AnnVar {}) n = n >= 0
 go (_, AnnLit lit) n = ASSERT( n==0 )
 litIsTrivial lit -- Note [Floating literals]
 go (_, AnnTick t e) n = not (tickishIsCode t) && go e n
 go (_, AnnCast e _) n = go e n
 go (_, AnnApp e arg) n
 | (_, AnnType {}) <- arg = go e n
 | (_, AnnCoercion {}) <- arg = go e n
 | n==0 = False
 | is_triv arg = go e (n-1)
 | otherwise = False
 go _ _ = False
 is_triv (_, AnnLit {}) = True -- Treat all literals as trivial
 is_triv (_, AnnVar {}) = True -- (ie not worth floating)
 is_triv (_, AnnCast e _) = is_triv e
 is_triv (_, AnnApp e (_, AnnType {})) = is_triv e
 is_triv (_, AnnApp e (_, AnnCoercion {})) = is_triv e
 is_triv (_, AnnTick t e) = not (tickishIsCode t) && is_triv e
 is_triv _ = False
{-
Note [Floating literals]
~~~~~~~~~~~~~~~~~~~~~~~~
It's important to float Integer literals, so that they get shared,
rather than being allocated every time round the loop.
Hence the litIsTrivial.
We'd *like* to share MachStr literal strings too, mainly so we could
CSE them, but alas can't do so directly because they are unlifted.
Note [Escaping a value lambda]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We want to float even cheap expressions out of value lambdas,
because that saves allocation. Consider
f = \x. .. (\y.e) ...
Then we'd like to avoid allocating the (\y.e) every time we call f,
(assuming e does not mention x).
An example where this really makes a difference is simplrun009.
Another reason it's good is because it makes SpecConstr fire on functions.
Consider
f = \x. ....(f (\y.e))....
After floating we get
lvl = \y.e
f = \x. ....(f lvl)...
and that is much easier for SpecConstr to generate a robust specialisation for.
The OLD CODE (given where this Note is referred to) prevents floating
of the example above, so I just don't understand the old code. I
don't understand the old comment either (which appears below). I
measured the effect on nofib of changing OLD CODE to 'True', and got
zeros everywhere, but a 4% win for 'puzzle'. Very small 0.5% loss for
'cse'; turns out to be because our arity analysis isn't good enough
yet (mentioned in Simon-nofib-notes).
OLD comment was:
Even if it escapes a value lambda, we only
float if it's not cheap (unless it'll get all the
way to the top). I've seen cases where we
float dozens of tiny free expressions, which cost
more to allocate than to evaluate.
NB: exprIsCheap is also true of bottom expressions, which
is good; we don't want to share them
It's only Really Bad to float a cheap expression out of a
strict context, because that builds a thunk that otherwise
would never be built. So another alternative would be to
add
|| (strict_ctxt && not (exprIsBottom expr))
to the condition above. We should really try this out.
************************************************************************
* *
\subsection{Bindings}
* *
************************************************************************
The binding stuff works for top level too.
-}
-- | Level a let binding (recursive or not), deciding whether to float it
-- ('FloatMe' to dest_lvl) or leave it in place ('StayPut').  Returns the
-- levelled binding together with the extended environment.
lvlBind :: LevelEnv
 -> CoreBindWithFVs
 -> LvlM (LevelledBind, LevelEnv)
lvlBind env (AnnNonRec bndr rhs@(rhs_fvs,_))
 | isTyVar bndr -- Don't do anything for TyVar binders
 -- (simplifier gets rid of them pronto)
 || isCoVar bndr -- Difficult to fix up CoVar occurrences (see extendPolyLvlEnv)
 -- so we will ignore this case for now
 || not (profitableFloat env dest_lvl)
 || (isTopLvl dest_lvl && isUnLiftedType (idType bndr))
 -- We can't float an unlifted binding to top level, so we don't
 -- float it at all. It's a bit brutal, but unlifted bindings
 -- aren't expensive either
 = -- No float
 do { rhs' <- lvlExpr env rhs
 ; let bind_lvl = incMinorLvl (le_ctxt_lvl env)
 (env', [bndr']) = substAndLvlBndrs NonRecursive env bind_lvl [bndr]
 ; return (NonRec bndr' rhs', env') }
 -- Otherwise we are going to float
 | null abs_vars
 = do { -- No type abstraction; clone existing binder
 rhs' <- lvlExpr (setCtxtLvl env dest_lvl) rhs
 ; (env', [bndr']) <- cloneVars NonRecursive env dest_lvl [bndr]
 ; return (NonRec (TB bndr' (FloatMe dest_lvl)) rhs', env') }
 | otherwise
 = do { -- Yes, type abstraction; create a new binder, extend substitution, etc
 rhs' <- lvlFloatRhs abs_vars dest_lvl env rhs
 ; (env', [bndr']) <- newPolyBndrs dest_lvl env abs_vars [bndr]
 ; return (NonRec (TB bndr' (FloatMe dest_lvl)) rhs', env') }
 where
 bind_fvs = rhs_fvs `unionDVarSet` fvDVarSet (idFVs bndr)
 abs_vars = abstractVars dest_lvl env bind_fvs
 dest_lvl = destLevel env bind_fvs (isFunction rhs) is_bot
 is_bot = exprIsBottom (deAnnotate rhs)
lvlBind env (AnnRec pairs)
 | not (profitableFloat env dest_lvl)
 = do { let bind_lvl = incMinorLvl (le_ctxt_lvl env)
 (env', bndrs') = substAndLvlBndrs Recursive env bind_lvl bndrs
 ; rhss' <- mapM (lvlExpr env') rhss
 ; return (Rec (bndrs' `zip` rhss'), env') }
 | null abs_vars
 = do { (new_env, new_bndrs) <- cloneVars Recursive env dest_lvl bndrs
 ; new_rhss <- mapM (lvlExpr (setCtxtLvl new_env dest_lvl)) rhss
 ; return ( Rec ([TB b (FloatMe dest_lvl) | b <- new_bndrs] `zip` new_rhss)
 , new_env) }
 -- ToDo: when enabling the floatLambda stuff,
 -- I think we want to stop doing this
 | [(bndr,rhs)] <- pairs
 , count isId abs_vars > 1
 = do -- Special case for self recursion where there are
 -- several variables carried around: build a local loop:
 -- poly_f = \abs_vars. \lam_vars . letrec f = \lam_vars. rhs in f lam_vars
 -- This just makes the closures a bit smaller. If we don't do
 -- this, allocation rises significantly on some programs
 --
 -- We could elaborate it for the case where there are several
 -- mutually functions, but it's quite a bit more complicated
 --
 -- This all seems a bit ad hoc -- sigh
 let (rhs_env, abs_vars_w_lvls) = lvlLamBndrs env dest_lvl abs_vars
 rhs_lvl = le_ctxt_lvl rhs_env
 (rhs_env', [new_bndr]) <- cloneVars Recursive rhs_env rhs_lvl [bndr]
 let
 (lam_bndrs, rhs_body) = collectAnnBndrs rhs
 (body_env1, lam_bndrs1) = substBndrsSL NonRecursive rhs_env' lam_bndrs
 (body_env2, lam_bndrs2) = lvlLamBndrs body_env1 rhs_lvl lam_bndrs1
 new_rhs_body <- lvlExpr body_env2 rhs_body
 (poly_env, [poly_bndr]) <- newPolyBndrs dest_lvl env abs_vars [bndr]
 return (Rec [(TB poly_bndr (FloatMe dest_lvl)
 , mkLams abs_vars_w_lvls $
 mkLams lam_bndrs2 $
 Let (Rec [( TB new_bndr (StayPut rhs_lvl)
 , mkLams lam_bndrs2 new_rhs_body)])
 (mkVarApps (Var new_bndr) lam_bndrs1))]
 , poly_env)
 | otherwise -- Non-null abs_vars
 = do { (new_env, new_bndrs) <- newPolyBndrs dest_lvl env abs_vars bndrs
 ; new_rhss <- mapM (lvlFloatRhs abs_vars dest_lvl new_env) rhss
 ; return ( Rec ([TB b (FloatMe dest_lvl) | b <- new_bndrs] `zip` new_rhss)
 , new_env) }
 where
 (bndrs,rhss) = unzip pairs
 -- Finding the free vars of the binding group is annoying
 bind_fvs = ((unionDVarSets [ freeVarsOf rhs | (_, rhs) <- pairs])
 `unionDVarSet`
 (fvDVarSet $ unionsFV [ idFVs bndr
 | (bndr, (_,_)) <- pairs]))
 `delDVarSetList`
 bndrs
 dest_lvl = destLevel env bind_fvs (all isFunction rhss) False
 abs_vars = abstractVars dest_lvl env bind_fvs
-- | Floating to dest_lvl pays off only if it escapes a value lambda or
-- reaches the top level; see Note [Escaping a value lambda].
profitableFloat :: LevelEnv -> Level -> Bool
profitableFloat env dest_lvl
 = (dest_lvl `ltMajLvl` le_ctxt_lvl env) -- Escapes a value lambda
 || isTopLvl dest_lvl -- Going all the way to top level
----------------------------------------------------
-- Three help functions for the type-abstraction case
-- | Level a right-hand side that is about to float, wrapping it in
-- lambdas over the abstracted variables (type and value).
lvlFloatRhs :: [OutVar] -> Level -> LevelEnv -> CoreExprWithFVs
 -> UniqSM (Expr LevelledBndr)
lvlFloatRhs abs_vars dest_lvl env rhs
 = do { rhs' <- lvlExpr rhs_env rhs
 ; return (mkLams abs_vars_w_lvls rhs') }
 where
 (rhs_env, abs_vars_w_lvls) = lvlLamBndrs env dest_lvl abs_vars
{-
************************************************************************
* *
\subsection{Deciding floatability}
* *
************************************************************************
-}
-- | Substitute binders through the environment, then assign them a level.
substAndLvlBndrs :: RecFlag -> LevelEnv -> Level -> [InVar] -> (LevelEnv, [LevelledBndr])
substAndLvlBndrs is_rec env lvl bndrs
 = lvlBndrs subst_env lvl subst_bndrs
 where
 (subst_env, subst_bndrs) = substBndrsSL is_rec env bndrs
-- | Run the substitution over binders and record them in le_env.
substBndrsSL :: RecFlag -> LevelEnv -> [InVar] -> (LevelEnv, [OutVar])
-- So named only to avoid the name clash with CoreSubst.substBndrs
substBndrsSL is_rec env@(LE { le_subst = subst, le_env = id_env }) bndrs
 = ( env { le_subst = subst'
 , le_env = foldl add_id id_env (bndrs `zip` bndrs') }
 , bndrs')
 where
 (subst', bndrs') = case is_rec of
 NonRecursive -> substBndrs subst bndrs
 Recursive -> substRecBndrs subst bndrs
-- | Compute the levels for the binders of a lambda group: a major level
-- bump if any binder is a (non-one-shot) value binder, else a minor bump.
lvlLamBndrs :: LevelEnv -> Level -> [OutVar] -> (LevelEnv, [LevelledBndr])
-- Compute the levels for the binders of a lambda group
lvlLamBndrs env lvl bndrs
 = lvlBndrs env new_lvl bndrs
 where
 new_lvl | any is_major bndrs = incMajorLvl lvl
 | otherwise = incMinorLvl lvl
 is_major bndr = isId bndr && not (isProbablyOneShotLambda bndr)
 -- The "probably" part says "don't float things out of a
 -- probable one-shot lambda"
 -- See Note [Computing one-shot info] in Demand.lhs
-- | Tag each binder with (StayPut new_lvl) and record the level in the env.
lvlBndrs :: LevelEnv -> Level -> [CoreBndr] -> (LevelEnv, [LevelledBndr])
-- The binders returned are exactly the same as the ones passed,
-- apart from applying the substitution, but they are now paired
-- with a (StayPut level)
--
-- The returned envt has ctxt_lvl updated to the new_lvl
--
-- All the new binders get the same level, because
-- any floating binding is either going to float past
-- all or none. We never separate binders.
lvlBndrs env@(LE { le_lvl_env = lvl_env }) new_lvl bndrs
 = ( env { le_ctxt_lvl = new_lvl
 , le_lvl_env = foldl add_lvl lvl_env bndrs }
 , lvld_bndrs)
 where
 lvld_bndrs = [TB bndr (StayPut new_lvl) | bndr <- bndrs]
 add_lvl env v = extendVarEnv env v new_lvl
-- Destination level is the max Id level of the expression
-- (We'll abstract the type variables, if any.)
-- | Pick the level to which an expression/binding should float.
destLevel :: LevelEnv -> DVarSet
 -> Bool -- True <=> is function
 -> Bool -- True <=> is bottom
 -> Level
destLevel env fvs is_function is_bot
 | is_bot = tOP_LEVEL -- Send bottoming bindings to the top
 -- regardless; see Note [Bottoming floats]
 | Just n_args <- floatLams env
 , n_args > 0 -- n=0 case handled uniformly by the 'otherwise' case
 , is_function
 , countFreeIds fvs <= n_args
 = tOP_LEVEL -- Send functions to top level; see
 -- the comments with isFunction
 | otherwise = maxFvLevel isId env fvs -- Max over Ids only; the tyvars
 -- will be abstracted
-- | Is the expression a value lambda (after peeling type lambdas)?
isFunction :: CoreExprWithFVs -> Bool
-- The idea here is that we want to float *functions* to
-- the top level. This saves no work, but
-- (a) it can make the host function body a lot smaller,
-- and hence inlinable.
-- (b) it can also save allocation when the function is recursive:
-- h = \x -> letrec f = \y -> ...f...y...x...
-- in f x
-- becomes
-- f = \x y -> ...(f x)...y...x...
-- h = \x -> f x x
-- No allocation for f now.
-- We may only want to do this if there are sufficiently few free
-- variables. We certainly only want to do it for values, and not for
-- constructors. So the simple thing is just to look for lambdas
isFunction (_, AnnLam b e) | isId b = True
 | otherwise = isFunction e
-- isFunction (_, AnnTick _ e) = isFunction e -- dubious
isFunction _ = False
-- | Count the Ids (not TyVars) in a deterministic var set.
countFreeIds :: DVarSet -> Int
countFreeIds = nonDetFoldUDFM add 0
 -- It's OK to use nonDetFoldUDFM here because we're just counting things.
 where
 add :: Var -> Int -> Int
 add v n | isId v = n+1
 | otherwise = n
{-
************************************************************************
* *
\subsection{Free-To-Level Monad}
* *
************************************************************************
-}
-- Naming convention: In* = before cloning/substitution, Out* = after.
type InVar = Var -- Pre cloning
type InId = Id -- Pre cloning
type OutVar = Var -- Post cloning
type OutId = Id -- Post cloning
-- | The environment threaded through the SetLevels pass.
data LevelEnv
 = LE { le_switches :: FloatOutSwitches
 , le_ctxt_lvl :: Level -- The current level
 , le_lvl_env :: VarEnv Level -- Domain is *post-cloned* TyVars and Ids
 , le_subst :: Subst -- Domain is pre-cloned TyVars and Ids
 -- The Id -> CoreExpr in the Subst is ignored
 -- (since we want to substitute a LevelledExpr for
 -- an Id via le_env) but we do use the Co/TyVar substs
 , le_env :: IdEnv ([OutVar], LevelledExpr) -- Domain is pre-cloned Ids
 }
-- We clone let- and case-bound variables so that they are still
-- distinct when floated out; hence the le_subst/le_env.
-- (see point 3 of the module overview comment).
-- We also use these envs when making a variable polymorphic
-- because we want to float it out past a big lambda.
--
-- The le_subst and le_env always implement the same mapping, but the
-- le_subst maps to CoreExpr and the le_env to LevelledExpr
-- Since the range is always a variable or type application,
-- there is never any difference between the two, but sadly
-- the types differ. The le_subst is used when substituting in
-- a variable's IdInfo; the le_env when we find a Var.
--
-- In addition the le_env records a list of tyvars free in the
-- type application, just so we don't have to call freeVars on
-- the type application repeatedly.
--
-- The domain of the both envs is *pre-cloned* Ids, though
--
-- The domain of the le_lvl_env is the *post-cloned* Ids
-- | The starting environment: top level, empty substitution and envs.
initialEnv :: FloatOutSwitches -> LevelEnv
initialEnv float_lams
 = LE { le_switches = float_lams
 , le_ctxt_lvl = tOP_LEVEL
 , le_lvl_env = emptyVarEnv
 , le_subst = emptySubst
 , le_env = emptyVarEnv }
-- Simple accessors over the switches, plus a context-level setter.
floatLams :: LevelEnv -> Maybe Int
floatLams le = floatOutLambdas (le_switches le)
floatConsts :: LevelEnv -> Bool
floatConsts le = floatOutConstants (le_switches le)
floatOverSat :: LevelEnv -> Bool
floatOverSat le = floatOutOverSatApps (le_switches le)
setCtxtLvl :: LevelEnv -> Level -> LevelEnv
setCtxtLvl env lvl = env { le_ctxt_lvl = lvl }
-- extendCaseBndrLvlEnv adds the mapping case-bndr->scrut-var if it can
-- (see point 4 of the module overview comment)
extendCaseBndrEnv :: LevelEnv
 -> Id -- Pre-cloned case binder
 -> Expr LevelledBndr -- Post-cloned scrutinee
 -> LevelEnv
extendCaseBndrEnv le@(LE { le_subst = subst, le_env = id_env })
 case_bndr (Var scrut_var)
 = le { le_subst = extendSubstWithVar subst case_bndr scrut_var
 , le_env = add_id id_env (case_bndr, scrut_var) }
extendCaseBndrEnv env _ _ = env
-- | Maximum level of the (filtered) free variables, looking through
-- le_env so that already-abstracted Ids contribute their abs_vars.
maxFvLevel :: (Var -> Bool) -> LevelEnv -> DVarSet -> Level
maxFvLevel max_me (LE { le_lvl_env = lvl_env, le_env = id_env }) var_set
 = foldDVarSet max_in tOP_LEVEL var_set
 where
 max_in in_var lvl
 = foldr max_out lvl (case lookupVarEnv id_env in_var of
 Just (abs_vars, _) -> abs_vars
 Nothing -> [in_var])
 max_out out_var lvl
 | max_me out_var = case lookupVarEnv lvl_env out_var of
 Just lvl' -> maxLvl lvl' lvl
 Nothing -> lvl
 | otherwise = lvl -- Ignore some vars depending on max_me
-- | Look a variable up in le_env, falling back to the variable itself.
lookupVar :: LevelEnv -> Id -> LevelledExpr
lookupVar le v = case lookupVarEnv (le_env le) v of
 Just (_, expr) -> expr
 _ -> Var v
abstractVars :: Level -> LevelEnv -> DVarSet -> [OutVar]
-- Find the variables in fvs, free vars of the target expression,
-- whose level is greater than the destination level
-- These are the ones we are going to abstract out
--
-- Note that to get reproducible builds, the variables need to be
-- abstracted in deterministic order, not dependent on the values of
-- Uniques. This is achieved by using DVarSets, deterministic free
-- variable computation and deterministic sort.
-- See Note [Unique Determinism] in Unique for explanation of why
-- Uniques are not deterministic.
abstractVars dest_lvl (LE { le_subst = subst, le_lvl_env = lvl_env }) in_fvs
 = map zap $ sortQuantVars $ uniq
 [out_var | out_fv <- dVarSetElems (substDVarSet subst in_fvs)
 , out_var <- dVarSetElems (close out_fv)
 , abstract_me out_var ]
 -- NB: it's important to call abstract_me only on the OutIds the
 -- come from substDVarSet (not on fv, which is an InId)
 where
 uniq :: [Var] -> [Var]
 -- Remove duplicates, preserving order
 uniq = dVarSetElems . mkDVarSet
 -- A variable is abstracted iff it lives at a level deeper than dest_lvl.
 abstract_me v = case lookupVarEnv lvl_env v of
 Just lvl -> dest_lvl `ltLvl` lvl
 Nothing -> False
 -- We are going to lambda-abstract, so nuke any IdInfo,
 -- and add the tyvars of the Id (if necessary)
 zap v | isId v = WARN( isStableUnfolding (idUnfolding v) ||
 not (isEmptyRuleInfo (idSpecialisation v)),
 text "absVarsOf: discarding info on" <+> ppr v )
 setIdInfo v vanillaIdInfo
 | otherwise = v
 close :: Var -> DVarSet -- Close over variables free in the type
 -- Result includes the input variable itself
 close v = foldDVarSet (unionDVarSet . close)
 (unitDVarSet v)
 (fvDVarSet $ varTypeTyFVs v)
-- The levelling monad is just the unique-supply monad.
type LvlM result = UniqSM result
initLvl :: UniqSupply -> UniqSM a -> a
initLvl = initUs_
-- | Make fresh "poly_" binders for Ids that are being abstracted over
-- abs_vars, and extend the env so occurrences become (poly_x abs_vars).
newPolyBndrs :: Level -> LevelEnv -> [OutVar] -> [InId] -> UniqSM (LevelEnv, [OutId])
-- The envt is extended to bind the new bndrs to dest_lvl, but
-- the ctxt_lvl is unaffected
newPolyBndrs dest_lvl
 env@(LE { le_lvl_env = lvl_env, le_subst = subst, le_env = id_env })
 abs_vars bndrs
 = ASSERT( all (not . isCoVar) bndrs ) -- What would we add to the CoSubst in this case. No easy answer.
 do { uniqs <- getUniquesM
 ; let new_bndrs = zipWith mk_poly_bndr bndrs uniqs
 bndr_prs = bndrs `zip` new_bndrs
 env' = env { le_lvl_env = foldl add_lvl lvl_env new_bndrs
 , le_subst = foldl add_subst subst bndr_prs
 , le_env = foldl add_id id_env bndr_prs }
 ; return (env', new_bndrs) }
 where
 add_lvl env v' = extendVarEnv env v' dest_lvl
 add_subst env (v, v') = extendIdSubst env v (mkVarApps (Var v') abs_vars)
 add_id env (v, v') = extendVarEnv env v ((v':abs_vars), mkVarApps (Var v') abs_vars)
 mk_poly_bndr bndr uniq = transferPolyIdInfo bndr abs_vars $ -- Note [transferPolyIdInfo] in Id.lhs
 mkSysLocal (mkFastString str) uniq poly_ty
 where
 str = "poly_" ++ occNameString (getOccName bndr)
 poly_ty = mkPiTypes abs_vars (substTy subst (idType bndr))
-- | Make a fresh "lvl" Id for a floated MFE, annotating it with
-- bottoming info when appropriate (see Note [Bottoming floats]).
newLvlVar :: LevelledExpr -- The RHS of the new binding
 -> Bool -- Whether it is bottom
 -> LvlM Id
newLvlVar lvld_rhs is_bot
 = do { uniq <- getUniqueM
 ; return (add_bot_info (mkLocalId (mk_name uniq) rhs_ty)) }
 where
 add_bot_info var -- We could call annotateBotStr always, but the is_bot
 -- flag just tells us when we don't need to do so
 | is_bot = annotateBotStr var (exprBotStrictness_maybe de_tagged_rhs)
 | otherwise = var
 de_tagged_rhs = deTagExpr lvld_rhs
 rhs_ty = exprType de_tagged_rhs
 mk_name uniq = mkSystemVarName uniq (mkFastString "lvl")
-- | Clone let/case-bound variables so they stay distinct after floating,
-- recording the clones at dest_lvl in the environment.
cloneVars :: RecFlag -> LevelEnv -> Level -> [Var] -> LvlM (LevelEnv, [Var])
-- Works for Ids, TyVars and CoVars
-- The dest_lvl is attributed to the binders in the new env,
-- but cloneVars doesn't affect the ctxt_lvl of the incoming env
cloneVars is_rec
 env@(LE { le_subst = subst, le_lvl_env = lvl_env, le_env = id_env })
 dest_lvl vs
 = do { us <- getUniqueSupplyM
 ; let (subst', vs1) = case is_rec of
 NonRecursive -> cloneBndrs subst us vs
 Recursive -> cloneRecIdBndrs subst us vs
 vs2 = map zap_demand_info vs1 -- See Note [Zapping the demand info]
 prs = vs `zip` vs2
 env' = env { le_lvl_env = foldl add_lvl lvl_env vs2
 , le_subst = subst'
 , le_env = foldl add_id id_env prs }
 ; return (env', vs2) }
 where
 add_lvl env v_cloned = extendVarEnv env v_cloned dest_lvl
 add_id :: IdEnv ([Var], LevelledExpr) -> (Var, Var) -> IdEnv ([Var], LevelledExpr)
 add_id id_env (v, v1)
 | isTyVar v = delVarEnv id_env v
 | otherwise = extendVarEnv id_env v ([v1], ASSERT(not (isCoVar v1)) Var v1)
-- | Drop demand info on cloned Ids; see Note [Zapping the demand info].
zap_demand_info :: Var -> Var
zap_demand_info v
 | isId v = zapIdDemandInfo v
 | otherwise = v
{-
Note [Zapping the demand info]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
VERY IMPORTANT: we must zap the demand info if the thing is going to
float out, because it may be less demanded than at its original
binding site. Eg
f :: Int -> Int
f x = let v = 3*4 in v+x
Here v is strict; but if we float v to top level, it isn't any more.
-}
| rahulmutt/ghcvm | compiler/Eta/SimplCore/SetLevels.hs | bsd-3-clause | 47,432 | 0 | 21 | 13,761 | 7,629 | 4,077 | 3,552 | -1 | -1 |
--
-- Copyright © 2013-2014 Anchor Systems, Pty Ltd and Others
--
-- The code in this file, and the program it is a part of, is
-- made available to you by its authors as open source software:
-- you can redistribute it and/or modify it under the terms of
-- the 3-clause BSD licence.
--
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TupleSections #-}
module Vaultaire.Types.Common
(
Origin(..),
makeOrigin,
Epoch,
NumBuckets
) where
import Control.Applicative
import Control.Exception (Exception, SomeException (..))
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as S
import Data.Char
import Data.Either
import Data.Hashable (Hashable)
import Data.Locator
import Data.String (IsString)
import Data.Typeable (Typeable)
import Data.Word (Word64)
import Test.QuickCheck
-- |Origin is a ByteString representing a data origin. It must be
-- between one and eight bytes.
newtype Origin = Origin { unOrigin :: ByteString }
 deriving (Eq, Ord, IsString, Hashable)
-- Generates six-character base-16 locator strings, which satisfy the
-- alphanumeric and length rules enforced by 'makeOrigin'.
instance Arbitrary Origin where
 -- suchThat condition should be removed once locators package is fixed
 arbitrary = Origin . S.pack . toLocator16a 6 <$> arbitrary `suchThat` (>0)
-- Reads via 'makeOrigin'; an invalid string yields no parse because
-- 'rights' drops the Left result.
instance Read Origin where
 readsPrec _ = fmap (,"") . rights . (:[]) . makeOrigin . S.pack
-- Shows the raw origin bytes, without the newtype wrapper.
instance Show Origin where
 show = S.unpack . unOrigin
-- | Invalid origin Exception
data BadOrigin = NullOrigin | NonAlphaNumOrigin | OriginTooLong
 deriving (Show, Typeable)
instance Exception BadOrigin
-- | Smart constructor for 'Origin': rejects the empty string,
-- non-alphanumeric characters, and strings longer than eight bytes,
-- reporting the specific 'BadOrigin' wrapped in 'SomeException'.
makeOrigin :: ByteString -> Either SomeException Origin
makeOrigin bs
 | S.null bs = Left (SomeException NullOrigin)
 | S.any (not . isAlphaNum) bs = Left (SomeException NonAlphaNumOrigin)
 | S.length bs > 8 = Left (SomeException OriginTooLong)
 | otherwise = Right (Origin bs)
-- These could all be newtype wrapped as make-work, perhaps excluding DayMap.
-- They have no reason to be inter-mixed.
type Epoch = Word64
type NumBuckets = Word64
| afcowie/vaultaire-common | lib/Vaultaire/Types/Common.hs | bsd-3-clause | 2,032 | 0 | 11 | 377 | 443 | 252 | 191 | 40 | 1 |
import Testsuite
import Data.Array.Parallel.Unlifted
-- Template Haskell splice: 'testcases' instantiates the quoted
-- properties below at each listed element type; the tag before <@
-- selects which class context the property group requires.
$(testcases [ "" <@ [t| ( (), Char, Bool, Int ) |]
 , "acc" <@ [t| ( (), Int ) |]
 , "num" <@ [t| ( Int ) |]
 , "ord" <@ [t| ( (), Char, Bool, Int ) |]
 , "enum" <@ [t| ( (), Char, Bool, Int ) |]
 ]
 [d|
 -- missing: permuteU
 -- missing: bpermuteU
 -- missing: bpermuteDftU
 prop_reverseU :: (Eq a, UA a) => UArr a -> Bool
 prop_reverseU arr =
 fromU (reverseU arr) == reverse (fromU arr)
 |])
| mainland/dph | dph-test/old/Unlifted_Permutes.hs | bsd-3-clause | 568 | 0 | 9 | 221 | 82 | 56 | 26 | -1 | -1 |
module WithCliSpec where
import System.Environment
import System.Exit
import System.IO
import System.IO.Silently
import Test.Hspec
import WithCli
-- | Hspec suite for 'withCli': checks successful runs, parsing of a
-- positional Int, and the stderr messages / ExitFailure on bad input.
spec :: Spec
spec = do
 describe "withCli" $ do
 context "no arguments" $ do
 it "executes the operation in case of no command line arguments" $ do
 let main :: IO ()
 main = putStrLn "success"
 (capture_ $ withArgs [] $ withCli main)
 `shouldReturn` "success\n"
 it "produces nice error messages" $ do
 let main :: IO ()
 main = putStrLn "success"
 output <- hCapture_ [stderr] (withArgs ["foo"] (withCli main) `shouldThrow` (== ExitFailure 1))
 output `shouldBe` "unknown argument: foo\n"
 context "1 argument" $ do
 it "parses Ints" $ do
 let main :: Int -> IO ()
 main n = putStrLn ("success: " ++ show n)
 (capture_ $ withArgs ["12"] $ withCli main)
 `shouldReturn` "success: 12\n"
 it "error parsing" $ do
 let main :: Int -> IO ()
 main n = putStrLn ("error: " ++ show n)
 output <- hCapture_ [stderr] (withArgs (words "12 foo") (withCli main) `shouldThrow` (== ExitFailure 1))
 output `shouldBe` "unknown argument: foo\n"
 it "handle Maybes as positional arguments with a proper error message" $ do
 let main :: Maybe Int -> IO ()
 main = error "main"
 output <- hCapture_ [stderr] (withCli main `shouldThrow` (== ExitFailure 1))
 output `shouldBe` "cannot use Maybes for positional arguments\n"
| kosmikus/getopt-generics | test/WithCliSpec.hs | bsd-3-clause | 1,614 | 0 | 22 | 505 | 485 | 239 | 246 | 37 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Fm where
import Data.List
import qualified Data.Text as T
import System.Random
-- | Precomputed table of four-value rolls treated as "impossible"
-- (presumably combinations with no solution in the dice game played
-- with 'calc'/'calc2'/'calc3' -- TODO confirm against the game rules).
impossibles :: [[Double]]
impossibles = [[1.0,1.0,1.0,4.0],
 [1.0,1.0,1.0,5.0],
 [1.0,1.0,1.0,6.0],
 [1.0,1.0,1.0,7.0],
 [1.0,1.0,1.0,12.0],
 [1.0,1.0,1.0,13.0],
 [1.0,1.0,1.0,14.0],
 [1.0,1.0,1.0,15.0],
 [1.0,1.0,1.0,16.0],
 [1.0,1.0,5.0,11.0],
 [1.0,1.0,5.0,12.0],
 [1.0,1.0,6.0,11.0],
 [1.0,1.0,7.0,8.0],
 [1.0,1.0,8.0,15.0],
 [1.0,1.0,11.0,12.0],
 [1.0,1.0,11.0,14.0],
 [1.0,1.0,11.0,15.0],
 [1.0,1.0,11.0,16.0],
 [1.0,1.0,11.0,17.0],
 [1.0,1.0,12.0,13.0],
 [1.0,1.0,12.0,15.0],
 [1.0,1.0,12.0,17.0],
 [1.0,4.0,7.0,19.0],
 [1.0,4.0,7.0,20.0],
 [1.0,4.0,11.0,11.0],
 [1.0,4.0,11.0,18.0],
 [1.0,4.0,11.0,19.0],
 [1.0,4.0,12.0,14.0],
 [1.0,4.0,12.0,19.0],
 [1.0,4.0,12.0,20.0],
 [1.0,5.0,6.0,6.0],
 [1.0,5.0,6.0,7.0],
 [1.0,5.0,6.0,17.0],
 [1.0,5.0,8.0,9.0],
 [1.0,5.0,9.0,19.0],
 [1.0,5.0,11.0,12.0],
 [1.0,5.0,11.0,17.0],
 [1.0,5.0,11.0,19.0],
 [1.0,5.0,12.0,20.0],
 [1.0,6.0,6.0,11.0],
 [1.0,6.0,9.0,19.0],
 [1.0,6.0,11.0,17.0],
 [1.0,6.0,11.0,19.0],
 [1.0,6.0,12.0,12.0],
 [2.0,3.0,9.0,18.0],
 [2.0,4.0,9.0,11.0],
 [2.0,6.0,9.0,11.0],
 [2.0,6.0,9.0,18.0],
 [3.0,3.0,3.0,3.0],
 [3.0,3.0,3.0,4.0],
 [3.0,3.0,3.0,12.0],
 [3.0,3.0,3.0,15.0],
 [3.0,3.0,5.0,17.0],
 [3.0,3.0,8.0,8.0],
 [3.0,3.0,8.0,17.0],
 [3.0,3.0,9.0,9.0],
 [3.0,3.0,12.0,12.0],
 [3.0,4.0,9.0,9.0],
 [3.0,4.0,11.0,11.0],
 [3.0,5.0,9.0,11.0],
 [3.0,5.0,11.0,19.0],
 [3.0,6.0,6.0,15.0],
 [3.0,6.0,7.0,13.0],
 [4.0,4.0,5.0,13.0],
 [4.0,4.0,5.0,18.0],
 [4.0,4.0,7.0,10.0],
 [4.0,4.0,7.0,18.0],
 [4.0,4.0,8.0,15.0],
 [4.0,4.0,10.0,13.0],
 [4.0,4.0,10.0,17.0],
 [4.0,4.0,11.0,14.0],
 [4.0,4.0,11.0,18.0],
 [4.0,5.0,5.0,18.0],
 [4.0,5.0,6.0,12.0],
 [4.0,5.0,9.0,18.0],
 [4.0,5.0,12.0,14.0],
 [4.0,6.0,8.0,20.0],
 [4.0,6.0,9.0,12.0],
 [4.0,6.0,12.0,19.0],
 [4.0,6.0,12.0,20.0],
 [5.0,5.0,8.0,14.0],
 [5.0,5.0,11.0,17.0],
 [5.0,5.0,11.0,18.0],
 [5.0,6.0,6.0,17.0],
 [5.0,6.0,11.0,11.0],
 [5.0,6.0,11.0,12.0],
 [5.0,6.0,11.0,17.0],
 [5.0,6.0,11.0,18.0],
 [6.0,6.0,6.0,6.0],
 [6.0,6.0,6.0,7.0],
 [6.0,6.0,6.0,11.0],
 [6.0,6.0,6.0,17.0],
 [6.0,6.0,7.0,10.0],
 [6.0,6.0,7.0,11.0],
 [6.0,6.0,7.0,17.0],
 [6.0,6.0,9.0,9.0],
 [6.0,6.0,9.0,13.0],
 [6.0,6.0,10.0,13.0],
 [6.0,6.0,10.0,15.0],
 [6.0,6.0,10.0,17.0],
 [6.0,6.0,11.0,11.0],
 [6.0,6.0,11.0,12.0],
 [6.0,6.0,11.0,17.0],
 [6.0,6.0,12.0,15.0]]
-- | Convert an Int to a Double.
--
-- The original detoured through @read (show x)@, i.e. a String
-- round-trip; 'fromIntegral' is the direct, idiomatic conversion and
-- yields identical results for every Int.
toDouble :: Int -> Double
toDouble = fromIntegral
-- | Roll one die: a uniformly random Int in [1, x] drawn from the
-- global standard generator.
rM :: Int -> IO Int
rM x = getStdRandom (System.Random.randomR ((1,x) :: (Int,Int)))
-- | Roll four dice with the given upper bounds, left to right,
-- returning the four results in order.
start :: Int -> Int -> Int -> Int -> IO [Int]
start ax bx cx dx = mapM rM [ax, bx, cx, dx]
-- | Join exactly four textual die values with commas; any other list
-- shape reports a problem.
rollFunc :: [String] -> T.Text
rollFunc [a, b, c, d] = T.pack (intercalate "," [a, b, c, d])
rollFunc _ = "Problem in rollFunc"
-- | Roll four dice and render the results as comma-separated Text.
rollT :: Int -> Int -> Int -> Int -> IO T.Text
rollT ax bx cx dx = rollFunc . map show <$> start ax bx cx dx
roll :: Int -> Int -> Int -> Int -> IO [Double]
roll ax bx cx dx = do
x <- start ax bx cx dx
return $ map toDouble x
-- | Apply the operation named by @b@ to @a@ and @c@.  Recognised names
-- are the four arithmetic symbols and "Concatenate"; any other name
-- yields 5000 (presumably a sentinel no real result matches — TODO confirm).
computation :: Double -> String -> Double -> Double
computation a b c | b == "+" = (+) a c
                  | b == "-" = (-) a c
                  | b == "*" = (*) a c
                  | b == "/" = (/) a c
                  | b == "Concatenate" = cat a c
                  | otherwise = 5000

-- | Round to the nearest 'Int' (Haskell 'round': halves go to even).
fRound :: Double -> Int
fRound = round

-- | Is the 'Double' an exact whole number?
whole :: Double -> Bool
whole x = toDouble (fRound x) == x

-- | Digit concatenation, e.g. @cat 3 10 == 310@: both operands must be
-- whole, the right one non-negative and the left one non-zero;
-- otherwise 8.888 is returned (sentinel-like value; see also 'g').
cat :: Double -> Double -> Double
cat l m | (whole l) && (whole m) && m >= 0 && l /= 0 = read ((show $ fRound l) ++ (show $ fRound m))
        | otherwise = 8.888

-- | Recover a display name for an operator by probing it with known
-- arguments: 3?2 is 5 for (+), 1 for (-), 6 for (*); 18?3 is 6 for (/)
-- (probed with different arguments so it cannot collide with the (*)
-- guard above); 5?5 is 55 only for 'cat'.  Guard order matters.
g :: (Double -> Double -> Double) -> String
g x | x 3 2 == 5 = " + "
    | x 3 2 == 1 = " - "
    | x 3 2 == 6 = " * "
    | x 18 3 == 6 = " / "
    | x 5 5 == 55 = " concatenated left of "
    | otherwise = " cow "

-- | Render a 'Double' as its rounded integer.
f :: Double -> String
f x = show (fRound x)

-- | The candidate binary operations tried by the solvers.
ops :: [Double -> Double -> Double]
ops = [cat, (+), (-), (*), (/)]
-- | All ways of reaching @e@ as @(x op1 y) op2 z@ from three of the
-- four numbers (the fourth element of each permutation is unused).
calc :: Double -> Double -> Double -> Double -> Double -> [(String, String, String, String, String, String)]
calc a b c d e =
  [ (f a', g op1, f b', g op2, f c', show e)
  | [a', b', c', _] <- nub (permutations [a, b, c, d])
  , op1 <- ops
  , op2 <- ops
  , op2 (op1 a' b') c' == e
  ]

-- | All ways of reaching @e@ as @x op2 (y op1 z)@ from three of the
-- four numbers (the fourth element of each permutation is unused).
calc2 :: Double -> Double -> Double -> Double -> Double -> [(String, String, String, String, String, String)]
calc2 a b c d e =
  [ (f a', g op1, f b', g op2, f c', show e)
  | [a', b', c', _] <- nub (permutations [a, b, c, d])
  , op1 <- ops
  , op2 <- ops
  , op2 a' (op1 b' c') == e
  ]
-- | Solutions of the shape @(x op1 y) op3 (z op2 w)@, using all four numbers.
calc3 :: Double -> Double -> Double -> Double -> Double -> [(String, String, String, String, String, String, String, String)]
calc3 a b c d e = [(f a', g op1, f b', g op3, f c', g op2, f d', show e) |
    [a',b',c',d'] <- nub(permutations [a,b,c,d]),
    op1 <- ops,
    op2 <- ops,
    op3 <- ops,
    op3 (op1 a' b') (op2 c' d') == e]
-- | Solutions of the shape @((x op1 y) op2 z) op3 w@.
calc4 :: Double -> Double -> Double -> Double -> Double ->[(String, String, String, String, String, String, String, String)]
calc4 a b c d e = [(f a', g op1, f b', g op3, f c', g op2, f d', show e) |
    [a',b',c',d'] <- nub(permutations [a,b,c,d]),
    op1 <- ops,
    op2 <- ops,
    op3 <- ops,
    op3 (op2 (op1 a' b') c') d' == e]
-- | Solutions of the shape @(z op2 (x op1 y)) op3 w@.
calc5 :: Double
      -> Double
      -> Double
      -> Double
      -> Double
      -> [(String, String, String, String, String, String, String, String)]
calc5 a b c d e = [(f a', g op1, f b', g op3, f c', g op2, f d', show e) |
    [a',b',c',d'] <- nub(permutations [a,b,c,d]),
    op1 <- ops,
    op2 <- ops,
    op3 <- ops,
    op3 (op2 c' (op1 a' b')) d' == e]
-- | Solutions of the shape @w op3 ((x op1 y) op2 z)@.
calc6 :: Double
      -> Double
      -> Double
      -> Double
      -> Double
      -> [(String, String, String, String, String, String, String, String)]
calc6 a b c d e = [(f a', g op1, f b', g op3, f c', g op2, f d', show e) |
    [a',b',c',d'] <- nub(permutations [a,b,c,d]),
    op1 <- ops,
    op2 <- ops,
    op3 <- ops,
    op3 d' (op2 (op1 a' b') c') == e]
-- | Solutions of the shape @w op3 (z op2 (x op1 y))@.
calc7 :: Double
      -> Double
      -> Double
      -> Double
      -> Double
      -> [(String, String, String, String, String, String, String, String)]
calc7 a b c d e = [(f a', g op1, f b', g op3, f c', g op2, f d', show e) |
    [a',b',c',d'] <- nub(permutations [a,b,c,d]),
    op1 <- ops,
    op2 <- ops,
    op3 <- ops,
    op3 d' (op2 c' (op1 a' b')) == e]
-- | 'True' iff the roll cannot reach the target: every grouping scheme
-- ('calc' through 'calc7') yields no solutions.
--
-- A type signature is added for consistency with the rest of the module
-- (it matches the previously inferred type).  As before, only
-- five-element lists @[a,b,c,d,target]@ are handled.
combined :: [Double] -> Bool
combined [a, b, c, d, e] =
  null (calc a b c d e)
    && null (calc2 a b c d e)
    -- calc3..calc7 share a result type, so the repetition collapses:
    && all null
        [ calc3 a b c d e
        , calc4 a b c d e
        , calc5 a b c d e
        , calc6 a b c d e
        , calc7 a b c d e
        ]
-- | Render a @(x op1 y) op2 z@ solution ('calc') as an HTML line.
h :: (String, String, String, String, String, String) -> String
h (n1, o1, n2, o2, n3, goal) =
  concat ["(", n1, o1, n2, ")", o2, n3, " = ", goal, "<br /> "]

-- | Render an @x op2 (y op1 z)@ solution ('calc2') as an HTML line.
h2 :: (String, String, String, String, String, String) -> String
h2 (n1, o1, n2, o2, n3, goal) =
  concat [n1, o2, "(", n2, o1, n3, ") = ", goal, "<br /> "]

-- | Render a @(x op1 y) op2 (z op3 w)@ solution ('calc3') as an HTML line.
h3 :: (String, String, String, String, String, String, String, String) -> String
h3 (n1, o1, n2, o2, n3, o3, n4, goal) =
  concat ["(", n1, o1, n2, ")", o2, "(", n3, o3, n4, ") = ", goal, "<br /> "]
-- | Render an @((x op1 y) op2 z) op3 w@ solution ('calc4') as an HTML line.
--
-- Fixed: the original emitted one more @")"@ than @"("@
-- (e.g. @"((1+2)*3)-4) = 5"@), so every rendered solution had
-- unbalanced parentheses.
h4 :: (String, String, String, String, String, String, String, String) -> String
h4 (a',b',c',d',e',f',g', goal) = "((" ++ a' ++ b' ++ c' ++ ")" ++
  f' ++ e' ++ ")" ++ d' ++ g' ++ " = " ++ goal ++ "<br /> "

-- | Render a @(z op2 (x op1 y)) op3 w@ solution ('calc5') as an HTML line.
--
-- Fixed: same stray trailing @")"@ as 'h4' had.
h5 :: (String, String, String, String, String, String, String, String) -> String
h5 (a',b',c',d',e',f',g', goal) = "(" ++ e' ++ f' ++ "(" ++ a' ++
  b' ++ c' ++ "))" ++ d' ++ g' ++ " = " ++ goal ++ "<br /> "
-- | Render a @w op3 ((x op1 y) op2 z)@ solution ('calc6') as an HTML line.
h6 :: (String, String, String, String, String, String, String, String) -> String
h6 (n1, o1, n2, o2, n3, o3, n4, goal) =
  concat [n4, o2, "((", n1, o1, n2, ")", o3, n3, ") = ", goal, "<br /> "]

-- | Render a @w op3 (z op2 (x op1 y))@ solution ('calc7') as an HTML line.
h7 :: (String, String, String, String, String, String, String, String) -> String
h7 (n1, o1, n2, o2, n3, o3, n4, goal) =
  concat [n4, o2, "(", n3, o3, "(", n1, o1, n2, ")) = ", goal, "<br /> "]
-- | Placeholder line shown when an eight-field solver produced no results.
pim :: [(String, String, String, String, String, String, String, String)] -> [String]
pim [] = [" -- There are no solutions in this category"]
pim _  = [" "]

-- | As 'pim', for the six-field solvers 'calc' and 'calc2'.
pim' :: [(String, String, String, String, String, String)] -> [String]
pim' [] = [" -- There are no solutions in this category"]
pim' _  = [" "]
-- | Assemble the full HTML report for @[a, b, c, d, target]@: one
-- section per grouping scheme ('calc'..'calc7'), each with a header,
-- its rendered solutions, and a placeholder line when empty.
-- NOTE(review): each calcN is evaluated twice — once for 'map', once
-- for the 'pim'/'pim'' emptiness check.
ca :: [Double] -> [String]
ca [a, b, c, d, e] = ["Using the result from two numbers left of a third.<br />"] ++
     map h (calc a b c d e) ++
     pim' (calc a b c d e) ++
     ["<br /><br />Using a number left of the result obtained from two other numbers.<br />"] ++
     map h2 (calc2 a b c d e) ++
     pim' (calc2 a b c d e) ++
     ["<br /><br />Using two numbers and then the remaining two numbers - then using those results.<br />"] ++
     map h3 (calc3 a b c d e) ++
     pim (calc3 a b c d e) ++
     ["<br /><br />Using the result from two numbers left of a third - then that result left of the remaining number.<br />"] ++
     map h4 (calc4 a b c d e) ++
     pim (calc4 a b c d e) ++
     ["<br /><br />Using the third number left of the result obtained from the first two - then that result left of the fourth number.<br />"] ++
     map h5 (calc5 a b c d e) ++
     pim (calc5 a b c d e) ++
     ["<br /><br />Using the the remaining number to the left of the result of using the result of two numbers' left of another.<br />"] ++
     map h6 (calc6 a b c d e) ++
     pim (calc6 a b c d e) ++
     ["<br /><br />Using the remaining number to the left of the result from using the a number left of the result from two others.<br />"] ++
     map h7 (calc7 a b c d e) ++
     pim (calc7 a b c d e)
ca _ = ["What?"]
-- | Concatenated HTML report for a five-element game @[a,b,c,d,target]@;
-- empty for any other list length.
cars :: [Double] -> String
cars xs@[_, _, _, _, _] = concat (ca xs)
cars _ = []

-- | Parse comma-separated text into a list of 'Double's.
tru :: T.Text -> [Double]
tru = map (read . T.unpack) . T.split (== ',')
-- | Message shown for an unsolvable roll @[a,b,c,d,target]@.
fz :: [Double] -> String
fz [a, b, c, d, e] =
  "Impossible. Roll " ++ unwords (map (show . round) [a, b, c, d])
    ++ " cannot be made into " ++ show (round e) ++ "."
fz _ = "Error in fz in Fm.hs"
-- | Produce the HTML report for a game @[a,b,c,d,target]@: either the
-- "Impossible" message ('fz') or the rolled numbers followed by every
-- solution ('cars').
truck :: [Double] -> IO String
truck x = do
  if (combined x)
    then return $ fz x
    else do
      -- NOTE(review): exactly five elements are assumed here; shorter
      -- input would make the (!!) indexing below fail.
      let y = map round x
      let z = show (y !! 0) ++ " " ++ show (y !! 1) ++ " " ++ show (y !! 2) ++ " " ++ show (y !! 3) ++ "<br /><br />"
      let a = (" " ++ z ++ (cars x) ++ "<br />") :: String
      return a
-- | Sample game (four rolls plus target) for experimentation.
arg :: [Double]
arg = [1,1,1,1,42]
-- | Roll four dice with the given bounds and render the rounded
-- results as comma-separated text; any other list shape yields a
-- fixed placeholder.
rText :: [Int] -> IO T.Text
rText [a,b,c,d] = do
  x <- roll a b c d
  return $ rollFunc $ map show $ map round x
rText _ = return $ T.pack "String"
-- | Another sample game: make 20 from four ones.
xyz :: [Double]
xyz = [1, 1, 1, 1, 20]
-- | Solve the sample game and print the HTML report.
main :: IO ()
main = truck [1,1,1,1,20] >>= print
| dschalk/monads-for-functional-javascript | Fm.hs | mit | 12,241 | 0 | 27 | 4,218 | 5,962 | 3,390 | 2,572 | 299 | 2 |
-- |
-- Module : Network.TLS.Record.Engage
-- License : BSD-style
-- Maintainer : Vincent Hanquez <vincent@snarc.org>
-- Stability : experimental
-- Portability : unknown
--
-- Engage a record into the Record layer.
-- The record is compressed, added some integrity field, then encrypted.
--
{-# LANGUAGE BangPatterns #-}
module Network.TLS.Record.Engage
( engageRecord
) where
import Control.Applicative
import Control.Monad.State
import Crypto.Cipher.Types (AuthTag(..))
import Network.TLS.Cap
import Network.TLS.Record.State
import Network.TLS.Record.Types
import Network.TLS.Cipher
import Network.TLS.Compression
import Network.TLS.Wire
import Network.TLS.Packet
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
import qualified Data.ByteArray as B (convert)
-- | Run a plaintext record through the record layer: compress it first,
-- then add integrity data and encrypt, all inside 'RecordM'.
engageRecord :: Record Plaintext -> RecordM (Record Ciphertext)
engageRecord record = compressRecord record >>= encryptRecord
-- | Compress the record fragment with the negotiated compression state.
compressRecord :: Record Plaintext -> RecordM (Record Compressed)
compressRecord record =
    onRecordFragment record $ fragmentCompress $ \bytes -> do
        withCompression $ compressionDeflate bytes
-- when Tx Encrypted is set, we pass the data through encryptContent, otherwise
-- we just return the compress payload directly as the ciphered one
--
encryptRecord :: Record Compressed -> RecordM (Record Ciphertext)
encryptRecord record = onRecordFragment record $ fragmentCipher $ \bytes -> do
    st <- get
    case stCipher st of
        Nothing -> return bytes
        _ -> encryptContent record bytes
-- | Encrypt the payload according to the bulk cipher kind held in the
-- crypt state: block and stream ciphers get a MAC over header+content
-- appended before encryption; AEAD ciphers authenticate via their own
-- tag; an uninitialized bulk state passes the data through unchanged.
encryptContent :: Record Compressed -> ByteString -> RecordM ByteString
encryptContent record content = do
    cst <- getCryptState
    bulk <- getBulk
    case cstKey cst of
        BulkStateBlock encryptF -> do
            digest <- makeDigest (recordToHeader record) content
            let content' = B.concat [content, digest]
            encryptBlock encryptF content' bulk
        BulkStateStream encryptF -> do
            digest <- makeDigest (recordToHeader record) content
            let content' = B.concat [content, digest]
            encryptStream encryptF content'
        BulkStateAEAD encryptF ->
            encryptAead encryptF content record
        BulkStateUninitialized ->
            return content
-- | CBC-style encryption: pad the payload to a whole number of blocks
-- (every pad byte holds @padLen - 1@, and a full extra block is added
-- when the payload already fits), then either prepend the IV to the
-- ciphertext (versions with an explicit block IV) or store the final
-- IV back into the crypt state for chaining.
encryptBlock :: BulkBlock -> ByteString -> Bulk -> RecordM ByteString
encryptBlock encryptF content bulk = do
    cst <- getCryptState
    ver <- getRecordVersion
    let blockSize = fromIntegral $ bulkBlockSize bulk
    let msg_len = B.length content
    let padding = if blockSize > 0
            then
                let padbyte = blockSize - (msg_len `mod` blockSize) in
                let padbyte' = if padbyte == 0 then blockSize else padbyte in B.replicate padbyte' (fromIntegral (padbyte' - 1))
            else
                B.empty
    let (e, iv') = encryptF (cstIV cst) $ B.concat [ content, padding ]
    if hasExplicitBlockIV ver
        then return $ B.concat [cstIV cst,e]
        else do
            modify $ \tstate -> tstate { stCryptState = cst { cstIV = iv' } }
            return e
-- | Stream encryption: encrypt and store the updated stream state back
-- into the crypt state (the bangs force both results eagerly).
encryptStream :: BulkStream -> ByteString -> RecordM ByteString
encryptStream (BulkStream encryptF) content = do
    cst <- getCryptState
    let (!e, !newBulkStream) = encryptF content
    modify $ \tstate -> tstate { stCryptState = cst { cstKey = BulkStateStream newBulkStream } }
    return e
-- | AEAD encryption: nonce is IV ++ encoded sequence number, the
-- additional data is sequence number ++ encoded record header, and the
-- output is sequence number ++ ciphertext ++ auth tag.  The record
-- (sequence) state is bumped afterwards.
encryptAead :: BulkAEAD
            -> ByteString -> Record Compressed
            -> RecordM ByteString
encryptAead encryptF content record = do
    cst <- getCryptState
    encodedSeq <- encodeWord64 <$> getMacSequence
    let hdr = recordToHeader record
        ad = B.concat [encodedSeq, encodeHeader hdr]
        nonce = B.concat [cstIV cst, encodedSeq]
        (e, AuthTag authtag) = encryptF nonce content ad
    modify incrRecordState
    return $ B.concat [encodedSeq, e, B.convert authtag]
-- | Fetch the cipher-state component of the record-layer state.
getCryptState :: RecordM CryptState
getCryptState = gets stCryptState
| lancelotsix/hs-tls | core/Network/TLS/Record/Engage.hs | bsd-3-clause | 4,003 | 0 | 19 | 974 | 1,028 | 524 | 504 | 81 | 4 |
module Test18 where
-- | Duplicate the sole element of a singleton list.
f [y] = [y, y]
-- | 'f' applied to the singleton list [1].
g = f [1]
| kmate/HaRe | old/testing/refacFunDef/Test18AST.hs | bsd-3-clause | 52 | 0 | 6 | 16 | 36 | 21 | 15 | 3 | 1 |
module A1 where
import D1
-- | Sum of squares of @xs@ (via 'sq' from D1) plus 'sumSquares' of @xs@.
-- NOTE(review): @ys@ is unused — this module is a fixture for the
-- remove-one-parameter refactoring, so that appears intentional.
sumSq xs ys= sum (map sq xs) + sumSquares xs
-- | 'sumSq' partially applied to [1..4]; still awaiting its second argument.
main = sumSq [1..4]
| kmate/HaRe | old/testing/rmOneParameter/A1_TokOut.hs | bsd-3-clause | 97 | 0 | 8 | 25 | 48 | 25 | 23 | 4 | 1 |
module Vectorise.Env (
Scope(..),
-- * Local Environments
LocalEnv(..),
emptyLocalEnv,
-- * Global Environments
GlobalEnv(..),
initGlobalEnv,
extendImportedVarsEnv,
extendFamEnv,
setPAFunsEnv,
setPRFunsEnv,
modVectInfo
) where
import HscTypes
import InstEnv
import FamInstEnv
import CoreSyn
import Type
import Class
import TyCon
import DataCon
import VarEnv
import VarSet
import Var
import NameSet
import Name
import NameEnv
import FastString
import Data.Maybe
-- |Indicates what scope something (a variable) is in.
--
data Scope a b
  = Global a  -- ^top-level scope
  | Local b   -- ^local scope
-- LocalEnv -------------------------------------------------------------------
-- |The local environment.
--
data LocalEnv
  = LocalEnv
  { local_vars :: VarEnv (Var, Var)
    -- ^Mapping from local variables to their vectorised and lifted versions.
  , local_tyvars :: [TyVar]
    -- ^In-scope type variables.
  , local_tyvar_pa :: VarEnv CoreExpr
    -- ^Mapping from tyvars to their PA dictionaries.
  , local_bind_name :: FastString
    -- ^Local binding name. This is only used to generate better names for hoisted
    -- expressions.
  }
-- |Create an empty local environment.
--
emptyLocalEnv :: LocalEnv
emptyLocalEnv = LocalEnv
  { local_vars = emptyVarEnv
  , local_tyvars = []
  , local_tyvar_pa = emptyVarEnv
  , local_bind_name = fsLit "fn"
  }
-- GlobalEnv ------------------------------------------------------------------
-- |The global environment: entities that exist at top-level.
--
data GlobalEnv
  = GlobalEnv
  { global_vect_avoid :: Bool
    -- ^'True' implies to avoid vectorisation as far as possible.
  , global_vars :: VarEnv Var
    -- ^Mapping from global variables to their vectorised versions — aka the /vectorisation
    -- map/.
  , global_parallel_vars :: VarSet
    -- ^The domain of 'global_vars'.
    --
    -- This information is not redundant as it is impossible to extract the domain from a
    -- 'VarEnv' (which is keyed on uniques alone). Moreover, we have mapped variables that
    -- do not involve parallelism — e.g., the workers of vectorised, but scalar data types.
    -- In addition, workers of parallel data types that we could not vectorise also need to
    -- be tracked.
  , global_vect_decls :: VarEnv (Maybe (Type, CoreExpr))
    -- ^Mapping from global variables that have a vectorisation declaration to the right-hand
    -- side of that declaration and its type and mapping variables that have NOVECTORISE
    -- declarations to 'Nothing'.
  , global_tycons :: NameEnv TyCon
    -- ^Mapping from TyCons to their vectorised versions. The vectorised version will be
    -- identical to the original version if it is not changed by vectorisation. In any case,
    -- if a tycon appears in the domain of this mapping, it was successfully vectorised.
  , global_parallel_tycons :: NameSet
    -- ^Type constructors whose definition directly or indirectly includes a parallel type,
    -- such as '[::]'.
    --
    -- NB: This information is not redundant as some types have got a mapping in
    -- 'global_tycons' (to a type other than themselves) and are still not parallel. An
    -- example is '(->)'. Moreover, some types have *not* got a mapping in 'global_tycons'
    -- (because they couldn't be vectorised), but still contain parallel types.
  , global_datacons :: NameEnv DataCon
    -- ^Mapping from DataCons to their vectorised versions.
  , global_pa_funs :: NameEnv Var
    -- ^Mapping from TyCons to their PA dfuns.
  , global_pr_funs :: NameEnv Var
    -- ^Mapping from TyCons to their PR dfuns.
  , global_inst_env :: InstEnvs
    -- ^External package inst-env & home-package inst-env for class instances.
  , global_fam_inst_env :: FamInstEnvs
    -- ^External package inst-env & home-package inst-env for family instances.
  , global_bindings :: [(Var, CoreExpr)]
    -- ^Hoisted bindings — temporary storage for toplevel bindings during code gen.
  }
-- |Create an initial global environment.
--
-- We add scalar variables and type constructors identified by vectorisation pragmas already here
-- to the global table, so that we can query scalarness during vectorisation, and especially, when
-- vectorising the scalar entities' definitions themselves.
--
initGlobalEnv :: Bool
              -> VectInfo
              -> [CoreVect]
              -> InstEnvs
              -> FamInstEnvs
              -> GlobalEnv
initGlobalEnv vectAvoid info vectDecls instEnvs famInstEnvs
  = GlobalEnv
  { global_vect_avoid = vectAvoid
  , global_vars = mapVarEnv snd $ vectInfoVar info
  , global_vect_decls = mkVarEnv vects
  , global_parallel_vars = vectInfoParallelVars info
  , global_parallel_tycons = vectInfoParallelTyCons info
  , global_tycons = mapNameEnv snd $ vectInfoTyCon info
  , global_datacons = mapNameEnv snd $ vectInfoDataCon info
  , global_pa_funs = emptyNameEnv
  , global_pr_funs = emptyNameEnv
  , global_inst_env = instEnvs
  , global_fam_inst_env = famInstEnvs
  , global_bindings = []
  }
  where
    -- VECTORISE declarations keep their RHS and its type; NOVECTORISE
    -- declarations map to 'Nothing'.
    vects = [(var, Just (ty, exp)) | Vect var exp@(Var rhs_var) <- vectDecls
                                   , let ty = varType rhs_var] ++
            -- FIXME: we currently only allow RHSes consisting of a
            -- single variable to be able to obtain the type without
            -- inference — see also 'TcBinds.tcVect'
            [(var, Nothing) | NoVect var <- vectDecls]
-- Operators on Global Environments -------------------------------------------
-- |Extend the list of global variables in an environment.
--
-- |Record vectorised versions for additional global variables.
--
extendImportedVarsEnv :: [(Var, Var)] -> GlobalEnv -> GlobalEnv
extendImportedVarsEnv prs env
  = env { global_vars = extendVarEnvList (global_vars env) prs }

-- |Add the given type-family instances to the home-package part of the
-- family-instance environment.
--
extendFamEnv :: [FamInst] -> GlobalEnv -> GlobalEnv
extendFamEnv fis env
  = let (extPkgFamEnv, homeFamEnv) = global_fam_inst_env env
    in env { global_fam_inst_env = (extPkgFamEnv, extendFamInstEnvList homeFamEnv fis) }

-- |Set the list of PA functions in an environment.
--
setPAFunsEnv :: [(Name, Var)] -> GlobalEnv -> GlobalEnv
setPAFunsEnv assocs env = env { global_pa_funs = mkNameEnv assocs }

-- |Set the list of PR functions in an environment.
--
setPRFunsEnv :: [(Name, Var)] -> GlobalEnv -> GlobalEnv
setPRFunsEnv assocs env = env { global_pr_funs = mkNameEnv assocs }
-- |Compute vectorisation information that goes into 'ModGuts' (and is stored in interface files).
-- The incoming 'vectInfo' is that from the 'HscEnv' and 'EPS'. The outgoing one contains only the
-- declarations for the currently compiled module; this includes variables, type constructors, and
-- data constructors referenced in VECTORISE pragmas, even if they are defined in an imported
-- module.
--
-- The variables explicitly include class selectors and dfuns.
--
modVectInfo :: GlobalEnv -> [Id] -> [TyCon] -> [CoreVect]-> VectInfo -> VectInfo
modVectInfo env mg_ids mg_tyCons vectDecls info
  = info
    { vectInfoVar = mk_env ids (global_vars env)
    , vectInfoTyCon = mk_env tyCons (global_tycons env)
    , vectInfoDataCon = mk_env dataCons (global_datacons env)
      -- only the parallel vars/tycons NEW in this module (the incoming
      -- 'info' contributions are subtracted), restricted to local ids
    , vectInfoParallelVars = (global_parallel_vars env `minusVarSet` vectInfoParallelVars info)
                             `intersectVarSet` (mkVarSet ids)
    , vectInfoParallelTyCons = global_parallel_tycons env `minusNameSet` vectInfoParallelTyCons info
    }
  where
    vectIds = [id | Vect id _ <- vectDecls] ++
              [id | VectInst id <- vectDecls]
    vectTypeTyCons = [tycon | VectType _ tycon _ <- vectDecls] ++
                     [tycon | VectClass tycon <- vectDecls]
    vectDataCons = concatMap tyConDataCons vectTypeTyCons
    ids = mg_ids ++ vectIds ++ dataConIds ++ selIds
    tyCons = mg_tyCons ++ vectTypeTyCons
    dataCons = concatMap tyConDataCons mg_tyCons ++ vectDataCons
    dataConIds = map dataConWorkId dataCons
    selIds = concat [ classAllSelIds cls
                    | tycon <- tyCons
                    , cls <- maybeToList . tyConClass_maybe $ tycon]
    -- Produce an entry for every declaration that is mentioned in the domain of the 'inspectedEnv'
    mk_env decls inspectedEnv
      = mkNameEnv [(name, (decl, to))
                  | decl <- decls
                  , let name = getName decl
                  , Just to <- [lookupNameEnv inspectedEnv name]]
| tjakway/ghcjvm | compiler/vectorise/Vectorise/Env.hs | bsd-3-clause | 9,317 | 0 | 14 | 2,809 | 1,242 | 730 | 512 | 116 | 1 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- | Get information on modules, expreesions, and identifiers
module GHCi.UI.Info
( ModInfo(..)
, SpanInfo(..)
, spanInfoFromRealSrcSpan
, collectInfo
, findLoc
, findNameUses
, findType
, getModInfo
) where
import Control.Exception
import Control.Monad
import Control.Monad.Trans.Class
import Control.Monad.Trans.Except
import Control.Monad.Trans.Maybe
import Data.Data
import Data.Function
import Data.List
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import Data.Maybe
import Data.Time
import Prelude hiding (mod)
import System.Directory
import qualified CoreUtils
import Desugar
import DynFlags (HasDynFlags(..))
import FastString
import GHC
import GhcMonad
import Name
import NameSet
import Outputable
import SrcLoc
import TcHsSyn
import Var
-- | Info about a module. This information is generated every time a
-- module is loaded.
data ModInfo = ModInfo
    { modinfoSummary :: !ModSummary
      -- ^ Summary generated by GHC. Can be used to access more
      -- information about the module.
    , modinfoSpans :: [SpanInfo]
      -- ^ Generated set of information about all spans in the
      -- module that correspond to some kind of identifier for
      -- which there will be type info and/or location info.
    , modinfoInfo :: !ModuleInfo
      -- ^ Again, useful from GHC for accessing information
      -- (exports, instances, scope) from a module.
    , modinfoLastUpdate :: !UTCTime
      -- ^ When the info was collected; used to decide cache staleness.
    }
-- | Type of some span of source code. Most of these fields are
-- unboxed but Haddock doesn't show that.
data SpanInfo = SpanInfo
    { spaninfoSrcSpan :: {-# UNPACK #-} !RealSrcSpan
      -- ^ The span we associate information with
    , spaninfoType :: !(Maybe Type)
      -- ^ The 'Type' associated with the span
    , spaninfoVar :: !(Maybe Id)
      -- ^ The actual 'Var' associated with the span, if
      -- any. This can be useful for accessing a variety of
      -- information about the identifier such as module,
      -- locality, definition location, etc.
    }
-- | Does the first span contain (or equal) the second?
-- ('containsSpan' lifted to 'SpanInfo'.)
containsSpanInfo :: SpanInfo -> SpanInfo -> Bool
containsSpanInfo outer inner =
    spaninfoSrcSpan outer `containsSpan` spaninfoSrcSpan inner

-- | Keep only the 'SpanInfo's lying within (or equal to) the given one.
spaninfosWithin :: [SpanInfo] -> SpanInfo -> [SpanInfo]
spaninfosWithin candidates si =
    [s | s <- candidates, si `containsSpanInfo` s]

-- | Build a 'SpanInfo' from a 'RealSrcSpan' plus optional type and
-- identifier information.
spanInfoFromRealSrcSpan :: RealSrcSpan -> Maybe Type -> Maybe Id -> SpanInfo
spanInfoFromRealSrcSpan = SpanInfo

-- | A 'SpanInfo' carrying no type or identifier information.
spanInfoFromRealSrcSpan' :: RealSrcSpan -> SpanInfo
spanInfoFromRealSrcSpan' spn = SpanInfo spn Nothing Nothing

-- | The file a 'RealSrcSpan' points into, as a 'FilePath'.
srcSpanFilePath :: RealSrcSpan -> FilePath
srcSpanFilePath spn = unpackFS (srcSpanFile spn)
-- | Try to find the location of the given identifier at the given
-- position in the module.
findLoc :: GhcMonad m
        => Map ModuleName ModInfo
        -> RealSrcSpan
        -> String
        -> ExceptT SDoc m (ModInfo,Name,SrcSpan)
findLoc infos span0 string = do
    -- Resolve the file of 'span0' to a loaded module, then the string
    -- to a 'Name'; fail with a user-facing message at each step.
    name <- maybeToExceptT "Couldn't guess that module name. Does it exist?" $
            guessModule infos (srcSpanFilePath span0)
    info <- maybeToExceptT "No module info for current file! Try loading it?" $
            MaybeT $ pure $ M.lookup name infos
    name' <- findName infos span0 info string
    case getSrcSpan name' of
      UnhelpfulSpan{} -> do
        throwE ("Found a name, but no location information." <+>
                "The module is:" <+>
                maybe "<unknown>" (ppr . moduleName)
                      (nameModule_maybe name'))
      span' -> return (info,name',span')
-- | Find any uses of the given identifier in the codebase.
findNameUses :: (GhcMonad m)
             => Map ModuleName ModInfo
             -> RealSrcSpan
             -> String
             -> ExceptT SDoc m [SrcSpan]
findNameUses infos span0 string =
    locToSpans <$> findLoc infos span0 string
  where
    -- The definition site plus every span whose 'Var' resolves to the
    -- same 'Name', with enclosing duplicates stripped.
    locToSpans (modinfo,name',span') =
        stripSurrounding (span' : map toSrcSpan spans)
      where
        toSrcSpan = RealSrcSpan . spaninfoSrcSpan
        spans = filter ((== Just name') . fmap getName . spaninfoVar)
                       (modinfoSpans modinfo)
    -- | Filter out redundant spans which surround/contain other spans.
    stripSurrounding :: [SrcSpan] -> [SrcSpan]
    stripSurrounding xs = filter (not . isRedundant) xs
      where
        isRedundant x = any (x `strictlyContains`) xs
        (RealSrcSpan s1) `strictlyContains` (RealSrcSpan s2)
          = s1 /= s2 && s1 `containsSpan` s2
        _ `strictlyContains` _ = False
-- | Try to resolve the name located at the given position, or
-- otherwise resolve based on the current module's scope.
findName :: GhcMonad m
         => Map ModuleName ModInfo
         -> RealSrcSpan
         -> ModInfo
         -> String
         -> ExceptT SDoc m Name
findName infos span0 mi string =
    case resolveName (modinfoSpans mi) (spanInfoFromRealSrcSpan' span0) of
      Nothing -> tryExternalModuleResolution
      Just name ->
        case getSrcSpan name of
          UnhelpfulSpan {} -> tryExternalModuleResolution
          RealSrcSpan {} -> return (getName name)
  where
    -- Fall back to looking the string up in the module's top-level scope.
    tryExternalModuleResolution =
      case find (matchName $ mkFastString string)
                (fromMaybe [] (modInfoTopLevelScope (modinfoInfo mi))) of
        Nothing -> throwE "Couldn't resolve to any modules."
        Just imported -> resolveNameFromModule infos imported
    -- Compare by occurrence name only (module-insensitive).
    matchName :: FastString -> Name -> Bool
    matchName str name =
      str ==
      occNameFS (getOccName name)
-- | Try to resolve the name from another (loaded) module's exports.
resolveNameFromModule :: GhcMonad m
                      => Map ModuleName ModInfo
                      -> Name
                      -> ExceptT SDoc m Name
resolveNameFromModule infos name = do
    modL <- maybe (throwE $ "No module for" <+> ppr name) return $
            nameModule_maybe name
    info <- maybe (throwE (ppr (moduleUnitId modL) <> ":" <>
                           ppr modL)) return $
            M.lookup (moduleName modL) infos
    maybe (throwE "No matching export in any local modules.") return $
        find (matchName name) (modInfoExports (modinfoInfo info))
  where
    -- Compare by occurrence name only (module-insensitive).
    matchName :: Name -> Name -> Bool
    matchName x y = occNameFS (getOccName x) ==
                    occNameFS (getOccName y)
-- | The innermost identifier ('Var') covering the given span, if any.
-- Spans are searched innermost-first, hence the 'reverse'.
resolveName :: [SpanInfo] -> SpanInfo -> Maybe Var
resolveName spans' si =
    listToMaybe (mapMaybe spaninfoVar (reverse spans' `spaninfosWithin` si))
-- | Try to find the type of the given span.
findType :: GhcMonad m
         => Map ModuleName ModInfo
         -> RealSrcSpan
         -> String
         -> ExceptT SDoc m (ModInfo, Type)
findType infos span0 string = do
    name <- maybeToExceptT "Couldn't guess that module name. Does it exist?" $
            guessModule infos (srcSpanFilePath span0)
    info <- maybeToExceptT "No module info for current file! Try loading it?" $
            MaybeT $ pure $ M.lookup name infos
    -- Prefer recorded span info; otherwise typecheck the string itself.
    case resolveType (modinfoSpans info) (spanInfoFromRealSrcSpan' span0) of
      Nothing -> (,) info <$> lift (exprType string)
      Just ty -> return (info, ty)
  where
    -- | Try to resolve the type display from the given span.
    resolveType :: [SpanInfo] -> SpanInfo -> Maybe Type
    resolveType spans' si = listToMaybe $ mapMaybe spaninfoType $
                            reverse spans' `spaninfosWithin` si
-- | Guess a module name from a file path.
guessModule :: GhcMonad m
            => Map ModuleName ModInfo -> FilePath -> MaybeT m ModuleName
guessModule infos fp = do
    target <- lift $ guessTarget fp Nothing
    case targetId target of
      TargetModule mn -> return mn
      TargetFile fp' _ -> guessModule' fp'
  where
    -- Second chance: match against known module file paths, then retry
    -- with a path relative to the current directory.
    guessModule' :: GhcMonad m => FilePath -> MaybeT m ModuleName
    guessModule' fp' = case findModByFp fp' of
      Just mn -> return mn
      Nothing -> do
        fp'' <- liftIO (makeRelativeToCurrentDirectory fp')
        target' <- lift $ guessTarget fp'' Nothing
        case targetId target' of
          TargetModule mn -> return mn
          _ -> MaybeT . pure $ findModByFp fp''
    -- Look the path up among the loaded modules' source locations.
    findModByFp :: FilePath -> Maybe ModuleName
    findModByFp fp' = fst <$> find ((Just fp' ==) . mifp) (M.toList infos)
      where
        mifp :: (ModuleName, ModInfo) -> Maybe FilePath
        mifp = ml_hs_file . ms_location . modinfoSummary . snd
-- | Collect type info data for the loaded modules.
collectInfo :: (GhcMonad m) => Map ModuleName ModInfo -> [ModuleName]
            -> m (Map ModuleName ModInfo)
collectInfo ms loaded = do
    df <- getDynFlags
    liftIO (filterM cacheInvalid loaded) >>= \case
      [] -> return ms
      invalidated -> do
        liftIO (putStrLn ("Collecting type info for " ++
                          show (length invalidated) ++
                          " module(s) ... "))
        foldM (go df) ms invalidated
  where
    -- Collect one module; on any exception, report it and keep the old map.
    go df m name = do { info <- getModInfo name; return (M.insert name info m) }
                   `gcatch`
                   (\(e :: SomeException) -> do
                       liftIO $ putStrLn
                              $ showSDocForUser df alwaysQualify
                              $ "Error while getting type info from" <+>
                                ppr name <> ":" <+> text (show e)
                       return m)
    -- A cache entry is invalid when absent, or when the object file is
    -- newer than the recorded collection time (or missing).
    cacheInvalid name = case M.lookup name ms of
      Nothing -> return True
      Just mi -> do
        let fp = ml_obj_file (ms_location (modinfoSummary mi))
            last' = modinfoLastUpdate mi
        exists <- doesFileExist fp
        if exists
          then (> last') <$> getModificationTime fp
          else return True
-- | Load everything tracked for one module: its summary, the span/type
-- information from the typechecked source, the GHC 'ModuleInfo', and a
-- timestamp recording when the data was collected.
getModInfo :: (GhcMonad m) => ModuleName -> m ModInfo
getModInfo name = do
  summary     <- getModSummary name
  parsed      <- parseModule summary
  typechecked <- typecheckModule parsed
  spans'      <- processAllTypeCheckedModule typechecked
  timestamp   <- liftIO getCurrentTime
  return (ModInfo summary spans' (tm_checked_module_info typechecked) timestamp)
-- | Get ALL source spans in the module: binds, expressions, and
-- patterns, sorted innermost-first and converted to 'SpanInfo'.
processAllTypeCheckedModule :: forall m . GhcMonad m => TypecheckedModule
                            -> m [SpanInfo]
processAllTypeCheckedModule tcm = do
  bts <- mapM getTypeLHsBind $ listifyAllSpans tcs
  ets <- mapM getTypeLHsExpr $ listifyAllSpans tcs
  pts <- mapM getTypeLPat $ listifyAllSpans tcs
  return $ mapMaybe toSpanInfo
         $ sortBy cmpSpan
         $ catMaybes (bts ++ ets ++ pts)
  where
    tcs = tm_typechecked_source tcm

    -- | Extract 'Id', 'SrcSpan', and 'Type' for 'LHsBind's
    getTypeLHsBind :: LHsBind Id -> m (Maybe (Maybe Id,SrcSpan,Type))
    getTypeLHsBind (L _spn FunBind{fun_id = pid,fun_matches = MG _ _ _typ _})
      = pure $ Just (Just (unLoc pid),getLoc pid,varType (unLoc pid))
    getTypeLHsBind _ = pure Nothing

    -- | Extract 'Id', 'SrcSpan', and 'Type' for 'LHsExpr's
    -- (the type is recovered by desugaring the expression to Core).
    getTypeLHsExpr :: LHsExpr Id -> m (Maybe (Maybe Id,SrcSpan,Type))
    getTypeLHsExpr e = do
      hs_env <- getSession
      (_,mbe) <- liftIO $ deSugarExpr hs_env e
      return $ fmap (\expr -> (mid, getLoc e, CoreUtils.exprType expr)) mbe
      where
        mid :: Maybe Id
        mid | HsVar (L _ i) <- unwrapVar (unLoc e) = Just i
            | otherwise = Nothing

        unwrapVar (HsWrap _ var) = var
        unwrapVar e' = e'

    -- | Extract 'Id', 'SrcSpan', and 'Type' for 'LPats's
    getTypeLPat :: LPat Id -> m (Maybe (Maybe Id,SrcSpan,Type))
    getTypeLPat (L spn pat) =
      pure (Just (getMaybeId pat,spn,hsPatType pat))
      where
        getMaybeId (VarPat (L _ vid)) = Just vid
        getMaybeId _ = Nothing

    -- | Get ALL source spans in the source.
    listifyAllSpans :: Typeable a => TypecheckedSource -> [Located a]
    listifyAllSpans = everythingAllSpans (++) [] ([] `mkQ` (\x -> [x | p x]))
      where
        p (L spn _) = isGoodSrcSpan spn

    -- | Variant of @syb@'s @everything@ (which summarises all nodes
    -- in top-down, left-to-right order) with a stop-condition on 'NameSet's
    everythingAllSpans :: (r -> r -> r) -> r -> GenericQ r -> GenericQ r
    everythingAllSpans k z f x
      | (False `mkQ` (const True :: NameSet -> Bool)) x = z
      | otherwise = foldl k (f x) (gmapQ (everythingAllSpans k z f) x)

    -- Order spans so that contained (inner) spans come first.
    cmpSpan (_,a,_) (_,b,_)
      | a `isSubspanOf` b = LT
      | b `isSubspanOf` a = GT
      | otherwise = EQ

    -- | Pretty print the types into a 'SpanInfo'.
    toSpanInfo :: (Maybe Id,SrcSpan,Type) -> Maybe SpanInfo
    toSpanInfo (n,RealSrcSpan spn,typ)
      = Just $ spanInfoFromRealSrcSpan spn (Just typ) n
    toSpanInfo _ = Nothing
-- helper stolen from @syb@ package
-- | A fully generic query: applicable to any value with a 'Data' instance.
type GenericQ r = forall a. Data a => a -> r
-- | Make a generic query from a type-specific one: yields the default
-- @r@ unless the argument's runtime type matches @b@ (checked via
-- 'cast'), in which case @br@ is applied.
mkQ :: (Typeable a, Typeable b) => r -> (b -> r) -> a -> r
(r `mkQ` br) a = maybe r br (cast a)
| tjakway/ghcjvm | ghc/GHCi/UI/Info.hs | bsd-3-clause | 13,975 | 1 | 21 | 4,161 | 3,405 | 1,738 | 1,667 | 265 | 5 |
module LiquidArray where
import Language.Haskell.Liquid.Prelude (liquidAssume)
{-@ set :: forall a <p :: x0: Int -> x1: a -> Prop, r :: x0: Int -> Prop>.
      i: Int<r> ->
      x: a<p i> ->
      a: (j: {v: Int<r> | v != i} -> a<p j>) ->
      (k: Int<r> -> a<p k>) @-}
-- Functional update of a function-encoded array: return a function
-- equal to @a@ except at index @i@, where it yields @x@.
set :: Int -> a -> (Int -> a) -> (Int -> a)
set i x a = \k -> if k == i then x else a k
{-@ get :: forall a <p :: x0: Int -> x1: a -> Prop, r :: x0: Int -> Prop>.
             i: Int<r> ->
             a: (j: Int<r> -> a<p j>) ->
             a<p i> @-}
-- Read index @i@ of a function-encoded array.
get :: Int -> (Int -> a) -> a
get i a = a i
-------------------------------------------------------------------------------
---------------------------- memoization --------------------------------------
-------------------------------------------------------------------------------
{-@ measure fib :: Int -> Int @-}
{-@ type FibV = j:Int -> {v:Int| ((v != 0) => (v = fib(j)))} @-}
{-@ assume axiom_fib :: i:Int -> {v: Bool | Prop v <=> (fib i = (if i <= 1 then 1 else (fib (i-1) + fib (i-2)))) } @-}
-- Proof-only axiom for LiquidHaskell; never evaluated at run time
-- (its value is 'undefined').
axiom_fib :: Int -> Bool
axiom_fib i = undefined
{-@ fastFib :: x:Int -> {v:Int | v = fib(x)} @-}
-- Memoised Fibonacci, starting from the all-zero (empty) memo table.
fastFib :: Int -> Int
fastFib n = snd $ fibMemo (\_ -> 0) n
{-@ fibMemo :: FibV -> i:Int -> (FibV, {v: Int | v = fib(i)}) @-}
-- Thread the memo table through the recursion.  A stored 0 means
-- "not yet computed" (per the FibV refinement, genuine entries are
-- never 0), so a 0 lookup triggers the two recursive calls and an
-- update of the table.
fibMemo t i
  | i <= 1
  = (t, liquidAssume (axiom_fib i) (1 :: Int))
  | otherwise
  = case get i t of
      0 -> let (t1, n1) = fibMemo t (i-1)
               (t2, n2) = fibMemo t1 (i-2)
               n = liquidAssume (axiom_fib i) (n1 + n2)
           in (set i n t2, n)
      n -> (t, n)
| mightymoose/liquidhaskell | benchmarks/esop2013-submission/Fib.hs | bsd-3-clause | 1,614 | 0 | 15 | 484 | 342 | 186 | 156 | 20 | 2 |
{-# LANGUAGE TemplateHaskell #-}
-- GHC testsuite case T9738: checks that ANN pragmas (on a type, a data
-- constructor, and the module) inside a declaration quote can be
-- represented and reified.  The top-level splice prints the quoted
-- declarations at compile time and contributes no declarations itself,
-- so the expected test output is the compile-time 'print'.
module T9738 where
import System.IO
import Language.Haskell.TH
data Foo = MkFoo
$( do decs <- [d| {-# ANN type Foo "hi" #-}
                  {-# ANN MkFoo "there" #-}
                  {-# ANN module "Charley" #-}
              |]
      runIO $ print decs
      runIO $ hFlush stdout
      return [] )
| urbanslug/ghc | testsuite/tests/th/T9738.hs | bsd-3-clause | 345 | 0 | 10 | 125 | 69 | 36 | 33 | 11 | 0 |
-- | Reuse the memory blocks of arrays.
--
-- Enable by setting the environment variable MEMORY_BLOCK_MERGING_REUSE=1.
module Futhark.Optimise.MemoryBlockMerging.Reuse
( reuseInProg
) where
import Futhark.MonadFreshNames
import Futhark.Representation.AST
import Futhark.Representation.ExplicitMemory (ExplicitMemory)
import Futhark.Optimise.MemoryBlockMerging.AuxiliaryInfo
import Futhark.Optimise.MemoryBlockMerging.Types
import Futhark.Optimise.MemoryBlockMerging.Miscellaneous
import Futhark.Optimise.MemoryBlockMerging.Reuse.AllocationSizeHoisting
import Futhark.Optimise.MemoryBlockMerging.Reuse.Core
-- | Run the memory-block reuse pass over every function of the program,
-- collecting a log of what was merged.
reuseInProg :: MonadFreshNames m
            => Prog ExplicitMemory
            -> m (Prog ExplicitMemory, Log)
reuseInProg prog = intraproceduralTransformationWithLog reuseInFunDef prog
-- | Run the reuse pass on a single function: hoist allocation sizes
-- first, then apply the core reuse transformation using auxiliary
-- analysis of the hoisted function.  Debug output describes the
-- auxiliary info both before and after hoisting.
reuseInFunDef :: MonadFreshNames m
              => FunDef ExplicitMemory
              -> m (FunDef ExplicitMemory, Log)
reuseInFunDef fundefOrig = do
  let auxOrig     = getAuxiliaryInfo fundefOrig
      fundefHoist = hoistAllocSizesFunDef fundefOrig
      auxHoist    = getAuxiliaryInfo fundefHoist
  (fundefFinal, proglog) <- coreReuseFunDef fundefHoist
    (auxFirstUses auxHoist) (auxInterferences auxHoist)
    (auxPotentialKernelDataRaceInterferences auxHoist) (auxVarMemMappings auxHoist)
    (auxActualVariables auxHoist) (auxExistentials auxHoist)
  let debugging = debugAuxiliaryInfo auxOrig "Before reuse"
                  >> debugAuxiliaryInfo auxHoist "After allocation size hoisting"
  withDebug debugging $ return (fundefFinal, proglog)
| ihc/futhark | src/Futhark/Optimise/MemoryBlockMerging/Reuse.hs | isc | 1,500 | 0 | 10 | 242 | 297 | 161 | 136 | 29 | 1 |
module Absyn where
-- | A sed command: plain print, print-with-pattern, a label definition,
-- or a substitution of a regex by replacement text.
data Cmd = Print | Print_P | Label String
         | Sub Regex String
         deriving (Show)
-- | Where a command applies: a single line number, an inclusive range
-- between two addresses, or every line matching a regular expression.
data Address = DirectAddress Int
             | AddressRange Address Address
             | AddressRE Regex
             deriving (Show)
-- | Regular expressions (currently only literal strings are supported).
data Regex = RELiteral String
           deriving (Show)
-- | A statement: a brace-grouped block of statements, a command guarded
-- by an address, or a bare (unconditional) command.
data Statement = Block [Statement]
               | AddressedCommand Address Statement
               | Command Cmd
               deriving (Show)
| Roguelazer/sedc | Absyn.hs | isc | 447 | 2 | 7 | 158 | 110 | 64 | 46 | 14 | 0 |
{-# LANGUAGE ScopedTypeVariables #-}
-- Test case for hs-to-coq: using a class method at the instance's bound
-- type variable ('def :: a' in the instance body) requires
-- ScopedTypeVariables so @a@ refers to the instance head's variable.
module InstVar where
-- | Types with a designated default value.
class Def a where def :: a
data X a = MkX a
instance Def a => Def (X a) where def = MkX (def :: a)
| antalsz/hs-to-coq | examples/tests/InstVar.hs | mit | 160 | 0 | 7 | 37 | 64 | 35 | 29 | 5 | 0 |
module TeX.Parser.Prim
where
import Prelude ( Maybe(Just, Nothing)
, show, otherwise
, (==), (<*)
)
import Text.Parsec (tokenPrim, getState, putState)
import TeX.Parser.Parser
import TeX.Token
import TeX.Category
-- | Run a parser, then restore the parser state that was in effect
-- before it ran; any state changes are scoped to the group.
runGrouped :: TeXParser a -> TeXParser a
runGrouped inner = do
  saved <- getState
  inner <* putState saved
-- | Accept a token for which the selector returns 'Just'.  The source
-- position is left unchanged (this token stream does no position
-- tracking).
tokenWithFunc :: (Token -> Maybe Token) -> TeXParser Token
tokenWithFunc select = tokenPrim show samePos select
  where
    samePos pos _ _ = pos
-- | Accept any control-sequence token.
controlSequence :: TeXParser Token
controlSequence = tokenWithFunc pick
  where
    pick tok@(ControlSequence _) = Just tok
    pick _ = Nothing
-- | Accept only a token equal to the given one.
exactToken :: Token -> TeXParser Token
exactToken expected = tokenWithFunc check
  where
    check found = if found == expected then Just expected else Nothing
-- | Accept any character token carrying the given category code.
categoryToken :: Category -> TeXParser Token
categoryToken wanted = tokenWithFunc check
  where
    check tok@(CharToken _ found) =
      if found == wanted then Just tok else Nothing
    check _ = Nothing
| spicyj/tex-parser | src/TeX/Parser/Prim.hs | mit | 1,125 | 0 | 10 | 269 | 343 | 178 | 165 | 35 | 2 |
module App where
import Graphics.UI.GLFW as GLFW
import Graphics.Rendering.OpenGL as GL
import Control.Concurrent.STM
import Control.Monad.Trans.RWS
import Events
import Graphics.Object
import Objects.Paddle
import Objects.Ball
-- | Immutable per-run environment: the window, the renderable objects,
-- and the queue that GLFW callbacks push events onto.
data AppEnv = AppEnv
    { envWindow :: !GLFW.Window
    , envPaddle :: !Object
    , envBall :: !Object
    , envQueue :: TQueue Event
    }
-- | Mutable game state: both paddles and the ball.
data AppState = AppState
    { stateLeftPaddle :: Paddle
    , stateRightPaddle :: Paddle
    , stateBall :: Ball
    }
-- | The application monad: read-only 'AppEnv', state 'AppState', in IO.
type App = RWST AppEnv () AppState IO ()
| xymostech/hpong | src/App.hs | mit | 519 | 0 | 10 | 95 | 140 | 86 | 54 | 25 | 0 |
{-# LANGUAGE TemplateHaskell, OverloadedStrings #-}
module Types where
import Control.Lens
import qualified Data.Text as T
import qualified Data.Map as M
import Data.Monoid
-- | Types that can be rendered as a fragment of SQL text.
class SqlShow a where
  sqlShow :: a -> T.Text
-- Column types. When have time get all from http://www.postgresql.org/docs/8.4/static/datatype.html
-- | SQL column types currently supported: INT, CHAR(n), DATE and REAL.
data Type = CInt
          | CChar Int
          | CDate
          | CReal
          deriving (Show, Eq)
-- | Render a column type exactly as it appears in DDL.
instance SqlShow Type where
  sqlShow t = case t of
    CInt    -> "INT"
    CChar n -> "CHAR(" <> T.pack (show n) <> ")"
    CDate   -> "DATE"
    CReal   -> "REAL"
-- Grab from http://www.postgresql.org/docs/9.1/static/sql-createtable.html
-- | Column constraints: NOT NULL and PRIMARY KEY.
data Constraint = NotNull
                | PrimaryKey
                deriving (Show, Eq)

instance SqlShow Constraint where
  sqlShow c = case c of
    NotNull    -> "NOT NULL"
    PrimaryKey -> "PRIMARY KEY"
-- | A column definition: its type plus any number of constraints.
data Column = Column { _typ :: Type
                     , _constraints :: [Constraint]
                     } deriving (Show, Eq)

-- | Render as @TYPE CONSTRAINT ...@, omitting the separating space when
-- there are no constraints at all.
instance SqlShow Column where
  sqlShow (Column colType cons) =
    case map sqlShow cons of
      []       -> sqlShow colType
      rendered -> sqlShow colType <> " " <> T.intercalate " " rendered

makeLenses ''Column
-- | A top-level DDL statement, naming the table it operates on.
data Statement = Create T.Text CreateStatement
               | Alter T.Text AlterStatement
               deriving (Show, Eq)

instance SqlShow Statement where
  sqlShow stmt = case stmt of
    Create name body -> "CREATE TABLE " <> name <> " (" <> sqlShow body <> ")" <> ";"
    Alter name body  -> "ALTER TABLE " <> name <> " " <> sqlShow body <> ";"
-- | Supported ALTER TABLE actions.
data AlterStatement = RenameTable T.Text
                    | RenameColumn T.Text T.Text
                    | AddColumn T.Text Column
                    | DropColumn T.Text
                    | ChangeColumnType T.Text Type
                    deriving (Show, Eq)
-- | Render the action part of an ALTER TABLE statement.
instance SqlShow AlterStatement where
  sqlShow alteration = case alteration of
    RenameTable name          -> "RENAME TO " <> name
    RenameColumn old new      -> "RENAME " <> old <> " TO " <> new
    AddColumn name col        -> "ADD " <> name <> " " <> sqlShow col
    DropColumn name           -> "DROP " <> name
    ChangeColumnType name typ -> name <> " " <> sqlShow typ
-- | The body of a CREATE TABLE: the table's columns, keyed by name.
data CreateStatement = CreateTable { _cols :: M.Map T.Text Column
                                   } deriving (Show, Eq)

-- | Render the comma-separated column list.
--
-- 'M.foldWithKey' is deprecated; 'M.foldlWithKey' with a prepending
-- accumulator produces the same (descending-key) ordering as the
-- original right fold with '++', without the quadratic list append.
instance SqlShow CreateStatement where
  sqlShow (CreateTable cols) = T.intercalate ", " colsText
    where colsText =
            M.foldlWithKey (\acc k val -> (k <> " " <> sqlShow val) : acc) [] cols

makeLenses ''CreateStatement
| mattyhall/MigrationMerger | Types.hs | mit | 2,560 | 0 | 14 | 762 | 747 | 393 | 354 | 58 | 0 |
{-# LANGUAGE NoImplicitPrelude, OverloadedStrings #-}
module Eyeshadow.Data.Options
(InvocationMode(..),
InvocationOptions(..))
where
import Eyeshadow.Diagnostic
import Eyeshadow.Prelude
-- | What the compiler invocation should do: show help, or compile.
data InvocationMode
  = HelpInvocationMode
  | CompilationInvocationMode
-- | All options gathered from the command line: diagnostic output
-- settings plus the selected mode.
data InvocationOptions =
  InvocationOptions {
      invocationOptionsDiagnostic :: DiagnosticOptions,
      invocationOptionsMode :: InvocationMode
    }
| IreneKnapp/Eyeshadow | Haskell/Eyeshadow/Data/Options.hs | mit | 430 | 0 | 8 | 69 | 65 | 42 | 23 | 13 | 0 |
{-# OPTIONS_HADDOCK hide, prune #-}
module Handler.Mooc
( getMoocHomeR, postMoocHomeR
) where
--import qualified Data.Map as Map
import qualified Data.Maybe as Mb
import Database.Esqueleto
import Import hiding ((/=.), (==.), (=.), on, isNothing, count)
import Handler.Mooc.BrowseProposals
import Handler.Mooc.EdxLogin
import Handler.Mooc.User
import qualified Text.Blaze as Blaze
-- | Handle LTI launch requests POSTed to the home page by the edX
-- platform, authenticated with the app's configured LTI credentials.
postMoocHomeR :: Handler TypedContent
postMoocHomeR = do
  app <- getYesod
  request <- getRequest
  dispatchLti (appLTICredentials (appSettings app)) request
-- | Render the qua-kit home page: an optional "set up local account"
-- widget, the user's latest submission (if any), their news feed, vote
-- statistics, and the page chrome from @mooc/home@.
getMoocHomeR :: Handler TypedContent
getMoocHomeR = toTypedContent <$> do
    setUltDestCurrent
    muser <- maybeAuth
    -- Offer local-account setup only to logged-in users without an email.
    createAccW <- case muser of
      Nothing -> return mempty
      Just (Entity userId user) -> do
        return $ if Mb.isNothing $ userEmail user
          then setupLocalAccountW userId
          else mempty
    -- ses <- map (\(k,v) -> k <> " - " <> decodeUtf8 v) . Map.toList <$> getSession
    let urole = muserRole muser
    -- Latest submission of the logged-in user, rendered as a widget.
    mSubmissionsWidget <- case muser of
      Just (Entity uId _) -> do
        submissions <- runDB $ fetchLastSubmissions $ noProposalParams {
                                  propLimit = Just 1
                                , onlyByAuthorId = Just uId
                                , sortOrder = Newest }
        widget <- mkSubmissionsWidget submissions
        return $ Just widget
        -- fmap listToMaybe $ runDB $ selectList [CurrentScenarioAuthorId ==. uId] []
      Nothing -> return Nothing
    newsItems <- renderNewsItems muser
    mvoteCountWidget <- renderVoteCountWidget muser
    -- Students with a submission already get the viewer, not the editor.
    let showEditorBtn = Mb.isNothing mSubmissionsWidget || urole /= UR_STUDENT
    fullLayout Nothing "Welcome to QUA-KIT!" $ do
      setTitle "qua-kit"
      toWidgetHead
        [hamlet|
          <meta property="og:url"         content="@{MoocHomeR}" />
          <meta property="og:type"        content="website" />
          <meta property="og:title"       content="Quick Urban Analysis kit" />
          <meta property="og:description" content="Qua-kit is an urban design, education, sharing, and analysis platform." />
          <meta property="og:image"       content="@{StaticR img_bgimg_png}" />
        |]
      toWidgetHead
        [cassius|
          .critIcon
            position: relative
            top: 5px
          .stars
            color: #ff6f00
          .newscard
            width: 100%
            padding: 0 10px
            .stars
              margin: 8px 0
            .commentPara
              margin-top: 3px
              >.icon24
                margin-right: 3px
        |]
      $(widgetFile "mooc/home")
-- | Collect all news entries for a logged-in user — peer reviews, expert
-- reviews, and better\/worse vote notifications — ordered newest first.
-- Anonymous visitors get no news.
renderNewsItems :: Maybe (Entity User) -> Handler [(UTCTime, Widget)]
renderNewsItems Nothing = return []
renderNewsItems (Just (Entity uId _)) = do
    reviews       <- fetchReviews uId
    expertReviews <- fetchExpertReviews uId
    betterVotes   <- fetchBetterVotes uId
    worseVotes    <- fetchWorseVotes uId
    let newestFirst (t1, _) (t2, _)
          | t1 < t2 = GT
          | otherwise = LT
        collected = concat [reviews, expertReviews, betterVotes, worseVotes]
    return (sortBy newestFirst collected)
-- | TODO: at some point, it is better to implement pagination instead of a hard limit
-- Maximum number of news entries fetched per query below.
newsLimit :: Int64
newsLimit = 20
-- | Fetch the newest expert reviews of the user's scenarios and render
-- each as a timestamped widget: a star rating (grade out of 5) plus the
-- review comment.
fetchExpertReviews :: UserId -> Handler [(UTCTime, Widget)]
fetchExpertReviews uId = do
  reviewScs <- runDB $ select $ from $ \(expReview `InnerJoin` scenario) -> do
                 on $ expReview ^. ExpertReviewScenarioId ==. scenario ^. ScenarioId
                 where_ $ scenario ^. ScenarioAuthorId ==. val uId
                 orderBy [desc $ expReview ^. ExpertReviewTimestamp]
                 limit newsLimit
                 return (expReview, scenario)
  let renderReview (Entity _ r, Entity _ sc) =
        let stars =
              -- grade filled stars followed by (5 - grade) outlines
              let grade = expertReviewGrade r
              in mconcat $
                   (replicate grade [whamlet|<span.icon.icon-lg>star</span>|]) ++
                   replicate (5 - grade) [whamlet|<span.icon.icon-lg>star_border</span>|]
            widget =
              [whamlet|
                ^{viewSubmissionBtn sc}
                <p .stars>
                  ^{stars}
                <p>#{expertReviewComment r}
              |]
        in (expertReviewTimestamp r, widget)
  return $ map renderReview reviewScs
-- | Fetch the newest peer reviews of the user's scenarios, joining in
-- the criterion (for its icon) and the reviewer (for their name), and
-- render each as a timestamped widget with a thumbs-up\/down marker.
fetchReviews :: UserId -> Handler [(UTCTime, Widget)]
fetchReviews uId = do
  reviewData <- runDB $ select $
      from $ \(review
        `InnerJoin` scenario
        `InnerJoin` criterion
        `InnerJoin` user
        ) -> do
      on (review ^. ReviewReviewerId ==. user ^. UserId)
      on (review ^. ReviewCriterionId ==. criterion ^. CriterionId)
      on (review ^. ReviewScenarioId ==. scenario ^. ScenarioId)
      where_ $ scenario ^. ScenarioAuthorId ==. val uId
      orderBy [desc $ review ^. ReviewTimestamp]
      limit newsLimit
      return (review, scenario, criterion, user)
  let renderReview (Entity _ r, Entity _ sc, Entity _ crit, Entity _ reviewer) =
        -- The criterion icon is stored as raw markup, hence preEscaped.
        let critIcon = Blaze.preEscapedToMarkup $ criterionIcon crit
            widget =
              [whamlet|
                ^{viewSubmissionBtn sc}
                <p .commentPara>
                  <span .critIcon>#{ critIcon }
                  <span class="icon icon24 text-brand-accent">
                    $if reviewPositive r
                      thumb_up
                    $else
                      thumb_down
                  Review from #{userName reviewer}
                <p>
                  #{reviewComment r}
              |]
        in (reviewTimestamp r, widget)
  return $ map renderReview reviewData
-- | News entries for comparisons the user's submission won, including
-- the voter's explanation when one was given.
fetchBetterVotes :: UserId -> Handler [(UTCTime, Widget)]
fetchBetterVotes = fetchVotesWithComment VoteBetterId renderReview
  where
    renderReview (Entity _ v, Entity _ sc) =
      let widget =
            [whamlet|
              ^{viewSubmissionBtn sc}
              <p>
                <strong>Your submission was voted better than another
                $maybe expl <- voteExplanation v
                  – #{expl}
            |]
      in (voteTimestamp v, widget)
-- | News entries for comparisons the user's submission lost, including
-- the voter's explanation when one was given.
fetchWorseVotes :: UserId -> Handler [(UTCTime, Widget)]
fetchWorseVotes = fetchVotesWithComment VoteWorseId renderReview
  where
    renderReview (Entity _ v, Entity _ sc) =
      let widget =
            [whamlet|
              ^{viewSubmissionBtn sc}
              <p>
                <strong>Another submission was voted even better than yours
                $maybe expl <- voteExplanation v
                  – #{expl}
            |]
      in (voteTimestamp v, widget)
-- | Shared query for 'fetchBetterVotes' \/ 'fetchWorseVotes': fetch the
-- newest votes referencing the user's scenarios through the given vote
-- field (better or worse side), keeping only votes with an explanation,
-- and render each with the supplied function.
fetchVotesWithComment :: EntityField Vote ScenarioId
                      -> ((Entity Vote, Entity Scenario) -> (UTCTime, Widget))
                      -> UserId
                      -> Handler [(UTCTime, Widget)]
fetchVotesWithComment voteBetterOrWorse renderReview uId = do
  votes <- runDB $ select $ from $ \(vote `InnerJoin` scenario) -> do
             on $ vote ^. voteBetterOrWorse ==. scenario ^. ScenarioId
             where_ $ scenario ^. ScenarioAuthorId ==. val uId
                  &&. (not_ $ vote ^. VoteExplanation ==. nothing)
             orderBy [desc $ vote ^. VoteTimestamp]
             limit newsLimit
             return (vote, scenario)
  return $ map renderReview votes
-- | Summary card of how often the user's submission was compared to
-- others, split into wins and losses.  Returns 'Nothing' for anonymous
-- visitors or when no comparisons exist yet.
renderVoteCountWidget :: Maybe (Entity User) -> Handler (Maybe Widget)
renderVoteCountWidget Nothing = return Nothing
renderVoteCountWidget (Just (Entity uId _)) = do
  -- Count votes referencing the user's scenarios through the given side.
  let countVotes voteBetterOrWorse = do
        counts <- runDB $ select $ from $ \(vote `InnerJoin` scenario) -> do
                    on $ vote ^. voteBetterOrWorse ==. scenario ^. ScenarioId
                    where_ $ scenario ^. ScenarioAuthorId ==. val uId
                    return $ count $ vote ^. VoteId
        return $ case counts of
                   c:_ -> unValue c
                   []  -> 0::Int
  betterVoteCount <- countVotes VoteBetterId
  worseVoteCount  <- countVotes VoteWorseId
  let totalVoteCount = betterVoteCount + worseVoteCount
      widget =
        [whamlet|
          <div class="col-lg-4 col-md-6 col-sm-9">
            <div .card>
              <div .card-inner>
                <p>
                  Your submission was compared #{totalVoteCount} times to another:
                <p>
                  <span class="icon icon24 text-brand-accent">
                    thumb_up
                  #{betterVoteCount} times it was voted better,
                <p>
                  <span class="icon icon24 text-brand-accent">
                    thumb_down
                  #{worseVoteCount} times the other one was voted better.
        |]
  return $ if totalVoteCount > 0
           then Just widget
           else Nothing
-- | A small "view" button linking to the submission viewer for the
-- given scenario; used in every news-feed card.
viewSubmissionBtn :: Scenario -> Widget
viewSubmissionBtn sc = [whamlet|
    <div class="card-action-btn pull-right">
      <a href=@{ SubmissionViewerR (scenarioTaskId sc) (scenarioAuthorId sc) }>
        <span .icon>visibility
  |]
| mb21/qua-kit | apps/hs/qua-server/src/Handler/Mooc.hs | mit | 9,103 | 0 | 20 | 3,175 | 1,799 | 916 | 883 | -1 | -1 |
module Ledger.Commodity where
import Control.Exception
import Data.Text
import Data.Commodity.Errors
import Data.Commodity.Types
-- | Return @z@ when the two amounts' commodities are compatible — equal,
-- or either is the 0 "no commodity" marker — otherwise throw a
-- 'CommodityMismatch' tagged with the operation name.
ifCommoditiesMatch :: Text -> Amount -> Amount -> a -> a
ifCommoditiesMatch op x@(Amount _ xc _) y@(Amount _ yc _) z =
  if xc == 0 || yc == 0 || xc == yc
    then z
    else throw (CommodityMismatch op x y)
-- Commodity.hs ends here
| ledger/commodities | Data/Commodity.hs | mit | 363 | 0 | 12 | 66 | 138 | 73 | 65 | 9 | 1 |
{-# LANGUAGE Safe, MultiParamTypeClasses, FunctionalDependencies, FlexibleInstances, UndecidableInstances, GADTs #-}
module Control.Monad.Operational.Class where
import Data.Monoid (Monoid)
import qualified Control.Monad.Operational as Operational
import Control.Monad.Trans.Class (MonadTrans, lift)
import Control.Monad.Trans.Reader (ReaderT)
import qualified Control.Monad.Trans.State.Strict as Strict (StateT)
import qualified Control.Monad.Trans.State.Lazy as Lazy (StateT)
import Control.Monad.Trans.Cont (ContT)
import qualified Control.Monad.Trans.Writer.Strict as Strict (WriterT)
import qualified Control.Monad.Trans.Writer.Lazy as Lazy (WriterT)
import qualified Control.Monad.Trans.RWS.Strict as Strict (RWST)
import qualified Control.Monad.Trans.RWS.Lazy as Lazy (RWST)
import Control.Monad.Trans.Maybe (MaybeT)
import Control.Monad.Trans.Identity (IdentityT)
import Control.Monad.Trans.List (ListT)
-- | Monads into which an operational 'Operational.Program' over the
-- instruction set @instr@ can be lifted.  The functional dependency
-- fixes the instruction set per monad.
class Monad m => MonadProgram instr m | m -> instr where
  liftProgram :: Operational.Program instr a -> m a
-- | Canonical 'liftProgram' for any monad transformer stacked on top of
-- a 'MonadProgram': lift in the inner monad, then 'lift' through.
liftProgramTrans :: (Monad (t n), MonadTrans t, MonadProgram instr n) => Operational.Program instr a -> t n a
liftProgramTrans prog = lift (liftProgram prog)
-- | Lift a single instruction into any 'MonadProgram'.
singleton :: MonadProgram instr m => instr a -> m a
singleton instr = liftProgram (Operational.singleton instr)
-- | The base case: 'Operational.ProgramT' itself.
instance Monad m => MonadProgram instr (Operational.ProgramT instr m) where
  liftProgram = Operational.liftProgram
-- Pass-through instances for the standard transformers; each simply
-- delegates via 'liftProgramTrans'.
instance (MonadProgram instr m) => MonadProgram instr (ReaderT e m) where
  liftProgram = liftProgramTrans
instance (MonadProgram instr m) => MonadProgram instr (Strict.StateT s m) where
  liftProgram = liftProgramTrans
instance (MonadProgram instr m) => MonadProgram instr (Lazy.StateT s m) where
  liftProgram = liftProgramTrans
instance (MonadProgram instr m) => MonadProgram instr (ContT r m) where
  liftProgram = liftProgramTrans
instance (MonadProgram instr m, Monoid w) => MonadProgram instr (Strict.WriterT w m) where
  liftProgram = liftProgramTrans
instance (MonadProgram instr m, Monoid w) => MonadProgram instr (Lazy.WriterT w m) where
  liftProgram = liftProgramTrans
instance (MonadProgram instr m, Monoid w) => MonadProgram instr (Strict.RWST r w s m) where
  liftProgram = liftProgramTrans
instance (MonadProgram instr m, Monoid w) => MonadProgram instr (Lazy.RWST r w s m) where
  liftProgram = liftProgramTrans
instance (MonadProgram instr m) => MonadProgram instr (MaybeT m) where
  liftProgram = liftProgramTrans
instance (MonadProgram instr m) => MonadProgram instr (IdentityT m) where
  liftProgram = liftProgramTrans
instance (MonadProgram instr m) => MonadProgram instr (ListT m) where
  liftProgram = liftProgramTrans
| srijs/haskell-operational-class | src/Control/Monad/Operational/Class.hs | mit | 2,648 | 0 | 9 | 355 | 782 | 444 | 338 | 46 | 1 |
module Upgrade.Util where
import Control.Exception
import Control.Monad
import Control.Monad.Reader
import Database.HDBC
import Database.HDBC.Sqlite3
import Upgrade.Core
-- | Build an upgrade context backed by an SQLite connection: DDL runs
-- without inspecting results, DQL returns the raw rows.
sqlite3Context :: Connection -> Ctx IO SqlValue
sqlite3Context conn = Ctx { runDDL = \stmt -> void (run conn stmt [])
                          , runDQL = \stmt -> quickQuery conn stmt []
                          }
-- | Run an upgrade against the @upgrade.db@ SQLite file, committing and
-- disconnecting even if the upgrade throws.
runSqlite3 :: DBCmd () SqlValue -> IO ()
runSqlite3 upgrade =
    bracket (connectSqlite3 "upgrade.db") finish go
  where
    finish conn = commit conn >> disconnect conn
    go = runReaderT upgrade . sqlite3Context
-- | A dry-run context: every statement is printed to stdout, and query
-- results are synthesised by the supplied fake-result function.
stdoutContext :: (String -> [[v]]) -> Ctx IO v
stdoutContext fake = Ctx { runDDL = putStrLn
                         , runDQL = \q -> do
                             putStrLn q
                             return (fake q)
                         }
-- | Run an upgrade in dry-run mode, printing statements to stdout.
runStdout :: (String -> [[v]]) -> DBCmd () v -> IO ()
runStdout fake upgrade = runReaderT upgrade (stdoutContext fake)
| csierra/didactic-upgrade-monads | src/Upgrade/Util.hs | gpl-2.0 | 883 | 0 | 11 | 242 | 309 | 163 | 146 | 20 | 1 |
-- -*- mode: haskell -*-
module JVM.Check where
import JVM.Type
import JVM.Builtin
import Machine.Numerical.Config
import Autolib.Reporter
import Autolib.ToDoc
import Autolib.Reader
import Autolib.Set
import Data.Typeable
-- | Restrictions that can be imposed on a submitted JVM program: either
-- a whitelist of allowed builtin statements, or an upper bound on the
-- absolute value of Push literals.
data Checker = Builtins (Set Statement)
	     | Smallnums Integer
    deriving Typeable
-- | Validate a program against a 'Checker', reporting (in German) what
-- is allowed, what the program uses, and whether anything is forbidden.
instance Check Checker Program where
    check (Builtins allowed) p = do
        inform $ text "erlaubt sind diese Rechenbefehle:"
        inform $ nest 4 $ toDoc allowed
        -- every builtin statement actually occurring in the program
        let used = mkSet (filter is_builtin (flatten p))
        inform $ text "Sie benutzen:" <+> toDoc used
        let forbidden = minusSet used allowed
        assert ( isEmptySet forbidden ) $ text "sind alle zugelassen?"
    check (Smallnums allowed) p = do
        inform $ text "Push (i) ist nur erlaubt für abs(i) <= "
		 <+> toDoc allowed
        -- absolute values of all Push literals in the program
        let used = mkSet [ abs i | Push i <- flatten p ]
        inform $ text "Sie benutzen:" <+> toDoc used
        let forbidden = sfilter ( > allowed ) used
        assert ( isEmptySet forbidden ) $ text "sind alle zugelassen?"
-- Parser for 'Checker' in autotool's concrete syntax.  The shape follows
-- the autolib Reader convention (mirroring the ToDoc instance below);
-- precedence 9 guards ensure constructor arguments parse like Haskell.
instance Reader Checker where
    atomic_readerPrec d = readerParenPrec d $ \ d -> do
            ((do guard (d < 9)
                 my_reserved "Builtins"
                 aa <- readerPrec 9
                 return (Builtins aa))
             <|>
             (do guard (d < 9)
                 my_reserved "Smallnums"
                 aa <- readerPrec 9
                 return (Smallnums aa)))
-- Pretty-printer for 'Checker', inverse of the Reader instance above:
-- parenthesise when used as an argument (precedence >= 10).
instance ToDoc Checker where
    toDocPrec d (Builtins aa) = docParen (d >= 10)
	  (text "Builtins" </> fsep [toDocPrec 10 aa])
    toDocPrec d (Smallnums aa) = docParen (d >= 10)
	  (text "Smallnums" </> fsep [toDocPrec 10 aa])
| florianpilz/autotool | src/JVM/Check.hs | gpl-2.0 | 1,880 | 8 | 14 | 695 | 541 | 264 | 277 | -1 | -1 |
-- Copyright (C) 2002-2005,2007 David Roundy
-- Copyright (C) 2009 Reinier Lamers
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2, or (at your option)
-- any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; see the file COPYING. If not, write to
-- the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
-- Boston, MA 02110-1301, USA.
-- | This module contains unit tests of the code in 'Darcs.Email'
--
-- These tests check whether the emails generated by darcs meet a few criteria.
-- We check for line length and non-ASCII characters. We apparently do not have
-- to check for CR-LF newlines because that's handled by sendmail.
module Darcs.Test.Email ( testSuite ) where
import Data.Char ( isPrint )
import qualified Data.ByteString as B ( length, unpack, null, head,
cons, empty, foldr, ByteString )
import qualified Data.ByteString.Char8 as BC ( unpack )
import Test.Framework ( Test, testGroup )
import Test.Framework.Providers.QuickCheck2 ( testProperty )
import Darcs.Util.Printer ( text, renderPS, RenderMode(..) )
import Darcs.UI.Email ( makeEmail, readEmail, formatHeader )
-- | All Darcs.Email properties, grouped for the test runner.
testSuite :: Test
testSuite = testGroup "Darcs.Email"
 [ emailParsing
 , emailHeaderNoLongLines
 , emailHeaderAsciiChars
 , emailHeaderLinesStart
 , emailHeaderNoEmptyLines
 ]
-- | Checks that darcs can read the emails it generates
-- Round-trip: render an email with the given body lines and read it
-- back; the body must survive (modulo the leading/trailing blank lines
-- that the email format itself introduces).
emailParsing :: Test
emailParsing = testProperty "Checking that email can be parsed" $ \s ->
  unlines ("":s++["", ""]) ==
    BC.unpack (readEmail (renderPS Standard
      $ makeEmail "reponame" [] (Just (text "contents\n"))
                  Nothing
                  (text $ unlines s) (Just "filename")))
-- | Check that formatHeader never creates lines longer than 78
-- characters (excluding the carriage return and line feed).
emailHeaderNoLongLines :: Test
emailHeaderNoLongLines =
  testProperty "Checking email header line length" $ \field value ->
    let sanitized = cleanFieldString field
    in all (\line -> B.length line <= 78)
           (bsLines (formatHeader sanitized value))
-- Check that an email header does not contain non-ASCII characters.
-- (formatHeader doesn't escape field names; there is no such thing as a
-- non-ASCII field name as far as we know.)
emailHeaderAsciiChars :: Test
emailHeaderAsciiChars =
  testProperty "Checking email for illegal characters" $ \field value ->
    let sanitized = cleanFieldString field
    in all (<= 127) (B.unpack (formatHeader sanitized value))
-- Check that the second and later (folded) lines of a header start with
-- a space, as required for header continuation lines.
emailHeaderLinesStart :: Test
emailHeaderLinesStart =
  testProperty "Checking for spaces at start of folded email header lines" $ \field value ->
    let continuations = drop 1 (bsLines (formatHeader (cleanFieldString field) value))
        startsWithSpace l = B.null l || B.head l == 32
    in all startsWithSpace continuations
-- Checks that there are no empty lines in email headers.
emailHeaderNoEmptyLines :: Test
emailHeaderNoEmptyLines =
  testProperty "Checking that there are no empty lines in email headers" $ \field value ->
    let headerLines = bsLines (formatHeader (cleanFieldString field) value)
    in not (any B.null headerLines)
-- | Split a ByteString on newline (10) bytes.  Unlike 'B.split', an
-- empty input yields one empty line: there is always at least one
-- (possibly empty) line in the result.
bsLines :: B.ByteString -> [B.ByteString]
bsLines = finish . B.foldr step (B.empty, [])
  where
    step 10 (line, rest) = (B.empty, line : rest)
    step w  (line, rest) = (B.cons w line, rest)
    finish (line, rest)  = line : rest
-- | Strip characters that may not appear in a header field name:
-- non-printables, non-ASCII characters, and the ':' separator.
cleanFieldString :: String -> String
cleanFieldString = filter acceptable
  where acceptable c = isPrint c && c < '\x80' && c /= ':'
| DavidAlphaFox/darcs | harness/Darcs/Test/Email.hs | gpl-2.0 | 4,311 | 0 | 17 | 898 | 766 | 431 | 335 | 52 | 2 |
import Control.Concurrent.Async (mapConcurrently)
import System.Directory (setCurrentDirectory)
import System.Environment (getArgs)
import System.FilePath ((</>))
import System.FilePath.Glob (namesMatching)
import PackagesNG.ParsePackage (printPackage)
-- | Print all packages from a portage repository's md5 cache.  The
-- repository root may be given as the first CLI argument and defaults
-- to @/usr/portage@; cache entries are processed concurrently.
main :: IO ()
main = do
  cliArgs <- getArgs
  let repoRoot = case cliArgs of
        repoPath : _ -> repoPath
        []           -> "/usr/portage"
  setCurrentDirectory (repoRoot </> "metadata/md5-cache")
  cacheFiles <- namesMatching "*/*"
  _ <- mapConcurrently printPackage cacheFiles
  return ()
| vikraman/packages-ng | parse-packages/src/Main.hs | gpl-2.0 | 628 | 0 | 13 | 192 | 160 | 85 | 75 | 16 | 2 |
{-
Copyright 2009 Victor Nazarov
This file is part of LambdaInterpreter.
LambdaInterpreter is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
LambdaInterpreter is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with LambdaInterpreter. If not, see <http://www.gnu.org/licenses/>.
-}
-- Parser for lambda-terms
module Lambda.Parser (parser, nameParser) where
import Control.Monad
import Text.ParserCombinators.Parsec as Parsec
import Lambda.Common (Term, mkApp, mkVar, mkLam, mkI, mkK, mkB, mkC, mkS)
-- Main parser
-- | Entry point: a whole term is parsed as an application sequence.
parser :: CharParser () Term
parser = appParser
-- | An application is a left-associated sequence of one or more terms.
appParser :: CharParser () Term
appParser = do
    terms <- many1 (try opParser)
    return (foldl1 mkApp terms)
-- | Any term except a top-level application: an abstraction, a variable,
-- or an application in parentheses.  Leading whitespace is skipped.
opParser :: CharParser () Term
opParser = do
    skipMany space
    abstrParser
      <|> parenthesized
      <|> varParser
  where
    parenthesized =
      between (char '(')
              (skipMany space >> char ')' <?> "закрывающая скобка")
              appParser
-- | An abstraction: a lambda ("\\", or "λ" followed by a space), then
-- one or more variable names ending with "." and a body.  Several bound
-- names desugar into nested single-variable abstractions.
abstrParser :: CharParser () Term
abstrParser = (<?> "λ-абстракция") $
  do char '\\' <|> (char 'λ' >> space)
     skipMany space
     nextVar
  where nextVar =
          do boundName <- nameParser
             body <- varOrDot
             return $ boundName `mkLam` body
        varOrDot =
          do skipMany space
             dotThenBody <|> nextVar
        dotThenBody = (char '.' >> appParser) <?> "точка"
-- | A variable reference.  The reserved names I, K, B, C, S (which
-- 'nameParser' rejects) are parsed via the fallback string parsers and
-- mapped to the corresponding built-in combinators.
varParser :: CharParser () Term
varParser =
  do name <- try nameParser <|> msum (map string combinatorNames)
     return (maybe (mkVar name) id (lookup name combinatorTable))
  where combinatorNames = map fst combinatorTable
        combinatorTable =
          [("I", mkI), ("K", mkK), ("B", mkB), ("C", mkC), ("S", mkS)]
-- | A variable name: a letter, then letters\/digits, then optional
-- primes.  The single-letter combinator names I, K, B, C, S are
-- reserved and rejected, so 'varParser' can give them their built-in
-- meaning.
--
-- The explicit signature was previously missing on this exported
-- binding; the monomorphism restriction already pinned it to this type
-- through its use in 'parser', so adding it changes nothing for callers.
nameParser :: CharParser () String
nameParser = (<?> "имя переменной") $
  do firstLetter <- letter
     lettersOrDigits <- many alphaNum
     primes <- many (char '\'')
     let name = firstLetter : (lettersOrDigits ++ primes)
     when (name `elem` ["I", "K", "B", "C", "S"]) $
       fail "I, K, B, C, S --- зарезервированные идентификаторы"
     return name
| sviperll/LambdaInterpreter | src/Lambda/Parser.hs | gpl-3.0 | 2,804 | 0 | 13 | 658 | 533 | 276 | 257 | 47 | 6 |
module GLInit where
import Control.Monad.Reader
import Control.Concatenative
import Control.DeepSeq
import Data.IORef
import Data.Metrology.Show()
import Data.Metrology.SI.Poly
import Data.Metrology.Poly
import qualified Data.Map as Map
import Linear
import Linear.Affine
import Graphics.GLUtil
import Graphics.GLUtil.Camera3D
import Graphics.Rendering.OpenGL
import qualified Graphics.UI.GLFW as GLFW
import System.Exit
import Control.Concurrent
import Control.Concurrent.Timer
import Control.Concurrent.Suspend.Lifted
import Constants
import Common
import Physics
import GLHelper
import Shaders
import GL
-- | Initialise GLFW, create the window, and hand control to 'glMain'.
-- On any failure GLFW is terminated and the process exits non-zero.
glInit :: IO ()
glInit = do
  GLFW.setErrorCallback (Just errorC)
  success <- GLFW.init
  unless success exitFailure
  windowHints
  -- f: window creation failed; s: make the context current, run the
  -- main loop, destroy the window, then terminate GLFW and exit.
  let f = GLFW.terminate
          >> exitFailure
      s = triM_ (GLFW.makeContextCurrent . Just) glMain GLFW.destroyWindow
          >=> const (GLFW.terminate >> exitSuccess)
  GLFW.createWindow 1024 768 "xenocrat" Nothing Nothing >>= maybe f s
-- | Set up shaders, buffers, GL state and callbacks, start the physics
-- thread, and enter the render loop via 'displayState'.
glMain :: GLFW.Window -> IO ()
glMain window = do
  -- One shader program per drawable kind (cross marker, vector, planet).
  crossSP <- loadShaderProgramWithBS [passthroughVS, crossGS, defaultFS] prepare
  vectorSP <- loadShaderProgramWithBS [defaultVS, vectorGS, defaultFS] prepare
  planetSP <- loadShaderProgramWithBS [planetVS, planetTCS, planetTES, planetFS] prepare
  crossB <- makeBuffer ArrayBuffer [nil, one]
  crossV <- setup crossSP
  planetB <- makeBuffer ArrayBuffer [nil]
  planetV <- setup planetSP
  vectorB <- makeBuffer ArrayBuffer $ replicate 6 nil
  vectorV <- setup vectorSP
  -- Map from drawable kind to (program, buffers, VAO).
  let sbv = Map.fromList [(Cross, (crossSP, Map.fromList [(ArrayBuffer, crossB)], crossV)),
                          (Vector, (vectorSP, Map.fromList [(ArrayBuffer, vectorB)], vectorV)),
                          (Planet, (planetSP, Map.fromList [(ArrayBuffer, planetB)], planetV))] :: MSBV
  patchVertices $= 3
  lineWidth $= 0.1
  -- Initial screen size, camera, and the simulated bodies.
  let s = V2 1024 768
      c = dolly (V3 0 0 1) fpsCamera :: Camera FT
      b = [earth, moon, sun] :: [Body SI V3 DT]
  screen <- newIORef s
  cam <- newIORef c
  bds <- newIORef b
  verts <- newIORef $ conv b
  GLFW.swapInterval 1
  GLFW.setKeyCallback window $ Just $ keyboardC cam
  GLFW.setCursorPosCallback window $ Just $ cursorC cam
  GLFW.setWindowRefreshCallback window $ Just $ displayState verts screen sbv cam
  GLFW.setWindowSizeCallback window $ Just $ reshapeC screen
  blend $= Enabled
  blendFunc $= (SrcAlpha, OneMinusSrcAlpha)
  depthFunc $= Just Less
  clearColor $= blue
  -- Periodically wake the event loop so the display refreshes.
  timer <- repeatedTimer GLFW.postEmptyEvent delay
  success <- repeatedRestart timer
  unless success $ GLFW.terminate >> exitFailure
  -- Physics runs on its own thread, publishing through the IORefs.
  _ <- forkIO $ compute bds verts
  displayState verts screen sbv cam window
  where
    prepare p = bindFragDataLocation p "outColor" $= 0
    setup s = makeVAO $ enableAttrib s "position" >> setAttrib s "position" ToFloat (VertexArrayDescriptor 3 Float 0 offset0)
    nil = V3 0 0 0 :: V3 FT
    one = V3 1 1 1 :: V3 FT
    delay = msDelay 100
-- | Physics loop: repeatedly advance the body state by one time step and
-- publish the fully forced ('force') bodies and converted vertices into
-- the shared IORefs.  Runs forever on its own thread.
compute :: forall f a b. (NFData (f b), Fractional b, NFData a, NFData (f a), Metric f, Metric (Diff f), Eq a, Real a, Floating a, Fractional (f a), Eq (f a), Functor f) =>
           IORef [Body SI f a] -> IORef [f b] -> IO ()
compute bds verts = get bds >>= comp >> return ()
  where
    -- simulation step size
    interval = 1 % Second
    comp :: [Body SI f a] -> IO [Body SI f a]
    comp b = do
      -- force to NF before publishing, so the render thread never
      -- evaluates physics thunks
      let b' = force $ runReader (updateState b) interval
          v = force $ conv b'
      writeIORef bds b'
      writeIORef verts v
      comp b'
| hesiod/xenocrat | src/exec/GLInit.hs | gpl-3.0 | 3,482 | 0 | 16 | 755 | 1,257 | 633 | 624 | -1 | -1 |
-- | Propositional Logic Formulas in Conjunctive Normal Form.
-- This module is a model implementation of the API
-- (obviously correct, but also obviously non-efficient)
module Satchmo.Form.Model
( module Satchmo.Form.Types
, CNF, cnf, C
, size
, variables, clauses, clausesL
, smallest_clauses
, empty_clauses
, get_clause, get_unit_clause
, units
-- , positive_units, negative_units
, clauses_for, positive_clauses_for, negative_clauses_for
-- , literals_for, positive_literals_for, negative_literals_for
, satisfied, contradictory
-- , add_clauses, add_clause, add_clause'
-- , drop_variable, add_variable
, empty
, assign
, drop_variable
, add_clauses
, check_asserts, using_model
)
where
import Satchmo.Form.Types
import qualified Data.Set as DS
import qualified Data.EnumSet as S
import qualified Data.Map as M
import Control.Monad ( mzero )
import Data.Function (on)
import Data.List ( sortBy )
-- | When true, run internal consistency assertions (disabled here).
check_asserts = False
-- | Marker telling callers this is the model (reference) implementation.
using_model = True
-- | A formula is simply the set of its clauses.
data CNF = CNF { clauses :: DS.Set Clause }
    deriving Show
-- | Build a formula from a list of clauses.
cnf cls = CNF { clauses = DS.fromList cls }
-- | In the model, a clause handle is the clause itself.
type C = Clause
-- | Look up a clause by handle (the identity here).
get_clause _ cl = cl
-- | Extract the single literal of a unit clause as (variable, polarity).
-- Partial by design: callers must guarantee the clause is a unit.
get_unit_clause f cl = case literals (get_clause f cl) of
    [l] -> (variable l,positive l)
clausesL = DS.toList . clauses
-- | The @n@ clauses with the fewest literals.
smallest_clauses n f =
    take n (sortBy (compare `on` sizeC) (clausesL f))

-- | All clauses with no literal left.
empty_clauses f = [ c | c <- clausesL f, nullC c ]
-- | The formula with no clauses at all.
empty :: CNF
empty = CNF { clauses = DS.empty }

-- | Number of clauses.
size :: CNF -> Int
size = DS.size . clauses

-- | Every variable occurring anywhere in the formula, deduplicated.
variables :: CNF -> [ V ]
variables f = S.toList $ S.fromList
    [ variable l | c <- clausesL f, l <- literals c ]
-- | All clauses mentioning @v@, each mapped to the occurrence polarity.
clauses_for :: V -> CNF -> M.Map C Bool
clauses_for v f = M.fromList
    [ (c, b) | c <- clausesL f, Just b <- [ get_value c v ] ]

-- | Clauses where @v@ occurs positively.
positive_clauses_for v f =
    M.filter id $ clauses_for v f

-- | Clauses where @v@ occurs negatively.
negative_clauses_for v f =
    M.filter not $ clauses_for v f
-- | All unit clauses (exactly one literal).
units :: CNF -> [ Clause ]
units = filter unit . clausesL

-- | A formula is satisfied when no clause remains.
satisfied :: CNF -> Bool
satisfied f = size f == 0

-- | A formula is contradictory when it contains an empty clause.
contradictory :: CNF -> Bool
contradictory = any nullC . clausesL

-- | Add clauses to a formula; duplicates collapse in the set.
add_clauses cls f = CNF $ DS.union (DS.fromList cls) (clauses f)
-- | Remove every clause mentioning @v@, with either polarity.
drop_variable :: V -> CNF -> CNF
drop_variable v f = CNF $ DS.fromList
    [ c | c <- clausesL f, Nothing <- [ get_value c v ] ]

-- | Assign a value to a variable: clauses satisfied by the assignment
-- disappear, clauses contradicted in @v@ lose that literal, and clauses
-- not mentioning @v@ are kept unchanged.
assign (v,b) f = CNF $ DS.fromList $ do
    c <- clausesL f
    case get_value c v of
        Nothing -> [ c ]
        Just b' -> if b == b' then [] else [ c `without` v ]
| jwaldmann/satchmo-solver | src/Satchmo/Form/Model.hs | gpl-3.0 | 2,537 | 0 | 13 | 551 | 841 | 445 | 396 | 75 | 3 |
{-# LANGUAGE CPP, GeneralizedNewtypeDeriving, BangPatterns, ParallelListComp #-}
{-# OPTIONS_GHC -Wall #-}
import System.IO
import qualified Data.Array
import Control.Applicative
import Control.DeepSeq
import Control.Monad
import Control.Monad.State
import Control.Monad.Trans.Error
import Control.Monad.ST
import Data.Array.ST
import Prelude hiding (lookup)
import Data.List
import qualified Data.Map as Map
import qualified Data.ByteString
import Data.Map ((!), Map)
import Data.Word
import Data.Bits
-- import Data.Binary
import qualified System.Environment
--import Debug.Trace
-- | Disabled tracing stub: ignores the message and returns the value
-- unchanged.  Swap in Debug.Trace.trace (see the commented import
-- above) to re-enable the trace calls scattered through this module.
trace :: t -> t1 -> t1
trace _ a = a
-- #define TRACE_BYTES(str) !_ <- traceBytes (str)
#define TRACE_BYTES(str)
{-
data OutChars a = OutChars
{ len :: Int
, current :: Data.Array.Array
-}
-- | Builder for output symbols, stored newest-first.  (Earlier
-- experiments used a random-access structure; a plain reversed list
-- turned out sufficient.)
type OutChars a = [a]

-- | The empty output sequence.
emptyOutChars :: [a]
emptyOutChars = []

-- | Push one element onto the front of the builder, O(1).
mcons :: a -> OutChars a -> OutChars a
mcons x xs = x : xs

-- | Recover the elements oldest-first (the builder keeps them
-- newest-first).
oc2listReverse :: OutChars a -> [a]
oc2listReverse xs = reverse xs

-- | View the builder as a plain newest-first list.
oc2list :: OutChars a -> [a]
oc2list xs = xs

-- | Wrap a plain list as a builder.
list2oc :: [a] -> OutChars a
list2oc xs = xs

-- | Push the elements of an already-reversed list onto the builder,
-- so its head ends up deepest.
concOcWithReversedList :: [a] -> OutChars a -> OutChars a
concOcWithReversedList ys acc = foldl' (flip mcons) acc ys

-- | Push the elements of a list onto the builder keeping their order:
-- the list's last element becomes the builder's newest entry.
concOcWithList :: [a] -> OutChars a -> OutChars a
concOcWithList ys acc = concOcWithReversedList (reverse ys) acc
{-
ocNth :: Int -> OutChars a -> Maybe a
ocNth _ Nil = Nothing
ocNth 0 (Cons a _) = Just a
ocNth n (Cons _ l) = let m = testBit n 0
n' = shift (n-1) (-1)
o = ocNth n' l
in
case o of Nothing -> Nothing
Just (x, y) -> Just $ if m then x else y
ocNth 0 (Cons2 a _ _) = Just a
ocNth n (Cons2 _ b l) = ocNth (n-1) (Cons b l)
-}
-- | Show a value with a label through 'trace' and return it unchanged
-- (inert while 'trace' is the no-op stub).
traceme :: Show a => String -> a -> a
traceme s a = trace (s ++ " " ++ (show a)) a
-- traceBytes :: String -> DeflateParse ()
-- traceBytes q = do
-- (!_, !by, !_) <- gets strIn
-- l <- gets inLen
-- return $ trace (q ++ " " ++ (show by) ++ " of " ++ (show l) ++ " bytes processed") ()
-- | Binary decoding tree for a prefix code: a Fork per bit (left =
-- False/0, right = True/1), a Leaf per symbol, EmptyLeaf for unused
-- code points.
data CodeTree a = EmptyLeaf | Leaf a | Fork (CodeTree a) (CodeTree a)
-- | Build the decoding tree from a map symbol -> bit string.  Symbols
-- mapped to the empty bit string are dropped first (length-0 codes are
-- unused symbols); a Left is returned when the codes do not form a
-- prefix code (a symbol's code ends while others continue).
weaveCodeTree :: Show a => Map a [Bool] -> Either String (CodeTree a)
weaveCodeTree x =
  let
    -- Combine two subtree results, propagating the first error.
    eitherFork :: Either String (CodeTree a) -> Either String (CodeTree a) -> Either String (CodeTree a)
    eitherFork (Right xx) (Right y) = Right (Fork xx y)
    eitherFork (Left xx) _ = Left xx
    eitherFork (Right _) (Left y) = Left y
    -- Partition by leading bit: (codes starting 0, codes starting 1,
    -- symbols whose code is exhausted here).
    splitCodeList :: [(a, [Bool])] -> ([(a,[Bool])],[(a,[Bool])],[a])
    splitCodeList [] = ([],[],[])
    splitCodeList ((y, b) : l) =
      let (nl, nr, ne) = splitCodeList l
      in
        case b of [] -> (nl, nr, y : ne)
                  False : br -> ((y, br) : nl, nr, ne)
                  True : br -> (nl, (y, br) : nr, ne)
    -- Recursive construction: a lone exhausted symbol becomes a Leaf;
    -- any other mix of exhausted and continuing codes is an error.
    weaveList :: Show a => [(a, [Bool])] -> Either String (CodeTree a)
    weaveList [] = Right EmptyLeaf
    weaveList cl = case (splitCodeList cl) of (l, r, []) -> eitherFork (weaveList l) (weaveList r)
                                              ([], [], [xx]) -> Right (Leaf xx)
                                              f -> Left ("Error in weaveList with map " ++
                                                         (show cl) ++ ": " ++ (show f))
  in weaveList $ filter (\ (_, y) -> case y of [] -> False
                                               _ -> True) (Map.toList x)
-- (bit in current byte, byte number, word list)
--type BitStream = (Int, Int, [Word8])
-- | Input position (bytes consumed so far) plus the remaining bits,
-- grouped one sub-list per byte, least significant bit first.
type BitStream = (Int, [[Bool]])

-- | All eight bits of a byte, least significant first.
bitsOf :: Word8 -> [Bool]
bitsOf w = [ testBit w i | i <- [0 .. 7] ]

-- | Lookup table: every byte value to its LSB-first bit sequence.
wordToBitSeq :: Data.Array.Array Word8 [Bool]
wordToBitSeq = Data.Array.listArray (0, 255) [ bitsOf b | b <- [0 .. 255] ]

-- | Expand a byte list into per-byte bit groups via the table.
wordsToBitSeq :: [Word8] -> [[Bool]]
wordsToBitSeq ws = [ wordToBitSeq Data.Array.! w | w <- ws ]

-- | Start a bit stream at byte offset zero.
wordsToBitStream :: [Word8] -> BitStream
wordsToBitStream ws = (0, wordsToBitSeq ws)
-- | One output symbol: a literal byte, or a back-reference to the byte
-- emitted this many positions earlier (length expansion happens in
-- parseRepeater, which stores one BackRef per copied byte).
data OutWord = WCh Word8 | BackRef Int -- BackRef Dist
-- | Parser state: remaining input bits, output built newest-first,
-- and the total input length (kept for diagnostics).
data DeflateState = DeflateState {strIn :: BitStream, out :: OutChars OutWord, inLen :: Int }
-- | The DEFLATE parser monad: String errors layered over state.
newtype DeflateParse a = DeflateParse
    { unDeflateParse :: ErrorT String (State DeflateState) a }
    deriving (Functor, Applicative, Alternative,
              Monad, MonadPlus, MonadState DeflateState)
-- | Abort parsing, annotating the message with the current byte offset.
throwUp :: String -> DeflateParse a
throwUp aleph =
    do (byte, _) <- gets strIn
       _sofar <- gets out
       DeflateParse {unDeflateParse = throwError (aleph ++ ", byte=" ++ (show byte))}
-- ++ ", decompressed so far:" ++ (show $ Data.ByteString.pack (reverse $ oc2list sofar)) ++ ")")}
--throwUp aleph = error aleph
{-# INLINE peekAndPopBit #-}
-- | Read one bit: the bit plus the stream advanced past it, or Nothing
-- at end of input.  Exhausted per-byte groups are skipped, bumping the
-- byte counter.
peekAndPopBit :: BitStream -> Maybe (Bool, BitStream)
peekAndPopBit (_, [])                = Nothing
peekAndPopBit (pos, [] : rest)       = peekAndPopBit (pos + 1, rest)
peekAndPopBit (pos, (b : bs) : rest) = Just (b, (pos, bs : rest))

-- | Look at the next bit without consuming it.
peekBit :: BitStream -> Maybe Bool
peekBit (_, [])          = Nothing
peekBit (pos, [] : rest) = peekBit (pos + 1, rest)
peekBit (_, (b : _) : _) = Just b

-- | Drop the next bit.
popBit :: BitStream -> Maybe BitStream
popBit (_, [])                = Nothing
popBit (pos, [] : rest)       = popBit (pos + 1, rest)
popBit (pos, (_ : bs) : rest) = Just (pos, bs : rest)

-- | Discard the rest of the current byte so reading resumes on a byte
-- boundary (used before stored blocks).
finishByte :: BitStream -> Maybe BitStream
finishByte (_, [])          = Nothing
finishByte (pos, [] : rest) = finishByte (pos + 1, rest)
finishByte (pos, _ : rest)  = Just (pos + 1, rest)
-- | Read the next eight bits as a byte without consuming them.  Fails
-- unless the stream sits on a byte boundary, i.e. the current bit
-- group is still complete.
peekByte :: BitStream -> Either String Word8
peekByte (_, []) = Left "peekByte at end of stream"
peekByte (n, [] : l) = peekByte (n+1, l)
peekByte (_n, j : _l) =
    if (length j) < 8
    then Left ("peekByte not at byte position in byte " ++ (show j))
    else Right $ fromIntegral (lsbToInt j)
-- | Histogram: map every value in the list to its occurrence count.
countNumbers :: [Int] -> Map Int Int
countNumbers xs = Map.fromListWith (+) [ (x, 1) | x <- xs ]
-- | Smallest canonical-Huffman code value available at a given bit
-- length, following the RFC 1951 construction: double the previous
-- level's minimum after accounting for the codes used there.
minimalCodeOfLength :: Map Int Int -> Int -> Int
minimalCodeOfLength _ 0 = 0
minimalCodeOfLength counts len =
    2 * (prevMin + Map.findWithDefault 0 (len - 1) counts)
  where
    prevMin = minimalCodeOfLength counts (len - 1)
-- | Convert a sequence of code lengths (indexed by symbol) into the
-- canonical-Huffman map: symbol -> (code length, code value).  Codes of
-- equal length are numbered consecutively in symbol order, starting at
-- the minimal code for that length (RFC 1951, 3.2.2).
toCodeMap :: [Int] -> Map Int (Int, Int)
toCodeMap l =
  let cNum = countNumbers l
      mLen = maximum l
      -- Next free code value per length, seeded with the minima.
      mCodes = Map.fromList (map (\ x -> (x, minimalCodeOfLength cNum x)) [1 .. mLen])
      -- Walk the symbols, handing out and bumping the per-length counter.
      (_, tCodes) = mapAccumL (\ (codeMap, charCode) len ->
                 let cc = codeMap ! len
                 in
                   ((Map.insert len (1+cc) codeMap , 1+charCode), (charCode, (len, cc)))) (mCodes, 0) l
  in
    Map.fromList tCodes
-- | The l-bit code for value c, most significant bit first.
intToCode :: Int -> Int -> [Bool]
intToCode l c = [ testBit c i | i <- reverse [0 .. l - 1] ]
-- | Code lengths (by symbol) to the map symbol -> code bit string.
toBoolCodeMap :: [Int] -> Map Int [Bool]
toBoolCodeMap l = (Map.map (\ (x, y) -> intToCode x y) (toCodeMap l))
-- | Like 'toBoolCodeMap', but symbols with length 0 take no part in
-- code assignment; they are mapped to the empty bit string (which
-- 'weaveCodeTree' later filters out).
toBoolCodeMapWithoutZeroes :: [Int] -> Map Int [Bool]
toBoolCodeMapWithoutZeroes l =
  let
    withCodes = zip [0..] l
    (zeroCodes, realCodes) = partition (\ (_, y) -> y == 0) withCodes
    remaining = map (\ (_, y) -> y) realCodes
    -- Codes computed over the compacted list; keys are positions in
    -- 'remaining', so translate them back to original symbol numbers.
    -- NOTE(review): realCodes !! k is O(n) per key, O(n^2) overall --
    -- fine for DEFLATE's <=288 symbols.
    codeMap = toBoolCodeMap remaining
    xCodeMap = Map.mapKeys (\ k -> case realCodes !! k of (code, _len) ->
                                                           code)
               codeMap
  in
    Map.union xCodeMap (Map.fromList
                        (map (\ (x, _) -> (x, [])) zeroCodes))
-- Tests
-- *Main> (Map.map (\ (x, y) -> intToCode x y) (toCodeMap [2,1,3,3]))
-- fromList [(0,[True,False]),(1,[False]),(2,[True,True,False]),(3,[True,True,True])]
-- *Main> (Map.map (\ (x, y) -> intToCode x y) (toCodeMap ((replicate 144 8) ++ (replicate 112 9) ++ (replicate 24 7) ++ (replicate 8 8)))) ! 0
-- [False,False,True,True,False,False,False,False]
-- *Main> (Map.map (\ (x, y) -> intToCode x y) (toCodeMap ((replicate 144 8) ++ (replicate 112 9) ++ (replicate 24 7) ++ (replicate 8 8)))) ! 143
-- [True,False,True,True,True,True,True,True]
-- *Main> (Map.map (\ (x, y) -> intToCode x y) (toCodeMap ((replicate 144 8) ++ (replicate 112 9) ++ (replicate 24 7) ++ (replicate 8 8)))) ! 144
-- [True,True,False,False,True,False,False,False,False]
-- *Main> (Map.map (\ (x, y) -> intToCode x y) (toCodeMap ((replicate 144 8) ++ (replicate 112 9) ++ (replicate 24 7) ++ (replicate 8 8)))) ! 255
-- [True,True,True,True,True,True,True,True,True]
-- *Main> (Map.map (\ (x, y) -> intToCode x y) (toCodeMap ((replicate 144 8) ++ (replicate 112 9) ++ (replicate 24 7) ++ (replicate 8 8)))) ! 256
-- [False,False,False,False,False,False,False]
-- *Main> (Map.map (\ (x, y) -> intToCode x y) (toCodeMap ((replicate 144 8) ++ (replicate 112 9) ++ (replicate 24 7) ++ (replicate 8 8)))) ! 279
-- [False,False,True,False,True,True,True]
-- *Main> (Map.map (\ (x, y) -> intToCode x y) (toCodeMap ((replicate 144 8) ++ (replicate 112 9) ++ (replicate 24 7) ++ (replicate 8 8)))) ! 280
-- [True,True,False,False,False,False,False,False]
-- *Main> (Map.map (\ (x, y) -> intToCode x y) (toCodeMap ((replicate 144 8) ++ (replicate 112 9) ++ (replicate 24 7) ++ (replicate 8 8)))) ! 287
-- [True,True,False,False,False,True,True,True]
-- *Main>
-- | Lift a function over the input-stream component of the state.
mapStrIn :: (BitStream -> BitStream) -> DeflateState -> DeflateState
mapStrIn f st = st {strIn = f (strIn st)}
-- | Replace the input stream.
setStrIn :: BitStream -> DeflateState -> DeflateState
setStrIn bs st = st {strIn = bs}
-- | Lift a function over the output builder.
mapOut :: (OutChars OutWord -> OutChars OutWord) -> DeflateState -> DeflateState
mapOut f st = st {out = f (out st)}
-- | Replace the output builder.
setOut :: OutChars OutWord -> DeflateState -> DeflateState
setOut bs st = st { out = bs }
-- | Consume one bit, failing at end of input.
advanceInput :: DeflateParse ()
advanceInput =
  do
    !si <- gets strIn
    case popBit si of Just sj -> modify $ setStrIn sj
                      Nothing -> throwUp "advanceInput: End of strInt"
-- | Look at the next bit without consuming it.
peekAtNextBit :: DeflateParse Bool
peekAtNextBit = do
  !sti <- gets strIn
  case peekBit sti of
    Just x -> return x
    Nothing -> throwUp "peekAtNextBit failed: End of bitstream"
-- | Skip to the next byte boundary (used before stored blocks).
advanceToNextByte :: DeflateParse ()
advanceToNextByte = do
  !sti <- gets strIn
  case finishByte sti of
    Just x -> modify $ setStrIn x
    Nothing -> throwUp "advanceToNextByte: End of strInt"
-- | Look at the next whole byte; requires byte alignment.
peekAtNextByte :: DeflateParse Word8
peekAtNextByte = do
  !sti <- gets strIn
  case peekByte sti of Left x -> throwUp x
                       Right x -> do return x
-- | Consume one bit and require it to equal x (fails via 'guard' into
-- the parser's MonadPlus otherwise).
singleBit :: Bool -> DeflateParse ()
singleBit x = do
  guard . (x ==) =<< peekAtNextBit
  advanceInput
-- | Consume and return one bit.
parseBit :: DeflateParse Bool
parseBit = do
  !sti <- gets strIn
  case peekAndPopBit sti of
    Nothing -> throwUp "parseBit: End of strInt"
    Just (b, sti2) -> do
      modify $ setStrIn sti2
      return b
-- | Consume one aligned byte (peek it, then advance a full byte).
parseByte :: DeflateParse Word8
parseByte = do
  !b <- peekAtNextByte
  _ <- parseBit
  advanceToNextByte
  return b
-- | Read @i@ bits from the input, in stream order.
nBits :: Int -> DeflateParse [Bool]
nBits i = replicateM i parseBit
-- | Interpret a bit list most-significant-bit-first as an Int.
msbToInt :: [Bool] -> Int
msbToInt = foldl' step 0
  where step acc b = 2 * acc + fromEnum b

-- | Interpret a bit list least-significant-bit-first as an Int.
lsbToInt :: [Bool] -> Int
lsbToInt = foldr step 0
  where step b acc = 2 * acc + fromEnum b
-- | Read i aligned bytes, returned in input order.  (The name reflects
-- how callers feed the result to 'concOcWithReversedList' when pushing
-- onto the newest-first output builder.)  TRACE_BYTES expands to
-- nothing under the current CPP definition.
parseBytesReverse :: Int -> DeflateParse [Word8]
parseBytesReverse 0 = do return []
parseBytesReverse i = do
  TRACE_BYTES("parseBytesReverse")
  byte <- parseByte
  rest <- parseBytesReverse $ i - 1
  return $ byte : rest
-- | Parse a stored (BTYPE 00) block, RFC 1951 section 3.2.4: skip to
-- the next byte boundary, read LEN and NLEN (16 bits each), check that
-- NLEN is the bitwise complement of LEN, then copy LEN raw bytes to
-- the output.
uncompressedBlock :: DeflateParse ()
uncompressedBlock = do
  advanceToNextByte
  !lenBits <- nBits 16
  !negLenBits <- nBits 16
  -- Every bit of NLEN must differ from the corresponding bit of LEN.
  if and (zipWith (/=) lenBits negLenBits)
    then do !bytes <- parseBytesReverse (lsbToInt lenBits)
            TRACE_BYTES("uncompressed Block" ++ (show $ lsbToInt lenBits))
            modify $ mapOut (concOcWithReversedList (map WCh bytes))
    else throwUp "LEN and NLEN are not negated in uncompressed block"
-- | Decode one symbol: walk the Huffman tree consuming one bit per
-- Fork (False = left, True = right) until a Leaf yields the symbol;
-- reaching an EmptyLeaf means the bits formed no valid code.
parseAccToTree :: (CodeTree a) -> DeflateParse a
parseAccToTree EmptyLeaf = throwUp "Parsing illegal code"
parseAccToTree (Leaf x) = do return x
parseAccToTree (Fork ctA ctB) = do
  !b <- parseBit
  parseAccToTree (if b then ctB else ctA)
-- | Extra bits carried by a length code 257..285 (RFC 1951, 3.2.5):
-- none up to 264, none for 285, otherwise growing by one every four
-- codes.
repeatCodeToBitNum :: Int -> Int
repeatCodeToBitNum rc
  | rc <= 264 = 0
  | rc == 285 = 0
  | otherwise = (rc - 265) `div` 4 + 1

-- | Base match length for a length code: 3 for 257 and 258 for 285
-- are anchored, the rest follow recursively from the previous code's
-- base plus its extra-bit span.
repeatCodeToBase :: Int -> Int
repeatCodeToBase rc = case rc of
  257 -> 3
  285 -> 258
  _   -> 2 ^ repeatCodeToBitNum (rc - 1) + repeatCodeToBase (rc - 1)
-- | Extra bits carried by a distance code 0..29 (RFC 1951, 3.2.5).
distCodeToBitNum :: Int -> Int
distCodeToBitNum dc = if dc <= 3 then 0 else (dc - 4) `div` 2 + 1

-- | Base distance for a code, computed recursively from the previous
-- code's base plus its extra-bit span.
distCodeToBaseC :: Int -> Int
distCodeToBaseC 0  = 1
distCodeToBaseC dc = 2 ^ distCodeToBitNum (dc - 1) + distCodeToBase (dc - 1)

-- | Memo table of all 30 base distances.
distCodeToBaseA :: Data.Array.Array Int Int
distCodeToBaseA = Data.Array.listArray (0, 29) (map distCodeToBaseC [0 .. 29])

-- | Memoised base distance lookup.
distCodeToBase :: Int -> Int
distCodeToBase dc = distCodeToBaseA Data.Array.! dc
{-
forwardRepeatUnsafe :: Int -> Int -> OutChars a -> Maybe (OutChars a)
forwardRepeatUnsafe _ 0 sq = Just sq
forwardRepeatUnsafe dist len sq =
case ocNth dist sq of Nothing -> Nothing
Just x -> forwardRepeatUnsafe dist (len - 1) (mcons x sq)
forwardRepeat :: Show a => Int -> Int -> OutChars a -> Maybe (OutChars a)
forwardRepeat dist len sq =
case dist of _ | dist < 1 -> Nothing
| otherwise ->
if len < 3 then Nothing
else forwardRepeatUnsafe (dist - 1) len sq
-}
-- | Handle a length code (257..285): read its extra length bits, then
-- a distance code from distMap plus its extra bits, and emit @len@
-- BackRef markers that each point @dist@ positions back.  Actual byte
-- copying is deferred to 'replaceBackreferences'.
parseRepeater :: CodeTree Int -> Int -> DeflateParse ()
parseRepeater distMap code =
  do
    !bits <- nBits $ repeatCodeToBitNum code
    !distCode <- parseAccToTree distMap
    if (distCode < 0) || (distCode > 29) then throwUp "Illegal Distance Code in parseRepeater"
    else
      do distBits <- nBits $ distCodeToBitNum distCode
         --o <- gets out
         let dist = (distCodeToBase distCode) + (lsbToInt distBits)
             len = (repeatCodeToBase code) + (lsbToInt bits)
             -- !xout = forwardRepeat dist len o
           in
             modify $ mapOut $ (concOcWithReversedList (replicate len (BackRef dist)))
             -- case xout of Nothing -> throwUp "Problem with repeting in parseRepeater"
             --              Just x -> modify $ setOut x
-- | Decode symbols from a Huffman-coded block until the end-of-block
-- code 256: literals (<= 255) are emitted directly, 257..285 trigger a
-- back-reference via 'parseRepeater', 286/287 are reserved and illegal.
parseCompressedBlock :: CodeTree Int -> CodeTree Int -> DeflateParse ()
parseCompressedBlock codeMap distMap = do
  !code <- parseAccToTree codeMap
  case code of _ | code < 0 -> throwUp "Error: Code<0 in parseCompressedBlock"
                 | code <= 255 -> do !o <- gets out
                                     modify $ setOut (mcons (WCh (fromIntegral code)) o)
                                     TRACE_BYTES("compressed Block")
                                     parseCompressedBlock codeMap distMap
                 | code == 256 -> do
                     return ()
                 | code <= 285 -> do
                     parseRepeater distMap code
                     TRACE_BYTES("compressed Block")
                     parseCompressedBlock codeMap distMap
                 | code <= 287 -> throwUp "Error: code 286 or 287 in parseCompressedBlock"
                 | otherwise -> throwUp "Error: Code>287 in parseCompressedBlock"
-- | Parse a block with the fixed Huffman tables (BTYPE 01): literal
-- code lengths 8/9/7/8 over the ranges 0-143/144-255/256-279/280-287
-- and 5-bit distance codes, exactly as in RFC 1951 section 3.2.6.
staticallyCompressedBlock :: DeflateParse ()
staticallyCompressedBlock =
  let
    cm_ = weaveCodeTree $ toBoolCodeMap ((replicate 144 8) ++
                                         (replicate 112 9) ++
                                         (replicate 24 7) ++
                                         (replicate 8 8))
    dm_ = weaveCodeTree $ toBoolCodeMap (replicate 32 5)
    -- The fixed tables always weave successfully, so Left is impossible.
    cm = case cm_ of Right x -> x
                     _ -> error "staticallyCompressedBlock: cm: This should never happen!"
    dm = case dm_ of Right x -> x
                     _ -> error "staticallyCompressedBlock: dm: This should never happen!"
  in do
    parseCompressedBlock cm dm
-- | Read the dynamic-block header counters (RFC 1951, 3.2.7):
-- HLIT+257 literal/length codes, HDIST+1 distance codes, HCLEN+4
-- code-length codes.
dynamicBlockHeader :: DeflateParse (Int, Int, Int)
dynamicBlockHeader = do
  !hlitBits <- nBits 5
  !hdistBits <- nBits 5
  !hclenBits <- nBits 4
  return ((lsbToInt hlitBits) + 257,
          (lsbToInt hdistBits) + 1,
          (lsbToInt hclenBits) + 4)
-- | Read the HCLEN 3-bit code lengths for the code-length alphabet
-- (RFC 1951, 3.2.7), in transmission order.
dynamicBlockLenListRaw :: Int -> DeflateParse [Int]
dynamicBlockLenListRaw hclen = do
  !codes <- replicateM hclen (nBits 3)
  return (map lsbToInt codes)
-- | Undo the scrambled transmission order of the code-length-code
-- lengths: the wire sends them in the fixed order below (RFC 1951,
-- page 13); return them indexed by symbol 0..18, defaulting missing
-- entries to 0.
dynamicBlockLenListToActualLens :: [Int] -> [Int]
dynamicBlockLenListToActualLens !lens = map lookupLen ([0 .. 18] :: [Int])
  where
    rfcOrder = [16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15]
    table = Map.fromList (zip rfcOrder lens)
    lookupLen i = Map.findWithDefault 0 i table
-- | Read the code-length-code lengths and weave them into the Huffman
-- tree used to decode the real literal/distance code lengths.
dynamicBlockLenMap :: Int -> DeflateParse (CodeTree Int)
dynamicBlockLenMap hclen = do
  !raw <- dynamicBlockLenListRaw hclen
  let
    lM = weaveCodeTree $ toBoolCodeMapWithoutZeroes $ dynamicBlockLenListToActualLens raw
  in
    case lM of Right lm -> do return lm
               Left lm -> throwUp ("Did not receive Length Map in dynamicBlockLenMap: " ++ lm)
-- | Decode one symbol of the code-length alphabet (0..18).
readCodeLengthCode :: CodeTree Int -> DeflateParse Int
readCodeLengthCode tree = do parseAccToTree tree
-- read the raw code lengths and, if applicable, the repeat length
-- (which defaults to 0 if not applicable)
-- | Read raw (code, repeat count) pairs until n code lengths have been
-- produced.  Codes 0..15 stand for themselves once; 16 repeats the
-- previous length 3-6 times (2 extra bits); 17 and 18 emit 3-10 resp.
-- 11-138 zeros (3 resp. 7 extra bits) -- RFC 1951, 3.2.7.
readCodeLengthsRaw :: Int -> CodeTree Int -> DeflateParse [(Int, Int)]
readCodeLengthsRaw 0 _ = return []
readCodeLengthsRaw n ct =
  if (n < 0) then throwUp ("Error in readCodeLengthsRaw: negative number of codes: " ++ (show n))
  else let
         -- Read bitNum extra bits, compute base + extra repeats, recurse
         -- on the remaining count.
         proceed :: Int -> Int -> Int -> Int -> DeflateParse [(Int, Int)]
         proceed m clc bitNum base = do
           !bits <- nBits bitNum
           let repeats = base + (lsbToInt bits) in
             do !rest <- readCodeLengthsRaw (m - repeats) ct
                return $ (clc, repeats) : rest
       in
         do
           !clc <- readCodeLengthCode ct
           case clc of _ | clc <= 15 -> proceed n clc 0 1 -- TODO: reference RFC
                         | clc == 16 -> proceed n clc 2 3
                         | clc == 17 -> proceed n clc 3 3
                         | clc == 18 -> proceed n clc 7 11
                         | otherwise -> throwUp "Error in readCodeLengthsRaw: illegal code"
-- | Expand raw (code, repeatCount) pairs into the final map from
-- symbol index to code length: 17/18 emit runs of zeros, 16 repeats
-- the previously emitted length, everything else stands for itself.
-- A leading 16 has nothing to repeat and is rejected -- the old code
-- only rejected the singleton list [(16,_)] and crashed via
-- @last []@ when a longer list started with 16.
rawCodeLengthsToActualCodeLengths :: [(Int,Int)] -> Either String (Map Int Int)
rawCodeLengthsToActualCodeLengths [] = Left "Empty Code Length List in rawCodeLengthsToActualCodeLengths"
rawCodeLengthsToActualCodeLengths ((16,_):_) = Left "Replication of last Byte at start in rawCodeLengthsToActualCodeLengths"
rawCodeLengthsToActualCodeLengths rcl =
    Right $ Map.fromList $ zip [0..] (
        foldl' ( \ accum (cC, cR) ->
                  accum ++ case cC of 18 -> replicate cR 0
                                      17 -> replicate cR 0
                                      -- accum is non-empty here: the first pair
                                      -- is guaranteed not to be a 16-code.
                                      16 -> replicate cR (last accum)
                                      _  -> [cC] ) [] rcl )
-- | Read n code lengths (decoded through the code-length tree ct) and
-- weave them into a Huffman tree for the literal or distance alphabet.
dynamicBlockCodeTree :: Int -> CodeTree Int -> DeflateParse (CodeTree Int)
dynamicBlockCodeTree n ct = do
  !codeList <- readCodeLengthsRaw n ct
  case rawCodeLengthsToActualCodeLengths codeList of
    Left x -> throwUp x
    Right x -> case weaveCodeTree (toBoolCodeMapWithoutZeroes
                                   $ map (\ m -> Map.findWithDefault 0 m x) [0..(maximum $ Map.keys x)]) of
                 Right y -> return y
                 Left xx -> throwUp ("Error in dynamicBlockCodeTree: " ++ xx)
-- | Parse a block with dynamic Huffman tables (BTYPE 10): header
-- counters, the code-length tree, then the literal and distance trees,
-- and finally the compressed payload itself.
dynamicallyCompressedBlock :: DeflateParse ()
dynamicallyCompressedBlock = do
  (!hlit, !hdist, !hclen) <- dynamicBlockHeader
  !lm <- dynamicBlockLenMap hclen
  !cm <- dynamicBlockCodeTree hlit lm
  !dm <- dynamicBlockCodeTree hdist lm
  parseCompressedBlock cm dm
-- traceState :: String -> DeflateParse ()
-- traceState q = do
-- (bt, by, _) <- gets strIn
-- si <- gets strIn
-- --o <- gets out
-- modify $ trace (q ++ "trace: bit=" ++ (show bt) ++ " byte=" ++ (show by)) $ setStrIn si
-- modify $ trace (q ++ "trace: bit=" ++ (show bt) ++ " byte=" ++ (show by) ++ " out=" ++ (show $ reverse $ oc2list o)) $ setStrIn si
-- toplevelParser :: DeflateParse (GHC.Arr.Array Int Word8)
-- toplevelParser =
-- let block = do !headers <- nBits 2
-- let !ret = case headers of [True, True] -> (throwUp "Illegal Block Type 11 in toplevelParser")
-- [False, False] -> do
-- TRACE_BYTES("start uncompressed block")
-- uncompressedBlock
-- [False, True] -> do
-- TRACE_BYTES("start dynamically compressed block")
-- dynamicallyCompressedBlock
-- [True, False] -> do
-- TRACE_BYTES("start statically compressed block")
-- staticallyCompressedBlock
-- _ -> error "Impossible error in toplevelParser"
-- in ret
-- nBlock = do { singleBit False; block }
-- lBlock = do { singleBit True; block }
-- in do
-- _ <- many nBlock
-- lBlock
-- !o <- gets out
-- let
-- !ro = reverse (oc2list o)
-- rl = length ro
-- arr = Data.Array.array (0, rl-1) [(i, case i of WCh x -> x; BackRef dist -> arr !! (i - dist)) | i <- [0..(rl-1)]]
-- in
-- return arr
-- | Materialise the decompressed bytes: fill an array left to right,
-- writing literals directly and resolving each BackRef by copying the
-- byte written @dist@ positions earlier (already resolved, since the
-- fill is strictly left to right).
replaceBackreferences :: [OutWord] -> (Data.Array.Array Int Word8)
replaceBackreferences l =
  let
    -- Write the word at index n; back-references read an earlier cell.
    replaceNth :: Int -> OutWord -> (STArray s Int Word8) -> ST s ()
    replaceNth n (WCh c) arr = writeArray arr n c
    replaceNth n (BackRef b) arr = do c <- readArray arr (n - b)
                                      writeArray arr n c
    -- Fill the array from index n onwards with the remaining words.
    insertToArray :: Int -> [OutWord] -> (STArray s Int Word8) -> ST s ()
    insertToArray _ [] _ = return ()
    insertToArray n (x : rest) arr = do replaceNth n x arr
                                        insertToArray (n + 1) rest arr
    -- Allocate a zero-initialised array of the right size, populate it.
    toplevel :: [OutWord] -> ST s (STArray s Int Word8)
    toplevel ow =
      do arr <- newArray (0, (length ow) - 1) 0 :: ST s (STArray s Int Word8)
         insertToArray 0 ow arr
         return arr
  in runSTArray (toplevel l)
-- | Parse a complete DEFLATE stream: zero or more non-final blocks
-- (BFINAL = 0) followed by exactly one final block (BFINAL = 1), then
-- resolve all back-references into a byte array.
toplevelParser :: DeflateParse (Data.Array.Array Int Word8)
toplevelParser =
  -- The two BTYPE bits select the block kind (RFC 1951, 3.2.3);
  -- 11 is reserved.  TRACE_BYTES expands to nothing under CPP.
  let block = do !headers <- nBits 2
                 let !ret = case headers of [True, True] -> (throwUp "Illegal Block Type 11 in toplevelParser")
                                            [False, False] -> do
                                              TRACE_BYTES("start uncompressed block")
                                              uncompressedBlock
                                            [False, True] -> do
                                              TRACE_BYTES("start dynamically compressed block")
                                              dynamicallyCompressedBlock
                                            [True, False] -> do
                                              TRACE_BYTES("start statically compressed block")
                                              staticallyCompressedBlock
                                            _ -> error "Impossible error in toplevelParser"
                   in ret
      nBlock = do { singleBit False; block }
      lBlock = do { singleBit True; block }
  in do
    _ <- many nBlock
    lBlock
    !o <- gets out
    let !ro = oc2listReverse o
    return $ replaceBackreferences ro
-- | Run the DEFLATE parser over a byte list, yielding either an error
-- message or the decompressed bytes.
inflate :: [Word8] -> Either String (Data.Array.Array Int Word8)
inflate !str =
    let !blub = Control.Monad.State.runState (
          runErrorT ( unDeflateParse toplevelParser )) ( DeflateState { strIn = wordsToBitStream str, out = emptyOutChars, inLen = length str })
    in case blub of (!r, _) -> r
-- | Strip the gzip (RFC 1952) container, leaving the raw DEFLATE
-- payload: drop the fixed 10-byte header, every optional field the
-- FLG byte selects, and the trailing 8 bytes (CRC32 + ISIZE).
gzStrip :: [Word8] -> [Word8]
gzStrip !bytes =
  let
    -- Split at the first NUL, discarding the NUL itself (gzip's
    -- zero-terminated name/comment fields).
    breakAtZero :: [Word8] -> ([Word8], [Word8])
    breakAtZero [] = ([], [])
    breakAtZero (0 : rest) = ([], rest)
    breakAtZero (c : rest) = let (field, rest') = breakAtZero rest
                             in (c : field, rest')
    -- FLG bit positions, RFC 1952 section 2.3.1 (FTEXT = bit 0 unused).
    fhcrc    = 1
    fextra   = 2
    fname    = 3
    fcomment = 4
    [_id1, _id2, _cm, flg, _mt1, _mt2, _mt3, _mt4, _xfl, _os] = take 10 bytes
    afterHeader = drop 10 bytes
    -- FEXTRA: two-byte little-endian length, then that many bytes.
    afterExtra
      | testBit flg fextra =
          let [xl1, xl2] = map fromIntegral (take 2 afterHeader)
          in drop (2 + xl1 + 256 * xl2) afterHeader
      | otherwise = afterHeader
    afterName    = if testBit flg fname    then snd (breakAtZero afterExtra) else afterExtra
    afterComment = if testBit flg fcomment then snd (breakAtZero afterName)  else afterName
    afterHcrc    = if testBit flg fhcrc    then drop 2 afterComment          else afterComment
    !payload = take (length afterHcrc - 8) afterHcrc
  in payload
-- | Decompress a gzip byte stream: peel the container, then inflate.
gunzip :: [Word8] -> Either String (Data.Array.Array Int Word8)
gunzip = inflate . gzStrip
-- | Decompress the file named by the first argument into the file
-- named by the second.  NOTE(review): 'head args' and 'args !! 1'
-- crash with an unhelpful message when fewer than two arguments are
-- given -- an explicit usage check would be friendlier.
mainWithArgs :: [String] -> IO ()
mainWithArgs args = do
  !content <- Data.ByteString.readFile (head args)
  case gunzip (Data.ByteString.unpack content) of
    Left x -> print $ "Error: " ++ x
    Right !arr -> do
      -- deepseq forces the whole array before we report success.
      arr `deepseq` print "Success."
      -- print $ foldl' xor 255 $ Data.Array.elems arr
      Data.ByteString.writeFile (args !! 1) (Data.ByteString.pack (Data.Array.elems arr))
main :: IO ()
main = do
  args <- System.Environment.getArgs
  mainWithArgs args
-- deflcont :: IO ()
-- deflcont = do content <- Data.ByteString.readFile "/tmp/deflcont"
-- case (inflate $ Data.ByteString.unpack content) of
-- Left x -> print $ "Error: " ++ x
-- Right y -> print $ Data.ByteString.pack y
| dasuxullebt/inflate.hs | inflate.runST.hs | gpl-3.0 | 26,165 | 0 | 21 | 7,681 | 7,204 | 3,732 | 3,472 | 423 | 10 |
-- Translation Module
-- By Gregory W. Schwartz
{- | Collects all functions pertaining to the translation of nucleotides to
amino acids for Text.
-}
{-# LANGUAGE OverloadedStrings #-}
module Data.Fasta.Text.Translation ( codon2aa
, customCodon2aa
, translate
, customTranslate
) where
-- Built in
import Data.Either
import qualified Data.Text as T
-- Local
import Data.Fasta.Text.Types
-- | Converts a codon (three nucleotides) to an amino acid using the
-- standard genetic code.  Input is upper-cased first, so lookups are
-- case-insensitive.  A codon containing "N" translates to 'X' (unknown
-- amino acid); gap codons ("---", "...", "~~~") and codons containing
-- '-' or '.' translate to '-'; stop codons give '*'.  Anything else is
-- a Left with a diagnostic message.
codon2aa :: Codon -> Either T.Text AA
codon2aa x
    | codon `elem` ["GCT", "GCC", "GCA", "GCG"] = Right 'A'
    | codon `elem` ["CGT", "CGC", "CGA", "CGG", "AGA", "AGG"] = Right 'R'
    | codon `elem` ["AAT", "AAC"] = Right 'N'
    | codon `elem` ["GAT", "GAC"] = Right 'D'
    | codon `elem` ["TGT", "TGC"] = Right 'C'
    | codon `elem` ["CAA", "CAG"] = Right 'Q'
    | codon `elem` ["GAA", "GAG"] = Right 'E'
    | codon `elem` ["GGT", "GGC", "GGA", "GGG"] = Right 'G'
    | codon `elem` ["CAT", "CAC"] = Right 'H'
    | codon `elem` ["ATT", "ATC", "ATA"] = Right 'I'
    | codon `elem` ["ATG"] = Right 'M'
    | codon `elem` ["TTA", "TTG", "CTT", "CTC", "CTA", "CTG"] = Right 'L'
    | codon `elem` ["AAA", "AAG"] = Right 'K'
    | codon `elem` ["TTT", "TTC"] = Right 'F'
    | codon `elem` ["CCT", "CCC", "CCA", "CCG"] = Right 'P'
    | codon `elem` ["TCT", "TCC", "TCA", "TCG", "AGT", "AGC"] = Right 'S'
    | codon `elem` ["ACT", "ACC", "ACA", "ACG"] = Right 'T'
    | codon `elem` ["TGG"] = Right 'W'
    | codon `elem` ["TAT", "TAC"] = Right 'Y'
    | codon `elem` ["GTT", "GTC", "GTA", "GTG"] = Right 'V'
    | codon `elem` ["TAA", "TGA", "TAG"] = Right '*'
    | codon `elem` ["---", "..."] = Right '-'
    | codon == "~~~" = Right '-'
    | "N" `T.isInfixOf` codon = Right 'X'
    | "-" `T.isInfixOf` codon = Right '-'
    | "." `T.isInfixOf` codon = Right '-'
    | otherwise = Left errorMsg
  where
    codon = T.toUpper x
    errorMsg = T.append "Unidentified codon: " codon
-- | Translate a codon, consulting the caller-supplied table first and
-- falling back to the standard genetic code ('codon2aa') on a miss.
customCodon2aa :: [(Codon, Char)] -> Codon -> Either T.Text AA
customCodon2aa table codon =
    maybe (codon2aa codon) Right (lookup codon table)
-- | Translates a text of nucleotides given a reading frame (1, 2, or
-- 3) -- drops the first 0, 1, or 2 nucleotides respectively.  Custom
-- codon translations override the defaults.  Returns the first codon
-- error, if any; otherwise the sequence with its nucleotides replaced
-- by amino acids.
customTranslate :: [(Codon, AA)]
                -> Int
                -> FastaSequence
                -> Either T.Text FastaSequence
customTranslate table pos x =
    -- 'sequence' over Either stops at the first Left, which matches the
    -- old "head of the lefts" behaviour while traversing only once
    -- (the old code also re-implemented Data.Either.isLeft locally).
    case sequence translation of
      Left err  -> Left err
      Right aas -> Right $ x { fastaSeq = T.pack aas }
  where
    -- Chop the frame-shifted sequence into complete codons (a trailing
    -- partial codon is dropped) and translate each one.
    translation = map (customCodon2aa table)
                . filter ((== 3) . T.length)
                . T.chunksOf 3
                . T.drop (pos - 1)
                . fastaSeq
                $ x
-- | Translates a text of nucleotides given a reading frame (1, 2, or
-- 3) -- drops the first 0, 1, or 2 nucleotides respectively. Returns
-- a text with the error if the codon is invalid.  Equivalent to
-- 'customTranslate' with an empty override table.
translate :: Int -> FastaSequence -> Either T.Text FastaSequence
translate = customTranslate []
| GregorySchwartz/fasta | src/Data/Fasta/Text/Translation.hs | gpl-3.0 | 4,340 | 0 | 15 | 1,809 | 1,078 | 587 | 491 | 62 | 2 |
{-# OPTIONS -Wall #-}
module Data.MArray (
MArray,
mEmpty,
mIndex,
mSubArray,
arrayUpdate,
killSub,
order
) where
-- Copyright 2007, 2010 Antoine Latter
-- aslatter@gmail.com
import Data.Map
import Data.MValue hiding (split)
import Prelude hiding (lookup,null)
import Test.QuickCheck
data MArray = MArray (Maybe MValue) (Map MValue MArray)
-- |Returns an empty MArray: no value at the root, no children.
mEmpty :: MArray
mEmpty = MArray Nothing empty
-- | Data.Map lookup lifted into an arbitrary monad via 'fail'.
-- NOTE(review): on modern GHC this pattern needs a MonadFail
-- constraint; the Monad constraint here likely predates the
-- MonadFail split -- confirm against the GHC version in use.
lookup' :: (Monad m, Ord k) => k -> Map k v -> m v
lookup' k m = case lookup k m of
                Just a -> return a
                Nothing -> fail "Data.Map.lookup: failed!"
-- |Given an MArray and a list of subscripts, maybe
-- return the value associated with those subs.
mIndex :: Monad m => MArray -> [MValue] -> m MValue
mIndex (MArray v _map) [] = case v of
                              Nothing -> fail "mIndex: value not set at specified index"
                              Just mv -> return mv
mIndex (MArray _ map') (x:xs) = do
  vc <- lookup' x map'
  mIndex vc xs
-- | Follow a subscript path to the sub-array rooted there; fails if
-- any intermediate subscript is absent.
mSubArray :: Monad m => MArray -> [MValue] -> m MArray
mSubArray m [] = return m
mSubArray (MArray _ map') (x:xs) = do
  vc <- lookup' x map'
  mSubArray vc xs
-- |Takes an array, subscripts and a value and returns the
-- updated array.  Missing intermediate nodes are created on the way
-- down (via 'nextArray').
arrayUpdate :: MArray -> [MValue] -> MValue -> MArray
arrayUpdate (MArray _ map') [] v' = MArray (Just v') map'
arrayUpdate ma@(MArray n map') (sub:subs) v' = MArray n map'' where
  map'' :: Map MValue MArray
  map'' = insert sub ma' map'
  ma' :: MArray
  ma' = arrayUpdate (nextArray sub ma) subs v'
-- | Delete the subtree addressed by the subscript path.  An empty path
-- is a programming error; a path through a missing subscript leaves
-- the array unchanged.
killSub :: MArray -> [MValue] -> MArray
killSub MArray{} [] = error "fatal error in MArray.killSub"
killSub (MArray v m) [x] = MArray v $ x `delete` m
killSub a@(MArray v m) (x:xs)
  = case x `lookup` m of
      Nothing -> a
      Just a' -> MArray v $ insert x (killSub a' xs) m
-- | Follow one subscript into an array, yielding the child array or a
-- fresh empty one when the subscript is not present.
nextArray :: MValue -> MArray -> MArray
nextArray v (MArray _ children) =
    maybe (MArray Nothing empty) id (lookup v children)
-- |Returns the next highest subscript for the last
-- subscript provided. Passing false for the bool
-- gives the next lowest, instead.
order :: MArray -> Bool -> MValue -> Maybe MValue
order (MArray _ map') forward mv =
    -- 'split' excludes mv itself, leaving the strictly smaller and
    -- strictly greater subscripts; take the nearest on the chosen side.
    let (mapBack, mapForward) = split mv map'
        map'' | forward = mapForward
              | otherwise = mapBack
        findElem | forward = findMin
                 | otherwise = findMax
    in if null map'' then Nothing
       else let (k, _) = findElem map'' in Just k
{-
instance Arbitrary MArray where
arbitrary = do
n <- arbitrary
xs <- arbitrary
return $ MArray n (fromList xs)
coarbitrary (MArray n xs) = variant 0 . coarbitrary (n,toList xs)
-} | aslatter/hmumps | Data/MArray.hs | gpl-3.0 | 2,936 | 0 | 12 | 864 | 873 | 446 | 427 | 59 | 2 |
{-
Created : 2014 Nov 25 (Tue) 18:20:43 by Harold Carr.
Last Modified : 2014 Nov 25 (Tue) 21:29:27 by Harold Carr.
-}
import Parsing
import Test.HUnit as T
import Test.HUnit.Util as U
-- HOMEWORK 7
------------------------------------------------------------------------------
-- EXERCISE 0
-- 'item' consumes exactly one character and returns the rest.
e0 :: [Test]
e0 = U.t "e0"
     (parse item "hello")
     [('h',"ello")]
------------------------------------------------------------------------------
-- EXERCISE 1
{-
:t (return 1 +++ return 2)
;=> (return 1 +++ return 2) :: Num a => Parser a
-}
-- (+++) is left-biased choice: the first parser's result wins.
e1 :: [Test]
e1 = U.t "e1"
     (parse (return (1::Int) +++ return 2) "2")
     [(1,"2")]
------------------------------------------------------------------------------
-- EXERCISE 2
-- 'return' succeeds without consuming any input.
e2 :: [Test]
e2 = U.t "e2"
     (parse (return (1::Int)) "hello")
     [(1,"hello")]
------------------------------------------------------------------------------
-- EXERCISE 3
-- 'item' succeeds here, so the 'return' alternative is never tried.
e3 :: [Test]
e3 = U.t "e3"
     (parse (item +++ return 'a') "hello")
     [('h',"ello")]
------------------------------------------------------------------------------
-- EXERCISE 4
-- The bound value is discarded; only 'item' touches the input.
e4 :: [Test]
e4 = U.t "e4"
     (parse (return (2::Int) >>= \_ -> item) "hello")
     [('h',"ello")]
------------------------------------------------------------------------------
-- EXERCISE 5
-- char 'a' matches, so it consumes the 'a'.
e5t :: [Test]
e5t = U.t "e5t"
      (parse (char 'a' +++ return 'b') "abc")
      [('a',"bc")]
-- char 'a' fails on '1', so the fallback returns 'b' without consuming.
e5f :: [Test]
e5f = U.t "e5f"
      (parse (char 'a' +++ return 'b') "1aca")
      [('b',"1aca")]
------------------------------------------------------------------------------
-- EXERCISE 6
-- 'int' parses a full decimal number (leading zeroes allowed) ...
e6p :: [Test]
e6p = U.t "e6p"
      (parse int "01234")
      [(1234,"")]
-- ... including an optional leading minus sign.
e6n :: [Test]
e6n = U.t "e6n"
      (parse int "-0001234")
      [(-1234,"")]
------------------------------------------------------------------------------
-- EXERCISE 7
-- 'comment' consumes "--" up to and including the newline.
e7 :: [Test]
e7 = U.t "e7"
     (parse comment "-- foo bar\nbaz")
     [((),"baz")]
------------------------------------------------------------------------------
-- EXERCISE 8
-- Subtraction must associate to the left: (34 - 3) - 1 = 30.
e8 :: [Test]
e8 = U.t "e8"
     (parse expr "34 - 3 - 1")
     [(30,"")]
------------------------------------------------------------------------------
-- | Run every exercise's tests and report the counts.
main :: IO Counts
main =
    T.runTestTT $ T.TestList $ e0 ++ e1 ++ e2 ++ e3 ++ e4 ++ e5t ++ e5f ++
                               e6p ++ e6n ++ e7 ++ e8
-- End of file.
| haroldcarr/learn-haskell-coq-ml-etc | haskell/course/2014-10-edx-delft-fp101x-intro-to-fp-erik-meijer/hw07-parsing.hs | unlicense | 2,371 | 0 | 17 | 493 | 628 | 359 | 269 | 51 | 1 |
import System.IO
import Data.Char (toUpper)
-- | Copy input.txt to out.txt, upper-casing it, using lazy I/O:
-- hGetContents reads the file on demand, and hPutStr has forced the
-- whole contents before either handle is closed, so this ordering is
-- safe.  Closing inh any earlier would truncate the lazy string.
main :: IO ()
main = do
       inh <- openFile "input.txt" ReadMode
       outh <- openFile "out.txt" WriteMode
       inpStr <- hGetContents inh
       let result = processData inpStr
       hPutStr outh result
       hClose inh
       hClose outh
-- | Upper-case every character of the input, lazily, one at a time.
processData :: String -> String
processData text = [ toUpper c | c <- text ]
| EricYT/Haskell | src/real_haskell/chapter-7/toupper-lazy.hs | apache-2.0 | 333 | 0 | 10 | 78 | 114 | 52 | 62 | 13 | 1 |
-- Digit Number
-- http://judge.u-aizu.ac.jp/onlinejudge/description.jsp?id=0002
-- | Count decimal digits by repeated division, accumulating into n.
-- Note the original behaviour is kept: an input of 0 contributes no
-- digits (the problem guarantees a + b >= 2, so 0 never occurs).
digits :: Int -> Int -> Int
digits n 0 = n
digits n a = digits (n + 1) (a `div` 10)

-- | Number of decimal digits in the sum of a pair.
ans :: (Int, Int) -> Int
ans (a, b) = digits 0 (a + b)
-- | Read "a b" integer pairs, one per line, and print the number of
-- decimal digits in each sum (AOJ problem 0002).
main :: IO ()
main = do
  contents <- getContents
  let rows = map (map read . words) (lines contents) :: [[Int]]
      -- A malformed line used to die with an opaque pattern-match
      -- failure; now it reports what was expected.
      pairs = map (\row -> case row of
                             (a : b : _) -> (a, b)
                             _ -> error "main: expected two integers per line")
                  rows
  mapM_ (print . ans) pairs
| a143753/AOJ | 0002.hs | apache-2.0 | 413 | 0 | 15 | 111 | 215 | 113 | 102 | 12 | 1 |
{-# LANGUAGE BangPatterns, CPP, Rank2Types #-}
{-# OPTIONS_HADDOCK not-home #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Text.Internal.Builder
-- Copyright : (c) 2013 Bryan O'Sullivan
-- (c) 2010 Johan Tibell
-- License : BSD-style (see LICENSE)
--
-- Maintainer : Johan Tibell <johan.tibell@gmail.com>
-- Stability : experimental
-- Portability : portable to Hugs and GHC
--
-- /Warning/: this is an internal module, and does not have a stable
-- API or name. Functions in this module may not check or enforce
-- preconditions expected by public modules. Use at your own risk!
--
-- Efficient construction of lazy @Text@ values. The principal
-- operations on a @Builder@ are @singleton@, @fromText@, and
-- @fromLazyText@, which construct new builders, and 'mappend', which
-- concatenates two builders.
--
-- To get maximum performance when building lazy @Text@ values using a
-- builder, associate @mappend@ calls to the right. For example,
-- prefer
--
-- > singleton 'a' `mappend` (singleton 'b' `mappend` singleton 'c')
--
-- to
--
-- > singleton 'a' `mappend` singleton 'b' `mappend` singleton 'c'
--
-- as the latter associates @mappend@ to the left.
--
-----------------------------------------------------------------------------
module Data.Text.Internal.Builder
( -- * Public API
-- ** The Builder type
Builder
, toLazyText
, toLazyTextWith
-- ** Constructing Builders
, singleton
, fromText
, fromLazyText
, fromString
-- ** Flushing the buffer state
, flush
-- * Internal functions
, append'
, ensureFree
, writeN
) where
import Control.Monad.ST (ST, runST)
import Data.Monoid (Monoid(..))
#if MIN_VERSION_base(4,9,0)
import Data.Semigroup (Semigroup(..))
#endif
import Data.Text.Internal (Text(..))
import Data.Text.Internal.Lazy (smallChunkSize)
import Data.Text.Unsafe (inlineInterleaveST)
import Data.Text.Internal.Unsafe.Char (unsafeWrite)
import Prelude hiding (map, putChar)
import qualified Data.String as String
import qualified Data.Text as S
import qualified Data.Text.Array as A
import qualified Data.Text.Lazy as L
------------------------------------------------------------------------
-- | A @Builder@ is an efficient way to build lazy @Text@ values.
-- There are several functions for constructing builders, but only one
-- to inspect them: to extract any data, you have to turn them into
-- lazy @Text@ values using @toLazyText@.
--
-- Internally, a builder constructs a lazy @Text@ by filling arrays
-- piece by piece. As each buffer is filled, it is \'popped\' off, to
-- become a new chunk of the resulting lazy @Text@. All this is
-- hidden from the user of the @Builder@.
newtype Builder = Builder {
     -- Invariant (from Data.Text.Lazy):
     -- The lists include no null Texts.
     -- A builder is a transformer of continuations: given "what to do
     -- with the buffer after me" it yields the final chunk list.
     runBuilder :: forall s. (Buffer s -> ST s [S.Text])
                -> Buffer s
                -> ST s [S.Text]
   }
#if MIN_VERSION_base(4,9,0)
-- '<>' is O(1): builders are continuation transformers and 'append'
-- is just function composition.
instance Semigroup Builder where
   (<>) = append
   {-# INLINE (<>) #-}
#endif
instance Monoid Builder where
   mempty  = empty
   {-# INLINE mempty #-}
#if MIN_VERSION_base(4,9,0)
   mappend = (<>) -- future-proof definition
#else
   mappend = append
#endif
   {-# INLINE mappend #-}
   mconcat = foldr mappend Data.Monoid.mempty
   {-# INLINE mconcat #-}
instance String.IsString Builder where
    -- The unqualified 'fromString' on the right-hand side is the
    -- top-level function defined below, not this method (the class
    -- method is only in scope qualified), so this is not circular.
    fromString = fromString
    {-# INLINE fromString #-}
-- The instances below all force the builder into a lazy Text first,
-- so they are O(n), not O(1); intended for debugging and tests.
instance Show Builder where
    show = show . toLazyText
instance Eq Builder where
    a == b = toLazyText a == toLazyText b
instance Ord Builder where
    a <= b = toLazyText a <= toLazyText b
------------------------------------------------------------------------
-- | /O(1)./ The empty @Builder@, satisfying
--
-- * @'toLazyText' 'empty' = 'L.empty'@
--
empty :: Builder
-- The identity continuation transformer: passes the buffer through
-- untouched.
empty = Builder (\ k buf -> k buf)
{-# INLINE empty #-}
-- | /O(1)./ A @Builder@ taking a single character, satisfying
--
-- * @'toLazyText' ('singleton' c) = 'L.singleton' c@
--
singleton :: Char -> Builder
-- 4 is the per-character worst case in code units.
-- NOTE(review): that bound matches UTF-8 storage; for UTF-16 it would
-- be 2 -- confirm against the array encoding used by this build.
singleton c = writeAtMost 4 $ \ marr o -> unsafeWrite marr o c
{-# INLINE singleton #-}
------------------------------------------------------------------------
-- | /O(1)./ The concatenation of two builders, an associative
-- operation with identity 'empty', satisfying
--
-- * @'toLazyText' ('append' x y) = 'L.append' ('toLazyText' x) ('toLazyText' y)@
--
append :: Builder -> Builder -> Builder
-- Concatenation composes the continuation transformers, so cost does
-- not depend on the size of either builder.
append (Builder f) (Builder g) = Builder (f . g)
{-# INLINE [0] append #-}
-- TODO: Experiment to find the right threshold.
-- Texts of at most this many units are copied into the current buffer
-- by 'fromText'; larger ones become a chunk of their own.
copyLimit :: Int
copyLimit = 128
-- This function attempts to merge small @Text@ values instead of
-- treating each value as its own chunk. We may not always want this.
-- | /O(1)./ A @Builder@ taking a 'S.Text', satisfying
--
-- * @'toLazyText' ('fromText' t) = 'L.fromChunks' [t]@
--
fromText :: S.Text -> Builder
fromText t@(Text arr off l)
    | S.null t = empty
      -- Small texts are copied into the current buffer ...
    | l <= copyLimit = writeN l $ \marr o -> A.copyI marr o arr off (l+o)
      -- ... larger ones are emitted as a standalone chunk, after
      -- flushing whatever has accumulated so far.
    | otherwise = flush `append` mapBuilder (t :)
{-# INLINE [1] fromText #-}
{-# RULES
 "fromText/pack" forall s .
        fromText (S.pack s) = fromString s
 #-}
-- | /O(1)./ A Builder taking a @String@, satisfying
--
-- * @'toLazyText' ('fromString' s) = 'L.fromChunks' [S.pack s]@
--
fromString :: String -> Builder
fromString str = Builder $ \k (Buffer p0 o0 u0 l0) ->
    -- Writes the characters into the current buffer, starting a fresh
    -- chunk (of smallChunkSize) whenever free space runs low.
    -- NOTE(review): the 'l <= 1' check leaves room for a 2-unit write;
    -- confirm it is still sufficient if 'unsafeWrite' can emit up to 4
    -- units per character in this build.
    let loop !marr !o !u !l [] = k (Buffer marr o u l)
        loop marr o u l s@(c:cs)
            | l <= 1 = do
                arr <- A.unsafeFreeze marr
                let !t = Text arr o u
                marr' <- A.new chunkSize
                -- Interleaved lazily so chunks are produced on demand.
                ts <- inlineInterleaveST (loop marr' 0 0 chunkSize s)
                return $ t : ts
            | otherwise = do
                n <- unsafeWrite marr (o+u) c
                loop marr o (u+n) (l-n) cs
    in loop p0 o0 u0 l0 str
  where
    chunkSize = smallChunkSize
{-# INLINE fromString #-}
-- | /O(1)./ A @Builder@ taking a lazy @Text@, satisfying
--
-- * @'toLazyText' ('fromLazyText' t) = t@
--
fromLazyText :: L.Text -> Builder
fromLazyText ts = flush `append` mapBuilder (L.toChunks ts ++)
{-# INLINE fromLazyText #-}
------------------------------------------------------------------------
-- Our internal buffer type
-- A mutable array plus bookkeeping: start offset of the region in use,
-- number of units already written, and free units remaining.
data Buffer s = Buffer {-# UNPACK #-} !(A.MArray s)
                       {-# UNPACK #-} !Int -- offset
                       {-# UNPACK #-} !Int -- used units
                       {-# UNPACK #-} !Int -- length left
------------------------------------------------------------------------
-- | /O(n)./ Extract a lazy @Text@ from a @Builder@ with a default
-- buffer size. The construction work takes place if and when the
-- relevant part of the lazy @Text@ is demanded.
toLazyText :: Builder -> L.Text
toLazyText = toLazyTextWith smallChunkSize
-- | /O(n)./ Extract a lazy @Text@ from a @Builder@, using the given
-- size for the initial buffer. The construction work takes place if
-- and when the relevant part of the lazy @Text@ is demanded.
--
-- If the initial buffer is too small to hold all data, subsequent
-- buffers will be the default buffer size.
toLazyTextWith :: Int -> Builder -> L.Text
-- The final continuation produces the empty chunk list; the trailing
-- 'flush' makes sure the last partial buffer is emitted.
toLazyTextWith chunkSize m = L.fromChunks (runST $
  newBuffer chunkSize >>= runBuilder (m `append` flush) (const (return [])))
-- | /O(1)./ Pop the strict @Text@ we have constructed so far, if any,
-- yielding a new chunk in the result lazy @Text@.
flush :: Builder
flush = Builder $ \ k buf@(Buffer p o u l) ->
    if u == 0
    then k buf
    else do arr <- A.unsafeFreeze p
            -- Keep writing into the same array after the frozen
            -- region: the new buffer starts at offset o+u with 0 used.
            let !b = Buffer p (o+u) 0 l
                !t = Text arr o u
            ts <- inlineInterleaveST (k b)
            return $! t : ts
{-# INLINE [1] flush #-}
-- defer inlining so that flush/flush rule may fire.
------------------------------------------------------------------------
-- | Sequence an ST operation on the buffer
withBuffer :: (forall s. Buffer s -> ST s (Buffer s)) -> Builder
withBuffer f = Builder $ \k buf -> f buf >>= k
{-# INLINE withBuffer #-}
-- | Get the size of the buffer
-- (more precisely: the number of free units left in it).
withSize :: (Int -> Builder) -> Builder
withSize f = Builder $ \ k buf@(Buffer _ _ _ l) ->
    runBuilder (f l) k buf
{-# INLINE withSize #-}
-- | Map the resulting list of texts.
mapBuilder :: ([S.Text] -> [S.Text]) -> Builder
mapBuilder f = Builder (fmap f .)
------------------------------------------------------------------------
-- | Ensure that there are at least @n@ many elements available.
-- If the current buffer is too full, it is flushed and a fresh buffer
-- (at least smallChunkSize large) is allocated.
ensureFree :: Int -> Builder
ensureFree !n = withSize $ \ l ->
    if n <= l
    then empty
    else flush `append'` withBuffer (const (newBuffer (max n smallChunkSize)))
{-# INLINE [0] ensureFree #-}
-- Reserve space for at most @n@ units, then run @f@, which returns how
-- many units it actually wrote.
writeAtMost :: Int -> (forall s. A.MArray s -> Int -> ST s Int) -> Builder
writeAtMost n f = ensureFree n `append'` withBuffer (writeBuffer f)
{-# INLINE [0] writeAtMost #-}
-- | Ensure that @n@ many elements are available, and then use @f@ to
-- write some elements into the memory.
writeN :: Int -> (forall s. A.MArray s -> Int -> ST s ()) -> Builder
writeN n f = writeAtMost n (\ p o -> f p o >> return n)
{-# INLINE writeN #-}
-- Run a writer at the buffer's write position and advance the
-- used/free counters by the number of units it reports.
writeBuffer :: (A.MArray s -> Int -> ST s Int) -> Buffer s -> ST s (Buffer s)
writeBuffer f (Buffer p o u l) = do
    n <- f p (o+u)
    return $! Buffer p o (u+n) (l-n)
{-# INLINE writeBuffer #-}
-- Allocate a fresh, empty buffer of the given size.
newBuffer :: Int -> ST s (Buffer s)
newBuffer size = do
    arr <- A.new size
    return $! Buffer arr 0 0 size
{-# INLINE newBuffer #-}
------------------------------------------------------------------------
-- Some nice rules for Builder
-- This function makes GHC understand that 'writeN' and 'ensureFree'
-- are *not* recursive in the presence of the rewrite rules below.
-- This is not needed with GHC 7+.
append' :: Builder -> Builder -> Builder
append' (Builder f) (Builder g) = Builder (f . g)
{-# INLINE append' #-}
-- The rules: coalesce adjacent bounded writes into a single write with
-- the summed bound, collapse adjacent 'ensureFree's into one taking
-- the larger bound, and collapse duplicate 'flush'es.
{-# RULES
"append/writeAtMost" forall a b (f::forall s. A.MArray s -> Int -> ST s Int)
                           (g::forall s. A.MArray s -> Int -> ST s Int) ws.
        append (writeAtMost a f) (append (writeAtMost b g) ws) =
            append (writeAtMost (a+b) (\marr o -> f marr o >>= \ n ->
                                    g marr (o+n) >>= \ m ->
                                    let s = n+m in s `seq` return s)) ws
"writeAtMost/writeAtMost" forall a b (f::forall s. A.MArray s -> Int -> ST s Int)
                              (g::forall s. A.MArray s -> Int -> ST s Int).
        append (writeAtMost a f) (writeAtMost b g) =
            writeAtMost (a+b) (\marr o -> f marr o >>= \ n ->
                                          g marr (o+n) >>= \ m ->
                                          let s = n+m in s `seq` return s)
"ensureFree/ensureFree" forall a b .
        append (ensureFree a) (ensureFree b) = ensureFree (max a b)
"flush/flush"
        append flush flush = flush
 #-}
| text-utf8/text | Data/Text/Internal/Builder.hs | bsd-2-clause | 10,858 | 0 | 17 | 2,504 | 1,917 | 1,066 | 851 | 155 | 2 |
{- System.Process enhancements, including additional ways of running
- processes, and logging.
-
- Copyright 2012-2015 Joey Hess <id@joeyh.name>
-
- License: BSD-2-clause
-}
{-# LANGUAGE CPP, Rank2Types #-}
{-# OPTIONS_GHC -fno-warn-tabs #-}
module Utility.Process (
module X,
CreateProcess(..),
StdHandle(..),
readProcess,
readProcess',
readProcessEnv,
writeReadProcessEnv,
forceSuccessProcess,
checkSuccessProcess,
ignoreFailureProcess,
createProcessSuccess,
createProcessChecked,
createBackgroundProcess,
processTranscript,
processTranscript',
withHandle,
withIOHandles,
withOEHandles,
withQuietOutput,
feedWithQuietOutput,
createProcess,
waitForProcess,
startInteractiveProcess,
stdinHandle,
stdoutHandle,
stderrHandle,
ioHandles,
processHandle,
devNull,
) where
import qualified Utility.Process.Shim
import qualified Utility.Process.Shim as X hiding (CreateProcess(..), createProcess, runInteractiveProcess, readProcess, readProcessWithExitCode, system, rawSystem, runInteractiveCommand, runProcess)
import Utility.Process.Shim hiding (createProcess, readProcess, waitForProcess)
import Utility.Misc
import Utility.Exception
import System.Exit
import System.IO
import System.Log.Logger
import Control.Concurrent
import qualified Control.Exception as E
import Control.Monad
#ifndef mingw32_HOST_OS
import qualified System.Posix.IO
#else
import Control.Applicative
#endif
import Data.Maybe
import Prelude
-- A CreateProcessRunner creates a process from a CreateProcess
-- specification and hands its (stdin, stdout, stderr, pid) quadruple
-- to an action; each runner implements one policy for waiting on or
-- checking the process afterwards.
type CreateProcessRunner = forall a. CreateProcess -> ((Maybe Handle, Maybe Handle, Maybe Handle, ProcessHandle) -> IO a) -> IO a
-- Selects one of a process's three standard handles.
data StdHandle = StdinHandle | StdoutHandle | StderrHandle
	deriving (Eq)
-- | Normally, when reading from a process, it does not need to be fed any
-- standard input.
readProcess :: FilePath -> [String] -> IO String
readProcess cmd args = readProcessEnv cmd args Nothing
-- Like 'readProcess', but with an optional environment for the child.
readProcessEnv :: FilePath -> [String] -> Maybe [(String, String)] -> IO String
readProcessEnv cmd args environ = readProcess' p
  where
	p = (proc cmd args)
		{ std_out = CreatePipe
		, env = environ
		}
-- Reads all of the process's stdout strictly, then waits for it to
-- exit successfully (via 'createProcessSuccess').
readProcess' :: CreateProcess -> IO String
readProcess' p = withHandle StdoutHandle createProcessSuccess p $ \h -> do
	output <- hGetContentsStrict h
	hClose h
	return output
-- | Runs an action to write to a process on its stdin,
-- returns its output, and also allows specifying the environment.
writeReadProcessEnv
	:: FilePath
	-> [String]
	-> Maybe [(String, String)]
	-> (Maybe (Handle -> IO ())) -- ^ writer, given the child's stdin
	-> (Maybe (Handle -> IO ())) -- ^ adjuster, run on both stdin and stdout before any I/O
	-> IO String
writeReadProcessEnv cmd args environ writestdin adjusthandle = do
	(Just inh, Just outh, _, pid) <- createProcess p
	maybe (return ()) (\a -> a inh) adjusthandle
	maybe (return ()) (\a -> a outh) adjusthandle
	-- fork off a thread to start consuming the output
	-- (this avoids deadlock if the child fills the pipe while we are
	-- still writing its input)
	output <- hGetContents outh
	outMVar <- newEmptyMVar
	_ <- forkIO $ E.evaluate (length output) >> putMVar outMVar ()
	-- now write and flush any input
	maybe (return ()) (\a -> a inh >> hFlush inh) writestdin
	hClose inh -- done with stdin
	-- wait on the output
	takeMVar outMVar
	hClose outh
	-- wait on the process
	forceSuccessProcess p pid
	return output
  where
	p = (proc cmd args)
		{ std_in = CreatePipe
		, std_out = CreatePipe
		, std_err = Inherit
		, env = environ
		}
-- | Waits for a ProcessHandle, and throws an IOError if the process
-- did not exit successfully.
forceSuccessProcess :: CreateProcess -> ProcessHandle -> IO ()
forceSuccessProcess p pid = do
	code <- waitForProcess pid
	case code of
		ExitSuccess -> return ()
		-- The error message names the command, for diagnosability.
		ExitFailure n -> fail $ showCmd p ++ " exited " ++ show n
-- | Waits for a ProcessHandle and returns True if it exited successfully.
-- Note that using this with createProcessChecked will throw away
-- the Bool, and is only useful to ignore the exit code of a process,
-- while still waiting for it. -}
checkSuccessProcess :: ProcessHandle -> IO Bool
checkSuccessProcess pid = do
	code <- waitForProcess pid
	return $ code == ExitSuccess
-- Always waits (so no zombie is left behind) and reports True
-- regardless of the real exit code.
ignoreFailureProcess :: ProcessHandle -> IO Bool
ignoreFailureProcess pid = do
	void $ waitForProcess pid
	return True
-- | Runs createProcess, then an action on its handles, and then
-- forceSuccessProcess.
createProcessSuccess :: CreateProcessRunner
createProcessSuccess p a = createProcessChecked (forceSuccessProcess p) p a
-- | Runs createProcess, then an action on its handles, and then
-- a checker action on its exit code, which must wait for the process.
createProcessChecked :: (ProcessHandle -> IO b) -> CreateProcessRunner
createProcessChecked checker p a = do
	t@(_, _, _, pid) <- createProcess p
	-- The checker runs even when the action throws; the exception is
	-- rethrown afterwards.
	r <- tryNonAsync $ a t
	_ <- checker pid
	either E.throw return r
-- | Leaves the process running, suitable for lazy streaming.
-- Note: Zombies will result, and must be waited on.
createBackgroundProcess :: CreateProcessRunner
createBackgroundProcess p a = a =<< createProcess p
-- | Runs a process, optionally feeding it some input, and
-- returns a transcript combining its stdout and stderr, and
-- whether it succeeded or failed.
processTranscript :: String -> [String] -> (Maybe String) -> IO (String, Bool)
processTranscript = processTranscript' id
-- Generalised version that lets the caller adjust the CreateProcess
-- (e.g. working directory, environment) before it is run.
processTranscript' :: (CreateProcess -> CreateProcess) -> String -> [String] -> Maybe String -> IO (String, Bool)
processTranscript' modproc cmd opts input = do
#ifndef mingw32_HOST_OS
	{- This implementation interleaves stdout and stderr in exactly the order
	 - the process writes them. -}
	(readf, writef) <- System.Posix.IO.createPipe
	readh <- System.Posix.IO.fdToHandle readf
	writeh <- System.Posix.IO.fdToHandle writef
	p@(_, _, _, pid) <- createProcess $ modproc $
		(proc cmd opts)
			{ std_in = if isJust input then CreatePipe else Inherit
			, std_out = UseHandle writeh
			, std_err = UseHandle writeh
			}
	-- Close the parent's copy of the write end, or the reader would
	-- never see EOF.
	hClose writeh
	get <- mkreader readh
	writeinput input p
	transcript <- get
	ok <- checkSuccessProcess pid
	return (transcript, ok)
#else
	{- This implementation for Windows puts stderr after stdout. -}
	p@(_, _, _, pid) <- createProcess $ modproc $
		(proc cmd opts)
			{ std_in = if isJust input then CreatePipe else Inherit
			, std_out = CreatePipe
			, std_err = CreatePipe
			}
	getout <- mkreader (stdoutHandle p)
	geterr <- mkreader (stderrHandle p)
	writeinput input p
	transcript <- (++) <$> getout <*> geterr
	ok <- checkSuccessProcess pid
	return (transcript, ok)
#endif
  where
	-- Drains h in a background thread; returns an action that waits
	-- for the drain to finish and yields the contents.
	mkreader h = do
		s <- hGetContents h
		v <- newEmptyMVar
		void $ forkIO $ do
			void $ E.evaluate (length s)
			putMVar v ()
		return $ do
			takeMVar v
			return s
	-- Feeds the given input to the child's stdin, then closes it so
	-- the child sees EOF. Empty input still closes the handle.
	writeinput (Just s) p = do
		let inh = stdinHandle p
		unless (null s) $ do
			hPutStr inh s
			hFlush inh
		hClose inh
	writeinput Nothing _ = return ()
-- | Runs a CreateProcessRunner, on a CreateProcess structure, that
-- is adjusted to pipe only from/to a single StdHandle, and passes
-- the resulting Handle to an action.
withHandle
	:: StdHandle
	-> CreateProcessRunner
	-> CreateProcess
	-> (Handle -> IO a)
	-> IO a
withHandle h creator p a = creator p' $ a . select
  where
	-- Start from all handles inherited, then pipe only the one asked
	-- for; 'select' extracts that same handle from the result tuple.
	base = p
		{ std_in = Inherit
		, std_out = Inherit
		, std_err = Inherit
		}
	(select, p')
		| h == StdinHandle =
			(stdinHandle, base { std_in = CreatePipe })
		| h == StdoutHandle =
			(stdoutHandle, base { std_out = CreatePipe })
		| h == StderrHandle =
			(stderrHandle, base { std_err = CreatePipe })
-- | Like withHandle, but passes (stdin, stdout) handles to the action.
withIOHandles
	:: CreateProcessRunner
	-> CreateProcess
	-> ((Handle, Handle) -> IO a)
	-> IO a
withIOHandles creator p a = creator p' $ a . ioHandles
  where
	p' = p
		{ std_in = CreatePipe
		, std_out = CreatePipe
		, std_err = Inherit
		}
-- | Like withHandle, but passes (stdout, stderr) handles to the action.
withOEHandles
	:: CreateProcessRunner
	-> CreateProcess
	-> ((Handle, Handle) -> IO a)
	-> IO a
withOEHandles creator p a = creator p' $ a . oeHandles
  where
	p' = p
		{ std_in = Inherit
		, std_out = CreatePipe
		, std_err = CreatePipe
		}
-- | Forces the CreateProcessRunner to run quietly;
-- both stdout and stderr are discarded.
withQuietOutput
	:: CreateProcessRunner
	-> CreateProcess
	-> IO ()
-- The null-device handle stays open for the lifetime of the runner,
-- courtesy of withFile.
withQuietOutput creator p = withFile devNull WriteMode $ \nullh -> do
	let p' = p
		{ std_out = UseHandle nullh
		, std_err = UseHandle nullh
		}
	creator p' $ const $ return ()
-- | Stdout and stderr are discarded, while the process is fed stdin
-- from the handle.
feedWithQuietOutput
	:: CreateProcessRunner
	-> CreateProcess
	-> (Handle -> IO a)
	-> IO a
feedWithQuietOutput creator p a = withFile devNull WriteMode $ \nullh -> do
	let p' = p
		{ std_in = CreatePipe
		, std_out = UseHandle nullh
		, std_err = UseHandle nullh
		}
	creator p' $ a . stdinHandle
-- Platform-appropriate name of the null device.
devNull :: FilePath
#ifndef mingw32_HOST_OS
devNull = "/dev/null"
#else
devNull = "NUL"
#endif
-- | Extract a desired handle from createProcess's tuple.
-- These partial functions are safe as long as createProcess is run
-- with appropriate parameters to set up the desired handle.
-- Get it wrong and the runtime crash will always happen, so should be
-- easily noticed.
type HandleExtractor = (Maybe Handle, Maybe Handle, Maybe Handle, ProcessHandle) -> Handle
stdinHandle :: HandleExtractor
stdinHandle (Just h, _, _, _) = h
stdinHandle _ = error "expected stdinHandle"
stdoutHandle :: HandleExtractor
stdoutHandle (_, Just h, _, _) = h
stdoutHandle _ = error "expected stdoutHandle"
stderrHandle :: HandleExtractor
stderrHandle (_, _, Just h, _) = h
stderrHandle _ = error "expected stderrHandle"
-- Extracts (stdin, stdout); both must have been piped.
ioHandles :: (Maybe Handle, Maybe Handle, Maybe Handle, ProcessHandle) -> (Handle, Handle)
ioHandles (Just hin, Just hout, _, _) = (hin, hout)
ioHandles _ = error "expected ioHandles"
-- Extracts (stdout, stderr); both must have been piped.
oeHandles :: (Maybe Handle, Maybe Handle, Maybe Handle, ProcessHandle) -> (Handle, Handle)
oeHandles (_, Just hout, Just herr, _) = (hout, herr)
oeHandles _ = error "expected oeHandles"
-- Total: the ProcessHandle is always present.
processHandle :: (Maybe Handle, Maybe Handle, Maybe Handle, ProcessHandle) -> ProcessHandle
processHandle (_, _, _, pid) = pid
-- | Shows the command that a CreateProcess will run.
-- Raw commands render as the program name followed by the 'show' of
-- the argument list.
showCmd :: CreateProcess -> String
showCmd process = case cmdspec process of
	ShellCommand command -> command
	RawCommand prog params -> prog ++ " " ++ show params
-- | Starts an interactive process. Unlike runInteractiveProcess in
-- System.Process, stderr is inherited.
startInteractiveProcess
	:: FilePath
	-> [String]
	-> Maybe [(String, String)]
	-> IO (ProcessHandle, Handle, Handle)
startInteractiveProcess cmd args environ = do
	let p = (proc cmd args)
		{ std_in = CreatePipe
		, std_out = CreatePipe
		, std_err = Inherit
		, env = environ
		}
	-- createProcess yields (stdin, stdout, stderr, pid); this returns
	-- (pid, the child's stdout handle, the child's stdin handle).
	(Just from, Just to, _, pid) <- createProcess p
	return (pid, to, from)
-- | Wrapper around 'System.Process.createProcess' that does debug logging.
createProcess :: CreateProcess -> IO (Maybe Handle, Maybe Handle, Maybe Handle, ProcessHandle)
createProcess p = do
	debugProcess p
	Utility.Process.Shim.createProcess p
-- | Debugging trace for a CreateProcess.
debugProcess :: CreateProcess -> IO ()
debugProcess p = debugM "Utility.Process" $ unwords
	[ action ++ ":"
	, showCmd p
	]
  where
	-- Classify the process by which of its standard streams are piped.
	action
		| piped (std_in p) && piped (std_out p) = "chat"
		| piped (std_in p) = "feed"
		| piped (std_out p) = "read"
		| otherwise = "call"
	piped Inherit = False
	piped _ = True
-- | Wrapper around 'System.Process.waitForProcess' that does debug logging.
waitForProcess :: ProcessHandle -> IO ExitCode
waitForProcess h = do
	r <- Utility.Process.Shim.waitForProcess h
	debugM "Utility.Process" ("process done " ++ show r)
	return r
| np/propellor | src/Utility/Process.hs | bsd-2-clause | 11,557 | 220 | 16 | 2,156 | 3,022 | 1,643 | 1,379 | 259 | 3 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE QuasiQuotes #-}
-- |
-- Module: Database.Ocilib.Connection
-- Copyright: (c) 2016 Thierry Bourrillon
-- (c) 2016 FPInsight, Eurl.
-- License: BSD3
-- Maintainer: Thierry Bourrillon <thierry.bourrillon@fpinsight.com>
-- Stability: experimental
-- Portability: portable
--
--
module Database.Ocilib.Connection
(
-- * Connecting to Database
ociConnectionCreate
, ociConnectionFree
, ociIsConnected
, ociPing
, ociBreak
) where
import Data.ByteString
import Data.Monoid ((<>))
import Foreign.Marshal.Utils
import Foreign.Ptr
import qualified Language.C.Inline as C
import Database.Ocilib.Oci
import Database.Ocilib.Enums
import Database.Ocilib.Internal
C.context (C.baseCtx <> C.funCtx <> ociCtx)
C.include "<ocilib.h>"
-- Connecting to database
type Connection = Ptr OCI_Connection
-- | Create a physical connection to an Oracle database server.
ociConnectionCreate :: ByteString -> ByteString -> ByteString -> SessionMode -> IO (Maybe Connection)
ociConnectionCreate db user pass mode = do
let m = fromIntegral $ fromEnum mode
useAsCString db ( \d ->
useAsCString user ( \u ->
useAsCString pass ( \p ->
fmap toMaybePtr [C.exp| OCI_Connection* { OCI_ConnectionCreate($(char* d), $(char* u), $(char* p), $(unsigned int m)) } |]
)
)
)
-- | Close a physical connection to an Oracle database server.
ociConnectionFree :: Ptr OCI_Connection -> IO Bool
ociConnectionFree c =
fmap toBool [C.exp| int { OCI_ConnectionFree($(OCI_Connection *c)) } |]
-- | Returns TRUE is the given connection is still connected otherwise FALSE.
ociIsConnected :: Ptr OCI_Connection -> IO Bool
ociIsConnected c =
fmap toBool [C.exp| int { OCI_IsConnected($(OCI_Connection *c)) } |]
{-
-- | Return the pointer to user data previously associated with the connection.
-- void *OCI_GetUserData (OCI_Connection *con)
-- | Associate a pointer to user data to the given connection.
-- boolean OCI_SetUserData (OCI_Connection *con, void *data)
-- | Associate a tag to the given connection/session.
-- boolean OCI_SetSessionTag (OCI_Connection *con, const otext *tag)
-- | Return the tag associated the given connection.
-- const otext *OCI_GetSessionTag (OCI_Connection *con)
-- | Return the name of the connected database/service name.
-- const otext *OCI_GetDatabase (OCI_Connection *con)
-- | Return the current logged user name.
-- const otext *OCI_GetUserName (OCI_Connection *con)
-- | Return the current logged user password.
-- const otext *OCI_GetPassword (OCI_Connection *con)
-- | Change the password of the logged user.
-- boolean OCI_SetPassword(OCI_Connection *con, const otext *password)
-- | Change the password of the given user on the given database.
-- boolean OCI_SetUserPassword (const otext *db, const otext *user, const otext *pwd, const otext *new_pwd)
-- | Return the current session mode.
-- unsigned int OCI_GetSessionMode (OCI_Connection *con)
-- | Return the connected database server version.
-- const otext *OCI_GetVersionServer (OCI_Connection *con)
-- | Return the major version number of the connected database server.
-- unsigned int OCI_GetServerMajorVersion (OCI_Connection *con)
-- | Return the minor version number of the connected database server.
-- unsigned int OCI_GetServerMinorVersion (OCI_Connection *con)
-- | Return the revision version number of the connected database server.
-- unsigned int OCI_GetServerRevisionVersion (OCI_Connection *con)
-- | Set the format string for implicit string conversions of the given type.
-- boolean OCI_SetFormat (OCI_Connection *con, unsigned int type, const otext *format)
-- | Return the format string for implicit string conversions of the given type.
-- const otext *OCI_GetFormat (OCI_Connection *con, unsigned int type)
-- | Return the current transaction of the connection.
-- OCI_Transaction *OCI_GetTransaction (OCI_Connection *con)
-- | Return the highest Oracle version is supported by the connection.
-- unsigned int OCI_GetVersionConnection (OCI_Connection *con)
-- | Set tracing information to the session of the given connection.
-- boolean OCI_SetTrace (OCI_Connection *con, unsigned int trace, const otext *value)
-- | Get the current trace for the trace type from the given connection.
-- const otext *OCI_GetTrace (OCI_Connection *con, unsigned int trace)
-}
-- | Makes a round trip call to the server to confirm that the connection
-- and the server are active.
ociPing :: Ptr OCI_Connection -> IO Bool
ociPing c = fmap toBool [C.exp| int { OCI_Ping($(OCI_Connection *c)) } |]
{-
-- | Return the Oracle server database name of the connected database/service name.
-- const otext *OCI_GetDBName (OCI_Connection *con)
-- | Return the Oracle server Instance name of the connected database/service name.
-- const otext *OCI_GetInstanceName (OCI_Connection *con)
-- | Return the Oracle server service name of the connected database/service name.
-- const otext *OCI_GetServiceName (OCI_Connection *con)
-- | Return the Oracle server machine name of the connected database/service name.
-- const otext *OCI_GetServerName (OCI_Connection *con)
-- | Return the Oracle server domain name of the connected database/service name.
-- const otext *OCI_GetDomainName (OCI_Connection *con)
-- | Return the date and time (Timestamp) server instance start of the connected database/service name.
-- OCI_Timestamp *OCI_GetInstanceStartTime (OCI_Connection *con)
-- | Verify if the given connection support TAF events.
-- boolean OCI_IsTAFCapable (OCI_Connection *con)
-- | Set the Transparent Application Failover (TAF) user handler.
-- boolean OCI_SetTAFHandler (OCI_Connection *con, POCI_TAF_HANDLER handler)
-- | Return the maximum number of statements to keep in the statement cache.
-- unsigned int OCI_GetStatementCacheSize (OCI_Connection *con)
-- | Set the maximum number of statements to keep in the statement cache.
-- boolean OCI_SetStatementCacheSize (OCI_Connection *con, unsigned int value)
-- | Return the default LOB prefetch buffer size for the connection.
-- unsigned int OCI_GetDefaultLobPrefetchSize (OCI_Connection *con)
-- | Enable or disable prefetching for all LOBs fetched in the connection.
-- boolean OCI_SetDefaultLobPrefetchSize (OCI_Connection *con, unsigned int value)
-- | Return the maximum number of SQL statements that can be opened in one session.
-- unsigned int OCI_GetMaxCursors (OCI_Connection *con)
-}
-- | Perform an immediate abort of any currently running Oracle OCI call
-- on this connection.
ociBreak :: Ptr OCI_Connection -> IO Bool
ociBreak c = fmap toBool [C.exp| int { OCI_Break($(OCI_Connection *c)) } |]
| fpinsight/hocilib | src/Database/Ocilib/Connection.hs | bsd-2-clause | 6,818 | 0 | 17 | 1,188 | 390 | 224 | 166 | 38 | 1 |
{-# LANGUAGE TemplateHaskell,TypeFamilies,EmptyDataDecls,DeriveDataTypeable,GADTs #-}
module Model where
import Prelude
import Yesod
import Data.Text (Text)
import Database.Persist.Quasi
import Database.Persist.MongoDB hiding (master)
import Language.Haskell.TH.Syntax
import Data.Typeable (Typeable)
import Data.Maybe
import qualified Data.Text as T
import Data.ByteString (ByteString)
import Data.Time.Clock
import qualified Parser.Paper as P
import Parser.JSON
import Model.Defs
import Data.Data
{-
import Database.Persist.Quasi
import Database.Persist.MongoDB hiding (master)
import Language.Haskell.TH.Syntax
import Data.Aeson.TH
import qualified Parser.Paper as P
-- import Database.Persist.Store
-}
-- sqlType _ = SqlInt32
-- isNullable _ = False
-- You can define all of your database entities in the entities file.
-- You can find more information on persistent and how to declare entities
-- at:
-- http://www.yesodweb.com/book/persistent/
-- Template Haskell splice: generates the persistent entity types from
-- config/models against the MongoDB backend. mpsGeneric False makes
-- the generated entity types concrete (non-generic).
let mongoSettings = (mkPersistSettings (ConT ''MongoBackend))
                    { mpsGeneric = False
                    }
 in share [mkPersist mongoSettings]
    $(persistFileWith lowerCaseSettings "config/models")
-- This is a general summary function. cf. summaryEx in Handler.PaperListW2UI
-- This just returns a pair, so that you can add or delete elements before applying object.
-- Caution: you need to apply object to get a dictionary type, otherwise, this will generate an array.
paperSummary :: PaperId -> Paper -> [(Text,Value)]
paperSummary pid p
= ["id" .= toPathPiece pid, "doi" .= paperDoi p, "url" .= paperUrl p, "title" .= citationTitle c,
"tags" .= (paperTags p), "note" .= paperNote p , "citation" .= toJSON c
, "cittxt" .= mkCitTxt c]
where
c = paperCitation p
mkCitTxt c = T.concat
["<i>"
, fromMaybe "" (citationJournal c)
, "</i>, "
, maybe "" (\v -> T.concat["<b>",v,"</b>"]) (citationVolume c)
, ", "
, fromMaybe "" (citationPageFrom c)
, maybe "" (\to -> "-" `T.append` to) (citationPageTo c)
, maybe "" (\y -> T.pack $ " (" ++ show y ++ ")") (citationYear c)
]
paperSummary' :: Entity Paper -> Value
paperSummary' (Entity pid p) = object $ paperSummary pid p
{-
-- default values
emptyCitation :: Citation
emptyCitation = Citation Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing [] Nothing Nothing
-}
-- Default (empty) value for the misc blob.
emptyMisc :: ByteString
emptyMisc = ""
-- Default (empty) reference list.
emptyRefs :: [Reference]
emptyRefs = []
-- Sentinel timestamp used when no real time is available.
-- NOTE(review): 'read' is partial, but this literal is parsed once at
-- startup by UTCTime's Read instance and is known to succeed.
defaultTime :: UTCTime
defaultTime = read "1970-01-01 01:01:01 +0000"
{-
emptyPaper :: Paper
emptyPaper = Paper
"" "" -- doi and url
"" Nothing Nothing -- html, abs, main
emptyCitation emptyRefs [] [] -- cit ref fig res
Nothing [] Nothing emptyMisc -- toc tags note misc
Nothing P.SUndecidable --- parserInfo, supportLevel
(ResourceAvailability False False False False False False)
Nothing defaultTime -- email, time
-}
| hirokai/PaperServer | Model.hs | bsd-2-clause | 3,145 | 0 | 15 | 788 | 537 | 301 | 236 | 44 | 1 |
{-# LANGUAGE FlexibleContexts #-}
module Mote.Util where
import Control.Monad ((<=<), liftM)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Concurrent.MVar(modifyMVar_, readMVar, newMVar)
import qualified Data.Map as M
import Data.Maybe (catMaybes)
import DynFlags (DynFlags)
import GHC ()
import GhcMonad (GhcMonad, getSessionDynFlags)
import Name (Name)
import OccName
import Outputable (Outputable, SDoc, neverQualify, ppr,
showSDoc, showSDocForUser)
import System.IO
import Type (Type)
-- PARSE IMPORTS
import Control.Monad.Error (MonadError, throwError)
import FastString (fsLit, unpackFS)
import Lexer (P, ParseResult (..), mkPState, unP)
import SrcLoc (GenLocated (..), SrcSpan(..), isSubspanOf,
mkRealSrcLoc, RealSrcSpan)
import StringBuffer (stringToStringBuffer)
import Mote.Types
-- | Run a GHC 'Lexer.P' parser over a string using the current session's
-- 'DynFlags'.  On failure the error 'SDoc' is rendered to a 'String'.
runParserM :: GhcMonad m => Lexer.P a -> String -> m (Either String a)
runParserM parser str = do
  fs <- getSessionDynFlags
  -- "<mote>" is a dummy file name used only for source locations.
  let buf = stringToStringBuffer str
      loc = mkRealSrcLoc (fsLit "<mote>") 1 1
  return $ case unP parser (mkPState fs buf loc) of
    PFailed _span err -> Left (showSDoc fs err)
    POk _pst x        -> Right x
-- | Pipeline-style 'fmap': the functorial value comes first, then the pure
-- function to map over it.
(>>|) :: Functor f => f a -> (a -> b) -> f b
(>>|) = flip fmap
-- | Pretty-print any 'Outputable' value using the session's 'DynFlags'.
showPprM :: (Outputable a, GhcMonad m) => a -> m String
showPprM = showSDocM . ppr
-- | Pretty-print an 'Outputable' value to stdout (debugging aid).
output :: (Outputable a, GhcMonad m) => a -> m ()
output = liftIO . putStrLn <=< showSDocM . ppr
-- | Render an 'SDoc' with the current session's 'DynFlags'.
showSDocM :: GhcMonad m => SDoc -> m String
showSDocM x = getSessionDynFlags >>| \fs -> showSDoc fs x
-- | Create a new mutable reference (an 'MVar') in any 'MonadIO'.
newRef :: MonadIO m => a -> m (Ref a)
newRef = liftIO . newMVar
-- | Read a reference ("g" = generalized over 'MonadIO').
gReadRef :: MonadIO m => Ref a -> m a
gReadRef = liftIO . readMVar
-- | Apply a pure function to the contents of a reference.
gModifyRef :: MonadIO m => Ref a -> (a -> a) -> m ()
gModifyRef x f = liftIO $ modifyMVar_ x (return . f)
-- | Append a line to the log file handle recorded in the 'MoteState'.
logS :: MonadIO m => Ref MoteState -> String -> m ()
logS stRef s = liftIO $ flip hPutStrLn s . logFile =<< readMVar stRef
-- | First subexpression whose span contains the given hole.
-- Partial: calls 'error' if no listed span contains the hole.
nextSubexpr :: SrcSpan -> [GenLocated SrcSpan b] -> b
nextSubexpr hole = foldr (\(L l x) r -> if hole `isSubspanOf` l then x else r) (error "nextSubexpr failure")
-- NOTE(review): identical to 'nextSubexpr' except for the error message; the
-- "located" result suggested by the name is not actually returned — confirm
-- whether this should return the 'GenLocated' wrapper instead.
nextLocatedSubexpr :: SrcSpan -> [GenLocated SrcSpan b] -> b
nextLocatedSubexpr hole = foldr (\(L l x) r -> if hole `isSubspanOf` l then x else r) (error "nextLocatedSubexpr failure")
-- | Total variant of 'nextSubexpr': 'Nothing' when no span contains the hole.
nextSubexpr' :: SrcSpan -> [GenLocated SrcSpan a] -> Maybe a
nextSubexpr' hole = foldr (\(L l x) r -> if hole `isSubspanOf` l then Just x else r) Nothing
-- | Narrow a 'SrcSpan' to a 'RealSrcSpan'; errors on 'UnhelpfulSpan'.
toRealSrcSpan :: SrcSpan -> RealSrcSpan
toRealSrcSpan (UnhelpfulSpan _) = error "toRealSrcSpan: Got UnhelpfulSpan"
toRealSrcSpan (RealSrcSpan x0) = x0
-- foldExprs :: ([s] -> s) -> (LHsExpr id -> s -> Maybe s) -> HsModule id ->
-- | Lift an 'Either' into a 'MonadError', throwing the 'Left' case.
eitherThrow :: MonadError e m => Either e a -> m a
eitherThrow = either throwError return
-- | Lift a 'Maybe' into a 'MonadError', throwing the given error on 'Nothing'.
maybeThrow :: MonadError e m => e -> Maybe a -> m a
maybeThrow err = maybe (throwError err) return
-- | Fetch the current hole, failing with 'NoHole' if none is set.
getCurrentHoleErr :: Ref MoteState -> M AugmentedHoleInfo
getCurrentHoleErr r = maybe (throwError NoHole) return . currentHole =<< gReadRef r
-- | Fetch the loaded file data, failing with 'NoFile' if none is loaded.
getFileDataErr :: Ref MoteState -> M FileData
getFileDataErr = maybe (throwError NoFile) return . fileData <=< gReadRef
-- | All holes recorded for the currently loaded file.
getHoles :: Ref MoteState -> M [Hole]
getHoles = fmap (M.keys . holesInfo) . getFileDataErr
-- | Unpack an 'OccName' to a plain 'String'.
occNameToString :: OccName -> String
occNameToString = unpackFS . occNameFS
-- | Unqualified textual form of a 'Name'.
nameToString :: Name -> String
nameToString = occNameToString . occName
-- | Render a 'Type' without qualifying any names.
showType :: DynFlags -> Type -> String
showType fs = showSDocForUser fs neverQualify . ppr
-- | Monadic analogue of 'Data.Maybe.mapMaybe': map an effectful partial
-- function over a list and keep only the 'Just' results.
mapMaybeM :: Monad m => (a -> m (Maybe b)) -> [a] -> m [b]
mapMaybeM f xs = liftM catMaybes (mapM f xs)
-- | Total 'head': throws an 'OtherError' in 'MonadError' on the empty list.
headErr :: MonadError ErrorType m => [a] -> m a
headErr xs = case xs of
  [] -> throwError $ OtherError "headErr: Empty list"
  x : _ -> return x
| imeckler/mote | Mote/Util.hs | bsd-3-clause | 4,101 | 0 | 13 | 1,121 | 1,392 | 731 | 661 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module DTX2MIDI.MIDISpec where
import Codec.Midi (Midi (..))
import qualified Codec.Midi as Midi
import DTX2MIDI.MIDI
import Test.Hspec
spec :: Spec
spec = do
describe "bpmToTempo" $ do
it "returns tempo which has a specified bpm" $ do
bpmToTempo 120 `shouldBe` 500000
bpmToTempo 140 `shouldBe` 428571
describe "updateInitialTempo" $ do
it "returns midi which has a specified initial tempo" $ do
let chan = 9
let vel = 127
let bd = 35
let sd = 38
let hh = 42
let expectedTracks =
[ [ (0, Midi.ProgramChange chan 0),
(0, Midi.TempoChange 250000),
(0, Midi.NoteOn chan bd vel),
(0, Midi.NoteOn chan hh vel),
(48, Midi.NoteOff chan bd vel),
(0, Midi.NoteOff chan hh vel),
(0, Midi.NoteOn chan hh vel),
(48, Midi.NoteOff chan hh vel),
(0, Midi.NoteOn chan sd vel),
(0, Midi.NoteOn chan hh vel),
(48, Midi.NoteOff chan hh vel),
(48, Midi.NoteOff chan sd vel)
]
]
let expectedMidi =
Midi
{ Midi.fileType = Midi.SingleTrack,
Midi.timeDiv = Midi.TicksPerBeat 96,
Midi.tracks = expectedTracks
}
let inputTracks =
[ [ (0, Midi.ProgramChange chan 0),
(0, Midi.TempoChange 500000),
(0, Midi.NoteOn chan bd vel),
(0, Midi.NoteOn chan hh vel),
(48, Midi.NoteOff chan bd vel),
(0, Midi.NoteOff chan hh vel),
(0, Midi.NoteOn chan hh vel),
(48, Midi.NoteOff chan hh vel),
(0, Midi.NoteOn chan sd vel),
(0, Midi.NoteOn chan hh vel),
(48, Midi.NoteOff chan hh vel),
(48, Midi.NoteOff chan sd vel)
]
]
let inputMidi =
Midi
{ Midi.fileType = Midi.SingleTrack,
Midi.timeDiv = Midi.TicksPerBeat 96,
Midi.tracks = inputTracks
}
updateInitialTempo 250000 inputMidi `shouldBe` expectedMidi
| akiomik/dtx2midi | test/DTX2MIDI/MIDISpec.hs | bsd-3-clause | 2,261 | 0 | 20 | 910 | 700 | 379 | 321 | 56 | 1 |
{-# LANGUAGE TypeFamilies #-}
{-|
Module : Numeric.AERN.RmToRn.Integration
Description : integrators of function enclosures
Copyright : (c) Michal Konecny
License : BSD3
Maintainer : mikkonecny@gmail.com
Stability : experimental
Portability : portable
Integrators of function enclosures.
-}
module Numeric.AERN.RmToRn.Integration where
import Numeric.AERN.RmToRn.Domain
-- | Function enclosures supporting outward- and inward-rounded
-- antiderivative operations, each parameterized by an effort indicator.
class RoundedIntegration f where
    type IntegrationEffortIndicator f
    -- | Default effort for 'primitiveFunctionOutEff' / 'primitiveFunctionInEff'.
    integrationDefaultEffort :: f -> IntegrationEffortIndicator f
    {-| approximate the primitive function by integrating the given variable,
        starting at the left endpoint of its domain -}
    primitiveFunctionOutEff ::
        IntegrationEffortIndicator f ->
        f {-^ function @f@ -} ->
        (Var f) {-^ variable @x@ in the domain box of @f@ with values in @[l,r]@ -} ->
        f {-^ primitive function of @f(x,...)@ by @x@ with @f(l,...) = 0@ -}
    -- | Inward-rounded counterpart of 'primitiveFunctionOutEff'.
    primitiveFunctionInEff ::
        IntegrationEffortIndicator f ->
        f {-^ function @f@ -} ->
        (Var f) {-^ variable @x@ in the domain box of @f@ with values in @[l,r]@ -} ->
        f {-^ primitive function of @f(x,...)@ by @x@ with @f(l,...) = 0@ -}
-- | Outward-rounded antiderivative, using the function's default integration
-- effort ('integrationDefaultEffort').
primitiveFunctionOut ::
    RoundedIntegration f =>
    f {-^ the function @f@ to integrate -} ->
    (Var f) {-^ the integration variable @x@, with values in @[l,r]@ in the domain box -} ->
    f {-^ primitive function of @f(x,...)@ by @x@ with @f(l,...) = 0@ -}
primitiveFunctionOut fn =
    primitiveFunctionOutEff (integrationDefaultEffort fn) fn
-- | Inward-rounded antiderivative, using the function's default integration
-- effort ('integrationDefaultEffort').
primitiveFunctionIn ::
    RoundedIntegration f =>
    f {-^ the function @f@ to integrate -} ->
    (Var f) {-^ the integration variable @x@, with values in @[l,r]@ in the domain box -} ->
    f {-^ primitive function of @f(x,...)@ by @x@ with @f(l,...) = 0@ -}
primitiveFunctionIn fn =
    primitiveFunctionInEff (integrationDefaultEffort fn) fn
| michalkonecny/aern | aern-realfn/src/Numeric/AERN/RmToRn/Integration.hs | bsd-3-clause | 1,961 | 0 | 11 | 512 | 205 | 113 | 92 | 30 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
module Language.JavaScript.Parser.AST
(
Node (..)
, JSNode(..)
, SrcSpan (..)
, AlexPosn(..)
, showStripped
) where
import Data.Data
import Data.List
import Language.JavaScript.Parser.SrcLocation (SrcSpan(..), AlexPosn(..))
-- ---------------------------------------------------------------------
data JSNode = NS Node SrcSpan
deriving (Show, Eq, Read, Data, Typeable)
data Node = JSArguments [[JSNode]]
| JSArrayLiteral [JSNode]
| JSBlock JSNode
| JSBreak [JSNode] [JSNode]
| JSCallExpression String [JSNode] -- type : ., (), []; rest
| JSCase JSNode JSNode
| JSCatch JSNode [JSNode] JSNode
| JSContinue [JSNode]
| JSDecimal String -- Was Integer
| JSDefault JSNode
| JSDoWhile JSNode JSNode JSNode
| JSElision [JSNode]
| JSEmpty JSNode
| JSExpression [JSNode]
| JSExpressionBinary String [JSNode] [JSNode]
| JSExpressionParen JSNode
| JSExpressionPostfix String [JSNode]
| JSExpressionTernary [JSNode] [JSNode] [JSNode]
| JSFinally JSNode
| JSFor [JSNode] [JSNode] [JSNode] JSNode
| JSForIn [JSNode] JSNode JSNode
| JSForVar [JSNode] [JSNode] [JSNode] JSNode
| JSForVarIn JSNode JSNode JSNode
| JSFunction JSNode [JSNode] JSNode -- name, parameter list, body
| JSFunctionBody [JSNode]
| JSFunctionExpression [JSNode] [JSNode] JSNode -- name, parameter list, body
| JSHexInteger String -- Was Integer
| JSIdentifier String
| JSIf JSNode JSNode
| JSIfElse JSNode JSNode JSNode
| JSLabelled JSNode JSNode
| JSLiteral String
| JSMemberDot [JSNode] JSNode
| JSMemberSquare [JSNode] JSNode
| JSObjectLiteral [JSNode]
| JSOperator String
| JSPropertyNameandValue JSNode [JSNode]
| JSPropertyAccessor String JSNode [JSNode] JSNode
| JSRegEx String
| JSReturn [JSNode]
| JSSourceElements [JSNode]
| JSSourceElementsTop [JSNode]
| JSStatementBlock JSNode
| JSStatementList [JSNode]
| JSStringLiteral Char [Char]
| JSSwitch JSNode [JSNode]
| JSThrow JSNode
| JSTry JSNode [JSNode]
| JSUnary String
| JSVarDecl JSNode [JSNode]
| JSVariables String [JSNode]
| JSWhile JSNode JSNode
| JSWith JSNode [JSNode]
deriving (Show, Eq, Read, Data, Typeable)
-- | Strip out the location info, leaving the original JSNode text
-- representation.
showStripped :: JSNode -> String
showStripped = ss
-- Alias for internal use: drop the 'SrcSpan' and render the payload.
ss :: JSNode -> String
ss (NS node _) = showStrippedNode node
-- Render a list of nodes, comma-separated inside brackets.
-- (Uses 'intercalate' instead of the equivalent concat/intersperse pair.)
sss :: [JSNode] -> String
sss xs = "[" ++ intercalate "," (map ss xs) ++ "]"
-- Render a nested list of nodes, comma-separated inside brackets.
ssss :: [[JSNode]] -> String
ssss xss = "[" ++ intercalate "," (map sss xss) ++ "]"
showStrippedNode :: Node -> String
showStrippedNode (JSArguments xss) = "JSArguments " ++ ssss xss
showStrippedNode (JSArrayLiteral xs) = "JSArrayLiteral " ++ sss xs
showStrippedNode (JSBlock x) = "JSBlock (" ++ ss x ++ ")"
showStrippedNode (JSBreak x1s x2s) = "JSBreak " ++ sss x1s ++ " " ++ sss x2s
showStrippedNode (JSCallExpression s xs) = "JSCallExpression " ++ show s ++ " " ++ sss xs
showStrippedNode (JSCase x1 x2) = "JSCase (" ++ ss x1 ++ ") (" ++ ss x2 ++ ")"
showStrippedNode (JSCatch x1 x2s x3) = "JSCatch (" ++ ss x1 ++ ") " ++ sss x2s ++ " (" ++ ss x3 ++ ")"
showStrippedNode (JSContinue xs) = "JSContinue " ++ sss xs
showStrippedNode (JSDecimal s) = "JSDecimal " ++ show s
showStrippedNode (JSDefault x) = "JSDefault (" ++ ss x ++ ")"
showStrippedNode (JSDoWhile x1 x2 x3) = "JSDoWhile (" ++ ss x1 ++ ") (" ++ ss x2 ++ ") (" ++ ss x3 ++ ")"
showStrippedNode (JSElision xs) = "JSElision " ++ sss xs
showStrippedNode (JSEmpty x) = "JSEmpty (" ++ ss x ++ ")"
showStrippedNode (JSExpression xs) = "JSExpression " ++ sss xs
showStrippedNode (JSExpressionBinary s x2s x3s) = "JSExpressionBinary " ++ show s ++ " " ++ sss x2s ++ " " ++ sss x3s
showStrippedNode (JSExpressionParen x) = "JSExpressionParen (" ++ ss x ++ ")"
showStrippedNode (JSExpressionPostfix s xs) = "JSExpressionPostfix " ++ show s ++ " " ++ sss xs
showStrippedNode (JSExpressionTernary x1s x2s x3s) = "JSExpressionTernary " ++ sss x1s ++ " " ++ sss x2s ++ " " ++ sss x3s
showStrippedNode (JSFinally x) = "JSFinally (" ++ ss x ++ ")"
showStrippedNode (JSFor x1s x2s x3s x4) = "JSFor " ++ sss x1s ++ " " ++ sss x2s ++ " " ++ sss x3s ++ " (" ++ ss x4 ++ ")"
showStrippedNode (JSForIn x1s x2 x3) = "JSForIn " ++ sss x1s ++ " (" ++ ss x2 ++ ") (" ++ ss x3 ++ ")"
showStrippedNode (JSForVar x1s x2s x3s x4) = "JSForVar " ++ sss x1s ++ " " ++ sss x2s ++ " " ++ sss x3s ++ " (" ++ ss x4 ++ ")"
showStrippedNode (JSForVarIn x1 x2 x3) = "JSForVarIn (" ++ ss x1 ++ ") (" ++ ss x2 ++ ") (" ++ ss x3 ++ ")"
showStrippedNode (JSFunction x1 x2s x3) = "JSFunction (" ++ ss x1 ++ ") " ++ sss x2s ++ " (" ++ ss x3 ++ ")"
showStrippedNode (JSFunctionBody xs) = "JSFunctionBody " ++ sss xs
showStrippedNode (JSFunctionExpression x1s x2s x3) = "JSFunctionExpression " ++ sss x1s ++ " " ++ sss x2s ++ " (" ++ ss x3 ++ ")"
showStrippedNode (JSHexInteger s) = "JSHexInteger " ++ show s
showStrippedNode (JSIdentifier s) = "JSIdentifier " ++ show s
showStrippedNode (JSIf x1 x2) = "JSIf (" ++ ss x1 ++ ") (" ++ ss x2 ++ ")"
showStrippedNode (JSIfElse x1 x2 x3) = "JSIfElse (" ++ ss x1 ++ ") (" ++ ss x2 ++ ") (" ++ ss x3 ++ ")"
showStrippedNode (JSLabelled x1 x2) = "JSLabelled (" ++ ss x1 ++ ") (" ++ ss x2 ++ ")"
showStrippedNode (JSLiteral s) = "JSLiteral " ++ show s
showStrippedNode (JSMemberDot x1s x2) = "JSMemberDot " ++ sss x1s ++ " (" ++ ss x2 ++ ")"
showStrippedNode (JSMemberSquare x1s x2) = "JSMemberSquare " ++ sss x1s ++ " (" ++ ss x2 ++ ")"
showStrippedNode (JSObjectLiteral xs) = "JSObjectLiteral " ++ sss xs
showStrippedNode (JSOperator s) = "JSOperator " ++ show s
showStrippedNode (JSPropertyNameandValue x1 x2s) = "JSPropertyNameandValue (" ++ ss x1 ++ ") " ++ sss x2s
showStrippedNode (JSPropertyAccessor s x1 x2s x3) = "JSPropertyAccessor " ++ show s ++ " (" ++ ss x1 ++ ") " ++ sss x2s ++ " (" ++ ss x3 ++ ")"
showStrippedNode (JSRegEx s) = "JSRegEx " ++ show s
showStrippedNode (JSReturn xs) = "JSReturn " ++ sss xs
showStrippedNode (JSSourceElements xs) = "JSSourceElements " ++ sss xs
showStrippedNode (JSSourceElementsTop xs) = "JSSourceElementsTop " ++ sss xs
showStrippedNode (JSStatementBlock x) = "JSStatementBlock (" ++ ss x ++ ")"
showStrippedNode (JSStatementList xs) = "JSStatementList " ++ sss xs
showStrippedNode (JSStringLiteral c s) = "JSStringLiteral " ++ show c ++ " " ++ show s
showStrippedNode (JSSwitch x x2s) = "JSSwitch (" ++ ss x ++ ") " ++ sss x2s
showStrippedNode (JSThrow x) = "JSThrow (" ++ ss x ++ ")"
showStrippedNode (JSTry x1 x2s) = "JSTry (" ++ ss x1 ++ ") " ++ sss x2s
showStrippedNode (JSUnary s) = "JSUnary " ++ show s
showStrippedNode (JSVarDecl x1 x2s) = "JSVarDecl (" ++ ss x1 ++ ") " ++ sss x2s
showStrippedNode (JSVariables s xs) = "JSVariables " ++ show s ++ " " ++ sss xs
showStrippedNode (JSWhile x1 x2) = "JSWhile (" ++ ss x1 ++ ") (" ++ ss x2 ++ ")"
showStrippedNode (JSWith x1 x2s) = "JSWith (" ++ ss x1 ++ ") " ++ sss x2s
-- EOF
| thdtjsdn/language-javascript | src/Language/JavaScript/Parser/AST.hs | bsd-3-clause | 7,732 | 0 | 13 | 2,013 | 2,540 | 1,294 | 1,246 | 129 | 1 |
{-# LANGUAGE OverloadedStrings, TemplateHaskell, Trustworthy #-}
-- |
-- Maintainer : Ricky Elrod <ricky@elrod.me>
-- Stability : stable
--
-- Contains data types/constructors for individual sandbox runs.
-- For example, the compile step will produce a 'SandboxResult', which is
-- defined in this module. The execution/evaluation step will also produce a
-- 'SandboxResult'.
module Evalso.Cruncher.SandboxResult (SandboxResult (..)) where
import Control.Applicative
import Control.Lens hiding ((.=))
import Control.Monad (mzero)
import Data.Aeson
import Data.Map (Map)
import Data.Text (Text)
-- | Describes the result we get back after performing an evaluation (or
-- compilation). This is almost always wrapped in 'IO'.
data SandboxResult = SandboxResult {
stdout :: Text -- ^ Standard output stream
, stderr :: Text -- ^ Standard error stream
, wallTime :: Int -- ^ How long the process took
, exitCode :: Int -- ^ The exit code returned by the process
, outputFiles :: Map String Text -- ^ Base64-encoded output files
} deriving (Eq, Show)
makeLenses ''SandboxResult
-- | Serialize every field under its record-selector-derived key; the key
-- names match the 'FromJSON' instance below.
instance ToJSON SandboxResult where
  toJSON result = object
    [ "stdout"      .= stdout result
    , "stderr"      .= stderr result
    , "wallTime"    .= wallTime result
    , "exitCode"    .= exitCode result
    , "outputFiles" .= outputFiles result
    ]
-- | Parse from a JSON object; any non-object JSON value fails via 'mzero'.
instance FromJSON SandboxResult where
  parseJSON (Object o) =
    SandboxResult
      <$> o .: "stdout"
      <*> o .: "stderr"
      <*> o .: "wallTime"
      <*> o .: "exitCode"
      <*> o .: "outputFiles"
  parseJSON _ = mzero
| eval-so/cruncher-types | src/Evalso/Cruncher/SandboxResult.hs | bsd-3-clause | 1,730 | 0 | 15 | 444 | 286 | 167 | 119 | 32 | 0 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TypeOperators #-}
-- |
-- Module: $HEADER$
-- Description: Create Default instances using GHC Generics.
-- Copyright: (c) 2016-2017 Peter Trško
-- License: BSD3
--
-- Maintainer: peter.trsko@gmail.com
-- Stability: stable
-- Portability: GHC specific language extension.
--
-- __DEPRECATED__ because package
-- <https://hackage.haskell.org/package/data-default-class data-default-class>
-- now supports GHC Generics.
--
-- Create 'Default' instances using GHC Generics. For more information see:
--
-- * <https://downloads.haskell.org/~ghc/latest/docs/html/users_guide/ GHC User's Guide>
--
-- * <https://wiki.haskell.org/GHC.Generics HaskellWiki: GHC.Generics>
--
-- Package
-- <https://hackage.haskell.org/package/data-default-class data-default-class>
-- supports GHC Generics since version 0.1.0, but it is still useful to have
-- 'GDefault' and 'genericDef' exposed for more complex cases.
module Data.Default.Generic
{-# DEPRECATED "Use DefaultSignatures or DeriveAnyClass language extension instead." #-}
(
-- | Rule of thumb, if generic instance definition, contains more code then
-- the explicit definition, then use the explicit definition.
--
-- Note, that sum types aren't supported, but even if they were, it is
-- always better to explicitly specify 'Default' instance for sum types.
--
-- Usage example:
--
-- @
-- {-\# LANGUAGE DeriveGeneric \#-}
--
-- import GHC.Generics (Generic)
--
--
-- data MyType = MyType Int (Maybe String)
-- deriving (Generic, Show)
--
-- instance 'Default' MyType where
-- 'def' = 'genericDef'
-- @
--
-- >>> def :: MyType
-- MyType 0 Nothing
genericDef
, GDefault(gdef)
)
where
import GHC.Generics
( Generic
, (:*:)((:*:))
, K1(K1)
, M1(M1)
, Rep
, U1(U1)
, to
)
import Data.Default.Class (Default(def))
-- | Derive implementation of 'def' by using GHC Generics.
genericDef :: (Generic a, GDefault (Rep a)) => a
genericDef = to gdef
-- | Simple derivation of 'def' definition that handles only product types, but
-- not sum types. In case of sum types it is better to provide hand written
-- instances for 'Default'.
class GDefault f where
    gdef :: f a
-- Nullary constructors are their own default.
instance GDefault U1 where
    gdef = U1
-- Leaf fields fall back to the field type's own 'Default' instance.
instance Default a => GDefault (K1 i a) where
    gdef = K1 def
-- Metadata wrappers are transparent.
instance GDefault a => GDefault (M1 i c a) where
    gdef = M1 gdef
-- Products are built component-wise.
instance (GDefault a, GDefault b) => GDefault (a :*: b) where
    gdef = gdef :*: gdef
| trskop/data-default-instances-unordered-containers | extra/src/Data/Default/Generic.hs | bsd-3-clause | 2,651 | 0 | 8 | 576 | 305 | 198 | 107 | 33 | 1 |
module DTD where
import Xml2Haskell
{-Type decls-}
-- NOTE(review): this module appears machine-generated (DtdToHaskell-style
-- {-Type decls-}/{-Instance decls-} markers); prefer regenerating from the
-- DTD over hand-editing these declarations.
newtype Persons = Persons [Person] deriving (Eq,Show)
-- A person is either male (with an optional father's name) or female (with
-- an optional mother's name); both variants carry the shared attributes.
data Person = Person_Male_FathersName Person_Attrs
                                      Male (Maybe FathersName)
            | Person_Female_MothersName Person_Attrs Female
                                        (Maybe MothersName)
            deriving (Eq,Show)
data Person_Attrs = Person_Attrs
    { personId :: Id  -- required "id" attribute (see XmlAttributes instance)
    } deriving (Eq,Show)
newtype FathersName = FathersName String deriving (Eq,Show)
newtype MothersName = MothersName String deriving (Eq,Show)
data Male = Male
    { maleSrc :: (Maybe String)  -- optional "src" attribute
    , maleAlt :: Alt             -- required "alt" attribute
    } deriving (Eq,Show)
data Alt = A | B
         deriving (Eq,Show)
data Female = Female deriving (Eq,Show)
{-Instance decls-}
instance XmlContent Persons where
fromElem (CElem (Elem "persons" [] c0):rest) =
(\(a,ca)->
(Just (Persons a), rest))
(many fromElem c0)
fromElem rest = (Nothing, rest)
toElem (Persons a) =
[CElem (Elem "persons" [] (concatMap toElem a))]
instance XmlContent Person where
fromElem (CElem (Elem "Person" as c0):rest) =
case (\(a,ca)->
(\(b,cb)->
(a,b,cb))
(fromElem ca))
(fromElem c0) of
(Nothing,Nothing,_) -> case (\(a,ca)->
(\(b,cb)->
(a,b,cb))
(fromElem ca))
(fromElem c0) of
(Nothing,Nothing,_) -> (Nothing, c0)
(Just a,b,[]) -> (Just (Person_Female_MothersName (fromAttrs as) a
b), rest)
(Just a,b,[]) -> (Just (Person_Male_FathersName (fromAttrs as) a
b), rest)
toElem (Person_Male_FathersName as a
b) = [CElem (Elem "Person" (toAttrs as) (toElem a
++
maybe [] toElem b) )]
toElem (Person_Female_MothersName as a
b) = [CElem (Elem "Person" (toAttrs as) (toElem a
++
maybe [] toElem b) )]
instance XmlAttributes Person_Attrs where
fromAttrs as =
Person_Attrs
{ personId = definiteA fromAttrToTyp "Person" "id" as
}
toAttrs v = catMaybes
[ toAttrFrTyp "id" (personId v)
]
instance XmlContent FathersName where
fromElem (CElem (Elem "FathersName" [] c0):rest) =
(\(a,ca)->
(Just (FathersName a), rest))
(definite fromText "text" "FathersName" c0)
fromElem rest = (Nothing, rest)
toElem (FathersName a) =
[CElem (Elem "FathersName" [] (toText a))]
instance XmlContent MothersName where
fromElem (CElem (Elem "MothersName" [] c0):rest) =
(\(a,ca)->
(Just (MothersName a), rest))
(definite fromText "text" "MothersName" c0)
fromElem rest = (Nothing, rest)
toElem (MothersName a) =
[CElem (Elem "MothersName" [] (toText a))]
instance XmlContent Male where
fromElem (CElem (Elem "Male" as []):rest) =
(Just (fromAttrs as), rest)
fromElem rest = (Nothing, rest)
toElem v =
[CElem (Elem "Male" (toAttrs v) [])]
instance XmlAttributes Male where
fromAttrs as =
Male
{ maleSrc = possibleA fromAttrToStr "src" as
, maleAlt = definiteA fromAttrToTyp "Male" "alt" as
}
toAttrs v = catMaybes
[ maybeA toAttrFrStr "src" (maleSrc v)
, toAttrFrTyp "alt" (maleAlt v)
]
instance XmlAttrType Alt where
fromAttrToTyp n (n',v)
| n==n' = translate (attr2str v)
| otherwise = Nothing
where translate "A" = Just A
translate "B" = Just B
translate _ = Nothing
toAttrFrTyp n A = Just (n, str2attr "A")
toAttrFrTyp n B = Just (n, str2attr "B")
instance XmlContent Female where
fromElem (CElem (Elem "Female" [] []):rest) =
(Just Female, rest)
fromElem rest = (Nothing, rest)
toElem Female =
[CElem (Elem "Female" [] [])]
{-Done-}
| FranklinChen/hugs98-plus-Sep2006 | packages/HaXml/bugs/malc.hs | bsd-3-clause | 3,543 | 96 | 18 | 859 | 1,428 | 786 | 642 | 104 | 0 |
-----------
-- Halma --
-----------
module Halma (Halma, halma) where
import Game
import Data.Array
-- import Graphics.UI.WX
import Graphics.UI.WX hiding (border, empty, point)
import Graphics.UI.WXCore hiding (empty, point)
import Tools
-- | Board state: for every coordinate (x, y) with x, y in [-8 .. 8], the
-- occupying player, if any.  Only cells satisfying 'inside' belong to the
-- star-shaped board; the rest of the square array is unused padding.
data Halma = Halma (Array (Int, Int) (Maybe Player)) deriving (Eq, Show)
-- | A move: (from, to) board coordinates.
type HalmaMove = ((Int, Int), (Int, Int))
-- NOTE(review): 'halma' is 'undefined' — presumably a type witness for
-- selecting the 'Game' instance whose value is never forced; confirm that no
-- call site ever evaluates it.
halma :: Halma
halma = undefined
instance Game Halma where
name _ = "halma"
standard _ = Properties { players = 2, boardsize = 8, human = [True, False, False, False, False, False] }
possible _ = PropertyRange { playersrange = [2, 3, 4, 6], boardsizerange = [8] }
new pr = let empty = [((x, y), Nothing) | x <- [-8 .. 8], y <- [-8 .. 8]]
in Halma $ array ((-8, -8), (8, 8)) empty // concatMap (\p -> map (\t -> (t, Just p)) $ startpos $ pos pr p) [0 .. players pr - 1]
moves pr p (Halma s) = map (move pr) (allMoves pr p s)
showmove pr p (Halma s) i = let ((x1, y1), (x2, y2)) = allMoves pr p s !! i
in "abcdefghijklmnopq" !! (x1 + 8) : show (9 - y1) ++ "-" ++ "abcdefghijklmnopq" !! (x2 + 8) : show (9 - y2)
value pr p (Halma st) | null $ allMoves pr p st = let winners = map snd $ filter (\(d, _) -> d == 20) $ zip totaldists [0..]
in foldr ($) (replicate (players pr) (-1)) $ map (|> 1) winners
| otherwise = map myvalue [0 .. players pr - 1]
where
totaldists :: [Int]
totaldists = map totaldist [0 .. players pr - 1]
totaldist :: Player -> Int
totaldist p' = let mypieces = map (\(i, _e) -> i) $ filter (\(_i, e) -> e == Just p') $ assocs st
in sum $ map (dist pr p') mypieces
myvalue :: Player -> Float
myvalue p' = let d = sum (map totaldist [0 .. players pr - 1]) - (players pr) * totaldist p'
in (fromInteger . toInteger) d / (fromInteger . toInteger) (120 * (players pr))
board p pr vart _ia move' = do
marble <- bitmapCreateLoad "images\\marble.bmp" wxBITMAP_TYPE_ANY
varg <- varCreate $ grate rectZero 0 (0, 0) sizeZero
vare <- varCreate (Nothing :: Maybe (Int, Int))
let
onpaint :: DC () -> Rect -> IO ()
onpaint dc r = do
t <- varGet vart
e <- varGet vare
b_ <- border dc (16, 16)
let g_ = grate r b_ (26, 17) (Size 4 7)
b <- fit dc (16, 16) $ rectWidth (field g_ (0, 0))
let Halma st = state t
g = grate r b (26, 17) (Size 4 7)
radius = rectHeight (field g (0, 0)) `div` 3
lin' :: Rect -> Rect -> IO ()
lin' (Rect x1 y1 w1 h1) (Rect x2 y2 w2 h2) = do
line dc (pt (x1 + w1) (y1 + h1 `div` 2)) (pt (x2 + w2) (y2 + h2 `div` 2)) []
lin :: (Int, Int) -> (Int, Int) -> IO ()
lin p' q = lin' (field g $ tograte p') (field g $ tograte q)
varSet varg g
tileBitmap dc r marble
--{ drawGrate dc g [penColor := yellow]
for 0 16 (\j -> do
let i = head $ dropWhile (\i' -> inside $ fromgrate (i', j)) [13 ..]
drawTextRect dc (show $ 17 - j) $ field g ( i - 1, j) |#| field g ( i, j)
drawTextRect dc (show $ 17 - j) $ field g (25 - i, j) |#| field g (26 - i, j)
let d = (i - 1 + 3 * j) `div` 2 - 18
e' = (25 - i + 3 * j) `div` 2 - 18
drawTextRect dc [['A' ..] !! (16 - j)] $ field g (i - 1 - d, j - d) |#| field g (i - 1 - d, j - 1 - d)
drawTextRect dc [['A' ..] !! (16 - j)] $ field g (25 - i - e', j - e') |#| field g (25 - i - e', j - 1 - e')
)
for 0 4 (\n -> do
lin ( - 4, n - 8) (n - 4, n - 8)
lin (n - 8, n - 4) ( 4, n - 4)
lin ( - 4, n ) (n + 4, n )
lin (n , n + 4) ( 4, n + 4)
lin (n - 8, - 4) (n - 8, n - 4)
lin (n - 4, n - 8) (n - 4, 4)
lin (n , - 4) (n , n + 4)
lin (n + 4, n ) (n + 4, 4)
lin ( - 4, -n + 4) (n - 4, 4)
lin (n - 8, - 4) ( 4, -n + 8)
lin ( - 4, -n - 4) (n + 4, 4)
lin (n , - 4) ( 4, -n )
)
for 0 24 (\i -> for 0 16 (\j ->
when (even (i + j)) $ when (inside $ fromgrate (i, j)) $
drawPiece dc (field g (i, j)) radius (st ! fromgrate (i, j))
) )
case e of Just p' -> drawBrightPiece dc (field g $ tograte p') radius
Nothing -> return ()
onclick :: Point -> IO ()
onclick point = do
t <- varGet vart
e <- varGet vare
g <- varGet varg
let Halma st = state t
n = fromgrate $ locate g point
case (e, inside n) of
(Nothing, True ) -> when (st ! n == Just (player t)) $ varSet vare (Just n) >> repaint p
(_ , False) -> varSet vare Nothing >> repaint p
(Just te, True ) -> case lookup (te, n) $ zip (allMoves pr (player t) st) [0..] of
Nothing -> varSet vare Nothing >> repaint p
Just i -> varSet vare Nothing >> repaint p >> move' i
set p [ on click := onclick
, on unclick := onclick
, on paint := onpaint
, on resize ::= repaint
]
return ()
-- | Find a font size at which the text border fits within @m@ pixels:
-- starting from (current size + 5) and shrinking one point per step, giving
-- up at size 1; returns the resulting border width for @t@.
fit :: DC () -> (Int, Int) -> Int -> IO Int
fit dc t m = do
  s <- get dc fontSize
  fit_ dc (s + 6)
  where
  fit_ :: DC () -> Int -> IO Int
  fit_ _dc 1 = border dc t
  fit_ dc' s = do
    -- the first iteration already shrinks: fontSize becomes (s + 6) - 1
    set dc' [fontSize := s - 1]
    b <- border dc t
    if b <= m then return b
     else fit_ dc' (s - 1)
-- | Draw a filled circle for a (possibly empty) board cell; colour via 'col'.
drawPiece :: DC () -> Rect -> Int -> Maybe Player -> IO ()
drawPiece dc (Rect x y w h) r mp = circle dc (pt (x + w) (y + h `div` 2)) r [brushColor := col mp]
-- | Highlight the currently selected piece with a yellow ring (no fill).
drawBrightPiece :: DC () -> Rect -> Int -> IO ()
drawBrightPiece dc (Rect x y w h) r = circle dc (pt (x + w) (y + h `div` 2)) r [brushKind := BrushTransparent, penWidth := 3, penColor := yellow]
-- | Map board coordinates (x, y) to drawing-grate coordinates:
-- i = 2x - y + 12, j = y + 8.
tograte :: (Int, Int) -> (Int, Int)
tograte (x, y) = (2 * x - y + 12, y + 8)
-- | Inverse of 'tograte' on valid cells:
-- x = (i + j) `div` 2 - 10, y = j - 8.
fromgrate :: (Int, Int) -> (Int, Int)
fromgrate (i, j) = ((i + j) `div` 2 - 10, j - 8)
-- | Display colour per player slot; empty cells ('Nothing') render white.
-- The final wildcard (black) is only reached for out-of-range player numbers.
col :: Maybe Player -> Color
col p = case p of
          Nothing -> white
          Just 0 -> blue
          Just 1 -> red
          Just 2 -> green
          Just 3 -> rgb 160 0 (192 :: Int)
          Just 4 -> rgb 192 128 (0 :: Int)
          Just 5 -> grey
          _ -> black
-- | Component-wise addition of coordinate pairs.
(+-) :: Num a => (a, a) -> (a, a) -> (a, a)
(+-) (x1, y1) (x2, y2) = (x1 + x2, y1 + y2)
-- | All legal moves for player @p@ in board state @st@: single steps (only
-- the four directions allowed by 'steps') plus chains of jumps.  Returns []
-- — ending the game — once it is player 0's turn and some player's total
-- distance has reached 20, the finished score that 'value' also treats as a
-- win.
allMoves :: Properties -> Player -> Array (Int, Int) (Maybe Player) -> [HalmaMove]
allMoves pr p st | p == 0 && 20 `elem` (map totaldist [0 .. players pr - 1]) = []
                 | otherwise = stepmoves ++ jumpmoves
  where
    -- Cells currently occupied by the given player.
    mypieces :: Player -> [(Int, Int)]
    mypieces p' = map (\(i, _e) -> i) $ filter (\(_i, e) -> e == Just p') $ assocs st
    -- One-cell moves into an empty on-board neighbour cell.
    stepmoves :: [HalmaMove]
    stepmoves = let potmoves = concatMap (\t -> map (\s -> (t, t +- s)) $ steps pr p) (mypieces p)
                in filter (\(_f, t) -> inside t && st ! t == Nothing) potmoves
    -- Every destination reachable by a chain of hops from one of our pieces.
    jumpmoves :: [HalmaMove]
    jumpmoves = concatMap (\t -> map (\s -> (t, s)) $ floodfill t []) (mypieces p)
    -- Flood fill of jump destinations: from t, hop over an adjacent occupied
    -- cell (t +- j) into the empty cell beyond (t +- j +- j), skipping cells
    -- already visited (fs) to avoid cycles.
    floodfill :: (Int, Int) -> [(Int, Int)] -> [(Int, Int)]
    floodfill t fs = let news = map (\j -> t +- j +- j)
                              $ filter (\j -> let u = t +- j +- j
                                              in inside u
                                              && st ! (t +- j) /= Nothing
                                              && st ! u == Nothing
                                              && not (u `elem` fs)
                                       )
                              $ halfjumps
                     in foldr ($) fs $ map (\u -> floodfill u . (u :)) news
    -- Sum of 'dist' over the player's pieces (progress measure; lower is
    -- better, see 'value').
    totaldist :: Player -> Int
    totaldist p' = sum $ map (dist pr p') $ mypieces p'
-- | The four step directions allowed for a player, selected by the player's
-- corner index ('pos').  NOTE(review): only four of the six neighbour
-- directions are permitted per corner — presumably the "forward" ones;
-- confirm against the rule set in use.
steps :: Properties -> Player -> [(Int, Int)]
steps pr p = steppos (pos pr p)
  where
    steppos 0 = [( 1, 1), ( 1, 0), ( 0, -1), (-1, -1)]
    steppos 1 = [( 0, 1), ( 1, 1), ( 1, 0), ( 0, -1)]
    steppos 2 = [(-1, 0), ( 0, 1), ( 1, 1), ( 1, 0)]
    steppos 3 = [(-1, -1), (-1, 0), ( 0, 1), ( 1, 1)]
    steppos 4 = [( 0, -1), (-1, -1), (-1, 0), ( 0, 1)]
    steppos 5 = [( 1, 0), ( 0, -1), (-1, -1), (-1, 0)]
    steppos _ = error "steps: Unexpected value"
{-
jumps :: [(Int, Int)]
jumps = map (\(x, y) -> (2 * x, 2 * y)) halfjumps
-}
-- | The six neighbour directions of the board; a jump hops over a piece at
-- @t +- j@ and lands at @t +- j +- j@ (see 'allMoves').
halfjumps :: [(Int, Int)]
halfjumps = [(1, 1), (1, 0), (0, -1), (-1, -1), (-1, 0), (0, 1)]
-- | Per-piece progress measure for player @p@ at cell @t@, keyed on the
-- player's corner index ('pos').  Lower totals are better: 'value' rewards
-- small sums and treats a total of 20 over the ten pieces as finished.
dist :: Properties -> Player -> (Int, Int) -> Int
dist pr p t = distpos (pos pr p) t
  where
    distpos 0 (x, y) = 8 - x + y + max 0 (-4 + x ) + max 0 (-4 - y )
    distpos 1 (x, y) = 8 - x + max 0 (-4 + x - y) + max 0 (-4 + y )
    distpos 2 (x, y) = 8 - y + max 0 (-4 + x ) + max 0 (-4 + y - x)
    distpos 3 (x, y) = 8 + x - y + max 0 (-4 - x ) + max 0 (-4 + y )
    distpos 4 (x, y) = 8 + x + max 0 (-4 - x + y) + max 0 (-4 - y )
    distpos 5 (x, y) = 8 + y + max 0 (-4 - x ) + max 0 (-4 - y + x)
    distpos _ _ = error "dist: Unexpected value"
-- | Apply a (from, to) move for player @p@: vacate the source cell, occupy
-- the target, and pass the turn to the next player (mod player count).
move :: Properties -> HalmaMove -> (Player, Halma) -> (Player, Halma)
move pr (f, t) (p, Halma s) = ( (p + 1) `mod` players pr
                              , Halma $ s // [(f, Nothing), (t, Just p)]
                              )
-- | Starting cells (ten per player) for the corner at the given position
-- index (see 'pos'); errors on indices outside [0 .. 5].
startpos :: (Eq t, Num t, Num t1, Enum t1) => t -> [(t1, t1)]
startpos n
  | n == 0    = [(x, y) | x <- [-4 .. -1], y <- [x + 5 .. 4]]
  | n == 1    = [(x, y) | x <- [-8 .. -5], y <- [-4 .. x + 4]]
  | n == 2    = [(x, y) | x <- [-4 .. -1], y <- [x - 4 .. -5]]
  | n == 3    = [(x, y) | x <- [1 .. 4],   y <- [-4 .. x - 5]]
  | n == 4    = [(x, y) | x <- [5 .. 8],   y <- [x - 4 .. 4]]
  | n == 5    = [(x, y) | x <- [1 .. 4],   y <- [5 .. x + 4]]
  | otherwise = error "startpos: Unexpected value"
-- | Corner index on the 6-pointed star occupied by player @p@, given the
-- configured number of players (2, 3, 4 or 6); errors for any other count.
pos :: Properties -> Player -> Int
pos pr p | players pr == 2 = [0, 3 ] !! p
         | players pr == 3 = [0, 2, 4 ] !! p
         | players pr == 4 = [0, 1, 3, 4] !! p
         | players pr == 6 = p
         | otherwise = error "pos: Unexpected value"
-- | True iff (x, y) lies on the star-shaped board: the union of two
-- overlapping triangular half-boards.
inside :: (Int, Int) -> Bool
inside (x, y) = inUpper || inLower
  where
    inUpper = x >= -4 && y <= 4 && x - y <= 4
    inLower = y >= -4 && x <= 4 && y - x <= 4
{- the halmaboard internally looks like this:
y/j
-8 ....x............
-7 ....xx...........
-6 ....xxx..........
-5 ....xxxx.........
-4 xxxx*****xxxx....
-3 .xxx******xxx....
-2 ..xx*******xx....
-1 ...x********x....
0 ....*********....
1 ....x********x...
2 ....xx*******xx..
3 ....xxx******xxx.
4 ....xxxx*****xxxx
5 .........xxxx....
6 ..........xxx....
7 ...........xx....
8 ............x....
87654321012345678 x/i
--------
-}
| HJvT/GeBoP | Halma.hs | bsd-3-clause | 10,720 | 0 | 27 | 3,949 | 5,591 | 2,973 | 2,618 | 188 | 8 |
-----------------------------------------------------------------------------
-- |
-- Module : Berp.Compile.PrimName
-- Copyright : (c) 2010 Bernie Pope
-- License : BSD-style
-- Maintainer : florbitous@gmail.com
-- Stability : experimental
-- Portability : ghc
--
-- Names for primtive functions used in the output of the compiler.
--
-----------------------------------------------------------------------------
module Berp.Compile.PrimName where
import Language.Haskell.Exts.Syntax as Hask
import Language.Python.Common.AST as Py
import Prelude hiding (read, init)
import Language.Haskell.Exts.Build
import Berp.Compile.Utils
-- | Module names referenced by the generated Haskell output.
preludeModuleName, berpModuleName :: ModuleName
berpModuleName = ModuleName "Berp.Base"
preludeModuleName = ModuleName "Prelude"
-- | A variable expression naming a Berp runtime primitive.
prim :: String -> Exp
prim = var . name
importAll :: Exp
importAll = prim "importAll"
setItem :: Exp
setItem = prim "setitem"
unpack :: Exp
unpack = prim "unpack"
complex :: Exp
complex = prim "complex"
tailCall :: Exp
tailCall = prim "tailCall"
dict :: Exp
dict = prim "dictionary"
unaryPlus :: Exp
unaryPlus = prim "unaryPlus"
unaryMinus :: Exp
unaryMinus = prim "unaryMinus"
invert :: Exp
invert = prim "invert"
not :: Exp
not = prim "not"
generator :: Exp
generator = prim "generator"
returnGenerator :: Exp
returnGenerator = prim "returnGenerator"
yield :: Exp
yield = prim "yield"
for :: Exp
for = prim "for"
forElse :: Exp
forElse = prim "forElse"
break :: Exp
break = prim "break"
continue :: Exp
continue = prim "continue"
raise :: Exp
raise = prim "raise"
raiseFrom :: Exp
raiseFrom = prim "raiseFrom"
reRaise :: Exp
reRaise = prim "reRaise"
exceptDefault :: Exp
exceptDefault = prim "exceptDefault"
except :: Exp
except = prim "except"
exceptAs :: Exp
exceptAs = prim "exceptAs"
stmt :: Exp
stmt = prim "stmt"
list :: Exp
list = prim "list"
try :: Exp
try = prim "try"
tryElse :: Exp
tryElse = prim "tryElse"
tryFinally :: Exp
tryFinally = prim "tryFinally"
tryElseFinally :: Exp
tryElseFinally = prim "tryElseFinally"
subscript :: Exp
subscript = prim "subs"
pure :: Exp
pure = prim "pure"
pureObj :: Exp
pureObj = prim "pureObject"
primOp :: String -> QOp
primOp = op . sym
assignOp :: QOp
assignOp = primOp "=:"
writeLocal :: Exp
writeLocal = prim "writeLocal"
writeGlobal :: Exp
writeGlobal = prim "writeGlobal"
setAttr :: Exp
setAttr = prim "setattr"
while :: Exp
while = prim "while"
{-
global :: Exp
global = prim "global"
-}
{-
globalRef :: Exp
globalRef = prim "globalRef"
-}
globalsName :: String
globalsName = "globals"
globalsPat :: Pat
globalsPat = pvar $ name globalsName
globals :: Exp
globals = prim globalsName
topVar :: Exp
topVar = prim "topVar"
variable :: Exp
variable = prim "var"
{-
globalVariable :: Exp
globalVariable = prim "globalVar"
-}
tuple :: Exp
tuple = prim "tuple"
set :: Exp
set = prim "set"
whileElse :: Exp
whileElse = prim "whileElse"
runStmt :: Exp
runStmt = prim "runStmt"
interpretStmt :: Exp
interpretStmt = prim "interpretStmt"
-- 'init' is a plain variable reference built with 'var', not a 'prim'
-- lookup like the surrounding bindings.
initName :: Name
initName = name "init"
init :: Exp
init = var initName
ret :: Exp
ret = prim "ret"
-- Conditionals: if/then/else and if/then.
ite :: Exp
ite = prim "ifThenElse"
ifThen :: Exp
ifThen = prim "ifThen"
-- Definition forms: functions, classes, lambdas.
def :: Exp
def = prim "def"
klass :: Exp
klass = prim "klass"
lambda :: Exp
lambda = prim "lambda"
call :: Exp
call = prim "call"
-- The application operator used in generated code.
apply :: QOp
apply = primOp "@@"
-- Variable reads.
read :: Exp
read = prim "read"
readLocal :: Exp
readLocal = prim "readLocal"
readGlobal :: Exp
readGlobal = prim "readGlobal"
-- | Wrap an integer literal in a call to the "integer" primitive.
integer :: Integer -> Exp
integer i = app (prim "integer") (intE i)
-- | Select the runtime true/false constant for a Haskell 'Bool'.
bool :: Bool -> Exp
bool b = if b then true else false
true,false :: Exp
true = prim "true"
false = prim "false"
none :: Exp
none = prim "none"
pass :: Exp
pass = prim "pass"
-- Module construction and import.
mkModule :: Exp
mkModule = prim "mkModule"
importModule :: Exp
importModule = prim "importModule"
-- | Wrap a string literal in a call to the "string" primitive.
string :: String -> Exp
string s = app (prim "string") (strE s)
-- | Map a Python operator from the parse tree to the Haskell-AST
-- operator used in generated code.  Boolean and/or become named
-- operators; everything else maps onto a symbolic operator via
-- 'primOp'.  Any operator not listed here is rejected.
opExp :: Py.OpSpan -> Hask.QOp
opExp pyOp = case pyOp of
  And {}               -> op (name "and")
  Or {}                -> op (name "or")
  Exponent {}          -> primOp "**"
  LessThan {}          -> primOp "<"
  GreaterThan {}       -> primOp ">"
  Equality {}          -> primOp "=="
  GreaterThanEquals {} -> primOp ">=" -- not sure if this is official
  LessThanEquals {}    -> primOp "<="
  NotEquals {}         -> primOp "!="
  BinaryOr {}          -> primOp "||"
  Xor {}               -> primOp "^"
  BinaryAnd {}         -> primOp "&"
  ShiftLeft {}         -> primOp "<<"
  ShiftRight {}        -> primOp ">>"
  Multiply {}          -> primOp "*"
  Plus {}              -> primOp "+"
  Minus {}             -> primOp "-"
  Divide {}            -> primOp "/"
  FloorDivide {}       -> primOp "//"
  Invert {}            -> primOp "~"
  Modulo {}            -> primOp "%"
  Dot {}               -> primOp "."
  other                -> unsupported ("opExp: " ++ show other)
| bjpop/berp | libs/src/Berp/Compile/PrimName.hs | bsd-3-clause | 4,753 | 0 | 7 | 878 | 1,510 | 813 | 697 | 174 | 2 |
-- Lightweight calculus for composing patterns as functions.
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE ViewPatterns #-}
{- |
Module : Verifier.SAW.Recognizer
Copyright : Galois, Inc. 2012-2015
License : BSD3
Maintainer : jhendrix@galois.com
Stability : experimental
Portability : non-portable (language extensions)
-}
module Verifier.SAW.Recognizer
( Recognizer
, (<:), emptyl, endl
, (:*:)(..)
, asFTermF
, asGlobalDef
, isGlobalDef
, asApp
, (<@>), (@>)
, asApplyAll
, asPairValue
, asPairSelector
, asTupleType
, asTupleValue
, asTupleSelector
, asRecordType
, asRecordValue
, asRecordSelector
, asCtor
, asDataType
, isDataType
, asNatLit
, asStringLit
, asLambda
, asLambdaList
, asPi
, asPiList
, asLocalVar
-- * Prelude recognizers.
, asBool
, asBoolType
, Nat
, asBitvectorType
, asVectorType
, asVecType
, isVecType
, asMux
) where
import Control.Applicative
import Control.Lens
import Control.Monad
import Data.Map (Map)
import Verifier.SAW.Prim
import Verifier.SAW.TypedAST
-- | A pair of recognizer results; isomorphic to the tuple @(a, b)@ but
-- with its own infix constructor so combinator results can be chained
-- (e.g. @a :*: b :*: c@).
data a :*: b = (:*:) a b
  deriving (Eq,Ord,Show)
-- | Lens access ('_1') to the first component of ':*:'.
instance Field1 (a :*: b) (a' :*: b) a a' where
  _1 k (a :*: b) = indexed k (0 :: Int) a <&> (:*: b)
-- | Lens access ('_2') to the second component of ':*:'.
instance Field2 (a :*: b) (a :*: b') b b' where
  _2 k (a :*: b) = (a :*:) <$> indexed k (1 :: Int) b
-- | A recognizer inspects a value of type @t@ and either produces an
-- @a@ or fails in @m@.
type Recognizer m t a = t -> m a
-- | Tries both recognizers in order.  (Defined locally and not
-- exported; see the module export list.)
(<>) :: Alternative f => Recognizer f t a -> Recognizer f t a -> Recognizer f t a
(<>) f g t = f t <|> g t
-- | Recognizes the head and tail of a list, and returns head.
(<:) :: Monad f
     => Recognizer f t a -> Recognizer f [t] () -> Recognizer f [t] a
(<:) f g (h:r) = do x <- f h; _ <- g r; return x
(<:) _ _ [] = fail "empty-list"
-- | Recognizes the head and tail of a list, and returns both results
-- as a pair.
(<:>) :: Monad f
     => Recognizer f t a -> Recognizer f [t] b -> Recognizer f [t] (a :*: b)
(<:>) f g (h:r) = do x <- f h; y <- g r; return (x :*: y)
(<:>) _ _ [] = fail "empty-list"
-- | Recognizes the empty list.
emptyl :: Monad m => Recognizer m [t] ()
emptyl [] = return ()
emptyl _ = fail "non-empty"
-- | Recognizes a singleton list whose sole element satisfies the given
-- recognizer.
endl :: Monad f => Recognizer f t a -> Recognizer f [t] a
endl f = f <: emptyl
-- | Matches a term whose top node is a flat term form, returning it.
asFTermF :: (Monad f, Termlike t) => Recognizer f t (FlatTermF t)
asFTermF (unwrapTermF -> FTermF ftf) = return ftf
asFTermF _ = fail "not ftermf"
-- | Matches a reference to a global definition, returning its identifier.
asGlobalDef :: (Monad f, Termlike t) => Recognizer f t Ident
asGlobalDef t = do GlobalDef i <- asFTermF t; return i
-- | Succeeds (with unit) only if the term refers to the given global.
isGlobalDef :: (Monad f, Termlike t) => Ident -> Recognizer f t ()
isGlobalDef i t = do
  o <- asGlobalDef t
  if i == o then return () else fail ("not " ++ show i)
-- | Matches a single application node, returning (function, argument).
asApp :: (Monad f, Termlike t) => Recognizer f t (t, t)
asApp (unwrapTermF -> App x y) = return (x, y)
asApp _ = fail "not app"
-- | Recognizes an application: runs the first recognizer on the
-- function position and the second on the argument, returning both.
(<@>) :: (Monad f, Termlike t)
      => Recognizer f t a -> Recognizer f t b -> Recognizer f t (a :*: b)
(<@>) f g t = do
  (a,b) <- asApp t
  liftM2 (:*:) (f a) (g b)
-- | Recognizes a function application, and returns argument.
(@>) :: (Monad f, Termlike t) => Recognizer f t () -> Recognizer f t b -> Recognizer f t b
(@>) f g t = do
  (x, y) <- asApp t
  liftM2 (const id) (f x) (g y)
-- | Strips all outermost applications, returning the head term and its
-- arguments in application order.
asApplyAll :: Termlike t => t -> (t, [t])
asApplyAll = go []
  where go xs t =
          case asApp t of
            Nothing -> (t, xs)
            Just (t', x) -> go (x : xs) t'
-- | Matches a pair value, returning its two components.
asPairValue :: (Monad m, Termlike t) => Recognizer m t (t, t)
asPairValue t = do
  ftf <- asFTermF t
  case ftf of
    PairValue x y -> return (x, y)
    _ -> fail "asPairValue"
-- | Matches a pair projection; the Bool is False for the left
-- component and True for the right.
asPairSelector :: (Monad m, Termlike t) => Recognizer m t (t, Bool)
asPairSelector t = do
  ftf <- asFTermF t
  case ftf of
    PairLeft x -> return (x, False)
    PairRight x -> return (x, True)
    _ -> fail "asPairSelector"
-- | Matches a right-nested pair type as a flat tuple type, collecting
-- the component types (the unit type is the empty tuple).
asTupleType :: (Monad m, Termlike t) => Recognizer m t [t]
asTupleType t = do
  ftf <- asFTermF t
  case ftf of
    UnitType -> return []
    PairType x y -> do xs <- asTupleType y; return (x : xs)
    _ -> fail "asTupleType"
-- | Matches a right-nested pair value as a flat tuple value.
asTupleValue :: (Monad m, Termlike t) => Recognizer m t [t]
asTupleValue t = do
  ftf <- asFTermF t
  case ftf of
    UnitValue -> return []
    PairValue x y -> do xs <- asTupleValue y; return (x : xs)
    _ -> fail "asTupleValue"
-- | Matches nested pair projections as a 1-based tuple selection,
-- returning the tuple term and the selected index.
asTupleSelector :: (Monad m, Termlike t) => Recognizer m t (t, Int)
asTupleSelector t = do
  ftf <- asFTermF t
  case ftf of
    PairLeft x -> return (x, 1)
    PairRight y -> do (x, i) <- asTupleSelector y; return (x, i+1)
    _ -> fail "asTupleSelector"
-- | Matches a record type, returning its field-name-to-type map.
asRecordType :: (Monad m, Termlike t) => Recognizer m t (Map FieldName t)
asRecordType t = do RecordType m <- asFTermF t; return m
-- | Matches a record value, returning its field-name-to-value map.
asRecordValue :: (Monad m, Termlike t) => Recognizer m t (Map FieldName t)
asRecordValue t = do RecordValue m <- asFTermF t; return m
-- | Matches a record field selection, returning the record term and
-- the selected field name.
asRecordSelector :: (Monad m, Termlike t) => Recognizer m t (t, FieldName)
asRecordSelector t = do RecordSelector u i <- asFTermF t; return (u,i)
-- | Matches a constructor application, returning the constructor
-- identifier and its arguments.
asCtor :: (Monad f, Termlike t) => Recognizer f t (Ident, [t])
asCtor t = do CtorApp c l <- asFTermF t; return (c,l)
-- | Matches a data-type application, returning the type identifier and
-- its arguments.
asDataType :: (Monad f, Termlike t) => Recognizer f t (Ident, [t])
asDataType t = do DataTypeApp c l <- asFTermF t; return (c,l)
-- | Matches an application of the given data type, running the inner
-- recognizer on its argument list.
isDataType :: (Monad f, Termlike t) => Ident -> Recognizer f [t] a -> Recognizer f t a
isDataType i p t = do
  (o,l) <- asDataType t
  if i == o then p l else fail "not datatype"
-- | Matches a natural-number literal.
asNatLit :: (Monad f, Termlike t) => Recognizer f t Nat
asNatLit t = do NatLit i <- asFTermF t; return (fromInteger i)
-- | Matches a string literal.
asStringLit :: (Monad f, Termlike t) => Recognizer f t String
asStringLit t = do StringLit i <- asFTermF t; return i
-- | Matches a lambda, returning (bound name, bound type, body).
asLambda :: (Monad m, Termlike t) => Recognizer m t (String, t, t)
asLambda (unwrapTermF -> Lambda s ty body) = return (s, ty, body)
asLambda _ = fail "not a lambda"
-- | Strips all leading lambdas, returning the binders in order along
-- with the remaining body.
asLambdaList :: Termlike t => t -> ([(String, t)], t)
asLambdaList = go []
  where go r (asLambda -> Just (nm,tp,rhs)) = go ((nm,tp):r) rhs
        go r rhs = (reverse r, rhs)
-- | Matches a Pi term, returning (bound name, bound type, body).
asPi :: (Monad m, Termlike t) => Recognizer m t (String, t, t)
asPi (unwrapTermF -> Pi nm tp body) = return (nm, tp, body)
asPi _ = fail "not a Pi term"
-- | Decomposes a term into a list of pi bindings, followed by a right
-- term that is not a pi binding.
asPiList :: Termlike t => t -> ([(String, t)], t)
asPiList = go []
  where go r (asPi -> Just (nm,tp,rhs)) = go ((nm,tp):r) rhs
        go r rhs = (reverse r, rhs)
-- | Matches a local (de Bruijn indexed) variable reference.
asLocalVar :: (Monad m, Termlike t) => Recognizer m t DeBruijnIndex
asLocalVar (unwrapTermF -> LocalVar i) = return i
asLocalVar _ = fail "not a local variable"
-- | Returns term as a constant Boolean if it is one.
asBool :: (Monad f, Termlike t) => Recognizer f t Bool
asBool (asCtor -> Just ("Prelude.True", [])) = return True
asBool (asCtor -> Just ("Prelude.False", [])) = return False
asBool _ = fail "not bool"
-- | Recognizes the @Prelude.Bool@ data type (applied to no arguments).
asBoolType :: (Monad f, Termlike t) => Recognizer f t ()
asBoolType = isDataType "Prelude.Bool" emptyl
-- | Matches @Prelude.Vec n t@, returning the length and element-type
-- terms unexamined.
asVectorType :: (Monad f, Termlike t) => Recognizer f t (t, t)
asVectorType = isDataType "Prelude.Vec" r
  where r [n, t] = return (n, t)
        r _ = fail "asVectorType: wrong number of arguments"
-- | Matches @Prelude.Vec n t@ where the length is a literal, running
-- the given recognizer on the element type.
isVecType :: (Monad f, Termlike t)
          => Recognizer f t a -> Recognizer f t (Nat :*: a)
isVecType tp = isDataType "Prelude.Vec" (asNatLit <:> endl tp)
-- | Matches @Prelude.Vec n t@ with a literal length, returning the
-- element type term itself.
asVecType :: (Monad f, Termlike t) => Recognizer f t (Nat :*: t)
asVecType = isVecType return
-- | Matches a bitvector type in either spelling --
-- @Prelude.bitvector n@ or @Prelude.Vec n Prelude.Bool@ -- returning
-- the literal width.
asBitvectorType :: (Alternative f, Monad f, Termlike t) => Recognizer f t Nat
asBitvectorType =
  (isGlobalDef "Prelude.bitvector" @> asNatLit)
  <> isDataType "Prelude.Vec"
                (asNatLit <: endl (isDataType "Prelude.Bool" emptyl))
-- | Matches an application of @Prelude.ite@, returning its four
-- argument terms (presumably type, condition, then, else -- confirm
-- against the SAWCore Prelude).
asMux :: (Monad f, Termlike t) => Recognizer f t (t :*: t :*: t :*: t)
asMux = isGlobalDef "Prelude.ite" @> return <@> return <@> return <@> return
| iblumenfeld/saw-core | src/Verifier/SAW/Recognizer.hs | bsd-3-clause | 7,871 | 166 | 9 | 1,849 | 3,202 | 1,740 | 1,462 | 187 | 3 |
-----------------------------------------------------------------------------
-- |
-- Copyright : (C) 2015 Dimitri Sabadie
-- License : BSD3
--
-- Maintainer : Dimitri Sabadie <dimitri.sabadie@gmail.com>
-- Stability : experimental
-- Portability : portable
--
----------------------------------------------------------------------------
module Graphics.Rendering.IGL.Texture (
-- *
) where
import Foreign.Marshal.Array
import Graphics.GL
import Graphics.Rendering.IGL.GL
import Numeric.Natural ( Natural )
-- | Opaque handle to an OpenGL texture object, wrapping the raw GL name.
newtype Texture = Texture { textureID :: GLuint }
-- | Ask OpenGL to generate @n@ fresh texture objects and wrap the
-- resulting raw names as 'Texture' handles.
genTextures :: Natural -> GL i i [Texture]
genTextures n = wrapGL $
  allocaArray count $ \ptr -> do
    glGenTextures (fromIntegral n) ptr
    fmap (map Texture) (peekArray count ptr)
  where
    -- Number of array slots to allocate and read back.
    count = fromIntegral n
-- | Delete the given texture objects on the GL side.
deleteTextures :: [Texture] -> GL i i ()
deleteTextures textures = wrapGL $
  withArrayLen (map textureID textures) $ \len ptr ->
    glDeleteTextures (fromIntegral len) ptr
-- | Types denoting an OpenGL texture target, convertible to the
-- corresponding @GL_TEXTURE_*@ enum value.
class TextureTarget t where
  fromTextureTarget :: t -> GLenum
-- Singleton proxy types, one per OpenGL texture target, used to select
-- a 'TextureTarget' instance.
data Texture1D = Texture1D deriving (Eq,Show)
data Texture2D = Texture2D deriving (Eq,Show)
data Texture3D = Texture3D deriving (Eq,Show)
data TextureRectangle = TextureRectangle deriving (Eq,Show)
data TextureBuffer = TextureBuffer deriving (Eq,Show)
data TextureCubeMap = TextureCubeMap deriving (Eq,Show)
data Texture1DArray = Texture1DArray deriving (Eq,Show)
data Texture2DArray = Texture2DArray deriving (Eq,Show)
data TextureCubeMapArray = TextureCubeMapArray deriving (Eq,Show)
data Texture2DMultiSample = Texture2DMultiSample deriving (Eq,Show)
data Texture2DMultiSampleArray = Texture2DMultiSampleArray deriving (Eq,Show)
-- One instance per target singleton; each ignores its argument and
-- yields the matching GL_TEXTURE_* enum.
instance TextureTarget Texture1D where
  fromTextureTarget _ = GL_TEXTURE_1D
instance TextureTarget Texture2D where
  fromTextureTarget _ = GL_TEXTURE_2D
instance TextureTarget Texture3D where
  fromTextureTarget _ = GL_TEXTURE_3D
instance TextureTarget TextureRectangle where
  fromTextureTarget _ = GL_TEXTURE_RECTANGLE
instance TextureTarget TextureBuffer where
  fromTextureTarget _ = GL_TEXTURE_BUFFER
instance TextureTarget TextureCubeMap where
  fromTextureTarget _ = GL_TEXTURE_CUBE_MAP
instance TextureTarget Texture1DArray where
  fromTextureTarget _ = GL_TEXTURE_1D_ARRAY
instance TextureTarget Texture2DArray where
  fromTextureTarget _ = GL_TEXTURE_2D_ARRAY
instance TextureTarget TextureCubeMapArray where
  fromTextureTarget _ = GL_TEXTURE_CUBE_MAP_ARRAY
instance TextureTarget Texture2DMultiSample where
  fromTextureTarget _ = GL_TEXTURE_2D_MULTISAMPLE
instance TextureTarget Texture2DMultiSampleArray where
  fromTextureTarget _ = GL_TEXTURE_2D_MULTISAMPLE_ARRAY
| phaazon/igl | src/Graphics/Rendering/IGL/Texture.hs | bsd-3-clause | 2,670 | 0 | 15 | 332 | 606 | 325 | 281 | 50 | 1 |
-- Copyright (c) 2016 Eric McCorkle. All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
--
-- 1. Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
--
-- 2. Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
-- 3. Neither the name of the author nor the names of any contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS''
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
-- TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-- PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS
-- OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
-- USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
-- OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-- SUCH DAMAGE.
{-# OPTIONS_GHC -funbox-strict-fields -Wall -Werror #-}
-- | This module contains code that converts FlatIR types into LLVM types.
module IR.FlatIR.LLVMGen.Types(
toLLVMType,
genTypeDefs
) where
import Data.Array
import Data.Graph.Inductive.Graph
import IR.Common.Ptr
import IR.FlatIR.Syntax
import Prelude hiding (mapM_, mapM, foldr, foldl, sequence)
import qualified Data.ByteString.UTF8 as Strict
import qualified LLVM.General.AST as LLVM
import qualified LLVM.General.AST.AddrSpace as LLVM
import qualified LLVM.General.AST.Type as LLVM
-- | Generate the LLVM type for a given Flat IR type.
toLLVMType :: Graph gr =>
              Module tagty typedesc gr
           -- ^ The FlatIR module being translated
           -> Type tagty
           -- ^ The FlatIR type to translate.
           -> LLVM.Type
           -- ^ The corresponding LLVM type
-- Function types: translate return and argument types; FlatIR
-- functions are never varargs.
toLLVMType m FuncType { funcTyRetTy = retty, funcTyArgTys = argtys } =
  LLVM.FunctionType { LLVM.resultType = toLLVMType m retty,
                      LLVM.argumentTypes = map (toLLVMType m) argtys,
                      LLVM.isVarArg = False }
-- Structures: translate every field type, preserving packedness.
toLLVMType m StructType { structPacked = packed, structFields = fields } =
  LLVM.StructureType { LLVM.elementTypes =
                         [ toLLVMType m fieldty
                         | (_, _, fieldty) <- elems fields ],
                       LLVM.isPacked = packed }
-- Arrays with a known length carry the length through.
toLLVMType m ArrayType { arrayLen = Just size, arrayElemTy = elemty } =
  LLVM.ArrayType { LLVM.nArrayElements = fromIntegral size,
                   LLVM.elementType = toLLVMType m elemty }
-- Arrays with unknown length become zero-length LLVM arrays.
toLLVMType m ArrayType { arrayLen = Nothing, arrayElemTy = elemty } =
  LLVM.ArrayType { LLVM.nArrayElements = 0,
                   LLVM.elementType = toLLVMType m elemty }
-- Native pointers become plain LLVM pointers in address space 0.
toLLVMType m PtrType { ptrTy = Native { nativeTy = pointee } } =
  LLVM.PointerType { LLVM.pointerReferent = toLLVMType m pointee,
                     LLVM.pointerAddrSpace = LLVM.AddrSpace 0 }
-- Tagged pointers become pointers to the named LLVM type generated for
-- the tag's underlying type definition (anonymous definitions use a
-- numbered name derived from the tag id).
toLLVMType Module { modTypes = types, modTags = tags }
           PtrType { ptrTy = Tagged { taggedTag = tagid } } =
  let
    TagDesc { tagDescTy = tname } = tags ! tagid
    refname =
      case types ! tname of
        Anon {} -> LLVM.UnName $! fromIntegral $! fromEnum tagid
        Name { nameStr = bstr } -> LLVM.Name (Strict.toString bstr)
        TypeDef { typeDefStr = bstr } -> LLVM.Name (Strict.toString bstr)
  in
    LLVM.PointerType { LLVM.pointerReferent =
                         LLVM.NamedTypeReference $! refname,
                       LLVM.pointerAddrSpace = LLVM.AddrSpace 0 }
-- References to named types become LLVM named-type references.
toLLVMType Module { modTypes = types } IdType { idName = tyid } =
  LLVM.NamedTypeReference $!
    case types ! tyid of
      Anon {} -> LLVM.UnName $! fromIntegral $! fromEnum tyid
      Name { nameStr = bstr } -> LLVM.Name (Strict.toString bstr)
      TypeDef { typeDefStr = bstr } -> LLVM.Name (Strict.toString bstr)
-- Common integer widths map directly onto LLVM's pre-built types.
toLLVMType _ IntType { intSize = 1 } = LLVM.i1
toLLVMType _ IntType { intSize = 8 } = LLVM.i8
toLLVMType _ IntType { intSize = 16 } = LLVM.i16
toLLVMType _ IntType { intSize = 32 } = LLVM.i32
toLLVMType _ IntType { intSize = 64 } = LLVM.i64
toLLVMType _ IntType { intSize = size } =
  LLVM.IntegerType { LLVM.typeBits = fromIntegral size }
-- Only these floating-point widths exist in LLVM.
toLLVMType _ FloatType { floatSize = 16 } = LLVM.half
toLLVMType _ FloatType { floatSize = 32 } = LLVM.float
toLLVMType _ FloatType { floatSize = 64 } = LLVM.double
toLLVMType _ FloatType { floatSize = 80 } = LLVM.x86_fp80
toLLVMType _ FloatType { floatSize = 128 } = LLVM.fp128
toLLVMType _ FloatType { floatSize = n } =
  error ("Cannot generate floating point type with " ++ show n ++ " bits")
-- The unit type is represented as an empty, unpacked struct.
toLLVMType _ UnitType {} = LLVM.StructureType { LLVM.elementTypes = [],
                                                LLVM.isPacked = False }
-- | Generate LLVM type definitions for every type declared in a FlatIR
-- module: anonymous, name-only, and fully-defined types.  (The original
-- header comment was copy-pasted from 'toLLVMType' and described the
-- wrong thing.)
genTypeDefs :: (MonadIO m, MonadDebug m, Graph gr) =>
               Module tagty typedesc gr
            -- ^ The FlatIR module being translated
            -> m [LLVM.Definition]
            -- ^ The corresponding LLVM type definitions
genTypeDefs m @ Module { modTypes = types } = mapM define (assocs types)
  where
    -- Anonymous definitions get a nameless (numbered) entry.
    define (tyid, tydef @ Anon { anonTy = ty }) =
      do
        genTypeDefMD tyid tydef
        return $! LLVM.TypeDefinition (LLVM.UnName $! fromIntegral $!
                                       fromEnum tyid)
                                      (Just $! toLLVMType m ty)
    -- Name-only definitions get an empty type definition.
    define (tyid, tydef @ Name { nameStr = bstr }) =
      do
        genTypeDefMD tyid tydef
        return $! LLVM.TypeDefinition (LLVM.Name (Strict.toString bstr)) Nothing
    -- Named definitions get both a name and a translated body.
    define (tyid, tydef @ TypeDef { typeDefStr = bstr, typeDefTy = ty }) =
      do
        genTypeDefMD tyid tydef
        return $! LLVM.TypeDefinition (LLVM.Name (Strict.toString bstr))
                                      (Just $! toLLVMType m ty)
| emc2/iridium | src/IR/FlatIR/LLVMGen/Types.hs | bsd-3-clause | 6,684 | 4 | 18 | 1,668 | 1,495 | 811 | 684 | 96 | 5 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
module Duckling.Time.HE.Tests
( tests ) where
import Data.String
import Prelude
import Test.Tasty
import Duckling.Dimensions.Types
import Duckling.Time.HE.Corpus
import Duckling.Testing.Asserts
-- | Top-level test tree: runs the shared corpus assertions for the
-- Hebrew (HE) time dimension.
tests :: TestTree
tests = testGroup "HE Tests"
  [ makeCorpusTest [This Time] corpus
  ]
| rfranek/duckling | tests/Duckling/Time/HE/Tests.hs | bsd-3-clause | 590 | 0 | 9 | 95 | 80 | 51 | 29 | 11 | 1 |
import Test.QuickCheck
-- | Reverse a list.
--
-- Uses an accumulator so the whole reversal is O(n).  The naive
-- formulation @myReverse xs ++ [x]@ is O(n^2), because each @(++)@
-- re-walks the already-reversed prefix.
myReverse :: [a] -> [a]
myReverse = go []
  where
    -- Move elements one at a time from the input onto the front of the
    -- accumulator; when the input is exhausted, the accumulator holds
    -- the input in reverse order.
    go acc []     = acc
    go acc (x:xs) = go (x:acc) xs
-- | Property: reversing a list twice yields the original list.
prop_revReverse :: [Int] -> Bool
prop_revReverse xs = myReverse (myReverse xs) == xs
-- | Property: reversal distributes over concatenation, swapping the
-- operands.
prop_revConcat :: [Int] -> [Int] -> Bool
prop_revConcat xs ys =
  (myReverse ys ++ myReverse xs) == myReverse (xs ++ ys)
-- Entry point: run both QuickCheck properties on random inputs.
main = do
    quickCheck prop_revReverse
    quickCheck prop_revConcat
| SandeepTuniki/99-Haskell-Problems | src/problem5.hs | bsd-3-clause | 384 | 0 | 9 | 73 | 162 | 83 | 79 | 11 | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.