code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
-- Problems/Problem055.hs
module Problems.Problem055 (p55) where
-- | Convenience entry point: print the answer to Project Euler problem 55.
main = print p55
-- | Count the numbers below 10000 for which the reverse-and-add search
-- ('isLynchel', started at iteration 0) never reaches a palindrome,
-- i.e. the Lychrel candidates of Project Euler problem 55.
p55 :: Int
p55 = length [ n | n <- [1 .. 9999], not (isLynchel n 0) ]
-- | One reverse-and-add search step (the name is kept from the original
-- source; the usual spelling is "Lychrel").  Returns True as soon as a
-- reverse-and-add step produces a palindrome, and False once 50
-- iterations have been tried without success.
isLynchel :: Integer -> Int -> Bool
isLynchel n step
    | step >= 50        = False                     -- give up: treat as Lychrel
    | isPalindrom next  = True                      -- palindrome reached
    | otherwise         = isLynchel next (step + 1)
    where next = n + reverseInt n
-- | A number is a palindrome when it equals its own digit reversal.
-- Only meaningful for non-negative inputs ('reverseInt' uses show/read).
isPalindrom :: Integer -> Bool
isPalindrom num = num == (reverseInt num)
-- | Reverse the decimal digits of a (non-negative) integer, e.g.
-- 123 -> 321 and 470 -> 74 (leading zeros vanish on re-parse).
reverseInt :: Integer -> Integer
reverseInt n = read (reverse (show n))
| Sgoettschkes/learning | haskell/ProjectEuler/src/Problems/Problem055.hs | mit | 493 | 0 | 11 | 106 | 192 | 99 | 93 | 14 | 1 |
module Main where
import qualified Data.Map as M
-- | Known errors-per-line figures, keyed by author name.
-- (Explicit signature pins the type the original binding defaulted to.)
errorsPerLine :: M.Map String Integer
errorsPerLine =
    M.fromList $ zip ["Chris", "Don", "Simon"] [472, 100, -5]
-- | Ask the user for a name and report that person's errors-per-line
-- figure, if present in the 'errorsPerLine' table.
main = do putStrLn "Who are you?"
          name <- getLine
          -- Total lookup: both the known and the unknown case are handled.
          case M.lookup name errorsPerLine of
            Nothing -> putStrLn "I don't know you"
            Just n -> do putStr "Errors per line: "
                         print n | zaqwes8811/micro-apps | buffer/haskell/hello.hs | mit | 383 | 0 | 12 | 138 | 113 | 59 | 54 | 10 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module TestSuite where
import Control.Concurrent
import Control.Monad
import Control.Applicative
import Control.Monad.IO.Class
import Data.Either
import Data.Maybe
import Hexdump
import Test.Hspec
import Test.Hspec.HUnit
import Test.HUnit
import Data.ByteString(ByteString)
import qualified Data.ByteString.Char8 as S
import qualified Data.ByteString.Lazy as L
import qualified Data.ByteString.Base64 as B64
import System.Gearman.Connection
import System.Gearman.Error
import System.Gearman.Protocol
import System.Gearman.Worker
-- | testConnectEcho connects to the provided host and port, sends a simple
-- ECHO_REQ packet, and verifies the result. Returns Nothing on success,
-- Just GearmanError on failure.
testConnectEcho :: String -> String -> IO (Maybe GearmanError)
testConnectEcho h p = do
    c <- connect h p
    case c of
        -- Could not connect: surface the error to the caller.
        Left x -> return $ Just x
        -- Connected: an ECHO_REQ round-trip decides success.
        Right x -> echo x ["ping"]
-- | Smoke test: a gearman server on localhost:4730 must answer an echo.
-- Fails (via 'assertBool') when no server is running.
testConnectivity = do
    c <- testConnectEcho "localhost" "4730"
    assertBool "connect failed" (isNothing c)
    return ()
-- | Round-trip a no-argument job: register the "hello" worker, submit a
-- job, and compare the server's result against calling 'helloFunc'
-- directly.
testHello :: IO ()
testHello = do
    _ <- registerFuncsAndWork [registerHelloTriple]
    result <- newEmptyMVar
    requestWork "hello" "" "" result
    result' <- readMVar result
    -- helloFunc ignores its Job argument, so 'undefined' is safe here
    -- -- TODO confirm it never forces the argument.
    (Right expected) <- helloFunc undefined
    -- NOTE(review): (!! 1) is partial; an empty/short response crashes
    -- the test rather than failing it cleanly.
    checkMatch (result' !! 1) expected
    return ()
-- | Round-trip a one-argument job: the "reverse" worker must return the
-- input string reversed.
testReverse :: IO ()
testReverse = replicateM_ 1 $ do
    _ <- registerFuncsAndWork [registerReverseTriple]
    result <- newEmptyMVar
    let input = "this is a string"
    requestWork "reverse" "" input result
    result' <- readMVar result
    let expected = L.reverse input
    -- NOTE(review): (!! 1) is partial; see testHello.
    checkMatch (result' !! 1) expected
    return ()
-- | Send a base64-encoded Nagios perfdata blob to the "testfunc" worker
-- and print whatever comes back.  Needs an external collector, so it is
-- not wired into the suite below.
testGoodNagios :: IO ()
testGoodNagios = do
    maBox <- newEmptyMVar
    let maString = B64.encode "\"host_name=kvm33.syd1.anchor.net.au\ncore_start_time=1405044854.0\nstart_time=1405044867.223223\nfinish_time=1405044867.347834\nreturn_code=0\nexited_ok=1\nservice_description=procs\noutput=PROCS OK: 796 processes |procs=796;;\\n\n\n\n\n\"" :: ByteString
    requestWork "testfunc" "" (L.fromStrict maString) maBox
    maResult <- readMVar maBox
    putStrLn $ concat ["Ma result was: ", show maResult]
-- | Apply the i-th function to the i-th argument, truncating at the
-- shorter list.  This is exactly 'zipWith' ($); the previous hand-rolled
-- recursion duplicated it.
apply :: [(a -> m b)]-> [a] -> [m b]
apply = zipWith ($)
-- | Assert equality with a diagnostic message showing both values.
checkMatch expected received = assertBool (concat ["incorrect output, expected: ", show expected, " , received: ", show received]) (expected == received)
-- | Fork a worker thread that registers the given (name, func, timeout)
-- triples with the local gearman server and then processes jobs forever.
registerFuncsAndWork funcRegTriples = forkIO $ do
    runGearman "localhost" "4730" $ work funcRegTriples >> return ()
-- | Submit a job from a background client thread; the work result is
-- written into 'resultBox'.
requestWork funcName uniqId args resultBox = do
    forkIO $ runGearman "localhost" "4730" $ do
        let request = buildSubmitJob funcName uniqId args
        sendPacket request
        -- The first response (job created) is discarded; only the
        -- second carries the result payload -- TODO confirm protocol.
        _ <- receiveResponse
        ret <- receiveResponse
        liftIO $ putMVar resultBox ret
    return ()
-- | Receive one packet from the server (client side) and return its
-- payload; a decoding failure is logged and yields the empty list.
receiveResponse = do
    creationResponse <- recvPacket DomainClient
    case creationResponse of
        Left e -> do
            liftIO $ putStrLn $ concat ["Asked server to give a response and it died: ", show e]
            return []
        Right (GearmanPacket _ _ handle) -> do
            return handle
-- Registration triples: (function name, implementation, optional timeout).
registerHelloTriple = ("hello", helloFunc, Nothing)
registerReverseTriple = ("reverse", reverseFunc, Nothing)
-- | Worker that ignores its job payload and returns a constant string.
helloFunc :: WorkerFunc
helloFunc _ = return $ Right "Hello World"
-- | Worker that returns the raw job payload reversed.
reverseFunc :: WorkerFunc
reverseFunc Job{jobData = jd} = return $ Right $ L.reverse jd
-- | The hspec suite.  All active tests require a gearman server
-- listening on localhost:4730; the Nagios test stays disabled for CI.
suite :: Spec
suite = do
    -- This test is commented out for travis (it doesn't run a nagios gearman collector)
    -- describe "Nagios Perfdata" $ do
    -- it "dos that thing" $ do
    -- testGoodNagios
    describe "Connectivity" $ do
        it "connects to a local gearman server on localhost:4730" $ do
            testConnectivity
    describe "Basics - no parameter funcs" $ do
        it "can Hello World!" $ do
            testHello
    describe "Basics - one parameter funcs" $ do
        it "can reverse strings" $ do
            testReverse
| anchor/gearman-haskell | tests/TestSuite.hs | mit | 4,050 | 0 | 15 | 888 | 1,063 | 525 | 538 | 96 | 2 |
module Main where
import Data.Maybe (fromMaybe)
import Data.Monoid ((<>))
import System.Environment (lookupEnv)
import System.Log.Logger
import Text.Read (readMaybe)

import Persistence
import Server
-- | Wire everything together: read configuration from the environment,
-- build a database connection pool, and start the HTTP server.
main :: IO ()
main = do
  let comp = "HComments.Main"
  mPort <- lookupEnv "PORT"
  -- Fall back to 8080 when $PORT is unset *or* unparsable.  The
  -- previous partial 'read' crashed at startup on a malformed value.
  let port = fromMaybe 8080 (mPort >>= readMaybe)
  let defaultDbConnection = "host=127.0.0.1 user=postgres dbname=postgres port=5432"
  mDbConnection <- lookupEnv "DB_CONNECTION"
  let dbConnection = fromMaybe defaultDbConnection mDbConnection
  warningM comp $ "Connecting to DB with " <> dbConnection
  pool <- createDefaultPool dbConnection
  warningM comp $ "Starting server on port " <> show port
  startServer port pool
| ilya-murzinov/hcomments | app/Main.hs | mit | 776 | 0 | 12 | 208 | 185 | 91 | 94 | 19 | 1 |
-- | Data.Graph is sorely lacking in several ways. This module tries to fill in
-- some holes and provide a more convenient interface
{-# LANGUAGE RecursiveDo #-}
module Util.Graph(
Graph(),
fromGraph,
newGraph,
newGraph',
newGraphReachable,
reachableFrom,
Util.Graph.reachable,
fromScc,
findLoopBreakers,
sccGroups,
Util.Graph.scc,
sccForest,
Util.Graph.dff,
Util.Graph.components,
Util.Graph.topSort,
cyclicNodes,
toDag,
restitchGraph,
mapGraph,
transitiveClosure,
transitiveReduction
) where
import Control.Monad
import Control.Monad.ST
import Data.Array.IArray
import Data.Array.ST hiding(unsafeFreeze)
import Data.Array.Unsafe (unsafeFreeze)
import Data.Graph hiding(Graph)
import Data.Maybe
import GenUtil
import List(sort,sortBy,group,delete)
import qualified Data.Graph as G
import qualified Data.Map as Map
-- | A graph over nodes of type @n@: the underlying 'G.Graph' works on
-- 'Vertex' indices, and the table maps each vertex back to its node.
data Graph n = Graph G.Graph (Table n)
-- | Shown via the strongly connected components, the most readable
-- summary of the structure.
instance Show n => Show (Graph n) where
    showsPrec n g = showsPrec n (Util.Graph.scc g)
-- | Recover the adjacency-list view: every node paired with the nodes
-- it has edges to.
fromGraph :: Graph n -> [(n,[n])]
fromGraph (Graph g lv) =
    map (\(v, vs) -> (lv ! v, map (lv !) vs)) (assocs g)
-- | Like 'newGraph'' but discarding the key-to-vertex map.
newGraph :: Ord k => [n] -> (n -> k) -> (n -> [k]) -> (Graph n)
newGraph ns a b = snd $ newGraph' ns a b
-- | Build a graph and also return a function that maps a set of keys to
-- all nodes reachable from them (keys without a node are ignored).
newGraphReachable :: Ord k => [n] -> (n -> k) -> (n -> [k]) -> ([k] -> [n],Graph n)
newGraphReachable ns fn fd = (rable,ng) where
    (vmap,ng) = newGraph' ns fn fd
    rable ks = Util.Graph.reachable ng [ v | Just v <- map (flip Map.lookup vmap) ks ]
-- | One-shot version of 'newGraphReachable': which nodes are reachable
-- from the given keys?
reachableFrom :: Ord k => (n -> k) -> (n -> [k]) -> [n] -> [k] -> [n]
reachableFrom fn fd ns = fst $ newGraphReachable ns fn fd
-- | Build a graph from a list of nodes uniquely identified by keys,
-- with a list of keys of nodes this node should have edges to.
-- The out-list may contain keys that don't correspond to
-- nodes of the graph; they are ignored.
newGraph' :: Ord k => [n] -> (n -> k) -> (n -> [k]) -> (Map.Map k Vertex,Graph n)
newGraph' ns fn fd = (kmap,Graph graph nr) where
    -- vertex -> node table
    nr = listArray bounds0 ns
    max_v = length ns - 1
    bounds0 = (0,max_v) :: (Vertex, Vertex)
    -- key -> vertex; with duplicate keys Map.fromList keeps the later
    -- entry -- TODO confirm that is the intended tie-break.
    kmap = Map.fromList [ (fn n,i) | (i,n) <- zip [0 ..] ns ]
    -- out-edges per node, deduplicated; unknown keys are dropped here.
    graph = listArray bounds0 [mapMaybe (flip Map.lookup kmap) (snub $ fd n) | n <- ns]
-- | Flatten an SCC result: 'Left' is a single acyclic node, 'Right' is
-- the node list of a cycle.
fromScc :: Either a [a] -> [a]
fromScc = either (: []) id
-- | determine a set of loopbreakers subject to a fitness function
-- loopbreakers have a minimum of their incoming edges ignored.
findLoopBreakers
    :: (n -> Int) -- ^ fitness function, greater numbers mean more likely to be a loopbreaker
    -> (n -> Bool) -- ^ whether a node is suitable at all for a choice as loopbreaker
    -> Graph n -- ^ the graph
    -> ([n],[n]) -- ^ (loop breakers,dependency ordered nodes after loopbreaking)
findLoopBreakers func ex (Graph g ln) = ans where
    scc = G.scc g
    -- Repeatedly strip acyclic singleton SCCs; when a cycle remains,
    -- delete the fittest eligible node (a loop breaker) and re-run SCC.
    ans = f g scc [] [] where
        -- Singleton SCC: a self-edge means a 1-cycle, so the node itself
        -- becomes a loop breaker; otherwise it is dependency-ordered.
        f g (Node v []:sccs) fs lb
            | v `elem` g ! v = let ng = (fmap (List.delete v) g) in f ng (G.scc ng) [] (v:lb)
            | otherwise = f g sccs (v:fs) lb
        -- Proper cycle: pick the member with the highest fitness among
        -- those the predicate allows, remove it, and start over.
        f g (n:_) fs lb = f ng (G.scc ng) [] (mv:lb) where
            mv = case sortBy (\ a b -> compare (snd b) (snd a)) [ (v,func (ln!v)) | v <- ns, ex (ln!v) ] of
                ((mv,_):_) -> mv
                [] -> error "findLoopBreakers: no valid loopbreakers"
            ns = dec n []
            ng = fmap (List.delete mv) g
        -- Done: deduplicate the breakers, restore dependency order.
        f _ [] xs lb = (map ((ln!) . head) (group $ sort lb),reverse $ map (ln!) xs)
        dec (Node v ts) vs = v:foldr dec vs ts
-- | All nodes reachable from the given vertices (deduplicated).
reachable :: Graph n -> [Vertex] -> [n]
reachable (Graph g ln) vs = map (ln!) $ snub $ concatMap (G.reachable g) vs
-- | The strongly connected components as plain node lists, dropping the
-- cyclic/acyclic distinction that 'Util.Graph.scc' makes.
sccGroups :: Graph n -> [[n]]
sccGroups = map fromScc . Util.Graph.scc
-- | Strongly connected components: 'Left' for an acyclic singleton,
-- 'Right' for the nodes of a cycle (a singleton with a self-edge counts
-- as a cycle).
scc :: Graph n -> [Either n [n]]
scc (Graph g ln) = map decode forest where
    forest = G.scc g
    decode (Node v [])
        | v `elem` g ! v = Right [ln!v]
        | otherwise = Left (ln!v)
    decode other = Right (dec other [])
    dec (Node v ts) vs = ln!v:foldr dec vs ts
-- | SCC forest with vertices translated back to nodes.
sccForest :: Graph n -> Forest n
sccForest (Graph g ln) = map (fmap (ln!)) forest where
    forest = G.scc g
-- | Depth-first forest with vertices translated back to nodes.
dff :: Graph n -> Forest n
dff (Graph g ln) = map (fmap (ln!)) forest where
    forest = G.dff g
-- | Connected components as node lists.
components :: Graph n -> [[n]]
components (Graph g ln) = map decode forest where
    forest = G.components g
    decode n = dec n []
    dec (Node v ts) vs = ln!v:foldr dec vs ts
-- | Topologically sorted nodes (dependencies after dependents, as in
-- 'G.topSort').
topSort :: Graph n -> [n]
topSort (Graph g ln) = map (ln!) $ G.topSort g
-- | All nodes that sit on some cycle, i.e. members of cyclic SCCs.
cyclicNodes :: Graph n -> [n]
cyclicNodes g = concat [ xs | Right xs <- Util.Graph.scc g]
-- | Collapse every strongly connected component to one node, giving a
-- DAG whose nodes are the member lists of the components.
toDag :: Graph n -> Graph [n]
toDag (Graph g lv) = Graph g' ns' where
    ns' = listArray (0,max_v) [ map (lv!) ns | ns <- nss ]
    -- NOTE(review): the edge targets below are *original* vertex ids,
    -- not component indices -- this looks suspicious for any graph in
    -- which SCC numbering differs from vertex numbering; confirm.
    g' = listArray (0,max_v) [ snub [ v | n <- ns, v <- g!n ] | ns <- nss ]
    max_v = length nss - 1
    nss = map (flip f []) (G.scc g) where
        f (Node v ts) rs = v:foldr f rs ts
-- Mutable / immutable dense adjacency matrices, used by the transitive
-- closure and reduction algorithms below.
type AdjacencyMatrix s = STArray s (Vertex,Vertex) Bool
type IAdjacencyMatrix = Array (Vertex,Vertex) Bool
-- | In-place transitive closure (Warshall's algorithm): after the k-th
-- outer iteration, (i,j) is set iff there is a path from i to j using
-- only intermediate vertices <= k.
transitiveClosureAM :: AdjacencyMatrix s -> ST s ()
transitiveClosureAM arr = do
    bnds@(_,(max_v,_)) <- getBounds arr
    forM_ [0 .. max_v] $ \k -> do
        forM_ (range bnds) $ \ (i,j) -> do
            dij <- readArray arr (i,j)
            dik <- readArray arr (i,k)
            dkj <- readArray arr (k,j)
            writeArray arr (i,j) (dij || (dik && dkj))
-- | In-place transitive reduction: drop every edge (k,j) implied by a
-- path k -> i -> j.  NOTE(review): the matrix is closed first, so this
-- computes the reduction of the *closure*, not of the original edge
-- set -- confirm that is the intended semantics.
transitiveReductionAM :: AdjacencyMatrix s -> ST s ()
transitiveReductionAM arr = do
    bnds@(_,(max_v,_)) <- getBounds arr
    transitiveClosureAM arr
    -- A frozen snapshot, so the deletions below don't affect the test.
    (farr :: IAdjacencyMatrix) <- freeze arr
    forM_ [0 .. max_v] $ \k -> do
        forM_ (range bnds) $ \ (i,j) -> do
            if farr!(k,i) && farr!(i,j) then
                writeArray arr (k,j) False
             else return ()
-- | Dense boolean adjacency matrix of a graph, indexed (from, to).
toAdjacencyMatrix :: G.Graph -> ST s (AdjacencyMatrix s)
toAdjacencyMatrix g = do
    let (0,max_v) = bounds g
    arr <- newArray ((0,0),(max_v,max_v)) False :: ST s (STArray s (Vertex,Vertex) Bool)
    sequence_ [ writeArray arr (v,u) True | (v,vs) <- assocs g, u <- vs ]
    return arr
-- | Back from a matrix to adjacency-list form.  The per-row scan over
-- all set cells is quadratic, which is fine for the small graphs here.
fromAdjacencyMatrix :: AdjacencyMatrix s -> ST s G.Graph
fromAdjacencyMatrix arr = do
    bnds@(_,(max_v,_)) <- getBounds arr
    rs <- getAssocs arr
    let rs' = [ x | (x,True) <- rs ]
    return (listArray (0,max_v) [ [ v | (n',v) <- rs', n == n' ] | n <- [ 0 .. max_v] ])
-- | Transitive closure of a graph, via the matrix algorithm above; the
-- node table is unchanged.
transitiveClosure :: Graph n -> Graph n
transitiveClosure (Graph g ns) = let g' = runST (tc g) in (Graph g' ns) where
    tc g = do
        a <- toAdjacencyMatrix g
        transitiveClosureAM a
        fromAdjacencyMatrix a
-- | Transitive reduction of a graph (see 'transitiveReductionAM' for
-- the exact semantics); the node table is unchanged.
transitiveReduction :: Graph n -> Graph n
transitiveReduction (Graph g ns) = let g' = runST (tc g) in (Graph g' ns) where
    tc g = do
        a <- toAdjacencyMatrix g
        transitiveReductionAM a
        fromAdjacencyMatrix a
-- | Mapping touches only the node table; the edge structure is shared.
instance Functor Graph where
    fmap f (Graph g n) = Graph g (fmap f n)
--mapT :: (Vertex -> a -> b) -> Table a -> Table b
--mapT f t = listArray (bounds t) [ (f v (t!v)) | v <- indices t ]
-- | Recompute the edges of an existing graph from fresh key/dependency
-- functions, keeping the node table (and vertex numbering) unchanged.
restitchGraph :: Ord k => (n -> k) -> (n -> [k]) -> Graph n -> Graph n
restitchGraph fn fd (Graph g nr) = Graph g' nr where
    kmap = Map.fromList [ (fn n,i) | (i,n) <- assocs nr ]
    g' = listArray (bounds g) [mapMaybe (flip Map.lookup kmap) (snub $ fd n) | n <- elems nr]
-- | Bottom-up map: each node's new value is @f@ of its old value and
-- the new values of its successors.  The recursive 'mdo' writes the
-- result cell *before* the successor results are demanded, so cycles
-- tie a lazy knot instead of looping -- on a cyclic graph @f@ receives
-- its own (lazily defined) result and must not force it -- TODO confirm.
mapGraph :: forall a b . (a -> [b] -> b) -> Graph a -> Graph b
mapGraph f (Graph gr nr) = runST $ do
    mnr <- thaw nr :: ST s (STArray s Vertex a)
    -- Left = not yet computed, Right = memoised result.
    mnr <- mapArray Left mnr
    let g i = readArray mnr i >>= \v -> case v of
            Right m -> return m
            Left l -> mdo
                writeArray mnr i (Right r)
                rs <- mapM g (gr!i)
                let r = f l rs
                return r
    mapM_ g (range $ bounds nr)
    mnr <- mapArray fromRight mnr
    mnr <- unsafeFreeze mnr
    return (Graph gr mnr)
| m-alvarez/jhc | src/Util/Graph.hs | mit | 7,780 | 6 | 21 | 2,132 | 3,536 | 1,834 | 1,702 | -1 | -1 |
module GenHtmlDoc where
import Control.Monad.State
import Test.QuickCheck
import Text.PrettyPrint
import Css.CssTypes
import Html.HtmlNode
import Css.GenCss
import Html.GenHtml
import Css.ShowCss
import Html.ShowHtml
import HtmlDoc
-- | Random HTML documents (with CSS) for QuickCheck; shrinking is
-- delegated to 'shrinkHtml'.
instance Arbitrary HtmlDoc where
  arbitrary = genHtml
  shrink = shrinkHtml
-- | Generate random CSS rules and HTML nodes, give every element node a
-- unique id, and package them as an 'HtmlDoc'.
genHtml :: Gen HtmlDoc
genHtml = do
  css <- arbitrary
  html <- arbitrary
  return $ htmlDoc css $ addIdsToHtml html
-- | Pretty-print a document to a String.
renderHtml :: HtmlDoc -> String
renderHtml (HtmlDoc (css, html)) = render $ ppdoc css html
-- | Debug helper: print one random rendered document to stdout.
h :: IO ()
h = generate genHtml >>= putStrLn . renderHtml
-- | Wrap a document in an HTML element: open tag, body indented by two
-- spaces, close tag -- each on its own line.
content :: String -> Doc -> Doc
content name body = open $$ nest 2 body $$ close
  where
    open  = text ("<" ++ name ++ ">")
    close = text ("</" ++ name ++ ">")
-- | Full document: an <html> element wrapping the head and body.
ppdoc :: [CssRule] -> [HtmlNode] -> Doc
ppdoc css html = content "html" $ hhead css $$ hbody html
-- <head>: the CSS reset followed by the generated stylesheet.
hhead css = content "head" (reset $$ content "style" (vcat $ map (text . showcss) css))
-- <body>: nodes rendered compactly and concatenated on one line.
hbody html = content "body" (hcat $ map (text . compactHtml) html)
-- Inline <style> carrying a standard CSS reset, so browser defaults do
-- not influence the generated documents.
reset = text $ "<style>" ++ cssReset ++ "</style>"
-- | Assign fresh id attributes ("n0", "n1", ...) to every element node,
-- threading the counter through 'State' starting at 0.
addIdsToHtml :: [HtmlNode] -> [HtmlNode]
addIdsToHtml nodes = fst $ runState (addIds nodes) 0
-- | Renumber a node list inside the counter state.
addIds :: [HtmlNode] -> State Int [HtmlNode]
addIds nodes = mapM addIds' nodes
-- | Text nodes are untouched; element nodes get id "n<counter>" and
-- have their children renumbered recursively.
addIds' :: HtmlNode -> State Int HtmlNode
addIds' n = case n of
  Text _ -> return n
  Node attrs children -> do
    id <- nextId   -- shadows Prelude.id; harmless locally
    newChildren <- addIds children
    return $ Node (setId ("n" ++ show id) attrs) newChildren
-- | Post-increment: return the current counter value and store its
-- successor.
nextId :: State Int Int
nextId = state (\n -> (n, n + 1))
-- | Shrink the CSS and the HTML together via the pair instance.
shrinkHtml :: HtmlDoc -> [HtmlDoc]
shrinkHtml (HtmlDoc (css, doc)) = [ HtmlDoc (css', doc') | (css', doc') <- shrink (css, doc) ]
-- A standard minified CSS reset (plus body height/line-height tweaks),
-- neutralising default browser styling for the generated documents.
cssReset = "html,body,div,span,applet,object,iframe,h1,h2,h3,h4,h5,h6,p,blockquote,pre,a,abbr,acronym,address,big,cite,code,del,dfn,em,img,ins,kbd,q,s,samp,small,strike,strong,sub,sup,tt,var,b,u,i,center,dl,dt,dd,ol,ul,li,fieldset,form,label,legend,table,caption,tbody,tfoot,thead,tr,th,td,article,aside,canvas,details,embed,figure,figcaption,footer,header,hgroup,menu,nav,output,ruby,section,summary,time,mark,audio,video{margin:0;padding:0;border:0;font-size:100%;font:inherit;vertical-align:baseline}article,aside,details,figcaption,figure,footer,header,hgroup,menu,nav,section{display:block}body{line-height:1;height:100%}ol,ul{list-style:none}blockquote,q{quotes:none}blockquote:before,blockquote:after,q:before,q:after{content:'';content:none}table{border-collapse:collapse;border-spacing:0}"
| pbevin/toycss | src/GenHtmlDoc.hs | gpl-2.0 | 2,473 | 0 | 16 | 370 | 666 | 342 | 324 | 51 | 2 |
-- Cyclic nand embedded in a non-constructive context.
module T where
import Tests.Basis
-- | The circuit under test: constant True feeding a NAND (and-then-not)
-- whose output is looped back into it via 'combLoop'.
c = arr (\() -> ()) >>> trueA >>> combLoop (andA >>> notA >>> arr dupA)
-- | With a non-constructive cyclic core, simulation should yield bottom
-- on every step -- TODO confirm against 'simulate's semantics.
prop_correct = property (\xs -> simulate c xs == take (length xs) (repeat bottom))
-- | The constructivity analysis must reject the circuit.
test_constructive = isNothing (isConstructive c)
| peteg/ADHOC | Tests/01_CyclicCircuits/017_nand_embedded_not_cons.hs | gpl-2.0 | 298 | 0 | 11 | 53 | 111 | 58 | 53 | 5 | 1 |
{-# language NoMonomorphismRestriction #-}
{-# language FlexibleInstances #-}
{-# language FlexibleContexts #-}
{-# language TypeFamilies #-}
module Polynomial.Unary.Reader where
import Polynomial.Class
import Polynomial.Patch
import qualified Prelude
import Prelude ( return, ($), (.), Eq, Ord, read, Bool(..) )
import Polynomial.Unary.Data
import Autolib.Reader
import Control.Applicative ((<$>),(<*>), (<*), (*>) )
import Control.Lens (over)
import Control.Monad ( mzero )
import qualified Text.Parsec.Expr as E
-- | Read a 'Poly' by parsing the pattern-level representation and
-- stripping the 'Id' wrappers from each (coefficient, exponent) pair.
instance (Reader c, Ring c) => Reader (Poly c) where
    reader = ( poly . Prelude.map (\(Id c,Id e) -> (c,e)) . unP )
        <$> reader
{-
instance (Ring c , Reader c) => Reader (P c Integer) where
reader = do
t <- factorI ; ts <- many $ opI <*> factorI
return $ P $ t : ts
opI = do my_reservedOp "+" ; return $ Prelude.id
<|> do my_reservedOp "-" ; return $ \ (c,e) -> (negate c, e)
factorI :: (Ring c, Reader c) => Parser (c,Integer)
factorI = do
f <- option one $ reader <* my_reservedOp "*"
e <- option 0 $ my_reserved "x" *> option 1 ( my_reservedOp "^" *> reader )
return (f,e)
-}
-- | Parse a sum: one signed factor followed by any number of
-- (operator, factor) continuations.
instance (Pattern c, Reader c, Ring (Base c)
    , Pattern e, Base e ~ Integer, Reader e)
    => Reader (P c e) where
    reader = do
        t <- factorP ; ts <- many $ opP <*> factorP
        return $ P $ t : ts
-- | this parses "+" / "-" and returns id / negate function
-- "+" keeps the factor as-is; "-" negates only the coefficient.
opP = do my_reservedOp "+" ; return $ Prelude.id
 <|> do my_reservedOp "-"
        return $ \ (c,e) -> (pmap negate c, e)
-- | this parses c * x ^ e
-- where "x" is fixed (the name of the (single) variable)
-- "c *" is optional (if missing, then c = one)
-- "^ e" is optional (if missing, then e = 1)
-- "* x ^ e" is optinal (if missine, then e = 0)
factorP = do
    (cpresent, c) <- option (False, inject one)
        $ (,) True <$> reader
    spresent <- option False
        $ my_reservedOp "*" *> return True
    let xe = my_reserved "x"
            *> option (inject one) (my_reservedOp "^" *> reader )
        -- Which continuations are valid, by (coefficient seen, '*' seen):
        cont = case (cpresent, spresent) of
            (True, False) -> return $ inject zero  -- bare coefficient: exponent 0
            (True, True) -> xe                     -- "c *": the x-part is mandatory
            (False, False) -> xe                   -- bare "x ^ e"
            (False, True) -> mzero                 -- "*" without a coefficient: reject
    (,) c <$> cont
-- | One or more digits, then trailing whitespace, read as an 'Integer'.
natural :: Parser Integer
natural = read <$> (many1 digit <* my_whiteSpace)
| marcellussiegburg/autotool | collection/src/Polynomial/Unary/Reader.hs | gpl-2.0 | 2,379 | 0 | 14 | 641 | 607 | 335 | 272 | 44 | 4 |
{- |
Module : $Header$
Description : Abstract syntax for propositional logic extended with QBFs
Copyright : (c) Jonathan von Schroeder, DFKI GmbH 2010
License : GPLv2 or higher, see LICENSE.txt
Maintainer : <jonathan.von_schroeder@dfki.de>
Stability : experimental
Portability : portable
Definition of abstract syntax for propositional logic extended with QBFs
Ref.
<http://en.wikipedia.org/wiki/Propositional_logic>
<http://www.voronkov.com/lics.cgi>
-}
module QBF.AS_BASIC_QBF
( FORMULA (..) -- datatype for Propositional Formulas
, BASICITEMS (..) -- Items of a Basic Spec
, BASICSPEC (..) -- Basic Spec
, SYMBITEMS (..) -- List of symbols
, SYMB (..) -- Symbols
, SYMBMAPITEMS (..) -- Symbol map
, SYMBORMAP (..) -- Symbol or symbol map
, PREDITEM (..) -- Predicates
, isPrimForm
, ID (..)
) where
import Common.Id as Id
import Common.Doc
import Common.DocUtils
import Common.Keywords
import Common.AS_Annotation as AS_Anno
import qualified Data.List as List
import Data.Maybe (isJust)
-- DrIFT command
{-! global: GetRange !-}
-- | predicates = propositions
-- | A declaration of propositional identifiers, with source range.
data PREDITEM = PredItem [Id.Token] Id.Range
    deriving Show
-- | A basic specification is a list of annotated basic items.
newtype BASICSPEC = BasicSpec [AS_Anno.Annoted BASICITEMS]
    deriving Show
-- | A basic item: either a predicate declaration or a list of axioms.
data BASICITEMS =
    PredDecl PREDITEM
    | AxiomItems [AS_Anno.Annoted FORMULA]
    -- pos: dots
    deriving Show
-- | Datatype for QBF formulas
data FORMULA =
    FalseAtom Id.Range
    -- pos: "False
    | TrueAtom Id.Range
    -- pos: "True"
    | Predication Id.Token
    -- pos: Propositional Identifiers
    | Negation FORMULA Id.Range
    -- pos: not
    | Conjunction [FORMULA] Id.Range
    -- pos: "/\"s
    | Disjunction [FORMULA] Id.Range
    -- pos: "\/"s
    | Implication FORMULA FORMULA Id.Range
    -- pos: "=>"
    | Equivalence FORMULA FORMULA Id.Range
    -- pos: "<=>"
    -- The two quantifiers carry their list of bound variables.
    | ForAll [Id.Token] FORMULA Id.Range
    | Exists [Id.Token] FORMULA Id.Range
    deriving (Show, Ord)
-- | A (possibly paired) token used when matching the bound variables of
-- two formulas: @ID t Nothing@ is unpaired, @ID t1 (Just t2)@ records
-- that t1 and t2 have been identified with each other.
data ID = ID Id.Token (Maybe Id.Token)
-- | Pairings compare symmetrically in their two tokens; an unpaired
-- token equals a pairing containing it on either side.
-- NOTE(review): this Eq is not symmetric -- @unpaired == paired@ may be
-- True while @paired == unpaired@ is always False; confirm intended.
instance Eq ID where
    ID t1 (Just t2) == ID t3 (Just t4) =
        ((t1 == t3) && (t2 == t4))
        || ((t2 == t3) && (t1 == t4))
    ID t1 Nothing == ID t2 t3 = (t1 == t2) || (Just t1 == t3)
    ID _ (Just _) == ID _ Nothing = False
{- two QBFs are equivalent if bound variables
can be renamed such that the QBFs are equal -}
-- | Structural equality of two QBFs up to renaming of bound variables:
-- @ts@/@ts1@ are the variables bound so far on each side, and the ID
-- list accumulates the pairings established between them.  Returns the
-- extended pairing list on success, Nothing on mismatch.
qbfMakeEqual :: Maybe [ID] -> FORMULA -> [Id.Token]
    -> FORMULA -> [Id.Token] -> Maybe [ID]
qbfMakeEqual (Just ids) f ts f1 ts1 = if length ts /= length ts1 then
    Nothing
  else case (f, f1) of
    (Predication t, Predication t1)
      | t == t1 -> Just ids
      -- Both are bound variables: accept if already paired with each
      -- other, or pair them now if neither is paired with anything else.
      | t `elem` ts && t1 `elem` ts1 -> let tt1 = ID t (Just t1) in
        if tt1 `elem` ids then
          Just ids
        else
          if ID t Nothing `notElem` ids && ID t1 Nothing `notElem` ids then
            Just (tt1 : ids)
          else
            Nothing
      | otherwise -> Nothing
    (Negation f_ _, Negation f1_ _) -> qbfMakeEqual (Just ids) f_ ts f1_ ts1
    -- Conjunctions: head against head, then recurse on the tails.
    (Conjunction (f_ : fs) _, Conjunction (f1_ : fs1) _) ->
      if length fs /= length fs1 then Nothing else
        case r of
          Nothing -> Nothing
          _ -> qbfMakeEqual r
            (Conjunction fs nullRange) ts
            (Conjunction fs1 nullRange) ts1
      where
        r = qbfMakeEqual (Just ids) f_ ts f1_ ts1
    -- Disjunction delegates to the Conjunction case (same shape).
    (Disjunction fs r, Disjunction fs1 r1) -> qbfMakeEqual (Just ids)
      (Conjunction fs r) ts (Conjunction fs1 r1) ts1
    (Implication f_ f1_ _, Implication f2 f3 _) -> case r of
      Nothing -> Nothing
      _ -> qbfMakeEqual r f1_ ts f3 ts1
      where
        r = qbfMakeEqual (Just ids) f_ ts f2 ts1
    -- Equivalence delegates to the Implication case.
    (Equivalence f_ f1_ r1, Equivalence f2 f3 _) -> qbfMakeEqual (Just ids)
      (Implication f_ f1_ r1) ts
      (Implication f2 f3 r1) ts1
    -- Quantifier: extend the bound-variable lists, match the bodies,
    -- and keep only pairings that don't leak a variable bound on just
    -- one side.
    (ForAll ts_ f_ _, ForAll ts1_ f1_ _) -> case r of
      Nothing -> Nothing
      (Just ids_) -> Just (ids ++ filter (\ (ID x my) ->
        let Just y = my in
        (x `elem` ts_ && y `notElem` ts1_) ||
        (x `elem` ts1_ && y `notElem` ts_)) d)
        where
          d = ids_ List.\\ ids
      where
        r = qbfMakeEqual (Just ids) f_ (ts ++ ts_) f1_ (ts1 ++ ts1_)
    -- BUG FIX: this case used to recurse on itself with identical
    -- arguments, looping forever (so Eq on Exists never terminated).
    -- Like Disjunction->Conjunction and Equivalence->Implication, it
    -- must delegate to the ForAll case.
    (Exists ts_ f_ r, Exists ts1_ f1_ r1) -> qbfMakeEqual (Just ids)
      (ForAll ts_ f_ r) ts
      (ForAll ts1_ f1_ r1) ts1
    (_1, _2) -> Nothing
qbfMakeEqual Nothing _ _ _ _ = Nothing
-- ranges are always equal (see Common/Id.hs) - thus they can be ignored
-- | Structural equality ignoring ranges; quantified formulas compare up
-- to renaming of bound variables via 'qbfMakeEqual'.
instance Eq FORMULA where
  FalseAtom _ == FalseAtom _ = True
  TrueAtom _ == TrueAtom _ = True
  Predication t == Predication t1 = t == t1
  Negation f _ == Negation f1 _ = f == f1
  Conjunction xs _ == Conjunction xs1 _ = xs == xs1
  Disjunction xs _ == Disjunction xs1 _ = xs == xs1
  Implication f f1 _ == Implication f2 f3 _ = (f == f2) && (f1 == f3)
  Equivalence f f1 _ == Equivalence f2 f3 _ = (f == f2) && (f1 == f3)
  ForAll ts f _ == ForAll ts1 f1 _ = isJust (qbfMakeEqual (Just []) f ts f1 ts1)
  Exists ts f _ == Exists ts1 f1 _ = isJust (qbfMakeEqual (Just []) f ts f1 ts1)
  _ == _ = False
-- | A list of symbols, with source range.
data SYMBITEMS = SymbItems [SYMB] Id.Range
    -- pos: SYMB_KIND, commas
    deriving (Show, Eq)
-- | A single symbol (a token).
newtype SYMB = SymbId Id.Token
    -- pos: colon
    deriving (Show, Eq)
-- | A list of symbols or symbol maps, with source range.
data SYMBMAPITEMS = SymbMapItems [SYMBORMAP] Id.Range
    -- pos: SYMB_KIND, commas
    deriving (Show, Eq)
-- | Either a plain symbol or a source |-> target renaming.
data SYMBORMAP = Symb SYMB
    | SymbMap SYMB SYMB Id.Range
    -- pos: "|->"
    deriving (Show, Eq)
-- All about pretty printing we chose the easy way here :)
-- Pretty instances simply forward to the print* helpers below.
instance Pretty FORMULA where
    pretty = printFormula
instance Pretty BASICSPEC where
    pretty = printBasicSpec
instance Pretty SYMB where
    pretty = printSymbol
instance Pretty SYMBITEMS where
    pretty = printSymbItems
instance Pretty SYMBMAPITEMS where
    pretty = printSymbMapItems
instance Pretty BASICITEMS where
    pretty = printBasicItems
instance Pretty SYMBORMAP where
    pretty = printSymbOrMap
instance Pretty PREDITEM where
    pretty = printPredItem
-- | Primitive formulas never need parentheses when printed: atoms,
-- predications, and negations.
isPrimForm :: FORMULA -> Bool
isPrimForm (TrueAtom _)    = True
isPrimForm (FalseAtom _)   = True
isPrimForm (Predication _) = True
isPrimForm (Negation _ _)  = True
isPrimForm _               = False
-- Pretty printing for formulas
-- | Render a formula, parenthesising subformulas only where needed:
-- primitive subformulas never get parentheses, and arrow/quantifier
-- subformulas always do when nested inside junctions.
printFormula :: FORMULA -> Doc
printFormula frm =
  let ppf p f = (if p f then id else parens) $ printFormula f
      isJunctForm f = case f of
        Implication _ _ _ -> False
        Equivalence _ _ _ -> False
        ForAll _ _ _ -> False
        Exists _ _ _ -> False
        _ -> True
  in case frm of
  FalseAtom _ -> text falseS
  TrueAtom _ -> text trueS
  Predication x -> pretty x
  Negation f _ -> notDoc <+> ppf isPrimForm f
  Conjunction xs _ -> sepByArbitrary andDoc $ map (ppf isPrimForm) xs
  Disjunction xs _ -> sepByArbitrary orDoc $ map (ppf isPrimForm) xs
  Implication x y _ -> ppf isJunctForm x <+> implies <+> ppf isJunctForm y
  Equivalence x y _ -> ppf isJunctForm x <+> equiv <+> ppf isJunctForm y
  ForAll xs y _ -> forallDoc <+> sepByArbitrary comma (map pretty xs)
    <+> space
    <+> ppf isJunctForm y
  Exists xs y _ -> exists <+> sepByArbitrary comma (map pretty xs)
    <+> space
    <+> ppf isJunctForm y
-- | Separate documents with the given separator (prepended to each
-- element after the first).
sepByArbitrary :: Doc -> [Doc] -> Doc
sepByArbitrary d = fsep . prepPunctuate (d <> space)
-- Printers for the remaining syntax categories; each simply maps
-- 'pretty' over its components.
printPredItem :: PREDITEM -> Doc
printPredItem (PredItem xs _) = fsep $ map pretty xs
printBasicSpec :: BASICSPEC -> Doc
printBasicSpec (BasicSpec xs) = vcat $ map pretty xs
printBasicItems :: BASICITEMS -> Doc
printBasicItems (AxiomItems xs) = vcat $ map pretty xs
printBasicItems (PredDecl x) = pretty x
printSymbol :: SYMB -> Doc
printSymbol (SymbId sym) = pretty sym
printSymbItems :: SYMBITEMS -> Doc
printSymbItems (SymbItems xs _) = fsep $ map pretty xs
printSymbOrMap :: SYMBORMAP -> Doc
printSymbOrMap (Symb sym) = pretty sym
printSymbOrMap (SymbMap source dest _) =
    pretty source <+> mapsto <+> pretty dest
printSymbMapItems :: SYMBMAPITEMS -> Doc
printSymbMapItems (SymbMapItems xs _) = fsep $ map pretty xs
-- Generated by DrIFT, look but don't touch!
-- DrIFT-generated boilerplate: every type's getRange is 'nullRange' and
-- rangeSpan joins the spans of its fields.  Do not edit by hand.
instance GetRange PREDITEM where
  getRange = const nullRange
  rangeSpan x = case x of
    PredItem a b -> joinRanges [rangeSpan a, rangeSpan b]
instance GetRange BASICSPEC where
  getRange = const nullRange
  rangeSpan x = case x of
    BasicSpec a -> joinRanges [rangeSpan a]
instance GetRange BASICITEMS where
  getRange = const nullRange
  rangeSpan x = case x of
    PredDecl a -> joinRanges [rangeSpan a]
    AxiomItems a -> joinRanges [rangeSpan a]
instance GetRange FORMULA where
  getRange = const nullRange
  rangeSpan x = case x of
    FalseAtom a -> joinRanges [rangeSpan a]
    TrueAtom a -> joinRanges [rangeSpan a]
    Predication a -> joinRanges [rangeSpan a]
    Negation a b -> joinRanges [rangeSpan a, rangeSpan b]
    Conjunction a b -> joinRanges [rangeSpan a, rangeSpan b]
    Disjunction a b -> joinRanges [rangeSpan a, rangeSpan b]
    Implication a b c -> joinRanges [rangeSpan a, rangeSpan b,
                                     rangeSpan c]
    Equivalence a b c -> joinRanges [rangeSpan a, rangeSpan b,
                                     rangeSpan c]
    ForAll a b c -> joinRanges [rangeSpan a, rangeSpan b, rangeSpan c]
    Exists a b c -> joinRanges [rangeSpan a, rangeSpan b, rangeSpan c]
instance GetRange ID where
  getRange = const nullRange
  rangeSpan x = case x of
    ID a b -> joinRanges [rangeSpan a, rangeSpan b]
instance GetRange SYMBITEMS where
  getRange = const nullRange
  rangeSpan x = case x of
    SymbItems a b -> joinRanges [rangeSpan a, rangeSpan b]
instance GetRange SYMB where
  getRange = const nullRange
  rangeSpan x = case x of
    SymbId a -> joinRanges [rangeSpan a]
instance GetRange SYMBMAPITEMS where
  getRange = const nullRange
  rangeSpan x = case x of
    SymbMapItems a b -> joinRanges [rangeSpan a, rangeSpan b]
instance GetRange SYMBORMAP where
  getRange = const nullRange
  rangeSpan x = case x of
    Symb a -> joinRanges [rangeSpan a]
    SymbMap a b c -> joinRanges [rangeSpan a, rangeSpan b, rangeSpan c]
| nevrenato/Hets_Fork | QBF/AS_BASIC_QBF.hs | gpl-2.0 | 10,320 | 0 | 24 | 2,895 | 3,448 | 1,741 | 1,707 | 226 | 16 |
-- Copyright 2016, 2017 Robin Raymond
--
-- This file is part of Purple Muon
--
-- Purple Muon is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- Purple Muon is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with Purple Muon. If not, see <http://www.gnu.org/licenses/>.
{-|
Module : Client.States.InGameState.Types
Description : The types used for the state in game.
Copyright : (c) Robin Raymond, 2016-2017
License : GPL-3
Maintainer : robin@robinraymond.de
Portability : POSIX
-}
{-# LANGUAGE TemplateHaskell #-}
module Client.States.InGameState.Types
( State(..), physicalObjects, accumTime, gameObjects, controls, netState,
keymap, gameFonts, gameSprites
, NetworkState(..), lastPacket, lastID, ackField, socket, tbqueue
) where
import qualified Control.Concurrent.STM as CCS
import qualified Control.Lens as CLE
import qualified Data.IntMap.Strict as DIS
import qualified Data.Thyme.Clock as DTC
import qualified Network.Socket as NSO
import qualified Client.Assets.Font as CAF
import qualified Client.Assets.Sprite as CAS
import qualified PurpleMuon.Game.Types as PGT
import qualified PurpleMuon.Input.Types as PIT
import qualified PurpleMuon.Network.Types as PNT
import qualified PurpleMuon.Physics.Types as PPT
data State
    = State
    { _physicalObjects :: PPT.PhysicalObjects -- ^ objects owned by the physics simulation
    , _accumTime :: PPT.DeltaTime -- ^ Accumulated time for fixed physics step
    , _gameObjects :: DIS.IntMap PGT.GameObject -- ^ game entities, keyed by an Int id -- TODO confirm key semantics
    , _controls :: PIT.Controls -- ^ current input/control state
    , _netState :: NetworkState -- ^ connection state towards the game server
    , _keymap :: PIT.KeyMap -- ^ key binding table
    , _gameSprites :: CAS.SpriteLoaderType -- ^ sprite asset loader
    , _gameFonts :: CAF.FontLoaderType -- ^ font asset loader
    }
-- | The network state of a client.
-- This data type contains every information that is available for a connection
-- to a game server.
data NetworkState
    = NetworkState
    { _lastPacket :: DTC.UTCTime -- ^ presumably the time the last packet was handled -- TODO confirm
    , _lastID :: PNT.MessageCount -- ^ most recent message id -- TODO confirm sent vs. received
    , _ackField :: PNT.AckField -- ^ acknowledgement field for recent messages
    , _socket :: NSO.Socket -- ^ socket connected to the game server
    , _tbqueue :: CCS.TBQueue PNT.ServerToClientMsg -- ^ queue of incoming server messages
    }
-- Generate the lens accessors named in the export list above.
CLE.makeLenses ''State
CLE.makeLenses ''NetworkState
| r-raymond/purple-muon | src/Client/States/InGameState/Types.hs | gpl-3.0 | 2,638 | 0 | 10 | 587 | 332 | 226 | 106 | 35 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Estuary.Help.ColombiaEsPasion where
import Reflex
import Reflex.Dom
import Data.Text
import GHCJS.DOM.EventM
import Estuary.Widgets.Reflex
import Estuary.Widgets.Reflex
--render multiple sub-help files
colombiaEsPasionHelpFile :: MonadWidget t m => m ()
colombiaEsPasionHelpFile = divClass "languageHelpContainer" $ divClass "languageHelp" $ do
about
functionRef "voz"
functionRef "pasión"
functionRef "paz"
functionRef "educación"
functionRef "protesta"
functionRef "soacha"
return ()
-- about
about :: MonadWidget t m => m ()
about = do
divClass "about primary-color code-font" $ text "Colombia Es Pasion"
divClass "about primary-color code-font" $ text "A mini live coding esolang developed in Bogotá, Colombia."
exampleText :: Text -> Text
exampleText "voz" = "\"voz voz\""
exampleText "pasión" = "\"pasión\""
exampleText "paz" = "\"paz paz paz\""
exampleText "educación" = "\"pasión pasión pasión pasión\" educación 2"
exampleText "protesta" = "\"paz\" protesta 3"
exampleText "soacha" = "\"voz voz voz voz\" soacha"
referenceText :: Text -> Text
referenceText "voz" = "returns Dirt's \"birds3\" sample"
referenceText "pasión" = "returns Dirt's \"blip\" sample"
referenceText "paz" = "returns Dirt's \"sax\" sample"
referenceText "educación" = "returns TidalCycles' slow"
referenceText "protesta" = "returns TidalCycles' fast"
referenceText "soacha" = "returns TidalCycles' brak"
-- help files for samples
functionRef :: MonadWidget t m => Text -> m ()
functionRef x = divClass "helpWrapper" $ do
switchToReference <- buttonWithClass' x
exampleVisible <- toggle True switchToReference
referenceVisible <- toggle False switchToReference
hideableWidget exampleVisible "exampleText primary-color code-font" $ text (exampleText x)
hideableWidget referenceVisible "referenceText code-font" $ text (referenceText x)
return ()
| d0kt0r0/estuary | client/src/Estuary/Help/ColombiaEsPasion.hs | gpl-3.0 | 1,944 | 0 | 11 | 298 | 413 | 196 | 217 | 44 | 1 |
module Main where
-- | Entry point: print a greeting. 'putStrLn' writes the text
-- verbatim; the previous 'print' rendered the String via 'show' and
-- therefore emitted surrounding quotation marks.
main :: IO ()
main = putStrLn "Hello World"
| danielgoncalvesti/BIGDATA2017 | Atividade01/Haskell/Hello.hs | gpl-3.0 | 75 | 0 | 8 | 24 | 28 | 15 | 13 | 4 | 1 |
{-# LANGUAGE CPP #-}
-- | The module 'UserIO' defines how to do the interaction with the user.
-- By default, we use a command-line interface (as defined by the module 'CLI').
-- To define a new mode of interaction (e.g., GUI), create a new module similar
-- to 'CLI' and export the new module in 'UserIO'.
module UserIO (
#if defined(__USE_READLINE__)
module CLI
#else
module CLINoReadLine
#endif
) where
#if defined(__USE_READLINE__)
import CLI
#else
import CLINoReadLine
#endif
| jff/TeLLer | src/UserIO.hs | gpl-3.0 | 505 | 0 | 4 | 102 | 24 | 20 | 4 | 4 | 0 |
-- grid is a game written in Haskell
-- Copyright (C) 2018 karamellpelle@hotmail.com
--
-- This file is part of grid.
--
-- grid is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- grid is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with grid. If not, see <http://www.gnu.org/licenses/>.
--
module MEnv.Env.ScreenObject.GLFW
(
ScreenInit (..),
ScreenObject(..),
withLoadedScreen,
) where
import LoadM
import qualified Graphics.UI.GLFW as GLFW
import qualified Graphics.Rendering.OpenGL as GL
-- | Placeholder for screen-creation parameters; currently carries no
-- configuration (window size and pixel format are hard-coded in
-- 'withLoadedScreen').
data ScreenInit =
    ScreenInit
-- | Placeholder handle for a created screen; currently carries no state.
data ScreenObject =
    ScreenObject
--------------------------------------------------------------------------------
--
-- | Initialize GLFW, open an 800x600 window, run @handler@ on the
-- created 'ScreenObject', shut GLFW down, and return the handler's
-- result.
--
-- The 'ScreenInit' argument is currently unused: the window
-- parameters are hard-coded below.
--
-- NOTE(review): 'GLFW.terminate' only runs when @handler@ returns
-- normally; if 'LoadM' can fail mid-way this leaks the GLFW context —
-- consider a bracket-style cleanup. TODO confirm LoadM's failure
-- semantics.
withLoadedScreen :: ScreenInit -> (ScreenObject -> LoadM a) -> LoadM a
withLoadedScreen _screenInit handler = do
    -- fresh names: the original re-bound the parameter name 'init',
    -- shadowing both the argument and Prelude.init
    inited <- liftIO GLFW.initialize
    unless inited $ loggingError "could not initialize GLFW"
    --liftIO $ GLFW.openWindowHint GL.$= (GLFW.FSAASamples, 8)
    opened <- liftIO $ GLFW.openWindow (GL.Size 800 600)
                                       [ GLFW.DisplayAlphaBits 32
                                       , GLFW.DisplayDepthBits 32 ]
                                       GLFW.Window
                                       --GLFW.FullScreen
    unless opened $ loggingError "could not open window"
    let screenobj = ScreenObject
    -- hand the (stateless) screen object to the caller
    a <- handler screenobj
    liftIO GLFW.terminate
    return a
| karamellpelle/grid | designer/source/MEnv/Env/ScreenObject/GLFW.hs | gpl-3.0 | 1,932 | 0 | 12 | 536 | 243 | 138 | 105 | 24 | 1 |
module Test where
import Memory
import Lexer
import Parser
import Exception
import ParserCombinators
import Eval
-- Token stream for an expression snippet; 'extract' presumably
-- unwraps the lexer's result from Exception -- TODO confirm.
te1 = (extract . lexer) $ "0 X23 PROGRAM (X12) WHILE DO () END"
-- All parses of 'te1' as an expression.
p = runP pExpr te1
-- Same, but starting with a SUC(...) expression.
te2 = (extract . lexer) $ "SUC(X0) X23 PROGRAM (X12) WHILE DO () END"
p2 = runP pExpr te2
-- Minimal program: successor of the input variable.
code = "PROGRAM (X0)" ++
       "X1 := SUC(X0)" ++
       "RESULT (X1)"
lexed = (extract.lexer) $ code
-- Same program plus a dead assignment (X2 is never read).
code2 = "PROGRAM (X0)" ++
       "X1 := SUC(X0)" ++
       "X2 := 0 " ++
       "RESULT (X1)"
lexed2 = (extract.lexer) $ code2
-- First parse result of the full program grammar.
ast2 = fst.head $ runP pProgram lexed2
-- SUC followed by PRED: should return the input unchanged.
code3 = "PROGRAM (X0)" ++
       "X1 := SUC(X0)" ++
       "X2 := PRED(X1) " ++
       "RESULT (X2)"
lexed3 = (extract.lexer) $ code3
ast3 = fst.head $ runP pProgram lexed3
| jota191/PLang | src/Test.hs | gpl-3.0 | 765 | 0 | 7 | 218 | 195 | 108 | 87 | 27 | 1 |
-- grid is a game written in Haskell
-- Copyright (C) 2018 karamellpelle@hotmail.com
--
-- This file is part of grid.
--
-- grid is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- grid is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with grid. If not, see <http://www.gnu.org/licenses/>.
--
module Game.Memory.MemoryWorld.OutputState.Plain
(
OutputState (..),
makeOutputState,
) where
import MyPrelude
import Game
-- | Output/animation state for the memory world. 'outputstateLevelIx'
-- holds the current level index; the role of the primed colour index
-- copy is not evident from this module -- presumably the previous
-- colour, TODO confirm at the use sites.
data OutputState =
    OutputState
    {
        outputstateLevelIx :: !UInt,
        outputstateColorIx :: !UInt,
        outputstateColorIx' :: !UInt,
        outputstateTick :: !Tick,
        outputstateAlpha :: !Float
    }
-- | Fresh output state for level @mix@: both colour indices at 0,
-- tick and alpha reset to 0.0.
makeOutputState :: UInt -> MEnv' OutputState
makeOutputState mix = do
    return OutputState
           {
              outputstateLevelIx = mix,
              outputstateColorIx = 0,
              outputstateColorIx' = 0,
              outputstateTick = 0.0,
              outputstateAlpha = 0.0
           }
| karamellpelle/grid | source/Game/Memory/MemoryWorld/OutputState/Plain.hs | gpl-3.0 | 1,438 | 0 | 9 | 374 | 148 | 96 | 52 | 31 | 1 |
-- For interacting with an MPD server
-- MPD server location: the default local daemon on its default port.
address = "localhost"
port = 6600
-- | Song metadata as reported by MPD. All fields are kept as raw
-- protocol strings.
-- NOTE(review): the type is named 'MPDStatus' but its only
-- constructor models a song entry; consider renaming one of the two.
data MPDStatus =
    -- The order is important here
    Song { songFile :: String
         , songLastModified :: String
         , songTime :: String
         , songArtist :: String
         , songTitle :: String
         , songAlbum :: String
         , songTrack :: String
         , songDate :: String
         , songGenre :: String
         , songPos :: String
         , songId :: String
         }
| UndeadMastodon/ShrubBot | MPD.hs | gpl-3.0 | 455 | 0 | 8 | 154 | 88 | 57 | 31 | 14 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Dataproc.Projects.Regions.Jobs.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets the resource representation for a job in a project.
--
-- /See:/ <https://cloud.google.com/dataproc/ Cloud Dataproc API Reference> for @dataproc.projects.regions.jobs.get@.
module Network.Google.Resource.Dataproc.Projects.Regions.Jobs.Get
(
-- * REST Resource
ProjectsRegionsJobsGetResource
-- * Creating a Request
, projectsRegionsJobsGet
, ProjectsRegionsJobsGet
-- * Request Lenses
, prjgXgafv
, prjgJobId
, prjgUploadProtocol
, prjgAccessToken
, prjgUploadType
, prjgRegion
, prjgProjectId
, prjgCallback
) where
import Network.Google.Dataproc.Types
import Network.Google.Prelude
-- | A resource alias for @dataproc.projects.regions.jobs.get@ method which the
-- 'ProjectsRegionsJobsGet' request conforms to.
type ProjectsRegionsJobsGetResource =
"v1" :>
"projects" :>
Capture "projectId" Text :>
"regions" :>
Capture "region" Text :>
"jobs" :>
Capture "jobId" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Get '[JSON] Job
-- | Gets the resource representation for a job in a project.
--
-- /See:/ 'projectsRegionsJobsGet' smart constructor.
data ProjectsRegionsJobsGet =
ProjectsRegionsJobsGet'
{ _prjgXgafv :: !(Maybe Xgafv)
, _prjgJobId :: !Text
, _prjgUploadProtocol :: !(Maybe Text)
, _prjgAccessToken :: !(Maybe Text)
, _prjgUploadType :: !(Maybe Text)
, _prjgRegion :: !Text
, _prjgProjectId :: !Text
, _prjgCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsRegionsJobsGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'prjgXgafv'
--
-- * 'prjgJobId'
--
-- * 'prjgUploadProtocol'
--
-- * 'prjgAccessToken'
--
-- * 'prjgUploadType'
--
-- * 'prjgRegion'
--
-- * 'prjgProjectId'
--
-- * 'prjgCallback'
projectsRegionsJobsGet
:: Text -- ^ 'prjgJobId'
-> Text -- ^ 'prjgRegion'
-> Text -- ^ 'prjgProjectId'
-> ProjectsRegionsJobsGet
projectsRegionsJobsGet pPrjgJobId_ pPrjgRegion_ pPrjgProjectId_ =
ProjectsRegionsJobsGet'
{ _prjgXgafv = Nothing
, _prjgJobId = pPrjgJobId_
, _prjgUploadProtocol = Nothing
, _prjgAccessToken = Nothing
, _prjgUploadType = Nothing
, _prjgRegion = pPrjgRegion_
, _prjgProjectId = pPrjgProjectId_
, _prjgCallback = Nothing
}
-- | V1 error format.
prjgXgafv :: Lens' ProjectsRegionsJobsGet (Maybe Xgafv)
prjgXgafv
= lens _prjgXgafv (\ s a -> s{_prjgXgafv = a})
-- | Required. The job ID.
prjgJobId :: Lens' ProjectsRegionsJobsGet Text
prjgJobId
= lens _prjgJobId (\ s a -> s{_prjgJobId = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
prjgUploadProtocol :: Lens' ProjectsRegionsJobsGet (Maybe Text)
prjgUploadProtocol
= lens _prjgUploadProtocol
(\ s a -> s{_prjgUploadProtocol = a})
-- | OAuth access token.
prjgAccessToken :: Lens' ProjectsRegionsJobsGet (Maybe Text)
prjgAccessToken
= lens _prjgAccessToken
(\ s a -> s{_prjgAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
prjgUploadType :: Lens' ProjectsRegionsJobsGet (Maybe Text)
prjgUploadType
= lens _prjgUploadType
(\ s a -> s{_prjgUploadType = a})
-- | Required. The Dataproc region in which to handle the request.
prjgRegion :: Lens' ProjectsRegionsJobsGet Text
prjgRegion
= lens _prjgRegion (\ s a -> s{_prjgRegion = a})
-- | Required. The ID of the Google Cloud Platform project that the job
-- belongs to.
prjgProjectId :: Lens' ProjectsRegionsJobsGet Text
prjgProjectId
= lens _prjgProjectId
(\ s a -> s{_prjgProjectId = a})
-- | JSONP
prjgCallback :: Lens' ProjectsRegionsJobsGet (Maybe Text)
prjgCallback
= lens _prjgCallback (\ s a -> s{_prjgCallback = a})
instance GoogleRequest ProjectsRegionsJobsGet where
type Rs ProjectsRegionsJobsGet = Job
type Scopes ProjectsRegionsJobsGet =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient ProjectsRegionsJobsGet'{..}
= go _prjgProjectId _prjgRegion _prjgJobId _prjgXgafv
_prjgUploadProtocol
_prjgAccessToken
_prjgUploadType
_prjgCallback
(Just AltJSON)
dataprocService
where go
= buildClient
(Proxy :: Proxy ProjectsRegionsJobsGetResource)
mempty
| brendanhay/gogol | gogol-dataproc/gen/Network/Google/Resource/Dataproc/Projects/Regions/Jobs/Get.hs | mpl-2.0 | 5,593 | 0 | 20 | 1,334 | 859 | 500 | 359 | 127 | 1 |
{-# LANGUAGE GADTs #-}
module Gonimo.Database.Effects.Servant where
-- Little helpers integrating db functions with servant:
import Control.Monad ((<=<))
import Control.Monad.IO.Class (MonadIO)
import Control.Monad.Base (MonadBase)
import Database.Persist (Entity, Key, Unique, getBy)
import Database.Persist.Class (get, PersistStoreRead, PersistRecordBackend, PersistUniqueRead)
import Gonimo.Server.Error
import Control.Monad.Trans.Reader (ReaderT)
-- | Constraints shared by the key-based lookup helpers below.
type GetConstraint backend m a = (PersistStoreRead backend, MonadIO m, PersistRecordBackend a backend)
-- | Look up a record by key; respond with 'NotFound' when absent.
get404 :: (GetConstraint backend m a, MonadBase IO m) => Key a -> ReaderT backend m a
get404 = getErr NotFound
-- | Look up a record by key; throw the given 'ServerError' when absent.
getErr :: (GetConstraint backend m a, MonadBase IO m)
       => ServerError -> Key a -> ReaderT backend m a
getErr err = serverErrOnNothing err <=< get
-- | Look up a record by unique constraint; 'NotFound' when absent.
getBy404 :: (PersistUniqueRead backend, MonadIO m, PersistRecordBackend a backend, MonadBase IO m)
         => Unique a -> ReaderT backend m (Entity a)
getBy404 = getByErr NotFound
-- | Look up a record by unique constraint; throw the given
-- 'ServerError' when absent.
getByErr :: (PersistUniqueRead backend, MonadIO m, PersistRecordBackend a backend, MonadBase IO m)
         => ServerError -> Unique a -> ReaderT backend m (Entity a)
getByErr err = serverErrOnNothing err <=< getBy
-- | Lift a 'Maybe' into the handler monad, failing with the supplied
-- 'ServerError' on 'Nothing'.
serverErrOnNothing :: (MonadIO m, MonadBase IO m)
                   => ServerError -> Maybe a -> ReaderT backend m a
serverErrOnNothing err = maybe (throwServer err) return
| gonimo/gonimo-back | src/Gonimo/Database/Effects/Servant.hs | agpl-3.0 | 1,570 | 0 | 10 | 379 | 449 | 242 | 207 | -1 | -1 |
module Main where
import Data.Char
-- | DNA complement of one lower-case base; any other character passes
-- through unchanged.
dnaT :: Char -> Char
dnaT base = case base of
    'a' -> 't'
    't' -> 'a'
    'g' -> 'c'
    'c' -> 'g'
    _   -> base
-- | Transcribe one lower-case DNA base to its RNA complement; any
-- other character passes through unchanged.
rnaT :: Char -> Char
rnaT base = maybe base id (lookup base pairs)
  where
    pairs = [('t', 'a'), ('a', 'u'), ('g', 'c'), ('c', 'g')]
-- Complement a DNA string (case-insensitive via 'toLower').
dna_complement = map $ dnaT . toLower
-- Transcribe a DNA string to its complementary RNA strand.
dna2rna = map $ rnaT . toLower
-- Complement the input, transcribe it, and print the result.
main = putStrLn $ dna2rna . dna_complement $ "tc"
module Braxton.A284433Spec (main, spec) where
import Test.Hspec
import Braxton.A284433 (a284433)
main :: IO ()
main = hspec spec
-- | The first five terms of A284433 must match the published values.
spec :: Spec
spec = describe "A284433" $
  it "correctly computes the first 5 elements" $
    map a284433 [1 .. 5] `shouldBe` [1, 2, 6, 2, 18]
| peterokagey/haskellOEIS | test/Braxton/A284433Spec.hs | apache-2.0 | 318 | 0 | 10 | 59 | 115 | 65 | 50 | 10 | 1 |
import Controller (withClog)
import Network.Wai.Handler.Warp (run)
-- | Serve the Clog WAI application with Warp on port 3000.
main :: IO ()
main = withClog $ run 3000
| nkpart/clog | production.hs | bsd-2-clause | 109 | 0 | 6 | 17 | 43 | 24 | 19 | 4 | 1 |
import "hint" HLint.HLint
ignore "Reduce duplication"
ignore "Redundant lambda"
ignore "Use >=>"
ignore "Use const"
ignore "Avoid lambda"
ignore "Redundant flip"
ignore "Use let"
ignore "Redundant $"
| cartazio/monad-ste | Hlint.hs | bsd-2-clause | 201 | 0 | 5 | 28 | 55 | 20 | 35 | -1 | -1 |
{-# LANGUAGE CPP, OverloadedStrings, QuasiQuotes, TemplateHaskell, TypeOperators, ViewPatterns #-}
-- | YUI tools for Happstack.
--
-- Some of the examples assume you're using the @OverloadedStrings@ GHC
-- extension.
module Happstack.Server.YUI
( -- * Combo Handler
implYUISite
, YUISitemap(..)
, sitemap
, route
, showCSSComboURL
-- * CSS utilities
, gridUnit
, fontSize
-- * JS utilities
, createNode
-- * Bundle utilities
, isYUIFile
, readYUIFile
) where
import Prelude hiding ((.))
import qualified Data.ByteString as B
import qualified Data.Text as T
import qualified Web.Routes as WR
import Control.Category (Category((.)))
import Control.Monad (guard, liftM, void)
import Control.Monad.Trans (liftIO)
import Data.List (intercalate)
import Data.Ratio ((%), numerator,denominator)
import Data.Text.Encoding (encodeUtf8)
import Happstack.Server (Happstack, Response, neverExpires, setHeaderM, badRequest, ok, toResponse, guessContentTypeM, mimeTypes, lookPairs)
import Happstack.Server.Compression (compressedResponseFilter)
import Happstack.Server.JMacro ()
import Happstack.Server.YUI.Bundle (isYUIFile, readYUIFile)
import HSP (XML, renderAsHTML)
import Language.Javascript.JMacro (JStat(BlockStat), JExpr, jmacro, jmacroE, renderJs, jhFromList, toJExpr)
import Text.Boomerang.TH (derivePrinterParsers)
import Text.InterpolatedString.Perl6 (qq)
import Text.PrettyPrint (Style(mode), Mode(OneLineMode), renderStyle, style)
import Text.Printf (printf)
import Web.Routes (Site, RouteT)
import Web.Routes.Boomerang (Router, (:-), (<>), (</>), rList, anyText, eos, boomerangSiteRouteT)
import Web.Routes.Happstack (implSite)
import Web.Routes.TH (derivePathInfo)
#if !MIN_VERSION_template_haskell(2,7,0)
import Language.Javascript.JMacro (JStat(..), JExpr(..), JVal(..), Ident(..))
#endif
-- | The @web-routes@ sitemap for the handler that serves up the YUI
-- bundle. You can embed this in your own sitemap, something like:
--
-- >data Sitemap = YUI YUISitemap | Home
--
-- The version number of the bundled YUI release is included in the routes
-- for sake of cache-busting: the routes all respond with far-future
-- expiration dates.
--
-- A 'WR.PathInfo' instance is provided in case you're using @web-routes@
-- without @boomerang@. This isn't recommended for production, however,
-- since the YUI version number is not included in this case.
data YUISitemap
= SeedURL
-- ^ [@\/YUI_VERSION\/@]
-- The YUI seed file plus the configuration for using our own
-- combo loader.
| ComboURL
-- ^ [@\/YUI_VERSION\/combo@]
-- The combo loader.
| BundleURL [T.Text]
-- ^ [@\/YUI_VERSION\/bundle\/\<filename\>@]
-- Get an individual file without combo loading.
| ConfigURL
-- ^ [@\/YUI_VERSION\/config@]
-- The code for configuring YUI to use our own combo loader. Not needed
-- if you use the seed file mentioned above.
| CSSComboURL
-- ^ [@\/YUI_VERSION\/css@]
-- A specialized combo loader for CSS modules, for use in @\<link\/\>@
-- tags. Simply list the CSS modules in the query string by name rather
-- than file path, for example
-- @\"\/YUI_VERSION\/css?reset&base&fonts&grids\"@. Order matters;
-- you'll usually want reset first if you use it.
derivePathInfo ''YUISitemap
derivePrinterParsers ''YUISitemap
-- | A @boomerang@ 'Router' for 'YUISitemap'. If you embed the
-- @YUISitemap@ in your own, you can also embed this router in your own:
--
-- >import qualified Happstack.Server.YUI as Y
-- >sitemap = (rYUI . ("yui" </> Y.sitemap)) <> rHome
sitemap :: Router () (YUISitemap :- ())
sitemap =
YUI_VERSION_STR </>
( rComboURL . "combo"
<> rCSSComboURL . "css"
<> rBundleURL . "bundle" </> rList (anyText . eos)
<> rConfigURL . "config"
<> rSeedURL
)
site :: Happstack m => Site YUISitemap (m Response)
site = boomerangSiteRouteT route sitemap
-- | Mounts a handler for serving YUI. You can use this if you're not
-- using @web-routes@ in your own application. See 'YUISitemap' for the
-- routes the mounted handler responds to.
implYUISite :: Happstack m
=> T.Text -- ^ The URL of your application, e.g. @\"http:\/\/localhost:8000\"@.
-> T.Text -- ^ The path under which to mount the YUI handler, e.g. @\"/yui\"@.
-> m Response
implYUISite domain approot = implSite domain approot site
mkConfig :: Happstack m => RouteT YUISitemap m JStat
mkConfig = do
comboURL <- WR.showURL ComboURL
return [jmacro|
YUI.applyConfig { comboBase: `((T.unpack comboURL) ++ "?")`, root: "" }
|]
-- | Routes a 'YUISitemap' to its handler. If you embed @YUISitemap@ in
-- your own sitemap, you can use 'WR.nestURL' in your own routing function
-- to dispatch to this one:
--
-- >import qualified Happstack.Server.YUI as Y
-- >route (YUI url) = nestURL YUI (Y.route url)
route :: Happstack m => YUISitemap -> RouteT YUISitemap m Response
route url = do
    -- the YUI version is baked into every URL (see 'sitemap'), so
    -- responses may be cached forever; gzip when the client accepts it
    neverExpires
    void compressedResponseFilter
    case url of
      -- serve one bundled file verbatim; 'guard' falls through when
      -- the file is not in the bundle -- presumably surfacing as 404
      -- upstream, TODO confirm
      BundleURL (map T.unpack -> paths) ->
        do let name = intercalate "/" paths
           exists <- liftIO $ isYUIFile name
           guard exists
           mime <- guessContentTypeM mimeTypes name
           setHeaderM "Content-Type" mime
           bytes <- liftIO $ readYUIFile name
           ok . toResponse $ bytes
      -- combo loader: concatenate every file named in the query
      -- string; 400 if the list is empty or any name is unknown.
      -- 'head qs' is safe here: the 'null qs' branch was taken first.
      ComboURL ->
        do qs <- liftM (map fst) lookPairs
           exists <- liftIO $ mapM isYUIFile qs
           if null qs || any not exists
             then badRequest $ toResponse ()
             else do mime <- guessContentTypeM mimeTypes $ head qs
                     setHeaderM "Content-Type" mime
                     bytes <- liftIO $ mapM readYUIFile qs
                     ok $ toResponse $ B.concat bytes
      -- CSS combo loader: query-string names are module names, mapped
      -- to bundle paths by 'css' below
      CSSComboURL ->
        do qs <- liftM (map (css . fst)) lookPairs
           exists <- liftIO $ mapM isYUIFile qs
           if null qs || any not exists
             then badRequest $ toResponse ()
             else do setHeaderM "Content-Type" "text/css"
                     bytes <- liftIO $ mapM readYUIFile qs
                     ok $ toResponse $ B.concat bytes
      -- just the combo-loader configuration snippet
      ConfigURL ->
        do config <- mkConfig
           ok $ toResponse config
      -- the YUI seed file with the configuration appended
      SeedURL ->
        do config <- mkConfig
           seed <- liftIO $ readYUIFile "yui/yui-min.js"
           setHeaderM "Content-Type" "application/javascript"
           ok $ toResponse $ seed `B.append` (encode . render) config
  where
    -- render the generated JS on one line and encode it as UTF-8
    render = renderStyle (style { mode = OneLineMode }) . renderJs
    encode = encodeUtf8 . T.pack
    -- map a CSS module name to its minified path in the bundle
    css fn = "css" ++ fn ++ "/css" ++ fn ++ "-min.css"
-- | Helper for building a URL to 'CSSComboURL'.
--
-- >do cssURL <- showCSSComboURL YUI ["reset", "base", "fonts", "grids"]
-- > unXMLGenT
-- > <html>
-- > <head>
-- > <link href=cssURL rel="stylesheet"/>
-- > </head>
-- > </html>
showCSSComboURL :: WR.MonadRoute m
=> (YUISitemap -> WR.URL m) -- ^ Constructor for 'YUISitemap' inside your own sitemap.
-> [T.Text] -- ^ Names of CSS modules to include, in order.
-> m T.Text
showCSSComboURL yui ms =
WR.showURLParams (yui CSSComboURL) [(m,Nothing) | m <- ms]
-- | Gets the class name for the grid unit of the ratio of the two argument
-- integers. YUI doesn't define redundant classes like \"6\/24\" because
-- that is the same as 1\/4 and presumably for sake of a smaller CSS file.
-- This helper function handles that for you, though:
--
-- >>> gridUnit 6 24
-- "yui3-u-1-4"
-- >>> gridUnit 24 24
-- "yui3-u-1"
--
-- The intention is for this function to be used in templates to create
-- values for class attributes, for example with HSP:
--
-- ><div class=(gridUnit 6 24)>
-- > <% someContent %>
-- ></div>
gridUnit :: Integer -> Integer -> T.Text
-- Reduce n/d to lowest terms and render the class for the reduced
-- fraction; YUI only defines classes for fractions in lowest terms.
gridUnit n d = case (num, den) of
    (0, _) -> T.pack "yui3-u"
    (1, 1) -> T.pack "yui3-u-1"
    _      -> T.pack $ "yui3-u-" ++ show num ++ "-" ++ show den
  where
    ratio = n % d
    num   = numerator ratio
    den   = denominator ratio
-- | Converts a pixel size to a percentage suitable for use
-- with the CSS fonts module:
--
-- >>> fontSize 16
-- "123.1%"
--
-- Useful in generated stylesheets, for example with HSP:
--
-- ><style>
-- > h1 { font-size: <% fontSize 26 %> }
-- ></style>
fontSize :: Integer -> T.Text
-- The preset table reproduces YUI's published percentages for
-- 10px-26px; any other size is computed from the 13px == 100%
-- baseline and rendered with one decimal place.
fontSize px =
    case lookup px presets of
        Just pct -> T.pack pct
        Nothing  -> T.pack $ printf "%.1f%%" scaled
  where
    scaled :: Double
    scaled = fromIntegral px * (100 / 13)
    presets =
        [ (10, "77%"),    (11, "85%"),    (12, "93%")
        , (13, "100%"),   (14, "108%"),   (15, "116%")
        , (16, "123.1%"), (17, "131%"),   (18, "138.5%")
        , (19, "146.5%"), (20, "153.9%"), (21, "161.6%")
        , (22, "167%"),   (23, "174%"),   (24, "182%")
        , (25, "189%"),   (26, "197%")
        ]
-- | Creates a YUI Node object from XML created using HSP, for use with
-- JMacro. This generates less code than using the @hsx-jmacro@ package to
-- achieve the same effect, since it goes straight to YUI without directly
-- using the DOM itself. The first argument is the YUI object that gets
-- passed to the function you give to @YUI().use()@. Such variables are
-- available in antiquotation splices with JMacro:
--
-- @
--do html \<- unXMLGenT \<p>Hello, World!\</p>
-- ok [jmacro| YUI().use \"node\" \\y ->
-- y.one(\"body\").append(`(y ``createNode`` html`)) |]
-- @
createNode :: JExpr -> XML -> JExpr
createNode y xml = [jmacroE| `(y)`.Node.create(`(renderAsHTML xml)`) |]
| dag/happstack-yui | src/Happstack/Server/YUI.hs | bsd-2-clause | 9,852 | 0 | 17 | 2,485 | 1,741 | 982 | 759 | -1 | -1 |
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TupleSections #-}
{-
- Early parser for TAGs. Preliminary version.
-}
module NLP.LTAG.Early
(
-- * Traversal
Elem (..)
, Trav (..)
, treeTrav
, auxTrav
, toTree
-- * Grammar
, Grammar
-- * Chart state
, State (..)
, scan
, ignore
, subst
, adjoin
-- * Chart entry
, Entry
, final
, parsed
, showParsed
, printParsed
-- * Chart
, Chart
, lastEntry
, chartFinal
-- * Early
, early
) where
import Control.Applicative ((<$>), (<|>))
import Control.Monad (guard)
import Data.Maybe (mapMaybe)
import qualified Data.Set as S
import qualified Data.IntSet as IS
import qualified Data.IntMap as I
-- For parsing
import qualified Text.ParserCombinators.Poly.Plain as P
import qualified NLP.LTAG.Tree as G
-- import Debug.Trace (trace)
-- An alternative view of a TAG tree is a list of terminal or
-- non-terminal labels obtained with a traversal in which each
-- non-teminal occurs twice in the output -- once when the
-- subtree is entered, once when the traversal of the subtree is
-- done.
-- | Identifier of an elementary tree.
type ID = Int
--------------------------------------------------
-- TRAVERSAL ELEMENT
--------------------------------------------------
-- | A traversal element.
data Elem a b
= Leaf a
| Open a
| Close
| Term b
| Foot
deriving (Show, Eq, Ord)
-- | The terminal label, if the element is a 'Term'.
mayTerm :: Elem a b -> Maybe b
mayTerm (Term v) = Just v
mayTerm _        = Nothing
-- | The non-terminal label, if the element is a 'Leaf'.
mayLeaf :: Elem a b -> Maybe a
mayLeaf (Leaf x) = Just x
mayLeaf _        = Nothing
-- | The non-terminal label, if the element is an 'Open' tag.
mayOpen :: Elem a b -> Maybe a
mayOpen (Open x) = Just x
mayOpen _        = Nothing
-- | Is it a non-terminal closing tag?
isClose :: Elem a b -> Bool
isClose Close = True
isClose _     = False
-- | Is it an opening or closing non-terminal tag?
isTag :: Elem a b -> Bool
isTag (Open _) = True
isTag Close    = True
isTag _        = False
-- | Is it a foot node?
isFoot :: Elem a b -> Bool
isFoot Foot = True
isFoot _    = False
--------------------------------------------------
-- TRAVERSAL
--------------------------------------------------
-- | A traversal of a tree.
type Trav a b = [Elem a b]
-- | Get traversal of the given tree.
treeTrav :: G.Tree a b -> Trav a b
-- A childless internal node becomes a single 'Leaf' (substitution
-- site); one with children brackets the children's traversals with
-- Open/Close; a frontier node becomes its terminal.
treeTrav G.INode{..} = case subTrees of
    [] -> [Leaf labelI]
    _  ->
        let xs = concatMap treeTrav subTrees
        in Open labelI : xs ++ [Close]
treeTrav G.FNode{..} = [Term labelF]
-- | Get traversal of the given auxiliary tree.
auxTrav :: G.AuxTree a b -> Trav a b
-- Walk the tree following 'auxFoot' (a path of child indices); the
-- node the path ends at is emitted as Open/Foot/Close, subtrees off
-- the path are traversed with 'treeTrav'.
auxTrav G.AuxTree{..} =
    doit auxTree auxFoot
  where
    -- path exhausted at a childless internal node: this is the foot
    doit (G.INode labelI []) [] = [Open labelI, Foot, Close]
    doit G.INode{..} (k:ks) =
        -- recurse along the path only into child number k
        let onChild i subTree = if k == i
            then doit subTree ks
            else treeTrav subTree
            xs = concatMap (uncurry onChild) $ zip [0..] subTrees
        in Open labelI : xs ++ [Close]
    doit G.FNode{..} _ = [Term labelF]
    -- any other node/path combination is a malformed foot path
    doit _ _ = error "auxTrav: incorrect path"
-- | Make a tree from the traversal. The foot-node is
-- ignored for the moment.
toTree :: Trav a b -> G.Tree a b
-- Rebuild a tree by parsing the traversal with polyparse combinators:
-- a node is Open, a forest, then Close; errors if the traversal is
-- not well-formed.
toTree = doParse readTree
  where
    readTree = readNode <|> readTerm <|> readLeaf
    readNode = do
        x <- readOpen
        ts <- readForest
        readClose
        return $ G.INode x ts
    readOpen = mayNext mayOpen
    readClose = P.satisfy isClose
    readForest = P.many1 readTree
    readTerm = do
        t <- mayNext mayTerm
        return $ G.FNode t
    readLeaf = do
        x <- mayNext mayLeaf
        return $ G.INode x []
    -- run a parser to completion; any parse failure is fatal here
    doParse p xs = case fst (P.runParser p xs) of
        Left err -> error $ "toTree error: " ++ err
        Right x -> x
    -- consume one element accepted by the extractor 'may' and return
    -- the extracted value.  NB: the local 'pred' shadows Prelude.pred.
    mayNext may = do
        let pred t = case may t of
                Just _ -> True
                _ -> False
        t <- P.satisfy pred
        -- 'satisfy pred' guarantees 'may t' is Just, hence "impossible"
        return $ case may t of
            Nothing -> error "toTree: impossible!"
            Just x -> x
-- | Consume all subtrees and replace the closing non-terminal
-- with the given sequence.
replaceClose
    :: Trav a b -- ^ The sequence to be placed
    -> Trav a b -- ^ The sequence to be searched
    -> Trav a b
-- Scan for the Close matching the current node and splice 'new' in
-- its place, dropping the Close itself.
replaceClose new =
    go (0 :: Int)
  where
    -- k counts subtrees opened so far: a Close at depth 0 is the one
    -- matching the current node and gets replaced; deeper Closes are
    -- kept as-is
    go k (x:xs) = case x of
        Open _ -> x : go (k + 1) xs
        Close -> if k > 0
            then x : go (k - 1) xs
            else new ++ xs
        _ -> x : go k xs
    -- running out of input means the traversal was unbalanced
    go _ [] = error "replaceClose: empty list"
-- | Does the tree (represented as a traversal) has a specific
-- label in the root?
-- | Does the (reversed-order) tree traversal carry the given label in
-- its root?  The root's 'Open' tag is the last element of the
-- traversal as stored, i.e. the head after reversing.
hasInRoot :: Eq a => Trav a b -> a -> Bool
hasInRoot ts x = case reverse ts of
    (Open y : _) -> x == y
    _            -> False
--------------------------------------------------
-- GRAMMAR
--------------------------------------------------
-- | A grammar is just a set of traversal representations of
-- initial and auxiliary trees. Additionally, to each elementary
-- tree an index is assigned so that it can be checked if the
-- given tree has been already used in a derivation of the given
-- state.
type Grammar a b = I.IntMap (Trav a b)
--------------------------------------------------
-- CHART STATE ...
--
-- ... and chart extending operations: scan,
-- ignore, complement (subst + adjoin)
--------------------------------------------------
-- | Parsing state: processed traversal elements and the elements
-- yet to process.
data State a b = State {
-- | The list of processed elements of the tree.
-- Stored in the inverse order.
left :: Trav a b
-- | The set of indices of the rules used in `left' + the
-- indice of the elementary tree itself. In other words, the
-- set of indices of all the trees used in the derivation of
-- this particular state.
, ids :: IS.IntSet
-- | The list of elements yet to process.
, right :: Trav a b
} deriving (Show, Eq, Ord)
-- | The scan operation: read a symbol from the input if it is
-- consistent with the non-terminal in the state.
scan
:: Eq b
=> b -- ^ Terminal to scan
-> State a b -- ^ Within the context of the state
-> Maybe (State a b) -- ^ Output state
scan x st@State{..} = do
(r, rs) <- decoList right
y <- mayTerm r
guard $ x == y
return $ st {left = r : left, right = rs}
-- | Ignore the internal non-terminal -- no adjunction will take
-- place.
ignore :: State a b -> Maybe (State a b)
ignore st@State{..} = do
(r, rs) <- decoList right
guard $ isTag r
return $ st {left = r:left, right = rs}
-- | Complete a leaf non-terminal with a parsed tree.
subst
:: Eq a
=> State a b -- ^ The parsed tree
-> State a b -- ^ A state to complement
-> Maybe (State a b)
subst fin tre = do
-- Are you sure it's parsed?
guard $ null $ right fin
(treHead, treRest) <- decoList $ right tre
x <- mayLeaf treHead
guard $ noOverlap (ids fin) (ids tre)
guard $ left fin `hasInRoot` x -- TODO: inefficient!
return $ State
{ left = left fin ++ left tre
, ids = IS.union (ids fin) (ids tre)
, right = treRest }
-- | Try to complete an internal non-terminal with a partially
-- parsed auxiliary tree. Check if the tree is partially parsed
-- indeed and remove the foot node.
adjoin
:: Eq a
=> State a b -- ^ Partially parsed auxiliary tree
-> State a b -- ^ Tree to complement (adjoin)
-> Maybe (State a b)
adjoin aux tre = do
-- Check if the first element of the axuliary traversal
-- is a foot-node and skip it.
(auxHead, auxRest) <- decoList $ right aux
guard $ isFoot auxHead
-- Take the root label of the auxiliary tree (x) and the
-- internal label (y) of the tree to complement and check if
-- they match.
x <- mayOpen . fst =<< decoList (left aux)
(treHead, treRest) <- decoList $ right tre
y <- mayOpen treHead
guard $ x == y
-- Do not compose trees which have overlaping set of indices.
guard $ noOverlap (ids aux) (ids tre)
-- Construct the final result.
return $ State
{ left = left aux ++ left tre
, ids = IS.union (ids aux) (ids tre)
, right = replaceClose auxRest treRest }
--------------------------------------------------
-- ENTRY
--
-- As well as entry elements
--------------------------------------------------
-- | A chart entry is a set of states together with information
-- about where their corresponding spans begin.
type Entry a b = S.Set (State a b, Int)
-- | A new state based on the traversal.
mkStatePos :: Int -> (ID, Trav a b) -> (State a b, Int)
mkStatePos k (i, t) =
    ( State { left = [], ids = IS.singleton i, right = t }
    , k )
-- | Is it a final state/pos pair?
final :: (State a b, Int) -> Bool
final (st, i) = null (right st) && i == 0
-- | The parsed part of the state.
parsed :: (State a b, c) -> Trav a b
parsed (st, _) = left st
-- | Render the parsed part of the given state as a tree drawing.
showParsed :: (Show a, Show b) => (State a b, c) -> String
showParsed st = G.showTree' (toTree (reverse (parsed st)))
-- | Render and print the parsed part of the given state.
printParsed :: (Show a, Show b) => (State a b, c) -> IO ()
printParsed st = putStr (showParsed st)
--------------------------------------------------
-- CHART
--------------------------------------------------
-- | A chart is a map from indiced to chart entries.
type Chart a b = I.IntMap (Entry a b)
-- | X-th position of the chart.
(!?) :: Chart a b -> Int -> Entry a b
-- Total lookup is not needed here: a missing index is a programming
-- error, reported loudly.
chart !? k = maybe oops id (I.lookup k chart)
  where
    oops = error $ "!?: no such index in the chart"
-- | Retrieve the last entry of the chart. Error if chart is
-- empty.
lastEntry :: Chart a b -> Entry a b
lastEntry ch
    | I.null ch = error "lastEntry: null chart"
    | otherwise = snd (I.findMax ch)
-- | Show the final results of the early parsing.
chartFinal :: Chart a b -> [(State a b, Int)]
chartFinal ch = filter final (S.toList (lastEntry ch))
-- | Scan input w.r.t. all the states of the specific entry of
-- the chart. Once the Chart[i] is ready, we can run `scanAll`
-- on this chart just once to get the next chart entry, i.e.
-- Chart[i+1]. One has to remember to also add to Chart[i+1] all
-- the preliminary states (since we don't do prediction yet) at
-- some point.
scanAll
:: (Ord a, Ord b)
=> b -- ^ The next symbol on the input
-> Entry a b -- ^ Previous chart entry (Chart[i])
-> Entry a b -- ^ The scanned part of the entry Chart[i+1]
scanAll x curr =
let doit (st, k) = (,k) <$> scan x st
in S.fromList $ mapMaybe doit (S.toList curr)
-- | Extend the current entry by `ignore'ing the non-terminal internal
-- nodes where possible.  States produced this way may themselves be
-- ignorable again, so callers iterate this to a fixpoint.
ignoreAll
    :: (Ord a, Ord b)
    => Entry a b -- ^ The current chart entry
    -> Entry a b
ignoreAll curr = curr `S.union` S.fromList
    [ (st', k)
    | (st, k) <- S.toList curr
    , Just st' <- [ignore st] ]
-- | We try to complete states from previous chart entries given
-- the final (fully parsed) states from the current entry.  While
-- doing this we can obtain new final states and thus `substAll`
-- may be needed to run again.
substAll
    :: (Ord a, Ord b)
    => Entry a b    -- ^ The current chart entry Chart[i]
    -> Chart a b    -- ^ The chart with previous entries
    -> Entry a b
substAll curr chart
    = S.union curr $ S.fromList
    $ concatMap doit $ S.toList curr
  where
    doit (st, i) =
        -- We do substitution only with respect to completed
        -- parse trees (nothing left to parse on the right).
        if null (right st) then
            -- Substitution on some previous state from Chart[i]
            -- which starts on position j does not change its
            -- position.
            let substOn (xs, j) = (,j) <$> subst st xs
            -- Below we know, that <i> refers to some previous
            -- entry and not the current state because each tree
            -- spans over at least one non-terminal.
            in  mapMaybe substOn $ S.toList $ chart !? i
        -- Otherwise: no new states.
        else []
-- | We try to complete states from previous chart entries given
-- the partially parsed auxiliary trees from the current entry.
adjoinAll
    :: (Ord a, Ord b)
    => Entry a b    -- ^ The current chart entry Chart[i]
    -> Chart a b    -- ^ The chart with previous entries
    -> Entry a b
adjoinAll curr chart
    = S.union curr $ S.fromList $ concatMap doit
    $ mapMaybe getRelevantAux $ S.toList curr
  where
    -- Check if the tree is relevant -- partially parsed (up to
    -- the foot node) auxiliary tree, i.e. the next symbol to the
    -- right is the foot.
    getRelevantAux st@(State{..}, _) = do
        (r, _) <- decoList right
        guard $ isFoot r
        return st
    doit (aux, i) =
        -- Adjoin on some previous state from Chart[i] which
        -- starts on position j does not change its position.
        let adjoinOn (st, j) = (,j) <$> adjoin aux st
            -- TODO: this is kind of dangerous! We assume here
            -- that <i> values are always correct and thus, if
            -- they are not in the chart, they refer to the
            -- current entry.
            entry = case I.lookup i chart of
                Just e  -> e
                Nothing -> curr
        in  mapMaybe adjoinOn $ S.toList $ entry
-- | Run one round of the ignore, subst and adjoin operations over
-- the current entry of the chart.
updateOnce
    :: (Ord a, Ord b)
    => Entry a b -- ^ The current chart entry Chart[i]
    -> Chart a b -- ^ The chart with previous entries
    -> Entry a b
updateOnce curr chart =
    adjoinAll (substAll (ignoreAll curr) chart) chart
-- | Iterate `updateOnce' for as long as the entry keeps growing
-- (a fixpoint computation; each pass can only add states).
updateLoop
    :: (Ord a, Ord b)
    => Entry a b -- ^ The current chart entry Chart[i]
    -> Chart a b -- ^ The chart with previous entries
    -> Entry a b
updateLoop curr chart
    | S.size next > S.size curr = updateLoop next chart
    | otherwise                 = next
  where
    next = updateOnce curr chart
-- | Perform early parsing.  Entry 0 is seeded with all grammar trees
-- anchored at position 0 and closed under the update operations;
-- `earlyStep' then processes the input symbol by symbol.
early
    :: (Ord a, Ord b)
    => Grammar a b  -- ^ Grammar
    -> [b]          -- ^ Input
    -> Chart a b
early gram sent = earlyStep gram sent 1 $ I.singleton 0 $
    let new = S.fromList $ map (mkStatePos 0) $ I.toList gram
    in updateLoop new I.empty
-- | Early parsing step: build Chart[k] from the scan of Chart[k-1]
-- plus fresh grammar states anchored at k, closed via `updateLoop',
-- then recurse on the rest of the input.
earlyStep
    :: (Ord a, Ord b)
    => Grammar a b  -- ^ Grammar
    -> [b]          -- ^ Input still to process
    -> Int          -- ^ Current position
    -> Chart a b    -- ^ Previous entries
    -> Chart a b    -- ^ With new entry
earlyStep gram (x:xs) k chart =
    earlyStep gram xs (k+1) $ I.insert k entry chart
  where
    entry   = updateLoop (new `S.union` scanned) chart
    new     = S.fromList $ map (mkStatePos k) $ I.toList gram
    scanned = scanAll x (chart !? (k-1))
earlyStep _ [] _ chart = chart
--------------------------------------------------
-- UTILITIES
--------------------------------------------------
-- | Deconstruct a list into its head and tail. Utility function.
decoList :: [a] -> Maybe (a, [a])
decoList xs = case xs of
    []     -> Nothing
    (y:ys) -> Just (y, ys)
-- | Do the two IntSets share no element?
noOverlap :: IS.IntSet -> IS.IntSet -> Bool
noOverlap a b = IS.null (a `IS.intersection` b)
| kawu/ltag | src/NLP/LTAG/Early.hs | bsd-2-clause | 15,632 | 0 | 15 | 4,420 | 3,936 | 2,078 | 1,858 | 304 | 5 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
module Lang.Php.Ast.Common
(
module Data.Binary
, module Data.Char
, module Data.Data
, module Data.List
, module Data.Maybe
, module Common
, module Parse
, module Unparse
, module Lang.Php.Ast.WS
) where
import Data.Binary
import Data.Binary.Generic
import Data.Char
import Data.Data hiding (Infix, Prefix)
import Data.List
import Data.Maybe
import Common
import Parse
import Unparse
import Lang.Php.Ast.WS
-- | Blanket 'Binary' instance for every type with a 'Data' instance,
-- implemented via the generic traversals from "Data.Binary.Generic".
--
-- NOTE(review): this is a universal instance (hence the
-- @FlexibleInstances@/@UndecidableInstances@ pragmas above) and will
-- overlap with any hand-written 'Binary' instance brought into scope;
-- presumably intentional for this code base -- confirm before reuse.
instance (Data a) => Binary a where
  get = getGeneric
  put = putGeneric
| facebookarchive/lex-pass | src/Lang/Php/Ast/Common.hs | bsd-3-clause | 607 | 0 | 6 | 130 | 145 | 95 | 50 | 26 | 0 |
module Data.GraphQL.XXX.Schema.Info
( analyzeSchemaInfo
, getAnalyzedStatements
, SchemaInfo(..)
, emptySchemaInfo
, lookupField
) where
import Control.Monad
import Control.Monad.State
import qualified Data.Map as Map
import Data.Maybe
import qualified Data.Set as Set
import Debug.Trace
import Data.GraphQL.XXX.Schema.AST
-- | Traversal context threaded through the analysis: the accumulated
-- 'SchemaInfo' plus the statement/field/argument currently being
-- visited (if any).
data Ctx = Ctx
  { schemaInfo :: SchemaInfo      -- ^ Accumulated analysis results.
  , statement  :: Maybe Statement -- ^ Statement currently under analysis.
  , field      :: Maybe Field     -- ^ Field currently under analysis.
  , argument   :: Maybe Argument  -- ^ Argument currently under analysis.
  }
-- | A context with empty schema info and no current position.
emptyCtx :: Ctx
emptyCtx = Ctx emptySchemaInfo Nothing Nothing Nothing
-- | Read the accumulated schema info out of the traversal state.
getSchemaInfo :: State Ctx SchemaInfo
getSchemaInfo = gets schemaInfo

-- | Overwrite the accumulated schema info in the traversal state.
putSchemaInfo :: SchemaInfo -> State Ctx ()
putSchemaInfo ai = modify $ \ctx -> ctx { schemaInfo = ai }
-- | Mark the given statement as the one currently being analyzed.
setStatement :: Statement -> State Ctx ()
setStatement g = modify $ \ctx -> ctx { statement = Just g }

-- | Clear the current-statement marker.
resetStatement :: State Ctx ()
resetStatement = modify $ \ctx -> ctx { statement = Nothing }

-- | Mark the given field as the one currently being analyzed.
setField :: Field -> State Ctx ()
setField f = modify $ \ctx -> ctx { field = Just f }

-- | Clear the current-field marker.
resetField :: State Ctx ()
resetField = modify $ \ctx -> ctx { field = Nothing }

-- | Mark the given argument as the one currently being analyzed.
setArgument :: Argument -> State Ctx ()
setArgument a = modify $ \ctx -> ctx { argument = Just a }

-- | Clear the current-argument marker.
resetArgument :: State Ctx ()
resetArgument = modify $ \ctx -> ctx { argument = Nothing }
-- | Everything the analysis learns about a schema.  The @get*@ sets
-- are rebuilt by the reset/analyze passes; the two name tables are
-- used for de-duplication during traversal.
data SchemaInfo = SchemaInfo
  { getOriginalStatements :: [Statement]    -- ^ Statements as parsed, in order.
  , getStatements :: Set.Set Statement      -- ^ De-duplicated statements.
  , getEnumNames :: Set.Set String          -- ^ All enum value names seen.
  , getFieldNames :: Set.Set String         -- ^ All field names seen.
  , getTypeNames :: Set.Set String          -- ^ All type names seen.
  , getEnums :: Set.Set Statement           -- ^ Enum definitions.
  , getInputs :: Set.Set Statement          -- ^ Input definitions.
  , getInterfaces :: Set.Set Statement      -- ^ Interface definitions.
  , getObjects :: Set.Set Statement         -- ^ Object definitions.
  , getScalars :: Set.Set Statement         -- ^ Scalar definitions.
  , getUnions :: Set.Set Statement          -- ^ Union definitions.
  , getRelations :: [(TypeName, TypeName)]  -- ^ Bidirectional object links.
  , getReferences :: [(TypeName, TypeName)] -- ^ One-way object links.
  , getScalarMap :: [(TypeName, String)]    -- ^ Scalar name -> target type.
  } deriving (Show)
-- | A 'SchemaInfo' with every collection empty.  Record syntax is
-- used instead of the 14-argument positional constructor so a field
-- reorder or addition cannot silently shift the initial values.
emptySchemaInfo :: SchemaInfo
emptySchemaInfo = SchemaInfo
  { getOriginalStatements = []
  , getStatements         = Set.empty
  , getEnumNames          = Set.empty
  , getFieldNames         = Set.empty
  , getTypeNames          = Set.empty
  , getEnums              = Set.empty
  , getInputs             = Set.empty
  , getInterfaces         = Set.empty
  , getObjects            = Set.empty
  , getScalars            = Set.empty
  , getUnions             = Set.empty
  , getRelations          = []
  , getReferences         = []
  , getScalarMap          = []
  }
-- | Run the full analysis pipeline over a parsed schema.  The pass
-- order matters: statements are collected (twice, with a reset in
-- between so categorised sets reflect the final name tables), then
-- interfaces, relations and objects are resolved in that order.
analyzeSchemaInfo :: SchemaInfo -> SchemaInfo
analyzeSchemaInfo info =
  schemaInfo $
    execState f $
      emptyCtx
      { schemaInfo = info
      }
  where
    f = do
      resetStatements
      analyzeStatements
      resetStatements
      analyzeInterfaces
      analyzeRelations
      analyzeObjects
-- | Re-emit the original statements, with each definition replaced by
-- its analyzed version (looked up in the categorised sets) when one
-- exists; unions and anything unrecognised pass through unchanged.
getAnalyzedStatements :: SchemaInfo -> [Statement]
getAnalyzedStatements ai =
  map stmt $ getOriginalStatements ai
  where
    stmt g@EnumDefinition{} =
      lookupEnum_ ai g
    stmt g@InputDefinition{} =
      lookupInput_ ai g
    stmt g@InterfaceDefinition{} =
      lookupInterface_ ai g
    stmt g@ObjectDefinition{} =
      lookupObject_ ai g
    stmt g@ScalarDefinition{} =
      lookupScalar_ ai g
    stmt g = g
{------------------------------------------------------------------------------}
-- | Run the 'analyzeType' pass over every original statement.
analyzeStatements :: State Ctx ()
analyzeStatements = do
  ai <- getSchemaInfo
  mapM_ (iterateStatement analyzeType) (getOriginalStatements ai)
-- | Throw away all derived information (keeping only the original
-- statements and the scalar map) and re-run the 'reset' pass, which
-- re-populates the per-category statement sets.
resetStatements :: State Ctx ()
resetStatements = do
  ai <- getSchemaInfo
  putSchemaInfo emptySchemaInfo
    { getOriginalStatements = getOriginalStatements ai
    , getScalarMap = getScalarMap ai
    }
  mapM_ (iterateStatement reset) (getOriginalStatements ai)
-- | Visit one statement with the given pass, skipping statements whose
-- type name has already been seen (de-duplication).  The state read at
-- the top is still current inside the 'unless' block, so the previous
-- shadowing re-read of the schema info was redundant and is gone.
iterateStatement :: (Statement -> State Ctx ()) -> Statement -> State Ctx ()
iterateStatement f g = do
  ai <- getSchemaInfo
  unless (Set.member (getName g) (getTypeNames ai)) $ do
    putSchemaInfo ai
      { getStatements = Set.insert g (getStatements ai)
      , getTypeNames = Set.insert (getName g) (getTypeNames ai)
      }
    f g
{------------------------------------------------------------------------------}
-- | The analysis passes a schema node can participate in.
class Analyzable a where
  -- | Categorise the node into the per-kind statement sets.
  reset :: a -> State Ctx ()
  -- | Collect the enum/field/type names occurring in the node.
  analyzeType :: a -> State Ctx ()
  -- | Resolve interface definitions (merge implementing objects).
  analyzeInterface :: a -> State Ctx ()
  -- | Rewrite types and relations inside the node, returning it.
  analyzeObject :: SchemaInfo -> a -> State Ctx a
instance Analyzable Statement where
  {- reset:
     file each definition into the set for its kind. -}
  reset g@(EnumDefinition t es) = do
    ai <- getSchemaInfo
    putSchemaInfo ai { getEnums = Set.insert g (getEnums ai) }
  reset g@(InputDefinition t fs) = do
    ai <- getSchemaInfo
    putSchemaInfo ai { getInputs = Set.insert g (getInputs ai) }
  reset g@(InterfaceDefinition t _ fs) = do
    ai <- getSchemaInfo
    putSchemaInfo ai { getInterfaces = Set.insert g (getInterfaces ai) }
  reset g@(ObjectDefinition t i fs) = do
    ai <- getSchemaInfo
    putSchemaInfo ai { getObjects = Set.insert g (getObjects ai) }
  reset g@(ScalarDefinition t) = do
    ai <- getSchemaInfo
    putSchemaInfo ai { getScalars = Set.insert g (getScalars ai) }
  reset g@(UnionDefinition t ns) = do
    ai <- getSchemaInfo
    putSchemaInfo ai { getUnions = Set.insert g (getUnions ai) }
  {- analyzeType:
     recurse into the children to collect their names. -}
  analyzeType g@(EnumDefinition t es) =
    mapM_ analyzeType es
  analyzeType g@(InputDefinition t fs) =
    mapM_ analyzeType fs
  analyzeType g@(InterfaceDefinition t _ fs) =
    mapM_ analyzeType fs
  analyzeType g@(ObjectDefinition t i fs) =
    mapM_ analyzeType fs
  analyzeType g@(ScalarDefinition t) =
    return () -- XXX
  analyzeType g@(UnionDefinition t ns) =
    mapM_ analyzeType ns
  {- analyzeInterface:
     only interface definitions are rewritten; everything else is a no-op. -}
  analyzeInterface g@(InterfaceDefinition t _ fs) = do
    ai <- getSchemaInfo
    let
      g' = analyzeInterface' ai g
    putSchemaInfo ai { getInterfaces = Set.insert g' (getInterfaces ai) }
  analyzeInterface _ =
    pure ()
  {- analyzeObject:
     rewrite the fields of input/interface/object definitions, keeping
     the current statement in the context while descending. -}
  analyzeObject ai g@(InputDefinition t fs) = do
    setStatement g
    g' <- InputDefinition t <$>
      mapM (analyzeObject ai) fs
    resetStatement
    return g'
  analyzeObject ai g@(InterfaceDefinition t ts fs) = do
    setStatement g
    g' <- InterfaceDefinition t ts <$>
      mapM (analyzeObject ai) fs
    resetStatement
    return g'
  analyzeObject ai g@(ObjectDefinition t i fs) = do
    setStatement g
    g' <- ObjectDefinition t i <$>
      mapM (analyzeObject ai) fs
    resetStatement
    return g'
  analyzeObject ai g =
    pure g
instance Analyzable Argument where
  {- reset: arguments carry no statement-level information. -}
  reset _ =
    pure ()
  {- analyzeType: only the argument's field name is collected. -}
  analyzeType (Argument f t b) =
    analyzeType f
  {- analyzeInterface: nothing to do at argument level. -}
  analyzeInterface _ =
    pure ()
  {- analyzeObject: rewrite the argument's type via 'resetType'. -}
  analyzeObject ai a@(Argument an at ann) =
    pure $
      Argument an (resetType ai at) ann
--      Argument an (resetType ai (trace ("resetType: "++show at++" -> "++show (resetType ai at)) at)) ann
instance Analyzable Field where
  {- reset: fields carry no statement-level information. -}
  reset _ =
    pure ()
  {- analyzeType: collect names from the arguments, then the field name. -}
  analyzeType (Field f as t b _) = do
    mapM_ analyzeType as
    analyzeType f
  {- analyzeInterface: nothing to do at field level. -}
  analyzeInterface _ =
    pure ()
  {- analyzeObject: rewrite argument types and the field type, and
     recompute the field's relation against the enclosing statement
     (taken from the traversal context). -}
  analyzeObject ai f@(Field fn fargs ft fnn _) = do
    ctx <- get
    fargs' <- mapM (analyzeObject ai) fargs
    let ft' = resetType ai ft
    let frel' = setRelation ai ft' (statement ctx)
    pure $
      Field fn fargs' ft' fnn frel'
instance Analyzable InputField where
  {- reset: input fields carry no statement-level information. -}
  reset _ =
    pure ()
  {- analyzeType: only the input field's name is collected. -}
  analyzeType (InputField ifn _ _) =
    analyzeType ifn
  {- analyzeInterface: nothing to do at input-field level. -}
  analyzeInterface _ =
    pure ()
  {- analyzeObject: rewrite the input field's type via 'resetType'. -}
  analyzeObject ai f@(InputField ifn ift ifnn) = do
    let ift' = resetType ai ift
    pure $
      InputField ifn ift' ifnn
-- The three name instances below only contribute to 'analyzeType':
-- each records its name in the corresponding name table.
instance Analyzable EnumName where
  {- reset -}
  reset _ =
    pure ()
  {- analyzeType: record the enum value name. -}
  analyzeType e = do
    ai <- getSchemaInfo
    putSchemaInfo ai { getEnumNames = Set.insert (getName e) (getEnumNames ai) }
  {- analyzeInterface -}
  analyzeInterface _ =
    pure ()
  {- analyzeObject -}
  analyzeObject ai =
    pure
instance Analyzable FieldName where
  {- reset -}
  reset _ =
    pure ()
  {- analyzeType: record the field name. -}
  analyzeType f = do
    ai <- getSchemaInfo
    putSchemaInfo ai { getFieldNames = Set.insert (getName f) (getFieldNames ai) }
  {- analyzeInterface -}
  analyzeInterface _ =
    pure ()
  {- analyzeObject -}
  analyzeObject ai =
    pure
instance Analyzable TypeName where
  {- reset -}
  reset _ =
    pure ()
  {- analyzeType: record the type name. -}
  analyzeType t = do
    ai <- getSchemaInfo
    putSchemaInfo ai { getTypeNames = Set.insert (getName t) (getTypeNames ai) }
  {- analyzeInterface -}
  analyzeInterface _ =
    pure ()
  {- analyzeObject -}
  analyzeObject ai =
    pure
{------------------------------------------------------------------------------}
-- | Run the 'analyzeInterface' pass over every original statement.
analyzeInterfaces :: State Ctx ()
analyzeInterfaces = do
  ai <- getSchemaInfo
  mapM_ analyzeInterface (getOriginalStatements ai)
-- Rewrite an interface definition: attach the list of implementing
-- types and merge in the extra fields of each implementor.
-- NOTE(review): no type signature; it is @SchemaInfo -> Statement ->
-- Statement@ judging by its use in 'analyzeInterface' above, and both
-- it and 'filterFields' are partial (non-interface / non-object
-- arguments crash) -- apparently relied upon by the callers.
analyzeInterface' ai g@(InterfaceDefinition t _ fs) =
  InterfaceDefinition t (interfaceTypes ai t) (collectFields g ai)
  where
    collectFields :: Statement -> SchemaInfo -> [Field]
    collectFields i@(InterfaceDefinition t _ fs) ai =
      mergeFields fs $
        map filterFields $
          filter (isInterfaceOf i) (getOriginalStatements ai)
    filterFields :: Statement -> [Field]
    filterFields (ObjectDefinition xt (Just xi) xfs) = xfs
    mergeFields :: [Field] -> [[Field]] -> [Field]
    mergeFields fs moreFields = fs ++ concatMap (diffFields fs) moreFields
    -- XXX No check: assumes each implementor repeats the interface's
    -- fields first, so the shared prefix is just dropped by length.
    diffFields :: [Field] -> [Field] -> [Field]
    diffFields o = drop (length o)
-- | All type names of objects implementing the named interface.
interfaceTypes :: SchemaInfo -> TypeName -> [TypeName]
interfaceTypes ai name =
  map (TypeName . getName) $
    filter (isInterfaceOf (InterfaceDefinition name [] [])) $
      getOriginalStatements ai
{------------------------------------------------------------------------------}
-- | Run the 'analyzeObject' pass over all collected objects and
-- interfaces, storing the rewritten versions back into the info.
analyzeObjects :: State Ctx ()
analyzeObjects = do
  ai <- getSchemaInfo
  os <- mapM (analyzeObject ai) (Set.toList (getObjects ai))
  is <- mapM (analyzeObject ai) (Set.toList (getInterfaces ai))
  putSchemaInfo ai
    { getObjects = Set.fromList os
    , getInterfaces = Set.fromList is
    }
  return ()
-- | Re-classify a raw 'Object' type: a name that denotes an enum
-- becomes 'Enum', a declared scalar becomes 'Scalar' (mapped through
-- the scalar map, defaulting to "string"), anything else stays an
-- 'Object' with its interface (if any) attached.  Lists of objects
-- are rewritten element-wise; all other types pass through.
resetType :: SchemaInfo -> Type -> Type
resetType ai (List t@Object{}) =
  List (resetType ai t)
resetType ai t@(Object x _ b)
  | isJust (lookupEnum ai x) =
      Enum x
  | isJust (lookupScalar ai x) =
      Scalar
        x
        (fromMaybe "string" (lookupScalarMap ai x)) -- XXX Scalar
  | otherwise =
      Object
        x
        (fromMaybe x (lookupInterfaceByObject ai x))
        b
resetType ai t =
  t
-- | Compute the 'Relation' for a field whose type is a list of
-- objects: the forward target of the element type plus, when the
-- enclosing statement is known, the backward relation from it.
-- Non-list (and non-object) field types have no relation.
setRelation :: SchemaInfo -> Type -> Maybe Statement -> Maybe Relation
setRelation ai (List (Object u _ _)) mg =
  Just $ Relation
    (lookupRelationForward ai u)
    (do
      g <- mg
      lookupRelation ai u (TypeName $ getName g))
setRelation _ _ _ =
  Nothing
{------------------------------------------------------------------------------}
{-
When:
type Order {
bike Bike
}
type Bike {
orders [Order]
}
  - Then there is a relation of (Order -> Bike).
  - Order's table (datastore) records a pointer (ID string) to Bike.
  - Bike's orders are looked up by matching a Bike ID string against
    the Order table's bike value.
-}
-- | Compute the relation and reference tables from the collected
-- object definitions and store them in the schema info.
analyzeRelations :: State Ctx ()
analyzeRelations = do
  ai <- getSchemaInfo
  let objects = Set.toList (getObjects ai)
  putSchemaInfo ai
    { getRelations = collectRelations ai objects
    , getReferences = collectReferences ai objects
    }
-- | All ordered pairs of objects linked in both directions: A has a
-- singular field of type B and B has a list field of type A.
collectRelations :: SchemaInfo -> [Statement] -> [(TypeName, TypeName)]
collectRelations ai gs =
  [ (z ga, z gb)
  | ga <- gs
  , gb <- gs
  , isRelatedForward ga gb
  , isRelatedBackward ga gb
  , isJust (lookupObject ai (z ga))
  , isJust (lookupObject ai (z gb))
  ]
  where
    z = TypeName . getName
-- | All ordered pairs of objects linked forward only (no list field
-- pointing back), excluding the Query/Mutation roots.
collectReferences :: SchemaInfo -> [Statement] -> [(TypeName, TypeName)]
collectReferences ai gs =
  [ (z ga, z gb)
  | ga <- gs
  , gb <- gs
  , isRelatedForward ga gb
  , not (isRelatedBackward ga gb)
  , isJust (lookupObject ai (z ga))
  , isJust (lookupObject ai (z gb))
  , getName ga /= "Query"
  , getName ga /= "Mutation"
  ]
  where
    z = TypeName . getName
-- Order has `bike Bike'
-- | True iff the first object has a singular field whose type names
-- the second object (a forward link, e.g. @bike :: Bike@ on @Order@).
isRelatedForward :: Statement -> Statement -> Bool
isRelatedForward (ObjectDefinition _ _ fsa) (ObjectDefinition tb _ _) =
  any p fsa
  where
    p (Field _ _ (Object ua _ _) _ _) = getName tb == getName ua
    p _ = False
-- Defensive catch-all: callers only pass object definitions (see
-- 'analyzeRelations'), but a non-object argument should not crash.
isRelatedForward _ _ = False
-- Bike has `orders [Order]'
-- | True iff the second object has a list field whose element type
-- names the first object (a backward link, e.g. @orders :: [Order]@).
isRelatedBackward :: Statement -> Statement -> Bool
isRelatedBackward (ObjectDefinition ta _ _) (ObjectDefinition _ _ fsb) =
  any p fsb
  where
    p (Field _ _ (List (Object ub _ _)) _ _) = getName ta == getName ub
    p _ = False
isRelatedBackward _ _ = False
{------------------------------------------------------------------------------}
-- The lookup* functions below all follow the same scheme: probe the
-- relevant set with a dummy definition carrying only the type name.
-- NOTE(review): this relies on the 'Ord'/'Eq' instances of 'Statement'
-- comparing definitions by constructor and name only (ignoring the
-- field lists) -- confirm against Schema.AST before changing them.
lookupInput :: SchemaInfo -> TypeName -> Maybe Statement
lookupInput ai t = do
  idx <- Set.lookupIndex (InputDefinition t []) (getInputs ai)
  return $ Set.elemAt idx (getInputs ai)
-- Total variant: fall back to the probe statement itself.
lookupInput_ ai g =
  fromMaybe g (lookupInput ai (TypeName $ getName g))
lookupObject :: SchemaInfo -> TypeName -> Maybe Statement
lookupObject ai t = do
  idx <- Set.lookupIndex (ObjectDefinition t Nothing []) (getObjects ai)
  return $ Set.elemAt idx (getObjects ai)
lookupObject_ ai g =
  fromMaybe g (lookupObject ai (TypeName $ getName g))
lookupInterface :: SchemaInfo -> TypeName -> Maybe Statement
lookupInterface ai t = do
  idx <- Set.lookupIndex (InterfaceDefinition t [] []) (getInterfaces ai)
  return $ Set.elemAt idx (getInterfaces ai)
lookupInterface_ ai g =
  fromMaybe g (lookupInterface ai (TypeName $ getName g))
-- | The interface implemented by the named object, if any.
lookupInterfaceByObject :: SchemaInfo -> TypeName -> Maybe TypeName
lookupInterfaceByObject ai t = do
  (ObjectDefinition _ i _) <- lookupObject ai t
  i
lookupEnum :: SchemaInfo -> TypeName -> Maybe Statement
lookupEnum ai t = do
  idx <- Set.lookupIndex (EnumDefinition t []) (getEnums ai)
  return $ Set.elemAt idx (getEnums ai)
lookupEnum_ ai g =
  fromMaybe g (lookupEnum ai (TypeName $ getName g))
lookupScalar :: SchemaInfo -> TypeName -> Maybe Statement
lookupScalar ai t = do
  idx <- Set.lookupIndex (ScalarDefinition t) (getScalars ai)
  return $ Set.elemAt idx (getScalars ai)
lookupScalar_ ai g =
  fromMaybe g (lookupScalar ai (TypeName $ getName g))
-- | The configured backing type for a declared scalar, if any.
lookupScalarMap :: SchemaInfo -> TypeName -> Maybe String
lookupScalarMap ai x =
  Map.lookup x $
    Map.fromList (getScalarMap ai) -- XXX Scalar
-- | The forward relation target for a type: if the type is an
-- interface, the candidates are its implementing objects, otherwise
-- the type itself.  Returns the first match, 'Nothing' when there is
-- none ('listToMaybe' replaces the partial @head@).
lookupRelationForward :: SchemaInfo -> TypeName -> Maybe TypeName
lookupRelationForward ai i =
  listToMaybe
    [ snd r
    | i' <- if null is then [i] else is
    , r <- getRelations ai
    , i' == fst r
    ]
  where
    is = interfaceTypes ai i
-- | The backward relation from @u@ to @i@ (or to one of @i@'s
-- implementing objects when @i@ is an interface).  Returns the first
-- match, 'Nothing' when there is none ('listToMaybe' replaces the
-- partial @head@).
lookupRelation :: SchemaInfo -> TypeName -> TypeName -> Maybe TypeName
lookupRelation ai u i =
  listToMaybe
    [ snd r
    | i' <- if null is then [i] else is
    , r <- getRelations ai
    , u == fst r
    , i' == snd r
    ]
  where
    is = interfaceTypes ai i
-- | The reference (one-way link) target for a type; like
-- 'lookupRelationForward' but against the reference table.
-- 'listToMaybe' replaces the partial @head@.
lookupReference :: SchemaInfo -> TypeName -> Maybe TypeName
lookupReference ai i =
  listToMaybe
    [ snd r
    | i' <- if null is then [i] else is
    , r <- getReferences ai
    , i' == fst r
    ]
  where
    is = interfaceTypes ai i
-- | Look up a field by name inside the named object definition.
-- Probes a set built from the field list with a dummy field carrying
-- only the name (relies on 'Field's name-based ordering).  The set is
-- now built once instead of twice, and the unused interface binding
-- is discarded.
lookupField :: SchemaInfo -> TypeName -> FieldName -> Maybe Field
lookupField ai tn fn = do
  (ObjectDefinition _ _ fs) <- lookupObject ai tn
  let fset = Set.fromList fs
  idx <- Set.lookupIndex (Field fn [] Int False Nothing) fset
  return (Set.elemAt idx fset)
| uebayasi/haskell-graphql-schema | src/Data/GraphQL/XXX/Schema/Info.hs | bsd-3-clause | 17,140 | 0 | 15 | 5,395 | 5,191 | 2,583 | 2,608 | 438 | 6 |
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE NamedFieldPuns #-}
-- | This TextureAtlas structure is responsible for the packing of small
-- regions into a bigger texture. It is based on the skyline bottom-left
-- algorithm which appears to be well suited for storing glyphs.
module Graphics.UI.Font.TextureAtlas
( TextureAtlas (..)
, newAtlas
, deleteTexture
, uploadTexture
, setRegion
, merge
, getRegion
, clear
)
where
import Control.Monad (when)
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
import qualified Data.List as List
import Graphics.Rendering.OpenGL.GL (($=))
import qualified Graphics.Rendering.OpenGL.GL as GL
import Graphics.UI.Font.Common
import Graphics.UI.Font.Types
-- Side length (in texels) of the reserved "specials" patch carved out
-- of every atlas for background/underline decorations.
specialsDim :: Int
specialsDim = 4
-- | Create a new atlas
newAtlas ::
     Int -- ^ Width of the atlas texture
  -> Int -- ^ Height of the atlas texture
  -> Int -- ^ Color depth, should be 1 or 3
  -> TextureAtlas
newAtlas
  width
  height
  depth
  | depth == 1 || depth == 3 = atlas3
  where
    -- Single skyline node spanning the whole (empty) atlas.
    node = Node 0 0 width
    atlas = TextureAtlas
      { atlasNodes = [node]
      , atlasUsed = 0
      , atlasWidth = width
      , atlasHeight = height
      , atlasDepth = depth
      , atlasTexId = GL.TextureObject (-1)  -- invalid id until uploaded
      , atlasImgData = B.replicate (width * height * depth) 0
      , atlasBlack = undefined  -- filled in just below
      }
    -- This is a special region that is used for background and underlined
    -- decorations of glyphs: a solid white specialsDim x specialsDim patch.
    -- NOTE(review): the buffer is specialsDim^2 bytes and the stride
    -- passed to 'setRegion' is specialsDim, which matches depth 1; for
    -- depth 3 each row would need specialsDim*3 bytes -- confirm intended.
    buffer = B.replicate (specialsDim^(2::Int)) 255
    (atlas',r@(Region x y w h)) =
      getRegion atlas specialsDim specialsDim
    atlas'' = setRegion atlas' r buffer specialsDim
    -- Keep a 1-texel white border so sampling atlasBlack never bleeds.
    atlas3 = atlas'' {atlasBlack = Region (x+1) (y+1) (w-2) (h-2)}
newAtlas _ _ depth
  = error $ "Invalid depth, should be 1 or 3, but is: " ++ show depth
-- | Delete the atlas' GL texture object, if it still exists.
deleteTexture ::
     TextureAtlas
  -> IO ()
deleteTexture atlas = do
  let texId = atlasTexId atlas
  stillAlive <- GL.isObjectName texId
  when stillAlive $
    GL.deleteObjectNames [texId]
-- | Create or refresh OpenGL text reference to the bitmap information
-- stored in the atlas.  Generates a texture object on first use,
-- uploads the image data (Alpha for depth 1, RGB otherwise), sets
-- linear filtering and edge clamping, and leaves Texture2D unbound
-- and disabled again on return.
uploadTexture ::
     TextureAtlas
  -> IO TextureAtlas
uploadTexture atlas@(TextureAtlas{atlasTexId,atlasImgData,atlasWidth,atlasHeight,atlasDepth}) = do
  texIdDefined <- GL.isObjectName $ atlasTexId
  (texId,atlas') <- if texIdDefined
    then return (atlasTexId, atlas)
    else do
      -- First upload: allocate a fresh texture object and remember it.
      [texId] <- GL.genObjectNames 1
      return (texId,atlas {atlasTexId = texId})
  B.useAsCString atlasImgData $ \ptr -> do
    GL.texture GL.Texture2D $= GL.Enabled
    GL.textureBinding GL.Texture2D $= Just texId
    case atlasDepth of
      1 -> GL.texImage2D
             Nothing
             GL.NoProxy
             0
             GL.Alpha'
             (GL.TextureSize2D
               (gsizei atlasWidth)
               (gsizei atlasHeight)
             )
             0
             (GL.PixelData GL.Alpha GL.UnsignedByte ptr)
      _ -> GL.texImage2D
             Nothing
             GL.NoProxy
             0
             GL.RGB'
             (GL.TextureSize2D
               (gsizei atlasWidth)
               (gsizei atlasHeight)
             )
             0
             (GL.PixelData GL.RGB GL.UnsignedByte ptr)
    GL.textureFilter GL.Texture2D $= ((GL.Linear', Nothing), GL.Linear')
    GL.textureWrapMode GL.Texture2D GL.S $= (GL.Repeated, GL.ClampToEdge)
    GL.textureWrapMode GL.Texture2D GL.T $= (GL.Repeated, GL.ClampToEdge)
    GL.textureBinding GL.Texture2D $= Nothing
    GL.texture GL.Texture2D $= GL.Disabled
  return atlas'
-- | Copy a pixel buffer into the given region of the atlas image.
-- @stride@ is the width in pixels of one row of @buf@.  The image is
-- split into rows, the rows covering the region are patched via
-- 'replace', and everything is re-concatenated.  Errors if the region
-- does not fit inside the atlas.
setRegion ::
     TextureAtlas
  -> Region
  -> ByteString -> Int
  -> TextureAtlas
setRegion atlas@(TextureAtlas{atlasWidth,atlasHeight,atlasDepth,atlasImgData}) (Region x y width height) buf stride
  | x < atlasWidth
  , (x + width) <= atlasWidth
  , y < atlasHeight
  , (y + height) <= atlasHeight
  = atlas'
  where
    imgData = splitRows atlasImgData (atlasWidth * atlasDepth)
    buf' = splitRows buf (stride * atlasDepth)
    (pre,rep) = splitAt y imgData
    (rep',post) = splitAt height rep
    -- Patch each affected atlas row with the matching buffer row;
    -- zipWith truncates if the buffer has fewer rows than the region.
    rep'' = zipWith (replace x width) rep' buf'
    atlas' = atlas {atlasImgData = B.concat $ pre ++ rep'' ++ post}
setRegion _ _ _ _ = error $ "Region does not fit"
-- | Chop a byte string into consecutive rows of @w@ bytes; the final
-- row may be shorter when the length is not a multiple of @w@.
splitRows ::
     ByteString
  -> Int
  -> [ByteString]
splitRows b w
  | B.null b = []
  | otherwise = row : splitRows rest w
  where
    (row, rest) = B.splitAt w b
-- | Overwrite @w@ bytes of @b1@ starting at offset @x@ with @b2@.
replace ::
     Int -> Int
  -> ByteString
  -> ByteString
  -> ByteString
replace x w b1 b2 = B.concat [keepL, b2, keepR]
  where
    (keepL, tl) = B.splitAt x b1
    keepR = B.drop w tl
-- | Decide, given an atlas, a node index, and the with and height of the
-- rectangle you want to fit, will fit at that node index. A rectangle fits
-- if, nodeX + width < atlaswidth, and max { nodeY all nodes covered by
-- nodeX + width } + height < atlasheight. If it fits, the bottem-left
-- rectangle coord is returned.
atlasFit ::
     TextureAtlas
  -> Int -> Int
  -> Node
  -> Int
  -> Maybe Int
atlasFit TextureAtlas{atlasWidth,atlasHeight,atlasNodes} width height (Node x y _) index = fit
  where
    fit = if (x + width) > atlasWidth || yMax + height > atlasHeight
      then Nothing
      else Just yMax
    -- Get node with highest y, in x range [nodeX .. nodeX+width]
    -- NOTE(review): the lambda pattern assumes the node list never
    -- runs out while width remains -- presumably guaranteed by the
    -- nodes always covering the full atlas width; confirm.
    (_,yMax,_) =
      until (\(n,_,_) -> n <= 0)
            (\(width',yMax',((Node _ y' w'):ns)) -> (width' - w', max yMax' y', ns))
            (width, y,drop index atlasNodes)
-- | Collapse adjacent skyline nodes of equal height into one wider
-- node; the resulting node list describes the same skyline.
merge ::
     TextureAtlas
  -> TextureAtlas
merge atlas = atlas {atlasNodes = fuse (atlasNodes atlas)}
  where
    fuse (a@(Node x1 y1 w1) : b@(Node _ y2 w2) : rest)
      | y1 == y2 = fuse (Node x1 y1 (w1 + w2) : rest)
      | otherwise = a : fuse (b : rest)
    fuse ns = ns
-- | Allocate a width x height region in the atlas using the skyline
-- bottom-left heuristic: every node is tried as an anchor, the
-- lowest (then narrowest) fit wins, the skyline is updated with the
-- new node and the displaced nodes are shrunk.  When nothing fits,
-- the sentinel region @Region (-1) (-1) 0 0@ is returned and the
-- atlas is unchanged.
getRegion ::
     TextureAtlas
  -> Int -> Int
  -> (TextureAtlas,Region)
getRegion atlas@(TextureAtlas {atlasUsed,atlasNodes}) width height = (atlas',region)
  where
    potentialFits = zipWith (atlasFit atlas width height) atlasNodes [0..]
    (bestIndex,bestFit,maxYM) = head $
      List.sortBy betterFit (zip3 [0..] atlasNodes potentialFits)
    region = case (bestFit,maxYM) of
      (_,Nothing) -> Region (-1) (-1) 0 0
      ((Node x _ _), Just maxY) -> Region x maxY width height
    atlas' = case (bestFit,maxYM) of
      (_,Nothing) -> atlas
      ((Node x _ _), Just maxY) -> let
          newNode = Node x (maxY+height) width
          (pre,post) = List.splitAt bestIndex atlasNodes
          post' = shrink newNode post
        in
          atlas { atlasNodes = pre ++ (newNode:post')
                , atlasUsed = atlasUsed + width * height
                }
-- | Ordering used to pick the best anchor node: any fit beats no fit,
-- and among fits the lower top edge wins, ties broken by node width.
betterFit ::
     (Int,Node,Maybe Int)
  -> (Int,Node,Maybe Int)
  -> Ordering
betterFit (_,_,Nothing) (_,_,Nothing) = EQ
betterFit (_,_,Nothing) (_,_,Just _ ) = GT
betterFit (_,_,Just _ ) (_,_,Nothing) = LT
betterFit (_,Node _ _ w1,Just y1)
          (_,Node _ _ w2,Just y2) = compare (y1,w1) (y2,w2)
-- | Shrink '(n:ns)' to fit 'np': nodes entirely covered by the new
-- node are dropped, the first partially covered node is trimmed on
-- its left edge, and nodes past the new node's right edge are kept.
shrink ::
     Node
  -> [Node]
  -> [Node]
shrink np@(Node xP _ wP) ((n@(Node x y w)):ns)
  | x >= xP + wP = n:ns           -- past the new node: untouched
  | w' <= 0 = shrink np ns        -- fully covered: drop and continue
  | otherwise = (Node x' y w'):ns -- partially covered: trim left edge
  where
    s = xP + wP - x               -- overlap width
    x' = x + s
    w' = w - s
shrink _ [] = []
-- | Reset the atlas to its freshly-created state: one full-width
-- skyline node, zeroed image data, and the white "specials" patch
-- re-allocated (with a 1-texel border kept around 'atlasBlack').
clear ::
     TextureAtlas
  -> TextureAtlas
clear atlas@(TextureAtlas{atlasWidth,atlasHeight,atlasDepth}) = atlas4
  where
    node = Node 0 0 atlasWidth
    atlas' = atlas
      { atlasNodes = [node]
      , atlasUsed = 0
      , atlasImgData = B.replicate (atlasWidth * atlasHeight * atlasDepth) 0
      }
    (atlas'',r@(Region x y w h)) =
      getRegion atlas' specialsDim specialsDim
    buffer = B.replicate (specialsDim^(2 :: Int)) 255
    -- Stride is specialsDim, matching 'newAtlas' for the identical
    -- buffer/region; the previous stride of 1 split the white patch
    -- into 1-byte rows, so only the first byte of each row was copied.
    atlas3 = setRegion atlas'' r buffer specialsDim
    atlas4 = atlas3 {atlasBlack = Region (x+1) (y+1) (w-2) (h-2)}
| christiaanb/glfont | src/Graphics/UI/Font/TextureAtlas.hs | bsd-3-clause | 8,342 | 0 | 18 | 2,528 | 2,679 | 1,448 | 1,231 | 213 | 4 |
-- Compiler Toolkit: pretty-printer combinators
--
-- Author : Manuel M. T. Chakravarty
-- Created: 16 February 95
--
-- Copyright (c) [1995..2000] Manuel M. T. Chakravarty
--
-- This file is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This file is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
--- DESCRIPTION ---------------------------------------------------------------
--
-- This module provides combinators for pretty-printing, following the ideas
-- in ``Pretty-printing: An Exercise in Functional Programming (DRAFT)'' by
-- John Hughes. Subsequently, partially brought in line with Simon Peyton
-- Jones' version of John Hughes' combinators.
--
--- DOCU ----------------------------------------------------------------------
--
-- language: Haskell 98
--
--  * In a revision of the module, the names of the exported functions were
--    brought in line with SimonPJ's variant.  The old names are still exported
-- as to maintain compatibility to older code. They will disappear
-- somewhere down the road.
--
-- * The type of `fullRender' is different from the one in SimonPJ's variant.
--
-- * `toDoc' is not supported by SimonPJ's variant.
--
-- * The combinators `($+$)', `fcat', and `fsep' are not supported.
--
--- TODO ----------------------------------------------------------------------
--
-- * currently `$|$' imposes a n^2 cost when building a text from top to
-- bottom
--
module Text.CTK.Pretty (
Doc, -- instance Show
empty, isEmpty, char, text, nest, ($$), (<>), cat, sep, fullRender,
--
-- derived combinators
--
semi, comma, colon, dot, space, equals, lparen, rparen, lbrack, rbrack,
lbrace, rbrace, toDoc, int, integer, float, double, rational, parens,
brackets, braces, quotes, doubleQuotes, (<+>), hcat, hsep, vcat, hang,
punctuate, render,
--
-- pretty-printing type class & precedences
--
Pretty(pretty, prettyPrec), usedWhen, Assoc(..), infixOp,
--
-- the following routines are part of the legacy interface that should not
-- be used anymore - it will disappear in due course
--
textDoc, nestDoc, ($|$), (<^>), sepDocs, bestDoc,
--
-- *** for debugging ONLY ***
--
dumpDoc
) where
infixl 6 <>, <+>                 -- horizontal composition
infixl 5 $$                      -- vertical composition
-- (the two fixity comments above were previously swapped; `<>'/`<+>'
-- compose horizontally and `$$' vertically, per their definitions below)
-- default parameters
-- ------------------
dftWidth :: Int
dftWidth = 79
dftRibbonRatio :: Float
dftRibbonRatio = 1.5
-- representation of documents
-- ---------------------------
-- a document is a compact representation (tree shaped) of a set of layouts
-- for a given text (EXPORTED ABSTRACTLY)
--
data Doc = Nest Int [DocAlt]            -- set of layouts, indented as given
data DocAlt = Text String               -- one row
            | TextAbove String Doc      -- row of text above the remaining doc
-- render with defaults
--
instance Show Doc where
  showsPrec _ = showString . render
-- empty document (EXPORTED)
--
empty :: Doc
empty = Nest 0 []
-- test for emptiness (EXPORTED)
--
isEmpty :: Doc -> Bool
isEmpty (Nest _ []) = True
isEmpty _           = False
-- single character (EXPORTED)
--
char :: Char -> Doc
char c = text [c]
-- single line of text (EXPORTED)
--
text :: String -> Doc
text s = Nest 0 [Text s]
-- increase nesting of given document (EXPORTED)
--
nest :: Int -> Doc -> Doc
nest k (Nest m alts) = Nest (k + m) alts
-- vertical composition of documents (EXPORTED)
--
-- (`nestDoc' is the legacy name re-exported by this module;
-- presumably an alias of `nest' -- its definition is further down.)
($$) :: Doc -> Doc -> Doc
(Nest _ [] ) $$ doc = doc
(Nest m alts) $$ doc = Nest m [below a | a <- alts]
  where
    below           :: DocAlt -> DocAlt
    below (Text s)           = let
                                 doc' = nestDoc (-m) doc
                               in
                                 TextAbove s doc'
    below (TextAbove s rest) = let
                                 doc' = rest
                                        $$
                                        nestDoc (-m) doc
                               in
                                 TextAbove s doc'
-- horizontal composition of documents (EXPORTED)
--
-- NOTE(review): this module defines its own `<>' (it predates
-- Semigroup); on modern GHCs it clashes with the Prelude's `<>'
-- unless that is hidden at the import site -- confirm build setup.
(<>) :: Doc -> Doc -> Doc
(Nest _ [] ) <> doc          = doc
doc          <> (Nest _ [] ) = doc
(Nest m alts) <> doc         = Nest m (concat [nextTo a | a <- alts])
  where
    nextTo :: DocAlt -> [DocAlt]
    nextTo (Text s)           = let
                                  Nest _ bs = doc
                                in
                                  [s `inFrontOf` b | b <- bs]
    nextTo (TextAbove s rest) = [TextAbove s (rest <> doc)]
    inFrontOf :: String -> DocAlt -> DocAlt
    s `inFrontOf` (Text t)          = Text (s ++ t)
    s `inFrontOf` (TextAbove t doc') = let
                                         l = length s
                                       in
                                         TextAbove (s ++ t)
                                                   (nestDoc l doc')
-- given a list of sub-documents, generate a composite document where the
-- sub-documents are placed next to each other (EXPORTED)
--
-- * when generating a layout a horizontal layout is only chosen
--   when the given collection of sub-documents fits on a single line
--
cat :: [Doc] -> Doc
cat = catsep (<>)
-- given a list of sub-documents, generate a composite document where the
-- sub-documents are placed next to each other with some seperation between
-- each of them (EXPORTED)
--
-- * when generating a layout a horizontal layout is only chosen
--   when the given collection of sub-documents fits on a single line
--
sep :: [Doc] -> Doc
sep = catsep (<+>)
-- generalise `cat' and `sep': build both the fully horizontal and the
-- fully vertical composition, then keep only the single-line
-- alternative of the horizontal one alongside the vertical layouts.
catsep :: (Doc -> Doc -> Doc) -> [Doc] -> Doc
catsep _     []   = textDoc ""
catsep hcomb docs = fitunion (foldr hcomb empty docs)
                             (foldr ($$) empty docs)
  where
    --
    -- given two documents, where the first one is a horizontal
    -- composition, we only choose a single line alternative (if at
    -- all present) from the first document
    --
    fitunion :: Doc -> Doc -> Doc
    fitunion (Nest m (Text s : _)) (Nest _ alts) = Nest m (Text s : alts)
    fitunion _                     doc           = doc
-- select the best layout from a document and return it in string form
-- (EXPORTED)
--
-- * given are the overall width and the ribbon ratio, ie, the number of
--   times the ribbon fits into a line (the ribbon is the number of
--   characters on a line excluding leading and trailing white spaces)
--
fullRender :: Int -> Float -> Doc -> String
fullRender width ribbonRatio =
  let
    ribbon = round (fromIntegral width / ribbonRatio)
  in
  dropWhile (== '\n') . nestbest 0 width ribbon
  where
    --
    -- like `best', but with explicit nesting
    --
    nestbest :: Int -> Int -> Int -> Doc -> String
    nestbest _ _ _ (Nest _ []  ) = ""
    nestbest k w r (Nest m alts) =
      case foldr1 (choose (w - m) r) alts
      of
        Text s         -> indent (k + m) s
        TextAbove s bs -> indent (k + m) s
                          ++ nestbest (k + m) (w - m) r bs
    --
    -- indent the given string by the given amount
    -- (`replicate' replaces the previous hand-rolled `take n . repeat')
    --
    indent :: Int -> String -> String
    indent k s = "\n" ++ replicate k ' ' ++ s
    -- given the remaining width and ribbon together with two possible
    -- documents, choose the first one if its first line is nice; otherwise,
    -- take the second
    --
    choose :: Int -> Int -> DocAlt -> DocAlt -> DocAlt
    choose w r alts1 alts2 = if (nice w r (firstline alts1))
                             then alts1
                             else alts2
      where
        firstline (Text s)        = s
        firstline (TextAbove s _) = s
    -- given remaining width and ribbon width decide whether a line
    -- is nice or not
    --
    nice :: Int -> Int -> String -> Bool
    nice w r s = (l <= w) && (l <= r)
      where
        l = length s
-- derived combinators
-- -------------------
-- punctuation characters (EXPORTED)
--
semi, comma, colon, dot :: Doc
semi  = char ';'
comma = char ','
colon = char ':'
dot   = char '.'
-- separators (EXPORTED)
--
space, equals :: Doc
space  = char ' '
equals = char '='
-- round parenthesis (EXPORTED)
--
lparen, rparen :: Doc
lparen = char '('
rparen = char ')'
-- square brackets (EXPORTED)
--
lbrack, rbrack :: Doc
lbrack = char '['
rbrack = char ']'
-- curly braces (EXPORTED)
--
lbrace, rbrace :: Doc
lbrace = char '{'
rbrace = char '}'
-- any value that has a textual representation (EXPORTED)
--
toDoc :: Show a => a -> Doc
toDoc = text . show
-- ints (EXPORTED)
--
-- * these are only for compatibility with SimonPJ's `Pretty' module as `toDoc'
-- is more general
--
int :: Int -> Doc
int = toDoc
integer :: Integer -> Doc
integer = toDoc
float :: Float -> Doc
float = toDoc
double :: Double -> Doc
double = toDoc
rational :: Rational -> Doc
rational = toDoc
-- wrap a document into the various forms of brackets
parens, brackets, braces :: Doc -> Doc
parens   d = lparen <> d <> rparen
brackets d = lbrack <> d <> rbrack
braces   d = lbrace <> d <> rbrace

-- wrap a document into single or double quotes
quotes, doubleQuotes :: Doc -> Doc
quotes       d = char '`' <> d <> char '\''
doubleQuotes d = char '"' <> d <> char '"'
-- horizontal composition, inserting a space unless one of the documents
-- is empty (EXPORTED)
(<+>) :: Doc -> Doc -> Doc
dl <+> dr
  | isEmpty dl = dr
  | isEmpty dr = dl
  | otherwise  = dl <> space <> dr
-- list versions of the composition operators (EXPORTED):
-- horizontal, horizontal-with-space, and vertical
hcat, hsep, vcat :: [Doc] -> Doc
hcat = foldr (<>)  empty
hsep = foldr (<+>) empty
vcat = foldr ($$)  empty

-- hang the second document off the first, indenting it by `n' (EXPORTED)
hang :: Doc -> Int -> Doc -> Doc
hang d1 n d2 = sep [d1, nest n d2]
-- attach a punctuation document to every document in a list, but the last
-- (EXPORTED)
punctuate :: Doc -> [Doc] -> [Doc]
punctuate _ []       = []
punctuate _ [d]      = [d]
punctuate p (d : ds) = (d <> p) : punctuate p ds
-- render a document using the default page width and ribbon ratio
render :: Doc -> String
render = fullRender dftWidth dftRibbonRatio
-- type class and precedence
-- -------------------------

-- overloaded pretty-printing function (EXPORTED); define either `pretty'
-- or `prettyPrec' — each has a default in terms of the other
class Pretty a where
  pretty     ::        a -> Doc
  prettyPrec :: Int -> a -> Doc

  pretty       = prettyPrec 0
  prettyPrec _ = pretty

-- identity instance keeps the interface simple and general
instance Pretty Doc where
  pretty = id
-- conditionally apply a document transformer (EXPORTED)
--
-- * typically a wrapper like `parens' is applied when the precedences
--   require it
usedWhen :: (Doc -> Doc) -> Bool -> Doc -> Doc
usedWhen wrap cond doc = if cond then wrap doc else doc
-- associativity of an infix operator (EXPORTED)
data Assoc = LeftAssoc | RightAssoc | NoAssoc
           deriving (Eq)

-- pretty print an infix operator given its associativity, precedence,
-- lexeme, and two arguments; the result is parenthesised whenever the
-- context precedence demands it (EXPORTED)
infixOp :: (Pretty a, Pretty b)
        => Assoc   -- associativity of operator
        -> Int     -- precedence of operator
        -> String  -- lexeme of operator
        -> a       -- left argument
        -> b       -- right argument
        -> Int     -- precedence of context
        -> Doc
infixOp assoc opp lexeme arg1 arg2 p =
    parens `usedWhen` (p > opp) $
      hsep [ prettyPrec leftOpp  arg1
           , text lexeme
           , prettyPrec rightOpp arg2
           ]
  where
    -- a right-associative operator binds its left argument one level tighter
    leftOpp  | assoc == RightAssoc = opp + 1
             | otherwise           = opp
    -- and vice versa for a left-associative operator
    rightOpp | assoc == LeftAssoc  = opp + 1
             | otherwise           = opp
-- the legacy interface (this is only kept for compatibility)
-- --------------------

infixr 1 $|$  -- vertical composition
infixr 1 <^>  -- horizontal composition

textDoc :: String -> Doc
textDoc = text

nestDoc :: Int -> Doc -> Doc
nestDoc = nest

($|$) :: Doc -> Doc -> Doc
($|$) = ($$)

(<^>) :: Doc -> Doc -> Doc
(<^>) = (<>)

sepDocs :: [Doc] -> Doc
sepDocs = sep

bestDoc :: Int -> Int -> Doc -> String
bestDoc width ribbon =
  fullRender width (fromIntegral width / fromIntegral ribbon)
-- debugging support
-- -----------------

-- | Outline every alternative of the top-level `Nest', separated by "--"
-- markers.  (Fuses the former @map f . map g@ into a single traversal.)
dumpDoc :: Doc -> String
dumpDoc (Nest _ []  ) = "<empty>"
dumpDoc (Nest _ alts) = unlines . map ((++ "\n--") . outline) $ alts
  where
    outline (Text str       ) = str
    outline (TextAbove str _) = str ++ "\n..."
| mwotton/ctkl | src/Text/CTK/Pretty.hs | bsd-3-clause | 13,452 | 0 | 14 | 4,195 | 2,882 | 1,633 | 1,249 | 197 | 5 |
{-# LANGUAGE OverloadedStrings #-}
module Network.BitcoinRPC.MarkerAddressesTest
( markerAdressesTests
) where
import Control.Arrow
import Data.Foldable
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.Framework.Providers.QuickCheck2
import Test.HUnit hiding (Test)
import Network.BitcoinRPC.Events
import Network.BitcoinRPC.MarkerAddresses
import Network.BitcoinRPC.TestTypes
import Network.BitcoinRPC.Types
-- | Summing is compatible with pre-summarising each sub-list first.
propSumsMatch :: [(ArbBitcoinAddress, ArbBitcoinAmount)]-> [(ArbBitcoinAddress, ArbBitcoinAmount)] -> Bool
propSumsMatch arbListA arbListB =
    sumAcceptedMarkerAmounts (listA ++ listB)
      == sumAcceptedMarkerAmounts (sumA ++ sumB)
  where
    listA = map (unABAddr *** unABAmount) arbListA
    listB = map (unABAddr *** unABAmount) arbListB
    sumA  = sumAcceptedMarkerAmounts listA
    sumB  = sumAcceptedMarkerAmounts listB

-- | Summarising never grows the list and is idempotent.
propSizeLessOrEqual :: [(ArbBitcoinAddress, ArbBitcoinAmount)] -> Bool
propSizeLessOrEqual arbListA =
    length sumA <= length listA && length sumA == length sumA'
  where
    listA = map (unABAddr *** unABAmount) arbListA
    sumA  = sumAcceptedMarkerAmounts listA
    sumA' = sumAcceptedMarkerAmounts sumA
-- | Build the four lifecycle events (new, update, accepted, disappeared)
-- for a single incoming transaction.
makeTransactionEvents :: TransactionID-> BitcoinAddress-> BitcoinAmount-> [BitcoinAddress]-> (BitcoinEvent, BitcoinEvent, BitcoinEvent, BitcoinEvent)
makeTransactionEvents txid address amount origins =
    (eventNew, eventUpdate, eventAcc, eventDis)
  where
    tx          = ReceiveTx 0 amount address 0 txid 0
    utxid       = UniqueTransactionID txid 0
    eventNew    = NewTransaction utxid tx origins
    eventUpdate = TransactionUpdate utxid 1
    eventAcc    = TransactionAccepted utxid
    eventDis    = TransactionDisappeared utxid
-- | Feed each labelled batch of events through 'processEvents', asserting
-- after every step that the expected number of filtered events came out.
checkTestData :: (MAStore, [(String, [BitcoinEvent], Int)]) -> IO ()
checkTestData (initialStore, entries) = do
    _ <- foldlM check initialStore entries
    return ()
  where
    check store (msg, events, expectedReply) = do
      let (store', fEvents) = processEvents store events
          errMsg = "Unexpected result in step \"" ++ msg ++ "\"."
                     ++ " Expected " ++ show expectedReply ++ " event(s),"
                     ++ " but got: " ++ show fEvents
                     ++ " also: " ++ show store'
      assertBool errMsg (length fEvents == expectedReply)
      return store'
-- | No marker addresses involved: the transaction is only reported once
-- it has been accepted.
standardTransactionTestData :: (MAStore, [(String, [BitcoinEvent], Int)])
standardTransactionTestData =
    (store, [ ("transaction appears", [new], 0)
            , ("transaction is updated", [update], 0)
            , ("transaction is accepted", [acc], 1)
            ])
  where
    store = initMarkerAddressStore []
    (new, update, acc, _) =
        makeTransactionEvents (TransactionID "abc") (BitcoinAddress "1ab") 1 []

test1 :: Test
test1 = testCase "standard transaction" $
            checkTestData standardTransactionTestData

-- | A transaction from a configured marker address is reported as soon as
-- it appears.
markerTransactionTestData :: (MAStore, [(String, [BitcoinEvent], Int)])
markerTransactionTestData =
    (store, [ ("transaction appears", [new], 1)
            , ("transaction is updated", [update], 0)
            , ("transaction is accepted", [acc], 0)
            ])
  where
    store = initMarkerAddressStore [(BitcoinAddress "1def", 1)]
    (new, update, acc, _) =
        makeTransactionEvents (TransactionID "abc") (BitcoinAddress "1ab")
                              1 [BitcoinAddress "1def"]

test2 :: Test
test2 = testCase "marker transaction" $
            checkTestData markerTransactionTestData

-- | A standard transaction that disappears again produces no events.
standardTransactionDisappearingTestData :: (MAStore, [(String, [BitcoinEvent], Int)])
standardTransactionDisappearingTestData =
    (store, [ ("transaction appears", [new], 0)
            , ("transaction is updated", [update], 0)
            , ("transaction disappears", [dis], 0)
            ])
  where
    store = initMarkerAddressStore []
    (new, update, _, dis) =
        makeTransactionEvents (TransactionID "abc") (BitcoinAddress "1ab") 1 []

test3 :: Test
test3 = testCase "standard transaction, disappearing" $
            checkTestData standardTransactionDisappearingTestData

-- | A disappearing marker transaction penalises the marker address, so a
-- second transaction is no longer accepted right away.
markerTransactionDisappearingTestData :: (MAStore, [(String, [BitcoinEvent], Int)])
markerTransactionDisappearingTestData =
    (store, [ ("transaction appears", [new1], 1)
            , ("transaction is updated", [update1], 0)
            , ("transaction is accepted", [dis1], 1)
            , ("second transaction appears", [new2], 0)
              -- is no longer accepted right away
            , ("second transaction is updated", [update2], 0)
            , ("second transaction is accepted", [acc2], 1)
            ])
  where
    store = initMarkerAddressStore [(BitcoinAddress "1def", 1)]
    (new1, update1, _, dis1) =
        makeTransactionEvents (TransactionID "abc") (BitcoinAddress "1ab")
                              1 [BitcoinAddress "1def"]
    (new2, update2, acc2, _) =
        makeTransactionEvents (TransactionID "def") (BitcoinAddress "1de")
                              1 [BitcoinAddress "1def"]

test4 :: Test
test4 = testCase "marker transaction, disappearing" $
            checkTestData markerTransactionDisappearingTestData

-- | Several transactions interacting with the pending-amount limit of a
-- single marker address.
complexScenarioTestData :: (MAStore, [(String, [BitcoinEvent], Int)])
complexScenarioTestData =
    (store, [ ("t1 & t3 new", [new1, new3], 1)
            , ("t2 new", [new2], 0)             -- pending amount is too high
            , ("t3 accepted", [update3, acc3], 1)
            , ("t1 & t2 updated", [update1, update2], 0)
            , ("t1 accepted", [acc1], 1)        -- t2 below limit now
            , ("t2 accepted", [acc2], 0)
            ])
  where
    store = initMarkerAddressStore [(BitcoinAddress "marker", 1)]
    (new1, update1, acc1, _) =
        makeTransactionEvents (TransactionID "t1") (BitcoinAddress "a1")
                              1 [BitcoinAddress "marker"]
    (new2, update2, acc2, _) =
        makeTransactionEvents (TransactionID "t2") (BitcoinAddress "a2")
                              1 [BitcoinAddress "marker"]
    (new3, update3, acc3, _) =
        makeTransactionEvents (TransactionID "t3") (BitcoinAddress "a3")
                              1 []

test5 :: Test
test5 = testCase "complex scenario" $
            checkTestData complexScenarioTestData

-- NOTE(review): name is misspelled ("Adresses"), but it is the exported
-- identifier, so it must stay.
markerAdressesTests :: [Test]
markerAdressesTests = [ testProperty "sums match" propSumsMatch
                      , testProperty "sum size is less or equal" propSizeLessOrEqual
                      , test1, test2, test3, test4, test5
                      ]
| javgh/bitcoin-rpc | Network/BitcoinRPC/MarkerAddressesTest.hs | bsd-3-clause | 6,992 | 0 | 20 | 2,094 | 1,741 | 990 | 751 | 129 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ImpredicativeTypes #-}
{-# LANGUAGE RankNTypes #-}
-- | Representa la especificacion de una propiedad de CSS
module Data.Property (
-- * Tipos de Datos
Property
, FunctionComputed
, FunctionUsed
, PropertyValue (..)
-- ** Funciones Publicas
, mkProp
, getPropertyName
, propertyValue -- sera publico ???
, showPropertyValues
, propertyParser
, doSpecifiedValue
, doComputedValue
, doUsedValue
, computed_asSpecified
, used_asComputed
, applyInheritance
, get
, getM
, adjustPropertyValue
-- ** Funciones para Valores de Propiedades
, unPixelValue
, unPixelUsedValue
, unPixelComputedValue
, unPixelSpecifiedValue
, unKeyUsedColor
, unKeyComputedColor
, unKeySpecifiedColor
, unKeyUsedValue
, unKeyComputedValue
, unKeySpecifiedValue
, compareKeyPropertyValue
, compareKeyPropertyValueWith
, verifyProperty
) where
import Data.List
import qualified Data.Map as Map
import Text.ParserCombinators.UU
import Text.ParserCombinators.UU.BasicInstances
import Text.ParserCombinators.UU.Utils
import Data.DataTreeCSS
import Utils.Utiles
-- %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-- DataTypes
-- %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-- | A CSS property: its name, inheritance flag, initial value, value
-- parser, current value stages, and the functions that derive its
-- computed and used values.
data Property
  = Property { name            :: String
             , inherited       :: Bool
             , initial         :: Value
             , value           :: Parser Value
             , propertyValue   :: PropertyValue
             , fnComputedValue :: FunctionComputed
             , fnUsedValue     :: FunctionUsed
             }

instance Show Property where
  show (Property nm _ _ _ pv _ _) = "Property: " ++ nm ++ "\n" ++ show pv

-- | The four value stages of a property (CSS cascade).
data PropertyValue
  = PropertyValue { specifiedValue :: Value
                  , computedValue  :: Value
                  , usedValue      :: Value
                  , actualValue    :: Value
                  }
  deriving Show

-- | Derives the computed value of a property.
type FunctionComputed = Bool                     -- am I the root?
                     -> Map.Map String Property  -- father props
                     -> Map.Map String Property  -- local props
                     -> Maybe Bool               -- am I replaced?
                     -> Bool                     -- check the pseudo element?
                     -> String                   -- property name
                     -> PropertyValue            -- current value stages
                     -> Value

-- | Derives the used value of a property.
type FunctionUsed = Bool                     -- am I the root?
                 -> (Float, Float)           -- dimensions of the root
                 -> Map.Map String Property  -- father props
                 -> Map.Map String Property  -- local props
                 -> Map.Map String String    -- attributes
                 -> Bool                     -- am I replaced?
                 -> String                   -- property name
                 -> PropertyValue            -- current value stages
                 -> Value
-- %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-- Funciones para la Propiedad
-- %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-- | Build a 'Property' from (name, inherited, initial value, parser,
-- computed-value function, used-value function); the value stages start
-- out as 'defaultPropertyValue'.
mkProp :: (String, Bool, Value, Parser Value, FunctionComputed, FunctionUsed) -> Property
mkProp (nm, inh, ini, pval, fnc, fnu) =
  Property { name            = nm
           , inherited       = inh
           , initial         = ini
           , value           = pval
           , propertyValue   = defaultPropertyValue
           , fnComputedValue = fnc
           , fnUsedValue     = fnu
           }
-- | Extract the name of a property.
getPropertyName :: Property -> String
getPropertyName = name
-- | Extract the name and the four value stages (specified, computed,
-- used, actual) of a property as a 5-tuple of strings.
showPropertyValues :: Property -> (String, String, String, String, String)
showPropertyValues (Property nm _ _ _ (PropertyValue sv cv uv av) _ _) =
  (nm, show sv, show cv, show uv, show av)

-- | Extract the name and the corresponding parser of a property.
propertyParser :: Property -> (String, Parser Value)
propertyParser (Property nm _ _ pr _ _ _) = (nm, pr)

-- | Extract the specified value.
getSpecifiedValue :: Property -> Value
getSpecifiedValue = specifiedValue . propertyValue

-- | Extract the computed value.
getComputedValue :: Property -> Value
getComputedValue = computedValue . propertyValue

-- | Extract the 'PropertyValue' of a property (partial: the name must be
-- present in the map).
get :: Map.Map String Property -> String -> PropertyValue
get props k = propertyValue (props Map.! k)
-- | Total variant of 'get': 'Nothing' when the property is absent.
-- (@maybe Nothing (Just . f)@ is just @fmap f@.)
getM :: Map.Map String Property -> String -> Maybe PropertyValue
getM props k = fmap propertyValue (Map.lookup k props)
-- | Apply a function to the value stages of a property.
adjustPropertyValue :: (PropertyValue -> PropertyValue) -> Property -> Property
adjustPropertyValue fpv prop = prop { propertyValue = fpv (propertyValue prop) }
-- | Resolve inheritance: a non-root element with an inherited property
-- takes its father's computed value as the specified value; otherwise the
-- initial value is used.
applyInheritance :: Bool -> Map.Map String Property -> Property -> Property
applyInheritance isRoot father prop@(Property nm inh defval _ pv _ _)
  | inh && not isRoot =
      prop { propertyValue = pv { specifiedValue = computedValue (father `get` nm) } }
  | otherwise =
      prop { propertyValue = pv { specifiedValue = defval } }
-- | Compute the specified value of a property (only when it is still
-- 'NotSpecified'): apply cascading and sorting to the matching rule
-- declarations, falling back to inheritance or the initial value when no
-- declaration wins.
doSpecifiedValue :: Map.Map String Property -> Bool -> [(Tipo,Origen,Declaraciones,Int)] -> Property -> Property
doSpecifiedValue father isRoot rules prop@(Property nm inh defval _ pv@(PropertyValue NotSpecified _ _ _) _ _) =
    selectValue . applyCascadingSorting $ getPropertyDeclarations nm rules
  where
    -- first non-empty bucket produced by the cascade
    applyCascadingSorting = head' . dropWhile null . cascadingSorting

    setSpecified sv = prop { propertyValue = pv { specifiedValue = sv } }

    -- no winning declaration: inherit (non-root, inherited property) or
    -- use the initial value
    inheritedOrInitial
      | inh && not isRoot = setSpecified (computedValue (father `get` nm))
      | otherwise         = setSpecified defval

    selectValue []         = inheritedOrInitial
    selectValue (rule : _) =
      let (_, _, Declaracion _ val _, _, _) = rule
      in if compareKeyPropertyValue val "inherit"
           then if isRoot
                  then setSpecified defval
                  else setSpecified (computedValue (father `get` nm))
           else setSpecified val
-- specified value already present: leave the property untouched
doSpecifiedValue _ _ _ p = p
-- | Compute the computed value of a property (only when it is still
-- 'NotSpecified') by delegating to the property's own
-- 'FunctionComputed'.
doComputedValue :: Bool -> Map.Map String Property -> Map.Map String Property -> Maybe Bool -> Bool -> Property -> Property
doComputedValue iamtheroot fatherProps locProps iamreplaced iamPseudo prop@(Property nm _ _ _ pv@(PropertyValue _ NotSpecified _ _) fnc _) =
    prop { propertyValue = pv { computedValue = cv } }
  where
    cv = fnc iamtheroot fatherProps locProps iamreplaced iamPseudo nm pv
-- value already computed: just return the property
doComputedValue _ _ _ _ _ p = p

-- | Make the computed value identical to the specified value.
computed_asSpecified :: FunctionComputed
computed_asSpecified _ _ _ _ _ _ = specifiedValue

-- | Compute the used value of a property (only when it is still
-- 'NotSpecified') by delegating to the property's own 'FunctionUsed'.
doUsedValue :: Bool -> (Float,Float) -> Map.Map String Property -> Map.Map String Property -> Map.Map String String -> Bool -> Property -> Property
doUsedValue iamtheroot icbsize fatherProps locProps attrs iamreplaced prop@(Property nm _ _ _ pv@(PropertyValue _ _ NotSpecified _) _ fnu) =
    prop { propertyValue = pv { usedValue = uv } }
  where
    uv = fnu iamtheroot icbsize fatherProps locProps attrs iamreplaced nm pv
-- value already computed: just return the property
doUsedValue _ _ _ _ _ _ p = p

-- | Make the used value identical to the computed value.
used_asComputed :: FunctionUsed
used_asComputed _ _ _ _ _ _ _ = computedValue
-- | Collect the declarations of the named property from a list of rules,
-- tagging each with its rule's type, origin, and specificity.
getPropertyDeclarations :: String -> [(Tipo,Origen,Declaraciones,Int)] -> [(Tipo,Origen,Declaracion,Int)]
getPropertyDeclarations nm1 = foldr fConcat []
  where
    fConcat (tipo, origen, declaraciones, spe) acc =
      case filter (\(Declaracion nm2 _ _) -> nm1 == nm2) declaraciones of
        []    -> acc
        decls -> map (\decl -> (tipo, origen, decl, spe)) decls ++ acc

-- | Apply the CSS cascading algorithm: bucket the declarations by origin
-- and importance (in priority order) and sort each bucket by specificity
-- and document order.
cascadingSorting :: [(Tipo,Origen,Declaracion,Int)] -> [[(Tipo,Origen,Declaracion,Int,Int)]]
cascadingSorting lista1 =
    [ sortBy fsort $ getDeclarations User      True  lista2
    , sortBy fsort $ getDeclarations Author    True  lista2
    , sortBy fsort $ getDeclarations Author    False lista2
    , sortBy fsort $ getDeclarations User      False lista2
    , sortBy fsort $ getDeclarations UserAgent False lista2
    ]
  where
    -- number the declarations in document order; the second argument is
    -- infinite, so the incomplete match on it is never hit
    lista2 = myZip lista1 [1..]

    myZip []                _      = []
    myZip ((a,b,c,d):next) (f:fs) = (a,b,c,d,f) : myZip next fs

    getDeclarations origin important =
      filter (\(_, org, Declaracion _ _ imp, _, _) ->
                origin == org && important == imp)

    -- higher specificity first; ties broken by later document position
    fsort (_, _, _, v1, v3) (_, _, _, v2, v4)
      | v1 > v2             = LT
      | v1 < v2             = GT
      | v1 == v2 && v3 > v4 = LT
      | v1 == v2 && v3 < v4 = GT
      | otherwise           = EQ
-- %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-- Funciones de Utilidad para PropertyValue
-- %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-- | All four value stages start out as 'NotSpecified'.
defaultPropertyValue :: PropertyValue
defaultPropertyValue = PropertyValue { specifiedValue = NotSpecified
                                     , computedValue  = NotSpecified
                                     , usedValue      = NotSpecified
                                     , actualValue    = NotSpecified
                                     }
-- %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-- Funciones de Utilidad para Values
-- %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-- unwrap a pixel value, failing loudly on any other kind of value
unPixelValue val =
  case val of
    PixelNumber px -> px
    PointNumber p  -> error $ "[Property] PointNumber "
                              ++ show p ++ ", expecting PixelNumber"
    EmNumber e     -> error $ "[Property] EmNumber "
                              ++ show e ++ ", expecting PixelNumber"
    Percentage p   -> error $ "[Property] Percentage "
                              ++ show p ++ ", expecting PixelNumber"
    NotSpecified   -> error $ "[Property] NotSpecified"
                              ++ ", expecting PixelNumber"
    _              -> error $ "[Property] I don't know the type: "
                              ++ show val ++ ", expecting PixelNumber"

-- like 'unPixelValue', but the error messages mention the call site
-- (`place') and the value stage (`tp') being unwrapped
unPixelFunction place val tp =
  case val of
    PixelNumber px -> px
    PointNumber p  -> error $ "[Property] PointNumber "
                              ++ show p ++ ", " ++ show tp ++ " expecting PixelNumber at " ++ place
    EmNumber e     -> error $ "[Property] EmNumber "
                              ++ show e ++ ", " ++ show tp ++ " expecting PixelNumber at " ++ place
    Percentage p   -> error $ "[Property] Percentage "
                              ++ show p ++ ", " ++ show tp ++ " expecting PixelNumber at " ++ place
    NotSpecified   -> error $ "[Property] NotSpecified"
                              ++ ", " ++ show tp ++ " expecting PixelNumber at " ++ place
    _              -> error $ "[Property] I don't know the type: "
                              ++ show val ++ ", " ++ show tp ++ " expecting PixelNumber at " ++ place
-- unwrap pixel numbers from the individual value stages
unPixelUsedValue place pv = unPixelFunction place (usedValue pv)      "used value"
unPixelComputedValue  pv = unPixelFunction "p"   (computedValue pv)  "computed value"
unPixelSpecifiedValue pv = unPixelFunction "p"   (specifiedValue pv) "specified value"

-- unwrap keyword colours from the individual value stages
unKeyUsedColor      = (\(KeyColor v) -> v) . usedValue
unKeyComputedColor  = (\(KeyColor v) -> v) . computedValue
unKeySpecifiedColor = (\(KeyColor v) -> v) . specifiedValue

-- unwrap keyword values from the individual value stages
unKeyUsedValue      = (\(KeyValue v) -> v) . usedValue
unKeyComputedValue  = (\(KeyValue v) -> v) . computedValue
unKeySpecifiedValue = (\(KeyValue v) -> v) . specifiedValue
-- | Compare the keyword value of a property against a string (equality).
compareKeyPropertyValue :: Value -> String -> Bool
compareKeyPropertyValue = compareKeyPropertyValueWith (==)

-- generic comparison: applies `fcmp' only to keyword values, anything
-- else compares as False
compareKeyPropertyValueWith fcmp val str =
  case val of
    KeyValue str' -> fcmp str' str
    _             -> False

-- | Check whether the named property currently has the given computed
-- keyword value.
verifyProperty :: String -> String -> Map.Map String Property -> Bool
verifyProperty nm val props =
  compareKeyPropertyValue (computedValue (props `get` nm)) val
| carliros/Simple-San-Simon-Functional-Web-Browser | src/Data/Property.hs | bsd-3-clause | 13,404 | 0 | 17 | 4,282 | 3,013 | 1,643 | 1,370 | 214 | 6 |
{- Testfor Statement -}
module TestStatement where
import Statement
import Test.QuickCheck
-- | Run every QuickCheck property in turn (collapses eleven repeated
-- @quickCheck@ calls into a single 'mapM_').
test = mapM_ quickCheck
  [ checkP1, checkP2, checkP3, checkP4, checkP5, checkP6
  , checkP7, checkP8, checkP9, checkP10, checkP11
  ]
--The checks are maybe a little bit "hacky" by converting to string but for this lab was it convenient and good enough
-- Each check parses a concrete-syntax program (p1..p11 below) and compares
-- the Show rendering of the resulting AST against the expected string.
checkP1 = show p1 == "Skip"
checkP2 = show p2 == "Read \"count\""
checkP3 = show p3 == "Write (Add (Var \"count\") (Num 1))"
checkP4 = show p4 == "Assignment \"count\" (Num 0)"
checkP5 = show p5 == "Block [Skip]"
checkP6 = show p6 == "Block [Assignment \"x\" (Num 0),Assignment \"x\" (Add (Var \"x\") (Num 1))]"
checkP7 = show p7 == "If (Var \"x\") Skip (Assignment \"x\" (Sub (Num 0) (Var \"x\")))"
checkP8 = show p8 == "While (Var \"n\") (Assignment \"n\" (Sub (Var \"n\") (Num 1)))"
checkP9 = show p9 == "While (Var \"n\") (Block [Assignment \"fac\" (Mul (Var \"fac\") (Var \"n\")),Assignment \"n\" (Sub (Var \"n\") (Num 1))])"
checkP10 = show p10 == "Block [Read \"x\",Assignment \"x\" (Add (Var \"x\") (Num 1)),Write (Var \"x\")]"
checkP11 = show p11 == "Block [Read \"n\",Assignment \"fac\" (Num 1),While (Var \"n\") (Block [Assignment \"fac\" (Mul (Var \"fac\") (Var \"n\")),Assignment \"n\" (Sub (Var \"n\") (Num 1))]),Write (Var \"fac\")]"
-- Sample programs, parsed from concrete syntax via 'fromString'.
p1, p2, p3, p4, p5, p6, p7, p8, p9, p10, p11 :: Statement.T
p1 = fromString "skip;"
p2 = fromString "read count;"
p3 = fromString "write count+1;"
p4 = fromString "count := 0;"
p5 = fromString "begin skip; end"
p6 = fromString "begin x:=0; x:=x+1; end"
p7 = fromString "if x then skip; else x:=0-x;"
p8 = fromString "while n do n:=n-1;"
-- s9 is shared between p9 (the loop alone) and p11 (the full program).
s9 = "while n do begin fac:=fac*n; n:=n-1; end"
p9 = fromString s9
p10 = fromString "begin read x ; x := x + 1 ; write x; end"
p11 = fromString ("begin read n; fac:=1; " ++ s9 ++ " write fac; end")
| DavidRutqvist/ParserOperators-D7012E-Lab2 | test/TestStatement.hs | bsd-3-clause | 2,010 | 0 | 8 | 413 | 348 | 178 | 170 | 39 | 1 |
{-# LANGUAGE RebindableSyntax #-}
-- Copyright : (C) 2009 Corey O'Connor
-- License : BSD-style (see the file LICENSE)
import Bind.Marshal.Prelude
import Bind.Marshal.Verify
import Bind.Marshal.StaticProperties
-- run the (empty) static-properties test suite; a single-statement `do'
-- block is just the statement itself
main = run_test (returnM ())
| coreyoconnor/bind-marshal | test/verify_static_properties.hs | bsd-3-clause | 259 | 0 | 9 | 48 | 39 | 23 | 16 | 6 | 1 |
{-# language CPP #-}
-- No documentation found for Chapter "Promoted_From_VK_KHR_shader_float16_int8"
module Vulkan.Core12.Promoted_From_VK_KHR_shader_float16_int8 ( PhysicalDeviceShaderFloat16Int8Features(..)
, StructureType(..)
) where
import Foreign.Marshal.Alloc (allocaBytes)
import Foreign.Ptr (nullPtr)
import Foreign.Ptr (plusPtr)
import Vulkan.CStruct (FromCStruct)
import Vulkan.CStruct (FromCStruct(..))
import Vulkan.CStruct (ToCStruct)
import Vulkan.CStruct (ToCStruct(..))
import Vulkan.Zero (Zero(..))
import Data.Typeable (Typeable)
import Foreign.Storable (Storable)
import Foreign.Storable (Storable(peek))
import Foreign.Storable (Storable(poke))
import qualified Foreign.Storable (Storable(..))
import GHC.Generics (Generic)
import Foreign.Ptr (Ptr)
import Data.Kind (Type)
import Vulkan.Core10.FundamentalTypes (bool32ToBool)
import Vulkan.Core10.FundamentalTypes (boolToBool32)
import Vulkan.Core10.FundamentalTypes (Bool32)
import Vulkan.Core10.Enums.StructureType (StructureType)
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES))
import Vulkan.Core10.Enums.StructureType (StructureType(..))
-- | VkPhysicalDeviceShaderFloat16Int8Features - Structure describing
-- features supported by VK_KHR_shader_float16_int8
--
-- = Members
--
-- This structure describes the following features:
--
-- = Description
--
-- If the 'PhysicalDeviceShaderFloat16Int8Features' structure is included
-- in the @pNext@ chain of the
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceFeatures2'
-- structure passed to
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.getPhysicalDeviceFeatures2',
-- it is filled in to indicate whether each corresponding feature is
-- supported. 'PhysicalDeviceShaderFloat16Int8Features' /can/ also be used
-- in the @pNext@ chain of 'Vulkan.Core10.Device.DeviceCreateInfo' to
-- selectively enable these features.
--
-- == Valid Usage (Implicit)
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_shader_float16_int8 VK_KHR_shader_float16_int8>,
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_VERSION_1_2 VK_VERSION_1_2>,
-- 'Vulkan.Core10.FundamentalTypes.Bool32',
-- 'Vulkan.Core10.Enums.StructureType.StructureType'
data PhysicalDeviceShaderFloat16Int8Features = PhysicalDeviceShaderFloat16Int8Features
  { -- | #extension-features-shaderFloat16# @shaderFloat16@ indicates whether
    -- 16-bit floats (halfs) are supported in shader code, i.e. whether shader
    -- modules /can/ declare the @Float16@ capability.  Only a subset of the
    -- SPIR-V storage classes is enabled: @Private@, @Workgroup@ (for
    -- non-Block variables), and @Function@ — not the interface storage
    -- classes (e.g., @UniformConstant@, @Uniform@, @StorageBuffer@, @Input@,
    -- @Output@, and @PushConstant@).
    shaderFloat16 :: Bool
  , -- | #extension-features-shaderInt8# @shaderInt8@ indicates whether 8-bit
    -- integers (signed and unsigned) are supported in shader code, i.e.
    -- whether shader modules /can/ declare the @Int8@ capability.  The same
    -- storage-class restriction as for @shaderFloat16@ applies.
    shaderInt8 :: Bool
  }
  deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PhysicalDeviceShaderFloat16Int8Features)
#endif
deriving instance Show PhysicalDeviceShaderFloat16Int8Features
-- C layout: sType at offset 0, pNext at 8, the two Bool32 features at 16
-- and 20; total size 24, alignment 8.
instance ToCStruct PhysicalDeviceShaderFloat16Int8Features where
  withCStruct x f = allocaBytes 24 $ \p -> pokeCStruct p x (f p)
  pokeCStruct p PhysicalDeviceShaderFloat16Int8Features{..} f = do
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (shaderFloat16))
    poke ((p `plusPtr` 20 :: Ptr Bool32)) (boolToBool32 (shaderInt8))
    f
  cStructSize = 24
  cStructAlignment = 8
  pokeZeroCStruct p f = do
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (zero))
    poke ((p `plusPtr` 20 :: Ptr Bool32)) (boolToBool32 (zero))
    f
instance FromCStruct PhysicalDeviceShaderFloat16Int8Features where
  peekCStruct p = do
    -- offsets mirror pokeCStruct: features live at 16 and 20
    shaderFloat16 <- peek @Bool32 ((p `plusPtr` 16 :: Ptr Bool32))
    shaderInt8 <- peek @Bool32 ((p `plusPtr` 20 :: Ptr Bool32))
    pure $ PhysicalDeviceShaderFloat16Int8Features
             (bool32ToBool shaderFloat16) (bool32ToBool shaderInt8)

instance Storable PhysicalDeviceShaderFloat16Int8Features where
  sizeOf ~_ = 24
  alignment ~_ = 8
  peek = peekCStruct
  poke ptr poked = pokeCStruct ptr poked (pure ())

instance Zero PhysicalDeviceShaderFloat16Int8Features where
  zero = PhysicalDeviceShaderFloat16Int8Features
           zero
           zero
| expipiplus1/vulkan | src/Vulkan/Core12/Promoted_From_VK_KHR_shader_float16_int8.hs | bsd-3-clause | 5,736 | 0 | 14 | 892 | 923 | 541 | 382 | -1 | -1 |
{-# LANGUAGE ScopedTypeVariables, StandaloneDeriving, GADTs, Rank2Types #-}
module Main where
import Control.Concurrent
-- from this package
-- import Control.Monad.Coroutine
import Control.Monad.Trans.Crtn.Driver
import Control.Monad.Trans.Crtn.Event
import Control.Monad.Trans.Crtn.EventHandler
-- import Control.Monad.Coroutine.Logger.Simple
-- import Control.Monad.Coroutine.Logger.Simple
--
import Event
import Sample
import SampleActor
import Simple
-- | Set up a driver behind an MVar and start feeding it ticking events.
test_tickingevent :: IO ()
test_tickingevent = do
    dref <- newEmptyMVar :: IO (MVar (Maybe (Driver Event IO ())))
    let logger = simplelogger -- weblogger "http://127.0.0.1:7800"
    putMVar dref . Just $ (driver logger world)
    putStrLn "starting ticking"
    ticking dref 0

-- | One second in microseconds, for 'threadDelay'.
second :: Int
second = 1000000
-- | Emit one event per second, choosing the event kind from the tick
-- count modulo 10: 0 -> Open, 5 -> Close, 3 or 6 -> Render, anything
-- else -> a Message.  (Earlier experiments with other schedules were
-- kept here as dead commented-out code and have been removed.)
ticking :: MVar (Maybe (Driver Event IO ())) -> Int -> IO ()
ticking mvar n = do
    putStrLn "--------------------------"
    putStrLn ("ticking : " ++ show n)
    dispatch
    putStrLn "_-_-_-_-_-_-_-_-_-_-_-_-_-"
    threadDelay (1*second)
    ticking mvar (n+1)
  where
    dispatch
      | n `mod` 10 == 0                       = eventHandler mvar Open
      | n `mod` 10 == 5                       = eventHandler mvar Close
      | n `mod` 10 == 3 || n `mod` 10 == 6    = eventHandler mvar Render
      | otherwise = eventHandler mvar (Message ("test : " ++ show n))
-------------------------------
-- test
-------------------------------
-- | Entry point: run the ticking-event test.
main :: IO ()
main = test_tickingevent
| wavewave/coroutine-object | example/coroutine-object.hs | bsd-3-clause | 2,420 | 0 | 15 | 670 | 392 | 210 | 182 | 35 | 4 |
--------------------------------------------------------------------
-- |
-- Module : Main
-- Copyright : (c) Oliver Braun
-- License : BSD3
--
-- Maintainer: Oliver Braun <ob@obraun.net>
-- Stability : provisional
-- Portability: portable
--
-- This module provides a CLI for a MovieStore using the "Movies" module.
--
--------------------------------------------------------------------
module Main ( main
, readMovies
, saveMovies
, rentMovie
, returnMovie
, mainloop
) where
import qualified Movies as M
import System.IO(hSetBuffering, stdin, stdout, BufferMode(NoBuffering))
-- | The 'readMovies' function can be used to read a file containing
-- movies (as written by 'saveMovies', via Read/Show).
readMovies :: FilePath    -- ^ The name of the file
           -> IO M.Movies -- ^ The contents as value of type 'M.Movies'
readMovies = fmap read . readFile

-- | The 'saveMovies' function can be used to write the movies to a file.
saveMovies :: FilePath -- ^ The name of the file
           -> M.Movies -- ^ The movies
           -> IO ()
saveMovies fp = writeFile fp . show
-- | The 'rentMovie' function can be used to rent a movie.  It asks for a
-- title and tries to rent a copy, returning a success flag together with
-- the new movies.
rentMovie :: M.Movies            -- ^ The movies
          -> IO (Bool, M.Movies) -- ^ An indicator whether a copy was rent and the new movies.
rentMovie vl = do
  putStr "Title? "
  title <- getLine
  if M.rentable title vl
    then return (True, M.rent title vl)
    else do
      putStrLn $ "Sorry! " ++ title ++ " is currently not rentable"
      return (False, vl)
-- | The 'returnMovie' function can be used to return a movie.  It asks
-- for a title and tries to put a copy back, returning the new movies.
returnMovie :: M.Movies    -- ^ All movies
            -> IO M.Movies -- ^ The new movies
returnMovie vl = do
  putStr "Title? "
  title <- getLine
  let (vl', ok) = M.return title vl
  if ok
    then return vl'
    else do
      putStrLn $ title ++ " is not ours!"
      return vl
-- | The 'main' function calls 'mainloop' with no movies.
main :: IO ()
main = do
  -- Unbuffered handles so single-key menu input works and prompts are
  -- shown before 'getLine'/'getChar' blocks.
  hSetBuffering stdin NoBuffering
  hSetBuffering stdout NoBuffering
  mainloop $ M.fromList []
-- | The 'mainloop' function shows the following menu:
--
-- > *********************************************
-- > * Press q(uit) or one of the following keys *
-- > * g - get a movie *
-- > * r - return a movie *
-- > * l - load Movies from a file *
-- > * s - save Movies to a file *
-- > * p - print state of video store *
-- > *********************************************
-- > Input:
--
-- After a keypress the corresponding action will be performed.
--
-- Use for example:
--
-- > mainloop $ M.Movies.fromList ["Am Limit", "Matrix", "Matrix"]
mainloop :: M.Movies -- ^ The initial movies
         -> IO ()
-- Show the menu, read one key, dispatch, then loop until 'q' is pressed.
mainloop vl = do
  menu
  c <- getChar
  putStr "\n"
  -- Compute the next store state from the chosen action; unknown keys
  -- (including 'q') leave the store unchanged.
  vl' <- case c of
              'g' -> do fmap snd $ rentMovie vl
              'r' -> do returnMovie vl
              'l' -> do putStr "Filename? "
                        filename <- getLine
                        readMovies filename
              's' -> do putStr "Filename? "
                        filename <- getLine
                        saveMovies filename vl
                        return vl
              'p' -> do putStr $ M.showMovieStore vl
                        return vl
              _ -> return vl
  -- 'q' terminates the loop; every other key recurses with the new state.
  case c of
       'q' -> putStrLn "Bye" >> return ()
       _ -> mainloop vl'
  where
    menu :: IO ()
    -- Print the fixed menu banner and the input prompt.
    menu = do
      putStrLn "*********************************************"
      putStrLn "* Press q(uit) or one of the following keys *"
      putStrLn "* g - get a movie                           *"
      putStrLn "* r - return a movie                        *"
      putStrLn "* l - load Movies from a file               *"
      putStrLn "* s - save Movies to a file                 *"
      putStrLn "* p - print state of video store            *"
      putStrLn "*********************************************"
      putStr "Input: "
| obcode/moviestore_haskell | src/MovieStoreCLI.hs | bsd-3-clause | 4,141 | 0 | 15 | 1,293 | 683 | 344 | 339 | 74 | 7 |
{-# LANGUAGE RankNTypes #-}
import Control.Applicative
import Data.Set (Set)
import qualified Data.Set as S
-- | Codensity-style CPS wrapper whose continuations may only produce
-- 'Ord'-constrained results -- this is what lets 'Set' (which is not a
-- 'Monad') be used through the 'Monad' interface below.
newtype CodensityOrd m a = CodensityOrd { runCodensityOrd :: forall b. Ord b => (a -> m b) -> m b }
-- liftCodensityOrd :: Monad m => m a -> CodensityOrd m a
-- liftCodensityOrd m = CodensityOrd ((>>=) m)
--
-- lowerCodensityOrd :: (Ord a, Monad m) => CodensityOrd m a -> m a
-- lowerCodensityOrd m = runCodensityOrd m return
-- Standard Codensity instances: note none of them require any constraint
-- on @f@ -- all the work happens in the continuations.
instance Functor (CodensityOrd f) where
   fmap f m = CodensityOrd (\k -> runCodensityOrd m (k . f))
instance Applicative (CodensityOrd f) where
   pure x = CodensityOrd (\k -> k x)
   mf <*> mx = CodensityOrd (\k -> runCodensityOrd mf (\f -> runCodensityOrd mx (\x -> k (f x))))
instance Monad (CodensityOrd f) where
    return = pure
    m >>= k = CodensityOrd (\c -> runCodensityOrd m (\a -> runCodensityOrd (k a) c))
-- | Embed a 'Set' into 'CodensityOrd': binds are implemented by folding
-- the set and unioning the continuation's results.
liftSet :: Ord a => Set a -> CodensityOrd Set a
liftSet m = CodensityOrd (bind m)
  where bind :: (Ord a, Ord b) => Set a -> (a -> Set b) -> Set b
        mx `bind` fxmy = S.fold (\x my -> fxmy x `S.union` my) S.empty mx
-- | Collapse a 'CodensityOrd' computation back to a concrete 'Set' by
-- feeding it the singleton continuation.
lowerSet :: Ord a => CodensityOrd Set a -> Set a
lowerSet m = runCodensityOrd m S.singleton
-- Demo: all pairwise sums of {1,2,3} and {1,2,3}, deduplicated by the
-- underlying Set (i.e. fromList [2..6]).
main = print $ lowerSet $ monadicPlus (liftSet $ S.fromList [1, 2, 3]) (liftSet $ S.fromList [1, 2, 3])
-- | Add the results of two monadic 'Int' computations, sequencing the
-- first computation before the second.
monadicPlus :: Monad m => m Int -> m Int -> m Int
monadicPlus mx my = mx >>= \x -> my >>= \y -> return (x + y)
| batterseapower/haskell-kata | CodensitySet.hs | bsd-3-clause | 1,409 | 0 | 17 | 321 | 576 | 297 | 279 | 25 | 1 |
module Test.FFmpeg where
import Test.Hspec
import System.Process
import FFmpeg.Config
import FFmpeg.Probe (ffprobe)
import FFmpeg.Process
import FFmpeg.Data.H264
import Data.Maybe
import Control.Exception
import System.Directory
import Text.Printf
-- | End-to-end ffmpeg tests.  Each example transcodes @test/test.in@
-- with one configuration tweak; 'after_' runs 'cleanUp' after every
-- example so the output file never accumulates between runs.
test :: IO ()
test = hspec $ after_ cleanUp $ do
   describe "Normal test/test.in" $ do
      it "frames = Fix 100" $ do
         let arg = (defaultCfg :: H264) {frames = Fix 100}
         let input = "test/test.in"
         ffmpeg arg =<< (ffprobe input)
      it "frameRate = 10" $ do
         let arg = (defaultCfg :: H264) {frameRate = Max 10}
         let input = "test/test.in"
         ffmpeg arg =<< (ffprobe input)
      it "bitRate = 10" $ do
         let arg = (defaultCfg :: H264) {frameRate = Max 100, bitRate = Max 1}
         let input = "test/test.in"
         ffmpeg arg =<< (ffprobe input)
      it "audioBitRate = 10" $ do
         let arg = (defaultCfg :: H264) {frameRate = Max 100, audioBitRate = Max 1}
         let input = "test/test.in"
         ffmpeg arg =<< (ffprobe input)
      it "size = 10x10" $ do
         let arg = (defaultCfg :: H264) {frameRate = Max 100, height = Max 10, width = Max 10}
         let input = "test/test.in"
         ffmpeg arg =<< (ffprobe input)
   -- Process-management tests: spawn ffmpeg, kill it, and check the exit
   -- code is observable.
   describe "Orphan test" $ do
      it "Kill immediately" $ do
         let arg = (defaultCfg :: H264) {frames = Fix 100}
         let input = "test/test.in"
         h <- spawnFFmpeg arg =<< (ffprobe input)
         killFFmpeg h
         code <- getFFmpegExitCode h
         code `shouldSatisfy` isJust
      it "On Exception Kill" $ do
         let arg = (defaultCfg :: H264) {frames = Fix 100}
         let input = "test/test.in"
         h <- spawnFFmpeg arg =<< (ffprobe input)
         -- The thrown UserInterrupt must propagate, and the spawned
         -- process must still be reaped (exit code available).
         (`shouldThrow` anyException) $ onExceptionKill h `handle` do
            printFFmpeg h
            throw UserInterrupt
         code <- getFFmpegExitCode h
         code `shouldSatisfy` isJust
   describe "FFProbe" $ do
      it "Read a video" $ do
         probe <- ffprobe "test/test.in"
         print probe
-- Remove the transcoder's output file; a missing file is fine (the
-- IOException is just printed, not rethrown).
cleanUp = do
   removeFile f `catch` handle
   where
      f = "test/h264_test.mp4"
      handle :: IOException -> IO ()
      handle = putStrLn . displayException
| YLiLarry/compress-video | test/Test/FFmpeg.hs | bsd-3-clause | 2,289 | 0 | 19 | 743 | 740 | 362 | 378 | 60 | 1 |
{-# LANGUAGE ForeignFunctionInterface #-}
-------------------------------------------------------------------------------
-- |
-- Copyright : (c) 2015 Michael Carpenter
-- License : BSD3
-- Maintainer : Michael Carpenter <oldmanmike.dev@gmail.com>
-- Stability : experimental
-- Portability : portable
--
-------------------------------------------------------------------------------
module Sound.Csound.FunctionTableDisplay (
csoundSetIsGraphable
--csoundSetMakeGraphCallback,
--csoundSetDrawGraphCallback,
--csoundSetKillGraphCallback,
--csoundSetExitGraphCallback
) where
import Control.Monad.IO.Class
import Foreign.Ptr
import Foreign.C.Types
foreign import ccall "csound.h csoundSetIsGraphable" csoundSetIsGraphable' :: Ptr () -> CInt -> IO CInt
--foreign import ccall "csound.h csoundSetMakeGraphCallback" csoundSetMakeGraphCallback'
--foreign import ccall "csound.h csoundSetDrawGraphCallback" csoundSetDrawGraphCallback'
--foreign import ccall "csound.h csoundSetKillGraphCallback" csoundSetKillGraphCallback'
--foreign import ccall "csound.h csoundSetExitGraphCallback" csoundSetExitGraphCallback'
-- | 'MonadIO' wrapper around the raw @csoundSetIsGraphable@ FFI call.
-- The first argument is the opaque CSOUND instance pointer; the second
-- is the flag value passed straight through to the C API.  The returned
-- 'CInt' is whatever the C function reports (NOTE(review): per csound.h
-- this should be the previous value of the flag -- confirm against the
-- header).
csoundSetIsGraphable :: MonadIO m => Ptr () -> CInt -> m CInt
csoundSetIsGraphable csnd isGraphable = liftIO (csoundSetIsGraphable' csnd isGraphable)
--csoundSetMakeGraphCallback
--csoundSetMakeGraphCallback
--csoundSetDrawGraphCallback
--csoundSetDrawGraphCallback
--csoundSetKillGraphCallback
--csoundSetKillGraphCallback
--csoundSetExitGraphCallback
--csoundSetExitGraphCallback
| oldmanmike/CsoundRaw | src/Sound/Csound/FunctionTableDisplay.hs | bsd-3-clause | 1,531 | 0 | 8 | 157 | 133 | 83 | 50 | 9 | 1 |
module Main where
import Determination
-- | Entry point: delegates to 'run' from the "Determination" library.
main :: IO ()
main = run
| 5outh/determination | app/Main.hs | bsd-3-clause | 76 | 0 | 6 | 24 | 22 | 13 | 9 | 4 | 1 |
module Network.Irc (
module Network.Irc.Configuration,
module Network.Irc.Types,
module Network.Irc.Run,
module Network.Irc.Client
)
where
import Network.Irc.Configuration
import Network.Irc.Types
import Network.Irc.Run
import Network.Irc.Client
| geoffhuston/Network.Irc | Network/Irc.hs | bsd-3-clause | 312 | 0 | 5 | 87 | 60 | 41 | 19 | 9 | 0 |
{-# LANGUAGE QuasiQuotes, TypeFamilies, GeneralizedNewtypeDeriving, TemplateHaskell,
OverloadedStrings, GADTs, FlexibleContexts #-}
{-# LANGUAGE EmptyDataDecls #-}
-- This handles the database interface for Ribot. This defines the schema and
-- the interface types, and it defines the "model" functions, i.e., those that
-- have any database access.
module Database.Ribot
( UserGeneric(..)
, User
, UserId
, Unique(..)
, MessageGeneric(..)
, Message
, MessageId
, TopicGeneric(..)
, Topic
, TopicId
, TokenGeneric(..)
, Token
, TokenId
, PositionGeneric(..)
, Position
, PositionId
, SavedItem(..)
, initDatabase
, addTempTable
, runDb
, runPool
, getOrCreateUser
, getOrCreateTopic
, saveMessage
, setUserLogging
, getUserMessages
, withResourceLogger
, withResourceNoLogger
) where
import Control.Monad.IO.Class (liftIO, MonadIO)
import Control.Monad.Logger
import Control.Monad.Trans.Resource
import Database.Persist hiding (runPool)
import Database.Persist.Sqlite hiding (runPool)
import Database.Persist.Quasi
import Database.Persist.TH
import Data.Text (Text)
import qualified Data.Text as T
import Data.Time
import qualified Network.IRC.Base as B
-- This creates the model types from their names.
share [mkPersist sqlSettings, mkMigrate "migrateAll"]
$(persistFileWith upperCaseSettings "config/models")
-- This is for the output of `saveMessage`. This lets me wrap up the item
-- that was saved so I can retrieve it later. This could probably be a
-- newtype for `Maybe (Either Topic Message)`, but this seems more
-- extensible.
data SavedItem = NothingSaved
               | SavedTopic TopicId
               | SavedMessage MessageId
-- This initializes the database by opening the connection and migrating.
-- It also creates the indices and the temporary indexing table, all of
-- which are idempotent.
initDatabase :: FilePath -> IO ()
initDatabase dbFile = withResourceNoLogger . runDb dbFile $ do
    runMigration migrateAll
    addIndices
    addTempTable
    return ()
-- This takes a function and runs it in the context of a SQLite database.
runDb :: (MonadIO m, MonadBaseControl IO m) => FilePath -> SqlPersistT m a -> m a
runDb sqliteFile = withSqliteConn (T.pack sqliteFile) . runSqlConn
-- This takes a function and runs it in the context of a pool of SQLite
-- database connections.
runPool :: (MonadIO m, MonadBaseControl IO m)
        => FilePath -> Int -> SqlPersistT m a -> m a
runPool sqliteFile poolSize =
    withSqlitePool (T.pack sqliteFile) poolSize . runSqlPool
-- This takes a database and executes the SQL to create the database's
-- indices. These include "IF NOT EXISTS" phrases, so this can safely be
-- executed more than once on the same database.
addIndices :: SqlPersistM ()
addIndices = mapM_ (execute' []) sql
    where
        -- rawExecute takes (sql, params); flip it so we can map with the
        -- (empty) parameter list first.
        execute' = flip rawExecute
        sql = [ " CREATE INDEX IF NOT EXISTS idx_message ON \"Message\" \
              \ (id, \"userId\", posted);"
              , " CREATE INDEX IF NOT EXISTS idx_token ON \"Token\" \
              \ (id, text);"
              , " CREATE INDEX IF NOT EXISTS idx_position on \"Position\" \
              \ (id, \"tokenId\", \"messageId\");"
              ]
-- This creates the temporary table used for building the inverted index.
--
-- This might be unsafe in some circumstances. That is, messageId could be
-- either a messageId or a topicId. This could only be a problem very, very
-- early in the indexing process, when there are very few messages and very few
-- topics, and a topic and a message with the same ID are both being indexed at
-- the same time. If you're re-indexing the entire database, this isn't an
-- issue, however; because messages and topics aren't indexed at the same time.
addTempTable :: SqlPersistM ()
addTempTable = rawExecute sql []
    where
        sql = " CREATE TEMPORARY TABLE IF NOT EXISTS msg_token \
              \ (\"tokenId\" INTEGER DEFAULT NULL, \
              \ \"messageId\" INTEGER, \
              \ text VARCHAR, \
              \ UNIQUE (\"messageId\", text) ON CONFLICT IGNORE, \
              \ FOREIGN KEY (\"messageId\") REFERENCES \"Message\"(id) \
              \ );"
-- This looks for a username in the database. If it doesn't exist, this creates
-- it (with logging enabled).  The lookup/insert pair is retried a few times
-- to cope with races between the 'getBy' and the 'insert_'.
getOrCreateUser :: T.Text -> SqlPersistM (Entity User)
getOrCreateUser = get' (0 :: Int)
    where
        -- This attempts to get, and failing that insert, the user. If it
        -- takes too many tries, just fail.
        --
        -- Bug fix: the attempt counter previously counted *down* from 0
        -- (get' (n-1)), so the 'get' 3' guard could never match and a
        -- persistent failure would loop forever.  It now counts up.
        get' 3 _ = fail "too many attempts"
        get' n name = do
            exists <- getBy $ UniqueUser name
            case exists of
                Just user -> return user
                Nothing -> do
                    insert_ $ User name True
                    get' (n + 1) name
-- This looks for a topic with a given text from a user. If it doesn't exist,
-- this creates it (timestamped with the current time).  The lookup/insert
-- pair is retried a few times to cope with races.
getOrCreateTopic :: UserId -> T.Text -> SqlPersistM (Entity Topic)
getOrCreateTopic userId text = get' (0 :: Int)
    where
        -- Bug fix: as in getOrCreateUser, the counter previously counted
        -- down from 0 (get' (n-1)), so the retry limit never triggered.
        -- It now counts up toward the limit of 3 attempts.
        get' 3 = fail "too many attempts"
        get' n = do
            exists <- getBy $ UniqueTopic userId text
            case exists of
                Just topic -> return topic
                Nothing -> do
                    now <- liftIO getCurrentTime
                    insert_ $ Topic userId text now
                    get' (n + 1)
-- This takes a `Message` from IRC and saves it to the database.
--
-- Empty PRIVMSGs and PRIVMSGs whose body starts with '!' (presumably bot
-- commands -- TODO confirm) are not logged.
saveMessage :: B.Message -> SqlPersistM SavedItem
saveMessage (B.Message (Just B.NickName{}) "PRIVMSG" [_, ""]) =
    return NothingSaved
saveMessage (B.Message (Just B.NickName{}) "PRIVMSG" [_, '!':_]) =
    return NothingSaved
saveMessage (B.Message (Just (B.NickName name _ _)) "PRIVMSG" [_, message]) = do
    -- Only persist the message if the sender has logging turned on.
    Entity userId user <- getOrCreateUser $ T.pack name
    if userLoggingOn user
        then insertMessage userId message
        else return NothingSaved
    where insertMessage userId msg = do
            now <- liftIO getCurrentTime
            mid <- insert $ Message userId (T.pack msg) now
            transactionSave
            return $ SavedMessage mid
saveMessage (B.Message (Just (B.NickName name _ _)) "TOPIC" [_, topic]) = do
    (Entity userId _) <- getOrCreateUser $ T.pack name
    (Entity topicId _) <- getOrCreateTopic userId $ T.pack topic
    transactionSave
    return $ SavedTopic topicId
-- Anything else (server notices, joins, etc.) is ignored.
saveMessage _ = return NothingSaved
-- This takes a userId and sets the logging for it.
setUserLogging :: UserId -> Bool -> SqlPersistM ()
setUserLogging userId logging =
    update userId [UserLoggingOn =. logging] >> transactionSave
-- This returns all the messages for the user with a given user name, or
-- Nothing when the user does not exist.
getUserMessages :: T.Text -> SqlPersistM (Maybe [Entity Message])
getUserMessages userName = do
    user' <- getBy $ UniqueUser userName
    case user' of
        Nothing -> return Nothing
        Just (Entity userId _) ->
            Just `fmap` selectList [MessageUserId ==. userId] []
-- Run a resource-managed action with stderr logging / with logging
-- disabled, respectively.
withResourceLogger :: LoggingT (ResourceT IO) a -> IO a
withResourceLogger = runResourceT . runStderrLoggingT
withResourceNoLogger :: NoLoggingT (ResourceT IO) a -> IO a
withResourceNoLogger = runResourceT . runNoLoggingT
| erochest/ribot | src/Database/Ribot.hs | bsd-3-clause | 7,270 | 0 | 16 | 1,937 | 1,401 | 736 | 665 | 125 | 3 |
module TestCandSel (
testMakeCandidates
, testDropSafeOverlap
, testOverlaps
, testGetNextCandidates
, testMakeThenGetCandidates
)
where
-- import Debug.Trace (trace)
import Data.Char (ord)
import Data.List (sort)
import qualified Data.ByteString as B
import Utils
import InputText
import EnnGram
import CandidateSelection
-- traceId :: Show x => x -> x
-- traceId x = trace (show x) x
-- | Build a 'Candidate' from a plain String and its count (the String is
-- packed byte-wise via its character codes).
mkCand :: (String, Count) -> Candidate
mkCand (s, c) = Candidate (B.pack . map (fromIntegral . ord) $ s) c
-- | Run 'makeCandidates' over the given input strings and compare the
-- result to the expected (string, count) list; Left carries the
-- observed/expected pair on mismatch.
oneTestMakeCandidates :: [String] -> [(String, Count)]
                      -> Either ([Candidate], [(String, Count)])
                                Bool
oneTestMakeCandidates sl xpct =
  maybe (Right False) id $ do
    it <- toCodepoints sl
    let cands = uncurry (makeCandidates maxCompressions) . ennGramMap $ it
    if length cands == length xpct
       && and (zipWith (==) cands $ map mkCand xpct)
      then return $ Right True
      else return $ Left (cands, xpct)
-- | Table of makeCandidates cases: each step short-circuits via (>>) so
-- the first failing case is reported.
testMakeCandidates :: Either ([Candidate], [(String, Count)]) Bool
testMakeCandidates =
  oneTestMakeCandidates ["abc", "abc"] []
  >> oneTestMakeCandidates ["abcde", "abcde"]
                           [("abcde", 2)
                           , ("bcde", 2), ("abcd", 2)]
  >> oneTestMakeCandidates ["abcde", "abcde", "bcd"]
                           [("abcde", 2)
                           , ("bcd", 3)
                           , ("bcde", 2), ("abcd", 2)]
  >> oneTestMakeCandidates ["abcde", "abcde", "bcd", "abc"]
                           [("abcde", 2)
                           , ("bcd", 3), ("abc", 3)
                           , ("bcde", 2), ("abcd", 2)
                           , ("bc", 4)]
  >> oneTestMakeCandidates ["abcde", "abcde", "bcd", "abc"]
                           [("abcde", 2)
                           , ("bcd", 3), ("abc", 3)
                           , ("bcde", 2), ("abcd", 2)
                           , ("bc", 4)]
  >> oneTestMakeCandidates ["aab", "aab", "aac", "abd", "aae", "abf", "aa"]
                           -- but not ("ab", 4): makeCandidates takes
                           -- the minimum number of Digram candidates
                           -- and as a side-effect, they are already
                           -- filtered not to overlap.
                           [("aa", 5)]
-- | Cases for dropSafeOverlap: each line is (candidate, count,
-- other-candidate, count, expected-droppable?).
testDropSafeOverlap :: Either ((String, Count), (String, Count))
                              Bool
testDropSafeOverlap =
  t "abcde" 4 "abcd" 4 True
  >> t "abcde" 4 "bcde" 4 True
  >> t "abcde" 4 "abc" 4 True
  >> t "abcde" 4 "abd" 4 False
  >> t "abcde" 4 "abz" 4 False
  >> t "abcd" 4 "abc" 4 True
  >> t "abcd" 4 "abc" 5 False
  >> t "abcd" 4 "bcd" 4 True
  >> t "abcd" 4 "bc" 4 True
  >> t "abcd" 4 "abcde" 4 False
  >> t "abcd" 4 "defg" 4 False
  >> t "abcd" 4 "xyz" 4 False
  where t :: String -> Count -> String -> Count -> Bool
          -> Either ((String, Count), (String, Count))
                    Bool
        t a c x y e =
          if e == dropSafeOverlap (mkCand (a, c))
                                  (mkCand (x, y))
          then Right True
          else Left ((a, c), (x, y))
-- | Cases for the symmetric 'overlaps' predicate; each case is checked
-- in both argument orders.
testOverlaps :: Either (String, String) Bool
testOverlaps =
  t "abcd" "abc" True
  >> t "abcd" "ca" True
  >> t "abcd" "cab" True
  >> t "abcd" "bac" False
  >> t "abcd" "bacd" False
  >> t "abcd" "dacb" True
  >> t "abcd" "dbc" True
  >> t "abcd" "dbca" True
  >> t "abcd" "bcd" True
  >> t "abcd" "bc" True
  >> t "abcd" "abcde" True
  >> t "abcd" "defg" True
  >> t "abcd" "xyz" False
  >> t "abcd" "xyzd" False
  >> t "abcd" "axyz" False
  >> t "abcd" "xyza" True
  >> t "abcd" "dxyz" True
  >> t "abcd" "dxyza" True
  >> t "abcd" "xyabcdz" True
  where t :: String -> String -> Bool
          -> Either (String, String) Bool
        t a x e =
          if e == overlaps (mkCand (a, undefined))
                           (mkCand (x, undefined))
             && e == overlaps (mkCand (x, undefined))
                              (mkCand (a, undefined))
          then Right True
          else Left (a, x)
-- | Feed a list of (string, count) candidates through
-- 'getNextCandidates' and compare with the expected survivors.
oneTestGetNextCandidates :: [(String, Count)]
                         -> [(String, Count)]
                         -> Either ([Candidate], [(String, Count)]) Bool
oneTestGetNextCandidates a xpct =
  if obs == map mkCand xpct
  then Right True
  else Left (obs, xpct)
  where obs = getNextCandidates $ map mkCand a
-- | Cases for getNextCandidates' overlap filtering.
testGetNextCandidates :: Either ([Candidate], [(String, Count)]) Bool
testGetNextCandidates =
  oneTestGetNextCandidates [("abcde", 4)
                           , ("abcd", 4)
                           , ("bcde", 4)
                           , ("abc", 4)
                           , ("abd", 4)
                           , ("abz", 4)
                           , ("bcd", 4)
                           , ("cde", 4)
                           , ("def", 4)
                           , ("xy", 4)]
                           [("abcde", 4)
                           , ("abd", 4)
                           , ("abz", 4)]
  >> oneTestGetNextCandidates [("abcde", 4)]
                              [("abcde", 4)]
  >> oneTestGetNextCandidates [("abcde", 4)
                              , ("xabc", 5)
                              , ("yabc", 5)
                              , ("zabc", 5)
                              , ("bcde", 4)
                              , ("abc", 4)
                              , ("abd", 4)
                              , ("abz", 4)
                              , ("bcd", 4)
                              , ("cde", 4)
                              , ("def", 4)
                              , ("xy", 4)]
                              [("abcde", 4)]
  >> oneTestGetNextCandidates [("abcde", 4)
                              , ("cdex", 5)
                              , ("bcde", 4)
                              , ("abc", 4)
                              , ("abd", 4)
                              , ("abz", 4)
                              , ("bcd", 4)
                              , ("cde", 4)
                              , ("def", 4)
                              , ("xy", 4)]
                              [("abcde", 4)]
  -- see also oneTestMakeThenGetCandidates that integrates these 3 cases
  >> oneTestGetNextCandidates [("aa", 6), ("ab", 5)]
                              [("aa", 6)]
  >> oneTestGetNextCandidates [("aa", 6), ("ab", 5), ("ff", 5)]
                              [("aa", 6), ("ff", 5)]
  >> oneTestGetNextCandidates [("!!", 16),("!!?", 5),("!?", 11),("**", 11),("++", 11),("--", 11),("//", 11),("00", 11),("11", 11),("22", 11),("33", 11),("44", 11),("55", 11),("66", 11),("77", 11),("88", 11),("99", 11),("AA", 11),("BB", 11),("CC", 11),("DD", 11),("EE", 11),("FF", 11),("GG", 11),("HH", 11),("II", 11),("JJ", 11),("KK", 11),("LL", 11),("MM", 11),("NN", 11),("OO", 11),("PP", 11),("QQ", 11),("RR", 11),("SS", 11),("TT", 11),("UU", 11),("VV", 11),("WW", 11),("XX", 11),("YY", 11),("ZZ", 11),("__", 11),("aa", 11),("bb", 11),("cc", 11),("dd", 11),("ee", 11),("ff", 11),("gg", 11),("hh", 11),("ii", 11),("jj", 11),("kk", 11),("ll", 11),("mm", 11),("nn", 11),("oo", 11),("pp", 11),("qq", 11),("rr", 11),("ss", 11),("tt", 11),("uu", 11)]
                              [("!!", 16),("**", 11),("++", 11),("--", 11),("//", 11),("00", 11),("11", 11),("22", 11),("33", 11),("44", 11),("55", 11),("66", 11),("77", 11),("88", 11),("99", 11),("AA", 11),("BB", 11),("CC", 11),("DD", 11),("EE", 11),("FF", 11),("GG", 11),("HH", 11),("II", 11),("JJ", 11),("KK", 11),("LL", 11),("MM", 11),("NN", 11),("OO", 11),("PP", 11),("QQ", 11),("RR", 11),("SS", 11),("TT", 11),("UU", 11),("VV", 11),("WW", 11),("XX", 11),("YY", 11),("ZZ", 11),("__", 11),("aa", 11),("bb", 11),("cc", 11),("dd", 11),("ee", 11),("ff", 11),("gg", 11),("hh", 11),("ii", 11),("jj", 11),("kk", 11),("ll", 11),("mm", 11),("nn", 11),("oo", 11),("pp", 11),("qq", 11),("rr", 11),("ss", 11),("tt", 11),("uu", 11)]
-- | Integration of makeCandidates and getNextCandidates: build
-- candidates from raw input strings, filter them, and compare with the
-- expected survivors.
oneTestMakeThenGetCandidates :: [String]
                             -> [(String, Count)]
                             -> Either ([String], [Candidate]) Bool
oneTestMakeThenGetCandidates input xp =
  if observed == map mkCand xp then Right True else Left (input, observed)
  where observed = getNextCandidates
                 $ uncurry (makeCandidates maxCompressions)
                 $ ennGramMap
                 $ maybe undefined id
                 $ toCodepoints input
testMakeThenGetCandidates :: Either ([String], [Candidate]) Bool
testMakeThenGetCandidates =
  oneTestMakeThenGetCandidates ["aab", "aab", "aac", "aad", "aae",
                                "aa", "ab", "ab", "ab",
                                "ff", "ff", "ff", "ff"]
                               -- *NOT* ("ab", 5) because it is
                               -- invalidated by ("aa", 6)
                               -- *NOT* ("ff", 4) because after we
                               -- dropped the better ("ab", 5), we
                               -- need to recount if ("ff", 4)
                               -- would beat the new "ab" count
                               -- after replacing all "aa". In this
                               -- case, it would, but see the next
                               -- test.
                               [("aa", 6)]
  >> oneTestMakeThenGetCandidates ["aa", "aa", "aac", "aad", "aae",
                                   "aa", "ab", "ab", "ab", "ab", "ab",
                                   "ff", "ff", "ff", "ff"]
                                  -- *NOT* ("ab", 5) because it is
                                  -- invalidated by ("aa", 6)
                                  -- *NOT* ("ff", 4) because after we
                                  -- dropped the better ("ab", 5), we
                                  -- need to recount if ("ff", 4)
                                  -- would beat the new "ab" count
                                  -- after replacing all "aa". In
                                  -- this case, it wouldn't, but see
                                  -- the previous test.
                                  [("aa", 6)]
  >> oneTestMakeThenGetCandidates ["aab", "aab", "aac", "aad", "aae",
                                   "aa", "ab", "ab", "ab",
                                   "ff", "ff", "ff", "ff", "ff"]
                                  -- *NOT* ("ab", 5) because it is
                                  -- invalidated by ("aa", 6)
                                  -- ("ff", 5) is independent of
                                  -- ("ab", 5) and ("aa", 6) and just
                                  -- as good as ("ab", 5) so we can
                                  -- keep it
                                  [("aa", 6), ("ff", 5)]
  -- The idea of this test case is to check that when there are more
  -- candidates than the number of candidates requested, the exact
  -- number is returned (even if because of overlap, some candidates
  -- were dropped along the way)
  --
  -- preconditions for test case are verified below
  >> let alphabet = sort "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789+-*/_()"
         intToString x = replicate 2 $ alphabet !! x
         -- Utils.hs: compressionGain = (count - 1) * (fromIntegral len - 1) - 2
         n = 11 -- 10 * 1 - 2 = 8
         m = 6 -- 5 * 2 - 2 = 8
         spec1 = '!'
         spec2 = '?'
         t1 = [intToString x | x <- [0..maxCompressions + 2]]
         t2 = [spec1, spec1]:t1 -- prepended literal should have nothing in common with `alphabet'
         t3 = concat $ (replicate m [spec1, spec1, spec2]) -- "!!?"
                      :(replicate n $ spec1:"a") -- "!a"
                      :(replicate n $ spec1:"b") -- "!b"
                      :(replicate n $ spec1:"c") -- "!c"
                      :replicate n t2
     in either (Left . id)
               -- verify preconditions for test case:
               (Right . (&& compressionGain m 3 == compressionGain n 2
                            && compressionGain n 2 > 0
                            && length alphabet > maxCompressions
                            && not (spec1 `elem` alphabet)
                            && not (spec2 `elem` alphabet)
                            && spec1 /= spec2))
        $ oneTestMakeThenGetCandidates t3
        $ ("!!", n + m)
          -- since all candidates below have
          -- the same compressionGain, their
          -- order shouldn't matter, but my
          -- comparison of expected and
          -- observed values is dumb
          :reverse [(intToString x, n)
                   | x <- [4..maxCompressions + 2]]
| pcrama/message-compiler | test/TestCandSel.hs | bsd-3-clause | 13,020 | 0 | 24 | 5,619 | 3,831 | 2,307 | 1,524 | 204 | 2 |
{-# LANGUAGE PackageImports #-}
module Foreign.Marshal.Safe (module M) where
import "base" Foreign.Marshal.Safe as M
| silkapp/base-noprelude | src/Foreign/Marshal/Safe.hs | bsd-3-clause | 122 | 0 | 4 | 18 | 23 | 17 | 6 | 3 | 0 |
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
module Bench.Network.Commons
( MsgId
, Ping (..)
, Pong (..)
, Payload (..)
, logMeasure
, loadLogConfig
, Timestamp
, MeasureEvent (..)
, MeasureInfo (..)
, LogMessage (..)
, measureInfoParser
, logMessageParser
) where
import Control.Applicative ((<|>))
import Control.Lens ((&), (.~), (^.))
import Control.Monad (join)
import Control.Monad.Trans (MonadIO (..))
import Data.Attoparsec.Text (Parser, char, decimal, string, takeWhile)
import Data.Binary (Binary (..))
import qualified Data.ByteString.Lazy as BL
import Data.Data (Data)
import Data.Functor (($>))
import qualified Data.HashMap.Strict as HM
import Data.Int (Int64)
import Data.Monoid ((<>))
import Data.Text (Text)
import Data.Time.Units (toMicroseconds)
import Formatting.Buildable (Buildable (build))
import qualified Formatting as F
import GHC.Generics (Generic)
import Prelude hiding (takeWhile)
import Node (Message (..))
import Pos.Util (realTime)
import Pos.Util.Log.Internal (LoggingHandler)
import Pos.Util.Log.LoggerConfig (defaultInteractiveConfiguration,
lcLoggerTree, ltMinSeverity, ltNamedSeverity)
import Pos.Util.Trace (Trace, traceWith)
import Pos.Util.Wlog (LoggerConfig (..), Severity (..),
parseLoggerConfig, setLogPrefix, setupLogging')
-- * Transferred data types
type MsgId = Int
-- | Serializes into a message of (given size + const).  Only the byte
-- count is stored; the actual bytes are synthesised by the 'Binary'
-- instance below.
data Payload = Payload
    { getPayload :: Int64
    } deriving (Generic, Data)
data Ping = Ping MsgId Payload
    deriving (Generic, Data, Binary)
instance Message Ping where
    messageCode _ = 0
    formatMessage _ = "Ping"
data Pong = Pong MsgId Payload
    deriving (Generic, Data, Binary)
instance Message Pong where
    messageCode _ = 1
    formatMessage _ = "Pong"
-- 'put' writes @l@ filler bytes (value 42); 'get' reads a lazy
-- ByteString and keeps only its length, so a round trip preserves the
-- size but not the contents.
instance Binary Payload where
    get = Payload . BL.length <$> get
    put (Payload l) = put $ BL.replicate l 42
-- * Util
-- | Record one measurement event: timestamp it with the current real
-- time in microseconds and emit a '#'-prefixed 'LogMessage' rendering of
-- the resulting 'MeasureInfo' on the given trace.
logMeasure :: (MonadIO m) => Trace IO Text -> MeasureEvent -> MsgId -> Payload -> m ()
logMeasure logTrace miEvent miId miPayload = do
    miTime <- toMicroseconds <$> realTime
    liftIO $ traceWith logTrace $ F.sformat F.build $ LogMessage MeasureInfo{..}
-- | Interactive Info-level configuration with the chatty @comm@
-- subloggers of sender/receiver restricted to Error.
defaultLogConfig :: LoggerConfig
defaultLogConfig =
    let lc0 = defaultInteractiveConfiguration Info
        newlt = lc0 ^. lcLoggerTree
              & ltMinSeverity .~ Info
              & ltNamedSeverity .~
                  HM.fromList [ ("cardano-sl.sender", Info)
                              , ("cardano-sl.sender.comm", Error)
                              , ("cardano-sl.receiver", Info)
                              , ("cardano-sl.receiver.comm", Error) ]
    in
    lc0 & lcLoggerTree .~ newlt
-- | Set up logging from an optional config file (falling back to
-- 'defaultLogConfig') and an optional log-path prefix.
loadLogConfig :: MonadIO m => Maybe FilePath -> Maybe FilePath -> m LoggingHandler
loadLogConfig logsPrefix configFile = do
    lc1 <- case configFile of
        Nothing -> return defaultLogConfig
        Just lc0 -> parseLoggerConfig lc0
    lc <- liftIO $ setLogPrefix logsPrefix lc1
    setupLogging' "bench" lc
-- * Logging & parsing
-- ** Measure event
-- | Type of event in measurement.
data MeasureEvent
    = PingSent
    | PingReceived
    | PongSent
    | PongReceived
    deriving (Show, Eq, Ord, Enum, Bounded)
-- The four rendered forms are fixed-width markers; 'measureEventParser'
-- below must stay in sync with these exact strings.
instance Buildable MeasureEvent where
    build PingSent     = "• → "
    build PingReceived = " → •"
    build PongSent     = " ← •"
    build PongReceived = "• ← "
measureEventParser :: Parser MeasureEvent
measureEventParser = string "• → " $> PingSent
                 <|> string " → •" $> PingReceived
                 <|> string " ← •" $> PongSent
                 <|> string "• ← " $> PongReceived
-- ** Measure info
type Timestamp = Integer
-- | Single event in measurement.
data MeasureInfo = MeasureInfo
    { miId      :: MsgId
    , miEvent   :: MeasureEvent
    , miTime    :: Timestamp
    , miPayload :: Payload
    }
-- Rendered as: "<id> <event> (<payload size>) <timestamp>"; kept in sync
-- with 'measureInfoParser'.
instance Buildable MeasureInfo where
    build MeasureInfo{..} = mconcat
        [ build miId
        , " "
        , build miEvent
        , " ("
        , build $ getPayload miPayload
        , ") "
        , build miTime
        ]
measureInfoParser :: Parser MeasureInfo
measureInfoParser = do
    miId <- decimal
    _ <- string " "
    miEvent <- measureEventParser
    _ <- string " ("
    miPayload <- Payload <$> decimal
    _ <- string ") "
    miTime <- decimal
    return MeasureInfo{..}
-- ** Log message
-- | Allows to extract bare message content from logs.
-- Just inserts separator at beginning.
data LogMessage a = LogMessage a
instance Buildable a => Buildable (LogMessage a) where
    build (LogMessage a) = "#" <> build a
-- | Skip up to the '#' marker, then either parse a payload after the
-- marker or yield Nothing when no marker content follows.
logMessageParser :: Parser a -> Parser (Maybe (LogMessage a))
logMessageParser p = (takeWhile (/= '#') >>) . join $ do
    (char '#' $> (Just . LogMessage <$> p))
    <|> pure (pure Nothing)
| input-output-hk/pos-haskell-prototype | networking/src/Bench/Network/Commons.hs | mit | 5,363 | 0 | 13 | 1,577 | 1,337 | 749 | 588 | 132 | 2 |
{-# LANGUAGE ViewPatterns #-}
module U.Codebase.Reflog where
import Data.Text (Text)
import U.Codebase.HashTags (BranchHash)
-- | One reflog entry: a branch moved 'from' one hash 'to' another, with
-- a free-form 'reason' describing why.
data Entry = Entry {from :: BranchHash, to :: BranchHash, reason :: Text}
-- fromText :: Text -> Maybe Entry
-- fromText t =
-- case Text.words t of
-- (Hash.fromBase32Hex -> Just old) : (Hash.fromBase32Hex -> Just new) : (Text.unwords -> reason) ->
-- Just $ Entry (Causal.RawHash old) (Causal.RawHash new) reason
-- _ -> Nothing
-- toText :: Entry -> Text
-- toText (Entry old new reason) =
-- Text.unwords [ Hash.base32Hex . Causal.unRawHash $ old
-- , Hash.base32Hex . Causal.unRawHash $ new
-- , reason ]
| unisonweb/platform | codebase2/codebase/U/Codebase/Reflog.hs | mit | 688 | 0 | 8 | 155 | 67 | 47 | 20 | 5 | 0 |
module Extra.List
( consperse
, surround
, changePrefix
, dropPrefix
, cartesianProduct
, wordsBy
, empty
, sortByMapped
, sortByMappedM
, partitionM
, listIntersection
, isSublistOf
) where
import Control.Monad
import Data.List
{-# DEPRECATED consperse "Use intercalate" #-}
consperse :: [a] -> [[a]] -> [a]
-- ^ The mighty consperse function - e.g. consperse "," ["a", "b"] -> "a,b"
--
-- Kept only for backwards compatibility: it is exactly 'intercalate'
-- from "Data.List" (@concat . intersperse s@), so delegate to it.
consperse = intercalate
surround :: [a] -> [a] -> [[a]] -> [a]
-- ^ surround each element of a list - e.g. surround "(" ")" ["a", "b"] -> ["(a)(b)"]
surround prefix suffix = concatMap wrap
    where wrap item = prefix ++ item ++ suffix
-- |Replace the prefix @old@ of @s@ with @new@; return Nothing if @s@
-- does not start with @old@.
--
-- Uses 'stripPrefix' and maps over the Maybe result rather than the
-- redundant @maybe Nothing (Just . f)@ pattern (which is just 'fmap').
changePrefix :: (Eq a) => [a] -> [a] -> [a] -> Maybe [a]
changePrefix old new s = fmap (new ++) (stripPrefix old s)
-- |Remove a prefix of s, return Nothing if it doesn't match.
--
-- This is precisely 'Data.List.stripPrefix' from the standard library,
-- so delegate to it rather than re-implementing the match by hand.
dropPrefix :: (Eq a) => [a] -> [a] -> Maybe [a]
dropPrefix = stripPrefix
cartesianProduct :: [[a]] -> [[a]]
-- ^ cartesianProduct [[1,2,3], [4,5],[6]] -> [[1,4,6],[1,5,6],[2,4,6],[2,5,6],[3,4,6],[3,5,6]]
--
-- For a non-empty list of lists this is exactly the list-monad
-- 'sequence'.  The empty input is special-cased to @[]@ to preserve the
-- historical behaviour (plain 'sequence' would give @[[]]@).
cartesianProduct [] = []
cartesianProduct xss = sequence xss
-- |Split a list on elements matching the predicate.  Separators are
-- dropped and empty chunks are kept, so a trailing separator yields a
-- trailing empty chunk.  FIXME: implement for a string
wordsBy :: Eq a => (a -> Bool) -> [a] -> [[a]]
wordsBy isSep input =
    let (chunk, rest) = break isSep input
    in case rest of
         []       -> [chunk]
         _ : more -> chunk : wordsBy isSep more
-- |Like maybe, but with empty vs. non-empty list: return the first
-- argument for @[]@, otherwise apply the function to the list.
empty :: b -> ([a] -> b) -> [a] -> b
empty whenEmpty whenNonEmpty xs
    | null xs   = whenEmpty
    | otherwise = whenNonEmpty xs
-- |Sort a list using the compare function on the list elements mapped
-- over f. This is like "sortBy (\ a b -> compare (f a) (f b))"
-- except that f is applied O(n) times instead of O(n log n)
-- (classic decorate-sort-undecorate).
sortByMapped :: (a -> b) -> (b -> b -> Ordering) -> [a] -> [a]
sortByMapped f cmp xs = map fst (sortBy keyOrder decorated)
    where
      decorated = [(x, f x) | x <- xs]
      keyOrder (_, kx) (_, ky) = cmp kx ky
-- |Monadic version of sortByMapped: the key function runs in IO, each
-- element's key is computed exactly once (in list order).
sortByMappedM :: (a -> IO b) -> (b -> b -> Ordering) -> [a] -> IO [a]
sortByMappedM f cmp xs = do
    keys <- mapM f xs
    let decorated = zip xs keys
        ordered   = sortBy (\ (_, kx) (_, ky) -> cmp kx ky) decorated
    return (map fst ordered)
-- |Monadic partition: elements satisfying the predicate go in the first
-- list, the rest in the second.  NOTE: both result lists come back in
-- *reverse* input order, because each element is consed onto its
-- accumulator (this matches the original behaviour).
partitionM :: (Monad m) => (a -> m Bool) -> [a] -> m ([a], [a])
partitionM predicate = foldM step ([], [])
    where step (yes, no) x = do
            keep <- predicate x
            return (if keep then (x : yes, no) else (yes, x : no))
-- |Intersect all the lists by folding 'intersect' right-to-left over
-- them; an empty collection of lists intersects to @[]@.
listIntersection :: Eq a => [[a]] -> [a]
listIntersection lists =
    case lists of
      []             -> []
      (first : rest) -> foldr intersect first rest
-- |Return the index at which @sub@ first occurs as a contiguous sublist
-- of @lst@, or Nothing when it never does.  The empty list occurs at
-- index 0.
isSublistOf :: Eq a => [a] -> [a] -> Maybe Int
isSublistOf sub lst = findIndex (sub `isPrefixOf`) (tails lst)
{-
lookups :: (Eq a) => a -> [(a, b)] -> [b]
lookups a = map snd . filter ((a ==) . fst)
-}
| ddssff/haskell-extra | Extra/List.hs | bsd-3-clause | 3,096 | 0 | 13 | 759 | 1,173 | 638 | 535 | 63 | 2 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
module Bead.Domain.Entity.Comment where
import Control.Applicative
import Data.Data
import Data.Time (UTCTime)
#ifdef TEST
import Test.Tasty.Arbitrary
#endif
-- Comment type basically indicates that who left the comment,
-- constructors are self explanatories
data CommentType
= CT_Student
| CT_GroupAdmin
| CT_CourseAdmin
| CT_Admin
deriving (Data, Eq, Read, Show, Typeable)
commentTypeCata
student
groupAdmin
courseAdmin
admin
c = case c of
CT_Student -> student
CT_GroupAdmin -> groupAdmin
CT_CourseAdmin -> courseAdmin
CT_Admin -> admin
#ifdef TEST
-- Only compiled for the test build: random generation and shrinking
-- of comment types.  Shrinking moves through the admin roles toward
-- CT_Admin, which itself shrinks no further.
instance Arbitrary CommentType where
  arbitrary = elements [CT_Student, CT_GroupAdmin, CT_CourseAdmin, CT_Admin]
  shrink = commentTypeCata
    [CT_GroupAdmin, CT_CourseAdmin, CT_Admin]
    [CT_CourseAdmin, CT_Admin]
    [CT_Admin]
    []
#endif
-- | Comment on the text of exercise, on the evaluation
data Comment = Comment {
    comment :: String          -- ^ The comment text itself
  , commentAuthor :: String    -- ^ Author of the comment
  , commentDate :: UTCTime     -- ^ When the comment was left
  , commentType :: CommentType -- ^ Role of the author (see 'CommentType')
  } deriving (Eq, Show)
-- | Catamorphism for 'Comment': applies @f@ to the four fields in
-- declaration order.
commentCata f (Comment c a d t) = f c a d t
-- | Applicative builder for 'Comment' from four effectful field
-- computations.
commentAna comment author date type_ =
  Comment <$> comment <*> author <*> date <*> type_
-- Returns True if the comment can be displayed for the student
-- otherwise false
isStudentComment :: Comment -> Bool
isStudentComment = commentCata visible
  where
    -- The text, author and date are irrelevant; only the comment's
    -- type decides visibility.  Comments from every role are shown
    -- to the student except an administrator's.
    visible _text _author _date =
      commentTypeCata
        True  -- Student
        True  -- Group Admin
        True  -- Course Admin
        False -- Admin
| andorp/bead | src/Bead/Domain/Entity/Comment.hs | bsd-3-clause | 1,623 | 0 | 8 | 372 | 342 | 197 | 145 | 38 | 4 |
module B1.Data.Range
( gradualRange
, linearRange
) where
-- | Get a gradual range from @start@ to @end@ with @steps@ interior
-- elements: after the start value, each element halves the remaining
-- distance to @end@, and the final element is exactly @end@.
gradualRange :: Fractional a => a -> a -> Int -> [a]
gradualRange start end steps
  | steps < 0 = error "Number of steps cannot be negative."
  | otherwise = [gradualValue start end count ix | ix <- [0 .. steps + 1]]
  where count = steps + 2

-- | The @step@-th element (0-based) of a gradual range with
-- @numSteps@ total elements: the first is @start@, the last exactly
-- @end@, and every other element lies halfway between its
-- predecessor and @end@.
gradualValue :: Fractional a => a -> a -> Int -> Int -> a
gradualValue start end numSteps step
  | step == 0 = start
  | step == numSteps - 1 = end
  | otherwise = halfway (gradualValue start end numSteps (step - 1))
  where
    halfway prev = prev + (end - prev) / 2
-- | Get an evenly spaced range from @start@ to @end@ with @steps@
-- interior elements; the first element is exactly @start@ and the
-- last exactly @end@.
linearRange :: (Fractional a, Ord a) => a -> a -> Int -> [a]
linearRange start end steps
  | steps < 0 = error "Number of steps cannot be negative."
  | otherwise = [linearValue start end (steps + 2) ix | ix <- [0 .. steps + 1]]

-- | The @step@-th element (0-based) of a linear range with @numSteps@
-- total elements.  Endpoints are returned exactly; interior elements
-- advance from @start@ by a fixed amount toward @end@ (upward or
-- downward as needed).
linearValue :: (Fractional a, Ord a) => a -> a -> Int -> Int -> a
linearValue start end numSteps step
  | step == 0 = start
  | step == numSteps - 1 = end
  | start <= end = start + delta
  | otherwise = start - delta
  where
    delta = stepAmount * realToFrac step
    stepAmount = abs (start - end) / realToFrac (numSteps - 1)
| madjestic/b1 | src/B1/Data/Range.hs | bsd-3-clause | 1,203 | 0 | 10 | 281 | 479 | 240 | 239 | 24 | 1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP, NoImplicitPrelude, ScopedTypeVariables, BangPatterns #-}
-----------------------------------------------------------------------------
-- |
-- Module : Foreign.Storable
-- Copyright : (c) The FFI task force 2001
-- License : see libraries/base/LICENSE
--
-- Maintainer : ffi@haskell.org
-- Stability : provisional
-- Portability : portable
--
-- The module "Foreign.Storable" provides most elementary support for
-- marshalling and is part of the language-independent portion of the
-- Foreign Function Interface (FFI), and will normally be imported via
-- the "Foreign" module.
--
-----------------------------------------------------------------------------
module Foreign.Storable
( Storable(
sizeOf,
alignment,
peekElemOff,
pokeElemOff,
peekByteOff,
pokeByteOff,
peek,
poke)
) where
import GHC.Storable
import GHC.Stable ( StablePtr )
import GHC.Num
import GHC.Int
import GHC.Word
import GHC.Ptr
import GHC.Base
import GHC.Fingerprint.Type
import Data.Bits
import GHC.Real
{- |
The member functions of this class facilitate writing values of
primitive types to raw memory (which may have been allocated with the
above mentioned routines) and reading values from blocks of raw
memory. The class, furthermore, includes support for computing the
storage requirements and alignment restrictions of storable types.
Memory addresses are represented as values of type @'Ptr' a@, for some
@a@ which is an instance of class 'Storable'. The type argument to
'Ptr' helps provide some valuable type safety in FFI code (you can\'t
mix pointers of different types without an explicit cast), while
helping the Haskell type system figure out which marshalling method is
needed for a given pointer.
All marshalling between Haskell and a foreign language ultimately
boils down to translating Haskell data structures into the binary
representation of a corresponding data structure of the foreign
language and vice versa. To code this marshalling in Haskell, it is
necessary to manipulate primitive data types stored in unstructured
memory blocks. The class 'Storable' facilitates this manipulation on
all types for which it is instantiated, which are the standard basic
types of Haskell, the fixed size @Int@ types ('Int8', 'Int16',
'Int32', 'Int64'), the fixed size @Word@ types ('Word8', 'Word16',
'Word32', 'Word64'), 'StablePtr', all types from "Foreign.C.Types",
as well as 'Ptr'.
-}
class Storable a where
   -- An instance must supply 'sizeOf', 'alignment', and at least one
   -- function from each of the peek and poke groups; the remaining
   -- functions are derived from them (see the defaults below).
   {-# MINIMAL sizeOf, alignment,
       (peek | peekElemOff | peekByteOff),
       (poke | pokeElemOff | pokeByteOff) #-}
   sizeOf :: a -> Int
   -- ^ Computes the storage requirements (in bytes) of the argument.
   -- The value of the argument is not used.
   alignment :: a -> Int
   -- ^ Computes the alignment constraint of the argument. An
   -- alignment constraint @x@ is fulfilled by any address divisible
   -- by @x@. The value of the argument is not used.
   peekElemOff :: Ptr a -> Int -> IO a
   -- ^ Read a value from a memory area regarded as an array
   -- of values of the same kind. The first argument specifies
   -- the start address of the array and the second the index into
   -- the array (the first element of the array has index
   -- @0@). The following equality holds,
   --
   -- > peekElemOff addr idx = IOExts.fixIO $ \result ->
   -- > peek (addr `plusPtr` (idx * sizeOf result))
   --
   -- Note that this is only a specification, not
   -- necessarily the concrete implementation of the
   -- function.
   pokeElemOff :: Ptr a -> Int -> a -> IO ()
   -- ^ Write a value to a memory area regarded as an array of
   -- values of the same kind. The following equality holds:
   --
   -- > pokeElemOff addr idx x =
   -- > poke (addr `plusPtr` (idx * sizeOf x)) x
   peekByteOff :: Ptr b -> Int -> IO a
   -- ^ Read a value from a memory location given by a base
   -- address and offset. The following equality holds:
   --
   -- > peekByteOff addr off = peek (addr `plusPtr` off)
   pokeByteOff :: Ptr b -> Int -> a -> IO ()
   -- ^ Write a value to a memory location given by a base
   -- address and offset. The following equality holds:
   --
   -- > pokeByteOff addr off x = poke (addr `plusPtr` off) x
   peek :: Ptr a -> IO a
   -- ^ Read a value from the given memory location.
   --
   -- Note that the peek and poke functions might require properly
   -- aligned addresses to function correctly. This is architecture
   -- dependent; thus, portable code should ensure that when peeking or
   -- poking values of some type @a@, the alignment
   -- constraint for @a@, as given by the function
   -- 'alignment' is fulfilled.
   poke :: Ptr a -> a -> IO ()
   -- ^ Write the given value to the given memory location. Alignment
   -- restrictions might apply; see 'peek'.
   -- circular default instances
   -- Each default is defined in terms of another member of its group,
   -- so an instance must break the cycle by implementing at least one
   -- of them (enforced by the MINIMAL pragma above).
   peekElemOff = peekElemOff_ undefined
      where peekElemOff_ :: a -> Ptr a -> Int -> IO a
            peekElemOff_ undef ptr off = peekByteOff ptr (off * sizeOf undef)
   pokeElemOff ptr off val = pokeByteOff ptr (off * sizeOf val) val
   peekByteOff ptr off = peek (ptr `plusPtr` off)
   pokeByteOff ptr off = poke (ptr `plusPtr` off)
   peek ptr = peekElemOff ptr 0
   poke ptr = pokeElemOff ptr 0
-- System-dependent, but rather obvious instances
-- | 'Bool' is marshalled as an 'Int32': 'False' is written as 0,
-- 'True' as 1, and any non-zero value reads back as 'True'.
instance Storable Bool where
   sizeOf _ = sizeOf (undefined::Int32)
   alignment _ = alignment (undefined::Int32)
   peekElemOff p i = liftM (/= (0::Int32)) $ peekElemOff (castPtr p) i
   pokeElemOff p i x = pokeElemOff (castPtr p) i (if x then 1 else 0::Int32)
-- CPP template for the primitive instances below: T is the type,
-- size/align are given in bytes, and read/write are the low-level
-- element accessors imported from "GHC.Storable".
-- NOTE(review): the 4-byte size for Int, Word and the pointer types
-- assumes a 32-bit word size -- confirm for the target platform.
#define STORABLE(T,size,align,read,write) \
instance Storable (T) where { \
   sizeOf _ = size; \
   alignment _ = align; \
   peekElemOff = read; \
   pokeElemOff = write }
STORABLE(Char,4,4,readWideCharOffPtr,writeWideCharOffPtr)
STORABLE(Int,4,4,readIntOffPtr,writeIntOffPtr)
STORABLE(Word,4,4,readWordOffPtr,writeWordOffPtr)
STORABLE((Ptr a),4,4,readPtrOffPtr,writePtrOffPtr)
STORABLE((FunPtr a),4,4,readFunPtrOffPtr,writeFunPtrOffPtr)
STORABLE((StablePtr a),4,4,readStablePtrOffPtr,writeStablePtrOffPtr)
STORABLE(Float,4,4,readFloatOffPtr,writeFloatOffPtr)
STORABLE(Double,8,8,readDoubleOffPtr,writeDoubleOffPtr)
STORABLE(Word8,1,1,readWord8OffPtr,writeWord8OffPtr)
STORABLE(Word16,2,2,readWord16OffPtr,writeWord16OffPtr)
STORABLE(Word32,4,4,readWord32OffPtr,writeWord32OffPtr)
STORABLE(Word64,8,8,readWord64OffPtr,writeWord64OffPtr)
STORABLE(Int8,1,1,readInt8OffPtr,writeInt8OffPtr)
STORABLE(Int16,2,2,readInt16OffPtr,writeInt16OffPtr)
STORABLE(Int32,4,4,readInt32OffPtr,writeInt32OffPtr)
STORABLE(Int64,8,8,readInt64OffPtr,writeInt64OffPtr)
-- | A 'Ratio' is stored as two consecutive values of the underlying
-- type: the numerator first, then the denominator.
instance (Storable a, Integral a) => Storable (Ratio a) where
    sizeOf _ = 2 * sizeOf (undefined :: a)
    alignment _ = alignment (undefined :: a)
    peek p = do
        -- "q <- return $ castPtr p" replaced by a plain let binding.
        let q = castPtr p
        r <- peek q
        i <- peekElemOff q 1
        return (r % i)
    poke p (r :% i) = do
        let q = castPtr p
        poke q r
        pokeElemOff q 1 i
-- XXX: here to avoid orphan instance in GHC.Fingerprint
-- | 16 bytes (two 'Word64's), 8-byte aligned; stored in a fixed
-- big-endian layout regardless of the host byte order (see
-- 'peekFingerprint' and 'pokeFingerprint' below).
instance Storable Fingerprint where
  sizeOf _ = 16
  alignment _ = 8
  peek = peekFingerprint
  poke = pokeFingerprint
-- peek/poke in fixed BIG-endian 128-bit format
-- | Read a 'Fingerprint' stored as two big-endian 'Word64's, high
-- word first.
peekFingerprint :: Ptr Fingerprint -> IO Fingerprint
peekFingerprint p0 = do
  let peekW64 :: Ptr Word8 -> Int -> Word64 -> IO Word64
      peekW64 _ 0 !i = return i
      -- Fold in the next byte: earlier (more significant) bytes are
      -- shifted left as later ones arrive, i.e. big-endian order.
      peekW64 !p !n !i = do
        w8 <- peek p
        peekW64 (p `plusPtr` 1) (n-1)
                ((i `shiftL` 8) .|. fromIntegral w8)
  high <- peekW64 (castPtr p0) 8 0
  low <- peekW64 (castPtr p0 `plusPtr` 8) 8 0
  return (Fingerprint high low)
-- | Write a 'Fingerprint' as two big-endian 'Word64's, high word
-- first: each step writes the least-significant byte of the
-- accumulator at the highest remaining index, then shifts right, so
-- the most significant byte ends up at index 0.
pokeFingerprint :: Ptr Fingerprint -> Fingerprint -> IO ()
pokeFingerprint p0 (Fingerprint high low) = do
  let pokeW64 :: Ptr Word8 -> Int -> Word64 -> IO ()
      pokeW64 _ 0 _ = return ()
      pokeW64 p !n !i = do
        pokeElemOff p (n-1) (fromIntegral i)
        pokeW64 p (n-1) (i `shiftR` 8)
  pokeW64 (castPtr p0) 8 high
  pokeW64 (castPtr p0 `plusPtr` 8) 8 low
| alexander-at-github/eta | libraries/base/Foreign/Storable.hs | bsd-3-clause | 8,644 | 0 | 16 | 2,245 | 1,525 | 823 | 702 | -1 | -1 |
{-|
Module : Language.Qux.Annotated.Syntax
Description : Abstract syntax tree nodes with annotations.
Copyright : (c) Henry J. Wylde, 2015
License : BSD3
Maintainer : hjwylde@gmail.com
Abstract syntax tree nodes with annotations.
The annotation style was inspired by haskell-src-exts.
Instances of 'Simplifiable' are provided for simplifying a node down to its unannotated form and of
'Pretty' for pretty printing.
The instances of 'Eq' are defined in terms of the simplified nodes, i.e., the annotation does not
impact node equality.
-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module Language.Qux.Annotated.Syntax (
-- * Type classes
Annotated(..),
Simplifiable(..),
-- * Annotated nodes
Id(..), Program(..), Decl(..), Attribute(..), Stmt(..), Expr(..), Type(..),
-- * Regular nodes
BinaryOp(..), UnaryOp(..), Value(..),
-- * Extra methods
-- ** Utility
qualify, mangle,
) where
import Data.Function
import Data.List
import Data.Tuple.Extra
import Language.Qux.Syntax (BinaryOp (..), UnaryOp (..), Value (..))
import qualified Language.Qux.Syntax as Simp
import Text.PrettyPrint.HughesPJClass
-- | An annotated class.
-- Annotations are used for attaching data to a node, such as a 'Text.Parsec.SourcePos'.
class Annotated n where
    ann :: n a -> a
-- | A simplifiable class.
-- Simplifiable is used to simplify a node to a simpler form.
-- See "Language.Qux.Syntax" for simpler forms of the nodes defined here.
class Simplifiable n r | n -> r where
    simp :: n -> r
-- | An identifier. Identifiers should match '[a-z_][a-zA-Z0-9_']*'.
data Id a = Id a String
    deriving (Functor, Show)
instance Annotated Id where
    ann (Id a _) = a
-- Equality compares the simplified node, so the annotation is ignored.
instance Eq (Id a) where
    (==) = (==) `on` simp
-- Simplifying drops the annotation, leaving the raw name.
instance Simplifiable (Id a) [Char] where
    simp (Id _ id) = id
instance Pretty (Id a) where
    pPrint = text . simp
-- | A program is a module identifier (list of 'Id''s) and a list of declarations.
data Program a = Program a [Id a] [Decl a]
    deriving (Functor, Show)
instance Annotated Program where
    ann (Program a _ _) = a
-- Equality, simplification and pretty-printing all go through 'simp',
-- so the annotation carries no semantic weight.
instance Eq (Program a) where
    (==) = (==) `on` simp
instance Simplifiable (Program a) Simp.Program where
    simp (Program _ module_ decls) = Simp.Program (map simp module_) (map simp decls)
instance Pretty (Program a) where
    pPrint = pPrint . simp
-- | A declaration.
data Decl a = FunctionDecl a [Attribute a] (Id a) [(Type a, Id a)] [Stmt a] -- ^ A name, list of ('Type', 'Id') parameters and statements.
                                                                            -- The return type is treated as a parameter with id '@'.
            | ImportDecl a [Id a] -- ^ A module identifier to import.
            | TypeDecl a [Attribute a] (Id a) -- ^ A type declaration.
    deriving (Functor, Show)
instance Annotated Decl where
    ann (FunctionDecl a _ _ _ _) = a
    ann (ImportDecl a _) = a
    ann (TypeDecl a _ _) = a
-- Equality ignores annotations (compares simplified nodes).
instance Eq (Decl a) where
    (==) = (==) `on` simp
instance Simplifiable (Decl a) Simp.Decl where
    simp (FunctionDecl _ attrs name type_ stmts) = Simp.FunctionDecl (map simp attrs) (simp name) (map (simp *** simp) type_) (map simp stmts)
    simp (ImportDecl _ id) = Simp.ImportDecl $ map simp id
    simp (TypeDecl _ attrs name) = Simp.TypeDecl (map simp attrs) (simp name)
instance Pretty (Decl a) where
    pPrint = pPrint . simp
-- | A declaration attribute.
data Attribute a = External a
    deriving (Functor, Show)
instance Annotated Attribute where
    ann (External a) = a
-- Equality ignores annotations (compares simplified nodes).
instance Eq (Attribute a) where
    (==) = (==) `on` simp
instance Simplifiable (Attribute a) Simp.Attribute where
    simp (External _) = Simp.External
instance Pretty (Attribute a) where
    pPrint = pPrint . simp
-- | A statement.
data Stmt a = IfStmt a (Expr a) [Stmt a] [Stmt a] -- ^ A condition, true block and false block of statements.
            | CallStmt a (Expr a) -- ^ A call statement.
            | ReturnStmt a (Maybe (Expr a)) -- ^ An expression.
            | WhileStmt a (Expr a) [Stmt a] -- ^ A condition and block of statements.
    deriving (Functor, Show)
instance Annotated Stmt where
    ann (IfStmt a _ _ _) = a
    ann (CallStmt a _ ) = a
    ann (ReturnStmt a _) = a
    ann (WhileStmt a _ _) = a
-- Equality ignores annotations (compares simplified nodes).
instance Eq (Stmt a) where
    (==) = (==) `on` simp
instance Simplifiable (Stmt a) Simp.Stmt where
    simp (IfStmt _ condition trueStmts falseStmts) = Simp.IfStmt (simp condition) (map simp trueStmts) (map simp falseStmts)
    simp (CallStmt _ expr) = Simp.CallStmt (simp expr)
    simp (ReturnStmt _ mExpr) = Simp.ReturnStmt (simp <$> mExpr)
    simp (WhileStmt _ condition stmts) = Simp.WhileStmt (simp condition) (map simp stmts)
instance Pretty (Stmt a) where
    pPrint = pPrint . simp
-- | A complex expression.
data Expr a = ApplicationExpr a (Id a) [Expr a] -- ^ A function name (unresolved) to call
                                                -- and the arguments to pass as parameters.
            | BinaryExpr a BinaryOp (Expr a) (Expr a) -- ^ A binary operation.
            | CallExpr a [Id a] [Expr a] -- ^ A function id (resolved) to call and
                                         -- the arguments to pass as parameters.
            | TypedExpr a Simp.Type (Expr a) -- ^ A typed expression.
                                             -- See "Language.Qux.Annotated.TypeResolver".
            | UnaryExpr a UnaryOp (Expr a) -- ^ A unary operation.
            | ValueExpr a Value -- ^ A raw value.
            | VariableExpr a (Id a) -- ^ A local variable access.
    deriving (Functor, Show)
instance Annotated Expr where
    ann (ApplicationExpr a _ _) = a
    ann (BinaryExpr a _ _ _) = a
    ann (CallExpr a _ _) = a
    ann (TypedExpr a _ _) = a
    ann (UnaryExpr a _ _) = a
    ann (ValueExpr a _) = a
    ann (VariableExpr a _) = a
-- Equality ignores annotations (compares simplified nodes).
instance Eq (Expr a) where
    (==) = (==) `on` simp
instance Simplifiable (Expr a) Simp.Expr where
    simp (ApplicationExpr _ name arguments) = Simp.ApplicationExpr (simp name) (map simp arguments)
    simp (BinaryExpr _ op lhs rhs) = Simp.BinaryExpr op (simp lhs) (simp rhs)
    simp (CallExpr _ id arguments) = Simp.CallExpr (map simp id) (map simp arguments)
    simp (TypedExpr _ type_ expr) = Simp.TypedExpr type_ (simp expr)
    simp (UnaryExpr _ op expr) = Simp.UnaryExpr op (simp expr)
    simp (ValueExpr _ value) = Simp.ValueExpr value
    simp (VariableExpr _ name) = Simp.VariableExpr $ simp name
instance Pretty (Expr a) where
    pPrint = pPrint . simp
-- | A type.
data Type a = AnyType a
            | BoolType a
            | IntType a
            | StrType a
            | VoidType a
    deriving (Functor, Show)
instance Annotated Type where
    ann (AnyType a) = a
    ann (BoolType a) = a
    ann (IntType a) = a
    ann (StrType a) = a
    ann (VoidType a) = a
-- Equality ignores annotations (compares simplified nodes).
instance Eq (Type a) where
    (==) = (==) `on` simp
instance Simplifiable (Type a) Simp.Type where
    simp (AnyType _) = Simp.AnyType
    simp (BoolType _) = Simp.BoolType
    simp (IntType _) = Simp.IntType
    simp (StrType _) = Simp.StrType
    simp (VoidType _) = Simp.VoidType
instance Pretty (Type a) where
    pPrint = pPrint . simp
-- | Qualifies the identifier into a single 'Id' joined with periods.
-- The annotation of the first segment is reused.
-- NOTE(review): partial -- 'head' fails on an empty segment list.
qualify :: [Id a] -> Id a
qualify segments = Id annotation joined
    where
        annotation = ann (head segments)
        joined = intercalate "." (map simp segments)
-- | Mangles the identifier into a single 'Id' joined with underscores.
-- The annotation of the first segment is reused.
-- NOTE(review): partial -- 'head' fails on an empty segment list.
mangle :: [Id a] -> Id a
mangle segments = Id annotation joined
    where
        annotation = ann (head segments)
        joined = intercalate "_" (map simp segments)
| hjwylde/qux-haskell | src/Language/Qux/Annotated/Syntax.hs | bsd-3-clause | 8,172 | 0 | 10 | 2,512 | 2,391 | 1,289 | 1,102 | 139 | 1 |
{-# LANGUAGE OverloadedStrings #-}
-- Usage:
-- ghc ../examples/Test.hs
-- ghc Unit.hs
-- ./Unit ../examples/Test
-- Requirements:
-- - Empty ~/.haskeline (or set to defaults)
-- - Assumes the dummy folder is in the current folder
-- - On Mac OS X, may need to clear out /usr/lib/charset.alias
-- (In particular, the line "* UTF-8" which makes locale_charset()
-- always return UTF-8; otherwise we can't test latin-1.)
-- - NB: Window size isn't provided by screen so it's picked up from
-- terminfo or defaults (either way: 80x24), rather than the user's
-- terminal.
module Main where
import System.Environment
import Test.HUnit
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as BC
import Data.Word
import qualified Data.Text as T
import qualified Data.Text.Encoding as E
import RunTTY
-- Whether we are exercising the legacy encoding backend (as opposed
-- to the base-library decoder).
legacyEncoding :: Bool
legacyEncoding = False

-- Generally we want the legacy and new backends to perform the same.
-- The only two differences I'm aware of are:
-- 1. base decodes invalid bytes as '\65533', but legacy decodes them as '?'
-- 2. if there's an incomplete sequence and no more input immediately
--    available (but not eof), then base will pause to wait for more input,
--    whereas legacy will immediately stop.

-- | Keep the given expected bytes only when testing the legacy
-- backend; otherwise expect no output at all.
whenLegacy :: B.ByteString -> B.ByteString
whenLegacy bs
    | legacyEncoding = bs
    | otherwise = B.empty
-- | Entry point: expects the path of the compiled example program as
-- the single command-line argument, then runs the interactive (TTY)
-- and file-redirection test groups against it with TERM=xterm.
main = do
    [p] <- getArgs
    let i = setTerm "xterm"
                Invocation {
                    prog = p,
                    progArgs = [],
                    runInTTY = True,
                    environment = []
                }
    runTestTT $ test [interactionTests i, fileStyleTests i]
-- All test groups that drive the program through a pseudo-terminal;
-- the char-input and dumb-terminal groups get adjusted invocations.
interactionTests i = "interaction" ~: test
    [ unicodeEncoding i
    , unicodeMovement i
    , tabCompletion i
    , incorrectInput i
    , historyTests i
    , inputChar $ setCharInput i
    , dumbTests $ setTerm "dumb" i
    ]
-- Feed well-formed UTF-8 input and check that the echoed bytes and
-- the program's "line N:" output come back byte-for-byte.
unicodeEncoding i = "Unicode encoding (valid)" ~:
    [ utf8Test i [utf8 "xαβγy"]
        [prompt 0, utf8 "xαβγy"]
    , utf8Test i [utf8 "a\n", "quit\n"]
        [ prompt 0
        , utf8 "a" <> end
            <> output 0 (utf8 "a") <> prompt 1
        , utf8 "quit" <> end
        ]
    , utf8Test i [utf8 "xαβyψ안기q영\n", "quit\n"]
        [ prompt 0
        , utf8 "xαβyψ안기q영" <> end
            <> output 0 (utf8 "xαβyψ안기q영") <> prompt 1
        , utf8 "quit" <> end
        ]
    -- test buffering: 32 bytes is in middle of a char encoding,
    -- also test long paste
    , "multipleLines" ~: utf8Test i [l1 <> "\n" <> l1]
        [ prompt 0
        , l1 <> end <> output 0 l1 <> prompt 1 <> l1]
    ]
  where
    l1 = utf8 $ T.replicate 30 "안" -- three bytes, width 60
-- Cursor movement over multi-byte and double-width characters,
-- including movement across wrapped lines (the width arithmetic in
-- the where clause assumes an 80-column terminal).
unicodeMovement i = "Unicode movement" ~:
    [ "separate" ~: utf8Test i [utf8 "α", utf8 "\ESC[Dx"]
        [prompt 0, utf8 "α", utf8 "\bxα\b"]
    , "coalesced" ~: utf8Test i [utf8 "α\ESC[Dx"]
        [prompt 0, utf8 "xα\b"]
    , "lineWrap" ~: utf8Test i
        [ utf8 longWideChar
        , raw [1]
        , raw [5]
        ]
        [prompt 0, utf8 lwc1 <> wrap <> utf8 lwc2 <> wrap <> utf8 lwc3
        , cr <> "\ESC[2A\ESC[2C"
        , cr <> nl <> nl <> "\ESC[22C"
        ]
    ]
  where
    longWideChar = T.concat $ replicate 30 $ "안기영"
    (lwc1,lwcs1) = T.splitAt ((80-2)`div`2) longWideChar
    (lwc2,lwcs2) = T.splitAt (80`div`2) lwcs1
    (lwc3,lwcs3) = T.splitAt (80`div`2) lwcs2
    -- lwc3 has length 90 - (80-2)/2 - 80/2 = 11,
    -- so the last line as wide width 2*11=22.
-- Filename completion on a name containing non-ASCII characters;
-- relies on the "dummy-μασ" folder being present (see file header).
tabCompletion i = "tab completion" ~:
    [ utf8Test i [ utf8 "dummy-μ\t\t" ]
        [ prompt 0, utf8 "dummy-μασ/"
            <> nl <> utf8 "bar ςερτ" <> nl
            <> prompt' 0 <> utf8 "dummy-μασ/"
        ]
    ]
-- Malformed UTF-8 on the input side: each undecodable byte should
-- surface as 'err' (see the legacy/base difference at 'whenLegacy').
incorrectInput i = "incorrect input" ~:
    [ utf8Test i [ utf8 "x" <> raw [206] ] -- needs one more byte
        -- non-legacy encoder ignores the "206" since it's still waiting
        -- for more input.
        [ prompt 0, utf8 "x" <> whenLegacy err ]
    , utf8Test i [ raw [206] <> utf8 "x" ]
        -- 'x' is not valid after '\206', so both the legacy and
        -- non-legacy encoders should handle the "x" correctly.
        [ prompt 0, err <> utf8 "x"]
    , utf8Test i [ raw [236,149] <> utf8 "x" ] -- needs one more byte
        [prompt 0, err <> err <> utf8 "x"]
    ]
-- Recall (up-arrow) of a history file written with valid and invalid
-- byte sequences, under UTF-8 and latin-1 locales.
historyTests i = "history encoding" ~:
    [ utf8TestValidHist i [ "\ESC[A" ]
        [prompt 0, utf8 "abcα" ]
    , utf8TestInvalidHist i [ "\ESC[A" ]
        -- NB: this is decoded by either utf8-string or base;
        -- either way they produce \65533 instead of '?'.
        [prompt 0, utf8 "abcα\65533x\65533x\65533" ]
    -- In latin-1: read errors as utf-8 '\65533', display as '?'
    , latin1TestInvalidHist i [ "\ESC[A" ]
        [prompt 0, utf8 "abc??x?x?" ]
    ]
-- History-file fixtures: 'invalidHist' mixes valid text with three
-- kinds of broken UTF-8 (see the inline notes); 'validHist' is clean.
invalidHist = utf8 "abcα"
                `B.append` raw [149] -- invalid start of UTF-8 sequence
                `B.append` utf8 "x"
                `B.append` raw [206] -- incomplete start
                `B.append` utf8 "x"
                -- incomplete at end of file
                `B.append` raw [206]
validHist = utf8 "abcα"
-- getInputChar mode: the program echoes and reports one character per
-- prompt; undecodable bytes again show up as 'err'.
inputChar i = "getInputChar" ~:
    [ utf8Test i [utf8 "xαβ"]
        [ prompt 0, utf8 "x" <> end <> output 0 (utf8 "x")
            <> prompt 1 <> utf8 "α" <> end <> output 1 (utf8 "α")
            <> prompt 2 <> utf8 "β" <> end <> output 2 (utf8 "β")
            <> prompt 3
        ]
    , "bad encoding (separate)" ~:
        utf8Test i [utf8 "α", raw [149], utf8 "x", raw [206]]
        [ prompt 0, utf8 "α" <> end <> output 0 (utf8 "α") <> prompt 1
        , err <> end <> output 1 err <> prompt 2
        , utf8 "x" <> end <> output 2 (utf8 "x") <> prompt 3
        , whenLegacy (err <> end <> output 3 err <> prompt 4)
        ]
    , "bad encoding (together)" ~:
        utf8Test i [utf8 "α" <> raw [149] <> utf8 "x" <> raw [206]]
        [ prompt 0, utf8 "α" <> end <> output 0 (utf8 "α")
            <> prompt 1 <> err <> end <> output 1 err
            <> prompt 2 <> utf8 "x" <> end <> output 2 (utf8 "x")
            <> prompt 3 <> whenLegacy (err <> end <> output 3 err <> prompt 4)
        ]
    , utf8Test i [raw [206]] -- incomplete
        [ prompt 0, whenLegacy (utf8 "?" <> end <> output 0 (utf8 "?"))
                        <> whenLegacy (prompt 1)
        ]
    ]
-- Pass the "chars" argument so the example program uses getInputChar.
setCharInput i = i { progArgs = ["chars"] }
-- The same encoding checks with stdin/stdout redirected (no TTY):
-- no echo, and prompts appear only as the program prints them.
fileStyleTests i = "file style" ~:
    [ "line input" ~: utf8Test iFile
        [utf8 "xαβyψ안기q영\nquit\n"]
        [ prompt' 0, output 0 (utf8 "xαβyψ안기q영") <> prompt' 1]
    , "char input" ~: utf8Test iFileChar
        [utf8 "xαβt"]
        [ prompt' 0
        , output 0 (utf8 "x")
            <> prompt' 1 <> output 1 (utf8 "α")
            <> prompt' 2 <> output 2 (utf8 "β")
            <> prompt' 3 <> output 3 (utf8 "t")
            <> prompt' 4]
    , "invalid line input" ~: utf8Test iFile
        -- NOTE: the 206 is an incomplete byte sequence,
        -- but we MUST not pause since we're at EOF, not just
        -- end of term.
        --
        -- Also recall GHC bug #5436 which caused a crash
        -- if the last byte started an incomplete sequence.
        [ utf8 "a" <> raw [149] <> utf8 "x" <> raw [206] ]
        [ prompt' 0
        , B.empty
        -- It only prompts after the EOF.
        , output 0 (utf8 "a" <> err <> utf8 "x" <> err) <> prompt' 1
        ]
    , "invalid char input (following a newline)" ~: utf8Test iFileChar
        [ utf8 "a\n" <> raw [149] <> utf8 "x\n" <> raw [206] ]
        $ [ prompt' 0
        , output 0 (utf8 "a")
            <> prompt' 1 <> output 1 err
            <> prompt' 2 <> output 2 (utf8 "x")
            <> prompt' 3
            <> whenLegacy (output 3 err <> prompt' 4)
        ] ++ if legacyEncoding then [] else [ output 3 err <> prompt' 4 ]
    , "invalid char file input (no preceding newline)" ~: utf8Test iFileChar
        [ utf8 "a" <> raw [149] <> utf8 "x" <> raw [206] ]
        -- make sure it tries to read a newline
        -- and instead gets the incomplete 206.
        -- This should *not* cause it to crash or block.
        $ [ prompt' 0
        , output 0 (utf8 "a")
            <> prompt' 1 <> output 1 err
            <> prompt' 2 <> output 2 (utf8 "x")
            <> prompt' 3
            <> whenLegacy (output 3 err <> prompt' 4)
        ] ++ if legacyEncoding then [] else [ output 3 err <> prompt' 4 ]
    ]
    -- also single char and buffer break and other stuff
  where
    iFile = i { runInTTY = False }
    iFileChar = setCharInput iFile
-- Test that the dumb terminal backend does encoding correctly.
-- If all the above tests work for the terminfo backend,
-- then we just need to make sure the dumb term plugs into everything
-- correctly, i.e., encodes the input/output and doesn't double-encode.
dumbTests i = "dumb term" ~:
    [ "line input" ~: utf8Test i
        [ utf8 "xαβγy" ]
        [ prompt' 0, utf8 "xαβγy" ]
    , "line input wide movement" ~: utf8Test i
        -- The dumb backend moves the cursor with plain backspaces:
        -- 30 double-width chars = 60 columns = 60 '\b's.
        [ utf8 wideChar, raw [1], raw [5] ]
        [ prompt' 0, utf8 wideChar
        , utf8 (T.replicate 60 "\b")
        , utf8 wideChar
        ]
    , "line char input" ~: utf8Test (setCharInput i)
        [utf8 "xαβ"]
        [ prompt' 0, utf8 "x" <> nl <> output 0 (utf8 "x")
            <> prompt' 1 <> utf8 "α" <> nl <> output 1 (utf8 "α")
            <> prompt' 2 <> utf8 "β" <> nl <> output 2 (utf8 "β")
            <> prompt' 3
        ]
    ]
  where
    wideChar = T.concat $ replicate 10 $ "안기영"
-------------
-- Building blocks for expected input/output
-- These are byte-exact fixtures; do not reformat the escape strings.

-- Enter/leave keypad-transmit mode (xterm's smkx/rmkx strings).
smkx,rmkx :: B.ByteString
smkx = utf8 "\ESC[?1h\ESC="
rmkx = utf8 "\ESC[?1l\ESC>"

-- | The prompt "<k>:"; 'prompt' additionally expects the smkx
-- sequence that precedes line editing, 'prompt'' is the bare text.
prompt, prompt' :: Int -> B.ByteString
prompt k = smkx <> prompt' k
prompt' k = utf8 (T.pack (show k ++ ":"))

-- End of an edited line: newline plus leaving keypad mode.
end :: B.ByteString
end = nl <> rmkx

cr :: B.ByteString
cr = raw [13]

nl :: B.ByteString
nl = raw [13,10] -- NB: see fixNL: this is really [13,13,10]

-- | The example program's report for line/char @k@: "line <k>:<s>\n".
output :: Int -> B.ByteString -> B.ByteString
output k s = utf8 (T.pack $ "line " ++ show k ++ ":")
                <> s <> raw [10]

-- Emitted when the terminal wraps onto the next line.
wrap :: B.ByteString
wrap = utf8 " \b"

-- Local ByteString append.
-- NOTE(review): shadows Prelude's (<>) on base >= 4.11 -- confirm the
-- build still compiles on a modern GHC.
(<>) :: B.ByteString -> B.ByteString -> B.ByteString
(<>) = B.append

utf8 :: T.Text -> B.ByteString
utf8 = E.encodeUtf8

raw :: [Word8] -> B.ByteString
raw = B.pack

-- Expected rendering of an undecodable byte: '?' for the legacy
-- backend, U+FFFD for the base decoder.
err :: B.ByteString
err = if legacyEncoding
        then utf8 "?"
        else utf8 "\65533"
----------------------
-- Invocation wrappers: run under a UTF-8 (or latin-1) locale; the
-- *Hist variants first seed the "myhist" history file on disk
-- (presumably read back by the example program -- see examples/Test.hs).
utf8Test = testI . setUTF8
utf8TestInvalidHist i input output = test $ do
    B.writeFile "myhist" $ invalidHist
    assertInvocation (setUTF8 i) input output
utf8TestValidHist i input output = test $ do
    B.writeFile "myhist" validHist
    assertInvocation (setUTF8 i) input output
latin1TestInvalidHist i input output = test $ do
    B.writeFile "myhist" $ invalidHist
    assertInvocation (setLatin1 i) input output
| DavidAlphaFox/ghc | libraries/haskeline/tests/Unit.hs | bsd-3-clause | 10,756 | 0 | 23 | 3,267 | 3,111 | 1,594 | 1,517 | 201 | 3 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="es-ES">
<title>OAST Support Add-on</title>
<maps>
<homeID>oast</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | kingthorin/zap-extensions | addOns/oast/src/main/javahelp/org/zaproxy/addon/oast/resources/help_es_ES/helpset_es_ES.hs | apache-2.0 | 965 | 77 | 67 | 157 | 413 | 209 | 204 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="hi-IN">
<title>Replacer | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/replacer/src/main/javahelp/org/zaproxy/zap/extension/replacer/resources/help_hi_IN/helpset_hi_IN.hs | apache-2.0 | 969 | 78 | 66 | 158 | 411 | 208 | 203 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="bs-BA">
<title>Technology detection | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Sadržaj</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Indeks</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Traži</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favoriti</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | kingthorin/zap-extensions | addOns/wappalyzer/src/main/javahelp/org/zaproxy/zap/extension/wappalyzer/resources/help_bs_BA/helpset_bs_BA.hs | apache-2.0 | 981 | 78 | 66 | 159 | 419 | 211 | 208 | -1 | -1 |
module TupleIn2 where
f :: (a, ([Int],c)) -> ([Int],c)
f (x, y@(b_1@[], b_2)) = y
f (x, y@(b_1@(b_4 : b_3), b_2)) = y
f (x, y@(b_1, b_2)) = y
f (x, y@([], m)) = y | kmate/HaRe | old/testing/subIntroPattern/TupleIn2_TokOut.hs | bsd-3-clause | 164 | 0 | 11 | 36 | 146 | 90 | 56 | 6 | 1 |
{-# LANGUAGE BangPatterns, CPP, DeriveFunctor, ScopedTypeVariables #-}
-- | This module allows for incremental decoding and encoding of CSV
-- data. This is useful if you e.g. want to interleave I\/O with
-- parsing or if you want finer grained control over how you deal with
-- type conversion errors.
--
-- Decoding example:
--
-- > main :: IO ()
-- > main = withFile "salaries.csv" ReadMode $ \ csvFile -> do
-- > let loop !_ (Fail _ errMsg) = putStrLn errMsg >> exitFailure
-- > loop acc (Many rs k) = loop (acc + sumSalaries rs) =<< feed k
-- > loop acc (Done rs) = putStrLn $ "Total salaries: " ++
-- > show (sumSalaries rs + acc)
-- >
-- > feed k = do
-- > isEof <- hIsEOF csvFile
-- > if isEof
-- > then return $ k B.empty
-- > else k `fmap` B.hGetSome csvFile 4096
-- > loop 0 (decode NoHeader)
-- > where
-- > sumSalaries rs = sum [salary | Right (_ :: String, salary :: Int) <- rs]
--
-- Encoding example:
--
-- > data Person = Person { name :: !String, salary :: !Int }
-- > deriving Generic
-- >
-- > instance FromNamedRecord Person
-- > instance ToNamedRecord Person
-- > instance DefaultOrdered Person
-- >
-- > persons :: [Person]
-- > persons = [Person "John" 50000, Person "Jane" 60000]
-- >
-- > main :: IO ()
-- > main = putStrLn $ encodeDefaultOrderedByName (go persons)
-- > where
-- > go (x:xs) = encodeNamedRecord x <> go xs
--
module Data.Csv.Incremental
(
-- * Decoding
HeaderParser(..)
, decodeHeader
, decodeHeaderWith
-- $typeconversion
, Parser(..)
-- ** Index-based record conversion
-- $indexbased
, HasHeader(..)
, decode
, decodeWith
-- ** Name-based record conversion
-- $namebased
, decodeByName
, decodeByNameWith
-- * Encoding
-- ** Index-based record conversion
-- $indexbased
, encode
, encodeWith
, encodeRecord
, Builder
-- ** Name-based record conversion
-- $namebased
, encodeByName
, encodeDefaultOrderedByName
, encodeByNameWith
, encodeDefaultOrderedByNameWith
, encodeNamedRecord
, NamedBuilder
) where
import Control.Applicative ((<|>))
import qualified Data.Attoparsec.ByteString as A
import Data.Attoparsec.ByteString.Char8 (endOfInput)
import qualified Data.ByteString as B
import qualified Blaze.ByteString.Builder as Builder
import qualified Data.ByteString.Lazy as L
import Data.Monoid ((<>))
import qualified Data.Vector as V
import Data.Word (Word8)
import Data.Csv.Conversion hiding (Parser, header, namedRecord, record,
toNamedRecord)
import qualified Data.Csv.Conversion as Conversion
import qualified Data.Csv.Encoding as Encoding
import Data.Csv.Encoding (EncodeOptions(..), Quoting(..), recordSep)
import Data.Csv.Parser
import Data.Csv.Types
import Data.Csv.Util (endOfLine)
#if !MIN_VERSION_base(4,8,0)
import Data.Monoid (Monoid(mappend, mempty))
import Control.Applicative ((<*))
#endif
-- $feed-header
--
-- These functions are sometimes convenient when working with
-- 'HeaderParser', but don't let you do anything you couldn't already
-- do using the 'HeaderParser' constructors directly.
-- $indexbased
--
-- See documentation on index-based conversion in "Data.Csv" for more
-- information.
-- $namebased
--
-- See documentation on name-based conversion in "Data.Csv" for more
-- information.
-- $feed-records
--
-- These functions are sometimes convenient when working with
-- 'Parser', but don't let you do anything you couldn't already do
-- using the 'Parser' constructors directly.
------------------------------------------------------------------------
-- * Decoding headers
-- | An incremental parser that when fed data eventually returns a
-- parsed 'Header', or an error.
data HeaderParser a =
-- | The input data was malformed. The first field contains any
-- unconsumed input and second field contains information about
-- the parse error.
FailH !B.ByteString String
-- | The parser needs more input data before it can produce a
-- result. Use an 'B.empty' string to indicate that no more
-- input data is available. If fed an 'B.empty string', the
-- continuation is guaranteed to return either 'FailH' or
-- 'DoneH'.
| PartialH (B.ByteString -> HeaderParser a)
-- | The parse succeeded and produced the given 'Header'.
| DoneH !Header a
deriving Functor
-- Renders the three parser states; the continuation inside 'PartialH'
-- cannot be shown, so it is printed as a placeholder.
instance Show a => Show (HeaderParser a) where
    showsPrec d (FailH rest msg) = showParen (d > appPrec) $
        showString "FailH "
            . showsPrec (appPrec + 1) rest
            . showString " "
            . showsPrec (appPrec + 1) msg
    showsPrec _ (PartialH _) = showString "PartialH <function>"
    showsPrec d (DoneH hdr x) = showParen (d > appPrec) $
        showString "DoneH "
            . showsPrec (appPrec + 1) hdr
            . showString " "
            . showsPrec (appPrec + 1) x
-- Application has precedence one more than the most tightly-binding
-- operator; used with 'showParen' in the 'Show' instances of this
-- module so nested values are parenthesized correctly.
appPrec :: Int
appPrec = 10
-- | Parse a CSV header in an incremental fashion. When done, the
-- 'HeaderParser' returns any unconsumed input in the second field of
-- the 'DoneH' constructor. Equivalent to
-- @'decodeHeaderWith' 'defaultDecodeOptions'@.
decodeHeader :: HeaderParser B.ByteString
decodeHeader = decodeHeaderWith defaultDecodeOptions
-- | Like 'decodeHeader', but lets you customize how the CSV data is
-- parsed.
decodeHeaderWith :: DecodeOptions -> HeaderParser B.ByteString
decodeHeaderWith !opts = PartialH (go . parser)
  where
    -- Attoparsec's header parser, configured with the caller's delimiter.
    parser = A.parse (header $ decDelimiter opts)
    -- Translate attoparsec's incremental result states into
    -- 'HeaderParser' states.
    go (A.Fail rest _ msg) = FailH rest err
      where err = "parse error (" ++ msg ++ ")"
    -- TODO: Check empty and give attoparsec one last chance to return
    -- something:
    go (A.Partial k) = PartialH $ \ s -> go (k s)
    go (A.Done rest r) = DoneH r rest
------------------------------------------------------------------------
-- * Decoding records
-- $typeconversion
--
-- Just like in the case of non-incremental decoding, there are two
-- ways to convert CSV records to and from and user-defined data
-- types: index-based conversion and name-based conversion.
-- | An incremental parser that when fed data eventually produces some
-- parsed records, converted to the desired type, or an error in case
-- of malformed input data.
data Parser a =
-- | The input data was malformed. The first field contains any
-- unconsumed input and second field contains information about
-- the parse error.
Fail !B.ByteString String
-- | The parser parsed and converted zero or more records. Any
-- records that failed type conversion are returned as @'Left'
-- errMsg@ and the rest as @'Right' val@. Feed a 'B.ByteString'
-- to the continuation to continue parsing. Use an 'B.empty'
-- string to indicate that no more input data is available. If
-- fed an 'B.empty' string, the continuation is guaranteed to
-- return either 'Fail' or 'Done'.
| Many [Either String a] (B.ByteString -> Parser a)
-- | The parser parsed and converted some records. Any records
-- that failed type conversion are returned as @'Left' errMsg@
-- and the rest as @'Right' val@.
| Done [Either String a]
deriving Functor
-- Renders the three parser states; the continuation inside 'Many'
-- cannot be shown, so it is printed as a placeholder.
instance Show a => Show (Parser a) where
    showsPrec d (Fail rest msg) = showParen (d > appPrec) $
        showString "Fail "
            . showsPrec (appPrec + 1) rest
            . showString " "
            . showsPrec (appPrec + 1) msg
    showsPrec d (Many rs _) = showParen (d > appPrec) $
        showString "Many "
            . showsPrec (appPrec + 1) rs
            . showString " <function>"
    showsPrec d (Done rs) = showParen (d > appPrec) $
        showString "Done " . showsPrec (appPrec + 1) rs
-- | Have we read all available input?
data More = Incomplete | Complete
deriving (Eq, Show)
-- | Efficiently deserialize CSV in an incremental fashion. Equivalent
-- to @'decodeWith' 'defaultDecodeOptions'@.
decode :: FromRecord a
       => HasHeader  -- ^ Data contains header that should be
                     -- skipped
       -> Parser a
decode = decodeWith defaultDecodeOptions
-- | Like 'decode', but lets you customize how the CSV data is parsed.
decodeWith :: FromRecord a
           => DecodeOptions  -- ^ Decoding options
           -> HasHeader      -- ^ Data contains header that should be
                             -- skipped
           -> Parser a
decodeWith !opts hasHeader = case hasHeader of
    -- Parse and discard the leading header first, then continue with
    -- the record parser on the leftover input.
    HasHeader -> go (decodeHeaderWith opts)
    NoHeader  -> Many [] $ \ s -> decodeWithP parseRecord opts s
  where go (FailH rest msg) = Fail rest msg
        go (PartialH k)     = Many [] $ \ s' -> go (k s')
        -- The header itself is dropped; only its unconsumed input is
        -- fed to the record parser.
        go (DoneH _ rest)   = decodeWithP parseRecord opts rest
------------------------------------------------------------------------
-- | Efficiently deserialize CSV in an incremental fashion. The data
-- is assumed to be preceded by a header. Returns a 'HeaderParser'
-- that when done produces a 'Parser' for parsing the actual records.
-- Equivalent to @'decodeByNameWith' 'defaultDecodeOptions'@.
decodeByName :: FromNamedRecord a
             => HeaderParser (Parser a)
decodeByName = decodeByNameWith defaultDecodeOptions
-- | Like 'decodeByName', but lets you customize how the CSV data is
-- parsed.
decodeByNameWith :: FromNamedRecord a
                 => DecodeOptions  -- ^ Decoding options
                 -> HeaderParser (Parser a)
decodeByNameWith !opts = go (decodeHeaderWith opts)
  where
    go (FailH rest msg) = FailH rest msg
    go (PartialH k) = PartialH $ \ s -> go (k s)
    -- Once the header is known, every record is converted through a
    -- name-indexed view of its fields.
    go (DoneH hdr rest) =
        DoneH hdr (decodeWithP (parseNamedRecord . toNamedRecord hdr) opts rest)
------------------------------------------------------------------------
-- TODO: 'decodeWithP' should probably not take an initial
-- 'B.ByteString' input.
-- | Like 'decode', but lets you customize how the CSV data is parsed.
-- The conversion function @p@ turns each parsed 'Record' into the
-- caller's type; records failing conversion surface as 'Left' values.
decodeWithP :: (Record -> Conversion.Parser a) -> DecodeOptions -> B.ByteString
            -> Parser a
decodeWithP p !opts = go Incomplete [] . parser
  where
    -- On a parse failure, flush any records accumulated so far before
    -- reporting the failure (with any later feed appended to the rest).
    go !_ !acc (A.Fail rest _ msg)
        | null acc  = Fail rest err
        | otherwise = Many (reverse acc) (\ s -> Fail (rest `B.append` s) err)
      where err = "parse error (" ++ msg ++ ")"
    -- Attoparsec wants more input: hand accumulated records to the
    -- caller and wait to be fed.  An empty feed marks end of input.
    go Incomplete acc (A.Partial k) = Many (reverse acc) cont
      where cont s = go m [] (k s)
              where m | B.null s  = Complete
                      | otherwise = Incomplete
    go Complete _ (A.Partial _) = moduleError "decodeWithP" msg
      where msg = "attoparsec should never return Partial in this case"
    -- One record parsed; either finish, yield and await more input, or
    -- keep parsing the leftover bytes.
    go m acc (A.Done rest r)
        | B.null rest = case m of
            Complete   -> Done (reverse acc')
            Incomplete -> Many (reverse acc') (cont [])
        | otherwise = go m acc' (parser rest)
      where cont acc'' s
                | B.null s  = Done (reverse acc'')
                | otherwise = go Incomplete acc'' (parser s)
            -- Blank lines are skipped rather than converted; conversion
            -- is forced eagerly (bang) to avoid building up thunks.
            acc' | blankLine r = acc
                 | otherwise   = let !r' = convert r in r' : acc
    parser  = A.parse (record (decDelimiter opts) <* (endOfLine <|> endOfInput))
    convert = runParser . p
{-# INLINE decodeWithP #-}
-- | A record counts as a blank line when it consists of exactly one
-- empty field (how the parser represents an empty input line).
blankLine :: V.Vector B.ByteString -> Bool
blankLine v = case V.toList v of
    [field] -> B.null field
    _       -> False
------------------------------------------------------------------------
-- * Encoding
-- | Efficiently serialize records in an incremental
-- fashion. Equivalent to @'encodeWith' 'defaultEncodeOptions'@.
encode :: ToRecord a => Builder a -> L.ByteString
encode = encodeWith Encoding.defaultEncodeOptions
-- | Like 'encode', but lets you customize how the CSV data is
-- encoded.
encodeWith :: ToRecord a => EncodeOptions -> Builder a
           -> L.ByteString
encodeWith opts b =
    -- The builder only needs the quoting mode, the delimiter and the
    -- line-terminator choice from the options.
    Builder.toLazyByteString $
    runBuilder b (encQuoting opts) (encDelimiter opts) (encUseCrLf opts)
-- | Encode a single record, terminated by the configured record
-- separator (CRLF or LF, per the encoding options).
encodeRecord :: ToRecord a => a -> Builder a
encodeRecord r = Builder $ \ qtng delim useCrLf ->
    Encoding.encodeRecord qtng delim (toRecord r) <> recordSep useCrLf
-- | A builder for building the CSV data incrementally. Just like the
-- @ByteString@ builder, this builder should be used in a
-- right-associative, 'foldr' style. Using '<>' to compose builders in
-- a left-associative, `foldl'` style makes the building not be
-- incremental.
newtype Builder a = Builder {
runBuilder :: Quoting -> Word8 -> Bool -> Builder.Builder
}
-- Composition threads the same encoding settings to both operands and
-- concatenates the resulting byte builders.
instance Monoid (Builder a) where
    mempty = Builder (\ _ _ _ -> mempty)
    mappend x y = Builder $ \ qtng delim useCrLf ->
        runBuilder x qtng delim useCrLf <> runBuilder y qtng delim useCrLf
------------------------------------------------------------------------
-- ** Index-based record conversion
-- | Efficiently serialize named records in an incremental fashion,
-- including the leading header. Equivalent to @'encodeWith'
-- 'defaultEncodeOptions'@. The header is written before any records
-- and dictates the field order.
encodeByName :: ToNamedRecord a => Header -> NamedBuilder a -> L.ByteString
encodeByName = encodeByNameWith Encoding.defaultEncodeOptions
-- | Like 'encodeByName', but header and field order is dictated by
-- the 'Conversion.headerOrder' method instead of an explicit header.
encodeDefaultOrderedByName :: (DefaultOrdered a, ToNamedRecord a) =>
                              NamedBuilder a -> L.ByteString
encodeDefaultOrderedByName =
    encodeDefaultOrderedByNameWith Encoding.defaultEncodeOptions
-- | Like 'encodeByName', but lets you customize how the CSV data is
-- encoded.
encodeByNameWith :: ToNamedRecord a => EncodeOptions -> Header -> NamedBuilder a
                 -> L.ByteString
encodeByNameWith opts hdr b =
    -- Emit the header row first, then every record with its fields in
    -- header order.
    Builder.toLazyByteString $
    Encoding.encodeRecord (encQuoting opts) (encDelimiter opts) hdr <>
    recordSep (encUseCrLf opts) <>
    runNamedBuilder b hdr (encQuoting opts) (encDelimiter opts)
        (encUseCrLf opts)
-- | Like 'encodeDefaultOrderedByName', but lets you customize how the
-- CSV data is encoded.
encodeDefaultOrderedByNameWith ::
    forall a. (DefaultOrdered a, ToNamedRecord a) =>
    EncodeOptions -> NamedBuilder a -> L.ByteString
encodeDefaultOrderedByNameWith opts b =
    Builder.toLazyByteString $
    Encoding.encodeRecord (encQuoting opts) (encDelimiter opts) hdr <>
    recordSep (encUseCrLf opts) <>
    runNamedBuilder b hdr (encQuoting opts)
        (encDelimiter opts) (encUseCrLf opts)
    -- Passing 'undefined' presumes 'headerOrder' never forces its
    -- argument and uses it for type dispatch only.
  where hdr = Conversion.headerOrder (undefined :: a)
-- | Encode a single named record, with fields ordered by the header
-- the enclosing 'NamedBuilder' is eventually run with, terminated by
-- the record separator.
encodeNamedRecord :: ToNamedRecord a => a -> NamedBuilder a
encodeNamedRecord nr = NamedBuilder $ \ hdr qtng delim useCrLf ->
    Encoding.encodeNamedRecord hdr qtng delim
        (Conversion.toNamedRecord nr) <> recordSep useCrLf
-- | A builder for building the CSV data incrementally. Just like the
-- @ByteString@ builder, this builder should be used in a
-- right-associative, 'foldr' style. Using '<>' to compose builders in
-- a left-associative, `foldl'` style makes the building not be
-- incremental.
newtype NamedBuilder a = NamedBuilder {
runNamedBuilder :: Header -> Quoting -> Word8 -> Bool -> Builder.Builder
}
-- Composition threads the same header and encoding settings to both
-- operands and concatenates the resulting byte builders.
instance Monoid (NamedBuilder a) where
    mempty = NamedBuilder (\ _ _ _ _ -> mempty)
    mappend x y = NamedBuilder $ \ hdr qtng delim useCrLf ->
        runNamedBuilder x hdr qtng delim useCrLf <>
        runNamedBuilder y hdr qtng delim useCrLf
------------------------------------------------------------------------
-- | Raise an 'error' prefixed with this module's name and the
-- offending function, so failures are easy to trace back.
moduleError :: String -> String -> a
moduleError func msg =
    error (concat ["Data.Csv.Incremental.", func, ": ", msg])
{-# NOINLINE moduleError #-}
| tibbe/cassava | Data/Csv/Incremental.hs | bsd-3-clause | 15,861 | 0 | 15 | 3,639 | 2,931 | 1,588 | 1,343 | 195 | 5 |
{-# LANGUAGE TypeFamilies, GADTs, ScopedTypeVariables, KindSignatures #-}
{-# LANGUAGE EmptyDataDecls #-}
-- Tests whether a type signature can refine a type
-- See the definition of bug2a
module ShouldCompile where
import qualified Data.Kind as K (Type)
data Typed
data Untyped
type family TU a b :: K.Type
type instance TU Typed b = b
type instance TU Untyped b = ()
-- A type witness type, use eg. for pattern-matching on types
data Type a where
TypeInt :: Type Int
TypeBool :: Type Bool
TypeString :: Type String
TypeList :: Type t -> Type [t]
data Expr :: K.Type -> K.Type -> K.Type {- tu a -} where
Const :: Type a -> a -> Expr tu (TU tu a)
Var2 :: a -> TU tu (Type a) -> Expr tu (TU tu a)
-- Each of these is a type-checker regression test: the pattern match
-- must refine the type-family application @TU Typed a@ for the
-- equation to be accepted.  The matches are intentionally incomplete;
-- only successful compilation matters here.
bug1 :: Expr Typed Bool -> ()
bug1 (Const TypeBool False) = ()
-- The in-pattern type signature is what exercises refinement here.
bug2a :: Expr Typed Bool -> ()
bug2a (Var2 x (TypeBool :: Type Bool)) = ()
bug2c :: Expr Typed Bool -> ()
bug2c (Var2 x TypeBool) = ()
-- Same as bug2c, but with the family application left unreduced in
-- the signature.
bug2b :: Expr Typed (TU Typed Bool) -> ()
bug2b (Var2 x TypeBool) = ()
| sdiehl/ghc | testsuite/tests/indexed-types/should_compile/GADT12.hs | bsd-3-clause | 1,013 | 0 | 10 | 239 | 355 | 191 | 164 | -1 | -1 |
{-# OPTIONS_GHC -fwarn-safe -Werror #-}
-- | Basic test to see if Safe warning flags compile
-- Warn if module is infered safe
-- In this test the warning _should_ fire and cause a compile fail
module SafeFlags26 where
-- Trivial binding: the point of this module is the
-- -fwarn-safe/-Werror combination in the header, which should make
-- compilation of this (inferred-safe) module fail.
f :: Int
f = 1
| urbanslug/ghc | testsuite/tests/safeHaskell/flags/SafeFlags26.hs | bsd-3-clause | 236 | 0 | 4 | 47 | 18 | 13 | 5 | 4 | 1 |
-- | Imperative code with an OpenCL component.
--
-- Apart from ordinary imperative code, this also carries around an
-- OpenCL program as a string, as well as a list of kernels defined by
-- the OpenCL program.
--
-- The imperative code has been augmented with a 'LaunchKernel'
-- operation that allows one to execute an OpenCL kernel.
module Futhark.CodeGen.ImpCode.OpenCL
( Program (..),
Function,
FunctionT (Function),
Code,
KernelName,
KernelArg (..),
OpenCL (..),
KernelSafety (..),
numFailureParams,
KernelTarget (..),
FailureMsg (..),
module Futhark.CodeGen.ImpCode,
module Futhark.IR.GPU.Sizes,
)
where
import qualified Data.Map as M
import qualified Data.Text as T
import Futhark.CodeGen.ImpCode hiding (Code, Function)
import qualified Futhark.CodeGen.ImpCode as Imp
import Futhark.IR.GPU.Sizes
import Futhark.Util.Pretty
-- | An program calling OpenCL kernels.
data Program = Program
{ openClProgram :: T.Text,
-- | Must be prepended to the program.
openClPrelude :: T.Text,
openClKernelNames :: M.Map KernelName KernelSafety,
-- | So we can detect whether the device is capable.
openClUsedTypes :: [PrimType],
-- | Runtime-configurable constants.
openClSizes :: M.Map Name SizeClass,
-- | Assertion failure error messages.
openClFailures :: [FailureMsg],
hostDefinitions :: Definitions OpenCL
}
-- | Something that can go wrong in a kernel. Part of the machinery
-- for reporting error messages from within kernels.
data FailureMsg = FailureMsg
{ failureError :: ErrorMsg Exp,
failureBacktrace :: String
}
-- | A function calling OpenCL kernels.
type Function = Imp.Function OpenCL
-- | A piece of code calling OpenCL.
type Code = Imp.Code OpenCL
-- | The name of a kernel.
type KernelName = Name
-- | An argument to be passed to a kernel.
data KernelArg
= -- | Pass the value of this scalar expression as argument.
ValueKArg Exp PrimType
| -- | Pass this pointer as argument.
MemKArg VName
| -- | Create this much local memory per workgroup.
SharedMemoryKArg (Count Bytes Exp)
deriving (Show)
-- | Whether a kernel can potentially fail (because it contains bounds
-- checks and such).
data MayFail = MayFail | CannotFail
deriving (Show)
-- | Information about bounds checks and how sensitive it is to
-- errors. Ordered by least demanding to most.
data KernelSafety
= -- | Does not need to know if we are in a failing state, and also
-- cannot fail.
SafetyNone
| -- | Needs to be told if there's a global failure, and that's it,
-- and cannot fail.
SafetyCheap
| -- | Needs all parameters, may fail itself.
SafetyFull
deriving (Eq, Ord, Show)
-- | How many leading failure arguments we must pass when launching a
-- kernel with these safety characteristics.
numFailureParams :: KernelSafety -> Int
numFailureParams safety =
  case safety of
    SafetyNone -> 0
    SafetyCheap -> 1
    SafetyFull -> 3
-- | Host-level OpenCL operation.
data OpenCL
= LaunchKernel KernelSafety KernelName [KernelArg] [Exp] [Exp]
| GetSize VName Name
| CmpSizeLe VName Name Exp
| GetSizeMax VName SizeClass
deriving (Show)
-- | The target platform when compiling imperative code to a 'Program'
data KernelTarget
= TargetOpenCL
| TargetCUDA
deriving (Eq)
-- Host-level operations are pretty-printed via their derived 'Show'
-- instance.
instance Pretty OpenCL where
  ppr op = text (show op)
| HIPERFIT/futhark | src/Futhark/CodeGen/ImpCode/OpenCL.hs | isc | 3,376 | 0 | 10 | 685 | 509 | 325 | 184 | 64 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Unison.Test.Term where
-- import Test.Tasty.QuickCheck as QC
-- import Test.Tasty.SmallCheck as SC
import Test.Tasty
import Test.Tasty.HUnit
import Unison.Hash (Hash)
import Unison.Codebase.MemCodebase ()
import Unison.Parsers (unsafeParseTerm)
import Unison.Reference as R
import Unison.Symbol (Symbol)
import Unison.Term
import Unison.View (DFO)
import Unison.Dimensions (Width(..),Height(..),Region,X(..),Y(..))
import Data.Text (Text)
import Data.Maybe (fromMaybe)
import qualified Unison.ABT as ABT
import qualified Unison.Doc as Doc
import qualified Unison.Paths as Paths
import qualified Unison.Parser as Parser
import qualified Unison.TermParser as TermParser
import qualified Unison.Test.Common as Common
import qualified Unison.Var as Var
import qualified Unison.Views as Views
import Debug.Trace
-- term for testing
type TTerm = Term (Symbol DFO)
-- | Hash of a term, delegated to the ABT (abstract binding tree) hash.
hash :: TTerm -> Hash
hash = ABT.hash
-- | For each (x, y) screen point, compute the path into the layout of
-- term @t@ at that position together with the region it occupies.
-- When @print@ is True, layout debugging output is traced.
atPts :: Bool -> Common.TCodebase -> [(Int,Int)] -> TTerm -> [(Paths.Path, Region)]
atPts print (_,symbol,_,_) pts t = map go pts where
  go (x,y) = let p = path x y in (p, Doc.region bounds p)
  -- Render the term and lay it out at a fixed width of 80.
  doc = Views.term symbol t
  layout = Doc.layout Doc.textWidth (Width 80) doc
  -- Every text leaf is treated as one row high when computing bounds.
  bounds = debug $ Doc.bounds (\t -> (Doc.textWidth t, Height 1)) (Doc.box layout)
  path x y = Doc.at bounds (X (fromIntegral x), Y (fromIntegral y))
  debug b = if print then trace ("\n" ++ Doc.debugDoc doc ++ "\n\n" ++ Doc.debugBox b ++ "\n\n" ++ Doc.debugBoxp b) b else b
-- | Entry point: run the tasty suite below.
main :: IO ()
main = defaultMain tests
-- | Monomorphic alias pinning the parse result to 'TTerm'.
unsafeParseTerm' :: String -> TTerm
unsafeParseTerm' = unsafeParseTerm
tests :: TestTree
tests = withResource Common.codebase (\_ -> pure ()) $ \codebase -> testGroup "Term"
[ testCase "alpha equivalence (term)" $ assertEqual "identity"
(unsafeParseTerm' "a -> a")
(unsafeParseTerm' "x -> x")
, testCase "hash cycles" $ assertEqual "pingpong"
(hash pingpong1)
(hash pingpong2)
-- , testCase "infix-rendering (1)" $ codebase >>= \(_,symbol,_) ->
-- let t = unsafeParseTerm "Number.plus 1 1"
-- in assertEqual "+"
-- "1 + 1"
-- (Doc.formatText (Width 80) (Views.term symbol t))
-- , testCase "infix-rendering (unsaturated)" $ codebase >>= \(_,symbol,_) ->
-- let t = unsafeParseTerm "Number.plus _"
-- in assertEqual "+"
-- "(+) _"
-- (Doc.formatText (Width 80) (Views.term symbol t))
-- , testCase "infix-rendering (totally unsaturated)" $ codebase >>= \(_,symbol,_) ->
-- let t = unsafeParseTerm "Number.plus"
-- in assertEqual "+" "(+)" (Doc.formatText (Width 80) (Views.term symbol t))
-- , testCase "infix-rendering (2)" $ codebase >>= \(_,symbol,_) ->
-- do
-- t <- pure $ unsafeParseTerm "Number.plus 1 1"
-- let d = Views.term symbol t
-- assertEqual "path sanity check"
-- [Paths.Fn,Paths.Arg]
-- (head $ Doc.leafPaths d)
-- , testCase "let-rendering (1)" $ codebase >>= \codebase ->
-- do
-- -- let xy = 4223 in 42
-- t <- pure $ unsafeParseTerm "let xy = 4223 in 42"
-- [(p1,r1), (p2,_), (p3,r3), (p4,_), (p5,r5), (p6,r6)] <- pure $
-- atPts False codebase [(0,0), (1,0), (10,0), (11,0), (5,0), (8,0)] t
-- assertEqual "p1" [] p1
-- assertEqual "p2" [] p2
-- assertEqual "r1" (rect 0 0 19 1) r1
-- assertEqual "p3" [Paths.Binding 0, Paths.Body] p3
-- assertEqual "r3" (rect 9 0 4 1) r3
-- assertEqual "p3 == p4" p3 p4
-- assertEqual "p5" [Paths.Binding 0, Paths.Bound] p5
-- assertEqual "r5" (rect 4 0 2 1) r5
-- assertEqual "p6" [Paths.Binding 0] p6
-- assertEqual "r6" (rect 4 0 9 1) r6
-- , testCase "map lambda rendering" $ codebase >>= \codebase ->
-- do
-- -- map (x -> _) [1,2,3]
-- t <- pure $ builtin "Vector.map" `app` lam' ["x"] blank `app` vector (map num [1,2,3])
-- [(p1,r1)] <- pure $ atPts False codebase [(5,0)] t
-- assertEqual "p1" [Paths.Fn, Paths.Arg] p1
-- assertEqual "r1" (rect 4 0 8 1) r1
-- , testCase "operator chain rendering" $ codebase >>= \codebase ->
-- do
-- t <- pure $ unsafeParseTerm "1 + 2 + 3"
-- [(p1,r1),(p2,_)] <- pure $ atPts False codebase [(1,0), (2,0)] t
-- assertEqual "p1" [Paths.Fn, Paths.Arg, Paths.Fn, Paths.Arg] p1
-- assertEqual "r1" (rect 0 0 1 1) r1
-- assertEqual "p2" [] p2
]
-- | Build a (position, size) rectangle from plain Ints.
rect :: Int -> Int -> Int -> Int -> (X,Y,Width,Height)
rect left top wid hei =
  ( X (fromIntegral left)
  , Y (fromIntegral top)
  , Width (fromIntegral wid)
  , Height (fromIntegral hei)
  )
-- various unison terms, useful for testing
-- | Mutually recursive ping/pong as a multi-line let-rec; used to
-- check that hashing is stable under renaming of the bindings.
pingpong1 :: TTerm
pingpong1 =
  unsafeParseTerm $
  unlines [ "let rec "
          , " ping x = pong (x + 1);"
          , " pong y = ping (y - 1);"
          , " ping 1;;"
          ]
-- | The same cycle with the binding names/roles swapped; must hash
-- identically to 'pingpong1'.
pingpong2 :: TTerm
pingpong2 =
  unsafeParseTerm $ "let rec pong1 p = ping1 (p - 1); ping1 q = pong1 (q + 1); ping1 1;;"
| nightscape/platform | shared/tests/Unison/Test/Term.hs | mit | 5,039 | 0 | 16 | 1,259 | 864 | 510 | 354 | 59 | 2 |
-- counting intervals in the permutohedron
module Permutohedra where
import Data.List
import Formulas
import Bijections
-- | All inversions of a permutation: pairs (i, j) with i < j whose
-- relative order is reversed under the inverse of @pi@.
--
-- The original generator drew @j@ from @dom pi \\ [i]@; since the
-- @i < j@ guard already excludes @j == i@, that per-iteration list
-- difference was redundant O(n) work and has been dropped.  The set of
-- pairs produced is unchanged.
inversions :: Perm -> [(Int,Int)]
inversions pi = [(i,j) | i <- dom pi, j <- dom pi, i < j, act (inv pi) i > act (inv pi) j]
-- | Weak (Bruhat) order: @p <= q@ iff every inversion of @p@ is also
-- an inversion of @q@.
perm_le :: Perm -> Perm -> Bool
perm_le p q = all (`elem` inversions q) (inversions p)
weak_bruhat = perm_le
-- | All ordered pairs of the weak Bruhat order on permutations of
-- [1..n] (i.e. the intervals' endpoints of the permutohedron order).
perm_lattice :: Int -> [(Perm,Perm)]
perm_lattice n =
  [(pi1,pi2) | pi1 <- permute [1..n], pi2 <- permute [1..n], perm_le pi1 pi2]
-- [length $ perm_lattice n | n <- [0..]] == [1,1,3,17,151,1899,...]
-- | Pairs of permutations of [1..n] with no "conflicting" index pair:
-- no i < j that ascends under pi1 while descending under pi2.  The
-- name references OEIS A007767, which these counts are presumably
-- meant to match — verify against the sequence.
a007767 :: Int -> [(Perm,Perm)]
a007767 n =
  [(pi1,pi2) | pi1 <- permute [1..n], pi2 <- permute [1..n], not $ any (\i -> any (\j -> i < j && (act pi1 i) < (act pi1 j) && (act pi2 j) < (act pi2 i)) [1..n]) [1..n]]
-- | Strong Bruhat comparison of two permutations.  Appears to follow
-- the tableau criterion — for every prefix length i, the sorted
-- i-prefix of pi1's one-line notation is entrywise strictly below
-- pi2's — note the strict (<); confirm this matches the intended
-- order convention.
strong_bruhat :: Perm -> Perm -> Bool
strong_bruhat pi1 pi2 =
  let (n1,n2) = (length pi1,length pi2) in
  -- One-line notations over the (sorted) domains of each permutation.
  let (w1,w2) = (map (act pi1) (sort $ dom pi1),
                 map (act pi2) (sort $ dom pi2)) in
  (n1 == n2) &&
  flip all [1..n1-1] (\i ->
    all (uncurry (<)) (zip (sort (take i w1)) (sort (take i w2))))
-- [length $ [(pi1,pi2) | pi1 <- permute [1..n], pi2 <- permute [1..n], strong_bruhat pi1 pi2] | n <- [1..]] == [1,1,3,19,213,3781,...]
| noamz/linlam-gos | src/Permutohedra.hs | mit | 1,263 | 0 | 19 | 271 | 632 | 336 | 296 | 23 | 1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
module Types where
import Control.Monad (mzero)
import Data.Aeson
import Data.Text
import GHC.Generics
import Servant.API (FromText(..), ToText(..))
-- | Authentication/session state decoded from JSON.  Field names must
-- match the JSON keys exactly, because the instances below are derived
-- generically.
data Auth = Auth
  { csrf :: Text       -- ^ CSRF token
  , device_id :: Text  -- ^ device identifier; also used as the session
                       --   cookie value (see 'mkCookie')
  , logged :: Bool
  , premium :: Bool
  , invalid :: Bool
  , timestamp :: Int
  } deriving (Show, Generic)
instance FromJSON Auth
instance ToJSON Auth
-- | Search response; each result list is nested under an @items@ key
-- of its respective JSON object.
data SearchResult = SearchResult
  { albums :: [Album]
  , artists :: [Artist]
  } deriving (Show, Generic)
instance FromJSON SearchResult where
  -- Unwraps { "albums": { "items": [...] }, "artists": { "items": [...] } }.
  parseJSON (Object vs) = SearchResult <$> (vs .: "albums" >>= (.: "items"))
                                       <*> (vs .: "artists" >>= (.: "items"))
  parseJSON _ = mzero
-- NOTE(review): the generic ToJSON instances below emit the Haskell
-- field names ("albumId", "artistId", ...), which do not round-trip
-- with the hand-written FromJSON keys ("id", ...) — confirm intended.
instance ToJSON SearchResult
data Album = Album
  { albumId :: Int
  , title :: Text
  , year :: Int
  } deriving (Show, Generic)
instance FromJSON Album where
  -- JSON key "id" maps to the prefixed field 'albumId'.
  parseJSON (Object vs) = Album <$> vs .: "id"
                                <*> vs .: "title"
                                <*> vs .: "year"
  parseJSON _ = mzero
instance ToJSON Album
data Artist = Artist
  { artistId :: Int
  , name :: Text
  } deriving (Show, Generic)
instance FromJSON Artist where
  -- JSON key "id" maps to the prefixed field 'artistId'.
  parseJSON (Object vs) = Artist <$> vs .: "id"
                                 <*> vs .: "name"
  parseJSON _ = mzero
instance ToJSON Artist
-- | Interface language, rendered to/from its URL parameter code.
data Lang = Uk | En | Ru deriving Show
instance ToText Lang where
  toText Uk = "uk"
  toText En = "en"
  toText Ru = "ru"
instance FromText Lang where
  fromText "uk" = Just Uk
  fromText "ru" = Just Ru
  fromText "en" = Just En
  fromText _ = Nothing
-- | Opaque cookie value sent back to the server as-is.
newtype CookieString = CookieString { cookie :: Text } deriving Show
instance ToText CookieString where
  toText = cookie
instance FromText CookieString where
  fromText = Just . CookieString
-- | Build the session cookie from an 'Auth' response's device id.
mkCookie :: Auth -> CookieString
mkCookie = CookieString . device_id
-- | Free-text search query, passed through unchanged.
newtype QueryString = QueryString { query :: Text } deriving Show
instance ToText QueryString where
  toText = query
instance FromText QueryString where
  fromText = Just . QueryString
-- | Search scope; only the "all" value is modelled here.
data Type = All deriving Show
instance ToText Type where
  toText _ = "all"
instance FromText Type where
  fromText "all" = Just All
  fromText _ = Nothing
-- | Cache-busting random number parameter (presumably "no-cache
-- random" — confirm against the calling API).
newtype NCRnd = NCRnd { ncrnd :: Double } deriving Show
instance ToText NCRnd where
  toText = toText . ncrnd
instance FromText NCRnd where
  fromText str = NCRnd <$> fromText str
-- | The external host requests are addressed to.
data ExternalDomain = YandexDomain deriving Show
instance ToText ExternalDomain where
  toText YandexDomain = "music.yandex.ru"
instance FromText ExternalDomain where
  fromText "music.yandex.ru" = Just YandexDomain
  fromText _ = Nothing
-- | Boolean query flag, rendered via the underlying Bool's ToText.
newtype Overembed = Overembed { overembed :: Bool } deriving Show
instance ToText Overembed where
  toText = toText . overembed
instance FromText Overembed where
  fromText bool = Overembed <$> fromText bool
| ushfnuk/music | src/Types.hs | mit | 2,957 | 0 | 11 | 767 | 844 | 465 | 379 | 91 | 1 |
-- | Using Plugins with SimpleOptions
module Plugins.Commands
( commandsFromPlugins
, toCommand
) where
import Control.Monad.Trans.Either (EitherT)
import Control.Monad.Trans.Writer (Writer)
import Data.Text (Text, unpack)
import Data.Foldable (foldMap)
import Plugins
import Options.Applicative.Simple
-- | Generate the "commands" argument to simpleOptions
-- based on available plugins: registers one subcommand per plugin
-- returned by 'listPlugins'.
commandsFromPlugins :: Plugins -> EitherT Text (Writer (Mod CommandFields Text)) ()
commandsFromPlugins plugins = mapM_ toCommand (listPlugins plugins)
-- | Convert a single plugin into a command.  The command's name and
-- help text come from the plugin; selecting the command simply yields
-- the plugin's name (for later dispatch) with no extra parsing ('id').
toCommand :: Plugin -> EitherT Text (Writer (Mod CommandFields Text)) ()
toCommand plugin = addCommand
  (unpack $ pluginName plugin)
  (unpack $ pluginSummary plugin)
  id
  (pure $ pluginName plugin)
| fpco/stackage-cli | src/Plugins/Commands.hs | mit | 797 | 0 | 10 | 116 | 204 | 113 | 91 | 17 | 1 |
module Carbon.DataStructures.Trees.SelfBalancingBinarySearchTree.Scaffolding (get_tree, get_distributed_tree, insert_distributed_tree, search_tree, remove_tree, max_size, golden_ratio) where
import qualified Carbon.DataStructures.Trees.SelfBalancingBinarySearchTree as Tree
import qualified Carbon.DataStructures.Trees.NaturalTree as NaturalTree
import Carbon.Testing
-- | Upper bound on the tree sizes exercised here: 2^16.
max_size :: Integer
max_size = 2 ^ 16

-- | The golden ratio, phi = (1 + sqrt 5) / 2.
golden_ratio :: Double
golden_ratio = (1 + root5) / 2
  where root5 = sqrt 5
-- | Tree containing [1..n], memoized across calls: results are cached
-- in an infinite 'NaturalTree' indexed by n, and the size-n tree is
-- built by adding n to the (memoized) tree of size n-1.
get_tree :: Integer -> Tree.Tree Integer
get_tree
  = let
      get_tree' 0 = Tree.create
      get_tree' n = Tree.add (get_tree (n - 1)) n
    in NaturalTree.index (fmap get_tree' NaturalTree.naturals)
-- | Like 'get_tree', but the n-th insertion uses 'distribute_range n'
-- instead of n itself (a spread-out insertion order), with the same
-- 'NaturalTree'-based memoization.
get_distributed_tree :: Integer -> Tree.Tree Integer
get_distributed_tree
  = let
      get_distributed_tree' 0 = Tree.create
      get_distributed_tree' n = Tree.add (get_distributed_tree (n - 1)) (distribute_range n)
    in NaturalTree.index (fmap get_distributed_tree' NaturalTree.naturals)
-- | Benchmark step: take the memoized distributed tree of size n-1 and
-- insert one more distributed value; for n <= 0 yield the empty tree.
insert_distributed_tree :: Integer -> Tree.Tree Integer
insert_distributed_tree n
  | n > 0 = Tree.add (get_distributed_tree (n - 1)) (distribute_range n)
  | otherwise = get_tree 0
-- | Benchmark step: count occurrences of n in the size-n tree,
-- exercising lookup on the memoized tree.
search_tree :: Integer -> Int
search_tree n = Tree.count (get_tree n) n
-- | Benchmark step: remove a distributed value from the size n-1 tree;
-- for n <= 0 yield the empty tree.
remove_tree :: Integer -> Tree.Tree Integer
remove_tree n
  | n > 0 = Tree.removeall (get_tree (n - 1)) (distribute_range n)
  | otherwise = get_tree 0
{-# LANGUAGE PatternSynonyms #-}
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module JSDOM.Generated.MediaStream
(newMediaStream, newMediaStream', newMediaStream'', getAudioTracks,
getAudioTracks_, getVideoTracks, getVideoTracks_, getTracks,
getTracks_, getTrackById, getTrackById_, addTrack, removeTrack,
clone, clone_, getId, getActive, addTrackEvent, removeTrackEvent,
active, inactive, MediaStream(..), gTypeMediaStream)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
-- | Construct an empty stream.
-- <https://developer.mozilla.org/en-US/docs/Web/API/webkitMediaStream Mozilla webkitMediaStream documentation>
newMediaStream :: (MonadDOM m) => m MediaStream
newMediaStream
  = liftDOM (MediaStream <$> new (jsg "MediaStream") ())
-- | Construct a stream from an existing stream (passed to the JS
-- constructor as its single argument).
-- <https://developer.mozilla.org/en-US/docs/Web/API/webkitMediaStream Mozilla webkitMediaStream documentation>
newMediaStream' :: (MonadDOM m) => MediaStream -> m MediaStream
newMediaStream' stream
  = liftDOM
      (MediaStream <$> new (jsg "MediaStream") [toJSVal stream])
-- | Construct a stream from an explicit list of tracks (marshalled to
-- a JS array).
-- <https://developer.mozilla.org/en-US/docs/Web/API/webkitMediaStream Mozilla webkitMediaStream documentation>
newMediaStream'' ::
                 (MonadDOM m, IsMediaStreamTrack tracks) =>
                   [tracks] -> m MediaStream
newMediaStream'' tracks
  = liftDOM
      (MediaStream <$> new (jsg "MediaStream") [toJSVal (array tracks)])
-- | <https://developer.mozilla.org/en-US/docs/Web/API/webkitMediaStream.getAudioTracks Mozilla webkitMediaStream.getAudioTracks documentation>
getAudioTracks ::
(MonadDOM m) => MediaStream -> m [MediaStreamTrack]
getAudioTracks self
= liftDOM
((self ^. jsf "getAudioTracks" ()) >>= fromJSArrayUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/webkitMediaStream.getAudioTracks Mozilla webkitMediaStream.getAudioTracks documentation>
getAudioTracks_ :: (MonadDOM m) => MediaStream -> m ()
getAudioTracks_ self
= liftDOM (void (self ^. jsf "getAudioTracks" ()))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/webkitMediaStream.getVideoTracks Mozilla webkitMediaStream.getVideoTracks documentation>
getVideoTracks ::
(MonadDOM m) => MediaStream -> m [MediaStreamTrack]
getVideoTracks self
= liftDOM
((self ^. jsf "getVideoTracks" ()) >>= fromJSArrayUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/webkitMediaStream.getVideoTracks Mozilla webkitMediaStream.getVideoTracks documentation>
getVideoTracks_ :: (MonadDOM m) => MediaStream -> m ()
getVideoTracks_ self
= liftDOM (void (self ^. jsf "getVideoTracks" ()))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/webkitMediaStream.getTracks Mozilla webkitMediaStream.getTracks documentation>
getTracks :: (MonadDOM m) => MediaStream -> m [MediaStreamTrack]
getTracks self
= liftDOM ((self ^. jsf "getTracks" ()) >>= fromJSArrayUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/webkitMediaStream.getTracks Mozilla webkitMediaStream.getTracks documentation>
getTracks_ :: (MonadDOM m) => MediaStream -> m ()
getTracks_ self = liftDOM (void (self ^. jsf "getTracks" ()))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/webkitMediaStream.getTrackById Mozilla webkitMediaStream.getTrackById documentation>
getTrackById ::
(MonadDOM m, ToJSString trackId) =>
MediaStream -> trackId -> m MediaStreamTrack
getTrackById self trackId
= liftDOM
((self ^. jsf "getTrackById" [toJSVal trackId]) >>=
fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/webkitMediaStream.getTrackById Mozilla webkitMediaStream.getTrackById documentation>
getTrackById_ ::
(MonadDOM m, ToJSString trackId) => MediaStream -> trackId -> m ()
getTrackById_ self trackId
= liftDOM (void (self ^. jsf "getTrackById" [toJSVal trackId]))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/webkitMediaStream.addTrack Mozilla webkitMediaStream.addTrack documentation>
addTrack ::
(MonadDOM m, IsMediaStreamTrack track) =>
MediaStream -> track -> m ()
addTrack self track
= liftDOM (void (self ^. jsf "addTrack" [toJSVal track]))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/webkitMediaStream.removeTrack Mozilla webkitMediaStream.removeTrack documentation>
removeTrack ::
(MonadDOM m, IsMediaStreamTrack track) =>
MediaStream -> track -> m ()
removeTrack self track
= liftDOM (void (self ^. jsf "removeTrack" [toJSVal track]))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/webkitMediaStream.clone Mozilla webkitMediaStream.clone documentation>
clone :: (MonadDOM m) => MediaStream -> m MediaStream
clone self
= liftDOM ((self ^. jsf "clone" ()) >>= fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/webkitMediaStream.clone Mozilla webkitMediaStream.clone documentation>
clone_ :: (MonadDOM m) => MediaStream -> m ()
clone_ self = liftDOM (void (self ^. jsf "clone" ()))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/webkitMediaStream.id Mozilla webkitMediaStream.id documentation>
getId ::
(MonadDOM m, FromJSString result) => MediaStream -> m result
getId self = liftDOM ((self ^. js "id") >>= fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/webkitMediaStream.active Mozilla webkitMediaStream.active documentation>
getActive :: (MonadDOM m) => MediaStream -> m Bool
getActive self = liftDOM ((self ^. js "active") >>= valToBool)
-- | Fired when a track is added to the stream.
-- <https://developer.mozilla.org/en-US/docs/Web/API/webkitMediaStream.onaddtrack Mozilla webkitMediaStream.onaddtrack documentation>
addTrackEvent :: EventName MediaStream Event
addTrackEvent = unsafeEventName (toJSString "addtrack")
-- | Fired when a track is removed from the stream.
-- <https://developer.mozilla.org/en-US/docs/Web/API/webkitMediaStream.onremovetrack Mozilla webkitMediaStream.onremovetrack documentation>
removeTrackEvent :: EventName MediaStream Event
removeTrackEvent = unsafeEventName (toJSString "removetrack")
-- | <https://developer.mozilla.org/en-US/docs/Web/API/webkitMediaStream.onactive Mozilla webkitMediaStream.onactive documentation>
active :: EventName MediaStream Event
active = unsafeEventName (toJSString "active")
-- | <https://developer.mozilla.org/en-US/docs/Web/API/webkitMediaStream.oninactive Mozilla webkitMediaStream.oninactive documentation>
inactive :: EventName MediaStream Event
inactive = unsafeEventName (toJSString "inactive")
| ghcjs/jsaddle-dom | src/JSDOM/Generated/MediaStream.hs | mit | 7,231 | 0 | 12 | 957 | 1,464 | 809 | 655 | 95 | 1 |
module Week5Tests where
import Week5.Tests
import Test.Tasty
import Test.Tasty.HUnit
-- | Entry point: run the whole week-5 suite.
-- ('do tastyTests' was a redundant single-statement do block.)
main :: IO ()
main = tastyTests

-- | Run the test tree through tasty's default runner.
tastyTests :: IO ()
tastyTests = defaultMain tests

tests :: TestTree
tests = testGroup "Tasty Week 5 Unit Tests" [week5UnitTests]
module STMGraphs.Homogenous where
import STMGraphs.Prelude
import qualified ListT
import qualified GHC.Exts
import qualified STMContainers.Multimap as Multimap
import qualified STMContainers.Set as Set
import qualified STMContainers.Map as Map
import qualified Focus
-- |
-- A homogenous directed mutable graph in STM.
data Node e v =
  Node {
    unique :: !Unique, -- ^ Node identity; the sole input to 'Eq' and 'Hashable'.
    value :: !(TVar v), -- ^ Mutable payload of the node.
    targets :: !(Multimap.Multimap e (Node e v)), -- ^ Outgoing edges: label -> target nodes.
    sources :: !(Multimap.Multimap (Node e v) e) -- ^ Incoming edges: source node -> labels.
  }
-- Equality is by identity ('unique'), not by payload.
instance Eq (Node e v) where
  a == b =
    unique a == unique b
instance Hashable (Node e v) where
  hashWithSalt s n =
    combine s (hashUnique (unique n))
    where
      -- Multiply-xor mix using the 32-bit FNV prime 16777619.
      combine h1 h2 = (h1 * 16777619) `xor` h2
  hash n =
    hashUnique (unique n)
-- | Create a fresh, disconnected node holding the given value.
new :: v -> STM (Node e v)
new a =
  Node <$> newUniqueSTM <*> newTVar a <*> Multimap.new <*> Multimap.new
  where
    -- NOTE(review): 'unsafePerformIO newUnique' without a NOINLINE
    -- pragma is fragile: the compiler may share or float the thunk,
    -- handing the same Unique to several nodes, and under STM retries
    -- the IO action may run more than once. Confirm this is intended.
    newUniqueSTM = pure $ unsafePerformIO newUnique
-- * On
-------------------------

-- |
-- A computation in a context of a node.
type On e v m r =
  ReaderT (Node e v) m r

-- |
-- Run a node-scoped computation against the given node.
on :: Node e v -> On e v m r -> m r
on node action = runReaderT action node
-- |
-- Read the value stored at the current node.
get :: On e v STM v
get = ReaderT $ \node -> readTVar (value node)
-- |
-- Overwrite the value stored at the current node.
set :: v -> On e v STM ()
set newValue = ReaderT $ \node -> writeTVar (value node) newValue
-- |
-- Add a directed edge, labelled @edge@, from the current node to
-- @target@, updating both the outgoing and the incoming index.
addEdge :: (Multimap.Key e) => Node e v -> e -> On e v STM ()
addEdge target edge =
  ReaderT $ \src -> do
    Multimap.insert target edge (targets src)
    Multimap.insert edge src (sources target)
-- |
-- Delete the directed edge, labelled @edge@, from the current node to
-- @target@, updating both the outgoing and the incoming index.
removeEdge :: (Multimap.Key e) => Node e v -> e -> On e v STM ()
removeEdge target edge =
  ReaderT $ \src -> do
    Multimap.delete target edge (targets src)
    Multimap.delete edge src (sources target)
-- | Detach the current node by deleting every *incoming* edge: each
-- source node listed in 'sources' drops its edge to this node.
-- NOTE(review): outgoing edges are not cleared here — nodes this node
-- points to still list it in their 'sources'. Confirm intended.
remove :: (Multimap.Key e) => On e v STM ()
remove =
  ReaderT $ \target -> do
    flip ListT.traverse_ (Multimap.stream (sources target)) $ \(source, edge) ->
      on source $ removeEdge target edge
-- |
-- Stream every node reachable from the current node through an edge
-- with the given label.
streamTargets :: (Multimap.Key e) => e -> On e v (ListT.ListT STM) (Node e v)
streamTargets edgeLabel =
  ReaderT $ \node -> Multimap.streamByKey edgeLabel (targets node)
| nikita-volkov/stm-graphs | library/STMGraphs/Homogenous.hs | mit | 2,059 | 0 | 14 | 478 | 835 | 433 | 402 | 65 | 1 |
-- Helpers/Numbers.hs
module Helpers.Numbers (
divisors,
divisorCount,
intToDigits,
lcm',
gcd',
collatz,
fact,
digitSum ) where
import Data.Char (digitToInt)
import Data.IntSet (toList, fromList)
import Data.List
import Data.Maybe
-- | Number of divisors returned by 'divisors'.
divisorCount :: Int -> Int
divisorCount = length . divisors
-- | Divisors of @num@ in ascending order: 1 plus every divisor found by
-- pairing each small factor d in [2..sqrt num] with its complement
-- num `div` d. Deduplication (perfect squares) and sorting happen via
-- the IntSet round-trip in @nub'@.
-- NOTE(review): @num@ itself is never in the result (the complement of
-- the prepended 1 is missing), so for num > 1 this yields *proper*
-- divisors plus 1 — confirm callers expect that.
divisors :: Int -> [Int]
divisors 0 = []
divisors num = nub' $ 1:firstHalf ++ reverse (map (\x -> num `div` x) firstHalf) where
  nub' = toList . fromList
  firstHalf = filter (\x -> num `mod` x == 0) [2..(round . sqrt $ fromIntegral num)]
-- | Decimal digits of a number, most significant first; 0 yields the
-- empty list (preserved for backward compatibility).
--
-- Improvements over the previous version: an accumulator replaces the
-- quadratic right-append (@digits ++ [d]@), and negative inputs — which
-- previously never terminated because @div@ rounds toward negative
-- infinity — now yield the digits of the absolute value.
intToDigits :: Integral a => a -> [a]
intToDigits n
  | n < 0 = intToDigits (negate n)
  | otherwise = go n []
  where
    go 0 acc = acc
    go m acc = go (m `div` 10) (m `mod` 10 : acc)
-- | Least common multiple via the gcd identity: |x| / gcd(x,y) * |y|.
lcm' :: Int -> Int -> Int
lcm' x y = (abs x `div` gcd x y) * abs y
-- | Greatest common divisor via the Euclidean algorithm.
--
-- Replaces a linear search through @reverse [1..max a b]@ that was
-- O(max a b) and crashed ('fromJust' on 'Nothing') for inputs such as
-- @gcd' 0 0@ or two negatives. Positive inputs give the same results
-- as before; other inputs now return the gcd of the absolute values
-- instead of erroring.
gcd' :: Int -> Int -> Int
gcd' a b = go (abs a) (abs b)
  where
    go x 0 = x
    go x y = go y (x `mod` y)
-- | The Collatz (hailstone) sequence from the given start down to 1:
-- halve even numbers, map odd n to 3n+1.
collatz :: Int -> [Int]
collatz n
  | n == 1 = [1]
  | even n = n : collatz (n `div` 2)
  | otherwise = n : collatz (3 * n + 1)
-- | Factorial; @fact 0 == 1@.
--
-- 'product' returns 1 on the empty range, so the explicit base case and
-- the partial 'foldr1' (which crashed on @[]@) are no longer needed.
fact :: Integer -> Integer
fact num = product [1..num]
-- | Sum of the decimal digits.
--
-- Uses 'digitToInt' per character instead of 'read' on one-character
-- strings, and takes 'abs' first so negative inputs no longer crash on
-- the leading minus sign.
digitSum :: Integer -> Integer
digitSum = sum . map (fromIntegral . digitToInt) . show . abs
| Sgoettschkes/learning | haskell/ProjectEuler/Helpers/Numbers.hs | mit | 1,150 | 0 | 14 | 278 | 547 | 297 | 250 | 36 | 1 |
{-# LANGUAGE FlexibleInstances #-}
module CacheSpec where
import Bce.Cache
import Test.Hspec
import Test.QuickCheck
import Test.QuickCheck.Arbitrary
import Bce.TimeStamp
import qualified Data.Map as Map
import qualified Data.Set as Set
-- Wrapper whose generator yields exactly 10 pairs with distinct keys.
data SmallList a = SmallList [a] deriving Show
-- Wrapper whose generator yields at least 10 pairs with distinct keys.
data BigList a = BigList [a] deriving Show
-- | Exactly 10 key-value pairs with pairwise-distinct keys.
-- 'vectorOf' fixes the length up front; the old version rejected
-- arbitrary-length lists until one happened to have length exactly 10,
-- which almost never succeeds quickly.
instance (Ord a, Arbitrary a, Arbitrary b) => Arbitrary (SmallList (a, b)) where
    arbitrary = do
        xs <- vectorOf 10 arbitrary `suchThat` distinctKeys
        return $ SmallList xs
      where
        distinctKeys ys = length ys == Set.size (Set.fromList (map fst ys))
-- | At least 10 key-value pairs with pairwise-distinct keys.
instance (Ord a, Arbitrary a, Arbitrary b) => Arbitrary (BigList (a, b)) where
    arbitrary = do
        pairs <- arbitrary `suchThat` validPairs
        return $ BigList pairs
      where
        validPairs ys =
          length ys >= 10
            && length ys == Set.size (Set.fromList (map fst ys))
-- | Cache behaviour: empty on creation, faithful under the size limit,
-- and evicting down to the limit above it.
spec :: Spec
spec = parallel $ do
  describe "cache" $ do
    it "is empty at first" $ do
      cache <- createCache 10 now :: IO (Cache Int Int)
      asList cache `shouldReturn` []
    it "stores values under limit" $ property $ \lst -> do
      let SmallList xs = lst
      cache <- createCache 10 now :: IO (Cache Int String)
      -- mapM_ rather than mapM: the results are discarded
      mapM_ (\(k, v) -> cacheValue k v cache) xs
      (Set.fromList <$> asList cache) `shouldReturn` (Set.fromList xs)
    it "stores values over limit" $ property $ \lst -> do
      let BigList xs = lst
      cache <- createCache 10 now :: IO (Cache Int String)
      mapM_ (\(k, v) -> cacheValue k v cache) xs
      incache <- Set.fromList <$> asList cache
      Set.size incache `shouldBe` 10
      incache `shouldSatisfy` (\i -> Set.isSubsetOf i (Set.fromList xs))
| dehun/bce | test/CacheSpec.hs | mit | 1,850 | 0 | 21 | 618 | 658 | 339 | 319 | 38 | 1 |
module S where
-- | Sum of a list of Ints.
s :: [Int] -> Int
s = sum
-- | Infinite stream of successive doublings of the base:
-- base, 2*base, 4*base, ...
exponents :: Int -> [Int]
exponents base = iterate (2 *) base
-- | Add two to every element.
addtwo :: [Int] -> [Int]
addtwo xs = [x + 2 | x <- xs]
| fodder008/VisualizingHaskellAST | testing/s.hs | mit | 182 | 0 | 9 | 41 | 102 | 57 | 45 | 7 | 1 |
{- |
Module : ./TPTP/Pretty.hs
Description : A pretty printer for the TPTP Syntax v6.4.0.7
Copyright : (c) Eugen Kuksa University of Magdeburg 2017
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Eugen Kuksa <kuksa@iks.cs.ovgu.de>
Stability : provisional
Portability : portable
A pretty printer for the TPTP Input Syntax v6.4.0.7 taken from
<http://www.cs.miami.edu/~tptp/TPTP/SyntaxBNF.html>
-}
module TPTP.Pretty (printBasicTheory, printNamedSentence) where
import TPTP.AS
import TPTP.Sign
import Common.AS_Annotation hiding (Name)
import Common.Id (Token)
import Common.Doc hiding (defn)
import Common.DocUtils
import Data.Char (toLower)
import qualified Data.Map as Map
import qualified Data.Set as Set
{- -----------------------------------------------------------------------------
Pretty instances
----------------------------------------------------------------------------- -}
instance Pretty Symbol where
pretty = printSymbol
instance Pretty Sign where
pretty = printSign
instance Pretty BASIC_SPEC where
pretty = printBasicSpec
instance Pretty TPTP where
pretty = printTPTP
instance Pretty TPTP_input where
pretty = printTPTP_input
instance Pretty Comment where
pretty = printComment
instance Pretty DefinedComment where
pretty = printDefinedComment
instance Pretty SystemComment where
pretty = printSystemComment
instance Pretty Annotated_formula where
pretty = printAnnotated_formula
instance Pretty TPI_annotated where
pretty = printTPI_annotated
instance Pretty THF_annotated where
pretty = printTHF_annotated
instance Pretty TFX_annotated where
pretty = printTFX_annotated
instance Pretty TFF_annotated where
pretty = printTFF_annotated
instance Pretty TCF_annotated where
pretty = printTCF_annotated
instance Pretty FOF_annotated where
pretty = printFOF_annotated
instance Pretty CNF_annotated where
pretty = printCNF_annotated
instance Pretty Annotations where
pretty = printAnnotations
instance Pretty Formula_role where
pretty = printFormula_role
instance Pretty THF_formula where
pretty = printTHF_formula
instance Pretty THF_logic_formula where
pretty = printTHF_logic_formula
instance Pretty THF_binary_formula where
pretty = printTHF_binary_formula
instance Pretty THF_binary_pair where
pretty = printTHF_binary_pair
instance Pretty THF_binary_tuple where
pretty = printTHF_binary_tuple
instance Pretty THF_unitary_formula where
pretty = printTHF_unitary_formula
instance Pretty THF_quantified_formula where
pretty = printTHF_quantified_formula
instance Pretty THF_quantification where
pretty = printTHF_quantification
instance Pretty THF_variable where
pretty = printTHF_variable
instance Pretty THF_typed_variable where
pretty = printTHF_typed_variable
instance Pretty THF_unary_formula where
pretty = printTHF_unary_formula
instance Pretty THF_atom where
pretty = printTHF_atom
instance Pretty THF_function where
pretty = printTHF_function
instance Pretty THF_conn_term where
pretty = printTHF_conn_term
instance Pretty THF_conditional where
pretty = printTHF_conditional
instance Pretty THF_let where
pretty = printTHF_let
instance Pretty THF_let_defns where
pretty = printTHF_let_defns
instance Pretty THF_let_defn where
pretty = printTHF_let_defn
instance Pretty THF_let_quantified_defn where
pretty = printTHF_let_quantified_defn
instance Pretty THF_let_plain_defn where
pretty = printTHF_let_plain_defn
instance Pretty THF_let_defn_LHS where
pretty = printTHF_let_defn_LHS
instance Pretty THF_type_formula where
pretty = printTHF_type_formula
instance Pretty THF_typeable_formula where
pretty = printTHF_typeable_formula
instance Pretty THF_subtype where
pretty = printTHF_subtype
instance Pretty THF_top_level_type where
pretty = printTHF_top_level_type
instance Pretty THF_unitary_type where
pretty = printTHF_unitary_type
instance Pretty THF_binary_type where
pretty = printTHF_binary_type
instance Pretty THF_sequent where
pretty = printTHF_sequent
instance Pretty THF_tuple where
pretty = printTHF_tuple
instance Pretty TFX_formula where
pretty = printTFX_formula
instance Pretty TFX_logic_formula where
pretty = printTFX_logic_formula
instance Pretty TFF_formula where
pretty = printTFF_formula
instance Pretty TFF_logic_formula where
pretty = printTFF_logic_formula
instance Pretty TFF_binary_formula where
pretty = printTFF_binary_formula
instance Pretty TFF_binary_nonassoc where
pretty = printTFF_binary_nonassoc
instance Pretty TFF_binary_assoc where
pretty = printTFF_binary_assoc
instance Pretty TFF_unitary_formula where
pretty = printTFF_unitary_formula
instance Pretty TFF_quantified_formula where
pretty = printTFF_quantified_formula
instance Pretty TFF_variable where
pretty = printTFF_variable
instance Pretty TFF_typed_variable where
pretty = printTFF_typed_variable
instance Pretty TFF_unary_formula where
pretty = printTFF_unary_formula
instance Pretty TFF_conditional where
pretty = printTFF_conditional
instance Pretty TFF_let where
pretty = printTFF_let
instance Pretty TFF_let_term_defns where
pretty = printTFF_let_term_defns
instance Pretty TFF_let_term_defn where
pretty = printTFF_let_term_defn
instance Pretty TFF_let_term_binding where
pretty = printTFF_let_term_binding
instance Pretty TFF_let_formula_defns where
pretty = printTFF_let_formula_defns
instance Pretty TFF_let_formula_defn where
pretty = printTFF_let_formula_defn
instance Pretty TFF_let_formula_binding where
pretty = printTFF_let_formula_binding
instance Pretty TFF_sequent where
pretty = printTFF_sequent
instance Pretty TFF_formula_tuple where
pretty = printTFF_formula_tuple
instance Pretty TFF_typed_atom where
pretty = printTFF_typed_atom
instance Pretty TFF_subtype where
pretty = printTFF_subtype
instance Pretty TFF_top_level_type where
pretty = printTFF_top_level_type
instance Pretty TF1_quantified_type where
pretty = printTF1_quantified_type
instance Pretty TFF_monotype where
pretty = printTFF_monotype
instance Pretty TFF_unitary_type where
pretty = printTFF_unitary_type
instance Pretty TFF_atomic_type where
pretty = printTFF_atomic_type
instance Pretty TFF_mapping_type where
pretty = printTFF_mapping_type
instance Pretty TFF_xprod_type where
pretty = printTFF_xprod_type
instance Pretty TCF_formula where
pretty = printTCF_formula
instance Pretty TCF_logic_formula where
pretty = printTCF_logic_formula
instance Pretty TCF_quantified_formula where
pretty = printTCF_quantified_formula
instance Pretty FOF_formula where
pretty = printFOF_formula
instance Pretty FOF_logic_formula where
pretty = printFOF_logic_formula
instance Pretty FOF_binary_formula where
pretty = printFOF_binary_formula
instance Pretty FOF_binary_nonassoc where
pretty = printFOF_binary_nonassoc
instance Pretty FOF_binary_assoc where
pretty = printFOF_binary_assoc
instance Pretty FOF_unitary_formula where
pretty = printFOF_unitary_formula
instance Pretty FOF_quantified_formula where
pretty = printFOF_quantified_formula
instance Pretty FOF_unary_formula where
pretty = printFOF_unary_formula
instance Pretty FOF_infix_unary where
pretty = printFOF_infix_unary
instance Pretty FOF_atomic_formula where
pretty = printFOF_atomic_formula
instance Pretty FOF_plain_atomic_formula where
pretty = printFOF_plain_atomic_formula
instance Pretty FOF_defined_atomic_formula where
pretty = printFOF_defined_atomic_formula
instance Pretty FOF_defined_plain_formula where
pretty = printFOF_defined_plain_formula
instance Pretty FOF_defined_infix_formula where
pretty = printFOF_defined_infix_formula
instance Pretty FOF_system_atomic_formula where
pretty = printFOF_system_atomic_formula
instance Pretty FOF_plain_term where
pretty = printFOF_plain_term
instance Pretty FOF_defined_term where
pretty = printFOF_defined_term
instance Pretty FOF_defined_atomic_term where
pretty = printFOF_defined_atomic_term
instance Pretty FOF_defined_plain_term where
pretty = printFOF_defined_plain_term
instance Pretty FOF_system_term where
pretty = printFOF_system_term
instance Pretty FOF_term where
pretty = printFOF_term
instance Pretty FOF_function_term where
pretty = printFOF_function_term
instance Pretty TFF_conditional_term where
pretty = printTFF_conditional_term
instance Pretty TFF_let_term where
pretty = printTFF_let_term
instance Pretty FOF_sequent where
pretty = printFOF_sequent
instance Pretty FOF_formula_tuple where
pretty = printFOF_formula_tuple
instance Pretty CNF_formula where
pretty = printCNF_formula
instance Pretty Disjunction where
pretty = printDisjunction
instance Pretty Literal where
pretty = printLiteral
instance Pretty THF_quantifier where
pretty = printTHF_quantifier
instance Pretty TH1_quantifier where
pretty = printTH1_quantifier
instance Pretty TH0_quantifier where
pretty = printTH0_quantifier
instance Pretty THF_pair_connective where
pretty = printTHF_pair_connective
instance Pretty THF_unary_connective where
pretty = printTHF_unary_connective
instance Pretty TH1_unary_connective where
pretty = printTH1_unary_connective
instance Pretty FOF_quantifier where
pretty = printFOF_quantifier
instance Pretty Binary_connective where
pretty = printBinary_connective
instance Pretty Assoc_connective where
pretty = printAssoc_connective
instance Pretty Unary_connective where
pretty = printUnary_connective
instance Pretty Defined_type where
pretty = printDefined_type
instance Pretty Atom where
pretty = printAtom
instance Pretty Untyped_atom where
pretty = printUntyped_atom
instance Pretty Defined_proposition where
pretty = printDefined_proposition
instance Pretty Defined_predicate where
pretty = printDefined_predicate
instance Pretty Defined_infix_pred where
pretty = printDefined_infix_pred
instance Pretty Defined_functor where
pretty = printDefined_functor
instance Pretty Defined_term where
pretty = printDefined_term
instance Pretty Source where
pretty = printSource
instance Pretty DAG_source where
pretty = printDAG_source
instance Pretty Inference_record where
pretty = printInference_record
instance Pretty Parent_info where
pretty = printParent_info
instance Pretty Internal_source where
pretty = printInternal_source
instance Pretty Intro_type where
pretty = printIntro_type
instance Pretty External_source where
pretty = printExternal_source
instance Pretty File_source where
pretty = printFile_source
instance Pretty Theory where
pretty = printTheory
instance Pretty Theory_name where
pretty = printTheory_name
instance Pretty Creator_source where
pretty = printCreator_source
instance Pretty Useful_info where
pretty = printUseful_info
instance Pretty Info_item where
pretty = printInfo_item
instance Pretty Formula_item where
pretty = printFormula_item
instance Pretty Inference_item where
pretty = printInference_item
instance Pretty Inference_status where
pretty = printInference_status
instance Pretty Status_value where
pretty = printStatus_value
instance Pretty Inference_info where
pretty = printInference_info
instance Pretty New_symbol_record where
pretty = printNew_symbol_record
instance Pretty Principal_symbol where
pretty = printPrincipal_symbol
instance Pretty Include where
pretty = printInclude
instance Pretty General_term where
pretty = printGeneral_term
instance Pretty General_data where
pretty = printGeneral_data
instance Pretty General_function where
pretty = printGeneral_function
instance Pretty Formula_data where
pretty = printFormula_data
instance Pretty Name where
pretty = printName
instance Pretty Number where
pretty = printNumber
{- -----------------------------------------------------------------------------
Logic components
----------------------------------------------------------------------------- -}
-- Print a newline at the end of the document for good style.
-- | Render a whole theory: one named sentence per line.
printBasicTheory :: (Sign, [Named Sentence]) -> Doc
printBasicTheory (_, namedSentences) =
  vsep (map printNamedSentence namedSentences) $+$ text ""
-- | Adjust the formula role, unwrap the sentence, and print it.
printNamedSentence :: Named Sentence -> Doc
printNamedSentence ns = printAnnotated_formula (sentence (adjust_formula_role ns))
-- | A symbol prints as its identifier.
printSymbol :: Symbol -> Doc
printSymbol sym = pretty (symbolId sym)
-- | Render the signature as a TPTP block comment ("%{ ... }%"),
-- listing each non-empty symbol category on its own labelled line.
printSign :: Sign -> Doc
printSign s =
  vsep [ text "%{"
       , if Set.null $ constantSet s then empty else
           text "constants: "
           <+> sepByCommas (map pretty $ Set.toList $ constantSet s)
       , if Set.null $ numberSet s then empty else
           text "numbers: "
           <+> sepByCommas (map pretty $ Set.toList $ numberSet s)
       , if Set.null $ propositionSet s then empty else
           text "propositions: "
           <+> sepByCommas (map pretty $ Set.toList $ propositionSet s)
       , if Map.null (tffPredicateMap s) && Map.null (thfPredicateMap s) && Map.null (fofPredicateMap s) then empty else
           text "predicates: "
           <+> vcat (punctuate comma
                (map printTHFType (Map.toList $ thfPredicateMap s)
                 ++ map printTFFType (Map.toList $ tffPredicateMap s)
                 ++ map printFOFPredicate (Map.toList $ fofPredicateMap s)))
       , if Map.null (fofFunctorMap s) then empty else
           text "functors: "
           <+> vcat (punctuate comma
                (map printFOFFunctor (Map.toList $ fofFunctorMap s)))
       , if Map.null (tffTypeConstantMap s) && Map.null (thfTypeConstantMap s) then empty else
           text "type constants: "
           <+> vcat (punctuate comma
                (map printTHFType (Map.toList $ thfTypeConstantMap s)
                 ++ map printTFFType (Map.toList $ tffTypeConstantMap s)))
       , if Map.null (tffTypeFunctorMap s) && Map.null (thfTypeFunctorMap s) then empty else
           text "type functors: "
           <+> vcat (punctuate comma
                (map printTHFType (Map.toList $ thfTypeFunctorMap s)
                 ++ map printTFFType (Map.toList $ tffTypeFunctorMap s)))
       , if Map.null (tffSubtypeMap s) && Map.null (thfSubtypeMap s) then empty else
           text "subtypes: "
           <+> vcat (punctuate comma
                (map printTHFSubType (Map.toList $ thfSubtypeMap s)
                 ++ map printTFFSubType (Map.toList $ tffSubtypeMap s)))
       , text "}%"]
  where
    -- Typed THF symbols are rendered through the AS type-formula nodes.
    printTHFType :: (THFTypeable, THF_top_level_type) -> Doc
    printTHFType (t, tlt) = pretty $ case t of
      THFTypeFormula f -> THFTF_typeable f tlt
      THFTypeConstant c -> THFTF_constant c tlt
    printTFFType :: (Untyped_atom, TFF_top_level_type) -> Doc
    printTFFType (a, tlt) = pretty $ TFFTA_plain a tlt
    -- One line per arity: predicates end in result type O, functors in I.
    printFOFPredicate :: (Predicate, Set.Set Int) -> Doc
    printFOFPredicate (p, arities) = vcat $ punctuate comma $
      map (printFOFPredicateOrFunctor p O) $ Set.toList arities
    printFOFFunctor :: (TPTP_functor, Set.Set Int) -> Doc
    printFOFFunctor (p, arities) = vcat $ punctuate comma $
      map (printFOFPredicateOrFunctor p I) $ Set.toList arities
    -- Renders e.g. "name: (I * I) > O"; the argument part is omitted
    -- for arity 0 and unparenthesized for arity 1.
    printFOFPredicateOrFunctor :: Token -> Defined_type -> Int -> Doc
    printFOFPredicateOrFunctor token typ arity =
      let arguments =
            if arity == 0
            then empty
            else if arity == 1
                 then pretty I <+> text ">"
                 else parens (sepBy (text "*") $ map pretty $ replicate arity I)
                      <+> text ">"
      in pretty token <> colon <+> arguments <+> pretty typ
    printTHFSubType :: (THF_atom, THF_atom) -> Doc
    printTHFSubType (a1, a2) = pretty $ THF_subtype a1 a2
    printTFFSubType :: (Untyped_atom, Atom) -> Doc
    printTFFSubType (a1, a2) = pretty $ TFF_subtype a1 a2
-- | A basic spec is its items, stacked vertically.
printBasicSpec :: BASIC_SPEC -> Doc
printBasicSpec (Basic_spec items) = vcat (map pretty items)
{- -----------------------------------------------------------------------------
Files. Empty file is OK
----------------------------------------------------------------------------- -}
-- <TPTP_file> ::= <TPTP_input>*
-- | A TPTP file is its inputs, separated vertically.
printTPTP :: TPTP -> Doc
printTPTP (TPTP inputs) = vsep (map pretty inputs)
-- <TPTP_input> ::= <annotated_formula> | <include>
-- | Each input kind delegates to its own printer.
printTPTP_input :: TPTP_input -> Doc
printTPTP_input (Annotated_formula a) = pretty a
printTPTP_input (TPTP_include i) = pretty i
printTPTP_input (TPTP_comment c) = pretty c
printTPTP_input (TPTP_defined_comment c) = pretty c
printTPTP_input (TPTP_system_comment c) = pretty c
{- -----------------------------------------------------------------------------
Comments
----------------------------------------------------------------------------- -}
-- <comment> ::- <comment_line>|<comment_block>
-- <comment_line> ::- [%]<printable_char>*
-- <comment_block> ::: [/][*]<not_star_slash>[*][*]*[/]
-- | Line comments render as "% ..."; block comments as "/* ... */".
printComment :: Comment -> Doc
printComment (Comment_line c) = text "%" <+> pretty c
printComment (Comment_block c) = text "/*" <+> pretty c <+> text "*/"
-- %---- <defined_comment> ::- <def_comment_line>|<def_comment_block>
-- %---- <def_comment_line> ::: [%]<dollar><printable_char>*
-- %---- <def_comment_block> ::: [/][*]<dollar><not_star_slash>[*][*]*[/]
-- | Defined comments carry a single dollar marker: "%$ ..." / "/*$ ... */".
printDefinedComment :: DefinedComment -> Doc
printDefinedComment (Defined_comment_line c) = text "%$" <+> pretty c
printDefinedComment (Defined_comment_block c) = text "/*$" <+> pretty c <+> text "*/"
-- %---- <system_comment> ::- <sys_comment_line>|<sys_comment_block>
-- %---- <sys_comment_line> ::: [%]<dollar><dollar><printable_char>*
-- %---- <sys_comment_block> ::: [/][*]<dollar><dollar><not_star_slash>[*][*]*[/]
-- | System comments carry a double dollar marker: "%$$ ..." / "/*$$ ... */".
printSystemComment :: SystemComment -> Doc
printSystemComment (System_comment_line c) = text "%$$" <+> pretty c
printSystemComment (System_comment_block c) = text "/*$$" <+> pretty c <+> text "*/"
-- %----Formula records
-- <annotated_formula> ::= <thf_annotated> | <tfx_annotated> | <tff_annotated> |
-- <tcf_annotated> | <fof_annotated> | <cnf_annotated> |
-- <tpi_annotated>
-- | Dispatch on the formula dialect and delegate to its printer.
printAnnotated_formula :: Annotated_formula -> Doc
printAnnotated_formula (AF_THF_Annotated f) = pretty f
printAnnotated_formula (AF_TFX_Annotated f) = pretty f
printAnnotated_formula (AF_TFF_Annotated f) = pretty f
printAnnotated_formula (AF_TCF_Annotated f) = pretty f
printAnnotated_formula (AF_FOF_Annotated f) = pretty f
printAnnotated_formula (AF_CNF_Annotated f) = pretty f
printAnnotated_formula (AF_TPI_Annotated f) = pretty f
-- <???_annotated> contains the annotations, which are introduced with a comma.
-- <annotations> ::= ,<source><optional_info> | <null>
-- | Empty annotations vanish; present ones are introduced by a comma.
printAnnotationsIfAnnotated :: Annotations -> Doc
printAnnotationsIfAnnotated (Annotations Nothing) = empty
printAnnotationsIfAnnotated a = comma <+> pretty a
-- <tpi_annotated> ::= tpi(<name>,<formula_role>,<tpi_formula><annotations>).
-- Prints: tpi(name,role,formula<annotations>).
printTPI_annotated :: TPI_annotated -> Doc
printTPI_annotated (TPI_annotated n r f a) =
  text "tpi" <> parens contents <> text "."
  where
    contents =
      sepByCommas [pretty n, pretty r, pretty f]
        <> printAnnotationsIfAnnotated a
-- <tpi_formula> ::= <fof_formula>
-- <thf_annotated> ::= thf(<name>,<formula_role>,<thf_formula>
-- <annotations>).
-- Prints: thf(name,role,formula<annotations>).
printTHF_annotated :: THF_annotated -> Doc
printTHF_annotated (THF_annotated n r f a) =
  text "thf" <> parens contents <> text "."
  where
    contents =
      sepByCommas [pretty n, pretty r, pretty f]
        <> printAnnotationsIfAnnotated a
-- <tfx_annotated> ::= tfx(<name>,<formula_role>,<tfx_formula>
-- <annotations>).
-- Prints: tfx(name,role,formula<annotations>).
printTFX_annotated :: TFX_annotated -> Doc
printTFX_annotated (TFX_annotated n r f a) =
  text "tfx" <> parens contents <> text "."
  where
    contents =
      sepByCommas [pretty n, pretty r, pretty f]
        <> printAnnotationsIfAnnotated a
-- <tff_annotated> ::= tff(<name>,<formula_role>,<tff_formula>
-- <annotations>).
-- Prints: tff(name,role,formula<annotations>).
printTFF_annotated :: TFF_annotated -> Doc
printTFF_annotated (TFF_annotated n r f a) =
  text "tff" <> parens contents <> text "."
  where
    contents =
      sepByCommas [pretty n, pretty r, pretty f]
        <> printAnnotationsIfAnnotated a
-- <tcf_annotated> ::= tcf(<name>,<formula_role>,<tcf_formula>
-- <annotations>).
-- Prints: tcf(name,role,formula<annotations>).
printTCF_annotated :: TCF_annotated -> Doc
printTCF_annotated (TCF_annotated n r f a) =
  text "tcf" <> parens contents <> text "."
  where
    contents =
      sepByCommas [pretty n, pretty r, pretty f]
        <> printAnnotationsIfAnnotated a
-- <fof_annotated> ::= fof(<name>,<formula_role>,<fof_formula>
-- <annotations>).
-- Prints: fof(name,role,formula<annotations>).
printFOF_annotated :: FOF_annotated -> Doc
printFOF_annotated (FOF_annotated n r f a) =
  text "fof" <> parens contents <> text "."
  where
    contents =
      sepByCommas [pretty n, pretty r, pretty f]
        <> printAnnotationsIfAnnotated a
-- <cnf_annotated> ::= cnf(<name>,<formula_role>,<cnf_formula>
-- <annotations>).
-- Prints: cnf(name,role,formula<annotations>).
printCNF_annotated :: CNF_annotated -> Doc
printCNF_annotated (CNF_annotated n r f a) =
  text "cnf" <> parens contents <> text "."
  where
    contents =
      sepByCommas [pretty n, pretty r, pretty f]
        <> printAnnotationsIfAnnotated a
-- <annotations> ::= ,<source><optional_info> | <null>
-- | Absent annotations print as nothing; present ones print the source
-- followed by the optional info.
printAnnotations :: Annotations -> Doc
printAnnotations (Annotations annos) = maybe empty render annos
  where
    render (src, optInfo) = fsep [pretty src, printOptional_info optInfo]
-- Types for problems
-- %----Types for problems.
-- <formula_role> ::= <lower_word>
-- <formula_role> :== axiom | hypothesis | definition | assumption |
-- lemma | theorem | corollary | conjecture |
-- negated_conjecture | plain | type |
-- fi_domain | fi_functors | fi_predicates | unknown
-- Standard roles are printed by lowercasing the derived 'Show' name of
-- the constructor (e.g. Negated_conjecture -> "negated_conjecture"),
-- which matches the TPTP role keywords listed above.
printFormula_role :: Formula_role -> Doc
printFormula_role x = case x of
  Other_formula_role t -> pretty t
  -- ^ For future updates. Should not be used.
  _ -> text $ map toLower $ show x
-- %----THF formulae.
-- <thf_formula> ::= <thf_logic_formula> | <thf_sequent>
printTHF_formula :: THF_formula -> Doc
printTHF_formula x = case x of
THFF_logic f -> pretty f
THFF_sequent s -> pretty s
-- <thf_logic_formula> ::= <thf_binary_formula> | <thf_unitary_formula> |
-- <thf_type_formula> | <thf_subtype>
printTHF_logic_formula :: THF_logic_formula -> Doc
printTHF_logic_formula x = case x of
THFLF_binary f -> pretty f
THFLF_unitary f -> pretty f
THFLF_type f -> pretty f
THFLF_subtype f -> pretty f
-- <thf_binary_formula> ::= <thf_binary_pair> | <thf_binary_tuple>
printTHF_binary_formula :: THF_binary_formula -> Doc
printTHF_binary_formula x = case x of
THFBF_pair a -> pretty a
THFBF_tuple a -> pretty a
-- %----Only some binary connectives can be written without ()s.
-- %----There's no precedence among binary connectives
-- <thf_binary_pair> ::= <thf_unitary_formula> <thf_pair_connective>
-- <thf_unitary_formula>
printTHF_binary_pair :: THF_binary_pair -> Doc
printTHF_binary_pair x = case x of
THF_binary_pair c f1 f2 -> fsep [pretty f1, pretty c, pretty f2]
-- <thf_binary_tuple> ::= <thf_or_formula> | <thf_and_formula> |
-- <thf_apply_formula>
printTHF_binary_tuple :: THF_binary_tuple -> Doc
printTHF_binary_tuple x = case x of
THFBT_or fs -> printTHF_or_formula fs
THFBT_and fs -> printTHF_and_formula fs
THFBT_apply fs -> printTHF_apply_formula fs
-- <thf_or_formula> ::= <thf_unitary_formula> <vline> <thf_unitary_formula> |
-- <thf_or_formula> <vline> <thf_unitary_formula>
printTHF_or_formula :: THF_or_formula -> Doc
printTHF_or_formula fs = sepBy vline $ map pretty fs
-- <thf_and_formula> ::= <thf_unitary_formula> & <thf_unitary_formula> |
-- <thf_and_formula> & <thf_unitary_formula>
printTHF_and_formula :: THF_or_formula -> Doc
printTHF_and_formula fs = sepBy andD $ map pretty fs
-- <thf_apply_formula> ::= <thf_unitary_formula> @ <thf_unitary_formula> |
-- <thf_apply_formula> @ <thf_unitary_formula>
printTHF_apply_formula :: THF_or_formula -> Doc
printTHF_apply_formula fs = sepBy atD $ map pretty fs
-- <thf_unitary_formula> ::= <thf_quantified_formula> | <thf_unary_formula> |
-- <thf_atom> | <thf_conditional> | <thf_let> |
-- <thf_tuple> | (<thf_logic_formula>)
printTHF_unitary_formula :: THF_unitary_formula -> Doc
printTHF_unitary_formula x = case x of
THFUF_quantified a -> pretty a
THFUF_unary a -> pretty a
THFUF_atom a -> pretty a
THFUF_conditional a -> pretty a
THFUF_let a -> pretty a
THFUF_tuple a -> pretty a
THFUF_logic a -> parens $ pretty a
-- <thf_quantified_formula> ::= <thf_quantification> <thf_unitary_formula>
printTHF_quantified_formula :: THF_quantified_formula -> Doc
printTHF_quantified_formula x = case x of
THF_quantified_formula q f -> hsep [pretty q, pretty f]
-- <thf_quantification> ::= <thf_quantifier> [<thf_variable_list>] :
printTHF_quantification :: THF_quantification -> Doc
printTHF_quantification x = case x of
THF_quantification q vars ->
hsep [pretty q, brackets (printTHF_variable_list vars) <> colon]
-- <thf_variable_list> ::= <thf_variable> | <thf_variable>,<thf_variable_list>
printTHF_variable_list :: THF_variable_list -> Doc
printTHF_variable_list vars = sepByCommas $ map pretty vars
-- <thf_variable> ::= <thf_typed_variable> | <variable>
printTHF_variable :: THF_variable -> Doc
printTHF_variable x = case x of
THFV_typed a -> pretty a
THFV_variable a -> pretty a
-- <thf_typed_variable> ::= <variable> : <thf_top_level_type>
printTHF_typed_variable :: THF_typed_variable -> Doc
printTHF_typed_variable x = case x of
THF_typed_variable v tlt -> fsep [pretty v <> colon, pretty tlt]
-- <thf_unary_formula> ::= <thf_unary_connective> (<thf_logic_formula>)
printTHF_unary_formula :: THF_unary_formula -> Doc
printTHF_unary_formula x = case x of
THF_unary_formula c f -> pretty c <> parens (pretty f)
-- <thf_atom> ::= <thf_function> | <variable> | <defined_term> |
-- <thf_conn_term>
printTHF_atom :: THF_atom -> Doc
printTHF_atom x = case x of
THF_atom_function a -> pretty a
THF_atom_variable a -> pretty a
THF_atom_defined a -> pretty a
THF_atom_conn a -> pretty a
-- <thf_function> ::= <atom> | <functor>(<thf_arguments>) |
-- <defined_functor>(<thf_arguments>) |
-- <system_functor>(<thf_arguments>)
printTHF_function :: THF_function -> Doc
printTHF_function x = case x of
THFF_atom a -> pretty a
THFF_functor f args -> pretty f <> parens (printTHF_arguments args)
THFF_defined f args -> pretty f <> parens (printTHF_arguments args)
THFF_system f args -> pretty f <> parens (printTHF_arguments args)
-- <thf_conn_term> ::= <thf_pair_connective> | <assoc_connective> |
-- <thf_unary_connective>
printTHF_conn_term :: THF_conn_term -> Doc
printTHF_conn_term x = case x of
THFC_pair a -> pretty a
THFC_assoc a -> pretty a
THFC_unary a -> pretty a
-- <thf_conditional> ::= $ite(<thf_logic_formula>,<thf_logic_formula>,
-- <thf_logic_formula>)
printTHF_conditional :: THF_conditional -> Doc
printTHF_conditional x = case x of
THF_conditional f_if f_then f_else ->
text "$ite"
<> parens (sepByCommas [pretty f_if, pretty f_then, pretty f_else])
-- %----The LHS of a term or formula binding must be a non-variable term that
-- %----is flat with pairwise distinct variable arguments, and the variables in
-- %----the LHS must be exactly those bound in the universally quantified variable
-- %----list, in the same order. Let definitions are not recursive: a non-variable
-- %----symbol introduced in the LHS of a let definition cannot occur in the RHS.
-- %----If a symbol with the same signature as the one in the LHS of the binding
-- %----is declared above the let expression (at the top level or in an
-- %----encompassing let) then it can be used in the RHS of the binding, but it is
-- %----not accessible in the term or formula of the let expression. Let
-- %----expressions can be eliminated by a simple definition expansion.
-- <thf_let> ::= $let(<thf_unitary_formula>,<thf_formula>)
-- <thf_let> :== $let(<thf_let_defns>,<thf_formula>)
-- Emits "$let(<defns>,<formula>)".
printTHF_let :: THF_let -> Doc
printTHF_let x = case x of
  THF_let defns f ->
    text "$let" <> parens (sepByCommas [pretty defns, pretty f])
-- <thf_let_defns> :== <thf_let_defn> | [<thf_let_defn_list>]
-- A single definition is printed bare; a list is wrapped in brackets.
printTHF_let_defns :: THF_let_defns -> Doc
printTHF_let_defns x = case x of
  THFLD_single a -> pretty a
  THFLD_many a -> brackets $ printTHF_let_defn_list a
-- <thf_let_defn_list> :== <thf_let_defn> | <thf_let_defn>,<thf_let_defn_list>
printTHF_let_defn_list :: THF_let_defn_list -> Doc
printTHF_let_defn_list = sepByCommas . map pretty
-- <thf_let_defn> :== <thf_let_quantified_defn> | <thf_let_plain_defn>
printTHF_let_defn :: THF_let_defn -> Doc
printTHF_let_defn x = case x of
  THFLD_quantified a -> pretty a
  THFLD_plain a -> pretty a
-- <thf_let_quantified_defn> :== <thf_quantification> (<thf_let_plain_defn>)
-- The plain definition must be parenthesised per the production above.
printTHF_let_quantified_defn :: THF_let_quantified_defn -> Doc
printTHF_let_quantified_defn x = case x of
  THF_let_quantified_defn q lpd ->
    pretty q <> parens (pretty lpd)
-- <thf_let_plain_defn> :== <thf_let_defn_LHS> <assignment> <thf_formula>
printTHF_let_plain_defn :: THF_let_plain_defn -> Doc
printTHF_let_plain_defn x = case x of
  THF_let_plain_defn lhs f -> fsep [pretty lhs, assignment, pretty f]
-- <thf_let_defn_LHS> :== <constant> | <functor>(<fof_arguments>) |
-- <thf_tuple>
-- %----The <fof_arguments> must all be <variable>s, and the <thf_tuple> may
-- %----contain only <constant>s and <functor>(<fof_arguments>)s
-- Prints the left-hand side of a let binding; the functor form renders
-- its (variable-only, see grammar note above) arguments in parentheses.
printTHF_let_defn_LHS :: THF_let_defn_LHS -> Doc
printTHF_let_defn_LHS x = case x of
  THFLDL_constant a -> pretty a
  THFLDL_functor f args -> pretty f <> parens (printFOF_arguments args)
  THFLDL_tuple a -> pretty a
-- <thf_arguments> ::= <thf_formula_list>
-- <thf_arguments> is a plain synonym for a formula list, hence delegation.
printTHF_arguments :: THF_arguments -> Doc
printTHF_arguments x = printTHF_formula_list x
-- <thf_type_formula> ::= <thf_typeable_formula> : <thf_top_level_type>
-- <thf_type_formula> :== <constant> : <thf_top_level_type>
-- Prints "<formula-or-constant> : <top-level-type>"; the colon attaches
-- to the left operand so it never wraps on its own.
printTHF_type_formula :: THF_type_formula -> Doc
printTHF_type_formula x = case x of
  THFTF_typeable f tlt -> fsep [pretty f <> colon, pretty tlt]
  THFTF_constant c tlt -> fsep [pretty c <> colon, pretty tlt]
-- <thf_typeable_formula> ::= <thf_atom> | (<thf_logic_formula>)
printTHF_typeable_formula :: THF_typeable_formula -> Doc
printTHF_typeable_formula x = case x of
  THFTF_atom a -> pretty a
  THFTF_logic a -> parens $ pretty a
-- <thf_subtype> ::= <thf_atom> <subtype_sign> <thf_atom>
printTHF_subtype :: THF_subtype -> Doc
printTHF_subtype x = case x of
  THF_subtype a1 a2 -> fsep [pretty a1, subtype_sign, pretty a2]
-- %----<thf_top_level_type> appears after ":", where a type is being specified
-- %----for a term or variable. <thf_unitary_type> includes <thf_unitary_formula>,
-- %----so the syntax allows just about any lambda expression with "enough"
-- %----parentheses to serve as a type. The expected use of this flexibility is
-- %----parametric polymorphism in types, expressed with lambda abstraction.
-- %----Mapping is right-associative: o > o > o means o > (o > o).
-- %----Xproduct is left-associative: o * o * o means (o * o) * o.
-- %----Union is left-associative: o + o + o means (o + o) + o.
-- <thf_top_level_type> ::= <thf_unitary_type> | <thf_mapping_type>
printTHF_top_level_type :: THF_top_level_type -> Doc
printTHF_top_level_type x = case x of
  THFTLT_unitary a -> pretty a
  THFTLT_mapping a -> printTHF_mapping_type a
-- <thf_unitary_type> ::= <thf_unitary_formula> | (<thf_binary_type>)
printTHF_unitary_type :: THF_unitary_type -> Doc
printTHF_unitary_type x = case x of
  THFUT_unitary a -> pretty a
  THFUT_binary a -> parens $ pretty a
-- Each of these binary types has at least two (!) list entries.
-- <thf_binary_type> ::= <thf_mapping_type> | <thf_xprod_type> |
-- <thf_union_type>
printTHF_binary_type :: THF_binary_type -> Doc
printTHF_binary_type x = case x of
  THFBT_mapping a -> printTHF_mapping_type a
  THFBT_xprod a -> printTHF_xprod_type a
  THFBT_union a -> printTHF_union_type a
-- <thf_mapping_type> ::= <thf_unitary_type> <arrow> <thf_unitary_type> |
-- <thf_unitary_type> <arrow> <thf_mapping_type>
-- The three printers below interleave the respective operator between
-- the list entries; no extra parentheses are added here.
printTHF_mapping_type :: THF_mapping_type -> Doc
printTHF_mapping_type = sepBy arrow . map pretty
-- <thf_xprod_type> ::= <thf_unitary_type> <star> <thf_unitary_type> |
-- <thf_xprod_type> <star> <thf_unitary_type>
printTHF_xprod_type :: THF_xprod_type -> Doc
printTHF_xprod_type = sepBy star . map pretty
-- <thf_union_type> ::= <thf_unitary_type> <plus> <thf_unitary_type> |
-- <thf_union_type> <plus> <thf_unitary_type>
printTHF_union_type :: THF_union_type -> Doc
printTHF_union_type = sepBy plus . map pretty
-- %----Sequents using the Gentzen arrow
-- <thf_sequent> ::= <thf_tuple> <gentzen_arrow> <thf_tuple> |
-- (<thf_sequent>)
printTHF_sequent :: THF_sequent -> Doc
printTHF_sequent x = case x of
  THFS_plain t1 t2 -> sepBy gentzen_arrow [pretty t1, pretty t2]
  THFS_parens a -> parens $ pretty a
-- <thf_tuple> ::= [] | [<thf_formula_list>]
-- The tuple's formula list is printed within square brackets.
printTHF_tuple :: THF_tuple -> Doc
printTHF_tuple x = case x of
  THF_tuple a -> brackets $ printTHF_formula_list a
-- <thf_formula_list> ::= <thf_logic_formula> |
-- <thf_logic_formula>,<thf_formula_list>
printTHF_formula_list :: THF_formula_list -> Doc
printTHF_formula_list = sepByCommas . map pretty
-- NOTE: not used by parser
-- %----New material for modal logic semantics, not integrated yet
-- <logic_defn_rule> :== <logic_defn_LHS> <assignment> <logic_defn_RHS>-
-- data Logic_defn_rule = Logic_defn_rule Logic_defn_LHS Logic_defn_RHS
-- deriving (Show, Ord, Eq, Data, Typeable)
-- NOTE: not used by parser
-- <logic_defn_LHS> :== <logic_defn_value> | <thf_top_level_type> | <name>
-- <logic_defn_LHS> :== $constants | $quantification | $consequence |
-- $modalities
-- %----The $constants, $quantification, and $consequence apply to all of the
-- %----$modalities. Each of these may be specified only once, but not necessarily
-- %----all in a single annotated formula.-
-- data Logic_defn_LHS = Logic_defn_LHS_value Logic_defn_value
-- | Logic_defn_LHS_THF_Top_level_type THF_top_level_type
-- | Logic_defn_LHS_name Name
-- | LDLC_constants
-- | LDLC_quantification
-- | LDLC_consequence
-- | LDLC_modalities
-- deriving (Show, Ord, Eq, Data, Typeable)
-- NOTE: not used by parser
-- <logic_defn_RHS> :== <logic_defn_value> | <thf_unitary_formula>-
-- data Logic_defn_RHS = Logic_defn_RHS_value Logic_defn_value
-- | Logic_defn_RNG_THF_Unitary_forumla THF_unitary_formula
-- deriving (Show, Ord, Eq, Data, Typeable)
-- NOTE: not used by parser
-- <logic_defn_value> :== <defined_constant>
-- <logic_defn_value> :== $rigid | $flexible |
-- $constant | $varying | $cumulative | $decreasing |
-- $local | $global |
-- $modal_system_K | $modal_system_T | $modal_system_D |
-- $modal_system_S4 | $modal_system_S5 |
-- $modal_axiom_K | $modal_axiom_T | $modal_axiom_B |
-- $modal_axiom_D | $modal_axiom_4 | $modal_axiom_5-
-- data Logic_defn_value = Rigid
-- | Flexible
-- | Constant
-- | Varying
-- | Cumulative
-- | Decreasing
-- | Local
-- | Global
-- | Modal_system_K
-- | Modal_system_T
-- | Modal_system_D
-- | Modal_system_S4
-- | Modal_system_S5
-- | Modal_axiom_K
-- | Modal_axiom_T
-- | Modal_axiom_B
-- | Modal_axiom_D
-- | Modal_axiom_4
-- | Modal_axiom_5
-- deriving (Show, Ord, Eq, Data, Typeable)
-- %----TFX formulae
-- <tfx_formula> ::= <tfx_logic_formula> | <thf_sequent>
-- TFX formulae reuse the THF printers via the 'pretty' instances.
printTFX_formula :: TFX_formula -> Doc
printTFX_formula x = case x of
  TFXF_logic a -> pretty a
  TFXF_sequent a -> pretty a
-- <tfx_logic_formula> ::= <thf_logic_formula>
-- % <tfx_logic_formula> ::= <thf_binary_formula> | <thf_unitary_formula> |
-- % <tff_typed_atom> | <tff_subtype>
printTFX_logic_formula :: TFX_logic_formula -> Doc
printTFX_logic_formula x = case x of
  TFXLF_binary a -> pretty a
  TFXLF_unitary a -> pretty a
  TFXLF_typed a -> pretty a
  TFXLF_subtype a -> pretty a
-- %----TFF formulae.
-- <tff_formula> ::= <tff_logic_formula> | <tff_typed_atom> |
-- <tff_sequent>
printTFF_formula :: TFF_formula -> Doc
printTFF_formula x = case x of
  TFFF_logic a -> pretty a
  TFFF_atom a -> pretty a
  TFFF_sequent a -> pretty a
-- <tff_logic_formula> ::= <tff_binary_formula> | <tff_unitary_formula> |
-- <tff_subtype>
printTFF_logic_formula :: TFF_logic_formula -> Doc
printTFF_logic_formula x = case x of
  TFFLF_binary a -> pretty a
  TFFLF_unitary a -> pretty a
  TFFLF_subtype a -> pretty a
-- <tff_binary_formula> ::= <tff_binary_nonassoc> | <tff_binary_assoc>
printTFF_binary_formula :: TFF_binary_formula -> Doc
printTFF_binary_formula x = case x of
  TFFBF_nonassoc a -> pretty a
  TFFBF_assoc a -> pretty a
-- <tff_binary_nonassoc> ::= <tff_unitary_formula> <binary_connective>
-- <tff_unitary_formula>
-- Infix layout: left operand, connective, right operand.
printTFF_binary_nonassoc :: TFF_binary_nonassoc -> Doc
printTFF_binary_nonassoc x = case x of
  TFF_binary_nonassoc c f1 f2 -> fsep [pretty f1, pretty c, pretty f2]
-- <tff_binary_assoc> ::= <tff_or_formula> | <tff_and_formula>
printTFF_binary_assoc :: TFF_binary_assoc -> Doc
printTFF_binary_assoc x = case x of
  TFFBA_or a -> printTFF_or_formula a
  TFFBA_and a -> printTFF_and_formula a
-- <tff_or_formula> ::= <tff_unitary_formula> <vline> <tff_unitary_formula> |
-- <tff_or_formula> <vline> <tff_unitary_formula>
printTFF_or_formula :: TFF_or_formula -> Doc
printTFF_or_formula = sepBy vline . map pretty
-- <tff_and_formula> ::= <tff_unitary_formula> & <tff_unitary_formula> |
-- <tff_and_formula> & <tff_unitary_formula>
printTFF_and_formula :: TFF_and_formula -> Doc
printTFF_and_formula = sepBy andD . map pretty
-- <tff_unitary_formula> ::= <tff_quantified_formula> | <tff_unary_formula> |
-- <tff_atomic_formula> | <tff_conditional> |
-- <tff_let> | (<tff_logic_formula>)
printTFF_unitary_formula :: TFF_unitary_formula -> Doc
printTFF_unitary_formula x = case x of
  TFFUF_quantified a -> pretty a
  TFFUF_unary a -> pretty a
  TFFUF_atomic a -> pretty a
  TFFUF_conditional a -> pretty a
  TFFUF_let a -> pretty a
  TFFUF_logic a -> parens $ pretty a
-- <tff_quantified_formula> ::= <fof_quantifier> [<tff_variable_list>] :
-- <tff_unitary_formula>
-- Renders "Q [vars] : formula"; the colon is glued to the bracket group.
-- NOTE(review): uses hsep here, while the FOF/TCF analogues use fsep —
-- presumably deliberate (no line break inside a TFF quantifier prefix).
printTFF_quantified_formula :: TFF_quantified_formula -> Doc
printTFF_quantified_formula x = case x of
  TFF_quantified_formula q vars f ->
    hsep [pretty q, brackets (printTFF_variable_list vars) <> colon, pretty f]
-- <tff_variable_list> ::= <tff_variable> | <tff_variable>,<tff_variable_list>
printTFF_variable_list :: TFF_variable_list -> Doc
printTFF_variable_list = sepByCommas . map pretty
-- <tff_variable> ::= <tff_typed_variable> | <variable>
printTFF_variable :: TFF_variable -> Doc
printTFF_variable x = case x of
  TFFV_typed a -> pretty a
  TFFV_variable a -> pretty a
-- <tff_typed_variable> ::= <variable> : <tff_atomic_type>
-- Prints "Var : type", colon attached to the variable.
printTFF_typed_variable :: TFF_typed_variable -> Doc
printTFF_typed_variable x = case x of
  TFF_typed_variable v t -> fsep [pretty v <> colon, pretty t]
-- <tff_unary_formula> ::= <unary_connective> <tff_unitary_formula> |
-- <fof_infix_unary>
printTFF_unary_formula :: TFF_unary_formula -> Doc
printTFF_unary_formula x = case x of
  TFFUF_connective c f -> fsep [pretty c, pretty f]
  TFFUF_infix a -> pretty a
-- <tff_atomic_formula> ::= <fof_atomic_formula>
-- already has a pretty instance (FOF_atomic_formula)
-- <tff_conditional> ::= $ite_f(<tff_logic_formula>,<tff_logic_formula>,
-- <tff_logic_formula>)
-- Emits "$ite_f(<if>,<then>,<else>)" (the TFF formula-level conditional).
printTFF_conditional :: TFF_conditional -> Doc
printTFF_conditional x = case x of
  TFF_conditional f_if f_then f_else ->
    text "$ite_f"
    <> parens (sepByCommas [pretty f_if, pretty f_then, pretty f_else])
-- <tff_let> ::= $let_tf(<tff_let_term_defns>,<tff_formula>) |
-- $let_ff(<tff_let_formula_defns>,<tff_formula>)
-- The keyword distinguishes term-level ($let_tf) from formula-level
-- ($let_ff) definitions.
printTFF_let :: TFF_let -> Doc
printTFF_let x = case x of
  TFF_let_term_defns defns f ->
    text "$let_tf"
    <> parens (sepByCommas [pretty defns, pretty f])
  TFF_let_formula_defns defns f ->
    text "$let_ff"
    <> parens (sepByCommas [pretty defns, pretty f])
-- %----See the commentary for <thf_let>.
-- <tff_let_term_defns> ::= <tff_let_term_defn> | [<tff_let_term_list>]
-- A single definition is printed bare; multiple definitions are bracketed.
printTFF_let_term_defns :: TFF_let_term_defns -> Doc
printTFF_let_term_defns x = case x of
  TFFLTD_single a -> pretty a
  TFFLTD_many a -> brackets $ printTFF_let_term_list a
-- <tff_let_term_list> ::= <tff_let_term_defn> |
-- <tff_let_term_defn>,<tff_let_term_list>
printTFF_let_term_list :: TFF_let_term_list -> Doc
printTFF_let_term_list = sepByCommas . map pretty
-- <tff_let_term_defn> ::= ! [<tff_variable_list>] : <tff_let_term_defn> |
-- <tff_let_term_binding>
-- The quantified form prints a universal prefix "! [vars] :" and recurses
-- into the (possibly again quantified) definition.
printTFF_let_term_defn :: TFF_let_term_defn -> Doc
printTFF_let_term_defn x = case x of
  TFFLTD_variable vars defn ->
    fsep [ text "!"
         , brackets (printTFF_variable_list vars) <> colon
         , pretty defn
         ]
  TFFLTD_binding a -> pretty a
-- <tff_let_term_binding> ::= <fof_plain_term> = <fof_term> |
-- (<tff_let_term_binding>)
printTFF_let_term_binding :: TFF_let_term_binding -> Doc
printTFF_let_term_binding x = case x of
  TFFLTB_plain pt t -> fsep [pretty pt, text "=" ,pretty t]
  TFFLTB_binding a -> parens $ pretty a
-- <tff_let_formula_defns> ::= <tff_let_formula_defn> | [<tff_let_formula_list>]
printTFF_let_formula_defns :: TFF_let_formula_defns -> Doc
printTFF_let_formula_defns x = case x of
  TFFLFD_single a -> pretty a
  TFFLFD_many a -> brackets $ printTFF_let_formula_list a
-- <tff_let_formula_list> ::= <tff_let_formula_defn> |
-- <tff_let_formula_defn>,<tff_let_formula_list>
printTFF_let_formula_list :: TFF_let_formula_list -> Doc
printTFF_let_formula_list = sepByCommas . map pretty
-- <tff_let_formula_defn> ::= ! [<tff_variable_list>] : <tff_let_formula_defn> |
-- <tff_let_formula_binding>
-- Mirrors printTFF_let_term_defn, but for formula-level definitions.
printTFF_let_formula_defn :: TFF_let_formula_defn -> Doc
printTFF_let_formula_defn x = case x of
  TFFLFD_variable vars defn ->
    fsep [ text "!"
         , brackets (printTFF_variable_list vars) <> colon
         , pretty defn
         ]
  TFFLFD_binding a -> pretty a
-- <tff_let_formula_binding> ::= <fof_plain_atomic_formula> <=>
-- <tff_unitary_formula> | (<tff_let_formula_binding>)
-- Prints a let-formula binding. Per the grammar above, the plain form is
-- "<fof_plain_atomic_formula> <=> <tff_unitary_formula>": the equivalence
-- sign is part of the concrete syntax and must be emitted — the previous
-- version dropped it, producing output a TPTP parser cannot read
-- (cf. 'printTFF_let_term_binding', which emits its "=" sign).
printTFF_let_formula_binding :: TFF_let_formula_binding -> Doc
printTFF_let_formula_binding x = case x of
  TFFLFB_plain paf uf -> fsep [pretty paf, text "<=>", pretty uf]
  TFFLFB_binding a -> parens $ pretty a
-- <tff_sequent> ::= <tff_formula_tuple> <gentzen_arrow>
-- <tff_formula_tuple> | (<tff_sequent>)
-- Prints the two tuples separated by the Gentzen arrow.
printTFF_sequent :: TFF_sequent -> Doc
printTFF_sequent x = case x of
  TFFS_plain t1 t2 -> sepBy gentzen_arrow [pretty t1, pretty t2]
  TFFS_parens a -> parens $ pretty a
-- <tff_formula_tuple> ::= [] | [<tff_formula_tuple_list>]
printTFF_formula_tuple :: TFF_formula_tuple -> Doc
printTFF_formula_tuple x = case x of
  TFF_formula_tuple a -> brackets $ printTFF_formula_tuple_list a
-- <tff_formula_tuple_list> ::= <tff_logic_formula> |
-- <tff_logic_formula>,<tff_formula_tuple_list>
printTFF_formula_tuple_list :: TFF_formula_tuple_list -> Doc
printTFF_formula_tuple_list = sepByCommas . map pretty
-- %----<tff_typed_atom> can appear only at top level
-- <tff_typed_atom> ::= <untyped_atom> : <tff_top_level_type> |
-- (<tff_typed_atom>)
printTFF_typed_atom :: TFF_typed_atom -> Doc
printTFF_typed_atom x = case x of
  TFFTA_plain ua tlt -> fsep [pretty ua <> colon, pretty tlt]
  TFFTA_parens a -> parens $ pretty a
-- <tff_subtype> ::= <untyped_atom> <subtype_sign> <atom>
printTFF_subtype :: TFF_subtype -> Doc
printTFF_subtype x = case x of
  TFF_subtype ua a -> fsep [pretty ua, subtype_sign, pretty a]
-- %----See <thf_top_level_type> for commentary.
-- <tff_top_level_type> ::= <tff_atomic_type> | <tff_mapping_type> |
-- <tf1_quantified_type> | (<tff_top_level_type>)
printTFF_top_level_type :: TFF_top_level_type -> Doc
printTFF_top_level_type x = case x of
  TFFTLT_atomic a -> pretty a
  TFFTLT_mapping a -> pretty a
  TFFTLT_quantified a -> pretty a
  TFFTLT_parens a -> parens $ pretty a
-- <tf1_quantified_type> ::= !> [<tff_variable_list>] : <tff_monotype>
-- Rank-1 polymorphic type: "!> [vars] : monotype".
printTF1_quantified_type :: TF1_quantified_type -> Doc
printTF1_quantified_type x = case x of
  TF1_quantified_type vars t ->
    fsep [ text "!>"
         , brackets (printTFF_variable_list vars) <> colon
         , pretty t
         ]
-- <tff_monotype> ::= <tff_atomic_type> | (<tff_mapping_type>)
printTFF_monotype :: TFF_monotype -> Doc
printTFF_monotype x = case x of
  TFFMT_atomic a -> pretty a
  TFFMT_mapping a -> parens $ pretty a
-- <tff_unitary_type> ::= <tff_atomic_type> | (<tff_xprod_type>)
printTFF_unitary_type :: TFF_unitary_type -> Doc
printTFF_unitary_type x = case x of
  TFFUT_atomic a -> pretty a
  TFFUT_xprod a -> parens $ pretty a
-- <tff_atomic_type> ::= <type_constant> | <defined_type> |
-- <type_functor>(<tff_type_arguments>) | <variable>
printTFF_atomic_type :: TFF_atomic_type -> Doc
printTFF_atomic_type x = case x of
  TFFAT_constant a -> pretty a
  TFFAT_defined a -> pretty a
  TFFAT_functor f args -> pretty f <> parens (printTFF_type_arguments args)
  TFFAT_variable a -> pretty a
-- <tff_type_arguments> ::= <tff_atomic_type> |
-- <tff_atomic_type>,<tff_type_arguments>
printTFF_type_arguments :: TFF_type_arguments -> Doc
printTFF_type_arguments = sepByCommas . map pretty
-- %----For consistency with <thf_unitary_type> (the analogue in thf),
-- %----<tff_atomic_type> should also allow (<tff_atomic_type>), but that causes
-- %----ambiguity.
-- <tff_mapping_type> ::= <tff_unitary_type> <arrow> <tff_atomic_type>
printTFF_mapping_type :: TFF_mapping_type -> Doc
printTFF_mapping_type x = case x of
  TFF_mapping_type ut at -> fsep [pretty ut, arrow, pretty at]
-- <tff_xprod_type> ::= <tff_unitary_type> <star> <tff_atomic_type> |
-- <tff_xprod_type> <star> <tff_atomic_type>
-- The head type followed by the remaining factors, star-separated.
printTFF_xprod_type :: TFF_xprod_type -> Doc
printTFF_xprod_type x = case x of
  TFF_xprod_type ut ats -> sepBy star $ pretty ut : map pretty ats
-- %----TCF formulae.
-- <tcf_formula> ::= <tcf_logic_formula> | <tff_typed_atom>
printTCF_formula :: TCF_formula -> Doc
printTCF_formula x = case x of
  TCFF_logic a -> pretty a
  TCFF_atom a -> pretty a
-- <tcf_logic_formula> ::= <tcf_quantified_formula> | <cnf_formula>
printTCF_logic_formula :: TCF_logic_formula -> Doc
printTCF_logic_formula x = case x of
  TCFLF_quantified a -> pretty a
  TCFLF_cnf a -> pretty a
-- <tcf_quantified_formula> ::= ! [<tff_variable_list>] : <cnf_formula>
-- Universal prefix "! [vars] :" followed by the CNF body.
printTCF_quantified_formula :: TCF_quantified_formula -> Doc
printTCF_quantified_formula x = case x of
  TCF_quantified vars f ->
    fsep [ text "!"
         , brackets (printTFF_variable_list vars) <> colon
         , pretty f
         ]
-- %----FOF formulae.
-- <fof_formula> ::= <fof_logic_formula> | <fof_sequent>
printFOF_formula :: FOF_formula -> Doc
printFOF_formula x = case x of
  FOFF_logic a -> pretty a
  FOFF_sequent a -> pretty a
-- <fof_logic_formula> ::= <fof_binary_formula> | <fof_unitary_formula>
printFOF_logic_formula :: FOF_logic_formula -> Doc
printFOF_logic_formula x = case x of
  FOFLF_binary a -> pretty a
  FOFLF_unitary a -> pretty a
-- %----Future answer variable ideas | <answer_formula>
-- <fof_binary_formula> ::= <fof_binary_nonassoc> | <fof_binary_assoc>
printFOF_binary_formula :: FOF_binary_formula -> Doc
printFOF_binary_formula x = case x of
  FOFBF_nonassoc a -> pretty a
  FOFBF_assoc a -> pretty a
-- %----Only some binary connectives are associative
-- %----There's no precedence among binary connectives
-- <fof_binary_nonassoc> ::= <fof_unitary_formula> <binary_connective>
-- <fof_unitary_formula>
-- Infix layout: left operand, connective, right operand.
printFOF_binary_nonassoc :: FOF_binary_nonassoc -> Doc
printFOF_binary_nonassoc x = case x of
  FOF_binary_nonassoc c f1 f2 ->
    fsep [pretty f1, pretty c, pretty f2]
-- %----Associative connectives & and | are in <binary_assoc>
-- <fof_binary_assoc> ::= <fof_or_formula> | <fof_and_formula>
printFOF_binary_assoc :: FOF_binary_assoc -> Doc
printFOF_binary_assoc x = case x of
  FOFBA_or a -> printFOF_or_formula a
  FOFBA_and a -> printFOF_and_formula a
-- <fof_or_formula> ::= <fof_unitary_formula> <vline> <fof_unitary_formula> |
-- <fof_or_formula> <vline> <fof_unitary_formula>
printFOF_or_formula :: FOF_or_formula -> Doc
printFOF_or_formula = sepBy vline . map pretty
-- <fof_and_formula> ::= <fof_unitary_formula> & <fof_unitary_formula> |
-- <fof_and_formula> & <fof_unitary_formula>
printFOF_and_formula :: FOF_and_formula -> Doc
printFOF_and_formula = sepBy andD . map pretty
-- %----<fof_unitary_formula> are in ()s or do not have a <binary_connective> at
-- %----the top level.
-- <fof_unitary_formula> ::= <fof_quantified_formula> | <fof_unary_formula> |
-- <fof_atomic_formula> | (<fof_logic_formula>)
-- Only the embedded logic formula gets parenthesised; the other
-- alternatives are self-delimiting per the grammar above.
printFOF_unitary_formula :: FOF_unitary_formula -> Doc
printFOF_unitary_formula x = case x of
  FOFUF_quantified a -> pretty a
  FOFUF_unary a -> pretty a
  FOFUF_atomic a -> pretty a
  FOFUF_logic a -> parens $ pretty a
-- <fof_quantified_formula> ::= <fof_quantifier> [<fof_variable_list>] :
-- <fof_unitary_formula>
-- Renders "Q [vars] : formula"; the colon is glued to the bracket group.
printFOF_quantified_formula :: FOF_quantified_formula -> Doc
printFOF_quantified_formula x = case x of
  FOF_quantified_formula q vars f ->
    fsep [ pretty q
         , brackets (printFOF_variable_list vars) <> colon
         , pretty f
         ]
-- <fof_variable_list> ::= <variable> | <variable>,<fof_variable_list>
printFOF_variable_list :: FOF_variable_list -> Doc
printFOF_variable_list = sepByCommas . map pretty
-- <fof_unary_formula> ::= <unary_connective> <fof_unitary_formula> |
-- <fof_infix_unary>
printFOF_unary_formula :: FOF_unary_formula -> Doc
printFOF_unary_formula x = case x of
  FOFUF_connective c f -> fsep [pretty c, pretty f]
  FOFUF_infix a -> pretty a
-- <fof_infix_unary> ::= <fof_term> <infix_inequality> <fof_term>
-- Prints "t1 != t2".
printFOF_infix_unary :: FOF_infix_unary -> Doc
printFOF_infix_unary x = case x of
  FOF_infix_unary t1 t2 -> fsep [pretty t1, infix_inequality, pretty t2]
-- <fof_atomic_formula> ::= <fof_plain_atomic_formula> |
-- <fof_defined_atomic_formula> |
-- <fof_system_atomic_formula>
printFOF_atomic_formula :: FOF_atomic_formula -> Doc
printFOF_atomic_formula x = case x of
  FOFAT_plain a -> pretty a
  FOFAT_defined a -> pretty a
  FOFAT_system a -> pretty a
-- <fof_plain_atomic_formula> ::= <fof_plain_term>
-- <fof_plain_atomic_formula> :== <proposition> | <predicate>(<fof_arguments>)
-- Predicate applications render their argument list in parentheses,
-- directly after the predicate symbol.
printFOF_plain_atomic_formula :: FOF_plain_atomic_formula -> Doc
printFOF_plain_atomic_formula x = case x of
  FOFPAF_proposition a -> pretty a
  FOFPAF_predicate p args -> pretty p <> parens (printFOF_arguments args)
-- <fof_defined_atomic_formula> ::= <fof_defined_plain_formula> |
-- <fof_defined_infix_formula>
printFOF_defined_atomic_formula :: FOF_defined_atomic_formula -> Doc
printFOF_defined_atomic_formula x = case x of
  FOFDAF_plain a -> pretty a
  FOFDAF_infix a -> pretty a
-- <fof_defined_plain_formula> ::= <fof_defined_plain_term>
-- <fof_defined_plain_formula> :== <defined_proposition> |
-- <defined_predicate>(<fof_arguments>)
printFOF_defined_plain_formula :: FOF_defined_plain_formula -> Doc
printFOF_defined_plain_formula x = case x of
  FOFDPF_proposition a -> pretty a
  FOFDPF_predicate p args -> pretty p <> parens (printFOF_arguments args)
-- <fof_defined_infix_formula> ::= <fof_term> <defined_infix_pred> <fof_term>
printFOF_defined_infix_formula :: FOF_defined_infix_formula -> Doc
printFOF_defined_infix_formula x = case x of
  FOF_defined_infix_formula dip t1 t2 -> fsep [pretty t1, pretty dip, pretty t2]
-- %----System terms have system specific interpretations
-- <fof_system_atomic_formula> ::= <fof_system_term>
-- %----<fof_system_atomic_formula>s are used for evaluable predicates that are
-- %----available in particular tools. The predicate names are not controlled
-- %----by the TPTP syntax, so use with due care. The same is true for
-- %----<fof_system_term>s.
-- A system atomic formula is just its underlying system term.
printFOF_system_atomic_formula :: FOF_system_atomic_formula -> Doc
printFOF_system_atomic_formula x = case x of
  FOF_system_atomic_formula a -> pretty a
-- %----FOF terms.
-- <fof_plain_term> ::= <constant> | <functor>(<fof_arguments>)
printFOF_plain_term :: FOF_plain_term -> Doc
printFOF_plain_term x = case x of
  FOFPT_constant a -> pretty a
  FOFPT_functor f args -> pretty f <> parens (printFOF_arguments args)
-- %----Defined terms have TPTP specific interpretations
-- <fof_defined_term> ::= <defined_term> | <fof_defined_atomic_term>
printFOF_defined_term :: FOF_defined_term -> Doc
printFOF_defined_term x = case x of
  FOFDT_term a -> pretty a
  FOFDT_atomic a -> pretty a
-- <fof_defined_atomic_term> ::= <fof_defined_plain_term>
-- %----None yet | <defined_infix_term>
-- Only the plain alternative exists in the AST so far; the commented-out
-- case below is a placeholder for a future <defined_infix_term>.
printFOF_defined_atomic_term :: FOF_defined_atomic_term -> Doc
printFOF_defined_atomic_term x = case x of
  FOFDAT_plain a -> pretty a
  -- | FOFDAT_indix a -> pretty a
-- %----None yet <defined_infix_term> ::= <fof_term> <defined_infix_func> <fof_term>
-- data Defined_infix_term = Defined_infix_term Defined_infix_func FOF_term FOF_term
-- deriving (Show, Ord, Eq, Data, Typeable)
-- %----None yet <defined_infix_func> ::=
-- data Defined_infix_func =
-- <fof_defined_plain_term> ::= <defined_constant> |
-- <defined_functor>(<fof_arguments>)
-- %----Add $tuple for tuples, because [<fof_arguments>] doesn't work.
printFOF_defined_plain_term :: FOF_defined_plain_term -> Doc
printFOF_defined_plain_term x = case x of
  FOFDPT_constant a -> pretty a
  FOFDPT_functor f args -> pretty f <> parens (printFOF_arguments args)
-- %----System terms have system specific interpretations
-- <fof_system_term> ::= <system_constant> | <system_functor>(<fof_arguments>)
printFOF_system_term :: FOF_system_term -> Doc
printFOF_system_term x = case x of
  FOFST_constant a -> pretty a
  FOFST_functor f args -> pretty f <> parens (printFOF_arguments args)
-- %----Arguments recurse back up to terms (this is the FOF world here)
-- <fof_arguments> ::= <fof_term> | <fof_term>,<fof_arguments>
printFOF_arguments :: FOF_arguments -> Doc
printFOF_arguments = sepByCommas . map pretty
-- %----These are terms used as arguments. Not the entry point for terms because
-- %----<fof_plain_term> is also used as <fof_plain_atomic_formula>
-- <fof_term> ::= <fof_function_term> | <variable> |
-- <tff_conditional_term> | <tff_let_term>
-- Entry point for term printing; all alternatives delegate to 'pretty'.
printFOF_term :: FOF_term -> Doc
printFOF_term x = case x of
  FOFT_function a -> pretty a
  FOFT_variable a -> pretty a
  FOFT_conditional a -> pretty a
  FOFT_let a -> pretty a
-- %% DAMN THIS JUST WON'T WORK | <tuple_term>
-- %----<tuple_term> is for TFF only, but it's here because it's used in
-- %----<fof_atomic_formula>, which is also used as <tff_atomic_formula>.
-- % <tuple_term> ::= [] | [<fof_arguments>]
-- <fof_function_term> ::= <fof_plain_term> | <fof_defined_term> |
-- <fof_system_term>
printFOF_function_term :: FOF_function_term -> Doc
printFOF_function_term x = case x of
  FOFFT_plain a -> pretty a
  FOFFT_defined a -> pretty a
  FOFFT_system a -> pretty a
-- %----Conditional terms should be used by only TFF.
-- <tff_conditional_term> ::= $ite_t(<tff_logic_formula>,<fof_term>,<fof_term>)
-- Emits "$ite_t(<cond>,<then>,<else>)" (TFF term-level conditional).
printTFF_conditional_term :: TFF_conditional_term -> Doc
printTFF_conditional_term x = case x of
  TFF_conditional_term f_if t_then t_else ->
    text "$ite_t"
    <> parens (sepByCommas [pretty f_if, pretty t_then, pretty t_else])
-- %----Let terms should be used by only TFF. $let_ft is for use when there is
-- %----a $ite_t in the <fof_term>. See the commentary for $let_tf and $let_ff.
-- <tff_let_term> ::= $let_ft(<tff_let_formula_defns>,<fof_term>) |
-- $let_tt(<tff_let_term_defns>,<fof_term>)
printTFF_let_term :: TFF_let_term -> Doc
printTFF_let_term x = case x of
  TFFLT_formula defns t ->
    text "$let_ft" <> parens (sepByCommas [pretty defns, pretty t])
  TFFLT_term defns t ->
    text "$let_tt" <> parens (sepByCommas [pretty defns, pretty t])
{-
%----This section is the FOFX syntax. Not yet in use.
% <fof_let> ::= := [<fof_let_list>] : <fof_unitary_formula>
% <fof_let_list> ::= <fof_defined_var> |
% <fof_defined_var>,<fof_let_list>
% <fof_defined_var> ::= <variable> := <fof_logic_formula> |
% <variable> :- <fof_term> | (<fof_defined_var>)
%
% <fof_conditional> ::= $ite_f(<fof_logic_formula>,<fof_logic_formula>,
% <fof_logic_formula>)
%
% <fof_conditional_term> ::= $ite_t(<fof_logic_formula>,<fof_term>,<fof_term>)
-}
-- <fof_sequent> ::= <fof_formula_tuple> <gentzen_arrow>
-- <fof_formula_tuple> | (<fof_sequent>)
-- NOTE(review): this builds the arrow as an explicit fsep element, while
-- the THF/TFF sequent printers use sepBy gentzen_arrow — presumably the
-- rendered output is equivalent; confirm before unifying the style.
printFOF_sequent :: FOF_sequent -> Doc
printFOF_sequent x = case x of
  FOFS_plain t1 t2 -> fsep [pretty t1, gentzen_arrow, pretty t2]
  FOFS_parens a -> parens $ pretty a
-- <fof_formula_tuple> ::= [] | [<fof_formula_tuple_list>]
printFOF_formula_tuple :: FOF_formula_tuple -> Doc
printFOF_formula_tuple x = case x of
  FOF_formula_tuple a -> brackets $ printFOF_formula_tuple_list a
-- <fof_formula_tuple_list> ::= <fof_logic_formula> |
-- <fof_logic_formula>,<fof_formula_tuple_list>
printFOF_formula_tuple_list :: FOF_formula_tuple_list -> Doc
printFOF_formula_tuple_list = sepByCommas . map pretty
-- %----CNF formulae (variables implicitly universally quantified)
-- <cnf_formula> ::= <disjunction> | (<disjunction>)
printCNF_formula :: CNF_formula -> Doc
printCNF_formula x = case x of
  CNFF_plain a -> pretty a
  CNFF_parens a -> parens $ pretty a
-- <disjunction> ::= <literal> | <disjunction> <vline> <literal>
-- Prints a CNF clause: its literals joined by the "|" separator.
printDisjunction :: Disjunction -> Doc
printDisjunction (Disjunction lits) = sepBy vline (map pretty lits)
-- <literal> ::= <fof_atomic_formula> | ~ <fof_atomic_formula> |
-- <fof_infix_unary>
-- Prints a single CNF literal; negative literals get a "~" prefix.
printLiteral :: Literal -> Doc
printLiteral (Lit_atomic f) = pretty f
printLiteral (Lit_negative f) = text "~" <+> pretty f
printLiteral (Lit_fof_infix f) = pretty f
-- %----Connectives - THF
-- <thf_quantifier> ::= <fof_quantifier> | <th0_quantifier> |
-- <th1_quantifier>
printTHF_quantifier :: THF_quantifier -> Doc
printTHF_quantifier x = case x of
  THFQ_fof a -> pretty a
  THFQ_th0 a -> pretty a
  THFQ_th1 a -> pretty a
-- %----TH0 quantifiers are also available in TH1
-- <th1_quantifier> ::= !> | ?*
-- Dependent product / dependent sum quantifier tokens.
printTH1_quantifier :: TH1_quantifier -> Doc
printTH1_quantifier x = case x of
  TH1_DependentProduct -> text "!>"
  TH1_DependentSum -> text "?*"
-- <th0_quantifier> ::= ^ | @+ | @-
-- Lambda binder and the (in)definite description operators.
printTH0_quantifier :: TH0_quantifier -> Doc
printTH0_quantifier x = case x of
  TH0_LambdaBinder -> text "^"
  TH0_IndefiniteDescription -> text "@+"
  TH0_DefiniteDescription -> text "@-"
-- <thf_pair_connective> ::= <infix_equality> | <infix_inequality> |
-- <binary_connective> | <assignment>
-- The nullary alternatives reuse the shared operator Docs defined
-- elsewhere in this module (infix_equality, assignment, ...).
printTHF_pair_connective :: THF_pair_connective -> Doc
printTHF_pair_connective x = case x of
  THF_infix_equality -> infix_equality
  Infix_inequality -> infix_inequality
  THFPC_binary a -> pretty a
  THF_assignment -> assignment
-- <thf_unary_connective> ::= <unary_connective> | <th1_unary_connective>
printTHF_unary_connective :: THF_unary_connective -> Doc
printTHF_unary_connective x = case x of
  THFUC_unary a -> pretty a
  THFUC_th1 a -> pretty a
-- <th1_unary_connective> ::= !! | ?? | @@+ | @@- | @=
printTH1_unary_connective :: TH1_unary_connective -> Doc
printTH1_unary_connective x = case x of
  TH1_PiForAll -> text "!!"
  TH1_PiSigmaExists -> text "??"
  TH1_PiIndefiniteDescription -> text "@@+"
  TH1_PiDefiniteDescription -> text "@@-"
  TH1_PiEquality -> text "@="
-- %----Connectives - TFF
-- % <tff_pair_connective> ::= <binary_connective> | <assignment>
-- Note: not used
-- data TFF_pair_connective = TFFPC_binary Binary_connective
-- | TFFPC_assignment TFF_assignment
-- deriving (Show, Ord, Eq, Data, Typeable)
-- <subtype_sign> ::= <less_sign><less_sign>
-- Renders the subtype sign as two adjacent less-signs ("<<").
subtype_sign :: Doc
subtype_sign = less_sign <> less_sign
-- %----Connectives - FOF
-- <fof_quantifier> ::= ! | ?
printFOF_quantifier :: FOF_quantifier -> Doc
printFOF_quantifier x = case x of
  ForAll -> text "!"
  Exists -> text "?"
-- <binary_connective> ::= <=> | => | <= | <~> | ~<vline> | ~&
-- One literal TPTP token per constructor.
printBinary_connective :: Binary_connective -> Doc
printBinary_connective x = case x of
  Equivalence -> text "<=>"
  Implication -> text "=>"
  ReverseImplication -> text "<="
  XOR -> text "<~>"
  NOR -> text "~|"
  NAND -> text "~&"
-- <assoc_connective> ::= <vline> | &
printAssoc_connective :: Assoc_connective -> Doc
printAssoc_connective x = case x of
  OR -> text "|"
  AND -> text "&"
-- <unary_connective> ::= ~
printUnary_connective :: Unary_connective -> Doc
printUnary_connective x = case x of
  NOT -> text "~"
-- %----Types for THF and TFF
-- <type_constant> ::= <type_functor>
-- already has a pretty instance (Token)
-- <type_functor> ::= <atomic_word>
-- already has a pretty instance (Token)
-- <defined_type> ::= <atomic_defined_word>
-- <defined_type> :== $oType | $o | $iType | $i | $tType |
-- $real | $rat | $int
-- Prints the TPTP defined-type keywords, one "$"-prefixed token each.
printDefined_type :: Defined_type -> Doc
printDefined_type x = case x of
  OType -> text "$oType"
  O -> text "$o"
  IType -> text "$iType"
  I -> text "$i"
  TType -> text "$tType"
  Real -> text "$real"
  Rat -> text "$rat"
  Int -> text "$int"
-- <system_type> :== <atomic_system_word>
-- Note: not used
-- type System_type = Token
-- %----For all language types
-- <atom> ::= <untyped_atom> | <defined_constant>
-- | Render an atom: either an untyped atom or a defined constant.
printAtom :: Atom -> Doc
printAtom (Atom_untyped ua) = pretty ua
printAtom (Atom_constant dc) = pretty dc
-- <untyped_atom> ::= <constant> | <system_constant>
-- | Render an untyped atom: a plain or a system constant.
printUntyped_atom :: Untyped_atom -> Doc
printUntyped_atom (UA_constant c) = pretty c
printUntyped_atom (UA_system s) = pretty s
-- Proposition
-- already has a pretty instance (Token)
-- Predicate
-- already has a pretty instance (Token)
-- <defined_proposition> :== <atomic_defined_word>
-- <defined_proposition> :== $true | $false
-- | Render the predefined propositions $true and $false.
printDefined_proposition :: Defined_proposition -> Doc
printDefined_proposition TPTP_true = text "$true"
printDefined_proposition TPTP_false = text "$false"
-- <defined_predicate> :== <atomic_defined_word>
-- <defined_predicate> :== $distinct |
-- $less | $lesseq | $greater | $greatereq |
-- $is_int | $is_rat |
-- $box_P | $box_i | $box_int | $box |
-- $dia_P | $dia_i | $dia_int | $dia
-- %----$distinct means that each of it's constant arguments are pairwise !=. It
-- %----is part of the TFF syntax. It can be used only as a fact, not under any
-- %----connective.
-- | Render a predefined ("dollar-word") predicate symbol.
printDefined_predicate :: Defined_predicate -> Doc
printDefined_predicate Distinct = text "$distinct"
printDefined_predicate Less = text "$less"
printDefined_predicate Lesseq = text "$lesseq"
printDefined_predicate Greater = text "$greater"
printDefined_predicate Greatereq = text "$greatereq"
printDefined_predicate Is_int = text "$is_int"
printDefined_predicate Is_rat = text "$is_rat"
printDefined_predicate Box_P = text "$box_P"
printDefined_predicate Box_i = text "$box_i"
printDefined_predicate Box_int = text "$box_int"
printDefined_predicate Box = text "$box"
printDefined_predicate Dia_P = text "$dia_P"
printDefined_predicate Dia_i = text "$dia_i"
printDefined_predicate Dia_int = text "$dia_int"
printDefined_predicate Dia = text "$dia"
-- <defined_infix_pred> ::= <infix_equality> | <assignment>
-- <infix_equality> ::= =
-- <infix_inequality> ::= !=
-- | Render a defined infix predicate: equality "=" or assignment ":=".
printDefined_infix_pred :: Defined_infix_pred -> Doc
printDefined_infix_pred Defined_infix_equality = infix_equality
printDefined_infix_pred Defined_assignment = assignment
-- <constant> ::= <functor>
-- already has a pretty instance (Token)
-- <functor> ::= <atomic_word>
-- already has a pretty instance (Token)
-- <system_constant> ::= <system_functor>
-- already has a pretty instance (Token)
-- <system_functor> ::= <atomic_system_word>
-- already has a pretty instance (Token)
-- <defined_constant> ::= <defined_functor>
-- already has a pretty instance (Token)
-- <defined_functor> ::= <atomic_defined_word>
-- <defined_functor> :== $uminus | $sum | $difference | $product |
-- $quotient | $quotient_e | $quotient_t | $quotient_f |
-- $remainder_e | $remainder_t | $remainder_f |
-- $floor | $ceiling | $truncate | $round |
-- $to_int | $to_rat | $to_real
-- | Render a predefined (arithmetic/conversion) functor symbol.
printDefined_functor :: Defined_functor -> Doc
printDefined_functor Uminus = text "$uminus"
printDefined_functor Sum = text "$sum"
printDefined_functor Difference = text "$difference"
printDefined_functor Product = text "$product"
printDefined_functor Quotient = text "$quotient"
printDefined_functor Quotient_e = text "$quotient_e"
printDefined_functor Quotient_t = text "$quotient_t"
printDefined_functor Quotient_f = text "$quotient_f"
printDefined_functor Remainder_e = text "$remainder_e"
printDefined_functor Remainder_t = text "$remainder_t"
printDefined_functor Remainder_f = text "$remainder_f"
printDefined_functor Floor = text "$floor"
printDefined_functor Ceiling = text "$ceiling"
printDefined_functor Truncate = text "$truncate"
printDefined_functor Round = text "$round"
printDefined_functor To_int = text "$to_int"
printDefined_functor To_rat = text "$to_rat"
printDefined_functor To_real = text "$to_real"
printDefined_functor (DF_atomic_defined_word w) = pretty w
-- <defined_term> ::= <number> | <distinct_object>
-- | Render a defined term: a number or a distinct object.
printDefined_term :: Defined_term -> Doc
printDefined_term (DT_number n) = pretty n
printDefined_term (DT_object o) = pretty o
-- <variable> ::= <upper_word>
-- already has a pretty instance (Token)
-- %----Formula sources
-- <source> ::= <general_term>
-- <source> :== <dag_source> | <internal_source> |
-- <external_source> | unknown | [<sources>]
-- | Render a formula source annotation.
printSource :: Source -> Doc
printSource (Source_DAG dag) = pretty dag
printSource (Source_internal int) = pretty int
printSource (Source_external ext) = pretty ext
printSource Unknown_source = text "unknown"
printSource (Source_many srcs) = brackets (printSources srcs)
-- %----Alternative sources are recorded like this, thus allowing representation
-- %----of alternative derivations with shared parts.
-- <sources> :== <source> | <source>,<sources>
-- | Render alternative sources as a comma-separated list.
printSources :: Sources -> Doc
printSources srcs = sepByCommas (map pretty srcs)
-- %----Only a <dag_source> can be a <name>, i.e., derived formulae can be
-- %----identified by a <name> or an <inference_record>
-- <dag_source> :== <name> | <inference_record>
-- | Render a DAG source: a name or an inference record.
printDAG_source :: DAG_source -> Doc
printDAG_source (DAGS_name n) = pretty n
printDAG_source (DAGS_record r) = pretty r
-- <inference_record> :== inference(<inference_rule>,<useful_info>,
-- <inference_parents>)
-- | Render inference(<rule>,<useful_info>,<parents>).
printInference_record :: Inference_record -> Doc
printInference_record (Inference_record rule info parents) =
  text "inference"
  <> parens (sepByCommas [pretty rule, pretty info, printInference_parents parents])
-- <inference_rule> :== <atomic_word>
-- %----Examples are deduction | modus_tollens | modus_ponens | rewrite |
-- % resolution | paramodulation | factorization |
-- % cnf_conversion | cnf_refutation | ...
-- | Render an inference rule name (an atomic word).
printInference_rule :: Inference_rule -> Doc
printInference_rule rule = pretty rule
-- %----<inference_parents> can be empty in cases when there is a justification
-- %----for a tautologous theorem. In case when a tautology is introduced as
-- %----a leaf, e.g., for splitting, then use an <internal_source>.
-- <inference_parents> :== [] | [<parent_list>]
-- | Render the (possibly empty) bracketed list of inference parents.
printInference_parents :: Inference_parents -> Doc
printInference_parents parents = brackets (printParent_list parents)
-- <parent_list> :== <parent_info> | <parent_info>,<parent_list>
-- | Render parent infos separated by commas.
printParent_list :: Parent_list -> Doc
printParent_list infos = sepByCommas (map pretty infos)
-- <parent_info> :== <source><parent_details>
-- | Render a parent source followed by its optional details.
printParent_info :: Parent_info -> Doc
printParent_info (Parent_info src details) =
  fsep [pretty src, printParent_details details]
-- <parent_details> :== :<general_list> | <null>
-- | Render optional parent details; absent details print as nothing.
printParent_details :: Parent_details -> Doc
printParent_details = maybe empty printGeneral_list
-- <internal_source> :== introduced(<intro_type><optional_info>)
-- | Render introduced(<intro_type><optional_info>).
printInternal_source :: Internal_source -> Doc
printInternal_source (Internal_source introType optInfo) =
  text "introduced" <> parens (fsep [pretty introType, pretty optInfo])
-- <intro_type> :== definition | axiom_of_choice | tautology | assumption
-- %----This should be used to record the symbol being defined, or the function
-- %----for the axiom of choice
-- | Render the reason a formula was introduced.
printIntro_type :: Intro_type -> Doc
printIntro_type IntroTypeDefinition = text "definition"
printIntro_type AxiomOfChoice = text "axiom_of_choice"
printIntro_type Tautology = text "tautology"
printIntro_type IntroTypeAssumption = text "assumption"
-- <external_source> :== <file_source> | <theory> | <creator_source>
-- | Render an external source: file, theory or creator.
printExternal_source :: External_source -> Doc
printExternal_source (ExtSrc_file f) = pretty f
printExternal_source (ExtSrc_theory t) = pretty t
printExternal_source (ExtSrc_creator c) = pretty c
-- <file_source> :== file(<file_name><file_info>)
-- | Render file('<name>'<file_info>); the file name is single-quoted.
printFile_source :: File_source -> Doc
printFile_source (File_source fname finfo) =
  text "file"
  <> parens (fsep [text "'" <> pretty fname <> text "'", printFile_info finfo])
-- <file_info> :== ,<name> | <null>
-- | Render the optional ",'<name>'" suffix of a file source.
printFile_info :: File_info -> Doc
printFile_info = maybe empty quoted
  where quoted n = fsep [comma, text "'" <> pretty n <> text "'"]
-- <theory> :== theory(<theory_name><optional_info>)
-- | Render theory(<theory_name><optional_info>).
printTheory :: Theory -> Doc
printTheory (Theory tname optInfo) =
  text "theory" <> parens (fsep [pretty tname, pretty optInfo])
-- <theory_name> :== equality | ac
-- | Render a built-in theory name.
printTheory_name :: Theory_name -> Doc
printTheory_name TN_equality = text "equality"
printTheory_name TN_ac = text "ac"
-- %----More theory names may be added in the future. The <optional_info> is
-- %----used to store, e.g., which axioms of equality have been implicitly used,
-- %----e.g., theory(equality,[rst]). Standard format still to be decided.
-- <creator_source> :== creator(<creator_name><optional_info>)
-- | Render creator(<creator_name><optional_info>).
printCreator_source :: Creator_source -> Doc
printCreator_source (Creator_source cname optInfo) =
  text "creator" <> parens (fsep [printCreator_name cname, pretty optInfo])
-- <creator_name> :== <atomic_word>
-- | Render a creator name (an atomic word).
printCreator_name :: Creator_name -> Doc
printCreator_name cname = pretty cname
-- %----Useful info fields
-- <optional_info> ::= ,<useful_info> | <null>
-- | Render optional useful info, preceded by a comma when present.
printOptional_info :: Optional_info -> Doc
printOptional_info Nothing = empty
printOptional_info (Just usefulInfo) = comma <+> pretty usefulInfo
-- <useful_info> ::= <general_list>
-- <useful_info> :== [] | [<info_items>]
-- | Render useful info: a bracketed item list or a general list.
printUseful_info :: Useful_info -> Doc
printUseful_info (UI_items items) = brackets (printInfo_items items)
printUseful_info (UI_general_list gl) = pretty gl
-- <info_items> :== <info_item> | <info_item>,<info_items>
-- | Render info items separated by commas.
printInfo_items :: Info_items -> Doc
printInfo_items items = sepByCommas (map pretty items)
-- <info_item> :== <formula_item> | <inference_item> |
-- <general_function>
-- | Render a single info item.
printInfo_item :: Info_item -> Doc
printInfo_item (Info_formula f) = pretty f
printInfo_item (Info_inference i) = pretty i
printInfo_item (Info_general g) = pretty g
-- %----Useful info for formula records
-- <formula_item> :== <description_item> | <iquote_item>
-- | Render useful info attached to a formula record.
printFormula_item :: Formula_item -> Doc
printFormula_item (FI_description d) = printDescription_item d
printFormula_item (FI_iquote q) = printIquote_item q
-- <description_item> :== description(<atomic_word>)
-- | Render description(<atomic_word>).
printDescription_item :: Description_item -> Doc
printDescription_item word = text "description" <> parens (pretty word)
-- <iquote_item> :== iquote(<atomic_word>)
-- %----<iquote_item>s are used for recording exactly what the system output about
-- %----the inference step. In the future it is planned to encode this information
-- %----in standardized forms as <parent_details> in each <inference_record>.
-- %----Useful info for inference records
-- | Render iquote(<atomic_word>).
printIquote_item :: Iquote_item -> Doc
printIquote_item word = text "iquote" <> parens (pretty word)
-- <inference_item> :== <inference_status> | <assumptions_record> |
-- <new_symbol_record> | <refutation>
-- | Render useful info attached to an inference record.
printInference_item :: Inference_item -> Doc
printInference_item (Inf_status s) = pretty s
printInference_item (Inf_assumption a) = printAssumptions_record a
printInference_item (Inf_symbol s) = pretty s
printInference_item (Inf_refutation r) = printRefutation r
-- <inference_status> :== status(<status_value>) | <inference_info>
-- | Render status(<status_value>) or free-form inference info.
printInference_status :: Inference_status -> Doc
printInference_status (Inf_value sv) = text "status" <> parens (pretty sv)
printInference_status (Inf_info i) = pretty i
-- %----These are the success status values from the SZS ontology. The most
-- %----commonly used values are:
-- %---- thm - Every model of the parent formulae is a model of the inferred
-- %---- formula. Regular logical consequences.
-- %---- cth - Every model of the parent formulae is a model of the negation of
-- %---- the inferred formula. Used for negation of conjectures in FOF to
-- %---- CNF conversion.
-- %---- esa - There exists a model of the parent formulae iff there exists a
-- %---- model of the inferred formula. Used for Skolemization steps.
-- %----For the full hierarchy see the SZSOntology file distributed with the TPTP.
-- <status_value> :== suc | unp | sap | esa | sat | fsa | thm | eqv | tac |
-- wec | eth | tau | wtc | wth | cax | sca | tca | wca |
-- cup | csp | ecs | csa | cth | ceq | unc | wcc | ect |
-- fun | uns | wuc | wct | scc | uca | noc
-- | Render an SZS ontology success-status value; each constructor
-- prints as its lower-case three-letter code.
printStatus_value :: Status_value -> Doc
printStatus_value SUC = text "suc"
printStatus_value UNP = text "unp"
printStatus_value SAP = text "sap"
printStatus_value ESA = text "esa"
printStatus_value SAT = text "sat"
printStatus_value FSA = text "fsa"
printStatus_value THM = text "thm"
printStatus_value EQV = text "eqv"
printStatus_value TAC = text "tac"
printStatus_value WEC = text "wec"
printStatus_value ETH = text "eth"
printStatus_value TAU = text "tau"
printStatus_value WTC = text "wtc"
printStatus_value WTH = text "wth"
printStatus_value CAX = text "cax"
printStatus_value SCA = text "sca"
printStatus_value TCA = text "tca"
printStatus_value WCA = text "wca"
printStatus_value CUP = text "cup"
printStatus_value CSP = text "csp"
printStatus_value ECS = text "ecs"
printStatus_value CSA = text "csa"
printStatus_value CTH = text "cth"
printStatus_value CEQ = text "ceq"
printStatus_value UNC = text "unc"
printStatus_value WCC = text "wcc"
printStatus_value ECT = text "ect"
printStatus_value FUN = text "fun"
printStatus_value UNS = text "uns"
printStatus_value WUC = text "wuc"
printStatus_value WCT = text "wct"
printStatus_value SCC = text "scc"
printStatus_value UCA = text "uca"
printStatus_value NOC = text "noc"
-- %----<inference_info> is used to record standard information associated with an
-- %----arbitrary inference rule. The <inference_rule> is the same as the
-- %----<inference_rule> of the <inference_record>. The <atomic_word> indicates
-- %----the information being recorded in the <general_list>. The <atomic_word>
-- %----are (loosely) set by TPTP conventions, and include esplit, sr_split, and
-- %----discharge.
-- <inference_info> :== <inference_rule>(<atomic_word>,<general_list>)
-- | Render <inference_rule>(<atomic_word>,<general_list>).
printInference_info :: Inference_info -> Doc
printInference_info (Inference_info rule aword glist) =
  printInference_rule rule
  <> parens (sepByCommas [pretty aword, printGeneral_list glist])
-- %----An <assumptions_record> lists the names of assumptions upon which this
-- %----inferred formula depends. These must be discharged in a completed proof.
-- <assumptions_record> :== assumptions([<name_list>])
-- | Render assumptions([<name_list>]).
printAssumptions_record :: Assumptions_record -> Doc
printAssumptions_record names =
  text "assumptions" <> parens (brackets (printName_list names))
-- %----A <refutation> record names a file in which the inference recorded here
-- %----is recorded as a proof by refutation.
-- <refutation> :== refutation(<file_source>)
-- | Render refutation(<file_source>).
printRefutation :: Refutation -> Doc
printRefutation fileSrc = text "refutation" <> parens (printFile_source fileSrc)
-- %----A <new_symbol_record> provides information about a newly introduced symbol.
-- <new_symbol_record> :== new_symbols(<atomic_word>,[<new_symbol_list>])
-- | Render new_symbols(<atomic_word>,[<new_symbol_list>]).
printNew_symbol_record :: New_symbol_record -> Doc
printNew_symbol_record (New_symbol_record aword symbols) =
  text "new_symbols"
  <> parens (sepByCommas [pretty aword, brackets (printNew_symbol_list symbols)])
-- <new_symbol_list> :== <principal_symbol> |
-- <principal_symbol>,<new_symbol_list>
-- | Render principal symbols separated by commas.
printNew_symbol_list :: New_symbol_list -> Doc
printNew_symbol_list symbols = sepByCommas (map pretty symbols)
-- %----Principal symbols are predicates, functions, variables
-- <principal_symbol> :== <functor> | <variable>
-- | Render a principal symbol: a functor or a variable.
printPrincipal_symbol :: Principal_symbol -> Doc
printPrincipal_symbol (PS_functor f) = pretty f
printPrincipal_symbol (PS_variable v) = pretty v
-- %----Include directives
-- <include> ::= include(<file_name><formula_selection>).
-- | Render include(<file_name><formula_selection>).
printInclude :: Include -> Doc
printInclude (Include fname selection) =
  text "include" <> parens (fsep [pretty fname, printFormula_selection selection])
-- <formula_selection> ::= ,[<name_list>] | <null>
-- | Render an optional ",[<name_list>]" selection suffix.
printFormula_selection :: Formula_selection -> Doc
printFormula_selection Nothing = empty
printFormula_selection (Just names) = fsep [comma, brackets (printName_list names)]
-- <name_list> ::= <name> | <name>,<name_list>
-- | Render names separated by commas.
printName_list :: Name_list -> Doc
printName_list names = sepByCommas (map pretty names)
-- %----Non-logical data
-- <general_term> ::= <general_data> | <general_data>:<general_term> |
-- <general_list>
-- | Render a general term: data, "data:term", or a general list.
printGeneral_term :: General_term -> Doc
printGeneral_term (GT_data gd) = pretty gd
printGeneral_term (GT_DataTerm gd gt) = fsep [pretty gd <> colon, pretty gt]
printGeneral_term (GT_list gl) = printGeneral_list gl
-- <general_data> ::= <atomic_word> | <general_function> |
-- <variable> | <number> | <distinct_object> |
-- <formula_data>
-- | Render one alternative of the <general_data> production.
printGeneral_data :: General_data -> Doc
printGeneral_data (GD_atomic_word a) = pretty a
printGeneral_data (GD_general_function a) = pretty a
printGeneral_data (GD_variable a) = pretty a
printGeneral_data (GD_number a) = pretty a
printGeneral_data (GD_distinct_object a) = pretty a
printGeneral_data (GD_formula_data a) = pretty a
-- %----A <general_data> bind() term is used to record a variable binding in an
-- %----inference, as an element of the <parent_details> list.
-- <general_data> :== bind(<variable>,<formula_data>)
printGeneral_data (GD_bind var fdata) =
  text "bind" <> parens (sepByCommas [pretty var, pretty fdata])
-- <general_function> ::= <atomic_word>(<general_terms>)
-- | Render <atomic_word>(<general_terms>).
printGeneral_function :: General_function -> Doc
printGeneral_function (General_function aword terms) =
  pretty aword <> parens (printGeneral_terms terms)
-- <formula_data> ::= $thf(<thf_formula>) | $tff(<tff_formula>) |
-- $fof(<fof_formula>) | $cnf(<cnf_formula>) |
-- $fot(<fof_term>)
-- only used in inference
-- | Render formula data embedded in non-logical annotations.
printFormula_data :: Formula_data -> Doc
printFormula_data (FD_THF f) = pretty f
printFormula_data (FD_TFF f) = pretty f
printFormula_data (FD_FOF f) = pretty f
printFormula_data (FD_CNF f) = pretty f
printFormula_data (FD_FOT f) = pretty f
-- <general_list> ::= [] | [<general_terms>]
-- | Render a general list in square brackets.
printGeneral_list :: General_list -> Doc
printGeneral_list terms = brackets (pretty terms)
-- <general_terms> ::= <general_term> | <general_term>,<general_terms>
-- | Render general terms separated by commas.
printGeneral_terms :: General_terms -> Doc
printGeneral_terms terms = sepByCommas (map pretty terms)
-- %----General purpose
-- <name> ::= <atomic_word> | <integer>
-- %----Integer names are expected to be unsigned
-- | Render a name: an atomic word or an (unsigned) integer.
printName :: Name -> Doc
printName (NameString s) = pretty s
printName (NameInteger i) = pretty i
-- <atomic_word> ::= <lower_word> | <single_quoted>
-- already has a pretty instance (Token)
-- <atomic_defined_word> ::= <dollar_word>
-- already has a pretty instance (Token)
-- <atomic_system_word> ::= <dollar_dollar_word>
-- already has a pretty instance (Token)
-- <number> ::= <integer> | <rational> | <real>
-- | Render a number via its 'Show' instance.
printNumber :: Number -> Doc
printNumber (NumInteger i) = text (show i)
printNumber (NumRational r) = text (show r)
printNumber (NumReal r) = text (show r)
-- <distinct_object> ::- <double_quote><do_char>*<double_quote>
-- already has a pretty instance (Token)
-- <file_name> ::= <single_quoted>
-- already has a pretty instance (IRI)
{- -----------------------------------------------------------------------------
Tokens used in syntax
----------------------------------------------------------------------------- -}
-- <gentzen_arrow> ::= -->
gentzen_arrow :: Doc
gentzen_arrow = text "-->"
-- <assignment> ::= :=
assignment :: Doc
assignment = text ":="
-- <infix_equality> ::= =
infix_equality :: Doc
infix_equality = text "="
-- <infix_inequality> ::= !=
infix_inequality :: Doc
infix_inequality = text "!="
-- the "|" token
vline :: Doc
vline = text "|"
-- the "*" token
star :: Doc
star = text "*"
-- the "+" token
plus :: Doc
plus = text "+"
-- the ">" token
arrow :: Doc
arrow = text ">"
-- the "<" token
less_sign :: Doc
less_sign = text "<"
-- the "&" token
andD :: Doc
andD = text "&"
-- the "@" token
atD :: Doc
atD = text "@"
{- -----------------------------------------------------------------------------
Helper functions
----------------------------------------------------------------------------- -}
-- | Join documents with the given delimiter between neighbours;
-- an empty input yields the empty document.
sepBy :: Doc -> [Doc] -> Doc
sepBy _ [] = empty
sepBy delim (doc : docs) = fsep (doc : concatMap (\ d -> [delim, d]) docs)
| spechub/Hets | TPTP/Pretty.hs | gpl-2.0 | 84,149 | 0 | 20 | 17,616 | 14,713 | 7,451 | 7,262 | 1,277 | 34 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
module Language.SexpGrammar.Base
( SexpGrammar (..)
, AtomGrammar (..)
, SeqGrammar (..)
, PropGrammar (..)
, runParse
, runGen
, SexpG
, SexpG_
, module Data.InvertibleGrammar
) where
#if defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL__ < 710
import Control.Applicative
#endif
import Control.Monad.State
import Data.Map (Map)
import qualified Data.Map as M
#if !MIN_VERSION_base(4,8,0)
import Data.Monoid
#endif
import Data.Scientific
import Data.Text (Text)
import qualified Data.Text.Lazy as Lazy
import Data.InvertibleGrammar
import Data.InvertibleGrammar.Monad
import Language.Sexp.Pretty (prettySexp)
import Language.Sexp.Types
-- | Grammar which matches Sexp to a value of type a and vice versa.
type SexpG a = forall t. Grammar SexpGrammar (Sexp :- t) (a :- t)
-- | Grammar which pattern matches Sexp and produces nothing, or
-- consumes nothing but generates some Sexp.
type SexpG_ = forall t. Grammar SexpGrammar (Sexp :- t) t
-- | Fail with a bare "unexpected <msg>" grammar error.
unexpectedStr :: (MonadContextError (Propagation Position) (GrammarError Position) m) => Text -> m a
unexpectedStr = grammarError . unexpected
-- | Fail with "expected <expectation>, unexpected <pretty sexp>".
unexpectedSexp :: (MonadContextError (Propagation Position) (GrammarError Position) m) => Text -> Sexp -> m a
unexpectedSexp expectation got =
  grammarError (expected expectation `mappend` unexpected (Lazy.toStrict (prettySexp got)))
-- | Fail when a specific atom was expected but a different one was seen.
-- Both atoms are pretty-printed with a dummy position for the message.
unexpectedAtom :: (MonadContextError (Propagation Position) (GrammarError Position) m) => Atom -> Atom -> m a
unexpectedAtom expectedAtom gotAtom =
  -- Parameter renamed from "expected": it shadowed the imported
  -- 'expected' combinator; the single-statement 'do' was redundant.
  unexpectedSexp
    (Lazy.toStrict $ prettySexp (Atom dummyPos expectedAtom))
    (Atom dummyPos gotAtom)
-- | Fail when an atom of a named type (e.g. "int") was expected but a
-- different atom was seen.
unexpectedAtomType :: (MonadContextError (Propagation Position) (GrammarError Position) m) => Text -> Atom -> m a
unexpectedAtomType expectedType gotAtom =
  -- Parameter renamed from "expected": it shadowed the imported
  -- 'expected' combinator; the single-statement 'do' was redundant.
  unexpectedSexp ("atom of type " `mappend` expectedType) (Atom dummyPos gotAtom)
----------------------------------------------------------------------
-- Top-level grammar
-- | Top-level grammar over a whole 'Sexp': extract its source
-- position, or descend into an atom, list or vector with a nested
-- grammar.
data SexpGrammar a b where
  -- | Push the focused Sexp's position onto the stack (Sexp stays).
  GPos  :: SexpGrammar (Sexp :- t) (Position :- Sexp :- t)
  -- | Match an atom and delegate to an atom-level grammar.
  GAtom :: Grammar AtomGrammar (Atom :- t) t' -> SexpGrammar (Sexp :- t) t'
  -- | Match a list and run a sequence grammar over its elements.
  GList :: Grammar SeqGrammar t t' -> SexpGrammar (Sexp :- t) t'
  -- | Match a vector and run a sequence grammar over its elements.
  GVect :: Grammar SeqGrammar t t' -> SexpGrammar (Sexp :- t) t'
instance
  ( MonadPlus m
  , MonadContextError (Propagation Position) (GrammarError Position) m
  ) => InvertibleGrammar m SexpGrammar where
  -- Parsing direction: Sexp -> value.  On a shape mismatch the error
  -- is located at the offending Sexp's position.
  forward GPos (s :- t) =
    return (getPos s :- s :- t)
  forward (GAtom g) (s :- t) =
    case s of
      Atom p a -> dive $ locate p >> forward g (a :- t)
      other -> locate (getPos other) >> unexpectedSexp "atom" other
  forward (GList g) (s :- t) = do
    case s of
      List p xs -> dive $ locate p >> parseSequence xs g t
      other -> locate (getPos other) >> unexpectedSexp "list" other
  forward (GVect g) (s :- t) = do
    case s of
      Vector p xs -> dive $ locate p >> parseSequence xs g t
      other -> locate (getPos other) >> unexpectedSexp "vector" other
  -- Generation direction: value -> Sexp.  Positions cannot be
  -- reconstructed, so every produced node carries 'dummyPos'.
  backward GPos (_ :- s :- t) =
    return (s :- t)
  backward (GAtom g) t = do
    (a :- t') <- dive $ backward g t
    return (Atom dummyPos a :- t')
  -- For lists/vectors the nested grammar deposits the generated
  -- elements into a fresh 'SeqCtx' state, collected here.
  backward (GList g) t = do
    (t', SeqCtx xs) <- runStateT (dive $ backward g t) (SeqCtx [])
    return (List dummyPos xs :- t')
  backward (GVect g) t = do
    (t', SeqCtx xs) <- runStateT (dive $ backward g t) (SeqCtx [])
    return (Vector dummyPos xs :- t')
----------------------------------------------------------------------
-- Atom grammar
-- | Grammar over a single atom: match a fixed symbol/keyword, or
-- extract a typed payload (bool, int, real, string, symbol, keyword).
data AtomGrammar a b where
  GSym     :: Text -> AtomGrammar (Atom :- t) t
  GKw      :: Kw -> AtomGrammar (Atom :- t) t
  GBool    :: AtomGrammar (Atom :- t) (Bool :- t)
  GInt     :: AtomGrammar (Atom :- t) (Integer :- t)
  GReal    :: AtomGrammar (Atom :- t) (Scientific :- t)
  GString  :: AtomGrammar (Atom :- t) (Text :- t)
  GSymbol  :: AtomGrammar (Atom :- t) (Text :- t)
  GKeyword :: AtomGrammar (Atom :- t) (Kw :- t)
instance
  ( MonadPlus m
  , MonadContextError (Propagation Position) (GrammarError Position) m
  ) => InvertibleGrammar m AtomGrammar where
  -- Parsing direction: check the atom's shape and push its payload
  -- (if any); mismatches report the expected atom or atom type.
  forward (GSym sym') (atom :- t) =
    case atom of
      AtomSymbol sym | sym' == sym -> return t
      _ -> unexpectedAtom (AtomSymbol sym') atom
  forward (GKw kw') (atom :- t) =
    case atom of
      AtomKeyword kw | kw' == kw -> return t
      _ -> unexpectedAtom (AtomKeyword kw') atom
  forward GBool (atom :- t) =
    case atom of
      AtomBool a -> return $ a :- t
      _ -> unexpectedAtomType "bool" atom
  forward GInt (atom :- t) =
    case atom of
      AtomInt a -> return $ a :- t
      _ -> unexpectedAtomType "int" atom
  forward GReal (atom :- t) =
    case atom of
      AtomReal a -> return $ a :- t
      _ -> unexpectedAtomType "real" atom
  forward GString (atom :- t) =
    case atom of
      AtomString a -> return $ a :- t
      _ -> unexpectedAtomType "string" atom
  forward GSymbol (atom :- t) =
    case atom of
      AtomSymbol a -> return $ a :- t
      _ -> unexpectedAtomType "symbol" atom
  forward GKeyword (atom :- t) =
    case atom of
      AtomKeyword a -> return $ a :- t
      _ -> unexpectedAtomType "keyword" atom
  -- Generation direction: wrap the stack payload back into an atom;
  -- this never fails.
  backward (GSym sym) t = return (AtomSymbol sym :- t)
  backward (GKw kw) t = return (AtomKeyword kw :- t)
  backward GBool (a :- t) = return (AtomBool a :- t)
  backward GInt (a :- t) = return (AtomInt a :- t)
  backward GReal (a :- t) = return (AtomReal a :- t)
  backward GString (a :- t) = return (AtomString a :- t)
  backward GSymbol (a :- t) = return (AtomSymbol a :- t)
  backward GKeyword (a :- t) = return (AtomKeyword a :- t)
-----------------------------------------------------------------------
-- Sequence grammar
-- | Run a sequence grammar over the given elements, failing if the
-- grammar does not consume the whole input (leftovers are reported
-- pretty-printed in the error message).
parseSequence :: (MonadContextError (Propagation Position) (GrammarError Position) m, InvertibleGrammar (StateT SeqCtx m) g) => [Sexp] -> g a b -> a -> m b
parseSequence xs g t = do
  (a, SeqCtx rest) <- runStateT (forward g t) (SeqCtx xs)
  unless (null rest) $
    unexpectedStr $ "leftover elements: " `mappend`
      (Lazy.toStrict $ Lazy.unwords $ map prettySexp rest)
  return a
-- | Grammar over the remaining elements of a list or vector.
data SeqGrammar a b where
  -- | Match exactly one element with a Sexp grammar.
  GElem :: Grammar SexpGrammar (Sexp :- t) t'
        -> SeqGrammar t t'
  -- | Match all remaining elements, collecting the results in a list.
  GRest :: Grammar SexpGrammar (Sexp :- t) (a :- t)
        -> SeqGrammar t ([a] :- t)
  -- | Treat the remaining elements as a keyword/value property list.
  GProps :: Grammar PropGrammar t t'
         -> SeqGrammar t t'
-- | Remaining (forward) or produced (backward) sequence elements.
newtype SeqCtx = SeqCtx { getItems :: [Sexp] }
instance
  ( MonadPlus m
  , MonadState SeqCtx m
  , MonadContextError (Propagation Position) (GrammarError Position) m
  ) => InvertibleGrammar m SeqGrammar where
  -- Parsing direction: consume elements from the 'SeqCtx' state.
  -- 'step' advances the error-propagation counter before each element.
  forward (GElem g) t = do
    step
    xs <- gets getItems
    case xs of
      [] -> unexpectedStr "end of sequence"
      x:xs' -> do
        modify $ \s -> s { getItems = xs' }
        forward g (x :- t)
  -- Consume every remaining element, parsing each with g.
  forward (GRest g) t = do
    xs <- gets getItems
    modify $ \s -> s { getItems = [] }
    go xs t
    where
      go [] t = return $ [] :- t
      go (x:xs) t = do
        step
        y :- t' <- forward g (x :- t)
        ys :- t'' <- go xs t'
        return $ (y:ys) :- t''
  -- Interpret the remaining elements as alternating :keyword/value
  -- pairs; unconsumed keys after running g are an error.
  forward (GProps g) t = do
    xs <- gets getItems
    modify $ \s -> s { getItems = [] }
    props <- go xs M.empty
    (res, PropCtx ctx) <- runStateT (forward g t) (PropCtx props)
    when (not $ M.null ctx) $
      unexpectedStr $ "property-list keys: " `mappend`
        (Lazy.toStrict $ Lazy.unwords $
         map (prettySexp . Atom dummyPos . AtomKeyword) (M.keys ctx))
    return res
    where
      go [] props = return props
      go (Atom _ (AtomKeyword kwd):x:xs) props = step >> go xs (M.insert kwd x props)
      go other _ =
        unexpectedStr $ "malformed property-list: " `mappend`
          (Lazy.toStrict $ Lazy.unwords $ map prettySexp other)
  -- Generation direction: produced elements are pushed into 'SeqCtx'.
  backward (GElem g) t = do
    step
    (x :- t') <- backward g t
    modify $ \s -> s { getItems = x : getItems s }
    return t'
  backward (GRest g) (ys :- t) = do
    xs :- t' <- go ys t
    put (SeqCtx xs)
    return t'
    where
      go [] t = return $ [] :- t
      go (y:ys) t = do
        step
        x :- t' <- backward g (y :- t)
        xs :- t'' <- go ys t'
        return $ (x : xs) :- t''
  -- Flatten the generated property map back into :key value pairs.
  backward (GProps g) t = do
    step
    (t', PropCtx props) <- runStateT (backward g t) (PropCtx M.empty)
    let plist = foldr (\(name, sexp) acc -> Atom dummyPos (AtomKeyword name) : sexp : acc) [] (M.toList props)
    put $ SeqCtx plist
    return t'
----------------------------------------------------------------------
-- Property list grammar
-- | Grammar for looking up values in a keyword/value property list.
data PropGrammar a b where
  -- | Require the given keyword to be present.
  GProp :: Kw
        -> Grammar SexpGrammar (Sexp :- t) (a :- t)
        -> PropGrammar t (a :- t)
  -- | Optionally match the given keyword.
  GOptProp :: Kw
           -> Grammar SexpGrammar (Sexp :- t) (a :- t)
           -> PropGrammar t (Maybe a :- t)
-- | Keyword-indexed map of not-yet-consumed property values.
newtype PropCtx = PropCtx { getProps :: Map Kw Sexp }
instance
  ( MonadPlus m
  , MonadState PropCtx m
  , MonadContextError (Propagation Position) (GrammarError Position) m
  ) => InvertibleGrammar m PropGrammar where
  -- Parsing direction: look a keyword up in 'PropCtx', delete it on
  -- success so leftover keys can be detected by the caller.
  forward (GProp kwd g) t = do
    ps <- gets getProps
    case M.lookup kwd ps of
      Nothing -> unexpectedStr $
        mconcat [ "key "
                , Lazy.toStrict . prettySexp . Atom dummyPos . AtomKeyword $ kwd
                , " not found"
                ]
      Just x -> do
        put (PropCtx $ M.delete kwd ps)
        forward g $ x :- t
  -- Optional lookup: a missing key yields Nothing instead of failing.
  forward (GOptProp kwd g) t = do
    ps <- gets getProps
    case M.lookup kwd ps of
      Nothing ->
        return (Nothing :- t)
      Just x -> do
        put (PropCtx $ M.delete kwd ps)
        (a :- t') <- forward g (x :- t)
        return (Just a :- t')
  -- Generation direction: insert the generated value under its key.
  backward (GProp kwd g) t = do
    x :- t' <- backward g t
    modify $ \ps -> ps { getProps = M.insert kwd x (getProps ps) }
    return t'
  -- Nothing generates no entry at all.
  backward (GOptProp _ _) (Nothing :- t) = do
    return t
  backward (GOptProp kwd g) (Just x :- t) = do
    x' :- t' <- backward g (x :- t)
    modify $ \ps -> ps { getProps = M.insert kwd x' (getProps ps) }
    return t'
-- | Parse a 'Sexp' with a grammar, returning the value on top of the
-- resulting stack.
runParse
  :: (Functor m, MonadPlus m, MonadContextError (Propagation Position) (GrammarError Position) m, InvertibleGrammar m g)
  => Grammar g (Sexp :- ()) (a :- ())
  -> Sexp
  -> m a
runParse gram input = fmap stackHead (forward gram (input :- ()))
  where stackHead (x :- _) = x
-- | Generate a 'Sexp' from a value by running the grammar backwards.
runGen
  :: (Functor m, MonadPlus m, MonadContextError (Propagation Position) (GrammarError Position) m, InvertibleGrammar m g)
  => Grammar g (Sexp :- ()) (a :- ())
  -> a
  -> m Sexp
runGen gram input = fmap stackHead (backward gram (input :- ()))
  where stackHead (x :- _) = x
| ricardopenyamari/ir2haskell | clir-parser-haskell-master/lib/sexp-grammar/src/Language/SexpGrammar/Base.hs | gpl-2.0 | 10,853 | 0 | 18 | 2,942 | 4,215 | 2,133 | 2,082 | 260 | 1 |
{-# OPTIONS_GHC -Wall -fwarn-tabs -Werror #-}
-------------------------------------------------------------------------------
-- |
-- Module : Math.Vector.Types
-- Copyright : Copyright (c) 2014 Michael R. Shannon
-- License : GPLv2 or Later
-- Maintainer : mrshannon.aerospace@gmail.com
-- Stability : unstable
-- Portability : portable
--
-- Vector types.
-------------------------------------------------------------------------------
module Math.Vector.Types
( Vector3(..)
) where
data Vector3 a = Vector3 a a a deriving(Eq, Show)

-- | Component-wise numeric operations on 3-vectors; 'fromInteger'
-- fills all three components with the same value.
instance Num a => Num (Vector3 a) where
    v + w = zipV (+) v w
    -- (*) is element-wise multiplication.
    v * w = zipV (*) v w
    v - w = zipV (-) v w
    abs = mapV abs
    signum = mapV signum
    fromInteger n = let c = fromInteger n in Vector3 c c c

-- | Apply a function to each component.
mapV :: (a -> a) -> Vector3 a -> Vector3 a
mapV f (Vector3 x y z) = Vector3 (f x) (f y) (f z)

-- | Combine two vectors component by component.
zipV :: (a -> a -> a) -> Vector3 a -> Vector3 a -> Vector3 a
zipV f (Vector3 x1 y1 z1) (Vector3 x2 y2 z2) =
    Vector3 (f x1 x2) (f y1 y2) (f z1 z2)
| mrshannon/trees | src/Math/Vector/Types.hs | gpl-2.0 | 1,190 | 0 | 8 | 286 | 376 | 201 | 175 | 14 | 0 |
module Lib.Boolean (specialOperators) where
import Data.IORef
import Control.Monad (foldM)
import Control.Conditional (ifM, (<&&>), (<||>))
import Base
import Evaluator
default (Int)
-- | Built-in @not@: logical negation of a single boolean argument.
biNot :: IORef Scope -> [SExpr] -> EvalM SExpr
biNot _ [SAtom p (ABool b)] = return . SAtom p . ABool $ not b
biNot _ [other] = reportE (point other) "boolean expected"
biNot _ _ = reportE' "just one argument required"
-- | Special operator @and@: short-circuiting conjunction of all args.
soAnd :: IORef Scope -> [SExpr] -> EvalM SExpr
soAnd scopeRef args = bool <$> foldM conj True args
  where conj acc sexpr = return acc <&&> (getBool =<< evalAlone scopeRef sexpr)
-- | Special operator @or@: short-circuiting disjunction of all args.
soOr :: IORef Scope -> [SExpr] -> EvalM SExpr
soOr scopeRef args = bool <$> foldM disj False args
  where disj acc sexpr = return acc <||> (getBool =<< evalAlone scopeRef sexpr)
-- | Special operator @->@: logical implication.  The consequent is
-- evaluated only when the premise is true; otherwise the implication
-- holds vacuously and true is returned.
soImpl :: IORef Scope -> [SExpr] -> EvalM SExpr
soImpl scopeRef [arg1, arg2] =
  ifM (getBool =<< evalAlone scopeRef arg1) (evalAlone scopeRef arg2) (return $ bool True)
-- Fixed typo in the error message ("requried" -> "required").
soImpl _ _ = reportE' "two arguments required"
-- | Exported table of boolean special operators: operator name,
-- arity ('Nothing' = variadic), and implementation.
specialOperators = [("and", Nothing, soAnd)
                   ,("or", Nothing, soOr)
                   ,("->", Just 2, soImpl)
                   ,("not", Just 1, withEvaluatedArgs biNot)]
| Kotolegokot/Underlisp | src/Lib/Boolean.hs | gpl-3.0 | 1,166 | 0 | 12 | 286 | 452 | 240 | 212 | 22 | 1 |
-- Author: Viacheslav Lotsmanov
-- License: GPLv3 https://raw.githubusercontent.com/unclechu/xlib-keys-hack/master/LICENSE
{-# LANGUAGE DeriveGeneric, DeriveAnyClass, OverloadedLists #-}
{-# LANGUAGE TemplateHaskell, TupleSections, FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables, RankNTypes #-}
module Keys
( type KeyName (..)
, type KeyMap
, type AlternativeModeKeyAction (..)
, getKeyMap
, getAliasByKeyDevNum
, getKeyCodeByName
, getDefaultKeyCodeByName
, ergoEnterKey
, getAlternativeRemapByName
, hasAlternativeRemap
, hasAlternativeKeyAction
, hasAlternativeKey
, getMediaKeyCode
, isMediaKey
, getRemapByName
, getRemappedByName
, getExtraKeys
) where
import Prelude hiding (lookup)
import "base" GHC.Generics (Generic)
import "base" Data.List (find)
import "base" Data.Tuple (swap)
import "base" Data.Word (type Word16)
import "base" Data.Either (isLeft, isRight)
import "containers" Data.Set (type Set)
import "containers" Data.Map.Strict (type Map, lookup, member, insert, delete)
import qualified "containers" Data.Map.Strict as Map
import qualified "containers" Data.Set as Set
import "qm-interpolated-string" Text.InterpolatedString.QM (qm, qms)
import "base" Control.Arrow ((&&&), (***))
import "base" Control.Applicative ((<**>))
import "base" Control.Monad (join)
import "extra" Control.Monad.Extra (whenM)
import "mtl" Control.Monad.Except (type MonadError (throwError))
import "deepseq" Control.DeepSeq (type NFData)
import "type-operators" Control.Type.Operator (type ($))
import "lens" Control.Lens ( type Lens', type Field1, type Field2
, (&~), (.=), (%~)
, view, use, set, _1, _2, _3
)
import "X11" Graphics.X11.Types (type KeyCode)
-- local imports
import Utils.Sugar ((.>), (<$.), (&), (<&>), (?), applyIf, liftAT2)
import Utils.Lens (makeApoClassy)
import qualified Options as O
import Types (type AlternativeModeLevel (..))
-- | Symbolic names for every physical key this program knows about.
--
-- Used as the key of every mapping in this module; the actual device
-- key codes and X key codes are attached in "defaultKeyAliases".
data KeyName
  = EscapeKey

  | F1Key  | F2Key  | F3Key  | F4Key
  | F5Key  | F6Key  | F7Key  | F8Key
  | F9Key  | F10Key | F11Key | F12Key

  | PrintScreenKey | ScrollLockKey | PauseKey

  -- Apple keyboard additional fn keys
  | F13Key | F14Key | F15Key | F16Key
  | F17Key | F18Key | F19Key | F20Key
  | F21Key | F22Key | F23Key | F24Key

  | GraveKey
  | Number1Key | Number2Key | Number3Key | Number4Key | Number5Key
  | Number6Key | Number7Key | Number8Key | Number9Key | Number0Key
  | MinusKey   | EqualKey   | BackSpaceKey

  | TabKey
  | QKey | WKey | EKey | RKey | TKey
  | YKey | UKey | IKey | OKey | PKey
  | BracketLeftKey | BracketRightKey | BackslashKey

  | CapsLockKey
  | AKey | SKey | DKey | FKey | GKey
  | HKey | JKey | KKey | LKey
  | SemicolonKey | ApostropheKey | EnterKey

  | ShiftLeftKey
  | LessKey -- Near left shift
  | ZKey | XKey | CKey | VKey | BKey | NKey | MKey
  | CommaKey | PeriodKey | SlashKey | ShiftRightKey

  | ControlLeftKey | SuperLeftKey | AltLeftKey
  | SpaceKey
  | AltRightKey | SuperRightKey | MenuKey | ControlRightKey

  | FNKey -- On Apple Magic Keyboard (where @InsertKey@ is supposed to be)
  | InsertKey | HomeKey | PageUpKey
  | DeleteKey | EndKey  | PageDownKey

  | ArrowLeftKey | ArrowRightKey | ArrowUpKey | ArrowDownKey

  | NumLockKey   | KPEqualKey   | KPDivideKey  | KPMultiplyKey
  | KPNumber7Key | KPNumber8Key | KPNumber9Key | KPSubtractKey
  | KPNumber4Key | KPNumber5Key | KPNumber6Key | KPAddKey
  | KPNumber1Key | KPNumber2Key | KPNumber3Key | KPEnterKey
  | KPNumber0Key | KPDecimalKey

  -- Media keys
  | MCalculatorKey | MEjectKey
  | MAudioMuteKey  | MAudioLowerVolumeKey | MAudioRaiseVolumeKey
  | MAudioPlayKey  | MAudioStopKey | MAudioPrevKey | MAudioNextKey
  | MMonBrightnessDownKey | MMonBrightnessUpKey

  deriving (Eq, Show, Ord, Generic, NFData)
-- | Use @0@ ("minBound") for "Word16" (second tuple item) as a plug
-- (to turn key off on a keyboard).
--
-- But override it to "minBound" only inside "getKeyMap".
--
-- Triple of @(key name, device key code, X key code)@.
type KeyAlias = (KeyName, Word16, KeyCode)
-- | Mappings between keys names and device key codes
-- and X key codes to trigger them.
--
-- This mapping supposed to be as standard as possible, without any remaps,
-- it will be taken as a source of original key codes for actual remaps.
--
-- NOTE: the list literal below is actually a "Set" (via @OverloadedLists@).
-- An X key code of "minBound" (see "FNKey") means the key has no X key
-- code to trigger; such plugs are filtered out inside "getKeyMap".
defaultKeyAliases :: Set KeyAlias
defaultKeyAliases =
  [ (EscapeKey, 1, 9)

  , (F1Key,  59, 67)
  , (F2Key,  60, 68)
  , (F3Key,  61, 69)
  , (F4Key,  62, 70)
  , (F5Key,  63, 71)
  , (F6Key,  64, 72)
  , (F7Key,  65, 73)
  , (F8Key,  66, 74)
  , (F9Key,  67, 75)
  , (F10Key, 68, 76)
  , (F11Key, 87, 95)
  , (F12Key, 88, 96)

  , (PrintScreenKey, 99,  107)
  , (ScrollLockKey,  70,  78)
  , (PauseKey,       119, 127)

  -- Apple keyboard additional fn keys
  , (F13Key, 183, 191)
  , (F14Key, 184, 192)
  , (F15Key, 185, 193)
  , (F16Key, 186, 194)
  , (F17Key, 187, 195)
  , (F18Key, 188, 196)
  , (F19Key, 189, 197)
  , (F20Key, 190, 198)
  , (F21Key, 191, 199)
  , (F22Key, 192, 200)
  , (F23Key, 193, 201)
  , (F24Key, 194, 202)

  -- Numbers line
  , (GraveKey,   41, 49)
  , (Number1Key, 2,  10)
  , (Number2Key, 3,  11)
  , (Number3Key, 4,  12)
  , (Number4Key, 5,  13)
  , (Number5Key, 6,  14)
  , (Number6Key, 7,  15)
  , (Number7Key, 8,  16)
  , (Number8Key, 9,  17)
  , (Number9Key, 10, 18)
  , (Number0Key, 11, 19)
  , (MinusKey,   12, 20)
  , (EqualKey,   13, 21)
  , (BackSpaceKey, 14, 22)

  -- QWER line
  , (TabKey, 15, 23)
  , (QKey,   16, 24)
  , (WKey,   17, 25)
  , (EKey,   18, 26)
  , (RKey,   19, 27)
  , (TKey,   20, 28)
  , (YKey,   21, 29)
  , (UKey,   22, 30)
  , (IKey,   23, 31)
  , (OKey,   24, 32)
  , (PKey,   25, 33)
  , (BracketLeftKey,  26, 34)
  , (BracketRightKey, 27, 35)
  , (BackslashKey,    43, 51)

  -- ASDF line
  , (CapsLockKey, 58, 66)
  , (AKey, 30, 38)
  , (SKey, 31, 39)
  , (DKey, 32, 40)
  , (FKey, 33, 41)
  , (GKey, 34, 42)
  , (HKey, 35, 43)
  , (JKey, 36, 44)
  , (KKey, 37, 45)
  , (LKey, 38, 46)
  , (SemicolonKey,  39, 47)
  , (ApostropheKey, 40, 48)
  , (EnterKey,      28, 36)

  -- ZXCV
  , (ShiftLeftKey, 42, 50)
  -- An additional key on some keyboards between "Left Shift" and "Z".
  -- Could be found on Russian version of Apple Magic Keyboard.
  , (LessKey, 86, 94)
  , (ZKey, 44, 52)
  , (XKey, 45, 53)
  , (CKey, 46, 54)
  , (VKey, 47, 55)
  , (BKey, 48, 56)
  , (NKey, 49, 57)
  , (MKey, 50, 58)
  , (CommaKey,  51, 59)
  , (PeriodKey, 52, 60)
  , (SlashKey,  53, 61)
  , (ShiftRightKey, 54, 62)

  -- Bottom line
  , (ControlLeftKey, 29,  37)
  , (SuperLeftKey,   125, 133)
  , (AltLeftKey,     56,  64)
  , (SpaceKey,       57,  65)
  , (AltRightKey,    100, 108)
  , (SuperRightKey,  126, 134)
  , (MenuKey,        127, 135)
  , (ControlRightKey, 97, 105)

  -- Right block
  -- On apple keyboard (no X num).
  -- Also it will be handled as Fn when is used with media keys.
  , (FNKey, 464, minBound)

  , (InsertKey, 110, 118)
  , (HomeKey,   102, 110)
  , (PageUpKey, 104, 112)
  , (DeleteKey,   111, 119)
  , (EndKey,      107, 115)
  , (PageDownKey, 109, 117)

  , (ArrowLeftKey,  105, 113)
  , (ArrowRightKey, 106, 114)
  , (ArrowUpKey,    103, 111)
  , (ArrowDownKey,  108, 116)

  -- Numeric keypad
  , (NumLockKey,    69,  77)
  , (KPEqualKey,    117, 125)
  , (KPDivideKey,   98,  106)
  , (KPMultiplyKey, 55,  63)

  , (KPNumber7Key,  71, 79)
  , (KPNumber8Key,  72, 80)
  , (KPNumber9Key,  73, 81)
  , (KPSubtractKey, 74, 82)

  , (KPNumber4Key, 75, 83)
  , (KPNumber5Key, 76, 84)
  , (KPNumber6Key, 77, 85)
  , (KPAddKey,     78, 86)

  , (KPNumber1Key, 79, 87)
  , (KPNumber2Key, 80, 88)
  , (KPNumber3Key, 81, 89)
  , (KPEnterKey,   96, 104)

  , (KPNumber0Key, 82, 90)
  , (KPDecimalKey, 83, 91)
  ]
-- | A map of keys which supposed to act like another keys.
--
-- It could be extended in "getKeyMap" depending on provided "O.Options".
--
-- To remap a key you replace "KeyCode" in original mapping inside "getKeyMap".
--
-- NOTE: 'Map.union' is left-biased, so the ergonomic-mode additions below
-- would win over the base entries if a key occurred in both.
basicKeyRemapping :: O.ErgonomicMode -> Map KeyName KeyName
basicKeyRemapping ergoMode =
  [ (FNKey, InsertKey)
  , (CapsLockKey, EscapeKey)
  , (LessKey, ShiftLeftKey)
  , (MenuKey, SuperRightKey)
  ]
  & flip applyIf (ergoMode == O.ErgonomicMode) (
      Map.union
        [ (ergoEnterKey, EnterKey)
        , (BracketLeftKey, ApostropheKey)
        ]
    )
  & flip applyIf (ergoMode == O.ErgoDoxErgonomicMode) (
      Map.union
        [ (BackslashKey, ApostropheKey)
        ]
    )
-- | A representation of a key action that do something inside and with
-- alternative mode.
--
-- Semantics (freezing, changing the level) are implemented by the
-- consumers of this module; here these are just markers attached to keys
-- in "alternativeModeRemaps".
data AlternativeModeKeyAction
  = AlternativeModeFreeze
  | AlternativeModeLevelUp
  | AlternativeModeLevelDown
  deriving (Show, Eq, Generic, NFData)
-- | Remapping for keys when Alternative mode is turned on
--
-- The pair holds the remaps for the first and the second alternative
-- level. A 'Left' value is a mode action (see "AlternativeModeKeyAction"),
-- a 'Right' value is the key the pressed key should act as.
--
-- Ergonomic modes post-process the base maps with lens updates; note that
-- 'Map.union' is left-biased so the ergonomic additions take precedence.
alternativeModeRemaps
  :: O.ErgonomicMode
  -> ( Map KeyName $ Either AlternativeModeKeyAction KeyName -- First level
     , Map KeyName $ Either AlternativeModeKeyAction KeyName -- Second level
     )
alternativeModeRemaps ergoMode = (,)
  -- 4th row shifted down to 3rd
  [ (TabKey, Right GraveKey)
  , (QKey, Right Number1Key)
  , (WKey, Right Number2Key)
  , (EKey, Right Number3Key)
  , (RKey, Right Number4Key)
  , (TKey, Right Number5Key)
  , (YKey, Right Number6Key)
  , (UKey, Right Number7Key)
  , (IKey, Right Number8Key)
  , (OKey, Right Number9Key)
  , (PKey, Right Number0Key)
  , (BracketLeftKey, Right MinusKey)
  , (BracketRightKey, Right EqualKey)
  , (BackslashKey, Right BackSpaceKey)

  -- Arrow keys
  , (HKey, Right ArrowLeftKey)
  , (JKey, Right ArrowDownKey)
  , (KKey, Right ArrowUpKey)
  , (LKey, Right ArrowRightKey)

  -- Delete backward (BS) and forward (Del)
  , (SemicolonKey, Right BackSpaceKey)
  , (ApostropheKey, Right DeleteKey)

  -- Insert key
  , (NKey, Right InsertKey)

  -- Home, PgDown, PgUp, End
  -- Symmetric to @hjkl@:
  --   Home/End = Left/Right = j;
  --   PgDown/PgUp = Down/Up = kl
  , (MKey, Right HomeKey)
  , (CommaKey, Right PageDownKey)
  , (PeriodKey, Right PageUpKey)
  , (SlashKey, Right EndKey)

  -- Alternative mode operations
  , (ZKey, Left AlternativeModeFreeze)
  , (XKey, Left AlternativeModeLevelDown)
  , (CKey, Left AlternativeModeLevelUp)

  , (VKey, Right LessKey)
  , (BKey, Right CapsLockKey)
  ]
  -- FN keys row shifted down to 3rd row
  [ (TabKey, Right EscapeKey)
  , (QKey, Right F1Key)
  , (WKey, Right F2Key)
  , (EKey, Right F3Key)
  , (RKey, Right F4Key)
  , (TKey, Right F5Key)
  , (YKey, Right F6Key)
  , (UKey, Right F7Key)
  , (IKey, Right F8Key)
  , (OKey, Right F9Key)
  , (PKey, Right F10Key)
  , (BracketLeftKey, Right F11Key)
  , (BracketRightKey, Right F12Key)
  , (BackslashKey, Right PrintScreenKey)

  , (AKey, Right PrintScreenKey)
  , (SKey, Right ScrollLockKey)
  , (DKey, Right PauseKey)
  , (FKey, Right MEjectKey)

  , (HKey, Right MAudioPrevKey)
  , (JKey, Right MMonBrightnessDownKey)
  , (KKey, Right MMonBrightnessUpKey)
  , (LKey, Right MAudioNextKey)
  , (SemicolonKey, Right MAudioPlayKey)
  , (ApostropheKey, Right MAudioStopKey)

  , (NKey, Right MCalculatorKey)
  , (MKey, Right MAudioMuteKey)
  , (CommaKey, Right MAudioLowerVolumeKey)
  , (PeriodKey, Right MAudioRaiseVolumeKey)
  , (SlashKey, Right MenuKey)

  -- Alternative mode operations
  , (ZKey, Left AlternativeModeFreeze)
  , (XKey, Left AlternativeModeLevelDown)
  , (CKey, Left AlternativeModeLevelUp)
  ]
  & flip applyIf (ergoMode == O.ErgonomicMode) (
      -- "ergoEnterKey" acts as Enter in this mode, so it must not carry an
      -- alternative remap on either level.
      (_1 %~ delete ergoEnterKey)
      .
      (_2 %~ delete ergoEnterKey)
      .
      (_1 %~) (
        Map.union
          [ (BracketLeftKey, Right DeleteKey)
          , (AKey, Right MinusKey)
          , (SKey, Right EqualKey)
          , (DKey, Right BracketLeftKey)
          , (FKey, Right BracketRightKey)
          , (GKey, Right BackslashKey)
          ]
      )
      .
      (_2 %~) (
        Map.union
          [ (TabKey, Right F12Key)
          , (BracketLeftKey, Right F11Key)
          -- Get back Stop key since Apostrophe (remapped to Open Bracket)
          -- is taken by F11 (and original Apostrophe is remapped to Enter key).
          , (GKey, Right MAudioStopKey)
          ]
      )
    )
  & flip applyIf (ergoMode == O.ErgoDoxErgonomicMode) (
      (
        -- Drop the keys that are out of the ErgoDox ergonomic zone
        -- from both levels.
        let
          keys :: Set KeyName
          keys = [ApostropheKey, BracketLeftKey, BracketRightKey]
          reducer accFunc k = accFunc . (_1 %~ delete k) . (_2 %~ delete k)
        in
          foldl reducer id keys
      )
      .
      (_1 %~) (
        Map.union
          [ (BackslashKey, Right DeleteKey)
          , (AKey, Right MinusKey)
          , (SKey, Right EqualKey)
          , (DKey, Right BracketLeftKey)
          , (FKey, Right BracketRightKey)
          , (GKey, Right BackslashKey)
          ]
      )
      .
      (_2 %~) (
        Map.union
          [ (TabKey, Right F12Key)
          , (BackslashKey, Right F11Key)
          , (GKey, Right MAudioStopKey)
          ]
      )
    )
-- | Device key numbers aliases for media keys
--
-- The X key codes for these are not hard-coded here; they are obtained
-- from the X server at runtime and paired with these device numbers
-- inside "getKeyMap".
mediaDevNums :: Map KeyName Word16
mediaDevNums =
  [ (MCalculatorKey, 140)
  , (MEjectKey, 161)

  , (MAudioMuteKey, 113)
  , (MAudioLowerVolumeKey, 114)
  , (MAudioRaiseVolumeKey, 115)
  , (MAudioPlayKey, 164)
  , (MAudioStopKey, 166)
  , (MAudioPrevKey, 165)
  , (MAudioNextKey, 163)

  , (MMonBrightnessDownKey, 224)
  , (MMonBrightnessUpKey, 225)
  ]
-- | Cyclic shift of the numbers row: each key acts as its left-hand
-- neighbour, with "Number1Key" wrapping around to "MinusKey".
-- Merged into the remaps when "O.shiftNumericKeys" is enabled.
numericShift :: Map KeyName KeyName
numericShift =
  [ (Number1Key, MinusKey)
  , (Number2Key, Number1Key)
  , (Number3Key, Number2Key)
  , (Number4Key, Number3Key)
  , (Number5Key, Number4Key)
  , (Number6Key, Number5Key)
  , (Number7Key, Number6Key)
  , (Number8Key, Number7Key)
  , (Number9Key, Number8Key)
  , (Number0Key, Number9Key)
  , (MinusKey, Number0Key)
  ]

-- | Cyclic shift of the @hjkl@ home-row block: each key acts as its
-- left-hand neighbour, with "HKey" wrapping around to "SemicolonKey".
-- Merged into the remaps when "O.shiftHJKLKeys" is enabled.
hjklShift :: Map KeyName KeyName
hjklShift =
  [ (HKey, SemicolonKey)
  , (JKey, HKey)
  , (KKey, JKey)
  , (LKey, KKey)
  , (SemicolonKey, LKey)
  ]
-- | The whole numbers row; these keys get their X key code zeroed out
-- (turned off) in "getKeyMap" when "O.turnOffFourthRow" is enabled.
fourthRow :: [KeyName]
fourthRow =
  [ GraveKey
  , Number1Key , Number2Key , Number3Key , Number4Key , Number5Key
  , Number6Key , Number7Key , Number8Key , Number9Key , Number0Key
  , MinusKey , EqualKey , BackSpaceKey
  ]

-- | "fourthRow" is also part of this set.
--
-- Keys turned off in "getKeyMap" when the ergonomic mode is enabled.
outOfErgonomicZoneKeys :: [KeyName]
outOfErgonomicZoneKeys = [BracketRightKey, BackslashKey, EnterKey]
-- | Index a set of key aliases by their "KeyName".
getByNameMap :: Set KeyAlias -> Map KeyName KeyAlias
getByNameMap = Map.fromList . Set.toList . Set.map indexed
  where indexed alias@(name, _, _) = (name, alias)

-- | Index a set of key aliases by their device key code.
getByDevNumMap :: Set KeyAlias -> Map Word16 KeyAlias
getByDevNumMap = Map.fromList . Set.toList . Set.map indexed
  where indexed alias@(_, devNum, _) = (devNum, alias)
-- | All lookup tables derived from the options and the hardware key
-- aliases; built once by "getKeyMap" and queried by the accessors below.
data KeyMap
  = KeyMap
  { byNameDefaultKeyCode :: Map KeyName KeyCode
    -- ^ Original (pre-remap) X key code of every key

  , byNameMap :: Map KeyName KeyAlias
    -- ^ Remapped alias per key name

  , byDevNumMap :: Map Word16 KeyAlias
    -- ^ Remapped alias per device key code (plugs filtered out)

  , byNameAlternativeMap ::
      ( Map KeyName $ Either AlternativeModeKeyAction (KeyName, KeyCode)
      , Map KeyName $ Either AlternativeModeKeyAction (KeyName, KeyCode)
      )
    -- ^ Alternative-mode remaps for the first and the second level

  , byNameMediaMap :: Map KeyName KeyCode
    -- ^ X key codes of the media keys only

  , byNameRemaps :: Map KeyName KeyName
    -- ^ Which key each remapped key acts as

  , extraByRemaps :: Set (KeyName, KeyName)
    -- ^ Mirror of 'byNameRemaps' as @(to, from)@ pairs
  } deriving (Show, Eq, Generic, NFData)
-- | Builds the whole "KeyMap" from the options and the media key codes
-- obtained from the X server.
--
-- Fails in @MonadError@ when a media key is unknown or a remap target
-- has no default key code.
--
-- NOTE(review): the old comment mentioned a @moreAliases@ argument which
-- no longer exists; @mediaKeyCodes@ apparently took its place.
getKeyMap
  :: forall m. MonadError String m
  => O.Options
  -> Map KeyName KeyCode -- ^ Obtained media key codes from X server
  -> m KeyMap
getKeyMap opts mediaKeyCodes = go where
  go = do
    defaultKeyCodes' <- defaultKeyCodes
    nameMap' <- nameMap
    devMap' <- devMap
    alternativeModeKeyCodes' <- alternativeModeKeyCodes
    byNameMediaAliasesMap' <- byNameMediaAliasesMap

    pure
      $ KeyMap
      { byNameDefaultKeyCode = defaultKeyCodes'
      , byNameMap = nameMap'
      , byDevNumMap = devMap'
      , byNameAlternativeMap = alternativeModeKeyCodes'
      , byNameMediaMap = byNameMediaAliasesMap'
      , byNameRemaps = remaps
      , extraByRemaps = remapsMirror
      }

  -- Base aliases plus resolved media keys, with whole key zones
  -- optionally turned off (X code zeroed) depending on the options.
  keyAliases :: m $ Set KeyAlias
  keyAliases = go' where
    go' = pure defaultKeyAliases
      <**> fmap mappend resolvedMediaKeys
      <**> pure (turnOffFourthRow `applyIf` O.turnOffFourthRow opts)
      <**> pure (turnOffOutOfErgonomicZone
                  `applyIf` (O.ergonomicMode opts == O.ErgonomicMode))

    -- Pair each obtained media key code with its device number;
    -- fail on a key that is not present in "mediaDevNums".
    resolvedMediaKeys =
      Set.fromList . Map.elems <$>
        Map.traverseWithKey mediaKeysReducer mediaKeyCodes

    mediaKeysReducer keyName keyCode
      = throwError [qm| Unexpected media key: {keyName} |] `maybe` pure
      $ (keyName,,keyCode) <$> keyName `lookup` mediaDevNums

    -- Zero out the X code of every key in the given zone.
    turnOffFourthRow = Set.map (&~ f) where
      f = whenM (use _1 <&> (`elem` fourthRow)) $ _2 .= minBound

    turnOffOutOfErgonomicZone = Set.map (&~ f) where
      f = whenM (use _1 <&> (`elem` outOfErgonomicZoneKeys)) $ _2 .= minBound

  -- Key name -> original X key code, before any remapping is applied.
  defaultKeyCodes :: m $ Map KeyName KeyCode
  defaultKeyCodes =
    keyAliases <&> Map.fromList . Set.toList . Set.map (view _1 &&& view _3)

  -- The full remap table: base remapping extended per the options.
  remaps :: Map KeyName KeyName
  remaps = f $ basicKeyRemapping $ O.ergonomicMode opts where
    f = (delete CapsLockKey `applyIf` O.realCapsLock opts)
      .> (rightCtrlAsSuper `applyIf` O.rightControlAsRightSuper opts)
      .> (rightSuperAsSpace `applyIf` O.rightSuperAsSpace opts)
      .> ((<> numericShift) `applyIf` O.shiftNumericKeys opts)
      .> ((<> hjklShift) `applyIf` O.shiftHJKLKeys opts)

    rightCtrlAsSuper = insert ControlRightKey SuperRightKey
    rightSuperAsSpace = insert SuperRightKey SpaceKey

  -- @(to, from)@ pairs, see 'extraByRemaps'.
  remapsMirror = Set.fromList $ swap <$> Map.toList remaps

  -- Resolve each remap target to its default X key code;
  -- fail when the target key is unknown.
  remapsWithKeyCodes :: m $ Map KeyName (KeyName, KeyCode)
  remapsWithKeyCodes = keyAliases >>= \aliases ->
    flip Map.traverseWithKey remaps $ \keyNameRemapFrom keyNameRemapTo ->
      let
        found = find (view _1 .> (== keyNameRemapTo)) aliases
        failMsg = [qms| Default key code of {keyNameRemapTo} not found
                        to remap {keyNameRemapFrom} to |]
      in
        maybe (throwError failMsg) (pure . (keyNameRemapTo,) . view _3) found

  -- Aliases with the remapped X key codes substituted in;
  -- fail when some remap source key does not exist among the aliases.
  remappedKeyAliases :: m $ Set KeyAlias
  remappedKeyAliases = go' where
    go' = join $ f <$> keyAliases <*> remapsWithKeyCodes

    f aliases remaps' = resolve where
      (new, xsRemaps) = foldl reducer (Set.empty, remaps') aliases
      resolve = Map.null xsRemaps ? pure new $ throwError failMsg
      failMsg = [qms| Some keys which are supposed to be remapped
                      haven't found their targets: {xsRemaps} |]

    -- Consume remaps one by one, substituting the X key code of the
    -- remap target into the matching alias.
    reducer (flip Set.insert -> append, xsRemaps) alias@(view _1 -> keyName) =
      case lookup keyName xsRemaps <&> view _2 <&> set _3 of
        Nothing -> (append alias, xsRemaps)
        Just remap -> (append $ remap alias, delete keyName xsRemaps)

  nameMap :: m $ Map KeyName KeyAlias
  nameMap = getByNameMap <$> remappedKeyAliases

  -- 'minBound' device number is the plug value, never a real key.
  devMap :: m $ Map Word16 KeyAlias
  devMap = delete minBound . getByDevNumMap <$> remappedKeyAliases

  byNameMediaAliasesMap :: m $ Map KeyName KeyCode
  byNameMediaAliasesMap
    = remappedKeyAliases
    <&> Set.filter (view _1 .> (`member` mediaDevNums))
    <&> Map.fromList . Set.toList . Set.map (view _1 &&& view _3)

  -- Both levels of the alternative mapping, with remap targets resolved
  -- to key codes and remapped source keys followed through.
  alternativeModeKeyCodes :: m $
    ( Map KeyName $ Either AlternativeModeKeyAction (KeyName, KeyCode)
    , Map KeyName $ Either AlternativeModeKeyAction (KeyName, KeyCode)
    )
  alternativeModeKeyCodes = go' where
    go'
      = liftAT2 $ handle *** handle
      $ alternativeModeRemaps $ O.ergonomicMode opts

    handle = followRemaps <$. Map.traverseWithKey f

    followRemaps result = g $ foldl reducer intialAcc remapsMirror where
      g (toDelete, toAdd) = Map.withoutKeys result toDelete <> toAdd

      -- A "Set" of keys to delete from original alternative mapping
      -- and a "Map" of new remapped keys to add to it.
      intialAcc = (Set.empty, Map.empty)

      reducer acc@(toDelete, toAdd) (toKey, fromKey)
        = maybe acc (\x -> (Set.insert toKey toDelete, insert fromKey x toAdd))
        $ lookup toKey result

    f _ (Left x) = pure $ Left x
    f keyNameFrom (Right keyNameTo) = x where
      x = lookup keyNameTo <$> defaultKeyCodes >>= resolve
      resolve = throwError failMsg `maybe` (pure . Right . (keyNameTo,))
      failMsg = [qms| Default key code of {keyNameTo} not found
                      for alternative {keyNameFrom} |]
-- | Find the (remapped) alias of a key by its device key code.
getAliasByKeyDevNum :: KeyMap -> Word16 -> Maybe KeyAlias
getAliasByKeyDevNum keyMap devNum = lookup devNum (byDevNumMap keyMap)
-- | Look up the alternative-mode behaviour of a key on the given level:
-- either a mode action or the key (with its "KeyCode") it should act as.
getAlternativeRemapByName
  :: KeyMap
  -> AlternativeModeLevel
  -> KeyName
  -> Maybe $ Either AlternativeModeKeyAction (KeyName, KeyCode)
getAlternativeRemapByName keyMap level keyName =
  lookup keyName (view (alternativeLevelToLens level) (byNameAlternativeMap keyMap))

-- | Does the key have any alternative-mode behaviour on the given level?
hasAlternativeRemap :: KeyMap -> AlternativeModeLevel -> KeyName -> Bool
hasAlternativeRemap keyMap level keyName =
  member keyName (view (alternativeLevelToLens level) (byNameAlternativeMap keyMap))
-- | Does the key, on the given level, trigger a mode action
-- (a 'Left' "AlternativeModeKeyAction")?
hasAlternativeKeyAction :: KeyMap -> AlternativeModeLevel -> KeyName -> Bool
hasAlternativeKeyAction keyMap level keyName =
  case lookup keyName (view (alternativeLevelToLens level) (byNameAlternativeMap keyMap)) of
    Just (Left _) -> True
    _             -> False

-- | Does the key, on the given level, act as another key
-- (a 'Right' alias with a key code to trigger)?
hasAlternativeKey :: KeyMap -> AlternativeModeLevel -> KeyName -> Bool
hasAlternativeKey keyMap level keyName =
  case lookup keyName (view (alternativeLevelToLens level) (byNameAlternativeMap keyMap)) of
    Just (Right _) -> True
    _              -> False
-- | Which key is the given key remapped to (if any)?
getRemapByName :: KeyMap -> KeyName -> Maybe KeyName
getRemapByName keyMap keyName = lookup keyName (byNameRemaps keyMap)

-- | Inverse direction of 'getRemapByName': all keys that were remapped
-- to the given key.
--
-- For example, both "MenuKey" and "ControlRightKey" may be remapped to
-- "SuperRightKey"; querying "SuperRightKey" then yields the "Set" of
-- those two keys.
getRemappedByName :: KeyMap -> KeyName -> Set.Set KeyName
getRemappedByName keyMap keyName =
  Map.keysSet (Map.filter (keyName ==) (byNameRemaps keyMap))
-- | All extra keys aliased as the given key (its remap "synonyms").
--
-- E.g. "LessKey" is remapped as "ShiftLeftKey", so whatever handling
-- applies to "ShiftLeftKey" should also apply to every key in
--
-- @
-- getExtraKeys keyMap ShiftLeftKey  -- == Set.fromList [LessKey]
-- @
--
-- (the resulting "Set" may contain more than one alias).
--
-- TODO Is this a duplicate of "getRemappedByName"?
--      Find out and tell the difference (or remove one and keep another).
getExtraKeys :: KeyMap -> KeyName -> Set.Set KeyName
getExtraKeys keyMap keyName =
  Set.map (view _2) (Set.filter ((== keyName) . view _1) (extraByRemaps keyMap))
-- | Is the given key one of the media keys?
isMediaKey :: KeyMap -> KeyName -> Bool
isMediaKey keyMap keyName = member keyName (byNameMediaMap keyMap)

-- | X key code of a media key (if the key is a media key).
getMediaKeyCode :: KeyMap -> KeyName -> Maybe KeyCode
getMediaKeyCode keyMap keyName = lookup keyName (byNameMediaMap keyMap)

-- | Remapped X "KeyCode" to trigger for the given key name.
getKeyCodeByName :: KeyMap -> KeyName -> Maybe KeyCode
getKeyCodeByName keyMap keyName = view _3 <$> lookup keyName (byNameMap keyMap)

-- | Original (pre-remap) X "KeyCode" of the given key name.
getDefaultKeyCodeByName :: KeyMap -> KeyName -> Maybe KeyCode
getDefaultKeyCodeByName keyMap keyName =
  lookup keyName (byNameDefaultKeyCode keyMap)
-- | The key that acts as Enter in the ergonomic mode
-- (see "basicKeyRemapping" and "alternativeModeRemaps").
ergoEnterKey :: KeyName
ergoEnterKey = ApostropheKey

-- | Selects the tuple-field lens matching an alternative-mode level:
-- first level -> '_1', second level -> '_2'.
alternativeLevelToLens
  :: (Field1 a a b b, Field2 a a b b)
  => AlternativeModeLevel
  -> Lens' a b

alternativeLevelToLens FirstAlternativeModeLevel = _1
alternativeLevelToLens SecondAlternativeModeLevel = _2

-- Template Haskell: generates classy lenses for "KeyMap".
makeApoClassy ''KeyMap
| unclechu/xlib-keys-hack | src/Keys.hs | gpl-3.0 | 26,170 | 0 | 19 | 7,453 | 6,558 | 3,924 | 2,634 | -1 | -1 |
module Request where
import Text.JSON
import Control.Applicative
import Control.Monad
-- | A request accepted by the server.
data Request
  = Cps Double                -- ^ set cycles-per-second (tempo)
  | Hush                      -- ^ silence everything
  | Pattern Int String        -- ^ set the pattern source for channel @dN@
  | Info String               -- ^ informational message
  | Render String Double Double
    -- ^ render a pattern source at the given cps for the given
    --   number of cycles
-- | Human-readable rendering; channel patterns print in the
-- @dN $ pattern@ form.
instance Show Request where
  show (Cps x) = "cps " ++ show x
  show Hush = "hush"
  show (Pattern n p) = "d" ++ show n ++ " $ " ++ p
  show (Info x) = "info: " ++ x
  show (Render patt cps cycles) =
    "render (cps=" ++ show cps ++ "; cycles=" ++ show cycles ++ "): " ++ patt
-- | JSON (de)serialisation keyed by the first key of the object.
--
-- NOTE(review): 'readJSON' only accepts channels @d1@..@d9@, while
-- 'showJSON' emits @d<n>@ for any @n@ — so round-tripping a 'Pattern'
-- with @n@ outside 1..9 fails.
instance JSON Request where
  showJSON (Cps x) = encJSDict [("cps",x)]
  showJSON (Hush) = encJSDict [("hush","hush")]
  showJSON (Pattern n p) = encJSDict [("d"++(show n),p)]
  showJSON (Info x) = encJSDict [("info",x)]
  showJSON (Render patt cps cycles) = encJSDict [("render",patt),("cps",show cps),("cycles",show cycles)]

  -- Dispatch on the first key of the JSON object; guards fall through
  -- until one matches.
  readJSON (JSObject x) | (firstKeyIs "cps" x) = Cps <$> (valFromObj "cps" x)
  readJSON (JSObject x) | (firstKeyIs "hush" x) = Ok Hush
  readJSON (JSObject x) | (firstKeyIs "d1" x) = Pattern 1 <$> (valFromObj "d1" x)
  readJSON (JSObject x) | (firstKeyIs "d2" x) = Pattern 2 <$> (valFromObj "d2" x)
  readJSON (JSObject x) | (firstKeyIs "d3" x) = Pattern 3 <$> (valFromObj "d3" x)
  readJSON (JSObject x) | (firstKeyIs "d4" x) = Pattern 4 <$> (valFromObj "d4" x)
  readJSON (JSObject x) | (firstKeyIs "d5" x) = Pattern 5 <$> (valFromObj "d5" x)
  readJSON (JSObject x) | (firstKeyIs "d6" x) = Pattern 6 <$> (valFromObj "d6" x)
  readJSON (JSObject x) | (firstKeyIs "d7" x) = Pattern 7 <$> (valFromObj "d7" x)
  readJSON (JSObject x) | (firstKeyIs "d8" x) = Pattern 8 <$> (valFromObj "d8" x)
  readJSON (JSObject x) | (firstKeyIs "d9" x) = Pattern 9 <$> (valFromObj "d9" x)
  readJSON (JSObject x) | (firstKeyIs "info" x) = Info <$> (valFromObj "info" x)
  readJSON (JSObject x) | (firstKeyIs "render" x) = Render <$> (valFromObj "render" x) <*> (valFromObj "cps" x) <*> (valFromObj "cycles" x)
  readJSON _ = Error "First key must be cps, hush, info or d1-9"
-- | True iff the first key of the JSON object equals the given key.
-- An empty object matches nothing.
firstKeyIs :: String -> JSObject JSValue -> Bool
firstKeyIs key obj =
  case fromJSObject obj of
    ((k, _) : _) -> k == key
    _            -> False
| Moskau/estuary | server/Request.hs | gpl-3.0 | 2,151 | 0 | 12 | 470 | 1,008 | 500 | 508 | 39 | 2 |
module Balconies where
import Data.Maybe (mapMaybe)
import Algebra
import DataStructure
import Utils
-- Check whether the balcony passes over the starting point of the skeleton.
-- If it does not: mark the edges from start to end.
-- If it does: mark the edges from the end to edgeEnd and from edgeStart to the start.
-- check if balcony crosses the starting line
-- assume balcony is "normal"
-- check any point on the first edge that should have balcony
-- if it doesn't have balcony, then balcony is not normal
-- | The vertices of the balcony outline: the geometry unfolded from the
-- projected start point, closed with the projected end point.
getBalcPoints :: [Edge] -> Balcony -> [Vec]
getBalcPoints es balc = pointsFrom (pointFromSkel es $ balcStart balc) (balcGeom balc) ++ [pointFromSkel es $ balcEnd balc]
-- | The balcony outline as (point, direction-vector) segments.
getBalcLines :: [Edge] -> Balcony -> [Line]
getBalcLines es balc = zip points (vectorsFromPoints points)
  where points = getBalcPoints es balc
-- moving away perpendicularly from a point onSkel returns list with the balcony edges you meet
--
-- Each result is an (s, t) intersection parameter pair: s > 0 keeps only
-- hits in front of the ray, 0 < t < 1 keeps hits inside the segment.
getBalcFromSkel :: [Edge] -> Balcony -> OnSkel -> [(Double, Double)]
getBalcFromSkel es balc p = filter ((<1) . snd) $ filter ((>0) . snd) $ filter ((>0) . fst) $ mapMaybe (intersect perp ) (getBalcLines es balc)
  -- NOTE(review): `es !! (fst p - 1)` is partial — assumes `fst p` is a
  -- valid 1-based edge index into `es`; TODO confirm with callers.
  where perp = (pointFromSkel es p, cw $ geom (es !! (fst p - 1)))
-- | A skeleton point lies under the balcony iff the perpendicular ray
-- from it crosses the balcony outline an odd number of times.
hasBalcony :: [Edge] -> Balcony -> OnSkel -> Bool
hasBalcony es balc p = odd (length (getBalcFromSkel es balc p))
-- | Distance along the perpendicular ray to the first balcony-outline
-- crossing, or 'Nothing' when the ray meets no balcony edge.
--
-- (Replaces the previous @null@-guarded @head@ with a total pattern match.)
getBalcDist :: [Edge] -> Balcony -> OnSkel -> Maybe Double
getBalcDist es balc p =
  case getBalcFromSkel es balc p of
    []           -> Nothing
    ((d, _) : _) -> Just d
-- returns the point that if it has balcony then the balcony doesn't cross the start
-- any point on the first edge of the balcony that is after the starting point
--
-- The probe sits 44% of the way between the start and either the end
-- (same edge) or the edge boundary.
-- NOTE(review): 0.44 looks like an arbitrary "somewhere in between"
-- factor — TODO confirm there is no geometric significance.
getPivotPoint :: Balcony -> OnSkel
getPivotPoint balc
  | fst start == fst end = (fst start, snd start + 0.44 * (snd end - snd start))
  | otherwise = (fst start, snd start + 0.44 * (1 - snd start))
  -- A start at parameter 1 is normalised to parameter 0 of the next edge.
  where start' = min (balcStart balc) (balcEnd balc)
        start = if snd start' == 1 then (fst start' + 1, 0) else start'
        end = max (balcStart balc) (balcEnd balc)
-- returns the parts of the edges that are affected by the balcony
--
-- If the probe point (see "getPivotPoint") is under the balcony, the
-- affected span is the single sorted (start, end) interval; otherwise the
-- balcony wraps around the skeleton start, so the complement is returned
-- as two intervals.
getEdgeWithBalc :: [Edge] -> Balcony -> [(OnSkel, OnSkel)]
getEdgeWithBalc es balc
  | hasBalcony es balc $ getPivotPoint balc = [sorted]
  | otherwise = [((0, 0), fst sorted), (snd sorted, (length es, 1))]
  where sorted = sortTupple (balcStart balc, balcEnd balc)
| apanagio/easykenak | slicer2/src/Balconies.hs | gpl-3.0 | 2,486 | 0 | 14 | 491 | 752 | 395 | 357 | 31 | 2 |
p :: c -> a
q :: c -> b | hmemcpy/milewski-ctfp-pdf | src/content/1.5/code/haskell/snippet09.hs | gpl-3.0 | 23 | 0 | 5 | 9 | 19 | 10 | 9 | 2 | 0 |
{-# LANGUAGE GADTs, RankNTypes, ScopedTypeVariables, BangPatterns #-}
module Control.Fusion.List.Nu where
import Data.List (unfoldr)
import Control.Comonad
import Data.Foldable as T hiding (fold)
import Data.Monoid
---------------
-- Nu lists
-- List functor
-- | One step of list production: either the end of the list, or an
-- element together with the next seed. This is the base functor of lists.
data Step a s = Done | Yield a s
-- and its greatest fix point
-- | A list represented by an existential seed together with a step
-- function (a coalgebra @s -> Step a s@). May be infinite.
data NuList a where
  Unfold :: s -> (s -> Step a s) -> NuList a

-- | Shown via conversion to an ordinary list; diverges on infinite lists.
instance Show a => Show (NuList a) where
  show = show . nuToList
{-
-- | blerg
{-# INLINE concatMapNu #-}
concatMapNu :: (a -> NuList b) -> NuList a -> NuList b
concatMapNu f (Unfold sa0 nexta) = Unfold (sa0, Nothing) (uncurry next) where
next sa Nothing = case nexta sa of
Done -> Done
Yield a sa' -> next sa' (Just (f a)) -- ! uses general recursion !
next sa (Just (Unfold sb nextb)) = case nextb sb of
Done -> next sa Nothing
Yield b sb' -> Yield b (sa,Just (Unfold sb' nextb))
instance Monad NuList where
{-# INLINE return #-}
return x = Unfold True $ \s -> case s of
True -> Yield x False
False -> Done
{-# INLINE (>>=) #-}
(>>=) = flip concatMapNu -- Not *really* a monad: uses general recursion.
-}
{-# INLINE stepToMaybe #-}
-- | 'Step' and @Maybe (a, s)@ are isomorphic; this is one direction…
stepToMaybe :: Step t t1 -> Maybe (t, t1)
stepToMaybe step = case step of
  Done      -> Nothing
  Yield a s -> Just (a, s)

{-# INLINE maybeToStep #-}
-- | …and this is the other.
maybeToStep :: Maybe (t, t1) -> Step t t1
maybeToStep = maybe Done (uncurry Yield)
{-# INLINE nuToList #-}
-- | Observe a 'NuList' as an ordinary list by running its step function.
nuToList :: NuList a -> [a]
nuToList (Unfold seed psi) = unfoldr (stepToMaybe . psi) seed

{-# INLINE nuFromList #-}
-- | Synonym for 'listToNu'.
nuFromList :: [a] -> NuList a
nuFromList = listToNu

{-# INLINE listToNu #-}
-- | Embed an ordinary list; the remaining list itself is the seed.
listToNu :: [a] -> NuList a
listToNu xs0 = Unfold xs0 step
  where step []       = Done
        step (y : ys) = Yield y ys
-- | Mapping keeps the seed untouched and post-processes each element
-- the step function yields.
instance Functor NuList where
  fmap f (Unfold seed step) = Unfold seed step'
    where
      step' s = case step s of
        Done      -> Done
        Yield a t -> Yield (f a) t
{-# INLINE zipNu #-}
-- | Pair up two lists, stopping at the shorter one.
zipNu :: NuList a -> NuList b -> NuList (a,b)
zipNu = zipWithNu (,)

{-# INLINE zipWithNu #-}
-- | Zip with a combining function; the state is the pair of both seeds.
-- The lazy @~@ pattern keeps the state pair from being forced until the
-- step function actually inspects it.
zipWithNu :: (a -> b -> c) -> NuList a -> NuList b -> NuList c
zipWithNu f (Unfold s1 psi1) (Unfold s2 psi2) = Unfold (s1,s2) go
  where go ~(t1,t2) = case (psi1 t1,psi2 t2) of
          (Done,_) -> Done
          (_,Done) -> Done
          (Yield x u1,Yield y u2) -> Yield (f x y) (u1,u2)
{-# INLINE takeNu #-}
-- | First @n@ elements; the remaining count is threaded through the seed.
takeNu :: Int -> NuList a -> NuList a
takeNu n0 (Unfold s0 step) = Unfold (n0, s0) advance
  where
    advance (n, s)
      | n == 0    = Done
      | otherwise = case step s of
          Done      -> Done
          Yield x t -> Yield x (n - 1, t)

{-# INLINE takeWhileNu #-}
-- | Longest prefix whose elements satisfy the predicate.
takeWhileNu :: (a -> Bool) -> NuList a -> NuList a
takeWhileNu p (Unfold s0 step) = Unfold s0 advance
  where
    advance s = case step s of
      Yield x t | p x -> Yield x t
      _               -> Done
{-# INLINE enumNu #-}
-- | @[0 .. to]@ as a 'NuList'.
enumNu :: (Num a,Ord a) => a -> NuList a
enumNu to = Unfold 0 step
  where step n | n <= to   = Yield n (n + 1)
               | otherwise = Done

{-# INLINE enumFromNu #-}
-- | The infinite list @[from, from+1 ..]@.
enumFromNu :: Num a => a -> NuList a
enumFromNu from = Unfold from (\n -> Yield n (n + 1))

{-# INLINE enumFromToNu #-}
-- | @[from .. to]@ with step 1.
enumFromToNu :: (Num a,Ord a) => a -> a -> NuList a
enumFromToNu from = enumFromThenToNu from (from + 1)

{-# INLINE enumFromThenToNu #-}
-- | @[from, thn .. to]@; the step is @thn - from@. Note the stop test is
-- always @n <= to@, even for a negative step.
enumFromThenToNu :: (Num a,Ord a) => a -> a -> a -> NuList a
enumFromThenToNu from thn to = Unfold from step
  where
    delta = thn - from
    step n | n <= to   = Yield n (n + delta)
           | otherwise = Done
{-# INLINE scanNu #-}
-- | Like @scanl@: yields the initial accumulator first, so the result is
-- one element longer than the input. The @Nothing@ state marks that the
-- final accumulator has already been emitted.
scanNu :: (b -> a -> b) -> b -> NuList a -> NuList b
scanNu f k (Unfold s0 psi) = Unfold (Just (k,s0)) go where
  go Nothing = Done
  go (Just (acc,s)) = case psi s of
    Done -> Yield acc Nothing
    Yield x t -> Yield acc (Just (f acc x,t))
{-# INLINE iterateNu #-}
-- | The infinite list @x0, f x0, f (f x0), …@.
iterateNu :: (a -> a) -> a -> NuList a
iterateNu f x0 = Unfold x0 (\x -> Yield x (f x))
{-
-- Do not fuse: use freeze
dropNu :: Int -> NuList a -> NuList a
dropNu n0 (Unfold s0 psi) = Unfold (go n0 s0) psi
where go 0 s = s
go n s = case psi s of
Yield _ t -> go (n-1) t
Done -> s
dropWhileNu :: (a -> Bool) -> NuList a -> NuList a
dropWhileNu p (Unfold s0 psi) = Unfold (go s0) psi
where go s = case psi s of
Yield x t -> if p x then go t else t
Done -> s
splitAtNu :: Int -> NuList a -> (NuList a, NuList a)
splitAtNu n xs = (takeNu n xs,dropNu n xs)
spanNu :: (a -> Bool) -> NuList a -> (NuList a, NuList a)
spanNu p xs = (takeWhileNu p xs,dropWhileNu p xs)
-}
-- | 'extract' is the head (PARTIAL: errors on the empty list);
-- 'duplicate' yields the list of successive tails (each sharing the
-- original step function with its current seed).
instance Comonad NuList where
  extract (Unfold s0 psi) = case psi s0 of
    Done -> error "extract: empty NuList"
    Yield x _ -> x
  duplicate (Unfold s0 psi) = Unfold s0 go where
    go s = case psi s of
      Done -> Done
      Yield _ t -> Yield (Unfold s psi) t
-- | Build a 'NuList' from a seed and a step function in 'Maybe' form.
unfold :: (b -> Maybe (a, b)) -> b -> NuList a
unfold psi seed = Unfold seed (maybeToStep . psi)

-- | The infinite list repeating one element; the seed carries no state.
repeatNu :: a -> NuList a
repeatNu a = Unfold () (\_ -> Yield a ())
-- | Repeat the list forever. The state pair carries the current seed and
-- the (constant) restart seed; the empty list stays empty.
cycleNu :: NuList a -> NuList a
cycleNu (Unfold s0 psi) = Unfold (s0,s0) psi'
  where psi' (s,s') =
          case psi s of
            Done -> case psi s' of
              Done -> Done -- exception empty list?
              Yield a s'' -> Yield a (s'',s')
            Yield a s'' -> Yield a (s'',s')

-- | Like 'cycleNu' but PARTIAL: checks the list up-front and errors on
-- an empty input, which lets the step function restart directly from
-- @psi s0@ without carrying the extra restart state.
cycleNu' :: NuList a -> NuList a
cycleNu' (Unfold s0 psi) = case psi s0 of
    Done -> error "cycleNu': empty list"
    _ -> Unfold s0 psi'
  where
    psi' s = case psi s of
      Done -> psi s0
      y -> y
-- TODO: check if fusion happens in the above case.
-- intersperseNu see coutts & al
-- | Prepend an element; the @Nothing@ state means the new head has not
-- been emitted yet.
consNu :: a -> NuList a -> NuList a
consNu a0 (Unfold s0 psi) = Unfold Nothing psi'
  where psi' Nothing = Yield a0 (Just s0)
        psi' (Just s) = case psi s of
          Done -> Done
          Yield b s' -> Yield b (Just s')

-- | Append an element; the @Nothing@ state means the extra last element
-- has already been emitted.
snocNu :: NuList a -> a -> NuList a
snocNu (Unfold s0 psi) a0 = Unfold (Just s0) psi'
  where psi' (Just s) = case psi s of
          Yield b s' -> Yield b (Just s')
          Done -> Yield a0 Nothing
        psi' Nothing = Done
{-
consNu :: a -> NuList a -> NuList a
consNu a0 (Unfold s0 psi) = Unfold (Just (a0,s0)) psi'
where psi' Nothing = Done
psi' (Just (a,s)) = case psi s of
Done -> Yield a Nothing
Yield b s' -> Yield a (Just (b,s'))
-}
-- | The empty 'NuList'.
nilNu :: NuList a
nilNu = Unfold () (const Done)

-- | PARTIAL: errors on the empty list (like 'Prelude.head').
headNu :: NuList a -> a
headNu (Unfold s psi) = case psi s of
  Done -> error "headNu: empty list"
  Yield a _ -> a

-- | PARTIAL: errors on the empty list. Note the error is deferred until
-- the new seed @s'@ is actually forced by the consumer.
tailNu :: NuList a -> NuList a
tailNu (Unfold s psi) = Unfold s' psi
  where s' = case psi s of
          Done -> error "tailNu: empty list"
          Yield _ s'' -> s''

-- | One-step view: either 'Done', or the head together with the
-- 'NuList' of the rest.
viewNu :: NuList a -> Step a (NuList a)
viewNu (Unfold s psi) = case psi s of
  Done -> Done
  Yield a s' -> Yield a (Unfold s' psi)
-- loops
-- | Tail-recursive, strict, left fold. Attention: this may allocate a data structure on the heap if the
-- accumulating function is lazy. Example: if the accumulator type is
-- a lazy tuple, typically its components won't be forced. Thus the
-- function will just construct a long chain of thunks.
{-# INLINE foldNu #-}
-- | Strict left fold (the bang pattern forces the accumulator to WHNF on
-- every step, preventing a thunk chain). Diverges on infinite lists.
foldNu :: (b -> a -> b) -> b -> NuList a -> b
foldNu f k (Unfold s0 psi) = go k s0
  where go !acc s = case psi s of
          Done -> acc
          Yield x t -> go (f acc x) t
-- | Folding goes through the strict 'foldNu', accumulating to the left.
instance Foldable NuList where
  foldMap f = foldNu (\b a -> b `mappend` f a) mempty

{-# INLINE sumNu #-}
-- | Strict sum of an 'Int' list.
sumNu :: NuList Int -> Int
sumNu = foldNu (+) 0
| jyp/ControlledFusion | Control/Fusion/List/Nu.hs | gpl-3.0 | 7,386 | 0 | 14 | 2,100 | 2,425 | 1,228 | 1,197 | 146 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
-- |
-- Module : Network.Google.PlayCustomApp
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- API to create and publish custom Android apps
--
-- /See:/ <https://developers.google.com/android/work/play/custom-app-api/ Google Play Custom App Publishing API Reference>
module Network.Google.PlayCustomApp
(
-- * Service Configuration
playCustomAppService
-- * OAuth Scopes
, androidPublisherScope
-- * API Declaration
, PlayCustomAppAPI
-- * Resources
-- ** playcustomapp.accounts.customApps.create
, module Network.Google.Resource.PlayCustomApp.Accounts.CustomApps.Create
-- * Types
-- ** Xgafv
, Xgafv (..)
-- ** CustomApp
, CustomApp
, customApp
, caLanguageCode
, caPackageName
, caTitle
) where
import Network.Google.Prelude
import Network.Google.PlayCustomApp.Types
import Network.Google.Resource.PlayCustomApp.Accounts.CustomApps.Create
{- $resources
TODO
-}
-- | Represents the entirety of the methods and resources available for the Google Play Custom App Publishing API service.
type PlayCustomAppAPI =
AccountsCustomAppsCreateResource
| brendanhay/gogol | gogol-playcustomapp/gen/Network/Google/PlayCustomApp.hs | mpl-2.0 | 1,533 | 0 | 5 | 293 | 106 | 83 | 23 | 22 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DialogFlow.Projects.Agent.Sessions.EntityTypes.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves the specified session entity type.
--
-- /See:/ <https://cloud.google.com/dialogflow-enterprise/ Dialogflow API Reference> for @dialogflow.projects.agent.sessions.entityTypes.get@.
module Network.Google.Resource.DialogFlow.Projects.Agent.Sessions.EntityTypes.Get
(
-- * REST Resource
ProjectsAgentSessionsEntityTypesGetResource
-- * Creating a Request
, projectsAgentSessionsEntityTypesGet
, ProjectsAgentSessionsEntityTypesGet
-- * Request Lenses
, paSetgXgafv
, paSetgUploadProtocol
, paSetgAccessToken
, paSetgUploadType
, paSetgName
, paSetgCallback
) where
import Network.Google.DialogFlow.Types
import Network.Google.Prelude
-- | A resource alias for @dialogflow.projects.agent.sessions.entityTypes.get@ method which the
-- 'ProjectsAgentSessionsEntityTypesGet' request conforms to.
type ProjectsAgentSessionsEntityTypesGetResource =
"v2" :>
Capture "name" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] GoogleCloudDialogflowV2SessionEntityType
-- | Retrieves the specified session entity type.
--
-- /See:/ 'projectsAgentSessionsEntityTypesGet' smart constructor.
data ProjectsAgentSessionsEntityTypesGet =
ProjectsAgentSessionsEntityTypesGet'
{ _paSetgXgafv :: !(Maybe Xgafv)
, _paSetgUploadProtocol :: !(Maybe Text)
, _paSetgAccessToken :: !(Maybe Text)
, _paSetgUploadType :: !(Maybe Text)
, _paSetgName :: !Text
, _paSetgCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsAgentSessionsEntityTypesGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'paSetgXgafv'
--
-- * 'paSetgUploadProtocol'
--
-- * 'paSetgAccessToken'
--
-- * 'paSetgUploadType'
--
-- * 'paSetgName'
--
-- * 'paSetgCallback'
projectsAgentSessionsEntityTypesGet
:: Text -- ^ 'paSetgName'
-> ProjectsAgentSessionsEntityTypesGet
projectsAgentSessionsEntityTypesGet pPaSetgName_ =
ProjectsAgentSessionsEntityTypesGet'
{ _paSetgXgafv = Nothing
, _paSetgUploadProtocol = Nothing
, _paSetgAccessToken = Nothing
, _paSetgUploadType = Nothing
, _paSetgName = pPaSetgName_
, _paSetgCallback = Nothing
}
-- | V1 error format.
paSetgXgafv :: Lens' ProjectsAgentSessionsEntityTypesGet (Maybe Xgafv)
paSetgXgafv
= lens _paSetgXgafv (\ s a -> s{_paSetgXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
paSetgUploadProtocol :: Lens' ProjectsAgentSessionsEntityTypesGet (Maybe Text)
paSetgUploadProtocol
= lens _paSetgUploadProtocol
(\ s a -> s{_paSetgUploadProtocol = a})
-- | OAuth access token.
paSetgAccessToken :: Lens' ProjectsAgentSessionsEntityTypesGet (Maybe Text)
paSetgAccessToken
= lens _paSetgAccessToken
(\ s a -> s{_paSetgAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
paSetgUploadType :: Lens' ProjectsAgentSessionsEntityTypesGet (Maybe Text)
paSetgUploadType
= lens _paSetgUploadType
(\ s a -> s{_paSetgUploadType = a})
-- | Required. The name of the session entity type. Format:
-- \`projects\/\/agent\/sessions\/\/entityTypes\/\`.
paSetgName :: Lens' ProjectsAgentSessionsEntityTypesGet Text
paSetgName
= lens _paSetgName (\ s a -> s{_paSetgName = a})
-- | JSONP
paSetgCallback :: Lens' ProjectsAgentSessionsEntityTypesGet (Maybe Text)
paSetgCallback
= lens _paSetgCallback
(\ s a -> s{_paSetgCallback = a})
instance GoogleRequest
ProjectsAgentSessionsEntityTypesGet
where
type Rs ProjectsAgentSessionsEntityTypesGet =
GoogleCloudDialogflowV2SessionEntityType
type Scopes ProjectsAgentSessionsEntityTypesGet =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/dialogflow"]
requestClient
ProjectsAgentSessionsEntityTypesGet'{..}
= go _paSetgName _paSetgXgafv _paSetgUploadProtocol
_paSetgAccessToken
_paSetgUploadType
_paSetgCallback
(Just AltJSON)
dialogFlowService
where go
= buildClient
(Proxy ::
Proxy ProjectsAgentSessionsEntityTypesGetResource)
mempty
| brendanhay/gogol | gogol-dialogflow/gen/Network/Google/Resource/DialogFlow/Projects/Agent/Sessions/EntityTypes/Get.hs | mpl-2.0 | 5,474 | 0 | 15 | 1,195 | 701 | 411 | 290 | 109 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE Rank2Types #-}
module Network.JsonRpc.Tests (tests) where
import Control.Concurrent
import Control.Concurrent.Async
import Control.Concurrent.STM
import Control.Exception hiding (assert)
import Control.Monad
import Data.Aeson.Types
import Data.Conduit
import qualified Data.Conduit.List as CL
import Data.List
import Data.Conduit.Network
import Data.Conduit.TMChan
import qualified Data.HashMap.Strict as M
import Data.Maybe
import Data.Text (Text)
import Network.JsonRpc
import Network.JsonRpc.Arbitrary
import Test.QuickCheck
import Test.QuickCheck.Monadic
import Test.Framework
import Test.Framework.Providers.QuickCheck2
tests :: [Test]
tests =
[ testGroup "JSON-RPC Requests"
[ testProperty "Check fields"
(reqFields :: Request Value -> Bool)
, testProperty "Encode/decode"
(reqDecode :: Request Value -> Bool)
]
, testGroup "JSON-RPC Notifications"
[ testProperty "Check fields"
(notifFields :: Notif Value -> Bool)
, testProperty "Encode/decode"
(notifDecode :: Notif Value -> Bool)
]
, testGroup "JSON-RPC Responses"
[ testProperty "Check fields"
(resFields :: Response Value -> Bool)
, testProperty "Encode/decode"
(resDecode :: ReqRes Value Value -> Bool)
, testProperty "Bad response id"
(rpcBadResId :: ReqRes Value Value -> Bool)
, testProperty "Error response"
(rpcErrRes :: (ReqRes Value Value, ErrorObj) -> Bool)
]
, testGroup "JSON-RPC Conduits"
[ testProperty "Outgoing conduit"
(newMsgConduit :: [Message Value Value Value] -> Property)
, testProperty "Decode requests"
(decodeReqConduit :: ([Request Value], Ver) -> Property)
, testProperty "Decode responses"
(decodeResConduit :: ([ReqRes Value Value], Ver) -> Property)
, testProperty "Bad responses"
(decodeErrConduit :: ([ReqRes Value Value], Ver) -> Property)
, testProperty "Sending messages" sendMsgNet
, testProperty "Two-way communication" twoWayNet
, testProperty "Real network communication" realNet
]
]
--
-- Requests
--
reqFields :: (ToRequest a, ToJSON a) => Request a -> Bool
reqFields rq = case rq of
Request V1 m p i -> r1ks && vals m p i
Request V2 m p i -> r2ks && vals m p i
where
(Object o) = toJSON rq
r1ks = sort (M.keys o) == ["id", "method", "params"]
r2ks = sort (M.keys o) == ["id", "jsonrpc", "method", "params"]
|| sort (M.keys o) == ["id", "jsonrpc", "method"]
vals m p i = fromMaybe False $ parseMaybe (f m p i) o
f m p i _ = do
j <- o .:? "jsonrpc"
guard $ fromMaybe True $ fmap (== ("2.0" :: Text)) j
i' <- o .: "id"
guard $ i == i'
m' <- o .: "method"
guard $ m == m'
p' <- o .:? "params" .!= Null
guard $ (toJSON p) == p'
return True
-- | Round-trip property: encoding a request to JSON and parsing it back
-- must succeed and yield a request equal to the original.
reqDecode :: (Eq a, ToRequest a, ToJSON a, FromRequest a) => Request a -> Bool
reqDecode rq =
    maybe False (either (const False) (rq ==)) $
        parseMaybe parseRequest (toJSON rq)
--
-- Notifications
--
notifFields :: (ToNotif a, ToJSON a) => Notif a -> Bool
notifFields rn = case rn of
Notif V1 m p -> n1ks && vals m p
Notif V2 m p -> n2ks && vals m p
where
(Object o) = toJSON rn
n1ks = sort (M.keys o) == ["id", "method", "params"]
n2ks = sort (M.keys o) == ["jsonrpc", "method", "params"]
|| sort (M.keys o) == ["jsonrpc", "method"]
vals m p = fromMaybe False $ parseMaybe (f m p) o
f m p _ = do
i <- o .:? "id" .!= Null
guard $ i == Null
j <- o .:? "jsonrpc"
guard $ fromMaybe True $ fmap (== ("2.0" :: Text)) j
m' <- o .: "method"
guard $ m == m'
p' <- o .:? "params" .!= Null
guard $ (toJSON p) == p'
return True
-- | Round-trip property: encoding a notification to JSON and parsing it
-- back must succeed and yield a notification equal to the original.
notifDecode :: (Eq a, ToNotif a, ToJSON a, FromNotif a)
            => Notif a -> Bool
notifDecode rn =
    maybe False (either (const False) (rn ==)) $
        parseMaybe parseNotif (toJSON rn)
--
-- Responses
--
resFields :: (Eq a, ToJSON a, FromJSON a) => Response a -> Bool
resFields rs = case rs of
Response V1 s i -> s1ks && vals s i
Response V2 s i -> s2ks && vals s i
where
(Object o) = toJSON rs
s1ks = sort (M.keys o) == ["error", "id", "result"]
s2ks = sort (M.keys o) == ["id", "jsonrpc", "result"]
vals s i = fromMaybe False $ parseMaybe (f s i) o
f s i _ = do
i' <- o .: "id"
guard $ i == i'
j <- o .:? "jsonrpc"
guard $ fromMaybe True $ fmap (== ("2.0" :: Text)) j
s' <- o .: "result"
guard $ s == s'
e <- o .:? "error" .!= Null
guard $ e == Null
return True
-- | Round-trip property: a response encoded to JSON must parse back
-- (against its originating request) to an equal response.
resDecode :: (Eq r, ToJSON r, FromResponse r)
          => ReqRes q r -> Bool
resDecode (ReqRes rq rs) =
    maybe False (either (const False) (rs ==)) $
        parseMaybe (parseResponse rq) (toJSON rs)
-- | A response whose id has been nulled out must fail to parse against
-- the original request (id mismatch), i.e. 'parseMaybe' yields 'Nothing'.
rpcBadResId :: forall q r. (ToJSON r, FromResponse r)
            => ReqRes q r -> Bool
rpcBadResId (ReqRes rq rs) = case parseMaybe f (toJSON rs') of
    Nothing -> True
    _ -> False
  where
    f :: FromResponse r => Value -> Parser (Either ErrorObj (Response r))
    f = parseResponse rq
    -- break the correlation between request and response ids
    rs' = rs { getResId = IdNull }
-- | An error object stamped with the request's id must parse as a
-- 'Left' error result for that request.
rpcErrRes :: forall q r. FromResponse r => (ReqRes q r, ErrorObj) -> Bool
rpcErrRes (ReqRes rq _, re) = case parseMaybe f (toJSON re') of
    Nothing -> False
    Just (Left _) -> True
    _ -> False
  where
    f :: FromResponse r => Value -> Parser (Either ErrorObj (Response r))
    f = parseResponse rq
    -- give the error the same id as the request so it correlates
    re' = re { getErrId = getReqId rq }
--
-- Conduit
--
newMsgConduit :: ( ToRequest q, ToJSON q, ToNotif n, ToJSON n
, ToJSON r, FromResponse r )
=> [Message q n r] -> Property
newMsgConduit (snds) = monadicIO $ do
msgs <- run $ do
qs <- atomically initSession
CL.sourceList snds' $= msgConduit False qs $$ CL.consume
assert $ length msgs == length snds'
assert $ length (filter rqs msgs) == length (filter rqs snds')
assert $ map idn (filter rqs msgs) == take (length (filter rqs msgs)) [1..]
where
rqs (MsgRequest _) = True
rqs _ = False
idn (MsgRequest rq) = getIdInt $ getReqId rq
idn _ = error "Unexpected request"
snds' = flip map snds $ \m -> case m of
(MsgRequest rq) -> MsgRequest $ rq { getReqId = IdNull }
_ -> m
decodeReqConduit :: forall q. (ToRequest q, FromRequest q, Eq q, ToJSON q)
=> ([Request q], Ver) -> Property
decodeReqConduit (vs, ver) = monadicIO $ do
inmsgs <- run $ do
qs <- atomically initSession
qs' <- atomically initSession
CL.sourceList vs
$= CL.map f
$= msgConduit False qs
$= encodeConduit
$= decodeConduit ver False qs'
$$ CL.consume
assert $ null $ filter unexpected inmsgs
assert $ all (uncurry match) (zip vs inmsgs)
where
unexpected :: IncomingMsg () q () () -> Bool
unexpected (IncomingMsg (MsgRequest _) Nothing) = False
unexpected _ = True
match rq (IncomingMsg (MsgRequest rq') _) =
rq { getReqId = getReqId rq' } == rq'
match _ _ = False
f rq = MsgRequest $ rq { getReqId = IdNull } :: Message q () ()
decodeResConduit :: forall q r.
( ToRequest q, FromRequest q, Eq q, ToJSON q, ToJSON r
, FromResponse r, Eq r )
=> ([ReqRes q r], Ver) -> Property
decodeResConduit (rr, ver) = monadicIO $ do
inmsgs <- run $ do
qs <- atomically initSession
qs' <- atomically initSession
CL.sourceList vs
$= CL.map f
$= msgConduit False qs
$= encodeConduit
$= decodeConduit ver False qs'
$= CL.map respond
$= encodeConduit
$= decodeConduit ver False qs
$$ CL.consume
assert $ null $ filter unexpected inmsgs
assert $ all (uncurry match) (zip vs inmsgs)
where
unexpected :: IncomingMsg q () () r -> Bool
unexpected (IncomingMsg (MsgResponse _) (Just _)) = False
unexpected _ = True
match rq (IncomingMsg (MsgResponse rs) (Just rq')) =
rq { getReqId = getReqId rq' } == rq'
&& rs == g rq'
match _ _ = False
respond :: IncomingMsg () q () () -> Response r
respond (IncomingMsg (MsgRequest rq) Nothing) = g rq
respond _ = undefined
f rq = MsgRequest $ rq { getReqId = IdNull } :: Message q () ()
vs = map (\(ReqRes rq _) -> rq) rr
g rq = let (ReqRes _ rs) = fromJust $ find h rr
h (ReqRes rq' _) = getReqParams rq == getReqParams rq'
in rs { getResId = getReqId rq }
-- | Requests are pushed through encode/decode to a peer that answers
-- every one with an error response; the errors coming back must be
-- matched to their originating requests by the session tracking.
decodeErrConduit :: forall q r.
                    ( ToRequest q, FromRequest q, Eq q, ToJSON q, ToJSON r
                    , FromResponse r, Eq r, Show r, Show q )
                 => ([ReqRes q r], Ver) -> Property
decodeErrConduit (vs, ver) = monadicIO $ do
    inmsgs <- run $ do
        qs <- atomically initSession
        qs' <- atomically initSession
        CL.sourceList vs
            $= CL.map f
            $= msgConduit False qs
            $= encodeConduit
            $= decodeConduit ver False qs'
            $= CL.map respond
            $= encodeConduit
            $= decodeConduit ver False qs
            $$ CL.consume
    assert $ null $ filter unexpected inmsgs
    assert $ all (uncurry match) (zip vs inmsgs)
  where
    -- Anything other than a matched error message is unexpected.
    unexpected :: IncomingMsg q () () r -> Bool
    unexpected (IncomingMsg (MsgError _) (Just _)) = False
    -- unexpected _ = True
    -- NOTE(review): debug leftover — the commented clause above returned
    -- True (failing the property); this clause aborts with 'error' and
    -- dumps the offending message instead. Consider restoring the
    -- original clause once debugging is done.
    unexpected i = error $ show i
    -- The echoed-back request (with its real id restored) must equal ours.
    match (ReqRes rq _) (IncomingMsg (MsgError _) (Just rq')) =
        rq' { getReqId = getReqId rq } == rq
    match _ _ = False
    -- Peer side: answer every request with an ErrorObj carrying its id.
    respond :: IncomingMsg () q () () -> ErrorObj
    respond (IncomingMsg (MsgRequest (Request ver' _ _ i)) Nothing) =
        ErrorObj ver' "test" (getIdInt i) Null i
    respond _ = undefined
    f (ReqRes rq _) = MsgRequest $ rq { getReqId = IdNull } :: Message q () ()
type ClientAppConduits = AppConduits Value Value Value () () () IO
type ServerAppConduits = AppConduits () () () Value Value Value IO
sendMsgNet :: ([Message Value Value Value], Ver) -> Property
sendMsgNet (rs, ver) = monadicIO $ do
rt <- run $ do
mv <- newEmptyMVar
to <- atomically $ newTBMChan 128
ti <- atomically $ newTBMChan 128
let tiSink = sinkTBMChan ti True
toSource = sourceTBMChan to
toSink = sinkTBMChan to True
tiSource = sourceTBMChan ti
withAsync (srv tiSink toSource mv) $ \_ ->
runConduits ver False toSink tiSource (cliApp mv)
assert $ length rt == length rs
assert $ all (uncurry match) (zip rs rt)
where
srv tiSink toSource mv = runConduits ver False tiSink toSource (srvApp mv)
srvApp :: MVar [IncomingMsg () Value Value Value]
-> ServerAppConduits -> IO ()
srvApp mv (src, snk) =
(CL.sourceNull $$ snk) >> (src $$ CL.consume) >>= putMVar mv
cliApp :: MVar [IncomingMsg () Value Value Value]
-> ClientAppConduits -> IO [IncomingMsg () Value Value Value]
cliApp mv (src, snk) =
(CL.sourceList rs $$ snk) >> (src $$ CL.sinkNull) >> readMVar mv
match (MsgRequest rq) (IncomingMsg (MsgRequest rq') Nothing) =
rq == rq'
match (MsgNotif rn) (IncomingMsg (MsgNotif rn') Nothing) =
rn == rn'
match (MsgResponse _) (IncomingError e) =
getErrMsg e == "Id not recognized"
match (MsgError e) (IncomingMsg (MsgError e') Nothing) =
getErrMsg e == getErrMsg e'
match (MsgError _) (IncomingError e) =
getErrMsg e == "Id not recognized"
match _ _ = False
type TwoWayAppConduits = AppConduits Value Value Value Value Value Value IO
twoWayNet :: ([Message Value Value Value], Ver) -> Property
twoWayNet (rr, ver) = monadicIO $ do
rt <- run $ do
to <- atomically $ newTBMChan 128
ti <- atomically $ newTBMChan 128
let tiSink = sinkTBMChan ti True
toSource = sourceTBMChan to
toSink = sinkTBMChan to True
tiSource = sourceTBMChan ti
withAsync (srv tiSink toSource) $ \_ ->
runConduits ver False toSink tiSource cliApp
assert $ length rt == length rs
assert $ all (uncurry match) (zip rs rt)
where
rs = map f rr where
f (MsgRequest rq) = MsgRequest $ rq { getReqId = IdNull }
f m = m
cliApp :: TwoWayAppConduits -> IO [IncomingMsg Value Value Value Value]
cliApp (src, snk) = (CL.sourceList rs $$ snk) >> (src $$ CL.consume)
srv tiSink toSource = runConduits ver False tiSink toSource srvApp
srvApp :: TwoWayAppConduits -> IO ()
srvApp (src, snk) = src $= CL.map respond $$ snk
respond (IncomingError e) =
MsgError e
respond (IncomingMsg (MsgRequest (Request ver' _ p i)) _) =
MsgResponse (Response ver' p i)
respond (IncomingMsg (MsgNotif rn) _) =
MsgNotif rn
respond (IncomingMsg (MsgError e) _) =
MsgNotif (Notif (getErrVer e) "error" (toJSON e))
respond _ = undefined
match (MsgRequest (Request ver' m p _))
( IncomingMsg (MsgResponse (Response ver'' p' _))
(Just (Request ver''' m' p'' _)) ) =
p == p' && p == p'' && m == m' && ver' == ver'' && ver'' == ver'''
match (MsgNotif (Notif ver' _ p))
(IncomingMsg (MsgNotif (Notif ver'' _ p')) Nothing) =
p == p' && ver' == ver''
match (MsgResponse (Response ver' _ _))
(IncomingMsg (MsgError e) Nothing) =
ver' == getErrVer e && getErrMsg e == "Id not recognized"
match (MsgError e@(ErrorObj _ _ _ _ IdNull))
(IncomingMsg (MsgNotif (Notif _ "error" e')) Nothing) =
toJSON e == e'
match (MsgError _)
(IncomingMsg (MsgError e) Nothing) =
getErrMsg e == "Id not recognized"
match _ _ = False
realNet :: ([Request Value], Ver) -> Property
realNet (rr, ver) = monadicIO $ do
rs <- run $ do
withAsync (tcpServer ver ss srvApp) $ \_ -> cli
assert $ length rs == length rr
assert $
map (getReqParams . fromJust . matchingReq) rs == map getReqParams rr
where
ss = serverSettings 58493 "127.0.0.1"
cs = clientSettings 58493 "127.0.0.1"
cli = do
cE <- try $ tcpClient ver True cs cliApp
either (const cli) return
(cE :: Either SomeException [IncomingMsg Value () () Value])
srvApp :: AppConduits () () Value Value () () IO -> IO ()
srvApp (src, snk) = src $= CL.map respond $$ snk
cliApp :: AppConduits Value () () () () Value IO
-> IO [IncomingMsg Value () () Value]
cliApp (src, snk) = do
CL.sourceList (map f rr) $$ snk
src $$ CL.consume
where
f rq = MsgRequest (rq { getReqId = IdNull })
respond (IncomingMsg (MsgRequest (Request ver' _ p i)) _) =
MsgResponse (Response ver' p i)
respond _ = undefined
| anton-dessiatov/json-rpc | test/Network/JsonRpc/Tests.hs | unlicense | 15,188 | 0 | 21 | 4,573 | 5,832 | 2,925 | 2,907 | 353 | 11 |
module CostasLikeArrays.A321534 where
import Helpers.CostasLikeArrays (distinctDirections, countPermutationsUpToDihedralSymmetry)
import Helpers.Records (allMax)
import Data.List (permutations)
-- | OEIS A321534: among all permutations of [0..n-1], keep those that
-- maximise the number of distinct directions, and count them up to
-- dihedral symmetry.
a321534 :: Int -> Int
a321534 n =
  countPermutationsUpToDihedralSymmetry n
    (allMax distinctDirections (permutations [0 .. n - 1]))
| peterokagey/haskellOEIS | src/CostasLikeArrays/A321534.hs | apache-2.0 | 321 | 0 | 8 | 30 | 82 | 45 | 37 | 6 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ParallelListComp #-}
{-# LANGUAGE PatternGuards #-}
module GenBase where
import Data.Monoid
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Text.Regex.TDFA
import Text.Regex.TDFA.Text
-- | Read a Hoogle database, drop blacklisted modules\/identifiers, and
-- write two Haskell source files: @baseFile@ defines @module LinkBase@
-- binding every surviving expression to a dummy name @dN@ (forcing the
-- whole base vocabulary to be linked), and @mainFile@ defines a minimal
-- @Main@ that imports it. The entry point depends on @mode@:
-- \"codeworld\" gets a CodeWorld drawing, anything else a no-op main.
generateBaseBundle ::
       FilePath -> [Text] -> String -> FilePath -> FilePath -> IO ()
generateBaseBundle hoogleDB blacklist mode mainFile baseFile = do
    (imports, exprs) <- readHoogleDB hoogleDB blacklist
    let defs =
            -- parallel list comprehension: pair each expression with a
            -- fresh index to form "dN = <expr>" bindings
            [ "d" <> T.pack (show i) <> " = " <> e
            | i <- [0 :: Int ..]
            | e <- exprs
            ]
        src = T.unlines ("module LinkBase where" : imports ++ defs)
        mainDef =
            case mode of
                "codeworld" -> "program = drawingOf(blank)"
                _ -> "main = return ()"
    T.writeFile baseFile src
    T.writeFile mainFile $
        T.unlines ["module Main where", "import LinkBase", mainDef]
-- | Read a Hoogle database file and split it into qualified-import lines
-- and fully-qualified expression texts, honouring the blacklist.
readHoogleDB :: FilePath -> [Text] -> IO ([Text], [Text])
readHoogleDB file blacklist =
    parseHoogleDB blacklist Nothing . T.lines <$> T.readFile file
-- | Walk the lines of a Hoogle database, threading the module currently
-- in scope ('Maybe' 'Text'). Produces a pair of:
--
-- * @\"import qualified M\"@ lines, one per non-blacklisted module, and
-- * fully-qualified expression texts (@M.ident@ or @(M.op)@) for every
--   non-blacklisted identifier\/operator signature under that module.
--
-- Lines that match nothing (and blacklisted entries, which fall through
-- the guards) are skipped by the catch-all clause.
parseHoogleDB :: [Text] -> Maybe Text -> [Text] -> ([Text], [Text])
parseHoogleDB blacklist _ (t:ts)
    -- a "module M" header switches the current module
    | Just mod <- submatch t "^module ([A-Za-z0-9._']+)"
    , not (mod `elem` blacklist) =
        let (i, e) = parseHoogleDB blacklist (Just mod) ts
         in ("import qualified " <> mod : i, e)
parseHoogleDB blacklist (Just mod) (t:ts)
    -- plain identifier signature: "name :: ..."
    | Just ident <- submatch t "^([A-Za-z0-9_']+) :: .*"
    , not (ident `elem` blacklist) =
        let (i, e) = parseHoogleDB blacklist (Just mod) ts
         in (i, mod <> "." <> ident : e)
    -- operator signature: "(+++) :: ..." — emitted parenthesised
    | Just sym <- submatch t "^\\(([!#$%&*+./<=>?@\\\\^|-~]+)\\) :: .*"
    , not (sym `elem` blacklist) =
        let (i, e) = parseHoogleDB blacklist (Just mod) ts
         in (i, "(" <> mod <> "." <> sym <> ")" : e)
parseHoogleDB blacklist mmod (_:ts) = parseHoogleDB blacklist mmod ts
parseHoogleDB _ _ [] = ([], [])
-- | Match @t@ against a regex with exactly one capture group; return the
-- captured text, or 'Nothing' when the pattern does not match (or does
-- not produce exactly the whole-match + one-group submatch list).
submatch :: Text -> Text -> Maybe Text
submatch t pat
    | [_, match] <- getAllTextSubmatches (t =~ pat) = Just match
    | otherwise = Nothing
| tgdavies/codeworld | codeworld-compiler/exec/GenBase.hs | apache-2.0 | 2,152 | 1 | 16 | 550 | 782 | 407 | 375 | 51 | 2 |
module Seraph.TypesSpec
( tests
) where
-------------------------------------------------------------------------------
import Test.QuickCheck.Property.Monoid
-------------------------------------------------------------------------------
import Seraph.Types
import SpecHelper
-------------------------------------------------------------------------------
-- | Top-level test tree for the @Seraph.Types@ module.
tests :: TestTree
tests = testGroup "Seraph.Types" [directivesTests]
-------------------------------------------------------------------------------
-- | Property check that the 'Directives' 'Monoid' instance obeys the
-- monoid laws (via @prop_Monoid@ from quickcheck-property-monoid).
directivesTests :: TestTree
directivesTests = testGroup "Directives" [
    testProperty "Monoid" $ eq $ prop_Monoid (T :: T Directives)
  ]
| MichaelXavier/Seraph | test/Seraph/TypesSpec.hs | bsd-2-clause | 689 | 0 | 10 | 92 | 89 | 52 | 37 | 10 | 1 |
{-# OPTIONS_HADDOCK hide #-}
-- |
--
-- Copyright:
-- This file is part of the package byline. It is subject to the
-- license terms in the LICENSE file found in the top-level
-- directory of this distribution and at:
--
-- https://github.com/pjones/byline
--
-- No part of this package, including this file, may be copied,
-- modified, propagated, or distributed except according to the
-- terms contained in the LICENSE file.
--
-- License: BSD-2-Clause
module Byline.Internal.Prim
( PrimF (..),
say,
sayLn,
askLn,
askChar,
askPassword,
pushCompFunc,
popCompFunc,
)
where
import Byline.Internal.Completion (CompletionFunc)
import Byline.Internal.Stylized (Stylized, text)
import Control.Monad.Trans.Free.Church (MonadFree)
import qualified Control.Monad.Trans.Free.Church as Free
-- | Primitive operations as a free monad.
--
-- Each constructor carries the data for one terminal interaction plus a
-- continuation; see the smart constructors below for the user-facing API.
--
-- @since 1.0.0.0
data PrimF f
  = Say (Stylized Text) f -- ^ Emit a styled message.
  | AskLn (Stylized Text) (Maybe Text) (Text -> f) -- ^ Prompt, optional default, continuation on the reply line.
  | AskChar (Stylized Text) (Char -> f) -- ^ Prompt, continuation on a single character.
  | AskPassword (Stylized Text) (Maybe Char) (Text -> f) -- ^ Prompt, optional masking character, continuation on the entry.
  | PushCompFunc (CompletionFunc IO) f -- ^ Install a completion function.
  | PopCompFunc f -- ^ Remove the most recently installed completion function.
  deriving (Functor)
-- | Smart constructor.
--
-- @since 1.0.0.0
say :: MonadFree PrimF m => Stylized Text -> m ()
say = Free.liftF . (`Say` ())
-- | Like 'say', but with a trailing newline appended to the styled
-- message.
--
-- @since 1.0.0.0
sayLn :: MonadFree PrimF m => Stylized Text -> m ()
sayLn msg = say $ msg <> text "\n"
-- | Smart constructor.
--
-- @since 1.0.0.0
askLn :: MonadFree PrimF m => Stylized Text -> Maybe Text -> m Text
askLn prompt def = Free.liftF (AskLn prompt def id)
-- | Smart constructor.
--
-- @since 1.0.0.0
askChar :: MonadFree PrimF m => Stylized Text -> m Char
askChar = Free.liftF . (`AskChar` id)
-- | Smart constructor.
--
-- @since 1.0.0.0
askPassword :: MonadFree PrimF m => Stylized Text -> Maybe Char -> m Text
askPassword prompt mask = Free.liftF (AskPassword prompt mask id)
-- | Smart constructor.
--
-- @since 1.0.0.0
pushCompFunc :: MonadFree PrimF m => CompletionFunc IO -> m ()
pushCompFunc = Free.liftF . (`PushCompFunc` ())
-- | Smart constructor.
--
-- @since 1.0.0.0
popCompFunc :: MonadFree PrimF m => m ()
popCompFunc = Free.liftF (PopCompFunc ())
| pjones/byline | src/Byline/Internal/Prim.hs | bsd-2-clause | 2,207 | 0 | 8 | 418 | 572 | 326 | 246 | -1 | -1 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QPixmapCache.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:16
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Gui.QPixmapCache (
QqPixmapCache(..)
,QqPixmapCache_nf(..)
,qPixmapCacheCacheLimit
,qPixmapCacheClear
,qPixmapCacheFind
,qPixmapCacheInsert
,qPixmapCacheRemove
,qPixmapCacheSetCacheLimit
,qPixmapCache_delete
)
where
import Foreign.C.Types
import Qth.ClassTypes.Core
import Qtc.Enums.Base
import Qtc.Classes.Base
import Qtc.Classes.Qccs
import Qtc.Classes.Core
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
import Qtc.Classes.Gui
import Qtc.ClassTypes.Gui
class QqPixmapCache x1 where
qPixmapCache :: x1 -> IO (QPixmapCache ())
instance QqPixmapCache (()) where
qPixmapCache ()
= withQPixmapCacheResult $
qtc_QPixmapCache
foreign import ccall "qtc_QPixmapCache" qtc_QPixmapCache :: IO (Ptr (TQPixmapCache ()))
instance QqPixmapCache ((QPixmapCache t1)) where
qPixmapCache (x1)
= withQPixmapCacheResult $
withObjectPtr x1 $ \cobj_x1 ->
qtc_QPixmapCache1 cobj_x1
foreign import ccall "qtc_QPixmapCache1" qtc_QPixmapCache1 :: Ptr (TQPixmapCache t1) -> IO (Ptr (TQPixmapCache ()))
class QqPixmapCache_nf x1 where
qPixmapCache_nf :: x1 -> IO (QPixmapCache ())
instance QqPixmapCache_nf (()) where
qPixmapCache_nf ()
= withObjectRefResult $
qtc_QPixmapCache
instance QqPixmapCache_nf ((QPixmapCache t1)) where
qPixmapCache_nf (x1)
= withObjectRefResult $
withObjectPtr x1 $ \cobj_x1 ->
qtc_QPixmapCache1 cobj_x1
qPixmapCacheCacheLimit :: (()) -> IO (Int)
qPixmapCacheCacheLimit ()
= withIntResult $
qtc_QPixmapCache_cacheLimit
foreign import ccall "qtc_QPixmapCache_cacheLimit" qtc_QPixmapCache_cacheLimit :: IO CInt
qPixmapCacheClear :: (()) -> IO ()
qPixmapCacheClear ()
= qtc_QPixmapCache_clear
foreign import ccall "qtc_QPixmapCache_clear" qtc_QPixmapCache_clear :: IO ()
qPixmapCacheFind :: ((String)) -> IO (QPixmap ())
qPixmapCacheFind (x1)
= withObjectRefResult $
withCWString x1 $ \cstr_x1 ->
qtc_QPixmapCache_find cstr_x1
foreign import ccall "qtc_QPixmapCache_find" qtc_QPixmapCache_find :: CWString -> IO (Ptr (TQPixmap ()))
qPixmapCacheInsert :: ((String, QPixmap t2)) -> IO (Bool)
qPixmapCacheInsert (x1, x2)
= withBoolResult $
withCWString x1 $ \cstr_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QPixmapCache_insert cstr_x1 cobj_x2
foreign import ccall "qtc_QPixmapCache_insert" qtc_QPixmapCache_insert :: CWString -> Ptr (TQPixmap t2) -> IO CBool
qPixmapCacheRemove :: ((String)) -> IO ()
qPixmapCacheRemove (x1)
= withCWString x1 $ \cstr_x1 ->
qtc_QPixmapCache_remove cstr_x1
foreign import ccall "qtc_QPixmapCache_remove" qtc_QPixmapCache_remove :: CWString -> IO ()
qPixmapCacheSetCacheLimit :: ((Int)) -> IO ()
qPixmapCacheSetCacheLimit (x1)
= qtc_QPixmapCache_setCacheLimit (toCInt x1)
foreign import ccall "qtc_QPixmapCache_setCacheLimit" qtc_QPixmapCache_setCacheLimit :: CInt -> IO ()
qPixmapCache_delete :: QPixmapCache a -> IO ()
qPixmapCache_delete x0
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QPixmapCache_delete cobj_x0
foreign import ccall "qtc_QPixmapCache_delete" qtc_QPixmapCache_delete :: Ptr (TQPixmapCache a) -> IO ()
| keera-studios/hsQt | Qtc/Gui/QPixmapCache.hs | bsd-2-clause | 3,505 | 0 | 12 | 528 | 873 | 466 | 407 | 81 | 1 |
import Data.Array
import qualified Data.Array.Unboxed as U
import qualified Data.Foldable as Fld
import Data.List
import qualified Data.Map as Map
import Data.Maybe
import qualified Data.Sequence as Seq
import System.Random
import Data.Array.IO
import Control.Monad
import System.IO.Unsafe
-- | Fisher-Yates shuffle in IO: the list is copied into a 1-based
-- mutable array, then for each position i a uniformly random index j in
-- [i, n] is drawn; the element at j is emitted and the element at i is
-- written into slot j so it stays available for later draws.
shuffle :: [a] -> IO [a]
shuffle xs = do
        ar <- newArray n xs
        forM [1..n] $ \i -> do
            j <- randomRIO (i,n)
            vi <- readArray ar i
            vj <- readArray ar j
            writeArray ar j vi
            return vj
  where
    n = length xs
-- | Build a 1-based mutable 'IOArray' from a list of the given length.
-- NOTE(review): the name clashes with 'newArray' re-exported by
-- Data.Array.IO (via Data.Array.MArray) — confirm uses resolve to this
-- local definition unambiguously.
newArray :: Int -> [a] -> IO (IOArray Int a)
newArray n xs =  newListArray (1, n) xs
-- | Build an array over the index range (lo, hi) whose cell at i is
-- @gen lookup i@, where @lookup@ indexes into the array being built —
-- the lazy knot lets each cell refer (memoised) to earlier/other cells.
buildArr mkArr lo hi gen = table
    where
        table = mkArr (lo, hi) [gen (table !) i | i <- [lo .. hi]]
arrlst i j xs = map (xs !) [i .. j]
ordr cs acc n = Fld.foldr (\x acc -> ordr cs acc x) (n : acc) (cs ! n)
intersects (x1, x2) (y1, y2) = (x1 <= y1 && y1 <= x2) || (x1 <= y2 && y2 <= x2) || (y1 <= x1 && x1 <= y2)
data KdTree = Leaf | Node Bool Int Int Int Int Int Int Int KdTree KdTree deriving (Show, Eq, Ord)
-- | Partition points around a pivot on one axis (flag True = compare by
-- snd, False = by fst), incrementally tracking each side's size and
-- bounding box. Both result accumulators are tagged with @not flag@ so
-- that the child nodes built from them split on the other axis.
part flag pivot xs = part' (not flag, 0, Nothing, []) (not flag, 0, Nothing, []) xs
    where
        f = if flag then snd else fst
        pc = f pivot
        pf c = f c < pc -- goes left iff strictly below the pivot's coordinate
        -- fold one point into an accumulator (flag, count, bbox, points)
        add (flag, 0, Nothing, []) e@(x, y) = (flag, 1, Just (x, x, y, y), [e])
        add (flag, n, Just (xl, xh, yl, yh), xs) e@(x, y) = (flag, n + 1, Just (min xl x, max xh x, min yl y, max yh y), e : xs)
        part' accl accr [] = (accl, accr)
        part' accl accr (x : xs) = if pf x then part' (add accl x) accr xs else part' accl (add accr x) xs
-- | Build a 'KdTree' from an accumulator (axis flag, size, bounding box,
-- points). The head of the point list serves as the pivot — 'main'
-- shuffles the points first, so this acts as a random pivot choice —
-- and the remaining points are partitioned on the current axis.
-- Note the stored count is @n - 1@: the pivot is excluded (see 'cnt').
cons (_, 0, Nothing, []) = Leaf
cons (flag, n, Just (xl, xh, yl, yh), xs) = Node flag (n - 1) x y xl xh yl yh (cons l) (cons r)
    where
        -- cannot change pivot easily
        pivot@(x, y) = head xs
        (l, r) = part flag pivot (tail xs)
-- | Count the points of the tree inside the closed query rectangle
-- [xl, xh] x [yl, yh]. Subtrees whose bounding box misses the query are
-- pruned; fully contained subtrees are answered in O(1) from the stored
-- size (the pivot is counted separately by the first summand).
cnt Leaf _ _ _ _ = 0
cnt (Node _ n x y xl' xh' yl' yh' l r) xl xh yl yh =
    --(unsafePerformIO $ putStrLn $ show (n, xl, xh, yl, yh, xl', xh', yl', yh')) `seq`
    if not (intersects (xl, xh) (xl', xh') && intersects (yl, yh) (yl', yh'))
        then 0
        else
            (if xl <= x && x <= xh && yl <= y && y <= yh then 1 else 0) +
            (if xl <= xl' && xh' <= xh && yl <= yl' && yh' <= yh
                then n
                else cnt l xl xh yl yh + cnt r xl xh yl yh)
-- | Binary search on the y coordinate: narrow [yl, yh] to the smallest
-- y such that exactly k points of the tree with x in [xl, xh] have
-- y-coordinate at most that value — i.e. select the k-th point (by y)
-- within the x-range. When descending right, k is reduced by the count
-- already accounted for in the left half (kym).
boundy t xl xh yl yh k =
    --(unsafePerformIO $ putStrLn $ show (xl, xh, yl, yh, ym, kym, k)) `seq`
    if yl == yh
        then yh
        else
            if kym > k
                then boundy t xl xh yl (pred ym) k
                else
                    if kym < k
                        then boundy t xl xh (succ ym) yh (k - kym)
                        --then boundy t xl xh (succ ym) yh k
                        else boundy t xl xh yl ym k
    where
        ym = (yl + yh) `div` 2
        -- points with x in [xl, xh] and y in [yl, ym]
        kym = cnt t xl xh yl ym
        --kym = cnt t xl xh 0 ym
--kym = cnt t xl xh 0 ym
-- Read a rooted tree (n nodes, parent edges), the node salaries, and
-- answer q online queries via 'tst'. Nodes are re-indexed into a
-- post-order traversal so each subtree occupies a contiguous interval,
-- then a KD-tree over (post-order position, salary rank) answers
-- "k-th smallest salary in a subtree" queries.
main = do
    pstr <- getLine
    let (n : q : _) = map read (words pstr)
    es <- mapM (const readEdge) [1 .. pred n]
    -- child lists per node, gathered from (child, parent) edges
    let mchildren = Map.fromListWith (Seq.><) (map (\(a, b) -> (b, Seq.singleton a)) es)
    let children = buildArr listArray 1 n (\_ x -> Fld.toList $ if x `Map.member` mchildren then fromJust $ x `Map.lookup` mchildren else Seq.empty)
    -- ranks: memoised subtree sizes (descendant counts); currently unused below
    let ranks = buildArr U.listArray 1 n (\mem x -> Fld.foldl' (+) 0 ((\x -> succ $ mem x) `map` (children ! x)))
    sstr <- getLine
    --let sals = (U.listArray (1, n) (map read (words sstr))) :: Array Int Int
    let sals = (map read $ words sstr) :: [Int]
    -- post-order traversal from the root (node 1) and its inverse maps
    let ordering = ordr children [] 1
    let toord = (U.array (1, n) $ zip ordering [1 ..]) :: Array Int Int
    let fromord = (U.listArray (1, n) ordering) :: Array Int Int
    -- ints ! v = start of v's subtree interval in post-order (its first child's start, or itself for leaves)
    let ints = buildArr U.listArray 1 n (\mem x -> if null $ children ! (fromord ! x) then x else mem $ toord ! (head (children ! (fromord ! x))))
    -- salary ranks: node <-> position in ascending salary order
    let sorted = map snd (sort $ zip sals [1 .. n])
    let tosrt = (U.array (1, n) $ zip sorted [1 ..]) :: Array Int Int
    let fromsrt = (U.listArray (1, n) sorted) :: Array Int Int
    let ordsrt = (U.listArray (1, n) (((tosrt !) . (fromord !)) `map` [1 .. n])) :: Array Int Int
    --let qt = cons (n, Just (0, n, 0, n), (zip (arrlst 1 n toord) (arrlst 1 n tosrt)))
    -- shuffle the points so 'cons' picks random pivots (balanced tree)
    xxs <- shuffle (zip (arrlst 1 n toord) (arrlst 1 n tosrt))
    let qt = cons (True, n, Just (0, n, 0, n), xxs)
    --putStrLn $ show qt
    tst q 0 (toord, fromord, tosrt, fromsrt, ordsrt, ints, qt, n)
-- Answer q queries. Each query (v, k) is decoded with the previous
-- answer d (online re-indexing: the real node is v + d); the node's
-- subtree is the post-order interval [lo, ro] ending just before the
-- node itself, and 'boundy' selects the k-th salary rank inside it.
tst 0 _ _ = return ()
tst q d dt@(toord, fromord, tosrt, fromsrt, ordsrt, ints, qt, n) = do
    qstr <- getLine
    let (v : k : _) = map read (words qstr)
    let vo = toord ! (v + d)
    let lo = ints ! vo
    let ro = pred vo
    --putStrLn $ show (':', v, k, d, vo, lo, ro)
    --putStrLn $ show (cnt qt lo ro 0 15000)
    --putStrLn $ show (cnt qt lo ro 15001 26250)
    --putStrLn $ show (cnt qt lo ro 0 26250)
    --let dord = qs (pred k) (arrlst lo ro ordsrt)
    let dord = boundy qt lo ro 0 n k
    let d' = fromsrt ! dord
    putStrLn $ show d'
    tst (pred q) d' dt
-- | Read one tree edge from standard input: a line "a b" of two integers.
readEdge :: IO (Int, Int)
readEdge = do
    ws <- fmap words getLine
    let (a : b : _) = map read ws
    return (a, b)
| pbl64k/HackerRank-Contests | 2014-06-20-FP/BoleynSalary/bs.kdt.hs | bsd-2-clause | 5,111 | 0 | 20 | 1,629 | 2,377 | 1,269 | 1,108 | 95 | 5 |
module Linear.Quaternion.Utils where
import Linear.Affine
import Linear.Epsilon
import Linear.Matrix as M
import Linear.Metric
import Linear.Quaternion
import Linear.V3 (V3(..),cross)
import Linear.Vector
import qualified Debug.Trace as D
{- | This is an alternative to the Quaternion format,
following the THREE.js representation of Euler.
These are intrinsic Tait–Bryan rotations.
From THREE.js documentation:
"The order in which to apply rotations. Default is 'XYZ',
which means that the object will first be rotated around its
X axis, then its Y axis and finally its Z axis. Other
possibilities are: 'YZX', 'ZXY', 'XZY', 'YXZ' and 'ZYX'.
These must be in upper case.
Three.js uses intrinsic (Tait-Bryan) ordering, also known as
yaw, pitch and roll. This means that rotations are performed
with respect to the local coordinate system. That is, for
order 'XYZ', the rotation is first around world-X, then
around local-Y (which may now be different from the world
Y-axis), then local-Z (which may be different from the world
Z-axis).
Some implementations may use extrinsic (proper) ordering, in
which case rotations are performed with respect to the world
coordinate system, so that for order 'XYZ', the rotations are
around world-X, world-Y, and world-Z.
Converting between the two types is relatively
straightforward, you just need to reverse the order and the
rotation, so that an intrinsic (three.js) Euler rotation of
   angles a, b, c about XYZ will be equivalent to an
extrinsic Euler rotation of angles c, b, a about ZYX."
-}
-- | Euler-angle representation of a rotation, mirroring THREE.js's @Euler@:
-- three intrinsic (Tait-Bryan) rotations applied in the given 'Order'.
data Euler a = Euler
 { order :: Order -- ^ axis order in which the three rotations are applied
 , x :: a         -- ^ rotation about the X axis, in radians
 , y :: a         -- ^ rotation about the Y axis, in radians
 , z :: a         -- ^ rotation about the Z axis, in radians
 }
  deriving (Eq, Ord, Show, Read)
-- | The six possible axis orders for applying the three rotations.
data Order = XYZ | XZY | YXZ | YZX | ZXY | ZYX
  deriving (Eq, Ord, Show, Read)
{-# RULES "reorder" forall o v . reorder (invert o) (reorder o v) = v #-}
-- | Reorder a V3, assuming X-Y-Z, into the given order.
--
-- RULE: reorder (invert o) (reorder o v) = v
--
-- | Permute the components of a 'V3' (assumed to be in X-Y-Z order) into
-- the given 'Order'.
reorder :: Order -> V3 a -> V3 a
reorder o (V3 a b c) = case o of
  XYZ -> V3 a b c
  XZY -> V3 a c b
  YXZ -> V3 b a c
  YZX -> V3 b c a
  ZXY -> V3 c a b
  ZYX -> V3 c b a
{-# RULES "invert" forall o . invert (invert o) = o #-}
-- | This inverts the order. Think of 'Order'
-- as a maping; invert inverts the mapping.
-- When used in conjuction with reorder,
-- the reorder performed
-- will be the inverse, compared to using
-- reorder without invert.
--
-- RULE: invert (invert o) = o
--
-- | Invert an 'Order' viewed as a permutation: @reorder (invert o)@ undoes
-- @reorder o@.  Only the two 3-cycles (YZX and ZXY) are not self-inverse.
invert :: Order -> Order
invert o = case o of
  YZX   -> ZXY
  ZXY   -> YZX
  other -> other
{-
vX = V3 1 0 0
vY = V3 0 1 0
vZ = V3 0 0 1
-}
-- | Convert a 'Quaternion' into an 'Euler' with the requested axis 'Order'.
quaternionToEuler :: (Epsilon a, RealFloat a) => Order -> Quaternion a -> Euler a
quaternionToEuler o q = Euler o rX rY rZ
  where
    -- adapted from https://github.com/mrdoob/three.js/blob/master/src/math/Euler.js
    -- The rotation matrix with rows/columns permuted so the XYZ extraction
    -- formulas below work for every order.
    V3 (V3 m11 m12 m13)
       (V3 m21 m22 m23)
       (V3 m31 m32 m33) = reorder o $ transpose $ reorder o $ transpose $ fromQuaternion q
    V3 a1 a2 a3 = reorder o $ V3 xAxis yAxis zAxis :: V3 (V3 Int)
    -- Does order reflect right-hand rule, or does it need inverted:
    -- negate all angles when the permuted axes form a left-handed triple.
    sgn x = if cross a1 a2 == a3 then x else -x
    V3 rX rY rZ = sgn $ reorder (invert o) $ V3 r1 r2 r3
      where
        -- NOTE(review): @gimbal@ is True in the *non*-degenerate case
        -- (|m13| away from 1); the name reads inverted — confirm intent.
        gimbal = not $ nearZero $ 1 - abs m13
        -- clamp before asin so rounding cannot push m13 outside [-1, 1]
        r2 = asin $ max (-1) $ min m13 $ 1
        r1 | gimbal = atan2 (-m23) m33
           | otherwise = atan2 m32 m22
        r3 | gimbal = atan2 (-m12) m11
           | otherwise = 0
{-
function f(w,x,y,z,order) {
var q = new THREE.Quaternion(x,y,z,w);
var e = new THREE.Euler(0,0,0,order);
e.setFromQuaternion(q);
return e;
-}
-- Adapted from http://stackoverflow.com/questions/1171849/finding-quaternion-representing-the-rotation-from-one-vector-to-another
-- | Take two vectors, and figure a 'Quaternion' that rotates the direction
-- of the first onto the direction of the second.
-- The result 'Quaternion' might not be unique: for opposite vectors any
-- orthogonal axis works and an arbitrary one is chosen.
betweenq :: (RealFloat a, Show a, Epsilon a, Floating a) => V3 a -> V3 a -> Quaternion a
betweenq v1 v2
    -- (near-)parallel vectors: identity rotation; also avoids normalizing
    -- the degenerate (near-zero) cross product below
    | nearZero (d - 1) = axisAngle n_v1 0
    -- for 180 rotation the cross product degenerates as well, so rotate
    -- around an arbitrary orthogonal vector
    | nearZero (d + 1) = axisAngle (normalize (orthogonal n_v1)) pi
    | otherwise = axisAngle (normalize c) (acos d)
  where
    n_v1 = normalize v1
    n_v2 = normalize v2
    -- rotation axis; |c| == sin(angle), so it must be normalized before
    -- being handed to 'axisAngle' (which expects a unit axis) — otherwise
    -- the result is not a unit quaternion and does not represent a pure
    -- rotation.  (Also dropped a previously unused binding @ca@.)
    c = cross n_v1 n_v2
    -- cosine of the angle, clamped against rounding before 'acos'
    d = max (-1) $ min 1 $ dot n_v1 n_v2
-- | Pick some vector orthogonal to @v@: cross @v@ with the coordinate axis
-- along which @v@ has the smallest magnitude (for a slightly better
-- conditioned result).
orthogonal :: (Ord a, Num a) => V3 a -> V3 a
orthogonal v = cross v axis
  where
    V3 ax ay az = abs v
    axis
      | ax < ay   = if ax < az then xAxis else zAxis
      | ay < az   = yAxis
      | otherwise = zAxis
-- | Map an 'Euler' back into a 'Quaternion' by composing the three axis
-- rotations in the order given by 'order'.
-- NOTE(review): relies on 'product' over 'V3' multiplying the three
-- quaternions left-to-right — confirm against V3's Foldable instance.
eulerToQuaternion :: (Epsilon a, RealFloat a) => Euler a -> Quaternion a
eulerToQuaternion (Euler o x y z) = product $ reorder o (V3 _x _y _z)
  where
    _x = axisAngle xAxis x
    _y = axisAngle yAxis y
    _z = axisAngle zAxis z
-- | Unit vectors along the three coordinate axes.
xAxis, yAxis, zAxis :: Num a => V3 a
xAxis = V3 1 0 0
yAxis = V3 0 1 0
zAxis = V3 0 0 1
| andygill/obj-tools | src/Linear/Quaternion/Utils.hs | bsd-3-clause | 5,171 | 0 | 14 | 1,326 | 1,211 | 622 | 589 | 72 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE PolyKinds #-}
module Language.Common where
-- | Type-indexed heterogeneous list: an @All p xs@ holds one value of
-- @p x@ for every type @x@ in the type-level list @xs@.
data All (p :: k -> *) :: [k] -> * where
  An :: All p '[]
  Ac :: p x -> All p xs -> All p (x ': xs)
-- | Infix synonym for 'Ac' (cons for 'All').
(.@.) :: p x -> All p xs -> All p (x ': xs)
(.@.) = Ac
-- Right-associative so lists can be written a .@. b .@. An.
infixr 2 .@.
infixr 2 `Ac`
| nazrhom/vcs-clojure | src/Language/Common.hs | bsd-3-clause | 355 | 0 | 10 | 83 | 140 | 81 | 59 | 13 | 1 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.ARB.VertexBufferObject
-- Copyright : (c) Sven Panne 2015
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
-- The <https://www.opengl.org/registry/specs/ARB/vertex_buffer_object.txt ARB_vertex_buffer_object> extension.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.ARB.VertexBufferObject (
-- * Enums
gl_ARRAY_BUFFER_ARB,
gl_ARRAY_BUFFER_BINDING_ARB,
gl_BUFFER_ACCESS_ARB,
gl_BUFFER_MAPPED_ARB,
gl_BUFFER_MAP_POINTER_ARB,
gl_BUFFER_SIZE_ARB,
gl_BUFFER_USAGE_ARB,
gl_COLOR_ARRAY_BUFFER_BINDING_ARB,
gl_DYNAMIC_COPY_ARB,
gl_DYNAMIC_DRAW_ARB,
gl_DYNAMIC_READ_ARB,
gl_EDGE_FLAG_ARRAY_BUFFER_BINDING_ARB,
gl_ELEMENT_ARRAY_BUFFER_ARB,
gl_ELEMENT_ARRAY_BUFFER_BINDING_ARB,
gl_FOG_COORDINATE_ARRAY_BUFFER_BINDING_ARB,
gl_INDEX_ARRAY_BUFFER_BINDING_ARB,
gl_NORMAL_ARRAY_BUFFER_BINDING_ARB,
gl_READ_ONLY_ARB,
gl_READ_WRITE_ARB,
gl_SECONDARY_COLOR_ARRAY_BUFFER_BINDING_ARB,
gl_STATIC_COPY_ARB,
gl_STATIC_DRAW_ARB,
gl_STATIC_READ_ARB,
gl_STREAM_COPY_ARB,
gl_STREAM_DRAW_ARB,
gl_STREAM_READ_ARB,
gl_TEXTURE_COORD_ARRAY_BUFFER_BINDING_ARB,
gl_VERTEX_ARRAY_BUFFER_BINDING_ARB,
gl_VERTEX_ATTRIB_ARRAY_BUFFER_BINDING_ARB,
gl_WEIGHT_ARRAY_BUFFER_BINDING_ARB,
gl_WRITE_ONLY_ARB,
-- * Functions
glBindBufferARB,
glBufferDataARB,
glBufferSubDataARB,
glDeleteBuffersARB,
glGenBuffersARB,
glGetBufferParameterivARB,
glGetBufferPointervARB,
glGetBufferSubDataARB,
glIsBufferARB,
glMapBufferARB,
glUnmapBufferARB
) where
import Graphics.Rendering.OpenGL.Raw.Tokens
import Graphics.Rendering.OpenGL.Raw.Functions
| phaazon/OpenGLRaw | src/Graphics/Rendering/OpenGL/Raw/ARB/VertexBufferObject.hs | bsd-3-clause | 1,872 | 0 | 4 | 208 | 169 | 120 | 49 | 45 | 0 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE FlexibleInstances #-}
-- | This is a legacy module from the pre-GHC HaRe, and will disappear
-- eventually.
module Language.Haskell.Refact.Utils.TypeSyn where
-- Modules from GHC
import qualified GHC as GHC
import qualified Name as GHC
import qualified Outputable as GHC
-- Legacy aliases over GHC AST types (parser-stage names use 'GHC.RdrName',
-- renamer-stage names use 'GHC.Name').
type HsExpP = GHC.HsExpr GHC.RdrName
type HsPatP = GHC.Pat GHC.RdrName
type HsDeclP = GHC.LHsDecl GHC.RdrName
type HsDeclsP = GHC.HsGroup GHC.Name
type InScopes = [GHC.Name]
type Export = GHC.LIE GHC.RdrName
-- ---------------------------------------------------------------------
-- From old/tools/base/defs/PNT.hs
-- | HsName is a name as it is found in the source.
-- This seems to be quite a close correlation with 'GHC.RdrName'.
type HsName = GHC.RdrName
-- |The PN is the name as it occurs to the parser, and
-- corresponds with the GHC.RdrName
-- type PN = GHC.RdrName
newtype PName = PN HsName deriving (Eq)
-- Debug-friendly names for GHC's four namespaces (orphan instance).
instance Show GHC.NameSpace where
  show ns
    | ns == GHC.varName  = "VarName"
    | ns == GHC.dataName = "DataName"
    | ns == GHC.tvName   = "TvName"
    | ns == GHC.tcName   = "TcClsName"
    | otherwise          = "UnknownNamespace"
-- Orphan 'Outputable' instances for AST fragments this GHC API version does
-- not pretty-print itself; intended for debug output only.
instance GHC.Outputable GHC.NameSpace where
  ppr x = GHC.text $ show x
instance GHC.Outputable (GHC.MatchGroup GHC.Name (GHC.LHsExpr GHC.Name)) where
  ppr (GHC.MG ms _ _ _) = GHC.text "MatchGroup" GHC.<+> GHC.ppr ms
instance GHC.Outputable (GHC.Match GHC.Name (GHC.LHsExpr GHC.Name)) where
  ppr (GHC.Match _fn pats mtyp grhs) = GHC.text "Match" GHC.<+> GHC.ppr pats
             GHC.<+> GHC.ppr mtyp
             GHC.<+> GHC.ppr grhs
instance GHC.Outputable (GHC.GRHSs GHC.Name (GHC.LHsExpr GHC.Name)) where
  ppr (GHC.GRHSs grhss binds) = GHC.text "GRHSs" GHC.<+> GHC.ppr grhss
             GHC.<+> GHC.ppr binds
instance GHC.Outputable (GHC.GRHS GHC.Name (GHC.LHsExpr GHC.Name)) where
  ppr (GHC.GRHS guards rhs) = GHC.text "GRHS" GHC.<+> GHC.ppr guards
            GHC.<+> GHC.ppr rhs
instance GHC.Outputable (GHC.HsTupArg GHC.Name) where
  ppr (GHC.Present e) = GHC.text "Present" GHC.<+> GHC.ppr e
  ppr (GHC.Missing _typ) = GHC.text "Missing"
instance GHC.Outputable (GHC.ConDeclField GHC.Name) where
  ppr (GHC.ConDeclField name typ doc) = GHC.text "ConDeclField"
            GHC.<+> GHC.ppr name
            GHC.<+> GHC.ppr typ
            GHC.<+> GHC.ppr doc
instance GHC.Outputable (GHC.TyFamEqn GHC.Name (GHC.LHsTyVarBndrs GHC.Name)) where
  ppr (GHC.TyFamEqn name pats rhs) = GHC.text "TyFamEqn"
            GHC.<+> GHC.ppr name
            GHC.<+> GHC.ppr pats
            GHC.<+> GHC.ppr rhs
-- ---------------------------------------------------------------------
| kmate/HaRe | src/Language/Haskell/Refact/Utils/TypeSyn.hs | bsd-3-clause | 3,202 | 0 | 10 | 927 | 812 | 414 | 398 | 48 | 0 |
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ViewPatterns #-}
module Laws where
import Test.QuickCheck
import Test.QuickCheck.Function
-- | Monad associativity law: @(x >>= f) >>= g == x >>= (\x' -> f x' >>= g)@.
-- Function arguments come wrapped in QuickCheck's 'Fun' so they can be
-- generated and shrunk; 'apply' unwraps them via the view patterns.
monadAssocProp :: forall m a b c. (Monad m, Eq (m c)) => m a -> Fun a (m b) -> Fun b (m c) -> Bool
monadAssocProp x (apply -> f) (apply -> g) = ((x >>= f) >>= g) == (x >>= (\x' -> f x' >>= g))
-- | Monad left-identity law: @return x >>= f == f x@.
monadLeftIdProp :: forall m a b. (Monad m, Eq (m b)) => a -> Fun a (m b) -> Bool
monadLeftIdProp x (apply -> f) = (return x >>= f) == (f x)
-- | Monad right-identity law: @m >>= return@ must equal @m@.
monadRightIdProp :: (Monad m, Eq (m a)) => m a -> Bool
monadRightIdProp m = m == (m >>= return)
| isovector/dependent-types | src/Laws.hs | bsd-3-clause | 579 | 0 | 12 | 128 | 301 | 163 | 138 | 11 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
-- GHC >=7.10 deprecated OverlappingInstances in favour of instance by instance
-- annotation using OVERLAPPABLE and OVERLAPPING pragmas.
#ifdef DEPRECATED_LANGUAGE_OVERLAPPING_INSTANCES
#define PRAGMA_OVERLAPPABLE {-# OVERLAPPABLE #-}
#else
{-# LANGUAGE OverlappingInstances #-}
#define PRAGMA_OVERLAPPABLE
#endif
-- |
-- Module: Data.OpenUnion.Internal
-- Description: Open unions (type-indexed co-products) for extensible effects.
--
-- Copyright: (c) 2016 Allele Dev; 2017 Ixperta Solutions s.r.o.
-- License: BSD3
-- Maintainer: ixcom-core@ixperta.com
-- Stability: experimental
-- Portability: GHC specific language extensions.
--
-- These are internal definitions and should be used with caution. There are no
-- guarantees that the API of this module will be preserved between minor
-- versions of this package.
--
-- Open unions (type-indexed co-products, i.e. type-indexed sums) for
-- extensible effects All operations are constant-time.
--
-- Based on
-- <http://okmij.org/ftp/Haskell/extensible/OpenUnion51.hs OpenUnion51.hs>.
--
-- Type-list @r :: [* -> *]@ of open union components is a small Universe.
-- Therefore, we can use a @Typeable@-like evidence in that universe. In our
-- case a simple index of an element in the type-list is sufficient
-- substitution for @Typeable@.
module Data.OpenUnion.Internal (module Data.OpenUnion.Internal)
where
import Prelude ((+), (-))
import Data.Bool (otherwise)
import Data.Either (Either(Left, Right))
import Data.Eq ((==))
import Data.Function (($))
import Data.Maybe (Maybe(Just, Nothing))
import Data.Word (Word)
import Unsafe.Coerce (unsafeCoerce)
-- | Open union is a strong sum (existential with an evidence): the 'Word'
-- is the index of the hidden summand type @t@ in the type-list @r@.
data Union (r :: [ * -> * ]) a where
  Union :: {-# UNPACK #-} !Word -> t a -> Union r a
-- | Takes a request of type @t :: * -> *@, and injects it into the 'Union'.
--
-- Summand is assigning a specified 'Word' value, which is a position in the
-- type-list @(t ': r) :: * -> *@.
--
-- __This function is unsafe.__
--
-- /O(1)/
-- Pair the payload with its (caller-supplied) type-list index.
unsafeInj :: Word -> t a -> Union r a
unsafeInj ix payload = Union ix payload
{-# INLINE unsafeInj #-}
-- | Project a value of type @'Union' (t ': r) :: * -> *@ into a possible
-- summand of the type @t :: * -> *@. 'Nothing' means that @t :: * -> *@ is not
-- the value stored in the @'Union' (t ': r) :: * -> *@.
--
-- It is assumed that summand is stored in the 'Union' when the 'Word' value is
-- the same value as is stored in the 'Union'.
--
-- __This function is unsafe.__
--
-- /O(1)/
-- Recover the payload only when the stored index matches the expected one;
-- the 'unsafeCoerce' is justified by that index equality.
unsafePrj :: Word -> Union r a -> Maybe (t a)
unsafePrj ix (Union ix' payload) =
  if ix == ix' then Just (unsafeCoerce payload) else Nothing
{-# INLINE unsafePrj #-}
-- | Represents position of element @t :: * -> *@ in a type list
-- @r :: [* -> *]@.  Both type parameters are phantom: only the 'Word'
-- index is stored.
newtype P t r = P {unP :: Word}
-- | Find an index of an element @t :: * -> *@ in a type list @r :: [* -> *]@.
-- The element must exist (otherwise instance resolution fails at compile
-- time).
--
-- This is essentially a compile-time computation without run-time overhead.
class FindElem (t :: * -> *) (r :: [* -> *]) where
    -- | Position of the element @t :: * -> *@ in a type list @r :: [* -> *]@.
    --
    -- Position is computed during compilation, i.e. there is no run-time
    -- overhead.
    --
    -- /O(1)/
    elemNo :: P t r
-- | Base case; element is at the current position in the list.
instance FindElem t (t ': r) where
    elemNo = P 0
-- | Recursion; element is not at the current position, but is somewhere in the
-- list (index is one more than its index in the tail).
instance PRAGMA_OVERLAPPABLE FindElem t r => FindElem t (t' ': r) where
    elemNo = P $ 1 + unP (elemNo :: P t r)
-- | This type class is used for two following purposes:
--
-- * As a @Constraint@ it guarantees that @t :: * -> *@ is a member of a
--   type-list @r :: [* -> *]@.
--
-- * Provides a way how to inject\/project @t :: * -> *@ into\/from a 'Union',
--   respectively.
--
-- Following law has to hold:
--
-- @
-- 'prj' . 'inj' === 'Just'
-- @
class FindElem t r => Member (t :: * -> *) r where
    -- | Takes a request of type @t :: * -> *@, and injects it into the
    -- 'Union'.
    --
    -- /O(1)/
    inj :: t a -> Union r a
    -- | Project a value of type @'Union' (t ': r) :: * -> *@ into a possible
    -- summand of the type @t :: * -> *@. 'Nothing' means that @t :: * -> *@ is
    -- not the value stored in the @'Union' (t ': r) :: * -> *@.
    --
    -- /O(1)/
    prj :: Union r a -> Maybe (t a)
-- Sole instance: delegates to the unsafe primitives with the statically
-- computed index from 'FindElem'.
instance FindElem t r => Member t r where
    inj = unsafeInj $ unP (elemNo :: P t r)
    {-# INLINE inj #-}
    prj = unsafePrj $ unP (elemNo :: P t r)
    {-# INLINE prj #-}
-- | Orthogonal decomposition of a @'Union' (t ': r) :: * -> *@. 'Right' value
-- is returned if the @'Union' (t ': r) :: * -> *@ contains @t :: * -> *@, and
-- 'Left' when it doesn't. Notice that 'Left' value contains
-- @Union r :: * -> *@, i.e. it can not contain @t :: * -> *@.
--
-- /O(1)/
decomp :: Union (t ': r) a -> Either (Union r a) (t a)
decomp (Union 0 a) = Right $ unsafeCoerce a
decomp (Union n a) = Left $ Union (n - 1) a
-- Inlined late (phase 2) so the "decomp/singleton" rule below can fire first.
{-# INLINE [2] decomp #-}
-- | Specialized version of 'decomp' for efficiency.
--
-- /O(1)/
--
-- TODO: Check that it actually adds on efficiency.
decomp0 :: Union '[t] a -> Either (Union '[] a) (t a)
decomp0 (Union _ a) = Right $ unsafeCoerce a
{-# INLINE decomp0 #-}
{-# RULES "decomp/singleton" decomp = decomp0 #-}
-- | Specialised version of 'prj'\/'decomp' that works on an
-- @'Union' '[t] :: * -> *@ which contains only one specific summand. Hence the
-- absence of 'Maybe', and 'Either'.
--
-- /O(1)/
extract :: Union '[t] a -> t a
extract (Union _ a) = unsafeCoerce a
{-# INLINE extract #-}
-- | Inject whole @'Union' r@ into a weaker @'Union' (any ': r)@ that has one
-- more summand: every stored index shifts up by one.
--
-- /O(1)/
weaken :: Union r a -> Union (any ': r) a
weaken (Union n a) = Union (n + 1) a
{-# INLINE weaken #-}
| IxpertaSolutions/freer-effects | src/Data/OpenUnion/Internal.hs | bsd-3-clause | 6,180 | 0 | 10 | 1,300 | 922 | 549 | 373 | 61 | 1 |
{-# LANGUAGE QuasiQuotes #-}
module Write.Type.FuncPointer
( writeFuncPointerType
) where
import Spec.Type
import Text.InterpolatedString.Perl6
import Text.PrettyPrint.Leijen.Text hiding ((<$>))
import Write.TypeConverter
import Write.WriteMonad
-- | Render a Haskell @type@ synonym for a function-pointer typedef:
-- the generated text is @type \<name\> = \<translated C type\>@.
writeFuncPointerType :: FuncPointerType -> Write Doc
writeFuncPointerType fpt = do
  hsType <- cTypeToHsTypeString (fptCType fpt)
  -- The quasiquote deliberately keeps a trailing newline after the decl.
  pure [qc|type {fptName fpt} = {hsType}
|]
| oldmanmike/vulkan | generate/src/Write/Type/FuncPointer.hs | bsd-3-clause | 428 | 0 | 10 | 56 | 95 | 56 | 39 | 12 | 1 |
--
-- HTTP client for use with io-streams
--
-- Copyright © 2012-2014 Operational Dynamics Consulting, Pty Ltd
--
-- The code in this file, and the program it is a part of, is made
-- available to you by its authors as open source software: you can
-- redistribute it and/or modify it under a BSD licence.
--
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS -fno-warn-unused-imports #-}
module Snippet where
import Blaze.ByteString.Builder (Builder)
import qualified Blaze.ByteString.Builder as Builder
import Control.Exception (bracket)
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as S
import System.IO.Streams (InputStream, OutputStream, stdout)
import qualified System.IO.Streams as Streams
-- Obviously don't need all those imports, but useful for experimenting
import Network.Http.Client
main :: IO ()
main = do
    -- open a plain-HTTP connection to the demo server
    c <- openConnection "kernel.operationaldynamics.com" 58080
    -- GET /time, asking for a text/plain response
    let q = buildRequest1 $ do
            http GET "/time"
            setAccept "text/plain"
    sendRequest c q emptyBody
    -- debugHandler dumps the response (headers and body) for inspection
    receiveResponse c debugHandler
    closeConnection c
| laurencer/confluence-sync | vendor/http-streams/tests/BasicSnippet.hs | bsd-3-clause | 1,110 | 0 | 13 | 206 | 175 | 103 | 72 | 20 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
module Reflex.Animation
( Animation (..)
, stretched
, delayed
, Clip (..)
, sampleClip
, toMaybe
, stretchTo
, apply
, crop
, clamped
, repeat
, replicate
, cropEnd
, cropStart
, reCrop
, linear
, linearIn
, linearOut
, piecewise
, keyframes
, keyframesWith
, half
, sine
, cosine
, clamp
, fmod
)
where
import Control.Applicative
import Data.Profunctor
import Data.Semigroup
import Data.VectorSpace
import Data.List.NonEmpty (NonEmpty(..))
import Data.Functor
import Data.Maybe
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Prelude hiding (repeat, replicate)
-- | Infinite animations time -> a. Supports operations:
-- * Mapping over either time or the value using the Functor/Profunctor(lmap, rmap)
-- * Combined in parallel with other infinite animations using Applicative/Monad
-- * Turned into a finite animation by 'crop'
newtype Animation time a = Animation { sampleAt :: time -> a }
                         deriving (Functor, Applicative, Monad, Profunctor)
-- | Scale time: the result sampled at @t@ is the original at @factor * t@
-- (so a factor above 1 compresses the animation in time).
stretched :: (Num time) => time -> Animation time a -> Animation time a
stretched factor = lmap (* factor)
-- | Shift an animation @t@ units later: sampling at @u@ reads the original
-- at @u - t@.
delayed :: (Num time) => time -> Animation time a -> Animation time a
delayed t = lmap (subtract t)
-- | Finite animations, Animation with a period. Supports operations:
-- * Combined end-to end using Semigroup instance, e.g. 'sconcat'
-- * Combined with Infinite animations with 'apply'
-- * Turned into Inifinite animations by either:
--    > Clamping time - 'clamped'
--    > Using Maybe - 'toMaybe'
--    > Repeating - 'repeat'
-- * Futher cropped in various ways
data Clip time a = Clip { clipAnim :: Animation time a, period :: time }
instance Functor (Clip time) where
  fmap f (Clip anim p) = Clip (f <$> anim) p
-- End-to-end concatenation (the second clip plays after the first).
instance (Num time, Ord time) => Semigroup (Clip time a) where
  c <> c' = piecewise [c, c']
  sconcat (c :| cs) = piecewise (c : cs)
-- | Constructor for clips to simplify creation
clip :: (time -> a) -> time -> Clip time a
clip anim = Clip (Animation anim)
-- | Apply a clip of functions pointwise to an infinite animation; the
-- result keeps the clip's period.
apply :: Clip time (a -> b) -> Animation time a -> Clip time b
apply (Clip anim p) a = Clip (anim <*> a) p
-- | Take the section @[s, e]@ of an infinite animation as a 'Clip':
-- sampling the clip at local time @t@ samples the animation at @s + t@.
crop :: (Ord time, Num time) => (time, time) -> Animation time a -> Clip time a
-- BUG FIX: the period of the section [s, e] is its length (e - s); the
-- previous (s - e) was negative for s < e, which made 'sampleClip' reject
-- every sample time and 'clamped' clamp into an empty interval.
crop (s, e) a = Clip (lmap (+ s) a) (e - s)
-- | Sample from a clip, returning Nothing outside its domain
-- (the closed interval [0, period]).
sampleClip :: (Ord time, Num time) => Clip time a -> time -> Maybe a
sampleClip c t | t >= 0 && t <= period c = Just $ sampleAt (clipAnim c) t
               | otherwise = Nothing
-- | Turn a clip into an infinite Animation by using Maybe
toMaybe :: (Ord time, Num time) => Clip time a -> Animation time (Maybe a)
toMaybe c = Animation (sampleClip c)
-- | Make an infinite animation by clamping time to lie within the period
-- (times before 0 hold the first value, times after the period the last).
clamped :: (Ord time, Num time) => Clip time a -> Animation time a
clamped (Clip anim p) = lmap (clamp (0, p)) anim
-- | Make an infinite animation by repeating the clip (time wraps via 'fmod')
repeat :: (RealFrac time) => Clip time a -> Animation time a
repeat (Clip anim p) = lmap (`fmod` p) anim
-- | Repeat a clip a fixed number of times to make a new one; times outside
-- the combined span clamp to the first/last frame of the original.
replicate :: (RealFrac time) => Int -> Clip time a -> Clip time a
replicate n (Clip anim p) = Clip (lmap time anim) (fromIntegral n * p) where
  time t | t < 0 = 0.0
         | t >= fromIntegral n * p = p
         | otherwise = t `fmod` p
-- | Stretch a clip to a specific size by scaling time
stretchTo :: (RealFrac time) => time -> Clip time a -> Clip time a
stretchTo p c = Clip (lmap (* factor) (clipAnim c)) p
  where factor = period c / p
-- | Shorten a clip to a certain period by cropping the end
-- (the new period is clamped into [0, old period])
cropEnd :: (Ord time, Num time) => time -> Clip time a -> Clip time a
cropEnd p' (Clip anim p) = Clip anim (clamp (0, p) p')
-- | Shorten a clip by cropping the start
cropStart :: (Ord time, Num time) => time -> Clip time a -> Clip time a
cropStart s (Clip anim p) = Clip (lmap (+ s') anim) (p - s')
  where s' = clamp (0, p) s
-- | Crop the clip to a range (end first, then start)
reCrop :: (Ord time, Num time) => (time, time) -> Clip time a -> Clip time a
reCrop (s, e) = cropStart s . cropEnd e
-- | Crop the clip to its second half (drops the first half of the period)
half :: (RealFrac time) => Clip time a -> Clip time a
half c = cropStart (0.5 * period c) c
-- An interpolator builds a clip of the given period between two endpoints.
type Interpolater time a = time -> (a, a) -> Clip time a
-- | Linear interpolation between two points over period @p@.
linear :: (VectorSpace v, RealFrac (Scalar v)) => Interpolater (Scalar v) v
linear p (s, e) = clip (\t -> lerp s e (t / p)) p
-- Build one clip per keyframe interval, starting from the separate initial
-- value; each pair supplies (interval length, end value).
intervalsWith :: (RealFrac time) => Interpolater time a -> a -> [(time, a)] -> [Clip time a]
intervalsWith _ start [] = error "intervalsWith: empty list"
intervalsWith interp start frames = zipWith toInterval ((0, start) : frames) frames
  where toInterval (_, k) (p, k') = interp p (k, k')
-- | Keyframes using an interpolator between intervals (e.g. 'linear')
keyframesWith :: (RealFrac time) => Interpolater time a -> a -> [(time, a)] -> Clip time a
keyframesWith interp start frames = piecewise $ intervalsWith interp start frames
-- | Keyframer using linear interpolation
-- Specified as pairs of (interval, value)
-- First key is provided separately and always starts at time = 0
keyframes :: (VectorSpace v, RealFrac (Scalar v)) => v -> [(Scalar v, v)] -> Clip (Scalar v) v
keyframes = keyframesWith linear
-- Sample the animation active at time t: the map holds segment start times,
-- so the greatest key strictly below t selects the segment (falling back to
-- the first segment at t <= 0); sampling is in segment-local time.
sampleInterval :: (Ord time, Num time) => Animation time a -> Map time (Animation time a) -> time -> a
sampleInterval start m t = sampleAt anim0 (t - t0) where
  (t0, anim0) = fromMaybe (0, start) (Map.lookupLT t m)
-- | Piecewise animation using several clips concatenated end to end,
-- one playing after the other, equivalent to 'sconcat'.
piecewise :: (Ord time, Num time) => [Clip time a] -> Clip time a
piecewise [] = error "piecewise: empty list"
piecewise [a] = a
piecewise clips = clip (sampleInterval start m) (last times) where
  -- running start times of each segment; the final entry is the total period
  m = Map.fromList (zip times (clipAnim <$> clips))
  times = scanl (+) 0 (period <$> clips)
  start = clipAnim $ head clips
-- | Predefined clips based on special functions for building up animations.
-- Ramp from 0 to 1 over period @p@.
-- NOTE(review): the guard rejects p <= 0, so the error message should read
-- "> 0" rather than ">= 0".
linearIn :: (RealFrac time) => time -> Clip time time
linearIn p | p <= 0.0 = error "linearIn: time must be >= 0"
           | otherwise = clip (/ p) p
-- Ramp from 1 down to 0 over period @p@ (same guard caveat as 'linearIn').
linearOut :: (RealFrac time) => time -> Clip time time
linearOut p | p <= 0 = error "linearOut: time must be >= 0"
            | otherwise = clip (\t -> 1.0 - t / p) p
-- Half a sine wave (sin over [0, pi]) stretched to period @p@.
sine :: (RealFrac time, Floating time) => time -> Clip time time
sine p = stretchTo p (clip sin pi)
-- Half a cosine wave (cos over [0, pi]) stretched to period @p@.
cosine :: (RealFrac time, Floating time) => time -> Clip time time
cosine p = stretchTo p (clip cos pi)
-- | Utility functions
-- | Floating-point modulus: wrap @x@ into @[0, d)@ for positive @d@
-- (negative inputs are shifted up by one period).
fmod :: RealFrac a => a -> a -> a
fmod x d = scaled * d
  where
    ratio = x / d
    -- fractional part of the quotient, same sign as the quotient
    frac = ratio - fromIntegral (truncate ratio :: Int)
    scaled = if x > 0 || frac == 0 then frac else frac + 1
-- | Restrict a value to the closed interval given by @(lower, upper)@.
clamp :: Ord a => (a, a) -> a -> a
clamp (lo, hi) v = lo `max` (v `min` hi)
| Saulzar/reflex-animation | src/Reflex/Animation.hs | bsd-3-clause | 7,113 | 0 | 12 | 1,758 | 2,466 | 1,303 | 1,163 | -1 | -1 |
{-|
Module: Codec.Parser.Common
Copyright: Jeremy List
License: BSD-3
Maintainer: quick.dudley@gmail.com
Common functions which do not need to be in 'Phaser.Core', mostly for using
'Phase's and 'Automaton's as parsers.
-}
{-# LANGUAGE MultiParamTypeClasses,FlexibleContexts,CPP #-}
module Codec.Phaser.Common (
Position(..),
PhaserType(..),
Standardized(..),
Trie,
newTrie,
fromTrie,
satisfy,
match,
char,
iChar,
string,
iString,
(<#>),
integerDecimal,
positiveIntegerDecimal,
decimal,
scientificNotation,
directHex,
hex,
positiveInteger,
integer,
countChar,
countLine,
trackPosition,
normalizeNewlines,
parse,
sepBy,
sepBy1,
munch,
munch1,
parseFile,
parseHandle,
latin1
) where
import Data.Bits
import Data.Char
import Data.Int
import Data.Word
import Data.Ratio
#if MIN_VERSION_base(4,9,0)
import Data.Semigroup
#endif
import Control.Monad
import Control.Applicative
import qualified Data.Map as M
import System.IO (Handle)
import Codec.Phaser.Core
import qualified Codec.Phaser.ByteString as BP
-- | Class for types which have standardized or otherwise unambiguous
-- representations. Implementations of 'regular' may be more permissive than
-- the corresponding 'Read' instance (if any).
class Standardized r a where
  regular :: Monoid p => Phase p r o a
-- | A data type for describing a position in a text file. Constructor arguments
-- are row number and column number (strict, unpacked; 'mempty' is
-- @Position 0 0@).
data Position = Position
  {-# UNPACK #-}!Int
  {-# UNPACK #-}!Int
 deriving (Eq,Ord)
-- Renders as "Row r, Column c", parenthesized in any non-zero precedence
-- context; 'Read' below accepts exactly this shape (case-insensitively).
instance Show Position where
  showsPrec p (Position r c) = showParen (p > 0) $
    ("Row " ++) . showsPrec 0 r . (", Column " ++) . showsPrec 0 c
instance Read Position where
  readsPrec p = toReadS (toAutomaton (go p)) where
    -- accept the value wrapped in balanced parentheses with optional spaces
    parenthes a = surround a
      (many (satisfy isSpace) >> char '(')
      (char ')' >> many (satisfy isSpace))
    go :: Int -> Phase () Char o Position
    go 0 = inner <|> parenthes (go 0)
    go _ = parenthes (go 0)
    -- matches the output of 'show': "Row <r>, Column <c>"
    -- ('iString' makes the keywords case-insensitive)
    inner = do
      many (satisfy isSpace)
      iString "row"
      some (satisfy isSpace)
      r <- integer
      many (satisfy isSpace)
      char ','
      many (satisfy isSpace)
      iString "column"
      some (satisfy isSpace)
      c <- integer
      return (Position r c)
#if MIN_VERSION_base(4,9,0)
instance Semigroup Position where
  (<>) = mappend
#endif
-- Composition of movements: a second movement with no row change advances
-- the column; a row change resets the column to the new movement's column.
instance Monoid Position where
  mempty = Position 0 0
  mappend (Position r1 c1) (Position r2 c2)
    | r2 == 0 = Position r1 (c1 + c2)
    | otherwise = Position (r1 + r2) c2
-- | Tries in this module can be used for creating more efficient parsers
-- when several of the recognized strings begin with the same few characters.
-- A node holds the values accepted at this prefix plus children per symbol.
data Trie c a = Trie [a] (M.Map c (Trie c a))
-- Union of tries: accepted values concatenate, children merge recursively.
instance Ord c => Monoid (Trie c a) where
  mempty = Trie [] M.empty
  mappend ~(Trie l1 m1) ~(Trie l2 m2) =
    Trie (l1 ++ l2) (M.unionWith mappend m1 m2)
#if MIN_VERSION_base(4,9,0)
instance Ord c => Semigroup (Trie c a) where
  (<>) = mappend
#endif
-- | Consume one input token; succeed with it when it satisfies the
-- predicate, fail otherwise.
satisfy :: (Monoid p) => (i -> Bool) -> Phase p i o i
satisfy p = do
  c <- get
  if p c
    then return c
    else empty
-- | Consume one input, if it's equal to the parameter then return it, otherwise
-- fail.
match :: (Eq i, Monoid p) => i -> Phase p i o i
match t = satisfy (== t)
-- | 'match' specialized to 'Char'
char :: (Monoid p) => Char -> Phase p Char o Char
char = match
-- | Case insensitive version of 'char' (compares via 'toLower')
iChar :: (Monoid p) => Char -> Phase p Char o Char
iChar t = satisfy (\i -> toLower t == toLower i)
-- | Match a list of input values (returns the full target list on success)
string :: (Eq i, Monoid p) => [i] -> Phase p i o [i]
string t = go t where
  go [] = return t
  go (a:r) = get >>= \c -> if c == a then go r else empty
-- | Match a string (case insensitive); returns the matched characters
iString :: (Monoid p) => String -> Phase p Char o String
iString = mapM iChar
infixl 5 <#>
-- | Chain two phaser stages, applying the first stage's function result to
-- the second stage's value (i.e. 'chainWith' specialized to ('$')).
(<#>) :: (PhaserType d, PhaserType s, Monoid p) =>
  s p b c (a -> z) -> d p c t a -> Automaton p b t z
(<#>) = chainWith ($)
-- | Parse a standard positive base 10 integer (one or more digits;
-- greedy, with backtracking alternatives at every prefix)
positiveIntegerDecimal :: (Num a, Monoid p) => Phase p Char o a
positiveIntegerDecimal = go 0 where
  go acc = do
    d <- fmap (fromIntegral . digitToInt) $ satisfy isDigit
    -- force the accumulator before recursing to avoid a thunk chain
    let acc' = acc * 10 + d
    acc' `seq` go acc' <|> return acc'
-- | Parse a standard base 10 integer (optional '-', which may be followed
-- by whitespace before the digits)
integerDecimal :: (Num a, Monoid p) => Phase p Char o a
integerDecimal = (pure id <|> (char '-' *> munch isSpace *> pure negate)) <*>
  positiveIntegerDecimal
-- | Take some hexadecimal digits and parse a number from hexadecimal
directHex :: (Num a, Monoid p) => Phase p Char o a
directHex = go 0 where
  go acc = do
    d <- fmap (fromIntegral . digitToInt) $ satisfy isHexDigit
    let acc' = acc * 16 + d
    acc' `seq` go acc' <|> return acc'
-- | Parse a hexadecimal number prefixed with "0x"
hex :: (Num a, Monoid p) => Phase p Char o a
hex = string "0x" >> directHex
-- | Parse a positive integer from either decimal or hexadecimal format
positiveInteger :: (Num a, Monoid p) => Phase p Char o a
positiveInteger = positiveIntegerDecimal <|> hex
-- | Parse a number either from decimal digits or from hexadecimal prefixed with
-- "0x"
integer :: (Num a, Monoid p) => Phase p Char o a
integer = integerDecimal <|> hex
-- | Parse a number from decimal digits, "-", and "."
decimal :: (Fractional a, Monoid p) => Phase p Char o a
decimal = (pure id <|> (negate <$ char '-' <* munch isSpace)) <*>
positiveDecimal
-- | Parse a positive number from decimal digits and ".".  The fractional
-- part is optional, but once a decimal point is consumed at least one digit
-- must follow it.
positiveDecimal :: (Fractional a, Monoid p) => Phase p Char o a
positiveDecimal = fromRational <$> do
  w <- positiveIntegerDecimal
  (match '.' >> go True 0.1 w) <|> return w
 where
  -- i: True only for the first digit after the point (tailors the error
  -- message); s: place value of the next digit; acc: running total.
  go i s acc = do
   let
    p = if i
     then ("At least one digit required after decimal point" <?>)
     else id
   d <- p $ fmap (fromIntegral . digitToInt) $ satisfy isDigit
   let acc' = acc + d * s
   acc' `seq` go False (s / 10) acc' <|> return acc'
-- | Parse a number from standard decimal format or from scientific notation
-- (e.g. @1.5e3@); an exponent sign of \'-\' divides by the power of ten
-- instead of multiplying.
scientificNotation :: (Fractional a, Monoid p) => Phase p Char o a
scientificNotation = fmap fromRational $ flip id <$> decimal <*> (pure id <|> (
  (\o p n -> o n (10 ^ p)) <$> (iChar 'e' *>
    (pure (*) <|> ((*) <$ char '+') <|> ((/) <$ char '-'))) <*>
  positiveIntegerDecimal
 ))
-- | Move the position counter one character to the right
countChar :: Phase Position i o ()
{-# INLINE countChar #-}
countChar = count (Position 0 1)
-- | Move the position counter to the next line
countLine :: Phase Position i o ()
{-# INLINE countLine #-}
countLine = count (Position 1 1)
-- | Count the lines and characters from the input before yielding them again.
-- If the phase pipeline does not include this or similar: parsing errors will
-- not report the correct position. Unix, Windows, Mac-OS Classic, and Acorn
-- newline formats are all recognized.
trackPosition :: Phase Position Char Char ()
{-# INLINABLE[1] trackPosition #-}
trackPosition = go where
 -- Three states so that "\r\n" and "\n\r" pairs count as a single newline:
 -- 'go' is the neutral state, 'goN' follows a '\n', 'goR' follows a '\r'.
 go = flip (<|>) (return ()) $ get >>= \c -> yield c >> case c of
  '\n' -> countLine >> goN
  '\r' -> countLine >> goR
  _ -> countChar >> go
 goN = flip (<|>) (return ()) $ get >>= \c -> yield c >> case c of
  '\n' -> countLine >> goN
  '\r' -> go
  _ -> countChar >> go
 goR = flip (<|>) (return ()) $ get >>= \c -> yield c >> case c of
  '\n' -> go
  '\r' -> countLine >> goR
  _ -> countChar >> go
-- | Converts all line separators into Unix format ('\n').  A "\r\n" or
-- "\n\r" pair collapses into a single '\n'.
normalizeNewlines :: Monoid p => Phase p Char Char ()
normalizeNewlines = go where
 -- Same three-state machine as 'trackPosition': 'goN' after '\n', 'goR'
 -- after '\r', so the second half of a paired separator is swallowed.
 go = flip (<|>) (return ()) $ get >>= \c -> case c of
  '\n' -> yield '\n' >> goN
  '\r' -> yield '\n' >> goR
  _ -> yield c >> go
 goN = flip (<|>) (return ()) $ get >>= \c -> case c of
  '\n' -> yield '\n' >> goN
  '\r' -> go
  _ -> yield c >> go
 goR = flip (<|>) (return ()) $ get >>= \c -> case c of
  '\n' -> go
  '\r' -> yield '\n' >> goR
  _ -> yield c >> go
-- | Use a 'Phase' as a parser. Note that unlike other parsers the reported
-- position in the input when the parser fails is the position reached when
-- all parsing options are exhausted, not the beginning of the failing token.
-- Since the characters may be counted nondeterministically: if multiple errors
-- are returned the reported error position may be different for each error
-- report.  Position counting starts at line 1, column 1.
parse :: (PhaserType s) => s Position i o a -> [i] -> Either [(Position,[String])] [a]
parse = parse_ (Position 1 1)
-- | sepBy p sep parses zero or more occurrences of p, separated by sep. Returns
-- a list of values returned by p.  (Never fails: yields the empty list when
-- no @p@ can be parsed at all.)
sepBy :: Monoid p => Phase p i o a -> Phase p i o s -> Phase p i o [a]
sepBy p sep = sepBy1 p sep <|> return []
-- | sepBy1 p sep parses one or more occurrences of p, separated by sep. Returns
-- a list of values returned by p.
sepBy1 :: Monoid p => Phase p i o a -> Phase p i o s -> Phase p i o [a]
sepBy1 p sep = ((:) <$> p <*> many (sep >> p))
-- | @surround m o c@ runs the opening parser @o@, then the main parser @m@,
-- then the closing parser @c@, keeping only the result of @m@.
surround :: Phase p i o a -> Phase p i o b -> Phase p i o e -> Phase p i o a
surround body open close = open *> body <* close
-- | Parses the first zero or more values satisfying the predicate. Always
-- succeeds, exactly once, having consumed all the characters Hence NOT the same
-- as (many (satisfy p))
munch :: Monoid p => (i -> Bool) -> Phase p i o [i]
munch p = go id where
 -- acc is a difference list, so appending each matching element is O(1);
 -- the first non-matching element is pushed back with 'put1'.
 go acc = flip (<|>) (eof >> return (acc [])) $ do
  c <- get
  if p c
   then go (acc . (c :))
   else put1 c >> return (acc [])
-- | Parses the first one or more values satisfying the predicate. Succeeds if
-- at least one value matches, having consumed all the characters Hence NOT the
-- same as (some (satisfy p))
munch1 :: Monoid p => (i -> Bool) -> Phase p i o [i]
munch1 p = go1 where
 -- go1 demands that the first element match; go then accumulates the rest
 -- via a difference list, pushing back the first non-matching element.
 go1 = do
  c <- get
  if p c
   then go (c :) <|> (eof >> return [c])
   else empty
 go acc = do
  c <- get
  if p c
   then go (acc . (c :)) <|> (eof >> return (acc [c]))
   else put1 c >> return (acc [])
-- | Run a parser on input from a file. Input is provided as bytes, if
-- characters are needed: a decoding phase such as
-- 'Codec.Phaser.UTF8.utf8_stream' or 'latin1' may be used.  Position
-- counting starts at line 1, column 1.
parseFile :: (PhaserType s) => s Position Word8 o a -> FilePath ->
  IO (Either [(Position,[String])] [a])
parseFile = BP.parseFile_ (Position 1 1)
-- | Run a parser on input from a handle. Input is provided as bytes, if
-- characters are needed: a decoding phase such as
-- 'Codec.Phaser.UTF8.utf8_stream' may be used.  Position counting starts at
-- line 1, column 1.
parseHandle :: (PhaserType s) => s Position Word8 o a -> Handle ->
  IO (Either [(Position,[String])] [a])
parseHandle = BP.parseHandle_ (Position 1 1)
-- | Decode bytes to characters using the Latin1 (ISO8859-1) encoding
-- (total: each byte maps directly to the code point with the same value).
latin1 :: Monoid p => Phase p Word8 Char ()
latin1 = go where
 go = flip (<|>) (return ()) $
  fmap (toEnum . fromIntegral) get >>= yield >> go
-- | Decode bytes to characters using the ASCII encoding, aborting if
-- any byte is outside the ASCII range (i.e. has its high bit set).
ascii :: Monoid p => Phase p Word8 Char ()
ascii = go where
 go = flip (<|>) (return ()) $ get >>= \c -> if c .&. 0x80 == 0
  then yield (toEnum $ fromIntegral c) >> go
  else fail "Byte out of ASCII range"
-- Canonical ('regular') parsers for common numeric and Boolean types.
-- Signed integral types accept decimal or "0x"-prefixed hexadecimal;
-- unsigned (Word*) types only accept non-negative input; floating types
-- accept scientific notation.
instance Standardized Char Int where
  regular = integer
instance Standardized Char Integer where
  regular = integer
instance Standardized Char Word where
  regular = positiveInteger
instance Standardized Char Word8 where
  regular = positiveInteger
instance Standardized Char Word16 where
  regular = positiveInteger
instance Standardized Char Word32 where
  regular = positiveInteger
instance Standardized Char Word64 where
  regular = positiveInteger
instance Standardized Char Int8 where
  regular = integer
instance Standardized Char Int16 where
  regular = integer
instance Standardized Char Int32 where
  regular = integer
instance Standardized Char Int64 where
  regular = integer
instance Standardized Char Float where
  regular = scientificNotation
instance Standardized Char Double where
  regular = scientificNotation
-- Ratios may be written in scientific notation or as "numerator % denominator"
-- (whitespace allowed around the '%').
instance (Integral a,Standardized Char a) => Standardized Char (Ratio a) where
  regular = scientificNotation <|> ((%) <$> regular <*> (
    munch isSpace *> char '%' *> munch isSpace *> regular
    ))
-- Booleans: "0"/"false" and "1"/"true" (the words matched via 'iString').
instance Standardized Char Bool where
  regular = (False <$ (void (char '0') <|> void (iString "false"))) <|>
    (True <$ (void (char '1') <|> void (iString "true")))
-- | Create a trie which maps a single string to an object. Analogous to
-- 'M.singleton'.
newTrie :: Ord c => [c] -> a -> Trie c a
newTrie l0 a = go l0 where
 go [] = Trie [a] M.empty
 go (c:r) = Trie [] $ M.singleton c $ go r
-- | Create a trie from a list of strings and corresponding objects. Analogous
-- to 'M.fromList'.  NOTE(review): merging relies on the 'Trie' 'Monoid'
-- instance (not visible here) — presumably duplicate keys keep all values.
listToTrie :: Ord c => [([c],a)] -> Trie c a
listToTrie = mconcat . map (uncurry newTrie)
-- | Create a 'Phase' or 'Automaton' from a 'Trie': consumes input matching
-- any stored key and returns the associated value(s).
fromTrie :: (Monoid p, PhaserType s, Ord c) => Trie c a -> s p c o a
fromTrie = fromPhase . go where
 -- Lazy pattern on the node; presumably keeps recursively-defined tries
 -- productive — TODO confirm.
 go ~(Trie l m) = let
  n = get >>= \c -> case M.lookup c m of
   Nothing -> empty
   Just r -> go r
  in foldr (<|>) n (map pure l)
| quickdudley/phaser | Codec/Phaser/Common.hs | bsd-3-clause | 13,177 | 0 | 18 | 3,015 | 4,303 | 2,248 | 2,055 | 268 | 7 |
-----------------------------------------------------------------------------
-- |
-- Module : Plugins.Monitors.Common
-- Copyright : (c) 2010, 2011 Jose Antonio Ortega Ruiz
-- (c) 2007-2010 Andrea Rossato
-- License : BSD-style (see LICENSE)
--
-- Maintainer : Jose A. Ortega Ruiz <jao@gnu.org>
-- Stability : unstable
-- Portability : unportable
--
-- Utilities used by xmobar's monitors
--
-----------------------------------------------------------------------------
module Plugins.Monitors.Common (
-- * Monitors
-- $monitor
Monitor
, MConfig (..)
, Opts (..)
, setConfigValue
, getConfigValue
, mkMConfig
, runM
, runMB
, io
-- * Parsers
-- $parsers
, runP
, skipRestOfLine
, getNumbers
, getNumbersAsString
, getAllBut
, getAfterString
, skipTillString
, parseTemplate
-- ** String Manipulation
-- $strings
, padString
, showWithPadding
, showWithColors
, showWithColors'
, showPercentWithColors
, showPercentsWithColors
, showPercentBar
, showLogBar
, showWithUnits
, takeDigits
, showDigits
, floatToPercent
, parseFloat
, parseInt
, stringParser
) where
import Control.Monad.Reader
import qualified Data.ByteString.Lazy.Char8 as B
import Data.IORef
import qualified Data.Map as Map
import Data.List
import Numeric
import Text.ParserCombinators.Parsec
import System.Console.GetOpt
import Control.Exception (SomeException,handle)
import Plugins
-- $monitor
-- | A monitor computation: reads its 'MConfig' and runs in 'IO'.
type Monitor a = ReaderT MConfig IO a
-- | Mutable monitor configuration.  Each field is an 'IORef' so that the
-- command-line options can update the shared record in place (see
-- 'doConfigOptions').
data MConfig =
    MC { normalColor :: IORef (Maybe String) -- ^ color between the thresholds
       , low :: IORef Int -- ^ low threshold
       , lowColor :: IORef (Maybe String) -- ^ color below the low threshold
       , high :: IORef Int -- ^ high threshold
       , highColor :: IORef (Maybe String) -- ^ color above the high threshold
       , template :: IORef String -- ^ output template
       , export :: IORef [String] -- ^ keys substitutable in the template
       , ppad :: IORef Int -- ^ minimum percentage width
       , decDigits :: IORef Int -- ^ decimal digits to display
       , minWidth :: IORef Int -- ^ minimum field width
       , maxWidth :: IORef Int -- ^ maximum field width
       , padChars :: IORef String -- ^ characters used for padding
       , padRight :: IORef Bool -- ^ pad on the right instead of the left
       , barBack :: IORef String -- ^ characters for bar backgrounds
       , barFore :: IORef String -- ^ characters for bar foregrounds
       , barWidth :: IORef Int -- ^ bar width in characters
       , useSuffix :: IORef Bool -- ^ append "%" (or other suffixes)
       }
-- | from 'http:\/\/www.haskell.org\/hawiki\/MonadState'
-- A lens-like accessor that picks one 'IORef' field out of 'MConfig'.
type Selector a = MConfig -> IORef a
-- | Read the value of the selected configuration field.
sel :: Selector a -> Monitor a
sel s =
    do hs <- ask
       liftIO $ readIORef (s hs)
-- | Apply a function to the selected configuration field in place.
mods :: Selector a -> (a -> a) -> Monitor ()
mods s m =
    do v <- ask
       io $ modifyIORef (s v) m
-- | Overwrite the selected configuration field with a constant value.
setConfigValue :: a -> Selector a -> Monitor ()
setConfigValue v s =
   mods s (\_ -> v)
-- | Read the selected configuration field (synonym for 'sel').
getConfigValue :: Selector a -> Monitor a
getConfigValue = sel
-- | Build a fresh 'MConfig' holding the given template and export list;
-- every other field starts at its default (thresholds 33/66, no colors,
-- widths 0, space padding, bar drawn with ':' and '#', bar width 10).
mkMConfig :: String -- ^ output template
          -> [String] -- ^ keys exported to the template
          -> IO MConfig
mkMConfig tmpl exprts =
    do lc <- newIORef Nothing
       l <- newIORef 33
       nc <- newIORef Nothing
       h <- newIORef 66
       hc <- newIORef Nothing
       t <- newIORef tmpl
       e <- newIORef exprts
       p <- newIORef 0
       d <- newIORef 0
       mn <- newIORef 0
       mx <- newIORef 0
       pc <- newIORef " "
       pr <- newIORef False
       bb <- newIORef ":"
       bf <- newIORef "#"
       bw <- newIORef 10
       up <- newIORef False
       return $ MC nc l lc h hc t e p d mn mx pc pr bb bf bw up
-- | One parsed command-line option.  Each constructor carries the raw
-- argument string; interpretation happens in 'doConfigOptions'.
data Opts = HighColor String
          | NormalColor String
          | LowColor String
          | Low String
          | High String
          | Template String
          | PercentPad String
          | DecDigits String
          | MinWidth String
          | MaxWidth String
          | Width String
          | PadChars String
          | PadAlign String
          | BarBack String
          | BarFore String
          | BarWidth String
          | UseSuffix String
-- | GetOpt descriptors mapping the monitor command-line flags onto 'Opts'.
options :: [OptDescr Opts]
options =
    [
      Option "H" ["High"] (ReqArg High "number") "The high threshold"
    , Option "L" ["Low"] (ReqArg Low "number") "The low threshold"
    , Option "h" ["high"] (ReqArg HighColor "color number") "Color for the high threshold: ex \"#FF0000\""
    , Option "n" ["normal"] (ReqArg NormalColor "color number") "Color for the normal threshold: ex \"#00FF00\""
    , Option "l" ["low"] (ReqArg LowColor "color number") "Color for the low threshold: ex \"#0000FF\""
    , Option "t" ["template"] (ReqArg Template "output template") "Output template."
    , Option "S" ["suffix"] (ReqArg UseSuffix "True/False") "Use % to display percents or other suffixes."
    , Option "d" ["ddigits"] (ReqArg DecDigits "decimal digits") "Number of decimal digits to display."
    , Option "p" ["ppad"] (ReqArg PercentPad "percent padding") "Minimum percentage width."
    , Option "m" ["minwidth"] (ReqArg MinWidth "minimum width") "Minimum field width"
    , Option "M" ["maxwidth"] (ReqArg MaxWidth "maximum width") "Maximum field width"
    , Option "w" ["width"] (ReqArg Width "fixed width") "Fixed field width"
    , Option "c" ["padchars"] (ReqArg PadChars "padding chars") "Characters to use for padding"
    , Option "a" ["align"] (ReqArg PadAlign "padding alignment") "'l' for left padding, 'r' for right"
    , Option "b" ["bback"] (ReqArg BarBack "bar background") "Characters used to draw bar backgrounds"
    , Option "f" ["bfore"] (ReqArg BarFore "bar foreground") "Characters used to draw bar foregrounds"
    , Option "W" ["bwidth"] (ReqArg BarWidth "bar width") "Bar width"
    ]
-- | Parse the monitor's command line: apply any recognized configuration
-- options, then run the action on the remaining arguments.  Option-parsing
-- errors are concatenated and returned as the output string.
doArgs :: [String] -> ([String] -> Monitor String) -> Monitor String
doArgs args action =
    case getOpt Permute options args of
      (o, n, []) -> do doConfigOptions o
                       action n
      (_, _, errs) -> return (concat errs)
-- | Store each parsed option into the corresponding 'MConfig' field.
-- Numeric arguments are clamped to be non-negative ('nz'); boolean
-- arguments accept the spellings listed in 'bool' below.
doConfigOptions :: [Opts] -> Monitor ()
doConfigOptions [] = io $ return ()
doConfigOptions (o:oo) =
  do let next = doConfigOptions oo
         nz s = let x = read s in max 0 x
         bool = (`elem` ["True", "true", "Yes", "yes", "On", "on"])
     (case o of
        High h -> setConfigValue (read h) high
        Low l -> setConfigValue (read l) low
        HighColor c -> setConfigValue (Just c) highColor
        NormalColor c -> setConfigValue (Just c) normalColor
        LowColor c -> setConfigValue (Just c) lowColor
        Template t -> setConfigValue t template
        PercentPad p -> setConfigValue (nz p) ppad
        DecDigits d -> setConfigValue (nz d) decDigits
        MinWidth w -> setConfigValue (nz w) minWidth
        MaxWidth w -> setConfigValue (nz w) maxWidth
        Width w -> setConfigValue (nz w) minWidth >>
                   setConfigValue (nz w) maxWidth
        PadChars s -> setConfigValue s padChars
        PadAlign a -> setConfigValue ("r" `isPrefixOf` a) padRight
        BarBack s -> setConfigValue s barBack
        BarFore s -> setConfigValue s barFore
        BarWidth w -> setConfigValue (nz w) barWidth
        UseSuffix u -> setConfigValue (bool u) useSuffix) >> next
-- | Run the monitor action forever, once every @r@ tenths of a second,
-- feeding each result string to the callback.
runM :: [String] -> IO MConfig -> ([String] -> Monitor String) -> Int
        -> (String -> IO ()) -> IO ()
runM args conf action r = runMB args conf action (tenthSeconds r)
-- | Like 'runM' but with an arbitrary delay action between iterations; any
-- exception escaping the loop is rendered by 'showException' and sent to
-- the callback.
runMB :: [String] -> IO MConfig -> ([String] -> Monitor String)
         -> IO () -> (String -> IO ()) -> IO ()
runMB args conf action wait cb = handle (cb . showException) loop
    where ac = doArgs args action
          loop = conf >>= runReaderT ac >>= cb >> wait >> loop
-- | Render any exception as an "error: ..." string.
showException :: SomeException -> String
showException = ("error: "++) . show . flip asTypeOf undefined
-- | Lift an 'IO' action into the 'Monitor' monad.
io :: IO a -> Monitor a
io = liftIO
-- $parsers
-- | Run a Parsec parser over a string, returning the empty list on failure.
runP :: Parser [a] -> String -> IO [a]
runP p i =
    case parse p "" i of
      Left _ -> return []
      Right x -> return x
-- | Consume everything up to (and including) the first character of @s@,
-- returning what came before it.  NOTE(review): partial when @s@ is the
-- empty string (uses 'head').
getAllBut :: String -> Parser String
getAllBut s =
    manyTill (noneOf s) (char $ head s)
-- | Skip leading spaces and read a run of digits as a 'Float'.
getNumbers :: Parser Float
getNumbers = skipMany space >> many1 digit >>= \n -> return $ read n
-- | Skip leading spaces and return a run of digits unparsed.
getNumbersAsString :: Parser String
getNumbersAsString = skipMany space >> many1 digit >>= \n -> return n
-- | Skip the remainder of the current line, including its newline.
skipRestOfLine :: Parser Char
skipRestOfLine =
    do many $ noneOf "\n\r"
       newline
-- | Return the rest of the line following the first occurrence of @s@,
-- or the empty string if @s@ never appears.
getAfterString :: String -> Parser String
getAfterString s =
    do { try $ manyTill skipRestOfLine $ string s
       ; manyTill anyChar newline
       } <|> return ""
-- | Skip whole lines until the string @s@ is found.
skipTillString :: String -> Parser String
skipTillString s =
    manyTill skipRestOfLine $ string s
-- | Parses the output template string as: literal text, a <placeholder>,
-- more literal text.  Color tags (<fc=...> and </fc>) are passed through as
-- literal text rather than treated as placeholders.
templateStringParser :: Parser (String,String,String)
templateStringParser =
    do { s <- nonPlaceHolder
       ; com <- templateCommandParser
       ; ss <- nonPlaceHolder
       ; return (s, com, ss)
       }
    where
      nonPlaceHolder = liftM concat . many $
                       many1 (noneOf "<") <|> colorSpec
-- | Recognizes color specification and returns it unchanged
colorSpec :: Parser String
colorSpec = try (string "</fc>") <|> try (
              do string "<fc="
                 s <- many1 (alphaNum <|> char ',' <|> char '#')
                 char '>'
                 return $ "<fc=" ++ s ++ ">")
-- | Parses the command part of the template string
templateCommandParser :: Parser String
templateCommandParser =
    do { char '<'
       ; com <- many $ noneOf ">"
       ; char '>'
       ; return com
       }
-- | Combines the template parsers
templateParser :: Parser [(String,String,String)]
templateParser = many templateStringParser --"%")
-- | Takes a list of strings that represent the values of the exported
-- keys. The strings are joined with the exported keys to form a map
-- to be combined with 'combine' to the parsed template. Returns the
-- final output of the monitor.
parseTemplate :: [String] -> Monitor String
parseTemplate l =
    do t <- getConfigValue template
       s <- io $ runP templateParser t
       e <- getConfigValue export
       -- Pair each exported key with its corresponding supplied value.
       let m = Map.fromList . zip e $ l
       return $ combine m s
-- | Given a finite "Map" and a parsed template: produces the resulting
-- output string, substituting each placeholder with its value from the map
-- (or an inline "<key not found!>" marker when the key is missing).
combine :: Map.Map String String -> [(String, String, String)] -> String
combine _ [] = []
combine m ((s,ts,ss):xs) =
    s ++ str ++ ss ++ combine m xs
    where str = Map.findWithDefault err ts m
          err = "<" ++ ts ++ " not found!>"
-- $strings
-- | Position used by 'stringParser': (word index, line index).
type Pos = (Int, Int)
-- | Round a 'Float' to @digits@ decimal places.
takeDigits :: Int -> Float -> Float
takeDigits digits value = rounded / scale
    where scale   = 10 ^ digits
          rounded = fromIntegral (round (value * scale) :: Int)
-- | Render a floating-point number with exactly @digits@ digits after the
-- decimal point.
showDigits :: (RealFloat a) => Int -> a -> String
showDigits digits value = showFFloat (Just digits) value ""
-- | Render a quantity with a binary unit suffix: @d@ is the number of
-- decimal digits, @n@ indexes the starting unit in ["B","K","M","G","T"].
-- Values are repeatedly divided by 1024 until they fit; negative values
-- keep their sign.
showWithUnits :: Int -> Int -> Float -> String
showWithUnits d n x
  | x < 0 = '-' : showWithUnits d n (-x)
  | n > 3 || x < 10^(d + 1) = show (round x :: Int) ++ units n
  | x <= 1024 = showDigits d (x/1024) ++ units (n+1)
  | otherwise = showWithUnits d (n+1) (x/1024)
    where units = (!!) ["B", "K", "M", "G", "T"]
-- | Pad (or truncate) a string to fit between a minimum and a maximum width.
-- A non-positive minimum is treated as 1 and a non-positive maximum as the
-- larger of the string length and the minimum; swapped bounds are reordered.
-- @pad@ supplies the (cycled) padding characters and @pr@ selects padding on
-- the right instead of the left.
padString :: Int -> Int -> String -> Bool -> String -> String
padString mnw mxw pad pr s
  | targetLen < srcLen = take targetLen s
  | pr                 = s ++ filler
  | otherwise          = filler ++ s
  where
    srcLen    = length s
    loBound   = if mnw <= 0 then 1 else mnw
    hiBound   = if mxw <= 0 then max srcLen loBound else mxw
    (lo, hi)  = if loBound <= hiBound
                  then (loBound, hiBound)
                  else (hiBound, loBound)
    targetLen = min (max lo srcLen) hi
    filler    = take (targetLen - srcLen) (cycle pad)
-- | Parse a 'Float' from the beginning of a string, accepting an optional
-- leading minus sign; returns 0 when no number can be read.  (The previous
-- version used bare 'readFloat', which cannot read a sign, so negative
-- values silently parsed as 0.)
parseFloat :: String -> Float
parseFloat s = case readSigned readFloat s of
  (v, _):_ -> v
  _ -> 0
-- | Parse an 'Int' from the beginning of a string, accepting an optional
-- leading minus sign; returns 0 when no number can be read.  (The previous
-- version used bare 'readDec', which cannot read a sign, so negative
-- values silently parsed as 0.)
parseInt :: String -> Int
parseInt s = case readSigned readDec s of
  (v, _):_ -> v
  _ -> 0
-- | Format a fraction (0-1) as a whole-number percentage, honouring the
-- configured percent padding and the optional "%" suffix.
floatToPercent :: Float -> Monitor String
floatToPercent n =
  do pad <- getConfigValue ppad
     pc <- getConfigValue padChars
     pr <- getConfigValue padRight
     up <- getConfigValue useSuffix
     let p = showDigits 0 (n * 100)
         ps = if up then "%" else ""
     return $ padString pad pad pc pr p ++ ps
-- | Pick the word at (word index @x@, line index @y@) out of a lazy
-- 'B.ByteString'; out-of-range indices yield the empty string.
stringParser :: Pos -> B.ByteString -> String
stringParser (x,y) =
     B.unpack . li x . B.words . li y . B.lines
  where li i l | length l > i = l !! i
               | otherwise = B.empty
-- | Wrap a string in an xmobar @<fc>@ tag using the colour stored in the
-- given config field; the string is returned unchanged when no colour is set.
setColor :: String -> Selector (Maybe String) -> Monitor String
setColor str s = do
    mbColor <- getConfigValue s
    return $ maybe str wrap mbColor
  where wrap c = "<fc=" ++ c ++ ">" ++ str ++ "</fc>"
-- | Pad a string using the configured minimum/maximum widths, padding
-- characters and alignment (see 'padString').
showWithPadding :: String -> Monitor String
showWithPadding s =
    do mn <- getConfigValue minWidth
       mx <- getConfigValue maxWidth
       p <- getConfigValue padChars
       pr <- getConfigValue padRight
       return $ padString mn mx p pr s
-- | Wrap a string in the colour matching where the value sits relative to
-- the configured low/high thresholds.
colorizeString :: (Num a, Ord a) => a -> String -> Monitor String
colorizeString x s = do
    h <- getConfigValue high
    l <- getConfigValue low
    let col = setColor s
        [ll,hh] = map fromIntegral $ sort [l, h] -- consider high < low
    -- Guarded singleton lists: 'head' picks the first colour whose test
    -- succeeds; the final [col lowColor | True] guarantees a match, so
    -- 'head' cannot fail here.
    head $ [col highColor | x > hh ] ++
           [col normalColor | x > ll ] ++
           [col lowColor | True]
-- | Render a value with 'showWithPadding' and colorize it by thresholds.
showWithColors :: (Num a, Ord a) => (a -> String) -> a -> Monitor String
showWithColors f x = showWithPadding (f x) >>= colorizeString x
-- | 'showWithColors' with a fixed string rendering.
showWithColors' :: (Num a, Ord a) => String -> a -> Monitor String
showWithColors' str = showWithColors (const str)
-- | Render each fraction (0-1) as a colored percentage; threshold checks
-- use the value scaled to 0-100.
showPercentsWithColors :: [Float] -> Monitor [String]
showPercentsWithColors fs =
  do fstrs <- mapM floatToPercent fs
     zipWithM (showWithColors . const) fstrs (map (*100) fs)
-- | 'showPercentsWithColors' for a single value.
showPercentWithColors :: Float -> Monitor String
showPercentWithColors f = liftM head $ showPercentsWithColors [f]
-- | Draw a horizontal bar: @v@ is the value used for threshold coloring,
-- @x@ the fill fraction (0-1) of the configured bar width.
showPercentBar :: Float -> Float -> Monitor String
showPercentBar v x = do
  bb <- getConfigValue barBack
  bf <- getConfigValue barFore
  bw <- getConfigValue barWidth
  let len = min bw $ round (fromIntegral bw * x)
  s <- colorizeString v (take len $ cycle bf)
  return $ s ++ take (bw - len) (cycle bb)
-- | Draw a bar on a logarithmic (base 2) scale: @f@ is a fixed fraction
-- offset and @v@ the value; zero shows an empty bar and values at or below
-- the low threshold show a minimal one-cell bar.
showLogBar :: Float -> Float -> Monitor String
showLogBar f v = do
  h <- fromIntegral `fmap` getConfigValue high
  l <- fromIntegral `fmap` getConfigValue low
  bw <- fromIntegral `fmap` getConfigValue barWidth
  let [ll, hh] = sort [l, h]
      choose x | x == 0.0 = 0
               | x <= ll = 1 / bw
               | otherwise = f + logBase 2 (x / hh) / bw
  showPercentBar v $ choose v
| raboof/xmobar | src/Plugins/Monitors/Common.hs | bsd-3-clause | 14,784 | 0 | 15 | 4,723 | 4,626 | 2,343 | 2,283 | 335 | 17 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.