code
stringlengths 2
1.05M
| repo_name
stringlengths 5
101
| path
stringlengths 4
991
| language
stringclasses 3
values | license
stringclasses 5
values | size
int64 2
1.05M
|
|---|---|---|---|---|---|
{-# LANGUAGE OverloadedStrings #-}

-- | Specs for Advent of Code 2015, Day 12: summing every number embedded
-- in a JSON document ('jsonSum'), and the variant that skips any object
-- containing a \"red\" value ('jsonSumFixed').
--
-- Fix: the LANGUAGE pragma must precede the module header (it was placed
-- after it, where GHC rejects it), and the do-block layout is restored.
module Y2015.D12Spec (spec) where

import Y2015
import Test.Hspec
import Data.ByteString.Lazy.Char8 (pack)

spec :: Spec
spec = parallel $
  describe "Day 12" $ do
    describe "jsonSum" $ do
      it "sums three-member lists" $
        jsonSum (pack "[1,2,3]") `shouldBe` 6
      it "sums two-member objects" $
        jsonSum (pack "{\"a\":2,\"b\":4}") `shouldBe` 6
      it "sums nested lists" $
        jsonSum (pack "[[[3]]]") `shouldBe` 3
      it "sums nested objects" $
        jsonSum (pack "{\"a\":{\"b\":4},\"c\":-1}") `shouldBe` 3
      it "sums mixed objects" $
        jsonSum (pack "{\"a\":[-1,1]}") `shouldBe` 0
      it "sums mixed lists" $
        jsonSum (pack "[-1,{\"a\":1}]") `shouldBe` 0
      it "sums empty lists" $
        jsonSum (pack "[]") `shouldBe` 0
      it "sums empty objects" $
        jsonSum (pack "{}") `shouldBe` 0
    describe "jsonSumFixed" $ do
      it "sums three-member lists" $
        jsonSumFixed (pack "[1,2,3]") `shouldBe` 6
      it "ignores red in nested objects" $
        jsonSumFixed (pack "[1,{\"c\":\"red\",\"b\":2},3]") `shouldBe` 4
      it "ignores red objects" $
        jsonSumFixed (pack "{\"d\":\"red\",\"e\":[1,2,3,4],\"f\":5}") `shouldBe` 0
      it "ignores red array elements" $
        jsonSumFixed (pack "[1,\"red\",5]") `shouldBe` 6
|
tylerjl/adventofcode
|
test/Y2015/D12Spec.hs
|
Haskell
|
mit
| 1,522
|
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- | Provides a dummy authentication module that simply lets a user specify
-- their identifier. This is not intended for real world use, just for
-- testing. This plugin supports form submissions via JSON (since 1.6.8).
--
-- = Using the JSON Login Endpoint
--
-- We are assuming that you have declared `authRoute` as follows
--
-- @
-- Just $ AuthR LoginR
-- @
--
-- If you are using a different one, then you have to adjust the
-- endpoint accordingly.
--
-- @
-- Endpoint: \/auth\/page\/dummy
-- Method: POST
-- JSON Data: {
-- "ident": "my identifier"
-- }
-- @
--
-- Remember to add the following headers:
--
-- - Accept: application\/json
-- - Content-Type: application\/json
module Yesod.Auth.Dummy
( authDummy
) where
import Yesod.Auth
import Yesod.Form (runInputPost, textField, ireq)
import Yesod.Core
import Data.Text (Text)
import Data.Aeson.Types (Result(..), Parser)
import qualified Data.Aeson.Types as A (parseEither, withObject)
-- | Parse the @ident@ field out of a JSON object such as
-- @{\"ident\": \"my identifier\"}@.
identParser :: Value -> Parser Text
identParser = A.withObject "Ident" (\obj -> obj .: "ident")
-- | Dummy authentication plugin: accepts any identifier submitted either
-- as JSON (@{\"ident\": ...}@) or as a regular form POST, and logs the
-- user in with it.  Not for production use.
authDummy :: YesodAuth m => AuthPlugin m
authDummy =
AuthPlugin "dummy" dispatch login
where
-- POST to the plugin root: try the JSON body first; if it does not
-- parse as {"ident": ...}, fall back to the form field "ident".
dispatch "POST" [] = do
(jsonResult :: Result Value) <- parseCheckJsonBody
eIdent <- case jsonResult of
Success val -> return $ A.parseEither identParser val
Error err -> return $ Left err
case eIdent of
Right ident ->
setCredsRedirect $ Creds "dummy" ident []
Left _ -> do
ident <- runInputPost $ ireq textField "ident"
setCredsRedirect $ Creds "dummy" ident []
-- Any other method/path under this plugin is a 404.
dispatch _ _ = notFound
url = PluginR "dummy" []
-- Login widget: a plain form with a CSRF token (when present) and a
-- single text input named "ident".
login authToMaster = do
request <- getRequest
toWidget [hamlet|
$newline never
<form method="post" action="@{authToMaster url}">
$maybe t <- reqToken request
<input type=hidden name=#{defaultCsrfParamName} value=#{t}>
Your new identifier is: #
<input type="text" name="ident">
<input type="submit" value="Dummy Login">
|]
|
geraldus/yesod
|
yesod-auth/Yesod/Auth/Dummy.hs
|
Haskell
|
mit
| 2,294
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeSynonymInstances #-}
module Common where
import Control.Arrow ((>>>), first, left)
import Control.Applicative
import Control.Concurrent (forkIO, killThread, threadDelay)
import Control.Concurrent.Async
import Control.Concurrent.MVar
import Control.Exception
import Control.Monad
import Control.Monad.Error.Class
import Control.Monad.Trans.Maybe
import Control.Monad.IO.Class
import Data.Foldable
import Data.Function ((&))
import Data.IntCast
import Data.Int (Int64)
import Data.Map.Strict (Map)
import Data.Maybe (fromMaybe)
import Data.Monoid ((<>))
import Data.Sequence (Seq, ViewL((:<)), (|>))
import Data.Serialize
import Data.Tuple (swap)
import Data.Typeable (Typeable, typeRep)
import Data.Word (Word64, Word32)
import Foreign (Storable, sizeOf)
import System.IO
import qualified Data.Map.Strict as Map
import qualified Data.Sequence as Seq
{- DEBUG -}
import qualified System.GlobalLock as GLock
-- | Print one line to stderr under the process-wide global lock, so
-- output from concurrent threads cannot interleave.
atomicPrintStderr :: String -> IO ()
atomicPrintStderr line =
  GLock.lock (hPutStrLn stderr line >> hFlush stderr)
-- | Log entry, exit and any exception of an 'IO' action to stderr,
-- tagging every message with a label and a showable context value.
traceIO :: Show b => String -> b -> IO a -> IO a
traceIO label ctx body = do
  atomicPrintStderr ("++" <> label <> " " <> show ctx)
  outcome <- try body
  case outcome of
    Right value -> do
      atomicPrintStderr ("--" <> label <> " " <> show ctx)
      pure value
    Left err -> do
      atomicPrintStderr ("!!" <> label <> " " <> show ctx <>
                         ": " <> show (err :: SomeException))
      throwIO err
-- END DEBUG -}
-- | Apply a pure function to the contents of an 'MVar'.
modifyMVarPure :: MVar a -> (a -> a) -> IO ()
modifyMVarPure var f = modifyMVar_ var (\x -> pure (f x))
-- | Run an action on every element concurrently, discarding the results.
mapConcurrently_ :: Foldable t => (a -> IO ()) -> t a -> IO ()
mapConcurrently_ act xs =
  runConcurrently (traverse_ (\x -> Concurrently (act x)) xs)
-- | Run two actions concurrently and discard both results.
concurrently_ :: IO () -> IO () -> IO ()
concurrently_ a b = () <$ concurrently a b
-- | Block (effectively) forever by sleeping in a loop.
standby :: IO a
standby = threadDelay 1000000000 >> standby
-- | Automatically restart if the action fails (after the given delay,
-- in microseconds); returns the first successful result.
autorestart :: Int -> IO a -> IO a
-- this type signature is not ideal ^
autorestart delay body = tryAny body >>= either restart pure
  where
    restart err = do
      hPutStrLn stderr (show err)
      hFlush stderr
      threadDelay delay
      autorestart delay body
-- | 'forkIO', discarding the 'ThreadId'.
forkIO_ :: IO () -> IO ()
forkIO_ action = () <$ forkIO action
-- | Like @try \@SomeException@, but runs the action in its own thread so
-- that asynchronous exceptions delivered to the *caller* interrupt the
-- wait rather than being reported as failures of the action itself.
tryAny :: IO a -> IO (Either SomeException a)
tryAny action = do
  result <- newEmptyMVar
  -- Fork under 'mask' so the child thread reliably publishes its
  -- outcome; if the caller is interrupted while waiting, kill the child.
  mask $ \ unmask -> do
    thread <- forkIO (try (unmask action) >>= putMVar result)
    unmask (readMVar result) `onException` killThread thread
-- | Like 'fromJustIO', but derives both the exception and the 'Maybe'
-- from the same input value.
fromJustIO1 :: Exception e => (a -> e) -> (a -> Maybe b) -> a -> IO b
fromJustIO1 mkErr f x = fromJustIO (mkErr x) (f x)
-- | Unwrap a 'Maybe' in 'IO', throwing the given exception on 'Nothing'.
fromJustIO :: Exception e => e -> Maybe a -> IO a
fromJustIO err = maybe (throwIO err) pure
-- | Unwrap a 'Maybe', calling 'error' with the given message on 'Nothing'.
fromJustE :: String -> Maybe a -> a
fromJustE msg m = case m of
  Just x  -> x
  Nothing -> error msg
-- | Unwrap a 'Maybe' in any 'MonadError', throwing @e@ on 'Nothing'.
maybeToMonadError :: MonadError e m => e -> Maybe a -> m a
maybeToMonadError e m = eitherToMonadError (maybeToEither e m)
-- | Lift an 'Either' into any 'MonadError'.
eitherToMonadError :: MonadError e m => Either e a -> m a
eitherToMonadError = either throwError pure
-- | Keep the 'Right' value, discarding any 'Left'.
eitherToMaybe :: Either e a -> Maybe a
eitherToMaybe = either (const Nothing) Just
-- | Lift a 'Maybe' into any 'Alternative' ('Nothing' becomes 'empty').
maybeToAlternative :: Alternative f => Maybe a -> f a
maybeToAlternative = maybe empty pure
-- | Tag a 'Nothing' with the given error value.
maybeToEither :: e -> Maybe a -> Either e a
maybeToEither e = maybe (Left e) Right
-- | Build forward and reverse lookup maps from a list of pairs.
generateBidiMap :: (Ord a, Ord b) => [(a, b)] -> (Map a b, Map b a)
generateBidiMap pairs = (forward, backward)
  where
    forward  = Map.fromList pairs
    backward = Map.fromList [(b, a) | (a, b) <- pairs]
-- | Look up a key and, when present, return its value together with the
-- map with that key deleted.
popMap :: Ord k => k -> Map k a -> Maybe (a, Map k a)
popMap k m = fmap (\v -> (v, Map.delete k m)) (Map.lookup k m)
-- | Same as '<$>' but flips the order of the arguments.
-- NOTE(review): base >= 4.11 exports an identical '<&>' from
-- "Data.Functor"; an unqualified import of it would clash with this
-- local definition.
{-# INLINE (<&>) #-}
infixl 1 <&>
(<&>) :: Functor f => f a -> (a -> b) -> f b
m <&> f = fmap f m
-- | Similar to 'sizeOf' but acts on a proxy value.
-- The proxy is never evaluated: 'sizeOf' inspects only the type, and
-- 'unproxy' merely produces a bottom of the right type.
{-# INLINE sizeOfProxy #-}
sizeOfProxy :: Storable a => proxy a -> Int
sizeOfProxy = sizeOf . unproxy
-- | Restrict the type of the first argument based on a proxy value.
-- It otherwise behaves identical to 'const'.
{-# INLINE asTypeOfProxy #-}
asTypeOfProxy :: a -> proxy a -> a
asTypeOfProxy x _ = x
-- | Construct a dummy value based on the type of a proxy value.
-- The dummy value must not be evaluated.
unproxy :: proxy a -> a
unproxy _ = error "unproxy: dummy value is not meant to be evaluated"
-- | A dummy value that must not be evaluated.
-- Useful as a typed placeholder argument.
__ :: a
__ = error "__: dummy value is not meant to be evaluated"
|
Rufflewind/conplex
|
Common.hs
|
Haskell
|
mit
| 4,561
|
-- makeBackronym
-- http://www.codewars.com/kata/55805ab490c73741b7000064/
module Codewars.Exercise.Backronym where

import Codewars.Exercise.Backronym.Dictionary (dict)
import Data.Char (toUpper)

-- | Expand each (upper-cased) letter of the input via the dictionary
-- and join the resulting words with spaces.
makeBackronym :: String -> String
makeBackronym word = unwords [dict (toUpper c) | c <- word]
|
gafiatulin/codewars
|
src/7 kyu/Backronym.hs
|
Haskell
|
mit
| 280
|
module Tamari where
import Data.List
import Data.Maybe
import Catalan
import Bijections
-- | All trees exactly one right rotation away from the input: rotate at
-- the root (@B (B t11 t12) t2@ becomes @B t11 (B t12 t2)@) or recurse
-- into either subtree.  Leaves admit no rotation.
rotR1 :: Tree -> [Tree]
rotR1 (B (t1 @ (B t11 t12)) t2) =
B t11 (B t12 t2) : [B t1' t2 | t1' <- rotR1 t1] ++ [B t1 t2' | t2' <- rotR1 t2]
rotR1 (B L t2) = [B L t2' | t2' <- rotR1 t2]
rotR1 _ = []
-- | All trees exactly one left rotation away (the inverse of 'rotR1'):
-- rotate at the root (@B t1 (B t21 t22)@ becomes @B (B t1 t21) t22@) or
-- recurse into either subtree.
rotL1 :: Tree -> [Tree]
rotL1 (B t1 (t2 @ (B t21 t22))) =
B (B t1 t21) t22 : [B t1' t2 | t1' <- rotL1 t1] ++ [B t1 t2' | t2' <- rotL1 t2]
rotL1 (B t1 L) = [B t1' L | t1' <- rotL1 t1]
rotL1 _ = []
-- | Upward closure of a tree under right rotations (includes the tree
-- itself); duplicates are merged with 'union'.
tamari_up :: Tree -> [Tree]
tamari_up t = t : foldr (union . tamari_up) [] (rotR1 t)
-- | Downward closure of a tree under left rotations (includes the tree
-- itself); duplicates are merged with 'union'.
tamari_down :: Tree -> [Tree]
tamari_down t = t : foldr (union . tamari_down) [] (rotL1 t)
-- | @t1 <= t2@ in the Tamari order iff @t2@ lies in the upward closure
-- of @t1@.
tamari_order :: Tree -> Tree -> Bool
tamari_order t1 t2 = t2 `elem` tamari_up t1
-- | Kreweras order on binary trees: either both pairs of subtrees are
-- componentwise related, or a right rotation at the root of the first
-- tree brings it closer (tried recursively).
kreweras_order :: Tree -> Tree -> Bool
kreweras_order L L = True
kreweras_order (B t1 t2) (B t1' t2') =
(kreweras_order t1 t1' && kreweras_order t2 t2') ||
case t1 of
B t11 t12 -> kreweras_order (B t11 (B t12 t2)) (B t1' t2')
L -> False
kreweras_order _ _ = False
-- | All comparable pairs of the Tamari order on trees with @n@ internal
-- nodes.
tamari :: Int -> [(Tree,Tree)]
tamari n = [ (lo, hi) | lo <- binary_trees n, hi <- tamari_up lo ]
-- [length $ tamari n | n <- [0..]] == [1,1,3,13,68,399,2530,...]
-- | All comparable pairs of the Kreweras order on trees with @n@ nodes.
kreweras :: Int -> [(Tree,Tree)]
kreweras n =
  [ (lo, hi) | lo <- binary_trees n, hi <- binary_trees n, kreweras_order lo hi ]
-- | For every tree with @n@ nodes, the size of its downward Tamari
-- interval.
tamari_parts :: Int -> [Int]
tamari_parts n = map (length . tamari_down) (binary_trees n)
-- some properties of the Tamari lattice
-- If t<=u in the Tamari order, then the left-branching spine of t is at
-- least as long as the left-branching spine of u.
-- verified for n<=6
prop1 :: Int -> Bool
prop1 n = all spineShrinks (tamari n)
  where
    spineShrinks (t1, t2) =
      length (tree2spine t1) >= length (tree2spine t2)
-- sequent-style decision procedure for Tamari order
-- | @tamari_seq g t u@ decides t <= u with an explicit context @g@ of
-- pending right subtrees.  Left nodes of @t@ are pushed onto @g@; a leaf
-- against @B u1 u2@ splits the context by leaf count: @grab@ collects
-- exactly @leaves u1 - 1@ leaves' worth of trees (Nothing when the split
-- is impossible).  The inner @let (t:g') = g@ is safe: the null case is
-- checked first.
tamari_seq :: [Tree] -> Tree -> Tree -> Bool
tamari_seq g (B t1 t2) u = tamari_seq (t2:g) t1 u
tamari_seq g L L = g == []
tamari_seq g L (B u1 u2) =
let k = leaves u1 in
let grab k g acc =
if k == 0 then Just (acc,g)
else if g == [] then Nothing
else
let (t:g') = g in
let i = leaves t in
if i > k then Nothing
else grab (k - i) g' (t:acc) in
case grab (k-1) g [] of
Nothing -> False
Just (g1,t2:g2) -> tamari_seq (reverse g1) L u1 && tamari_seq g2 t2 u2
Just (g1,[]) -> False
-- claim: tamari_seq agrees with tamari_order
-- verified for n<=6
prop2 :: Int -> Bool
prop2 n =
  and [ tamari_order t1 t2 == tamari_seq [] t1 t2
      | t1 <- binary_trees n, t2 <- binary_trees n ]
-- focused sequent calculus
-- | Focused variant of 'tamari_seq': 'tamari_linv' decomposes the left
-- tree into its spine all at once, then 'tamari_neu' matches the context
-- against the right tree, splitting it by leaf counts exactly as in
-- 'tamari_seq' (same partial-but-guarded @grab@ helper).
tamari_linv :: Tree -> [Tree] -> Tree -> Bool
tamari_neu :: [Tree] -> Tree -> Bool
tamari_linv t g u = let ts = tree2spine t in tamari_neu (reverse ts ++ g) u
tamari_neu g L = g == []
tamari_neu g (B u1 u2) =
let k = leaves u1 in
let grab k g acc =
if k == 0 then Just (acc,g)
else if g == [] then Nothing
else
let (t:g') = g in
let i = leaves t in
if i > k then Nothing
else grab (k - i) g' (t:acc) in
case grab (k-1) g [] of
Nothing -> False
Just (g1,t2:g2) -> tamari_neu (reverse g1) u1 && tamari_linv t2 g2 u2
Just (g1,[]) -> False
-- verified for n<=7
prop3 :: Int -> Bool
prop3 n =
  and [ tamari_linv t1 [] t2 == tamari_seq [] t1 t2
      | t1 <- binary_trees n, t2 <- binary_trees n ]
-- | Shuffle variant of the focused calculus: instead of prepending the
-- spine of @t@ to the context, try every interleaving of the spine with
-- the context ('shuffle' from Bijections), and let 'shuffle_neu' pick
-- any remaining tree with 'remove' when descending right.
shuffle_linv :: Tree -> [Tree] -> Tree -> Bool
shuffle_neu :: [Tree] -> Tree -> Bool
shuffle_linv t g u =
let ts = tree2spine t in
-- flip any (permutations ts) $ \ts' ->
flip any (shuffle ts g) $ \g' ->
shuffle_neu g' u
shuffle_neu g L = g == []
shuffle_neu g (B u1 u2) =
let k = leaves u1 in
let grab k g acc =
if k == 0 then Just (acc,g)
else if g == [] then Nothing
else
let (t:g') = g in
let i = leaves t in
if i > k then Nothing
else grab (k - i) g' (t:acc) in
case grab (k-1) g [] of
Nothing -> False
Just (g1,g2) ->
flip any (remove g2) $ \(t2,g2') ->
shuffle_neu (reverse g1) u1 && shuffle_linv t2 g2' u2
-- lattice structure
-- | Meet on fully decomposed contexts: both sides are flattened into
-- leaf sequences, the common prefix after each leaf is kept, and the
-- remainders are recursed on.
-- NOTE(review): the pattern match is partial — there is no equation for
-- contexts of different lengths (e.g. @L:g@ against @[]@); presumably
-- callers only pass contexts with equal leaf counts — confirm.
tamari_meetc :: [Tree] -> [Tree] -> [Tree]
tamari_meetc [] [] = []
tamari_meetc (B t1 t2:g) d = tamari_meetc (t1:t2:g) d
tamari_meetc g (B t1 t2:d) = tamari_meetc g (t1:t2:d)
tamari_meetc (L:g) (L:d) =
let match = map fst $ fst $ break (uncurry (/=)) (zip g d) in
let g' = fromJust $ stripPrefix match g in
let d' = fromJust $ stripPrefix match d in
L:match ++ tamari_meetc g' d'
-- tamari_join :: Tree -> Tree -> Tree
-- tamari_join t1 t2 =
-- if tamari_linv t1 [] t2 then t2 else
-- if tamari_linv t2 [] t1 then t1 else
-- let (cur1:g1) = tree2spine t1 in
-- let (cur2:g2) = tree2spine t2 in
-- match_and_join g1 g2 (leaves cur1,[cur1]) (leaves cur2,[cur2])
-- where
-- match_and_join :: [Tree] -> [Tree] -> (Int,[Tree]) -> (Int,[Tree]) -> Tree
-- match_and_join g1 g2 (k1,cur1) (k2,cur2) =
-- if k1 == k2 then
-- let j = tamari_join (tpsi cur1) (tpsi cur2) in
-- if g1 == g2 then j else let (t1:g1') = g1 in let (t2:g2') = g2 in B j (match_and_join g1' g2' (leaves t1,[t1]) (leaves t2,[t2]))
-- else if k1 < k2 then
-- let (t1:g1') = g1 in
-- match_and_join g1' g2 (k1+leaves t1,t1:cur1) (k2,cur2)
-- else
-- let (t2:g2') = g2 in
-- match_and_join g1 g2' (k1,cur1) (k2+leaves t2,t2:cur2)
-- tpsi :: [Tree] -> Tree
-- tpsi [t] = t
-- tpsi (t:ts) = foldl B t ts
-- | Linearize a tree into the polarities of its leaves: a leaf is
-- 'False' when reached as a left child (or the root), 'True' as a right
-- child.
tree_type :: Tree -> [Bool]
tree_type = pol False
  where
    pol :: Bool -> Tree -> [Bool]
    pol here L = [here]
    pol _ (B lt rt) = pol False lt ++ pol True rt
|
noamz/linlam-gos
|
src/Tamari.hs
|
Haskell
|
mit
| 5,691
|
--Task 1
--sumProducts [[1,2,3], [4,5], [], [-2,3,0,5,1]] -> 27 -- 27 = 6 + 20 + 1 + 0
-- | Sum of the products of the inner lists.  An empty inner list has
-- product 1 (the multiplicative identity), matching the worked example
-- above and the sibling 'sumProducts2'.  Fix: the previous version
-- filtered empty lists out, yielding 26 for the documented example.
sumProducts :: Num a => [[a]] -> a
sumProducts m = sum (map product m)
-- | Sum of the products of the inner lists, written as a point-free
-- pipeline; the fold's unit 1 means an empty inner list contributes 1.
sumProducts2 :: Num a => [[a]] -> a
sumProducts2 = sum . map (foldr (*) 1)
--Task 2
--occurrences [1..6] [1,3,4,3,2,3,3,0,5,3,1] -> [2,1,5,1,1,0]
-- | For each element of the first list, count how many times it occurs
-- in the second list.
occurrences needles haystack = map countIn needles
  where countIn needle = length [ x | x <- haystack, x == needle ]
--Task 6
--matchLengths [[1..4],[0..3],[5,4,8,10]] -> True
--matchLengths [[1..4],[0..3],[],[5,4,8,10]] -> False
-- | True when every inner list has the same length (vacuously True for
-- an empty outer list).
matchLengths lists = allSame (map length lists)
  where allSame xs = and (zipWith (==) xs (drop 1 xs))
--Task 7
--setUnion [1,2,3,5] [2,4,5,6,7] -> [1,2,3,4,5,6,7]
--setIntersect [1,2,3,5] [2,4,5,6,7] -> [2,5]
--setDiff [1,2,3,5] [2,4,5,6,7] -> [1,3]
--setDiff [2,4,5,6,7] [1,2,3,5] -> [4,6,7]
-- | Union of two sorted, duplicate-free lists (merge keeping one copy
-- of common elements).
setUnion xs [] = xs
setUnion [] ys = ys
setUnion xxs@(x:xs) yys@(y:ys) = case compare x y of
  LT -> x : setUnion xs yys
  GT -> y : setUnion xxs ys
  EQ -> x : setUnion xs ys
-- | Intersection of two sorted, duplicate-free lists.
setIntersect _ [] = []
setIntersect [] _ = []
setIntersect xxs@(x:xs) yys@(y:ys) = case compare x y of
  LT -> setIntersect xs yys
  GT -> setIntersect xxs ys
  EQ -> x : setIntersect xs ys
-- | Elements of the first sorted list not present in the second.
setDiff xs [] = xs
setDiff [] _ = []
setDiff xxs@(x:xs) yys@(y:ys) = case compare x y of
  EQ -> setDiff xs ys
  LT -> x : setDiff xs yys
  GT -> setDiff xxs ys
-- | Symmetric difference: elements in exactly one of the two sorted
-- lists.
setSumDiff xs [] = xs
setSumDiff [] ys = ys
setSumDiff xs ys = setUnion (setDiff xs ys) (setDiff ys xs)
-- | Merge two sorted lists, keeping both copies of common elements.
setSum xs [] = xs
setSum [] ys = ys
setSum xxs@(x:xs) yys@(y:ys) = case compare x y of
  EQ -> x : y : setSum xs ys
  LT -> x : setSum xs yys
  GT -> y : setSum xxs ys
|
pepincho/Functional-Programming
|
haskell/ex-4.hs
|
Haskell
|
mit
| 1,678
|
-- FIXME: Depend on the not-yet-released project-template library.
{-# LANGUAGE OverloadedStrings #-}
module MultiFile where
import Control.Monad (unless)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Trans.Resource (runExceptionT)
import qualified Data.ByteString as S
import qualified Data.ByteString.Base64 as B64
import Data.Conduit (Conduit, MonadResource, Sink,
await, awaitForever, leftover,
yield, ($$), (=$))
import Data.Conduit.Binary (sinkFile)
import Data.Conduit.List (sinkNull)
import qualified Data.Conduit.List as CL
import qualified Data.Conduit.Text as CT
import Data.Functor.Identity (runIdentity)
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Encoding (encodeUtf8)
import Filesystem (createTree)
import Filesystem.Path.CurrentOS (FilePath, directory, encode,
encodeString, fromText, (</>))
import Prelude hiding (FilePath)
-- | Sink that splits a textual \"multi-file\" stream into files on disk
-- under the given root.  A line of the form @{-# START_FILE name #-}@
-- (or @{-# START_FILE BASE64 name #-}@) starts a new file; the
-- following lines are its contents until the next header.  In the
-- BASE64 case exactly one line is consumed and leniently decoded.
-- @fixLine@ is applied to every decoded input line first (useful for
-- variable substitution).
unpackMultiFile
:: MonadResource m
=> FilePath -- ^ output folder
-> (Text -> Text) -- ^ fix each input line, good for variables
-> Sink S.ByteString m ()
unpackMultiFile root fixLine =
CT.decode CT.utf8 =$ CT.lines =$ CL.map fixLine =$ start
where
start =
await >>= maybe (return ()) go
where
go t =
case getFileName t of
Nothing -> error $ "Invalid input: " ++ show t
Just (fp', isBinary) -> do
let fp = root </> fromText fp'
liftIO $ createTree $ directory fp
let src
| isBinary = binaryLoop
| otherwise = textLoop
src =$ sinkFile (encodeString fp)
start
binaryLoop = do
await >>= maybe (error "binaryLoop needs 1 line") go
where
go = yield . B64.decodeLenient . encodeUtf8
textLoop =
await >>= maybe (return ()) go
where
go t =
case getFileName t of
Just{} -> leftover t
Nothing -> do
yield $ encodeUtf8 t
yield "\n"
textLoop
getFileName t =
case T.words t of
["{-#", "START_FILE", fn, "#-}"] -> Just (fn, False)
["{-#", "START_FILE", "BASE64", fn, "#-}"] -> Just (fn, True)
_ -> Nothing
-- | Stream a set of files (paths relative to @root@) as one multi-file
-- document.  Files whose bytes decode as UTF-8 are emitted verbatim
-- under a @START_FILE@ header; anything else is base64-encoded under a
-- @START_FILE BASE64@ header.  A trailing newline is added when the
-- file does not end with one.
-- NOTE(review): 'runExceptionT' is used only to probe UTF-8
-- decodability of the bytes; it is deprecated in newer resourcet.
createMultiFile
:: MonadIO m
=> FilePath -- ^ folder containing the files
-> Conduit FilePath m S.ByteString -- ^ FilePath is relative to containing folder
createMultiFile root = do
awaitForever handleFile
where
handleFile fp' = do
bs <- liftIO $ S.readFile $ encodeString fp
case runIdentity $ runExceptionT $ yield bs $$ CT.decode CT.utf8 =$ sinkNull of
Left{} -> do
yield "{-# START_FILE BASE64 "
yield $ encode fp'
yield " #-}\n"
yield $ B64.encode bs
yield "\n"
Right{} -> do
yield "{-# START_FILE "
yield $ encode fp'
yield " #-}\n"
yield bs
unless ("\n" `S.isSuffixOf` bs) $ yield "\n"
where
fp = root </> fp'
|
piyush-kurur/yesod
|
yesod/MultiFile.hs
|
Haskell
|
mit
| 3,652
|
{-# LANGUAGE MultiWayIf#-}
module Mealy where
import Data.Maybe (isJust)
import FRP.Yampa
import qualified Graphics.Gloss.Interface.IO.Game as G
import Buttons
-- | Predicates over the horizontal position coordinate.
leftmost p = p > 90
rightmost p = p < (-90)
centermost p = (-10) < p && p < 10
-- Did the Yampa event carry a Gloss event convertible via
-- 'toYampaEvent'?  NOTE(review): 'toYampaEvent' comes from Buttons —
-- presumably Nothing for non-click events; confirm there.
isClick = isEvent. filterE (isJust. toYampaEvent)
-- | Position updates: moving left subtracts the velocity, moving right
-- adds it.  Velocity and score currently pass through unchanged.
moveLeft pos vel = pos - vel
moveRight pos vel = pos + vel
updateVel v = v
updateScore s = s
-- | Mealy-style transition: maps (position, velocity, click event,
-- score, direction) to (position', velocity', score', direction').
-- NOTE(review): the multi-way if covers only guard 'p1' (the 'p2'
-- branch is commented out), so inputs failing 'p1' raise a runtime
-- inexhaustive-guards error — confirm this is intended.
stateTrans :: (Int,Int,Event G.Event,Int,Int) -> (Int,Int,Int,Int)
stateTrans (p,v,c,s,d) =
if
| p1(p,v,c,s,d) -> c1 (p,v,c,s,d)
-- | p2(p,v,c,s,d) -> c2 (p,v,c,s,d)
-- | Guard for transition 'c1': at the right edge, not centered, no
-- click, not at the left edge, and direction 0.
p1 (pos, _v, click, _s, dir) =
  and
    [ rightmost pos
    , not (centermost pos)
    , not (isClick click)
    , not (leftmost pos)
    , dir == 0
    ]
-- | Transition taken when 'p1' holds: step the position left by the
-- velocity, reset velocity to 1 and score to 0, keep the direction.
c1 (pos, vel, _click, _score, dir) =
  (moveLeft pos vel, updateVel 1, updateScore 0, dir)
|
santolucito/Euterpea_Projects
|
QuantumArt/Mealy.hs
|
Haskell
|
mit
| 801
|
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.MutationObserver
(js_newMutationObserver, newMutationObserver, js_observe, observe,
js_takeRecords, takeRecords, js_disconnect, disconnect,
MutationObserver, castToMutationObserver, gTypeMutationObserver)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSRef(..), JSString, castRef)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSRef(..), FromJSRef(..))
import GHCJS.Marshal.Pure (PToJSRef(..), PFromJSRef(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.Enums
foreign import javascript unsafe
"new window[\"MutationObserver\"]($1)" js_newMutationObserver ::
JSRef MutationCallback -> IO (JSRef MutationObserver)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/MutationObserver Mozilla MutationObserver documentation>
-- Wraps the raw FFI constructor; a missing callback is passed as JS null.
newMutationObserver ::
(MonadIO m, IsMutationCallback callback) =>
Maybe callback -> m MutationObserver
newMutationObserver callback
= liftIO
(js_newMutationObserver
(maybe jsNull (unMutationCallback . toMutationCallback) callback)
>>= fromJSRefUnchecked)
foreign import javascript unsafe "$1[\"observe\"]($2, $3)"
js_observe ::
JSRef MutationObserver -> JSRef Node -> JSRef Dictionary -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/MutationObserver.observe Mozilla MutationObserver.observe documentation>
-- A 'Nothing' target or options is passed to JS as null.
observe ::
(MonadIO m, IsNode target, IsDictionary options) =>
MutationObserver -> Maybe target -> Maybe options -> m ()
observe self target options
= liftIO
(js_observe (unMutationObserver self)
(maybe jsNull (unNode . toNode) target)
(maybe jsNull (unDictionary . toDictionary) options))
foreign import javascript unsafe "$1[\"takeRecords\"]()"
js_takeRecords ::
JSRef MutationObserver -> IO (JSRef [Maybe MutationRecord])
-- | <https://developer.mozilla.org/en-US/docs/Web/API/MutationObserver.takeRecords Mozilla MutationObserver.takeRecords documentation>
-- Drains and returns the observer's pending mutation records.
takeRecords ::
(MonadIO m) => MutationObserver -> m [Maybe MutationRecord]
takeRecords self
= liftIO
((js_takeRecords (unMutationObserver self)) >>= fromJSRefUnchecked)
foreign import javascript unsafe "$1[\"disconnect\"]()"
js_disconnect :: JSRef MutationObserver -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/MutationObserver.disconnect Mozilla MutationObserver.disconnect documentation>
-- Stops the observer from receiving further notifications.
disconnect :: (MonadIO m) => MutationObserver -> m ()
disconnect self = liftIO (js_disconnect (unMutationObserver self))
|
plow-technologies/ghcjs-dom
|
src/GHCJS/DOM/JSFFI/Generated/MutationObserver.hs
|
Haskell
|
mit
| 3,169
|
-- | Spec for OEIS A305704.
module HeronianTriangles.A305704Spec (main, spec) where

import Test.Hspec
import HeronianTriangles.A305704 (a305704)

main :: IO ()
main = hspec spec

spec :: Spec
spec = describe "A305704" $
  it "correctly computes the first 20 elements" $
    map a305704 [1..20] `shouldBe` expectedValues
  where
    expectedValues =
      [6, 8, 9, 12, 15, 16, 18, 20, 21, 22, 24, 25, 27, 28, 30, 32, 33, 34, 35, 36]
|
peterokagey/haskellOEIS
|
test/HeronianTriangles/A305704Spec.hs
|
Haskell
|
apache-2.0
| 405
|
--
-- Licensed to the Apache Software Foundation (ASF) under one
-- or more contributor license agreements. See the NOTICE file
-- distributed with this work for additional information
-- regarding copyright ownership. The ASF licenses this file
-- to you under the Apache License, Version 2.0 (the
-- "License"); you may not use this file except in compliance
-- with the License. You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing,
-- software distributed under the License is distributed on an
-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-- KIND, either express or implied. See the License for the
-- specific language governing permissions and limitations
-- under the License.
--
{-# LANGUAGE CPP #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Thrift.Protocol.Binary
( module Thrift.Protocol
, BinaryProtocol(..)
) where
import Control.Exception ( throw )
import Control.Monad
import Data.Bits
import Data.ByteString.Builder
#if __GLASGOW_HASKELL__ < 710
import Data.Functor
#endif
import Data.Int
#if __GLASGOW_HASKELL__ < 804
import Data.Monoid
#endif
import Data.Word
import Data.Text.Lazy.Encoding ( decodeUtf8, encodeUtf8 )
import Thrift.Protocol
import Thrift.Transport
import Thrift.Types
import qualified Data.Attoparsec.ByteString as P
import qualified Data.Attoparsec.ByteString.Lazy as LP
import qualified Data.Binary as Binary
import qualified Data.ByteString.Lazy as LBS
import qualified Data.HashMap.Strict as Map
import qualified Data.Text.Lazy as LT
-- | The Binary Protocol uses the standard Thrift 'TBinaryProtocol'
data BinaryProtocol a = BinaryProtocol a
-- ^ Construct a 'BinaryProtocol' with a 'Transport'
-- Version handling: the first i32 of a message carries the protocol
-- version in its high 16 bits ('versionMask' selects them, 'version1'
-- is the expected value) and the message type in the low bits.
versionMask :: Int32
versionMask = fromIntegral (0xffff0000 :: Word32)
version1 :: Int32
version1 = fromIntegral (0x80010000 :: Word32)
-- NOTE: Reading and Writing functions rely on Builders and Data.Binary to
-- encode and decode data. Data.Binary assumes that the binary values it is
-- encoding to and decoding from are in BIG ENDIAN format, and converts the
-- endianness as necessary to match the local machine.
-- | Binary wire format: a message begins with (version1 | message
-- type) as i32, the method name as a length-prefixed string, and the
-- sequence id as i32; reading verifies the version bits and rejects a
-- mismatch with PE_BAD_VERSION.
instance Protocol BinaryProtocol where
mkProtocol = BinaryProtocol
getTransport (BinaryProtocol t) = t
writeMessage p (n, t, s) = (writeMessageBegin >>)
where
writeMessageBegin = tWrite (getTransport p) $ toLazyByteString $
buildBinaryValue (TI32 (version1 .|. fromIntegral (fromEnum t))) <>
buildBinaryValue (TString $ encodeUtf8 n) <>
buildBinaryValue (TI32 s)
readMessage p = (readMessageBegin >>=)
where
readMessageBegin =runParser p $ do
TI32 ver <- parseBinaryValue T_I32
if ver .&. versionMask /= version1
then throw $ ProtocolExn PE_BAD_VERSION "Missing version identifier"
else do
TString s <- parseBinaryValue T_STRING
TI32 sz <- parseBinaryValue T_I32
return (decodeUtf8 s, toEnum $ fromIntegral $ ver .&. 0xFF, sz)
serializeVal _ = toLazyByteString . buildBinaryValue
deserializeVal _ ty bs =
case LP.eitherResult $ LP.parse (parseBinaryValue ty) bs of
Left s -> error s
Right val -> val
readVal p = runParser p . parseBinaryValue
-- | Writing Functions
-- | Serialize a 'ThriftVal' in the binary format: big-endian fixed-size
-- integers, length-prefixed strings, T_STOP-terminated structs, and
-- containers prefixed with element-type byte(s) plus an i32 count.
buildBinaryValue :: ThriftVal -> Builder
buildBinaryValue (TStruct fields) = buildBinaryStruct fields <> buildType T_STOP
buildBinaryValue (TMap ky vt entries) =
buildType ky <>
buildType vt <>
int32BE (fromIntegral (length entries)) <>
buildBinaryMap entries
buildBinaryValue (TList ty entries) =
buildType ty <>
int32BE (fromIntegral (length entries)) <>
buildBinaryList entries
buildBinaryValue (TSet ty entries) =
buildType ty <>
int32BE (fromIntegral (length entries)) <>
buildBinaryList entries
buildBinaryValue (TBool b) =
word8 $ toEnum $ if b then 1 else 0
buildBinaryValue (TByte b) = int8 b
buildBinaryValue (TI16 i) = int16BE i
buildBinaryValue (TI32 i) = int32BE i
buildBinaryValue (TI64 i) = int64BE i
buildBinaryValue (TFloat f) = floatBE f
buildBinaryValue (TDouble d) = doubleBE d
buildBinaryValue (TString s) = int32BE len <> lazyByteString s
where
len :: Int32 = fromIntegral (LBS.length s)
-- | Serialize a struct's fields: each field as type byte, i16 field id,
-- then value.  The terminating T_STOP is appended by the caller
-- ('buildBinaryValue').
buildBinaryStruct :: Map.HashMap Int16 (LT.Text, ThriftVal) -> Builder
buildBinaryStruct = Map.foldrWithKey serializeField mempty
  where
    serializeField fieldId (_, fieldVal) rest =
      buildTypeOf fieldVal <> int16BE fieldId <> buildBinaryValue fieldVal <> rest
-- | Serialize map entries as alternating key/value encodings, in list
-- order.
buildBinaryMap :: [(ThriftVal, ThriftVal)] -> Builder
buildBinaryMap entries =
  mconcat [ buildBinaryValue k <> buildBinaryValue v | (k, v) <- entries ]
-- | Serialize list elements back to back.
buildBinaryList :: [ThriftVal] -> Builder
buildBinaryList = foldMap buildBinaryValue
-- | Reading Functions
-- | Parse a binary-encoded value of the given type: big-endian sizes
-- via Data.Binary on the raw bytes; floats/doubles are byte-swapped
-- into host interpretation with 'bsToFloating'.  Unsupported types
-- (e.g. T_STOP, T_VOID) call 'error'.
parseBinaryValue :: ThriftType -> P.Parser ThriftVal
parseBinaryValue (T_STRUCT _) = TStruct <$> parseBinaryStruct
parseBinaryValue (T_MAP _ _) = do
kt <- parseType
vt <- parseType
n <- Binary.decode . LBS.fromStrict <$> P.take 4
TMap kt vt <$> parseBinaryMap kt vt n
parseBinaryValue (T_LIST _) = do
t <- parseType
n <- Binary.decode . LBS.fromStrict <$> P.take 4
TList t <$> parseBinaryList t n
parseBinaryValue (T_SET _) = do
t <- parseType
n <- Binary.decode . LBS.fromStrict <$> P.take 4
TSet t <$> parseBinaryList t n
parseBinaryValue T_BOOL = TBool . (/=0) <$> P.anyWord8
parseBinaryValue T_BYTE = TByte . Binary.decode . LBS.fromStrict <$> P.take 1
parseBinaryValue T_I16 = TI16 . Binary.decode . LBS.fromStrict <$> P.take 2
parseBinaryValue T_I32 = TI32 . Binary.decode . LBS.fromStrict <$> P.take 4
parseBinaryValue T_I64 = TI64 . Binary.decode . LBS.fromStrict <$> P.take 8
parseBinaryValue T_FLOAT = TFloat . bsToFloating byteSwap32 <$> P.take 4
parseBinaryValue T_DOUBLE = TDouble . bsToFloating byteSwap64 <$> P.take 8
parseBinaryValue T_STRING = do
i :: Int32 <- Binary.decode . LBS.fromStrict <$> P.take 4
TString . LBS.fromStrict <$> P.take (fromIntegral i)
parseBinaryValue ty = error $ "Cannot read value of type " ++ show ty
-- | Parse struct fields (type byte, i16 id, value) until a T_STOP byte.
-- Field names are not on the wire, so each entry gets the empty name.
parseBinaryStruct :: P.Parser (Map.HashMap Int16 (LT.Text, ThriftVal))
parseBinaryStruct = Map.fromList <$> P.manyTill parseField (matchType T_STOP)
where
parseField = do
t <- parseType
n <- Binary.decode . LBS.fromStrict <$> P.take 2
v <- parseBinaryValue t
return (n, ("", v))
-- | Parse @n@ key/value pairs of the given types ([] when n <= 0).
parseBinaryMap :: ThriftType -> ThriftType -> Int32 -> P.Parser [(ThriftVal, ThriftVal)]
parseBinaryMap keyTy valTy n
  | n <= 0 = return []
  | otherwise = do
      key <- parseBinaryValue keyTy
      val <- parseBinaryValue valTy
      rest <- parseBinaryMap keyTy valTy (n - 1)
      return ((key, val) : rest)
-- | Parse @n@ consecutive values of the given type ([] when n <= 0).
parseBinaryList :: ThriftType -> Int32 -> P.Parser [ThriftVal]
parseBinaryList ty n
  | n <= 0 = return []
  | otherwise = (:) <$> parseBinaryValue ty <*> parseBinaryList ty (n - 1)
-- | Write a type as a byte
buildType :: ThriftType -> Builder
buildType = word8 . fromIntegral . fromEnum
-- | Write type of a ThriftVal as a byte
-- Container element types are filled with placeholders (T_VOID /
-- empty map) since only the constructor's tag byte is emitted here;
-- NOTE(review): presumably 'fromEnum' on ThriftType ignores those
-- arguments — confirm in Thrift.Types.
buildTypeOf :: ThriftVal -> Builder
buildTypeOf v = buildType $ case v of
TStruct{} -> T_STRUCT Map.empty
TMap{} -> T_MAP T_VOID T_VOID
TList{} -> T_LIST T_VOID
TSet{} -> T_SET T_VOID
TBool{} -> T_BOOL
TByte{} -> T_BYTE
TI16{} -> T_I16
TI32{} -> T_I32
TI64{} -> T_I64
TString{} -> T_STRING
TFloat{} -> T_FLOAT
TDouble{} -> T_DOUBLE
-- | Read a byte as though it were a ThriftType
parseType :: P.Parser ThriftType
parseType = fmap (toEnum . fromIntegral) P.anyWord8
-- | Accept exactly the byte encoding the given type, yielding that type.
matchType :: ThriftType -> P.Parser ThriftType
matchType t = P.word8 (fromIntegral $ fromEnum t) >> return t
|
facebook/fbthrift
|
thrift/lib/hs/Thrift/Protocol/Binary.hs
|
Haskell
|
apache-2.0
| 7,761
|
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE Trustworthy #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_HADDOCK show-extensions #-}
{-|
Copyright : (C) 2013-2015, University of Twente
License : BSD2 (see the file LICENSE)
Maintainer : Christiaan Baaij <christiaan.baaij@gmail.com>
-}
module CLaSH.Signal
( -- * Implicitly clocked synchronous signal
Signal
-- * Basic circuit functions
, signal
, register
, regEn
, mux
-- * Boolean connectives
, (.&&.), (.||.), not1
-- * Product/Signal isomorphism
, Bundle
, Unbundled
, bundle
, unbundle
-- * Simulation functions (not synthesisable)
, simulate
, simulateB
-- * List \<-\> Signal conversion (not synthesisable)
, sample
, sampleN
, fromList
-- * QuickCheck combinators
, testFor
-- * Type classes
-- ** 'Eq'-like
, (.==.), (./=.)
-- ** 'Ord'-like
, compare1, (.<.), (.<=.), (.>=.), (.>.)
-- ** 'Enum'-like
, fromEnum1
-- ** 'Rational'-like
, toRational1
-- ** 'Integral'-like
, toInteger1
-- ** 'Bits'-like
, testBit1
, popCount1
, shift1
, rotate1
, setBit1
, clearBit1
, shiftL1
, unsafeShiftL1
, shiftR1
, unsafeShiftR1
, rotateL1
, rotateR1
)
where
import Data.Bits (Bits) -- Haddock only
import CLaSH.Signal.Internal (Signal', register#, regEn#, (.==.), (./=.),
compare1, (.<.), (.<=.), (.>=.), (.>.), fromEnum1,
toRational1, toInteger1, testBit1, popCount1,
shift1, rotate1, setBit1, clearBit1, shiftL1,
unsafeShiftL1, shiftR1, unsafeShiftR1, rotateL1,
rotateR1, (.||.), (.&&.), not1, mux, sample,
sampleN, fromList, simulate, signal, testFor)
import CLaSH.Signal.Explicit (SystemClock, systemClock, simulateB')
import CLaSH.Signal.Bundle (Bundle (..), Unbundled')
{- $setup
>>> let oscillate = register False (not1 oscillate)
>>> let count = regEn 0 oscillate (count + 1)
-}
-- * Implicitly clocked synchronous signal
-- | Signal synchronised to the \"system\" clock, which has a period of 1000.
type Signal a = Signal' SystemClock a
-- * Basic circuit functions
{-# INLINE register #-}
-- | 'register' @i s@ delays the values in 'Signal' @s@ for one cycle, and sets
-- the value at time 0 to @i@
--
-- >>> sampleN 3 (register 8 (fromList [1,2,3,4]))
-- [8,1,2]
register :: a -> Signal a -> Signal a
-- Specialisation of the 'register#' primitive to the implicit system clock.
register = register# systemClock
{-# INLINE regEn #-}
-- | Version of 'register' that only updates its content when its second argument
-- is asserted. So given:
--
-- @
-- oscillate = 'register' False ('not1' oscillate)
-- count = 'regEn' 0 oscillate (count + 1)
-- @
--
-- We get:
--
-- >>> sampleN 8 oscillate
-- [False,True,False,True,False,True,False,True]
-- >>> sampleN 8 count
-- [0,0,1,1,2,2,3,3]
regEn :: a -> Signal Bool -> Signal a -> Signal a
-- Specialisation of the 'regEn#' primitive to the implicit system clock.
regEn = regEn# systemClock
-- * Product/Signal isomorphism
-- | Isomorphism between a 'Signal' of a product type (e.g. a tuple) and a
-- product type of 'Signal's.
type Unbundled a = Unbundled' SystemClock a
{-# INLINE unbundle #-}
-- | Split a 'Signal' of a product type into a product of 'Signal's,
-- using the 'Bundle' instance for the implicit 'systemClock'.
--
-- Example:
--
-- @
-- __unbundle__ :: 'Signal' (a,b) -> ('Signal' a, 'Signal' b)
-- @
--
-- However:
--
-- @
-- __unbundle__ :: 'Signal' 'CLaSH.Sized.BitVector.Bit' -> 'Signal' 'CLaSH.Sized.BitVector.Bit'
-- @
unbundle :: Bundle a => Signal a -> Unbundled a
unbundle = unbundle' systemClock
{-# INLINE bundle #-}
-- | Merge a product of 'Signal's into a 'Signal' of the product type,
-- using the 'Bundle' instance for the implicit 'systemClock'.
--
-- Example:
--
-- @
-- __bundle__ :: ('Signal' a, 'Signal' b) -> 'Signal' (a,b)
-- @
--
-- However:
--
-- @
-- __bundle__ :: 'Signal' 'CLaSH.Sized.BitVector.Bit' -> 'Signal' 'CLaSH.Sized.BitVector.Bit'
-- @
bundle :: Bundle a => Unbundled a -> Signal a
bundle = bundle' systemClock
-- | Simulate a (@'Unbundled' a -> 'Unbundled' b@) function given a list of
-- samples of type @a@
--
-- >>> simulateB (unbundle . register (8,8) . bundle) [(1,1), (2,2), (3,3)] :: [(Int,Int)]
-- [(8,8),(1,1),(2,2),(3,3)...
--
-- Both the input and the output side use the implicit 'systemClock'.
--
-- __NB__: This function is not synthesisable
simulateB :: (Bundle a, Bundle b) => (Unbundled a -> Unbundled b) -> [a] -> [b]
simulateB = simulateB' systemClock systemClock
|
Ericson2314/clash-prelude
|
src/CLaSH/Signal.hs
|
Haskell
|
bsd-2-clause
| 4,201
|
{-# LANGUAGE OverloadedStrings #-}
module Types where
import Data.Ini
import Data.List (group, sort, sortOn)
import Data.Maybe (mapMaybe, listToMaybe)
import Data.Ord (Down(Down))
import Data.Text (Text, unpack)
import Data.Time.Calendar (Day)
import Text.XmlHtml (Document)
type File = (FilePath, Text)
data Blog = Blog
{ name :: Text
, url :: Text
, unpublished :: [Post]
, published :: [Post]
}
deriving Show
-- toBlog does three really important things:
--
-- 1. It ensures there are no slug collisions,
-- 2. It moves the "published" posts with a future publish date
--    into the "unpublished" list, and
-- 3. It sorts the list of published entries on datestamp (in
--    reverse chronological order!)
--
-- On failure it returns one error message per duplicated slug, or the
-- single error from reading the blog title/url out of the Ini config.
toBlog :: Ini -> Day -> [Post] -> [Post] -> Either [String] Blog
toBlog config today drafts posts =
  let
    -- One representative per group of repeated slugs.
    getDuplicates = mapMaybe (listToMaybe . drop 1) . group . sort
    duplicates = getDuplicates (map slug (drafts ++ posts))
    -- Posts dated after 'today' are not published yet.
    unpublished = filter ((> today) . datestamp) posts ++ drafts
    actualPublished = sortOn (Down . datestamp) (filter ((<= today) . datestamp) posts)
    -- Lift a single-error Either into the multi-error shape.
    coerce = either (Left . (:[])) Right
  in do
    blogName <- coerce (lookupValue "general" "title" config)
    blogUrl <- coerce (lookupValue "general" "url" config)
    if null duplicates then
      Right (Blog blogName blogUrl unpublished actualPublished)
    else
      Left (map (("duplicate slug: " ++) . unpack . fromSlug) duplicates)
-- | A single blog post.
data Post = Post
  { title :: Text
  , slug :: Slug Text      -- ^ URL identifier; 'toBlog' rejects duplicates
  , datestamp :: Day       -- ^ publish date; future dates mean "unpublished"
  , content :: Document    -- ^ parsed post body
  , author :: () -- for a more civilized age
  , tags :: [Slug Text]
  }
  deriving Show
-- Wrapper for slugs so we don't mix 'em up with regular text
data Slug a = Slug
  { fromSlug :: a
  }
  deriving (Show, Eq, Ord)
-- Combining slugs combines the wrapped values.
instance Monoid a => Monoid (Slug a) where
  mempty = Slug mempty
  mappend (Slug a) (Slug b) = Slug (mappend a b)
instance Functor Slug where
  fmap f (Slug a) = Slug (f a)
|
kqr/two-wrongs-st
|
src/Types.hs
|
Haskell
|
bsd-2-clause
| 2,105
|
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-| Unittests for @xm list --long@ parser -}
{-
Copyright (C) 2013 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Test.Ganeti.Hypervisor.Xen.XmParser
( testHypervisor_Xen_XmParser
) where
import Test.HUnit
import Test.QuickCheck as QuickCheck hiding (Result)
import Test.Ganeti.TestHelper
import Test.Ganeti.TestCommon
import Control.Monad (liftM)
import qualified Data.Attoparsec.Text as A
import Data.Text (pack)
import Data.Char
import qualified Data.Map as Map
import Text.Printf
import Ganeti.Hypervisor.Xen.Types
import Ganeti.Hypervisor.Xen.XmParser
{-# ANN module "HLint: ignore Use camelCase" #-}
-- * Arbitraries
-- | Generator for 'LispConfig'.
--
-- A completely arbitrary configuration would contain too many lists
-- and its size would be too big to be actually parsable in reasonable
-- time. This generator builds a random Config that is still of a
-- reasonable size, and it also avoids generating strings that might
-- be interpreted as numbers.
genConfig :: Int -> Gen LispConfig
genConfig 0 =
  -- only terminal values for size 0
  frequency [ (5, liftM LCString (genName `suchThat` (not . canBeNumber)))
            , (5, liftM LCDouble arbitrary)
            ]
genConfig n =
  -- for size greater than 0, allow "some" lists
  -- (lists get weight 1 vs 5 for terminals, and child sizes shrink by
  -- the list length, so total size stays bounded)
  frequency [ (5, liftM LCString (resize n genName `suchThat`
                                  (not . canBeNumber)))
            , (5, liftM LCDouble arbitrary)
            , (1, liftM LCList (choose (1, n) >>=
                                (\n' -> vectorOf n' (genConfig $ n `div` n'))))
            ]
-- | Arbitrary instance for 'LispConfig' using 'genConfig'.
instance Arbitrary LispConfig where
  arbitrary = sized genConfig
-- | Determines conservatively whether a string could be a number.
--
-- The empty string is never considered a number; otherwise every
-- character must satisfy 'canBeNumberChar'.
canBeNumber :: String -> Bool
canBeNumber s = not (null s) && all canBeNumberChar s
-- | Determines whether a char can be part of the string representation of a
-- number (even in scientific notation).
canBeNumberChar :: Char -> Bool
canBeNumberChar c = isDigit c || (c `elem` "eE-")
-- | Generates an arbitrary @xm uptime@ output line.
--
-- The uptime string omits the "N days," prefix when the day count is
-- zero, mirroring the real @xm uptime@ output.
instance Arbitrary UptimeInfo where
  arbitrary = do
    name <- genFQDN
    NonNegative idNum <- arbitrary :: Gen (NonNegative Int)
    NonNegative days <- arbitrary :: Gen (NonNegative Int)
    hours <- choose (0, 23) :: Gen Int
    mins <- choose (0, 59) :: Gen Int
    secs <- choose (0, 59) :: Gen Int
    let uptime :: String
        uptime =
          if days /= 0
            then printf "%d days, %d:%d:%d" days hours mins secs
            else printf "%d:%d:%d" hours mins secs
    return $ UptimeInfo name idNum uptime
-- * Helper functions for tests
-- | Function for testing whether a domain configuration is parsed correctly.
--
-- Reads the given test-data file, runs 'xmListParser' on it, and compares
-- against the expected map. The file name doubles as the assertion label.
testDomain :: String -> Map.Map String Domain -> Assertion
testDomain fileName expectedContent = do
  fileContent <- readTestData fileName
  case A.parseOnly xmListParser $ pack fileContent of
    Left msg -> assertFailure $ "Parsing failed: " ++ msg
    Right obtained -> assertEqual fileName expectedContent obtained
-- | Function for testing whether a @xm uptime@ output (stored in a file)
-- is parsed correctly. The file name doubles as the assertion label.
testUptimeInfo :: String -> Map.Map Int UptimeInfo -> Assertion
testUptimeInfo fileName expected = do
  contents <- readTestData fileName
  either (assertFailure . ("Parsing failed: " ++))
         (assertEqual fileName expected)
         (A.parseOnly xmUptimeParser (pack contents))
-- | Determines whether two LispConfig are equal, with the exception of Double
-- values, that just need to be \"almost equal\".
--
-- Meant mainly for testing purposes, given that Double values may be slightly
-- rounded during parsing.
isAlmostEqual :: LispConfig -> LispConfig -> Property
-- Lists: same length and element-wise almost-equality.
isAlmostEqual (LCList c1) (LCList c2) =
  (length c1 ==? length c2) .&&.
  conjoin (zipWith isAlmostEqual c1 c2)
isAlmostEqual (LCString s1) (LCString s2) = s1 ==? s2
-- Doubles: accept a relative error of up to 1e-12.
isAlmostEqual (LCDouble d1) (LCDouble d2) = printTestCase msg $ rel <= 1e-12
  where rel = relativeError d1 d2
        msg = "Relative error " ++ show rel ++ " not smaller than 1e-12\n" ++
              "expected: " ++ show d2 ++ "\n but got: " ++ show d1
-- Mismatched constructors always fail.
isAlmostEqual a b =
  failTest $ "Comparing different types: '" ++ show a ++ "' with '" ++
             show b ++ "'"
-- | Render a 'LispConfig' as a string that 'lispConfigParser' can
-- rebuild: lists become space-separated, parenthesised groups.
serializeConf :: LispConfig -> String
serializeConf conf =
  case conf of
    LCList cs -> "(" ++ unwords (map serializeConf cs) ++ ")"
    LCString s -> s
    LCDouble d -> show d
-- | Function to serialize UptimeInfos in such a way that they can be rebuilt
-- again by the uptimeLineParser. Fields are tab-separated.
serializeUptime :: UptimeInfo -> String
serializeUptime (UptimeInfo name idNum uptime) =
  printf "%s\t%d\t%s" name idNum uptime
-- | Test whether a randomly generated config can be parsed.
-- Implicitly, this also tests that the Show instance of Config is correct.
-- Round-trip: serialize, re-parse, and compare almost-equal (Doubles may
-- be rounded during parsing).
prop_config :: LispConfig -> Property
prop_config conf =
  case A.parseOnly lispConfigParser . pack . serializeConf $ conf of
    Left msg -> failTest $ "Parsing failed: " ++ msg
    Right obtained -> printTestCase "Failing almost equal check" $
                      isAlmostEqual obtained conf
-- | Test whether a randomly generated UptimeInfo text line can be parsed.
-- Round-trip: serialize one line, re-parse, and compare for exact equality.
prop_uptimeInfo :: UptimeInfo -> Property
prop_uptimeInfo uInfo =
  case A.parseOnly uptimeLineParser . pack . serializeUptime $ uInfo of
    Left msg -> failTest $ "Parsing failed: " ++ msg
    Right obtained -> obtained ==? uInfo
-- | Test a Xen 4.0.1 @xm list --long@ output.
case_xen401list :: Assertion
case_xen401list = testDomain "xen-xm-list-long-4.0.1.txt" $
Map.fromList
[ ("Domain-0", Domain 0 "Domain-0" 184000.41332 ActualRunning Nothing)
, ("instance1.example.com", Domain 119 "instance1.example.com" 24.116146647
ActualBlocked Nothing)
]
-- | Test a Xen 4.0.1 @xm uptime@ output.
case_xen401uptime :: Assertion
case_xen401uptime = testUptimeInfo "xen-xm-uptime-4.0.1.txt" $
Map.fromList
[ (0, UptimeInfo "Domain-0" 0 "98 days, 2:27:44")
, (119, UptimeInfo "instance1.example.com" 119 "15 days, 20:57:07")
]
testSuite "Hypervisor/Xen/XmParser"
[ 'prop_config
, 'prop_uptimeInfo
, 'case_xen401list
, 'case_xen401uptime
]
|
apyrgio/snf-ganeti
|
test/hs/Test/Ganeti/Hypervisor/Xen/XmParser.hs
|
Haskell
|
bsd-2-clause
| 7,773
|
-- | Top-level convenience module: re-exports the core iteratee types,
-- the exception machinery, and the IO enumerators.
module Data.Iteratee (
  module Data.Iteratee.Base
  , module Data.Iteratee.Exception
  , module Data.Iteratee.IO
  ) where
------------------------------------------------------------------------
-- Imports
------------------------------------------------------------------------
import Data.Iteratee.Base
import Data.Iteratee.Exception
import Data.Iteratee.IO
|
tanimoto/iteratee
|
src/Data/Iteratee.hs
|
Haskell
|
bsd-3-clause
| 357
|
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Ordinal.PT.Corpus
( corpus
) where
import Data.String
import Prelude
import Duckling.Locale
import Duckling.Ordinal.Types
import Duckling.Resolve
import Duckling.Testing.Types
corpus :: Corpus
corpus = (testContext {locale = makeLocale PT Nothing}, testOptions, allExamples)
allExamples :: [Example]
allExamples = concat
[ examples (OrdinalData 1)
[ "primeira"
, "primeiros"
]
, examples (OrdinalData 2)
[ "Segundo"
, "segundas"
]
, examples (OrdinalData 7)
[ "setimo"
, "sétimo"
, "sétimas"
]
, examples (OrdinalData 10)
[ "decimos"
, "décimos"
, "decima"
, "décima"
, "decimas"
, "décimas"
]
, examples (OrdinalData 11)
[ "decimos primeiros"
, "décimos primeiros"
, "decimo primeiro"
, "décimo primeiro"
, "decimas primeiras"
, "décimas primeiras"
, "decima primeira"
, "décima primeira"
]
, examples (OrdinalData 12)
[ "decimos segundos"
, "décimos segundos"
, "decimo segundo"
, "décimo segundo"
, "decimas segundas"
, "décimas segundas"
, "decima segunda"
, "décima segunda"
]
, examples (OrdinalData 17)
[ "decimos setimos"
, "décimos setimos"
, "decimo setimo"
, "décimo sétimo"
, "decimas setimas"
, "décimas sétimas"
, "decima setima"
, "décima setima"
]
, examples (OrdinalData 58)
[ "quinquagésimas oitavas"
, "qüinquagesimo oitavo"
, "quinquagésimo oitavo"
]
]
|
facebookincubator/duckling
|
Duckling/Ordinal/PT/Corpus.hs
|
Haskell
|
bsd-3-clause
| 2,204
|
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Test.Concurrent where
import qualified Test.Hspec as HS
import qualified Test.Hspec.QuickCheck as HS
import qualified Test.QuickCheck as Q
import qualified Control.Concurrent as CC
import qualified Control.Concurrent.Async as AS
import qualified Control.Concurrent.MVar as MV
import qualified Control.Exception as E
import Control.Monad ((>=>))
import qualified Control.Monad as M
import qualified GHC.Conc as CC
import qualified Data.Maybe as MB
import qualified Data.Traversable as TF
import Data.Typeable (Typeable)
-- | Multiple times test enabled IO spec.
--
-- Wraps 'HS.prop' so the (argument-less) body is run by QuickCheck,
-- i.e. repeated many times; the @\\ () ->@ turns it into a 'Q.Testable'
-- property over the unit type.
ioprop :: (HS.HasCallStack, Q.Testable prop) => String -> prop -> HS.Spec
ioprop desc prop = HS.prop desc $ \ () -> prop
-- | Things that own (or are) a thread we can inspect and signal.
class HasThread th where
  threadId :: th -> IO CC.ThreadId
  -- | Deliver an asynchronous exception to the underlying thread.
  throwTo :: E.Exception e => th -> e -> IO ()
  throwTo th e = threadId th >>= flip E.throwTo e
-- | Poll the runtime status of the underlying thread.
threadStatus :: HasThread th => th -> IO CC.ThreadStatus
threadStatus = threadId >=> CC.threadStatus
instance HasThread CC.ThreadId where
  threadId = return
instance HasThread (AS.Async x) where
  threadId = return . AS.asyncThreadId
-- | True once a thread can no longer run: it completed or died.
isFinish :: CC.ThreadStatus -> Bool
isFinish status = case status of
  CC.ThreadFinished -> True
  CC.ThreadDied -> True
  _ -> False
-- | True whenever a thread is not actively running (blocked, finished,
-- or died).
isStop :: CC.ThreadStatus -> Bool
isStop status = case status of
  CC.ThreadRunning -> False
  _ -> True
-- | Run an action that receives a "wait for the start signal" IO value.
-- The signal is released after a random delay (bounded by 20000µs), so
-- work gated on it begins at a randomized moment. Returns the action's
-- result (rethrowing its exception via 'AS.wait').
withWaitStart :: (IO () -> IO x) -> IO x
withWaitStart actf = do
  mv <- MV.newEmptyMVar
  -- Nothing means: release the gate immediately.
  mdelay <- Q.generate $ arbitraryDelay 20000
  async <- AS.async . actf $ MV.readMVar mv
  case mdelay of
    Just delay -> CC.threadDelay delay
    Nothing -> return ()
  CC.putMVar mv ()
  AS.wait async
-- | Like 'AS.concurrently', but each action first waits for a shared
-- start gate and then an optional per-action random delay (< 20000µs),
-- randomizing their relative start order.
concurrently :: IO a -> IO b -> IO (a, b)
concurrently act1 act2 = do
  mdelay1 <- Q.generate $ arbitraryDelay 20000
  mdelay2 <- Q.generate $ arbitraryDelay 20000
  withWaitStart $ \ wait ->
    wrap wait mdelay1 act1 `AS.concurrently` wrap wait mdelay2 act2
  where
    -- Gate, optionally delay, then run.
    wrap :: IO () -> Maybe Int -> IO a -> IO a
    wrap wait mdelay act = wait >> TF.for mdelay CC.threadDelay >> act
-- | Run all actions concurrently, each behind the shared start gate plus
-- its own optional random delay; results come back in input order.
mapConcurrently :: [IO a] -> IO [a]
mapConcurrently acts = do
  let len = length acts
  -- One optional delay per action; `mod` keeps it in (-20000, 20000),
  -- and 'CC.threadDelay' treats negative values as zero-like waits.
  mds <- Q.generate . Q.vectorOf len $ fmap (`mod` 20000) <$> Q.arbitrary
  withWaitStart $ \ wait -> do
    AS.mapConcurrently id $ wrap wait <$> zip mds acts
  where
    wrap :: IO () -> (Maybe Int, IO a) -> IO a
    wrap wait (mdelay, act) = wait >> TF.for mdelay CC.threadDelay >> act
-- | 'mapConcurrently' with the results discarded.
mapConcurrently_ :: [IO a] -> IO ()
mapConcurrently_ = M.void . mapConcurrently
-- | Wait until the thread stops running and return its status.
-- The partial 'head' is safe: 'waitAnyAtLeast' only returns once at
-- least one entry satisfies the predicate.
waitStop :: HasThread th => th -> IO CC.ThreadStatus
waitStop th = snd . head <$> waitAny isStop [th]
-- | Wait until the thread has finished (or died) and return its status.
waitFinish :: HasThread th => th -> IO CC.ThreadStatus
waitFinish th = snd . head <$> waitFinishAny [th]
-- | Wait until at least one thread has finished (or died).
waitFinishAny :: HasThread th => [th] -> IO [(Int, CC.ThreadStatus)]
waitFinishAny = waitAny isFinish
-- | Wait until at least one thread's status satisfies the predicate.
waitAny :: HasThread th =>
  (CC.ThreadStatus -> Bool) -> [th] -> IO [(Int, CC.ThreadStatus)]
waitAny = waitAnyAtLeast 1
-- | Busy-wait (polling with 1µs delays) until at least @num@ of the
-- given threads have a status satisfying the predicate; returns the
-- (index, status) pairs of all threads that currently satisfy it.
waitAnyAtLeast :: HasThread th =>
  Int -> (CC.ThreadStatus -> Bool) -> [th] -> IO [(Int, CC.ThreadStatus)]
waitAnyAtLeast num f ths = go
  where
    go = do
      -- 'traverse' replaces the former 'M.sequence . fmap' combination.
      statuses <- traverse threadStatus ths
      let satisfied = filter (f . snd) $ zip [0..] statuses
      if length satisfied >= num
        then return satisfied
        else CC.threadDelay 1 >> go
-- | Exception thrown at random threads by 'throwExceptionRandomly'.
-- Carries the round counter and a debug string.
data RandomException = RandomException Int String
  deriving (Show, Typeable)
instance E.Exception RandomException
-- | Run an action, converting a 'RandomException' into 'Nothing'.
-- Any other exception type still propagates.
ignoreException :: IO a -> IO (Maybe a)
ignoreException act = (Just <$> act)
  `E.catch` \ (_err :: RandomException) -> do
    -- E.uninterruptibleMask_ $ putStrLn $ "---- Exception throwed : " ++ show _err
    return Nothing
-- | 'ignoreException' with the result discarded.
ignoreException_ :: IO a -> IO ()
ignoreException_ = M.void . ignoreException
-- | Yield the 'CC.ThreadId' while the thread is still alive;
-- 'Nothing' once it has finished or died.
runningThreadId :: HasThread th => th -> IO (Maybe CC.ThreadId)
runningThreadId th = do
  finished <- isFinish <$> threadStatus th
  if finished
    then return Nothing
    else Just <$> threadId th
-- | Repeatedly throw a 'RandomException' at a randomly chosen
-- still-running thread, after random delays whose bound grows with the
-- round counter; stops once no thread is alive.
throwExceptionRandomly :: HasThread th => [th] -> IO ()
throwExceptionRandomly ths = go (1 :: Int)
  where
    getAlives = fmap MB.catMaybes . traverse runningThreadId $ ths
    go !c = do
      -- Delay bound grows each round so later throws are spaced out.
      mdelay <- Q.generate $ arbitraryDelay $ 20000 * c
      case mdelay of
        Just delay -> CC.threadDelay delay
        Nothing -> return ()
      alives <- getAlives
      M.unless (null alives) $ do
        alive <- Q.generate $ Q.elements alives
        throwTo alive . RandomException c $ show mdelay ++ " : " ++ show (length alives)
        go $ c+1
-- | Generate an optional random delay strictly below @limit@;
-- 'Nothing' means "no delay at all".
arbitraryDelay :: Int -> Q.Gen (Maybe Int)
arbitraryDelay limit = do
  mbase <- Q.arbitrary
  factor1 <- (+1) . abs <$> Q.arbitrary
  factor2 <- (+1) . abs <$> Q.arbitrary
  return $ fmap (\base -> (base * factor1 * factor2) `mod` limit) mbase
|
asakamirai/kazura-queue
|
test/Test/Concurrent.hs
|
Haskell
|
bsd-3-clause
| 5,114
|
{-# LANGUAGE Arrows, NoMonomorphismRestriction #-}
module AtomTestCases where
import Text.XML.HXT.Core
import Data.Tree.NTree.TypeDefs(NTree)
import Web.HRSS.Data.Atom
import Test.Framework as TF (testGroup, Test)
import Test.Framework.Providers.QuickCheck2 (testProperty)
xmlAtom0 :: [Char]
xmlAtom0 = "\
\<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
\<feed xmlns='http://www.w3.org/2005/Atom' xmlns:thr='http://purl.org/syndication/thread/1.0' xml:lang='en' xml:base='http://www.1point2vue.com/wp-atom.php'>\
\ <title type='text'>1point2vue</title>\
\</feed>"
xmlAtom1 :: [Char]
xmlAtom1 = "\
\<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
\<feed xmlns='http://www.w3.org/2005/Atom' xmlns:thr='http://purl.org/syndication/thread/1.0' xml:lang='en' xml:base='http://www.1point2vue.com/wp-atom.php'>\
\ <title type='text'>mon titre</title>\
\ <subtitle type='text'>Apprendre à faire des photos et à les retoucher</subtitle>\
\ <updated>2013-03-20T18:53:12Z</updated>\
\ <link rel='alternate' type='text/html' href='http://www.1point2vue.com'/>\
\ <id>http://www.1point2vue.com/feed/atom/</id>\
\ <link rel='self' type='application/atom+xml' href='http://www.1point2vue.com/feed/atom/'/>\
\ <generator uri='http://wordpress.org/' version='3.3.1'>WordPress</generator>\
\ <entry>\
\ <author>\
\ <name>Antoine</name>\
\ <uri>http://www.1point2vue.com</uri>\
\ </author>\
\ <title type='html'>Le projet photo: un outil pour façonner l’experience du photographe</title>\
\ <link rel='alternate' type='text/html' href='http://www.1point2vue.com/projet-photo-experience/'/>\
\ <id>http://www.1point2vue.com/?p=11026</id>\
\ <updated>2013-03-20T18:53:12Z</updated>\
\ <published>2013-03-20T18:51:02Z</published>\
\ <category scheme='http://www.1point2vue.com' term='projet photo'/>\
\ <category scheme='http://www.1point2vue.com' term='wordpress'/>\
\ <summary type='html'>S'imposer un projet photo est une façon de consolider son experience de la photo. Découvrez par quels moyens vous pouvez devenir un meilleur photographe simplement en ajoutant quelques contraintes à votre pratique de la photo.<br/><br/>Lire l'article <a href='http://www.1point2vue.com/projet-photo-experience/'>Le projet photo: un outil pour façonner l’experience du photographe</a><br /><hr /><em>Le bon plan du moment: <a href='http://ad.zanox.com/ppc/?17906432C82208704&zpar9=168E7CE40089AE6CAF3B'>80 photos offertes pour les nouveaux inscrit sur MyPix.com</a></em><hr /></summary>\
\ <link rel='replies' type='text/html' href='http://www.1point2vue.com/projet-photo-experience/#comments' thr:count='9'/>\
\ <link rel='replies' type='application/atom+xml' href='http://www.1point2vue.com/projet-photo-experience/feed/atom/' thr:count='9'/>\
\ <thr:total>9</thr:total>\
\ </entry>\
\ <entry>\
\ <author>\
\ <name>Antoine</name>\
\ <uri>http://www.1point2vue.com</uri>\
\ </author>\
\ <title type='html'>Réaliser un panographe avec Gimp</title>\
\ <link rel='alternate' type='text/html' href='http://www.1point2vue.com/panographe-avec-gimp/'/>\
\ <id>http://www.1point2vue.com/?p=10953</id>\
\ <updated>2013-01-26T18:01:13Z</updated>\
\ <published>2013-01-26T18:01:13Z</published>\
\ <category scheme='http://www.1point2vue.com' term='Graphisme'/>\
\ <category scheme='http://www.1point2vue.com' term='assemblage'/>\
\ <category scheme='http://www.1point2vue.com' term='deplacement'/>\
\ <category scheme='http://www.1point2vue.com' term='la boite à photo'/>\
\ <category scheme='http://www.1point2vue.com' term='Panographe'/>\
\ <category scheme='http://www.1point2vue.com' term='rotation'/>\
\ <summary type='html'>Le panographe est une autre façon de faire de la photo panoramique. Bien plus simple du point de vue de la prise de vue, il permet d'obtenir des effets vraiment originaux.<br/><br/>Lire l'article <a href='http://www.1point2vue.com/panographe-avec-gimp/'>Réaliser un panographe avec Gimp</a><br /><hr /><em>Le bon plan du moment: <a href='http://ad.zanox.com/ppc/?17906432C82208704&zpar9=168E7CE40089AE6CAF3B'>80 photos offertes pour les nouveaux inscrit sur MyPix.com</a></em><hr /></summary>\
\ <link rel='replies' type='text/html' href='http://www.1point2vue.com/panographe-avec-gimp/#comments' thr:count='7'/>\
\ <link rel='replies' type='application/atom+xml' href='http://www.1point2vue.com/panographe-avec-gimp/feed/atom/' thr:count='7'/>\
\ <thr:total>7</thr:total>\
\ </entry>\
\</feed>"
-- | Run an HXT extraction arrow over the given XML string and collect
-- all of its results.
parse :: IOSLA (XIOState ()) (NTree XNode) a -> String -> IO [a]
parse get xml = runX ( parseXML xml >>> get )
  where
    -- Read from an in-memory string: no validation, no tracing,
    -- ignorable whitespace removed.
    parseXML :: String -> IOStateArrow s b XmlTree
    parseXML doc = readString
      [ withValidate no
      , withTrace 0
      , withRemoveWS yes
      ] doc
-- | Check a feed parse result. With the flag 'True', only an empty
-- result passes; with 'False', exactly one 'Atom' whose title, entry
-- count, and link count match the expectations passes.
--
-- NOTE(review): an empty result combined with a 'False' flag falls
-- through to the catch-all and fails — confirm that is intended for the
-- "Error:"-labelled test below, which passes 'False'.
prop_getAtom :: [Atom] -> Bool -> String -> Int -> Int -> Bool
prop_getAtom [] True _ _ _ = True
prop_getAtom [(Atom t es ls)] False tt tel tll = and [ tt == t, tel == length es, tll == length ls]
prop_getAtom _ _ _ _ _ = False
-- | Parse both fixture feeds once, then build the test group. Both
-- cases expect a successful parse (flag 'False'); the "Error:"/"NoError:"
-- prefixes are only labels.
tests :: IO (TF.Test)
tests = do
  xml0 <- parse getAtom xmlAtom0
  xml1 <- parse getAtom xmlAtom1
  return $ testGroup "AtomTestCases"
    [ testProperty "Error: xmlAtom0" $ prop_getAtom xml0 False "1point2vue" 0 0
    , testProperty "NoError: xmlAtom1" $ prop_getAtom xml1 False "mon titre" 2 2
    ]
|
kdridi/hrss
|
src/test/AtomTestCases.hs
|
Haskell
|
bsd-3-clause
| 5,484
|
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE TupleSections #-}
-- | The general Stack configuration that starts everything off. This should
-- be smart to falback if there is no stack.yaml, instead relying on
-- whatever files are available.
--
-- If there is no stack.yaml, and there is a cabal.config, we
-- read in those constraints, and if there's a cabal.sandbox.config,
-- we read any constraints from there and also find the package
-- database from there, etc. And if there's nothing, we should
-- probably default to behaving like cabal, possibly with spitting out
-- a warning that "you should run `stk init` to make things better".
module Stack.Config
(MiniConfig
,loadConfig
,loadMiniConfig
,packagesParser
,resolvePackageEntry
,getImplicitGlobalProjectDir
,getIsGMP4
) where
import qualified Codec.Archive.Tar as Tar
import qualified Codec.Compression.GZip as GZip
import Control.Applicative
import Control.Arrow ((***))
import Control.Exception (IOException)
import Control.Monad
import Control.Monad.Catch (Handler(..), MonadCatch, MonadThrow, catches, throwM)
import Control.Monad.IO.Class
import Control.Monad.Logger hiding (Loc)
import Control.Monad.Reader (MonadReader, ask, runReaderT)
import Control.Monad.Trans.Control (MonadBaseControl)
import qualified Crypto.Hash.SHA256 as SHA256
import Data.Aeson.Extended
import qualified Data.ByteString as S
import qualified Data.ByteString.Base16 as B16
import qualified Data.ByteString.Lazy as L
import qualified Data.IntMap as IntMap
import qualified Data.Map as Map
import Data.Maybe
import Data.Monoid
import qualified Data.Text as T
import Data.Text.Encoding (encodeUtf8, decodeUtf8, decodeUtf8With)
import Data.Text.Encoding.Error (lenientDecode)
import qualified Data.Yaml as Yaml
import Distribution.System (OS (..), Platform (..), buildPlatform)
import qualified Distribution.Text
import Distribution.Version (simplifyVersionRange)
import GHC.Conc (getNumProcessors)
import Network.HTTP.Client.Conduit (HasHttpManager, getHttpManager, Manager, parseUrl)
import Network.HTTP.Download (download)
import Options.Applicative (Parser, strOption, long, help)
import Path
import Path.IO
import qualified Paths_stack as Meta
import Safe (headMay)
import Stack.BuildPlan
import Stack.Constants
import Stack.Config.Docker
import qualified Stack.Image as Image
import Stack.Init
import Stack.PackageIndex
import Stack.Types
import Stack.Types.Internal
import System.Directory (getAppUserDataDirectory, createDirectoryIfMissing, canonicalizePath)
import System.Environment
import System.IO
import System.Process.Read
-- | Get the latest snapshot resolver available: the newest LTS if any
-- exists, otherwise the latest nightly.
getLatestResolver
    :: (MonadIO m, MonadThrow m, MonadReader env m, HasConfig env, HasHttpManager env, MonadLogger m)
    => m Resolver
getLatestResolver = do
    snapshots <- getSnapshots
    -- 'IntMap.toList' is ascending, so the newest LTS major is last.
    let newestLts = listToMaybe (reverse (IntMap.toList (snapshotsLts snapshots)))
        snap = case newestLts of
            Just (major, minor) -> LTS major minor
            Nothing -> Nightly (snapshotsNightly snapshots)
    return (ResolverSnapshot snap)
-- | Interprets ConfigMonoid options: fills in a default for every
-- option the monoid left unset, resolves platform/environment values,
-- and assembles the final 'Config' via RecordWildCards.
configFromConfigMonoid
    :: (MonadLogger m, MonadIO m, MonadCatch m, MonadReader env m, HasHttpManager env)
    => Path Abs Dir -- ^ stack root, e.g. ~/.stack
    -> Path Abs File -- ^ user config file path, e.g. ~/.stack/config.yaml
    -> Maybe Project
    -> ConfigMonoid
    -> m Config
configFromConfigMonoid configStackRoot configUserConfigPath mproject configMonoid@ConfigMonoid{..} = do
     let configConnectionCount = fromMaybe 8 configMonoidConnectionCount
         configHideTHLoading = fromMaybe True configMonoidHideTHLoading
         configLatestSnapshotUrl = fromMaybe
            "https://s3.amazonaws.com/haddock.stackage.org/snapshots.json"
            configMonoidLatestSnapshotUrl
         configPackageIndices = fromMaybe
            [PackageIndex
                { indexName = IndexName "Hackage"
                , indexLocation = ILGitHttp
                        "https://github.com/commercialhaskell/all-cabal-hashes.git"
                        "https://s3.amazonaws.com/hackage.fpcomplete.com/00-index.tar.gz"
                , indexDownloadPrefix = "https://s3.amazonaws.com/hackage.fpcomplete.com/package/"
                , indexGpgVerify = False
                , indexRequireHashes = False
                }]
            configMonoidPackageIndices
         configGHCVariant0 = configMonoidGHCVariant
         -- An explicitly requested GHC variant implies we should not use
         -- the system GHC by default.
         configSystemGHC = fromMaybe (isNothing configGHCVariant0) configMonoidSystemGHC
         configInstallGHC = fromMaybe False configMonoidInstallGHC
         configSkipGHCCheck = fromMaybe False configMonoidSkipGHCCheck
         configSkipMsys = fromMaybe False configMonoidSkipMsys
         configExtraIncludeDirs = configMonoidExtraIncludeDirs
         configExtraLibDirs = configMonoidExtraLibDirs
         -- Only place in the codebase where platform is hard-coded. In theory
         -- in the future, allow it to be configured.
         (Platform defArch defOS) = buildPlatform
         arch = fromMaybe defArch
              $ configMonoidArch >>= Distribution.Text.simpleParse
         os = fromMaybe defOS
            $ configMonoidOS >>= Distribution.Text.simpleParse
         configPlatform = Platform arch os
         configRequireStackVersion = simplifyVersionRange configMonoidRequireStackVersion
         configConfigMonoid = configMonoid
         configImage = Image.imgOptsFromMonoid configMonoidImageOpts
         configCompilerCheck = fromMaybe MatchMinor configMonoidCompilerCheck
     configDocker <- dockerOptsFromMonoid mproject configStackRoot configMonoidDockerOpts
     -- Capture the process environment once; the resulting override is
     -- what every subprocess sees.
     rawEnv <- liftIO getEnvironment
     origEnv <- mkEnvOverride configPlatform
              $ augmentPathMap (map toFilePath configMonoidExtraPath)
              $ Map.fromList
              $ map (T.pack *** T.pack) rawEnv
     let configEnvOverride _ = return origEnv
     platformOnlyDir <- runReaderT platformOnlyRelDir configPlatform
     configLocalProgramsBase <-
         case configPlatform of
             Platform _ Windows -> do
                 progsDir <- getWindowsProgsDir configStackRoot origEnv
                 return $ progsDir </> $(mkRelDir stackProgName)
             _ ->
                 return $
                 configStackRoot </> $(mkRelDir "programs")
     let configLocalPrograms = configLocalProgramsBase </> platformOnlyDir
     configLocalBin <-
         case configMonoidLocalBinPath of
             Nothing -> do
                 localDir <- liftIO (getAppUserDataDirectory "local") >>= parseAbsDir
                 return $ localDir </> $(mkRelDir "bin")
             Just userPath ->
                 -- A user-supplied path must exist and parse as absolute.
                 liftIO (canonicalizePath userPath >>= parseAbsDir)
                 `catches`
                 [Handler (\(_ :: IOException) -> throwM $ NoSuchDirectory userPath)
                 ,Handler (\(_ :: PathParseException) -> throwM $ NoSuchDirectory userPath)
                 ]
     configJobs <-
        case configMonoidJobs of
            Nothing -> liftIO getNumProcessors
            Just i -> return i
     let configConcurrentTests = fromMaybe True configMonoidConcurrentTests
     let configTemplateParams = configMonoidTemplateParameters
         configScmInit = configMonoidScmInit
         configGhcOptions = configMonoidGhcOptions
         configSetupInfoLocations = configMonoidSetupInfoLocations
         configPvpBounds = fromMaybe PvpBoundsNone configMonoidPvpBounds
         configModifyCodePage = fromMaybe True configMonoidModifyCodePage
         configExplicitSetupDeps = configMonoidExplicitSetupDeps
         configRebuildGhcOptions = fromMaybe False configMonoidRebuildGhcOptions
         configApplyGhcOptions = fromMaybe AGOLocals configMonoidApplyGhcOptions
         configAllowNewer = fromMaybe False configMonoidAllowNewer
     return Config {..}
-- | Get the default 'GHCVariant'. On older Linux systems with libgmp4, returns 'GHCGMP4'.
getDefaultGHCVariant
    :: (MonadIO m, MonadBaseControl IO m, MonadCatch m, MonadLogger m)
    => EnvOverride -> Platform -> m GHCVariant
getDefaultGHCVariant menv (Platform _ Linux) = do
    gmp4 <- getIsGMP4 menv
    return $ if gmp4 then GHCGMP4 else GHCStandard
getDefaultGHCVariant _ _ = return GHCStandard
-- | Determine whether 'stack' is linked with libgmp4 (libgmp.so.3).
--
-- Runs @ldd@ on the current executable and checks whether any output
-- line's first word is @libgmp.so.3@. If @ldd@ cannot be run, we
-- conservatively answer 'False'.
getIsGMP4
    :: (MonadIO m, MonadBaseControl IO m, MonadCatch m, MonadLogger m)
    => EnvOverride -> m Bool
getIsGMP4 menv = do
    executablePath <- liftIO getExecutablePath
    elddOut <- tryProcessStdout Nothing menv "ldd" [executablePath]
    return $
        case elddOut of
            Left _ -> False
            Right lddOut -> hasLineWithFirstWord "libgmp.so.3" lddOut
  where
    hasLineWithFirstWord w =
        elem (Just w) .
        map (headMay . T.words) . T.lines . decodeUtf8With lenientDecode
-- | Get the directory on Windows where we should install extra programs. For
-- more information, see discussion at:
-- https://github.com/fpco/minghc/issues/43#issuecomment-99737383
getWindowsProgsDir :: MonadThrow m
                   => Path Abs Dir
                   -> EnvOverride
                   -> m (Path Abs Dir)
getWindowsProgsDir stackRoot m =
    case Map.lookup "LOCALAPPDATA" (unEnvOverride m) of
        Nothing -> return fallback
        Just t -> do
            localAppData <- parseAbsDir (T.unpack t)
            return $ localAppData </> $(mkRelDir "Programs")
  where
    -- Used when LOCALAPPDATA is absent from the environment.
    fallback = stackRoot </> $(mkRelDir "Programs")
-- | An environment with a subset of BuildConfig used for setup.
-- Bundles the HTTP manager, the resolved GHC variant, and the 'Config'.
data MiniConfig = MiniConfig Manager GHCVariant Config
instance HasConfig MiniConfig where
    getConfig (MiniConfig _ _ c) = c
-- NOTE(review): HasStackRoot/HasPlatform presumably rely on the classes'
-- default implementations built on 'getConfig' — confirm in Stack.Types.
instance HasStackRoot MiniConfig
instance HasHttpManager MiniConfig where
    getHttpManager (MiniConfig man _ _) = man
instance HasPlatform MiniConfig
instance HasGHCVariant MiniConfig where
    getGHCVariant (MiniConfig _ v _) = v
-- | Load the 'MiniConfig': build the environment override, grab the HTTP
-- manager from the reader environment, and resolve the GHC variant
-- (falling back to the platform default when none was configured).
loadMiniConfig
    :: (MonadIO m, HasHttpManager a, MonadReader a m, MonadBaseControl IO m, MonadCatch m, MonadLogger m)
    => Config -> m MiniConfig
loadMiniConfig config = do
    menv <- liftIO $ configEnvOverride config minimalEnvSettings
    manager <- fmap getHttpManager ask
    ghcVariant <- maybe (getDefaultGHCVariant menv (configPlatform config))
                        return
                        (configGHCVariant0 config)
    return $ MiniConfig manager ghcVariant config
-- | Load the configuration, using current directory, environment variables,
-- and defaults as necessary.
loadConfig :: (MonadLogger m,MonadIO m,MonadCatch m,MonadThrow m,MonadBaseControl IO m,MonadReader env m,HasHttpManager env,HasTerminal env)
           => ConfigMonoid
           -- ^ Config monoid from parsed command-line arguments
           -> Maybe (Path Abs File)
           -- ^ Override stack.yaml
           -> m (LoadConfig m)
loadConfig configArgs mstackYaml = do
    stackRoot <- determineStackRoot
    userConfigPath <- getDefaultUserConfigPath stackRoot
    extraConfigs0 <- getExtraConfigs userConfigPath >>= mapM loadYaml
    let extraConfigs =
            -- non-project config files' existence of a docker section should never default docker
            -- to enabled, so make it look like they didn't exist
            map (\c -> c {configMonoidDockerOpts =
                              (configMonoidDockerOpts c) {dockerMonoidDefaultEnable = False}})
                extraConfigs0
    mproject <- loadProjectConfig mstackYaml
    -- Precedence: command-line args, then project config, then extras.
    config <- configFromConfigMonoid stackRoot userConfigPath (fmap (\(proj, _, _) -> proj) mproject) $ mconcat $
        case mproject of
            Nothing -> configArgs : extraConfigs
            Just (_, _, projectConfig) -> configArgs : projectConfig : extraConfigs
    -- Refuse to run when this stack binary is outside the required range.
    unless (fromCabalVersion Meta.version `withinRange` configRequireStackVersion config)
        (throwM (BadStackVersionException (configRequireStackVersion config)))
    return LoadConfig
        { lcConfig = config
        , lcLoadBuildConfig = loadBuildConfig mproject config
        , lcProjectRoot = fmap (\(_, fp, _) -> parent fp) mproject
        }
-- | Load the build configuration, adding build-specific values to the config
-- loaded by 'loadConfig'. When run outside of a project, an implicit global
-- project config is read (and created on first use).
loadBuildConfig :: (MonadLogger m, MonadIO m, MonadCatch m, MonadReader env m, HasHttpManager env, MonadBaseControl IO m, HasTerminal env)
                => Maybe (Project, Path Abs File, ConfigMonoid)
                -> Config
                -> Maybe AbstractResolver -- override resolver
                -> Maybe CompilerVersion -- override compiler
                -> m BuildConfig
loadBuildConfig mproject config mresolver mcompiler = do
    env <- ask
    miniConfig <- loadMiniConfig config
    -- Either use the project we were given, or fall back to the implicit
    -- global project (creating its config file if it does not exist yet).
    (project', stackYamlFP) <- case mproject of
      Just (project, fp, _) -> return (project, fp)
      Nothing -> do
            $logInfo "Run from outside a project, using implicit global project config"
            destDir <- getImplicitGlobalProjectDir config
            let dest :: Path Abs File
                dest = destDir </> stackDotYaml
                dest' :: FilePath
                dest' = toFilePath dest
            createTree destDir
            exists <- fileExists dest
            if exists
                then do
                    ProjectAndConfigMonoid project _ <- loadYaml dest
                    -- Only chat about which resolver is in use when attached
                    -- to a terminal.
                    when (getTerminal env) $
                        case mresolver of
                            Nothing ->
                                $logInfo ("Using resolver: " <> resolverName (projectResolver project) <>
                                          " from implicit global project's config file: " <> T.pack dest')
                            Just aresolver -> do
                                let name =
                                        case aresolver of
                                            ARResolver resolver -> resolverName resolver
                                            ARLatestNightly -> "nightly"
                                            ARLatestLTS -> "lts"
                                            ARLatestLTSMajor x -> T.pack $ "lts-" ++ show x
                                            ARGlobal -> "global"
                                $logInfo ("Using resolver: " <> name <>
                                          " specified on command line")
                    return (project, dest)
                else do
                    -- First use: pick the latest snapshot and write out the
                    -- implicit global project's config plus a README.
                    r <- runReaderT getLatestResolver miniConfig
                    $logInfo ("Using latest snapshot resolver: " <> resolverName r)
                    $logInfo ("Writing implicit global project config file to: " <> T.pack dest')
                    $logInfo "Note: You can change the snapshot via the resolver field there."
                    let p = Project
                            { projectPackages = mempty
                            , projectExtraDeps = mempty
                            , projectFlags = mempty
                            , projectResolver = r
                            , projectCompiler = Nothing
                            , projectExtraPackageDBs = []
                            }
                    liftIO $ do
                        S.writeFile dest' $ S.concat
                            [ "# This is the implicit global project's config file, which is only used when\n"
                            , "# 'stack' is run outside of a real project. Settings here do _not_ act as\n"
                            , "# defaults for all projects. To change stack's default settings, edit\n"
                            , "# '", encodeUtf8 (T.pack $ toFilePath $ configUserConfigPath config), "' instead.\n"
                            , "#\n"
                            , "# For more information about stack's configuration, see\n"
                            , "# https://github.com/commercialhaskell/stack/blob/release/doc/yaml_configuration.md\n"
                            , "#\n"
                            , Yaml.encode p]
                        S.writeFile (toFilePath $ parent dest </> $(mkRelFile "README.txt")) $ S.concat
                            [ "This is the implicit global project, which is used only when 'stack' is run\n"
                            , "outside of a real project.\n" ]
                    return (p, dest)
    -- A command-line resolver override takes precedence over the project's.
    resolver <-
        case mresolver of
            Nothing -> return $ projectResolver project'
            Just aresolver ->
                runReaderT (makeConcreteResolver aresolver) miniConfig
    let project = project'
            { projectResolver = resolver
            , projectCompiler = mcompiler <|> projectCompiler project'
            }
    -- Determine the wanted compiler: explicit project setting first,
    -- otherwise derived from the resolver's build plan.
    wantedCompiler <-
        case projectCompiler project of
            Just wantedCompiler -> return wantedCompiler
            Nothing -> case projectResolver project of
                ResolverSnapshot snapName -> do
                    mbp <- runReaderT (loadMiniBuildPlan snapName) miniConfig
                    return $ mbpCompilerVersion mbp
                ResolverCustom _name url -> do
                    mbp <- runReaderT (parseCustomMiniBuildPlan stackYamlFP url) miniConfig
                    return $ mbpCompilerVersion mbp
                ResolverCompiler wantedCompiler -> return wantedCompiler
    extraPackageDBs <- mapM parseRelAsAbsDir (projectExtraPackageDBs project)
    packageCaches <- runReaderT (getMinimalEnvOverride >>= getPackageCaches) miniConfig
    return BuildConfig
        { bcConfig = config
        , bcResolver = projectResolver project
        , bcWantedCompiler = wantedCompiler
        , bcPackageEntries = projectPackages project
        , bcExtraDeps = projectExtraDeps project
        , bcExtraPackageDBs = extraPackageDBs
        , bcStackYaml = stackYamlFP
        , bcFlags = projectFlags project
        , bcImplicitGlobal = isNothing mproject
        , bcGHCVariant = getGHCVariant miniConfig
        , bcPackageCaches = packageCaches
        }
-- | Resolve a PackageEntry into a list of paths, downloading and cloning as
-- necessary. Each returned path is paired with @not (peExtraDep pe)@, i.e.
-- True when the entry is a wanted (non-extra-dep) target.
resolvePackageEntry
    :: (MonadIO m, MonadThrow m, MonadReader env m, HasHttpManager env, MonadLogger m, MonadCatch m
       ,MonadBaseControl IO m)
    => EnvOverride
    -> Path Abs Dir -- ^ project root
    -> PackageEntry
    -> m [(Path Abs Dir, Bool)]
resolvePackageEntry menv projRoot pe = do
    entryRoot <- resolvePackageLocation menv projRoot (peLocation pe)
    -- No subdirs means the entry root itself is the (single) package dir.
    paths <-
        case peSubdirs pe of
            [] -> return [entryRoot]
            subs -> mapM (resolveDir entryRoot) subs
    -- valid-wanted is deprecated; warn (but still proceed) if it is set.
    case peValidWanted pe of
        Nothing -> return ()
        Just _ -> $logWarn "Warning: you are using the deprecated valid-wanted field. You should instead use extra-dep. See: https://github.com/commercialhaskell/stack/blob/release/doc/yaml_configuration.md#packages"
    return $ map (, not $ peExtraDep pe) paths
-- | Resolve a PackageLocation into a path, downloading and cloning as
-- necessary. Remote locations are cached under the project's work dir,
-- keyed by a SHA256 of the URL (and commit, for git).
resolvePackageLocation
    :: (MonadIO m, MonadThrow m, MonadReader env m, HasHttpManager env, MonadLogger m, MonadCatch m
       ,MonadBaseControl IO m)
    => EnvOverride
    -> Path Abs Dir -- ^ project root
    -> PackageLocation
    -> m (Path Abs Dir)
resolvePackageLocation _ projRoot (PLFilePath fp) = resolveDir projRoot fp
resolvePackageLocation _ projRoot (PLHttpTarball url) = do
    -- Cache key: hex-encoded SHA256 of the URL.
    let name = T.unpack $ decodeUtf8 $ B16.encode $ SHA256.hash $ encodeUtf8 url
        root = projRoot </> workDirRel </> $(mkRelDir "downloaded")
    fileRel <- parseRelFile $ name ++ ".tar.gz"
    dirRel <- parseRelDir name
    dirRelTmp <- parseRelDir $ name ++ ".tmp"
    let file = root </> fileRel
        dir = root </> dirRel
        dirTmp = root </> dirRelTmp
    exists <- dirExists dir
    unless exists $ do
        req <- parseUrl $ T.unpack url
        _ <- download req file
        -- Unpack into a .tmp dir first and rename at the end, so a partially
        -- unpacked tree is never mistaken for a completed cache entry.
        removeTreeIfExists dirTmp
        liftIO $ withBinaryFile (toFilePath file) ReadMode $ \h -> do
            lbs <- L.hGetContents h
            let entries = Tar.read $ GZip.decompress lbs
            Tar.unpack (toFilePath dirTmp) entries
        renameDir dirTmp dir
    -- The tarball must contain exactly one top-level directory and no
    -- top-level files; otherwise wipe the cache entry and fail.
    x <- listDirectory dir
    case x of
        ([dir'], []) -> return dir'
        (dirs, files) -> do
            removeFileIfExists file
            removeTreeIfExists dir
            throwM $ UnexpectedTarballContents dirs files
resolvePackageLocation menv projRoot (PLGit url commit) = do
    -- Cache key: hex-encoded SHA256 of "url commit".
    let name = T.unpack $ decodeUtf8 $ B16.encode $ SHA256.hash $ encodeUtf8 $ T.unwords [url, commit]
        root = projRoot </> workDirRel </> $(mkRelDir "downloaded")
    dirRel <- parseRelDir $ name ++ ".git"
    dirRelTmp <- parseRelDir $ name ++ ".git.tmp"
    let dir = root </> dirRel
        dirTmp = root </> dirRelTmp
    exists <- dirExists dir
    unless exists $ do
        -- Same tmp-then-rename strategy as for tarballs.
        removeTreeIfExists dirTmp
        createTree (parent dirTmp)
        readInNull (parent dirTmp) "git" menv
            [ "clone"
            , T.unpack url
            , toFilePath dirTmp
            ]
            Nothing
        readInNull dirTmp "git" menv
            [ "reset"
            , "--hard"
            , T.unpack commit
            ]
            Nothing
        renameDir dirTmp dir
    return dir
-- | Determine the stack root directory, e.g. @~/.stack@. When the stack root
-- environment variable is set, that directory is created (if missing) and
-- canonicalized; otherwise the per-user application data directory is used.
determineStackRoot :: (MonadIO m, MonadThrow m) => m (Path Abs Dir)
determineStackRoot = do
    environment <- liftIO getEnvironment
    rawDir <- case lookup stackRootEnvVar environment of
        Just overrideDir -> liftIO $ do
            createDirectoryIfMissing True overrideDir
            canonicalizePath overrideDir
        Nothing -> liftIO $ getAppUserDataDirectory stackProgName
    parseAbsDir rawDir
-- | Determine the extra config file locations which exist.
--
-- Returns most local first. The STACK_CONFIG and STACK_GLOBAL_CONFIG
-- environment variables override the default user and global config paths
-- respectively; only files that actually exist are returned.
getExtraConfigs :: (MonadIO m, MonadLogger m)
                => Path Abs File -- ^ use config path
                -> m [Path Abs File]
getExtraConfigs userConfigPath = do
  defaultStackGlobalConfigPath <- getDefaultGlobalConfigPath
  liftIO $ do
    env <- getEnvironment
    mstackConfig <-
        maybe (return Nothing) (fmap Just . parseAbsFile)
      $ lookup "STACK_CONFIG" env
    mstackGlobalConfig <-
        maybe (return Nothing) (fmap Just . parseAbsFile)
      $ lookup "STACK_GLOBAL_CONFIG" env
    filterM fileExists
        $ fromMaybe userConfigPath mstackConfig
        : maybe [] return (mstackGlobalConfig <|> defaultStackGlobalConfigPath)
-- | Load and parse YAML from the given file. Parse failures are rethrown as
-- 'ParseConfigFileException'; any JSON warnings produced by decoding are
-- logged before the result is returned.
loadYaml :: (FromJSON (a, [JSONWarning]), MonadIO m, MonadLogger m) => Path Abs File -> m a
loadYaml path = do
    (result,warnings) <-
        liftIO $
        Yaml.decodeFileEither (toFilePath path) >>=
        either (throwM . ParseConfigFileException path) return
    logJSONWarnings (toFilePath path) warnings
    return result
-- | Get the location of the project config file, if it exists. Resolution
-- order: explicit override argument, then the STACK_YAML environment
-- variable, then an upward search for stack.yaml from the working directory.
getProjectConfig :: (MonadIO m, MonadThrow m, MonadLogger m)
                 => Maybe (Path Abs File)
                 -- ^ Override stack.yaml
                 -> m (Maybe (Path Abs File))
getProjectConfig (Just stackYaml) = return $ Just stackYaml
getProjectConfig Nothing = do
    env <- liftIO getEnvironment
    case lookup "STACK_YAML" env of
        Just fp -> do
            $logInfo "Getting project config file from STACK_YAML environment"
            -- STACK_YAML may be absolute or relative to the working dir.
            liftM Just $ case parseAbsFile fp of
                Left _ -> do
                    currDir <- getWorkingDir
                    resolveFile currDir fp
                Right path -> return path
        Nothing -> do
            currDir <- getWorkingDir
            search currDir
  where
    -- Walk upward from the given directory looking for stack.yaml,
    -- stopping at the filesystem root.
    search dir = do
        let fp = dir </> stackDotYaml
            fp' = toFilePath fp
        $logDebug $ "Checking for project config at: " <> T.pack fp'
        exists <- fileExists fp
        if exists
            then return $ Just fp
            else do
                let dir' = parent dir
                if dir == dir'
                    -- fully traversed, give up
                    then return Nothing
                    else search dir'
-- | Find the project config file location, respecting environment variables
-- and otherwise traversing parents. If no config is found, we supply a default
-- based on current directory. Returns the parsed project, the config file
-- path, and the project's raw 'ConfigMonoid'.
loadProjectConfig :: (MonadIO m, MonadThrow m, MonadLogger m)
                  => Maybe (Path Abs File)
                  -- ^ Override stack.yaml
                  -> m (Maybe (Project, Path Abs File, ConfigMonoid))
loadProjectConfig mstackYaml = do
    mfp <- getProjectConfig mstackYaml
    case mfp of
        Just fp -> do
            currDir <- getWorkingDir
            -- Log a path relative to the working dir when possible.
            $logDebug $ "Loading project config file " <>
                        T.pack (maybe (toFilePath fp) toFilePath (stripDir currDir fp))
            load fp
        Nothing -> do
            $logDebug $ "No project config file found, using defaults."
            return Nothing
  where
    load fp = do
        ProjectAndConfigMonoid project config <- loadYaml fp
        return $ Just (project, fp, config)
-- | Get the location of the default stack configuration file.
-- If a file already exists at the deprecated location, its location is returned.
-- Otherwise, the new location is returned.
getDefaultGlobalConfigPath
    :: (MonadIO m, MonadLogger m)
    => m (Maybe (Path Abs File))
getDefaultGlobalConfigPath =
    case (defaultGlobalConfigPath, defaultGlobalConfigPathDeprecated) of
        -- Both a current and a deprecated location are defined: prefer
        -- whichever 'tryDeprecatedPath' selects (existing deprecated file
        -- wins over a missing new one).
        (Just new,Just old) ->
            liftM (Just . fst ) $
            tryDeprecatedPath
                (Just "non-project global configuration file")
                fileExists
                new
                old
        (Just new,Nothing) -> return (Just new)
        _ -> return Nothing
-- | Get the location of the default user configuration file.
-- If a file already exists at the deprecated location, its location is returned.
-- Otherwise, the new location is returned. When the chosen file does not
-- exist yet, a commented skeleton config is written there.
getDefaultUserConfigPath
    :: (MonadIO m, MonadLogger m)
    => Path Abs Dir -> m (Path Abs File)
getDefaultUserConfigPath stackRoot = do
    (path, exists) <- tryDeprecatedPath
        (Just "non-project configuration file")
        fileExists
        (defaultUserConfigPath stackRoot)
        (defaultUserConfigPathDeprecated stackRoot)
    unless exists $ do
        createTree (parent path)
        liftIO $ S.writeFile (toFilePath path) $ S.concat
            [ "# This file contains default non-project-specific settings for 'stack', used\n"
            , "# in all projects. For more information about stack's configuration, see\n"
            , "# https://github.com/commercialhaskell/stack/blob/release/doc/yaml_configuration.md\n"
            , "#\n"
            , Yaml.encode (mempty :: Object) ]
    return path
-- | Command-line parser collecting zero or more @--package@ options.
packagesParser :: Parser [String]
packagesParser =
    many $ strOption $
        long "package" <> help "Additional packages that must be installed"
|
vigoo/stack
|
src/Stack/Config.hs
|
Haskell
|
bsd-3-clause
| 27,563
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TemplateHaskell #-}
module Diagrams.TwoD.Path.Metafont.Types where
import Control.Lens hiding (( # ))
#if __GLASGOW_HASKELL__ < 710
import Data.Monoid
#endif
#if !MIN_VERSION_base(4,11,0)
import Data.Semigroup
#endif
import Diagrams.Direction
import Diagrams.TwoD.Types
-- | A @PathJoin@ specifies the directions at both ends of a segment,
-- and a join which describes the control points explicitly or implicitly.
-- '_d1' is the direction at the start of the segment, '_d2' the direction
-- at the end, and '_j' the join between them.
data PathJoin d j = PJ { _d1 :: d, _j :: j, _d2 :: d }
  deriving (Functor, Show)
makeLenses ''PathJoin
-- | A direction can be specified at any point of a path. A /curl/
-- should only be specified at the endpoints. The endpoints default
-- to curl 1 if not set.
data PathDir n
  = PathDirCurl n      -- ^ a curl amount (endpoints only)
  | PathDirDir (Dir n) -- ^ an explicit 2D direction
  deriving Show
-- | True exactly when the direction was given as a curl.
isCurl :: PathDir n -> Bool
isCurl pd = case pd of
    PathDirCurl _ -> True
    PathDirDir _  -> False
-- | A curl amount is just a number.
type Curl n = n
-- | Directions are two-dimensional 'Direction's.
type Dir n = Direction V2 n
-- | A join is specified either by tensions or by explicit control points.
type BasicJoin n = Either (TensionJoin n) (ControlJoin n)
-- | Higher /Tension/ brings the path closer to a straight line
-- between segments. Equivalently, it brings the control points
-- closer to the endpoints. @TensionAmt@ introduces a fixed tension.
-- @TensionAtLeast@ introduces a tension which will be increased if by
-- so doing, an inflection point can be eliminated.
data Tension n
  = TensionAmt n
  | TensionAtLeast n
  deriving Show
-- | Extract the numeric tension, regardless of constructor.
getTension :: Tension n -> n
getTension (TensionAmt t)     = t
getTension (TensionAtLeast t) = t
-- | Two tensions and two directions completely determine the control
-- points of a segment.
data TensionJoin n = TJ { _t1 :: Tension n, _t2 :: Tension n }
  deriving Show
-- | The two intermediate control points of a segment, specified directly.
data ControlJoin n = CJ { _c1 :: P2 n, _c2 :: P2 n}
  deriving Show
makeLenses ''TensionJoin
makeLenses ''ControlJoin
-- | Phantom tag: 'MFPathData' whose head is a point.
data P
-- | Phantom tag: 'MFPathData' whose head is a join.
data J
-- | @MFPathData@ is the type manipulated by the metafont combinators.
-- The phantom index alternates between 'P' (point-headed) and 'J'
-- (join-headed) so that points and joins must strictly interleave.
data MFPathData a n where
  MFPathCycle:: MFPathData P n
  MFPathEnd :: P2 n -> MFPathData P n
  MFPathPt :: P2 n -> MFPathData J n -> MFPathData P n
  MFPathJoin :: PathJoin (Maybe (PathDir n)) (Maybe (BasicJoin n)) -> MFPathData P n -> MFPathData J n
-- | @MetafontSegment@ is used internally in solving the metafont
-- equations. It represents a segment with two known endpoints, and a
-- /join/, which may be specified in various ways.
data MetafontSegment d j n = MFS { _x1 :: P2 n, _pj :: (PathJoin d j ), _x2 :: P2 n }
  deriving (Functor, Show)
-- | @MFPath@ is the type used internally in solving the metafont
-- equations. The direction and join types are progressively refined
-- until all control points are known. The @loop@ flag affects both
-- the equations to be solved and the type of 'Trail' in the result.
-- If constructing an @MFPath@ in new code, the responsibility rests
-- on the user to ensure that successive @MetafontSegment@s share an
-- endpoint. If this is not true, the result is undefined.
data MFPath d j n = MFP { _loop :: Bool, _segs :: [MetafontSegment d j n] }
  deriving Show
-- | MFP is a type synonym to clarify signatures in Metafont.Internal.
-- Note that the type permits segments which are \"overspecified\",
-- having one or both directions specified, and also a 'ControlJoin'.
-- In this case, "Metafont.Internal" ignores the directions.
type MFP n = MFPath (Maybe (PathDir n)) (BasicJoin n) n
-- | MFS is a type synonym to clarify signatures in "Metafont.Internal".
type MFS n = MetafontSegment (Maybe (PathDir n)) (BasicJoin n) n
-- Lenses for the internal solver types.
makeLenses ''MetafontSegment
makeLenses ''MFPath
instance Monoid (PathJoin (Maybe (PathDir n)) (Maybe (BasicJoin n))) where
  -- | The default join, with no directions specified, and both tensions 1.
  mempty = PJ Nothing Nothing Nothing
  -- Combine field-wise; the right operand wins wherever it is 'Just'
  -- (a right-biased merge).
  l `mappend` r = PJ (c (l^.d1) (r^.d1)) (c (l^.j) (r^.j)) (c (l^.d2) (r^.d2))
    where
      c a b = case b of
        Nothing -> a
        Just _ -> b
instance Semigroup (PathJoin (Maybe (PathDir n)) (Maybe (BasicJoin n))) where
  (<>) = mappend
|
diagrams/diagrams-contrib
|
src/Diagrams/TwoD/Path/Metafont/Types.hs
|
Haskell
|
bsd-3-clause
| 4,322
|
--
-- Copyright © 2013-2015 Anchor Systems, Pty Ltd and Others
--
-- The code in this file, and the program it is a part of, is
-- made available to you by its authors as open source software:
-- you can redistribute it and/or modify it under the terms of
-- the 3-clause BSD licence.
--
{-# LANGUAGE TupleSections #-}
-- | Common helpers for git vogue plugins
module Git.Vogue.PluginCommon
(
-- * Output
outputGood,
outputUnfortunate,
outputBad,
lineWrap,
-- * FilePath handling
hsProjects,
forProjects,
-- * Command line parsing
getPluginCommand,
pureSubCommand,
PluginCommand(..),
-- * Utility
forWithKey_,
forWithKey,
) where
import Control.Applicative
import Control.Monad.IO.Class
import Data.Char
import Data.Functor
import Data.List
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import Data.Maybe
import Data.Monoid
import Data.Ord
import Options.Applicative
import System.Directory
import System.FilePath
-- | The check went or is going well, this should make the developer happy
outputGood :: MonadIO m => String -> m ()
outputGood = outputWithIcon " \x1b[32m[+]\x1b[0m "   -- ANSI green [+]
-- | A non-fatal warning of some sort. The developer should be able to ignore
-- this.
outputUnfortunate :: MonadIO m => String -> m ()
outputUnfortunate = outputWithIcon " \x1b[33m[*]\x1b[0m "   -- ANSI yellow [*]
-- | If any of these appear, you should probably be exploding and the developer
-- will be sad.
outputBad :: MonadIO m => String -> m ()
outputBad = outputWithIcon " \x1b[31m[-]\x1b[0m "   -- ANSI red [-]
-- | Print a message prefixed with the given icon; continuation lines are
-- indented by 'prependWS' so they line up under the icon.
outputWithIcon :: MonadIO m => String -> String -> m ()
outputWithIcon icon = liftIO . putStrLn . (icon <>) . prependWS
-- | Prepend some whitespace to every line but the first so that subsequent
-- lines line up below a [+] or [-].
--
-- Rewritten with a total 'case' instead of a partial irrefutable
-- @let (x:xs) = lines input@; the empty-input case now falls out of the
-- same pattern match instead of needing a separate equation.
prependWS :: String -> String
prependWS input = case lines input of
    []     -> ""
    (x:xs) -> intercalate "\n" (x : fmap ("    " <>) xs)
-- | Convenience for line wrapping long lines. Each input line is chopped
-- into chunks of at most @line_len@ characters; leading whitespace is
-- stripped from every chunk after the split.
lineWrap :: Int -> String -> String
lineWrap line_len =
    intercalate "\n" . fmap (intercalate "\n" . unfoldr f) . lines
  where
    f [] = Nothing
    -- 'fmap' over the pair applies 'lstrip' to the remainder only.
    f xs = Just . fmap lstrip $ splitAt line_len xs
    lstrip = dropWhile isSpace
-- | Traverse a 'Map' with access to each key, discarding the results.
forWithKey_ :: Applicative f => Map k v -> (k -> v -> f ()) -> f ()
forWithKey_ m action = () <$ M.traverseWithKey action m
-- | Traverse a 'Map' with access to each key, keeping the results.
forWithKey :: Applicative f => Map k v -> (k -> v -> f a) -> f (Map k a)
forWithKey m action = M.traverseWithKey action m
-- | Find .cabal files in hsFiles and arrange children underneath these
-- "headings". Keys of the result are project directories (relative to the
-- repository root) and values are file paths relative to that project.
hsProjects
    :: [FilePath] -- ^ Files to be checked
    -> [FilePath] -- ^ All files
    -> Map FilePath [FilePath]
hsProjects check_fs all_fs =
    -- We want to stick the subset of files to be checked under the same
    -- project headings as we would if we were checking all files. So we mush
    -- them together.
    --
    -- Discard the remainder, the user probably doesn't know what to do with
    -- it.
    let (complete_proj_map, _) = findProjects (isSuffixOf ".cabal") all_fs
        -- Now do the awesome quadratic thing and traverse lists.
        proj_map = fmap (filter (`elem` check_fs)) complete_proj_map
        -- And finally strip the prefixes of the dirs, so that this looks a bit
        -- like a one level trie.
        bug = error "BUG: hsProjects: A key was not a prefix of its elements"
    in M.mapWithKey (\k -> fmap (fromMaybe bug . stripPrefix k)) proj_map
-- | For the given projects, perform the supplied action on each given relative
-- URLS and having set the current directory to the project.
--
-- This will also take care of printing out a "Checking project in: " message.
forProjects
    :: (MonadIO m, Applicative m)
    => Map FilePath [FilePath]
    -> ([FilePath] -> m a)
    -> m (Map FilePath a)
forProjects projs f = do
    -- Remember where we started so we can restore it after each project.
    cwd <- liftIO $ getCurrentDirectory >>= canonicalizePath
    forWithKey projs $ \dir fs -> do
        let pdir = "." </> dir
        liftIO $ do
            putStrLn $ "Checking project in: " <> pdir
            setCurrentDirectory pdir
        x <- f fs
        -- NOTE(review): if 'f' throws, the working directory is NOT restored
        -- (there is no bracket here, and the (MonadIO, Applicative) context
        -- gives us nothing to mask with) -- confirm callers tolerate this.
        liftIO $ setCurrentDirectory cwd
        return x
-- | Given a predicate to identify a file as being in the "root" of a
-- directory and a bunch of FilePaths, figure out which file paths belong under
-- these roots and "compartmentalize" them. The remainder of possibly
-- un-accounted-for files are the second element returned.
--
-- This is useful for finding files belonging to distinct projects within a
-- repository.
findProjects
    :: (FilePath -> Bool)
    -> [FilePath]
    -> (Map FilePath [FilePath], [FilePath])
findProjects p xs =
    -- We start out by putting all of the files in a nested list, splitting
    -- up the path.
    let all_paths = fmap (splitPath . ('/':)) xs
        -- Now we find all of the project roots. Again tacking on the root so
        -- that init is safe and everything lines up.
        -- Sorted shortest-first so that nested roots are folded over last
        -- (foldr visits the deepest roots first).
        roots = sortBy (comparing length) . fmap (init . splitPath . ('/':)) $
                filter p xs
        -- Now iterate over the project roots, taking the bits of the whole
        -- list as we go.
        f current_root (result, remaining) =
            let included = isPrefixOf current_root
                to_take  = filter included remaining
                to_leave = filter (not . included) remaining
            in ( M.insert (joinPath $ tail current_root) to_take result
               , to_leave)
        (projects, remainder) = foldr f (mempty, all_paths) roots
    -- Now put the broken up paths back together and take the roots off.
    in ((fmap . fmap) (joinPath . tail) projects
       , fmap (joinPath . tail) remainder)
-- | Parser for plugin arguments: the three sub-commands every plugin
-- executable understands (name, check, fix).
pluginCommandParser :: Parser PluginCommand
pluginCommandParser = subparser
    ( pureSubCommand "name" CmdName "Get name of plugin"
    <> fpCommand "check" CmdCheck "Check for problems"
    <> fpCommand "fix" CmdFix "Try to fix problems"
    )
-- Helper for plugin commands that take [FilePath]s. Each positional
-- argument is a single string of newline-separated file paths, split via
-- @lines <$> str@.
fpCommand
    :: String
    -> ([FilePath] -> [FilePath] -> a)
    -> String
    -> Mod CommandFields a
fpCommand name ctor desc = command name (info parser (progDesc desc))
  where
    parser = ctor <$> argument (lines <$> str) (metavar "CHECKABLE_FILES")
                  <*> argument (lines <$> str) (metavar "ALL_FILES")
-- | Sub-command helper for commands carrying no arguments: wire a pure
-- value to a named sub-command with the given description.
pureSubCommand :: String -> a -> String -> Mod CommandFields a
pureSubCommand name ctor desc = command name cmdInfo
  where
    cmdInfo = info (pure ctor) (progDesc desc)
-- | Get the plugin command requested given a header and a description.
-- Parses the process's own command line; exits with usage on failure.
getPluginCommand :: String -> String -> IO PluginCommand
getPluginCommand hdr desc = execParser parser
  where
    parser = info (helper <*> pluginCommandParser)
        ( fullDesc
       <> progDesc desc
       <> header hdr)
-- | Arguments to the plugin. The two file lists are the files to be
-- checked and all files, respectively (matching 'fpCommand').
data PluginCommand
    -- | Check the project for problems.
    = CmdCheck [FilePath] [FilePath]
    -- | Fix problems in the project.
    | CmdFix [FilePath] [FilePath]
    -- | Report details.
    | CmdName
|
olorin/git-vogue
|
lib/Git/Vogue/PluginCommon.hs
|
Haskell
|
bsd-3-clause
| 7,195
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
module Servant.Servant.Db
( PackageDB(..)
, AddPackageIfMissing
, GetPackage
) where
import Control.Monad.Reader.Class
import Control.Monad.State.Class
import Data.Acid
import Data.SafeCopy
import Data.Typeable (Typeable)
import GHC.Generics (Generic)
import qualified Data.Map.Strict as Map
import Servant.Servant.Types
-- | The acid-state store: packages keyed by their name.
newtype PackageDB = PackageDB { unPkgDB :: Map.Map String Package }
  deriving (Typeable, Generic)
-- SafeCopy instance (version 0) required for acid-state serialization.
deriveSafeCopy 0 'base ''PackageDB
-- | Insert a package keyed by its name, keeping any existing entry intact
-- (the old value wins on collision).
addPackageIfMissing :: Package -> Update PackageDB ()
addPackageIfMissing pkg =
    modify $ \(PackageDB db) ->
        PackageDB (Map.insertWith (\_new old -> old) (packageName pkg) pkg db)
-- | Look up a package by name in the store, if present.
-- Replaces the @ask >>= return . f@ anti-pattern with 'asks'.
getPackage :: String -> Query PackageDB (Maybe Package)
getPackage pkgName = asks (Map.lookup pkgName . unPkgDB)
makeAcidic ''PackageDB ['addPackageIfMissing, 'getPackage]
|
jkarni/servant-servant
|
src/Servant/Servant/Db.hs
|
Haskell
|
bsd-3-clause
| 1,081
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE QuasiQuotes #-}
-- | Github API: http://developer.github.com/v3/oauth/
module Main where
import Data.Aeson.TH (defaultOptions, deriveJSON)
import qualified Data.ByteString.Char8 as BS
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Network.HTTP.Conduit
import URI.ByteString.QQ
import URI.ByteString
import Network.OAuth.OAuth2
import Keys
-- | Wrapper for the StackExchange @/info@ response. Field names are used
-- as-is for JSON keys by the 'deriveJSON' calls below, so they must match
-- the API's field names exactly.
data SiteInfo = SiteInfo { items :: [SiteItem]
                         , has_more :: Bool
                         , quota_max :: Integer
                         , quota_remaining :: Integer
                         } deriving (Show, Eq)
-- | Per-site statistics item within a 'SiteInfo' response.
data SiteItem = SiteItem { new_active_users :: Integer
                         , total_users :: Integer
                         , badges_per_minute :: Double
                         , total_badges :: Integer
                         , total_votes :: Integer
                         , total_comments :: Integer
                         , answers_per_minute :: Double
                         , questions_per_minute :: Double
                         , total_answers :: Integer
                         , total_accepted :: Integer
                         , total_unanswered :: Integer
                         , total_questions :: Integer
                         , api_revision :: Text
                         } deriving (Show, Eq)
-- Derive To/FromJSON instances using the record field names as keys.
$(deriveJSON defaultOptions ''SiteInfo)
$(deriveJSON defaultOptions ''SiteItem)
-- | Interactive OAuth2 demo: print the authorization URL, read the pasted
-- authorization code from stdin, exchange it for an access token, and use
-- the token for a test API call.
main :: IO ()
main = do
    BS.putStrLn $ serializeURIRef' $ authorizationUrl stackexchangeKey
    putStrLn "visit the url and paste code here: "
    code <- fmap (ExchangeToken . T.pack) getLine
    mgr <- newManager tlsManagerSettings
    token <- fetchAccessToken mgr stackexchangeKey code
    print token
    case token of
      Right at -> siteInfo mgr (accessToken at) >>= print
      Left _ -> putStrLn "no access token found yet"
-- | Test API: info. Fetch StackOverflow site statistics using the given
-- access token and decode the JSON response into 'SiteInfo'.
siteInfo :: Manager -> AccessToken -> IO (OAuth2Result SiteInfo)
siteInfo mgr token = authGetJSON mgr token [uri|https://api.stackexchange.com/2.2/info?site=stackoverflow|]
-- | UTF-8 encode a 'String' into a strict 'BS.ByteString'.
sToBS :: String -> BS.ByteString
sToBS s = T.encodeUtf8 (T.pack s)
|
reactormonk/hoauth2
|
example/StackExchange/test.hs
|
Haskell
|
bsd-3-clause
| 2,512
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
-- | SendGrid Client
module Email.SendGrid.Client where
import Data.Bifunctor (bimap)
import Data.ByteString.Lazy.Builder
import qualified Data.ByteString as B
-- import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString.Lazy.Char8 as BL8
import Data.Text.Encoding (encodeUtf8)
import Network.HTTP.Client
import Network.HTTP.Client.TLS
import qualified Network.HTTP.Types.Method as NHTM
import qualified Network.HTTP.Types.Status as NHTS
import Email.SendGrid.Types
-- | A raw HTTP response with a lazy ByteString body.
type Reply = Network.HTTP.Client.Response BL8.ByteString
-- | SendGrid API user name (the @api_user@ form parameter).
newtype SendGridUser = SendGridUser B.ByteString deriving (Eq, Show)
-- | SendGrid API key (the @api_key@ form parameter).
newtype SendGridKey = SendGridKey B.ByteString deriving (Eq, Show)
-- | Credentials pair sent with every SendGrid request.
data SendGridCredentials = SendGridCredentials {
    apiUser :: SendGridUser
  , apiKey :: SendGridKey
  } deriving (Eq, Show)
-- | Errors reported back from a send attempt.
data SendGridError =
    SendGridUnknownError B.ByteString
  | SendGridWrongCredentials
  deriving (Eq, Show)
-- | Overall outcome of a send attempt.
data SendGridResponseStatus =
    SendGridSuccess
  | SendGridFailed
  deriving (Eq, Show)
-- | Parsed result of a send attempt: status plus any errors.
data SendGridResponse = SendGridResponse {
    sgMessage :: SendGridResponseStatus
  , sgErrors :: [SendGridError]
  } deriving (Eq, Show)
-- | The SendGrid (v2 web API) mail-send endpoint.
sendGridMailSendEndpoint :: String
sendGridMailSendEndpoint = "https://api.sendgrid.com/api/mail.send.json"
-- | Example form-encoded payload used for manual testing; repeated
-- @to[]@/@toname[]@ keys express multiple recipients.
exampleEmail :: [(B.ByteString, B.ByteString)]
exampleEmail = [ ("to[]", "callen.23dc@gmail.com")
               , ("toname[]", "Chris The Coolest")
               , ("to[]", "cma@bitemyapp.com")
               , ("toname[]", "Chris The Cooler")
               , ("subject", "herro, test email")
               , ("text", "SendGrid test email yo :)")
               , ("from", "cma@bitemyapp.com")
               ]
-- | Render SendGrid credentials as the @api_user@/@api_key@ form parameters.
serialiseCredentials :: SendGridCredentials -> [(B.ByteString, B.ByteString)]
serialiseCredentials creds =
    case creds of
        SendGridCredentials (SendGridUser user) (SendGridKey key) ->
            [ ("api_user", user)
            , ("api_key", key) ]
-- | Unwrap an 'EmailAddress' and UTF-8 encode it for the wire.
serialiseEmailAddress :: EmailAddress -> B.ByteString
serialiseEmailAddress (EmailAddress addr) = encodeUtf8 addr
-- | Unwrap a 'RecipientName' and UTF-8 encode it for the wire.
serialiseRecipientName :: RecipientName -> B.ByteString
serialiseRecipientName (RecipientName name) = encodeUtf8 name
-- fmap (bimap serialiseEmailAddress serialiseRecipientName)
-- | Example recipient list used for manual testing.
exampleRecipients :: Recipients
exampleRecipients = Recipients
  [(EmailAddress "callen@woot.com"
   , RecipientName "Chris Allen")]
-- | Flatten a pair of pairs into a two-element list.
tuplesToList :: ((a, b), (a, b)) -> [(a, b)]
tuplesToList (firstPair, secondPair) = [firstPair, secondPair]
-- | Serialise (address, name) pairs under the given form-field keys.
-- For each pair, emits one @(e, encoded address)@ entry followed by one
-- @(n, encoded name)@ entry.
serialiseEmailName :: B.ByteString
                   -> B.ByteString
                   -> [(EmailAddress, RecipientName)]
                   -> [(B.ByteString, B.ByteString)]
serialiseEmailName e n pairs =
    pairs >>= (tuplesToList . toTuples)
    -- 'bimap' encodes and tags each half of the pair; 'tuplesToList'
    -- flattens the result back into the form-parameter list.
  where toTuples = bimap sEmail sRName
        sEmail = (e,) . serialiseEmailAddress
        sRName = (n,) . serialiseRecipientName
-- | Recipients use the repeated @to[]@/@toname[]@ form keys.
serialiseRecipients :: Recipients -> [(B.ByteString, B.ByteString)]
serialiseRecipients (Recipients addies) =
  serialiseEmailName "to[]" "toname[]" addies
-- | Carbon copies use the @cc@/@ccname@ form keys.
serialiseCc :: CarbonCopies -> [(B.ByteString, B.ByteString)]
serialiseCc (CarbonCopies ccs) =
  serialiseEmailName "cc" "ccname" ccs
-- | Blind carbon copies use the @bcc@/@bccname@ form keys.
serialiseBcc :: BlindCarbonCopies -> [(B.ByteString, B.ByteString)]
serialiseBcc (BlindCarbonCopies bccs) =
  serialiseEmailName "bcc" "bccname" bccs
-- | The sender address becomes the single @from@ form parameter.
serialiseFrom :: FromAddress -> [(B.ByteString, B.ByteString)]
serialiseFrom (FromAddress emailAddy) =
  [("from", serialiseEmailAddress emailAddy)]
-- | The sender display name becomes the single @fromname@ form parameter.
serialiseSenderName :: SenderName -> [(B.ByteString, B.ByteString)]
serialiseSenderName (SenderName sender) =
  [("fromname", encodeUtf8 sender)]
-- | TODO: unimplemented stub; will render the message body as form
-- parameters. Calling this currently crashes with 'undefined'.
serialiseEmailBody :: EmailBody -> [(B.ByteString, B.ByteString)]
serialiseEmailBody = undefined
-- | TODO: unimplemented stub; will render the subject as form parameters.
serialiseEmailSubject :: EmailSubject -> [(B.ByteString, B.ByteString)]
serialiseEmailSubject = undefined
-- | TODO: unimplemented stub; will combine the serialise* helpers above
-- into the full form-encoded payload for an 'Email'.
serialiseEmail :: Email -> [(B.ByteString, B.ByteString)]
serialiseEmail (Email recipients cc bcc fromAddress
                senderName emailBody subject)
  = undefined
-- | URL of a SendGrid API endpoint.
type SendGridEndpoint = String
-- | POST a form-encoded email to the given endpoint and return the raw
-- HTTP reply.
--
-- NOTE(review): the @checkStatus@ override returns 'Nothing' for every
-- status, which per http-client's contract suppresses status-code
-- exceptions -- callers must inspect the reply themselves. 'parseUrl' is
-- deprecated in newer http-client versions (use @parseRequest@) -- confirm
-- the pinned dependency before changing.
sendEmail' :: SendGridEndpoint -> SendGridCredentials -> Email -> IO Reply
sendEmail' url creds email = do
  initReq <- parseUrl url
  let preBody = initReq { method = NHTM.methodPost
                        , checkStatus = \_ _ _ -> Nothing }
      serialisedBody = (serialiseCredentials creds) ++ (serialiseEmail email)
      withBody = urlEncodedBody serialisedBody preBody
  withManager tlsManagerSettings $ httpLbs withBody
-- | Send an email through SendGrid and report the outcome.
--
-- Fix: the previous version bound the reply and then ignored it,
-- unconditionally reporting 'SendGridSuccess'. Since 'sendEmail''
-- suppresses status-code exceptions via its @checkStatus@ override, we
-- must classify the HTTP status here: 2xx is success, anything else is
-- 'SendGridFailed' carrying the response body for diagnosis.
sendEmail :: SendGridCredentials -> Email -> IO SendGridResponse
sendEmail creds email = do
  reply <- sendEmail' sendGridMailSendEndpoint creds email
  return $ if NHTS.statusIsSuccessful (responseStatus reply)
      then SendGridResponse SendGridSuccess []
      else SendGridResponse SendGridFailed
               [SendGridUnknownError (BL8.toStrict (responseBody reply))]
|
bitemyapp/sendgrid-haskell
|
src/Email/SendGrid/Client.hs
|
Haskell
|
bsd-3-clause
| 4,729
|
{-# language CPP #-}
-- | = Name
--
-- VK_EXT_shader_atomic_float - device extension
--
-- == VK_EXT_shader_atomic_float
--
-- [__Name String__]
-- @VK_EXT_shader_atomic_float@
--
-- [__Extension Type__]
-- Device extension
--
-- [__Registered Extension Number__]
-- 261
--
-- [__Revision__]
-- 1
--
-- [__Extension and Version Dependencies__]
--
-- - Requires Vulkan 1.0
--
-- - Requires @VK_KHR_get_physical_device_properties2@
--
-- [__Contact__]
--
-- - Vikram Kushwaha
-- <https://github.com/KhronosGroup/Vulkan-Docs/issues/new?body=[VK_EXT_shader_atomic_float] @vkushwaha-nv%0A<<Here describe the issue or question you have about the VK_EXT_shader_atomic_float extension>> >
--
-- == Other Extension Metadata
--
-- [__Last Modified Date__]
-- 2020-07-15
--
-- [__IP Status__]
-- No known IP claims.
--
-- [__Interactions and External Dependencies__]
--
-- - This extension requires
-- <https://htmlpreview.github.io/?https://github.com/KhronosGroup/SPIRV-Registry/blob/master/extensions/EXT/SPV_EXT_shader_atomic_float_add.html SPV_EXT_shader_atomic_float_add>
--
-- - This extension provides API support for
-- <https://github.com/KhronosGroup/GLSL/blob/master/extensions/ext/GLSL_EXT_shader_atomic_float.txt GL_EXT_shader_atomic_float>
--
-- [__Contributors__]
--
-- - Vikram Kushwaha, NVIDIA
--
-- - Jeff Bolz, NVIDIA
--
-- == Description
--
-- This extension allows a shader to contain floating-point atomic
-- operations on buffer, workgroup, and image memory. It also advertises
-- the SPIR-V @AtomicFloat32AddEXT@ and @AtomicFloat64AddEXT@ capabilities
-- that allows atomic addition on floating-points numbers. The supported
-- operations include @OpAtomicFAddEXT@, @OpAtomicExchange@, @OpAtomicLoad@
-- and @OpAtomicStore@.
--
-- == New Structures
--
-- - Extending
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceFeatures2',
-- 'Vulkan.Core10.Device.DeviceCreateInfo':
--
-- - 'PhysicalDeviceShaderAtomicFloatFeaturesEXT'
--
-- == New Enum Constants
--
-- - 'EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME'
--
-- - 'EXT_SHADER_ATOMIC_FLOAT_SPEC_VERSION'
--
-- - Extending 'Vulkan.Core10.Enums.StructureType.StructureType':
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT'
--
-- == New SPIR-V Capabilities
--
-- - <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#spirvenv-capabilities-table-AtomicFloat32AddEXT AtomicFloat32AddEXT>
--
-- - <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#spirvenv-capabilities-table-AtomicFloat64AddEXT AtomicFloat64AddEXT>
--
-- == Version History
--
-- - Revision 1, 2020-07-15 (Vikram Kushwaha)
--
-- - Internal revisions
--
-- == See Also
--
-- 'PhysicalDeviceShaderAtomicFloatFeaturesEXT'
--
-- == Document Notes
--
-- For more information, see the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#VK_EXT_shader_atomic_float Vulkan Specification>
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module Vulkan.Extensions.VK_EXT_shader_atomic_float ( PhysicalDeviceShaderAtomicFloatFeaturesEXT(..)
, EXT_SHADER_ATOMIC_FLOAT_SPEC_VERSION
, pattern EXT_SHADER_ATOMIC_FLOAT_SPEC_VERSION
, EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME
, pattern EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME
) where
import Foreign.Marshal.Alloc (allocaBytes)
import Foreign.Ptr (nullPtr)
import Foreign.Ptr (plusPtr)
import Vulkan.CStruct (FromCStruct)
import Vulkan.CStruct (FromCStruct(..))
import Vulkan.CStruct (ToCStruct)
import Vulkan.CStruct (ToCStruct(..))
import Vulkan.Zero (Zero(..))
import Data.String (IsString)
import Data.Typeable (Typeable)
import Foreign.Storable (Storable)
import Foreign.Storable (Storable(peek))
import Foreign.Storable (Storable(poke))
import qualified Foreign.Storable (Storable(..))
import GHC.Generics (Generic)
import Foreign.Ptr (Ptr)
import Data.Kind (Type)
import Vulkan.Core10.FundamentalTypes (bool32ToBool)
import Vulkan.Core10.FundamentalTypes (boolToBool32)
import Vulkan.Core10.FundamentalTypes (Bool32)
import Vulkan.Core10.Enums.StructureType (StructureType)
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT))
-- | VkPhysicalDeviceShaderAtomicFloatFeaturesEXT - Structure describing
-- features supported by VK_EXT_shader_atomic_float
--
-- = Members
--
-- This structure describes the following features:
--
-- = Description
--
-- If the 'PhysicalDeviceShaderAtomicFloatFeaturesEXT' structure is
-- included in the @pNext@ chain of the
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceFeatures2'
-- structure passed to
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.getPhysicalDeviceFeatures2',
-- it is filled in to indicate whether each corresponding feature is
-- supported. 'PhysicalDeviceShaderAtomicFloatFeaturesEXT' /can/ also be
-- used in the @pNext@ chain of 'Vulkan.Core10.Device.DeviceCreateInfo' to
-- selectively enable these features.
--
-- == Valid Usage (Implicit)
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_EXT_shader_atomic_float VK_EXT_shader_atomic_float>,
-- 'Vulkan.Core10.FundamentalTypes.Bool32',
-- 'Vulkan.Core10.Enums.StructureType.StructureType'
-- NOTE: generated code.  Each field below is a plain Haskell 'Bool'
-- here; the ToCStruct/FromCStruct instances marshal them to and from
-- Vulkan's 32-bit 'Bool32' representation.
data PhysicalDeviceShaderAtomicFloatFeaturesEXT = PhysicalDeviceShaderAtomicFloatFeaturesEXT
  { -- | #features-shaderBufferFloat32Atomics# @shaderBufferFloat32Atomics@
    -- indicates whether shaders /can/ perform 32-bit floating-point load,
    -- store and exchange atomic operations on storage buffers.
    shaderBufferFloat32Atomics :: Bool
  , -- | #features-shaderBufferFloat32AtomicAdd# @shaderBufferFloat32AtomicAdd@
    -- indicates whether shaders /can/ perform 32-bit floating-point add atomic
    -- operations on storage buffers.
    shaderBufferFloat32AtomicAdd :: Bool
  , -- | #features-shaderBufferFloat64Atomics# @shaderBufferFloat64Atomics@
    -- indicates whether shaders /can/ perform 64-bit floating-point load,
    -- store and exchange atomic operations on storage buffers.
    shaderBufferFloat64Atomics :: Bool
  , -- | #features-shaderBufferFloat64AtomicAdd# @shaderBufferFloat64AtomicAdd@
    -- indicates whether shaders /can/ perform 64-bit floating-point add atomic
    -- operations on storage buffers.
    shaderBufferFloat64AtomicAdd :: Bool
  , -- | #features-shaderSharedFloat32Atomics# @shaderSharedFloat32Atomics@
    -- indicates whether shaders /can/ perform 32-bit floating-point load,
    -- store and exchange atomic operations on shared memory.
    shaderSharedFloat32Atomics :: Bool
  , -- | #features-shaderSharedFloat32AtomicAdd# @shaderSharedFloat32AtomicAdd@
    -- indicates whether shaders /can/ perform 32-bit floating-point add atomic
    -- operations on shared memory.
    shaderSharedFloat32AtomicAdd :: Bool
  , -- | #features-shaderSharedFloat64Atomics# @shaderSharedFloat64Atomics@
    -- indicates whether shaders /can/ perform 64-bit floating-point load,
    -- store and exchange atomic operations on shared memory.
    shaderSharedFloat64Atomics :: Bool
  , -- | #features-shaderSharedFloat64AtomicAdd# @shaderSharedFloat64AtomicAdd@
    -- indicates whether shaders /can/ perform 64-bit floating-point add atomic
    -- operations on shared memory.
    shaderSharedFloat64AtomicAdd :: Bool
  , -- | #features-shaderImageFloat32Atomics# @shaderImageFloat32Atomics@
    -- indicates whether shaders /can/ perform 32-bit floating-point load,
    -- store and exchange atomic image operations.
    shaderImageFloat32Atomics :: Bool
  , -- | #features-shaderImageFloat32AtomicAdd# @shaderImageFloat32AtomicAdd@
    -- indicates whether shaders /can/ perform 32-bit floating-point add atomic
    -- image operations.
    shaderImageFloat32AtomicAdd :: Bool
  , -- | #features-sparseImageFloat32Atomics# @sparseImageFloat32Atomics@
    -- indicates whether 32-bit floating-point load, store and exchange atomic
    -- operations /can/ be used on sparse images.
    sparseImageFloat32Atomics :: Bool
  , -- | #features-sparseImageFloat32AtomicAdd# @sparseImageFloat32AtomicAdd@
    -- indicates whether 32-bit floating-point add atomic operations /can/ be
    -- used on sparse images.
    sparseImageFloat32AtomicAdd :: Bool
  }
  deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PhysicalDeviceShaderAtomicFloatFeaturesEXT)
#endif
deriving instance Show PhysicalDeviceShaderAtomicFloatFeaturesEXT
instance ToCStruct PhysicalDeviceShaderAtomicFloatFeaturesEXT where
  -- The C struct is 64 bytes / 8-aligned: sType at offset 0, pNext at
  -- 8, then twelve 4-byte VkBool32 feature flags starting at offset 16.
  withCStruct x f = allocaBytes 64 $ \p -> pokeCStruct p x (f p)
  pokeCStruct p PhysicalDeviceShaderAtomicFloatFeaturesEXT{..} f = do
    -- sType is fixed for this structure; pNext is always null here.
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (shaderBufferFloat32Atomics))
    poke ((p `plusPtr` 20 :: Ptr Bool32)) (boolToBool32 (shaderBufferFloat32AtomicAdd))
    poke ((p `plusPtr` 24 :: Ptr Bool32)) (boolToBool32 (shaderBufferFloat64Atomics))
    poke ((p `plusPtr` 28 :: Ptr Bool32)) (boolToBool32 (shaderBufferFloat64AtomicAdd))
    poke ((p `plusPtr` 32 :: Ptr Bool32)) (boolToBool32 (shaderSharedFloat32Atomics))
    poke ((p `plusPtr` 36 :: Ptr Bool32)) (boolToBool32 (shaderSharedFloat32AtomicAdd))
    poke ((p `plusPtr` 40 :: Ptr Bool32)) (boolToBool32 (shaderSharedFloat64Atomics))
    poke ((p `plusPtr` 44 :: Ptr Bool32)) (boolToBool32 (shaderSharedFloat64AtomicAdd))
    poke ((p `plusPtr` 48 :: Ptr Bool32)) (boolToBool32 (shaderImageFloat32Atomics))
    poke ((p `plusPtr` 52 :: Ptr Bool32)) (boolToBool32 (shaderImageFloat32AtomicAdd))
    poke ((p `plusPtr` 56 :: Ptr Bool32)) (boolToBool32 (sparseImageFloat32Atomics))
    poke ((p `plusPtr` 60 :: Ptr Bool32)) (boolToBool32 (sparseImageFloat32AtomicAdd))
    f
  cStructSize = 64
  cStructAlignment = 8
  -- Zero-fill variant: identical layout with every feature flag False.
  pokeZeroCStruct p f = do
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (zero))
    poke ((p `plusPtr` 20 :: Ptr Bool32)) (boolToBool32 (zero))
    poke ((p `plusPtr` 24 :: Ptr Bool32)) (boolToBool32 (zero))
    poke ((p `plusPtr` 28 :: Ptr Bool32)) (boolToBool32 (zero))
    poke ((p `plusPtr` 32 :: Ptr Bool32)) (boolToBool32 (zero))
    poke ((p `plusPtr` 36 :: Ptr Bool32)) (boolToBool32 (zero))
    poke ((p `plusPtr` 40 :: Ptr Bool32)) (boolToBool32 (zero))
    poke ((p `plusPtr` 44 :: Ptr Bool32)) (boolToBool32 (zero))
    poke ((p `plusPtr` 48 :: Ptr Bool32)) (boolToBool32 (zero))
    poke ((p `plusPtr` 52 :: Ptr Bool32)) (boolToBool32 (zero))
    poke ((p `plusPtr` 56 :: Ptr Bool32)) (boolToBool32 (zero))
    poke ((p `plusPtr` 60 :: Ptr Bool32)) (boolToBool32 (zero))
    f
instance FromCStruct PhysicalDeviceShaderAtomicFloatFeaturesEXT where
  -- Read back the twelve VkBool32 flags (sType/pNext at offsets 0/8
  -- are not represented in the Haskell record) and convert each to Bool.
  peekCStruct p = do
    shaderBufferFloat32Atomics <- peek @Bool32 ((p `plusPtr` 16 :: Ptr Bool32))
    shaderBufferFloat32AtomicAdd <- peek @Bool32 ((p `plusPtr` 20 :: Ptr Bool32))
    shaderBufferFloat64Atomics <- peek @Bool32 ((p `plusPtr` 24 :: Ptr Bool32))
    shaderBufferFloat64AtomicAdd <- peek @Bool32 ((p `plusPtr` 28 :: Ptr Bool32))
    shaderSharedFloat32Atomics <- peek @Bool32 ((p `plusPtr` 32 :: Ptr Bool32))
    shaderSharedFloat32AtomicAdd <- peek @Bool32 ((p `plusPtr` 36 :: Ptr Bool32))
    shaderSharedFloat64Atomics <- peek @Bool32 ((p `plusPtr` 40 :: Ptr Bool32))
    shaderSharedFloat64AtomicAdd <- peek @Bool32 ((p `plusPtr` 44 :: Ptr Bool32))
    shaderImageFloat32Atomics <- peek @Bool32 ((p `plusPtr` 48 :: Ptr Bool32))
    shaderImageFloat32AtomicAdd <- peek @Bool32 ((p `plusPtr` 52 :: Ptr Bool32))
    sparseImageFloat32Atomics <- peek @Bool32 ((p `plusPtr` 56 :: Ptr Bool32))
    sparseImageFloat32AtomicAdd <- peek @Bool32 ((p `plusPtr` 60 :: Ptr Bool32))
    pure $ PhysicalDeviceShaderAtomicFloatFeaturesEXT
             (bool32ToBool shaderBufferFloat32Atomics) (bool32ToBool shaderBufferFloat32AtomicAdd) (bool32ToBool shaderBufferFloat64Atomics) (bool32ToBool shaderBufferFloat64AtomicAdd) (bool32ToBool shaderSharedFloat32Atomics) (bool32ToBool shaderSharedFloat32AtomicAdd) (bool32ToBool shaderSharedFloat64Atomics) (bool32ToBool shaderSharedFloat64AtomicAdd) (bool32ToBool shaderImageFloat32Atomics) (bool32ToBool shaderImageFloat32AtomicAdd) (bool32ToBool sparseImageFloat32Atomics) (bool32ToBool sparseImageFloat32AtomicAdd)
instance Storable PhysicalDeviceShaderAtomicFloatFeaturesEXT where
  sizeOf ~_ = 64
  alignment ~_ = 8
  peek = peekCStruct
  -- Storable's poke takes no continuation, so run pokeCStruct with a no-op.
  poke ptr poked = pokeCStruct ptr poked (pure ())
-- | The all-features-disabled value: every flag is 'zero' (False).
instance Zero PhysicalDeviceShaderAtomicFloatFeaturesEXT where
  zero = PhysicalDeviceShaderAtomicFloatFeaturesEXT
           zero
           zero
           zero
           zero
           zero
           zero
           zero
           zero
           zero
           zero
           zero
           zero
type EXT_SHADER_ATOMIC_FLOAT_SPEC_VERSION = 1

-- | The revision number of the @VK_EXT_shader_atomic_float@ extension
-- (currently revision 1), usable at any 'Integral' type.
pattern EXT_SHADER_ATOMIC_FLOAT_SPEC_VERSION :: forall a . Integral a => a
pattern EXT_SHADER_ATOMIC_FLOAT_SPEC_VERSION = 1

type EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME = "VK_EXT_shader_atomic_float"

-- | The canonical extension-name string, e.g. for enabling the
-- extension at device creation time.
pattern EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME = "VK_EXT_shader_atomic_float"
|
expipiplus1/vulkan
|
src/Vulkan/Extensions/VK_EXT_shader_atomic_float.hs
|
Haskell
|
bsd-3-clause
| 14,106
|
module Main ( main ) where
import Debug.Trace
import Control.Exception ( evaluate )
main :: IO ()
main = do
  putStrLn "The test is successful if the word 'Evaluated' appears only once below:"
  -- NOTE: the exact shape of this expression is the whole point of the
  -- test.  It contains two syntactically identical occurrences of
  -- 'trace "Evaluated" (1 + 1)' — one let-bound, one inline — and the
  -- success message above says the trace should fire only once,
  -- i.e. the duplicate is expected to be shared (CSE'd) by the
  -- compiler/plugin under test.  Do not refactor or hand-share these.
  evaluate $ let x = trace "Evaluated" (1 + 1) in x + (trace "Evaluated" (1 + 1)) + x
  return ()
|
thoughtpolice/cse-ghc-plugin
|
tests/Traced.hs
|
Haskell
|
bsd-3-clause
| 298
|
--------------------------------------------------------------------
-- !
-- Module : Text.TDoc.QQ
-- Copyright : (c) Nicolas Pouillard 2009-2011
-- License : BSD3
--
-- Maintainer : Nicolas Pouillard <nicolas.pouillard@gmail.com>
--
--------------------------------------------------------------------
{-# LANGUAGE TemplateHaskell, FlexibleContexts #-}
module Text.TDoc.QQ (
-- * frquotes support
frQQ, frTop, frAntiq) where
import qualified Language.Haskell.TH as TH
import Language.Haskell.TH.Quote
import Text.TDoc (spanDoc, Star, Span, SpanTag(..), ToChildren(..), ChildOf(..))
import Data.Char (isSpace)
import Data.Monoid
-- | Top-level hook used by frquotes-generated code: builds a 'Span'
-- document node via 'spanDoc'.
frTop :: SpanTag t => Star t Span
frTop = spanDoc

-- | Antiquotation hook used by frquotes-generated code: converts a
-- spliced value into child nodes via 'toChildren'.
frAntiq :: ToChildren a t father => a -> [ChildOf t father]
frAntiq = toChildren
-- | Turn the literal text of a quasiquote into a TH expression.
-- Whitespace runs are collapsed to single spaces first; the (possibly
-- space-containing) chunk is then spliced via 'toChildren', or becomes
-- 'mempty' when the text was empty.
expandingQQExpr :: String -> TH.ExpQ
expandingQQExpr = chunk . stripIndents
  where
    chunk x | null x = TH.varE 'mempty
            | otherwise = TH.varE 'toChildren `TH.appE` TH.stringE x
-- | Collapse every maximal run of whitespace characters into a single
-- space, leaving all other characters untouched.
stripIndents :: String -> String
stripIndents = squash
  where
    squash [] = []
    squash (c : cs)
      | isSpace c = ' ' : squash (dropWhile isSpace cs)
      | otherwise = c : squash cs
-- | Build a 'QuasiQuoter' named @qqName@ whose every position
-- (expressions, patterns, types, declarations) fails with an
-- explanatory error; callers override just the positions they support.
quasiQuoter :: String -> QuasiQuoter
quasiQuoter qqName =
  QuasiQuoter (err "expressions") (err "patterns")
-- if GHC7
              (err "types") (err "declarations")
-- endif
  where err kind _ = error $ qqName ++ ": not available in " ++ kind

-- | The frquotes quasiquoter: only expression splices are supported.
frQQ :: QuasiQuoter
frQQ = (quasiQuoter "Text.TDoc.QQ.frQQ"){quoteExp = expandingQQExpr }
|
np/tdoc
|
Text/TDoc/QQ.hs
|
Haskell
|
bsd-3-clause
| 1,507
|
-- | Extra functions to help DFM deal with operators tree.
module Youtan.Regex.OperatorsExtra where
import Control.Monad.State
import Youtan.Regex.Operators ( Counter(..), Operator(..), OperatorID, initID, nextFreeID )
-- | Replaces counters in a tree with 'KleeneStar' keeping the input
-- language: @e+@ becomes @e e*@ and @e?@ becomes @(e | ε)@.
simplifyCounter :: Operator -> Operator
simplifyCounter op = case op of
  Disjunction i lhs rhs ->
    Disjunction i (simplifyCounter lhs) (simplifyCounter rhs)
  Concatenation i lhs rhs ->
    Concatenation i (simplifyCounter lhs) (simplifyCounter rhs)
  Counts i c inner ->
    let simplified = simplifyCounter inner
    in case c of
         KleeneStar -> Counts i c simplified
         OneOrMore  -> Concatenation initID simplified ( Counts i KleeneStar simplified )
         ZeroOrOne  -> Disjunction i simplified ( Empty initID )
  Group inner -> Group ( simplifyCounter inner )
  other -> other
-- | Assigns id to each and every single node (except for 'Group',
-- which is transparent and receives no id of its own) of a tree.
-- Ids are drawn left-to-right from the 'State' supply.
assignIDs :: Operator -> State OperatorID Operator
assignIDs o = case o of
  Empty _ -> do
    i <- nextID
    return (Empty i)
  Literal _ c -> do
    i <- nextID
    return (Literal i c)
  Concatenation _ lhs rhs -> do
    i <- nextID
    Concatenation i <$> assignIDs lhs <*> assignIDs rhs
  Disjunction _ lhs rhs -> do
    i <- nextID
    Disjunction i <$> assignIDs lhs <*> assignIDs rhs
  Counts _ c inner -> do
    i <- nextID
    Counts i c <$> assignIDs inner
  CharClass _ c -> do
    i <- nextID
    return (CharClass i c)
  Group inner -> Group <$> assignIDs inner
  where
    -- Advance the id supply, then hand back the freshly allocated id.
    nextID :: State OperatorID OperatorID
    nextID = modify nextFreeID >> get
-- TODO: Replace me with data fields.
-- | Returns id of operator.  'Group' is transparent: it reports the id
-- of its wrapped operator.
operID :: Operator -> OperatorID
operID op = case op of
  Empty i             -> i
  Literal i _         -> i
  Disjunction i _ _   -> i
  Concatenation i _ _ -> i
  Counts i _ _        -> i
  CharClass i _       -> i
  Group inner         -> operID inner
|
triplepointfive/Youtan
|
src/Youtan/Regex/OperatorsExtra.hs
|
Haskell
|
bsd-3-clause
| 1,903
|
import qualified TUDMensa as T
-- | Entry point: run the TU Darmstadt mensa client with its default
-- option set.
main = T.tudMensa T.defaultOpts
|
dschoepe/tud-mensa
|
Main.hs
|
Haskell
|
bsd-3-clause
| 64
|
{- Data/Singletons/Util.hs
(c) Richard Eisenberg 2013
eir@cis.upenn.edu
This file contains helper functions internal to the singletons package.
Users of the package should not need to consult this file.
-}
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances, RankNTypes,
TemplateHaskell, GeneralizedNewtypeDeriving,
MultiParamTypeClasses, StandaloneDeriving,
UndecidableInstances, MagicHash, UnboxedTuples,
LambdaCase, NoMonomorphismRestriction #-}
module Data.Singletons.Util where
import Prelude hiding ( exp, foldl, concat, mapM, any, pred )
import Language.Haskell.TH.Syntax hiding ( lift )
import Language.Haskell.TH.Desugar
import Data.Char
import Data.Either ( partitionEithers )
import Control.Monad hiding ( mapM )
import Control.Monad.Writer hiding ( mapM )
import Control.Monad.Reader hiding ( mapM )
import qualified Data.Map as Map
import Data.List.NonEmpty (NonEmpty)
import Data.Map ( Map )
import Data.Foldable
import Data.Traversable
import Data.Generics
import Control.Monad.Fail ( MonadFail )
-- The list of types that singletons processes by default
basicTypes :: [Name]
basicTypes = [ ''Maybe
             , ''[]
             , ''Either
             , ''NonEmpty
             ] ++ boundedBasicTypes

-- Default types that additionally admit Bounded-style treatment (tuples)
boundedBasicTypes :: [Name]
boundedBasicTypes =
  [ ''(,)
  , ''(,,)
  , ''(,,,)
  , ''(,,,,)
  , ''(,,,,,)
  , ''(,,,,,,)
  ] ++ enumBasicTypes

-- Default types that additionally admit Enum-style treatment
enumBasicTypes :: [Name]
enumBasicTypes = [ ''Bool, ''Ordering, ''() ]
-- like reportWarning, but generalized to any Quasi
qReportWarning :: Quasi q => String -> q ()
qReportWarning = qReport False

-- like reportError, but generalized to any Quasi
qReportError :: Quasi q => String -> q ()
qReportError = qReport True

-- | Generate a new Unique.  'qNewName' is expected to return a name
-- with a 'NameU' flavour (a fresh unique), which we extract; anything
-- else is an internal error.
qNewUnique :: DsMonad q => q Int
qNewUnique = do
  Name _ flav <- qNewName "x"
  case flav of
    NameU n -> return n
    _ -> error "Internal error: `qNewName` didn't return a NameU"
-- Fail loudly if any of the given names is "Rep": that name is treated
-- specially during promotion, so user data types may not use it.
checkForRep :: Quasi q => [Name] -> q ()
checkForRep names =
  when (any ((== "Rep") . nameBase) names)
    (fail $ "A data type named <<Rep>> is a special case.\n" ++
            "Promoting it will not work as expected.\n" ++
            "Please choose another name for your data type.")

-- Run the "Rep" check over every name occurring in the declarations
checkForRepInDecls :: Quasi q => [DDec] -> q ()
checkForRepInDecls decls =
  checkForRep (allNamesIn decls)

-- Extract just the field types of a constructor (normal or record)
tysOfConFields :: DConFields -> [DType]
tysOfConFields (DNormalC stys) = map snd stys
tysOfConFields (DRecC vstys) = map (\(_,_,ty) -> ty) vstys
-- extract the name and number of arguments to a constructor
extractNameArgs :: DCon -> (Name, Int)
extractNameArgs = liftSnd length . extractNameTypes

-- extract the name and types of constructor arguments
extractNameTypes :: DCon -> (Name, [DType])
extractNameTypes (DCon _ _ n fields _) = (n, tysOfConFields fields)

-- extract only the constructor's name
extractName :: DCon -> Name
extractName (DCon _ _ n _ _) = n
-- | Is an identifier constructor-like, i.e. does it start with an
-- uppercase letter, or with ':' (an infix constructor)?
-- Uses a pattern match instead of the partial 'head', so an empty
-- name yields 'False' rather than a crash.
isUpcase :: Name -> Bool
isUpcase n = case nameBase n of
  (first : _) -> isUpper first || first == ':'
  []          -> False
-- make an identifier uppercase
upcase :: Name -> Name
upcase = mkName . toUpcaseStr noPrefix

-- make an identifier uppercase and return it as a String.
-- Alphabetic names get the @alpha@ prefix with their first letter
-- upcased; symbolic names get the @symb@ prefix, plus a fresh leading
-- ':' unless they already start with ':' (or '$', special-cased to
-- avoid name clashes — see #29).
toUpcaseStr :: (String, String)  -- (alpha, symb) prefixes to prepend
            -> Name -> String
toUpcaseStr (alpha, symb) n
  | isHsLetter first
  = upcase_alpha
  | otherwise
  = upcase_symb
  where
    str   = nameBase n
    first = head str
    upcase_alpha = alpha ++ (toUpper first) : tail str
    upcase_symb
      | first == ':'
        || first == '$' -- special case to avoid name clashes. See #29
      = symb ++ str
      | otherwise
      = symb ++ ':' : str

-- the "no prefixes" argument for toUpcaseStr
noPrefix :: (String, String)
noPrefix = ("", "")

-- make an identifier lowercase; symbolic names simply lose their
-- leading character (assumed to be the ":")
locase :: Name -> Name
locase n =
  let str = nameBase n
      first = head str in
    if isHsLetter first
    then mkName ((toLower first) : tail str)
    else mkName (tail str) -- remove the ":"
-- put an uppercase prefix on a name. Takes two prefixes: one for identifiers
-- and one for symbols (names beginning with ':', whose ':' is dropped)
prefixUCName :: String -> String -> Name -> Name
prefixUCName pre tyPre n = case (nameBase n) of
    (':' : rest) -> mkName (tyPre ++ rest)
    alpha -> mkName (pre ++ alpha)

-- put a lowercase prefix on a name. Takes two prefixes: one for identifiers
-- and one for symbols
prefixLCName :: String -> String -> Name -> Name
prefixLCName pre tyPre n =
  let str = nameBase n
      first = head str in
    if isHsLetter first
    then mkName (pre ++ str)
    else mkName (tyPre ++ str)

-- append a suffix to a name: @ident@ for alphabetic names, @symb@ for
-- symbolic ones
suffixName :: String -> String -> Name -> Name
suffixName ident symb n =
  let str = nameBase n
      first = head str in
    if isHsLetter first
    then mkName (str ++ ident)
    else mkName (str ++ symb)
-- | Convert a number into both an alphanumeric and a symbolic form,
-- appended to the respective prefixes.  Each decimal digit of the
-- number maps to a fixed symbolic character for the symbolic variant.
uniquePrefixes :: String          -- alphanumeric prefix
               -> String          -- symbolic prefix
               -> Int
               -> (String, String) -- (alphanumeric, symbolic)
uniquePrefixes alpha symb n = (alpha ++ digits, symb ++ map toSymbol digits)
  where
    digits = show n
    -- fixed digit -> symbol table; anything else is an internal error
    toSymbol '0' = '!'
    toSymbol '1' = '#'
    toSymbol '2' = '$'
    toSymbol '3' = '%'
    toSymbol '4' = '&'
    toSymbol '5' = '*'
    toSymbol '6' = '+'
    toSymbol '7' = '.'
    toSymbol '8' = '/'
    toSymbol '9' = '>'
    toSymbol _   = error "non-digit in show #"
-- extract the kind from a TyVarBndr, if one was annotated
extractTvbKind :: DTyVarBndr -> Maybe DKind
extractTvbKind (DPlainTV _) = Nothing
extractTvbKind (DKindedTV _ k) = Just k

-- extract the name from a TyVarBndr.
extractTvbName :: DTyVarBndr -> Name
extractTvbName (DPlainTV n) = n
extractTvbName (DKindedTV n _) = n

-- use a TyVarBndr as a type variable, dropping any kind annotation
tvbToType :: DTyVarBndr -> DType
tvbToType = DVarT . extractTvbName

-- build a TyVarBndr from a name and an optional kind annotation
inferMaybeKindTV :: Name -> Maybe DKind -> DTyVarBndr
inferMaybeKindTV n Nothing = DPlainTV n
inferMaybeKindTV n (Just k) = DKindedTV n k

-- the kind stated by a type-family result signature, if any
resultSigToMaybeKind :: DFamilyResultSig -> Maybe DKind
resultSigToMaybeKind DNoSig = Nothing
resultSigToMaybeKind (DKindSig k) = Just k
resultSigToMaybeKind (DTyVarSig (DPlainTV _)) = Nothing
resultSigToMaybeKind (DTyVarSig (DKindedTV _ k)) = Just k
-- Get argument types from an arrow type. Removing ForallT is an
-- important preprocessing step required by promoteType.
-- Returns (bound type variables, context, argument types, result type).
unravel :: DType -> ([DTyVarBndr], [DPred], [DType], DType)
unravel (DForallT tvbs cxt ty) =
  let (tvbs', cxt', tys, res) = unravel ty in
  (tvbs ++ tvbs', cxt ++ cxt', tys, res)
unravel (DAppT (DAppT DArrowT t1) t2) =
  let (tvbs, cxt, tys, res) = unravel t2 in
  (tvbs, cxt, t1 : tys, res)
unravel t = ([], [], [], t)

-- Reconstruct arrow kind from the list of kinds (inverse of unravel's
-- argument/result split)
ravel :: [DType] -> DType -> DType
ravel [] res = res
ravel (h:t) res = DAppT (DAppT DArrowT h) (ravel t res)

-- count the number of arguments in a type
countArgs :: DType -> Int
countArgs ty = length args
  where (_, _, args, _) = unravel ty
-- changes all TyVars not to be NameU's. Workaround for GHC#11812.
-- Each NameU's unique is folded into the textual name ("x" + unique),
-- applied generically everywhere in the structure via SYB.
noExactTyVars :: Data a => a -> a
noExactTyVars = everywhere go
  where
    go :: Data a => a -> a
    go = mkT fix_tvb `extT` fix_ty `extT` fix_inj_ann

    -- NameU -> plain mkName with the unique appended; other flavours
    -- pass through unchanged
    no_exact_name :: Name -> Name
    no_exact_name (Name (OccName occ) (NameU unique)) = mkName (occ ++ show unique)
    no_exact_name n = n

    fix_tvb (DPlainTV n) = DPlainTV (no_exact_name n)
    fix_tvb (DKindedTV n k) = DKindedTV (no_exact_name n) k
    fix_ty (DVarT n) = DVarT (no_exact_name n)
    fix_ty ty = ty
    fix_inj_ann (InjectivityAnn lhs rhs)
      = InjectivityAnn (no_exact_name lhs) (map no_exact_name rhs)
-- substitute in a kind; kinds are types, so this is just substType
substKind :: Map Name DKind -> DKind -> DKind
substKind = substType

-- substitute types for type variables.  Respects shadowing: variables
-- bound by a forall are deleted from the substitution for its body
-- (note: binders are not freshened against the substitution's range).
substType :: Map Name DType -> DType -> DType
substType subst ty | Map.null subst = ty  -- fast path for empty substitutions
substType subst (DForallT tvbs cxt inner_ty)
  = DForallT tvbs' cxt' inner_ty'
  where
    (subst', tvbs') = mapAccumL subst_tvb subst tvbs
    cxt' = map (substPred subst') cxt
    inner_ty' = substType subst' inner_ty

    -- each binder shadows its variable; kind annotations are still
    -- substituted with the substitution in force OUTSIDE the binder
    subst_tvb s tvb@(DPlainTV n) = (Map.delete n s, tvb)
    subst_tvb s (DKindedTV n k) = (Map.delete n s, DKindedTV n (substKind s k))
substType subst (DAppT ty1 ty2) = substType subst ty1 `DAppT` substType subst ty2
substType subst (DSigT ty ki) = substType subst ty `DSigT` substType subst ki
substType subst (DVarT n) =
  case Map.lookup n subst of
    Just ki -> ki
    Nothing -> DVarT n
substType _ ty@(DConT {}) = ty
substType _ ty@(DArrowT) = ty
substType _ ty@(DLitT {}) = ty
substType _ ty@DWildCardT = ty
substType _ ty@DStarT = ty

-- substitute types in a predicate (constraint); kind signatures inside
-- 'DSigPr' are left untouched (substKindInPred handles those)
substPred :: Map Name DType -> DPred -> DPred
substPred subst pred | Map.null subst = pred
substPred subst (DAppPr pred ty) =
  DAppPr (substPred subst pred) (substType subst ty)
substPred subst (DSigPr pred ki) = DSigPr (substPred subst pred) ki
substPred _ pred@(DVarPr {}) = pred
substPred _ pred@(DConPr {}) = pred
substPred _ pred@DWildCardPr = pred

-- like substPred, but also substitutes inside kind signatures
substKindInPred :: Map Name DKind -> DPred -> DPred
substKindInPred subst pred | Map.null subst = pred
substKindInPred subst (DAppPr pred ty) =
  DAppPr (substKindInPred subst pred) (substType subst ty)
substKindInPred subst (DSigPr pred ki) = DSigPr (substKindInPred subst pred)
                                                (substKind subst ki)
substKindInPred _ pred@(DVarPr {}) = pred
substKindInPred _ pred@(DConPr {}) = pred
substKindInPred _ pred@DWildCardPr = pred

-- substitute inside a type-variable binder's kind annotation (if any)
substKindInTvb :: Map Name DKind -> DTyVarBndr -> DTyVarBndr
substKindInTvb _ tvb@(DPlainTV _) = tvb
substKindInTvb subst (DKindedTV n ki) = DKindedTV n (substKind subst ki)
-- turn a kind k into the kind (k -> *)
addStar :: DKind -> DKind
addStar t = DAppT (DAppT DArrowT t) DStarT

-- addStar lifted over Maybe
addStar_maybe :: Maybe DKind -> Maybe DKind
addStar_maybe = fmap addStar

-- apply a type to a list of types
foldType :: DType -> [DType] -> DType
foldType = foldl DAppT

-- apply an expression to a list of expressions
foldExp :: DExp -> [DExp] -> DExp
foldExp = foldl DAppE

-- is a function type? (a forall also counts, since it may wrap an arrow)
isFunTy :: DType -> Bool
isFunTy (DAppT (DAppT DArrowT _) _) = True
isFunTy (DForallT _ _ _) = True
isFunTy _ = False
-- | Choose the first list unless it is empty, in which case fall back
-- to the second (a list-flavoured 'fromMaybe').
orIfEmpty :: [a] -> [a] -> [a]
orIfEmpty xs ys
  | null xs   = ys
  | otherwise = xs
-- a fallback match list: a single wildcard that calls 'error'; used so
-- generated case expressions are never syntactically empty
emptyMatches :: [DMatch]
emptyMatches = [DMatch DWildPa (DAppE (DVarE 'error) (DLitE (StringL errStr)))]
  where errStr = "Empty case reached -- this should be impossible"

-- build a pattern match over several expressions, each with only one
-- pattern: the scrutinees are tupled up and matched against one tuple
-- pattern; with no scrutinees the body is returned directly
multiCase :: [DExp] -> [DPat] -> DExp -> DExp
multiCase [] [] body = body
multiCase scruts pats body =
  DCaseE (mkTupleDExp scruts) [DMatch (mkTupleDPat pats) body]
-- Make a desugar function into a TH function: desugar the input, run
-- the transformation over (original, desugared), then sweeten the
-- result back into TH syntax.
wrapDesugar :: (Desugar th ds, DsMonad q) => (th -> ds -> q ds) -> th -> q th
wrapDesugar f th = do
  ds <- desugar th
  fmap sweeten $ f th ds
-- a monad transformer for writing a monoid alongside returning a Q
newtype QWithAux m q a = QWA { runQWA :: WriterT m q a }
  deriving ( Functor, Applicative, Monad, MonadTrans
           , MonadWriter m, MonadReader r
           , MonadFail )

-- make a Quasi instance for easy lifting: every method simply lifts
-- through the WriterT layer
instance (Quasi q, Monoid m) => Quasi (QWithAux m q) where
  qNewName = lift `comp1` qNewName
  qReport = lift `comp2` qReport
  qLookupName = lift `comp2` qLookupName
  qReify = lift `comp1` qReify
  qReifyInstances = lift `comp2` qReifyInstances
  qLocation = lift qLocation
  qRunIO = lift `comp1` qRunIO
  qAddDependentFile = lift `comp1` qAddDependentFile
  qReifyRoles = lift `comp1` qReifyRoles
  qReifyAnnotations = lift `comp1` qReifyAnnotations
  qReifyModule = lift `comp1` qReifyModule
  qAddTopDecls = lift `comp1` qAddTopDecls
  qAddModFinalizer = lift `comp1` qAddModFinalizer
  qGetQ = lift qGetQ
  qPutQ = lift `comp1` qPutQ
  qReifyFixity = lift `comp1` qReifyFixity
  qReifyConStrictness = lift `comp1` qReifyConStrictness
  qIsExtEnabled = lift `comp1` qIsExtEnabled
  qExtsEnabled = lift qExtsEnabled

  -- qRecover needs care: run both computations for (result, aux) pairs
  -- in the underlying monad, then re-emit the surviving aux output
  qRecover exp handler = do
    (result, aux) <- lift $ qRecover (evalForPair exp) (evalForPair handler)
    tell aux
    return result

instance (DsMonad q, Monoid m) => DsMonad (QWithAux m q) where
  localDeclarations = lift localDeclarations

-- helper functions for composition: compose after 1 (resp. 2) arguments
comp1 :: (b -> c) -> (a -> b) -> a -> c
comp1 = (.)

comp2 :: (c -> d) -> (a -> b -> c) -> a -> b -> d
comp2 f g a b = f (g a b)
-- run a computation with an auxiliary monoid, discarding the monoid result
evalWithoutAux :: Quasi q => QWithAux m q a -> q a
evalWithoutAux = liftM fst . runWriterT . runQWA

-- run a computation with an auxiliary monoid, returning only the monoid result
evalForAux :: Quasi q => QWithAux m q a -> q m
evalForAux = execWriterT . runQWA

-- run a computation with an auxiliary monoid, return both the result
-- of the computation and the monoid result
evalForPair :: QWithAux m q a -> q (a, m)
evalForPair = runWriterT . runQWA

-- in a computation with an auxiliary map, add a binding to the map
addBinding :: (Quasi q, Ord k) => k -> v -> QWithAux (Map.Map k v) q ()
addBinding k v = tell (Map.singleton k v)

-- in a computation with an auxiliary list, add an element to the list
addElement :: Quasi q => elt -> QWithAux [elt] q ()
addElement elt = tell [elt]
-- | 'concatMap' lifted into a monad: map the monadic action over the
-- structure, then combine the monoidal results with 'fold'.
concatMapM :: (Monad monad, Monoid monoid, Traversable t)
           => (a -> monad monoid) -> t a -> monad monoid
concatMapM fn = fmap fold . mapM fn
-- | Wrap a value into a singleton list.
listify :: a -> [a]
listify x = [x]
-- project the first component of a triple
fstOf3 :: (a,b,c) -> a
fstOf3 (a,_,_) = a

-- apply a function to the first component of a pair
liftFst :: (a -> b) -> (a, c) -> (b, c)
liftFst f (a, c) = (f a, c)

-- apply a function to the second component of a pair
liftSnd :: (a -> b) -> (c, a) -> (c, b)
liftSnd f (c, a) = (c, f a)

-- split a list into its initial part and its final element;
-- partial by design: errors on the empty list
snocView :: [a] -> ([a], a)
snocView [] = error "snocView nil"
snocView [x] = ([], x)
snocView (x : xs) = liftFst (x:) (snocView xs)
-- | Split a list by classifying every element as 'Left' or 'Right',
-- preserving the relative order within each output list.
-- Implemented with the standard 'partitionEithers' instead of a
-- hand-rolled accumulate-and-reverse recursion (identical, stable
-- ordering).
partitionWith :: (a -> Either b c) -> [a] -> ([b], [c])
partitionWith f = partitionEithers . map f

-- | Monadic variant of 'partitionWith': the classifier's effects are
-- sequenced left to right before the results are split.
partitionWithM :: Monad m => (a -> m (Either b c)) -> [a] -> m ([b], [c])
partitionWithM f xs = partitionEithers <$> mapM f xs
-- separate let-declarations from all other declarations, preserving order
partitionLetDecs :: [DDec] -> ([DLetDec], [DDec])
partitionLetDecs = partitionWith (\case DLetDec ld -> Left ld
                                        dec -> Right dec)
-- | Map a triple-producing monadic function over a list, unzipping the
-- results into three parallel lists.
mapAndUnzip3M :: Monad m => (a -> m (b,c,d)) -> [a] -> m ([b],[c],[d])
mapAndUnzip3M f xs = do
  triples <- mapM f xs
  return (unzip3 triples)
-- | Is the character a letter or an underscore (i.e. valid at the
-- start of a Haskell identifier)?
isHsLetter :: Char -> Bool
isHsLetter c = c == '_' || isLetter c
|
int-index/singletons
|
src/Data/Singletons/Util.hs
|
Haskell
|
bsd-3-clause
| 15,210
|
module Emit where
import Syntax
import Codegen
import LLVM.Module
import LLVM.AST as AST
import LLVM.Context
import Control.Monad.Except
-- | Run an emission action.  On success the generated code is
-- returned; on failure the error message is printed and the empty
-- string returned.  (The failure branch deliberately avoids shadowing
-- Prelude's 'error'.)
runEmit :: ExceptT String IO String -> IO String
runEmit action = runExceptT action >>= either reportFailure return
  where
    reportFailure msg = putStrLn msg >> return ""
-- | Lower an LLVM AST module to textual LLVM assembly within the
-- given LLVM context.
emitInContext :: AST.Module -> Context -> IO String
emitInContext fileModule ctx =
  runEmit $ withModuleFromAST ctx fileModule moduleLLVMAssembly

-- | A fresh default LLVM module whose name and source-file name are
-- both the given file path.
makeModule :: FilePath -> AST.Module
makeModule filepath =
  defaultModule {
    moduleName = filepath,
    moduleSourceFileName = filepath
  }

-- | Generate LLVM assembly for a whole program: codegen every class of
-- the program into a module named after the file, then render it.
emit :: FilePath -> Program -> IO String
emit filepath ast = do
  let fileModule = makeModule filepath
  let finalModule =
        runLLVM fileModule . mapM codegenTop . programClasses $ ast
  withContext $ emitInContext finalModule
|
poiuj/pfcc
|
src/Emit.hs
|
Haskell
|
bsd-3-clause
| 846
|
module ParserUtil ( runGet
, runGetMaybe
, parseManyLazily
, parseManyLazyByteStringLazily
, parseKeepRaw
, match
, match_
, eof
) where
import qualified Control.Monad as M
import qualified Data.ByteString.Lazy as BSL
import qualified Data.ByteString as BS
import qualified Data.Serialize.Get as Get
import Control.Applicative ((<$>), (*>), (<*>), pure)
-- | Run a Get parser over a strict ByteString.
-- Partial: calls 'error' with the parse message on failure — use
-- 'runGetMaybe' for the total variant.
runGet :: Get.Get a -> BS.ByteString -> a
runGet p s = either error id $ Get.runGet p s
{-# INLINE runGet #-}

-- | Total variant of 'runGet': a parse failure becomes 'Nothing'.
runGetMaybe :: Get.Get a -> BS.ByteString -> Maybe a
runGetMaybe p s = either (const Nothing) Just $ Get.runGet p s
{-# INLINE runGetMaybe #-}
-- | Repeatedly run the parser from the front of the string, producing
-- results lazily until the input is exhausted or the parser fails
-- (failure silently ends the list).
parseManyLazily :: Get.Get a -> BS.ByteString -> [a]
parseManyLazily p s | BS.null s = []
                    | otherwise = case Get.runGetState p s 0 of
                        Right (r, rest) -> r : parseManyLazily p rest
                        Left _ -> []
{-# INLINE parseManyLazily #-}

-- | 'parseManyLazily' over a lazy ByteString, one chunk at a time.
-- NOTE(review): each chunk is parsed independently, so a value whose
-- encoding straddles a chunk boundary will be dropped — confirm
-- callers only supply chunk-aligned data.
parseManyLazyByteStringLazily :: Get.Get a -> BSL.ByteString -> [a]
parseManyLazyByteStringLazily p = concatMap (parseManyLazily p) . BSL.toChunks
-- | Run the parser and require its result to equal @test@; any other
-- result fails the parse.
match :: Eq a => Get.Get a -> a -> Get.Get a
match p test = p >>= check
  where
    check got
      | got == test = return got
      | otherwise   = fail ""
{-# INLINE match #-}
-- | Like 'match', but discards the matched value.
match_ :: Eq a => Get.Get a -> a -> Get.Get ()
match_ p test = M.void (match p test)
{-# INLINE match_ #-}
-- | Run a parser and also return the exact raw bytes it consumed.
-- The nested 'lookAhead's measure consumption without committing:
-- the inner one records 'remaining' AFTER the parser ran, the outer
-- records the total beforehand, and their difference is the consumed
-- length; 'getBytes' then re-reads that span for real, advancing the
-- actual position past the parsed value.
parseKeepRaw :: Get.Get a -> Get.Get (BS.ByteString, a)
parseKeepRaw g = do
  (len, r) <- Get.lookAhead $ do
    (res,after) <- Get.lookAhead $ (,) <$> g <*> Get.remaining
    total <- Get.remaining
    return (total-after, res)
  bs <- Get.getBytes len
  return (bs, r)
{-# INLINE parseKeepRaw #-}
-- | Succeed only when the entire input has been consumed; otherwise
-- fail with "expected eof".
eof :: Get.Get ()
eof = Get.isEmpty >>= \done -> M.unless done (fail "expected eof")
{-# INLINE eof #-}
|
benma/blockchain-parser-hs
|
src/ParserUtil.hs
|
Haskell
|
bsd-3-clause
| 1,869
|
{-|
This is a module of cross-platform file handling for Unix\/Mac\/Windows.
The standard module "System.Directory" and "System.FilePath" have
following shortcomings:
* getModificationTime exists in "System.Directory". But getAccessTime,
getChangeTime, getCreationTime do not exist.
* getModificationTime returns obsoleted type, 'ClockTime'. It should
return modern type, 'UTCTime', I believe.
* Some file functions are missing. A function to tell the link counter,
for instance.
* Path separator is not unified. Even though Windows accepts \'\/\' as a
file separator, getCurrentDirectory in "System.Directory" returns \'\\\'
as a file separator. So, we need to specify regular expression like
this: \"[\/\\\\]foo[\/\\\\]bar[\/\\\\]baz\".
* getHomeDirectory returns @HOMEDRIVE@\/@HOMEPATH@ instead of the @HOME@
environment variable on Windows.
This module aims to resolve these problems and provides:
* 'getModificationTime', 'getAccessTime', 'getChangeTime', and
'getCreationTime'. They return 'UTCTime'.
* 'isSymlink', 'getLinkCount', and 'hasSubDirectories'.
* \'\/\' as the single 'pathSeparator'. For instance,
'getCurrentDirectory' returns a path whose separator is \'\/\'
even on Windows.
* 'getHomeDirectory2' which refers the @HOME@ environment variable.
* Necessary functions in "System.Directory" and "System.FilePath".
-}
module System.EasyFile (
-- * Actions on directories
createDirectory
, createDirectoryIfMissing
, removeDirectory
, removeDirectoryRecursive
, renameDirectory
, getDirectoryContents
, getCurrentDirectory
, setCurrentDirectory
-- * Pre-defined directories
, getHomeDirectory
, getHomeDirectory2 -- missing
, getAppUserDataDirectory
, getUserDocumentsDirectory
, getTemporaryDirectory
-- * Actions on files
, removeFile
, renameFile
, copyFile
, canonicalizePath
-- , makeRelativeToCurrentDirectory -- xxx
-- , findExecutable -- xxx
-- * Existence tests
, doesFileExist
, doesDirectoryExist
-- * Permissions
, Permissions(..)
, getPermissions
, setPermissions
, copyPermissions
-- * Timestamps
, getCreationTime
, getChangeTime
, getModificationTime
, getAccessTime
-- * Size
, getFileSize
-- * File\/directory information
, isSymlink
, getLinkCount
, hasSubDirectories
, module System.EasyFile.FilePath
) where
----------------------------------------------------------------
import System.EasyFile.Directory
import System.EasyFile.FilePath
import System.EasyFile.Missing
|
kazu-yamamoto/easy-file
|
System/EasyFile.hs
|
Haskell
|
bsd-3-clause
| 2,530
|
module Jerimum.Storage.PostgreSQL.Setup
( Context(..)
, setup
, destroy
, findSegment
, openSegment
, closeSegment
, addSchemas
) where
import Control.Monad
import qualified Data.Map as M
import Data.Monoid
import qualified Data.Text as T
import Data.UUID.Types
import qualified Database.PostgreSQL.Simple as PQ
import Database.PostgreSQL.Simple.Types (Identifier (..))
import qualified Jerimum.Storage.PostgreSQL.Schemas.EventType as EventType
import Jerimum.Storage.PostgreSQL.SqlMonad
-- | Per-segment state threaded through the storage layer.
data Context = Context
  { segmentId :: UUID              -- ^ Primary key of the row in @segments@.
  , databaseName :: T.Text         -- ^ Source database this segment records.
  , schemasTable :: Identifier     -- ^ Child table @schemas_<uuid>@ (see 'makeContext').
  , eventsTable :: Identifier      -- ^ Child table @events_<uuid>@ (see 'makeContext').
  , knownSchemas :: M.Map EventType.Version UUID
                                   -- ^ Cache of schema versions already persisted;
                                   -- grown via 'addSchemas'.
  } deriving (Show)
-- | Merge newly discovered schemas into the context's cache.
-- Left-biased union: entries already known win over the new ones.
addSchemas :: Context -> M.Map EventType.Version UUID -> Context
addSchemas ctx extra =
  ctx {knownSchemas = knownSchemas ctx `M.union` extra}
-- | DDL for the master @segments@ table.  The CHECK bounds a segment's
-- LSN span; the EXCLUDE constraint allows at most one open segment.
createSegmentTable :: PQ.Query
createSegmentTable =
  mconcat
    [ "CREATE TABLE IF NOT EXISTS segments"
    , " ("
    , " segment_id uuid not null primary key"
    , " , dbname text not null"
    , " , lsn_lower pg_lsn"
    , " , lsn_upper pg_lsn"
    , " , time_lower bigint"
    , " , time_upper bigint"
    , " , is_open boolean not null"
    , " CHECK ((lsn_upper - lsn_lower) < 1073741824)"
    , " , EXCLUDE (is_open with =) WHERE (is_open)"
    , " );"
    ]
-- | DDL for the master @schemas@ table; per-segment children INHERIT it
-- (see 'openSegment').
createSchemasMasterTable :: PQ.Query
createSchemasMasterTable =
  mconcat
    [ "CREATE TABLE IF NOT EXISTS schemas"
    , " ("
    , " segment_id uuid not null"
    , " , schema_id uuid not null"
    , " , schema_version bytea not null"
    , " , schema_type smallint"
    , " , table_schema text"
    , " , table_name text"
    , " , table_cols text[]"
    , " , table_types bytea"
    , " , message_prefix text"
    , " , message_transactional boolean"
    , " , updated_at timestamptz"
    , " );"
    ]
-- | DDL for the master @events@ table; per-segment children INHERIT it
-- (see 'openSegment').
createEventsMasterTable :: PQ.Query
createEventsMasterTable =
  mconcat
    [ "CREATE TABLE IF NOT EXISTS events"
    , "("
    , " segment_id uuid not null"
    , ", lsn pg_lsn not null"
    , ", xno integer not null"
    , ", len integer not null"
    , ", timestamp bigint not null"
    , ", schema_ids uuid[]"
    , ", bin_events bytea"
    , ", updated_at timestamptz"
    , ");"
    ]
-- | Create the three master tables (segments, schemas, events), in order.
-- Row counts from @execute_@ are discarded.
setup :: SqlMonad ()
setup =
  performSQL $ \conn ->
    mapM_ (PQ.execute_ conn)
      [createSegmentTable, createSchemasMasterTable, createEventsMasterTable]
-- | Drop everything this module created.  CASCADE also removes the
-- per-segment child tables; order mirrors 'setup' reversed.
destroy :: SqlMonad ()
destroy =
  performSQL $ \conn ->
    mapM_ (PQ.execute_ conn)
      [ "DROP TABLE IF EXISTS events CASCADE"
      , "DROP TABLE IF EXISTS schemas CASCADE"
      , "DROP TABLE IF EXISTS segments CASCADE"
      ]
-- | Mark the given segment as closed, releasing the one-open-segment
-- EXCLUDE constraint on @segments@.
closeSegment :: UUID -> SqlMonad ()
closeSegment uuid =
  performSQL (\conn -> void (PQ.execute conn query [uuid]))
  where
    query =
      "UPDATE segments" <> " SET is_open = false" <> " WHERE segment_id = ?"
-- | Look up the (at most one) open segment for the given database and
-- rebuild a 'Context' for it; 'Nothing' when no open segment exists.
findSegment :: T.Text -> SqlMonad (Maybe Context)
findSegment dbname = do
  rows <-
    performSQL $ \conn ->
      PQ.query
        conn
        ("SELECT segment_id FROM segments" <> " WHERE is_open AND dbname = ?")
        [dbname]
  pure $
    case rows of
      [PQ.Only (Just sid)] -> Just (makeContext dbname sid)
      _ -> Nothing
-- | Build a fresh 'Context' for a segment.  Child-table names embed the
-- segment UUID with dashes stripped, e.g. @events_<hex>@.
makeContext :: T.Text -> UUID -> Context
makeContext dbname uuid =
  Context
    { segmentId = uuid
    , databaseName = dbname
    , schemasTable = Identifier ("schemas_" <> suffix)
    , eventsTable = Identifier ("events_" <> suffix)
    , knownSchemas = M.empty
    }
  where
    suffix = T.replace "-" "" (toText uuid)
-- | Open a brand-new segment: inside a single transaction, insert the
-- @segments@ row (with @is_open = true@) and create the per-segment
-- child tables that INHERIT the @schemas@ / @events@ masters.
-- The @?@ placeholders in the DDL are filled with 'Identifier' values,
-- which postgresql-simple renders as quoted identifiers.
openSegment :: T.Text -> UUID -> SqlMonad Context
openSegment dbname uuid = do
  performSQL $ \conn ->
    PQ.withTransaction conn $ do
      insertSegment conn
      createSchemasTable conn
      createEventsTable conn
  pure context
  where
    context = makeContext dbname uuid
    -- Register the segment; the EXCLUDE constraint on @segments@ makes
    -- this fail (rolling back the transaction) if one is already open.
    insertSegment conn =
      let query =
            "INSERT INTO segments " <> " ( segment_id, dbname, is_open )" <>
            " VALUES (?, ?, true)"
      in void $ PQ.execute conn query (uuid, dbname)
    -- Child of the @schemas@ master, constrained to this segment only.
    createSchemasTable conn =
      let query =
            "CREATE TABLE ?" <> " ( CHECK (segment_id = ?)" <>
            " , PRIMARY KEY (schema_id)" <>
            " , UNIQUE (schema_version)" <>
            " , FOREIGN KEY (segment_id) REFERENCES segments (segment_id)" <>
            " )" <>
            " INHERITS (schemas)"
      in void $ PQ.execute conn query (schemasTable context, uuid)
    -- Child of the @events@ master, keyed by (lsn, xno).
    createEventsTable conn =
      let query =
            "CREATE TABLE ?" <> " ( CHECK (segment_id = ?)" <>
            " , PRIMARY KEY (lsn, xno)" <>
            " , FOREIGN KEY (segment_id) REFERENCES segments (segment_id)" <>
            " )" <>
            " INHERITS (events)"
      in void $ PQ.execute conn query (eventsTable context, uuid)
|
dgvncsz0f/nws
|
src/Jerimum/Storage/PostgreSQL/Setup.hs
|
Haskell
|
bsd-3-clause
| 5,022
|
-- Can get Pythagorean triplets via 2ab, a^2-b^2,a^2+b^2 trick
-- Brute-force some values of a and b
main :: IO ()
main = print answer
  where
    -- First (a, b) in row-major order with a > b whose generated triple
    -- (2ab, a^2-b^2, a^2+b^2) sums to 1000; same search order as a
    -- nested-loop scan over [1..500] x [1..500].
    (a, b) =
      head
        [ (p, q)
        | p <- [1 .. 500]
        , q <- [1 .. 500]
        , p > q
        , 2 * p * q + (p * p - q * q) + (p * p + q * q) == 1000
        ]
    -- Product of the three triple members.
    answer = (2 * a * b) * (a * a - b * b) * (a * a + b * b)
|
akerber47/haskalah
|
test/files/euler/9.hs
|
Haskell
|
bsd-3-clause
| 462
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE BangPatterns #-}
module Language.Fixpoint.Solver.Eliminate
(eliminateAll) where
import Language.Fixpoint.Types
import Language.Fixpoint.Types.Names (existSymbol)
import Language.Fixpoint.Types.Visitor (kvars)
import Language.Fixpoint.Solver.Deps (depNonCuts, deps)
import Language.Fixpoint.Misc (fst3)
import Language.Fixpoint.Solver.Solution (Solution, mkJVar)
import qualified Data.HashMap.Strict as M
import Data.List (foldl')
import Control.Arrow (first, second)
import Control.DeepSeq (($!!))
--------------------------------------------------------------
-- | Solve away every non-cut kvar, accumulating the partial 'Solution'
-- and shrinking the constraint set as we go.
eliminateAll :: SInfo a -> (Solution, SInfo a)
eliminateAll !fi =
  {-# SCC "eliminateAll" #-}
  foldl' eliminate (M.empty, fi) (depNonCuts (deps fi))
--------------------------------------------------------------
-- | Eliminate one kvar @k@: its solution becomes the disjunction of the
-- predicates extracted from every constraint whose rhs mentions @k@;
-- those constraints and @k@'s wf constraint are then dropped from @fi@.
eliminate :: (Solution, SInfo a) -> KVar -> (Solution, SInfo a)
eliminate (!s, !fi) k = (M.insert k (mkJVar orPred) s, fi { cm = remainingCs , ws = M.delete k $ ws fi })
  where
    -- Constraints whose rhs mentions k feed the solution; the rest survive.
    relevantCs = M.filter (elem k . kvars . crhs) (cm fi)
    remainingCs = M.filter (notElem k . kvars . crhs) (cm fi)
    -- Partial lookup: assumes every non-cut kvar has a wf constraint.
    kvWfC = ws fi M.! k
    be = bs fi
    kDom = domain be kvWfC
    -- $!! forces the disjuncts to NF (deepseq), avoiding a space leak.
    orPred = {-# SCC "orPred" #-} POr $!! extractPred kDom be <$> M.elems relevantCs
-- | Predicate contributed by one constraint mentioning the kvar:
-- existentially quantify the constraint's non-function binders over the
-- lhs binder predicates plus the rhs substitution equalities, then
-- freshen the quantified names with the constraint id.
extractPred :: [Symbol] -> BindEnv -> SimpC a -> Pred
extractPred kDom be sc = renameQuantified (subcId sc) kSol
  where
    env = clhs be sc
    binds = second sr_sort <$> env
    -- Function-sorted binders cannot appear in refinements; skip them.
    nonFuncBinds = filter (nonFunction be . fst) binds
    lhsPreds = bindPred <$> env
    suPreds = substPreds kDom $ crhs sc
    kSol = PExist nonFuncBinds $ PAnd (lhsPreds ++ suPreds)
-- | Instantiate a binder's refinement at the binder itself,
-- e.g. @x:{v:int|v=10}@ becomes @x = 10@.
bindPred :: (Symbol, SortedReft) -> Pred
bindPred (sym, sr) =
  let rft = sr_reft sr
  in subst1 (reftPred rft) (reftBind rft, eVar sym)
-- | Turn the substitution attached to a kvar application into equality
-- predicates, keeping only symbols in the domain:
-- k0[v:=e1][x:=e2] -> [v = e1, x = e2]
substPreds :: [Symbol] -> Pred -> [Pred]
substPreds dom (PKVar _ (Su subs)) = [PAtom Eq (eVar sym) e | (sym, e) <- M.toList subs , sym `elem` dom]
-- Previously non-exhaustive: any rhs that is not a bare kvar application
-- died with an opaque pattern-match failure.  The invariant (callers pass
-- the rhs of a constraint selected because it mentions a kvar) still
-- holds on the success path; violations now fail with a clear message.
substPreds _ _ = error "Eliminate.substPreds: constraint rhs is not a single kvar application"
-- | True iff the symbol is not bound to a function sort in the env.
nonFunction :: BindEnv -> Symbol -> Bool
nonFunction be sym = all (/= sym) funcSyms
  where
    funcSyms = [s | (_, s, sr) <- bindEnvToList be, isFunctionSortedReft sr]
-- | Symbols in scope for a wf constraint: its refined variable followed
-- by every binder of its environment.
domain :: BindEnv -> WfC a -> [Symbol]
domain be wfc = fst3 (wrft wfc) : [s | (s, _) <- envCs be (wenv wfc)]
-- | Freshen the binders of an existential with names unique to the given
-- constraint id, so quantified names from different constraints cannot
-- clash when the disjuncts are combined in 'eliminate'.
--
-- NOTE(review): only the 'PExist' case is handled; this relies on the
-- invariant that callers (extractPred's @kSol@) always pass a 'PExist'.
renameQuantified :: Integer -> Pred -> Pred
renameQuantified i (PExist bs p) = PExist bs' p'
  where
    su = substFromQBinds i bs
    bs' = (first $ subst su) <$> bs
    p' = subst su p
-- | Substitution mapping each quantified symbol to a version tagged with
-- the constraint id (via 'existSymbol').
substFromQBinds :: Integer -> [(Symbol, Sort)] -> Subst
substFromQBinds i bs =
  Su (M.fromList [(s, EVar (existSymbol s i)) | (s, _) <- bs])
|
gridaphobe/liquid-fixpoint
|
src/Language/Fixpoint/Solver/Eliminate.hs
|
Haskell
|
bsd-3-clause
| 2,798
|
-- | @TemplateHaskell@ utilities for generating lens fields.
module Extended.Lens.TH
( fieldsVerboseLensRules
) where
import Universum
import Data.Char (toUpper)
import Data.List (stripPrefix)
import Language.Haskell.TH.Syntax (Name, mkName, nameBase)
import Lens.Micro.Platform (DefName (MethodName), LensRules,
camelCaseFields, lensField, makeLensesWith)
-- | A field namer for 'fieldsVerboseLensRules'.
--
-- For a field @_foo@ it generates method @foo@ in class @HasPolyFoo@.
-- Fields that do not start with @_@, or whose name is just @_@, produce
-- no lens: the pattern match @fieldUnprefixed\@(x:xs)@ fails inside the
-- 'Maybe' monad (MonadFail), yielding 'Nothing', which 'maybeToList'
-- turns into an empty result.
verboseFieldsNamer :: Name -> [Name] -> Name -> [DefName]
verboseFieldsNamer _ _ fieldName = maybeToList $ do
  fieldUnprefixed@(x:xs) <- stripPrefix "_" (nameBase fieldName)
  -- (++) binds looser than (:): this is "HasPoly" ++ (toUpper x : xs).
  let className = "HasPoly" ++ toUpper x : xs
  let methodName = fieldUnprefixed
  pure (MethodName (mkName className) (mkName methodName))
-- | Custom lens-generation rules: like @makeFields@, but the generated
-- classes are named @HasPolyFoo@ rather than @HasFoo@, so call sites are
-- expected to introduce their own constraint aliases.  See
-- 'Importify.Environment' for details.
fieldsVerboseLensRules :: LensRules
fieldsVerboseLensRules =
  (lensField .~ verboseFieldsNamer) camelCaseFields
|
serokell/importify
|
src/Extended/Lens/TH.hs
|
Haskell
|
mit
| 1,262
|
-- | Types describing runtime errors related to DB.
module Pos.DB.Error
( DBError (..)
) where
import Formatting (bprint, int, stext, (%))
import qualified Formatting.Buildable
import Universum
-- | Runtime failures originating in the DB layer.
data DBError =
    -- | Structure of DB is malformed (e. g. data is inconsistent,
    -- something is missing, etc.)
    DBMalformed !Text
  | DBUnexpectedVersionTag !Word8 !Word8 -- ^ The first field is the expected version
                                         -- tag. The second is the one received.
  deriving (Show)

-- Thrown with the standard 'Exception' machinery; no custom methods.
instance Exception DBError
-- TODO Make it cardanoException
-- | Human-readable rendering via the @formatting@ combinators.
-- The message text is part of observable output; do not reword it.
instance Buildable DBError where
    build (DBMalformed msg) = bprint ("malformed DB ("%stext%")") msg
    build (DBUnexpectedVersionTag w1 w2) =
        bprint ("unexpected version tag (Expected version tag: "%int%". Got: "%int%")")
            w1
            w2
|
input-output-hk/pos-haskell-prototype
|
db/src/Pos/DB/Error.hs
|
Haskell
|
mit
| 899
|
{-# LANGUAGE TemplateHaskellQuotes #-}
-- Trac #2632
module MkData where
import Language.Haskell.TH
-- Plain top-level operator; 'decl2' below shadows it inside the bracket.
op :: Num v => v -> v -> v
op a b = a + b

-- Quoted declaration group referring to the top-level 'op'.
decl1 = [d| func = 0 `op` 3 |]

-- Quoted group where 'op' is re-bound locally; the Trac #2632 regression
-- checks that name resolution inside TH brackets picks the local one.
-- Do not reformat the bracket contents: the layout is part of the test.
decl2 = [d| op x y = x
            func = 0 `op` 3 |]
|
mpickering/ghc-exactprint
|
tests/examples/ghc8/T2632.hs
|
Haskell
|
bsd-3-clause
| 233
|
-- | Operations on the 'Area' type that involve random numbers.
module Game.LambdaHack.Server.DungeonGen.AreaRnd
( -- * Picking points inside areas
xyInArea, mkRoom, mkVoidRoom
-- * Choosing connections
, connectGrid, randomConnection
-- * Plotting corridors
, Corridor, connectPlaces
) where
import Control.Exception.Assert.Sugar
import Data.Maybe
import qualified Data.Set as S
import Game.LambdaHack.Common.Point
import Game.LambdaHack.Common.Random
import Game.LambdaHack.Common.Vector
import Game.LambdaHack.Server.DungeonGen.Area
-- Picking random points inside areas
-- | Pick a uniformly random point within an area (bounds inclusive).
xyInArea :: Area -> Rnd Point
xyInArea area = do
  let (left, top, right, bottom) = fromArea area
  px <- randomR (left, right)
  py <- randomR (top, bottom)
  return $! Point px py
-- | Create a random room according to given parameters.
mkRoom :: (X, Y)    -- ^ minimum size
       -> (X, Y)    -- ^ maximum size
       -> Area      -- ^ the containing area, not the room itself
       -> Rnd Area
mkRoom (xm, ym) (xM, yM) area = do
  let (x0, y0, x1, y1) = fromArea area
  -- Sanity: the minimum size must fit inside the containing area.
  let !_A = assert (xm <= x1 - x0 + 1 && ym <= y1 - y0 + 1) ()
  -- Area of admissible (width, height) pairs, clamped to the container.
  let aW = (xm, ym, min xM (x1 - x0 + 1), min yM (y1 - y0 + 1))
      areaW = fromMaybe (assert `failure` aW) $ toArea aW
  Point xW yW <- xyInArea areaW  -- roll size
  -- Area of admissible top-left corners for the rolled size.
  let a1 = (x0, y0, max x0 (x1 - xW + 1), max y0 (y1 - yW + 1))
      area1 = fromMaybe (assert `failure` a1) $ toArea a1
  Point rx1 ry1 <- xyInArea area1  -- roll top-left corner
  let a3 = (rx1, ry1, rx1 + xW - 1, ry1 + yW - 1)
      area3 = fromMaybe (assert `failure` a3) $ toArea a3
  return $! area3
-- | Create a void room, i.e. a single-point area within the designated
-- area.  Uses the shrunken interior when available, so corridors pass
-- closer to the middle of the grid area.
mkVoidRoom :: Area -> Rnd Area
mkVoidRoom area = do
  let interior = fromMaybe area (shrink area)
  spot <- xyInArea interior
  return $! trivialArea spot
-- Choosing connections between areas in a grid
-- | Pick a subset of connections between adjacent areas within a grid
-- until the graph of all areas forms one connected component.
connectGrid :: (X, Y) -> Rnd [(Point, Point)]
connectGrid (nx, ny) = do
  let allCells = S.fromList [Point x y | x <- [0 .. nx - 1], y <- [0 .. ny - 1]]
  -- Seed the search with one randomly chosen cell (x drawn before y,
  -- matching the stream of random draws).
  seedX <- randomR (0, nx - 1)
  seedY <- randomR (0, ny - 1)
  connectGrid' (nx, ny) allCells (S.fromList [Point seedX seedY]) []
-- | Worker for 'connectGrid': randomized spanning-walk accumulating
-- connections until no candidate frontier cells remain.
connectGrid' :: (X, Y) -> S.Set Point -> S.Set Point
             -> [(Point, Point)]
             -> Rnd [(Point, Point)]
connectGrid' (nx, ny) unconnected candidates acc
  | S.null candidates = return $! map sortPoint acc
  | otherwise = do
      c <- oneOf (S.toList candidates)
      -- potential new candidates:
      let ns = S.fromList $ vicinityCardinal nx ny c
          nu = S.delete c unconnected -- new unconnected
          -- (new candidates, potential connections):
          (nc, ds) = S.partition (`S.member` nu) ns
      -- If some neighbour is already connected, add one random edge to it;
      -- otherwise add no edge this step.
      new <- if S.null ds
             then return id
             else do
               d <- oneOf (S.toList ds)
               return ((c, d) :)
      connectGrid' (nx, ny) nu
        (S.delete c (candidates `S.union` nc)) (new acc)
-- | Order the endpoints of a connection by the derived lexicographic
-- 'Ord' on 'Point'.
sortPoint :: (Point, Point) -> (Point, Point)
sortPoint (a, b) = if a <= b then (a, b) else (b, a)
-- | Pick a single random connection between adjacent areas within a grid.
-- A horizontal edge needs at least two columns; a vertical one at least
-- two rows; the assert demands one of these holds.  (Note the backticked
-- operators: `blame`/`twith` attach context to the assertion message.)
randomConnection :: (X, Y) -> Rnd (Point, Point)
randomConnection (nx, ny) =
  assert (nx > 1 && ny > 0 || nx > 0 && ny > 1 `blame` "wrong connection"
                                               `twith` (nx, ny)) $ do
    rb <- oneOf [False, True]
    if rb || ny <= 1
      then do  -- horizontal edge: (x, y) -- (x+1, y)
        rx <- randomR (0, nx-2)
        ry <- randomR (0, ny-1)
        return (Point rx ry, Point (rx+1) ry)
      else do  -- vertical edge: (x, y) -- (x, y+1)
        rx <- randomR (0, nx-1)
        ry <- randomR (0, ny-2)
        return (Point rx ry, Point rx (ry+1))
-- Plotting individual corridors between two areas
-- | The choice of horizontal or vertical orientation for the starting
-- section of a corridor.
data HV = Horiz | Vert

-- | The coordinates of consecutive fields of a corridor (its waypoints).
type Corridor = [Point]
-- | Create a corridor, either horizontal or vertical, with a possible
-- intermediate section running in the opposite direction through a
-- random point of the given area.
mkCorridor :: HV          -- ^ orientation of the starting section
           -> Point       -- ^ starting point
           -> Point       -- ^ ending point
           -> Area        -- ^ the area containing the intermediate point
           -> Rnd Corridor  -- ^ straight sections of the corridor
mkCorridor hv (Point x0 y0) (Point x1 y1) b = do
  Point mx my <- xyInArea b
  let waypoints = case hv of
        Horiz -> [(x0, y0), (mx, y0), (mx, y1), (x1, y1)]
        Vert  -> [(x0, y0), (x0, my), (x1, my), (x1, y1)]
  return $! map (uncurry Point) waypoints
-- | Try to connect two interiors of places with a corridor.
-- Choose entrances at least 4 or 3 tiles distant from the edges, if the
-- place is big enough.  Note that with @pfence == FNone@, the area
-- considered is the strict interior of the place, without the outermost
-- tiles.  Each pair is (interior area, outer area) of a place; the
-- source pair must lie left of/above the target pair (asserted below).
connectPlaces :: (Area, Area) -> (Area, Area) -> Rnd Corridor
connectPlaces (sa, so) (ta, to) = do
  let (_, _, sx1, sy1) = fromArea sa
      (_, _, sox1, soy1) = fromArea so
      (tx0, ty0, _, _) = fromArea ta
      (tox0, toy0, _, _) = fromArea to
  let !_A = assert (sx1 <= tx0 || sy1 <= ty0 `blame` (sa, ta)) ()
  let !_A = assert (sx1 <= sox1 || sy1 <= soy1 `blame` (sa, so)) ()
  let !_A = assert (tx0 >= tox0 || ty0 >= toy0 `blame` (ta, to)) ()
  -- Pull entrance candidates away from a place's edges when it is large
  -- enough (by 3 tiles for width >= 6, by 4 for width >= 8).
  let trim area =
        let (x0, y0, x1, y1) = fromArea area
            trim4 (v0, v1) | v1 - v0 < 6 = (v0, v1)
                           | v1 - v0 < 8 = (v0 + 3, v1 - 3)
                           | otherwise = (v0 + 4, v1 - 4)
            (nx0, nx1) = trim4 (x0, x1)
            (ny0, ny1) = trim4 (y0, y1)
        in fromMaybe (assert `failure` area) $ toArea (nx0, ny0, nx1, ny1)
  Point sx sy <- xyInArea $ trim so
  Point tx ty <- xyInArea $ trim to
  -- Pick an orientation plus an area for the corridor's bend, preferring
  -- the region strictly between the two given areas.
  let hva sarea tarea = do
        let (_, _, zsx1, zsy1) = fromArea sarea
            (ztx0, zty0, _, _) = fromArea tarea
            xa = (zsx1+2, min sy ty, ztx0-2, max sy ty)
            ya = (min sx tx, zsy1+2, max sx tx, zty0-2)
            xya = (zsx1+2, zsy1+2, ztx0-2, zty0-2)
        case toArea xya of
          Just xyarea -> fmap (\hv -> (hv, Just xyarea)) (oneOf [Horiz, Vert])
          Nothing ->
            case toArea xa of
              Just xarea -> return (Horiz, Just xarea)
              Nothing -> return (Vert, toArea ya) -- Vertical bias.
  (hvOuter, areaOuter) <- hva so to
  -- Fall back to the gap between the interiors if the outer gap is empty.
  (hv, area) <- case areaOuter of
    Just arenaOuter -> return (hvOuter, arenaOuter)
    Nothing -> do
      -- TODO: let mkCorridor only pick points on the floor fence
      (hvInner, aInner) <- hva sa ta
      let yell = assert `failure` (sa, so, ta, to, areaOuter, aInner)
          areaInner = fromMaybe yell aInner
      return (hvInner, areaInner)
  -- We cross width one places completely with the corridor, for void
  -- rooms and others (e.g., one-tile wall room then becomes a door, etc.).
  let (p0, p1) = case hv of
        Horiz -> (Point sox1 sy, Point tox0 ty)
        Vert -> (Point sx soy1, Point tx toy0)
  -- The conditions imposed on mkCorridor are tricky: there might not
  -- always exist a good intermediate point if the places are allowed to
  -- be close together, and then we let the intermediate part degenerate.
  mkCorridor hv p0 p1 area
|
Concomitant/LambdaHack
|
Game/LambdaHack/Server/DungeonGen/AreaRnd.hs
|
Haskell
|
bsd-3-clause
| 7,635
|
module Negation where
-- 'print' specialised to Double so the numeric literals below default
-- consistently.
print' :: Double -> Fay ()
print' = print

-- Exercises unary-negation parsing and precedence in Fay:
-- @-7/2@ vs @(-7)/2@ vs @-f x/y@.  The exact expression shapes are the
-- point of the test; do not "simplify" them.
main :: Fay ()
main = do print' $ (-7/2)
          print' $ (-7)/2
          print' $ -f x/y
  where f n = n * n
        x = 5
        y = 2
|
fpco/fay
|
tests/negation.hs
|
Haskell
|
bsd-3-clause
| 216
|
{-# LANGUAGE CPP #-}
module CmmInfo (
mkEmptyContInfoTable,
cmmToRawCmm,
mkInfoTable,
srtEscape,
-- info table accessors
closureInfoPtr,
entryCode,
getConstrTag,
cmmGetClosureType,
infoTable,
infoTableConstrTag,
infoTableSrtBitmap,
infoTableClosureType,
infoTablePtrs,
infoTableNonPtrs,
funInfoTable,
funInfoArity,
-- info table sizes and offsets
stdInfoTableSizeW,
fixedInfoTableSizeW,
profInfoTableSizeW,
maxStdInfoTableSizeW,
maxRetInfoTableSizeW,
stdInfoTableSizeB,
stdSrtBitmapOffset,
stdClosureTypeOffset,
stdPtrsOffset, stdNonPtrsOffset,
) where
#include "HsVersions.h"
import Cmm
import CmmUtils
import CLabel
import SMRep
import Bitmap
import Stream (Stream)
import qualified Stream
import Hoopl
import Maybes
import DynFlags
import Panic
import UniqSupply
import MonadUtils
import Util
import Outputable
import Data.Bits
import Data.Word
-- | Empty info table for a continuation, needed when splitting at proc
-- points: an empty stack rep, no profiling info, no SRT.
mkEmptyContInfoTable :: CLabel -> CmmInfoTable
mkEmptyContInfoTable lbl =
  CmmInfoTable
    { cit_lbl  = lbl
    , cit_rep  = mkStackRep []
    , cit_prof = NoProfilingInfo
    , cit_srt  = NoC_SRT
    }
-- | Convert a stream of Cmm groups to raw Cmm by materialising every
-- info table ('mkInfoTable'), threading a unique supply through the
-- stream with 'Stream.mapAccumL'.
cmmToRawCmm :: DynFlags -> Stream IO CmmGroup ()
            -> IO (Stream IO RawCmmGroup ())
cmmToRawCmm dflags cmms
  = do { uniqs <- mkSplitUniqSupply 'i'
       ; let do_one uniqs cmm = do
               -- The case forces the result pair to WHNF.
               case initUs uniqs $ concatMapM (mkInfoTable dflags) cmm of
                 (b,uniqs') -> return (uniqs',b)
                   -- NB. strictness fixes a space leak.  DO NOT REMOVE.
       ; return (Stream.mapAccumL do_one uniqs cmms >> return ())
       }
-- Make a concrete info table, represented as a list of CmmStatic
-- (it can't be simply a list of Word, because the SRT field is
-- represented by a label+offset expression).
--
-- With tablesNextToCode, the layout is
-- <reversed variable part>
-- <normal forward StgInfoTable, but without
-- an entry point at the front>
-- <code>
--
-- Without tablesNextToCode, the layout of an info table is
-- <entry label>
-- <normal forward rest of StgInfoTable>
-- <forward variable part>
--
-- See includes/rts/storage/InfoTables.h
--
-- For return-points these are as follows
--
-- Tables next to code:
--
-- <srt slot>
-- <standard info table>
-- ret-addr --> <entry code (if any)>
--
-- Not tables-next-to-code:
--
-- ret-addr --> <ptr to entry code>
-- <standard info table>
-- <srt slot>
--
-- * The SRT slot is only there if there is SRT info to record
-- | Materialise the info table(s) of one Cmm declaration as static data,
-- per the layout rules described in the comment block above.
mkInfoTable :: DynFlags -> CmmDecl -> UniqSM [RawCmmDecl]
mkInfoTable _ (CmmData sec dat)
  = return [CmmData sec dat]
mkInfoTable dflags proc@(CmmProc infos entry_lbl live blocks)
  --
  -- in the non-tables-next-to-code case, procs can have at most a
  -- single info table associated with the entry label of the proc.
  --
  | not (tablesNextToCode dflags)
  = case topInfoTable proc of   -- must be at most one
      -- no info table
      Nothing ->
        return [CmmProc mapEmpty entry_lbl live blocks]
      Just info@CmmInfoTable { cit_lbl = info_lbl } -> do
        (top_decls, (std_info, extra_bits)) <-
            mkInfoTableContents dflags info Nothing
        let
          rel_std_info   = map (makeRelativeRefTo dflags info_lbl) std_info
          rel_extra_bits = map (makeRelativeRefTo dflags info_lbl) extra_bits
        --
        -- Separately emit info table (with the function entry
        -- point as first entry) and the entry code
        --
        return (top_decls ++
                [CmmProc mapEmpty entry_lbl live blocks,
                 mkDataLits Data info_lbl
                    (CmmLabel entry_lbl : rel_std_info ++ rel_extra_bits)])
  --
  -- With tables-next-to-code, we can have many info tables,
  -- associated with some of the BlockIds of the proc.  For each info
  -- table we need to turn it into CmmStatics, and collect any new
  -- CmmDecls that arise from doing so.
  --
  | otherwise
  = do
      (top_declss, raw_infos) <-
         unzip `fmap` mapM do_one_info (mapToList (info_tbls infos))
      return (concat top_declss ++
              [CmmProc (mapFromList raw_infos) entry_lbl live blocks])
  where
    -- One info table: build its contents, make refs info-label-relative,
    -- and lay the variable part out reversed before the standard part
    -- (it sits in front of the entry code).
    do_one_info (lbl,itbl) = do
      (top_decls, (std_info, extra_bits)) <-
          mkInfoTableContents dflags itbl Nothing
      let
        info_lbl = cit_lbl itbl
        rel_std_info   = map (makeRelativeRefTo dflags info_lbl) std_info
        rel_extra_bits = map (makeRelativeRefTo dflags info_lbl) extra_bits
      return (top_decls, (lbl, Statics info_lbl $ map CmmStaticLit $
                               reverse rel_extra_bits ++ rel_std_info))
-----------------------------------------------------
type InfoTableContents = ( [CmmLit]   -- The standard part
                         , [CmmLit] ) -- The "extra bits"
-- These Lits have *not* had mkRelativeTo applied to them

-- | Compute the literal contents of an info table, dispatching on the
-- closure's 'SMRep': a 'RTSRep' wrapper just overrides the RTS tag,
-- stack frames record a liveness bitmap, and heap closures additionally
-- get per-closure-type "extra bits" from @mk_pieces@.
mkInfoTableContents :: DynFlags
                    -> CmmInfoTable
                    -> Maybe Int            -- Override default RTS type tag?
                    -> UniqSM ([RawCmmDecl],      -- Auxiliary top decls
                               InfoTableContents) -- Info tbl + extra bits
mkInfoTableContents dflags
                    info@(CmmInfoTable { cit_lbl  = info_lbl
                                       , cit_rep  = smrep
                                       , cit_prof = prof
                                       , cit_srt = srt })
                    mb_rts_tag
  | RTSRep rts_tag rep <- smrep
  = mkInfoTableContents dflags info{cit_rep = rep} (Just rts_tag)
    -- Completely override the rts_tag that mkInfoTableContents would
    -- otherwise compute, with the rts_tag stored in the RTSRep
    -- (which in turn came from a handwritten .cmm file)

  | StackRep frame <- smrep
  = do { (prof_lits, prof_data) <- mkProfLits dflags prof
       ; let (srt_label, srt_bitmap) = mkSRTLit dflags srt
       ; (liveness_lit, liveness_data) <- mkLivenessBits dflags frame
       ; let
             std_info = mkStdInfoTable dflags prof_lits rts_tag srt_bitmap liveness_lit
             -- Small bitmaps fit inline (rET_SMALL); large ones go via
             -- a label (rET_BIG), unless the tag was overridden.
             rts_tag | Just tag <- mb_rts_tag = tag
                     | null liveness_data = rET_SMALL -- Fits in extra_bits
                     | otherwise          = rET_BIG   -- Does not; extra_bits is
                                                      -- a label
       ; return (prof_data ++ liveness_data, (std_info, srt_label)) }

  | HeapRep _ ptrs nonptrs closure_type <- smrep
  = do { let layout = packIntsCLit dflags ptrs nonptrs
       ; (prof_lits, prof_data) <- mkProfLits dflags prof
       ; let (srt_label, srt_bitmap) = mkSRTLit dflags srt
       ; (mb_srt_field, mb_layout, extra_bits, ct_data)
            <- mk_pieces closure_type srt_label
       ; let std_info = mkStdInfoTable dflags prof_lits
                          (mb_rts_tag   `orElse` rtsClosureType smrep)
                          (mb_srt_field `orElse` srt_bitmap)
                          (mb_layout    `orElse` layout)
       ; return (prof_data ++ ct_data, (std_info, extra_bits)) }
  where
    mk_pieces :: ClosureTypeInfo -> [CmmLit]
              -> UniqSM ( Maybe StgHalfWord -- Override the SRT field with this
                        , Maybe CmmLit      -- Override the layout field with this
                        , [CmmLit]          -- "Extra bits" for info table
                        , [RawCmmDecl])     -- Auxiliary data decls
    mk_pieces (Constr con_tag con_descr) _no_srt    -- A data constructor
      = do { (descr_lit, decl) <- newStringLit con_descr
           ; return ( Just (toStgHalfWord dflags (fromIntegral con_tag))
                    , Nothing, [descr_lit], [decl]) }

    mk_pieces Thunk srt_label
      = return (Nothing, Nothing, srt_label, [])

    mk_pieces (ThunkSelector offset) _no_srt
      = return (Just (toStgHalfWord dflags 0), Just (mkWordCLit dflags (fromIntegral offset)), [], [])
        -- Layout known (one free var); we use the layout field for offset

    mk_pieces (Fun arity (ArgSpec fun_type)) srt_label
      = do { let extra_bits = packIntsCLit dflags fun_type arity : srt_label
           ; return (Nothing, Nothing, extra_bits, []) }

    mk_pieces (Fun arity (ArgGen arg_bits)) srt_label
      = do { (liveness_lit, liveness_data) <- mkLivenessBits dflags arg_bits
           ; let fun_type | null liveness_data = aRG_GEN
                          | otherwise          = aRG_GEN_BIG
                 extra_bits = [ packIntsCLit dflags fun_type arity
                              , srt_lit, liveness_lit, slow_entry ]
           ; return (Nothing, Nothing, extra_bits, liveness_data) }
      where
        slow_entry = CmmLabel (toSlowEntryLbl info_lbl)
        srt_lit = case srt_label of
                    []          -> mkIntCLit dflags 0
                    (lit:_rest) -> ASSERT( null _rest ) lit

    mk_pieces other _ = pprPanic "mk_pieces" (ppr other)

mkInfoTableContents _ _ _ = panic "mkInfoTableContents"   -- NonInfoTable dealt with earlier
-- | Pack two Ints into the two halves of one word-sized literal.
packIntsCLit :: DynFlags -> Int -> Int -> CmmLit
packIntsCLit dflags a b = packHalfWordsCLit dflags (half a) (half b)
  where
    half = toStgHalfWord dflags . fromIntegral
-- | Split an SRT into its (possibly absent) label and its bitmap.
mkSRTLit :: DynFlags
         -> C_SRT
         -> ([CmmLit],    -- srt_label, if any
             StgHalfWord) -- srt_bitmap
mkSRTLit dflags srt =
  case srt of
    NoC_SRT              -> ([], toStgHalfWord dflags 0)
    C_SRT lbl off bitmap -> ([cmmLabelOffW dflags lbl off], bitmap)
-------------------------------------------------------------------------
--
-- Lay out the info table and handle relative offsets
--
-------------------------------------------------------------------------
-- This function takes
-- * the standard info table portion (StgInfoTable)
-- * the "extra bits" (StgFunInfoExtraRev etc.)
-- * the entry label
-- * the code
-- and lays them out in memory, producing a list of RawCmmDecl
-------------------------------------------------------------------------
--
-- Position independent code
--
-------------------------------------------------------------------------
-- In order to support position independent code, we mustn't put absolute
-- references into read-only space. Info tables in the tablesNextToCode
-- case must be in .text, which is read-only, so we doctor the CmmLits
-- to use relative offsets instead.
-- Note that this is done even when the -fPIC flag is not specified,
-- as we want to keep binary compatibility between PIC and non-PIC.
-- | With tables-next-to-code, turn absolute label references into
-- offsets relative to the info label; otherwise leave literals alone.
makeRelativeRefTo :: DynFlags -> CLabel -> CmmLit -> CmmLit
makeRelativeRefTo dflags info_lbl lit
  | tablesNextToCode dflags =
      case lit of
        CmmLabel lbl        -> CmmLabelDiffOff lbl info_lbl 0
        CmmLabelOff lbl off -> CmmLabelDiffOff lbl info_lbl off
        _                   -> lit
  | otherwise = lit
-------------------------------------------------------------------------
--
-- Build a liveness mask for the stack layout
--
-------------------------------------------------------------------------
-- There are four kinds of things on the stack:
--
-- - pointer variables (bound in the environment)
-- - non-pointer variables (bound in the environment)
-- - free slots (recorded in the stack free list)
-- - non-pointer data slots (recorded in the stack free list)
--
-- The first two are represented with a 'Just' of a 'LocalReg'.
-- The last two with one or more 'Nothing' constructors.
-- Each 'Nothing' represents one used word.
--
-- The head of the stack layout is the top of the stack and
-- the least-significant bit.
mkLivenessBits :: DynFlags -> Liveness -> UniqSM (CmmLit, [RawCmmDecl])
-- ^ Returns:
--   1. The bitmap (literal value or label)
--   2. Large bitmap CmmData if needed
mkLivenessBits dflags liveness
  -- Large bitmap: emit it as read-only data and reference it by label.
  | n_bits > mAX_SMALL_BITMAP_SIZE dflags -- does not fit in one word
  = do { uniq <- getUniqueUs
       ; let bitmap_lbl = mkBitmapLabel uniq
       ; return (CmmLabel bitmap_lbl,
                 [mkRODataLits bitmap_lbl lits]) }

  | otherwise -- Fits in one word
  = return (mkStgWordCLit dflags bitmap_word, [])
  where
    n_bits = length liveness

    bitmap :: Bitmap
    bitmap = mkBitmap dflags liveness

    small_bitmap = case bitmap of
                     []  -> toStgWord dflags 0
                     [b] -> b
                     _   -> panic "mkLiveness"
    -- Size in the low bits, bitmap shifted above it.
    bitmap_word = toStgWord dflags (fromIntegral n_bits)
                  .|. (small_bitmap `shiftL` bITMAP_BITS_SHIFT dflags)

    lits = mkWordCLit dflags (fromIntegral n_bits)
           : map (mkStgWordCLit dflags) bitmap
      -- The first word is the size.  The structure must match
      -- StgLargeBitmap in includes/rts/storage/InfoTable.h
-------------------------------------------------------------------------
--
-- Generating a standard info table
--
-------------------------------------------------------------------------
-- The standard bits of an info table. This part of the info table
-- corresponds to the StgInfoTable type defined in
-- includes/rts/storage/InfoTables.h.
--
-- Its shape varies with ticky/profiling/tables next to code etc
-- so we can't use constant offsets from Constants
mkStdInfoTable
   :: DynFlags
   -> (CmmLit,CmmLit)   -- Closure type descr and closure descr  (profiling)
   -> Int               -- Closure RTS tag
   -> StgHalfWord       -- SRT length
   -> CmmLit            -- layout field
   -> [CmmLit]
mkStdInfoTable dflags (type_descr, closure_descr) cl_type srt_len layout_lit
 =      -- Parallel revertible-black hole field
    prof_info
        -- Ticky info (none at present)
        -- Debug info (none at present)
 ++ [layout_lit, type_lit]
 where
    -- Profiling descriptors are only present when cost-centre
    -- profiling is on.
    prof_info
        | gopt Opt_SccProfilingOn dflags = [type_descr, closure_descr]
        | otherwise = []
    -- Closure type and SRT length share one word, half each.
    type_lit = packHalfWordsCLit dflags (toStgHalfWord dflags (fromIntegral cl_type)) srt_len
-------------------------------------------------------------------------
--
-- Making string literals
--
-------------------------------------------------------------------------
-- | Literals for the two profiling descriptor fields, plus the string
-- data declarations backing them (zero literals when not profiling).
mkProfLits :: DynFlags -> ProfilingInfo -> UniqSM ((CmmLit,CmmLit), [RawCmmDecl])
mkProfLits dflags NoProfilingInfo = return ((zeroCLit dflags, zeroCLit dflags), [])
mkProfLits _ (ProfilingInfo td cd) = do
  (td_lit, td_decl) <- newStringLit td
  (cd_lit, cd_decl) <- newStringLit cd
  return ((td_lit, cd_lit), [td_decl, cd_decl])
-- | Allocate a fresh unique and build a byte-string literal plus its
-- backing data declaration.
newStringLit :: [Word8] -> UniqSM (CmmLit, GenCmmDecl CmmStatics info stmt)
newStringLit bytes = do
  uniq <- getUniqueUs
  return (mkByteStringCLit uniq bytes)
-- Misc utils
-- | Value of the srt field of an info table when using an StgLargeSRT:
-- the all-ones half-word (-1) acts as the escape sentinel.
srtEscape :: DynFlags -> StgHalfWord
srtEscape dflags = toStgHalfWord dflags (-1)
-------------------------------------------------------------------------
--
-- Accessing fields of an info table
--
-------------------------------------------------------------------------
closureInfoPtr :: DynFlags -> CmmExpr -> CmmExpr
-- Takes a closure pointer and returns the info table pointer
-- (the first word of the closure).
closureInfoPtr dflags e = CmmLoad e (bWord dflags)
-- | Entry code from an info pointer (the first word of a closure).
-- With tables-next-to-code the info pointer IS the entry address;
-- otherwise the entry address is loaded through it.
entryCode :: DynFlags -> CmmExpr -> CmmExpr
entryCode dflags e =
  if tablesNextToCode dflags then e else CmmLoad e (bWord dflags)
-- | Zero-indexed constructor tag of a closure, read from its info
-- table.  It lives in the SRT field (constructors don't need SRTs) and
-- is widened from a half word to a full word.
getConstrTag :: DynFlags -> CmmExpr -> CmmExpr
getConstrTag dflags closure_ptr =
  let itbl = infoTable dflags (closureInfoPtr dflags closure_ptr)
  in CmmMachOp (MO_UU_Conv (halfWordWidth dflags) (wordWidth dflags))
               [infoTableConstrTag dflags itbl]
-- | Closure type of a closure, read from its info table and widened
-- from a half word to a full word.
cmmGetClosureType :: DynFlags -> CmmExpr -> CmmExpr
cmmGetClosureType dflags closure_ptr =
  let itbl = infoTable dflags (closureInfoPtr dflags closure_ptr)
  in CmmMachOp (MO_UU_Conv (halfWordWidth dflags) (wordWidth dflags))
               [infoTableClosureType dflags itbl]
-- | Takes an info pointer (the first word of a closure) and returns a
-- pointer to the first word of the standard-form info table, excluding
-- the entry-code word (if present).  With tables-next-to-code the
-- table sits immediately *before* the code, so we step back by the
-- table size; otherwise we step past the entry code pointer.
infoTable :: DynFlags -> CmmExpr -> CmmExpr
infoTable dflags info_ptr
  | tablesNextToCode dflags = cmmOffsetB dflags info_ptr (- stdInfoTableSizeB dflags)
  | otherwise               = cmmOffsetW dflags info_ptr 1 -- Past the entry code pointer

-- | Takes an info table pointer (from 'infoTable') and returns the
-- constr tag field of the info table (same as the srt_bitmap field).
infoTableConstrTag :: DynFlags -> CmmExpr -> CmmExpr
infoTableConstrTag = infoTableSrtBitmap

-- | Takes an info table pointer (from 'infoTable') and returns the
-- srt_bitmap field of the info table, loaded as a half word.
infoTableSrtBitmap :: DynFlags -> CmmExpr -> CmmExpr
infoTableSrtBitmap dflags info_tbl
  = CmmLoad (cmmOffsetB dflags info_tbl (stdSrtBitmapOffset dflags)) (bHalfWord dflags)

-- | Takes an info table pointer (from 'infoTable') and returns the
-- closure type field of the info table, loaded as a half word.
infoTableClosureType :: DynFlags -> CmmExpr -> CmmExpr
infoTableClosureType dflags info_tbl
  = CmmLoad (cmmOffsetB dflags info_tbl (stdClosureTypeOffset dflags)) (bHalfWord dflags)

-- | Loads the ptrs half word of the info table's layout word
-- (see 'stdPtrsOffset').
infoTablePtrs :: DynFlags -> CmmExpr -> CmmExpr
infoTablePtrs dflags info_tbl
  = CmmLoad (cmmOffsetB dflags info_tbl (stdPtrsOffset dflags)) (bHalfWord dflags)

-- | Loads the non-ptrs half word of the info table's layout word
-- (see 'stdNonPtrsOffset').
infoTableNonPtrs :: DynFlags -> CmmExpr -> CmmExpr
infoTableNonPtrs dflags info_tbl
  = CmmLoad (cmmOffsetB dflags info_tbl (stdNonPtrsOffset dflags)) (bHalfWord dflags)
-- | Takes the info pointer of a function, and returns a pointer to the
-- first word of the StgFunInfoExtra struct in the info table.  With
-- tables-next-to-code the (reversed) extra struct precedes the
-- standard info table; otherwise it follows it.
funInfoTable :: DynFlags -> CmmExpr -> CmmExpr
funInfoTable dflags info_ptr
  | tablesNextToCode dflags
  = cmmOffsetB dflags info_ptr (- stdInfoTableSizeB dflags - sIZEOF_StgFunInfoExtraRev dflags)
  | otherwise
  = cmmOffsetW dflags info_ptr (1 + stdInfoTableSizeW dflags)
    -- Past the entry code pointer

-- | Takes the info pointer of a function, returns the function's
-- arity, widened to a full word.  The arity field's representation
-- size and offset differ between the Rev (tables-next-to-code) and
-- Fwd layouts of StgFunInfoExtra, hence the two-way choice below.
funInfoArity :: DynFlags -> CmmExpr -> CmmExpr
funInfoArity dflags iptr
  = cmmToWord dflags (cmmLoadIndex dflags rep fun_info (offset `div` rep_bytes))
  where
    fun_info = funInfoTable dflags iptr
    rep = cmmBits (widthFromBytes rep_bytes)
    -- Pick the platform-specific size and byte offset of the arity
    -- field for whichever StgFunInfoExtra layout is in use.
    (rep_bytes, offset)
      | tablesNextToCode dflags = ( pc_REP_StgFunInfoExtraRev_arity pc
                                  , oFFSET_StgFunInfoExtraRev_arity dflags )
      | otherwise               = ( pc_REP_StgFunInfoExtraFwd_arity pc
                                  , oFFSET_StgFunInfoExtraFwd_arity dflags )
    pc = sPlatformConstants (settings dflags)
-----------------------------------------------------------------------------
--
-- Info table sizes & offsets
--
-----------------------------------------------------------------------------
-- | Size in words of a standard info table.  The size varies with
-- profiling (an extra 'profInfoTableSizeW' words when cost-centre
-- profiling is on), so it can't come from Constants; it must vary in
-- sync with mkStdInfoTable.
stdInfoTableSizeW :: DynFlags -> WordOff
stdInfoTableSizeW dflags
  | gopt Opt_SccProfilingOn dflags = fixedInfoTableSizeW + profInfoTableSizeW
  | otherwise                      = fixedInfoTableSizeW
-- | Words present in every standard info table: layout and type.
fixedInfoTableSizeW :: WordOff
fixedInfoTableSizeW = 2 -- layout, type

-- | Extra words present when profiling is on.
profInfoTableSizeW :: WordOff
profInfoTableSizeW = 2

-- | Upper bound on the size of a standard info table, including the
-- entry word used when not compiling with tables-next-to-code.
maxStdInfoTableSizeW :: WordOff
maxStdInfoTableSizeW =
  1 {- entry, when !tablesNextToCode -}
  + fixedInfoTableSizeW
  + profInfoTableSizeW

-- | Upper bound on the size of a return-point info table, which may
-- additionally carry an SRT label word.
maxRetInfoTableSizeW :: WordOff
maxRetInfoTableSizeW =
  maxStdInfoTableSizeW
  + 1 {- srt label -}

-- | Standard info table size in bytes.
stdInfoTableSizeB :: DynFlags -> ByteOff
stdInfoTableSizeB dflags = stdInfoTableSizeW dflags * wORD_SIZE dflags
-- | Byte offset of the SRT bitmap half-word, which is in the
-- *higher-addressed* part of the type_lit.
stdSrtBitmapOffset :: DynFlags -> ByteOff
stdSrtBitmapOffset dflags = stdInfoTableSizeB dflags - hALF_WORD_SIZE dflags

-- | Byte offset of the closure type half-word.
stdClosureTypeOffset :: DynFlags -> ByteOff
stdClosureTypeOffset dflags = stdInfoTableSizeB dflags - wORD_SIZE dflags

-- | Byte offsets of the ptrs and non-ptrs half-words of the layout
-- word: ptrs in the lower-addressed half, non-ptrs in the higher.
stdPtrsOffset, stdNonPtrsOffset :: DynFlags -> ByteOff
stdPtrsOffset    dflags = stdInfoTableSizeB dflags - 2 * wORD_SIZE dflags
stdNonPtrsOffset dflags = stdInfoTableSizeB dflags - 2 * wORD_SIZE dflags + hALF_WORD_SIZE dflags
|
lukexi/ghc
|
compiler/cmm/CmmInfo.hs
|
Haskell
|
bsd-3-clause
| 20,843
|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<!-- JavaHelp helpset for the ZAP WebSockets add-on, Filipino (fil-PH)
     localisation.  Declares the standard TOC, index, search and
     favourites views, backed by map.jhm, toc.xml and index.xml. -->
<helpset version="2.0" xml:lang="fil-PH">
  <title>Mga WebSocket | ZAP Extension</title>
  <maps>
    <homeID>top</homeID>
    <mapref location="map.jhm"/>
  </maps>
  <view>
    <name>TOC</name>
    <label>Mga Nilalaman</label>
    <type>org.zaproxy.zap.extension.help.ZapTocView</type>
    <data>toc.xml</data>
  </view>
  <view>
    <name>Index</name>
    <label>Index</label>
    <type>javax.help.IndexView</type>
    <data>index.xml</data>
  </view>
  <view>
    <name>Search</name>
    <label>Paghahanap</label>
    <type>javax.help.SearchView</type>
    <data engine="com.sun.java.help.search.DefaultSearchEngine">
      JavaHelpSearch
    </data>
  </view>
  <view>
    <name>Favorites</name>
    <label>Mga paborito</label>
    <type>javax.help.FavoritesView</type>
  </view>
</helpset>
|
thc202/zap-extensions
|
addOns/websocket/src/main/javahelp/org/zaproxy/zap/extension/websocket/resources/help_fil_PH/helpset_fil_PH.hs
|
Haskell
|
apache-2.0
| 987
|
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-| Unittests for ganeti-htools.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Test.Ganeti.Luxi (testLuxi) where
import Test.HUnit
import Test.QuickCheck
import Test.QuickCheck.Monadic (monadicIO, run, stop)
import Data.List
import Control.Applicative
import Control.Concurrent (forkIO)
import Control.Exception (bracket)
import qualified Text.JSON as J
import Test.Ganeti.OpCodes ()
import Test.Ganeti.Query.Language (genFilter)
import Test.Ganeti.TestCommon
import Test.Ganeti.TestHelper
import Test.Ganeti.Types (genReasonTrail)
import Ganeti.BasicTypes
import qualified Ganeti.Luxi as Luxi
import qualified Ganeti.UDSServer as US
{-# ANN module "HLint: ignore Use camelCase" #-}
-- * Luxi tests
-- Template-Haskell-derived Arbitrary instance for the request-type
-- enumeration.
$(genArbitrary ''Luxi.LuxiReq)

-- | Generate an arbitrary Luxi operation: pick a request type first,
-- then build a matching operation, using purpose-built generators
-- (genFQDN, genFields, genFilter, ...) where plain 'arbitrary' would
-- not produce well-formed arguments.
instance Arbitrary Luxi.LuxiOp where
  arbitrary = do
    lreq <- arbitrary
    case lreq of
      Luxi.ReqQuery -> Luxi.Query <$> arbitrary <*> genFields <*> genFilter
      Luxi.ReqQueryFields -> Luxi.QueryFields <$> arbitrary <*> genFields
      Luxi.ReqQueryNodes -> Luxi.QueryNodes <$> listOf genFQDN <*>
                            genFields <*> arbitrary
      Luxi.ReqQueryGroups -> Luxi.QueryGroups <$> arbitrary <*>
                             arbitrary <*> arbitrary
      Luxi.ReqQueryNetworks -> Luxi.QueryNetworks <$> arbitrary <*>
                               arbitrary <*> arbitrary
      Luxi.ReqQueryInstances -> Luxi.QueryInstances <$> listOf genFQDN <*>
                                genFields <*> arbitrary
      Luxi.ReqQueryFilters -> Luxi.QueryFilters <$> arbitrary <*> genFields
      Luxi.ReqReplaceFilter -> Luxi.ReplaceFilter <$> genMaybe genUUID <*>
                               arbitrary <*> arbitrary <*> arbitrary <*>
                               genReasonTrail
      Luxi.ReqDeleteFilter -> Luxi.DeleteFilter <$> genUUID
      Luxi.ReqQueryJobs -> Luxi.QueryJobs <$> arbitrary <*> genFields
      Luxi.ReqQueryExports -> Luxi.QueryExports <$>
                              listOf genFQDN <*> arbitrary
      Luxi.ReqQueryConfigValues -> Luxi.QueryConfigValues <$> genFields
      Luxi.ReqQueryClusterInfo -> pure Luxi.QueryClusterInfo
      Luxi.ReqQueryTags -> do
        -- The tag name generator depends on the chosen tag kind.
        kind <- arbitrary
        Luxi.QueryTags kind <$> genLuxiTagName kind
      Luxi.ReqSubmitJob -> Luxi.SubmitJob <$> resize maxOpCodes arbitrary
      Luxi.ReqSubmitJobToDrainedQueue -> Luxi.SubmitJobToDrainedQueue <$>
                                         resize maxOpCodes arbitrary
      Luxi.ReqSubmitManyJobs -> Luxi.SubmitManyJobs <$>
                                resize maxOpCodes arbitrary
      Luxi.ReqWaitForJobChange -> Luxi.WaitForJobChange <$> arbitrary <*>
                                  genFields <*> pure J.JSNull <*>
                                  pure J.JSNull <*> arbitrary
      Luxi.ReqPickupJob -> Luxi.PickupJob <$> arbitrary
      Luxi.ReqArchiveJob -> Luxi.ArchiveJob <$> arbitrary
      Luxi.ReqAutoArchiveJobs -> Luxi.AutoArchiveJobs <$> arbitrary <*>
                                 arbitrary
      Luxi.ReqCancelJob -> Luxi.CancelJob <$> arbitrary <*> arbitrary
      Luxi.ReqChangeJobPriority -> Luxi.ChangeJobPriority <$> arbitrary <*>
                                   arbitrary
      Luxi.ReqSetDrainFlag -> Luxi.SetDrainFlag <$> arbitrary
      Luxi.ReqSetWatcherPause -> Luxi.SetWatcherPause <$> arbitrary
-- | Simple check that encoding/decoding of LuxiOp works: building a
-- wire call from an operation and decoding it again must round-trip
-- back to the same operation.
prop_CallEncoding :: Luxi.LuxiOp -> Property
prop_CallEncoding op =
  (US.parseCall (US.buildCall (Luxi.strOfOp op) (Luxi.opToArgs op))
   >>= uncurry Luxi.decodeLuxiCall) ==? Ok op
-- | Server side of the ping-pong test: echo every message received
-- back to the client, stopping as soon as a receive yields anything
-- other than 'Luxi.RecvOk'.
luxiServerPong :: Luxi.Client -> IO ()
luxiServerPong client = do
  incoming <- Luxi.recvMsgExt client
  case incoming of
    Luxi.RecvOk payload -> do
      Luxi.sendMsg client payload
      luxiServerPong client
    _ -> return ()
-- | Client side of the ping-pong test: send each message in turn and
-- collect the server's reply for each one.
luxiClientPong :: Luxi.Client -> [String] -> IO [String]
luxiClientPong client msgs = mapM exchange msgs
  where exchange m = Luxi.sendMsg client m >> Luxi.recvMsg client
-- | Monadic check that, given a server socket, we can connect via a
-- client to it, and that we can send a list of arbitrary messages and
-- get back what we sent.
prop_ClientServer :: [[DNSChar]] -> Property
prop_ClientServer dnschars = monadicIO $ do
  let msgs = map (map dnsGetChar) dnschars
  fpath <- run $ getTempFileName "luxitest"
  -- we need to create the server first, otherwise (if we do it in the
  -- forked thread) the client could try to connect to it before it's
  -- ready
  server <- run $ Luxi.getLuxiServer False fpath
  -- fork the server responder; the bracket guarantees both the
  -- accepted client and the server socket are closed afterwards
  _ <- run . forkIO $
    bracket
      (Luxi.acceptClient server)
      (\c -> Luxi.closeClient c >> Luxi.closeServer server)
      luxiServerPong
  replies <- run $
    bracket
      (Luxi.getLuxiClient fpath)
      Luxi.closeClient
      (`luxiClientPong` msgs)
  stop $ replies ==? msgs
-- | Check that Python and Haskell define the same Luxi requests list,
-- by asking the Python side to print luxi.REQ_ALL and diffing it
-- against 'Luxi.allLuxiCalls' in both directions.
case_AllDefined :: Assertion
case_AllDefined = do
  py_stdout <- runPython "from ganeti import luxi\n\
                         \print '\\n'.join(luxi.REQ_ALL)" "" >>=
               checkPythonResult
  let py_ops = sort $ lines py_stdout
      hs_ops = Luxi.allLuxiCalls
      extra_py = py_ops \\ hs_ops
      extra_hs = hs_ops \\ py_ops
  assertBool ("Luxi calls missing from Haskell code:\n" ++
              unlines extra_py) (null extra_py)
  assertBool ("Extra Luxi calls in the Haskell code:\n" ++
              unlines extra_hs) (null extra_hs)
-- Register the properties and test case above as the "Luxi" suite.
testSuite "Luxi"
          [ 'prop_CallEncoding
          , 'prop_ClientServer
          , 'case_AllDefined
          ]
|
apyrgio/ganeti
|
test/hs/Test/Ganeti/Luxi.hs
|
Haskell
|
bsd-2-clause
| 6,883
|
{-# LANGUAGE CPP, TypeFamilies #-}
-- -----------------------------------------------------------------------------
-- | This is the top-level module in the LLVM code generator.
--
module LlvmCodeGen ( llvmCodeGen, llvmFixupAsm ) where
#include "HsVersions.h"
import Llvm
import LlvmCodeGen.Base
import LlvmCodeGen.CodeGen
import LlvmCodeGen.Data
import LlvmCodeGen.Ppr
import LlvmCodeGen.Regs
import LlvmMangler
import CgUtils ( fixStgRegisters )
import Cmm
import Hoopl
import PprCmm
import BufWrite
import DynFlags
import ErrUtils
import FastString
import Outputable
import UniqSupply
import SysTools ( figureLlvmVersion )
import qualified Stream
import Control.Monad ( when )
import Data.IORef ( writeIORef )
import Data.Maybe ( fromMaybe, catMaybes )
import System.IO
-- -----------------------------------------------------------------------------
-- | Top-level of the LLVM Code generator
--
-- | Top-level entry of the LLVM code generator: probe the installed
-- LLVM version (caching it in the session's IORef), warn when it is
-- outside the supported range, then stream code generation into the
-- given output handle.
llvmCodeGen :: DynFlags -> Handle -> UniqSupply
               -> Stream.Stream IO RawCmmGroup ()
               -> IO ()
llvmCodeGen dflags h us cmm_stream
  = do bufh <- newBufHandle h
       -- Pass header
       showPass dflags "LLVM CodeGen"
       -- get llvm version, cache for later use
       ver <- (fromMaybe defaultLlvmVersion) `fmap` figureLlvmVersion dflags
       writeIORef (llvmVersion dflags) ver
       -- warn if unsupported
       debugTraceMsg dflags 2
            (text "Using LLVM version:" <+> text (show ver))
       let doWarn = wopt Opt_WarnUnsupportedLlvmVersion dflags
       when (ver < minSupportLlvmVersion && doWarn) $
            errorMsg dflags (text "You are using an old version of LLVM that"
                             <> text " isn't supported anymore!"
                             $+$ text "We will try though...")
       when (ver > maxSupportLlvmVersion && doWarn) $
            putMsg dflags (text "You are using a new version of LLVM that"
                           <> text " hasn't been tested yet!"
                           $+$ text "We will try though...")
       -- run code generation
       runLlvm dflags ver bufh us $
         llvmCodeGen' (liftStream cmm_stream)
       bFlush bufh
-- | Drive code generation within the LlvmM monad: emit the module
-- preamble, generate every Cmm group from the stream, declare aliases
-- for forward references, then emit the llvm.used postamble.
llvmCodeGen' :: Stream.Stream LlvmM RawCmmGroup () -> LlvmM ()
llvmCodeGen' cmm_stream
  = do  -- Preamble
        renderLlvm pprLlvmHeader
        ghcInternalFunctions
        cmmMetaLlvmPrelude
        -- Procedures
        let llvmStream = Stream.mapM llvmGroupLlvmGens cmm_stream
        _ <- Stream.collect llvmStream
        -- Declare aliases for forward references
        renderLlvm . pprLlvmData =<< generateExternDecls
        -- Postamble
        cmmUsedLlvmGens
-- | Generate code for one Cmm group: first register every procedure's
-- function type in the environment (so references resolve) while
-- collecting the data sections, then emit data and procedures.
llvmGroupLlvmGens :: RawCmmGroup -> LlvmM ()
llvmGroupLlvmGens cmm = do
        -- Insert functions into map, collect data
        let split (CmmData s d' )    = return $ Just (s, d')
            split (CmmProc h l live g) = do
              -- Set function type: prefer the info-table label when the
              -- entry block has one.
              let l' = case mapLookup (g_entry g) h of
                         Nothing                   -> l
                         Just (Statics info_lbl _) -> info_lbl
              lml <- strCLabel_llvm l'
              funInsert lml =<< llvmFunTy live
              return Nothing
        cdata <- fmap catMaybes $ mapM split cmm
        {-# SCC "llvm_datas_gen" #-} cmmDataLlvmGens cdata
        {-# SCC "llvm_procs_gen" #-} mapM_ cmmLlvmGen cmm
-- -----------------------------------------------------------------------------
-- | Do LLVM code generation on all these Cmms data sections.
--
-- | Do LLVM code generation on all these Cmms data sections: generate
-- the data, register the type of each global variable for later
-- lookup, aliasify the globals, and render the result.
cmmDataLlvmGens :: [(Section,CmmStatics)] -> LlvmM ()
cmmDataLlvmGens statics
  = do lmdatas <- mapM genLlvmData statics
       let (gss, tss) = unzip lmdatas
       -- Record each global variable's type in the environment.
       let regGlobal (LMGlobal (LMGlobalVar l ty _ _ _ _) _)
             = funInsert l ty
           regGlobal _ = return ()
       mapM_ regGlobal (concat gss)
       gss' <- mapM aliasify $ concat gss
       renderLlvm $ pprLlvmData (concat gss', concat tss)
-- | Complete LLVM code generation phase for a single top-level chunk
-- of Cmm.  Only 'CmmProc's produce output here; data declarations are
-- handled by 'cmmDataLlvmGens'.
cmmLlvmGen :: RawCmmDecl -> LlvmM ()
cmmLlvmGen cmm@CmmProc{} = do
    -- rewrite assignments to global regs
    dflags <- getDynFlag id
    let fixed_cmm = {-# SCC "llvm_fix_regs" #-}
                    fixStgRegisters dflags cmm
    dumpIfSetLlvm Opt_D_dump_opt_cmm "Optimised Cmm" (pprCmmGroup [fixed_cmm])
    -- generate llvm code from cmm
    llvmBC <- withClearVars $ genLlvmProc fixed_cmm
    -- allocate IDs for info table and code, so the mangler can later
    -- make sure they end up next to each other.
    itableSection <- freshSectionId
    _codeSection <- freshSectionId
    -- pretty print
    (docs, ivars) <- fmap unzip $ mapM (pprLlvmCmmDecl itableSection) llvmBC
    -- Output, note down used variables
    renderLlvm (vcat docs)
    mapM_ markUsedVar $ concat ivars
cmmLlvmGen _ = return ()
-- -----------------------------------------------------------------------------
-- | Generate meta data nodes
--
-- | Generate the TBAA metadata nodes described by 'stgTBAA' and emit
-- them.  Each entry allocates a fresh metadata ID, records it so later
-- lookups (including parent references) succeed, and links to its
-- parent node -- or a null literal when it has none.
cmmMetaLlvmPrelude :: LlvmM ()
cmmMetaLlvmPrelude = do
    nodes <- mapM mkTbaaMeta stgTBAA
    renderLlvm $ ppLlvmMetas nodes
  where
    mkTbaaMeta (uniq, name, parent) = do
      tbaaId <- getMetaUniqueId
      setUniqMeta uniq tbaaId
      parentId <- maybe (return Nothing) getUniqMeta parent
      let parentRef = maybe (MetaVar $ LMLitVar $ LMNullLit i8Ptr)
                            MetaNode
                            parentId
      return $ MetaUnamed tbaaId $ MetaStruct [MetaStr name, parentRef]
-- -----------------------------------------------------------------------------
-- | Marks variables as used where necessary
--
-- | Mark variables as used where necessary, by emitting the special
-- @llvm.used@ array.
cmmUsedLlvmGens :: LlvmM ()
cmmUsedLlvmGens = do
  -- LLVM would discard variables that are internal and not obviously
  -- used if we didn't provide these hints. This will generate a
  -- definition of the form
  --
  --   @llvm.used = appending global [42 x i8*] [i8* bitcast <var> to i8*, ...]
  --
  -- Which is the LLVM way of protecting them against getting removed.
  ivars <- getUsedVars
  let cast x = LMBitc (LMStaticPointer (pVarLift x)) i8Ptr
      ty = (LMArray (length ivars) i8Ptr)
      usedArray = LMStaticArray (map cast ivars) ty
      sectName = Just $ fsLit "llvm.metadata"
      lmUsedVar = LMGlobalVar (fsLit "llvm.used") ty Appending sectName Nothing Constant
      lmUsed = LMGlobal lmUsedVar (Just usedArray)
  -- Nothing to protect: emit nothing at all.
  if null ivars
     then return ()
     else renderLlvm $ pprLlvmData ([lmUsed], [])
|
green-haskell/ghc
|
compiler/llvmGen/LlvmCodeGen.hs
|
Haskell
|
bsd-3-clause
| 6,448
|
--------------------------------------------------------------------
-- |
-- Module : XMonad.Util.EZConfig
-- Copyright : Devin Mullins <me@twifkak.com>
-- Brent Yorgey <byorgey@gmail.com> (key parsing)
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : Devin Mullins <me@twifkak.com>
--
-- Useful helper functions for amending the defaultConfig, and for
-- parsing keybindings specified in a special (emacs-like) format.
--
-- (See also "XMonad.Util.CustomKeys" in xmonad-contrib.)
--
--------------------------------------------------------------------
module XMonad.Util.EZConfig (
-- * Usage
-- $usage
-- * Adding or removing keybindings
additionalKeys, additionalKeysP,
removeKeys, removeKeysP,
additionalMouseBindings, removeMouseBindings,
-- * Emacs-style keybinding specifications
mkKeymap, checkKeymap,
mkNamedKeymap,
parseKey -- used by XMonad.Util.Paste
) where
import XMonad
import XMonad.Actions.Submap
import XMonad.Util.NamedActions
import qualified Data.Map as M
import Data.List (foldl', sortBy, groupBy, nub)
import Data.Ord (comparing)
import Data.Maybe
import Control.Arrow (first, (&&&))
import Text.ParserCombinators.ReadP
-- $usage
-- To use this module, first import it into your @~\/.xmonad\/xmonad.hs@:
--
-- > import XMonad.Util.EZConfig
--
-- Then, use one of the provided functions to modify your
-- configuration. You can use 'additionalKeys', 'removeKeys',
-- 'additionalMouseBindings', and 'removeMouseBindings' to easily add
-- and remove keybindings or mouse bindings. You can use 'mkKeymap'
-- to create a keymap using emacs-style keybinding specifications
-- like @\"M-x\"@ instead of @(modMask, xK_x)@, or 'additionalKeysP'
-- and 'removeKeysP' to easily add or remove emacs-style keybindings.
-- If you use emacs-style keybindings, the 'checkKeymap' function is
-- provided, suitable for adding to your 'startupHook', which can warn
-- you of any parse errors or duplicate bindings in your keymap.
--
-- For more information and usage examples, see the documentation
-- provided with each exported function, and check the xmonad config
-- archive (<http://haskell.org/haskellwiki/Xmonad/Config_archive>)
-- for some real examples of use.
-- |
-- Add or override keybindings from the existing set. Example use:
--
-- > main = xmonad $ defaultConfig { terminal = "urxvt" }
-- > `additionalKeys`
-- > [ ((mod1Mask, xK_m ), spawn "echo 'Hi, mom!' | dzen2 -p 4")
-- > , ((mod1Mask, xK_BackSpace), withFocused hide) -- N.B. this is an absurd thing to do
-- > ]
--
-- This overrides the previous definition of mod-m.
--
-- Note that, unlike in xmonad 0.4 and previous, you can't use modMask to refer
-- to the modMask you configured earlier. You must specify mod1Mask (or
-- whichever), or add your own @myModMask = mod1Mask@ line.
additionalKeys :: XConfig a -> [((ButtonMask, KeySym), X ())] -> XConfig a
additionalKeys conf keyList = conf { keys = mergedKeys }
  -- M.union is left-biased, so the supplied bindings win over any
  -- existing binding for the same (mask, keysym) pair.
  where mergedKeys cnf = M.fromList keyList `M.union` keys conf cnf
-- | Like 'additionalKeys', except using short @String@ key
-- descriptors like @\"M-m\"@ instead of @(modMask, xK_m)@, as
-- described in the documentation for 'mkKeymap'. For example:
--
-- > main = xmonad $ defaultConfig { terminal = "urxvt" }
-- > `additionalKeysP`
-- > [ ("M-m", spawn "echo 'Hi, mom!' | dzen2 -p 4")
-- > , ("M-<Backspace>", withFocused hide) -- N.B. this is an absurd thing to do
-- > ]
additionalKeysP :: XConfig l -> [(String, X ())] -> XConfig l
additionalKeysP conf keyList =
  -- Left-biased M.union: the bindings parsed from keyList override
  -- any existing binding for the same key.
  conf { keys = \cnf -> M.union (mkKeymap cnf keyList) (keys conf cnf) }
-- |
-- Remove standard keybindings you're not using. Example use:
--
-- > main = xmonad $ defaultConfig { terminal = "urxvt" }
-- > `removeKeys` [(mod1Mask .|. shiftMask, n) | n <- [xK_1 .. xK_9]]
removeKeys :: XConfig a -> [(ButtonMask, KeySym)] -> XConfig a
removeKeys conf keyList =
  -- Removal is by exact (mask, keysym) pair via M.difference.
  conf { keys = \cnf -> keys conf cnf `M.difference` M.fromList (zip keyList $ repeat ()) }
-- | Like 'removeKeys', except using short @String@ key descriptors
-- like @\"M-m\"@ instead of @(modMask, xK_m)@, as described in the
-- documentation for 'mkKeymap'. For example:
--
-- > main = xmonad $ defaultConfig { terminal = "urxvt" }
-- > `removeKeysP` ["M-S-" ++ [n] | n <- ['1'..'9']]
removeKeysP :: XConfig l -> [String] -> XConfig l
removeKeysP conf keyList =
  -- The descriptors are parsed into a throwaway keymap (actions are
  -- dummy 'return ()') purely so their keys can be subtracted.
  conf { keys = \cnf -> keys conf cnf `M.difference` mkKeymap cnf (zip keyList $ repeat (return ())) }
-- | Like 'additionalKeys', but for mouse bindings.
additionalMouseBindings :: XConfig a -> [((ButtonMask, Button), Window -> X ())] -> XConfig a
additionalMouseBindings conf mouseBindingsList =
  -- Left-biased union: the supplied bindings override existing ones.
  conf { mouseBindings = \cnf -> M.union (M.fromList mouseBindingsList) (mouseBindings conf cnf) }

-- | Like 'removeKeys', but for mouse bindings.
removeMouseBindings :: XConfig a -> [(ButtonMask, Button)] -> XConfig a
removeMouseBindings conf mouseBindingList =
  conf { mouseBindings = \cnf -> mouseBindings conf cnf `M.difference`
                                 M.fromList (zip mouseBindingList $ repeat ()) }
--------------------------------------------------------------
-- Keybinding parsing ---------------------------------------
--------------------------------------------------------------
-- | Given a config (used to determine the proper modifier key to use)
-- and a list of @(String, X ())@ pairs, create a key map by parsing
-- the key sequence descriptions contained in the Strings. The key
-- sequence descriptions are \"emacs-style\": @M-@, @C-@, @S-@, and
-- @M\#-@ denote mod, control, shift, and mod1-mod5 (where @\#@ is
-- replaced by the appropriate number) respectively. Note that if
-- you want to make a keybinding using \'alt\' even though you use a
-- different key (like the \'windows\' key) for \'mod\', you can use
-- something like @\"M1-x\"@ for alt+x (check the output of @xmodmap@
-- to see which mod key \'alt\' is bound to). Some special keys can
-- also be specified by enclosing their name in angle brackets.
--
-- For example, @\"M-C-x\"@ denotes mod+ctrl+x; @\"S-\<Escape\>\"@
-- denotes shift-escape; @\"M1-C-\<Delete\>\"@ denotes alt+ctrl+delete
-- (assuming alt is bound to mod1, which is common).
--
-- Sequences of keys can also be specified by separating the key
-- descriptions with spaces. For example, @\"M-x y \<Down\>\"@ denotes the
-- sequence of keys mod+x, y, down. Submaps (see
-- "XMonad.Actions.Submap") will be automatically generated to
-- correctly handle these cases.
--
-- So, for example, a complete key map might be specified as
--
-- > keys = \c -> mkKeymap c $
-- > [ ("M-S-<Return>", spawn $ terminal c)
-- > , ("M-x w", spawn "xmessage 'woohoo!'") -- type mod+x then w to pop up 'woohoo!'
-- > , ("M-x y", spawn "xmessage 'yay!'") -- type mod+x then y to pop up 'yay!'
-- > , ("M-S-c", kill)
-- > ]
--
-- Alternatively, you can use 'additionalKeysP' to automatically
-- create a keymap and add it to your config.
--
-- Here is a complete list of supported special keys. Note that a few
-- keys, such as the arrow keys, have synonyms. If there are other
-- special keys you would like to see supported, feel free to submit a
-- patch, or ask on the xmonad mailing list; adding special keys is
-- quite simple.
--
-- > <Backspace>
-- > <Tab>
-- > <Return>
-- > <Pause>
-- > <Scroll_lock>
-- > <Sys_Req>
-- > <Print>
-- > <Escape>, <Esc>
-- > <Delete>
-- > <Home>
-- > <Left>, <L>
-- > <Up>, <U>
-- > <Right>, <R>
-- > <Down>, <D>
-- > <Page_Up>
-- > <Page_Down>
-- > <End>
-- > <Insert>
-- > <Break>
-- > <Space>
-- > <F1>-<F24>
-- > <KP_Space>
-- > <KP_Tab>
-- > <KP_Enter>
-- > <KP_F1>
-- > <KP_F2>
-- > <KP_F3>
-- > <KP_F4>
-- > <KP_Home>
-- > <KP_Left>
-- > <KP_Up>
-- > <KP_Right>
-- > <KP_Down>
-- > <KP_Prior>
-- > <KP_Page_Up>
-- > <KP_Next>
-- > <KP_Page_Down>
-- > <KP_End>
-- > <KP_Begin>
-- > <KP_Insert>
-- > <KP_Delete>
-- > <KP_Equal>
-- > <KP_Multiply>
-- > <KP_Add>
-- > <KP_Separator>
-- > <KP_Subtract>
-- > <KP_Decimal>
-- > <KP_Divide>
-- > <KP_0>-<KP_9>
--
-- Long list of multimedia keys. Please note that not all keys may be
-- present in your particular setup although most likely they will do.
--
-- > <XF86ModeLock>
-- > <XF86MonBrightnessUp>
-- > <XF86MonBrightnessDown>
-- > <XF86KbdLightOnOff>
-- > <XF86KbdBrightnessUp>
-- > <XF86KbdBrightnessDown>
-- > <XF86Standby>
-- > <XF86AudioLowerVolume>
-- > <XF86AudioMute>
-- > <XF86AudioRaiseVolume>
-- > <XF86AudioPlay>
-- > <XF86AudioStop>
-- > <XF86AudioPrev>
-- > <XF86AudioNext>
-- > <XF86HomePage>
-- > <XF86Mail>
-- > <XF86Start>
-- > <XF86Search>
-- > <XF86AudioRecord>
-- > <XF86Calculator>
-- > <XF86Memo>
-- > <XF86ToDoList>
-- > <XF86Calendar>
-- > <XF86PowerDown>
-- > <XF86ContrastAdjust>
-- > <XF86RockerUp>
-- > <XF86RockerDown>
-- > <XF86RockerEnter>
-- > <XF86Back>
-- > <XF86Forward>
-- > <XF86Stop>
-- > <XF86Refresh>
-- > <XF86PowerOff>
-- > <XF86WakeUp>
-- > <XF86Eject>
-- > <XF86ScreenSaver>
-- > <XF86WWW>
-- > <XF86Sleep>
-- > <XF86Favorites>
-- > <XF86AudioPause>
-- > <XF86AudioMedia>
-- > <XF86MyComputer>
-- > <XF86VendorHome>
-- > <XF86LightBulb>
-- > <XF86Shop>
-- > <XF86History>
-- > <XF86OpenURL>
-- > <XF86AddFavorite>
-- > <XF86HotLinks>
-- > <XF86BrightnessAdjust>
-- > <XF86Finance>
-- > <XF86Community>
-- > <XF86AudioRewind>
-- > <XF86XF86BackForward>
-- > <XF86Launch0>-<XF86Launch9>, <XF86LaunchA>-<XF86LaunchF>
-- > <XF86ApplicationLeft>
-- > <XF86ApplicationRight>
-- > <XF86Book>
-- > <XF86CD>
-- > <XF86Calculater>
-- > <XF86Clear>
-- > <XF86Close>
-- > <XF86Copy>
-- > <XF86Cut>
-- > <XF86Display>
-- > <XF86DOS>
-- > <XF86Documents>
-- > <XF86Excel>
-- > <XF86Explorer>
-- > <XF86Game>
-- > <XF86Go>
-- > <XF86iTouch>
-- > <XF86LogOff>
-- > <XF86Market>
-- > <XF86Meeting>
-- > <XF86MenuKB>
-- > <XF86MenuPB>
-- > <XF86MySites>
-- > <XF86New>
-- > <XF86News>
-- > <XF86OfficeHome>
-- > <XF86Open>
-- > <XF86Option>
-- > <XF86Paste>
-- > <XF86Phone>
-- > <XF86Q>
-- > <XF86Reply>
-- > <XF86Reload>
-- > <XF86RotateWindows>
-- > <XF86RotationPB>
-- > <XF86RotationKB>
-- > <XF86Save>
-- > <XF86ScrollUp>
-- > <XF86ScrollDown>
-- > <XF86ScrollClick>
-- > <XF86Send>
-- > <XF86Spell>
-- > <XF86SplitScreen>
-- > <XF86Support>
-- > <XF86TaskPane>
-- > <XF86Terminal>
-- > <XF86Tools>
-- > <XF86Travel>
-- > <XF86UserPB>
-- > <XF86User1KB>
-- > <XF86User2KB>
-- > <XF86Video>
-- > <XF86WheelButton>
-- > <XF86Word>
-- > <XF86Xfer>
-- > <XF86ZoomIn>
-- > <XF86ZoomOut>
-- > <XF86Away>
-- > <XF86Messenger>
-- > <XF86WebCam>
-- > <XF86MailForward>
-- > <XF86Pictures>
-- > <XF86Music>
-- > <XF86TouchpadToggle>
-- > <XF86_Switch_VT_1>-<XF86_Switch_VT_12>
-- > <XF86_Ungrab>
-- > <XF86_ClearGrab>
-- > <XF86_Next_VMode>
-- > <XF86_Prev_VMode>
mkKeymap :: XConfig l -> [(String, X ())] -> M.Map (KeyMask, KeySym) (X ())
mkKeymap c = M.fromList . mkSubmaps . readKeymap c

-- Variant of 'mkKeymap' that carries 'NamedAction's and returns an
-- association list instead of a Map.
mkNamedKeymap :: XConfig l -> [(String, NamedAction)] -> [((KeyMask, KeySym), NamedAction)]
mkNamedKeymap c = mkNamedSubmaps . readKeymap c

-- | Given a list of pairs of parsed key sequences and actions,
-- group them into submaps in the appropriate way.
mkNamedSubmaps :: [([(KeyMask, KeySym)], NamedAction)] -> [((KeyMask, KeySym), NamedAction)]
mkNamedSubmaps = mkSubmaps' submapName

mkSubmaps :: [ ([(KeyMask,KeySym)], X ()) ] -> [((KeyMask, KeySym), X ())]
mkSubmaps = mkSubmaps' $ submap . M.fromList

-- Worker for the two builders above.  'subm' wraps a list of
-- (key, action) pairs into a single submap action.  Sequences are
-- sorted and grouped on their first key; a group consisting of a
-- single one-key sequence becomes a direct binding, anything else
-- recurses on the sequence tails.
mkSubmaps' :: (Ord a) => ([(a, c)] -> c) -> [([a], c)] -> [(a, c)]
mkSubmaps' subm binds = map combine gathered
  where gathered = groupBy fstKey
                 . sortBy (comparing fst)
                 $ binds
        -- 'head' is safe here: groupBy never yields empty groups.
        combine [([k],act)] = (k,act)
        combine ks = (head . fst . head $ ks,
                      subm . mkSubmaps' subm $ map (first tail) ks)
        fstKey = (==) `on` (head . fst)

-- Local copy of Data.Function's 'on' (Data.Function is not imported
-- in this module).
on :: (a -> a -> b) -> (c -> a) -> c -> c -> b
op `on` f = \x y -> f x `op` f y
-- | Given a configuration record and a list of (key sequence
-- description, action) pairs, parse the key sequences into lists of
-- @(KeyMask,KeySym)@ pairs.  Key sequences which fail to parse will
-- be ignored.
readKeymap :: XConfig l -> [(String, t)] -> [([(KeyMask, KeySym)], t)]
readKeymap c = mapMaybe (maybeKeys . first (readKeySequence c))
  where maybeKeys (Nothing,_) = Nothing
        maybeKeys (Just k, act) = Just (k, act)

-- | Parse a sequence of keys, returning Nothing if there is
-- a parse failure (no parse, or ambiguous parse).
-- NOTE(review): 'listToMaybe' picks the *first* complete parse, so an
-- ambiguous input actually yields that parse rather than Nothing --
-- confirm this matches the contract stated above.
readKeySequence :: XConfig l -> String -> Maybe [(KeyMask, KeySym)]
readKeySequence c = listToMaybe . parses
  -- Only keep parses that consumed the whole input (null leftover).
  where parses = map fst . filter (null.snd) . readP_to_S (parseKeySequence c)

-- | Parse a sequence of key combinations separated by spaces, e.g.
-- @\"M-c x C-S-2\"@ (mod+c, x, ctrl+shift+2).
parseKeySequence :: XConfig l -> ReadP [(KeyMask, KeySym)]
parseKeySequence c = sepBy1 (parseKeyCombo c) (many1 $ char ' ')

-- | Parse a modifier-key combination such as "M-C-s" (mod+ctrl+s).
-- All parsed modifier masks are OR-ed together.
parseKeyCombo :: XConfig l -> ReadP (KeyMask, KeySym)
parseKeyCombo c = do mods <- many (parseModifier c)
                     k <- parseKey
                     return (foldl' (.|.) 0 mods, k)
-- | Parse a modifier: either M- (user-defined mod-key),
-- C- (control), S- (shift), or M#- where # is an integer
-- from 1 to 5 (mod1Mask through mod5Mask).
parseModifier :: XConfig l -> ReadP KeyMask
parseModifier c = (string "M-" >> return (modMask c))
                  +++ (string "C-" >> return controlMask)
                  +++ (string "S-" >> return shiftMask)
                  +++ do _ <- char 'M'
                         n <- satisfy (`elem` ['1'..'5'])
                         _ <- char '-'
                         -- 'M1'..'M5': digit indexes the mask list below.
                         return $ indexMod (read [n] - 1)
  where indexMod = (!!) [mod1Mask,mod2Mask,mod3Mask,mod4Mask,mod5Mask]
-- | Parse an unmodified basic key, like @\"x\"@, @\"<F1>\"@, etc.
parseKey :: ReadP KeySym
parseKey = parseRegular +++ parseSpecial

-- | Parse a regular key name (represented by itself).  Printable
-- ASCII characters '!'..'~' map one-for-one onto the contiguous
-- keysym range xK_exclam..xK_asciitilde.
parseRegular :: ReadP KeySym
parseRegular = choice [ char s >> return k
                      | (s,k) <- zip ['!'..'~'] [xK_exclam..xK_asciitilde]
                      ]

-- | Parse a special key name (one enclosed in angle brackets),
-- looked up in 'keyNames'.
parseSpecial :: ReadP KeySym
parseSpecial = do _ <- char '<'
                  key <- choice [ string name >> return k
                                | (name,k) <- keyNames
                                ]
                  _ <- char '>'
                  return key

-- | A list of all special key names and their associated KeySyms.
keyNames :: [(String, KeySym)]
keyNames = functionKeys ++ specialKeys ++ multimediaKeys
-- | A list pairing function key descriptor strings (e.g. @\"<F2>\"@) with
-- the associated KeySyms: F1 through F24.
functionKeys :: [(String, KeySym)]
functionKeys = [ ('F' : show n, k)
               | (n,k) <- zip ([1..24] :: [Int]) [xK_F1..] ]

-- | A list of special key names and their corresponding KeySyms.
-- Note the synonyms: Esc for Escape, and L/U/R/D for the arrow keys.
specialKeys :: [(String, KeySym)]
specialKeys = [ ("Backspace"  , xK_BackSpace)
              , ("Tab"        , xK_Tab)
              , ("Return"     , xK_Return)
              , ("Pause"      , xK_Pause)
              , ("Scroll_lock", xK_Scroll_Lock)
              , ("Sys_Req"    , xK_Sys_Req)
              , ("Print"      , xK_Print)
              , ("Escape"     , xK_Escape)
              , ("Esc"        , xK_Escape)
              , ("Delete"     , xK_Delete)
              , ("Home"       , xK_Home)
              , ("Left"       , xK_Left)
              , ("Up"         , xK_Up)
              , ("Right"      , xK_Right)
              , ("Down"       , xK_Down)
              , ("L"          , xK_Left)
              , ("U"          , xK_Up)
              , ("R"          , xK_Right)
              , ("D"          , xK_Down)
              , ("Page_Up"    , xK_Page_Up)
              , ("Page_Down"  , xK_Page_Down)
              , ("End"        , xK_End)
              , ("Insert"     , xK_Insert)
              , ("Break"      , xK_Break)
              , ("Space"      , xK_space)
              , ("KP_Space"   , xK_KP_Space)
              , ("KP_Tab"     , xK_KP_Tab)
              , ("KP_Enter"   , xK_KP_Enter)
              , ("KP_F1"      , xK_KP_F1)
              , ("KP_F2"      , xK_KP_F2)
              , ("KP_F3"      , xK_KP_F3)
              , ("KP_F4"      , xK_KP_F4)
              , ("KP_Home"    , xK_KP_Home)
              , ("KP_Left"    , xK_KP_Left)
              , ("KP_Up"      , xK_KP_Up)
              , ("KP_Right"   , xK_KP_Right)
              , ("KP_Down"    , xK_KP_Down)
              , ("KP_Prior"   , xK_KP_Prior)
              , ("KP_Page_Up" , xK_KP_Page_Up)
              , ("KP_Next"    , xK_KP_Next)
              , ("KP_Page_Down", xK_KP_Page_Down)
              , ("KP_End"     , xK_KP_End)
              , ("KP_Begin"   , xK_KP_Begin)
              , ("KP_Insert"  , xK_KP_Insert)
              , ("KP_Delete"  , xK_KP_Delete)
              , ("KP_Equal"   , xK_KP_Equal)
              , ("KP_Multiply", xK_KP_Multiply)
              , ("KP_Add"     , xK_KP_Add)
              , ("KP_Separator", xK_KP_Separator)
              , ("KP_Subtract", xK_KP_Subtract)
              , ("KP_Decimal" , xK_KP_Decimal)
              , ("KP_Divide"  , xK_KP_Divide)
              , ("KP_0"       , xK_KP_0)
              , ("KP_1"       , xK_KP_1)
              , ("KP_2"       , xK_KP_2)
              , ("KP_3"       , xK_KP_3)
              , ("KP_4"       , xK_KP_4)
              , ("KP_5"       , xK_KP_5)
              , ("KP_6"       , xK_KP_6)
              , ("KP_7"       , xK_KP_7)
              , ("KP_8"       , xK_KP_8)
              , ("KP_9"       , xK_KP_9)
              ]
-- | List of multimedia (XF86) keys. Any keysym the running X server
-- does not know about is omitted: 'stringToKeysym' returns 'noSymbol'
-- for unknown names, and those entries are filtered out here.
multimediaKeys :: [(String, KeySym)]
multimediaKeys = filter ((/= noSymbol) . snd) . map (id &&& stringToKeysym) $
                 [ "XF86ModeLock"
                 , "XF86MonBrightnessUp"
                 , "XF86MonBrightnessDown"
                 , "XF86KbdLightOnOff"
                 , "XF86KbdBrightnessUp"
                 , "XF86KbdBrightnessDown"
                 , "XF86Standby"
                 , "XF86AudioLowerVolume"
                 , "XF86AudioMute"
                 , "XF86AudioRaiseVolume"
                 , "XF86AudioPlay"
                 , "XF86AudioStop"
                 , "XF86AudioPrev"
                 , "XF86AudioNext"
                 , "XF86HomePage"
                 , "XF86Mail"
                 , "XF86Start"
                 , "XF86Search"
                 , "XF86AudioRecord"
                 , "XF86Calculator"
                 , "XF86Memo"
                 , "XF86ToDoList"
                 , "XF86Calendar"
                 , "XF86PowerDown"
                 , "XF86ContrastAdjust"
                 , "XF86RockerUp"
                 , "XF86RockerDown"
                 , "XF86RockerEnter"
                 , "XF86Back"
                 , "XF86Forward"
                 , "XF86Stop"
                 , "XF86Refresh"
                 , "XF86PowerOff"
                 , "XF86WakeUp"
                 , "XF86Eject"
                 , "XF86ScreenSaver"
                 , "XF86WWW"
                 , "XF86Sleep"
                 , "XF86Favorites"
                 , "XF86AudioPause"
                 , "XF86AudioMedia"
                 , "XF86MyComputer"
                 , "XF86VendorHome"
                 , "XF86LightBulb"
                 , "XF86Shop"
                 , "XF86History"
                 , "XF86OpenURL"
                 , "XF86AddFavorite"
                 , "XF86HotLinks"
                 , "XF86BrightnessAdjust"
                 , "XF86Finance"
                 , "XF86Community"
                 , "XF86AudioRewind"
                 , "XF86BackForward"
                 , "XF86Launch0"
                 , "XF86Launch1"
                 , "XF86Launch2"
                 , "XF86Launch3"
                 , "XF86Launch4"
                 , "XF86Launch5"
                 , "XF86Launch6"
                 , "XF86Launch7"
                 , "XF86Launch8"
                 , "XF86Launch9"
                 , "XF86LaunchA"
                 , "XF86LaunchB"
                 , "XF86LaunchC"
                 , "XF86LaunchD"
                 , "XF86LaunchE"
                 , "XF86LaunchF"
                 , "XF86ApplicationLeft"
                 , "XF86ApplicationRight"
                 , "XF86Book"
                 , "XF86CD"
                 , "XF86Calculater"
                 , "XF86Clear"
                 , "XF86Close"
                 , "XF86Copy"
                 , "XF86Cut"
                 , "XF86Display"
                 , "XF86DOS"
                 , "XF86Documents"
                 , "XF86Excel"
                 , "XF86Explorer"
                 , "XF86Game"
                 , "XF86Go"
                 , "XF86iTouch"
                 , "XF86LogOff"
                 , "XF86Market"
                 , "XF86Meeting"
                 , "XF86MenuKB"
                 , "XF86MenuPB"
                 , "XF86MySites"
                 , "XF86New"
                 , "XF86News"
                 , "XF86OfficeHome"
                 , "XF86Open"
                 , "XF86Option"
                 , "XF86Paste"
                 , "XF86Phone"
                 , "XF86Q"
                 , "XF86Reply"
                 , "XF86Reload"
                 , "XF86RotateWindows"
                 , "XF86RotationPB"
                 , "XF86RotationKB"
                 , "XF86Save"
                 , "XF86ScrollUp"
                 , "XF86ScrollDown"
                 , "XF86ScrollClick"
                 , "XF86Send"
                 , "XF86Spell"
                 , "XF86SplitScreen"
                 , "XF86Support"
                 , "XF86TaskPane"
                 , "XF86Terminal"
                 , "XF86Tools"
                 , "XF86Travel"
                 , "XF86UserPB"
                 , "XF86User1KB"
                 , "XF86User2KB"
                 , "XF86Video"
                 , "XF86WheelButton"
                 , "XF86Word"
                 , "XF86Xfer"
                 , "XF86ZoomIn"
                 , "XF86ZoomOut"
                 , "XF86Away"
                 , "XF86Messenger"
                 , "XF86WebCam"
                 , "XF86MailForward"
                 , "XF86Pictures"
                 , "XF86Music"
                 , "XF86TouchpadToggle"
                 , "XF86_Switch_VT_1"
                 , "XF86_Switch_VT_2"
                 , "XF86_Switch_VT_3"
                 , "XF86_Switch_VT_4"
                 , "XF86_Switch_VT_5"
                 , "XF86_Switch_VT_6"
                 , "XF86_Switch_VT_7"
                 , "XF86_Switch_VT_8"
                 , "XF86_Switch_VT_9"
                 , "XF86_Switch_VT_10"
                 , "XF86_Switch_VT_11"
                 , "XF86_Switch_VT_12"
                 , "XF86_Ungrab"
                 , "XF86_ClearGrab"
                 , "XF86_Next_VMode"
                 , "XF86_Prev_VMode" ]
-- | Given a configuration record and a list of (key sequence
-- description, action) pairs, check the key sequence descriptions
-- for validity, and warn the user (via a popup xmessage window) of
-- any unparseable or duplicate key sequences. This function is
-- appropriate for adding to your @startupHook@, and you are highly
-- encouraged to do so; otherwise, duplicate or unparseable
-- keybindings will be silently ignored.
--
-- For example, you might do something like this:
--
-- > main = xmonad $ myConfig
-- >
-- > myKeymap = [("S-M-c", kill), ...]
-- > myConfig = defaultConfig {
-- > ...
-- > keys = \c -> mkKeymap c myKeymap
-- > startupHook = return () >> checkKeymap myConfig myKeymap
-- > ...
-- > }
--
-- NOTE: the @return ()@ in the example above is very important!
-- Otherwise, you might run into problems with infinite mutual
-- recursion: the definition of myConfig depends on the definition of
-- startupHook, which depends on the definition of myConfig, ... and
-- so on. Actually, it's likely that the above example in particular
-- would be OK without the @return ()@, but making @myKeymap@ take
-- @myConfig@ as a parameter would definitely lead to
-- problems. Believe me. It, uh, happened to my friend. In... a
-- dream. Yeah. In any event, the @return () >>@ introduces enough
-- laziness to break the deadlock.
--
-- | Validate a keymap description and pop up an xmessage window
-- listing any unparseable or duplicate key sequences (see the long
-- usage note above).  Does nothing when the keymap is clean.
checkKeymap :: XConfig l -> [(String, a)] -> X ()
checkKeymap conf km = report (doKeymapCheck conf km)
  where
    report ([], [])   = return ()
    report (bad, dup) = spawn $ "xmessage 'Warning:\n"
                              ++ describe "bad" bad ++ "\n"
                              ++ describe "duplicate" dup ++ "'"
    describe _     [] = ""
    describe label xs = label ++ " keybindings detected: " ++ quoted xs
    quoted            = unwords . map (\b -> "\"" ++ b ++ "\"")
-- | Given a config and a list of (key sequence description, action)
-- pairs, check the key sequence descriptions for validity, returning
-- a list of unparseable key sequences and a list of duplicate key
-- sequences (first occurrence of each duplicated sequence).
doKeymapCheck :: XConfig l -> [(String,a)] -> ([String], [String])
doKeymapCheck conf km = (unparseable, duplicated)
  where
    -- Pair each description with its parse result.
    parsed      = [ (readKeySequence conf s, s) | (s, _) <- km ]
    unparseable = nub [ s | (Nothing, s) <- parsed ]
    -- Sort/group successfully-parsed sequences by key; a group with
    -- two or more members is a duplicate, and stable sorting makes the
    -- group head the first occurrence in the original keymap.
    duplicated  = [ s | ((_, s) : _ : _) <- groupBy ((==) `on` fst)
                                          . sortBy (comparing fst)
                                          $ [ (k, s) | (Just k, s) <- parsed ] ]
|
markus1189/xmonad-contrib-710
|
XMonad/Util/EZConfig.hs
|
Haskell
|
bsd-3-clause
| 26,039
|
-- | Small fixture module — presumably input for a function-definition
-- refactoring test (path suggests HaRe); keep definitions as-is.
module Test13 where
-- Rebuilds a non-empty list from head and tail; no equation for [],
-- so f is partial (pattern-match failure on the empty list).
f (x:xs) = x : xs
-- A constant built with cons: 1 : [1,2] == [1,1,2].
g = 1 : [1,2]
|
kmate/HaRe
|
old/testing/refacFunDef/Test13.hs
|
Haskell
|
bsd-3-clause
| 55
|
{-# LANGUAGE TypeFamilyDependencies #-}
{-# LANGUAGE TypeInType #-}
{-# LANGUAGE TypeOperators #-}
-- GHC regression test (#14164): an injective type family (@res -> a@)
-- defined by recursion over a promoted list must typecheck.
-- Keep the exact shape — it is the thing under test.
module T14164 where
-- A unary structure indexed by a type-level value @x@ of kind @a@.
data G (x :: a) = GNil | GCons (G x)
-- The injectivity annotation says the result list determines the kind
-- variable @a@.
type family F (xs :: [a]) (g :: G (z :: a)) = (res :: [a]) | res -> a where
  F (x:xs) GNil = x:xs
  F (x:xs) (GCons rest) = x:F xs rest
|
sdiehl/ghc
|
testsuite/tests/indexed-types/should_compile/T14164.hs
|
Haskell
|
bsd-3-clause
| 313
|
-- Original test case for #11627 (space_leak_001.hs)
-- Profiling regression test: strictly computes the product of
-- 1..100000 with foldl' and prints the digit count of the result.
-- The exact evaluation shape (foldl', Integer) is intentional.
import Data.List
main :: IO ()
main = print $ length $ show (foldl' (*) 1 [1..100000] :: Integer)
|
ezyang/ghc
|
testsuite/tests/profiling/should_run/T11627a.hs
|
Haskell
|
bsd-3-clause
| 153
|
{-# LANGUAGE TypeFamilies, RankNTypes #-}
-- GHC "should_fail" regression test (#10899): a default for an
-- associated type family mentioning a forall-bound variable must be
-- rejected.  The code below is intentionally ill-formed.
module T10899 where
class C a where
  type F a
  -- The forall'd @m@ in a type-family default is the error under test.
  type F a = forall m. m a
|
ezyang/ghc
|
testsuite/tests/indexed-types/should_fail/T10899.hs
|
Haskell
|
bsd-3-clause
| 118
|
{-# LANGUAGE TemplateHaskell, EmptyCase #-}
-- Trac #2431: empty case expression
-- now accepted
-- Regression test: a Template-Haskell-spliced @case@ with no
-- alternatives must compile under EmptyCase.
module Main where
import Language.Haskell.TH
f :: Int
-- Splices @case 'a' of {}@ — an empty case over a Char scrutinee.
f = $(caseE (litE $ CharL 'a') [])
main = print f
|
urbanslug/ghc
|
testsuite/tests/th/TH_emptycase.hs
|
Haskell
|
bsd-3-clause
| 217
|
-- Logic module
-- Functions for updating game state and responding to user input
module Logic(updateGameState, handleEvent) where
import State
import Piece
import Playfield
import Graphics.Gloss
import Graphics.Gloss.Interface.Pure.Game -- for Event
import System.Random
-- Normal piece falling velocity, in cells/second
pieceVelocity :: Float
pieceVelocity = 10

-- Falling velocity while the player holds the accelerate (down) key
acceleratedPieceVelocity :: Float
acceleratedPieceVelocity = 30

-- Velocity currently in effect, chosen by the state's accelerate flag
effectivePieceVelocity :: State -> Float
effectivePieceVelocity s
  | accelerate s = acceleratedPieceVelocity
  | otherwise    = pieceVelocity

-- Time to wait before dropping piece again (seconds per move)
effectivePiecePeriod :: State -> Float
effectivePiecePeriod = recip . effectivePieceVelocity
-- | Dispatch a Gloss input event to the matching game action:
-- left/right arrows move the piece two cells horizontally, the down
-- arrow toggles the acceleration flag on press/release, and 'a'/'s'
-- rotate.  All other events leave the state unchanged.
handleEvent :: Event -> State -> State
handleEvent (EventKey (SpecialKey KeyLeft) Down _ _) s = movePiece (-2) s
handleEvent (EventKey (SpecialKey KeyRight) Down _ _) s = movePiece 2 s
handleEvent (EventKey (SpecialKey KeyDown) Down _ _) s = s {accelerate = True}
handleEvent (EventKey (SpecialKey KeyDown) Up _ _) s = s {accelerate = False}
handleEvent (EventKey (Char 'a') Down _ _) s = rotateCW s
handleEvent (EventKey (Char 's') Down _ _) s = rotateCCW s
handleEvent _ s = s
-- Shifts the falling piece horizontally by the given offset, but only
-- when the resulting position is legal; otherwise the state is kept.
movePiece :: Int -> State -> State
movePiece offset st
  | canPieceBeAt (piece st) candidate (well st) = st {piecePos = candidate}
  | otherwise = st
  where
    candidate = (fst (piecePos st) + offset, snd (piecePos st))
-- Applies a transformation (e.g. a rotation) to the falling piece,
-- keeping the old state when the transformed piece would not fit.
transformPiece :: (Piece -> Piece) -> State -> State
transformPiece f st
  | canPieceBeAt transformed (piecePos st) (well st) = st {piece = transformed}
  | otherwise = st
  where
    transformed = f (piece st)
-- Rotates the falling piece clockwise, if possible
rotateCW :: State -> State
rotateCW = transformPiece pieceCW -- I feel SO badass for doing this!

-- Rotates the falling piece counter-clockwise, if possible.
-- (Type signature added for consistency with rotateCW.)
rotateCCW :: State -> State
rotateCCW = transformPiece pieceCCW
-- Update function passed to gloss: records the elapsed time and frame
-- delta on the state, then runs the per-frame update.
updateGameState :: Float -> State -> State
updateGameState dt st = unityStyleUpdate st {time = time st + dt, deltaTime = dt}
-- Per-frame update: count down the move timer by the frame delta and,
-- when it reaches zero, drop the piece and rearm the timer with the
-- current effective period.
unityStyleUpdate :: State -> State
unityStyleUpdate st
  | secondsToNextMove ticked <= 0 =
      applyMove ticked {secondsToNextMove = effectivePiecePeriod st}
  | otherwise = ticked
  where
    ticked = st {secondsToNextMove = secondsToNextMove st - deltaTime st}
-- Refactored from applyMove; also used for horizontal moves and
-- rotations: a placement is legal when it is inside the playfield and
-- does not collide with settled blocks.
canPieceBeAt :: Piece -> (Int, Int) -> Well -> Bool
canPieceBeAt p coord w = validPos coord p && not (pieceCollides p coord w)
-- Moves the current piece one cell down (two half-cells); when the
-- next position is illegal the piece is fixed in place and any filled
-- rows are cleared.
applyMove :: State -> State
applyMove st
  | canPieceBeAt (piece st) oneDown (well st) = st {piecePos = oneDown}
  | otherwise = handleFullRows (fixPiece st)
  where
    oneDown = (fst (piecePos st), snd (piecePos st) - 2)
-- Fixes the falling piece to its current position and resets the piece to a new one.
-- Renders the piece into the well, draws a fresh random piece (threading the
-- random seed through the state), and clears the acceleration flag so the
-- held-down key does not affect the next piece.
fixPiece :: State -> State
fixPiece s
  -- A piece fixed above y = -2 overflows the well: game over, restart.
  | ((snd (piecePos s)) > (-2)) = resetGameState s -- reset game state when 'fixing' a piece that overflows the well
  | otherwise = s
      { well = renderPiece (piece s) (piecePos s) (well s)
      , piece = randomPiece (fst reseed)
      , piecePos = (0, 0)
      , randomSeed = snd reseed
      , accelerate = False -- We don't want acceleration to affect next falling piece
      }
  where
    -- Fresh random Double in [0,1] plus the advanced generator.
    reseed :: (Double, StdGen)
    reseed = randomR (0.0, 1.0) (randomSeed s)
-- Removes filled rows from the well and awards the matching score.
handleFullRows :: State -> State
handleFullRows st = st {well = clearedWell, score = score st + linesToScore cleared}
  where
    (clearedWell, cleared) = clearAndCountFilledRows (well st)
-- Classic Tetris scoring: points awarded for clearing 0..4 lines at
-- once.  More than four simultaneous lines is impossible, so any other
-- count is a programming error.
linesToScore :: Int -> Int
linesToScore n = case n of
  0 -> 0
  1 -> 40
  2 -> 100
  3 -> 300
  4 -> 1200
  _ -> error "Invalid cleared Line count"
|
mgeorgoulopoulos/TetrisHaskellWeekend
|
Logic.hs
|
Haskell
|
mit
| 4,308
|
-- Tenn1518's XMonad configuration
import XMonad
import XMonad.Actions.Warp
import XMonad.Actions.Commands
import XMonad.Actions.RotSlaves
import XMonad.Util.EZConfig
import XMonad.Util.Ungrab
import XMonad.Util.Loggers
import qualified XMonad.StackSet as W
import XMonad.Hooks.ManageHelpers
import XMonad.Hooks.EwmhDesktops
import XMonad.Hooks.ManageDocks
import XMonad.Hooks.StatusBar
import XMonad.Hooks.StatusBar.PP
import XMonad.Hooks.DynamicLog
import XMonad.Layout.NoBorders
import XMonad.Layout.Spacing
import XMonad.Layout.TwoPane
import XMonad.Layout.TwoPanePersistent
main :: IO ()
-- Entry point: wire up EWMH/fullscreen support, dock handling and the
-- xmobar status bar around the configuration below.
main = xmonad . ewmhFullscreen . ewmh . docks . withSB mySB $ myConf
  where
    -- Status bar process plus the pretty-printer defined in myXmobarPP.
    mySB = statusBarProp "xmobar ~/.config/xmobar/xmobarrc" (pure myXmobarPP)
    myConf = def
      { modMask            = mod4Mask
      , terminal           = "alacritty -e zsh -c 'tmux a || tmux'"
      , focusFollowsMouse  = False
      , borderWidth        = 3
      , normalBorderColor  = "#000000"
      , focusedBorderColor = "#bd93f9"
      , layoutHook         = myLayoutHook
      , manageHook         = myManageHook
      , logHook            = dynamicLog
      }
      `additionalKeysP`
      -- Extra keybindings in EZConfig string notation.
      [ ("M-S-e"   , spawn "emacsclient -c -n -a 'emacs'")
      , ("M-<Space>" , spawn "rofi -modi windowcd,run -show combi --combi-modi windowcd,drun")
      , ("M-<Tab>" , sendMessage NextLayout)
      , ("M-S-<Tab>" , sendMessage FirstLayout)
      , ("M-S-;"   , commands >>= runCommand)
      -- Banish or beckon cursor, akin to Stump
      , ("M-S-b"   , banishScreen LowerRight)
      , ("M-b"     , warpToWindow 0.5 0.5)
      -- Rotate slave windows (TODO: don't clobber monitor keybinding)
      , ("M-r"     , rotSlavesUp)
      , ("M-S-r"   , rotSlavesDown)
      -- Gaps
      , ("M-g"     , toggleWindowSpacingEnabled)
      , ("M-["     , decScreenWindowSpacing 2)
      , ("M-]"     , incScreenWindowSpacing 2)
      -- brightness
      , ("<XF86MonBrightnessUp>"  , spawn "brightnessctl s +4%")
      , ("<XF86MonBrightnessDown>", spawn "brightnessctl s 4%-")
      -- volume
      , ("<XF86AudioLowerVolume>" , spawn "pamixer -d 4; vol-info")
      , ("<XF86AudioRaiseVolume>" , spawn "pamixer -i 4; vol-info")
      , ("<XF86AudioMute>"        , spawn "pamixer -t; vol-info")
      ]
-- | Command list for the M-S-; prompt: just the stock
-- XMonad.Actions.Commands defaults.
commands :: X [(String, X())]
commands = defaultCommands
-- sending XMonad state to XMobar
-- Pretty-printer: hides the layout name, underlines the current
-- workspace, dims workspaces without windows and truncates titles.
myXmobarPP :: PP
myXmobarPP = def
  { ppLayout          = const ""
  , ppCurrent         = wrap " " "" . xmobarBorder "Bottom" "#8be9fd" 3
  , ppHidden          = white . wrap " " ""
  , ppHiddenNoWindows = lowWhite . wrap " " ""
  , ppTitle           = shorten 50
  , ppSep             = " · "
  }
  where
    white, lowWhite :: String -> String
    white    = xmobarColor "#f8f8f2" ""
    lowWhite = xmobarColor "#bbbbbb" ""
-- Layouts: spaced Tall and TwoPane (both struts-aware, borderless when
-- only one window), plus an un-spaced Full layout.
myLayoutHook = smartBorders
             $ avoidStruts
             $ spacingWithEdge 3 (myTall
             ||| myTwoPane)
             ||| Full
  where
    -- Two panes, new windows split slave pane
    myTall = Tall nmaster delta ratio
    -- Two splits, new windows swap into slave pane
    myTwoPane = TwoPane delta gratio
    nmaster = 1
    delta   = 3/100
    ratio   = 1/2
    gratio  = 56/100
-- If adding more in the future:
-- myManageHook = (otherStuff) <+> (fmap not isDialog --> doF avoidMaster)
-- Non-dialog windows are placed so they do not displace the master.
myManageHook = fmap not isDialog --> doF avoidMaster
-- Windows do not displace master window when it is focused:
-- when the focused window is the master (no windows above it), a new
-- window is inserted just below the master instead of on top.
avoidMaster :: W.StackSet i l a s sd -> W.StackSet i l a s sd
avoidMaster = W.modify' $ \c -> case c of
  W.Stack t [] (r:rs) -> W.Stack t [r] rs
  -- Was `otherwise -> c`: that bound a variable named `otherwise`
  -- (shadowing Prelude.otherwise); a wildcard expresses the intent.
  _                   -> c
|
Tenn1518/dotfiles
|
config/xmonad/xmonad.hs
|
Haskell
|
mit
| 3,744
|
-----------------------------------------------------------------------------
-- |
-- Module : GitBak
-- Copyright : (c) Agorgianitis Loukas, 2015
-- License : MIT
--
-- Maintainer : Agorgianitis Loukas <agorglouk@gmail.com>
-- Stability : experimental
-- Portability : portable
--
-- Main of gitbak executable
--
-----------------------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Control.Applicative
import Control.Monad
import Data.Aeson
import Options.Applicative
import System.FilePath
import System.Directory
import System.IO
import System.Process
import System.Exit
import qualified Control.Lens as Ln
import qualified Network.Wreq as Wr
import qualified Data.ByteString.Char8 as B8
---------------------------------------------------------------------------
-- Command Line Options
---------------------------------------------------------------------------
-- Helper: attach the standard --help option and a program description
-- to a parser, producing a runnable ParserInfo.
withInfo :: Parser a -> String -> ParserInfo a
withInfo opts desc = info (helper <*> opts) (progDesc desc)
-- Command-line options: the GitHub user whose repos to clone and an
-- optional API token (raises the rate limit).
data Options = Options { user :: String, ghAPIKey :: Maybe String }
-- Main parser
parseOptions :: Parser Options
parseOptions = Options
    <$> argument str (metavar "USER" <> help "The GitHub user to clone repos from")
    <*> optional (argument str (metavar "APIKEY" <> help "The optional GitHub API key to enable more API requests per minute"))
-- The main description generator: full help text with a banner header.
-- (Does not reuse 'withInfo' because it sets 'header', not 'progDesc'.)
parseOptionsInfo :: ParserInfo Options
parseOptionsInfo = info (helper <*> parseOptions)
    (fullDesc
    <> header "GitBak - A GitHub mass clone utility")
---------------------------------------------------------------------------
-- Deserializing
---------------------------------------------------------------------------
-- The data structure that holds a fetched repo information:
-- the repository name and its HTTPS clone URL.
data RepoInfo = RepoInfo { repoName :: String , repoLink :: String } deriving Show
instance FromJSON RepoInfo where
    -- Reads the "name" and "clone_url" fields of a GitHub repo object.
    parseJSON (Object v) = RepoInfo <$>
                           v .: "name" <*>
                           v .: "clone_url"
    -- Any non-object JSON fails the parse (mempty is the failing parser).
    parseJSON _ = mempty
---------------------------------------------------------------------------
-- Actions
---------------------------------------------------------------------------
-- Gathers a RepoInfo list for the given username, using the optional API key.
-- Follows GitHub's paginated API via the "next" Link header, printing a
-- running repo count to stdout as pages arrive.
gatherRepoList :: String -> Maybe String -> IO [RepoInfo]
gatherRepoList name apiKey = do
    let initUrl = "https://api.github.com/users/" ++ name ++ "/repos"
    let opts = Wr.defaults
    -- With a key, send it as a "token ..." Authorization header.
    let opts2 = case apiKey of
                    Just key -> opts Ln.& Wr.header "Authorization" Ln..~ [B8.pack $ "token " ++ key]
                    Nothing -> opts
    -- Fetch one page, decode it, then recurse on the "next" link if any.
    -- NOTE(review): a page that fails to decode silently yields [] and
    -- stops pagination.
    let getRepoLinks url progress =
          Wr.getWith opts2 url >>= (\x ->
            let body = x Ln.^. Wr.responseBody
                restLink = x Ln.^? Wr.responseLink "rel" "next" . Wr.linkURL
            in case decode body :: Maybe [RepoInfo] of
                   Nothing -> return []
                   Just v -> do
                       let newProgress = progress + length v
                       putStr $ "\rTotal repos: " ++ show newProgress
                       hFlush stdout
                       rest <- case restLink of
                                   Nothing -> return []
                                   Just l -> getRepoLinks (B8.unpack l) newProgress
                       return $ v ++ rest)
    putStr $ "Total repos: " ++ show (0 :: Int)
    hFlush stdout
    repoLinks <- getRepoLinks initUrl 0
    putStr "\n"
    return repoLinks
-- Clones a git repository using the git executable.
-- Uses 'rawSystem' so the URL and target directory are passed as
-- separate argv entries instead of being spliced into a shell command
-- line — avoids quoting/injection problems with unusual repo names.
cloneGitRepo :: RepoInfo -> IO ExitCode
cloneGitRepo inf = rawSystem "git" ["clone", repoLink inf, repoName inf]
-- Returns every entry under the given directory, recursively.  Each
-- directory appears after its own contents (post-order), matching the
-- deletion-friendly ordering the caller relies on.
getRecursiveContents :: FilePath -> IO [FilePath]
getRecursiveContents topdir = do
    entries <- getDirectoryContents topdir
    let visible = filter (`notElem` [".", ".."]) entries
    nested <- forM visible $ \entry -> do
        let fullPath = topdir </> entry
        isDirectory <- doesDirectoryExist fullPath
        if isDirectory
            then do sub <- getRecursiveContents fullPath
                    return (sub ++ [fullPath])
            else return [fullPath]
    return (concat nested)
-- Sets the write permission of a file or directory to True, leaving
-- the other permission bits untouched.
makeWritable :: FilePath -> IO ()
makeWritable path = do
    perms <- getPermissions path
    setPermissions path perms {writable = True}
-- Archives a given folder using the tar utility, producing
-- "<name>.tar.gz" next to it.  Uses 'rawSystem' so the folder name is
-- passed as argv entries rather than interpolated into a shell string.
zipFolder :: FilePath -> IO ExitCode
zipFolder name = rawSystem "tar" ["czvf", name ++ ".tar.gz", name]
---------------------------------------------------------------------------
-- Entrypoint
---------------------------------------------------------------------------
main :: IO ()
-- For each repo of the given user: clone it, archive the clone, then
-- delete the working copy (making everything writable first so
-- read-only git objects can be removed).
main = do
    opts <- execParser parseOptionsInfo
    putStrLn "Fetching repos..."
    repos <- gatherRepoList (user opts) (ghAPIKey opts)
    forM_ repos (\x -> do
        let name = repoName x
        -- Clone
        putStrLn $ "Cloning " ++ name ++ "..."
        _ <- cloneGitRepo x
        -- Zip
        putStrLn $ "Archiving " ++ name ++ "..."
        _ <- zipFolder name
        -- Delete cloned folder
        getRecursiveContents name >>= mapM_ makeWritable
        removeDirectoryRecursive name)
|
ElArtista/GitBak
|
src/Main.hs
|
Haskell
|
mit
| 5,319
|
-- Section 6
import Data.List
import qualified Data.Map as Map
import Data.Char
-- | Number of distinct elements in a list (O(n^2) via 'nub').
numUniques :: (Eq a) => [a] -> Int
numUniques xs = length (nub xs)
-- | Count how often each word occurs; the result is sorted by word.
wordNums :: String -> [(String, Int)]
wordNums s = [ (head grp, length grp) | grp <- group (sort (words s)) ]
-- | Sum of the decimal digits of a non-negative number.
digitSum :: Int -> Int
digitSum n = sum [ digitToInt d | d <- show n ]
-- | The first positive number whose digit sum equals @n@.  Searches
-- the infinite list [1..], so it diverges for unreachable targets.
firstToInt :: Int -> Maybe Int
firstToInt n = find hasDigitSum [1..]
  where hasDigitSum x = digitSum x == n
-- | Look up the first value for a key.  Partial: 'head' on the
-- filtered list fails when the key is absent.
findKey :: (Eq k) => k -> [(k, v)] -> v
findKey key = snd . head . filter matches
  where matches (k, _) = key == k
-- | Total lookup: Nothing when the key is absent, Just the first
-- matching value otherwise (exactly Prelude's 'lookup').
betterfindKey :: (Eq k) => k -> [(k, v)] -> Maybe v
betterfindKey key pairs = lookup key pairs
-- | Fold-based total lookup: the rightmost fold keeps the leftmost
-- match, because a hit short-circuits the accumulator.
findKey' :: (Eq k) => k -> [(k,v)] -> Maybe v
findKey' key = foldr step Nothing
  where
    step (k, v) acc
      | k == key  = Just v
      | otherwise = acc
-- | Sample name-to-number map used by the lookup examples below.
phoneBook :: Map.Map String String
phoneBook = Map.fromList $
    [("betty", "555-1938")
    ,("bonnie", "42-2332")
    ,("paty","44420-323")
    ,("fasn","78-203")
    ,("frontia","7-23-32")
    ]
-- | Extract the decimal digits of a string, in order, as Ints.
string2digits :: String -> [Int]
string2digits s = [ digitToInt c | c <- s, isDigit c ]
-- | Build a map from an assoc list, joining duplicate keys' numbers
-- with ", " — later entries end up in front of earlier ones.
phoneBookToMap :: (Ord k) => [(k,String)] -> Map.Map k String
phoneBookToMap = Map.fromListWith joinNumbers
  where joinNumbers newer older = newer ++ ", " ++ older
|
ymkjp/Algorithms-and-Data-Structures
|
6.hs
|
Haskell
|
mit
| 1,230
|
{-# LANGUAGE GADTs #-}
module Main where
import Test.Framework.Runners.Console
import Test.Framework.Providers.API
import Test.Framework.Providers.HUnit
import Test.HUnit
import Data.List
import Data.Traversable
--import LProperties
import LUnitTests
-- Test entry point: collect the HUnit tests and run them under the
-- test-framework console runner (property tests are commented out).
main = do
  unitTests <- LUnitTests.tests
  defaultMain [unitTests] --, LProperties.tests]
|
joshcough/L5-Haskell
|
test/main.hs
|
Haskell
|
mit
| 348
|
module MessageProperties where
import Data.Binary
import qualified Data.ByteString.Lazy as LBS
import Network.Linx.Gateway.Message
import Network.Linx.Gateway.Types
import Generators ()
-- | Round-trip property: encode a gateway 'Message', split off the
-- 8-byte header, decode the payload back using the header's message
-- type, and require (a) the payload survives the round trip and
-- (b) the header's length field matches the encoded payload size.
prop_message :: Message -> Bool
prop_message message@(Message _ msgPayload) =
  let encodedMessage = encode message
      (encHeader, encPayload) = LBS.splitAt 8 encodedMessage
      Header msgType (Length len) = decode encHeader
      msgPayload' = decodeProtocolPayload msgType encPayload
  in msgPayload == msgPayload'
     && (fromIntegral len) == (LBS.length encPayload)
|
kosmoskatten/linx-gateway
|
test/MessageProperties.hs
|
Haskell
|
mit
| 598
|
-- Copyright (c) 2014 Zachary King
import System.Environment
import Data.Time
-- | One recorded command: the text the user typed and when they typed it.
data Entry = Entry { line :: String
                   , time :: UTCTime
                   } deriving (Show)
-- | The session log: entries in the order they were entered.
type Index = [Entry]
emptyIndex :: Index
emptyIndex = []
main :: IO ()
-- Start the read-record loop with an empty history.
main = loop emptyIndex
-- | Prompt, read a line, timestamp it, append it to the history and
-- echo the whole history; typing "exit" ends the loop.
loop :: Index -> IO ()
loop entries = do
  putStrLn "command:"
  input <- getLine
  now <- getCurrentTime
  if input == "exit"
    then return () -- quit
    else do
      let entries' = entries ++ [Entry input now]
      print entries'
      loop entries'
|
zakandrewking/eightball
|
eightball.hs
|
Haskell
|
mit
| 557
|
module Grammar
( Grammar (..)
, ProductionElement (..)
, Production (..)
, isDiscardable
, isSymbol
, fromSymbol
) where
import qualified Data.Map as Map
-- | A grammar: a start symbol plus named productions.
data Grammar terminals productionNames symbols = Grammar
    { startSymbol :: symbols
    , productions :: Map.Map productionNames (Production terminals symbols)
    }
-- | One production: the symbol it defines, a discardability flag and
-- its right-hand side.
data Production terminals symbols
    = Production
    { productionSymbol :: symbols
    -- Some productions serve only to define precedence, which is unambiguous in a tree, so they can be discarded
    -- Those must not have more than 1 non-discardable production element
    , productionDiscardable :: Bool
    , productionString :: [ProductionElement terminals symbols]
    }
-- | An element of a production's right-hand side.
data ProductionElement terminals symbols
    = Terminal terminals
    -- Many terminals are only used in one production so the productionName uniquely identifies the result and they are discardable
    | DiscardableTerminal terminals
    | Symbol symbols
    deriving (Eq, Ord)
-- | Rendering: terminals plain, discardable terminals in parentheses,
-- symbols in angle brackets.
instance (Show terminals, Show symbols) => Show (ProductionElement terminals symbols) where
    show (Terminal t) = show t
    show (DiscardableTerminal t) = "(" ++ show t ++ ")"
    show (Symbol s) = "<" ++ show s ++ ">"
-- | Render a production as "<symbol> [(discardable)] ::= elem elem ...".
-- Bug fix: the original wrote
--   if discardable then " (discardable)" else "" ++ " ::="
-- where (++) binds tighter than the if-branches, so " ::=" was only
-- emitted for non-discardable productions.  Parenthesizing the
-- conditional emits " ::=" in both cases, as intended.
instance (Show terminals, Show symbols) => Show (Production terminals symbols) where
    show (Production symbol discardable string) = foldl (\ l r -> l ++ " " ++ show r) accum string
        where accum = "<" ++ show symbol ++ ">"
                      ++ (if discardable then " (discardable)" else "")
                      ++ " ::="
-- | Render the whole grammar: start-symbol productions first, then the
-- rest, each prefixed with its production name.
-- NOTE(review): the instance head's type-variable order differs from
-- the data declaration (Grammar terminals productionNames symbols), so
-- below "symbols" actually names the production-name parameter and
-- vice versa.  It typechecks — they are only names — but renaming them
-- to match their roles would aid readability.
instance (Show terminals, Show symbols, Eq symbols, Show productionNames, Ord productionNames)
    => Show (Grammar terminals symbols productionNames) where
    show (Grammar startSymbol productions)
        = Map.foldlWithKey concatProduction
            ( Map.foldlWithKey concatProduction "== Grammar ==\n= Start Symbol ="
                (Map.filter isStartProduction productions)
              ++ "\n\n= Rest ="
            )
        $ Map.filter (not . isStartProduction) productions
        where
            isStartProduction prod = productionSymbol prod == startSymbol
            concatProduction accum productionName production
                = accum ++ "\n" ++ show productionName ++ ":\n    "
                  ++ show production
-- | True exactly for 'DiscardableTerminal' elements.
isDiscardable :: ProductionElement terminals symbols -> Bool
isDiscardable element = case element of
  DiscardableTerminal _ -> True
  _                     -> False
-- | True exactly for 'Symbol' elements.
isSymbol :: ProductionElement terminals symbols -> Bool
isSymbol element = case element of
  Symbol _ -> True
  _        -> False
-- | Extract the payload of a 'Symbol' element.  Still partial by
-- contract (callers must check 'isSymbol' first), but now fails with a
-- descriptive message instead of a bare pattern-match error.
fromSymbol :: ProductionElement terminals symbols -> symbols
fromSymbol (Symbol s) = s
fromSymbol _ = error "Grammar.fromSymbol: not a Symbol"
|
mrwonko/wonkococo
|
wcc/Grammar.hs
|
Haskell
|
mit
| 2,590
|
import Test.HUnit (Assertion, (@=?), runTestTT, Test(..))
import Control.Monad (void)
import Gigasecond (fromDay)
import Data.Time.Calendar (fromGregorian)
-- | Wrap an HUnit assertion into a labelled Test.
testCase :: String -> Assertion -> Test
testCase label assertion = TestLabel label (TestCase assertion)
main :: IO ()
-- Run the gigasecond suite, discarding the runner's summary counts.
main = void $ runTestTT $ TestList
         [ TestList gigasecondTests ]
-- | Example cases: 'fromDay' should land one gigasecond (10^9 s)
-- after each start date.
gigasecondTests :: [Test]
gigasecondTests =
    [ testCase "from apr 25 2011" $
      fromGregorian 2043 1 1 @=? fromDay (fromGregorian 2011 04 25)
    , testCase "from jun 13 1977" $
      fromGregorian 2009 2 19 @=? fromDay (fromGregorian 1977 6 13)
    , testCase "from jul 19 1959" $
      fromGregorian 1991 3 27 @=? fromDay (fromGregorian 1959 7 19)
    -- customize this to test your birthday and find your gigasecond date:
    ]
|
tfausak/exercism-solutions
|
haskell/gigasecond/gigasecond_test.hs
|
Haskell
|
mit
| 771
|
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Application
( makeApplication
, getApplicationDev
, makeFoundation
) where
import Import
import Settings
import Yesod.Auth
import Yesod.Default.Config
import Yesod.Default.Main
import Yesod.Default.Handlers
import Network.Wai.Middleware.RequestLogger
import qualified Database.Persist
import Database.Persist.Sql (runMigration)
import Network.HTTP.Conduit (newManager, def)
import Control.Monad.Logger (runLoggingT)
import System.IO (stdout)
import System.Log.FastLogger (mkLogger)
-- Import all relevant handler modules here.
-- Don't forget to add new modules to your cabal file!
import Handler.Home
import Handler.WishList
import Handler.Register
import Handler.WishHandler
import Handler.WishListLogin
-- This line actually creates our YesodDispatch instance. It is the second half
-- of the call to mkYesodData which occurs in Foundation.hs. Please see the
-- comments there for more details.
-- (Template Haskell splice; it generates routing code at compile time.)
mkYesodDispatch "App" resourcesApp
-- This function allocates resources (such as a database connection pool),
-- performs initialization and creates a WAI application. This is also the
-- place to put your migrate statements to have automatic database
-- migrations handled by Yesod.
makeApplication :: AppConfig DefaultEnv Extra -> IO Application
makeApplication conf = do
    foundation <- makeFoundation conf

    -- Initialize the logging middleware: verbose multi-line logs in
    -- development, Apache-style single lines in production.
    logWare <- mkRequestLogger def
        { outputFormat =
            if development
                then Detailed True
                else Apache FromSocket
        , destination = Logger $ appLogger foundation
        }

    -- Create the WAI application and apply middlewares
    app <- toWaiAppPlain foundation
    return $ logWare app
-- | Loads up any necessary settings, creates your foundation datatype, and
-- performs some initialization: HTTP manager, static site, SQLite pool
-- from config/sqlite.yml, stdout logger, then runs the migrations with
-- the app's own logging settings.
makeFoundation :: AppConfig DefaultEnv Extra -> IO App
makeFoundation conf = do
    manager <- newManager def
    s <- staticSite
    dbconf <- withYamlEnvironment "config/sqlite.yml" (appEnv conf)
              Database.Persist.loadConfig >>=
              Database.Persist.applyEnv
    p <- Database.Persist.createPoolConfig (dbconf :: Settings.PersistConf)
    logger <- mkLogger True stdout
    let foundation = App conf s p manager dbconf logger

    -- Perform database migration using our application's logging settings.
    runLoggingT
        (Database.Persist.runPool dbconf (runMigration migrateAll) p)
        (messageLoggerSource foundation logger)

    return foundation
-- for yesod devel: builds the application with Development settings so
-- the dev server can hot-reload it.
getApplicationDev :: IO (Int, Application)
getApplicationDev =
    defaultDevelApp loader makeApplication
  where
    loader = Yesod.Default.Config.loadConfig (configSettings Development)
        { csParseExtra = parseExtra
        }
|
lulf/wishsys
|
Application.hs
|
Haskell
|
mit
| 2,809
|
--------------------------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
import Control.Applicative ((<$>))
import Data.Monoid (mconcat,(<>))
import Data.Function (on)
import Data.List (sortBy,intersperse,intercalate,isInfixOf)
import Data.List.Split (chunksOf)
import qualified Data.Map as M
import Hakyll
import System.FilePath (dropExtension,takeBaseName)
import Text.Blaze.Html.Renderer.String (renderHtml)
import Text.Blaze ((!), toValue)
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as A
import Debug.Trace
-------------------------------------------------------------------------------
debug s = trace ("STUFF: " ++ show s) s
-- | How many articles to show per index page.
articlesPerIndexPage :: Int
articlesPerIndexPage = 2

-- | Site configuration: deploys the generated _site/ tree via rsync.
hakyllConf :: Configuration
hakyllConf = defaultConfiguration {
  deployCommand =
     "rsync -ave ssh _site/ " ++ "ben@benkolera.com:/opt/blog/"
  }
main :: IO ()
-- Entry point just delegates to the rule set below.
main = doHakyll
-- | All Hakyll build rules: static assets, standalone pages, templates,
-- tags, posts, the posts index, and LESS-compiled CSS.
doHakyll :: IO ()
doHakyll = hakyllWith hakyllConf $ do
  match "static/**" $ do
    route   $ gsubRoute "static/" (const "")
    compile copyFileCompiler

  match ("md/pages/**.md") $ do
    route $ gsubRoute "md/pages/" (const "") `composeRoutes` setExtension ".html"
    compile staticCompiler

  match "templates/*" $ compile templateCompiler

  -- Build tags
  tags <- buildTags "md/posts/*" (fromCapture "tags/*.html")

  -- Render each and every post
  match "md/posts/*.md" $ do
    route $ gsubRoute "md/" (const "") `composeRoutes` setExtension ".html"
    compile $ templatedPandoc "templates/post.html" tags

  -- Index page listing all posts, newest first.
  create ["posts.html"] $ do
    route idRoute
    compile $ do
      posts <- recentFirst =<< loadAllSnapshots "md/posts/**" "content"
      let ctx = listField "posts" (postCtx tags) (return . debug $ posts) <> baseCtx
      makeItem ""
        >>= loadAndApplyTemplate "templates/posts.html" ctx
        >>= loadAndApplyTemplate "templates/default.html" ctx
        >>= relativizeUrls

  -- Compile less/blog.less to css/blog.css, rebuilding whenever any
  -- .less file under less/ changes.
  match "less/*.less" $ compile getResourceBody
  d <- makePatternDependency "less/**/*.less"
  rulesExtraDependencies [d] $ create ["css/blog.css"] $ do
    route idRoute
    compile $ loadBody "less/blog.less"
      >>= makeItem
      >>= withItemBody
        (unixFilter "node_modules/less/bin/lessc" ["-","--include-path=less/"])
--------------------------------------------------------------------------------
-- | Render markdown through pandoc, snapshot the raw content (used by
-- the posts index), apply the given template, then the default wrapper.
templatedPandoc :: Identifier -> Tags -> Compiler (Item String)
templatedPandoc templatePath tags = pandocCompiler
  >>= saveSnapshot "content"
  >>= loadAndApplyTemplate templatePath (postCtx tags)
  >>= defaultCompiler baseCtx
-- | Compile a standalone page: pandoc, then the default wrapper with
-- the base context.
staticCompiler :: Compiler (Item String)
staticCompiler = defaultCompiler baseCtx =<< pandocCompiler
-- | Wrap an item in the site-wide default template and make its URLs
-- relative.
defaultCompiler :: Show a => Context a -> Item a -> Compiler (Item String)
defaultCompiler ctx item =
  loadAndApplyTemplate "templates/default.html" ctx item
  >>= relativizeUrls
-- | Context for rendering a post: modification time, tag links, and
-- everything from 'baseCtx'.  (Type signature added; it matches the
-- inferred type.)
postCtx :: Tags -> Context String
postCtx tags = mconcat
  [ modificationTimeField "mtime" "%U"
  , tagsField "tags" tags
  , baseCtx
  ]
-- | Site-wide context: a page-title function (site name, optionally
-- suffixed with the item's "title" metadata), today's date, and the
-- Hakyll defaults.
baseCtx :: Context String
baseCtx =
  mconcat [
    functionField "pagetitle" pageTitle
  , dateField "today" "%B %e, %Y"
  , defaultContext
  ]
  where
    pageTitle _ i = do
      m <- getMetadata $ itemIdentifier i
      return $ "Confessions of a Typeholic" ++ (maybe "" (" | "++ ) (M.lookup "title" m))
|
benkolera/blog
|
hs/Site.hs
|
Haskell
|
mit
| 3,428
|
-- |This module, and the corresponding 'Zeno.Isabellable' modules,
-- deal with the conversion a 'ProofSet' into Isabelle/HOL code, outputting this
-- code into a file and then checking this file with Isabelle.
module Zeno.Isabelle (
toIsabelle
) where
import Prelude ()
import Zeno.Prelude
import Zeno.Core
import Zeno.Proof
import Zeno.Isabellable.Class
import Zeno.Isabellable.Proof
import Zeno.Isabellable.Core
import qualified Data.Map as Map
import qualified Data.Text as Text
-- | Render a whole proof set as one self-contained Isabelle/HOL theory
-- named @Zeno@, importing only @Main@ and @List@.
instance Isabellable ZProofSet where
  toIsabelle (ProofSet pgm named_proofs) =
    "theory Zeno\nimports Main List\nbegin"
      ++ isa_dtypes ++ isa_binds ++ isa_proofs ++ "\n\nend\n"
    where
      flags = programFlags pgm
      (names, proofs) = unzip named_proofs
      names' = map convert names
      -- Every variable mentioned in any proof goal, deduplicated; used
      -- below to compute which definitions must be emitted.
      all_vars =
        (nubOrd . concatMap (toList . proofGoal)) proofs
      isa_proofs = foldl (++) mempty
        $ zipWith namedIsabelleProof names' proofs
      binds = sortBindings binds'
      -- With the "Isabelle all" flag we emit the entire program;
      -- otherwise only the datatypes/bindings the goals depend on.
      (dtypes, binds')
        | flagIsabelleAll flags =
            ( Map.elems . programDataTypes $ pgm,
              programBindings $ pgm )
        | otherwise =
            dependencies pgm all_vars
      -- Types Isabelle already provides; skipped when emitting datatypes.
      builtInType dt = show dt `elem`
        ["list", "bool", "(,)", "(,,)", "(,,,)"]
      isa_binds = foldl (++) mempty
        . map toIsabelle
        $ binds
      isa_dtypes = foldl (++) mempty
        . map toIsabelle
        . filter (not . builtInType)
        $ dtypes
|
Gurmeet-Singh/Zeno
|
src/Zeno/Isabelle.hs
|
Haskell
|
mit
| 1,484
|
-- Simple Haskell program that generates KB clauses for the position functions.
-- Compass-direction successors of a board position
-- (x grows eastward, y grows southward).
n  (x, y) = (x,     y - 1)
e  (x, y) = (x + 1, y)
s  (x, y) = (x,     y + 1)
w  (x, y) = (x - 1, y)
ne (x, y) = (x + 1, y - 1)
se (x, y) = (x + 1, y + 1)
nw (x, y) = (x - 1, y - 1)
sw (x, y) = (x - 1, y + 1)

-- The board is a fixed 4x4 grid.
xrange = [1..4]
yrange = [1..4]

-- A position is valid when both coordinates lie on the board.
valid (x, y) = x `elem` xrange && y `elem` yrange

-- One KB clause "KB: fname(p<xy>)=p<x'y'> " (trailing space separates clauses).
kb1 fname (x, y) (x', y') =
  concat ["KB: ", fname, "(p", show x, show y, ")=p", show x', show y', " "]

-- All clauses for one direction function: one output line per board row,
-- keeping only moves whose target stays on the board.
kb f fn = unlines
  [ concatMap (uncurry (kb1 fn))
      [ (p, q) | x <- xrange, let p = (x, y), let q = f p, valid q ]
  | y <- yrange
  ]
main = putStrLn $ unlines $ map (uncurry kb) [(n,"n"),(e,"e"),(s,"s"),(w,"w"),(ne,"ne"),(se,"se"),(sw,"sw"),(nw,"nw")]
|
schwering/limbo
|
examples/tui/battleship-pos.hs
|
Haskell
|
mit
| 700
|
module Main where
import Day7.Main
main = run
|
brsunter/AdventOfCodeHaskell
|
src/Main.hs
|
Haskell
|
mit
| 46
|
-- | Number of cells on a shortest axis-aligned path between two grid
-- points, endpoints included: |dx| + |dy| + 1.
pathCells :: (Int, Int) -> (Int, Int) -> Int
pathCells (x1, y1) (x2, y2) = abs (x1 - x2) + abs (y1 - y2) + 1

-- | Read one point per line ("x y") and print the inclusive path length.
-- Fix: the arithmetic is factored into the testable pure 'pathCells',
-- and the partial irrefutable let-pattern on the parsed words is
-- replaced with an explicit case giving a clear error on bad input.
main :: IO ()
main = do
  p1 <- readPoint
  p2 <- readPoint
  print (pathCells p1 p2)
  where
    -- Parse one line of exactly two integers.
    readPoint :: IO (Int, Int)
    readPoint = do
      line <- getLine
      case map read (words line) of
        [x, y] -> return (x, y)
        _      -> error "expected two integers per line"
|
knuu/competitive-programming
|
atcoder/other/idn_qb_1.hs
|
Haskell
|
mit
| 208
|
{-# LANGUAGE LambdaCase #-}
module Main where
import Control.Concurrent hiding (Chan)
import Control.Concurrent.GoChan
import Control.Monad
import Data.IORef
import Test.Hspec
main :: IO ()
main =
  hspec $
    -- Fix: the identical six-item battery was copy-pasted once per
    -- buffer size; drive it from a list instead. Sizes exercise the
    -- unbuffered rendezvous path (0) and small buffered paths (1-3).
    forM_ [0, 1, 2, 3] $ \size ->
      describe ("with buffer size " ++ show size) $
        do it "chanMake doesn't blow up" $
             void (chanMake size :: IO (Chan Int))
           it "send & recv doesn't blow up" $
             sendRecv size 1 10 sendN drain
           it "send & recv/select doesn't blow up" $
             sendRecv size 1 10 sendN drainSelect
           it "send/select & recv doesn't blow up" $
             sendRecv size 1 10 sendNSelect drain
           it "send/select & recv/select doesn't blow up" $
             sendRecv size 1 10 sendNSelect drainSelect
           it "multi-case select doesn't blow up" $
             multiTest size
-- | A producer: send a range of integers onto the channel.
type Sender = Chan Int -> Int -> Int -> IO ()

-- | A consumer: run the first action on each received value and the
-- second action once the channel is closed.
type Drainer = Chan Int -> (Int -> IO ()) -> IO () -> IO ()

-- | Pull values off the channel with plain 'chanRecv' until it reports
-- anything other than a message (i.e. it was closed).
drain :: Drainer
drain ch recvAct closeAct = loop
  where
    loop =
      chanRecv ch >>= \msg ->
        case msg of
          Msg v -> recvAct v >> loop
          _     -> closeAct
-- | Like 'drain', but each receive goes through a single-case
-- 'chanSelect' instead of 'chanRecv'.
drainSelect :: Drainer
drainSelect ch recvAct closeAct =
  chanSelect [Recv ch handle] Nothing
  where
    handle (Msg v) = recvAct v >> drainSelect ch recvAct closeAct
    handle _       = closeAct
-- | Send @low@, @low+1@, ... onto the channel with plain 'chanSend',
-- stopping once @hi@ has been sent (always sends at least @low@).
sendN :: Sender
sendN ch low hi = do
  chanSend ch low
  unless (low >= hi) (sendN ch (low + 1) hi)
-- | Like 'sendN', but each send goes through a single-case 'chanSelect'.
sendNSelect :: Sender
sendNSelect ch low hi = do
  chanSelect [Send ch low (return ())] Nothing
  unless (low >= hi) (sendNSelect ch (low + 1) hi)
-- | Fork a producer that sends @[low .. hi]@ and closes the channel,
-- drain it on the current thread, and check the received sum.
--
-- Fixes: a stray @readIORef totalRef@ whose result was discarded
-- (dead statement / -Wunused-do-bind) is removed, and the 'forkIO'
-- thread id is explicitly discarded with 'void'.
sendRecv :: Int -> Int -> Int -> Sender -> Drainer -> Expectation
sendRecv size low hi sender drainer = do
  lock <- newEmptyMVar
  c <- chanMake size
  totalRef <- newIORef 0
  void $ forkIO $
    do sender c low hi
       chanClose c
  drainer
    c
    (\n ->
        modifyIORef' totalRef (+ n))
    (when (size > 0) (putMVar lock ()))
  -- when the channel is un-buffered, draining should act as synchronization;
  -- only lock when the buffer size is greater than 0.
  when
    (size > 0)
    (takeMVar lock)
  total <- readIORef totalRef
  total `shouldBe` sum [low .. hi]
-- | Ping/pong over two channels through multi-case selects: a forked
-- 'ping2' sends 20 values split between the channels while 'pong2'
-- receives 20 on this thread; afterwards each channel's send count
-- must equal its receive count.
multiTest :: Int -> Expectation
multiTest size = do
  lock1 <- newEmptyMVar
  lock2 <- newEmptyMVar
  c1 <- chanMake size
  c2 <- chanMake size
  -- per-channel counters, bumped by the select completion actions
  c1sentRef <- newIORef 0
  c1recvdRef <- newIORef 0
  c2sentRef <- newIORef 0
  c2recvdRef <- newIORef 0
  forkIO $ ping2 c1sentRef c2sentRef c1 c2 0 (putMVar lock2 ())
  pong2 c1recvdRef c2recvdRef c1 c2 0 (putMVar lock1 ())
  -- wait for both sides to signal completion before reading counters
  takeMVar lock1
  takeMVar lock2
  c1sent <- readIORef c1sentRef
  c1recvd <- readIORef c1recvdRef
  c2sent <- readIORef c2sentRef
  c2recvd <- readIORef c2recvdRef
  -- each channel should recv as often as it is sent on.
  (c1sent, c2sent) `shouldBe`
    (c1recvd, c2recvd)
-- | Send the loop counter onto whichever of the two channels the
-- select picks, bumping that channel's sent-counter; after 20 sends,
-- run the completion action.
ping2
  :: IORef Int
  -> IORef Int
  -> Chan Int
  -> Chan Int
  -> Int
  -> IO ()
  -> IO ()
ping2 ref1 ref2 c1 c2 n doneAct
  | n >= 20 = doneAct
  | otherwise = do
      chanSelect
        [ Send c1 n (void (modifyIORef' ref1 (+ 1)))
        , Send c2 n (void (modifyIORef' ref2 (+ 1)))]
        Nothing
      ping2 ref1 ref2 c1 c2 (n + 1) doneAct
-- | Receive 20 values from whichever channel the select offers,
-- bumping that channel's received-counter, then run the completion
-- action.
--
-- Fixes: the receive lambda-cases only matched @Msg@, so any
-- closed-channel notification would have crashed with a pattern-match
-- failure; a catch-all no-op branch is added. The unused @n@ binders
-- (which shadowed the loop counter) are replaced with wildcards.
pong2
  :: IORef Int
  -> IORef Int
  -> Chan Int
  -> Chan Int
  -> Int
  -> IO ()
  -> IO ()
pong2 ref1 ref2 c1 c2 n doneAct = do
  if (n < 20)
    then do
      chanSelect
        [ Recv
            c1
            (\case
               Msg _ -> modifyIORef' ref1 (+ 1)
               -- a close is unexpected here; count nothing and continue
               _ -> return ())
        , Recv
            c2
            (\case
               Msg _ -> modifyIORef' ref2 (+ 1)
               _ -> return ())]
        Nothing
      pong2 ref1 ref2 c1 c2 (n + 1) doneAct
    else doneAct
|
cstrahan/gochan
|
fuzz/Main.hs
|
Haskell
|
mit
| 5,923
|
module Problem16 where
import Data.Char (digitToInt)
main = print . sum . map digitToInt . show $ 2^1000
|
DevJac/haskell-project-euler
|
src/Problem16.hs
|
Haskell
|
mit
| 107
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE StandaloneDeriving #-}
-- | /Warning/: This is an internal module and subject
-- to change without notice.
module System.ZMQ4.Internal
( Context (..)
, Socket (..)
, SocketRepr (..)
, SocketType (..)
, SocketLike (..)
, Message (..)
, Flag (..)
, Timeout
, Size
, Switch (..)
, EventType (..)
, EventMsg (..)
, SecurityMechanism (..)
, KeyFormat (..)
, messageOf
, messageOfLazy
, messageClose
, messageFree
, messageInit
, messageInitSize
, setIntOpt
, setStrOpt
, getIntOpt
, getStrOpt
, getInt32Option
, setInt32OptFromRestricted
, ctxIntOption
, setCtxIntOption
, getByteStringOpt
, setByteStringOpt
, z85Encode
, z85Decode
, toZMQFlag
, combine
, combineFlags
, mkSocketRepr
, closeSock
, onSocket
, bool2cint
, toSwitch
, fromSwitch
, events2cint
, eventMessage
, toMechanism
, fromMechanism
, getKey
) where
import Control.Applicative
import Control.Monad (foldM_, when, void)
import Control.Monad.IO.Class
import Control.Exception
import Data.IORef (IORef, mkWeakIORef, readIORef, atomicModifyIORef)
import Foreign hiding (throwIfNull, void)
import Foreign.C.String
import Foreign.C.Types (CInt, CSize)
import Data.IORef (newIORef)
import Data.Restricted
import Data.Typeable
import Prelude
import System.Posix.Types (Fd(..))
import System.ZMQ4.Internal.Base
import System.ZMQ4.Internal.Error
import qualified Data.ByteString as SB
import qualified Data.ByteString.Lazy as LB
import qualified Data.ByteString.Unsafe as UB
-- Timeout values passed through to 0MQ socket options.
type Timeout = Int64

-- Buffer/message sizes in bytes.
type Size = Word

-- | Flags to apply on send operations (cf. man zmq_send)
data Flag =
    DontWait -- ^ ZMQ_DONTWAIT (Only relevant on Windows.)
  | SendMore -- ^ ZMQ_SNDMORE
  deriving (Eq, Ord, Show)

-- | Configuration switch
data Switch =
    Default -- ^ Use default setting
  | On      -- ^ Activate setting
  | Off     -- ^ De-activate setting
  deriving (Eq, Ord, Show)

-- | Event types to monitor.
data EventType =
    ConnectedEvent
  | ConnectDelayedEvent
  | ConnectRetriedEvent
  | ListeningEvent
  | BindFailedEvent
  | AcceptedEvent
  | AcceptFailedEvent
  | ClosedEvent
  | CloseFailedEvent
  | DisconnectedEvent
  | MonitorStoppedEvent
  | AllEvents
  deriving (Eq, Ord, Show)

-- | Event Message to receive when monitoring socket events.
-- Each constructor carries the endpoint address plus either a file
-- descriptor or an integer payload (see 'eventMessage' for decoding).
data EventMsg =
    Connected      !SB.ByteString !Fd
  | ConnectDelayed !SB.ByteString
  | ConnectRetried !SB.ByteString !Int
  | Listening      !SB.ByteString !Fd
  | BindFailed     !SB.ByteString !Int
  | Accepted       !SB.ByteString !Fd
  | AcceptFailed   !SB.ByteString !Int
  | Closed         !SB.ByteString !Fd
  | CloseFailed    !SB.ByteString !Int
  | Disconnected   !SB.ByteString !Fd
  | MonitorStopped !SB.ByteString !Int
  deriving (Eq, Show)

-- Security mechanisms selectable on a socket (see 'toMechanism').
data SecurityMechanism
    = Null
    | Plain
    | Curve
    deriving (Eq, Show)

-- Key encodings, tracked at the type level: binary keys must have a
-- length divisible by 4, Z85 text keys by 5 (cf. Data.Restricted).
data KeyFormat a where
    BinaryFormat :: KeyFormat Div4
    TextFormat   :: KeyFormat Div5

deriving instance Eq (KeyFormat a)
deriving instance Show (KeyFormat a)

-- | A 0MQ context representation.
newtype Context = Context { _ctx :: ZMQCtx }

deriving instance Typeable Context

-- | A 0MQ Socket.
newtype Socket a = Socket
  { _socketRepr :: SocketRepr }

-- Raw socket handle plus a liveness flag consulted by 'closeSock' and
-- the finalizer installed in 'mkSocketRepr'.
data SocketRepr = SocketRepr
  { _socket   :: ZMQSocket
  , _sockLive :: IORef Bool
  }

-- | Socket types.
class SocketType a where
    zmqSocketType :: a -> ZMQSocketType

-- Anything that can be viewed as a plain 'Socket'.
class SocketLike s where
    toSocket :: s t -> Socket t

instance SocketLike Socket where
    toSocket = id

-- A 0MQ Message representation.
newtype Message = Message { msgPtr :: ZMQMsgPtr }
-- internal helpers:
-- | Unwrap the raw 0MQ socket handle and run the action on it; the
-- String names the calling function (kept for call-site symmetry with
-- the error helpers, currently unused).
onSocket :: String -> Socket a -> (ZMQSocket -> IO b) -> IO b
onSocket _func (Socket (SocketRepr sock _state)) act = act sock
{-# INLINE onSocket #-}
-- | Create a socket of the given type in the context, attaching a weak
-- finalizer to the liveness ref that closes the raw handle if it is
-- still alive once the ref becomes unreachable.
mkSocketRepr :: SocketType t => t -> Context -> IO SocketRepr
mkSocketRepr t c = do
    let ty = typeVal (zmqSocketType t)
    s <- throwIfNull "mkSocketRepr" (c_zmq_socket (_ctx c) ty)
    ref <- newIORef True
    addFinalizer ref $ do
        alive <- readIORef ref
        -- close only if 'closeSock' has not already run
        when alive $ c_zmq_close s >> return ()
    return (SocketRepr s ref)
  where
    addFinalizer r f = mkWeakIORef r f >> return ()
-- | Close the socket at most once: atomically flip the liveness flag
-- and only invoke zmq_close when it was still alive, so a later
-- finalizer run becomes a no-op.
closeSock :: SocketRepr -> IO ()
closeSock (SocketRepr s status) = do
  alive <- atomicModifyIORef status (\b -> (False, b))
  when alive $ throwIfMinus1_ "close" . c_zmq_close $ s
-- | Allocate a 0MQ message of the right size and copy the strict
-- ByteString's bytes into it.
messageOf :: SB.ByteString -> IO Message
messageOf b = UB.unsafeUseAsCStringLen b $ \(cstr, len) -> do
    msg <- messageInitSize (fromIntegral len)
    data_ptr <- c_zmq_msg_data (msgPtr msg)
    copyBytes data_ptr cstr len
    return msg

-- | Allocate a 0MQ message for a lazy ByteString and copy it in chunk
-- by chunk, threading the write offset through the fold.
messageOfLazy :: LB.ByteString -> IO Message
messageOfLazy lbs = do
    msg <- messageInitSize (fromIntegral len)
    data_ptr <- c_zmq_msg_data (msgPtr msg)
    let fn offset bs = UB.unsafeUseAsCStringLen bs $ \(cstr, str_len) -> do
            copyBytes (data_ptr `plusPtr` offset) cstr str_len
            return (offset + str_len)
    foldM_ fn 0 (LB.toChunks lbs)
    return msg
  where
    len = LB.length lbs
-- | Release the message payload via zmq_msg_close, then free the
-- message struct itself.
messageClose :: Message -> IO ()
messageClose (Message ptr) = do
    throwIfMinus1_ "messageClose" $ c_zmq_msg_close ptr
    free ptr

-- | Free only the message struct (without zmq_msg_close).
messageFree :: Message -> IO ()
messageFree (Message ptr) = free ptr

-- | Allocate and zmq_msg_init an empty message.
messageInit :: IO Message
messageInit = do
    ptr <- new (ZMQMsg nullPtr)
    throwIfMinus1_ "messageInit" $ c_zmq_msg_init ptr
    return (Message ptr)

-- | Allocate and zmq_msg_init_size a message with room for @s@ bytes.
messageInitSize :: Size -> IO Message
messageInitSize s = do
    ptr <- new (ZMQMsg nullPtr)
    throwIfMinus1_ "messageInitSize" $
        c_zmq_msg_init_size ptr (fromIntegral s)
    return (Message ptr)
-- | Set an integral socket option via zmq_setsockopt.
setIntOpt :: (Storable b, Integral b) => Socket a -> ZMQOption -> b -> IO ()
setIntOpt sock (ZMQOption o) i = onSocket "setIntOpt" sock $ \s ->
    throwIfMinus1Retry_ "setIntOpt" $ with i $ \ptr ->
        c_zmq_setsockopt s (fromIntegral o)
                           (castPtr ptr)
                           (fromIntegral . sizeOf $ i)

-- Shared low-level setter for string-like options; error handling is
-- done by the wrappers below.
setCStrOpt :: ZMQSocket -> ZMQOption -> CStringLen -> IO CInt
setCStrOpt s (ZMQOption o) (cstr, len) =
    c_zmq_setsockopt s (fromIntegral o) (castPtr cstr) (fromIntegral len)

setByteStringOpt :: Socket a -> ZMQOption -> SB.ByteString -> IO ()
setByteStringOpt sock opt str = onSocket "setByteStringOpt" sock $ \s ->
    throwIfMinus1Retry_ "setByteStringOpt" . UB.unsafeUseAsCStringLen str $ setCStrOpt s opt

setStrOpt :: Socket a -> ZMQOption -> String -> IO ()
setStrOpt sock opt str = onSocket "setStrOpt" sock $ \s ->
    throwIfMinus1Retry_ "setStrOpt" . withCStringLen str $ setCStrOpt s opt

-- | Read an integral option; the initial value @i@ only fixes the
-- size/type of the out-parameter buffer, its value is overwritten.
getIntOpt :: (Storable b, Integral b) => Socket a -> ZMQOption -> b -> IO b
getIntOpt sock (ZMQOption o) i = onSocket "getIntOpt" sock $ \s -> do
    bracket (new i) free $ \iptr ->
        bracket (new (fromIntegral . sizeOf $ i :: CSize)) free $ \jptr -> do
            throwIfMinus1Retry_ "getIntOpt" $
                c_zmq_getsockopt s (fromIntegral o) (castPtr iptr) jptr
            peek iptr

-- Read a string-like option into a fixed 255-byte buffer, then decode
-- the written prefix with the supplied continuation.
-- NOTE(review): option values longer than 255 bytes would not fit —
-- presumably relies on 0MQ's own option size limits; confirm.
getCStrOpt :: (CStringLen -> IO s) -> Socket a -> ZMQOption -> IO s
getCStrOpt peekA sock (ZMQOption o) = onSocket "getCStrOpt" sock $ \s ->
    bracket (mallocBytes 255) free $ \bPtr ->
        bracket (new (255 :: CSize)) free $ \sPtr -> do
            throwIfMinus1Retry_ "getCStrOpt" $
                c_zmq_getsockopt s (fromIntegral o) (castPtr bPtr) sPtr
            peek sPtr >>= \len -> peekA (bPtr, fromIntegral len)

getStrOpt :: Socket a -> ZMQOption -> IO String
getStrOpt = getCStrOpt (peekCString . fst)

getByteStringOpt :: Socket a -> ZMQOption -> IO SB.ByteString
getByteStringOpt = getCStrOpt SB.packCStringLen
-- | Read a CInt-sized socket option as a plain 'Int'.
getInt32Option :: ZMQOption -> Socket a -> IO Int
getInt32Option o s = fromIntegral <$> getIntOpt s o (0 :: CInt)

-- | Set a socket option from a range-restricted value as a CInt.
setInt32OptFromRestricted :: Integral i => ZMQOption -> Restricted r i -> Socket b -> IO ()
setInt32OptFromRestricted o x s = setIntOpt s o ((fromIntegral . rvalue $ x) :: CInt)

-- | Read a zmq_ctx option; the String names the caller for error messages.
ctxIntOption :: Integral i => String -> ZMQCtxOption -> Context -> IO i
ctxIntOption name opt ctx = fromIntegral <$>
    (throwIfMinus1 name $ c_zmq_ctx_get (_ctx ctx) (ctxOptVal opt))

-- | Set a zmq_ctx option; the String names the caller for error messages.
setCtxIntOption :: Integral i => String -> ZMQCtxOption -> i -> Context -> IO ()
setCtxIntOption name opt val ctx = throwIfMinus1_ name $
    c_zmq_ctx_set (_ctx ctx) (ctxOptVal opt) (fromIntegral val)
-- | Z85-encode a binary blob whose length is divisible by 4; the output
-- buffer is 5/4 the input size plus one byte for the NUL terminator
-- written by zmq_z85_encode.
z85Encode :: (MonadIO m) => Restricted Div4 SB.ByteString -> m SB.ByteString
z85Encode b = liftIO $ UB.unsafeUseAsCStringLen (rvalue b) $ \(c, s) ->
    allocaBytes ((s * 5) `div` 4 + 1) $ \w -> do
        void . throwIfNull "z85Encode" $
            c_zmq_z85_encode w (castPtr c) (fromIntegral s)
        SB.packCString w

-- | Z85-decode a text blob whose length is divisible by 5 back to
-- binary (4/5 the input size).
z85Decode :: (MonadIO m) => Restricted Div5 SB.ByteString -> m SB.ByteString
z85Decode b = liftIO $ SB.useAsCStringLen (rvalue b) $ \(c, s) -> do
    let size = (s * 4) `div` 5
    allocaBytes size $ \w -> do
        void . throwIfNull "z85Decode" $
            c_zmq_z85_decode (castPtr w) (castPtr c)
        SB.packCStringLen (w, size)
-- | Read a key-valued socket option: 32 raw bytes for binary keys,
-- 41 bytes (40 Z85 characters plus the NUL) for text keys.
getKey :: KeyFormat f -> Socket a -> ZMQOption -> IO SB.ByteString
getKey kf sock (ZMQOption o) = onSocket "getKey" sock $ \s -> do
    let len = case kf of
            BinaryFormat -> 32
            TextFormat   -> 41
    with len $ \lenptr -> allocaBytes len $ \w -> do
        throwIfMinus1Retry_ "getKey" $
            c_zmq_getsockopt s (fromIntegral o) (castPtr w) (castPtr lenptr)
        SB.packCString w
-- | Map API-level send flags onto the C-level flag values.
toZMQFlag :: Flag -> ZMQFlag
toZMQFlag DontWait = dontWait
toZMQFlag SendMore = sndMore

-- | OR a list of send flags together into the zmq_send flags argument.
combineFlags :: [Flag] -> CInt
combineFlags = fromIntegral . combine . map (flagVal . toZMQFlag)

-- | Bitwise OR of a list of values (identity: 0).
combine :: (Integral i, Bits i) => [i] -> i
combine = foldr (.|.) 0
-- | Marshal a 'Bool' to the C convention: 'True' maps to 1, 'False' to 0.
bool2cint :: Bool -> CInt
bool2cint b = if b then 1 else 0
-- | Decode 0MQ's integer convention (-1 / 0 / 1) into a 'Switch'; the
-- String names the caller for the error message on anything else.
toSwitch :: (Show a, Integral a) => String -> a -> Switch
toSwitch m n = case n of
    -1 -> Default
    0  -> Off
    1  -> On
    _  -> error (m ++ ": " ++ show n)

-- | Inverse of 'toSwitch'.
fromSwitch :: Integral a => Switch -> a
fromSwitch sw = case sw of
    Default -> -1
    Off     -> 0
    On      -> 1
-- | Map API-level event types onto the C-level event constants from
-- the Base module.
toZMQEventType :: EventType -> ZMQEventType
toZMQEventType AllEvents            = allEvents
toZMQEventType ConnectedEvent       = connected
toZMQEventType ConnectDelayedEvent  = connectDelayed
toZMQEventType ConnectRetriedEvent  = connectRetried
toZMQEventType ListeningEvent       = listening
toZMQEventType BindFailedEvent      = bindFailed
toZMQEventType AcceptedEvent        = accepted
toZMQEventType AcceptFailedEvent    = acceptFailed
toZMQEventType ClosedEvent          = closed
toZMQEventType CloseFailedEvent     = closeFailed
toZMQEventType DisconnectedEvent    = disconnected
toZMQEventType MonitorStoppedEvent  = monitorStopped
-- | Map API-level security mechanisms onto the C-level values.
toMechanism :: SecurityMechanism -> ZMQSecMechanism
toMechanism Null  = secNull
toMechanism Plain = secPlain
toMechanism Curve = secCurve

-- | Inverse mapping; the String names the caller for the error message
-- raised on an unknown mechanism value.
fromMechanism :: String -> Int -> SecurityMechanism
fromMechanism s m
  | m == secMechanism secNull  = Null
  | m == secMechanism secPlain = Plain
  | m == secMechanism secCurve = Curve
  | otherwise                  = error $ s ++ ": " ++ show m

-- | OR event types together into the zmq_socket_monitor events mask.
events2cint :: [EventType] -> CInt
events2cint = fromIntegral . foldr ((.|.) . eventTypeVal . toZMQEventType) 0
-- | Decode a raw monitor event (event id + value) together with its
-- endpoint address into an 'EventMsg'. Depending on the event, the
-- value is interpreted as a file descriptor or as a plain integer
-- (errno / retry interval).
eventMessage :: SB.ByteString -> ZMQEvent -> EventMsg
eventMessage str (ZMQEvent e v)
  | e == connected      = Connected str (Fd . fromIntegral $ v)
  | e == connectDelayed = ConnectDelayed str
  | e == connectRetried = ConnectRetried str (fromIntegral $ v)
  | e == listening      = Listening str (Fd . fromIntegral $ v)
  | e == bindFailed     = BindFailed str (fromIntegral $ v)
  | e == accepted       = Accepted str (Fd . fromIntegral $ v)
  | e == acceptFailed   = AcceptFailed str (fromIntegral $ v)
  | e == closed         = Closed str (Fd . fromIntegral $ v)
  | e == closeFailed    = CloseFailed str (fromIntegral $ v)
  | e == disconnected   = Disconnected str (fromIntegral $ v)
  | e == monitorStopped = MonitorStopped str (fromIntegral $ v)
  | otherwise           = error $ "unknown event type: " ++ show e
|
twittner/zeromq-haskell
|
src/System/ZMQ4/Internal.hs
|
Haskell
|
mit
| 12,198
|
module Data.Hadoop.SequenceFile.Types
( Header(..)
, MD5(..)
, RecordBlock(..)
) where
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
import Data.Text (Text)
import Text.Printf (printf)
import Data.Hadoop.Writable
------------------------------------------------------------------------
-- | The header of a sequence file. Contains the names of the Java classes
-- used to encode the file and potentially some metadata.
data Header = Header
    { hdKeyType         :: !Text -- ^ Package qualified class name of the key type.
    , hdValueType       :: !Text -- ^ Package qualified class name of the value type.
    , hdCompressionType :: !Text -- ^ Package qualified class name of the compression codec.
    , hdMetadata        :: ![(Text, Text)] -- ^ File metadata.
    , hdSync            :: !MD5 -- ^ The synchronization pattern used to check for
                                -- corruption throughout the file.
    } deriving (Eq, Ord, Show)

-- | An MD5 hash. Stored between each record block in a sequence file to check
-- for corruption.
newtype MD5 = MD5 { unMD5 :: ByteString }
    deriving (Eq, Ord)

-- | A block of key\/value pairs. The key at index /i/ always relates to the
-- value at index /i/. Both vectors will always be the same size.
-- ('Collection' comes from "Data.Hadoop.Writable".)
data RecordBlock k v = RecordBlock
    { rbCount  :: Int          -- ^ The number of records.
    , rbKeys   :: Collection k -- ^ The keys.
    , rbValues :: Collection v -- ^ The values.
    }
------------------------------------------------------------------------
-- | Debug rendering of the first six hash bytes as hex.
-- Fix: the format used @%0x@, which is a zero *flag* with no field
-- width, so single-digit bytes printed as one character and different
-- hashes could render identically; @%02x@ zero-pads each byte to two
-- hex digits.
instance Show MD5 where
    show (MD5 bs) = printf "MD5 %02x%02x%02x%02x%02x%02x"
        (bs `B.index` 0)
        (bs `B.index` 1)
        (bs `B.index` 2)
        (bs `B.index` 3)
        (bs `B.index` 4)
        (bs `B.index` 5)
|
jystic/hadoop-formats
|
src/Data/Hadoop/SequenceFile/Types.hs
|
Haskell
|
apache-2.0
| 1,943
|
{-# LANGUAGE OverloadedStrings #-}
module Web.Actions.User where
import Web.Utils
import Model.ResponseTypes
import Model.CoreTypes
import System.Random
import Database.Persist
import Database.Persist.Sql
import Data.Word8
import Control.Monad
import Data.Time
import Control.Monad.Trans
import qualified Data.ByteString as BS
import qualified Crypto.Hash.SHA512 as SHA
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
-- | Produce exactly @ct@ pseudo-random bytes by iterating the
-- generator and truncating each sampled Int to a Word8.
randomBytes :: Int -> StdGen -> [Word8]
randomBytes ct g
  | ct == 0   = []
  | otherwise = fromIntegral v : randomBytes (ct - 1) g'
  where (v, g') = next g
-- | A strict ByteString of @len@ pseudo-random bytes.
randomBS :: Int -> StdGen -> BS.ByteString
randomBS len = BS.pack . randomBytes len
-- | SHA-512 digest of the salt followed by the UTF-8 encoded password.
hashPassword :: T.Text -> BS.ByteString -> BS.ByteString
hashPassword password salt =
    SHA.finalize (SHA.updates SHA.init [salt, T.encodeUtf8 password])
-- | Insert a new session row for the user, valid for five hours
-- (5 * 3600 seconds) from now, returning its id.
createSession :: UserId -> SqlPersistM SessionId
createSession userId =
    do now <- liftIO getCurrentTime
       insert (Session (addUTCTime (5 * 3600) now) userId)
-- | Delete every session belonging to the user (e.g. on logout).
killSessions :: UserId -> SqlPersistM ()
killSessions userId =
    deleteWhere [ SessionUserId ==. userId ]
-- | Authenticate by username *or* email (the same input is looked up
-- under both unique keys), returning the user's id on success.
loginUser :: T.Text -> T.Text -> SqlPersistM (Maybe UserId)
loginUser username password =
    do mUserU <- getBy (UniqueUsername username)
       mUserE <- getBy (UniqueEmail username)
       case mUserU `mplus` mUserE of
         Just userEntity ->
             -- NOTE(review): (==) on the hex digests is not a
             -- constant-time comparison; consider a timing-safe check.
             let user = entityVal userEntity
             in if userPassword user == (makeHex $ hashPassword password (decodeHex $ userSalt user))
                then return $ Just (entityKey userEntity)
                else return Nothing
         Nothing ->
             return Nothing
-- | Resolve a session id to its user, provided the session has not
-- expired; unknown or expired sessions yield Nothing.
loadUser :: SessionId -> SqlPersistM (Maybe (UserId, User))
loadUser sessId =
    do mSess <- get sessId
       now <- liftIO getCurrentTime
       case mSess of
         Just sess | sessionValidUntil sess > now ->
             do mUser <- get (sessionUserId sess)
                return $ fmap (\user -> (sessionUserId sess, user)) mUser
         _ ->
             return Nothing
-- | Create a new account, rejecting duplicate usernames/emails, and
-- store a salted SHA-512 password hash.
--
-- Fix: the salt generator was obtained with 'getStdGen', which returns
-- the global generator *without advancing it*, so every registration in
-- one process run produced the identical 512-byte salt. 'newStdGen'
-- splits the global generator and yields a fresh one per call.
registerUser :: T.Text -> T.Text -> T.Text -> SqlPersistM CommonResponse
registerUser username email password =
    do mUserU <- getBy (UniqueUsername username)
       mUserE <- getBy (UniqueEmail email)
       case (mUserU, mUserE) of
         (Just _, _) ->
             return (CommonError "Username already taken!")
         (_, Just _) ->
             return (CommonError "Email already registered!")
         (Nothing, Nothing) ->
             do g <- liftIO newStdGen
                let salt = randomBS 512 g
                    hash = hashPassword password salt
                _ <- insert (User username (makeHex hash) (makeHex salt) email False False)
                return (CommonSuccess "Signup complete. You may now login.")
|
agrafix/funblog
|
src/Web/Actions/User.hs
|
Haskell
|
apache-2.0
| 2,807
|
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE PackageImports #-}
{-
Copyright 2019 The CodeWorld Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
--------------------------------------------------------------------------------
-- |The standard set of functions and variables available to all programs.
--
-- You may use any of these functions and variables without defining them.
module Prelude (
module Internal.Exports
-- * Numbers
, module Internal.Num
-- * Text
, module Internal.Text
-- * General purpose functions
, module Internal.Prelude
, IO
) where
import Internal.Exports
import "base" Prelude (IO)
import Internal.Num
import Internal.Prelude hiding (randomsFrom)
import Internal.Text hiding (fromCWText, toCWText)
import Internal.CodeWorld
import Internal.Color
import Internal.Event
import Internal.Picture
|
alphalambda/codeworld
|
codeworld-base/src/Prelude.hs
|
Haskell
|
apache-2.0
| 1,393
|
-- 1533776805
import Data.List.Ordered(isect, member)
import Euler(triangular, pentagonal, hexagonal)
-- skip this many leading hexagonal numbers before searching
-- (presumably past the smaller known match — TODO confirm)
nn = 143

-- walk the pentagonal and triangular lists
-- check whether the hexagonal value is in them
-- also provide the remainder of each list for the next check
--
-- n  : candidate value; ps/ts : ascending, not-yet-consumed streams
walkSet n ps ts = (member n $ isect ps2 ts2, ps3, ts3)
    where (ps2,ps3) = span (n>=) ps
          (ts2,ts3) = span (n>=) ts
-- Scan candidate values, threading the unconsumed tails of the
-- pentagonal and triangular streams from one membership check to the
-- next so each stream is traversed only once overall.
findTriple0 [] _ _ = error "findTriple0: empty"
findTriple0 (x:xs) ps ts
  | found     = x
  | otherwise = findTriple0 xs ps2 ts2
  where (found, ps2, ts2) = walkSet x ps ts
-- Search hexagonal numbers starting after index n.
findTriple n = findTriple0 (drop n hexagonal) pentagonal triangular

-- Print the first number past index nn that is simultaneously
-- triangular, pentagonal, and hexagonal.
-- Fix: @putStrLn $ show $@ is exactly 'print'.
main = print (findTriple nn)
|
higgsd/euler
|
hs/45.hs
|
Haskell
|
bsd-2-clause
| 676
|
module Chart.Distribution (renderContinuousDistribution,
renderDiscreteDistribution,
continuousDistributionPlot,
discreteDistributionPlot) where
import Graphics.Rendering.Chart
import Graphics.Rendering.Chart.Backend.Cairo
import Data.Colour
import Data.Default.Class
import Control.Lens ((&), (.~))
import Data.List (sort, span)
import qualified Data.Map as M
-- | Render a histogram of the samples to @file@; left-axis labels and
-- ticks are hidden.
renderContinuousDistribution :: FilePath -> [Double] -> IO (PickFn ())
renderContinuousDistribution file xs = renderableToFile def renderable file
  where renderable = toRenderable $ def
                     & layout_plots .~ [plotBars $ continuousDistributionPlot xs]
                     & layout_left_axis_visibility .~ nolabel
        nolabel = def & axis_show_labels .~ False
                      & axis_show_ticks .~ False
-- | Render a frequency bar chart of the categories to @file@; the
-- categories are tallied and ordered via Data.Map and shown on the x
-- axis, with left-axis labels and ticks hidden.
renderDiscreteDistribution :: (Ord k, Show k) => FilePath -> [k] -> IO (PickFn ())
renderDiscreteDistribution file xs = renderableToFile def renderable file
  where renderable = toRenderable $ def
                     & layout_plots .~ [plotBars $ discreteDistributionPlot $ map snd counts]
                     & layout_left_axis_visibility .~ nolabel
                     & layout_x_axis . laxis_generate .~ (autoIndexAxis $ map (show . fst) counts)
        nolabel = def & axis_show_labels .~ False
                      & axis_show_ticks .~ False
        -- frequency table: one (category, count) pair per distinct value
        counts = M.toList $ M.fromListWith (+) $ map (\x->(x,1)) xs
-- | Histogram bars for a continuous sample: bin count per the Rice
-- rule, bar heights as densities (count / bin width).
--
-- Fixes: the old binning built an extra degenerate (max, max) range
-- and — because every bin consumed values with @span (<upper)@ — any
-- value equal to the global maximum was never counted. The final bin
-- now takes everything remaining. Local names no longer shadow
-- Prelude's 'min'/'max'. (As before, an empty input errors on
-- 'minimum'.)
continuousDistributionPlot :: [Double] -> PlotBars Double Double
continuousDistributionPlot xs = def & plot_bars_alignment .~ BarsRight
                                    & plot_bars_spacing .~ BarsFixGap 0 0
                                    & plot_bars_values .~ (zip binmaxes $ map (:[]) bindensity)
  where sorted = sort xs
        numberofbins = ceiling $ 2 * (fromIntegral $ length xs) ** (0.333) -- rice rule
        lo = minimum sorted
        hi = maximum sorted
        binwidth = (hi - lo) / fromIntegral numberofbins
        -- upper bound of each of the numberofbins bins
        binmaxes = map (\i -> lo + fromIntegral i * binwidth) [1 .. numberofbins]
        bincounts = countBins binmaxes sorted
        bindensity = map (/ binwidth) bincounts
        -- Walk the sorted values once, counting the run below each
        -- successive upper bound; the last bin keeps the remainder so
        -- the maximum is included.
        countBins [] _ = []
        countBins [_] ys = [fromIntegral (length ys)]
        countBins (m:ms) ys = fromIntegral (length below) : countBins ms rest
          where (below, rest) = span (< m) ys
-- | Count the leading run of values below the bin's upper bound;
-- the lower bound is ignored because the input is sorted and already
-- trimmed by earlier bins. Returns the count and the leftover values.
-- Note: a value equal to the upper bound falls into the leftover.
singleBinCount :: (Double,Double) -> [Double] -> (Int,[Double])
singleBinCount (_lo, upper) xs = (length inBin, leftover)
  where (inBin, leftover) = span (< upper) xs

-- | Fold 'singleBinCount' over consecutive bins of a sorted list.
allBinCounts :: [(Double,Double)] -> [Double] -> [Int]
allBinCounts bins values = go bins values
  where
    go [] _ = []
    go (b:rest) ys = c : go rest ys'
      where (c, ys') = singleBinCount b ys
-- | Bar plot of pre-computed per-category values, indexed positionally.
discreteDistributionPlot :: [Double] -> PlotBars PlotIndex Double
discreteDistributionPlot vals = def
                                & plot_bars_values .~ (addIndexes $ map (:[]) vals)
                                & plot_bars_spacing .~ BarsFixGap 30 5
|
richardfergie/chart-distribution
|
Chart/Distribution.hs
|
Haskell
|
bsd-3-clause
| 2,929
|
module Unregister where
import InstalledPackages
import Distribution.Text
import Distribution.Package (PackageId)
import Distribution.Simple.PackageIndex
import Distribution.InstalledPackageInfo
import Distribution.Simple.GHC
import Distribution.Simple.Program.HcPkg
import Distribution.Simple.Compiler (PackageDB(..))
import Distribution.Verbosity (normal)
import Config
import GraphExtraction
-- | Unregister the given list of packages and return
-- the full list of unregistered packages.
main :: Config -> [String] -> IO ()
main config pkgStrs =
  do (_,pgmConfig) <- getProgramConfiguration config
     let hcPkg = hcPkgInfo pgmConfig
     pkgIds <- traverse parsePkg pkgStrs
     pkgIndex <- getUserPackages config
     -- the plan covers the requested packages plus everything that
     -- (transitively) depends on them, per 'computePlan'
     let plan = computePlan pkgIds pkgIndex
     mapM_ (unregister hcPkg normal UserPackageDB) plan
-- | Parse a package identifier string, failing in IO on bad input.
parsePkg :: String -> IO PackageId
parsePkg str =
  maybe (fail ("Unable to parse package: " ++ str)) return (simpleParse str)
-- | From the requested root packages, compute the list of package ids
-- to unregister by extracting the roots' closure from the installed
-- package dependency graph.
computePlan ::
  [PackageId] ->
  PackageIndex InstalledPackageInfo ->
  [PackageId]
computePlan rootIds pkgIndex = sourcePackageId . lookupVertex <$> plan
  where
    rootPkgs = lookupSourcePackageId pkgIndex =<< rootIds
    rootVertexes = unitIdToVertex' . installedUnitId <$> rootPkgs
    plan = extract pkgGraph rootVertexes
    (pkgGraph, lookupVertex, unitIdToVertex) = dependencyGraph pkgIndex
    -- total wrapper: a unit id missing from its own index would mean
    -- the package database is inconsistent
    unitIdToVertex' i =
      case unitIdToVertex i of
        Nothing -> error ("computePlan: " ++ show i)
        Just v -> v
|
glguy/GhcPkgUtils
|
Unregister.hs
|
Haskell
|
bsd-3-clause
| 1,525
|
{-|
Module : Idris.Elab.Term
Description : Code to elaborate terms.
Copyright :
License : BSD3
Maintainer : The Idris Community.
-}
{-# LANGUAGE LambdaCase, PatternGuards, ViewPatterns #-}
{-# OPTIONS_GHC -fwarn-incomplete-patterns #-}
module Idris.Elab.Term where
import Idris.AbsSyntax
import Idris.AbsSyntaxTree
import Idris.DSL
import Idris.Delaborate
import Idris.Error
import Idris.ProofSearch
import Idris.Output (pshow)
import Idris.Core.CaseTree (SC, SC'(STerm), findCalls, findUsedArgs)
import Idris.Core.Elaborate hiding (Tactic(..))
import Idris.Core.TT
import Idris.Core.Evaluate
import Idris.Core.Unify
import Idris.Core.ProofTerm (getProofTerm)
import Idris.Core.Typecheck (check, recheck, converts, isType)
import Idris.Core.WHNF (whnf)
import Idris.Coverage (buildSCG, checkDeclTotality, checkPositive, genClauses, recoverableCoverage, validCoverageCase)
import Idris.ErrReverse (errReverse)
import Idris.Elab.Quasiquote (extractUnquotes)
import Idris.Elab.Utils
import Idris.Elab.Rewrite
import Idris.Reflection
import qualified Util.Pretty as U
import Control.Applicative ((<$>))
import Control.Monad
import Control.Monad.State.Strict
import Data.Foldable (for_)
import Data.List
import qualified Data.Map as M
import Data.Maybe (mapMaybe, fromMaybe, catMaybes, maybeToList)
import qualified Data.Set as S
import qualified Data.Text as T
import Debug.Trace
-- | What the elaborator is currently producing: a type declaration, a
-- transform LHS, a clause LHS, or a RHS expression.
data ElabMode = ETyDecl | ETransLHS | ELHS | ERHS
  deriving Eq

-- | Everything 'build' / 'buildTC' hand back to their callers.
data ElabResult = ElabResult {
    -- | The term resulting from elaboration
    resultTerm :: Term
    -- | Information about new metavariables
  , resultMetavars :: [(Name, (Int, Maybe Name, Type, [Name]))]
    -- | Deferred declarations as the meaning of case blocks
  , resultCaseDecls :: [PDecl]
    -- | The potentially extended context from new definitions
  , resultContext :: Context
    -- | Meta-info about the new type declarations
  , resultTyDecls :: [RDeclInstructions]
    -- | Saved highlights from elaboration
  , resultHighlighting :: [(FC, OutputAnnotation)]
    -- | The new global name counter
  , resultName :: Int
  }
-- | Using the elaborator, convert a term in raw syntax to a fully
-- elaborated, typechecked term.
--
-- If building a pattern match, we convert undeclared variables from
-- holes to pattern bindings.
--
-- Also find deferred names in the term and their types
build :: IState
      -> ElabInfo
      -> ElabMode
      -> FnOpts
      -> Name
      -> PTerm
      -> ElabD ElabResult
build ist info emode opts fn tm
    = do elab ist info emode opts fn tm
         let tmIn = tm
         -- partially-defined functions tolerate leftover problems below
         let inf = case lookupCtxt fn (idris_tyinfodata ist) of
                        [TIPartial] -> True
                        _ -> False
         hs <- get_holes
         ivs <- get_instances
         ptm <- get_term
         -- Resolve remaining type classes. Two passes - first to get the
         -- default Num instances, second to clean up the rest
         when (not pattern) $
           mapM_ (\n -> when (n `elem` hs) $
                          do focus n
                             g <- goal
                             try (resolveTC' True True 10 g fn ist)
                                 (movelast n)) ivs
         ivs <- get_instances
         hs <- get_holes
         when (not pattern) $
           mapM_ (\n -> when (n `elem` hs) $
                          do focus n
                             g <- goal
                             ptm <- get_term
                             resolveTC' True True 10 g fn ist) ivs
         when (not pattern) $ solveAutos ist fn False
         tm <- get_term
         ctxt <- get_context
         probs <- get_probs
         u <- getUnifyLog
         hs <- get_holes
         -- flush outstanding unification problems before inspecting them
         when (not pattern) $
           traceWhen u ("Remaining holes:\n" ++ show hs ++ "\n" ++
                        "Remaining problems:\n" ++ qshow probs) $
             do unify_all; matchProblems True; unifyProblems
         when (not pattern) $ solveAutos ist fn True
         probs <- get_probs
         case probs of
              [] -> return ()
              ((_,_,_,_,e,_,_):es) -> traceWhen u ("Final problems:\n" ++ qshow probs ++ "\nin\n" ++ show tm) $
                                        if inf then return ()
                                               else lift (Error e)
         when tydecl (do mkPat
                         update_term liftPats
                         update_term orderPats)
         EState is _ impls highlights _ _ <- getAux
         tt <- get_term
         ctxt <- get_context
         -- pull out metavariables deferred during elaboration
         let (tm, ds) = runState (collectDeferred (Just fn) (map fst is) ctxt tt) []
         log <- getLog
         g_nextname <- get_global_nextname
         if log /= ""
            then trace log $ return (ElabResult tm ds (map snd is) ctxt impls highlights g_nextname)
            else return (ElabResult tm ds (map snd is) ctxt impls highlights g_nextname)
  where pattern = emode == ELHS
        tydecl = emode == ETyDecl

        -- turn every remaining hole into a pattern variable
        mkPat = do hs <- get_holes
                   tm <- get_term
                   case hs of
                      (h: hs) -> do patvar h; mkPat
                      [] -> return ()
-- | Build a term autogenerated as a typeclass method definition.
--
-- (Separate, so we don't go overboard resolving things that we don't
-- know about yet on the LHS of a pattern def)
buildTC :: IState -> ElabInfo -> ElabMode -> FnOpts -> Name ->
           [Name] -> -- Cached names in the PTerm, before adding PAlternatives
           PTerm ->
           ElabD ElabResult
buildTC ist info emode opts fn ns tm
    = do let tmIn = tm -- NOTE(review): apparently unused in this definition
         -- 'inf' is True when this name was flagged as a partial
         -- type-info entry; leftover unification problems are then
         -- tolerated rather than reported as errors.
         let inf = case lookupCtxt fn (idris_tyinfodata ist) of
                        [TIPartial] -> True
                        _ -> False
         -- set name supply to begin after highest index in tm
         initNextNameFrom ns
         elab ist info emode opts fn tm
         probs <- get_probs
         tm <- get_term
         -- Unlike 'build', no extra typeclass-resolution passes happen
         -- here; report the first outstanding problem (unless partial).
         case probs of
            [] -> return ()
            ((_,_,_,_,e,_,_):es) -> if inf then return ()
                                           else lift (Error e)
         dots <- get_dotterm
         -- 'dots' are the PHidden things which have not been solved by
         -- unification
         when (not (null dots)) $
           lift (Error (CantMatch (getInferTerm tm)))
         EState is _ impls highlights _ _ <- getAux
         tt <- get_term
         ctxt <- get_context
         -- Collect deferred (metavariable) names and their types from the
         -- elaborated term.
         let (tm, ds) = runState (collectDeferred (Just fn) (map fst is) ctxt tt) []
         log <- getLog
         g_nextname <- get_global_nextname
         if (log /= "")
            then trace log $ return (ElabResult tm ds (map snd is) ctxt impls highlights g_nextname)
            else return (ElabResult tm ds (map snd is) ctxt impls highlights g_nextname)
  where pattern = emode == ELHS -- NOTE(review): apparently unused here
-- | Return, for each argument of the given constructor, whether it
-- cannot be matched on. An argument is unmatchable when its type is
-- polymorphic, i.e. it mentions one of the constructor's earlier
-- type-valued binders and the type has not been made concrete by the
-- time we get around to elaborating the argument.
getUnmatchable :: Context -> Name -> [Bool]
getUnmatchable ctxt n
  | isDConName n ctxt && n /= inferCon
    = maybe [] (walk [] []) (lookupTyExact n ctxt)
  where
    -- Walk the constructor's Pi telescope, accumulating (in reverse)
    -- which type variables each argument's type refers to.
    walk :: [Name] -> [[Name]] -> Type -> [Bool]
    walk tyvars acc (Bind bn (Pi _ argty _) body) =
        walk tyvars' (intersect tyvars (refsIn argty) : acc)
             (instantiate (P Bound bn argty) body)
      where
        -- A binder of kind Type introduces a new type variable.
        tyvars' = case argty of
                    TType _ -> bn : tyvars
                    _       -> tyvars
    -- At the return type: an argument is unmatchable iff its type
    -- referred to at least one earlier type variable.
    walk _ acc _ = map (not . null) (reverse acc)
getUnmatchable _ _ = []
-- | Positional flags describing where in a term elaboration currently
-- is; threaded through 'elab'' to adjust its behaviour.
data ElabCtxt = ElabCtxt { e_inarg :: Bool, -- ^ in an argument of an application
                           e_isfn :: Bool, -- ^ Function part of application
                           e_guarded :: Bool, -- ^ immediately under a constructor
                           e_intype :: Bool, -- ^ inside a type
                           e_qq :: Bool, -- ^ inside a quasiquote
                           e_nomatching :: Bool -- ^ can't pattern match
                         }
-- | The starting elaboration context: every positional flag cleared.
initElabCtxt :: ElabCtxt
initElabCtxt = ElabCtxt { e_inarg      = False
                        , e_isfn       = False
                        , e_guarded    = False
                        , e_intype     = False
                        , e_qq         = False
                        , e_nomatching = False
                        }
-- | Is the current goal a reference to a locally bound (pattern/lambda)
-- variable, i.e. still polymorphic at this point in elaboration?
goal_polymorphic :: ElabD Bool
goal_polymorphic = do
    ty <- goal
    case ty of
      P _ n _ -> do
        env <- get_env
        -- The goal is polymorphic exactly when its head name is bound
        -- in the local environment.
        return (n `elem` map fst env)
      _ -> return False
-- | Returns the set of declarations we need to add to complete the
-- definition (most likely case blocks to elaborate) as well as
-- declarations resulting from user tactic scripts (%runElab)
elab :: IState
-> ElabInfo
-> ElabMode
-> FnOpts
-> Name
-> PTerm
-> ElabD ()
elab ist info emode opts fn tm
= do let loglvl = opt_logLevel (idris_options ist)
when (loglvl > 5) $ unifyLog True
compute -- expand type synonyms, etc
let fc = maybe "(unknown)"
elabE initElabCtxt (elabFC info) tm -- (in argument, guarded, in type, in qquote)
est <- getAux
sequence_ (get_delayed_elab est)
end_unify
ptm <- get_term
when (pattern || intransform) -- convert remaining holes to pattern vars
(do unify_all
matchProblems False -- only the ones we matched earlier
unifyProblems
mkPat
update_term liftPats)
where
pattern = emode == ELHS
intransform = emode == ETransLHS
bindfree = emode == ETyDecl || emode == ELHS || emode == ETransLHS
autoimpls = opt_autoimpls (idris_options ist)
get_delayed_elab est =
let ds = delayed_elab est in
map snd $ sortBy (\(p1, _) (p2, _) -> compare p1 p2) ds
tcgen = Dictionary `elem` opts
reflection = Reflection `elem` opts
isph arg = case getTm arg of
Placeholder -> (True, priority arg)
tm -> (False, priority arg)
toElab ina arg = case getTm arg of
Placeholder -> Nothing
v -> Just (priority arg, elabE ina (elabFC info) v)
toElab' ina arg = case getTm arg of
Placeholder -> Nothing
v -> Just (elabE ina (elabFC info) v)
mkPat = do hs <- get_holes
tm <- get_term
case hs of
(h: hs) -> do patvar h; mkPat
[] -> return ()
elabRec = elabE initElabCtxt Nothing
-- | elabE elaborates an expression, possibly wrapping implicit coercions
-- and forces/delays. If you make a recursive call in elab', it is
-- normally correct to call elabE - the ones that don't are desugarings
-- typically
elabE :: ElabCtxt -> Maybe FC -> PTerm -> ElabD ()
elabE ina fc' t =
do solved <- get_recents
as <- get_autos
hs <- get_holes
-- If any of the autos use variables which have recently been solved,
-- have another go at solving them now.
mapM_ (\(a, (failc, ns)) ->
if any (\n -> n `elem` solved) ns && head hs /= a
then solveAuto ist fn False (a, failc)
else return ()) as
apt <- expandToArity t
itm <- if not pattern then insertImpLam ina apt else return apt
ct <- insertCoerce ina itm
t' <- insertLazy ct
g <- goal
tm <- get_term
ps <- get_probs
hs <- get_holes
--trace ("Elaborating " ++ show t' ++ " in " ++ show g
-- ++ "\n" ++ show tm
-- ++ "\nholes " ++ show hs
-- ++ "\nproblems " ++ show ps
-- ++ "\n-----------\n") $
--trace ("ELAB " ++ show t') $
env <- get_env
let fc = fileFC "Force"
handleError (forceErr t' env)
(elab' ina fc' t')
(elab' ina fc' (PApp fc (PRef fc [] (sUN "Force"))
[pimp (sUN "t") Placeholder True,
pimp (sUN "a") Placeholder True,
pexp ct]))
forceErr orig env (CantUnify _ (t,_) (t',_) _ _ _)
| (P _ (UN ht) _, _) <- unApply (normalise (tt_ctxt ist) env t),
ht == txt "Delayed" = notDelay orig
forceErr orig env (CantUnify _ (t,_) (t',_) _ _ _)
| (P _ (UN ht) _, _) <- unApply (normalise (tt_ctxt ist) env t'),
ht == txt "Delayed" = notDelay orig
forceErr orig env (InfiniteUnify _ t _)
| (P _ (UN ht) _, _) <- unApply (normalise (tt_ctxt ist) env t),
ht == txt "Delayed" = notDelay orig
forceErr orig env (Elaborating _ _ _ t) = forceErr orig env t
forceErr orig env (ElaboratingArg _ _ _ t) = forceErr orig env t
forceErr orig env (At _ t) = forceErr orig env t
forceErr orig env t = False
notDelay t@(PApp _ (PRef _ _ (UN l)) _) | l == txt "Delay" = False
notDelay _ = True
local f = do e <- get_env
return (f `elem` map fst e)
-- | Is a constant a type?
constType :: Const -> Bool
constType (AType _) = True
constType StrType = True
constType VoidType = True
constType _ = False
-- "guarded" means immediately under a constructor, to help find patvars
elab' :: ElabCtxt -- ^ (in an argument, guarded, in a type, in a quasiquote)
-> Maybe FC -- ^ The closest FC in the syntax tree, if applicable
-> PTerm -- ^ The term to elaborate
-> ElabD ()
elab' ina fc (PNoImplicits t) = elab' ina fc t -- skip elabE step
elab' ina fc (PType fc') =
do apply RType []
solve
highlightSource fc' (AnnType "Type" "The type of types")
elab' ina fc (PUniverse u) = do apply (RUType u) []; solve
-- elab' (_,_,inty) (PConstant c)
-- | constType c && pattern && not reflection && not inty
-- = lift $ tfail (Msg "Typecase is not allowed")
elab' ina fc tm@(PConstant fc' c)
| pattern && not reflection && not (e_qq ina) && not (e_intype ina)
&& isTypeConst c
= lift $ tfail $ Msg ("No explicit types on left hand side: " ++ show tm)
| pattern && not reflection && not (e_qq ina) && e_nomatching ina
= lift $ tfail $ Msg ("Attempting concrete match on polymorphic argument: " ++ show tm)
| otherwise = do apply (RConstant c) []
solve
highlightSource fc' (AnnConst c)
elab' ina fc (PQuote r) = do fill r; solve
elab' ina _ (PTrue fc _) =
do hnf_compute
g <- goal
case g of
TType _ -> elab' ina (Just fc) (PRef fc [] unitTy)
UType _ -> elab' ina (Just fc) (PRef fc [] unitTy)
_ -> elab' ina (Just fc) (PRef fc [] unitCon)
elab' ina fc (PResolveTC (FC "HACK" _ _)) -- for chasing parent classes
= do g <- goal; resolveTC False False 5 g fn elabRec ist
elab' ina fc (PResolveTC fc')
= do c <- getNameFrom (sMN 0 "__class")
instanceArg c
-- Elaborate the equality type first homogeneously, then
-- heterogeneously as a fallback
elab' ina _ (PApp fc (PRef _ _ n) args)
| n == eqTy, [Placeholder, Placeholder, l, r] <- map getTm args
= try (do tyn <- getNameFrom (sMN 0 "aqty")
claim tyn RType
movelast tyn
elab' ina (Just fc) (PApp fc (PRef fc [] eqTy)
[pimp (sUN "A") (PRef NoFC [] tyn) True,
pimp (sUN "B") (PRef NoFC [] tyn) False,
pexp l, pexp r]))
(do atyn <- getNameFrom (sMN 0 "aqty")
btyn <- getNameFrom (sMN 0 "bqty")
claim atyn RType
movelast atyn
claim btyn RType
movelast btyn
elab' ina (Just fc) (PApp fc (PRef fc [] eqTy)
[pimp (sUN "A") (PRef NoFC [] atyn) True,
pimp (sUN "B") (PRef NoFC [] btyn) False,
pexp l, pexp r]))
elab' ina _ (PPair fc hls _ l r)
= do hnf_compute
g <- goal
let (tc, _) = unApply g
case g of
TType _ -> elab' ina (Just fc) (PApp fc (PRef fc hls pairTy)
[pexp l,pexp r])
UType _ -> elab' ina (Just fc) (PApp fc (PRef fc hls upairTy)
[pexp l,pexp r])
_ -> case tc of
P _ n _ | n == upairTy
-> elab' ina (Just fc) (PApp fc (PRef fc hls upairCon)
[pimp (sUN "A") Placeholder False,
pimp (sUN "B") Placeholder False,
pexp l, pexp r])
_ -> elab' ina (Just fc) (PApp fc (PRef fc hls pairCon)
[pimp (sUN "A") Placeholder False,
pimp (sUN "B") Placeholder False,
pexp l, pexp r])
elab' ina _ (PDPair fc hls p l@(PRef nfc hl n) t r)
= case p of
IsType -> asType
IsTerm -> asValue
TypeOrTerm ->
do hnf_compute
g <- goal
case g of
TType _ -> asType
_ -> asValue
where asType = elab' ina (Just fc) (PApp fc (PRef NoFC hls sigmaTy)
[pexp t,
pexp (PLam fc n nfc Placeholder r)])
asValue = elab' ina (Just fc) (PApp fc (PRef fc hls sigmaCon)
[pimp (sMN 0 "a") t False,
pimp (sMN 0 "P") Placeholder True,
pexp l, pexp r])
elab' ina _ (PDPair fc hls p l t r) = elab' ina (Just fc) (PApp fc (PRef fc hls sigmaCon)
[pimp (sMN 0 "a") t False,
pimp (sMN 0 "P") Placeholder True,
pexp l, pexp r])
elab' ina fc (PAlternative ms (ExactlyOne delayok) as)
= do as_pruned <- doPrune as
-- Finish the mkUniqueNames job with the pruned set, rather than
-- the full set.
uns <- get_usedns
let as' = map (mkUniqueNames (uns ++ map snd ms) ms) as_pruned
(h : hs) <- get_holes
ty <- goal
case as' of
[] -> do hds <- mapM showHd as
lift $ tfail $ NoValidAlts hds
[x] -> elab' ina fc x
-- If there's options, try now, and if that fails, postpone
-- to later.
_ -> handleError isAmbiguous
(do hds <- mapM showHd as'
tryAll (zip (map (elab' ina fc) as')
hds))
(do movelast h
delayElab 5 $ do
hs <- get_holes
when (h `elem` hs) $ do
focus h
as'' <- doPrune as'
case as'' of
[x] -> elab' ina fc x
_ -> do hds <- mapM showHd as''
tryAll' False (zip (map (elab' ina fc) as'')
hds))
where showHd (PApp _ (PRef _ _ (UN l)) [_, _, arg])
| l == txt "Delay" = showHd (getTm arg)
showHd (PApp _ (PRef _ _ n) _) = return n
showHd (PRef _ _ n) = return n
showHd (PApp _ h _) = showHd h
showHd x = getNameFrom (sMN 0 "_") -- We probably should do something better than this here
doPrune as =
do compute
ty <- goal
let (tc, _) = unApply (unDelay ty)
env <- get_env
return $ pruneByType env tc (unDelay ty) ist as
unDelay t | (P _ (UN l) _, [_, arg]) <- unApply t,
l == txt "Delayed" = unDelay arg
| otherwise = t
isAmbiguous (CantResolveAlts _) = delayok
isAmbiguous (Elaborating _ _ _ e) = isAmbiguous e
isAmbiguous (ElaboratingArg _ _ _ e) = isAmbiguous e
isAmbiguous (At _ e) = isAmbiguous e
isAmbiguous _ = False
elab' ina fc (PAlternative ms FirstSuccess as_in)
= do -- finish the mkUniqueNames job
uns <- get_usedns
let as = map (mkUniqueNames (uns ++ map snd ms) ms) as_in
trySeq as
where -- if none work, take the error from the first
trySeq (x : xs) = let e1 = elab' ina fc x in
try' e1 (trySeq' e1 xs) True
trySeq [] = fail "Nothing to try in sequence"
trySeq' deferr [] = do deferr; unifyProblems
trySeq' deferr (x : xs)
= try' (tryCatch (do elab' ina fc x
solveAutos ist fn False
unifyProblems)
(\_ -> trySeq' deferr []))
(trySeq' deferr xs) True
elab' ina fc (PAlternative ms TryImplicit (orig : alts)) = do
env <- get_env
compute
ty <- goal
let doelab = elab' ina fc orig
tryCatch doelab
(\err ->
if recoverableErr err
then -- trace ("NEED IMPLICIT! " ++ show orig ++ "\n" ++
-- show alts ++ "\n" ++
-- showQuick err) $
-- Prune the coercions so that only the ones
-- with the right type to fix the error will be tried!
case pruneAlts err alts env of
[] -> lift $ tfail err
alts' -> do
try' (elab' ina fc (PAlternative ms (ExactlyOne False) alts'))
(lift $ tfail err) -- take error from original if all fail
True
else lift $ tfail err)
where
recoverableErr (CantUnify _ _ _ _ _ _) = True
recoverableErr (TooManyArguments _) = False
recoverableErr (CantSolveGoal _ _) = False
recoverableErr (CantResolveAlts _) = False
recoverableErr (NoValidAlts _) = True
recoverableErr (ProofSearchFail (Msg _)) = True
recoverableErr (ProofSearchFail _) = False
recoverableErr (ElaboratingArg _ _ _ e) = recoverableErr e
recoverableErr (At _ e) = recoverableErr e
recoverableErr (ElabScriptDebug _ _ _) = False
recoverableErr _ = True
pruneAlts (CantUnify _ (inc, _) (outc, _) _ _ _) alts env
= case unApply (normalise (tt_ctxt ist) env inc) of
(P (TCon _ _) n _, _) -> filter (hasArg n env) alts
(Constant _, _) -> alts
_ -> filter isLend alts -- special case hack for 'Borrowed'
pruneAlts (ElaboratingArg _ _ _ e) alts env = pruneAlts e alts env
pruneAlts (At _ e) alts env = pruneAlts e alts env
pruneAlts (NoValidAlts as) alts env = alts
pruneAlts err alts _ = filter isLend alts
hasArg n env ap | isLend ap = True -- special case hack for 'Borrowed'
hasArg n env (PApp _ (PRef _ _ a) _)
= case lookupTyExact a (tt_ctxt ist) of
Just ty -> let args = map snd (getArgTys (normalise (tt_ctxt ist) env ty)) in
any (fnIs n) args
Nothing -> False
hasArg n env (PAlternative _ _ as) = any (hasArg n env) as
hasArg n _ tm = False
isLend (PApp _ (PRef _ _ l) _) = l == sNS (sUN "lend") ["Ownership"]
isLend _ = False
fnIs n ty = case unApply ty of
(P _ n' _, _) -> n == n'
_ -> False
showQuick (CantUnify _ (l, _) (r, _) _ _ _)
= show (l, r)
showQuick (ElaboratingArg _ _ _ e) = showQuick e
showQuick (At _ e) = showQuick e
showQuick (ProofSearchFail (Msg _)) = "search fail"
showQuick _ = "No chance"
elab' ina _ (PPatvar fc n) | bindfree
= do patvar n
update_term liftPats
highlightSource fc (AnnBoundName n False)
-- elab' (_, _, inty) (PRef fc f)
-- | isTConName f (tt_ctxt ist) && pattern && not reflection && not inty
-- = lift $ tfail (Msg "Typecase is not allowed")
elab' ec _ tm@(PRef fc hl n)
| pattern && not reflection && not (e_qq ec) && not (e_intype ec)
&& isTConName n (tt_ctxt ist)
= lift $ tfail $ Msg ("No explicit types on left hand side: " ++ show tm)
| pattern && not reflection && not (e_qq ec) && e_nomatching ec
= lift $ tfail $ Msg ("Attempting concrete match on polymorphic argument: " ++ show tm)
| (pattern || intransform || (bindfree && bindable n)) && not (inparamBlock n) && not (e_qq ec)
= do ty <- goal
testImplicitWarning fc n ty
let ina = e_inarg ec
guarded = e_guarded ec
inty = e_intype ec
ctxt <- get_context
let defined = case lookupTy n ctxt of
[] -> False
_ -> True
-- this is to stop us resolve type classes recursively
-- trace (show (n, guarded)) $
if (tcname n && ina && not intransform)
then erun fc $
do patvar n
update_term liftPats
highlightSource fc (AnnBoundName n False)
else if (defined && not guarded)
then do apply (Var n) []
annot <- findHighlight n
solve
highlightSource fc annot
else try (do apply (Var n) []
annot <- findHighlight n
solve
highlightSource fc annot)
(do patvar n
update_term liftPats
highlightSource fc (AnnBoundName n False))
where inparamBlock n = case lookupCtxtName n (inblock info) of
[] -> False
_ -> True
bindable (NS _ _) = False
bindable (MN _ _) = True
bindable n = implicitable n && autoimpls
elab' ina _ f@(PInferRef fc hls n) = elab' ina (Just fc) (PApp NoFC f [])
elab' ina fc' tm@(PRef fc hls n)
| pattern && not reflection && not (e_qq ina) && not (e_intype ina)
&& isTConName n (tt_ctxt ist)
= lift $ tfail $ Msg ("No explicit types on left hand side: " ++ show tm)
| pattern && not reflection && not (e_qq ina) && e_nomatching ina
= lift $ tfail $ Msg ("Attempting concrete match on polymorphic argument: " ++ show tm)
| otherwise =
do fty <- get_type (Var n) -- check for implicits
ctxt <- get_context
env <- get_env
let a' = insertScopedImps fc (normalise ctxt env fty) []
if null a'
then erun fc $
do apply (Var n) []
hilite <- findHighlight n
solve
mapM_ (uncurry highlightSource) $
(fc, hilite) : map (\f -> (f, hilite)) hls
else elab' ina fc' (PApp fc tm [])
elab' ina _ (PLam _ _ _ _ PImpossible) = lift . tfail . Msg $ "Only pattern-matching lambdas can be impossible"
elab' ina _ (PLam fc n nfc Placeholder sc)
= do -- if n is a type constructor name, this makes no sense...
ctxt <- get_context
when (isTConName n ctxt) $
lift $ tfail (Msg $ "Can't use type constructor " ++ show n ++ " here")
checkPiGoal n
attack; intro (Just n);
addPSname n -- okay for proof search
-- trace ("------ intro " ++ show n ++ " ---- \n" ++ show ptm)
elabE (ina { e_inarg = True } ) (Just fc) sc; solve
highlightSource nfc (AnnBoundName n False)
elab' ec _ (PLam fc n nfc ty sc)
= do tyn <- getNameFrom (sMN 0 "lamty")
-- if n is a type constructor name, this makes no sense...
ctxt <- get_context
when (isTConName n ctxt) $
lift $ tfail (Msg $ "Can't use type constructor " ++ show n ++ " here")
checkPiGoal n
claim tyn RType
explicit tyn
attack
ptm <- get_term
hs <- get_holes
introTy (Var tyn) (Just n)
addPSname n -- okay for proof search
focus tyn
elabE (ec { e_inarg = True, e_intype = True }) (Just fc) ty
elabE (ec { e_inarg = True }) (Just fc) sc
solve
highlightSource nfc (AnnBoundName n False)
elab' ina fc (PPi p n nfc Placeholder sc)
= do attack; arg n (is_scoped p) (sMN 0 "ty")
addAutoBind p n
addPSname n -- okay for proof search
elabE (ina { e_inarg = True, e_intype = True }) fc sc
solve
highlightSource nfc (AnnBoundName n False)
elab' ina fc (PPi p n nfc ty sc)
= do attack; tyn <- getNameFrom (sMN 0 "ty")
claim tyn RType
n' <- case n of
MN _ _ -> unique_hole n
_ -> return n
forall n' (is_scoped p) (Var tyn)
addAutoBind p n'
addPSname n' -- okay for proof search
focus tyn
let ec' = ina { e_inarg = True, e_intype = True }
elabE ec' fc ty
elabE ec' fc sc
solve
highlightSource nfc (AnnBoundName n False)
elab' ina _ tm@(PLet fc n nfc ty val sc)
= do attack
ivs <- get_instances
tyn <- getNameFrom (sMN 0 "letty")
claim tyn RType
valn <- getNameFrom (sMN 0 "letval")
claim valn (Var tyn)
explicit valn
letbind n (Var tyn) (Var valn)
addPSname n
case ty of
Placeholder -> return ()
_ -> do focus tyn
explicit tyn
elabE (ina { e_inarg = True, e_intype = True })
(Just fc) ty
focus valn
elabE (ina { e_inarg = True, e_intype = True })
(Just fc) val
ivs' <- get_instances
env <- get_env
elabE (ina { e_inarg = True }) (Just fc) sc
when (not (pattern || intransform)) $
mapM_ (\n -> do focus n
g <- goal
hs <- get_holes
if all (\n -> n == tyn || not (n `elem` hs)) (freeNames g)
then handleError (tcRecoverable emode)
(resolveTC True False 10 g fn elabRec ist)
(movelast n)
else movelast n)
(ivs' \\ ivs)
-- HACK: If the name leaks into its type, it may leak out of
-- scope outside, so substitute in the outer scope.
expandLet n (case lookup n env of
Just (Let t v) -> v
other -> error ("Value not a let binding: " ++ show other))
solve
highlightSource nfc (AnnBoundName n False)
elab' ina _ (PGoal fc r n sc) = do
rty <- goal
attack
tyn <- getNameFrom (sMN 0 "letty")
claim tyn RType
valn <- getNameFrom (sMN 0 "letval")
claim valn (Var tyn)
letbind n (Var tyn) (Var valn)
focus valn
elabE (ina { e_inarg = True, e_intype = True }) (Just fc) (PApp fc r [pexp (delab ist rty)])
env <- get_env
computeLet n
elabE (ina { e_inarg = True }) (Just fc) sc
solve
-- elab' ina fc (PLet n Placeholder
-- (PApp fc r [pexp (delab ist rty)]) sc)
elab' ina _ tm@(PApp fc (PInferRef _ _ f) args) = do
rty <- goal
ds <- get_deferred
ctxt <- get_context
-- make a function type a -> b -> c -> ... -> rty for the
-- new function name
env <- get_env
argTys <- claimArgTys env args
fn <- getNameFrom (sMN 0 "inf_fn")
let fty = fnTy argTys rty
-- trace (show (ptm, map fst argTys)) $ focus fn
-- build and defer the function application
attack; deferType (mkN f) fty (map fst argTys); solve
-- elaborate the arguments, to unify their types. They all have to
-- be explicit.
mapM_ elabIArg (zip argTys args)
where claimArgTys env [] = return []
claimArgTys env (arg : xs) | Just n <- localVar env (getTm arg)
= do nty <- get_type (Var n)
ans <- claimArgTys env xs
return ((n, (False, forget nty)) : ans)
claimArgTys env (_ : xs)
= do an <- getNameFrom (sMN 0 "inf_argTy")
aval <- getNameFrom (sMN 0 "inf_arg")
claim an RType
claim aval (Var an)
ans <- claimArgTys env xs
return ((aval, (True, (Var an))) : ans)
fnTy [] ret = forget ret
fnTy ((x, (_, xt)) : xs) ret = RBind x (Pi Nothing xt RType) (fnTy xs ret)
localVar env (PRef _ _ x)
= case lookup x env of
Just _ -> Just x
_ -> Nothing
localVar env _ = Nothing
elabIArg ((n, (True, ty)), def) =
do focus n; elabE ina (Just fc) (getTm def)
elabIArg _ = return () -- already done, just a name
mkN n@(NS _ _) = n
mkN n@(SN _) = n
mkN n = case namespace info of
xs@(_:_) -> sNS n xs
_ -> n
elab' ina _ (PMatchApp fc fn)
= do (fn', imps) <- case lookupCtxtName fn (idris_implicits ist) of
[(n, args)] -> return (n, map (const True) args)
_ -> lift $ tfail (NoSuchVariable fn)
ns <- match_apply (Var fn') (map (\x -> (x,0)) imps)
solve
-- if f is local, just do a simple_app
-- FIXME: Anyone feel like refactoring this mess? - EB
elab' ina topfc tm@(PApp fc (PRef ffc hls f) args_in)
| pattern && not reflection && not (e_qq ina) && e_nomatching ina
= lift $ tfail $ Msg ("Attempting concrete match on polymorphic argument: " ++ show tm)
| otherwise = implicitApp $
do env <- get_env
ty <- goal
fty <- get_type (Var f)
ctxt <- get_context
annot <- findHighlight f
mapM_ checkKnownImplicit args_in
let args = insertScopedImps fc (normalise ctxt env fty) args_in
let unmatchableArgs = if pattern
then getUnmatchable (tt_ctxt ist) f
else []
-- trace ("BEFORE " ++ show f ++ ": " ++ show ty) $
when (pattern && not reflection && not (e_qq ina) && not (e_intype ina)
&& isTConName f (tt_ctxt ist)) $
lift $ tfail $ Msg ("No explicit types on left hand side: " ++ show tm)
-- trace (show (f, args_in, args)) $
if (f `elem` map fst env && length args == 1 && length args_in == 1)
then -- simple app, as below
do simple_app False
(elabE (ina { e_isfn = True }) (Just fc) (PRef ffc hls f))
(elabE (ina { e_inarg = True }) (Just fc) (getTm (head args)))
(show tm)
solve
mapM (uncurry highlightSource) $
(ffc, annot) : map (\f -> (f, annot)) hls
return []
else
do ivs <- get_instances
ps <- get_probs
-- HACK: we shouldn't resolve type classes if we're defining an instance
-- function or default definition.
let isinf = f == inferCon || tcname f
-- if f is a type class, we need to know its arguments so that
-- we can unify with them
case lookupCtxt f (idris_classes ist) of
[] -> return ()
_ -> do mapM_ setInjective (map getTm args)
-- maybe more things are solvable now
unifyProblems
let guarded = isConName f ctxt
-- trace ("args is " ++ show args) $ return ()
ns <- apply (Var f) (map isph args)
-- trace ("ns is " ++ show ns) $ return ()
-- mark any type class arguments as injective
when (not pattern) $ mapM_ checkIfInjective (map snd ns)
unifyProblems -- try again with the new information,
-- to help with disambiguation
ulog <- getUnifyLog
annot <- findHighlight f
mapM (uncurry highlightSource) $
(ffc, annot) : map (\f -> (f, annot)) hls
elabArgs ist (ina { e_inarg = e_inarg ina || not isinf })
[] fc False f
(zip ns (unmatchableArgs ++ repeat False))
(f == sUN "Force")
(map (\x -> getTm x) args) -- TODO: remove this False arg
imp <- if (e_isfn ina) then
do guess <- get_guess
env <- get_env
case safeForgetEnv (map fst env) guess of
Nothing ->
return []
Just rguess -> do
gty <- get_type rguess
let ty_n = normalise ctxt env gty
return $ getReqImps ty_n
else return []
-- Now we find out how many implicits we needed at the
-- end of the application by looking at the goal again
-- - Have another go, but this time add the
-- implicits (can't think of a better way than this...)
case imp of
rs@(_:_) | not pattern -> return rs -- quit, try again
_ -> do solve
hs <- get_holes
ivs' <- get_instances
-- Attempt to resolve any type classes which have 'complete' types,
-- i.e. no holes in them
when (not pattern || (e_inarg ina && not tcgen &&
not (e_guarded ina))) $
mapM_ (\n -> do focus n
g <- goal
env <- get_env
hs <- get_holes
if all (\n -> not (n `elem` hs)) (freeNames g)
then handleError (tcRecoverable emode)
(resolveTC False False 10 g fn elabRec ist)
(movelast n)
else movelast n)
(ivs' \\ ivs)
return []
where
-- Run the elaborator, which returns how many implicit
-- args were needed, then run it again with those args. We need
-- this because we have to elaborate the whole application to
-- find out whether any computations have caused more implicits
-- to be needed.
implicitApp :: ElabD [ImplicitInfo] -> ElabD ()
implicitApp elab
| pattern || intransform = do elab; return ()
| otherwise
= do s <- get
imps <- elab
case imps of
[] -> return ()
es -> do put s
elab' ina topfc (PAppImpl tm es)
checkKnownImplicit imp
| UnknownImp `elem` argopts imp
= lift $ tfail $ UnknownImplicit (pname imp) f
checkKnownImplicit _ = return ()
getReqImps (Bind x (Pi (Just i) ty _) sc)
= i : getReqImps sc
getReqImps _ = []
checkIfInjective n = do
env <- get_env
case lookup n env of
Nothing -> return ()
Just b ->
case unApply (normalise (tt_ctxt ist) env (binderTy b)) of
(P _ c _, args) ->
case lookupCtxtExact c (idris_classes ist) of
Nothing -> return ()
Just ci -> -- type class, set as injective
do mapM_ setinjArg (getDets 0 (class_determiners ci) args)
-- maybe we can solve more things now...
ulog <- getUnifyLog
probs <- get_probs
traceWhen ulog ("Injective now " ++ show args ++ "\n" ++ qshow probs) $
unifyProblems
probs <- get_probs
traceWhen ulog (qshow probs) $ return ()
_ -> return ()
setinjArg (P _ n _) = setinj n
setinjArg _ = return ()
getDets i ds [] = []
getDets i ds (a : as) | i `elem` ds = a : getDets (i + 1) ds as
| otherwise = getDets (i + 1) ds as
tacTm (PTactics _) = True
tacTm (PProof _) = True
tacTm _ = False
setInjective (PRef _ _ n) = setinj n
setInjective (PApp _ (PRef _ _ n) _) = setinj n
setInjective _ = return ()
elab' ina _ tm@(PApp fc f [arg]) =
erun fc $
do simple_app (not $ headRef f)
(elabE (ina { e_isfn = True }) (Just fc) f)
(elabE (ina { e_inarg = True }) (Just fc) (getTm arg))
(show tm)
solve
where headRef (PRef _ _ _) = True
headRef (PApp _ f _) = headRef f
headRef (PAlternative _ _ as) = all headRef as
headRef _ = False
elab' ina fc (PAppImpl f es) = do appImpl (reverse es) -- not that we look...
solve
where appImpl [] = elab' (ina { e_isfn = False }) fc f -- e_isfn not set, so no recursive expansion of implicits
appImpl (e : es) = simple_app False
(appImpl es)
(elab' ina fc Placeholder)
(show f)
elab' ina fc Placeholder
= do (h : hs) <- get_holes
movelast h
elab' ina fc (PMetavar nfc n) =
do ptm <- get_term
-- When building the metavar application, leave out the unique
-- names which have been used elsewhere in the term, since we
-- won't be able to use them in the resulting application.
let unique_used = getUniqueUsed (tt_ctxt ist) ptm
let n' = metavarName (namespace info) n
attack
psns <- getPSnames
n' <- defer unique_used n'
solve
highlightSource nfc (AnnName n' (Just MetavarOutput) Nothing Nothing)
elab' ina fc (PProof ts) = do compute; mapM_ (runTac True ist (elabFC info) fn) ts
elab' ina fc (PTactics ts)
| not pattern = do mapM_ (runTac False ist fc fn) ts
| otherwise = elab' ina fc Placeholder
elab' ina fc (PElabError e) = lift $ tfail e
elab' ina mfc (PRewrite fc substfn rule sc newg)
= elabRewrite (elab' ina mfc) ist fc substfn rule sc newg
elab' ina _ c@(PCase fc scr opts)
= do attack
tyn <- getNameFrom (sMN 0 "scty")
claim tyn RType
valn <- getNameFrom (sMN 0 "scval")
scvn <- getNameFrom (sMN 0 "scvar")
claim valn (Var tyn)
letbind scvn (Var tyn) (Var valn)
-- Start filling in the scrutinee type, if we can work one
-- out from the case options
let scrTy = getScrType (map fst opts)
case scrTy of
Nothing -> return ()
Just ty -> do focus tyn
elabE ina (Just fc) ty
focus valn
elabE (ina { e_inarg = True }) (Just fc) scr
-- Solve any remaining implicits - we need to solve as many
-- as possible before making the 'case' type
unifyProblems
matchProblems True
args <- get_env
envU <- mapM (getKind args) args
let namesUsedInRHS = nub $ scvn : concatMap (\(_,rhs) -> allNamesIn rhs) opts
-- Drop the unique arguments used in the term already
-- and in the scrutinee (since it's
-- not valid to use them again anyway)
--
-- Also drop unique arguments which don't appear explicitly
-- in either case branch so they don't count as used
-- unnecessarily (can only do this for unique things, since we
-- assume they don't appear implicitly in types)
ptm <- get_term
let inOpts = (filter (/= scvn) (map fst args)) \\ (concatMap (\x -> allNamesIn (snd x)) opts)
let argsDropped = filter (isUnique envU)
(nub $ allNamesIn scr ++ inApp ptm ++
inOpts)
let args' = filter (\(n, _) -> n `notElem` argsDropped) args
attack
cname' <- defer argsDropped (mkN (mkCaseName fc fn))
solve
-- if the scrutinee is one of the 'args' in env, we should
-- inspect it directly, rather than adding it as a new argument
let newdef = PClauses fc [] cname'
(caseBlock fc cname' scr
(map (isScr scr) (reverse args')) opts)
-- elaborate case
updateAux (\e -> e { case_decls = (cname', newdef) : case_decls e } )
-- if we haven't got the type yet, hopefully we'll get it later!
movelast tyn
solve
where mkCaseName fc (NS n ns) = NS (mkCaseName fc n) ns
mkCaseName fc n = SN (CaseN (FC' fc) n)
-- mkCaseName (UN x) = UN (x ++ "_case")
-- mkCaseName (MN i x) = MN i (x ++ "_case")
mkN n@(NS _ _) = n
mkN n = case namespace info of
xs@(_:_) -> sNS n xs
_ -> n
getScrType [] = Nothing
getScrType (f : os) = maybe (getScrType os) Just (getAppType f)
getAppType (PRef _ _ n) =
case lookupTyName n (tt_ctxt ist) of
[(n', ty)] | isDConName n' (tt_ctxt ist) ->
case unApply (getRetTy ty) of
(P _ tyn _, args) ->
Just (PApp fc (PRef fc [] tyn)
(map pexp (map (const Placeholder) args)))
_ -> Nothing
_ -> Nothing -- ambiguity is no help to us!
getAppType (PApp _ t as) = getAppType t
getAppType _ = Nothing
inApp (P _ n _) = [n]
inApp (App _ f a) = inApp f ++ inApp a
inApp (Bind n (Let _ v) sc) = inApp v ++ inApp sc
inApp (Bind n (Guess _ v) sc) = inApp v ++ inApp sc
inApp (Bind n b sc) = inApp sc
inApp _ = []
isUnique envk n = case lookup n envk of
Just u -> u
_ -> False
getKind env (n, _)
= case lookup n env of
Nothing -> return (n, False) -- can't happen, actually...
Just b ->
do ty <- get_type (forget (binderTy b))
case ty of
UType UniqueType -> return (n, True)
UType AllTypes -> return (n, True)
_ -> return (n, False)
tcName tm | (P _ n _, _) <- unApply tm
= case lookupCtxt n (idris_classes ist) of
[_] -> True
_ -> False
tcName _ = False
usedIn ns (n, b)
= n `elem` ns
|| any (\x -> x `elem` ns) (allTTNames (binderTy b))
elab' ina fc (PUnifyLog t) = do unifyLog True
elab' ina fc t
unifyLog False
elab' ina fc (PQuasiquote t goalt)
= do -- First extract the unquoted subterms, replacing them with fresh
-- names in the quasiquoted term. Claim their reflections to be
-- an inferred type (to support polytypic quasiquotes).
finalTy <- goal
(t, unq) <- extractUnquotes 0 t
let unquoteNames = map fst unq
mapM_ (\uqn -> claim uqn (forget finalTy)) unquoteNames
-- Save the old state - we need a fresh proof state to avoid
-- capturing lexically available variables in the quoted term.
ctxt <- get_context
datatypes <- get_datatypes
g_nextname <- get_global_nextname
saveState
updatePS (const .
newProof (sMN 0 "q") (constraintNS info) ctxt datatypes g_nextname $
P Ref (reflm "TT") Erased)
-- Re-add the unquotes, letting Idris infer the (fictional)
-- types. Here, they represent the real type rather than the type
-- of their reflection.
mapM_ (\n -> do ty <- getNameFrom (sMN 0 "unqTy")
claim ty RType
movelast ty
claim n (Var ty)
movelast n)
unquoteNames
-- Determine whether there's an explicit goal type, and act accordingly
-- Establish holes for the type and value of the term to be
-- quasiquoted
qTy <- getNameFrom (sMN 0 "qquoteTy")
claim qTy RType
movelast qTy
qTm <- getNameFrom (sMN 0 "qquoteTm")
claim qTm (Var qTy)
-- Let-bind the result of elaborating the contained term, so that
-- the hole doesn't disappear
nTm <- getNameFrom (sMN 0 "quotedTerm")
letbind nTm (Var qTy) (Var qTm)
-- Fill out the goal type, if relevant
case goalt of
Nothing -> return ()
Just gTy -> do focus qTy
elabE (ina { e_qq = True }) fc gTy
-- Elaborate the quasiquoted term into the hole
focus qTm
elabE (ina { e_qq = True }) fc t
end_unify
-- We now have an elaborated term. Reflect it and solve the
-- original goal in the original proof state, preserving highlighting
env <- get_env
EState _ _ _ hs _ _ <- getAux
loadState
updateAux (\aux -> aux { highlighting = hs })
let quoted = fmap (explicitNames . binderVal) $ lookup nTm env
isRaw = case unApply (normaliseAll ctxt env finalTy) of
(P _ n _, []) | n == reflm "Raw" -> True
_ -> False
case quoted of
Just q -> do ctxt <- get_context
(q', _, _) <- lift $ recheck (constraintNS info) ctxt [(uq, Lam Erased) | uq <- unquoteNames] (forget q) q
if pattern
then if isRaw
then reflectRawQuotePattern unquoteNames (forget q')
else reflectTTQuotePattern unquoteNames q'
else do if isRaw
then -- we forget q' instead of using q to ensure rechecking
fill $ reflectRawQuote unquoteNames (forget q')
else fill $ reflectTTQuote unquoteNames q'
solve
Nothing -> lift . tfail . Msg $ "Broken elaboration of quasiquote"
-- Finally fill in the terms or patterns from the unquotes. This
-- happens last so that their holes still exist while elaborating
-- the main quotation.
mapM_ elabUnquote unq
where elabUnquote (n, tm)
= do focus n
elabE (ina { e_qq = False }) fc tm
elab' ina fc (PUnquote t) = fail "Found unquote outside of quasiquote"
elab' ina fc (PQuoteName n False nfc) =
do fill $ reflectName n
solve
elab' ina fc (PQuoteName n True nfc) =
do ctxt <- get_context
env <- get_env
case lookup n env of
Just _ -> do fill $ reflectName n
solve
highlightSource nfc (AnnBoundName n False)
Nothing ->
case lookupNameDef n ctxt of
[(n', _)] -> do fill $ reflectName n'
solve
highlightSource nfc (AnnName n' Nothing Nothing Nothing)
[] -> lift . tfail . NoSuchVariable $ n
more -> lift . tfail . CantResolveAlts $ map fst more
elab' ina fc (PAs _ n t) = lift . tfail . Msg $ "@-pattern not allowed here"
elab' ina fc (PHidden t)
| reflection = elab' ina fc t
| otherwise
= do (h : hs) <- get_holes
-- Dotting a hole means that either the hole or any outer
-- hole (a hole outside any occurrence of it)
-- must be solvable by unification as well as being filled
-- in directly.
-- Delay dotted things to the end, then when we elaborate them
-- we can check the result against what was inferred
movelast h
delayElab 10 $ do hs <- get_holes
when (h `elem` hs) $ do
focus h
dotterm
elab' ina fc t
-- Elaborate a %runElab expression: elaborate the supplied term against
-- the reflected 'Elab ()' type, then run the resulting script in the
-- current elaboration state.
elab' ina fc (PRunElab fc' tm ns) =
  do attack
     n <- getNameFrom (sMN 0 "tacticScript")
     -- The script must have type Language.Reflection.Elab.Elab ()
     let scriptTy = RApp (Var (sNS (sUN "Elab")
                          ["Elab", "Reflection", "Language"]))
                        (Var unitTy)
     claim n scriptTy
     focus n
     attack -- to get an extra hole
     elab' ina (Just fc') tm
     script <- get_guess
     -- Reject scripts that still mention not-yet-defined case functions
     fullyElaborated script
     solve -- eliminate the hole. Because there are no references, the script is only in the binding
     env <- get_env
     runElabAction info ist (maybe fc' id fc) env script ns
     solve
elab' ina fc (PConstSugar constFC tm) =
-- Here we elaborate the contained term, then calculate
-- highlighting for constFC. The highlighting is the
-- highlighting for the outermost constructor of the result of
-- evaluating the elaborated term, if one exists (it always
-- should, but better to fail gracefully for something silly
-- like highlighting info). This is how implicit applications of
-- fromInteger get highlighted.
do saveState -- so we don't pollute the elaborated term
n <- getNameFrom (sMN 0 "cstI")
n' <- getNameFrom (sMN 0 "cstIhole")
g <- forget <$> goal
claim n' g
movelast n'
-- In order to intercept the elaborated value, we need to
-- let-bind it.
attack
letbind n g (Var n')
focus n'
elab' ina fc tm
env <- get_env
ctxt <- get_context
let v = fmap (normaliseAll ctxt env . finalise . binderVal)
(lookup n env)
loadState -- we have the highlighting - re-elaborate the value
elab' ina fc tm
case v of
Just val -> highlightConst constFC val
Nothing -> return ()
where highlightConst fc (P _ n _) =
highlightSource fc (AnnName n Nothing Nothing Nothing)
highlightConst fc (App _ f _) =
highlightConst fc f
highlightConst fc (Constant c) =
highlightSource fc (AnnConst c)
highlightConst _ _ = return ()
elab' ina fc x = fail $ "Unelaboratable syntactic form " ++ showTmImpls x
-- delay elaboration of 't', with priority 'pri' until after everything
-- else is done.
-- The delayed things with lower numbered priority will be elaborated
-- first. (In practice, this means delayed alternatives, then PHidden
-- things.)
delayElab pri t
    = updateAux (\e -> e { delayed_elab = delayed_elab e ++ [(pri, t)] })

-- Tag each environment entry with whether it is the name the case
-- scrutinee refers to (only a bare PRef can match an env entry).
isScr :: PTerm -> (Name, Binder Term) -> (Name, (Bool, Binder Term))
isScr (PRef _ _ n) (n', b) = (n', (n == n', b))
isScr _ (n', b) = (n', (False, b))

-- Build the clauses of the lifted top-level function generated for a
-- case block. 'n' is the generated function's name, 'scr' the original
-- scrutinee expression, 'env' the environment tagged by 'isScr', and
-- 'opts' the user-written (lhs, rhs) alternatives.
caseBlock :: FC -> Name
             -> PTerm -- original scrutinee
             -> [(Name, (Bool, Binder Term))] -> [(PTerm, PTerm)] -> [PClause]
caseBlock fc n scr env opts
    = let args' = findScr env
          args = map mkarg (map getNmScr args') in
          map (mkClause args) opts
  where -- Find the variable we want as the scrutinee and mark it as
        -- 'True'. If the scrutinee is in the environment, match on that
        -- otherwise match on the new argument we're adding.
        findScr ((n, (True, t)) : xs)
                   = (n, (True, t)) : scrName n xs
        findScr [(n, (_, t))] = [(n, (True, t))]
        findScr (x : xs) = x : findScr xs
        -- [] can't happen since scrutinee is in the environment!
        findScr [] = error "The impossible happened - the scrutinee was not in the environment"
        -- To make sure top level pattern name remains in scope, put
        -- it at the end of the environment
        scrName n [] = []
        scrName n [(_, t)] = [(n, t)]
        scrName n (x : xs) = x : scrName n xs
        getNmScr (n, (s, _)) = (n, s)
        -- Turn an environment name into an argument pattern for the lhs
        mkarg (n, s) = (PRef fc [] n, s)
        -- may be shadowed names in the new pattern - so replace the
        -- old ones with an _
        -- Also, names which don't appear on the rhs should not be
        -- fixed on the lhs, or this restricts the kind of matching
        -- we can do to non-dependent types.
        mkClause args (l, r)
              = let args' = map (shadowed (allNamesIn l)) args
                    args'' = map (implicitable (allNamesIn r ++
                                                 keepscrName scr)) args'
                    lhs = PApp (getFC fc l) (PRef NoFC [] n)
                             (map (mkLHSarg l) args'') in
                    PClause (getFC fc l) n lhs [] r []
        -- Keep scrutinee available if it's just a name (this makes
        -- the names in scope look better when looking at a hole on
        -- the rhs of a case)
        keepscrName (PRef _ _ n) = [n]
        keepscrName _ = []
        -- The scrutinee position gets the user's pattern; every other
        -- argument just re-binds the environment name.
        mkLHSarg l (tm, True) = pexp l
        mkLHSarg l (tm, False) = pexp tm
        shadowed new (PRef _ _ n, s) | n `elem` new = (Placeholder, s)
        shadowed new t = t
        implicitable rhs (PRef _ _ n, s) | n `notElem` rhs = (Placeholder, s)
        implicitable rhs t = t
        -- Best-effort source location for a clause, falling back to 'd'
        getFC d (PApp fc _ _) = fc
        getFC d (PRef fc _ _) = fc
        getFC d (PAlternative _ _ (x:_)) = getFC d x
        getFC d x = d
-- Fail if a term is not yet fully elaborated (e.g. if it contains
-- case block functions that don't yet exist)
fullyElaborated :: Term -> ElabD ()
fullyElaborated (P _ n _) =
  do estate <- getAux
     -- Names still recorded in case_decls are pending case functions
     case lookup n (case_decls estate) of
       Nothing -> return ()
       Just _ -> lift . tfail $ ElabScriptStaging n
fullyElaborated (Bind n b body) = fullyElaborated body >> for_ b fullyElaborated
fullyElaborated (App _ l r) = fullyElaborated l >> fullyElaborated r
fullyElaborated (Proj t _) = fullyElaborated t
fullyElaborated _ = return ()

-- If the goal type is a "Lazy", then try elaborating via 'Delay'
-- first. We need to do this brute force approach, rather than anything
-- more precise, since there may be various other ambiguities to resolve
-- first.
insertLazy :: PTerm -> ElabD PTerm
-- Terms already headed by Delay/Force, or already coerced, are left alone
insertLazy t@(PApp _ (PRef _ _ (UN l)) _) | l == txt "Delay" = return t
insertLazy t@(PApp _ (PRef _ _ (UN l)) _) | l == txt "Force" = return t
insertLazy (PCoerced t) = return t
-- Don't add a delay to pattern variables, since they can be forced
-- on the rhs
insertLazy t@(PPatvar _ _) | pattern = return t
insertLazy t =
  do ty <- goal
     env <- get_env
     let (tyh, _) = unApply (normalise (tt_ctxt ist) env ty)
     -- Try the delayed form first, falling back to the original term
     let tries = [mkDelay env t, t]
     case tyh of
       P _ (UN l) _ | l == txt "Delayed"
          -> return (PAlternative [] FirstSuccess tries)
       _ -> return t
  where
    mkDelay env (PAlternative ms b xs) = PAlternative ms b (map (mkDelay env) xs)
    mkDelay env t
        = let fc = fileFC "Delay" in
              addImplBound ist (map fst env) (PApp fc (PRef fc [] (sUN "Delay"))
                                              [pexp t])
-- Don't put implicit coercions around applications which are marked
-- as '%noImplicit', or around case blocks, otherwise we get exponential
-- blowup especially where there are errors deep in large expressions.
notImplicitable (PApp _ f _) = notImplicitable f
-- TMP HACK no coercing on bind (make this configurable)
notImplicitable (PRef _ _ n)
     -- Don't coerce things with the NoImplicit flag set
     | [opts] <- lookupCtxt n (idris_flags ist)
        = NoImplicit `elem` opts
notImplicitable (PAlternative _ _ as) = any notImplicitable as
-- case is tricky enough without implicit coercions! If they are needed,
-- they can go in the branches separately.
notImplicitable (PCase _ _ _) = True
notImplicitable _ = False

-- Elaboration works more smoothly if we expand function applications
-- to their full arity and elaborate it all at once (better error messages
-- in particular)
expandToArity tm@(PApp fc f a) = do
   env <- get_env
   case fullApp tm of
        -- if f is global, leave it alone because we've already
        -- expanded it to the right arity
        PApp fc ftm@(PRef _ _ f) args | Just aty <- lookup f env ->
           do let a = length (getArgTys (normalise (tt_ctxt ist) env (binderTy aty)))
              return (mkPApp fc a ftm args)
        _ -> return tm
expandToArity t = return t

-- Flatten nested applications: (f a) b ==> f a b
fullApp (PApp _ (PApp fc f args) xs) = fullApp (PApp fc f (args ++ xs))
fullApp x = x
-- Fill in scoped (non-top-level) implicit arguments of a Pi type:
-- interface constraints become PResolveTC, other scoped implicits
-- become placeholders; explicit arguments are consumed from 'xs'.
insertScopedImps fc (Bind n (Pi im@(Just i) _ _) sc) xs
  | tcinstance i && not (toplevel_imp i)
      = pimp n (PResolveTC fc) True : insertScopedImps fc sc xs
  | not (toplevel_imp i)
      = pimp n Placeholder True : insertScopedImps fc sc xs
insertScopedImps fc (Bind n (Pi _ _ _) sc) (x : xs)
    = x : insertScopedImps fc sc xs
insertScopedImps _ _ xs = xs

-- If the goal is a Pi binding a scoped implicit, wrap the term in a
-- lambda for that implicit so elaboration can proceed underneath it.
insertImpLam ina t =
    do ty <- goal
       env <- get_env
       let ty' = normalise (tt_ctxt ist) env ty
       addLam ty' t
  where
    -- just one level at a time
    addLam (Bind n (Pi (Just _) _ _) sc) t =
           do impn <- unique_hole n -- (sMN 0 "scoped_imp")
              if e_isfn ina -- apply to an implicit immediately
                 then return (PApp emptyFC
                                   (PLam emptyFC impn NoFC Placeholder t)
                                   [pexp Placeholder])
                 else return (PLam emptyFC impn NoFC Placeholder t)
    addLam _ t = return t
-- Wrap a term in alternatives for every coercion whose target is the
-- goal type, so unification can pick a coerced form if the bare term
-- fails. Case blocks and %noImplicit terms are exempt (see
-- notImplicitable).
insertCoerce ina t@(PCase _ _ _) = return t
insertCoerce ina t | notImplicitable t = return t
insertCoerce ina t =
    do ty <- goal
       -- Check for possible coercions to get to the goal
       -- and add them as 'alternatives'
       env <- get_env
       let ty' = normalise (tt_ctxt ist) env ty
       let cs = getCoercionsTo ist ty'
       let t' = case (t, cs) of
                     -- Already coerced once: don't coerce again
                     (PCoerced tm, _) -> tm
                     (_, []) -> t
                     (_, cs) -> PAlternative [] TryImplicit
                                    (t : map (mkCoerce env t) cs)
       return t'
  where
    mkCoerce env (PAlternative ms aty tms) n
        = PAlternative ms aty (map (\t -> mkCoerce env t n) tms)
    mkCoerce env t n = let fc = maybe (fileFC "Coercion") id (highestFC t) in
                           addImplBound ist (map fst env)
                               (PApp fc (PRef fc [] n) [pexp (PCoerced t)])
-- | Elaborate the arguments to a function, one at a time, focusing each
-- argument's hole in turn. Placeholder arguments are deferred (moved to
-- the end of the hole queue) rather than elaborated.
elabArgs :: IState -- ^ The current Idris state
         -> ElabCtxt -- ^ (in an argument, guarded, in a type, in a qquote)
         -> [Bool]
         -> FC -- ^ Source location
         -> Bool
         -> Name -- ^ Name of the function being applied
         -> [((Name, Name), Bool)] -- ^ (Argument Name, Hole Name, unmatchable)
         -> Bool -- ^ under a 'force'
         -> [PTerm] -- ^ argument
         -> ElabD ()
elabArgs ist ina failed fc retry f [] force _ = return ()
elabArgs ist ina failed fc r f (((argName, holeName), unm):ns) force (t : args)
    = do hs <- get_holes
         -- The hole may already have been solved by unification;
         -- only elaborate if it is still open.
         if holeName `elem` hs then
            do focus holeName
               case t of
                  Placeholder -> do movelast holeName
                                    elabArgs ist ina failed fc r f ns force args
                  _ -> elabArg t
            else elabArgs ist ina failed fc r f ns force args
  where elabArg t =
          do -- solveAutos ist fn False
             -- Record which argument we are elaborating, so failures can
             -- report it; done_elaborating_arg pops the record again.
             now_elaborating fc f argName
             wrapErr f argName $ do
               hs <- get_holes
               tm <- get_term
               -- No coercing under an explicit Force (or it can Force/Delay
               -- recursively!)
               let elab = if force then elab' else elabE
               -- NOTE(review): 'failed'' is bound but the recursion below
               -- passes the original 'failed' — appears intentional
               -- (retry bookkeeping is vestigial); confirm before changing.
               failed' <- -- trace (show (n, t, hs, tm)) $
                          -- traceWhen (not (null cs)) (show ty ++ "\n" ++ showImp True t) $
                          do focus holeName;
                             g <- goal
                             -- Can't pattern match on polymorphic goals
                             poly <- goal_polymorphic
                             ulog <- getUnifyLog
                             traceWhen ulog ("Elaborating argument " ++ show (argName, holeName, g)) $
                              elab (ina { e_nomatching = unm && poly }) (Just fc) t
                             return failed
               done_elaborating_arg f argName
               elabArgs ist ina failed fc r f ns force args
        -- Run 'action', and on failure attach the in-progress argument
        -- stack to the error before re-raising it.
        wrapErr f argName action =
          do elabState <- get
             while <- elaborating_app
             let while' = map (\(x, y, z)-> (y, z)) while
             (result, newState) <- case runStateT action elabState of
                                     OK (res, newState) -> return (res, newState)
                                     Error e -> do done_elaborating_arg f argName
                                                   lift (tfail (elaboratingArgErr while' e))
             put newState
             return result
elabArgs _ _ _ _ _ _ (((arg, hole), _) : _) _ [] =
  fail $ "Can't elaborate these args: " ++ show arg ++ " " ++ show hole
-- Record an automatically-bound implicit (one the machine inserted,
-- rather than the user writing it at the top level).
addAutoBind :: Plicity -> Name -> ElabD ()
addAutoBind (Imp _ _ _ _ False) n
     = updateAux (\est -> est { auto_binds = n : auto_binds est })
addAutoBind _ _ = return ()

-- In a type declaration, warn if an auto-bound implicit name shadows a
-- global whose type unifies with the goal — the user probably meant the
-- global, not a fresh implicit.
testImplicitWarning :: FC -> Name -> Type -> ElabD ()
testImplicitWarning fc n goal
    | implicitable n && emode == ETyDecl
       = do env <- get_env
            est <- getAux
            when (n `elem` auto_binds est) $
                tryUnify env (lookupTyName n (tt_ctxt ist))
    | otherwise = return ()
  where
    -- Try each candidate global in turn; the first one whose type
    -- unifies with the goal triggers the warning.
    tryUnify env [] = return ()
    tryUnify env ((nm, ty) : ts)
        = do inj <- get_inj
             hs <- get_holes
             case unify (tt_ctxt ist) env (ty, Nothing) (goal, Nothing)
                        inj hs [] [] of
                  OK _ ->
                     updateAux (\est -> est { implicit_warnings =
                                     (fc, nm) : implicit_warnings est })
                  _ -> tryUnify env ts
-- For every alternative, look at the function at the head. Automatically resolve
-- any nested alternatives where that function is also at the head
pruneAlt :: [PTerm] -> [PTerm]
pruneAlt = map pruneTop
  where
    -- Only applications with a named head are pruned; everything else
    -- passes through untouched.
    pruneTop tm =
      case tm of
        PApp fc1 (PRef fc2 hls hd) argList ->
          PApp fc1 (PRef fc2 hls hd) (map (fmap (resolve hd)) argList)
        other -> other

    -- Inside the arguments, collapse an alternative when exactly one of
    -- its (recursively resolved) members is headed by the same function
    -- as the outer application.
    resolve hd (PAlternative ms a alts) =
      let resolved = map (resolve hd) alts in
      case filter (sameHead hd) resolved of
        [only] -> only
        _      -> PAlternative ms a resolved
    resolve hd (PApp fc f argList) =
      PApp fc (resolve hd f) (map (fmap (resolve hd)) argList)
    resolve _ tm = tm

    -- Does the term's head (after unwrapping applications) name 'hd'?
    -- Non-applications are conservatively kept.
    sameHead hd (PApp _ (PRef _ _ hd') _) = hd == hd'
    sameHead hd (PApp _ f _)              = sameHead hd f
    sameHead _ _                          = True
-- Rule out alternatives that don't return the same type as the head of the goal
-- (If there are none left as a result, do nothing)
-- Discard alternatives whose return type cannot match the goal.
-- Falls through its clauses: a locally-bound head wins outright; a
-- named goal head filters by plausibility; anything else keeps all
-- alternatives.
pruneByType :: Env -> Term -> -- head of the goal
               Type -> -- goal
               IState -> [PTerm] -> [PTerm]
-- if an alternative has a locally bound name at the head, take it
pruneByType env t goalty c as
   | Just a <- locallyBound as = [a]
  where
    locallyBound [] = Nothing
    locallyBound (t:ts)
       | Just n <- getName t,
         n `elem` map fst env = Just t
       | otherwise = locallyBound ts
    getName (PRef _ _ n) = Just n
    getName (PApp _ (PRef _ _ (UN l)) [_, _, arg]) -- ignore Delays
       | l == txt "Delay" = getName (getTm arg)
    getName (PApp _ f _) = getName f
    getName (PHidden t) = getName t
    getName _ = Nothing

-- 'n' is the name at the head of the goal type
pruneByType env (P _ n _) goalty ist as
-- if the goal type is polymorphic, keep everything
   | Nothing <- lookupTyExact n ctxt = as
-- if the goal type is a ?metavariable, keep everything
   | Just _ <- lookup n (idris_metavars ist) = as
   | otherwise
       = let asV = filter (headIs True n) as
             as' = filter (headIs False n) as in
             case as' of
               [] -> asV -- fall back to the variable-tolerant filter
               _ -> as'
  where
    ctxt = tt_ctxt ist

    -- Get the function at the head of the alternative and see if it's
    -- a plausible match against the goal type. Keep if so. Also keep if
    -- there is a possible coercion to the goal type.
    headIs var f (PRef _ _ f') = typeHead var f f'
    headIs var f (PApp _ (PRef _ _ (UN l)) [_, _, arg])
        | l == txt "Delay" = headIs var f (getTm arg)
    headIs var f (PApp _ (PRef _ _ f') _) = typeHead var f f'
    headIs var f (PApp _ f' _) = headIs var f f'
    headIs var f (PPi _ _ _ _ sc) = headIs var f sc
    headIs var f (PHidden t) = headIs var f t
    headIs var f t = True -- keep if it's not an application

    typeHead var f f'
        = -- trace ("Trying " ++ show f' ++ " for " ++ show n) $
          case lookupTyExact f' ctxt of
               Just ty -> case unApply (getRetTy ty) of
                            -- Returns a *different* type constructor:
                            -- definitely not a match.
                            (P _ ctyn _, _) | isTConName ctyn ctxt && not (ctyn == f)
                                  -> False
                            _ -> let ty' = normalise ctxt [] ty in
                                     -- trace ("Trying " ++ show (getRetTy ty') ++ " for " ++ show goalty) $
                                     case unApply (getRetTy ty') of
                                          (V _, _) ->
                                             isPlausible ist var env n ty
                                          _ -> matching (getRetTy ty') goalty
                                                 || isCoercion (getRetTy ty') goalty
                          -- May be useful to keep for debugging purposes for a bit:
                          -- let res = matching (getRetTy ty') goalty in
                          --     traceWhen (not res)
                          --        ("Rejecting " ++ show (getRetTy ty', goalty)) res
               _ -> False

    -- If the goal is a constructor, it must match the suggested function type
    matching (P _ ctyn _) (P _ n' _)
         | isTConName n' ctxt = ctyn == n'
         | otherwise = True
    -- Variables match anything
    matching (V _) _ = True
    matching _ (V _) = True
    matching _ (P _ n _) = not (isTConName n ctxt)
    matching (P _ n _) _ = not (isTConName n ctxt)
    -- Binders are a plausible match, so keep them
    matching (Bind n _ sc) _ = True
    matching _ (Bind n _ sc) = True
    -- If we hit a function name, it's a plausible match
    matching (App _ (P _ f _) _) _ | not (isConName f ctxt) = True
    matching _ (App _ (P _ f _) _) | not (isConName f ctxt) = True
    -- Otherwise, match the rest of the structure
    matching (App _ f a) (App _ f' a') = matching f f' && matching a a'
    matching (TType _) (TType _) = True
    matching (UType _) (UType _) = True
    matching l r = l == r

    -- Return whether there is a possible coercion between the return type
    -- of an alternative and the goal type
    isCoercion rty gty | (P _ r _, _) <- unApply rty
                            = not (null (getCoercionsBetween r gty))
    isCoercion _ _ = False

    getCoercionsBetween :: Name -> Type -> [Name]
    getCoercionsBetween r goal
         = let cs = getCoercionsTo ist goal in
               findCoercions r cs
        where findCoercions t [] = []
              findCoercions t (n : ns) =
                 let ps = case lookupTy n (tt_ctxt ist) of
                               [ty'] -> let as = map snd (getArgTys (normalise (tt_ctxt ist) [] ty')) in
                                            [n | any useR as]
                               _ -> [] in
                     ps ++ findCoercions t ns
              -- Does some argument of the coercion consume an 'r'?
              useR ty =
                  case unApply (getRetTy ty) of
                       (P _ t _, _) -> t == r
                       _ -> False

-- Goal head is not a name: no basis for pruning, keep everything.
pruneByType _ t _ _ as = as
-- Could the name feasibly be the return type?
-- If there is a type class constraint on the return type, and no instance
-- in the environment or globally for that name, then no
-- Otherwise, yes
-- (FIXME: This isn't complete, but I'm leaving it here and coming back
-- to it later - just returns 'var' for now. EB)
-- Could the name feasibly be the return type? Collects the interface
-- constraints on the candidate's type; if the return type is a bound
-- variable, defer to 'var' (see FIXME above — constraint checking is
-- not yet implemented).
isPlausible :: IState -> Bool -> Env -> Name -> Type -> Bool
isPlausible ist var env n ty
    = let (hvar, classes) = collectConstraints [] [] ty in
          case hvar of
               Nothing -> True
               Just rth -> var -- trace (show (rth, classes)) var
  where
    -- Walk the Pi telescope, accumulating (constraint type, instance
    -- names) for every argument that is an interface constraint, and
    -- return the return-type head if it is a bound variable.
    collectConstraints :: [Name] -> [(Term, [Name])] -> Type ->
                                (Maybe Name, [(Term, [Name])])
    collectConstraints env tcs (Bind n (Pi _ ty _) sc)
        = let tcs' = case unApply ty of
                          (P _ c _, _) ->
                              case lookupCtxtExact c (idris_classes ist) of
                                   Just tc -> ((ty, map fst (class_instances tc))
                                                   : tcs)
                                   Nothing -> tcs
                          _ -> tcs
                          in
              collectConstraints (n : env) tcs' sc
    collectConstraints env tcs t
        -- NOTE(review): 'env !! i' is a partial index; presumably de
        -- Bruijn indices are always in range here — confirm.
        | (V i, _) <- unApply t = (Just (env !! i), tcs)
        | otherwise = (Nothing, tcs)
-- | Use the local elab context to work out the highlighting for a name
findHighlight :: Name -> ElabD OutputAnnotation
findHighlight n =
  do ctxt <- get_context
     env <- get_env
     -- A locally bound name wins over a global of the same name.
     case (lookup n env, lookupTyExact n ctxt) of
       (Just _, _)       -> return (AnnBoundName n False)
       (Nothing, Just _) -> return (AnnName n Nothing Nothing Nothing)
       _                 -> lift . tfail . InternalMsg $
                              "Can't find name " ++ show n
-- Try again to solve auto implicits
solveAuto :: IState -> Name -> Bool -> (Name, [FailContext]) -> ElabD ()
solveAuto ist fn ambigok (n, failc)
= do hs <- get_holes
when (not (null hs)) $ do
env <- get_env
g <- goal
handleError cantsolve (when (n `elem` hs) $ do
focus n
isg <- is_guess -- if it's a guess, we're working on it recursively, so stop
when (not isg) $
proofSearch' ist True ambigok 100 True Nothing fn [] [])
(lift $ Error (addLoc failc
(CantSolveGoal g (map (\(n, b) -> (n, binderTy b)) env))))
return ()
where addLoc (FailContext fc f x : prev) err
= At fc (ElaboratingArg f x
(map (\(FailContext _ f' x') -> (f', x')) prev) err)
addLoc _ err = err
cantsolve (CantSolveGoal _ _) = True
cantsolve (InternalMsg _) = True
cantsolve (At _ e) = cantsolve e
cantsolve (Elaborating _ _ _ e) = cantsolve e
cantsolve (ElaboratingArg _ _ _ e) = cantsolve e
cantsolve _ = False
-- Attempt every recorded auto-implicit hole in turn.
solveAutos :: IState -> Name -> Bool -> ElabD ()
solveAutos ist fn ambigok =
  get_autos >>= mapM_ (\(n, (failc, _)) -> solveAuto ist fn ambigok (n, failc))
-- Return true if the given error suggests a type class failure is
-- recoverable
tcRecoverable :: ElabMode -> Err -> Bool
tcRecoverable mode = go
  where
    -- Unwrap location/argument annotations, then decide on the
    -- underlying error: only CantResolve consults the mode.
    go (CantResolve f _ _) =
      case mode of
        ERHS    -> f
        ETyDecl -> f
        _       -> True
    go (ElaboratingArg _ _ _ err) = go err
    go (At _ err)                 = go err
    go _                          = True
-- Convenience wrappers that run the corresponding tactic with the
-- top-level elaborator ('elab' at ERHS mode, fresh "tac" name) plugged
-- in for any subterm elaboration the tactic needs.
trivial' ist
    = trivial (elab ist toplevel ERHS [] (sMN 0 "tac")) ist
trivialHoles' psn h ist
    = trivialHoles psn h (elab ist toplevel ERHS [] (sMN 0 "tac")) ist
proofSearch' ist rec ambigok depth prv top n psns hints
    = do unifyProblems
         proofSearch rec prv ambigok (not prv) depth
                     (elab ist toplevel ERHS [] (sMN 0 "tac")) top n psns hints ist
resolveTC' di mv depth tm n ist
    = resolveTC di mv depth tm n (elab ist toplevel ERHS [] (sMN 0 "tac")) ist
-- Collect the deferred definitions (GHole bindings) in a term into the
-- State accumulator, recording each hole's arity, owner and type once
-- (first occurrence wins), and return the term with types rewritten
-- the same way.
collectDeferred :: Maybe Name -> [Name] -> Context ->
                   Term -> State [(Name, (Int, Maybe Name, Type, [Name]))] Term
collectDeferred top casenames ctxt tm = cd [] tm
  where
    cd env (Bind n (GHole i psns t) app) =
        do ds <- get
           -- Recurse into the hole's type first, then record it if new
           t' <- collectDeferred top casenames ctxt t
           when (not (n `elem` map fst ds)) $ put (ds ++ [(n, (i, top, t', psns))])
           cd env app
    cd env (Bind n b t)
         = do b' <- cdb b
              t' <- cd ((n, b) : env) t
              return (Bind n b' t')
       where
         cdb (Let t v)   = liftM2 Let (cd env t) (cd env v)
         cdb (Guess t v) = liftM2 Guess (cd env t) (cd env v)
         cdb b           = do ty' <- cd env (binderTy b)
                              return (b { binderTy = ty' })
    cd env (App s f a) = liftM2 (App s) (cd env f)
                                        (cd env a)
    cd env t = return t
-- Implement the 'case'/'induction' tactics: elaborate the scrutinee
-- into a fresh let binding, then apply casetac (or induction when
-- 'ind' is True) to the bound value.
case_ :: Bool -> Bool -> IState -> Name -> PTerm -> ElabD ()
case_ ind autoSolve ist fn tm = do
  attack
  tyn <- getNameFrom (sMN 0 "ity")
  claim tyn RType
  valn <- getNameFrom (sMN 0 "ival")
  claim valn (Var tyn)
  letn <- getNameFrom (sMN 0 "irule")
  letbind letn (Var tyn) (Var valn)
  focus valn
  elab ist toplevel ERHS [] (sMN 0 "tac") tm
  env <- get_env
  -- NOTE(review): partial pattern — 'letn' was just let-bound above so
  -- the lookup presumably always succeeds; a miss would be a crash.
  let (Just binding) = lookup letn env
  let val = binderVal binding
  if ind then induction (forget val)
         else casetac (forget val)
  when autoSolve solveAll
-- | Compute the appropriate name for a top-level metavariable
metavarName :: [String] -> Name -> Name
metavarName ns n =
  case n of
    NS _ _ -> n                 -- already qualified: leave untouched
    _      -> case ns of
                []    -> n      -- no enclosing namespace to apply
                (_:_) -> sNS n ns
runElabAction :: ElabInfo -> IState -> FC -> Env -> Term -> [String] -> ElabD Term
runElabAction info ist fc env tm ns = do tm' <- eval tm
runTacTm tm'
where
eval tm = do ctxt <- get_context
return $ normaliseAll ctxt env (finalise tm)
returnUnit = return $ P (DCon 0 0 False) unitCon (P (TCon 0 0) unitTy Erased)
patvars :: [(Name, Term)] -> Term -> ([(Name, Term)], Term)
patvars ns (Bind n (PVar t) sc) = patvars ((n, t) : ns) (instantiate (P Bound n t) sc)
patvars ns tm = (ns, tm)
pullVars :: (Term, Term) -> ([(Name, Term)], Term, Term)
pullVars (lhs, rhs) = (fst (patvars [] lhs), snd (patvars [] lhs), snd (patvars [] rhs)) -- TODO alpha-convert rhs
requireError :: Err -> ElabD a -> ElabD ()
requireError orErr elab =
do state <- get
case runStateT elab state of
OK (_, state') -> lift (tfail orErr)
Error e -> return ()
-- create a fake TT term for the LHS of an impossible case
fakeTT :: Raw -> Term
fakeTT (Var n) =
case lookupNameDef n (tt_ctxt ist) of
[(n', TyDecl nt _)] -> P nt n' Erased
_ -> P Ref n Erased
fakeTT (RBind n b body) = Bind n (fmap fakeTT b) (fakeTT body)
fakeTT (RApp f a) = App Complete (fakeTT f) (fakeTT a)
fakeTT RType = TType (UVar [] (-1))
fakeTT (RUType u) = UType u
fakeTT (RConstant c) = Constant c
defineFunction :: RFunDefn Raw -> ElabD ()
defineFunction (RDefineFun n clauses) =
do ctxt <- get_context
ty <- maybe (fail "no type decl") return $ lookupTyExact n ctxt
let info = CaseInfo True True False -- TODO document and figure out
clauses' <- forM clauses (\case
RMkFunClause lhs rhs ->
do (lhs', lty) <- lift $ check ctxt [] lhs
(rhs', rty) <- lift $ check ctxt [] rhs
lift $ converts ctxt [] lty rty
return $ Right (lhs', rhs')
RMkImpossibleClause lhs ->
do requireError (Msg "Not an impossible case") . lift $
check ctxt [] lhs
return $ Left (fakeTT lhs))
let clauses'' = map (\case Right c -> pullVars c
Left lhs -> let (ns, lhs') = patvars [] lhs
in (ns, lhs', Impossible))
clauses'
let clauses''' = map (\(ns, lhs, rhs) -> (map fst ns, lhs, rhs)) clauses''
ctxt'<- lift $
addCasedef n (const [])
info False (STerm Erased)
True False -- TODO what are these?
(map snd $ getArgTys ty) [] -- TODO inaccessible types
clauses'
clauses'''
clauses'''
clauses'''
clauses'''
ty
ctxt
set_context ctxt'
updateAux $ \e -> e { new_tyDecls = RClausesInstrs n clauses'' : new_tyDecls e}
return ()
checkClosed :: Raw -> Elab' aux (Term, Type)
checkClosed tm = do ctxt <- get_context
(val, ty) <- lift $ check ctxt [] tm
return $! (finalise val, finalise ty)
-- | Add another argument to a Pi
mkPi :: RFunArg -> Raw -> Raw
mkPi arg rTy = RBind (argName arg) (Pi Nothing (argTy arg) (RUType AllTypes)) rTy
mustBeType ctxt tm ty =
case normaliseAll ctxt [] (finalise ty) of
UType _ -> return ()
TType _ -> return ()
ty' -> lift . tfail . InternalMsg $
show tm ++ " is not a type: it's " ++ show ty'
mustNotBeDefined ctxt n =
case lookupDefExact n ctxt of
Just _ -> lift . tfail . InternalMsg $
show n ++ " is already defined."
Nothing -> return ()
-- | Prepare a constructor to be added to a datatype being defined here
prepareConstructor :: Name -> RConstructorDefn -> ElabD (Name, [PArg], Type)
prepareConstructor tyn (RConstructor cn args resTy) =
do ctxt <- get_context
-- ensure the constructor name is not qualified, and
-- construct a qualified one
notQualified cn
let qcn = qualify cn
-- ensure that the constructor name is not defined already
mustNotBeDefined ctxt qcn
-- construct the actual type for the constructor
let cty = foldr mkPi resTy args
(checkedTy, ctyTy) <- lift $ check ctxt [] cty
mustBeType ctxt checkedTy ctyTy
-- ensure that the constructor builds the right family
case unApply (getRetTy (normaliseAll ctxt [] (finalise checkedTy))) of
(P _ n _, _) | n == tyn -> return ()
t -> lift . tfail . Msg $ "The constructor " ++ show cn ++
" doesn't construct " ++ show tyn ++
" (return type is " ++ show t ++ ")"
-- add temporary type declaration for constructor (so it can
-- occur in later constructor types)
set_context (addTyDecl qcn (DCon 0 0 False) checkedTy ctxt)
-- Save the implicits for high-level Idris
let impls = map rFunArgToPArg args
return (qcn, impls, checkedTy)
where
notQualified (NS _ _) = lift . tfail . Msg $ "Constructor names may not be qualified"
notQualified _ = return ()
qualify n = case tyn of
(NS _ ns) -> NS n ns
_ -> n
getRetTy :: Type -> Type
getRetTy (Bind _ (Pi _ _ _) sc) = getRetTy sc
getRetTy ty = ty
elabScriptStuck :: Term -> ElabD a
elabScriptStuck x = lift . tfail $ ElabScriptStuck x
-- Should be dependent
tacTmArgs :: Int -> Term -> [Term] -> ElabD [Term]
tacTmArgs l t args | length args == l = return args
| otherwise = elabScriptStuck t -- Probably should be an argument size mismatch internal error
-- | Do a step in the reflected elaborator monad. The input is the
-- step, the output is the (reflected) term returned.
runTacTm :: Term -> ElabD Term
runTacTm tac@(unApply -> (P _ n _, args))
| n == tacN "Prim__Solve"
= do ~[] <- tacTmArgs 0 tac args -- patterns are irrefutable because `tacTmArgs` returns lists of exactly the size given to it as first argument
solve
returnUnit
| n == tacN "Prim__Goal"
= do ~[] <- tacTmArgs 0 tac args
hs <- get_holes
case hs of
(h : _) -> do t <- goal
fmap fst . checkClosed $
rawPair (Var (reflm "TTName"), Var (reflm "TT"))
(reflectName h, reflect t)
[] -> lift . tfail . Msg $
"Elaboration is complete. There are no goals."
| n == tacN "Prim__Holes"
= do ~[] <- tacTmArgs 0 tac args
hs <- get_holes
fmap fst . checkClosed $
mkList (Var $ reflm "TTName") (map reflectName hs)
| n == tacN "Prim__Guess"
= do ~[] <- tacTmArgs 0 tac args
g <- get_guess
fmap fst . checkClosed $ reflect g
| n == tacN "Prim__LookupTy"
= do ~[name] <- tacTmArgs 1 tac args
n' <- reifyTTName name
ctxt <- get_context
let getNameTypeAndType = \case Function ty _ -> (Ref, ty)
TyDecl nt ty -> (nt, ty)
Operator ty _ _ -> (Ref, ty)
CaseOp _ ty _ _ _ _ -> (Ref, ty)
-- Idris tuples nest to the right
reflectTriple (x, y, z) =
raw_apply (Var pairCon) [ Var (reflm "TTName")
, raw_apply (Var pairTy) [Var (reflm "NameType"), Var (reflm "TT")]
, x
, raw_apply (Var pairCon) [ Var (reflm "NameType"), Var (reflm "TT")
, y, z]]
let defs = [ reflectTriple (reflectName n, reflectNameType nt, reflect ty)
| (n, def) <- lookupNameDef n' ctxt
, let (nt, ty) = getNameTypeAndType def ]
fmap fst . checkClosed $
rawList (raw_apply (Var pairTy) [ Var (reflm "TTName")
, raw_apply (Var pairTy) [ Var (reflm "NameType")
, Var (reflm "TT")]])
defs
| n == tacN "Prim__LookupDatatype"
= do ~[name] <- tacTmArgs 1 tac args
n' <- reifyTTName name
datatypes <- get_datatypes
ctxt <- get_context
fmap fst . checkClosed $
rawList (Var (tacN "Datatype"))
(map reflectDatatype (buildDatatypes ist n'))
| n == tacN "Prim__LookupFunDefn"
= do ~[name] <- tacTmArgs 1 tac args
n' <- reifyTTName name
fmap fst . checkClosed $
rawList (RApp (Var $ tacN "FunDefn") (Var $ reflm "TT"))
(map reflectFunDefn (buildFunDefns ist n'))
| n == tacN "Prim__LookupArgs"
= do ~[name] <- tacTmArgs 1 tac args
n' <- reifyTTName name
let listTy = Var (sNS (sUN "List") ["List", "Prelude"])
listFunArg = RApp listTy (Var (tacN "FunArg"))
-- Idris tuples nest to the right
let reflectTriple (x, y, z) =
raw_apply (Var pairCon) [ Var (reflm "TTName")
, raw_apply (Var pairTy) [listFunArg, Var (reflm "Raw")]
, x
, raw_apply (Var pairCon) [listFunArg, Var (reflm "Raw")
, y, z]]
let out =
[ reflectTriple (reflectName fn, reflectList (Var (tacN "FunArg")) (map reflectArg args), reflectRaw res)
| (fn, pargs) <- lookupCtxtName n' (idris_implicits ist)
, (args, res) <- getArgs pargs . forget <$>
maybeToList (lookupTyExact fn (tt_ctxt ist))
]
fmap fst . checkClosed $
rawList (raw_apply (Var pairTy) [Var (reflm "TTName")
, raw_apply (Var pairTy) [ RApp listTy
(Var (tacN "FunArg"))
, Var (reflm "Raw")]])
out
| n == tacN "Prim__SourceLocation"
= do ~[] <- tacTmArgs 0 tac args
fmap fst . checkClosed $
reflectFC fc
| n == tacN "Prim__Namespace"
= do ~[] <- tacTmArgs 0 tac args
fmap fst . checkClosed $
rawList (RConstant StrType) (map (RConstant . Str) ns)
| n == tacN "Prim__Env"
= do ~[] <- tacTmArgs 0 tac args
env <- get_env
fmap fst . checkClosed $ reflectEnv env
| n == tacN "Prim__Fail"
= do ~[_a, errs] <- tacTmArgs 2 tac args
errs' <- eval errs
parts <- reifyReportParts errs'
lift . tfail $ ReflectionError [parts] (Msg "")
| n == tacN "Prim__PureElab"
= do ~[_a, tm] <- tacTmArgs 2 tac args
return tm
| n == tacN "Prim__BindElab"
= do ~[_a, _b, first, andThen] <- tacTmArgs 4 tac args
first' <- eval first
res <- eval =<< runTacTm first'
next <- eval (App Complete andThen res)
runTacTm next
| n == tacN "Prim__Try"
= do ~[_a, first, alt] <- tacTmArgs 3 tac args
first' <- eval first
alt' <- eval alt
try' (runTacTm first') (runTacTm alt') True
| n == tacN "Prim__Fill"
= do ~[raw] <- tacTmArgs 1 tac args
raw' <- reifyRaw =<< eval raw
apply raw' []
returnUnit
| n == tacN "Prim__Apply" || n == tacN "Prim__MatchApply"
= do ~[raw, argSpec] <- tacTmArgs 2 tac args
raw' <- reifyRaw =<< eval raw
argSpec' <- map (\b -> (b, 0)) <$> reifyList reifyBool argSpec
let op = if n == tacN "Prim__Apply"
then apply
else match_apply
ns <- op raw' argSpec'
fmap fst . checkClosed $
rawList (rawPairTy (Var $ reflm "TTName") (Var $ reflm "TTName"))
[ rawPair (Var $ reflm "TTName", Var $ reflm "TTName")
(reflectName n1, reflectName n2)
| (n1, n2) <- ns
]
| n == tacN "Prim__Gensym"
= do ~[hint] <- tacTmArgs 1 tac args
hintStr <- eval hint
case hintStr of
Constant (Str h) -> do
n <- getNameFrom (sMN 0 h)
fmap fst $ get_type_val (reflectName n)
_ -> fail "no hint"
| n == tacN "Prim__Claim"
= do ~[n, ty] <- tacTmArgs 2 tac args
n' <- reifyTTName n
ty' <- reifyRaw ty
claim n' ty'
returnUnit
| n == tacN "Prim__Check"
= do ~[env', raw] <- tacTmArgs 2 tac args
env <- reifyEnv env'
raw' <- reifyRaw =<< eval raw
ctxt <- get_context
(tm, ty) <- lift $ check ctxt env raw'
fmap fst . checkClosed $
rawPair (Var (reflm "TT"), Var (reflm "TT"))
(reflect tm, reflect ty)
| n == tacN "Prim__Attack"
= do ~[] <- tacTmArgs 0 tac args
attack
returnUnit
| n == tacN "Prim__Rewrite"
= do ~[rule] <- tacTmArgs 1 tac args
r <- reifyRaw rule
rewrite r
returnUnit
| n == tacN "Prim__Focus"
= do ~[what] <- tacTmArgs 1 tac args
n' <- reifyTTName what
hs <- get_holes
if elem n' hs
then focus n' >> returnUnit
else lift . tfail . Msg $ "The name " ++ show n' ++ " does not denote a hole"
| n == tacN "Prim__Unfocus"
= do ~[what] <- tacTmArgs 1 tac args
n' <- reifyTTName what
movelast n'
returnUnit
| n == tacN "Prim__Intro"
= do ~[mn] <- tacTmArgs 1 tac args
n <- case fromTTMaybe mn of
Nothing -> return Nothing
Just name -> fmap Just $ reifyTTName name
intro n
returnUnit
| n == tacN "Prim__Forall"
= do ~[n, ty] <- tacTmArgs 2 tac args
n' <- reifyTTName n
ty' <- reifyRaw ty
forall n' Nothing ty'
returnUnit
| n == tacN "Prim__PatVar"
= do ~[n] <- tacTmArgs 1 tac args
n' <- reifyTTName n
patvar' n'
returnUnit
| n == tacN "Prim__PatBind"
= do ~[n] <- tacTmArgs 1 tac args
n' <- reifyTTName n
patbind n'
returnUnit
| n == tacN "Prim__LetBind"
= do ~[n, ty, tm] <- tacTmArgs 3 tac args
n' <- reifyTTName n
ty' <- reifyRaw ty
tm' <- reifyRaw tm
letbind n' ty' tm'
returnUnit
| n == tacN "Prim__Compute"
= do ~[] <- tacTmArgs 0 tac args; compute ; returnUnit
| n == tacN "Prim__Normalise"
= do ~[env, tm] <- tacTmArgs 2 tac args
env' <- reifyEnv env
tm' <- reifyTT tm
ctxt <- get_context
let out = normaliseAll ctxt env' (finalise tm')
fmap fst . checkClosed $ reflect out
| n == tacN "Prim__Whnf"
= do ~[tm] <- tacTmArgs 1 tac args
tm' <- reifyTT tm
ctxt <- get_context
fmap fst . checkClosed . reflect $ whnf ctxt tm'
| n == tacN "Prim__Converts"
= do ~[env, tm1, tm2] <- tacTmArgs 3 tac args
env' <- reifyEnv env
tm1' <- reifyTT tm1
tm2' <- reifyTT tm2
ctxt <- get_context
lift $ converts ctxt env' tm1' tm2'
returnUnit
| n == tacN "Prim__DeclareType"
= do ~[decl] <- tacTmArgs 1 tac args
(RDeclare n args res) <- reifyTyDecl decl
ctxt <- get_context
let rty = foldr mkPi res args
(checked, ty') <- lift $ check ctxt [] rty
mustBeType ctxt checked ty'
mustNotBeDefined ctxt n
let decl = TyDecl Ref checked
ctxt' = addCtxtDef n decl ctxt
set_context ctxt'
updateAux $ \e -> e { new_tyDecls = (RTyDeclInstrs n fc (map rFunArgToPArg args) checked) :
new_tyDecls e }
returnUnit
| n == tacN "Prim__DefineFunction"
= do ~[decl] <- tacTmArgs 1 tac args
defn <- reifyFunDefn decl
defineFunction defn
returnUnit
| n == tacN "Prim__DeclareDatatype"
= do ~[decl] <- tacTmArgs 1 tac args
RDeclare n args resTy <- reifyTyDecl decl
ctxt <- get_context
let tcTy = foldr mkPi resTy args
(checked, ty') <- lift $ check ctxt [] tcTy
mustBeType ctxt checked ty'
mustNotBeDefined ctxt n
let ctxt' = addTyDecl n (TCon 0 0) checked ctxt
set_context ctxt'
updateAux $ \e -> e { new_tyDecls = RDatatypeDeclInstrs n (map rFunArgToPArg args) : new_tyDecls e }
returnUnit
| n == tacN "Prim__DefineDatatype"
= do ~[defn] <- tacTmArgs 1 tac args
RDefineDatatype n ctors <- reifyRDataDefn defn
ctxt <- get_context
tyconTy <- case lookupTyExact n ctxt of
Just t -> return t
Nothing -> lift . tfail . Msg $ "Type not previously declared"
datatypes <- get_datatypes
case lookupCtxtName n datatypes of
[] -> return ()
_ -> lift . tfail . Msg $ show n ++ " is already defined as a datatype."
-- Prepare the constructors
ctors' <- mapM (prepareConstructor n) ctors
ttag <- do ES (ps, aux) str prev <- get
let i = global_nextname ps
put $ ES (ps { global_nextname = global_nextname ps + 1 },
aux)
str
prev
return i
let ctxt' = addDatatype (Data n ttag tyconTy False (map (\(cn, _, cty) -> (cn, cty)) ctors')) ctxt
set_context ctxt'
-- the rest happens in a bit
updateAux $ \e -> e { new_tyDecls = RDatatypeDefnInstrs n tyconTy ctors' : new_tyDecls e }
returnUnit
| n == tacN "Prim__AddInstance"
= do ~[cls, inst] <- tacTmArgs 2 tac args
className <- reifyTTName cls
instName <- reifyTTName inst
updateAux $ \e -> e { new_tyDecls = RAddInstance className instName :
new_tyDecls e }
returnUnit
| n == tacN "Prim__IsTCName"
= do ~[n] <- tacTmArgs 1 tac args
n' <- reifyTTName n
case lookupCtxtExact n' (idris_classes ist) of
Just _ -> fmap fst . checkClosed $ Var (sNS (sUN "True") ["Bool", "Prelude"])
Nothing -> fmap fst . checkClosed $ Var (sNS (sUN "False") ["Bool", "Prelude"])
| n == tacN "Prim__ResolveTC"
= do ~[fn] <- tacTmArgs 1 tac args
g <- goal
fn <- reifyTTName fn
resolveTC' False True 100 g fn ist
returnUnit
| n == tacN "Prim__Search"
= do ~[depth, hints] <- tacTmArgs 2 tac args
d <- eval depth
hints' <- eval hints
case (d, unList hints') of
(Constant (I i), Just hs) ->
do actualHints <- mapM reifyTTName hs
unifyProblems
let psElab = elab ist toplevel ERHS [] (sMN 0 "tac")
proofSearch True True False False i psElab Nothing (sMN 0 "search ") [] actualHints ist
returnUnit
(Constant (I _), Nothing ) ->
lift . tfail . InternalMsg $ "Not a list: " ++ show hints'
(_, _) -> lift . tfail . InternalMsg $ "Can't reify int " ++ show d
| n == tacN "Prim__RecursiveElab"
= do ~[goal, script] <- tacTmArgs 2 tac args
goal' <- reifyRaw goal
ctxt <- get_context
script <- eval script
(goalTT, goalTy) <- lift $ check ctxt [] goal'
lift $ isType ctxt [] goalTy
recH <- getNameFrom (sMN 0 "recElabHole")
aux <- getAux
datatypes <- get_datatypes
env <- get_env
g_next <- get_global_nextname
(ctxt', ES (p, aux') _ _) <-
do (ES (current_p, _) _ _) <- get
lift $ runElab aux
(do runElabAction info ist fc [] script ns
ctxt' <- get_context
return ctxt')
((newProof recH (constraintNS info) ctxt datatypes g_next goalTT)
{ nextname = nextname current_p })
set_context ctxt'
let tm_out = getProofTerm (pterm p)
do (ES (prf, _) s e) <- get
let p' = prf { nextname = nextname p
, global_nextname = global_nextname p
}
put (ES (p', aux') s e)
env' <- get_env
(tm, ty, _) <- lift $ recheck (constraintNS info) ctxt' env (forget tm_out) tm_out
let (tm', ty') = (reflect tm, reflect ty)
fmap fst . checkClosed $
rawPair (Var $ reflm "TT", Var $ reflm "TT")
(tm', ty')
| n == tacN "Prim__Metavar"
= do ~[n] <- tacTmArgs 1 tac args
n' <- reifyTTName n
ctxt <- get_context
ptm <- get_term
-- See documentation above in the elab case for PMetavar
let unique_used = getUniqueUsed ctxt ptm
let mvn = metavarName ns n'
attack
defer unique_used mvn
solve
returnUnit
| n == tacN "Prim__Fixity"
= do ~[op'] <- tacTmArgs 1 tac args
opTm <- eval op'
case opTm of
Constant (Str op) ->
let opChars = ":!#$%&*+./<=>?@\\^|-~"
invalidOperators = [":", "=>", "->", "<-", "=", "?=", "|", "**", "==>", "\\", "%", "~", "?", "!"]
fixities = idris_infixes ist
in if not (all (flip elem opChars) op) || elem op invalidOperators
then lift . tfail . Msg $ "'" ++ op ++ "' is not a valid operator name."
else case nub [f | Fix f someOp <- fixities, someOp == op] of
[] -> lift . tfail . Msg $ "No fixity found for operator '" ++ op ++ "'."
[f] -> fmap fst . checkClosed $ reflectFixity f
many -> lift . tfail . InternalMsg $ "Ambiguous fixity for '" ++ op ++ "'! Found " ++ show many
_ -> lift . tfail . Msg $ "Not a constant string for an operator name: " ++ show opTm
| n == tacN "Prim__Debug"
= do ~[ty, msg] <- tacTmArgs 2 tac args
msg' <- eval msg
parts <- reifyReportParts msg
debugElaborator parts
runTacTm x = elabScriptStuck x
-- Running tactics directly
-- if a tactic adds unification problems, return an error
--
-- | Interpret a single 'PTactic' against the current proof state.
--
-- @autoSolve@: when True, trailing holes are discharged with 'solveAll'
-- after most tactics; when False, 'no_errors' guards the run and reports
-- 'CantSolveGoal' with the current goal/environment on failure.
-- @perhapsFC@ is only consulted by the 'SourceFC' tactic.
runTac :: Bool -> IState -> Maybe FC -> Name -> PTactic -> ElabD ()
runTac autoSolve ist perhapsFC fn tac
    = do env <- get_env
         g <- goal
         -- Bind implicit names from the environment into the tactic's terms.
         let tac' = fmap (addImplBound ist (map fst env)) tac
         if autoSolve
            then runT tac'
            else no_errors (runT tac')
                   (Just (CantSolveGoal g (map (\(n, b) -> (n, binderTy b)) env)))
  where
    runT (Intro []) = do g <- goal
                         attack; intro (bname g)
       where
         -- Reuse the binder's own name for the introduced variable, if any.
         bname (Bind n _ _) = Just n
         bname _ = Nothing
    runT (Intro xs) = mapM_ (\x -> do attack; intro (Just x)) xs
    runT Intros = do g <- goal
                     attack;
                     intro (bname g)
                     -- Keep introducing until 'intro' fails, then stop quietly.
                     try' (runT Intros)
                          (return ()) True
       where
         bname (Bind n _ _) = Just n
         bname _ = Nothing
    runT (Exact tm) = do elab ist toplevel ERHS [] (sMN 0 "tac") tm
                         when autoSolve solveAll
    runT (MatchRefine fn)
        = do fnimps <-
               case lookupCtxtName fn (idris_implicits ist) of
                    [] -> do a <- envArgs fn
                             return [(fn, a)]
                    -- For globally known names, treat every argument as implicit
                    -- so 'match_apply' may fill them by matching.
                    ns -> return (map (\ (n, a) -> (n, map (const True) a)) ns)
             let tacs = map (\ (fn', imps) ->
                                 (match_apply (Var fn') (map (\x -> (x, 0)) imps),
                                     fn')) fnimps
             tryAll tacs
             when autoSolve solveAll
       where envArgs n = do e <- get_env
                            case lookup n e of
                               Just t -> return $ map (const False)
                                                      (getArgTys (binderTy t))
                               _ -> return []
    runT (Refine fn [])
        = do fnimps <-
               case lookupCtxtName fn (idris_implicits ist) of
                    [] -> do a <- envArgs fn
                             return [(fn, a)]
                    ns -> return (map (\ (n, a) -> (n, map isImp a)) ns)
             let tacs = map (\ (fn', imps) ->
                                 (apply (Var fn') (map (\x -> (x, 0)) imps),
                                     fn')) fnimps
             tryAll tacs
             when autoSolve solveAll
       where isImp (PImp _ _ _ _ _) = True
             isImp _ = False
             envArgs n = do e <- get_env
                            case lookup n e of
                               Just t -> return $ map (const False)
                                                      (getArgTys (binderTy t))
                               _ -> return []
    runT (Refine fn imps) = do ns <- apply (Var fn) (map (\x -> (x,0)) imps)
                               when autoSolve solveAll
    runT DoUnify = do unify_all
                      when autoSolve solveAll
    -- Claim a fresh type hole, then a hole of that type, and elaborate the
    -- claimed type into the first hole before refocusing on the named one.
    runT (Claim n tm) = do tmHole <- getNameFrom (sMN 0 "newGoal")
                           claim tmHole RType
                           claim n (Var tmHole)
                           focus tmHole
                           elab ist toplevel ERHS [] (sMN 0 "tac") tm
                           focus n
    runT (Equiv tm) -- let bind tm, then
        = do attack
             tyn <- getNameFrom (sMN 0 "ety")
             claim tyn RType
             valn <- getNameFrom (sMN 0 "eqval")
             claim valn (Var tyn)
             letn <- getNameFrom (sMN 0 "equiv_val")
             letbind letn (Var tyn) (Var valn)
             focus tyn
             elab ist toplevel ERHS [] (sMN 0 "tac") tm
             focus valn
             when autoSolve solveAll
    runT (Rewrite tm) -- to elaborate tm, let bind it, then rewrite by that
        = do attack; -- (h:_) <- get_holes
             tyn <- getNameFrom (sMN 0 "rty")
             -- start_unify h
             claim tyn RType
             valn <- getNameFrom (sMN 0 "rval")
             claim valn (Var tyn)
             letn <- getNameFrom (sMN 0 "rewrite_rule")
             letbind letn (Var tyn) (Var valn)
             focus valn
             elab ist toplevel ERHS [] (sMN 0 "tac") tm
             rewrite (Var letn)
             when autoSolve solveAll
    runT (Induction tm) -- let bind tm, similar to the others
        = case_ True autoSolve ist fn tm
    runT (CaseTac tm)
        = case_ False autoSolve ist fn tm
    runT (LetTac n tm)
        = do attack
             tyn <- getNameFrom (sMN 0 "letty")
             claim tyn RType
             valn <- getNameFrom (sMN 0 "letval")
             claim valn (Var tyn)
             letn <- unique_hole n
             letbind letn (Var tyn) (Var valn)
             focus valn
             elab ist toplevel ERHS [] (sMN 0 "tac") tm
             when autoSolve solveAll
    -- Like LetTac, but the binding's type is supplied and elaborated first.
    runT (LetTacTy n ty tm)
        = do attack
             tyn <- getNameFrom (sMN 0 "letty")
             claim tyn RType
             valn <- getNameFrom (sMN 0 "letval")
             claim valn (Var tyn)
             letn <- unique_hole n
             letbind letn (Var tyn) (Var valn)
             focus tyn
             elab ist toplevel ERHS [] (sMN 0 "tac") ty
             focus valn
             elab ist toplevel ERHS [] (sMN 0 "tac") tm
             when autoSolve solveAll
    runT Compute = compute
    runT Trivial = do trivial' ist; when autoSolve solveAll
    runT TCInstance = runT (Exact (PResolveTC emptyFC))
    runT (ProofSearch rec prover depth top psns hints)
        = do proofSearch' ist rec False depth prover top fn psns hints
             when autoSolve solveAll
    runT (Focus n) = focus n
    runT Unfocus = do hs <- get_holes
                      case hs of
                        [] -> return ()
                        (h : _) -> movelast h
    runT Solve = solve
    runT (Try l r) = do try' (runT l) (runT r) True
    runT (TSeq l r) = do runT l; runT r
    -- Elaborate a user-supplied tactic script of type
    -- List (TTName, Binder TT) -> TT -> Tactic, apply it to the reflected
    -- environment and goal, normalise, and run the reified result.
    runT (ApplyTactic tm) = do tenv <- get_env -- store the environment
                               tgoal <- goal -- store the goal
                               attack -- let f : List (TTName, Binder TT) -> TT -> Tactic = tm in ...
                               script <- getNameFrom (sMN 0 "script")
                               claim script scriptTy
                               scriptvar <- getNameFrom (sMN 0 "scriptvar" )
                               letbind scriptvar scriptTy (Var script)
                               focus script
                               elab ist toplevel ERHS [] (sMN 0 "tac") tm
                               (script', _) <- get_type_val (Var scriptvar)
                               -- now that we have the script apply
                               -- it to the reflected goal and context
                               restac <- getNameFrom (sMN 0 "restac")
                               claim restac tacticTy
                               focus restac
                               fill (raw_apply (forget script')
                                               [reflectEnv tenv, reflect tgoal])
                               restac' <- get_guess
                               solve
                               -- normalise the result in order to
                               -- reify it
                               ctxt <- get_context
                               env <- get_env
                               let tactic = normalise ctxt env restac'
                               runReflected tactic
      where tacticTy = Var (reflm "Tactic")
            listTy = Var (sNS (sUN "List") ["List", "Prelude"])
            scriptTy = (RBind (sMN 0 "__pi_arg")
                              (Pi Nothing (RApp listTy envTupleType) RType)
                                  (RBind (sMN 1 "__pi_arg")
                                         (Pi Nothing (Var $ reflm "TT") RType) tacticTy))
    runT (ByReflection tm) -- run the reflection function 'tm' on the
                           -- goal, then apply the resulting reflected Tactic
        = do tgoal <- goal
             attack
             script <- getNameFrom (sMN 0 "script")
             claim script scriptTy
             scriptvar <- getNameFrom (sMN 0 "scriptvar" )
             letbind scriptvar scriptTy (Var script)
             focus script
             ptm <- get_term
             elab ist toplevel ERHS [] (sMN 0 "tac")
                  (PApp emptyFC tm [pexp (delabTy' ist [] tgoal True True)])
             (script', _) <- get_type_val (Var scriptvar)
             -- now that we have the script apply
             -- it to the reflected goal
             restac <- getNameFrom (sMN 0 "restac")
             claim restac tacticTy
             focus restac
             fill (forget script')
             restac' <- get_guess
             solve
             -- normalise the result in order to
             -- reify it
             ctxt <- get_context
             env <- get_env
             let tactic = normalise ctxt env restac'
             runReflected tactic
      where tacticTy = Var (reflm "Tactic")
            scriptTy = tacticTy
    runT (Reflect v) = do attack -- let x = reflect v in ...
                          tyn <- getNameFrom (sMN 0 "letty")
                          claim tyn RType
                          valn <- getNameFrom (sMN 0 "letval")
                          claim valn (Var tyn)
                          letn <- getNameFrom (sMN 0 "letvar")
                          letbind letn (Var tyn) (Var valn)
                          focus valn
                          elab ist toplevel ERHS [] (sMN 0 "tac") v
                          (value, _) <- get_type_val (Var letn)
                          ctxt <- get_context
                          env <- get_env
                          -- NOTE(review): head-normal form only here, unlike
                          -- Fill below which fully normalises.
                          let value' = hnf ctxt env value
                          runTac autoSolve ist perhapsFC fn (Exact $ PQuote (reflect value'))
    runT (Fill v) = do attack -- let x = fill x in ...
                       tyn <- getNameFrom (sMN 0 "letty")
                       claim tyn RType
                       valn <- getNameFrom (sMN 0 "letval")
                       claim valn (Var tyn)
                       letn <- getNameFrom (sMN 0 "letvar")
                       letbind letn (Var tyn) (Var valn)
                       focus valn
                       elab ist toplevel ERHS [] (sMN 0 "tac") v
                       (value, _) <- get_type_val (Var letn)
                       ctxt <- get_context
                       env <- get_env
                       let value' = normalise ctxt env value
                       rawValue <- reifyRaw value'
                       runTac autoSolve ist perhapsFC fn (Exact $ PQuote rawValue)
    -- Run the inner tactic only when the goal's head symbol matches @n@.
    runT (GoalType n tac) = do g <- goal
                               case unApply g of
                                    (P _ n' _, _) ->
                                         if nsroot n' == sUN n
                                            then runT tac
                                            else fail "Wrong goal type"
                                    _ -> fail "Wrong goal type"
    runT ProofState = do g <- goal
                         return ()
    runT Skip = return ()
    runT (TFail err) = lift . tfail $ ReflectionError [err] (Msg "")
    runT SourceFC =
        case perhapsFC of
          Nothing -> lift . tfail $ Msg "There is no source location available."
          Just fc ->
            do fill $ reflectFC fc
               solve
    runT Qed = lift . tfail $ Msg "The qed command is only valid in the interactive prover"
    runT x = fail $ "Not implemented " ++ show x
    -- Reify a reflected Tactic term back into a 'PTactic' and recurse.
    runReflected t = do t' <- reify ist t
                        runTac autoSolve ist perhapsFC fn t'
-- | Wrap an error in 'ElaboratingArg' context, recording which function
-- argument was being elaborated, unless such context is already present.
elaboratingArgErr :: [(Name, Name)] -> Err -> Err
elaboratingArgErr [] err = err
elaboratingArgErr ((f, x) : during) err = fromMaybe err (wrap err)
  where
    -- Already tagged with an argument context: leave the error untouched.
    wrap (ElaboratingArg _ _ _ _) = Nothing
    -- Push the wrapping underneath plumbing constructors.
    wrap (ProofSearchFail e) = ProofSearchFail <$> wrap e
    wrap (At fc e)           = At fc <$> wrap e
    wrap e                   = Just (ElaboratingArg f x during e)
-- | Run an Idris action; if it fails, rewrite the error through any
-- registered error-reflection handlers before rethrowing it.
withErrorReflection :: Idris a -> Idris a
withErrorReflection x = idrisCatch x (\ e -> handle e >>= ierror)
    where -- Walk the error, deciding per-constructor whether to reflect.
          handle :: Err -> Idris Err
          handle e@(ReflectionError _ _) = do logElab 3 "Skipping reflection of error reflection result"
                                              return e -- Don't do meta-reflection of errors
          handle e@(ReflectionFailed _ _) = do logElab 3 "Skipping reflection of reflection failure"
                                               return e
          -- At and Elaborating are just plumbing - error reflection shouldn't rewrite them
          handle e@(At fc err) = do logElab 3 "Reflecting body of At"
                                    err' <- handle err
                                    return (At fc err')
          handle e@(Elaborating what n ty err) = do logElab 3 "Reflecting body of Elaborating"
                                                    err' <- handle err
                                                    return (Elaborating what n ty err')
          -- Argument-specific handlers (registered per function/argument)
          -- take precedence over the global ones for the inner error.
          handle e@(ElaboratingArg f a prev err) = do logElab 3 "Reflecting body of ElaboratingArg"
                                                      hs <- getFnHandlers f a
                                                      err' <- if null hs
                                                                 then handle err
                                                                 else applyHandlers err hs
                                                      return (ElaboratingArg f a prev err')
          -- ProofSearchFail is an internal detail - so don't expose it
          handle (ProofSearchFail e) = handle e
          -- TODO: argument-specific error handlers go here for ElaboratingArg
          handle e = do ist <- getIState
                        logElab 2 "Starting error reflection"
                        logElab 5 (show e)
                        let handlers = idris_errorhandlers ist
                        applyHandlers e handlers
          -- Handlers registered for a specific argument of a specific function.
          getFnHandlers :: Name -> Name -> Idris [Name]
          getFnHandlers f arg = do ist <- getIState
                                   let funHandlers = maybe M.empty id .
                                                     lookupCtxtExact f .
                                                     idris_function_errorhandlers $ ist
                                   return . maybe [] S.toList . M.lookup arg $ funHandlers
          -- Apply each handler term to the reflected error, typecheck and
          -- normalise the results, and collect any produced report parts.
          applyHandlers e handlers =
                 do ist <- getIState
                    let err = fmap (errReverse ist) e
                    logElab 3 $ "Using reflection handlers " ++
                               concat (intersperse ", " (map show handlers))
                    let reports = map (\n -> RApp (Var n) (reflectErr err)) handlers
                    -- Typecheck error handlers - if this fails, then something else was wrong earlier!
                    handlers <- case mapM (check (tt_ctxt ist) []) reports of
                                  Error e -> ierror $ ReflectionFailed "Type error while constructing reflected error" e
                                  OK hs -> return hs
                    -- Normalize error handler terms to produce the new messages
                    -- Need to use 'normaliseAll' since we have to reduce private
                    -- names in error handlers too
                    ctxt <- getContext
                    let results = map (normaliseAll ctxt []) (map fst handlers)
                    logElab 3 $ "New error message info: " ++ concat (intersperse " and " (map show results))
                    -- For each handler term output, either discard it if it is Nothing or reify it the Haskell equivalent
                    let errorpartsTT = mapMaybe unList (mapMaybe fromTTMaybe results)
                    errorparts <- case mapM (mapM reifyReportPart) errorpartsTT of
                                    Left err -> ierror err
                                    Right ok -> return ok
                    -- No handler produced output: keep the original error.
                    return $ case errorparts of
                               [] -> e
                               parts -> ReflectionError errorparts e
solveAll = try (do solve; solveAll) (return ())
-- | Do the left-over work after creating declarations in reflected
-- elaborator scripts
--
-- Instructions were accumulated in reverse order during elaboration,
-- hence the 'reverse' before processing.
processTacticDecls :: ElabInfo -> [RDeclInstructions] -> Idris ()
processTacticDecls info steps =
  -- The order of steps is important: type declarations might
  -- establish metavars that later function bodies resolve.
  forM_ (reverse steps) $ \case
    RTyDeclInstrs n fc impls ty ->
      do logElab 3 $ "Declaration from tactics: " ++ show n ++ " : " ++ show ty
         logElab 3 $ "  It has impls " ++ show impls
         updateIState $ \i -> i { idris_implicits =
                                    addDef n impls (idris_implicits i) }
         addIBC (IBCImp n)
         ds <- checkDef info fc (\_ e -> e) True [(n, (-1, Nothing, ty, []))]
         addIBC (IBCDef n)
         ctxt <- getContext
         case lookupDef n ctxt of
           (TyDecl _ _ : _) ->
             -- If the function isn't defined at the end of the elab script,
             -- then it must be added as a metavariable. This needs guarding
             -- to prevent overwriting case defs with a metavar, if the case
             -- defs come after the type decl in the same script!
             let ds' = map (\(n, (i, top, t, ns)) -> (n, (i, top, t, ns, True, True))) ds
             in addDeferred ds'
           _ -> return ()
    RDatatypeDeclInstrs n impls ->
      do addIBC (IBCDef n)
         updateIState $ \i -> i { idris_implicits = addDef n impls (idris_implicits i) }
         addIBC (IBCImp n)
    -- A datatype whose type constructor was declared earlier: record its
    -- constructors, parameters, totality bookkeeping and IBC output.
    RDatatypeDefnInstrs tyn tyconTy ctors ->
      do let cn (n, _, _) = n
             cimpls (_, impls, _) = impls
             cty (_, _, t) = t
         addIBC (IBCDef tyn)
         mapM_ (addIBC . IBCDef . cn) ctors
         ctxt <- getContext
         let params = findParams tyn (normalise ctxt [] tyconTy) (map cty ctors)
         let typeInfo = TI (map cn ctors) False [] params []
         -- implicit precondition to IBCData is that idris_datatypes on the IState is populated.
         -- otherwise writing the IBC just fails silently!
         updateIState $ \i -> i { idris_datatypes =
                                    addDef tyn typeInfo (idris_datatypes i) }
         addIBC (IBCData tyn)
         ttag <- getName -- from AbsSyntax.hs, really returns a disambiguating Int
         let metainf = DataMI params
         addIBC (IBCMetaInformation tyn metainf)
         updateContext (setMetaInformation tyn metainf)
         for_ ctors $ \(cn, impls, _) ->
           do updateIState $ \i -> i { idris_implicits = addDef cn impls (idris_implicits i) }
              addIBC (IBCImp cn)
         -- Positivity checking per constructor; silently skipped when the
         -- constructor's type isn't in the context.
         for_ ctors $ \(ctorN, _, _) ->
           do totcheck (NoFC, ctorN)
              ctxt <- tt_ctxt <$> getIState
              case lookupTyExact ctorN ctxt of
                Just cty -> do checkPositive (tyn : map cn ctors) (ctorN, cty)
                               return ()
                Nothing -> return ()
         -- Single-constructor types can be marked detaggable (optimisation).
         case ctors of
           [ctor] -> do setDetaggable (cn ctor); setDetaggable tyn
                        addIBC (IBCOpt (cn ctor)); addIBC (IBCOpt tyn)
           _ -> return ()
         -- TODO: inaccessible
    RAddInstance className instName ->
      do -- The type class resolution machinery relies on a special
         logElab 2 $ "Adding elab script instance " ++ show instName ++
                     " for " ++ show className
         addInstance False True className instName
         addIBC (IBCInstance False True className instName)
    RClausesInstrs n cs ->
      do logElab 3 $ "Pattern-matching definition from tactics: " ++ show n
         solveDeferred emptyFC n
         let lhss = map (\(_, lhs, _) -> lhs) cs
         let fc = fileFC "elab_reflected"
         -- Coverage check: generate candidate clauses and keep those that
         -- are possible yet unmatched by the given LHSs.
         pmissing <-
           do ist <- getIState
              possible <- genClauses fc n lhss
                                     (map (\lhs ->
                                            delab' ist lhs True True) lhss)
              missing <- filterM (checkPossible n) possible
              return (filter (noMatch ist lhss) missing)
         let tot = if null pmissing
                      then Unchecked -- still need to check recursive calls
                      else Partial NotCovering -- missing cases implies not total
         setTotality n tot
         updateIState $ \i -> i { idris_patdefs =
                                    addDef n (cs, pmissing) $ idris_patdefs i }
         addIBC (IBCDef n)
         ctxt <- getContext
         case lookupDefExact n ctxt of
           Just (CaseOp _ _ _ _ _ cd) ->
             -- Here, we populate the call graph with a list of things
             -- we refer to, so that if they aren't total, the whole
             -- thing won't be.
             let (scargs, sc) = cases_compiletime cd
                 calls = map fst $ findCalls sc scargs
             in do logElab 2 $ "Called names in reflected elab: " ++ show calls
                   addCalls n calls
                   addIBC $ IBCCG n
           Just _ -> return () -- TODO throw internal error
           Nothing -> return ()
         -- checkDeclTotality requires that the call graph be present
         -- before calling it.
         -- TODO: reduce code duplication with Idris.Elab.Clause
         buildSCG (fc, n)
         -- Actually run the totality checker. In the main clause
         -- elaborator, this is deferred until after. Here, we run it
         -- now to get totality information as early as possible.
         tot' <- checkDeclTotality (fc, n)
         setTotality n tot'
         when (tot' /= Unchecked) $ addIBC (IBCTotal n tot')
  where
    -- TODO: see if the code duplication with Idris.Elab.Clause can be
    -- reduced or eliminated.
    -- | Can this candidate LHS be elaborated and rechecked? Used to filter
    -- generated coverage cases down to genuinely possible ones.
    checkPossible :: Name -> PTerm -> Idris Bool
    checkPossible fname lhs_in =
      do ctxt <- getContext
         ist <- getIState
         let lhs = addImplPat ist lhs_in
         let fc = fileFC "elab_reflected_totality"
         let tcgen = False -- TODO: later we may support dictionary generation
         case elaborate (constraintNS info) ctxt (idris_datatypes ist) (idris_name ist) (sMN 0 "refPatLHS") infP initEState
                (erun fc (buildTC ist info ELHS [] fname (allNamesIn lhs_in)
                                  (infTerm lhs))) of
           OK (ElabResult lhs' _ _ _ _ _ name', _) ->
             do -- not recursively calling here, because we don't
                -- want to run infinitely many times
                let lhs_tm = orderPats (getInferTerm lhs')
                updateIState $ \i -> i { idris_name = name' }
                case recheck (constraintNS info) ctxt [] (forget lhs_tm) lhs_tm of
                  OK _ -> return True
                  err -> return False
           -- if it's a recoverable error, the case may become possible
           Error err -> if tcgen then return (recoverableCoverage ctxt err)
                           else return (validCoverageCase ctxt err ||
                                        recoverableCoverage ctxt err)
    -- TODO: Attempt to reduce/eliminate code duplication with Idris.Elab.Clause
    -- True when no given clause matches the candidate term.
    noMatch i cs tm = all (\x -> case matchClause i (delab' i x True True) tm of
                                      Right _ -> False
                                      Left _ -> True) cs
|
KaneTW/Idris-dev
|
src/Idris/Elab/Term.hs
|
Haskell
|
bsd-3-clause
| 132,339
|
{-# language CPP #-}
-- No documentation found for Chapter "ExternalMemoryFeatureFlagBits"
module Vulkan.Core11.Enums.ExternalMemoryFeatureFlagBits ( ExternalMemoryFeatureFlags
, ExternalMemoryFeatureFlagBits( EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT
, EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT
, EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT
, ..
)
) where
import Vulkan.Internal.Utils (enumReadPrec)
import Vulkan.Internal.Utils (enumShowsPrec)
import GHC.Show (showString)
import Numeric (showHex)
import Vulkan.Zero (Zero)
import Data.Bits (Bits)
import Data.Bits (FiniteBits)
import Foreign.Storable (Storable)
import GHC.Read (Read(readPrec))
import GHC.Show (Show(showsPrec))
import Vulkan.Core10.FundamentalTypes (Flags)
-- Alias mirroring the C API, where @VkExternalMemoryFeatureFlags@ is the
-- bitmask type built from individual flag bits.
type ExternalMemoryFeatureFlags = ExternalMemoryFeatureFlagBits
-- | VkExternalMemoryFeatureFlagBits - Bitmask specifying features of an
-- external memory handle type
--
-- = Description
--
-- Because their semantics in external APIs roughly align with that of an
-- image or buffer with a dedicated allocation in Vulkan, implementations
-- are /required/ to report 'EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT'
-- for the following external handle types:
--
-- Implementations /must/ not report
-- 'EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT' for buffers with external
-- handle type
-- 'Vulkan.Core11.Enums.ExternalMemoryHandleTypeFlagBits.EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID'.
-- Implementations /must/ not report
-- 'EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT' for images or buffers with
-- external handle type
-- 'Vulkan.Core11.Enums.ExternalMemoryHandleTypeFlagBits.EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT',
-- or
-- 'Vulkan.Core11.Enums.ExternalMemoryHandleTypeFlagBits.EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT'.
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_VERSION_1_1 VK_VERSION_1_1>,
-- 'ExternalMemoryFeatureFlags'
newtype ExternalMemoryFeatureFlagBits = ExternalMemoryFeatureFlagBits Flags
  deriving newtype (Eq, Ord, Storable, Zero, Bits, FiniteBits)
-- | 'EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT' specifies that images or
-- buffers created with the specified parameters and handle type /must/ use
-- the mechanisms defined by
-- 'Vulkan.Core11.Promoted_From_VK_KHR_dedicated_allocation.MemoryDedicatedRequirements'
-- and
-- 'Vulkan.Core11.Promoted_From_VK_KHR_dedicated_allocation.MemoryDedicatedAllocateInfo'
-- to create (or import) a dedicated allocation for the image or buffer.
pattern EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT = ExternalMemoryFeatureFlagBits 0x00000001
-- | 'EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT' specifies that handles of this
-- type /can/ be exported from Vulkan memory objects.
pattern EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT = ExternalMemoryFeatureFlagBits 0x00000002
-- | 'EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT' specifies that handles of this
-- type /can/ be imported as Vulkan memory objects.
pattern EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT = ExternalMemoryFeatureFlagBits 0x00000004
-- Constructor name passed to the generic enum Show/Read helpers below.
conNameExternalMemoryFeatureFlagBits :: String
conNameExternalMemoryFeatureFlagBits = "ExternalMemoryFeatureFlagBits"
-- Common prefix shared by all flag names; the Show/Read helpers render and
-- parse the suffixes relative to this prefix.
enumPrefixExternalMemoryFeatureFlagBits :: String
enumPrefixExternalMemoryFeatureFlagBits = "EXTERNAL_MEMORY_FEATURE_"
-- Bit-to-suffix table consumed by 'enumShowsPrec' / 'enumReadPrec'.
showTableExternalMemoryFeatureFlagBits :: [(ExternalMemoryFeatureFlagBits, String)]
showTableExternalMemoryFeatureFlagBits =
  [ (EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT, "DEDICATED_ONLY_BIT")
  , (EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT    , "EXPORTABLE_BIT")
  , (EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT    , "IMPORTABLE_BIT")
  ]
-- Values outside the table fall back to a hexadecimal rendering.
instance Show ExternalMemoryFeatureFlagBits where
  showsPrec = enumShowsPrec enumPrefixExternalMemoryFeatureFlagBits
                            showTableExternalMemoryFeatureFlagBits
                            conNameExternalMemoryFeatureFlagBits
                            (\(ExternalMemoryFeatureFlagBits x) -> x)
                            (\x -> showString "0x" . showHex x)
instance Read ExternalMemoryFeatureFlagBits where
  readPrec = enumReadPrec enumPrefixExternalMemoryFeatureFlagBits
                          showTableExternalMemoryFeatureFlagBits
                          conNameExternalMemoryFeatureFlagBits
                          ExternalMemoryFeatureFlagBits
|
expipiplus1/vulkan
|
src/Vulkan/Core11/Enums/ExternalMemoryFeatureFlagBits.hs
|
Haskell
|
bsd-3-clause
| 4,826
|
{-|
Module : ChainFlyer
Copyright : (c) Tatsuya Hirose, 2015
License : BSD3
Maintainer : tatsuya.hirose.0804@gmail.com
-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveGeneric #-}
module Servant.ChainFlyer.Types.Transaction
( TransactionInput(..)
, TransactionOutput(..)
, Transaction(..)
) where
import Control.Monad (mzero)
import Data.Aeson
import Data.Time.Clock
import Data.Time.Format
import GHC.Generics
-- | Input of a transaction.
--
-- Note: the wire format uses the keys @value@\/@script@\/@address@ for the
-- @in_*@ fields (see the 'FromJSON' instance).
data TransactionInput = TransactionInput
  { prev_hash :: String    -- ^ Hash of the transaction whose output is spent.
  , prev_index :: Integer  -- ^ Index of the spent output in that transaction.
  , in_value :: Integer    -- ^ Input amount; units not stated here — presumably satoshi, confirm with the chainFlyer API.
  , in_script :: String    -- ^ Input script.
  , in_address :: String   -- ^ Source address.
  , sequence :: Integer    -- ^ Sequence number. NOTE(review): this accessor shadows 'Prelude.sequence' within this module.
  } deriving (Show, Generic)
instance FromJSON TransactionInput where
  -- Wire keys "value"/"script"/"address" populate the in_value/in_script/
  -- in_address fields; any non-object JSON value is rejected via 'mzero'.
  parseJSON (Object v) = do
    ph   <- v .: "prev_hash"
    pIdx <- v .: "prev_index"
    val  <- v .: "value"
    scr  <- v .: "script"
    addr <- v .: "address"
    sq   <- v .: "sequence"
    pure (TransactionInput ph pIdx val scr addr sq)
  parseJSON _ = mzero
instance ToJSON TransactionInput where
  -- Mirrors the 'FromJSON' instance: emits the short wire names
  -- "value"/"script"/"address" rather than the record field names.
  toJSON (TransactionInput ph pIdx val scr addr sq) =
    object
      [ "prev_hash"  .= ph
      , "prev_index" .= pIdx
      , "value"      .= val
      , "script"     .= scr
      , "address"    .= addr
      , "sequence"   .= sq
      ]
-- | Output of a transaction.
--
-- The wire format uses @value@\/@script@\/@address@ for the @out_*@ fields
-- (see the 'FromJSON' instance).
data TransactionOutput = TransactionOutput
  { out_value :: Integer   -- ^ Output amount; units not stated here — presumably satoshi, confirm with the chainFlyer API.
  , out_script :: String   -- ^ Output (locking) script.
  , out_address :: String  -- ^ Destination address.
  } deriving (Show, Generic)
instance FromJSON TransactionOutput where
  -- Accepts only JSON objects; anything else fails with 'mzero'.
  parseJSON (Object v) = do
    val  <- v .: "value"
    scr  <- v .: "script"
    addr <- v .: "address"
    pure (TransactionOutput val scr addr)
  parseJSON _ = mzero
instance ToJSON TransactionOutput where
  -- Mirrors 'FromJSON': short wire names rather than record field names.
  toJSON (TransactionOutput val scr addr) =
    object
      [ "value"   .= val
      , "script"  .= scr
      , "address" .= addr
      ]
-- | Transaction.
data Transaction = Transaction
  { tx_hash :: String            -- ^ Transaction hash.
  , block_height :: Int          -- ^ Height of the containing block.
  , confirmed :: Int             -- ^ Confirmation field from the API — presumably a confirmation count; confirm with the chainFlyer docs.
  , fees :: Integer              -- ^ Transaction fee; units not stated here.
  , size :: Int                  -- ^ Serialized size in bytes — TODO confirm unit.
  , received_date :: UTCTime     -- ^ When the transaction was received; fractional seconds are dropped during parsing.
  , version :: Int               -- ^ Transaction version.
  , lock_time :: Int             -- ^ Lock time.
  , inputs :: [TransactionInput]   -- ^ Inputs.
  , outputs :: [TransactionOutput] -- ^ Outputs.
  } deriving (Show, Generic)
instance FromJSON Transaction where
  parseJSON (Object v) = do
    hash     <- v .: "tx_hash"
    height   <- v .: "block_height"
    confs    <- v .: "confirmed"
    fee      <- v .: "fees"
    sz       <- v .: "size"
    rawStamp <- v .: "received_date"
    -- Timestamps may carry fractional seconds; everything from the first
    -- '.' onwards is discarded before parsing the %Y-%m-%dT%H:%M:%S part.
    stamp    <- parseTimeM True defaultTimeLocale "%Y-%m-%dT%H:%M:%S"
                           (takeWhile (/= '.') rawStamp)
    ver      <- v .: "version"
    lockT    <- v .: "lock_time"
    ins      <- v .: "inputs"
    outs     <- v .: "outputs"
    pure (Transaction hash height confs fee sz stamp ver lockT ins outs)
  parseJSON _ = mzero
-- Serialisation relies on the 'Generic' default, so the Haskell record
-- field names (not the wire names above) are used as JSON keys.
instance ToJSON Transaction
|
lotz84/chainFlyer
|
src/Servant/ChainFlyer/Types/Transaction.hs
|
Haskell
|
bsd-3-clause
| 3,091
|
module JIT where
import Foreign.Ptr (FunPtr, castFunPtr)
import Control.Monad.Except
import qualified LLVM.General.AST as AST
import LLVM.General.Context
import qualified LLVM.General.ExecutionEngine as EE
import LLVM.General.Module as Mod
import LLVM.General.PassManager
foreign import ccall "dynamic" haskFun :: FunPtr (IO Int) -> IO Int
-- | Invoke a JIT-compiled entry point through its raw function
-- pointer, coercing it to the @IO Int@ calling convention expected by
-- the dynamic FFI wrapper.
run :: FunPtr a -> IO Int
run = haskFun . castFunPtr
-- | Bracket an action with an MCJIT execution engine for the given
-- context.  Optimisation level is 2; the code model, frame-pointer
-- elimination and fast instruction selection are all left at their
-- engine defaults ('Nothing').
jit :: Context -> (EE.MCJIT -> IO a) -> IO a
jit ctx = EE.withMCJIT ctx optLevel codeModel ptrElim fastISel
  where
    optLevel  = Just 2   -- roughly -O2
    codeModel = Nothing  -- engine default
    ptrElim   = Nothing  -- engine default
    fastISel  = Nothing  -- engine default
-- | Curated optimisation pass set applied to every module before
-- execution, at the most aggressive curated level (3).
passes :: PassSetSpec
passes = defaultCuratedPassSetSpec {optLevel = Just 3}
-- | Optimise a module, print its LLVM assembly, JIT-compile it, run
-- its @main@ symbol when one exists (printing the result), and return
-- the optimised AST.  Lowering failures surface as 'Left' via the
-- ExceptT layer of 'withModuleFromAST'.
runJIT :: AST.Module -> IO (Either String AST.Module)
runJIT mod =  -- NOTE(review): 'mod' shadows Prelude.mod; harmless here
  withContext $ \context ->
    jit context $ \executionEngine ->
      runExceptT $
      withModuleFromAST context mod $ \m ->
        withPassManager passes $ \pm -> do
          -- Mutates the in-engine module; the Bool result (whether any
          -- pass changed anything) is deliberately discarded.
          _ <- runPassManager pm m
          optmod <- moduleAST m
          s <- moduleLLVMAssembly m
          putStrLn s
          EE.withModuleInEngine executionEngine m $ \ee -> do
            mainfn <- EE.getFunction ee (AST.Name "main")
            case mainfn of
              Just fn -> do
                res <- run fn
                putStrLn $ "Evaluated to: " ++ show res
              Nothing -> return ()  -- no 'main': optimise/print only
          return optmod
|
jjingram/satori
|
src/JIT.hs
|
Haskell
|
bsd-3-clause
| 1,379
|
-- parser produced by Happy Version 1.13
{-
%
% (c) The GRASP/AQUA Project, Glasgow University, 1998
%
% @(#) $Docid: Jul. 12th 2001 10:08 Sigbjorn Finne $
% @(#) $Contactid: sof@galconn.com $
%
A grammar for OMG CORBA IDL
-}
module OmgParser ( parseIDL ) where
import LexM
import Lex
import IDLToken
import IDLSyn
import BasicTypes
import Literal
{-
BEGIN_GHC_ONLY
import GlaExts
END_GHC_ONLY
-}
-- | Untyped union of every semantic value the parser can hold on its
-- stack: terminals, the error sentinel, and one constructor per
-- distinct non-terminal result type of the IDL grammar.
--
-- NOTE(review): this module is output of the Happy parser generator
-- (v1.13); do not edit by hand -- change the grammar source and
-- regenerate instead.
data HappyAbsSyn
        = HappyTerminal IDLToken
        | HappyErrorToken Int
        | HappyAbsSyn4 ([Defn])
        | HappyAbsSyn6 (Defn)
        | HappyAbsSyn10 ((Id, Inherit))
        | HappyAbsSyn13 (Inherit)
        | HappyAbsSyn15 (String)
        | HappyAbsSyn18 (Type)
        | HappyAbsSyn19 (Expr)
        | HappyAbsSyn27 (UnaryOp)
        | HappyAbsSyn29 (Literal)
        | HappyAbsSyn32 ((Type, [Id]))
        | HappyAbsSyn38 ([Id])
        | HappyAbsSyn39 (Id)
        | HappyAbsSyn50 ([Member])
        | HappyAbsSyn51 (Member)
        | HappyAbsSyn54 ([Switch])
        | HappyAbsSyn55 (Switch)
        | HappyAbsSyn56 ([CaseLabel])
        | HappyAbsSyn57 (CaseLabel)
        | HappyAbsSyn58 (SwitchArm)
        | HappyAbsSyn60 ([(Id,[Attribute],Maybe Expr)])
        | HappyAbsSyn64 ((Id, [Expr]))
        | HappyAbsSyn65 ([Expr])
        | HappyAbsSyn68 (Bool)
        | HappyAbsSyn76 ([Param])
        | HappyAbsSyn78 (Param)
        | HappyAbsSyn79 (Attribute)
        | HappyAbsSyn80 (Maybe Raises)
        | HappyAbsSyn81 (Maybe Context)
        | HappyAbsSyn82 ([String])
        | HappyAbsSyn87 (IntegerLit)
-- | Shape of every generated action/reduction: current state number,
-- lookahead token, current parser state, state stack, value stack,
-- producing the final semantic value inside the lexer monad 'LexM'.
-- (Generated by Happy; do not edit by hand.)
type HappyReduction =
           Int
        -> (IDLToken)
        -> HappyState (IDLToken) (HappyStk HappyAbsSyn -> LexM(HappyAbsSyn))
        -> [HappyState (IDLToken) (HappyStk HappyAbsSyn -> LexM(HappyAbsSyn))]
        -> HappyStk HappyAbsSyn
        -> LexM(HappyAbsSyn)
action_0,
action_1,
action_2,
action_3,
action_4,
action_5,
action_6,
action_7,
action_8,
action_9,
action_10,
action_11,
action_12,
action_13,
action_14,
action_15,
action_16,
action_17,
action_18,
action_19,
action_20,
action_21,
action_22,
action_23,
action_24,
action_25,
action_26,
action_27,
action_28,
action_29,
action_30,
action_31,
action_32,
action_33,
action_34,
action_35,
action_36,
action_37,
action_38,
action_39,
action_40,
action_41,
action_42,
action_43,
action_44,
action_45,
action_46,
action_47,
action_48,
action_49,
action_50,
action_51,
action_52,
action_53,
action_54,
action_55,
action_56,
action_57,
action_58,
action_59,
action_60,
action_61,
action_62,
action_63,
action_64,
action_65,
action_66,
action_67,
action_68,
action_69,
action_70,
action_71,
action_72,
action_73,
action_74,
action_75,
action_76,
action_77,
action_78,
action_79,
action_80,
action_81,
action_82,
action_83,
action_84,
action_85,
action_86,
action_87,
action_88,
action_89,
action_90,
action_91,
action_92,
action_93,
action_94,
action_95,
action_96,
action_97,
action_98,
action_99,
action_100,
action_101,
action_102,
action_103,
action_104,
action_105,
action_106,
action_107,
action_108,
action_109,
action_110,
action_111,
action_112,
action_113,
action_114,
action_115,
action_116,
action_117,
action_118,
action_119,
action_120,
action_121,
action_122,
action_123,
action_124,
action_125,
action_126,
action_127,
action_128,
action_129,
action_130,
action_131,
action_132,
action_133,
action_134,
action_135,
action_136,
action_137,
action_138,
action_139,
action_140,
action_141,
action_142,
action_143,
action_144,
action_145,
action_146,
action_147,
action_148,
action_149,
action_150,
action_151,
action_152,
action_153,
action_154,
action_155,
action_156,
action_157,
action_158,
action_159,
action_160,
action_161,
action_162,
action_163,
action_164,
action_165,
action_166,
action_167,
action_168,
action_169,
action_170,
action_171,
action_172,
action_173,
action_174,
action_175,
action_176,
action_177,
action_178,
action_179,
action_180,
action_181,
action_182,
action_183,
action_184,
action_185,
action_186,
action_187,
action_188,
action_189,
action_190,
action_191,
action_192,
action_193,
action_194,
action_195,
action_196,
action_197,
action_198,
action_199,
action_200,
action_201,
action_202,
action_203,
action_204,
action_205,
action_206,
action_207,
action_208,
action_209,
action_210,
action_211,
action_212,
action_213,
action_214,
action_215,
action_216,
action_217,
action_218,
action_219,
action_220,
action_221,
action_222,
action_223,
action_224,
action_225,
action_226,
action_227,
action_228,
action_229,
action_230,
action_231,
action_232,
action_233,
action_234,
action_235,
action_236,
action_237,
action_238,
action_239,
action_240,
action_241,
action_242,
action_243,
action_244,
action_245,
action_246,
action_247,
action_248,
action_249,
action_250,
action_251,
action_252,
action_253,
action_254,
action_255,
action_256,
action_257,
action_258,
action_259,
action_260,
action_261,
action_262,
action_263,
action_264,
action_265,
action_266,
action_267,
action_268,
action_269,
action_270,
action_271,
action_272,
action_273,
action_274,
action_275,
action_276,
action_277 :: Int -> HappyReduction
happyReduce_1,
happyReduce_2,
happyReduce_3,
happyReduce_4,
happyReduce_5,
happyReduce_6,
happyReduce_7,
happyReduce_8,
happyReduce_9,
happyReduce_10,
happyReduce_11,
happyReduce_12,
happyReduce_13,
happyReduce_14,
happyReduce_15,
happyReduce_16,
happyReduce_17,
happyReduce_18,
happyReduce_19,
happyReduce_20,
happyReduce_21,
happyReduce_22,
happyReduce_23,
happyReduce_24,
happyReduce_25,
happyReduce_26,
happyReduce_27,
happyReduce_28,
happyReduce_29,
happyReduce_30,
happyReduce_31,
happyReduce_32,
happyReduce_33,
happyReduce_34,
happyReduce_35,
happyReduce_36,
happyReduce_37,
happyReduce_38,
happyReduce_39,
happyReduce_40,
happyReduce_41,
happyReduce_42,
happyReduce_43,
happyReduce_44,
happyReduce_45,
happyReduce_46,
happyReduce_47,
happyReduce_48,
happyReduce_49,
happyReduce_50,
happyReduce_51,
happyReduce_52,
happyReduce_53,
happyReduce_54,
happyReduce_55,
happyReduce_56,
happyReduce_57,
happyReduce_58,
happyReduce_59,
happyReduce_60,
happyReduce_61,
happyReduce_62,
happyReduce_63,
happyReduce_64,
happyReduce_65,
happyReduce_66,
happyReduce_67,
happyReduce_68,
happyReduce_69,
happyReduce_70,
happyReduce_71,
happyReduce_72,
happyReduce_73,
happyReduce_74,
happyReduce_75,
happyReduce_76,
happyReduce_77,
happyReduce_78,
happyReduce_79,
happyReduce_80,
happyReduce_81,
happyReduce_82,
happyReduce_83,
happyReduce_84,
happyReduce_85,
happyReduce_86,
happyReduce_87,
happyReduce_88,
happyReduce_89,
happyReduce_90,
happyReduce_91,
happyReduce_92,
happyReduce_93,
happyReduce_94,
happyReduce_95,
happyReduce_96,
happyReduce_97,
happyReduce_98,
happyReduce_99,
happyReduce_100,
happyReduce_101,
happyReduce_102,
happyReduce_103,
happyReduce_104,
happyReduce_105,
happyReduce_106,
happyReduce_107,
happyReduce_108,
happyReduce_109,
happyReduce_110,
happyReduce_111,
happyReduce_112,
happyReduce_113,
happyReduce_114,
happyReduce_115,
happyReduce_116,
happyReduce_117,
happyReduce_118,
happyReduce_119,
happyReduce_120,
happyReduce_121,
happyReduce_122,
happyReduce_123,
happyReduce_124,
happyReduce_125,
happyReduce_126,
happyReduce_127,
happyReduce_128,
happyReduce_129,
happyReduce_130,
happyReduce_131,
happyReduce_132,
happyReduce_133,
happyReduce_134,
happyReduce_135,
happyReduce_136,
happyReduce_137,
happyReduce_138,
happyReduce_139,
happyReduce_140,
happyReduce_141,
happyReduce_142,
happyReduce_143,
happyReduce_144,
happyReduce_145,
happyReduce_146,
happyReduce_147,
happyReduce_148,
happyReduce_149,
happyReduce_150,
happyReduce_151,
happyReduce_152,
happyReduce_153,
happyReduce_154,
happyReduce_155,
happyReduce_156,
happyReduce_157,
happyReduce_158,
happyReduce_159,
happyReduce_160,
happyReduce_161,
happyReduce_162,
happyReduce_163,
happyReduce_164,
happyReduce_165,
happyReduce_166,
happyReduce_167,
happyReduce_168,
happyReduce_169,
happyReduce_170,
happyReduce_171,
happyReduce_172,
happyReduce_173,
happyReduce_174,
happyReduce_175,
happyReduce_176 :: HappyReduction
action_0 (4) = happyGoto action_3
action_0 (5) = happyGoto action_2
action_0 _ = happyReduce_2
action_1 (5) = happyGoto action_2
action_1 _ = happyFail
action_2 (91) = happyShift action_15
action_2 (92) = happyShift action_16
action_2 (101) = happyShift action_17
action_2 (116) = happyShift action_18
action_2 (125) = happyShift action_19
action_2 (126) = happyShift action_20
action_2 (130) = happyShift action_21
action_2 (154) = happyShift action_22
action_2 (158) = happyShift action_23
action_2 (159) = happyShift action_24
action_2 (160) = happyShift action_25
action_2 (6) = happyGoto action_4
action_2 (7) = happyGoto action_5
action_2 (8) = happyGoto action_6
action_2 (9) = happyGoto action_7
action_2 (10) = happyGoto action_8
action_2 (17) = happyGoto action_9
action_2 (31) = happyGoto action_10
action_2 (49) = happyGoto action_11
action_2 (52) = happyGoto action_12
action_2 (59) = happyGoto action_13
action_2 (70) = happyGoto action_14
action_2 _ = happyReduce_1
action_3 (162) = happyAccept
action_3 _ = happyFail
action_4 _ = happyReduce_3
action_5 (90) = happyShift action_87
action_5 _ = happyFail
action_6 (90) = happyShift action_86
action_6 _ = happyFail
action_7 _ = happyReduce_13
action_8 (95) = happyShift action_85
action_8 _ = happyFail
action_9 (90) = happyShift action_84
action_9 _ = happyFail
action_10 (90) = happyShift action_83
action_10 _ = happyFail
action_11 _ = happyReduce_68
action_12 _ = happyReduce_69
action_13 _ = happyReduce_70
action_14 (90) = happyShift action_82
action_14 _ = happyFail
action_15 (141) = happyShift action_27
action_15 (89) = happyGoto action_81
action_15 _ = happyFail
action_16 (141) = happyShift action_27
action_16 (89) = happyGoto action_80
action_16 _ = happyFail
action_17 (98) = happyShift action_53
action_17 (118) = happyShift action_54
action_17 (119) = happyShift action_55
action_17 (120) = happyShift action_56
action_17 (121) = happyShift action_57
action_17 (122) = happyShift action_58
action_17 (123) = happyShift action_59
action_17 (124) = happyShift action_60
action_17 (141) = happyShift action_61
action_17 (146) = happyShift action_62
action_17 (147) = happyShift action_63
action_17 (153) = happyShift action_79
action_17 (15) = happyGoto action_69
action_17 (18) = happyGoto action_70
action_17 (41) = happyGoto action_71
action_17 (42) = happyGoto action_72
action_17 (43) = happyGoto action_73
action_17 (44) = happyGoto action_74
action_17 (45) = happyGoto action_75
action_17 (62) = happyGoto action_76
action_17 (63) = happyGoto action_77
action_17 (86) = happyGoto action_78
action_17 _ = happyFail
action_18 (98) = happyShift action_53
action_18 (118) = happyShift action_54
action_18 (119) = happyShift action_55
action_18 (120) = happyShift action_56
action_18 (121) = happyShift action_57
action_18 (122) = happyShift action_58
action_18 (123) = happyShift action_59
action_18 (124) = happyShift action_60
action_18 (125) = happyShift action_19
action_18 (126) = happyShift action_20
action_18 (130) = happyShift action_21
action_18 (141) = happyShift action_61
action_18 (146) = happyShift action_62
action_18 (147) = happyShift action_63
action_18 (148) = happyShift action_64
action_18 (149) = happyShift action_65
action_18 (150) = happyShift action_66
action_18 (151) = happyShift action_67
action_18 (153) = happyShift action_68
action_18 (15) = happyGoto action_31
action_18 (32) = happyGoto action_32
action_18 (33) = happyGoto action_33
action_18 (34) = happyGoto action_34
action_18 (35) = happyGoto action_35
action_18 (36) = happyGoto action_36
action_18 (37) = happyGoto action_37
action_18 (41) = happyGoto action_38
action_18 (42) = happyGoto action_39
action_18 (43) = happyGoto action_40
action_18 (44) = happyGoto action_41
action_18 (45) = happyGoto action_42
action_18 (46) = happyGoto action_43
action_18 (47) = happyGoto action_44
action_18 (48) = happyGoto action_45
action_18 (49) = happyGoto action_46
action_18 (52) = happyGoto action_47
action_18 (59) = happyGoto action_48
action_18 (61) = happyGoto action_49
action_18 (62) = happyGoto action_50
action_18 (63) = happyGoto action_51
action_18 (85) = happyGoto action_52
action_18 _ = happyFail
action_19 (141) = happyShift action_27
action_19 (89) = happyGoto action_30
action_19 _ = happyFail
action_20 (141) = happyShift action_27
action_20 (89) = happyGoto action_29
action_20 _ = happyFail
action_21 (141) = happyShift action_27
action_21 (89) = happyGoto action_28
action_21 _ = happyFail
action_22 (141) = happyShift action_27
action_22 (89) = happyGoto action_26
action_22 _ = happyFail
action_23 _ = happyReduce_10
action_24 _ = happyReduce_11
action_25 _ = happyReduce_9
action_26 (95) = happyShift action_108
action_26 _ = happyFail
action_27 _ = happyReduce_176
action_28 (95) = happyShift action_107
action_28 _ = happyFail
action_29 (127) = happyShift action_106
action_29 _ = happyFail
action_30 (95) = happyShift action_105
action_30 _ = happyFail
action_31 (98) = happyShift action_94
action_31 _ = happyReduce_76
action_32 _ = happyReduce_67
action_33 (141) = happyShift action_27
action_33 (38) = happyGoto action_102
action_33 (40) = happyGoto action_103
action_33 (89) = happyGoto action_104
action_33 _ = happyFail
action_34 _ = happyReduce_72
action_35 _ = happyReduce_74
action_36 _ = happyReduce_75
action_37 _ = happyReduce_73
action_38 _ = happyReduce_77
action_39 _ = happyReduce_78
action_40 _ = happyReduce_79
action_41 _ = happyReduce_80
action_42 _ = happyReduce_81
action_43 _ = happyReduce_82
action_44 _ = happyReduce_83
action_45 _ = happyReduce_84
action_46 _ = happyReduce_89
action_47 _ = happyReduce_90
action_48 _ = happyReduce_91
action_49 _ = happyReduce_85
action_50 _ = happyReduce_86
action_51 _ = happyReduce_87
action_52 _ = happyReduce_88
action_53 (141) = happyShift action_101
action_53 _ = happyFail
action_54 _ = happyReduce_97
action_55 _ = happyReduce_98
action_56 (119) = happyShift action_100
action_56 _ = happyFail
action_57 (119) = happyShift action_99
action_57 _ = happyFail
action_58 _ = happyReduce_101
action_59 _ = happyReduce_102
action_60 _ = happyReduce_103
action_61 _ = happyReduce_27
action_62 (131) = happyShift action_98
action_62 _ = happyReduce_131
action_63 (131) = happyShift action_97
action_63 _ = happyReduce_133
action_64 (131) = happyShift action_96
action_64 _ = happyFail
action_65 _ = happyReduce_106
action_66 _ = happyReduce_105
action_67 _ = happyReduce_104
action_68 (131) = happyShift action_95
action_68 _ = happyFail
action_69 (98) = happyShift action_94
action_69 _ = happyReduce_41
action_70 (141) = happyShift action_27
action_70 (89) = happyGoto action_93
action_70 _ = happyFail
action_71 _ = happyReduce_37
action_72 _ = happyReduce_33
action_73 _ = happyReduce_34
action_74 _ = happyReduce_35
action_75 _ = happyReduce_36
action_76 _ = happyReduce_38
action_77 _ = happyReduce_39
action_78 _ = happyReduce_40
action_79 _ = happyReduce_173
action_80 (95) = happyReduce_24
action_80 (97) = happyShift action_92
action_80 (13) = happyGoto action_90
action_80 (14) = happyGoto action_91
action_80 _ = happyReduce_14
action_81 (95) = happyShift action_89
action_81 _ = happyFail
action_82 _ = happyReduce_6
action_83 _ = happyReduce_4
action_84 _ = happyReduce_5
action_85 (11) = happyGoto action_88
action_85 _ = happyReduce_17
action_86 _ = happyReduce_7
action_87 _ = happyReduce_8
action_88 (96) = happyShift action_154
action_88 (101) = happyShift action_17
action_88 (116) = happyShift action_18
action_88 (125) = happyShift action_19
action_88 (126) = happyShift action_20
action_88 (130) = happyShift action_21
action_88 (142) = happyReduce_140
action_88 (152) = happyShift action_155
action_88 (154) = happyShift action_22
action_88 (157) = happyShift action_156
action_88 (12) = happyGoto action_146
action_88 (17) = happyGoto action_147
action_88 (31) = happyGoto action_148
action_88 (49) = happyGoto action_11
action_88 (52) = happyGoto action_12
action_88 (59) = happyGoto action_13
action_88 (67) = happyGoto action_149
action_88 (68) = happyGoto action_150
action_88 (70) = happyGoto action_151
action_88 (73) = happyGoto action_152
action_88 (74) = happyGoto action_153
action_88 _ = happyReduce_149
action_89 (5) = happyGoto action_145
action_89 _ = happyReduce_2
action_90 _ = happyReduce_16
action_91 _ = happyReduce_25
action_92 (98) = happyShift action_53
action_92 (141) = happyShift action_61
action_92 (15) = happyGoto action_144
action_92 _ = happyFail
action_93 (102) = happyShift action_143
action_93 _ = happyFail
action_94 (141) = happyShift action_142
action_94 _ = happyFail
action_95 (98) = happyShift action_53
action_95 (113) = happyShift action_135
action_95 (139) = happyShift action_136
action_95 (141) = happyShift action_61
action_95 (143) = happyShift action_137
action_95 (145) = happyShift action_138
action_95 (15) = happyGoto action_122
action_95 (19) = happyGoto action_123
action_95 (20) = happyGoto action_124
action_95 (21) = happyGoto action_125
action_95 (22) = happyGoto action_126
action_95 (23) = happyGoto action_127
action_95 (24) = happyGoto action_128
action_95 (25) = happyGoto action_129
action_95 (26) = happyGoto action_130
action_95 (27) = happyGoto action_131
action_95 (28) = happyGoto action_132
action_95 (29) = happyGoto action_133
action_95 (30) = happyGoto action_141
action_95 _ = happyFail
action_96 (98) = happyShift action_53
action_96 (118) = happyShift action_54
action_96 (119) = happyShift action_55
action_96 (120) = happyShift action_56
action_96 (121) = happyShift action_57
action_96 (122) = happyShift action_58
action_96 (123) = happyShift action_59
action_96 (124) = happyShift action_60
action_96 (141) = happyShift action_61
action_96 (146) = happyShift action_62
action_96 (147) = happyShift action_63
action_96 (148) = happyShift action_64
action_96 (149) = happyShift action_65
action_96 (150) = happyShift action_66
action_96 (151) = happyShift action_67
action_96 (153) = happyShift action_68
action_96 (15) = happyGoto action_31
action_96 (34) = happyGoto action_140
action_96 (35) = happyGoto action_35
action_96 (36) = happyGoto action_36
action_96 (41) = happyGoto action_38
action_96 (42) = happyGoto action_39
action_96 (43) = happyGoto action_40
action_96 (44) = happyGoto action_41
action_96 (45) = happyGoto action_42
action_96 (46) = happyGoto action_43
action_96 (47) = happyGoto action_44
action_96 (48) = happyGoto action_45
action_96 (61) = happyGoto action_49
action_96 (62) = happyGoto action_50
action_96 (63) = happyGoto action_51
action_96 (85) = happyGoto action_52
action_96 _ = happyFail
action_97 (98) = happyShift action_53
action_97 (113) = happyShift action_135
action_97 (139) = happyShift action_136
action_97 (141) = happyShift action_61
action_97 (143) = happyShift action_137
action_97 (145) = happyShift action_138
action_97 (15) = happyGoto action_122
action_97 (19) = happyGoto action_123
action_97 (20) = happyGoto action_124
action_97 (21) = happyGoto action_125
action_97 (22) = happyGoto action_126
action_97 (23) = happyGoto action_127
action_97 (24) = happyGoto action_128
action_97 (25) = happyGoto action_129
action_97 (26) = happyGoto action_130
action_97 (27) = happyGoto action_131
action_97 (28) = happyGoto action_132
action_97 (29) = happyGoto action_133
action_97 (30) = happyGoto action_139
action_97 _ = happyFail
action_98 (98) = happyShift action_53
action_98 (113) = happyShift action_135
action_98 (139) = happyShift action_136
action_98 (141) = happyShift action_61
action_98 (143) = happyShift action_137
action_98 (145) = happyShift action_138
action_98 (15) = happyGoto action_122
action_98 (19) = happyGoto action_123
action_98 (20) = happyGoto action_124
action_98 (21) = happyGoto action_125
action_98 (22) = happyGoto action_126
action_98 (23) = happyGoto action_127
action_98 (24) = happyGoto action_128
action_98 (25) = happyGoto action_129
action_98 (26) = happyGoto action_130
action_98 (27) = happyGoto action_131
action_98 (28) = happyGoto action_132
action_98 (29) = happyGoto action_133
action_98 (30) = happyGoto action_134
action_98 _ = happyFail
action_99 _ = happyReduce_99
action_100 _ = happyReduce_100
action_101 _ = happyReduce_28
action_102 (99) = happyShift action_121
action_102 _ = happyReduce_71
action_103 _ = happyReduce_92
action_104 (135) = happyShift action_120
action_104 (65) = happyGoto action_118
action_104 (66) = happyGoto action_119
action_104 _ = happyReduce_95
action_105 (98) = happyShift action_53
action_105 (118) = happyShift action_54
action_105 (119) = happyShift action_55
action_105 (120) = happyShift action_56
action_105 (121) = happyShift action_57
action_105 (122) = happyShift action_58
action_105 (123) = happyShift action_59
action_105 (124) = happyShift action_60
action_105 (125) = happyShift action_19
action_105 (126) = happyShift action_20
action_105 (130) = happyShift action_21
action_105 (141) = happyShift action_61
action_105 (146) = happyShift action_62
action_105 (147) = happyShift action_63
action_105 (148) = happyShift action_64
action_105 (149) = happyShift action_65
action_105 (150) = happyShift action_66
action_105 (151) = happyShift action_67
action_105 (153) = happyShift action_68
action_105 (15) = happyGoto action_31
action_105 (33) = happyGoto action_109
action_105 (34) = happyGoto action_34
action_105 (35) = happyGoto action_35
action_105 (36) = happyGoto action_36
action_105 (37) = happyGoto action_37
action_105 (41) = happyGoto action_38
action_105 (42) = happyGoto action_39
action_105 (43) = happyGoto action_40
action_105 (44) = happyGoto action_41
action_105 (45) = happyGoto action_42
action_105 (46) = happyGoto action_43
action_105 (47) = happyGoto action_44
action_105 (48) = happyGoto action_45
action_105 (49) = happyGoto action_46
action_105 (50) = happyGoto action_116
action_105 (51) = happyGoto action_117
action_105 (52) = happyGoto action_47
action_105 (59) = happyGoto action_48
action_105 (61) = happyGoto action_49
action_105 (62) = happyGoto action_50
action_105 (63) = happyGoto action_51
action_105 (85) = happyGoto action_52
action_105 _ = happyFail
action_106 (93) = happyShift action_115
action_106 _ = happyFail
action_107 (141) = happyShift action_27
action_107 (60) = happyGoto action_113
action_107 (89) = happyGoto action_114
action_107 _ = happyFail
action_108 (98) = happyShift action_53
action_108 (118) = happyShift action_54
action_108 (119) = happyShift action_55
action_108 (120) = happyShift action_56
action_108 (121) = happyShift action_57
action_108 (122) = happyShift action_58
action_108 (123) = happyShift action_59
action_108 (124) = happyShift action_60
action_108 (125) = happyShift action_19
action_108 (126) = happyShift action_20
action_108 (130) = happyShift action_21
action_108 (141) = happyShift action_61
action_108 (146) = happyShift action_62
action_108 (147) = happyShift action_63
action_108 (148) = happyShift action_64
action_108 (149) = happyShift action_65
action_108 (150) = happyShift action_66
action_108 (151) = happyShift action_67
action_108 (153) = happyShift action_68
action_108 (15) = happyGoto action_31
action_108 (33) = happyGoto action_109
action_108 (34) = happyGoto action_34
action_108 (35) = happyGoto action_35
action_108 (36) = happyGoto action_36
action_108 (37) = happyGoto action_37
action_108 (41) = happyGoto action_38
action_108 (42) = happyGoto action_39
action_108 (43) = happyGoto action_40
action_108 (44) = happyGoto action_41
action_108 (45) = happyGoto action_42
action_108 (46) = happyGoto action_43
action_108 (47) = happyGoto action_44
action_108 (48) = happyGoto action_45
action_108 (49) = happyGoto action_46
action_108 (51) = happyGoto action_110
action_108 (52) = happyGoto action_47
action_108 (59) = happyGoto action_48
action_108 (61) = happyGoto action_49
action_108 (62) = happyGoto action_50
action_108 (63) = happyGoto action_51
action_108 (71) = happyGoto action_111
action_108 (72) = happyGoto action_112
action_108 (85) = happyGoto action_52
action_108 _ = happyReduce_144
action_109 (141) = happyShift action_27
action_109 (38) = happyGoto action_205
action_109 (40) = happyGoto action_103
action_109 (89) = happyGoto action_104
action_109 _ = happyFail
action_110 _ = happyReduce_146
action_111 (96) = happyShift action_204
action_111 _ = happyFail
action_112 (98) = happyShift action_53
action_112 (118) = happyShift action_54
action_112 (119) = happyShift action_55
action_112 (120) = happyShift action_56
action_112 (121) = happyShift action_57
action_112 (122) = happyShift action_58
action_112 (123) = happyShift action_59
action_112 (124) = happyShift action_60
action_112 (125) = happyShift action_19
action_112 (126) = happyShift action_20
action_112 (130) = happyShift action_21
action_112 (141) = happyShift action_61
action_112 (146) = happyShift action_62
action_112 (147) = happyShift action_63
action_112 (148) = happyShift action_64
action_112 (149) = happyShift action_65
action_112 (150) = happyShift action_66
action_112 (151) = happyShift action_67
action_112 (153) = happyShift action_68
action_112 (15) = happyGoto action_31
action_112 (33) = happyGoto action_109
action_112 (34) = happyGoto action_34
action_112 (35) = happyGoto action_35
action_112 (36) = happyGoto action_36
action_112 (37) = happyGoto action_37
action_112 (41) = happyGoto action_38
action_112 (42) = happyGoto action_39
action_112 (43) = happyGoto action_40
action_112 (44) = happyGoto action_41
action_112 (45) = happyGoto action_42
action_112 (46) = happyGoto action_43
action_112 (47) = happyGoto action_44
action_112 (48) = happyGoto action_45
action_112 (49) = happyGoto action_46
action_112 (51) = happyGoto action_203
action_112 (52) = happyGoto action_47
action_112 (59) = happyGoto action_48
action_112 (61) = happyGoto action_49
action_112 (62) = happyGoto action_50
action_112 (63) = happyGoto action_51
action_112 (85) = happyGoto action_52
action_112 _ = happyReduce_145
action_113 (96) = happyShift action_201
action_113 (99) = happyShift action_202
action_113 _ = happyFail
action_114 _ = happyReduce_126
action_115 (98) = happyShift action_53
action_115 (119) = happyShift action_55
action_115 (120) = happyShift action_56
action_115 (121) = happyShift action_57
action_115 (122) = happyShift action_58
action_115 (124) = happyShift action_60
action_115 (130) = happyShift action_21
action_115 (141) = happyShift action_61
action_115 (15) = happyGoto action_195
action_115 (42) = happyGoto action_196
action_115 (43) = happyGoto action_197
action_115 (45) = happyGoto action_198
action_115 (53) = happyGoto action_199
action_115 (59) = happyGoto action_200
action_115 _ = happyFail
action_116 (96) = happyShift action_194
action_116 (98) = happyShift action_53
action_116 (118) = happyShift action_54
action_116 (119) = happyShift action_55
action_116 (120) = happyShift action_56
action_116 (121) = happyShift action_57
action_116 (122) = happyShift action_58
action_116 (123) = happyShift action_59
action_116 (124) = happyShift action_60
action_116 (125) = happyShift action_19
action_116 (126) = happyShift action_20
action_116 (130) = happyShift action_21
action_116 (141) = happyShift action_61
action_116 (146) = happyShift action_62
action_116 (147) = happyShift action_63
action_116 (148) = happyShift action_64
action_116 (149) = happyShift action_65
action_116 (150) = happyShift action_66
action_116 (151) = happyShift action_67
action_116 (153) = happyShift action_68
action_116 (15) = happyGoto action_31
action_116 (33) = happyGoto action_109
action_116 (34) = happyGoto action_34
action_116 (35) = happyGoto action_35
action_116 (36) = happyGoto action_36
action_116 (37) = happyGoto action_37
action_116 (41) = happyGoto action_38
action_116 (42) = happyGoto action_39
action_116 (43) = happyGoto action_40
action_116 (44) = happyGoto action_41
action_116 (45) = happyGoto action_42
action_116 (46) = happyGoto action_43
action_116 (47) = happyGoto action_44
action_116 (48) = happyGoto action_45
action_116 (49) = happyGoto action_46
action_116 (51) = happyGoto action_193
action_116 (52) = happyGoto action_47
action_116 (59) = happyGoto action_48
action_116 (61) = happyGoto action_49
action_116 (62) = happyGoto action_50
action_116 (63) = happyGoto action_51
action_116 (85) = happyGoto action_52
action_116 _ = happyFail
action_117 _ = happyReduce_108
action_118 (135) = happyShift action_120
action_118 (66) = happyGoto action_192
action_118 _ = happyReduce_96
action_119 _ = happyReduce_135
action_120 (98) = happyShift action_53
action_120 (113) = happyShift action_135
action_120 (139) = happyShift action_136
action_120 (141) = happyShift action_61
action_120 (143) = happyShift action_137
action_120 (145) = happyShift action_138
action_120 (15) = happyGoto action_122
action_120 (19) = happyGoto action_123
action_120 (20) = happyGoto action_124
action_120 (21) = happyGoto action_125
action_120 (22) = happyGoto action_126
action_120 (23) = happyGoto action_127
action_120 (24) = happyGoto action_128
action_120 (25) = happyGoto action_129
action_120 (26) = happyGoto action_130
action_120 (27) = happyGoto action_131
action_120 (28) = happyGoto action_132
action_120 (29) = happyGoto action_133
action_120 (30) = happyGoto action_191
action_120 _ = happyFail
-- ===========================================================================
-- NOTE(review): Machine-generated LALR(1) parser tables emitted by the Happy
-- parser generator.  Each 'action_N' function is the action row for parser
-- state N: it pattern-matches on a terminal/non-terminal token number and
-- yields a 'happyShift', 'happyGoto', 'happyReduce_*' or 'happyFail' step.
-- DO NOT EDIT BY HAND — regenerate from the grammar (.y) source instead.
-- ===========================================================================
action_121 (141) = happyShift action_27
action_121 (40) = happyGoto action_190
action_121 (89) = happyGoto action_104
action_121 _ = happyFail
action_122 (98) = happyShift action_94
action_122 _ = happyReduce_63
action_123 _ = happyReduce_66
action_124 (105) = happyShift action_189
action_124 _ = happyReduce_42
action_125 (107) = happyShift action_188
action_125 _ = happyReduce_43
action_126 (108) = happyShift action_187
action_126 _ = happyReduce_45
action_127 (110) = happyShift action_186
action_127 _ = happyReduce_47
action_128 (143) = happyShift action_184
action_128 (145) = happyShift action_185
action_128 _ = happyReduce_49
action_129 (111) = happyShift action_181
action_129 (112) = happyShift action_182
action_129 (144) = happyShift action_183
action_129 _ = happyReduce_51
action_130 _ = happyReduce_54
action_131 (98) = happyShift action_53
action_131 (139) = happyShift action_136
action_131 (141) = happyShift action_61
action_131 (15) = happyGoto action_122
action_131 (28) = happyGoto action_180
action_131 (29) = happyGoto action_133
action_131 _ = happyFail
action_132 _ = happyReduce_59
action_133 _ = happyReduce_64
action_134 (133) = happyShift action_179
action_134 _ = happyFail
action_135 _ = happyReduce_62
action_136 _ = happyReduce_65
action_137 _ = happyReduce_61
action_138 _ = happyReduce_60
action_139 (133) = happyShift action_178
action_139 _ = happyFail
action_140 (99) = happyShift action_176
action_140 (133) = happyShift action_177
action_140 _ = happyFail
action_141 (99) = happyShift action_175
action_141 _ = happyFail
action_142 _ = happyReduce_29
action_143 (98) = happyShift action_53
action_143 (113) = happyShift action_135
action_143 (139) = happyShift action_136
action_143 (141) = happyShift action_61
action_143 (143) = happyShift action_137
action_143 (145) = happyShift action_138
action_143 (15) = happyGoto action_122
action_143 (19) = happyGoto action_174
action_143 (20) = happyGoto action_124
action_143 (21) = happyGoto action_125
action_143 (22) = happyGoto action_126
action_143 (23) = happyGoto action_127
action_143 (24) = happyGoto action_128
action_143 (25) = happyGoto action_129
action_143 (26) = happyGoto action_130
action_143 (27) = happyGoto action_131
action_143 (28) = happyGoto action_132
action_143 (29) = happyGoto action_133
action_143 _ = happyFail
action_144 (98) = happyShift action_94
action_144 (99) = happyShift action_173
action_144 (16) = happyGoto action_172
action_144 _ = happyReduce_30
action_145 (91) = happyShift action_15
action_145 (92) = happyShift action_16
action_145 (96) = happyShift action_171
action_145 (101) = happyShift action_17
action_145 (116) = happyShift action_18
action_145 (125) = happyShift action_19
action_145 (126) = happyShift action_20
action_145 (130) = happyShift action_21
action_145 (154) = happyShift action_22
action_145 (158) = happyShift action_23
action_145 (159) = happyShift action_24
action_145 (160) = happyShift action_25
action_145 (6) = happyGoto action_4
action_145 (7) = happyGoto action_5
action_145 (8) = happyGoto action_6
action_145 (9) = happyGoto action_7
action_145 (10) = happyGoto action_8
action_145 (17) = happyGoto action_9
action_145 (31) = happyGoto action_10
action_145 (49) = happyGoto action_11
action_145 (52) = happyGoto action_12
action_145 (59) = happyGoto action_13
action_145 (70) = happyGoto action_14
action_145 _ = happyFail
action_146 _ = happyReduce_18
action_147 (90) = happyShift action_170
action_147 _ = happyFail
action_148 (90) = happyShift action_169
action_148 _ = happyFail
action_149 (90) = happyShift action_168
action_149 _ = happyFail
action_150 (142) = happyShift action_167
action_150 _ = happyFail
action_151 (90) = happyShift action_166
action_151 _ = happyFail
action_152 (90) = happyShift action_165
action_152 _ = happyFail
action_153 (98) = happyShift action_53
action_153 (118) = happyShift action_54
action_153 (119) = happyShift action_55
action_153 (120) = happyShift action_56
action_153 (121) = happyShift action_57
action_153 (122) = happyShift action_58
action_153 (123) = happyShift action_59
action_153 (124) = happyShift action_60
action_153 (137) = happyShift action_164
action_153 (141) = happyShift action_61
action_153 (146) = happyShift action_62
action_153 (147) = happyShift action_63
action_153 (149) = happyShift action_65
action_153 (150) = happyShift action_66
action_153 (151) = happyShift action_67
action_153 (153) = happyShift action_68
action_153 (15) = happyGoto action_157
action_153 (35) = happyGoto action_158
action_153 (41) = happyGoto action_38
action_153 (42) = happyGoto action_39
action_153 (43) = happyGoto action_40
action_153 (44) = happyGoto action_41
action_153 (45) = happyGoto action_42
action_153 (46) = happyGoto action_43
action_153 (47) = happyGoto action_44
action_153 (48) = happyGoto action_45
action_153 (62) = happyGoto action_159
action_153 (63) = happyGoto action_160
action_153 (75) = happyGoto action_161
action_153 (84) = happyGoto action_162
action_153 (85) = happyGoto action_163
action_153 _ = happyFail
action_154 _ = happyReduce_15
action_155 _ = happyReduce_150
action_156 _ = happyReduce_139
action_157 (98) = happyShift action_94
action_157 _ = happyReduce_171
action_158 _ = happyReduce_167
action_159 _ = happyReduce_168
action_160 _ = happyReduce_169
action_161 (141) = happyShift action_27
action_161 (89) = happyGoto action_224
action_161 _ = happyFail
action_162 _ = happyReduce_151
action_163 _ = happyReduce_170
action_164 _ = happyReduce_152
action_165 _ = happyReduce_23
action_166 _ = happyReduce_21
action_167 (98) = happyShift action_53
action_167 (118) = happyShift action_54
action_167 (119) = happyShift action_55
action_167 (120) = happyShift action_56
action_167 (121) = happyShift action_57
action_167 (122) = happyShift action_58
action_167 (123) = happyShift action_59
action_167 (124) = happyShift action_60
action_167 (141) = happyShift action_61
action_167 (146) = happyShift action_62
action_167 (147) = happyShift action_63
action_167 (149) = happyShift action_65
action_167 (150) = happyShift action_66
action_167 (151) = happyShift action_67
action_167 (153) = happyShift action_68
action_167 (15) = happyGoto action_157
action_167 (35) = happyGoto action_158
action_167 (41) = happyGoto action_38
action_167 (42) = happyGoto action_39
action_167 (43) = happyGoto action_40
action_167 (44) = happyGoto action_41
action_167 (45) = happyGoto action_42
action_167 (46) = happyGoto action_43
action_167 (47) = happyGoto action_44
action_167 (48) = happyGoto action_45
action_167 (62) = happyGoto action_159
action_167 (63) = happyGoto action_160
action_167 (84) = happyGoto action_223
action_167 (85) = happyGoto action_163
action_167 _ = happyFail
action_168 _ = happyReduce_22
action_169 _ = happyReduce_19
action_170 _ = happyReduce_20
action_171 _ = happyReduce_12
action_172 _ = happyReduce_26
action_173 (98) = happyShift action_53
action_173 (141) = happyShift action_61
action_173 (15) = happyGoto action_222
action_173 _ = happyFail
action_174 _ = happyReduce_32
action_175 (139) = happyShift action_221
action_175 (87) = happyGoto action_220
action_175 _ = happyFail
action_176 (98) = happyShift action_53
action_176 (113) = happyShift action_135
action_176 (139) = happyShift action_136
action_176 (141) = happyShift action_61
action_176 (143) = happyShift action_137
action_176 (145) = happyShift action_138
action_176 (15) = happyGoto action_122
action_176 (19) = happyGoto action_123
action_176 (20) = happyGoto action_124
action_176 (21) = happyGoto action_125
action_176 (22) = happyGoto action_126
action_176 (23) = happyGoto action_127
action_176 (24) = happyGoto action_128
action_176 (25) = happyGoto action_129
action_176 (26) = happyGoto action_130
action_176 (27) = happyGoto action_131
action_176 (28) = happyGoto action_132
action_176 (29) = happyGoto action_133
action_176 (30) = happyGoto action_219
action_176 _ = happyFail
action_177 _ = happyReduce_129
action_178 _ = happyReduce_132
action_179 _ = happyReduce_130
action_180 _ = happyReduce_58
action_181 (98) = happyShift action_53
action_181 (113) = happyShift action_135
action_181 (139) = happyShift action_136
action_181 (141) = happyShift action_61
action_181 (143) = happyShift action_137
action_181 (145) = happyShift action_138
action_181 (15) = happyGoto action_122
action_181 (26) = happyGoto action_218
action_181 (27) = happyGoto action_131
action_181 (28) = happyGoto action_132
action_181 (29) = happyGoto action_133
action_181 _ = happyFail
action_182 (98) = happyShift action_53
action_182 (113) = happyShift action_135
action_182 (139) = happyShift action_136
action_182 (141) = happyShift action_61
action_182 (143) = happyShift action_137
action_182 (145) = happyShift action_138
action_182 (15) = happyGoto action_122
action_182 (26) = happyGoto action_217
action_182 (27) = happyGoto action_131
action_182 (28) = happyGoto action_132
action_182 (29) = happyGoto action_133
action_182 _ = happyFail
action_183 (98) = happyShift action_53
action_183 (113) = happyShift action_135
action_183 (139) = happyShift action_136
action_183 (141) = happyShift action_61
action_183 (143) = happyShift action_137
action_183 (145) = happyShift action_138
action_183 (15) = happyGoto action_122
action_183 (26) = happyGoto action_216
action_183 (27) = happyGoto action_131
action_183 (28) = happyGoto action_132
action_183 (29) = happyGoto action_133
action_183 _ = happyFail
action_184 (98) = happyShift action_53
action_184 (113) = happyShift action_135
action_184 (139) = happyShift action_136
action_184 (141) = happyShift action_61
action_184 (143) = happyShift action_137
action_184 (145) = happyShift action_138
action_184 (15) = happyGoto action_122
action_184 (25) = happyGoto action_215
action_184 (26) = happyGoto action_130
action_184 (27) = happyGoto action_131
action_184 (28) = happyGoto action_132
action_184 (29) = happyGoto action_133
action_184 _ = happyFail
action_185 (98) = happyShift action_53
action_185 (113) = happyShift action_135
action_185 (139) = happyShift action_136
action_185 (141) = happyShift action_61
action_185 (143) = happyShift action_137
action_185 (145) = happyShift action_138
action_185 (15) = happyGoto action_122
action_185 (25) = happyGoto action_214
action_185 (26) = happyGoto action_130
action_185 (27) = happyGoto action_131
action_185 (28) = happyGoto action_132
action_185 (29) = happyGoto action_133
action_185 _ = happyFail
action_186 (98) = happyShift action_53
action_186 (113) = happyShift action_135
action_186 (139) = happyShift action_136
action_186 (141) = happyShift action_61
action_186 (143) = happyShift action_137
action_186 (145) = happyShift action_138
action_186 (15) = happyGoto action_122
action_186 (24) = happyGoto action_213
action_186 (25) = happyGoto action_129
action_186 (26) = happyGoto action_130
action_186 (27) = happyGoto action_131
action_186 (28) = happyGoto action_132
action_186 (29) = happyGoto action_133
action_186 _ = happyFail
action_187 (98) = happyShift action_53
action_187 (113) = happyShift action_135
action_187 (139) = happyShift action_136
action_187 (141) = happyShift action_61
action_187 (143) = happyShift action_137
action_187 (145) = happyShift action_138
action_187 (15) = happyGoto action_122
action_187 (23) = happyGoto action_212
action_187 (24) = happyGoto action_128
action_187 (25) = happyGoto action_129
action_187 (26) = happyGoto action_130
action_187 (27) = happyGoto action_131
action_187 (28) = happyGoto action_132
action_187 (29) = happyGoto action_133
action_187 _ = happyFail
action_188 (98) = happyShift action_53
action_188 (113) = happyShift action_135
action_188 (139) = happyShift action_136
action_188 (141) = happyShift action_61
action_188 (143) = happyShift action_137
action_188 (145) = happyShift action_138
action_188 (15) = happyGoto action_122
action_188 (22) = happyGoto action_211
action_188 (23) = happyGoto action_127
action_188 (24) = happyGoto action_128
action_188 (25) = happyGoto action_129
action_188 (26) = happyGoto action_130
action_188 (27) = happyGoto action_131
action_188 (28) = happyGoto action_132
action_188 (29) = happyGoto action_133
action_188 _ = happyFail
action_189 (98) = happyShift action_53
action_189 (113) = happyShift action_135
action_189 (139) = happyShift action_136
action_189 (141) = happyShift action_61
action_189 (143) = happyShift action_137
action_189 (145) = happyShift action_138
action_189 (15) = happyGoto action_122
action_189 (21) = happyGoto action_210
action_189 (22) = happyGoto action_126
action_189 (23) = happyGoto action_127
action_189 (24) = happyGoto action_128
action_189 (25) = happyGoto action_129
action_189 (26) = happyGoto action_130
action_189 (27) = happyGoto action_131
action_189 (28) = happyGoto action_132
action_189 (29) = happyGoto action_133
action_189 _ = happyFail
action_190 _ = happyReduce_93
action_191 (136) = happyShift action_209
action_191 _ = happyFail
action_192 _ = happyReduce_136
action_193 _ = happyReduce_109
action_194 _ = happyReduce_107
action_195 (98) = happyShift action_94
action_195 _ = happyReduce_116
action_196 _ = happyReduce_112
action_197 _ = happyReduce_113
action_198 _ = happyReduce_114
action_199 (94) = happyShift action_208
action_199 _ = happyFail
action_200 _ = happyReduce_115
action_201 _ = happyReduce_125
action_202 (141) = happyShift action_27
action_202 (89) = happyGoto action_207
action_202 _ = happyFail
action_203 _ = happyReduce_147
action_204 _ = happyReduce_143
action_205 (90) = happyShift action_206
action_205 (99) = happyShift action_121
action_205 _ = happyFail
action_206 _ = happyReduce_110
action_207 _ = happyReduce_127
action_208 (95) = happyShift action_233
action_208 _ = happyFail
action_209 _ = happyReduce_137
action_210 (107) = happyShift action_188
action_210 _ = happyReduce_44
action_211 (108) = happyShift action_187
action_211 _ = happyReduce_46
action_212 (110) = happyShift action_186
action_212 _ = happyReduce_48
action_213 (143) = happyShift action_184
action_213 (145) = happyShift action_185
action_213 _ = happyReduce_50
action_214 (111) = happyShift action_181
action_214 (112) = happyShift action_182
action_214 (144) = happyShift action_183
action_214 _ = happyReduce_53
action_215 (111) = happyShift action_181
action_215 (112) = happyShift action_182
action_215 (144) = happyShift action_183
action_215 _ = happyReduce_52
action_216 _ = happyReduce_55
action_217 _ = happyReduce_57
action_218 _ = happyReduce_56
action_219 (133) = happyShift action_232
action_219 _ = happyFail
action_220 (133) = happyShift action_231
action_220 _ = happyFail
action_221 _ = happyReduce_174
action_222 (98) = happyShift action_94
action_222 (99) = happyShift action_173
action_222 (16) = happyGoto action_230
action_222 _ = happyReduce_30
action_223 (141) = happyShift action_27
action_223 (39) = happyGoto action_227
action_223 (69) = happyGoto action_228
action_223 (89) = happyGoto action_229
action_223 _ = happyFail
action_224 (93) = happyShift action_226
action_224 (76) = happyGoto action_225
action_224 _ = happyFail
action_225 (155) = happyShift action_247
action_225 (80) = happyGoto action_246
action_225 _ = happyReduce_159
action_226 (94) = happyShift action_244
action_226 (138) = happyShift action_245
action_226 (77) = happyGoto action_241
action_226 (78) = happyGoto action_242
action_226 (79) = happyGoto action_243
action_226 _ = happyFail
action_227 _ = happyReduce_141
action_228 (99) = happyShift action_240
action_228 _ = happyReduce_138
action_229 _ = happyReduce_94
action_230 _ = happyReduce_31
action_231 _ = happyReduce_172
action_232 _ = happyReduce_128
action_233 (128) = happyShift action_238
action_233 (129) = happyShift action_239
action_233 (54) = happyGoto action_234
action_233 (55) = happyGoto action_235
action_233 (56) = happyGoto action_236
action_233 (57) = happyGoto action_237
action_233 _ = happyFail
action_234 (96) = happyShift action_261
action_234 (128) = happyShift action_238
action_234 (129) = happyShift action_239
action_234 (55) = happyGoto action_260
action_234 (56) = happyGoto action_236
action_234 (57) = happyGoto action_237
action_234 _ = happyFail
action_235 _ = happyReduce_117
action_236 (98) = happyShift action_53
action_236 (118) = happyShift action_54
action_236 (119) = happyShift action_55
action_236 (120) = happyShift action_56
action_236 (121) = happyShift action_57
action_236 (122) = happyShift action_58
action_236 (123) = happyShift action_59
action_236 (124) = happyShift action_60
action_236 (125) = happyShift action_19
action_236 (126) = happyShift action_20
action_236 (128) = happyShift action_238
action_236 (129) = happyShift action_239
action_236 (130) = happyShift action_21
action_236 (141) = happyShift action_61
action_236 (146) = happyShift action_62
action_236 (147) = happyShift action_63
action_236 (148) = happyShift action_64
action_236 (149) = happyShift action_65
action_236 (150) = happyShift action_66
action_236 (151) = happyShift action_67
action_236 (153) = happyShift action_68
action_236 (15) = happyGoto action_31
action_236 (33) = happyGoto action_257
action_236 (34) = happyGoto action_34
action_236 (35) = happyGoto action_35
action_236 (36) = happyGoto action_36
action_236 (37) = happyGoto action_37
action_236 (41) = happyGoto action_38
action_236 (42) = happyGoto action_39
action_236 (43) = happyGoto action_40
action_236 (44) = happyGoto action_41
action_236 (45) = happyGoto action_42
action_236 (46) = happyGoto action_43
action_236 (47) = happyGoto action_44
action_236 (48) = happyGoto action_45
action_236 (49) = happyGoto action_46
action_236 (52) = happyGoto action_47
action_236 (57) = happyGoto action_258
action_236 (58) = happyGoto action_259
action_236 (59) = happyGoto action_48
action_236 (61) = happyGoto action_49
action_236 (62) = happyGoto action_50
action_236 (63) = happyGoto action_51
action_236 (85) = happyGoto action_52
action_236 _ = happyFail
action_237 _ = happyReduce_120
action_238 (98) = happyShift action_53
action_238 (113) = happyShift action_135
action_238 (139) = happyShift action_136
action_238 (141) = happyShift action_61
action_238 (143) = happyShift action_137
action_238 (145) = happyShift action_138
action_238 (15) = happyGoto action_122
action_238 (19) = happyGoto action_256
action_238 (20) = happyGoto action_124
action_238 (21) = happyGoto action_125
action_238 (22) = happyGoto action_126
action_238 (23) = happyGoto action_127
action_238 (24) = happyGoto action_128
action_238 (25) = happyGoto action_129
action_238 (26) = happyGoto action_130
action_238 (27) = happyGoto action_131
action_238 (28) = happyGoto action_132
action_238 (29) = happyGoto action_133
action_238 _ = happyFail
action_239 (97) = happyShift action_255
action_239 _ = happyFail
action_240 (141) = happyShift action_27
action_240 (39) = happyGoto action_254
action_240 (89) = happyGoto action_229
action_240 _ = happyFail
action_241 (94) = happyShift action_252
action_241 (99) = happyShift action_253
action_241 _ = happyFail
action_242 _ = happyReduce_155
action_243 (98) = happyShift action_53
action_243 (118) = happyShift action_54
action_243 (119) = happyShift action_55
action_243 (120) = happyShift action_56
action_243 (121) = happyShift action_57
action_243 (122) = happyShift action_58
action_243 (123) = happyShift action_59
action_243 (124) = happyShift action_60
action_243 (141) = happyShift action_61
action_243 (146) = happyShift action_62
action_243 (147) = happyShift action_63
action_243 (149) = happyShift action_65
action_243 (150) = happyShift action_66
action_243 (151) = happyShift action_67
action_243 (153) = happyShift action_68
action_243 (15) = happyGoto action_157
action_243 (35) = happyGoto action_158
action_243 (41) = happyGoto action_38
action_243 (42) = happyGoto action_39
action_243 (43) = happyGoto action_40
action_243 (44) = happyGoto action_41
action_243 (45) = happyGoto action_42
action_243 (46) = happyGoto action_43
action_243 (47) = happyGoto action_44
action_243 (48) = happyGoto action_45
action_243 (62) = happyGoto action_159
action_243 (63) = happyGoto action_160
action_243 (84) = happyGoto action_251
action_243 (85) = happyGoto action_163
action_243 _ = happyFail
action_244 _ = happyReduce_154
action_245 _ = happyReduce_158
action_246 (156) = happyShift action_250
action_246 (81) = happyGoto action_249
action_246 _ = happyReduce_161
action_247 (93) = happyShift action_248
action_247 _ = happyFail
action_248 (98) = happyShift action_53
action_248 (141) = happyShift action_61
action_248 (15) = happyGoto action_268
action_248 (82) = happyGoto action_269
action_248 _ = happyFail
action_249 _ = happyReduce_148
action_250 (93) = happyShift action_267
action_250 _ = happyFail
action_251 (141) = happyShift action_27
action_251 (39) = happyGoto action_266
action_251 (89) = happyGoto action_229
action_251 _ = happyFail
action_252 _ = happyReduce_153
action_253 (138) = happyShift action_245
action_253 (78) = happyGoto action_265
action_253 (79) = happyGoto action_243
action_253 _ = happyFail
action_254 _ = happyReduce_142
action_255 _ = happyReduce_123
action_256 (97) = happyShift action_264
action_256 _ = happyFail
action_257 (141) = happyShift action_27
action_257 (40) = happyGoto action_263
action_257 (89) = happyGoto action_104
action_257 _ = happyFail
action_258 _ = happyReduce_121
action_259 (90) = happyShift action_262
action_259 _ = happyFail
action_260 _ = happyReduce_118
action_261 _ = happyReduce_111
action_262 _ = happyReduce_119
action_263 _ = happyReduce_124
action_264 _ = happyReduce_122
action_265 _ = happyReduce_156
action_266 _ = happyReduce_157
action_267 (140) = happyShift action_273
action_267 (83) = happyGoto action_272
action_267 _ = happyFail
action_268 (98) = happyShift action_94
action_268 _ = happyReduce_163
action_269 (94) = happyShift action_270
action_269 (99) = happyShift action_271
action_269 _ = happyFail
action_270 _ = happyReduce_160
action_271 (98) = happyShift action_53
action_271 (141) = happyShift action_61
action_271 (15) = happyGoto action_276
action_271 _ = happyFail
action_272 (94) = happyShift action_274
action_272 (99) = happyShift action_275
action_272 _ = happyFail
action_273 _ = happyReduce_165
action_274 _ = happyReduce_162
action_275 (140) = happyShift action_277
action_275 _ = happyFail
action_276 (98) = happyShift action_94
action_276 _ = happyReduce_164
action_277 _ = happyReduce_166
-- ===========================================================================
-- NOTE(review): Machine-generated reduction rules emitted by the Happy parser
-- generator.  Each 'happyReduce_N' pairs a reduction arity/non-terminal with
-- its semantic action 'happyReduction_N', which unwraps 'HappyAbsSyn*' stack
-- values and builds the AST node for grammar rule N (e.g. 'Module',
-- 'Interface', 'Constant', 'Binary'/'Unary' expression constructors below).
-- DO NOT EDIT BY HAND — regenerate from the grammar (.y) source instead.
-- ===========================================================================
happyReduce_1 = happySpecReduce_1 4 happyReduction_1
happyReduction_1 (HappyAbsSyn4 happy_var_1)
	= HappyAbsSyn4
	((reverse happy_var_1)
	)
happyReduction_1 _ = notHappyAtAll
happyReduce_2 = happySpecReduce_0 5 happyReduction_2
happyReduction_2 = HappyAbsSyn4
	([]
	)
happyReduce_3 = happySpecReduce_2 5 happyReduction_3
happyReduction_3 (HappyAbsSyn6 happy_var_2)
	(HappyAbsSyn4 happy_var_1)
	= HappyAbsSyn4
	(happy_var_2 : happy_var_1
	)
happyReduction_3 _ _ = notHappyAtAll
happyReduce_4 = happySpecReduce_2 6 happyReduction_4
happyReduction_4 _
	(HappyAbsSyn6 happy_var_1)
	= HappyAbsSyn6
	(happy_var_1
	)
happyReduction_4 _ _ = notHappyAtAll
happyReduce_5 = happySpecReduce_2 6 happyReduction_5
happyReduction_5 _
	(HappyAbsSyn6 happy_var_1)
	= HappyAbsSyn6
	(happy_var_1
	)
happyReduction_5 _ _ = notHappyAtAll
happyReduce_6 = happySpecReduce_2 6 happyReduction_6
happyReduction_6 _
	(HappyAbsSyn6 happy_var_1)
	= HappyAbsSyn6
	(happy_var_1
	)
happyReduction_6 _ _ = notHappyAtAll
happyReduce_7 = happySpecReduce_2 6 happyReduction_7
happyReduction_7 _
	(HappyAbsSyn6 happy_var_1)
	= HappyAbsSyn6
	(happy_var_1
	)
happyReduction_7 _ _ = notHappyAtAll
happyReduce_8 = happySpecReduce_2 6 happyReduction_8
happyReduction_8 _
	(HappyAbsSyn6 happy_var_1)
	= HappyAbsSyn6
	(happy_var_1
	)
happyReduction_8 _ _ = notHappyAtAll
happyReduce_9 = happySpecReduce_1 6 happyReduction_9
happyReduction_9 (HappyTerminal (T_pragma happy_var_1))
	= HappyAbsSyn6
	(Pragma happy_var_1
	)
happyReduction_9 _ = notHappyAtAll
happyReduce_10 = happySpecReduce_1 6 happyReduction_10
happyReduction_10 (HappyTerminal (T_include_start happy_var_1))
	= HappyAbsSyn6
	(IncludeStart happy_var_1
	)
happyReduction_10 _ = notHappyAtAll
happyReduce_11 = happySpecReduce_1 6 happyReduction_11
happyReduction_11 _
	= HappyAbsSyn6
	(IncludeEnd
	)
happyReduce_12 = happyReduce 5 7 happyReduction_12
happyReduction_12 (_ `HappyStk`
	(HappyAbsSyn4 happy_var_4) `HappyStk`
	_ `HappyStk`
	(HappyAbsSyn39 happy_var_2) `HappyStk`
	_ `HappyStk`
	happyRest)
	= HappyAbsSyn6
	(Module happy_var_2 (reverse happy_var_4)
	) `HappyStk` happyRest
happyReduce_13 = happySpecReduce_1 8 happyReduction_13
happyReduction_13 (HappyAbsSyn6 happy_var_1)
	= HappyAbsSyn6
	(happy_var_1
	)
happyReduction_13 _ = notHappyAtAll
happyReduce_14 = happySpecReduce_2 8 happyReduction_14
happyReduction_14 (HappyAbsSyn39 happy_var_2)
	_
	= HappyAbsSyn6
	(Forward happy_var_2
	)
happyReduction_14 _ _ = notHappyAtAll
happyReduce_15 = happyReduce 4 9 happyReduction_15
happyReduction_15 (_ `HappyStk`
	(HappyAbsSyn4 happy_var_3) `HappyStk`
	_ `HappyStk`
	(HappyAbsSyn10 happy_var_1) `HappyStk`
	happyRest)
	= HappyAbsSyn6
	(let (ids,inherit) = happy_var_1 in Interface ids inherit (reverse happy_var_3)
	) `HappyStk` happyRest
happyReduce_16 = happySpecReduce_3 10 happyReduction_16
happyReduction_16 (HappyAbsSyn13 happy_var_3)
	(HappyAbsSyn39 happy_var_2)
	_
	= HappyAbsSyn10
	((happy_var_2,happy_var_3)
	)
happyReduction_16 _ _ _ = notHappyAtAll
happyReduce_17 = happySpecReduce_0 11 happyReduction_17
happyReduction_17 = HappyAbsSyn4
	([]
	)
happyReduce_18 = happySpecReduce_2 11 happyReduction_18
happyReduction_18 (HappyAbsSyn6 happy_var_2)
	(HappyAbsSyn4 happy_var_1)
	= HappyAbsSyn4
	(happy_var_2 : happy_var_1
	)
happyReduction_18 _ _ = notHappyAtAll
happyReduce_19 = happySpecReduce_2 12 happyReduction_19
happyReduction_19 _
	(HappyAbsSyn6 happy_var_1)
	= HappyAbsSyn6
	(happy_var_1
	)
happyReduction_19 _ _ = notHappyAtAll
happyReduce_20 = happySpecReduce_2 12 happyReduction_20
happyReduction_20 _
	(HappyAbsSyn6 happy_var_1)
	= HappyAbsSyn6
	(happy_var_1
	)
happyReduction_20 _ _ = notHappyAtAll
happyReduce_21 = happySpecReduce_2 12 happyReduction_21
happyReduction_21 _
	(HappyAbsSyn6 happy_var_1)
	= HappyAbsSyn6
	(happy_var_1
	)
happyReduction_21 _ _ = notHappyAtAll
happyReduce_22 = happySpecReduce_2 12 happyReduction_22
happyReduction_22 _
	(HappyAbsSyn6 happy_var_1)
	= HappyAbsSyn6
	(happy_var_1
	)
happyReduction_22 _ _ = notHappyAtAll
happyReduce_23 = happySpecReduce_2 12 happyReduction_23
happyReduction_23 _
	(HappyAbsSyn6 happy_var_1)
	= HappyAbsSyn6
	(happy_var_1
	)
happyReduction_23 _ _ = notHappyAtAll
happyReduce_24 = happySpecReduce_0 13 happyReduction_24
happyReduction_24 = HappyAbsSyn13
	([]
	)
happyReduce_25 = happySpecReduce_1 13 happyReduction_25
happyReduction_25 (HappyAbsSyn13 happy_var_1)
	= HappyAbsSyn13
	(happy_var_1
	)
happyReduction_25 _ = notHappyAtAll
happyReduce_26 = happySpecReduce_3 14 happyReduction_26
happyReduction_26 (HappyAbsSyn13 happy_var_3)
	(HappyAbsSyn15 happy_var_2)
	_
	= HappyAbsSyn13
	(happy_var_2:(reverse happy_var_3)
	)
happyReduction_26 _ _ _ = notHappyAtAll
happyReduce_27 = happySpecReduce_1 15 happyReduction_27
happyReduction_27 (HappyTerminal (T_id happy_var_1))
	= HappyAbsSyn15
	(happy_var_1
	)
happyReduction_27 _ = notHappyAtAll
happyReduce_28 = happySpecReduce_2 15 happyReduction_28
happyReduction_28 (HappyTerminal (T_id happy_var_2))
	_
	= HappyAbsSyn15
	(("::"++ happy_var_2)
	)
happyReduction_28 _ _ = notHappyAtAll
happyReduce_29 = happySpecReduce_3 15 happyReduction_29
happyReduction_29 (HappyTerminal (T_id happy_var_3))
	_
	(HappyAbsSyn15 happy_var_1)
	= HappyAbsSyn15
	(happy_var_1 ++ ':':':':happy_var_3
	)
happyReduction_29 _ _ _ = notHappyAtAll
happyReduce_30 = happySpecReduce_0 16 happyReduction_30
happyReduction_30 = HappyAbsSyn13
	([]
	)
happyReduce_31 = happySpecReduce_3 16 happyReduction_31
happyReduction_31 (HappyAbsSyn13 happy_var_3)
	(HappyAbsSyn15 happy_var_2)
	_
	= HappyAbsSyn13
	(happy_var_2 : happy_var_3
	)
happyReduction_31 _ _ _ = notHappyAtAll
happyReduce_32 = happyReduce 5 17 happyReduction_32
happyReduction_32 ((HappyAbsSyn19 happy_var_5) `HappyStk`
	_ `HappyStk`
	(HappyAbsSyn39 happy_var_3) `HappyStk`
	(HappyAbsSyn18 happy_var_2) `HappyStk`
	_ `HappyStk`
	happyRest)
	= HappyAbsSyn6
	(Constant happy_var_3 [] happy_var_2 happy_var_5
	) `HappyStk` happyRest
happyReduce_33 = happySpecReduce_1 18 happyReduction_33
happyReduction_33 (HappyAbsSyn18 happy_var_1)
	= HappyAbsSyn18
	(happy_var_1
	)
happyReduction_33 _ = notHappyAtAll
happyReduce_34 = happySpecReduce_1 18 happyReduction_34
happyReduction_34 (HappyAbsSyn18 happy_var_1)
	= HappyAbsSyn18
	(happy_var_1
	)
happyReduction_34 _ = notHappyAtAll
happyReduce_35 = happySpecReduce_1 18 happyReduction_35
happyReduction_35 (HappyAbsSyn18 happy_var_1)
	= HappyAbsSyn18
	(happy_var_1
	)
happyReduction_35 _ = notHappyAtAll
happyReduce_36 = happySpecReduce_1 18 happyReduction_36
happyReduction_36 (HappyAbsSyn18 happy_var_1)
	= HappyAbsSyn18
	(happy_var_1
	)
happyReduction_36 _ = notHappyAtAll
happyReduce_37 = happySpecReduce_1 18 happyReduction_37
happyReduction_37 (HappyAbsSyn18 happy_var_1)
	= HappyAbsSyn18
	(happy_var_1
	)
happyReduction_37 _ = notHappyAtAll
happyReduce_38 = happySpecReduce_1 18 happyReduction_38
happyReduction_38 (HappyAbsSyn18 happy_var_1)
	= HappyAbsSyn18
	(happy_var_1
	)
happyReduction_38 _ = notHappyAtAll
happyReduce_39 = happySpecReduce_1 18 happyReduction_39
happyReduction_39 (HappyAbsSyn18 happy_var_1)
	= HappyAbsSyn18
	(happy_var_1
	)
happyReduction_39 _ = notHappyAtAll
happyReduce_40 = happySpecReduce_1 18 happyReduction_40
happyReduction_40 (HappyAbsSyn18 happy_var_1)
	= HappyAbsSyn18
	(happy_var_1
	)
happyReduction_40 _ = notHappyAtAll
happyReduce_41 = happySpecReduce_1 18 happyReduction_41
happyReduction_41 (HappyAbsSyn15 happy_var_1)
	= HappyAbsSyn18
	(TyName happy_var_1 Nothing
	)
happyReduction_41 _ = notHappyAtAll
happyReduce_42 = happySpecReduce_1 19 happyReduction_42
happyReduction_42 (HappyAbsSyn19 happy_var_1)
	= HappyAbsSyn19
	(happy_var_1
	)
happyReduction_42 _ = notHappyAtAll
happyReduce_43 = happySpecReduce_1 20 happyReduction_43
happyReduction_43 (HappyAbsSyn19 happy_var_1)
	= HappyAbsSyn19
	(happy_var_1
	)
happyReduction_43 _ = notHappyAtAll
happyReduce_44 = happySpecReduce_3 20 happyReduction_44
happyReduction_44 (HappyAbsSyn19 happy_var_3)
	_
	(HappyAbsSyn19 happy_var_1)
	= HappyAbsSyn19
	(Binary Or happy_var_1 happy_var_3
	)
happyReduction_44 _ _ _ = notHappyAtAll
happyReduce_45 = happySpecReduce_1 21 happyReduction_45
happyReduction_45 (HappyAbsSyn19 happy_var_1)
	= HappyAbsSyn19
	(happy_var_1
	)
happyReduction_45 _ = notHappyAtAll
happyReduce_46 = happySpecReduce_3 21 happyReduction_46
happyReduction_46 (HappyAbsSyn19 happy_var_3)
	_
	(HappyAbsSyn19 happy_var_1)
	= HappyAbsSyn19
	(Binary Xor happy_var_1 happy_var_3
	)
happyReduction_46 _ _ _ = notHappyAtAll
happyReduce_47 = happySpecReduce_1 22 happyReduction_47
happyReduction_47 (HappyAbsSyn19 happy_var_1)
	= HappyAbsSyn19
	(happy_var_1
	)
happyReduction_47 _ = notHappyAtAll
happyReduce_48 = happySpecReduce_3 22 happyReduction_48
happyReduction_48 (HappyAbsSyn19 happy_var_3)
	_
	(HappyAbsSyn19 happy_var_1)
	= HappyAbsSyn19
	(Binary And happy_var_1 happy_var_3
	)
happyReduction_48 _ _ _ = notHappyAtAll
happyReduce_49 = happySpecReduce_1 23 happyReduction_49
happyReduction_49 (HappyAbsSyn19 happy_var_1)
	= HappyAbsSyn19
	(happy_var_1
	)
happyReduction_49 _ = notHappyAtAll
happyReduce_50 = happySpecReduce_3 23 happyReduction_50
happyReduction_50 (HappyAbsSyn19 happy_var_3)
	(HappyTerminal (T_shift happy_var_2))
	(HappyAbsSyn19 happy_var_1)
	= HappyAbsSyn19
	(Binary (Shift happy_var_2) happy_var_1 happy_var_3
	)
happyReduction_50 _ _ _ = notHappyAtAll
happyReduce_51 = happySpecReduce_1 24 happyReduction_51
happyReduction_51 (HappyAbsSyn19 happy_var_1)
	= HappyAbsSyn19
	(happy_var_1
	)
happyReduction_51 _ = notHappyAtAll
happyReduce_52 = happySpecReduce_3 24 happyReduction_52
happyReduction_52 (HappyAbsSyn19 happy_var_3)
	_
	(HappyAbsSyn19 happy_var_1)
	= HappyAbsSyn19
	(Binary Add happy_var_1 happy_var_3
	)
happyReduction_52 _ _ _ = notHappyAtAll
happyReduce_53 = happySpecReduce_3 24 happyReduction_53
happyReduction_53 (HappyAbsSyn19 happy_var_3)
	_
	(HappyAbsSyn19 happy_var_1)
	= HappyAbsSyn19
	(Binary Sub happy_var_1 happy_var_3
	)
happyReduction_53 _ _ _ = notHappyAtAll
happyReduce_54 = happySpecReduce_1 25 happyReduction_54
happyReduction_54 (HappyAbsSyn19 happy_var_1)
	= HappyAbsSyn19
	(happy_var_1
	)
happyReduction_54 _ = notHappyAtAll
happyReduce_55 = happySpecReduce_3 25 happyReduction_55
happyReduction_55 (HappyAbsSyn19 happy_var_3)
	_
	(HappyAbsSyn19 happy_var_1)
	= HappyAbsSyn19
	(Binary Mul happy_var_1 happy_var_3
	)
happyReduction_55 _ _ _ = notHappyAtAll
happyReduce_56 = happySpecReduce_3 25 happyReduction_56
happyReduction_56 (HappyAbsSyn19 happy_var_3)
	_
	(HappyAbsSyn19 happy_var_1)
	= HappyAbsSyn19
	(Binary Div happy_var_1 happy_var_3
	)
happyReduction_56 _ _ _ = notHappyAtAll
happyReduce_57 = happySpecReduce_3 25 happyReduction_57
happyReduction_57 (HappyAbsSyn19 happy_var_3)
	_
	(HappyAbsSyn19 happy_var_1)
	= HappyAbsSyn19
	(Binary Mod happy_var_1 happy_var_3
	)
happyReduction_57 _ _ _ = notHappyAtAll
happyReduce_58 = happySpecReduce_2 26 happyReduction_58
happyReduction_58 (HappyAbsSyn19 happy_var_2)
	(HappyAbsSyn27 happy_var_1)
	= HappyAbsSyn19
	(Unary happy_var_1 happy_var_2
	)
happyReduction_58 _ _ = notHappyAtAll
happyReduce_59 = happySpecReduce_1 26 happyReduction_59
happyReduction_59 (HappyAbsSyn19 happy_var_1)
	= HappyAbsSyn19
	(happy_var_1
	)
happyReduction_59 _ = notHappyAtAll
happyReduce_60 = happySpecReduce_1 27 happyReduction_60
happyReduction_60 _
	= HappyAbsSyn27
	(Minus
	)
happyReduce_61 = happySpecReduce_1 27 happyReduction_61
happyReduction_61 _
	= HappyAbsSyn27
	(Plus
	)
happyReduce_62 = happySpecReduce_1 27 happyReduction_62
happyReduction_62 _
	= HappyAbsSyn27
	(Not
	)
happyReduce_63 = happySpecReduce_1 28 happyReduction_63
happyReduction_63 (HappyAbsSyn15 happy_var_1)
	= HappyAbsSyn19
	(Var happy_var_1
	)
happyReduction_63 _ = notHappyAtAll
happyReduce_64 = happySpecReduce_1 28 happyReduction_64
happyReduction_64 (HappyAbsSyn29 happy_var_1)
	= HappyAbsSyn19
	(Lit happy_var_1
	)
happyReduction_64 _ = notHappyAtAll
happyReduce_65 = happySpecReduce_1 29 happyReduction_65
happyReduction_65 (HappyTerminal (T_literal happy_var_1))
	= HappyAbsSyn29
	(happy_var_1
	)
happyReduction_65 _ = notHappyAtAll
happyReduce_66 = happySpecReduce_1 30 happyReduction_66
happyReduction_66 (HappyAbsSyn19 happy_var_1)
	= HappyAbsSyn19
	(happy_var_1
	)
happyReduction_66 _ = notHappyAtAll
happyReduce_67 = happySpecReduce_2 31 happyReduction_67
happyReduction_67 (HappyAbsSyn32 happy_var_2)
	_
	= HappyAbsSyn6
	(let (spec, decls) = happy_var_2 in Typedef spec [] decls
	)
happyReduction_67 _ _ = notHappyAtAll
happyReduce_68 = happySpecReduce_1 31 happyReduction_68
happyReduction_68 (HappyAbsSyn18 happy_var_1)
	= HappyAbsSyn6
	(TypeDecl happy_var_1
	)
happyReduction_68 _ = notHappyAtAll
happyReduce_69 = happySpecReduce_1 31 happyReduction_69
happyReduction_69 (HappyAbsSyn18 happy_var_1)
	= HappyAbsSyn6
	(TypeDecl happy_var_1
	)
happyReduction_69 _ = notHappyAtAll
happyReduce_70 = happySpecReduce_1 31 happyReduction_70
happyReduction_70 (HappyAbsSyn18 happy_var_1)
	= HappyAbsSyn6
	(TypeDecl happy_var_1
	)
happyReduction_70 _ = notHappyAtAll
happyReduce_71 = happySpecReduce_2 32 happyReduction_71
happyReduction_71 (HappyAbsSyn38 happy_var_2)
	(HappyAbsSyn18 happy_var_1)
	= HappyAbsSyn32
	((happy_var_1,happy_var_2)
	)
happyReduction_71 _ _ = notHappyAtAll
happyReduce_72 = happySpecReduce_1 33 happyReduction_72
happyReduction_72 (HappyAbsSyn18 happy_var_1)
	= HappyAbsSyn18
	(happy_var_1
	)
happyReduction_72 _ = notHappyAtAll
happyReduce_73 = happySpecReduce_1 33 happyReduction_73
happyReduction_73 (HappyAbsSyn18 happy_var_1)
	= HappyAbsSyn18
	(happy_var_1
	)
happyReduction_73 _ = notHappyAtAll
happyReduce_74 = happySpecReduce_1 34 happyReduction_74
happyReduction_74 (HappyAbsSyn18 happy_var_1)
	= HappyAbsSyn18
	(happy_var_1
	)
happyReduction_74 _ = notHappyAtAll
happyReduce_75 = happySpecReduce_1 34 happyReduction_75
happyReduction_75 (HappyAbsSyn18 happy_var_1)
= HappyAbsSyn18
(happy_var_1
)
happyReduction_75 _ = notHappyAtAll
happyReduce_76 = happySpecReduce_1 34 happyReduction_76
happyReduction_76 (HappyAbsSyn15 happy_var_1)
= HappyAbsSyn18
(TyName happy_var_1 Nothing
)
happyReduction_76 _ = notHappyAtAll
happyReduce_77 = happySpecReduce_1 35 happyReduction_77
happyReduction_77 (HappyAbsSyn18 happy_var_1)
= HappyAbsSyn18
(happy_var_1
)
happyReduction_77 _ = notHappyAtAll
happyReduce_78 = happySpecReduce_1 35 happyReduction_78
happyReduction_78 (HappyAbsSyn18 happy_var_1)
= HappyAbsSyn18
(happy_var_1
)
happyReduction_78 _ = notHappyAtAll
happyReduce_79 = happySpecReduce_1 35 happyReduction_79
happyReduction_79 (HappyAbsSyn18 happy_var_1)
= HappyAbsSyn18
(happy_var_1
)
happyReduction_79 _ = notHappyAtAll
happyReduce_80 = happySpecReduce_1 35 happyReduction_80
happyReduction_80 (HappyAbsSyn18 happy_var_1)
= HappyAbsSyn18
(happy_var_1
)
happyReduction_80 _ = notHappyAtAll
happyReduce_81 = happySpecReduce_1 35 happyReduction_81
happyReduction_81 (HappyAbsSyn18 happy_var_1)
= HappyAbsSyn18
(happy_var_1
)
happyReduction_81 _ = notHappyAtAll
happyReduce_82 = happySpecReduce_1 35 happyReduction_82
happyReduction_82 (HappyAbsSyn18 happy_var_1)
= HappyAbsSyn18
(happy_var_1
)
happyReduction_82 _ = notHappyAtAll
happyReduce_83 = happySpecReduce_1 35 happyReduction_83
happyReduction_83 (HappyAbsSyn18 happy_var_1)
= HappyAbsSyn18
(happy_var_1
)
happyReduction_83 _ = notHappyAtAll
happyReduce_84 = happySpecReduce_1 35 happyReduction_84
happyReduction_84 (HappyAbsSyn18 happy_var_1)
= HappyAbsSyn18
(happy_var_1
)
happyReduction_84 _ = notHappyAtAll
happyReduce_85 = happySpecReduce_1 36 happyReduction_85
happyReduction_85 (HappyAbsSyn18 happy_var_1)
= HappyAbsSyn18
(happy_var_1
)
happyReduction_85 _ = notHappyAtAll
happyReduce_86 = happySpecReduce_1 36 happyReduction_86
happyReduction_86 (HappyAbsSyn18 happy_var_1)
= HappyAbsSyn18
(happy_var_1
)
happyReduction_86 _ = notHappyAtAll
happyReduce_87 = happySpecReduce_1 36 happyReduction_87
happyReduction_87 (HappyAbsSyn18 happy_var_1)
= HappyAbsSyn18
(happy_var_1
)
happyReduction_87 _ = notHappyAtAll
happyReduce_88 = happySpecReduce_1 36 happyReduction_88
happyReduction_88 (HappyAbsSyn18 happy_var_1)
= HappyAbsSyn18
(happy_var_1
)
happyReduction_88 _ = notHappyAtAll
happyReduce_89 = happySpecReduce_1 37 happyReduction_89
happyReduction_89 (HappyAbsSyn18 happy_var_1)
= HappyAbsSyn18
(happy_var_1
)
happyReduction_89 _ = notHappyAtAll
happyReduce_90 = happySpecReduce_1 37 happyReduction_90
happyReduction_90 (HappyAbsSyn18 happy_var_1)
= HappyAbsSyn18
(happy_var_1
)
happyReduction_90 _ = notHappyAtAll
happyReduce_91 = happySpecReduce_1 37 happyReduction_91
happyReduction_91 (HappyAbsSyn18 happy_var_1)
= HappyAbsSyn18
(happy_var_1
)
happyReduction_91 _ = notHappyAtAll
happyReduce_92 = happySpecReduce_1 38 happyReduction_92
happyReduction_92 (HappyAbsSyn39 happy_var_1)
= HappyAbsSyn38
([happy_var_1]
)
happyReduction_92 _ = notHappyAtAll
happyReduce_93 = happySpecReduce_3 38 happyReduction_93
happyReduction_93 (HappyAbsSyn39 happy_var_3)
_
(HappyAbsSyn38 happy_var_1)
= HappyAbsSyn38
(happy_var_3 : happy_var_1
)
happyReduction_93 _ _ _ = notHappyAtAll
happyReduce_94 = happySpecReduce_1 39 happyReduction_94
happyReduction_94 (HappyAbsSyn39 happy_var_1)
= HappyAbsSyn39
(happy_var_1
)
happyReduction_94 _ = notHappyAtAll
happyReduce_95 = happySpecReduce_1 40 happyReduction_95
happyReduction_95 (HappyAbsSyn39 happy_var_1)
= HappyAbsSyn39
(happy_var_1
)
happyReduction_95 _ = notHappyAtAll
happyReduce_96 = happySpecReduce_2 40 happyReduction_96
happyReduction_96 (HappyAbsSyn65 happy_var_2)
(HappyAbsSyn39 happy_var_1)
= HappyAbsSyn39
(ArrayId happy_var_1 happy_var_2
)
happyReduction_96 _ _ = notHappyAtAll
happyReduce_97 = happySpecReduce_1 41 happyReduction_97
happyReduction_97 (HappyTerminal (T_float happy_var_1))
= HappyAbsSyn18
(TyFloat happy_var_1
)
happyReduction_97 _ = notHappyAtAll
happyReduce_98 = happySpecReduce_1 42 happyReduction_98
happyReduction_98 (HappyTerminal (T_int happy_var_1))
= HappyAbsSyn18
(TyInteger happy_var_1
)
happyReduction_98 _ = notHappyAtAll
happyReduce_99 = happySpecReduce_2 42 happyReduction_99
happyReduction_99 (HappyTerminal (T_int happy_var_2))
_
= HappyAbsSyn18
(TyApply (TySigned True) (TyInteger happy_var_2)
)
happyReduction_99 _ _ = notHappyAtAll
happyReduce_100 = happySpecReduce_2 42 happyReduction_100
happyReduction_100 (HappyTerminal (T_int happy_var_2))
_
= HappyAbsSyn18
(TyApply (TySigned False) (TyInteger happy_var_2)
)
happyReduction_100 _ _ = notHappyAtAll
happyReduce_101 = happySpecReduce_1 43 happyReduction_101
happyReduction_101 _
= HappyAbsSyn18
(TyChar
)
happyReduce_102 = happySpecReduce_1 44 happyReduction_102
happyReduction_102 _
= HappyAbsSyn18
(TyWChar
)
happyReduce_103 = happySpecReduce_1 45 happyReduction_103
happyReduction_103 _
= HappyAbsSyn18
(TyBool
)
happyReduce_104 = happySpecReduce_1 46 happyReduction_104
happyReduction_104 _
= HappyAbsSyn18
(TyOctet
)
happyReduce_105 = happySpecReduce_1 47 happyReduction_105
happyReduction_105 _
= HappyAbsSyn18
(TyAny
)
happyReduce_106 = happySpecReduce_1 48 happyReduction_106
happyReduction_106 _
= HappyAbsSyn18
(TyObject
)
happyReduce_107 = happyReduce 5 49 happyReduction_107
happyReduction_107 (_ `HappyStk`
(HappyAbsSyn50 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn39 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn18
(TyStruct (Just happy_var_2) happy_var_4 Nothing
) `HappyStk` happyRest
happyReduce_108 = happySpecReduce_1 50 happyReduction_108
happyReduction_108 (HappyAbsSyn51 happy_var_1)
= HappyAbsSyn50
([happy_var_1]
)
happyReduction_108 _ = notHappyAtAll
happyReduce_109 = happySpecReduce_2 50 happyReduction_109
happyReduction_109 (HappyAbsSyn51 happy_var_2)
(HappyAbsSyn50 happy_var_1)
= HappyAbsSyn50
(happy_var_2:happy_var_1
)
happyReduction_109 _ _ = notHappyAtAll
happyReduce_110 = happySpecReduce_3 51 happyReduction_110
happyReduction_110 _
(HappyAbsSyn38 happy_var_2)
(HappyAbsSyn18 happy_var_1)
= HappyAbsSyn51
((happy_var_1,[],happy_var_2)
)
happyReduction_110 _ _ _ = notHappyAtAll
happyReduce_111 = happyReduce 9 52 happyReduction_111
happyReduction_111 (_ `HappyStk`
(HappyAbsSyn54 happy_var_8) `HappyStk`
_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn18 happy_var_5) `HappyStk`
_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn39 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn18
(TyUnion (Just happy_var_2) happy_var_5 (Id "tagged_union") Nothing (reverse happy_var_8)
) `HappyStk` happyRest
happyReduce_112 = happySpecReduce_1 53 happyReduction_112
happyReduction_112 (HappyAbsSyn18 happy_var_1)
= HappyAbsSyn18
(happy_var_1
)
happyReduction_112 _ = notHappyAtAll
happyReduce_113 = happySpecReduce_1 53 happyReduction_113
happyReduction_113 (HappyAbsSyn18 happy_var_1)
= HappyAbsSyn18
(happy_var_1
)
happyReduction_113 _ = notHappyAtAll
happyReduce_114 = happySpecReduce_1 53 happyReduction_114
happyReduction_114 (HappyAbsSyn18 happy_var_1)
= HappyAbsSyn18
(happy_var_1
)
happyReduction_114 _ = notHappyAtAll
happyReduce_115 = happySpecReduce_1 53 happyReduction_115
happyReduction_115 (HappyAbsSyn18 happy_var_1)
= HappyAbsSyn18
(happy_var_1
)
happyReduction_115 _ = notHappyAtAll
happyReduce_116 = happySpecReduce_1 53 happyReduction_116
happyReduction_116 (HappyAbsSyn15 happy_var_1)
= HappyAbsSyn18
(TyName happy_var_1 Nothing
)
happyReduction_116 _ = notHappyAtAll
happyReduce_117 = happySpecReduce_1 54 happyReduction_117
happyReduction_117 (HappyAbsSyn55 happy_var_1)
= HappyAbsSyn54
([happy_var_1]
)
happyReduction_117 _ = notHappyAtAll
happyReduce_118 = happySpecReduce_2 54 happyReduction_118
happyReduction_118 (HappyAbsSyn55 happy_var_2)
(HappyAbsSyn54 happy_var_1)
= HappyAbsSyn54
(happy_var_2:happy_var_1
)
happyReduction_118 _ _ = notHappyAtAll
happyReduce_119 = happySpecReduce_3 55 happyReduction_119
happyReduction_119 _
(HappyAbsSyn58 happy_var_2)
(HappyAbsSyn56 happy_var_1)
= HappyAbsSyn55
(Switch happy_var_1 (Just happy_var_2)
)
happyReduction_119 _ _ _ = notHappyAtAll
happyReduce_120 = happySpecReduce_1 56 happyReduction_120
happyReduction_120 (HappyAbsSyn57 happy_var_1)
= HappyAbsSyn56
([happy_var_1]
)
happyReduction_120 _ = notHappyAtAll
happyReduce_121 = happySpecReduce_2 56 happyReduction_121
happyReduction_121 (HappyAbsSyn57 happy_var_2)
(HappyAbsSyn56 happy_var_1)
= HappyAbsSyn56
(happy_var_2:happy_var_1
)
happyReduction_121 _ _ = notHappyAtAll
happyReduce_122 = happySpecReduce_3 57 happyReduction_122
happyReduction_122 _
(HappyAbsSyn19 happy_var_2)
_
= HappyAbsSyn57
(Case [happy_var_2]
)
happyReduction_122 _ _ _ = notHappyAtAll
happyReduce_123 = happySpecReduce_2 57 happyReduction_123
happyReduction_123 _
_
= HappyAbsSyn57
(Default
)
happyReduce_124 = happySpecReduce_2 58 happyReduction_124
happyReduction_124 (HappyAbsSyn39 happy_var_2)
(HappyAbsSyn18 happy_var_1)
= HappyAbsSyn58
((Param happy_var_2 happy_var_1 [])
)
happyReduction_124 _ _ = notHappyAtAll
happyReduce_125 = happyReduce 5 59 happyReduction_125
happyReduction_125 (_ `HappyStk`
(HappyAbsSyn60 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn39 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn18
(TyEnum (Just happy_var_2) (reverse happy_var_4)
) `HappyStk` happyRest
happyReduce_126 = happySpecReduce_1 60 happyReduction_126
happyReduction_126 (HappyAbsSyn39 happy_var_1)
= HappyAbsSyn60
([(happy_var_1,[],Nothing)]
)
happyReduction_126 _ = notHappyAtAll
happyReduce_127 = happySpecReduce_3 60 happyReduction_127
happyReduction_127 (HappyAbsSyn39 happy_var_3)
_
(HappyAbsSyn60 happy_var_1)
= HappyAbsSyn60
(((happy_var_3,[],Nothing):happy_var_1)
)
happyReduction_127 _ _ _ = notHappyAtAll
happyReduce_128 = happyReduce 6 61 happyReduction_128
happyReduction_128 (_ `HappyStk`
(HappyAbsSyn19 happy_var_5) `HappyStk`
_ `HappyStk`
(HappyAbsSyn18 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn18
(TySequence happy_var_3 (Just happy_var_5)
) `HappyStk` happyRest
happyReduce_129 = happyReduce 4 61 happyReduction_129
happyReduction_129 (_ `HappyStk`
(HappyAbsSyn18 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn18
(TySequence happy_var_3 Nothing
) `HappyStk` happyRest
happyReduce_130 = happyReduce 4 62 happyReduction_130
happyReduction_130 (_ `HappyStk`
(HappyAbsSyn19 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn18
(TyString (Just happy_var_3)
) `HappyStk` happyRest
happyReduce_131 = happySpecReduce_1 62 happyReduction_131
happyReduction_131 _
= HappyAbsSyn18
(TyString Nothing
)
happyReduce_132 = happyReduce 4 63 happyReduction_132
happyReduction_132 (_ `HappyStk`
(HappyAbsSyn19 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn18
(TyWString (Just happy_var_3)
) `HappyStk` happyRest
happyReduce_133 = happySpecReduce_1 63 happyReduction_133
happyReduction_133 _
= HappyAbsSyn18
(TyWString Nothing
)
happyReduce_134 = happySpecReduce_2 64 happyReduction_134
happyReduction_134 (HappyAbsSyn65 happy_var_2)
(HappyAbsSyn39 happy_var_1)
= HappyAbsSyn64
((happy_var_1, reverse happy_var_2)
)
happyReduction_134 _ _ = notHappyAtAll
happyReduce_135 = happySpecReduce_1 65 happyReduction_135
happyReduction_135 (HappyAbsSyn19 happy_var_1)
= HappyAbsSyn65
([happy_var_1]
)
happyReduction_135 _ = notHappyAtAll
happyReduce_136 = happySpecReduce_2 65 happyReduction_136
happyReduction_136 (HappyAbsSyn19 happy_var_2)
(HappyAbsSyn65 happy_var_1)
= HappyAbsSyn65
(happy_var_2:happy_var_1
)
happyReduction_136 _ _ = notHappyAtAll
happyReduce_137 = happySpecReduce_3 66 happyReduction_137
happyReduction_137 _
(HappyAbsSyn19 happy_var_2)
_
= HappyAbsSyn19
(happy_var_2
)
happyReduction_137 _ _ _ = notHappyAtAll
happyReduce_138 = happyReduce 4 67 happyReduction_138
happyReduction_138 ((HappyAbsSyn38 happy_var_4) `HappyStk`
(HappyAbsSyn18 happy_var_3) `HappyStk`
_ `HappyStk`
(HappyAbsSyn68 happy_var_1) `HappyStk`
happyRest)
= HappyAbsSyn6
(Attribute (reverse happy_var_4) happy_var_1 happy_var_3
) `HappyStk` happyRest
happyReduce_139 = happySpecReduce_1 68 happyReduction_139
happyReduction_139 _
= HappyAbsSyn68
(True
)
happyReduce_140 = happySpecReduce_0 68 happyReduction_140
happyReduction_140 = HappyAbsSyn68
(False
)
happyReduce_141 = happySpecReduce_1 69 happyReduction_141
happyReduction_141 (HappyAbsSyn39 happy_var_1)
= HappyAbsSyn38
([happy_var_1]
)
happyReduction_141 _ = notHappyAtAll
happyReduce_142 = happySpecReduce_3 69 happyReduction_142
happyReduction_142 (HappyAbsSyn39 happy_var_3)
_
(HappyAbsSyn38 happy_var_1)
= HappyAbsSyn38
(happy_var_3:happy_var_1
)
happyReduction_142 _ _ _ = notHappyAtAll
happyReduce_143 = happyReduce 5 70 happyReduction_143
happyReduction_143 (_ `HappyStk`
(HappyAbsSyn50 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn39 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn6
(Exception happy_var_2 happy_var_4
) `HappyStk` happyRest
happyReduce_144 = happySpecReduce_0 71 happyReduction_144
happyReduction_144 = HappyAbsSyn50
([]
)
happyReduce_145 = happySpecReduce_1 71 happyReduction_145
happyReduction_145 (HappyAbsSyn50 happy_var_1)
= HappyAbsSyn50
(happy_var_1
)
happyReduction_145 _ = notHappyAtAll
happyReduce_146 = happySpecReduce_1 72 happyReduction_146
happyReduction_146 (HappyAbsSyn51 happy_var_1)
= HappyAbsSyn50
([happy_var_1]
)
happyReduction_146 _ = notHappyAtAll
happyReduce_147 = happySpecReduce_2 72 happyReduction_147
happyReduction_147 (HappyAbsSyn51 happy_var_2)
(HappyAbsSyn50 happy_var_1)
= HappyAbsSyn50
(happy_var_2:happy_var_1
)
happyReduction_147 _ _ = notHappyAtAll
happyReduce_148 = happyReduce 6 73 happyReduction_148
happyReduction_148 ((HappyAbsSyn81 happy_var_6) `HappyStk`
(HappyAbsSyn80 happy_var_5) `HappyStk`
(HappyAbsSyn76 happy_var_4) `HappyStk`
(HappyAbsSyn39 happy_var_3) `HappyStk`
(HappyAbsSyn18 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn6
(Operation (FunId happy_var_3 Nothing happy_var_4) happy_var_2 happy_var_5 happy_var_6
) `HappyStk` happyRest
happyReduce_149 = happySpecReduce_0 74 happyReduction_149
happyReduction_149 = HappyAbsSyn68
(False
)
happyReduce_150 = happySpecReduce_1 74 happyReduction_150
happyReduction_150 _
= HappyAbsSyn68
(True
)
happyReduce_151 = happySpecReduce_1 75 happyReduction_151
happyReduction_151 (HappyAbsSyn18 happy_var_1)
= HappyAbsSyn18
(happy_var_1
)
happyReduction_151 _ = notHappyAtAll
happyReduce_152 = happySpecReduce_1 75 happyReduction_152
happyReduction_152 _
= HappyAbsSyn18
(TyVoid
)
happyReduce_153 = happySpecReduce_3 76 happyReduction_153
happyReduction_153 _
(HappyAbsSyn76 happy_var_2)
_
= HappyAbsSyn76
((reverse happy_var_2)
)
happyReduction_153 _ _ _ = notHappyAtAll
happyReduce_154 = happySpecReduce_2 76 happyReduction_154
happyReduction_154 _
_
= HappyAbsSyn76
([]
)
happyReduce_155 = happySpecReduce_1 77 happyReduction_155
happyReduction_155 (HappyAbsSyn78 happy_var_1)
= HappyAbsSyn76
([happy_var_1]
)
happyReduction_155 _ = notHappyAtAll
happyReduce_156 = happySpecReduce_3 77 happyReduction_156
happyReduction_156 (HappyAbsSyn78 happy_var_3)
_
(HappyAbsSyn76 happy_var_1)
= HappyAbsSyn76
(happy_var_3:happy_var_1
)
happyReduction_156 _ _ _ = notHappyAtAll
happyReduce_157 = happySpecReduce_3 78 happyReduction_157
happyReduction_157 (HappyAbsSyn39 happy_var_3)
(HappyAbsSyn18 happy_var_2)
(HappyAbsSyn79 happy_var_1)
= HappyAbsSyn78
(Param happy_var_3 happy_var_2 [happy_var_1]
)
happyReduction_157 _ _ _ = notHappyAtAll
happyReduce_158 = happySpecReduce_1 79 happyReduction_158
happyReduction_158 (HappyTerminal (T_mode happy_var_1))
= HappyAbsSyn79
(Mode happy_var_1
)
happyReduction_158 _ = notHappyAtAll
happyReduce_159 = happySpecReduce_0 80 happyReduction_159
happyReduction_159 = HappyAbsSyn80
(Nothing
)
happyReduce_160 = happyReduce 4 80 happyReduction_160
happyReduction_160 (_ `HappyStk`
(HappyAbsSyn82 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn80
(Just (reverse happy_var_3)
) `HappyStk` happyRest
happyReduce_161 = happySpecReduce_0 81 happyReduction_161
happyReduction_161 = HappyAbsSyn81
(Nothing
)
happyReduce_162 = happyReduce 4 81 happyReduction_162
happyReduction_162 (_ `HappyStk`
(HappyAbsSyn82 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn81
(Just (reverse happy_var_3)
) `HappyStk` happyRest
happyReduce_163 = happySpecReduce_1 82 happyReduction_163
happyReduction_163 (HappyAbsSyn15 happy_var_1)
= HappyAbsSyn82
([happy_var_1]
)
happyReduction_163 _ = notHappyAtAll
happyReduce_164 = happySpecReduce_3 82 happyReduction_164
happyReduction_164 (HappyAbsSyn15 happy_var_3)
_
(HappyAbsSyn82 happy_var_1)
= HappyAbsSyn82
(happy_var_3:happy_var_1
)
happyReduction_164 _ _ _ = notHappyAtAll
happyReduce_165 = happySpecReduce_1 83 happyReduction_165
happyReduction_165 (HappyTerminal (T_string_lit happy_var_1))
= HappyAbsSyn82
([happy_var_1]
)
happyReduction_165 _ = notHappyAtAll
happyReduce_166 = happySpecReduce_3 83 happyReduction_166
happyReduction_166 (HappyTerminal (T_string_lit happy_var_3))
_
(HappyAbsSyn82 happy_var_1)
= HappyAbsSyn82
(happy_var_3:happy_var_1
)
happyReduction_166 _ _ _ = notHappyAtAll
happyReduce_167 = happySpecReduce_1 84 happyReduction_167
happyReduction_167 (HappyAbsSyn18 happy_var_1)
= HappyAbsSyn18
(happy_var_1
)
happyReduction_167 _ = notHappyAtAll
happyReduce_168 = happySpecReduce_1 84 happyReduction_168
happyReduction_168 (HappyAbsSyn18 happy_var_1)
= HappyAbsSyn18
(happy_var_1
)
happyReduction_168 _ = notHappyAtAll
happyReduce_169 = happySpecReduce_1 84 happyReduction_169
happyReduction_169 (HappyAbsSyn18 happy_var_1)
= HappyAbsSyn18
(happy_var_1
)
happyReduction_169 _ = notHappyAtAll
happyReduce_170 = happySpecReduce_1 84 happyReduction_170
happyReduction_170 (HappyAbsSyn18 happy_var_1)
= HappyAbsSyn18
(happy_var_1
)
happyReduction_170 _ = notHappyAtAll
happyReduce_171 = happySpecReduce_1 84 happyReduction_171
happyReduction_171 (HappyAbsSyn15 happy_var_1)
= HappyAbsSyn18
(TyName happy_var_1 Nothing
)
happyReduction_171 _ = notHappyAtAll
happyReduce_172 = happyReduce 6 85 happyReduction_172
happyReduction_172 (_ `HappyStk`
(HappyAbsSyn87 happy_var_5) `HappyStk`
_ `HappyStk`
(HappyAbsSyn19 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn18
(TyFixed (Just (happy_var_3,happy_var_5))
) `HappyStk` happyRest
happyReduce_173 = happySpecReduce_1 86 happyReduction_173
happyReduction_173 _
= HappyAbsSyn18
(TyFixed Nothing
)
happyReduce_174 = happySpecReduce_1 87 happyReduction_174
happyReduction_174 (HappyTerminal (T_literal happy_var_1))
= HappyAbsSyn87
(let (IntegerLit il) = happy_var_1 in il
)
happyReduction_174 _ = notHappyAtAll
happyReduce_175 = happySpecReduce_1 88 happyReduction_175
happyReduction_175 (HappyTerminal (T_string_lit happy_var_1))
= HappyAbsSyn15
(happy_var_1
)
happyReduction_175 _ = notHappyAtAll
happyReduce_176 = happySpecReduce_1 89 happyReduction_176
happyReduction_176 (HappyTerminal (T_id happy_var_1))
= HappyAbsSyn39
((Id happy_var_1)
)
happyReduction_176 _ = notHappyAtAll
happyNewToken action sts stk
= lexIDL(\tk ->
let cont i = action i i tk (HappyState action) sts stk in
case tk of {
T_eof -> action 162 162 (error "reading EOF!") (HappyState action) sts stk;
T_semi -> cont 90;
T_module -> cont 91;
T_interface -> cont 92;
T_oparen -> cont 93;
T_cparen -> cont 94;
T_ocurly -> cont 95;
T_ccurly -> cont 96;
T_colon -> cont 97;
T_dcolon -> cont 98;
T_comma -> cont 99;
T_dot -> cont 100;
T_const -> cont 101;
T_equal -> cont 102;
T_eqeq -> cont 103;
T_neq -> cont 104;
T_or -> cont 105;
T_rel_or -> cont 106;
T_xor -> cont 107;
T_and -> cont 108;
T_rel_and -> cont 109;
T_shift happy_dollar_dollar -> cont 110;
T_div -> cont 111;
T_mod -> cont 112;
T_not -> cont 113;
T_negate -> cont 114;
T_question -> cont 115;
T_typedef -> cont 116;
T_type happy_dollar_dollar -> cont 117;
T_float happy_dollar_dollar -> cont 118;
T_int happy_dollar_dollar -> cont 119;
T_unsigned -> cont 120;
T_signed -> cont 121;
T_char -> cont 122;
T_wchar -> cont 123;
T_boolean -> cont 124;
T_struct -> cont 125;
T_union -> cont 126;
T_switch -> cont 127;
T_case -> cont 128;
T_default -> cont 129;
T_enum -> cont 130;
T_lt -> cont 131;
T_le -> cont 132;
T_gt -> cont 133;
T_ge -> cont 134;
T_osquare -> cont 135;
T_csquare -> cont 136;
T_void -> cont 137;
T_mode happy_dollar_dollar -> cont 138;
T_literal happy_dollar_dollar -> cont 139;
T_string_lit happy_dollar_dollar -> cont 140;
T_id happy_dollar_dollar -> cont 141;
T_attribute -> cont 142;
T_plus -> cont 143;
T_times -> cont 144;
T_minus -> cont 145;
T_string -> cont 146;
T_wstring -> cont 147;
T_sequence -> cont 148;
T_object -> cont 149;
T_any -> cont 150;
T_octet -> cont 151;
T_oneway -> cont 152;
T_fixed -> cont 153;
T_exception -> cont 154;
T_raises -> cont 155;
T_context -> cont 156;
T_readonly -> cont 157;
T_include_start happy_dollar_dollar -> cont 158;
T_include_end -> cont 159;
T_pragma happy_dollar_dollar -> cont 160;
T_unknown happy_dollar_dollar -> cont 161;
_ -> happyError
})
happyThen :: LexM a -> (a -> LexM b) -> LexM b
happyThen = (thenLexM)
happyReturn :: a -> LexM a
happyReturn = (returnLexM)
happyThen1 = happyThen
happyReturn1 = happyReturn
parseIDL = happyThen (happyParse action_0) (\x -> case x of {HappyAbsSyn4 z -> happyReturn z; _other -> notHappyAtAll })
happySeq = happyDontSeq
-- | Parse-failure action in the lexer monad: report the current source
-- location and the remainder of the offending input line (up to the next
-- newline) as an IO error.
happyError :: LexM a
happyError = do
  l <- getSrcLoc
  str <- getStream
  ioToLexM (ioError (userError (show l ++ ": Parse error: " ++ takeWhile (/='\n') str)))
{-# LINE 1 "GenericTemplate.hs" #-}
-- $Id: GenericTemplate.hs,v 1.23 2002/05/23 09:24:27 simonmar Exp $
{-# LINE 15 "GenericTemplate.hs" #-}
infixr 9 `HappyStk`
data HappyStk a = HappyStk a (HappyStk a)
-----------------------------------------------------------------------------
-- starting the parse
happyParse start_state = happyNewToken start_state notHappyAtAll notHappyAtAll
-----------------------------------------------------------------------------
-- Accepting the parse
happyAccept j tk st sts (HappyStk ans _) =
(happyReturn1 ans)
-----------------------------------------------------------------------------
-- Arrays only: do the next action
{-# LINE 150 "GenericTemplate.hs" #-}
-----------------------------------------------------------------------------
-- HappyState data type (not arrays)
newtype HappyState b c = HappyState
(Int -> -- token number
Int -> -- token number (yes, again)
b -> -- token semantic value
HappyState b c -> -- current state
[HappyState b c] -> -- state stack
c)
-----------------------------------------------------------------------------
-- Shifting a token
happyShift new_state (1) tk st sts stk@(x `HappyStk` _) =
let i = (case x of { HappyErrorToken (i) -> i }) in
-- trace "shifting the error token" $
new_state i i tk (HappyState (new_state)) ((st):(sts)) (stk)
happyShift new_state i tk st sts stk =
happyNewToken new_state ((st):(sts)) ((HappyTerminal (tk))`HappyStk`stk)
-- happyReduce is specialised for the common cases.
happySpecReduce_0 i fn (1) tk st sts stk
= happyFail (1) tk st sts stk
happySpecReduce_0 nt fn j tk st@((HappyState (action))) sts stk
= action nt j tk st ((st):(sts)) (fn `HappyStk` stk)
happySpecReduce_1 i fn (1) tk st sts stk
= happyFail (1) tk st sts stk
happySpecReduce_1 nt fn j tk _ sts@(((st@(HappyState (action))):(_))) (v1`HappyStk`stk')
= let r = fn v1 in
happySeq r (action nt j tk st sts (r `HappyStk` stk'))
happySpecReduce_2 i fn (1) tk st sts stk
= happyFail (1) tk st sts stk
happySpecReduce_2 nt fn j tk _ ((_):(sts@(((st@(HappyState (action))):(_))))) (v1`HappyStk`v2`HappyStk`stk')
= let r = fn v1 v2 in
happySeq r (action nt j tk st sts (r `HappyStk` stk'))
happySpecReduce_3 i fn (1) tk st sts stk
= happyFail (1) tk st sts stk
happySpecReduce_3 nt fn j tk _ ((_):(((_):(sts@(((st@(HappyState (action))):(_))))))) (v1`HappyStk`v2`HappyStk`v3`HappyStk`stk')
= let r = fn v1 v2 v3 in
happySeq r (action nt j tk st sts (r `HappyStk` stk'))
happyReduce k i fn (1) tk st sts stk
= happyFail (1) tk st sts stk
happyReduce k nt fn j tk st sts stk
= case happyDrop (k - ((1) :: Int)) sts of
sts1@(((st1@(HappyState (action))):(_))) ->
let r = fn stk in -- it doesn't hurt to always seq here...
happyDoSeq r (action nt j tk st1 sts1 r)
happyMonadReduce k nt fn (1) tk st sts stk
= happyFail (1) tk st sts stk
happyMonadReduce k nt fn j tk st sts stk =
happyThen1 (fn stk) (\r -> action nt j tk st1 sts1 (r `HappyStk` drop_stk))
where sts1@(((st1@(HappyState (action))):(_))) = happyDrop k ((st):(sts))
drop_stk = happyDropStk k stk
-- Drop the top @n@ entries from the parser's state stack (a plain list).
happyDrop (0) l = l
happyDrop n ((_):(t)) = happyDrop (n - ((1) :: Int)) t
-- Drop the top @n@ semantic values from the value stack (a 'HappyStk' spine).
happyDropStk (0) l = l
happyDropStk n (x `HappyStk` xs) = happyDropStk (n - ((1)::Int)) xs
-----------------------------------------------------------------------------
-- Moving to a new state after a reduction
happyGoto action j tk st = action j j tk (HappyState action)
-----------------------------------------------------------------------------
-- Error recovery ((1) is the error token)
-- parse error if we are in recovery and we fail again
happyFail (1) tk old_st _ stk =
-- trace "failing" $
happyError
{- We don't need state discarding for our restricted implementation of
"error". In fact, it can cause some bogus parses, so I've disabled it
for now --SDM
-- discard a state
happyFail (1) tk old_st (((HappyState (action))):(sts))
(saved_tok `HappyStk` _ `HappyStk` stk) =
-- trace ("discarding state, depth " ++ show (length stk)) $
action (1) (1) tk (HappyState (action)) sts ((saved_tok`HappyStk`stk))
-}
-- Enter error recovery: generate an error token,
-- save the old token and carry on.
happyFail i tk (HappyState (action)) sts stk =
-- trace "entering error recovery" $
action (1) (1) tk (HappyState (action)) sts ( (HappyErrorToken (i)) `HappyStk` stk)
-- Internal happy errors:
notHappyAtAll = error "Internal Happy error\n"
-----------------------------------------------------------------------------
-- Hack to get the typechecker to accept our action functions
-----------------------------------------------------------------------------
-- Seq-ing. If the --strict flag is given, then Happy emits
-- happySeq = happyDoSeq
-- otherwise it emits
-- happySeq = happyDontSeq
happyDoSeq, happyDontSeq :: a -> b -> b
-- Strict variant: force the reduction result to WHNF before continuing.
happyDoSeq a b = a `seq` b
-- Lazy variant: ignore the reduction result entirely.
happyDontSeq a b = b
-----------------------------------------------------------------------------
-- Don't inline any functions from the template. GHC has a nasty habit
-- of deciding to inline happyGoto everywhere, which increases the size of
-- the generated parser quite a bit.
{-# NOINLINE happyShift #-}
{-# NOINLINE happySpecReduce_0 #-}
{-# NOINLINE happySpecReduce_1 #-}
{-# NOINLINE happySpecReduce_2 #-}
{-# NOINLINE happySpecReduce_3 #-}
{-# NOINLINE happyReduce #-}
{-# NOINLINE happyMonadReduce #-}
{-# NOINLINE happyGoto #-}
{-# NOINLINE happyFail #-}
-- end of Happy Template.
|
dchagniot/hdirect
|
src/OmgParser.hs
|
Haskell
|
bsd-3-clause
| 98,554
|
module Cz.MartinDobias.ChristmasKoma.PadCracker (
Message,
PossibleKeystrokes,
Passphrase(..),
crack,
decode,
zipAll
) where
import Data.Char(ord, chr)
import Data.Bits(xor)
-- | A ciphertext (or plaintext) message.
type Message = String

-- | Key bytes still considered possible for one key position.
type PossibleKeystrokes = [Char]

-- | The ciphertext characters observed at one position across all messages.
type AvailableChars = [Char]

-- | One candidate-keystroke set per key position.
type Passphrase = [PossibleKeystrokes]

-- | Transpose a ragged list of messages into per-position columns:
-- column k holds the k-th character of every message that is long enough
-- to have one.  Recursion ends once every remaining message is exhausted.
zipAll :: [Message] -> [AvailableChars]
zipAll []   = []
zipAll msgs = column : zipAll rest
  where
    keep   = filter (not . null)
    column = map head (keep msgs)
    rest   = keep (map tail (keep msgs))

-- Characters accepted anywhere in a decoded plaintext.
legalOutput = ['a'..'z'] ++ ['A'..'Z'] ++ ['0'..'9'] ++ " ,.:!-\'"

-- Characters accepted as the very first character of a plaintext.
legalOutputInitial = ['A'..'Z'] ++ ['0'..'9']

-- Every possible key byte.
possibleCandidates = [(chr 0) .. (chr 255)]

-- | Does XOR-ing the key byte against every ciphertext character in the
-- column yield only characters from the given legal alphabet?
candecode :: [Char] -> Char -> AvailableChars -> Bool
candecode legal key = all decodesLegally
  where
    decodesLegally ct = chr (ord key `xor` ord ct) `elem` legal

-- | Keep only the candidate key bytes that decode the whole column into
-- the legal alphabet.
valid :: [Char] -> AvailableChars -> PossibleKeystrokes -> PossibleKeystrokes
valid legal column = filter (\key -> candecode legal key column)
-- | Refine the candidate keystrokes for every message column: each column
-- keeps only the key bytes that decode all of its ciphertext characters
-- into 'legalOutput'.  An empty prior candidate set means "unconstrained",
-- i.e. all of 'possibleCandidates'.
findCandidates :: [AvailableChars] -> [PossibleKeystrokes] -> [PossibleKeystrokes]
findCandidates = refineWith legalOutput

-- | Like 'findCandidates', but the first column is restricted to
-- 'legalOutputInitial' (plaintexts are assumed to open with a capital
-- letter or a digit); the remaining columns use the general alphabet.
findCandidatesInitial :: [AvailableChars] -> [PossibleKeystrokes] -> [PossibleKeystrokes]
findCandidatesInitial [] _ = []
findCandidatesInitial (x : xs) (p : ps) = refineColumn legalOutputInitial x p : findCandidates xs ps

-- Refine one column against a legal alphabet; an empty prior set widens
-- to every possible key byte.  (Shared by both public entry points to
-- remove the duplicated null-check logic.)
refineColumn :: [Char] -> AvailableChars -> PossibleKeystrokes -> PossibleKeystrokes
refineColumn legal x p
  | null p    = valid legal x possibleCandidates
  | otherwise = valid legal x p

-- Apply 'refineColumn' across all columns with a fixed alphabet.  The
-- candidate list is expected to be infinite (see 'infinitePassphrase'),
-- so only the column list terminates the recursion.
refineWith :: [Char] -> [AvailableChars] -> [PossibleKeystrokes] -> [PossibleKeystrokes]
refineWith _ [] _ = []
refineWith legal (x : xs) (p : ps) = refineColumn legal x p : refineWith legal xs ps
-- | Crack a repeating one-time pad: align every message against the
-- cyclically repeated passphrase and narrow the candidate key bytes for
-- each key position.  The first message column is additionally restricted
-- to 'legalOutputInitial'.
crack :: [Message] -> Passphrase -> Passphrase
crack ms ps = findCandidatesInitial (zipAll ms) (infinitePassphrase ps)
infinitePassphrase p = p ++ infinitePassphrase p
-- Render one output character per (ciphertext, candidates) pair: decode
-- the character when exactly one key byte remains possible, otherwise
-- emit a '_' placeholder.
decodeOrHide = map render
  where
    render (c, [p]) = chr (ord c `xor` ord p)
    render _        = '_'
-- | Decode a message with the cracked passphrase: positions whose key byte
-- is uniquely determined are decoded; ambiguous or empty candidate sets
-- are rendered as '_'.
decode :: Passphrase -> Message -> String
decode cs m = decodeOrHide zipped
  where zipped = zip m (infinitePassphrase cs)
|
martindobias/christmass-koma
|
src/Cz/MartinDobias/ChristmasKoma/PadCracker.hs
|
Haskell
|
bsd-3-clause
| 1,989
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
module FPNLA.Operations.BLAS.Strategies.SYRK.MonadPar.DefPar () where
import Control.DeepSeq (NFData)
import Control.Monad.Par as MP (parMap,
runPar)
import FPNLA.Matrix (MatrixVector,
foldr_v,
fromCols_vm,
generate_v)
import FPNLA.Operations.BLAS (SYRK (syrk))
import FPNLA.Operations.BLAS.Strategies.DataTypes (DefPar_MP)
import FPNLA.Operations.Parameters (Elt,
TransType (..),
blasResultM,
dimTrans_m,
dimTriang,
elemSymm,
elemTrans_m)
-- | Symmetric rank-k update (SYRK) with the monad-par default strategy:
-- C := alpha * op(A) * op(A)^T + beta * C, computed element-wise and
-- generated column-by-column in parallel.
instance (NFData (v e), Elt e, MatrixVector m v e) => SYRK DefPar_MP m v e where
    syrk _ alpha pmA beta pmB
        -- pmB is the symmetric accumulator, so it must be square.
        | p /= p' = error "syrk: incompatible ranges"
        | otherwise = blasResultM $ generatePar_m p p (\i j -> (alpha * pmAMultIJ i j) + beta * elemSymm i j pmB)
        where
            (p, p') = dimTriang pmB
            -- Dot product of row i of tmA with column j of tmB.
            matMultIJ i j tmA tmB = foldr_v (+) 0 (generate_v (snd $ dimTrans_m tmA) (\k -> (*) (elemTrans_m i k tmA) (elemTrans_m k j tmB)) :: v e)
            -- Element (i,j) of op(A) * op(A)^T for each transposition mode.
            -- NOTE(review): ConjTrans is handled exactly like Trans, i.e.
            -- conjugation is ignored -- presumably only real element types
            -- are expected here; confirm before using with complex numbers.
            pmAMultIJ i j =
                case pmA of
                    (NoTrans mA) -> matMultIJ i j pmA (Trans mA)
                    (Trans mA) -> matMultIJ i j (Trans mA) pmA
                    (ConjTrans mA) -> matMultIJ i j (Trans mA) pmA
            -- Build an m-by-n matrix: each of the n columns is generated in
            -- parallel via monad-par, then assembled column-wise.
            generatePar_m m n gen = fromCols_vm . MP.runPar . MP.parMap (\j -> generate_v m (`gen` j) :: v e) $ [0 .. (n - 1)]
|
mauroblanco/fpnla-examples
|
src/FPNLA/Operations/BLAS/Strategies/SYRK/MonadPar/DefPar.hs
|
Haskell
|
bsd-3-clause
| 2,251
|
{-# LANGUAGE GeneralizedNewtypeDeriving, ScopedTypeVariables, KindSignatures, GADTs, InstanceSigs, TypeOperators, MultiParamTypeClasses, FlexibleInstances, OverloadedStrings #-}
module Text.AFrame where
import Control.Applicative
import Data.Char (isSpace)
import Data.Generic.Diff
import Data.Map(Map)
import Data.String
import Data.Text(Text,pack,unpack)
import qualified Data.Text as T
import qualified Data.Text.Lazy as LT
import Data.Maybe (listToMaybe)
import Data.List as L
import Data.Monoid ((<>))
--import Text.XML.Light as X
import Data.Aeson
import Data.Monoid
import qualified Text.Taggy as T
import qualified Data.HashMap.Strict as H
-- | 'AFrame' describes the contents of an a-frame scene,
-- and is stored as a classical rose tree.
-- 'AFrame' follows the DOM, except there is no textual
-- content; it is tags all the way down.
--
-- An exception is that \<script>ABC\</script> is encoded using
-- \<script text=\"ABC\">\</script>
data AFrame = AFrame Primitive [Attribute] [AFrame]
  deriving (Show, Eq)

-- | An element (tag) name, e.g. @a-scene@ or @a-entity@.
newtype Primitive = Primitive Text
  deriving (Show, Eq, Ord, IsString, ToJSON, FromJSON)

-- | An attribute name, e.g. @position@.
newtype Label = Label Text
  deriving (Show, Eq, Ord, IsString, ToJSON, FromJSON)

-- | An attribute value, e.g. @0 1 -5@.
newtype Property = Property Text
  deriving (Show, Eq, Ord, IsString, ToJSON, FromJSON)

-- | A name\/value pair on an element.
type Attribute = (Label,Property)

-- | A valid css or jQuery-style path, in Haskell form.
-- An example of the string form might be
-- $('a-scene > a-entity:nth-of-type(2) > a-collada-model:nth-of-type(1) > a-animation:nth-of-type(1)')
data Path = Path Primitive [(Int,Primitive)]
  deriving (Show, Eq, Ord)
-------------------------------------------------------------------------------------------------
-- | A 'Path' is serialised as a flat JSON array alternating element
-- name, index, element name, ... -- e.g.
-- @Path "a-scene" [(2,"a-entity")]@ becomes @["a-scene",2,"a-entity"]@.
instance ToJSON Path where
  toJSON (Path p ps) = toJSON $
      toJSON p : concat
        [ [toJSON i,toJSON p]
        | (i,p) <- ps
        ]

-- | Inverse of the 'ToJSON' encoding above: an odd-length array of
-- alternating names and indices.
instance FromJSON Path where
  parseJSON as = do
      xs :: [Value] <- parseJSON as
      toPath xs
    where
      -- A single trailing name terminates the path.
      toPath [p] = do
          txt <- parseJSON p
          return $ Path txt []
      -- Consume one (name, index) pair, then recurse; the recursively
      -- parsed head becomes the child element of the pair parsed here.
      toPath (p1:i1:rest) = do
          txt :: Primitive <- parseJSON p1
          n :: Int <- parseJSON i1
          Path p ps <- toPath rest
          return $ Path txt ((n,p):ps)
      toPath _ = fail "bad Path"
-------------------------------------------------------------------------------------------------
-- | Set (or overwrite) an attribute on the root element of an 'AFrame'.
-- Any existing binding for the same 'Label' is removed first.
setAttribute :: Label -> Property -> AFrame -> AFrame
setAttribute lbl prop (AFrame prim attrs kids) =
    AFrame prim ((lbl, prop) : filter ((/= lbl) . fst) attrs) kids

-- | Look up an attribute on the root element of an 'AFrame'.
getAttribute :: Label -> AFrame -> Maybe Property
getAttribute lbl (AFrame _ attrs _) = lookup lbl attrs

-- | Remove an attribute from the root element of an 'AFrame'.
resetAttribute :: Label -> AFrame -> AFrame
resetAttribute lbl (AFrame prim attrs kids) =
    AFrame prim (filter ((/= lbl) . fst) attrs) kids
-------------------------------------------------------------------------------------------------
--setPath :: Path -> Label -> Property -> AFrame -> AFrame
--getPath :: Path -> Label -> AFrame -> Mabe Property
-- | Depth-first search for the element whose @id@ attribute equals the
-- given text; returns the first match found, if any.
getElementById :: AFrame -> Text -> Maybe AFrame
getElementById af@(AFrame p as is) i =
    case lookup "id" as of
      -- This element matches.
      Just (Property i') | i == i' -> return af
      -- Otherwise take the first successful result among the children.
      _ -> listToMaybe [ af' | Just af' <- map (flip getElementById i) is ]
-------------------------------------------------------------------------------------------------
-- | 'aFrameToElement' converts an 'AFrame' to an (XML) 'Element'. Total.
aFrameToElement :: AFrame -> T.Element
aFrameToElement (AFrame prim attrs rest) = T.Element prim' attrs' rest'
  where
    Primitive prim' = prim
    -- All attributes, except the synthetic @text@ attribute of a
    -- \<script> element, which is rendered as node content instead.
    attrs' = H.fromList
             $ [ (a,p)
               | (Label a,Property p) <- attrs
               , not (prim' == "script" && a == "text")
               ]
    -- A \<script>'s stored @text@ attribute becomes its node content,
    -- followed by the recursively converted children.
    rest' = [ T.NodeContent p
            | (Label "text",Property p) <- attrs
            , prim' == "script"
            ]
         ++ map (T.NodeElement . aFrameToElement) rest

-- | 'aFrameToElement' converts an (HTML) 'Element' to an 'AFrame'. Total.
-- Strips out any text (which is not used by 'AFrame' anyway.)
elementToAFrame :: T.Element -> AFrame
elementToAFrame ele = AFrame prim' attrs' content'
  where
    prim'    = Primitive $ T.eltName $ ele
    -- Real attributes, plus any textual content re-encoded as a
    -- synthetic @text@ attribute (the inverse of 'aFrameToElement').
    -- NOTE(review): the @text@ attribute is added for *every* element
    -- with text content, not only \<script> -- confirm this asymmetry
    -- with 'aFrameToElement' is intended.
    attrs'   = [ (Label a,Property p)| (a,p) <- H.toList $ T.eltAttrs ele ]
            ++ [ (Label "text",Property txt)| T.NodeContent txt <- T.eltChildren ele ]
    content' = [ elementToAFrame ele' | T.NodeElement ele' <- T.eltChildren ele ]
-- | Read an aframe document from an (X)HTML-style string; the a-frame
-- scene may be embedded anywhere in the document.  Returns the first
-- @\<a-scene\>@ element found (depth-first), or 'Nothing' when the
-- input does not parse to a single DOM element or contains no scene.
readAFrame :: String -> Maybe AFrame
readAFrame str = do
    let doms = T.parseDOM True (LT.fromStrict $ pack str)
    case doms of
      [T.NodeElement dom] -> findAFrame (elementToAFrame dom)
      -- Previously this branch called 'error'; a malformed document now
      -- yields 'Nothing' instead of crashing, honouring the 'Maybe'
      -- result type.
      _ -> Nothing
  where
    findAFrame :: AFrame -> Maybe AFrame
    findAFrame a@(AFrame (Primitive "a-scene") _ _) = return a
    findAFrame (AFrame _ _ xs) = listToMaybe
        [ x
        | Just x <- map findAFrame xs
        ]
-- | Render an 'AFrame' scene to an HTML string (via 'aFrameToElement').
showAFrame :: AFrame -> String
showAFrame af = LT.unpack (T.renderWith False (aFrameToElement af))
-- | inject 'AFrame' into an existing (HTML) file. Replaces complete "<a-scene>" element.
injectAFrame :: AFrame -> String -> String
injectAFrame aframe str = findScene str 0
  where
    openTag = "<a-scene"
    closeTag = "</a-scene>"

    -- Scan for the opening tag, tracking in @n@ the length of the run of
    -- spaces immediately preceding it (used to re-indent the new scene).
    findScene :: String -> Int -> String
    findScene xs n | openTag `L.isPrefixOf` xs = insertScene (drop (length openTag) xs) n
    findScene (x:xs) n =
        case x of
          ' ' -> x : findScene xs (n+1)
          _   -> x : findScene xs 0
    findScene [] n = []

    -- Render the replacement scene, indenting every line but the first
    -- by @n@ spaces, then resume with whatever followed the old scene.
    insertScene :: String -> Int -> String
    insertScene xs n = unlines (s : map (spaces ++) (ss ++ [remainingScene xs]))
      where
        (s:ss) = lines $ showAFrame $ aframe
        spaces = take n $ repeat ' '

    -- This will mess up if the closeTag strict appears in the scene.
    -- Drops input up to and including the original closing tag.
    remainingScene :: String -> String
    remainingScene xs | closeTag `L.isPrefixOf` xs = drop (length closeTag) xs
    remainingScene (x:xs) = remainingScene xs
    remainingScene [] = []
------
-- Adding gdiff support
------

-- | The gdiff \"family\" describing how 'AFrame' values decompose into
-- constructors and fields, so scenes can be structurally diffed.
data AFrameFamily :: * -> * -> * where
    AFrame'     :: AFrameFamily AFrame (Cons Primitive
                                       (Cons [Attribute]
                                       (Cons [AFrame] Nil)))
    ConsAttr'   :: AFrameFamily [Attribute] (Cons Attribute (Cons [Attribute] Nil))
    NilAttr'    :: AFrameFamily [Attribute] Nil
    ConsAFrame' :: AFrameFamily [AFrame] (Cons AFrame (Cons [AFrame] Nil))
    NilAFrame'  :: AFrameFamily [AFrame] Nil
    -- Leaf constructors carry their value ('Abstr' in the Type instances).
    Primitive'  :: Primitive -> AFrameFamily Primitive Nil
    Attribute'  :: Attribute -> AFrameFamily Attribute Nil

instance Family AFrameFamily where
    -- Constructor equality; leaf constructors also compare their payload.
    decEq :: AFrameFamily tx txs -> AFrameFamily ty tys -> Maybe (tx :~: ty, txs :~: tys)
    decEq AFrame'     AFrame'     = Just (Refl, Refl)
    decEq ConsAttr'   ConsAttr'   = Just (Refl, Refl)
    decEq NilAttr'    NilAttr'    = Just (Refl, Refl)
    decEq ConsAFrame' ConsAFrame' = Just (Refl, Refl)
    decEq NilAFrame'  NilAFrame'  = Just (Refl, Refl)
    decEq (Primitive' p1) (Primitive' p2) | p1 == p2 = Just (Refl, Refl)
    decEq (Attribute' a1) (Attribute' a2) | a1 == a2 = Just (Refl, Refl)
    decEq _ _ = Nothing

    -- Decompose a value into the fields of the given constructor.
    fields :: AFrameFamily t ts -> t -> Maybe ts
    fields AFrame' (AFrame prim attrs fs)
                       = Just $ CCons prim $ CCons attrs $ CCons fs $ CNil
    fields ConsAttr' ((lbl,prop):xs)
                       = Just $ CCons (lbl,prop) $ CCons xs $ CNil
    fields NilAttr' [] = Just CNil
    fields ConsAFrame' (x:xs)
                       = Just $ CCons x $ CCons xs $ CNil
    fields NilAFrame' [] = Just CNil
    fields (Primitive' _) _ = Just CNil
    fields (Attribute' _) _ = Just CNil
    -- Catch-all for constructor/value shape mismatches
    -- (e.g. 'ConsAttr'' applied to an empty list).
    fields _ _ = Nothing

    -- Rebuild a value from a constructor and its fields.
    apply :: AFrameFamily t ts -> ts -> t
    apply AFrame' (CCons prim (CCons attrs (CCons fs CNil)))
                       = AFrame prim attrs fs
    apply ConsAttr' (CCons (lbl,prop) (CCons xs CNil)) = (lbl,prop) : xs
    apply NilAttr' CNil = []
    apply ConsAFrame' (CCons x (CCons xs CNil)) = x : xs
    apply NilAFrame' CNil = []
    apply (Primitive' p1) CNil = p1
    apply (Attribute' a1) CNil = a1

    -- Human-readable constructor names, used in diff output.
    string :: AFrameFamily t ts -> String
    string AFrame'      = "AFrame"
    string ConsAttr'    = "ConsAttr"
    string NilAttr'     = "NilAttr"
    string ConsAFrame'  = "ConsAFrame"
    string NilAFrame'   = "NilAFrame"
    string (Primitive' l1) = show l1
    string (Attribute' p1) = show p1

instance Type AFrameFamily AFrame where
    constructors = [Concr AFrame']
instance Type AFrameFamily Primitive where
    constructors = [Abstr Primitive']
instance Type AFrameFamily [Attribute] where
    constructors = [Concr ConsAttr',Concr NilAttr']
instance Type AFrameFamily [AFrame] where
    constructors = [Concr ConsAFrame',Concr NilAFrame']
instance Type AFrameFamily Attribute where
    constructors = [Abstr Attribute']

-- | A single attribute update located by path.
data AFrameUpdate = AFrameUpdate
    { aframePath     :: Path      -- ^ where in the scene the change applies
    , aframeLabel    :: Label     -- ^ which attribute changed
    , aframeProperty :: Property  -- ^ its new value
    }

{-
compareAFrame :: AFrame -> AFrame -> Maybe [([Text],Attribute)]
compareAFrame aframe1 aframe2 = fmap (fmap (\ (xs,a) -> (intercalate " > " xs,a)))
                              $ deltaAFrame aframe1 aframe2
-}
-- | Structural diff of two scenes with identical shape: returns the
-- attribute changes, each located by a 'Path'.  Fails (in the ambient
-- 'MonadFail', here 'Maybe') when the trees differ in element names or
-- in child counts.
deltaAFrame :: AFrame -> AFrame -> Maybe [(Path,Attribute)]
deltaAFrame (AFrame p1@(Primitive primName) attrs1 aframes1)
            (AFrame p2 attrs2 aframes2)
    | p1 /= p2 = fail "element name does not match in deltasAFrame"
    | length aframes1 /= length aframes2
               = fail "sub elements count do not match in deltasAFrame"
    | otherwise = do
        attrsD <- fmap (\ a -> (Path p1 [],a)) <$> deltaAttributes attrs1 attrs2
        let ps = [ p | AFrame p _ _ <- aframes1 ]
            -- For each child, count the earlier siblings with the same
            -- tag: 'scanl (flip (:))' builds the reversed prefixes of
            -- @ps@, so each @(x:xs)@ pairs a child's tag with the tags
            -- preceding it.  This is the 0-based nth-of-type index used
            -- in 'Path'.
            xs = [ length [ () | x' <- xs, x' == x ] | (x:xs) <- tail $ scanl (flip (:)) [] ps ]
        aframesD <- concat <$> sequence
            [ do ds <- deltaAFrame a1 a2
                 -- Re-root each child delta under this element.
                 return $ fmap (\ (Path p ps,at) -> (Path p1 ((x,p):ps),at)) ds
            | (a1,a2,x) <- zip3 aframes1 aframes2 xs
            ]
        return $ attrsD ++ aframesD

-- | Pairwise diff of two attribute lists of the same length (order and
-- labels must line up position by position).
deltaAttributes :: [Attribute] -> [Attribute] -> Maybe [Attribute]
deltaAttributes xs ys | length xs /= length ys = fail "different number of arguments for deltaAttributes"
deltaAttributes xs ys = concat <$> sequence [ deltaAttribute x y | (x,y) <- xs `zip` ys ]

-- | Diff of a single attribute position: no change, a changed value
-- under the same label, or failure when the labels differ.
deltaAttribute :: Attribute -> Attribute -> Maybe [Attribute]
deltaAttribute attr1@(lbl1,_) attr2@(lbl2,_)
    | attr1 == attr2 = return []      -- same result
    | lbl1 == lbl2   = return [attr2] -- true update
    | otherwise      = fail "labels do not match in deltaAttributes"
------------------------------------------------------------------------------------------

-- | Split a composite property like @\"src: #a; loop: true\"@ into its
-- component (label, value) pairs; components without a value are dropped.
unpackProperty :: Property -> [(Label,Property)]
unpackProperty (Property prop) =
    [ (Label (T.dropWhile isSpace l), Property (T.dropWhile (\ c -> isSpace c || c == ':') p))
    | (l,p) <- map (T.span (/= ':')) (T.splitOn ";" prop)
    , not (T.null p)
    ]

-- | Inverse of 'unpackProperty': join components with @\"; \"@.
packProperty :: [(Label,Property)] -> Property
packProperty = Property
             . T.intercalate "; "
             . map (\ (Label lbl,Property txt) -> lbl <> ": " <> txt)
------------------------------------------------------------------------------------------
-- | Apply a monadic rewrite to every node, parent before children
-- (the children of the *rewritten* node are what get traversed).
preOrderFrame :: Monad m => (AFrame -> m AFrame) -> AFrame -> m AFrame
preOrderFrame f af = do
    AFrame prim attrs aframes <- f af
    aframes' <- traverse (preOrderFrame f) aframes
    return $ AFrame prim attrs aframes'

-- This finds \<script src=\"...\"> and inserts the text=\"..\" into the \<script>.
-- | @rf@ resolves a source path to its contents; any existing @src@ and
-- @text@ attributes are replaced by the freshly fetched @text@.
resolveScript :: Monad m => (Text -> m LT.Text) -> AFrame -> m AFrame
resolveScript rf = preOrderFrame fn
  where
    fn af@(AFrame "script" attrs aframes) = case lookup "src" attrs of
        Nothing -> return af
        Just (Property path) ->
            do txt <- rf path
               return $ AFrame "script"
                               ((Label "text",Property (LT.toStrict txt))
                               : [(l,p) | (l,p) <- attrs, l `notElem` ["src","text"]]
                               )
                               aframes
    fn af = return af

-- | Walk the scene; for each \<a-entity template=\"src: #id; ...\">,
-- look up the referenced \<script> element by id and offer it to @f@.
-- NOTE(review): the result of @f attrs script@ is bound to @txt@ and
-- then discarded -- the entity is returned unchanged, so this traversal
-- currently only runs @f@'s effects without rewriting the tree.
-- Looks unfinished; confirm intended behaviour.
instantiateTemplates :: Monad m => ([Attribute] -> AFrame -> m AFrame) -> AFrame -> m AFrame
instantiateTemplates f root = preOrderFrame fn root
  where
    fn (aEntity@(AFrame "a-entity" attrs aframes)) = case lookup "template" attrs of
        Nothing -> return aEntity
        Just templ -> case lookup "src" (unpackProperty templ) of
            Nothing -> return aEntity
            Just (Property src) | T.take 1 src == "#" ->
                case getElementById root (T.drop 1 src) of
                  Just (script@(AFrame "script" attrs _)) -> do
                      txt <- f attrs script
                      return aEntity
                  _ -> return aEntity -- id not found
    fn af = return af
|
ku-fpg/aframe
|
src/Text/AFrame.hs
|
Haskell
|
bsd-3-clause
| 13,322
|
-- | This module provides the /TcT/ monad.
module Tct.Core.Data.TctM
(
-- * Tct Monad
TctM (..)
, TctROState (..)
, TctStatus (..)
, askState
, setState
, setKvPair
, getKvPair
, askStatus
-- * Lifted IO functions
, async
, wait
, timed
, paused
, raceWith
, concurrently
) where
import Control.Applicative ((<|>))
import Control.Concurrent (threadDelay)
import qualified Control.Concurrent.Async as Async
import Control.Monad (liftM)
import Control.Monad.Reader (ask, liftIO, local, runReaderT)
import qualified Data.Map as M
import Data.Maybe (fromMaybe)
import qualified System.Time as Time
import Tct.Core.Data.Types
-- | Returns the state of the Monad.
askState :: TctM TctROState
askState = ask

-- | Sets (locally) the state of the Monad.
setState :: (TctROState -> TctROState) -> TctM a -> TctM a
setState = local

-- | Sets (locally) a key-value pair.
setKvPair :: (String, [String]) -> TctM a -> TctM a
setKvPair (k,v) = local $ \st -> st { kvPairs = M.insert k v (kvPairs st) }

-- | Given a key; asks for a value. Returns [] if there is no value.
getKvPair :: String -> TctM [String]
getKvPair s = (get . kvPairs) <$> ask
  where get m = [] `fromMaybe` M.lookup s m

-- | Returns 'TctStatus' which is obtained from 'TctROState' during runtime.
--
-- > runningTime   = now - startTime
-- > remainingTime = max (0, now - stopTime)
askStatus :: prob -> TctM (TctStatus prob)
askStatus prob = do
  st  <- askState
  now <- liftIO Time.getClockTime
  return TctStatus
    { currentProblem = prob
    , runningTime    = Time.tdSec (Time.diffClockTimes now (startTime st))
    -- 'stopTime' is optional; when absent there is no remaining-time bound.
    , remainingTime  = (max 0 . Time.tdSec . flip Time.diffClockTimes now) `fmap` stopTime st }

-- Reify a 'TctM' action into a plain 'IO' action by capturing the
-- current read-only state.
toIO :: TctM a -> TctM (IO a)
toIO m = runReaderT (runTctM m) `fmap` askState

-- | Lifts 'Async.async'.
async :: TctM a -> TctM (Async.Async a)
async m = toIO m >>= liftIO . Async.async

-- | Lifts 'Async.wait'.
wait :: Async.Async a -> TctM a
wait = liftIO . Async.wait
--waitEither :: Async.Async a -> Async.Async b -> TctM (Either a b)
--waitEither a1 a2 = liftIO $ Async.waitEither a1 a2
-- | Lifts 'Async.concurrently': run both actions in parallel and wait
-- for both results.  'Async.withAsync' guarantees the sibling is
-- cancelled if either action throws.
concurrently :: TctM a -> TctM b -> TctM (a,b)
concurrently m1 m2 = do
  io1 <- toIO m1
  io2 <- toIO m2
  -- A single 'liftIO' suffices: the nested continuations already run in
  -- IO, so the inner 'liftIO's in the previous version were identity
  -- conversions.
  liftIO $ Async.withAsync io1 $ \a1 ->
           Async.withAsync io2 $ \a2 ->
           Async.waitBoth a1 a2
-- | @'raceWith' p1 m1 m2@ runs @m1@ and @m2@ in parallel.
--
-- * Returns the first result that satisfies @p1@.
-- * Otherwise returns the latter result.
raceWith :: (a -> Bool) -> TctM a -> TctM a -> TctM a
raceWith p1 m1 m2 = do
  io1 <- toIO m1
  io2 <- toIO m2
  liftIO $ raceWithIO p1 io1 io2

-- Plain-IO worker for 'raceWith': whichever action finishes first is
-- inspected; a satisfying result cancels the loser, otherwise we block
-- on the loser and return its result instead.
raceWithIO :: (a -> Bool) -> IO a -> IO a -> IO a
raceWithIO p1 m1 m2 =
  Async.withAsync m1 $ \a1 ->
    Async.withAsync m2 $ \a2 -> do
      e <- Async.waitEither a1 a2
      case e of
        Left r1
          | p1 r1     -> Async.cancel a2 >> return r1
          | otherwise -> Async.wait a2
        Right r2
          | p1 r2     -> Async.cancel a1 >> return r2
          | otherwise -> Async.wait a1

-- | @'timed' seconds m@ wraps the Tct action in timeout, and locally sets 'stopTime'.
-- When @seconds@
--
-- * is negative, no timeout is set;
-- * is @0@, the computation aborts immediately, returning 'Nothing';
-- * is positive the computation runs at most @i@ seconds.
--
-- Returns 'Nothing' if @m@ does not end before the timeout.
--
-- Sets 'stopTime'.
--
-- > stopTime = min (now + timeout) stopTime
timed :: Int -> TctM a -> TctM (Maybe a)
timed n m
  | n < 0     = Just `liftM` m
  | n == 0    = return Nothing
  | otherwise = do
      e <- toIO m' >>= liftIO . Async.race (threadDelay $ toMicroSec n)
      return $ either (const Nothing) Just e
  where
    m' = do
      Time.TOD sec pico <- liftIO Time.getClockTime
      let newTime = Just $ Time.TOD (sec + toInteger n) pico
      -- 'min' on 'Maybe' treats 'Nothing' as smallest, so when no
      -- deadline was set yet the 'min' yields 'Nothing' and '<|>' falls
      -- back to the new deadline; otherwise the earlier of the two
      -- deadlines wins.
      local (\ r -> r { stopTime = min newTime (stopTime r) <|> newTime }) m
-- | @'paused' seconds m@ delays execution of @m@ by the given number of
-- seconds; a non-positive delay runs @m@ immediately.
paused :: Int -> TctM a -> TctM a
paused n m
  | n > 0     = liftIO (threadDelay (toMicroSec n)) >> m
  | otherwise = m
-- | Convert seconds to microseconds (the unit 'threadDelay' expects).
toMicroSec :: Num a => a -> a
toMicroSec = (* 1000000)
|
ComputationWithBoundedResources/tct-core
|
src/Tct/Core/Data/TctM.hs
|
Haskell
|
bsd-3-clause
| 4,268
|
-- | A minimal module exporting a newtype wrapper around 'Int'
-- (appears to be a test fixture -- nothing else lives here).
module Dir.CoerceType(CoerceType(..)) where
-- The constructor is exported so callers can wrap/unwrap (or coerce).
newtype CoerceType = CoerceType Int
|
ndmitchell/weeder
|
test/foo/src/Dir/CoerceType.hs
|
Haskell
|
bsd-3-clause
| 81
|
{-# LANGUAGE OverloadedStrings #-}
module Rede.Workers.VeryBasic(
veryBasic
,bad404ResponseData
,bad404ResponseHeaders
) where
import qualified Data.ByteString as B
import Data.Conduit
import Data.ByteString.Char8 (pack)
import Rede.MainLoop.CoherentWorker
-- | Response headers shared by every reply from 'veryBasic':
-- a 200 status plus a server identifier.
trivialHeaders :: [(B.ByteString, B.ByteString)]
trivialHeaders = [
    (":status", "200"),
    ("server", "reh0m")
    ]
-- | A minimal 'CoherentWorker': logs the request headers to stdout and
-- answers every request with the fixed 200 response "Hello world!".
veryBasic :: CoherentWorker
veryBasic (headers, _) = do
    putStrLn "Got these headers: "
    print headers
    let responseBody = yield "Hello world!"
    return (trivialHeaders, [], responseBody)
-- | Body used for the canned 404 reply.
bad404ResponseData :: B.ByteString
bad404ResponseData = "404: ReH: Didn't find that"

-- | Headers for the canned 404 reply; @content-length@ is computed from
-- 'bad404ResponseData' so the two stay in sync.
bad404ResponseHeaders :: Headers
bad404ResponseHeaders = [
    (":status", "404")
    ,(":version", "HTTP/1.1")
    ,("content-length", (pack.show $ B.length bad404ResponseData))
    ,("content-type", "text/plain")
    ,("server", "ReHv0.0")
    ]
|
loadimpact/http2-test
|
hs-src/Rede/Workers/VeryBasic.hs
|
Haskell
|
bsd-3-clause
| 1,060
|
import Data.Aeson
import qualified Data.ByteString.Lazy as B
import qualified Data.Map as Map
-- | Input: JSON object mapping kata names to ids.
jsonFile :: FilePath
jsonFile = "kataids.json"

-- | Output: the generated Haskell module.
hsFile :: FilePath
hsFile = "Kataids.hs"

-- | Read the raw JSON bytes.
getJSON :: IO B.ByteString
getJSON = B.readFile jsonFile

-- Module preamble for the generated file.  Note: 'show' on a 'Map'
-- prints @fromList [...]@, so appending it after the trailing "Map."
-- below yields a valid @Map.fromList [...]@ expression.  Fragile, but
-- deliberate.
kidspref = "module Kataids where\n\
           \import qualified Data.Map as Map\n\
           \kataids = Map."
-- | Read the kata-id JSON map and write it out as a Haskell module.
-- Decoding failures are reported on stdout.
main :: IO ()
main = do
  decoded <- (eitherDecode <$> getJSON) :: IO (Either String (Map.Map String String))
  either putStrLn (\kids -> writeFile hsFile (kidspref ++ show kids)) decoded
|
klpn/lablinkfix
|
kataidsgen.hs
|
Haskell
|
bsd-3-clause
| 558
|
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[SimplCore]{Driver for simplifying @Core@ programs}
-}
{-# LANGUAGE CPP #-}
module SimplCore ( core2core, simplifyExpr ) where
#include "HsVersions.h"
import GhcPrelude
import DynFlags
import CoreSyn
import HscTypes
import CSE ( cseProgram )
import Rules ( mkRuleBase, unionRuleBase,
extendRuleBaseList, ruleCheckProgram, addRuleInfo, )
import PprCore ( pprCoreBindings, pprCoreExpr )
import OccurAnal ( occurAnalysePgm, occurAnalyseExpr )
import IdInfo
import CoreStats ( coreBindsSize, coreBindsStats, exprSize )
import CoreUtils ( mkTicks, stripTicksTop )
import CoreLint ( endPass, lintPassResult, dumpPassResult,
lintAnnots )
import Simplify ( simplTopBinds, simplExpr, simplRules )
import SimplUtils ( simplEnvForGHCi, activeRule, activeUnfolding )
import SimplEnv
import SimplMonad
import CoreMonad
import qualified ErrUtils as Err
import FloatIn ( floatInwards )
import FloatOut ( floatOutwards )
import FamInstEnv
import Id
import ErrUtils ( withTiming )
import BasicTypes ( CompilerPhase(..), isDefaultInlinePragma )
import VarSet
import VarEnv
import LiberateCase ( liberateCase )
import SAT ( doStaticArgs )
import Specialise ( specProgram)
import SpecConstr ( specConstrProgram)
import DmdAnal ( dmdAnalProgram )
import CallArity ( callArityAnalProgram )
import Exitify ( exitifyProgram )
import WorkWrap ( wwTopBinds )
import Vectorise ( vectorise )
import SrcLoc
import Util
import Module
import Maybes
import UniqSupply ( UniqSupply, mkSplitUniqSupply, splitUniqSupply )
import UniqFM
import Outputable
import Control.Monad
import qualified GHC.LanguageExtensions as LangExt
#if defined(GHCI)
import DynamicLoading ( loadPlugins )
import Plugins ( installCoreToDos )
#else
import DynamicLoading ( pluginError )
#endif
{-
************************************************************************
* *
\subsection{The driver for the simplifier}
* *
************************************************************************
-}
-- | Run the whole Core-to-Core optimisation pipeline over a module:
-- build the pass list from the 'DynFlags', let plugins extend it, run
-- it in 'CoreM', and dump the accumulated simplifier statistics.
core2core :: HscEnv -> ModGuts -> IO ModGuts
core2core hsc_env guts@(ModGuts { mg_module = mod
                                , mg_loc    = loc
                                , mg_deps   = deps
                                , mg_rdr_env = rdr_env })
  = do { us <- mkSplitUniqSupply 's'
       -- make sure all plugins are loaded

       ; let builtin_passes = getCoreToDo dflags
             orph_mods = mkModuleSet (mod : dep_orphs deps)
       ;
       ; (guts2, stats) <- runCoreM hsc_env hpt_rule_base us mod
                                    orph_mods print_unqual loc $
                           do { all_passes <- addPluginPasses builtin_passes
                              ; runCorePasses all_passes guts }

       ; Err.dumpIfSet_dyn dflags Opt_D_dump_simpl_stats
             "Grand total simplifier statistics"
             (pprSimplCount stats)

       ; return guts2 }
  where
    dflags         = hsc_dflags hsc_env
    home_pkg_rules = hptRules hsc_env (dep_mods deps)
    hpt_rule_base  = mkRuleBase home_pkg_rules
    print_unqual   = mkPrintUnqualified dflags rdr_env
    -- mod: get the module out of the current HscEnv so we can retrieve it from the monad.
    -- This is very convienent for the users of the monad (e.g. plugins do not have to
    -- consume the ModGuts to find the module) but somewhat ugly because mg_module may
    -- _theoretically_ be changed during the Core pipeline (it's part of ModGuts), which
    -- would mean our cached value would go out of date.
{-
************************************************************************
* *
Generating the main optimisation pipeline
* *
************************************************************************
-}
-- | Build the sequence of Core-to-Core passes implied by the
-- 'DynFlags': optimisation level, per-pass flags and phase structure.
-- The result is flattened ('CoreDoNothing' removed, nested
-- 'CoreDoPasses' inlined).
getCoreToDo :: DynFlags -> [CoreToDo]
getCoreToDo dflags
  = flatten_todos core_todo
  where
    -- Plain flag lookups.
    opt_level     = optLevel dflags
    phases        = simplPhases dflags
    max_iter      = maxSimplIterations dflags
    rule_check    = ruleCheck dflags
    call_arity    = gopt Opt_CallArity dflags
    exitification = gopt Opt_Exitification dflags
    strictness    = gopt Opt_Strictness dflags
    full_laziness = gopt Opt_FullLaziness dflags
    do_specialise = gopt Opt_Specialise dflags
    do_float_in   = gopt Opt_FloatIn dflags
    cse           = gopt Opt_CSE dflags
    spec_constr   = gopt Opt_SpecConstr dflags
    liberate_case = gopt Opt_LiberateCase dflags
    late_dmd_anal = gopt Opt_LateDmdAnal dflags
    static_args   = gopt Opt_StaticArgumentTransformation dflags
    rules_on      = gopt Opt_EnableRewriteRules dflags
    eta_expand_on = gopt Opt_DoLambdaEtaExpansion dflags
    ww_on         = gopt Opt_WorkerWrapper dflags
    vectorise_on  = gopt Opt_Vectorise dflags
    static_ptrs   = xopt LangExt.StaticPointers dflags

    maybe_rule_check phase = runMaybe rule_check (CoreDoRuleCheck phase)

    maybe_strictness_before phase
      = runWhen (phase `elem` strictnessBefore dflags) CoreDoStrictness

    -- Template simplifier mode; 'sm_phase' is always overridden before use.
    base_mode = SimplMode { sm_phase      = panic "base_mode"
                          , sm_names      = []
                          , sm_dflags     = dflags
                          , sm_rules      = rules_on
                          , sm_eta_expand = eta_expand_on
                          , sm_inline     = True
                          , sm_case_case  = True }

    -- One numbered simplifier phase, with optional strictness before it
    -- and an optional rule check after it.
    simpl_phase phase names iter
      = CoreDoPasses
      $   [ maybe_strictness_before phase
          , CoreDoSimplify iter
                (base_mode { sm_phase = Phase phase
                           , sm_names = names })
          , maybe_rule_check (Phase phase) ]

          -- Vectorisation can introduce a fair few common sub expressions involving
          -- DPH primitives. For example, see the Reverse test from dph-examples.
          -- We need to eliminate these common sub expressions before their definitions
          -- are inlined in phase 2. The CSE introduces lots of v1 = v2 bindings,
          -- so we also run simpl_gently to inline them.
      ++  (if vectorise_on && phase == 3
            then [CoreCSE, simpl_gently]
            else [])

    vectorisation
      = runWhen vectorise_on $
          CoreDoPasses [ simpl_gently, CoreDoVectorisation ]

    -- By default, we have 2 phases before phase 0.

    -- Want to run with inline phase 2 after the specialiser to give
    -- maximum chance for fusion to work before we inline build/augment
    -- in phase 1. This made a difference in 'ansi' where an
    -- overloaded function wasn't inlined till too late.

    -- Need phase 1 so that build/augment get
    -- inlined. I found that spectral/hartel/genfft lost some useful
    -- strictness in the function sumcode' if augment is not inlined
    -- before strictness analysis runs
    simpl_phases = CoreDoPasses [ simpl_phase phase ["main"] max_iter
                                | phase <- [phases, phases-1 .. 1] ]

    -- initial simplify: mk specialiser happy: minimum effort please
    simpl_gently = CoreDoSimplify max_iter
                       (base_mode { sm_phase = InitialPhase
                                  , sm_names = ["Gentle"]
                                  , sm_rules = rules_on   -- Note [RULEs enabled in SimplGently]
                                  , sm_inline = not vectorise_on
                                                -- See Note [Inline in InitialPhase]
                                  , sm_case_case = False })
                          -- Don't do case-of-case transformations.
                          -- This makes full laziness work better

    strictness_pass = if ww_on
                       then [CoreDoStrictness,CoreDoWorkerWrapper]
                       else [CoreDoStrictness]

    -- New demand analyser
    demand_analyser = (CoreDoPasses (
                           strictness_pass ++
                           [simpl_phase 0 ["post-worker-wrapper"] max_iter]
                           ))

    -- Static forms are moved to the top level with the FloatOut pass.
    -- See Note [Grand plan for static forms] in StaticPtrTable.
    static_ptrs_float_outwards =
      runWhen static_ptrs $ CoreDoPasses
        [ simpl_gently -- Float Out can't handle type lets (sometimes created
                       -- by simpleOptPgm via mkParallelBindings)
        , CoreDoFloatOutwards FloatOutSwitches
            { floatOutLambdas   = Just 0
            , floatOutConstants = True
            , floatOutOverSatApps = False
            , floatToTopLevelOnly = True
            }
        ]

    core_todo =
     if opt_level == 0 then
       -- At -O0 only a single "non-opt" simplifier run (plus
       -- vectorisation / static-pointer floating when enabled).
       [ vectorisation,
         static_ptrs_float_outwards,
         CoreDoSimplify max_iter
             (base_mode { sm_phase = Phase 0
                        , sm_names = ["Non-opt simplification"] })
       ]

     else {- opt_level >= 1 -} [

    -- We want to do the static argument transform before full laziness as it
    -- may expose extra opportunities to float things outwards. However, to fix
    -- up the output of the transformation we need at do at least one simplify
    -- after this before anything else
        runWhen static_args (CoreDoPasses [ simpl_gently, CoreDoStaticArgs ]),

        -- We run vectorisation here for now, but we might also try to run
        -- it later
        vectorisation,

        -- initial simplify: mk specialiser happy: minimum effort please
        simpl_gently,

        -- Specialisation is best done before full laziness
        -- so that overloaded functions have all their dictionary lambdas manifest
        runWhen do_specialise CoreDoSpecialising,

        if full_laziness then
           CoreDoFloatOutwards FloatOutSwitches {
                                 floatOutLambdas   = Just 0,
                                 floatOutConstants = True,
                                 floatOutOverSatApps = False,
                                 floatToTopLevelOnly = False }
                -- Was: gentleFloatOutSwitches
                --
                -- I have no idea why, but not floating constants to
                -- top level is very bad in some cases.
                --
                -- Notably: p_ident in spectral/rewrite
                --          Changing from "gentle" to "constantsOnly"
                --          improved rewrite's allocation by 19%, and
                --          made 0.0% difference to any other nofib
                --          benchmark
                --
                -- Not doing floatOutOverSatApps yet, we'll do
                -- that later on when we've had a chance to get more
                -- accurate arity information. In fact it makes no
                -- difference at all to performance if we do it here,
                -- but maybe we save some unnecessary to-and-fro in
                -- the simplifier.
        else
           -- Even with full laziness turned off, we still need to float static
           -- forms to the top level. See Note [Grand plan for static forms] in
           -- StaticPtrTable.
           static_ptrs_float_outwards,

        simpl_phases,

                -- Phase 0: allow all Ids to be inlined now
                -- This gets foldr inlined before strictness analysis

                -- At least 3 iterations because otherwise we land up with
                -- huge dead expressions because of an infelicity in the
                -- simplifier.
                --      let k = BIG in foldr k z xs
                -- ==>  let k = BIG in letrec go = \xs -> ...(k x).... in go xs
                -- ==>  let k = BIG in letrec go = \xs -> ...(BIG x).... in go xs
                -- Don't stop now!
        simpl_phase 0 ["main"] (max max_iter 3),

        runWhen do_float_in CoreDoFloatInwards,
            -- Run float-inwards immediately before the strictness analyser
            -- Doing so pushes bindings nearer their use site and hence makes
            -- them more likely to be strict. These bindings might only show
            -- up after the inlining from simplification.  Example in fulsom,
            -- Csg.calc, where an arg of timesDouble thereby becomes strict.

        runWhen call_arity $ CoreDoPasses
            [ CoreDoCallArity
            , simpl_phase 0 ["post-call-arity"] max_iter
            ],

        runWhen strictness demand_analyser,

        runWhen exitification CoreDoExitify,
            -- See note [Placement of the exitification pass]

        runWhen full_laziness $
           CoreDoFloatOutwards FloatOutSwitches {
                                 floatOutLambdas     = floatLamArgs dflags,
                                 floatOutConstants   = True,
                                 floatOutOverSatApps = True,
                                 floatToTopLevelOnly = False },
                -- nofib/spectral/hartel/wang doubles in speed if you
                -- do full laziness late in the day.  It only happens
                -- after fusion and other stuff, so the early pass doesn't
                -- catch it.  For the record, the redex is
                --        f_el22 (f_el21 r_midblock)

        runWhen cse CoreCSE,
                -- We want CSE to follow the final full-laziness pass, because it may
                -- succeed in commoning up things floated out by full laziness.
                -- CSE used to rely on the no-shadowing invariant, but it doesn't any more

        runWhen do_float_in CoreDoFloatInwards,

        maybe_rule_check (Phase 0),

                -- Case-liberation for -O2.  This should be after
                -- strictness analysis and the simplification which follows it.
        runWhen liberate_case (CoreDoPasses [
            CoreLiberateCase,
            simpl_phase 0 ["post-liberate-case"] max_iter
            ]),         -- Run the simplifier after LiberateCase to vastly
                        -- reduce the possibility of shadowing
                        -- Reason: see Note [Shadowing] in SpecConstr.hs

        runWhen spec_constr CoreDoSpecConstr,

        maybe_rule_check (Phase 0),

        -- Final clean-up simplification:
        simpl_phase 0 ["final"] max_iter,

        runWhen late_dmd_anal $ CoreDoPasses (
            strictness_pass ++
            [simpl_phase 0 ["post-late-ww"] max_iter]
          ),

        -- Final run of the demand_analyser, ensures that one-shot thunks are
        -- really really one-shot thunks. Only needed if the demand analyser
        -- has run at all. See Note [Final Demand Analyser run] in DmdAnal
        -- It is EXTREMELY IMPORTANT to run this pass, otherwise execution
        -- can become /exponentially/ more expensive. See Trac #11731, #12996.
        runWhen (strictness || late_dmd_anal) CoreDoStrictness,

        maybe_rule_check (Phase 0)
     ]

    -- Remove 'CoreDoNothing' and flatten 'CoreDoPasses' for clarity.
    flatten_todos [] = []
    flatten_todos (CoreDoNothing : rest) = flatten_todos rest
    flatten_todos (CoreDoPasses passes : rest) =
      flatten_todos passes ++ flatten_todos rest
    flatten_todos (todo : rest) = todo : flatten_todos rest
-- Loading plugins
-- | Let loaded plugins interleave their own passes with the built-in
-- pipeline.  Without GHCi support compiled in, naming any plugin module
-- is reported as an error instead.
addPluginPasses :: [CoreToDo] -> CoreM [CoreToDo]
#if !defined(GHCI)
addPluginPasses builtin_passes
  = do { dflags <- getDynFlags
       ; let pluginMods = pluginModNames dflags
       ; unless (null pluginMods) (pluginError pluginMods)
       ; return builtin_passes }
#else
addPluginPasses builtin_passes
  = do { hsc_env <- getHscEnv
       ; named_plugins <- liftIO (loadPlugins hsc_env)
       ; foldM query_plug builtin_passes named_plugins }
  where
    -- Each plugin may rewrite the accumulated pass list in turn.
    query_plug todos (_, plug, options) = installCoreToDos plug options todos
#endif
{- Note [Inline in InitialPhase]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In GHC 8 and earlier we did not inline anything in the InitialPhase. But that is
confusing for users because when they say INLINE they expect the function to inline
right away.
So now we do inlining immediately, even in the InitialPhase, assuming that the
Id's Activation allows it.
This is a surprisingly big deal. Compiler performance improved a lot
when I made this change:
perf/compiler/T5837.run T5837 [stat too good] (normal)
perf/compiler/parsing001.run parsing001 [stat too good] (normal)
perf/compiler/T12234.run T12234 [stat too good] (optasm)
perf/compiler/T9020.run T9020 [stat too good] (optasm)
perf/compiler/T3064.run T3064 [stat too good] (normal)
perf/compiler/T9961.run T9961 [stat too good] (normal)
perf/compiler/T13056.run T13056 [stat too good] (optasm)
perf/compiler/T9872d.run T9872d [stat too good] (normal)
perf/compiler/T783.run T783 [stat too good] (normal)
perf/compiler/T12227.run T12227 [stat too good] (normal)
perf/should_run/lazy-bs-alloc.run lazy-bs-alloc [stat too good] (normal)
perf/compiler/T1969.run T1969 [stat too good] (normal)
perf/compiler/T9872a.run T9872a [stat too good] (normal)
perf/compiler/T9872c.run T9872c [stat too good] (normal)
perf/compiler/T9872b.run T9872b [stat too good] (normal)
perf/compiler/T9872d.run T9872d [stat too good] (normal)
Note [RULEs enabled in SimplGently]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
RULES are enabled when doing "gentle" simplification. Two reasons:
* We really want the class-op cancellation to happen:
op (df d1 d2) --> $cop3 d1 d2
because this breaks the mutual recursion between 'op' and 'df'
* I wanted the RULE
lift String ===> ...
to work in Template Haskell when simplifying
splices, so we get simpler code for literal strings
But watch out: list fusion can prevent floating. So use phase control
to switch off those rules until after floating.
************************************************************************
* *
The CoreToDo interpreter
* *
************************************************************************
-}
-- | Fold the pass list over the module, timing and linting each pass.
-- 'CoreDoNothing' and nested 'CoreDoPasses' are interpreted here rather
-- than dispatched to 'doCorePass'.
runCorePasses :: [CoreToDo] -> ModGuts -> CoreM ModGuts
runCorePasses passes guts
  = foldM do_pass guts passes
  where
    do_pass guts CoreDoNothing = return guts
    do_pass guts (CoreDoPasses ps) = runCorePasses ps guts
    do_pass guts pass
      = withTiming getDynFlags
                   (ppr pass <+> brackets (ppr mod))
                   (const ()) $ do
            { guts' <- lintAnnots (ppr pass) (doCorePass pass) guts
            ; endPass pass (mg_binds guts') (mg_rules guts')
            ; return guts' }

    mod = mg_module guts

-- | Dispatch a single 'CoreToDo' to its implementation; the doPass*
-- combinators below adapt each transformation's signature to
-- @ModGuts -> CoreM ModGuts@.
doCorePass :: CoreToDo -> ModGuts -> CoreM ModGuts
doCorePass pass@(CoreDoSimplify {})  = {-# SCC "Simplify" #-}
                                       simplifyPgm pass

doCorePass CoreCSE                   = {-# SCC "CommonSubExpr" #-}
                                       doPass cseProgram

doCorePass CoreLiberateCase          = {-# SCC "LiberateCase" #-}
                                       doPassD liberateCase

doCorePass CoreDoFloatInwards        = {-# SCC "FloatInwards" #-}
                                       floatInwards

doCorePass (CoreDoFloatOutwards f)   = {-# SCC "FloatOutwards" #-}
                                       doPassDUM (floatOutwards f)

doCorePass CoreDoStaticArgs          = {-# SCC "StaticArgs" #-}
                                       doPassU doStaticArgs

doCorePass CoreDoCallArity           = {-# SCC "CallArity" #-}
                                       doPassD callArityAnalProgram

doCorePass CoreDoExitify             = {-# SCC "Exitify" #-}
                                       doPass exitifyProgram

doCorePass CoreDoStrictness          = {-# SCC "NewStranal" #-}
                                       doPassDFM dmdAnalProgram

doCorePass CoreDoWorkerWrapper       = {-# SCC "WorkWrap" #-}
                                       doPassDFU wwTopBinds

doCorePass CoreDoSpecialising        = {-# SCC "Specialise" #-}
                                       specProgram

doCorePass CoreDoSpecConstr          = {-# SCC "SpecConstr" #-}
                                       specConstrProgram

doCorePass CoreDoVectorisation       = {-# SCC "Vectorise" #-}
                                       vectorise

doCorePass CoreDoPrintCore              = observe   printCore
doCorePass (CoreDoRuleCheck phase pat)  = ruleCheckPass phase pat
doCorePass CoreDoNothing                = return
doCorePass (CoreDoPasses passes)        = runCorePasses passes

#if defined(GHCI)
doCorePass (CoreDoPluginPass _ pass) = {-# SCC "Plugin" #-} pass
#endif

-- Any constructor not handled above is a compiler bug.
doCorePass pass = pprPanic "doCorePass" (ppr pass)
{-
************************************************************************
* *
\subsection{Core pass combinators}
* *
************************************************************************
-}
-- | Unconditionally dump the given Core program under the banner
-- \"Print Core\".
printCore :: DynFlags -> CoreProgram -> IO ()
printCore dflags = Err.dumpIfSet dflags True "Print Core" . pprCoreBindings
-- | Run the rule-check "pass": report every call site where a rule
-- whose name matches @pat@ might have fired but did not.  Purely
-- observational; the module guts are returned unchanged.
ruleCheckPass :: CompilerPhase -> String -> ModGuts -> CoreM ModGuts
ruleCheckPass current_phase pat guts =
    withTiming getDynFlags
               (text "RuleCheck"<+>brackets (ppr $ mg_module guts))
               (const ()) $ do
    { rb <- getRuleBase
    ; dflags <- getDynFlags
    ; vis_orphs <- getVisibleOrphanMods
      -- Emit the report through the normal dump machinery.
    ; liftIO $ putLogMsg dflags NoReason Err.SevDump noSrcSpan
                 (defaultDumpStyle dflags)
                 (ruleCheckProgram current_phase pat
                    (RuleEnv rb vis_orphs) (mg_binds guts))
    ; return guts }
-- | Lift an IO pass that needs 'DynFlags' and a unique supply.
doPassDUM :: (DynFlags -> UniqSupply -> CoreProgram -> IO CoreProgram) -> ModGuts -> CoreM ModGuts
doPassDUM pass_fn = doPassM run
  where
    run binds = do
      dflags <- getDynFlags
      us     <- getUniqueSupplyM
      liftIO (pass_fn dflags us binds)
-- | Lift an IO pass that needs 'DynFlags' but no unique supply.
doPassDM :: (DynFlags -> CoreProgram -> IO CoreProgram) -> ModGuts -> CoreM ModGuts
doPassDM pass_fn = doPassDUM (\dflags _us binds -> pass_fn dflags binds)
-- | Lift a pure pass that needs 'DynFlags'.
doPassD :: (DynFlags -> CoreProgram -> CoreProgram) -> ModGuts -> CoreM ModGuts
doPassD pass_fn = doPassDM (\dflags binds -> return (pass_fn dflags binds))
-- | Lift a pure pass that needs 'DynFlags' and a unique supply.
doPassDU :: (DynFlags -> UniqSupply -> CoreProgram -> CoreProgram) -> ModGuts -> CoreM ModGuts
doPassDU pass_fn = doPassDUM (\dflags us binds -> return (pass_fn dflags us binds))
-- | Lift a pure pass that needs only a unique supply.
doPassU :: (UniqSupply -> CoreProgram -> CoreProgram) -> ModGuts -> CoreM ModGuts
doPassU pass_fn = doPassDU (\_dflags us -> pass_fn us)
-- | Lift an IO pass that needs 'DynFlags' and the family-instance
-- environments (package-wide plus this module's).
doPassDFM :: (DynFlags -> FamInstEnvs -> CoreProgram -> IO CoreProgram) -> ModGuts -> CoreM ModGuts
doPassDFM pass_fn guts = do
    dflags    <- getDynFlags
    p_fam_env <- getPackageFamInstEnv
    doPassM (\binds -> liftIO (pass_fn dflags (p_fam_env, mg_fam_inst_env guts) binds)) guts
-- | Lift a pure pass that needs 'DynFlags', the family-instance
-- environments, and a unique supply.
doPassDFU :: (DynFlags -> FamInstEnvs -> UniqSupply -> CoreProgram -> CoreProgram) -> ModGuts -> CoreM ModGuts
doPassDFU pass_fn guts = do
    dflags    <- getDynFlags
    us        <- getUniqueSupplyM
    p_fam_env <- getPackageFamInstEnv
    doPass (pass_fn dflags (p_fam_env, mg_fam_inst_env guts) us) guts
-- Most passes return no stats and don't change rules: these combinators
-- let us lift them to the full blown ModGuts+CoreM world
doPassM :: Monad m => (CoreProgram -> m CoreProgram) -> ModGuts -> m ModGuts
doPassM bind_f guts =
    bind_f (mg_binds guts) >>= \binds' -> return guts { mg_binds = binds' }
-- | Lift a pure binding-to-binding transformation into 'CoreM'.
doPass :: (CoreProgram -> CoreProgram) -> ModGuts -> CoreM ModGuts
doPass f guts = return (guts { mg_binds = f (mg_binds guts) })
-- Observer passes just peek; don't modify the bindings at all
observe :: (DynFlags -> CoreProgram -> IO a) -> ModGuts -> CoreM ModGuts
observe peek = doPassM inspect
  where
    inspect binds = do
      dflags  <- getDynFlags
      _result <- liftIO (peek dflags binds)
      return binds
{-
************************************************************************
* *
Gentle simplification
* *
************************************************************************
-}
simplifyExpr :: DynFlags -- includes spec of what core-to-core passes to do
             -> CoreExpr
             -> IO CoreExpr
-- simplifyExpr is called by the driver to simplify an
-- expression typed in at the interactive prompt
--
-- Also used by Template Haskell
simplifyExpr dflags expr
  = withTiming (pure dflags) (text "Simplify [expr]") (const ()) $
    do {
       ; us <- mkSplitUniqSupply 's'

         -- Initial size estimate used to size the simplifier's
         -- tick budget.
       ; let sz = exprSize expr

       ; (expr', counts) <- initSmpl dflags emptyRuleEnv
                                     emptyFamInstEnvs us sz
                                     (simplExprGently (simplEnvForGHCi dflags) expr)

         -- Dump statistics / the simplified expression when the
         -- corresponding -ddump flags are set.
       ; Err.dumpIfSet dflags (dopt Opt_D_dump_simpl_stats dflags)
                       "Simplifier statistics" (pprSimplCount counts)

       ; Err.dumpIfSet_dyn dflags Opt_D_dump_simpl "Simplified expression"
                           (pprCoreExpr expr')

       ; return expr'
       }
simplExprGently :: SimplEnv -> CoreExpr -> SimplM CoreExpr
-- Simplifies an expression
--      does occurrence analysis, then simplification
--      and repeats (twice currently) because one pass
--      alone leaves tons of crud.
-- Used (a) for user expressions typed in at the interactive prompt
--      (b) the LHS and RHS of a RULE
--      (c) Template Haskell splices
--
-- The name 'Gently' suggests that the SimplMode is SimplGently,
-- and in fact that is so.... but the 'Gently' in simplExprGently doesn't
-- enforce that; it just simplifies the expression twice

-- It's important that simplExprGently does eta reduction; see
-- Note [Simplifying the left-hand side of a RULE] above.  The
-- simplifier does indeed do eta reduction (it's in Simplify.completeLam)
-- but only if -O is on.

simplExprGently env expr = do
    -- Two occ-anal/simplify rounds; the second cleans up residue left
    -- by the first.
    expr1 <- simplExpr env (occurAnalyseExpr expr)
    simplExpr env (occurAnalyseExpr expr1)
{-
************************************************************************
* *
\subsection{The driver for the simplifier}
* *
************************************************************************
-}
-- | Top-level driver for one run of the simplifier: gather the pieces
-- of the environment from 'CoreM' and hand off to 'simplifyPgmIO'.
simplifyPgm :: CoreToDo -> ModGuts -> CoreM ModGuts
simplifyPgm pass guts = do
    hsc_env     <- getHscEnv
    uniq_supply <- getUniqueSupplyM
    rule_base   <- getRuleBase
    liftIOWithCount (simplifyPgmIO pass hsc_env uniq_supply rule_base guts)
-- | The guts of the simplifier driver: run up to @max_iterations@
-- rounds of occurrence analysis + simplification + indirection
-- shorting, stopping early when an iteration makes no changes.
simplifyPgmIO :: CoreToDo
              -> HscEnv
              -> UniqSupply
              -> RuleBase
              -> ModGuts
              -> IO (SimplCount, ModGuts)  -- New bindings
simplifyPgmIO pass@(CoreDoSimplify max_iterations mode)
              hsc_env us hpt_rule_base
              guts@(ModGuts { mg_module = this_mod
                            , mg_rdr_env = rdr_env
                            , mg_deps = deps
                            , mg_binds = binds, mg_rules = rules
                            , mg_fam_inst_env = fam_inst_env })
  = do { (termination_msg, it_count, counts_out, guts')
            <- do_iteration us 1 [] binds rules

       ; Err.dumpIfSet dflags (dopt Opt_D_verbose_core2core dflags &&
                               dopt Opt_D_dump_simpl_stats dflags)
                  "Simplifier statistics for following pass"
                  (vcat [text termination_msg <+> text "after" <+> ppr it_count
                         <+> text "iterations",
                         blankLine,
                         pprSimplCount counts_out])

       ; return (counts_out, guts')
       }
  where
    dflags       = hsc_dflags hsc_env
    print_unqual = mkPrintUnqualified dflags rdr_env
    simpl_env    = mkSimplEnv mode
    active_rule  = activeRule mode
    active_unf   = activeUnfolding mode

    do_iteration :: UniqSupply
                 -> Int          -- Counts iterations
                 -> [SimplCount] -- Counts from earlier iterations, reversed
                 -> CoreProgram  -- Bindings in
                 -> [CoreRule]   -- and orphan rules
                 -> IO (String, Int, SimplCount, ModGuts)

    do_iteration us iteration_no counts_so_far binds rules
        -- iteration_no is the number of the iteration we are
        -- about to begin, with '1' for the first
      | iteration_no > max_iterations   -- Stop if we've run out of iterations
      = WARN( debugIsOn && (max_iterations > 2)
            , hang (text "Simplifier bailing out after" <+> int max_iterations
                    <+> text "iterations"
                    <+> (brackets $ hsep $ punctuate comma $
                         map (int . simplCountN) (reverse counts_so_far)))
                 2 (text "Size =" <+> ppr (coreBindsStats binds)))

        -- Subtract 1 from iteration_no to get the
        -- number of iterations we actually completed
        return ( "Simplifier baled out", iteration_no - 1
               , totalise counts_so_far
               , guts { mg_binds = binds, mg_rules = rules } )

      -- Try and force thunks off the binds; significantly reduces
      -- space usage, especially with -O.  JRS, 000620.
      | let sz = coreBindsSize binds
      , () <- sz `seq` ()     -- Force it
      = do {
           -- Occurrence analysis
           let { -- Note [Vectorisation declarations and occurrences]
                 -- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
                 -- During the 'InitialPhase' (i.e., before vectorisation), we need to make sure
                 -- that the right-hand sides of vectorisation declarations are taken into
                 -- account during occurrence analysis. After the 'InitialPhase', we need to ensure
                 -- that the binders representing variable vectorisation declarations are kept alive.
                 -- (In contrast to automatically vectorised variables, their unvectorised versions
                 -- don't depend on them.)
                 vectVars = mkVarSet $
                              catMaybes [ fmap snd $ lookupDVarEnv (vectInfoVar (mg_vect_info guts)) bndr
                                        | Vect bndr _ <- mg_vect_decls guts]
                              ++
                              catMaybes [ fmap snd $ lookupDVarEnv (vectInfoVar (mg_vect_info guts)) bndr
                                        | bndr <- bindersOfBinds binds]
                              -- FIXME: This second comprehension is only needed as long as we
                              --        have vectorised bindings where we get "Could NOT call
                              --        vectorised from original version".
               ; (maybeVects, maybeVectVars)
                    = case sm_phase mode of
                        InitialPhase -> (mg_vect_decls guts, vectVars)
                        _            -> ([], vectVars)
               ; tagged_binds = {-# SCC "OccAnal" #-}
                     occurAnalysePgm this_mod active_unf active_rule rules
                                     maybeVects maybeVectVars binds
               } ;
           Err.dumpIfSet_dyn dflags Opt_D_dump_occur_anal "Occurrence analysis"
                     (pprCoreBindings tagged_binds);

           -- Get any new rules, and extend the rule base
           -- See Note [Overall plumbing for rules] in Rules.hs
           -- We need to do this regularly, because simplification can
           -- poke on IdInfo thunks, which in turn brings in new rules
           -- behind the scenes.  Otherwise there's a danger we'll simply
           -- miss the rules for Ids hidden inside imported inlinings
           eps <- hscEPS hsc_env ;
           let { rule_base1 = unionRuleBase hpt_rule_base (eps_rule_base eps)
               ; rule_base2 = extendRuleBaseList rule_base1 rules
               ; fam_envs = (eps_fam_inst_env eps, fam_inst_env)
               ; vis_orphs = this_mod : dep_orphs deps } ;

           -- Simplify the program
           ((binds1, rules1), counts1) <-
             initSmpl dflags (mkRuleEnv rule_base2 vis_orphs) fam_envs us1 sz $
               do { (floats, env1) <- {-# SCC "SimplTopBinds" #-}
                                      simplTopBinds simpl_env tagged_binds
                      -- Apply the substitution to rules defined in this module
                      -- for imported Ids.  Eg  RULE map my_f = blah
                      -- If we have a substitution my_f :-> other_f, we'd better
                      -- apply it to the rule to, or it'll never match
                  ; rules1 <- simplRules env1 Nothing rules

                  ; return (getTopFloatBinds floats, rules1) } ;

           -- Stop if nothing happened; don't dump output
           if isZeroSimplCount counts1 then
               return ( "Simplifier reached fixed point", iteration_no
                      , totalise (counts1 : counts_so_far)  -- Include "free" ticks
                      , guts { mg_binds = binds1, mg_rules = rules1 } )
           else do {
           -- Short out indirections
           -- We do this *after* at least one run of the simplifier
           -- because indirection-shorting uses the export flag on *occurrences*
           -- and that isn't guaranteed to be ok until after the first run propagates
           -- stuff from the binding site to its occurrences
           --
           -- ToDo: alas, this means that indirection-shorting does not happen at all
           --       if the simplifier does nothing (not common, I know, but unsavoury)
           let { binds2 = {-# SCC "ZapInd" #-} shortOutIndirections binds1 } ;

           -- Dump the result of this iteration
           dump_end_iteration dflags print_unqual iteration_no counts1 binds2 rules1 ;
           lintPassResult hsc_env pass binds2 ;

           -- Loop
           do_iteration us2 (iteration_no + 1) (counts1:counts_so_far) binds2 rules1
           } }

      | otherwise = panic "do_iteration"
      where
        (us1, us2) = splitUniqSupply us

    -- Remember the counts_so_far are reversed
    totalise :: [SimplCount] -> SimplCount
    totalise = foldr (\c acc -> acc `plusSimplCount` c)
                     (zeroSimplCount dflags)

simplifyPgmIO _ _ _ _ _ = panic "simplifyPgmIO"
-------------------
-- | Dump one simplifier iteration's bindings, rules and tick counts
-- (only when -ddump-simpl-iterations is enabled).
dump_end_iteration :: DynFlags -> PrintUnqualified -> Int
                   -> SimplCount -> CoreProgram -> [CoreRule] -> IO ()
dump_end_iteration dflags print_unqual iteration_no counts binds rules =
    dumpPassResult dflags print_unqual mb_flag hdr pp_counts binds rules
  where
    -- Show details if Opt_D_dump_simpl_iterations is on
    mb_flag = if dopt Opt_D_dump_simpl_iterations dflags
                then Just Opt_D_dump_simpl_iterations
                else Nothing
    hdr       = text "Simplifier iteration=" <> int iteration_no
    pp_counts = vcat [ text "---- Simplifier counts for" <+> hdr
                     , pprSimplCount counts
                     , text "---- End of simplifier counts for" <+> hdr ]
{-
************************************************************************
* *
Shorting out indirections
* *
************************************************************************
If we have this:
x_local = <expression>
...bindings...
x_exported = x_local
where x_exported is exported, and x_local is not, then we replace it with this:
x_exported = <expression>
x_local = x_exported
...bindings...
Without this we never get rid of the x_exported = x_local thing. This
saves a gratuitous jump (from \tr{x_exported} to \tr{x_local}), and
makes strictness information propagate better. This used to happen in
the final phase, but it's tidier to do it here.
Note [Transferring IdInfo]
~~~~~~~~~~~~~~~~~~~~~~~~~~
We want to propagate any useful IdInfo on x_local to x_exported.
STRICTNESS: if we have done strictness analysis, we want the strictness info on
x_local to transfer to x_exported. Hence the copyIdInfo call.
RULES: we want to *add* any RULES for x_local to x_exported.
Note [Messing up the exported Id's RULES]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We must be careful about discarding (obviously) or even merging the
RULES on the exported Id. The example that went bad on me at one stage
was this one:
iterate :: (a -> a) -> a -> [a]
[Exported]
iterate = iterateList
iterateFB c f x = x `c` iterateFB c f (f x)
iterateList f x = x : iterateList f (f x)
[Not exported]
{-# RULES
"iterate" forall f x. iterate f x = build (\c _n -> iterateFB c f x)
"iterateFB" iterateFB (:) = iterateList
#-}
This got shorted out to:
iterateList :: (a -> a) -> a -> [a]
iterateList = iterate
iterateFB c f x = x `c` iterateFB c f (f x)
iterate f x = x : iterate f (f x)
{-# RULES
"iterate" forall f x. iterate f x = build (\c _n -> iterateFB c f x)
"iterateFB" iterateFB (:) = iterate
#-}
And now we get an infinite loop in the rule system
iterate f x -> build (\cn -> iterateFB c f x)
-> iterateFB (:) f x
-> iterate f x
Old "solution":
use rule switching-off pragmas to get rid
of iterateList in the first place
But in principle the user *might* want rules that only apply to the Id
he says. And inline pragmas are similar
{-# NOINLINE f #-}
f = local
local = <stuff>
Then we do not want to get rid of the NOINLINE.
Hence hasShortableIdinfo.
Note [Rules and indirection-zapping]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Problem: what if x_exported has a RULE that mentions something in ...bindings...?
Then the things mentioned can be out of scope! Solution
a) Make sure that in this pass the usage-info from x_exported is
available for ...bindings...
b) If there are any such RULES, rec-ify the entire top-level.
It'll get sorted out next time round
Other remarks
~~~~~~~~~~~~~
If more than one exported thing is equal to a local thing (i.e., the
local thing really is shared), then we do one only:
\begin{verbatim}
x_local = ....
x_exported1 = x_local
x_exported2 = x_local
==>
x_exported1 = ....
x_exported2 = x_exported1
\end{verbatim}
We rely on prior eta reduction to simplify things like
\begin{verbatim}
x_exported = /\ tyvars -> x_local tyvars
==>
x_exported = x_local
\end{verbatim}
Hence,there's a possibility of leaving unchanged something like this:
\begin{verbatim}
x_local = ....
x_exported1 = x_local Int
\end{verbatim}
By the time we've thrown away the types in STG land this
could be eliminated. But I don't think it's very common
and it's dangerous to do this fiddling in STG land
because we might eliminate a binding that's mentioned in the
unfolding for something.
Note [Indirection zapping and ticks]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Unfortunately this is another place where we need a special case for
ticks. The following happens quite regularly:
x_local = <expression>
x_exported = tick<x> x_local
Which we want to become:
x_exported = tick<x> <expression>
As it makes no sense to keep the tick and the expression on separate
bindings. Note however that this might increase the ticks scoping
over the execution of x_local, so we can only do this for floatable
ticks. More often than not, other references will be unfoldings of
x_exported, and therefore carry the tick anyway.
-}
type IndEnv = IdEnv (Id, [Tickish Var]) -- Maps local_id -> exported_id, ticks

-- | Replace @x_exported = x_local@ indirection pairs by binding the
-- real RHS to the exported Id directly; see the Notes above for the
-- motivation, the RULES pitfalls, and the tick handling.
shortOutIndirections :: CoreProgram -> CoreProgram
shortOutIndirections binds
  | isEmptyVarEnv ind_env = binds
  | no_need_to_flatten    = binds'                      -- See Note [Rules and indirection-zapping]
  | otherwise             = [Rec (flattenBinds binds')] -- for this no_need_to_flatten stuff
  where
    ind_env            = makeIndEnv binds
    -- These exported Ids are the subjects of the indirection-elimination
    exp_ids            = map fst $ nonDetEltsUFM ind_env
      -- It's OK to use nonDetEltsUFM here because we forget the ordering
      -- by immediately converting to a set or check if all the elements
      -- satisfy a predicate.
    exp_id_set         = mkVarSet exp_ids
    no_need_to_flatten = all (null . ruleInfoRules . idSpecialisation) exp_ids
    binds'             = concatMap zap binds

    zap (NonRec bndr rhs) = [NonRec b r | (b,r) <- zapPair (bndr,rhs)]
    zap (Rec pairs)       = [Rec (concatMap zapPair pairs)]

    zapPair (bndr, rhs)
      | bndr `elemVarSet` exp_id_set
      = []                                  -- Drop the old exported binding
      | Just (exp_id, ticks) <- lookupVarEnv ind_env bndr
      = [(transferIdInfo exp_id bndr,
          mkTicks ticks rhs),               -- Floatable ticks move onto the RHS
         (bndr, Var exp_id)]                -- Re-point the local at the export
      | otherwise = [(bndr,rhs)]
-- | Build the local-id -> (exported-id, floatable ticks) map that
-- drives indirection shorting.
makeIndEnv :: [CoreBind] -> IndEnv
makeIndEnv = foldr add_bind emptyVarEnv
  where
    add_bind :: CoreBind -> IndEnv -> IndEnv
    add_bind bind acc = case bind of
      NonRec exported_id rhs -> add_pair (exported_id, rhs) acc
      Rec pairs              -> foldr add_pair acc pairs

    add_pair :: (Id, CoreExpr) -> IndEnv -> IndEnv
    add_pair (exported_id, exported) acc
      | (ticks, Var local_id) <- stripTicksTop tickishFloatable exported
      , shortMeOut acc exported_id local_id
      = extendVarEnv acc local_id (exported_id, ticks)
      | otherwise
      = acc
-----------------
-- | Decide whether the pair @exported_id = local_id@ is a candidate for
-- indirection shorting.
shortMeOut :: IndEnv -> Id -> Id -> Bool
shortMeOut ind_env exported_id local_id
-- The if-then-else stuff is just so I can get a pprTrace to see
-- how often I don't get shorting out because of IdInfo stuff
  = if isExportedId exported_id &&           -- Only if this is exported

       isLocalId local_id &&                 -- Only if this one is defined in this
                                             --      module, so that we *can* change its
                                             --      binding to be the exported thing!

       not (isExportedId local_id) &&        -- Only if this one is not itself exported,
                                             --      since the transformation will nuke it

       not (local_id `elemVarEnv` ind_env)   -- Only if not already substituted for
    then
        if hasShortableIdInfo exported_id
        then True       -- See Note [Messing up the exported Id's RULES]
        else WARN( True, text "Not shorting out:" <+> ppr exported_id )
             False
    else
        False
-----------------
-- | True if there is no user-attached IdInfo on the exported id, so it
-- can be safely discarded.
-- See Note [Messing up the exported Id's RULES].
hasShortableIdInfo :: Id -> Bool
hasShortableIdInfo the_id = no_rules && default_pragma && no_stable_unf
  where
    info           = idInfo the_id
    no_rules       = isEmptyRuleInfo (ruleInfo info)
    default_pragma = isDefaultInlinePragma (inlinePragInfo info)
    no_stable_unf  = not (isStableUnfolding (unfoldingInfo info))
-----------------
transferIdInfo :: Id -> Id -> Id
-- See Note [Transferring IdInfo]
-- If we have
--     lcl_id = e; exp_id = lcl_id
-- and lcl_id has useful IdInfo, we don't want to discard it by going
--     gbl_id = e; lcl_id = gbl_id
-- Instead, transfer IdInfo from lcl_id to exp_id
-- Overwriting, rather than merging, seems to work ok.
transferIdInfo exported_id local_id
  = modifyIdInfo transfer exported_id
  where
    local_info = idInfo local_id
    -- Strictness, unfolding and inline pragma are overwritten; RULES
    -- from both Ids are combined (see Note [Transferring IdInfo]).
    transfer exp_info = exp_info `setStrictnessInfo` strictnessInfo local_info
                                 `setUnfoldingInfo`  unfoldingInfo local_info
                                 `setInlinePragInfo` inlinePragInfo local_info
                                 `setRuleInfo`       addRuleInfo (ruleInfo exp_info) new_info
    new_info = setRuleInfoHead (idName exported_id)
                               (ruleInfo local_info)
        -- Remember to set the function-name field of the
        -- rules as we transfer them from one function to another
|
shlevy/ghc
|
compiler/simplCore/SimplCore.hs
|
Haskell
|
bsd-3-clause
| 45,493
|
module System.Build.Access.Top where
-- | Access to an optional \"top\" setting carried by a build
-- configuration value.
class Top r where
  -- | Set (or, with 'Nothing', clear) the top value.
  top ::
    Maybe String
    -> r
    -> r
  -- | Read the current top value.
  getTop ::
    r
    -> Maybe String
|
tonymorris/lastik
|
System/Build/Access/Top.hs
|
Haskell
|
bsd-3-clause
| 144
|
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE UndecidableInstances #-}
module Data.Typewriter.Data.List where
import Data.Typewriter.Core
-- | Type-level head of a cons-list built from ':*:'.
type family Head xs :: *
type instance Head (h :*: t) = h

-- | Type-level tail.
type family Tail xs :: *
type instance Tail (h :*: t) = t

-- | Type-level length as a Peano numeral ('Z' / 'S').
type family Length xs :: *
type instance Length Unit = Z
type instance Length (h :*: t) = S (Length t)

-- | Type-level append.
type family Concat xs ys :: *
type instance Concat Unit ys = ys
type instance Concat (x :*: xs) ys = x :*: Concat xs ys

-- | Type-level map of a unary type constructor over the elements.
type family Map (f :: * -> *) xs :: *
type instance Map f Unit = Unit
type instance Map f (x :*: xs) = f x :*: Map f xs

-- | Type-level right fold with a binary type constructor.
type family FoldR (f :: * -> * -> *) z xs :: *
type instance FoldR f z Unit = z
type instance FoldR f z (x :*: xs) = f x (FoldR f z xs)
-- | Value-level operations over heterogeneous cons-lists; each method's
-- result type is computed by the matching type family above.
class List l where
  -- | Append two lists.
  tConcat :: (List a) => l -> a -> Concat l a
  -- | Length as a Peano-numeral witness.
  tLength :: l -> Length l
  -- | Apply a rank-2 wrapper to every element.
  tMap :: (forall x. x -> f x) -> l -> Map f l
  -- | Right fold with a rank-2 pairing function.
  tFoldR :: (forall x y. x -> y -> f x y) -> z -> l -> FoldR f z l

instance List Unit where
  tConcat Unit ys = ys
  tLength Unit = Zero
  tMap _ Unit = Unit
  tFoldR _ z Unit = z

instance (List t) => List (h :*: t) where
  tConcat (x :*: xs) ys = x :*: tConcat xs ys
  tLength (_ :*: xs) = Succ (tLength xs)
  tMap f (x :*: xs) = f x :*: tMap f xs
  tFoldR f z (x :*: xs) = f x (tFoldR f z xs)
-- | Type-level reverse, via repeated append.
type family Reverse xs :: *
type instance Reverse Unit = Unit
type instance Reverse (x :*: xs) = Concat (Reverse xs) (x :*: Unit)

instance ListReverse Unit where
  tReverse Unit = Unit

-- | Value-level reverse.
-- NOTE(review): only the 'Unit' instance is visible here; confirm a
-- @(h :*: t)@ instance exists elsewhere, otherwise non-empty lists
-- cannot be reversed.
class (List l, List (Reverse l)) => ListReverse l where
  tReverse :: l -> Reverse l
-- | Type-level zip; the result is truncated to the shorter input.
type family ZipWith (f :: * -> * -> *) xs ys :: *
type instance ZipWith f xs Unit = Unit
type instance ZipWith f Unit ys = Unit
type instance ZipWith f (x :*: xs) (y :*: ys) = f x y :*: ZipWith f xs ys

-- | Value-level zip with a rank-2 pairing function.
class (List xs, List ys) => Zip xs ys where
  tZipWith :: (forall x y. x -> y -> f x y) -> xs -> ys -> ZipWith f xs ys

instance (Zip xs ys) => Zip (x :*: xs) (y :*: ys) where
  tZipWith f (x :*: xs) (y :*: ys) = f x y :*: tZipWith f xs ys

-- The next two instances truncate when the lists have different lengths.
instance (List xs) => Zip (x :*: xs) Unit where
  tZipWith _ _ Unit = Unit

instance (List ys) => Zip Unit (y :*: ys) where
  tZipWith _ Unit _ = Unit

instance Zip Unit Unit where
  tZipWith _ Unit Unit = Unit
|
isomorphism/typewriter
|
Data/Typewriter/Data/List.hs
|
Haskell
|
bsd-3-clause
| 2,383
|
module Events.LeaveChannelConfirm where
import Prelude ()
import Prelude.MH
import qualified Graphics.Vty as Vty
import State.Channels
import Types
-- | Handle a key press while the leave-channel confirmation prompt is
-- shown: @y@/@Y@ confirms leaving the channel, any other key cancels;
-- in both cases we return to the main mode.
onEventLeaveChannelConfirm :: Vty.Event -> MH ()
onEventLeaveChannelConfirm (Vty.EvKey k []) = do
    case k of
        Vty.KChar 'y' -> leaveCurrentChannel
        Vty.KChar 'Y' -> leaveCurrentChannel
        _             -> return ()
    setMode Main
onEventLeaveChannelConfirm _ = return ()
|
aisamanra/matterhorn
|
src/Events/LeaveChannelConfirm.hs
|
Haskell
|
bsd-3-clause
| 467
|
{-# LANGUAGE OverloadedStrings #-}
module Data.GraphQL.AST.Transform where
import Control.Applicative (empty)
import Control.Monad ((<=<))
import Data.Bifunctor (first)
import Data.Either (partitionEithers)
import Data.Foldable (fold, foldMap)
import qualified Data.List.NonEmpty as NonEmpty
import Data.Monoid (Alt(Alt,getAlt), (<>))
import Data.Text (Text)
import qualified Data.GraphQL.AST as Full
import qualified Data.GraphQL.AST.Core as Core
import qualified Data.GraphQL.Schema as Schema
type Name = Text
-- | Replaces a fragment name by a list of 'Field'. If the name doesn't match, an
-- empty list is returned.
type Fragmenter = Name -> [Core.Field]
-- TODO: Replace Maybe by MonadThrow with CustomError
-- | Translate a full AST document into the core AST: fragment
-- definitions are fused into a single 'Fragmenter', which is then used
-- while converting the operations.
document :: Schema.Subs -> Full.Document -> Maybe Core.Document
document subs doc = operations subs fr ops
  where
    -- Fragment definitions go Left, operations Right.
    (fr, ops) = first foldFrags
              . partitionEithers
              . NonEmpty.toList
              $ defrag subs
              <$> doc
-- | Fuse fragmenters: for a given name, return the first non-empty
-- expansion among them ('Alt' picks the leftmost non-empty list).
foldFrags :: [Fragmenter] -> Fragmenter
foldFrags fragmenters name =
    getAlt (foldMap (\f -> Alt (f name)) fragmenters)
-- * Operation

-- TODO: Replace Maybe by MonadThrow CustomError
-- | Convert all operation definitions; 'Nothing' when any operation
-- fails to convert or when there are no operations at all.
operations
  :: Schema.Subs
  -> Fragmenter
  -> [Full.OperationDefinition]
  -> Maybe Core.Document
operations subs fr opDefs =
  NonEmpty.nonEmpty =<< traverse (operation subs fr) opDefs
-- TODO: Replace Maybe by MonadThrow CustomError
-- | Convert one operation definition.
operation
  :: Schema.Subs
  -> Fragmenter
  -> Full.OperationDefinition
  -> Maybe Core.Operation
-- A bare selection set is treated as an anonymous query.
operation subs fr (Full.OperationSelectionSet sels) =
  operation subs fr $ Full.OperationDefinition Full.Query empty empty empty sels
-- TODO: Validate Variable definitions with substituter
operation subs fr (Full.OperationDefinition ot _n _vars _dirs sels) =
  case ot of
    Full.Query -> Core.Query <$> node
    Full.Mutation -> Core.Mutation <$> node
  where
    -- NOTE(review): a top-level fragment spread yields a 'Left' from
    -- 'selection', which 'hush' turns into 'Nothing' — i.e. it fails
    -- the whole operation rather than expanding. Confirm intended.
    node = traverse (hush . selection subs fr) sels
-- | Translate a single selection: 'Left' carries the expansion of a
-- fragment spread (zero or more fields), 'Right' an ordinary field.
selection
  :: Schema.Subs
  -> Fragmenter
  -> Full.Selection
  -> Either [Core.Field] Core.Field
selection subs fr (Full.SelectionField fld) =
  Right $ field subs fr fld
selection _ fr (Full.SelectionFragmentSpread (Full.FragmentSpread n _dirs)) =
  Left $ fr n
-- NOTE(review): partial — inline fragments crash at runtime via 'error'.
selection _ _ (Full.SelectionInlineFragment _) =
  error "Inline fragments not supported yet"
-- * Fragment replacement

-- | Split one definition: operations pass through on the 'Right';
-- fragment definitions become 'Fragmenter's on the 'Left'.
defrag
  :: Schema.Subs
  -> Full.Definition
  -> Either Fragmenter Full.OperationDefinition
defrag subs definition = case definition of
  Full.DefinitionOperation op     -> Right op
  Full.DefinitionFragment fragDef -> Left (fragmentDefinition subs fragDef)
-- | Build a 'Fragmenter' from one fragment definition: it expands only
-- queries for the matching fragment name, yielding no fields otherwise.
fragmentDefinition :: Schema.Subs -> Full.FragmentDefinition -> Fragmenter
fragmentDefinition subs (Full.FragmentDefinition name _tc _dirs sels) name' =
  -- TODO: Support fragments within fragments. Fold instead of map.
  if name == name'
    then either id pure =<< NonEmpty.toList (selection subs mempty <$> sels)
    else empty
-- | Translate a field, expanding fragment spreads among its
-- sub-selections via the supplied 'Fragmenter'.
-- NOTE(review): 'fold' collapses a failed argument substitution
-- ('Nothing' from 'traverse') into an empty argument list instead of
-- propagating the failure — confirm this is intended.
field :: Schema.Subs -> Fragmenter -> Full.Field -> Core.Field
field subs fr (Full.Field a n args _dirs sels) =
  Core.Field a n (fold $ argument subs `traverse` args) (foldr go empty sels)
  where
    go :: Full.Selection -> [Core.Field] -> [Core.Field]
    go (Full.SelectionFragmentSpread (Full.FragmentSpread name _dirs)) = (fr name <>)
    go sel = (either id pure (selection subs fr sel) <>)
-- | Convert one argument, substituting variables in its value;
-- 'Nothing' when a variable has no substitution.
argument :: Schema.Subs -> Full.Argument -> Maybe Core.Argument
argument subs (Full.Argument name val) = fmap (Core.Argument name) (value subs val)
-- | Convert a value, resolving variables through the substitution map;
-- fails ('Nothing') only when a variable cannot be resolved, including
-- inside lists and objects.
value :: Schema.Subs -> Full.Value -> Maybe Core.Value
value subs v = case v of
  Full.ValueVariable n -> subs n
  Full.ValueInt i      -> Just (Core.ValueInt i)
  Full.ValueFloat f    -> Just (Core.ValueFloat f)
  Full.ValueString x   -> Just (Core.ValueString x)
  Full.ValueBoolean b  -> Just (Core.ValueBoolean b)
  Full.ValueNull       -> Just Core.ValueNull
  Full.ValueEnum e     -> Just (Core.ValueEnum e)
  Full.ValueList l     -> Core.ValueList <$> traverse (value subs) l
  Full.ValueObject o   -> Core.ValueObject <$> traverse (objectField subs) o
-- | Convert one object field, substituting variables in its value.
objectField :: Schema.Subs -> Full.ObjectField -> Maybe Core.ObjectField
objectField subs (Full.ObjectField name val) =
    fmap (Core.ObjectField name) (value subs val)
-- | Collapse an 'Either' into a 'Maybe', discarding any error value.
hush :: Either a b -> Maybe b
hush (Left _)  = Nothing
hush (Right b) = Just b
|
jdnavarro/graphql-haskell
|
Data/GraphQL/AST/Transform.hs
|
Haskell
|
bsd-3-clause
| 4,328
|
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE UndecidableInstances #-}
-----------------------------------------------------------------------------
-- |
-- Copyright : (c) Edward Kmett 2011-2015
-- License : BSD3
--
-- Maintainer : ekmett@gmail.com
-- Stability : experimental
-- Portability : non-portable
--
-- Results and Parse Errors
-----------------------------------------------------------------------------
module Text.Trifecta.Result
(
-- * Parse Results
Result(..)
, AsResult(..)
, _Success
, _Failure
-- * Parsing Errors
, Err(..), HasErr(..), Errable(..)
, ErrInfo(..)
, explain
, failed
) where
import Control.Applicative as Alternative
import Control.Lens hiding (snoc, cons)
import Control.Monad (guard)
import Data.Foldable
import Data.Maybe (fromMaybe, isJust)
import qualified Data.List as List
import Data.Semigroup
import Data.Set as Set hiding (empty, toList)
import Text.PrettyPrint.ANSI.Leijen as Pretty hiding (line, (<>), (<$>), empty)
import Text.Trifecta.Instances ()
import Text.Trifecta.Rendering
import Text.Trifecta.Delta as Delta
-- | An error document together with the input positions ('Delta's) at
-- which the error arose.
data ErrInfo = ErrInfo
  { _errDoc    :: Doc
  , _errDeltas :: [Delta]
  } deriving (Show)
-- | This is used to report an error. What went wrong, some supplemental docs and a set of things expected
-- at the current location. This does not, however, include the actual location.
data Err = Err
  { _reason      :: Maybe Doc  -- ^ primary description of what went wrong
  , _footnotes   :: [Doc]      -- ^ supplemental documents
  , _expected    :: Set String -- ^ things expected at this point
  , _finalDeltas :: [Delta]    -- ^ positions associated with the failure
  , _ignoredErr  :: Maybe Err  -- ^ NOTE(review): appears to carry an
                               -- error that was set aside; confirm
                               -- intended semantics with callers
  }

makeClassy ''Err
-- | Later errors win: the right operand's reason takes precedence when
-- present, footnotes follow whichever reason was kept (concatenated if
-- neither has a reason), and expected sets, deltas and ignored errors
-- are merged.
instance Semigroup Err where
  Err md mds mes delta1 ig1 <> Err nd nds nes delta2 ig2
    = Err (nd <|> md) (if isJust nd then nds else if isJust md then mds else nds ++ mds) (mes <> nes) (delta1 <> delta2) (ig1 <> ig2)
  {-# INLINE (<>) #-}
instance Monoid Err where
  -- The vacuous error: no reason, no notes, nothing expected.
  mempty = Err Nothing [] mempty mempty Nothing
  {-# INLINE mempty #-}
  mappend = (<>)
  {-# INLINE mappend #-}
-- | Generate a simple 'Err' word-wrapping the supplied message.
failed :: String -> Err
failed msg = Err (Just reason) [] mempty mempty Nothing
  where
    reason = fillSep (map pretty (words msg))
{-# INLINE failed #-}
-- | Convert a location and an 'Err' into a 'Doc'
explain :: Rendering -> Err -> Doc
explain r (Err mm as es _ _)
  | Set.null es = report (withEx mempty)
  | isJust mm   = report $ withEx $ Pretty.char ',' <+> expecting
  | otherwise   = report expecting
  where
    now = spaceHack $ toList es
    -- A lone "" in the expected set stands for a space character;
    -- otherwise empty strings are just dropped.
    spaceHack [""] = ["space"]
    spaceHack xs = List.filter (/= "") xs
    withEx x = fromMaybe (fillSep $ text <$> words "unspecified error") mm <> x
    expecting = text "expected:" <+> fillSep (punctuate (Pretty.char ',') (text <$> now))
    -- Header line, then the rendered source excerpt (if any), then the
    -- footnotes.
    report txt = vsep $ [pretty (delta r) <> Pretty.char ':' <+> red (text "error") <> Pretty.char ':' <+> nest 4 txt]
             <|> pretty r <$ guard (not (nullRendering r))
             <|> as
-- | Monads that can abort with a structured parse error.
class Errable m where
  raiseErr :: Err -> m a
instance Monoid ErrInfo where
  mempty = ErrInfo mempty mempty
  -- Documents stack vertically; the larger delta list wins ('max' on
  -- lists is lexicographic).
  mappend (ErrInfo xs d1) (ErrInfo ys d2) = ErrInfo (vsep [xs, ys]) (max d1 d2)
-- | The result of parsing. Either we succeeded or something went wrong.
data Result a
  = Success a       -- ^ the parsed value
  | Failure ErrInfo -- ^ the accumulated error report
  deriving (Show,Functor,Foldable,Traversable)
-- | A 'Prism' that lets you embed or retrieve a 'Result' in a potentially larger type.
class AsResult s t a b | s -> a, t -> b, s b -> t, t a -> s where
  _Result :: Prism s t (Result a) (Result b)

-- | 'Result' trivially contains itself.
instance AsResult (Result a) (Result b) a b where
  _Result = id
  {-# INLINE _Result #-}
-- | The 'Prism' for the 'Success' constructor of 'Result'
_Success :: AsResult s t a b => Prism s t a b
_Success = _Result . dimap seta (either id id) . right' . rmap (fmap Success) where
  -- Split a Result into the successful value, or a re-wrapped failure
  -- for the non-matching case.
  seta (Success a) = Right a
  seta (Failure e) = Left (pure (Failure e))
{-# INLINE _Success #-}
-- | The 'Prism' for the 'Failure' constructor of 'Result'
_Failure :: AsResult s s a a => Prism' s ErrInfo
_Failure = _Result . dimap seta (either id id) . right' . rmap (fmap Failure) where
  -- Split a Result into the error report, or a re-wrapped success for
  -- the non-matching case.
  seta (Failure e) = Right e
  seta (Success a) = Left (pure (Success a))
{-# INLINE _Failure #-}
-- | Render a successful value via 'show', or the stored error document.
instance Show a => Pretty (Result a) where
  pretty (Success a)   = pretty (show a)
  pretty (Failure err) = pretty (_errDoc err)
-- | 'Failure' dominates; two failures stack their documents vertically
-- and concatenate their deltas.
instance Applicative Result where
  pure = Success
  {-# INLINE pure #-}
  Success f <*> Success a = Success (f a)
  Success _ <*> Failure y = Failure y
  Failure x <*> Success _ = Failure x
  Failure x <*> Failure y = Failure $ ErrInfo (vsep [_errDoc x, _errDoc y]) (_errDeltas x <> _errDeltas y)
  {-# INLINE (<*>) #-}
-- | Choice: any 'Success' wins; two failures merge their reports.
instance Alternative Result where
  Failure x <|> Failure y =
    Failure $ ErrInfo (vsep [_errDoc x, _errDoc y]) (_errDeltas x <> _errDeltas y)
  Success a <|> Success _ = Success a
  Success a <|> Failure _ = Success a
  Failure _ <|> Success a = Success a
  {-# INLINE (<|>) #-}
  empty = Failure mempty
  {-# INLINE empty #-}
|
mikeplus64/trifecta
|
src/Text/Trifecta/Result.hs
|
Haskell
|
bsd-3-clause
| 5,198
|
--------------------------------------------------------------------------------
-- |
-- Module      :  Graphics.Rendering.OpenGL.Raw.EXT.FourTwoTwoPixels
-- Copyright   :  (c) Sven Panne 2015
-- License     :  BSD3
--
-- Maintainer  :  Sven Panne <svenpanne@gmail.com>
-- Stability   :  stable
-- Portability :  portable
--
-- The <https://www.opengl.org/registry/specs/EXT/422_pixels.txt EXT_422_pixels> extension.
--
--------------------------------------------------------------------------------

module Graphics.Rendering.OpenGL.Raw.EXT.FourTwoTwoPixels (
  -- * Enums
  -- Pure re-exports: the enum values themselves are defined in the
  -- generated Tokens module below.
  gl_422_AVERAGE_EXT,
  gl_422_EXT,
  gl_422_REV_AVERAGE_EXT,
  gl_422_REV_EXT
) where

import Graphics.Rendering.OpenGL.Raw.Tokens
|
phaazon/OpenGLRaw
|
src/Graphics/Rendering/OpenGL/Raw/EXT/FourTwoTwoPixels.hs
|
Haskell
|
bsd-3-clause
| 707
|
{-# LANGUAGE DeriveDataTypeable, RecordWildCards #-}
module Main where
import Control.Monad (liftM, when)
import Data.Bits
import Data.Char
import Data.List (foldl', nub)
import Numeric
import System.Console.CmdArgs.Implicit
import System.IO
import System.Directory
import System.Exit
-- | Command-line options, populated by cmdargs (see 'getOpts' and the
-- flag help texts in 'progOpts').
data Opts = Opts
  { hex       :: [String]   -- ^ hex hash literals given on the command line
  , dec       :: [String]   -- ^ decimal hash literals
  , oct       :: [String]   -- ^ octal hash literals
  , bin       :: [String]   -- ^ binary hash literals
  , file_hex  :: [FilePath] -- ^ files with hex hashes (sha1sum(1) format)
  , file_dec  :: [FilePath] -- ^ files with decimal hashes
  , file_oct  :: [FilePath] -- ^ files with octal hashes
  , file_bin  :: [FilePath] -- ^ files with binary hashes
  , stdin_hex :: Bool       -- ^ read hex hashes from STDIN
  , out_hex   :: Bool       -- ^ print the result in hex (the default)
  , out_dec   :: Bool       -- ^ print the result in decimal
  , out_oct   :: Bool       -- ^ print the result in octal
  , out_bin   :: Bool       -- ^ print the result in binary
  } deriving (Data, Typeable, Show, Eq)
-- | The cmdargs option skeleton: default values plus per-flag metadata
-- (short names, value type labels, and help strings).  The help texts
-- are user-facing and must stay verbatim.
progOpts :: Opts
progOpts = Opts
  { hex = [] &= name "x" &= typ "HEXADECIMAL" &= help "Additional hashes in hexadecimal to XOR into (use this flag once for each additional hash; also, do not prefix the hex with `0x'; e.g., use `f3' instead of `0xf3'). Leading zeroes are ignored; trailing non-hex characters (as well as non-leading-hex strings) are also ignored."
  , dec = [] &= typ "DECIMAL" &= help "Like --hex, but in decimal (0-9)."
  , oct = [] &= name "o" &= typ "OCTAL" &= help "Like --hex, but in octal (0-7)."
  , bin = [] &= typ "BINARY" &= help "Like --hex, but in binary (0s and 1s)."
  , file_hex = [] &= name "X" &= typFile &= help "Read hex hashes from a file; the expected format of the file is the output of the sha1sum(1) program. You can use this flag multiple times for multiple files."
  , file_dec = [] &= name "D" &= typFile &= help "Like --file-hex, but read in decimal values."
  , file_oct = [] &= name "O" &= typFile &= help "Like --file-hex, but read in octal values."
  , file_bin = [] &= name "B" &= typFile &= help "Like --file-hex, but read in binary values."
  , stdin_hex = False &= help "Enable reading from STDIN. Only hexadecimal values (sha1sum(1) format) are read in with this option. If no input files are specified with --file-{hex,dec,bin}, and no other hashes are specified with --{hex,dec,bin}, then this flag is automatically turned on. In other words, if no arguments are specified, then panxor expects input from STDIN."
  , out_hex = False &= help "Output the final hash in hexadecimal (without the leading `0x'). If no output format is specified with --out-{hex,dec,bin}, then this flag is turned on automatically."
  , out_dec = False &= help "Output the final hash in decimal."
  , out_oct = False &= help "Output the final hash in octal."
  , out_bin = False &= help "Output the final hash in binary."
  }
  &= details
  [ "Notes:"
  , ""
  , "  Panxor can read in any arbitrarily long hex, decimal, or binary string, and is also compatible with the sha1sum(1) format."
  ]
-- | Build and run the cmdargs parser: attach program metadata and the
-- explicit help/version flags to the option skeleton in 'progOpts'.
getOpts :: IO Opts
getOpts = cmdArgs $ progOpts
  &= summary (_PROGRAM_INFO ++ ", " ++ _COPYRIGHT)
  &= program _PROGRAM_NAME
  &= help _PROGRAM_DESC
  &= helpArg [explicit, name "help", name "h"]
  &= versionArg [explicit, name "version", name "v", summary _PROGRAM_INFO]
-- | Program identity strings used in the --help/--version output.
_PROGRAM_NAME
  , _PROGRAM_VERSION
  , _PROGRAM_INFO
  , _PROGRAM_DESC
  , _COPYRIGHT :: String
_PROGRAM_NAME = "panxor"
_PROGRAM_VERSION = "0.0.2"
_PROGRAM_INFO = concat [_PROGRAM_NAME, " version ", _PROGRAM_VERSION]
_PROGRAM_DESC = "binary XOR multiple hex, decimal, octal, or binary values"
_COPYRIGHT = "(C) Linus Arver 2012"
-- | ANSI terminal colours supported by 'colorize'.
data Color
  = Red
  | Green
  | Yellow
  | Blue
  | Magenta
  | Cyan
  deriving (Show, Eq)

-- | Wrap a string in ANSI escape sequences so terminals render it in
-- the given bold colour, resetting all attributes afterwards.
colorize :: Color -> String -> String
colorize color str = opening ++ str ++ reset
  where
    reset   = "\x1b[0m"
    opening = "\x1b[" ++ sgrCode color
    -- Bold + foreground-colour SGR parameter for each colour.
    sgrCode Red     = "1;31m"
    sgrCode Green   = "1;32m"
    sgrCode Yellow  = "1;33m"
    sgrCode Blue    = "1;34m"
    sgrCode Magenta = "1;35m"
    sgrCode Cyan    = "1;36m"
-- | Validate the parsed options.  No flag combination is currently
-- rejected, so this always yields exit code 0; it exists as the hook
-- where future sanity checks belong.
argsCheck :: Opts -> IO Int
argsCheck Opts{} = return 0
-- | Verify that every file passed via the --file-* flags exists.  The
-- input is the list of per-file existence flags (from
-- 'doesFileExist'); if any file is missing an error is printed to
-- stderr and exit code 1 is returned, otherwise 0.
filesCheck :: [Bool] -> IO Int
filesCheck fs
  | any not fs = errMsgNum "an argument to --file does not exist" 1
  | otherwise  = return 0
-- | Entry point: set up terminal buffering, parse flags, validate the
-- input files, then hand off to 'prog' to XOR everything together.
main :: IO ()
main = do
  hSetBuffering stdout NoBuffering
  hSetBuffering stderr NoBuffering
  hSetEcho stdin False -- disable terminal echo
  opts <- getOpts
  exitOnErr =<< argsCheck opts
  let opts'@Opts{..} = autoOpts opts -- automatically use sane defaults
  -- Fold STDIN (hex, sha1sum format) into a single starting hash;
  -- xorNum of the empty string is 0.
  stdinHashHex <- fmap (xorNum NumHex) (if stdin_hex
                                          then getContents
                                          else return [])
  fs <- mapM doesFileExist (file_hex ++ file_dec ++ file_oct ++ file_bin)
  -- BUG FIX: the existence check used to run (and print its error)
  -- twice; a single check-and-exit is enough.
  exitOnErr =<< filesCheck fs
  prog opts' stdinHashHex file_hex file_dec file_oct file_bin
  where
    -- Abort with the given exit code when it is non-zero.
    exitOnErr e = when (e > 0) . exitWith $ ExitFailure e
-- | Fill in defaults the user left unspecified: when no hash literals
-- and no input files were given at all, fall back to reading hex
-- hashes from STDIN.
autoOpts :: Opts -> Opts
autoOpts opts@Opts{..} = opts { stdin_hex = noExplicitInput }
  where
    noExplicitInput =
      all null
        [ hex, dec, oct, bin
        , file_hex, file_dec, file_oct, file_bin
        ]
-- | Core pipeline: read every input file in its declared base, XOR all
-- hashes together (file hashes, command-line literals, and the hash
-- already accumulated from STDIN), and print the result in the
-- selected output base.
prog
  :: Opts
  -> Integer      -- ^ hash already folded from STDIN
  -> [FilePath]   -- ^ files containing hex hashes
  -> [FilePath]   -- ^ files containing decimal hashes
  -> [FilePath]   -- ^ files containing octal hashes
  -> [FilePath]   -- ^ files containing binary hashes
  -> IO ()
prog Opts{..} stdinHashHex filesHex filesDec filesOct filesBin = do
  -- Read and fold each file with the reader for its base.  (This was
  -- previously written as the function-monad trick
  -- `return . liftM (xorNum b) =<< readFile`, which reduces to
  -- exactly this composition.)
  filesHashHex <- mapM (fmap (xorNum NumHex) . readFile) filesHex
  filesHashDec <- mapM (fmap (xorNum NumDec) . readFile) filesDec
  filesHashOct <- mapM (fmap (xorNum NumOct) . readFile) filesOct
  filesHashBin <- mapM (fmap (xorNum NumBin) . readFile) filesBin
  let
    hashesHex = map (xorNum NumHex) hex
    hashesDec = map (xorNum NumDec) dec
    hashesOct = map (xorNum NumOct) oct
    hashesBin = map (xorNum NumBin) bin
    -- XOR everything into a single value, starting from the STDIN hash.
    hash = foldl' xor stdinHashHex
      ( filesHashHex
        ++ filesHashDec
        ++ filesHashOct
        ++ filesHashBin
        ++ hashesHex
        ++ hashesDec
        ++ hashesOct
        ++ hashesBin
      )
  putStrLn $ showStyle hash []
  where
    -- Pick the numeral renderer from the --out-* flags; hex wins ties
    -- and is also the default when no output flag is set.
    showStyle :: (Integral a, Show a) => a -> ShowS
    showStyle
      | out_hex = showHex
      | out_dec = showInt
      | out_oct = showOct
      | out_bin = showIntAtBase 2 intToDigit
      | otherwise = showHex
-- | The numeric base a hash string is written in.
data NumBase
  = NumHex
  | NumDec
  | NumOct
  | NumBin
  deriving (Eq)

-- Takes a sha1sum(1) formatted string (hex hashes), and XORs all of the hashes in there.
-- Lines are deduplicated ('nub') before folding, each distinct
-- non-empty line is read as a number in the given base (the readers
-- stop at the first non-digit, so trailing filenames are ignored), and
-- unreadable lines count as 0.
xorNum :: NumBase -> String -> Integer
xorNum base input = foldl' xor 0 (map valueOf distinctLines)
  where
    distinctLines = nub (filter (not . null) (lines input))
    -- First successful parse, or 0 when the line has no leading digits.
    valueOf line =
      case reader line of
        ((v, _) : _) -> v
        []           -> 0
    reader =
      case base of
        NumHex -> readHex
        NumDec -> readDec
        NumOct -> readOct
        NumBin -> readInt 2 isBinaryDigit digitToBinaryInt
    isBinaryDigit c = c == '0' || c == '1'
    digitToBinaryInt c
      | c == '0'  = 0
      | c == '1'  = 1
      | otherwise = error $ "digitToBinaryInt: not a binary digit " ++ "`" ++ show c ++ "'"
-- | Print a message to stderr, prefixed with "error: ".
errMsg :: String -> IO ()
errMsg msg = hPutStrLn stderr ("error: " ++ msg)
-- | Print an error message, then hand back the supplied status code so
-- callers can use it directly as an exit code.
errMsgNum :: String -> Int -> IO Int
errMsgNum str num = do
  errMsg str
  return num
-- | Quote a string GNU-style, with a backquote and an apostrophe.
squote :: String -> String
squote s = concat ["`", s, "'"]
|
listx/syscfg
|
script/panxor.hs
|
Haskell
|
bsd-3-clause
| 6,620
|
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE FlexibleContexts #-}
module EFA.Application.Simulation where
import EFA.Application.Utility (quantityTopology)
import qualified EFA.Application.Optimisation.Sweep as Sweep
import EFA.Application.Optimisation.Params (Name(Name))
import qualified EFA.Application.Optimisation.Params as Params
import qualified EFA.Flow.Topology.Absolute as EqSys
import qualified EFA.Flow.Topology.Quantity as FlowTopo
import qualified EFA.Flow.Topology.Index as XIdx
import qualified EFA.Flow.Topology.Variable as Variable
import EFA.Flow.Topology.Absolute ( (.=), (=.=) )
import qualified EFA.Flow.Absolute as EqAbs
import qualified EFA.Equation.Arithmetic as Arith
import qualified EFA.Equation.RecordIndex as RecIdx
import qualified EFA.Equation.Verify as Verify
import EFA.Equation.Result (Result)
import qualified EFA.Graph.Topology.Node as Node
import qualified EFA.Graph.Topology as Topo
import qualified EFA.Signal.Vector as SV
import qualified EFA.Signal.Signal as Sig
import qualified EFA.Signal.Record as Record
import qualified EFA.Signal.Data as Data
import EFA.Signal.Data (Data(Data), Nil,(:>))
import qualified UniqueLogic.ST.TF.System as ULSystem
import qualified Data.Map as Map
import qualified Data.Foldable as Fold
import Data.Map (Map)
import Data.Monoid((<>))
-- | Solve the flow topology for the given power record: turn the plain
-- topology into a quantity topology and feed it the simulation
-- equation system built by 'givenSimulate'.
solve ::
  (Ord c, Show c, Arith.ZeroTestable c, Arith.Constant c,
   SV.Storage t c, SV.Storage t Bool, SV.Singleton t, SV.Len (t d),
   Node.C node, SV.Zipper t, SV.Walker t,
   SV.FromList t) =>
  Topo.Topology node ->
  Map (XIdx.Position node) (Name, Name) ->
  Map Name (Params.EtaFunction c c) ->
  Record.Record s s1 typ t1 (XIdx.Position node) t d c ->
  FlowTopo.Section node (EFA.Equation.Result.Result (Data (t :> Nil) c))
solve topology etaAssign etaFunc powerRecord =
  EqSys.solve (quantityTopology topology) $
  givenSimulate etaAssign etaFunc powerRecord
-- | Equation system for one simulation run: dTime is pinned to a
-- vector of ones with the record's length, efficiency equations come
-- from 'makeEtaFuncGiven', and every signal of the record fixes the
-- power at its position.
givenSimulate ::
  (Ord c, Show c, Arith.Constant c, Node.C node,
   Verify.GlobalVar mode
     (Data (t :> Nil) c)
     (RecIdx.Record RecIdx.Absolute (Variable.Signal node)),
   SV.Zipper t, SV.Walker t, SV.Storage t c, SV.Len (t d),
   SV.FromList t) =>
  Map (XIdx.Position node) (Name, Name) ->
  Map Name (Params.EtaFunction c c) ->
  Record.Record s1 s2 typ t1 (XIdx.Position node) t d c ->
  EqSys.EquationSystem mode node s (Data (t :> Nil) c)
givenSimulate etaAssign etaFunc (Record.Record t xs) =
  -- dTime = 1 for every sample of the record.
  (XIdx.dTime .=
   (Data $ SV.fromList $ replicate (Sig.len t) $ Arith.one))
  <> EqSys.withExpressionGraph (makeEtaFuncGiven etaAssign etaFunc)
  <> Fold.fold (Map.mapWithKey f xs)
  where
    -- Pin each recorded signal as the power at its position.
    f ppos p = XIdx.powerFromPosition ppos .= Sig.unpack p
-- | Generate given equations using efficiency curves or functions for a specified section
--
-- For every assigned edge position, equate the edge's eta with the
-- named efficiency function applied elementwise to the directed power
-- (flowPowerOut or flowPowerIn, depending on the auto-detected flow
-- direction).
makeEtaFuncGiven ::
  (Ord node, Ord d1, Show d1,
   ULSystem.Value mode (Data c d1),
   Arith.Constant d1, Data.ZipWith c, Data.Storage c d1) =>
  Map (XIdx.Position node) (Name, Name) ->
  Map Name (Params.EtaFunction d1 d1) ->
  FlowTopo.Section node (EqAbs.Expression mode vars s (Data c d1)) ->
  EqAbs.VariableSystem mode vars s
makeEtaFuncGiven etaAssign etaFunc topo =
  Fold.fold $
  Map.mapWithKey
    (\se (strP, strN) ->
      Fold.foldMap
        (\(eta, power) ->
          eta
          =.=
          -- Apply the (sign-dispatching) efficiency function to every
          -- data element of the power expression.
          EqAbs.liftF
            (Data.map (absEtaFunction strP strN etaFunc))
            power)
        (FlowTopo.lookupAutoDirSection
          (\flow -> (FlowTopo.flowEta flow, FlowTopo.flowPowerOut flow))
          (\flow -> (FlowTopo.flowEta flow, FlowTopo.flowPowerIn flow))
          id se topo))
  etaAssign
-- | Sweep variant of 'makeEtaFuncGiven': structurally identical, but
-- the efficiency function is lifted over a sweep ('Sweep.map') instead
-- of over plain 'Data'.
makeEtaFuncGiven2 ::
  (Ord node, Ord a, Show a, ULSystem.Value mode (sweep vec a),
   Arith.Sum (sweep vec a), Arith.Constant a,
   Sweep.SweepMap sweep vec a a) =>
  Map (XIdx.Position node) (Name, Name) ->
  Map Name (Params.EtaFunction a a) ->
  FlowTopo.Section node (EqAbs.Expression mode vars s (sweep vec a)) ->
  EqAbs.VariableSystem mode vars s
makeEtaFuncGiven2 etaAssign etaFunc topo =
  Fold.fold $
  Map.mapWithKey
    (\se (strP, strN) ->
      Fold.foldMap
        (\(eta, power) ->
          eta =.= EqAbs.liftF (Sweep.map (absEtaFunction strP strN etaFunc)) power)
        (FlowTopo.lookupAutoDirSection
          (\flow -> (FlowTopo.flowEta flow, FlowTopo.flowPowerOut flow))
          (\flow -> (FlowTopo.flowEta flow, FlowTopo.flowPowerIn flow))
          id se topo))
  etaAssign
-- | Look up the efficiency curves registered under the positive and
-- negative direction names and dispatch on the sign of the argument:
-- non-negative arguments use the positive curve directly, negative
-- arguments use the reversed curve (reciprocal of the curve at the
-- negated argument).  A missing curve only raises an error when the
-- resulting function is actually applied.
absEtaFunction ::
  (Ord a, Show a, Arith.Constant a, Arith.Product b) =>
  Name -> Name -> Map Name (Params.EtaFunction a b) -> a -> b
absEtaFunction strP strN etaFunc =
  let fpos = check strP id $ Map.lookup strP $ Map.map Params.func etaFunc
      fneg = check strN rev $ Map.lookup strN $ Map.map Params.func etaFunc
      -- Efficiency seen from the opposite flow direction.
      rev h = Arith.recip . h . Arith.negate
      -- 'check' partially applies 'maybe': its second argument (id or
      -- rev above) becomes the Just-continuation, and a Nothing lookup
      -- yields a function that errors with the curve name on use.
      check (Name str) =
        maybe (\x -> error ("not defined: '" ++ str ++ "' for " ++ show x))
  in \x -> if x >= Arith.zero then fpos x else fneg x
|
energyflowanalysis/efa-2.1
|
src/EFA/Application/Simulation.hs
|
Haskell
|
bsd-3-clause
| 5,080
|
{-# LANGUAGE ViewPatterns, TupleSections #-}
-- | Easy logging of images and matrices(palnned) for CV research.
-- messages are send via TCP connection as MessagePack objects.
--
-- * ["str", utf8 string]: string
--
-- * ["uni", "u8", shape, raw]: uniform array
--
module Main where
import Control.Concurrent
import Control.Concurrent.Chan
import Control.Concurrent.MVar
import Data.IORef
import Data.List
import Control.Monad
import Graphics.UI.Gtk
import Graphics.UI.Gtk.Gdk.Events as Ev
import Graphics.Rendering.Cairo
import System.IO
import qualified Data.ByteString as BS
import Text.Printf
import Network
import Data.Word
import Data.Array.MArray
import Data.MessagePack as MP
import Data.Attoparsec.ByteString as Atto
import qualified Data.Array.Base as Unsafe
import qualified Codec.Binary.UTF8.String
import qualified Data.Foldable as Fold
import qualified Data.Sequence as Seq
import qualified Data.Set as Set
-- | Shared server state: the log of received messages and the set of
-- currently connected client names (host + port strings).
type Model = (MVar (Seq.Seq Message), MVar (Set.Set String))

-- | One decoded log entry: plain text, or a uniform u8 array given as
-- its shape plus the raw bytes.
data Message =
  StringMessage String |
  UniformArrayMessage [Int] BS.ByteString
  deriving(Show)
-- | Start the TCP log server on port 8001, then build and run the GTK
-- viewer: a scrollable message list with a status bar and a clear
-- button.  A 50 ms timer polls the shared model and refreshes the UI
-- when the log length or client set changes.
main = do
  let port = 8001
  (mseq, mClients) <- runServer port
  initGUI
  window <- windowNew
  vb <- vBoxNew False 1
  status <- hBoxNew False 0
  lb <- labelNew (Just $ printf "listening on port %d" port)
  lbClients <- labelNew (Just "")
  clear <- buttonNewFromStock "gtk-clear"
  -- Clear button empties the whole message log.
  on clear buttonActivated $ void $ swapMVar mseq Seq.empty
  hb <- hBoxNew False 0
  tb <- vBoxNew False 0
  refill mseq tb 0
  adj <- adjustmentNew 0 1 100 1 20 20
  -- Mouse wheel scrolls by a third of a page.
  onScroll window $ \ev->do
    v0 <- adjustmentGetValue adj
    delta <- adjustmentGetPageIncrement adj
    case Ev.eventDirection ev of
      ScrollDown -> adjustmentSetValue adj (v0+delta/3)
      ScrollUp -> adjustmentSetValue adj (v0-delta/3)
      _ -> return ()
    return True
  scr <- vScrollbarNew adj
  on scr valueChanged (adjustmentGetValue adj >>= refill mseq tb)
  boxPackStart hb tb PackGrow 0
  boxPackStart hb scr PackNatural 0
  boxPackStart status lb PackGrow 0
  boxPackStart status clear PackNatural 0
  boxPackStart vb status PackNatural 0
  boxPackStart vb lbClients PackNatural 0
  boxPackStart vb hb PackGrow 0
  set window [containerChild := vb]
  -- Cached counts so the poller only redraws on change.
  nmseq <- newIORef 0
  nmcls <- newIORef 0
  let
    -- Refresh the list when the log grew (or shrank via Clear).
    updateLog=do
      prev_n <- readIORef nmseq
      curr_n <- withMVar mseq (return . Seq.length)
      when (curr_n/=prev_n) $ do
        adjustmentSetUpper adj $ fromIntegral curr_n
        adjustmentGetValue adj >>= refill mseq tb
        writeIORef nmseq curr_n
    -- Refresh the client label when connections change.
    updateClients = do
      prev_n <- readIORef nmcls
      curr_n <- withMVar mClients (return . Set.size)
      when (curr_n/=prev_n) $ do
        str <- withMVar mClients (return . intercalate " / " . Fold.toList)
        labelSetText lbClients str
        writeIORef nmcls curr_n
  timeoutAdd (updateLog >> updateClients >> return True) 50
  onDestroy window mainQuit
  widgetShowAll window
  mainGUI
-- | Re-populate the message box with a 20-message window of the log
-- starting at the (1-based) scrollbar position: destroy the current
-- child widgets, take the slice, and instantiate a view per message.
refill mseq box fromd=do
  let from=floor $ realToFrac fromd-1
  mapM_ widgetDestroy =<< containerGetChildren box
  msgs <- withMVar mseq $ return . Fold.toList . fst . Seq.splitAt 20 . snd . Seq.splitAt from
  mapM_ (\msg->do{l<-instantiateView msg; boxPackStart box l PackNatural 0}) msgs
  widgetShowAll box
-- | Build a widget rendering one log message: a left-aligned label for
-- text, or a drawing area painting the array as an image (magnified 3x
-- when narrower than 15 px, with nearest-neighbour filtering so pixels
-- stay crisp).
instantiateView :: Message -> IO Widget
instantiateView (StringMessage x)=do
  l <- labelNew (Just x)
  set l [miscXalign := 0]
  return $ castToWidget l
instantiateView (UniformArrayMessage shape raw)=do
  l <- drawingAreaNew
  surf <- arrayToSurface shape raw
  -- Reserve at least the image height (head shape = rows), min 25 px.
  widgetSetSizeRequest l (-1) $ max 25 (head shape)
  onExpose l $ \ev -> do
    dw <- widgetGetDrawWindow l
    w <- imageSurfaceGetWidth surf
    renderWithDrawable dw $ do
      save
      when (w<15) $ scale 3 3
      setSourceSurface surf 0 0
      getSource >>= flip patternSetFilter FilterNearest
      paint
      restore
    return True
  return $ castToWidget l
-- | Copy a uniform u8 array into a Cairo RGB24 surface.  Supported
-- shapes: [h,w] (treated as single channel), [h,w,1] (gray replicated
-- to B, G and R), [h,w,3] (RGB swapped into Cairo's BGRA byte order),
-- and any other channel count (first min(c,3) channels copied
-- verbatim).  Unknown shapes log and return a 1x1 surface.
-- NOTE(review): pixels are written assuming the surface stride is
-- exactly w*4 bytes -- confirm for widths where Cairo pads the row
-- stride.
arrayToSurface :: [Int] -> BS.ByteString -> IO Surface
arrayToSurface [h,w] raw = arrayToSurface [h,w,1] raw
arrayToSurface [h,w,c] raw = do
  surf <- createImageSurface FormatRGB24 w h
  arr <- imageSurfaceGetPixels surf -- BGRA packing
  case c of
    1 -> mapM_ (\i -> transfer1 arr i) [0..w*h-1]
    3 -> mapM_ (\i -> transfer3 arr i) [0..w*h-1]
    _ -> mapM_ (\i -> transfer arr i) [0..w*h-1]
  return surf
  where
    -- Generic path: copy up to three channels straight through.
    transfer arr i = do
      mapM_ (\d -> Unsafe.unsafeWrite arr (i*4+d) $ BS.index raw (i*c+d)) [0..min c 3-1]
    -- Grayscale: replicate one byte into B, G and R.
    transfer1 arr i = do
      let v = BS.index raw i
      Unsafe.unsafeWrite arr (i*4+0) v
      Unsafe.unsafeWrite arr (i*4+1) v
      Unsafe.unsafeWrite arr (i*4+2) v
    -- RGB: swap channel order to match the surface's BGRA packing.
    transfer3 arr i = do
      Unsafe.unsafeWrite arr (i*4+0) $ BS.index raw (i*3+2)
      Unsafe.unsafeWrite arr (i*4+1) $ BS.index raw (i*3+1)
      Unsafe.unsafeWrite arr (i*4+2) $ BS.index raw (i*3+0)
arrayToSurface shape _ = do
  putStrLn "unknown shape"
  print shape
  createImageSurface FormatRGB24 1 1
-- | Start the TCP log server: one thread drains the channel into the
-- message log, one thread accepts clients, and every accepted client
-- gets its own reader thread that deregisters the client when the
-- connection ends.  Returns the shared model immediately.
runServer :: Int -> IO Model
runServer port = do
  mLog <- newMVar Seq.empty
  mClients <- newMVar Set.empty
  ch <- newChan
  -- Writer thread: append each decoded message to the log.
  forkIO $ forever $ do
    x <- readChan ch
    modifyMVar_ mLog $ return . (Seq.|> x)
  sock <- listenOn (PortNumber $ fromIntegral port)
  putStrLn "started listening"
  -- Acceptor thread: register the client, then read in a fresh thread.
  forkIO $ forever $ do
    (h, host, port) <- accept sock
    let clientName = printf "%s %s" host (show port)
    modifyMVar_ mClients $ return . Set.insert clientName
    hSetBuffering h NoBuffering
    forkIO $ do
      handleConn ch h (Atto.parse MP.get)
      modifyMVar_ mClients $ return . Set.delete clientName
  return (mLog, mClients)
-- | Read MessagePack objects from the socket in 1024-byte chunks,
-- feeding attoparsec's continuation-style parser; each decoded packet
-- that 'translateMessage' accepts is written to the channel.
-- NOTE(review): a parse failure ends reading on this connection, and
-- EOF handling (hGet yielding an empty chunk) depends on how the
-- parser treats empty input -- confirm the shutdown path.
handleConn ch h parse_cont = BS.hGet h 1024 >>= resolveFull parse_cont
  where
    resolveFull parse_cont x = do
      case parse_cont x of
        Fail _ ctx err -> putStrLn "wrong packet"
        -- Parser needs more bytes: loop with its continuation.
        Partial f -> handleConn ch h f
        -- Complete object: enqueue it (if recognised) and keep
        -- parsing the leftover bytes with a fresh parser.
        Done rest packet -> do
          case translateMessage packet of
            Nothing -> return () -- ignore
            Just msg -> writeChan ch msg
          resolveFull (Atto.parse MP.get) rest
-- | Decode a raw MessagePack array into a 'Message'.  Accepted forms
-- (see the module header): ["str", utf8-bytes] and
-- ["uni", "u8", shape, raw] where the byte count must equal the
-- product of the shape.  Anything else is dropped ('Nothing').
translateMessage :: [MP.Object] -> Maybe Message
translateMessage
  [ObjectRAW (decodeUTF8 -> "str"),
   (ObjectRAW msg)]=Just $ StringMessage $ decodeUTF8 msg
translateMessage
  [ObjectRAW (decodeUTF8 -> "uni"),
   ObjectRAW (decodeUTF8 -> "u8"),
   shape_raw,
   ObjectRAW raw]=do
  shape <- MP.fromObject shape_raw
  -- Reject arrays whose payload size disagrees with the shape.
  if BS.length raw==product shape
    then return $ UniformArrayMessage shape raw
    else Nothing
translateMessage _=Nothing
-- | Decode a UTF-8 encoded ByteString into a String.
decodeUTF8 :: BS.ByteString -> String
decodeUTF8 bs = Codec.Binary.UTF8.String.decode (BS.unpack bs)
|
xanxys/mmterm
|
Main.hs
|
Haskell
|
bsd-3-clause
| 7,101
|
module DTW
(
dtw, cdtw, gdtw
) where
import Data.Array
import Data.List (foldl1')
-- | Pointwise sum of (accumulated distance, path length) pairs.
add :: (Double, Int) -> (Double, Int) -> (Double, Int)
add (d1, n1) (d2, n2) = (d1 + d2, n1 + n2)
-- | Normalise an accumulated distance by its path length.
quo :: (Double, Int) -> Double
quo (total, count) = total / fromIntegral count
-- Unconstrained DTW
-- | Classic O(n*m) dynamic-programming dynamic time warping,
-- normalised by path length: each memo cell holds
-- (accumulated distance, number of matched pairs), and 'quo' divides
-- the two at the end.  An empty second series is delegated to 'gdtw',
-- which raises the empty-series error.
dtw :: Eq a => ( a -> a -> Double ) -> [a] -> [a] -> Double
dtw measure s [] = gdtw measure [] s
dtw measure [] _ = error "Can not compare empty series!"
dtw measure s o = quo $ a!(n,m) where
  n = length s
  s' = listArray (1, n) s
  m = length o
  o' = listArray (1, m) o
  -- Lazily self-referential memo table: border cells start at
  -- infinity, the origin at (0,0), and each interior cell adds the
  -- local cost to the cheapest of its three predecessors.
  -- NOTE(review): interior indices (and (0,0)) appear twice in the
  -- association list; Data.Array leaves duplicate-index values
  -- implementation-defined (GHC keeps the last) -- confirm intended.
  a = array ((0,0),(n,m))
    ([((i,j), (1/0, 0)) | i <- [0..n], j <- [0..m]] ++
     [((0,0), (0, 0))] ++
     [((i,j), (measure (s'!i) (o'!j), 1) `add` minimum [a!(i,j-1), a!(i-1,j-1), a!(i-1,j)])
      | i <- [1..n], j <- [1..m]])
-- Constrained DTW
-- | DTW restricted to a diagonal band of half-width @w@: interior
-- cells are only computed for @|i-j| <= w@, everything outside stays
-- at infinity.  Empty second series is delegated to 'gdtw' (error).
-- NOTE(review): the sentinel/origin counts here are (…, 1) where 'dtw'
-- uses (…, 0) -- confirm whether that off-by-one in the path length is
-- intentional.
cdtw :: Eq a => ( a -> a -> Double ) -> Int -> [a] -> [a] -> Double
cdtw measure w s [] = gdtw measure [] s
cdtw measure w [] _ = error "Can not compare empty series!"
cdtw measure w s o = quo $ a!(n,m) where
  n = length s
  s' = listArray (1, n) s
  m = length o
  o' = listArray (1, m) o
  a = array ((0,0),(n,m))
    ([((i,j), (1/0, 1)) | i <- [0..n], j <- [0..m]] ++
     [((0,0), (0, 1))] ++
     [((i,j), (measure (s'!i) (o'!j), 1) `add` minimum [a!(i,j-1), a!(i-1,j-1), a!(i-1,j)] )
      | i <- [1..n], j <- [max 1 (i-w)..min m (i+w)]])
-- Greedy dtw
-- | Greedy (approximate) DTW: walk both series front-to-back, at each
-- step taking whichever of the three DTW moves (advance left series,
-- advance both, advance right series) is locally cheapest, then
-- normalise the accumulated distance by the path length.
--
-- BUG FIX: the original equation order made @gdtw m [] []@ loop
-- forever (the swap equation @gdtw m s [] = gdtw m [] s@ matched
-- before the empty-series error); empty input on either side now
-- always raises the documented error.
gdtw :: Eq a => ( a -> a -> Double ) -> [a] -> [a] -> Double
gdtw _ [] _ = error "Can not compare empty series!"
gdtw measure s [] = gdtw measure [] s
gdtw measure s o = normalise $ gdtw' measure s o (measure (head s) (head o), 1)
  where
    -- Accumulated distance divided by path length (inlined from 'quo'
    -- so this function is self-contained).
    normalise (r, l) = r / fromIntegral l
    -- One series down to a single element: pair it with every
    -- remaining element of the other series and stop.
    gdtw' m [a] rest (r, l) = (r + foldl1' (+) (map (m a) rest), l + length rest)
    gdtw' m s' [a] acc = gdtw' m [a] s' acc
    -- Both series have at least two elements: take the locally
    -- cheapest of the three look-ahead costs.
    gdtw' m s' o' (r, l)
      | left == best   = gdtw' m (tail s') o' (r + left, l + 1)
      | middle == best = gdtw' m (tail s') (tail o') (r + middle, l + 1)
      | right == best  = gdtw' m s' (tail o') (r + right, l + 1)
      where
        left   = m ((head . tail) s') (head o')
        middle = m ((head . tail) s') ((head . tail) o')
        right  = m (head s') ((head . tail) o')
        best   = minimum [left, middle, right]
|
kirel/detexify-hs-backend
|
src/DTW.hs
|
Haskell
|
mit
| 2,359
|
-- | Test entry point generated by hspec-discover: the OPTIONS_GHC
-- preprocessor flags below run hspec-discover over this directory and
-- emit a @Spec@ module aggregating every *Spec.hs found.
{-# LANGUAGE ScopedTypeVariables
           , GADTs
  #-}
{-# OPTIONS_GHC
  -F
  -pgmF hspec-discover
  -optF --module-name=Spec
  -fno-warn-implicit-prelude
  #-}
|
hanepjiv/make10_hs
|
test/spec/Spec.hs
|
Haskell
|
mit
| 183
|
{-# LANGUAGE PatternGuards #-}
module Lambda.Text.Reduction
( rNF
, rHNF
, reduce1
, isNF
, isHNF
) where
import Lambda.Text.Term
import Lambda.Text.InputSet
import qualified Lambda.Text.Substitution as S
-- | Reduce a term to the requested normal form under the given input
-- set.  Stub: not implemented yet.
reduce2NF :: NormalForm -> InputSet -> Term -> Term
reduce2NF nf ips t = undefined
-- | Flatten a term into a sequence of terms, starting 'seqize' with an
-- empty accumulator.
sequentialize :: InputSet -> Term -> [Term]
sequentialize ips t = seqize ips t []
-- | Worker for 'sequentialize'.  The third argument is presumably an
-- accumulator for the application spine (cf. the @f x y z@ note
-- below) -- TODO confirm; every case is still a stub.
seqize :: InputSet -> Term -> [Term] -> [Term]
seqize ips (Var a) ap = undefined
seqize ips (Abs a t) ap = undefined
seqize ips (App p q) ap | (Var a) <- p = undefined
seqize ips (App p q) ap | (Abs a t) <- p = undefined
seqize ips (App p q) ap | (App t u) <- p = undefined
{-
f x y z
= (f x) y z
= ((f x) y) z
-}
-- ????????????????????????????
-- Q: not in Δ => is a Δ-redex
-- ????????????????????????????
{- ######################################### -}
-- | Reduce a term towards normal form under the input set.
-- NOTE(review): a 'Nothing' result is produced whenever a subterm
-- reduces to itself (a fixpoint), so terms that are already normal
-- report failure rather than themselves -- confirm that this
-- "Nothing = no progress" convention is intended.
rNF :: InputSet -> Term -> Maybe Term
rNF is (Var a) = Just (Var a)
-- Reduce under the binder; unchanged body means no progress.
rNF is (Abs a t) | Just u <- rNF is t = if t == u
                                          then Nothing else Just (Abs a u)
rNF is (Abs a t) | Nothing <- rNF is t = Nothing
-- Application: normalise both sides, then contract the result if it
-- is still a redex, bailing out whenever nothing changed.
rNF is (App p q) | Just s <- rNF is p
                 , Just t <- rNF is q
                 = if (s == p || t == q)
                     then Nothing
                     else if isRedex is (App s t)
                            then let u = reduce1 is (App s t) in
                                 if u == (App s t)
                                   then Nothing
                                   else rNF is u
                            else Just (App s t)
rNF is (App p q) | otherwise
                 = Nothing
-- | Reduction to head normal form.
-- NOTE(review): currently just an alias for the full normal-form
-- reduction 'rNF'; a genuine HNF strategy would stop earlier.
rHNF :: InputSet -> Term -> Maybe Term
rHNF = rNF
-- | Test whether a term is in normal form.  Stub.
isNF :: InputSet -> Term -> Bool
isNF = undefined

-- | Test whether a term is in head normal form.  Stub (yet 'reduce1'
-- already calls it, so any reduction currently bottoms out here).
isHNF :: InputSet -> Term -> Bool
isHNF = undefined
-- | Perform a single reduction step under the input set.
reduce1 :: InputSet -> Term -> Term
reduce1 is (Var a) = Var a
-- Reduce under the binder.
reduce1 is (Abs a t) = Abs a (reduce1 is t)
--
-- The argument is itself a reducible redex: reduce it first.
reduce1 is (App p q) | not $ inInputSet is q
                     , isRedex is q
                     = App p (reduce1 is q)
-- Neither side is reducible: the application is stuck as-is.
reduce1 is (App p q) | not $ inInputSet is q
                     , not $ isRedex is q
                     , not $ isRedex is p
                     = (App p q)
--
-- Beta step: p is an abstraction in head normal form, so substitute
-- the argument into its body (the where-pattern extracts binder/body).
reduce1 is (App p q) | not $ inInputSet is q
                     , not $ isRedex is q
                     , isRedex is p
                     , isHNF is p
                     = S.subs t q a where (Abs a t) = p
--
-- NOTE(review): this equation recurses with *unchanged* arguments, so
-- it diverges whenever it is reached (p not in HNF).  It presumably
-- should make progress on p first, e.g. @App (reduce1 is p) q@ --
-- confirm the intended strategy before changing it.
reduce1 is (App p q) | not $ isHNF is p
                     = reduce1 is (App p q)
--
|
jaiyalas/haha
|
src/Lambda/Text/Reduction.hs
|
Haskell
|
mit
| 2,478
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.