code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
module DefaultParser(run) where
import Transform
import Language.Haskell.Interpreter as I
-- | Prepare the raw argument string for the interpreter: split into lines,
-- quote each line, and render the result as a Haskell list literal.
-- NOTE(review): 'encloseWithQuotes' and 'toStringList' come from "Transform";
-- their behaviour is assumed from the names — confirm against that module.
parse :: String -> String
parse = toStringList.encloseWithQuotes.lines
-- | Render the interpreter's result list one item per line: prefix each
-- item with a newline, join, then drop the leading newline.
-- Partial: 'tail' errors on an empty result list.
listToString = tail.unwords.map ("\n"++)
-- | Interpret @functionStr@ applied to the parsed arguments as a
-- @[String]@ using the hint interpreter (only Prelude in scope) and print
-- the result one item per line.  Aborts via 'error' if interpretation
-- fails.
run :: String -> String -> IO ()
run functionStr processedArgs =
    do
        result <- runInterpreter $ setImports ["Prelude"] >> interpret (functionStr ++ " " ++ parse processedArgs) (as :: [String])
        case result of
            (Right res) -> putStrLn $ listToString res
            (Left err) -> error $ show err
| iostreamer-X/FuncShell | src/DefaultParser.hs | mit | 516 | 0 | 12 | 102 | 176 | 93 | 83 | 13 | 2 |
import HashTablesPerformance hiding (main)
-- | Generate the benchmark fixture: writes a file with 100000 entries via
-- the helper from HashTablesPerformance.
main = writeLargeFile 100000
| zmoazeni/testing-hashtable-performance | GenerateFiles.hs | mit | 73 | 0 | 5 | 9 | 18 | 10 | 8 | 2 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Orville.PostgreSQL.Internal.Expr.LimitExpr
( LimitExpr,
limitExpr,
)
where
import qualified Orville.PostgreSQL.Internal.RawSql as RawSql
import qualified Orville.PostgreSQL.Internal.SqlValue as SqlValue
-- | Type representing a SQL @LIMIT@ clause.  The constructor wraps raw
-- SQL, so values should be built via the 'limitExpr' smart constructor.
newtype LimitExpr
  = LimitExpr RawSql.RawSql
  deriving (RawSql.SqlExpression)
-- | Build a @LIMIT n@ clause for the given maximum row count.  The count
-- is passed as a SQL parameter rather than interpolated into the text.
limitExpr :: Int -> LimitExpr
limitExpr rowCount =
  LimitExpr
    (RawSql.fromString "LIMIT " <> RawSql.parameter (SqlValue.fromInt rowCount))
| flipstone/orville | orville-postgresql-libpq/src/Orville/PostgreSQL/Internal/Expr/LimitExpr.hs | mit | 494 | 0 | 9 | 67 | 101 | 62 | 39 | 13 | 1 |
module Arithmetic.Model where
import Control.Applicative
import Text.Trifecta
-- | Terms of the untyped arithmetic language (TAPL, chapter 3):
-- booleans, conditionals, and Peano-style natural numbers.
data Term =
    TmTrue
  | TmFalse
  | TmIf Term Term Term
  | TmZero
  | TmSucc Term
  | TmPred Term
  | TmIsZero Term
  deriving (Eq, Show)
-- | A term is a numeric value when it is zero or a chain of successors
-- applied to a numeric value.
isNumeric :: Term -> Bool
isNumeric term = case term of
  TmZero -> True
  TmSucc inner -> isNumeric inner
  _ -> False
-- | Values are the two booleans plus the numeric values.
isValue :: Term -> Bool
isValue TmTrue = True
isValue TmFalse = True
isValue t = isNumeric t
-- | Single-step evaluation relation.  Returns 'Nothing' when the term is
-- a normal form (a value, or a stuck term such as @pred true@).
step :: Term -> Maybe Term
step (TmIf TmTrue t _) = Just t
step (TmIf TmFalse _ t) = Just t
step (TmIf t1 t2 t3) =
  TmIf <$> step t1 <*> Just t2 <*> Just t3
step (TmSucc t) = TmSucc <$> step t
step (TmPred TmZero) = Just TmZero
step (TmPred (TmSucc vt))
  | isNumeric vt = Just vt
  | otherwise = TmPred . TmSucc <$> step vt
step (TmIsZero TmZero) = Just TmTrue
step (TmIsZero vt)
  -- E-IsZeroSucc: @iszero (succ nv)@ steps to false.  (The previous
  -- clause already handled @iszero 0@, so a numeric vt here is a
  -- successor.)  The original returned the numeral itself, which is wrong.
  | isNumeric vt = Just TmFalse
  | otherwise = TmIsZero <$> step vt
step _ = Nothing
-- | Evaluate to a normal form by repeatedly applying 'step'.  Returns
-- 'Just' the value when evaluation terminates in a value, and 'Nothing'
-- for stuck (ill-formed) terms, since 'step' yields 'Nothing' for them.
eval :: Term -> Maybe Term
eval t
  | isValue t = Just t
  | otherwise = step t >>= eval
| raduom/tapl | src/Arithmetic/Model.hs | mit | 1,003 | 0 | 9 | 233 | 454 | 219 | 235 | 41 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-applicationautoscaling-scalingpolicy-stepscalingpolicyconfiguration.html
module Stratosphere.ResourceProperties.ApplicationAutoScalingScalingPolicyStepScalingPolicyConfiguration where
import Stratosphere.ResourceImports
import Stratosphere.ResourceProperties.ApplicationAutoScalingScalingPolicyStepAdjustment
-- | Full data type definition for
-- ApplicationAutoScalingScalingPolicyStepScalingPolicyConfiguration. See
-- 'applicationAutoScalingScalingPolicyStepScalingPolicyConfiguration' for a
-- more convenient constructor.
-- | Property record for a CloudFormation step-scaling policy
-- configuration.  Every field is optional ('Maybe'); see the AWS
-- documentation URL above for field semantics.  (Generated-style code:
-- field names embed the full type name to avoid record-field clashes.)
data ApplicationAutoScalingScalingPolicyStepScalingPolicyConfiguration =
  ApplicationAutoScalingScalingPolicyStepScalingPolicyConfiguration
  { _applicationAutoScalingScalingPolicyStepScalingPolicyConfigurationAdjustmentType :: Maybe (Val Text)
  , _applicationAutoScalingScalingPolicyStepScalingPolicyConfigurationCooldown :: Maybe (Val Integer)
  , _applicationAutoScalingScalingPolicyStepScalingPolicyConfigurationMetricAggregationType :: Maybe (Val Text)
  , _applicationAutoScalingScalingPolicyStepScalingPolicyConfigurationMinAdjustmentMagnitude :: Maybe (Val Integer)
  , _applicationAutoScalingScalingPolicyStepScalingPolicyConfigurationStepAdjustments :: Maybe [ApplicationAutoScalingScalingPolicyStepAdjustment]
  } deriving (Show, Eq)
-- | Serialise to the CloudFormation JSON shape.  'catMaybes' drops
-- absent ('Nothing') fields entirely instead of emitting nulls.
instance ToJSON ApplicationAutoScalingScalingPolicyStepScalingPolicyConfiguration where
  toJSON ApplicationAutoScalingScalingPolicyStepScalingPolicyConfiguration{..} =
    object $
    catMaybes
    [ fmap (("AdjustmentType",) . toJSON) _applicationAutoScalingScalingPolicyStepScalingPolicyConfigurationAdjustmentType
    , fmap (("Cooldown",) . toJSON) _applicationAutoScalingScalingPolicyStepScalingPolicyConfigurationCooldown
    , fmap (("MetricAggregationType",) . toJSON) _applicationAutoScalingScalingPolicyStepScalingPolicyConfigurationMetricAggregationType
    , fmap (("MinAdjustmentMagnitude",) . toJSON) _applicationAutoScalingScalingPolicyStepScalingPolicyConfigurationMinAdjustmentMagnitude
    , fmap (("StepAdjustments",) . toJSON) _applicationAutoScalingScalingPolicyStepScalingPolicyConfigurationStepAdjustments
    ]
-- | Constructor for
-- 'ApplicationAutoScalingScalingPolicyStepScalingPolicyConfiguration'
-- containing required fields as arguments.
-- | Default value with every optional field set to 'Nothing'; customise
-- via the lenses below.
applicationAutoScalingScalingPolicyStepScalingPolicyConfiguration
  :: ApplicationAutoScalingScalingPolicyStepScalingPolicyConfiguration
applicationAutoScalingScalingPolicyStepScalingPolicyConfiguration =
  ApplicationAutoScalingScalingPolicyStepScalingPolicyConfiguration
  { _applicationAutoScalingScalingPolicyStepScalingPolicyConfigurationAdjustmentType = Nothing
  , _applicationAutoScalingScalingPolicyStepScalingPolicyConfigurationCooldown = Nothing
  , _applicationAutoScalingScalingPolicyStepScalingPolicyConfigurationMetricAggregationType = Nothing
  , _applicationAutoScalingScalingPolicyStepScalingPolicyConfigurationMinAdjustmentMagnitude = Nothing
  , _applicationAutoScalingScalingPolicyStepScalingPolicyConfigurationStepAdjustments = Nothing
  }
-- Lenses into the optional fields: each pairs the record accessor with a
-- record-update setter.  The short names abbreviate the (very long)
-- accessor names; the URLs link the corresponding CloudFormation docs.
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-applicationautoscaling-scalingpolicy-stepscalingpolicyconfiguration.html#cfn-applicationautoscaling-scalingpolicy-stepscalingpolicyconfiguration-adjustmenttype
aasspsspcAdjustmentType :: Lens' ApplicationAutoScalingScalingPolicyStepScalingPolicyConfiguration (Maybe (Val Text))
aasspsspcAdjustmentType = lens _applicationAutoScalingScalingPolicyStepScalingPolicyConfigurationAdjustmentType (\s a -> s { _applicationAutoScalingScalingPolicyStepScalingPolicyConfigurationAdjustmentType = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-applicationautoscaling-scalingpolicy-stepscalingpolicyconfiguration.html#cfn-applicationautoscaling-scalingpolicy-stepscalingpolicyconfiguration-cooldown
aasspsspcCooldown :: Lens' ApplicationAutoScalingScalingPolicyStepScalingPolicyConfiguration (Maybe (Val Integer))
aasspsspcCooldown = lens _applicationAutoScalingScalingPolicyStepScalingPolicyConfigurationCooldown (\s a -> s { _applicationAutoScalingScalingPolicyStepScalingPolicyConfigurationCooldown = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-applicationautoscaling-scalingpolicy-stepscalingpolicyconfiguration.html#cfn-applicationautoscaling-scalingpolicy-stepscalingpolicyconfiguration-metricaggregationtype
aasspsspcMetricAggregationType :: Lens' ApplicationAutoScalingScalingPolicyStepScalingPolicyConfiguration (Maybe (Val Text))
aasspsspcMetricAggregationType = lens _applicationAutoScalingScalingPolicyStepScalingPolicyConfigurationMetricAggregationType (\s a -> s { _applicationAutoScalingScalingPolicyStepScalingPolicyConfigurationMetricAggregationType = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-applicationautoscaling-scalingpolicy-stepscalingpolicyconfiguration.html#cfn-applicationautoscaling-scalingpolicy-stepscalingpolicyconfiguration-minadjustmentmagnitude
aasspsspcMinAdjustmentMagnitude :: Lens' ApplicationAutoScalingScalingPolicyStepScalingPolicyConfiguration (Maybe (Val Integer))
aasspsspcMinAdjustmentMagnitude = lens _applicationAutoScalingScalingPolicyStepScalingPolicyConfigurationMinAdjustmentMagnitude (\s a -> s { _applicationAutoScalingScalingPolicyStepScalingPolicyConfigurationMinAdjustmentMagnitude = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-applicationautoscaling-scalingpolicy-stepscalingpolicyconfiguration.html#cfn-applicationautoscaling-scalingpolicy-stepscalingpolicyconfiguration-stepadjustments
aasspsspcStepAdjustments :: Lens' ApplicationAutoScalingScalingPolicyStepScalingPolicyConfiguration (Maybe [ApplicationAutoScalingScalingPolicyStepAdjustment])
aasspsspcStepAdjustments = lens _applicationAutoScalingScalingPolicyStepScalingPolicyConfigurationStepAdjustments (\s a -> s { _applicationAutoScalingScalingPolicyStepScalingPolicyConfigurationStepAdjustments = a })
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/ApplicationAutoScalingScalingPolicyStepScalingPolicyConfiguration.hs | mit | 6,192 | 0 | 12 | 351 | 540 | 309 | 231 | 43 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Style where
import Errors.Style
import Player.Board.Style
import Common.CommonClasses
import Common.CssClass
import Clay
import Data.ByteString.Lazy hiding (repeat, ByteString)
import Data.ByteString (ByteString)
import Data.Text.Lazy.Encoding
import Prelude hiding (div, repeat)
-- | Top-level stylesheet for the app: includes the error and board
-- styles, pulls Font Awesome from a CDN, and sets global body/element
-- defaults (tiled background, sans-serif, zeroed margins/padding).
mainStyle :: Css
mainStyle = do
  errorStyle
  boardStyle
  importUrl "https://maxcdn.bootstrapcdn.com/font-awesome/4.5.0/css/font-awesome.min.css"
  body ? do
    backgroundImage $ url "data/background.png"
    backgroundRepeat repeat
    fontFamily [] [sansSerif]
    fontSize (px 12)
    margin nil nil nil nil
    padding nil nil nil nil
  -- Reset div margins globally, and hide anything carrying hideClass.
  div ? margin nil nil nil nil
  star # classSelector hideClass ? display none
-- | 'mainStyle' rendered to CSS text and UTF-8 encoded as a strict
-- 'ByteString', ready to serve.
mainStyleByteString :: ByteString
mainStyleByteString = (toStrict . encodeUtf8 . render) mainStyle
| martin-kolinek/some-board-game | src/Style.hs | mit | 845 | 0 | 12 | 133 | 221 | 113 | 108 | 27 | 1 |
module SodCurses.SodCurses where
import SodCurses.Canvas
import Control.Concurrent
import Control.Monad
import FRP.Sodium
import UI.HSCurses.Curses
import UI.HSCurses.CursesHelper
-- | Takes a function that takes a event that fires on each tick and one which fires on events
-- and produces a tuple of a behaviour of when to stop and a behaviour of what to draw.
-- The system will refresh 10 times a second, drawing the canvas in the second term until
-- the first term is false.
run :: (Event () -> Event Key -> IO (Behaviour Bool, Behaviour Canvas)) -> IO ()
run f = do
    -- 10^5 microseconds = 0.1 s between ticks.
    (eTick,tickID) <- tickEvent $ 10^5
    (eKey,keyID) <- keyEvent
    -- Build the user's behaviours and drive the curses loop with them.
    f eTick eKey >>= uncurry setup
    -- Stop the tick/key producer threads once the loop exits.
    killThread tickID
    killThread keyID
-- | Does the house keeping for making the program run. Runs runLoop as well.
-- | Bracket curses initialisation/teardown around 'runLoop': start
-- curses, hide the cursor, clear the window, run the loop, then restore
-- the cursor and shut curses down.
setup :: Behaviour Bool -> Behaviour Canvas -> IO ()
setup bCont bCanvas = do
  start
  cursSet CursorInvisible
  w <- initScr
  wclear w
  runLoop w bCont bCanvas
  cursSet CursorVisible
  end
-- | Draws the Canvas every 100 miliseconds (roughly) until the first behaviour
-- is false
-- | One frame: sample and draw the canvas, refresh, then loop while the
-- continue-behaviour samples 'True'.  'timeout' (hscurses, milliseconds)
-- paces the loop at roughly 100 ms per iteration.
runLoop :: Window -> Behaviour Bool -> Behaviour Canvas -> IO ()
runLoop w bCont bCanv = do
  erase
  sync (sample bCanv) >>= drawCanvas w
  wRefresh w
  c <- sync $ sample bCont
  when c $ timeout (10^2) >> runLoop w bCont bCanv
-- | Produces and event which ticks every t micro seconds
-- | Event firing every @t@ microseconds, pushed from a forked thread.
-- The returned 'ThreadId' lets the caller kill the producer.
tickEvent :: Int -> IO (Event (), ThreadId)
tickEvent t = do
  (eTick,pushTick) <- sync newEvent
  i <- forkIO $ forever ( sync (pushTick ()) >> threadDelay t)
  return (eTick,i)
-- | Produces and event which fires when a key is pressed
-- | Event firing on each curses key press, pushed from a forked thread
-- that blocks on 'getCh'.  The 'ThreadId' lets the caller kill it.
keyEvent :: IO (Event Key, ThreadId)
keyEvent = do
  (eKey,pushKey) <- sync newEvent
  i <- forkIO $ forever (getCh >>= sync . pushKey)
  return (eKey,i)
| edwardwas/sod-curses | src/lib/SodCurses/SodCurses.hs | mit | 1,812 | 0 | 15 | 403 | 521 | 253 | 268 | 40 | 1 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.Entity
(js_getPublicId, getPublicId, js_getSystemId, getSystemId,
js_getNotationName, getNotationName, Entity, castToEntity,
gTypeEntity)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSRef(..), JSString, castRef)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSRef(..), FromJSRef(..))
import GHCJS.Marshal.Pure (PToJSRef(..), PFromJSRef(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.Enums
-- Generated FFI bindings: each raw JS property read is paired with a
-- MonadIO wrapper that decodes the nullable JSString result via
-- 'fromMaybeJSString'.
foreign import javascript unsafe "$1[\"publicId\"]" js_getPublicId
        :: JSRef Entity -> IO (JSRef (Maybe JSString))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Entity.publicId Mozilla Entity.publicId documentation>
getPublicId ::
            (MonadIO m, FromJSString result) => Entity -> m (Maybe result)
getPublicId self
  = liftIO (fromMaybeJSString <$> (js_getPublicId (unEntity self)))
foreign import javascript unsafe "$1[\"systemId\"]" js_getSystemId
        :: JSRef Entity -> IO (JSRef (Maybe JSString))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Entity.systemId Mozilla Entity.systemId documentation>
getSystemId ::
            (MonadIO m, FromJSString result) => Entity -> m (Maybe result)
getSystemId self
  = liftIO (fromMaybeJSString <$> (js_getSystemId (unEntity self)))
foreign import javascript unsafe "$1[\"notationName\"]"
        js_getNotationName :: JSRef Entity -> IO (JSRef (Maybe JSString))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Entity.notationName Mozilla Entity.notationName documentation>
getNotationName ::
                (MonadIO m, FromJSString result) => Entity -> m (Maybe result)
getNotationName self
  = liftIO
      (fromMaybeJSString <$> (js_getNotationName (unEntity self)))
-- | Drop the first @n@ elements of a list.  Unlike 'drop', a negative
-- count consumes the entire list (the counter never reaches zero), so
-- the original recursive behaviour is kept.
trimInit :: [a] -> Int -> [a]
trimInit xs n
  | n == 0 = xs
  | otherwise = case xs of
      [] -> []
      _ : rest -> trimInit rest (n - 1)
{- Pack consecutive duplicates of list elements into sublists. -}
-- | Pack consecutive duplicates of list elements into sublists
-- (behaves like 'Data.List.group').  Uses 'span' to split off the run
-- in one pass instead of the original's takeWhile + length + re-drop,
-- and conses the run directly instead of @[run] ++ rest@.
packConsecutives :: (Eq a) => [a] -> [[a]]
packConsecutives [] = []
packConsecutives l@(h:_) = sameRun : packConsecutives remainder
  where
    -- sameRun: maximal prefix equal to the head; remainder: the rest.
    (sameRun, remainder) = span (== h) l
| andrewaguiar/s99-haskell | p09.hs | mit | 494 | 0 | 10 | 159 | 213 | 114 | 99 | 10 | 1 |
{-# LANGUAGE Arrows, GADTs, FlexibleContexts #-}
module Automata.Combinators where
import Automata.Types
import Automata.Helpers
import Data.Set (Set)
import qualified Data.Map.Strict as M
import qualified Data.Set as S
import Control.Arrow
import Data.Maybe
import Data.List (foldl')
-- | Build a 'DFA' from an alphabet, start state, accepting states, and
-- per-state transition functions given as an association list.  The state
-- set is exactly the keys of that list; a lookup for a state missing from
-- it makes 'delta' partial (Map '!' error).
makeDFA :: (ArrowChoice arrow, Ord s, Ord a) => [a] -> s -> [s] -> [(s, a -> s)] -> DFA arrow s a
makeDFA alphabet q_0 accepts transitionList = DFA states (S.fromList alphabet) (arr delta) q_0 (S.fromList accepts)
  where map = M.fromList transitionList
        -- NB: this local 'map' shadows Prelude.map within the where block.
        states = M.keysSet map
        delta (s, a) = (map M.! s) a
-- | Build an 'NFA' analogously to 'makeDFA'; transitions map a
-- 'Maybe' symbol ('Nothing' = epsilon) to a list of successor states,
-- converted to a 'Set'.  Same partiality caveat as 'makeDFA' for states
-- missing from the association list.
makeNFA :: (ArrowChoice arrow, Ord s, Ord a) => [a] -> s -> [s] -> [(s,Maybe a -> [s])] -> NFA arrow s a
makeNFA alpha q_0 accepts transitionList = NFA states (S.fromList alpha) (arr delta) q_0 (S.fromList accepts)
  where ts = map (second (>>> S.fromList)) transitionList
        m = M.fromList ts
        states = M.keysSet m
        delta (s,a) = (m M.! s) a
-- | NFA whose transition relation is the reverse of the input automaton's:
-- state @q@ steps on @sigma@ to every state @p@ that could step to @q@ on
-- @sigma@ in the original.  Start and accepting states are carried over
-- unchanged here; see 'switchStartAcceptNFA' / 'reverseFA' for the full
-- language-reversal construction.  The proc loop scans all states, so each
-- transition query is linear in the state count.
reverseTransitionFA :: (FiniteAutomata fa, ArrowChoice (ArrowType fa),
                        Ord (State fa), Ord (Alphabet fa)) =>
                       fa -> NFA (ArrowType fa) (State fa) (Alphabet fa)
reverseTransitionFA auto = NFA states (get_E auto) delta' (get_q_0 auto) (get_F auto)
  where states = get_Q auto
        delta' = proc (q, sigma) ->
          reverseOne -< (S.toList states, q, sigma)
        reverseOne = proc (cQ, q, sigma) -> case cQ of
          [] -> returnA -< S.empty
          p:ps -> do
            (p',_) <- first (currentStateToSet auto) <<< stepFA auto <<< first (toCurrentState auto) -< (p, maybeToList sigma)
            ps' <- reverseOne -< (ps, q, sigma)
            returnA -< if q `S.member` p' then p `S.insert` ps' else ps'
-- | Replace the start/accept structure: the supplied fresh state @q_0@
-- becomes the new start, with epsilon transitions into the old accepting
-- states, and the old start state becomes the sole accepting state.
-- Combined with 'reverseTransitionFA' this yields language reversal.
switchStartAcceptNFA :: (ArrowChoice arrow, Ord s, Ord a) =>
                        s -> NFA arrow s a -> NFA arrow s a
switchStartAcceptNFA q_0 nfa = NFA states (get_E nfa) delta q_0 accepts
  where states = q_0 `S.insert` get_Q nfa
        accepts = S.singleton $ get_q_0 nfa
        delta = proc (q, sigma) -> if q == q_0
          -- From the new start, only epsilon moves (into the old accepts).
          then returnA -< if isNothing sigma then get_F nfa else S.empty
          else arr fst <<< stepFA nfa -< (S.singleton q, maybeToList sigma)
-- | Full language-reversal construction: reverse every transition, then
-- swap start and accepting states using the supplied fresh start state.
reverseFA :: (FiniteAutomata fa, ArrowChoice (ArrowType fa),
              Ord (State fa), Ord (Alphabet fa)) =>
             State fa -> fa -> NFA (ArrowType fa) (State fa) (Alphabet fa)
reverseFA q_0 = reverseTransitionFA >>> switchStartAcceptNFA q_0
-- | Product construction for intersection: states are pairs, the alphabet
-- is the union of both alphabets, and a pair accepts iff both components
-- accept.  Each step runs both automata on the same symbol and takes the
-- Cartesian product of the reachable state sets.
intersectFA :: (FiniteAutomata fa1, FiniteAutomata fa2,
                ArrowChoice (ArrowType fa1), ArrowType fa1 ~ ArrowType fa2,
                Ord (Alphabet fa1), Alphabet fa1 ~ Alphabet fa2,
                Ord (State fa1), Ord (State fa2)) =>
               fa1 -> fa2 -> NFA (ArrowType fa1) (State fa1, State fa2) (Alphabet fa1)
intersectFA auto1 auto2 = NFA newQ newE delta' q_0' newF
  where newQ = get_Q auto1 `cartProd` get_Q auto2
        newE = get_E auto1 `S.union` get_E auto2
        q_0' = (get_q_0 auto1, get_q_0 auto2)
        newF = get_F auto1 `cartProd` get_F auto2
        delta' = ((first (arr fst >>> arr S.singleton) >>> transitionFA (toNFA auto1)) &&&
                  (first (arr snd >>> arr S.singleton) >>> transitionFA (toNFA auto2))) >>>
                 arr (uncurry cartProd)
| terrelln/automata | src/Automata/Combinators.hs | mit | 3,464 | 3 | 19 | 1,056 | 1,342 | 695 | 647 | 59 | 3 |
import qualified Data.Map as M
import Control.Applicative
import Control.Monad.Trans.Class
import Data.Maybe
import System.Directory
import Control.Exception
import qualified System.FilePath as FP
import Data.Version
import Control.Monad
import qualified Text.PrettyPrint.Leijen as P
import Text.PrettyPrint.Leijen ((<//>), (</>), Pretty, pretty, displayS, renderPretty)
import System.Console.Haskeline as H
import Paths_unitcalc
import Eval
import Types
import Parse
import Util
-- | Per-user data directory (e.g. ~/.unitcalc) and the files kept in it:
-- the REPL history and the user's persisted unit definitions.
dataDir = getAppUserDataDirectory "unitcalc"
dataFile filename = (FP.</> filename) <$> dataDir
historyLoc = dataFile "history.txt"
addedUnitsLoc = dataFile "addedUnits.txt"
-- | Entry point: print a banner, ensure the data directory exists, load
-- the standard library plus any previously added user units, then start
-- the haskeline REPL with persistent history.
main = do
  putStrLn $ "unitcalc " ++ showVersion version ++ ", by Nathan Stoddard"
  createDirectoryIfMissing True =<< dataDir
  stdlibFilename <- stdlibLoc
  addedUnitsFilename <- addedUnitsLoc
  historyFilename <- historyLoc
  addedUnitsExists <- doesFileExist addedUnitsFilename
  env <- loadFile stdlibFilename emptyEnv
  -- Layer the user's saved definitions on top of the stdlib environment.
  env <- if addedUnitsExists then join <$> mapM (loadFile addedUnitsFilename) env else pure env
  case env of
    Left err -> putStrLn err
    Right env -> void $ runInputT (Settings noCompletion (Just historyFilename) True) $ repl env
-- | Read-eval-print loop.  'Nothing' from 'replGetInput' (EOF/"exit")
-- ends the loop; otherwise the line is parsed and evaluated.  Successful
-- non-expression statements (i.e. definitions) are appended to the
-- addedUnits file so they persist across sessions.
repl :: Env -> InputT IO Env
repl env = do
  input <- replGetInput Nothing
  case input of
    Nothing -> pure env
    Just input -> do
      let stmt = parseInput "" input parseReplCmd
      case stmt of
        Left err -> lift (putStrLn err) >> repl env
        Right (RLoad path) -> loadFileRepl path env
        Right (RStmt stmt) -> case evalStmt stmt env of
          Left err -> lift (putStrLn err) >> repl env
          Right (res, env') -> do
            -- Persist anything that isn't a bare expression.
            case stmt of
              SExpr {} -> pure ()
              _ -> lift $ do
                addedUnitsFilename <- addedUnitsLoc
                appendFile addedUnitsFilename (input ++ "\n")
            lift $ putStrLn (prettyPrint res)
            repl env'
-- | Load a definitions file into the environment; on failure, report the
-- error and continue the REPL with the old environment.
loadFileRepl path env = do
  env' <- lift $ loadFile path env
  case env' of
    Left err -> lift (putStrLn err) >> repl env
    Right env' -> repl env'
-- | Run an InputT action, returning the given default if the user presses
-- Ctrl-C ('UserInterrupt').  Other async exceptions are reported and also
-- swallowed to the default.
handleCtrlC = H.handle . ctrlC where
  ctrlC :: a -> AsyncException -> InputT IO a
  ctrlC def UserInterrupt = pure def
  ctrlC def e = lift (putStrLn $ "Unknown exception: " ++ show e) >> pure def
-- | Read one (possibly multi-line) input.  @cont@ holds already-read
-- lines of an unfinished entry; while more '(' than ')' have been seen,
-- keep prompting with "... ".  Returns 'Nothing' on EOF, empty input, or
-- the literal "exit".
replGetInput cont = do
  let prompt = if isJust cont then "... " else "> "
  input_ <- handleCtrlC (Just "") $ getInputLine prompt
  if isNothing input_ then pure Nothing else do
    let input = fromJust input_
    if null input || input == "exit" then pure Nothing else do
      let
        input' = case cont of
          Just cont -> cont ++ "\n" ++ input
          Nothing -> input
      -- Unbalanced open parens mean the entry continues on the next line.
      if countParens input' > 0 then replGetInput (Just input')
        else pure (Just input')
-- | Net parenthesis balance of a string: +1 per '(' and -1 per ')';
-- all other characters are ignored.  A positive result means unclosed
-- parentheses remain.  (Rewrites the original's per-character recursion
-- with ``elem "("`` single-character tests as a fold with '==' guards.)
countParens :: String -> Int
countParens = foldr tally 0
  where
    tally '(' n = n + 1
    tally ')' n = n - 1
    tally _   n = n
| nstoddard/unitcalc | Main.hs | mit | 3,149 | 0 | 28 | 930 | 993 | 486 | 507 | -1 | -1 |
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE OverloadedStrings #-}
module ZoomHub.API.Types.NonRESTfulResponse
( NonRESTfulResponse,
mkNonRESTful200,
mkNonRESTful301,
mkNonRESTful400,
mkNonRESTful404,
mkNonRESTful503,
)
where
import Data.Aeson (ToJSON, object, toJSON, (.=))
import qualified Data.Text as T
import Network.HTTP.Types.Status
( Status,
badRequest400,
movedPermanently301,
notFound404,
ok200,
serviceUnavailable503,
statusCode,
statusMessage,
)
import Network.URI (URI)
import ZoomHub.Utils (lenientDecodeUtf8)
-- | A response in the legacy (non-RESTful) envelope: the HTTP-like
-- status information is embedded in the JSON body itself.  The
-- existential @ToJSON a@ constraint on the constructor guarantees the
-- payload can always be serialised.
data NonRESTfulResponse a = ToJSON a =>
  NonRESTfulResponse
  { nrrStatus :: Status,
    nrrBodyKey :: String,
    nrrBody :: a,
    nrrRedirectLocation :: Maybe URI
  }
-- | 200 OK response with the payload stored under the given body key.
mkNonRESTful200 :: ToJSON a => String -> a -> NonRESTfulResponse a
mkNonRESTful200 key body =
  NonRESTfulResponse
    { nrrStatus = ok200,
      nrrBodyKey = key,
      nrrBody = body,
      nrrRedirectLocation = Nothing
    }
-- | 301 Moved Permanently response carrying both a payload and the
-- redirect target URI.
mkNonRESTful301 :: ToJSON a => String -> a -> URI -> NonRESTfulResponse a
mkNonRESTful301 key body redirectLocation =
  NonRESTfulResponse
    { nrrStatus = movedPermanently301,
      nrrBodyKey = key,
      nrrBody = body,
      nrrRedirectLocation = Just redirectLocation
    }
-- | 400 Bad Request error response.
mkNonRESTful400 :: String -> NonRESTfulResponse String
mkNonRESTful400 = mkErrorResponse badRequest400

-- | 404 Not Found error response.
mkNonRESTful404 :: String -> NonRESTfulResponse String
mkNonRESTful404 = mkErrorResponse notFound404

-- | 503 Service Unavailable error response.
mkNonRESTful503 :: String -> NonRESTfulResponse String
mkNonRESTful503 = mkErrorResponse serviceUnavailable503

-- | Shared builder for error responses: message under the "error" body
-- key, no redirect.  Not exported (the module has an explicit export
-- list), so this stays an internal helper.
mkErrorResponse :: Status -> String -> NonRESTfulResponse String
mkErrorResponse status message =
  NonRESTfulResponse
    { nrrStatus = status,
      nrrBodyKey = "error",
      nrrBody = message,
      nrrRedirectLocation = Nothing
    }
-- JSON
-- JSON
-- | Flatten the envelope into a JSON object: numeric status, decoded
-- status text, the payload under its configured body key, and the
-- redirect location (rendered via 'show', or JSON null when absent).
instance ToJSON a => ToJSON (NonRESTfulResponse a) where
  toJSON r =
    object
      [ "status" .= statusCode status,
        "statusText" .= lenientDecodeUtf8 (statusMessage status),
        bodyKey .= toJSON (nrrBody r),
        "redirectLocation" .= redirectLocation
      ]
    where
      bodyKey = T.pack (nrrBodyKey r)
      redirectLocation = toJSON (show <$> nrrRedirectLocation r)
      status = nrrStatus r
| zoomhub/zoomhub | src/ZoomHub/API/Types/NonRESTfulResponse.hs | mit | 2,403 | 0 | 11 | 544 | 549 | 317 | 232 | 73 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
module Text.Greek.Mounce.Euphony where
import Text.Greek.Grammar
import Text.Greek.Mounce.Phonology
import Text.Greek.Mounce.Quote
-- | Mounce's vowel-contraction ("euphony") rules, each cited by section
-- number.  The @[rules| ... |]@ quasiquoter parses lines of the form
-- @x + y } z@, meaning vowels x and y contract to z.  Comments must stay
-- outside the quasiquote brackets — text inside them is data.
-- NOTE(review): §2.7's description says ε/η before α contract to η, but
-- its rules list @ε + α } α@ and @η + α } α@ (while §2.7b gives
-- @ε + α } η@) — verify against Mounce before relying on §2.7's rules.
mounceEuphony :: [Cited Euphony]
mounceEuphony =
  [mounce § "2.2c" $
    Euphony "ι followed by long α, η, ω subscripts" [rules|
      α + ι } ᾳ
      η + ι } ῃ
      ω + ι } ῳ
    |]
  ,mounce § "2.3" $
    Euphony "Two like vowels form their common long" [rules|
      α + α } α
      ι + ι } ι
      υ + υ } υ
      η + η } η
      ω + ω } ω
      ε + η } η
      η + ε } η
      ο + ω } ω
      ω + ο } ω
    |]
  , mounce § "2.4" $
    Euphony "Exceptions to §2.3" [rules|
      ε + ε } ει
      ο + ο } ου
    |]
  , mounce § "2.4a" $
    Euphony "ει is formed by εε" [rules|
      ε + ε } ει
    |]
  , mounce § "2.4b" $
    Euphony "ου is formed by εο" [rules|
      ε + ο } ου
      ο + ε } ου
      ο + ο } ου
    |]
  , mounce § "2.5" $
    Euphony "ο or ω overcome α, ε, or ὴ regardless of the order, and form ω" [rules|
      α + ο } ω
      ο + η } ω
    |]
  , mounce § "2.6" $
    Euphony "exceptions to 2.5" [rules|
      ε + ο } ου
      ο + ε } ου
    |]
  , mounce § "2.7" $
    Euphony "If α comes before ε or η, they will contract to α. If ε or η comes before α, they will contract to η ('progressive assimilation,' §2.2)" [rules|
      α + ε } α
      α + η } α
      ε + α } α
      η + α } α
    |]
  , mounce § "2.7a" $
    Euphony "α is formed from αε" [rules|
      α + ε } α
    |]
  , mounce § "2.7b" $
    Euphony "η is formed from εα" [rules|
      ε + α } η
    |]
  , mounce § "2.13a" $
    Euphony "Single vowel + diphthong (beginning with the same vowel as the single vowel)" [rules|
      α + αι } αι
      α + αι } ᾳ
      ο + ου } ου
    |]
  , mounce § "2.13b" $
    Euphony "Single vowel + diphthong (beginning with a vowel different from the single vowel)" [rules|
      ε + οι } οι
      ο + ει } οι
      ο + ῃ } οι
    |]
  , mounce § "14.1" $
    Euphony "Aspiration" [rules|
      π + ῾ } φ῾
      κ + ῾ } χ῾
      τ + ῾ } θ῾
    |]
  , mounce § "14.4" $
    Euphony "labial + θ" [rules|
      π + θ } φθ
      β + θ } φθ
      ψ + θ } φθ
    |]
  , mounce § "14.5" $
    Euphony "velar + θ" [rules|
      κ + θ } χθ
      γ + θ } χθ
      ξ + θ } χθ
    |]
  ]
| scott-fleischman/greek-grammar | haskell/greek-grammar/src/Text/Greek/Mounce/Euphony.hs | mit | 2,542 | 0 | 7 | 901 | 346 | 211 | 135 | 38 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
module BaseCompat (
module X
) where
import Intro as X
import Control.Applicative as X
import Control.Arrow as X hiding (first, second)
import Control.Category as X
import Control.Concurrent as X
import Control.Exception as X
import Control.Exception.Base as X
import Control.Monad as X hiding (fail)
import Control.Monad.Fix as X
import Control.Monad.IO.Class as X
import Control.Monad.ST as X
import Control.Monad.ST.Unsafe as X
import Control.Monad.Zip as X
import Data.Bifunctor as X
import Data.Bits as X
import Data.Bool as X
import Data.Char as X
import Data.Coerce as X
import Data.Complex as X
import Data.Data as X
import Data.Dynamic as X
import Data.Either as X
import Data.Eq as X
import Data.Fixed as X
import Data.Foldable as X
import Data.Function as X hiding ((.), id)
import Data.Functor as X
import Data.Functor.Classes as X
import Data.Functor.Compose as X
import Data.Functor.Identity as X
import Data.IORef as X
import Data.Int as X
import Data.Ix as X
import Data.List as X hiding (scanl1, scanr1, map, cycle, head, init, last, tail)
import Data.Maybe as X
import Data.Monoid as X hiding (First(..), Last(..), (<>))
import Data.Ord as X
import Data.Proxy as X
import Data.Ratio as X
import Data.STRef as X
import Data.Semigroup as X
import Data.String as X
import Data.Traversable as X
import Data.Tuple as X
import Data.Type.Bool as X
import Data.Type.Coercion as X
import Data.Type.Equality as X hiding (trans, sym)
import Data.Unique as X
import Data.Version as X
import Data.Void as X
import Data.Word as X
import Numeric as X
import Numeric.Natural as X
import Prelude as X hiding ((.), id, map, putChar, putStrLn, putStr, getContents, getLine, print, getChar, appendFile, readFile, writeFile, fail, show, undefined, scanl1, scanr1, cycle, head, init, last, tail)
import System.CPUTime as X
import System.Console.GetOpt as X hiding (Option)
import System.Environment as X
import System.Exit as X
import System.IO as X hiding (putChar, putStrLn, putStr, getContents, getLine, print, getChar, appendFile, readFile, writeFile)
import System.IO.Error as X
import System.IO.Unsafe as X
import System.Info as X
import System.Mem as X
import System.Mem.StableName as X
import System.Mem.Weak as X
import System.Timeout as X
import Text.ParserCombinators.ReadP as X hiding (get, (+++), optional, (<++), look, pfail, many, choice, option)
import Text.ParserCombinators.ReadPrec as X hiding (get, (+++), lift)
import Text.Printf as X
import Text.Read as X hiding (readMaybe, get, lift, EOF, (+++))
import Text.Show as X hiding (show)
import Unsafe.Coerce as X
import Control.Monad.Fail as X hiding (fail)
import Data.Kind as X
| minad/intro | test/BaseCompat.hs | mit | 2,684 | 0 | 6 | 396 | 822 | 587 | 235 | 77 | 0 |
module Lackey.Internal.Header where
-- | An HTTP header wrapped in a newtype for type safety.
-- NOTE(review): only the raw string is stored; whether it is the header
-- name or a full "Name: value" line is not visible here — confirm at the
-- call sites.
newtype Header = Header String
  deriving (Eq, Ord, Read, Show)
| bitemyapp/lackey | library/Lackey/Internal/Header.hs | mit | 103 | 0 | 6 | 18 | 34 | 20 | 14 | 3 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Poi.Migrations.Utils where
import Control.Exception
import Data.Char (toLower)
import Data.List (findIndex, foldl', sortOn)
import qualified Data.Map.Strict as DM
import Data.Maybe (mapMaybe)
import Data.Monoid
import qualified Data.Ord
import Data.Yaml (FromJSON(..), (.:))
import qualified Data.Yaml as Y
import Data.Yaml.Config (loadYamlSettings, ignoreEnv)
import Database.PostgreSQL.Simple (ConnectInfo(..))
import Options.Applicative
import Poi.Migrations.Types
import System.Console.ANSI
import System.Directory (getDirectoryContents)
import System.Environment (getEnv)
-- Argument parsers mapping each CLI subcommand to its 'Mode' value.
up :: Parser Mode
up = pure Up
down :: Parser Mode
down = pure Down
redo :: Parser Mode
redo = pure Redo
prepare :: Parser Mode
prepare = pure Prepare
-- | @new <name> [--sql | --yaml | --hs]@; the file type defaults to
-- a Haskell migration when no flag is given.
new :: Parser Mode
new = New <$> argument str (metavar "name")
          <*> ( flag' Sql (long "sql" <> help "Create a SQL migration file")
            <|> flag' Yaml (long "yaml" <> help "Create an Yaml migration file")
            <|> flag Hs Hs (long "hs" <> help "Default. Creates a haskell migration file."))
status :: Parser Mode
status = pure Status
-- Trivial parsers for the three migration file formats.
sqlFileType, hsFileType, yamlFileType :: Parser FileType
sqlFileType = pure Sql
hsFileType = pure Hs
yamlFileType = pure Yaml
-- | Subcommand dispatcher: up / down / redo / new / prepare / status.
modeOpts :: Parser Mode
modeOpts = subparser
        ( command "up" (info up (progDesc "Runs migration up"))
       <> command "down"(info down (progDesc "Runs migration down"))
       <> command "redo" (info redo (progDesc "Redoes the last migration run"))
       <> command "new" (info new (progDesc "Create a new migration in migrations directory"))
       <> command "prepare" (info prepare (progDesc "Creates schema_migrations table, migrations directory and Migrations.hs file"))
       <> command "status" (info status (progDesc "Shows the status of Migrations that are run."))
        )
-- | Full migrate-command parser: a 'Mode', an @--env@ option (defaulting
-- to "development"), and an optional @--version@ fuzzy-match string
-- (empty string is normalised to 'Nothing' by @gg@).
migrateOpts :: Parser MigrateArgs
migrateOpts = MigrateArgs <$> modeOpts
              <*> strOption
              ( long "env"
              <> short 'e'
              <> help "production or development environment"
              <> metavar "ENVIRONMENT"
              <> showDefault
              <> value "development")
              <*> (gg <$> strOption
              ( long "version"
              <> short 'v'
              <> help "Fuzzy matches the specific migration to run."
              <> metavar "VERSION"
              <> value ""))
  where
    -- Treat the empty default as "no version given".
    gg :: String -> Maybe String
    gg "" = Nothing
    gg a = Just a
-- | Wrap the migrate arguments into the top-level 'Options'.
options :: Parser Options
options = Options <$> migrateOpts
-- | Top-level parser: currently only the @migrate@ subcommand exists.
poiOpts :: Parser Options
poiOpts = subparser
  ( command "migrate" (info (options <**> helper) (fullDesc <> progDesc "Runs migrations")))
-- | Parse the full poi command line and hand the result to a handler.
poiArgs :: (Options -> IO ()) -> IO ()
poiArgs f = f =<< execParser opts
  where
    opts = info (poiOpts <**> helper)
      ( fullDesc
     <> progDesc "Has helper functions"
     <> header "Poi - a tool to organize web apps in haskell" )
-- | Like 'poiArgs' but for the migrate arguments alone.
migArgs :: (MigrateArgs -> IO ()) -> IO ()
migArgs f = f =<< execParser opts
  where
    opts = info (migrateOpts <**> helper)
      ( fullDesc
      <> progDesc "Runs migrations")
-- | Entry point: parse CLI options and dispatch to 'greet'.
main :: IO ()
main = poiArgs greet
-- | Print a human-readable description of the parsed options: the mode,
-- the environment name, and the optional version filter.
greet :: Options -> IO ()
greet (Options (MigrateArgs mode env ver)) = do
  case mode of
    Up -> putStrLn "Up"
    Down -> putStrLn "Down"
    Redo -> putStrLn "Redo"
    New x f -> putStrLn ("New " ++ x ++ " " ++ show f)
    Prepare -> putStrLn "Prepare"
    Status -> putStrLn "Status"
  putStrLn env
  putStrLn (show ver)
--
-- DB Config
--
-- | PostgreSQL connection settings read from config.yml.
data DbConfig = DbConfig { getConnectInfo :: ConnectInfo } deriving (Eq, Show)
--
-- Config
--
-- | Name of the application environment (e.g. "development").
type EnvName = String
-- | Full application configuration: the database settings plus the
-- environment name the config section was read from.
data Config = Config
  { getDbConfig :: DbConfig
  , getEnvName :: EnvName
  } deriving (Eq, Show)
-- | Parse the "database" section: host/port/username/password/name map
-- directly onto postgresql-simple's 'ConnectInfo'.
instance FromJSON DbConfig where
  parseJSON (Y.Object v) = DbConfig
    <$> (ConnectInfo
    <$> v .: "host"
    <*> (fromInteger <$> v .: "port")
    <*> v .: "username"
    <*> v .: "password"
    <*> v .: "name")
  parseJSON x = fail ("not an object: " ++ show x)
-- | Parse a config section.  The environment name is deliberately left
-- 'undefined' here and patched in by 'readConfigForEnv' — forcing
-- 'getEnvName' on a freshly parsed value would crash.
instance FromJSON Config where
  parseJSON (Y.Object v) = Config
    <$> v .: "database"
    <*> (pure undefined) -- will be filled from the read config function
  parseJSON _ = fail $ "unable to parse config.yml"
-- | Load config.yml and pick the section for the given environment,
-- filling in 'getEnvName'.  Calls 'error' when the section is missing.
readConfigForEnv :: EnvName -> IO Config
readConfigForEnv ename = do
  settingsMap <- loadYamlSettings ["config.yml"] [] ignoreEnv :: IO (DM.Map String Config)
  case DM.lookup ename settingsMap of
    Just c -> return (c { getEnvName = ename})
    Nothing -> error $ "Config section for APP_ENV='" ++ ename ++ "' not found. Have you mis-spelt it? Does the section exist in config.yml?"
-- | Read the config for the environment named by $APP_ENV.
readConfig :: IO Config
readConfig = readEnvName >>= readConfigForEnv
-- | Read $APP_ENV; throws if the variable is unset.
readEnvName :: IO EnvName
readEnvName = (getEnv "APP_ENV")
-- | Like 'readConfig', but fall back to the given environment name when
-- $APP_ENV is unset (reading it throws, which we catch here).
readConfigWithDefault :: String -> IO Config
readConfigWithDefault ename = do
  result <- try $ getEnv "APP_ENV" :: IO (Either SomeException String)
  either (\_ -> readConfigForEnv ename)
         (\en -> readConfigForEnv en)
         result
-- | Extract the raw PostgreSQL 'ConnectInfo' from a 'Config'.
dbConfig :: Config -> ConnectInfo
dbConfig = getConnectInfo . getDbConfig
-- | Extract the timestamp from a migration file name of the form
-- @M\<timestamp\>_\<name\>@: everything between the leading \'M\' and
-- the first underscore.  Calls 'error' for names not starting with
-- \'M\' and for the empty string (behaviour preserved; callers filter
-- first).  Uses an operator section instead of the original lambda.
timeStampFromFileName :: String -> String
timeStampFromFileName name@(x:xs)
  | x == 'M' = takeWhile (/= '_') xs
  | otherwise = error ("File not properly named " ++ name ++ ".")
timeStampFromFileName _ = error ("Invalid file name")
-- | Find the migration file in ./Migrations whose embedded timestamp
-- equals @name@, by index-matching the timestamp list back into the
-- file list.  Returns 'Nothing' when no file matches.
-- NOTE(review): the lambda pattern @(x:_)@ is partial; it is safe only
-- because directory entries are never empty strings — confirm if the
-- file list ever comes from elsewhere.
fileNameFromTimeStamp :: String -> IO (Maybe String)
fileNameFromTimeStamp name = do
  files <- getDirectoryContents "./Migrations"
  let xs = map (\f@(x:_) -> if x /= 'M' then "" else timeStampFromFileName f) files
      ys = findIndex (== name) xs
  return (fmap (files !!) ys)
-- | Score a case-insensitive fuzzy match of pattern @p@ against text @t@.
-- Returns @Just (t, score)@ — the full text plus its score — or 'Nothing'
-- for an empty pattern.  Runs of consecutively matched pattern characters
-- score exponentially more (@cur' = cur * 2 + 1@).
match :: String -- ^ The pattern to match against
      -> String -- ^ The value containing the text to search in
      -> Maybe (String, Int)
match p t =
  if null p then Nothing else Just (t, totalScore)
  where
    (s', pattern') = let f = map toLower
                     in (f t, f p)
    -- Fold state: (total score, current streak bonus, consumed text,
    -- remaining pattern).  Only the total is used afterwards.
    (totalScore, _, _, _) =
      foldl'
        (\(tot, cur, res, pat) c ->
           case splitAtPrefix pat of
             Nothing -> (tot, 0, res ++ [c], pat)
             Just (x, xs) ->
               if x == c
                 then let cur' = cur * 2 + 1
                      in (tot + cur', cur', res ++ [c], xs)
                 else (tot, 0, res ++ [c], pat)
        ) (0, 0, "", pattern') s'
-- | Keep only migrations whose name fuzzy-matches @pat@, ordered by
-- descending score and collapsed with 'bestMatch'.
fuzzyFilter :: String -> [Migration] -> [Migration]
fuzzyFilter pat xs =
  map fst . bestMatch . filter ((> 0) . snd) . sortOn (Data.Ord.Down . snd) $ scored
  where
    scored = mapMaybe score xs
    score m@(t, _) = (\(_, s) -> (m, s)) <$> match pat t
-- | Collapse a scored list pairwise: when the first score strictly beats
-- the second, only the first entry survives; otherwise both are kept and
-- the remainder is processed the same way.
bestMatch :: [(a, Int)] -> [(a, Int)]
bestMatch [] = []
bestMatch [only] = [only]
bestMatch (a : b : rest)
  | snd a > snd b = [a]
  | otherwise = a : b : bestMatch rest
-- | Split a list into its head and tail, if non-empty (same behaviour as
-- 'Data.List.uncons').
splitAtPrefix :: [t] -> Maybe (t, [t])
splitAtPrefix list =
  case list of
    [] -> Nothing
    (y:ys) -> Just (y, ys)
-- | Print one line in the given ANSI colour, then reset all terminal
-- attributes so subsequent output is uncoloured.
colorPutStrLn :: Color -> String -> IO ()
colorPutStrLn color str = do
  setSGR [ SetColor Foreground Dull color
         , SetConsoleIntensity NormalIntensity
         ]
  putStrLn str
  setSGR []
| pranaysashank/poi | poi-bin/src/Poi/Migrations/Utils.hs | mit | 7,991 | 0 | 19 | 2,753 | 2,374 | 1,241 | 1,133 | 175 | 6 |
{-# LANGUAGE OverloadedStrings #-}
module Main (main) where
import qualified Data.ByteString.Lazy as BSL
import qualified Data.Text as DT
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.Encoding as TLE
import qualified TemplateGen.Hash as H
import qualified TemplateGen.PageContext as PC
import qualified TemplateGen.Resource as R
import qualified TemplateGen.Settings as S
import qualified TemplateGen.SiteInfo as SI
import qualified TemplateGen.TemplateContext as TC
import qualified TemplateGen.Url as U
import qualified TemplateGen.UrlString as US
import qualified Templates as T
import Text.Blaze.Html.Renderer.String (renderHtml)
import qualified Text.Lucius as L
import qualified Yesod.Static as YS
-- A note about URLs as handled by the site generator
-- Internal relative URLs
-- Example: "/about" is converted to "/<root>/about"
-- Internal absolute URLs
-- Example: "//about" is converted to "/about"
-- External absolute URLs
-- Example: "http://foo/bar" is left as is
-- | Route-to-URL renderer handed to the HTML template.  Only internal
-- absolute URLs are produced here; the last argument (route parameters)
-- is ignored for these static routes.
renderHtmlUrl :: Show a => SI.SiteInfo -> U.Url -> a -> DT.Text
renderHtmlUrl _ U.AboutR _ = "//about" -- an internal absolute URL
renderHtmlUrl _ U.HomeR _ = "//" -- an internal absolute URL
-- | Read each named file under @dir@ and pair its static URL with the
-- base64-md5 hash of its contents (used for cache-busting).
readResources :: FilePath -> [FilePath] -> IO [R.Resource]
readResources dir = mapM $ \x ->
  let relativePath = dir ++ "/" ++ x
  in readResource (staticFilePath relativePath) (staticUrl relativePath)
  where
    readResource :: FilePath -> US.UrlString -> IO R.Resource
    readResource path url = do
      bs <- BSL.readFile path
      return $ R.Resource url (Just $ H.Hash (YS.base64md5 bs))
-- | Filesystem location of a static asset, relative to the project root.
staticFilePath :: FilePath -> FilePath
staticFilePath = ("seattlehaskell-org/static/" ++)
-- | URL under which a static asset is served (internal absolute URL).
staticUrl :: FilePath -> US.UrlString
staticUrl = ("//static/" ++)
-- Configuration: should store this information externally
-- | Stylesheets shipped with the site, read from the css/ directory.
localStylesheetFileNames :: [FilePath]
localStylesheetFileNames = ["bootstrap.css", "haskell.font.css"]
-- | Scripts loaded from external CDNs (referenced as-is, never hashed).
-- The signature now precedes the binding, matching every other
-- declaration in this module.
externalScriptUrls :: [US.UrlString]
externalScriptUrls = ["https://ajax.googleapis.com/ajax/libs/jquery/1.11.3/jquery.min.js"]
-- | Scripts shipped with the site, read from the js/ directory.
localScriptFileNames :: [FilePath]
localScriptFileNames = ["bootstrap.min.js", "ie10-viewport-bug-workaround.js"]
-- End of configuration
-- Internal absolute URL
-- | Render the default stylesheet template and hash its UTF-8 bytes; the
-- hash names the generated file (see 'mkDefaultCssResource').
-- NOTE(review): the template is applied to 'undefined', so it presumably
-- never inspects its argument — confirm before changing the template.
generateDefaultCss :: (TL.Text, H.Hash)
generateDefaultCss =
  let
    text = (L.renderCss . T.defaultCssTemplate) undefined
    bs = TLE.encodeUtf8 text
    hash = H.Hash (YS.base64md5 bs)
  in (text, hash)
-- | Resource entry for the auto-generated stylesheet.  The content hash is
-- embedded in the file name itself, so no separate hash is attached.
mkDefaultCssResource :: IO R.Resource
mkDefaultCssResource =
  let (_, hash) = generateDefaultCss
      url = "//static/tmp/autogen-" ++ H.toString hash ++ ".css"
  in return (R.Resource url Nothing)
-- | Render the full HTML page template to stdout using hard-coded site
-- settings and the given site info.
-- NOTE(review): the "$title$" placeholder is presumably substituted later
-- by whatever consumes the generated template — confirm downstream.
generateHtmlTemplateFile :: SI.SiteInfo -> IO ()
generateHtmlTemplateFile si = do
  let
    settings = S.Settings "2016-2017" "Seattle Area Haskell Users' Group" Nothing
    pageCtx = PC.mkPageContext { PC.title = "SeaHUG - $title$" }
    templateCtx = TC.mkTemplateContext settings pageCtx
    html = T.renderHtmlTemplate si templateCtx (renderHtmlUrl si)
  putStrLn (renderHtml html)
-- | Collect stylesheet and script resources (local files are hashed,
-- external URLs are not), then print the rendered template to stdout.
main :: IO ()
main = do
  localStylesheets <- readResources "css" localStylesheetFileNames
  let externalScripts = map (`R.Resource` Nothing) externalScriptUrls
  localScripts <- readResources "js" localScriptFileNames
  let scripts = externalScripts ++ localScripts
  defaultStylesheet <- mkDefaultCssResource
  let
    stylesheets = localStylesheets ++ [defaultStylesheet]
    siteInfo = SI.SiteInfo stylesheets scripts
  generateHtmlTemplateFile siteInfo
| seahug/seattlehaskell-org-static | src/templategen/Main.hs | mit | 3,638 | 0 | 16 | 663 | 829 | 463 | 366 | 69 | 1 |
-- | This module provides an API for interacting with
-- stack-ide over the websocket interface provided by soh-runner.
--
-- This API wraps up the different patterns of sending requests and
-- expecting responses, such that the code which uses it mostly does
-- not need to worry about violating any protocol invariants.
--
-- The only current invariant that needs to be preserved is that all
-- the functions which expect a response can't be executed
-- concurrently. In particular, this applies to all of the queries,
-- 'updateSession', and 'expectWelcome'. Process starting, stdin,
-- stdout, and killing can all be done concurrently.
--
-- In the future, a runtime check for this might be added. However,
-- for now this is enforced by the single-threaded nature of "Model".
module Model.Protocol
( Backend
, withUrl
-- * Commands
, updateSession
, requestRun
-- * Queries
, getSourceErrors
, getSpanInfo
, getExpTypes
, getAnnExpTypes
-- * Process IO
, setProcessHandler
, sendProcessInput
, sendProcessKill
-- * Misc
, expectWelcome
-- * Runner commands
, requestOpenPort
, requestPortListening
) where
import Control.Concurrent.Async (race)
import Control.Concurrent.STM
import Data.Aeson (ToJSON, FromJSON, eitherDecodeStrict, encode)
import Data.ByteString.Lazy (toStrict)
import Data.Function (fix)
import Data.IORef
import Data.Text.Encoding (encodeUtf8, decodeUtf8)
import qualified Data.UUID.Types as UUID
import Data.Void (absurd)
import Import
import qualified JavaScript.WebSockets as WS
import Model.Server (lookupPort)
import SchoolOfHaskell.Runner.API
import SchoolOfHaskell.Scheduler.API
-- | Given the URL of the SoH container, this creates a websockets
-- connection to it.  Authenticates with the container receipt, then runs
-- dedicated send/receive threads alongside the user callback.
withUrl :: Text -> PortMappings -> ContainerReceipt -> (Backend -> IO a) -> IO a
withUrl backendHost backendPortMappings (ContainerReceipt uuid) f =
  let port = lookupPort defaultBackendPort backendPortMappings
      url = "ws://" <> backendHost <> ":" <> tshow port in
  WS.withUrl url $ \conn -> do
    -- Send the receipt to the backend. If it's rejected, then an
    -- exception is thrown.
    let receiptText = decodeUtf8 (UUID.toASCIIBytes uuid)
    sendJson conn (RunnerRequestAuth receiptText)
    authResponse <- receiveJson conn
    case authResponse of
      RunnerResponseAuthSuccess -> return ()
      _ -> fail "Didn't receive expected authentication success from runner."
    -- Initialize state of the 'Backend' type, and fork off threads for
    -- handling communication with the backend.
    backendRequestChan <- newTChanIO
    backendResponseChan <- newTChanIO
    backendProcessHandler <- newIORef $ \_ ->
      consoleWarnText "backendProcessHandler not yet set"
    -- Send thread drains the request channel; receive thread dispatches
    -- responses either to the process handler or the response channel.
    let sendThread = showExceptions "sendThread" $ forever $
          atomically (readTChan backendRequestChan) >>= sendJson conn
        receiveThread = showExceptions "receiveThread" $ forever $ do
          response <- receiveJson conn
          let enqueueResponse = atomically (writeTChan backendResponseChan response)
          case response of
            RunnerResponseAuthSuccess ->
              fail "Didn't expect to receive auth response while running"
            RunnerResponseAuthFailure ->
              fail "Didn't expect to receive auth response while running"
            RunnerResponsePortIsListening ->
              readIORef backendProcessHandler >>= ($ ProcessListening)
            RunnerResponseOpenPort {} ->
              enqueueResponse
            RunnerResponseClient (NoSeq response') ->
              case response' of
                ResponseProcessOutput bs ->
                  readIORef backendProcessHandler >>= ($ ProcessOutput bs)
                ResponseProcessDone rr ->
                  readIORef backendProcessHandler >>= ($ ProcessDone rr)
                -- This is expected to happen due to always requesting
                -- kill before running.
                ResponseNoProcessError ->
                  consoleWarnText "No running process"
                ResponseLog msg ->
                  consoleLogText msg
                _ ->
                  enqueueResponse
            RunnerResponseClient HasSeq{} ->
              consoleErrorText "Didn't expect sequenced response from server."
    -- The two loops run 'forever' (hence 'Void'/'absurd' below), so the
    -- overall result normally comes from 'f' unless a thread throws.
    result <- receiveThread `race` sendThread `race` f Backend {..}
    case result of
      Left (Left x) -> absurd x
      Left (Right x) -> absurd x
      Right x -> return x
--------------------------------------------------------------------------------
-- Commands
-- | Sends updates to the backend. The backend streams back progress
-- updates until compilation finishes; every 'UpdateStatus' received is
-- passed to the callback.  The loop ends (and this function returns)
-- after the first status that is not 'UpdateStatusProgress'.
updateSession :: Backend -> [RequestSessionUpdate] -> (UpdateStatus -> IO ()) -> IO ()
updateSession backend updates f = do
  sendRequest backend (RequestUpdateSession updates)
  fix $ \loop -> do
    x <- expectResponse backend
      (^? _RunnerResponseClient . _NoSeq . _ResponseUpdateSession)
      "ResponseUpdateSession"
    f x
    case x of
      UpdateStatusProgress _ -> loop
      _ -> return ()
-- | Requests that the backend run the user's code; the module name and
-- identifier of the entry point are taken as parameters.
requestRun :: Backend -> ModuleName -> Identifier -> IO ()
requestRun backend moduleName entryPoint =
  sendRequest backend (RequestRun True moduleName entryPoint)
--------------------------------------------------------------------------------
-- Queries
-- NOTE: none of these may run concurrently with each other or with
-- 'updateSession'/'expectWelcome' (see the module header).
-- | Gets the source errors of the last compilation.
getSourceErrors :: Backend -> IO [SourceError]
getSourceErrors backend =
  queryBackend backend
    RequestGetSourceErrors
    _ResponseGetSourceErrors
    "ResponseGetSourceErrors"
-- | Gets the span info of the last __error-free__ compile. Span info
-- tells you where an identifier came from.
getSpanInfo :: Backend -> SourceSpan -> IO [ResponseSpanInfo]
getSpanInfo backend ss =
  queryBackend backend
    (RequestGetSpanInfo ss)
    _ResponseGetSpanInfo
    "ResponseGetSpanInfo"
-- | Gets the type info of the last __error-free__ compile for the given
-- source span.
getExpTypes :: Backend -> SourceSpan -> IO [ResponseExpType]
getExpTypes backend ss =
  queryBackend backend
    (RequestGetExpTypes ss)
    _ResponseGetExpTypes
    "ResponseGetExpTypes"
-- | Gets the annotated type info of the last __error-free__ compile. These
-- annotations add identifier info to the type info, so that doc links
-- can be provided in the type info.
getAnnExpTypes :: Backend -> SourceSpan -> IO [ResponseAnnExpType]
getAnnExpTypes backend ss =
  queryBackend backend
    (RequestGetAnnExpTypes ss)
    _ResponseGetAnnExpTypes
    "ResponseGetAnnExpTypes"
-- Pattern of sending a request and expecting a response, common to
-- the queries above.  @expected@ is only used for the error message.
queryBackend :: Backend -> Request -> Prism' Response a -> String -> IO a
queryBackend backend request p expected = do
  sendRequest backend request
  expectResponse backend (^? _RunnerResponseClient . _NoSeq . p) expected
--------------------------------------------------------------------------------
-- Process IO

-- | Sets the callback used to handle process output: stdout arrives as
-- 'ProcessOutput' values and process exit as 'ProcessDone'.
setProcessHandler :: Backend -> (ProcessOutput -> IO ()) -> IO ()
setProcessHandler backend handler =
  atomicWriteIORef (backendProcessHandler backend) handler

-- | Sends stdin to the process.
sendProcessInput :: Backend -> String -> IO ()
sendProcessInput backend input =
  sendRequest backend (RequestProcessInput input)

-- | Sends a SIGINT signal to the process, equivalent of using Ctrl-C.
-- Safe when no process is running: the backend's ResponseNoProcessError
-- is logged and ignored by the receive thread in 'withUrl'.
sendProcessKill :: Backend -> IO ()
sendProcessKill backend = sendRequest backend RequestProcessKill
--------------------------------------------------------------------------------
-- Misc
-- | Expects the welcome message which is sent by stack-ide once the
-- connection is established.  Like the queries, must not run concurrently
-- with other response-expecting calls (see module header).
expectWelcome :: Backend -> IO VersionInfo
expectWelcome backend =
  expectResponse backend (^? _RunnerResponseClient . _NoSeq . _ResponseWelcome) "ResponseWelcome"
--------------------------------------------------------------------------------
-- SoH Runner Commands
-- | Asks the runner to open a fresh port; returns the port number.
requestOpenPort :: Backend -> IO Int
requestOpenPort backend = do
  sendRequest' backend RunnerRequestOpenPort
  expectResponse backend (^? _RunnerResponseOpenPort) "RunnerResponseOpenPort"
-- | Tells the runner to report when the given port accepts connections;
-- completion arrives asynchronously via the process handler as
-- 'ProcessListening' (see 'withUrl').
requestPortListening :: Backend -> Int -> IO ()
requestPortListening backend port =
  sendRequest' backend (RunnerRequestPortListening port)
--------------------------------------------------------------------------------
-- Backend IO

-- | Wrap a client request in the runner envelope and queue it for sending.
sendRequest :: Backend -> Request -> IO ()
sendRequest backend request =
  sendRequest' backend (RunnerRequestClient (NoSeq request))

-- | Queue a raw runner request; the send thread started by 'withUrl'
-- drains the channel.
sendRequest' :: Backend -> RunnerRequest -> IO ()
sendRequest' backend request =
  atomically (writeTChan (backendRequestChan backend) request)

-- | Block until the receive thread enqueues the next runner response.
receiveResponse :: Backend -> IO RunnerResponse
receiveResponse backend =
  atomically (readTChan (backendResponseChan backend))

-- | Receive one response and project it with the given selector, failing
-- with a protocol error naming the expected constructor otherwise.
expectResponse :: Backend -> (RunnerResponse -> Maybe a) -> String -> IO a
expectResponse backend selector expected = do
  response <- receiveResponse backend
  case selector response of
    Just value -> return value
    Nothing ->
      fail ("Protocol error: expected " ++ expected ++
            " instead of " ++ show response)
--------------------------------------------------------------------------------
-- Sending and receiving JSON
-- TODO: fewer conversions...

-- | Encode a value to JSON and send it as one websocket text frame.
sendJson :: ToJSON a => WS.Connection -> a -> IO ()
sendJson conn value = sendText conn (decodeUtf8 (toStrict (encode value)))

-- | Send raw text, failing loudly if the websocket has disconnected.
sendText :: WS.Connection -> Text -> IO ()
sendText conn msg = do
  stillConnected <- WS.sendText conn msg
  when (not stillConnected) (fail "Websocket disconnected")

-- | Receive one text frame and decode it as JSON; malformed payloads fail.
receiveJson :: FromJSON a => WS.Connection -> IO a
receiveJson conn = do
  frame <- WS.receiveText conn
  either (\err -> fail ("JSON decode error: " ++ err)) return
         (eitherDecodeStrict (encodeUtf8 frame))
| fpco/schoolofhaskell | soh-client/src/Model/Protocol.hs | mit | 10,290 | 0 | 25 | 2,269 | 1,808 | 928 | 880 | -1 | -1 |
{-# LANGUAGE ViewPatterns #-}
module Cafe.ChefTodoList
( chefTodoListMain
) where
import Control.Monad (forM_, unless)
import Control.Monad.Logger (runNoLoggingT)
import Data.List (foldl')
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (catMaybes, mapMaybe)
import Data.Monoid ((<>))
import Data.Text (pack)
import Database.Persist.Sql
import Database.Persist.Sqlite
import Options.Applicative
import System.Console.ANSI (clearScreen, setCursorPosition)
import Eventful
import Eventful.ReadModel.Memory
import Eventful.Store.Sqlite
import Cafe.CLI.Options (parseDatabaseFileOption)
import Cafe.CLI.Transformer
import Cafe.Models.Tab
-- | Create an in-memory read model that polls the SQLite event store and
-- updates the chef's todo list.
-- Uses a single-connection pool; the final argument of
-- 'runPollingReadModel' is the polling interval (presumably seconds —
-- TODO confirm against eventful's docs).
chefTodoListMain :: IO ()
chefTodoListMain = do
  dbFilePath <- execParser $ info (helper <*> parseDatabaseFileOption) (fullDesc <> progDesc "Chef Todo List Terminal")
  pool <- runNoLoggingT $ createSqlitePool (pack dbFilePath) 1
  readModel <- memoryReadModel Map.empty handleChefReadModelEvents
  runPollingReadModel readModel cliGloballyOrderedEventStore (`runSqlPool` pool) 1
-- | Read-model step: fold freshly-polled events into the per-tab food map
-- and re-render whenever anything arrived.  Events that fail to
-- deserialize as 'TabEvent' are silently skipped ('mapMaybe').
handleChefReadModelEvents
  :: Map UUID [Maybe Food]
  -> [GloballyOrderedEvent JSONString]
  -> IO (Map UUID [Maybe Food])
handleChefReadModelEvents foodMap (map globallyOrderedEventToStoredEvent -> events) = do
  let
    tabEvents = mapMaybe (traverse $ deserialize jsonStringSerializer) events :: [StoredEvent TabEvent]
    foodMap' = foldl' handleEventToMap foodMap $ tabEvents
  unless (null events) $ printFood foodMap'
  return foodMap'
-- | Apply one stored event to the map: a closed tab is removed outright;
-- any other event updates that tab's food list (created empty on demand).
handleEventToMap :: Map UUID [Maybe Food] -> StoredEvent TabEvent -> Map UUID [Maybe Food]
handleEventToMap foodMap (StoredEvent uuid _ (TabClosed _)) = Map.delete uuid foodMap
handleEventToMap foodMap storedEvent =
  let
    uuid = storedEventProjectionId storedEvent
    oldList = Map.findWithDefault [] uuid foodMap
  in Map.insert uuid (handleEventToFood oldList $ storedEventEvent storedEvent) foodMap
-- | Apply one tab event to a food list: ordered food is appended as
-- 'Just' entries; prepared food is blanked out by index; anything else
-- leaves the list unchanged.
handleEventToFood :: [Maybe Food] -> TabEvent -> [Maybe Food]
handleEventToFood current (FoodOrdered newItems) = current ++ fmap Just newItems
handleEventToFood current (FoodPrepared prepared) = setIndexesToNothing prepared current
handleEventToFood current _ = current
-- | Clear the terminal and print every tab's still-unprepared food items
-- (prepared items are 'Nothing' and dropped by 'catMaybes'); tabs with
-- nothing pending are skipped entirely.
printFood :: Map UUID [Maybe Food] -> IO ()
printFood foodMap = do
  clearScreen
  setCursorPosition 0 0
  putStrLn "Chef's Todo List:"
  forM_ (Map.keys foodMap) $ \uuid -> do
    let foodItems = catMaybes $ foodMap Map.! uuid
    unless (null foodItems) $ do
      putStrLn $ "Tab: " ++ show uuid
      forM_ foodItems $ \(Food (MenuItem desc _)) -> putStrLn $ " - Item: " ++ desc
| jdreaver/eventful | examples/cafe/src/Cafe/ChefTodoList.hs | mit | 2,655 | 0 | 19 | 400 | 784 | 402 | 382 | 58 | 1 |
import System.Environment
import System.Exit (exitFailure)
import System.IO (hPutStrLn, stderr)
import qualified Data.ByteString.Lazy as B
import qualified Data.ByteString as S
-- | Entry point: copy the file named by the first argument to the path
-- named by the second.  Previously the argument pattern was partial and
-- crashed with a pattern-match error when fewer than two arguments were
-- given; now a usage message is printed instead.
main :: IO ()
main = do
  args <- getArgs
  case args of
    (copyFrom:copyTo:_) -> copy copyFrom copyTo
    _ -> do
      hPutStrLn stderr "usage: copy SOURCE DEST"
      exitFailure
-- | Copy a file by reading its bytes (lazily) and writing them out.
copy :: FilePath -> FilePath -> IO ()
copy copyFrom copyTo = B.readFile copyFrom >>= B.writeFile copyTo
-- (tempName, tempHandle) <- openTempFile "." "temp"
-- B.hPutStr tempHandle contents
-- hClose tempHandle
-- renameFile tempName copyTo
| RAFIRAF/HASKELL | IO/copy.hs | mit | 449 | 0 | 10 | 78 | 108 | 58 | 50 | 10 | 1 |
{-|
Copyright: (c) Guilherme Azzi, 2014
License: MIT
Maintainer: ggazzi@inf.ufrgs.br
Stability: experimental
Provides combinators for changing the drawing position.
Dzen's drawing behavior for the title bar is as follows. At any time, there is a
current (x,y) position. Whenever something is drawn, the x position is advanced
by the width of the object; the y position remains constant.
While the combinators for the vertical position are compositional, those for the
horizontal position are not. Their used must therefore always be encapsulated in
consistent ways:
* The widget should not draw over previously drawn widgets.
* The next widget to be drawn should not draw over the widget.
-}
module Reactive.Banana.Dzen.Unsafe.Position
( xpos, xposB
, ypos, yposB
, ycenter
) where
import Control.Applicative
import Control.Monad.State
import Reactive.Banana
import Reactive.Banana.Dzen.Internal.Widget
-- | Change the current horizontal position by the given amount of pixels.
--
-- Positive values move to the right; negative values, to the left.
xpos :: Int -> Widget t
xpos dx = Widget (pure (changeX dx))

-- | Change the current horizontal position by the given time-varying
-- amount of pixels.
--
-- Positive values move to the right; negative values, to the left.
xposB :: Behavior t Int -> Widget t
xposB dxB = Widget (changeX <$> dxB)

-- | Emit the drawing command ("p") that shifts the current x position.
changeX :: Int -> WidgetM ()
changeX dx = command "p" [show dx]

-- | Change the widget's vertical position by the given amount of pixels.
--
-- Positive values move down; negative values, up.
ypos :: Int -> Widget t -> Widget t
ypos dy widget = Widget (changeY (Just dy) <$> unWidget widget)

-- | Change the widget's vertical position by the given time-varying
-- amount of pixels.
--
-- Positive values move down; negative values, up.
yposB :: Behavior t Int -> Widget t -> Widget t
yposB dyB widget = Widget (changeY <$> fmap Just dyB <*> unWidget widget)

-- | Change the widget's vertical position to the center of the line.
ycenter :: Widget t -> Widget t
ycenter widget = Widget (changeY Nothing <$> unWidget widget)
-- | Run a widget at an adjusted vertical position (@Just dy@ = relative
-- offset, @Nothing@ = centered), restoring the previous position after
-- the sub-widget has drawn.
changeY :: Maybe Int -> WidgetM () -> WidgetM ()
changeY newY = \subWidget -> do
  prevY <- gets yPos
  setY newY
  subWidget
  setY prevY
  -- setY records the position in the widget state and emits the "p"
  -- command; a Nothing offset sends no argument (center), Just v sends v.
  where setY y = do modify (\s -> s {yPos=y})
                    command "p" $ maybe [] (\v -> [";", show v]) $ y
| ggazzi/hzen | src/Reactive/Banana/Dzen/Unsafe/Position.hs | mit | 2,310 | 0 | 15 | 454 | 427 | 226 | 201 | 28 | 1 |
{-# LANGUAGE MultiParamTypeClasses #-}
import qualified Data.List as L
import Control.Applicative
colwidth = 20        -- interior width of a task column, in characters
linechar = '-'       -- fill character for horizontal rules
colchar = '|'        -- column border character
minutesperline = 30  -- minutes represented by one rendered line
-- TODO: refine these type classes to set up the structure for printing a task
-- NOTE(review): the method name 'showTask' collides with the top-level
-- function 'showTask' below; one of the two must be renamed before this
-- module can compile.
class (Timeable t, Functor tt) => Taskable tt t p d where
  showTask :: (tt t p d) -> String
-- | Operations a time representation must support for rendering.
class Timeable t where
  -- | Numeric date component of a time value.
  date :: (Show y, Integral y) => t -> y
  -- | Numeric time-of-day component of a time value.
  time :: (Show y, Integral y) => t -> y
  -- | Number of display lines spanned between two times.
  timesTolines :: (Integral y) => t -> t -> y
  -- | Format a description to fit between two times.
  formatDesc :: t -> t -> String -> String
-- a task is made of a start date, end date, description, and a priority
-- taskTimes should be of at least length 2
data Task tt p d = Task
  {taskTimes :: [tt]
  ,taskPrio :: p
  ,taskDesc :: d
  } deriving (Eq, Ord, Show)
-- | A point in time: a date component plus a time-of-day component.
data TaskTime d t = TaskTime
  {taskTimeDate :: d
  ,taskTimeTime :: t
  } deriving (Eq, Ord, Show)
-- | Render a 'Task' for display: each consecutive pair of 'taskTimes'
-- becomes one string containing a header line (priority plus the two
-- times), the formatted description, and a closing rule.  A trailing
-- unpaired time (or an empty list) yields a single empty rendering.
--
-- The previous signature was syntactically invalid (@:: =>@ with an
-- empty context) and applied the three-parameter 'Task' to four type
-- arguments; both are corrected here.
showTask :: (Integral d, Integral t, Show d, Show t, Show p, Show desc)
         => Task (TaskTime d t) p desc -> [String]
showTask (Task {taskTimes=tts,taskPrio=p,taskDesc=d}) = pTasks (tts,p,d)
  where
    -- Walk the time list pairwise.
    pTasks ([],_,_) = [""]
    pTasks ([_],_,_) = [""]
    pTasks ((t1:t2:tt),p',d') = pTask t1 t2 p' d' : pTasks (tt,p',d')
    -- One task pane: header line, description body, closing rule.
    pTask t1 t2 p' d' = pTimeLine t1 t2 p'
                        ++ "\n" ++ showDesc t1 t2 d'
                        ++ lineFilled
    -- Header line carrying the priority and the start/end times.
    pTimeLine t1 t2 p' = insertAt 7 (show $ taskTimeTime t1)
                       $ insertAt (colwidth-6) (show $ taskTimeTime t2)
                       $ insertAt 3 (show p') lineFilled
-- formats the description of a task given the start and end times and the description
showDesc :: (Integral ta, Integral tb, Show ta, Show tb, Show d) => (TaskTime ta tb) -> (TaskTime ta tb) -> d -> String
showDesc t1 t2 d = truncLines $ (L.intercalate "\n" $ putInLine <$> formatLines) ++ blines
  where
    formatLines = splitEvery colwidth (show d)       -- wrap text to the column width
    blines = ('\n':) . nBlankLines $ n               -- padding below the text
    putInLine = (flip $ insertAt 1) lineBlank        -- splice a text row into a blank line
    n = fromIntegral (timesToLines t1 t2) - (length formatLines)
    -- Trim (or, for negative n, cut) the output to the task's line budget.
    truncLines = unlines . (\s -> take (sn s) s) . lines
    sn = (n+) . length
-- | Convert the span between two 'TaskTime's into a number of display
-- lines, at 'minutesperline' minutes per rendered line.
timesToLines :: (Integral dt, Integral t, Integral y) => (TaskTime dt t) -> (TaskTime dt t) -> y
timesToLines (TaskTime d1 m1) (TaskTime d2 m2) = toLineCount total
  where
    total = fromIntegral (d2 - d1) + fromIntegral (m2 - m1)
    toLineCount = round . (/ minutesperline) . toRational
-- Convenience values for drawing full-width lines with column markers.

-- | A "|--...--|" rule line.
lineFilled = line linechar

-- | A "|  ...  |" blank line.
lineBlank = line ' '

-- | @count@ blank lines joined with newlines; empty for non-positive counts.
nBlankLines :: Int -> String
nBlankLines count
  | count <= 0 = ""
  | otherwise = L.intercalate "\n" (L.replicate count lineBlank)

-- | A "|cc...cc|" line: 'colwidth' copies of the fill character between
-- the two column markers.
line :: Char -> String
line fill = concat [[colchar], replicate colwidth fill, [colchar]]
-- | Splice @needle@ into @haystack@ starting at @index@, overwriting the
-- elements it covers.  The result never grows beyond the haystack's
-- length, so an overlong needle is truncated at the right edge.
insertAt :: Int -> [a] -> [a] -> [a]
insertAt index needle haystack = take (length haystack) spliced
  where
    spliced = before ++ needle ++ after
    before = take index haystack
    after = drop (index + length needle) haystack
-- | Break a list into consecutive chunks of at most @n@ elements (used to
-- wrap descriptions to the column width).
-- NOTE: assumes @n >= 1@; @n <= 0@ yields an infinite list of empty chunks.
splitEvery :: Int -> [a] -> [[a]]
splitEvery _ [] = []
splitEvery n xs = chunk : splitEvery n rest
  where (chunk, rest) = L.splitAt n xs
-- Ad-hoc fixtures for exercising the rendering code (e.g. in GHCi).
ta = TaskTime 12 10
tb = TaskTime 182 10
t = Task [ta,tb,ta,tb] 'a' "Lorem ipsum dolor sit amet, consetetur sadipscing elitr,"
-- | Print each rendered line of a task (see 'showTask') to stdout.
--
-- The previous signature applied the three-parameter 'Task' type to four
-- type arguments; it is corrected here, and the hand-rolled print loop is
-- replaced by the equivalent 'mapM_'.
tst :: (Integral d, Integral t, Show d, Show t, Show p)
    => Task (TaskTime d t) p String -> IO ()
tst = mapM_ putStrLn . showTask
| theNerd247/yadphs | src/T.hs | gpl-2.0 | 3,855 | 11 | 13 | 824 | 1,386 | 747 | 639 | -1 | -1 |
module Main where
import Data.Maybe (isNothing)
import Data.Tree (Tree(..))
import Data.List (nub)
import Test.Tasty (TestTree, defaultMain, testGroup)
import Test.Tasty.HUnit ((@=?), testCase)
import Test.Tasty.QuickCheck ((==>), Property, testProperty)
import Utils (addTrees, intersectTrees, itemPred, removeTrees)
-- | 'itemPred' never yields a predecessor for the first element of a
-- duplicate-free, non-empty list.
prop_itemPred_bounded :: Eq a => [a] -> a -> Property
prop_itemPred_bounded ns x =
  nub ns == ns ==>
  not (null ns) ==>
  head ns == x ==>
  isNothing (ns `itemPred` x)
-- | QuickCheck property group (instantiated at Int).
properties :: TestTree
properties = testGroup "Properties"
  [ testProperty "itemPred bounded"
    (prop_itemPred_bounded :: [Int] -> Int -> Property) ]
-- | Unit tests for 'addTrees' (tree union anchored at equal roots).
-- NB: the "rot" in two labels below is a typo in a runtime string; left
-- byte-identical on purpose.
addTreesTests :: TestTree
addTreesTests =
  testGroup "Unit tests for addTrees"
  [ testCase "... when trees are equal"
    ((Just $ Node 0 []) @=? addTrees (Node 0 []) (Node 0 []))
  , testCase "... when trees do not have same rot"
    (Nothing @=? addTrees (Node 0 []) (Node 1 []))
  , testCase "... when the second tree is fully in the first"
    ((Just $ Node 0 [Node 1 [], Node 2 []]) @=?
     addTrees (Node 0 [Node 1 [], Node 2 []])
              (Node 0 [Node 2 []]))
  , testCase "... when the second is fully in the first (bis)"
    ((Just $ Node 0 [Node 1 [], Node 2 [Node 3 []]]) @=?
     addTrees (Node 0 [Node 1 [], Node 2 [Node 3 []]])
              (Node 0 [Node 2 [Node 3 []]]))
  , testCase "... when the second is not fully in the first"
    ((Just $ Node 0 [Node 1 []]) @=?
     addTrees (Node 0 [])
              (Node 0 [Node 1 []])) ]
-- | Unit tests for 'removeTrees' (tree difference; removing everything
-- yields Nothing).
removeTreesTests :: TestTree
removeTreesTests = testGroup "Unit tests for removeTrees"
  [ testCase "... when trees are equal"
    (Nothing @=? removeTrees (Node 0 []) (Node 0 []))
  , testCase "... when trees do not have same rot"
    ((Just $ Node 0 []) @=? removeTrees (Node 0 []) (Node 1 []))
  , testCase "... when the second tree is fully in the first"
    ((Just $ Node 0 [Node 1 []]) @=?
     removeTrees (Node 0 [Node 1 [], Node 2 []])
                 (Node 0 [Node 2 []]))
  , testCase "... when the second is fully in the first (bis)"
    ((Just $ Node 0 [Node 1 []]) @=?
     removeTrees (Node 0 [Node 1 [], Node 2 [Node 3 []]])
                 (Node 0 [Node 2 [Node 3 []]]))
  , testCase "... when the second is not fully in the first"
    (Nothing @=?
     removeTrees (Node 0 [])
                 (Node 0 [Node 1 []])) ]
-- | Unit tests for 'intersectTrees' (common subtree anchored at the root).
intersectTreesTests :: TestTree
intersectTreesTests =
  testGroup "Unit tests for intersectTrees"
  [ testCase "... when trees are equal"
    (Just (Node 0 []) @=?
     intersectTrees (Node 0 []) (Node 0 []))
  , testCase "... when trees do not have same root"
    (Nothing @=? intersectTrees (Node 0 []) (Node 1 []))
  , testCase "... when the second tree is fully in the first"
    ((Just $ Node 0 [Node 2 []]) @=?
     intersectTrees (Node 0 [Node 1 [], Node 2 []])
                    (Node 0 [Node 2 []]))
  , testCase "... when the second is fully in the first (bis)"
    ((Just $ Node 0 [Node 2 [Node 3 []]]) @=?
     intersectTrees (Node 0 [Node 1 [], Node 2 [Node 3 []]])
                    (Node 0 [Node 2 [Node 3 []]]))
  , testCase "... when the second is not fully in the first"
    ((Just $ Node 0 []) @=?
     intersectTrees (Node 0 [])
                    (Node 0 [Node 1 []])) ]
-- | All HUnit groups under a single tree.
unitTests :: TestTree
unitTests = testGroup "Unit tests"
  [ addTreesTests, removeTreesTests, intersectTreesTests ]
-- | Complete suite: properties plus unit tests.
tests :: TestTree
tests = testGroup "tests" [ properties, unitTests ]
-- | Entry point: run the whole suite with tasty's default runner.
main :: IO ()
main = defaultMain tests
| bbshortcut/Palace | test/tests.hs | gpl-3.0 | 4,297 | 0 | 17 | 1,719 | 1,407 | 726 | 681 | -1 | -1 |
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Texture.ODF
( ODF ( odfIntensity
, odfGrid
, odfGridSize
, odfGridStep
, odfTree
, odfSymm
, odfKernelWidth
)
, buildEmptyODF
, resetODF
, addPoints
, addPointsWithConst
, integrateODFwith
, getODFeval
, getMaxOrientation
, renderODFVTK
) where
import Data.Maybe
import Hammer.VTK
import Linear.Vect
import qualified Data.BlazeVPtree as VP
import qualified Data.Vector.Unboxed as U
import Texture.Orientation
import Texture.Symmetry
import Texture.IsoSphere
import Texture.TesseractGrid
import Texture.Kernel
-- | Orientation distance = misorientation angle under cubic symmetry.
-- NOTE(review): orphan instance (warning disabled via the pragma above),
-- and it hard-codes 'Cubic' even though each 'ODF' carries its own
-- 'odfSymm' — confirm this is intended before using non-cubic symmetries.
instance VP.Metric Quaternion where
  dist = getMisoAngle Cubic
-- | A discretised orientation distribution function.
data ODF
  = ODF
  { odfIntensity :: U.Vector Double      -- ^ intensity at each grid point
  , odfGrid :: U.Vector Quaternion       -- ^ grid orientations (fundamental zone only)
  , odfGridSize :: Int                   -- ^ number of grid points
  , odfGridStep :: Int                   -- ^ subdivision count used to generate the grid
  , odfTree :: VP.VPtree Quaternion      -- ^ VP-tree over 'odfGrid' for nearest lookups
  , odfSymm :: Symm                      -- ^ crystal symmetry
  , odfKernelWidth :: Rad                -- ^ default kernel width
  } deriving (Show)
-- | Build an ODF with all intensities at zero.  @kw@ is the kernel width,
-- @symm@ the crystal symmetry, @step@ the angular grid resolution; the
-- grid is restricted to the Rodrigues fundamental zone of @symm@.
buildEmptyODF :: Deg -> Symm -> Deg -> ODF
buildEmptyODF kw symm step
  = ODF
  { odfIntensity = U.replicate n 0
  , odfGrid = qs
  , odfGridSize = n
  , odfGridStep = s
  , odfTree = VP.fromVector qs
  , odfSymm = symm
  , odfKernelWidth = toAngle (fromAngle kw)   -- Deg converted to Rad
  }
  where
    n = U.length qs
    s = abs $ round (4 / fromAngle step)
    qs = U.filter (isInRodriFZ symm) $ genQuaternionGrid s
-- | Zero all accumulated intensities; grid, tree and settings are kept.
resetODF :: ODF -> ODF
resetODF odf = odf { odfIntensity = zeroed }
  where zeroed = U.replicate (odfGridSize odf) 0

-- | Add a kernel of height @k@ centred at each given orientation.  A
-- custom kernel width, when supplied, overrides 'odfKernelWidth'.
addPointsWithConst :: U.Vector Quaternion -> Double -> Maybe Rad -> ODF -> ODF
addPointsWithConst qs k customWidth odf@ODF{..} =
  odf { odfIntensity = updated }
  where
    updated = addManyKernelsWithConst effectiveWidth k odfTree qs odfIntensity
    effectiveWidth = fromMaybe odfKernelWidth customWidth

-- | Add the default-width kernel centred at each given orientation.
addPoints :: U.Vector Quaternion -> ODF -> ODF
addPoints qs odf@ODF{..} =
  odf { odfIntensity = addManyKernels odfKernelWidth odfTree qs odfIntensity }
-- | Evaluate the ODF at an arbitrary orientation via nearest-grid-point
-- lookup (within one grid step); 0 when no grid point is close enough.
getODFeval :: ODF -> (Quaternion -> Double)
getODFeval ODF{..} = maybe 0 (\(i, _, _) -> odfIntensity U.! i) . func
  where
    step = 4 / fromIntegral odfGridStep   -- same spacing the grid was built with
    func = VP.nearestThanNeighbor odfTree step
-- | Grid orientation carrying the highest intensity, with that intensity.
getMaxOrientation :: ODF -> (Quaternion, Double)
getMaxOrientation ODF{..} = (odfGrid U.! peak, odfIntensity U.! peak)
  where peak = U.maxIndex odfIntensity

-- | Sum @func orientation intensity@ over every grid point.
integrateODFwith :: (U.Unbox a, Num a)=> (Quaternion -> Double -> a) -> ODF -> a
integrateODFwith func ODF{..} = U.sum (U.zipWith func odfGrid odfIntensity)
-- | Render the ODF grid as VTK points carrying an "Intensity" point
-- attribute, for visualisation.
renderODFVTK :: ODF -> VTK Vec3D
renderODFVTK ODF{..} = let
  attr = mkPointValueAttr "Intensity" (\i _ -> odfIntensity U.! i)
  vtk = renderQuaternions odfGrid []
  in addPointValueAttr vtk attr
| lostbean/sledge | src/Texture/ODF.hs | gpl-3.0 | 2,696 | 0 | 12 | 604 | 841 | 465 | 376 | 79 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DFAReporting.CreativeFieldValues.Update
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates an existing creative field value.
--
-- /See:/ <https://developers.google.com/doubleclick-advertisers/ DCM/DFA Reporting And Trafficking API Reference> for @dfareporting.creativeFieldValues.update@.
module Network.Google.Resource.DFAReporting.CreativeFieldValues.Update
(
-- * REST Resource
CreativeFieldValuesUpdateResource
-- * Creating a Request
, creativeFieldValuesUpdate
, CreativeFieldValuesUpdate
-- * Request Lenses
, cfvuCreativeFieldId
, cfvuProFileId
, cfvuPayload
) where
import Network.Google.DFAReporting.Types
import Network.Google.Prelude
-- | A resource alias for @dfareporting.creativeFieldValues.update@ method which the
-- 'CreativeFieldValuesUpdate' request conforms to.
-- NOTE(review): this module is auto-generated (see header); prefer
-- regenerating over hand-editing.
type CreativeFieldValuesUpdateResource =
     "dfareporting" :>
       "v2.7" :>
         "userprofiles" :>
           Capture "profileId" (Textual Int64) :>
             "creativeFields" :>
               Capture "creativeFieldId" (Textual Int64) :>
                 "creativeFieldValues" :>
                   QueryParam "alt" AltJSON :>
                     ReqBody '[JSON] CreativeFieldValue :>
                       Put '[JSON] CreativeFieldValue
-- | Updates an existing creative field value.
--
-- /See:/ 'creativeFieldValuesUpdate' smart constructor.
data CreativeFieldValuesUpdate = CreativeFieldValuesUpdate'
    { _cfvuCreativeFieldId :: !(Textual Int64)
    , _cfvuProFileId :: !(Textual Int64)
    , _cfvuPayload :: !CreativeFieldValue
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'CreativeFieldValuesUpdate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cfvuCreativeFieldId'
--
-- * 'cfvuProFileId'
--
-- * 'cfvuPayload'
creativeFieldValuesUpdate
    :: Int64 -- ^ 'cfvuCreativeFieldId'
    -> Int64 -- ^ 'cfvuProFileId'
    -> CreativeFieldValue -- ^ 'cfvuPayload'
    -> CreativeFieldValuesUpdate
creativeFieldValuesUpdate pCfvuCreativeFieldId_ pCfvuProFileId_ pCfvuPayload_ =
    CreativeFieldValuesUpdate'
    { _cfvuCreativeFieldId = _Coerce # pCfvuCreativeFieldId_
    , _cfvuProFileId = _Coerce # pCfvuProFileId_
    , _cfvuPayload = pCfvuPayload_
    }
-- | Creative field ID for this creative field value.
-- The '_Coerce' composition peels the 'Textual' wrapper for callers.
cfvuCreativeFieldId :: Lens' CreativeFieldValuesUpdate Int64
cfvuCreativeFieldId
  = lens _cfvuCreativeFieldId
      (\ s a -> s{_cfvuCreativeFieldId = a})
      . _Coerce
-- | User profile ID associated with this request.
cfvuProFileId :: Lens' CreativeFieldValuesUpdate Int64
cfvuProFileId
  = lens _cfvuProFileId
      (\ s a -> s{_cfvuProFileId = a})
      . _Coerce
-- | Multipart request metadata.
cfvuPayload :: Lens' CreativeFieldValuesUpdate CreativeFieldValue
cfvuPayload
  = lens _cfvuPayload (\ s a -> s{_cfvuPayload = a})
-- | Wire the request to the servant route above; requires the
-- dfatrafficking OAuth scope and returns the updated 'CreativeFieldValue'.
instance GoogleRequest CreativeFieldValuesUpdate
         where
        type Rs CreativeFieldValuesUpdate =
             CreativeFieldValue
        type Scopes CreativeFieldValuesUpdate =
             '["https://www.googleapis.com/auth/dfatrafficking"]
        requestClient CreativeFieldValuesUpdate'{..}
          = go _cfvuProFileId _cfvuCreativeFieldId
              (Just AltJSON)
              _cfvuPayload
              dFAReportingService
          where go
                  = buildClient
                      (Proxy :: Proxy CreativeFieldValuesUpdateResource)
                      mempty
| rueshyna/gogol | gogol-dfareporting/gen/Network/Google/Resource/DFAReporting/CreativeFieldValues/Update.hs | mpl-2.0 | 4,258 | 0 | 16 | 964 | 505 | 297 | 208 | 81 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Logging.Organizations.Logs.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes all the log entries in a log for the _Default Log Bucket. The
-- log reappears if it receives new entries. Log entries written shortly
-- before the delete operation might not be deleted. Entries received after
-- the delete operation with a timestamp before the operation will be
-- deleted.
--
-- /See:/ <https://cloud.google.com/logging/docs/ Cloud Logging API Reference> for @logging.organizations.logs.delete@.
module Network.Google.Resource.Logging.Organizations.Logs.Delete
(
-- * REST Resource
OrganizationsLogsDeleteResource
-- * Creating a Request
, organizationsLogsDelete
, OrganizationsLogsDelete
-- * Request Lenses
, oldXgafv
, oldUploadProtocol
, oldAccessToken
, oldUploadType
, oldLogName
, oldCallback
) where
import Network.Google.Logging.Types
import Network.Google.Prelude
-- | A resource alias for @logging.organizations.logs.delete@ method which the
-- 'OrganizationsLogsDelete' request conforms to.
type OrganizationsLogsDeleteResource =
"v2" :>
Capture "logName" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Delete '[JSON] Empty
-- | Deletes all the log entries in a log for the _Default Log Bucket. The
-- log reappears if it receives new entries. Log entries written shortly
-- before the delete operation might not be deleted. Entries received after
-- the delete operation with a timestamp before the operation will be
-- deleted.
--
-- /See:/ 'organizationsLogsDelete' smart constructor.
data OrganizationsLogsDelete =
OrganizationsLogsDelete'
{ _oldXgafv :: !(Maybe Xgafv)
, _oldUploadProtocol :: !(Maybe Text)
, _oldAccessToken :: !(Maybe Text)
, _oldUploadType :: !(Maybe Text)
, _oldLogName :: !Text
, _oldCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'OrganizationsLogsDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'oldXgafv'
--
-- * 'oldUploadProtocol'
--
-- * 'oldAccessToken'
--
-- * 'oldUploadType'
--
-- * 'oldLogName'
--
-- * 'oldCallback'
organizationsLogsDelete
    :: Text -- ^ 'oldLogName'
    -> OrganizationsLogsDelete
organizationsLogsDelete logName =
  OrganizationsLogsDelete'
    { _oldXgafv = Nothing
    , _oldUploadProtocol = Nothing
    , _oldAccessToken = Nothing
    , _oldUploadType = Nothing
    , _oldLogName = logName
    , _oldCallback = Nothing
    }
-- | V1 error format.
oldXgafv :: Lens' OrganizationsLogsDelete (Maybe Xgafv)
oldXgafv = lens _oldXgafv (\ s a -> s{_oldXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
oldUploadProtocol :: Lens' OrganizationsLogsDelete (Maybe Text)
oldUploadProtocol
= lens _oldUploadProtocol
(\ s a -> s{_oldUploadProtocol = a})
-- | OAuth access token.
oldAccessToken :: Lens' OrganizationsLogsDelete (Maybe Text)
oldAccessToken
= lens _oldAccessToken
(\ s a -> s{_oldAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
oldUploadType :: Lens' OrganizationsLogsDelete (Maybe Text)
oldUploadType
= lens _oldUploadType
(\ s a -> s{_oldUploadType = a})
-- | Required. The resource name of the log to delete:
-- projects\/[PROJECT_ID]\/logs\/[LOG_ID]
-- organizations\/[ORGANIZATION_ID]\/logs\/[LOG_ID]
-- billingAccounts\/[BILLING_ACCOUNT_ID]\/logs\/[LOG_ID]
-- folders\/[FOLDER_ID]\/logs\/[LOG_ID][LOG_ID] must be URL-encoded. For
-- example, \"projects\/my-project-id\/logs\/syslog\",
-- \"organizations\/123\/logs\/cloudaudit.googleapis.com%2Factivity\".For
-- more information about log names, see LogEntry.
oldLogName :: Lens' OrganizationsLogsDelete Text
oldLogName
= lens _oldLogName (\ s a -> s{_oldLogName = a})
-- | JSONP
oldCallback :: Lens' OrganizationsLogsDelete (Maybe Text)
oldCallback
= lens _oldCallback (\ s a -> s{_oldCallback = a})
instance GoogleRequest OrganizationsLogsDelete where
type Rs OrganizationsLogsDelete = Empty
type Scopes OrganizationsLogsDelete =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/logging.admin"]
requestClient OrganizationsLogsDelete'{..}
= go _oldLogName _oldXgafv _oldUploadProtocol
_oldAccessToken
_oldUploadType
_oldCallback
(Just AltJSON)
loggingService
where go
= buildClient
(Proxy :: Proxy OrganizationsLogsDeleteResource)
mempty
| brendanhay/gogol | gogol-logging/gen/Network/Google/Resource/Logging/Organizations/Logs/Delete.hs | mpl-2.0 | 5,619 | 0 | 15 | 1,162 | 713 | 423 | 290 | 102 | 1 |
-- eidolon -- A simple gallery in Haskell and Yesod
-- Copyright (C) 2015 Amedeo Molnár
--
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU Affero General Public License as published
-- by the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU Affero General Public License for more details.
--
-- You should have received a copy of the GNU Affero General Public License
-- along with this program. If not, see <http://www.gnu.org/licenses/>.
module Helper where
import Prelude
import Yesod.Static
import Model
import Control.Applicative
import Data.Maybe
import Data.List as L
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import qualified Data.Text as T
import Data.Time
import Data.Char
import Database.Persist
import System.Random
import System.Locale
import Yesod
import Numeric (readHex, showHex)
import Network.Mail.Mime
import Text.Blaze.Html.Renderer.Utf8
import Graphics.ImageMagick.MagickWand
import Filesystem.Path.CurrentOS
-- | Convert a textual decimal ID into a persistent 'UserId'.
-- Calls 'error' with the reported message when the key cannot be built.
-- NOTE(review): 'read' is partial — non-numeric input crashes; confirm
-- callers only pass validated IDs.
getUserIdFromText :: T.Text -> UserId
getUserIdFromText tempUserId =
  either (error . T.unpack) id (keyFromValues [persistVal])
  where
    persistVal = PersistInt64 $ fromIntegral $ read $ T.unpack tempUserId
-- | Render a persistent key as its decimal 'T.Text' representation.
-- Keys that are not a single 'PersistInt64' value render as empty text.
extractKey :: PersistEntity record => Key record -> T.Text
extractKey key =
  case keyToValues key of
    [PersistInt64 n] -> T.pack (show n)
    _ -> ""
-- | Decode a hexadecimal string into a lazy 'BL.ByteString'.
-- Characters are consumed two at a time; a trailing odd character is
-- silently ignored. An invalid hex pair causes a runtime pattern-match
-- failure when the corresponding byte is forced (same as the original).
fromHex :: String -> BL.ByteString
fromHex = BL.pack . pairsToWords
  where
    pairsToWords (hi:lo:rest) =
      let (w, _):_ = readHex [hi, lo]
      in w : pairsToWords rest
    pairsToWords _ = []
-- strict variant
-- | Strict variant of 'fromHex'.
fromHex' :: String -> B.ByteString
fromHex' = BL.toStrict . fromHex
-- | Render a strict 'B.ByteString' as lowercase hexadecimal 'T.Text',
-- exactly two characters per byte (zero-padded).
toHex :: B.ByteString -> T.Text
toHex = T.pack . concatMap byteToHex . B.unpack
  where
    byteToHex w =
      let digits = showHex w ""
      in if length digits < 2 then '0' : digits else digits
-- | Produce a fresh 16-character random token.
makeRandomToken :: IO T.Text
makeRandomToken = do
  gen <- newStdGen
  return $ T.pack $ take 16 $ randoms gen
-- | Generate a fresh 8-byte salt.
--
-- Uses 'newStdGen' (which splits and updates the global generator) in
-- place of the previous 'getStdGen': 'getStdGen' leaves the global
-- generator untouched, so successive calls produced the *same* salt
-- within one process run — a serious flaw for salting. The sibling
-- 'makeRandomToken' already uses 'newStdGen'.
generateSalt :: IO B.ByteString
generateSalt = (B.pack . take 8 . randoms) <$> newStdGen
-- | Yesod form field parsing a space-separated list of tags.
tagField :: Monad m => Field m [T.Text]
tagField = Field
  { fieldParse = \rawVals _ -> do
      case rawVals of
        -- One raw value: split on spaces and drop empty tags.
        -- NOTE(review): @L.null [x]@ is always 'False' (the literal [x]
        -- has one element), so the 'True' branch below is dead code;
        -- presumably @T.null x@ was intended — confirm before changing,
        -- since fixing it would alter the empty-input result.
        [x] -> case L.null [x] of
          False -> return $ Right $ Just $ removeItem "" $ T.splitOn " " x
          True -> return $ Right $ Nothing
        _ -> return $ Left $ error "unexpected tag list"
    -- Render as a single text input holding the space-joined tags.
  , fieldView = \idAttr nameAttr _ eResult _ ->
      [whamlet|<input id=#{idAttr} type="text" name=#{nameAttr} value=#{either id (T.intercalate " ") eResult}>|]
  , fieldEnctype = UrlEncoded
  }
-- | Yesod form field parsing a space-separated list of user names into
-- 'UserId's via the supplied (name, id) association list.
userField :: Monad m => [(T.Text, UserId)] -> Field m [UserId]
userField users = Field
  { fieldParse = \rawVals _ -> do
      case rawVals of
        [x] -> case x == "" of
          False ->
            -- clean = removeItem "" $ T.splitOn " " x
            -- Resolve every non-empty space-separated name.
            let ids = map (\u -> lookup u users) (removeItem "" $ T.splitOn " " x)
            in case Nothing `elem` ids of
              -- All names resolved: deduplicate ('nub') and unwrap.
              -- 'fromJust' is safe here: this branch guarantees no 'Nothing'.
              False -> return $ Right $ Just $ nub $ map fromJust ids
              True -> return $ Left $ error "Invalid username list"
          -- Empty input parses as an empty user list (not "missing").
          True -> return $ Right $ Just $ []
        _ -> return $ Left $ error "unexpected username list"
    -- Render the selected users back as space-separated names.
  , fieldView = \idAttr nameAttr _ eResult _ ->
      [whamlet|<input id=#{idAttr} type="text" name=#{nameAttr} value=#{either id (getUsersFromResult users) eResult}>|]
  , fieldEnctype = UrlEncoded
  }
-- | Render a list of IDs as their space-separated names, using the
-- (name, id) association list; unknown IDs render as empty strings.
getUsersFromResult :: Eq b => [(T.Text, b)] -> [b] -> T.Text
getUsersFromResult users res =
  T.intercalate " " [ fromMaybe "" (reverseLookup uid users) | uid <- res ]
-- | Render an HTML body and hand the message to the local sendmail
-- via 'renderSendMail'.
sendMail :: MonadIO m => T.Text -> T.Text -> Html -> m ()
sendMail toEmail subject body =
  liftIO $ renderSendMail
    Mail
      { mailFrom = Address Nothing "noreply" -- TODO: set sender Address
      , mailTo = [Address Nothing toEmail]
      , mailCc = []
      , mailBcc = []
      , mailHeaders = [("Subject", subject)]
        -- Single alternative: one HTML part with the rendered body.
      , mailParts = [[Part
        { partType = "text/html; charset=utf-8"
        , partEncoding = None
        , partFilename = Nothing
        , partHeaders = []
        , partContent = renderHtml body
        }]]
      }
-- | 16 random bytes rendered as a 32-character hex 'T.Text'.
generateString :: IO T.Text
generateString = fmap (toHex . B.pack . take 16 . randoms) newStdGen
-- | Remove every occurrence of a value from a list.
--
-- Behaviorally identical to the previous hand-rolled recursion, but
-- expressed with the standard-library 'filter'.
removeItem :: Eq a => a -> [a] -> [a]
removeItem x = filter (/= x)
-- | Find the first key whose associated value equals the query value.
--
-- Returns 'Nothing' when no pair matches. The previous version had no
-- equation for the empty list, so any unsuccessful search ended in a
-- pattern-match crash instead of returning 'Nothing'; it also carried
-- an unreachable final 'otherwise' guard (@s == y@ and @s /= y@ already
-- cover all cases).
reverseLookup :: Eq b => b -> [(a, b)] -> Maybe a
reverseLookup _ [] = Nothing
reverseLookup s ((x, y):zs)
  | s == y = Just x
  | otherwise = reverseLookup s zs
-- | Whitelist of image MIME types; presumably checked against uploaded
-- media at the call sites — confirm usage there.
acceptedTypes :: [T.Text]
acceptedTypes = ["image/jpeg", "image/jpg", "image/png", "image/x-ms-bmp", "image/x-bmp", "image/bmp", "image/tiff", "image/tiff-fx", "image/svg+xml", "image/gif"]
-- | Format a time as an ISO 8601 string, appending a numeric zone
-- offset with a colon (e.g. @+01:30@) when "%z" yields one, or @Z@
-- otherwise.
iso8601 :: FormatTime t => t -> String
iso8601 time =
  formatTime defaultTimeLocale (iso8601DateFormat $ Just "%H:%M:%S") time ++
  zone
  -- "%z" produces offsets like "+0130": sig is the sign, h1/h2 the hour
  -- digits, m1 the first minute digit, and m2 the REST of the string
  -- (normally just the last minute digit). The match below re-inserts
  -- the colon between hours and minutes.
  where zone = case formatTime defaultTimeLocale "%z" time of
          (sig:digits@(h1:h2:m1:m2))
            | sig `elem` "+-" &&
              all isDigit digits ->
              sig:h1:h2:':':m1:m2
          _ ->
            "Z"
-- | Interpret a 'LocalTime' as belonging to the given zone, producing
-- a 'ZonedTime' in that same zone.
localTimeToZonedTime :: TimeZone -> LocalTime -> ZonedTime
localTimeToZonedTime tz localT =
  utcToZonedTime tz (localTimeToUTC tz localT)
-- | Format a time as an RFC 822 date string.
rfc822 :: FormatTime t => t -> String
rfc822 = formatTime defaultTimeLocale rfc822DateFormat
-- | Static-subsite route for a medium's full-size image: 'mediumPath'
-- minus its first two path segments.
mediumStaticImageRoute :: Medium -> Route Static
mediumStaticImageRoute medium =
  let pieces = drop 2 $ T.splitOn "/" $ T.pack $ mediumPath medium
  in StaticRoute pieces []
-- | Static-subsite route for a medium's thumbnail: 'mediumThumb'
-- minus its first two path segments.
mediumStaticThumbRoute :: Medium -> Route Static
mediumStaticThumbRoute medium =
  let pieces = drop 2 $ T.splitOn "/" $ T.pack $ mediumThumb medium
  in StaticRoute pieces []
-- | Thumbnail width: 230 when there is no source image, otherwise the
-- actual pixel width of the image at the given path, queried through
-- ImageMagick.
-- NOTE(review): the commented-out signature below says @m (Maybe Int)@
-- but both branches return a bare numeric width — confirm intended type.
--getThumbWidth :: MonadIO m => Maybe String -> m (Maybe Int)
-- Pattern matching on the 'Maybe' replaces the old '== Nothing' guard
-- plus 'fromJust', removing the partial function and the Eq constraint.
getThumbWidth Nothing = pure 230
getThumbWidth (Just imgPath) = liftIO $ withMagickWandGenesis $ do
  (_, w) <- magickWand
  readImage w (decodeString imgPath)
  getImageWidth w
-- | Form field accepting one or more uploaded files in a single input.
multiFileField :: (Monad m, RenderMessage (HandlerSite m) FormMessage) => Field m [FileInfo]
multiFileField = Field
    { fieldParse = \_ files -> return $
        case files of
          [] -> Right Nothing
          -- NOTE(review): the 'file' binding is unused; the whole
          -- 'files' list is returned whenever at least one upload exists.
          file:_ -> Right $ Just files
    , fieldView = \id' name attrs _ isReq -> toWidget [hamlet|
  <input id=#{id'} name=#{name} *{attrs} type=file :isReq:required multiple="" enctype="multipart/form-data">
  |]
    , fieldEnctype = Multipart
    }
| Mic92/eidolon | Helper.hs | agpl-3.0 | 6,907 | 0 | 24 | 1,741 | 2,021 | 1,071 | 950 | -1 | -1 |
-- | Configuration for the code generator.
module Data.GI.CodeGen.Config
( Config(..)
, CodeGenFlags(..)
) where
import Data.Text (Text)
import Data.GI.CodeGen.Overrides (Overrides)
-- | Flags controlling different aspects of the code generator.
data CodeGenFlags = CodeGenFlags {
-- | Whether to generate overloaded properties.
cgOverloadedProperties :: Bool
-- | Whether to generate support for overloaded signals.
, cgOverloadedSignals :: Bool
-- | Whether to generate support for overloaded methods.
, cgOverloadedMethods :: Bool
} deriving Show
data Config = Config {
-- | Name of the module being generated.
modName :: Maybe Text,
-- | Whether to print extra info.
verbose :: Bool,
-- | List of loaded overrides for the code generator.
overrides :: Overrides,
-- | List of flags for the code generator.
cgFlags :: CodeGenFlags
} deriving Show
| hamishmack/haskell-gi | lib/Data/GI/CodeGen/Config.hs | lgpl-2.1 | 978 | 0 | 9 | 259 | 124 | 83 | 41 | 16 | 0 |
{-# LANGUAGE Haskell2010 #-}
{-|
Copyright: Foo,
Bar,
Baz
The module description
-}
-- The module header can start with newlines. They are not taken into account for the indentation level
module Bug280 where
-- Sole visible binding; presumably a placeholder so this haddock test
-- module has a non-empty body — the module header above is the subject.
x = ""
| haskell/haddock | html-test/src/Bug280.hs | bsd-2-clause | 239 | 0 | 4 | 61 | 12 | 9 | 3 | 3 | 1 |
-- | A Simple section just adds absolute and relative influences
-- on a score, which is presumed to get full credit if no dings
-- are given.
--
-- Expects the section header to specify the section maximum
--
-- Accepts a number or number-followed-by-%-sign for its ding arg.
module Grade.Score.Simple (sectySimple) where
import Numeric
import qualified Text.Trifecta as T
import Grade.Types (ExSecCallback(..), SecCallback(..))
-- | A score modifier: an absolute point delta and a relative
-- (fractional) delta.
data Score = S Double Double
  deriving (Show)
-- Scores form a monoid under componentwise addition; 'mempty' is the
-- "no modification" score.
instance Monoid Score where
  mempty = S 0.0 0.0
  mappend (S la lr) (S ra rr) = S (la + ra) (lr + rr)
efid :: (T.TokenParsing f) => f Double
efid = (either fromIntegral id) <$> T.integerOrDouble
parseDingScore :: (T.TokenParsing f) => f (Score,())
parseDingScore = (\x -> (x,())) <$> T.choice
[ -- A number followed by a '%' sign is a relative modifier
T.try ( ((\n -> S 0.0 (n/100.0)) <$> efid) <* T.symbolic '%' )
-- A number by itself is an absolute modifier
, (\n -> S n 0.0) <$> efid
]
-- | Net point impact of a modifier against a section maximum: the
-- absolute part plus the relative part scaled by the maximum.
impact :: Double -> Score -> Double
impact secMax (S absolute relative) = absolute + secMax * relative
printfn :: Double -> () -> () -> Score -> Maybe String
printfn sm () () s = Just $ case s of
(S 0.0 0.0) -> "0"
(S 0.0 r) -> (p (r*100)) ++ "% == " ++ si
(S _ 0.0) -> si
(S a r ) -> (p a) ++ " and " ++ (p r) ++ "% == " ++ si
where
si = p $ impact sm s
p x = showFFloat (Just 1) x ""
scorefn :: Double -> () -> () -> Score -> Either String Double
scorefn sm () () s = Right $ sm + impact sm s
sectySimple_ :: (T.TokenParsing f) => f (SecCallback f () () Score)
sectySimple_ = (\smax -> SC (Nothing, pure ())
parseDingScore
(printfn smax)
(scorefn smax)
(\_ -> smax))
<$> efid
sectySimple :: (T.TokenParsing f) => f (ExSecCallback f)
sectySimple = ExSecCB <$> sectySimple_
| nwf/grade | lib/Grade/Score/Simple.hs | bsd-2-clause | 2,014 | 0 | 17 | 647 | 723 | 387 | 336 | 37 | 4 |
-- | Umbrella module re-exporting the public WaveSim API.
module WaveSim
   (module WaveSim.Types,
    module WaveSim.WaveSim,
    module WaveSim.Graphics) where
import WaveSim.Types
import WaveSim.WaveSim
import WaveSim.Graphics
| jethomas/WaveSim | src/WaveSim.hs | bsd-3-clause | 173 | 0 | 5 | 27 | 39 | 25 | 14 | 7 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Data.Foldable (fold, foldMap)
import Data.Monoid
import Prelude hiding (unlines)
import System.Environment
import System.IO
import Data.List.Split
import qualified Data.ByteString.Lazy.Char8 as L
import qualified Data.ByteString.Builder as L
-- | Entry point: @ghp FACTOR PATH@ reads the profile file at PATH and
-- writes a version thinned by FACTOR to stdout.
--
-- Previously the arguments were bound with the irrefutable pattern
-- @factor:path:_ <- getArgs@, which crashed with an opaque
-- pattern-match error when fewer than two arguments were given; now a
-- usage message is printed instead.
-- NOTE(review): @read factor@ is still partial on non-numeric input.
main :: IO ()
main = do
  args <- getArgs
  case args of
    factor:path:_ -> do
      text <- L.readFile path
      L.hPutBuilder stdout $ thinDownBy (read factor) text
    _ -> hPutStrLn stderr "usage: ghp FACTOR PATH"
-- | Keep the first sample of every group of @factor@ samples, dropping
-- the rest. 'chunksOf' yields non-empty chunks for a positive @factor@,
-- so the 'head' is safe in that case; behavior for a factor <= 0 is
-- unspecified — callers are assumed to pass a positive factor (TODO
-- confirm).
thinDownBy :: Int -> L.ByteString -> L.Builder
thinDownBy factor = fold . map head . chunksOf factor . splitSample
-- | Split the profile text into samples, one 'L.Builder' per sample;
-- a sample begins at each line starting with \"BEGIN_SAMPLE\".
splitSample :: L.ByteString -> [L.Builder]
splitSample = map unlines . split byBeginSample . L.lines
  where
    -- Re-join lines, restoring the newline stripped by 'L.lines'.
    unlines = foldMap (\text -> L.lazyByteString text <> "\n")
    -- 'keepDelimsL' keeps the delimiter line at the head of the chunk
    -- that follows it.
    byBeginSample = keepDelimsL $ whenElt $ L.isPrefixOf "BEGIN_SAMPLE"
| maoe/ghc-heap-prof | bin/ghp.hs | bsd-3-clause | 797 | 0 | 12 | 125 | 257 | 139 | 118 | 21 | 1 |
import Data.List
import Data.Char
-- | Count the "triangle words" in a comma-separated, double-quoted word
-- list (Project Euler 42): a word is a triangle word when the sum of
-- its letter values (A=1..Z=26) is a triangle number n(n+1)/2.
solve :: String -> Int
solve input = length [ w | w <- wordList, score w `elem` triangles ]
  where
    -- Runs of commas vs. non-commas; lone commas fail the length
    -- filter, quoted words (length >= 2) survive.
    quoted = filter (\g -> length g >= 2)
                    (groupBy (\a b -> (a == ',') == (b == ',')) input)
    -- Strip the surrounding quotes; sorting does not change the count
    -- but mirrors the original pipeline.
    wordList = sort (map (tail . init) quoted)
    score w = sum [ ord c - ord 'A' + 1 | c <- w ]
    triangles = [ n * (n + 1) `div` 2 | n <- [1 .. 100] ]
-- Read the quoted, comma-separated word list and print the count.
main = readFile "input/p042_words.txt" >>= (print . solve)
| foreverbell/project-euler-solutions | src/42.hs | bsd-3-clause | 524 | 0 | 15 | 148 | 253 | 137 | 116 | 10 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# OPTIONS_GHC -fno-warn-orphans -Wwarn #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE OverloadedStrings #-}
module Stack.StoreSpec where
import qualified Data.ByteString as BS
import Data.Containers (mapFromList, setFromList)
import Data.Sequences (fromList)
import Data.Store.Internal (StaticSize (..))
import Data.Store.TH
import qualified Data.Vector.Unboxed as UV
import GHC.TypeLits (KnownNat)
import Language.Haskell.TH
import Language.Haskell.TH.ReifyMany
import Stack.Prelude
import Stack.Types.Build
import Stack.Types.PackageDump
import Test.Hspec
import Test.SmallCheck.Series
-- NOTE: these were copied from Data.Store. Should probably be moved to
-- smallcheck.
instance (Monad m, Serial m k, Serial m a, Ord k) => Serial m (Map k a) where
series = fmap mapFromList series
instance (Monad m, Serial m k, Serial m a, Eq k, Hashable k) => Serial m (HashMap k a) where
series = fmap mapFromList series
instance Monad m => Serial m Text where
series = fmap fromList series
instance (Monad m, Serial m a, UV.Unbox a) => Serial m (UV.Vector a) where
series = fmap fromList series
instance Monad m => Serial m BS.ByteString where
series = fmap BS.pack series
instance (Monad m, Serial m a, Ord a) => Serial m (Set a) where
series = fmap setFromList series
instance (Monad m, KnownNat n) => Serial m (StaticSize n BS.ByteString)
-- | Ensure 'minBound' and 'maxBound' appear in the generated values:
-- prepend 'minBound' when absent, then prepend 'maxBound' when absent
-- and distinct from 'minBound'.
addMinAndMaxBounds :: forall a. (Bounded a, Eq a) => [a] -> [a]
addMinAndMaxBounds xs = minPrefix ++ withMax
  where
    minPrefix
      | (minBound :: a) `elem` xs = []
      | otherwise = [minBound]
    withMax
      | (maxBound :: a) `notElem` xs && (maxBound :: a) /= minBound = maxBound : xs
      | otherwise = xs
$(do let ns = [ ''Int64, ''Word64, ''Word, ''Word8
]
f n = [d| instance Monad m => Serial m $(conT n) where
series = generate (\_ -> addMinAndMaxBounds [0, 1]) |]
concat <$> mapM f ns)
$(do let tys = [ ''InstalledCacheInner
-- FIXME , ''PackageCache
-- FIXME , ''LoadedSnapshot
, ''BuildCache
, ''ConfigCache
]
ns <- reifyManyWithoutInstances ''Serial tys (`notElem` [''UV.Vector])
let f n = [d| instance Monad m => Serial m $(conT n) |]
concat <$> mapM f ns)
verbose :: Bool
verbose = False
spec :: Spec
spec = do
describe "Roundtrips binary formats" $ do
$(smallcheckManyStore False 6
[ [t| InstalledCacheInner |]
, [t| BuildCache |]
])
-- Blows up with > 5
{-
$(smallcheckManyStore False 5
[ -- FIXME [t| PackageCache |]
-- FIXME , [t| LoadedSnapshot |]
])
-}
-- Blows up with > 4
$(smallcheckManyStore False 4
[ [t| ConfigCache |]
])
| MichielDerhaeg/stack | src/test/Stack/StoreSpec.hs | bsd-3-clause | 3,054 | 0 | 14 | 869 | 794 | 446 | 348 | 60 | 3 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE FlexibleContexts, FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE LambdaCase #-}
{- |
Module : Verifier.SAW.SCTypeCheck
Copyright : Galois, Inc. 2012-2015
License : BSD3
Maintainer : jhendrix@galois.com
Stability : experimental
Portability : non-portable (language extensions)
-}
module Verifier.SAW.SCTypeCheck
( scTypeCheck
, scTypeCheckError
, scTypeCheckComplete
, scTypeCheckCompleteError
, scTypeCheckWHNF
, scConvertible
, scCheckSubtype
, TCError(..)
, prettyTCError
, throwTCError
, TCM
, runTCM
, askCtx
, askModName
, withVar
, withCtx
, atPos
, LiftTCM(..)
, TypedTerm(..)
, TypeInfer(..)
, typeCheckWHNF
, typeInferCompleteWHNF
, TypeInferCtx(..)
, typeInferCompleteInCtx
, checkSubtype
, ensureSort
, applyPiTyped
, compileRecursor
) where
import Control.Applicative
import Control.Monad.Except
import Control.Monad.State.Strict
import Control.Monad.Reader
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Text (Text)
#if !MIN_VERSION_base(4,8,0)
import Data.Traversable (Traversable(..))
#endif
import qualified Data.Vector as V
import Prelude hiding (mapM, maximum)
import Verifier.SAW.Conversion (natConversions)
import Verifier.SAW.Recognizer
import Verifier.SAW.Rewriter
import Verifier.SAW.SharedTerm
import Verifier.SAW.TypedAST
import Verifier.SAW.Module
import Verifier.SAW.Position
-- | The state for a type-checking computation = a memoization table
type TCState = Map TermIndex Term
-- | The monad for type checking and inference, which:
--
-- * Maintains a 'SharedContext', the name of the current module, and a variable
-- context, where the latter assigns types to the deBruijn indices in scope;
--
-- * Memoizes the most general type inferred for each expression; AND
--
-- * Can throw 'TCError's
type TCM a =
ReaderT (SharedContext, Maybe ModuleName, [(LocalName, Term)])
(StateT TCState (ExceptT TCError IO)) a
-- | Run a type-checking computation in the given environment, starting
-- from an empty memoization table.
runTCM ::
  TCM a -> SharedContext -> Maybe ModuleName -> [(LocalName, Term)] ->
  IO (Either TCError a)
runTCM action sc mnm ctx =
  runExceptT (evalStateT (runReaderT action (sc, mnm, ctx)) Map.empty)
-- | Read the current typing context
askCtx :: TCM [(LocalName, Term)]
askCtx = (\(_,_,ctx) -> ctx) <$> ask
-- | Read the current module name
askModName :: TCM (Maybe ModuleName)
askModName = (\(_,mnm,_) -> mnm) <$> ask
-- | Run a type-checking computation in a typing context extended with a new
-- variable with the given type. This throws away the memoization table while
-- running the sub-computation, as memoization tables are tied to specific sets
-- of bindings.
--
-- NOTE: the type given for the variable should be in WHNF, so that we do not
-- have to normalize the types of variables each time we see them.
withVar :: LocalName -> Term -> TCM a -> TCM a
withVar x tp m =
  -- Any error escaping the sub-computation is wrapped with the binding
  -- that was in scope, for error reporting.
  flip catchError (throwError . ErrorCtx x tp) $
  do saved_table <- get
     -- Memoized types are only valid relative to a fixed context, so
     -- the table is cleared while running under the extended context...
     put Map.empty
     a <- local (\(sc,mnm,ctx) -> (sc, mnm, (x,tp):ctx)) m
     -- ...and the outer context's table is restored afterwards.
     put saved_table
     return a
-- | Run a type-checking computation in a typing context extended by a
-- list of variables and their types. See 'withVar'.
withCtx :: [(LocalName, Term)] -> TCM a -> TCM a
withCtx ctx m = foldr (uncurry withVar) m ctx
-- | Run a type-checking computation @m@ and tag any error it throws with the
-- 'ErrorTerm' constructor
withErrorTerm :: Term -> TCM a -> TCM a
withErrorTerm tm m = catchError m (throwError . ErrorTerm tm)
-- | Lift @withErrorTerm@ to `TermF Term`
withErrorTermF :: TermF Term -> TCM a -> TCM a
withErrorTermF tm = withErrorTerm (Unshared tm)
-- | Lift @withErrorTerm@ to `TermF TypedTerm`
withErrorTypedTermF :: TermF TypedTerm -> TCM a -> TCM a
withErrorTypedTermF tm = withErrorTermF (fmap typedVal tm)
-- | Run a type-checking computation @m@ and tag any error it throws with the
-- given position, using the 'ErrorPos' constructor, unless that error is
-- already tagged with a position
atPos :: Pos -> TCM a -> TCM a
atPos p m = catchError m (throwError . ErrorPos p)
-- | Typeclass for lifting 'IO' computations that take a 'SharedContext' to
-- 'TCM' computations
class LiftTCM a where
type TCMLifted a
liftTCM :: (SharedContext -> a) -> TCMLifted a
instance LiftTCM (IO a) where
type TCMLifted (IO a) = TCM a
liftTCM f =
do sc <- (\(sc,_,_) -> sc) <$> ask
liftIO (f sc)
instance LiftTCM b => LiftTCM (a -> b) where
type TCMLifted (a -> b) = a -> TCMLifted b
liftTCM f a = liftTCM (\sc -> f sc a)
-- | Errors that can occur during type-checking
data TCError
  = NotSort Term -- ^ The term is not a sort
  | NotFuncTypeInApp TypedTerm TypedTerm
    -- ^ Application whose head does not have a function type
    -- (head term, argument)
  | NotTupleType Term -- ^ Tuple projection on a term of non-tuple type
  | BadTupleIndex Int Term -- ^ Bad tuple index for the given type
  | NotStringLit Term -- ^ Record selector that is not a string literal
  | NotRecordType TypedTerm -- ^ Record projection on a non-record type
  | BadRecordField FieldName Term -- ^ Bad record field for the given type
  | DanglingVar Int -- ^ Dangling (unbound) deBruijn variable index
  | UnboundName Text -- ^ Unbound name
  | SubtypeFailure TypedTerm Term
    -- ^ Inferred type is not a subtype of the expected type
  | EmptyVectorLit -- ^ Empty vector literal
  | NoSuchDataType Ident -- ^ Unknown data type
  | NoSuchCtor Ident -- ^ Unknown constructor
  | NotFullyAppliedRec (PrimName Term) -- ^ Recursor not fully applied
  | BadParamsOrArgsLength Bool (PrimName Term) [Term] [Term]
    -- ^ Wrong number of parameters or arguments; the 'Bool' is 'True'
    -- for a datatype application and 'False' for a constructor
  | BadRecursorApp Term [Term] Term
    -- ^ Type mismatch in a recursor application
  | BadConstType NameInfo Term Term
    -- ^ A constant whose type does not match its declared type
  | MalformedRecursor Term String -- ^ Malformed recursor, with a reason
  | DeclError Text String -- ^ Malformed declaration, with a reason
  | ErrorPos Pos TCError -- ^ Tags an error with a source position
  | ErrorCtx LocalName Term TCError
    -- ^ Tags an error with a variable binding (added by 'withVar')
  | ErrorTerm Term TCError
    -- ^ Tags an error with the term being checked ('withErrorTerm')
  | ExpectedRecursor TypedTerm -- ^ Expected a recursor value
-- | Throw a type-checking error
throwTCError :: TCError -> TCM a
throwTCError = throwError
type PPErrM = Reader ([LocalName], Maybe Pos)
-- | Pretty-print a type-checking error
prettyTCError :: TCError -> [String]
prettyTCError e = runReader (helper e) ([], Nothing) where
ppWithPos :: [PPErrM String] -> PPErrM [String]
ppWithPos str_ms =
do strs <- mapM id str_ms
(_, maybe_p) <- ask
case maybe_p of
Just p -> return (ppPos p : strs)
Nothing -> return strs
helper :: TCError -> PPErrM [String]
helper (NotSort ty) = ppWithPos [ return "Not a sort" , ishow ty ]
helper (NotFuncTypeInApp f arg) =
ppWithPos [ return "Function application with non-function type"
, return "For term:"
, ishow (typedVal f)
, return "With type:"
, ishow (typedType f)
, return "To argument:"
, ishow (typedVal arg) ]
helper (NotTupleType ty) =
ppWithPos [ return "Tuple field projection with non-tuple type" ,
ishow ty ]
helper (BadTupleIndex n ty) =
ppWithPos [ return ("Bad tuple index (" ++ show n ++ ") for type")
, ishow ty ]
helper (NotStringLit trm) =
ppWithPos [ return "Record selector is not a string literal", ishow trm ]
helper (NotRecordType (TypedTerm trm tp)) =
ppWithPos [ return "Record field projection with non-record type"
, ishow tp
, return "In term:"
, ishow trm ]
helper (BadRecordField n ty) =
ppWithPos [ return ("Bad record field (" ++ show n ++ ") for type")
, ishow ty ]
helper (BadRecursorApp r ixs arg) =
ppWithPos [ return "Type mismatch in recursor application"
, ishow (Unshared $ FTermF $ RecursorApp r ixs arg)
]
helper (DanglingVar n) =
ppWithPos [ return ("Dangling bound variable index: " ++ show n)]
helper (UnboundName str) = ppWithPos [ return ("Unbound name: " ++ show str)]
helper (SubtypeFailure trm tp2) =
ppWithPos [ return "Inferred type", ishow (typedType trm),
return "Not a subtype of expected type", ishow tp2,
return "For term", ishow (typedVal trm) ]
helper EmptyVectorLit = ppWithPos [ return "Empty vector literal"]
helper (NoSuchDataType d) =
ppWithPos [ return ("No such data type: " ++ show d)]
helper (NoSuchCtor c) =
ppWithPos [ return ("No such constructor: " ++ show c) ]
helper (NotFullyAppliedRec i) =
ppWithPos [ return ("Recursor not fully applied: " ++ show i) ]
helper (BadParamsOrArgsLength is_dt ident params args) =
ppWithPos
[ return ("Wrong number of parameters or arguments to "
++ (if is_dt then "datatype" else "constructor") ++ ": "),
ishow (Unshared $ FTermF $
(if is_dt then DataTypeApp else CtorApp) ident params args)
]
helper (BadConstType n rty ty) =
ppWithPos [ return ("Type of constant " ++ show n), ishow rty
, return "doesn't match declared type", ishow ty ]
helper (MalformedRecursor trm reason) =
ppWithPos [ return "Malformed recursor",
ishow trm, return reason ]
helper (DeclError nm reason) =
ppWithPos [ return ("Malformed declaration for " ++ show nm), return reason ]
helper (ErrorPos p err) =
local (\(ctx,_) -> (ctx, Just p)) $ helper err
helper (ErrorCtx x _ err) =
local (\(ctx,p) -> (x:ctx, p)) $ helper err
helper (ErrorTerm tm err) = do
info <- ppWithPos [ return ("While typechecking term: ")
, ishow tm ]
cont <- helper err
return (info ++ cont)
helper (ExpectedRecursor ttm) =
ppWithPos [ return "Expected recursor value", ishow (typedVal ttm), ishow (typedType ttm)]
ishow :: Term -> PPErrM String
ishow tm =
-- return $ show tm
(\(ctx,_) -> " " ++ scPrettyTermInCtx defaultPPOpts ctx tm) <$> ask
instance Show TCError where
show = unlines . prettyTCError
-- | Infer the type of a term using 'scTypeCheck', calling 'fail' on failure
scTypeCheckError :: TypeInfer a => SharedContext -> a -> IO Term
scTypeCheckError sc t0 =
either (fail . unlines . prettyTCError) return =<< scTypeCheck sc Nothing t0
-- | Infer the type of a 'Term', ensuring in the process that the entire term is
-- well-formed and that all internal type annotations are correct. Types are
-- evaluated to WHNF as necessary, and the returned type is in WHNF.
scTypeCheck :: TypeInfer a => SharedContext -> Maybe ModuleName -> a ->
IO (Either TCError Term)
scTypeCheck sc mnm = scTypeCheckInCtx sc mnm []
-- | Like 'scTypeCheck', but type-check the term relative to a typing context,
-- which assigns types to free variables in the term
scTypeCheckInCtx ::
TypeInfer a => SharedContext -> Maybe ModuleName ->
[(LocalName, Term)] -> a -> IO (Either TCError Term)
scTypeCheckInCtx sc mnm ctx t0 = runTCM (typeInfer t0) sc mnm ctx
-- | Infer the type of an @a@ and complete it to a term using
-- 'scTypeCheckComplete', calling 'fail' on failure
scTypeCheckCompleteError :: TypeInfer a => SharedContext ->
Maybe ModuleName -> a -> IO TypedTerm
scTypeCheckCompleteError sc mnm t0 =
either (fail . unlines . prettyTCError) return =<<
scTypeCheckComplete sc mnm t0
-- | Infer the type of an @a@ and complete it to a term, ensuring in the
-- process that the entire term is well-formed and that all internal type
-- annotations are correct. Types are evaluated to WHNF as necessary, and the
-- returned type is in WHNF, though the returned term may not be.
scTypeCheckComplete :: TypeInfer a => SharedContext -> Maybe ModuleName ->
a -> IO (Either TCError TypedTerm)
scTypeCheckComplete sc mnm = scTypeCheckCompleteInCtx sc mnm []
-- | Like 'scTypeCheckComplete', but type-check the term relative to a typing
-- context, which assigns types to free variables in the term
scTypeCheckCompleteInCtx :: TypeInfer a => SharedContext ->
Maybe ModuleName -> [(LocalName, Term)] -> a ->
IO (Either TCError TypedTerm)
scTypeCheckCompleteInCtx sc mnm ctx t0 =
runTCM (typeInferComplete t0) sc mnm ctx
-- | Check that one type is a subtype of another using 'checkSubtype', calling
-- 'fail' on failure
scCheckSubtype :: SharedContext -> Maybe ModuleName ->
TypedTerm -> Term -> IO ()
scCheckSubtype sc mnm arg req_tp =
either (fail . unlines . prettyTCError) return =<<
runTCM (checkSubtype arg req_tp) sc mnm []
-- | A pair of a 'Term' and its type
data TypedTerm = TypedTerm { typedVal :: Term, typedType :: Term }
-- | The class of things that we can infer types of. The 'typeInfer' method
-- returns the most general (with respect to subtyping) type of its input.
class TypeInfer a where
-- | Infer the type of an @a@
typeInfer :: a -> TCM Term
-- | Infer the type of an @a@ and complete it to a 'Term'
typeInferComplete :: a -> TCM TypedTerm
-- | Infer the type of an @a@, complete it to a 'Term', and then
-- evaluate that term to WHNF (the inferred type is kept as-is).
typeInferCompleteWHNF :: TypeInfer a => a -> TCM TypedTerm
typeInferCompleteWHNF a =
  do TypedTerm trm tp <- typeInferComplete a
     trm_whnf <- typeCheckWHNF trm
     return (TypedTerm trm_whnf tp)
-- | Perform type inference on a context, i.e., a list of variable names and
-- their associated types. The type @var@ gives the type of variable names,
-- while @a@ is the type of types. This will give us 'Term's for each type, as
-- well as their 'Sort's, since the type of any type is a 'Sort'.
class TypeInferCtx var a where
  typeInferCompleteCtx :: [(var,a)] -> TCM [(LocalName, Term, Sort)]
instance TypeInfer a => TypeInferCtx LocalName a where
  typeInferCompleteCtx [] = return []
  typeInferCompleteCtx ((x,tp):ctx) =
    do typed_tp <- typeInferComplete tp
       s <- ensureSort (typedType typed_tp)
       -- Each variable is brought into scope (via 'withVar') before checking
       -- the remainder of the context, so later types may mention earlier vars
       ((x,typedVal typed_tp,s):) <$>
         withVar x (typedVal typed_tp) (typeInferCompleteCtx ctx)
-- | Perform type inference on a context via 'typeInferCompleteCtx', and then
-- run a computation in that context via 'withCtx', also passing in that context
-- to the computation
typeInferCompleteInCtx ::
  TypeInferCtx var tp => [(var, tp)] ->
  ([(LocalName, Term, Sort)] -> TCM a) -> TCM a
typeInferCompleteInCtx ctx f =
  do typed_ctx <- typeInferCompleteCtx ctx
     -- 'withCtx' only needs the name/type pairs; the sorts are dropped here
     -- but still passed through to the continuation
     withCtx (map (\(x,tp,_) -> (x,tp)) typed_ctx) (f typed_ctx)
-- Type inference for Term dispatches to type inference on TermF Term, but uses
-- memoization to avoid repeated work
instance TypeInfer Term where
  typeInfer t@(Unshared tf) = withErrorTerm t $ typeInfer tf
  typeInfer t@(STApp{ stAppIndex = i, stAppTermF = tf}) =
    do table <- get
       -- The state carries a map from shared-term indices to their types, so
       -- each shared subterm is only type-checked once
       case Map.lookup i table of
         Just x -> return x
         Nothing ->
           do x <- withErrorTerm t $ typeInfer tf
              -- Normalize before caching, so every memoized type is in WHNF
              x' <- typeCheckWHNF x
              modify (Map.insert i x')
              return x'
  typeInferComplete trm = TypedTerm trm <$> withErrorTerm trm (typeInfer trm)
-- Type inference for TermF Term dispatches to that for TermF TypedTerm by
-- calling inference on all the sub-components and extending the context inside
-- of the binding forms
instance TypeInfer (TermF Term) where
  typeInfer (FTermF ftf) =
    -- Dispatch to the TypeInfer instance for FlatTermF Term, which does some
    -- special-case handling itself
    typeInfer ftf
  typeInfer (Lambda x a rhs) =
    do a_tptrm <- typeInferCompleteWHNF a
       -- NOTE: before adding a type to the context, we want to be sure it is in
       -- WHNF, so we don't have to normalize each time we look up a var type
       rhs_tptrm <- withVar x (typedVal a_tptrm) $ typeInferComplete rhs
       typeInfer (Lambda x a_tptrm rhs_tptrm)
  typeInfer (Pi x a rhs) =
    do a_tptrm <- typeInferCompleteWHNF a
       -- NOTE: before adding a type to the context, we want to be sure it is in
       -- WHNF, so we don't have to normalize each time we look up a var type
       rhs_tptrm <- withVar x (typedVal a_tptrm) $ typeInferComplete rhs
       typeInfer (Pi x a_tptrm rhs_tptrm)
  typeInfer (Constant ec _) =
    -- NOTE: this special case is to prevent us from re-type-checking the
    -- definition of each constant, as we assume it was type-checked when it was
    -- created
    return $ ecType ec
  -- All remaining cases: infer types of the subterms, then dispatch to the
  -- TermF TypedTerm instance below
  typeInfer t = typeInfer =<< mapM typeInferComplete t
  typeInferComplete tf =
    TypedTerm <$> liftTCM scTermF tf <*> withErrorTermF tf (typeInfer tf)
-- Type inference for FlatTermF Term dispatches to that for FlatTermF TypedTerm,
-- with special cases for primitives and constants to avoid re-type-checking
-- their types as we are assuming they were type-checked when they were created
instance TypeInfer (FlatTermF Term) where
  typeInfer (Primitive pn) = return $ primType pn
  typeInfer (ExtCns ec) = return $ ecType ec
  typeInfer t = typeInfer =<< mapM typeInferComplete t
  typeInferComplete ftf =
    TypedTerm <$> liftTCM scFlatTermF ftf
              <*> withErrorTermF (FTermF ftf) (typeInfer ftf)
-- Type inference for TermF TypedTerm is the main workhorse. Intuitively, this
-- represents the case where each immediate subterm of a term is labeled with
-- its (most general) type.
-- | Type inference for @TermF TypedTerm@: each immediate subterm carries its
-- most general type, so each case just combines those types per the typing
-- rules of the calculus.
instance TypeInfer (TermF TypedTerm) where
  typeInfer (FTermF ftf) = typeInfer ftf
  typeInfer (App x@(TypedTerm _ x_tp) y) =
    applyPiTyped (NotFuncTypeInApp x y) x_tp y
  typeInfer (Lambda x (TypedTerm a a_tp) (TypedTerm _ b)) =
    -- The domain must be a type (i.e., have a sort); the lambda then has the
    -- corresponding Pi type
    void (ensureSort a_tp) >> liftTCM scTermF (Pi x a b)
  typeInfer (Pi _ (TypedTerm _ a_tp) (TypedTerm _ b_tp)) =
    do s1 <- ensureSort a_tp
       s2 <- ensureSort b_tp
       -- NOTE: the rule for type-checking Pi types is that (Pi x a b) is a Prop
       -- when b is a Prop (this is a forall proposition), otherwise it is a
       -- (Type (max (sortOf a) (sortOf b)))
       liftTCM scSort $ if s2 == propSort then propSort else max s1 s2
  typeInfer (LocalVar i) =
    do ctx <- askCtx
       if i < length ctx then
         -- The ith type in the current variable typing context is well-typed
         -- relative to the suffix of the context after it, so we have to lift it
         -- (i.e., call incVars) to make it well-typed relative to all of ctx
         liftTCM incVars 0 (i+1) (snd (ctx !! i))
         else
         -- An out-of-scope deBruijn index is a type error, not a reason to
         -- crash the whole process: report it via the TCM error channel like
         -- every other failure in this module (previously a debugging 'error'
         -- call that dumped the context)
         throwTCError (DanglingVar (i - length ctx))
  typeInfer (Constant (EC _ n (TypedTerm req_tp req_tp_sort)) (Just (TypedTerm _ tp))) =
    do void (ensureSort req_tp_sort)
       -- NOTE: we do the subtype check here, rather than call checkSubtype, so
       -- that we can throw the custom BadConstType error on failure
       ok <- isSubtype tp req_tp
       if ok then return tp else
         throwTCError $ BadConstType n tp req_tp
  typeInfer (Constant (EC _ _ (TypedTerm req_tp req_tp_sort)) Nothing) =
    -- Constant with no body, just return the EC type
    do void (ensureSort req_tp_sort)
       return req_tp
  typeInferComplete tf =
    TypedTerm <$> liftTCM scTermF (fmap typedVal tf)
              <*> withErrorTypedTermF tf (typeInfer tf)
-- Type inference for FlatTermF TypedTerm is the main workhorse for flat
-- terms. Intuitively, this represents the case where each immediate subterm of
-- a term has already been labeled with its (most general) type.
instance TypeInfer (FlatTermF TypedTerm) where
  typeInfer (Primitive ec) =
    typeCheckWHNF $ typedVal $ primType ec
  typeInfer UnitValue = liftTCM scUnitType
  typeInfer UnitType = liftTCM scSort (mkSort 0)
  typeInfer (PairValue (TypedTerm _ tx) (TypedTerm _ ty)) =
    liftTCM scPairType tx ty
  typeInfer (PairType (TypedTerm _ tx) (TypedTerm _ ty)) =
    -- Pair types live in the max of the sorts of their components
    do sx <- ensureSort tx
       sy <- ensureSort ty
       liftTCM scSort (max sx sy)
  typeInfer (PairLeft (TypedTerm _ tp)) =
    ensurePairType tp >>= \(t1,_) -> return t1
  typeInfer (PairRight (TypedTerm _ tp)) =
    ensurePairType tp >>= \(_,t2) -> return t2
  typeInfer (DataTypeApp d params args) =
    -- Look up the DataType structure, check the length of the params and args,
    -- and then apply the cached Pi type of dt to params and args
    do dt <- liftTCM scRequireDataType (primName d)
       let err = BadParamsOrArgsLength True (fmap typedVal d) (map typedVal params) (map typedVal args)
       unless (length params == length (dtParams dt) &&
               length args == length (dtIndices dt))
         (throwTCError err)
       -- NOTE: we assume dtType is already well-typed and in WHNF
       foldM (applyPiTyped err) (dtType dt) (params ++ args)
  typeInfer (CtorApp c params args) =
    -- Look up the Ctor structure, check the length of the params and args, and
    -- then apply the cached Pi type of ctor to params and args
    do ctor <- liftTCM scRequireCtor (primName c)
       let err = BadParamsOrArgsLength False (fmap typedVal c) (map typedVal params) (map typedVal args)
       unless (length params == ctorNumParams ctor &&
               length args == ctorNumArgs ctor)
         (throwTCError err)
       -- NOTE: we assume ctorType is already well-typed and in WHNF
       foldM (applyPiTyped err) (ctorType ctor) (params ++ args)
  typeInfer (RecursorType d ps motive mty) =
    do s <- inferRecursorType d ps motive mty
       liftTCM scSort s
  typeInfer (Recursor rec) =
    inferRecursor rec
  typeInfer (RecursorApp r ixs arg) =
    inferRecursorApp r ixs arg
  typeInfer (RecordType elems) =
    -- NOTE: record types are always predicative, i.e., non-Propositional, so we
    -- ensure below that we return at least sort 0
    do sorts <- mapM (ensureSort . typedType . snd) elems
       liftTCM scSort (maxSort $ mkSort 0 : sorts)
  typeInfer (RecordValue elems) =
    -- The type of a record value is the record type of its field types
    liftTCM scFlatTermF $ RecordType $
    map (\(f,TypedTerm _ tp) -> (f,tp)) elems
  typeInfer (RecordProj t@(TypedTerm _ t_tp) fld) =
    ensureRecordType (NotRecordType t) t_tp >>= \case
    (Map.lookup fld -> Just tp) -> return tp
    _ -> throwTCError $ BadRecordField fld t_tp
  typeInfer (Sort s _) = liftTCM scSort (sortOf s)
  typeInfer (NatLit _) = liftTCM scNatType
  typeInfer (ArrayValue (TypedTerm tp tp_tp) vs) =
    -- Every element of the vector must be a subtype of the element type
    do n <- liftTCM scNat (fromIntegral (V.length vs))
       _ <- ensureSort tp_tp -- TODO: do we care about the level?
       tp' <- typeCheckWHNF tp
       forM_ vs $ \v_elem -> checkSubtype v_elem tp'
       liftTCM scVecType n tp'
  typeInfer (StringLit{}) = liftTCM scStringType
  typeInfer (ExtCns ec) =
    -- FIXME: should we check that the type of ecType is a sort?
    typeCheckWHNF $ typedVal $ ecType ec
  typeInferComplete ftf =
    TypedTerm <$> liftTCM scFlatTermF (fmap typedVal ftf)
              <*> withErrorTypedTermF (FTermF ftf) (typeInfer ftf)
-- | Check that @fun_tp=Pi x a b@ and that @arg@ has type @a@, and return the
-- result of substituting @arg@ for @x@ in the result type @b@, i.e.,
-- @[arg/x]b@. This substitution could create redexes, so we call the
-- evaluator. If @fun_tp@ is not a pi type, raise the supplied error.
applyPiTyped :: TCError -> Term -> TypedTerm -> TCM Term
applyPiTyped err fun_tp arg =
  ensurePiType err fun_tp >>= \(_,arg_tp,ret_tp) ->
  do checkSubtype arg arg_tp
     -- Substitute and re-normalize, since instantiation can create redexes
     liftTCM instantiateVar 0 (typedVal arg) ret_tp >>= typeCheckWHNF
-- | Ensure that a 'Term' matches a recognizer function. The recognizer is
-- first tried on the term as-is; if that fails, the term is reduced to WHNF
-- and the recognizer is tried once more. If it still fails, throw the
-- supplied 'TCError'.
ensureRecognizer :: Recognizer Term a -> TCError -> Term -> TCM a
ensureRecognizer recog err trm =
  case recog trm of
    Just a -> return a
    Nothing ->
      do trm' <- typeCheckWHNF trm
         case recog trm' of
           Just a -> return a
           Nothing -> throwTCError err
-- | Ensure a 'Term' is a sort, normalizing if necessary, and return that sort
ensureSort :: Term -> TCM Sort
ensureSort tp = ensureRecognizer asSort (NotSort tp) tp
-- | Ensure a 'Term' is a pair type, normalizing if necessary, and return the
-- two components of that pair type
-- NOTE(review): on failure this reports 'NotSort' rather than a
-- pair-specific error; looks like error-constructor reuse — worth confirming
ensurePairType :: Term -> TCM (Term, Term)
ensurePairType tp = ensureRecognizer asPairType (NotSort tp) tp
-- | Ensure a 'Term' is a record type, normalizing if necessary, and return the
-- components of that record type
ensureRecordType :: TCError -> Term -> TCM (Map FieldName Term)
ensureRecordType err tp = ensureRecognizer asRecordType err tp
-- | Ensure a 'Term' is a pi type, normalizing if necessary. Return the
-- components of that pi type on success; otherwise throw the supplied error.
ensurePiType :: TCError -> Term -> TCM (LocalName, Term, Term)
ensurePiType err tp = ensureRecognizer asPi err tp
-- | Reduce a type to WHNF (using 'scWhnf'), also adding in some conversions for
-- operations on Nat literals that are useful in type-checking
typeCheckWHNF :: Term -> TCM Term
typeCheckWHNF = liftTCM scTypeCheckWHNF
-- | The 'IO' version of 'typeCheckWHNF'
scTypeCheckWHNF :: SharedContext -> Term -> IO Term
scTypeCheckWHNF sc t =
  do (_, t') <- rewriteSharedTerm sc (addConvs natConversions emptySimpset :: Simpset ()) t
     scWhnf sc t'
-- | Check that one type is a subtype of another, assuming both arguments are
-- types, i.e., that both have type Sort s for some s, and that they are both
-- already in WHNF. Throws 'SubtypeFailure' when the check fails.
checkSubtype :: TypedTerm -> Term -> TCM ()
checkSubtype arg req_tp =
  do ok <- isSubtype (typedType arg) req_tp
     unless ok $ throwTCError $ SubtypeFailure arg req_tp
-- | Check if one type is a subtype of another, assuming both arguments are
-- types, i.e., that both have type Sort s for some s, and that they are both
-- already in WHNF
isSubtype :: Term -> Term -> TCM Bool
-- Pi types are contravariant-free here: domains must be convertible, while
-- codomains are compared recursively under the bound variable
isSubtype (unwrapTermF -> Pi x1 a1 b1) (unwrapTermF -> Pi _ a2 b2) =
    (&&) <$> areConvertible a1 a2 <*> withVar x1 a1 (isSubtype b1 b2)
-- Sorts are cumulative: Sort s1 is a subtype of Sort s2 whenever s1 <= s2
isSubtype (asSort -> Just s1) (asSort -> Just s2) | s1 <= s2 = return True
isSubtype t1' t2' = areConvertible t1' t2'
-- | Check if two terms are "convertible for type-checking", meaning that they
-- are convertible up to 'natConversions'
areConvertible :: Term -> Term -> TCM Bool
areConvertible t1 t2 = liftTCM scConvertibleEval scTypeCheckWHNF True t1 t2
-- | Check that a recursor's data type, parameters, and motive are mutually
-- consistent, and return the sort that the motive eliminates into
inferRecursorType ::
  PrimName TypedTerm {- ^ data type name -} ->
  [TypedTerm] {- ^ data type parameters -} ->
  TypedTerm {- ^ elimination motive -} ->
  TypedTerm {- ^ type of the elimination motive -} ->
  TCM Sort
inferRecursorType d params motive motiveTy =
  do dt <- liftTCM scRequireDataType (primName d)
     -- Build a MalformedRecursor error carrying a printable recursor term
     let mk_err str =
           MalformedRecursor
           (Unshared $ fmap typedVal $ FTermF $
             Recursor (CompiledRecursor d params motive motiveTy mempty []))
           str
     -- Check that the params have the correct types by making sure
     -- they correspond to the input types of dt
     unless (length params == length (dtParams dt)) $
       throwTCError $ mk_err "Incorrect number of parameters"
     _ <- foldM (applyPiTyped (mk_err "Incorrect data type signature"))
          (dtType dt) params
     -- Get the type of p_ret and make sure that it is of the form
     --
     -- (ix1::Ix1) -> .. -> (ixn::Ixn) -> d params ixs -> s
     --
     -- for some allowed sort s, where the Ix are the indices of of dt
     motive_srt <-
       case asPiList (typedType motive) of
         (_, (asSort -> Just s)) -> return s
         _ -> throwTCError $ mk_err "Motive function should return a sort"
     motive_req <-
       liftTCM scRecursorRetTypeType dt (map typedVal params) motive_srt
     -- Technically this is an equality test, not a subtype test, but we
     -- use the precise sort used in the motive, so they are the same, and
     -- checkSubtype is handy...
     checkSubtype motive motive_req
     unless (allowedElimSort dt motive_srt) $
       throwTCError $ mk_err "Disallowed propositional elimination"
     return motive_srt
-- | Build a 'CompiledRecursor' from a data type, its parameters, a motive, and
-- one eliminator per constructor, checking all of them against the data type
compileRecursor ::
  DataType ->
  [TypedTerm] {- ^ datatype parameters -} ->
  TypedTerm {- ^ elimination motive -} ->
  [TypedTerm] {- ^ constructor eliminators -} ->
  TCM (CompiledRecursor TypedTerm)
compileRecursor dt params motive cs_fs =
  do motiveTy <- typeInferComplete (typedType motive)
     -- Pair each eliminator with its inferred type
     cs_fs' <- forM cs_fs (\e -> do ety <- typeInferComplete (typedType e)
                                    pure (e,ety))
     d <- traverse typeInferComplete (dtPrimName dt)
     let ctorVarIxs = map ctorVarIndex (dtCtors dt)
     ctorOrder <- traverse (traverse typeInferComplete) (map ctorPrimName (dtCtors dt))
     -- Eliminators are keyed by constructor variable index
     let elims = Map.fromList (zip ctorVarIxs cs_fs')
     let rec = CompiledRecursor d params motive motiveTy elims ctorOrder
     let mk_err str =
           MalformedRecursor
           (Unshared $ fmap typedVal $ FTermF $ Recursor rec)
           str
     unless (length cs_fs == length (dtCtors dt)) $
       throwTCError $ mk_err "Extra constructors"
     -- Check that the parameters and motive are correct for the given datatype
     _s <- inferRecursorType d params motive motiveTy
     -- Check that the elimination functions each have the right types, and
     -- that we have exactly one for each constructor of dt
     elims_tps <-
       liftTCM scRecursorElimTypes (fmap typedVal d) (map typedVal params) (typedVal motive)
     forM_ elims_tps $ \(c,req_tp) ->
       case Map.lookup (primVarIndex c) elims of
         Nothing ->
           throwTCError $ mk_err ("Missing constructor: " ++ show c)
         Just (f,_fty) -> checkSubtype f req_tp
     return rec
-- | Compute the type of a recursor value: the 'RecursorType' built from the
-- recursor's data type, parameters, motive, and motive type
inferRecursor :: CompiledRecursor TypedTerm -> TCM Term
inferRecursor rec =
  liftTCM scFlatTermF $ fmap typedVal $
    RecursorType (recursorDataType rec) (recursorParams rec)
                 (recursorMotive rec) (recursorMotiveTy rec)
-- | Infer the type of a recursor application
inferRecursorApp ::
  TypedTerm {- ^ recursor term -} ->
  [TypedTerm] {- ^ data type indices -} ->
  TypedTerm {- ^ recursor argument -} ->
  TCM Term
inferRecursorApp r ixs arg =
  do recty <- typeCheckWHNF (typedType r)
     case asRecursorType recty of
       Nothing -> throwTCError (ExpectedRecursor r)
       Just (_d, _ps, motive, motiveTy) -> do
         -- Apply the indices to the type of the motive
         -- to check the types of the `ixs` and `arg`, and
         -- ensure that the result is fully applied
         let err = BadRecursorApp (typedVal r) (fmap typedVal ixs) (typedVal arg)
         _s <- ensureSort =<< foldM (applyPiTyped err) motiveTy (ixs ++ [arg])
         -- return the type (p_ret ixs arg)
         liftTCM scTypeCheckWHNF =<<
           liftTCM scApplyAll motive (map typedVal (ixs ++ [arg]))
| GaloisInc/saw-script | saw-core/src/Verifier/SAW/SCTypeCheck.hs | bsd-3-clause | 30,117 | 0 | 18 | 6,998 | 7,373 | 3,742 | 3,631 | 494 | 26 |
{-# OPTIONS_GHC -Wall #-}
{-# LANGUAGE OverloadedStrings #-}
module Socket (watchFile) where
import Control.Concurrent (forkIO, threadDelay)
import Control.Exception (SomeException, catch)
import qualified Data.ByteString.Char8 as BS
import qualified Network.WebSockets as WS
import qualified System.FSNotify.Devel as Notify
import qualified System.FSNotify as Notify
-- | Accept the pending websocket connection and watch the current directory
-- tree for changes to @.elm@ files, printing each change event. The watcher
-- is stopped once 'tend' returns.
-- NOTE(review): 'tend' and the 'watchedFile' parameter are not defined/used
-- in the visible portion of this file — confirm against the full source.
watchFile :: FilePath -> WS.PendingConnection -> IO ()
watchFile watchedFile pendingConnection =
  do connection <- WS.acceptRequest pendingConnection
     Notify.withManager $ \mgmt ->
       do stop <- Notify.treeExtAny mgmt "." ".elm" print
          tend connection
          stop
| wakatime/wakatime | tests/samples/codefiles/haskell.hs | bsd-3-clause | 665 | 0 | 13 | 118 | 154 | 88 | 66 | 16 | 1 |
module PeerTrader.Strategy.Schedule (updateDailyInvested) where
import Control.Concurrent.STM
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Reader (asks)
import Data.Traversable as T
import Database.Groundhog
import NoteScript (Money)
import PeerTrader.Account
import PeerTrader.Database (runDb)
import PeerTrader.Ops
import PeerTrader.Strategy.Strategy
-- | Reset the per-day invested state for every account: clear each account's
-- in-memory invest state, then zero the daily-invested column in the database.
updateDailyInvested :: OpsReader ()
updateDailyInvested = do
    acctVar <- asks _accounts
    currentAccts <- liftIO (readTVarIO acctVar)
    _ <- T.traverse resetInvestState currentAccts
    resetDatabase
-- | Zero the 'DailyInvested' field for every row in the database
resetDatabase :: OpsReader ()
resetDatabase = runDb $ update [DailyInvestedField =. (0 :: Money)] CondEmpty
-- | Reset an account's in-memory daily invest state, if it has one; accounts
-- with no Prosper invest state are left untouched
resetInvestState :: PeerTraderAccount -> OpsReader ()
resetInvestState (PTA { _prosperInvestState = Just tInvestState }) =
    liftIO . atomically $
        modifyTVar' tInvestState $ fmap resetDaily
resetInvestState (PTA { _prosperInvestState = Nothing }) = return ()
| WraithM/peertrader-backend | src/PeerTrader/Strategy/Schedule.hs | bsd-3-clause | 1,081 | 0 | 10 | 286 | 254 | 139 | 115 | 23 | 1 |
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Main where
import Control.Exception (handle, catch, SomeException, ErrorCall(..))
import Control.Monad.Trans.State
import Control.Monad.Trans.Maybe
import Data.Function (on, (&))
import Data.List
import Data.List.Split
import Data.Maybe (mapMaybe)
import Data.Ord (comparing)
import System.Environment (getArgs)
import Control.Monad.IO.Class (liftIO)
import Control.Monad (mzero)
import BenchShow
------------------------------------------------------------------------------
-- Command line parsing
------------------------------------------------------------------------------
-- | Which benchmark suite's results to chart/report
data BenchType
    = Linear
    | LinearAsync
    | LinearRate
    | Nested
    | Base
    | FileIO
    | Array
    | Concurrent
    deriving Show
-- | Parsed command-line options: whether to render graphs (vs. a text
-- report), whether to diff streamD against streamK, and which suite to run
data Options = Options
    { genGraphs :: Bool
    , groupDiff :: Bool
    , benchType :: BenchType
    } deriving Show
-- | Options used when none are supplied on the command line: text report,
-- no group diff, 'Linear' suite. Signature added to document the binding.
defaultOptions :: Options
defaultOptions = Options False False Linear
-- | Record the @--graphs@ flag in the parser state
setGenGraphs val = modify (\(args, opts) -> (args, opts { genGraphs = val }))
-- | Record the @--group-diff@ flag in the parser state
setGroupDiff val = modify (\(args, opts) -> (args, opts { groupDiff = val }))
-- | Record the selected benchmark suite in the parser state
setBenchType val = modify (\(args, opts) -> (args, opts { benchType = val }))
-- Like the shell "shift" to shift the command line arguments
-- | Pop the next command-line argument off the parser state, like the shell
-- @shift@ builtin; returns 'Nothing' when no arguments remain.
shift :: StateT ([String], Options) (MaybeT IO) (Maybe String)
shift = do
  (pending, opts) <- get
  case pending of
    [] -> return Nothing
    (arg : rest) -> do
      put (rest, opts)
      return (Just arg)
-- | Consume the argument after @--benchmark@ and set the corresponding
-- 'BenchType'; aborts the whole parse (via 'mzero') on an unknown or
-- missing suite name.
parseBench :: StateT ([String], Options) (MaybeT IO) ()
parseBench = do
  x <- shift
  case x of
    Just "linear" -> setBenchType Linear
    Just "linear-async" -> setBenchType LinearAsync
    Just "linear-rate" -> setBenchType LinearRate
    Just "nested" -> setBenchType Nested
    Just "base" -> setBenchType Base
    Just "fileio" -> setBenchType FileIO
    Just "array" -> setBenchType Array
    Just "concurrent" -> setBenchType Concurrent
    Just str -> do
        liftIO $ putStrLn $ "unrecognized benchmark type " <> str
        mzero
    Nothing -> do
        liftIO $ putStrLn "please provide a benchmark type "
        mzero
-- totally imperative style option parsing
-- | Parse the process's command-line arguments into 'Options'; returns
-- 'Nothing' when parsing fails (an error is printed by the failing parser)
parseOptions :: IO (Maybe Options)
parseOptions = do
  args <- getArgs
  runMaybeT $ flip evalStateT (args, defaultOptions) $ do
    parseLoop
    fmap snd get
  where
    -- Dispatch a single recognized flag; unknown flags abort the parse
    parseOpt opt =
      case opt of
        "--graphs" -> setGenGraphs True
        "--group-diff" -> setGroupDiff True
        "--benchmark" -> parseBench
        str -> do
            liftIO $ putStrLn $ "Unrecognized option " <> str
            mzero
    -- Consume arguments until the list is exhausted
    parseLoop = do
      next <- shift
      case next of
        Just opt -> parseOpt opt >> parseLoop
        Nothing -> return ()
-- | Run an 'IO' action, printing rather than propagating any 'ErrorCall' it
-- raises, so one failed chart/report does not abort the remaining ones.
-- Signature added; all call sites pass @IO ()@ actions.
ignoringErr :: IO () -> IO ()
ignoringErr a = catch a (\(ErrorCall err :: ErrorCall) ->
    putStrLn $ "Failed with error:\n" <> err <> "\nSkipping.")
------------------------------------------------------------------------------
-- Linear composition charts
------------------------------------------------------------------------------
-- | Render one graph per category of the linear (serial) benchmark suite,
-- classifying benchmarks by their "serially/<category>/" name prefix
makeLinearGraphs :: Config -> String -> IO ()
makeLinearGraphs cfg@Config{..} inputFile = do
    ignoringErr $ graph inputFile "generation" $ cfg
        { title = (++) <$> title <*> Just " generation"
        , classifyBenchmark =
            fmap ("Streamly",) . stripPrefix "serially/generation/"
        }
    ignoringErr $ graph inputFile "elimination" $ cfg
        { title = (++) <$> title <*> Just " Elimination"
        , classifyBenchmark =
            fmap ("Streamly",) . stripPrefix "serially/elimination/"
        }
    -- transformation and zipping are merged into a single chart
    ignoringErr $ graph inputFile "transformation-zip" $ cfg
        { title = (++) <$> title <*> Just " Transformation & Zip"
        , classifyBenchmark = \b ->
            if "serially/transformation/" `isPrefixOf` b
               || "serially/zipping" `isPrefixOf` b
            then Just ("Streamly", last $ splitOn "/" b)
            else Nothing
        }
    ignoringErr $ graph inputFile "filtering" $ cfg
        { title = (++) <$> title <*> Just " Filtering"
        , classifyBenchmark =
            fmap ("Streamly",) . stripPrefix "serially/filtering/"
        }
    ignoringErr $ graph inputFile "transformationX4" $ cfg
        { title = (++) <$> title <*> Just " Transformation x 4"
        , classifyBenchmark =
            fmap ("Streamly",) . stripPrefix "serially/transformationX4/"
        }
    ignoringErr $ graph inputFile "filteringX4"
        $ cfg
        { title = (++) <$> title <*> Just " Filtering x 4"
        , classifyBenchmark =
            fmap ("Streamly",) . stripPrefix "serially/filteringX4/"
        }
    ignoringErr $ graph inputFile "mixedX4"
        $ cfg
        { title = (++) <$> title <*> Just " Mixed x 4"
        , classifyBenchmark =
            fmap ("Streamly",) . stripPrefix "serially/mixedX4/"
        }
    ignoringErr $ graph inputFile "iterated"
        $ cfg
        { title = Just "iterate 10,000 times over 10 elems"
        , classifyBenchmark =
            fmap ("Streamly",) . stripPrefix "serially/iterated/"
        }
------------------------------------------------------------------------------
-- Nested composition charts
------------------------------------------------------------------------------
-- | Render a single comparison graph for the nested-loop suite, grouping
-- benchmarks by their stream-type prefix and ordering the groups explicitly
makeNestedGraphs :: Config -> String -> IO ()
makeNestedGraphs cfg inputFile =
    ignoringErr $ graph inputFile "nested-all" $ cfg
        { presentation = Groups Absolute
        , classifyBenchmark = classifyNested
        -- Re-sort groups into the fixed 'order' below rather than whatever
        -- order they appear in the input file
        , selectGroups = \gs ->
            groupBy ((==) `on` snd) gs
                & fmap (\xs -> mapMaybe (\x -> (x,) <$> lookup x xs) order)
                & concat
        }
    where
    order = ["serially", "asyncly", "wAsyncly", "aheadly", "parallely"]
    -- Split "prefix/rest" benchmark names into (group, benchmark) pairs
    classifyNested b
        | "serially/" `isPrefixOf` b =
            ("serially",) <$> stripPrefix "serially/" b
        | "asyncly/" `isPrefixOf` b =
            ("asyncly",) <$> stripPrefix "asyncly/" b
        | "wAsyncly/" `isPrefixOf` b =
            ("wAsyncly",) <$> stripPrefix "wAsyncly/" b
        | "aheadly/" `isPrefixOf` b =
            ("aheadly",) <$> stripPrefix "aheadly/" b
        | "parallely/" `isPrefixOf` b =
            ("parallely",) <$> stripPrefix "parallely/" b
        | otherwise = Nothing
------------------------------------------------------------------------------
-- Charts for parallel streams
------------------------------------------------------------------------------
-- | Render a single comparison graph for the concurrent (async) linear suite,
-- grouped by stream type in a fixed order
makeLinearAsyncGraphs :: Config -> String -> IO ()
makeLinearAsyncGraphs cfg inputFile =
    ignoringErr $ graph inputFile "linear-async" cfg
        { presentation = Groups Absolute
        , classifyBenchmark = classifyAsync
        , selectGroups = \gs ->
            groupBy ((==) `on` snd) gs
                & fmap (\xs -> mapMaybe (\x -> (x,) <$> lookup x xs) order)
                & concat
        }
    where
    order = ["asyncly", "wAsyncly", "aheadly", "parallely"]
    -- Split "prefix/rest" benchmark names into (group, benchmark) pairs
    classifyAsync b
        | "asyncly/" `isPrefixOf` b =
            ("asyncly",) <$> stripPrefix "asyncly/" b
        | "wAsyncly/" `isPrefixOf` b =
            ("wAsyncly",) <$> stripPrefix "wAsyncly/" b
        | "aheadly/" `isPrefixOf` b =
            ("aheadly",) <$> stripPrefix "aheadly/" b
        | "parallely/" `isPrefixOf` b =
            ("parallely",) <$> stripPrefix "parallely/" b
        | otherwise = Nothing
-- | Placeholder for the rate-limited linear suite: just announces that the
-- charts are not implemented yet. Both arguments are currently ignored.
makeLinearRateGraphs :: Config -> String -> IO ()
makeLinearRateGraphs _cfg _inputFile = putStrLn "Not implemented"
------------------------------------------------------------------------------
-- FileIO
------------------------------------------------------------------------------
-- | Render the file-IO suite graph with the given config as-is
makeFileIOGraphs :: Config -> String -> IO ()
makeFileIOGraphs cfg inputFile =
    ignoringErr $ graph inputFile "fileIO" cfg
-- | Render the array suite graph with the given config as-is
makeArrayGraphs :: Config -> String -> IO ()
makeArrayGraphs cfg inputFile =
    ignoringErr $ graph inputFile "array" cfg
-- | Render the concurrent-ops suite graph with the given config as-is
makeConcurrentGraphs :: Config -> String -> IO ()
makeConcurrentGraphs cfg inputFile =
    ignoringErr $ graph inputFile "concurrent" cfg
------------------------------------------------------------------------------
-- Reports/Charts for base streams
------------------------------------------------------------------------------
-- | Compare streamD against streamK benchmarks: a grouped graph when
-- @--graphs@ was given, otherwise a text report
showStreamDVsK Options{..} cfg inp out =
    let cfg' = cfg { classifyBenchmark = classifyBase }
    in if genGraphs
       then ignoringErr $ graph inp "streamD-vs-streamK"
                cfg' { outputDir = Just out
                     , presentation = Groups Absolute
                     }
       else ignoringErr $ report inp Nothing cfg'
    where
    classifyBase b
        | "streamD/" `isPrefixOf` b = ("streamD",) <$> stripPrefix "streamD/" b
        | "streamK/" `isPrefixOf` b = ("streamK",) <$> stripPrefix "streamK/" b
        | otherwise = Nothing
-- | Graph or report only the streamD benchmarks
showStreamD Options{..} cfg inp out =
    let cfg' = cfg { classifyBenchmark = classifyStreamD }
    in if genGraphs
       then ignoringErr $ graph inp "streamD"
                cfg' {outputDir = Just out}
       else ignoringErr $ report inp Nothing cfg'
    where
    classifyStreamD b
        | "streamD/" `isPrefixOf` b = ("streamD",) <$> stripPrefix "streamD/" b
        | otherwise = Nothing
-- | Graph or report only the streamK benchmarks
showStreamK Options{..} cfg inp out =
    let cfg' = cfg { classifyBenchmark = classifyStreamK }
    in if genGraphs
       then ignoringErr $ graph inp "streamK"
                cfg' {outputDir = Just out}
       else ignoringErr $ report inp Nothing cfg'
    where
    classifyStreamK b
        | "streamK/" `isPrefixOf` b = ("streamK",) <$> stripPrefix "streamK/" b
        | otherwise = Nothing
------------------------------------------------------------------------------
-- text reports
------------------------------------------------------------------------------
-- | Select benchmarks sorted by percent difference on the second column,
-- falling back to the first column when the second is unavailable; the
-- result is reversed so the largest regressions come first
selectBench
    :: (SortColumn -> Maybe GroupStyle -> Either String [(String, Double)])
    -> [String]
selectBench f =
    reverse
    $ fmap fst
    $ either
      (const $ either error (sortOn snd) $ f (ColumnIndex 0) (Just PercentDiff))
      (sortOn snd)
    $ f (ColumnIndex 1) (Just PercentDiff)
-- | Either render graphs (with @func@) or emit a text report, depending on
-- the @--graphs@ option
benchShow Options{..} cfg func inp out =
    if genGraphs
    then func cfg {outputDir = Just out} inp
    else ignoringErr $ report inp Nothing cfg
-- | Entry point: parse command-line options, then chart/report the selected
-- benchmark suite. Each suite reads its results CSV from a fixed path under
-- @charts/@ and writes output next to it.
main :: IO ()
main = do
    let cfg = defaultConfig
            { presentation = Groups PercentDiff
            , selectBenchmarks = selectBench
            }
    res <- parseOptions
    case res of
        Nothing -> do
            putStrLn "cannot parse options"
            return ()
        Just opts@Options{..} ->
            case benchType of
                Linear -> benchShow opts cfg
                    { title = Just "100,000 elems" }
                    makeLinearGraphs
                    "charts/linear/results.csv"
                    "charts/linear"
                LinearAsync -> benchShow opts cfg
                    { title = Just "Async 10,000 elems" }
                    makeLinearAsyncGraphs
                    "charts/linear-async/results.csv"
                    "charts/linear-async"
                LinearRate -> benchShow opts cfg makeLinearRateGraphs
                    "charts/linear-rate/results.csv"
                    "charts/linear-rate"
                Nested -> benchShow opts cfg
                    { title = Just "Nested loops 100 x 100 elems" }
                    makeNestedGraphs
                    "charts/nested/results.csv"
                    "charts/nested"
                FileIO -> benchShow opts cfg
                    { title = Just "File IO" }
                    makeFileIOGraphs
                    "charts/fileio/results.csv"
                    "charts/fileio"
                Array -> benchShow opts cfg
                    { title = Just "Array" }
                    makeArrayGraphs
                    "charts/array/results.csv"
                    "charts/array"
                Concurrent -> benchShow opts cfg
                    { title = Just "Concurrent Ops" }
                    makeConcurrentGraphs
                    "charts/concurrent/results.csv"
                    "charts/concurrent"
                -- Base either diffs streamD vs streamK or shows each alone,
                -- depending on --group-diff
                Base -> do
                    let cfg' = cfg { title = Just "100,000 elems" }
                    if groupDiff
                    then showStreamDVsK opts cfg'
                            "charts/base/results.csv"
                            "charts/base"
                    else do
                        showStreamD opts cfg'
                            "charts/base/results.csv"
                            "charts/base"
                        showStreamK opts cfg'
                            "charts/base/results.csv"
                            "charts/base"
| harendra-kumar/asyncly | benchmark/Chart.hs | bsd-3-clause | 13,152 | 0 | 20 | 4,203 | 3,080 | 1,610 | 1,470 | 282 | 10 |
--------------------------------------------------------------------------------
-- |
-- Module : Network.OpenID.Assocation.Map
-- Copyright : (c) Trevor Elliott, 2008
-- License : BSD3
--
-- Maintainer : Trevor Elliott <trevor@geekgateway.com>
-- Stability :
-- Portability :
--
module Network.OpenID.Association.Map (
-- Association Map
AssociationMap(..)
, emptyAssociationMap
) where
-- friends
import Network.OpenID.Association.Manager
import Network.OpenID.Types
-- libraries
import Data.Time
import qualified Data.Map as Map
-- | A simple association manager based on Data.Map, mapping a provider's
-- rendered URL to its association and absolute expiration time
newtype AssociationMap = AM (Map.Map String (UTCTime,Association))
    deriving (Show,Read)
instance AssociationManager AssociationMap where
  -- Look up an association by provider; the stored expiration is dropped
  findAssociation (AM m) p = snd `fmap` Map.lookup (showProvider p) m

  -- Store an association keyed by provider, recording its absolute expiry
  -- as "now + assocExpiresIn seconds"
  addAssociation (AM m) now p a = AM (Map.insert (showProvider p) (expire,a) m)
    where expire = addUTCTime (toEnum (assocExpiresIn a)) now

  -- Prune expired associations: keep only entries whose expiration time is
  -- still in the future. BUG FIX: the predicate was ((now >) . fst), which
  -- kept only the *already expired* entries and deleted every live one.
  expire (AM m) now = AM (Map.filter ((now <) . fst) m)

  -- Flatten the map into (provider, expiry, association) triples
  exportAssociations (AM m) = map f (Map.toList m)
    where f (p,(t,a)) = (p,t,a)
-- | An empty association map.
emptyAssociationMap :: AssociationMap
emptyAssociationMap = AM Map.empty
| substack/hsopenid | src/Network/OpenID/Association/Map.hs | bsd-3-clause | 1,219 | 0 | 12 | 208 | 323 | 186 | 137 | 18 | 1 |
module BibTex (
Bibliography
, Reference(..)
, Field
, bibliography
) where
import Text.ParserCombinators.Parsec
import Control.Monad (liftM)
-- Our main data structure
-- | One BibTeX entry: its entry type (e.g. @article@), its citation key,
-- and its key/value fields
data Reference = Reference {
      getType :: Type
    , getName :: Name
    , getFields :: [Field]
    } deriving Show
-- | A whole file is just a list of references
type Bibliography = [Reference]
type Type = String
type Name = String
-- | A single @key = value@ field of an entry
type Field = (Key, Value)
type Key = String
type Value = String
-- | Parse a whole BibTeX file: any number of references, each of which may
-- be preceded and followed by comment lines
bibliography :: Parser Bibliography
bibliography = many (skipComments *> reference <* skipComments)
    where skipComments = many comment
-- | Skip one comment line: any line that does not start with \'@\' is
-- consumed up to (and including) its end-of-line marker
comment :: Parser ()
comment = do
    noneOf "@"
    anyChar `manyTill` eol
    return ()
-- | Parse one entry: \'@\', the entry type, then a brace-delimited block
-- containing the citation key and fields
reference :: Parser Reference
reference = do
    _ <- char '@'
    refType <- many1 alphaNum
    (refName, refFields) <- spaced (bracketed block)
    return (Reference refType refName refFields)
-- | Parse the inside of an entry's braces: the citation key, a comma, and a
-- comma-separated (optionally trailing-comma) field list
block :: Parser (Name, [Field])
block = do
    name <- identifier
    spaces >> comma
    fields <- fields'
    spaces
    return (name, fields)
    where
      fields' = try (spaced field) `sepEndBy` comma
      comma = char ','
-- | Parse one @key = value@ field; the value may be braced, quoted, or a
-- bare identifier
field :: Parser Field
field = do
    key <- identifier
    spaced $ char '='
    value <- value'
    return $ (key, value)
    where
      value' = bracketed' <|> quoted' <|> identifier
      -- Braced values may contain nested braces, which are recursed into
      bracketed' = bracketed $ contentsWithout "{}"
      quoted' = quoted $ contentsWithout "\""
      contents nonSpecial = liftM concat $ many (nonSpecial <|> bracketed')
      contentsWithout x = contents $ many1 $ noneOf x
-- Helpers (type signatures added)
-- | Run a parser, discarding surrounding whitespace.
spaced :: Parser a -> Parser a
spaced = between spaces spaces
-- | Run a parser between curly braces.
bracketed :: Parser a -> Parser a
bracketed = between (char '{') (char '}')
-- | Run a parser between double quotes.
quoted :: Parser a -> Parser a
quoted = between (char '"') (char '"')
-- | A BibTeX identifier: one or more letters, digits, or @:-_@.
identifier :: Parser String
identifier = many1 (alphaNum <|> oneOf ":-_")
-- | An end-of-line marker in any common convention (LF, CR, CRLF, LFCR).
eol :: Parser String
eol = try (string "\n\r")
    <|> try (string "\r\n")
    <|> string "\n"
    <|> string "\r"
    <?> "end of line"
| palaga/HsBible | BibTex.hs | bsd-3-clause | 1,948 | 0 | 11 | 578 | 597 | 312 | 285 | 62 | 1 |
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Emit where
import qualified LLVM.General.AST as AST
import qualified LLVM.General.AST.Global as G
import qualified LLVM.General.AST.Constant as C
import qualified LLVM.General.AST.Float as F
import qualified LLVM.General.AST.FloatingPointPredicate as FP
import qualified LLVM.General.AST.IntegerPredicate as IP
import LLVM.General.AST.Type (ptr)
import LLVM.General.AST (Instruction(GetElementPtr), moduleDefinitions)
import LLVM.General (moduleLLVMAssembly, withModuleFromAST)
import LLVM.General.Context (withContext)
import LLVM.General.Module
import LLVM.General.Diagnostic (Diagnostic(..))
import LLVM.General.Target (withDefaultTargetMachine)
import Data.Traversable
import Data.Functor ((<$>))
import Data.List (sort, delete)
import Data.Maybe (fromJust, fromMaybe, listToMaybe)
import Data.Char (ord)
import Data.Bool (bool)
import Control.Applicative ((<|>))
import Control.Monad.Trans.Except
import Control.Monad.Trans (lift)
import Control.Monad
import Control.Monad.State (modify, gets, get, put)
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Trans.Maybe (MaybeT(..))
import Control.Monad.Trans.Error (ErrorT(..))
import Control.Monad.State.Strict (StateT(..), MonadState)
import Control.Monad.Reader (ReaderT(..), MonadReader, ask)
import System.Process
import Codegen
import qualified Codegen as CG
import Syntax
import JIT
import Options
import Immediates
import Paths_lc_hs (getDataDir)
import System.FilePath ((</>))
import Text.Printf (printf)
import Utils (readBinary)
type CrispComputation a = ReaderT CompilerOptions (StateT CompilerState (ErrorT String IO)) a
runCrispComputation :: CrispComputation a -> CompilerOptions -> CompilerState
-> IO (Either String (a, CompilerState))
runCrispComputation cc opts crispMod =
runErrorT (runStateT (runReaderT cc opts) crispMod)
liftErrorT :: ErrorT String IO a -> CrispComputation a
liftErrorT = lift . lift
data CompilerState = CompilerState
{ astModule :: AST.Module
, defExprs :: [Expr]
}
emptyModule :: CompilerState
emptyModule = CompilerState AST.defaultModule []
-------------------------------------------------------------------------------
-- Compilation to LLVM
-------------------------------------------------------------------------------
printLLVMasm :: AST.Module -> ExceptT String IO AST.Module
printLLVMasm modl = ExceptT $ withContext $ \context ->
runExceptT $ withModuleFromAST context modl $ \m -> do
putStrLn =<< moduleLLVMAssembly m
return modl
codegen :: CompilerOptions -> AST.Module -> [Expr] -> [Expr] -> IO AST.Module
codegen CompilerOptions{..} modl nonDefExprs defExprs = do
res <- runExceptT $ process preOptiAst
case res of
Right newAst -> return newAst
Left err -> putStrLn err >> return preOptiAst
where
preOptiAst = runLLVM modl deltaModl
process = bool return printLLVMasm optPrintLLVM
>=> optimize
>=> bool return jit optReplMode
deltaModl = delPrevMain >> codegenTop nonDefExprs defExprs
delPrevMain = delFunc entryFuncName
codegenTop :: [Expr] -> [Expr] -> LLVM ()
codegenTop nonDefExprs defExprs = do
processDefinitons
processExpressions
where
globalVars = flip map defExprs $ \(DefExp name _) -> name
processExpressions =
codegenFunction entryFuncName [] bodyPrelude [] globalVars nonDefExprs
where bodyPrelude = call (funcOpr uint (AST.Name initGlobalsFuncName) []) []
processDefinitons = do
traverse processDefiniton defExprs
delFunc initGlobalsFuncName
codegenFunction initGlobalsFuncName [] (return ()) [] globalVars defExprs
where
processDefiniton (DefExp name expr) =
codegenGlobalVar name
codegenGlobalVar :: SymName -> LLVM()
codegenGlobalVar = defineGlobalVar
codegenFunction :: SymName -> [AST.Type] -> Codegen a
-> [SymName] -> [SymName] -> [Expr] -> LLVM ()
codegenFunction funcName argTys prologue args globalVars exprs = do
defineFunc uint funcName fnargs blks
sequence_ extraFuncsComputations
where
fnargs = zip argTys $ map AST.Name args
cgst = execCodegen funcName globalVars $ do
blk <- addBlock entryBlockName
setBlock blk
for args $ \a -> do
var <- alloca uint
store var (local (AST.Name a))
assign a var
prologue
res <- cgenComputation
ret res
cgenComputation = do
resList <- traverse codegenExpr exprs
return $ if null resList
then constUint nilValue
else last resList
blks = createBlocks cgst
extraFuncsComputations = extraFuncs cgst
codegenType :: SymName -> AST.Type -> LLVM ()
codegenType = defineType
codegenExterns :: LLVM ()
codegenExterns =
external uint "memalign" [(AST.IntegerType 64, AST.Name "alignment")
,(AST.IntegerType 64, AST.Name "size") ]
-------------------------------------------------------------------------------
-- Translation of Expr values into llvm IR
-------------------------------------------------------------------------------
codegenExpr :: Expr -> Codegen AST.Operand
codegenExpr (BoolExp True) = return . constUint $ trueValue
codegenExpr (BoolExp False) = return . constUint $ falseValue
codegenExpr (NumberExp n) = return . constUint . toFixnum $ n
codegenExpr (CharExp c) = return . constUint . toChar $ c
codegenExpr EmptyExp = return . constUint $ nilValue
codegenExpr (StringExp str) = do
vecPtr <- memalignRaw $ uintSizeBytes + strLen
vecPtrC <- inttoptr vecPtr $ ptr uint
store vecPtrC $ constUint strLen
bytePtr <- flip bitcast i8ptr =<< getelementptrRaw vecPtrC [1]
for (zip [0..] str) $ \(i, char) -> do
let opr = constUintSize 8 $ ord char
targetPtr <- getelementptrRaw bytePtr [i]
store targetPtr opr
iadd vecPtr $ constUint $ readBinary stringFormat
where
strLen = length str
codegenExpr (ArrayExp exprs) = do
vecPtr <- memalign $ exprCount + 1
vecPtrC <- inttoptr vecPtr $ ptr uint
store vecPtrC $ constUint exprCount
for (zip [1..] exprs) $ \(i, expr) -> do
opr <- codegenExpr expr
targetPtr <- getelementptrRaw vecPtrC [i]
store targetPtr opr
iadd vecPtr $ constUint $ readBinary vectorFormat
where
exprCount = length exprs
codegenExpr (BinOpExp op a b) = do
ca <- codegenExpr a
cb <- codegenExpr b
asIRbinOp op ca cb
codegenExpr (VarExp varName) =
maybe planB load =<< getvar varName
where
planB = load $ extern (AST.Name varName)
codegenExpr (DefExp defName expr) = do
gvs <- gets globalVars
modify $ \s -> s { globalVars = defName : gvs }
codegenExpr (SetExp defName expr)
codegenExpr (SetExp symName expr) = do
mVarPtr <- getvar symName
let ptr = fromMaybe (extern $ AST.Name symName) mVarPtr
store ptr =<< codegenExpr expr
return $ constUint nilValue
codegenExpr (PrimCallExp primName args) = do
operands <- traverse codegenExpr args
call (extern $ AST.Name primName) operands
codegenExpr (CallExp func args) = do
funcEnvPtr <- codegenExpr func
funcEnvPtrC <- inttoptr funcEnvPtr $ ptr $ structType [uint, uint]
envPtrPtr <- getelementptr funcEnvPtrC 0
envPtr <- load envPtrPtr
funcPtrPtr <- getelementptr funcEnvPtrC 1
funcPtr <- load funcPtrPtr
funcPtrC <- inttoptr funcPtr $
ptr $ AST.FunctionType uint (argsTypeList $ length args + 1) False
operands <- traverse codegenExpr args
call funcPtrC $ envPtr : operands
codegenExpr fe@(FuncExp vars body) = do
cgst <- get
let
(lambdaName, supply) =
uniqueName (funcName cgst ++ suffLambda) $ names cgst
freeVars = sort $ findFreeVars (globalVars cgst ++ vars) body
est = envStructType freeVars
atl = argsTypeList $ length vars + 1
createFuncComputation =
codegenFunction
lambdaName atl prologue (envVarName : vars) (globalVars cgst) body
envPtr =
AST.LocalReference uint
$ AST.Name envVarName
prologue = do
envPtrC <- inttoptr envPtr $ ptr est
for (zip [0..] freeVars) $ \(ix,freeVar) -> do
heapPtrPtr <- getelementptr envPtrC ix
heapPtrPtrC <- inttoptr heapPtrPtr $ ptr uint
heapPtr <- load heapPtrPtrC
heapPtrC <- inttoptr heapPtr $ ptr uint
assign freeVar heapPtrC
--Adding llvm computations to add the lambda function and env struct
--as globals in the llvm module
modify $ \cgst -> cgst
{ extraFuncs = createFuncComputation
{-: createTypeComputation-}
: extraFuncs cgst
, names = supply
}
--Setting up the operand to return
--returnedOpr <- malloc 2
returnedOpr <- memalign 2
returnedOprC <- inttoptr returnedOpr $ ptr $ structType [uint, uint]
--Instantiating an env struct and filling it
--envPtr <- malloc $ length freeVars
envPtr <- memalign $ length freeVars
envPtrC <- inttoptr envPtr $ ptr est
for (zip [0..] freeVars) $ \(ix,freeVar) -> do
fvPtr <- flip liftM (getvar freeVar)
$ fromMaybe
(error "bug - freevar filling")
fvVal <- load fvPtr
heapPtr <- memalign 1
heapPtrC <- inttoptr heapPtr $ ptr uint
store heapPtrC fvVal
assign freeVar heapPtrC
setElemPtr envPtrC ix heapPtr
setElemPtr returnedOprC 0 envPtr
funcOprC <- ptrtoint (funcOpr uint (AST.Name lambdaName) atl) uint
setElemPtr returnedOprC 1 funcOprC
return returnedOpr
codegenExpr (IfExp cond tr fl) = do
ifthen <- addBlock "if.then"
ifelse <- addBlock "if.else"
ifexit <- addBlock "if.exit"
cond <- codegenExpr cond
test <- icmp IP.NE falseOpr cond
cbr test ifthen ifelse -- Branch based on the condition
(trval, ifthen) <- thenBr ifthen ifexit
(flval, ifelse) <- elseBr ifelse ifexit
setBlock ifexit
phi uint [(trval, ifthen), (flval, ifelse)]
where
falseOpr = constUint falseValue
thenBr ifthen ifexit = do
setBlock ifthen
trval <- codegenExpr tr
br ifexit
ifthen <- getBlock
return (trval, ifthen)
elseBr ifelse ifexit = do
setBlock ifelse
flval <- codegenExpr fl
br ifexit
ifelse <- getBlock
return (flval, ifelse)
codegenExpr _ = error "codegenExpr called with unexpected Expr"
-------------------------------------------------------------------------------
-- Composite Types
-------------------------------------------------------------------------------
emptyStructType :: AST.Type
emptyStructType = AST.StructureType True []
structType :: [AST.Type] -> AST.Type
structType tys = AST.StructureType True tys
envStructType :: [SymName] -> AST.Type
envStructType freeVars =
AST.StructureType True . argsTypeList $ length freeVars
-------------------------------------------------------------------------------
-- Operations
-------------------------------------------------------------------------------
memalignRaw :: Int -> Codegen AST.Operand
memalignRaw sizeInBytes =
call (funcOpr uint (AST.Name "memalign") $ replicate 2 $ AST.IntegerType 64)
$ map (constUintSize 64) [1, sizeInBytes]
memalign :: Int -> Codegen AST.Operand
memalign sizeInWords = memalignRaw $ sizeInWords * uintSizeBytes
comp :: IP.IntegerPredicate -> AST.Operand -> AST.Operand -> Codegen AST.Operand
comp ip a b = do
res <- zext uint =<< icmp ip a b
resShifted <- shl res . constUint $ shiftWidthOfFormat boolFormat
CG.or resShifted . constUint $ formatMasked boolFormat
binArithOp :: (AST.Operand -> AST.Operand -> Codegen AST.Operand)
-> AST.Operand -> AST.Operand -> Codegen AST.Operand
binArithOp op a b = do
a' <- shr a . constUint $ shiftWidthOfFormat fixnumFormat
b' <- shr b . constUint $ shiftWidthOfFormat fixnumFormat
res <- op a' b'
resShifted <- shl res . constUint $ shiftWidthOfFormat fixnumFormat
CG.or resShifted . constUint $ formatMasked fixnumFormat
asIRbinOp :: BinOp -> AST.Operand -> AST.Operand -> Codegen AST.Operand
asIRbinOp Add = binArithOp iadd
asIRbinOp Sub = binArithOp isub
asIRbinOp Mul = binArithOp imul
asIRbinOp Div = binArithOp idiv
asIRbinOp Lt = comp IP.ULT
asIRbinOp Lte = comp IP.ULE
asIRbinOp Gt = comp IP.UGT
asIRbinOp Gte = comp IP.UGE
asIRbinOp Eq = comp IP.EQ
-------------------------------------------------------------------------------
-- Linking LLVM modules
-------------------------------------------------------------------------------
linkModule :: File -> AST.Module -> ErrorT String IO AST.Module
linkModule fp modlAST = ErrorT $ withContext $ \context -> do
result <- runExceptT . withModuleFromLLVMAssembly context fp $
\modToLink -> (join <$>) . runExceptT . withModuleFromAST context modlAST $
\modl -> runExceptT $ do
linkModules False modl modToLink
liftIO $ moduleAST modl
return $ either (Left . show) id result
-------------------------------------------------------------------------------
-- Compilation to machine code
-------------------------------------------------------------------------------
writeTargetCode :: CrispComputation ()
writeTargetCode = do
CompilerOptions{..} <- ask
let objfn = optInputFilePath ++ ".o"
writeObjFile objfn
liftIO $ callProcess "gcc" ["-lm", objfn, "-o", optOutputFilePath]
where
writeObjFile objfn = do
astMod <- gets astModule
liftErrorT . ErrorT $
withContext $ \context ->
fmap join . runExceptT . withModuleFromAST context astMod $ \mod ->
fmap join . runExceptT . withDefaultTargetMachine $ \target ->
runExceptT $ writeObjectToFile target (File objfn) mod
-------------------------------------------------------------------------------
-- Initial AST module
-------------------------------------------------------------------------------
-- | The initial module comes with external declarations, and is linked
-- with primitive functions and constants defined in C.
-- If it is used for compilation
-- (and not for the initial module of a REPL session) the C driver is
-- linked as well.
initModule :: Bool -> String -> CrispComputation ()
initModule linkDriver label = do
initialASTmod <- getLinkedASTmod
put $ CompilerState initialASTmod []
where
initialASTmod = runLLVM
(AST.defaultModule { AST.moduleName = label })
codegenExterns
getLinkedASTmod = liftErrorT $ do
dataDir <- liftIO getDataDir
let preCompModDir = dataDir </> "precompiled-modules"
primModFilePath = File $ preCompModDir </> "primitives.ll"
driverModFilePath = File $ preCompModDir </> "driver.ll"
constsModFilePath = File $ preCompModDir </> "constants.ll"
linkModule primModFilePath
>=> linkModule constsModFilePath
>=> (if linkDriver then linkModule driverModFilePath else return)
$ initialASTmod
-------------------------------------------------------------------------------
-- Helper functions for emitting
-------------------------------------------------------------------------------
argsTypeList :: Int -> [AST.Type]
argsTypeList n = replicate n uint
setElemPtr :: AST.Operand -> Int -> AST.Operand -> Codegen AST.Operand
setElemPtr struct ix item =
getelementptr struct ix >>= flip store item
| talw/crisp-compiler | src/Emit.hs | bsd-3-clause | 15,099 | 0 | 19 | 2,822 | 4,341 | 2,164 | 2,177 | 323 | 2 |
{-# LANGUAGE ParallelListComp, TupleSections, MultiWayIf, ViewPatterns,
LambdaCase, BangPatterns, CPP #-}
-- |
-- Module : Language.SequentCore.Translate
-- Description : Core \<-\> Sequent Core
-- Maintainer : maurerl@cs.uoregon.edu
-- Stability : experimental
--
-- Translation between Sequent Core and native GHC Core.
module Language.SequentCore.Translate (
-- $txn
fromCoreModule, termFromCoreExpr, joinFromCoreExprByKontType,
bindsToCore,
commandToCoreExpr, termToCoreExpr, joinToCoreExpr, joinIdToCore,
CoreContext, kontToCoreExpr,
onCoreExpr, onSequentCoreTerm
) where
import Language.SequentCore.Syntax
import Language.SequentCore.WiredIn
import BasicTypes ( Arity, RecFlag(..), TopLevelFlag(..), TupleSort(..)
, isNonRec, isNotTopLevel )
import CoreSubst
import CoreSyn ( Unfolding(..), UnfoldingGuidance(..) )
import CoreUnfold
import qualified CoreSyn as Core
import qualified CoreUtils as Core
import qualified CoreFVs as Core
import FastString
import Id
import IdInfo
import Maybes
import qualified MkCore as Core
import MkId
import Outputable hiding ( (<>) )
import Type hiding ( substTy )
import TysPrim
import TysWiredIn
import UniqFM ( intersectUFM_C )
import Unique
import Util ( count )
import VarEnv
import VarSet
import Control.Applicative
import Control.Exception ( assert )
import Control.Monad
import Control.Monad.Fix
import Data.List
import Data.Monoid
-- $txn
-- The translations to and from Sequent Core are /not/ guaranteed to be perfect
-- inverses. However, any differences between @e@ and @commandToCoreExpr
-- (fromCoreExpr e)@ should be operationally insignificant, such as a @let@
-- floating out from a function being applied. A more precise characterization
-- of the intended invariants of these functions would entail some sort of
-- /bisimulation/, but it should suffice to know that the translations are
-- "faithful enough."
------------------------------------------------
-- Public interface for Core --> Sequent Core --
------------------------------------------------
-- | Translate an entire list of Core bindings into Sequent Core.
-- First runs the escape analysis to mark contifiable binders, then
-- translates the annotated program.
fromCoreModule :: [Core.CoreBind] -> [SeqCoreBind]
fromCoreModule binds = fromCoreBinds (escAnalProgram binds)
-- | Translate a single Core expression into a Sequent Core term.
termFromCoreExpr :: Core.CoreExpr -> SeqCoreTerm
termFromCoreExpr expr =
  let annotated = runEscM (escAnalExpr expr)  -- escape-analysed copy
      initEnv   = initFromCoreEnvForExpr expr -- translation environment
  in fromCoreExprAsTerm initEnv annotated
-- | Translate a single Core expression as a Sequent Core parameterized
-- continuation, given the continuation type it should translate to. Used
-- when translating unfoldings on JoinIds, where the Sequent Core type is
-- already known.
joinFromCoreExprByKontType :: Type -> Core.CoreExpr -> SeqCoreJoin
joinFromCoreExprByKontType ty expr =
  let annotated = runEscM (escAnalExpr expr)
      initEnv   = initFromCoreEnvForExpr expr
      descs     = argDescsForKontTy ty
  in fromCoreExprAsJoin initEnv ([], Return) descs annotated
---------------------------------------
-- Phase 1: Escape-analyse Core code --
---------------------------------------
{-
Note [Escape analysis]
~~~~~~~~~~~~~~~~~~~~~~
The purpose of the escape analysis is to work out which let-bound functions we
can translate as parameterized continuations rather than as functions. To do
this, we gather information on all the identifier's occurrences, namely:
Does every occurrence of f appear in a non-escaping context?
To be in a non-escaping context, the occurrence of f must be a tail call in the
context that declared it - that is, not inside a lambda, an argument, a cast
(see Note [Calls inside casts]), etc.
We perform the escape analysis by passing a Var -> Bool mapping bottom-up. Any
candidate for contification (that is, any let-bound variable) that appears in an
expression will appear in the returned mapping. If f appears only in
non-escaping contexts (that is, does not escape), it maps to True; if it appears
at least once in an escaping context, it maps to False. When combining mappings,
say when analysing the branches of a case, we union them together, performing an
AND on any two variables that appear in both mappings. Then, whenever the
traversal returns from an escaping context, such as a lambda or an argument
position, we take the whole mapping and set every value to False, since every
variable that appears in such a context escapes.
(In practice, we save some time by keeping two sets rather than one mapping--one
records all variables seen, and the other records the subset of those that
escape. Rather than set every value to False, then, we just set the escapee set
equal to the occurrence set.)
The result of the escape analysis is an annotated version of the code where each
binder is marked according to whether it should be contified and, if so, what
its total arity is (that is, arity counting both type and value binders).
Note [Calls inside casts]
~~~~~~~~~~~~~~~~~~~~~~~~~
If we try to contify a function that's invoked inside a cast, the resulting
program will be ill-typed. From the perspective of (Dual) System FC's
operational semantics, this is unsurprising because a cast is an operation and
a tail call is definitionally the final operation a function performs. However,
the cast is a fiction; all casts (and types) are erased on translation to STG.
Hence CoreToStg's escape analysis is able to contify (or let-no-escape) more
functions than ours. It's unclear what the workaround might be, though it's also
unclear how often this is a problem in practice.
-}
-- Bottom-up data --
-- | Bottom-up result of the escape analysis for one subtree: every
-- contification candidate that occurs, and the subset seen only in
-- non-escaping (tail-call) positions so far.
data EscapeAnalysis
  = EA { ea_nonEsc :: IdEnv CallInfo -- candidates with no escaping occurrence,
                                     -- each with one representative call
       , ea_allVars :: IdSet }       -- all candidates that occur at all; in
                                     -- here but not in ea_nonEsc => escapes

-- | One representative call made to a candidate.
data CallInfo
  = CI { ci_arity :: TotalArity -- Counts *all* arguments, including types
       , ci_args :: Call -- Invariant: Length is ci_arity
       , ci_scope :: ScopeType } -- Recursive call?

-- | Arity counting both type and value arguments.
type TotalArity = Arity -- Counts *all* arguments, including types
-- | The argument list of one call, type arguments included.
type Call = [Core.CoreExpr]
-- | Occurrence summary for one binder: escaped somewhere, or tail-called
-- everywhere (with one representative call).
data Occs = Esc | NonEsc CallInfo
-- | Whether an occurrence sits inside the recursive RHS of the group that
-- binds the candidate ('Inside') or outside it ('Outside').
data ScopeType = Inside | Outside -- In recursive RHS or not?
-- | An analysis recording no occurrences at all.
emptyEscapeAnalysis :: EscapeAnalysis
emptyEscapeAnalysis = EA emptyVarEnv emptyVarSet
-- | Record a single, tentatively non-escaping, call to @x@.
unitCall :: Id -> Call -> ScopeType -> EscapeAnalysis
unitCall x call scope = EA { ea_nonEsc  = unitVarEnv x ci
                           , ea_allVars = unitVarSet x }
  where
    ci = CI { ci_arity = length call
            , ci_args  = call
            , ci_scope = scope }
-- | Demote every recorded occurrence to "escaping" by clearing the
-- non-escaping set; the set of seen variables is preserved.
markAllAsEscaping :: EscapeAnalysis -> EscapeAnalysis
markAllAsEscaping (EA { ea_allVars = allVars })
  = EA { ea_nonEsc = emptyVarEnv, ea_allVars = allVars }
-- | Intersect two environments, keeping only the entries for which the
-- combining function yields Just.
-- XXX Probably slower than is possible with something like
-- Data.IntMap.mergeWithKey.
intersectWithMaybeVarEnv :: (elt1 -> elt2 -> Maybe elt3)
                         -> VarEnv elt1 -> VarEnv elt2 -> VarEnv elt3
intersectWithMaybeVarEnv f env1 env2 = mapVarEnv fromJust kept
  where
    -- fromJust is safe: everything failing the isJust filter was removed
    kept = filterVarEnv isJust (intersectUFM_C f env1 env2)
-- | Merge the analyses of two sibling subtrees (e.g. two case branches, or
-- function and argument). A variable remains non-escaping only if it is
-- non-escaping on both sides (with matching arity); see inline comments.
combineEscapeAnalyses :: EscapeAnalysis -> EscapeAnalysis -> EscapeAnalysis
combineEscapeAnalyses ea1 ea2
  | isEmptyVarEnv (ea_allVars ea1) = ea2
  | isEmptyVarEnv (ea_allVars ea2) = ea1
  | otherwise = EA { ea_allVars = ea_allVars ea1 `unionVarSet` ea_allVars ea2
                   , ea_nonEsc = onlyIn1 `plusVarEnv` onlyIn2
                                   `plusVarEnv` nonEscBoth }
  where
    -- There are three ways a variable makes it into the non-escaping set for
    -- the combined analysis:
    --   1. It appears in the left non-escaping set and not at all on the right
    --   2. It appears in the right non-escaping set and not at all on the left
    --   3. It appears in both non-escaping sets with the same arity
    onlyIn1 = ea_nonEsc ea1 `minusVarEnv` ea_allVars ea2
    onlyIn2 = ea_nonEsc ea2 `minusVarEnv` ea_allVars ea1
    nonEscBoth = intersectWithMaybeVarEnv combine (ea_nonEsc ea1) (ea_nonEsc ea2)
    -- Only need to keep one call made to each function.
    -- Prefer non-recursive calls (see Note [Determining fixed type values]);
    -- mismatched arities disqualify the variable entirely.
    combine ci1 ci2 | ci_arity ci1 /= ci_arity ci2 = Nothing
                    | Inside <- ci_scope ci1 = Just ci2
                    | otherwise = Just ci1
-- | Drop the given variables from an analysis entirely, e.g. when leaving
-- the scope that binds them.
forgetVars :: EscapeAnalysis -> [Id] -> EscapeAnalysis
forgetVars ea xs = ea { ea_nonEsc  = ea_nonEsc ea `delVarEnvList` xs
                      , ea_allVars = ea_allVars ea `delVarSetList` xs }
-- | What the analysis knows about @x@: tail-called everywhere ('NonEsc'
-- with its call info), escaped ('Esc'), or never seen ('Nothing').
occurrences :: EscapeAnalysis -> Id -> Maybe Occs
occurrences ea x =
  case lookupVarEnv (ea_nonEsc ea) x of
    Just ci -> Just (NonEsc ci)
    Nothing
      | x `elemVarEnv` ea_allVars ea -> Just Esc
      | otherwise                    -> Nothing
-- | If none of the variables escape, return each one paired with its call
-- info ('Nothing' in the pair for variables that never occur). If any of
-- them escapes, the whole result is 'Nothing'.
allOccurrences :: EscapeAnalysis -> [Id] -> Maybe [(Id, Maybe CallInfo)]
allOccurrences ea = traverse summarize
  where
    summarize x = case occurrences ea x of
                    Just (NonEsc ci) -> Just (x, Just ci)
                    Just Esc         -> Nothing  -- one escapee spoils the group
                    Nothing          -> Just (x, Nothing)
-- | 'mempty' records no occurrences; 'mappend' merges sibling analyses via
-- 'combineEscapeAnalyses'.
instance Monoid EscapeAnalysis where
  mempty = emptyEscapeAnalysis
  mappend = combineEscapeAnalyses
-- Top-down data --
-- | Top-down state: the let-bound variables currently eligible for
-- contification, each mapped to whether we are inside its own recursive
-- RHS ('Inside') or not ('Outside').
type CandidateEnv = IdEnv ScopeType

-- | No candidates in scope.
emptyCandidateEnv :: CandidateEnv
emptyCandidateEnv = emptyVarEnv

-- | Register a single candidate with its scope type.
addCandidate :: CandidateEnv -> Id -> ScopeType -> CandidateEnv
addCandidate = extendVarEnv

-- | Register several candidates, all with the same scope type.
addCandidates :: CandidateEnv -> [Id] -> ScopeType -> CandidateEnv
addCandidates env ids sc = extendVarEnvList env [ (id, sc) | id <- ids ]

-- | Look up whether a variable is a candidate and, if so, its scope type.
candidateScope :: CandidateEnv -> Id -> Maybe ScopeType
candidateScope = lookupVarEnv
-- Monad --
-- | The monad underlying the escape analysis: a function from the top-down
-- candidate environment to a bottom-up escape analysis plus a result.
newtype EscM a = EscM { unEscM :: CandidateEnv -> (EscapeAnalysis, a) }
-- NB: '>>=' binds the result pairs with lazy (irrefutable) let patterns.
-- This laziness is essential: 'escAnalProgram'/'escAnalBind' tie a knot with
-- 'mfix', so forcing a sub-analysis too early would diverge.
instance Monad EscM where
  return x = EscM $ \_ -> (emptyEscapeAnalysis, x)
  m >>= k = EscM $ \env -> let (escs1, x) = unEscM m env
                               (escs2, y) = unEscM (k x) env
                               escs = escs1 <> escs2
                           in (escs, y)

instance Functor EscM where fmap = liftM
instance Applicative EscM where { pure = return; (<*>) = ap }

-- | Value recursion: feed the eventual result back into the computation.
-- Used by 'escAnalProgram' to make the scope's analysis available while
-- analysing the binding itself. Must stay lazy in the pair.
instance MonadFix EscM where
  mfix f = EscM $ \env -> let pair@(_, ans) = unEscM (f ans) env
                          in pair
-- | Run an escape-analysis computation with no candidates in scope,
-- discarding the final analysis.
runEscM :: EscM a -> a
runEscM m = snd (unEscM m emptyCandidateEnv)
-- Monad operations --
-- | Retrieve the current candidate environment.
getCandidates :: EscM CandidateEnv
getCandidates = EscM $ \env -> (mempty, env)
-- | Run a computation under a modified candidate environment.
alteringEnv :: (CandidateEnv -> CandidateEnv) -> EscM a -> EscM a
alteringEnv f m = EscM (unEscM m . f)
-- | Run a computation under the given candidate environment, ignoring the
-- ambient one.
withEnv :: CandidateEnv -> EscM a -> EscM a
withEnv env m = EscM (const (unEscM m env))
-- | Remove a single variable from the candidate set (e.g. when shadowed).
withoutCandidate :: Id -> EscM a -> EscM a
withoutCandidate x = alteringEnv (\env -> env `delVarEnv` x)
-- | Remove several variables from the candidate set at once.
withoutCandidates :: [Id] -> EscM a -> EscM a
withoutCandidates xs = alteringEnv (\env -> env `delVarEnvList` xs)
-- | Record one call to a candidate in the bottom-up analysis.
reportCall :: Id -> Call -> ScopeType -> EscM ()
reportCall x call scope =
  -- debugging: pprTrace "reportCall" (ppr x <+> ppr call <+> ppr scope) $
  writeAnalysis (unitCall x call scope)
-- | Run a sub-computation and return its analysis alongside its result.
-- 'captureAnalysis' withholds the sub-analysis from the surrounding
-- computation (the caller decides what to write back, possibly filtered);
-- 'readAnalysis' both propagates and returns it. The lazy lets matter:
-- these run under 'mfix' knots tied in 'escAnalProgram'.
captureAnalysis, readAnalysis :: EscM a -> EscM (EscapeAnalysis, a)
captureAnalysis m = EscM $ \env -> let (escs, ans) = unEscM m env
                                   in (emptyEscapeAnalysis, (escs, ans))
readAnalysis m = EscM $ \env -> let (escs, ans) = unEscM m env
                                in (escs, (escs, ans))
-- | Contribute an analysis to the bottom-up result.
writeAnalysis :: EscapeAnalysis -> EscM ()
writeAnalysis escs = EscM (const (escs, ()))
-- | Transform the analysis produced by a sub-computation, e.g. to mark
-- everything in it as escaping. The lazy let keeps this safe under 'mfix'.
filterAnalysis :: (EscapeAnalysis -> EscapeAnalysis) -> EscM a -> EscM a
filterAnalysis f m = EscM $ \env -> let (escs, ans) = unEscM m env
                                    in (f escs, ans)
-- Result: Marked binders --
{-
Note [Fixing type arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we are contifying the polymorphic function:
k :: forall a b. Bool -> a -> b -> [b]
Since we're contifying it, it is always tail-called from a particular context,
and that context expects a result of type [T] for some particular T. Thus we
cannot allow b to vary in the contified version of k: It must *always* return
[T] (and its final argument must be a T). Hence we must eliminate the type
parameter b and substitute T for b in the type and body of k. Supposing T is Int,
the contified k looks like
k :: Cont# (exists a. (Bool, a, Int))
(type simplified for clarity). Note that since a doesn't appear in the original
function's return type, it is free to vary, and we construct the existential as
usual. This is important for case analyses on existential types, which produce
polymorphic join points.
Note [Determining fixed type values]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The above discussion glossed over a detail: How did we determine T to be the
constant value of b? It is true that k must always be invoked with the same
value of b *but* recursive calls will pass on the constant value, so looking
at them is unhelpful.
For instance:
  let rec { rep :: forall a. Int -> a -> [a] -> [a]
            rep @a n x xs =
              case n <= 0 of True -> xs
                             False -> rep @a (n-1) x (x:xs) }
in case q of True -> rep @Char 4 'a' []
False -> rep @Char 3 'b' []
The rep function is always tail-called with Char as the type argument, but in
the recursive call this is not apparent from syntax alone: The recursive call
passes a, not Char. Thus we need to differentiate between recursive calls and
"outside" calls and we need to look at an outside call to determine a. If there
are no outside calls, we would need either abstract interpretation or
unification to find the correct type, so we punt and give up on contifying.
(One is tempted to detect when a recursive call passes a tyvar that's equal to
the corresponding binder. This could solve the above case - we would know not
to use a because a is itself the binder. However, this does not cover mutual
recursion or other cases where a is passed indirectly just as the actual type
is.)
-}
-- | Verdict for a binder: contify it (with a description of each argument)
-- or translate it as an ordinary function.
data KontOrFunc = MakeKont [ArgDesc] | MakeFunc
-- | What becomes of one argument of a contified function:
--   'FixedType' ty -- type argument pinned to the constant type @ty@
--                     (see Note [Fixing type arguments]);
--   'FixedVoidArg' -- a Void#-typed value argument that is dropped;
--   'TyArg' tv     -- a type argument still free to vary;
--   'ValArg' ty    -- an ordinary value argument of type @ty@.
data ArgDesc = FixedType Type | FixedVoidArg | TyArg TyVar | ValArg Type
-- | A binder annotated with its contification verdict.
data MarkedVar = Marked Var KontOrFunc
-- | Strip the contification mark from a binder.
unmark :: MarkedVar -> Var
unmark (Marked var _) = var

instance HasId MarkedVar where identifier = unmark
-- | Decide whether a variable should be contified, returning the marked
-- variable and a flag (True if contifying). Contification succeeds exactly
-- when 'mkArgDescs' can describe every argument.
markVar :: Id -> CallInfo -> (MarkedVar, Bool)
markVar x ci = maybe dontContify contify (mkArgDescs x (idType x) ci)
  where
    contify descs = (Marked x (MakeKont descs), True)
    dontContify   = (Marked x MakeFunc, False)
-- | Decide whether a group of mutually recursive variables should be
-- contified, returning the marked variables and a flag. Either all of the
-- variables will be contified (in which case the flag is True) or none will.
markVars :: [Id] -> [CallInfo] -> ([MarkedVar], Bool)
markVars xs cis =
  case zipWithM (\x ci -> mkArgDescs x (idType x) ci) xs cis of
    Just descss -> (zipWith contify xs descss, True)
    Nothing     -> (map asFunc xs, False)
  where
    contify x descs = Marked x (MakeKont descs)
    asFunc x        = Marked x MakeFunc
-- | Return a constant value for each argument that needs one, given the type
-- and total arity of a function to be contified and a call made to it. Any
-- type parameters binding variables appearing in the return type must be made
-- constant, since the contified function will return to a fixed continuation in
-- which those parameters are not bound. (See Note [Determining fixed type
-- values].)
--
-- Returns Nothing if a type parameter needs to be fixed but the scope of the
-- given call is Inside, meaning only recursive calls were made to the function.
-- In this case, we give up on contification. (TODO: A more sophisticated
-- analysis could still find the correct type to use.)
--
-- We also don't contify if the id has rules; this is uncommon, but it does
-- happen (often due to SpecConstr), and we don't want to stop rules from firing.
--
-- It's possible the total arity is greater than the number of arrows and foralls
-- in the type, but only if the return type of the function is a type variable
-- bound in an outer scope. This is fine, because the extra arguments cannot
-- change the actual return type, so we don't need to fix (mask out) the extra
-- arguments. TODO Be more sure about this.
mkArgDescs :: Var -> Type -> CallInfo -> Maybe [ArgDesc]
mkArgDescs x _ _
  | idHasRules x = Nothing -- unlikely but possible, and contification
                           -- would likely get in the way of rule firings
mkArgDescs x ty (CI { ci_arity = arity, ci_args = call, ci_scope = scope })
  = go ty call
  where
    -- Type variables free in the (apparent) return type must be fixed
    (_tyVars, retTy) = splitPiTyN ty arity
    freeInRetTy = tyVarsOfType retTy
    -- Walk the type and the call's arguments in lockstep, describing each
    go ty (Core.Type tyArg : call)
      | tyVar `elemVarSet` freeInRetTy
      = case scope of
          Outside ->
            -- Fix this type argument to the value seen at the outside call,
            -- then start over with the thus-instantiated return type
            (FixedType tyArg :) <$> mkArgDescs x (substTyWith [tyVar] [tyArg] bodyTy)
                                               (CI { ci_arity = length call
                                                   , ci_args = call
                                                   , ci_scope = scope })
          -- Only recursive calls seen: can't learn the fixed type, give up
          Inside -> Nothing
      | otherwise
      = (TyArg tyVar :) <$> go bodyTy call
      where
        (tyVar, bodyTy) = splitForAllTy_maybe ty `orElse`
                            pprPanic "mkArgDescs" (ppr ty <+> ppr tyArg)
    go ty (arg : call)
      | argTy `eqType` voidPrimTy
      = (FixedVoidArg :) <$> go retTy call
      | otherwise
      = (ValArg argTy :) <$> go retTy call
      where
        (argTy, retTy) = splitFunTy_maybe ty `orElse`
                           pprPanic "mkArgDescs" (ppr ty <+> ppr arg)
    go _ [] = Just []
-- | Recover argument descriptors from a continuation type by walking its
-- structure: outer unboxed existentials become 'TyArg's; the components of
-- the final unboxed tuple become 'ValArg's (trailing existentials handled
-- recursively); an empty tuple stands for one dropped Void# argument
-- ('FixedVoidArg'). Panics if the type is not a continuation type.
argDescsForKontTy :: Type -> [ArgDesc]
argDescsForKontTy kontTy
  | Just ty <- isKontTy_maybe kontTy
  = go ty []
  | otherwise
  = pprPanic "argDescsForKontTy" (ppr kontTy)
  where
    -- Descriptors accumulate in reverse; 'done' puts them back in order
    go ty acc | Just (tyVar, retTy) <- isUbxExistsTy_maybe ty
              = go retTy (TyArg tyVar : acc)
              | isUnboxedTupleType ty
              , Just (_, tyArgs) <- splitTyConApp_maybe ty
              = goTuple tyArgs acc
              | otherwise
              = pprPanic "argDescsForKontTy" (ppr kontTy)
    goTuple [] acc = done (FixedVoidArg : acc)
    goTuple [ty] acc | Just (tyVar, retTy) <- isUbxExistsTy_maybe ty
                     = go retTy (TyArg tyVar : acc)
                     | otherwise
                     = done (ValArg ty : acc)
    goTuple (ty:tys) acc = goTuple tys (ValArg ty : acc)
    done acc = reverse acc
-- | Split off up to @n@ leading pi-binders (foralls and arrows) from a type,
-- returning @Just tv@ for each forall binder, @Nothing@ for each value arrow,
-- and the remaining type. Stops after @n@ binders even if the type has more:
-- callers pass the arity of a particular call, and what matters is the return
-- type /at that call/ (see 'mkArgDescs').
splitPiTyN :: Type -> TotalArity -> ([Maybe TyVar], Type)
splitPiTyN ty n
  | n <= 0
  -- Bug fix: previously n was decremented but never checked, so the whole
  -- type was split regardless of the requested count, understating the
  -- return type for undersaturated calls.
  = ([], ty)
  | Just (tyVar, ty') <- splitForAllTy_maybe ty
  = let (args, retTy) = splitPiTyN ty' (n-1)
    in (Just tyVar : args, retTy)
  | Just (_, ty') <- splitFunTy_maybe ty
  = let (args, retTy) = splitPiTyN ty' (n-1)
    in (Nothing : args, retTy)
  | otherwise
  = ([], ty)
-- Escape analysis --
-- | Escape-analyse a whole Core program, marking every binder with a
-- contification verdict.
escAnalProgram :: Core.CoreProgram -> [Core.Bind MarkedVar]
escAnalProgram binds = runEscM (go binds)
  where
    go :: [Core.CoreBind] -> EscM [Core.Bind MarkedVar]
    go (bind:binds)
      = do
          -- Knot: 'escAnalBind' needs the analysis of the binding's scope
          -- (the remaining binds) to mark this binder, while the scope is
          -- analysed under the environment the binding sets up. The lazy
          -- pattern (~) keeps the 'mfix' from diverging.
          (_escs, bind', binds') <- mfix $ \ ~(rec_escs_body, _, _) -> do
            (env', bind') <- escAnalBind TopLevel bind rec_escs_body
            (escs_body, binds') <- readAnalysis $ withEnv env' $ go binds
            return (escs_body, bind', binds')
          return (bind':binds')
    go [] = return []
-- | Analyse one binding group. Takes the escape analysis of the binding's
-- *scope* (supplied lazily, via the 'mfix' knot in 'escAnalProgram') so it
-- can see how the binders are used; returns the environment under which to
-- analyse the scope, together with the marked binding. Top-level binders
-- are never contified.
escAnalBind :: TopLevelFlag -> Core.CoreBind -> EscapeAnalysis
            -> EscM (CandidateEnv, Core.Bind MarkedVar)
escAnalBind lvl (Core.NonRec bndr rhs) escs_body
  = do
      (escs_rhs, (rhs', lamCount)) <-
        captureAnalysis $ escAnalId bndr >> escAnalRhs rhs
        -- escAnalId looks at rules and unfoldings, which act as alternate RHSes
      -- Contify only if every occurrence in the body is a tail call ('NonEsc');
      -- 'unsat' records whether the call supplies fewer args than the RHS's
      -- lambdas, in which case calls inside the RHS escape after all.
      let (marked, kontifying, unsat)
            | isNotTopLevel lvl
            , Just (NonEsc ci) <- occurrences escs_body bndr
            = let (marked, kontifying) = markVar bndr ci
              in (marked, kontifying, ci_arity ci < lamCount)
            | otherwise
            = (Marked bndr MakeFunc, False, False)
          escs_rhs' | not kontifying || unsat = markAllAsEscaping escs_rhs
                    | otherwise = escs_rhs
      writeAnalysis escs_rhs'
      env <- getCandidates
      return (addCandidate env bndr Outside, Core.NonRec marked rhs')
escAnalBind lvl (Core.Rec pairs) escs_body
  = do
      env <- getCandidates
      -- RHSes are analysed with the binders marked 'Inside' (recursive scope)
      let bndrs = map fst pairs
          env_rhs = addCandidates env bndrs Inside
      (unzip -> (escs_rhss, unzip -> (rhss', lamCounts)))
        <- withEnv env_rhs $ forM pairs $ \(bndr, rhs) ->
             captureAnalysis $ escAnalId bndr >> escAnalRhs rhs
      -- All-or-nothing: the whole group is contified only if no binder
      -- escapes anywhere in the RHSes or the body
      let escs = mconcat escs_rhss <> escs_body
          (pairs', kontifying, unsats)
            | isNotTopLevel lvl
            , Just occsList <- allOccurrences escs bndrs
            = let (bndrs_live, cis, rhss'_live, lamCounts_live)
                    = unzip4 [ (bndr, ci, rhs', lamCount)
                             | ((bndr, Just ci), rhs', lamCount) <-
                                 zip3 occsList rhss' lamCounts ]
                  (bndrs_marked, kontifying) = markVars bndrs_live cis
                  isUnsat ci lamCount = ci_arity ci < lamCount
              in ( zip bndrs_marked rhss'_live, kontifying
                 , zipWith isUnsat cis lamCounts_live )
            | otherwise
            = ([ (Marked bndr MakeFunc, rhs') | (bndr, rhs') <- zip bndrs rhss' ], False, repeat False)
          -- Undersaturated (or non-contified) RHSes let their calls escape
          escs_rhss' | not kontifying = map markAllAsEscaping escs_rhss
                     | otherwise = [ if unsat then markAllAsEscaping escs else escs
                                   | escs <- escs_rhss
                                   | unsat <- unsats ]
      writeAnalysis (mconcat escs_rhss' `forgetVars` bndrs)
      let env_body = addCandidates env bndrs Outside
      return (env_body, Core.Rec pairs')
-- | Analyse an expression, but don't let its top-level lambdas cause calls to
-- escape. Returns the number of lambdas ignored; if the function is partially
-- invoked, the calls escape after all.
escAnalRhs :: Core.CoreExpr -> EscM (Core.Expr MarkedVar, Int)
escAnalRhs expr
  = do
    let (bndrs, body) = Core.collectBinders expr
    -- The outer lambdas' binders shadow any outer candidates of the same name
    body' <- withoutCandidates bndrs $ escAnalExpr body
    -- All top-level lambda binders stay functions; the count lets the caller
    -- detect partial application of the whole RHS
    return $ ( Core.mkLams [ Marked bndr MakeFunc | bndr <- bndrs ] body'
             , length bndrs )
-- | Escape-analyse an arbitrary Core expression. Contexts that cannot be a
-- tail call (non-variable function heads, lambda bodies, case scrutinees,
-- casts) mark the free variables of their subexpression as escaping.
escAnalExpr :: Core.CoreExpr -> EscM (Core.Expr MarkedVar)
escAnalExpr (Core.Var x)
  = escAnalApp x []
escAnalExpr (Core.Lit lit)
  = return $ Core.Lit lit
escAnalExpr expr@(Core.App {})
  = let (func, args) = Core.collectArgs expr
    in case func of
         Core.Var fid -> escAnalApp fid args
         -- Applying a non-variable: nothing here can be contified
         _ -> filterAnalysis markAllAsEscaping $ do
                func' <- escAnalExpr func
                args' <- mapM escAnalExpr args
                return $ Core.mkApps func' args'
escAnalExpr expr@(Core.Lam {})
  = do
    let (tyBndrs, valBndrs, body) = Core.collectTyAndValBinders expr
    -- Remove value binders from the environment in case of shadowing - we
    -- won't report them as free vars
    body' <- withoutCandidates valBndrs $
             -- Lambdas ruin contification, so the free vars escape
             filterAnalysis markAllAsEscaping $
             escAnalExpr body
    let bndrs' = [ Marked bndr MakeFunc | bndr <- tyBndrs ++ valBndrs ]
    return $ Core.mkLams bndrs' body'
escAnalExpr (Core.Let bind body)
  = do
    -- Same knot-tying as in escAnalProgram: the binding's analysis needs the
    -- body's analysis, which needs the binding's extended environment
    (_escs, bind', body') <- mfix $ \ ~(rec_escs_body, _, _) -> do
      (env', bind') <- escAnalBind NotTopLevel bind rec_escs_body
      (escs_body, body') <- readAnalysis $ withEnv env' $ escAnalExpr body
      return (escs_body, bind', body')
    return $ Core.Let bind' body'
escAnalExpr (Core.Case scrut bndr ty alts)
  = do
    -- A call in scrutinee position is not a tail call
    scrut' <- filterAnalysis markAllAsEscaping $ escAnalExpr scrut
    alts' <- withoutCandidate bndr $ forM alts $ \(con, bndrs, rhs) -> do
      rhs' <- withoutCandidates bndrs $ escAnalExpr rhs
      return (con, map (`Marked` MakeFunc) bndrs, rhs')
    return $ Core.Case scrut' (Marked bndr MakeFunc) ty alts'
escAnalExpr (Core.Cast expr co)
  -- A call under a cast isn't a tail call, so pretend the free vars escape
  = (`Core.Cast` co) <$> filterAnalysis markAllAsEscaping (escAnalExpr expr)
escAnalExpr (Core.Tick ti expr)
  = Core.Tick ti <$> escAnalExpr expr
escAnalExpr (Core.Type ty)
  = return $ Core.Type ty
escAnalExpr (Core.Coercion co)
  = return $ Core.Coercion co
-- | Analyse an application of a variable to arguments. If the head is a
-- contification candidate, record the call (a potential tail call); the
-- arguments themselves can never be tail calls, so their free vars escape.
escAnalApp :: Id -> [Core.CoreExpr] -> EscM (Core.Expr MarkedVar)
escAnalApp fid args
  = do
    env <- getCandidates
    let candidacy = candidateScope env fid
    whenIsJust candidacy $ \scope -> reportCall fid args scope
    args' <- filterAnalysis markAllAsEscaping $ mapM escAnalExpr args
    return $ Core.mkApps (Core.Var fid) args'
-- Analyse unfolding and rules
-- | Analyse the rules and unfolding attached to an identifier; these act as
-- alternate right-hand sides, so calls inside them count as occurrences.
escAnalId :: Id -> EscM ()
escAnalId x
  | isId x
  = do
    mapM_ escAnalRule (idCoreRules x)
    escAnalUnfolding (idUnfolding x)
  | otherwise
  = return ()
-- | Analyse the RHS of a rewrite rule (built-in rules have no analysable RHS).
escAnalRule :: Core.CoreRule -> EscM ()
escAnalRule (Core.Rule { Core.ru_bndrs = bndrs, Core.ru_rhs = rhs })
  = void $ withoutCandidates bndrs $ escAnalExpr rhs
escAnalRule _
  = return ()
-- | Analyse an unfolding: the template of an ordinary unfolding, or each
-- argument of a DFun unfolding. Other unfoldings carry no expressions.
escAnalUnfolding :: Core.Unfolding -> EscM ()
escAnalUnfolding (Core.CoreUnfolding { Core.uf_tmpl = rhs }) = void $ escAnalExpr rhs
escAnalUnfolding (Core.DFunUnfolding { Core.df_args = args }) = mapM_ escAnalExpr args
escAnalUnfolding _ = return ()
----------------------------------------
-- Phase 2: Translate to Sequent Core --
----------------------------------------
-- Continuation calling conventions --
-- | The protocol for invoking a given let-bound continuation. Currently all
-- such continuations must be invoked using a jump, so @ByJump@ is the only
-- constructor, but we must still keep track of which arguments are fixed and
-- should be omitted when converting a function call.
newtype KontCallConv = ByJump [ArgDesc]
-- Auxiliary datatype for idToKontId
-- A continuation type is a tree of unboxed existentials and unboxed tuples
-- over ordinary types; see kontTypeToType for the lowering.
data KontType = KTExists TyVar KontType | KTTuple [KontType] | KTType Type
-- | Convert an id to the id of a parameterized continuation, changing its type
-- according to the given calling convention.
idToJoinId :: Id -> KontCallConv -> JoinId
idToJoinId p conv@(ByJump descs)
  = p `setIdType` kontTypeToType (go (idType p) descs)
      `setIdInfo` (idInfo p `setArityInfo` valArgCount)
      `tweakUnfolding` conv
  where
    valArgCount = count (\case { ValArg {} -> True; _ -> False }) descs
    -- Walk the original type alongside the arg descriptors: fixed arguments
    -- are consumed (instantiated/dropped), variable arguments are kept in the
    -- continuation type. Asserts check the descriptors match the type.
    go _ [] = KTTuple []
    go ty (FixedType tyArg : descs')
      | Just (tyVar, ty') <- splitForAllTy_maybe ty
      = go (substTyWith [tyVar] [tyArg] ty') descs'
    go ty (FixedVoidArg : descs')
      | Just (argTy, retTy) <- splitFunTy_maybe ty
      = assert (argTy `eqType` voidPrimTy) $
        go retTy descs'
    go ty (TyArg tyVar : descs')
      | Just (tyVar', ty') <- splitForAllTy_maybe ty
      = assert (tyVar == tyVar') $
        KTExists tyVar (go ty' descs')
    go ty (ValArg argTy : descs')
      | Just (argTy', retTy) <- splitFunTy_maybe ty
      = assert (argTy `eqType` argTy') $
        argTy `consKT` go retTy descs'
    go _ _
      = pprPanic "idToJoinId" (pprBndr LetBind p $$ ppr descs)
-- | Lower the auxiliary 'KontType' representation to an actual type:
-- existentials become unboxed existentials, tuples become unboxed tuples, and
-- the whole result is wrapped as a continuation type via 'mkKontTy'.
kontTypeToType :: KontType -> Type
kontTypeToType kty0 = mkKontTy (lower kty0)
  where
    lower kty = case kty of
      KTExists bndr body -> mkUbxExistsTy bndr (lower body)
      KTTuple elts       -> mkTupleTy UnboxedTuple (map lower elts)
      KTType ty          -> ty
-- | Prepend a type to a continuation type, flattening into the existing
-- unboxed tuple where possible (an existential cannot be flattened into).
consKT :: Type -> KontType -> KontType
consKT ty kty = case kty of
  KTExists {}  -> KTTuple [KTType ty, kty]
  KTTuple elts -> KTTuple (KTType ty : elts)
  KTType ty2   -> KTTuple [KTType ty, KTType ty2]
-- | Remove from a list the elements corresponding to fixed arguments according
-- to the given calling convention; only variable (type or value) arguments
-- survive.
removeFixedArgs :: [a] -> KontCallConv -> [a]
removeFixedArgs args (ByJump descs)
  = map fst $ filter (isKept . snd) $ zip args descs
  where
    isKept (FixedType _) = False
    isKept FixedVoidArg  = False
    isKept _             = True
-- | Alter an id's unfolding according to the given calling convention.
-- | Alter an id's unfolding according to the given calling convention: the
-- template is eta-expanded for the join body, fixed arguments are dropped,
-- and the inlining guidance's per-argument discounts are adjusted to match.
tweakUnfolding :: Id -> KontCallConv -> Id
tweakUnfolding id (ByJump descs)
  = case unf of
      Core.CoreUnfolding {} ->
        let expr = uf_tmpl unf
            env = initFromCoreEnvForExpr expr
            (env', bndrs, body) = etaExpandForJoinBody env descs expr
            -- A join with no value args still binds one Void# lambda
            bndrs' | noValArgs = bndrs ++ [voidPrimId]
                   | otherwise = bndrs
            expr' = substExpr (text "tweakUnfolding") (fce_subst env') (Core.mkLams bndrs' body)
            arity' = valArgCount `min` 1
        in id `setIdUnfolding`
             mkCoreUnfolding (uf_src unf) (uf_is_top unf) (simpleOptExpr expr')
                             arity' (fixGuid (uf_guidance unf))
      _ -> id
  where
    unf = realIdUnfolding id
    isValArgDesc (ValArg {}) = True
    isValArgDesc _ = False
    valArgCount = count isValArgDesc descs
    noValArgs = valArgCount == 0
    -- Rewrite the argument-discount list to line up with the new arg list
    fixGuid guid@(UnfIfGoodArgs { ug_args = args })
      | noValArgs
      = guid { ug_args = [0] } -- We keep a single Void# lambda in the unfolding
      | otherwise
      = guid { ug_args = fixArgs args descs }
    fixGuid guid = guid
    fixArgs [] [] = []
    fixArgs [] (ValArg _ : _)
      = warnPprTrace True __FILE__ __LINE__
          (text "Out of value discounts" $$
           text "Unfolding:" <+> ppr unf $$
           text "Arg descs:" <+> ppr descs)
        []
    fixArgs args []
      = warnPprTrace True __FILE__ __LINE__
          (text "Leftover arg discounts:" <+> ppr args $$
           text "Unfolding:" <+> ppr unf $$
           text "Arg descs:" <+> ppr descs)
        []
    fixArgs (arg:args) (ValArg _ : descs)
      = arg : fixArgs args descs
    fixArgs (_:args) (FixedVoidArg : descs)
      = fixArgs args descs
    fixArgs args (_ : descs) -- Type argument (fixed or variable)
      = fixArgs args descs
-- Environment for Core -> Sequent Core translation --
-- | Environment for the Core -> Sequent Core translation: a substitution for
-- renamed binders plus a map from join-point ids to their calling conventions.
data FromCoreEnv
  = FCE { fce_subst :: Subst
        , fce_boundKonts :: IdEnv KontCallConv }
initFromCoreEnv :: FromCoreEnv
initFromCoreEnv = FCE { fce_subst = emptySubst
                      , fce_boundKonts = emptyVarEnv }
-- | An initial environment whose in-scope set covers the expression's free
-- variables, so fresh names will not clash with them.
initFromCoreEnvForExpr :: Core.CoreExpr -> FromCoreEnv
initFromCoreEnvForExpr expr = initFromCoreEnv { fce_subst = freeVarSet }
  where
    freeVarSet = mkSubst (mkInScopeSet (Core.exprFreeVars expr))
                   emptyVarEnv emptyVarEnv emptyVarEnv
-- | Record that an id is a join point with the given calling convention.
bindAsJoin :: FromCoreEnv -> JoinId -> KontCallConv -> FromCoreEnv
bindAsJoin env p conv
  = env { fce_boundKonts = extendVarEnv (fce_boundKonts env) p conv }
bindAsJoins :: FromCoreEnv -> [(JoinId, KontCallConv)] -> FromCoreEnv
bindAsJoins env ps = foldr (\(p, conv) env' -> bindAsJoin env' p conv) env ps
-- | Look up the calling convention of a variable, if it is a bound join point.
kontCallConv :: FromCoreEnv -> Var -> Maybe KontCallConv
kontCallConv env var = lookupVarEnv (fce_boundKonts env) var
-- | Translate a Core expression into a Sequent Core command, given the
-- continuation (frames plus end) it is evaluated under. Accumulates the
-- let-bindings it passes so they can wrap the final command.
fromCoreExpr :: FromCoreEnv -> Core.Expr MarkedVar -> SeqCoreKont
             -> SeqCoreCommand
fromCoreExpr env expr (fs, end) = go [] env expr fs end
  where
    go :: [SeqCoreBind] -> FromCoreEnv -> Core.Expr MarkedVar
       -> [SeqCoreFrame] -> SeqCoreEnd -> SeqCoreCommand
    go binds env expr fs end = case expr of
      Core.Var x -> goApp x []
      Core.Lit l -> done $ Lit l
      -- Application of a variable head: may become a jump (see goApp)
      Core.App {} | (Core.Var x, args) <- Core.collectArgs expr
                  -> goApp x args
      Core.App e1 e2 ->
        let e2' = fromCoreExprAsTerm env e2
        in go binds env e1 (App e2' : fs) end
      Core.Lam x e -> done $ fromCoreLams env x e
      Core.Let bs e ->
        let (env', bs') = fromCoreBind env (Just (fs, end)) bs
        in go (bs' : binds) env' e fs end
      Core.Case e (Marked x _) _ as
        -- If the continuation is just a return, copy it into the branches
        | null fs, Return {} <- end -> go binds env e [] end'
        -- Otherwise be more careful. In the simplifier, we get clever and
        -- split the continuation into a duplicable part and a non-duplicable
        -- part (see splitDupableKont); for now just share the whole thing.
        | otherwise ->
          let join_arg = mkKontArgId (idType x')
              join_rhs = Join [join_arg] (Eval (Var join_arg) [] end')
              join_ty = mkKontTy (mkTupleTy UnboxedTuple [idType x'])
              join_bndr = mkInlinableJoinBinder join_ty
              join_bind = NonRec (BindJoin join_bndr join_rhs)
          in go (join_bind : binds) env e [] (Case join_arg [Alt DEFAULT [] (Jump [Var join_arg] join_bndr)])
        where
          (subst_rhs, x') = substBndr subst x
          env_rhs = env { fce_subst = subst_rhs }
          end' = Case x' $ map (fromCoreAlt env_rhs (fs, end)) as
      Core.Coercion co -> done $ Coercion (substCo subst co)
      Core.Cast e co -> go binds env e (Cast (substCo subst co) : fs) end
      Core.Tick ti e -> go binds env e (Tick (substTickish subst ti) : fs) end
      Core.Type t -> done $ Type (substTy subst t)
      where
        subst = fce_subst env
        done term = mkCommand (reverse binds) term fs end
        -- A call to a known join point becomes a Jump (with the fixed
        -- arguments removed); any other head is an ordinary evaluation.
        goApp x args = case conv_maybe of
          Just conv@(ByJump descs)
            -> assert (length args == length descs) $
               doneJump (removeFixedArgs args' conv) p
          Nothing
            -> doneEval (Var x') (map App args' ++ fs) end
          where
            x' = substIdOcc subst x
            args' = map (\e -> fromCoreExprAsTerm env e) args
            conv_maybe = kontCallConv env x'
            p = let Just conv = conv_maybe in idToJoinId x' conv
        doneEval v fs e = mkCommand (reverse binds) v fs e
        doneJump vs j = foldr Let (Jump vs j) (reverse binds)
-- | Translate a Core lambda (first binder already split off) into a Sequent
-- Core term: the body becomes a computation over a fresh return continuation.
fromCoreLams :: FromCoreEnv -> MarkedVar -> Core.Expr MarkedVar
             -> SeqCoreTerm
fromCoreLams env (Marked x _) expr
  = mkLambdas xs' body'
  where
    (xs, body) = Core.collectBinders expr
    bodyComm = fromCoreExpr env' body ([], Return)
    body' = mkCompute ty bodyComm
    (subst', xs') = substBndrs (fce_subst env) (x : map unmark xs)
    env' = env { fce_subst = subst' }
    -- Type of the body after substitution, used for the compute binder
    ty = substTy subst' (Core.exprType (unmarkExpr body))
-- | Translate a Core expression into a standalone Sequent Core term by
-- wrapping it as a computation over a fresh return continuation.
fromCoreExprAsTerm :: FromCoreEnv -> Core.Expr MarkedVar -> SeqCoreTerm
fromCoreExprAsTerm env expr
  = mkCompute ty body
  where
    body = fromCoreExpr env expr ([], Return)
    subst = fce_subst env
    ty = substTy subst (Core.exprType (unmarkExpr expr))
-- | Translate a Core expression into a Sequent Core join point with the given
-- argument descriptors, running its body against the shared continuation.
fromCoreExprAsJoin :: FromCoreEnv -> SeqCoreKont -> [ArgDesc]
                   -> Core.Expr MarkedVar
                   -> SeqCoreJoin
fromCoreExprAsJoin env kont descs expr
  = --pprTrace "fromCoreExprAsJoin" (ppr descs $$ ppr bndrs $$ ppr bndrs_final)
    Join bndrs comm
  where
    -- Eta-expand the body *before* translating to Sequent Core so that the
    -- parameterized continuation has all the arguments it should get
    (env', bndrs, etaBody) = etaExpandForJoinBody env descs expr
    comm = fromCoreExpr env' etaBody kont
-- | Prepare an expression to become a join-point body: strip its existing
-- leading binders (dropping those corresponding to fixed arguments) and, if
-- it has fewer binders than descriptors, eta-expand with fresh binders so the
-- join receives exactly one binder per variable argument.
etaExpandForJoinBody :: HasId b
                     => FromCoreEnv -> [ArgDesc] -> Core.Expr b
                     -> (FromCoreEnv, [Var], Core.Expr b)
etaExpandForJoinBody env descs expr
  = (env', bndrs_final, etaBody)
  where
    subst = fce_subst env
    -- Calculate outer binders (existing ones from expr, minus fixed args)
    (bndrs, body) = collectNBinders (length descs) expr
    bndrs_unmarked = identifiers bndrs
    (subst', bndr_maybes) = mapAccumL doBndr subst (zip bndrs_unmarked descs)
    bndrs' = catMaybes bndr_maybes
    -- Calculate eta-expanding binders and arguments
    extraArgs = drop (length bndrs) descs -- will need to eta-expand with these
    (subst'', unzip -> (etaBndr_maybes, etaArgs))
      = mapAccumL mkEtaBndr subst' (zip [1..] extraArgs)
    etaBndrs = catMaybes etaBndr_maybes
    env' = env { fce_subst = subst'' }
    bndrs_final = bndrs' ++ etaBndrs
    etaBody | null extraArgs = body
            | otherwise = Core.mkApps body etaArgs
    -- Process a binder, possibly dropping it, and return a new subst
    doBndr :: Subst -> (Var, ArgDesc) -> (Subst, Maybe Var)
    doBndr subst (bndr, FixedType ty)
      = (CoreSubst.extendTvSubst subst bndr (substTy subst ty), Nothing)
    doBndr subst (bndr, FixedVoidArg)
      -- Usually, a binder for a Void# is dead, but in case it's not, take the
      -- argument to be void#. Note that, under the let/app invariant, any
      -- argument of unlifted type must be ok-for-speculation, and any
      -- ok-for-speculation expression of Void# is equal to void# (it can't be
      -- _|_ or have side effects or possible errors and still be OFS; it could
      -- still be case x +# y of z -> void#, but then we can eliminate the case).
      -- So this is always correct.
      = (extendSubstWithVar subst bndr voidPrimId, Nothing)
    doBndr subst (bndr, TyArg tyVar)
      = (subst'', Just bndr')
      where
        (subst', bndr') = substBndr subst bndr
        -- Further ArgInfos may refer to tyVar, so we need to substitute to get
        -- the right types for generated arguments (when eta-expanding).
        subst'' = CoreSubst.extendTvSubst subst' tyVar (mkTyVarTy bndr')
    doBndr subst (bndr, ValArg _)
      = (subst', Just bndr')
      where
        (subst', bndr') = substBndr subst bndr
    -- From an ArgDesc, generate an argument to apply and (possibly) a parameter
    -- to the eta-expanded function
    mkEtaBndr :: Subst -> (Int, ArgDesc) -> (Subst, (Maybe Var, Core.Expr b))
    mkEtaBndr subst (_, FixedType ty)
      = (subst, (Nothing, Core.Type (substTy subst ty)))
    mkEtaBndr subst (_, FixedVoidArg)
      = (subst, (Nothing, Core.Var voidPrimId))
    mkEtaBndr subst (_, TyArg tyVar)
      = (subst', (Just tv', Core.Type (mkTyVarTy tv')))
      where
        (subst', tv') = substBndr subst tyVar
    mkEtaBndr subst (n, ValArg ty)
      = (subst', (Just x, Core.Var x))
      where
        (subst', x) = freshEtaId n subst ty
-- | Collect at most @n@ leading lambda binders from an expression, returning
-- them (outermost first) together with the remaining body.
collectNBinders :: TotalArity -> Core.Expr b -> ([b], Core.Expr b)
collectNBinders n0 = loop n0 []
  where
    loop 0 acc e              = (reverse acc, e)
    loop n acc (Core.Lam x e) = loop (n-1) (x:acc) e
    loop _ acc e              = (reverse acc, e)
-- | Translates a Core case alternative into Sequent Core.
-- | Translates a Core case alternative into Sequent Core, running its RHS
-- against the shared continuation.
fromCoreAlt :: FromCoreEnv -> SeqCoreKont -> Core.Alt MarkedVar
            -> SeqCoreAlt
fromCoreAlt env kont (ac, bs, e)
  = let (subst', bs') = substBndrs (fce_subst env) (map unmark bs)
        e' = fromCoreExpr (env { fce_subst = subst' }) e kont
    in Alt ac bs' e'
-- | Translates a Core binding into Sequent Core.
-- | Translates a Core binding into Sequent Core. For binders marked
-- 'MakeKont', the RHS becomes a join point bound with its calling convention;
-- note the lazy pattern on @conv@ and the recursive use of @env_final@, which
-- tie the knot between a pair's translation and the environment it extends.
fromCoreBind :: FromCoreEnv -> Maybe SeqCoreKont -> Core.Bind MarkedVar
             -> (FromCoreEnv, SeqCoreBind)
fromCoreBind (env@FCE { fce_subst = subst }) kont_maybe bind =
  case bind of
    Core.NonRec (Marked x mark) rhs -> (env_final, NonRec pair')
      where
        (subst', x') = substBndr subst x
        env' = env { fce_subst = subst' }
        env_final | MakeKont _ <- mark = bindAsJoin env' x' conv
                  | otherwise = env'
        (~(Just conv), pair') = fromCoreBindPair env kont_maybe x' mark rhs
    Core.Rec pairs -> (env_final, Rec pairs_final)
      where
        xs = map (unmark . fst) pairs
        (subst', xs') = assert (all isId xs) $ substRecBndrs subst xs
        env' = env { fce_subst = subst' }
        pairs' = [ fromCoreBindPair env_final kont_maybe x' mark rhs
                 | (Marked _ mark, rhs) <- pairs
                 | x' <- xs' ]
        env_final = bindAsJoins env' [ (binderOfPair pair, conv)
                                     | (Just conv, pair) <- pairs' ]
        pairs_final = map snd pairs'
-- | Translate one binder/RHS pair. A 'MakeKont' binder becomes a join point
-- (requires the enclosing continuation) and reports its calling convention;
-- a 'MakeFunc' binder becomes an ordinary term binding.
fromCoreBindPair :: FromCoreEnv -> Maybe SeqCoreKont -> Var -> KontOrFunc
                 -> Core.Expr MarkedVar -> (Maybe KontCallConv, SeqCoreBindPair)
fromCoreBindPair env kont_maybe x mark rhs
  = case mark of
      MakeKont descs -> let Just kont = kont_maybe
                            join = fromCoreExprAsJoin env kont descs rhs
                        in (Just (ByJump descs),
                            BindJoin (idToJoinId x (ByJump descs)) join)
      MakeFunc -> (Nothing, BindTerm x $ fromCoreExprAsTerm env rhs)
-- | Translate a list of top-level Core bindings into Sequent Core, threading
-- the environment left-to-right. No continuation exists at top level.
fromCoreBinds :: [Core.Bind MarkedVar] -> [SeqCoreBind]
fromCoreBinds binds = snd (mapAccumL step initFromCoreEnv binds)
  where
    step env bind = fromCoreBind env Nothing bind
------------------------------------------------
-- Public interface for Sequent Core --> Core --
------------------------------------------------
-- | Translates a command into Core.
-- | Translates a command into Core, given the return type of the enclosing
-- computation. A jump to a join point with only type arguments gets an extra
-- void# argument so the Core function is still applied to a value.
commandToCoreExpr :: Type -> SeqCoreCommand -> Core.CoreExpr
commandToCoreExpr retTy comm
  = case comm of
      Let bind comm' -> Core.mkCoreLet (bindToCore (Just retTy) bind)
                                       (commandToCoreExpr retTy comm')
      Eval term fs end -> kontToCoreExpr retTy (fs, end) (termToCoreExpr term)
      Jump args j -> Core.mkCoreApps (Core.Var (joinIdToCore retTy j))
                                     (map termToCoreExpr args ++ extraArgs)
        where
          extraArgs | all isTypeArg args = [ Core.Var voidPrimId ]
                    | otherwise = []
-- | Translates a term into Core.
-- | Translates a term into Core. All cases are structural; a computation
-- becomes its command translated at the computation's type.
termToCoreExpr :: SeqCoreTerm -> Core.CoreExpr
termToCoreExpr val =
  case val of
    Lit l -> Core.Lit l
    Var x -> Core.Var x
    Lam x t -> Core.Lam x (termToCoreExpr t)
    Type t -> Core.Type t
    Coercion co -> Core.Coercion co
    Compute kb c -> commandToCoreExpr kb c
-- | Translates a join point into Core.
-- | Translates a join point into Core as a lambda; a nullary join gets a
-- single void argument. Non-recursive joins mark their value binders
-- one-shot, since a non-recursive join is entered at most once per call site.
joinToCoreExpr :: Type -> SeqCoreJoin -> Core.CoreExpr
joinToCoreExpr = joinToCoreExpr' NonRecursive
joinToCoreExpr' :: RecFlag -> Type -> SeqCoreJoin -> Core.CoreExpr
joinToCoreExpr' recFlag retTy (Join xs comm)
  = Core.mkCoreLams (maybeOneShots xs') (commandToCoreExpr retTy comm)
  where
    xs' | null xs = [ voidArgId ]
        | otherwise = xs
    maybeOneShots xs | isNonRec recFlag = map setOneShotLambdaIfId xs
                     | otherwise = xs
    setOneShotLambdaIfId x | isId x = setOneShotLambda x
                           | otherwise = x
-- | Functional representation of expression contexts in Core.
-- | Functional representation of expression contexts in Core.
type CoreContext = Core.CoreExpr -> Core.CoreExpr
-- | Translates a continuation into a function that will wrap a Core expression
-- with a fragment of context (an argument to apply to, a case expression to
-- run, etc.). Frames apply innermost-first, with the end outermost.
kontToCoreExpr :: Type -> SeqCoreKont -> CoreContext
kontToCoreExpr retTy (fs, end) =
  foldr (flip (.)) (endToCoreExpr retTy end) (map frameToCoreExpr fs)
-- | Translate a single continuation frame into a Core context: an argument
-- application, a cast, or a tick wrapped around the hole.
frameToCoreExpr :: SeqCoreFrame -> CoreContext
frameToCoreExpr (App v)   e = Core.mkCoreApp e (termToCoreExpr v)
frameToCoreExpr (Cast co) e = Core.Cast e co
frameToCoreExpr (Tick ti) e = Core.Tick ti e
-- | Translate a continuation end into a Core context: a case analysis on the
-- hole, or the identity for a plain return.
endToCoreExpr :: Type -> SeqCoreEnd -> CoreContext
endToCoreExpr retTy end = case end of
  Case b as -> \e -> Core.Case e b retTy (map (altToCore retTy) as)
  Return    -> id
-- | Convert a join id to its Core form. For instance, given a return type of
-- String,
-- @j :: Cont# (exists# a. (# a, Int, Char #))
-- becomes
-- @j :: forall a. a -> Int -> Char -> String
joinIdToCore :: Type -> JoinId -> Id
joinIdToCore retTy j = maybeAddArity $ j `setIdType` kontTyToCoreTy argTy retTy
  where
    argTy = isKontTy_maybe (idType j) `orElse` pprPanic "joinIdToCore" (pprBndr LetBind j)
    -- The Core version always takes at least one argument (possibly void#),
    -- so a zero-arity join id gets arity 1
    maybeAddArity j' | idArity j' == 0 = j' `setIdInfo` (idInfo j' `setArityInfo` 1)
                     | otherwise = j'
-- | Convert a continuation type to the type of its Core counterpart, given
-- the return type: existentials become foralls; an unboxed tuple becomes a
-- curried function to the return type (with a trailing existential, if any,
-- becoming a trailing forall); an empty tuple takes a single Void# argument.
kontTyToCoreTy :: Type -> Type -> Type
kontTyToCoreTy ty retTy
  | Just (a, body) <- isUbxExistsTy_maybe ty
  = mkForAllTy a (kontTyToCoreTy body retTy)
  | isUnboxedTupleType ty
  = let (_, args) = splitTyConApp ty
    in if | null args -> mkFunTy voidPrimTy retTy
          | Just (a, ty') <- isUbxExistsTy_maybe (last args)
                      -> mkFunTys (init args)
                                  (mkForAllTy a (kontTyToCoreTy ty' retTy))
          | otherwise -> mkFunTys args retTy
  | otherwise
  -- Fixed panic tag: previously said "kontArgsTyToCoreTy", which is not this
  -- function's name and would mislead anyone chasing the panic.
  = pprPanic "kontTyToCoreTy" (ppr ty)
-- | Translates a binding into Core.
-- | Translates a binding into Core. The return type is only available (and
-- only needed) for non-top-level bindings, which may contain join points.
bindToCore :: Maybe Type -> SeqCoreBind -> Core.CoreBind
bindToCore retTy_maybe bind =
  case bind of
    NonRec pair -> Core.NonRec b v
      where (b, v) = bindPairToCore retTy_maybe NonRecursive pair
    Rec pairs -> Core.Rec (map (bindPairToCore retTy_maybe Recursive) pairs)
-- | Translate one binder/RHS pair; join points require a return type and
-- panic if asked to translate at top level (where no return type exists).
bindPairToCore :: Maybe Type -> RecFlag -> SeqCoreBindPair
               -> (Core.CoreBndr, Core.CoreExpr)
bindPairToCore retTy_maybe recFlag pair =
  case pair of
    BindTerm b v -> (b, termToCoreExpr v)
    BindJoin b pk -> (joinIdToCore retTy b, joinToCoreExpr' recFlag retTy pk)
      where
        retTy = retTy_maybe `orElse` panic "bindPairToCore: top-level cont"
-- | Translates a list of top-level bindings into Core; top level has no
-- return type, hence 'Nothing'.
bindsToCore :: [SeqCoreBind] -> [Core.CoreBind]
bindsToCore = map (bindToCore Nothing)
-- | Translate a case alternative into Core at the given return type.
altToCore :: Type -> SeqCoreAlt -> Core.CoreAlt
altToCore retTy (Alt ac bs c) = (ac, bs, commandToCoreExpr retTy c)
--------------------------------------------------------------
-- Public interface for operations going in both directions --
--------------------------------------------------------------
-- | Take an operation on Sequent Core terms and perform it on Core expressions
-- | Take an operation on Sequent Core terms and perform it on Core expressions
onCoreExpr :: (SeqCoreTerm -> SeqCoreTerm) -> (Core.CoreExpr -> Core.CoreExpr)
onCoreExpr f = termToCoreExpr . f . termFromCoreExpr
-- | Take an operation on Core expressions and perform it on Sequent Core terms
onSequentCoreTerm :: (Core.CoreExpr -> Core.CoreExpr) -> (SeqCoreTerm -> SeqCoreTerm)
onSequentCoreTerm f = termFromCoreExpr . f . termToCoreExpr
----------------
-- Miscellany --
----------------
-- Pretty-printing instances for the analysis datatypes (debug output only).
instance Outputable EscapeAnalysis where
  ppr (EA { ea_nonEsc = nonEsc, ea_allVars = allVars })
    = text "non-escaping:" <+> ppr (mapVarEnv ci_arity nonEsc) $$
      text "    escaping:" <+> ppr (allVars `minusVarEnv` nonEsc)
instance Outputable ScopeType where
  ppr Inside = text "inside scope"
  ppr Outside = text "outside scope"
instance Outputable Occs where
  ppr Esc = text "esc"
  ppr (NonEsc ci) = text "arity" <+> int (ci_arity ci)
instance Outputable KontOrFunc where
  ppr MakeFunc = text "func"
  ppr (MakeKont _) = text "cont"
instance Outputable MarkedVar where
  ppr (Marked var mark) = ppr var <+> brackets (ppr mark)
instance OutputableBndr MarkedVar where
  -- A MakeFunc mark is the common case, so it is not shown
  pprBndr site (Marked var MakeFunc) = pprBndr site var
  pprBndr site (Marked var mark) = pprBndr site var <+> brackets (ppr mark)
  pprPrefixOcc (Marked var _) = pprPrefixOcc var
  pprInfixOcc (Marked var _) = pprInfixOcc var
instance Outputable ArgDesc where
  ppr (FixedType ty) = text "fixed type:" <+> ppr ty
  ppr FixedVoidArg = text "fixed void#"
  ppr (TyArg tyVar) = text "type arg:" <+> pprBndr LambdaBind tyVar
  ppr (ValArg ty) = text "arg of type" <+> ppr ty
-- | Map a function over every binder in a Core expression, preserving the
-- expression structure exactly.
mapCore :: (a -> b) -> Core.Expr a -> Core.Expr b
mapCore f = mapExpr
  where
    mapExpr expr = case expr of
      Core.Var x       -> Core.Var x
      Core.Lit l       -> Core.Lit l
      Core.App e1 e2   -> Core.App (mapExpr e1) (mapExpr e2)
      Core.Lam b e     -> Core.Lam (f b) (mapExpr e)
      Core.Let bind e  -> Core.Let (mapBind bind) (mapExpr e)
      Core.Case scrut bndr ty alts
                       -> Core.Case (mapExpr scrut) (f bndr) ty
                            [ (con, map f bs, mapExpr rhs)
                            | (con, bs, rhs) <- alts ]
      Core.Cast e co   -> Core.Cast (mapExpr e) co
      Core.Tick ti e   -> Core.Tick ti (mapExpr e)
      Core.Type ty     -> Core.Type ty
      Core.Coercion co -> Core.Coercion co
    mapBind (Core.NonRec bndr rhs) = Core.NonRec (f bndr) (mapExpr rhs)
    mapBind (Core.Rec pairs)       = Core.Rec [ (f bndr, mapExpr rhs)
                                              | (bndr, rhs) <- pairs ]
-- | Strip the contification marks off every binder, recovering a plain Core
-- expression.
unmarkExpr :: Core.Expr MarkedVar -> Core.CoreExpr
unmarkExpr = mapCore unmark
-- copied from CoreArity
-- copied from CoreArity
-- | Make a fresh system-local id of the given type for eta-expansion, unique
-- against the substitution's in-scope set, and extend the in-scope set with it.
freshEtaId :: Int -> Subst -> Type -> (Subst, Id)
freshEtaId n subst ty
  = (subst', eta_id')
  where
    ty' = substTy subst ty
    eta_id' = uniqAway (substInScope subst) $
              mkSysLocal (fsLit "eta") (mkBuiltinUnique n) ty'
    subst' = extendInScope subst eta_id'
| pdownen/sequent-core | src/Language/SequentCore/Translate.hs | bsd-3-clause | 48,887 | 151 | 19 | 12,925 | 12,342 | 6,399 | 5,943 | 755 | 12 |
{-# LANGUAGE InstanceSigs #-}
module PFDS.Sec9.Ex11 where
import PFDS.Commons.Heap (Heap(..))
-- A binomial tree: an element plus a list of child trees.
data Tree a = Node a [Tree a] deriving Show
-- A redundant-binary digit: zero trees, a run of one-or-more trees, or
-- (transiently, before 'fixup' runs) two trees of the same rank.
data Digit a = Zero | Ones [Tree a] | Two (Tree a) (Tree a) deriving Show
-- A binomial heap as a list of digits, least-significant rank first.
newtype BHeap a = BH [Digit a] deriving Show
-- | Binomial heap with a redundant (zeroless-style) digit representation.
-- 'findMin' is now implemented; 'deleteMin' remains an open exercise.
instance Heap BHeap where
  empty :: Ord a => BHeap a
  empty = BH []
  isEmpty :: Ord a => BHeap a -> Bool
  isEmpty (BH ts) = null ts
  insert :: Ord a => a -> BHeap a -> BHeap a
  insert x (BH ts) = BH $ fixup $ simpleInsTree (Node x []) ts
  merge :: Ord a => BHeap a -> BHeap a -> BHeap a
  merge (BH ts1) (BH ts2) = BH $ merge' ts1 ts2
  findMin :: Ord a => BHeap a -> a
  -- 'link' always keeps the smaller root on top, so every tree root is the
  -- minimum of its tree; the heap minimum is therefore the least root.
  -- Partial on an empty heap (minimum of []), as is usual for PFDS heaps.
  findMin (BH ds) = minimum [ x | d <- ds, Node x _ <- digitTrees d ]
    where
      digitTrees Zero        = []
      digitTrees (Ones ts)   = ts
      digitTrees (Two t1 t2) = [t1, t2]
  deleteMin :: Ord a => BHeap a -> BHeap a
  deleteMin = undefined -- still unimplemented (exercise)
-- helper functions
-- | Link two binomial trees of equal rank, keeping the smaller root on top so
-- every tree root remains the minimum of its tree.
link :: Ord a => Tree a -> Tree a -> Tree a
link t1@(Node x1 c1) t2@(Node x2 c2)
  | x1 <= x2  = Node x1 (t2 : c1)
  | otherwise = Node x2 (t1 : c2)
-- | Merge two digit lists rank by rank. Matching Ones digits carry a linked
-- tree (a "Two") into the next rank via 'simpleInsTree'; leftover trees from
-- a Ones run are pushed back with 'f', and skipped tails are renormalized
-- with 'fixup'.
merge' :: Ord a => [Digit a] -> [Digit a] -> [Digit a]
merge' ds1 [] = ds1
merge' [] ds2 = ds2
merge' (Zero : ds1) (Zero : ds2) = Zero : merge' (fixup ds1) (fixup ds2)
merge' (Ones (t1 : ts1) : ds1) (Zero : ds2) = ones [t1] (merge' (f ts1 ds1) (fixup ds2))
merge' (Zero : ds1) (Ones (t2 : ts2) : ds2) = ones [t2] (merge' (fixup ds1) (f ts2 ds2))
merge' (Ones (t1 : ts1) : ds1) (Ones (t2 : ts2) : ds2) = Zero : simpleInsTree (link t1 t2) (merge' (f ts1 ds1) (f ts2 ds2))
-- | Re-attach the remainder of a Ones run: if no trees are left over just
-- renormalize the tail, otherwise push the trees back onto it.
f :: Ord a => [Tree a] -> [Digit a] -> [Digit a]
f [] = fixup
f ts = ones ts
-- | Insert a single tree at the head of a digit list, producing at most one
-- 'Two' (resolved later by 'fixup').
-- NOTE(review): partial — no equations for a leading 'Two' or for 'Ones []';
-- presumably the representation invariant rules those inputs out; confirm.
simpleInsTree :: Tree a -> [Digit a] -> [Digit a]
simpleInsTree t [] = [Ones [t]]
simpleInsTree t (Zero : ds) = ones [t] ds
simpleInsTree t1 (Ones (t2 : ts) : ds) = Two t1 t2 : ones ts ds
-- | Prepend a run of trees as a Ones digit, coalescing with an existing
-- leading Ones run.
ones :: [Tree a]-> [Digit a] -> [Digit a]
ones [] ds = ds
ones ts1 (Ones ts2 : ds) = Ones (ts1 ++ ts2) : ds -- probably fine
ones ts ds = Ones ts : ds
-- | Restore the digit invariant: a 'Two' in the first or second position is
-- resolved by linking its trees and carrying the result one rank up.
fixup :: Ord a => [Digit a] -> [Digit a]
fixup (Two t1 t2 : ds) = Zero : simpleInsTree (link t1 t2) ds
fixup (Ones ts : Two t1 t2 : ds) = Ones ts : Zero : simpleInsTree (link t1 t2) ds
fixup ds = ds
| matonix/pfds | src/PFDS/Sec9/Ex11.hs | bsd-3-clause | 1,992 | 0 | 10 | 487 | 1,159 | 587 | 572 | 46 | 2 |
module MyQuickCheck where
import Game
import ChessBoard
import Elements
import Test.QuickCheck
-- | Generate either player with equal probability. 'elements' is the
-- idiomatic QuickCheck spelling of @oneof [return a, return b]@ and has the
-- same uniform distribution.
instance Arbitrary Player where
  arbitrary = elements [WhitePl, BlackPl]
-- | Inverting a player twice gives back the original player.
prop_invertPlayer :: Player -> Bool
prop_invertPlayer x = (inverse $ inverse x) == x
-- | Every coordinate with both components drawn from [1, 8] lies inside the
-- board.
prop_insideBoard :: Property
prop_insideBoard = forAll (choose ( 1, 8)) $ \ x -> forAll (choose ( 1, 8)) $ \ y ->
  insideBoard (x, y)
-- | Run every QuickCheck property defined in this module.
testQ :: IO ()
testQ = do
  quickCheck prop_invertPlayer
  quickCheck prop_insideBoard
| leskiw77/Checkers | test/MyQuickCheck.hs | bsd-3-clause | 434 | 0 | 11 | 96 | 147 | 78 | 69 | 13 | 1 |
{-# OPTIONS_GHC -fno-warn-unused-do-bind #-}
module Wyas.Parser
( readExpr
, parseExpr
) where
import Wyas.Types
import Wyas.Parser.Primitives
import Wyas.Error
import Text.ParserCombinators.Parsec hiding (spaces)
import Control.Monad (liftM)
import Control.Monad.Error (throwError)
import Data.Vector (fromList)
-- | Parse a single Lisp expression from the given input string, wrapping any
-- parse failure in the 'Parser' error constructor.
readExpr :: String -> ThrowsError LispVal
readExpr input =
  either (throwError . Parser) return (parse parseExpr "lisp" input)
-- | Parse any Lisp expression. Alternatives that can consume input before
-- failing (vectors, chars, the numeric literals, booleans) are wrapped in
-- 'try' so a failure backtracks; the ordering of the numeric parsers matters
-- (complex before float before rational before plain number).
parseExpr :: Parser LispVal
parseExpr = parseAtom
        <|> parseString
        <|> parseQuoted
        <|> parseQuasiQuoted
        <|> parseUnQuote
        <|> parseAnyList
        <|> try parseVector
        <|> try parseChar
        <|> try parseComplex
        <|> try parseFloat
        <|> try parseRational
        <|> try parseNumber
        <|> try parseBool
--
-- Lists, dotted lists, quoted datums
--
-- | Parse either a proper list @(a b c)@ or a dotted list @(a b . c)@.
-- The tail parser uses @Nil ()@ as a sentinel meaning "no dot was present",
-- which is turned into a proper 'List' afterwards.
parseAnyList :: Parser LispVal
parseAnyList = do
  char '('
  optional spaces
  h <- sepEndBy parseExpr spaces
  t <- (char '.' >> spaces >> parseExpr) <|> return (Nil ())
  optional spaces
  char ')'
  return $ case t of
    (Nil ()) -> List h
    _ -> DottedList h t
-- | Parse the quote abbreviation: @'x@ becomes @(quote x)@.
parseQuoted :: Parser LispVal
parseQuoted = do
  _ <- char '\''
  datum <- parseExpr
  return $ List [Atom "quote", datum]
-- | Parse the quasiquote abbreviation: @`x@ becomes @(quasiquote x)@.
parseQuasiQuoted :: Parser LispVal
parseQuasiQuoted =
  char '`' >> fmap (\datum -> List [Atom "quasiquote", datum]) parseExpr
-- | Parse the unquote abbreviation: @,x@ becomes @(unquote x)@.
parseUnQuote :: Parser LispVal
parseUnQuote =
  char ',' >> fmap (\datum -> List [Atom "unquote", datum]) parseExpr
-- | Parse a vector literal such as @#(1 2 3)@.
--
-- The elements are bound directly as a list. The previous version wrapped
-- them in 'List' via 'liftM' only to immediately pattern-match the
-- constructor back off — a redundant round-trip and a partial pattern.
parseVector :: Parser LispVal
parseVector = do
  string "#("
  elems <- sepBy parseExpr spaces
  char ')'
  return . Vector $ fromList elems
| grtlr/wyas | src/Wyas/Parser.hs | bsd-3-clause | 1,872 | 0 | 16 | 594 | 541 | 261 | 280 | 57 | 2 |
{-# LANGUAGE MultiParamTypeClasses, FlexibleContexts, TypeFamilies #-}
{-# LANGUAGE ConstraintKinds #-}
{-# OPTIONS -Wall #-}
module RegisterMachine.Models.CM2 (
mach
) where
import Basic.Types
import Basic.MemoryImpl (ListMem, fillMem, Address(..))
import Basic.Features
import RegisterMachine.State
import RegisterMachine.Operations
import RegisterMachine.Machine
--4.1.2 Base model 2
-- | Instruction set for base model 2: increment a register, halt, clear a
-- register, and jump-if-equal comparing two registers.
data Lang i = INC i | Halt | CLR i | JE i i i
instance Language (Lang i)
instance IsHalt (Lang i) where
  isHaltInstr Halt = True
  isHaltInstr _ = False
-- | Map each instruction to its state transition; registers are addressed
-- through the 'A' wrapper. 'Halt' leaves the state unchanged.
trans :: (Zero (HContents st), RWValue (Address v) (Heap st) (HContents st),
          Incr v, Incr (HContents st),
          HasQ st, HasHeap st, Q st ~ Address v) =>
  Lang v -> st -> st
trans (INC r) = incrMem (A r)
trans Halt = id
trans (CLR r) = clearMem (A r)
trans (JE r1 r2 z) = jumpe2Mem (A r1) (A r2) (A z)
-- An example program stored in list-backed memory.
prog :: ListMem (Lang Int)
prog = fillMem [CLR 2, INC 3, INC 1, JE 0 1 1, JE 0 1 2, CLR 1, INC 2, JE 0 2 6, Halt]
-- | The counter machine built from the example program, the initial counter
-- machine state, and the compiled transition function.
mach :: RM1 (Lang Int) ListMem (CounterMachineState Int ListMem (Address Int))
mach = RM prog initCM (compile trans)
| davidzhulijun/TAM | RegisterMachine/Models/CM2.hs | bsd-3-clause | 1,106 | 0 | 9 | 228 | 468 | 244 | 224 | 28 | 1 |
module Idris.Directives where
import Idris.AbsSyntax
import Idris.ASTUtils
import Idris.Imports
import Idris.Core.Evaluate
import Idris.Core.TT
import Util.DynamicLinker
-- | Run the action corresponding to a directive
-- | Run the action corresponding to a directive. Most actions both update the
-- in-memory state and record an IBC entry so the effect persists when the
-- module is reloaded from its .ibc file.
directiveAction :: Directive -> Idris ()
directiveAction (DLib cgn lib) = do addLib cgn lib
                                    addIBC (IBCLib cgn lib)
directiveAction (DLink cgn obj) = do dirs <- allImportDirs
                                     o <- runIO $ findInPath dirs obj
                                     addIBC (IBCObj cgn obj) -- just name, search on loading ibc
                                     addObjectFile cgn o
directiveAction (DFlag cgn flag) = do addIBC (IBCCGFlag cgn flag)
                                      addFlag cgn flag
directiveAction (DInclude cgn hdr) = do addHdr cgn hdr
                                        addIBC (IBCHeader cgn hdr)
directiveAction (DHide n) = do setAccessibility n Hidden
                               addIBC (IBCAccess n Hidden)
directiveAction (DFreeze n) = do setAccessibility n Frozen
                                 addIBC (IBCAccess n Frozen)
directiveAction (DAccess acc) = do updateIState (\i -> i { default_access = acc })
directiveAction (DDefault tot) = do updateIState (\i -> i { default_total = tot })
directiveAction (DLogging lvl) = setLogLevel (fromInteger lvl)
directiveAction (DDynamicLibs libs) = do added <- addDyLib libs
                                         case added of
                                           Left lib -> addIBC (IBCDyLib (lib_name lib))
                                           Right msg -> fail $ msg
directiveAction (DNameHint ty ns) = do ty' <- disambiguate ty
                                       mapM_ (addNameHint ty') ns
                                       mapM_ (\n -> addIBC (IBCNameHint (ty', n))) ns
directiveAction (DErrorHandlers fn arg ns) = do fn' <- disambiguate fn
                                                ns' <- mapM disambiguate ns
                                                addFunctionErrorHandlers fn' arg ns'
                                                mapM_ (addIBC .
                                                       IBCFunctionErrorHandler fn' arg) ns'
directiveAction (DLanguage ext) = addLangExt ext
directiveAction (DUsed fc fn arg) = addUsedName fc fn arg
-- | Resolve a possibly-ambiguous name against the implicit-argument context:
-- succeed on a unique match, and report the appropriate elaboration error for
-- no match or several matches.
disambiguate :: Name -> Idris Name
disambiguate n = do i <- getIState
                    case lookupCtxtName n (idris_implicits i) of
                      [(n', _)] -> return n'
                      []        -> throwError (NoSuchVariable n)
                      more      -> throwError (CantResolveAlts (map fst more))
| bkoropoff/Idris-dev | src/Idris/Directives.hs | bsd-3-clause | 2,714 | 0 | 14 | 1,120 | 750 | 358 | 392 | 45 | 3 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE CPP #-}
module GHC.Environment (getFullArgs) where
import GHC.Base
import GHC.Pack
import Java
import Java.StringBase
-- | Fetch the full program argument list from the Eta runtime, converting the
-- Java string array into Haskell strings.
getFullArgs :: IO [String]
getFullArgs = fmap (map fromJString . fromJava) getFullArgs'
-- FFI binding to the Eta runtime's stored program-argument array.
foreign import java unsafe "@static eta.runtime.Runtime.getLocalProgramArguments"
  getFullArgs' :: IO JStringArray
| rahulmutt/ghcvm | libraries/base/GHC/Environment.hs | bsd-3-clause | 428 | 3 | 12 | 60 | 91 | 51 | 40 | -1 | -1 |
-- {-# OPTIONS_GHC -fno-warn-redundant-constraints #-}
{-# LANGUAGE RankNTypes #-}
module T3692 where
-- A type synonym whose expansion hides a class context under an arrow:
-- this exercises forall-hoisting when the synonym is expanded.
type Foo a b = () -> (Bar a => a)
-- Empty class, used only as the context inside 'Foo'.
class Bar a where {}

-- Instantiating 'Foo' at fresh variables @p q@ forces the synonym to be
-- expanded (and its foralls hoisted) while unifying with 'id'.
foo :: Foo a b
foo = id (undefined :: Foo p q)
| rahulmutt/ghcvm | tests/suite/typecheck/compile/T3692.hs | bsd-3-clause | 208 | 0 | 8 | 45 | 70 | 40 | 30 | -1 | -1 |
{-# LANGUAGE Haskell98 #-}
{-# LINE 1 "System/Log/FastLogger/Date.hs" #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
-- |
-- Formatting time is slow.
-- This package provides mechanisms to cache formatted date.
module System.Log.FastLogger.Date (
-- * Types
TimeFormat
, FormattedTime
-- * Date cacher
, newTimeCache
, simpleTimeFormat
, simpleTimeFormat'
) where
import Control.AutoUpdate (mkAutoUpdate, defaultUpdateSettings, updateAction)
import Data.ByteString (ByteString)
import Data.UnixTime (formatUnixTime, fromEpochTime)
import System.Posix (EpochTime, epochTime)
----------------------------------------------------------------
-- | Type aliaes for date format and formatted date.
type FormattedTime = ByteString
type TimeFormat = ByteString
----------------------------------------------------------------
-- | Get date using UnixTime.
getTime :: IO EpochTime
getTime = epochTime
-- | Render an 'EpochTime' according to the given strftime-style format
-- string, producing the formatted bytes.
formatDate :: TimeFormat -> EpochTime -> IO FormattedTime
formatDate fmt epoch = formatUnixTime fmt (fromEpochTime epoch)
----------------------------------------------------------------
-- | Build an 'IO' action that yields a cached formatted time.
--
-- Formatting a timestamp is comparatively expensive, so the value is
-- produced through an auto-updating cache (see "Control.AutoUpdate")
-- that refreshes about once per second.
newTimeCache :: TimeFormat -> IO (IO FormattedTime)
newTimeCache fmt = mkAutoUpdate settings
  where
    settings = defaultUpdateSettings { updateAction = formatDate fmt =<< getTime }
-- | A simple time cache using format @"%d/%b/%Y:%T %z"@
simpleTimeFormat :: TimeFormat
simpleTimeFormat = "%d/%b/%Y:%T %z"
-- | A simple time cache using format @"%d-%b-%Y %T"@
simpleTimeFormat' :: TimeFormat
simpleTimeFormat' = "%d-%b-%Y %T"
| phischu/fragnix | tests/packages/scotty/System.Log.FastLogger.Date.hs | bsd-3-clause | 1,863 | 0 | 9 | 333 | 218 | 134 | 84 | 27 | 1 |
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE ExistentialQuantification, DeriveDataTypeable #-}
-- |
-- Module : Scion.Types
-- Copyright : (c) Thomas Schilling 2008
-- License : BSD-style
--
-- Maintainer : nominolo@gmail.com
-- Stability : experimental
-- Portability : portable
--
-- Types used throughout Scion.
--
module Scion.Types
( module Scion.Types
, liftIO, MonadIO
) where
import Scion.Types.Notes
import Scion.Types.ExtraInstances()
import GHC
import HscTypes
import MonadUtils ( liftIO, MonadIO )
import Exception
import qualified Data.Map as M
import qualified Data.MultiSet as MS
import Distribution.Simple.LocalBuildInfo
import Control.Monad ( when )
import Data.IORef
import Data.Monoid
import Data.Time.Clock ( NominalDiffTime )
import Data.Typeable
import Control.Exception
import Control.Applicative
------------------------------------------------------------------------------
-- * The Scion Monad and Session State
-- XXX: Can we get rid of some of this maybe stuff?
data SessionState
= SessionState {
scionVerbosity :: Verbosity,
initialDynFlags :: DynFlags,
-- ^ The DynFlags as they were when Scion was started. This is used
-- to reset flags when opening a new project. Arguably, the GHC API
-- should provide calls to reset a session.
localBuildInfo :: Maybe LocalBuildInfo,
-- ^ Build info from current Cabal project.
activeComponent :: Maybe Component,
-- ^ The current active Cabal component. This affects DynFlags and
-- targets. ATM, we don't support multiple active components.
lastCompResult :: CompilationResult,
focusedModule :: Maybe ModSummary,
-- ^ The currently focused module for background typechecking.
bgTcCache :: Maybe BgTcCache,
-- ^ Cached state of the background typechecker.
defSiteDB :: DefSiteDB,
-- ^ Source code locations.
client :: String
-- ^ can be set by the client. Only used by vim to enable special hack
}
mkSessionState :: DynFlags -> IO (IORef SessionState)
mkSessionState dflags =
newIORef (SessionState normal dflags Nothing Nothing mempty Nothing Nothing mempty "")
newtype ScionM a
= ScionM { unScionM :: IORef SessionState -> Ghc a }
instance Monad ScionM where
return x = ScionM $ \_ -> return x
(ScionM ma) >>= fb =
ScionM $ \s -> do
a <- ma s
unScionM (fb a) s
fail msg = dieHard msg
instance Functor ScionM where
fmap f (ScionM ma) =
ScionM $ \s -> fmap f (ma s)
instance Applicative ScionM where
pure a = ScionM $ \_ -> return a
ScionM mf <*> ScionM ma =
ScionM $ \s -> do f <- mf s; a <- ma s; return (f a)
liftScionM :: Ghc a -> ScionM a
liftScionM m = ScionM $ \_ -> m
instance MonadIO ScionM where
liftIO m = liftScionM $ liftIO m
instance ExceptionMonad ScionM where
gcatch (ScionM act) handler =
ScionM $ \s -> act s `gcatch` (\e -> unScionM (handler e) s)
gblock (ScionM act) = ScionM $ \s -> gblock (act s)
gunblock (ScionM act) = ScionM $ \s -> gunblock (act s)
instance WarnLogMonad ScionM where
setWarnings = liftScionM . setWarnings
getWarnings = liftScionM getWarnings
instance GhcMonad ScionM where
getSession = liftScionM getSession
setSession = liftScionM . setSession
modifySessionState :: (SessionState -> SessionState) -> ScionM ()
modifySessionState f =
ScionM $ \r -> liftIO $ do s <- readIORef r; writeIORef r $! f s
getSessionState :: ScionM SessionState
getSessionState = ScionM $ \s -> liftIO $ readIORef s
gets :: (SessionState -> a) -> ScionM a
gets sel = getSessionState >>= return . sel
setSessionState :: SessionState -> ScionM ()
setSessionState s' = ScionM $ \r -> liftIO $ writeIORef r s'
------------------------------------------------------------------------------
-- ** Verbosity Levels
data Verbosity
= Silent
| Normal
| Verbose
| Deafening
deriving (Eq, Ord, Show, Enum, Bounded)
-- | Convert an 'Int' to a 'Verbosity', clamping out-of-range values to
-- the nearest bound.
intToVerbosity :: Int -> Verbosity
intToVerbosity n = toEnum clamped
  where
    lowest  = fromEnum (minBound :: Verbosity)
    highest = fromEnum (maxBound :: Verbosity)
    clamped = max lowest (min highest n)
verbosityToInt :: Verbosity -> Int
verbosityToInt = fromEnum
silent :: Verbosity
silent = Silent
normal :: Verbosity
normal = Normal
verbose :: Verbosity
verbose = Verbose
deafening :: Verbosity
deafening = Deafening
getVerbosity :: ScionM Verbosity
getVerbosity = gets scionVerbosity
setVerbosity :: Verbosity -> ScionM ()
setVerbosity v = modifySessionState $ \s -> s { scionVerbosity = v }
-- | Print @s@ to stdout, but only when the session's current verbosity
-- is at least @v@.
message :: Verbosity -> String -> ScionM ()
message v s = do
  threshold <- getVerbosity
  when (threshold >= v) (liftIO (putStrLn s))
------------------------------------------------------------------------
-- * Reflection into IO
-- | Reflect a computation in the 'ScionM' monad into the 'IO' monad.
reflectScionM :: ScionM a -> (IORef SessionState, Session) -> IO a
reflectScionM (ScionM f) = \(st, sess) -> reflectGhc (f st) sess
-- | Dual to 'reflectScionM'. See its documentation.
reifyScionM :: ((IORef SessionState, Session) -> IO a) -> ScionM a
reifyScionM act = ScionM $ \st -> reifyGhc $ \sess -> act (st, sess)
------------------------------------------------------------------------------
-- * Compilation Results
data BgTcCache
= Parsed ParsedModule
| Typechecked TypecheckedModule
data CompilationResult = CompilationResult {
compilationSucceeded :: Bool,
compilationNotes :: MS.MultiSet Note,
compilationTime :: NominalDiffTime
}
instance Monoid CompilationResult where
mempty = CompilationResult True mempty 0
mappend r1 r2 =
CompilationResult
{ compilationSucceeded =
compilationSucceeded r1 && compilationSucceeded r2
, compilationNotes =
compilationNotes r1 `MS.union` compilationNotes r2
, compilationTime = compilationTime r1 + compilationTime r2
}
------------------------------------------------------------------------------
-- * Exceptions
-- | Any exception raised inside Scion is a subtype of this exception.
data SomeScionException
= forall e. (Exception e) => SomeScionException e
deriving Typeable
instance Show SomeScionException where show (SomeScionException e) = show e
instance Exception SomeScionException
scionToException :: Exception e => e -> SomeException
scionToException = toException . SomeScionException
scionFromException :: Exception e => SomeException -> Maybe e
scionFromException x = do
SomeScionException e <- fromException x
cast e
-- | A fatal error. Like 'error' but suggests submitting a bug report.
dieHard :: String -> a
dieHard last_wish = do
error $ "************** Panic **************\n" ++
last_wish ++
"\nPlease file a bug report at:\n " ++ bug_tracker_url
where
bug_tracker_url = "http://code.google.com/p/scion-lib/issues/list"
------------------------------------------------------------------------------
-- * Others \/ Helpers
data Component
= Library
| Executable String
| File FilePath
deriving (Eq, Show, Typeable)
-- | Shorthand for 'undefined'.
__ :: a
__ = undefined
-- * Go To Definition
-- | A definition site database.
--
-- This is a map from names to the location of their definition and
-- information about the defined entity. Note that a name may refer to
-- multiple entities.
--
-- XXX: Currently we use GHC's 'TyThing' data type. However, this probably
-- holds on to a lot of stuff we don't need. It also cannot be serialised
-- directly. The reason it's done this way is that wrapping 'TyThing' leads
-- to a lot of duplicated code. Using a custom type might be useful to have
-- fewer dependencies on the GHC API; however it also creates problems
-- mapping things back into GHC API data structures. If we do this, we
-- should at least remember the 'Unique' in order to quickly look up the
-- original thing.
newtype DefSiteDB =
DefSiteDB (M.Map String [(Location,TyThing)])
instance Monoid DefSiteDB where
mempty = emptyDefSiteDB
mappend = unionDefSiteDB
-- | The empty 'DefSiteDB'.
emptyDefSiteDB :: DefSiteDB
emptyDefSiteDB = DefSiteDB M.empty
-- | Combine two 'DefSiteDB's. XXX: check for duplicates?
unionDefSiteDB :: DefSiteDB -> DefSiteDB -> DefSiteDB
unionDefSiteDB (DefSiteDB m1) (DefSiteDB m2) =
DefSiteDB (M.unionWith (++) m1 m2)
-- | Return the list of defined names (the domain) of the 'DefSiteDB'.
-- The result is, in fact, ordered.
definedNames :: DefSiteDB -> [String]
definedNames (DefSiteDB m) = M.keys m
-- | Return all the entities that the given name may refer to; names
-- absent from the database yield the empty list.
lookupDefSite :: DefSiteDB -> String -> [(Location, TyThing)]
-- 'findWithDefault' expresses the lookup-or-empty pattern directly,
-- replacing the hand-rolled case on 'M.lookup'.
lookupDefSite (DefSiteDB m) key = M.findWithDefault [] key m
-- use this exception for everything else which is not important enough to
-- create a new Exception (kiss)
-- some more Exception types are defined in Session.hs (TODO?)
data ScionError = ScionError String
deriving (Show, Typeable)
instance Exception ScionError where
toException = scionToException
fromException = scionFromException
scionError :: String -> ScionM a
scionError = liftIO . throwIO . ScionError
-- will be extended in the future
data CabalConfiguration = CabalConfiguration {
distDir :: FilePath,
extraArgs :: [String] -- additional args used to configure the project
}
type FileComponentConfiguration =
( FilePath, -- rel filepath to config file
[String] -- set of flags to be used to compile that file
)
-- the ScionProjectConfig is a project specific configuration file
-- The syntax must be simple and human readable. One JSON object per line.
-- Example:
-- { 'type' : 'build-configuration', 'dist-dir' : 'dist-custom', 'extra-args' : [ ] }
-- helperf functions see Utils.hs
data ScionProjectConfig = ScionProjectConfig {
buildConfigurations :: [CabalConfiguration],
fileComponentExtraFlags :: [FileComponentConfiguration],
scionDefaultCabalConfig :: Maybe String
}
emptyScionProjectConfig :: ScionProjectConfig
emptyScionProjectConfig = ScionProjectConfig [] [] Nothing
| CristhianMotoche/scion | lib/Scion/Types.hs | bsd-3-clause | 10,154 | 13 | 15 | 2,055 | 2,098 | 1,134 | 964 | 184 | 2 |
module Paper.Haskell where
import Data.Ix
import Data.Char
import Data.List
import System.Directory
import System.FilePath
import Paper.Util.FileData
import Paper.Haskell.Fragment
import Paper.Haskell.Check
-- | Check the Haskell fragments embedded in a paper's main source file.
-- @obj@ is accepted for interface compatibility but is not otherwise
-- used here.  If an @Include.hs@ sits next to the sources it is passed
-- to the checker as a preamble.  The flag @\"d\"@ enables debug output;
-- every remaining flag is parsed as a line range restricting which
-- fragments get checked.
haskell obj files = do
    let incFile = directory files </> "Include.hs"
    b <- doesFileExist incFile
    -- Preamble is empty when no Include.hs exists.
    pre <- if b then readFile incFile else return ""
    src <- readFile (directory files </> mainFile files)
    let (debug,ranges) = partition (== "d") $ flags files
    checkFragments (not $ null debug)
                   (checkRange $ parseRanges ranges)
                   pre (parseFragments src)
-- | Parse range flags of the form @from..to@ or @from@ / @from..@
-- (an omitted upper bound means "to the end").  No flags at all means
-- the full, unbounded range.
parseRanges :: [String] -> [(Int,Int)]
parseRanges [] = [(minBound,maxBound)]
parseRanges specs = map range specs
    where
        range spec = (read lo, if null hi then maxBound else read hi)
            where
                (lo, rest) = span isDigit spec
                hi = dropWhile (== '.') rest
-- | True when the line number falls inside any of the given inclusive
-- ranges (i.e. inside their union).  An empty range list admits nothing.
checkRange :: [(Int,Int)] -> Int -> Bool
checkRange ranges i = any (`inRange` i) ranges
rep from with = map (\x -> if x == from then with else x)
| saep/neil | src/Paper/Haskell.hs | bsd-3-clause | 1,203 | 0 | 12 | 320 | 418 | 221 | 197 | 28 | 2 |
import Language.KansasLava
-- define a circuit
halfAdder a b = (sum,carry)
where sum = xor2 a b
carry = and2 a b
fullAdder a b cin = (sum,cout)
where (s1,c1) = probe "ha1" halfAdder a b -- probe an entire function
(sum,c2) = halfAdder cin s1
cout = xor2 c1 (probe "c2" c2) -- probe a single sequence
-- turn it into a fabric
dut = do
let a = toSeq $ cycle [True,False]
b = toSeq $ cycle [True,True,False,False]
cin = toSeq $ cycle [False,False,False,False,True]
(sum,cout) = fullAdder a b cin
outStdLogic "sum" sum
outStdLogic "cout" cout
-- get the first 20 values of each probe and print it
-- printProbes -> horizontal, one probe per line
-- printProbeTable -> vertical, cycle count and one probe per column
main = do
-- if KANSAS_LAVA_PROBE=capture
streams <- probeCircuit 20 dut
-- you have two options for output, list of probes or a table
-- we do both here
printProbes streams
putStrLn ""
printProbeTable streams
-- if KANSAS_LAVA_PROBE=trace
let (s,c) = fullAdder (toSeq $ cycle [True,False])
(toSeq $ cycle [True,True,False,False])
(toSeq $ cycle [False,False,False,False,True])
-- have to force evaluation (traces are lazy too)
-- print isn't a good way, as the print/trace info is interleaved,
-- but it's simple. We ascribe the type to 's' to specify the clock
print (s :: Seq Bool,c)
-- run this file with:
-- KANSAS_LAVA_PROBE=capture ghci -i../ -i../dist/build/autogen Probes.hs
{- ------------------------- Output --------------------------------------------
0c2: TraceStream B [0b0,0b0,0b0,0b0,0b0,0b0,0b0,0b0,0b0,0b1,0b0,0b0,0b0,0b0,0b1,0b0,0b0,0b0,0b0,0b0]
2ha1-snd: TraceStream B [0b1,0b0,0b0,0b0,0b1,0b0,0b0,0b0,0b1,0b0,0b0,0b0,0b1,0b0,0b0,0b0,0b1,0b0,0b0,0b0]
2ha1-fst: TraceStream B [0b0,0b1,0b1,0b0,0b0,0b1,0b1,0b0,0b0,0b1,0b1,0b0,0b0,0b1,0b1,0b0,0b0,0b1,0b1,0b0]
1ha1: TraceStream B [0b1,0b1,0b0,0b0,0b1,0b1,0b0,0b0,0b1,0b1,0b0,0b0,0b1,0b1,0b0,0b0,0b1,0b1,0b0,0b0]
0ha1: TraceStream B [0b1,0b0,0b1,0b0,0b1,0b0,0b1,0b0,0b1,0b0,0b1,0b0,0b1,0b0,0b1,0b0,0b1,0b0,0b1,0b0]
clk 0c2 2ha1-snd 2ha1-fst 1ha1 0ha1
0 0b0 0b1 0b0 0b1 0b1
1 0b0 0b0 0b1 0b1 0b0
2 0b0 0b0 0b1 0b0 0b1
3 0b0 0b0 0b0 0b0 0b0
4 0b0 0b1 0b0 0b1 0b1
5 0b0 0b0 0b1 0b1 0b0
6 0b0 0b0 0b1 0b0 0b1
7 0b0 0b0 0b0 0b0 0b0
8 0b0 0b1 0b0 0b1 0b1
9 0b1 0b0 0b1 0b1 0b0
10 0b0 0b0 0b1 0b0 0b1
11 0b0 0b0 0b0 0b0 0b0
12 0b0 0b1 0b0 0b1 0b1
13 0b0 0b0 0b1 0b1 0b0
14 0b1 0b0 0b1 0b0 0b1
15 0b0 0b0 0b0 0b0 0b0
16 0b0 0b1 0b0 0b1 0b1
17 0b0 0b0 0b1 0b1 0b0
18 0b0 0b0 0b1 0b0 0b1
19 0b0 0b0 0b0 0b0 0b0
----------------------------------------------------------------------------- -}
| andygill/kansas-lava | examples/Probes.hs | bsd-3-clause | 3,182 | 2 | 14 | 1,008 | 397 | 211 | 186 | 24 | 1 |
module Pair () where
{-@ LIQUID "--no-termination" @-}
import Language.Haskell.Liquid.Prelude
-- NOTE(review): `incr` is defined twice with identical equations; the
-- second equation is unreachable (GHC warns about a redundant pattern).
-- Presumably a copy/paste left-over in this LiquidHaskell test case.
incr z = (x, [x + 1])
  where
    x = choose z

incr z = (x, [x + 1])
  where
    x = choose z

-- Checks that the first component is strictly smaller than the single
-- list element produced by `incr`.
chk (x, [y]) = liquidAssertB (x < y)

prop = chk $ incr n
  where
    n = choose 0
incr2 x = (True, 9, x, 'o', x+1)
chk2 (_, _, x, _, y) = liquidAssertB (x <y)
prop2 = chk2 $ incr2 n
where n = choose 0
incr3 x = (x, ( (0, x+1)))
chk3 (x, ((_, y))) = liquidAssertB (x <y)
prop3 = chk3 (incr3 n)
where n = choose 0
| ssaavedra/liquidhaskell | tests/pos/pair00.hs | bsd-3-clause | 517 | 0 | 8 | 142 | 292 | 164 | 128 | 17 | 1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="fr-FR">
<title>Groovy Support</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/groovy/src/main/javahelp/org/zaproxy/zap/extension/groovy/resources/help_fr_FR/helpset_fr_FR.hs | apache-2.0 | 959 | 89 | 29 | 156 | 389 | 209 | 180 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="sr-CS">
<title>Browser View | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | kingthorin/zap-extensions | addOns/browserView/src/main/javahelp/org/zaproxy/zap/extension/browserView/resources/help_sr_CS/helpset_sr_CS.hs | apache-2.0 | 973 | 87 | 29 | 159 | 396 | 212 | 184 | -1 | -1 |
{-# OPTIONS -fglasgow-exts #-}
-- This only typechecks if forall-hoisting works ok when
-- importing from an interface file. The type of Twins.gzipWithQ
-- is this:
-- type GenericQ r = forall a. Data a => a -> r
-- gzipWithQ :: GenericQ (GenericQ r) -> GenericQ (GenericQ [r])
-- It's kept this way in the interface file for brevity and documentation,
-- but when the type synonym is expanded, the foralls need expanding
module Foo where
import Data.Generics.Basics
import Data.Generics.Aliases
import Data.Generics.Twins(gzipWithQ)
-- | Generic equality: an alternative to \deriving Eq\
-- Structural equality via a generic zip: two values are equal when
-- their constructors agree at every level of the term.
geq :: Data a => a -> a -> Bool
geq x y = geq' x y
  where
    -- This type signature no longer works, because it is
    -- insufficiently polymorphic.
    --   geq' :: forall a b. (Data a, Data b) => a -> b -> Bool
    geq' :: GenericQ (GenericQ Bool)
    geq' x y = (toConstr x == toConstr y)
            && and (gzipWithQ geq' x y)
| hvr/jhc | regress/tests/1_typecheck/2_pass/ghc/uncat/tc191.hs | mit | 939 | 0 | 10 | 210 | 130 | 74 | 56 | 10 | 1 |
{-
Poly: limited polynomial arithmetic
Part of Mackerel: a strawman device definition DSL for Barrelfish
Copyright (c) 2007, 2008, ETH Zurich.
All rights reserved.
This file is distributed under the terms in the attached LICENSE file.
If you do not find this file, copies can be found by writing to:
ETH Zurich D-INFK, Haldeneggsteig 4, CH-8092 Zurich. Attn: Systems Group.
-}
module Poly where
import Data.List
-- | Normalise a polynomial represented as a list of
-- (coefficient, variable-list) terms: each variable list is put into a
-- canonical (sorted) order, like terms are combined, and terms whose
-- coefficient collapses to zero are dropped.
--
-- Fix: the previous version sorted the term list by its natural tuple
-- order, i.e. by coefficient first, so like terms with different
-- coefficients were not adjacent and never merged; it also compared
-- variable lists without canonicalising them, so @x*y + y*x@ failed to
-- reduce to @2*x*y@.  Sorting canonicalised terms by their variable
-- list guarantees equal terms are adjacent before 'reduce1' runs.
reduce :: [ (Integer, [String]) ] -> [ (Integer, [String]) ]
reduce p = [ t | t@(i, _) <- reduce1 (sortBy byVars canonical), i /= 0 ]
  where canonical = [ (i, sort s) | (i, s) <- p ]
        byVars (_, a) (_, b) = compare a b

-- | Combine adjacent terms that share the same variable list.  The
-- input must already be ordered so that equal variable lists are
-- adjacent (as arranged by 'reduce').
reduce1 :: [ (Integer, [String]) ] -> [ (Integer, [String]) ]
reduce1 [] = []
reduce1 [h] = [h]
reduce1 ((i1, idlist1):(i2, idlist2):t)
    | idlist1 == idlist2 =
        reduce1 ((i1+i2, idlist1):t)
    | otherwise =
        (i1, idlist1):(reduce1 ((i2, idlist2):t))
| daleooo/barrelfish | tools/mackerel/Poly.hs | mit | 840 | 0 | 11 | 192 | 252 | 144 | 108 | 12 | 1 |
{-# LANGUAGE Safe #-}
{-# LANGUAGE StandaloneDeriving, DeriveDataTypeable #-}
-- | Here we used typeable to produce an illegal value
-- Now using SAFE though so will fail
module Main where
import Data.OldTypeable
import BadImport03_A
deriving instance Typeable Nat
-- Wrapper used to smuggle a value past 'cast': its Typeable instance
-- below deliberately lies and reports the representation of 'Nat'.
data NInt = NInt Int deriving Show

-- Bogus hand-written instance.  Under Safe Haskell this module must be
-- rejected — which is exactly what this regression test checks.
instance Typeable NInt where
    typeOf _ = typeOf (undefined::Nat)

main = do
    let a = succ' $ zero
        -- These lazy Just-patterns only succeed because the lying
        -- Typeable instance makes 'cast' cross the NInt/Nat boundary.
        Just n@(NInt z) = (cast a) :: Maybe NInt
        n' = NInt (-z)
        Just m = (cast n') :: Maybe Nat
    putStrLn $ showNat a
    putStrLn $ show n
    putStrLn $ showNat m
    return ()
| lukexi/ghc | testsuite/tests/safeHaskell/unsafeLibs/BadImport03.hs | bsd-3-clause | 615 | 0 | 13 | 161 | 181 | 92 | 89 | 18 | 1 |
module ShaderLoader where
import Graphics.GL
import Control.Monad
import Control.Monad.Trans
import Foreign
import Foreign.C.String
import qualified Data.ByteString as BS
import qualified Data.Text.Encoding as Text
import qualified Data.Text.IO as Text
-- | Allocate temporary storage, let @f@ write through the pointer, and
-- return whatever ended up stored there.  Handy for GL-style
-- out-parameter APIs.
overPtr :: (MonadIO m, Storable a) => (Ptr a -> IO b) -> m a
overPtr f = liftIO $ alloca $ \ptr -> do
  _ <- f ptr
  peek ptr
newtype GLProgram = GLProgram { unGLProgram :: GLuint }
newtype VertexArrayObject = VertexArrayObject { unVertexArrayObject :: GLuint }
newtype AttributeLocation = AttributeLocation { unAttributeLocation :: GLint }
newtype UniformLocation = UniformLocation { unUniformLocation :: GLint }
newtype TextureID = TextureID { unTextureID :: GLuint }
---------------
-- Load shaders
---------------
createShaderProgram :: FilePath -> FilePath -> IO GLProgram
createShaderProgram vertexShaderPath fragmentShaderPath =
do vertexShader <- glCreateShader GL_VERTEX_SHADER
compileShader vertexShaderPath vertexShader
fragmentShader <- glCreateShader GL_FRAGMENT_SHADER
compileShader fragmentShaderPath fragmentShader
shaderProg <- glCreateProgram
glAttachShader shaderProg vertexShader
glAttachShader shaderProg fragmentShader
glLinkProgram shaderProg
linked <- overPtr (glGetProgramiv shaderProg GL_LINK_STATUS)
when (linked == GL_FALSE)
(do maxLength <- overPtr (glGetProgramiv shaderProg GL_INFO_LOG_LENGTH)
logLines <- allocaArray
(fromIntegral maxLength)
(\p ->
alloca (\lenP ->
do glGetProgramInfoLog shaderProg maxLength lenP p
len <- peek lenP
peekCStringLen (p,fromIntegral len)))
putStrLn logLines)
return (GLProgram shaderProg)
where compileShader path shader =
do src <- Text.readFile path
BS.useAsCString
(Text.encodeUtf8 src)
(\ptr ->
withArray [ptr]
(\srcs ->
glShaderSource shader 1 srcs nullPtr))
glCompileShader shader
when True
(do maxLength <- overPtr (glGetShaderiv shader GL_INFO_LOG_LENGTH)
logLines <- allocaArray
(fromIntegral maxLength)
(\p ->
alloca (\lenP ->
do glGetShaderInfoLog shader maxLength lenP p
len <- peek lenP
peekCStringLen (p,fromIntegral len)))
putStrLn logLines)
-- | Look up a named vertex-attribute location in a linked program.
-- Calls 'error' when the attribute is not found.
getShaderAttribute :: GLProgram -> String -> IO AttributeLocation
getShaderAttribute (GLProgram prog) attributeName = do
  location <- withCString attributeName $ \attributeNameCString ->
    glGetAttribLocation prog attributeNameCString
  -- -1 is GL's sentinel for "no such active attribute".
  -- Fixed typo in the error message ("Coudn't" -> "Couldn't").
  when (location == -1) $ error $ "Couldn't bind attribute: " ++ attributeName
  return (AttributeLocation location)
-- | Look up a named uniform location in a linked program.
-- Calls 'error' when the uniform is not found.
getShaderUniform :: GLProgram -> String -> IO UniformLocation
getShaderUniform (GLProgram prog) uniformName = do
  location <- withCString uniformName $ \uniformNameCString ->
    glGetUniformLocation prog uniformNameCString
  -- -1 is GL's sentinel for "no such active uniform".
  -- Fixed typo in the error message ("Coudn't" -> "Couldn't").
  when (location == -1) $ error $ "Couldn't bind uniform: " ++ uniformName
  return (UniformLocation location)
-- | Drain and report the GL error queue.  glGetError returns one
-- queued error per call, so we recurse until GL_NO_ERROR comes back.
glGetErrors :: IO ()
glGetErrors = do
  code <- glGetError
  case code of
    GL_NO_ERROR -> return ()
    e -> do
      -- Print a human-readable tag for this error code...
      case e of
        GL_INVALID_ENUM -> putStrLn "* Invalid Enum"
        GL_INVALID_VALUE -> putStrLn "* Invalid Value"
        GL_INVALID_OPERATION -> putStrLn "* Invalid Operation"
        GL_INVALID_FRAMEBUFFER_OPERATION -> putStrLn "* Invalid Framebuffer Operation"
        GL_OUT_OF_MEMORY -> putStrLn "* OOM"
        GL_STACK_UNDERFLOW -> putStrLn "* Stack underflow"
        GL_STACK_OVERFLOW -> putStrLn "* Stack overflow"
        _ -> return ()
      -- ...then loop to fetch any further queued errors.
      glGetErrors
| lukexi/oculus-mini | test/ShaderLoader.hs | mit | 4,327 | 0 | 24 | 1,494 | 945 | 467 | 478 | 85 | 9 |
module Input
( AppInput
, parseWinInput
, mousePos
, lbp
, lbpPos
, lbDown
, rbp
, rbpPos
, rbDown
, keyPress
, keyPressed
, quitEvent
, module SDL.Input.Keyboard.Codes
) where
import Data.Maybe
import FRP.Yampa
import Linear (V2(..))
import Linear.Affine (Point(..))
import SDL.Input.Keyboard.Codes
import qualified SDL
import Types
-- <| Signal Functions |> --
-- | Current mouse position
mousePos :: SF AppInput (Double,Double)
mousePos = arr inpMousePos
-- | Events that indicate left button click
lbp :: SF AppInput (Event ())
lbp = lbpPos >>^ tagWith ()
-- | Events that indicate left button click and are tagged with mouse position
lbpPos :: SF AppInput (Event (Double,Double))
lbpPos = inpMouseLeft ^>> edgeJust
-- | Is left button down
lbDown :: SF AppInput Bool
lbDown = arr (isJust . inpMouseLeft)
-- | Events that indicate right button click
rbp :: SF AppInput (Event ())
rbp = rbpPos >>^ tagWith ()
-- | Events that indicate right button click and are tagged with mouse position
rbpPos :: SF AppInput (Event (Double,Double))
rbpPos = inpMouseRight ^>> edgeJust
-- | Is right button down
rbDown :: SF AppInput Bool
rbDown = arr (isJust . inpMouseRight)
keyPress :: SF AppInput (Event SDL.Scancode)
keyPress = inpKeyPressed ^>> edgeJust
keyPressed :: SDL.Scancode -> SF AppInput (Event ())
keyPressed code = keyPress >>^ filterE (code ==) >>^ tagWith ()
quitEvent :: SF AppInput (Event ())
quitEvent = arr inpQuit >>> edge
-- | Exported as abstract type. Fields are accessed with signal functions.
data AppInput = AppInput
{ inpMousePos :: (Double, Double) -- ^ Current mouse position
, inpMouseLeft :: Maybe (Double, Double) -- ^ Left button currently down
, inpMouseRight :: Maybe (Double, Double) -- ^ Right button currently down
, inpKeyPressed :: Maybe SDL.Scancode
, inpQuit :: Bool -- ^ SDL's QuitEvent
}
initAppInput :: AppInput
initAppInput = AppInput { inpMousePos = (0, 0)
, inpMouseLeft = Nothing
, inpMouseRight = Nothing
, inpKeyPressed = Nothing
, inpQuit = False
}
-- | Filter and transform SDL events into events which are relevant to our
-- application
parseWinInput :: SF WinInput AppInput
parseWinInput = accumHoldBy nextAppInput initAppInput
-- | Fold one SDL event into the previous 'AppInput' state.  Clause
-- order matters: the ESC clause must precede the generic keyboard
-- clause so ESC maps to "quit" instead of being recorded as an
-- ordinary key press.
nextAppInput :: AppInput -> SDL.EventPayload -> AppInput
-- When the user closes the window
nextAppInput inp SDL.QuitEvent = inp { inpQuit = True }
-- Pressing ESC closes the game (the guard only checks the scancode,
-- so this fires on both press and release of ESC)
nextAppInput inp (SDL.KeyboardEvent ev)
    | SDL.keysymScancode (SDL.keyboardEventKeysym ev) == ScancodeEscape
        = inp { inpQuit = True }
-- Track the current mouse coordinates
nextAppInput inp (SDL.MouseMotionEvent ev) =
    inp { inpMousePos = (fromIntegral x, fromIntegral y) }
    where P (V2 x y) = SDL.mouseMotionEventPos ev
-- Record the currently held key; cleared again on release
nextAppInput inp (SDL.KeyboardEvent ev)
    | SDL.keyboardEventKeyMotion ev == SDL.Pressed
        = inp { inpKeyPressed = Just $ SDL.keysymScancode $ SDL.keyboardEventKeysym ev }
    | SDL.keyboardEventKeyMotion ev == SDL.Released
        = inp { inpKeyPressed = Nothing }
-- Record mouse-button state, remembering the click position while held
nextAppInput inp (SDL.MouseButtonEvent ev) = inp { inpMouseLeft = lmb
                                                 , inpMouseRight = rmb }
    where motion = SDL.mouseButtonEventMotion ev
          button = SDL.mouseButtonEventButton ev
          pos = inpMousePos inp
          -- Pick which half of the (left, right) pair to update.
          inpMod = case (motion,button) of
              (SDL.Released, SDL.ButtonLeft) -> first (const Nothing)
              (SDL.Pressed, SDL.ButtonLeft) -> first (const (Just pos))
              (SDL.Released, SDL.ButtonRight) -> second (const Nothing)
              (SDL.Pressed, SDL.ButtonRight) -> second (const (Just pos))
              _ -> id
          (lmb,rmb) = inpMod $ (inpMouseLeft &&& inpMouseRight) inp
-- Every other event leaves the input unchanged
nextAppInput inp _ = inp
| Rydgel/flappy-haskell | src/Input.hs | mit | 4,074 | 0 | 14 | 1,056 | 1,029 | 568 | 461 | 81 | 5 |
module Kriek.Backend.ES6 where
import Kriek.Data
-- compile :: Analysis -> JSExpression
-- compile (Analysis scope context op) = case op of
-- Do as ->
| kalouantonis/kriek | src/hs/Kriek/Backend/ES6.hs | mit | 157 | 0 | 4 | 30 | 15 | 11 | 4 | 2 | 0 |
-- Perlin implementation lifted from Glome https://wiki.haskell.org/Glome
{-# LANGUAGE BangPatterns #-}
module GlomePerlin where
import Data.Array.IArray
type Flt = Double
-- | 3d type represented as a record of unboxed floats.
data Vec = Vec !Flt !Flt !Flt deriving Show
vec :: Flt -> Flt -> Flt -> Vec
vec !x !y !z = (Vec x y z)
-- | Absolute value for 'Int'.
iabs :: Int -> Int
iabs a
  | a < 0     = negate a
  | otherwise = a
-- | Absolute value for the scalar type 'Flt'.
fabs :: Flt -> Flt
fabs a
  | a < 0     = negate a
  | otherwise = a
vdot :: Vec -> Vec -> Flt
vdot !(Vec x1 y1 z1) !(Vec x2 y2 z2) =
(x1*x2)+(y1*y2)+(z1*z2)
vlen :: Vec -> Flt
vlen !v1 = sqrt (vdot v1 v1)
phi :: Array Int Int
phi = listArray (0,11) [3,0,2,7,4,1,5,11,8,10,9,6]
gamma :: Int -> Int -> Int -> Vec
gamma i j k =
let a = phi!(mod (iabs k) 12)
b = phi!(mod (iabs (j+a)) 12)
c = phi!(mod (iabs (i+b)) 12)
in grad!c
grad :: Array Int Vec
grad = listArray (0,11)
$ filter (\x -> let l = vlen x in l < 1.5 && l > 1.1)
[Vec x y z | x <- [(-1),0,1],
y <- [(-1),0,1],
z <- [(-1),0,1]]
-- | Smoothing kernel used to blend gradient contributions: a quintic
-- falloff of the absolute distance, equal to 1 at 0 and 0 at 1.
omega :: Flt -> Flt
omega dist = (-6)*tcube*tsqr + 15*tcube*t - 10*tcube + 1
  where
    t = fabs dist
    tsqr = t*t
    tcube = tsqr*t
knot :: Int -> Int -> Int -> Vec -> Flt
knot i j k v =
let Vec x y z = v
in (omega x) * (omega y) * (omega z) * (vdot (gamma i j k) v)
noise :: Vec -> Flt
noise (Vec x y z) =
let i = floor x
j = floor y
k = floor z
u = x-(fromIntegral i)
v = y-(fromIntegral j)
w = z-(fromIntegral k)
in knot i j k (Vec u v w) +
knot (i+1) j k (Vec (u-1) v w) +
knot i (j+1) k (Vec u (v-1) w) +
knot i j (k+1) (Vec u v (w-1)) +
knot (i+1) (j+1) k (Vec (u-1) (v-1) w) +
knot (i+1) j (k+1) (Vec (u-1) v (w-1)) +
knot i (j+1) (k+1) (Vec u (v-1) (w-1)) +
knot (i+1) (j+1) (k+1) (Vec (u-1) (v-1) (w-1))
-- | Perlin noise rescaled into [0,1]; aborts loudly if the underlying
-- noise value ever escapes the expected range.
perlin :: Vec -> Flt
perlin v
  | p > 1     = error $ "perlin noise error, 1 < " ++ (show p)
  | p < 0     = error $ "perlin noise error, 0 > " ++ (show p)
  | otherwise = p
  where p = ((noise v)+1)*0.5
| rickerbh/GenerativeArtHaskell | GenerativeArt.hsproj/GlomePerlin.hs | mit | 2,140 | 0 | 18 | 711 | 1,315 | 691 | 624 | 72 | 3 |
-- https://wiki.haskell.org/Haskell_in_5_steps
{-
fac n =
if n == 0
then 1
else n * fac (n-1)
-}
-- | Factorial via direct recursion.
--
-- Fix: negative arguments previously fell through to the recursive
-- case and looped forever; they are now rejected explicitly.  (This
-- adds an Ord constraint alongside the original Eq/Num constraints.)
fac 0 = 1
fac n | n < 0     = error "fac: negative argument"
      | otherwise = n * fac (n-1)
main=print (fac 42) -- 括号不可避 | SnowOnion/GrahamHaskellPractice | Haskell_in_5_steps.hs | mit | 183 | 2 | 8 | 49 | 50 | 26 | 24 | 3 | 1 |
module StringMingling where
-- | Render a pair of characters as a two-element list.
tupleToString :: (char,char) -> [char]
tupleToString (first, second) = first : second : []
-- | Interleave two strings character by character; the result is
-- truncated to the length of the shorter input (zip semantics).
mingle :: String -> String -> String
mingle xs ys = concat (zipWith pair xs ys)
  where pair a b = [a, b]
{-
main :: IO ()
main = do
string1 <- getLine
string2 <- getLine
putStrLn $ mingle string1 string2
-}
| Sobieck00/practice | hr/nonvisualstudio/haskell/functionalprogramming/recursion/stringmingling/Solution.hs | mit | 356 | 0 | 7 | 99 | 82 | 46 | 36 | 8 | 1 |
{-# htermination minusFM :: FiniteMap () b1 -> FiniteMap () b2 -> FiniteMap () b1 #-}
import FiniteMap
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/FiniteMap_minusFM_2.hs | mit | 103 | 0 | 3 | 18 | 5 | 3 | 2 | 1 | 0 |
module Cascade.Parse.Comment (parseComment) where
import Cascade.Data.Parse (State(..), Result(..), Token(Token), expect, readToken)
import Cascade.Data.Ast (Item(Comment))
-- Delimiters of a CSS block comment.
commentStart = "/*"
commentEnd = "*/"

-- | Parse a @/* ... */@ comment starting at the current position.
-- Fails immediately when the input does not begin with the opener.
parseComment :: State -> (Result Item)
parseComment state =
  case (expect state commentStart) of
    (Just state') -> (make' (parse' state' ""))
    (Nothing) -> Error { message = "expected /*" }

-- | Accumulate comment text one token at a time until the closing
-- delimiter is consumed.
-- NOTE(review): @body ++ string@ makes this quadratic in the comment
-- length; a reversed accumulator or difference list would be linear.
parse' :: State -> String -> (Result String)
parse' state body =
  case (expect state commentEnd) of
    (Just state') -> Result { state = state', result = body }
    (Nothing) ->
      let (Token state' string) = readToken state 1
      in parse' state' (body ++ string)

-- | Wrap the accumulated text into a 'Comment' AST item, passing
-- errors through unchanged.
make' :: (Result String) -> (Result Item)
make' (Error message) = Error { message = message }
make' (Result state result) =
  Result { state = state, result = Comment result }
module Main where
import Web.Scotty
import Database.PostgreSQL.Simple (connectPostgreSQL)
import Oinc.API.Server
-- Connect to the local development database (hard-coded DSN) and
-- serve the API on port 3000.
main = connectPostgreSQL "dbname='oinc_haskell_dev' user='postgres'" >>= runServer 3000
--Script para gerar o início de um arquivo .ppm
module RaytracerEtapa4_1113331018 where
-- 'Vetor3D' is a small sum type with two constructors, both carrying a
-- triple: 'Pixel' wraps an RGB colour of Ints, and 'Coordenada' wraps
-- a 3-D point of Doubles.  Show/Eq are derived for printing and tests.
data Vetor3D = Pixel(Int, Int, Int)
             | Coordenada(Double, Double, Double)
             deriving (Show, Eq)
-- | A scene object: a sphere given by its centre and radius.
data Objeto = Esfera Vetor3D Double
              deriving(Show, Eq)
-- | A ray given by an origin point and a direction vector.
data Objeto2 = Raio Vetor3D Vetor3D
               deriving(Show, Eq)
-- | Render one RGB pixel as the PPM body line "r g b\n".
pixelToString :: (Int, Int, Int) -> String
pixelToString (r, g, b) = unwords [show r, show g, show b] ++ "\n"
-- Constants emitted at the top of every PPM file: the "P3" magic
-- number and a header comment line.
p3 = "P3\n"
comment = "# It's a .ppm imagem for a raytracer\n"
-- | Assemble the complete textual contents of a PPM (P3) image from
-- the width, height and a list of RGB pixels.  An empty pixel list
-- yields a (Portuguese) error message string instead of a header.
create_text_to_ppm_file :: Int -> Int -> [(Int, Int, Int)] -> String
create_text_to_ppm_file _ _ [] = "Nao e possivel criar uma imagem sem pixels"
create_text_to_ppm_file width height pixels =
    -- concatMap replaces the original foldr (++) "" . map pipeline.
    p3 ++ comment ++ show width ++ " " ++ show height ++ "\n" ++ "255\n"
       ++ concatMap pixelToString pixels
-- Vector addition: (x1,y1,z1) + (x2,y2,z2) = (x1+x2, y1+y2, z1+z2).
-- NOTE(review): partial — mixing a Pixel with a Coordenada is not
-- matched and raises a pattern-match failure at runtime.
($+) :: Vetor3D -> Vetor3D -> Vetor3D
Coordenada(x1, y1, z1) $+ Coordenada(x2, y2, z2) = Coordenada(x1 + x2, y1 + y2, z1 + z2)
Pixel(x1, y1, z1) $+ Pixel(x2, y2, z2) = Pixel(x1 + x2, y1 + y2, z1 + z2)
-- Vector subtraction: (x1,y1,z1) - (x2,y2,z2) = (x1-x2, y1-y2, z1-z2).
-- NOTE(review): partial for mixed Pixel/Coordenada operands.
($-) :: Vetor3D -> Vetor3D -> Vetor3D
Coordenada(x1, y1, z1) $- Coordenada(x2, y2, z2) = Coordenada(x1 - x2, y1 - y2, z1 - z2)
Pixel(x1, y1, z1) $- Pixel(x2, y2, z2) = Pixel(x1 - x2, y1 - y2, z1 - z2)
-- Dot product: (x1,y1,z1).(x2,y2,z2) = x1*x2 + y1*y2 + z1*z2.
-- The Pixel case converts the integer sum to Double via fromIntegral.
-- NOTE(review): partial for mixed Pixel/Coordenada operands.
($.) :: Vetor3D -> Vetor3D -> Double
Coordenada(x1, y1, z1) $. Coordenada(x2, y2, z2) = ((x1 * x2) + (y1 * y2) + (z1 * z2))
Pixel(x1, y1, z1) $. Pixel(x2, y2, z2) = fromIntegral ((x1 * x2) + (y1 * y2) + (z1 * z2))
-- Scalar multiplication: a(x1,y1,z1) = (a*x1, a*y1, a*z1).
-- For Pixel the scalar is floored *before* multiplying each component,
-- so e.g. scaling by 1.9 behaves like scaling by 1.
($*) :: Vetor3D -> Double -> Vetor3D
Coordenada(x1, y1, z1) $* escalar = Coordenada(x1 * escalar, y1 * escalar, z1 * escalar)
Pixel(x1, y1, z1) $* escalar = Pixel(x1 * (floor escalar), y1 * (floor escalar), z1 * (floor escalar))
-- Scalar division: (x1,y1,z1)/a = (x1/a, y1/a, z1/a).
-- Pixel components are divided as reals and then floored per component.
($/) :: Vetor3D -> Double -> Vetor3D
Coordenada(x1, y1, z1) $/ escalar = Coordenada(x1 / escalar, y1 / escalar, z1 / escalar)
Pixel(x1, y1, z1) $/ escalar = Pixel(floor((fromIntegral x1) / escalar), floor(fromIntegral y1 / escalar), floor(fromIntegral z1 / escalar))
-- | Discriminant of the quadratic equation for a ray/sphere
-- intersection: b^2 - 4ac with
--   a = d.d,  b = 2 (o-c).d,  c = (o-c).(o-c) - r^2.
-- A positive result means the ray hits the sphere in two points.
--
-- Fixes the original version, which used the non-existent Num (-) on
-- Vetor3D instead of ($-), pattern-matched 'Coordenada temp' to a bare
-- tuple, and accidentally defined a function 'double' with 'let
-- double a = ...' instead of binding a, b, c.
calcDiscriminante :: Objeto2 -> Objeto -> Double
calcDiscriminante (Raio origem direcao) (Esfera centro raio) =
    let temp = origem $- centro
        a = direcao $. direcao
        b = 2.0 * (temp $. direcao)
        c = (temp $. temp) - (raio * raio)
    in b * b - 4.0 * a * c
--OBS.:
--Cores que para mim são pixels
--TODO
--Usar a dica que o Rafael deu no e-mail para melhorar os testes:
--Que tal experimentar um pouco com a função property também?
--Por exemplo, a função pixelToString sempre gera dois espaços em branco, a gente poderia escrever o seguinte:
-- it "should always have two empty spaces" $
-- property $ \x y z -> (length $ filter (==' ') $ pixelToString (x, y, z)) == 2
-- Não testei para ver se funciona :P, mas acho que é isso.
-- Quais outras invariantes a gente consegue achar no código?
-- A parte com operações vetoriais pode ter umas interessantes.
-- Por exemplo, a expressão a seguir deve ser verdadeira
-- para todo x diferente de 0:
-- v $* x $/ x == v.
| lipemorais/haskellando | RaytracerEtapa4_1113331018.hs | mit | 4,424 | 0 | 13 | 1,181 | 1,162 | 636 | 526 | 37 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-apigateway-documentationpart-location.html
module Stratosphere.ResourceProperties.ApiGatewayDocumentationPartLocation where
import Stratosphere.ResourceImports
-- | Full data type definition for ApiGatewayDocumentationPartLocation. See
-- 'apiGatewayDocumentationPartLocation' for a more convenient constructor.
-- Every location selector is optional in CloudFormation, hence the
-- 'Maybe' on each field.
data ApiGatewayDocumentationPartLocation =
  ApiGatewayDocumentationPartLocation
  { _apiGatewayDocumentationPartLocationMethod :: Maybe (Val Text)
  , _apiGatewayDocumentationPartLocationName :: Maybe (Val Text)
  , _apiGatewayDocumentationPartLocationPath :: Maybe (Val Text)
  , _apiGatewayDocumentationPartLocationStatusCode :: Maybe (Val Text)
  , _apiGatewayDocumentationPartLocationType :: Maybe (Val Text)
  } deriving (Show, Eq)
-- Serialise to a JSON object, omitting any field that is 'Nothing'.
instance ToJSON ApiGatewayDocumentationPartLocation where
  toJSON ApiGatewayDocumentationPartLocation{..} =
    object $
    catMaybes
    [ fmap (("Method",) . toJSON) _apiGatewayDocumentationPartLocationMethod
    , fmap (("Name",) . toJSON) _apiGatewayDocumentationPartLocationName
    , fmap (("Path",) . toJSON) _apiGatewayDocumentationPartLocationPath
    , fmap (("StatusCode",) . toJSON) _apiGatewayDocumentationPartLocationStatusCode
    , fmap (("Type",) . toJSON) _apiGatewayDocumentationPartLocationType
    ]
-- | Constructor for 'ApiGatewayDocumentationPartLocation' containing required
-- fields as arguments.  All fields start out as 'Nothing'; populate
-- them through the lenses below.
apiGatewayDocumentationPartLocation
  :: ApiGatewayDocumentationPartLocation
apiGatewayDocumentationPartLocation =
  ApiGatewayDocumentationPartLocation
  { _apiGatewayDocumentationPartLocationMethod = Nothing
  , _apiGatewayDocumentationPartLocationName = Nothing
  , _apiGatewayDocumentationPartLocationPath = Nothing
  , _apiGatewayDocumentationPartLocationStatusCode = Nothing
  , _apiGatewayDocumentationPartLocationType = Nothing
  }
-- Lenses for each optional field; each link below points at the
-- corresponding CloudFormation property documentation.
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-apigateway-documentationpart-location.html#cfn-apigateway-documentationpart-location-method
agdplMethod :: Lens' ApiGatewayDocumentationPartLocation (Maybe (Val Text))
agdplMethod = lens _apiGatewayDocumentationPartLocationMethod (\s a -> s { _apiGatewayDocumentationPartLocationMethod = a })

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-apigateway-documentationpart-location.html#cfn-apigateway-documentationpart-location-name
agdplName :: Lens' ApiGatewayDocumentationPartLocation (Maybe (Val Text))
agdplName = lens _apiGatewayDocumentationPartLocationName (\s a -> s { _apiGatewayDocumentationPartLocationName = a })

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-apigateway-documentationpart-location.html#cfn-apigateway-documentationpart-location-path
agdplPath :: Lens' ApiGatewayDocumentationPartLocation (Maybe (Val Text))
agdplPath = lens _apiGatewayDocumentationPartLocationPath (\s a -> s { _apiGatewayDocumentationPartLocationPath = a })

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-apigateway-documentationpart-location.html#cfn-apigateway-documentationpart-location-statuscode
agdplStatusCode :: Lens' ApiGatewayDocumentationPartLocation (Maybe (Val Text))
agdplStatusCode = lens _apiGatewayDocumentationPartLocationStatusCode (\s a -> s { _apiGatewayDocumentationPartLocationStatusCode = a })

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-apigateway-documentationpart-location.html#cfn-apigateway-documentationpart-location-type
agdplType :: Lens' ApiGatewayDocumentationPartLocation (Maybe (Val Text))
agdplType = lens _apiGatewayDocumentationPartLocationType (\s a -> s { _apiGatewayDocumentationPartLocationType = a })
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/ApiGatewayDocumentationPartLocation.hs | mit | 3,903 | 0 | 12 | 349 | 537 | 304 | 233 | 42 | 1 |
{- |
Module : $Header$
Description : data types for consistency aka conservativity
Copyright : (c) Christian Maeder, DFKI GmbH 2008
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : provisional
Portability : portable
Data types for conservativity
-}
module Common.Consistency where
import Common.Doc
import Common.DocUtils
import Common.AS_Annotation
import Common.Result
{- | Conservativity annotations. For compactness, only the greatest applicable
value is used in a DG. PCons stands for prooftheoretic conservativity as
required for extending imports (no confusion) in Maude -}
-- Constructor order matters here: the derived Ord makes later
-- constructors (Cons, Mono, Def) compare as greater, matching the
-- "only the greatest applicable value is used" convention above.
data Conservativity =
    Inconsistent
  | Unknown String
  | None
  | PCons
  | Cons
  | Mono
  | Def
    deriving (Show, Read, Eq, Ord)
-- | Human-readable description of a conservativity value.
showConsistencyStatus :: Conservativity -> String
showConsistencyStatus Inconsistent = "not conservative"
showConsistencyStatus (Unknown str) = "unknown if being conservative. Cause is : " ++ str
showConsistencyStatus None = "unknown if being conservative"
showConsistencyStatus Cons = "conservative"
showConsistencyStatus PCons = "proof-theoretically conservative"
showConsistencyStatus Mono = "monomorphic"
showConsistencyStatus Def = "definitional"
-- Pretty-printing reuses the human-readable status string above.
instance Pretty Conservativity where
  pretty = text . showConsistencyStatus
{- | All target sentences must be implied by the source translated
along the morphism. They are axioms only and not identical to any
translated sentence of the source. -}
data ConservativityChecker sign sentence morphism = ConservativityChecker
    { checkerId :: String -- ^ name identifying this checker
    , checkConservativity
          :: (sign, [Named sentence])
          -> morphism
          -> [Named sentence]
          -> IO (Result (Conservativity, [sentence])) }
| nevrenato/HetsAlloy | Common/Consistency.hs | gpl-2.0 | 1,669 | 0 | 16 | 335 | 234 | 132 | 102 | 32 | 7 |
--
-- riot/Riot/Editor.hs
--
-- Copyright (c) Tuomo Valkonen 2004-2005.
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
module Riot.Editor(
edittext,
editfile,
get_editor
)where
import System(getEnv)
import System.Posix.Temp(mkstemp)
import System.Posix.Files(removeLink)
import System.Posix.Unistd
import System.Cmd(system)
import Riot.Version(package)
import IO
-- | Quote a string for safe use as a single word in a POSIX shell
-- command line.  The whole string is wrapped in single quotes; an
-- embedded single quote must be written as '\'' (close the quote,
-- emit an escaped quote, reopen) — a backslash inside single quotes
-- is literal, so the original "\\'" escape was incorrect and broke
-- quoting for any argument containing a quote.
shell_safe_string s =
    concat ["'", escape s, "'"]
    where
        escape [] = []
        escape ('\'':s) = "'\\''"++escape s
        escape (x:s) = x:escape s
-- Run a command with each argument individually shell-quoted.
my_system cmd args =
    system (unwords (cmd : map shell_safe_string args))
--import Config(preferred_editor, fallback_editor)
-- Editor selection policy: a hard-wired preference would win here,
-- otherwise the environment ($VISUAL, then $EDITOR) is consulted,
-- and finally the fallback below.
preferred_editor = Nothing -- Use environment
fallback_editor = "vi" -- If environment fails
-- Resolve the editor command to use: the configured preference if set,
-- else $VISUAL, else $EDITOR, else the fallback.  Uses the old
-- Prelude-style IO 'catch', so a missing variable simply falls through
-- to the next choice.
get_editor =
    case preferred_editor of
        Just e -> return e
        Nothing -> catch (getEnv "VISUAL")
                   (\_ -> catch (getEnv "EDITOR")
                          (\_ -> return fallback_editor))
-- Open the user's editor on the given file and wait for it to exit.
editfile fname = get_editor >>= \ed -> my_system ed [fname]
-- Create a unique temporary file under /tmp named after the package,
-- returning its path and an open handle.
make_temp = do
    mkstemp $ "/tmp/"++package++"-XXXXXX"
-- | Let the user edit the given text in their editor via a temporary
-- file, returning the edited contents.  The temp file is removed both
-- on success and (via the catch handler) on failure.
edittext text = do
    (fname, h) <- make_temp
    catch (do_edittext fname h text)
          (\e -> finish fname h >> ioError e)
    where
        finish fname h = hClose h >> removeLink fname
        do_edittext fname h text = do
            hPutStr h text
            hClose h
            editfile fname
            txt <- readFile fname
            -- readFile is lazy: force the whole contents into memory
            -- before unlinking, so we never read lazily from an
            -- already-deleted file.
            length txt `seq` removeLink fname
            return txt
return txt
| opqdonut/riot | Riot/Editor.hs | gpl-2.0 | 1,759 | 0 | 14 | 490 | 473 | 246 | 227 | 43 | 3 |
{-# LANGUAGE CPP, OverloadedStrings #-}
-----------------------------------------------------------------------------
--
-- Module : Main
-- Copyright : 2007-2011 Juergen Nicklisch-Franken, Hamish Mackenzie
-- License : GPL
--
-- Maintainer : maintainer@leksah.org
-- Stability : provisional
-- Portability :
--
-- | Windows systems do not often have a real echo executable (so --with-ghc=echo fails)
--
-----------------------------------------------------------------------------
module Main (
main
) where
import System.Environment (getArgs)
import System.Exit (exitWith, exitSuccess, exitFailure, ExitCode(..))
import IDE.Utils.Tool
(toolProcess, executeGhciCommand, ToolOutput(..), runTool',
newGhci')
#ifdef MIN_VERSION_process_leksah
import IDE.System.Process (interruptProcessGroup, getProcessExitCode)
#else
import System.Process (interruptProcessGroupOf, getProcessExitCode)
#endif
import Test.HUnit
((@=?), (@?=), putTextToHandle, Counts(..), runTestTT, assertBool,
runTestText, (~:), Testable(..), Test(..))
import System.IO (hPutStr, stdout, hPutStrLn, stderr, hFlush)
import qualified Data.Conduit.List as EL (consume)
import Control.Concurrent
(threadDelay, forkIO, takeMVar, putMVar, newEmptyMVar)
import Control.Monad.IO.Class (liftIO)
import Control.Monad (forM_)
import System.Log.Logger
(setLevel, rootLoggerName, updateGlobalLogger)
import System.Log (Priority(..))
runSelf' args = runTool' "dist/build/test-tool/test-tool" args Nothing
-- stderr and stdout may not be in sync
-- Compare a captured tool transcript against the expectation one
-- channel at a time: stdout and stderr events are not ordered relative
-- to each other, so each comparison filters the other stream away.
check actual expected = mapM_ compareKeeping [notOut, notErr]
    where
        compareKeeping keep = filter keep actual @?= filter keep expected
        notErr (ToolError _) = False
        notErr _ = True
        notOut (ToolOutput _) = False
        notOut _ = True
-- Drain an MVar-fed queue of test actions, running each in arrival
-- order, until the producer signals completion with Nothing.
runTests queue = go
  where
    go = takeMVar queue >>= maybe (return ()) (\act -> act >> go)
-- Enqueue one test action for the consumer loop.
sendTest testMVar act = liftIO (putMVar testMVar (Just act))
-- Signal the consumer loop that no more tests will arrive.
doneTesting testMVar = liftIO (putMVar testMVar Nothing)
-- The full HUnit suite: spawns this executable in its scripted child
-- modes and drives a GHCi session, asserting on the captured tool
-- transcript.  The GHCi error strings differ between compiler
-- versions, hence the CPP conditionals below.
tests = test [
    "Exit Success" ~: do
        (output, _) <- runSelf' ["ExitSuccess"]
        output `check` [ToolInput "dist/build/test-tool/test-tool ExitSuccess", ToolExit ExitSuccess],

    "Exit Failure" ~: do
        (output, _) <- runSelf' ["Exit42"]
        output `check` [ToolInput "dist/build/test-tool/test-tool Exit42", ToolExit (ExitFailure 42)],

    "Single Blank Out Line" ~: do
        (output, _) <- runSelf' ["BlankLine", "StdOut"]
        output `check` [ToolInput "dist/build/test-tool/test-tool BlankLine StdOut", ToolOutput "", ToolExit ExitSuccess],

    "Single Blank Err Line" ~: do
        (output, _) <- runSelf' ["BlankLine", "StdErr"]
        output `check` [ToolInput "dist/build/test-tool/test-tool BlankLine StdErr", ToolError "", ToolExit ExitSuccess],

    "Hello Out" ~: do
        (output, _) <- runSelf' ["Hello", "StdOut"]
        output `check` [ToolInput "dist/build/test-tool/test-tool Hello StdOut", ToolOutput "Hello World", ToolExit ExitSuccess],

    "Hello Err" ~: do
        (output, _) <- runSelf' ["Hello", "StdErr"]
        output `check` [ToolInput "dist/build/test-tool/test-tool Hello StdErr", ToolError "Hello World", ToolExit ExitSuccess],

    "Both" ~: do
        (output, _) <- runSelf' ["ErrAndOut"]
        output `check` [ToolInput "dist/build/test-tool/test-tool ErrAndOut", ToolError "Error", ToolOutput "Output", ToolExit ExitSuccess],

    "Unterminated Out" ~: do
        (output, _) <- runSelf' ["Unterminated", "StdOut"]
        output `check` [ToolInput "dist/build/test-tool/test-tool Unterminated StdOut", ToolOutput "Unterminated", ToolExit ExitSuccess],

    "Unterminated Err" ~: do
        (output, _) <- runSelf' ["Unterminated", "StdErr"]
        output `check` [ToolInput "dist/build/test-tool/test-tool Unterminated StdErr", ToolError "Unterminated", ToolExit ExitSuccess],

    "GHCi Failed Sart" ~: do
        t <- newEmptyMVar
        tool <- newGhci' ["MissingFile.hs"] $ do
                output <- EL.consume
                sendTest t $ last output @?= (ToolPrompt "")
        executeGhciCommand tool ":quit" $ do
            output <- EL.consume
            sendTest t $ output `check` [
                ToolInput ":quit",
                ToolOutput "Leaving GHCi.",
                ToolExit ExitSuccess],

    "GHCi" ~: do
        t <- newEmptyMVar
        tool <- newGhci' [] $ do
                output <- EL.consume
                sendTest t $ last output @?= (ToolPrompt "")
        executeGhciCommand tool ":m +System.IO" $ do
            output <- EL.consume
            sendTest t $ output `check` [
                ToolInput ":m +System.IO",
                ToolPrompt ""]
        executeGhciCommand tool "hPutStr stderr \"Test\"" $ do
            output <- EL.consume
            sendTest t $ output `check` [
                ToolInput "hPutStr stderr \"Test\"",
                ToolError "Test",
                ToolPrompt ""]
        executeGhciCommand tool "1+1" $ do
            output <- EL.consume
            sendTest t $ output `check` [
                ToolInput "1+1",
                ToolOutput "2",
                ToolPrompt ""]
        executeGhciCommand tool "jfkdfjdkl" $ do
            output <- EL.consume
            sendTest t $ output `check` [
                ToolInput "jfkdfjdkl",
                ToolError "",
#if __GLASGOW_HASKELL__ > 706
                ToolError "<interactive>:23:1: Not in scope: ‘jfkdfjdkl’",
#elif __GLASGOW_HASKELL__ > 702
                ToolError "<interactive>:23:1: Not in scope: `jfkdfjdkl'",
#else
                ToolError "<interactive>:1:1: Not in scope: `jfkdfjdkl'",
#endif
                ToolPrompt ""]
        executeGhciCommand tool "\n1+1" $ do
            output <- EL.consume
            sendTest t $ output `check` [
                ToolInput "",
                ToolInput "1+1",
                ToolOutput "2",
                ToolPrompt ""]
        executeGhciCommand tool ":m + Prelude" $ do
            output <- EL.consume
            sendTest t $ output `check` [
                ToolInput ":m + Prelude",
                ToolPrompt ""]
        executeGhciCommand tool "\njfkdfjdkl" $ do
            output <- EL.consume
            sendTest t $ output `check` [
                ToolInput "",
                ToolInput "jfkdfjdkl",
                ToolError "",
#if __GLASGOW_HASKELL__ > 706
                ToolError "<interactive>:36:1: Not in scope: ‘jfkdfjdkl’",
#elif __GLASGOW_HASKELL__ > 702
                ToolError "<interactive>:38:1: Not in scope: `jfkdfjdkl'",
#else
                ToolError "<interactive>:1:1: Not in scope: `jfkdfjdkl'",
#endif
                ToolPrompt ""]
        executeGhciCommand tool "do\n putStrLn \"1\"\n putStrLn \"2\"\n putStrLn \"3\"\n putStrLn \"4\"\n putStrLn \"5\"\n" $ do
            output <- EL.consume
            sendTest t $ output `check` [
                ToolInput "do",
                ToolInput " putStrLn \"1\"",
                ToolInput " putStrLn \"2\"",
                ToolInput " putStrLn \"3\"",
                ToolInput " putStrLn \"4\"",
                ToolInput " putStrLn \"5\"",
                ToolOutput "1",
                ToolOutput "2",
                ToolOutput "3",
                ToolOutput "4",
                ToolOutput "5",
                ToolPrompt ""]
        executeGhciCommand tool "do\n putStrLn \"| 1\"\n putStrLn \"| 2\"\n putStrLn \"| 3\"\n putStrLn \"| 4\"\n putStrLn \"| 5\"\n" $ do
            output <- EL.consume
            sendTest t $ output `check` [
                ToolInput "do",
                ToolInput " putStrLn \"| 1\"",
                ToolInput " putStrLn \"| 2\"",
                ToolInput " putStrLn \"| 3\"",
                ToolInput " putStrLn \"| 4\"",
                ToolInput " putStrLn \"| 5\"",
                ToolOutput "| 1",
                ToolOutput "| 2",
                ToolOutput "| 3",
                ToolOutput "| 4",
                ToolOutput "| 5",
                ToolPrompt ""]
        executeGhciCommand tool "putStr \"ABC\"" $ do
            output <- EL.consume
            sendTest t $ output `check` [
                ToolInput "putStr \"ABC\"",
                ToolPrompt "ABC"]
        executeGhciCommand tool ":m +Data.List" $ do
            output <- EL.consume
            sendTest t $ output `check` [
                ToolInput ":m +Data.List",
                ToolPrompt ""]
        executeGhciCommand tool ":quit" $ do
            output <- EL.consume
            sendTest t $ output `check` [
                ToolInput ":quit",
                ToolOutput "Leaving GHCi.",
                ToolExit ExitSuccess]
        runTests t]
main :: IO ()
-- With no arguments: run the HUnit suite (logging at DEBUG) and exit
-- with a status reflecting the failure count.  With arguments: act as
-- the scripted child process the suite spawns — echo to the requested
-- handle, exit with a particular code, etc.
main = do
  args <- getArgs
  case args of
    [] -> do
        updateGlobalLogger rootLoggerName (\ l -> setLevel DEBUG l)
        (Counts{failures=failures}, _) <- runTestText (putTextToHandle stderr False) tests
        if failures == 0
            then exitSuccess
            else exitFailure
    ["ExitSuccess"] -> exitSuccess
    ["Exit42"] -> exitWith (ExitFailure 42)
    ["BlankLine", o] -> hPutStrLn (h o) ""
    ["Hello", o] -> hPutStrLn (h o) "Hello World"
    ["ErrAndOut"] -> hPutStrLn stderr "Error" >> hPutStrLn stdout "Output"
    ["Unterminated", o] -> hPutStr (h o) "Unterminated" >> hFlush (h o)
    _ -> exitFailure
 where
  -- Select the output handle named on the command line.
  h "StdErr" = stderr
  h _ = stdout
| JPMoresmau/leksah-server | tests/TestTool.hs | gpl-2.0 | 9,606 | 0 | 16 | 3,017 | 2,206 | 1,138 | 1,068 | 196 | 10 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.GL.Bitmaps
-- Copyright : (c) Sven Panne 2002-2009
-- License : BSD-style (see the file libraries/OpenGL/LICENSE)
--
-- Maintainer : sven.panne@aedion.de
-- Stability : stable
-- Portability : portable
--
-- This module corresponds to section 3.7 (Bitmaps) of the OpenGL 2.1 specs.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.GL.Bitmaps (
bitmap
) where
import Data.Tensor
import Foreign.Ptr
import Graphics.Rendering.OpenGL.Raw.Core31
import Graphics.Rendering.OpenGL.Raw.ARB.Compatibility ( glBitmap )
import Graphics.Rendering.OpenGL.GL.CoordTrans
--------------------------------------------------------------------------------
-- | Thin wrapper over 'glBitmap': draws a bitmap of the given size,
-- with the given raster origin offset (xbo,ybo) and post-draw raster
-- advance (xbi,ybi); the bitmap bits come from the supplied pointer.
bitmap :: Size -> (Vertex2 GLfloat) -> (Vector2 GLfloat) -> Ptr GLubyte -> IO ()
bitmap (Size w h) (Vertex2 xbo ybo) (Vector2 xbi ybi) =
   glBitmap w h xbo ybo xbi ybi
| ducis/haAni | hs/common/Graphics/Rendering/OpenGL/GL/Bitmaps.hs | gpl-2.0 | 1,026 | 0 | 10 | 135 | 158 | 96 | 62 | 10 | 1 |
{-# LANGUAGE ConstraintKinds #-}
module Language.Jasmin.Transformation.VCGen where
import Language.Jasmin.Syntax
import Utils
import Language.Jasmin.TypeChecker.TyInfo
import Language.Jasmin.Transformation.Simplify
import Language.Position
import Language.Location
import Control.Monad
import Control.Monad.IO.Class
import Text.PrettyPrint.Exts
-- converts a procedure into straightline code
-- | Flatten a procedure into straight-line code: argument and local
-- variable declarations, then the preconditions (as instructions),
-- the body, and finally the postconditions.
straightPfundef :: VCK m => Pfundef TyInfo -> m [Pinstr TyInfo]
straightPfundef (Pfundef cc n args rty anns (Pfunbody vars instrs ret) info) = do
    argDecls <- concatMapM straightParg args
    varDecls <- concatMapM straightPbodyarg vars
    let (pres, posts) = splitProcAnns anns
    return $ argDecls ++ varDecls
           ++ map anninstr2instr pres
           ++ instrs
           ++ map anninstr2instr posts
-- Declaration instruction for one (named) procedure argument.
-- NOTE(review): like the original, this is partial for unnamed args.
straightParg :: VCK m => Parg TyInfo -> m [Pinstr TyInfo]
straightParg (Parg ty (Just name)) =
    return [Pinstr (noTyInfo (infoLoc (loc name))) (Anninstr (VarDefAnn (Annarg (snd ty) name Nothing)))]
-- Declaration instruction for one local variable of the body.
straightPbodyarg :: VCK m => Pbodyarg TyInfo -> m [Pinstr TyInfo]
straightPbodyarg (Pbodyarg ty name) =
    return [Pinstr (noTyInfo (infoLoc (loc name))) (Anninstr (VarDefAnn (Annarg (snd ty) name Nothing)))]
-- | Verification conditions for a procedure: straighten, then split
-- into Hoare triples.
genVCsPfundef :: VCK m => Pfundef TyInfo -> m [([Pinstr TyInfo],Pexpr TyInfo)]
genVCsPfundef f = straightPfundef f >>= genTriples
-- | Generate Hoare triples from straight-line code; each assertion in
-- the code becomes the postcondition of one triple whose program part
-- is everything accumulated before it.
genTriples :: VCK m => [Pinstr TyInfo] -> m [([Pinstr TyInfo],Pexpr TyInfo)]
genTriples = genTriples' []
-- Worker: carries the instructions seen so far.  An assertion yields a
-- triple (prefix, condition) and is then re-assumed for the suffix.
genTriples' :: VCK m => [Pinstr TyInfo] -> [Pinstr TyInfo] -> m [([Pinstr TyInfo],Pexpr TyInfo)]
genTriples' _ [] = return []
genTriples' acc (x:xs) = do
    step <- genTriplePinstr x
    case step of
        Left keep -> genTriples' (acc ++ [keep]) xs
        Right cond -> do
            let assumed = Pinstr (loc x) (Anninstr (AssumeAnn False cond))
            rest <- genTriples' (acc ++ [assumed]) xs
            return ((acc, cond) : rest)
-- Unwrap one instruction and dispatch on its payload.
genTriplePinstr :: VCK m => Pinstr TyInfo -> m (Either (Pinstr TyInfo) (Pexpr TyInfo))
genTriplePinstr (Pinstr info body) = genTriplePinstr_r info body
-- Only annotation instructions may produce a condition; everything
-- else is kept as ordinary program text.
genTriplePinstr_r :: VCK m => TyInfo -> Pinstr_r TyInfo -> m (Either (Pinstr TyInfo) (Pexpr TyInfo))
genTriplePinstr_r t (Anninstr ann) = genTriplePanninstr_r t ann
genTriplePinstr_r t other = return (Left (Pinstr t other))
-- Leakage (isLeak) annotations are neutralised to 'assume true';
-- functional assertions become conditions, embedded statements recurse.
genTriplePanninstr_r :: VCK m => TyInfo -> StatementAnnotation_r TyInfo -> m (Either (Pinstr TyInfo) (Pexpr TyInfo))
genTriplePanninstr_r t (AssertAnn isLeak e)
    | isLeak = return (Left (Pinstr t (Anninstr (AssumeAnn False truePexpr))))
    | otherwise = return (Right e)
genTriplePanninstr_r t (EmbedAnn isLeak i)
    | isLeak = return (Left (Pinstr t (Anninstr (AssumeAnn False truePexpr))))
    | otherwise = genTriplePinstr i
genTriplePanninstr_r t ann = return (Left (Pinstr t (Anninstr ann)))
-- * State
-- | Constraint synonym (via ConstraintKinds): any monad with IO
-- access can run the VC generator.
type VCK m = MonadIO m
module Masque.Objects.Bool where
import Masque.Monte
import Masque.Objects.Builtin
-- | Pass a message to a Bool.
-- | Dispatch a Monte message to a Bool receiver.
callBool :: Bool -> String -> [Obj] -> [(Obj, Obj)] -> Monte Obj
-- Unary negation.
callBool b "not" [] _ = wrapBool (not b)
-- The binary connectives share one shape: unwrap the argument,
-- combine with the receiver, wrap the result.  An unknown verb makes
-- the guard fail and falls through to the remaining equations.
callBool b verb [other] _
    | Just op <- lookup verb connectives = do
        x <- unwrapBool other
        wrapBool (op b x)
    where
        connectives =
            [ ("and", (&&))
            , ("butNot", \p q -> p && not q)
            , ("or", (||))
            , ("xor", (/=))
            ]
-- Conditional selection between two arbitrary objects.
callBool b "pick" [l, r] _ = return (if b then l else r)
-- Anything else is not a Bool method.
callBool _ _ _ _ = refuse
| monte-language/masque | Masque/Objects/Bool.hs | gpl-3.0 | 674 | 0 | 10 | 167 | 282 | 145 | 137 | 19 | 2 |
-- We've already seen that a composition of functors is a functor; we can easily convince ourselves
-- that the same is true of bifunctors.
-- Given two morphisms, we simply lift one with one functor and the other with the other functor.
-- We then lift the resulting pair of lifted morphisms with the bifunctor.
-- We can express this composition in Haskell.
-- Let's define a data type that is parameterized by
-- a bifunctor bf (it's a type variable that is a type constructor that takes two types as arguments),
-- two functors fu and gu (type constructors that take one type variable each),
-- and two regular types a and b.
--
-- We apply fu to a and gu to b, and then apply bf to the resulting two types:
newtype BiComp bf fu gu a b = BiComp (bf (fu a) (gu b))
-- BiComp Either (Const ()) Identity a b
--   - bf = Either
--   - fu = Const ()
--   - gu = Identity
-- = BiComp (Either (Const () a) (Identity b))
-- = BiComp (Either () b)
-- = BiComp (Maybe b)
--
-- See Ex_2.hs for proof of this isomorphism
-- The new data type BiComp is a bifunctor in a and b, but only if bf is itself a Bifunctor and fu and gu are Functors.
-- The compiler must know that there will be a definition of bimap available for bf,
-- and definitions of fmap for fu and gu.
--
-- In Haskell, this is expressed as a precondition in the instance declaration:
-- a set of class constraints followed by a double arrow:
--
-- Lift the two functions through their respective functors, then lift
-- the resulting pair of lifted functions through the bifunctor.
instance (Bifunctor bf, Functor fu, Functor gu) =>
  Bifunctor (BiComp bf fu gu) where
    bimap f g (BiComp x) = BiComp (bimap (fmap f) (fmap g) x)
-- The implementation of bimap for BiComp is given in terms of bimap for bf and the two fmaps for fu and gu.
-- The compiler automatically infers all the types and picks the correct overloaded functions whenever BiComp is used.
-- The x in the definition of bimap has the type:
--   bf (fu a) (gu b)
-- If the types of f1 and f2 are:
--
--   f1 :: a -> a'
--   f2 :: b -> b'
-- then the final result is of the type bf (fu a') (gu b')
--
-- bimap :: (fu a -> fu a')
--       -> (gu b -> gu b')
--       -> bf (fu a) (gu b) -> bf (fu a') (gu b')
{-# LANGUAGE NoMonomorphismRestriction,BangPatterns,FlexibleContexts #-}
-----------------------------------------------------------------------------
-- |
-- Module :
-- Copyright : (c) 2013 Boyun Tang
-- License : BSD-style
-- Maintainer : tangboyun@hotmail.com
-- Stability : experimental
-- Portability : ghc
--
--
--
-----------------------------------------------------------------------------
module MiRanda.Diagram
(
recordDiagram
, tableDiagram
, recordDiagram'
, tableDiagram'
, module MiRanda.Diagram.HeatMap
)
where
import Control.Lens (set)
import qualified Data.ByteString.Char8 as B8
import Data.Char (isAlpha)
import Data.Colour.Names
import Data.Default.Class
import Data.Function
import Data.List
import Diagrams.Prelude hiding (align)
import Diagrams.TwoD.Types
import MiRanda.Diagram.Alignment
import MiRanda.Diagram.HeatMap
import MiRanda.Diagram.Icon
import MiRanda.Diagram.LocalAU
import MiRanda.Diagram.Pos
import MiRanda.Diagram.Structure
import MiRanda.Score
import qualified MiRanda.Storage.Type as ST
import MiRanda.Types
import MiRanda.Util
import Diagrams.TwoD
import Diagrams.TwoD.Text
-- Shared table-cell geometry and background colour: hW is the
-- per-character width unit (tableHeader sizes cells from label
-- length * hW), hH the header row height.
hW = 0.6
hH = 1
bgColor = aliceblue
-- | The five dark-blue header cells of the site table; each cell is a
-- white bold label over a rectangle sized from the label length.
tableHeader :: (Renderable Text b,Renderable (Path R2) b, Backend b R2) => [Diagram b R2]
tableHeader =
    map
    (\str ->
      centerXY $
      text str
      # font "Arial" # bold
      # fc white <>
      rect (fromIntegral (length str) * hW) hH
      # lcA transparent
      # fc darkblue
    )
    ["2D Structure"
    ,"Local AU"
    ,"Position"
    ,"Conservation"
    ,"Predicted By"
    ]
-- 8.27 inch , cairo default dpi = 72
-- widthA4 = Width 600
-- heightA4 = Height 840
-- renderPDF :: FilePath -> Diagram Cairo R2 -> IO ()
-- renderPDF outFile d =
-- fst $ renderDia Cairo (CairoOptions outFile widthA4 PDF False) d
-- renderPNG :: FilePath -> Diagram Cairo R2 -> IO ()
-- renderPNG outFile d =
-- fst $ renderDia Cairo (CairoOptions outFile widthA4 PNG False) d
-- | Full figure for one record: the site table on top, then the
-- per-site multiple alignments (sorted by UTR position), scaled so the
-- alignment stack matches the table width, stacked vertically.
recordDiagram :: (Renderable Text b,Renderable (Path R2) b, Backend b R2) => Record -> Diagram b R2
recordDiagram re =
  let ss = sortBy (compare `on` utrRange) $ predictedSites re
      u = utr re
      us = homoUTRs re
      as = map
           (\s ->
             let seedR = seedMatchRange s
                 siteR = utrRange s
             in plotMultiAlign u us seedR siteR # centerXY) ss
      t = tableDiagram re
      vsep = 1
      catOptSetteing = set sep vsep $ set catMethod Cat def
      aPlot d1 ds = scale (getScaleFactor d1 ds) $ vcat' catOptSetteing ds
      -- Ratio between the table width and the widest alignment plot.
      getScaleFactor :: (Renderable Text b,Renderable (Path R2) b,Backend b R2) => Diagram b R2 -> [Diagram b R2] -> Double
      getScaleFactor d1 ds = width d1 / (maximum $ map width ds)
  in pad 1.01 (t === strutY 1 === aPlot t as)
-- | Render the five-column site table for a 'Record': 2D structure,
-- local AU context, position map, conservation icon and prediction
-- icon — one row per predicted site, sorted by UTR position.  Rows
-- are padded to a common per-column width before assembly.
tableDiagram :: (Renderable Text b,Renderable (Path R2) b, Backend b R2) => Record -> Diagram b R2
tableDiagram re =
  let (ss,cons) = unzip $ sortBy (compare `on` (utrRange.fst)) $
                  zip (predictedSites re) (myHead $ getConservations [re])
      thisUTR = B8.filter isAlpha $ extractSeq $ utr re
      utrLen = B8.length thisUTR
      myHead ls = if null ls
                  then error "tableDiagram"
                  else head ls
      col1 = map
             (\s ->
               pad 1.05 $
               renderBinding (seedType s) (utrRange s) (align s)) ss
      col2 = map
             (\s ->
               pad 1.05 $
               plotLocalAU thisUTR (seedType s) (seedMatchRange s)) ss
      col3 = map
             (\s ->
               pad 1.05 $ scale (10 / fromIntegral utrLen) $
               plotPos utrLen (seedMatchRange s)) ss
      col4 = map (\c ->
                   if isConserved c
                   then true
                   else false
                 ) cons
      col5 = map
             (\s -> -- show "miRanda + TargetScan" only when contextScorePlus
                    -- is defined and negative, and the seed is not one of the
                    -- 3 weak match types (M6, M6O, Imperfect)
               pad 1.05 $
               case seedType s of
                 M6 -> onlyM
                 M6O -> onlyM
                 Imperfect -> onlyM
                 _ -> case contextScorePlus s of
                        Just csp ->
                            if contextPlus csp < 0
                            then mAndT
                            else onlyM
                        Nothing -> onlyM
             ) ss
      -- Transpose the header+rows grid, recording for each cell the
      -- common width of its column and the common height of its row.
      traned :: (Renderable Text b,Renderable (Path R2) b, Backend b R2) => [Diagram b R2] -> [[(Diagram b R2,(Double,Double))]]
      traned xs = transpose $ map
                  (\col ->
                    let w = maximum $ map (width . fst) col
                    in map (\(d,h) -> (d,(w,h))) col
                  ) $ transpose $ (zip xs $ map height xs) :
                  (map
                   (\row ->
                     let h = maximum $ map height row
                     in zip row $ repeat h
                   ) $ transpose [col1,col2,col3,col4,col5])
      -- Back each cell with a rectangle: dark blue for the header row,
      -- the shared background colour for data rows.
      result :: (Renderable Text b,Renderable (Path R2) b, Backend b R2) => [[(Diagram b R2,(Double,Double))]] -> [[Diagram b R2]]
      result xss = if null xss
                   then []
                   else (map (\(d,(w,h)) ->
                               d <>
                               rect w h
                               # lcA transparent
                               # fc darkblue) $ head xss) :
                        (map (map
                              (\(d,(w,h)) ->
                                d <>
                                rect w h
                                # lcA transparent
                                # fc bgColor
                              )) $ tail xss)
      vsep = 0.2
      hsep = 0.2
      vCatOptSetteing = set sep vsep $ set catMethod Cat def
      hCatOptSetteing = set sep hsep $ set catMethod Cat def
  in centerXY $
     vcat' vCatOptSetteing $
     map (hcat' hCatOptSetteing) $
     result (traned tableHeader)
-- | Storage-layer variant of 'tableDiagram': same five-column table,
-- but driven by the ST.GeneInfo / ST.MiRSites record types.
tableDiagram' :: (Renderable Text b,Renderable (Path R2) b, Backend b R2) => ST.GeneInfo -> ST.MiRSites -> Diagram b R2
tableDiagram' gi mirSs =
  let ss = sortBy (compare `on` ST.siteRange) $
           ST.sites mirSs
      thisUTR = B8.filter isAlpha $ extractSeq $ ST.thisSpecies gi
      utrLen = B8.length thisUTR
      col1 = map
             (\s ->
               pad 1.05 $
               renderBinding (ST.seed s) (ST.siteRange s) (ST.alignStructure s)) ss
      col2 = map
             (\s ->
               pad 1.05 $
               plotLocalAU thisUTR (ST.seed s) (ST.seedRange s)) ss
      col3 = map
             (\s ->
               pad 1.05 $ scale (10 / fromIntegral utrLen) $
               plotPos utrLen (ST.siteRange s)) ss
      col4 = map (\s ->
                   if isConserved $ ST.conserveScore s
                   then true
                   else false
                 ) ss
      col5 = map
             (\s -> -- show "miRanda + TargetScan" only when contextScorePlus
                    -- is defined and negative, and the seed is not one of the
                    -- 3 weak match types (M6, M6O, Imperfect)
               pad 1.05 $
               case ST.seed s of
                 M6 -> onlyM
                 M6O -> onlyM
                 Imperfect -> onlyM
                 _ -> case ST.contextScorePlus s of
                        Just csp ->
                            if contextPlus csp < 0
                            then mAndT
                            else onlyM
                        Nothing -> onlyM
             ) ss
      -- Transpose the header+rows grid, recording each cell's common
      -- column width and row height (same scheme as 'tableDiagram').
      traned :: (Renderable Text b,Renderable (Path R2) b, Backend b R2) => [Diagram b R2] -> [[(Diagram b R2,(Double,Double))]]
      traned xs = transpose $ map
                  (\col ->
                    let w = maximum $ map (width . fst) col
                    in map (\(d,h) -> (d,(w,h))) col
                  ) $ transpose $ (zip xs $ map height xs) :
                  (map
                   (\row ->
                     let h = maximum $ map height row
                     in zip row $ repeat h
                   ) $ transpose [col1,col2,col3,col4,col5])
      -- Back each cell with a rectangle: dark blue header, tinted rows.
      result :: (Renderable Text b,Renderable (Path R2) b, Backend b R2) => [[(Diagram b R2,(Double,Double))]] -> [[Diagram b R2]]
      result xss = if null xss
                   then []
                   else (map (\(d,(w,h)) ->
                               d <>
                               rect w h
                               # lcA transparent
                               # fc darkblue) $ head xss) :
                        (map (map
                              (\(d,(w,h)) ->
                                d <>
                                rect w h
                                # lcA transparent
                                # fc bgColor
                              )) $ tail xss)
      vsep = 0.2
      hsep = 0.2
      vCatOptSetteing = set sep vsep $ set catMethod Cat def
      hCatOptSetteing = set sep hsep $ set catMethod Cat def
  in centerXY $
     vcat' vCatOptSetteing $
     map (hcat' hCatOptSetteing) $
     result (traned tableHeader)
-- | Render one diagram per miRNA of the gene record: the site table from
-- 'tableDiagram'' stacked above the per-site multiple-alignment plots.
-- A miRNA with no sites yields 'mempty'.
recordDiagram' :: (Renderable Text b,Renderable (Path R2) b, Backend b R2) => ST.GeneRecord -> [Diagram b R2]
recordDiagram' gr =
  let gi = ST.geneInfo gr
      -- this species' UTR and the orthologous UTRs
      u = ST.thisSpecies gi
      us = ST.otherSpecies gi
  in map
     (\mrSite ->
       let ss = sortBy (compare `on` ST.siteRange) $
                ST.sites mrSite
           -- one multiple-alignment plot per site
           as = map
                (\s ->
                  let seedR = ST.seedRange s
                      siteR = ST.siteRange s
                  in plotMultiAlign u us seedR siteR # centerXY) ss
           t = tableDiagram' gi mrSite
           vsep = 1
           catOptSetteing = set sep vsep $ set catMethod Cat def
           -- stack the alignment plots, scaled down to the table's width;
           -- empty when there are no other species to align against
           aPlot d1 ds = if null us
                         then mempty
                         else scale (getScaleFactor d1 ds) $ vcat' catOptSetteing ds
           getScaleFactor :: (Renderable Text b,Renderable (Path R2) b,Backend b R2) => Diagram b R2 -> [Diagram b R2] -> Double
           getScaleFactor d1 ds = width d1 / (maximum $ map width ds)
       in if null ss
          then mempty
          else pad 1.01 (t === strutY 1 === aPlot t as)) $
     ST.mirSites gr
| tangboyun/miranda | src/MiRanda/Diagram.hs | gpl-3.0 | 10,504 | 0 | 23 | 4,541 | 3,057 | 1,593 | 1,464 | 236 | 9 |
module SimulateTests
(
propToBitsFromBitsId
) where
import Simulate
import Test.QuickCheck
-- | Round-tripping a positive integer through its bit representation
-- yields the original value.
propToBitsFromBitsId :: Positive Integer -> Bool
propToBitsFromBitsId (Positive n) = bitsToInt (intToBits n) == n
| aparent/jcc | tests/SimulateTests.hs | gpl-3.0 | 209 | 0 | 8 | 29 | 56 | 30 | 26 | 7 | 1 |
{-# LANGUAGE TemplateHaskell #-}
------------------------------------------------------------------------------
-- | This module defines our application's state type and an alias for its
-- handler monad.
module Application where
------------------------------------------------------------------------------
import Control.Lens
import Snap.Snaplet
import Api.Core
------------------------------------------------------------------------------
-- | The application's root state: currently just the API snaplet.
data App = App
    { _api :: Snaplet Api }
-- Derive lenses (e.g. 'api') for the fields of 'App' via Template Haskell.
makeLenses ''App
------------------------------------------------------------------------------
-- | The monad in which this application's handlers run.
type AppHandler = Handler App App
| japesinator/eve-api | src/Application.hs | mpl-2.0 | 625 | 0 | 9 | 64 | 63 | 38 | 25 | 9 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AndroidPublisher.Edits.DeobfuscationFiles.Upload
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Uploads the deobfuscation file of the specified APK. If a deobfuscation
-- file already exists, it will be replaced.
--
-- /See:/ <https://developers.google.com/android-publisher Google Play Developer API Reference> for @androidpublisher.edits.deobfuscationfiles.upload@.
module Network.Google.Resource.AndroidPublisher.Edits.DeobfuscationFiles.Upload
(
-- * REST Resource
EditsDeobfuscationFilesUploadResource
-- * Creating a Request
, editsDeobfuscationFilesUpload
, EditsDeobfuscationFilesUpload
-- * Request Lenses
, edfuDeobfuscationFileType
, edfuPackageName
, edfuAPKVersionCode
, edfuEditId
) where
import Network.Google.AndroidPublisher.Types
import Network.Google.Prelude
-- | A resource alias for @androidpublisher.edits.deobfuscationfiles.upload@ method which the
-- 'EditsDeobfuscationFilesUpload' request conforms to.
-- The first alternative is the plain metadata request; the second, rooted
-- under \"upload\", additionally carries the deobfuscation file as the
-- request body ('AltMedia').
type EditsDeobfuscationFilesUploadResource =
     "androidpublisher" :>
       "v2" :>
         "applications" :>
           Capture "packageName" Text :>
             "edits" :>
               Capture "editId" Text :>
                 "apks" :>
                   Capture "apkVersionCode" (Textual Int32) :>
                     "deobfuscationFiles" :>
                       Capture "deobfuscationFileType"
                         EditsDeobfuscationFilesUploadDeobfuscationFileType
                         :>
                         QueryParam "alt" AltJSON :>
                           Post '[JSON] DeobfuscationFilesUploadResponse
       :<|>
       "upload" :>
         "androidpublisher" :>
           "v2" :>
             "applications" :>
               Capture "packageName" Text :>
                 "edits" :>
                   Capture "editId" Text :>
                     "apks" :>
                       Capture "apkVersionCode" (Textual Int32) :>
                         "deobfuscationFiles" :>
                           Capture "deobfuscationFileType"
                             EditsDeobfuscationFilesUploadDeobfuscationFileType
                             :>
                             QueryParam "alt" AltJSON :>
                               QueryParam "uploadType" AltMedia :>
                                 AltMedia :>
                                   Post '[JSON] DeobfuscationFilesUploadResponse
-- | Uploads the deobfuscation file of the specified APK. If a deobfuscation
-- file already exists, it will be replaced.
--
-- /See:/ 'editsDeobfuscationFilesUpload' smart constructor.
data EditsDeobfuscationFilesUpload = EditsDeobfuscationFilesUpload'
    { _edfuDeobfuscationFileType :: !EditsDeobfuscationFilesUploadDeobfuscationFileType -- ^ kind of deobfuscation file
    , _edfuPackageName :: !Text -- ^ app package name, e.g. \"com.spiffygame\"
    , _edfuAPKVersionCode :: !(Textual Int32) -- ^ APK version code
    , _edfuEditId :: !Text -- ^ edit-session identifier
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'EditsDeobfuscationFilesUpload' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'edfuDeobfuscationFileType'
--
-- * 'edfuPackageName'
--
-- * 'edfuAPKVersionCode'
--
-- * 'edfuEditId'
-- Build a request from its four required pieces. The version code is
-- wrapped into its 'Textual' representation here so callers can pass a
-- plain 'Int32'.
editsDeobfuscationFilesUpload
    :: EditsDeobfuscationFilesUploadDeobfuscationFileType -- ^ 'edfuDeobfuscationFileType'
    -> Text -- ^ 'edfuPackageName'
    -> Int32 -- ^ 'edfuAPKVersionCode'
    -> Text -- ^ 'edfuEditId'
    -> EditsDeobfuscationFilesUpload
editsDeobfuscationFilesUpload fileType packageName versionCode editId =
    EditsDeobfuscationFilesUpload'
        { _edfuDeobfuscationFileType = fileType
        , _edfuPackageName = packageName
        , _edfuAPKVersionCode = _Coerce # versionCode
        , _edfuEditId = editId
        }
-- | The type of deobfuscation file being uploaded.
edfuDeobfuscationFileType :: Lens' EditsDeobfuscationFilesUpload EditsDeobfuscationFilesUploadDeobfuscationFileType
edfuDeobfuscationFileType
  = lens _edfuDeobfuscationFileType
      (\ s a -> s{_edfuDeobfuscationFileType = a})

-- | Unique identifier of the Android app for which the deobfuscation files
-- are being uploaded; for example, \"com.spiffygame\".
edfuPackageName :: Lens' EditsDeobfuscationFilesUpload Text
edfuPackageName
  = lens _edfuPackageName
      (\ s a -> s{_edfuPackageName = a})

-- | The version code of the APK whose deobfuscation file is being uploaded.
-- The '_Coerce' iso lets callers work with a plain 'Int32'.
edfuAPKVersionCode :: Lens' EditsDeobfuscationFilesUpload Int32
edfuAPKVersionCode
  = lens _edfuAPKVersionCode
      (\ s a -> s{_edfuAPKVersionCode = a})
      . _Coerce

-- | Unique identifier for this edit.
edfuEditId :: Lens' EditsDeobfuscationFilesUpload Text
edfuEditId
  = lens _edfuEditId (\ s a -> s{_edfuEditId = a})
-- Metadata-only request: served by the first branch of the resource type.
instance GoogleRequest EditsDeobfuscationFilesUpload
         where
        type Rs EditsDeobfuscationFilesUpload =
             DeobfuscationFilesUploadResponse
        type Scopes EditsDeobfuscationFilesUpload =
             '["https://www.googleapis.com/auth/androidpublisher"]
        requestClient EditsDeobfuscationFilesUpload'{..}
          = go _edfuPackageName _edfuEditId _edfuAPKVersionCode
              _edfuDeobfuscationFileType
              (Just AltJSON)
              androidPublisherService
          where go :<|> _
                  = buildClient
                      (Proxy ::
                         Proxy EditsDeobfuscationFilesUploadResource)
                      mempty

-- Media upload: the second (\"upload\"-prefixed) branch, which carries the
-- deobfuscation file as the request body.
instance GoogleRequest
         (MediaUpload EditsDeobfuscationFilesUpload) where
        type Rs (MediaUpload EditsDeobfuscationFilesUpload) =
             DeobfuscationFilesUploadResponse
        type Scopes
               (MediaUpload EditsDeobfuscationFilesUpload)
             = Scopes EditsDeobfuscationFilesUpload
        requestClient
          (MediaUpload EditsDeobfuscationFilesUpload'{..} body)
          = go _edfuPackageName _edfuEditId _edfuAPKVersionCode
              _edfuDeobfuscationFileType
              (Just AltJSON)
              (Just AltMedia)
              body
              androidPublisherService
          where _ :<|> go
                  = buildClient
                      (Proxy ::
                         Proxy EditsDeobfuscationFilesUploadResource)
                      mempty
| rueshyna/gogol | gogol-android-publisher/gen/Network/Google/Resource/AndroidPublisher/Edits/DeobfuscationFiles/Upload.hs | mpl-2.0 | 7,004 | 0 | 33 | 1,941 | 797 | 446 | 351 | 133 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.EC2.RevokeSecurityGroupIngress
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Removes one or more ingress rules from a security group. The values that you
-- specify in the revoke request (for example, ports) must match the existing
-- rule's values for the rule to be removed.
--
-- Each rule consists of the protocol and the CIDR range or source security
-- group. For the TCP and UDP protocols, you must also specify the destination
-- port or range of ports. For the ICMP protocol, you must also specify the ICMP
-- type and code.
--
-- Rule changes are propagated to instances within the security group as
-- quickly as possible. However, a small delay might occur.
--
-- <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-RevokeSecurityGroupIngress.html>
module Network.AWS.EC2.RevokeSecurityGroupIngress
(
-- * Request
RevokeSecurityGroupIngress
-- ** Request constructor
, revokeSecurityGroupIngress
-- ** Request lenses
, rsgiCidrIp
, rsgiDryRun
, rsgiFromPort
, rsgiGroupId
, rsgiGroupName
, rsgiIpPermissions
, rsgiIpProtocol
, rsgiSourceSecurityGroupName
, rsgiSourceSecurityGroupOwnerId
, rsgiToPort
-- * Response
, RevokeSecurityGroupIngressResponse
-- ** Response constructor
, revokeSecurityGroupIngressResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.EC2.Types
import qualified GHC.Exts
-- | Request parameters for the RevokeSecurityGroupIngress call; every
-- field is optional and starts unset/empty in the
-- 'revokeSecurityGroupIngress' constructor below.
data RevokeSecurityGroupIngress = RevokeSecurityGroupIngress
    { _rsgiCidrIp                     :: Maybe Text
    , _rsgiDryRun                     :: Maybe Bool
    , _rsgiFromPort                   :: Maybe Int
    , _rsgiGroupId                    :: Maybe Text
    , _rsgiGroupName                  :: Maybe Text
    , _rsgiIpPermissions              :: List "item" IpPermission
    , _rsgiIpProtocol                 :: Maybe Text
    , _rsgiSourceSecurityGroupName    :: Maybe Text
    , _rsgiSourceSecurityGroupOwnerId :: Maybe Text
    , _rsgiToPort                     :: Maybe Int
    } deriving (Eq, Read, Show)
-- | 'RevokeSecurityGroupIngress' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'rsgiCidrIp' @::@ 'Maybe' 'Text'
--
-- * 'rsgiDryRun' @::@ 'Maybe' 'Bool'
--
-- * 'rsgiFromPort' @::@ 'Maybe' 'Int'
--
-- * 'rsgiGroupId' @::@ 'Maybe' 'Text'
--
-- * 'rsgiGroupName' @::@ 'Maybe' 'Text'
--
-- * 'rsgiIpPermissions' @::@ ['IpPermission']
--
-- * 'rsgiIpProtocol' @::@ 'Maybe' 'Text'
--
-- * 'rsgiSourceSecurityGroupName' @::@ 'Maybe' 'Text'
--
-- * 'rsgiSourceSecurityGroupOwnerId' @::@ 'Maybe' 'Text'
--
-- * 'rsgiToPort' @::@ 'Maybe' 'Int'
--
-- An empty request: every optional field unset, no IP permissions.
-- Fields are listed in record-declaration order.
revokeSecurityGroupIngress :: RevokeSecurityGroupIngress
revokeSecurityGroupIngress = RevokeSecurityGroupIngress
    { _rsgiCidrIp                     = Nothing
    , _rsgiDryRun                     = Nothing
    , _rsgiFromPort                   = Nothing
    , _rsgiGroupId                    = Nothing
    , _rsgiGroupName                  = Nothing
    , _rsgiIpPermissions              = mempty
    , _rsgiIpProtocol                 = Nothing
    , _rsgiSourceSecurityGroupName    = Nothing
    , _rsgiSourceSecurityGroupOwnerId = Nothing
    , _rsgiToPort                     = Nothing
    }
-- | The CIDR IP address range. You can't specify this parameter when specifying a
-- source security group.
rsgiCidrIp :: Lens' RevokeSecurityGroupIngress (Maybe Text)
rsgiCidrIp = lens _rsgiCidrIp (\s a -> s { _rsgiCidrIp = a })

-- | Checks whether you have the required permissions for the action, without
-- actually making the request, and provides an error response. If you have the
-- required permissions, the error response is 'DryRunOperation'. Otherwise, it is 'UnauthorizedOperation'.
rsgiDryRun :: Lens' RevokeSecurityGroupIngress (Maybe Bool)
rsgiDryRun = lens _rsgiDryRun (\s a -> s { _rsgiDryRun = a })

-- | The start of port range for the TCP and UDP protocols, or an ICMP type
-- number. For the ICMP type number, use '-1' to specify all ICMP types.
rsgiFromPort :: Lens' RevokeSecurityGroupIngress (Maybe Int)
rsgiFromPort = lens _rsgiFromPort (\s a -> s { _rsgiFromPort = a })

-- | The ID of the security group.
rsgiGroupId :: Lens' RevokeSecurityGroupIngress (Maybe Text)
rsgiGroupId = lens _rsgiGroupId (\s a -> s { _rsgiGroupId = a })

-- | [EC2-Classic, default VPC] The name of the security group.
rsgiGroupName :: Lens' RevokeSecurityGroupIngress (Maybe Text)
rsgiGroupName = lens _rsgiGroupName (\s a -> s { _rsgiGroupName = a })

-- | A set of IP permissions. You can't specify a source security group and a CIDR
-- IP address range.
rsgiIpPermissions :: Lens' RevokeSecurityGroupIngress [IpPermission]
rsgiIpPermissions =
    lens _rsgiIpPermissions (\s a -> s { _rsgiIpPermissions = a })
        . _List
-- the '_List' iso unwraps the "item"-tagged query-list wrapper

-- | The IP protocol name ('tcp', 'udp', 'icmp') or number (see <http://www.iana.org/assignments/protocol-numbers/protocol-numbers.xhtml Protocol Numbers>). Use '-1'
-- to specify all.
rsgiIpProtocol :: Lens' RevokeSecurityGroupIngress (Maybe Text)
rsgiIpProtocol = lens _rsgiIpProtocol (\s a -> s { _rsgiIpProtocol = a })

-- | [EC2-Classic, default VPC] The name of the source security group. You can't
-- specify a source security group and a CIDR IP address range.
rsgiSourceSecurityGroupName :: Lens' RevokeSecurityGroupIngress (Maybe Text)
rsgiSourceSecurityGroupName =
    lens _rsgiSourceSecurityGroupName
        (\s a -> s { _rsgiSourceSecurityGroupName = a })

-- | The ID of the source security group. You can't specify a source security
-- group and a CIDR IP address range.
-- NOTE(review): the field name suggests this is the source group /owner's/
-- account ID rather than the group ID — confirm against the EC2 API docs.
rsgiSourceSecurityGroupOwnerId :: Lens' RevokeSecurityGroupIngress (Maybe Text)
rsgiSourceSecurityGroupOwnerId =
    lens _rsgiSourceSecurityGroupOwnerId
        (\s a -> s { _rsgiSourceSecurityGroupOwnerId = a })

-- | The end of port range for the TCP and UDP protocols, or an ICMP code number.
-- For the ICMP code number, use '-1' to specify all ICMP codes for the ICMP type.
rsgiToPort :: Lens' RevokeSecurityGroupIngress (Maybe Int)
rsgiToPort = lens _rsgiToPort (\s a -> s { _rsgiToPort = a })
-- | Empty response: a successful revoke returns no data.
data RevokeSecurityGroupIngressResponse = RevokeSecurityGroupIngressResponse
    deriving (Eq, Ord, Read, Show, Generic)

-- | 'RevokeSecurityGroupIngressResponse' constructor.
revokeSecurityGroupIngressResponse :: RevokeSecurityGroupIngressResponse
revokeSecurityGroupIngressResponse = RevokeSecurityGroupIngressResponse
instance ToPath RevokeSecurityGroupIngress where
    toPath = const "/"

-- Serialise every set field as an EC2 query parameter; the permissions
-- list expands into indexed IpPermissions.N entries via 'toQueryList'.
instance ToQuery RevokeSecurityGroupIngress where
    toQuery RevokeSecurityGroupIngress{..} = mconcat
        [ "CidrIp" =? _rsgiCidrIp
        , "DryRun" =? _rsgiDryRun
        , "FromPort" =? _rsgiFromPort
        , "GroupId" =? _rsgiGroupId
        , "GroupName" =? _rsgiGroupName
        , "IpPermissions" `toQueryList` _rsgiIpPermissions
        , "IpProtocol" =? _rsgiIpProtocol
        , "SourceSecurityGroupName" =? _rsgiSourceSecurityGroupName
        , "SourceSecurityGroupOwnerId" =? _rsgiSourceSecurityGroupOwnerId
        , "ToPort" =? _rsgiToPort
        ]

instance ToHeaders RevokeSecurityGroupIngress

instance AWSRequest RevokeSecurityGroupIngress where
    type Sv RevokeSecurityGroupIngress = EC2
    type Rs RevokeSecurityGroupIngress = RevokeSecurityGroupIngressResponse

    request = post "RevokeSecurityGroupIngress"
    response = nullResponse RevokeSecurityGroupIngressResponse
| romanb/amazonka | amazonka-ec2/gen/Network/AWS/EC2/RevokeSecurityGroupIngress.hs | mpl-2.0 | 8,511 | 0 | 10 | 1,961 | 992 | 597 | 395 | 104 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.ConsumerSurveys.Types.Sum
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.ConsumerSurveys.Types.Sum where
import Network.Google.Prelude hiding (Bytes)
| brendanhay/gogol | gogol-consumersurveys/gen/Network/Google/ConsumerSurveys/Types/Sum.hs | mpl-2.0 | 619 | 0 | 5 | 101 | 35 | 29 | 6 | 8 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.SWF.RespondDecisionTaskCompleted
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Used by deciders to tell the service that the 'DecisionTask' identified by the 'taskToken' has successfully completed. The 'decisions' argument specifies the list of
-- decisions made while processing the task.
--
-- A 'DecisionTaskCompleted' event is added to the workflow history. The 'executionContext' specified is attached to the event in the workflow execution history.
--
-- Access Control
--
-- If an IAM policy grants permission to use 'RespondDecisionTaskCompleted', it
-- can express permissions for the list of decisions in the 'decisions' parameter.
-- Each of the decisions has one or more parameters, much like a regular API
-- call. To allow for policies to be as readable as possible, you can express
-- permissions on decisions as if they were actual API calls, including applying
-- conditions to some parameters. For more information, see <http://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html Using IAM to ManageAccess to Amazon SWF Workflows>.
--
-- <http://docs.aws.amazon.com/amazonswf/latest/apireference/API_RespondDecisionTaskCompleted.html>
module Network.AWS.SWF.RespondDecisionTaskCompleted
(
-- * Request
RespondDecisionTaskCompleted
-- ** Request constructor
, respondDecisionTaskCompleted
-- ** Request lenses
, rdtcDecisions
, rdtcExecutionContext
, rdtcTaskToken
-- * Response
, RespondDecisionTaskCompletedResponse
-- ** Response constructor
, respondDecisionTaskCompletedResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.SWF.Types
import qualified GHC.Exts
-- | Request for RespondDecisionTaskCompleted: the decisions made while
-- processing a decision task, an optional execution context, and the
-- task's opaque token.
data RespondDecisionTaskCompleted = RespondDecisionTaskCompleted
    { _rdtcDecisions        :: List "decisions" Decision
    , _rdtcExecutionContext :: Maybe Text
    , _rdtcTaskToken        :: Text
    } deriving (Eq, Read, Show)
-- | 'RespondDecisionTaskCompleted' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'rdtcDecisions' @::@ ['Decision']
--
-- * 'rdtcExecutionContext' @::@ 'Maybe' 'Text'
--
-- * 'rdtcTaskToken' @::@ 'Text'
--
-- Construct a request from the mandatory task token; the decision list
-- starts empty and no execution context is attached.
respondDecisionTaskCompleted :: Text -- ^ 'rdtcTaskToken'
                             -> RespondDecisionTaskCompleted
respondDecisionTaskCompleted token = RespondDecisionTaskCompleted
    { _rdtcDecisions        = mempty
    , _rdtcExecutionContext = Nothing
    , _rdtcTaskToken        = token
    }
-- | The list of decisions (possibly empty) made by the decider while processing
-- this decision task. See the docs for the 'Decision' structure for details.
-- The '_List' iso exposes the wrapped query list as a plain list.
rdtcDecisions :: Lens' RespondDecisionTaskCompleted [Decision]
rdtcDecisions = lens _rdtcDecisions (\s a -> s { _rdtcDecisions = a }) . _List

-- | User defined context to add to workflow execution.
rdtcExecutionContext :: Lens' RespondDecisionTaskCompleted (Maybe Text)
rdtcExecutionContext =
    lens _rdtcExecutionContext (\s a -> s { _rdtcExecutionContext = a })

-- | The 'taskToken' from the 'DecisionTask'.
--
-- 'taskToken' is generated by the service and should be treated as an opaque
-- value. If the task is passed to another process, its 'taskToken' must also be
-- passed. This enables it to provide its progress and respond with results.
rdtcTaskToken :: Lens' RespondDecisionTaskCompleted Text
rdtcTaskToken = lens _rdtcTaskToken (\s a -> s { _rdtcTaskToken = a })
-- | Empty response: SWF returns no data for this call.
data RespondDecisionTaskCompletedResponse = RespondDecisionTaskCompletedResponse
    deriving (Eq, Ord, Read, Show, Generic)

-- | 'RespondDecisionTaskCompletedResponse' constructor.
respondDecisionTaskCompletedResponse :: RespondDecisionTaskCompletedResponse
respondDecisionTaskCompletedResponse = RespondDecisionTaskCompletedResponse
instance ToPath RespondDecisionTaskCompleted where
    toPath = const "/"

instance ToQuery RespondDecisionTaskCompleted where
    toQuery = const mempty

instance ToHeaders RespondDecisionTaskCompleted

-- JSON body sent to the SWF endpoint.
instance ToJSON RespondDecisionTaskCompleted where
    toJSON RespondDecisionTaskCompleted{..} = object
        [ "taskToken" .= _rdtcTaskToken
        , "decisions" .= _rdtcDecisions
        , "executionContext" .= _rdtcExecutionContext
        ]

instance AWSRequest RespondDecisionTaskCompleted where
    type Sv RespondDecisionTaskCompleted = SWF
    type Rs RespondDecisionTaskCompleted = RespondDecisionTaskCompletedResponse

    request = post "RespondDecisionTaskCompleted"
    response = nullResponse RespondDecisionTaskCompletedResponse
| dysinger/amazonka | amazonka-swf/gen/Network/AWS/SWF/RespondDecisionTaskCompleted.hs | mpl-2.0 | 5,489 | 0 | 10 | 1,034 | 499 | 307 | 192 | 60 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Analytics.Management.WebPropertyUserLinks.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Removes a user from the given web property.
--
-- /See:/ <https://developers.google.com/analytics/ Google Analytics API Reference> for @analytics.management.webpropertyUserLinks.delete@.
module Network.Google.Resource.Analytics.Management.WebPropertyUserLinks.Delete
(
-- * REST Resource
ManagementWebPropertyUserLinksDeleteResource
-- * Creating a Request
, managementWebPropertyUserLinksDelete
, ManagementWebPropertyUserLinksDelete
-- * Request Lenses
, mwpuldWebPropertyId
, mwpuldAccountId
, mwpuldLinkId
) where
import Network.Google.Analytics.Types
import Network.Google.Prelude
-- | A resource alias for @analytics.management.webpropertyUserLinks.delete@ method which the
-- 'ManagementWebPropertyUserLinksDelete' request conforms to.
-- DELETE analytics/v3/management/accounts/{accountId}/webproperties/
--        {webPropertyId}/entityUserLinks/{linkId}
type ManagementWebPropertyUserLinksDeleteResource =
     "analytics" :>
       "v3" :>
         "management" :>
           "accounts" :>
             Capture "accountId" Text :>
               "webproperties" :>
                 Capture "webPropertyId" Text :>
                   "entityUserLinks" :>
                     Capture "linkId" Text :>
                       QueryParam "alt" AltJSON :> Delete '[JSON] ()
-- | Removes a user from the given web property.
--
-- /See:/ 'managementWebPropertyUserLinksDelete' smart constructor.
data ManagementWebPropertyUserLinksDelete =
  ManagementWebPropertyUserLinksDelete'
    { _mwpuldWebPropertyId :: !Text -- ^ web property the user is removed from
    , _mwpuldAccountId :: !Text -- ^ owning account
    , _mwpuldLinkId :: !Text -- ^ user link to delete
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ManagementWebPropertyUserLinksDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'mwpuldWebPropertyId'
--
-- * 'mwpuldAccountId'
--
-- * 'mwpuldLinkId'
-- Build a delete request from its three required identifiers.
managementWebPropertyUserLinksDelete
    :: Text -- ^ 'mwpuldWebPropertyId'
    -> Text -- ^ 'mwpuldAccountId'
    -> Text -- ^ 'mwpuldLinkId'
    -> ManagementWebPropertyUserLinksDelete
managementWebPropertyUserLinksDelete webPropertyId accountId linkId =
  ManagementWebPropertyUserLinksDelete'
    { _mwpuldWebPropertyId = webPropertyId
    , _mwpuldAccountId = accountId
    , _mwpuldLinkId = linkId
    }
-- Lenses over the request's three identifiers.

-- | Web Property ID to delete the user link for.
mwpuldWebPropertyId :: Lens' ManagementWebPropertyUserLinksDelete Text
mwpuldWebPropertyId
  = lens _mwpuldWebPropertyId
      (\ s a -> s{_mwpuldWebPropertyId = a})

-- | Account ID to delete the user link for.
mwpuldAccountId :: Lens' ManagementWebPropertyUserLinksDelete Text
mwpuldAccountId
  = lens _mwpuldAccountId
      (\ s a -> s{_mwpuldAccountId = a})

-- | Link ID to delete the user link for.
mwpuldLinkId :: Lens' ManagementWebPropertyUserLinksDelete Text
mwpuldLinkId
  = lens _mwpuldLinkId (\ s a -> s{_mwpuldLinkId = a})
-- The call returns no payload ('Rs' is unit) and requires the
-- analytics.manage.users scope.
instance GoogleRequest
           ManagementWebPropertyUserLinksDelete
         where
        type Rs ManagementWebPropertyUserLinksDelete = ()
        type Scopes ManagementWebPropertyUserLinksDelete =
             '["https://www.googleapis.com/auth/analytics.manage.users"]
        requestClient
          ManagementWebPropertyUserLinksDelete'{..}
          = go _mwpuldAccountId _mwpuldWebPropertyId
              _mwpuldLinkId
              (Just AltJSON)
              analyticsService
          where go
                  = buildClient
                      (Proxy ::
                         Proxy ManagementWebPropertyUserLinksDeleteResource)
                      mempty
| brendanhay/gogol | gogol-analytics/gen/Network/Google/Resource/Analytics/Management/WebPropertyUserLinks/Delete.hs | mpl-2.0 | 4,358 | 0 | 17 | 958 | 469 | 279 | 190 | 82 | 1 |
-- | Double a number.
doubleMe :: Num a => a -> a
doubleMe x = x + x

-- | Double both arguments and add the results.
doubleUs :: Num a => a -> a -> a
doubleUs x y = x * 2 + y * 2

-- | Same as 'doubleUs', expressed in terms of 'doubleMe'.
doubleUs2 :: Num a => a -> a -> a
doubleUs2 x y = doubleMe x + doubleMe y
-- | Double a number only if it is at most 100; larger values pass through.
doubleSmalls :: (Num a, Ord a) => a -> a
doubleSmalls x = if x > 100 then x else x * 2

-- | 'doubleSmalls', then add 20 to the result.
doubleSmalls' :: (Num a, Ord a) => a -> a
doubleSmalls' x = (if x > 100 then x else x * 2) + 20
-- | A fixed greeting (identifiers may contain apostrophes in Haskell).
conanO'Brien :: String
conanO'Brien = "It's a-me, Conan O'Brien!"
-- | Length of a list via a comprehension (one 1 per element).
-- The result stays polymorphic in any 'Num', matching the inferred type.
length' :: Num b => [a] -> b
length' xs = sum [1 | _ <- xs]
--removeNonUpperCase :: [Char] -> [Char]
--equivalent to:
-- | Keep only the uppercase ASCII letters of a string.
removeNonUpperCase :: String -> String
removeNonUpperCase = filter (`elem` ['A'..'Z'])
--we can use the fact that characters are Ordered too
-- | Keep only the uppercase ASCII letters, using 'Char' ordering.
removeNonUpperCase2 :: String -> String
removeNonUpperCase2 = filter (\c -> c >= 'A' && c <= 'Z')
| alexliew/learn_you_a_haskell | 1_starting_out.hs | unlicense | 583 | 0 | 8 | 133 | 227 | 120 | 107 | 11 | 2 |
module Codez where
-- | Specification of characters: a single char, an inclusive range,
-- or an explicit set.
data CharSpec = C Char | CRange Char Char | CSet [Char]

-- | An alphabet is an ordered list of character specifications.
type Alphabet = [CharSpec]

-- | Either an unrecognised character ('Left') or the index of the first
-- matching 'CharSpec' in the alphabet ('Right').
type CodePoint = Either Char Int

-- | Encode every character of a string against the alphabet.
parse :: Alphabet -> String -> [CodePoint]
parse a = map (parseChar a)

-- | Encode one character: its alphabet index when present, otherwise the
-- character itself.
parseChar :: Alphabet -> Char -> CodePoint
parseChar a c = maybe (Left c) Right (maybeIndexOf a c)

-- | Index of the first 'CharSpec' matching the character, if any.
-- (Previously an unimplemented stub that always returned 'Nothing'.)
maybeIndexOf :: Alphabet -> Char -> Maybe Int
maybeIndexOf specs c = go 0 specs
  where
    go _ [] = Nothing
    go i (s : rest)
      | matches s = Just i
      | otherwise = go (i + 1) rest
    -- whether the character satisfies one specification
    matches (C x) = c == x
    matches (CRange lo hi) = lo <= c && c <= hi
    matches (CSet set) = c `elem` set

-- | The empty alphabet: every character parses to 'Left'.
nullAlphabet :: Alphabet
nullAlphabet = []
| pdbartlett/misc-stuff | haskell/codez/Codez.hs | apache-2.0 | 521 | 0 | 8 | 145 | 177 | 94 | 83 | 15 | 2 |
-- | Score one input line of six integers (AOJ problem 0229).
-- NOTE(review): the variable meanings (b, r, g, c, s, t) are undocumented
-- here; the arithmetic mirrors the problem's point table (b items count 5
-- units, r items 3) — confirm against the original problem statement.
-- The pattern is deliberately partial: 'main' only feeds 6-element lists.
ans (b:r:g:c:s:t:_) =
  let n = t - (b+r+g+c+s) - (s) - (b*5) - (r*3)
  in
   100 - (3*(b+r+g+c+s+n)) - (2*(b*5+r*3)) + (15*(b*5+r*3)) + (15*b) + (15*r) + (7*g) + (2*c)
-- | Read all of stdin; each line holds six integers. Stop at the all-zero
-- sentinel line "0 0 0 0 0 0" and print one score per preceding line.
main = do
  c <- getContents
  let i = takeWhile (/= [0,0,0,0,0,0]) $ map (map read) $ map words $ lines c :: [[Int]]
      o = map ans i
  mapM_ print o
| a143753/AOJ | 0229.hs | apache-2.0 | 326 | 0 | 21 | 85 | 329 | 173 | 156 | 8 | 1 |
-- Copyright 2016 TensorFlow authors.
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-} -- For Fetchable (TensorExpr a)
module TensorFlow.Nodes where
import Control.Applicative (liftA2, liftA3)
import Data.Functor.Identity (Identity)
import Data.Map.Strict (Map)
import Data.Monoid ((<>))
import Data.Set (Set)
import Data.Text (Text)
import qualified Data.Map.Strict as Map
import qualified Data.Set as Set
import TensorFlow.Build
import TensorFlow.Output
import TensorFlow.Tensor
import TensorFlow.Types
import qualified TensorFlow.Internal.FFI as FFI
-- | Types that contain ops which can be run.
class Nodes t where
    -- | The set of graph nodes this value depends on.
    getNodes :: t -> Build (Set NodeName)

-- | Types that tensor representations (e.g. 'Tensor', 'ControlNode') can be
-- fetched into.
--
-- Includes collections of tensors (e.g. tuples).
class Nodes t => Fetchable t a where
    -- | Which outputs to request and how to decode the fetched data.
    getFetch :: t -> Build (Fetch a)

-- | Fetch action. Keeps track of what needs to be fetched and how to decode
-- the fetched data.
data Fetch a = Fetch
    { -- | Nodes to fetch
      fetches :: Set Text
      -- | Function to create an 'a' from the fetched data.
    , fetchRestore :: Map Text FFI.TensorData -> a
    }

-- Mapping transforms only the decoder; the fetched node set is unchanged.
instance Functor Fetch where
    fmap f (Fetch fetch restore) = Fetch fetch (f . restore)

-- Combining fetches unions their node sets and applies both decoders to
-- the same fetched data.
instance Applicative Fetch where
    pure x = Fetch Set.empty (const x)
    Fetch fetch restore <*> Fetch fetch' restore' =
        Fetch (fetch <> fetch') (restore <*> restore')

-- | Sequence a traversable of effectful monoid values and combine the
-- results (used below to union node sets).
nodesUnion :: (Monoid b, Traversable t, Applicative f) => t (f b) -> f b
nodesUnion = fmap (foldMap id) . sequenceA
-- Tuples, lists and Maybe aggregate the nodes/fetches of their elements.
instance (Nodes t1, Nodes t2) => Nodes (t1, t2) where
    getNodes (x, y) = nodesUnion [getNodes x, getNodes y]

instance (Nodes t1, Nodes t2, Nodes t3) => Nodes (t1, t2, t3) where
    getNodes (x, y, z) = nodesUnion [getNodes x, getNodes y, getNodes z]

instance (Fetchable t1 a1, Fetchable t2 a2) => Fetchable (t1, t2) (a1, a2) where
    getFetch (x, y) = liftA2 (,) <$> getFetch x <*> getFetch y

instance (Fetchable t1 a1, Fetchable t2 a2, Fetchable t3 a3)
         => Fetchable (t1, t2, t3) (a1, a2, a3) where
    getFetch (x, y, z) =
        liftA3 (,,) <$> getFetch x <*> getFetch y <*> getFetch z

instance Nodes t => Nodes [t] where
    getNodes = nodesUnion . map getNodes

instance Fetchable t a => Fetchable [t] [a] where
    getFetch ts  = sequenceA <$> mapM getFetch ts

instance Nodes t => Nodes (Maybe t) where
    getNodes = nodesUnion . fmap getNodes

instance Fetchable t a => Fetchable (Maybe t) (Maybe a) where
    getFetch = fmap sequenceA . mapM getFetch

instance Nodes ControlNode where
    getNodes (ControlNode o) = pure $ Set.singleton o

-- We use the constraint @(a ~ ())@ to help with type inference.  For example,
-- if @t :: ControlNode@, then this constraint ensures that @run t :: Session
-- ()@.  If we used @instance Fetchable ControlNode ()@ instead, then that
-- expression would be ambiguous without explicitly specifying the return type.
instance a ~ () => Fetchable ControlNode a where
    getFetch _ = return $ pure ()

-- Heterogeneous lists: nodes/fetches are accumulated element by element.
instance Nodes (ListOf f '[]) where
    getNodes _ = return Set.empty

instance (Nodes (f a), Nodes (ListOf f as)) => Nodes (ListOf f (a ': as)) where
    getNodes (x :/ xs) = liftA2 Set.union (getNodes x) (getNodes xs)

instance l ~ List '[] => Fetchable (ListOf f '[]) l where
    getFetch _ = return $ pure Nil

instance (Fetchable (f t) a, Fetchable (ListOf f ts) (List as), i ~ Identity)
         => Fetchable (ListOf f (t ': ts)) (ListOf i (a ': as)) where
    getFetch (x :/ xs) = liftA2 (\y ys -> y /:/ ys) <$> getFetch x <*> getFetch xs

instance Nodes (Tensor v a) where
    getNodes (Tensor o) = Set.singleton . outputNodeName <$> toBuild o
-- | Build a 'Fetch' for a single tensor: request its encoded output name
-- and decode the returned data, checking the element type.
-- NOTE(review): 'Map.!' is partial — the decoder assumes the runner always
-- returns data for the requested output name.
fetchTensorVector :: forall a v . (TensorType a)
                  => Tensor v a -> Build (Fetch (TensorData a))
fetchTensorVector (Tensor o) = do
    outputName <- encodeOutput <$> toBuild o
    pure $ Fetch (Set.singleton outputName) $ \tensors ->
        let tensorData = tensors Map.! outputName
            expectedType = tensorType (undefined :: a)
            actualType = FFI.tensorDataType tensorData
            -- raised when the runtime returns a different dtype than the
            -- static tensor type promises
            badTypeError = error $ "Bad tensor type: expected "
                                   ++ show expectedType
                                   ++ ", got "
                                   ++ show actualType
        in if expectedType /= actualType
               then badTypeError
               else TensorData tensorData
-- The constraint "a ~ a'" means that the input/output of fetch can constrain
-- the TensorType of each other.
instance (TensorType a, a ~ a') => Fetchable (Tensor v a) (TensorData a') where
getFetch = fetchTensorVector
instance (TensorType a, TensorDataType s a, a ~ a') => Fetchable (Tensor v a) (s a') where
getFetch t = fmap decodeTensorData <$> fetchTensorVector t
| jcberentsen/haskell | tensorflow/src/TensorFlow/Nodes.hs | apache-2.0 | 5,642 | 0 | 16 | 1,284 | 1,603 | 847 | 756 | 90 | 2 |
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TemplateHaskell #-}
----------------------------------------------------------------------------
-- |
-- Module : Web.Skroutz.Model.Base.Manufacturer
-- Copyright : (c) 2016 Remous-Aris Koutsiamanis
-- License : Apache License 2.0
-- Maintainer : Remous-Aris Koutsiamanis <ariskou@gmail.com>
-- Stability : alpha
-- Portability : non-portable
--
-- Provides the 'Manufacturer' type.
----------------------------------------------------------------------------
module Web.Skroutz.Model.Base.Manufacturer
where
import Control.DeepSeq (NFData)
import Data.Data (Data, Typeable)
import Data.Text (Text)
import GHC.Generics (Generic)
import Web.Skroutz.Model.Base.URI
import Web.Skroutz.TH
data Manufacturer = Manufacturer {
_manufacturerId :: Int
, _manufacturerName :: Text
, _manufacturerImageUrl :: Maybe URI
} deriving (Eq, Ord, Typeable, Data, Generic, Show, NFData)
makeLensesAndJSON ''Manufacturer "_manufacturer"
| ariskou/skroutz-haskell-api | src/Web/Skroutz/Model/Base/Manufacturer.hs | apache-2.0 | 1,203 | 0 | 9 | 280 | 152 | 96 | 56 | 17 | 0 |
add :: (Num a) => a -> a -> a
add x y = x + y
| jplahn/haskell-book | examples/ghci/add.hs | apache-2.0 | 46 | 0 | 7 | 17 | 37 | 19 | 18 | 2 | 1 |
{-# LANGUAGE TemplateHaskell, QuasiQuotes, OverloadedStrings #-}
module Handler.Root (
getRootR,
postRootR,
) where
import Ywitter
import Control.Applicative
import Control.Monad
import Data.Maybe
import qualified Data.Text as T
import Data.Time
getRootR :: Handler RepHtml
getRootR = do
mu <- maybeAuth
case mu of
Just (uid, u) -> do
case userName u of
'*':_ -> redirect RedirectTemporary SettingR
_ -> return ()
_ -> return ()
posts <- case mu of
Just (uid, u) -> runDB $ do
fs <- selectList [FollowFollowerEq uid] [] 0 0
ps <- selectList [PostUserIn $ uid : map (followFollowee . snd) fs] [PostDateDesc] 20 0
us <- forM ps $ \(_, p) -> get $ postUser p
return $ zip us ps
Nothing -> do
return []
cur <- liftIO $ getCurrentTime
let tweets = renderPosts posts cur
defaultLayout $ do
h2id <- lift newIdent
setTitle "ywitter homepage"
addWidget $(widgetFile "homepage")
postRootR :: Handler ()
postRootR = do
(uid, u) <- requireAuth
mcontent <- runFormPost' $ maybeStringInput "content"
when (isNothing mcontent || fromJust mcontent == "" || length (fromJust mcontent) > 140) $ do
redirect RedirectTemporary RootR
let Just content = mcontent
cur <- liftIO $ getCurrentTime
runDB $ do
insert $ Post uid (T.pack content) Nothing Nothing cur
redirect RedirectTemporary RootR
| tanakh/Ywitter | Handler/Root.hs | bsd-2-clause | 1,463 | 0 | 20 | 397 | 510 | 244 | 266 | 44 | 4 |
{-# LANGUAGE TypeFamilies, MultiParamTypeClasses #-}
{-# LANGUAGE ConstraintKinds, PolyKinds, ScopedTypeVariables, DataKinds #-}
module Control.Coeffect where
import GHC.Exts ( Constraint )
{- Coeffect parameterised comonad
Also called "indexed comonads".
For more details see "Coeffects: Unified static analysis of context-dependence"
by Petricek, Orchard, Mycroft: http://www.cl.cam.ac.uk/~dao29/publ/coeffects-icalp13.pdf
-}
{-| Specifies "parametric coeffect comonads" which are essentially comonads but
annotated by a type-level monoid formed by 'Plus' and 'Unit' -}
class Coeffect (c :: k -> * -> *) where
type Inv c (s :: k) (t :: k) :: Constraint
type Inv c s t = ()
type Unit c :: k
type Plus c (s :: k) (t :: k) :: k
{-| Coeffect-parameterised version of 'extract',
annotated with the 'Unit m' effect, denoting pure contexts -}
extract :: c (Unit c) a -> a
{-| Coeffect-parameterise version of 'extend'.
The two coeffec annotations 's' and 't' on its parameter computations
get combined in the parameter computation by 'Plus' -}
extend :: Inv c s t => (c t a -> b) -> c (Plus c s t) a -> c s b
{-| Zips two coeffecting computations together -}
class CoeffectZip (c :: k -> * -> *) where
type Meet c (s :: k) (t :: k) :: k
type CzipInv c (s :: k) (t :: k) :: Constraint
czip :: CzipInv c s t => c s a -> c t b -> c (Meet c s t) (a, b)
{-| Specifies sub-coeffecting behaviour -}
class Subcoeffect (c :: k -> * -> *) s t where
subco :: c s a -> c t a | dorchard/effect-monad | src/Control/Coeffect.hs | bsd-2-clause | 1,552 | 0 | 12 | 361 | 353 | 202 | 151 | 17 | 0 |
{-# LANGUAGE RecordWildCards #-}
import Graphics.GL
import qualified Graphics.UI.GLFW as GLFW
import Control.Monad
import Data.Bits
import Foreign
import Linear
import Cube
import Data.Time
import Halive.Utils
import Mesh
import Quad
import SetupGLFW
import Shader
import TransCube
import WBOIT
---------------------
-- Implementing WBOIT
---------------------
{-
NOTE: we need to preserve the depth buffer for
positional time-warp... is that possible?
Update: we are now blitting the opaque depth buffer into the wboit buffer,
and we could write to it (but not use depth testing) during wboit
and then blit it back again.
-}
resX, resY :: Num a => a
resX = 640
resY = 480
main :: IO a
main = do
-- Create our window
win <- reacquire 0 (setupGLFW "WBOIT" resX resY)
wboit <- createWBOIT resX resY
-- Load the shaders and geometry for our scene
cubeProgram <- createShaderProgram "test/cube.vert" "test/cube.frag"
transProgram <- createShaderProgram "test/renderPass.vert" "test/renderPass.frag"
transMVPUniform <- getShaderUniform transProgram "uMVP"
transColorUniform <- getShaderUniform transProgram "uDiffuseColor"
opaqueCube <- makeCube cubeProgram
transCube <- makeTransCube transProgram
transQuad <- makeQuad transProgram
glEnable GL_DEPTH_TEST
glClearDepth 1
glClearColor 0.0 0.0 0.0 1
-- Begin our renderloop
forever $ do
now <- realToFrac . utctDayTime <$> getCurrentTime
glGetErrors
-- Get mouse/keyboard/OS events from GLFW
GLFW.pollEvents
-- Recalculate projection and camera view
(w,h)<- GLFW.getWindowSize win
let projection = perspective 45 (fromIntegral w/fromIntegral h) 0.01 1000
view = lookAt (V3 0 2 5) (V3 0 0 (-50)) (V3 0 1 0)
-- view = lookAt (V3 0 20 5) (V3 0 0 (-50)) (V3 0 1 0)
viewProj = projection !*! view
-- Render opaque surfaces
glClear (GL_COLOR_BUFFER_BIT .|. GL_DEPTH_BUFFER_BIT)
glDepthMask GL_TRUE
let wav1 = (+ 1) . sin $ now/2
wav2 = (+ 1) . sin $ now/3 + 1
let cubeModel = mkTransformation 1 (V3 0 0 ((-29) * wav2))
cubeModel2 = mkTransformation 1 (V3 0 0 ((-30) * wav1))
renderCube opaqueCube (viewProj !*! cubeModel2)
-- Render transparent surfaces
withWBOIT wboit resX resY $ do
useProgram transProgram
let cubeModel = mkTransformation (axisAngle (V3 0 1 0) wav1) (V3 0 0 ((-29) * wav2))
glUniform4f (unUniformLocation transColorUniform) 0 1 1 0.3
renderTransCube transCube (viewProj !*! cubeModel)
drawTransQuad transQuad transColorUniform transMVPUniform viewProj (1,0,0,0.75) (-30)
drawTransQuad transQuad transColorUniform transMVPUniform viewProj (1,1,0,0.75) (-20)
-- drawTransQuad transQuad transColorUniform transMVPUniform viewProj (0,0,1,0.75) (-10)
-- drawTransQuad transQuad transColorUniform transMVPUniform viewProj (1,0,1,0.75) ((-31) * wav1)
-- drawTransQuad transQuad transColorUniform transMVPUniform viewProj (0,1,0,0.75) (-8)
GLFW.swapBuffers win
drawTransQuad :: Mesh
-> UniformLocation
-> UniformLocation
-> V4 (V4 GLfloat)
-> (GLfloat, GLfloat, GLfloat, GLfloat)
-> GLfloat
-> IO ()
drawTransQuad transQuad transColorUniform transMVPUniform viewProj (r,g,b,a) z = do
let transModel = mkTransformation 1 (V3 0 0 z)
transMVP = viewProj !*! transModel
glUniform4f (unUniformLocation transColorUniform) r g b a
uniformM44 transMVPUniform transMVP
drawMesh transQuad
| lukexi/wboit | test/TestWBOIT.hs | bsd-2-clause | 3,928 | 0 | 22 | 1,147 | 873 | 441 | 432 | 69 | 1 |
module Test.HClTest
( module X
) where
import Test.HClTest.Monad as X (Config(), HClTest())
import Test.HClTest.Monad as X hiding (timeoutFactor, Config, HClTest)
import Test.HClTest.Program as X
import Test.HClTest.Setup as X
import Test.HClTest.Trace as X
| bennofs/hcltest | src/Test/HClTest.hs | bsd-3-clause | 274 | 0 | 6 | 49 | 78 | 55 | 23 | 7 | 0 |
module Main where
import Codec.Picture
import Codec.Picture.Types
import qualified Data.ByteString.Lazy as L
import qualified Data.Vector as V
import Day25
import Graphics.Rasterific
main :: IO ()
main = do
-- p1v <- visualizePartOne
-- p2v <- visualizePartTwo
-- bs <- either error return $ encodeGifAnimation 10 LoopingForever (sizeImages 250 250 p1v)
-- L.writeFile "part-one.gif" bs
-- bs <- either error return $ encodeGifAnimation 10 LoopingForever (sizeImages 250 250 p2v)
-- L.writeFile "part-two.gif" bs
--p1 <- partOne
--print partOne
--p2 <- partTwo
print partOne
return ()
sizeImages :: Int -> Int -> [Image PixelRGB8] -> [Image PixelRGB8]
sizeImages h w = map (\i -> pixelMap dropTransparency $ renderDrawing h w (PixelRGBA8 255 255 255 255) $ drawImageAtSize (promoteImage i) 0 (V2 0 0) (fromIntegral h) (fromIntegral w))
| z0isch/advent-of-code | app/Main.hs | bsd-3-clause | 910 | 0 | 12 | 199 | 195 | 108 | 87 | 13 | 1 |
module JDec.Class.Raw.VerificationTypeInfo (
VerificationTypeInfo(
TopVariableInfo,
IntegerVariableInfo,
FloatVariableInfo,
LongVariableInfo,
DoubleVariableInfo,
NullVariableInfo,
UninitializedThisVariableInfo,
ObjectVariableInfo,
UninitializedVariableInfo),
objectVariableConstantPoolIndex,
uninitializedVariableOffset
) where
import JDec.Class.Raw.ConstantPoolIndex (ConstantPoolIndex)
-- | Specifies the verification type of one or two locations.
data VerificationTypeInfo = TopVariableInfo -- ^ Indicates that the local variable has the verification type top (⊤).
| IntegerVariableInfo -- ^ Indicates that the location contains the verification type int.
| FloatVariableInfo -- ^ Indicates that the location contains the verification type float.
| LongVariableInfo -- ^ Indicates that the location contains the verification type long. If the location is a local variable, then it must not be the local variable with the highest index and the next higher numbered local variable contains the verification type ⊤. If the location is an operand stack entry, then the current location must not be the topmost location of the operand stack and the next location closer to the top of the operand stack contains the verification type ⊤. It gives the contents of two locations in the operand stack or in the local variables.
| DoubleVariableInfo -- ^ Indicates that the location contains the verification type double. If the location is a local variable, then it must not be the local variable with the highest index and the next higher numbered local variable contains the verification type ⊤. If the location is an operand stack entry, then the current location must not be the topmost location of the operand stack and the next location closer to the top of the operand stack contains the verification type ⊤. It gives the contents of two locations in in the operand stack or in the local variables.
| NullVariableInfo -- ^ Indicates that location contains the verification type null.
| UninitializedThisVariableInfo -- ^ Indicates that the location contains the verification type uninitializedThis.
| ObjectVariableInfo { -- ^ Indicates that the location contains an instance of the class represented by the ClassConstantPoolEntry found in the constant pool table at the index given by objectVariableConstantPoolIndex.
objectVariableConstantPoolIndex :: ConstantPoolIndex -- ^ Index of the ClassConstantPoolEntry in the constant pool table
}
| UninitializedVariableInfo { -- ^ Indicates that the location contains the verification type uninitialized(offset).
uninitializedVariableOffset :: Integer -- ^ Indicates the offset, in the code array of the Code attribute that contains this StackMapTable attribute, of the new instruction that created the object being stored in the location.
} deriving Show | rel-eng/jdec | src/JDec/Class/Raw/VerificationTypeInfo.hs | bsd-3-clause | 2,841 | 0 | 8 | 424 | 113 | 84 | 29 | 28 | 0 |
module Sword.Gui where
import Control.Monad (when)
import UI.HSCurses.Curses
import UI.HSCurses.CursesHelper
import qualified Data.Map as Map
import Prelude hiding (Either(..))
import Sword.Utils
import Sword.World
import Sword.Hero
import Sword.ViewPorter
initGui :: IO (ViewPort)
initGui = do
initCurses
echo False
noDelay stdScr True
cursSet CursorInvisible
(sizeY, sizeX) <- scrSize
return ((0, 0), (sizeX, sizeY - 5))
endGui :: IO ()
endGui = endWin
getInput = do
char <- getch
case decodeKey char of
KeyChar 'k' -> return "k"
KeyChar 'j' -> return "j"
KeyChar 'h' -> return "h"
KeyChar 'l' -> return "l"
KeyChar 'K' -> return "K"
KeyChar 'J' -> return "J"
KeyChar 'H' -> return "H"
KeyChar 'L' -> return "L"
KeyChar 'q' -> return "quit"
otherwise -> return ""
castEnum = toEnum . fromEnum
drawWorld :: Maybe Hero -> ViewPort -> WorldMap -> World -> IO ()
drawWorld Nothing _ _ _ = return ()
drawWorld (Just hero) viewPort worldMap world = do
erase
sequence_ (Map.foldWithKey (drawObj viewPort) [] worldMap)
sequence_ (Map.fold drawHero [] (heros world))
sequence_ (Map.foldrWithKey drawMonster [] (monster world))
drawStats hero viewPort
drawLog (gamelog world) (0, 23)
refresh
where drawMonster x _ acc = drawFunc 'x' x : acc
drawHero h acc = (drawFunc '@' (position h)) : acc
drawFunc = drawElem viewPort
drawObj :: ViewPort -> Coord -> WorldObj -> [IO ()] -> [IO ()]
drawObj viewPort c obj acc
| obj == Wall = drawFunc '#' c : acc
| obj == Tree = drawFunc '4' c : acc
| obj == Ground = drawFunc '.' c : acc
| otherwise = drawFunc ' ' c : acc
where drawFunc = drawElem viewPort
drawElem :: ViewPort -> Char -> Coord -> IO ()
drawElem _ ' ' _ = return ()
drawElem viewPort char coord =
when (insideViewPort viewPort coord) $
drawChar char (subtractCoords coord (fst viewPort))
drawChar :: Char -> Coord -> IO ()
drawChar ' ' _ = return ()
drawChar char (x, y) = mvAddCh y x (castEnum char)
drawString :: String -> Coord -> IO ()
drawString [] _ = return ()
drawString (x:xs) (a, b) = do
drawChar x (a, b)
drawString xs (a + 1, b)
drawLog :: [String] -> Coord -> IO ()
drawLog [] _ = return ()
drawLog (x:xs) (a ,b) = do
drawString x (a, b)
drawLog xs (0, b + 1)
drawStats :: Hero -> ViewPort -> IO ()
drawStats (Hero (x,y) life name maxLife _ _) vp =
drawString (name ++ " " ++ show (x, y) ++ " Life: " ++ show life ++ "% ViewPort: " ++ show vp) (0, 22)
| kmerz/the_sword | src/Sword/Gui.hs | bsd-3-clause | 2,475 | 3 | 13 | 544 | 1,107 | 550 | 557 | 75 | 10 |
module Handler.User.Permissions
( Permissions (..)
, reqPermissions
, optPermissions
, Permission (..)
, editor
, commenter
, reqPermission
) where
import Import
import Data.Text as T
import Yesod.Auth
data Permissions = Permissions
{ canComment :: Bool
, canEdit :: Bool
}
instance Default Permissions where
def = Permissions False False
mkPermissions :: UserId -> Entity User -> Permissions
mkPermissions author (Entity userId _) = Permissions
{ canComment = True
, canEdit = userId == author
}
reqPermissions :: UserId -> Handler Permissions
reqPermissions author = mkPermissions author <$> requireAuth
optPermissions :: UserId -> Handler Permissions
optPermissions author = maybe def (mkPermissions author) <$> maybeAuth
type Permission = (Permissions -> Bool, AppMessage)
editor :: Permission
editor = (canEdit, MsgPermissionDeniedEdit)
commenter :: Permission
commenter = (canEdit, MsgPermissionDeniedComment)
reqPermission :: Permission -> UserId -> Handler a -> Handler a
reqPermission (selector, msg) author action = do
permissions <- reqPermissions author
if selector permissions then action else permissionDeniedI msg
| pxqr/bitidx | Handler/User/Permissions.hs | bsd-3-clause | 1,224 | 0 | 8 | 250 | 322 | 179 | 143 | 33 | 2 |
{-# LANGUAGE FlexibleInstances, GADTs, GeneralizedNewtypeDeriving, DeriveDataTypeable, MultiParamTypeClasses, ScopedTypeVariables, LambdaCase, InstanceSigs, FlexibleContexts #-}
module Web.Chione
( -- * main things
clean
-- * key directory names
, build_dir
, html_dir
, admin_dir
-- * Build target detection
, findBuildTargets
-- * Utils
, makeHtmlRedirect
-- * Link and URL issues
, findLinks
, LinkData(..)
, getURLResponse
, URLResponse(..)
-- * Building content
, generateStatus
-- * KURE rewrites
, findURL
, mapURL
, injectHTML
, insertTeaser
, module Web.Chione -- include everything right now
) where
import Development.Shake hiding (getDirectoryContents, doesDirectoryExist)
import Development.Shake.FilePath
import Development.Shake.Classes
import System.Directory hiding (doesFileExist)
import qualified System.Directory as Directory
import Control.Monad
import qualified Control.Exception as E
import System.Posix (getSymbolicLinkStatus, isDirectory)
import Control.Arrow
import Control.Applicative hiding ((*>))
import Data.List
import Data.Char
import Data.Time.Clock
import System.Exit
import Language.KURE.Walker
import Language.KURE.Debug
import qualified Language.KURE as KURE
import Language.KURE hiding (apply)
import System.Process
import Text.HTML.KURE
import Data.Monoid
import qualified Data.ByteString as BS
import qualified Data.ByteString.UTF8 as UTF8
import Control.Concurrent.ParallelIO.Local hiding (parallelInterleaved)
import Control.Concurrent.ParallelIO.Local (parallelInterleaved)
-- parallelInterleaved _ = sequence
-- | Name of location for all generated files (the root scratch directory).
-- Can always be removed safely, and rebuilt; nothing hand-written lives here.
build_dir :: String
build_dir = "_make"
-- | Name of location of our target HTML directory — the finished website
-- that gets uploaded. Lives under 'build_dir'.
html_dir :: String
html_dir = build_dir </> "html"
-- | Name of location of our admin HTML directory (status/report pages,
-- not part of the published site). Lives under 'build_dir'.
admin_dir :: String
admin_dir = build_dir </> "admin"
-- | Name of location of our HTML contents directory — intermediate page
-- bodies that 'insertTeaser' reads teasers from. Lives under 'build_dir'.
contents_dir :: String
contents_dir = build_dir </> "contents"
-- | 'findBuildTargets' looks to find the names and build instructions for
-- the final website. The first argument is the subdirectory to look into,
-- the second is the suffix to find.
-- | List every file under @subdir@ whose name ends in the given @suffix@.
-- Uses Shake's @\/\/*@ glob pattern, so files at any depth below @subdir@
-- are matched.
findBuildTargets :: String -> String -> IO [String]
findBuildTargets subdir suffix =
        filter matchesPattern <$> getRecursiveContents subdir
  where
        -- e.g. "site//*.markdown" for subdir "site", suffix "markdown"
        matchesPattern = ((subdir ++ "//*." ++ suffix) ?==)
-- (local to this module.)
-- From RWH, first edition, with handle from Johann Giwer.
-- | All files (not directories) under the given root, recursively.
-- Any I/O failure (unreadable or missing directory) yields @[]@ for that
-- subtree rather than propagating an exception.
getRecursiveContents :: FilePath -> IO [FilePath]
getRecursiveContents topdir = E.handle ignoreErrors $ do
        entries <- getDirectoryContents topdir
        let visible = [ e | e <- entries, e /= ".", e /= ".." ]
        concat <$> mapM descend visible
  where
        ignoreErrors E.SomeException {} = return []
        -- Recurse into directories; report plain files as singleton lists.
        descend entry = do
                let full = topdir </> entry
                status <- getSymbolicLinkStatus full
                if isDirectory status
                   then getRecursiveContents full
                   else return [full]
------------------------------------------------------------------------------------
-- | Succeed with the value of an attribute that carries a URL; fail on any
-- other attribute. URL-carrying attributes are @href@ on @a@ or @link@
-- elements, and @src@ on @script@ or @img@ elements.
findURL :: (Monad m) => Translate Context m Attr String
findURL = do (name, value) <- attrT (,)
             cxt@(Context (c:_)) <- contextT
             parent <- KURE.apply getTag cxt c
             if (name, parent) `elem` urlAttrs
                then return value
                else fail "no correct context"
  where
    -- (attribute name, enclosing element tag) pairs that denote URLs
    urlAttrs = [ ("href","a"), ("href","link")
               , ("src","script"), ("src","img") ]
-- | Rewrite every URL-carrying attribute (same set as 'findURL') by
-- applying the given function to its value; fails on other attributes.
mapURL :: (Monad m) => (String -> String) -> Rewrite Context m Attr
mapURL f = do (name, value) <- attrT (,)
              cxt@(Context (c:_)) <- contextT
              parent <- KURE.apply getTag cxt c
              if (name, parent) `elem` urlAttrs
                 then return (attrC name (f value))
                 else fail "no correct context"
  where
    -- (attribute name, enclosing element tag) pairs that denote URLs
    urlAttrs = [ ("href","a"), ("href","link")
               , ("src","script"), ("src","img") ]
-- | Replace given id (2nd argument) with an HTML file (filename is first argument).
--
-- > let tr = inject "Foo.hs" "contents"
--
-- DEAD CODE
-- | Replace the contents of the element whose @id@ attribute equals
-- @idName@ with the parsed contents of @fileName@. Walks the tree top-down
-- ('prunetdR'), so only the outermost matching element is rewritten.
-- NOTE: marked DEAD CODE at the definition site above.
injectHTML :: String -> String -> R HTML
injectHTML fileName idName = extractR' $ prunetdR (promoteR (anyElementHTML fn))
  where
          -- Succeeds only on an element carrying the wanted id; the pattern
          -- match on getAttr "id" fails (and thus skips) elements without one.
          fn :: T Element HTML
          fn = do nm <- getAttr "id"
                  debugR 100 $ show ("inject",idName,nm)
                  if nm == idName
                  then translate $ \ _ _ -> do
                          -- Read and parse the replacement file inside the
                          -- build monad, so Shake can track the dependency.
                          file <- liftActionFPGM $ readFile' fileName
                          return $ parseHTML fileName file
                          -- read the file
                  else fail "no match"
-- | Expand a teaser anchor: an @\<a class="teaser" href="/page"\>@ element
-- is replaced by a @div.teaser-copy@ containing the teaser text pulled from
-- the target page's pre-built contents file, followed by a "read more"
-- style link (icon + the original anchor body) back to the page itself.
insertTeaser :: T Element HTML
insertTeaser = do
        -- Only fires on <a class="teaser" href="/..."> elements; each of
        -- these pattern-match binds fails (skips the element) otherwise.
        "a" <- getTag
        "teaser" <- getAttr "class"
        ('/':url) <- getAttr "href"
        inside <- getInner
        -- The teaser body lives in the contents build directory, with the
        -- same path as the page but an .html extension.
        let sub_content = contents_dir </> replaceExtension url "html"
        inside_content <- contextfreeT $ \ _ -> liftActionFPGM $ do
                -- Declare the Shake dependency before reading the file.
                need [ sub_content ]
                sub_txt <- readFile' sub_content
                let sub_html = parseHTML sub_content sub_txt
                -- Extract the first div.teaser; fall back to an inline
                -- error message in the generated page if there is none.
                applyFPGM (extractT' (onetdT (promoteT findTeaser))
                            <+ return (text ("Can not find teaser in " ++ sub_content)))
                          sub_html
        return $ element "div" [attr "class" "teaser-copy"]
               $ mconcat
                  [ inside_content
                  , element "a" [ attr "href" ('/':url) ]
                    $ mconcat
                       [ element "i" [attr "class" "icon-hand-right"] zero
                       , text " "
                       , inside
                       ]
                  ]
  where
          -- The inner HTML of the target page's <div class="teaser">.
          findTeaser :: T Element HTML
          findTeaser = do
                  "div" <- getTag
                  "teaser" <- getAttr "class"
                  getInner
-----------------------------------------------------------------------
-- Build a redirection page.
-- | Write a tiny HTML page at @out@ that immediately (0 seconds) redirects
-- the browser to @target@ via a meta-refresh tag.
makeHtmlRedirect :: String -> String -> Action ()
makeHtmlRedirect out target =
        writeFile' out redirectPage
  where
        redirectPage = "<meta http-equiv=\"Refresh\" content=\"0; url='" ++ target ++ "'\">\n"
-----------------------------------------------
-- | Whether the locally-built page matches the copy already uploaded to the
-- remote site (see 'findLinks'). Constructor order matters: 'Enum' is derived.
data UploadedPage
        = NotAttempted          -- ^ no comparison was made (rendered "-")
        | NotPresent            -- ^ page missing remotely (rendered as a cross)
        | Different             -- ^ remote copy differs from local build (!)
        | Same                  -- ^ remote copy is byte-identical (tick)
        deriving (Show,Enum)
-- | Per-page link report produced by 'findLinks'. The type parameter is the
-- container used for the two URL lists: @[String]@ when freshly scanned,
-- @[(Bool,String)]@ once each link has been checked for liveness.
data LinkData a = LinkData
        { ld_pageName :: String         -- ^ page name, relative to the site root
        , ld_bytes :: Int       -- bytes in file
        , ld_wc :: Int          -- words of text
        , ld_match :: UploadedPage -- does the web version match?
        , ld_localURLs :: a     -- ^ links within this site (site-root relative)
        , ld_remoteURLs :: a    -- ^ links to external http/https resources
        }
        deriving Show
-- | Mapping over a 'LinkData' transforms only the two URL collections
-- ('ld_localURLs' and 'ld_remoteURLs'); the page statistics are untouched.
instance Functor LinkData where
    fmap f (LinkData page bytes wordCount uploaded locals remotes) =
        LinkData page bytes wordCount uploaded (f locals) (f remotes)
-- | Reads an HTML file, finds all the local and global links.
-- The local links are normalized to the site-root. Also records the page's
-- size in bytes, its word count, and whether the already-uploaded copy at
-- @prefix/name@ matches the local build byte-for-byte.
findLinks :: String -> String -> IO (LinkData [String])
findLinks prefix name = do
        txt <- BS.readFile (html_dir </> name)
        let tree = parseHTML name (UTF8.toString txt)
        -- Collect every URL-carrying attribute value (see 'findURL'),
        -- and the word count of every text node, via KURE traversals.
        let urls = fromKureM error $ KURE.apply (extractT $ collectT $ promoteT' $ findURL) mempty tree
        let txt_lens = fromKureM error $ KURE.apply (extractT $ collectT $ promoteT' $ textT (length . words)) mempty tree
        -- now try get the remote version, to compare against the local build
        remote <- getURLContent $ prefix ++ "/" ++ name
        let rem_stat = case remote of
                         Nothing -> NotPresent
                         Just txt' | txt' == txt -> Same
                                   | otherwise -> Different
        -- What about ftp?
        let isRemote url = ("http://" `isPrefixOf` url)
                        || ("https://" `isPrefixOf` url)
        -- mailto: links and pure "."/"/" fragments are not checkable links.
        let isNotLink url = ("mailto:" `isPrefixOf` url)
                         || (all (`elem` "./") url)     -- not quite right
        -- Local links are re-rooted relative to the page's own directory.
        let locals = [ takeDirectory name </> url
                     | url <- urls
                     , not (isNotLink url)
                     , not (isRemote url)
                     ]
        let globals = [ url
                      | url <- urls
                      , not (isNotLink url)
                      , isRemote url
                      ]
        return $ LinkData name
                          (BS.length txt)
                          (sum txt_lens)
                          rem_stat
                          locals
                          globals
-- | Result of probing an external URL with curl: every HTTP status code
-- observed (one per redirect hop; empty when no parseable status came back)
-- and the wall-clock time of the probe in milliseconds.
data URLResponse
        = URLResponse { respCodes :: [Int], respTime :: Int }
        deriving Show
-- | Check external link for viability. Returns time in ms, and list of codes
-- returned; redirections are followed. A HEAD request is tried first; if it
-- yields a single 4xx code (some servers reject HEAD) the check retries with
-- a real GET, and the codes and elapsed times of both attempts are combined.
getURLResponse :: String -> IO URLResponse
-- getURLResponse url | "http://scholar.google.com/" `isPrefixOf` url = return $ URLResponse [200] 999
-- These hosts refuse/throttle automated checkers, so they are whitelisted
-- with a fake 200 response instead of being probed.
getURLResponse url | "http://dl.acm.org/" `isPrefixOf` url = return $ URLResponse [200] 999
getURLResponse url | "http://doi.acm.org/" `isPrefixOf` url = return $ URLResponse [200] 999
getURLResponse url | "http://dx.doi.org/" `isPrefixOf` url = return $ URLResponse [200] 999
getURLResponse url | "http://portal.acm.org/" `isPrefixOf` url = return $ URLResponse [200] 999
getURLResponse url = do
        headResp <- timedCurl ["-A","Other","-L","-m","10","-s","--head",url]
                              headCodes
        case respCodes headResp of
          [n] | n `div` 100 == 4 -> do
                -- HEAD was rejected (4xx): retry with a full GET and report
                -- the codes and timings of both attempts combined.
                getResp <- timedCurl ["-A","Other","-L","-m","10","-s",
                                      "-o","/dev/null","-i","-w","%{http_code}",
                                      url]
                                     bodyCodes
                return $ URLResponse (respCodes headResp ++ respCodes getResp)
                                     (respTime headResp + respTime getResp)
          _ -> return headResp
  where
        -- Run curl with the given arguments, parse status codes out of its
        -- stdout, and measure wall-clock time in milliseconds. The exit code
        -- and stderr are deliberately ignored: an empty code list already
        -- signals failure to the caller.
        timedCurl args parse = do
                tm1 <- getCurrentTime
                (_exit, out, _err) <- readProcessWithExitCode "curl" args ""
                tm2 <- getCurrentTime
                return $ URLResponse (parse out)
                                     (floor (diffUTCTime tm2 tm1 * 1000))
        -- Codes from "--head" output: one "HTTP/1.1 <code> ..." line per hop.
        headCodes = extractCodes statusOf
          where statusOf ("HTTP/1.1":n:_) | all isDigit n = [read n :: Int]
                statusOf _ = []
        -- Code written by "-w %{http_code}": a line of bare digits.
        bodyCodes = extractCodes statusOf
          where statusOf (n:_) | all isDigit n = [read n :: Int]
                statusOf _ = []
        -- Shared plumbing: strip CRs, split into lines of words, collect.
        extractCodes statusOf = concatMap (statusOf . words)
                              . lines
                              . filter (/= '\r')
----------------------------------------------------------
-- | Fetch the raw body of a URL with @curl@ (following redirects, 5 second
-- timeout). Returns 'Nothing' when curl exits non-zero or anything throws.
getURLContent :: String -> IO (Maybe BS.ByteString)
{-
getURLContent url = do
        (res,out,err) <- readProcessWithExitCode "curl"
                        ["-A","Other","-L","-m","5","-s",
                         url]
                        ""  `E.catch` (\ (e :: E.SomeException) -> do
                                              print e
                                              return (ExitFailure 0,"",""))
        case res of
          ExitSuccess -> return (Just out)
          _ -> return Nothing
-}
getURLContent url = (do
        -- runInteractiveProcess rather than readProcessWithExitCode so the
        -- body can be read as a strict ByteString directly.
        (inp,out,err,pid) <- runInteractiveProcess "curl"
                        ["-A","Other","-L","-m","5","-s",
                         url]
                        Nothing
                        Nothing
        -- NOTE(review): inp and err handles are never closed explicitly —
        -- presumably reclaimed on GC; verify if handle exhaustion shows up.
        txt <- BS.hGetContents out
        res <- waitForProcess pid
        case res of
          ExitSuccess -> return (Just txt)
          _ -> return Nothing)
     -- Any exception (e.g. curl missing from PATH) is reported and mapped
     -- to Nothing rather than propagated.
     `E.catch` (\ (e :: E.SomeException) -> do
                        print e
                        return Nothing)
-- Case sensitive version of doesFileExist. Important on OSX, which ignores
-- case when resolving paths while the web server will not: we walk up the
-- directory chain and require each component to appear, with exactly its
-- spelling, in its parent's directory listing.
doesCasedFileExist :: String -> IO Bool
doesCasedFileExist file = do
    ok <- Directory.doesFileExist file
    if not ok then return False else recUp file
  where
    recUp "." = return True
    recUp path
        -- Reached the filesystem root ("/"): nothing above left to check.
        -- Without this case, absolute paths would look up takeFileName "/"
        -- (i.e. "") in the root listing and always come back False.
        | parent == path = return True
        | otherwise = do
            siblings <- getDirectoryContents parent
            if takeFileName path `elem` siblings
                then recUp parent
                else return False
      where parent = takeDirectory path
----------------------------------------------------------
-- | Build the admin "status" page: scan every page of the site and probe
-- every external link, then render a report with three sections — a summary
-- table, a per-page table (size, word count, upload status, link health),
-- and a per-external-URL table (HTTP status codes and response time).
-- @prefix@ is the remote site root used to compare the local build against
-- the uploaded copy; @files@ are the site-relative HTML page names.
generateStatus :: String -> [String] -> IO HTML
generateStatus prefix files = do
{-
        let files = [ nm0
                    | nm0 <- inp
                    , "//*.html" ?== nm0
                    ]
        need [ html_dir </> file
             | file <- files
             ]
-}
        -- Scan all pages concurrently (32-way pool) for their links.
        links <- withPool 32
                  $ \ pool -> parallelInterleaved pool
                      [ findLinks prefix file
                      | file <- files
                      ]
        -- Keep only the internal link targets that exist on disk, using the
        -- case-sensitive check so the (case-sensitive) web server agrees.
        good_local_links <- liftM concat $ sequence
                [ do b <- doesCasedFileExist $ (html_dir </> file)
                     if b then return [file]
                          else return []
                | file <- nub (concatMap ld_localURLs links)
                ]
        -- sh -c 'curl -m 1 -s --head http://www.chalmers.se/cse/EN/people/persson-anders || echo ""'
        -- -L <= redirect automatically
{-
        let classify (x:xs) = case words x of
                 ("HTTP/1.1":n:_) | all isDigit n -> classifyCode (read n) xs
                 _ -> []
            classify _ = []
            classifyCode :: Int -> [String] -> String
            classifyCode n xs | n >= 300 && n < 400 = if again == unknown
                                                      then show n
                                                      else again
                where again = classify xs
            classifyCode n _ = show n
-}
        let fake = False        -- NOTE(review): unused binding, kept as-is
        -- Probe (up to 500 of) the distinct external URLs, concurrently.
        external_links <- withPool 32
                  $ \ pool -> parallelInterleaved pool
                      [ do resp <- getURLResponse url
                           putStrLn $ "examining " ++ url ++ " ==> " ++ show resp
                           return (url,resp)
                      | url <- take 500 $ nub (concatMap ld_remoteURLs links)
                      ]
--        liftIO$ print $ external_links
{-
   curl -s --head http://www.haskell.org/
HTTP/1.1 307 Temporary Redirect
Date: Wed, 02 Jan 2013 02:51:59 GMT
Server: Apache/2.2.9 (Debian) PHP/5.2.6-1+lenny13 with Suhosin-Patch
Location: http://www.haskell.org/haskellwiki/Haskell
Vary: Accept-Encoding
Content-Type: text/html; charset=iso-8859-1
orange:fpg-web andy$ curl -s --head http://www.haskell.org/
-}
        -- bad links get False, good links get True
        let findGoodLinks :: LinkData [String] -> LinkData [(Bool,String)]
            findGoodLinks link = link
                { ld_localURLs = map (\ x -> (x `elem` good_local_links,x)) $ ld_localURLs link
                , ld_remoteURLs = map (\ url -> case lookup url external_links of
                                                  Nothing -> error "should never happen! (all links looked at)"
                                                  Just resp -> (goodLinkCode resp,url))
                                $ ld_remoteURLs link
                }
            -- Plain count of a URL list, as HTML text.
            markupCount :: [a] -> HTML
            markupCount = text . show . length
            -- Count of the *failing* links in a checked list, as a
            -- green (zero failures) or red badge.
            markupCount' :: [(Bool,a)] -> HTML
            markupCount' xs = element "span" [attr "class" $ "badge " ++ label] $ text (show len)
              where len = length $ filter (\ (b,_) -> not b) xs
                    label = if len == 0 then "badge-success" else "badge-important"
        -- Bootstrap badge helper for the upload-status column.
        let up txt tag = element "span" [attr "class" $ "badge badge-" ++ tag] txt
        let showUpload NotAttempted = text "-"
            showUpload NotPresent = up tick "important"
               where
                  tick = element "i" [attr "class" "icon-remove icon-white"] $ zero -- icon-white"] $ zero
            showUpload Different = up tick "warning"
               where
                  tick = element "i" [attr "class" "icon-refresh icon-white"] $ zero -- icon-white"] $ zero
            showUpload Same = up tick "success"
               where
                  tick = element "i" [attr "class" "icon-ok icon-white"] $ zero -- icon-white"] $ zero
        -- Number of broken links (local + remote) on one checked page.
        let errorCount page = length
                [ ()
                | (False,_) <- ld_localURLs page ++ ld_remoteURLs page
                ]
        let checked_links = map findGoodLinks links
            br = element "br" [] mempty
        -- Per-page table, worst pages (most broken links) first.
        let page_tabel = element "div" [attr "class" "page-table"]
                       $ element "table" [] $ mconcat $
                [ element "tr" [] $ mconcat
                  [ element "th" [] $ text $ "#"
                  , element "th" [] $ text $ "Page Name"
                  , element "th" [attr "style" "text-align: right"] $ text $ "size"
                  , element "th" [attr "style" "text-align: right"] $ text $ "words"
                  , element "th" [attr "style" "text-align: center"] $ text $ "up"
                  , element "th" [attr "style" "text-align: right"] $ mconcat [text "local",br,text "links"]
                  , element "th" [attr "style" "text-align: right"] $ mconcat [text "extern",br,text "links"]
                  , element "th" [attr "style" "text-align: right"] $ mconcat [text "local",br,text "fail"]
                  , element "th" [attr "style" "text-align: right"] $ mconcat [text "extern",br,text "fail"]
                  , element "th" [attr "style" "text-align: center"] $ mconcat [text "bad links"]
                  ]
                ] ++
                [ element "tr" [] $ mconcat
                  [ element "td" [attr "style" "text-align: right"] $ text $ show n
                  , element "td" []
                        $ element "a" [attr "href" (ld_pageName page) ]
                        $ text $ shorten 50 $ ld_pageName page
                  , element "td" [attr "style" "text-align: right"] $ text $ show $ ld_bytes page
                  , element "td" [attr "style" "text-align: right"] $ text $ show $ ld_wc page
                  , element "td" [attr "style" "text-align: center"] $ showUpload $ ld_match page
                  , element "td" [attr "style" "text-align: right"] $ markupCount $ ld_localURLs $ page
                  , element "td" [attr "style" "text-align: right"] $ markupCount $ ld_remoteURLs $ page
                  , element "td" [attr "style" "text-align: right"] $ markupCount' $ ld_localURLs $ page
                  , element "td" [attr "style" "text-align: right"] $ markupCount' $ ld_remoteURLs $ page
                  , element "td" [] $ mconcat
                        [ text bad <> br
                        | (False,bad) <- ld_localURLs page ++ ld_remoteURLs page
                        ]
                  ]
                | (n,page) <- [1..] `zip`
                                ( sortBy (\ a b -> errorCount b `compare` errorCount a)
                                $ sortBy (\ a b -> ld_pageName a `compare` ld_pageName b)
                                $ checked_links
                                )
{-
                | (n,page,page_bad,page_bad') <- zip4 [1..]
                                        (map (fmap markupCount) links)
                                        (map (fmap markupCount') bad_links)
                                        bad_links
-}
                ]
        -- Render a URL's status codes as colored badges; no codes at all is
        -- shown as "..." (slow/timed out) or "!" (failed quickly).
        let colorURLCode :: URLResponse -> HTML
            colorURLCode (URLResponse [] n) =
                        element "span" [attr "class" $ "badge badge-important"]
                      $ text $ if n > 3000
                               then "..."
                               else "!"
--                      $ element "i" [attr "class" "icon-warning-sign icon-white"]
--                      $ text ""     -- intentionally
            colorURLCode resp@(URLResponse xs _) =
                    mconcat $ [ element "span" [attr "class" $ "badge " ++ label] $ text $ show x
                              | x <- xs
                              ]
               where label = if goodLinkCode resp
                             then "badge-success"
                             else "badge-important"
        -- Orderings for the external-URL table: slowest and broken last.
        let timing (_,URLResponse _ t1) (_,URLResponse _ t2) = t1 `compare` t2
        let correctness (_,u1) (_,u2) = goodLinkCode u1 `compare` goodLinkCode u2
        -- External-URL table: one row per probed URL.
        let link_tabel = element "div" [attr "class" "link-table"]
                       $ element "table" [] $ mconcat $
                [ element "tr" [] $ mconcat
                  [ element "th" [] $ text $ "#"
                  , element "th" [] $ text $ "External URL"
                  , element "th" [attr "style" "text-align: center"] $ mconcat [text "HTTP",br,text "code(s)"]
                  , element "th" [attr "style" "text-align: right"] $ mconcat [text "time",br,text "ms"]
                  ]
                ] ++
                [ element "tr" [] $ mconcat
                  [ element "td" [attr "style" "text-align: right"] $ text $ show n
                  , element "td" []
                        $ element "a" [attr "href" url ]
                        $ text $ shorten 72 $ url
                  , element "td" [attr "style" "text-align: right"]
                        $ colorURLCode resp
                  , element "td" [attr "style" "text-align: right"] $ text $ show tm
                  ]
                | (n,(url,resp@(URLResponse _ tm))) <- zip [1..]
                                                     $ sortBy correctness
                                                     $ sortBy timing
                                                     $ external_links
                ]
        -- Bootstrap page scaffolding around the whole report.
        let f = element "div" [attr "class" "row"] . element "div" [attr "class" "span10 offset1"]
        -- Headline numbers: page count, URL count, broken-link totals.
        let summary_table =
                element "div" [attr "class" "summary-table"]
                      $ element "div" [attr "class" "row"] . element "div" [attr "class" "span4"]
                      $ element "table" [] $ mconcat $
                [ element "tr" [] $ mconcat
                  [ element "th" [] $ text title
                  , element "td" [attr "style" "text-align: right"] $ text $ txt ]
                | (title,txt) <-
                        [ ("Local Pages", show $ length $ links)
                        , ("External Links",show $ length $ external_links)
                        , ("Broken Internal Links",show $ length $ nub
                                                        $ filter (not . fst)
                                                        $ concat
                                                        $ map ld_localURLs checked_links)
                        , ("Broken External Links",show $ length $
                                 [ ()
                                 | (_,url) <- external_links
                                 , not (goodLinkCode url)])
                        ]
                ]
        return $ f $ mconcat
                [ element "h2" [] $ text "Summary"
                , summary_table
                , element "h2" [] $ text "Pages"
                , page_tabel
                , element "h2" [] $ text "External URLs"
                , link_tabel
                ]
{-
findURL :: (Monad m) => Translate Context m Node String
findURL = promoteT $ do
(nm,val) <- attrT (,)
cxt@(Context (c:_)) <- contextT
tag <- KURE.apply getTag cxt c
case (nm,[tag]) of
("href","a":_) -> return val
("href","link":_) -> return val
("src","script":_) -> return val
("src","img":_) -> return val
_ -> fail "no correct context"
-}
-- | Truncate a string so the result is at most @n@ characters,
-- replacing the dropped tail with an ellipsis.  A string of exactly
-- @n@ characters now fits and is returned unchanged (the original
-- used @<@, which needlessly truncated exact-fit strings).
shorten :: Int -> String -> String
shorten n xs | length xs <= n = xs
             | otherwise      = take (n - 3) xs ++ "..."
---------------------------------------------------------
-- Call with the path to the wrapper template file,
-- and the directory depth of the page being wrapped.
-- | Rewrite that wraps the incoming HTML inside the template stored at
-- 'fullPath'.  The template element carrying @id="contents"@ is
-- replaced by the page being rewritten.  'count' is the directory
-- depth of the output page: template-relative URLs (@../foo@) are
-- re-prefixed with 'count' copies of @../@ so they still resolve from
-- the page's location.
wrapTemplateFile :: String -> Int -> R HTML
wrapTemplateFile fullPath count = rewrite $ \ c inside -> do
        -- Read the template through the Shake action layer, so the
        -- template file is a tracked build dependency.
        src <- liftActionFPGM $ readFile' fullPath
        let contents = parseHTML fullPath src
        let local_prefix nm = concat (take count (repeat "../")) ++ nm
        let normalizeTplURL nm
                -- should really check for css, js, img, etc.
                | "../" `isPrefixOf` nm = local_prefix (dropDirectory1 nm)
                | otherwise = nm
        -- Succeeds only on the element whose id is "contents"; that
        -- element is replaced wholesale by the wrapped page.
        let fn = do
                "contents" <- getAttr "id"
                return inside
        -- First fix up template-relative URLs everywhere, then splice
        -- the page into the contents slot.
        let prog = extractR' (tryR (prunetdR (promoteR $ mapURL normalizeTplURL)))
               >>> extractR' (prunetdR (promoteR (anyElementHTML fn)))
        KURE.apply prog mempty contents
---------------------------------------------------------
-- | Build rule for @status.html@ under @dir@: collect every generated
-- HTML page, make sure each is built, then run the link checker over
-- them and write the resulting report.  'prefix' is the URL prefix
-- handed to 'generateStatus' when probing pages.
makeStatus :: String -> String -> Rules ()
makeStatus prefix dir = ("_make" </> dir </> "status.html" ==) ?> \ out -> do
        alwaysRerun -- we want to retest these each time
        contents :: [String] <- targetPages
        -- The status page must not depend on (or check) itself.
        let contents' = filter (/= "status.html") $ contents
        let files = [ nm0
                    | nm0 <- contents'
                    , "//*.html" ?== nm0
                    ]
        -- Every page must exist before it can be checked.
        need [ html_dir </> file
             | file <- files
             ]
        status <- liftIO $ generateStatus prefix files
        writeFileChanged out $ show $ status
-------------------------------------------------------------------------
-- | Remove the build directory and everything beneath it, doing
-- nothing when it does not exist.
clean :: IO ()
clean = do
    exists <- doesDirectoryExist build_dir
    when exists (removeDirectoryRecursive build_dir)
-------------------------------------------------------------------------
-- | A build target: the page name (relative, without the @_make/html@
-- prefix) paired with the Shake rule that produces it.
data MyURL = MyURL String -- name of target (without the _make/html)
                   (Rules ()) -- The rule to build this target

-- Shows only the target name; the rule has no useful rendering.
instance Show MyURL where
        show = urlName
-- | Construct a 'MyURL' whose rule fires exactly when Shake asks for
-- this target's path underneath 'html_dir'.
buildURL :: String -> (String -> Action ()) -> MyURL
buildURL target act = MyURL target rule
  where rule = (== (html_dir </> target)) ?> act

-- | The Shake rule packaged inside a 'MyURL'.
urlRules :: MyURL -> Rules ()
urlRules (MyURL _ r) = r

-- | The target name packaged inside a 'MyURL'.
urlName :: MyURL -> String
urlName (MyURL nm _) = nm
-- | Target produced by a verbatim copy: strip the two leading build
-- directories from the output path to recover the source file, then
-- copy it into place (tracking the dependency).
copyPage :: String -> MyURL
copyPage urlFile = buildURL urlFile copy
  where
    copy out = copyFile' (dropDirectory1 (dropDirectory1 out)) out
-- | Target produced by running an HTML rewriter over a source page
-- found under @srcDir@ inside the build directory.
htmlPage :: String -> String -> R HTML -> MyURL
htmlPage htmlFile srcDir processor = buildURL htmlFile $ \ out -> do
    let srcName = build_dir </> srcDir </> dropDirectory1 (dropDirectory1 out)
    -- Track the source page as a dependency before reading it.
    need [srcName]
    raw <- readFile' srcName
    processed <- applyFPGM processor (parseHTML srcName raw)
    writeFile' out (show processed)
-- | Ask the redirect oracle where the given page should point.
getRedirect :: String -> Action String
getRedirect htmlFile = askOracle (Redirect' htmlFile)
-- | Install the build rule for every target, demand that all targets
-- be built, and register the oracle that answers 'targetPages'.
chioneRules :: [MyURL] -> Rules ()
chioneRules urls = do
    mapM_ urlRules urls
    action (need [ html_dir </> nm | MyURL nm _ <- urls ])
    addOracle (\ Targets{} -> return [ nm | MyURL nm _ <- urls ])
    return ()
-- | Oracle question: "what are all the target page names?"; answered
-- by the oracle installed in 'chioneRules'.
newtype Targets = Targets () deriving (Show,Typeable,Eq,Hashable,Binary,NFData)
-- | List of all target pages, as answered by the 'Targets' oracle.
targetPages :: Action [String]
targetPages = askOracle (Targets ())
----------------------
-- | Oracle question: "where does this page redirect to?".  Note the
-- primed name: the type is 'Redirect' but the constructor is 'Redirect''.
newtype Redirect = Redirect' String deriving (Show,Typeable,Eq,Hashable,Binary,NFData)
-- | Install the oracle that resolves page redirections from the given
-- association list.  Asking about a page with no entry is fatal.
addRedirectOracle :: [(String,String)] -> Rules ()
addRedirectOracle db = do
    addOracle $ \ (Redirect' htmlFile) ->
        maybe (error $ "unknown redirection for file " ++ show htmlFile)
              return
              (lookup htmlFile db)
    return ()
-- | Target that is a pure redirect page.  Requires 'addRedirectOracle'
-- to have been installed, so the destination can be looked up.
redirectPage :: String -> MyURL
redirectPage htmlFile = buildURL htmlFile $ \ out ->
    getRedirect htmlFile >>= makeHtmlRedirect out
----------------------
-- | Turn a site-absolute URL (leading @/@) into a URL relative to a
-- page sitting @n@ directories deep, by prefixing @n@ copies of @../@.
-- An extension-less absolute path is taken to be a page and gets a
-- @.html@ extension.  Anything that is not site-absolute -- external
-- @http(s)://@ links and already-relative paths alike -- is returned
-- untouched.
relativeURL :: Int -> String -> String
relativeURL n ('/':rest)
        | null (takeExtension rest) = replaceExtension (local_prefix </> rest) "html"
        | otherwise                 = local_prefix </> rest
  where local_prefix = concat (replicate n "../")
-- The original spelled out http/https guards here, but both guard
-- branches returned the input unchanged, so a single catch-all
-- equation is equivalent (and no longer leaves dead code).
relativeURL _ other = other
----------------------
-- | Expand a @div@ or @span@ macro element: the element's @class@
-- attribute names the macro, and 'macro' produces the replacement
-- HTML.  Fails (so an enclosing traversal leaves the element alone)
-- on any other tag, or when there is no @class@ attribute.
divSpanExpand :: (String -> FPGM HTML) -> T Element HTML
divSpanExpand macro = do
        tag <- getTag
        -- () <- trace ("trace: " ++ tag) $ return ()
        guardMsg (tag == "div" || tag == "span") "wrong tag"
        -- () <- trace ("trace: " ++ show tag) $ return ()
        cls <- getAttr "class"
        --- () <- trace ("$$$$$$$$$$$$$$$$$ trace: " ++ show (tag,cls)) $ return ()
        -- Run the macro in the base monad; its output replaces the node.
        constT $ macro cls
-----------------------------------------------
-- | A link checks out when at least one HTTP status was recorded and
-- the final status (after any redirects) was 200.
goodLinkCode :: URLResponse -> Bool
goodLinkCode (URLResponse codes _) =
    case codes of
        [] -> False
        _  -> last codes == 200
-----------------------------------------------
-- | The page-generation monad: an IO step that either yields a value,
-- fails with a message, or suspends with a Shake 'Action' to be run
-- by an interpreter ('applyFPGM') before continuing.
newtype FPGM a = FPGM { runFPGM :: IO (FPGMResult a) }

-- One step of an 'FPGM' computation.
data FPGMResult a
        = FPGMResult a    -- finished with a value
        | FPGMFail String -- failed with a message
        -- needs the embedding build system to run an Action, then
        -- continues with its result
        | forall r . FPGMAction (Action r) (r -> FPGM a)
-- For testing: run a translation directly in IO.  Unlike 'applyFPGM'
-- there is no Shake context here, so a computation that suspends with
-- an embedded 'Action' cannot make progress; that case is now reported
-- explicitly instead of crashing with a non-exhaustive-case error, as
-- the original (which only matched FPGMResult and FPGMFail) did.
applyFPGM'' :: forall a b . Translate Context FPGM a b -> a -> IO b
applyFPGM'' t a = do
        r <- runFPGM (KURE.apply t mempty a)
        case r of
          FPGMResult a   -> return a
          FPGMFail msg   -> fail msg
          FPGMAction _ _ -> fail "applyFPGM'': embedded Action cannot be run outside Shake"
-- | Run a translation inside a Shake 'Action'.  The FPGM computation
-- is interpreted step by step: a plain result or failure finishes the
-- run, while a suspended 'FPGMAction' is executed in the surrounding
-- 'Action' monad and its result fed into the continuation.
applyFPGM :: forall a b . Translate Context FPGM a b -> a -> Action b
applyFPGM t a = do
        let loop (FPGMResult a) = return a
            loop (FPGMFail msg) = fail $ "applyFPGM " ++ msg
            loop (FPGMAction act rest) = do
                res <- act
                run (rest res)
            -- 'traced' labels this step in Shake's profiling output.
            run m = do res <- traced "apply-yah" $ runFPGM m
                       loop res
        run $ KURE.apply t mempty a
-- | Embed a Shake 'Action' into 'FPGM' by suspending immediately and
-- resuming with the action's result.
liftActionFPGM :: Action a -> FPGM a
liftActionFPGM act = FPGM (return (FPGMAction act return))
-- | KURE translations over the document, running in 'FPGM' with a
-- 'Context' describing the path into the HTML tree.
type T a b = Translate Context FPGM a b
-- | A rewrite: a translation that preserves its type.
type R a = T a a
-- NOTE(review): written against a pre-AMP GHC, where Monad has no
-- Applicative superclass and 'fail' still lives in Monad; a modern
-- GHC would need a MonadFail split -- confirm the targeted compiler.
instance Monad FPGM where
        -- A pure value finishes immediately.
        return = FPGM . return . FPGMResult
        -- Run the first step, then thread 'k' through whichever result
        -- appears; a suspended Action keeps its continuation composed
        -- with 'k', so the bind survives the suspension.
        m1 >>= k = FPGM $ do
                r <- runFPGM m1
                let f (FPGMResult a) = runFPGM (k a)
                    f (FPGMFail msg) = return (FPGMFail msg)
                    f (FPGMAction act rest) = return $ FPGMAction act (\ a -> rest a >>= k)
                f r
        -- Failure is recorded purely, reported later by the interpreter.
        fail = FPGM . return . FPGMFail
-- | Derived from the Monad instance: mapping is a bind followed by a
-- pure return (equivalent to the original @pure f <*> m@ by the
-- monad laws).
instance Functor FPGM where
        fmap f m = m >>= \ a -> return (f a)

-- | Also derived from the Monad instance; effects sequence left to
-- right, exactly as before.
instance Applicative FPGM where
        pure = return
        af <*> aa = do
                f <- af
                a <- aa
                return (f a)
instance MonadCatch FPGM where
        -- Run m1 one step: a recorded failure goes to the handler, a
        -- success passes through, and a suspended Action is passed on
        -- unchanged.  Note the suspension's continuation is NOT
        -- wrapped with the handler, so failures occurring after an
        -- embedded Action resumes are not caught here.
        catchM m1 handle = FPGM $ do
                r <- runFPGM m1
                let f (FPGMResult a) = return (FPGMResult a)
                    f (FPGMFail msg) = runFPGM (handle msg)
                    f (FPGMAction act rest) = return (FPGMAction act rest)
                f r
--instance MonadIO FPGM where
-- liftIO m = FPGM (FPGMResult <$> m)
--------------------------------------------------
| ku-fpg/chione | Web/Chione.hs | bsd-3-clause | 32,867 | 350 | 20 | 13,095 | 7,137 | 3,829 | 3,308 | -1 | -1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.