code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE Arrows, FlexibleContexts, FlexibleInstances,
MultiParamTypeClasses #-}
module Karamaan.Opaleye.LeftJoin where
import Data.Profunctor.Product.Default (Default, def)
import Karamaan.Opaleye.QueryColspec (QueryColspec)
import Karamaan.Opaleye.QueryArr (Query)
import Karamaan.Opaleye.Wire (Wire)
import qualified Karamaan.Opaleye.Wire as Wire
import qualified Karamaan.Opaleye.Operators2 as Op2
import Control.Arrow (arr, returnA, (<<<), (***), (&&&))
import qualified Karamaan.Opaleye.Predicates as P
import Data.Profunctor (Profunctor, dimap)
import Data.Profunctor.Product (ProductProfunctor, empty, (***!))
import qualified Database.HaskellDB.PrimQuery as PQ
-- FIXME: this seems to fail on the union because the NULLs are not
-- given explicit types. This is an annoyance of Postgres. There'll be
-- a way to work around it (of course: just give the NULLs explicity types!)
-- but it is annoying.
-- NullMaker a b represents a way of turning a 'QueryArr z a' into a
-- 'QueryArr z b' where all the columns of 'b' are made nullable.
-- For example 'QueryArr (Wire Int, Wire Bool, Wire String)' could
-- become 'QueryArr (Wire (Maybe Int), Wire (Maybe Bool), Wire (Maybe String)'.
--
-- I don't really like that this is 'a -> b'. To be safe it should be
-- QueryArr a b, or ExprArr a b, when that exists. I don't think it
-- will cause any problems though, if it is not exported.
-- | @NullMaker a b@ packages the two ingredients needed for the outer
-- part of a left join: a pure coercion from a row of type @a@ to its
-- nullable counterpart @b@, and a query producing an all-NULLs row of
-- type @b@ for left rows with no match.
data NullMaker a b = NullMaker (a -> b) (Query b)

-- | Coerce a row to its nullable counterpart.
toNullable :: NullMaker a b -> a -> b
toNullable (NullMaker f _) = f

-- When we have proper support for ExprArr I suppose this can be
-- NullMaker a b -> Expr b
-- | The query producing a row of NULLs of type @b@.
nulls :: NullMaker a b -> Query b
nulls (NullMaker _ n) = n
-- | A single wire can always be made nullable: the coercion is a no-op
-- at the wire level ('Wire.unsafeCoerce') and the "all NULLs" query is
-- a literal SQL NULL.
instance Default NullMaker (Wire a) (Wire (Maybe a)) where
  def = NullMaker Wire.unsafeCoerce (Op2.constantLit PQ.NullLit)

-- | Map over both the coercion function and the NULLs query.
instance Profunctor NullMaker where
  dimap f g nm = NullMaker (dimap f g (toNullable nm)) (fmap g (nulls nm))

-- | Products are built componentwise, which lets 'Default' instances
-- be assembled for tuples of wires.
instance ProductProfunctor NullMaker where
  empty = NullMaker id (arr id)
  NullMaker f n ***! NullMaker f' n' = NullMaker (f *** f') (n &&& n')
-- | Left join of two queries, keyed by equality of the wires selected
-- by the two projection functions.  Each left row is paired either
-- with a matching right row (made nullable via the 'NullMaker') or
-- with a row of NULLs when no right row matches.
leftJoin :: (Default QueryColspec l l, Default QueryColspec r' r')
         => NullMaker r r'
         -> Query l -> (l -> Wire b)
         -> Query r -> (r -> Wire b)
         -> Query (l, r')
-- Implemented as (inner join) UNION (unmatched left rows x NULLs);
-- 'Op2.difference' removes the left rows that already matched.
leftJoin nm qL fL qR fR = join `Op2.union` outer
  where join = proc () -> do
          rowL <- qL -< ()
          keyL <- arr fL -< rowL
          rowR <- qR -< ()
          keyR <- arr fR -< rowR
          -- keep only rows whose join keys are equal
          P.restrict <<< Op2.eq -< (keyL, keyR)
          returnA -< (rowL, toNullable nm rowR)
        outer = proc () -> do
          -- left rows that did not appear in the inner join
          rowL <- qL `Op2.difference` (arr fst <<< join) -< ()
          nulls' <- nulls nm -< ()
          returnA -< (rowL, nulls')
| dbp/karamaan-opaleye | Karamaan/Opaleye/LeftJoin.hs | bsd-3-clause | 2,730 | 2 | 15 | 607 | 725 | 398 | 327 | 43 | 1 |
{-# LANGUAGE
CPP
, DeriveDataTypeable
, FlexibleContexts
, FlexibleInstances
, MultiParamTypeClasses #-}
{- |
Copyright : (c) Andy Sonnenburg 2013
License : BSD3
Maintainer : andy22286@gmail.com
-}
module Data.Var.IO
( module Data.Var.Class
, IOVar
, IOUVar
) where
#ifdef MODULE_Control_Monad_ST_Safe
import Control.Monad.ST.Safe (RealWorld)
#else
import Control.Monad.ST (RealWorld)
#endif
import Data.ByteArraySlice
import Data.IOVar
import Data.Var.ByteArray
import Data.Var.Class
import Data.Typeable
{- |
A mutable variable containing an unboxed value of type @a@ in the 'IO' monad
-}
newtype IOUVar a =
  IOUVar { unIOUVar :: ByteArrayVar RealWorld a -- underlying byte-array variable pinned to 'RealWorld'
         } deriving (Eq, Typeable)

-- | All operations delegate to the wrapped 'ByteArrayVar'; the
-- 'ByteArraySlice' constraint supplies the (de)serialisation of @a@
-- into the backing byte array.
instance ByteArraySlice a => Var IOUVar a IO where
  newVar = fmap IOUVar . newVar
  readVar = readVar . unIOUVar
  writeVar = writeVar . unIOUVar
| sonyandy/var | src/Data/Var/IO.hs | bsd-3-clause | 894 | 0 | 7 | 186 | 146 | 89 | 57 | 23 | 0 |
{-|
Form functions that report errors about each form field.
Most code are copied and modified from yesod-form.
-}
module Yesod.Helpers.Form2
( FieldErrors, oneFieldError, overallFieldError, nullFieldErrors, fieldErrorsToList
, EMForm, SEMForm
, runEMFormPost
, runEMFormPostNoToken
, runEMFormGet
, generateEMFormPost
, generateEMFormGet'
, generateEMFormGet
, emreq, emopt, emstatic
, semreq, semopt, semreqOpt, semstatic, semstatic'
, addEMFieldError
, addEMOverallError
, renderBootstrapES
, renderBootstrapES'
, renderBootstrap3ES
, renderBootstrap3ES'
, jsendFormData
, optTimeRangeEndpointField
, reqTimeRangeEndpointField
) where
-- {{{1 imports
import ClassyPrelude.Yesod
import qualified Data.Text.Encoding as TE
import qualified Control.Monad.Trans.State.Strict as SS
import Control.Monad.Trans.RWS (RWST, tell, evalRWST)
import Control.Monad.Trans.Writer (runWriterT, WriterT(..))
import qualified Control.Monad.Trans.Writer as W
import Data.Byteable (constEqBytes)
import Data.Time
import Network.Wai (requestMethod)
import Text.Blaze (Markup)
import Text.Blaze.Html.Renderer.Text (renderHtml)
import Data.Aeson.Types (Pair)
import Yesod.Helpers.JSend
import Yesod.Form.Jquery
#if MIN_VERSION_yesod_form(1, 3, 8)
import Yesod.Form.Bootstrap3 ( renderBootstrap3
, BootstrapFormLayout(BootstrapBasicForm)
)
#endif
-- }}}1
-- | Newtype wrapper so that 'FieldSettings' can be used as a hash-map
-- key: Eq/Ord/Hashable consider only 'fsName' and 'fsId'.
newtype WrappedFieldSettings master = WrappedFieldSettings
    { unWrappedFieldSettings :: FieldSettings master }

-- | Use 'fsId' and 'fsName' to compare/sort.
getFieldSettingsCoreFields :: FieldSettings master -> (Maybe Text, Maybe Text)
getFieldSettingsCoreFields = fsName &&& fsId

instance Eq (WrappedFieldSettings master) where
    (==) (WrappedFieldSettings x) (WrappedFieldSettings y) =
        getFieldSettingsCoreFields x == getFieldSettingsCoreFields y

instance Ord (WrappedFieldSettings master) where
    compare (WrappedFieldSettings x) (WrappedFieldSettings y) =
        compare (getFieldSettingsCoreFields x) (getFieldSettingsCoreFields y)

instance Hashable (WrappedFieldSettings master) where
    hashWithSalt salt (WrappedFieldSettings x) =
        hashWithSalt salt (getFieldSettingsCoreFields x)
    hash (WrappedFieldSettings x) = hash (getFieldSettingsCoreFields x)
-- | Error messages collected per form field, keyed by
-- (field name, field settings).
newtype FieldErrors master = FieldErrors
    { unFieldErrors :: HashMap (Text, WrappedFieldSettings master) (Set Text) }

-- | A single error message attached to one named field.
oneFieldError :: Text
              -> FieldSettings master
              -> Text
              -> FieldErrors master
oneFieldError name fs msg = FieldErrors $ singletonMap
                                (name, WrappedFieldSettings fs)
                                (singletonSet msg)

-- | An error not tied to any particular field; stored under the
-- pseudo-field name "__all__" with empty settings.
overallFieldError :: Text -> FieldErrors master
overallFieldError msg = oneFieldError "__all__" (fieldSettingsLabel ("" :: Text)) msg

-- | True when no errors have been collected.
nullFieldErrors :: FieldErrors master -> Bool
nullFieldErrors = null . unFieldErrors

-- | Flatten the error map to an association list, unwrapping the
-- settings and turning each message set into a list.
fieldErrorsToList :: FieldErrors master -> [((Text, FieldSettings master), [Text])]
fieldErrorsToList = map (second unWrappedFieldSettings *** toList) .
                        mapToList .
                        unFieldErrors
-- | Encode the collected errors as a JSON object of shape
-- @{ name: { "fs": { "id", "label", "tooltip" }, "errs": [msg...] } }@.
-- The supplied renderer localises the label/tooltip messages.
fieldErrorsToJSON :: (SomeMessage master ->Text)
                  -> FieldErrors master
                  -> Value
fieldErrorsToJSON render_msg = object . map json_it . mapToList . unFieldErrors
    where
        json_it ((name, fs), v) = (name, object [ "fs" .= json_fs fs, "errs" .= toJSON (toList v) ])
        json_fs (WrappedFieldSettings x) = object
            [ "id" .= fsId x
            , "label" .= render_msg (fsLabel x)
            , "tooltip" .= fmap render_msg (fsTooltip x)
            ]
-- | Errors merge pointwise: the message sets for the same field are
-- unioned.  Idiomatically, '<>' is defined in the 'Semigroup' instance
-- and 'mappend' delegates to it (the original defined 'mappend' in
-- 'Monoid' and pointed '<>' back at it, which is backwards since the
-- Semigroup-Monoid proposal).
instance Semigroup (FieldErrors master) where
    FieldErrors x1 <> FieldErrors x2 = FieldErrors $ unionWith mappend x1 x2

instance Monoid (FieldErrors master) where
    mempty  = FieldErrors mempty
    mappend = (<>)
-- | Like yesod-form's monadic form, but with an extra 'WriterT' layer
-- that accumulates per-field 'FieldErrors'.
type EMForm m a = WriterT
                    (FieldErrors (HandlerSite m))
                    (RWST (Maybe (Env, FileEnv), HandlerSite m, [Lang]) Enctype Ints m)
                    a

-- | the following long type synonym actually says:
-- type SEMForm site m a = SS.StateT [FieldView site] (EMForm m) a
-- but haskell does not allow partially applied synonym in the above line,
-- we have the expand the synonym manually.
--
-- Usage: With the following helpers (smreq, smopt), all FieldView's are remembered.
-- So usually we don't need to name all FieldView's one by one,
-- which simplify code a little.
type SEMForm m a = SS.StateT [FieldView (HandlerSite m)]
                    (WriterT
                        (FieldErrors (HandlerSite m))
                        (RWST (Maybe (Env, FileEnv), HandlerSite m, [Lang]) Enctype Ints m)
                    )
                    a
-- | This function is used to both initially render a form and to later extract
-- results from it. Note that, due to CSRF protection and a few other issues,
-- forms submitted via GET and POST are slightly different. As such, be sure to
-- call the relevant function based on how the form will be submitted, /not/
-- the current request method.
--
-- For example, a common case is displaying a form on a GET request and having
-- the form submit to a POST page. In such a case, both the GET and POST
-- handlers should use 'runFormPost'.
-- | Run a POSTed form (with CSRF token checking), also returning the
-- per-field errors collected while parsing.
--
-- This function is used to both initially render a form and to later extract
-- results from it. Note that, due to CSRF protection and a few other issues,
-- forms submitted via GET and POST are slightly different. As such, be sure to
-- call the relevant function based on how the form will be submitted, /not/
-- the current request method.
--
-- For example, a common case is displaying a form on a GET request and having
-- the form submit to a POST page. In such a case, both the GET and POST
-- handlers should use 'runFormPost'.
runEMFormPost :: (RenderMessage (HandlerSite m) FormMessage, MonadResource m, MonadHandler m)
              => (Html -> EMForm m (FormResult a, xml))
              -> m (((FormResult a, xml), Enctype), FieldErrors (HandlerSite m))
-- {{{1
runEMFormPost form = do
    env <- postEnv
    postHelper form env
-- }}}1

-- | Like 'runEMFormPost' but without generating/checking a CSRF token.
runEMFormPostNoToken :: MonadHandler m
                     => (Html -> EMForm m a)
                     -> m ((a, Enctype), FieldErrors (HandlerSite m))
-- {{{1
runEMFormPostNoToken form = do
    langs <- languages
    m <- getYesod
    env <- postEnv
    runEMFormGeneric (form mempty) m langs env
-- }}}1

-- | Run a form submitted via GET.  The form is only considered
-- submitted when the 'getKey' marker parameter is present.
runEMFormGet :: MonadHandler m
             => (Html -> EMForm m a)
             -> m ((a, Enctype), FieldErrors (HandlerSite m))
-- {{{1
runEMFormGet form = do
    gets <- liftM reqGetParams getRequest
    let env =
            case lookup getKey gets of
                Nothing -> Nothing
                Just _ -> Just (unionsWith (++) $ map (\(x, y) -> singletonMap x [y]) gets, mempty)
    getHelper form env
-- }}}1
-- | Similar to 'runFormPost', except it always ignores the currently available
-- environment. This is necessary in cases like a wizard UI, where a single
-- page will both receive and incoming form and produce a new, blank form. For
-- general usage, you can stick with @runFormPost@.
-- | Similar to 'runFormPost', except it always ignores the currently available
-- environment. This is necessary in cases like a wizard UI, where a single
-- page will both receive and incoming form and produce a new, blank form. For
-- general usage, you can stick with @runFormPost@.
generateEMFormPost
    :: (RenderMessage (HandlerSite m) FormMessage, MonadHandler m)
    => (Html -> EMForm m (FormResult a, xml))
    -> m ((xml, Enctype), FieldErrors (HandlerSite m))
generateEMFormPost form = first (first snd) `liftM` postHelper form Nothing

-- | GET counterpart of 'generateEMFormPost': render a blank form,
-- discarding the (necessarily missing) result.
generateEMFormGet' :: (MonadHandler m)
                   => (Html -> EMForm m (FormResult a, xml))
                   -> m ((xml, Enctype), FieldErrors (HandlerSite m))
generateEMFormGet' form = first (first snd) `liftM` getHelper form Nothing

-- | Render a blank GET form, dropping the collected field errors.
generateEMFormGet :: MonadHandler m
                  => (Html -> EMForm m a)
                  -> m (a, Enctype)
generateEMFormGet form = liftM fst $ getHelper form Nothing
-- | Run the form's Writer/RWST stack: the reader supplies the request
-- environment, site and languages; the state is the ident counter
-- (starting at 'IntSingle' 0); the RWST writer accumulates 'Enctype'
-- and the WriterT layer accumulates field errors.
runEMFormGeneric :: Monad m
                 => EMForm m a
                 -> HandlerSite m
                 -> [Text]
                 -> Maybe (Env, FileEnv)
                 -> m ((a, Enctype), FieldErrors (HandlerSite m))
-- {{{1
runEMFormGeneric form site langs env = do
    ((res, err_fields), enctype) <- evalRWST (runWriterT form) (env, site, langs) (IntSingle 0)
    return ((res, enctype), err_fields)
-- }}}1
-- | Read the POST parameter/file environment of the current request;
-- 'Nothing' for GET requests (no body to parse).
postEnv :: (MonadHandler m) => m (Maybe (Env, FileEnv))
-- {{{1
postEnv = do
    req <- getRequest
    if requestMethod (reqWaiRequest req) == "GET"
        then return Nothing
        else do
            (params, files) <- runRequestBody
            -- group repeated keys into lists of values
            return $ Just (toMultiMap params, toMultiMap files)
  where
    toMultiMap kvs = unionsWith (++) $ map (\(k, v) -> singletonMap k [v]) kvs
-- }}}1
-- | Run a POSTed form and verify its CSRF token.  On a token mismatch
-- the successful result is downgraded to 'FormFailure' and an overall
-- field error is recorded.
postHelper :: (MonadHandler m, RenderMessage (HandlerSite m) FormMessage)
           => (Html -> EMForm m (FormResult a, xml))
           -> Maybe (Env, FileEnv)
           -> m (((FormResult a, xml), Enctype), FieldErrors (HandlerSite m))
-- {{{1
postHelper form env = do
    req <- getRequest
    let tokenKey = asText "_token"
    -- hidden input carrying the session's CSRF token, if any
    let token =
            case reqToken req of
                Nothing -> mempty
                Just n -> [shamlet|<input type=hidden name=#{tokenKey} value=#{n}>|]
    m <- getYesod
    langs <- languages
    (((res, xml), enctype), err_fields) <- runEMFormGeneric (form token) m langs env
    (res', err_fields') <- do
        let (Just [t1]) === (Just t2) = TE.encodeUtf8 t1 `constEqBytes` TE.encodeUtf8 t2
            Nothing === Nothing = True -- It's important to use constTimeEq
            _ === _ = False -- in order to avoid timing attacks.
        case (res, env) of
            -- no submission: form is missing regardless of parse result
            (_, Nothing) -> return (FormMissing, err_fields)
            -- successful parse but bad/missing CSRF token: reject
            (FormSuccess{}, Just (params, _))
                | not (lookup tokenKey params === reqToken req) -> do
                    let err_msg = renderMessage m langs MsgCsrfWarning
                    return ( FormFailure [err_msg]
                           , err_fields `mappend` overallFieldError err_msg
                           )
            _ -> return (res, err_fields)
    return (((res', xml), enctype), err_fields')
-- }}}1
-- | Converts a form field into monadic form. This field requires a value
-- and will return 'FormFailure' if left empty.  The "value required"
-- message is also recorded as a per-field error.
emreq :: (RenderMessage site FormMessage, HandlerSite m ~ site, MonadHandler m)
      => Field m a          -- ^ form field
      -> FieldSettings site -- ^ settings for this field
      -> Maybe a            -- ^ optional default value
      -> EMForm m (FormResult a, FieldView site)
emreq field fs mdef = mhelper field fs mdef
                        (\m l name -> do
                            let err_msg = renderMessage m l MsgValueRequired
                            W.tell $ oneFieldError name fs err_msg
                            return $ FormFailure [err_msg]
                        )
                        FormSuccess True

-- | Converts a form field into monadic form. This field is optional, i.e.
-- if filled in, it returns 'Just a', if left empty, it returns 'Nothing'.
-- Arguments are the same as for 'mreq' (apart from type of default value).
emopt :: (site ~ HandlerSite m, MonadHandler m)
      => Field m a
      -> FieldSettings site
      -> Maybe (Maybe a)
      -> EMForm m (FormResult (Maybe a), FieldView site)
emopt field fs mdef = mhelper field fs
                        (join mdef) (const $ const $ const $ return $ FormSuccess Nothing)
                        (FormSuccess . Just) False
-- | A pseudo-field that renders fixed text as static (read-only)
-- content and always succeeds with the supplied value @v@.
emstatic :: (site ~ HandlerSite m, MonadHandler m)
         => FieldSettings site
         -> a     -- ^ the value the "field" yields
         -> Text  -- ^ the text displayed to the user
         -> EMForm m (FormResult a, FieldView site)
emstatic (FieldSettings {..}) v text = do
    theId <- lift $ lift $ maybe newIdent return fsId
    (_, site, langs) <- lift $ ask
    let mr2 = renderMessage site langs
    return (FormSuccess v, FieldView
        { fvLabel = toHtml $ mr2 fsLabel
        , fvTooltip = fmap toHtml $ fmap mr2 fsTooltip
        , fvId = theId
        , fvInput = toWidget
                [shamlet|<p id="#{theId}" *{fsAttrs} .form-control-static>#{text}|]
        , fvErrors = Nothing
        , fvRequired = False
        })
-- | Record an extra error against a specific field, rendering the
-- message with the site's message renderer.
addEMFieldError :: (MonadHandler m, RenderMessage (HandlerSite m) msg)
                => Text
                -> FieldSettings (HandlerSite m)
                -> msg
                -> EMForm m ()
addEMFieldError name fs msg =
    lift getMessageRender >>= \render ->
        W.tell $ oneFieldError name fs (render msg)

-- | Record an extra error that is not tied to any particular field.
addEMOverallError :: (MonadHandler m, RenderMessage (HandlerSite m) msg)
                  => msg
                  -> EMForm m ()
addEMOverallError msg =
    lift getMessageRender >>= \render ->
        W.tell $ overallFieldError (render msg)
-- | Like 'emreq', but pushes the generated 'FieldView' onto the state
-- so the caller does not have to name it.
semreq ::
    (RenderMessage site FormMessage, HandlerSite m ~ site, MonadHandler m) =>
    Field m a
    -> FieldSettings site
    -> Maybe a
    -> SEMForm m (FormResult a)
semreq field settings initv = do
    (result, fv) <- lift (emreq field settings initv)
    SS.modify (fv :)
    pure result

-- | Like 'emopt', but remembers the generated 'FieldView' in the state.
semopt :: (HandlerSite m ~ site, MonadHandler m)
       => Field m a
       -> FieldSettings site
       -> Maybe (Maybe a)
       -> SEMForm m (FormResult (Maybe a))
semopt field settings initv = do
    (result, fv) <- lift (emopt field settings initv)
    SS.modify (fv :)
    pure result

-- | Like 'emstatic', but remembers the generated 'FieldView' in the state.
semstatic :: (HandlerSite m ~ site, MonadHandler m)
          => FieldSettings site
          -> a
          -> Text
          -> SEMForm m (FormResult a)
semstatic settings v text = do
    (result, fv) <- lift (emstatic settings v text)
    SS.modify (fv :)
    pure result
-- | 'semstatic' with the displayed text as the first argument.
semstatic' :: (HandlerSite m ~ site, MonadHandler m)
           => Text
           -> FieldSettings site
           -> a
           -> SEMForm m (FormResult a)
semstatic' text settings v = semstatic settings v text
-- | Use `semreq` internally, but make the signature like `semopt`.
-- Useful when whether some fields is required depends on other conditions.
semreqOpt :: (HandlerSite m ~ site, MonadHandler m, RenderMessage site FormMessage)
          => Field m a
          -> FieldSettings site
          -> Maybe (Maybe a)
          -> SEMForm m (FormResult (Maybe a))
semreqOpt field settings initv =
    fmap Just <$> semreq field settings (join initv)
-- | Render all remembered 'FieldView's (in the order they were added)
-- with the bootstrap layout, yielding the combined result and widget.
renderBootstrapES :: Monad m =>
    Markup
    -> FormResult a
    -> SEMForm m (FormResult a, WidgetT (HandlerSite m) IO ())
-- {{{1
renderBootstrapES extra result = do
    -- views were consed on, so reverse to restore insertion order
    views <- liftM reverse $ SS.get
    let aform = formToAForm $ return (result, views)
    lift $ lift $
#if MIN_VERSION_yesod_form(1, 3, 8)
        -- renderBootstrap is deprecated, but the new recommended function
        -- is for bootstrap v3.
        -- We assume that bootstrap v3 is used here.
        renderBootstrap3 BootstrapBasicForm
#else
        renderBootstrap
#endif
            aform extra
-- }}}1

#if MIN_VERSION_yesod_form(1, 3, 8)
-- | Same as 'renderBootstrapES' but with an explicit bootstrap-3 layout.
renderBootstrap3ES :: Monad m
                   => BootstrapFormLayout
                   -> Markup
                   -> FormResult a
                   -> SEMForm m (FormResult a, WidgetT (HandlerSite m) IO ())
renderBootstrap3ES layout extra result = do
    views <- liftM reverse $ SS.get
    let aform = formToAForm $ return (result, views)
    lift $ lift $ renderBootstrap3 layout aform extra
#endif
-- | combines renderBootstrapS and runSEMForm, smToForm
renderBootstrapES' :: Monad m =>
    Markup
    -> SEMForm m (FormResult a)
    -> EMForm m (FormResult a, WidgetT (HandlerSite m) IO ())
renderBootstrapES' extra result = do
    runSEMForm $ result >>= renderBootstrapES extra

#if MIN_VERSION_yesod_form(1, 3, 8)
-- | Like 'renderBootstrapES'' with an explicit bootstrap-3 layout.
renderBootstrap3ES' :: Monad m
                    => BootstrapFormLayout
                    -> Markup
                    -> SEMForm m (FormResult a)
                    -> EMForm m (FormResult a, WidgetT (HandlerSite m) IO ())
renderBootstrap3ES' layout extra result = do
    runSEMForm $ result >>= renderBootstrap3ES layout extra
#endif

-- | Run a state-carrying form starting from an empty list of views.
runSEMForm :: Monad m => SEMForm m a -> EMForm m a
runSEMForm = flip SS.evalStateT []
-- | our standard way to encode a form and its errors into a JSON value.
-- Success/fail status is decided by whether any field errors exist.
jsendFormData :: (SomeMessage master -> Text)
              -> Maybe Html
              -- ^ html code of the form body
              -- sometimes client don't need the html code (just the errors are needed)
              -- to save bandwidth, html code is optional
              -> FieldErrors master
              -> [Pair]
              -> JSendMsg
-- {{{1
jsendFormData render_msg m_form_html field_errs extra_fields
    | nullFieldErrors field_errs = JSendSuccess payload
    | otherwise                  = JSendFail payload
  where
    payload = object $ ("form" .= form_json) : extra_fields
    form_json = object $ catMaybes
                    [ fmap (("body" .=) . renderHtml) m_form_html
                    , Just $ "errors" .= fieldErrorsToJSON render_msg field_errs
                    ]
-- }}}1
-- | Workhorse shared by 'emreq'/'emopt': look the field up in the
-- request environment, run its parser, record any parse error into the
-- writer layer, and build the 'FieldView' for rendering.
mhelper :: (site ~ HandlerSite m, MonadHandler m)
        => Field m a
        -> FieldSettings site
        -> Maybe a
        -> (site -> [Text] -> Text -> EMForm m (FormResult b)) -- ^ on missing
        -> (a -> FormResult b) -- ^ on success
        -> Bool -- ^ is it required?
        -> EMForm m (FormResult b, FieldView site)
-- {{{1
mhelper Field {..} fs@(FieldSettings {..}) mdef onMissing onFound isReq = do
    lift $ tell fieldEnctype
    mp <- lift $ askParams
    -- generate name/id if the settings did not fix them
    name <- lift $ maybe newFormIdent return fsName
    theId <- lift $ lift $ maybe newIdent return fsId
    (_, site, langs) <- lift $ ask
    let mr2 = renderMessage site langs
    (res, val) <-
        case mp of
            -- no submitted environment: render defaults, result missing
            Nothing -> return (FormMissing, maybe (Left "") Right mdef)
            Just p -> do
                mfs <- lift askFiles
                let mvals = fromMaybe [] $ lookup name p
                    files = fromMaybe [] $ mfs >>= lookup name
                emx <- lift $ lift $ fieldParse mvals files
                case emx of
                    -- parse error: record it both in the result and the writer
                    Left (SomeMessage e) -> do
                        let err_msg = renderMessage site langs e
                        W.tell $ oneFieldError name fs err_msg
                        return $ (FormFailure [err_msg], maybe (Left "") Left (listToMaybe mvals))
                    Right mx ->
                        case mx of
                            Nothing -> do
                                r <- onMissing site langs name
                                return (r, Left "")
                            Just x -> return (onFound x, Right x)
    return (res, FieldView
        { fvLabel = toHtml $ mr2 fsLabel
        , fvTooltip = fmap toHtml $ fmap mr2 fsTooltip
        , fvId = theId
        , fvInput = fieldView theId name fsAttrs val isReq
        , fvErrors =
            case res of
                FormFailure [e] -> Just $ toHtml e
                _ -> Nothing
        , fvRequired = isReq
        })
-- }}}1
-- | Marker parameter used by GET forms to distinguish an initial
-- render from an actual submission (see 'runEMFormGet').
getKey :: Text
getKey = "_hasdata"

-- | Run a GET form, embedding the hidden 'getKey' marker input.
getHelper :: MonadHandler m
          => (Html -> EMForm m a)
          -> Maybe (Env, FileEnv)
          -> m ((a, Enctype), FieldErrors (HandlerSite m))
-- {{{1
getHelper form env = do
    let fragment = [shamlet|<input type=hidden name=#{getKey}>|]
    langs <- languages
    m <- getYesod
    runEMFormGeneric (form fragment) m langs env
-- }}}1
-- | Combine a Day input and a TimeOfDay input into one endpoint
-- (start or end) of a time range.  Both inputs are optional; the
-- result is 'Nothing' when neither is supplied.
optTimeRangeEndpointField :: (RenderMessage site FormMessage, YesodJquery site)
                          => TimeZone
                          -> Bool -- ^ True: this is the start of the range; False: the end
                          -> FieldSettings site
                          -> FieldSettings site
                          -> Maybe UTCTime
                          -> SEMForm (HandlerT site IO) (FormResult (Maybe UTCTime))
-- {{{1
optTimeRangeEndpointField tz is_start day_fs tod_fs old = do
    day <- semopt (jqueryDayField def) day_fs
                (Just $ fmap (localDay . utcToLocalTime tz) $ old)
    tod <- semopt timeFieldTypeTime tod_fs
                (Just $ fmap (localTimeOfDay . utcToLocalTime tz) $ old)
    compose_utc_time_field_result day tod
    where
        -- a time-of-day without a date is an error
        compose_utc_time Nothing (Just _) = Left $ asText "需先指定日期"
        compose_utc_time Nothing Nothing = Right Nothing
        -- date only: default to the very beginning (range start) or the
        -- very end (range end) of that day
        compose_utc_time (Just d) m_tod = Right $ Just $
            localTimeToUTC tz $
            LocalTime d $
                case m_tod of
                    Nothing -> if is_start
                                then midnight
                                else TimeOfDay 23 59 59.9999999
                    Just tod -> tod
        compose_utc_time_field_result (FormSuccess d) (FormSuccess tod) = case compose_utc_time d tod of
            Left err ->
                lift (addEMOverallError err) >> return FormMissing
            Right x -> return $ FormSuccess x
        -- errors of the individual inputs were already collected
        -- separately, so nothing more to do here
        compose_utc_time_field_result _ _ = return FormMissing
-- }}}1
-- | Combine a Day input and a TimeOfDay input into one endpoint
-- (start or end) of a time range.  The day is required; the
-- time-of-day is optional and defaults to the start/end of the day.
reqTimeRangeEndpointField :: (RenderMessage site FormMessage, YesodJquery site)
                          => TimeZone
                          -> Bool -- ^ True: this is the start of the range; False: the end
                          -> FieldSettings site
                          -> FieldSettings site
                          -> Maybe UTCTime
                          -> SEMForm (HandlerT site IO) (FormResult UTCTime)
-- {{{1
reqTimeRangeEndpointField tz is_start day_fs tod_fs old_time = do
    day <- semreq (jqueryDayField def) day_fs
                (fmap (localDay . utcToLocalTime tz) old_time)
    tod <- semopt timeFieldTypeTime tod_fs
                (Just $ fmap (localTimeOfDay . utcToLocalTime tz) old_time)
    return $ compose_utc_time <$> day <*> tod
    where
        compose_utc_time d (Just t) = localTimeToUTC tz $ LocalTime d t
        -- day only: default to the beginning or end of the day,
        -- depending on which endpoint of the range this is
        compose_utc_time d Nothing = localTimeToUTC tz $ LocalTime d $
            if is_start
                then midnight
                else TimeOfDay 23 59 59.9999999
-- }}}1
-- vim: set foldmethod=marker:
| yoo-e/yesod-helpers | Yesod/Helpers/Form2.hs | bsd-3-clause | 22,231 | 0 | 23 | 7,282 | 5,790 | 2,974 | 2,816 | -1 | -1 |
{-# LANGUAGE TemplateHaskell, RankNTypes, ScopedTypeVariables #-}
module LineCounter (countLines) where
import Control.Exception (catch)
import Control.Monad (forever)
import Control.Monad.Trans.State.Strict (StateT)
import Control.Lens ((^.), (.=), (%=), makeLenses, use)
import Data.Char (isSpace)
import Pipes
import Pipes.Lift (execStateP)
import Pipes.Safe (runSafeT)
import Pipes.Safe.Prelude (readFile)
import Prelude hiding (readFile, map)
import qualified Pipes.Prelude as P
import Control.Monad.Extras (unlessM)
import Language
-- | State threaded through the counting consumer: the number of code
-- lines seen so far, and whether we are currently inside a block comment.
data LineCount = LineCount
    { _lineCount :: Int
    , _inComment :: Bool
    } deriving Show

-- | Zero lines counted, outside of any block comment.
initLineCount :: LineCount
initLineCount = LineCount
    { _lineCount = 0
    , _inComment = False
    }

makeLenses ''LineCount
-- | Count source lines of code in a file for the given language,
-- skipping blank lines, line comments and block-comment contents.
-- An 'IOError' (e.g. unreadable file) is printed and yields 0.
countLines :: FilePath -> Language -> IO Int
countLines path lang = countLines' `catch` (\(e :: IOError) -> print e >> return 0)
    where
        countLines' :: IO Int
        countLines' = do
            line_count <-
                -- pipeline: read lines, trim leading whitespace, drop
                -- blanks and line comments, then fold with block-comment
                -- tracking in 'countLinesConsumer'
                runSafeT $
                    runEffect $
                        execStateP initLineCount $
                            readFile path >->
                            P.map trimLeft >->
                            P.filter (not . null) >->
                            P.filter (not . isLineComment lang) >->
                            hoist (hoist lift) countLinesConsumer
            return $ line_count ^. lineCount

        -- consume every remaining line, updating the 'LineCount' state
        countLinesConsumer :: Consumer' String (StateT LineCount IO) ()
        countLinesConsumer = forever $ await >>= count lang
-- | Process one (already trimmed, non-blank, non-line-comment) line:
-- track block-comment state and bump the counter for code lines.
count :: Language -> String -> Consumer' String (StateT LineCount IO) ()
count lang line
    -- isEnd must come before isBegin, so that inline block comments function correctly.
    -- Otherwise, there must be an "if isEnd" inside the "isBegin" body.
    | isEndBlockComment lang line = inComment .= False
    | isBeginBlockComment lang line = inComment .= True
    | otherwise = unlessM (use inComment) $ lineCount %= (+1)
-- | Strip every leading whitespace character from a line.
trimLeft :: String -> String
trimLeft (c:cs) | isSpace c = trimLeft cs
trimLeft line               = line
| mitchellwrosen/Sloch | src/sloch/LineCounter.hs | bsd-3-clause | 2,044 | 0 | 16 | 560 | 532 | 292 | 240 | 47 | 1 |
-- | Helpers for setting up a tls connection with @tls@ package,
-- for further customization, please refer to @tls@ package.
--
-- Note, functions in this module will throw error if can't load certificates or CA store.
--
module Data.TLSSetting
( -- * Choose a CAStore
TrustedCAStore(..)
-- * Make TLS settings
, makeClientParams
, makeClientParams'
, makeServerParams
, makeServerParams'
-- * Internal
, mozillaCAStorePath
) where
import qualified Data.ByteString as B
import Data.Default.Class (def)
import qualified Data.PEM as X509
import qualified Data.X509 as X509
import qualified Data.X509.CertificateStore as X509
import qualified Network.TLS as TLS
import qualified Network.TLS.Extra as TLS
import Paths_tcp_streams (getDataFileName)
import qualified System.X509 as X509
-- | The whole point of TLS is that: a peer should have already trusted
-- some certificates, which can be used for validating other peer's certificates.
-- if the certificates sent by other side form a chain. and one of them is issued
-- by one of 'TrustedCAStore', Then the peer will be trusted.
--
data TrustedCAStore
    = SystemCAStore          -- ^ provided by your operating system.
    | MozillaCAStore         -- ^ provided by <https://curl.haxx.se/docs/caextract.html Mozilla>.
    | CustomCAStore FilePath -- ^ provided by your self, the CA file can contain multiple certificates.
  deriving (Show, Eq)
-- | Get the built-in mozilla CA's path.
-- The PEM file is shipped as a data file of this package
-- (resolved via "Paths_tcp_streams").
mozillaCAStorePath :: IO FilePath
mozillaCAStorePath = getDataFileName "mozillaCAStore.pem"
-- | Load a 'X509.CertificateStore' for the chosen 'TrustedCAStore'.
--
-- Calls 'error' with a descriptive message if the CA file cannot be
-- parsed or contains an invalid certificate.
makeCAStore :: TrustedCAStore -> IO X509.CertificateStore
makeCAStore SystemCAStore      = X509.getSystemCertificateStore
makeCAStore MozillaCAStore     = makeCAStore . CustomCAStore =<< mozillaCAStorePath
makeCAStore (CustomCAStore fp) = do
    bs <- B.readFile fp
    -- 'pemParseBS' can fail; the previous partial pattern
    -- ('let Right pems = ...') died with an unhelpful
    -- irrefutable-pattern error on a malformed file.
    case X509.pemParseBS bs of
        Left err -> error ("makeCAStore: cannot parse PEM file " ++ fp ++ ": " ++ err)
        Right pems ->
            case mapM (X509.decodeSignedCertificate . X509.pemContent) pems of
                Right cas -> return (X509.makeCertificateStore cas)
                Left err  -> error ("makeCAStore: invalid certificate in " ++ fp ++ ": " ++ err)
-- | make a simple tls 'TLS.ClientParams' that will validate server and use tls connection
-- without providing client's own certificate. suitable for connecting server which don't
-- validate clients.
--
-- we defer setting of 'TLS.clientServerIdentification' to connecting phase.
--
-- Note, tls's default validating method require server has v3 certificate.
-- you can use openssl's V3 extension to issue such a certificate. or change 'TLS.ClientParams'
-- before connecting.
--
makeClientParams :: TrustedCAStore          -- ^ trusted certificates.
                 -> IO TLS.ClientParams
makeClientParams tca = do
    caStore <- makeCAStore tca
    -- server identification is left empty here; it is expected to be
    -- filled in at connect time (see note above)
    return (TLS.defaultParamsClient "" B.empty)
        { TLS.clientSupported = def { TLS.supportedCiphers = TLS.ciphersuite_all }
        , TLS.clientShared    = def
            { TLS.sharedCAStore = caStore
            , TLS.sharedValidationCache = def
            }
        }
-- | make a simple tls 'TLS.ClientParams' that will validate server and use tls connection
-- while providing client's own certificate as well. suitable for connecting server which
-- validate clients.
--
-- Also only accept v3 certificate.
--
makeClientParams' :: FilePath       -- ^ public certificate (X.509 format).
                  -> [FilePath]     -- ^ chain certificates (X.509 format).
                                    --   the root of your certificate chain should be
                                    --   already trusted by server, or tls will fail.
                  -> FilePath       -- ^ private key associated.
                  -> TrustedCAStore -- ^ trusted certificates.
                  -> IO TLS.ClientParams
makeClientParams' pub certs priv tca = do
    p <- makeClientParams tca
    -- load the client credential and attach it to the shared params;
    -- dies (via 'error') if the credential files cannot be loaded
    c <- TLS.credentialLoadX509Chain pub certs priv
    case c of
        Right c' ->
            return p
                { TLS.clientShared = (TLS.clientShared p)
                    { TLS.sharedCredentials = TLS.Credentials [c']
                    }
                }
        Left err -> error err
-- | make a simple tls 'TLS.ServerParams' without validating client's certificate.
--
makeServerParams :: FilePath   -- ^ public certificate (X.509 format).
                 -> [FilePath] -- ^ chain certificates (X.509 format).
                               --   the root of your certificate chain should be
                               --   already trusted by client, or tls will fail.
                 -> FilePath   -- ^ private key associated.
                 -> IO TLS.ServerParams
makeServerParams pub certs priv = do
    c <- TLS.credentialLoadX509Chain pub certs priv
    case c of
        Right c'@(X509.CertificateChain c'', _) ->
            return def
                { TLS.serverCACertificates = c''
                , TLS.serverShared = def
                    { TLS.sharedCredentials = TLS.Credentials [c']
                    }
                , TLS.serverSupported = def { TLS.supportedCiphers = TLS.ciphersuite_strong }
                }
        Left err -> error err
-- | make a tls 'TLS.ServerParams' that also validating client's certificate.
--
makeServerParams' :: FilePath       -- ^ public certificate (X.509 format).
                  -> [FilePath]     -- ^ chain certificates (X.509 format).
                  -> FilePath       -- ^ private key associated.
                  -> TrustedCAStore -- ^ server will use these certificates to validate clients.
                  -> IO TLS.ServerParams
makeServerParams' pub certs priv tca = do
    caStore <- makeCAStore tca
    p <- makeServerParams pub certs priv
    -- extend the non-validating params: demand a client certificate
    -- and check it against the chosen CA store
    return p
        { TLS.serverWantClientCert = True
        , TLS.serverShared = (TLS.serverShared p)
            { TLS.sharedCAStore = caStore
            }
        }
| didi-FP/tcp-streams | Data/TLSSetting.hs | bsd-3-clause | 6,056 | 0 | 16 | 1,872 | 838 | 469 | 369 | 82 | 2 |
-- Copyright 2013 Kevin Backhouse.
module TestMonoid2 ( instanceTest ) where
import Control.Monad.ST2
import Control.Monad.MultiPass
import Control.Monad.MultiPass.Instrument.Monoid2
import Control.Monad.MultiPass.Utils.InstanceTest
import Data.Monoid
-- This test checks that all the necessary instances have been
-- defined. Its only purpose is to check that there are no compile
-- errors, so it does not need to be executed.
instanceTest :: ST2 r w ()
instanceTest =
    do instanceTest1
       instanceTest2

-- | Instantiate the instrument at the 'Any' monoid.
instanceTest1 :: ST2 r w ()
instanceTest1 = run instanceTestBody1

instanceTestBody1 :: TestInstrument2 (Monoid2 Any r w) r w
instanceTestBody1 = testInstrument2

-- | Instantiate the instrument at the 'All' monoid.
instanceTest2 :: ST2 r w ()
instanceTest2 = run instanceTestBody2

instanceTestBody2 :: TestInstrument2 (Monoid2 All r w) r w
instanceTestBody2 = testInstrument2
| kevinbackhouse/Control-Monad-MultiPass | tests/TestMonoid2.hs | bsd-3-clause | 838 | 0 | 7 | 124 | 169 | 95 | 74 | 18 | 1 |
{-# LANGUAGE CPP #-}
module TcSimplify(
simplifyInfer, InferMode(..),
growThetaTyVars,
simplifyAmbiguityCheck,
simplifyDefault,
simplifyTop, simplifyTopImplic,
simplifyInteractive,
solveEqualities, solveLocalEqualities, solveLocalEqualitiesX,
simplifyWantedsTcM,
tcCheckSatisfiability,
tcNormalise,
captureTopConstraints,
simpl_top,
promoteTyVar,
promoteTyVarSet,
-- For Rules we need these
solveWanteds, solveWantedsAndDrop,
approximateWC, runTcSDeriveds
) where
#include "HsVersions.h"
import GhcPrelude
import Bag
import Class ( Class, classKey, classTyCon )
import DynFlags
import Id ( idType, mkLocalId )
import Inst
import ListSetOps
import Name
import Outputable
import PrelInfo
import PrelNames
import TcErrors
import TcEvidence
import TcInteract
import TcCanonical ( makeSuperClasses, solveCallStack )
import TcMType as TcM
import TcRnMonad as TcM
import TcSMonad as TcS
import Constraint
import Predicate
import TcOrigin
import TcType
import Type
import TysWiredIn ( liftedRepTy )
import Unify ( tcMatchTyKi )
import Util
import Var
import VarSet
import UniqSet
import BasicTypes ( IntWithInf, intGtLimit )
import ErrUtils ( emptyMessages )
import qualified GHC.LanguageExtensions as LangExt
import Control.Monad
import Data.Foldable ( toList )
import Data.List ( partition )
import Data.List.NonEmpty ( NonEmpty(..) )
import Maybes ( isJust )
{-
*********************************************************************************
* *
* External interface *
* *
*********************************************************************************
-}
captureTopConstraints :: TcM a -> TcM (a, WantedConstraints)
-- (captureTopConstraints m) runs m, and returns the type constraints it
-- generates plus the constraints produced by static forms inside.
-- If it fails with an exception, it reports any insolubles
-- (out of scope variables) before doing so
--
-- captureTopConstraints is used exclusively by TcRnDriver at the top
-- level of a module.
--
-- Importantly, if captureTopConstraints propagates an exception, it
-- reports any insoluble constraints first, lest they be lost
-- altogether. This is important, because solveLocalEqualities (maybe
-- other things too) throws an exception without adding any error
-- messages; it just puts the unsolved constraints back into the
-- monad. See TcRnMonad Note [Constraints and errors]
-- #16376 is an example of what goes wrong if you don't do this.
--
-- NB: the caller should bring any environments into scope before
-- calling this, so that the reportUnsolved has access to the most
-- complete GlobalRdrEnv
captureTopConstraints thing_inside
  = do { -- Fresh ref to accumulate constraints arising from static forms,
         -- installed in the global env for the duration of thing_inside
         static_wc_var <- TcM.newTcRef emptyWC ;
       ; (mb_res, lie) <- TcM.updGblEnv (\env -> env { tcg_static_wc = static_wc_var } ) $
                          TcM.tryCaptureConstraints thing_inside
       ; stWC <- TcM.readTcRef static_wc_var

       -- See TcRnMonad Note [Constraints and errors]
       -- If the thing_inside threw an exception, but generated some insoluble
       -- constraints, report the latter before propagating the exception
       -- Otherwise they will be lost altogether
       ; case mb_res of
           Just res -> return (res, lie `andWC` stWC)
           Nothing  -> do { _ <- simplifyTop lie; failM } }
             -- This call to simplifyTop is the reason
             -- this function is here instead of TcRnMonad
             -- We call simplifyTop so that it does defaulting
             -- (esp of runtime-reps) before reporting errors
-- | Simplify a bag of top-level implication constraints.  Because every
-- input is an implication, 'simplifyTop' can produce no top-level
-- evidence bindings; we assert that and throw the (empty) bag away.
simplifyTopImplic :: Bag Implication -> TcM ()
simplifyTopImplic implics
  = do { binds <- simplifyTop (mkImplicWC implics)
         -- All inputs were implications, so no bindings can escape
       ; MASSERT2( isEmptyBag binds, ppr binds )
       ; return () }
simplifyTop :: WantedConstraints -> TcM (Bag EvBind)
-- Simplify top-level constraints
-- Usually these will be implications,
-- but when there is nothing to quantify we don't wrap
-- in a degenerate implication, so we do that here instead
simplifyTop wanteds
  = do { traceTc "simplifyTop {" $ text "wanted = " <+> ppr wanteds
         -- Run the solver; also collect any Safe Haskell overlap
         -- failures that were resolved but deemed unsafe.
         -- See Note [Safe Haskell Overlapping Instances Implementation]
       ; ((final_wc, unsafe_ol), binds1) <- runTcS $
            do { final_wc <- simpl_top wanteds
               ; unsafe_ol <- getSafeOverlapFailures
               ; return (final_wc, unsafe_ol) }
       ; traceTc "End simplifyTop }" empty

       ; binds2 <- reportUnsolved final_wc

       ; traceTc "reportUnsolved (unsafe overlapping) {" empty
       ; unless (isEmptyCts unsafe_ol) $ do {
           -- grab current error messages and clear, warnAllUnsolved will
           -- update error messages which we'll grab and then restore saved
           -- messages.
           ; errs_var  <- getErrsVar
           ; saved_msg <- TcM.readTcRef errs_var
           ; TcM.writeTcRef errs_var emptyMessages

           ; warnAllUnsolved $ WC { wc_simple = unsafe_ol
                                  , wc_impl = emptyBag }

           -- The warnings just produced become the "why unsafe" reason
           ; whyUnsafe <- fst <$> TcM.readTcRef errs_var
           ; TcM.writeTcRef errs_var saved_msg
           ; recordUnsafeInfer whyUnsafe
           }
       ; traceTc "reportUnsolved (unsafe overlapping) }" empty

         -- Bindings from solving plus bindings from error deferral
       ; return (evBindMapBinds binds1 `unionBags` binds2) }
-- | Type-check a thing that emits only equality constraints, solving
-- what we can and re-emitting what we cannot.  The thing_inside should
-- generally bump the TcLevel so that this run of the solver does not
-- affect anything lying around.
solveLocalEqualities :: String -> TcM a -> TcM a
solveLocalEqualities callsite thing_inside
  = do { (residual, result) <- solveLocalEqualitiesX callsite thing_inside
         -- Re-emit the leftovers *before* any failure, so they are
         -- not lost
       ; emitConstraints residual
         -- See Note [Fail fast if there are insoluble kind equalities]
       ; when (insolubleWC residual) failM
       ; return result }
{- Note [Fail fast if there are insoluble kind equalities]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Rather like in simplifyInfer, fail fast if there is an insoluble
constraint. Otherwise we'll just succeed in kind-checking a nonsense
type, with a cascade of follow-up errors.
For example polykinds/T12593, T15577, and many others.
Take care to ensure that you emit the insoluble constraints before
failing, because they are what will ultimately lead to the error
message!
-}
-- | Run a computation that emits equality constraints, solve as much as
-- possible, and hand back the unsolved residue together with the result.
-- The caller decides what to do with the residue.
solveLocalEqualitiesX :: String -> TcM a -> TcM (WantedConstraints, a)
solveLocalEqualitiesX callsite thing_inside
  = do { traceTc "solveLocalEqualitiesX {" (vcat [ text "Called from" <+> text callsite ])
       ; (res, emitted) <- captureConstraints thing_inside
       ; traceTc "solveLocalEqualities: running solver" (ppr emitted)
         -- Equality-only solving; no class defaulting here
       ; leftover <- runTcSEqualities (solveWanteds emitted)
       ; traceTc "solveLocalEqualitiesX end }" $
         text "residual_wanted =" <+> ppr leftover
       ; return (leftover, res) }
-- | Type-check a thing that emits only equality constraints, then
-- solve those constraints.  Fails outright if there is trouble.
-- Use this if you're not going to get another crack at solving
-- (because, e.g., you're checking a datatype declaration).
solveEqualities :: TcM a -> TcM a
solveEqualities thing_inside
  = checkNoErrs $   -- See Note [Fail fast on kind errors]
    do { tclvl <- TcM.getTcLevel
       ; traceTc "solveEqualities {" (text "level =" <+> ppr tclvl)
       ; (res, emitted) <- captureConstraints thing_inside
       ; traceTc "solveEqualities: running solver" $ text "wanted = " <+> ppr emitted
         -- Use simpl_top (rather than plain solveWanteds) so that we
         -- potentially default RuntimeRep vars to LiftedRep.
         -- This is needed to avoid #14991.
       ; residual <- runTcSEqualities (simpl_top emitted)
       ; traceTc "End solveEqualities }" empty
       ; reportAllUnsolved residual
       ; return res }
-- | Simplify top-level constraints, but without reporting any unsolved
-- constraints nor unsafe overlapping.
simpl_top :: WantedConstraints -> TcS WantedConstraints
    -- See Note [Top-level Defaulting Plan]
simpl_top wanteds
  = do { wc_first_go <- nestTcS (solveWantedsAndDrop wanteds)
                        -- This is where the main work happens
       ; dflags <- getDynFlags
       ; try_tyvar_defaulting dflags wc_first_go }
  where
    -- Stage 1: kind/tyvar defaulting of the free meta tyvars, then
    -- fall through to class defaulting
    try_tyvar_defaulting :: DynFlags -> WantedConstraints -> TcS WantedConstraints
    try_tyvar_defaulting dflags wc
      | isEmptyWC wc
      = return wc
      | insolubleWC wc
      , gopt Opt_PrintExplicitRuntimeReps dflags -- See Note [Defaulting insolubles]
      = try_class_defaulting wc
      | otherwise
      = do { free_tvs <- TcS.zonkTyCoVarsAndFVList (tyCoVarsOfWCList wc)
           ; let meta_tvs = filter (isTyVar <&&> isMetaTyVar) free_tvs
                   -- zonkTyCoVarsAndFV: the wc_first_go is not yet zonked
                   -- filter isMetaTyVar: we might have runtime-skolems in GHCi,
                   -- and we definitely don't want to try to assign to those!
                   -- The isTyVar is needed to weed out coercion variables

           ; defaulted <- mapM defaultTyVarTcS meta_tvs -- Has unification side effects
           ; if or defaulted
             then do { wc_residual <- nestTcS (solveWanteds wc)
                       -- See Note [Must simplify after defaulting]
                     ; try_class_defaulting wc_residual }
             else try_class_defaulting wc }     -- No defaulting took place

    -- Stage 2: apply the class-defaulting rules repeatedly until
    -- nothing more happens, then move on to CallStack defaulting
    try_class_defaulting :: WantedConstraints -> TcS WantedConstraints
    try_class_defaulting wc
      | isEmptyWC wc || insolubleWC wc -- See Note [Defaulting insolubles]
      = return wc
      | otherwise  -- See Note [When to do type-class defaulting]
      = do { something_happened <- applyDefaultingRules wc
                                   -- See Note [Top-level Defaulting Plan]
           ; if something_happened
             then do { wc_residual <- nestTcS (solveWantedsAndDrop wc)
                     ; try_class_defaulting wc_residual }
             -- See Note [Overview of implicit CallStacks] in TcEvidence
             else try_callstack_defaulting wc }

    -- Stage 3: default any leftover CallStack constraints
    try_callstack_defaulting :: WantedConstraints -> TcS WantedConstraints
    try_callstack_defaulting wc
      | isEmptyWC wc
      = return wc
      | otherwise
      = defaultCallStacks wc
-- | Default any remaining @CallStack@ constraints to empty @CallStack@s.
defaultCallStacks :: WantedConstraints -> TcS WantedConstraints
-- See Note [Overview of implicit CallStacks] in TcEvidence
defaultCallStacks wanteds
  = do simples <- handle_simples (wc_simple wanteds)
       mb_implics <- mapBagM handle_implic (wc_impl wanteds)
       return (wanteds { wc_simple = simples
                       , wc_impl = catBagMaybes mb_implics })
  where
    -- Default each simple constraint; solved ones are dropped
    handle_simples simples
      = catBagMaybes <$> mapBagM defaultCallStack simples

    handle_implic :: Implication -> TcS (Maybe Implication)
    -- The Maybe is because solving the CallStack constraint
    -- may well allow us to discard the implication entirely
    handle_implic implic
      | isSolvedStatus (ic_status implic)
      = return (Just implic)
      | otherwise
      = do { wanteds <- setEvBindsTcS (ic_binds implic) $
                        -- defaultCallStack sets a binding, so
                        -- we must set the correct binding group
                        defaultCallStacks (ic_wanted implic)
           ; setImplicationStatus (implic { ic_wanted = wanteds }) }

    -- Solve a single CallStack constraint with the empty call stack;
    -- anything that is not a CallStack constraint is kept unchanged
    defaultCallStack ct
      | ClassPred cls tys <- classifyPredType (ctPred ct)
      , Just {} <- isCallStackPred cls tys
      = do { solveCallStack (ctEvidence ct) EvCsEmpty
           ; return Nothing }

    defaultCallStack ct
      = return (Just ct)
{- Note [Fail fast on kind errors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
solveEqualities is used to solve kind equalities when kind-checking
user-written types. If solving fails we should fail outright, rather
than just accumulate an error message, for two reasons:
* A kind-bogus type signature may cause a cascade of knock-on
errors if we let it pass
* More seriously, we don't have a convenient term-level place to add
deferred bindings for unsolved kind-equality constraints, so we
don't build evidence bindings (by using reportAllUnsolved). That
means that we'll be left with a type that has coercion holes
in it, something like
<type> |> co-hole
where co-hole is not filled in. Eeek! That un-filled-in
hole actually causes GHC to crash with "fvProv falls into a hole"
See #11563, #11520, #11516, #11399
So it's important to use 'checkNoErrs' here!
Note [When to do type-class defaulting]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In GHC 7.6 and 7.8.2, we did type-class defaulting only if insolubleWC
was false, on the grounds that defaulting can't help solve insoluble
constraints. But if we *don't* do defaulting we may report a whole
lot of errors that would be solved by defaulting; these errors are
quite spurious because fixing the single insoluble error means that
defaulting happens again, which makes all the other errors go away.
This is jolly confusing: #9033.
So it seems better to always do type-class defaulting.
However, always doing defaulting does mean that we'll do it in
situations like this (#5934):
run :: (forall s. GenST s) -> Int
run = fromInteger 0
We don't unify the return type of fromInteger with the given function
type, because the latter involves foralls. So we're left with
(Num alpha, alpha ~ (forall s. GenST s) -> Int)
Now we do defaulting, get alpha := Integer, and report that we can't
match Integer with (forall s. GenST s) -> Int. That's not totally
stupid, but perhaps a little strange.
Another potential alternative would be to suppress *all* non-insoluble
errors if there are *any* insoluble errors, anywhere, but that seems
too drastic.
Note [Must simplify after defaulting]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We may have a deeply buried constraint
(t:*) ~ (a:Open)
which we couldn't solve because of the kind incompatibility, and 'a' is free.
Then when we default 'a' we can solve the constraint. And we want to do
that before starting in on type classes. We MUST do it before reporting
errors, because it isn't an error! #7967 was due to this.
Note [Top-level Defaulting Plan]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We have considered two design choices for where/when to apply defaulting.
(i) Do it in SimplCheck mode only /whenever/ you try to solve some
simple constraints, maybe deep inside the context of implications.
This used to be the case in GHC 7.4.1.
(ii) Do it in a tight loop at simplifyTop, once all other constraints have
finished. This is the current story.
Option (i) had many disadvantages:
a) Firstly, it was deep inside the actual solver.
b) Secondly, it was dependent on the context (Infer a type signature,
or Check a type signature, or Interactive) since we did not want
to always start defaulting when inferring (though there is an exception to
this, see Note [Default while Inferring]).
c) It plainly did not work. Consider typecheck/should_compile/DfltProb2.hs:
f :: Int -> Bool
f x = const True (\y -> let w :: a -> a
w a = const a (y+1)
in w y)
We will get an implication constraint (for beta the type of y):
[untch=beta] forall a. 0 => Num beta
which we really cannot default /while solving/ the implication, since beta is
untouchable.
Instead our new defaulting story is to pull defaulting out of the solver loop and
go with option (ii), implemented at SimplifyTop. Namely:
- First, have a go at solving the residual constraint of the whole
program
- Try to approximate it with a simple constraint
- Figure out derived defaulting equations for that simple constraint
- Go round the loop again if you did manage to get some equations
Now, that has to do with class defaulting. However there exists type variable /kind/
defaulting. Again this is done at the top-level and the plan is:
- At the top-level, once you had a go at solving the constraint, do
figure out /all/ the touchable unification variables of the wanted constraints.
- Apply defaulting to their kinds
More details in Note [DefaultTyVar].
Note [Safe Haskell Overlapping Instances]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In Safe Haskell, we apply an extra restriction to overlapping instances. The
motive is to prevent untrusted code provided by a third-party, changing the
behavior of trusted code through type-classes. This is due to the global and
implicit nature of type-classes that can hide the source of the dictionary.
Another way to state this is: if a module M compiles without importing another
module N, changing M to import N shouldn't change the behavior of M.
Overlapping instances with type-classes can violate this principle. However,
overlapping instances aren't always unsafe. They are just unsafe when the most
selected dictionary comes from untrusted code (code compiled with -XSafe) and
overlaps instances provided by other modules.
In particular, in Safe Haskell at a call site with overlapping instances, we
apply the following rule to determine if it is an 'unsafe' overlap:
1) Most specific instance, I1, defined in an `-XSafe` compiled module.
2) I1 is an orphan instance or a MPTC.
3) At least one overlapped instance, Ix, is both:
A) from a different module than I1
B) Ix is not marked `OVERLAPPABLE`
This is a slightly involved heuristic, but captures the situation of an
imported module N changing the behavior of existing code. For example, if
condition (2) isn't violated, then the module author M must depend either on a
type-class or type defined in N.
Secondly, when should these heuristics be enforced? We enforced them when the
type-class method call site is in a module marked `-XSafe` or `-XTrustworthy`.
This allows `-XUnsafe` modules to operate without restriction, and for Safe
Haskell inference to infer modules with unsafe overlaps as unsafe.
One alternative design would be to also consider if an instance was imported as
a `safe` import or not and only apply the restriction to instances imported
safely. However, since instances are global and can be imported through more
than one path, this alternative doesn't work.
Note [Safe Haskell Overlapping Instances Implementation]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
How is this implemented? It's complicated! So we'll step through it all:
1) `InstEnv.lookupInstEnv` -- Performs instance resolution, so this is where
we check if a particular type-class method call is safe or unsafe. We do this
through the return type, `ClsInstLookupResult`, where the last parameter is a
list of instances that are unsafe to overlap. When the method call is safe,
the list is null.
2) `TcInteract.matchClassInst` -- This module drives the instance resolution
/ dictionary generation. The return type is `ClsInstResult`, which either
says no instance matched, or one found, and if it was a safe or unsafe
overlap.
3) `TcInteract.doTopReactDict` -- Takes a dictionary / class constraint and
tries to resolve it by calling (in part) `matchClassInst`. The resolving
mechanism has a work list (of constraints) that it process one at a time. If
the constraint can't be resolved, it's added to an inert set. When compiling
an `-XSafe` or `-XTrustworthy` module, we follow this approach as we know
compilation should fail. These are handled as normal constraint resolution
failures from here-on (see step 6).
Otherwise, we may be inferring safety (or using `-Wunsafe`), and
compilation should succeed, but print warnings and/or mark the compiled module
as `-XUnsafe`. In this case, we call `insertSafeOverlapFailureTcS` which adds
the unsafe (but resolved!) constraint to the `inert_safehask` field of
`InertCans`.
4) `TcSimplify.simplifyTop`:
* Call simpl_top, the top-level function for driving the simplifier for
constraint resolution.
* Once finished, call `getSafeOverlapFailures` to retrieve the
list of overlapping instances that were successfully resolved,
but unsafe. Remember, this is only applicable for generating warnings
(`-Wunsafe`) or inferring a module unsafe. `-XSafe` and `-XTrustworthy`
cause compilation failure by not resolving the unsafe constraint at all.
* For unresolved constraints (all types), call `TcErrors.reportUnsolved`,
while for resolved but unsafe overlapping dictionary constraints, call
`TcErrors.warnAllUnsolved`. Both functions convert constraints into a
warning message for the user.
* In the case of `warnAllUnsolved` for resolved, but unsafe
dictionary constraints, we collect the generated warning
message (pop it) and call `TcRnMonad.recordUnsafeInfer` to
mark the module we are compiling as unsafe, passing the
warning message along as the reason.
5) `TcErrors.*Unsolved` -- Generates error messages for constraints by
actually calling `InstEnv.lookupInstEnv` again! Yes, confusing, but all we
know is the constraint that is unresolved or unsafe. For dictionary, all we
know is that we need a dictionary of type C, but not what instances are
available and how they overlap. So we once again call `lookupInstEnv` to
figure that out so we can generate a helpful error message.
6) `TcRnMonad.recordUnsafeInfer` -- Save the unsafe result and reason in an
IORef called `tcg_safeInfer`.
7) `HscMain.tcRnModule'` -- Reads `tcg_safeInfer` after type-checking, calling
`HscMain.markUnsafeInfer` (passing the reason along) when safe-inference
failed.
Note [No defaulting in the ambiguity check]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When simplifying constraints for the ambiguity check, we use
solveWantedsAndDrop, not simpl_top, so that we do no defaulting.
#11947 was an example:
f :: Num a => Int -> Int
This is ambiguous of course, but we don't want to default the
(Num alpha) constraint to (Num Int)! Doing so gives a defaulting
warning, but no error.
Note [Defaulting insolubles]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If a set of wanteds is insoluble, we have no hope of accepting the
program. Yet we do not stop constraint solving, etc., because we may
simplify the wanteds to produce better error messages. So, once
we have an insoluble constraint, everything we do is just about producing
helpful error messages.
Should we default in this case or not? Let's look at an example (tcfail004):
(f,g) = (1,2,3)
With defaulting, we get a conflict between (a0,b0) and (Integer,Integer,Integer).
Without defaulting, we get a conflict between (a0,b0) and (a1,b1,c1). I (Richard)
find the latter more helpful. Several other test cases (e.g. tcfail005) suggest
similarly. So: we should not do class defaulting with insolubles.
On the other hand, RuntimeRep-defaulting is different. Witness tcfail078:
f :: Integer i => i
f = 0
Without RuntimeRep-defaulting, GHC suggests that Integer should have kind
TYPE r0 -> Constraint and then complains that r0 is actually untouchable
(presumably, because it can't be sure if `Integer i` entails an equality).
If we default, we are told of a clash between (* -> Constraint) and Constraint.
The latter seems far better, suggesting we *should* do RuntimeRep-defaulting
even on insolubles.
But, evidently, not always. Witness UnliftedNewtypesInfinite:
newtype Foo = FooC (# Int#, Foo #)
This should fail with an occurs-check error on the kind of Foo (with -XUnliftedNewtypes).
If we default RuntimeRep-vars, we get
Expecting a lifted type, but ‘(# Int#, Foo #)’ is unlifted
which is just plain wrong.
Conclusion: we should do RuntimeRep-defaulting on insolubles only when the user does not
want to hear about RuntimeRep stuff -- that is, when -fprint-explicit-runtime-reps
is not set.
-}
------------------
-- | Solve the constraints arising from the ambiguity check for a type.
-- Reports errors for unsolved constraints, except that under
-- -XAllowAmbiguousTypes only insoluble ones are reported.
simplifyAmbiguityCheck :: Type -> WantedConstraints -> TcM ()
simplifyAmbiguityCheck ty wanteds
  = do { traceTc "simplifyAmbiguityCheck {" (text "type = " <+> ppr ty $$ text "wanted = " <+> ppr wanteds)
         -- NB: no defaulting!  See Note [No defaulting in the ambiguity check]
       ; (residual, _) <- runTcS (solveWantedsAndDrop wanteds)
       ; traceTc "End simplifyAmbiguityCheck }" empty

         -- Normally report all errors; but with -XAllowAmbiguousTypes
         -- report only insoluble ones, since they represent genuinely
         -- inaccessible code
       ; ambiguity_ok <- xoptM LangExt.AllowAmbiguousTypes
       ; traceTc "reportUnsolved(ambig) {" empty
       ; unless (ambiguity_ok && not (insolubleWC residual)) $
         discardResult (reportUnsolved residual)
       ; traceTc "reportUnsolved(ambig) }" empty

       ; return () }
------------------
-- | Simplify constraints arising from a GHCi interaction; just a
-- traced wrapper around 'simplifyTop'.
simplifyInteractive :: WantedConstraints -> TcM (Bag EvBind)
simplifyInteractive wanteds
  = do { traceTc "simplifyInteractive" empty
       ; simplifyTop wanteds }
------------------
-- | Check that a candidate default theta is soluble; reports all
-- unsolved constraints as errors if it is not.
simplifyDefault :: ThetaType  -- Wanted; has no type variables in it
                -> TcM ()     -- Succeeds if the constraint is soluble
simplifyDefault theta
  = do { traceTc "simplifyDefault" empty
       ; wcs <- newWanteds DefaultOrigin theta
       ; residual <- runTcSDeriveds $ solveWantedsAndDrop (mkSimpleWC wcs)
       ; reportAllUnsolved residual
       ; return () }
------------------
tcCheckSatisfiability :: Bag EvVar -> TcM Bool
-- Return True if satisfiable, False if definitely contradictory
tcCheckSatisfiability given_ids
  = do { lcl_env <- TcM.getLclEnv
       ; let given_loc = mkGivenLoc topTcLevel UnkSkol lcl_env
       ; (res, _ev_binds) <- runTcS $
             do { traceTcS "checkSatisfiability {" (ppr given_ids)
                ; let given_cts = mkGivens given_loc (bagToList given_ids)
                      -- See Note [Superclasses and satisfiability]
                ; solveSimpleGivens given_cts
                ; insols <- getInertInsols
                  -- Deliberate shadowing: replace insols with the result
                  -- of one round of superclass expansion if needed
                ; insols <- try_harder insols
                ; traceTcS "checkSatisfiability }" (ppr insols)
                  -- Satisfiable iff no insoluble constraints were found
                ; return (isEmptyBag insols) }
       ; return res }
  where
    try_harder :: Cts -> TcS Cts
    -- Maybe we have to search up the superclass chain to find
    -- an unsatisfiable constraint. Example: pmcheck/T3927b.
    -- At the moment we try just once
    try_harder insols
      | not (isEmptyBag insols) -- We've found that it's definitely unsatisfiable
      = return insols           -- Hurrah -- stop now.
      | otherwise
      = do { pending_given <- getPendingGivenScs
           ; new_given <- makeSuperClasses pending_given
           ; solveSimpleGivens new_given
           ; getInertInsols }
-- | Normalise a type as much as possible using the given constraints.
-- See @Note [tcNormalise]@.
tcNormalise :: Bag EvVar -> Type -> TcM Type
tcNormalise given_ids ty
  = do { lcl_env <- TcM.getLclEnv
       ; let given_loc = mkGivenLoc topTcLevel UnkSkol lcl_env
         -- Wrap the type to normalise in a hole constraint, so that
         -- the simple-wanted solver will rewrite it for us
       ; wanted_ct <- mk_wanted_ct
       ; (res, _ev_binds) <- runTcS $
             do { traceTcS "tcNormalise {" (ppr given_ids)
                ; let given_cts = mkGivens given_loc (bagToList given_ids)
                ; solveSimpleGivens given_cts
                ; wcs <- solveSimpleWanteds (unitBag wanted_ct)
                  -- It's an invariant that this wc_simple will always be
                  -- a singleton Ct, since that's what we fed in as input.
                ; let ty' = case bagToList (wc_simple wcs) of
                              (ct:_) -> ctEvPred (ctEvidence ct)
                              cts -> pprPanic "tcNormalise" (ppr cts)
                ; traceTcS "tcNormalise }" (ppr ty')
                ; pure ty' }
       ; return res }
  where
    -- Build the CHoleCan carrying 'ty'; the name is internal only
    mk_wanted_ct :: TcM Ct
    mk_wanted_ct = do
      let occ = mkVarOcc "$tcNorm"
      name <- newSysName occ
      let ev = mkLocalId name ty
      newHoleCt ExprHole ev ty
{- Note [Superclasses and satisfiability]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Expand superclasses before starting, because (Int ~ Bool), has
(Int ~~ Bool) as a superclass, which in turn has (Int ~N# Bool)
as a superclass, and it's the latter that is insoluble. See
Note [The equality types story] in TysPrim.
If we fail to prove unsatisfiability we (arbitrarily) try just once to
find superclasses, using try_harder. Reason: we might have a type
signature
f :: F op (Implements push) => ..
where F is a type function. This happened in #3972.
We could do more than once but we'd have to have /some/ limit: in the
recursive case, we would go on forever in the common case where
the constraints /are/ satisfiable (#10592 comment:12!).
For straightforward situations without type functions the try_harder
step does nothing.
Note [tcNormalise]
~~~~~~~~~~~~~~~~~~
tcNormalise is a rather atypical entrypoint to the constraint solver. Whereas
most invocations of the constraint solver are intended to simplify a set of
constraints or to decide if a particular set of constraints is satisfiable,
the purpose of tcNormalise is to take a type, plus some local constraints, and
normalise the type as much as possible with respect to those constraints.
It does *not* reduce type or data family applications or look through newtypes.
Why is this useful? As one example, when coverage-checking an EmptyCase
expression, it's possible that the type of the scrutinee will only reduce
if some local equalities are solved for. See "Wrinkle: Local equalities"
in Note [Type normalisation] in Check.
To accomplish its stated goal, tcNormalise first feeds the local constraints
into solveSimpleGivens, then stuffs the argument type in a CHoleCan, and feeds
that singleton Ct into solveSimpleWanteds, which reduces the type in the
CHoleCan as much as possible with respect to the local given constraints. When
solveSimpleWanteds is finished, we dig out the type from the CHoleCan and
return that.
***********************************************************************************
* *
* Inference
* *
***********************************************************************************
Note [Inferring the type of a let-bound variable]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
f x = rhs
To infer f's type we do the following:
* Gather the constraints for the RHS with ambient level *one more than*
the current one. This is done by the call
pushLevelAndCaptureConstraints (tcMonoBinds...)
in TcBinds.tcPolyInfer
* Call simplifyInfer to simplify the constraints and decide what to
quantify over. We pass in the level used for the RHS constraints,
here called rhs_tclvl.
This ensures that the implication constraint we generate, if any,
has a strictly-increased level compared to the ambient level outside
the let binding.
-}
-- | How should we choose which constraints to quantify over?
-- Used by 'simplifyInfer' (and ultimately decideQuantification).
data InferMode = ApplyMR          -- ^ Apply the monomorphism restriction,
                                  -- never quantifying over any constraints
               | EagerDefaulting  -- ^ See Note [TcRnExprMode] in TcRnDriver,
                                  -- the :type +d case; this mode refuses
                                  -- to quantify over any defaultable constraint
               | NoRestrictions   -- ^ Quantify over any constraint that
                                  -- satisfies TcType.pickQuantifiablePreds
-- Debug rendering; the text mirrors the constructor names exactly.
instance Outputable InferMode where
  ppr mode = text $ case mode of
                      ApplyMR         -> "ApplyMR"
                      EagerDefaulting -> "EagerDefaulting"
                      NoRestrictions  -> "NoRestrictions"
-- Simplify the constraints of a binding group whose type is being
-- inferred, and decide which type variables and constraints to
-- generalise over.  See Note [Inferring the type of a let-bound variable].
simplifyInfer :: TcLevel               -- Used when generating the constraints
              -> InferMode
              -> [TcIdSigInst]         -- Any signatures (possibly partial)
              -> [(Name, TcTauType)]   -- Variables to be generalised,
                                       -- and their tau-types
              -> WantedConstraints
              -> TcM ([TcTyVar],    -- Quantify over these type variables
                      [EvVar],      -- ... and these constraints (fully zonked)
                      TcEvBinds,    -- ... binding these evidence variables
                      WantedConstraints, -- Residual as-yet-unsolved constraints
                      Bool)         -- True <=> the residual constraints are insoluble

simplifyInfer rhs_tclvl infer_mode sigs name_taus wanteds
  | isEmptyWC wanteds   -- Fast path: nothing to solve, just quantify
  = do { -- When quantifying, we want to preserve any order of variables as they
         -- appear in partial signatures. cf. decideQuantifiedTyVars
         let psig_tv_tys = [ mkTyVarTy tv | sig <- partial_sigs
                                          , (_,tv) <- sig_inst_skols sig ]
             psig_theta  = [ pred | sig <- partial_sigs
                                  , pred <- sig_inst_theta sig ]

       ; dep_vars <- candidateQTyVarsOfTypes (psig_tv_tys ++ psig_theta ++ map snd name_taus)
       ; qtkvs <- quantifyTyVars dep_vars
       ; traceTc "simplifyInfer: empty WC" (ppr name_taus $$ ppr qtkvs)
       ; return (qtkvs, [], emptyTcEvBinds, emptyWC, False) }

  | otherwise
  = do { traceTc "simplifyInfer {" $ vcat
              [ text "sigs =" <+> ppr sigs
              , text "binds =" <+> ppr name_taus
              , text "rhs_tclvl =" <+> ppr rhs_tclvl
              , text "infer_mode =" <+> ppr infer_mode
              , text "(unzonked) wanted =" <+> ppr wanteds
              ]

       ; let psig_theta = concatMap sig_inst_theta partial_sigs

       -- First do full-blown solving
       -- NB: we must gather up all the bindings from doing
       -- this solving; hence (runTcSWithEvBinds ev_binds_var).
       -- And note that since there are nested implications,
       -- calling solveWanteds will side-effect their evidence
       -- bindings, so we can't just revert to the input
       -- constraint.
       ; tc_env <- TcM.getEnv
       ; ev_binds_var <- TcM.newTcEvBinds
       ; psig_theta_vars <- mapM TcM.newEvVar psig_theta
       ; wanted_transformed_incl_derivs
            <- setTcLevel rhs_tclvl $
               runTcSWithEvBinds ev_binds_var $
               do { let loc = mkGivenLoc rhs_tclvl UnkSkol $
                              env_lcl tc_env
                        psig_givens = mkGivens loc psig_theta_vars
                  ; _ <- solveSimpleGivens psig_givens
                         -- See Note [Add signature contexts as givens]
                  ; solveWanteds wanteds }

       -- Find quant_pred_candidates, the predicates that
       -- we'll consider quantifying over
       -- NB1: wanted_transformed does not include anything provable from
       --      the psig_theta; it's just the extra bit
       -- NB2: We do not do any defaulting when inferring a type, this can lead
       --      to less polymorphic types, see Note [Default while Inferring]
       ; wanted_transformed_incl_derivs <- TcM.zonkWC wanted_transformed_incl_derivs
       ; let definite_error = insolubleWC wanted_transformed_incl_derivs
                              -- See Note [Quantification with errors]
                              -- NB: must include derived errors in this test,
                              --     hence "incl_derivs"
             wanted_transformed = dropDerivedWC wanted_transformed_incl_derivs
             quant_pred_candidates
               | definite_error = []
               | otherwise      = ctsPreds (approximateWC False wanted_transformed)

       -- Decide what type variables and constraints to quantify
       -- NB: quant_pred_candidates is already fully zonked
       -- NB: bound_theta are constraints we want to quantify over,
       --     including the psig_theta, which we always quantify over
       -- NB: bound_theta are fully zonked
       ; (qtvs, bound_theta, co_vars) <- decideQuantification infer_mode rhs_tclvl
                                                name_taus partial_sigs
                                                quant_pred_candidates
       ; bound_theta_vars <- mapM TcM.newEvVar bound_theta

       -- We must produce bindings for the psig_theta_vars, because we may have
       -- used them in evidence bindings constructed by solveWanteds earlier
       -- Easiest way to do this is to emit them as new Wanteds (#14643)
       ; ct_loc <- getCtLocM AnnOrigin Nothing
       ; let psig_wanted = [ CtWanted { ctev_pred = idType psig_theta_var
                                      , ctev_dest = EvVarDest psig_theta_var
                                      , ctev_nosh = WDeriv
                                      , ctev_loc = ct_loc }
                           | psig_theta_var <- psig_theta_vars ]

       -- Now construct the residual constraint
       ; residual_wanted <- mkResidualConstraints rhs_tclvl ev_binds_var
                                name_taus co_vars qtvs bound_theta_vars
                                (wanted_transformed `andWC` mkSimpleWC psig_wanted)

       -- All done!
       ; traceTc "} simplifyInfer/produced residual implication for quantification" $
         vcat [ text "quant_pred_candidates =" <+> ppr quant_pred_candidates
              , text "psig_theta =" <+> ppr psig_theta
              , text "bound_theta =" <+> ppr bound_theta
              , text "qtvs =" <+> ppr qtvs
              , text "definite_error =" <+> ppr definite_error ]

       ; return ( qtvs, bound_theta_vars, TcEvBinds ev_binds_var
                , residual_wanted, definite_error ) }
         -- NB: bound_theta_vars must be fully zonked
  where
    partial_sigs = filter isPartialSig sigs
--------------------
mkResidualConstraints :: TcLevel -> EvBindsVar
                      -> [(Name, TcTauType)]
                      -> VarSet -> [TcTyVar] -> [EvVar]
                      -> WantedConstraints -> TcM WantedConstraints
-- Emit the remaining constraints from the RHS.
-- See Note [Emitting the residual implication in simplifyInfer]
mkResidualConstraints rhs_tclvl ev_binds_var
                        name_taus co_vars qtvs full_theta_vars wanteds
  | isEmptyWC wanteds
  = return wanteds
  | otherwise
  = do { wanted_simple <- TcM.zonkSimples (wc_simple wanteds)
         -- Constraints binding CoVars free in the inferred type must
         -- stay outside the implication, so that their bindings scope
         -- over the whole binding group; see the Note above
       ; let (outer_simple, inner_simple) = partitionBag is_mono wanted_simple
             is_mono ct = isWantedCt ct && ctEvId ct `elemVarSet` co_vars

       ; _ <- promoteTyVarSet (tyCoVarsOfCts outer_simple)

       ; let inner_wanted = wanteds { wc_simple = inner_simple }
       ; implics <- if isEmptyWC inner_wanted
                    then return emptyBag
                    else do implic1 <- newImplication
                            return $ unitBag $
                                     implic1 { ic_tclvl = rhs_tclvl
                                             , ic_skols = qtvs
                                             , ic_telescope = Nothing
                                             , ic_given = full_theta_vars
                                             , ic_wanted = inner_wanted
                                             , ic_binds = ev_binds_var
                                             , ic_no_eqs = False
                                             , ic_info = skol_info }
       ; return (WC { wc_simple = outer_simple
                    , wc_impl = implics })}
  where
    full_theta = map idType full_theta_vars
    skol_info = InferSkol [ (name, mkSigmaTy [] full_theta ty)
                          | (name, ty) <- name_taus ]
                -- Don't add the quantified variables here, because
                -- they are also bound in ic_skols and we want them
                -- to be tidied uniformly
--------------------
-- | The predicate of every constraint in the bag, in bag order.
ctsPreds :: Cts -> [PredType]
ctsPreds = map (ctEvPred . ctEvidence) . bagToList
{- Note [Emitting the residual implication in simplifyInfer]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
f = e
where f's type is inferred to be something like (a, Proxy k (Int |> co))
and we have an as-yet-unsolved, or perhaps insoluble, constraint
[W] co :: Type ~ k
We can't form types like (forall co. blah), so we can't generalise over
the coercion variable, and hence we can't generalise over things free in
its kind, in the case 'k'. But we can still generalise over 'a'. So
we'll generalise to
f :: forall a. (a, Proxy k (Int |> co))
Now we do NOT want to form the residual implication constraint
forall a. [W] co :: Type ~ k
because then co's eventual binding (which will be a value binding if we
use -fdefer-type-errors) won't scope over the entire binding for 'f' (whose
type mentions 'co'). Instead, just as we don't generalise over 'co', we
should not bury its constraint inside the implication. Instead, we must
put it outside.
That is the reason for the partitionBag in mkResidualConstraints,
which takes the CoVars free in the inferred type, and pulls their
constraints out. (NB: this set of CoVars should be closed-over-kinds.)
All rather subtle; see #14584.
Note [Add signature contexts as givens]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this (#11016):
f2 :: (?x :: Int) => _
f2 = ?x
or this
f3 :: a ~ Bool => (a, _)
f3 = (True, False)
or this
f4 :: (Ord a, _) => a -> Bool
f4 x = x==x
We'll use plan InferGen because there are holes in the type. But:
* For f2 we want to have the (?x :: Int) constraint floating around
so that the functional dependencies kick in. Otherwise the
occurrence of ?x on the RHS produces constraint (?x :: alpha), and
we won't unify alpha:=Int.
* For f3 we want the (a ~ Bool) available to solve the wanted (a ~ Bool)
in the RHS
* For f4 we want to use the (Ord a) in the signature to solve the Eq a
constraint.
Solution: in simplifyInfer, just before simplifying the constraints
gathered from the RHS, add Given constraints for the context of any
type signatures.
************************************************************************
* *
Quantification
* *
************************************************************************
Note [Deciding quantification]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the monomorphism restriction does not apply, then we quantify as follows:
* Step 1. Take the global tyvars, and "grow" them using the equality
constraints
E.g. if x:alpha is in the environment, and alpha ~ [beta] (which can
happen because alpha is untouchable here) then do not quantify over
beta, because alpha fixes beta, and beta is effectively free in
the environment too
We also account for the monomorphism restriction; if it applies,
add the free vars of all the constraints.
Result is mono_tvs; we will not quantify over these.
* Step 2. Default any non-mono tyvars (i.e ones that are definitely
not going to become further constrained), and re-simplify the
candidate constraints.
Motivation for re-simplification (#7857): imagine we have a
constraint (C (a->b)), where 'a :: TYPE l1' and 'b :: TYPE l2' are
not free in the envt, and instance forall (a::*) (b::*). (C a) => C
(a -> b) The instance doesn't match while l1,l2 are polymorphic, but
it will match when we default them to LiftedRep.
This is all very tiresome.
* Step 3: decide which variables to quantify over, as follows:
- Take the free vars of the tau-type (zonked_tau_tvs) and "grow"
them using all the constraints. These are tau_tvs_plus
- Use quantifyTyVars to quantify over (tau_tvs_plus - mono_tvs), being
careful to close over kinds, and to skolemise the quantified tyvars.
  (This actually unifies each quantified meta-tyvar with a fresh skolem.)
Result is qtvs.
* Step 4: Filter the constraints using pickQuantifiablePreds and the
qtvs. We have to zonk the constraints first, so they "see" the
freshly created skolems.
-}
-- | Decide which type variables to quantify over and which constraints
-- to include in the inferred context, following the four steps of
-- Note [Deciding quantification].
decideQuantification
  :: InferMode
  -> TcLevel
  -> [(Name, TcTauType)]   -- Variables to be generalised
  -> [TcIdSigInst]         -- Partial type signatures (if any)
  -> [PredType]            -- Candidate theta; already zonked
  -> TcM ( [TcTyVar]       -- Quantify over these (skolems)
         , [PredType]      -- and this context (fully zonked)
         , VarSet)
-- See Note [Deciding quantification]
decideQuantification infer_mode rhs_tclvl name_taus psigs candidates
  = do { -- Step 1: find the mono_tvs
       ; (mono_tvs, candidates, co_vars) <- decideMonoTyVars infer_mode
                                              name_taus psigs candidates

         -- Step 2: default any non-mono tyvars, and re-simplify
         -- This step may do some unification, but result candidates is zonked
       ; candidates <- defaultTyVarsAndSimplify rhs_tclvl mono_tvs candidates

         -- Step 3: decide which kind/type variables to quantify over
       ; qtvs <- decideQuantifiedTyVars name_taus psigs candidates

         -- Step 4: choose which of the remaining candidate
         --         predicates to actually quantify over
         -- NB: decideQuantifiedTyVars turned some meta tyvars
         -- into quantified skolems, so we have to zonk again
       ; candidates <- TcM.zonkTcTypes candidates
       ; psig_theta <- TcM.zonkTcTypes (concatMap sig_inst_theta psigs)
       ; let quantifiable_candidates
               = pickQuantifiablePreds (mkVarSet qtvs) candidates
             -- NB: do /not/ run pickQuantifiablePreds over psig_theta,
             -- because we always want to quantify over psig_theta, and not
             -- drop any of them; e.g. CallStack constraints.  c.f #14658

             theta = mkMinimalBySCs id $  -- See Note [Minimize by Superclasses]
                     (psig_theta ++ quantifiable_candidates)

       ; traceTc "decideQuantification"
           (vcat [ text "infer_mode:" <+> ppr infer_mode
                 , text "candidates:" <+> ppr candidates
                 , text "psig_theta:" <+> ppr psig_theta
                 , text "mono_tvs:"   <+> ppr mono_tvs
                 , text "co_vars:"    <+> ppr co_vars
                 , text "qtvs:"       <+> ppr qtvs
                 , text "theta:"      <+> ppr theta ])
       ; return (qtvs, theta, co_vars) }
------------------
decideMonoTyVars :: InferMode
                 -> [(Name,TcType)]
                 -> [TcIdSigInst]
                 -> [PredType]
                 -> TcM (TcTyCoVarSet, [PredType], CoVarSet)
-- Decide which tyvars and covars cannot be generalised:
--   (a) Free in the environment
--   (b) Mentioned in a constraint we can't generalise
--   (c) Connected by an equality to (a) or (b)
-- Also return CoVars that appear free in the final quantified types
--   we can't quantify over these, and we must make sure they are in scope
decideMonoTyVars infer_mode name_taus psigs candidates
  = do { (no_quant, maybe_quant) <- pick infer_mode candidates

         -- If possible, we quantify over partial-sig qtvs, so they are
         -- not mono. Need to zonk them because they are meta-tyvar TyVarTvs
       ; psig_qtvs <- mapM zonkTcTyVarToTyVar $
                      concatMap (map snd . sig_inst_skols) psigs

       ; psig_theta <- mapM TcM.zonkTcType $
                       concatMap sig_inst_theta psigs

       ; taus <- mapM (TcM.zonkTcType . snd) name_taus

       ; tc_lvl <- TcM.getTcLevel
       ; let psig_tys = mkTyVarTys psig_qtvs ++ psig_theta

             co_vars    = coVarsOfTypes (psig_tys ++ taus)
             co_var_tvs = closeOverKinds co_vars
               -- The co_var_tvs are tvs mentioned in the types of covars or
               -- coercion holes. We can't quantify over these covars, so we
               -- must include the variable in their types in the mono_tvs.
               -- E.g.  If we can't quantify over co :: k~Type, then we can't
               --       quantify over k either!  Hence closeOverKinds

             mono_tvs0 = filterVarSet (not . isQuantifiableTv tc_lvl) $
                         tyCoVarsOfTypes candidates
               -- We need to grab all the non-quantifiable tyvars in the
               -- candidates so that we can grow this set to find other
               -- non-quantifiable tyvars. This can happen with something
               -- like
               --    f x y = ...
               --      where z = x 3
               -- The body of z tries to unify the type of x (call it alpha[1])
               -- with (beta[2] -> gamma[2]). This unification fails because
               -- alpha is untouchable. But we need to know not to quantify over
               -- beta or gamma, because they are in the equality constraint with
               -- alpha. Actual test case: typecheck/should_compile/tc213

             mono_tvs1 = mono_tvs0 `unionVarSet` co_var_tvs

             eq_constraints = filter isEqPrimPred candidates
             mono_tvs2      = growThetaTyVars eq_constraints mono_tvs1

             constrained_tvs = filterVarSet (isQuantifiableTv tc_lvl) $
                               (growThetaTyVars eq_constraints
                                                (tyCoVarsOfTypes no_quant)
                                `minusVarSet` mono_tvs2)
                               `delVarSetList` psig_qtvs
             -- constrained_tvs: the tyvars that we are not going to
             -- quantify solely because of the monomorphism restriction
             --
             -- (`minusVarSet` mono_tvs2`): a type variable is only
             --   "constrained" (so that the MR bites) if it is not
             --   free in the environment (#13785)
             --
             -- (`delVarSetList` psig_qtvs): if the user has explicitly
             --   asked for quantification, then that request "wins"
             --   over the MR.  Note: do /not/ delete psig_qtvs from
             --   mono_tvs1, because mono_tvs1 cannot under any circumstances
             --   be quantified (#14479); see
             --   Note [Quantification and partial signatures], Wrinkle 3, 4

             mono_tvs = mono_tvs2 `unionVarSet` constrained_tvs

           -- Warn about the monomorphism restriction
       ; warn_mono <- woptM Opt_WarnMonomorphism
       ; when (case infer_mode of { ApplyMR -> warn_mono; _ -> False}) $
         warnTc (Reason Opt_WarnMonomorphism)
                (constrained_tvs `intersectsVarSet` tyCoVarsOfTypes taus)
                mr_msg

       ; traceTc "decideMonoTyVars" $ vcat
           [ text "mono_tvs0 =" <+> ppr mono_tvs0
           , text "no_quant =" <+> ppr no_quant
           , text "maybe_quant =" <+> ppr maybe_quant
           , text "eq_constraints =" <+> ppr eq_constraints
           , text "mono_tvs =" <+> ppr mono_tvs
           , text "co_vars =" <+> ppr co_vars ]

       ; return (mono_tvs, maybe_quant, co_vars) }
  where
    pick :: InferMode -> [PredType] -> TcM ([PredType], [PredType])
    -- Split the candidates into ones we definitely
    -- won't quantify, and ones that we might
    pick NoRestrictions  cand = return ([], cand)
    pick ApplyMR         cand = return (cand, [])
    pick EagerDefaulting cand = do { os <- xoptM LangExt.OverloadedStrings
                                   ; return (partition (is_int_ct os) cand) }

    -- For EagerDefaulting, do not quantify
    -- over any interactive class constraint
    is_int_ct ovl_strings pred
      | Just (cls, _) <- getClassPredTys_maybe pred
      = isInteractiveClass ovl_strings cls
      | otherwise
      = False

    pp_bndrs = pprWithCommas (quotes . ppr . fst) name_taus
    mr_msg =
      hang (sep [ text "The Monomorphism Restriction applies to the binding"
                  <> plural name_taus
                , text "for" <+> pp_bndrs ])
         2 (hsep [ text "Consider giving"
                 , text (if isSingleton name_taus then "it" else "them")
                 , text "a type signature"])
-------------------
defaultTyVarsAndSimplify :: TcLevel
                         -> TyCoVarSet
                         -> [PredType]          -- Assumed zonked
                         -> TcM [PredType]      -- Guaranteed zonked
-- Default any tyvar free in the constraints,
-- and re-simplify in case the defaulting allows further simplification
defaultTyVarsAndSimplify rhs_tclvl mono_tvs candidates
  = do { -- Promote any tyvars that we cannot generalise
         -- See Note [Promote momomorphic tyvars]
       ; traceTc "decideMonoTyVars: promotion:" (ppr mono_tvs)
       ; (prom, _) <- promoteTyVarSet mono_tvs

         -- Default any kind/levity vars
       ; DV {dv_kvs = cand_kvs, dv_tvs = cand_tvs}
                <- candidateQTyVarsOfTypes candidates
         -- any covars should already be handled by
         -- the logic in decideMonoTyVars, which looks at
         -- the constraints generated

       ; poly_kinds  <- xoptM LangExt.PolyKinds
       ; default_kvs <- mapM (default_one poly_kinds True)
                             (dVarSetElems cand_kvs)
       ; default_tvs <- mapM (default_one poly_kinds False)
                             (dVarSetElems (cand_tvs `minusDVarSet` cand_kvs))
       ; let some_default = or default_kvs || or default_tvs

         -- Only re-simplify if some defaulting actually happened;
         -- if only promotion happened, a re-zonk suffices
       ; case () of
           _ | some_default -> simplify_cand candidates
             | prom         -> mapM TcM.zonkTcType candidates
             | otherwise    -> return candidates
       }
  where
    -- Try to default one candidate tyvar; returns True iff it was defaulted.
    -- Never defaults a non-meta tyvar or one forced to be monomorphic.
    default_one poly_kinds is_kind_var tv
      | not (isMetaTyVar tv)
      = return False
      | tv `elemVarSet` mono_tvs
      = return False
      | otherwise
      = defaultTyVar (not poly_kinds && is_kind_var) tv

    -- Re-solve the candidates after defaulting; defaulting may let
    -- instances fire (see Note [Deciding quantification], Step 2)
    simplify_cand candidates
      = do { clone_wanteds <- newWanteds DefaultOrigin candidates
           ; WC { wc_simple = simples } <- setTcLevel rhs_tclvl $
                                           simplifyWantedsTcM clone_wanteds
              -- Discard evidence; simples is fully zonked

           ; let new_candidates = ctsPreds simples
           ; traceTc "Simplified after defaulting" $
                      vcat [ text "Before:" <+> ppr candidates
                           , text "After:"  <+> ppr new_candidates ]
           ; return new_candidates }
------------------
decideQuantifiedTyVars
   :: [(Name,TcType)]   -- Annotated theta and (name,tau) pairs
   -> [TcIdSigInst]     -- Partial signatures
   -> [PredType]        -- Candidates, zonked
   -> TcM [TyVar]
-- Fix what tyvars we are going to quantify over, and quantify them
decideQuantifiedTyVars name_taus psigs candidates
  = do { -- Why psig_tys? We try to quantify over everything free in here
         -- See Note [Quantification and partial signatures]
         --     Wrinkles 2 and 3
       ; psig_tv_tys <- mapM TcM.zonkTcTyVar [ tv | sig <- psigs
                                                  , (_,tv) <- sig_inst_skols sig ]
       ; psig_theta <- mapM TcM.zonkTcType [ pred | sig <- psigs
                                                  , pred <- sig_inst_theta sig ]
       ; tau_tys <- mapM (TcM.zonkTcType . snd) name_taus

       ; let -- Try to quantify over variables free in these types
             psig_tys = psig_tv_tys ++ psig_theta
             seed_tys = psig_tys ++ tau_tys

             -- Now "grow" those seeds to find ones reachable via 'candidates'
             grown_tcvs = growThetaTyVars candidates (tyCoVarsOfTypes seed_tys)

       -- Now we have to classify them into kind variables and type variables
       -- (sigh) just for the benefit of -XNoPolyKinds; see quantifyTyVars
       --
       -- Keep the psig_tys first, so that candidateQTyVarsOfTypes produces
       -- them in that order, so that the final qtvs quantifies in the same
       -- order as the partial signatures do (#13524)
       ; dv@DV {dv_kvs = cand_kvs, dv_tvs = cand_tvs} <- candidateQTyVarsOfTypes $
                                                         psig_tys ++ candidates ++ tau_tys
       ; let pick     = (`dVarSetIntersectVarSet` grown_tcvs)
             dvs_plus = dv { dv_kvs = pick cand_kvs, dv_tvs = pick cand_tvs }

       ; traceTc "decideQuantifiedTyVars" (vcat
           [ text "candidates =" <+> ppr candidates
           , text "tau_tys =" <+> ppr tau_tys
           , text "seed_tys =" <+> ppr seed_tys
           , text "seed_tcvs =" <+> ppr (tyCoVarsOfTypes seed_tys)
           , text "grown_tcvs =" <+> ppr grown_tcvs
           , text "dvs =" <+> ppr dvs_plus])

       ; quantifyTyVars dvs_plus }
------------------
growThetaTyVars :: ThetaType -> TyCoVarSet -> TyCoVarSet
-- See Note [Growing the tau-tvs using constraints]
-- Transitively extend the given tyvar set with the free vars of every
-- (non-implicit-parameter) predicate that touches the set.
growThetaTyVars preds initial_tcvs
  | null preds = initial_tcvs
  | otherwise  = transCloVarSet grow_step seed_set
  where
    (implicit_params, ordinary_preds) = partition isIPPred preds

    -- Implicit parameters always contribute their free vars to the seed
    -- See Note [Inheriting implicit parameters] in TcType
    seed_set = initial_tcvs `unionVarSet` tyCoVarsOfTypes implicit_params

    grow_step :: VarSet -> VarSet   -- Maps current set to newly-grown ones
    grow_step current = foldr add_if_connected emptyVarSet ordinary_preds
      where
        -- A predicate touching the current set contributes all its free vars
        add_if_connected pred acc
          | pred_fvs `intersectsVarSet` current = acc `unionVarSet` pred_fvs
          | otherwise                           = acc
          where
            pred_fvs = tyCoVarsOfType pred
{- Note [Promote momomorphic tyvars]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Promote any type variables that are free in the environment. Eg
f :: forall qtvs. bound_theta => zonked_tau
The free vars of f's type become free in the envt, and hence will show
up whenever 'f' is called. They may currently be at rhs_tclvl, but they
had better be unifiable at the outer_tclvl! Example: envt mentions
alpha[1]
tau_ty = beta[2] -> beta[2]
constraints = alpha ~ [beta]
we don't quantify over beta (since it is fixed by envt)
so we must promote it! The inferred type is just
f :: beta -> beta
NB: promoteTyVar ignores coercion variables
Note [Quantification and partial signatures]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When choosing type variables to quantify, the basic plan is to
quantify over all type variables that are
* free in the tau_tvs, and
* not forced to be monomorphic (mono_tvs),
for example by being free in the environment.
However, in the case of a partial type signature, we are doing inference
*in the presence of a type signature*. For example:
f :: _ -> a
f x = ...
or
g :: (Eq _a) => _b -> _b
In both cases we use plan InferGen, and hence call simplifyInfer. But
those 'a' variables are skolems (actually TyVarTvs), and we should be
sure to quantify over them. This leads to several wrinkles:
* Wrinkle 1. In the case of a type error
f :: _ -> Maybe a
f x = True && x
The inferred type of 'f' is f :: Bool -> Bool, but there's a
left-over error of form (HoleCan (Maybe a ~ Bool)). The error-reporting
machine expects to find a binding site for the skolem 'a', so we
add it to the quantified tyvars.
* Wrinkle 2. Consider the partial type signature
f :: (Eq _) => Int -> Int
f x = x
In normal cases that makes sense; e.g.
g :: Eq _a => _a -> _a
g x = x
where the signature makes the type less general than it could
be. But for 'f' we must therefore quantify over the user-annotated
constraints, to get
f :: forall a. Eq a => Int -> Int
(thereby correctly triggering an ambiguity error later). If we don't
we'll end up with a strange open type
f :: Eq alpha => Int -> Int
which isn't ambiguous but is still very wrong.
Bottom line: Try to quantify over any variable free in psig_theta,
just like the tau-part of the type.
* Wrinkle 3 (#13482). Also consider
f :: forall a. _ => Int -> Int
f x = if (undefined :: a) == undefined then x else 0
Here we get an (Eq a) constraint, but it's not mentioned in the
psig_theta nor the type of 'f'. But we still want to quantify
over 'a' even if the monomorphism restriction is on.
* Wrinkle 4 (#14479)
foo :: Num a => a -> a
foo xxx = g xxx
where
g :: forall b. Num b => _ -> b
g y = xxx + y
In the signature for 'g', we cannot quantify over 'b' because it turns out to
get unified with 'a', which is free in g's environment. So we carefully
refrain from bogusly quantifying, in TcSimplify.decideMonoTyVars. We
report the error later, in TcBinds.chooseInferredQuantifiers.
Note [Growing the tau-tvs using constraints]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
(growThetaTyVars insts tvs) is the result of extending the set
of tyvars, tvs, using all conceivable links from pred
E.g. tvs = {a}, preds = {H [a] b, K (b,Int) c, Eq e}
Then growThetaTyVars preds tvs = {a,b,c}
Notice that
growThetaTyVars is conservative if v might be fixed by vs
=> v `elem` grow(vs,C)
Note [Quantification with errors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we find that the RHS of the definition has some absolutely-insoluble
constraints (including especially "variable not in scope"), we
* Abandon all attempts to find a context to quantify over,
and instead make the function fully-polymorphic in whatever
type we have found
* Return a flag from simplifyInfer, indicating that we found an
insoluble constraint. This flag is used to suppress the ambiguity
check for the inferred type, which may well be bogus, and which
tends to obscure the real error. This fix feels a bit clunky,
but I failed to come up with anything better.
Reasons:
- Avoid downstream errors
- Do not perform an ambiguity test on a bogus type, which might well
fail spuriously, thereby obfuscating the original insoluble error.
#14000 is an example
I tried an alternative approach: simply failM, after emitting the
residual implication constraint; the exception will be caught in
TcBinds.tcPolyBinds, which gives all the binders in the group the type
(forall a. a). But that didn't work with -fdefer-type-errors, because
the recovery from failM emits no code at all, so there is no function
to run! But -fdefer-type-errors aspires to produce a runnable program.
NB that we must include *derived* errors in the check for insolubles.
Example:
(a::*) ~ Int#
We get an insoluble derived error *~#, and we don't want to discard
it before doing the isInsolubleWC test! (#8262)
Note [Default while Inferring]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Our current plan is that defaulting only happens at simplifyTop and
not simplifyInfer. This may lead to some insoluble deferred constraints.
Example:
instance D g => C g Int b
constraint inferred = (forall b. 0 => C gamma alpha b) /\ Num alpha
type inferred = gamma -> gamma
Now, if we try to default (alpha := Int) we will be able to refine the implication to
(forall b. 0 => C gamma Int b)
which can then be simplified further to
(forall b. 0 => D gamma)
Finally, we /can/ approximate this implication with (D gamma) and infer the quantified
type: forall g. D g => g -> g
Instead what will currently happen is that we will get a quantified type
(forall g. g -> g) and an implication:
forall g. 0 => (forall b. 0 => C g alpha b) /\ Num alpha
Which, even if the simplifyTop defaults (alpha := Int) we will still be left with an
unsolvable implication:
forall g. 0 => (forall b. 0 => D g)
The concrete example would be:
h :: C g a s => g -> a -> ST s a
f (x::gamma) = (\_ -> x) (runST (h x (undefined::alpha)) + 1)
But it is quite tedious to do defaulting and resolve the implication constraints, and
we have not observed code breaking because of the lack of defaulting in inference, so
we don't do it for now.
Note [Minimize by Superclasses]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we quantify over a constraint, in simplifyInfer we need to
quantify over a constraint that is minimal in some sense: For
instance, if the final wanted constraint is (Eq alpha, Ord alpha),
we'd like to quantify over Ord alpha, because we can just get Eq alpha
from superclass selection from Ord alpha. This minimization is what
mkMinimalBySCs does. Then, simplifyInfer uses the minimal constraint
to check the original wanted.
Note [Avoid unnecessary constraint simplification]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-------- NB NB NB (Jun 12) -------------
This note no longer applies; see the notes with #4361.
But I'm leaving it in here so we remember the issue.)
----------------------------------------
When inferring the type of a let-binding, with simplifyInfer,
try to avoid unnecessarily simplifying class constraints.
Doing so aids sharing, but it also helps with delicate
situations like
instance C t => C [t] where ..
f :: C [t] => ....
f x = let g y = ...(constraint C [t])...
in ...
When inferring a type for 'g', we don't want to apply the
instance decl, because then we can't satisfy (C t). So we
just notice that g isn't quantified over 't' and partition
the constraints before simplifying.
This only half-works, but then let-generalisation only half-works.
*********************************************************************************
* *
* Main Simplifier *
* *
***********************************************************************************
-}
simplifyWantedsTcM :: [CtEvidence] -> TcM WantedConstraints
-- Solve the specified Wanted constraints
-- Discard the evidence binds
-- Discards all Derived stuff in result
-- Postcondition: fully zonked and unflattened constraints
simplifyWantedsTcM wanted
  = do { traceTc "simplifyWantedsTcM {" (ppr wanted)
       ; (residual, _ev_binds) <- runTcS (solveWantedsAndDrop (mkSimpleWC wanted))
       ; zonked_residual <- TcM.zonkWC residual
       ; traceTc "simplifyWantedsTcM }" (ppr zonked_residual)
       ; return zonked_residual }
solveWantedsAndDrop :: WantedConstraints -> TcS WantedConstraints
-- Since solveWanteds returns the residual WantedConstraints,
-- it should always be called within a runTcS or something similar,
-- Result is not zonked
solveWantedsAndDrop = fmap dropDerivedWC . solveWanteds
solveWanteds :: WantedConstraints -> TcS WantedConstraints
-- Solve the simples, then the nested implications, then iterate
-- via simpl_loop up to the solver-iterations limit,
-- so that the inert set doesn't mindlessly propagate.
-- NB: wc_simples may be wanted /or/ derived now
solveWanteds wc@(WC { wc_simple = simples, wc_impl = implics })
  = do { cur_lvl <- TcS.getTcLevel
       ; traceTcS "solveWanteds {" $
         vcat [ text "Level =" <+> ppr cur_lvl
              , ppr wc ]

       ; wc1 <- solveSimpleWanteds simples
                -- Any insoluble constraints are in 'simples' and so get rewritten
                -- See Note [Rewrite insolubles] in TcSMonad

       ; (floated_eqs, implics2) <- solveNestedImplications $
                                    implics `unionBags` wc_impl wc1

       ; dflags <- getDynFlags
       ; final_wc <- simpl_loop 0 (solverIterations dflags) floated_eqs
                                (wc1 { wc_impl = implics2 })

       ; ev_binds_var <- getTcEvBindsVar
       ; bb <- TcS.getTcEvBindsMap ev_binds_var
       ; traceTcS "solveWanteds }" $
                 vcat [ text "final wc =" <+> ppr final_wc
                      , text "current evbinds =" <+> ppr (evBindMapBinds bb) ]

       ; return final_wc }
simpl_loop :: Int -> IntWithInf -> Cts
           -> WantedConstraints -> TcS WantedConstraints
-- Iterate the solver: n is the iteration count, limit the maximum
-- number of iterations, floated_eqs the equalities floated out of
-- nested implications on the previous pass.
simpl_loop n limit floated_eqs wc@(WC { wc_simple = simples })
  | n `intGtLimit` limit
  = do { -- Add an error (not a warning) if we blow the limit,
         -- Typically if we blow the limit we are going to report some other error
         -- (an unsolved constraint), and we don't want that error to suppress
         -- the iteration limit warning!
         addErrTcS (hang (text "solveWanteds: too many iterations"
                   <+> parens (text "limit =" <+> ppr limit))
                2 (vcat [ text "Unsolved:" <+> ppr wc
                        , ppUnless (isEmptyBag floated_eqs) $
                          text "Floated equalities:" <+> ppr floated_eqs
                        , text "Set limit with -fconstraint-solver-iterations=n; n=0 for no limit"
                  ]))
       ; return wc }

  | not (isEmptyBag floated_eqs)
  = simplify_again n limit True (wc { wc_simple = floated_eqs `unionBags` simples })
    -- Put floated_eqs first so they get solved first
    -- NB: the floated_eqs may include /derived/ equalities
    -- arising from fundeps inside an implication

  | superClassesMightHelp wc
  = -- We still have unsolved goals, and apparently no way to solve them,
    -- so try expanding superclasses at this level, both Given and Wanted
    do { pending_given <- getPendingGivenScs
       ; let (pending_wanted, simples1) = getPendingWantedScs simples
       ; if null pending_given && null pending_wanted
         then return wc  -- After all, superclasses did not help
         else
    do { new_given  <- makeSuperClasses pending_given
       ; new_wanted <- makeSuperClasses pending_wanted
       ; solveSimpleGivens new_given -- Add the new Givens to the inert set
       ; simplify_again n limit (null pending_given)
         wc { wc_simple = simples1 `unionBags` listToBag new_wanted } } }

  | otherwise
  = return wc
simplify_again :: Int -> IntWithInf -> Bool
               -> WantedConstraints -> TcS WantedConstraints
-- We have definitely decided to have another go at solving
-- the wanted constraints (we have tried at least once already)
simplify_again n limit no_new_given_scs
               wc@(WC { wc_simple = simples, wc_impl = implics })
  = do { csTraceTcS $
         text "simpl_loop iteration=" <> int n
         <+> (parens $ hsep [ text "no new given superclasses =" <+> ppr no_new_given_scs <> comma
                            , int (lengthBag simples) <+> text "simples to solve" ])
       ; traceTcS "simpl_loop: wc =" (ppr wc)

       ; (unifs1, wc1) <- reportUnifications $
                          solveSimpleWanteds $
                          simples

       -- See Note [Cutting off simpl_loop]
       -- We have already tried to solve the nested implications once
       -- Try again only if we have unified some meta-variables
       -- (which is a bit like adding more givens), or we have some
       -- new Given superclasses
       ; let new_implics = wc_impl wc1
       ; if unifs1 == 0       &&
            no_new_given_scs  &&
            isEmptyBag new_implics

           then -- Do not even try to solve the implications
                simpl_loop (n+1) limit emptyBag (wc1 { wc_impl = implics })

           else -- Try to solve the implications
                do { (floated_eqs2, implics2) <- solveNestedImplications $
                                                 implics `unionBags` new_implics
                   ; simpl_loop (n+1) limit floated_eqs2 (wc1 { wc_impl = implics2 })
                   } }
solveNestedImplications :: Bag Implication
                        -> TcS (Cts, Bag Implication)
-- Solve each implication in turn, collecting the floated equalities
-- and any implications that remain unsolved.
-- Precondition: the TcS inerts may contain unsolved simples which have
-- to be converted to givens before we go inside a nested implication.
solveNestedImplications implics
  | isEmptyBag implics
  = return (emptyBag, emptyBag)
  | otherwise
  = do { traceTcS "solveNestedImplications starting {" empty
       ; (floated_eqs_s, unsolved_implics) <- mapAndUnzipBagM solveImplication implics
       ; let floated_eqs = concatBag floated_eqs_s

       -- ... and we are back in the original TcS inerts
       -- Notice that the original includes the _insoluble_simples so it was safe to ignore
       -- them in the beginning of this function.
       ; traceTcS "solveNestedImplications end }" $
                  vcat [ text "all floated_eqs =" <+> ppr floated_eqs
                       , text "unsolved_implics =" <+> ppr unsolved_implics ]

       ; return (floated_eqs, catBagMaybes unsolved_implics) }
solveImplication :: Implication     -- Wanted
                 -> TcS (Cts,      -- All wanted or derived floated equalities: var = type
                         Maybe Implication) -- Simplified implication (empty or singleton)
-- Precondition: The TcS monad contains an empty worklist and given-only inerts
-- which after trying to solve this implication we must restore to their original value
solveImplication imp@(Implic { ic_tclvl  = tclvl
                             , ic_binds  = ev_binds_var
                             , ic_skols  = skols
                             , ic_given  = given_ids
                             , ic_wanted = wanteds
                             , ic_info   = info
                             , ic_status = status })
  | isSolvedStatus status
  = return (emptyCts, Just imp)  -- Do nothing
  | otherwise  -- Even for IC_Insoluble it is worth doing more work
               -- The insoluble stuff might be in one sub-implication
               -- and other unsolved goals in another; and we want to
               -- solve the latter as much as possible
  = do { inerts <- getTcSInerts
       ; traceTcS "solveImplication {" (ppr imp $$ text "Inerts" <+> ppr inerts)

         -- commented out; see `where` clause below
         -- ; when debugIsOn check_tc_level

         -- Solve the nested constraints
       ; (no_given_eqs, given_insols, residual_wanted)
            <- nestImplicTcS ev_binds_var tclvl $
               do { let loc    = mkGivenLoc tclvl info (ic_env imp)
                        givens = mkGivens loc given_ids
                  ; solveSimpleGivens givens

                  ; residual_wanted <- solveWanteds wanteds
                        -- solveWanteds, *not* solveWantedsAndDrop, because
                        -- we want to retain derived equalities so we can float
                        -- them out in floatEqualities

                  ; (no_eqs, given_insols) <- getNoGivenEqs tclvl skols
                        -- Call getNoGivenEqs /after/ solveWanteds, because
                        -- solveWanteds can augment the givens, via expandSuperClasses,
                        -- to reveal given superclass equalities

                  ; return (no_eqs, given_insols, residual_wanted) }

       ; (floated_eqs, residual_wanted)
             <- floatEqualities skols given_ids ev_binds_var
                                no_given_eqs residual_wanted

       ; traceTcS "solveImplication 2"
           (ppr given_insols $$ ppr residual_wanted)
       ; let final_wanted = residual_wanted `addInsols` given_insols
             -- Don't lose track of the insoluble givens,
             -- which signal unreachable code; put them in ic_wanted

       ; res_implic <- setImplicationStatus (imp { ic_no_eqs = no_given_eqs
                                                 , ic_wanted = final_wanted })

       ; evbinds <- TcS.getTcEvBindsMap ev_binds_var
       ; tcvs    <- TcS.getTcEvTyCoVars ev_binds_var
       ; traceTcS "solveImplication end }" $ vcat
             [ text "no_given_eqs =" <+> ppr no_given_eqs
             , text "floated_eqs =" <+> ppr floated_eqs
             , text "res_implic =" <+> ppr res_implic
             , text "implication evbinds =" <+> ppr (evBindMapBinds evbinds)
             , text "implication tvcs =" <+> ppr tcvs ]

       ; return (floated_eqs, res_implic) }

  where
    -- TcLevels must be strictly increasing (see (ImplicInv) in
    -- Note [TcLevel and untouchable type variables] in TcType),
    -- and in fact I think they should always increase one level at a time.

    -- Though sensible, this check causes lots of testsuite failures. It is
    -- remaining commented out for now.
    {-
    check_tc_level = do { cur_lvl <- TcS.getTcLevel
                        ; MASSERT2( tclvl == pushTcLevel cur_lvl , text "Cur lvl =" <+> ppr cur_lvl $$ text "Imp lvl =" <+> ppr tclvl ) }
    -}
----------------------
setImplicationStatus :: Implication -> TcS (Maybe Implication)
-- Finalise the implication returned from solveImplication:
--    * Set the ic_status field
--    * Trim the ic_wanted field to remove Derived constraints
-- Precondition: the ic_status field is not already IC_Solved
-- Return Nothing if we can discard the implication altogether
setImplicationStatus implic@(Implic { ic_status = status
                                    , ic_info   = info
                                    , ic_wanted = wc
                                    , ic_given  = givens })
 | ASSERT2( not (isSolvedStatus status ), ppr info )
   -- Precondition: we only set the status if it is not already solved
   not (isSolvedWC pruned_wc)
 = do { traceTcS "setImplicationStatus(not-all-solved) {" (ppr implic)

      ; implic <- neededEvVars implic

      ; let new_status | insolubleWC pruned_wc = IC_Insoluble
                       | otherwise             = IC_Unsolved
            new_implic = implic { ic_status = new_status
                                , ic_wanted = pruned_wc }

      ; traceTcS "setImplicationStatus(not-all-solved) }" (ppr new_implic)

      ; return $ Just new_implic }

 | otherwise  -- Everything is solved
              -- Set status to IC_Solved,
              -- and compute the dead givens and outer needs
              -- See Note [Tracking redundant constraints]
 = do { traceTcS "setImplicationStatus(all-solved) {" (ppr implic)

      ; implic@(Implic { ic_need_inner = need_inner
                       , ic_need_outer = need_outer }) <- neededEvVars implic

      ; bad_telescope <- checkBadTelescope implic

      ; let dead_givens | warnRedundantGivens info
                        = filterOut (`elemVarSet` need_inner) givens
                        | otherwise = []   -- None to report

            discard_entire_implication  -- Can we discard the entire implication?
              =  null dead_givens           -- No warning from this implication
              && not bad_telescope
              && isEmptyWC pruned_wc        -- No live children
              && isEmptyVarSet need_outer   -- No needed vars to pass up to parent

            final_status
              | bad_telescope = IC_BadTelescope
              | otherwise     = IC_Solved { ics_dead = dead_givens }
            final_implic = implic { ic_status = final_status
                                  , ic_wanted = pruned_wc }

      ; traceTcS "setImplicationStatus(all-solved) }" $
        vcat [ text "discard:" <+> ppr discard_entire_implication
             , text "new_implic:" <+> ppr final_implic ]

      ; return $ if discard_entire_implication
                 then Nothing
                 else Just final_implic }
 where
   WC { wc_simple = simples, wc_impl = implics } = wc

   pruned_simples = dropDerivedSimples simples
   pruned_implics = filterBag keep_me implics
   pruned_wc = WC { wc_simple = pruned_simples
                  , wc_impl   = pruned_implics }

   -- Keep only implications that still have something to report:
   -- redundant-given warnings, or unsolved children
   keep_me :: Implication -> Bool
   keep_me ic
     | IC_Solved { ics_dead = dead_givens } <- ic_status ic
                          -- Fully solved
     , null dead_givens   -- No redundant givens to report
     , isEmptyBag (wc_impl (ic_wanted ic))
           -- And no children that might have things to report
     = False       -- Then we don't need to keep it
     | otherwise
     = True        -- Otherwise, keep it
checkBadTelescope :: Implication -> TcS Bool
-- True <=> the skolems form a bad telescope
-- See Note [Keeping scoped variables in order: Explicit] in TcHsType
checkBadTelescope (Implic { ic_telescope = m_telescope
                          , ic_skols     = skols })
  | not (isJust m_telescope)
  = return False          -- No user-written telescope: nothing to check
  | otherwise
  = do { zonked_skols <- mapM TcS.zonkTyCoVarKind skols
       ; return (out_of_order emptyVarSet (reverse zonked_skols)) }
  where
    -- Walk the skolems from last to first, accumulating the set of
    -- skolems that occur later; the telescope is bad if some skolem's
    -- kind mentions one of those later skolems.
    out_of_order :: TyVarSet   -- skolems that appear *later* than the current ones
                 -> [TcTyVar]  -- ordered skolems, in reverse order
                 -> Bool       -- True <=> there is an out-of-order skolem
    out_of_order _ [] = False
    out_of_order seen_later (sk : earlier)
      =  tyCoVarsOfType (tyVarKind sk) `intersectsVarSet` seen_later
      || out_of_order (seen_later `extendVarSet` sk) earlier
warnRedundantGivens :: SkolemInfo -> Bool
-- Should redundant givens be reported for an implication with this
-- skolem info?  See Note [Tracking redundant constraints]
warnRedundantGivens (SigSkol ctxt _ _)
  | FunSigCtxt _ warn_redundant <- ctxt = warn_redundant
  | ExprSigCtxt                 <- ctxt = True
  | otherwise                           = False
-- To think about: do we want to report redundant givens for
-- pattern synonyms, PatSynSigSkol? c.f #9953, comment:21.
warnRedundantGivens (InstSkol {}) = True
warnRedundantGivens _             = False
neededEvVars :: Implication -> TcS Implication
-- Find all the evidence variables that are "needed",
-- and delete dead evidence bindings
--   See Note [Tracking redundant constraints]
--   See Note [Delete dead Given evidence bindings]
--
--   - Start from initial_seeds (from nested implications)
--
--   - Add free vars of RHS of all Wanted evidence bindings
--     and coercion variables accumulated in tcvs (all Wanted)
--
--   - Generate 'needed', the needed set of EvVars, by doing transitive
--     closure through Given bindings
--     e.g.   Needed {a,b}
--            Given  a = sc_sel a2
--            Then a2 is needed too
--
--   - Prune out all Given bindings that are not needed
--
--   - From the 'needed' set, delete ev_bndrs, the binders of the
--     evidence bindings, to give the final needed variables
--
neededEvVars implic@(Implic { ic_given = givens
                            , ic_binds = ev_binds_var
                            , ic_wanted = WC { wc_impl = implics }
                            , ic_need_inner = old_needs })
 = do { ev_binds <- TcS.getTcEvBindsMap ev_binds_var
      ; tcvs     <- TcS.getTcEvTyCoVars ev_binds_var
      ; let seeds1        = foldr add_implic_seeds old_needs implics
            seeds2        = foldEvBindMap add_wanted seeds1 ev_binds
            seeds3        = seeds2 `unionVarSet` tcvs
            need_inner    = findNeededEvVars ev_binds seeds3
            live_ev_binds = filterEvBindMap (needed_ev_bind need_inner) ev_binds
            need_outer    = foldEvBindMap del_ev_bndr need_inner live_ev_binds
                            `delVarSetList` givens
      ; TcS.setTcEvBindsMap ev_binds_var live_ev_binds
           -- See Note [Delete dead Given evidence bindings]
      ; traceTcS "neededEvVars" $
        vcat [ text "old_needs:" <+> ppr old_needs
             , text "seeds3:" <+> ppr seeds3
             , text "tcvs:" <+> ppr tcvs
             , text "ev_binds:" <+> ppr ev_binds
             , text "live_ev_binds:" <+> ppr live_ev_binds ]
      ; return (implic { ic_need_inner = need_inner
                       , ic_need_outer = need_outer }) }
 where
   -- Seed the needed set with the outer needs of nested implications
   add_implic_seeds (Implic { ic_need_outer = needs }) acc
      = needs `unionVarSet` acc

   -- Keep a Given binding only if its binder is needed; Wanted
   -- bindings are always kept
   needed_ev_bind needed (EvBind { eb_lhs = ev_var
                                 , eb_is_given = is_given })
     | is_given  = ev_var `elemVarSet` needed
     | otherwise = True   -- Keep all wanted bindings

   -- Remove a binding's own binder from the needed set (it is
   -- defined here, so the parent need not supply it)
   del_ev_bndr :: EvBind -> VarSet -> VarSet
   del_ev_bndr (EvBind { eb_lhs = v }) needs = delVarSet needs v

   -- Seed with the free vars of the RHS of each Wanted binding
   add_wanted :: EvBind -> VarSet -> VarSet
   add_wanted (EvBind { eb_is_given = is_given, eb_rhs = rhs }) needs
     | is_given  = needs  -- Add the rhs vars of the Wanted bindings only
     | otherwise = evVarsOfTerm rhs `unionVarSet` needs
{- Note [Delete dead Given evidence bindings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
As a result of superclass expansion, we speculatively
generate evidence bindings for Givens. E.g.
f :: (a ~ b) => a -> b -> Bool
f x y = ...
We'll have
[G] d1 :: (a~b)
and we'll speculatively generate the evidence binding
[G] d2 :: (a ~# b) = sc_sel d
Now d2 is available for solving. But it may not be needed! Usually
such dead superclass selections will eventually be dropped as dead
code, but:
* It won't always be dropped (#13032). In the case of an
unlifted-equality superclass like d2 above, we generate
case heq_sc d1 of d2 -> ...
and we can't (in general) drop that case expression in case
d1 is bottom. So it's technically unsound to have added it
in the first place.
* Simply generating all those extra superclasses can generate lots of
code that has to be zonked, only to be discarded later. Better not
to generate it in the first place.
Moreover, if we simplify this implication more than once
(e.g. because we can't solve it completely on the first iteration
of simpl_loop), we'll generate all the same bindings AGAIN!
Easy solution: take advantage of the work we are doing to track dead
(unused) Givens, and use it to prune the Given bindings too. This is
all done by neededEvVars.
This led to a remarkable 25% overall compiler allocation decrease in
test T12227.
But we don't get to discard all redundant equality superclasses, alas;
see #15205.
Note [Tracking redundant constraints]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
With Opt_WarnRedundantConstraints, GHC can report which
constraints of a type signature (or instance declaration) are
redundant, and can be omitted. Here is an overview of how it
works:
----- What is a redundant constraint?
* The things that can be redundant are precisely the Given
constraints of an implication.
* A constraint can be redundant in two different ways:
a) It is implied by other givens. E.g.
f :: (Eq a, Ord a) => blah -- Eq a unnecessary
g :: (Eq a, a~b, Eq b) => blah -- Either Eq a or Eq b unnecessary
b) It is not needed by the Wanted constraints covered by the
implication E.g.
f :: Eq a => a -> Bool
f x = True -- Equality not used
* To find (a), when we have two Given constraints,
we must be careful to drop the one that is a naked variable (if poss).
So if we have
f :: (Eq a, Ord a) => blah
then we may find [G] sc_sel (d1::Ord a) :: Eq a
[G] d2 :: Eq a
We want to discard d2 in favour of the superclass selection from
the Ord dictionary. This is done by TcInteract.solveOneFromTheOther
See Note [Replacement vs keeping].
* To find (b) we need to know which evidence bindings are 'wanted';
hence the eb_is_given field on an EvBind.
----- How tracking works
* The ic_need fields of an Implic records in-scope (given) evidence
variables bound by the context, that were needed to solve this
implication (so far). See the declaration of Implication.
* When the constraint solver finishes solving all the wanteds in
an implication, it sets its status to IC_Solved
- The ics_dead field, of IC_Solved, records the subset of this
implication's ic_given that are redundant (not needed).
* We compute which evidence variables are needed by an implication
in setImplicationStatus. A variable is needed if
a) it is free in the RHS of a Wanted EvBind,
b) it is free in the RHS of an EvBind whose LHS is needed,
c) it is in the ics_need of a nested implication.
* We need to be careful not to discard an implication
prematurely, even one that is fully solved, because we might
thereby forget which variables it needs, and hence wrongly
report a constraint as redundant. But we can discard it once
its free vars have been incorporated into its parent; or if it
simply has no free vars. This careful discarding is also
handled in setImplicationStatus.
----- Reporting redundant constraints
* TcErrors does the actual warning, in warnRedundantConstraints.
* We don't report redundant givens for *every* implication; only
for those which reply True to TcSimplify.warnRedundantGivens:
- For example, in a class declaration, the default method *can*
use the class constraint, but it certainly doesn't *have* to,
and we don't want to report an error there.
- More subtly, in a function definition
f :: (Ord a, Ord a, Ix a) => a -> a
f x = rhs
we do an ambiguity check on the type (which would find that one
of the Ord a constraints was redundant), and then we check that
the definition has that type (which might find that both are
redundant). We don't want to report the same error twice, so we
disable it for the ambiguity check. Hence using two different
FunSigCtxts, one with the warn-redundant field set True, and the
other set False in
- TcBinds.tcSpecPrag
- TcBinds.tcTySig
This decision is taken in setImplicationStatus, rather than TcErrors
so that we can discard implication constraints that we don't need.
So ics_dead consists only of the *reportable* redundant givens.
----- Shortcomings
Consider (see #9939)
f2 :: (Eq a, Ord a) => a -> a -> Bool
-- Ord a redundant, but Eq a is reported
f2 x y = (x == y)
We report (Eq a) as redundant, whereas actually (Ord a) is. But it's
really not easy to detect that!
Note [Cutting off simpl_loop]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It is very important not to iterate in simpl_loop unless there is a chance
of progress. #8474 is a classic example:
* There's a deeply-nested chain of implication constraints.
?x:alpha => ?y1:beta1 => ... ?yn:betan => [W] ?x:Int
* From the innermost one we get a [D] alpha ~ Int,
but alpha is untouchable until we get out to the outermost one
* We float [D] alpha~Int out (it is in floated_eqs), but since alpha
is untouchable, the solveInteract in simpl_loop makes no progress
* So there is no point in attempting to re-solve
?yn:betan => [W] ?x:Int
via solveNestedImplications, because we'll just get the
same [D] again
* If we *do* re-solve, we'll get an infinite loop. It is cut off by
the fixed bound of 10, but solving the next takes 10*10*...*10 (ie
exponentially many) iterations!
Conclusion: we should call solveNestedImplications only if we did
some unification in solveSimpleWanteds; because that's the only way
we'll get more Givens (a unification is like adding a Given) to
allow the implication to make progress.
-}
promoteTyVar :: TcTyVar -> TcM (Bool, TcTyVar)
-- When we float a constraint out of an implication we must restore
-- invariant (WantedInv) in Note [TcLevel and untouchable type variables] in TcType
-- Return True <=> we did some promotion
-- Also returns either the original tyvar (no promotion) or the new one
-- See Note [Promoting unification variables]
promoteTyVar tv
  = do { cur_lvl <- TcM.getTcLevel
       ; if not (isFloatedTouchableMetaTyVar cur_lvl tv)
         then return (False, tv)    -- Already at an acceptable level
         else do { fresh_tv <- TcM.cloneMetaTyVar tv
                 ; let promoted_tv = setMetaTyVarTcLevel fresh_tv cur_lvl
                 ; TcM.writeMetaTyVar tv (mkTyVarTy promoted_tv)
                 ; return (True, promoted_tv) } }
-- Returns whether or not *any* tyvar is defaulted
promoteTyVarSet :: TcTyVarSet -> TcM (Bool, TcTyVarSet)
promoteTyVarSet tvs
  = do { results <- mapM promoteTyVar (nonDetEltsUniqSet tvs)
         -- Non-determinism is OK because order of promotion doesn't matter
       ; let (promoted_any, promoted_tvs) = unzip results
       ; return (or promoted_any, mkVarSet promoted_tvs) }
promoteTyVarTcS :: TcTyVar -> TcS ()
-- When we float a constraint out of an implication we must restore
-- invariant (WantedInv) in Note [TcLevel and untouchable type variables] in TcType
-- See Note [Promoting unification variables]
-- We don't just call promoteTyVar because we want to use unifyTyVar,
-- not writeMetaTyVar
promoteTyVarTcS tv
  = do { cur_lvl <- TcS.getTcLevel
       ; if isFloatedTouchableMetaTyVar cur_lvl tv
         then do { fresh_tv <- TcS.cloneMetaTyVar tv
                 ; let promoted_tv = setMetaTyVarTcLevel fresh_tv cur_lvl
                 ; unifyTyVar tv (mkTyVarTy promoted_tv) }
         else return () }
-- | Like 'defaultTyVar', but in the TcS monad.
defaultTyVarTcS :: TcTyVar -> TcS Bool
defaultTyVarTcS the_tv =
  if isRuntimeRepVar the_tv && not (isTyVarTyVar the_tv)
       -- TyVarTvs should only be unified with a tyvar
       -- never with a type; c.f. TcMType.defaultTyVar
       -- and Note [Inferring kinds for type declarations] in TcTyClsDecls
  then do { traceTcS "defaultTyVarTcS RuntimeRep" (ppr the_tv)
          ; unifyTyVar the_tv liftedRepTy
          ; return True }
  else return False  -- the common case
approximateWC :: Bool -> WantedConstraints -> Cts
-- Postcondition: Wanted or Derived Cts
-- See Note [ApproximateWC]
--
-- Extract the simple constraints we might plausibly quantify over or
-- default: the top-level simples, plus floatable simples from nested
-- implications.  The Bool says whether to float past implications that
-- bind equalities.
approximateWC float_past_equalities wc
  = float_wc emptyVarSet wc
  where
    float_wc :: TcTyCoVarSet -> WantedConstraints -> Cts
    -- trapping_tvs: skolems bound by enclosing implications; simples
    -- mentioning any of them cannot float
    float_wc trapping_tvs (WC { wc_simple = simples, wc_impl = implics })
      = filterBag (is_floatable trapping_tvs) simples `unionBags`
        do_bag (float_implic trapping_tvs) implics
      where

    float_implic :: TcTyCoVarSet -> Implication -> Cts
    float_implic trapping_tvs imp
      | float_past_equalities || ic_no_eqs imp
      = float_wc new_trapping_tvs (ic_wanted imp)
      | otherwise   -- Take care with equalities
      = emptyCts    -- See (1) under Note [ApproximateWC]
      where
        -- This implication's skolems trap constraints mentioning them
        new_trapping_tvs = trapping_tvs `extendVarSetList` ic_skols imp

    do_bag :: (a -> Bag c) -> Bag a -> Bag c
    do_bag f = foldr (unionBags.f) emptyBag

    -- A simple constraint floats if it is not Given, not a hole, not an
    -- insoluble equality, and mentions no trapped skolem
    is_floatable skol_tvs ct
       | isGivenCt ct     = False
       | isHoleCt ct      = False
       | insolubleEqCt ct = False
       | otherwise        = tyCoVarsOfCt ct `disjointVarSet` skol_tvs
{- Note [ApproximateWC]
~~~~~~~~~~~~~~~~~~~~~~~
approximateWC takes a constraint, typically arising from the RHS of a
let-binding whose type we are *inferring*, and extracts from it some
*simple* constraints that we might plausibly abstract over. Of course
the top-level simple constraints are plausible, but we also float constraints
out from inside, if they are not captured by skolems.
The same function is used when doing type-class defaulting (see the call
to applyDefaultingRules) to extract constraints that might be defaulted.
There is one caveat:
1. When inferring most-general types (in simplifyInfer), we do *not*
float anything out if the implication binds equality constraints,
because that defeats the OutsideIn story. Consider
data T a where
TInt :: T Int
MkT :: T a
f TInt = 3::Int
We get the implication (a ~ Int => res ~ Int), where so far we've decided
f :: T a -> res
We don't want to float (res~Int) out because then we'll infer
f :: T a -> Int
which is only one of the possible types. (GHC 7.6 accidentally *did*
float out of such implications, which meant it would happily infer
non-principal types.)
HOWEVER (#12797) in findDefaultableGroups we are not worried about
the most-general type; and we /do/ want to float out of equalities.
Hence the boolean flag to approximateWC.
------ Historical note -----------
There used to be a second caveat, driven by #8155
2. We do not float out an inner constraint that shares a type variable
(transitively) with one that is trapped by a skolem. Eg
forall a. F a ~ beta, Integral beta
We don't want to float out (Integral beta). Doing so would be bad
when defaulting, because then we'll default beta:=Integer, and that
makes the error message much worse; we'd get
Can't solve F a ~ Integer
rather than
Can't solve Integral (F a)
Moreover, floating out these "contaminated" constraints doesn't help
when generalising either. If we generalise over (Integral b), we still
can't solve the retained implication (forall a. F a ~ b). Indeed,
arguably that too would be a harder error to understand.
But this transitive closure stuff gives rise to a complex rule for
when defaulting actually happens, and one that was never documented.
Moreover (#12923), the more complex rule is sometimes NOT what
you want. So I simply removed the extra code to implement the
contamination stuff. There was zero effect on the testsuite (not even
#8155).
------ End of historical note -----------
Note [DefaultTyVar]
~~~~~~~~~~~~~~~~~~~
defaultTyVar is used on any un-instantiated meta type variables to
default any RuntimeRep variables to LiftedRep. This is important
to ensure that instance declarations match. For example consider
instance Show (a->b)
foo x = show (\_ -> True)
Then we'll get a constraint (Show (p ->q)) where p has kind (TYPE r),
and that won't match the tcTypeKind (*) in the instance decl. See tests
tc217 and tc175.
We look only at touchable type variables. No further constraints
are going to affect these type variables, so it's time to do it by
hand. However we aren't ready to default them fully to () or
whatever, because the type-class defaulting rules have yet to run.
An alternate implementation would be to emit a derived constraint setting
the RuntimeRep variable to LiftedRep, but this seems unnecessarily indirect.
Note [Promote _and_ default when inferring]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we are inferring a type, we simplify the constraint, and then use
approximateWC to produce a list of candidate constraints. Then we MUST
a) Promote any meta-tyvars that have been floated out by
approximateWC, to restore invariant (WantedInv) described in
Note [TcLevel and untouchable type variables] in TcType.
b) Default the kind of any meta-tyvars that are not mentioned in
in the environment.
To see (b), suppose the constraint is (C ((a :: OpenKind) -> Int)), and we
have an instance (C ((x:*) -> Int)). The instance doesn't match -- but it
should! If we don't solve the constraint, we'll stupidly quantify over
(C (a->Int)) and, worse, in doing so skolemiseQuantifiedTyVar will quantify over
(b:*) instead of (a:OpenKind), which can lead to disaster; see #7332.
#7641 is a simpler example.
Note [Promoting unification variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we float an equality out of an implication we must "promote" free
unification variables of the equality, in order to maintain Invariant
(WantedInv) from Note [TcLevel and untouchable type variables] in
TcType, for the leftover implication.
This is absolutely necessary. Consider the following example. We start
with two implications and a class with a functional dependency.
class C x y | x -> y
instance C [a] [a]
(I1) [untch=beta]forall b. 0 => F Int ~ [beta]
(I2) [untch=beta]forall c. 0 => F Int ~ [[alpha]] /\ C beta [c]
We float (F Int ~ [beta]) out of I1, and we float (F Int ~ [[alpha]]) out of I2.
They may react to yield that (beta := [alpha]) which can then be pushed inwards
the leftover of I2 to get (C [alpha] [a]) which, using the FunDep, will mean that
(alpha := a). In the end we will have the skolem 'b' escaping in the untouchable
beta! Concrete example is in indexed_types/should_fail/ExtraTcsUntch.hs:
class C x y | x -> y where
op :: x -> y -> ()
instance C [a] [a]
type family F a :: *
h :: F Int -> ()
h = undefined
data TEx where
TEx :: a -> TEx
f (x::beta) =
let g1 :: forall b. b -> ()
g1 _ = h [x]
g2 z = case z of TEx y -> (h [[undefined]], op x [y])
in (g1 '3', g2 undefined)
*********************************************************************************
* *
* Floating equalities *
* *
*********************************************************************************
Note [Float Equalities out of Implications]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For ordinary pattern matches (including existentials) we float
equalities out of implications, for instance:
data T where
MkT :: Eq a => a -> T
f x y = case x of MkT _ -> (y::Int)
We get the implication constraint (x::T) (y::alpha):
forall a. [untouchable=alpha] Eq a => alpha ~ Int
We want to float out the equality into a scope where alpha is no
longer untouchable, to solve the implication!
But we cannot float equalities out of implications whose givens may
yield or contain equalities:
data T a where
T1 :: T Int
T2 :: T Bool
T3 :: T a
h :: T a -> a -> Int
f x y = case x of
T1 -> y::Int
T2 -> y::Bool
T3 -> h x y
We generate constraint, for (x::T alpha) and (y :: beta):
[untouchables = beta] (alpha ~ Int => beta ~ Int) -- From 1st branch
[untouchables = beta] (alpha ~ Bool => beta ~ Bool) -- From 2nd branch
(alpha ~ beta) -- From 3rd branch
If we float the equality (beta ~ Int) outside of the first implication and
the equality (beta ~ Bool) out of the second we get an insoluble constraint.
But if we just leave them inside the implications, we unify alpha := beta and
solve everything.
Principle:
We do not want to float equalities out which may
need the given *evidence* to become soluble.
Consequence: classes with functional dependencies don't matter (since there is
no evidence for a fundep equality), but equality superclasses do matter (since
they carry evidence).
-}
floatEqualities :: [TcTyVar] -> [EvId] -> EvBindsVar -> Bool
                -> WantedConstraints
                -> TcS (Cts, WantedConstraints)
-- Main idea: see Note [Float Equalities out of Implications]
--
-- Precondition: the wc_simple of the incoming WantedConstraints are
--               fully zonked, so that we can see their free variables
--
-- Postcondition: The returned floated constraints (Cts) are only
--                Wanted or Derived
--
-- Also performs some unifications (via promoteTyVar), adding to
-- monadically-carried ty_binds. These will be used when processing
-- floated_eqs later
--
-- Subtleties: Note [Float equalities from under a skolem binding]
--             Note [Skolem escape]
--             Note [What prevents a constraint from floating]
floatEqualities skols given_ids ev_binds_var no_given_eqs
                wanteds@(WC { wc_simple = simples })
  | not no_given_eqs  -- There are some given equalities, so don't float
  = return (emptyBag, wanteds)   -- Note [Float Equalities out of Implications]
  | otherwise
  = do { -- First zonk: the inert set (from whence they came) is fully
         -- zonked, but unflattening may have filled in unification
         -- variables, and we /must/ see them.  Otherwise we may float
         -- constraints that mention the skolems!
         simples <- TcS.zonkSimples simples
       ; binds   <- TcS.getTcEvBindsMap ev_binds_var

       -- Now we can pick the ones to float
       -- The constraints are un-flattened and de-canonicalised
       ; let (candidate_eqs, no_float_cts) = partitionBag is_float_eq_candidate simples

             seed_skols = mkVarSet skols     `unionVarSet`
                          mkVarSet given_ids `unionVarSet`
                          foldr add_non_flt_ct emptyVarSet no_float_cts `unionVarSet`
                          foldEvBindMap add_one_bind emptyVarSet binds
             -- seed_skols: See Note [What prevents a constraint from floating] (1,2,3)
             -- Include the EvIds of any non-floating constraints

             extended_skols = transCloVarSet (add_captured_ev_ids candidate_eqs) seed_skols
                 -- extended_skols contains the EvIds of all the trapped constraints
                 -- See Note [What prevents a constraint from floating] (3)

             (flt_eqs, no_flt_eqs) = partitionBag (is_floatable extended_skols)
                                                  candidate_eqs

             remaining_simples = no_float_cts `andCts` no_flt_eqs

       -- Promote any unification variables mentioned in the floated equalities
       -- See Note [Promoting unification variables]
       ; mapM_ promoteTyVarTcS (tyCoVarsOfCtsList flt_eqs)
       ; traceTcS "floatEqualities" (vcat [ text "Skols =" <+> ppr skols
                                          , text "Extended skols =" <+> ppr extended_skols
                                          , text "Simples =" <+> ppr simples
                                          , text "Candidate eqs =" <+> ppr candidate_eqs
                                          , text "Floated eqs =" <+> ppr flt_eqs])
       ; return ( flt_eqs, wanteds { wc_simple = remaining_simples } ) }
  where
    -- Every binder of an evidence binding counts as "bound here"
    add_one_bind :: EvBind -> VarSet -> VarSet
    add_one_bind bind acc = extendVarSet acc (evBindVar bind)

    -- Collect the EvId of each non-floating, non-Derived constraint
    -- (Derived constraints have no evidence)
    add_non_flt_ct :: Ct -> VarSet -> VarSet
    add_non_flt_ct ct acc | isDerivedCt ct = acc
                          | otherwise      = extendVarSet acc (ctEvId ct)

    -- A candidate floats unless its type (for Deriveds) or its
    -- evidence id (otherwise) is trapped
    is_floatable :: VarSet -> Ct -> Bool
    is_floatable skols ct
      | isDerivedCt ct = not (tyCoVarsOfCt ct `intersectsVarSet` skols)
      | otherwise      = not (ctEvId ct `elemVarSet` skols)

    -- One step of the transitive closure: trap the EvId of any
    -- candidate whose type mentions an already-trapped variable
    -- (case (4) of Note [What prevents a constraint from floating])
    add_captured_ev_ids :: Cts -> VarSet -> VarSet
    add_captured_ev_ids cts skols = foldr extra_skol emptyVarSet cts
       where
         extra_skol ct acc
           | isDerivedCt ct                           = acc
           | tyCoVarsOfCt ct `intersectsVarSet` skols = extendVarSet acc (ctEvId ct)
           | otherwise                                = acc

    -- Identify which equalities are candidates for floating
    -- Float out alpha ~ ty, or ty ~ alpha which might be unified outside
    -- See Note [Which equalities to float]
    is_float_eq_candidate ct
      | pred <- ctPred ct
      , EqPred NomEq ty1 ty2 <- classifyPredType pred
      , tcTypeKind ty1 `tcEqType` tcTypeKind ty2   -- Homogeneous only
      = case (tcGetTyVar_maybe ty1, tcGetTyVar_maybe ty2) of
          (Just tv1, _) -> float_tv_eq_candidate tv1 ty2
          (_, Just tv2) -> float_tv_eq_candidate tv2 ty1
          _             -> False
      | otherwise = False

    float_tv_eq_candidate tv1 ty2  -- See Note [Which equalities to float]
      = isMetaTyVar tv1
        && (not (isTyVarTyVar tv1) || isTyVarTy ty2)
{- Note [Float equalities from under a skolem binding]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Which of the simple equalities can we float out? Obviously, only
ones that don't mention the skolem-bound variables. But that is
over-eager. Consider
[2] forall a. F a beta[1] ~ gamma[2], G beta[1] gamma[2] ~ Int
The second constraint doesn't mention 'a'. But if we float it,
we'll promote gamma[2] to gamma'[1]. Now suppose that we learn that
beta := Bool, and F a Bool = a, and G Bool _ = Int. Then we'll
be left with the constraint
[2] forall a. a ~ gamma'[1]
which is insoluble because gamma became untouchable.
Solution: float only constraints that stand a jolly good chance of
being soluble simply by being floated, namely ones of form
a ~ ty
where 'a' is a currently-untouchable unification variable, but may
become touchable by being floated (perhaps by more than one level).
We had a very complicated rule previously, but this is nice and
simple. (To see the notes, look at this Note in a version of
TcSimplify prior to Oct 2014).
Note [Which equalities to float]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Which equalities should we float? We want to float ones where there
is a decent chance that floating outwards will allow unification to
happen. In particular, float out equalities that are:
* Of form (alpha ~# ty) or (ty ~# alpha), where
* alpha is a meta-tyvar.
* And 'alpha' is not a TyVarTv with 'ty' being a non-tyvar. In that
case, floating out won't help either, and it may affect grouping
of error messages.
* Homogeneous (both sides have the same kind). Why only homogeneous?
Because heterogeneous equalities have derived kind equalities.
See Note [Equalities with incompatible kinds] in TcCanonical.
If we float out a hetero equality, then it will spit out the same
derived kind equality again, which might create duplicate error
messages.
Instead, we do float out the kind equality (if it's worth floating
out, as above). If/when we solve it, we'll be able to rewrite the
original hetero equality to be homogeneous, and then perhaps make
progress / float it out. The duplicate error message was spotted in
typecheck/should_fail/T7368.
* Nominal. No point in floating (alpha ~R# ty), because we do not
unify representational equalities even if alpha is touchable.
See Note [Do not unify representational equalities] in TcInteract.
Note [Skolem escape]
~~~~~~~~~~~~~~~~~~~~
You might worry about skolem escape with all this floating.
For example, consider
[2] forall a. (a ~ F beta[2] delta,
Maybe beta[2] ~ gamma[1])
The (Maybe beta ~ gamma) doesn't mention 'a', so we float it, and
solve with gamma := beta. But what if later delta:=Int, and
F b Int = b.
Then we'd get a ~ beta[2], and solve to get beta:=a, and now the
skolem has escaped!
But it's ok: when we float (Maybe beta[2] ~ gamma[1]), we promote beta[2]
to beta[1], and that means the (a ~ beta[1]) will be stuck, as it should be.
Note [What prevents a constraint from floating]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
What /prevents/ a constraint from floating? If it mentions one of the
"bound variables of the implication". What are they?
The "bound variables of the implication" are
1. The skolem type variables `ic_skols`
2. The "given" evidence variables `ic_given`. Example:
forall a. (co :: t1 ~# t2) => [W] co2 : (a ~# b |> co)
Here 'co' is bound
3. The binders of all evidence bindings in `ic_binds`. Example
forall a. (d :: t1 ~ t2)
EvBinds { (co :: t1 ~# t2) = superclass-sel d }
=> [W] co2 : (a ~# b |> co)
Here `co` is gotten by superclass selection from `d`, and the
wanted constraint co2 must not float.
4. And the evidence variable of any equality constraint (incl
Wanted ones) whose type mentions a bound variable. Example:
forall k. [W] co1 :: t1 ~# t2 |> co2
[W] co2 :: k ~# *
Here, since `k` is bound, so is `co2` and hence so is `co1`.
Here (1,2,3) are handled by the "seed_skols" calculation, and
(4) is done by the transCloVarSet call.
The possible dependence on givens, and evidence bindings, is more
subtle than we'd realised at first. See #14584.
How can (4) arise? Suppose we have (k :: *), (a :: k), and ([G} k ~ *).
Then from an equality like (a ~ Int) we might end up with
[W] co1 :: k ~ *
[W] co2 :: (a |> co1) ~ Int
*********************************************************************************
* *
* Defaulting and disambiguation *
* *
*********************************************************************************
-}
applyDefaultingRules :: WantedConstraints -> TcS Bool
-- True <=> I did some defaulting, by unifying a meta-tyvar
-- Input WantedConstraints are not necessarily zonked
applyDefaultingRules wanteds
  | isEmptyWC wanteds
  = return False
  | otherwise
  = do { info@(default_tys, _) <- getDefaultInfo
       ; zonked_wanteds <- TcS.zonkWC wanteds
       ; let groups = findDefaultableGroups info zonked_wanteds
       ; traceTcS "applyDefaultingRules {" $
                  vcat [ text "wanteds =" <+> ppr zonked_wanteds
                       , text "groups =" <+> ppr groups
                       , text "info =" <+> ppr info ]
       -- Try to default each group independently; record whether any
       -- group made progress
       ; progress <- mapM (disambigGroup default_tys) groups
       ; traceTcS "applyDefaultingRules }" (ppr progress)
       ; return (or progress) }
findDefaultableGroups
    :: ( [Type]
       , (Bool,Bool) )     -- (Overloaded strings, extended default rules)
    -> WantedConstraints   -- Unsolved (wanted or derived)
    -> [(TyVar, [Ct])]
-- Group the unsolved unary class constraints (C tv) by type variable,
-- returning only groups whose tyvar and classes may be defaulted
findDefaultableGroups (default_tys, (ovl_strings, extended_defaults)) wanteds
  | null default_tys
  = []
  | otherwise
  = [ (tv, map fstOf3 group)
    | group'@((_,_,tv) :| _) <- unary_groups
    , let group = toList group'
    , defaultable_tyvar tv
    , defaultable_classes (map sndOf3 group) ]
  where
    simples                = approximateWC True wanteds
    (unaries, non_unaries) = partitionWith find_unary (bagToList simples)
    unary_groups           = equivClasses cmp_tv unaries

    unary_groups :: [NonEmpty (Ct, Class, TcTyVar)]  -- (C tv) constraints
    unaries      :: [(Ct, Class, TcTyVar)]           -- (C tv) constraints
    non_unaries  :: [Ct]                             -- and *other* constraints

        -- Finds unary type-class constraints
        -- But take account of polykinded classes like Typeable,
        -- which may look like (Typeable * (a:*))   (#8931)
    find_unary :: Ct -> Either (Ct, Class, TyVar) Ct
    find_unary cc
        | Just (cls,tys)   <- getClassPredTys_maybe (ctPred cc)
        , [ty] <- filterOutInvisibleTypes (classTyCon cls) tys
              -- Ignore invisible arguments for this purpose
        , Just tv <- tcGetTyVar_maybe ty
        , isMetaTyVar tv  -- We might have runtime-skolems in GHCi, and
                          -- we definitely don't want to try to assign to those!
        = Left (cc, cls, tv)
    find_unary cc = Right cc  -- Non unary or non dictionary

    bad_tvs :: TcTyCoVarSet  -- TyVars mentioned by non-unaries
    bad_tvs = mapUnionVarSet tyCoVarsOfCt non_unaries

    -- Group by the constrained type variable
    cmp_tv (_,_,tv1) (_,_,tv2) = tv1 `compare` tv2

    defaultable_tyvar :: TcTyVar -> Bool
    defaultable_tyvar tv
        = let b1 = isTyConableTyVar tv  -- Note [Avoiding spurious errors]
              b2 = not (tv `elemVarSet` bad_tvs)
          in b1 && (b2 || extended_defaults)  -- Note [Multi-parameter defaults]

    defaultable_classes :: [Class] -> Bool
    defaultable_classes clss
        | extended_defaults = any (isInteractiveClass ovl_strings) clss
        | otherwise         = all is_std_class clss && (any (isNumClass ovl_strings) clss)

    -- is_std_class adds IsString to the standard numeric classes,
    -- when -foverloaded-strings is enabled
    is_std_class cls = isStandardClass cls ||
                       (ovl_strings && (cls `hasKey` isStringClassKey))
------------------------------
disambigGroup :: [Type]            -- The default types
              -> (TcTyVar, [Ct])   -- All classes of the form (C a)
                                   --  sharing same type variable
              -> TcS Bool          -- True <=> something happened, reflected in ty_binds
-- Try each default type in turn: speculatively solve the group's
-- constraints at that type inside a fresh (discarded) nested
-- implication; on the first success, really unify the tyvar and
-- warn about the defaulting.
disambigGroup [] _
  = return False
disambigGroup (default_ty:default_tys) group@(the_tv, wanteds)
  = do { traceTcS "disambigGroup {" (vcat [ ppr default_ty, ppr the_tv, ppr wanteds ])
       ; fake_ev_binds_var <- TcS.newTcEvBinds
       ; tclvl             <- TcS.getTcLevel
       ; success <- nestImplicTcS fake_ev_binds_var (pushTcLevel tclvl) try_group
       ; if success then
             -- Success: record the type variable binding, and return
             do { unifyTyVar the_tv default_ty
                ; wrapWarnTcS $ warnDefaulting wanteds default_ty
                ; traceTcS "disambigGroup succeeded }" (ppr default_ty)
                ; return True }
         else
             -- Failure: try with the next type
             do { traceTcS "disambigGroup failed, will try other default types }"
                           (ppr default_ty)
                ; disambigGroup default_tys group } }
  where
    -- Can we completely solve the wanteds instantiated at default_ty?
    try_group
      | Just subst <- mb_subst
      = do { lcl_env <- TcS.getLclEnv
           ; tc_lvl <- TcS.getTcLevel
           ; let loc = mkGivenLoc tc_lvl UnkSkol lcl_env
           ; wanted_evs <- mapM (newWantedEvVarNC loc . substTy subst . ctPred)
                                wanteds
           ; fmap isEmptyWC $
             solveSimpleWanteds $ listToBag $
             map mkNonCanonical wanted_evs }
      | otherwise
      = return False

    the_ty   = mkTyVarTy the_tv
    mb_subst = tcMatchTyKi the_ty default_ty
      -- Make sure the kinds match too; hence this call to tcMatchTyKi
      -- E.g. suppose the only constraint was (Typeable k (a::k))
      -- With the addition of polykinded defaulting we also want to reject
      -- ill-kinded defaulting attempts like (Eq []) or (Foldable Int) here.
-- In interactive mode, or with -XExtendedDefaultRules,
-- we default Show a to Show () to avoid gratuitous errors on "show []"
isInteractiveClass :: Bool -- -XOverloadedStrings?
                   -> Class -> Bool
-- Classes we are willing to default in GHCi / -XExtendedDefaultRules
isInteractiveClass ovl_strings cls
  | isNumClass ovl_strings cls = True
  | otherwise                  = classKey cls `elem` interactiveClassKeys
-- isNumClass adds IsString to the standard numeric classes,
-- when -foverloaded-strings is enabled
isNumClass :: Bool -- -XOverloadedStrings?
           -> Class -> Bool
isNumClass ovl_strings cls
  | isNumericClass cls = True
  | otherwise          = ovl_strings && (cls `hasKey` isStringClassKey)
{-
Note [Avoiding spurious errors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When doing the unification for defaulting, we check for skolem
type variables, and simply don't default them. For example:
f = (*) -- Monomorphic
g :: Num a => a -> a
g x = f x x
Here, we get a complaint when checking the type signature for g,
that g isn't polymorphic enough; but then we get another one when
dealing with the (Num a) context arising from f's definition;
we try to unify a with Int (to default it), but find that it's
already been unified with the rigid variable from g's type sig.
Note [Multi-parameter defaults]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
With -XExtendedDefaultRules, we default only based on single-variable
constraints, but do not exclude from defaulting any type variables which also
appear in multi-variable constraints. This means that the following will
default properly:
default (Integer, Double)
class A b (c :: Symbol) where
a :: b -> Proxy c
instance A Integer c where a _ = Proxy
main = print (a 5 :: Proxy "5")
Note that if we change the above instance ("instance A Integer") to
"instance A Double", we get an error:
No instance for (A Integer "5")
This is because the first defaulted type (Integer) has successfully satisfied
its single-parameter constraints (in this case Num).
-}
| sdiehl/ghc | compiler/typecheck/TcSimplify.hs | bsd-3-clause | 119,249 | 6 | 19 | 32,759 | 12,025 | 6,284 | 5,741 | -1 | -1 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Utils where
import Data.Char
import Data.Set
-- | Remove duplicates from a list, keeping the first occurrence of each
-- element.  Runs in O(n log n) using a 'Set' of already-seen values,
-- unlike the O(n^2) 'Data.List.nub'.
nubOrd :: forall a . Ord a => [a] -> [a]
nubOrd = go empty
  where
    go :: Set a -> [a] -> [a]
    go _ [] = []
    go seen (x : xs)
      | x `member` seen = go seen xs
      | otherwise       = x : go (insert x seen) xs
-- | Abort with a standard "not yet implemented" message that asks the
-- user to open an issue, appending @message@ as debugging output.
errorNyi :: String -> a
errorNyi message = error (stripSpaces (unlines report))
  where
    report =
      [ "Encountered a language construct that is"
      , "not yet implemented. Please consider opening a bug report about"
      , "this here: https://github.com/soenkehahn/dead-code-detection/issues"
      , ""
      , "Here's some debugging output that will probably help to solve this problem:"
      , message
      ]
-- | Trim whitespace from both ends of a string.
stripSpaces :: String -> String
stripSpaces = dropTrailing . dropLeading
  where
    dropLeading  = dropWhile isSpace
    dropTrailing = reverse . dropWhile isSpace . reverse
-- | Apply a function to the 'Left' payload of an 'Either'; 'Right'
-- values pass through unchanged.
mapLeft :: (a -> b) -> Either a c -> Either b c
mapLeft f (Left a)  = Left (f a)
mapLeft _ (Right c) = Right c
| soenkehahn/dead-code-detection | src/Utils.hs | bsd-3-clause | 963 | 0 | 13 | 241 | 304 | 155 | 149 | 29 | 2 |
module CmmMachOp
( MachOp(..)
, pprMachOp, isCommutableMachOp, isAssociativeMachOp
, isComparisonMachOp, machOpResultType
, machOpArgReps, maybeInvertComparison
-- MachOp builders
, mo_wordAdd, mo_wordSub, mo_wordEq, mo_wordNe,mo_wordMul, mo_wordSQuot
, mo_wordSRem, mo_wordSNeg, mo_wordUQuot, mo_wordURem
, mo_wordSGe, mo_wordSLe, mo_wordSGt, mo_wordSLt, mo_wordUGe
, mo_wordULe, mo_wordUGt, mo_wordULt
, mo_wordAnd, mo_wordOr, mo_wordXor, mo_wordNot, mo_wordShl, mo_wordSShr, mo_wordUShr
, mo_u_8To32, mo_s_8To32, mo_u_16To32, mo_s_16To32
, mo_u_8ToWord, mo_s_8ToWord, mo_u_16ToWord, mo_s_16ToWord, mo_u_32ToWord, mo_s_32ToWord
, mo_32To8, mo_32To16, mo_WordTo8, mo_WordTo16, mo_WordTo32, mo_WordTo64
-- CallishMachOp
, CallishMachOp(..), callishMachOpHints
, pprCallishMachOp
)
where
#include "HsVersions.h"
import CmmType
import Outputable
import DynFlags
-----------------------------------------------------------------------------
-- MachOp
-----------------------------------------------------------------------------
-- | Machine-level primops; ones which we can reasonably delegate to the
-- native code generators to handle.  Most operations are parameterised by
-- the 'Width' that they operate on.  Some operations have separate signed
-- and unsigned versions, and float and integer versions.
data MachOp
  -- Integer operations (insensitive to signed/unsigned)
  = MO_Add Width
  | MO_Sub Width
  | MO_Eq  Width
  | MO_Ne  Width
  | MO_Mul Width                -- low word of multiply

  -- Signed multiply/divide
  | MO_S_MulMayOflo Width       -- nonzero if signed multiply overflows
  | MO_S_Quot Width             -- signed / (same semantics as IntQuotOp)
  | MO_S_Rem  Width             -- signed % (same semantics as IntRemOp)
  | MO_S_Neg  Width             -- unary -

  -- Unsigned multiply/divide
  | MO_U_MulMayOflo Width       -- nonzero if unsigned multiply overflows
  | MO_U_Quot Width             -- unsigned / (same semantics as WordQuotOp)
  | MO_U_Rem  Width             -- unsigned % (same semantics as WordRemOp)

  -- Signed comparisons
  | MO_S_Ge Width
  | MO_S_Le Width
  | MO_S_Gt Width
  | MO_S_Lt Width

  -- Unsigned comparisons
  | MO_U_Ge Width
  | MO_U_Le Width
  | MO_U_Gt Width
  | MO_U_Lt Width

  -- Floating point arithmetic
  | MO_F_Add  Width
  | MO_F_Sub  Width
  | MO_F_Neg  Width             -- unary -
  | MO_F_Mul  Width
  | MO_F_Quot Width

  -- Floating point comparison
  | MO_F_Eq Width
  | MO_F_Ne Width
  | MO_F_Ge Width
  | MO_F_Le Width
  | MO_F_Gt Width
  | MO_F_Lt Width

  -- Bitwise operations.  Not all of these may be supported
  -- at all sizes, and only integral Widths are valid.
  | MO_And   Width
  | MO_Or    Width
  | MO_Xor   Width
  | MO_Not   Width
  | MO_Shl   Width
  | MO_U_Shr Width              -- unsigned shift right
  | MO_S_Shr Width              -- signed shift right

  -- Conversions.  Some of these will be NOPs.
  -- Floating-point conversions use the signed variant.
  | MO_SF_Conv Width Width      -- Signed int -> Float
  | MO_FS_Conv Width Width      -- Float -> Signed int
  | MO_SS_Conv Width Width      -- Signed int -> Signed int
  | MO_UU_Conv Width Width      -- unsigned int -> unsigned int
  | MO_FF_Conv Width Width      -- Float -> Float

  -- Vector element insertion and extraction operations
  | MO_V_Insert  Length Width   -- Insert scalar into vector
  | MO_V_Extract Length Width   -- Extract scalar from vector

  -- Integer vector operations
  | MO_V_Add Length Width
  | MO_V_Sub Length Width
  | MO_V_Mul Length Width

  -- Signed vector multiply/divide
  | MO_VS_Quot Length Width
  | MO_VS_Rem  Length Width
  | MO_VS_Neg  Length Width

  -- Unsigned vector multiply/divide
  | MO_VU_Quot Length Width
  | MO_VU_Rem  Length Width

  -- Floating point vector element insertion and extraction operations
  | MO_VF_Insert  Length Width  -- Insert scalar into vector
  | MO_VF_Extract Length Width  -- Extract scalar from vector

  -- Floating point vector operations
  | MO_VF_Add  Length Width
  | MO_VF_Sub  Length Width
  | MO_VF_Neg  Length Width     -- unary -
  | MO_VF_Mul  Length Width
  | MO_VF_Quot Length Width
  deriving (Eq, Show)
-- | Render a 'MachOp' via its derived 'Show' instance.
pprMachOp :: MachOp -> SDoc
pprMachOp = text . show
-- -----------------------------------------------------------------------------
-- Some common MachReps

-- A 'wordRep' is a machine word on the target architecture
-- Specifically, it is the size of an Int#, Word#, Addr#
-- and the unit of allocation on the stack and the heap
-- Any pointer is also guaranteed to be a wordRep.

-- | Word-sized MachOp builders: each reads the target word width from
-- 'DynFlags'.  The fixed-width 8/16<->32-bit conversions below need no
-- 'DynFlags' and are plain values.
mo_wordAdd, mo_wordSub, mo_wordEq, mo_wordNe,mo_wordMul, mo_wordSQuot
    , mo_wordSRem, mo_wordSNeg, mo_wordUQuot, mo_wordURem
    , mo_wordSGe, mo_wordSLe, mo_wordSGt, mo_wordSLt, mo_wordUGe
    , mo_wordULe, mo_wordUGt, mo_wordULt
    , mo_wordAnd, mo_wordOr, mo_wordXor, mo_wordNot, mo_wordShl, mo_wordSShr, mo_wordUShr
    , mo_u_8ToWord, mo_s_8ToWord, mo_u_16ToWord, mo_s_16ToWord, mo_u_32ToWord, mo_s_32ToWord
    , mo_WordTo8, mo_WordTo16, mo_WordTo32, mo_WordTo64
    :: DynFlags -> MachOp

mo_u_8To32, mo_s_8To32, mo_u_16To32, mo_s_16To32
    , mo_32To8, mo_32To16
    :: MachOp

mo_wordAdd   dflags = MO_Add (wordWidth dflags)
mo_wordSub   dflags = MO_Sub (wordWidth dflags)
mo_wordEq    dflags = MO_Eq (wordWidth dflags)
mo_wordNe    dflags = MO_Ne (wordWidth dflags)
mo_wordMul   dflags = MO_Mul (wordWidth dflags)
mo_wordSQuot dflags = MO_S_Quot (wordWidth dflags)
mo_wordSRem  dflags = MO_S_Rem (wordWidth dflags)
mo_wordSNeg  dflags = MO_S_Neg (wordWidth dflags)
mo_wordUQuot dflags = MO_U_Quot (wordWidth dflags)
mo_wordURem  dflags = MO_U_Rem (wordWidth dflags)

mo_wordSGe   dflags = MO_S_Ge (wordWidth dflags)
mo_wordSLe   dflags = MO_S_Le (wordWidth dflags)
mo_wordSGt   dflags = MO_S_Gt (wordWidth dflags)
mo_wordSLt   dflags = MO_S_Lt (wordWidth dflags)

mo_wordUGe   dflags = MO_U_Ge (wordWidth dflags)
mo_wordULe   dflags = MO_U_Le (wordWidth dflags)
mo_wordUGt   dflags = MO_U_Gt (wordWidth dflags)
mo_wordULt   dflags = MO_U_Lt (wordWidth dflags)

mo_wordAnd   dflags = MO_And (wordWidth dflags)
mo_wordOr    dflags = MO_Or (wordWidth dflags)
mo_wordXor   dflags = MO_Xor (wordWidth dflags)
mo_wordNot   dflags = MO_Not (wordWidth dflags)
mo_wordShl   dflags = MO_Shl (wordWidth dflags)
mo_wordSShr  dflags = MO_S_Shr (wordWidth dflags)
mo_wordUShr  dflags = MO_U_Shr (wordWidth dflags)

mo_u_8To32  = MO_UU_Conv W8 W32
mo_s_8To32  = MO_SS_Conv W8 W32
mo_u_16To32 = MO_UU_Conv W16 W32
mo_s_16To32 = MO_SS_Conv W16 W32

mo_u_8ToWord  dflags = MO_UU_Conv W8 (wordWidth dflags)
mo_s_8ToWord  dflags = MO_SS_Conv W8 (wordWidth dflags)
mo_u_16ToWord dflags = MO_UU_Conv W16 (wordWidth dflags)
mo_s_16ToWord dflags = MO_SS_Conv W16 (wordWidth dflags)
mo_s_32ToWord dflags = MO_SS_Conv W32 (wordWidth dflags)
mo_u_32ToWord dflags = MO_UU_Conv W32 (wordWidth dflags)

mo_WordTo8  dflags = MO_UU_Conv (wordWidth dflags) W8
mo_WordTo16 dflags = MO_UU_Conv (wordWidth dflags) W16
mo_WordTo32 dflags = MO_UU_Conv (wordWidth dflags) W32
mo_WordTo64 dflags = MO_UU_Conv (wordWidth dflags) W64

mo_32To8  = MO_UU_Conv W32 W8
mo_32To16 = MO_UU_Conv W32 W16
-- ----------------------------------------------------------------------------
-- isCommutableMachOp
-- | Returns 'True' if the MachOp's arguments may be swapped without
-- changing the result.  Used by the platform-independent Cmm
-- optimisations.  Answering 'False' is always safe: it merely produces
-- worse code on the native routes, but is otherwise harmless.
isCommutableMachOp :: MachOp -> Bool
isCommutableMachOp (MO_Add _)          = True
isCommutableMachOp (MO_Eq _)           = True
isCommutableMachOp (MO_Ne _)           = True
isCommutableMachOp (MO_Mul _)          = True
isCommutableMachOp (MO_S_MulMayOflo _) = True
isCommutableMachOp (MO_U_MulMayOflo _) = True
isCommutableMachOp (MO_And _)          = True
isCommutableMachOp (MO_Or _)           = True
isCommutableMachOp (MO_Xor _)          = True
isCommutableMachOp (MO_F_Add _)        = True
isCommutableMachOp (MO_F_Mul _)        = True
isCommutableMachOp _                   = False
-- ----------------------------------------------------------------------------
-- isAssociativeMachOp
-- | Returns 'True' if the MachOp is associative (i.e. @(x+y)+z == x+(y+z)@).
-- Used by the platform-independent Cmm optimisations.  Note that the
-- floating-point add/mul variants are deliberately excluded.  Answering
-- 'False' is always safe.
isAssociativeMachOp :: MachOp -> Bool
isAssociativeMachOp MO_Add {} = True   -- NB: does not include
isAssociativeMachOp MO_Mul {} = True   -- floating point!
isAssociativeMachOp MO_And {} = True
isAssociativeMachOp MO_Or  {} = True
isAssociativeMachOp MO_Xor {} = True
isAssociativeMachOp _         = False
-- ----------------------------------------------------------------------------
-- isComparisonMachOp
-- | Returns 'True' if the MachOp is a comparison (integer or float,
-- signed or unsigned).  Answering 'False' is always safe; it only
-- produces worse code on the native routes.
isComparisonMachOp :: MachOp -> Bool
isComparisonMachOp (MO_Eq _)   = True
isComparisonMachOp (MO_Ne _)   = True
isComparisonMachOp (MO_S_Ge _) = True
isComparisonMachOp (MO_S_Le _) = True
isComparisonMachOp (MO_S_Gt _) = True
isComparisonMachOp (MO_S_Lt _) = True
isComparisonMachOp (MO_U_Ge _) = True
isComparisonMachOp (MO_U_Le _) = True
isComparisonMachOp (MO_U_Gt _) = True
isComparisonMachOp (MO_U_Lt _) = True
isComparisonMachOp MO_F_Eq {}  = True
isComparisonMachOp MO_F_Ne {}  = True
isComparisonMachOp MO_F_Ge {}  = True
isComparisonMachOp MO_F_Le {}  = True
isComparisonMachOp MO_F_Gt {}  = True
isComparisonMachOp MO_F_Lt {}  = True
isComparisonMachOp _           = False
-- -----------------------------------------------------------------------------
-- Inverting conditions
-- | Invert the sense of a comparison, where possible.  Not all
-- conditional tests are invertible: in particular, floating point
-- conditionals cannot be inverted, because there exist floating-point
-- values which return False for both senses of a condition
-- (eg. !(NaN > NaN) && !(NaN /<= NaN)).
maybeInvertComparison :: MachOp -> Maybe MachOp
-- None of the Just cases include floating point.
maybeInvertComparison (MO_Eq r)   = Just (MO_Ne r)
maybeInvertComparison (MO_Ne r)   = Just (MO_Eq r)
maybeInvertComparison (MO_U_Lt r) = Just (MO_U_Ge r)
maybeInvertComparison (MO_U_Gt r) = Just (MO_U_Le r)
maybeInvertComparison (MO_U_Le r) = Just (MO_U_Gt r)
maybeInvertComparison (MO_U_Ge r) = Just (MO_U_Lt r)
maybeInvertComparison (MO_S_Lt r) = Just (MO_S_Ge r)
maybeInvertComparison (MO_S_Gt r) = Just (MO_S_Le r)
maybeInvertComparison (MO_S_Le r) = Just (MO_S_Gt r)
maybeInvertComparison (MO_S_Ge r) = Just (MO_S_Lt r)
maybeInvertComparison _           = Nothing
-- ----------------------------------------------------------------------------
-- machOpResultType
-- | Returns the 'CmmType' of the result of a MachOp.
-- NOTE(review): the first argument type @ty1@ is bound by the partial
-- pattern @(ty1:_) = tys@ below; it is only demanded by the constructors
-- that preserve their first argument's type (Add/Sub/And/Or/Xor), so an
-- empty @tys@ crashes only for those cases.
machOpResultType :: DynFlags -> MachOp -> [CmmType] -> CmmType
machOpResultType dflags mop tys =
  case mop of
    MO_Add {}         -> ty1  -- Preserve GC-ptr-hood
    MO_Sub {}         -> ty1  -- of first arg
    MO_Mul    r       -> cmmBits r
    MO_S_MulMayOflo r -> cmmBits r
    MO_S_Quot r       -> cmmBits r
    MO_S_Rem  r       -> cmmBits r
    MO_S_Neg  r       -> cmmBits r
    MO_U_MulMayOflo r -> cmmBits r
    MO_U_Quot r       -> cmmBits r
    MO_U_Rem  r       -> cmmBits r

    MO_Eq {}          -> comparisonResultRep dflags
    MO_Ne {}          -> comparisonResultRep dflags
    MO_S_Ge {}        -> comparisonResultRep dflags
    MO_S_Le {}        -> comparisonResultRep dflags
    MO_S_Gt {}        -> comparisonResultRep dflags
    MO_S_Lt {}        -> comparisonResultRep dflags

    MO_U_Ge {}        -> comparisonResultRep dflags
    MO_U_Le {}        -> comparisonResultRep dflags
    MO_U_Gt {}        -> comparisonResultRep dflags
    MO_U_Lt {}        -> comparisonResultRep dflags

    MO_F_Add r        -> cmmFloat r
    MO_F_Sub r        -> cmmFloat r
    MO_F_Mul r        -> cmmFloat r
    MO_F_Quot r       -> cmmFloat r
    MO_F_Neg r        -> cmmFloat r
    MO_F_Eq {}        -> comparisonResultRep dflags
    MO_F_Ne {}        -> comparisonResultRep dflags
    MO_F_Ge {}        -> comparisonResultRep dflags
    MO_F_Le {}        -> comparisonResultRep dflags
    MO_F_Gt {}        -> comparisonResultRep dflags
    MO_F_Lt {}        -> comparisonResultRep dflags

    MO_And {}         -> ty1  -- Used for pointer masking
    MO_Or {}          -> ty1
    MO_Xor {}         -> ty1
    MO_Not   r        -> cmmBits r
    MO_Shl   r        -> cmmBits r
    MO_U_Shr r        -> cmmBits r
    MO_S_Shr r        -> cmmBits r

    MO_SS_Conv _ to   -> cmmBits to
    MO_UU_Conv _ to   -> cmmBits to
    MO_FS_Conv _ to   -> cmmBits to
    MO_SF_Conv _ to   -> cmmFloat to
    MO_FF_Conv _ to   -> cmmFloat to

    MO_V_Insert  l w  -> cmmVec l (cmmBits w)
    MO_V_Extract _ w  -> cmmBits w

    MO_V_Add l w      -> cmmVec l (cmmBits w)
    MO_V_Sub l w      -> cmmVec l (cmmBits w)
    MO_V_Mul l w      -> cmmVec l (cmmBits w)

    MO_VS_Quot l w    -> cmmVec l (cmmBits w)
    MO_VS_Rem  l w    -> cmmVec l (cmmBits w)
    MO_VS_Neg  l w    -> cmmVec l (cmmBits w)

    MO_VU_Quot l w    -> cmmVec l (cmmBits w)
    MO_VU_Rem  l w    -> cmmVec l (cmmBits w)

    MO_VF_Insert  l w -> cmmVec l (cmmFloat w)
    MO_VF_Extract _ w -> cmmFloat w

    MO_VF_Add  l w    -> cmmVec l (cmmFloat w)
    MO_VF_Sub  l w    -> cmmVec l (cmmFloat w)
    MO_VF_Mul  l w    -> cmmVec l (cmmFloat w)
    MO_VF_Quot l w    -> cmmVec l (cmmFloat w)
    MO_VF_Neg  l w    -> cmmVec l (cmmFloat w)
  where
    (ty1:_) = tys  -- lazily bound; see NOTE(review) above
-- | The type produced by every comparison MachOp.
-- NOTE(review): assumed to be word-sized ('bWord'); the original author
-- left the comment "is it?" here, so treat this as unconfirmed.
comparisonResultRep :: DynFlags -> CmmType
comparisonResultRep = bWord  -- is it?
-- -----------------------------------------------------------------------------
-- machOpArgReps
-- | This function is used for debugging only: we can check whether an
-- application of a MachOp is "type-correct" by checking that the MachReps of
-- its arguments are the same as the MachOp expects.  This is used when
-- linting a CmmExpr.  Note that shift amounts and vector lane indices are
-- always word-sized.
machOpArgReps :: DynFlags -> MachOp -> [Width]
machOpArgReps dflags op =
  case op of
    MO_Add    r         -> [r,r]
    MO_Sub    r         -> [r,r]
    MO_Eq     r         -> [r,r]
    MO_Ne     r         -> [r,r]
    MO_Mul    r         -> [r,r]
    MO_S_MulMayOflo r   -> [r,r]
    MO_S_Quot r         -> [r,r]
    MO_S_Rem  r         -> [r,r]
    MO_S_Neg  r         -> [r]
    MO_U_MulMayOflo r   -> [r,r]
    MO_U_Quot r         -> [r,r]
    MO_U_Rem  r         -> [r,r]

    MO_S_Ge r           -> [r,r]
    MO_S_Le r           -> [r,r]
    MO_S_Gt r           -> [r,r]
    MO_S_Lt r           -> [r,r]

    MO_U_Ge r           -> [r,r]
    MO_U_Le r           -> [r,r]
    MO_U_Gt r           -> [r,r]
    MO_U_Lt r           -> [r,r]

    MO_F_Add r          -> [r,r]
    MO_F_Sub r          -> [r,r]
    MO_F_Mul r          -> [r,r]
    MO_F_Quot r         -> [r,r]
    MO_F_Neg r          -> [r]
    MO_F_Eq  r          -> [r,r]
    MO_F_Ne  r          -> [r,r]
    MO_F_Ge  r          -> [r,r]
    MO_F_Le  r          -> [r,r]
    MO_F_Gt  r          -> [r,r]
    MO_F_Lt  r          -> [r,r]

    MO_And   r          -> [r,r]
    MO_Or    r          -> [r,r]
    MO_Xor   r          -> [r,r]
    MO_Not   r          -> [r]
    MO_Shl   r          -> [r, wordWidth dflags]
    MO_U_Shr r          -> [r, wordWidth dflags]
    MO_S_Shr r          -> [r, wordWidth dflags]

    MO_SS_Conv from _   -> [from]
    MO_UU_Conv from _   -> [from]
    MO_SF_Conv from _   -> [from]
    MO_FS_Conv from _   -> [from]
    MO_FF_Conv from _   -> [from]

    MO_V_Insert  l r    -> [typeWidth (vec l (cmmBits r)),r,wordWidth dflags]
    MO_V_Extract l r    -> [typeWidth (vec l (cmmBits r)),wordWidth dflags]

    MO_V_Add _ r        -> [r,r]
    MO_V_Sub _ r        -> [r,r]
    MO_V_Mul _ r        -> [r,r]

    MO_VS_Quot _ r      -> [r,r]
    MO_VS_Rem  _ r      -> [r,r]
    MO_VS_Neg  _ r      -> [r]

    MO_VU_Quot _ r      -> [r,r]
    MO_VU_Rem  _ r      -> [r,r]

    MO_VF_Insert  l r   -> [typeWidth (vec l (cmmFloat r)),r,wordWidth dflags]
    MO_VF_Extract l r   -> [typeWidth (vec l (cmmFloat r)),wordWidth dflags]

    MO_VF_Add  _ r      -> [r,r]
    MO_VF_Sub  _ r      -> [r,r]
    MO_VF_Mul  _ r      -> [r,r]
    MO_VF_Quot _ r      -> [r,r]
    MO_VF_Neg  _ r      -> [r]
-----------------------------------------------------------------------------
-- CallishMachOp
-----------------------------------------------------------------------------
-- | CallishMachOps tend to be implemented by foreign calls in some
-- backends, so we separate them out.  In Cmm, these can only occur in a
-- statement position, in contrast to an ordinary MachOp which can occur
-- anywhere in an expression.
data CallishMachOp
  -- Double-precision math primitives
  = MO_F64_Pwr
  | MO_F64_Sin
  | MO_F64_Cos
  | MO_F64_Tan
  | MO_F64_Sinh
  | MO_F64_Cosh
  | MO_F64_Tanh
  | MO_F64_Asin
  | MO_F64_Acos
  | MO_F64_Atan
  | MO_F64_Log
  | MO_F64_Exp
  | MO_F64_Sqrt
  -- Single-precision math primitives
  | MO_F32_Pwr
  | MO_F32_Sin
  | MO_F32_Cos
  | MO_F32_Tan
  | MO_F32_Sinh
  | MO_F32_Cosh
  | MO_F32_Tanh
  | MO_F32_Asin
  | MO_F32_Acos
  | MO_F32_Atan
  | MO_F32_Log
  | MO_F32_Exp
  | MO_F32_Sqrt

  | MO_UF_Conv Width

  | MO_S_QuotRem Width
  | MO_U_QuotRem Width
  | MO_U_QuotRem2 Width
  | MO_Add2 Width
  | MO_U_Mul2 Width

  | MO_WriteBarrier
  | MO_Touch              -- Keep variables live (when using interior pointers)

  -- Prefetch
  | MO_Prefetch_Data      -- Prefetch hint. May change program performance but not
                          -- program behavior.

  -- Note that these three MachOps all take 1 extra parameter than the
  -- standard C lib versions. The extra (last) parameter contains
  -- alignment of the pointers. Used for optimisation in backends.
  | MO_Memcpy
  | MO_Memset
  | MO_Memmove

  | MO_PopCnt Width
  | MO_BSwap Width
  deriving (Eq, Show)
-- | Render a 'CallishMachOp' via its derived 'Show' instance.
pprCallishMachOp :: CallishMachOp -> SDoc
pprCallishMachOp = text . show
-- | Argument and result hints for a 'CallishMachOp':
-- (result hints, argument hints).  Empty lists indicate NoHint.
-- Only the mem* ops carry address hints for their pointer arguments.
callishMachOpHints :: CallishMachOp -> ([ForeignHint], [ForeignHint])
callishMachOpHints MO_Memcpy  = ([], [AddrHint,AddrHint,NoHint,NoHint])
callishMachOpHints MO_Memset  = ([], [AddrHint,NoHint,NoHint,NoHint])
callishMachOpHints MO_Memmove = ([], [AddrHint,AddrHint,NoHint,NoHint])
callishMachOpHints _          = ([],[])
| ekmett/ghc | compiler/cmm/CmmMachOp.hs | bsd-3-clause | 18,306 | 0 | 13 | 5,199 | 4,317 | 2,282 | 2,035 | 371 | 60 |
{-# LANGUAGE GADTs, Arrows #-}
module Grammar
-- ( grammar, NT (Decls) )
where
import qualified Data.Text as T
import Control.Applicative
import Data.Type.Equality
import Data.Type.Eq
import Data.Type.Show
import Language.Forvie.Parsing.Grammar
import qualified Token as T
import Display
--------------------------------------------------------------------------------
-- | Nonterminals of the grammar, indexed by their attribute type (unused
-- here, always @()@) and the AST type they produce.
data NT a b where
  Decls :: NT () File
  Decl  :: NT () Declaration
  Term  :: NT () Term
  Iden  :: NT () Ident
  Cons  :: NT () Constructor
-- | Heterogeneous equality on nonterminals: equal constructors yield
-- 'Refl' proofs that both type indices coincide.
instance Eq3 NT where
  Decls ==== Decls = Just (Refl, Refl)
  Decl  ==== Decl  = Just (Refl, Refl)
  Term  ==== Term  = Just (Refl, Refl)
  Iden  ==== Iden  = Just (Refl, Refl)
  Cons  ==== Cons  = Just (Refl, Refl)
  _     ==== _     = Nothing
-- | Printable names for the nonterminals (used in diagnostics).
instance Show3 NT where
  show3 Decls = "Decls"
  show3 Decl  = "Decl"
  show3 Term  = "Term"
  show3 Iden  = "Iden"
  show3 Cons  = "Cons"
--------------------------------------------------------------------------------
-- | Right-hand side for a semicolon-separated sequence of declarations.
-- @d@ accumulates the declarations seen so far in reverse order, hence
-- the 'reverse' when accepting.
decls :: [v Declaration] -> RHS NT T.Token v (AST v File)
decls d = RHS [ Accept (File (reverse d))
              , WfCall False (Call Decl () 0) $ \v ->
                  RHS [ WfToken T.Semicolon $ \_ -> decls (v:d) ]
              ]
-- | The grammar: for each nonterminal, given a precedence level and an
-- attribute, produce the possible right-hand sides.
grammar :: Grammar AST NT T.Token
grammar Decls = \_ _ -> decls []
-- (File <$> list (call Decl <* terminal T.Semicolon))

grammar Decl = \_ _ ->
    RHS [ assume, typedeclOrDef, datatype ]
  where
    -- "assume i : t"
    assume = WfToken T.Assume $ \_ ->
      RHS [ WfCall False (Call Iden () 0) $ \i ->
        RHS [ WfToken T.Colon $ \_ ->
          RHS [ WfCall False (Call Term () 4) $ \t ->
            RHS [ Accept (Assumption i t) ] ] ] ]

    -- after a leading identifier: "i : t" (type declaration),
    -- "i = t" (definition), or "i a b ... = t" (definition with arguments)
    typedeclOrDef = WfCall False (Call Iden () 0) $ \i ->
      RHS [ WfToken T.Colon $ \_ ->
              RHS [ WfCall False (Call Term () 4) $ \t ->
                RHS [ Accept (TypeDecl i t) ] ]
          , WfToken T.Equals $ \_ ->
              RHS [ WfCall False (Call Term () 4) $ \t ->
                RHS [ Accept (Definition i [] t) ] ]
          , WfCall False (Call Iden () 0) $ \a -> def i [a]
          ]

    -- NOTE(review): the accumulated argument list @a@ is discarded by the
    -- Accept below (@Definition i [] t@ rather than @Definition i
    -- (reverse a) t@) — confirm whether this is intentional.
    def i a = RHS [ WfToken T.Equals $ \_ ->
                      RHS [ WfCall False (Call Term () 4) $ \t ->
                        RHS [ Accept (Definition i [] t) ] ]
                  , WfCall False (Call Iden () 0) $ \b -> def i (b:a)
                  ]

    -- "data D (x : T) ... : Set := <constructors>"
    datatype = WfToken T.Data $ \_ ->
      Datatype <$> call Iden
               <*> list ((,) <$ terminal T.LParen <*> call Iden <* terminal T.Colon <*> (callTop Term) <* terminal T.RParen)
               <* terminal T.Colon <* terminal T.Set <* terminal T.ColonEquals <*> list (call Cons)
    -- noPrec
    --   (Assumption <$ terminal T.Assume <*> call Iden <* terminal T.Colon <*> (callTop Term)
    --    <|> TypeDecl <$> call Iden <* terminal T.Colon <*> (callTop Term)
    --    <|> Definition <$> call Iden <*> list (call Iden) <* terminal T.Equals <*> (callTop Term)
    --    <|> Datatype <$ terminal T.Data
    --        <*> call Iden
    --        <*> list ((,) <$ terminal T.LParen <*> call Iden <* terminal T.Colon <*> (callTop Term) <* terminal T.RParen)
    --        <* terminal T.Colon <* terminal T.Set <* terminal T.ColonEquals <*> list (call Cons))

grammar Cons = noPrec
    (Constr <$ terminal T.Pipe <*> call Iden <* terminal T.Colon <*> list (callAt Term 0))

grammar Iden = \_ _ -> RHS [ WfToken T.Ident $ \t -> RHS [ Accept (Identifier t) ] ]
-- noPrec
--   (Identifier <$> terminal T.Ident)

-- Idea: when we have a call to 'Term (PL 9)', the predictor ought to
-- spark off calls to everything below that (assuming they haven't
-- been called already). Change atLevel to be strict.
-- Idea is to simulate the effect of having a fallthrough case in the
-- grammar, but without marking the return values. So:
-- - When calling 'Term (PL 9)' the expander:
--   - calls 'Term (PL 9)' as normal
--   - generates a call to 'Term (PL 8)'
--   - generates a special item that awaits the response from the call
--     to 'Term (PL 8)', upon completion of this item with a variable
--     of type 'v Term', the variable is passed straight up to the
--     caller of 'Term (PL 9)'
-- normally, when the call to 'Term (PL 8)' completes, it will return
-- back up to callers of 'Term (PL 8)'. Want it to return to callers
-- of any precedence level above '8' (up to ten). Could do this by
-- fiddling the 'findCalls' function.
-- When doing a completion, we should let 'Term (PL 4)' complete
-- something that requires 'Term (PL 5)'.

-- Dispatch terms on the precedence level.
-- NOTE(review): this case is non-exhaustive; levels outside 0..4 crash
-- at runtime.
grammar Term =
  \l () ->
    case l of
      4 -> term4
      3 -> term3
      2 -> term2
      1 -> term1
      0 -> term0
-- | Terms at precedence level 4: lambda abstraction, Pi/Sigma binders
-- (both of which start with '('), and non-dependent arrow types.
term4 :: RHS NT T.Token v (AST v Term)
term4 = RHS [ WfToken T.Lambda $ \_ -> RHS [ WfCall False (Call Iden () 0) $ \v -> lambda [v] ]
            , WfToken T.LParen $ \_ -> RHS [ WfCall False (Call Iden () 0) $ \v -> piOrSigma [v] ]
            , WfCall False (Call Term () 3) $ \v1 ->
                RHS [ WfToken T.Arrow $ \_ ->
                  RHS [ WfCall False (Call Term () 4) $ \v2 ->
                    RHS [ Accept (Arr v1 v2) ] ]]
            ]
  where
    -- collect binder names until the '.' introducing the body
    lambda nms = RHS [ WfToken T.FullStop $ \_ -> RHS [ WfCall False (Call Term () 4) $ \v -> RHS [ Accept (Lam (reverse nms) v) ] ]
                     , WfCall False (Call Iden () 0) $ \v -> lambda (v:nms)
                     ]
    -- after "(x y ... :" decide between Pi ("->") and Sigma ("*")
    piOrSigma nms = RHS [ WfToken T.Colon $ \_ ->
                            RHS [ WfCall False (Call Term () 4) $ \vt ->
                              RHS [ WfToken T.RParen $ \_ ->
                                RHS [ WfToken T.Arrow $ \_ ->
                                        RHS [ WfCall False (Call Term () 4) $ \vt' ->
                                          RHS [ Accept (Pi (reverse nms) vt vt') ] ]
                                    , WfToken T.Times $ \_ ->
                                        RHS [ WfCall False (Call Term () 4) $ \vt' ->
                                          RHS [ Accept (Sigma (reverse nms) vt vt') ] ]
                                    ]
                              ]
                          ]
                        , WfCall False (Call Iden () 0) $ \v -> piOrSigma (v:nms)
                        ]
-- ((Lam <$ terminal T.Lambda <*> nonEmptyList (call Iden) <* terminal T.FullStop <*> callAt Term 4)
--  <|> (Pi
--       <$ terminal T.LParen
--       <*> nonEmptyList (call Iden)
--       <* terminal T.Colon
--       <*> callAt Term 4
--       <* terminal T.RParen
--       <* terminal T.Arrow
--       <*> callAt Term 4)
--  <|> (Sigma
--       <$ terminal T.LParen
--       <*> nonEmptyList (call Iden)
--       <* terminal T.Colon
--       <*> callAt Term 4
--       <* terminal T.RParen
--       <* terminal T.Times
--       <*> callAt Term 4)
--  <|> (Arr <$> callAt Term 3 <* terminal T.Arrow <*> callAt Term 4))
-- | Terms at precedence level 3: sum types (+) and description sums ('+).
term3 :: RHS NT T.Token v (AST v Term)
term3 = RHS [ WfCall False (Call Term () 2) $ \v ->
                RHS [ WfToken T.Plus $ \_ -> RHS [ WfCall False (Call Term () 3) $ \v' -> RHS [ Accept (Sum v v') ] ]
                    , WfToken T.QuotePlus $ \_ -> RHS [ WfCall False (Call Term () 3) $ \v' -> RHS [ Accept (Desc_Sum v v') ] ]
                    ]
            ]
-- ((Sum <$> callAt Term 2 <* terminal T.Plus <*> callAt Term 3)
--  <|> (Desc_Sum <$> callAt Term 2 <* terminal T.QuotePlus <*> callAt Term 3))
-- | Terms at precedence level 2: products (*) and description products ('*).
term2 :: RHS NT T.Token v (AST v Term)
term2 = RHS [ WfCall False (Call Term () 1) $ \v ->
                RHS [ WfToken T.Times $ \_ -> RHS [ WfCall False (Call Term () 2) $ \v' -> RHS [ Accept (Prod v v') ] ]
                    , WfToken T.QuoteTimes $ \_ -> RHS [ WfCall False (Call Term () 2) $ \v' -> RHS [ Accept (Desc_Prod v v') ] ]
                    ]
            ]
-- ((Prod <$> callAt Term 1 <* terminal T.Times <*> callAt Term 2)
--  <|> (Desc_Prod <$> callAt Term 1 <* terminal T.QuoteTimes <*> callAt Term 2))
-- | Terms at precedence level 1: unary prefix constructors applied to an
-- atomic term, plus left-nested application chains.
term1 :: RHS NT T.Token v (AST v Term)
term1 = RHS [ WfToken T.Inl $ \_ -> RHS [ WfCall False (Call Term () 0) $ \v -> RHS [ Accept (Inl v) ] ]
            , WfToken T.Inr $ \_ -> RHS [ WfCall False (Call Term () 0) $ \v -> RHS [ Accept (Inr v) ] ]
            , WfToken T.QuoteK $ \_ -> RHS [ WfCall False (Call Term () 0) $ \v -> RHS [ Accept (Desc_K v) ] ]
            , WfToken T.Mu $ \_ -> RHS [ WfCall False (Call Term () 0) $ \v -> RHS [ Accept (Mu v) ] ]
            , WfToken T.Construct $ \_ -> RHS [ WfCall False (Call Term () 0) $ \v -> RHS [ Accept (Construct v) ] ]
            , WfToken T.Quote_IId $ \_ -> RHS [ WfCall False (Call Term () 0) $ \v -> RHS [ Accept (IDesc_Id v) ] ]
            , WfToken T.Quote_Sg $ \_ -> RHS [ WfCall False (Call Term () 0) $ \v1 -> RHS [ WfCall False (Call Term () 0) $ \v2 -> RHS [ Accept (IDesc_Sg v1 v2) ] ] ]
            , WfToken T.Quote_Pi $ \_ -> RHS [ WfCall False (Call Term () 0) $ \v1 -> RHS [ WfCall False (Call Term () 0) $ \v2 -> RHS [ Accept (IDesc_Pi v1 v2) ] ] ]
            , WfCall False (Call Term () 0) $ \x ->
                RHS [ WfCall False (Call Term () 0) $ \y -> app x [y] ]
            ]
  where
    -- accumulate application arguments (in reverse) until no more follow
    app x ys = RHS [ Accept (App x (reverse ys))
                   , WfCall False (Call Term () 0) $ \y -> app x (y:ys)
                   ]
-- (Inl <$ terminal T.Inl <*> callAt Term 0)
-- <|> (Inr <$ terminal T.Inr <*> callAt Term 0)
-- <|> (Desc_K <$ terminal T.QuoteK <*> callAt Term 0)
-- <|> (Mu <$ terminal T.Mu <*> callAt Term 0)
-- <|> (Construct <$ terminal T.Construct <*> callAt Term 0)
-- <|> (IDesc_Id <$ terminal T.Quote_IId <*> callAt Term 0)
-- <|> (IDesc_Sg <$ terminal T.Quote_Sg <*> callAt Term 0 <*> callAt Term 0)
-- <|> (IDesc_Pi <$ terminal T.Quote_Pi <*> callAt Term 0 <*> callAt Term 0)
-- <|> (App <$> callAt Term 0 <*> nonEmptyList (callAt Term 0))
-- | Terms at precedence level 0: atomic forms — projections, eliminators,
-- literals, pairs, case expressions, Set levels, variables, and
-- parenthesised terms.
term0 :: RHS NT T.Token v (AST v Term)
term0 = (Proj1 <$ terminal T.Fst <*> callAt Term 0
         <|> (Proj2 <$ terminal T.Snd <*> callAt Term 0)
         <|> (MuI <$ terminal T.MuI <*> callAt Term 0 <*> callAt Term 0)
         <|> (Induction <$ terminal T.Induction)
         <|> (Desc_Elim <$ terminal T.ElimD)
         <|> (UnitI <$ terminal T.UnitValue)
         <|> (Pair <$ terminal T.LDoubleAngle <*> (callTop Term) <* terminal T.Comma <*> (callTop Term) <* terminal T.RDoubleAngle)
         <|> (Case
              <$ terminal T.Case
              <*> (callTop Term)
              <* terminal T.For <*> call Iden <* terminal T.FullStop <*> (callTop Term) <* terminal T.With
              <* terminal T.LBrace
              <* terminal T.Inl <*> call Iden <* terminal T.FullStop <*> (callTop Term)
              <* terminal T.Semicolon
              <* terminal T.Inr <*> call Iden <* terminal T.FullStop <*> (callTop Term)
              <* terminal T.RBrace)
         -- "Set" with an optional numeric universe level (defaults to 0)
         <|> (Set <$ terminal T.Set <*> (pure 0 <|> (read . T.unpack <$> terminal T.Number)))
         <|> (Empty <$ terminal T.EmptyType)
         <|> (ElimEmpty <$ terminal T.ElimEmpty)
         <|> (Unit <$ terminal T.UnitType)
         <|> (Desc_Id <$ terminal T.QuoteId)
         <|> (Desc <$ terminal T.Desc)
         <|> (IDesc <$ terminal T.IDesc)
         <|> (IDesc_Elim <$ terminal T.IDesc_Elim)
         <|> (Var <$> call Iden)
         <|> (Paren <$ terminal T.LParen <*> (callTop Term) <* terminal T.RParen))
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Network.CommSec.KeyExchange
-- import Network.CommSec
import qualified Data.ByteString as B
import Data.ByteString.Char8 ()
import Crypto.PubKey.OpenSsh
import Crypto.Types.PubKey.RSA
import Control.Concurrent
import System.FilePath
-- | TCP port used for the loopback key-exchange smoke test.
port :: PortNumber
port = 1874
-- Loopback address: listener and connecter run inside the same process.
-- NOTE(review): no type signature; the literal's type is fixed by its use
-- in 'connect'.
host = "127.0.0.1"
-- | Read an OpenSSH RSA key pair: the public key from @fp.pub@, the
-- private key from @fp@.  Calls 'error' if either file fails to decode.
-- NOTE(review): the pattern matches assume RSA keys; a non-RSA key makes
-- the do-block pattern match fail at runtime.
readSSHKeys :: FilePath -> IO (PublicKey, PrivateKey)
readSSHKeys fp = do
  OpenSshPublicKeyRsa pub _ <- (either error id . decodePublic) `fmap` B.readFile (fp <.> "pub")
  -- (\x -> decodePrivate x) in the original was a redundant eta-expansion
  OpenSshPrivateKeyRsa priv <- (either error id . decodePrivate) `fmap` B.readFile fp
  return (pub, priv)
-- | Loopback smoke test: load two RSA identities, start a listener in a
-- background thread, give it time to bind, then connect to it.
main = do
  (pubA,privA) <- readSSHKeys "id_rsa"
  (pubB,privB) <- readSSHKeys "id_rsa2"
  {-print pubB
    print pubA
    print privB
    print privA
  -}
  forkIO $ listener privA pubB
  -- crude synchronisation: one second for the listener to start accepting
  threadDelay 1000000
  connecter privB pubA
-- | Accept one authenticated connection on 'port', print the first
-- message received, and send a reply.
-- NOTE(review): no type signature; types come from the
-- commsec-keyexchange API — confirm against that package.
listener priv pub = do
  conn <- snd `fmap` accept port [pub] priv Nothing
  recv conn >>= print
  send conn "Hello to you too!"
  return ()
-- | Connect to the listener on @host:port@, send a greeting, and print
-- the reply.
connecter priv pub = do
  conn <- snd `fmap` connect host port [pub] priv
  send conn "Hello!"
  recv conn >>= print
  return ()
| TomMD/commsec-keyexchange | Test/test.hs | bsd-3-clause | 1,175 | 0 | 13 | 248 | 377 | 194 | 183 | 33 | 1 |
-- | Calculates the new simulation step taking into account changes made to the
-- MultiCoreStatus that reflect user input.
module Controller.Helpers.NextSimState where
-- External imports
import Control.Monad
import Hails.MVC.Model.ProtectedModel.Reactive
import SoOSiM (tick)
import SoOSiM.Types (SimState)
-- Internal imports
import Data.History
import Graphics.Diagrams.Transformations.SimState2MultiCoreStatus
import Model.SystemStatus
-- Local imports
import CombinedEnvironment
import Model.Model
-- | Advance the simulation by one step, but only while the model is in an
-- active state ('Running' or 'SlowRunning').
modelUpdateNextStep :: CEnv -> IO ()
modelUpdateNextStep cenv = do
  st <- getter statusField (model cenv)
  -- NOTE(review): the original chose between 'tick' and 'tick' with
  -- @if st == Running then tick else tick@ — both branches were identical,
  -- so the dead conditional has been removed.  If 'SlowRunning' was meant
  -- to use a different step function, restore the conditional with it.
  when (isActiveState st) $
    modelUpdateNextStepWith cenv tick
  where
    isActiveState Running     = True
    isActiveState SlowRunning = True
    isActiveState _           = False
-- | Run one simulation step with the given step function, updating the
-- stored system status and simulation state.  A 'Nothing' (no simulation
-- loaded) is left untouched via 'maybeM'.
modelUpdateNextStepWith :: CEnv -> (SimState -> IO SimState) -> IO ()
modelUpdateNextStepWith cenv nextStepCalc =
  modifierIO simStateField (model cenv) $ maybeM $ \state -> do
    (a', b') <- nextStepWith nextStepCalc (simGLSystemStatus state, simGLSimState state)
    return $ state { simGLSystemStatus = a'
                   , simGLSimState     = b'
                   }
-- newSelection :: SystemStatus -> SystemStatus
-- newSelection ss
-- | [] <- selection ss
-- = ss
-- | [nn,cn] <- selection ss
-- , isJust (findRunningElement (nn,cn) (present (multiCoreStatus ss')))
-- | otherwise = ss
-- FIXME: To be moved to monad extra (or Control.Monad.IfElse)
-- | Monadic map over a 'Maybe': apply the action to a 'Just' payload and
-- rewrap the result, or return 'Nothing' without running anything.
maybeM :: Monad m => (a -> m b) -> Maybe a -> m (Maybe b)
maybeM _ Nothing  = return Nothing
maybeM f (Just x) = liftM Just (f x)
-- | Moves to a future step if available, otherwise executes one simulation
-- step and updates the multi-core status
-- | Moves to a future step if available (history navigation), otherwise
-- executes one simulation step and updates the multi-core status.
nextStepWith :: (SimState -> IO SimState) -> (SystemStatus, SimState) -> IO (SystemStatus, SimState)
nextStepWith f (sys, ss) =
  case future history of
    [] -> nextStepWith' f (sys,ss)
    _  -> return (sys { multiCoreStatus = historyNext history}, ss)
  where history = multiCoreStatus sys
-- | Execute one simulation step with @f@ and branch the multi-core status
-- history at the present point with the freshly derived status.
nextStepWith' :: (SimState -> IO SimState) -> (SystemStatus, SimState) -> IO (SystemStatus, SimState)
nextStepWith' f (sys,ss) = do
  let mcs = present history
  ns   <- f ss
  mcs' <- updateFromSimState mcs ns
  let sys' = sys { multiCoreStatus = historyBranch history mcs' }
  return (sys',ns)
  where history = multiCoreStatus sys
module Language.Css.Decl where
import Language.Css.Selectors
import Language.Css.Properties
-- | A single CSS property paired with its @!important@ flag.
data CssRule = CssRule
  { rule        :: CssProperty  -- ^ the property/value pair
  , isImportant :: Bool         -- ^ whether the rule carries @!important@
  } deriving (Show, Eq)
-- | A CSS declaration block: a selector combinator plus the rules that
-- apply under it.
data CssDecl = CssDecl
  { declSelector :: CssCombinator  -- ^ selector the rules apply to
  , declRules    :: [CssRule]      -- ^ rules inside the braces
  } deriving (Show, Eq)
| athanclark/css-grammar | src/Language/Css/Decl.hs | bsd-3-clause | 302 | 0 | 9 | 58 | 86 | 53 | 33 | 11 | 0 |
{--import qualified Data.Map as Map
import Control.Monad.State (runState)
import System (getArgs)
import Text.XML.HaXml
import Text.XML.HaXml.Pretty
import GenNew
import GenXml
import ParseXml
main :: IO ()
main = do --interact (render . document . genXml . fst . (\f -> runState (genNewIds f) (0,Map.empty)) . parseXml)
args <- getArgs
input <- readFile (args !! 0)
let out = render . document . genXml . fst . (\f -> runState (genNewIds f) (0,Map.empty)) $ parseXml input
writeFile (args !! 1) out
--}
-- Accepts file uploads and saves the files in the given directory.
-- WARNING: this script is a SECURITY RISK and only for
-- demo purposes. Do not put it on a public web server.
import qualified Data.Map as Map
import Control.Monad.State (runState)
import System (getArgs)
import Text.XML.HaXml (render)
import Text.XML.HaXml.Pretty
import GenNew
import GenXml
import ParseXml
import Network.CGI
import Text.XHtml
import qualified Data.ByteString.Lazy as BS
import Control.Monad (liftM)
import Data.Maybe (fromJust)
-- | Multipart POST form with a single file input and an Upload button.
fileForm = form ! [method "post", enctype "multipart/form-data"]
           << [afile "file", submit "" "Upload"]
-- | Read the uploaded \"file\" form field, if any, and run it through
-- the OSM id-renumbering pipeline: parse the XML, generate fresh ids
-- (starting from counter 0 with an empty id map), rebuild the XML and
-- render it to a 'String'.
--
-- Returns 'Nothing' when the request carried no \"file\" field.
saveFile :: (MonadCGI m) => m (Maybe String)
saveFile = liftM (fmap convert) (getInput "file")
  where
    -- Pure conversion pipeline applied to the raw upload contents.
    convert = render . document . genXml . fst
            . (\f -> runState (genNewIds f) (0, Map.empty))
            . parseXml
page t b = header << thetitle << t +++ body << b
basename = reverse . takeWhile (`notElem` "/\\") . reverse
-- | CGI entry point: when a file was uploaded, convert it and return
-- the transformed document as @text/xml@; otherwise serve the upload
-- form again.
cgiMain =
    do ret <- saveFile
       case ret of
         Nothing -> do
               output . renderHtml $ page "Upload example" fileForm
         Just h -> do
               setHeader "Content-type" "text/xml"
               output h
main = runCGI $ handleErrors cgiMain
| molysgaard/OsmXmlTool | OsmXmlTool.hs | bsd-3-clause | 1,831 | 0 | 16 | 429 | 375 | 202 | 173 | 31 | 2 |
{-# LANGUAGE ForeignFunctionInterface #-}
module OIS.OISPrereqs(
component_with_void,
component_with,
button_with_void,
button_with,
axis_with,
vector3_with_void,
vector3_with,
component_delete,
button_delete,
axis_delete,
vector3_delete,
component_new_void,
component_new,
button_new_void,
button_new,
axis_new,
axis_clear,
vector3_new_void,
vector3_new,
vector3_clear
)
where
import Control.Exception (bracket)
import Control.Monad
import Foreign
import Foreign.C.String
import Foreign.C.Types

import OIS.Types
-- | Run an action with a default-constructed 'Component'.
--
-- NOTE: the previous @new \/ act \/ delete@ sequence leaked the C object
-- whenever the action threw; 'bracket' guarantees the matching
-- @*_delete@ runs even on exceptions.  The normal (non-throwing) path
-- is behaviourally identical.
component_with_void :: (Component -> IO a) -> IO a
component_with_void = bracket component_new_void component_delete

-- | Run an action with a 'Component' built from the given type.
component_with :: ComponentType -> (Component -> IO a) -> IO a
component_with p1 = bracket (component_new p1) component_delete

-- | Run an action with a default-constructed 'Button'.
button_with_void :: (Button -> IO a) -> IO a
button_with_void = bracket button_new_void button_delete

-- | Run an action with a 'Button' initialised to the given pressed state.
button_with :: Bool -> (Button -> IO a) -> IO a
button_with p1 = bracket (button_new p1) button_delete

-- | Run an action with a freshly allocated 'Axis'.
axis_with :: (Axis -> IO a) -> IO a
axis_with = bracket axis_new axis_delete

-- | Run an action with a default-constructed 'Vector3'.
vector3_with_void :: (Vector3 -> IO a) -> IO a
vector3_with_void = bracket vector3_new_void vector3_delete

-- | Run an action with a 'Vector3' built from x, y and z components.
vector3_with :: Float -> Float -> Float -> (Vector3 -> IO a) -> IO a
vector3_with p1 p2 p3 = bracket (vector3_new p1 p2 p3) vector3_delete
-- Raw C entry points plus thin Haskell wrappers.  The wrappers only
-- marshal argument types (enum -> CInt, Bool -> CBool, Float -> CFloat);
-- every object returned by a @*_new*@ call is owned by the caller and
-- must be released with the matching @*_delete@.
foreign import ccall "OISPrereqs.h OIS_Component_delete" c_component_delete :: Component -> IO ()
component_delete :: Component -> IO ()
component_delete p1 = c_component_delete p1
foreign import ccall "OISPrereqs.h OIS_Button_delete" c_button_delete :: Button -> IO ()
button_delete :: Button -> IO ()
button_delete p1 = c_button_delete p1
foreign import ccall "OISPrereqs.h OIS_Axis_delete" c_axis_delete :: Axis -> IO ()
axis_delete :: Axis -> IO ()
axis_delete p1 = c_axis_delete p1
foreign import ccall "OISPrereqs.h OIS_Vector3_delete" c_vector3_delete :: Vector3 -> IO ()
vector3_delete :: Vector3 -> IO ()
vector3_delete p1 = c_vector3_delete p1
foreign import ccall "OISPrereqs.h OIS_Component_new_void" c_component_new_void :: IO Component
component_new_void :: IO Component
component_new_void = c_component_new_void
foreign import ccall "OISPrereqs.h OIS_Component_new" c_component_new :: CInt -> IO Component
-- Marshals the 'ComponentType' enum to the C int the FFI expects.
component_new :: ComponentType -> IO Component
component_new p1 = c_component_new (componentTypeToCInt p1)
foreign import ccall "OISPrereqs.h OIS_Button_new_void" c_button_new_void :: IO Button
button_new_void :: IO Button
button_new_void = c_button_new_void
foreign import ccall "OISPrereqs.h OIS_Button_new" c_button_new :: CBool -> IO Button
button_new :: Bool -> IO Button
button_new p1 = c_button_new (fromBool p1)
foreign import ccall "OISPrereqs.h OIS_Axis_new" c_axis_new :: IO Axis
axis_new :: IO Axis
axis_new = c_axis_new
foreign import ccall "OISPrereqs.h OIS_Axis_clear" c_axis_clear :: Axis -> IO ()
axis_clear :: Axis -> IO ()
axis_clear p1 = c_axis_clear p1
foreign import ccall "OISPrereqs.h OIS_Vector3_new_void" c_vector3_new_void :: IO Vector3
vector3_new_void :: IO Vector3
vector3_new_void = c_vector3_new_void
foreign import ccall "OISPrereqs.h OIS_Vector3_new" c_vector3_new :: CFloat -> CFloat -> CFloat -> IO Vector3
vector3_new :: Float -> Float -> Float -> IO Vector3
vector3_new p1 p2 p3 = c_vector3_new (realToFrac p1) (realToFrac p2) (realToFrac p3)
foreign import ccall "OISPrereqs.h OIS_Vector3_clear" c_vector3_clear :: Vector3 -> IO ()
vector3_clear :: Vector3 -> IO ()
vector3_clear p1 = c_vector3_clear p1
| ghorn/hois | OIS/OISPrereqs.hs | bsd-3-clause | 3,829 | 0 | 11 | 702 | 1,118 | 552 | 566 | 108 | 1 |
module Language.GroteTrap.Show (lshow, format) where
import Data.Maybe
import Data.List
import Data.Generics hiding (Prefix)
import Language.GroteTrap.Language
import Language.GroteTrap.Parser
lshow lang = lshow' lang maxBound
-- | Worker for 'lshow': renders @val@ given the precedence of the
-- enclosing context.  Dispatch order: variable constructor first, then
-- number constructor, then a lookup over the language's operators by
-- constructor.  NOTE(review): the final 'fromJust' is partial — a value
-- whose constructor matches no variable, number or operator crashes;
-- presumably the 'Language' definition guarantees coverage.
lshow' :: Data a => Language a -> Int -> a -> String
lshow' lang contextPrio val =
    if isJust (variable lang) && con == toConstr (fromJust (variable lang) undefined)
      then fromJust $ gfindtype val
      else if isJust (number lang) && con == toConstr (fromJust (number lang) undefined)
        then show $ (fromJust $ gfindtype val :: Int)
        else fromJust $ lookup con [ (toConstr $ opCon op, lshow'Op lang contextPrio op val) | op <- operators lang ]
  where
    con = toConstr val
    -- Builds a dummy value with the operator's constructor so we can
    -- compare constructors via 'toConstr'; the 'undefined' arguments
    -- are never forced.
    opCon :: Operator a -> a
    opCon (Unary { opSem1 = o }) = o undefined
    opCon (Binary { opSem2 = o }) = o undefined undefined
    opCon (Assoc { opSemN = o }) = o undefined
-- | All immediate children of a value that have the requested type @b@,
-- collected with a generic query over the constructor's fields.
gchildren :: (Data a, Typeable b) => a -> [b]
gchildren = catMaybes . gmapQ (mkQ Nothing Just)
-- | Render one operator application.  Pre-/postfix operators attach the
-- token to the single child; binary operators put it between the two
-- children; associative operators intersperse it through the child
-- list.  Parentheses are added when the operator's priority does not
-- bind tighter than the surrounding context.
lshow'Op :: Data a => Language a -> Int -> Operator a -> a -> String
lshow'Op lang contextPrio op val = par $ case (op, gchildren val) of
    (Unary _ Prefix prio tok, [c]) ->
      tok ++ sh prio c
    (Unary _ Postfix prio tok, [c]) ->
      sh prio c ++ tok
    (Binary _ _ prio tok, [lhs, rhs]) ->
      sh prio lhs ++ " " ++ tok ++ " " ++ sh prio rhs
    (Assoc _ prio tok, _) ->
      -- Assoc children live in a single list field, hence the nested
      -- 'gchildren' to reach the element list itself.
      concat $ intersperse (" " ++ tok ++ " ") $ map (sh prio) (head (gchildren val))
    _ ->
      error "unexpected number of children"
  where
    sh = lshow' lang
    par s
      | opPrio op >= contextPrio = "(" ++ s ++ ")"
      | otherwise = s
-- | Formats a sentence according to a language: parse it with
-- 'readExpression', then pretty-print the result with 'lshow'.
format :: Data a => Language a -> String -> String
format lang input = lshow lang (readExpression lang input)
| MedeaMelana/GroteTrap | Language/GroteTrap/Show.hs | bsd-3-clause | 1,775 | 0 | 14 | 431 | 746 | 380 | 366 | 39 | 5 |
{-# LANGUAGE TemplateHaskell, TupleSections #-}
module Ask (askTestGroup) where
import Test.Tasty
import Test.Tasty.HUnit
import Utility
import Language.Logo
import Control.Monad.Trans.Class (lift)
import Control.Monad (liftM4)
import Prelude hiding (show)
-- Template Haskell splices generating the NetLogo-style global and
-- per-turtle variables plus the two breeds used by the test cases below.
globals ["glob1"]
turtles_own ["tvar"]
breeds ["frogs", "frog"]
breeds ["mice", "mouse"]
breeds_own "frogs" []
breeds_own "mice" []
run [] -- workaround for tests
-- | HUnit cases exercising HLogo's @ask@: RNG determinism, recursion
-- inside and over @ask@, asking from reporter procedures, asking dead
-- agents, and the (absent) observer-only restriction of NetLogo.
askTestGroup =
    [testCase "case_AskRNG_2D" $ runT $ do
       ca
       random_seed 0 -- not needed, because observer is initialized anyway with seed=0
       ask (sprout 1) =<< one_of =<< patches
       ask (sprout 1) =<< one_of =<< patches
       ask (sprout 1) =<< one_of =<< patches
       ask (sprout 1) =<< one_of =<< patches
       a1 <- of_ (atomic $ liftM4 (,,,) xcor ycor color heading) =<< turtle 0
       let e1 = (14,13,95,224)
       lift $ e1 @=? a1
       a2 <- of_ (atomic $ liftM4 (,,,) xcor ycor color heading) =<< turtle 1
       let e2 = (11,16,115,144)
       lift $ e2 @=? a2
       a3 <- of_ (atomic $ liftM4 (,,,) xcor ycor color heading) =<< turtle 2
       let e3 = (8,8,75,62)
       lift $ e3 @=? a3
       a4 <- of_ (atomic $ liftM4 (,,,) xcor ycor color heading) =<< turtle 3
       let e4 = (-6,-1,5,58)
       lift $ e4 @=? a4
    -- case_AskRNG_2D_Nof = runT $ do
    --   atomic $ random_seed 0 -- not needed, because observer is initialized anyway with seed=0
    --   ca
    --   -- this is not the same as 4 times one_of! (above), because we delete successive draws from the agentset (so as not to return duplicates)
    --   ask (atomic $ sprout 1) =<< atomic (n_of 4 =<< patches)
    --   let e1 = (14,13,95,224)
    --   let e2 = (-12,9,75,287)
    --   let e3 = (-16,-4,5,275)
    --   let e4 = (9,13,135,150)
    --   a1 <- of_ (atomic $ liftM4 (,,,) xcor ycor color heading) =<< turtle 0
    --   a2 <- of_ (atomic $ liftM4 (,,,) xcor ycor color heading) =<< turtle 1
    --   a3 <- of_ (atomic $ liftM4 (,,,) xcor ycor color heading) =<< turtle 2
    --   a4 <- of_ (atomic $ liftM4 (,,,) xcor ycor color heading) =<< turtle 3
    --   -- we have to do this because turtle-n ends up in different patches, since patches run in parallel
    --   -- so we cannot associate a n-who of turtle to its attributes
    --   lift $ assertBool "wrong attributes of turtles" $ null ([a1,a2,a3,a4]\\[e1,e2,e3,e4])
    ,testCase "case_RecursiveCallInsideAsk1" $ let
        go1 = do
          crt 1
          go2 5
          crt 1
        go2 :: Int -> C a b IO () -- sig. needed because of monomorphism restriction?
        go2 x =
          ask (do
                g <- glob1
                atomic $ set_glob1 (g + 1)
                when (x > 0) (go2 (x - 1))
              ) =<< turtle 0
      in
        runT $ do
          ca
          atomic $ set_glob1 0
          go1
          a1 <- count =<< turtles
          let e1 = 2
          lift $ e1 @=? a1
          a2 <- glob1
          let e2 = 6
          lift $ e2 @=? a2
    ,testCase "case_RecursiveCallInsideAsk2" $ let
        go1 = do
          crt 1
          go2
          crt 1
        go2 :: C a b IO () -- sig. needed because of monomorphism restriction?
        go2 = ask (do
              g <- glob1
              atomic $ set_glob1 (g + 1)
              r <- random (10 :: Int)
              when (r > 0) go2) -- recurses until it reaches random=0
          =<< turtle 0
      in runT $ do
        ca
        atomic $ set_glob1 0 -- not needed, because untyped (double) globals are initialized anyway to 0
        random_seed 0 -- not needed, because observer is initialized anyway with seed=0
        go1
        a1 <- count =<< turtles
        let e1 = 2
        lift $ e1 @=? a1
        a2 <- glob1
        let e2 = 10
        lift $ e2 @=? a2
    ,testCase "case_RecursionOverAsk" $ let
        explore :: C Turtle a IO () -- sig. needed because of monomorphism restriction?
        explore = do
          t <- tvar
          when (t == 0) (do
            atomic $ set_tvar 1
            ns <- atomic $ neighbors
            ask explore =<< turtles_on ns)
      in runT $ do
        ca
        ask (sprout 1) =<< patches
        ask explore =<< (one_of =<< turtles)
        a1 <- anyp =<< with (liftM (== 0) tvar) =<< turtles
        let e1 = False
        lift $ e1 @=? a1
    ,testCase "case_AskInsideReporterProcedure" $ let
        foo = do
          ask (atomic $ set_glob1 =<< liftM fromIntegral who) =<< turtle 1
          return 10
      in runT $ do
        crt 2
        a1 <- of_ foo =<< turtle 0
        let e1 = 10
        lift $ e1 @=? a1
        a2 <- glob1
        let e2 = 1
        lift $ e2 @=? a2
    ,testCase "case_AskAllTurtles" $ runT $ do
        crt 1
        let a1 = ask (ask (atomic die) =<< turtles) =<< (one_of =<< patches)
        --assertContextException (lift . evaluate =<< a1)
        let a2 = ask (ask (atomic die) =<< turtles) =<< (one_of =<< turtles)
        --assertContextException (lift . evaluate =<< a2)
        lift $ assertFailure "HLogo does not have the ask limitation (Only the observer can ASK the set of all turtles or patches)"
    ,testCase "case_AskAllPatches" $ runT $ do
        crt 1
        let a1 = ask (ask (sprout 1) =<< patches) =<< (one_of =<< patches)
        --assertContextException (lift . evaluate =<< a1)
        let a2 = ask (ask (sprout 1) =<< patches) =<< one_of =<< turtles
        -- assertContextException (lift . evaluate =<< a2)
        lift $ assertFailure "HLogo does not have the ask limitation (Only the observer can ASK the set of all turtles or patches)"
    ,testCase "case_AskObserverBlock" $ runT $ do
        reset_ticks
        atomic $ set_glob1 0
        crt 10
        ask (do
          wait 0.01
          t <- ticks
          when (t == 1) $ atomic $ set_glob1 1) =<< turtles
        tick
        wait 0.5
        a1 <- glob1
        let e1 = 0
        lift $ e1 @=? a1
    ,testCase "case_AskNobody" $ runT $ do
        crt 2
        assertTypeException $ ask (do
          ask (atomic die) =<< turtle 1
          ask (show =<< self) =<< turtle 1 -- this should raise an exception to the parent since the agentref is nullified
          ) =<< turtle 0
    ,testCase "case_OfDie" $ runT $ do
        crt 2
        assertSomeException $ of_ (error "mplo" >> atomic die) =<< turtles
    ]
| bezirg/hlogo | tests/Ask.hs | bsd-3-clause | 6,734 | 0 | 20 | 2,563 | 1,752 | 837 | 915 | 140 | 1 |
module Main where
import Control.Monad.IO.Class
import Control.Monad.Trans.Resource
import Frame
import Init
import MonadFrame
import MonadVulkan
import Render
import SDL ( showWindow
, time
)
import Swapchain ( threwSwapchainError )
import Utils
import Window
-- | Entry point: brings up SDL + Vulkan, then runs the render loop until
-- the window asks to quit, reporting the average FPS on exit.  All
-- resources are scoped by 'runResourceT'.
main :: IO ()
main = runResourceT $ do
  --
  -- Initialization
  --
  withSDL
  win <- createWindow "Vulkan ⚡ Haskell" 1280 720
  inst <- Init.createInstance win
  (phys, pdi, dev, qs, surf) <- Init.createDevice inst win
  vma <- createVMA inst phys dev
  --
  -- Go
  --
  start <- SDL.time @Double
  -- Mean FPS over the whole run, derived from the frame's index.
  let reportFPS f = do
        end <- SDL.time
        let frames = fIndex f
            mean = realToFrac frames / (end - start)
        liftIO $ putStrLn $ "Average: " <> show mean
  let rtInfo = pdiRTInfo pdi
  -- One loop iteration: Nothing terminates 'loopJust'; otherwise render
  -- the frame and advance, recreating the swapchain when rendering
  -- signalled it is out of date.
  let frame f = do
        shouldQuit NoLimit >>= \case
          True -> do
            reportFPS f
            pure Nothing
          False -> Just <$> do
            needsNewSwapchain <- threwSwapchainError (runFrame f renderFrame)
            advanceFrame needsNewSwapchain f
  runV inst phys rtInfo dev qs vma $ do
    initial <- initialFrame win surf
    showWindow win
    loopJust frame initial
| expipiplus1/vulkan | examples/rays/Main.hs | bsd-3-clause | 1,530 | 0 | 23 | 673 | 375 | 184 | 191 | -1 | -1 |
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FunctionalDependencies #-}
module Data.Object.Parse where
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as HM
import Data.Hashable
-- import Control.Comonad.Cofree
import Control.StructParser
import Data.Object.Types
import Data.ConstIndex
----------------------------------------------------------------------
-- Nodes
-- | Run a continuation on the key\/value pairs of an 'Object' node,
-- failing with the @Object@ qualifier on any other node shape.
withObject
  :: (GetId s) =>
     (HashMap k (AnnotatedObject k s) -> Parser a)
  -> AnnotatedObject k s
  -> Parser a
withObject = withNode (QObject "Object") $ \cont err obj ->
  case obj of
    Object pairs -> cont pairs
    _ -> err
-- | Run a continuation on the elements of an 'Array' node, failing with
-- the @Array@ qualifier on any other node shape.
withArray
  :: (GetId s) =>
     ([AnnotatedObject k s] -> Parser a)
  -> AnnotatedObject k s
  -> Parser a
withArray = withNode (QObject "Array") $ \cont err obj ->
  case obj of
    Array elems -> cont elems
    _ -> err
-- | Run a continuation on a 'Scalar' node's payload.  The caller
-- supplies the 'Qualifier' used in the error message, since scalars can
-- represent many expected shapes (number, string, ...).
withScalar
  :: (GetId s) =>
     Qualifier
  -> (s -> Parser a)
  -> AnnotatedObject k s
  -> Parser a
withScalar expectation = withNode expectation $ \cont err obj ->
  case obj of
    Scalar s -> cont s
    _ -> err
----------------------------------------------------------------------
-- Collections
-- | Parse every field of an object; the element parser also receives
-- the field's key.  Delegates traversal to 'withLeaves' with the
-- key-aware map of 'HashMap'.
withFields
  :: (FieldKey k)
  => (k -> AnnotatedObject k s -> Parser a)
  -> HashMap k (AnnotatedObject k s)
  -> Parser (HashMap k a)
withFields = withLeaves HM.mapWithKey
-- | Parse every element of an array with the same parser.  Reuses the
-- key-indexed 'withLeaves' machinery by wrapping the list in
-- 'ConstIndex' (unit keys) and unwrapping afterwards.
withElems
  :: (AnnotatedObject k s -> Parser a)
  -> [AnnotatedObject k s]
  -> Parser [a]
withElems p vs =
  fmap runConstIndex $
  withLeaves (fmap . ($ ())) (const p) (ConstIndex vs)
----------------------------------------------------------------------
-- Elements
-- | Parse one named field of an object, failing with the field's
-- qualifier when the key is absent.
withField
  :: (FieldKey k, Hashable k)
  => k
  -> (AnnotatedObject k s -> Parser a)
  -> HashMap k (AnnotatedObject k s)
  -> Parser a
withField = withLookup $ \cont err k hm ->
  maybe (err (fieldQualifier k)) cont (HM.lookup k hm)
-- | Parse the element at position @n@ of an array, failing via the
-- index-error continuation when @n@ is out of range.
--
-- Fix: the original bound check was only @n < length ls@, so a negative
-- @n@ slipped through and crashed in @(!!)@; negative indices are now
-- routed to the error continuation as well.
withElem
  :: Int
  -> (AnnotatedObject k s -> Parser a)
  -> [AnnotatedObject k s]
  -> Parser a
withElem = withIndex $ \cont err n ls ->
  if n >= 0 && n < length ls
    then cont (ls !! n)
    else err
| esmolanka/struct-parse | Data/Object/Parse.hs | bsd-3-clause | 2,146 | 0 | 11 | 451 | 714 | 369 | 345 | 68 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Sonos.XML where
import Text.XML
import Text.XML.Cursor
import Sonos.Types
import Data.Maybe ( fromJust)
import Data.String ( fromString
, IsString
)
import Formatting ( stext
, (%)
, sformat
)
import qualified Data.ByteString.Lazy as BSL
import qualified Data.Map.Strict as M
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
-- | How to determine the DIDL-Lite wrapper element of a browse result:
-- either derive it from a well-known browse root key, or use an
-- explicitly supplied element name.
data BrowseContainer = BrowseDefault T.Text
                     | BrowseSpecified T.Text
-- | Element name (@\"container\"@ or @\"item\"@) wrapping each entry of
-- a DIDL-Lite browse result.  An explicitly specified name is used
-- verbatim; the known default browse roots are mapped below.
--
-- Fix: the original @fromJust . M.lookup@ crashed on an unknown default
-- key with an uninformative pattern-match failure; unknown keys now
-- raise with a message naming the offending key.
lookupWrapper :: BrowseContainer
              -> T.Text
lookupWrapper (BrowseSpecified c) = c
lookupWrapper (BrowseDefault k) = case k of
    "A:ARTIST" -> "container"
    "A:ALBUM" -> "container"
    "A:TRACKS" -> "item"
    "0" -> "container"
    "FV:2" -> "item"
    "FV:3" -> "item"
    _ -> error ("lookupWrapper: unknown browse key " ++ T.unpack k)
-- | Decode a UPnP Browse SOAP response: returns the number of entries
-- returned, the total number of matches, and each entry as
-- @(title, DBData title uri metadata)@.  The inner DIDL-Lite document
-- arrives XML-escaped inside @Result@, hence the second parse.
--
-- NOTE(review): the irrefutable list binds (@[root]@, @[resultO]@, ...)
-- crash on a malformed response instead of failing gracefully — assumes
-- well-formed Sonos output; confirm before hardening.
browsedContent :: BrowseContainer
               -> BSL.ByteString
               -> (Int, Int, [(T.Text, DBData)])
browsedContent typeKey body =
    let cursor = fromDocument $ parseLBS_ def body
        wrapper = lookupWrapper typeKey
        [root] = cursor $/ laxElement "Body"
        [resultO] = root $/ laxElement "BrowseResponse"
        [numReturned] = resultO $/ laxElement "NumberReturned"
                                &// content
        [totalMatches] = resultO $/ laxElement "TotalMatches"
                                 &// content
        [result] = resultO $/ laxElement "Result"
        result' = (T.concat $ result $/ content)
        -- Second parse: the DIDL payload embedded as escaped text.
        resultCursor = fromDocument $ parseLBS_ def $ BSL.fromStrict $ TE.encodeUtf8 result'
        things = resultCursor $/ laxElement wrapper
        res = map transform things
        transform elem =
            let title = case elem $/ laxElement "title" &// content of
                    [] -> ""
                    t -> head t
                link = case elem $/ laxElement "res" &// content of
                    [] -> ""
                    l -> head l
                resMD = case elem $/ laxElement "resMD" &// content of
                    [] -> ""
                    m -> head m
            in (title, DBData title link resMD)
    in (read $ T.unpack numReturned, read $ T.unpack totalMatches, res)
-- | Extract the device identifier from a UPnP device description
-- document.  Drops the first five characters of the UDN content —
-- presumably the @\"uuid:\"@ prefix; confirm against a real device
-- response.
getUUID :: BSL.ByteString
        -> T.Text
getUUID body =
    let cursor = fromDocument $ parseLBS_ def body
        ns = "{urn:schemas-upnp-org:device-1-0}"
    in T.drop 5 $ head $ cursor $/ element (fromString $ ns ++ "device")
                                &/ element (fromString $ ns ++ "UDN")
                                &// content
-- | Decode an AddURIToQueue SOAP response into 'SonosQueueData'.
-- Missing numeric elements default to 0; the first track of the newly
-- added batch is derived as @(queueLength - numTracksAdded) + 1@.
--
-- NOTE(review): the inner lookups use namespace-sensitive 'element'
-- while the outer ones use 'laxElement' — looks inconsistent; confirm
-- the response's child elements really are unnamespaced.
getQueueData :: BSL.ByteString
             -> SonosQueueData
getQueueData body =
    let cursor = fromDocument $ parseLBS_ def body
        [elem] = cursor $/ laxElement "Body"
                        &/ laxElement "AddURIToQueueResponse"
        tread = read . T.unpack
        queueLength = case elem $/ element "NewQueueLength" &// content of
            [] -> 0
            [ql] -> tread ql
        numTracksAdded = case elem $/ element "NumTracksAdded" &// content of
            [] -> 0
            [nta] -> tread nta
        firstTrackNumberEnq = case elem $/ element "FirstTrackNumberEnqueued" &// content of
            [] -> 0
            [ftn] -> tread ftn
    in SonosQueueData {
         sqdNewQueueLength = queueLength
       , sqdNumTracksAdded = numTracksAdded
       , sqdFirstTrackNumberEnqueued = firstTrackNumberEnq
       , sqdFirstTrackOfNewQueue = (queueLength - numTracksAdded) + 1
       }
-- | Queue position of the first track added by an AddURIToQueue call.
--
-- The previous implementation duplicated 'getQueueData''s entire XML
-- traversal and recomputed @(queueLength - numTracksAdded) + 1@, which
-- is exactly 'sqdFirstTrackOfNewQueue'; reuse the existing parser
-- instead of maintaining two copies.
getTrackNum :: BSL.ByteString
            -> Int
getTrackNum = sqdFirstTrackOfNewQueue . getQueueData
| merc1031/haskell-sonos-http-api | src/Sonos/XML.hs | bsd-3-clause | 4,664 | 0 | 16 | 1,979 | 1,112 | 589 | 523 | 100 | 4 |
module Graphics.DrawingCombinators.Utils (
Image, square,
textHeight, textSize,
textLinesWidth, textLinesHeight, textLinesSize,
drawText, drawTextLines, backgroundColor) where
import Control.Monad(void)
import Data.List(genericLength)
import Data.Monoid(Monoid(..))
import Data.Vector.Vector2(Vector2(..))
import Graphics.DrawingCombinators((%%))
import qualified Graphics.DrawingCombinators as Draw
type Image = Draw.Image ()
-- | Unit square with corners (0,0) and (1,1), result value discarded.
square :: Image
square = void $ Draw.convexPoly [ (0, 0), (1, 0), (1, 1), (0, 1) ]

-- | Fixed height of one rendered text line in drawing units.
textHeight :: Draw.R
textHeight = 2

-- | Bounding size of a single line: measured width x fixed 'textHeight'.
textSize :: Draw.Font -> String -> Vector2 Draw.R
textSize font str = Vector2 (Draw.textWidth font str) textHeight
-- | Draw one line of text in this library's y-grows-down coordinate
-- convention, occupying the vertical range 0..'textHeight'.
drawText :: Draw.Font -> String -> Image
drawText font =
  -- We want to reverse it so that higher y is down, and it is also
  -- moved to 0..2
  (Draw.scale 1 (-1) %%) .
  -- Text is normally at height -1.5..0.5. We move it to be -2..0
  (Draw.translate (0, -1.5) %%) .
  void . Draw.text font
-- | Total height of a stack of lines: 'textHeight' per line.
textLinesHeight :: [String] -> Draw.R
textLinesHeight = (textHeight *) . genericLength
-- | Width of the widest line.  Fix: the original used the partial
-- 'maximum', which crashed on an empty line list; an empty list now
-- yields width 0.  Non-empty behaviour is unchanged.
textLinesWidth :: Draw.Font -> [String] -> Draw.R
textLinesWidth font textLines = case map (Draw.textWidth font) textLines of
  [] -> 0
  widths -> maximum widths
-- | Bounding box of a stack of lines: widest line x stacked height.
textLinesSize :: Draw.Font -> [String] -> Vector2 Draw.R
textLinesSize font textLines = Vector2 (textLinesWidth font textLines) (textLinesHeight textLines)
-- | Render the lines top to bottom, each subsequent line translated one
-- 'textHeight' further down before being composed in.
drawTextLines :: Draw.Font -> [String] -> Image
drawTextLines font =
  foldr (step . drawText font) mempty
  where
    step lineImage restImage =
      mappend lineImage $
      Draw.translate (0, textHeight) %% restImage
-- | Compose @image@ over a tinted width x height rectangle that acts as
-- its background (the image is drawn first, i.e. on top).
backgroundColor :: Draw.Color -> Vector2 Draw.R -> Image -> Image
backgroundColor color (Vector2 width height) image =
  mappend image $
  Draw.tint color $ Draw.scale width height %% square
| sinelaw/lamdu | bottlelib/Graphics/DrawingCombinators/Utils.hs | gpl-3.0 | 1,740 | 0 | 11 | 289 | 577 | 319 | 258 | 39 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.EC2.AttachNetworkInterface
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Attaches a network interface to an instance.
--
-- /See:/ <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-AttachNetworkInterface.html AWS API Reference> for AttachNetworkInterface.
module Network.AWS.EC2.AttachNetworkInterface
(
-- * Creating a Request
attachNetworkInterface
, AttachNetworkInterface
-- * Request Lenses
, aniDryRun
, aniNetworkInterfaceId
, aniInstanceId
, aniDeviceIndex
-- * Destructuring the Response
, attachNetworkInterfaceResponse
, AttachNetworkInterfaceResponse
-- * Response Lenses
, anirsAttachmentId
, anirsResponseStatus
) where
import Network.AWS.EC2.Types
import Network.AWS.EC2.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | /See:/ 'attachNetworkInterface' smart constructor.
-- Auto-generated amazonka request type: record, smart constructor and
-- lenses for the EC2 AttachNetworkInterface call.

-- | /See:/ 'attachNetworkInterface' smart constructor.
data AttachNetworkInterface = AttachNetworkInterface'
    { _aniDryRun             :: !(Maybe Bool)
    , _aniNetworkInterfaceId :: !Text
    , _aniInstanceId         :: !Text
    , _aniDeviceIndex        :: !Int
    } deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'AttachNetworkInterface' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aniDryRun'
--
-- * 'aniNetworkInterfaceId'
--
-- * 'aniInstanceId'
--
-- * 'aniDeviceIndex'
attachNetworkInterface
    :: Text -- ^ 'aniNetworkInterfaceId'
    -> Text -- ^ 'aniInstanceId'
    -> Int -- ^ 'aniDeviceIndex'
    -> AttachNetworkInterface
attachNetworkInterface pNetworkInterfaceId_ pInstanceId_ pDeviceIndex_ =
    AttachNetworkInterface'
    { _aniDryRun = Nothing
    , _aniNetworkInterfaceId = pNetworkInterfaceId_
    , _aniInstanceId = pInstanceId_
    , _aniDeviceIndex = pDeviceIndex_
    }

-- | Checks whether you have the required permissions for the action, without
-- actually making the request, and provides an error response. If you have
-- the required permissions, the error response is 'DryRunOperation'.
-- Otherwise, it is 'UnauthorizedOperation'.
aniDryRun :: Lens' AttachNetworkInterface (Maybe Bool)
aniDryRun = lens _aniDryRun (\ s a -> s{_aniDryRun = a});

-- | The ID of the network interface.
aniNetworkInterfaceId :: Lens' AttachNetworkInterface Text
aniNetworkInterfaceId = lens _aniNetworkInterfaceId (\ s a -> s{_aniNetworkInterfaceId = a});

-- | The ID of the instance.
aniInstanceId :: Lens' AttachNetworkInterface Text
aniInstanceId = lens _aniInstanceId (\ s a -> s{_aniInstanceId = a});

-- | The index of the device for the network interface attachment.
aniDeviceIndex :: Lens' AttachNetworkInterface Int
aniDeviceIndex = lens _aniDeviceIndex (\ s a -> s{_aniDeviceIndex = a});
-- Wire format instances (auto-generated): the request is sent as an EC2
-- Query-API POST with an empty path and no extra headers; the response
-- XML yields the optional attachmentId plus the HTTP status code.
instance AWSRequest AttachNetworkInterface where
        type Rs AttachNetworkInterface =
             AttachNetworkInterfaceResponse
        request = postQuery eC2
        response
          = receiveXML
              (\ s h x ->
                 AttachNetworkInterfaceResponse' <$>
                   (x .@? "attachmentId") <*> (pure (fromEnum s)))

instance ToHeaders AttachNetworkInterface where
        toHeaders = const mempty

instance ToPath AttachNetworkInterface where
        toPath = const "/"

instance ToQuery AttachNetworkInterface where
        toQuery AttachNetworkInterface'{..}
          = mconcat
              ["Action" =:
                 ("AttachNetworkInterface" :: ByteString),
               "Version" =: ("2015-04-15" :: ByteString),
               "DryRun" =: _aniDryRun,
               "NetworkInterfaceId" =: _aniNetworkInterfaceId,
               "InstanceId" =: _aniInstanceId,
               "DeviceIndex" =: _aniDeviceIndex]
-- | /See:/ 'attachNetworkInterfaceResponse' smart constructor.
data AttachNetworkInterfaceResponse = AttachNetworkInterfaceResponse'
    { _anirsAttachmentId   :: !(Maybe Text)
    , _anirsResponseStatus :: !Int
    } deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'AttachNetworkInterfaceResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'anirsAttachmentId'
--
-- * 'anirsResponseStatus'
attachNetworkInterfaceResponse
    :: Int -- ^ 'anirsResponseStatus'
    -> AttachNetworkInterfaceResponse
attachNetworkInterfaceResponse pResponseStatus_ =
    AttachNetworkInterfaceResponse'
    { _anirsAttachmentId = Nothing
    , _anirsResponseStatus = pResponseStatus_
    }

-- | The ID of the network interface attachment.
anirsAttachmentId :: Lens' AttachNetworkInterfaceResponse (Maybe Text)
anirsAttachmentId = lens _anirsAttachmentId (\ s a -> s{_anirsAttachmentId = a});

-- | The response status code.
anirsResponseStatus :: Lens' AttachNetworkInterfaceResponse Int
anirsResponseStatus = lens _anirsResponseStatus (\ s a -> s{_anirsResponseStatus = a});
| fmapfmapfmap/amazonka | amazonka-ec2/gen/Network/AWS/EC2/AttachNetworkInterface.hs | mpl-2.0 | 5,602 | 0 | 13 | 1,131 | 771 | 461 | 310 | 100 | 1 |
{-# LANGUAGE OverloadedStrings #-}
-- http://members.shaw.ca/el.supremo/MagickWand/3dlogo.htm
-- Better 3-D Logo Generation example
-- http://www.imagemagick.org/Usage/advanced/#3d-logos-2
import Graphics.ImageMagick.MagickWand
-- | Builds the 3-D "Ant" logo in five stages, each an isolated wand
-- scope ('localGenesis'), mirroring the ImageMagick shell commands
-- quoted before each stage: mask -> coloured/annotated logo -> 3-D
-- shading -> drop shadow -> textured background.
main :: IO ()
main = do
  withMagickWandGenesis $ do
    localGenesis $ do
      {-
        convert -size 170x100 xc:black \
        -fill white -draw 'circle 50,50 13,50' \
        -draw 'circle 120,50 157,50' \
        -draw 'rectangle 50,13 120,87' \
        -fill black -draw 'circle 50,50 25,50' \
        -draw 'circle 120,50 145,50' \
        -draw 'rectangle 50,25 120,75' \
        -fill white -draw 'circle 60,50 40,50' \
        -draw 'circle 110,50 130,50' \
        -draw 'rectangle 60,30 110,70' \
        -gaussian 1x1 +matte logo_mask.png
      -}
      (_,mw) <- magickWand
      pw <- pixelWand
      (_,dw) <- drawingWand
      setSize mw 170 100
      mw `readImage` "xc:black"
      pw `setColor` "white"
      dw `setFillColor` pw
      drawCircle dw 50 50 13 50
      drawCircle dw 120 50 157 50
      drawRectangle dw 50 13 120 87
      pw `setColor` "black"
      dw `setFillColor` pw
      -- NOTE(review): the next line is duplicated — the shell command
      -- above draws 'circle 50,50 25,50' only once.  Redrawing the same
      -- filled circle is visually idempotent, but the repetition looks
      -- accidental; confirm and remove one copy.
      drawCircle dw 50 50 25 50
      drawCircle dw 50 50 25 50
      drawCircle dw 120 50 145 50
      drawRectangle dw 50 25 120 75
      pw `setColor` "white"
      dw `setFillColor` pw
      drawCircle dw 60 50 40 50
      drawCircle dw 110 50 130 50
      drawRectangle dw 60 30 110 70
      -- Now we draw the Drawing wand on to the Magick Wand
      mw `drawImage` dw
      gaussianBlurImage mw 1 1
      -- Turn the matte of == +matte
      mw `setImageMatte` False
      mw `writeImage` (Just "logo_mask.png")
    localGenesis $ do
      (_,mw) <- magickWand
      (_,mwc) <- magickWand
      pw <- pixelWand
      (_,dw) <- drawingWand
      {-
        convert ant_mask.png -fill red -draw 'color 0,0 reset' \
        ant_mask.png +matte -compose CopyOpacity -composite \
        -font Candice -pointsize 36 -fill white -stroke black \
        -gravity Center -annotate 0 "Ant" \
        ant.png
      -}
      mw `readImage` "logo_mask.png"
      pw `setColor` "red"
      dw `setFillColor` pw
      drawColor dw 0 0 resetMethod
      mw `drawImage` dw
      mwc `readImage` "logo_mask.png"
      mwc `setImageMatte` False
      compositeImage mw mwc copyOpacityCompositeOp 0 0
      -- Annotate gets all the font information from the drawingwand
      -- but draws the text on the magickwand
      -- I haven't got the Candice font so I'll use a pretty one
      -- that I know I have
      dw `setFont` "Lucida-Handwriting-Italic"
      dw `setFontSize` 36
      pw `setColor` "white"
      dw `setFillColor` pw
      pw `setColor` "black"
      dw `setStrokeColor` pw
      dw `setGravity` centerGravity
      annotateImage mw dw 0 0 0 "Ant"
      mw `writeImage` (Just "logo_ant.png")
    {-
      convert ant.png -fx A +matte -blur 0x6 -shade 110x30 -normalize \
      ant.png -compose Overlay -composite \
      ant.png -matte -compose Dst_In -composite \
      ant_3D.png
    -}
    localGenesis $ do
      (_,mw) <- magickWand
      mw `readImage` "logo_ant.png"
      (_,mwf) <- fxImage mw "A"
      -- MagickSetImageMatte(mw,MagickFalse);
      -- +matte is the same as -alpha off
      -- mwf `setImageAlphaChannel` deactivateAlphaChannel
      blurImage mwf 0 6
      shadeImage mwf True 110 30
      normalizeImage mwf
      -- ant.png -compose Overlay -composite
      (_, mwc) <- magickWand
      mwc `readImage` "logo_ant.png"
      compositeImage mwf mwc overlayCompositeOp 0 0
      -- ant.png -matte -compose Dst_In -composite
      (_,mwc') <- magickWand
      mwc' `readImage` "logo_ant.png"
      -- -matte is the same as -alpha on
      -- I don't understand why the -matte in the command line
      -- does NOT operate on the image just read in (logo_ant.png in mwc)
      -- but on the image before it in the list
      -- It would appear that the -matte affects each wand currently in the
      -- command list because applying it to both wands gives the same result
      -- setImageAlphaChannel mwf setAlphaChannel
      -- setImageAlphaChannel mwc setAlphaChannel
      compositeImage mwf mwc' dstInCompositeOp 0 0
      writeImage mwf (Just "logo_ant_3D.png")
    {- Now for the shadow
      convert ant_3D.png \( +clone -background navy -shadow 80x4+6+6 \) +swap \
      -background none -layers merge +repage ant_3D_shadowed.png
    -}
    localGenesis $ do
      pw <- pixelWand
      (_,mw) <- magickWand
      readImage mw "logo_ant_3D.png"
      (_,mwc) <- cloneMagickWand mw
      pw `setColor` "navy"
      mwc `setImageBackgroundColor` pw
      shadowImage mwc 80 4 6 6
      -- at this point
      -- mw = ant_3D.png
      -- mwc = +clone -background navy -shadow 80x4+6+6
      -- To do the +swap I create a new blank MagickWand and then
      -- put mwc and mw into it. ImageMagick probably doesn't do it
      -- this way but it works here and that's good enough for me!
      (_,mwf) <- magickWand
      mwf `addImage` mwc
      mwf `addImage` mw
      pw `setColor` "none"
      setImageBackgroundColor mwf pw
      (_,mwc') <- mergeImageLayers mwf mergeLayer
      mwc' `writeImage` (Just "logo_shadow_3D.png")
    {-
      and now for the fancy background
      convert ant_3D_shadowed.png \
      \( +clone +repage +matte -fx 'rand()' -shade 120x30 \
      -fill grey70 -colorize 60 \
      -fill lavender -tint 100 \) -insert 0 \
      -flatten ant_3D_bg.jpg
    -}
    localGenesis $ do
      pw <- pixelWand
      (_,mw) <- magickWand
      mw `readImage` "logo_shadow_3D.png"
      (_,mwc) <- cloneMagickWand mw
      -- +repage
      resetImagePage mwc Nothing
      -- +matte is the same as -alpha off
      -- setImageAlphaChannel mwc deactivateAlphaChannel
      (_, mwf) <- fxImage mwc "rand()"
      shadeImage mwf True 120 30
      setColor pw "grey70"
      -- It seems that this must be a separate pixelwand for Colorize to work!
      pwo <- pixelWand
      -- AHA .. this is how to do a 60% colorize
      pwo `setColor` "rgb(60%,60%,60%)"
      colorizeImage mwf pw pwo
      pw `setColor` "lavender"
      -- and this is a 100% tint
      pwo `setColor` "rgb(100%,100%,100%)"
      tintImage mwf pw pwo
      (_, mwc') <- magickWand
      mwc' `addImage` mwf
      mwc' `addImage` mwc
      (_, mwf') <- mergeImageLayers mwc flattenLayer
      mwf' `writeImage` (Just "logo_bg_3D.jpg")
| flowbox-public/imagemagick | examples/3dlogo.hs | apache-2.0 | 7,006 | 0 | 15 | 2,441 | 1,133 | 579 | 554 | 102 | 1 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Language.Embedded.Signature where
import Data.Proxy
import Language.C.Monad
import Language.Embedded.Expression
import Language.Embedded.Backend.C.Expression
import Language.C.Quote.C
-- * Language
-- | Signature annotations
data Ann exp a where
  -- | No annotation: the argument is compiled to an ordinary parameter.
  Empty  :: Ann exp a
  -- | The argument is an array passed as a raw buffer pointer; the
  -- expression gives its length (used to build the local
  -- @struct array@ wrapper in the generated C code).
  Native :: (FreePred exp a) => exp len -> Ann exp [a]
  -- | Use the given string as the parameter's name in the generated code.
  Named  :: String -> Ann exp a
-- | Signatures
data Signature exp pred a where
  -- | End the signature by returning the expression's value with a C
  -- @return@ statement.  The 'String' is the generated function's name.
  Ret :: pred a => String -> exp a -> Signature exp pred a
  -- | End the signature by writing the result through an output pointer
  -- parameter (@*out@).  The 'String' is the generated function's name.
  Ptr :: pred a => String -> exp a -> Signature exp pred a
  -- | Take one (possibly annotated) argument and continue the signature.
  Lam :: pred a => Ann exp a -> (Val a -> Signature exp pred b)
      -> Signature exp pred (a -> b)
-- * Combinators
-- | Add an unannotated argument to a signature.
lam :: (pred a, FreeExp exp, FreePred exp a)
    => (exp a -> Signature exp pred b) -> Signature exp pred (a -> b)
lam body = Lam Empty (body . valToExp)
-- | Add a named argument to a signature; the name is used for the
-- corresponding parameter in the generated code.
name :: (pred a, FreeExp exp, FreePred exp a)
     => String -> (exp a -> Signature exp pred b) -> Signature exp pred (a -> b)
name lbl body = Lam (Named lbl) (body . valToExp)
-- | End a signature by returning the expression's value ('Ret') or by
-- writing it through an output pointer parameter ('Ptr').  The 'String'
-- argument names the generated C function.
ret,ptr :: (pred a)
        => String -> exp a -> Signature exp pred a
ret = Ret
ptr = Ptr
-- | Add an annotated argument to a signature, first transforming it
-- with the supplied function.
arg :: (pred a, FreeExp exp, FreePred exp a)
    => Ann exp a
    -> (exp a -> exp b)
    -> (exp b -> Signature exp pred c)
    -> Signature exp pred (a -> c)
arg ann transform body = Lam ann (body . transform . valToExp)
-- * Compilation
-- | Compile a function @Signature@ to C code
translateFunction :: forall m exp a. (MonadC m, CompExp exp)
                  => Signature exp CType a -> m ()
translateFunction sig = go sig (return ())
  where
    -- Walk the signature, threading a 'prelude' action that emits the
    -- parameter declarations collected so far.
    go :: Signature exp CType d -> m () -> m ()
    -- Result returned by value: emit a @return@ statement.
    go (Ret n a) prelude = do
      t <- cType a
      inFunctionTy t n $ do
        prelude
        e <- compExp a
        addStm [cstm| return $e; |]
    -- Result returned through an @out@ pointer parameter.
    go (Ptr n a) prelude = do
      t <- cType a
      inFunction n $ do
        prelude
        e <- compExp a
        addParam [cparam| $ty:t *out |]
        addStm [cstm| *out = $e; |]
    -- Unannotated argument: add a fresh scalar parameter.
    go fun@(Lam Empty f) prelude = do
      t <- cType (argProxy fun)
      v <- freshVar (Proxy :: Proxy CType)
      go (f v) $ prelude >> addParam [cparam| $ty:t $id:v |]
    -- Native array argument: the caller passes a raw buffer pointer,
    -- which we wrap in a local @struct array@ value aliased to the
    -- fresh identifier.
    go fun@(Lam n@(Native l) f) prelude = do
      t <- cType n
      i <- freshId
      let vi = 'v' : show i
      let w = ValComp vi
      -- NOTE(review): this binding shadows the pattern variable @n@
      -- (the 'Native' annotation) bound above.
      let n = vi ++ "_buf"
      withAlias i ('&':vi) $ go (f w) $ do
        prelude
        len <- compExp l
        addLocal [cdecl| struct array $id:vi = { .buffer = $id:n
                                               , .length=$len
                                               , .elemSize=sizeof($ty:t)
                                               , .bytes=sizeof($ty:t)*$len
                                               }; |]
        addParam [cparam| $ty:t * $id:n |]
    -- Named argument: the parameter takes the given name, and the fresh
    -- identifier is aliased to it.
    go fun@(Lam (Named s) f) prelude = do
      t <- cType (argProxy fun)
      i <- freshId
      let w = ValComp ('v' : show i)
      withAlias i s $ go (f w) $ prelude >> addParam [cparam| $ty:t $id:s |]
-- | A 'Proxy' for the argument type of a function signature.  Only used
-- to drive 'cType'; the value itself is never inspected.
argProxy :: Signature exp pred (b -> c) -> Proxy b
argProxy _ = Proxy
| kmate/imperative-edsl | src/Language/Embedded/Signature.hs | bsd-3-clause | 3,111 | 0 | 15 | 1,041 | 1,205 | 608 | 597 | -1 | -1 |
{-# LANGUAGE GADTs, DisambiguateRecordFields #-}
{-# OPTIONS_GHC -fno-warn-warnings-deprecations #-}
module CmmProcPoint
( ProcPointSet, Status(..)
, callProcPoints, minimalProcPointSet
, addProcPointProtocols, splitAtProcPoints, procPointAnalysis
)
where
import Prelude hiding (last, unzip, succ, zip)
import BlockId
import CLabel
import Cmm
import CmmUtils
import CmmContFlowOpt
import CmmInfo
import CmmLive
import Constants
import Data.List (sortBy)
import Maybes
import MkGraph
import Control.Monad
import OptimizationFuel
import Outputable
import Platform
import UniqSet
import UniqSupply
import Compiler.Hoopl
import qualified Data.Map as Map
-- Compute a minimal set of proc points for a control-flow graph.
-- Determine a protocol for each proc point (which live variables will
-- be passed as arguments and which will be on the stack).
{-
A proc point is a basic block that, after CPS transformation, will
start a new function. The entry block of the original function is a
proc point, as is the continuation of each function call.
A third kind of proc point arises if we want to avoid copying code.
Suppose we have code like the following:
f() {
if (...) { ..1..; call foo(); ..2..}
else { ..3..; call bar(); ..4..}
x = y + z;
return x;
}
The statement 'x = y + z' can be reached from two different proc
points: the continuations of foo() and bar(). We would prefer not to
put a copy in each continuation; instead we would like 'x = y + z' to
be the start of a new procedure to which the continuations can jump:
f_cps () {
if (...) { ..1..; push k_foo; jump foo_cps(); }
else { ..3..; push k_bar; jump bar_cps(); }
}
k_foo() { ..2..; jump k_join(y, z); }
k_bar() { ..4..; jump k_join(y, z); }
k_join(y, z) { x = y + z; return x; }
You might think then that a criterion to make a node a proc point is
that it is directly reached by two distinct proc points. (Note
[Direct reachability].) But this criterion is a bit too simple; for
example, 'return x' is also reached by two proc points, yet there is
no point in pulling it out of k_join. A good criterion would be to
say that a node should be made a proc point if it is reached by a set
of proc points that is different than its immediate dominator. NR
believes this criterion can be shown to produce a minimum set of proc
points, and given a dominator tree, the proc points can be chosen in
time linear in the number of blocks. Lacking a dominator analysis,
however, we turn instead to an iterative solution, starting with no
proc points and adding them according to these rules:
1. The entry block is a proc point.
2. The continuation of a call is a proc point.
3. A node is a proc point if it is directly reached by more proc
points than one of its predecessors.
Because we don't understand the problem very well, we apply rule 3 at
most once per iteration, then recompute the reachability information.
(See Note [No simple dataflow].) The choice of the new proc point is
arbitrary, and I don't know if the choice affects the final solution,
so I don't know if the number of proc points chosen is the
minimum---but the set will be minimal.
-}
-- | The set of blocks chosen as procedure entry points.
type ProcPointSet = BlockSet

-- | Fact computed by the proc-point reachability analysis for a block.
data Status
  = ReachedBy ProcPointSet  -- set of proc points that directly reach the block
  | ProcPoint               -- this block is itself a proc point
-- | Render reachability facts for debug output.
instance Outputable Status where
  ppr ProcPoint = text "<procpt>"
  ppr (ReachedBy ps)
    | setNull ps = text "<not-reached>"
    | otherwise  = text "reached by" <+>
                   hsep (punctuate comma (map ppr (setElems ps)))
-- | Lattice for the reachability analysis.  Facts only grow: a block
-- that becomes a 'ProcPoint' stays one; otherwise the set of proc
-- points that reach it accumulates.
lattice :: DataflowLattice Status
lattice = DataflowLattice "direct proc-point reachability" unreached add_to
  where -- Bottom: reached by no proc point yet.
        unreached = ReachedBy setEmpty
        add_to _ (OldFact ProcPoint) _ = (NoChange, ProcPoint)
        add_to _ _ (NewFact ProcPoint) = (SomeChange, ProcPoint) -- because of previous case
        add_to _ (OldFact (ReachedBy p)) (NewFact (ReachedBy p')) =
          let union = setUnion p' p
          in if setSize union > setSize p then (SomeChange, ReachedBy union)
             else (NoChange, ReachedBy p)
--------------------------------------------------
-- transfer equations
-- | Forward transfer function.  Entering a proc point rewrites the fact
-- to \"reached by exactly this block\"; the continuation of a (foreign)
-- call becomes a proc point; everything else passes the fact through.
-- The composition wraps 'last' so its @[(Label, Status)]@ result is
-- turned into a 'FactBase'.
forward :: FwdTransfer CmmNode Status
forward = mkFTransfer3 first middle ((mkFactBase lattice . ) . last)
  where first :: CmmNode C O -> Status -> Status
        first (CmmEntry id) ProcPoint = ReachedBy $ setSingleton id
        first _ x = x
        middle _ x = x
        last :: CmmNode O C -> Status -> [(Label, Status)]
        last (CmmCall {cml_cont = Just k}) _ = [(k, ProcPoint)]
        last (CmmForeignCall {succ = k}) _ = [(k, ProcPoint)]
        last l x = map (\id -> (id, x)) (successors l)
-- It is worth distinguishing two sets of proc points:
-- those that are induced by calls in the original graph
-- and those that are introduced because they're reachable from multiple proc points.
-- | The proc points induced by calls: the graph entry plus the
-- continuation block of every (foreign) call.
callProcPoints :: CmmGraph -> ProcPointSet
callProcPoints g = foldGraphBlocks addCallSucc (setSingleton (g_entry g)) g
  where
    addCallSucc :: CmmBlock -> BlockSet -> BlockSet
    addCallSucc block set =
      case lastNode block of
        CmmCall {cml_cont = Just k} -> setInsert k set
        CmmForeignCall {succ=k}     -> setInsert k set
        _                           -> set
minimalProcPointSet :: Platform -> ProcPointSet -> CmmGraph -> FuelUniqSM ProcPointSet
-- Given the set of successors of calls (which must be proc-points)
-- figure out the minimal set of necessary proc-points
-- NB: the parameter is named 'callPPs' rather than 'callProcPoints' so
-- that it does not shadow the top-level function of that name.
minimalProcPointSet platform callPPs g = extendPPSet platform g (postorderDfs g) callPPs
procPointAnalysis :: ProcPointSet -> CmmGraph -> FuelUniqSM (BlockEnv Status)
-- Once you know what the proc-points are, figure out
-- what proc-points each block is reachable from
procPointAnalysis procPoints g = liftM snd pass
  where
    pass = dataflowPassFwd g initialFacts (analFwd lattice forward)
    -- Seed every known proc point with the 'ProcPoint' fact.
    initialFacts = [(pp, ProcPoint) | pp <- setElems procPoints]
-- | One step of the iterative proc-point computation: run the
-- reachability analysis for the current set and, if some block's
-- successor is reached by more proc points than the block itself (and
-- is not already a proc point), promote one such successor and recurse;
-- otherwise the set is stable and is returned.
extendPPSet :: Platform -> CmmGraph -> [CmmBlock] -> ProcPointSet -> FuelUniqSM ProcPointSet
extendPPSet platform g blocks procPoints =
    do env <- procPointAnalysis procPoints g
       let add block pps = let id = entryLabel block
                           in  case mapLookup id env of
                                 Just ProcPoint -> setInsert id pps
                                 _ -> pps
           procPoints' = foldGraphBlocks add setEmpty g
           newPoints = mapMaybe ppSuccessor blocks
           newPoint  = listToMaybe newPoints
           ppSuccessor b =
               let nreached id = case mapLookup id env `orElse`
                                      pprPanic "no ppt" (ppr id <+> pprPlatform platform b) of
                                   ProcPoint -> 1
                                   ReachedBy ps -> setSize ps
                   block_procpoints = nreached (entryLabel b)
                   -- | Looking for a successor of b that is reached by
                   -- more proc points than b and is not already a proc
                   -- point. If found, it can become a proc point.
                   newId succ_id = not (setMember succ_id procPoints') &&
                                   nreached succ_id > block_procpoints
               in  listToMaybe $ filter newId $ successors b
       {-
       case newPoints of
         [] -> return procPoints'
         pps -> extendPPSet g blocks
                  (foldl extendBlockSet procPoints' pps)
       -}
       -- Only one new point is added per iteration; see
       -- Note [No simple dataflow] above.
       case newPoint of Just id ->
                          if setMember id procPoints' then panic "added old proc pt"
                          else extendPPSet platform g blocks (setInsert id procPoints')
                        Nothing -> return procPoints'
------------------------------------------------------------------------
-- Computing Proc-Point Protocols --
------------------------------------------------------------------------
{-
There is one major trick, discovered by Michael Adams, which is that
we want to choose protocols in a way that enables us to optimize away
some continuations. The optimization is very much like branch-chain
elimination, except that it involves passing results as well as
control. The idea is that if a call's continuation k does nothing but
CopyIn its results and then goto proc point P, the call's continuation
may be changed to P, *provided* P's protocol is identical to the
protocol for the CopyIn. We choose protocols to make this so.
Here's an explanatory example; we begin with the source code (lines
separate basic blocks):
..1..;
x, y = g();
goto P;
-------
P: ..2..;
Zipperization converts this code as follows:
..1..;
call g() returns to k;
-------
k: CopyIn(x, y);
goto P;
-------
P: ..2..;
What we'd like to do is assign P the same CopyIn protocol as k, so we
can eliminate k:
..1..;
call g() returns to P;
-------
P: CopyIn(x, y); ..2..;
Of course, P may be the target of more than one continuation, and
different continuations may have different protocols. Michael Adams
implemented a voting mechanism, but he thinks a simple greedy
algorithm would be just as good, so that's what we do.
-}
-- | How values are passed to a proc point: the calling convention, the
-- formal parameters, and the stack area used.
data Protocol = Protocol Convention [CmmFormal] Area
  deriving Eq

instance Outputable Protocol where
  ppr (Protocol c fs a) = text "Protocol" <+> ppr c <+> ppr fs <+> ppr a
-- | Function 'optimize_calls' chooses protocols only for those proc
-- points that are relevant to the optimization explained above.
-- The others are assigned by 'add_unassigned', which is not yet clever.
-- | Choose a protocol for every proc point and insert the corresponding
-- copy-in/copy-out code (see the Adams-optimization commentary above).
addProcPointProtocols :: ProcPointSet -> ProcPointSet -> CmmGraph -> FuelUniqSM CmmGraph
addProcPointProtocols callPPs procPoints g =
  do liveness <- cmmLiveness g
     (protos, g') <- optimize_calls liveness g
     blocks'' <- add_CopyOuts protos procPoints g'
     return $ ofBlockMap (g_entry g) blocks''
    where optimize_calls liveness g = -- see Note [Separate Adams optimization]
            do let (protos, blocks') =
                     foldGraphBlocks maybe_add_call (mapEmpty, mapEmpty) g
                   protos' = add_unassigned liveness procPoints protos
               let g' = ofBlockMap (g_entry g) (add_CopyIns callPPs protos' blocks')
               return (protos', removeUnreachableBlocks g')
          maybe_add_call :: CmmBlock -> (BlockEnv Protocol, BlockEnv CmmBlock)
                         -> (BlockEnv Protocol, BlockEnv CmmBlock)
          -- ^ If the block is a call whose continuation goes to a proc point
          -- whose protocol either matches the continuation's or is not yet set,
          -- redirect the call (cf 'newblock') and set the protocol if necessary
          maybe_add_call block (protos, blocks) =
            case lastNode block of
              CmmCall tgt (Just k) args res s
                  | Just proto <- mapLookup k protos,
                    Just pee <- branchesToProcPoint k
                  -> let newblock = replaceLastNode block (CmmCall tgt (Just pee)
                                                                  args res s)
                         changed_blocks   = insertBlock newblock blocks
                         unchanged_blocks = insertBlock block blocks
                     in case mapLookup pee protos of
                          Nothing -> (mapInsert pee proto protos, changed_blocks)
                          Just proto' ->
                            -- Only retarget the call when the protocols agree.
                            if proto == proto' then (protos, changed_blocks)
                            else (protos, unchanged_blocks)
              _ -> (protos, insertBlock block blocks)
          branchesToProcPoint :: BlockId -> Maybe BlockId
          -- ^ Tells whether the named block is just a branch to a proc point
          branchesToProcPoint id =
            let block = mapLookup id (toBlockMap g) `orElse`
                        panic "branch out of graph"
            in case blockToNodeList block of
                 (_, [], JustC (CmmBranch pee)) | setMember pee procPoints -> Just pee
                 _ -> Nothing
-- | For now, following a suggestion by Ben Lippmeier, we pass all
-- live variables as arguments, hoping that a clever register
-- allocator might help.
-- | Assign a protocol to every proc point that does not have one yet.
add_unassigned :: BlockEnv CmmLive -> ProcPointSet -> BlockEnv Protocol ->
                  BlockEnv Protocol
add_unassigned = pass_live_vars_as_args
pass_live_vars_as_args :: BlockEnv CmmLive -> ProcPointSet ->
                          BlockEnv Protocol -> BlockEnv Protocol
pass_live_vars_as_args _liveness procPoints protos = protos'
  where protos' = setFold addLiveVars protos procPoints
        -- Give a default protocol to any proc point without one.
        addLiveVars :: BlockId -> BlockEnv Protocol -> BlockEnv Protocol
        addLiveVars id protos =
          case mapLookup id protos of
            Just _  -> protos
            Nothing -> -- NOTE(review): liveness is currently ignored —
                       -- 'live' is hard-wired to 'emptyRegSet' (see the
                       -- commented-out lookup), so the formal list is
                       -- always empty here.
                       let live = emptyRegSet
                                  --lookupBlockEnv _liveness id `orElse`
                                  --panic ("no liveness at block " ++ show id)
                           formals = uniqSetToList live
                           prot = Protocol Private formals $ CallArea $ Young id
                       in  mapInsert id prot protos
-- | Add copy-in instructions to each proc point that did not arise from a call
-- instruction. (Proc-points that arise from calls already have their copy-in instructions.)
-- | Add copy-in instructions to each proc point that did not arise from
-- a call instruction (call-induced proc points already have them).
add_CopyIns :: ProcPointSet -> BlockEnv Protocol -> BlockEnv CmmBlock -> BlockEnv CmmBlock
add_CopyIns callPPs protos blocks = mapFold maybe_insert_CopyIns mapEmpty blocks
  where maybe_insert_CopyIns block blocks
           -- Only non-call proc points with an assigned protocol get
           -- copy-in nodes spliced in right after the entry node.
           | not $ setMember bid callPPs
           , Just (Protocol c fs _area) <- mapLookup bid protos
           = let nodes = copyInSlot c fs
                 (h, m, l) = blockToNodeList block
             in insertBlock (blockOfNodeList (h, nodes ++ m, l)) blocks
           | otherwise = insertBlock block blocks
          where bid = entryLabel block
-- | Add a CopyOut node before each procpoint.
-- If the predecessor is a call, then the copy outs should already be done by the callee.
-- Note: If we need to add copy-out instructions, they may require stack space,
-- so we accumulate a map from the successors to the necessary stack space,
-- then update the successors after we have finished inserting the copy-outs.
-- | Insert copy-out code on every edge into a proc point whose
-- predecessor is not a call (calls copy out on the callee side).
add_CopyOuts :: BlockEnv Protocol -> ProcPointSet -> CmmGraph ->
                FuelUniqSM (BlockEnv CmmBlock)
add_CopyOuts protos procPoints g = foldGraphBlocks mb_copy_out (return mapEmpty) g
  where mb_copy_out :: CmmBlock -> FuelUniqSM (BlockEnv CmmBlock) ->
                       FuelUniqSM (BlockEnv CmmBlock)
        -- The entry block never needs copy-outs of its own.
        mb_copy_out b z | entryLabel b == g_entry g = skip b z
        mb_copy_out b z =
          case lastNode b of
            CmmCall {} -> skip b z -- copy out done by callee
            CmmForeignCall {} -> skip b z -- copy out done by callee
            _ -> copy_out b z
        -- For every successor that is a proc point with a protocol,
        -- splice the copy-out nodes onto the edge ('insertBetween').
        copy_out b z = foldr trySucc init (successors b) >>= finish
          where init = (\bmap -> (b, bmap)) `liftM` z
                trySucc succId z =
                  if setMember succId procPoints then
                    case mapLookup succId protos of
                      Nothing -> z
                      Just (Protocol c fs _area) -> insert z succId $ copyOutSlot c fs
                  else z
                insert z succId m =
                  do (b, bmap) <- z
                     (b, bs) <- insertBetween b m succId
                     -- pprTrace "insert for succ" (ppr succId <> ppr m) $ do
                     return $ (b, foldl (flip insertBlock) bmap bs)
                finish (b, bmap) = return $ insertBlock b bmap
        skip b bs = insertBlock b `liftM` bs
-- At this point, we have found a set of procpoints, each of which should be
-- the entry point of a procedure.
-- Now, we create the procedure for each proc point,
-- which requires that we:
-- 1. build a map from proc points to the blocks reachable from the proc point
-- 2. turn each branch to a proc point into a jump
-- 3. turn calls and returns into jumps
-- 4. build info tables for the procedures -- and update the info table for
-- the SRTs in the entry procedure as well.
-- Input invariant: A block should only be reachable from a single ProcPoint.
-- ToDo: use the _ret naming convention that the old code generator
-- used. -- EZY
splitAtProcPoints :: CLabel -> ProcPointSet-> ProcPointSet -> BlockEnv Status ->
                     CmmDecl -> FuelUniqSM [CmmDecl]
splitAtProcPoints entry_label callPPs procPoints procMap
                  (CmmProc (TopInfo {info_tbl=info_tbl,
                                     stack_info=stack_info})
                   top_l g@(CmmGraph {g_entry=entry})) =
  do -- Build a map from procpoints to the blocks they reach
     let addBlock b graphEnv =
           case mapLookup bid procMap of
             Just ProcPoint -> add graphEnv bid bid b
             Just (ReachedBy set) ->
               case setElems set of
                 []   -> graphEnv
                 [id] -> add graphEnv id bid b
                 _    -> panic "Each block should be reachable from only one ProcPoint"
             Nothing -> pprPanic "block not reached by a proc point?" (ppr bid)
           where bid = entryLabel b
         -- Insert block b (labelled bid) into procId's sub-graph.
         add graphEnv procId bid b = mapInsert procId graph' graphEnv
           where graph  = mapLookup procId graphEnv `orElse` mapEmpty
                 graph' = mapInsert bid b graph
     graphEnv <- return $ foldGraphBlocks addBlock emptyBlockMap g
     -- Build a map from proc point BlockId to pairs of:
     --  * Labels for their new procedures
     --  * Labels for the info tables of their new procedures (only if the proc point is a callPP)
     -- Due to common blockification, we may overestimate the set of procpoints.
     let add_label map pp = Map.insert pp lbls map
           where lbls | pp == entry = (entry_label, Just entry_info_lbl)
                      | otherwise   = (blockLbl pp, guard (setMember pp callPPs) >>
                                                    Just (infoTblLbl pp))
                 entry_info_lbl = cit_lbl info_tbl
         procLabels = foldl add_label Map.empty
                            (filter (flip mapMember (toBlockMap g)) (setElems procPoints))
     -- For each procpoint, we need to know the SP offset on entry.
     -- If the procpoint is:
     --  - continuation of a call, the SP offset is in the call
     --  - otherwise, 0 (and left out of the spEntryMap)
     let add_sp_off :: CmmBlock -> BlockEnv CmmStackInfo -> BlockEnv CmmStackInfo
         add_sp_off b env =
           case lastNode b of
             CmmCall {cml_cont = Just succ, cml_ret_args = off, cml_ret_off = updfr_off} ->
               mapInsert succ (StackInfo { arg_space = off, updfr_space = Just updfr_off}) env
             CmmForeignCall {succ = succ, updfr = updfr_off} ->
               mapInsert succ (StackInfo { arg_space = wORD_SIZE, updfr_space = Just updfr_off}) env
             _ -> env
         spEntryMap = foldGraphBlocks add_sp_off (mapInsert entry stack_info emptyBlockMap) g
         getStackInfo id = mapLookup id spEntryMap `orElse` StackInfo {arg_space = 0, updfr_space = Nothing}
     -- In each new graph, add blocks jumping off to the new procedures,
     -- and replace branches to procpoints with branches to the jump-off blocks
     let add_jump_block (env, bs) (pp, l) =
           do bid <- liftM mkBlockId getUniqueM
              let b = blockOfNodeList (JustC (CmmEntry bid), [], JustC jump)
                  StackInfo {arg_space = argSpace, updfr_space = off} = getStackInfo pp
                  jump = CmmCall (CmmLit (CmmLabel l)) Nothing argSpace 0
                                 (off `orElse` 0) -- Jump's shouldn't need the offset...
              return (mapInsert pp bid env, b : bs)
         add_jumps (newGraphEnv) (ppId, blockEnv) =
           do let needed_jumps = -- find which procpoints we currently branch to
                    mapFold add_if_branch_to_pp [] blockEnv
                  add_if_branch_to_pp :: CmmBlock -> [(BlockId, CLabel)] -> [(BlockId, CLabel)]
                  add_if_branch_to_pp block rst =
                    case lastNode block of
                      CmmBranch id -> add_if_pp id rst
                      CmmCondBranch _ ti fi -> add_if_pp ti (add_if_pp fi rst)
                      CmmSwitch _ tbl -> foldr add_if_pp rst (catMaybes tbl)
                      _ -> rst
                  add_if_pp id rst = case Map.lookup id procLabels of
                                       Just (lbl, mb_info_lbl) -> (id, mb_info_lbl `orElse` lbl) : rst
                                       Nothing -> rst
              (jumpEnv, jumpBlocks) <-
                 foldM add_jump_block (mapEmpty, []) needed_jumps
                 -- update the entry block
              let b = expectJust "block in env" $ mapLookup ppId blockEnv
                  off = getStackInfo ppId
                  blockEnv' = mapInsert ppId b blockEnv
                  -- replace branches to procpoints with branches to jumps
                  blockEnv'' = toBlockMap $ replaceBranches jumpEnv $ ofBlockMap ppId blockEnv'
                  -- add the jump blocks to the graph
                  blockEnv''' = foldl (flip insertBlock) blockEnv'' jumpBlocks
              let g' = (off, ofBlockMap ppId blockEnv''')
              -- pprTrace "g' pre jumps" (ppr g') $ do
              return (mapInsert ppId g' newGraphEnv)
     graphEnv <- foldM add_jumps emptyBlockMap $ mapToList graphEnv
     -- Turn each per-proc-point graph into a CmmProc with the right
     -- label and info table.
     -- NOTE(review): the pattern here shadows the outer 'stack_info'
     -- and 'g' bindings deliberately (the per-proc values are used).
     let to_proc (bid, (stack_info, g)) = case expectJust "pp label" $ Map.lookup bid procLabels of
           (lbl, Just info_lbl)
             | bid == entry
             -> CmmProc (TopInfo {info_tbl=info_tbl, stack_info=stack_info})
                        top_l (replacePPIds g)
             | otherwise
             -> CmmProc (TopInfo {info_tbl=mkEmptyContInfoTable info_lbl, stack_info=stack_info})
                        lbl (replacePPIds g)
           (lbl, Nothing)
             -> CmmProc (TopInfo {info_tbl=CmmNonInfoTable, stack_info=stack_info})
                        lbl (replacePPIds g)
         -- References to procpoint IDs can now be replaced with the infotable's label
         replacePPIds g = mapGraphNodes (id, mapExp repl, mapExp repl) g
           where repl e@(CmmLit (CmmBlock bid)) =
                   case Map.lookup bid procLabels of
                     Just (_, Just info_lbl) -> CmmLit (CmmLabel info_lbl)
                     _ -> e
                 repl e = e
     -- The C back end expects to see return continuations before the call sites.
     -- Here, we sort them in reverse order -- it gets reversed later.
     let (_, block_order) = foldl add_block_num (0::Int, emptyBlockMap) (postorderDfs g)
         add_block_num (i, map) block = (i+1, mapInsert (entryLabel block) i map)
         sort_fn (bid, _) (bid', _) =
           compare (expectJust "block_order" $ mapLookup bid block_order)
                   (expectJust "block_order" $ mapLookup bid' block_order)
     procs <- return $ map to_proc $ sortBy sort_fn $ mapToList graphEnv
     return -- pprTrace "procLabels" (ppr procLabels)
            -- pprTrace "splitting graphs" (ppr procs)
            procs
-- Data declarations are passed through unchanged.
splitAtProcPoints _ _ _ _ t@(CmmData _ _) = return [t]
----------------------------------------------------------------
{-
Note [Direct reachability]
Block B is directly reachable from proc point P iff control can flow
from P to B without passing through an intervening proc point.
-}
----------------------------------------------------------------
{-
Note [No simple dataflow]
Sadly, it seems impossible to compute the proc points using a single
dataflow pass. One might attempt to use this simple lattice:
data Location = Unknown
| InProc BlockId -- node is in procedure headed by the named proc point
| ProcPoint -- node is itself a proc point
At a join, a node in two different blocks becomes a proc point.
The difficulty is that the change of information during iterative
computation may promote a node prematurely. Here's a program that
illustrates the difficulty:
f () {
entry:
....
L1:
if (...) { ... }
else { ... }
L2: if (...) { g(); goto L1; }
return x + y;
}
The only proc-point needed (besides the entry) is L1. But in an
iterative analysis, consider what happens to L2. On the first pass
through, it rises from Unknown to 'InProc entry', but when L1 is
promoted to a proc point (because it's the successor of g()), L1's
successors will be promoted to 'InProc L1'. The problem hits when the
new fact 'InProc L1' flows into L2 which is already bound to 'InProc entry'.
The join operation makes it a proc point when in fact it needn't be,
because its immediate dominator L1 is already a proc point and there
are no other proc points that directly reach L2.
-}
{- Note [Separate Adams optimization]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It may be worthwhile to attempt the Adams optimization by rewriting
the graph before the assignment of proc-point protocols. Here are a
couple of rules:
g() returns to k; g() returns to L;
k: CopyIn c ress; goto L:
... ==> ...
L: // no CopyIn node here L: CopyIn c ress;
And when c == c' and ress == ress', this also:
g() returns to k; g() returns to L;
k: CopyIn c ress; goto L:
... ==> ...
L: CopyIn c' ress' L: CopyIn c' ress' ;
In both cases the goal is to eliminate k.
-}
| mcmaniac/ghc | compiler/cmm/CmmProcPoint.hs | bsd-3-clause | 26,213 | 0 | 20 | 8,263 | 4,323 | 2,212 | 2,111 | 275 | 14 |
{-# LANGUAGE NoMonomorphismRestriction, RankNTypes #-}
{-# LANGUAGE FunctionalDependencies, MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances, FlexibleContexts, ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
module T2239 where
-- Two distinct argument types used to drive instance selection below.
data A = A
data B = B

-- A class with a trivial method so the test can observe which instance
-- was selected.
class C a where c :: a -> String
instance C Bool where c _ = "Bool"
instance C Char where c _ = "Char"
-- via TFs
-- Map the argument type to the class-constrained type via a type family.
type family TF a
type instance TF A = Char
type instance TF B = Bool

tf :: forall a b. (b ~ TF a,C b) => a -> String
tf a = c (undefined:: b)

tfa = tf A
tfb = tf B
-- via FDs
-- The same selection, expressed with a functional dependency.
class FD a b | a -> b
instance FD A Char
instance FD B Bool

fd :: forall a b. (FD a b,C b) => a -> String
fd a = c (undefined:: b)

fda = fd A
fdb = fd B
-- A bijective relation: each class parameter determines the other.
class MyEq a b | a->b, b->a
instance MyEq a a

simpleFD = id :: (forall b. MyEq b Bool => b->b)

simpleTF = id :: (forall b. b~Bool => b->b)

-- Actually these two do not involve impredicative instantiation,
-- so they now succeed
complexFD = id :: (forall b. MyEq b Bool => b->b)
               -> (forall c. MyEq c Bool => c->c)

complexTF = id :: (forall b. b~Bool => b->b)
               -> (forall c. c~Bool => c->c)
{- For example, here is how the subsumption check works for complexTF
when type-checking the expression
(id :: (forall b. b~Bool => b->b) -> (forall c. c~Bool => c->c))
First, deeply skolemise the type sig, (level 3) before calling
tcExpr on 'id'. Then instantiate id's type:
b~Bool |-3 alpha[3] -> alpha <= (forall c. c~Bool => c->c) -> b -> b
Now decompose the ->
b~Bool |-3 alpha[3] ~ b->b, (forall c. c~Bool => c->c) <= a
And this is perfectly soluble. alpha is touchable; and c is instantiated.
-} | bitemyapp/ghc | testsuite/tests/indexed-types/should_fail/T2239.hs | bsd-3-clause | 1,687 | 0 | 10 | 409 | 464 | 254 | 210 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-| DRBD proc file parser
This module holds the definition of the parser that extracts status
information from the DRBD proc file.
-}
{-
Copyright (C) 2012 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Ganeti.Storage.Drbd.Parser (drbdStatusParser, commaIntParser) where
import Control.Applicative ((<*>), (*>), (<*), (<$>), (<|>), pure)
import qualified Data.Attoparsec.Text as A
import qualified Data.Attoparsec.Combinator as AC
import Data.Attoparsec.Text (Parser)
import Data.List
import Data.Maybe
import Data.Text (Text, unpack)
import Ganeti.Storage.Drbd.Types
-- | Our own space-skipping function, because A.skipSpace also skips
-- newline characters. It skips ZERO or more spaces, so it does not
-- fail if there are no spaces.
skipSpaces :: Parser ()
skipSpaces = A.skipWhile A.isHorizontalSpace  -- zero or more, so it never fails
-- | Skips spaces and the given string, then executes a parser and
-- returns its result.
-- | Skip any horizontal spaces, match the given literal string, then
-- run the supplied parser and return its result.
skipSpacesAndString :: Text -> Parser a -> Parser a
skipSpacesAndString s p = skipSpaces *> A.string s *> p
-- | Predicate verifying (potentially bad) end of lines
-- | True for NUL as well as the ordinary end-of-line characters, so
-- that (potentially bad) line terminators are all recognised.
isBadEndOfLine :: Char -> Bool
isBadEndOfLine c = c == '\0' || A.isEndOfLine c
-- | Takes a parser and returns it with the content wrapped in a Maybe
-- object. The resulting parser never fails, but contains Nothing if
-- it couldn't properly parse the string.
optional :: Parser a -> Parser (Maybe a)
-- Behaves like 'Control.Applicative.optional' specialised to 'Parser'.
optional parser = (Just <$> parser) <|> pure Nothing
-- | The parser for a whole DRBD status file.
-- The minor-to-instance mapping is used by 'deviceParser' to attach
-- instance names to the devices it parses.
drbdStatusParser :: [DrbdInstMinor] -> Parser DRBDStatus
drbdStatusParser instMinor =
  DRBDStatus <$> versionInfoParser
             <*> deviceParser instMinor `AC.manyTill` A.endOfInput
             <* A.endOfInput
-- | The parser for the version information lines.
versionInfoParser :: Parser VersionInfo
versionInfoParser = do
  versionF <- optional versionP
  apiF <- optional apiP
  protoF <- optional protoP
  srcVersionF <- optional srcVersion
  ghF <- fmap unpack <$> optional gh
  builderF <- fmap unpack <$> optional builder
  -- Fail only when none of the fields could be parsed; otherwise accept
  -- whatever subset was present.
  if isNothing versionF
     && isNothing apiF
     && isNothing protoF
     && isNothing srcVersionF
     && isNothing ghF
     && isNothing builderF
    then fail "versionInfo"
    else pure $ VersionInfo versionF apiF protoF srcVersionF ghF builderF
    where versionP =
            A.string "version:"
            *> skipSpaces
            *> fmap unpack (A.takeWhile $ not . A.isHorizontalSpace)
          apiP =
            skipSpacesAndString "(api:" . fmap unpack $ A.takeWhile (/= '/')
          protoP =
            A.string "/proto:"
            *> fmap Data.Text.unpack (A.takeWhile (/= ')'))
            <* A.takeTill A.isEndOfLine <* A.endOfLine
          srcVersion =
            A.string "srcversion:"
            *> AC.skipMany1 A.space
            *> fmap unpack (A.takeTill A.isEndOfLine)
            <* A.endOfLine
          gh =
            A.string "GIT-hash:"
            *> skipSpaces
            *> A.takeWhile (not . A.isHorizontalSpace)
          builder =
            skipSpacesAndString "build by" $
            skipSpaces
            *> A.takeTill A.isEndOfLine
            <* A.endOfLine
-- | The parser for a (multi-line) string representing a device.
deviceParser :: [DrbdInstMinor] -> Parser DeviceInfo
deviceParser instMinor = do
  deviceNum <- skipSpaces *> A.decimal <* A.char ':'
  cs <- skipSpacesAndString "cs:" connStateParser
  if cs == Unconfigured
    then do
      -- Unconfigured devices carry no further fields on their line(s).
      _ <- additionalEOL
      return $ UnconfiguredDevice deviceNum
    else do
      ro <- skipSpaces *> skipRoleString *> localRemoteParser roleParser
      ds <- skipSpacesAndString "ds:" $ localRemoteParser diskStateParser
      replicProtocol <- A.space *> A.anyChar
      io <- skipSpaces *> ioFlagsParser <* A.skipWhile isBadEndOfLine
      pIndicators <- perfIndicatorsParser
      syncS <- conditionalSyncStatusParser cs
      reS <- optional resyncParser
      act <- optional actLogParser
      _ <- additionalEOL
      -- Look up the instance owning this minor, if any, to record its name.
      let inst = find ((deviceNum ==) . dimMinor) instMinor
          iName = fmap dimInstName inst
      return $ DeviceInfo deviceNum cs ro ds replicProtocol io pIndicators
                          syncS reS act iName
    where -- A sync-status line is only present while this node is the
          -- source or the target of a synchronisation.
          conditionalSyncStatusParser SyncSource = Just <$> syncStatusParser
          conditionalSyncStatusParser SyncTarget = Just <$> syncStatusParser
          conditionalSyncStatusParser _ = pure Nothing
          -- Older DRBD versions emit "st:" instead of "ro:".
          skipRoleString = A.string "ro:" <|> A.string "st:"
          resyncParser = skipSpacesAndString "resync:" additionalInfoParser
          actLogParser = skipSpacesAndString "act_log:" additionalInfoParser
          additionalEOL = A.skipWhile A.isEndOfLine
-- | The parser for the connection state.
--
-- Recognises every connection-state keyword that appears in the
-- @cs:@ field of @/proc/drbd@.
connStateParser :: Parser ConnState
connStateParser =
  standAlone
  <|> disconnecting
  <|> unconnected
  <|> timeout
  <|> brokenPipe
  <|> networkFailure
  <|> protocolError
  <|> tearDown
  <|> wfConnection
  <|> wfReportParams
  <|> connected
  <|> startingSyncS
  <|> startingSyncT
  <|> wfBitMapS
  <|> wfBitMapT
  <|> wfSyncUUID
  <|> syncSource
  <|> syncTarget
  <|> pausedSyncS
  <|> pausedSyncT
  <|> verifyS
  <|> verifyT
  <|> unconfigured
  where standAlone     = A.string "StandAlone" *> pure StandAlone
        -- BUG FIX: DRBD prints "Disconnecting"; the previous literal
        -- "Disconnectiog" (typo) could never match, so devices in that
        -- state failed to parse.
        disconnecting  = A.string "Disconnecting" *> pure Disconnecting
        unconnected    = A.string "Unconnected" *> pure Unconnected
        timeout        = A.string "Timeout" *> pure Timeout
        brokenPipe     = A.string "BrokenPipe" *> pure BrokenPipe
        networkFailure = A.string "NetworkFailure" *> pure NetworkFailure
        protocolError  = A.string "ProtocolError" *> pure ProtocolError
        tearDown       = A.string "TearDown" *> pure TearDown
        wfConnection   = A.string "WFConnection" *> pure WFConnection
        wfReportParams = A.string "WFReportParams" *> pure WFReportParams
        connected      = A.string "Connected" *> pure Connected
        startingSyncS  = A.string "StartingSyncS" *> pure StartingSyncS
        startingSyncT  = A.string "StartingSyncT" *> pure StartingSyncT
        wfBitMapS      = A.string "WFBitMapS" *> pure WFBitMapS
        wfBitMapT      = A.string "WFBitMapT" *> pure WFBitMapT
        wfSyncUUID     = A.string "WFSyncUUID" *> pure WFSyncUUID
        syncSource     = A.string "SyncSource" *> pure SyncSource
        syncTarget     = A.string "SyncTarget" *> pure SyncTarget
        pausedSyncS    = A.string "PausedSyncS" *> pure PausedSyncS
        pausedSyncT    = A.string "PausedSyncT" *> pure PausedSyncT
        verifyS        = A.string "VerifyS" *> pure VerifyS
        verifyT        = A.string "VerifyT" *> pure VerifyT
        unconfigured   = A.string "Unconfigured" *> pure Unconfigured
-- | Parser for recognizing strings describing two elements of the
-- same type separated by a '/'. The first one is considered local,
-- the second remote.
localRemoteParser :: Parser a -> Parser (LocalRemote a)
localRemoteParser p = do
  localValue <- p
  _ <- A.char '/'
  remoteValue <- p
  return (LocalRemote localValue remoteValue)
-- | The parser for resource roles.
roleParser :: Parser Role
roleParser =
  (A.string "Primary" >> return Primary)
  <|> (A.string "Secondary" >> return Secondary)
  <|> (A.string "Unknown" >> return Unknown)
-- | The parser for disk states.
--
-- Alternatives are tried in the original order, which is preserved
-- verbatim.
diskStateParser :: Parser DiskState
diskStateParser =
  (A.string "Diskless" >> return Diskless)
  <|> (A.string "Attaching" >> return Attaching)
  <|> (A.string "Failed" >> return Failed)
  <|> (A.string "Negotiating" >> return Negotiating)
  <|> (A.string "Inconsistent" >> return Inconsistent)
  <|> (A.string "Outdated" >> return Outdated)
  <|> (A.string "DUnknown" >> return DUnknown)
  <|> (A.string "Consistent" >> return Consistent)
  <|> (A.string "UpToDate" >> return UpToDate)
-- | The parser for I/O flags: everything up to (but excluding) the
-- end of the line.
ioFlagsParser :: Parser String
ioFlagsParser = do
  flags <- A.takeWhile (\c -> not (isBadEndOfLine c))
  return (unpack flags)
-- | The parser for performance indicators.
--
-- The @ep@, @wo@ and @oos@ fields are optional -- presumably not all
-- DRBD versions emit them (TODO confirm which versions).
perfIndicatorsParser :: Parser PerfIndicators
perfIndicatorsParser =
  PerfIndicators
    <$> skipSpacesAndString "ns:" A.decimal
    <*> skipSpacesAndString "nr:" A.decimal
    <*> skipSpacesAndString "dw:" A.decimal
    <*> skipSpacesAndString "dr:" A.decimal
    <*> skipSpacesAndString "al:" A.decimal
    <*> skipSpacesAndString "bm:" A.decimal
    <*> skipSpacesAndString "lo:" A.decimal
    <*> skipSpacesAndString "pe:" A.decimal
    <*> skipSpacesAndString "ua:" A.decimal
    <*> skipSpacesAndString "ap:" A.decimal
    <*> optional (skipSpacesAndString "ep:" A.decimal)
    <*> optional (skipSpacesAndString "wo:" A.anyChar)
    <*> optional (skipSpacesAndString "oos:" A.decimal)
    <* skipSpaces <* A.endOfLine
-- | The parser for the synchronization status.
syncStatusParser :: Parser SyncStatus
syncStatusParser = do
  -- Discard the "[=====>....]" progress bar first.
  _ <- statusBarParser
  percent <-
    skipSpacesAndString "sync'ed:" $ skipSpaces *> A.double <* A.char '%'
  partSyncSize <- skipSpaces *> A.char '(' *> A.decimal
  totSyncSize <- A.char '/' *> A.decimal <* A.char ')'
  sizeUnit <- sizeUnitParser <* optional A.endOfLine
  timeToEnd <- skipSpacesAndString "finish:" $ skipSpaces *> timeParser
  -- "speed: N (M)" -- only the first (current) speed is kept.
  sp <-
    skipSpacesAndString "speed:" $
    skipSpaces
    *> commaIntParser
    <* skipSpaces
    <* A.char '('
    <* commaIntParser
    <* A.char ')'
  -- The "want:" field may be absent from the input, in which case we
  -- default to Nothing.
  w <- skipSpacesAndString "want:" (
         skipSpaces
         *> (Just <$> commaIntParser)
       )
       <|> pure Nothing
  sSizeUnit <- skipSpaces *> sizeUnitParser
  sTimeUnit <- A.char '/' *> timeUnitParser
  _ <- A.endOfLine
  return $
    SyncStatus percent partSyncSize totSyncSize sizeUnit timeToEnd sp w
               sSizeUnit sTimeUnit
-- | The parser for recognizing (and discarding) the sync status bar,
-- e.g. @[=====>..........]@.
statusBarParser :: Parser ()
statusBarParser = do
  _ <- skipSpaces
  _ <- A.char '['
  A.skipWhile (== '=')
  A.skipWhile (== '>')
  A.skipWhile (== '.')
  _ <- A.char ']'
  return ()
-- | The parser for recognizing data size units (only the ones
-- actually found in DRBD files are implemented).
sizeUnitParser :: Parser SizeUnit
sizeUnitParser =
  (A.string "K" >> return KiloByte)
  <|> (A.string "M" >> return MegaByte)
-- | The parser for recognizing time (hh:mm:ss).
timeParser :: Parser Time
timeParser = do
  hours <- A.decimal :: Parser Int
  _ <- A.char ':'
  minutes <- A.decimal :: Parser Int
  _ <- A.char ':'
  seconds <- A.decimal :: Parser Int
  return (Time hours minutes seconds)
-- | The parser for recognizing time units (only the ones actually
-- found in DRBD files are implemented).
timeUnitParser :: Parser TimeUnit
timeUnitParser = A.string "sec" >> return Second
-- | Haskell does not recognise ',' as the thousands separator every 3
-- digits but DRBD uses it, so we need an ad-hoc parser.
-- If a number beginning with more than 3 digits without a comma is
-- parsed, only the first 3 digits are considered to be valid, the rest
-- is not consumed, and left for further parsing.
commaIntParser :: Parser Int
commaIntParser = do
  -- Try the longest digit run first so "12," binds both digits.
  leading <- AC.count 3 A.digit <|> AC.count 2 A.digit <|> AC.count 1 A.digit
  -- Return 'commaIntHelper' directly instead of the redundant
  -- "x <- m; pure x" bind of the previous version.
  commaIntHelper (read leading)
-- | Helper (triplet parser) for the commaIntParser: repeatedly consume
-- ",ddd" triplets, folding each one into the accumulator; once no
-- further triplet follows, yield the accumulated value.
commaIntHelper :: Int -> Parser Int
commaIntHelper acc =
  (do _ <- A.char ','
      chunk <- AC.count 3 A.digit
      commaIntHelper (acc * 1000 + (read chunk :: Int)))
  <|> pure acc
-- | Parser for the additional information provided by DRBD <= 8.0
-- (the "used:A/B hits: ... changed:" counters of the resync/act_log
-- lines).
additionalInfoParser :: Parser AdditionalInfo
additionalInfoParser = AdditionalInfo
  <$> skipSpacesAndString "used:" A.decimal
  <*> (A.char '/' *> A.decimal)
  <*> skipSpacesAndString "hits:" A.decimal
  <*> skipSpacesAndString "misses:" A.decimal
  <*> skipSpacesAndString "starving:" A.decimal
  <*> skipSpacesAndString "dirty:" A.decimal
  <*> skipSpacesAndString "changed:" A.decimal
  <* A.endOfLine
| vladimir-ipatov/ganeti | src/Ganeti/Storage/Drbd/Parser.hs | gpl-2.0 | 13,107 | 0 | 26 | 3,130 | 2,904 | 1,429 | 1,475 | 265 | 4 |
{-# LANGUAGE BangPatterns #-}
module Main where
import Criterion.Main
import System.Random
import BenchmarkTypes
import qualified Data.OrdPSQ.Benchmark as OrdPSQ
import qualified Data.IntPSQ.Benchmark as IntPSQ
import qualified Data.HashPSQ.Benchmark as HashPSQ
import qualified Data.PSQueue.Benchmark as PSQueue
import qualified Data.FingerTree.PSQueue.Benchmark as FingerPSQ
-- | The number of elements used by every benchmark: 2^12 = 4096.
benchmarkSize :: Int
benchmarkSize = 2 ^ (12 :: Int)
-- Keys and priorities in ascending order.  NOINLINE keeps GHC from
-- inlining/re-evaluating the list inside each benchmark.
{-# NOINLINE increasing #-}
increasing :: [BElem]
increasing = [(n, n, ()) | n <- [1 .. benchmarkSize]]
-- Keys and priorities in descending order (the reverse of 'increasing').
{-# NOINLINE decreasing #-}
decreasing :: [BElem]
decreasing = reverse increasing
-- Pseudo-random keys and priorities drawn from two fixed-seed
-- generators; zipping with [1 .. benchmarkSize] truncates the infinite
-- 'randoms' streams to exactly 'benchmarkSize' elements.
{-# NOINLINE semirandom #-}
semirandom :: [BElem]
semirandom =
  [ (x, y, ())
  | (_, x, y) <- zip3 [1 .. benchmarkSize] (randoms gen1) (randoms gen2)
  ]
  where
    gen1 = mkStdGen 1234
    gen2 = mkStdGen 5678
main :: IO ()
main = defaultMain $ runBenchmark
    -- Every PSQ implementation is measured on the same three input
    -- distributions so the numbers are directly comparable.
    [ IntPSQ.benchmark "IntPSQ increasing" increasing
    , IntPSQ.benchmark "IntPSQ decreasing" decreasing
    , IntPSQ.benchmark "IntPSQ semirandom" semirandom
    , HashPSQ.benchmark "HashPSQ increasing" increasing
    , HashPSQ.benchmark "HashPSQ decreasing" decreasing
    , HashPSQ.benchmark "HashPSQ semirandom" semirandom
    , OrdPSQ.benchmark "OrdPSQ increasing" increasing
    , OrdPSQ.benchmark "OrdPSQ decreasing" decreasing
    , OrdPSQ.benchmark "OrdPSQ semirandom" semirandom
    , PSQueue.benchmark "PSQueue increasing" increasing
    , PSQueue.benchmark "PSQueue decreasing" decreasing
    , PSQueue.benchmark "PSQueue semirandom" semirandom
    , FingerPSQ.benchmark "FingerTree PSQueue increasing" increasing
    , FingerPSQ.benchmark "FingerTree PSQueue decreasing" decreasing
    , FingerPSQ.benchmark "FingerTree PSQueue semirandom" semirandom
    ]
| ariep/psqueues | benchmarks/Main.hs | bsd-3-clause | 2,013 | 0 | 10 | 544 | 403 | 232 | 171 | 42 | 1 |
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# LANGUAGE GeneralizedNewtypeDeriving, QuasiQuotes, TemplateHaskell, CPP, GADTs, TypeFamilies, OverloadedStrings, FlexibleContexts, FlexibleInstances, EmptyDataDecls, MultiParamTypeClasses #-}
module MaxLenTest (
specs
#ifndef WITH_NOSQL
, maxlenMigrate
#endif
) where
import Init
import Data.String (IsString)
#ifdef WITH_NOSQL
-- Run a database action against the NoSQL backend; no setup action is
-- needed before the test body.
db :: Action IO () -> Assertion
db = db' (return ())

-- NOTE(review): the entity block below is shared between the NoSQL and
-- SQL builds; only the quasi-quoter and settings differ per backend.
mkPersist persistSettings [persistUpperCase|
#else
share [mkPersist sqlSettings, mkMigrate "maxlenMigrate"] [persistLowerCase|
#endif
MaxLen
    text1 Text
    text2 Text maxlen=3
    bs1 ByteString
    bs2 ByteString maxlen=3
    str1 String
    str2 String maxlen=3
    MLText1 text1
    MLText2 text2
    MLBs1 bs1
    MLBs2 bs2
    MLStr1 str1
    MLStr2 str2
    deriving Show Eq
|]
specs :: Spec
specs = describe "Maximum length attribute" $ do
  it "" $ db $ do
    -- t1 fits within every column; t2 exceeds the maxlen=3 columns;
    -- t2' is t2 with the over-long fields cut down to 3 characters.
    let t1 = MaxLen a a a a a a
        t2 = MaxLen b b b b b b
        t2' = MaxLen b b' b b' b b'
        a, b, b' :: IsString t => t
        a = "a"
        b = "12345"
        b' = "123"
    t1k <- insert t1
    t2k <- insert t2
    Just t1v <- get t1k
    Just t2v <- get t2k
    liftIO $ do t1v @?= t1
                -- Backends disagree: some truncate to maxlen on insert,
                -- others store the full value -- accept either result.
                if t2v == t2
                  then t2v @?= t2 -- FIXME: why u no truncate?
                  else t2v @?= t2'
| pseudonom/persistent | persistent-test/src/MaxLenTest.hs | mit | 1,351 | 0 | 15 | 393 | 278 | 143 | 135 | 26 | 2 |
-- Emit the 'show'-quoted string, exactly as 'print' would.
main :: IO ()
main = putStrLn (show "Hello world")
| sdiehl/ghc | testsuite/tests/driver/T17143.hs | bsd-3-clause | 28 | 0 | 5 | 6 | 9 | 4 | 5 | 1 | 1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="de-DE">
<title>AJAX Spider | ZAP Extensions</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Suche</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | kingthorin/zap-extensions | addOns/spiderAjax/src/main/javahelp/org/zaproxy/zap/extension/spiderAjax/resources/help_de_DE/helpset_de_DE.hs | apache-2.0 | 972 | 78 | 66 | 159 | 413 | 209 | 204 | -1 | -1 |
{-# LANGUAGE CPP, GADTs #-}
-----------------------------------------------------------------------------
--
-- Generating machine code (instruction selection)
--
-- (c) The University of Glasgow 1996-2004
--
-----------------------------------------------------------------------------
-- This is a big module, but, if you pay attention to
-- (a) the sectioning, (b) the type signatures, and
-- (c) the #if blah_TARGET_ARCH} things, the
-- structure should not be too overwhelming.
module PPC.CodeGen (
cmmTopCodeGen,
generateJumpTableForInstr,
InstrBlock
)
where
#include "HsVersions.h"
#include "nativeGen/NCG.h"
#include "../includes/MachDeps.h"
-- NCG stuff:
import CodeGen.Platform
import PPC.Instr
import PPC.Cond
import PPC.Regs
import CPrim
import NCGMonad
import Instruction
import PIC
import Size
import RegClass
import Reg
import TargetReg
import Platform
-- Our intermediate code:
import BlockId
import PprCmm ( pprExpr )
import Cmm
import CmmUtils
import CLabel
import Hoopl
-- The rest:
import OrdList
import Outputable
import Unique
import DynFlags
import Control.Monad ( mapAndUnzipM, when )
import Data.Bits
import Data.Word
import BasicTypes
import FastString
import Util
-- -----------------------------------------------------------------------------
-- Top-level of the instruction selector
-- | 'InstrBlock's are the insn sequences generated by the insn selectors.
-- They are really trees of insns to facilitate fast appending, where a
-- left-to-right traversal (pre-order?) yields the insns in the correct
-- order.
-- | Code-generate one top-level Cmm declaration.  Procedures are
-- translated block by block; data sections pass through untranslated.
cmmTopCodeGen
        :: RawCmmDecl
        -> NatM [NatCmmDecl CmmStatics Instr]

cmmTopCodeGen (CmmProc info lab live graph) = do
  let blocks = toBlockListEntryFirst graph
  (nat_blocks,statics) <- mapAndUnzipM basicBlockCodeGen blocks
  picBaseMb <- getPicBaseMaybeNat
  dflags <- getDynFlags
  let proc = CmmProc info lab live (ListGraph $ concat nat_blocks)
      tops = proc : concat statics
      os   = platformOS $ targetPlatform dflags
  -- If a PIC base register was allocated, prepend the code that
  -- initialises it.
  case picBaseMb of
      Just picBase -> initializePicBase_ppc ArchPPC os picBase tops
      Nothing -> return tops

cmmTopCodeGen (CmmData sec dat) = do
  return [CmmData sec dat]  -- no translation, we just use CmmStatic
-- | Translate one Cmm basic block into native basic blocks plus any
-- data sections the instruction selector produced along the way.
basicBlockCodeGen
        :: Block CmmNode C C
        -> NatM ( [NatBasicBlock Instr]
                , [NatCmmDecl CmmStatics Instr])

basicBlockCodeGen block = do
  -- NB: 'tail' and 'id' here shadow the Prelude names.
  let (_, nodes, tail)  = blockSplit block
      id = entryLabel block
      stmts = blockToList nodes
  mid_instrs <- stmtsToInstrs stmts
  tail_instrs <- stmtToInstrs tail
  let instrs = mid_instrs `appOL` tail_instrs

  -- code generation may introduce new basic block boundaries, which
  -- are indicated by the NEWBLOCK instruction. We must split up the
  -- instruction stream into basic blocks again. Also, we extract
  -- LDATAs here too.
  let
        (top,other_blocks,statics) = foldrOL mkBlocks ([],[],[]) instrs

        mkBlocks (NEWBLOCK id) (instrs,blocks,statics)
          = ([], BasicBlock id instrs : blocks, statics)
        mkBlocks (LDATA sec dat) (instrs,blocks,statics)
          = (instrs, blocks, CmmData sec dat:statics)
        mkBlocks instr (instrs,blocks,statics)
          = (instr:instrs, blocks, statics)
  return (BasicBlock id top : other_blocks, statics)
-- | Translate a list of Cmm statements, concatenating the generated
-- instruction sequences in order.
stmtsToInstrs :: [CmmNode e x] -> NatM InstrBlock
stmtsToInstrs stmts = mapM stmtToInstrs stmts >>= \instrss ->
  return (concatOL instrss)
-- | Translate a single Cmm statement (graph node) into instructions.
stmtToInstrs :: CmmNode e x -> NatM InstrBlock
stmtToInstrs stmt = do
  dflags <- getDynFlags
  case stmt of
    CmmComment s   -> return (unitOL (COMMENT s))
    CmmTick {}     -> return nilOL
    CmmUnwind {}   -> return nilOL

    -- Assignments dispatch on the register's type: float, 64-bit int
    -- on a 32-bit target (handled pairwise), or ordinary integer.
    CmmAssign reg src
      | isFloatType ty -> assignReg_FltCode size reg src
      | target32Bit (targetPlatform dflags) &&
        isWord64 ty    -> assignReg_I64Code      reg src
      | otherwise      -> assignReg_IntCode size reg src
        where ty = cmmRegType dflags reg
              size = cmmTypeSize ty

    -- Stores dispatch the same way, but on the source expression type.
    CmmStore addr src
      | isFloatType ty -> assignMem_FltCode size addr src
      | target32Bit (targetPlatform dflags) &&
        isWord64 ty    -> assignMem_I64Code      addr src
      | otherwise      -> assignMem_IntCode size addr src
        where ty = cmmExprType dflags src
              size = cmmTypeSize ty

    CmmUnsafeForeignCall target result_regs args
      -> genCCall target result_regs args

    CmmBranch id -> genBranch id
    CmmCondBranch arg true false -> do
      b1 <- genCondJump true arg
      b2 <- genBranch false
      return (b1 `appOL` b2)
    CmmSwitch arg ids -> do
      dflags <- getDynFlags  -- shadows the outer dflags
      genSwitch dflags arg ids
    CmmCall { cml_target = arg } -> genJump arg
    _ ->
      panic "stmtToInstrs: statement should have been cps'd away"
--------------------------------------------------------------------------------

-- | 'InstrBlock's are the insn sequences generated by the insn selectors.
-- They are really trees of insns to facilitate fast appending, where a
-- left-to-right traversal yields the insns in the correct order.
type InstrBlock
        = OrdList Instr
-- | Register's passed up the tree.  If the stix code forces the register
-- to live in a pre-decided machine register, it comes out as @Fixed@;
-- otherwise, it comes out as @Any@, and the parent can decide which
-- register to put it in.
data Register
  = Fixed Size Reg InstrBlock         -- ^ result lives in this register
  | Any   Size (Reg -> InstrBlock)    -- ^ caller supplies the destination
-- | Re-tag a 'Register' with a different 'Size', leaving the generated
-- code (and any fixed register choice) untouched.
swizzleRegisterRep :: Register -> Size -> Register
swizzleRegisterRep register size =
    case register of
      Fixed _ reg code -> Fixed size reg code
      Any _ codefn     -> Any size codefn
-- | Grab the Reg for a CmmReg
getRegisterReg :: Platform -> CmmReg -> Reg

getRegisterReg _ (CmmLocal (LocalReg u pk))
  = RegVirtual $ mkVirtualReg u (cmmTypeSize pk)

getRegisterReg platform (CmmGlobal mid)
  = case globalRegMaybe platform mid of
        Just reg -> RegReal reg
        Nothing  -> pprPanic "getRegisterReg-memory" (ppr $ CmmGlobal mid)
        -- By this stage, the only MagicIds remaining should be the
        -- ones which map to a real machine register on this
        -- platform.  Hence ...
{-
Now, given a tree (the argument to an CmmLoad) that references memory,
produce a suitable addressing mode.
A Rule of the Game (tm) for Amodes: use of the addr bit must
immediately follow use of the code part, since the code part puts
values in registers which the addr then refers to. So you can't put
anything in between, lest it overwrite some of those registers. If
you need to do some other computation between the code part and use of
the addr bit, first store the effective address from the amode in a
temporary, then do the other computation, and then use the temporary:
code
LEA amode, tmp
... other computation ...
... (tmp) ...
-}
-- | Convert a BlockId to some CmmStatic data: a zero word for a hole,
-- otherwise a label pointing at the block's assembly-temp label.
jumpTableEntry :: DynFlags -> Maybe BlockId -> CmmStatic
jumpTableEntry dflags mbBlockId =
    case mbBlockId of
      Nothing      -> CmmStaticLit (CmmInt 0 (wordWidth dflags))
      Just blockid -> CmmStaticLit (CmmLabel (mkAsmTempLabel (getUnique blockid)))
-- -----------------------------------------------------------------------------
-- General things for putting together code sequences
-- Expand CmmRegOff. ToDo: should we do it this way around, or convert
-- CmmExprs into CmmRegOff?
-- | Expand a 'CmmRegOff' into the equivalent explicit 'MO_Add' tree.
-- Only defined for 'CmmRegOff'; any other expression is a panic.
mangleIndexTree :: DynFlags -> CmmExpr -> CmmExpr
mangleIndexTree dflags (CmmRegOff reg off)
  = CmmMachOp (MO_Add width) [CmmReg reg, CmmLit (CmmInt (fromIntegral off) width)]
  where width = typeWidth (cmmRegType dflags reg)

mangleIndexTree _ _
        = panic "PPC.CodeGen.mangleIndexTree: no match"
-- -----------------------------------------------------------------------------
-- Code gen for 64-bit arithmetic on 32-bit platforms
{-
Simple support for generating 64-bit code (ie, 64 bit values and 64
bit assignments) on 32-bit platforms. Unlike the main code generator
we merely shoot for generating working code as simply as possible, and
pay little attention to code quality. Specifically, there is no
attempt to deal cleverly with the fixed-vs-floating register
distinction; all values are generated into (pairs of) floating
registers, even if this would mean some redundant reg-reg moves as a
result. Only one of the VRegUniques is returned, since it will be
of the VRegUniqueLo form, and the upper-half VReg can be determined
by applying getHiVRegFromLo to it.
-}
-- | The result of selecting instructions for a 64-bit value on a
-- 32-bit target ("Register64").
data ChildCode64        -- a.k.a "Register64"
  = ChildCode64
       InstrBlock       -- code
       Reg              -- the lower 32-bit temporary which contains the
                        -- result; use getHiVRegFromLo to find the other
                        -- VRegUnique.  Rules of this simplified insn
                        -- selection game are therefore that the returned
                        -- Reg may be modified
-- | The dual to getAnyReg: compute an expression into a register, but
-- we don't mind which one it is.
getSomeReg :: CmmExpr -> NatM (Reg, InstrBlock)
getSomeReg expr = do
  register <- getRegister expr
  case register of
    Fixed _ reg code -> return (reg, code)
    Any rep code -> do
      tmp <- getNewRegNat rep
      return (tmp, code tmp)
-- | Compute the two addressing modes needed to touch a 64-bit value in
-- memory -- (high word address, low word address, setup code) -- using
-- a 4-byte offset when the base mode allows it, otherwise forcing the
-- address into a register first.
getI64Amodes :: CmmExpr -> NatM (AddrMode, AddrMode, InstrBlock)
getI64Amodes addrTree = do
    Amode hi_addr addr_code <- getAmode addrTree
    case addrOffset hi_addr 4 of
        Just lo_addr -> return (hi_addr, lo_addr, addr_code)
        Nothing      -> do (hi_ptr, code) <- getSomeReg addrTree
                           return (AddrRegImm hi_ptr (ImmInt 0),
                                   AddrRegImm hi_ptr (ImmInt 4),
                                   code)
-- | Store a 64-bit value (as a register pair) to memory on a 32-bit
-- target.
assignMem_I64Code :: CmmExpr -> CmmExpr -> NatM InstrBlock
assignMem_I64Code addrTree valueTree = do
    (hi_addr, lo_addr, addr_code) <- getI64Amodes addrTree
    ChildCode64 vcode rlo <- iselExpr64 valueTree
    let rhi = getHiVRegFromLo rlo

        -- Big-endian store
        mov_hi = ST II32 rhi hi_addr
        mov_lo = ST II32 rlo lo_addr
    return (vcode `appOL` addr_code `snocOL` mov_lo `snocOL` mov_hi)
-- | Assign a 64-bit value to a local register pair on a 32-bit target.
-- Only local registers are valid lvalues here.
assignReg_I64Code :: CmmReg -> CmmExpr -> NatM InstrBlock
assignReg_I64Code (CmmLocal (LocalReg u_dst _)) valueTree = do
    ChildCode64 vcode r_src_lo <- iselExpr64 valueTree
    let r_dst_lo = RegVirtual $ mkVirtualReg u_dst II32
        r_dst_hi = getHiVRegFromLo r_dst_lo
        r_src_hi = getHiVRegFromLo r_src_lo
        mov_lo = MR r_dst_lo r_src_lo
        mov_hi = MR r_dst_hi r_src_hi
    return (
        vcode `snocOL` mov_lo `snocOL` mov_hi
      )

assignReg_I64Code _ _
  = panic "assignReg_I64Code(powerpc): invalid lvalue"
-- | Instruction selection for 64-bit expressions on a 32-bit target;
-- the result lives in a (lo, hi) pair of 32-bit virtual registers.
iselExpr64 :: CmmExpr -> NatM ChildCode64

iselExpr64 (CmmLoad addrTree ty) | isWord64 ty = do
    (hi_addr, lo_addr, addr_code) <- getI64Amodes addrTree
    (rlo, rhi) <- getNewRegPairNat II32
    let mov_hi = LD II32 rhi hi_addr
        mov_lo = LD II32 rlo lo_addr
    return $ ChildCode64 (addr_code `snocOL` mov_lo `snocOL` mov_hi)
                         rlo

iselExpr64 (CmmReg (CmmLocal (LocalReg vu ty))) | isWord64 ty
   = return (ChildCode64 nilOL (RegVirtual $ mkVirtualReg vu II32))

-- Build a 64-bit literal 16 bits at a time (LIS loads the upper half
-- of each word, OR fills in the lower half).
iselExpr64 (CmmLit (CmmInt i _)) = do
  (rlo,rhi) <- getNewRegPairNat II32
  let half0 = fromIntegral (fromIntegral i :: Word16)
      half1 = fromIntegral (fromIntegral (i `shiftR` 16) :: Word16)
      half2 = fromIntegral (fromIntegral (i `shiftR` 32) :: Word16)
      half3 = fromIntegral (fromIntegral (i `shiftR` 48) :: Word16)

      code = toOL [
              LIS rlo (ImmInt half1),
              OR rlo rlo (RIImm $ ImmInt half0),
              LIS rhi (ImmInt half3),
              OR rhi rhi (RIImm $ ImmInt half2)
              ]
  return (ChildCode64 code rlo)

-- 64-bit add: add-carrying the low words, then add-extended the highs.
iselExpr64 (CmmMachOp (MO_Add _) [e1,e2]) = do
   ChildCode64 code1 r1lo <- iselExpr64 e1
   ChildCode64 code2 r2lo <- iselExpr64 e2
   (rlo,rhi) <- getNewRegPairNat II32
   let r1hi = getHiVRegFromLo r1lo
       r2hi = getHiVRegFromLo r2lo
       code = code1 `appOL`
              code2 `appOL`
              toOL [ ADDC rlo r1lo r2lo,
                     ADDE rhi r1hi r2hi ]
   return (ChildCode64 code rlo)

-- 64-bit subtract via subtract-from with carry/extend.
iselExpr64 (CmmMachOp (MO_Sub _) [e1,e2]) = do
   ChildCode64 code1 r1lo <- iselExpr64 e1
   ChildCode64 code2 r2lo <- iselExpr64 e2
   (rlo,rhi) <- getNewRegPairNat II32
   let r1hi = getHiVRegFromLo r1lo
       r2hi = getHiVRegFromLo r2lo
       code = code1 `appOL`
              code2 `appOL`
              toOL [ SUBFC rlo r2lo r1lo,
                     SUBFE rhi r2hi r1hi ]
   return (ChildCode64 code rlo)

-- Zero-extension: copy the word into the low register, zero the high.
iselExpr64 (CmmMachOp (MO_UU_Conv W32 W64) [expr]) = do
    (expr_reg,expr_code) <- getSomeReg expr
    (rlo, rhi) <- getNewRegPairNat II32
    let mov_hi = LI rhi (ImmInt 0)
        mov_lo = MR rlo expr_reg
    return $ ChildCode64 (expr_code `snocOL` mov_lo `snocOL` mov_hi)
                         rlo

iselExpr64 expr
   = pprPanic "iselExpr64(powerpc)" (pprExpr expr)
-- | Select instructions for an expression; fetches the 'DynFlags' and
-- delegates to 'getRegister''.
getRegister :: CmmExpr -> NatM Register
getRegister e = getDynFlags >>= \dflags -> getRegister' dflags e
-- | The main instruction selector for expressions: produce a 'Register'
-- (either 'Any', letting the caller pick a destination, or 'Fixed').
getRegister' :: DynFlags -> CmmExpr -> NatM Register

getRegister' _ (CmmReg (CmmGlobal PicBaseReg))
  = do
      reg <- getPicBaseNat archWordSize
      return (Fixed archWordSize reg nilOL)

getRegister' dflags (CmmReg reg)
  = return (Fixed (cmmTypeSize (cmmRegType dflags reg))
                  (getRegisterReg (targetPlatform dflags) reg) nilOL)

getRegister' dflags tree@(CmmRegOff _ _)
  = getRegister' dflags (mangleIndexTree dflags tree)

-- for 32-bit architectures, support some 64 -> 32 bit conversions:
-- TO_W_(x), TO_W_(x >> 32)

getRegister' dflags (CmmMachOp (MO_UU_Conv W64 W32)
                     [CmmMachOp (MO_U_Shr W64) [x,CmmLit (CmmInt 32 _)]])
 | target32Bit (targetPlatform dflags) = do
      ChildCode64 code rlo <- iselExpr64 x
      return $ Fixed II32 (getHiVRegFromLo rlo) code

getRegister' dflags (CmmMachOp (MO_SS_Conv W64 W32)
                     [CmmMachOp (MO_U_Shr W64) [x,CmmLit (CmmInt 32 _)]])
 | target32Bit (targetPlatform dflags) = do
      ChildCode64 code rlo <- iselExpr64 x
      return $ Fixed II32 (getHiVRegFromLo rlo) code

getRegister' dflags (CmmMachOp (MO_UU_Conv W64 W32) [x])
 | target32Bit (targetPlatform dflags) = do
      ChildCode64 code rlo <- iselExpr64 x
      return $ Fixed II32 rlo code

getRegister' dflags (CmmMachOp (MO_SS_Conv W64 W32) [x])
 | target32Bit (targetPlatform dflags) = do
      ChildCode64 code rlo <- iselExpr64 x
      return $ Fixed II32 rlo code

getRegister' dflags (CmmLoad mem pk)
 | not (isWord64 pk)
 = do
        let platform = targetPlatform dflags
        Amode addr addr_code <- getAmode mem
        let code dst = ASSERT((targetClassOfReg platform dst == RcDouble) == isFloatType pk)
                       addr_code `snocOL` LD size dst addr
        return (Any size code)
          where size = cmmTypeSize pk

-- catch simple cases of zero- or sign-extended load
getRegister' _ (CmmMachOp (MO_UU_Conv W8 W32) [CmmLoad mem _]) = do
    Amode addr addr_code <- getAmode mem
    return (Any II32 (\dst -> addr_code `snocOL` LD II8 dst addr))

-- Note: there is no Load Byte Arithmetic instruction, so no signed case here

getRegister' _ (CmmMachOp (MO_UU_Conv W16 W32) [CmmLoad mem _]) = do
    Amode addr addr_code <- getAmode mem
    return (Any II32 (\dst -> addr_code `snocOL` LD II16 dst addr))

getRegister' _ (CmmMachOp (MO_SS_Conv W16 W32) [CmmLoad mem _]) = do
    Amode addr addr_code <- getAmode mem
    return (Any II32 (\dst -> addr_code `snocOL` LA II16 dst addr))

getRegister' dflags (CmmMachOp mop [x]) -- unary MachOps
  = case mop of
      MO_Not rep -> triv_ucode_int rep NOT

      MO_F_Neg w -> triv_ucode_float w FNEG
      MO_S_Neg w -> triv_ucode_int w NEG

      MO_FF_Conv W64 W32 -> trivialUCode FF32 FRSP x
      MO_FF_Conv W32 W64 -> conversionNop FF64 x

      MO_FS_Conv from to -> coerceFP2Int from to x
      MO_SF_Conv from to -> coerceInt2FP from to x

      MO_SS_Conv from to
        | from == to -> conversionNop (intSize to) x

      -- narrowing is a nop: we treat the high bits as undefined
      MO_SS_Conv W32 to -> conversionNop (intSize to) x
      MO_SS_Conv W16 W8 -> conversionNop II8 x
      MO_SS_Conv W8 to -> triv_ucode_int to (EXTS II8)
      MO_SS_Conv W16 to -> triv_ucode_int to (EXTS II16)

      MO_UU_Conv from to
        | from == to -> conversionNop (intSize to) x

      -- narrowing is a nop: we treat the high bits as undefined
      MO_UU_Conv W32 to -> conversionNop (intSize to) x
      MO_UU_Conv W16 W8 -> conversionNop II8 x
      -- widening of unsigned narrow values: mask off the upper bits
      MO_UU_Conv W8 to -> trivialCode to False AND x (CmmLit (CmmInt 255 W32))
      MO_UU_Conv W16 to -> trivialCode to False AND x (CmmLit (CmmInt 65535 W32))
      _ -> panic "PPC.CodeGen.getRegister: no match"

    where
      triv_ucode_int   width instr = trivialUCode (intSize width) instr x
      triv_ucode_float width instr = trivialUCode (floatSize width) instr x

      conversionNop new_size expr
          = do e_code <- getRegister' dflags expr
               return (swizzleRegisterRep e_code new_size)

getRegister' _ (CmmMachOp mop [x, y]) -- dyadic PrimOps
  = case mop of
      MO_F_Eq _ -> condFltReg EQQ x y
      MO_F_Ne _ -> condFltReg NE x y
      MO_F_Gt _ -> condFltReg GTT x y
      MO_F_Ge _ -> condFltReg GE x y
      MO_F_Lt _ -> condFltReg LTT x y
      MO_F_Le _ -> condFltReg LE x y

      -- Integer comparisons widen their operands to 32 bits first
      -- (sign- or zero-extending as appropriate for the operator).
      MO_Eq rep -> condIntReg EQQ (extendUExpr rep x) (extendUExpr rep y)
      MO_Ne rep -> condIntReg NE (extendUExpr rep x) (extendUExpr rep y)

      MO_S_Gt rep -> condIntReg GTT (extendSExpr rep x) (extendSExpr rep y)
      MO_S_Ge rep -> condIntReg GE (extendSExpr rep x) (extendSExpr rep y)
      MO_S_Lt rep -> condIntReg LTT (extendSExpr rep x) (extendSExpr rep y)
      MO_S_Le rep -> condIntReg LE (extendSExpr rep x) (extendSExpr rep y)

      MO_U_Gt rep -> condIntReg GU (extendUExpr rep x) (extendUExpr rep y)
      MO_U_Ge rep -> condIntReg GEU (extendUExpr rep x) (extendUExpr rep y)
      MO_U_Lt rep -> condIntReg LU (extendUExpr rep x) (extendUExpr rep y)
      MO_U_Le rep -> condIntReg LEU (extendUExpr rep x) (extendUExpr rep y)

      MO_F_Add w -> triv_float w FADD
      MO_F_Sub w -> triv_float w FSUB
      MO_F_Mul w -> triv_float w FMUL
      MO_F_Quot w -> triv_float w FDIV

      -- optimize addition with 32-bit immediate
      -- (needed for PIC)
      MO_Add W32 ->
        case y of
          CmmLit (CmmInt imm immrep) | Just _ <- makeImmediate W32 True (-imm)
            -> trivialCode W32 True ADD x (CmmLit $ CmmInt imm immrep)
          CmmLit lit
            -> do
                (src, srcCode) <- getSomeReg x
                let imm = litToImm lit
                    code dst = srcCode `appOL` toOL [
                                   ADDIS dst src (HA imm),
                                   ADD dst dst (RIImm (LO imm))
                               ]
                return (Any II32 code)
          _ -> trivialCode W32 True ADD x y

      MO_Add rep -> trivialCode rep True ADD x y

      MO_Sub rep ->
        case y of  -- subfi ('substract from' with immediate) doesn't exist
          CmmLit (CmmInt imm immrep) | Just _ <- makeImmediate rep True (-imm)
            -> trivialCode rep True ADD x (CmmLit $ CmmInt (-imm) immrep)
          _ -> trivialCodeNoImm' (intSize rep) SUBF y x

      MO_Mul rep -> trivialCode rep True MULLW x y

      MO_S_MulMayOflo W32 -> trivialCodeNoImm' II32 MULLW_MayOflo x y

      MO_S_MulMayOflo _ -> panic "S_MulMayOflo (rep /= II32): not implemented"
      MO_U_MulMayOflo _ -> panic "U_MulMayOflo: not implemented"

      MO_S_Quot rep -> trivialCodeNoImm' (intSize rep) DIVW (extendSExpr rep x) (extendSExpr rep y)
      MO_U_Quot rep -> trivialCodeNoImm' (intSize rep) DIVWU (extendUExpr rep x) (extendUExpr rep y)

      MO_S_Rem rep -> remainderCode rep DIVW (extendSExpr rep x) (extendSExpr rep y)
      MO_U_Rem rep -> remainderCode rep DIVWU (extendUExpr rep x) (extendUExpr rep y)

      MO_And rep -> trivialCode rep False AND x y
      MO_Or rep -> trivialCode rep False OR x y
      MO_Xor rep -> trivialCode rep False XOR x y

      MO_Shl rep -> trivialCode rep False SLW x y
      MO_S_Shr rep -> trivialCode rep False SRAW (extendSExpr rep x) y
      MO_U_Shr rep -> trivialCode rep False SRW (extendUExpr rep x) y
      _ -> panic "PPC.CodeGen.getRegister: no match"

    where
      triv_float :: Width -> (Size -> Reg -> Reg -> Reg -> Instr) -> NatM Register
      triv_float width instr = trivialCodeNoImm (floatSize width) instr x y

-- Small integer literals fit in a single load-immediate.
getRegister' _ (CmmLit (CmmInt i rep))
  | Just imm <- makeImmediate rep True i
  = let
        code dst = unitOL (LI dst imm)
    in
        return (Any (intSize rep) code)

-- Float literals go into a read-only data section and are loaded
-- through a dynamic reference.
getRegister' _ (CmmLit (CmmFloat f frep)) = do
    lbl <- getNewLabelNat
    dflags <- getDynFlags
    dynRef <- cmmMakeDynamicReference dflags DataReference lbl
    Amode addr addr_code <- getAmode dynRef
    let size = floatSize frep
        code dst =
            LDATA ReadOnlyData (Statics lbl
                                   [CmmStaticLit (CmmFloat f frep)])
            `consOL` (addr_code `snocOL` LD size dst addr)
    return (Any size code)

-- Any other literal is materialised as high-adjusted + low halves.
getRegister' dflags (CmmLit lit)
  = let rep = cmmLitType dflags lit
        imm = litToImm lit
        code dst = toOL [
              LIS dst (HA imm),
              ADD dst dst (RIImm (LO imm))
          ]
    in return (Any (cmmTypeSize rep) code)

getRegister' _ other = pprPanic "getRegister(ppc)" (pprExpr other)
-- | Wrap an integer expression of the given width in a sign-extending
-- conversion to 32 bits; at W32 this is the identity.
extendSExpr :: Width -> CmmExpr -> CmmExpr
extendSExpr rep x =
  case rep of
    W32 -> x
    _   -> CmmMachOp (MO_SS_Conv rep W32) [x]
-- | Wrap an integer expression of the given width in a zero-extending
-- conversion to 32 bits; at W32 this is the identity.
extendUExpr :: Width -> CmmExpr -> CmmExpr
extendUExpr rep x =
  case rep of
    W32 -> x
    _   -> CmmMachOp (MO_UU_Conv rep W32) [x]
-- -----------------------------------------------------------------------------
-- The 'Amode' type: Memory addressing modes passed up the tree.
-- | An addressing mode together with the code needed to set it up.
data Amode
        = Amode AddrMode InstrBlock
{-
Now, given a tree (the argument to an CmmLoad) that references memory,
produce a suitable addressing mode.
A Rule of the Game (tm) for Amodes: use of the addr bit must
immediately follow use of the code part, since the code part puts
values in registers which the addr then refers to. So you can't put
anything in between, lest it overwrite some of those registers. If
you need to do some other computation between the code part and use of
the addr bit, first store the effective address from the amode in a
temporary, then do the other computation, and then use the temporary:
code
LEA amode, tmp
... other computation ...
... (tmp) ...
-}
-- | Turn an address expression into an addressing mode plus the code
-- that sets it up, exploiting reg+imm and reg+reg forms when possible.
getAmode :: CmmExpr -> NatM Amode

getAmode tree@(CmmRegOff _ _) = do dflags <- getDynFlags
                                   getAmode (mangleIndexTree dflags tree)

-- Subtraction of a small constant folds into a negative immediate.
getAmode (CmmMachOp (MO_Sub W32) [x, CmmLit (CmmInt i _)])
  | Just off <- makeImmediate W32 True (-i)
  = do
        (reg, code) <- getSomeReg x
        return (Amode (AddrRegImm reg off) code)

getAmode (CmmMachOp (MO_Add W32) [x, CmmLit (CmmInt i _)])
  | Just off <- makeImmediate W32 True i
  = do
        (reg, code) <- getSomeReg x
        return (Amode (AddrRegImm reg off) code)

   -- optimize addition with 32-bit immediate
   -- (needed for PIC)
getAmode (CmmMachOp (MO_Add W32) [x, CmmLit lit])
  = do
        tmp <- getNewRegNat II32
        (src, srcCode) <- getSomeReg x
        let imm = litToImm lit
            code = srcCode `snocOL` ADDIS tmp src (HA imm)
        return (Amode (AddrRegImm tmp (LO imm)) code)

-- A bare literal: load the high-adjusted half, offset by the low half.
getAmode (CmmLit lit)
  = do
        tmp <- getNewRegNat II32
        let imm = litToImm lit
            code = unitOL (LIS tmp (HA imm))
        return (Amode (AddrRegImm tmp (LO imm)) code)

getAmode (CmmMachOp (MO_Add W32) [x, y])
  = do
        (regX, codeX) <- getSomeReg x
        (regY, codeY) <- getSomeReg y
        return (Amode (AddrRegReg regX regY) (codeX `appOL` codeY))

-- Fallback: evaluate into a register and use a zero offset.
getAmode other
  = do
        (reg, code) <- getSomeReg other
        let off = ImmInt 0
        return (Amode (AddrRegImm reg off) code)
-- The 'CondCode' type: Condition codes passed up the tree.
data CondCode
= CondCode Bool Cond InstrBlock
-- Set up a condition code for a conditional branch.

getCondCode :: CmmExpr -> NatM CondCode

-- almost the same as everywhere else - but we need to
-- extend small integers to 32 bit first
-- (signed comparisons sign-extend, unsigned and equality zero-extend;
-- see extendSExpr/extendUExpr above)

getCondCode (CmmMachOp mop [x, y])
  = case mop of
      MO_F_Eq W32 -> condFltCode EQQ x y
      MO_F_Ne W32 -> condFltCode NE  x y
      MO_F_Gt W32 -> condFltCode GTT x y
      MO_F_Ge W32 -> condFltCode GE  x y
      MO_F_Lt W32 -> condFltCode LTT x y
      MO_F_Le W32 -> condFltCode LE  x y

      MO_F_Eq W64 -> condFltCode EQQ x y
      MO_F_Ne W64 -> condFltCode NE  x y
      MO_F_Gt W64 -> condFltCode GTT x y
      MO_F_Ge W64 -> condFltCode GE  x y
      MO_F_Lt W64 -> condFltCode LTT x y
      MO_F_Le W64 -> condFltCode LE  x y

      MO_Eq rep   -> condIntCode EQQ (extendUExpr rep x) (extendUExpr rep y)
      MO_Ne rep   -> condIntCode NE  (extendUExpr rep x) (extendUExpr rep y)

      MO_S_Gt rep -> condIntCode GTT (extendSExpr rep x) (extendSExpr rep y)
      MO_S_Ge rep -> condIntCode GE  (extendSExpr rep x) (extendSExpr rep y)
      MO_S_Lt rep -> condIntCode LTT (extendSExpr rep x) (extendSExpr rep y)
      MO_S_Le rep -> condIntCode LE  (extendSExpr rep x) (extendSExpr rep y)

      MO_U_Gt rep -> condIntCode GU  (extendUExpr rep x) (extendUExpr rep y)
      MO_U_Ge rep -> condIntCode GEU (extendUExpr rep x) (extendUExpr rep y)
      MO_U_Lt rep -> condIntCode LU  (extendUExpr rep x) (extendUExpr rep y)
      MO_U_Le rep -> condIntCode LEU (extendUExpr rep x) (extendUExpr rep y)

      _ -> pprPanic "getCondCode(powerpc)" (pprMachOp mop)

getCondCode _ = panic "getCondCode(2)(powerpc)"

-- @cond(Int|Flt)Code@: Turn a boolean expression into a condition, to be
-- passed back up the tree.

condIntCode, condFltCode :: Cond -> CmmExpr -> CmmExpr -> NatM CondCode

-- ###FIXME: I16 and I8!
-- Integer comparison against an immediate that fits the instruction:
-- choose CMPL (unsigned) or CMP (signed) based on the condition.
condIntCode cond x (CmmLit (CmmInt y rep))
  | Just src2 <- makeImmediate rep (not $ condUnsigned cond) y
  = do
        (src1, code) <- getSomeReg x
        let
            code' = code `snocOL`
                (if condUnsigned cond then CMPL else CMP) II32 src1 (RIImm src2)
        return (CondCode False cond code')

-- General register/register integer comparison.
condIntCode cond x y = do
    (src1, code1) <- getSomeReg x
    (src2, code2) <- getSomeReg y
    let
        code' = code1 `appOL` code2 `snocOL`
            (if condUnsigned cond then CMPL else CMP) II32 src1 (RIReg src2)
    return (CondCode False cond code')

-- Floating-point comparison; result marked True (float) in CondCode.
condFltCode cond x y = do
    (src1, code1) <- getSomeReg x
    (src2, code2) <- getSomeReg y
    let
        code'  = code1 `appOL` code2 `snocOL` FCMP src1 src2
        code'' = case cond of -- twiddle CR to handle unordered case
                    -- GE/LE must not succeed when the FCMP result is
                    -- unordered, hence the CRNOR fixups.
                    GE -> code' `snocOL` CRNOR ltbit eqbit gtbit
                    LE -> code' `snocOL` CRNOR gtbit eqbit ltbit
                    _  -> code'
                 where
                    -- CR field bit positions: LT=0, GT=1, EQ=2
                    ltbit = 0 ; eqbit = 2 ; gtbit = 1
    return (CondCode True cond code'')
-- -----------------------------------------------------------------------------
-- Generating assignments

-- Assignments are really at the heart of the whole code generation
-- business. Almost all top-level nodes of any real importance are
-- assignments, which correspond to loads, stores, or register
-- transfers. If we're really lucky, some of the register transfers
-- will go away, because we can use the destination register to
-- complete the code generation for the right hand side. This only
-- fails when the right hand side is forced into a fixed register
-- (e.g. the result of a call).

assignMem_IntCode :: Size -> CmmExpr -> CmmExpr -> NatM InstrBlock
assignReg_IntCode :: Size -> CmmReg  -> CmmExpr -> NatM InstrBlock
assignMem_FltCode :: Size -> CmmExpr -> CmmExpr -> NatM InstrBlock
assignReg_FltCode :: Size -> CmmReg  -> CmmExpr -> NatM InstrBlock

-- | Store @src@ at the address denoted by @addr@: evaluate the source
-- first, then the address, then emit a single ST.
assignMem_IntCode pk addr src = do
    (srcReg, code) <- getSomeReg src
    Amode dstAddr addr_code <- getAmode addr
    return $ code `appOL` addr_code `snocOL` ST pk srcReg dstAddr

-- dst is a reg, but src could be anything
-- An Any result is materialised directly into dst; a Fixed result is
-- copied with an MR (move register).
assignReg_IntCode _ reg src
    = do
        dflags <- getDynFlags
        let dst = getRegisterReg (targetPlatform dflags) reg
        r <- getRegister src
        return $ case r of
            Any _ code -> code dst
            Fixed _ freg fcode -> fcode `snocOL` MR dst freg

-- Easy, isn't it?
-- PPC load/store handling is size-directed, so the float variants are
-- literally the integer variants.
assignMem_FltCode = assignMem_IntCode
assignReg_FltCode = assignReg_IntCode

-- | Unconditional jump to the branch target. A known label becomes a
-- direct JMP; anything else goes through CTR (MTCTR + BCTR).
genJump :: CmmExpr{-the branch target-} -> NatM InstrBlock

genJump (CmmLit (CmmLabel lbl))
  = return (unitOL $ JMP lbl)

genJump tree
  = do
        (target,code) <- getSomeReg tree
        return (code `snocOL` MTCTR target `snocOL` BCTR [] Nothing)

-- -----------------------------------------------------------------------------
--  Unconditional branches

genBranch :: BlockId -> NatM InstrBlock
genBranch = return . toOL . mkJumpInstr
-- -----------------------------------------------------------------------------
--  Conditional jumps

{-
Conditional jumps are always to local labels, so we can use branch
instructions.  We peek at the arguments to decide what kind of
comparison to do.
-}

genCondJump
    :: BlockId      -- the branch target
    -> CmmExpr      -- the condition on which to branch
    -> NatM InstrBlock

-- Evaluate the condition via getCondCode, then branch-on-condition.
genCondJump id bool = do
  CondCode _ cond code <- getCondCode bool
  return (code `snocOL` BCC cond id)

-- -----------------------------------------------------------------------------
--  Generating C calls

-- Now the biggest nightmare---calls.  Most of the nastiness is buried in
-- @get_arg@, which moves the arguments to the correct registers/stack
-- locations.  Apart from that, the code is easy.
--
-- (If applicable) Do not fill the delay slots here; you will confuse the
-- register allocator.

genCCall :: ForeignTarget      -- function to call
         -> [CmmFormal]        -- where to put the result
         -> [CmmActual]        -- arguments (of mixed type)
         -> NatM InstrBlock

-- Dispatch on the target OS; only Linux and Darwin conventions are
-- implemented (see GenCCallPlatform and genCCall').
genCCall target dest_regs argsAndHints
 = do dflags <- getDynFlags
      let platform = targetPlatform dflags
      case platformOS platform of
          OSLinux  -> genCCall' dflags GCPLinux  target dest_regs argsAndHints
          OSDarwin -> genCCall' dflags GCPDarwin target dest_regs argsAndHints
          _ -> panic "PPC.CodeGen.genCCall: not defined for this os"

-- | Which PPC C calling convention to follow (see the long comment
-- before genCCall' for the differences).
data GenCCallPlatform = GCPLinux | GCPDarwin

genCCall'
    :: DynFlags
    -> GenCCallPlatform
    -> ForeignTarget      -- function to call
    -> [CmmFormal]        -- where to put the result
    -> [CmmActual]        -- arguments (of mixed type)
    -> NatM InstrBlock
{-
The PowerPC calling convention for Darwin/Mac OS X
is described in Apple's document
"Inside Mac OS X - Mach-O Runtime Architecture".
PowerPC Linux uses the System V Release 4 Calling Convention
for PowerPC. It is described in the
"System V Application Binary Interface PowerPC Processor Supplement".
Both conventions are similar:
Parameters may be passed in general-purpose registers starting at r3, in
floating point registers starting at f1, or on the stack.
But there are substantial differences:
* The number of registers used for parameter passing and the exact set of
nonvolatile registers differs (see MachRegs.lhs).
* On Darwin, stack space is always reserved for parameters, even if they are
passed in registers. The called routine may choose to save parameters from
registers to the corresponding space on the stack.
* On Darwin, a corresponding amount of GPRs is skipped when a floating point
parameter is passed in an FPR.
* SysV insists on either passing I64 arguments on the stack, or in two GPRs,
starting with an odd-numbered GPR. It may skip a GPR to achieve this.
Darwin just treats an I64 like two separate II32s (high word first).
* I64 and FF64 arguments are 8-byte aligned on the stack for SysV, but only
4-byte aligned like everything else on Darwin.
* The SysV spec claims that FF32 is represented as FF64 on the stack. GCC on
PowerPC Linux does not agree, so neither do we.
According to both conventions, the parameter area should be part of the
caller's stack frame, allocated in the caller's prologue code (large enough
to hold the parameter lists for all called routines). The NCG already
uses the stack for register spilling, leaving 64 bytes free at the top.
If we need a larger parameter area than that, we just allocate a new stack
frame just before ccalling.
-}
-- Memory-barrier and no-op primitives need no call at all.
genCCall' _ _ (PrimTarget MO_WriteBarrier) _ _
 = return $ unitOL LWSYNC

genCCall' _ _ (PrimTarget MO_Touch) _ _
 = return $ nilOL

genCCall' _ _ (PrimTarget (MO_Prefetch_Data _)) _ _
 = return $ nilOL

-- The general case: place arguments (passArguments), adjust SP if the
-- parameter area outgrows the 64 bytes the NCG leaves free, emit the
-- call (direct BL or indirect via CTR), then move results back.
genCCall' dflags gcp target dest_regs args0
  = ASSERT(not $ any (`elem` [II16]) $ map cmmTypeSize argReps)
        -- we rely on argument promotion in the codeGen
    do
        (finalStack,passArgumentsCode,usedRegs) <- passArguments
                                                       (zip args argReps)
                                                       allArgRegs
                                                       (allFPArgRegs platform)
                                                       initialStackOffset
                                                       (toOL []) []

        (labelOrExpr, reduceToFF32) <- case target of
            ForeignTarget (CmmLit (CmmLabel lbl)) _ -> do
                uses_pic_base_implicitly
                return (Left lbl, False)
            ForeignTarget expr _ -> do
                uses_pic_base_implicitly
                return (Right expr, False)
            PrimTarget mop -> outOfLineMachOp mop

        let codeBefore = move_sp_down finalStack `appOL` passArgumentsCode
            codeAfter  = move_sp_up finalStack `appOL` moveResult reduceToFF32

        case labelOrExpr of
            Left lbl -> do -- the linker does all the work for us
                return (         codeBefore
                        `snocOL` BL lbl usedRegs
                        `appOL`  codeAfter)
            Right dyn -> do -- an indirect call via the count register
                (dynReg, dynCode) <- getSomeReg dyn
                return (         dynCode
                        `snocOL` MTCTR dynReg
                        `appOL`  codeBefore
                        `snocOL` BCTRL usedRegs
                        `appOL`  codeAfter)
    where
        platform = targetPlatform dflags

        uses_pic_base_implicitly = do
            -- See Note [implicit register in PPC PIC code]
            -- on why we claim to use PIC register here
            when (gopt Opt_PIC dflags) $ do
                _ <- getPicBaseNat archWordSize
                return ()

        -- Darwin reserves a 24-byte linkage area, SysV (Linux) 8 bytes.
        initialStackOffset = case gcp of
                             GCPDarwin -> 24
                             GCPLinux  -> 8
            -- size of linkage area + size of arguments, in bytes

        stackDelta finalStack = case gcp of
                                GCPDarwin ->
                                    roundTo 16 $ (24 +) $ max 32 $ sum $
                                    map (widthInBytes . typeWidth) argReps
                                GCPLinux -> roundTo 16 finalStack

        -- need to remove alignment information
        -- (memcpy/memset/memmove carry a trailing alignment argument
        -- that is not passed to the C function)
        args | PrimTarget mop <- target,
               (mop == MO_Memcpy ||
                mop == MO_Memset ||
                mop == MO_Memmove)
              = init args0

             | otherwise
              = args0

        argReps = map (cmmExprType dflags) args0

        -- round x up to the next multiple of a
        roundTo a x | x `mod` a == 0 = x
                    | otherwise = x + a - (x `mod` a)

        -- Only allocate a new frame when the parameter area exceeds the
        -- 64 bytes already left free at the top of the stack.
        move_sp_down finalStack
               | delta > 64 =
                        toOL [STU II32 sp (AddrRegImm sp (ImmInt (-delta))),
                              DELTA (-delta)]
               | otherwise = nilOL
               where delta = stackDelta finalStack
        move_sp_up finalStack
               | delta > 64 =
                        toOL [ADD sp sp (RIImm (ImmInt delta)),
                              DELTA 0]
               | otherwise = nilOL
               where delta = stackDelta finalStack

        -- passArguments: thread remaining (arg, rep) pairs through the
        -- available GPRs/FPRs and the stack offset, accumulating code
        -- and the list of registers the call uses.
        passArguments [] _ _ stackOffset accumCode accumUsed = return (stackOffset, accumCode, accumUsed)

        -- 64-bit integer arguments: split into hi/lo 32-bit halves.
        passArguments ((arg,arg_ty):args) gprs fprs stackOffset
               accumCode accumUsed | isWord64 arg_ty =
            do
                ChildCode64 code vr_lo <- iselExpr64 arg
                let vr_hi = getHiVRegFromLo vr_lo

                case gcp of
                    GCPDarwin ->
                        -- Darwin: two consecutive GPRs (or their reserved
                        -- stack slots), high word first.
                        do let storeWord vr (gpr:_) _ = MR gpr vr
                               storeWord vr [] offset
                                   = ST II32 vr (AddrRegImm sp (ImmInt offset))
                           passArguments args
                                         (drop 2 gprs)
                                         fprs
                                         (stackOffset+8)
                                         (accumCode `appOL` code
                                               `snocOL` storeWord vr_hi gprs stackOffset
                                               `snocOL` storeWord vr_lo (drop 1 gprs) (stackOffset+4))
                                         ((take 2 gprs) ++ accumUsed)
                    GCPLinux ->
                        -- SysV: either an odd-aligned GPR pair (skipping a
                        -- GPR if needed) or an 8-byte-aligned stack slot.
                        do let stackOffset' = roundTo 8 stackOffset
                               stackCode = accumCode `appOL` code
                                   `snocOL` ST II32 vr_hi (AddrRegImm sp (ImmInt stackOffset'))
                                   `snocOL` ST II32 vr_lo (AddrRegImm sp (ImmInt (stackOffset'+4)))
                               regCode hireg loreg =
                                   accumCode `appOL` code
                                       `snocOL` MR hireg vr_hi
                                       `snocOL` MR loreg vr_lo

                           case gprs of
                               hireg : loreg : regs | even (length gprs) ->
                                   passArguments args regs fprs stackOffset
                                                 (regCode hireg loreg) (hireg : loreg : accumUsed)
                               _skipped : hireg : loreg : regs ->
                                   passArguments args regs fprs stackOffset
                                                 (regCode hireg loreg) (hireg : loreg : accumUsed)
                               _ -> -- only one or no regs left
                                   passArguments args [] fprs (stackOffset'+8)
                                                 stackCode accumUsed

        -- All other argument types.
        passArguments ((arg,rep):args) gprs fprs stackOffset accumCode accumUsed
            | reg : _ <- regs = do
                -- a register of the right class is still available
                register <- getRegister arg
                let code = case register of
                            Fixed _ freg fcode -> fcode `snocOL` MR reg freg
                            Any _ acode -> acode reg
                    stackOffsetRes = case gcp of
                                     -- The Darwin ABI requires that we reserve
                                     -- stack slots for register parameters
                                     GCPDarwin -> stackOffset + stackBytes
                                     -- ... the SysV ABI doesn't.
                                     GCPLinux -> stackOffset
                passArguments args
                              (drop nGprs gprs)
                              (drop nFprs fprs)
                              stackOffsetRes
                              (accumCode `appOL` code)
                              (reg : accumUsed)
            | otherwise = do
                -- registers exhausted: pass on the stack
                (vr, code) <- getSomeReg arg
                passArguments args
                              (drop nGprs gprs)
                              (drop nFprs fprs)
                              (stackOffset' + stackBytes)
                              (accumCode `appOL` code `snocOL` ST (cmmTypeSize rep) vr stackSlot)
                              accumUsed
            where
                stackOffset' = case gcp of
                               GCPDarwin ->
                                   -- stackOffset is at least 4-byte aligned
                                   -- The Darwin ABI is happy with that.
                                   stackOffset
                               GCPLinux
                                   -- ... the SysV ABI requires 8-byte
                                   -- alignment for doubles.
                                   | isFloatType rep && typeWidth rep == W64 ->
                                       roundTo 8 stackOffset
                                   | otherwise ->
                                       stackOffset
                stackSlot = AddrRegImm sp (ImmInt stackOffset')
                -- per-type counts: GPRs consumed, FPRs consumed, stack
                -- bytes, and which register list to draw from
                (nGprs, nFprs, stackBytes, regs)
                    = case gcp of
                      GCPDarwin ->
                          case cmmTypeSize rep of
                          II8  -> (1, 0, 4, gprs)
                          II16 -> (1, 0, 4, gprs)
                          II32 -> (1, 0, 4, gprs)
                          -- The Darwin ABI requires that we skip a
                          -- corresponding number of GPRs when we use
                          -- the FPRs.
                          FF32 -> (1, 1, 4, fprs)
                          FF64 -> (2, 1, 8, fprs)
                          II64 -> panic "genCCall' passArguments II64"
                          FF80 -> panic "genCCall' passArguments FF80"
                      GCPLinux ->
                          case cmmTypeSize rep of
                          II8  -> (1, 0, 4, gprs)
                          II16 -> (1, 0, 4, gprs)
                          II32 -> (1, 0, 4, gprs)
                          -- ... the SysV ABI doesn't.
                          FF32 -> (0, 1, 4, fprs)
                          FF64 -> (0, 1, 8, fprs)
                          II64 -> panic "genCCall' passArguments II64"
                          FF80 -> panic "genCCall' passArguments FF80"

        -- Copy the C result (f1 for floats, r3/r4 for ints) into the
        -- requested destination register(s); FRSP narrows back to FF32
        -- for the float-typed out-of-line primitives.
        moveResult reduceToFF32 =
            case dest_regs of
                [] -> nilOL
                [dest]
                    | reduceToFF32 && isFloat32 rep   -> unitOL (FRSP r_dest f1)
                    | isFloat32 rep || isFloat64 rep -> unitOL (MR r_dest f1)
                    | isWord64 rep -> toOL [MR (getHiVRegFromLo r_dest) r3,
                                            MR r_dest r4]
                    | otherwise -> unitOL (MR r_dest r3)
                    where rep = cmmRegType dflags (CmmLocal dest)
                          r_dest = getRegisterReg platform (CmmLocal dest)
                _ -> panic "genCCall' moveResult: Bad dest_regs"

        -- Map a primitive machop to the label of its C implementation;
        -- the Bool says whether the result must be reduced back to FF32.
        outOfLineMachOp mop =
            do
                dflags <- getDynFlags
                mopExpr <- cmmMakeDynamicReference dflags CallReference $
                              mkForeignLabel functionName Nothing ForeignLabelInThisPackage IsFunction
                let mopLabelOrExpr = case mopExpr of
                        CmmLit (CmmLabel lbl) -> Left lbl
                        _ -> Right mopExpr
                return (mopLabelOrExpr, reduce)
            where
                (functionName, reduce) = case mop of
                    MO_F32_Exp   -> (fsLit "exp", True)
                    MO_F32_Log   -> (fsLit "log", True)
                    MO_F32_Sqrt  -> (fsLit "sqrt", True)

                    MO_F32_Sin   -> (fsLit "sin", True)
                    MO_F32_Cos   -> (fsLit "cos", True)
                    MO_F32_Tan   -> (fsLit "tan", True)

                    MO_F32_Asin  -> (fsLit "asin", True)
                    MO_F32_Acos  -> (fsLit "acos", True)
                    MO_F32_Atan  -> (fsLit "atan", True)

                    MO_F32_Sinh  -> (fsLit "sinh", True)
                    MO_F32_Cosh  -> (fsLit "cosh", True)
                    MO_F32_Tanh  -> (fsLit "tanh", True)
                    MO_F32_Pwr   -> (fsLit "pow", True)

                    MO_F64_Exp   -> (fsLit "exp", False)
                    MO_F64_Log   -> (fsLit "log", False)
                    MO_F64_Sqrt  -> (fsLit "sqrt", False)

                    MO_F64_Sin   -> (fsLit "sin", False)
                    MO_F64_Cos   -> (fsLit "cos", False)
                    MO_F64_Tan   -> (fsLit "tan", False)

                    MO_F64_Asin  -> (fsLit "asin", False)
                    MO_F64_Acos  -> (fsLit "acos", False)
                    MO_F64_Atan  -> (fsLit "atan", False)

                    MO_F64_Sinh  -> (fsLit "sinh", False)
                    MO_F64_Cosh  -> (fsLit "cosh", False)
                    MO_F64_Tanh  -> (fsLit "tanh", False)
                    MO_F64_Pwr   -> (fsLit "pow", False)

                    MO_UF_Conv w -> (fsLit $ word2FloatLabel w, False)

                    MO_Memcpy    -> (fsLit "memcpy", False)
                    MO_Memset    -> (fsLit "memset", False)
                    MO_Memmove   -> (fsLit "memmove", False)

                    MO_BSwap w   -> (fsLit $ bSwapLabel w, False)
                    MO_PopCnt w  -> (fsLit $ popCntLabel w, False)
                    MO_Clz w     -> (fsLit $ clzLabel w, False)
                    MO_Ctz w     -> (fsLit $ ctzLabel w, False)
                    MO_AtomicRMW w amop -> (fsLit $ atomicRMWLabel w amop, False)
                    MO_Cmpxchg w -> (fsLit $ cmpxchgLabel w, False)
                    MO_AtomicRead w  -> (fsLit $ atomicReadLabel w, False)
                    MO_AtomicWrite w -> (fsLit $ atomicWriteLabel w, False)

                    MO_S_QuotRem {}  -> unsupported
                    MO_U_QuotRem {}  -> unsupported
                    MO_U_QuotRem2 {} -> unsupported
                    MO_Add2 {}       -> unsupported
                    MO_AddIntC {}    -> unsupported
                    MO_SubIntC {}    -> unsupported
                    MO_U_Mul2 {}     -> unsupported
                    MO_WriteBarrier  -> unsupported
                    MO_Touch         -> unsupported
                    (MO_Prefetch_Data _) -> unsupported
                unsupported = panic ("outOfLineCmmOp: " ++ show mop
                                  ++ " not supported")
-- -----------------------------------------------------------------------------
-- Generating a table-branch

-- | Emit a jump-table switch: scale the scrutinee by 4 (SLW … 2), load
-- the table entry, and branch through CTR. The PIC variant uses
-- label-relative entries (loaded value is added to the table base);
-- the non-PIC variant stores absolute addresses.
genSwitch :: DynFlags -> CmmExpr -> [Maybe BlockId] -> NatM InstrBlock
genSwitch dflags expr ids
  | gopt Opt_PIC dflags
  = do
        (reg,e_code) <- getSomeReg expr
        tmp <- getNewRegNat II32
        lbl <- getNewLabelNat
        -- NOTE(review): getDynFlags here shadows the dflags parameter
        -- with (presumably) the same value -- harmless but redundant.
        dflags <- getDynFlags
        dynRef <- cmmMakeDynamicReference dflags DataReference lbl
        (tableReg,t_code) <- getSomeReg $ dynRef
        let code = e_code `appOL` t_code `appOL` toOL [
                            SLW tmp reg (RIImm (ImmInt 2)),
                            LD II32 tmp (AddrRegReg tableReg tmp),
                            ADD tmp tmp (RIReg tableReg),
                            MTCTR tmp,
                            BCTR ids (Just lbl)
                    ]
        return code
  | otherwise
  = do
        (reg,e_code) <- getSomeReg expr
        tmp <- getNewRegNat II32
        lbl <- getNewLabelNat
        let code = e_code `appOL` toOL [
                            SLW tmp reg (RIImm (ImmInt 2)),
                            ADDIS tmp tmp (HA (ImmCLbl lbl)),
                            LD II32 tmp (AddrRegImm tmp (LO (ImmCLbl lbl))),
                            MTCTR tmp,
                            BCTR ids (Just lbl)
                    ]
        return code

-- | Materialise the jump table for a BCTR emitted by genSwitch.
-- PIC tables hold label-relative offsets (0 for missing targets);
-- non-PIC tables hold absolute entries via jumpTableEntry.
generateJumpTableForInstr :: DynFlags -> Instr
                          -> Maybe (NatCmmDecl CmmStatics Instr)
generateJumpTableForInstr dflags (BCTR ids (Just lbl)) =
    let jumpTable
            | gopt Opt_PIC dflags = map jumpTableEntryRel ids
            | otherwise = map (jumpTableEntry dflags) ids
            where jumpTableEntryRel Nothing
                    = CmmStaticLit (CmmInt 0 (wordWidth dflags))
                  jumpTableEntryRel (Just blockid)
                    = CmmStaticLit (CmmLabelDiffOff blockLabel lbl 0)
                    where blockLabel = mkAsmTempLabel (getUnique blockid)
    in Just (CmmData ReadOnlyData (Statics lbl jumpTable))
generateJumpTableForInstr _ _ = Nothing
-- -----------------------------------------------------------------------------
-- 'condIntReg' and 'condFltReg': condition codes into registers

-- Turn those condition codes into integers now (when they appear on
-- the right hand side of an assignment).
--
-- (If applicable) Do not fill the delay slots here; you will confuse the
-- register allocator.

condIntReg, condFltReg :: Cond -> CmmExpr -> CmmExpr -> NatM Register

-- | Materialise a condition as a 0/1 value in a register: read the CR
-- with MFCR and shift the relevant CR bit into the low bit with RLWINM.
-- Conditions with no direct CR bit (LE, GE, NE, ...) first negate the
-- complementary bit with CRNOR.
condReg :: NatM CondCode -> NatM Register
condReg getCond = do
    CondCode _ cond cond_code <- getCond
    let
{-        code dst = cond_code `appOL` toOL [
                BCC cond lbl1,
                LI dst (ImmInt 0),
                BCC ALWAYS lbl2,
                NEWBLOCK lbl1,
                LI dst (ImmInt 1),
                BCC ALWAYS lbl2,
                NEWBLOCK lbl2
            ]-}
        code dst = cond_code
            `appOL` negate_code
            `appOL` toOL [
                MFCR dst,
                RLWINM dst dst (bit + 1) 31 31
            ]

        negate_code | do_negate = unitOL (CRNOR bit bit bit)
                    | otherwise = nilOL

        -- (CR bit to extract, whether it must be inverted first)
        (bit, do_negate) = case cond of
            LTT -> (0, False)
            LE  -> (1, True)
            EQQ -> (2, False)
            GE  -> (0, True)
            GTT -> (1, False)

            NE  -> (2, True)

            LU  -> (0, False)
            LEU -> (1, True)
            GEU -> (0, True)
            GU  -> (1, False)
            _   -> panic "PPC.CodeGen.codeReg: no match"

    return (Any II32 code)

condIntReg cond x y = condReg (condIntCode cond x y)
condFltReg cond x y = condReg (condFltCode cond x y)
-- -----------------------------------------------------------------------------
-- 'trivial*Code': deal with trivial instructions
-- Trivial (dyadic: 'trivialCode', floating-point: 'trivialFCode',
-- unary: 'trivialUCode', unary fl-pt:'trivialUFCode') instructions.
-- Only look for constants on the right hand side, because that's
-- where the generic optimizer will have put them.
-- Similarly, for unary instructions, we don't have to worry about
-- matching an StInt as the argument, because genericOpt will already
-- have handled the constant-folding.
{-
Wolfgang's PowerPC version of The Rules:
A slightly modified version of The Rules to take advantage of the fact
that PowerPC instructions work on all registers and don't implicitly
clobber any fixed registers.
* The only expression for which getRegister returns Fixed is (CmmReg reg).
* If getRegister returns Any, then the code it generates may modify only:
(a) fresh temporaries
(b) the destination register
It may *not* modify global registers, unless the global
register happens to be the destination register.
It may not clobber any other registers. In fact, only ccalls clobber any
fixed registers.
Also, it may not modify the counter register (used by genCCall).
Corollary: If a getRegister for a subexpression returns Fixed, you need
not move it to a fresh temporary before evaluating the next subexpression.
The Fixed register won't be modified.
Therefore, we don't need a counterpart for the x86's getStableReg on PPC.
* SDM's First Rule is valid for PowerPC, too: subexpressions can depend on
the value of the destination register.
-}
-- | Dyadic integer instruction. If the right operand is a literal that
-- fits the instruction's immediate field (per makeImmediate, respecting
-- signedness), use the immediate form; otherwise evaluate both operands
-- into registers.
trivialCode
        :: Width
        -> Bool
        -> (Reg -> Reg -> RI -> Instr)
        -> CmmExpr
        -> CmmExpr
        -> NatM Register

trivialCode rep signed instr x (CmmLit (CmmInt y _))
    | Just imm <- makeImmediate rep signed y
    = do
        (src1, code1) <- getSomeReg x
        let code dst = code1 `snocOL` instr dst src1 (RIImm imm)
        return (Any (intSize rep) code)

trivialCode rep _ instr x y = do
    (src1, code1) <- getSomeReg x
    (src2, code2) <- getSomeReg y
    let code dst = code1 `appOL` code2 `snocOL` instr dst src1 (RIReg src2)
    return (Any (intSize rep) code)

-- | Dyadic instruction with no immediate form; both operands go
-- through registers.
trivialCodeNoImm' :: Size -> (Reg -> Reg -> Reg -> Instr)
                  -> CmmExpr -> CmmExpr -> NatM Register
trivialCodeNoImm' size instr x y = do
    (src1, code1) <- getSomeReg x
    (src2, code2) <- getSomeReg y
    let code dst = code1 `appOL` code2 `snocOL` instr dst src1 src2
    return (Any size code)

-- | As trivialCodeNoImm', for instruction constructors that also take
-- the operand size.
trivialCodeNoImm :: Size -> (Size -> Reg -> Reg -> Reg -> Instr)
                 -> CmmExpr -> CmmExpr -> NatM Register
trivialCodeNoImm size instr x y = trivialCodeNoImm' size (instr size) x y

-- | Unary instruction: evaluate the operand, apply the instruction.
trivialUCode
        :: Size
        -> (Reg -> Reg -> Instr)
        -> CmmExpr
        -> NatM Register
trivialUCode rep instr x = do
    (src, code) <- getSomeReg x
    let code' dst = code `snocOL` instr dst src
    return (Any rep code')

-- There is no "remainder" instruction on the PPC, so we have to do
-- it the hard way.
-- The "div" parameter is the division instruction to use (DIVW or DIVWU)
-- Computes: dst = src1 - (src1 `div` src2) * src2

remainderCode :: Width -> (Reg -> Reg -> Reg -> Instr)
              -> CmmExpr -> CmmExpr -> NatM Register
remainderCode rep div x y = do
    (src1, code1) <- getSomeReg x
    (src2, code2) <- getSomeReg y
    let code dst = code1 `appOL` code2 `appOL` toOL [
                div dst src1 src2,
                MULLW dst dst (RIReg src2),
                SUBF dst dst src1
            ]
    return (Any (intSize rep) code)
-- | Int -> float conversion using the classic PPC bit trick: build the
-- double 0x43300000_80000000 (2^52 + 2^31) in a static table, splice
-- the (sign-flipped) integer into the low word of a stack-built double,
-- and subtract. Small widths are sign-extended first; a W32 float
-- result is narrowed with FRSP at the end.
coerceInt2FP :: Width -> Width -> CmmExpr -> NatM Register
coerceInt2FP fromRep toRep x = do
    (src, code) <- getSomeReg x
    lbl <- getNewLabelNat
    itmp <- getNewRegNat II32
    ftmp <- getNewRegNat FF64
    dflags <- getDynFlags
    dynRef <- cmmMakeDynamicReference dflags DataReference lbl
    Amode addr addr_code <- getAmode dynRef
    let
        code' dst = code `appOL` maybe_exts `appOL` toOL [
                LDATA ReadOnlyData $ Statics lbl
                                 [CmmStaticLit (CmmInt 0x43300000 W32),
                                  CmmStaticLit (CmmInt 0x80000000 W32)],
                XORIS itmp src (ImmInt 0x8000),      -- flip the sign bit
                ST II32 itmp (spRel dflags 3),       -- low word on stack
                LIS itmp (ImmInt 0x4330),
                ST II32 itmp (spRel dflags 2),       -- high word on stack
                LD FF64 ftmp (spRel dflags 2)
            ] `appOL` addr_code `appOL` toOL [
                LD FF64 dst addr,
                FSUB FF64 dst ftmp dst
            ] `appOL` maybe_frsp dst

        -- sign-extend sub-word sources up to W32
        maybe_exts = case fromRep of
                        W8 ->  unitOL $ EXTS II8 src src
                        W16 -> unitOL $ EXTS II16 src src
                        W32 -> nilOL
                        _  -> panic "PPC.CodeGen.coerceInt2FP: no match"

        -- round to single precision when the target is W32
        maybe_frsp dst
            = case toRep of
                        W32 -> unitOL $ FRSP dst dst
                        W64 -> nilOL
                        _  -> panic "PPC.CodeGen.coerceInt2FP: no match"

    return (Any (floatSize toRep) code')

-- | Float -> int conversion: FCTIWZ converts (truncating) inside an FP
-- register; the 64-bit result is bounced through the stack and only the
-- meaningful low word is read back.
coerceFP2Int :: Width -> Width -> CmmExpr -> NatM Register
coerceFP2Int _ toRep x = do
    dflags <- getDynFlags
    -- the reps don't really matter: F*->FF64 and II32->I* are no-ops
    (src, code) <- getSomeReg x
    tmp <- getNewRegNat FF64
    let
        code' dst = code `appOL` toOL [
                -- convert to int in FP reg
            FCTIWZ tmp src,
                -- store value (64bit) from FP to stack
            ST FF64 tmp (spRel dflags 2),
                -- read low word of value (high word is undefined)
            LD II32 dst (spRel dflags 3)]
    return (Any (intSize toRep) code')
-- Note [.LCTOC1 in PPC PIC code]
-- The .LCTOC1 label is defined to point 32768 bytes into the GOT table
-- to make the most of the PPC's 16-bit displacements.
-- As 16-bit signed offset is used (usually via addi/lwz instructions)
-- first element will have '-32768' offset against .LCTOC1.
-- Note [implicit register in PPC PIC code]
-- PPC generates calls by labels in assembly
-- in form of:
-- bl puts+32768@plt
-- in this form it's not seen directly (by GHC NCG)
-- that r30 (PicBaseReg) is used,
-- but r30 is a required part of PLT code setup:
-- puts+32768@plt:
-- lwz r11,-30484(r30) ; offset in .LCTOC1
-- mtctr r11
-- bctr
| forked-upstream-packages-for-ghcjs/ghc | compiler/nativeGen/PPC/CodeGen.hs | bsd-3-clause | 57,030 | 0 | 25 | 19,024 | 13,673 | 6,783 | 6,890 | -1 | -1 |
-- Minimal module (named after a GHC ticket number; presumably a GHCi
-- regression test -- the only content is one trivially typed binding).
module T15369 where

x :: Int
x = 1
| sdiehl/ghc | testsuite/tests/ghci/should_run/T15369.hs | bsd-3-clause | 35 | 0 | 4 | 9 | 14 | 9 | 5 | 3 | 1 |
{-# LANGUAGE TypeOperators #-}

-- NOTE(review): this module uses explicit layout (braces and
-- semicolons) and an unusual mix of declarations on purpose -- it
-- appears to be a GHC-API annotations test, so the formatting itself
-- is the thing under test; confirm before reformatting anything here.
module AnnotationLet (foo) where
{
import qualified Data.List as DL
;
foo = let
        a 0 = 1
        a _ = 2
        b = 2
      in a b
;
infixr 8 +
;
data ((f + g)) a = InL (f a) | InR (g a)
;
}
| siddhanathan/ghc | testsuite/tests/ghc-api/annotations/AnnotationLet.hs | bsd-3-clause | 231 | 2 | 9 | 83 | 90 | 56 | 34 | 10 | 2 |
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE Arrows #-}
module Yage.Rendering.Pipeline.Deferred.Bloom
( addBloom
) where
import Yage.Prelude hiding ((</>), foldM, cons, (++))
import Yage.Lens
import Yage.Math (V2(V2))
import Control.Arrow
import Yage.Rendering.RenderSystem as RenderSystem
import Yage.Rendering.RenderTarget
import Yage.Rendering.Resources.GL
import Yage.Rendering.GL
import Yage.Scene
import Yage.HDR
import Yage.Rendering.Pipeline.Deferred.Downsampling as Pass
import Yage.Rendering.Pipeline.Deferred.GaussianBlur as Pass
import Yage.Rendering.Pipeline.Deferred.LuminanceFilter as Pass
import Data.Maybe (fromJust)
-- redundancy Yage.Rendering.Pipeline.Deferred.GaussianBlur.blurRenderSystem will be fixed with 'YageResource' factored out

-- | Build a bloom post-processing pass: optionally pre-downsample the
-- input, filter it by luminance threshold, build a chain of
-- progressively halved render targets, and run a gaussian blur over
-- each level, folding the results back into one texture.
addBloom :: (ImageFormat px, MonadResource m) => YageResource (RenderSystem m (HDRBloomSettings,Texture2D px) (Texture2D px))
addBloom = do
  dsampler <- downsampler
  let halfSamplers = batchedDownsampler dsampler
  gaussPass  <- dimap (\((a,b),c)->(a,b,c)) Just <$> gaussianSampler
  filterLuma <- luminanceFilter
  return $ proc (settings, inTexture) -> do
    -- filter luma on half texture
    -- (only pre-downsample when bloomPreDownsampling > 1)
    half <- if settings^.bloomPreDownsampling > 1
              then do
                halfTarget <- autoResized mkTarget -< inTexture^.asRectangle & extend.mapped %~ (`div` (settings^.bloomPreDownsampling))
                processPass dsampler -< (halfTarget,inTexture)
              else returnA -< inTexture
    filteredTex <- filterLuma -< (settings^.bloomThreshold, half)
    -- one target per gauss pass, each half the previous size
    downTargets <- mapA (autoResized mkTarget) -< targetRects (settings^.bloomGaussPasses) (inTexture^.asRectangle)
    downsampledTextures <- halfSamplers -< (downTargets,[filteredTex])
    targets <- mapA (autoResized mkTarget) -< downsampledTextures & mapped %~ view asRectangle
    -- fromJust is safe here only if the fold runs at least once --
    -- i.e. bloomGaussPasses >= 1 (TODO confirm at call sites)
    fromJust <$> foldA gaussPass -< (zip targets downsampledTextures, Nothing)
 where
  -- single-level 2D texture matching the rectangle's extent
  mkTarget rect = let V2 w h = rect^.extend in createTexture2D GL_TEXTURE_2D (Tex2D w h) 1
  -- the n progressively halved (clamped to >= 1 pixel) rectangles
  targetRects :: Int -> Rectangle Int -> [Rectangle Int]
  targetRects n src = map ( \i -> src & extend.mapped %~ (\x -> max 1 (x `div` (2^i))) ) $ [1..n]
| MaxDaten/yage | src/Yage/Rendering/Pipeline/Deferred/Bloom.hs | mit | 2,393 | 1 | 20 | 586 | 644 | 357 | 287 | -1 | -1 |
module E04.A3
where
-- We revisit the former assignment from exercise 1 and redo it using a few
-- helpful builtin Haskell functions:
-- http://hackage.haskell.org/package/base-4.8.0.0/docs/Prelude.html#v:reverse
-- http://hackage.haskell.org/package/base-4.8.0.0/docs/Prelude.html#v:filter
-- http://hackage.haskell.org/package/base-4.8.0.0/docs/Prelude.html#v:.
--
-- Higher order function are functions that take functions as arguments.
-- For example filter has the type signature (a -> Bool) -> [a] -> [a].
--
-- (>) has type of Ord a => a -> (a -> Bool)
-- If we apply just one argument, e.g. 0 to (>) like this (>0) we get a
-- function with type signature a -> Bool. This is called partial application.
--
-- The dot operator (.) composes two functions:
-- (f . g) x == f (g x) for all x
-- (a)
-- | Keep only the strictly positive elements, then reverse their order.
f :: [Int] -> [Int]
f xs = reverse [x | x <- xs, x > 0]
-- (b)
-- pow is a higher order function, because the second argument is a function
-- a -> a
-- | Apply a function @g@ to a value @n@ times.
-- Higher-order: the second argument is itself a function (a -> a).
pow :: Int -> (a -> a) -> a -> a
pow n g x
  | n == 0    = x
  | otherwise = pow (n - 1) g (g x)
-- We can also use a fancy fold to do the same:
pow' n f = foldr (.) id (replicate n f)
-- (c)
-- here is the first argument
-- of type (a -> a -> b)
-- | Combine every adjacent pair of list elements with the given
-- function; lists shorter than two elements yield [].
pleat :: (a -> a -> b) -> [a] -> [b]
pleat g (x : rest@(y : _)) = g x y : pleat g rest
pleat _ _                  = []
-- Advanced: We can use the higher order function zipWith
-- http://hackage.haskell.org/package/base-4.8.0.0/docs/Prelude.html#v:zipWith
-- zipWith is lazy in its seconds argument, so that we do not have the handle
-- the empty list case specially.
pleat' f as = zipWith f as (tail as)
| sebschrader/programmierung-ss2015 | E04/A3.hs | mit | 1,595 | 0 | 8 | 316 | 283 | 162 | 121 | 12 | 1 |
{-# OPTIONS_GHC -Wall #-}
{-# LANGUAGE OverloadedStrings #-}
module ClassFileParser
( parseJavaClass
, parseOnlyJavaClass
, javaClass
) where
import JavaClass
import Data.Array (array, (!))
import Data.Attoparsec.ByteString
import Data.Binary.IEEE754 (wordToFloat, wordToDouble)
import Data.Bits
import Data.ByteString (ByteString, foldl', pack, elem, cons)
import Data.Char (ord)
import Prelude hiding (take, takeWhile, lookup, elem)
type BString = Data.ByteString.ByteString
-- | Incrementally parse a class file (attoparsec 'Result' supports
-- feeding more input); the parser demands the whole input be consumed.
parseJavaClass :: BString -> Result JavaClass
parseJavaClass = parse $ javaClass <* endOfInput
-- | Parse a complete class file in one go, returning Left with an
-- error message on failure.
parseOnlyJavaClass :: BString -> Either String JavaClass
parseOnlyJavaClass = parseOnly $ javaClass <* endOfInput
-- | Parse a whole class file: magic number, version pair, constant
-- pool, then access flags, two constant-pool indices (presumably
-- this_class / super_class per the class-file layout — confirm against
-- the JavaClass constructor), interfaces, fields, methods and
-- attributes.  The constant pool is threaded to the field/method/
-- attribute parsers so attribute names can be resolved while parsing.
javaClass :: Parser JavaClass
javaClass = magic >> versions >>= pools
  where
    pools v = cPool >>= jcparser v
    jcparser v cp = JavaClass v cp
                      <$> accFlags
                      <*> index
                      <*> index
                      <*> itfcs
                      <*> flds cp
                      <*> mthds cp
                      <*> attrs cp
magic :: Parser BString
magic = string magicVal <?> "not a class file"
versions :: Parser Version
versions = Version <$> u2 <*> u2
-- constant pool
-- | Parse the constant pool.  The class file stores
-- constant_pool_count first; valid entries are indexed 1 .. count - 1,
-- hence the (1, n - 1) array bounds.
cPool :: Parser ConstantPool
cPool = index >>= elements
  where
    elements n = array (1, n - 1) <$> ctelem n 1
-- | Parse constant-pool entries from index m up to (but excluding) n.
-- Long and Double constants occupy two consecutive pool slots: the
-- entry itself plus a 'None' filler, so they advance the index by 2.
ctelem :: Int -> Int -> Parser [(Int, CpInfo)]
ctelem n m
  | n > m = cpinfo >>= check
  | otherwise = return []
  where
    check x@(Long _) = ([(m, x), (m + 1, None)] ++) <$> ctelem n (m + 2)
    check x@(Double _) = ([(m, x), (m + 1, None)] ++) <$> ctelem n (m + 2)
    check x = ((m ,x) :) <$> ctelem n (m + 1)
-- | Parse one constant-pool entry by dispatching on its one-byte tag
-- (0x7 = Class, 0x9 = Fieldref, 0xa = Methodref, ..., 0x12 =
-- InvokeDynamic).  'choice' tries each alternative in order.
cpinfo :: Parser CpInfo
cpinfo = choice
    [ word8 0x7 >> cclass
    , word8 0x9 >> cfieldref
    , word8 0xa >> cmethodref
    , word8 0xb >> cinterfaceMethodref
    , word8 0x8 >> cstring
    , word8 0x3 >> cinteger
    , word8 0x4 >> cfloat
    , word8 0x5 >> clong
    , word8 0x6 >> cdouble
    , word8 0xc >> cnameAndType
    , word8 0x1 >> cutf8
    , word8 0xf >> cmethodHandle
    , word8 0x10 >> cmethodType
    , word8 0x12 >> cinvokeDynamic
    ]
cclass :: Parser CpInfo
cclass = Class <$> index
cfieldref :: Parser CpInfo
cfieldref = Fieldref <$> index <*> index
cmethodref :: Parser CpInfo
cmethodref = Methodref <$> index <*> index
cinterfaceMethodref :: Parser CpInfo
cinterfaceMethodref = InterfaceMethodref <$> index <*> index
cstring :: Parser CpInfo
cstring = String <$> index
cinteger :: Parser CpInfo
cinteger = Integer . convert <$> u4
cfloat :: Parser CpInfo
cfloat = Float . wordToFloat <$> u4
clong :: Parser CpInfo
clong = Long . convert <$> u8
cdouble :: Parser CpInfo
cdouble = Double . wordToDouble <$> u8
cnameAndType :: Parser CpInfo
cnameAndType = NameAndType <$> index <*> index
cutf8 :: Parser CpInfo
cutf8 = index >>= utf8bytes
where
utf8bytes n = Utf8 <$> take n
cmethodHandle :: Parser CpInfo
cmethodHandle = MethodHandle <$> u1 <*> index
cmethodType :: Parser CpInfo
cmethodType = MethodType <$> index
cinvokeDynamic :: Parser CpInfo
cinvokeDynamic = InvokeDynamic <$> index <*> index
-- | Read a u2 access_flags word and decode it into the flags whose
-- mask bits are set.
accFlags :: Parser [AccessFlag]
accFlags = checkMask <$> u2
-- | Decode a bit mask into access flags: keep every (flag, mask) pair
-- from 'masks' whose mask bit is present, preserving the list order.
checkMask :: U2 -> [AccessFlag]
checkMask bits = [ flag | (flag, mask) <- masks, bits .&. mask /= 0 ]
-- end
itfcs :: Parser [Int]
itfcs = index >>= iidxs
where
iidxs n = count n index
flds :: ConstantPool -> Parser [FieldInfo]
flds cp = index >>= flip count (finfo cp)
finfo :: ConstantPool -> Parser FieldInfo
finfo cp = FieldInfo <$> flags <*> index <*> index <*> attrs cp
where
flags = checkMask <$> u2
mthds :: ConstantPool -> Parser [MethodInfo]
mthds cp = index >>= flip count (minfo cp)
minfo :: ConstantPool -> Parser MethodInfo
minfo cp = MethodInfo <$> flags <*> index <*> index <*> attrs cp
where
flags = checkMask <$> u2
-- attributes
attrs :: ConstantPool -> Parser [AttributeInfo]
attrs cp = index >>= flip count (attrinfo cp)
attrinfo :: ConstantPool -> Parser AttributeInfo
attrinfo cp = index >>= alength
where
alength = getConstructors cp
{-
attrinfo = AttributeInfo <$> index <*> ainfo
where
ainfo = u4 >>= take . fromIntegral
-}
getConstructors :: ConstantPool -> Int -> Parser AttributeInfo
getConstructors cp n = constructors s
where
s = getStr $ cp ! n
where
getStr (Utf8 x) = x
getStr _ = error "not a UTF8"
constructors "ConstantValue" = aconstvalue
constructors "Code" = acode cp
constructors "StackMapTable" = astackmaptable
constructors "Exceptions" = aexceptions
constructors "InnerClasses" = ainnerclass
constructors "EnclosingMethod" = aenclosingmethod
constructors "Synthetic" = asynthetic
constructors "Signature" = asignature
constructors "SourceFile" = asourcefile
constructors "SourceDebugExtension" = asourcedebugextension
constructors "LineNumberTable" = alinenumbertable
constructors "LocalVariableTable" = alocalvariabletable
constructors "LocalVariableTypeTable" = alocalvariabletypetable
constructors "Deprecated" = adeprecated
constructors "RuntimeVisibleAnnotations" = aruntimevisibleannotation
constructors "RuntimeInvisibleAnnotations" = aruntimeinvisibleannotation
constructors "RuntimeVisibleParameterAnnotations" = aruntimevisibleparameterannotation
constructors "RuntimeInvisibleParameterAnnotations" = aruntimeinvisibleparameterannotation
constructors "RuntimeVisibleTypeAnnotations" = aruntimevisibletypeannotation
constructors "RuntimeInvisibleTypeAnnotations" = aruntimeinvisibletypeannotation
constructors "AnnotationDefault" = aannotationdefault
constructors "BootstrapMethods" = abootstrapmethods
constructors "MethodParameters" = amethodparameters
constructors _ = OtherAttribute n <$> (u4 >>= take . fromIntegral)
aconstvalue :: Parser AttributeInfo
aconstvalue = ConstantValue <$> (u4 >> index)
acode :: ConstantPool -> Parser AttributeInfo
acode cp = u4 >> acflds
where
acflds = Code
<$> index
<*> index
<*> (u4 >>= acodearray . fromIntegral)
<*> (index >>= flip count execinfo)
<*> attrs cp
execinfo :: Parser Exception
execinfo = Exception <$> index <*> index <*> index <*> index
astackmaptable :: Parser AttributeInfo
astackmaptable = u4 >> index >>= asmt
where
asmt n = StackMapTable <$> count n astackmapframe
astackmapframe :: Parser StackMapFrameUnion
astackmapframe = u1 >>= checkTag
where
checkTag t
| 0 <= t && t < 64 = return $ SameFrame t
| 63 < t && t < 128 = SameLocalsOneStackItemFrame t <$> averificationtype
| t == 247 = SameLocalsOneStackItemFrameExtended t <$> u2 <*> averificationtype
| 247 < t && t < 251 = ChopFrame t <$> u2
| t == 251 = SameFrameExtended t <$> u2
| 251 < t && t < 255 = AppendFrame t <$> u2 <*> count (fromIntegral t - 251) averificationtype
| t == 255 = FullFrame t <$> u2 <*> locals <*> stack
| otherwise = error "stack_map_frame: reserved tag"
where
locals = index >>= flip count averificationtype
stack = index >>= flip count averificationtype
averificationtype :: Parser VerificationTypeUnion
averificationtype = u1 >>= checkTag
where
checkTag t
| t == 0 = return TopVariable
| t == 1 = return IntegerVariable
| t == 2 = return FloatVariable
| t == 3 = return DoubleVariable
| t == 4 = return LongVariable
| t == 5 = return NullVariable
| t == 6 = return UninitializedThisVariable
| t == 7 = ObjectVariable <$> index
| t == 8 = UninitializedVariable <$> u2
| otherwise = error "verification_type_info: unused tag"
aexceptions :: Parser AttributeInfo
aexceptions = u4 >> index >>= aexcf
where
aexcf n = Exceptions <$> count n index
ainnerclass :: Parser AttributeInfo
ainnerclass = u4 >> index >>= aincs
where
aincs n = InnerClass <$> count n f
f = InClass <$> index <*> index <*> index <*> u2
aenclosingmethod :: Parser AttributeInfo
aenclosingmethod = u4 >> aemethod
where
aemethod = EnclosingMethod <$> index <*> index
asynthetic :: Parser AttributeInfo
asynthetic = u4 >> return SyntheticA
asignature :: Parser AttributeInfo
asignature = u4 >> asig
where
asig = Signature <$> index
asourcefile :: Parser AttributeInfo
asourcefile = u4 >> asf
where
asf = SourceFile <$> index
asourcedebugextension :: Parser AttributeInfo
asourcedebugextension = u4 >>= asde . fromIntegral
where
asde n = SourceDebugExtension <$> take n
alinenumbertable :: Parser AttributeInfo
alinenumbertable = u4 >> index >>= alnt
where
alnt n = LineNumberTable <$> count n alinfo
alinfo = LineNumberInfo <$> index <*> index
alocalvariabletable :: Parser AttributeInfo
alocalvariabletable = u4 >> index >>= alvt
where
alvt n = LocalVariableTable <$> count n alvinfo
alvinfo = LocalVariableInfo
<$> index
<*> index
<*> index
<*> index
<*> index
alocalvariabletypetable :: Parser AttributeInfo
alocalvariabletypetable = u4 >> index >>= alvtt
where
alvtt n = LocalVariableTypeTable <$> count n alvtinfo
alvtinfo = LocalVariableTypeInfo
<$> index
<*> index
<*> index
<*> index
<*> index
adeprecated :: Parser AttributeInfo
adeprecated = u4 >> return Deprecated
ara :: ([AnnotationInfo] -> AttributeInfo) -> Parser AttributeInfo
ara f = u4 >> index >>= arva
where
arva n = f <$> count n arvainfo
arvainfo :: Parser AnnotationInfo
arvainfo = AnnotationInfo <$> index <*> elemarray
where
elemarray = index >>= flip count aelem
aelem = (,) <$> index <*> aelemvalue
aelemvalue :: Parser ElementValue
aelemvalue = u1 >>= checkTag
where
toU1 = fromIntegral . ord :: (Char -> U1)
checkTag t
| t `elem` "BCDFIJSZs" = ElementValue t <$> (ConstValue <$> index)
| t == toU1 'e' = ElementValue t <$> (EnumConst <$> index <*> index)
| t == toU1 'c' = ElementValue t <$> (ClassInfo <$> index)
| t == toU1 '@' = ElementValue t <$> (AnnotationValue <$> arvainfo)
| otherwise = ElementValue t <$> (ArrayValue <$> elemarray)
where
elemarray = index >>= flip count aelemvalue
aruntimevisibleannotation :: Parser AttributeInfo
aruntimevisibleannotation = ara RuntimeVisibleAnnotations
aruntimeinvisibleannotation :: Parser AttributeInfo
aruntimeinvisibleannotation = ara RuntimeInvisibleAnnotations
arpa :: ([[AnnotationInfo]] -> AttributeInfo) -> Parser AttributeInfo
arpa f = u4 >> u1 >>= aparray . fromIntegral
where
aparray n = f <$> count n apearray
apearray = index >>= flip count arvainfo
aruntimevisibleparameterannotation :: Parser AttributeInfo
aruntimevisibleparameterannotation = arpa RuntimeVisibleParameterAnnotations
aruntimeinvisibleparameterannotation :: Parser AttributeInfo
aruntimeinvisibleparameterannotation = arpa RuntimeInvisibleParameterAnnotations
arta :: ([TypeAnnotation] -> AttributeInfo) -> Parser AttributeInfo
arta f = u4 >> index >>= atarray
where
atarray n = f <$> count n atypeannotation
atypeannotation = u1 >>= ata
ata t = TypeAnnotation t
<$> checkType t
<*> tpath
<*> index
<*> evalpairs
checkType t
| t `elem` pack [0x00, 0x01] = TypeParameter <$> u1
| t == 0x10 = Supertype <$> u2
| t `elem` pack [0x11, 0x12] = TypeParameterBound <$> u1 <*> u1
| t `elem` pack [0x13..0x15] = return Empty
| t == 0x16 = MethodFormalParameter <$> u1
| t == 0x17 = Throws <$> u2
| t `elem` pack [0x40, 0x41] = Localvar <$> localvartable
| t == 0x42 = Catch <$> u2
| t `elem` pack [0x43..0x46] = Offset <$> u2
| t `elem` pack [0x47..0x4b] = TypeArgument <$> u2 <*> u1
| otherwise = error $ "TypeInfoUnion with tag:" ++ show t
where
localvartable = index >>= flip count lvtelem
lvtelem = (,,) <$> index <*> index <*> index
tpath = u1 >>= tpc . fromIntegral
tpc n = TypePath <$> count n tpelem
tpelem = (,) <$> u1 <*> u1
evalpairs = index >>= flip count evpelem
evpelem = (,) <$> index <*> aelemvalue
aruntimevisibletypeannotation :: Parser AttributeInfo
aruntimevisibletypeannotation = arta RuntimeVisibleTypeAnnotations
aruntimeinvisibletypeannotation :: Parser AttributeInfo
aruntimeinvisibletypeannotation = arta RuntimeInvisibleTypeAnnotations
aannotationdefault :: Parser AttributeInfo
aannotationdefault = u4 >> (AnnotationDefault <$> aelemvalue)
abootstrapmethods :: Parser AttributeInfo
abootstrapmethods = u4 >> index >>= bms
where
bms n = BootstrapMethods <$> count n bmsarray
bmsarray = (,) <$> index <*> (index >>= flip count index)
amethodparameters :: Parser AttributeInfo
amethodparameters = u4 >> u1 >>= mps . fromIntegral
where
mps n = MethodParameters <$> count n parray
parray = (,) <$> index <*> u2
-- end
-- code
-- if v >= 51, no jsr, jsr_w
acodearray :: Int -> Parser [Instruction]
acodearray n
| n == 0 = return []
| n > 0 = parseInst >>= app
| otherwise = error "acodearray: internal error"
where
app (i, l) = (i :) <$> acodearray l
parseInst = u1 >>= retval
retval t = (,) <$> inst <*> return (n - m - 1)
where
inst = Instruction t s <$> sptake
(s, m) = opcodes ! t
sptake
| t == 0xc4 = u1 >>= wideCheck -- wide
| otherwise = take m
wideCheck x
| x `elem` pack (0xa9 : [0x15..0x19] ++ [0x36..0x3a]) = cons x <$> take 2 -- <x>load, <x>store, ret
| otherwise = cons x <$> take 4
-- end
-- | Read a big-endian two-byte value as an Int; the class-file format
-- uses u2 for most of its index and count fields.
index :: Parser Int
index = fromIntegral <$> u2
-- | Read one unsigned byte.
u1 :: Parser U1
u1 = anyWord8
-- | Read an unsigned big-endian 16-bit value.
u2 :: Parser U2
u2 = cmb <$> take 2
-- | Read an unsigned big-endian 32-bit value.
u4 :: Parser U4
u4 = cmb <$> take 4
-- | Read an unsigned big-endian 64-bit value.
u8 :: Parser U8
u8 = cmb <$> take 8
-- | Fold a byte string into a single numeric value, treating the bytes
-- as big-endian (most significant byte first).
cmb :: (Bits a, Num a) => BString -> a
cmb bytes = foldl' joinByte zeroBits bytes
  where
    joinByte acc b = (acc `shiftL` 8) .|. fromIntegral b
-- | Reinterpret an unsigned fixed-width word as a signed Int using
-- two's complement: when the most significant bit is set the value is
-- -(complement w + 1), otherwise it converts directly.
convert :: (FiniteBits a, Integral a, Enum a) => a -> Int
convert w
  | testBit w msb = negate (fromIntegral (complement w) + 1)
  | otherwise     = fromIntegral w
  where
    msb = finiteBitSize w - 1
| MichaeGon/java | ClassFileParser.hs | mit | 14,909 | 0 | 17 | 4,097 | 4,383 | 2,228 | 2,155 | 333 | 25 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE OverloadedStrings #-}
module Database where
import Control.Applicative
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as BL
import Control.Exception
import qualified Data.Text as T
import qualified Data.Text.IO as TIO
import Data.Text.Encoding
import Network ( withSocketsDo )
import Network.HTTP.Conduit ( simpleHttp, HttpException )
import Database.SQLite.Simple
data EmailRaw = EmailRaw
{ erUidl :: T.Text
, erDate :: T.Text
, erSubject :: T.Text
, erFrom :: T.Text
, erTo :: T.Text
, rawMessage :: BS.ByteString
}
instance ToRow EmailRaw where
toRow(EmailRaw u d s f t w) = toRow (u,d,s,f,t,w)
instance FromRow EmailRaw where
fromRow = EmailRaw <$> field <*> field <*> field <*> field <*> field <*> field
data EmailLinks = EmailLinks
{ elUidl :: T.Text
, elhttpLink :: T.Text
}
instance ToRow EmailLinks where
toRow (EmailLinks l r) = toRow (l,r)
data LinkRaw = LinkRaw
{ lrHttpLink :: T.Text
, lrRawPage :: BS.ByteString
}
instance ToRow LinkRaw where
toRow (LinkRaw l r) = toRow (l,r)
instance FromRow LinkRaw where
fromRow = LinkRaw <$> field <*> field
-- | Persist raw e-mails, all inside a single transaction.
dbWriteEmails :: [EmailRaw] -> IO ()
dbWriteEmails rawEmails = runDB $ \conn -> withTransaction conn $
  insertEmailRaw conn rawEmails
-- | Insert each e-mail as one row of the email_raw table (one execute
-- per record; callers wrap this in a transaction).
insertEmailRaw :: Connection -> [EmailRaw] -> IO ()
insertEmailRaw conn =
  mapM_ (execute conn "insert into email_raw values (?,?,?,?,?,?)")
dbReadKeys :: IO [BS.ByteString]
dbReadKeys = runDB $ \conn -> do
uidls :: [Only T.Text] <- query_ conn "select uidl from email_raw"
return $! map (encodeUtf8 . fromOnly ) uidls
-- dbStorePages :: IO ()
-- | Fetch and store the page behind every e-mail link that has no row
-- in link_raw yet.  Each link is printed before fetching; on an
-- 'HttpException' the error text is written to a file named after the
-- last "/"-separated segment of the link and that link is skipped.
dbStorePages :: IO ()
dbStorePages = withSocketsDo $ runDB $ \conn -> do
  -- Distinct links from e-mails that have not been downloaded yet.
  httpLinks :: [Only T.Text]
    <- query_ conn "SELECT DISTINCT http_Link FROM email_links \
         \ where not exists(select * from link_raw where http_link = email_links.http_link)"
  mapM_ (\(Only link) ->
    do TIO.putStrLn link
       -- Just the page body on success, Nothing on HTTP failure.
       page <- catch ((simpleHttp $ T.unpack link) >>= return . Just)
                 (\e -> do let err = show (e :: HttpException)
                           TIO.putStrLn "Failed!"
                           -- Dump the error into a file named after the
                           -- last path segment of the failing link.
                           let fn = T.unpack $ snd $ T.breakOnEnd "/" link
                           writeFile fn err
                           -- putStrLn err
                           return Nothing)
       case page of
         Just p1 -> execute conn "insert into link_raw values (?,?)" $
                      LinkRaw link $ BL.toStrict p1
         Nothing -> return () -- $!
    ) $ httpLinks
-- insertLinkRaw conn linkRaw
insertLinkRaw :: Connection -> [LinkRaw] -> IO ()
insertLinkRaw conn = mapM_ (execute conn "insert into link_raw values (?,?)")
insertEmailLinks :: Connection -> [EmailLinks] -> IO ()
insertEmailLinks conn = mapM_ (execute conn "insert into email_links values (?,?)")
-- | Load every stored page from the link_raw table.
--
-- The original bound the query result only to 'return' it unchanged
-- (the @x <- m; return x@ anti-pattern); the query is now returned
-- directly — the result type is already pinned by the signature.
dbReadPages :: IO [LinkRaw]
dbReadPages = runDB $ \conn -> query_ conn "select * from link_raw"
-- | Fetch all the emails that do not have entries in DB table EmailLinks
-- | Fetch all the emails that do not have entries in DB table EmailLinks.
--
-- Simplified from @x <- m; return x@ to the query itself; the result
-- type is fixed by the signature, so the inline annotation is gone too.
dbEmailNoLinks :: Connection -> IO [EmailRaw]
dbEmailNoLinks conn = query_ conn
  "select * from email_raw \
  \ where not exists (select * from email_links where uidl = email_raw.uidl)"
testDb01 :: IO [EmailRaw]
testDb01 = runDB $ \conn -> do
rawEmails <- dbEmailNoLinks conn
return $! take 1 rawEmails
-- | Run an action against the application's SQLite database
-- ("Email.sqlite3"), opening and closing the connection around it.
runDB :: (Connection -> IO b) -> IO b
runDB = withConnection "Email.sqlite3"
| habbler/immo | Database.hs | mit | 3,689 | 0 | 26 | 945 | 1,026 | 540 | 486 | 83 | 2 |
import Control.Monad
import Data.List
diff line1 line2 = length (line1 \\ line2) + length (line2 \\ line1)
-- | Read two lines from stdin and print how many deletions make them
-- anagrams of each other.
main :: IO ()
main = do
  firstWord  <- getLine
  secondWord <- getLine
  print (diff firstWord secondWord)
| mgrebenets/hackerrank | alg/strings/make-it-anagram.hs | mit | 221 | 0 | 10 | 57 | 94 | 45 | 49 | 9 | 1 |
import qualified Data.Vector as Vec
import Graphics.Perfract
import Graphics.Perfract.Shape
-- . . . .
-- . o o .
-- .o/\ /\o.
-- \/__\/
-- | |
-- |__|
-- | Recursive 3D figure: a basic cube with two child transforms, each
-- built from an XYZ offset, a 'ratRot' rotation and a 0.6 scale factor
-- (presumably position/rotation/zoom per child — confirm against the
-- Prz3 definition).  Matches the ASCII sketch above: a cube sprouting
-- two mirrored "hairs".
sqrHair :: RecFig3
sqrHair = RecFig3 basicCube
  (Vec.fromList
    [ Prz3 (XYZ 1.6 0.4 0.4) (ratRot $ 0.1) (0.6)
    , Prz3 (XYZ 1.6 (-0.4) (-0.4)) (ratRot $ 0.2) (0.6)
    ])
-- | Render the square-hair figure with the perfract 3D driver.
main :: IO ()
main = perfract3 sqrHair
| dancor/perfract | src/Main3d.hs | mit | 398 | 0 | 13 | 111 | 137 | 78 | 59 | 10 | 1 |
module J2S.AI.RandomTest
( randomTests
) where
import Control.Monad.Random
import Test.Framework
import Test.Framework.Providers.QuickCheck2
import Test.QuickCheck
import System.Random (mkStdGen)
import qualified J2S as AI
import qualified J2S.Game.Mock as M
-- | Test group covering the random-choice AI.
--
-- Added the missing top-level type signature ('Test' is exported by
-- "Test.Framework"), silencing the -Wall missing-signature warning.
randomTests :: Test
randomTests = testGroup "Test Random AI"
  [ testProperty "Random returns a listed action" randomReturnsAListedAction
  ]
-- | The action picked by the random AI must be one of the actions the
-- game lists as available.  Seeding the generator with the QuickCheck-
-- supplied Int makes each case deterministic and reproducible.
randomReturnsAListedAction :: Int -> M.MockGame -> Bool
randomReturnsAListedAction s g = (`elem` (fst <$> AI.listActions g)) $ AI.rand g `evalRand` mkStdGen s
| berewt/J2S | test/J2S/AI/RandomTest.hs | mit | 574 | 0 | 11 | 102 | 139 | 83 | 56 | 13 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE RecursiveDo #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Pos.Diffusion.Full
( FullDiffusionConfiguration (..)
, diffusionLayerFull
, diffusionLayerFullExposeInternals
, FullDiffusionInternals (..)
, RunFullDiffusionInternals (..)
) where
import Universum
import Control.Concurrent.Async (Concurrently (..), race)
import Control.Concurrent.MVar (modifyMVar_)
import qualified Control.Concurrent.STM as STM
import Data.Functor.Contravariant (contramap)
import qualified Data.Map as M
import Data.Time.Units (Microsecond, Millisecond, Second)
import Formatting (Format)
import qualified Network.Broadcast.OutboundQueue as OQ
import Network.Broadcast.OutboundQueue.Types (MsgType (..),
Origin (..))
import Network.Transport (Transport)
import Node (Node, NodeAction (..), NodeEnvironment (..),
defaultNodeEnvironment, node, simpleNodeEndPoint)
import Node.Conversation (Conversation, Converse, converseWith)
import qualified System.Metrics as Monitoring
import System.Random (newStdGen)
import Pos.Chain.Block (Block, BlockHeader, HeaderHash,
MainBlockHeader)
import Pos.Chain.Delegation (ProxySKHeavy)
import Pos.Chain.Ssc (InnerSharesMap, MCCommitment (..),
MCOpening (..), MCShares (..), MCVssCertificate (..),
Opening, SignedCommitment, VssCertificate)
import Pos.Chain.Txp (TxAux)
import Pos.Chain.Update (BlockVersion, BlockVersionData (..), UpId,
UpdateProposal, UpdateVote)
import Pos.Communication (EnqueueMsg, HandlerSpecs, InSpecs (..),
InvOrDataTK, Listener, MkListeners (..), Msg,
MsgSubscribe, MsgSubscribe1, NodeId, OutSpecs (..),
PackingType, PeerData, SendActions, VerInfo (..),
bipPacking, convH, createOutSpecs, makeEnqueueMsg,
makeSendActions, toOutSpecs)
import Pos.Core (ProtocolConstants (..), StakeholderId)
import Pos.Core.Chrono (OldestFirst)
import Pos.Core.Metrics.Constants (withCardanoNamespace)
import Pos.Crypto.Configuration (ProtocolMagic (..), getProtocolMagic)
import qualified Pos.Diffusion.Full.Block as Diffusion.Block
import qualified Pos.Diffusion.Full.Delegation as Diffusion.Delegation
import qualified Pos.Diffusion.Full.Ssc as Diffusion.Ssc
import qualified Pos.Diffusion.Full.Txp as Diffusion.Txp
import qualified Pos.Diffusion.Full.Update as Diffusion.Update
import Pos.Infra.Communication.Relay.Logic (invReqDataFlowTK)
import Pos.Infra.DHT.Real (KademliaDHTInstance (..),
KademliaParams (..), kademliaJoinNetworkNoThrow,
kademliaJoinNetworkRetry, startDHTInstance,
stopDHTInstance)
import Pos.Infra.Diffusion.Subscription.Common (subscriptionListeners)
import Pos.Infra.Diffusion.Subscription.Dht (dhtSubscriptionWorker)
import Pos.Infra.Diffusion.Subscription.Dns (dnsSubscriptionWorker)
import Pos.Infra.Diffusion.Subscription.Status (SubscriptionStates,
emptySubscriptionStates)
import Pos.Infra.Diffusion.Transport.TCP (bracketTransportTCP)
import Pos.Infra.Diffusion.Types (Diffusion (..),
DiffusionHealth (..), DiffusionLayer (..),
StreamBlocks (..))
import Pos.Infra.Network.Types (Bucket (..), NetworkConfig (..),
NodeType, SubscriptionWorker (..), initQueue,
topologyHealthStatus, topologyRunKademlia,
topologySubscribers, topologySubscriptionWorker)
import Pos.Infra.Reporting.Ekg (EkgNodeMetrics (..),
registerEkgNodeMetrics)
import Pos.Infra.Reporting.Health.Types (HealthStatus (..))
import Pos.Logic.Types (Logic (..))
import Pos.Network.Block.Types (MsgBlock, MsgGetBlocks, MsgGetHeaders,
MsgHeaders, MsgStream, MsgStreamBlock)
import Pos.Util.OutboundQueue (EnqueuedConversation (..))
import Pos.Util.Timer (Timer, startTimer)
import Pos.Util.Trace (Severity (Error), Trace)
import Pos.Util.Trace.Named (LogNamed, appendName, named)
{-# ANN module ("HLint: ignore Reduce duplication" :: Text) #-}
{-# ANN module ("HLint: ignore Use whenJust" :: Text) #-}
{-# ANN module ("HLint: ignore Use record patterns" :: Text) #-}
-- | Static configuration needed to bring up the full diffusion layer.
data FullDiffusionConfiguration = FullDiffusionConfiguration
    { fdcProtocolMagic :: !ProtocolMagic
      -- ^ Protocol magic; its numeric value goes into the 'VerInfo'
      -- exchanged with peers.
    , fdcProtocolConstants :: !ProtocolConstants
      -- ^ Passed to the block listeners/announce logic.
    , fdcRecoveryHeadersMessage :: !Word
      -- ^ Threaded to the block retrieval/request machinery.
    , fdcLastKnownBlockVersion :: !BlockVersion
      -- ^ Also embedded in the peer 'VerInfo'.  Known caveat: VerInfo
      -- is immutable, so this can grow stale if the adopted block
      -- version changes at runtime.
    , fdcConvEstablishTimeout :: !Microsecond
      -- ^ Timeout for establishing conversations; used by the TCP
      -- transport bracket and the time-warp node.
    , fdcBatchSize :: !Word32
    -- ^ Size of batches of blocks to process when streaming.
    , fdcStreamWindow :: !Word32
    -- ^ Size of window for block streaming.
    , fdcTrace :: !(Trace IO (LogNamed (Severity, Text)))
      -- ^ Base trace; components append their own names to it
      -- ("outboundqueue", "transport", ...).
    }
-- | CPS runner: brings up the diffusion layer and hands its internals
-- to the continuation, tearing everything down afterwards.
data RunFullDiffusionInternals = RunFullDiffusionInternals
    { runFullDiffusionInternals :: forall y . (FullDiffusionInternals -> IO y) -> IO y
    }
-- | Pieces of the running diffusion layer normally hidden behind the
-- 'Diffusion' record; exposed by 'diffusionLayerFullExposeInternals'.
data FullDiffusionInternals = FullDiffusionInternals
    { fdiNode :: Node
    , fdiConverse :: Converse PackingType PeerData
    , fdiSendActions :: SendActions
    }
-- | Make a full diffusion layer, filling in many details using a
-- 'NetworkConfig' and its constituent 'Topology'.
-- An 'OutboundQ' is brought up for you, based on the 'NetworkConfig'.
-- A TCP transport is brought up as well, again using the 'NetworkConfig',
-- which includes information about the address. This is why we use CPS here:
-- the transport is bracketed.
-- The 'NetworkConfig's topology is also used to fill in various options
-- related to subscription, health status reporting, etc.
diffusionLayerFull
:: FullDiffusionConfiguration
-> NetworkConfig KademliaParams
-> Maybe EkgNodeMetrics
-> (Diffusion IO -> Logic IO)
-- ^ The logic layer can use the diffusion layer.
-> (DiffusionLayer IO -> IO x)
-> IO x
diffusionLayerFull fdconf networkConfig mEkgNodeMetrics mkLogic k = do
let -- A trace for the Outbound Queue. We use the one from the
-- configuration, and put an outboundqueue suffix on it.
oqTrace =appendName "outboundqueue" (fdcTrace fdconf)
-- Make the outbound queue using network policies.
oq :: OQ.OutboundQ EnqueuedConversation NodeId Bucket <-
-- NB: <> it's not Text semigroup append, it's LoggerName append, which
-- puts a "." in the middle.
initQueue networkConfig oqTrace (enmStore <$> mEkgNodeMetrics)
let topology = ncTopology networkConfig
mSubscriptionWorker = topologySubscriptionWorker topology
mSubscribers = topologySubscribers topology
healthStatus = topologyHealthStatus topology oq
mKademliaParams = topologyRunKademlia topology
-- Transport needs a Trace IO Text. We re-use the 'Trace' given in
-- the configuration at severity 'Error' (when transport has an
-- exception trying to 'accept' a new connection).
logTrace :: Trace IO Text
logTrace = contramap ((,) Error) $ named $
appendName "transport" (fdcTrace fdconf)
bracketTransportTCP logTrace (fdcConvEstablishTimeout fdconf) (ncTcpAddr networkConfig) $ \transport -> do
rec (fullDiffusion, internals) <-
diffusionLayerFullExposeInternals fdconf
transport
oq
(ncDefaultPort networkConfig)
mSubscriptionWorker
mSubscribers
mKademliaParams
healthStatus
mEkgNodeMetrics
logic
let logic = mkLogic fullDiffusion
k $ DiffusionLayer
{ diffusion = fullDiffusion
, runDiffusionLayer = \action -> runFullDiffusionInternals internals (const action)
}
-- | Restart the keepalive timer registered for a peer, if any, giving
-- it three slot durations before it fires.  The slot duration is read
-- fresh on every reset since it may change over time.  The timer map
-- itself is returned unchanged; the MVar is only held for exclusion.
resetKeepAlive :: IO Millisecond -> MVar (Map NodeId Timer) -> NodeId -> IO ()
resetKeepAlive slotDuration timersVar nodeId =
    modifyMVar_ timersVar $ \timers -> do
        case M.lookup nodeId timers of
            Nothing -> pure ()
            Just timer -> do
                duration <- slotDuration
                startTimer (3 * duration) timer
        pure timers
diffusionLayerFullExposeInternals
:: FullDiffusionConfiguration
-> Transport
-> OQ.OutboundQ EnqueuedConversation NodeId Bucket
-> Word16 -- ^ Port on which peers are assumed to listen.
-> Maybe SubscriptionWorker
-> Maybe (NodeType, OQ.MaxBucketSize)
-> Maybe (KademliaParams, Bool)
-- ^ KademliaParams and a default port for kademlia.
-- Bool says whether the node must join before starting normal
-- operation, as opposed to passively trying to join.
-> IO HealthStatus
-- ^ Amazon Route53 health check support (stopgap measure, see note
-- in Pos.Infra.Diffusion.Types, above 'healthStatus' record field).
-> Maybe EkgNodeMetrics
-> Logic IO
-> IO (Diffusion IO, RunFullDiffusionInternals)
diffusionLayerFullExposeInternals fdconf
transport
oq
defaultPort
mSubscriptionWorker
mSubscribers
mKademliaParams
healthStatus -- named to be picked up by record wildcard
mEkgNodeMetrics
logic = do
let protocolMagic = fdcProtocolMagic fdconf
protocolConstants = fdcProtocolConstants fdconf
lastKnownBlockVersion = fdcLastKnownBlockVersion fdconf
recoveryHeadersMessage = fdcRecoveryHeadersMessage fdconf
batchSize = fdcBatchSize fdconf
streamWindow = fdcStreamWindow fdconf
logTrace = named (fdcTrace fdconf)
-- Subscription states.
subscriptionStates <- emptySubscriptionStates
keepaliveTimerVar <- newMVar M.empty
diffusionHealth <- case mEkgNodeMetrics of
Nothing -> return Nothing
Just m -> liftIO $ do
wqgM <- Monitoring.createGauge (withCardanoNamespace "diffusion.WriteQueue") $ enmStore m
wM <- Monitoring.createGauge (withCardanoNamespace "diffusion.Window") $ enmStore m
return $ Just $ DiffusionHealth wqgM wM
let -- VerInfo is a diffusion-layer-specific thing. It's only used for
-- negotiating with peers.
--
-- Known bug: if the block version changes, the VerInfo will be
-- out of date, as it's immutable.
-- Solution: don't put it in the VerInfo. Other clients don't need
-- to know the peer's latest adopted block version, they need only
-- know what software version its running.
ourVerInfo :: VerInfo
ourVerInfo = VerInfo (getProtocolMagic protocolMagic)
lastKnownBlockVersion
ins
(outs <> workerOuts)
ins :: HandlerSpecs
InSpecs ins = inSpecs mkL
-- The out specs come not just from listeners but also from workers.
-- Workers in the existing implementation were bundled up in
-- allWorkers :: ([WorkerSpec m], OutSpecs)
-- and they performed logic layer tasks, so having out specs defined
-- by them doesn't make sense.
-- For the first iteration, we just dump those out specs here, since
-- we know in the diffusion layer the set of all requests that might
-- be made.
--
-- Find below a definition of each of the worker out specs,
-- copied from Pos.Worker (allWorkers). Each one was manually
-- inspected to determine the out specs.
--
-- FIXME this system must change. Perhaps replace it with a
-- version number?
outs :: HandlerSpecs
OutSpecs outs = outSpecs mkL
workerOuts :: HandlerSpecs
OutSpecs workerOuts = mconcat
[ -- First: the relay system out specs.
Diffusion.Txp.txOutSpecs logic
, Diffusion.Update.updateOutSpecs logic
, Diffusion.Delegation.delegationOutSpecs logic
, Diffusion.Ssc.sscOutSpecs logic
-- Relay system for blocks is ad-hoc.
, blockWorkerOutSpecs
-- SSC has non-relay out specs, defined below.
, sscWorkerOutSpecs
, securityWorkerOutSpecs
, slottingWorkerOutSpecs
, subscriptionWorkerOutSpecs
, dhtWorkerOutSpecs
]
-- An onNewSlotWorker and a localWorker. Latter is mempty. Former
-- actually does the ssc stuff.
sscWorkerOutSpecs = mconcat
[ createOutSpecs (Proxy @(InvOrDataTK StakeholderId MCCommitment))
, createOutSpecs (Proxy @(InvOrDataTK StakeholderId MCOpening))
, createOutSpecs (Proxy @(InvOrDataTK StakeholderId MCShares))
, createOutSpecs (Proxy @(InvOrDataTK StakeholderId MCVssCertificate))
]
-- A single worker checkForReceivedBlocksWorker with
-- requestTipOuts from Pos.Network.Block.Types
securityWorkerOutSpecs = toOutSpecs
[ convH (Proxy :: Proxy MsgGetHeaders)
(Proxy :: Proxy MsgHeaders)
]
-- announceBlockHeaderOuts from blkCreatorWorker
-- announceBlockHeaderOuts from blkMetricCheckerWorker
-- along with the retrieval worker outs which also include
-- announceBlockHeaderOuts.
blockWorkerOutSpecs = mconcat
[ announceBlockHeaderOuts
, announceBlockHeaderOuts
, announceBlockHeaderOuts <> toOutSpecs [ convH (Proxy :: Proxy MsgGetBlocks)
(Proxy :: Proxy MsgBlock)
]
, streamBlockHeaderOuts
]
announceBlockHeaderOuts = toOutSpecs [ convH (Proxy :: Proxy MsgHeaders)
(Proxy :: Proxy MsgGetHeaders)
]
streamBlockHeaderOuts = toOutSpecs [ convH (Proxy :: Proxy MsgStream)
(Proxy :: Proxy MsgStreamBlock)
]
-- Plainly mempty from the definition of allWorkers.
slottingWorkerOutSpecs = mempty
subscriptionWorkerOutSpecs = toOutSpecs
[ convH (Proxy @MsgSubscribe) (Proxy @Void)
, convH (Proxy @MsgSubscribe1) (Proxy @Void)
]
-- It's a localOnNewSlotWorker, so mempty.
dhtWorkerOutSpecs = mempty
mkL :: MkListeners
mkL = mconcat $
[ Diffusion.Block.blockListeners logTrace logic protocolConstants recoveryHeadersMessage oq
(Diffusion.Block.ResetNodeTimer $ resetKeepAlive currentSlotDuration keepaliveTimerVar)
, Diffusion.Txp.txListeners logTrace logic oq enqueue
, Diffusion.Update.updateListeners logTrace logic oq enqueue
, Diffusion.Delegation.delegationListeners logTrace logic oq enqueue
, Diffusion.Ssc.sscListeners logTrace logic oq enqueue
] ++ [
subscriptionListeners logTrace oq subscriberNodeType
| Just (subscriberNodeType, _) <- [mSubscribers]
]
listeners :: VerInfo -> [Listener]
listeners = mkListeners mkL ourVerInfo
currentSlotDuration :: IO Millisecond
currentSlotDuration = bvdSlotDuration <$> getAdoptedBVData logic
-- Bracket kademlia and network-transport, create a node. This
-- will be very involved. Should make it top-level I think.
runDiffusionLayer :: forall y . (FullDiffusionInternals -> IO y) -> IO y
runDiffusionLayer = runDiffusionLayerFull
logTrace
transport
oq
(fdcConvEstablishTimeout fdconf)
ourVerInfo
defaultPort
mKademliaParams
mSubscriptionWorker
mEkgNodeMetrics
keepaliveTimerVar
currentSlotDuration
subscriptionStates
listeners
enqueue :: EnqueueMsg
enqueue = makeEnqueueMsg logTrace ourVerInfo $ \msgType k -> do
itList <- OQ.enqueue oq msgType (EnqueuedConversation (msgType, k))
pure (M.fromList itList)
getBlocks :: NodeId
-> HeaderHash
-> [HeaderHash]
-> IO (OldestFirst [] Block)
getBlocks = Diffusion.Block.getBlocks logTrace logic recoveryHeadersMessage enqueue
requestTip :: IO (Map NodeId (IO BlockHeader))
requestTip = Diffusion.Block.requestTip logTrace logic enqueue recoveryHeadersMessage
streamBlocks :: forall t .
NodeId
-> HeaderHash
-> [HeaderHash]
-> StreamBlocks Block IO t
-> IO (Maybe t)
streamBlocks = Diffusion.Block.streamBlocks logTrace diffusionHealth logic batchSize streamWindow enqueue
announceBlockHeader :: MainBlockHeader -> IO ()
announceBlockHeader = void . Diffusion.Block.announceBlockHeader logTrace logic protocolConstants recoveryHeadersMessage enqueue
sendTx :: TxAux -> IO Bool
sendTx = Diffusion.Txp.sendTx logTrace enqueue
sendUpdateProposal :: UpId -> UpdateProposal -> [UpdateVote] -> IO ()
sendUpdateProposal = Diffusion.Update.sendUpdateProposal logTrace enqueue
sendVote :: UpdateVote -> IO ()
sendVote = Diffusion.Update.sendVote logTrace enqueue
-- TODO put these into a Pos.Diffusion.Full.Ssc module.
sendSscCert :: StakeholderId -> VssCertificate -> IO ()
sendSscCert sid = void . invReqDataFlowTK logTrace "ssc" enqueue (MsgMPC OriginSender) sid . MCVssCertificate
sendSscOpening :: StakeholderId -> Opening -> IO ()
sendSscOpening sid = void . invReqDataFlowTK logTrace "ssc" enqueue (MsgMPC OriginSender) sid . MCOpening sid
sendSscShares :: StakeholderId -> InnerSharesMap -> IO ()
sendSscShares sid = void . invReqDataFlowTK logTrace "ssc" enqueue (MsgMPC OriginSender) sid . MCShares sid
sendSscCommitment :: StakeholderId -> SignedCommitment -> IO ()
sendSscCommitment sid = void . invReqDataFlowTK logTrace "ssc" enqueue (MsgMPC OriginSender) sid . MCCommitment
sendPskHeavy :: ProxySKHeavy -> IO ()
sendPskHeavy = Diffusion.Delegation.sendPskHeavy logTrace enqueue
-- TODO better status text.
formatStatus :: forall r . (forall a . Format r a -> a) -> IO r
formatStatus formatter = OQ.dumpState oq formatter
diffusion :: Diffusion IO
diffusion = Diffusion {..}
runInternals = RunFullDiffusionInternals
{ runFullDiffusionInternals = runDiffusionLayer
}
return (diffusion, runInternals)
-- | Create kademlia, network-transport, and run the outbound queue's
-- dequeue thread.
runDiffusionLayerFull
    :: Trace IO (Severity, Text)
    -> Transport
    -> OQ.OutboundQ EnqueuedConversation NodeId Bucket
    -> Microsecond -- ^ Conversation establish timeout
    -> VerInfo
    -> Word16 -- ^ Default port to use for resolved hosts (from dns)
    -> Maybe (KademliaParams, Bool)
    -> Maybe SubscriptionWorker
    -> Maybe EkgNodeMetrics
    -> MVar (Map NodeId Timer) -- ^ Keepalive timer.
    -> IO Millisecond -- ^ Slot duration; may change over time.
    -> SubscriptionStates NodeId
    -> (VerInfo -> [Listener])
    -> (FullDiffusionInternals -> IO x)
    -> IO x
runDiffusionLayerFull logTrace
                      transport
                      oq
                      convEstablishTimeout
                      ourVerInfo
                      defaultPort
                      mKademliaParams
                      mSubscriptionWorker
                      mEkgNodeMetrics
                      keepaliveTimerVar
                      slotDuration
                      subscriptionStates
                      listeners
                      k =
    -- Kademlia (if requested) and the time-warp node are both bracketed, so
    -- they are torn down when the continuation 'k' finishes or throws.
    maybeBracketKademliaInstance logTrace mKademliaParams defaultPort $ \mKademlia ->
    timeWarpNode logTrace transport convEstablishTimeout ourVerInfo listeners $ \nd converse -> do
        -- Concurrently run the dequeue thread, subscription thread, and
        -- main action.
        let sendActions :: SendActions
            sendActions = makeSendActions logTrace ourVerInfo oqEnqueue converse
            dequeueDaemon = OQ.dequeueThread oq (sendMsgFromConverse converse)
            -- If there's no subscription thread, the main action with
            -- outbound queue is all we need, but if there is a subscription
            -- thread, we run it forever and race it with the others. This
            -- ensures that
            --   1) The subscription system never stops trying.
            --   2) The subscription system is incapable of stopping shutdown
            --      (unless it uninterruptible masks exceptions indefinitely).
            -- FIXME perhaps it's better to let the subscription thread
            -- decide if it should go forever or not. Or, demand it does,
            -- by choosing `forall x . IO x` as the result.
            withSubscriptionDaemon :: IO a -> IO (Either x a)
            withSubscriptionDaemon =
                case mSubscriptionThread (fst <$> mKademlia) sendActions of
                    Nothing -> fmap Right
                    Just subscriptionThread -> \other ->
                        -- A subscription worker can finish normally (without
                        -- exception). But we don't want that, so we'll run it
                        -- forever.
                        let subForever = subscriptionThread >> subForever
                        in  race subForever other
            mainAction = do
                maybe (pure ()) (flip registerEkgNodeMetrics nd) mEkgNodeMetrics
                maybe (pure ()) (joinKademlia logTrace) mKademlia
                let fdi = FullDiffusionInternals
                        { fdiNode = nd
                        , fdiConverse = converse
                        , fdiSendActions = sendActions
                        }
                t <- k fdi
                -- If everything went well, stop the outbound queue
                -- normally. If 'k fdi' threw an exception, the dequeue
                -- thread ('dequeueDaemon') will be killed.
                OQ.waitShutdown oq
                pure t
            action = Concurrently dequeueDaemon *> Concurrently mainAction
        outcome <- withSubscriptionDaemon (runConcurrently action)
        case outcome of
            -- The 'Left' branch is unreachable: the subscription loop never
            -- returns, so 'race' can only produce the main action's result.
            Left impossible -> pure impossible
            Right t -> pure t
  where
    -- Enqueue a conversation on the outbound queue and hand back the
    -- per-peer packet-status vars so the caller can await delivery.
    oqEnqueue :: Msg
              -> (NodeId -> VerInfo -> Conversation PackingType t)
              -> IO (Map NodeId (STM.TVar (OQ.PacketStatus t)))
    oqEnqueue msgType l = do
        itList <- OQ.enqueue oq msgType (EnqueuedConversation (msgType, l))
        return (M.fromList itList)
    -- Choose the subscription worker (if any) that was configured.
    mSubscriptionThread :: Maybe KademliaDHTInstance
                        -> SendActions
                        -> Maybe (IO ())
    mSubscriptionThread mKademliaInst sactions = case mSubscriptionWorker of
        Just (SubscriptionWorkerBehindNAT dnsDomains) -> Just $
            dnsSubscriptionWorker logTrace oq defaultPort dnsDomains keepaliveTimerVar slotDuration subscriptionStates sactions
        Just (SubscriptionWorkerKademlia nodeType valency fallbacks) -> case mKademliaInst of
            -- Caller wanted a DHT subscription worker, but not a Kademlia
            -- instance. Shouldn't be allowed, but oh well FIXME later.
            Nothing -> Nothing
            Just kInst -> Just $ dhtSubscriptionWorker
                logTrace
                oq
                kInst
                nodeType
                valency
                fallbacks
                sactions
        Nothing -> Nothing
    sendMsgFromConverse
        :: Converse PackingType PeerData
        -> OQ.SendMsg EnqueuedConversation NodeId
    sendMsgFromConverse converse (EnqueuedConversation (_, k)) nodeId =
        converseWith converse nodeId (k nodeId)
-- | Bring up a time-warp node. It will come down when the continuation ends.
timeWarpNode
    :: Trace IO (Severity, Text)
    -> Transport
    -> Microsecond -- Timeout.
    -> VerInfo
    -> (VerInfo -> [Listener])
    -> (Node -> Converse PackingType PeerData -> IO t)
    -> IO t
timeWarpNode logTrace transport convEstablishTimeout ourVerInfo listeners k = do
    stdGen <- newStdGen
    -- NOTE(review): receive/connect delays are hard-wired off (always
    -- 'Nothing'); they exist for test configurations only.
    node logTrace mkTransport mkReceiveDelay mkConnectDelay stdGen bipPacking ourVerInfo nodeEnv $ \theNode ->
        NodeAction listeners $ k theNode
  where
    mkTransport = simpleNodeEndPoint transport
    mkReceiveDelay = const (pure Nothing)
    mkConnectDelay = const (pure Nothing)
    -- The conversation-establish timeout doubles as the node's ACK timeout.
    nodeEnv = defaultNodeEnvironment { nodeAckTimeout = convEstablishTimeout }
----------------------------------------------------------------------------
-- Kademlia
----------------------------------------------------------------------------
-- | Start a Kademlia DHT instance listening on 0.0.0.0 at the given
-- default port.  Duplicate initial peers are removed before starting.
createKademliaInstance
    :: Trace IO (Severity, Text)
    -> KademliaParams
    -> Word16 -- ^ Default port to bind to.
    -> IO KademliaDHTInstance
createKademliaInstance logTrace kp defaultPort =
    let dedupedParams = kp { kpPeers = ordNub (kpPeers kp) }
        bindAddress   = ("0.0.0.0", defaultPort)
    in  startDHTInstance logTrace dedupedParams bindAddress
-- | RAII for 'KademliaDHTInstance': create the instance, run the action,
-- and always stop the instance afterwards (even on exception).
bracketKademliaInstance
    :: Trace IO (Severity, Text)
    -> (KademliaParams, Bool)
    -> Word16
    -> ((KademliaDHTInstance, Bool) -> IO a)
    -> IO a
bracketKademliaInstance logTrace (kp, mustJoin) defaultPort action =
    bracket acquire stopDHTInstance run
  where
    acquire   = createKademliaInstance logTrace kp defaultPort
    run kinst = action (kinst, mustJoin)
-- | Like 'bracketKademliaInstance', but Kademlia is optional: with no
-- parameters the continuation simply runs with 'Nothing'.
maybeBracketKademliaInstance
    :: Trace IO (Severity, Text)
    -> Maybe (KademliaParams, Bool)
    -> Word16
    -> (Maybe (KademliaDHTInstance, Bool) -> IO a)
    -> IO a
maybeBracketKademliaInstance logTrace mParams defaultPort k =
    case mParams of
        Nothing -> k Nothing
        Just kp -> bracketKademliaInstance logTrace kp defaultPort (k . Just)
-- | Join the Kademlia network using the instance's initial peers.
-- When joining is mandatory, keep retrying every 'retryInterval';
-- otherwise attempt once and swallow failure.
joinKademlia :: Trace IO (Severity, Text) -> (KademliaDHTInstance, Bool) -> IO ()
joinKademlia logTrace (kInst, mustJoin)
    | mustJoin  = kademliaJoinNetworkRetry logTrace kInst peers retryInterval
    | otherwise = kademliaJoinNetworkNoThrow logTrace kInst peers
  where
    peers = kdiInitialPeers kInst
    retryInterval :: Second
    retryInterval = 5
| input-output-hk/pos-haskell-prototype | lib/src/Pos/Diffusion/Full.hs | mit | 28,050 | 0 | 22 | 8,890 | 4,741 | 2,597 | 2,144 | -1 | -1 |
{-# htermination (==) :: Ordering -> Ordering -> Bool #-}
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/Prelude_EQEQ_7.hs | mit | 58 | 0 | 2 | 10 | 3 | 2 | 1 | 1 | 0 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DuplicateRecordFields #-}
module Handler.Api.Snippets where
import Import
import qualified Data.Aeson as Aeson
import qualified GHC.Generics as GHC
import qualified Handler.Snippets as SnippetsHandler
import qualified Handler.Snippet as SnippetHandler
import qualified Handler.UserSnippets as UserSnippetsHandler
import qualified Glot.Pagination as Pagination
import qualified Util.Persistent as Persistent
import qualified Util.Handler as HandlerUtils
import qualified Data.Time.Format.ISO8601 as ISO8601
import qualified Text.Read as Read
import qualified Data.Text.Encoding as Encoding
import qualified Data.Text.Encoding.Error as Encoding.Error
import qualified Glot.Snippet as Snippet
import qualified Glot.Language as Language
import qualified Network.Wai as Wai
import qualified Data.List.NonEmpty as NonEmpty
import Data.Function ((&))
-- | Snippet representation returned by list endpoints (no file contents).
data ApiListSnippet = ApiListSnippet
    { id :: Text            -- ^ The snippet slug.
    , url :: Text           -- ^ Absolute API url of the snippet.
    , language :: Language.Id
    , title :: Text
    , public :: Bool
    , owner :: Text         -- ^ Owner's username, or "anonymous".
    , filesHash :: Text     -- ^ Always "<deprecated>"; kept for API compatibility.
    , created :: Text       -- ^ ISO-8601 timestamp.
    , modified :: Text      -- ^ ISO-8601 timestamp.
    }
    deriving (Show, GHC.Generic)

instance Aeson.ToJSON ApiListSnippet

-- | Full snippet representation: the list fields plus the files.
data ApiSnippet = ApiSnippet
    { id :: Text
    , url :: Text
    , language :: Language.Id
    , title :: Text
    , public :: Bool
    , owner :: Text
    , filesHash :: Text
    , created :: Text
    , modified :: Text
    , files :: [ApiFile]
    }
    deriving (Show, GHC.Generic)

instance Aeson.ToJSON ApiSnippet

-- | One file belonging to a snippet.
data ApiFile = ApiFile
    { name :: Text
    , content :: Text -- ^ Contents decoded as UTF-8 (leniently; see 'toApiFile').
    }
    deriving (Show, GHC.Generic)

instance Aeson.ToJSON ApiFile
-- | Parse a query-string parameter as a decimal 'Int'.
-- Anything 'readMaybe' rejects yields 'Nothing'.
intParam :: Text -> Maybe Int
intParam = Read.readMaybe . unpack
-- | GET a single snippet (files and owner included) as JSON, looked up
-- by its slug.  Responds 404 if the slug is unknown.
getApiSnippetR :: Text -> Handler Value
getApiSnippetR slug = do
    renderUrl <- getUrlRender
    (snippet, files, profile) <- runDB $ do
        Entity snippetId snippet <- getBy404 $ UniqueCodeSnippetSlug slug
        files <- selectList [CodeFileCodeSnippetId ==. snippetId] [Asc CodeFileId]
        -- Anonymous snippets have no user id, hence no profile.
        profile <- maybe (pure Nothing) (getBy . UniqueProfile) (codeSnippetUserId snippet)
        pure (snippet, map entityVal files, profile)
    let apiSnippet = toApiSnippet renderUrl snippet files (fmap entityVal profile)
    pure $ Aeson.toJSON apiSnippet
-- | POST: create a snippet (and its files) from a JSON payload, then
-- respond with the freshly created snippet via 'getApiSnippetR'.
-- Authentication is optional; anonymous snippets carry no user id.
postApiSnippetsR :: Handler Value
postApiSnippetsR = do
    req <- reqWaiRequest <$> getRequest
    body <- liftIO $ Wai.strictRequestBody req
    now <- liftIO getCurrentTime
    maybeApiUser <- HandlerUtils.lookupApiUser
    let maybeUserId = fmap apiUserUserId maybeApiUser
    case Aeson.eitherDecode' body of
        Left err ->
            sendResponseStatus status400 $ object ["message" .= ("Invalid request body: " <> err)]
        Right payload -> do
            -- A fresh slug derived from the creation time.
            let snippetSlug = Snippet.newSlug now
            let snippet = Snippet.toCodeSnippet snippetSlug now now maybeUserId payload
            runDB $ do
                snippetId <- insert snippet
                insertMany_ (map (Snippet.toCodeFile snippetId) (NonEmpty.toList $ Snippet.files payload))
                pure ()
            getApiSnippetR snippetSlug
-- | PUT: replace an existing snippet and its files from a JSON payload.
-- Only the owner may update; the original creation time is preserved.
putApiSnippetR :: Text -> Handler Value
putApiSnippetR snippetSlug = do
    req <- reqWaiRequest <$> getRequest
    body <- liftIO $ Wai.strictRequestBody req
    now <- liftIO getCurrentTime
    maybeApiUser <- HandlerUtils.lookupApiUser
    let maybeUserId = fmap apiUserUserId maybeApiUser
    case Aeson.eitherDecode' body of
        Left err ->
            sendResponseStatus status400 $ object ["message" .= ("Invalid request body: " <> err)]
        Right payload -> do
            runDB $ do
                Entity snippetId oldSnippet <- getBy404 (UniqueCodeSnippetSlug snippetSlug)
                lift $ SnippetHandler.ensureSnippetOwner maybeUserId oldSnippet
                -- Keep the original 'created' time; only 'modified' moves.
                let snippet = Snippet.toCodeSnippet snippetSlug (codeSnippetCreated oldSnippet) now maybeUserId payload
                replace snippetId snippet
                -- Files are replaced wholesale: delete old rows, insert new.
                deleteWhere [ CodeFileCodeSnippetId ==. snippetId ]
                insertMany_ (map (Snippet.toCodeFile snippetId) (NonEmpty.toList $ Snippet.files payload))
                pure ()
            getApiSnippetR snippetSlug
-- | DELETE: remove a snippet and all its files.  Only the owner may
-- delete; unknown slugs respond 404.  On success: 204 No Content.
deleteApiSnippetR :: Text -> Handler Value
deleteApiSnippetR slug = do
    maybeApiUser <- HandlerUtils.lookupApiUser
    let maybeUserId = fmap apiUserUserId maybeApiUser
    runDB $ do
        Entity snippetId snippet <- getBy404 $ UniqueCodeSnippetSlug slug
        lift $ SnippetHandler.ensureSnippetOwner maybeUserId snippet
        -- Delete the child rows first so no orphan files remain.
        deleteWhere [ CodeFileCodeSnippetId ==. snippetId ]
        delete snippetId
        pure ()
    sendResponseNoContent
-- | GET: paginated snippet listing as JSON.  Query params: @page@,
-- @per_page@ (default 100), @language@, and optionally @owner@ (a
-- username).  A pagination @Link@ header is added to the response.
getApiSnippetsR :: Handler Value
getApiSnippetsR = do
    currentPage <- HandlerUtils.pageNo <$> lookupGetParam "page"
    perPageParam <- lookupGetParam "per_page"
    languageParam <- lookupGetParam "language"
    ownerParam <- lookupGetParam "owner"
    maybeApiUser <- HandlerUtils.lookupApiUser
    let maybeUserId = fmap apiUserUserId maybeApiUser
    let snippetsPerPage = fromMaybe 100 (perPageParam >>= intParam)
    let limitOffset = Persistent.LimitOffset
            { limit = snippetsPerPage
            , offset = (currentPage - 1) * snippetsPerPage
            }
    renderUrl <- getUrlRender
    case ownerParam of
        Just username -> do
            Entity _ profile <- runDB $ getBy404Json "Profile not found" (UniqueUsername username)
            -- Which of this owner's snippets the requester may see.
            let allowedUserSnippets = UserSnippetsHandler.allowedUserSnippetsFromLoggedInUser (profileUserId profile) maybeUserId
            Persistent.EntitiesWithCount{..} <- Persistent.getEntitiesWithCount (UserSnippetsHandler.getEntitiesQuery limitOffset allowedUserSnippets languageParam)
            let snippets = map entityVal entities :: [CodeSnippet]
            let pagination = Pagination.fromPageData
                    Pagination.PageData
                        { currentPage = currentPage
                        , totalEntries = entitiesCount
                        , entriesPerPage = snippetsPerPage
                        }
            addLinkHeader pagination
            pure $ Aeson.toJSON (map (\snippet -> toListSnippet renderUrl snippet (Just profile)) snippets)
        Nothing -> do
            Persistent.EntitiesWithCount{..} <- Persistent.getEntitiesWithCount (SnippetsHandler.getEntitiesQuery limitOffset languageParam)
            let SnippetsHandler.SnippetEntriesWithPagination{..} = SnippetsHandler.SnippetEntriesWithPagination
                    { entries = map (uncurry SnippetsHandler.snippetEntryFromEntity) entities
                    , pagination = Pagination.fromPageData
                        Pagination.PageData
                            { currentPage = currentPage
                            , totalEntries = entitiesCount
                            , entriesPerPage = snippetsPerPage
                            }
                    }
            addLinkHeader pagination
            pure $ Aeson.toJSON (map (listSnippetFromSnippetEntry renderUrl) entries)
-- | Build an 'ApiListSnippet' from a combined snippet-plus-profile entry.
listSnippetFromSnippetEntry :: (Route App -> Text) -> SnippetsHandler.SnippetEntry -> ApiListSnippet
listSnippetFromSnippetEntry renderUrl SnippetsHandler.SnippetEntry{..} =
    toListSnippet renderUrl entrySnippet entryProfile
-- | Project a database snippet (plus optional owner profile) into the
-- JSON shape used by list endpoints.
toListSnippet :: (Route App -> Text) -> CodeSnippet -> Maybe Profile -> ApiListSnippet
toListSnippet renderUrl codeSnippet maybeProfile =
    ApiListSnippet
        { id = slug
        , url = renderUrl (ApiSnippetR slug)
        , language = codeSnippetLanguage codeSnippet
        , title = codeSnippetTitle codeSnippet
        , public = codeSnippetPublic codeSnippet
        , owner = maybe "anonymous" profileUsername maybeProfile
        , filesHash = "<deprecated>"
        , created = isoText (codeSnippetCreated codeSnippet)
        , modified = isoText (codeSnippetModified codeSnippet)
        }
  where
    slug = codeSnippetSlug codeSnippet
    isoText = pack . ISO8601.iso8601Show
-- | Project a database snippet, its files, and its optional owner
-- profile into the full JSON shape used by single-snippet endpoints.
toApiSnippet :: (Route App -> Text) -> CodeSnippet -> [CodeFile] -> Maybe Profile -> ApiSnippet
toApiSnippet renderUrl codeSnippet codeFiles maybeProfile =
    ApiSnippet
        { id = slug
        , url = renderUrl (ApiSnippetR slug)
        , language = codeSnippetLanguage codeSnippet
        , title = codeSnippetTitle codeSnippet
        , public = codeSnippetPublic codeSnippet
        , owner = maybe "anonymous" profileUsername maybeProfile
        , filesHash = "<deprecated>"
        , created = isoText (codeSnippetCreated codeSnippet)
        , modified = isoText (codeSnippetModified codeSnippet)
        , files = map toApiFile codeFiles
        }
  where
    slug = codeSnippetSlug codeSnippet
    isoText = pack . ISO8601.iso8601Show
-- | Project a database file row into its JSON shape.
toApiFile :: CodeFile -> ApiFile
toApiFile file = ApiFile
    { name = codeFileName file
    , content = decodedContent
    }
  where
    -- Stored bytes may not be valid UTF-8; decode leniently rather
    -- than crash on a bad byte sequence.
    decodedContent =
        Encoding.decodeUtf8With Encoding.Error.lenientDecode (codeFileContent file)
-- | Like 'getBy404', but on a miss responds with a JSON body
-- (@{"error": msg}@) instead of an HTML 404 page — suitable for API routes.
getBy404Json :: (PersistUniqueRead backend, PersistRecordBackend val backend, MonadIO m, MonadHandler m)
             => Text
             -> Unique val
             -> ReaderT backend m (Entity val)
getBy404Json errorMsg key = do
    mres <- getBy key
    case mres of
        Nothing ->
            sendResponseStatus status404 $ object ["error" .= errorMsg]
        Just res ->
            return res
-- | Attach a pagination @Link@ header (RFC 5988 style) to the response,
-- preserving the request's other query parameters in each link.
addLinkHeader :: Pagination.Pagination -> Handler ()
addLinkHeader pagination = do
    queryParams <- reqGetParams <$> getRequest
    renderUrlParams <- getUrlRenderParams
    let pageLinks = Pagination.toPageLinks pagination
    addHeader "Link" (toLinkHeaderValue renderUrlParams queryParams pageLinks)
-- | Render pagination links as a single @Link@ header value:
-- @<url>; rel="next", <url>; rel="prev", ...@
toLinkHeaderValue :: (Route App -> [(Text, Text)] -> Text) -> [(Text, Text)] -> [Pagination.PageLink] -> Text
toLinkHeaderValue renderUrlParams otherQueryParams pageLinks =
    intercalate ", " (map renderLink pageLinks)
  where
    -- Each generated link supplies its own "page" parameter, so any
    -- caller-provided one is dropped first.
    paramsSansPage = filter ((/= "page") . fst) otherQueryParams
    renderLink Pagination.PageLink{..} = mconcat
        [ "<"
        , renderUrlParams ApiSnippetsR (("page", pageLinkPage) : paramsSansPage)
        , ">; rel=\""
        , pageLinkRel
        , "\""
        ]
| prasmussen/glot-www | Handler/Api/Snippets.hs | mit | 10,483 | 0 | 21 | 2,794 | 2,525 | 1,292 | 1,233 | -1 | -1 |
-- |
-- Reexports of most definitions from \"mtl\" and \"transformers\".
--
-- For details check out the source.
module MTLPrelude
(
module Exports,
)
where
-- Cont
-------------------------
import Control.Monad.Cont.Class
as Exports
import Control.Monad.Trans.Cont
as Exports
hiding (callCC)
-- Except
-------------------------
import Control.Monad.Error.Class
as Exports
import Control.Monad.Trans.Except
as Exports
(ExceptT(ExceptT), Except, except, runExcept, runExceptT,
mapExcept, mapExceptT, withExcept, withExceptT)
-- Identity
-------------------------
import Data.Functor.Identity
as Exports
-- IO
-------------------------
import Control.Monad.IO.Class
as Exports
-- Maybe
-------------------------
import Control.Monad.Trans.Maybe
as Exports
-- Reader
-------------------------
import Control.Monad.Reader.Class
as Exports
import Control.Monad.Trans.Reader
as Exports
(Reader, runReader, mapReader, withReader,
ReaderT(ReaderT), runReaderT, mapReaderT, withReaderT)
-- RWS
-------------------------
import Control.Monad.RWS.Class
as Exports
import Control.Monad.Trans.RWS.Strict
as Exports
(RWS, rws, runRWS, evalRWS, execRWS, mapRWS, withRWS,
RWST(RWST), runRWST, evalRWST, execRWST, mapRWST, withRWST)
-- State
-------------------------
import Control.Monad.State.Class
as Exports
import Control.Monad.Trans.State.Strict
as Exports
(State, runState, evalState, execState, mapState, withState,
StateT(StateT), runStateT, evalStateT, execStateT, mapStateT, withStateT)
-- Trans
-------------------------
import Control.Monad.Trans.Class
as Exports
-- Writer
-------------------------
import Control.Monad.Writer.Class
as Exports
import Control.Monad.Trans.Writer.Strict
as Exports
(Writer, runWriter, execWriter, mapWriter,
WriterT(..), execWriterT, mapWriterT)
| nikita-volkov/mtl-prelude | library/MTLPrelude.hs | mit | 1,879 | 0 | 6 | 276 | 364 | 268 | 96 | 46 | 0 |
{-# htermination zip3 :: [a] -> [b] -> [c] -> [(a,b,c)] #-}
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/Prelude_zip3_1.hs | mit | 60 | 0 | 2 | 12 | 3 | 2 | 1 | 1 | 0 |
module Irg.Lab1.Bresenham (bresenham) where
import Data.List (sort,unfoldr)
-- | Rasterise the line segment between two points with Bresenham's
-- integer line algorithm.  Handles every octant by (a) tracing steep
-- lines with the axes swapped and (b) ordering the endpoints, so the
-- result is independent of argument order.  Both endpoints are included.
bresenham :: (Num a, Ord a) => (a, a) -> (a, a) -> [(a, a)]
bresenham p q = map orient (walk lx ly 0)
  where
    -- Steeper than 45 degrees?  Then trace with x and y swapped, and
    -- swap back on emission ('orient' is its own inverse).
    steep = abs (snd q - snd p) > abs (fst q - fst p)
    orient (x, y) = if steep then (y, x) else (x, y)
    -- Order the (possibly swapped) endpoints so we always walk left to
    -- right: (lx,ly) is the lower endpoint, (hx,hy) the upper.
    ((lx, ly), (hx, hy)) =
      let a = orient p
          b = orient q
      in if a <= b then (a, b) else (b, a)
    run   = hx - lx
    rise  = abs (hy - ly)
    ystep = if ly < hy then 1 else -1
    -- Accumulate the error term; step in y whenever the accumulated
    -- error crosses half a cell (2*e >= run).
    walk x y err
      | x > hx    = []
      | otherwise =
          let e = err + rise
          in if 2 * e >= run
               then (x, y) : walk (x + 1) (y + ystep) (e - run)
               else (x, y) : walk (x + 1) y e
| DominikDitoIvosevic/Uni | IRG/src/Irg/Lab1/Bresenham.hs | mit | 819 | 0 | 12 | 254 | 397 | 228 | 169 | 17 | 4 |
{-# LANGUAGE OverloadedStrings #-}
module Y2017.M12.D15.Solution where
import Data.Maybe (fromJust)
import Data.Monoid ((<>))
import Data.Text (Text)
import qualified Data.Text as T
import Network.HTTP (urlEncode)
import System.Directory
{--
New topic!
Today we're going to create an oembed service where we serve some simple
resources, like, goats, ... because I like kids; they are SO CUTE!
So, there are three things that make up a kids oembed service:
1. the resources themselves. They are here at this directory
--}
-- below import available via 1HaskellDay git repository
import Control.DList
import Data.XHTML
-- | Directory (relative to the repository root) holding the goat images.
goatsDir :: FilePath
goatsDir = "Y2017/M12/D13/goats/"
{--
... but, actually, you can defer that to the resource, itself, if you have
a repository of your assets, so, let's make that today's exercise, instead.
Create a repository of the goats at that inode so that it returns your URL
concatenated with the oembed request for the specific resource.
... so that also means you have to know how to extract files from an inode.
--}
-- | List the asset files in a directory, excluding dot-entries such as
-- "." and ".." (and e.g. ".DS_Store").
goatsFiles :: FilePath -> IO [FilePath]
goatsFiles = fmap (filter visible) . getDirectoryContents
  where
    -- 'take 1' is total, unlike the previous '(/= '.') . head', which
    -- would crash on an (admittedly unlikely) empty entry name.
    visible entry = take 1 entry /= "."
{--
>>> goatsFiles goatsDir
["grace-n-goats.jpg","goat-small-town.jpg","CjPcyoUUoAAo5li.jpg",
"CjPcyySWUAE2-E1.jpg","goats3.png","goats2.png","goats1.png","goats-deux.png",
"goat-kid-sweater.jpg","ChIBH-DU0AE7Qzt.jpg","too-close.jpg",
"CYVG4BoWwAAKfSt.jpg","goatz.jpg","CFsge-vUMAIUmW4.jpg","CFujXdtUIAAk_HW.jpg",
"goat-india.jpg"]
--}
-- return the path to the goats files prepended to only the goats files,
-- themselves (so, no "." or "..", etc, if you please)
{--
Now from those files, create a webpage that says something fetching, like,
HERE ARE THE GOATS!
And then list the links to the assets that, when selected, makes the oembed
request to your web server. At your URL. That you (will) have.
What does an oembed request look like. Let's look at a sample:
http://www.flickr.com/services/oembed/?format=json&url=http%3A//www.flickr.com/photos/bees/2341623661/
A request is composed of several parts.
--}
-- 1. the uri of the (specific) oembed service
-- NOTE(review): hard-coded to a local development server.
rootURI :: FilePath
rootURI = "http://127.0.0.1:8080/"
-- | Endpoint that answers oembed requests for the assets below.
oembedService :: FilePath
oembedService = rootURI ++ "services/oembed.php"
-- 2. the query string which includes the url and which (may or may not)
-- include the format
-- | Response formats the oembed service can be asked for; 'show'
-- yields the lowercase query-string value ("json").
data Format = JSON
   deriving Eq
instance Show Format where
  show = const "json"
-- | Build the query string for an oembed request: an optional
-- @format=...&@ pair followed by the url-encoded absolute asset URL.
--
-- >>> query (Just JSON) "bleh"
-- "?format=json&url=http%3A%2F%2F127.0.0.1%3A8080%2Fbleh"
query :: Maybe Format -> FilePath -> String
query format relativeURI =
  '?' : formatPart ++ "url=" ++ urlEncode (rootURI ++ relativeURI)
  where
    -- 'maybe' replaces the original 'fromJust'/'(<>)'-on-Maybe trick,
    -- which was total only by a non-obvious argument.
    formatPart = maybe "" (\f -> "format=" ++ show f ++ "&") format
{--
>>> query (Just JSON) "bleh"
"?format=json&url=http://127.0.0.1:8080/bleh"
>>> query Nothing "bleh"
"?url=http://127.0.0.1:8080/bleh"
--}
-- and with that, you can create your webpage with your goatsFiles links:
-- | The goats index page: a fetching heading plus a bordered table of
-- linked goat images, tiled two to a row.
goatsWebPage :: [FilePath] -> Document Element Element
goatsWebPage goats =
   Doc []
       [Elt "center" [] [E (Elt "h2" [] [S "Goats Я Awesome!"])],
        table -- we'll tile the goats 2 to a row
           (tileContent 2 (map (E . linkedImg) goats))]
{--
>>> goatsFiles goatsDir >>= putStrLn . pprint . rep . goatsWebPage
generates your html
--}
-- | Split content into rows of @grp@ elements; a final short row keeps
-- the leftovers, e.g. @tileContent 2 [1..5] == [[1,2],[3,4],[5]]@.
--
-- Fixed: the previous implementation (via 'tile') dropped the element
-- following each completed row, silently losing every (grp+1)-th item.
-- As before, an empty input yields @[[]]@ (one empty row).
tileContent :: Int -> [a] -> [[a]]
tileContent grp xs
  | grp <= 0  = [xs] -- degenerate group size: everything in one row
  | otherwise = go xs
  where
    go ys = case splitAt grp ys of
      (row, [])   -> [row]
      (row, rest) -> row : go rest
-- | Worker that walks the list, accumulating the current row in a
-- difference list and emitting it after every @totes@ elements.
--
-- Fixed: when the counter reached zero the element under scrutiny was
-- discarded (the recursive call passed @t@, not @h:t@); it must instead
-- start the next row.
tile :: Int -> Int -> DList a -> [a] -> [[a]]
tile totes cnt accm [] = [dlToList accm]
tile totes cnt accm (h:t) =
  if cnt == 0 then dlToList accm:tile totes totes emptyDL (h:t)
  else tile totes (pred cnt) (accm <| h) t
-- creating a table seems a common enough thing in HTML, we'll move this
-- to Data.XHTML when we've got it working here.
-- | Render rows of content as a bordered HTML table.
table :: [[Content]] -> Element
table = Elt "table" [Attrib "border" "1"] . map (E . Elt "tr" [] . mapEachTR)
-- | Wrap each cell of a row in its own @td@ element.
mapEachTR :: [Content] -> [Content]
mapEachTR = map (E . Elt "td" [] . pure)
-- but first let's generate the entire URL from an asset:
-- | The full oembed request URL (JSON format) for one goat asset.
assetURL :: FilePath -> String
assetURL asset = oembedService ++ query (Just JSON) (goatsDir ++ asset)
{--
>>> take 2 . map assetURL <$> goatsFiles goatsDir
["http://127.0.0.1:8080/services/oembed/?format=json&url=http://127.0.0.1:8080/Y2017/M12/D13/goats/grace-n-goats.jpg",
"http://127.0.0.1:8080/services/oembed/?format=json&url=http://127.0.0.1:8080/Y2017/M12/D13/goats/goat-small-town.jpg"]
--}
-- | An image wrapped in an anchor, so that clicking a goat fires the
-- oembed request for that asset.
linkedImg :: FilePath -> Element
linkedImg asset = linkURL asset (E (imageURL asset))
-- | An @img@ element whose @src@ is the asset's absolute URL.
imageURL :: FilePath -> Element
imageURL asset =
  Elt "img" [Attrib "src" (rootURI ++ goatsDir ++ asset)] []
-- | Wrap some content (which may be the image itself) in an anchor
-- pointing at the oembed service request for the given asset.
linkURL :: FilePath -> Content -> Element
linkURL asset body =
  Elt "a" [Attrib "href" href] [body]
  where
    href = oembedService ++ query (Just JSON) asset
{-- BONUS -----------------------------------------------------------------
Of course, who wants to look at raw oembed requests? You can hide those
requests in the tiled images, right? Or something like that.
Get fancy with your output that creates your web page.
Now.
Create an application that, given a directory of goat-assets, outputs a
'User Friendly' goats web page.
--}
-- | Entry point: given a directory of goat assets, print the generated
-- goats web page; any other argument shape prints usage instead.
main' :: [String] -> IO ()
main' [dir] = goatsFiles dir >>= putStrLn . pprint . rep . goatsWebPage
main' _ = putStrLn (unlines ["", "oembedder <dir>", "",
         "\twhere dir is the directory of the images to oembed",""])
-- We'll look at creating the oembed response tomorrow
| geophf/1HaskellADay | exercises/HAD/Y2017/M12/D15/Solution.hs | mit | 5,465 | 0 | 15 | 950 | 894 | 490 | 404 | 58 | 2 |
{-# htermination plusFM_C :: (b -> b -> b) -> FiniteMap Ordering b -> FiniteMap Ordering b -> FiniteMap Ordering b #-}
import FiniteMap
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/FiniteMap_plusFM_C_11.hs | mit | 136 | 0 | 3 | 24 | 5 | 3 | 2 | 1 | 0 |
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
module Network.UPnP.Types where
import Data.String
import Data.Word
import Data.Int
import Data.ByteString (ByteString)
import Network.URI
import Text.XML.Light
import Network.SSDP.Types
-- | Name of a UPnP action as it appears in a service description.
type ActionName = String
-- Phantom tags marking which level of the device/service/action
-- hierarchy a 'UpnpXml' value describes.
data Device
data Service
data Actions
data Action
data Arguments
data Statement
-- | Maps each tag to its concrete representation (always 'UpnpXml' here).
type family Upnp a
type instance Upnp Device = UpnpXml Device
type instance Upnp Service = UpnpXml Service
type instance Upnp Actions = UpnpXml Actions
type instance Upnp Action = UpnpXml Action
type instance Upnp Arguments = UpnpXml Arguments
type instance Upnp Statement = UpnpXml Statement
-- | A chunk of parsed UPnP XML together with the context needed to
-- interpret it: the parent node (if any), an associated URI, and —
-- where applicable — the service type and action name.
data UpnpXml a
  = UpnpXml { getUpnpParent :: Maybe (UpnpParent a)
            , getUpnpURI :: URI
            , getUpnpServiceType :: Maybe ServiceType
            , getUpnpActionName :: Maybe ActionName
            , getUpnpXmlContent :: [Element]
            }
-- | The parent level in the hierarchy: a service's parent is a device,
-- an action's parent is the action list, and so on.
type family UpnpParent a
type instance UpnpParent Device = Upnp Device
type instance UpnpParent Service = Upnp Device
type instance UpnpParent Actions = Upnp Service
type instance UpnpParent Action = Upnp Actions
type instance UpnpParent Arguments = Upnp Action
type instance UpnpParent Statement = Upnp Action
-- | A device type split into vendor domain, type name, and version.
data DeviceType
  = DeviceType { deviceVendorDomain :: String
               , deviceType :: String
               , deviceVersion :: String
               }
  deriving (Show, Eq)
-- | A service type, same shape as 'DeviceType'.
data ServiceType
  = ServiceType { serviceVendorDomain :: String
                , serviceType :: String
                , serviceVersion :: String
                }
  deriving (Show, Eq)
-- | A service identifier: vendor domain plus id string.
data ServiceId
  = ServiceId { serviceIdVendorDomain :: String
              , serviceId :: String
              }
  deriving (Show, Eq)
-- | The UPnP scalar data types (one constructor per UPnP type tag).
data UpnpDataType
  = Upnp_ui1
  | Upnp_ui2
  | Upnp_ui4
  | Upnp_i1
  | Upnp_i2
  | Upnp_i4
  | Upnp_int
  | Upnp_r4
  | Upnp_r8
  | Upnp_number
  | Upnp_fixed_14_4
  | Upnp_float
  | Upnp_char
  | Upnp_string
  | Upnp_date
  | Upnp_dateTime
  | Upnp_dateTime_tz
  | Upnp_time
  | Upnp_time_tz
  | Upnp_boolean
  | Upnp_bin_base64
  | Upnp_bin_hex
  | Upnp_uri
  | Upnp_uuid
  deriving (Eq, Show)
-- | Direction of an action argument.
data InOut = In | Out
  deriving (Eq, Show)
-- | An allowed value range for a state variable; bounds and step are
-- kept as raw strings from the description document.
data ValueRange = ValueRange
  { rangeMin :: String
  , rangeMax :: String
  , rangeStep :: Maybe String
  }
  deriving (Eq, Show)
-- | A UPnP state variable declaration: name, eventing flags, type, and
-- optional default / allowed values / allowed range.
data StateVariable = StateVariable
  { svarName :: String
  , svarSendEvents :: Bool
  , svarMulticast :: Bool
  , svarType :: UpnpDataType
  , svarDefault :: Maybe String
  , svarAllowedValues :: Maybe [String]
  , svarAllowedRange :: Maybe ValueRange
  }
  deriving (Eq, Show)
-- | Description of one action argument and its related state variable.
data ArgumentDesc = ArgumentDesc
  { argumentDescName :: String
  , argumentDescDirection :: InOut
  , argumentDescStateVariable :: StateVariable
  }
  deriving (Eq, Show)
-- | A typed UPnP value, one constructor per supported 'UpnpDataType'.
data UpnpValue
  = UpnpVal_ui1 Word8
  | UpnpVal_ui2 Word16
  | UpnpVal_ui4 Word32
  | UpnpVal_i1 Int8
  | UpnpVal_i2 Int16
  | UpnpVal_i4 Int32
  | UpnpVal_int Integer
  | UpnpVal_r4 Float
  | UpnpVal_r8 Double
  | UpnpVal_number Double
  | UpnpVal_fixed_14_4 Double
  | UpnpVal_float Float
  | UpnpVal_char Char
  | UpnpVal_string String
  -- not supported yet (TODO):
  -- | UpnpVal_date
  -- | UpnpVal_dateTime
  -- | UpnpVal_dateTime_tz
  -- | UpnpVal_time
  -- | UpnpVal_time_tz
  | UpnpVal_boolean Bool
  | UpnpVal_bin_base64 ByteString
  | UpnpVal_bin_hex ByteString
  | UpnpVal_uri URI
  | UpnpVal_uuid UUID
  deriving (Eq)
-- String literals default to the string-typed value.
instance IsString UpnpValue where
  fromString = UpnpVal_string
-- | A named argument value, as passed in an action invocation.
data Argument = Argument
  { argumentName :: String
  , argumentValue :: UpnpValue
  }
| mcmaniac/ssdp-upnp | src/Network/UPnP/Types.hs | mit | 4,064 | 0 | 11 | 1,303 | 817 | 489 | 328 | -1 | -1 |
module Types (
WidgetTree(..),
FunctionArgs(..),
Function(..),
Class(..),
Attribute(..),
Fluid,
FluidBlock(..),
UnbrokenOrBraced(..),
BracedStringParts(..),
HaskellIdentifier(..),
ModuleIdentifier(..),
PathElem(..),
Path,
PathIndexedElements,
FluidElement(..),
Name,
NameLookupResult(..),
ElementIdentifier(..),
IdentifierIndexedElements,
Type,
LookupTables,
GenerationError(..),
TakenNames(..)
) where
-- | Optional name attached to an element.
type Name = Maybe String
-- | One attribute of a fluid (FLTK UI designer) element; constructors
-- mirror the keywords of the .fl file format.
data Attribute
   = Code0 [BracedStringParts]
   | Code1 [BracedStringParts]
   | Code2 [BracedStringParts]
   | Code3 [BracedStringParts]
   | Label UnbrokenOrBraced
   | Callback UnbrokenOrBraced
   | XYWH (Int,Int,Int,Int)
   | Color Int
   | Maximum Int
   | Value UnbrokenOrBraced
   | Box String
   | Labelsize Int
   | Resizable
   | Visible
   | Align Int
   | Minimum Int
   | Step Double
   | SelectionColor Int
   | Labeltype String
   | Labelcolor Int
   | Labelfont Int
   | Open
   | Hide
   | ReturnType UnbrokenOrBraced
   | Shortcut String
   | Private
   | UserData UnbrokenOrBraced
   | UserDataType UnbrokenOrBraced
   | Tooltip UnbrokenOrBraced
   | Comment UnbrokenOrBraced
   | Inherits String
   | When Int
   | Hotspot
   | Modal
   | Selected
   | Local
   | Public
   | TextFont Int
   | TextSize Int
   | TextColor Int
   | SliderSize Double
   | WidgetType UnbrokenOrBraced
   | Deactivate
   | InSource
   | InHeader
   | Global
   | DownBox String
   | SizeRange (Int,Int,Int,Int)
   | LineComment String
   | AfterCode UnbrokenOrBraced
   | DerivedFromClass String
   | Filename UnbrokenOrBraced
   | Divider
   | Image UnbrokenOrBraced
   | Deimage UnbrokenOrBraced
   deriving (Show)
-- | A widget's type name as written in the .fl file.
type Type = String
-- | The widget hierarchy of a fluid file.
data WidgetTree = Group Type HaskellIdentifier [Attribute] [WidgetTree]
                | Menu Type HaskellIdentifier [Attribute] [WidgetTree]
                | Component Type HaskellIdentifier [Attribute]
                | Code [Attribute] UnbrokenOrBraced
                | StandAloneComment [Attribute] UnbrokenOrBraced
                | CodeBlock UnbrokenOrBraced [Attribute] [WidgetTree]
                | Version String
                deriving Show
-- | Raw argument list text of a fluid function, if any.
newtype FunctionArgs =
  FunctionArgs (Maybe String)
  deriving (Show)
-- | A fluid function: name, arguments, attributes, and its widgets.
data Function =
  Function HaskellIdentifier
           FunctionArgs
           [Attribute]
           [WidgetTree]
  deriving (Show)
-- | A fluid class: name, attributes, and nested blocks.
data Class =
  Class HaskellIdentifier
        [Attribute]
        [FluidBlock]
  deriving (Show)
-- | A top-level block of a fluid file.
data FluidBlock
  = FluidClass Class
  | FluidFunction Function
  | DeclBlock [Attribute]
              UnbrokenOrBraced
              [FluidBlock]
  | Decl [Attribute]
         UnbrokenOrBraced
  | Data String
         [Attribute]
  deriving (Show)
-- | A string value: either a bare token or a brace-delimited string.
data UnbrokenOrBraced
  = UnbrokenString String
  | BracedString [BracedStringParts]
  deriving (Show)
-- | The pieces a braced string may contain (escapes, nesting, ...).
data BracedStringParts
  = BareString String
  | QuotedCharCode String
  | QuotedHex Integer
  | QuotedOctal Integer
  | QuotedChar String
  | NestedBrace [BracedStringParts]
  deriving (Show)
-- | A whole parsed fluid file.
type Fluid = [FluidBlock]
-- | Either a top-level block or a widget inside one.
data FluidElement
  = BlockElement FluidBlock
  | WidgetElement WidgetTree
-- | A name tagged with whether it is usable as a Haskell identifier.
data HaskellIdentifier
  = ValidHaskell String
  | InvalidHaskell String
  | UnidentifiedFunction
  | Unidentified
  deriving (Eq,Show)
-- | A module name tagged with validity.
data ModuleIdentifier
  = ValidModule String
  | InvalidModule String
-- | One step of a path through the element tree (Down or In).
data PathElem
  = D
  | I
  deriving (Show,Eq)
-- | Result of resolving a name to path(s) in the tree.
data NameLookupResult
  = FoundUniquePath Path
  | FoundMultiplePaths [Path]
  | PathNotFound
type Path = [PathElem]
-- | Ways of referring to an element: by path, name, or type.
data ElementIdentifier
  = ElementPath Path
  | ElementName HaskellIdentifier
  | ElementType Type
type PathIndexedElements = [(Path,FluidElement)]
type IdentifierIndexedElements = [([ElementIdentifier], FluidElement)]
-- | Lookup tables keyed by identifier, type, and name respectively.
type LookupTables = ([(HaskellIdentifier,[Path])],[(Maybe Type,[Path])],[(Maybe String,[Path])])
data GenerationError = BadModuleName String
-- | Names already in use, to avoid collisions during generation.
newtype TakenNames = TakenNames [String] deriving Show
| deech/fltkhs | src/Fluid/Types.hs | mit | 3,866 | 0 | 8 | 869 | 988 | 611 | 377 | 159 | 0 |
{- |
Module : ./TPTP/Prover/Vampire.hs
Description : Interface for the Vampire theorem prover.
Copyright : (c) Eugen Kuksa University of Magdeburg 2017
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Eugen Kuksa <kuksa@iks.cs.ovgu.de>
Stability : provisional
Portability : non-portable (imports Logic)
-}
module TPTP.Prover.Vampire (vampire) where
import TPTP.Prover.Common
import TPTP.Prover.Vampire.ProofParser
import TPTP.Prover.ProofParser hiding (filterProofLines)
import TPTP.Prover.ProverState
import TPTP.Morphism
import TPTP.Sign
import TPTP.Sublogic
import Common.AS_Annotation
import Common.ProofTree
import Common.SZSOntology
import Interfaces.GenericATPState hiding (proverState)
import Logic.Prover hiding (proofLines)
-- | The Vampire prover wired into the generic TPTP prover interface.
vampire :: Prover Sign Sentence Morphism Sublogic ProofTree
vampire = mkProver binary_name prover_name sublogics runTheProver
-- | Name of the executable to invoke.
binary_name :: String
binary_name = "vampire"
-- renamed so it does not clash with Vampire for SoftFOL
prover_name :: String
prover_name = "Vampire-TPTP"
-- | This prover handles first-order form (FOF) problems.
sublogics :: Sublogic
sublogics = FOF
runTheProver :: ProverState
                {- ^ logical part containing the input Sign and axioms and possibly
                goals that have been proved earlier as additional axioms -}
             -> GenericConfig ProofTree -- ^ configuration to use
             -> Bool -- ^ True means save TPTP file
             -> String -- ^ name of the theory in the DevGraph
             -> Named Sentence -- ^ goal to prove
             -> IO (ATPRetval, GenericConfig ProofTree)
             -- ^ (retval, configuration with proof status and complete output)
runTheProver proverState cfg saveTPTPFile theoryName namedGoal = do
  let proverTimeLimitS = show $ getTimeLimit cfg
      allOptions = [ "--input_syntax", "tptp"
                   , "--proof", "tptp"
                   , "--output_axiom_names", "on"
                   , "--time_limit", proverTimeLimitS
                   , "--memory_limit", "4096"
                   , "--mode", "vampire"
                   ]
  problemFileName <-
    prepareProverInput proverState cfg saveTPTPFile theoryName namedGoal prover_name
  (_, out, _) <-
    executeTheProver binary_name (allOptions ++ [problemFileName])
  let szsStatusLine = parseStatus out
  let resultedTimeUsed = parseTimeUsed out
  let proofLines = filterProofLines out
  -- Only mine the proof object for used axioms when the SZS status says
  -- (dis)proved; otherwise fall back to the axioms of the prover state.
  axiomsUsed <- if szsProved szsStatusLine || szsDisproved szsStatusLine
    then case axiomsFromProofObject proofLines of
      (axiomNames, []) -> return axiomNames
      (_, errs) -> do
        putStrLn $ unlines errs
        return $ getAxioms proverState
    else return $ getAxioms proverState
  let (atpRetval, resultedProofStatus) =
        atpRetValAndProofStatus cfg namedGoal resultedTimeUsed axiomsUsed
          szsStatusLine prover_name
  return (atpRetval, cfg { proofStatus = resultedProofStatus
                         , resultOutput = out
                         , timeUsed = resultedTimeUsed })
| spechub/Hets | TPTP/Prover/Vampire.hs | gpl-2.0 | 3,082 | 0 | 15 | 815 | 515 | 281 | 234 | 55 | 3 |
{-# LANGUAGE OverloadedStrings #-}
module MusicBrainz.API.Iswc ( findByWorks ) where
import Control.Applicative
import Data.Map
import Data.Text
import Text.Digestive
import qualified Data.Set as Set
import MusicBrainz
import MusicBrainz.API
import qualified MusicBrainz.Data.Work as MB
-- | Look up the set of ISWCs attached to each of a set of work revisions.
findByWorks :: Form Text MusicBrainz (Map (Ref (Revision Work)) (Set.Set ISWC))
findByWorks = runApi (MB.findIswcs <$> revisionSet)
  where
    -- Collect the "revisions" form field into a set of revision refs.
    revisionSet = Set.fromList <$> "revisions" .: listOf (const revision) Nothing
| metabrainz/musicbrainz-data-service | src/MusicBrainz/API/Iswc.hs | gpl-2.0 | 479 | 0 | 11 | 63 | 137 | 79 | 58 | 13 | 1 |
module Phascal.Types where
data Type = TyInt | TyBool deriving(Show, Eq)
| zenhack/phascal | src/Phascal/Types.hs | gpl-3.0 | 74 | 0 | 6 | 12 | 27 | 16 | 11 | 2 | 0 |
module PulseLambda.CodeStream
( CodeStream
, getInQueue
, getStreamLocation
, fromString
, getCurrChar
, goNext
, isEof
, skipWhile
, readWhile
, readWhileWithMax
) where
import PulseLambda.Location
import Data.Maybe
-- | A character stream over source code that tracks the current
-- source 'Location' as characters are consumed.
data CodeStream
  = StringStream
      { getInQueue :: String          -- ^ characters not yet consumed
      , getStreamLocation :: Location -- ^ position of the next character
      }
  deriving (Show)
-- | Build a stream over a whole source string, positioned at line 1,
-- column 1 of the given file. The initial location is forced eagerly.
fromString :: String -> FilePath -> CodeStream
fromString source path = StringStream source $! Location path 1 1
-- | The character at the head of the stream, or 'Nothing' at end of input.
getCurrChar :: CodeStream -> Maybe Char
getCurrChar stream = case getInQueue stream of
  []      -> Nothing
  (c : _) -> Just c
-- | Advance the stream by one character: a newline moves the location
-- to the start of the next line, anything else to the next column.
-- At end of input the stream is returned unchanged.
goNext :: CodeStream -> CodeStream
goNext (StringStream (c : rest) (Location file line col))
  | c == '\n' = StringStream rest (Location file (line + 1) 1)
  | otherwise = StringStream rest (Location file line (col + 1))
goNext stream = stream
-- | True when no characters remain in the stream.
isEof :: CodeStream -> Bool
isEof = null . getInQueue
-- | Drop characters from the stream as long as the predicate holds.
skipWhile :: (Char -> Bool) -> CodeStream -> CodeStream
skipWhile cond stream = case getCurrChar stream of
  Just c | cond c -> skipWhile cond (goNext stream)
  _               -> stream
-- | Collect characters from the stream as long as the predicate holds,
-- returning them together with the stream positioned after them.
readWhile :: (Char -> Bool) -> CodeStream -> (String, CodeStream)
readWhile cond stream = case getCurrChar stream of
  Just c | cond c ->
    let (rest, finalStream) = readWhile cond (goNext stream)
    in (c : rest, finalStream)
  _ -> ("", stream)
-- | Like 'readWhile', but read at most @maxCount@ characters.
--
-- Fixes two defects in the previous version: the counter was never
-- advanced past the first character (the recursion delegated to the
-- unbounded 'readWhile'), and on stopping it returned the original
-- stream instead of the stream at the stopping point.
readWhileWithMax :: (Char -> Bool) -> Int -> CodeStream -> (String, CodeStream)
readWhileWithMax cond maxCount = go 0
  where
    go count stream
      | count >= maxCount = ("", stream)
      | otherwise = case getCurrChar stream of
          Just c | cond c ->
            let (rest, finalStream) = go (count + 1) (goNext stream)
            in (c : rest, finalStream)
          _ -> ("", stream)
| brunoczim/PulseLambda | PulseLambda/CodeStream.hs | gpl-3.0 | 1,893 | 0 | 14 | 427 | 659 | 349 | 310 | 49 | 2 |
{-|
Module : State
Copyright : (c) 2014 Kaashif Hymabaccus
License : GPL-3
Maintainer : kaashif@kaashif.co.uk
Stability : experimental
Portability : POSIX
-}
module State where
import qualified Player
import qualified Board
import String
-- | Data structure passed around by the main game loop.
data State = State { player :: Player.Player -- ^ The one and only player
                   , board :: Board.Board -- ^ The map of cells the game takes place in
                   , continue :: Bool -- ^ Whether to continue running, or to terminate after the current loop
                   , messages :: [String] -- ^ Message queue to be printed next loop
                   }
-- | Reads the initial board from file and creates the initial game state.
initState :: IO State
initState = do
  initialBoard <- Board.readBoard
  return State { player = Player.initial
               , board = initialBoard
               , continue = True
               , messages = []
               }
-- | Returns a function to modify the game state based on a command it is given.
process :: String -> (State -> State)
process cmd = case cmd of
  "quit"  -> discontinue
  "help"  -> addMessage terseHelp
  "north" -> move Player.north
  "east"  -> move Player.east
  "south" -> move Player.south
  "west"  -> move Player.west
  "look"  -> describe
  _       -> id
  where
    -- Movement: mark the current cell visited, step, then describe.
    move step = describe . modPlayer step . markVisited
-- | Appends a message to the queue waiting to be printed.
addMessage :: String -> State -> State
addMessage msg st = st { messages = messages st ++ [msg] }
-- | Prints all messages in the queue, word-wrapped to 72 columns.
printMessages :: State -> IO ()
printMessages st = putStr (unlines (map wrap (messages st)))
  where
    -- 'init' drops the trailing newline added by the inner 'unlines'.
    wrap = init . unlines . wrapLine 72
-- | Checks whether the game state is valid (i.e. whether "Player" is on the "Board").
valid :: State -> Bool
valid st = Board.valid (player st) (board st)

-- | Modifies the player record using the given function.
modPlayer :: (Player.Player -> Player.Player) -> State -> State
modPlayer f st = st { player = f (player st) }

-- | Sets the continue record of the state to false, stopping the game.
discontinue :: State -> State
discontinue st = st { continue = False }

-- | Adds an appropriate description for the "Player"'s location to the queue.
describe :: State -> State
describe st = addMessage (Board.describe (board st) (Player.position (player st))) st

-- | Marks the "Player"'s current location as visited.
markVisited :: State -> State
markVisited st = st { board = Board.markVisited (player st) (board st) }

-- | Clears the message queue and other ephemeral state.
reset :: State -> State
reset st = st { messages = [] }
-- | Shorter help, printed in-game. A top-level type signature was
-- previously missing.
terseHelp :: String
terseHelp = "valid commands: help, quit, north, east, south, west, look"
| kaashif/venture | src/State.hs | gpl-3.0 | 2,953 | 4 | 12 | 792 | 607 | 333 | 274 | 39 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Directory.MobileDevices.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieve Mobile Device
--
-- /See:/ <https://developers.google.com/admin-sdk/directory/ Admin Directory API Reference> for @directory.mobiledevices.get@.
module Network.Google.Resource.Directory.MobileDevices.Get
(
-- * REST Resource
MobileDevicesGetResource
-- * Creating a Request
, mobileDevicesGet
, MobileDevicesGet
-- * Request Lenses
, mdgResourceId
, mdgCustomerId
, mdgProjection
) where
import Network.Google.Directory.Types
import Network.Google.Prelude
-- | A resource alias for @directory.mobiledevices.get@ method which the
-- 'MobileDevicesGet' request conforms to.
--
-- The path is
-- @admin\/directory\/v1\/customer\/{customerId}\/devices\/mobile\/{resourceId}@
-- with an optional @projection@ query parameter.
type MobileDevicesGetResource =
     "admin" :>
       "directory" :>
         "v1" :>
           "customer" :>
             Capture "customerId" Text :>
               "devices" :>
                 "mobile" :>
                   Capture "resourceId" Text :>
                     QueryParam "projection" MobileDevicesGetProjection :>
                       QueryParam "alt" AltJSON :> Get '[JSON] MobileDevice
-- | Retrieve Mobile Device
--
-- /See:/ 'mobileDevicesGet' smart constructor.
data MobileDevicesGet = MobileDevicesGet'
    { _mdgResourceId :: !Text -- ^ immutable id of the mobile device
    , _mdgCustomerId :: !Text -- ^ immutable id of the Google Apps account
    , _mdgProjection :: !(Maybe MobileDevicesGetProjection) -- ^ field selection
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'MobileDevicesGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'mdgResourceId'
--
-- * 'mdgCustomerId'
--
-- * 'mdgProjection'
mobileDevicesGet
    :: Text -- ^ 'mdgResourceId'
    -> Text -- ^ 'mdgCustomerId'
    -> MobileDevicesGet
mobileDevicesGet resourceId customerId =
    MobileDevicesGet'
    { _mdgResourceId = resourceId
    , _mdgCustomerId = customerId
    , _mdgProjection = Nothing
    }
-- | Immutable id of Mobile Device
mdgResourceId :: Lens' MobileDevicesGet Text
mdgResourceId = lens _mdgResourceId (\st v -> st { _mdgResourceId = v })

-- | Immutable id of the Google Apps account
mdgCustomerId :: Lens' MobileDevicesGet Text
mdgCustomerId = lens _mdgCustomerId (\st v -> st { _mdgCustomerId = v })

-- | Restrict information returned to a set of selected fields.
mdgProjection :: Lens' MobileDevicesGet (Maybe MobileDevicesGetProjection)
mdgProjection = lens _mdgProjection (\st v -> st { _mdgProjection = v })
-- How a 'MobileDevicesGet' request is executed: the customer id,
-- resource id and optional projection are threaded into the client
-- derived from 'MobileDevicesGetResource'.
instance GoogleRequest MobileDevicesGet where
        type Rs MobileDevicesGet = MobileDevice
        type Scopes MobileDevicesGet =
             '["https://www.googleapis.com/auth/admin.directory.device.mobile",
               "https://www.googleapis.com/auth/admin.directory.device.mobile.action",
               "https://www.googleapis.com/auth/admin.directory.device.mobile.readonly"]
        requestClient MobileDevicesGet'{..}
          = go _mdgCustomerId _mdgResourceId _mdgProjection
              (Just AltJSON)
              directoryService
          where go
                  = buildClient
                      (Proxy :: Proxy MobileDevicesGetResource)
                      mempty
| rueshyna/gogol | gogol-admin-directory/gen/Network/Google/Resource/Directory/MobileDevices/Get.hs | mpl-2.0 | 3,903 | 0 | 17 | 917 | 475 | 282 | 193 | 79 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.CloudResourceManager.TagBindings.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists the TagBindings for the given cloud resource, as specified with
-- \`parent\`. NOTE: The \`parent\` field is expected to be a full resource
-- name:
-- https:\/\/cloud.google.com\/apis\/design\/resource_names#full_resource_name
--
-- /See:/ <https://cloud.google.com/resource-manager Cloud Resource Manager API Reference> for @cloudresourcemanager.tagBindings.list@.
module Network.Google.Resource.CloudResourceManager.TagBindings.List
(
-- * REST Resource
TagBindingsListResource
-- * Creating a Request
, tagBindingsList
, TagBindingsList
-- * Request Lenses
, tblParent
, tblXgafv
, tblUploadProtocol
, tblAccessToken
, tblUploadType
, tblPageToken
, tblPageSize
, tblCallback
) where
import Network.Google.Prelude
import Network.Google.ResourceManager.Types
-- | A resource alias for @cloudresourcemanager.tagBindings.list@ method which the
-- 'TagBindingsList' request conforms to.
--
-- The endpoint is @GET v3\/tagBindings@; every request field is passed
-- as a query parameter.
type TagBindingsListResource =
     "v3" :>
       "tagBindings" :>
         QueryParam "parent" Text :>
           QueryParam "$.xgafv" Xgafv :>
             QueryParam "upload_protocol" Text :>
               QueryParam "access_token" Text :>
                 QueryParam "uploadType" Text :>
                   QueryParam "pageToken" Text :>
                     QueryParam "pageSize" (Textual Int32) :>
                       QueryParam "callback" Text :>
                         QueryParam "alt" AltJSON :>
                           Get '[JSON] ListTagBindingsResponse
-- | Lists the TagBindings for the given cloud resource, as specified with
-- \`parent\`. NOTE: The \`parent\` field is expected to be a full resource
-- name:
-- https:\/\/cloud.google.com\/apis\/design\/resource_names#full_resource_name
--
-- /See:/ 'tagBindingsList' smart constructor.
data TagBindingsList =
  TagBindingsList'
    { _tblParent :: !(Maybe Text) -- ^ full resource name to list bindings for
    , _tblXgafv :: !(Maybe Xgafv) -- ^ V1 error format selector
    , _tblUploadProtocol :: !(Maybe Text) -- ^ e.g. \"raw\", \"multipart\"
    , _tblAccessToken :: !(Maybe Text) -- ^ OAuth access token
    , _tblUploadType :: !(Maybe Text) -- ^ legacy upload protocol
    , _tblPageToken :: !(Maybe Text) -- ^ pagination continuation token
    , _tblPageSize :: !(Maybe (Textual Int32)) -- ^ max results per page
    , _tblCallback :: !(Maybe Text) -- ^ JSONP callback name
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'TagBindingsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tblParent'
--
-- * 'tblXgafv'
--
-- * 'tblUploadProtocol'
--
-- * 'tblAccessToken'
--
-- * 'tblUploadType'
--
-- * 'tblPageToken'
--
-- * 'tblPageSize'
--
-- * 'tblCallback'
tagBindingsList
    :: TagBindingsList
tagBindingsList =
  TagBindingsList'
    Nothing -- _tblParent
    Nothing -- _tblXgafv
    Nothing -- _tblUploadProtocol
    Nothing -- _tblAccessToken
    Nothing -- _tblUploadType
    Nothing -- _tblPageToken
    Nothing -- _tblPageSize
    Nothing -- _tblCallback
-- | Required. The full resource name of a resource for which you want to
-- list existing TagBindings. E.g.
-- \"\/\/cloudresourcemanager.googleapis.com\/projects\/123\"
tblParent :: Lens' TagBindingsList (Maybe Text)
tblParent = lens _tblParent (\st v -> st { _tblParent = v })

-- | V1 error format.
tblXgafv :: Lens' TagBindingsList (Maybe Xgafv)
tblXgafv = lens _tblXgafv (\st v -> st { _tblXgafv = v })

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
tblUploadProtocol :: Lens' TagBindingsList (Maybe Text)
tblUploadProtocol = lens _tblUploadProtocol (\st v -> st { _tblUploadProtocol = v })

-- | OAuth access token.
tblAccessToken :: Lens' TagBindingsList (Maybe Text)
tblAccessToken = lens _tblAccessToken (\st v -> st { _tblAccessToken = v })

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
tblUploadType :: Lens' TagBindingsList (Maybe Text)
tblUploadType = lens _tblUploadType (\st v -> st { _tblUploadType = v })

-- | Optional. A pagination token returned from a previous call to
-- \`ListTagBindings\` that indicates where this listing should continue
-- from.
tblPageToken :: Lens' TagBindingsList (Maybe Text)
tblPageToken = lens _tblPageToken (\st v -> st { _tblPageToken = v })

-- | Optional. The maximum number of TagBindings to return in the response.
-- The server allows a maximum of 300 TagBindings to return. If
-- unspecified, the server will use 100 as the default.
tblPageSize :: Lens' TagBindingsList (Maybe Int32)
tblPageSize = lens _tblPageSize (\st v -> st { _tblPageSize = v }) . mapping _Coerce

-- | JSONP
tblCallback :: Lens' TagBindingsList (Maybe Text)
tblCallback = lens _tblCallback (\st v -> st { _tblCallback = v })
-- How a 'TagBindingsList' request is executed: every optional field is
-- passed as a query parameter to the client derived from
-- 'TagBindingsListResource'.
instance GoogleRequest TagBindingsList where
        type Rs TagBindingsList = ListTagBindingsResponse
        type Scopes TagBindingsList =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/cloud-platform.read-only"]
        requestClient TagBindingsList'{..}
          = go _tblParent _tblXgafv _tblUploadProtocol
              _tblAccessToken
              _tblUploadType
              _tblPageToken
              _tblPageSize
              _tblCallback
              (Just AltJSON)
              resourceManagerService
          where go
                  = buildClient
                      (Proxy :: Proxy TagBindingsListResource)
                      mempty
| brendanhay/gogol | gogol-resourcemanager/gen/Network/Google/Resource/CloudResourceManager/TagBindings/List.hs | mpl-2.0 | 6,125 | 0 | 18 | 1,392 | 896 | 522 | 374 | 124 | 1 |
-- | This module describes internal states and transformations en those
--
-- Internal state should be kept in every part of the ject at its minimal and
-- thus there are multiple state datatypes for different parts of inerpreter
--
-- Transformations between different states should also be provided in this file
module CoALPj.InternalState (
CoALP
, CoALPOptions
, defaultCoALPOptions
, caOptions
, program
, programPath
, resolves
, programA
, varCount
, signature
, optVerbosity
, REPLState
, replInit
, Verbosity(..)
, runIO
, iputStrLn
) where
import Control.Monad.Trans (lift, liftIO)
import Control.Monad.Trans.State (StateT)
import Control.Monad.Trans.Except (ExceptT, throwE)
import Data.Monoid (mempty)
import System.IO.Error (tryIOError)
import CoALP.Error (Err(Msg))
import CoALP.Program (Program1, Succ1, ProgramA, Signature1)
--
-- TODO refactor
--
--type CoALP = StateT IState (ErrorT IO)
--type CoALP = ErrorT Err IO
-- | The CoALPj interpreter monad: REPL state over error handling over IO.
type CoALP = StateT REPLState (ExceptT Err IO)
-- | Print a line to stdout from within the CoALP monad.
iputStrLn :: String -> CoALP ()
iputStrLn = runIO . putStrLn
-- | A version of liftIO that puts errors into the error type of the CoALPj monad
-- TODO is the use of ExceptT neccessary?
runIO :: IO a -> CoALP a
runIO action = lift $ do
  result <- liftIO (tryIOError action)
  either (throwE . Msg . show) return result
-- | General CoALPj options that affect all code.
data CoALPOptions = CoALPOptions {
    optVerbosity :: Verbosity -- ^ how much output the interpreter emits
  }
-- | Default CoALPj options: default verbosity.
defaultCoALPOptions :: CoALPOptions
defaultCoALPOptions = CoALPOptions { optVerbosity = Default }
-- | Read-Eval-Print loop state
data REPLState = REPLState {
    caOptions :: CoALPOptions       -- ^ global options in effect
  , program :: Maybe Program1       -- ^ currently loaded program, if any
  , programPath :: Maybe FilePath   -- ^ path the program was loaded from
  , resolves :: Maybe [Succ1]       -- ^ resolution results (presumably from the last query -- TODO confirm)
  , programA :: Maybe ProgramA      -- ^ annotated variant of the program -- TODO confirm
  , varCount :: Maybe Integer       -- ^ variable counter
  , signature :: Maybe Signature1   -- ^ program signature
  }
-- | The initial Read-Eval-Print loop state: default options and no
-- program loaded. ('mempty' on 'Maybe' is 'Nothing'; it is spelled
-- out explicitly here.)
replInit :: REPLState
replInit = REPLState
  { caOptions   = defaultCoALPOptions
  , program     = Nothing
  , programPath = Nothing
  , resolves    = Nothing
  , programA    = Nothing
  , varCount    = Nothing
  , signature   = Nothing
  }
-- | Verbosity levels, ordered from least to most output.
data Verbosity
  = Quiet
  | Default
  | Verbose
  | VVerbose
  deriving (Eq, Ord)
| frantisekfarka/CoALP | inter/CoALPj/InternalState.hs | lgpl-3.0 | 2,289 | 55 | 11 | 470 | 531 | 313 | 218 | 53 | 1 |
module SimpleJSON2
(
JValue (..)
, getString
, getInt
, getDouble
, getObject
, getArray
, isNull
) where
-- | A JSON value: strings, numbers, booleans, null, objects and arrays.
data JValue = JString String
            | JNumber Double
            | JBool Bool
            | JNull
            | JObject [(String, JValue)]
            | JArray [JValue]
              deriving (Eq, Ord, Show)

-- | The contained string, if the value is a 'JString'.
getString :: JValue -> Maybe String
getString (JString s) = Just s
getString _ = Nothing

-- | The contained boolean, if the value is a 'JBool'.
getBool :: JValue -> Maybe Bool
getBool (JBool b) = Just b
getBool _ = Nothing

-- | The contained number, if the value is a 'JNumber'.
getNumber :: JValue -> Maybe Double
getNumber (JNumber n) = Just n
getNumber _ = Nothing

-- | Alias for 'getNumber'. A type signature is given so the exported
-- name has an explicit, concrete type.
getDouble :: JValue -> Maybe Double
getDouble = getNumber

-- | The contained key\/value pairs, if the value is a 'JObject'.
getObject :: JValue -> Maybe [(String, JValue)]
getObject (JObject xs) = Just xs
getObject _ = Nothing

-- | The contained elements, if the value is a 'JArray'.
getArray :: JValue -> Maybe [JValue]
getArray (JArray xs) = Just xs
getArray _ = Nothing

-- | The contained number truncated to an 'Int', if the value is a
-- 'JNumber'. The signature was previously missing, leaving the result
-- type polymorphic at use sites.
getInt :: JValue -> Maybe Int
getInt (JNumber n) = Just (truncate n)
getInt _ = Nothing

-- | Whether the value is 'JNull'. The previous definition was partial:
-- it crashed on every non-null value instead of returning 'False'.
isNull :: JValue -> Bool
isNull JNull = True
isNull _ = False
| caiorss/Functional-Programming | haskell/rwh/ch05/SimpleJSON2.hs | unlicense | 1,118 | 0 | 8 | 441 | 350 | 185 | 165 | 38 | 2 |
module PowerDivisibility.A019554Spec (main, spec) where
import Test.Hspec
import PowerDivisibility.A019554 (a019554)
main :: IO ()
main = hspec spec

-- | Checks the sequence against its first 20 reference values.
spec :: Spec
spec = describe "A019554" $
  it "correctly computes the first 20 elements" $
    map a019554 [1..20] `shouldBe` expected
  where
    expected = [1,2,3,2,5,6,7,4,3,10,11,6,13,14,15,4,17,6,19,10]
| peterokagey/haskellOEIS | test/PowerDivisibility/A019554Spec.hs | apache-2.0 | 377 | 0 | 10 | 59 | 160 | 95 | 65 | 10 | 1 |
module External.A053797Spec (main, spec) where
import Test.Hspec
import External.A053797 (a053797)
main :: IO ()
main = hspec spec

-- | Checks the sequence against its first 20 reference values.
spec :: Spec
spec = describe "A053797" $
  it "correctly computes the first 20 elements" $
    map a053797 [1..20] `shouldBe` expected
  where
    expected = [1,2,1,1,1,1,2,2,1,1,1,2,3,1,1,1,1,2,1,1]
| peterokagey/haskellOEIS | test/External/A053797Spec.hs | apache-2.0 | 351 | 0 | 10 | 59 | 160 | 95 | 65 | 10 | 1 |
import System.Environment (getArgs)
import Control.Monad
import BridgeBuddy.Cards
import BridgeBuddy.OpeningBids
import BridgeBuddy.OpeningResponses
main :: IO ()
main = do
args <- getArgs
let num_decks =
case args of
[] -> 1
(x:_) -> (read x) :: Int
forM_ [1 .. num_decks] $ \_ -> do
printDeck
-- | Shuffle a full deck, deal it out, and print the resulting table
-- labelled by whether a biddable hand and an opening response exist.
printDeck :: IO ()
printDeck = do
  shuffled <- shuffleDeck fullDeck
  let table = dealHands shuffled
  case keepFindingBiddableHand table of
    Nothing  -> printTable "TABLE PASSED OUT" table
    Just tbl -> case getOpeningResponse tbl of
      Just _  -> printTable "HAS OPENING RESPONSE" tbl
      Nothing -> printTable "NO OPENING RESPONSE" tbl
-- | Print a header followed by all four hands at the table.
printTable :: String -> TableHands -> IO ()
printTable header hands = do
  putStrLn ("-------" ++ header)
  mapM_ printSeat seats
  where
    printSeat (label, pick) = printHand label (pick hands)
    seats = [ ("North", north)
            , ("East", east)
            , ("South", south)
            , ("West", west) ]
-- | Print one hand with its evaluation: high-card points, playing
-- tricks, balance, and the opening bid with its reasons.
printHand :: String -> Hand -> IO ()
printHand position hand = do
  putStrLn position
  putStr (show hand)
  mapM_ putStrLn
    [ "HCP: " ++ show (hcp hand)
    , "Playing Tricks: " ++ show (playingTricks hand)
    , "Balanced: " ++ show (isBalanced hand)
    , "Bid: " ++ show bid
    , "Reasons: " ++ show reasons
    ]
  where
    (bid, reasons) = openingBid hand
| derekmcloughlin/BridgeBuddyServer | bridge-buddy/Main.hs | apache-2.0 | 1,524 | 0 | 17 | 510 | 494 | 242 | 252 | 45 | 3 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DoAndIfThenElse #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ViewPatterns #-}
module Language.K3.Driver.Service where
import Control.Arrow ( (&&&), second )
import Control.Concurrent
import Control.Concurrent.Async ( Async, asyncThreadId, cancel )
import Control.Exception ( IOException, ErrorCall(..), PatternMatchFail(..) )
import Control.Monad
import Control.Monad.Catch ( Handler(..), throwM, catches, finally )
import Control.Monad.IO.Class
import Control.Monad.Trans.Class
import Control.Monad.Trans.Except
import Control.Monad.Trans.State.Strict
import Data.Binary ( Binary )
import Data.Serialize ( Serialize )
import Data.ByteString.Char8 ( ByteString )
import qualified Data.Serialize as SC
import qualified Data.ByteString.Char8 as BC
import Data.Time.Format
import Data.Time.Clock
import Data.Time.Clock.POSIX ( getPOSIXTime )
import Data.Time.LocalTime
import Data.Monoid
import Data.Ord
import Data.Function
import Data.List
import Data.List.Split
import Data.Map ( Map )
import Data.Set ( Set )
import qualified Data.Heap as Heap
import qualified Data.Map as Map
import qualified Data.Set as Set
import Criterion.Types ( Measured(..) )
import Criterion.Measurement ( initializeTime, getTime, secs )
import GHC.Conc
import GHC.Generics ( Generic )
import System.Random
import System.ZMQ4.Monadic
import System.IO ( Handle, stdout, stderr )
import qualified System.Log.Logger as Log
import qualified System.Log.Formatter as LogF
import qualified System.Log.Handler as LogH
import qualified System.Log.Handler.Simple as LogHS
import Language.K3.Core.Annotation
import Language.K3.Core.Common
import Language.K3.Core.Declaration
import Language.K3.Core.Utils
import qualified Language.K3.Core.Constructor.Declaration as DC
import qualified Language.K3.Codegen.CPP.Materialization.Inference as MatI
import Language.K3.Parser ( parseK3 )
import Language.K3.Stages ( TransformReport(..) )
import qualified Language.K3.Stages as ST
import Language.K3.Driver.Common
import Language.K3.Driver.Options
import Language.K3.Driver.Driver hiding ( liftEitherM, reasonM )
import qualified Language.K3.Driver.CompilerTarget.CPP as CPPC
import Language.K3.Utils.Pretty
-- | Compiler service protocol, supporting compile tasks (Program*, Block* messages)
-- and worker tracking (Register*, Heartbeat*)
data CProtocol = Register String
               | RegisterAck CompileOptions
               | Heartbeat Integer
               | HeartbeatAck Integer
               | Program Request String DistributedCompileOptions
               | ProgramDone Request (K3 Declaration) String
               | ProgramAborted Request String
               -- Round 1: distributed K3 optimization
               | R1Block ProgramID BlockCompileSpec CompileBlockU (K3 Declaration)
               | R1BlockDone WorkerID ProgramID CompileBlockD TransformReport
               | R1BlockAborted WorkerID ProgramID [BlockID] String
               -- Round 2: distributed materialization
               | R2Block ProgramID BlockCompileSpec CompileBlockU (K3 Declaration)
               | R2BlockDone WorkerID ProgramID CompileBlockD TransformReport
               | R2BlockAborted WorkerID ProgramID [BlockID] String
               | Query ServiceQuery
               | QueryResponse ServiceResponse
               | Quit
               deriving (Eq, Read, Show, Generic)

-- Wire encodings (binary and cereal) for protocol messages.
instance Binary CProtocol
instance Serialize CProtocol
-- | Service thread state, encapsulating task workers and connection workers.
data ThreadState = ThreadState { sthread :: Maybe (Async ())  -- ^ main service thread
                               , ttworkers :: Set (Async ())  -- ^ in-flight task threads
                               }
                 deriving ( Eq )

-- | Primitive service datatypes.
type Request = String
type RequestID = Int
type SocketID = ByteString
type WorkerID = String
type DeclID = Int
type BlockID = Int
type ProgramID = Int
-- | Block compilation datatypes.
data BlockCompileSpec = BlockCompileSpec { bprepStages :: CompileStages
                                         , bcompileStages :: CompileStages
                                         , wSymForkFactor :: Int
                                         , wSymOffset :: Int }
                      deriving (Eq, Read, Show, Generic)

-- | A compile block: per-block lists of declarations, keyed by block id.
type CompileBlock a = [(BlockID, [(DeclID, a)])]
type CompileBlockU = CompileBlock UID
type CompileBlockD = CompileBlock (K3 Declaration)

instance Binary BlockCompileSpec
instance Serialize BlockCompileSpec

-- | Progress of one compilation round: outstanding, completed and
-- aborted blocks.
data JobRound = JobRound { jpending :: BlockSet
                         , jcompleted :: BlockSourceMap
                         , jaborted :: [(WorkerID, String)]}
              deriving (Eq, Read, Show)

-- | Compilation progress state per program.
data JobState = JobState { jrid :: RequestID
                         , jrq :: Request
                         , jprofile :: JobProfile
                         , jround1 :: JobRound
                         , jround2 :: JobRound
                         , jjobOpts :: DistributedCompileOptions }
              deriving (Eq, Read, Show)

-- | Profiling information per worker and block.
data JobProfile = JobProfile { jstartTime :: Double
                             , jendTimes :: Map WorkerID Double
                             , jworkerst :: Map WorkerID WorkerJobState
                             , jppreport :: TransformReport
                             , jreports :: [TransformReport] }
                deriving (Eq, Read, Show)

-- | Cost book-keeping per worker.
-- This is a snapshot of the worker's total weight (across all jobs),
-- the per-block contributions from the current job during assignment,
-- and a block completion counter per worker.
-- We use this information to validate the cost model accuracy.
type JobBlockCosts = Map BlockID Double

data WorkerJobState = WorkerJobState { jwwsnap :: Double
                                     , jwassign :: JobBlockCosts
                                     , jwcomplete :: Int }
                    deriving (Eq, Read, Show)

-- | Assignment book-keeping for a single service worker.
data WorkerAssignment = WorkerAssignment { wablocks :: BlockSet
                                         , waweight :: Double }
                      deriving (Eq, Read, Show)

-- Aliases for the master's tracking maps.
type BlockSet = Set BlockID
type BlockSourceMap = Map BlockID [(DeclID, K3 Declaration)]
type JobsMap = Map ProgramID JobState
type AssignmentsMap = Map WorkerID WorkerAssignment
type WorkersMap = Map WorkerID SocketID
type RequestSet = Set RequestID
type ClientMap = Map SocketID RequestSet
type RequestMap = Map RequestID SocketID
-- | Common state for the compiler service.
data ServiceState a = ServiceState { sterminate :: MVar ()  -- ^ termination signal
                                   , threadSt :: ThreadState
                                   , stcompileOpts :: CompileOptions
                                   , compileSt :: a  -- ^ role-specific state
                                   }

-- | Compiler service master state: worker/client/request tracking maps,
-- id counters, per-program jobs and per-worker assignments.
data SMST = SMST { cworkers :: WorkersMap
                 , cclients :: ClientMap
                 , crequests :: RequestMap
                 , bcnt :: Int  -- ^ block id counter
                 , pcnt :: Int  -- ^ program id counter
                 , rcnt :: Int  -- ^ request id counter
                 , jobs :: JobsMap
                 , assignments :: AssignmentsMap }

type ServiceMState = ServiceState SMST

-- | Compiler service worker state (a heartbeat counter).
-- Note: workers are stateless.
data SWST = SWST { hbcnt :: Integer }

type ServiceWState = ServiceState SWST

-- | Mutable service state variable.
type ServiceSTVar a = MVar a
type ServiceST a = MVar (ServiceState a)
type ServiceMSTVar = ServiceST SMST
type ServiceWSTVar = ServiceST SWST

-- | A K3 compiler service monad.
type ServiceM z a = ExceptT String (StateT (ServiceSTVar a) (ZMQ z))

-- | Type definitions for service master and worker monads.
type ServiceMM z = ServiceM z ServiceMState
type ServiceWM z = ServiceM z ServiceWState

type SocketAction z = Int -> Socket z Dealer -> ZMQ z ()
type ClientHandler t = forall z. Socket z t -> ZMQ z ()
type MessageHandler = forall z. CProtocol -> ZMQ z ()

-- | Service querying datatypes.
data SJobStatus = SJobStatus { jobPending :: BlockSet, jobCompleted :: BlockSet }
                deriving (Eq, Read, Show, Generic)

data SWorkerStatus = SWorkerStatus { wNumBlocks :: Int, wWeight :: Double }
                   deriving (Eq, Read, Show, Generic)

data ServiceQuery = SQJobStatus [ProgramID]
                  | SQWorkerStatus [WorkerID]
                  deriving (Eq, Read, Show, Generic)

data ServiceResponse = SRJobStatus (Map ProgramID SJobStatus)
                     | SRWorkerStatus (Map WorkerID SWorkerStatus)
                     deriving (Eq, Read, Show, Generic)

-- Wire encodings for queries and responses.
instance Binary SJobStatus
instance Binary SWorkerStatus
instance Binary ServiceQuery
instance Binary ServiceResponse
instance Serialize SJobStatus
instance Serialize SWorkerStatus
instance Serialize ServiceQuery
instance Serialize ServiceResponse
{- Initial states -}

-- | No service thread and no task workers.
sthread0 :: ThreadState
sthread0 = ThreadState Nothing Set.empty

-- | Generic initial service state from a termination variable,
-- compile options and role-specific state.
sst0 :: MVar () -> CompileOptions -> a -> ServiceState a
sst0 terminateVar cOpts roleState = ServiceState terminateVar sthread0 cOpts roleState

-- | Initial master state: empty maps and zeroed counters.
sstm0 :: MVar () -> CompileOptions -> ServiceMState
sstm0 terminateVar cOpts =
  sst0 terminateVar cOpts
    (SMST Map.empty Map.empty Map.empty 0 0 0 Map.empty Map.empty)

-- | Allocate a fresh master state variable.
svm0 :: CompileOptions -> IO ServiceMSTVar
svm0 cOpts = do
  terminateVar <- newEmptyMVar
  newMVar (sstm0 terminateVar cOpts)

-- | Initial worker state: heartbeat counter at zero.
sstw0 :: MVar () -> CompileOptions -> ServiceWState
sstw0 terminateVar cOpts = sst0 terminateVar cOpts (SWST 0)

-- | Allocate a fresh worker state variable.
svw0 :: CompileOptions -> IO ServiceWSTVar
svw0 cOpts = do
  terminateVar <- newEmptyMVar
  newMVar (sstw0 terminateVar cOpts)
{- Service monad utilities -}

-- | Run a service action to completion in IO, returning errors explicitly.
runServiceM :: ServiceSTVar a -> (forall z. ServiceM z a b) -> IO (Either String b)
runServiceM st m = runZMQ $ evalStateT (runExceptT m) st

-- | As 'runServiceM', but print any error to stdout and discard the result.
runServiceM_ :: ServiceSTVar a -> (forall z. ServiceM z a b) -> IO ()
runServiceM_ st m = runServiceM st m >>= either putStrLn (const $ return ())

-- | Run a service action inside an existing ZMQ context.
runServiceZ :: ServiceSTVar a -> ServiceM z a b -> ZMQ z (Either String b)
runServiceZ st m = evalStateT (runExceptT m) st

-- | Lift a ZMQ action into the service monad.
liftZ :: ZMQ z a -> ServiceM z b a
liftZ = lift . lift

-- | Lift an IO action into the service monad.
liftI :: IO b -> ServiceM z a b
liftI = lift . liftIO

-- | Lift an IO action that may fail, propagating the failure.
liftIE :: IO (Either String b) -> ServiceM z a b
liftIE m = ExceptT $ liftIO m

-- | Raise a pure 'Either' into the service monad.
liftEitherM :: Either String b -> ServiceM z a b
liftEitherM e = liftIE $ return e

-- | Prefix any error produced by the action with a context message.
reasonM :: String -> ServiceM z a b -> ServiceM z a b
reasonM msg m = withExceptT (msg ++) m

-- | Atomically modify the shared state, returning an auxiliary value.
modifyM :: (a -> (a, b)) -> ServiceM z a b
modifyM f = getV >>= \sv -> liftI (modifyMVar sv (return . f))

-- | Atomically modify the shared state.
modifyM_ :: (a -> a) -> ServiceM z a ()
modifyM_ f = getV >>= \sv -> liftI (modifyMVar_ sv (return . f))

-- | The underlying state variable.
getV :: ServiceM z a (ServiceSTVar a)
getV = lift get

-- | A snapshot of the shared state.
getSt :: ServiceM z a a
getSt = getV >>= liftIO . readMVar
{- Thread and async task management -}

-- | Read the thread state of the service.
getTS :: ServiceM z (ServiceState a) ThreadState
getTS = threadSt <$> getSt

-- | Atomically modify the thread state, returning a value.
modifyTS :: (ThreadState -> (ThreadState, b)) -> ServiceM z (ServiceState a) b
modifyTS f = modifyM update
  where update st = let (nts, r) = f (threadSt st) in (st { threadSt = nts }, r)

-- | Atomically modify the thread state.
modifyTS_ :: (ThreadState -> ThreadState) -> ServiceM z (ServiceState a) ()
modifyTS_ f = modifyM_ (\st -> st { threadSt = f (threadSt st) })

-- | Read the thread state directly from IO.
getTSIO :: ServiceST a -> IO ThreadState
getTSIO var = threadSt <$> readMVar var

-- | Modify the thread state directly from IO.
modifyTSIO_ :: ServiceST a -> (ThreadState -> ThreadState) -> IO ()
modifyTSIO_ var f = modifyMVar_ var (\st -> return (st { threadSt = f (threadSt st) }))
{- Compile options accessors -}

-- | Read the compile options shared by master and workers.
getCO :: ServiceM z (ServiceState a) CompileOptions
getCO = stcompileOpts <$> getSt

-- | Overwrite the compile options.
putCO :: CompileOptions -> ServiceM z (ServiceState a) ()
putCO cOpts = modifyM_ (\st -> st { stcompileOpts = cOpts })

-- | Apply a function to the compile options.
modifyCO_ :: (CompileOptions -> CompileOptions) -> ServiceM z (ServiceState a) ()
modifyCO_ f = modifyM_ (\st -> st { stcompileOpts = f (stcompileOpts st) })
{- Service master accessors -}

-- | Atomically modify the master compile state, returning a value.
modifyMM :: (SMST -> (SMST, a)) -> ServiceMM z a
modifyMM f = modifyM $ \st -> let (ncst, r) = f $ compileSt st in (st { compileSt = ncst }, r)

-- | Atomically modify the master compile state.
modifyMM_ :: (SMST -> SMST) -> ServiceMM z ()
modifyMM_ f = modifyM_ $ \st -> st { compileSt = f $ compileSt st }

-- | As 'modifyMM', but usable directly from IO given the state variable.
modifyMIO :: ServiceMSTVar -> (SMST -> (SMST, a)) -> IO a
modifyMIO sv f = modifyMVar sv $ \st -> let (ncst, r) = f $ compileSt st in return (st { compileSt = ncst }, r)

-- | Read the master compile state.
getM :: ServiceMM z SMST
getM = getSt >>= return . compileSt

-- | Read the master compile state from IO.
getMIO :: ServiceMSTVar -> IO SMST
getMIO sv = liftIO (readMVar sv) >>= return . compileSt

-- | Replace the master compile state.
putM :: SMST -> ServiceMM z ()
putM ncst = modifyM_ $ \st -> st { compileSt = ncst }
{- Counter accessors -}

-- | Allocate the next program id.
progIDM :: ServiceMM z ProgramID
progIDM = modifyMM $ \cst -> let next = pcnt cst + 1 in (cst { pcnt = next }, next)

-- | Allocate the next block id.
blockIDM :: ServiceMM z BlockID
blockIDM = modifyMM $ \cst -> let next = bcnt cst + 1 in (cst { bcnt = next }, next)

-- | Allocate the next request id. The signature previously said
-- 'BlockID'; both alias 'Int', so using 'RequestID' is a purely
-- documentation-level consistency fix with no behavioral change.
requestIDM :: ServiceMM z RequestID
requestIDM = modifyMM $ \cst -> let next = rcnt cst + 1 in (cst { rcnt = next }, next)
{- Assignments accessors -}

-- | Read the worker assignments map from the master state.
getMA :: ServiceMM z AssignmentsMap
getMA = assignments <$> getM

-- | Replace the assignments map wholesale.
putMA :: AssignmentsMap -> ServiceMM z ()
putMA nassigns = modifyMM_ $ \cst -> cst { assignments = nassigns }

-- | Update the assignments map with a function that also yields a result.
modifyMA :: (AssignmentsMap -> (AssignmentsMap, a)) -> ServiceMM z a
modifyMA f = modifyMM $ \cst -> let (nassigns, r) = f $ assignments cst in (cst { assignments = nassigns }, r)

-- | Update the assignments map in place.
modifyMA_ :: (AssignmentsMap -> AssignmentsMap) -> ServiceMM z ()
modifyMA_ f = modifyMM_ $ \cst -> cst { assignments = f $ assignments cst }

-- | Map a function over the assignments, keyed by worker id.
mapMA :: (WorkerID -> WorkerAssignment -> a) -> ServiceMM z (Map WorkerID a)
mapMA f = Map.mapWithKey f <$> getMA

-- | Strict-style left fold over the assignments, keyed by worker id.
foldMA :: (a -> WorkerID -> WorkerAssignment -> a) -> a -> ServiceMM z a
foldMA f z = Map.foldlWithKey f z <$> getMA
{- Job state accessors -}

-- | Read the jobs map from the master state.
getMJ :: ServiceMM z JobsMap
getMJ = jobs <$> getM

-- | Replace the jobs map wholesale.
putMJ :: JobsMap -> ServiceMM z ()
putMJ njobs = modifyMM_ $ \cst -> cst { jobs = njobs }

-- | Update the jobs map with a function that also yields a result.
modifyMJ :: (JobsMap -> (JobsMap, a)) -> ServiceMM z a
modifyMJ f = modifyMM $ \cst -> let (njobs, r) = f $ jobs cst in (cst { jobs = njobs }, r)

-- | Update the jobs map in place.
modifyMJ_ :: (JobsMap -> JobsMap) -> ServiceMM z ()
modifyMJ_ f = modifyMM_ $ \cst -> cst { jobs = f $ jobs cst }
{- Service request, client and worker accessors -}

-- | Look up the outstanding request set for a client socket.
getMC :: SocketID -> ServiceMM z (Maybe RequestSet)
getMC c = Map.lookup c . cclients <$> getM

-- | Set (or overwrite) the request set for a client socket.
putMC :: SocketID -> RequestSet -> ServiceMM z ()
putMC c r = modifyMM_ $ \cst -> cst { cclients = Map.insert c r $ cclients cst }

-- | Update the client map with a function that also yields a result.
modifyMC :: (ClientMap -> (ClientMap, a)) -> ServiceMM z a
modifyMC f = modifyMM $ \cst -> let (ncc, r) = f $ cclients cst in (cst {cclients = ncc}, r)

-- | Update the client map in place.
modifyMC_ :: (ClientMap -> ClientMap) -> ServiceMM z ()
modifyMC_ f = modifyMM_ $ \cst -> cst {cclients = f $ cclients cst}

-- | Look up the client socket that issued a request.
getMR :: RequestID -> ServiceMM z (Maybe SocketID)
getMR r = Map.lookup r . crequests <$> getM

-- | Record which client socket issued a request.
putMR :: RequestID -> SocketID -> ServiceMM z ()
putMR r c = modifyMM_ $ \cst -> cst { crequests = Map.insert r c $ crequests cst }

-- | Update the request map with a function that also yields a result.
modifyMR :: (RequestMap -> (RequestMap, a)) -> ServiceMM z a
modifyMR f = modifyMM $ \cst -> let (nrq, r) = f $ crequests cst in (cst {crequests = nrq}, r)

-- | Update the request map in place.
modifyMR_ :: (RequestMap -> RequestMap) -> ServiceMM z ()
modifyMR_ f = modifyMM_ $ \cst -> cst {crequests = f $ crequests cst}

-- | Read the workers map from the master state.
getMW :: ServiceMM z WorkersMap
getMW = cworkers <$> getM

-- | Read the workers map directly through the master state 'MVar'.
getMWIO :: ServiceMSTVar -> IO WorkersMap
getMWIO sv = cworkers <$> getMIO sv

-- | Look up the socket registered for a worker id.
getMWI :: WorkerID -> ServiceMM z (Maybe SocketID)
getMWI wid = Map.lookup wid . cworkers <$> getM

-- | Register (or overwrite) the socket for a worker id.
putMWI :: WorkerID -> SocketID -> ServiceMM z ()
putMWI wid s = modifyMM_ $ \cst -> cst { cworkers = Map.insert wid s $ cworkers cst }
{- Worker state accessors -}

-- | Update the worker compile state with a function that also yields a result.
--   (Mirrors 'modifyMM' for the master state.)
modifyWM :: (SWST -> (SWST, a)) -> ServiceWM z a
modifyWM f = modifyM $ \st -> let (ncst, r) = f $ compileSt st in (st { compileSt = ncst }, r)

-- | Update the worker compile state in place.
modifyWM_ :: (SWST -> SWST) -> ServiceWM z ()
modifyWM_ f = modifyM_ $ \st -> st { compileSt = f $ compileSt st }

-- | Update the worker compile state directly through its 'MVar'.
modifyWIO :: ServiceWSTVar -> (SWST -> (SWST, a)) -> IO a
modifyWIO sv f = modifyMVar sv $ \st -> let (ncst, r) = f $ compileSt st in return (st { compileSt = ncst }, r)

-- | Read the worker compile state.
getW :: ServiceWM z SWST
getW = compileSt <$> getSt

-- | Replace the worker compile state wholesale.
putW :: SWST -> ServiceWM z ()
putW ncst = modifyM_ $ \st -> st { compileSt = ncst }

-- | Increment the heartbeat counter and return its new value.
heartbeatM :: ServiceWM z Integer
heartbeatM = modifyWM $ \cst -> let i = hbcnt cst + 1 in (cst {hbcnt = i}, i)

-- | Increment the heartbeat counter through the worker state 'MVar'.
heartbeatIO :: ServiceWSTVar -> IO Integer
heartbeatIO sv = modifyWIO sv $ \cst -> let i = hbcnt cst + 1 in (cst {hbcnt = i}, i)
{- Misc -}

-- | Print a line from within the service monad.
putStrLnM :: String -> ServiceM z a ()
putStrLnM = liftIO . putStrLn

-- From ZHelpers.hs.
-- In general, since we use randomness, you should pass in a StdGen;
-- for simplicity we just use newStdGen.

-- | Assign a fresh random identity to a ZMQ socket.
setRandomIdentity :: Socket z t -> ZMQ z ()
setRandomIdentity sock = do
  ident <- liftIO genUniqueId
  setIdentity (restrict $ BC.pack ident) sock

-- From ZHelpers.hs.
-- You probably want to use an external library to generate a random unique
-- id in production code: two draws from [0, 65536] can collide.

-- | Generate a (probabilistically) unique identifier string.
genUniqueId :: IO String
genUniqueId = do
  -- Already in IO, so no 'liftIO' is needed around 'newStdGen'.
  gen <- newStdGen
  let (val1, gen') = randomR (0 :: Int, 65536) gen
  let (val2, _) = randomR (0 :: Int, 65536) gen'
  return $ show val1 ++ show val2

-- | Render the service's TCP endpoint, e.g. @tcp://host:port@.
tcpConnStr :: ServiceOptions -> String
tcpConnStr sOpts = "tcp://" ++ serviceHost sOpts ++ ":" ++ show (servicePort sOpts)
-- | Build a log formatter supporting @$time@, @$utcTime@ and @$rprio@
--   variables on top of 'LogF.varFormatter'.
--   @$rprio@ is the priority right-padded to the width of \"EMERGENCY\"
--   so that log columns line up.
logFormat :: String -> String -> LogF.LogFormatter a
logFormat timeFormat format h (prio, msg) loggername = LogF.varFormatter vars format h (prio,msg) loggername
  where vars = [("time", formatTime defaultTimeLocale timeFormat <$> getZonedTime)
               ,("utcTime", formatTime defaultTimeLocale timeFormat <$> getCurrentTime)
               ,("rprio", rprio)
               ]
        -- Pad on the left so all priorities occupy the same width.
        rprio = return $ replicate rpad ' ' ++ sprio
        sprio = show prio
        rpad = length "EMERGENCY" - length sprio
-- | Install a stream-backed (e.g. stdout/stderr) handler on the root logger
--   at the given priority, using the service's timestamped format.
streamLogging :: Handle -> Log.Priority -> IO ()
streamLogging hndl lvl = do
  lh <- LogHS.streamHandler hndl lvl
  -- 'let' instead of the redundant 'h <- return $ ...' binding.
  let h = LogH.setFormatter lh (logFormat "%s.%q" "[$time $rprio] $msg")
  Log.updateGlobalLogger Log.rootLoggerName (Log.setLevel lvl . Log.setHandlers [h])

-- | Install a file-backed handler on the root logger at the given priority,
--   using the same format as 'streamLogging'.
fileLogging :: FilePath -> Log.Priority -> IO ()
fileLogging path lvl = do
  lh <- LogHS.fileHandler path lvl
  let h = LogH.setFormatter lh (logFormat "%s.%q" "[$time $rprio] $msg")
  Log.updateGlobalLogger Log.rootLoggerName (Log.setLevel lvl . Log.setHandlers [h])

-- | Logger name for all service messages.
logN :: String
logN = "Language.K3.Driver.Service"
{- Logging helpers. Bypass L.K3.Utils.Logger to avoid need for -DDEBUG -}
-- 'MonadIO' has 'Monad' as a superclass, so the former '(Monad m, MonadIO m)'
-- constraints are simplified to just 'MonadIO m' (callers are unaffected).

-- | Log a DEBUG message against the service logger.
debugM :: MonadIO m => String -> m ()
debugM = liftIO . Log.debugM logN

-- | Log an INFO message against the service logger.
infoM :: MonadIO m => String -> m ()
infoM = liftIO . Log.infoM logN

-- | Log a NOTICE message against the service logger.
noticeM :: MonadIO m => String -> m ()
noticeM = liftIO . Log.noticeM logN

-- | Log a WARNING message against the service logger.
warningM :: MonadIO m => String -> m ()
warningM = liftIO . Log.warningM logN

-- | Log an ERROR message against the service logger.
errorM :: MonadIO m => String -> m ()
errorM = liftIO . Log.errorM logN
-- | Render a service protocol message as a short string for logging.
--   Payloads (programs, compile blocks, reports) are elided; only
--   identifiers and block-id lists are shown.
cshow :: CProtocol -> String
cshow = \case
  Register wid -> "Register " ++ wid
  RegisterAck _ -> "RegisterAck"
  Heartbeat hbid -> "Heartbeat " ++ show hbid
  HeartbeatAck hbid -> "HeartbeatAck " ++ show hbid
  Program rq _ _ -> "Program " ++ rq
  ProgramDone rq _ _ -> "ProgramDone " ++ rq
  ProgramAborted rq _ -> "ProgramAborted " ++ rq
  R1Block pid _ bids _ -> unwords ["R1Block", show pid, intercalate "," $ map (show . fst) bids]
  R1BlockDone wid pid bids _ -> unwords ["R1BlockDone", show wid, show pid, intercalate "," $ map (show . fst) bids]
  R1BlockAborted wid pid bids _ -> unwords ["R1BlockAborted", show wid, show pid, intercalate "," $ map show bids]
  R2Block pid _ bids _ -> unwords ["R2Block", show pid, intercalate "," $ map (show . fst) bids]
  R2BlockDone wid pid bids _ -> unwords ["R2BlockDone", show wid, show pid, intercalate "," $ map (show . fst) bids]
  R2BlockAborted wid pid bids _ -> unwords ["R2BlockAborted", show wid, show pid, intercalate "," $ map show bids]
  Query sq -> unwords ["Query", show sq]
  QueryResponse sr -> unwords ["QueryResponse", show sr]
  Quit -> "Quit"
-- | Service utilities.
-- Initialize timing and logging, then run the given service action.
-- The log target is either a named stream handle (@stdout@/@stderr@)
-- or a file path.
initService :: ServiceOptions -> IO () -> IO ()
initService sOpts act = do
    initializeTime
    configureLog $ serviceLog sOpts
    act
  where
    lvl = serviceLogLevel sOpts
    configureLog target = case target of
      Left "stdout" -> streamLogging stdout lvl
      Left "stderr" -> streamLogging stderr lvl
      Left s        -> error $ "Invalid service logging handle " ++ s
      Right path    -> fileLogging path lvl
-- | Compiler service master.
--   Binds a ROUTER frontend on the service's TCP endpoint, spins up an
--   in-process worker queue handling master protocol messages, proxies
--   between the two, and blocks until termination is signalled.
runServiceMaster :: ServiceOptions -> Options -> IO ()
runServiceMaster sOpts opts = initService sOpts $ runZMQ $ do
    let bqid = "mbackend"
    sv <- liftIO $ svm0 (scompileOpts sOpts)
    frontend <- socket Router
    bind frontend mconn
    backend <- workqueue sv nworkers bqid Nothing $ processMasterConn sOpts opts sv
    -- Run the frontend/backend proxy on its own async thread.
    as <- async $ proxy frontend backend Nothing
    noticeM $ unwords ["Service Master", show $ asyncThreadId as, mconn]
    -- Record the proxy thread, then block until 'terminate' fills the
    -- termination MVar; workers are told to quit on the way out.
    flip finally (stopService sv bqid) $ liftIO $ do
      modifyTSIO_ sv $ \tst -> tst { sthread = Just as }
      wait sv
  where
    mconn = tcpConnStr sOpts
    nworkers = serviceThreads sOpts

    -- Broadcast Quit to remote workers via the in-process queue, then give
    -- deliveries five seconds to drain before the ZMQ context closes.
    stopService :: ServiceMSTVar -> String -> ZMQ z ()
    stopService sv qid = do
      wsock <- socket Dealer
      connect wsock $ "inproc://" ++ qid
      shutdownRemote sv wsock
      liftIO $ threadDelay $ 5 * 1000 * 1000
-- | Compiler service worker.
--   Connects a DEALER frontend to the master's endpoint, starts an
--   in-process work queue (registering with the master from the first
--   queue thread), launches a heartbeat loop, and blocks until terminated.
runServiceWorker :: ServiceOptions -> IO ()
runServiceWorker sOpts@(serviceId -> wid) = initService sOpts $ runZMQ $ do
    let bqid = "wbackend"
    sv <- liftIO $ svw0 (scompileOpts sOpts)
    frontend <- socket Dealer
    setRandomIdentity frontend
    connect frontend wconn
    backend <- workqueue sv nworkers bqid (Just registerWorker) $ processWorkerConn sOpts sv
    as <- async $ proxy frontend backend Nothing
    noticeM $ unwords ["Service Worker", wid, show $ asyncThreadId as, wconn]
    void $ async $ heartbeatLoop sv
    liftIO $ do
      modifyTSIO_ sv $ \tst -> tst { sthread = Just as }
      wait sv
  where
    wconn = tcpConnStr sOpts
    nworkers = serviceThreads sOpts

    -- Send a single Register message to the master, from queue thread 1 only,
    -- so registration happens exactly once per worker process.
    registerWorker :: Int -> Socket z Dealer -> ZMQ z ()
    registerWorker wtid wsock
      | wtid == 1 = do
          noticeM $ unwords ["Worker", wid, "registering"]
          sendC wsock $ Register wid
      | otherwise = return ()

    -- Periodically ping the master on a dedicated DEALER socket.
    -- NOTE(review): 'close hbsock' is unreachable after 'forever' — confirm
    -- whether the socket is meant to be closed on an exception path instead.
    heartbeatLoop :: ServiceWSTVar -> ZMQ z ()
    heartbeatLoop sv = do
      hbsock <- socket Dealer
      setRandomIdentity hbsock
      connect hbsock wconn
      noticeM $ unwords ["Heartbeat loop started with period", show heartbeatPeriod, "us."]
      void $ forever $ do
        liftIO $ threadDelay heartbeatPeriod
        pingPongHeartbeat sv hbsock
      close hbsock

    -- Send one heartbeat and poll for its acknowledgement within the epoch.
    -- NOTE(review): the '[evts] <-' pattern is partial; it relies on 'poll'
    -- returning exactly one event list for the single polled socket.
    pingPongHeartbeat :: ServiceWSTVar -> Socket z Dealer -> ZMQ z ()
    pingPongHeartbeat sv hbsock = do
      hbid <- liftIO $ heartbeatIO sv
      sendC hbsock $ Heartbeat hbid
      [evts] <- poll heartbeatPollPeriod [Sock hbsock [In] Nothing]
      if In `elem` evts then do
          ackmsg <- receive hbsock
          case SC.decode ackmsg of
            Right (HeartbeatAck i) -> noticeM $ unwords ["Got a heartbeat ack", show i]
            Right m -> errorM $ unwords ["Invalid heartbeat ack", cshow m]
            Left err -> errorM err
        else noticeM $ "No heartbeat acknowledgement received during the last epoch."

    -- Heartbeat period in microseconds; poll period in milliseconds.
    heartbeatPeriod = seconds $ sHeartbeatEpoch sOpts
    heartbeatPollPeriod = fromIntegral $ sHeartbeatEpoch sOpts * 1000
    seconds x = x * 1000000
-- | Run a one-shot client: connect a socket of the given type to the
--   service endpoint (with a random identity) and hand it to the handler.
runClient :: (SocketType t) => t -> ServiceOptions -> ClientHandler t -> IO ()
runClient sockT sOpts clientF = initService sOpts $ runZMQ $ do
  client <- socket sockT
  setRandomIdentity client
  connect client $ tcpConnStr sOpts
  clientF client

-- | Create an in-process DEALER work queue named @inproc://<qid>@ with @n@
--   async worker threads, each running 'worker'. The spawned asyncs are
--   recorded in the service thread state; the backend socket is returned
--   for proxying.
workqueue :: ServiceST a -> Int -> String -> Maybe (SocketAction z) -> SocketAction z -> ZMQ z (Socket z Dealer)
workqueue sv n qid initFOpt workerF = do
  backend <- socket Dealer
  bind backend $ "inproc://" ++ qid
  as <- forM [1..n] $ \i -> async $ worker i qid initFOpt workerF
  liftIO $ modifyTSIO_ sv $ \tst -> tst { ttworkers = Set.fromList as `Set.union` ttworkers tst }
  return backend

-- | Queue worker thread @i@: connect to the in-process queue, run the
--   optional one-time initializer, then loop the handler forever.
worker :: Int -> String -> Maybe (SocketAction z) -> SocketAction z -> ZMQ z ()
worker i qid initFOpt workerF = do
  wsock <- socket Dealer
  connect wsock $ "inproc://" ++ qid
  noticeM "Worker started"
  void $ maybe (return ()) (\f -> f i wsock) initFOpt
  forever $ workerF i wsock
-- | Control primitives.

-- | Block until service termination is signalled (the terminate MVar fills).
wait :: ServiceST a -> IO ()
wait sv = do
  st <- readMVar sv
  readMVar $ sterminate st

-- | Signal service termination; idempotent via 'tryPutMVar'.
terminate :: ServiceST a -> IO ()
terminate sv = do
  st <- readMVar sv
  void $ tryPutMVar (sterminate st) ()

-- | Log the status of the proxy thread (if any) and all queue workers.
threadstatus :: ServiceST a -> IO ()
threadstatus sv = do
  tst <- getTSIO sv
  -- Pure projection; the former 'mapM (return . asyncThreadId)' was a
  -- needlessly monadic map.
  let tids = map asyncThreadId $ (maybe [] (:[]) $ sthread tst) ++ (Set.toList $ ttworkers tst)
  thsl <- mapM (\t -> threadStatus t >>= \s -> return (unwords [show t, ":", show s])) tids
  noticeM $ concat thsl

-- | Cancel all queue workers except the calling thread itself.
shutdown :: ServiceST a -> IO ()
shutdown sv = do
  tst <- getTSIO sv
  tid <- myThreadId
  mapM_ (\a -> unless (tid == asyncThreadId a) $ cancel a) $ Set.toList $ ttworkers tst
-- | Distributed control primitives.

-- | Send Quit to every registered remote worker, addressed by its socket id.
shutdownRemote :: (SocketType t, Sender t) => ServiceMSTVar -> Socket z t -> ZMQ z ()
shutdownRemote sv master = do
  wm <- liftIO $ getMWIO sv
  forM_ (Map.elems wm) $ \wsid -> do
    noticeM $ "Shutting down service worker " ++ (show $ BC.unpack wsid)
    sendCI wsid master Quit

-- | Messaging primitives.

-- | Send a serialized protocol message.
sendC :: (SocketType t, Sender t) => Socket z t -> CProtocol -> ZMQ z ()
sendC s m = send s [] $ SC.encode m

-- | Send a protocol message addressed to a specific peer identity
--   (the identity frame precedes the payload, as required by ROUTER peers).
sendCI :: (SocketType t, Sender t) => SocketID -> Socket z t -> CProtocol -> ZMQ z ()
sendCI sid s m = send s [SendMore] sid >> sendC s m

-- | Send a batch of addressed protocol messages.
sendCIs :: (SocketType t, Sender t) => Socket z t -> [(SocketID, CProtocol)] -> ZMQ z ()
sendCIs s msgs = forM_ msgs $ \(sid,m) -> sendCI sid s m

-- | Client primitives.

-- | Fire-and-forget: connect, send one message, return.
command :: (SocketType t, Sender t) => t -> ServiceOptions -> CProtocol -> IO ()
command t sOpts msg = runClient t sOpts $ \client -> sendC client msg

-- | Send one request and hand the decoded reply to the handler;
--   decoding failures are logged via 'errorM'.
requestreply :: (SocketType t, Receiver t, Sender t) => t -> ServiceOptions -> CProtocol -> MessageHandler -> IO ()
requestreply t sOpts req replyF = runClient t sOpts $ \client -> do
  sendC client req
  rep <- receive client
  either errorM replyF $ SC.decode rep
-- | Compiler service protocol handlers.
processMasterConn :: ServiceOptions -> Options -> ServiceMSTVar -> Int -> Socket z Dealer -> ZMQ z ()
processMasterConn (serviceId -> msid) opts sv wtid mworker = do
sid <- receive mworker
msg <- receive mworker
logmHandler sid msg
where
mPfxM = "[" ++ msid ++ " " ++ show wtid ++ "] "
mlogM :: (Monad m, MonadIO m) => String -> m ()
mlogM msg = noticeM $ mPfxM ++ msg
merrM :: (Monad m, MonadIO m) => String -> m ()
merrM msg = errorM $ mPfxM ++ msg
zm :: ServiceMM z a -> ZMQ z a
zm m = runServiceZ sv m >>= either (throwM . userError) return
mkReport n profs = TransformReport (Map.singleton n profs) Map.empty
logmHandler sid (SC.decode -> msgE) = either merrM (\msg -> mlogM (cshow msg) >> mHandler sid msg) msgE
mHandler sid (Program rq prog jobOpts) = do
rid' <- zm $ do
rid <- requestIDM
putMC sid (Set.singleton rid) >> putMR rid sid >> return rid
process prog jobOpts rq rid'
mHandler _ (R1BlockDone wid pid blocksByBID report) = completeBlocks completeRound1 wid pid blocksByBID report
mHandler _ (R1BlockAborted wid pid bids reason) = abortBlocks wid pid bids reason
mHandler _ (R2BlockDone wid pid blocksByBID report) = completeBlocks completeProgram wid pid blocksByBID report
mHandler _ (R2BlockAborted wid pid bids reason) = abortBlocks wid pid bids reason
mHandler sid (Register wid) = do
mlogM $ unwords ["Registering worker", wid]
cOpts <- zm $ putMWI wid sid >> getCO
sendCI sid mworker $ RegisterAck cOpts
-- TODO: detect worker changes.
mHandler sid (Heartbeat hbid) = sendCI sid mworker $ HeartbeatAck hbid
-- Query processing.
mHandler sid (Query sq) = processQuery sid sq
-- Service shutdown.
mHandler _ Quit = liftIO $ terminate sv
mHandler _ m = merrM $ boxToString $ ["Invalid message:"] %$ [show m]
-- | Compilation functions
nfP = noFeed $ input opts
includesP = (includes $ paths opts)
abortcatch rid rq m = m `catches`
[Handler (\(e :: IOException) -> abortProgram Nothing rid rq $ show e),
Handler (\(e :: PatternMatchFail) -> abortProgram Nothing rid rq $ show e),
Handler (\(e :: ErrorCall) -> abortProgram Nothing rid rq $ show e)]
process prog jobOpts rq rid = abortcatch rid rq $ do
void $ zm $ do
mlogM $ unwords ["Processing program", rq, "(", show rid, ")"]
round1 prog jobOpts rq rid
adjustProfile rep js@(jprofile -> jprof@(jppreport -> jpp)) =
js {jprofile = jprof {jppreport = jpp `mappend` rep}}
-- | Parse, evaluate metaprogram, and apply prepare transform.
preprocess prog = do
mlogM $ "Parsing with paths " ++ show includesP
(pP, pProf) <- reasonM parseError . liftMeasured $ parseK3 nfP includesP prog
(mP, mProf) <- liftMeasured $ runDriverM $ metaprogram opts pP
return (mP, [pProf, mProf])
-- | Compilation round 1: distribute blocks for optimization.
round1 prog jobOpts rq rid = do
(initP, ppProfs) <- preprocess prog
let ppRep = mkReport "Master R1 preprocessing" ppProfs
(pid, blocksByWID, wConfig) <- assignBlocks rid rq jobOpts initP $ Left ppRep
(_, sProf) <- ST.profile $ const $ do
msgs <- mkMessages bcStages wConfig blocksByWID $ \bcs cb -> R1Block pid bcs cb initP
liftZ $ sendCIs mworker msgs
let sRep = mkReport "Master R1 distribution" [sProf]
modifyMJ_ $ \jbs -> Map.adjust (adjustProfile sRep) pid jbs
where bcStages = (dcWorkerPrepStages wst, dcWorkerExecStages wst)
wst = case dcompileSpec jobOpts of
DistributedOpt r1 -> r1
DistributedOptMat r1 _ -> r1
liftMeasured :: IO (Either String a) -> ServiceMM z (a, Measured)
liftMeasured m = liftIE $ do
(rE, p) <- ST.profile $ const m
return $ either Left (\a -> Right (a,p)) rE
{------------------------
- Job assignment.
-----------------------}
-- | Cost-based compile block assignment.
assignBlocks rid rq jobOpts initP repE = do
pid <- progIDM
-- Get the current worker weights, and use them to partition the program.
((wConfig', wBlocks', nassigns', pending', wjs'), aProf) <- ST.profile $ const $ do
(wWeights, wConfig) <- workerWeightsAndConfig jobOpts
when ( Heap.null wWeights ) $ assignError pid
(nwWeights, wBlocks, jobCosts) <- partitionProgram wConfig wWeights initP
-- Compute assignment map delta.
-- Extract block ids per worker, and join with new weights per worker to compute
-- an assignment map with updated weights and new block sets.
let nwaBlockIds = Map.map (\cb -> Set.fromList $ map fst cb) wBlocks
let nwaWeights = foldl (\m (w,wid) -> Map.insert wid w m) Map.empty nwWeights
let nassigns = Map.intersectionWith WorkerAssignment nwaBlockIds nwaWeights
-- Compute new job state.
let pending = foldl (\acc cb -> acc `Set.union` (Set.fromList $ map fst cb)) Set.empty wBlocks
let wjs = Map.intersectionWith (\w c -> WorkerJobState w c $ Map.size c) nwaWeights jobCosts
return (wConfig, wBlocks, nassigns, pending, wjs)
let aRep = mkReport "Master assignment" [aProf]
js <- case repE of
Left ppRep -> js0 rid rq pending' wjs' jobOpts (ppRep <> aRep) []
Right (prof, ppRep) -> js0Prof rid rq pending' jobOpts ppRep prof
modifyMJ_ $ \jbs -> Map.insert pid js jbs
modifyMA_ $ \assigns -> Map.unionWith incrWorkerAssignments assigns nassigns'
logAssignment pid wConfig' nassigns' js
return (pid, wBlocks', wConfig')
wcBlock wid wConfig = maybe blockSizeErr (return . fst) $ Map.lookup wid wConfig
wcFactor wid wConfig = maybe factorErr (return . snd) $ Map.lookup wid wConfig
-- | Compute a min-heap of worker assignment weights, returning a zero-heap if no assignments exist.
-- We also pattern match against worker-specific metadata available in the job options.
workerWeightsAndConfig jobOpts = do
(weightHeap, configMap) <- flip foldMA wp0 accumWorker
if Heap.null weightHeap
then getMW >>= return . Map.foldlWithKey initWorker wp0
else return (weightHeap, configMap)
where wp0 = (Heap.empty, Map.empty)
initWorker (hacc, macc) wid _ =
(Heap.insert (0.0, wid) hacc,
Map.insert wid (workerBlock wid, workerAssignFactor wid) macc)
accumWorker (hacc, macc) wid assigns =
(Heap.insert (waweight assigns, wid) hacc,
Map.insert wid (workerBlock wid, workerAssignFactor wid) macc)
blockSize = defaultBlockSize jobOpts
workerBlock wid = Map.foldlWithKey (matchWorker wid) blockSize $ workerBlockSize jobOpts
workerAssignFactor wid = Map.foldlWithKey (matchWorker wid) 1 $ workerFactor jobOpts
matchWorker wid rv matchstr matchv = if matchstr `isInfixOf` wid then matchv else rv
-- | Cost-based program partitioning.
-- This performs assignments at a per-declaration granularity rather than per-block,
-- using a greedy heuristic to balance work across each worker (the k-partition problem).
-- This returns final worker weights, new compile block assignments, and the job costs per worker.
partitionProgram wConfig wWeights (tnc -> (DRole _, ch)) = do
(_, sCostAndCh) <- sortByCost ch
(nwWeights, newAssigns) <- greedyPartition wConfig wWeights sCostAndCh
(wcBlocks, wcosts) <- foldMapKeyM (return (Map.empty, Map.empty)) newAssigns $ chunkAssigns wConfig
return (nwWeights, Map.map reverse wcBlocks, wcosts)
partitionProgram _ _ _ = throwE "Top level declaration is not a role."
-- | A simple compilation cost model
sortByCost ch = do
(tc, cich) <- foldM foldCost (0,[]) $ zip [0..] ch
return (tc, sortOn fst cich)
foldCost (total, acc) (i, d@(cost -> c)) = do
uid <- liftEitherM $ uidOfD d
return (total + c, acc ++ [(c, (i, uid))])
cost (tag -> DGlobal _ _ (Just (treesize -> n))) = n
cost (tag -> DTrigger _ _ (treesize -> n)) = n
cost _ = 1
-- | Greedy solution to the weighted k-partitioning problem.
-- We maintain a list of heaps (buckets) to make an assignment, where each
-- bucket corresponds to a weight class.
-- We consider each bucket as a candidate and greedily pick the bucket which
-- leads to the smallest imbalance across assignments.
greedyPartition wConfig wWeights sCostAndCh = do
wWeightsByFactor <- foldM groupByFactor Map.empty wWeights
(_, rassigns) <- foldM greedy (wWeightsByFactor, Map.empty) sCostAndCh
let rheap = Heap.map (rebuildWeights rassigns) wWeights
return (rheap, rassigns)
where
rebuildWeights assigns (sz, wid) =
let ichwl = maybe [] id $ Map.lookup wid assigns
in (foldl (\rsz (_, w) -> rsz + fromIntegral w) sz ichwl, wid)
groupByFactor acc (sz, wid) = do
f <- wcFactor wid wConfig
return $ Map.insertWith Heap.union f (Heap.singleton (sz, wid)) acc
greedy (scaledHeap, assignsAndCosts) (w, ich) = do
let candidates = Map.mapWithKey candidateCost scaledHeap
(cf,cwid,_) <- maybe partitionError return $ Map.foldlWithKey pick Nothing candidates
let nscaledHeap = Map.mapWithKey (rebuildS cf) candidates
let nassigns = Map.insertWith (++) cwid [(ich, w)] assignsAndCosts
return (nscaledHeap, nassigns)
where
candidateCost f h = do
((sz, wid), h') <- Heap.uncons h
return (sz, sz + (fromIntegral w / fromIntegral f), wid, h')
pick rOpt _ Nothing = rOpt
pick Nothing f (Just (_, nsz, wid, _)) = Just (f, wid, nsz)
pick rOpt@(Just (_, _, rsz)) f (Just (_, nsz, wid, _)) = if rsz < nsz then rOpt else Just (f, wid, nsz)
rebuildS _ _ Nothing = Heap.empty
rebuildS cf f (Just (sz,nsz,wid,h)) = Heap.insert (if f == cf then nsz else sz, wid) h
-- Creates compile block chunks per worker, and sums up costs per chunk.
chunkAssigns wConfig accM wid wbwl = do
blockSize <- wcBlock wid wConfig
(wcbm,wcm) <- accM
biwsl <- forM (chunksOf blockSize wbwl) $ \bwl -> do
let (chunkcb, chunkcost) = second sum $ unzip bwl
bid <- blockIDM
return ((bid, sortOn fst chunkcb), (bid, fromIntegral chunkcost))
let (wcompileblock, wblockcosts) = second Map.fromList $ unzip biwsl
return $ ( Map.insertWith (++) wid wcompileblock wcbm
, Map.insertWith mergeBlockCosts wid wblockcosts wcm )
-- | Compile block messages construction.
mkMessages (prepStages, cStages) wConfig cBlocksByWID ctor = do
forkFactor <- foldMapKeyM (return 0) cBlocksByWID $ \m wid _ -> (+) <$> m <*> wcBlock wid wConfig
liftM fst $ foldMapKeyM (return ([], 0)) cBlocksByWID $ \m wid cb -> do
wDelta <- wcBlock wid wConfig
(msgacc, wOffset) <- m
let bcSpec = BlockCompileSpec prepStages cStages forkFactor wOffset
wsockid <- getMWI wid >>= maybe (workerError wid) return
return $ (msgacc ++ [(wsockid, ctor bcSpec cb)], wOffset + wDelta)
-- Map helpers to supply fold function as last argument.
foldMapKeyM a m f = Map.foldlWithKey f a m
-- | Merge block costs
mergeBlockCosts = Map.unionWith (+)
-- | Update worker assignments with a new weight and a delta blockset.
incrWorkerAssignments (WorkerAssignment oldbs _) (WorkerAssignment deltabs w) =
flip WorkerAssignment w $ oldbs `Set.union` deltabs
-- | Update worker assignments with a completed or aborted block.
decrWorkerAssignments bid dw (WorkerAssignment blocks weight) =
WorkerAssignment (Set.delete bid blocks) (weight - dw)
-- | Job state constructors.
jprof0 workerjs pprep wreps =
liftIO getTime >>= \start -> return $ JobProfile start Map.empty workerjs pprep wreps
jr0 = JobRound Set.empty Map.empty []
js0 rid rq pending workerjs jobOpts pprep wreps = do
prof <- jprof0 workerjs pprep wreps
return $ JobState rid rq prof (JobRound pending Map.empty []) jr0 jobOpts
js0Prof rid rq pending jobOpts pprep prof = do
return $ JobState rid rq (prof {jppreport = jppreport prof <> pprep}) (JobRound pending Map.empty []) jr0 jobOpts
logAssignment pid wConfig nassigns js =
let wk wid s = wid ++ ":" ++ s
wcstr (wid, (bs,f)) = wk wid $ unwords [" bs:", show bs, "af:", show f]
wastr (wid, WorkerAssignment b w) = (wk wid $ show $ length b, wk wid $ show w)
wststr (wid, jwassign -> jbc) = wk wid $ show $ foldl (+) 0.0 jbc
wconf = map wcstr $ Map.toList wConfig
(wlens, ww) = unzip $ map wastr $ Map.toList nassigns
wcontrib = map wststr $ Map.toList $ jworkerst $ jprofile $ js
in
mlogM $ boxToString $ ["Assignment for program: " ++ show pid]
%$ ["Worker config:"] %$ (indent 2 wconf)
%$ ["Worker weights:"] %$ (indent 2 ww)
%$ ["Block distribution:"] %$ (indent 2 wlens)
%$ ["Load distribution:"] %$ (indent 2 wcontrib)
{------------------------------
- Block completion handling.
-----------------------------}
-- | Block completion processing. This garbage collects jobs and assignment state.
completeBlocks completeF wid pid cblocksByBID report = do
time <- liftIO $ getTime
forM_ cblocksByBID $ \(bid, iblock) -> do
psOpt <- zm $ do
(r, bcontrib) <- modifyMJ $ \sjobs -> tryCompleteJS time wid pid bid iblock sjobs $ Map.lookup pid sjobs
modifyMA_ $ \assigns -> Map.adjust (decrWorkerAssignments bid bcontrib) wid assigns
return r
zm $ modifyMJ_ $ \sjobs -> Map.adjust (appendProfileReport report) pid sjobs
maybe (return ()) (completeF pid) psOpt
-- | Block abort processing. This aborts the given block ids, cleaning up state, but
-- does not affect any other in-flight blocks.
-- TODO: pre-emptively abort all other remaining blocks, and clean up the job state.
abortBlocks wid pid bids reason = do
time <- liftIO $ getTime
forM_ bids $ \bid -> do
psOpt <- zm $ do
(r, bcontrib) <- modifyMJ $ \sjobs -> tryAbortJS time wid pid bid reason sjobs $ Map.lookup pid sjobs
modifyMA_ $ \assigns -> Map.adjust (decrWorkerAssignments bid bcontrib) wid assigns
return r
maybe (return ()) (\(rid,rq,aborts) -> abortProgram (Just pid) rid rq $ formatAborts aborts) psOpt
tryCompleteJS time wid pid bid iblock sjobs jsOpt =
maybe (sjobs, (Nothing, 0.0)) id $ jsOpt >>= \js ->
let jsE = completeJobBlock time wid bid iblock js in
return $ either (completeJS pid sjobs) (incompleteJS pid sjobs) jsE
tryAbortJS time wid pid bid reason sjobs jsOpt =
maybe (sjobs, (Nothing, 0.0)) id $ jsOpt >>= \js ->
let jsE = abortJobBlock time wid bid reason js in
return $ either (completeJS pid sjobs) (incompleteJS pid sjobs) jsE
completeJS pid sjobs (result, contrib) = (Map.delete pid sjobs, (Just result, contrib))
incompleteJS pid sjobs (partials, contrib) = (Map.insert pid partials sjobs, (Nothing, contrib))
completeJobBlock time wid bid iblock js =
let ncomp = if null $ jaborted $ jround1 js
then Map.insertWith (++) bid iblock $ jcompleted $ jround1 js
else Map.empty
npend = Set.delete bid $ jpending $ jround1 js
(nprof, bcontrib) = updateProfile time wid bid $ jprofile js
nround1 = (jround1 js) { jpending = npend, jcompleted = ncomp }
in if Set.null npend
then Left $ ((jrid js, jrq js, jaborted $ jround1 js, nprof, ncomp, jjobOpts js), bcontrib)
else Right $ (js { jround1 = nround1, jprofile = nprof }, bcontrib)
abortJobBlock time wid bid reason js =
let npend = Set.delete bid $ jpending $ jround1 js
(nprof, bcontrib) = updateProfile time wid bid $ jprofile js
nround1 = (jround1 js) { jpending = npend, jaborted = (jaborted $ jround1 js) ++ [(wid, reason)] }
in if Set.null npend
then Left ((jrid js, jrq js, jaborted $ jround1 js), bcontrib)
else Right (js { jround1 = nround1, jprofile = nprof }, bcontrib)
-- | Profile maintenance.
updateProfile time wid bid jprof@(jendTimes &&& jworkerst -> (jends, jws)) =
let njws = Map.adjust (\wjs -> wjs { jwcomplete = jwcomplete wjs - 1 }) wid jws
njends = maybe jends completew $ Map.lookup wid njws
completew wjs = if jwcomplete wjs == 0 then Map.insert wid time jends else jends
bcontrib = maybe 0.0 id (Map.lookup wid njws >>= Map.lookup bid . jwassign)
in (jprof { jendTimes = njends, jworkerst = njws }, bcontrib)
appendProfileReport report js@(jprofile -> jprof) =
js { jprofile = jprof { jreports = (jreports jprof) ++ [report] } }
formatAborts l = boxToString $ flip concatMap l $ \(w,r) -> [unwords ["Worker", w]] ++ (indent 2 $ lines r)
{------------------------------
- Round 1 completion handling.
-----------------------------}
completeRound1 pid (rid, rq, aborts, profile, sources, jobOpts) = abortcatch rid rq $ do
case (aborts, dcompileSpec jobOpts) of
(h:t, _) -> abortProgram (Just pid) rid rq $ formatAborts $ h:t
([], DistributedOpt _) -> completeProgram pid (rid, rq, aborts, profile, sources, jobOpts)
([], DistributedOptMat r1Spec r2Spec) -> do
let prog = DC.role "__global" $ map snd $ sortOn fst $ concatMap snd $ Map.toAscList sources
finalizeRound1 rq rid pid prog jobOpts profile
(dcMasterFinalStages r1Spec)
(dcWorkerPrepStages r2Spec, dcWorkerExecStages r2Spec)
finalizeRound1 rq rid pid prog jobOpts profile mfinalStages wStages = do
mlogM $ unwords ["Completed request", show rid, "round 1", show pid]
void $ zm $ do
modifyMJ_ $ \sjobs -> Map.delete pid sjobs
round2 prog jobOpts rq rid profile mfinalStages wStages
round2 prog jobOpts rq rid profile mfinalStages wStages = do
((r2P, _), r2Prof) <- liftMeasured $ evalTransform Nothing mfinalStages prog
let cleanP = stripProperties $ stripTypeAndEffectAnns r2P
let ppRep = mkReport "Master R2 preprocessing" [r2Prof]
(pid, blocksByWID, wConfig) <- assignBlocks rid rq jobOpts cleanP $ Right (profile, ppRep)
(_, sProf) <- ST.profile $ const $ do
msgs <- mkMessages wStages wConfig blocksByWID $ \bcs cb -> R2Block pid bcs cb cleanP
liftZ $ sendCIs mworker msgs
let sRep = mkReport "Master R2 distribution" [sProf]
modifyMJ_ $ \jbs -> Map.adjust (adjustProfile sRep) pid jbs
{------------------------------
- Program completion handling.
-----------------------------}
-- | Program completion processing. This garbage collects client request state.
completeProgram pid (rid, rq, aborts, profile, sources, jobOpts) = abortcatch rid rq $ do
let prog = DC.role "__global" $ map snd $ sortOn fst $ concatMap snd $ Map.toAscList sources
let finalStages = case dcompileSpec jobOpts of
DistributedOpt r1 -> dcMasterFinalStages r1
DistributedOptMat _ r2 -> dcMasterFinalStages r2
(nprogrpE, fpProf) <- liftIO $ ST.profile $ const $ evalTransform Nothing finalStages prog
case (aborts, nprogrpE) of
(_, Left err) -> abortProgram (Just pid) rid rq err
(h:t, _) -> abortProgram (Just pid) rid rq $ formatAborts $ h:t
([], Right (nprog, _)) -> do
clOpt <- zm $ getMR rid
case clOpt of
Nothing -> zm $ requestError rid
Just cid -> let fpRep = TransformReport (Map.singleton "Master finalization" [fpProf]) Map.empty
in completeRequest pid cid rid rq nprog $ generateReport (reportSize jobOpts) profile fpRep
completeRequest pid cid rid rq prog report = do
mlogM $ unwords ["Completed program", show pid]
sockOpt <- zm $ do
modifyMJ_ $ \sjobs -> Map.delete pid sjobs
modifyMR_ $ \rm -> Map.delete rid rm
modifyMC $ \cm -> tryCompleteCL cid rid cm $ Map.lookup cid cm
maybe (return ()) (\sid -> sendCI sid mworker $ ProgramDone rq prog report) sockOpt
-- | Abort compilation.
abortProgram pidOpt rid rq reason = do
mlogM $ unwords ["Aborting program", rq, maybe "" show pidOpt, take 100 reason ++ "..."]
cid <- zm $ getMR rid >>= maybe (requestError rid) return
abortRequest pidOpt cid rid rq reason
abortRequest pidOpt cid rid rq reason = do
clOpt <- zm $ do
modifyMJ_ $ \sjobs -> maybe sjobs (flip Map.delete sjobs) pidOpt
modifyMR_ $ \rm -> Map.delete rid rm
modifyMC $ \cm -> tryCompleteCL cid rid cm $ Map.lookup cid cm
maybe (return ()) (\sid -> sendCI sid mworker $ ProgramAborted rq reason) clOpt
tryCompleteCL cid rid cm rOpt = maybe (cm, Nothing) (completeClient cid rid cm) rOpt
completeClient cid rid cm rs =
let nrs = Set.delete rid rs in
(if null nrs then Map.delete cid cm else Map.insert cid nrs cm, Just cid)
{------------------------
 - Query processing.
 -----------------------}

-- Status queries: summarize the requested state and reply on the
-- requesting socket.
processQuery sid (SQJobStatus qJobs) = do
  jstm <- zm $ do
    jm <- getMJ
    pqueryJobs qJobs jm
  sendCI sid mworker $ QueryResponse $ SRJobStatus jstm
processQuery sid (SQWorkerStatus qWorkers) = do
  wstm <- zm $ do
    wam <- getMA
    pqueryWorkers qWorkers wam
  sendCI sid mworker $ QueryResponse $ SRWorkerStatus wstm

-- Summarize the requested jobs; an empty id list means "all jobs".
pqueryJobs jobIds jm =
  let qjobs = if null jobIds then Map.keys jm else jobIds
  in return $ foldl (summarizeJob jm) Map.empty qjobs

-- Add one job's status to the accumulator; unknown ids are skipped
-- (the Maybe computation fails and 'adjustMap' leaves the map unchanged).
summarizeJob jm resultAcc pid = adjustMap resultAcc pid $ do
  js <- Map.lookup pid jm
  return $ SJobStatus (jpending $ jround1 js) (Map.keysSet $ jcompleted $ jround1 js)

-- Summarize the requested workers; an empty id list means "all workers".
pqueryWorkers wrkIds wam =
  let workers = if null wrkIds then Map.keys wam else wrkIds
  in return $ foldl (summarizeWorker wam) Map.empty workers

-- Add one worker's status (assigned block count and weight) to the
-- accumulator; unknown ids are skipped as above.
summarizeWorker wam resultAcc wid = adjustMap resultAcc wid $ do
  wa <- Map.lookup wid wam
  return $ SWorkerStatus (Set.size $ wablocks wa) (waweight wa)

-- Insert (k, v) when a value is present; otherwise return the map as-is.
adjustMap m k opt = maybe m (\v -> Map.insert k v m) opt
{------------------------
 - Reporting.
 -----------------------}

-- | Compilation report construction.
-- Combines per-worker wall-clock times and assigned costs into ratio
-- summaries, and renders them together with the master's and the workers'
-- transform reports as one printable box.
generateReport reportsz profile finalreport =
  let mkspan s e = e - s
      -- Render "worker: time" / "worker: value" lines.
      mkwtrep (wid, tspan) = wid ++ ": " ++ (secs $ tspan)
      mkwvstr (wid, v) = wid ++ ": " ++ (show v)
      -- Compile time per worker
      workertimes = Map.map (mkspan $ jstartTime profile) $ jendTimes profile
      -- Assigned cost per worker
      workercontribs = Map.map (foldl (+) 0.0 . jwassign) $ jworkerst $ profile
      -- Per-worker fraction of total worker time.
      totaltime = foldl (+) 0.0 workertimes
      workertratios = Map.map (/ totaltime) workertimes
      -- Per-worker fraction of total worker cost.
      totalcontrib = foldl (+) 0.0 workercontribs
      workercratios = Map.map (/ totalcontrib) workercontribs
      -- Absolute time ratio and cost ratio difference.
      wtcratiodiff = Map.intersectionWith (\t c -> 1.0 - ((abs $ t - c) / t)) workertratios workercratios
      -- Reports.
      masterreport = prettyLines $ mconcat [jppreport profile, finalreport]
      profreport = prettyLines $ limitReport reportsz $ mconcat $ jreports profile
      timereport = map mkwtrep $ Map.toList workertimes
      wtratioreport = map mkwvstr $ Map.toList workertratios
      wcratioreport = map mkwvstr $ Map.toList workercratios
      costreport = map mkwvstr $ Map.toList wtcratiodiff
      -- Keep only the `l` measurement keys with the largest total time.
      limitReport l (TransformReport st sn) = TransformReport (limitMeasures l st) sn
      limitMeasures l st = Map.filterWithKey (\k _ -> Set.member k $ limitKeys l st) st
      limitKeys l st = Set.fromList $ take l $ map fst $ sortBy (compare `on` (Down . snd)) $ limitTimes st
      limitTimes st = map (second $ sum . map measTime) $ Map.toList st
      -- Indentation step used by the box layout below.
      i x = indent $ 2*x
  in
  boxToString $ ["Workers"] %$ (i 1 profreport)
             %$ ["Sequential"] %$ (i 1 masterreport)
             %$ ["Compiler service"]
             %$ (i 1 ["Time ratios"] %$ (i 2 wtratioreport))
             %$ (i 1 ["Cost ratios"] %$ (i 2 wcratioreport))
             %$ (i 1 ["Cost accuracy"] %$ (i 2 costreport))
             %$ ["Time"] %$ (i 1 timereport)

-- | Extract a declaration's UID annotation, or a pretty error box when
-- the annotation is missing or malformed.
uidOfD d = maybe uidErrD (\case {(DUID u) -> return u ; _ -> uidErrD}) $ d @~ isDUID
  where uidErrD = Left $ boxToString $ ["No uid found on "] %+ prettyLines d

-- Shared error messages and throwing helpers.
parseError = "Could not parse input: "
blockSizeErr = throwE $ "Could not find a worker's block size."
factorErr = throwE $ "Could not find a worker's assignment factor."
partitionError = throwE $ "Could not greedily pick a partition"
assignError pid = throwE $ unwords ["Could not assign program", show pid, "(no workers available)"]
workerError wid = throwE $ "No worker named " ++ show wid
requestError rid = throwE $ "No request found: " ++ show rid
-- | Worker-side connection handler: receive one message from the socket
-- and dispatch it to the appropriate block-compilation handler.
processWorkerConn :: ServiceOptions -> ServiceWSTVar -> Int -> Socket z Dealer -> ZMQ z ()
processWorkerConn (serviceId -> wid) sv wtid wworker = do
    msg <- receive wworker
    logmHandler msg
  where
    -- Log prefix: worker id plus worker thread id.
    wPfxM = "[" ++ wid ++ " " ++ show wtid ++ "] "
    wlogM :: (Monad m, MonadIO m) => String -> m ()
    wlogM msg = noticeM $ wPfxM ++ msg
    werrM :: (Monad m, MonadIO m) => String -> m ()
    werrM msg = errorM $ wPfxM ++ msg

    -- Run a service action, rethrowing service-level errors as IO errors.
    zm :: ServiceWM z a -> ZMQ z a
    zm m = runServiceZ sv m >>= either (throwM . userError) return

    -- Decode the wire message; log decode failures, otherwise log and
    -- dispatch the decoded message.
    logmHandler (SC.decode -> msgE) = either werrM (\msg -> wlogM (cshow msg) >> mHandler msg) msgE

    -- | Worker message processing.
    mHandler (R1Block pid bcSpec ublocksByBID prog) = processR1Block pid bcSpec ublocksByBID prog
    mHandler (R2Block pid bcSpec ublocksByBID prog) = processR2Block pid bcSpec ublocksByBID prog
    mHandler (RegisterAck cOpts) = zm $ do
      wlogM $ unwords ["Registered", show cOpts]
      modifyCO_ $ mergeCompileOpts cOpts
    mHandler Quit = liftIO $ terminate sv
    mHandler m = werrM $ boxToString $ ["Invalid message:"] %$ [show m]

    -- | Synchronizes relevant master and worker compiler options.
    -- These are defaults that can be overridden per compile job.
    mergeCompileOpts mcopts wcopts = wcopts { outLanguage = outLanguage $ mcopts
                                           , programName = programName $ mcopts
                                           , outputFile = outputFile $ mcopts
                                           , useSubTypes = useSubTypes $ mcopts
                                           , optimizationLevel = optimizationLevel $ mcopts }

    -- | Block compilation functions.
    -- Round-1 compilation of a batch of blocks; exceptions or an
    -- unexpected stage spec abort the whole batch.
    processR1Block pid (BlockCompileSpec prepStages [SDeclOpt cSpec] wForkFactor wOffset) ublocksByBID prog =
      abortcatch R1BlockAborted pid ublocksByBID $ do
        start <- liftIO getTime
        startP <- liftIO getPOSIXTime
        wlogM $ boxToString $ ["Worker R1 blocks start"] %$ (indent 2 [show startP])
        (cBlocksByBID, finalSt) <- zm $ compileAllBlocks prepStages wForkFactor wOffset ublocksByBID prog
                                          $ compileR1Block pid cSpec
        end <- liftIO getTime
        wlogM $ boxToString $ ["Worker R1 local time"] %$ (indent 2 [secs $ end - start])
        sendC wworker $ R1BlockDone wid pid cBlocksByBID $ ST.rp0 -- ST.report finalSt
    processR1Block pid _ ublocksByBID _ = abortBlock R1BlockAborted pid ublocksByBID $ "Invalid worker compile stages"

    -- Round-2 (materialization) compilation of a batch of blocks.
    processR2Block pid (BlockCompileSpec prepStages [SMaterialization matDebug] wForkFactor wOffset) ublocksByBID prog =
      abortcatch R2BlockAborted pid ublocksByBID $ do
        start <- liftIO getTime
        startP <- liftIO getPOSIXTime
        wlogM $ boxToString $ ["Worker R2 blocks start"] %$ (indent 2 [show startP])
        (cBlocksByBID, finalSt) <- zm $ compileAllBlocks prepStages wForkFactor wOffset ublocksByBID prog
                                          $ compileR2Block pid matDebug
        end <- liftIO getTime
        wlogM $ boxToString $ ["Worker R2 local time"] %$ (indent 2 [secs $ end - start])
        sendC wworker $ R2BlockDone wid pid cBlocksByBID $ ST.report finalSt
    processR2Block pid _ ublocksByBID _ = abortBlock R2BlockAborted pid ublocksByBID $ "Invalid worker compile stages"

    -- Convert synchronous exceptions during block compilation into
    -- block-abort notifications to the master.
    abortcatch ctor pid ublocksByBID m = m `catches`
      [Handler (\(e :: IOException) -> abortBlock ctor pid ublocksByBID $ show e),
       Handler (\(e :: PatternMatchFail) -> abortBlock ctor pid ublocksByBID $ show e),
       Handler (\(e :: ErrorCall) -> abortBlock ctor pid ublocksByBID $ show e)]

    -- Report this batch's block ids as aborted, with a reason.
    abortBlock ctor pid ublocksByBID reason =
      sendC wworker $ ctor wid pid (map fst ublocksByBID) reason

    -- Run the preparatory stages once over the whole program, then fold
    -- the per-block compiler over every block resolved from the prepared
    -- program.
    compileAllBlocks prepStages wForkFactor wOffset ublocksByBID prog compileF = do
      ((initP, _), initSt) <- liftIE $ runTransform Nothing prepStages prog
      workerSt <- maybe wstateErr return $ ST.partitionTransformStSyms wForkFactor wOffset initSt
      dblocksByBID <- extractBlocksByUID initP ublocksByBID
      foldM (compileF initP) ([], workerSt) dblocksByBID

    -- Round-1: declaration-optimization passes over one block.
    compileR1Block pid cSpec _ (blacc, st) (bid, unzip -> (ids, block)) = do
      (nblock, nst) <- debugCompileBlock pid bid block False
                         $ liftIE $ ST.runTransformM st $ ST.runDeclOptPassesBLM cSpec Nothing block
      return (blacc ++ [(bid, zip ids $ map stripTypeAndEffectAnns $ nblock)], ST.mergeTransformStReport st nst)

    -- Round-2: materialization pass over one block.
    compileR2Block pid dbg prog (blacc, st) (bid, unzip -> (ids, block)) = do
      let mst = MatI.prepareInitialIState dbg prog
      (nblock, nst) <- debugCompileBlock pid bid block dbg
                         $ liftIE $ ST.runTransformM st
                         $ mapM (ST.materializationPass dbg ST.mz0 mst) block
      return (blacc ++ [(bid, zip ids nblock)], ST.mergeTransformStReport st nst)

    -- Resolve each (block id, [(index, uid)]) pair to the corresponding
    -- declarations in the prepared program.
    extractBlocksByUID prog ublocksByBID = do
      declsByUID <- indexProgramDecls prog
      forM ublocksByBID $ \(bid,idul) -> do
        iddl <- forM idul $ \(i, UID j) -> maybe (uidErr j) (return . (i,)) $ Map.lookup j declsByUID
        return (bid, iddl)

    -- Log around one block's compilation; dumps the block when dbg is set.
    debugCompileBlock pid bid block dbg m = do
      wlogM $ boxToString $ [unwords ["got block", show pid, show bid, show $ length block]]
                              ++ (if dbg then concatMap prettyLines block else [])
      result <- m
      wlogM $ unwords ["finished block", show pid, show bid]
      return result

    uidErr duid = throwE $ "Could not find declaration " ++ show duid
    wstateErr = throwE $ "Could not create a worker symbol state."
-- | One-shot connection to submit a remote job and wait for compilation to complete.
submitJob :: ServiceOptions -> DistributedCompileOptions -> Options -> IO ()
submitJob sOpts@(serviceId -> rq) dcOpts opts = do
    start <- getTime
    progE <- runDriverM $ k3read opts
    either putStrLn (\p -> runClient Dealer sOpts $ processClientConn start $ concat p) progE
  where
    -- Send the program and block waiting for a single reply.
    processClientConn :: Double -> String -> (forall z. Socket z Dealer -> ZMQ z ())
    processClientConn start prog client = do
      noticeM $ "Client submitting compilation job " ++ rq
      sendC client $ Program rq prog dcOpts
      msg <- receive client
      either errorM (mHandler start) $ SC.decode msg

    -- Reply handling; replies whose request id does not match fall
    -- through to the invalid-message case.
    mHandler :: forall z. Double -> CProtocol -> ZMQ z ()
    mHandler start (ProgramDone rrq prog report) | rq == rrq = liftIO $ do
      end <- getTime
      noticeM $ unwords ["Client finalizing request", rq]
      noticeM $ clientReport (end - start) report
      CPPC.compile (ensureSaves opts) (scompileOpts sOpts) ($) $ prog
    mHandler _ (ProgramAborted rrq reason) | rq == rrq = liftIO $ do
      errorM $ unwords ["Failed to compile request", rq, ":", reason]
    mHandler _ m = errorM $ boxToString $ ["Invalid message:"] %$ [show m]

    -- Force saveRawAST on for the local backend compilation step.
    ensureSaves opts' = opts' {input = (input opts') {saveRawAST = True}}

    clientReport time report =
      boxToString $ ["Compile report:"] %$ (indent 4 $ lines $ report)
                 %$ ["Compile time: " ++ secs time]

-- | Ask a running service to shut down.
shutdownService :: ServiceOptions -> IO ()
shutdownService sOpts = command Dealer sOpts Quit

-- | Dispatch a status query to the worker or job variant.
queryService :: ServiceOptions -> QueryOptions -> IO ()
queryService sOpts (QueryOptions args) = either (queryWorkers sOpts) (queryJobs sOpts) args

-- | Request and print status for the given workers (all when empty).
queryWorkers :: ServiceOptions -> [WorkerID] -> IO ()
queryWorkers sOpts wids = requestreply Dealer sOpts (Query $ SQWorkerStatus wids) mHandler
  where mHandler (QueryResponse (SRWorkerStatus wsm)) = noticeM $ boxToString $ ["Workers"] %$ (map wststr $ Map.toList wsm)
        mHandler m = errorM $ boxToString $ ["Invalid worker query response:"] %$ [show m]
        wststr (wid, SWorkerStatus n w) = unwords [wid, ":", show n, show w]

-- | Request and print status for the given jobs (all when empty).
queryJobs :: ServiceOptions -> [ProgramID] -> IO ()
queryJobs sOpts pids = requestreply Dealer sOpts (Query $ SQJobStatus pids) mHandler
  where mHandler (QueryResponse (SRJobStatus jsm)) = noticeM $ boxToString $ ["Jobs"] %$ (concatMap jststr $ Map.toList jsm)
        mHandler m = errorM $ boxToString $ ["Invalid worker query response:"] %$ [show m]
        jststr (pid, SJobStatus pb cb) = [show pid]
                                           %$ (indent 2 $ [unwords ["Pending:", show pb]])
                                           %$ (indent 2 $ [unwords ["Completed:", show cb]])
| DaMSL/K3 | src/Language/K3/Driver/Service.hs | apache-2.0 | 61,571 | 225 | 62 | 16,406 | 17,818 | 9,487 | 8,331 | 979 | 37 |
module Vision where
import Data.List
import Data.Maybe
import Map
import Position
import Vision.Unrealistic
-- Brute force visibility algorithm
-- | Brute-force visibility: rebuild the whole map as seen from `pos`,
-- starting from row 0. (Renamed the parameter so it no longer shadows
-- Prelude's `map`.)
computeVisible :: GameMap -> Position -> GameMap
computeVisible gameMap pos = doVisibilityBF gameMap pos 0
-- | Recurse over rows, rendering each one via 'checkRow' until the row
-- index runs past the map.
doVisibilityBF :: GameMap -> Position -> Int -> GameMap
doVisibilityBF gameMap pos row
  | row >= length gameMap = []
  | otherwise             = checkRow gameMap pos row 0
                              : doVisibilityBF gameMap pos (row + 1)
-- | Render one row: invisible cells become ' ', visible empty cells
-- become '.', and any other visible cell keeps its map character.
checkRow :: GameMap -> Position -> Int -> Int -> String
checkRow gameMap pos y x
  | x >= length (gameMap !! y) = ""
  | otherwise                  = cell : checkRow gameMap pos y (x + 1)
  where
    cell
      | not (isVisible gameMap pos (x, y)) = ' '
      | c == ' '                           = '.'
      | otherwise                          = c
    c = mapElm gameMap (x, y)
-- | A cell is visible when no wall lies on the elements between start
-- and end.
isVisible :: GameMap -> Position -> Position -> Bool
isVisible gameMap start end =
  not (any isWall (getElements gameMap start end))
| CheeseSucker/haskell-roguelike | src/Vision.hs | bsd-2-clause | 1,013 | 0 | 11 | 312 | 358 | 183 | 175 | 21 | 2 |
-- ------------------------------------------------------------
{- |
Module : Yuuko.Control.Arrow.ArrowNF
Copyright : Copyright (C) 2005-8 Uwe Schmidt
License : MIT
Maintainer : Uwe Schmidt (uwe\@fh-wedel.de)
Stability : experimental
Portability: non-portable
Arrows for evaluation of normal form results
-}
-- ------------------------------------------------------------
module Yuuko.Control.Arrow.ArrowNF
where
import Control.Arrow
import Control.DeepSeq
-- |
-- complete evaluation of an arrow result using 'Control.DeepSeq'
--
-- this is sometimes useful for preventing space leaks, especially after reading
-- and validation of a document, all DTD stuff is not longer in use and can be
-- recycled by the GC.
strictA :: (Arrow a, NFData b) => a b b
strictA = arr force
-- | Arrows whose results can be forced to normal form; 'rnfA' defaults
-- to post-composing with 'strictA'.
class (Arrow a) => ArrowNF a where
    rnfA :: (NFData c) => a b c -> a b c
    rnfA f = f >>> strictA
-- ------------------------------------------------------------
| nfjinjing/yuuko | src/Yuuko/Control/Arrow/ArrowNF.hs | bsd-3-clause | 997 | 11 | 8 | 186 | 133 | 78 | 55 | 8 | 1 |
import Test.HUnit
import Data
import Functions
import Ratio
-- Run the whole HUnit suite.
main = runTestTT tests

tests = TestList (tree ++ alg)

-- Expression-tree fixtures parsed from their textual rendering.
tree1 = read "((a+b)*(c-d))"::ExprTree Algebraic
tree2 = read "((a+b)/(c-d))"::ExprTree Algebraic
tree3 = read "((a+b)+c)"::ExprTree Algebraic
tree4 = read "((a+b)+1%2)"::ExprTree Algebraic

-- Relabelled variants of the fixtures above.
ltree1 = relabel tree1
ltree2 = relabel tree2
ltree3 = relabel tree3

-- Substitution rules: each variable mapped to a rational constant.
srules = [(Alg "a",1%1),(Alg "b",2%1),(Alg "c",3%1),(Alg "d",4%1)]

-- Tree-operation tests: insert, substitute, forkWith, commutate.
tree = [
  TestLabel "Tree" $ TestList [
    TestLabel "insert" $ TestList [
      TestCase (is "insert L" (insert ltree1 (1%2) Add L ltree2)
        (read "((((a+b)*(c-d))+(a+b))/(c-d))"::ExprTree Algebraic)),
      TestCase (is "insert R" (insert ltree1 (3%2) Add R ltree2)
        (relabel $ read "((a+b)/((c-d)+((a+b)*(c-d))))"::ExprTree Algebraic))
      ],
    TestLabel "substitute" $ TestList [
      TestCase (is "tree1" (substitute srules tree1)
        (read "((1%1+2%1)*(3%1-4%1))"::ExprTree Rational)),
      TestCase (is "(a+b)" (substitute srules (read "(a+b)"::ExprTree Algebraic))
        (read "(1%1+2%1)"::ExprTree Rational))
      ],
    TestLabel "forkWith" $ TestList [
      TestCase (is "tree1 /*\\ tree2" (forkWith tree1 Mul L tree2)
        (read "(((a+b)*(c-d))*((a+b)/(c-d)))"::ExprTree Algebraic))
      ],
    TestLabel "commutate" $ TestList [
      TestCase (is "commutate" (commutate tree1)
        (read "((c-d)*(a+b))"::ExprTree Algebraic))
      ]
    ]
  ]

-- Algebraic-helper tests: _substitute lookups, ($$) pairwise function
-- application, diag duplication.
alg = [
  TestLabel "Algebraic" $ TestList [
    TestLabel "_substitute" $ TestList [
      TestCase (is "_substiute a" (_substitute srules (Alg "a")) (1%1)),
      TestCase (is "_substiute 1%2" (_substitute srules (Alg "1%2")) (1%2)),
      TestCase (is "_substiute b" (_substitute srules (Alg "b")) (2%1)),
      TestCase (is "_substiute c" (_substitute srules (Alg "c")) (3%1))
      ],
    TestLabel "($$)" $ TestList [
      TestCase (is "(f1,f2)(1,2)" (((+2),(==3))$$(1,2)) (3,False))
      ],
    TestLabel "diag" $ TestList [
      TestCase (is "1 -> (1,1)" (diag 1) (1,1))
      ]
    ]
  ]

-- Shorthand for a labelled equality assertion.
is ::(Show a, Eq a) => String -> a -> a -> Assertion
is = assertEqual
| epsilonhalbe/Algebra-Alchemy | tFunctions.hs | bsd-3-clause | 2,410 | 3 | 19 | 765 | 847 | 432 | 415 | 45 | 1 |
{-# LANGUAGE ScopedTypeVariables, MultiParamTypeClasses, KindSignatures #-}
module EFA2.Interpreter.Arith where
{-
import qualified Data.Vector.Unboxed as UV
import Data.Ratio
-}
--type Val = Ratio Integer
-- | Scalar value type (currently a plain Double; a Rational alternative
-- is kept above, commented out).
type Val = Double
-- | Container for sequences of values (plain lists; an unboxed-vector
-- alternative is kept below, commented out).
type Container = []
--type Container = UV.Vector
{-
-- ATTENTION on operator presedence: TODO!!!
class Arith a where
zero :: a
cst :: Val -> a
neg :: a -> a
rec :: a -> a
(.+) :: a -> a -> a
(.*) :: a -> a -> a
(./) :: a -> a -> a
(.-) :: a -> a -> a
x .- y = x .+ (neg y)
absol :: (Ord a, Num a) => a -> a
absol x | x < 0 = -x
absol x = x
instance Arith Val where
zero = 0.0
cst = id
neg = negate
rec = recip
(.+) = (+)
(.*) = (*)
x ./ 0 = 0
x ./ y = x / y
instance (Arith a) => Arith [a] where
zero = repeat (zero :: a)
cst x = repeat (cst x :: a)
neg = map neg
rec = map rec
(.+) = zipWith (.+)
(.*) = zipWith (.*)
(./) = zipWith (./)
instance (Arith a, UV.Unbox a) => Arith (UV.Vector a) where
zero = UV.singleton (zero :: a) -- UV.repeat (zero :: a)
cst x = UV.singleton (cst x :: a) -- UV.repeat (cst x :: a)
neg = UV.map neg
rec = UV.map rec
(.+) = UV.zipWith (.+)
(.*) = UV.zipWith (.*)
(./) = UV.zipWith (./)
-} | energyflowanalysis/efa-2.1 | attic/src/EFA2/Interpreter/Arith.hs | bsd-3-clause | 1,428 | 0 | 5 | 536 | 26 | 19 | 7 | 4 | 0 |
-- | Common functionality for rendering pretty output.
module DataFlow.PrettyRenderer where
import Control.Monad.State
import Control.Monad.Writer
-- | Current indentation depth, in spaces.
type Indent = Int
-- | Whether the next 'write' must first emit the pending indentation.
type IndentNext = Bool

data RendererState = RendererState Indent IndentNext

-- | The Renderer represents some output generator that runs on a 'Diagram'.
-- Output is accumulated as a list of string chunks over a state holding
-- the current indentation.
type Renderer t = WriterT [String] (State RendererState) t
-- | Write a string to the output (no linefeed).
-- | Write a string to the output (no linefeed). When the previous write
-- ended a line, the current indentation is emitted first; afterwards the
-- indent-next flag is cleared.
write :: String -> Renderer ()
write s = do
  RendererState n indentNext <- lift get
  tell [if indentNext then replicate n ' ' ++ s else s]
  put (RendererState n False)
-- | Write a string to the output (with linefeed).
-- | Write a string to the output (with linefeed), and mark that the next
-- write must re-emit the indentation.
writeln :: String -> Renderer ()
writeln s = write s >> write "\n" >> modify startFresh
  where startFresh (RendererState n _) = RendererState n True
-- | Increase indent with 2 spaces.
-- | Increase indent with 2 spaces.
indent :: Renderer ()
indent = modify grow
  where grow (RendererState n pending) = RendererState (n + 2) pending
-- | Decrease indent with 2 spaces.
-- | Decrease indent with 2 spaces.
dedent :: Renderer ()
dedent = modify shrink
  where shrink (RendererState n pending) = RendererState (n - 2) pending
-- | Indent the output of gen with 2 spaces.
-- | Run a generator with its output indented by 2 extra spaces.
withIndent :: Renderer () -> Renderer ()
withIndent gen = indent >> gen >> dedent
-- | Run a renderer from depth 0 and concatenate its output chunks.
renderWithIndent :: Renderer () -> String
renderWithIndent r = concat chunks
  where chunks = evalState (execWriterT r) (RendererState 0 False)
| sonyxperiadev/dataflow | src/DataFlow/PrettyRenderer.hs | bsd-3-clause | 1,339 | 0 | 11 | 254 | 383 | 197 | 186 | 31 | 2 |
{-# LANGUAGE
FlexibleContexts
, GADTs
, Rank2Types
, ScopedTypeVariables #-}
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
module Main (main) where
import Control.Applicative
import Control.Monad.ST
import qualified Control.Monad.ST.Lazy as Lazy
import Data.ByteArraySlice
import Data.Int
import Data.Tuple.IO
import Data.Tuple.ST
import Data.Tuple.Storable
import Data.Typeable (Typeable, typeOf)
import Data.Var.IO
import Data.Var.ST
import Data.Var.Storable
import Data.Word
import Foreign.Storable
import Test.Framework
import Test.Framework.Providers.QuickCheck2
import Test.QuickCheck
import Test.QuickCheck.Gen
import Test.QuickCheck.Monadic
-- | One round-trip property per var/tuple flavour; the final entry pins
-- StorableTuple to concrete component types.
main :: IO ()
main =
  defaultMain
  [ testProperty "IOVar" prop_IOVar
  , testProperty "IOUVar" prop_IOUVar
  , testProperty "STVar" prop_STVar
  , testProperty "LazySTVar" prop_LazySTVar
  , testProperty "STUVar" prop_STUVar
  , testProperty "LazySTUVar" prop_LazySTUVar
  , testProperty "StorableVar" prop_StorableVar
  , testProperty "IOTuple" prop_IOTuple
  , testProperty "IOUTuple" prop_IOUTuple
  , testProperty "STTuple" prop_STTuple
  , testProperty "LazySTTuple" prop_LazySTTuple
  , testProperty "STUTuple" prop_STUTuple
  , testProperty "LazySTUTuple" prop_LazySTUTuple
  , testProperty "StorableTuple" prop_StorableTuple
  , testProperty "StorableTuple'" (prop_StorableTuple' ::
                                     Int16 -> Int16 ->
                                     Float -> Float ->
                                     Double -> Double ->
                                     Float -> Float ->
                                     Word16 -> Word16 ->
                                     Property)
  ]
-- Write a value to a var, then read it back; the read must return the
-- written value.
varWriteRead var a = do
  a' <- run $ do
    writeVar var a
    readVar var
  assert $ a == a'

-- Each prop_* below allocates a var of one flavour and checks the
-- write/read round trip; the newXVar helpers are monomorphic aliases of
-- 'newVar' that pin the var type for the property.
prop_IOVar (a :: Integer, b) = monadicIO $ do
  var <- run $ newIOVar a
  varWriteRead var b

newIOVar :: a -> IO (IOVar a)
newIOVar = newVar

prop_IOUVar (SomeByteArraySlice2 a a') = monadicIO $ do
  var <- run $ newIOUVar a
  varWriteRead var a'

newIOUVar :: ByteArraySlice a => a -> IO (IOUVar a)
newIOUVar = newVar

prop_STVar (a :: Integer, b) = monadicST $ do
  var <- run $ newSTVar a
  varWriteRead var b

newSTVar :: a -> ST s (STVar s a)
newSTVar = newVar

prop_LazySTVar (a :: Integer, b) = monadicLazyST $ do
  var <- run $ newLazySTVar a
  varWriteRead var b

newLazySTVar :: a -> Lazy.ST s (STVar s a)
newLazySTVar = newVar

prop_STUVar (SomeByteArraySlice2 a a') = monadicST $ do
  var <- run $ newSTUVar a
  varWriteRead var a'

newSTUVar :: ByteArraySlice a => a -> ST s (STUVar s a)
newSTUVar = newVar

prop_LazySTUVar (SomeByteArraySlice2 a a') = monadicLazyST $ do
  var <- run $ newLazySTUVar a
  varWriteRead var a'

newLazySTUVar :: ByteArraySlice a => a -> Lazy.ST s (STUVar s a)
newLazySTUVar = newVar

prop_StorableVar (SomeStorable2 a a') = monadicIO $ do
  var <- run $ newStorableVar a
  varWriteRead var a'

newStorableVar :: Storable a => a -> IO (StorableVar a)
newStorableVar = newVar
-- Write each slot of a mutable 5-tuple in turn and read it back.
tupleWriteRead tuple (a, b, c, d, e) = do
  writeOnly1 tuple a
  a' <- run $ read1 tuple
  assert $ a == a'
  writeOnly2 tuple b
  b' <- run $ read2 tuple
  assert $ b == b'
  writeOnly3 tuple c
  c' <- run $ read3 tuple
  assert $ c == c'
  writeOnly4 tuple d
  d' <- run $ read4 tuple
  assert $ d == d'
  writeOnly5 tuple e
  e' <- run $ read5 tuple
  assert $ e == e'

-- Thawing then freezing must reproduce the original immutable tuple.
tupleThawFreeze thaw xs = do
  tuple <- run $ thaw xs
  xs' <- run $ freezeTuple tuple
  assert $ xs == xs'

-- writeOnlyN: write slot N and assert the other four slots are untouched;
-- readAllButN reads everything except slot N.
writeOnly1 t a = do
  xs <- run $ readAllBut1 t
  run $ write1 t a
  xs' <- run $ readAllBut1 t
  assert $ xs == xs'
readAllBut1 t = (,,,) <$> read2 t <*> read3 t <*> read4 t <*> read5 t
writeOnly2 t a = do
  xs <- run $ readAllBut2 t
  run $ write2 t a
  xs' <- run $ readAllBut2 t
  assert $ xs == xs'
readAllBut2 t = (,,,) <$> read1 t <*> read3 t <*> read4 t <*> read5 t
writeOnly3 t a = do
  xs <- run $ readAllBut3 t
  run $ write3 t a
  xs' <- run $ readAllBut3 t
  assert $ xs == xs'
readAllBut3 t = (,,,) <$> read1 t <*> read2 t <*> read4 t <*> read5 t
writeOnly4 t a = do
  xs <- run $ readAllBut4 t
  run $ write4 t a
  xs' <- run $ readAllBut4 t
  assert $ xs == xs'
readAllBut4 t = (,,,) <$> read1 t <*> read2 t <*> read3 t <*> read5 t
writeOnly5 t a = do
  xs <- run $ readAllBut5 t
  run $ write5 t a
  xs' <- run $ readAllBut5 t
  assert $ xs == xs'
readAllBut5 t = (,,,) <$> read1 t <*> read2 t <*> read3 t <*> read4 t
-- Per-flavour tuple properties: thaw a tuple, exercise each slot, then
-- check the thaw/freeze round trip. The thawXTuple helpers are
-- monomorphic aliases of 'thawTuple' pinning the mutable tuple type.
prop_IOTuple (a :: IntegerTuple, b) = monadicIO $ do
  tuple <- run $ thawIOTuple a
  tupleWriteRead tuple b
  tupleThawFreeze thawIOTuple a

thawIOTuple :: MTuple IOTuple a IO => a -> IO (IOTuple a)
thawIOTuple = thawTuple

prop_IOUTuple (SomeByteArraySlice2 a a',
               SomeByteArraySlice2 b b',
               SomeByteArraySlice2 c c',
               SomeByteArraySlice2 d d',
               SomeByteArraySlice2 e e') = monadicIO $ do
  let xs = (a, b, c, d, e)
  tuple <- run $ thawIOUTuple xs
  tupleWriteRead tuple (a', b', c', d', e')
  tupleThawFreeze thawIOUTuple xs

thawIOUTuple :: MTuple IOUTuple a IO => a -> IO (IOUTuple a)
thawIOUTuple = thawTuple

prop_STTuple (a :: IntegerTuple, b) = monadicST $ do
  tuple <- run $ thawSTTuple a
  tupleWriteRead tuple b
  tupleThawFreeze thawSTTuple a

thawSTTuple :: MTuple (STTuple s) a (ST s) => a -> ST s (STTuple s a)
thawSTTuple = thawTuple

prop_LazySTTuple (a :: IntegerTuple, b) = monadicLazyST $ do
  tuple <- run $ thawLazySTTuple a
  tupleWriteRead tuple b
  tupleThawFreeze thawLazySTTuple a

thawLazySTTuple :: MTuple (STTuple s) a (Lazy.ST s) => a -> Lazy.ST s (STTuple s a)
thawLazySTTuple = thawTuple

prop_STUTuple (SomeByteArraySlice2 a a',
               SomeByteArraySlice2 b b',
               SomeByteArraySlice2 c c',
               SomeByteArraySlice2 d d',
               SomeByteArraySlice2 e e') = monadicST $ do
  let xs = (a, b, c, d, e)
  tuple <- run $ thawSTUTuple xs
  tupleWriteRead tuple (a', b', c', d', e')
  tupleThawFreeze thawSTUTuple xs

thawSTUTuple :: MTuple (STUTuple s) a (ST s) => a -> ST s (STUTuple s a)
thawSTUTuple = thawTuple

prop_LazySTUTuple (SomeByteArraySlice2 a a',
                   SomeByteArraySlice2 b b',
                   SomeByteArraySlice2 c c',
                   SomeByteArraySlice2 d d',
                   SomeByteArraySlice2 e e') = monadicLazyST $ do
  let xs = (a, b, c, d, e)
  tuple <- run $ thawLazySTUTuple xs
  tupleWriteRead tuple (a', b', c', d', e')
  tupleThawFreeze thawLazySTUTuple xs

thawLazySTUTuple :: MTuple (STUTuple s) a (Lazy.ST s) => a -> Lazy.ST s (STUTuple s a)
thawLazySTUTuple = thawTuple

prop_StorableTuple (SomeStorable2 a a',
                    SomeStorable2 b b',
                    SomeStorable2 c c',
                    SomeStorable2 d d',
                    SomeStorable2 e e') = prop_StorableTuple' a a' b b' c c' d d' e e'

prop_StorableTuple' a a' b b' c c' d d' e e' = monadicIO $ do
  let xs = (a, b, c, d, e)
  tuple <- run $ thawStorableTuple xs
  tupleWriteRead tuple (a', b', c', d', e')
  tupleThawFreeze thawStorableTuple xs

thawStorableTuple :: MTuple StorableTuple a IO => a -> IO (StorableTuple a)
thawStorableTuple = thawTuple
-- Existential pair of two values of one arbitrarily chosen type that
-- satisfies the ByteArraySlice (and testing) constraints.
data SomeByteArraySlice2 where
  SomeByteArraySlice2 :: ( Show a
                         , Eq a
                         , Arbitrary a
                         , ByteArraySlice a
                         ) => a -> a -> SomeByteArraySlice2

instance Show SomeByteArraySlice2 where
  showsPrec p (SomeByteArraySlice2 a a') = showsPrec p (a, a')
  show (SomeByteArraySlice2 a a') = show (a, a')

instance Arbitrary SomeByteArraySlice2 where
  -- First pick a type via the dictionary generator, then two values of
  -- that type.
  arbitrary = do
    (ByteArraySliceDict (Proxy :: Proxy a)) <- arbitrary
    (a, a') :: (a, a) <- arbitrary
    return $ SomeByteArraySlice2 a a'
  shrink (SomeByteArraySlice2 a a') =
    map (uncurry SomeByteArraySlice2) $ shrink (a, a')

-- Existential witness of a type usable as a ByteArraySlice in tests.
data ByteArraySliceDict where
  ByteArraySliceDict :: ( Show a
                        , Eq a
                        , Arbitrary a
                        , ByteArraySlice a
                        ) => Proxy a -> ByteArraySliceDict

instance Arbitrary ByteArraySliceDict where
  -- Primitive types at the leaves; tuples of recursively generated types
  -- at the branches, with the size budget divided among the components.
  arbitrary = do
    n <- size
    oneof $ if n <= 0 then leaf else leaf ++ branch
    where
      leaf =
        map pure
        [ ByteArraySliceDict (Proxy :: Proxy Bool)
        , ByteArraySliceDict (Proxy :: Proxy Char)
        , ByteArraySliceDict (Proxy :: Proxy Double)
        , ByteArraySliceDict (Proxy :: Proxy Float)
        , ByteArraySliceDict (Proxy :: Proxy Int)
        , ByteArraySliceDict (Proxy :: Proxy Int8)
        , ByteArraySliceDict (Proxy :: Proxy Int16)
        , ByteArraySliceDict (Proxy :: Proxy Int32)
        , ByteArraySliceDict (Proxy :: Proxy Int64)
        , ByteArraySliceDict (Proxy :: Proxy Word)
        , ByteArraySliceDict (Proxy :: Proxy Word8)
        , ByteArraySliceDict (Proxy :: Proxy Word16)
        , ByteArraySliceDict (Proxy :: Proxy Word32)
        , ByteArraySliceDict (Proxy :: Proxy Word64)
        ]
      branch =
        [ do
            let m = resize' (`div` 2) arbitrary
            ByteArraySliceDict (Proxy :: Proxy a) <- m
            ByteArraySliceDict (Proxy :: Proxy b) <- m
            return $ ByteArraySliceDict (Proxy :: Proxy (a, b))
        , do
            let m = resize' (`div` 3) arbitrary
            ByteArraySliceDict (Proxy :: Proxy a) <- m
            ByteArraySliceDict (Proxy :: Proxy b) <- m
            ByteArraySliceDict (Proxy :: Proxy c) <- m
            return $ ByteArraySliceDict (Proxy :: Proxy (a, b, c))
        , do
            let m = resize' (`div` 4) arbitrary
            ByteArraySliceDict (Proxy :: Proxy a) <- m
            ByteArraySliceDict (Proxy :: Proxy b) <- m
            ByteArraySliceDict (Proxy :: Proxy c) <- m
            ByteArraySliceDict (Proxy :: Proxy d) <- m
            return $ ByteArraySliceDict (Proxy :: Proxy (a, b, c, d))
        , do
            let m = resize' (`div` 5) arbitrary
            ByteArraySliceDict (Proxy :: Proxy a) <- m
            ByteArraySliceDict (Proxy :: Proxy b) <- m
            ByteArraySliceDict (Proxy :: Proxy c) <- m
            ByteArraySliceDict (Proxy :: Proxy d) <- m
            ByteArraySliceDict (Proxy :: Proxy e) <- m
            return $ ByteArraySliceDict (Proxy :: Proxy (a, b, c, d, e))
        ]
-- Existential pair of two values of one arbitrarily chosen Storable
-- (and Typeable, for diagnostics) type.
data SomeStorable2 where
  SomeStorable2 :: ( Show a
                   , Eq a
                   , Typeable a
                   , Arbitrary a
                   , Storable a
                   ) => a -> a -> SomeStorable2

instance Show SomeStorable2 where
  -- Include the runtime type in the rendering to aid failure diagnosis.
  showsPrec p (SomeStorable2 a a') = showsPrec p ((a, typeOf a), (a', typeOf a'))
  show (SomeStorable2 a a') = show ((a, typeOf a), (a', typeOf a'))

instance Arbitrary SomeStorable2 where
  arbitrary = do
    StorableDict (Proxy :: Proxy a) <- arbitrary
    (a, a') :: (a, a) <- arbitrary
    return $ SomeStorable2 a a'
  shrink (SomeStorable2 a a') =
    map (uncurry SomeStorable2) $ shrink (a, a')

-- Existential witness of a Storable-capable primitive type.
data StorableDict where
  StorableDict :: ( Show a
                  , Eq a
                  , Typeable a
                  , Arbitrary a
                  , Storable a
                  ) => Proxy a -> StorableDict

instance Arbitrary StorableDict where
  arbitrary =
    elements
    [ StorableDict (Proxy :: Proxy Bool)
    , StorableDict (Proxy :: Proxy Char)
    , StorableDict (Proxy :: Proxy Double)
    , StorableDict (Proxy :: Proxy Float)
    , StorableDict (Proxy :: Proxy Int)
    , StorableDict (Proxy :: Proxy Int8)
    , StorableDict (Proxy :: Proxy Int16)
    , StorableDict (Proxy :: Proxy Int32)
    , StorableDict (Proxy :: Proxy Int64)
    , StorableDict (Proxy :: Proxy Word)
    , StorableDict (Proxy :: Proxy Word8)
    , StorableDict (Proxy :: Proxy Word16)
    , StorableDict (Proxy :: Proxy Word32)
    , StorableDict (Proxy :: Proxy Word64)
    ]

type IntegerTuple = (Integer, Integer, Integer, Integer, Integer)

-- | Run a monadic property in the lazy ST monad.
monadicLazyST :: (forall s . PropertyM (Lazy.ST s) a) -> Property
monadicLazyST m = property (runLazySTGen (monadic' m))

-- | Run a lazy-ST-valued generator (uses QuickCheck 'Gen' internals).
runLazySTGen :: (forall s . Gen (Lazy.ST s a)) -> Gen a
runLazySTGen g = MkGen $ \ r n -> Lazy.runST (unGen g r n)

-- | The current QuickCheck size parameter.
size :: Gen Int
size = MkGen $ \ _ n -> n

-- | Run a generator with a transformed size.
resize' :: (Int -> Int) -> Gen a -> Gen a
resize' f m = sized $ \ n -> resize (f n) m

-- Local stand-in for Data.Proxy's Proxy.
data Proxy a = Proxy
| sonyandy/var | tests/properties.hs | bsd-3-clause | 12,307 | 0 | 18 | 3,543 | 4,391 | 2,202 | 2,189 | 323 | 1 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.EXT.DrawInstanced
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.EXT.DrawInstanced (
-- * Extension Support
glGetEXTDrawInstanced,
gl_EXT_draw_instanced,
-- * Functions
glDrawArraysInstancedEXT,
glDrawElementsInstancedEXT
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Functions
| haskell-opengl/OpenGLRaw | src/Graphics/GL/EXT/DrawInstanced.hs | bsd-3-clause | 651 | 0 | 4 | 89 | 47 | 36 | 11 | 7 | 0 |
module Main where
import Lib
-- | Entry point; 'someFunc' is provided by Lib.
main :: IO ()
main = someFunc
{-99 Haskell Problems-}
-- | Problem 1: last element of a non-empty list (errors on []).
myLast :: [a] -> a
myLast (x:xs) = foldl (\_ y -> y) x xs
-- | Problem 2: second-to-last element (errors on lists shorter than 2).
myButtLast :: [a] -> a
myButtLast = last . init
-- | Problem 3 (0-indexed here): the element at position k.
elementAt :: [a] -> Int -> a
elementAt xs k = xs !! k
-- | Problem 4: length of a list via a right fold.
myLength :: [a] -> Int
myLength = foldr (\_ acc -> acc + 1) 0
-- | Problem 5: reverse a list.
-- Rewritten with an accumulator: the original appended one element per
-- step (`myReverse xs ++ [x]`), which is O(n^2); this is O(n).
myReverse :: [a] -> [a]
myReverse = go []
  where
    go acc []     = acc
    go acc (x:xs) = go (x : acc) xs
-- | Problem 6: is the list a palindrome?
-- The original's guards returned literal True/False; the comparison is
-- already the answer.
myPalindrome :: (Eq a) => [a] -> Bool
myPalindrome x = x == reverse x
-- | Remove duplicates, keeping each element's first occurrence
-- (note: this de-duplicates globally, like `nub`, not just consecutive
-- runs as problem 8 asks).
compress :: (Eq a) => [a] -> [a]
compress []     = []
compress (x:xs) = x : compress (filter (/= x) xs)
-- | Group equal elements into sublists, one group per distinct value in
-- first-occurrence order (note: this groups globally, merging separated
-- runs, unlike problem 9's consecutive grouping).
pack :: (Eq a) => [a] -> [[a]]
pack []     = []
pack [x]    = [[x]]
pack (x:xs) = (x : filter (== x) xs) : pack (filter (/= x) xs)
-- | Run-length encode: one (count, value) pair per group from 'pack'.
encode :: (Eq a) => [a] -> [(Int, a)]
encode s = [ (length g, head g) | g <- pack s ]
-- | Run-length item: a lone value, or a value with its repeat count
-- (used by the "modified run-length encoding" problems).
data List a = Single a | Multiple Int a
  deriving Show
-- | Modified run-length encoding: counts of 1 become 'Single'.
encodeModified :: (Eq a) => [a] -> [List a]
encodeModified = map toItem . encode
  where
    toItem (1, x) = Single x
    toItem (n, x) = Multiple n x
-- | Invert 'encodeModified': expand each item back into its elements.
decode :: [List a] -> [a]
decode = concatMap expand
  where
    expand (Single x)     = [x]
    expand (Multiple n x) = replicate n x
-- | Run-length encode without building intermediate groups. Like 'pack',
-- this counts every occurrence of a value, not just consecutive runs.
encodeDirect :: (Eq a) => [a] -> [List a]
encodeDirect []     = []
encodeDirect (x:xs) =
  toItem (1 + length (filter (== x) xs)) x : encodeDirect (filter (x /=) xs)
  where
    toItem 1 c = Single c
    toItem n c = Multiple n c
-- | Problem 14: duplicate every element.
dupl :: [a] -> [a]
dupl = concatMap (\x -> [x, x])
-- | Problem 15: replicate each element n times.
repli :: [a] -> Int -> [a]
repli xs n = concatMap (replicate n) xs
-- | Problem 16: drop every n-th element (1-indexed positions).
dropEvery :: [a] -> Int -> [a]
dropEvery s n = [ x | (i, x) <- zip [1 ..] s, i `mod` n /= 0 ]
-- | Problem 17: split a list into its first n elements and the rest.
-- The prefix was computed with a double reverse and length arithmetic;
-- 'splitAt' does the same in one traversal. An empty input still yields
-- [] (not [[], []]), matching the original.
spliter :: [a] -> Int -> [[a]]
spliter [] _ = []
spliter s n  = [before, after]
  where (before, after) = splitAt n s
-- | Problem 18: 1-indexed inclusive slice from position `start` to `stop`.
-- The double-reverse/length arithmetic collapses algebraically to a
-- single take/drop; out-of-range stop values are clipped as before.
slice :: [a] -> Int -> Int -> [a]
slice [] _ _          = []
slice s start stop    = take (stop - start + 1) (drop (start - 1) s)
-- | Problem 19: rotate a list n places to the left (n may be negative or
-- exceed the length). The original normalized n by repeated subtraction
-- and built both halves with double-reversing slices; `mod` plus a
-- single drop/take is equivalent (Haskell's `mod` already yields a
-- result in [0, len) for negative n) and O(n).
rotate :: [a] -> Int -> [a]
rotate [] _ = []
rotate s n  = drop k s ++ take k s
  where k = n `mod` length s
-- | Problem 20 (0-indexed): the element at position n, and the list
-- without it. Replaces the slice/elementAt arithmetic with direct
-- indexing and take/drop; still errors when n is out of range.
removeAt :: [a] -> Int -> (a, [a])
removeAt s n = (s !! n, take n s ++ drop (n + 1) s)
-- | Problem 21: insert an element so it ends up at 1-indexed position n.
-- Equivalent to the original slice arithmetic, via a single splitAt.
insertAt :: [a] -> a -> Int -> [a]
insertAt xs x n = before ++ x : after
  where (before, after) = splitAt (n - 1) xs
-- | Problem 22: the integers from n1 to n2 inclusive.
range :: Int -> Int -> [Int]
range = enumFromTo
-- | Set-style equality: every element of each list occurs in the other
-- (multiplicity is ignored). The `all`-based form already yields the
-- original's results for empty lists, so the three special-case clauses
-- are gone.
listEq :: (Eq a) => [a] -> [a] -> Bool
listEq s1 s2 = all (`elem` s2) s1 && all (`elem` s1) s2
-- | Negation of 'listEq'; the original's guards returned literal
-- True/False where `not` suffices.
listNeq :: (Eq a) => [a] -> [a] -> Bool
listNeq s1 s2 = not (listEq s1 s2)
-- | Drop sublists that are set-equal ('listEq') to an earlier one,
-- keeping first occurrences.
listRemoveDupes :: (Eq a) => [[a]] -> [[a]]
listRemoveDupes [[]]     = [[]]
listRemoveDupes []       = []
listRemoveDupes (x:rest) = x : listRemoveDupes (filter (listNeq x) rest)
-- | All size-n combinations (as lists) of distinct elements of s,
-- de-duplicated up to set equality after each growth round.
combinations :: (Eq a) => Int -> [a] -> [[a]]
combinations 0 _  = [[]]
combinations _ [] = [[]]
combinations n s  = grow 1 (map (: []) s)
  where
    -- Extend every partial combination by one element it does not yet
    -- contain, until the combinations reach length n.
    grow k acc
      | k == n    = acc
      | otherwise = grow (k + 1)
                         (listRemoveDupes
                            [ xs ++ [y] | xs <- acc, y <- s, y `notElem` xs ])
{- TODO the second combinatorics problem on the haskell website.-}
-- | No element in common. NOTE(review): two empty lists deliberately(?)
-- return False here, although mathematically they are disjoint — kept
-- for compatibility.
isDisjoint :: (Eq a) => [a] -> [a] -> Bool
isDisjoint [] [] = False
isDisjoint s1 s2 = all (`notElem` s2) s1
{-| TODO Finish this.-}
{-grouper :: (Eq a) => [Int] -> [a] -> [[[a]]]
grouper n s = g (map (`combinations`s) n)
where f x s = filter (isDisjoint x) s
g (x:y:s)
|y == [] = []
|otherwise = map (\z -> g (f z y) (y:s)) x -}
-- | Problem 28a: sort sublists by length (quicksort-style).
-- BUG FIX: the original partitioned on strict `<` and `>` only, so any
-- sublist whose length equalled the pivot's was silently dropped from
-- the result. Equal lengths now go into the left partition.
sortOnLength :: [[a]] -> [[a]]
sortOnLength []     = []
sortOnLength (x:xs) =
  sortOnLength shorterOrEqual ++ [x] ++ sortOnLength longer
  where
    shorterOrEqual = [ y | y <- xs, length y <= length x ]
    longer         = [ y | y <- xs, length y >  length x ]
-- | Primes up to n by sieving. The original removed multiples with
-- `y `notElem` map (x*) [2..n]`, an O(n) membership scan per candidate;
-- a divisibility test is equivalent here (every composite survivor is a
-- proper multiple of some earlier prime) and far cheaper.
sieveEratosthenes :: Int -> [Int]
sieveEratosthenes n = go [2 .. n]
  where
    go []     = []
    go (p:xs) = p : go (filter (\y -> y `mod` p /= 0) xs)
-- | n is prime iff it is the largest entry of the ascending sieve up to
-- n (equivalent to the original's membership test).
isPrime :: Int -> Bool
isPrime n = case sieveEratosthenes n of
  [] -> False
  ps -> last ps == n
-- | Greatest common divisor.
-- BUG FIX: the subtraction-based version looped forever whenever either
-- argument was 0 (it kept swapping `gcd' n 0`). The `mod`-based Euclid
-- terminates for all non-negative inputs and agrees with the original on
-- the positive inputs it handled.
gcd' :: Int -> Int -> Int
gcd' n1 0  = n1
gcd' n1 n2 = gcd' n2 (n1 `mod` n2)
-- | Problem 33: coprime iff the gcd is 1. The original's guards returned
-- literal True/False; the comparison is the answer.
isCoPrime :: Int -> Int -> Bool
isCoPrime n1 n2 = gcd' n1 n2 == 1
-- | Problem 34: Euler's totient by counting the coprime k in [1, n].
eulerTotient :: Int -> Int
eulerTotient n = length [ k | k <- [1 .. n], isCoPrime n k ]
-- | Problem 35: prime factorisation (ascending, with multiplicity).
primeFactors :: Int -> [Int]
primeFactors n
  | isPrime n = [n]
  | otherwise = p : primeFactors (n `div` p)
  where
    -- smallest prime divisor of n
    p = head [ q | q <- sieveEratosthenes n, n `mod` q == 0 ]
-- | Problem 36: prime factors as (multiplicity, prime) pairs.
encodePrimeFactors :: Int -> [(Int, Int)]
encodePrimeFactors n = encode (primeFactors n)
-- | Problem 37: totient from the factorisation,
-- phi(n) = product of (p - 1) * p^(k - 1) over prime powers p^k.
eulerTotient' :: Int -> Int
eulerTotient' n =
  product [ (p - 1) * p ^ (k - 1) | (k, p) <- encodePrimeFactors n ]
-- | Problem 39: primes in [l, u]; the sieve is ascending, so a dropWhile
-- suffices.
primesRange :: Int -> Int -> [Int]
primesRange l u = dropWhile (< l) (sieveEratosthenes u)
-- | All length-n sequences drawn from s with repetition allowed
-- (Cartesian-product style growth, no de-duplication).
combinationsWithDupes :: (Eq a) => Int -> [a] -> [[a]]
combinationsWithDupes 0 _  = [[]]
combinationsWithDupes _ [] = [[]]
combinationsWithDupes n s  = grow 1 (map (: []) s)
  where
    grow k acc
      | k == n    = acc
      | otherwise = grow (k + 1) [ xs ++ [y] | xs <- acc, y <- s ]
{-| Problem 40: two primes summing to n.
    TODO (original note: "fix empty list issue") -- 'head' crashes when
    no pair exists, e.g. for odd n or n < 4; the lambda pattern \[x,y]
    is also partial but only ever sees the 2-element results of
    'combinationsWithDupes 2'. -}
goldbach :: Int -> (Int,Int)
goldbach n = snd
           . head
           . filter (\(x, _) -> x == n)
           . map (\[x,y] -> ((x+y),(x,y)))
           . combinationsWithDupes 2
           . sieveEratosthenes $ n
-- | Problem 41: Goldbach pairs for the even numbers in (l, u]
-- (the ascending even range makes dropWhile/filter interchangeable).
goldbachList :: Int -> Int -> [(Int,Int)]
goldbachList l u = [ goldbach k | k <- [2, 4 .. u], k > l ]
-- | Problem 49: the n-bit Gray code.
-- BUG FIX: the previous version fed a 2n-character multiset of '1's and
-- '0's through 'combinationsWithDupes', producing massively duplicated
-- strings that are not a Gray sequence at all (e.g. 16 strings for
-- n = 2). This is the standard reflect-and-prefix construction:
-- G(n) = 0:G(n-1) ++ 1:reverse(G(n-1)).
grayC :: Int -> [String]
grayC n
  | n <= 0    = [""]
  | otherwise = map ('0' :) prev ++ map ('1' :) (reverse prev)
  where prev = grayC (n - 1)
| MauriceIsAG/HaskellScratch | .stack-work/intero/intero19793okA.hs | bsd-3-clause | 7,055 | 0 | 18 | 2,298 | 3,617 | 1,915 | 1,702 | 186 | 3 |
module Data.Classifier
( Classifier(Classifier)
, empty
, train
, classify
)
where
import Data.Char
import Data.Function
import Data.Maybe
import Data.Tokenizer
import qualified Data.List as L
import qualified Data.Map as M
-- | Naive-Bayes-style classifier state accumulated by 'train'.
data Classifier = Classifier {
    -- | Number of training documents seen per category.
    ccount :: M.Map String Int
    -- | Word occurrence counts, keyed by (category, word).
  , wcount :: M.Map (String, String) Int
  } deriving (Eq, Show)
-- | A classifier with no training data.
empty :: Classifier
empty = Classifier M.empty M.empty
-- | Number of training documents recorded for @category@ (0 if unseen).
findCCount :: Classifier -> String -> Int
findCCount cls category = M.findWithDefault 0 category (ccount cls)
-- | Occurrences of @word@ recorded under @category@ (0 if unseen).
findWCount :: Classifier -> String -> String -> Int
findWCount cls category word = M.findWithDefault 0 (category, word) (wcount cls)
-- | Fold (word, occurrences) pairs into the word-count map, keyed by
-- (category, word); counts for existing keys are added.
--
-- A strict left fold replaces the previous manual recursion, which used
-- the partial 'head' / 'tail' and tested emptiness with @groups == []@.
-- The insertion order (left to right) is unchanged.
updateWCounts :: M.Map (String, String) Int -> String -> [(String, Int)] -> M.Map (String, String) Int
updateWCounts wm category groups = L.foldl' step wm groups
  where
    step m (word, c) = M.insertWith (+) (category, word) c m
-- | Train on one document: bump the document count for @category@ and
-- add each stemmed word's occurrence count to the word map.
train :: Classifier -> String -> String -> Classifier
train cls category text = Classifier ccounts wcounts
  where ccounts = M.insertWith (+) category 1 $ ccount cls
        -- (word, occurrences) pairs: sort, group equal words, count runs.
        -- 'head' is safe: L.group never yields empty sublists.
        wordGroups = map (\xs -> (head xs, length xs)) $ L.group $ L.sort $ stemText text
        wcounts = updateWCounts (wcount cls) category wordGroups
-- | The category whose score for @text@ is highest.
--
-- A stable descending sort via a flipped comparator replaces the old
-- @compare `on` (\e -> 1.0 - snd e)@ trick, which was obscure; ties still
-- resolve to the first category in map order (stable sort, EQ on ties).
-- An untrained classifier now fails with a descriptive error instead of
-- @head: empty list@.
classify :: Classifier -> String -> String
classify cls text =
  case L.sortBy (flip (compare `on` snd)) (categoryScores cls text) of
    ((category, _) : _) -> category
    []                  -> error "classify: classifier has no trained categories"
-- | Total number of training documents across all categories.
totalCount :: Classifier -> Int
totalCount = M.foldr (+) 0 . ccount
-- | All category names seen in training, in ascending order.
categories :: Classifier -> [String]
categories = M.keys . ccount
-- | Score of @text@ under @category@: the category's prior (its share of
-- all training documents) times the document probability.
--
-- NOTE(review): on an untrained classifier this is 0/0 = NaN — confirm
-- callers always train before classifying.
textProb :: Classifier -> String -> String -> Double
textProb cls category text = c / t * d
  where c = fromIntegral (findCCount cls category) :: Double
        t = fromIntegral (totalCount cls) :: Double
        d = documentProb cls category text
-- | Product of the smoothed per-word probabilities of the document's
-- stemmed words under @category@.
-- ('product' replaces the hand-rolled @foldl (\a b -> a * b) 1.0@; the
-- fold direction and result are identical.)
documentProb :: Classifier -> String -> String -> Double
documentProb cls category text =
  product [ wordWeightedAvg cls category w | w <- stemText text ]
-- | Each trained category paired with its score for @text@.
categoryScores :: Classifier -> String -> [(String, Double)]
categoryScores cls text =
  [ (c, textProb cls c text) | c <- M.keys (ccount cls) ]
-- | Raw conditional probability of @word@ given @category@: occurrences
-- of the word in the category over the category's document count.
wordProb :: Classifier -> String -> String -> Double
wordProb cls category word = fromIntegral wordOccurrences / fromIntegral categoryDocs
  where wordOccurrences = findWCount cls category word
        categoryDocs    = findCCount cls category
-- | Smoothed word probability: the in-category probability blended with
-- an assumed prior of 0.5, weighted by how often the word appears across
-- all categories — so rarely-seen words stay close to the prior.
wordWeightedAvg :: Classifier -> String -> String -> Double
wordWeightedAvg cls category word = (weight * assumed_prob + totals * basic_prob) / (weight + totals)
  where weight = 1.0        -- strength given to the assumed prior
        assumed_prob = 0.5  -- prior probability for unseen words
        basic_prob = wordProb cls category word
        -- total occurrences of the word over every trained category
        totals = fromIntegral (sum $ map (\c -> findWCount cls c word) $ M.keys $ ccount cls) :: Double
| derek-schaefer/haskell-classifier | src/Data/Classifier.hs | bsd-3-clause | 2,857 | 0 | 15 | 646 | 1,060 | 558 | 502 | 60 | 1 |
-- |
-- Module: Utils
-- Description: Utility bounded-list functions (e.g., folds, scans, etc.)
-- Copyright: (c) 2011 National Institute of Aerospace / Galois, Inc.
--
-- Utility bounded-list functions (e.g., folds, scans, etc.)
module Copilot.Library.Utils
( -- * Functions similar to the Prelude functions on lists
take, tails, cycle,
-- ** Folds
nfoldl, nfoldl1, nfoldr, nfoldr1,
-- ** Scans
nscanl, nscanr, nscanl1, nscanr1,
-- ** Indexing
case', (!!))
where
import Copilot.Language
import qualified Prelude as P
-- | All suffix streams of @s@: @[ drop 0 s, drop 1 s, drop 2 s, .. ]@.
-- Note the result is an /infinite/ list of streams.
tails :: ( Typed a )
         => Stream a -> [ Stream a ]
tails s = [ drop x s | x <- [ 0 .. ] ]
-- | The first @n@ suffix streams of @s@.
-- Shadows 'Prelude.take', which is therefore used qualified ('P.take').
take :: ( Integral a, Typed b )
        => a -> Stream b -> [ Stream b ]
take n s = P.take ( fromIntegral n ) $ tails s
-- | Left fold of @f@ over the first @n@ suffix streams of @s@
-- (i.e. over @s, drop 1 s, .., drop (n-1) s@), starting from @e@.
nfoldl :: ( Typed a, Typed b )
          => Int -> ( Stream a -> Stream b -> Stream a )
          -> Stream a -> Stream b -> Stream a
nfoldl n f e s = foldl f e $ take n s

-- | Like 'nfoldl' but with no starting value; requires @n >= 1@.
nfoldl1 :: ( Typed a )
           => Int -> ( Stream a -> Stream a -> Stream a )
           -> Stream a -> Stream a
nfoldl1 n f s = foldl1 f $ take n s

-- | Right fold of @f@ over the first @n@ suffix streams of @s@,
-- starting from @e@.
nfoldr :: ( Typed a, Typed b )
          => Int -> ( Stream a -> Stream b -> Stream b )
          -> Stream b -> Stream a -> Stream b
nfoldr n f e s = foldr f e $ take n s

-- | Like 'nfoldr' but with no starting value; requires @n >= 1@.
nfoldr1 :: ( Typed a )
           => Int -> ( Stream a -> Stream a -> Stream a )
           -> Stream a -> Stream a
nfoldr1 n f s = foldr1 f $ take n s
-- | Left scan of @f@ over the first @n@ suffix streams of @s@, starting
-- from @e@; yields the list of successive accumulator streams.
nscanl :: ( Typed a, Typed b )
          => Int -> ( Stream a -> Stream b -> Stream a )
          -> Stream a -> Stream b -> [ Stream a ]
nscanl n f e s = scanl f e $ take n s

-- | Right scan of @f@ over the first @n@ suffix streams of @s@,
-- starting from @e@.
nscanr :: ( Typed a )
          => Int -> ( Stream a -> Stream b -> Stream b )
          -> Stream b -> Stream a -> [ Stream b ]
nscanr n f e s = scanr f e $ take n s

-- | Like 'nscanl' but with no starting value; requires @n >= 1@.
nscanl1 :: ( Typed a )
           => Int -> ( Stream a -> Stream a -> Stream a )
           -> Stream a -> [ Stream a ]
nscanl1 n f s = scanl1 f $ take n s

-- | Like 'nscanr' but with no starting value; requires @n >= 1@.
nscanr1 :: ( Typed a )
           => Int -> ( Stream a -> Stream a -> Stream a )
           -> Stream a -> [ Stream a ]
nscanr1 n f s = scanr1 f $ take n s
-- | Case-like function: The index of the first predicate that is true
-- in the predicate list selects the stream result. If no predicate
-- is true, the last element is chosen (default element)
case' :: ( Typed a )
         => [ Stream Bool ] -> [ Stream a ] -> Stream a
case' predicates alternatives =
  let -- ran out of predicates: the next alternative is the default
      case'' [] ( default' : _ ) = default'
      -- one 'mux' per predicate: take @a@ when @p@ holds, else recurse
      case'' ( p : ps ) ( a : as ) = mux p a ( case'' ps as )
      -- list lengths out of step (alternatives must be one longer)
      case'' _ _ =
        badUsage $ "in case' in Utils library: "
                   P.++ "length of alternatives list is not "
                   P.++ "greater by one than the length of predicates list"
  in case'' predicates alternatives
-- | Index. WARNING: Very expensive! Consider using this only for very short
-- lists.
(!!) :: (Typed a, Eq b, Num b, Typed b) => [Stream a] -> Stream b -> Stream a
ls !! n = let -- one constant index stream per list position
              indices = map
                        ( constant . fromIntegral )
                        [ 0 .. P.length ls - 1 ]
              -- index stream matched no position: fall back to 'last'
              select [] _ = last ls
              -- build nested muxes: pick x when its index equals n
              select
                ( i : is )
                ( x : xs ) = mux ( i == n ) x ( select is xs )
              -- should not happen
              select _ [] = badUsage ("in (!!) defined in Utils.hs " P.++
                                      "in copilot-libraries")
          in if null ls then
               badUsage ("in (!!) defined in Utils.hs " P.++
                         "indexing the empty list with !! is not defined")
             else
               select indices ls
-- | Repeat the (finite, non-empty) list @ls@ as a periodic stream; the
-- stream is defined by self-reference (@cycle' = ls ++ cycle'@).
cycle :: ( Typed a ) => [ a ] -> Stream a
cycle ls = cycle'
  where cycle' = ls ++ cycle'
| fredyr/copilot-libraries | src/Copilot/Library/Utils.hs | bsd-3-clause | 3,800 | 0 | 13 | 1,433 | 1,280 | 647 | 633 | 73 | 4 |
module Zero.Account.Profile.Handlers
(
profile
) where
------------------------------------------------------------------------------
import Servant (Handler(..))
import Zero.View.Handlers (makeProtectedView, makeView)
import Zero.View (View(..))
import Zero.Account.Profile (Profile(..))
import Zero.SessionToken.State
------------------------------------------------------------------------------
-- | Serve the profile view.
--
-- NOTE(review): the @sessionToken@ argument is ignored, and the view is
-- built with plain 'makeView' even though 'makeProtectedView' is
-- imported — confirm whether this endpoint was meant to be
-- session-protected.
profile :: SessionTokenState -> Handler (View Profile)
profile sessionToken =
  makeView Profile
| et4te/zero | server/src/Zero/Account/Profile/Handlers.hs | bsd-3-clause | 558 | 0 | 8 | 99 | 105 | 65 | 40 | 11 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE UnicodeSyntax #-}
{-|
[@ISO639-1@] -
[@ISO639-2@] ast
[@ISO639-3@] ast
[@Native name@] asturianu
[@English name@] Asturian
-}
module Text.Numeral.Language.AST.TestData (cardinals) where
--------------------------------------------------------------------------------
-- Imports
--------------------------------------------------------------------------------
import "base" Prelude ( Num )
import "numerals" Text.Numeral.Grammar.Reified ( defaultInflection )
import "this" Text.Numeral.Test ( TestData )
--------------------------------------------------------------------------------
-- Test data
--------------------------------------------------------------------------------
{-
Sources:
http://www.languagesandnumbers.com/how-to-count-in-asturian/en/ast/
-}
cardinals ∷ (Num i) ⇒ TestData i
cardinals =
[ ( "default"
, defaultInflection
, [ (0, "ceru")
, (1, "un")
, (2, "dos")
, (3, "tres")
, (4, "cuatro")
, (5, "cinco")
, (6, "seis")
, (7, "siete")
, (8, "ocho")
, (9, "nueve")
, (10, "diez")
, (11, "once")
, (12, "doce")
, (13, "trece")
, (14, "catorce")
, (15, "quince")
, (16, "deciséis")
, (17, "decisiete")
, (18, "deciocho")
, (19, "decinueve")
, (20, "venti")
, (21, "ventiún")
, (22, "ventidós")
, (23, "ventitrés")
, (24, "venticuatro")
, (25, "venticinco")
, (26, "ventiséis")
, (27, "ventisiete")
, (28, "ventiocho")
, (29, "ventinueve")
, (30, "trenta")
, (31, "trenta y un")
, (32, "trenta y dos")
, (33, "trenta y tres")
, (34, "trenta y cuatro")
, (35, "trenta y cinco")
, (36, "trenta y seis")
, (37, "trenta y siete")
, (38, "trenta y ocho")
, (39, "trenta y nueve")
, (40, "cuaranta")
, (41, "cuaranta y un")
, (42, "cuaranta y dos")
, (43, "cuaranta y tres")
, (44, "cuaranta y cuatro")
, (45, "cuaranta y cinco")
, (46, "cuaranta y seis")
, (47, "cuaranta y siete")
, (48, "cuaranta y ocho")
, (49, "cuaranta y nueve")
, (50, "cincuenta")
, (51, "cincuenta y un")
, (52, "cincuenta y dos")
, (53, "cincuenta y tres")
, (54, "cincuenta y cuatro")
, (55, "cincuenta y cinco")
, (56, "cincuenta y seis")
, (57, "cincuenta y siete")
, (58, "cincuenta y ocho")
, (59, "cincuenta y nueve")
, (60, "sesenta")
, (61, "sesenta y un")
, (62, "sesenta y dos")
, (63, "sesenta y tres")
, (64, "sesenta y cuatro")
, (65, "sesenta y cinco")
, (66, "sesenta y seis")
, (67, "sesenta y siete")
, (68, "sesenta y ocho")
, (69, "sesenta y nueve")
, (70, "setanta")
, (71, "setanta y un")
, (72, "setanta y dos")
, (73, "setanta y tres")
, (74, "setanta y cuatro")
, (75, "setanta y cinco")
, (76, "setanta y seis")
, (77, "setanta y siete")
, (78, "setanta y ocho")
, (79, "setanta y nueve")
, (80, "ochenta")
, (81, "ochenta y un")
, (82, "ochenta y dos")
, (83, "ochenta y tres")
, (84, "ochenta y cuatro")
, (85, "ochenta y cinco")
, (86, "ochenta y seis")
, (87, "ochenta y siete")
, (88, "ochenta y ocho")
, (89, "ochenta y nueve")
, (90, "noventa")
, (91, "noventa y un")
, (92, "noventa y dos")
, (93, "noventa y tres")
, (94, "noventa y cuatro")
, (95, "noventa y cinco")
, (96, "noventa y seis")
, (97, "noventa y siete")
, (98, "noventa y ocho")
, (99, "noventa y nueve")
, (100, "cien")
, (101, "cien un")
, (102, "cien dos")
, (103, "cien tres")
, (104, "cien cuatro")
, (105, "cien cinco")
, (106, "cien seis")
, (107, "cien siete")
, (108, "cien ocho")
, (109, "cien nueve")
, (110, "cien diez")
, (123, "cien ventitrés")
, (200, "doscientos")
, (300, "trescientos")
, (321, "trescientos ventiún")
, (400, "cuatrocientos")
, (500, "quinientos")
, (600, "seiscientos")
, (700, "setecientos")
, (800, "ochocientos")
, (900, "novecientos")
, (909, "novecientos nueve")
, (990, "novecientos noventa")
, (999, "novecientos noventa y nueve")
, (1000, "mil")
, (1001, "mil un")
, (1008, "mil ocho")
, (1234, "mil doscientos trenta y cuatro")
, (2000, "dos mil")
, (3000, "tres mil")
, (4000, "cuatro mil")
, (4321, "cuatro mil trescientos ventiún")
, (5000, "cinco mil")
, (6000, "seis mil")
, (7000, "siete mil")
, (8000, "ocho mil")
, (9000, "nueve mil")
, (10000, "diez mil")
, (12345, "doce mil trescientos cuaranta y cinco")
, (20000, "venti mil")
, (30000, "trenta mil")
, (40000, "cuaranta mil")
, (50000, "cincuenta mil")
, (54321, "cincuenta y cuatro mil trescientos ventiún")
, (60000, "sesenta mil")
, (70000, "setanta mil")
, (80000, "ochenta mil")
, (90000, "noventa mil")
, (100000, "cien mil")
, (123456, "cien ventitrés mil cuatrocientos cincuenta y seis")
, (200000, "doscientos mil")
, (300000, "trescientos mil")
, (400000, "cuatrocientos mil")
, (500000, "quinientos mil")
, (600000, "seiscientos mil")
, (654321, "seiscientos cincuenta y cuatro mil trescientos ventiún")
, (700000, "setecientos mil")
, (800000, "ochocientos mil")
, (900000, "novecientos mil")
, (1000000, "un millón")
]
)
]
| telser/numerals | src-test/Text/Numeral/Language/AST/TestData.hs | bsd-3-clause | 5,885 | 0 | 8 | 1,789 | 1,534 | 1,025 | 509 | 172 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE NoNamedWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Foldable.Singletons
-- Copyright : (C) 2018 Ryan Scott
-- License : BSD-style (see LICENSE)
-- Maintainer : Ryan Scott
-- Stability : experimental
-- Portability : non-portable
--
-- Defines the promoted and singled versions of the 'Foldable' type class.
--
----------------------------------------------------------------------------
module Data.Foldable.Singletons (
PFoldable(..), SFoldable(..),
FoldrM, sFoldrM,
FoldlM, sFoldlM,
Traverse_, sTraverse_,
For_, sFor_,
SequenceA_, sSequenceA_,
Asum, sAsum,
MapM_, sMapM_,
ForM_, sForM_,
Sequence_, sSequence_,
Msum, sMsum,
Concat, sConcat,
ConcatMap, sConcatMap,
And, sAnd,
Or, sOr,
Any, sAny,
All, sAll,
MaximumBy, sMaximumBy,
MinimumBy, sMinimumBy,
NotElem, sNotElem,
Find, sFind,
-- * Defunctionalization symbols
FoldSym0, FoldSym1,
FoldMapSym0, FoldMapSym1, FoldMapSym2,
FoldrSym0, FoldrSym1, FoldrSym2, FoldrSym3,
Foldr'Sym0, Foldr'Sym1, Foldr'Sym2, Foldr'Sym3,
FoldlSym0, FoldlSym1, FoldlSym2, FoldlSym3,
Foldl'Sym0, Foldl'Sym1, Foldl'Sym2, Foldl'Sym3,
Foldr1Sym0, Foldr1Sym1, Foldr1Sym2,
Foldl1Sym0, Foldl1Sym1, Foldl1Sym2,
ToListSym0, ToListSym1,
NullSym0, NullSym1,
LengthSym0, LengthSym1,
ElemSym0, ElemSym1, ElemSym2,
MaximumSym0, MaximumSym1,
MinimumSym0, MinimumSym1,
SumSym0, SumSym1,
ProductSym0, ProductSym1,
FoldrMSym0, FoldrMSym1, FoldrMSym2, FoldrMSym3,
FoldlMSym0, FoldlMSym1, FoldlMSym2, FoldlMSym3,
Traverse_Sym0, Traverse_Sym1, Traverse_Sym2,
For_Sym0, For_Sym1, For_Sym2,
SequenceA_Sym0, SequenceA_Sym1,
AsumSym0, AsumSym1,
MapM_Sym0, MapM_Sym1, MapM_Sym2,
ForM_Sym0, ForM_Sym1, ForM_Sym2,
Sequence_Sym0, Sequence_Sym1,
MsumSym0, MsumSym1,
ConcatSym0, ConcatSym1,
ConcatMapSym0, ConcatMapSym1, ConcatMapSym2,
AndSym0, AndSym1,
OrSym0, OrSym1,
AnySym0, AnySym1, AnySym2,
AllSym0, AllSym1, AllSym2,
MaximumBySym0, MaximumBySym1, MaximumBySym2,
MinimumBySym0, MinimumBySym1, MinimumBySym2,
NotElemSym0, NotElemSym1, NotElemSym2,
FindSym0, FindSym1, FindSym2
) where
import Control.Applicative
import Control.Monad
import Control.Monad.Singletons.Internal
import Data.Bool.Singletons
import Data.Either.Singletons
import Data.Eq.Singletons
import Data.Kind
import Data.List.NonEmpty (NonEmpty(..))
import Data.List.Singletons.Internal.Disambiguation
import Data.Maybe.Singletons
import Data.Monoid hiding (All(..), Any(..), Endo(..), Product(..), Sum(..))
import Data.Monoid.Singletons
hiding ( AllSym0, AllSym1
, AnySym0, AnySym1
, ProductSym0, ProductSym1
, SumSym0, SumSym1 )
import qualified Data.Monoid as Monoid (Product(..), Sum(..))
import Data.Ord.Singletons
hiding ( Max, MaxSym0, MaxSym1, MaxSym2, sMax
, Min, MinSym0, MinSym1, MinSym2, sMin )
import Data.Semigroup.Singletons.Internal
hiding ( AllSym0(..), AllSym1, SAll
, AnySym0(..), AnySym1, SAny
, FirstSym0, FirstSym1, SFirst
, GetFirstSym0, sGetFirst
, LastSym0, LastSym1, SLast
, ProductSym0(..), ProductSym1, SProduct
, SumSym0(..), SumSym1, SSum )
import Data.Semigroup.Singletons.Internal.Disambiguation
import Data.Singletons
import Data.Singletons.Base.Instances
hiding (Foldl, FoldlSym0(..), FoldlSym1(..), FoldlSym2(..), FoldlSym3, sFoldl)
import Data.Singletons.TH
import GHC.Base.Singletons
hiding (Foldr, FoldrSym0, FoldrSym1, FoldrSym2, FoldrSym3, sFoldr)
import GHC.Num.Singletons
import GHC.TypeLits.Singletons.Internal
-- | A defunctionalization-friendly endomorphism wrapper (@~>@ is the
-- defunctionalized arrow), used below to define the default 'Foldr'.
type Endo :: Type -> Type
newtype Endo a = Endo (a ~> a)

-- | Singleton type for promoted 'Endo' values.
type SEndo :: Endo a -> Type
data SEndo e where
  SEndo :: Sing x -> SEndo ('Endo x)
type instance Sing = SEndo

-- | Defunctionalization symbol for the 'Endo' constructor.
type EndoSym0 :: (a ~> a) ~> Endo a
data EndoSym0 tf
type instance Apply EndoSym0 x = 'Endo x
-- Promoted/singled 'appEndo' plus the Semigroup/Monoid instances for
-- 'Endo' (composition, with 'id' as the unit), generated by singletons-th.
$(singletonsOnly [d|
  appEndo :: Endo a -> (a -> a)
  appEndo (Endo x) = x

  instance Semigroup (Endo a) where
    Endo x <> Endo y = Endo (x . y)

  instance Monoid (Endo a) where
    mempty = Endo id
  |])
-- Internal Maybe-wrappers used to implement 'Maximum' / 'Minimum' via
-- 'FoldMap' ('Nothing' acts as the identity of the fold).
$(singletons [d|
  newtype MaxInternal a = MaxInternal { getMaxInternal :: Maybe a }

  newtype MinInternal a = MinInternal { getMinInternal :: Maybe a }
  |])
-- Semigroup/Monoid instances for the internal max/min wrappers:
-- 'Nothing' is the identity; on ties the guards keep the left operand
-- (@>=@ for max, @<=@ for min).
$(singletonsOnly [d|
  instance Ord a => Semigroup (MaxInternal a) where
    m <> MaxInternal Nothing = m
    MaxInternal Nothing <> n = n
    (MaxInternal m@(Just x)) <> (MaxInternal n@(Just y))
      = if x >= y then MaxInternal m else MaxInternal n

  instance Ord a => Monoid (MaxInternal a) where
    mempty = MaxInternal Nothing

  instance Ord a => Semigroup (MinInternal a) where
    m <> MinInternal Nothing = m
    MinInternal Nothing <> n = n
    (MinInternal m@(Just x)) <> (MinInternal n@(Just y))
      = if x <= y then MinInternal m else MinInternal n

  instance Ord a => Monoid (MinInternal a) where
    mempty = MinInternal Nothing
  |])
$(singletonsOnly [d|
-- -| Data structures that can be folded.
--
-- For example, given a data type
--
-- > data Tree a = Empty | Leaf a | Node (Tree a) a (Tree a)
--
-- a suitable instance would be
--
-- > instance Foldable Tree where
-- > foldMap f Empty = mempty
-- > foldMap f (Leaf x) = f x
-- > foldMap f (Node l k r) = foldMap f l `mappend` f k `mappend` foldMap f r
--
-- This is suitable even for abstract types, as the monoid is assumed
-- to satisfy the monoid laws. Alternatively, one could define @foldr@:
--
-- > instance Foldable Tree where
-- > foldr f z Empty = z
-- > foldr f z (Leaf x) = f x z
-- > foldr f z (Node l k r) = foldr f (f k (foldr f z r)) l
--
-- @Foldable@ instances are expected to satisfy the following laws:
--
-- > foldr f z t = appEndo (foldMap (Endo . f) t ) z
--
-- > foldl f z t = appEndo (getDual (foldMap (Dual . Endo . flip f) t)) z
--
-- > fold = foldMap id
--
-- > length = getSum . foldMap (Sum . const 1)
--
-- @sum@, @product@, @maximum@, and @minimum@ should all be essentially
-- equivalent to @foldMap@ forms, such as
--
-- > sum = getSum . foldMap Sum
--
-- but may be less defined.
--
-- If the type is also a 'Functor' instance, it should satisfy
--
-- > foldMap f = fold . fmap f
--
-- which implies that
--
-- > foldMap f . fmap g = foldMap (f . g)
class Foldable t where
-- {-# MINIMAL foldMap | foldr #-}
-- -| Combine the elements of a structure using a monoid.
fold :: Monoid m => t m -> m
fold = foldMap id
-- -| Map each element of the structure to a monoid,
-- and combine the results.
foldMap :: Monoid m => (a -> m) -> t a -> m
foldMap f = foldr (mappend . f) mempty
-- -| Right-associative fold of a structure.
--
-- In the case of lists, 'foldr', when applied to a binary operator, a
-- starting value (typically the right-identity of the operator), and a
-- list, reduces the list using the binary operator, from right to left:
--
-- > foldr f z [x1, x2, ..., xn] == x1 `f` (x2 `f` ... (xn `f` z)...)
--
-- Note that, since the head of the resulting expression is produced by
-- an application of the operator to the first element of the list,
-- 'foldr' can produce a terminating expression from an infinite list.
--
-- For a general 'Foldable' structure this should be semantically identical
-- to,
--
-- @foldr f z = 'List.foldr' f z . 'toList'@
--
foldr :: (a -> b -> b) -> b -> t a -> b
foldr f z t = appEndo (foldMap (Endo . f) t) z
-- -| Right-associative fold of a structure, but with strict application of
-- the operator.
--
foldr' :: (a -> b -> b) -> b -> t a -> b
foldr' f z0 xs = foldl f' id xs z0
where f' k x z = k $! f x z
-- -| Left-associative fold of a structure.
--
-- In the case of lists, 'foldl', when applied to a binary
-- operator, a starting value (typically the left-identity of the operator),
-- and a list, reduces the list using the binary operator, from left to
-- right:
--
-- > foldl f z [x1, x2, ..., xn] == (...((z `f` x1) `f` x2) `f`...) `f` xn
--
-- Note that to produce the outermost application of the operator the
-- entire input list must be traversed. This means that 'foldl'' will
-- diverge if given an infinite list.
--
-- Also note that if you want an efficient left-fold, you probably want to
-- use 'foldl'' instead of 'foldl'. The reason for this is that latter does
-- not force the "inner" results (e.g. @z `f` x1@ in the above example)
-- before applying them to the operator (e.g. to @(`f` x2)@). This results
-- in a thunk chain @O(n)@ elements long, which then must be evaluated from
-- the outside-in.
--
-- For a general 'Foldable' structure this should be semantically identical
-- to,
--
-- @foldl f z = 'List.foldl' f z . 'toList'@
--
foldl :: (b -> a -> b) -> b -> t a -> b
foldl f z t = appEndo (getDual (foldMap (Dual . Endo . flip f) t)) z
-- There's no point mucking around with coercions here,
-- because flip forces us to build a new function anyway.
-- -| Left-associative fold of a structure but with strict application of
-- the operator.
--
-- This ensures that each step of the fold is forced to weak head normal
-- form before being applied, avoiding the collection of thunks that would
-- otherwise occur. This is often what you want to strictly reduce a finite
-- list to a single, monolithic result (e.g. 'length').
--
-- For a general 'Foldable' structure this should be semantically identical
-- to,
--
-- @foldl f z = 'List.foldl'' f z . 'toList'@
--
foldl' :: (b -> a -> b) -> b -> t a -> b
foldl' f z0 xs = foldr f' id xs z0
where f' x k z = k $! f z x
-- -| A variant of 'foldr' that has no base case,
-- and thus may only be applied to non-empty structures.
--
-- @'foldr1' f = 'List.foldr1' f . 'toList'@
foldr1 :: (a -> a -> a) -> t a -> a
foldr1 f xs = fromMaybe (errorWithoutStackTrace "foldr1: empty structure")
(foldr mf Nothing xs)
where
mf x m = Just (case m of
Nothing -> x
Just y -> f x y)
-- -| A variant of 'foldl' that has no base case,
-- and thus may only be applied to non-empty structures.
--
-- @'foldl1' f = 'List.foldl1' f . 'toList'@
foldl1 :: (a -> a -> a) -> t a -> a
foldl1 f xs = fromMaybe (errorWithoutStackTrace "foldl1: empty structure")
(foldl mf Nothing xs)
where
mf m y = Just (case m of
Nothing -> y
Just x -> f x y)
-- -| List of elements of a structure, from left to right.
toList :: t a -> [a]
toList = foldr (:) []
-- -| Test whether the structure is empty. The default implementation is
-- optimized for structures that are similar to cons-lists, because there
-- is no general way to do better.
null :: t a -> Bool
null = foldr (\_ _ -> False) True
-- -| Returns the size/length of a finite structure as an 'Int'. The
-- default implementation is optimized for structures that are similar to
-- cons-lists, because there is no general way to do better.
length :: t a -> Natural
length = foldl' (\c _ -> c+1) 0
-- -| Does the element occur in the structure?
elem :: Eq a => a -> t a -> Bool
elem = any . (==)
-- -| The largest element of a non-empty structure.
maximum :: forall a . Ord a => t a -> a
maximum = fromMaybe (errorWithoutStackTrace "maximum: empty structure") .
getMaxInternal . foldMap (MaxInternal . mkJust)
where
mkJust :: a -> Maybe a
mkJust = Just
-- -| The least element of a non-empty structure.
minimum :: forall a . Ord a => t a -> a
minimum = fromMaybe (errorWithoutStackTrace "minimum: empty structure") .
getMinInternal . foldMap (MinInternal . mkJust)
where
mkJust :: a -> Maybe a
mkJust = Just
-- -| The 'sum' function computes the sum of the numbers of a structure.
sum :: Num a => t a -> a
sum = getSum . foldMap sum_
-- -| The 'product' function computes the product of the numbers of a
-- structure.
product :: Num a => t a -> a
product = getProduct . foldMap product_
-- instances for Prelude types
instance Foldable Maybe where
foldMap = maybe_ mempty
foldr _ z Nothing = z
foldr f z (Just x) = f x z
foldl _ z Nothing = z
foldl f z (Just x) = f z x
instance Foldable [] where
elem = listelem
foldl = listfoldl
foldl' = listfoldl'
foldl1 = listfoldl1
foldr = listfoldr
foldr1 = listfoldr1
length = listlength
maximum = listmaximum
minimum = listminimum
null = listnull
product = listproduct
sum = listsum
toList = id
instance Foldable NonEmpty where
foldr f z (a :| as) = f a (listfoldr f z as)
foldl f z (a :| as) = listfoldl f (f z a) as
foldl1 f (a :| as) = listfoldl f a as
-- GHC isn't clever enough to transform the default definition
-- into anything like this, so we'd end up shuffling a bunch of
-- Maybes around.
foldr1 f (p :| ps) = foldr go id ps p
where
go x r prev = f prev (r x)
-- We used to say
--
-- length (_ :| as) = 1 + length as
--
-- but the default definition is better, counting from 1.
--
-- The default definition also works great for null and foldl'.
-- As usual for cons lists, foldr' is basically hopeless.
foldMap f (a :| as) = f a `mappend` foldMap f as
fold (m :| ms) = m `mappend` fold ms
toList (a :| as) = a : as
instance Foldable (Either a) where
foldMap _ (Left _) = mempty
foldMap f (Right y) = f y
foldr _ z (Left _) = z
foldr f z (Right y) = f y z
length (Left _) = 0
length (Right _) = 1
null = isLeft
instance Foldable Proxy where
foldMap _ _ = mempty
fold _ = mempty
foldr _ z _ = z
foldl _ z _ = z
foldl1 _ _ = errorWithoutStackTrace "foldl1: Proxy"
foldr1 _ _ = errorWithoutStackTrace "foldr1: Proxy"
-- Why do we give length (and null) an instance signature here? If we
-- didn't, singletons-th would generate one for us when singling it:
--
-- instance SFoldable Proxy where
-- sLength :: forall a (x :: Proxy a). Sing x -> Sing (Length x)
-- sLength = ...
--
-- If you squint, you'll notice that that instance signature is actually
-- /too/ general. This is because GHC will infer that `a` should be
-- kind-polymorphic, but Length is only defined when `a` is of kind
-- `Type`! Ugh. To force GHC to come to its senses, we explicitly inform
-- it that `a :: Type` through our own instance signature.
length :: forall (a :: Type). Proxy a -> Natural
length _ = 0
null :: forall (a :: Type). Proxy a -> Bool
null _ = True
elem _ _ = False
sum _ = 0
product _ = 1
instance Foldable Dual where
foldMap f (Dual x) = f x
elem = (. getDual) . (==)
foldl f z (Dual x) = f z x
foldl' f z (Dual x) = f z x
foldl1 _ = getDual
foldr f z (Dual x) = f x z
foldr' = foldr
foldr1 _ = getDual
length _ = 1
maximum = getDual
minimum = getDual
null _ = False
product = getDual
sum = getDual
toList (Dual x) = [x]
instance Foldable Monoid.Sum where
foldMap f (Monoid.Sum x) = f x
elem = (. getSum) . (==)
foldl f z (Monoid.Sum x) = f z x
foldl' f z (Monoid.Sum x) = f z x
foldl1 _ = getSum
foldr f z (Monoid.Sum x) = f x z
foldr' = foldr
foldr1 _ = getSum
length _ = 1
maximum = getSum
minimum = getSum
null _ = False
product = getSum
sum = getSum
toList (Monoid.Sum x) = [x]
instance Foldable Monoid.Product where
foldMap f (Monoid.Product x) = f x
elem = (. getProduct) . (==)
foldl f z (Monoid.Product x) = f z x
foldl' f z (Monoid.Product x) = f z x
foldl1 _ = getProduct
foldr f z (Monoid.Product x) = f x z
foldr' = foldr
foldr1 _ = getProduct
length _ = 1
maximum = getProduct
minimum = getProduct
null _ = False
product = getProduct
sum = getProduct
toList (Monoid.Product x) = [x]
-- -| Monadic fold over the elements of a structure,
-- associating to the right, i.e. from right to left.
foldrM :: (Foldable t, Monad m) => (a -> b -> m b) -> b -> t a -> m b
foldrM f z0 xs = foldl f' return xs z0
where f' k x z = f x z >>= k
-- -| Monadic fold over the elements of a structure,
-- associating to the left, i.e. from left to right.
foldlM :: (Foldable t, Monad m) => (b -> a -> m b) -> b -> t a -> m b
foldlM f z0 xs = foldr f' return xs z0
where f' x k z = f z x >>= k
-- -| Map each element of a structure to an action, evaluate these
-- actions from left to right, and ignore the results. For a version
-- that doesn't ignore the results see 'Data.Traversable.traverse'.
traverse_ :: (Foldable t, Applicative f) => (a -> f b) -> t a -> f ()
traverse_ f = foldr ((*>) . f) (pure ())
-- -| 'for_' is 'traverse_' with its arguments flipped. For a version
-- that doesn't ignore the results see 'Data.Traversable.for'.
--
-- >>> for_ [1..4] print
-- 1
-- 2
-- 3
-- 4
for_ :: (Foldable t, Applicative f) => t a -> (a -> f b) -> f ()
for_ = flip traverse_
-- -| Map each element of a structure to a monadic action, evaluate
-- these actions from left to right, and ignore the results. For a
-- version that doesn't ignore the results see
-- 'Data.Traversable.mapM'.
--
-- As of base 4.8.0.0, 'mapM_' is just 'traverse_', specialized to
-- 'Monad'.
mapM_ :: (Foldable t, Monad m) => (a -> m b) -> t a -> m ()
mapM_ f= foldr ((>>) . f) (return ())
-- -| 'forM_' is 'mapM_' with its arguments flipped. For a version that
-- doesn't ignore the results see 'Data.Traversable.forM'.
--
-- As of base 4.8.0.0, 'forM_' is just 'for_', specialized to 'Monad'.
forM_ :: (Foldable t, Monad m) => t a -> (a -> m b) -> m ()
forM_ = flip mapM_
-- -| Evaluate each action in the structure from left to right, and
-- ignore the results. For a version that doesn't ignore the results
-- see 'Data.Traversable.sequenceA'.
sequenceA_ :: (Foldable t, Applicative f) => t (f a) -> f ()
sequenceA_ = foldr (*>) (pure ())
-- -| Evaluate each monadic action in the structure from left to right,
-- and ignore the results. For a version that doesn't ignore the
-- results see 'Data.Traversable.sequence'.
--
-- As of base 4.8.0.0, 'sequence_' is just 'sequenceA_', specialized
-- to 'Monad'.
sequence_ :: (Foldable t, Monad m) => t (m a) -> m ()
sequence_ = foldr (>>) (return ())
-- -| The sum of a collection of actions, generalizing 'concat'.
--
-- asum [Just "Hello", Nothing, Just "World"]
-- Just "Hello"
asum :: (Foldable t, Alternative f) => t (f a) -> f a
asum = foldr (<|>) empty
-- -| The sum of a collection of actions, generalizing 'concat'.
-- As of base 4.8.0.0, 'msum' is just 'asum', specialized to 'MonadPlus'.
msum :: (Foldable t, MonadPlus m) => t (m a) -> m a
msum = asum
-- -| The concatenation of all the elements of a container of lists.
concat :: Foldable t => t [a] -> [a]
concat xs = foldr (\x y -> foldr (:) y x) [] xs
-- -| Map a function over all the elements of a container and concatenate
-- the resulting lists.
concatMap :: Foldable t => (a -> [b]) -> t a -> [b]
concatMap f xs = foldr (\x b -> foldr (:) b (f x)) [] xs
-- These use foldr rather than foldMap to avoid repeated concatenation.
-- -| 'and' returns the conjunction of a container of Bools. For the
-- result to be 'True', the container must be finite; 'False', however,
-- results from a 'False' value finitely far from the left end.
and :: Foldable t => t Bool -> Bool
and = getAll . foldMap all_
-- -| 'or' returns the disjunction of a container of Bools. For the
-- result to be 'False', the container must be finite; 'True', however,
-- results from a 'True' value finitely far from the left end.
or :: Foldable t => t Bool -> Bool
or = getAny . foldMap any_
-- -| Determines whether any element of the structure satisfies the predicate.
any :: Foldable t => (a -> Bool) -> t a -> Bool
any p = getAny . foldMap (any_ . p)
-- -| Determines whether all elements of the structure satisfy the predicate.
all :: Foldable t => (a -> Bool) -> t a -> Bool
all p = getAll . foldMap (all_ . p)
-- -| The largest element of a non-empty structure with respect to the
-- given comparison function.
-- See Note [maximumBy/minimumBy space usage]
maximumBy :: Foldable t => (a -> a -> Ordering) -> t a -> a
maximumBy cmp = foldl1 max'
where max' x y = case cmp x y of
GT -> x
LT -> y
EQ -> y
-- -| The least element of a non-empty structure with respect to the
-- given comparison function.
-- See Note [maximumBy/minimumBy space usage]
minimumBy :: Foldable t => (a -> a -> Ordering) -> t a -> a
minimumBy cmp = foldl1 min'
where min' x y = case cmp x y of
GT -> y
LT -> x
EQ -> x
-- -| 'notElem' is the negation of 'elem'.
notElem :: (Foldable t, Eq a) => a -> t a -> Bool
notElem x = not . elem x
-- -| The 'find' function takes a predicate and a structure and returns
-- the leftmost element of the structure matching the predicate, or
-- 'Nothing' if there is no such element.
find :: Foldable t => (a -> Bool) -> t a -> Maybe a
find p = getFirst . foldMap (\ x -> First (if p x then Just x else Nothing))
|])
-- Derived Foldable instances for pairs, 'First', and 'Last'
-- (part 2 of the Prelude instances; singled/promoted by singletons-th).
$(singletonsOnly [d|
  -- instances for Prelude types (part 2)
  deriving instance Foldable ((,) a)
  deriving instance Foldable First
  deriving instance Foldable Last
  |])
| goldfirere/singletons | singletons-base/src/Data/Foldable/Singletons.hs | bsd-3-clause | 23,443 | 0 | 10 | 7,023 | 1,038 | 687 | 351 | -1 | -1 |
module Data.AS3.AST.Grammar.Expressions where
import Control.Monad
import Data.AS3.AST.Def
import Data.AS3.AST.Grammar.Lexicon
import Data.AS3.AST.Prims
import Data.AS3.AST.ThirdParty
import Text.Parsec
-- $11.1 Primary Expressions

-- | Parse a primary expression: @this@, a parenthesised expression, a
-- literal, an array or object literal, or a (scoped) identifier.
-- Alternatives are wrapped in 'try' so a failed branch consumes no input.
primary_expression :: As3Parser Expression
primary_expression =
      ((try $ string "this") >> return This)
  <|> try paren_group
  <|> try (liftM TODO_E literal)
  <|> try array_literal
  <|> try object_literal
  <|> try scoped_identifier -- check last because it fails slower
  <?> "primary_expression"

-- | Parenthesised comma-expression, e.g. @(a, b)@.
paren_group :: As3Parser Expression
paren_group = liftM ParenGroup $ between_parens comma_expression

-- | Array literal, e.g. @[a, b]@.
array_literal :: As3Parser Expression
array_literal = liftM ArrayLiteral $ between_brackets comma_expression

element_list :: As3Parser Expression
element_list = undefined -- TODO: not implemented

-- | Object literal: brace-enclosed, comma-separated key/value pairs.
object_literal :: As3Parser Expression
object_literal = liftM ObjectLiteral $ between_braces kvps where
  kvps :: As3Parser Expression
  kvps = liftM Comma $ property_assignment `sepBy` comma

-- | A single @key : value@ pair inside an object literal; whitespace
-- around the colon is skipped via 'ss'.
property_assignment :: As3Parser Expression
property_assignment = liftM2 KeyValue
  (property_name <* ss <* char ':' <* ss)
  assignment_expression

-- | Property name: identifier, string literal, or numeric literal.
property_name :: As3Parser Expression
property_name =
      try untyped_id
  <|> try (liftM (Lit . L_String) string_literal)
  <|> try (liftM (Lit . L_Number) numeric_literal)
-- $11.2 Left-Hand-Side Expressions

{-
MemberExpression:
  PrimaryExpression
  FunctionExpression
  MemberExpression [ Expression ]
  MemberExpression . IdentifierName
  new MemberExpression Arguments
-}

-- | Parse a member expression: a primary expression followed by any
-- number of @.member@ accesses or @[...]@ index accesses, accumulated
-- left-to-right by 'loop'.
member_expression :: As3Parser Expression
member_expression = subexpressions >>= loop
  where
    subexpressions :: As3Parser Expression
    subexpressions = try primary_expression {-<|> function_expression-}
    -- extend the expression parsed so far, or stop when nothing matches
    loop :: Expression -> As3Parser Expression
    loop e =
          try (call loop e)
      <|> try (array_access loop e)
      <|> return e

function_expression :: As3Parser Expression
function_expression = undefined -- TODO: not implemented
{-function_expression = liftM3 FunctionExp
  (optionMaybe identifier)
  comma_expression
  (many statement)-}

-- | @new@ expressions; @new@ may be nested, bottoming out at a
-- member expression.
new_expression :: As3Parser Expression
new_expression =
      try (liftM New $ string "new " *> new_expression)
  <|> member_expression
  <?> "new expression"

{-
CallExpression:
  MemberExpression Arguments
  CallExpression Arguments
  CallExpression [ Expression ]
  CallExpression . IdentifierName
-}

-- | Parse a call expression: a member expression applied to arguments,
-- then any chain of further calls, member accesses, or index accesses.
call_expression :: As3Parser Expression
call_expression = subexpressions >>= loop
  where
    subexpressions :: As3Parser Expression
    subexpressions = liftM2 CallEMember member_expression arguments
    loop :: Expression -> As3Parser Expression
    loop e =
          try (call_arguments e)
      <|> try (call loop e)
      <|> try (array_access loop e)
      <|> return e
    -- a further argument list applied to the expression parsed so far
    call_arguments :: Expression -> As3Parser Expression
    call_arguments l = do
      r <- arguments
      loop $ CallEArguments l r

-- | Parenthesised, comma-separated argument list.
arguments :: As3Parser Expression
arguments = liftM Comma $
  between_parens $ assignment_expression `sepBy` comma

-- | Left-hand-side expression: call expression or @new@ expression.
lhs_expression :: As3Parser Expression
lhs_expression = try call_expression <|> new_expression <?> "lhs_expression"
-- $11.3 Postfix Expressions

-- | LHS expression optionally followed by a postfix @++@/@--@.
postfix_expression :: As3Parser Expression
postfix_expression =
      try (liftM2 Postfix (lhs_expression <* ss) unary_expression_post)
  <|> lhs_expression
  <?> "postfix_expression"
  where
    unary_expression_post :: As3Parser UnaryOp
    unary_expression_post =
          symR Increment
      <|> symR Decrement
      <?> "unary POSTFIX op"

-- $11.4 Unary Operators

-- | Any chain of prefix unary operators applied to a postfix expression.
unary_expression :: As3Parser Expression
unary_expression =
      try (liftM2 Unary unary_expression_pre unary_expression)
  <|> postfix_expression
  where
    unary_expression_pre :: As3Parser UnaryOp
    unary_expression_pre =
          (symR Delete)
      <|> (symR Void)
      <|> (symR TypeOf)
      <|> try (symR Increment)
      <|> try (symR Decrement)
      <|> (symR Positive)
      <|> (symR Negative)
      <|> (symR BitwiseNOT)
      <|> (symR LogicalNOT)
      <?> "unary PREFIX op"
-- $11.5 Multiplicative Operators

-- | Left-associative chain of @*@, @/@, @%@ over unary expressions.
multiplicative_expression :: As3Parser Expression
multiplicative_expression =
    chainl1 (tok unary_expression) multiplicative_op
    <?> "multiplicative expression"
  where
    multiplicative_op :: As3Parser (Expression -> Expression -> Expression)
    multiplicative_op =
          linkL Multiplication
      <|> linkL Division
      <|> linkL Modulo
      <?> "multiplicative operator"

-- $11.6 Additive Operators

-- | Left-associative chain of @+@, @-@ over multiplicative expressions.
additive_expression :: As3Parser Expression
additive_expression =
    chainl1 (tok multiplicative_expression) additive_op
    <?> "additive expression"
  where
    additive_op :: As3Parser (Expression -> Expression -> Expression)
    additive_op =
          linkL Addition
      <|> linkL Subtraction
      <?> "additive operator"

-- $11.7 Bitwise Shift Operators

-- | Left-associative chain of shift operators over additive expressions.
shift_expression :: As3Parser Expression
shift_expression =
    chainl1 (tok additive_expression) shift_op
    <?> "shift expression"
  where
    shift_op :: As3Parser (Expression -> Expression -> Expression)
    shift_op =
          linkL URShift -- >>> before >>
      <|> linkL RShift
      <|> linkL LShift
      <?> "bitwise shift operator"
-- $11.8 Relational Operators

-- | Left-associative chain of relational operators over shift
-- expressions.  Whether the @in@ operator is permitted is decided at
-- parse time by the 'usein' flag (it is disallowed in for-in headers).
relational_expression :: As3Parser Expression
relational_expression =
    chainl1 (tok shift_expression) relational_op <?> "relational expression"
  where
    relational_op :: As3Parser (Expression -> Expression -> Expression)
    relational_op = do
      true <- usein
      if true then relational_op_in else relational_op_noin
    relational_op_in :: As3Parser (Expression -> Expression -> Expression)
    relational_op_in =
          linkL LessThanEq -- <= before <
      <|> linkL LessThan
      <|> linkL GreaterThanEq -- >= before >
      <|> linkL GreaterThan
      <|> linkL InstanceOf -- instanceof before in
      <|> linkL In
      <?> "relational operator"
    -- same as 'relational_op_in' minus the @in@ operator
    relational_op_noin :: As3Parser (Expression -> Expression -> Expression)
    relational_op_noin =
          linkL LessThanEq -- <= before <
      <|> linkL LessThan
      <|> linkL GreaterThanEq -- >= before >
      <|> linkL GreaterThan
      <|> linkL InstanceOf -- instanceof before in
      <?> "relational operator NO IN"

-- $11.9 Equality Operators

-- | Left-associative chain of (strict) equality operators.
equality_expression :: As3Parser Expression
equality_expression =
    chainl1 (tok relational_expression) equality_op <?> "equality expression"
  where
    equality_op :: As3Parser (Expression -> Expression -> Expression)
    equality_op =
          linkL StrictEquality -- === before ==
      <|> linkL Equality
      <|> linkL StrictInEquality -- !== before !=
      <|> linkL InEquality
      <?> "equality operator"
-- $11.10 Binary Bitwise Operators

-- | Left-associative chain of @&@ over equality expressions.
bitwiseAND_expression :: As3Parser Expression
bitwiseAND_expression =
    chainl1 (tok equality_expression) (try op) -- try op so part of && isn't consumed
    <?> "bitwise AND expression"
  where
    op :: As3Parser (Expression -> Expression -> Expression)
    op = linkL BitwiseAND

-- | Left-associative chain of @^@ over bitwise-AND expressions.
bitwiseXOR_expression :: As3Parser Expression
bitwiseXOR_expression =
    chainl1 (tok bitwiseAND_expression) op <?> "bitwise XOR expression"
  where
    op :: As3Parser (Expression -> Expression -> Expression)
    op = linkL BitwiseXOR

-- | Left-associative chain of @|@ over bitwise-XOR expressions.
bitwiseOR_expression :: As3Parser Expression
bitwiseOR_expression =
    chainl1 (tok bitwiseXOR_expression) (try op) -- try op so part of || isn't consumed
    <?> "bitwise OR expression"
  where
    op :: As3Parser (Expression -> Expression -> Expression)
    op = linkL BitwiseOR

-- $11.11 Binary Logical Operators

-- | Left-associative chain of @&&@ over bitwise-OR expressions.
logicalAND_expression :: As3Parser Expression
logicalAND_expression =
    chainl1 (tok bitwiseOR_expression) op <?> "logical AND expression"
  where
    op :: As3Parser (Expression -> Expression -> Expression)
    op = linkL LogicalAND

-- | Left-associative chain of @||@ over logical-AND expressions.
logicalOR_expression :: As3Parser Expression
logicalOR_expression =
    chainl1 (tok logicalAND_expression) op <?> "logical OR expression"
  where
    op :: As3Parser (Expression -> Expression -> Expression)
    op = linkL LogicalOR

-- $11.12 Conditional Operator ( ? : )

-- | Ternary @cond ? t : f@, falling back to a plain logical-OR
-- expression when no @?@ follows.
conditional_expression :: As3Parser Expression
conditional_expression =
    try (liftM3
          TernOp
          (tok logicalOR_expression <* tok (string "?"))
          (tok assignment_expression <* tok (string ":")) -- true
          (tok assignment_expression)) -- false
    <|> logicalOR_expression
    <?> "conditional expression"
-- $11.13 Assignment Operators

-- | Right-associative assignment (@a = b = c@ parses as @a = (b = c)@,
-- via the recursive call on the right), falling back to a conditional
-- expression.  The RHS is parsed with the 'PS_UntypedIds' scope.
assignment_expression :: As3Parser Expression
assignment_expression =
      try (liftM3 RBinOp
            (tok lhs_expression)
            (tok assignment_op)
            (with_scope PS_UntypedIds assignment_expression))
  <|> conditional_expression
  <?> "assignment expression"
  where
    assignment_op :: As3Parser BinaryOp
    assignment_op =
          symR Assignment
      <|> symR PlusAssignment
      <|> symR MinusAssignment
      <|> symR MultiplicationAssignment
      <|> symR DivisionAssignment
      <|> symR ModuloAssignment
      <|> symR LShiftAssignment
      <|> symR RShiftAssignment
      <|> symR URShiftAssignment
      <|> symR BitwiseANDAssignment
      <|> symR BitwiseORAssignment
      <|> symR BitwiseXORAssignment
      <?> "assignment operator"

-- $11.14 Comma Operator

-- | One or more assignment expressions joined by commas.
-- sepBy1. compare to other uses of Comma
comma_expression :: As3Parser Expression
comma_expression = liftM Comma (assignment_expression `sepBy1` comma)

-- | Top-level expression entry point.
expression :: As3Parser Expression
expression = comma_expression

-- | 'expression' with the @in@ operator disabled (for-in headers).
expression_no_in :: As3Parser Expression
expression_no_in = noin expression
-- $Chain links

{-linkR :: BinaryOp -> As3Parser (Expression -> Expression -> Expression)
linkR = linkCommon RBinOp-}

-- | Parser for a left-associative binary operator, for use with 'chainl1'.
linkL :: BinaryOp -> As3Parser (Expression -> Expression -> Expression)
linkL = linkCommon LBinOp

-- | Parse the operator's symbol and yield the corresponding AST
-- combiner.  'notFollowedBy' rejects a longer operator that merely
-- starts with this symbol (e.g. @<@ must not match the start of @<<@).
linkCommon :: (BinaryOp -> Expression -> Expression -> Expression)
           -> BinaryOp
           -> As3Parser (Expression -> Expression -> Expression)
linkCommon f op = try $ tok lop >> notFollowedBy lop >> return (f op)
  where
    lop = sym op
-- $Helpers

-- | Parse a @.member@ access on @l@ and continue extending via @loop@.
call :: (Expression -> As3Parser Expression) -> Expression -> As3Parser Expression
call loop l = do
  r <- char '.' *> var_id
  loop $ Call l r

-- | Parse a @[index]@ access on @l@ and continue extending via @loop@.
array_access :: (Expression -> As3Parser Expression) -> Expression -> As3Parser Expression
array_access loop l = do
  r <- between_brackets expression
  loop $ ArrayAccess l r
| phylake/AS3-AST | Data/AS3/AST/Grammar/Expressions.hs | bsd-3-clause | 10,495 | 0 | 19 | 2,280 | 2,248 | 1,136 | 1,112 | 239 | 2 |
{-# LANGUAGE BangPatterns, CPP, MagicHash, Rank2Types, UnboxedTuples #-}
{-# OPTIONS_GHC -fno-full-laziness -funbox-strict-fields #-}
-- | Zero based arrays.
--
-- Note that no bounds checking are performed.
module Data.HashMap.Array
( Array
, MArray
-- * Creation
, new
, new_
, singleton
, singleton'
, pair
-- * Basic interface
, length
, lengthM
, read
, write
, index
, index_
, indexM_
, update
, update'
, updateWith
, unsafeUpdate'
, insert
, insert'
, delete
, delete'
, unsafeFreeze
, unsafeThaw
, run
, run2
, copy
, copyM
-- * Folds
, foldl'
, foldr
, thaw
, map
, map'
, traverse
, filter
, toList
) where
import qualified Data.Traversable as Traversable
import Control.Applicative (Applicative)
import Control.DeepSeq
import Control.Monad.ST hiding (runST)
import GHC.Exts
import GHC.ST (ST(..))
import Prelude hiding (filter, foldr, length, map, read)
import Data.HashMap.Unsafe (runST)
------------------------------------------------------------------------
#if defined(ASSERTS)
-- This fugly hack is brought by GHC's apparent reluctance to deal
-- with MagicHash and UnboxedTuples when inferring types. Eek!
# define CHECK_BOUNDS(_func_,_len_,_k_) \
if (_k_) < 0 || (_k_) >= (_len_) then error ("Data.HashMap.Array." ++ (_func_) ++ ": bounds error, offset " ++ show (_k_) ++ ", length " ++ show (_len_)) else
# define CHECK_OP(_func_,_op_,_lhs_,_rhs_) \
if not ((_lhs_) _op_ (_rhs_)) then error ("Data.HashMap.Array." ++ (_func_) ++ ": Check failed: _lhs_ _op_ _rhs_ (" ++ show (_lhs_) ++ " vs. " ++ show (_rhs_) ++ ")") else
# define CHECK_GT(_func_,_lhs_,_rhs_) CHECK_OP(_func_,>,_lhs_,_rhs_)
# define CHECK_LE(_func_,_lhs_,_rhs_) CHECK_OP(_func_,<=,_lhs_,_rhs_)
#else
# define CHECK_BOUNDS(_func_,_len_,_k_)
# define CHECK_OP(_func_,_op_,_lhs_,_rhs_)
# define CHECK_GT(_func_,_lhs_,_rhs_)
# define CHECK_LE(_func_,_lhs_,_rhs_)
#endif
-- | An immutable boxed array.  On GHC >= 7.2 the length is read from
-- the heap object itself; older GHCs carry it in an extra field.
data Array a = Array {
      unArray :: !(Array# a)
#if __GLASGOW_HASKELL__ < 702
    , length :: !Int
#endif
    }

instance Show a => Show (Array a) where
    show = show . toList

#if __GLASGOW_HASKELL__ >= 702
-- | Number of elements, via the 'sizeofArray#' primop.
length :: Array a -> Int
length ary = I# (sizeofArray# (unArray ary))
{-# INLINE length #-}
#endif

-- | Smart constructor; the length argument is ignored on GHC >= 7.2.
array :: Array# a -> Int -> Array a
#if __GLASGOW_HASKELL__ >= 702
array ary _n = Array ary
#else
array = Array
#endif
{-# INLINE array #-}

-- | A mutable boxed array living in state thread @s@.
data MArray s a = MArray {
      unMArray :: !(MutableArray# s a)
#if __GLASGOW_HASKELL__ < 702
    , lengthM :: !Int
#endif
    }

#if __GLASGOW_HASKELL__ >= 702
-- | Number of elements of a mutable array.
lengthM :: MArray s a -> Int
lengthM mary = I# (sizeofMutableArray# (unMArray mary))
{-# INLINE lengthM #-}
#endif

-- | Smart constructor; the length argument is ignored on GHC >= 7.2.
marray :: MutableArray# s a -> Int -> MArray s a
#if __GLASGOW_HASKELL__ >= 702
marray mary _n = MArray mary
#else
marray = MArray
#endif
{-# INLINE marray #-}
------------------------------------------------------------------------
instance NFData a => NFData (Array a) where
    rnf = rnfArray

-- | Force every element of the array to normal form.
rnfArray :: NFData a => Array a -> ()
rnfArray ary0 = go ary0 n0 0
  where
    n0 = length ary0
    go !ary !n !i
        | i >= n = ()
        | otherwise = rnf (index ary i) `seq` go ary n (i+1)
{-# INLINE rnfArray #-}

-- | Create a new mutable array of specified size, in the specified
-- state thread, with each element containing the specified initial
-- value.  (CHECK_GT is a no-op unless compiled with ASSERTS.)
new :: Int -> a -> ST s (MArray s a)
new n@(I# n#) b =
    CHECK_GT("new",n,(0 :: Int))
    ST $ \s ->
    case newArray# n# b s of
        (# s', ary #) -> (# s', marray ary n #)
{-# INLINE new #-}

-- | Like 'new', but every slot holds a bottom ('undefinedElem') that
-- errors if forced before being written.
new_ :: Int -> ST s (MArray s a)
new_ n = new n undefinedElem

-- | One-element immutable array.
singleton :: a -> Array a
singleton x = runST (singleton' x)
{-# INLINE singleton #-}

-- | 'singleton' inside an explicit 'ST' computation.
singleton' :: a -> ST s (Array a)
singleton' x = new 1 x >>= unsafeFreeze
{-# INLINE singleton' #-}

-- | Two-element immutable array.
pair :: a -> a -> Array a
pair x y = run $ do
    ary <- new 2 x
    write ary 1 y
    return ary
{-# INLINE pair #-}
-- | Read a mutable array slot.  Bounds checked only under ASSERTS.
read :: MArray s a -> Int -> ST s a
read ary _i@(I# i#) = ST $ \ s ->
    CHECK_BOUNDS("read", lengthM ary, _i)
    readArray# (unMArray ary) i# s
{-# INLINE read #-}

-- | Write a mutable array slot.  Bounds checked only under ASSERTS.
write :: MArray s a -> Int -> a -> ST s ()
write ary _i@(I# i#) b = ST $ \ s ->
    CHECK_BOUNDS("write", lengthM ary, _i)
    case writeArray# (unMArray ary) i# b s of
        s' -> (# s' , () #)
{-# INLINE write #-}

-- | Index an immutable array.  Bounds checked only under ASSERTS.
index :: Array a -> Int -> a
index ary _i@(I# i#) =
    CHECK_BOUNDS("index", length ary, _i)
    case indexArray# (unArray ary) i# of (# b #) -> b
{-# INLINE index #-}

-- | 'index' returned in 'ST'; avoids retaining the whole array in a
-- thunk (the unboxed-tuple match forces the lookup, not the element).
index_ :: Array a -> Int -> ST s a
index_ ary _i@(I# i#) =
    CHECK_BOUNDS("index_", length ary, _i)
    case indexArray# (unArray ary) i# of (# b #) -> return b
{-# INLINE index_ #-}

-- | Read a mutable array slot in 'ST'.
-- NOTE(review): the bounds-check label says "index_" rather than
-- "indexM_"; only affects ASSERTS error messages.
indexM_ :: MArray s a -> Int -> ST s a
indexM_ ary _i@(I# i#) =
    CHECK_BOUNDS("index_", lengthM ary, _i)
    ST $ \ s# -> readArray# (unMArray ary) i# s#
{-# INLINE indexM_ #-}

-- | Freeze without copying; the mutable array must not be used again.
unsafeFreeze :: MArray s a -> ST s (Array a)
unsafeFreeze mary
    = ST $ \s -> case unsafeFreezeArray# (unMArray mary) s of
                   (# s', ary #) -> (# s', array ary (lengthM mary) #)
{-# INLINE unsafeFreeze #-}

-- | Thaw without copying; the immutable array must not be used again.
unsafeThaw :: Array a -> ST s (MArray s a)
unsafeThaw ary
    = ST $ \s -> case unsafeThawArray# (unArray ary) s of
                   (# s', mary #) -> (# s', marray mary (length ary) #)
{-# INLINE unsafeThaw #-}

-- | Run an 'ST' builder and freeze its result.
run :: (forall s . ST s (MArray s e)) -> Array e
run act = runST $ act >>= unsafeFreeze
{-# INLINE run #-}

-- | Like 'run', but the builder also returns an auxiliary value.
run2 :: (forall s. ST s (MArray s e, a)) -> (Array e, a)
run2 k = runST (do
                  (marr,b) <- k
                  arr <- unsafeFreeze marr
                  return (arr,b))
-- | Unsafely copy the elements of an array. Array bounds are not checked.
-- On GHC >= 7.2 this uses the 'copyArray#' primop; older GHCs fall back
-- to an element-by-element loop.
copy :: Array e -> Int -> MArray s e -> Int -> Int -> ST s ()
#if __GLASGOW_HASKELL__ >= 702
copy !src !_sidx@(I# sidx#) !dst !_didx@(I# didx#) _n@(I# n#) =
    CHECK_LE("copy", _sidx + _n, length src)
    CHECK_LE("copy", _didx + _n, lengthM dst)
    ST $ \ s# ->
    case copyArray# (unArray src) sidx# (unMArray dst) didx# n# s# of
        s2 -> (# s2, () #)
#else
copy !src !sidx !dst !didx n =
    CHECK_LE("copy", sidx + n, length src)
    CHECK_LE("copy", didx + n, lengthM dst)
    copy_loop sidx didx 0
  where
    copy_loop !i !j !c
        | c >= n = return ()
        | otherwise = do b <- index_ src i
                         write dst j b
                         copy_loop (i+1) (j+1) (c+1)
#endif

-- | Unsafely copy the elements of an array. Array bounds are not checked.
-- Mutable-to-mutable variant of 'copy'.
copyM :: MArray s e -> Int -> MArray s e -> Int -> Int -> ST s ()
#if __GLASGOW_HASKELL__ >= 702
copyM !src !_sidx@(I# sidx#) !dst !_didx@(I# didx#) _n@(I# n#) =
    CHECK_BOUNDS("copyM: src", lengthM src, _sidx + _n - 1)
    CHECK_BOUNDS("copyM: dst", lengthM dst, _didx + _n - 1)
    ST $ \ s# ->
    case copyMutableArray# (unMArray src) sidx# (unMArray dst) didx# n# s# of
        s2 -> (# s2, () #)
#else
copyM !src !sidx !dst !didx n =
    CHECK_BOUNDS("copyM: src", lengthM src, sidx + n - 1)
    CHECK_BOUNDS("copyM: dst", lengthM dst, didx + n - 1)
    copy_loop sidx didx 0
  where
    copy_loop !i !j !c
        | c >= n = return ()
        | otherwise = do b <- indexM_ src i
                         write dst j b
                         copy_loop (i+1) (j+1) (c+1)
#endif
-- | /O(n)/ Insert an element at the given position in this array,
-- increasing its size by one.
insert :: Array e -> Int -> e -> Array e
insert ary idx b = runST (insert' ary idx b)
{-# INLINE insert #-}

-- | /O(n)/ Insert an element at the given position in this array,
-- increasing its size by one.  Copies the prefix, writes the new
-- element, then copies the suffix shifted right by one.
insert' :: Array e -> Int -> e -> ST s (Array e)
insert' ary idx b =
    CHECK_BOUNDS("insert'", count + 1, idx)
    do mary <- new_ (count+1)
       copy ary 0 mary 0 idx
       write mary idx b
       copy ary idx mary (idx+1) (count-idx)
       unsafeFreeze mary
  where !count = length ary
{-# INLINE insert' #-}

-- | /O(n)/ Update the element at the given position in this array.
update :: Array e -> Int -> e -> Array e
update ary idx b = runST (update' ary idx b)
{-# INLINE update #-}

-- | /O(n)/ Update the element at the given position in this array.
-- Works on a full copy made with 'thaw', so the input stays intact.
update' :: Array e -> Int -> e -> ST s (Array e)
update' ary idx b =
    CHECK_BOUNDS("update'", count, idx)
    do mary <- thaw ary 0 count
       write mary idx b
       unsafeFreeze mary
  where !count = length ary
{-# INLINE update' #-}

-- | /O(n)/ Update the element at the given position in this array, by
-- applying a function to it. Evaluates the element to WHNF before
-- inserting it into the array.
updateWith :: Array e -> Int -> (e -> e) -> Array e
updateWith ary idx f = update ary idx $! f (index ary idx)
{-# INLINE updateWith #-}

-- | /O(1)/ Update the element at the given position in this array,
-- without copying.  Unsafe: mutates a nominally immutable array via
-- 'unsafeThaw'; the caller must guarantee no other reference observes it.
unsafeUpdate' :: Array e -> Int -> e -> ST s ()
unsafeUpdate' ary idx b =
    CHECK_BOUNDS("unsafeUpdate'", length ary, idx)
    do mary <- unsafeThaw ary
       write mary idx b
       _ <- unsafeFreeze mary
       return ()
{-# INLINE unsafeUpdate' #-}

-- | Strict left fold (accumulator forced on every step).
foldl' :: (b -> a -> b) -> b -> Array a -> b
foldl' f = \ z0 ary0 -> go ary0 (length ary0) 0 z0
  where
    go ary n i !z
        | i >= n = z
        | otherwise = go ary n (i+1) (f z (index ary i))
{-# INLINE foldl' #-}

-- | Lazy right fold.
foldr :: (a -> b -> b) -> b -> Array a -> b
foldr f = \ z0 ary0 -> go ary0 (length ary0) 0 z0
  where
    go ary n i z
        | i >= n = z
        | otherwise = f (index ary i) (go ary n (i+1) z)
{-# INLINE foldr #-}

-- | Shared bottom used to fill uninitialised slots (see 'new_').
undefinedElem :: a
undefinedElem = error "Data.HashMap.Array: Undefined element"
{-# NOINLINE undefinedElem #-}
-- | Copy a slice @[o, o+n)@ of an immutable array into a fresh
-- mutable array.  Uses the 'thawArray#' primop on GHC >= 7.2.
thaw :: Array e -> Int -> Int -> ST s (MArray s e)
#if __GLASGOW_HASKELL__ >= 702
thaw !ary !_o@(I# o#) !n@(I# n#) =
    CHECK_LE("thaw", _o + n, length ary)
    ST $ \ s -> case thawArray# (unArray ary) o# n# s of
                  (# s2, mary# #) -> (# s2, marray mary# n #)
#else
thaw !ary !o !n =
    CHECK_LE("thaw", o + n, length ary)
    do mary <- new_ n
       copy ary o mary 0 n
       return mary
#endif
{-# INLINE thaw #-}

-- | /O(n)/ Delete an element at the given position in this array,
-- decreasing its size by one.
delete :: Array e -> Int -> Array e
delete ary idx = runST (delete' ary idx)
{-# INLINE delete #-}

-- | /O(n)/ Delete an element at the given position in this array,
-- decreasing its size by one.  Copies the prefix, then the suffix
-- shifted left by one.  (No bounds check, even under ASSERTS.)
delete' :: Array e -> Int -> ST s (Array e)
delete' ary idx = do
    mary <- new_ (count-1)
    copy ary 0 mary 0 idx
    copy ary (idx+1) mary idx (count-(idx+1))
    unsafeFreeze mary
  where !count = length ary
{-# INLINE delete' #-}
-- | Map a function over every element (results stored lazily).
map :: (a -> b) -> Array a -> Array b
map f = \ ary ->
    let !n = length ary
    in run $ do
         mary <- new_ n
         go ary mary 0 n
  where
    go ary mary i n
        | i >= n = return mary
        | otherwise = do
             write mary i $ f (index ary i)
             go ary mary (i+1) n
{-# INLINE map #-}

-- | Strict version of 'map' (each result forced to WHNF via @$!@).
map' :: (a -> b) -> Array a -> Array b
map' f = \ ary ->
    let !n = length ary
    in run $ do
         mary <- new_ n
         go ary mary 0 n
  where
    go ary mary i n
        | i >= n = return mary
        | otherwise = do
             write mary i $! f (index ary i)
             go ary mary (i+1) n
{-# INLINE map' #-}

-- | Build an array of size @n@ from a list.  If the list has fewer
-- than @n@ elements the remaining slots keep 'undefinedElem' bottoms.
fromList :: Int -> [a] -> Array a
fromList n xs0 = run $ do
    mary <- new_ n
    go xs0 mary 0
  where
    go [] !mary !_ = return mary
    go (x:xs) mary i = do write mary i x
                          go xs mary (i+1)

-- | Convert to a list, left to right.
toList :: Array a -> [a]
toList = foldr (:) []

-- | Effectful traversal, implemented via a list round-trip.
traverse :: Applicative f => (a -> f b) -> Array a -> f (Array b)
traverse f = \ ary -> fromList (length ary) `fmap`
                      Traversable.traverse f (toList ary)
{-# INLINE traverse #-}

-- | Keep only elements satisfying the predicate.  Writes survivors
-- into a full-size buffer, then shrinks with 'copyM' if any were dropped.
filter :: (a -> Bool) -> Array a -> Array a
filter p = \ ary ->
    let !n = length ary
    in run $ do
         mary <- new_ n
         go ary mary 0 0 n
  where
    go ary mary i j n
        | i >= n = if i == j
                   then return mary
                   else do mary2 <- new_ j
                           copyM mary 0 mary2 0 j
                           return mary2
        | p el = write mary j el >> go ary mary (i+1) (j+1) n
        | otherwise = go ary mary (i+1) j n
      where el = index ary i
{-# INLINE filter #-}
| hvr/unordered-containers | Data/HashMap/Array.hs | bsd-3-clause | 12,474 | 0 | 13 | 3,583 | 3,904 | 1,988 | 1,916 | -1 | -1 |
{-# LANGUAGE BangPatterns, PatternGuards #-}
module Ant.Renderer where
import Control.Monad
import Graphics.Rendering.Cairo
import qualified Data.Vector as V
import Ant.Game
import Ant.Scheduler
import Data.Colour.SRGB
import Data.Colour.Names
import Data.Colour.RGBSpace
import Data.Colour.RGBSpace.HSV
import Data.Colour
import Data.Function
import Data.List
import Data.Maybe
import qualified Data.Vector as V
import Debug.Trace
import qualified Data.IntMap as M
import qualified Data.Map as MM
-- | Set the current Cairo source colour from a 'Colour'.
setColour :: Colour Float -> Render ()
setColour c = setSourceRGB (realToFrac r) (realToFrac g) (realToFrac b) where
    (RGB r g b) = toSRGB c
{-# INLINE setColour #-}

-- | Terrain colour of a map square: never-seen squares are black,
-- currently visible ones are full brightness, remembered-but-hidden
-- ones are dimmed by blending with black.
squareColour :: Square -> Colour Float
squareColour square | not (wasSeen square) = black
                    | isVisible square = squareColour'
                    | otherwise = blend 0.5 black squareColour'
  where
    -- water is blue, land is brown
    squareColour' | isWater square = blue
                  | otherwise = saddlebrown
{-# INLINE squareColour #-}

-- | 'rectangle' taking 'Int' arguments.
rectangle' :: Int -> Int -> Int -> Int -> Render ()
rectangle' !x !y !w !h = rectangle (fromIntegral x) (fromIntegral y) (fromIntegral w) (fromIntegral h)
{-# INLINE rectangle' #-}

-- | Fill every square in the given (inclusive) rectangle with the
-- colour produced by @f@.  Antialiasing is disabled so squares tile
-- without seams, and restored afterwards.
renderMap :: (Point -> Colour Float) -> Point -> Point -> Render ()
renderMap f (Point sx sy) (Point ex ey) = do
    setAntialias AntialiasNone
    forM_ [sx.. ex] $ \x -> do
        forM_ [sy.. ey] $ \y -> do
            setColour $ f (Point x y)
            rectangle (fromIntegral x - 0.5) (fromIntegral y - 0.5) 1 1
            fill
    setAntialias AntialiasDefault

-- | Draw an ant hill: black disc outlined in the owner's colour,
-- labelled "H".
drawHill :: Player -> Point -> Render ()
drawHill player p = do
    setColour black
    drawCircle p 0.8 >> fillPreserve
    setLineWidth 0.1
    setColour (antColour player) >> stroke
    drawTextAt p "H"

-- | Draw an ant as a filled disc in the owning player's colour.
drawAnt :: Player -> Point -> Render ()
drawAnt player p = do
    setColour (antColour player)
    drawCircle p 0.5
    fill

-- | Mark food with a magenta "F".
drawFood :: Point -> Render ()
drawFood p = do
    setFontSize 2.0
    setColour magenta
    drawTextAt p "F"
-- | Run the action on the payload of a 'Just'; do nothing for 'Nothing'.
whenMaybe :: (Monad m) => Maybe a -> (a -> m ()) -> m ()
whenMaybe mv act = maybe (return ()) act mv
-- | Draw the contents (hills, ants, food) of every square in the
-- given (inclusive) rectangle.
renderContent :: Map -> Point -> Point -> Render ()
renderContent world (Point sx sy) (Point ex ey) = do
    setFontSize 1.0
    forM_ [sx.. ex] $ \x -> do
        forM_ [sy.. ey] $ \y -> do
            let p = (Point x y)
            renderSquare (world `at` p) p

-- | Mark each point with a filled magenta circle.
renderPoints :: [Point] -> Render ()
renderPoints ps = do
    setColour magenta
    forM_ ps $ \p -> do
        drawCircle p 0.5
        fill

-- data Task = Unassigned | Goto !RegionIndex | Gather !Point | Guard | Retreat deriving Eq

-- | Draw a line between two points, taking map wrap-around into account.
wrapLine :: Size -> Point -> Point -> Render ()
wrapLine worldSize p p' = drawLine' p d
  where d = wrapDiff worldSize p p'

-- | Visualise scheduled ant tasks: each ant drawn colour-coded by task,
-- with a black line to its destination where one exists.
renderTasks :: Size -> Graph -> [AntTask] -> Render ()
renderTasks size graph ants = do
    setLineWidth 0.2
    forM_ ants $ renderTask
  where
    renderTask (p, Goto r) = setColour lightblue >> drawAnt p >> setColour black >> wrapLine size p p'
      where p' = regionCentre (graph `grIndex` r)
    renderTask (p, Gather food) = setColour lightgreen >> drawAnt p >> setColour black >> wrapLine size p food
    renderTask (p, Guard enemy) = setColour red >> drawAnt p >> setColour black >> wrapLine size p enemy
    renderTask (p, _) = setColour gray >> drawAnt p
    -- NOTE: shadows the top-level 'drawAnt' (which takes a 'Player')
    drawAnt p = drawCircle p 0.5 >> fill

-- | Draw hill, ant and food markers for a single square.
renderSquare :: Square -> Point -> Render ()
renderSquare sq p = do
    whenMaybe (squareHill sq) $ \player ->
        drawHill player p
    whenMaybe (squareAnt sq) $ \player ->
        drawAnt player p
    when (hasFood sq) $ drawFood p
-- | Base terrain colour of the square at a world position.
worldColour :: Map -> Point -> Colour Float
worldColour world p = squareColour (world `at` p)
{-# INLINE worldColour #-}

-- | Palette used to distinguish adjacent regions.
regionColourSet :: V.Vector (Colour Float)
regionColourSet = V.fromList [lightsalmon, lightseagreen, cornflowerblue, brown, pink, cadetblue, olive, brown, moccasin, darkkhaki, cornsilk, lightsteelblue, darkgoldenrod, azure]

-- | Darkened colour for a region index; cycles through the palette.
regionColour :: Int -> Colour Float
regionColour i = blend 0.5 black $ regionColourSet `V.unsafeIndex` (i `mod` V.length regionColourSet)

-- | Palette used to distinguish players' ants.
antColourSet :: V.Vector (Colour Float)
antColourSet = V.fromList [white, lightgreen, orange, darkturquoise, red, blue, lightsalmon, mediumpurple]

-- | Colour for a player index; cycles through the palette.
antColour :: Int -> Colour Float
antColour i = antColourSet `V.unsafeIndex` (i `mod` V.length antColourSet)

{-makeRGB (fromIntegral (i * 117 `mod` 360))
    where
    makeRGB hue = uncurryRGB sRGB (hsv hue 1 1)
-}
-- | Add a full-circle arc of radius @r@ centred on the point.
drawCircle :: Point -> Double -> Render ()
drawCircle (Point x y) r = arc (fromIntegral x) (fromIntegral y) r 0 (pi * 2.0)
{-# INLINE drawCircle #-}

-- | Stroke a straight line between two points.
drawLine :: Point -> Point -> Render ()
drawLine (Point x y) (Point x' y') = do
    moveTo (fromIntegral x) (fromIntegral y)
    lineTo (fromIntegral x') (fromIntegral y')
    stroke

-- | Stroke a line from a point along an offset vector.
drawLine' :: Point -> Size -> Render ()
drawLine' (Point x y) (Size dx dy) = do
    moveTo (fromIntegral x) (fromIntegral y)
    lineTo (fromIntegral (x + dx)) (fromIntegral (y + dy))
    stroke

-- | Shortest displacement from the first point to the second on a
-- toroidal map: per axis, pick whichever of the three wrap candidates
-- has the smallest magnitude.
wrapDiff :: Size -> Point -> Point -> Size
wrapDiff (Size width height) (Point x1 y1) (Point x2 y2) = Size (wrap x1 x2 width) (wrap y1 y2 height)
  where
    wrap d1 d2 dim = minimumBy (compare `on` abs) [d2 - d1, d2 - d1 + dim, d2 - d1 - dim]

-- | 'moveTo' taking a 'Point'.
moveTo' :: Point -> Render ()
moveTo' (Point x y) = moveTo (fromIntegral x) (fromIntegral y)

-- | Draw text approximately centred on the given point.
drawTextAt :: Point -> String -> Render ()
drawTextAt (Point x y) str = do
    ext <- textExtents str
    moveTo (fromIntegral x + 0.4 - textExtentsWidth ext * 0.5) (fromIntegral y + textExtentsHeight ext * 0.5)
    showText str
-- | Draw the region graph: translucent yellow edges between region
-- centres (wrap-aware), then a labelled black node per region.
renderGraph :: Size -> Graph -> Render()
renderGraph worldSize graph = do
    setLineWidth 0.3
    setSourceRGBA 1 1 0 0.4
    forM_ (grRegions graph) $ \r -> do
        forM_ (grNeighbors graph r) $ \(r', e) -> do
            let d = wrapDiff worldSize (regionCentre r) (regionCentre r')
            drawLine' (regionCentre r) d
    setFontSize 1.0
    forM_ (grRegions graph) $ \r -> do
        setSourceRGB 0 0 0
        drawCircle (regionCentre r) 0.6
        fill
        setSourceRGB 1 1 0
        drawTextAt (regionCentre r) (show (regionId r))

-- | Water squares are always black; everything else gets colour @c@.
mapColours :: Map -> Colour Float -> Point -> Colour Float
mapColours world c p | isWater square = black
                     | otherwise = c
  where
    square = world `at` p
{-# INLINE mapColours #-}

-- | Colour squares by the region they belong to (red = unassigned).
regionColours :: Map -> RegionMap -> Point -> Colour Float
regionColours world regionMap p = mapColours world colour p
  where
    region = regionAt regionMap (mapSize world) p
    colour | region >= 0 = (regionColour region)
           | otherwise = red
{-# INLINE regionColours #-}

-- | Like 'regionColours', but shades each region green with the
-- supplied per-region intensity (red = unassigned).
regionColours' :: (RegionIndex -> Float) -> Map -> RegionMap -> Point -> Colour Float
regionColours' lookupColor world regionMap p = mapColours world colour p
  where
    region = regionAt regionMap (mapSize world) p
    intensity = lookupColor region
    colour | region >= 0 = (sRGB 0 intensity 0)
           | otherwise = red
{-# INLINE regionColours' #-}

-- | Shade squares from white to dark green by relative movement cost.
passColours :: Map -> Passibility -> Point -> Colour Float
passColours world pass p = mapColours world colour p
  where
    cost = pass `squareCost` p
    scale = fromIntegral cost / fromIntegral (maxCost pass)
    colour = blend scale white darkgreen
{-# INLINE passColours #-}
| Saulzar/Ants | Ant/Renderer.hs | bsd-3-clause | 7,774 | 0 | 20 | 2,222 | 2,724 | 1,346 | 1,378 | 168 | 4 |
module Sortier.Programm.Check where
import Sortier.Common.Util
import Sortier.Common.Config
import Sortier.Programm.Type
import Sortier.Programm.Exec
import Sortier.Programm.Example
import qualified Autolib.Util.Wort ( alle )
import Data.List ( tails)
import Data.Typeable
import Autolib.Reporter
import Autolib.ToDoc
import qualified Challenger as C
import Inter.Types
import Autolib.Size
import Autolib.Util.Sort ( sortBy )
-- | Exhaustively test a candidate sorting program: run it on every
-- test input of the given width ('testing') and reject as soon as a
-- run fails or produces output that is not sorted.
check :: Int -> Program -> Reporter ()
check breit p = do
    let verify xs = do
            -- run the program on one input and collect its report
            let ( mres, com ) = export $ Sortier.Programm.Exec.ute p xs
            case mres of
                Nothing -> reject $ vcat [ text "Fehler bei Ausführung" , com ]
                -- bind the output to a fresh name instead of shadowing
                -- the input list 'xs' (previous code shadowed it)
                Just ys -> when ( not $ is_increasing ys ) $ reject $ vcat
                    [ text "Fehler im Resultat (nicht geordnet)"
                    , com
                    ]
    mapM_ verify $ testing breit
    -- 'inform' already yields (); the old trailing 'return ()' was redundant
    inform $ text "Das Programm hat alle möglichen Eingaben korrekt geordnet."
-- | Tag type identifying this exercise kind to the autotool framework.
data Sortier_Programm = Sortier_Programm
    deriving ( Eq, Ord, Show, Read, Typeable )

instance OrderScore Sortier_Programm where
    scoringOrder _ = Increasing

-- Validate the instance configuration: positive width and size, and a
-- size no smaller than the information-theoretic lower bound.
instance C.Verify Sortier_Programm Config where
    verify p i = do
        assert ( width i > 0)
            $ text "die Breite (width) soll eine positive Zahl sein"
        assert ( max_size i > 0)
            $ text "die Größe (max_size) soll eine positive Zahl sein"
        -- lower bound: any comparison sort needs >= log2(width!) comparisons
        let bound = 1 + truncate ( logBase 2 $ fromIntegral $ product [ 1 .. width i ] )
        assert ( max_size i >= bound ) $ vcat
            [ text "die Größe (max_size) darf nicht kleiner sein"
            , text "als die informationstheoretische Schranke (log2 (width!))"
            ]

instance C.Partial Sortier_Programm Config Program where
    -- problem statement shown to the student
    describe p i = vcat
        [ text "Finden Sie ein Sortierprogramm für"
          <+> toDoc ( width i ) <+> text "Eingaben,"
        , text "das für jede Eingabe höchstens" <+> toDoc ( max_size i )
          <+> text "Vergleiche ausführt."
        ]
    initial p i = nonsense $ names $ width i
    -- quick size check before the full test
    partial p i b = do
        let s = size b
        inform $ text "Die größtmögliche Anzahl von Vergleichen ist" <+> toDoc s
        when ( s > max_size i ) $ reject $ text "Das ist zuviel."
    -- full correctness check over all test inputs
    total p i b = do
        check ( width i ) b

make :: Make
make = direct Sortier_Programm Sortier.Common.Config.example
| florianpilz/autotool | src/Sortier/Programm/Check.hs | gpl-2.0 | 2,272 | 22 | 24 | 540 | 562 | 306 | 256 | -1 | -1 |
import Distribution.Simple
import Distribution.Simple.Setup ( BuildFlags )
import Distribution.Simple.LocalBuildInfo ( LocalBuildInfo )
import Distribution.PackageDescription ( PackageDescription
, HookedBuildInfo
, emptyHookedBuildInfo )
import System.Exit ( die )
import System.Process ( readProcess )
import System.Directory ( doesFileExist )
-- | Standard Cabal entry point, extended with a pre-build hook that
-- compiles the project's SASS files.
main = defaultMainWithHooks $ simpleUserHooks {preBuild = buildSassFiles}
-- | Pre-build hook: run the @makeSASS@ shell script when present,
-- echoing its output; abort the build when the script is missing.
buildSassFiles :: Args -> BuildFlags -> IO HookedBuildInfo
buildSassFiles _ _ = do
    present <- doesFileExist scriptFile
    if not present
        then die $ "Could not find SASS build script \"" ++ scriptFile ++ "\""
        else do
            output <- readProcess "sh" [scriptFile] ""
            putStr output
            return emptyHookedBuildInfo
  where
    scriptFile = "makeSASS"
| MortimerMcMire315/sparkive | Setup.hs | gpl-3.0 | 1,020 | 0 | 12 | 345 | 188 | 100 | 88 | 20 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.RDS.ApplyPendingMaintenanceAction
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Applies a pending maintenance action to a resource (for example, a DB
-- instance).
--
-- <http://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_ApplyPendingMaintenanceAction.html>
module Network.AWS.RDS.ApplyPendingMaintenanceAction
(
-- * Request
ApplyPendingMaintenanceAction
-- ** Request constructor
, applyPendingMaintenanceAction
-- ** Request lenses
, apmaApplyAction
, apmaOptInType
, apmaResourceIdentifier
-- * Response
, ApplyPendingMaintenanceActionResponse
-- ** Response constructor
, applyPendingMaintenanceActionResponse
-- ** Response lenses
, apmarResourcePendingMaintenanceActions
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.RDS.Types
import qualified GHC.Exts
-- | Request parameters for the @ApplyPendingMaintenanceAction@ call.
data ApplyPendingMaintenanceAction = ApplyPendingMaintenanceAction
    { _apmaApplyAction        :: Text  -- ^ pending maintenance action to apply
    , _apmaOptInType          :: Text  -- ^ type of opt-in request (or undo)
    , _apmaResourceIdentifier :: Text  -- ^ ARN of the target resource
    } deriving (Eq, Ord, Read, Show)
-- | 'ApplyPendingMaintenanceAction' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'apmaApplyAction' @::@ 'Text'
--
-- * 'apmaOptInType' @::@ 'Text'
--
-- * 'apmaResourceIdentifier' @::@ 'Text'
--
applyPendingMaintenanceAction :: Text -- ^ 'apmaResourceIdentifier'
                              -> Text -- ^ 'apmaApplyAction'
                              -> Text -- ^ 'apmaOptInType'
                              -> ApplyPendingMaintenanceAction
applyPendingMaintenanceAction rid action optIn = ApplyPendingMaintenanceAction
    { _apmaApplyAction        = action
    , _apmaOptInType          = optIn
    , _apmaResourceIdentifier = rid
    }
-- | The pending maintenance action to apply to this resource.
apmaApplyAction :: Lens' ApplyPendingMaintenanceAction Text
apmaApplyAction = lens _apmaApplyAction setter
  where
    setter s new = s { _apmaApplyAction = new }
-- | A value that specifies the type of opt-in request, or undoes an opt-in
-- request. An opt-in request of type 'immediate' cannot be undone.
--
-- Valid values:
--
-- 'immediate' - Apply the maintenance action immediately. 'next-maintenance' -
-- Apply the maintenance action during the next maintenance window for the
-- resource. 'undo-opt-in' - Cancel any existing 'next-maintenance' opt-in requests.
--
apmaOptInType :: Lens' ApplyPendingMaintenanceAction Text
apmaOptInType = lens _apmaOptInType setter
  where
    setter s new = s { _apmaOptInType = new }
-- | The ARN of the resource that the pending maintenance action applies to.
apmaResourceIdentifier :: Lens' ApplyPendingMaintenanceAction Text
apmaResourceIdentifier = lens _apmaResourceIdentifier setter
  where
    setter s new = s { _apmaResourceIdentifier = new }
-- | Response payload: the (possibly absent) pending maintenance actions
-- for the resource, as parsed from the service's XML reply.
newtype ApplyPendingMaintenanceActionResponse = ApplyPendingMaintenanceActionResponse
    { _apmarResourcePendingMaintenanceActions :: Maybe ResourcePendingMaintenanceActions
    } deriving (Eq, Read, Show)
-- | 'ApplyPendingMaintenanceActionResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'apmarResourcePendingMaintenanceActions' @::@ 'Maybe' 'ResourcePendingMaintenanceActions'
--
applyPendingMaintenanceActionResponse :: ApplyPendingMaintenanceActionResponse
applyPendingMaintenanceActionResponse =
    -- single-field newtype, so positional application reads cleanly
    ApplyPendingMaintenanceActionResponse Nothing
apmarResourcePendingMaintenanceActions :: Lens' ApplyPendingMaintenanceActionResponse (Maybe ResourcePendingMaintenanceActions)
apmarResourcePendingMaintenanceActions = lens getter setter
  where
    getter       = _apmarResourcePendingMaintenanceActions
    setter s new = s { _apmarResourcePendingMaintenanceActions = new }
-- Requests are issued against the service root path.
instance ToPath ApplyPendingMaintenanceAction where
    toPath = const "/"

-- Request parameters travel as query-string key/value pairs.
instance ToQuery ApplyPendingMaintenanceAction where
    toQuery ApplyPendingMaintenanceAction{..} = mconcat
        [ "ApplyAction"        =? _apmaApplyAction
        , "OptInType"          =? _apmaOptInType
        , "ResourceIdentifier" =? _apmaResourceIdentifier
        ]

-- Empty instance body: relies on the class's default header handling.
instance ToHeaders ApplyPendingMaintenanceAction
instance AWSRequest ApplyPendingMaintenanceAction where
    type Sv ApplyPendingMaintenanceAction = RDS
    type Rs ApplyPendingMaintenanceAction = ApplyPendingMaintenanceActionResponse

    -- issued as a POST of action "ApplyPendingMaintenanceAction";
    -- the reply is decoded via the response type's 'FromXML' instance
    request  = post "ApplyPendingMaintenanceAction"
    response = xmlResponse
instance FromXML ApplyPendingMaintenanceActionResponse where
    -- Unwrap the ...Result element; the inner node is optional ('.@?'),
    -- yielding Nothing when the service omits it.
    parseXML = withElement "ApplyPendingMaintenanceActionResult" $ \x -> ApplyPendingMaintenanceActionResponse
        <$> x .@? "ResourcePendingMaintenanceActions"
| kim/amazonka | amazonka-rds/gen/Network/AWS/RDS/ApplyPendingMaintenanceAction.hs | mpl-2.0 | 5,561 | 0 | 9 | 1,079 | 564 | 346 | 218 | 70 | 1 |
{-# LANGUAGE BangPatterns, CPP, ScopedTypeVariables #-}
-----------------------------------------------------------------------------
--
-- The register allocator
--
-- (c) The University of Glasgow 2004
--
-----------------------------------------------------------------------------
{-
The algorithm is roughly:
1) Compute strongly connected components of the basic block list.
2) Compute liveness (mapping from pseudo register to
point(s) of death?).
3) Walk instructions in each basic block. We keep track of
(a) Free real registers (a bitmap?)
(b) Current assignment of temporaries to machine registers and/or
spill slots (call this the "assignment").
(c) Partial mapping from basic block ids to a virt-to-loc mapping.
When we first encounter a branch to a basic block,
we fill in its entry in this table with the current mapping.
For each instruction:
(a) For each temporary *read* by the instruction:
If the temporary does not have a real register allocation:
- Allocate a real register from the free list. If
the list is empty:
- Find a temporary to spill. Pick one that is
not used in this instruction (ToDo: not
used for a while...)
- generate a spill instruction
- If the temporary was previously spilled,
generate an instruction to read the temp from its spill loc.
(optimisation: if we can see that a real register is going to
be used soon, then don't use it for allocation).
(b) For each real register clobbered by this instruction:
If a temporary resides in it,
If the temporary is live after this instruction,
Move the temporary to another (non-clobbered & free) reg,
or spill it to memory. Mark the temporary as residing
in both memory and a register if it was spilled (it might
need to be read by this instruction).
(ToDo: this is wrong for jump instructions?)
We do this after step (a), because if we start with
movq v1, %rsi
which is an instruction that clobbers %rsi, if v1 currently resides
in %rsi we want to get
movq %rsi, %freereg
movq %rsi, %rsi -- will disappear
instead of
movq %rsi, %freereg
movq %freereg, %rsi
(c) Update the current assignment
(d) If the instruction is a branch:
if the destination block already has a register assignment,
Generate a new block with fixup code and redirect the
jump to the new block.
else,
Update the block id->assignment mapping with the current
assignment.
(e) Delete all register assignments for temps which are read
(only) and die here. Update the free register list.
(f) Mark all registers clobbered by this instruction as not free,
and mark temporaries which have been spilled due to clobbering
        as in memory (step (a) marks them as in both mem & reg).
(g) For each temporary *written* by this instruction:
Allocate a real register as for (b), spilling something
else if necessary.
- except when updating the assignment, drop any memory
locations that the temporary was previously in, since
they will be no longer valid after this instruction.
(h) Delete all register assignments for temps which are
written and die here (there should rarely be any). Update
the free register list.
(i) Rewrite the instruction with the new mapping.
(j) For each spilled reg known to be now dead, re-add its stack slot
to the free list.
-}
module RegAlloc.Linear.Main (
regAlloc,
module RegAlloc.Linear.Base,
module RegAlloc.Linear.Stats
) where
#include "HsVersions.h"
import RegAlloc.Linear.State
import RegAlloc.Linear.Base
import RegAlloc.Linear.StackMap
import RegAlloc.Linear.FreeRegs
import RegAlloc.Linear.Stats
import RegAlloc.Linear.JoinToTargets
import qualified RegAlloc.Linear.PPC.FreeRegs as PPC
import qualified RegAlloc.Linear.SPARC.FreeRegs as SPARC
import qualified RegAlloc.Linear.X86.FreeRegs as X86
import qualified RegAlloc.Linear.X86_64.FreeRegs as X86_64
import TargetReg
import RegAlloc.Liveness
import Instruction
import Reg
import BlockId
import Hoopl
import Cmm hiding (RegSet)
import Digraph
import DynFlags
import Unique
import UniqSet
import UniqFM
import UniqSupply
import Outputable
import Platform
import Data.Maybe
import Data.List
import Control.Monad
-- -----------------------------------------------------------------------------
-- Top level of the register allocator
-- Allocate registers
-- | Top-level register allocation for one top-level Cmm declaration.
--   Data sections and empty procs pass through unchanged; real procs are
--   allocated per-SCC by 'linearRegAlloc'.
regAlloc
        :: (Outputable instr, Instruction instr)
        => DynFlags
        -> LiveCmmDecl statics instr
        -> UniqSM ( NatCmmDecl statics instr
                  , Maybe Int  -- number of extra stack slots required,
                               -- beyond maxSpillSlots
                  , Maybe RegAllocStats)

-- data sections contain no instructions: nothing to allocate
regAlloc _ (CmmData sec d)
        = return
                ( CmmData sec d
                , Nothing
                , Nothing )

-- a proc with no blocks: just rebuild it with an empty graph
regAlloc _ (CmmProc (LiveInfo info _ _ _) lbl live [])
        = return ( CmmProc info lbl live (ListGraph [])
                 , Nothing
                 , Nothing )

regAlloc dflags (CmmProc static lbl live sccs)
        | LiveInfo info entry_ids@(first_id:_) (Just block_live) _ <- static
        = do
                -- do register allocation on each component.
                (final_blocks, stats, stack_use)
                        <- linearRegAlloc dflags entry_ids block_live sccs

                -- make sure the block that was first in the input list
                -- stays at the front of the output
                let ((first':_), rest')
                        = partition ((== first_id) . blockId) final_blocks

                -- report stack slots used beyond the pre-reserved area,
                -- so the caller can grow the stack frame
                let max_spill_slots = maxSpillSlots dflags
                    extra_stack
                        | stack_use > max_spill_slots
                        = Just (stack_use - max_spill_slots)
                        | otherwise
                        = Nothing

                return ( CmmProc info lbl live (ListGraph (first' : rest'))
                       , extra_stack
                       , Just stats)

-- bogus. to make non-exhaustive match warning go away.
regAlloc _ (CmmProc _ _ _ _)
        = panic "RegAllocLinear.regAlloc: no match"
-- -----------------------------------------------------------------------------
-- Linear sweep to allocate registers
-- | Do register allocation on some basic blocks.
--   But be careful to allocate a block in an SCC only if it has
--   an entry in the block map or it is the first block.
--
linearRegAlloc
        :: (Outputable instr, Instruction instr)
        => DynFlags
        -> [BlockId]                    -- ^ entry points
        -> BlockMap RegSet              -- ^ live regs on entry to each basic block
        -> [SCC (LiveBasicBlock instr)] -- ^ instructions annotated with "deaths"
        -> UniqSM ([NatBasicBlock instr], RegAllocStats, Int)

linearRegAlloc dflags entry_ids block_live sccs
 = case platformArch platform of
        ArchX86        -> allocFor (frInitFreeRegs platform :: X86.FreeRegs)
        ArchX86_64     -> allocFor (frInitFreeRegs platform :: X86_64.FreeRegs)
        ArchSPARC      -> allocFor (frInitFreeRegs platform :: SPARC.FreeRegs)
        ArchSPARC64    -> panic "linearRegAlloc ArchSPARC64"
        ArchPPC        -> allocFor (frInitFreeRegs platform :: PPC.FreeRegs)
        ArchARM _ _ _  -> panic "linearRegAlloc ArchARM"
        ArchARM64      -> panic "linearRegAlloc ArchARM64"
        ArchPPC_64 _   -> allocFor (frInitFreeRegs platform :: PPC.FreeRegs)
        ArchAlpha      -> panic "linearRegAlloc ArchAlpha"
        ArchMipseb     -> panic "linearRegAlloc ArchMipseb"
        ArchMipsel     -> panic "linearRegAlloc ArchMipsel"
        ArchJavaScript -> panic "linearRegAlloc ArchJavaScript"
        ArchUnknown    -> panic "linearRegAlloc ArchUnknown"
 where
    -- The type annotation on the initial free-register set is what
    -- selects the per-architecture 'FR' instance for the whole run.
    allocFor initFree = linearRegAlloc' dflags initFree entry_ids block_live sccs
    platform          = targetPlatform dflags
-- | Run the allocator monad over all SCCs, starting from an empty block
--   assignment map, the given initial free-register set, an empty register
--   map and a fresh stack map.
linearRegAlloc'
        :: (FR freeRegs, Outputable instr, Instruction instr)
        => DynFlags
        -> freeRegs
        -> [BlockId]                    -- ^ entry points
        -> BlockMap RegSet              -- ^ live regs on entry to each basic block
        -> [SCC (LiveBasicBlock instr)] -- ^ instructions annotated with "deaths"
        -> UniqSM ([NatBasicBlock instr], RegAllocStats, Int)

linearRegAlloc' dflags initFreeRegs entry_ids block_live sccs
 = do   us      <- getUniqueSupplyM
        let (_, stack, stats, blocks) =
                runR dflags mapEmpty initFreeRegs emptyRegMap (emptyStackMap dflags) us
                    $ linearRA_SCCs entry_ids block_live [] sccs
        -- getStackUse reports how many spill slots the allocation consumed
        return  (blocks, stats, getStackUse stack)
-- | Allocate each SCC in turn. Finished blocks are accumulated in
--   reverse order ('blocksAcc') and reversed once at the end.
linearRA_SCCs :: (FR freeRegs, Instruction instr, Outputable instr)
              => [BlockId]
              -> BlockMap RegSet
              -> [NatBasicBlock instr]
              -> [SCC (LiveBasicBlock instr)]
              -> RegM freeRegs [NatBasicBlock instr]

linearRA_SCCs _ _ blocksAcc []
        = return $ reverse blocksAcc

-- an acyclic SCC is a single block: allocate it directly
linearRA_SCCs entry_ids block_live blocksAcc (AcyclicSCC block : sccs)
 = do   blocks' <- processBlock block_live block
        linearRA_SCCs entry_ids block_live
                ((reverse blocks') ++ blocksAcc)
                sccs

-- a cyclic SCC (loop) needs the iterative 'process' worklist, because a
-- block may only be allocated once a predecessor has set its incoming
-- assignment (or it is an entry block)
linearRA_SCCs entry_ids block_live blocksAcc (CyclicSCC blocks : sccs)
 = do
        blockss' <- process entry_ids block_live blocks [] (return []) False
        linearRA_SCCs entry_ids block_live
                (reverse (concat blockss') ++ blocksAcc)
                sccs
{- from John Dias's patch 2008/10/16:
The linear-scan allocator sometimes allocates a block
before allocating one of its predecessors, which could lead to
inconsistent allocations. Make it so a block is only allocated
if a predecessor has set the "incoming" assignments for the block, or
if it's the procedure's entry block.
BL 2009/02: Careful. If the assignment for a block doesn't get set for
some reason then this function will loop. We should probably do some
more sanity checking to guard against this eventuality.
-}
-- | Worklist loop for cyclic SCCs: repeatedly sweep the pending blocks,
--   allocating any block that has an incoming assignment (or is an entry
--   point) and deferring the rest to the next round. Stops when a round
--   makes no progress (see BL 2009/02 note above).
process :: (FR freeRegs, Instruction instr, Outputable instr)
        => [BlockId]                            -- entry points
        -> BlockMap RegSet                      -- live regs on entry per block
        -> [GenBasicBlock (LiveInstr instr)]    -- blocks still to try this round
        -> [GenBasicBlock (LiveInstr instr)]    -- blocks deferred to next round
        -> [[NatBasicBlock instr]]              -- accumulator (reverse order)
        -> Bool                                 -- did this round allocate anything?
        -> RegM freeRegs [[NatBasicBlock instr]]

process _ _ [] [] accum _
        = return $ reverse accum

process entry_ids block_live [] next_round accum madeProgress
        | not madeProgress
        -- no block became allocatable this round: bail out rather than loop
        {- BUGS: There are so many unreachable blocks in the code the warnings are overwhelming.
             pprTrace "RegAlloc.Linear.Main.process: no progress made, bailing out."
                ( text "Unreachable blocks:"
                 $$ vcat (map ppr next_round)) -}
        = return $ reverse accum

        | otherwise
        = process entry_ids block_live
                  next_round [] accum False

process entry_ids block_live (b@(BasicBlock id _) : blocks)
        next_round accum madeProgress
 = do
        block_assig <- getBlockAssigR

        -- allocatable iff a predecessor recorded incoming assignments for
        -- this block, or it is one of the procedure's entry blocks
        if isJust (mapLookup id block_assig)
             || id `elem` entry_ids
         then do
                b' <- processBlock block_live b
                process entry_ids block_live blocks
                        next_round (b' : accum) True

         else   process entry_ids block_live blocks
                        (b : next_round) accum madeProgress
-- | Do register allocation on a single basic block: seed the allocator
--   state for this block, then sweep its instructions. The allocated
--   block comes back first, followed by any fixup blocks generated
--   while joining to branch targets.
processBlock
        :: (FR freeRegs, Outputable instr, Instruction instr)
        => BlockMap RegSet                      -- ^ live regs on entry to each basic block
        -> LiveBasicBlock instr                 -- ^ block to do register allocation on
        -> RegM freeRegs [NatBasicBlock instr]  -- ^ block with registers allocated

processBlock block_live (BasicBlock id instrs)
 = do   initBlock id block_live
        (allocd, fixups) <- linearRA block_live [] [] id instrs
        return (BasicBlock id allocd : fixups)
-- | Load the freeregs and current reg assignment into the RegM state
-- for the basic block with this BlockId.
initBlock :: FR freeRegs
          => BlockId -> BlockMap RegSet -> RegM freeRegs ()
initBlock id block_live
 = do   dflags <- getDynFlags
        let platform = targetPlatform dflags
        block_assig <- getBlockAssigR
        case mapLookup id block_assig of
                -- no prior info about this block: we must consider
                -- any fixed regs to be allocated, but we can ignore
                -- virtual regs (presumably this is part of a loop,
                -- and we'll iterate again). The assignment begins
                -- empty.
                Nothing
                 -> do  -- pprTrace "initFreeRegs" (text $ show initFreeRegs) (return ())
                        case mapLookup id block_live of
                          Nothing ->
                            setFreeRegsR (frInitFreeRegs platform)
                          Just live ->
                            -- mark every real register that is live-in as
                            -- already allocated
                            setFreeRegsR $ foldr (frAllocateReg platform) (frInitFreeRegs platform) [ r | RegReal r <- nonDetEltsUFM live ]
                            -- See Note [Unique Determinism and code generation]
                        setAssigR emptyRegMap

                -- load info about register assignments leading into this block.
                Just (freeregs, assig)
                 -> do  setFreeRegsR freeregs
                        setAssigR assig
-- | Do allocation for a sequence of instructions.
--   Both accumulators are built in reverse; only the instruction list is
--   reversed on return (fixup block order is irrelevant).
linearRA
        :: (FR freeRegs, Outputable instr, Instruction instr)
        => BlockMap RegSet              -- ^ map of what vregs are live on entry to each block.
        -> [instr]                      -- ^ accumulator for instructions already processed.
        -> [NatBasicBlock instr]        -- ^ accumulator for blocks of fixup code.
        -> BlockId                      -- ^ id of the current block, for debugging.
        -> [LiveInstr instr]            -- ^ liveness annotated instructions in this block.
        -> RegM freeRegs
                ( [instr]               -- instructions after register allocation
                , [NatBasicBlock instr]) -- fresh blocks of fixup code.

linearRA _ accInstr accFixup _ []
        = return
                ( reverse accInstr      -- instrs need to be returned in the correct order.
                , accFixup)             -- it doesn't matter what order the fixup blocks are returned in.

linearRA block_live accInstr accFixups id (instr:instrs)
 = do
        -- raInsn prepends the allocated instruction(s) to the accumulator
        (accInstr', new_fixups) <- raInsn block_live accInstr id instr
        linearRA block_live accInstr' (new_fixups ++ accFixups) id instrs
-- | Do allocation for a single instruction.
raInsn
        :: (FR freeRegs, Outputable instr, Instruction instr)
        => BlockMap RegSet              -- ^ map of what vregs are live on entry to each block.
        -> [instr]                      -- ^ accumulator for instructions already processed.
        -> BlockId                      -- ^ the id of the current block, for debugging
        -> LiveInstr instr              -- ^ the instr to have its regs allocated, with liveness info.
        -> RegM freeRegs
                ( [instr]               -- new instructions
                , [NatBasicBlock instr]) -- extra fixup blocks

-- a stack-delta pseudo-instruction: just record the new delta
raInsn _ new_instrs _ (LiveInstr ii Nothing)
        | Just n <- takeDeltaInstr ii
        = do    setDeltaR n
                return (new_instrs, [])

-- meta instructions pass through without allocation
raInsn _ new_instrs _ (LiveInstr ii@(Instr i) Nothing)
        | isMetaInstr ii
        = return (i : new_instrs, [])

raInsn block_live new_instrs id (LiveInstr (Instr instr) (Just live))
 = do
    assig <- getAssigR

    -- If we have a reg->reg move between virtual registers, where the
    -- src register is not live after this instruction, and the dst
    -- register does not already have an assignment,
    -- and the source register is assigned to a register, not to a spill slot,
    -- then we can eliminate the instruction.
    -- (we can't eliminate it if the source register is on the stack, because
    --  we do not want to use one spill slot for different virtual registers)
    case takeRegRegMoveInstr instr of
        Just (src,dst) | src `elementOfUniqSet` (liveDieRead live),
                         isVirtualReg dst,
                         not (dst `elemUFM` assig),
                         isRealReg src || isInReg src assig -> do
           case src of
              (RegReal rr) -> setAssigR (addToUFM assig dst (InReg rr))
                -- if src is a fixed reg, then we just map dest to this
                -- reg in the assignment. src must be an allocatable reg,
                -- otherwise it wouldn't be in r_dying.
              _virt -> case lookupUFM assig src of
                         Nothing -> panic "raInsn"
                         Just loc ->
                           -- transfer src's location to dst wholesale
                           setAssigR (addToUFM (delFromUFM assig src) dst loc)

           -- we have eliminated this instruction
           {-
           freeregs <- getFreeRegsR
           assig <- getAssigR
           pprTrace "raInsn" (text "ELIMINATED: " <> docToSDoc (pprInstr instr)
                $$ ppr r_dying <+> ppr w_dying $$ text (show freeregs) $$ ppr assig) $ do
           -}
           return (new_instrs, [])

        _ -> genRaInsn block_live new_instrs id instr
                        (nonDetEltsUFM $ liveDieRead live)
                        (nonDetEltsUFM $ liveDieWrite live)
                        -- See Note [Unique Determinism and code generation]

raInsn _ _ _ instr
        = pprPanic "raInsn" (text "no match for:" <> ppr instr)
-- ToDo: what can we do about
--
-- R1 = x
-- jump I64[x] // [R1]
--
-- where x is mapped to the same reg as R1. We want to coalesce x and
-- R1, but the register allocator doesn't know whether x will be
-- assigned to again later, in which case x and R1 should be in
-- different registers. Right now we assume the worst, and the
-- assignment to R1 will clobber x, so we'll spill x into another reg,
-- generating another reg->reg move.
-- | True when the assignment maps this reg to an 'InReg' entry
--   (i.e. it lives in a real register, not only in a spill slot).
isInReg :: Reg -> RegMap Loc -> Bool
isInReg src assig =
    case lookupUFM assig src of
        Just (InReg _) -> True
        _              -> False
-- | The general allocation case for one instruction: performs steps
--   (a)-(j) from the algorithm description at the top of this module.
--   The step order here is load-bearing; see the NB comments below.
genRaInsn :: (FR freeRegs, Instruction instr, Outputable instr)
          => BlockMap RegSet    -- live regs on entry to each block
          -> [instr]            -- accumulator for processed instructions
          -> BlockId            -- current block, for debugging
          -> instr              -- the instruction to allocate
          -> [Reg]              -- regs that die after being read here
          -> [Reg]              -- regs that die after being written here
          -> RegM freeRegs ([instr], [NatBasicBlock instr])

genRaInsn block_live new_instrs block_id instr r_dying w_dying = do
  dflags <- getDynFlags
  let platform = targetPlatform dflags
  case regUsageOfInstr platform instr of { RU read written ->
    do
    let real_written = [ rr | (RegReal rr) <- written ]
    let virt_written = [ vr | (RegVirtual vr) <- written ]

    -- we don't need to do anything with real registers that are
    -- only read by this instr. (the list is typically ~2 elements,
    -- so using nub isn't a problem).
    let virt_read = nub [ vr | (RegVirtual vr) <- read ]

    -- debugging
    {- freeregs <- getFreeRegsR
       assig <- getAssigR
       pprDebugAndThen (defaultDynFlags Settings{ sTargetPlatform=platform }) trace "genRaInsn"
        (ppr instr
                $$ text "r_dying = " <+> ppr r_dying
                $$ text "w_dying = " <+> ppr w_dying
                $$ text "virt_read = " <+> ppr virt_read
                $$ text "virt_written = " <+> ppr virt_written
                $$ text "freeregs = " <+> text (show freeregs)
                $$ text "assig = " <+> ppr assig)
        $ do
    -}

    -- (a), (b) allocate real regs for all regs read by this instruction.
    (r_spills, r_allocd) <-
        allocateRegsAndSpill True{-reading-} virt_read [] [] virt_read

    -- (c) save any temporaries which will be clobbered by this instruction
    clobber_saves <- saveClobberedTemps real_written r_dying

    -- (d) Update block map for new destinations
    -- NB. do this before removing dead regs from the assignment, because
    -- these dead regs might in fact be live in the jump targets (they're
    -- only dead in the code that follows in the current basic block).
    (fixup_blocks, adjusted_instr)
        <- joinToTargets block_live block_id instr

    -- (e) Delete all register assignments for temps which are read
    -- (only) and die here. Update the free register list.
    releaseRegs r_dying

    -- (f) Mark regs which are clobbered as unallocatable
    clobberRegs real_written

    -- (g) Allocate registers for temporaries *written* (only)
    (w_spills, w_allocd) <-
        allocateRegsAndSpill False{-writing-} virt_written [] [] virt_written

    -- (h) Release registers for temps which are written here and not
    -- used again.
    releaseRegs w_dying

    let
        -- (i) Patch the instruction
        patch_map
                = listToUFM
                        [ (t, RegReal r)
                        | (t, r) <- zip virt_read r_allocd
                                 ++ zip virt_written w_allocd ]

        patched_instr
                = patchRegsOfInstr adjusted_instr patchLookup

        -- temps not in the patch map are left untouched
        patchLookup x
                = case lookupUFM patch_map x of
                        Nothing -> x
                        Just y -> y

    -- (j) free up stack slots for dead spilled regs
    -- TODO (can't be bothered right now)

    -- erase reg->reg moves where the source and destination are the same.
    -- If the src temp didn't die in this instr but happened to be allocated
    -- to the same real reg as the destination, then we can erase the move anyway.
    let squashed_instr = case takeRegRegMoveInstr patched_instr of
                                Just (src, dst)
                                 | src == dst -> []
                                _             -> [patched_instr]

    -- NB. everything is accumulated in reverse instruction order here
    let code = squashed_instr ++ w_spills ++ reverse r_spills
                ++ clobber_saves ++ new_instrs

    -- pprTrace "patched-code" ((vcat $ map (docToSDoc . pprInstr) code)) $ do
    -- pprTrace "patched-fixup" ((ppr fixup_blocks)) $ do

    return (code, fixup_blocks)

  }
-- -----------------------------------------------------------------------------
-- releaseRegs
-- | Return the real registers backing these (dying) regs to the free
--   pool and drop their entries from the assignment.
releaseRegs :: FR freeRegs => [Reg] -> RegM freeRegs ()
releaseRegs regs = do
  dflags <- getDynFlags
  let platform = targetPlatform dflags
  assig <- getAssigR
  free <- getFreeRegsR

      -- strict accumulator on the free set to avoid a thunk chain
  let loop assig !free [] = do setAssigR assig; setFreeRegsR free; return ()
      loop assig !free (RegReal rr : rs) = loop assig (frReleaseReg platform rr free) rs
      loop assig !free (r:rs) =
         case lookupUFM assig r of
           -- InBoth/InReg: free the backing real register as well
           Just (InBoth real _) -> loop (delFromUFM assig r)
                                        (frReleaseReg platform real free) rs
           Just (InReg real) -> loop (delFromUFM assig r)
                                     (frReleaseReg platform real free) rs
           _ -> loop (delFromUFM assig r) free rs
  loop assig free regs
-- -----------------------------------------------------------------------------
-- Clobber real registers
-- For each temp in a register that is going to be clobbered:
-- - if the temp dies after this instruction, do nothing
-- - otherwise, put it somewhere safe (another reg if possible,
-- otherwise spill and record InBoth in the assignment).
-- - for allocateRegs on the temps *read*,
-- - clobbered regs are allocatable.
--
-- for allocateRegs on the temps *written*,
-- - clobbered regs are not allocatable.
--
saveClobberedTemps
        :: (Instruction instr, FR freeRegs)
        => [RealReg]            -- real registers clobbered by this instruction
        -> [Reg]                -- registers which are no longer live after this insn
        -> RegM freeRegs [instr] -- return: instructions to spill any temps that will
                                 -- be clobbered.

saveClobberedTemps [] _
        = return []

saveClobberedTemps clobbered dying
 = do
        assig <- getAssigR
        -- temps currently held (only) in a clobbered register and still
        -- live after this instruction: these must be moved to safety
        let to_spill
                = [ (temp,reg)
                  | (temp, InReg reg) <- nonDetUFMToList assig
                  -- This is non-deterministic but we do not
                  -- currently support deterministic code-generation.
                  -- See Note [Unique Determinism and code generation]
                  , any (realRegsAlias reg) clobbered
                  , temp `notElem` map getUnique dying ]

        (instrs,assig') <- clobber assig [] to_spill
        setAssigR assig'
        return instrs

   where
     clobber assig instrs []
            = return (instrs, assig)

     clobber assig instrs ((temp, reg) : rest)
       = do dflags <- getDynFlags
            let platform = targetPlatform dflags

            freeRegs <- getFreeRegsR
            let regclass = targetClassOfRealReg platform reg
                freeRegs_thisClass = frGetFreeRegs platform regclass freeRegs

            case filter (`notElem` clobbered) freeRegs_thisClass of

              -- (1) we have a free reg of the right class that isn't
              -- clobbered by this instruction; use it to save the
              -- clobbered value.
              (my_reg : _) -> do
                  setFreeRegsR (frAllocateReg platform my_reg freeRegs)

                  let new_assign = addToUFM assig temp (InReg my_reg)
                  let instr = mkRegRegMoveInstr platform
                                  (RegReal reg) (RegReal my_reg)

                  clobber new_assign (instr : instrs) rest

              -- (2) no free registers: spill the value
              [] -> do
                  (spill, slot) <- spillR (RegReal reg) temp

                  -- record why this reg was spilled for profiling
                  recordSpill (SpillClobber temp)

                  -- InBoth: the value still sits in the (clobbered) reg
                  -- until this instruction executes, and also in memory
                  let new_assign = addToUFM assig temp (InBoth reg slot)

                  clobber new_assign (spill : instrs) rest
-- | Mark all these real regs as allocated,
-- and kick out their vreg assignments.
--
clobberRegs :: FR freeRegs => [RealReg] -> RegM freeRegs ()
clobberRegs []
        = return ()

clobberRegs clobbered
 = do   dflags <- getDynFlags
        let platform = targetPlatform dflags

        -- remove every clobbered reg from the free pool
        freeregs <- getFreeRegsR
        setFreeRegsR $! foldr (frAllocateReg platform) freeregs clobbered

        assig <- getAssigR
        setAssigR $! clobber assig (nonDetUFMToList assig)
                -- This is non-deterministic but we do not
                -- currently support deterministic code-generation.
                -- See Note [Unique Determinism and code generation]

   where
        -- if the temp was InReg and clobbered, then we will have
        -- saved it in saveClobberedTemps above. So the only case
        -- we have to worry about here is InBoth. Note that this
        -- also catches temps which were loaded up during allocation
        -- of read registers, not just those saved in saveClobberedTemps.
        clobber assig []
                = assig

        clobber assig ((temp, InBoth reg slot) : rest)
                | any (realRegsAlias reg) clobbered
                -- the register copy is about to be trashed; only the
                -- memory copy remains valid
                = clobber (addToUFM assig temp (InMem slot)) rest

        clobber assig (_:rest)
                = clobber assig rest
-- -----------------------------------------------------------------------------
-- allocateRegsAndSpill
-- Why are we performing a spill?
-- | Reason/location info passed to 'allocRegsAndSpill_spill' when a temp
--   needs a fresh register.
data SpillLoc = ReadMem StackSlot -- reading from register only in memory
              | WriteNew          -- writing to a new variable
              | WriteMem          -- writing to register only in memory
-- Note that ReadNew is not valid, since you don't want to be reading
-- from an uninitialized register. We also don't need the location of
-- the register in memory, since that will be invalidated by the write.
-- Technically, we could coalesce WriteNew and WriteMem into a single
-- entry as well. -- EZY
-- This function does several things:
-- For each temporary referred to by this instruction,
-- we allocate a real register (spilling another temporary if necessary).
-- We load the temporary up from memory if necessary.
-- We also update the register assignment in the process, and
-- the list of free registers and free stack slots.
-- | Allocate a real register for each of the given temps, consulting the
--   current assignment first; delegates to 'allocRegsAndSpill_spill' when
--   a temp has no usable register and something may need to be pushed out.
allocateRegsAndSpill
        :: (FR freeRegs, Outputable instr, Instruction instr)
        => Bool                 -- True <=> reading (load up spilled regs)
        -> [VirtualReg]         -- don't push these out
        -> [instr]              -- spill insns
        -> [RealReg]            -- real registers allocated (accum.)
        -> [VirtualReg]         -- temps to allocate
        -> RegM freeRegs ( [instr] , [RealReg])

allocateRegsAndSpill _ _ spills alloc []
        = return (spills, reverse alloc)

allocateRegsAndSpill reading keep spills alloc (r:rs)
 = do   assig <- getAssigR
        let doSpill = allocRegsAndSpill_spill reading keep spills alloc r rs assig
        case lookupUFM assig r of
                -- case (1a): already in a register
                Just (InReg my_reg) ->
                        allocateRegsAndSpill reading keep spills (my_reg:alloc) rs

                -- case (1b): already in a register (and memory)
                -- NB1. if we're writing this register, update its assignment to be
                -- InReg, because the memory value is no longer valid.
                -- NB2. This is why we must process written registers here, even if they
                -- are also read by the same instruction.
                Just (InBoth my_reg _)
                 -> do  when (not reading) (setAssigR (addToUFM assig r (InReg my_reg)))
                        allocateRegsAndSpill reading keep spills (my_reg:alloc) rs

                -- Not already in a register, so we need to find a free one...
                Just (InMem slot) | reading   -> doSpill (ReadMem slot)
                                  | otherwise -> doSpill WriteMem
                Nothing | reading ->
                   pprPanic "allocateRegsAndSpill: Cannot read from uninitialized register" (ppr r)
                   -- NOTE: if the input to the NCG contains some
                   -- unreachable blocks with junk code, this panic
                   -- might be triggered. Make sure you only feed
                   -- sensible code into the NCG. In CmmPipeline we
                   -- call removeUnreachableBlocks at the end for this
                   -- reason.

                        | otherwise -> doSpill WriteNew
-- | Slow path of 'allocateRegsAndSpill': give temp @r@ a real register,
--   either by taking a free one of the right class, or by evicting some
--   other temp (preferring temps that already have a memory copy so no
--   new spill code is needed).
allocRegsAndSpill_spill :: (FR freeRegs, Instruction instr, Outputable instr)
                        => Bool           -- True <=> reading (as in allocateRegsAndSpill)
                        -> [VirtualReg]   -- don't push these out
                        -> [instr]        -- spill insns (accum.)
                        -> [RealReg]      -- real registers allocated (accum.)
                        -> VirtualReg     -- the temp to allocate now
                        -> [VirtualReg]   -- remaining temps to allocate
                        -> UniqFM Loc     -- current assignment
                        -> SpillLoc       -- why/where we are spilling from
                        -> RegM freeRegs ([instr], [RealReg])
allocRegsAndSpill_spill reading keep spills alloc r rs assig spill_loc
 = do   dflags <- getDynFlags
        let platform = targetPlatform dflags
        freeRegs <- getFreeRegsR
        let freeRegs_thisClass = frGetFreeRegs platform (classOfVirtualReg r) freeRegs

        case freeRegs_thisClass of

         -- case (2): we have a free register
         (my_reg : _) ->
           do   spills' <- loadTemp r spill_loc my_reg spills

                setAssigR (addToUFM assig r $! newLocation spill_loc my_reg)
                setFreeRegsR $ frAllocateReg platform my_reg freeRegs

                allocateRegsAndSpill reading keep spills' (my_reg : alloc) rs

         -- case (3): we need to push something out to free up a register
         [] ->
           do   let keep' = map getUnique keep

                -- the vregs we could kick out that are already in a slot
                let candidates_inBoth
                        = [ (temp, reg, mem)
                          | (temp, InBoth reg mem) <- nonDetUFMToList assig
                          -- This is non-deterministic but we do not
                          -- currently support deterministic code-generation.
                          -- See Note [Unique Determinism and code generation]
                          , temp `notElem` keep'
                          , targetClassOfRealReg platform reg == classOfVirtualReg r ]

                -- the vregs we could kick out that are only in a reg
                -- this would require writing the reg to a new slot before using it.
                let candidates_inReg
                        = [ (temp, reg)
                          | (temp, InReg reg) <- nonDetUFMToList assig
                          -- This is non-deterministic but we do not
                          -- currently support deterministic code-generation.
                          -- See Note [Unique Determinism and code generation]
                          , temp `notElem` keep'
                          , targetClassOfRealReg platform reg == classOfVirtualReg r ]

                let result

                        -- we have a temporary that is in both register and mem,
                        -- just free up its register for use.
                        | (temp, my_reg, slot) : _ <- candidates_inBoth
                        = do    spills' <- loadTemp r spill_loc my_reg spills
                                -- the evicted temp survives in its slot only
                                let assig1 = addToUFM assig temp (InMem slot)
                                let assig2 = addToUFM assig1 r $! newLocation spill_loc my_reg

                                setAssigR assig2
                                allocateRegsAndSpill reading keep spills' (my_reg:alloc) rs

                        -- otherwise, we need to spill a temporary that currently
                        -- resides in a register.
                        | (temp_to_push_out, (my_reg :: RealReg)) : _
                                <- candidates_inReg
                        = do
                                (spill_insn, slot) <- spillR (RegReal my_reg) temp_to_push_out
                                let spill_store = (if reading then id else reverse)
                                        [ -- COMMENT (fsLit "spill alloc")
                                          spill_insn ]

                                -- record that this temp was spilled
                                recordSpill (SpillAlloc temp_to_push_out)

                                -- update the register assignment
                                let assig1 = addToUFM assig temp_to_push_out (InMem slot)
                                let assig2 = addToUFM assig1 r $! newLocation spill_loc my_reg
                                setAssigR assig2

                                -- if need be, load up a spilled temp into the reg we've just freed up.
                                spills' <- loadTemp r spill_loc my_reg spills

                                allocateRegsAndSpill reading keep
                                        (spill_store ++ spills')
                                        (my_reg:alloc) rs

                        -- there wasn't anything to spill, so we're screwed.
                        | otherwise
                        = pprPanic ("RegAllocLinear.allocRegsAndSpill: no spill candidates\n")
                        $ vcat
                                [ text "allocating vreg: " <> text (show r)
                                , text "assignment: " <> ppr assig
                                , text "freeRegs: " <> text (show freeRegs)
                                , text "initFreeRegs: " <> text (show (frInitFreeRegs platform `asTypeOf` freeRegs)) ]

                result
-- | Calculate a new location after a register has been loaded.
--
-- Given where the temp's value came from ('SpillLoc') and the real
-- register it now occupies, work out its resulting 'Loc'.
newLocation :: SpillLoc -> RealReg -> Loc
newLocation spill_loc my_reg =
    case spill_loc of
        -- the tmp was read from a slot, so now it is in a reg as well
        ReadMem slot -> InBoth my_reg slot
        -- writes will always result in only the register being available
        _            -> InReg my_reg
-- | Load up a spilled temporary if we need to (read from memory).
--
-- When the temp's current value lives in a stack slot ('ReadMem'), emit
-- a reload instruction, record the reload for the spill statistics, and
-- prepend the instruction to the accumulated fix-up code.  In every
-- other case nothing needs loading, so the instruction list is returned
-- unchanged.
loadTemp
    :: (Instruction instr)
    => VirtualReg   -- ^ the temp being loaded
    -> SpillLoc     -- ^ the current location of this temp
    -> RealReg      -- ^ the hreg to load the temp into
    -> [instr]      -- ^ fix-up instructions accumulated so far
    -> RegM freeRegs [instr]
loadTemp vreg (ReadMem slot) hreg spills
 = do
    -- generate the actual reload from the slot into the real register
    insn <- loadR (RegReal hreg) slot
    -- bump the reload counter for this virtual reg (statistics only)
    recordSpill (SpillLoad $ getUnique vreg)
    return $ {- COMMENT (fsLit "spill load") : -} insn : spills
loadTemp _ _ _ spills =
    return spills
| olsner/ghc | compiler/nativeGen/RegAlloc/Linear/Main.hs | bsd-3-clause | 37,782 | 108 | 23 | 13,352 | 5,403 | 2,884 | 2,519 | 460 | 13 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
module Stack.PackageDump
( Line
, eachSection
, eachPair
, DumpPackage (..)
, conduitDumpPackage
, ghcPkgDump
, ghcPkgDescribe
, InstalledCache
, InstalledCacheEntry (..)
, newInstalledCache
, loadInstalledCache
, saveInstalledCache
, addProfiling
, addHaddock
, sinkMatching
, pruneDeps
) where
import Control.Applicative
import Control.Arrow ((&&&))
import Control.Exception.Enclosed (tryIO)
import Control.Monad (liftM)
import Control.Monad.Catch
import Control.Monad.IO.Class
import Control.Monad.Logger (MonadLogger)
import Control.Monad.Trans.Control
import Data.Attoparsec.Args
import Data.Attoparsec.Text as P
import Data.Binary.VersionTagged
import Data.ByteString (ByteString)
import qualified Data.ByteString as S
import qualified Data.ByteString.Char8 as S8
import Data.Conduit
import qualified Data.Conduit.Binary as CB
import qualified Data.Conduit.List as CL
import Data.Either (partitionEithers)
import Data.IORef
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (catMaybes, listToMaybe, fromMaybe)
import qualified Data.Set as Set
import qualified Data.Text.Encoding as T
import Data.Typeable (Typeable)
import GHC.Generics (Generic)
import Path
import Path.IO (createTree)
import Prelude -- Fix AMP warning
import Stack.GhcPkg
import Stack.Types
import System.Directory (getDirectoryContents, doesFileExist)
import System.Process.Read
-- | Cached information on whether package have profiling libraries and haddocks.
newtype InstalledCache = InstalledCache (IORef InstalledCacheInner)

-- | The actual cache payload: a map from installed-package id to what we
-- know about that package.  Kept behind an 'IORef' so the cache can be
-- updated in place as packages are inspected.
newtype InstalledCacheInner = InstalledCacheInner (Map GhcPkgId InstalledCacheEntry)
    deriving (Binary, NFData, Generic)
instance HasStructuralInfo InstalledCacheInner
instance HasSemanticVersion InstalledCacheInner

-- | Cached information on whether a package has profiling libraries and haddocks.
data InstalledCacheEntry = InstalledCacheEntry
    { installedCacheProfiling :: !Bool
      -- ^ were profiling libraries found for the package?
    , installedCacheHaddock :: !Bool
      -- ^ were haddock interface files found for the package?
    , installedCacheIdent :: !PackageIdentifier }
      -- ^ name and version of the cached package
    deriving (Eq, Generic)
instance Binary InstalledCacheEntry
instance HasStructuralInfo InstalledCacheEntry
instance NFData InstalledCacheEntry
-- | Call ghc-pkg dump with appropriate flags and stream to the given @Sink@, for a single database
--
-- Just the @dump@ subcommand; all the shared flag handling lives in
-- 'ghcPkgCmdArgs'.
ghcPkgDump
    :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m, MonadThrow m)
    => EnvOverride
    -> WhichCompiler
    -> [Path Abs Dir] -- ^ if empty, use global
    -> Sink ByteString IO a
    -> m a
ghcPkgDump = ghcPkgCmdArgs ["dump"]
-- | Call ghc-pkg describe with appropriate flags and stream to the given @Sink@, for a single database
--
-- Same plumbing as 'ghcPkgDump', but restricted to one named package.
ghcPkgDescribe
    :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m, MonadThrow m)
    => PackageName
    -> EnvOverride
    -> WhichCompiler
    -> [Path Abs Dir] -- ^ if empty, use global
    -> Sink ByteString IO a
    -> m a
ghcPkgDescribe pkgName = ghcPkgCmdArgs ["describe", "--simple-output", packageNameString pkgName]
-- | Call ghc-pkg and stream to the given @Sink@, for a single database
ghcPkgCmdArgs
    :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m, MonadThrow m)
    => [String]       -- ^ ghc-pkg subcommand plus any extra arguments
    -> EnvOverride
    -> WhichCompiler
    -> [Path Abs Dir] -- ^ if empty, use global
    -> Sink ByteString IO a
    -> m a
ghcPkgCmdArgs cmd menv wc mpkgDbs sink = do
    -- make sure the innermost (last listed) package db exists before use
    case reverse mpkgDbs of
        (pkgDb:_) -> createDatabase menv wc pkgDb -- TODO maybe use some retry logic instead?
        _ -> return ()
    sinkProcessStdout Nothing menv (ghcPkgExeName wc) args sink
  where
    args = concat
        [ case mpkgDbs of
            -- no dbs given: query only the global package db
            [] -> ["--global", "--no-user-package-db"]
            -- otherwise point ghc-pkg at exactly the given databases
            _ -> ["--user", "--no-user-package-db"] ++ concatMap (\pkgDb -> ["--package-db", toFilePath pkgDb]) mpkgDbs
        , cmd
        , ["--expand-pkgroot"]
        ]
-- | Create a new, empty @InstalledCache@
--
-- Backed by a fresh 'IORef' so entries can be added in place later.
newInstalledCache :: MonadIO m => m InstalledCache
newInstalledCache = liftIO $ InstalledCache <$> newIORef (InstalledCacheInner Map.empty)
-- | Load a @InstalledCache@ from disk, swallowing any errors and returning an
-- empty cache.
loadInstalledCache :: (MonadLogger m, MonadIO m) => Path Abs File -> m InstalledCache
loadInstalledCache path = do
    -- fall back to an empty map when the file is missing or unreadable
    inner <- taggedDecodeOrLoad path (return $ InstalledCacheInner Map.empty)
    liftIO (InstalledCache <$> newIORef inner)
-- | Save a @InstalledCache@ to disk
saveInstalledCache :: MonadIO m => Path Abs File -> InstalledCache -> m ()
saveInstalledCache path (InstalledCache ref) = liftIO $ do
    -- make sure the parent directory exists before writing the file
    createTree (parent path)
    readIORef ref >>= taggedEncodeFile path
-- | Prune a list of possible packages down to those whose dependencies are met.
--
-- * id uniquely identifies an item
--
-- * There can be multiple items per name
--
-- Works in rounds: each round admits every not-yet-dropped item all of
-- whose dependencies were admitted earlier; when several admissible
-- items share a name, the selection function decides which one wins and
-- the rest of that name are dropped for good.  Iteration stops as soon
-- as a round admits nothing new.
pruneDeps
    :: (Ord name, Ord id)
    => (id -> name)           -- ^ extract the name from an id
    -> (item -> id)           -- ^ the id of an item
    -> (item -> [id])         -- ^ get the dependencies of an item
    -> (item -> item -> item) -- ^ choose the desired of two possible items
    -> [item]                 -- ^ input items
    -> Map name item
pruneDeps getName getId getDepends chooseBest items0 =
    Map.fromList
        [ (getName (getId item), item)
        | item <- go Set.empty Set.empty [] items0
        ]
  where
    go satisfiedIds takenNames acc candidates =
        case partitionEithers (map classify candidates) of
            -- nothing else became admissible: we are done
            ([], _) -> acc
            (admitted, leftovers) ->
                let winners  = Map.fromListWith chooseBest admitted
                    newIds   = Set.fromList (map getId (Map.elems winners))
                    newNames = Map.keysSet winners
                    newItems = Map.elems winners
                in go (Set.union satisfiedIds newIds)
                      (Set.union takenNames newNames)
                      (acc ++ newItems)
                      (catMaybes leftovers)
      where
        classify item
            -- another item already claimed this name: drop for good
            | itemName `Set.member` takenNames = Right Nothing
            -- all dependencies met: admissible this round
            | all (`Set.member` satisfiedIds) (getDepends item) =
                Left (itemName, item)
            -- keep it around for a later round
            | otherwise = Right (Just item)
          where
            itemName = getName (getId item)
-- | Find the package IDs matching the given constraints with all dependencies installed.
-- Packages not mentioned in the provided @Map@ are allowed to be present too.
sinkMatching :: Monad m
             => Bool -- ^ require profiling?
             -> Bool -- ^ require haddock?
             -> Map PackageName Version -- ^ allowed versions
             -> Consumer (DumpPackage Bool Bool)
                         m
                         (Map PackageName (DumpPackage Bool Bool))
sinkMatching reqProfiling reqHaddock allowed = do
    -- keep only packages with an allowed version and the required
    -- profiling/haddock support
    dps <- CL.filter (\dp -> isAllowed (dpPackageIdent dp) &&
                             (not reqProfiling || dpProfiling dp) &&
                             (not reqHaddock || dpHaddock dp))
       =$= CL.consume
    -- drop packages with unmet dependencies, then key survivors by name
    return $ Map.fromList $ map (packageIdentifierName . dpPackageIdent &&& id) $ Map.elems $ pruneDeps
        id
        dpGhcPkgId
        dpDepends
        const -- Could consider a better comparison in the future
        dps
  where
    -- a package is allowed unless it is pinned to a different version
    isAllowed (PackageIdentifier name version) =
        case Map.lookup name allowed of
            Just version' | version /= version' -> False
            _ -> True
-- | Add profiling information to the stream of @DumpPackage@s
addProfiling :: MonadIO m
             => InstalledCache
             -> Conduit (DumpPackage a b) m (DumpPackage Bool b)
addProfiling (InstalledCache ref) =
    CL.mapM go
  where
    go dp = liftIO $ do
        InstalledCacheInner m <- readIORef ref
        let gid = dpGhcPkgId dp
        p <- case Map.lookup gid m of
            -- cache hit: reuse the stored answer
            Just installed -> return (installedCacheProfiling installed)
            -- no libraries at all: trivially counts as having profiling
            Nothing | null (dpLibraries dp) -> return True
            Nothing -> do
                -- scan the lib dirs for a lib<name>_p* file; unreadable
                -- directories are treated as empty
                let loop [] = return False
                    loop (dir:dirs) = do
                        econtents <- tryIO $ getDirectoryContents dir
                        let contents = either (const []) id econtents
                        if or [isProfiling content lib
                              | content <- contents
                              , lib <- dpLibraries dp
                              ] && not (null contents)
                            then return True
                            else loop dirs
                loop $ dpLibDirs dp
        -- NOTE(review): the freshly computed answer is not written back
        -- into the cache here -- presumably persisted elsewhere; confirm.
        return dp { dpProfiling = p }
-- | Does a directory entry look like the profiling build of the given
-- library, i.e. does its name start with @lib\<name\>_p@?
isProfiling :: FilePath   -- ^ entry in directory
            -> ByteString -- ^ name of library
            -> Bool
isProfiling entry libName =
    wanted `S.isPrefixOf` S8.pack entry
  where
    wanted = S.concat [S8.pack "lib", libName, S8.pack "_p"]
-- | Add haddock information to the stream of @DumpPackage@s
addHaddock :: MonadIO m
           => InstalledCache
           -> Conduit (DumpPackage a b) m (DumpPackage a Bool)
addHaddock (InstalledCache ref) =
    CL.mapM go
  where
    go dp = liftIO $ do
        InstalledCacheInner m <- readIORef ref
        let gid = dpGhcPkgId dp
        h <- case Map.lookup gid m of
            -- cache hit: reuse the stored answer
            Just installed -> return (installedCacheHaddock installed)
            -- nothing exposed to document: trivially has haddocks
            Nothing | not (dpHasExposedModules dp) -> return True
            Nothing -> do
                -- otherwise: does any listed haddock interface file exist?
                let loop [] = return False
                    loop (ifc:ifcs) = do
                        exists <- doesFileExist ifc
                        if exists
                            then return True
                            else loop ifcs
                loop $ dpHaddockInterfaces dp
        return dp { dpHaddock = h }
-- | Dump information for a single package
data DumpPackage profiling haddock = DumpPackage
    { dpGhcPkgId :: !GhcPkgId
      -- ^ unique installed-package id (the @id@ field)
    , dpPackageIdent :: !PackageIdentifier
      -- ^ package name and version
    , dpLibDirs :: ![FilePath]
      -- ^ directories to search for compiled libraries
    , dpLibraries :: ![ByteString]
      -- ^ @hs-libraries@ entries
    , dpHasExposedModules :: !Bool
      -- ^ has both libraries and exposed modules
    , dpDepends :: ![GhcPkgId]
      -- ^ direct dependencies (@builtin_rts@ filtered out)
    , dpHaddockInterfaces :: ![FilePath]
      -- ^ @.haddock@ interface files
    , dpHaddockHtml :: !(Maybe FilePath)
      -- ^ haddock HTML location, if present
    , dpProfiling :: !profiling
      -- ^ profiling flag, filled in by 'addProfiling' (@()@ before)
    , dpHaddock :: !haddock
      -- ^ haddock flag, filled in by 'addHaddock' (@()@ before)
    , dpIsExposed :: !Bool
      -- ^ value of the @exposed@ field
    }
    deriving (Show, Eq, Ord)
-- | Errors that can occur while interpreting @ghc-pkg@ output.
data PackageDumpException
    = MissingSingleField ByteString (Map ByteString [Line])
      -- ^ a field expected to have exactly one value was missing or
      -- multi-valued; carries the full field map for diagnostics
    | Couldn'tParseField ByteString [Line]
      -- ^ a field's value lines could not be parsed
    deriving Typeable
instance Exception PackageDumpException
instance Show PackageDumpException where
    show (MissingSingleField name values) = unlines $ concat
        [ return $ concat
            [ "Expected single value for field name "
            , show name
            , " when parsing ghc-pkg dump output:"
            ]
        , map (\(k, v) -> " " ++ show (k, v)) (Map.toList values)
        ]
    show (Couldn'tParseField name ls) =
        "Couldn't parse the field " ++ show name ++ " from lines: " ++ show ls
-- | Convert a stream of bytes into a stream of @DumpPackage@s
--
-- Each @---@-separated section of @ghc-pkg dump@ output is parsed into
-- a field map, then assembled into a 'DumpPackage'.  The built-in RTS
-- pseudo-package is skipped (yields 'Nothing', filtered by
-- 'CL.catMaybes').
conduitDumpPackage :: MonadThrow m
                   => Conduit ByteString m (DumpPackage () ())
conduitDumpPackage = (=$= CL.catMaybes) $ eachSection $ do
    pairs <- eachPair (\k -> (k, ) <$> CL.consume) =$= CL.consume
    let m = Map.fromList pairs
    -- a field that must have exactly one value line
    let parseS k =
            case Map.lookup k m of
                Just [v] -> return v
                _ -> throwM $ MissingSingleField k m
        -- Can't fail: if not found, same as an empty list. See:
        -- https://github.com/fpco/stack/issues/182
        parseM k = fromMaybe [] (Map.lookup k m)

        -- parse one entry of the depends field, dropping the RTS and
        -- tolerating it being glued onto a neighbouring id
        parseDepend :: MonadThrow m => ByteString -> m (Maybe GhcPkgId)
        parseDepend "builtin_rts" = return Nothing
        parseDepend bs =
            liftM Just $ parseGhcPkgId bs'
          where
            (bs', _builtinRts) =
                case stripSuffixBS " builtin_rts" bs of
                    Nothing ->
                        case stripPrefixBS "builtin_rts " bs of
                            Nothing -> (bs, False)
                            Just x -> (x, True)
                    Just x -> (x, True)
    case Map.lookup "id" m of
        -- skip the RTS pseudo-package entirely
        Just ["builtin_rts"] -> return Nothing
        _ -> do
            name <- parseS "name" >>= parsePackageName
            version <- parseS "version" >>= parseVersion
            ghcPkgId <- parseS "id" >>= parseGhcPkgId

            -- if a package has no modules, these won't exist
            let libDirKey = "library-dirs"
                libraries = parseM "hs-libraries"
                exposedModules = parseM "exposed-modules"
                exposed = parseM "exposed"
            depends <- mapM parseDepend $ parseM "depends"

            -- fields whose values are whitespace-separated, possibly
            -- quoted path lists
            let parseQuoted key =
                    case mapM (P.parseOnly (argsParser NoEscaping) . T.decodeUtf8) val of
                        Left{} -> throwM (Couldn'tParseField key val)
                        Right dirs -> return (concat dirs)
                  where
                    val = parseM key
            libDirPaths <- parseQuoted libDirKey
            haddockInterfaces <- parseQuoted "haddock-interfaces"
            haddockHtml <- parseQuoted "haddock-html"

            return $ Just DumpPackage
                { dpGhcPkgId = ghcPkgId
                , dpPackageIdent = PackageIdentifier name version
                , dpLibDirs = libDirPaths
                , dpLibraries = S8.words $ S8.unwords libraries
                , dpHasExposedModules = not (null libraries || null exposedModules)
                , dpDepends = catMaybes (depends :: [Maybe GhcPkgId])
                , dpHaddockInterfaces = haddockInterfaces
                , dpHaddockHtml = listToMaybe haddockHtml
                , dpProfiling = ()
                , dpHaddock = ()
                , dpIsExposed = exposed == ["True"]
                }
-- | Strip the given prefix from a 'ByteString'; 'Nothing' when it is
-- not actually a prefix.
stripPrefixBS :: ByteString -> ByteString -> Maybe ByteString
stripPrefixBS pre whole =
    if pre `S.isPrefixOf` whole
        then Just (S.drop (S.length pre) whole)
        else Nothing
-- | Strip the given suffix from a 'ByteString'; 'Nothing' when it is
-- not actually a suffix.
stripSuffixBS :: ByteString -> ByteString -> Maybe ByteString
stripSuffixBS suf whole =
    if suf `S.isSuffixOf` whole
        then Just (S.take (S.length whole - S.length suf) whole)
        else Nothing
-- | A single line of input, not including line endings
type Line = ByteString

-- | Apply the given Sink to each section of output, broken by a single line containing ---
eachSection :: Monad m
            => Sink Line m a
            -> Conduit ByteString m a
eachSection inner =
    CL.map (S.filter (/= _cr)) =$= CB.lines =$= start
  where
    _cr = 13  -- carriage return, stripped so CRLF input also works

    -- skip leading blank lines, then peek at the next real line
    peekBS = await >>= maybe (return Nothing) (\bs ->
        if S.null bs
            then peekBS
            else leftover bs >> return (Just bs))

    -- only start a section when there is any input left
    start = peekBS >>= maybe (return ()) (const go)

    go = do
        -- feed lines up to (not including) the "---" separator to inner
        x <- toConsumer $ takeWhileC (/= "---") =$= inner
        yield x
        CL.drop 1  -- drop the "---" separator line itself
        start
-- | Grab each key/value pair
--
-- A pair is a @key: value@ line; the value may instead (or also)
-- continue over following lines indented to the column just after the
-- colon and spaces.  Each pair's value lines are fed to @inner key@ and
-- the resulting sink value is yielded downstream.
eachPair :: Monad m
         => (ByteString -> Sink Line m a)
         -> Conduit Line m a
eachPair inner =
    start
  where
    start = await >>= maybe (return ()) start'

    _colon = 58  -- ':'
    _space = 32  -- ' '

    start' bs1 =
        toConsumer (valSrc =$= inner key) >>= yield >> start
      where
        (key, bs2) = S.break (== _colon) bs1
        (spaces, bs3) = S.span (== _space) $ S.drop 1 bs2
        -- column where continuation lines must be indented to
        indent = S.length key + 1 + S.length spaces

        -- value on the key line itself, plus matching continuations
        valSrc
            | S.null bs3 = noIndent
            | otherwise = yield bs3 >> loopIndent indent

        -- no value after the colon: the first continuation line's own
        -- indentation defines the indent for the rest of the value
        noIndent = do
            mx <- await
            case mx of
                Nothing -> return ()
                Just bs -> do
                    let (spaces, val) = S.span (== _space) bs
                    if S.length spaces == 0
                        then leftover val
                        else do
                            yield val
                            loopIndent (S.length spaces)

        -- yield lines for as long as they are indented exactly i spaces
        loopIndent i =
            loop
          where
            loop = await >>= maybe (return ()) go

            go bs
                | S.length spaces == i && S.all (== _space) spaces =
                    yield val >> loop
                | otherwise = leftover bs
              where
                (spaces, val) = S.splitAt i bs
-- | General purpose utility
--
-- Stream values downstream while the predicate holds; the first failing
-- value is put back with 'leftover' rather than consumed.
takeWhileC :: Monad m => (a -> Bool) -> Conduit a m a
takeWhileC p = go
  where
    go = await >>= maybe (return ()) step
    step x
        | p x = yield x >> go
        | otherwise = leftover x
| mathhun/stack | src/Stack/PackageDump.hs | bsd-3-clause | 16,673 | 23 | 37 | 5,382 | 4,236 | 2,181 | 2,055 | -1 | -1 |
{-# LANGUAGE ScopedTypeVariables #-}
module Pentomino.Find where
import qualified Pentomino.Position as P
import Pentomino.Cover (
roll_shift, pieces, covers, reach, unreach, form, orig, Figure
, halo, area, modify, figure_shift, Piece(..), container
, points, spoints
)
import Pentomino.Force ( step, total )
import qualified Autolib.Genetic as G
import Autolib.ToDoc
import Autolib.Util.Zufall
import Autolib.Util.Splits
import Data.Map ( Map )
import qualified Data.Map as M
import Data.Set ( Set )
import qualified Data.Set as S
import Data.Ix
import System.Environment
import System.IO
import Data.List ( partition )
import Control.Monad ( guard )
-- Entry points: 'main1' runs the genetic search, 'main2' the
-- hand-rolled hill-climbing loop in 'run'.
main = main1
main2 = run

main1 = do
    argv <- getArgs
    -- optional single argument: population scale factor (default 10)
    let z = case argv of
            [ zz ] -> read zz
            [] -> 10
    G.evolve $ conf z
-- | Configuration for the genetic search; @z@ scales the population
-- size and the number of mutations per generation.
conf z = G.Config
    { G.fitness = \ f -> let ( v, _ ) = evaluate f in v
    , G.threshold = ( 130, 0 )
      -- show the three best individuals of a generation
    , G.present = mapM_ ( \ (v,f) -> printf (toDoc v <+> form f ) )
                . reverse . take 3
    , G.trace = printf . map fst . take 5
    , G.size = 1 * z
    , G.generate = roll_shift
    , G.combine = undefined  -- never called: num_combine is 0
    , G.num_combine = 0 * z
    , G.mutate = improve 3
    , G.num_mutate = 2 * z
    , G.num_compact = z
    , G.num_steps = Nothing
    , G.num_parallel = 1
    }
-- | Print and flush immediately, so progress is visible when piped.
printf x = do print x ; hFlush stdout

-- | Number of distinct piece shapes used in a figure.
diversity f = S.size $ S.fromList $ map orig $ pieces f
---------------------------------------------------------------------------

-- All single-piece replacements of @f@, over every piece position.
complete_changes w f = do
    k <- [ 0 .. length ( pieces f ) - 1 ]
    changes_for w f k

-- All figures obtained by re-placing piece @k@ of @f@ within shift
-- distance @w@, avoiding the cells covered by the remaining pieces.
changes_for w f k = do
    let ( pre, this : post ) = splitAt k $ pieces f
        ( xs, _ : ys ) = splitAt k $ covers f
        -- covers of every piece except the one being moved
        rest = xs ++ ys
    that <- changes w rest this
    return $ figure_shift $ pre ++ that : post
-- All figures obtained by re-placing exactly @n@ pieces of @f@.  Pieces
-- are considered left to right; a kept piece must stay disjoint from
-- the pieces re-placed so far.
several_complete_changes width f n =
    let fun pre 0 ps = return $ reverse pre ++ ps
        fun pre n (p : ps) =
               -- branch 1: keep p unchanged (only if enough pieces remain
               -- to still perform n replacements)
               do guard $ n <= length ps
                  guard $ S.null $ S.intersection ( spoints p ) $ S.unions ( map spoints pre )
                  fun (p : pre ) n ps
               -- branch 2: re-place p somewhere legal
            ++ do q <- changes width ( map spoints $ pre ) p
                  fun (q : pre) (n-1) ps
    in map figure_shift $ fun [] n $ pieces f
-- | with swap
--
-- Exchange the shapes of pieces @i@ and @j@, then re-place each of them
-- (independently) against the covers of the remaining pieces.
changes2_for w f i j = do
    let ps = pieces f
        x0 = ps !! i
        y0 = ps !! j
        -- NOTE(review): `without` removes positions sequentially, so
        -- after removing i, an index j > i points one element further
        -- on -- confirm [i, j] is the intended removal order here.
        rest = covers f `without` [ i, j ]
    x <- changes w rest $ x0 { orig = orig y0 }
    y <- changes w rest $ y0 { orig = orig x0 }
    return $ figure_shift $ ps // [(i,x),(j,y)]
---------------------------------------------------------------------

-- | Positional list update: for each @(i, x)@, overwrite the element at
-- index @i@ with @x@, applied left to right.
xs // updates = foldl setAt xs updates
  where
    setAt ys (i, v) =
        case splitAt i ys of
            (pre, _ : post) -> pre ++ v : post
-- | Delete list elements by position, one index at a time, left to
-- right; note that each removal shifts the positions of the elements
-- after it.
xs `without` is = foldl dropAt xs is
  where
    dropAt ys i =
        case splitAt i ys of
            (pre, _ : post) -> pre ++ post
---------------------------------------------------------------------

-- All legal re-placements of @this@: every quarter-turn (0..3), both
-- mirrorings, and every shift within @w@ in each direction -- subject
-- to not touching any cell covered by @rest@.
changes w rest this = do
    let others = S.unions rest
    t <- [ 0 .. 3 ]
    m <- [ 0 .. 1 ]
    let bnd0 = ((negate w, negate w),(w,w))
    (sx,sy) <- range bnd0
    let p = this
            { turns = t
            , mirrors = m
            , shift = shift this + P.Position sx sy
            }
    -- keep only placements disjoint from all other pieces
    guard $ S.null $ S.intersection ( spoints p ) others
    return p
-- | Evaluate all candidates, keep the bucket with the best score, and
-- pick one of its members at random.
some_best fs = do
    -- group candidates by score
    let m = M.fromListWith S.union $ do
            f <- fs
            let ( v, _) = evaluate f
            return ( v, S.singleton f )
    -- toAscList is sorted by score, so 'last' is the best bucket
    let ( v, gs ) = last $ M.toAscList m
    g <- eins $ S.toList gs
    return ( v, g )
-- | Endless hill-climbing loop: mostly single-piece improvement, with
-- an occasional (1 in 11) double-piece move to escape plateaus.
run = do
    let width = 3
    f <- roll_shift
    let runner ( v, f ) = do
            printf $ toDoc v <+> form f
            r <- randomRIO ( 0, 10 :: Int )
            let action = if 0 == r
                    then improve_double_repeat width
                    else \ ( v,f) -> improve_simple width f
            (w, g) <- action (v, f)
            runner ( w, g )
    runner $ evaluate f
-- | From the candidates @fs@, pick at random one of the first few whose
-- score beats @v@; failing that, one whose score ties @v@; failing
-- that, one of the first few remaining (strictly worse) candidates.
--
-- The original computed @map evaluate fs@ twice and its inner case had
-- no @[]@ alternative, so it crashed with a pattern-match failure when
-- no candidate scored at least @v@; it now falls back to the worse
-- candidates instead of crashing.
first_best v fs =
    let scored = map evaluate fs
        ( better, rest )  = partition ( \ (w,_) -> w > v ) scored
        ( equal, worse )  = partition ( \ (w,_) -> w == v ) rest
    in case better of
        _ : _ -> eins $ take 5 better
        [] -> case equal of
            _ : _ -> eins $ take 5 equal
            []    -> eins $ take 5 worse
-- | Alternative driver: follow a fixed list of (width, pieces-to-move)
-- strategy steps, restarting the list on improvement and falling back
-- to 'improve_double_repeat' once the list is exhausted.
run2 = do
    f <- roll_shift
    let strat0 = [(3,1)]
    let runner strat ( v @ ( u,_ ) , f ) = do
            printf $ toDoc v <+> form f
            if null strat
                then do
                    -- strategy exhausted: force a double move
                    (w,g) <- improve_double_repeat 2 (v, f)
                    runner strat0 (w,g)
                else do
                    let (width,num) = head strat
                    print $ toDoc ( width, num )
                    ( w @ (u',_), g ) <- first_best v -- some_best
                        $ several_complete_changes width f num
                    -- restart strategy on improvement, otherwise take
                    -- the next step
                    let strat' = if w > v then strat0
                                 else tail strat
                    runner strat' ( w, g )
    runner strat0 $ evaluate f
-- | One deterministic improvement sweep: best replacement over all
-- piece positions (the score is discarded, only the figure returned).
improve_straight width f = do
    ( w, g ) <- some_best $ complete_changes width f
    return g
-- | Mutation operator: with probability 1/11 try a double-piece move,
-- otherwise a single-piece move; return only the improved figure.
improve width f = do
    r <- randomRIO ( 0, 10 :: Int )
    let action = if 0 == r then improve_double else improve_simple
    ( _score, g ) <- action width f
    return g
-- | Re-place one randomly chosen piece as well as possible.
improve_simple w f = do
    let n = length $ pieces f
    k <- eins [ 0 .. n-1 ]
    -- print $ text "simple/select:" <+> toDoc k <+> toDoc ( ['a' .. ] !! k )
    some_best $ changes_for w f k
-- | Retry 'improve_double' until it at least matches the given score.
-- NOTE(review): loops forever if no double move can ever reach @v@;
-- relies on the randomised search eventually succeeding.
improve_double_repeat width (v, f) = do
    (w,g) <- improve_double width f
    if w >= v then return( w, g)
              else improve_double_repeat width (v,f)
-- | Pick two distinct pieces, swap their shapes, and re-place each of
-- them in turn as well as possible.
improve_double w f = do
    let n = length $ pieces f
        ks = [ 0 .. n - 1 ]
    i <- eins ks
    j <- eins ( ks `without` [i] )
    -- print $ text "double/select:" <+> toDoc (i,j)
    let x = orig $ get f i ; y = orig $ get f j
    -- give piece i piece j's shape and re-place it ...
    ( _ , g ) <- some_best $ changes_for w ( reshape f i y ) i
    -- ... then give piece j piece i's shape and re-place that
    some_best $ changes_for w ( reshape g j x ) j
-- | Give piece @i@ of @f@ a new shape, keeping its placement.
reshape f i s =
    replace f i $ ( get f i ) { orig = s }

-- | Replace piece @i@ of @f@ wholesale.
replace f i p =
    f { pieces = pieces f // [ (i, p) ] }

-- | Fetch piece @i@ of @f@.
get f i = pieces f !! i

-- | Score a figure (paired with the figure itself): primarily by the
-- unreachable-area measure, breaking ties by preferring a small
-- bounding box.
evaluate f = ( (unreach f - 5 * 12, negate $ max_extension f ) , f )

-- | Longest side of the figure's bounding box.
max_extension f =
    let ((l,u),(r,o)) = container f
    in max (r-l) (o-u)
{-
swapper ( v, f ) = do
print $ text "stagnation"
i <- randomRIO ( 0, length ( pieces f ) - 2 )
j0 <- randomRIO ( 0, length ( pieces f ) - 2 )
let j = if j0 >= i then j0 + 1 else j0
printf $ text "select:" <+> toDoc (i,j)
case filter ( \ (w,g) -> w > v )
$ best2_for f i j of
(w,g) : _ -> do
printf $ toDoc v <+> form g
runner ( w, g )
[] -> do
runner ( v, f)
-}
---------------------------------------------------------------------
{-
glue f g = do
let gs = do
( p, o ) <- zip ( pieces g ) $ map orig $ pieces f
return $ p { orig = o }
k <- randomRIO ( 0, length ( pieces f ) )
let ps = reverse ( drop k gs ) ++ take k ( pieces f )
return $ figure_delta ps
merge f g = fmap figure_delta $ sequence $ do
k <- [ 0 .. length ( pieces f ) - 1 ]
return $ do
s <- randomRIO ( False, True )
return $ pieces ( if s then f else g ) !! k
-}
-- | Randomly nudge pieces: each piece is shifted (via 'modify'') with
-- probability 1/(number of pieces + 1), then the figure is renormalised.
change :: Figure -> IO Figure
change f = fmap figure_shift $ sequence $ do
    p <- pieces f
    return $ do
        k <- randomRIO ( 0, length $ pieces f )
        if k == 0 then modify' p else return p

-- | Shift a single piece by a random offset in [-1 .. 1] x [-1 .. 1].
modify' p = do
    dx <- eins [ -1 .. 1 ]
    dy <- eins [ -1 .. 1 ]
    return $ p { shift = shift p + P.Position dx dy }
-- | Distance (in the maximum metric) from the origin to the nearest
-- halo cell of the last piece's cover.
returning :: Figure -> Int
returning f = minimum $ do
    p <- S.toList $ halo $ last $ covers f
    return $ maximum $ map abs [ P.x p, P.y p ]
-- | penalty for overlapping figures
--
-- For every cell, count how many pieces cover it, then sum those counts
-- over the cells covered more than once.
overlaps :: Figure -> Int
overlaps f =
    let m = M.fromListWith S.union $ do
            (k, c) <- zip [ 0 .. ] $ covers f
            x <- S.toList c
            return ( x, S.singleton k )
    in sum $ filter ( > 1 )
           $ map S.size
           $ M.elems m
| Erdwolf/autotool-bonn | src/Pentomino/Find.hs | gpl-2.0 | 8,071 | 36 | 20 | 2,918 | 2,750 | 1,468 | 1,282 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Bead.View.Content.GroupRegistration.Page (
groupRegistration
, unsubscribeFromCourse
) where
import Control.Monad
import Control.Arrow ((***))
import Data.List (intersperse)
import Data.String (fromString)
import Text.Blaze.Html5 as H hiding (map)
import Bead.Controller.UserStories (availableGroups, attendedGroups)
import qualified Bead.Controller.Pages as Pages
import Bead.View.Content
import qualified Bead.View.Content.Bootstrap as Bootstrap
-- | Page handler: GET renders the registration page, POST subscribes
-- the user to the selected group.
groupRegistration = ViewModifyHandler groupRegistrationPage postGroupReg

-- | POST-only handler that unsubscribes the user from the group named
-- by the request parameter.
unsubscribeFromCourse =
    ModifyHandler (UnsubscribeFromCourse <$> getParameter unsubscribeUserGroupKeyPrm)
-- | Everything the registration page needs to render.
data GroupRegData = GroupRegData {
    groups :: [(GroupKey, GroupDesc)]
    -- ^ groups the user could still register for
  , groupsRegistered :: [(GroupKey, GroupDesc, Bool)]
    -- ^ groups already attended; the Bool controls whether the
    -- unregister button is shown (presumably "has a submission" --
    -- confirm against 'attendedGroups')
  }
-- | Read the selected group key from the POSTed registration form.
postGroupReg :: POSTContentHandler
postGroupReg = SubscribeToGroup
    <$> getParameter (jsonGroupKeyPrm (fieldName groupRegistrationField))
-- | Collect the user's attended groups plus the groups still open to
-- them, then render the page.
groupRegistrationPage :: GETContentHandler
groupRegistrationPage = do
    desc <- userStory $ do
        as <- attendedGroups
        let attendedGroupKeys = map fst3 as
            newGroupForUser (gk,_) = not (elem gk attendedGroupKeys)
        -- offer only groups the user is not already attending
        gs <- (filter newGroupForUser) <$> availableGroups
        return GroupRegData {
            groups = gs
          , groupsRegistered = as
          }
    return $ groupRegistrationContent desc
  where
    fst3 (f,_,_) = f
-- | Render the page: the already-registered groups first, then the
-- registration form for the remaining ones.
groupRegistrationContent :: GroupRegData -> IHtml
groupRegistrationContent desc = do
    msg <- getI18N
    return $ do
        let registeredGroups = groupsRegistered desc
        Bootstrap.rowColMd12 $ do
            H.h3 $ fromString $ msg $ msg_GroupRegistration_RegisteredCourses "Registered courses"
            i18n msg $ groupsAlreadyRegistered registeredGroups
        -- show the unregistration caveat only when it is relevant
        when (not . null $ registeredGroups) $ Bootstrap.rowColMd12 $ do
            H.p $ (fromString . msg $ msg_GroupRegistration_Warning $ concat
                [ "It is possible to quit from a group or move between groups until a submission is "
                , "submitted. Otherwise, the teacher of the given group should be asked to undo the "
                , "group registration."
                ])
        Bootstrap.rowColMd12 $ do
            H.h3 $ (fromString . msg $ msg_GroupRegistration_SelectGroup "Select course and group")
            i18n msg $ groupsForTheUser (groups desc)
        Bootstrap.turnSelectionsOn
-- | Table of the groups the user is registered in, with an unregister
-- button per row unless a submission already exists for that group.
groupsAlreadyRegistered :: [(GroupKey, GroupDesc, Bool)] -> IHtml
groupsAlreadyRegistered ds = do
    msg <- getI18N
    return $ nonEmpty ds
        (fromString . msg $ msg_GroupRegistration_NoRegisteredCourses
            "No registered courses. Choose a group.")
        (Bootstrap.table $ do
            thead $ H.tr $ do
                H.th . fromString . msg $ msg_GroupRegistration_Courses "Groups"
                H.th . fromString . msg $ msg_GroupRegistration_Admins "Teachers"
                H.th . fromString . msg $ msg_GroupRegistration_Unsubscribe "Unregister"
            tbody $ mapM_ (groupLine msg) ds)
  where
    -- NB: shadows the top-level handler of the same name; builds the
    -- page value used for the unsubscribe route of group @k@
    unsubscribeFromCourse k = Pages.unsubscribeFromCourse k ()

    -- one table row: group name, admins, unregister control
    groupLine msg (key, desc, hasSubmission) = flip groupDescFold desc $ \n as -> do
        H.tr $ do
            H.td $ fromString n
            H.td $ fromString $ join $ intersperse " " as
            H.td $
                if hasSubmission
                    then (fromString . msg $ msg_GroupRegistration_NoUnsubscriptionAvailable
                        "Unregistration is not allowed.")
                    else postForm (routeOf $ unsubscribeFromCourse key) $
                        Bootstrap.smallSubmitButton
                            (fieldName unsubscribeFromCourseSubmitBtn)
                            (msg $ msg_GroupRegistration_Unsubscribe "Unregister")
-- | Registration form: a selection of the available groups plus a
-- submit button, or a placeholder message when nothing is available.
groupsForTheUser :: [(GroupKey, GroupDesc)] -> IHtml
groupsForTheUser gs = do
    msg <- getI18N
    return $
        nonEmpty gs
            (Bootstrap.rowColMd12 $ p $ fromString . msg $ msg_GroupRegistration_NoAvailableCourses "There are no available groups yet.") $
            postForm (routeOf groupRegistration) $ do
                Bootstrap.selection (fieldName groupRegistrationField) (const False) (map (id *** descriptive) gs)
                Bootstrap.submitButton (fieldName regGroupSubmitBtn) (msg $ msg_GroupRegistration_Register "Register")
  where
    groupRegistration = Pages.groupRegistration ()

    -- human-readable option label: "<group name> / <admin1> , <admin2>"
    descriptive :: GroupDesc -> String
    descriptive g = join [gName g, " / ", join (intersperse " , " (gAdmins g))]
| andorp/bead | src/Bead/View/Content/GroupRegistration/Page.hs | bsd-3-clause | 4,339 | 0 | 20 | 963 | 1,090 | 559 | 531 | 88 | 2 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
-- | This module provides a simple dialog widget. You get to pick the
-- dialog title, if any, as well as its body and buttons.
module Brick.Widgets.Dialog
( Dialog
, dialogTitle
, dialogName
, dialogButtons
, dialogSelectedIndex
, dialogWidth
-- * Construction and rendering
, dialog
, renderDialog
-- * Getting a dialog's current value
, dialogSelection
-- * Attributes
, dialogAttr
, buttonAttr
, buttonSelectedAttr
-- * Lenses
, dialogNameL
, dialogButtonsL
, dialogSelectedIndexL
, dialogWidthL
, dialogTitleL
)
where
import Control.Lens
import Control.Applicative
import Data.Monoid
import Data.List (intersperse)
import Graphics.Vty.Input (Event(..), Key(..))
import Brick.Util (clamp)
import Brick.Types
import Brick.Widgets.Core
import Brick.Widgets.Center
import Brick.Widgets.Border
import Brick.AttrMap
-- | Dialogs present a window with a title (optional), a body, and
-- buttons (optional). They provide a 'HandleEvent' instance that knows
-- about Tab and Shift-Tab for changing which button is active. Dialog
-- buttons are labeled with strings and map to values of type 'a', which
-- you choose.
--
-- Dialogs handle the following events by default:
--
-- * Tab: select the next button
-- * Shift-tab: select the previous button
data Dialog a =
    Dialog { dialogName :: Name
           -- ^ The dialog name
           , dialogTitle :: Maybe String
           -- ^ The dialog title
           , dialogButtons :: [(String, a)]
           -- ^ The dialog button labels and values
           , dialogSelectedIndex :: Maybe Int
           -- ^ The currently selected dialog button index (if any)
           , dialogWidth :: Int
           -- ^ The maximum width of the dialog
           }

-- Generates the *L lenses (dialogNameL etc.) listed in the export list.
suffixLenses ''Dialog
-- Tab cycles forward through the buttons, Shift-Tab backward; all
-- other events leave the dialog unchanged.
instance HandleEvent (Dialog a) where
    handleEvent ev d =
        case ev of
            EvKey (KChar '\t') [] -> return $ nextButtonBy 1 d
            EvKey KBackTab [] -> return $ nextButtonBy (-1) d
            _ -> return d
-- | Create a dialog.
dialog :: Name
       -- ^ The dialog name, provided so that you can use this as a
       -- basis for viewport names in the dialog if desired
       -> Maybe String
       -- ^ The dialog title
       -> Maybe (Int, [(String, a)])
       -- ^ The currently-selected button index (starting at zero) and
       -- the button labels and values to use
       -> Int
       -- ^ The maximum width of the dialog
       -> Dialog a
dialog name title buttonData w =
    let (buttons, idx) = case buttonData of
            Nothing -> ([], Nothing)
            Just (_, []) -> ([], Nothing)
            -- clamp the requested index into the valid button range
            Just (i, bs) -> (bs, Just $ clamp 0 (length bs - 1) i)
    in Dialog name title buttons idx w
-- | The default attribute of the dialog
dialogAttr :: AttrName
dialogAttr = "dialog"

-- | The default attribute for all dialog buttons
buttonAttr :: AttrName
buttonAttr = "button"

-- | The attribute for the selected dialog button (extends 'dialogAttr')
buttonSelectedAttr :: AttrName
buttonSelectedAttr = buttonAttr <> "selected"
-- | Render a dialog with the specified body widget.
--
-- The body goes inside a width-limited, optionally-titled border, with
-- the button row centered underneath it.
renderDialog :: Dialog a -> Widget -> Widget
renderDialog d body =
    let buttonPadding = str " "
        -- one button, highlighted when its index is the selected one
        mkButton (i, (s, _)) = let att = if Just i == d^.dialogSelectedIndexL
                                         then buttonSelectedAttr
                                         else buttonAttr
                               in withAttr att $ str $ " " <> s <> " "
        buttons = hBox $ intersperse buttonPadding $
                  mkButton <$> (zip [0..] (d^.dialogButtonsL))
        -- plain border, or a border labeled with the title if present
        doBorder = maybe border borderWithLabel (str <$> d^.dialogTitleL)
    in center $
       withDefAttr dialogAttr $
       hLimit (d^.dialogWidthL) $
       doBorder $
       vBox [ body
            , hCenter buttons
            ]
-- | Move the button selection by the given amount (positive = forward,
-- negative = backward), wrapping around.  A no-op when there are no
-- buttons; selects the first button when nothing was selected yet.
nextButtonBy :: Int -> Dialog a -> Dialog a
nextButtonBy amt d =
    let numButtons = length $ d^.dialogButtonsL
    in if numButtons == 0 then d
       else case d^.dialogSelectedIndexL of
           Nothing -> d & dialogSelectedIndexL .~ (Just 0)
           Just i -> d & dialogSelectedIndexL .~ (Just $ (i + amt) `mod` numButtons)
-- | Obtain the value associated with the dialog's currently-selected
-- button, if any. This function is probably what you want when someone
-- presses 'Enter' in a dialog.
dialogSelection :: Dialog a -> Maybe a
dialogSelection d =
    case d^.dialogSelectedIndexL of
        Nothing -> Nothing
        -- the (!!) is in range for dialogs built via 'dialog' and
        -- stepped via 'nextButtonBy', which clamp/wrap the index
        Just i -> Just $ ((d^.dialogButtonsL) !! i)^._2
| ktvoelker/brick | src/Brick/Widgets/Dialog.hs | bsd-3-clause | 4,590 | 0 | 17 | 1,273 | 948 | 528 | 420 | 89 | 3 |
-- (c) The University of Glasgow 2006
{-# LANGUAGE CPP, FlexibleInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-} -- instance MonadThings is necessarily an
-- orphan
module TcEnv(
TyThing(..), TcTyThing(..), TcId,
-- Instance environment, and InstInfo type
InstInfo(..), iDFunId, pprInstInfoDetails,
simpleInstInfoClsTy, simpleInstInfoTy, simpleInstInfoTyCon,
InstBindings(..),
-- Global environment
tcExtendGlobalEnv, tcExtendGlobalEnvImplicit, setGlobalTypeEnv,
tcExtendGlobalValEnv,
tcLookupLocatedGlobal, tcLookupGlobal,
tcLookupTyCon, tcLookupClass,
tcLookupDataCon, tcLookupPatSyn, tcLookupConLike,
tcLookupLocatedGlobalId, tcLookupLocatedTyCon,
tcLookupLocatedClass, tcLookupAxiom,
lookupGlobal,
-- Local environment
tcExtendKindEnv, tcExtendKindEnv2,
tcExtendTyVarEnv, tcExtendTyVarEnv2,
tcExtendLetEnv, tcExtendLetEnvIds,
tcExtendIdEnv, tcExtendIdEnv1, tcExtendIdEnv2,
tcExtendIdBndrs, tcExtendLocalTypeEnv,
isClosedLetBndr,
tcLookup, tcLookupLocated, tcLookupLocalIds,
tcLookupId, tcLookupTyVar,
tcLookupLcl_maybe,
getScopedTyVarBinds, getInLocalScope,
wrongThingErr, pprBinders,
tcExtendRecEnv, -- For knot-tying
-- Instances
tcLookupInstance, tcGetInstEnvs,
-- Rules
tcExtendRules,
-- Defaults
tcGetDefaultTys,
-- Global type variables
tcGetGlobalTyVars,
-- Template Haskell stuff
checkWellStaged, tcMetaTy, thLevel,
topIdLvl, isBrackStage,
-- New Ids
newLocalName, newDFunName, newDFunName', newFamInstTyConName,
newFamInstAxiomName,
mkStableIdFromString, mkStableIdFromName,
mkWrapperName
) where
#include "HsVersions.h"
import HsSyn
import IfaceEnv
import TcRnMonad
import TcMType
import TcType
import LoadIface
import PrelNames
import TysWiredIn
import Id
import IdInfo( IdDetails(VanillaId) )
import Var
import VarSet
import RdrName
import InstEnv
import DataCon ( DataCon )
import PatSyn ( PatSyn )
import ConLike
import TyCon
import CoAxiom
import TypeRep
import Class
import Name
import NameEnv
import VarEnv
import HscTypes
import DynFlags
import SrcLoc
import BasicTypes hiding( SuccessFlag(..) )
import Module
import Outputable
import Encoding
import FastString
import ListSetOps
import Util
import Maybes( MaybeErr(..) )
import Data.IORef
import Data.List
{- *********************************************************************
* *
An IO interface to looking up globals
* *
********************************************************************* -}
lookupGlobal :: HscEnv -> Name -> IO TyThing
-- An IO version, used outside the typechecker
-- It's more complicated than it looks, because it may
-- need to suck in an interface file
lookupGlobal hsc_env name
  = initTcForLookup hsc_env (tcLookupGlobal name)
    -- This initTcForLookup stuff is massive overkill
    -- but that's how it is right now, and at least
    -- this function localises it
{-
************************************************************************
* *
* tcLookupGlobal *
* *
************************************************************************
Using the Located versions (eg. tcLookupLocatedGlobal) is preferred,
unless you know that the SrcSpan in the monad is already set to the
span of the Name.
-}
tcLookupLocatedGlobal :: Located Name -> TcM TyThing
-- Set the error span from the Located wrapper, then do a global lookup.
-- c.f. IfaceEnv.tcIfaceGlobal
tcLookupLocatedGlobal = addLocM tcLookupGlobal
tcLookupGlobal :: Name -> TcM TyThing
-- The Name is almost always an ExternalName, but not always
-- In GHCi, we may make command-line bindings (ghci> let x = True)
-- that bind a GlobalId, but with an InternalName
tcLookupGlobal name
  = do  {    -- Try local envt
          env <- getGblEnv
        ; case lookupNameEnv (tcg_type_env env) name of {
                Just thing -> return thing ;
                Nothing    ->

                -- Should it have been in the local envt?
          if nameIsLocalOrFrom (tcg_mod env) name
          then notFound name  -- Internal names can happen in GHCi
          else

           -- Try home package table and external package table
          do { mb_thing <- tcLookupImported_maybe name
             ; case mb_thing of
                 Succeeded thing -> return thing
                 Failed msg      -> failWithTc msg
             }}}
tcLookupDataCon :: Name -> TcM DataCon
-- Look up a global Name and insist that it is a real (non-pattern-synonym)
-- data constructor; report a "wrong thing" error otherwise.
tcLookupDataCon name =
    tcLookupGlobal name >>= \thing ->
      case thing of
        AConLike (RealDataCon con) -> return con
        _ -> wrongThingErr "data constructor" (AGlobal thing) name
tcLookupPatSyn :: Name -> TcM PatSyn
-- Look up a global Name and insist that it is a pattern synonym.
tcLookupPatSyn name =
    tcLookupGlobal name >>= \thing ->
      case thing of
        AConLike (PatSynCon ps) -> return ps
        _ -> wrongThingErr "pattern synonym" (AGlobal thing) name
tcLookupConLike :: Name -> TcM ConLike
-- Look up a global Name and insist that it is constructor-like
-- (a data constructor or a pattern synonym).
tcLookupConLike name =
    tcLookupGlobal name >>= \thing ->
      case thing of
        AConLike cl -> return cl
        _ -> wrongThingErr "constructor-like thing" (AGlobal thing) name
tcLookupClass :: Name -> TcM Class
-- Look up a global Name and insist that it is the TyCon of a class.
tcLookupClass name =
    tcLookupGlobal name >>= \thing ->
      case thing of
        ATyCon tc | Just cls <- tyConClass_maybe tc -> return cls
        _ -> wrongThingErr "class" (AGlobal thing) name
tcLookupTyCon :: Name -> TcM TyCon
-- Look up a global Name and insist that it is a type constructor.
tcLookupTyCon name =
    tcLookupGlobal name >>= \thing ->
      case thing of
        ATyCon tc -> return tc
        _ -> wrongThingErr "type constructor" (AGlobal thing) name
tcLookupAxiom :: Name -> TcM (CoAxiom Branched)
-- Look up a global Name and insist that it is a coercion axiom.
tcLookupAxiom name =
    tcLookupGlobal name >>= \thing ->
      case thing of
        ACoAxiom ax -> return ax
        _ -> wrongThingErr "axiom" (AGlobal thing) name
-- Located variants: each sets the error-message source span from the
-- 'Located' wrapper before performing the corresponding lookup.
tcLookupLocatedGlobalId :: Located Name -> TcM Id
tcLookupLocatedGlobalId = addLocM tcLookupId

tcLookupLocatedClass :: Located Name -> TcM Class
tcLookupLocatedClass = addLocM tcLookupClass

tcLookupLocatedTyCon :: Located Name -> TcM TyCon
tcLookupLocatedTyCon = addLocM tcLookupTyCon
-- Find the instance that exactly matches a type class application.  The
-- class arguments must be precisely the same as in the instance
-- declaration (modulo renaming).
tcLookupInstance :: Class -> [Type] -> TcM ClsInst
tcLookupInstance cls tys
  = do { instEnv <- tcGetInstEnvs
       ; case lookupUniqueInstEnv instEnv cls tys of
           Left err -> failWithTc $ ptext (sLit "Couldn't match instance:") <+> err
           Right (inst, tys)   -- NB: this 'tys' (the instantiation of the
                               -- instance variables) shadows the argument 'tys'
             | uniqueTyVars tys -> return inst
             | otherwise        -> failWithTc errNotExact
       }
  where
    errNotExact = ptext (sLit "Not an exact match (i.e., some variables get instantiated)")

    -- The match is "exact" iff the instance head was instantiated with
    -- nothing but pairwise-distinct type variables
    uniqueTyVars tys = all isTyVarTy tys && hasNoDups (map extractTyVar tys)
      where
        extractTyVar (TyVarTy tv) = tv
        extractTyVar _            = panic "TcEnv.tcLookupInstance: extractTyVar"
tcGetInstEnvs :: TcM InstEnvs
-- Gets both the external-package inst-env
-- and the home-pkg inst env (includes module being compiled)
tcGetInstEnvs = do { eps <- getEps
                   ; env <- getGblEnv
                   ; return (InstEnvs { ie_global  = eps_inst_env eps
                                      , ie_local   = tcg_inst_env env
                                      , ie_visible = tcVisibleOrphanMods env }) }
-- Lets generic code that needs 'lookupThing' run in the typechecker
-- monad.  Necessarily an orphan instance; see the -fno-warn-orphans
-- pragma at the top of this module.
instance MonadThings (IOEnv (Env TcGblEnv TcLclEnv)) where
    lookupThing = tcLookupGlobal
{-
************************************************************************
* *
Extending the global environment
* *
************************************************************************
-}
setGlobalTypeEnv :: TcGblEnv -> TypeEnv -> TcM TcGblEnv
-- Use this to update the global type env
-- It updates both  * the normal tcg_type_env field
--                  * the tcg_type_env_var field seen by interface files
setGlobalTypeEnv tcg_env new_type_env
  = do  {     -- Sync the type-envt variable seen by interface files
          writeMutVar (tcg_type_env_var tcg_env) new_type_env
        ; return (tcg_env { tcg_type_env = new_type_env }) }


tcExtendGlobalEnvImplicit :: [TyThing] -> TcM r -> TcM r
  -- Extend the global environment with some TyThings that can be obtained
  -- via implicitTyThings from other entities in the environment.  Examples
  -- are dfuns, famInstTyCons, data cons, etc.
  -- These TyThings are not added to tcg_tcs.
tcExtendGlobalEnvImplicit things thing_inside
   = do { tcg_env <- getGblEnv
        ; let ge'  = extendTypeEnvList (tcg_type_env tcg_env) things
        ; tcg_env' <- setGlobalTypeEnv tcg_env ge'
        ; setGblEnv tcg_env' thing_inside }

tcExtendGlobalEnv :: [TyThing] -> TcM r -> TcM r
  -- Given a mixture of Ids, TyCons, Classes, all defined in the
  -- module being compiled, extend the global environment
  -- (also recording the TyCons and pattern synonyms in tcg_tcs/tcg_patsyns)
tcExtendGlobalEnv things thing_inside
  = do { env <- getGblEnv
       ; let env' = env { tcg_tcs = [tc | ATyCon tc <- things] ++ tcg_tcs env,
                          tcg_patsyns = [ps | AConLike (PatSynCon ps) <- things] ++ tcg_patsyns env }
       ; setGblEnv env' $
            tcExtendGlobalEnvImplicit things thing_inside
       }
tcExtendGlobalValEnv :: [Id] -> TcM a -> TcM a
-- Same deal as tcExtendGlobalEnv, but for Ids
tcExtendGlobalValEnv ids thing_inside
  = tcExtendGlobalEnvImplicit (map AnId ids) thing_inside
tcExtendRecEnv :: [(Name,TyThing)] -> TcM r -> TcM r
-- Extend the global environments for the type/class knot tying game
-- Just like tcExtendGlobalEnv, except the argument is a list of pairs
tcExtendRecEnv gbl_stuff thing_inside
 = do  { tcg_env <- getGblEnv
       ; let ge' = extendNameEnvList (tcg_type_env tcg_env) gbl_stuff
       ; tcg_env' <- setGlobalTypeEnv tcg_env ge'
       ; setGblEnv tcg_env' thing_inside }
{-
************************************************************************
* *
\subsection{The local environment}
* *
************************************************************************
-}
-- Look up a located Name, local environment first; errors are reported
-- at the name's own source span.
tcLookupLocated :: Located Name -> TcM TcTyThing
tcLookupLocated = addLocM tcLookup
tcLookupLcl_maybe :: Name -> TcM (Maybe TcTyThing)
-- Search only the *local* type environment; Nothing if absent.
tcLookupLcl_maybe name
  = fmap (\local_env -> lookupNameEnv local_env name) getLclTypeEnv
tcLookup :: Name -> TcM TcTyThing
-- Try the local type environment first; fall back to a global lookup,
-- wrapping the result in AGlobal.
tcLookup name = do
    local_env <- getLclTypeEnv
    maybe (fmap AGlobal (tcLookupGlobal name)) return
          (lookupNameEnv local_env name)
tcLookupTyVar :: Name -> TcM TcTyVar
-- Panics unless the Name is bound to an ATyVar.
tcLookupTyVar name
  = do { thing <- tcLookup name
       ; case thing of
           ATyVar _ tv -> return tv
           _           -> pprPanic "tcLookupTyVar" (ppr name) }

tcLookupId :: Name -> TcM Id
-- Used when we aren't interested in the binding level, nor refinement.
-- The "no refinement" part means that we return the un-refined Id regardless
--
-- The Id is never a DataCon. (Why does that matter? see TcExpr.tcId)
tcLookupId name = do
    thing <- tcLookup name
    case thing of
        ATcId { tct_id = id} -> return id
        AGlobal (AnId id)    -> return id
        _                    -> pprPanic "tcLookupId" (ppr name)

tcLookupLocalIds :: [Name] -> TcM [TcId]
-- We expect the variables to all be bound, and all at
-- the same level as the lookup.  Only used in one place...
tcLookupLocalIds ns
  = do { env <- getLclEnv
       ; return (map (lookup (tcl_env env)) ns) }
  where
    -- Total only on names actually bound to ATcIds; panics otherwise
    lookup lenv name
      = case lookupNameEnv lenv name of
          Just (ATcId { tct_id = id }) ->  id
          _ -> pprPanic "tcLookupLocalIds" (ppr name)
getInLocalScope :: TcM (Name -> Bool)
-- Ids only: a membership predicate over the local type environment.
getInLocalScope = fmap (flip elemNameEnv) getLclTypeEnv
tcExtendKindEnv2 :: [(Name, TcTyThing)] -> TcM r -> TcM r
-- Used only during kind checking, for TcThings that are
--      AThing or APromotionErr
-- No need to update the global tyvars, or tcl_th_bndrs, or tcl_rdr
tcExtendKindEnv2 things thing_inside
  = updLclEnv upd_env thing_inside
  where
    upd_env env = env { tcl_env = extendNameEnvList (tcl_env env) things }
tcExtendKindEnv :: [(Name, TcKind)] -> TcM r -> TcM r
-- Wrap each kind as an AThing and defer to tcExtendKindEnv2.
tcExtendKindEnv name_kind_prs
  = tcExtendKindEnv2 (map (\(n, k) -> (n, AThing k)) name_kind_prs)
-----------------------
-- Scoped type and kind variables

tcExtendTyVarEnv :: [TyVar] -> TcM r -> TcM r
-- Bring the given type variables into scope, keyed by their own names.
tcExtendTyVarEnv tvs thing_inside
  = tcExtendTyVarEnv2 (map (\tv -> (tyVarName tv, tv)) tvs) thing_inside
tcExtendTyVarEnv2 :: [(Name,TcTyVar)] -> TcM r -> TcM r
-- Bring (name, tyvar) pairs into scope, and grow the tidy-env so that
-- error messages print these tyvars with the programmer's OccNames.
tcExtendTyVarEnv2 binds thing_inside
  = do { tc_extend_local_env NotTopLevel
                    [(name, ATyVar name tv) | (name, tv) <- binds] $
         do { env <- getLclEnv
            ; let env' = env { tcl_tidy = add_tidy_tvs (tcl_tidy env) }
            ; setLclEnv env' thing_inside }}
  where
    add_tidy_tvs env = foldl add env binds

    -- We initialise the "tidy-env", used for tidying types before printing,
    -- by building a reverse map from the in-scope type variables to the
    -- OccName that the programmer originally used for them
    add :: TidyEnv -> (Name, TcTyVar) -> TidyEnv
    add (env,subst) (name, tyvar)
      = case tidyOccName env (nameOccName name) of
          (env', occ') ->  (env', extendVarEnv subst tyvar tyvar')
            where
              tyvar' = setTyVarName tyvar name'
              name'  = tidyNameOcc name occ'
getScopedTyVarBinds :: TcM [(Name, TcTyVar)]
-- All the ATyVar bindings currently in the local type environment.
getScopedTyVarBinds
  = do  { lcl_env <- getLclEnv
        ; return [(name, tv) | ATyVar name tv <- nameEnvElts (tcl_env lcl_env)] }
isClosedLetBndr :: Id -> TopLevelFlag
-- See Note [Bindings with closed types] in TcRnTypes
-- Note that we decided if a let-bound variable is closed by
-- looking at its type, which is slightly more liberal, and a whole
-- lot easier to implement, than looking at its free variables
isClosedLetBndr id =
    if isEmptyVarSet (tyVarsOfType (idType id))
    then TopLevel
    else NotTopLevel
tcExtendLetEnv :: TopLevelFlag -> [TcId] -> TcM a -> TcM a
-- Used for both top-level value bindings and nested let/where-bindings
-- Adds to the TcIdBinderStack too
tcExtendLetEnv top_lvl ids thing_inside
  = tcExtendIdBndrs [TcIdBndr id top_lvl | id <- ids] $
    tcExtendLetEnvIds top_lvl [(idName id, id) | id <- ids] thing_inside

tcExtendLetEnvIds :: TopLevelFlag -> [(Name,TcId)] -> TcM a -> TcM a
-- Used for both top-level value bindings and nested let/where-bindings
-- Does not extend the TcIdBinderStack
tcExtendLetEnvIds top_lvl pairs thing_inside
  = tc_extend_local_env top_lvl [ (name, ATcId { tct_id = id
                                               , tct_closed = isClosedLetBndr id })
                                | (name,id) <- pairs ] $
    thing_inside
tcExtendIdEnv :: [TcId] -> TcM a -> TcM a
-- For lambda-bound and case-bound Ids
-- Extends the TcIdBinderStack as well
tcExtendIdEnv ids thing_inside
  = tcExtendIdEnv2 (map (\an_id -> (idName an_id, an_id)) ids) thing_inside
tcExtendIdEnv1 :: Name -> TcId -> TcM a -> TcM a
-- Exactly like tcExtendIdEnv2, but for a single (name,id) pair
tcExtendIdEnv1 name one_id thing_inside
  = tcExtendIdEnv2 [(name, one_id)] thing_inside
tcExtendIdEnv2 :: [(Name,TcId)] -> TcM a -> TcM a
-- Workhorse for lambda/case-bound Ids: pushes TcIdBinders (for error
-- messages) and extends the local type env.  Such bindings are never
-- closed, hence NotTopLevel throughout.
tcExtendIdEnv2 names_w_ids thing_inside
  = tcExtendIdBndrs [ TcIdBndr mono_id NotTopLevel
                    | (_,mono_id) <- names_w_ids ] $
    do  { tc_extend_local_env NotTopLevel
                  [ (name, ATcId { tct_id = id
                                 , tct_closed = NotTopLevel })
                  | (name,id) <- names_w_ids] $
          thing_inside }
tc_extend_local_env :: TopLevelFlag -> [(Name, TcTyThing)]
                    -> TcM a -> TcM a
tc_extend_local_env top_lvl extra_env thing_inside
-- Precondition: the argument list extra_env has TcTyThings
--               that ATcId or ATyVar, but nothing else
--
-- Invariant: the ATcIds are fully zonked. Reasons:
--      (a) The kinds of the forall'd type variables are defaulted
--          (see Kind.defaultKind, done in zonkQuantifiedTyVar)
--      (b) There are no via-Indirect occurrences of the bound variables
--          in the types, because instantiation does not look through such things
--      (c) The call to tyVarsOfTypes is ok without looking through refs
--
-- NOTE(review): the sentence below about a "second argument of type
-- TyVarSet" looks stale -- this version takes no such argument.
-- The second argument of type TyVarSet is a set of type variables
-- that are bound together with extra_env and should not be regarded
-- as free in the types of extra_env.
  = do  { traceTc "env2" (ppr extra_env)
        ; env0 <- getLclEnv
        ; env1 <- tcExtendLocalTypeEnv env0 extra_env
        ; stage <- getStage
        ; let env2 = extend_local_env (top_lvl, thLevel stage) extra_env env1
        ; setLclEnv env2 thing_inside }
  where
    extend_local_env :: (TopLevelFlag, ThLevel) -> [(Name, TcTyThing)] -> TcLclEnv -> TcLclEnv
    -- Extend the local LocalRdrEnv and Template Haskell staging env simultaneously
    -- Reason for extending LocalRdrEnv: after running a TH splice we need
    -- to do renaming.
    extend_local_env thlvl pairs env@(TcLclEnv { tcl_rdr = rdr_env
                                               , tcl_th_bndrs = th_bndrs })
      = env { tcl_rdr      = extendLocalRdrEnvList rdr_env
                                [ n | (n, _) <- pairs, isInternalName n ]
                             -- The LocalRdrEnv contains only non-top-level names
                             -- (GlobalRdrEnv handles the top level)
            , tcl_th_bndrs = extendNameEnvList th_bndrs  -- We only track Ids in tcl_th_bndrs
                                 [(n, thlvl) | (n, ATcId {}) <- pairs] }
tcExtendLocalTypeEnv :: TcLclEnv -> [(Name, TcTyThing)] -> TcM TcLclEnv
-- Add the bindings to tcl_env; when any of them mention free type
-- variables, also grow the global-tyvar set (tcl_tyvars) so those
-- variables are never generalised over.  See Note [Global TyVars].
tcExtendLocalTypeEnv lcl_env@(TcLclEnv { tcl_env = lcl_type_env }) tc_ty_things
  | isEmptyVarSet extra_tvs   -- Fast path: nothing new for the tyvar set
  = return (lcl_env { tcl_env = extendNameEnvList lcl_type_env tc_ty_things })
  | otherwise
  = do { global_tvs <- readMutVar (tcl_tyvars lcl_env)
       ; new_g_var  <- newMutVar (global_tvs `unionVarSet` extra_tvs)
       ; return (lcl_env { tcl_tyvars = new_g_var
                         , tcl_env = extendNameEnvList lcl_type_env tc_ty_things } ) }
  where
    -- Free type variables contributed by the new bindings
    extra_tvs = foldr get_tvs emptyVarSet tc_ty_things

    get_tvs (_, ATcId { tct_id = id, tct_closed = closed }) tvs
      = case closed of
          TopLevel    -> ASSERT2( isEmptyVarSet id_tvs, ppr id $$ ppr (idType id) )
                         tvs
          NotTopLevel -> tvs `unionVarSet` id_tvs
      where id_tvs = tyVarsOfType (idType id)
    get_tvs (_, ATyVar _ tv) tvs          -- See Note [Global TyVars]
      = tvs `unionVarSet` tyVarsOfType (tyVarKind tv) `extendVarSet` tv
    get_tvs (_, AThing k) tvs = tvs `unionVarSet` tyVarsOfType k
    get_tvs (_, AGlobal {})       tvs = tvs
    get_tvs (_, APromotionErr {}) tvs = tvs
-- Note [Global TyVars]
-- It's important to add the in-scope tyvars to the global tyvar set
-- as well. Consider
-- f (_::r) = let g y = y::r in ...
-- Here, g mustn't be generalised. This is also important during
-- class and instance decls, when we mustn't generalise the class tyvars
-- when typechecking the methods.
--
-- Nor must we generalise g over any kind variables free in r's kind
-------------------------------------------------------------
-- Extending the TcIdBinderStack, used only for error messages

tcExtendIdBndrs :: [TcIdBinder] -> TcM a -> TcM a
-- Push the binders onto tcl_bndrs, so "relevant bindings" can be
-- reported in error messages.
tcExtendIdBndrs bndrs thing_inside
  = do { traceTc "tcExtendIdBndrs" (ppr bndrs)
       ; updLclEnv (\env -> env { tcl_bndrs = bndrs ++ tcl_bndrs env })
                   thing_inside }
{-
************************************************************************
* *
\subsection{Rules}
* *
************************************************************************
-}
tcExtendRules :: [LRuleDecl Id] -> TcM a -> TcM a
-- Just pop the new rules into the EPS and envt resp
-- All the rules come from an interface file, not source
-- Nevertheless, some may be for this module, if we read
-- its interface instead of its source code
tcExtendRules lcl_rules thing_inside
  = do { gbl_env <- getGblEnv
       ; setGblEnv (gbl_env { tcg_rules = lcl_rules ++ tcg_rules gbl_env })
                   thing_inside }
{-
************************************************************************
* *
Meta level
* *
************************************************************************
-}
checkWellStaged :: SDoc         -- What the stage check is for
                -> ThLevel      -- Binding level (increases inside brackets)
                -> ThLevel      -- Use stage
                -> TcM ()       -- Fail if badly staged, adding an error
checkWellStaged pp_thing bind_lvl use_lvl
  | use_lvl >= bind_lvl         -- OK! Used later than bound
  = return ()                   -- E.g.  \x -> [| $(f x) |]

  | bind_lvl == outerLevel      -- GHC restriction on top level splices
  = stageRestrictionError pp_thing

  | otherwise                   -- Badly staged
  = failWithTc $                -- E.g.  \x -> $(f x)
    ptext (sLit "Stage error:") <+> pp_thing <+>
        hsep   [ptext (sLit "is bound at stage") <+> ppr bind_lvl,
                ptext (sLit "but used at stage") <+> ppr use_lvl]

stageRestrictionError :: SDoc -> TcM a
-- Error for a thing defined locally but used in a top-level splice.
stageRestrictionError pp_thing
  = failWithTc $
    sep [ ptext (sLit "GHC stage restriction:")
        , nest 2 (vcat [ pp_thing <+> ptext (sLit "is used in a top-level splice, quasi-quote, or annotation,")
                       , ptext (sLit "and must be imported, not defined locally")])]

topIdLvl :: Id -> ThLevel
-- Globals may either be imported, or may be from an earlier "chunk"
-- (separated by declaration splices) of this module.  The former
-- *can* be used inside a top-level splice, but the latter cannot.
-- Hence we give the former impLevel, but the latter topLevel
-- E.g. this is bad:
--      x = [| foo |]
--      $( f x )
-- By the time we are processing the $(f x), the binding for "x"
-- will be in the global env, not the local one.
topIdLvl id | isLocalId id = outerLevel
            | otherwise    = impLevel
tcMetaTy :: Name -> TcM Type
-- Given the name of a Template Haskell data type,
-- return the type
-- E.g. given the name "Expr" return the type "Expr"
tcMetaTy tc_name
  = do { tycon <- tcLookupTyCon tc_name
       ; return (mkTyConApp tycon []) }
isBrackStage :: ThStage -> Bool
-- Are we currently inside a Template Haskell bracket?
isBrackStage stage = case stage of
                       Brack {} -> True
                       _        -> False
{-
************************************************************************
* *
getDefaultTys
* *
************************************************************************
-}
tcGetDefaultTys :: TcM ([Type],   -- Default types
                        (Bool,    -- True <=> Use overloaded strings
                         Bool))   -- True <=> Use extended defaulting rules
tcGetDefaultTys
  = do  { dflags <- getDynFlags
        ; let ovl_strings       = xopt Opt_OverloadedStrings dflags
              extended_defaults = xopt Opt_ExtendedDefaultRules dflags
                                  -- See also Trac #1974
              flags = (ovl_strings, extended_defaults)

        ; mb_defaults <- getDeclaredDefaultTys
        ; case mb_defaults of {
            Just tys -> return (tys, flags) ;
                        -- User-supplied defaults
            Nothing  -> do

        -- No user-supplied default
        -- Use [Integer, Double], plus modifications
        { integer_ty <- tcMetaTy integerTyConName
        ; list_ty <- tcMetaTy listTyConName
        ; checkWiredInTyCon doubleTyCon
        ; let deflt_tys = opt_deflt extended_defaults [unitTy, list_ty]
                          -- Note [Extended defaults]
                          ++ [integer_ty, doubleTy]
                          ++ opt_deflt ovl_strings [stringTy]
        ; return (deflt_tys, flags) } } }
  where
    -- Include the extra defaults only when the controlling flag is on
    opt_deflt True  xs = xs
    opt_deflt False _  = []
{-
Note [Extended defaults]
~~~~~~~~~~~~~~~~~~~~~
In interactive mode (or with -XExtendedDefaultRules) we add () as the first type we
try when defaulting. This has very little real impact, except in the following case.
Consider:
Text.Printf.printf "hello"
This has type (forall a. IO a); it prints "hello", and returns 'undefined'. We don't
want the GHCi repl loop to try to print that 'undefined'. The neatest thing is to
default the 'a' to (), rather than to Integer (which is what would otherwise happen;
and then GHCi doesn't attempt to print the (). So in interactive mode, we add
() to the list of defaulting types. See Trac #1200.
Additionally, the list type [] is added as a default specialization for
Traversable and Foldable. As such the default default list now has types of
varying kinds, e.g. ([] :: * -> *) and (Integer :: *).
************************************************************************
* *
\subsection{The InstInfo type}
* *
************************************************************************
The InstInfo type summarises the information in an instance declaration
instance c => k (t tvs) where b
It is used just for *local* instance decls (not ones from interface files).
But local instance decls includes
- derived ones
- generic ones
as well as explicit user written ones.
-}
-- Summarises a *local* instance declaration: the ClsInst (with its
-- dfun) plus the method bindings.
data InstInfo a
  = InstInfo {
      iSpec   :: ClsInst,        -- Includes the dfun id.  Its forall'd type
      iBinds  :: InstBindings a  -- variables scope over the stuff in InstBindings!
    }
iDFunId :: InstInfo a -> DFunId
-- The dictionary-function Id stored inside the ClsInst.
iDFunId = instanceDFunId . iSpec
-- The bindings and pragmas attached to a local instance declaration.
data InstBindings a
  = InstBindings
      { ib_tyvars  :: [Name]        -- Names of the tyvars from the instance head
                                    -- that are lexically in scope in the bindings

      , ib_binds   :: (LHsBinds a)  -- Bindings for the instance methods

      , ib_pragmas :: [LSig a]      -- User pragmas recorded for generating
                                    -- specialised instances

      , ib_extensions :: [ExtensionFlag] -- Any extra extensions that should
                                         -- be enabled when type-checking this
                                         -- instance; needed for
                                         -- GeneralizedNewtypeDeriving

      , ib_derived :: Bool
           -- True <=> This code was generated by GHC from a deriving clause
           --          or standalone deriving declaration
           --          Used only to improve error messages
      }
instance OutputableBndr a => Outputable (InstInfo a) where
    ppr = pprInstInfoDetails

pprInstInfoDetails :: OutputableBndr a => InstInfo a -> SDoc
-- Instance header followed by the (indented) method bindings.
pprInstInfoDetails info
   = hang (pprInstanceHdr (iSpec info) <+> ptext (sLit "where"))
        2 (details (iBinds info))
  where
    details (InstBindings { ib_binds = b }) = pprLHsBinds b

simpleInstInfoClsTy :: InstInfo a -> (Class, Type)
-- Only valid for instances whose head applies the class to exactly
-- one type; panics otherwise.
simpleInstInfoClsTy info = case instanceHead (iSpec info) of
                             (_, cls, [ty]) -> (cls, ty)
                             _ -> panic "simpleInstInfoClsTy"
simpleInstInfoTy :: InstInfo a -> Type
-- The (single) instance type of a simple instance declaration.
simpleInstInfoTy = snd . simpleInstInfoClsTy
simpleInstInfoTyCon :: InstInfo a -> TyCon
-- Gets the type constructor for a simple instance declaration,
-- i.e. one of the form       instance (...) => C (T a b c) where ...
simpleInstInfoTyCon inst = tcTyConAppTyCon (simpleInstInfoTy inst)
-- | Make a name for the dict fun for an instance decl.  It's an *external*
-- name, like other top-level names, and hence must be made with
-- newGlobalBinder.
newDFunName :: Class -> [Type] -> SrcSpan -> TcM Name
newDFunName clas tys loc
  = do  { is_boot <- tcIsHsBootOrSig
        ; mod     <- getModule
          -- The OccName encodes the class plus the argument type keys;
          -- chooseUniqueOccTc makes it unique within the module
        ; let info_string = occNameString (getOccName clas) ++
                            concatMap (occNameString.getDFunTyKey) tys
        ; dfun_occ <- chooseUniqueOccTc (mkDFunOcc info_string is_boot)
        ; newGlobalBinder mod dfun_occ loc }

-- | Special case of 'newDFunName' to generate dict fun name for a single TyCon.
newDFunName' :: Class -> TyCon -> TcM Name
newDFunName' clas tycon        -- Just a simple wrapper
  = do { loc <- getSrcSpanM    -- The location of the instance decl,
                               -- not of the tycon
       ; newDFunName clas [mkTyConApp tycon []] loc }
        -- The type passed to newDFunName is only used to generate
        -- a suitable string; hence the empty type arg list
{-
Make a name for the representation tycon of a family instance. It's an
*external* name, like other top-level names, and hence must be made with
newGlobalBinder.
-}
newFamInstTyConName :: Located Name -> [Type] -> TcM Name
-- External name for the representation tycon of a family instance.
newFamInstTyConName (L loc name) tys = mk_fam_inst_name id loc name [tys]

newFamInstAxiomName :: SrcSpan -> Name -> [CoAxBranch] -> TcM Name
-- External name for the coercion axiom of a (possibly branched)
-- family instance; one LHS type list per branch.
newFamInstAxiomName loc name branches
  = mk_fam_inst_name mkInstTyCoOcc loc name (map coAxBranchLHS branches)

mk_fam_inst_name :: (OccName -> OccName) -> SrcSpan -> Name -> [[Type]] -> TcM Name
-- Shared worker: encode the family name plus each branch's LHS type
-- keys into an OccName, uniquify it, then adapt it with 'adaptOcc'.
mk_fam_inst_name adaptOcc loc tc_name tyss
  = do  { mod <- getModule
        ; let info_string = occNameString (getOccName tc_name) ++
                            intercalate "|" ty_strings
        ; occ <- chooseUniqueOccTc (mkInstTyTcOcc info_string)
        ; newGlobalBinder mod (adaptOcc occ) loc }
  where
    ty_strings = map (concatMap (occNameString . getDFunTyKey)) tyss
{-
Stable names used for foreign exports and annotations.
For stable names, the name must be unique (see #1533). If the
same thing has several stable Ids based on it, the
top-level bindings generated must not have the same name.
Hence we create an External name (doesn't change), and we
append a Unique to the string right here.
-}
mkStableIdFromString :: String -> Type -> SrcSpan -> (OccName -> OccName) -> TcM TcId
-- Make an exported Id with an *external* name derived from the given
-- string via 'mkWrapperName' (per-module-counter unique; see the
-- "Stable names" comment above).
mkStableIdFromString str sig_ty loc occ_wrapper = do
    uniq <- newUnique
    mod <- getModule
    name <- mkWrapperName "stable" str
    let occ = mkVarOccFS name :: OccName
        gnm = mkExternalName uniq mod (occ_wrapper occ) loc :: Name
        id  = mkExportedLocalId VanillaId gnm sig_ty :: Id
    return id
mkStableIdFromName :: Name -> Type -> SrcSpan -> (OccName -> OccName) -> TcM TcId
-- As 'mkStableIdFromString', but seeded from a Name's occurrence string.
mkStableIdFromName = mkStableIdFromString . getOccString
mkWrapperName :: (MonadIO m, HasDynFlags m, HasModule m)
              => String -> String -> m FastString
-- Build a deterministic, per-module-counter name of the shape
-- "what:num:pkg:mod:nameBase", z-encoded.
mkWrapperName what nameBase
    = do dflags <- getDynFlags
         thisMod <- getModule
         let -- Note [Generating fresh names for FFI wrappers]
             wrapperRef = nextWrapperNum dflags
             pkg = unitIdString (moduleUnitId thisMod)
             mod = moduleNameString (moduleName thisMod)
         -- Atomically bump this module's counter and take the old value
         wrapperNum <- liftIO $ atomicModifyIORef' wrapperRef $ \mod_env ->
             let num = lookupWithDefaultModuleEnv mod_env 0 thisMod
                 mod_env' = extendModuleEnv mod_env thisMod (num+1)
             in (mod_env', num)
         let components = [what, show wrapperNum, pkg, mod, nameBase]
         return $ mkFastString $ zEncodeString $ intercalate ":" components
{-
Note [Generating fresh names for FFI wrappers]
We used to use a unique, rather than nextWrapperNum, to distinguish
between FFI wrapper functions. However, the wrapper names that we
generate are external names. This means that if a call to them ends up
in an unfolding, then we can't alpha-rename them, and thus if the
unique randomly changes from one compile to another then we get a
spurious ABI change (#4012).
The wrapper counter has to be per-module, not global, so that the number we end
up using is not dependent on the modules compiled before the current one.
-}
{-
************************************************************************
* *
\subsection{Errors}
* *
************************************************************************
-}
pprBinders :: [Name] -> SDoc
-- Used in error messages
-- Use quotes for a single one; they look a bit "busy" for several
pprBinders bndrs = case bndrs of
                     [bndr] -> quotes (ppr bndr)
                     _      -> pprWithCommas ppr bndrs
notFound :: Name -> TcM TyThing
-- Error for a Name missing from the type environment: a stage error
-- inside splices, a wild-card error for names starting with "_" when
-- NamedWildCards is on, otherwise an internal error (the renamer
-- should have rejected anything else).
notFound name
  = do { lcl_env <- getLclEnv
       ; namedWildCardsEnabled <- xoptM Opt_NamedWildCards
       ; let stage = tcl_th_ctxt lcl_env
             isWildCard = case getOccString name of
               ('_':_:_) | namedWildCardsEnabled -> True
               "_"                               -> True
               _                                 -> False
       ; case stage of   -- See Note [Out of scope might be a staging error]
           Splice {} -> stageRestrictionError (quotes (ppr name))
           _ | isWildCard -> failWithTc $
                             text "Unexpected wild card:" <+> quotes (ppr name)
           _ -> failWithTc $
                vcat[ptext (sLit "GHC internal error:") <+> quotes (ppr name) <+>
                     ptext (sLit "is not in scope during type checking, but it passed the renamer"),
                     ptext (sLit "tcl_env of environment:") <+> ppr (tcl_env lcl_env)]
                     -- Take care: printing the whole gbl env can
                     -- cause an infinite loop, in the case where we
                     -- are in the middle of a recursive TyCon/Class group;
                     -- so let's just not print it!  Getting a loop here is
                     -- very unhelpful, because it hides one compiler bug with another
       }

wrongThingErr :: String -> TcTyThing -> Name -> TcM a
-- "<thing> `name' used as a <expected>"
-- It's important that this only calls pprTcTyThingCategory, which in
-- turn does not look at the details of the TcTyThing.
-- See Note [Placeholder PatSyn kinds] in TcBinds
wrongThingErr expected thing name
  = failWithTc (pprTcTyThingCategory thing <+> quotes (ppr name) <+>
                ptext (sLit "used as a") <+> text expected)
{-
Note [Out of scope might be a staging error]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
x = 3
data T = MkT $(foo x)
This is really a staging error, because we can't run code involving 'x'.
But in fact the type checker processes types first, so 'x' won't even be
in the type envt when we look for it in $(foo x). So inside splices we
report something missing from the type env as a staging error.
See Trac #5752 and #5795.
-}
| AlexanderPankiv/ghc | compiler/typecheck/TcEnv.hs | bsd-3-clause | 35,875 | 6 | 18 | 10,433 | 6,282 | 3,313 | 2,969 | 477 | 6 |
module Test.LongWeekYears where
import Data.Time.Calendar.WeekDate
import Data.Time.Calendar
import Test.TestUtil
import Test.LongWeekYearsRef
longYear :: Integer -> Bool
-- A week-based year is "long" (has an ISO week 53) exactly when
-- December 31st of the Gregorian year falls in week 53.
longYear year =
    let (_, weekNo, _) = toWeekDate (fromGregorian year 12 31)
    in weekNo == 53
showLongYear :: Integer -> String
-- Render "<year>: XY" where X is 'L' for leap years and Y is '*' for
-- long (53-week) years; a space marks each absent property.
showLongYear year = unwords [show year ++ ":", leapMark ++ longMark]
  where
    leapMark = if isLeapYear year then "L" else " "
    longMark = if longYear year  then "*" else " "
longWeekYears :: Test
-- Compare the rendered long-week-year table for 1901..2050 against the
-- reference text in Test.LongWeekYearsRef.
longWeekYears = pureTest "longWeekYears" $
    diff longWeekYearsRef $ unlines $ map showLongYear [1901 .. 2050]
| bergmark/time | test/Test/LongWeekYears.hs | bsd-3-clause | 580 | 0 | 10 | 107 | 188 | 103 | 85 | 16 | 3 |
module C1 where
import D1
-- Sum of squares over a list, via explicit case analysis
-- (uses 'sq' imported from D1).
sumSquares1 ys = case ys of
                   []         -> 0
                   (y : rest) -> sq y + sumSquares1 rest
| kmate/HaRe | old/testing/addOneParameter/C1.hs | bsd-3-clause | 99 | 0 | 7 | 28 | 42 | 22 | 20 | 4 | 1 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<!-- JavaHelp helpset descriptor (Bosnian, bs-BA) for the ZAP "Quick Start"
     extension: declares the help map plus the TOC, index, full-text search
     and favorites views. -->
<helpset version="2.0" xml:lang="bs-BA">
  <title>Quick Start | ZAP Extension</title>
  <maps>
    <homeID>top</homeID>
    <mapref location="map.jhm"/>
  </maps>
  <view>
    <name>TOC</name>
    <label>Sadržaj</label>
    <type>org.zaproxy.zap.extension.help.ZapTocView</type>
    <data>toc.xml</data>
  </view>
  <view>
    <name>Index</name>
    <label>Indeks</label>
    <type>javax.help.IndexView</type>
    <data>index.xml</data>
  </view>
  <view>
    <name>Search</name>
    <label>Traži</label>
    <type>javax.help.SearchView</type>
    <data engine="com.sun.java.help.search.DefaultSearchEngine">
      JavaHelpSearch
    </data>
  </view>
  <view>
    <name>Favorites</name>
    <label>Favoriti</label>
    <type>javax.help.FavoritesView</type>
  </view>
</helpset>
module LinkerUnload (init) where
import GHC
import DynFlags
import Linker
import System.Environment
import MonadUtils ( MonadIO(..) )
-- Exported to C callers (the test driver), which invoke it to force
-- package linking.
foreign export ccall loadPackages :: IO ()

loadPackages :: IO ()
-- Start a GHC session (libdir taken from the sole program argument)
-- with in-memory linking and no code generation, then link all the
-- packages returned by setSessionDynFlags.
loadPackages = do
  [libdir] <- getArgs   -- partial match: assumes exactly one argument
  runGhc (Just libdir) $ do
    dflags <- getSessionDynFlags
    let dflags' = dflags { hscTarget = HscNothing
                         , ghcLink   = LinkInMemory }
    pkgs <- setSessionDynFlags dflags'
    hsc_env <- getSession
    liftIO $ Linker.linkPackages hsc_env pkgs
| sdiehl/ghc | testsuite/tests/rts/linker/LinkerUnload.hs | bsd-3-clause | 517 | 0 | 14 | 120 | 152 | 80 | 72 | 17 | 1 |
module IntegerConversionRules where
import Data.Word
-- Each fN is a monomorphic instantiation of 'fi'; the module name
-- suggests these exist to exercise GHC's fromIntegral rewrite rules
-- at fixed types -- TODO confirm against the testsuite driver.
f1 :: Int -> Double
f1 = fi

f2 :: Int -> Float
f2 = fi

f3 :: Int -> Int
f3 = fi

f4 :: Int -> Word
f4 = fi

-- Shared polymorphic conversion, specialised above.
fi :: (Integral a, Num b) => a -> b
fi = fromIntegral
| frantisekfarka/ghc-dsi | testsuite/tests/lib/integer/IntegerConversionRules.hs | bsd-3-clause | 221 | 0 | 6 | 58 | 94 | 54 | 40 | 12 | 1 |
module B where
-- Trivial exported constant; the module exists mainly to be compiled
-- and imported by another module.
b :: Char
b = 'b'
| urbanslug/ghc | testsuite/tests/driver/dynamicToo/dynamicToo004/pkg2/B.hs | bsd-3-clause | 35 | 0 | 4 | 11 | 14 | 9 | 5 | 3 | 1 |
{-# LANGUAGE DataKinds, TypeOperators, PolyKinds, FlexibleInstances, FlexibleContexts #-}
module T7095 where
-- Phantom wrapper around a type-level list; carries no runtime data.
data Wrapped t = Wrapped

-- Length of the type-level list inside 'Wrapped', computed by
-- instance-driven recursion over the list's structure.
class Len l where
  len :: l -> Int

-- Base case: the empty type-level list has length 0.
instance Len (Wrapped '[]) where
  len = const 0

-- Step case: one more than the length of the tail.
instance (Len (Wrapped xs)) => Len (Wrapped (x ': xs)) where
  len x = 1 + (len $ wrappedTail x)

-- Drop the head of the type-level list (the value is just 'Wrapped').
wrappedTail :: Wrapped (x ': xs) -> Wrapped xs
wrappedTail = const Wrapped

-- | test1 == zero just as excepted.
test1 = len (undefined :: Wrapped '[])

-- | Since I have typeclasses defined for Wrapped (* ': [*]) and for (Wrapped '[])
-- I except to get 1 here, but this does not typecheck with following message:
--     No instance for (Len (Wrapped [*] ([] *)))
-- (Original ticket-report wording preserved; this is regression test T7095.)
test2 = len (undefined :: Wrapped '[Int])
| urbanslug/ghc | testsuite/tests/polykinds/T7095.hs | bsd-3-clause | 744 | 0 | 9 | 162 | 196 | 105 | 91 | 13 | 1 |
import Prelude (IO)
import Yesod.Default.Config (fromArgs)
import Yesod.Default.Main (defaultMain)
import Settings (parseExtra)
import Application (makeApplication)
-- | Standard Yesod scaffolding entry point: parse the extra settings
-- from the command-line arguments and hand the application constructor
-- to Yesod's default runner.
main :: IO ()
main = defaultMain (fromArgs parseExtra) makeApplication
| Tener/deeplearning-thesis | yesod/abaloney/app/main.hs | bsd-3-clause | 273 | 0 | 7 | 62 | 73 | 42 | 31 | 7 | 1 |
{-# LANGUAGE FlexibleContexts #-}
module Futhark.Internalise.AccurateSizes
( shapeBody
, annotateArrayShape
, argShapes
, ensureResultShape
, ensureResultExtShape
, ensureExtShape
, ensureShape
, ensureArgShapes
)
where
import Control.Applicative
import Control.Monad
import Data.Loc
import qualified Data.Map.Strict as M
import Prelude
import Futhark.Construct
import Futhark.Representation.AST
import Futhark.MonadFreshNames
-- | Build a body that returns the shapes (per 'argShapes') of the
-- results of the given body, using the given shape names and types.
shapeBody :: (HasScope lore m, MonadFreshNames m, BinderOps lore, Bindable lore) =>
             [VName] -> [Type] -> Body lore
          -> m (Body lore)
shapeBody shapenames ts body =
  runBodyBinder $ do
    results <- bodyBind body
    result_ts <- mapM subExpType results
    pure (resultBody (argShapes shapenames ts result_ts))
-- | Replace the array shape of a type with the given concrete dimensions,
-- padding with zeroes if fewer dimensions than the rank are supplied.
annotateArrayShape :: ArrayShape shape =>
                      TypeBase shape u -> [Int] -> TypeBase Shape u
annotateArrayShape t newshape =
  setArrayShape t (Shape dims)
  where dims = take (arrayRank t) $
               map (intConst Int32 . toInteger) (newshape ++ repeat 0)
-- | For each shape name, the subexpression it is bound to according to
-- the mapping from declared to actual types; zero when unconstrained.
argShapes :: [VName] -> [TypeBase Shape u0] -> [TypeBase Shape u1] -> [SubExp]
argShapes shapes valts valargts = map lookupShape shapes
  where mapping = shapeMapping valts valargts
        -- Names not pinned down by the mapping default to the constant 0.
        lookupShape name = M.findWithDefault (intConst Int32 0) name mapping
-- | As 'ensureResultExtShape', but for results whose types are fully
-- static (no existential sizes).
ensureResultShape :: MonadBinder m =>
                     (m Certificates -> m Certificates)
                  -> String -> SrcLoc -> [Type] -> Body (Lore m)
                  -> m (Body (Lore m))
ensureResultShape asserting msg loc ts body =
  ensureResultExtShape asserting msg loc (staticShapes ts) body
-- | Check (via the given certificate wrapper) that the results of a body
-- match the given existential types, returning a body whose result also
-- carries the required shape context.
ensureResultExtShape :: MonadBinder m =>
                        (m Certificates -> m Certificates)
                     -> String -> SrcLoc -> [ExtType] -> Body (Lore m)
                     -> m (Body (Lore m))
ensureResultExtShape asserting msg loc rettype body =
  insertStmsM $ do
    es <- bodyBind body
    es_ts <- mapM subExpType es
    -- Resolve existential sizes that the actual result types determine,
    -- then shape-check each result against the resolved type.
    let ext_mapping = shapeExtMapping rettype es_ts
        rettype' = foldr (uncurry fixExt) rettype $ M.toList ext_mapping
        assertProperShape t se =
          let name = "result_proper_shape"
          in ensureExtShape asserting msg loc t name se
    reses <- zipWithM assertProperShape rettype' es
    ts <- mapM subExpType reses
    -- The shape context is prepended to the checked results.
    let ctx = extractShapeContext rettype $ map arrayDims ts
    mkBodyM [] (ctx ++ reses)
-- | Shape-check a single subexpression against an existential type.
-- Only array-typed variables need checking; everything else is returned
-- unchanged.
ensureExtShape :: MonadBinder m =>
                  (m Certificates -> m Certificates)
               -> String -> SrcLoc -> ExtType -> String -> SubExp
               -> m SubExp
ensureExtShape asserting msg loc t name orig =
  case (t, orig) of
    (Array{}, Var v) ->
      Var <$> ensureShapeVar asserting msg loc t name v
    _ -> pure orig
-- | As 'ensureExtShape', but for a fully static type.
ensureShape :: MonadBinder m =>
               (m Certificates -> m Certificates)
            -> String -> SrcLoc -> Type -> String -> SubExp
            -> m SubExp
ensureShape asserting msg loc t name se =
  ensureExtShape asserting msg loc (staticShapes1 t) name se
-- | Reshape the arguments to a function so that they fit the expected
-- shape declarations. Not used to change rank of arguments. Assumes
-- everything is otherwise type-correct.
ensureArgShapes :: (MonadBinder m, Typed (TypeBase Shape u)) =>
                   (m Certificates -> m Certificates)
                -> String -> SrcLoc -> [VName] -> [TypeBase Shape u] -> [SubExp]
                -> m [SubExp]
ensureArgShapes asserting msg loc shapes paramts args =
  zipWithM ensureArgShape (expectedTypes shapes paramts args) args
        -- Constants and non-array variables carry no shape to check.
  where ensureArgShape _ (Constant v) = return $ Constant v
        ensureArgShape t (Var v)
          | arrayRank t < 1 = return $ Var v
          | otherwise =
              ensureShape asserting msg loc t (baseString v) $ Var v
-- | Emit runtime assertions (wrapped by @asserting@) that the array
-- variable's current dimensions equal those implied by the target type,
-- then coerce the variable to the target shape under those certificates.
-- Non-array types are returned unchanged.
ensureShapeVar :: MonadBinder m =>
                  (m Certificates -> m Certificates)
               -> String -> SrcLoc -> ExtType -> String -> VName
               -> m VName
ensureShapeVar asserting msg loc t name v
  | Array{} <- t = do
      -- Desired dimensions come from the target type with existentials
      -- removed; actual dimensions from the variable's current type.
      newshape <- arrayDims . removeExistentials t <$> lookupType v
      oldshape <- arrayDims <$> lookupType v
      let checkDim desired has =
            letExp "shape_cert" =<<
            eAssert (pure $ BasicOp $ CmpOp (CmpEq int32) desired has) msg loc
      certs <- asserting $ Certificates <$> zipWithM checkDim newshape oldshape
      certifying certs $
        letExp name $ shapeCoerce newshape v
  | otherwise = return v
| ihc/futhark | src/Futhark/Internalise/AccurateSizes.hs | isc | 4,454 | 0 | 17 | 1,257 | 1,388 | 673 | 715 | 100 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Web.Spock.Safe

import Control.Monad.IO.Class (liftIO)
import qualified Data.HashMap.Strict as H (lookup)
import Data.Monoid ((<>))
import Data.Text (unpack)
import Text.Read (readMaybe)

import qualified Files.Helpers as F
import qualified Templates.Helpers as T
import qualified Templates.Pages as P
-- | Run the file-sharing Spock application on port 3000.
--
-- Routes:
--   GET  /      — render the upload page.
--   POST /f     — accept an upload (rejecting bodies over 10 MiB) and
--                 show the resulting download URL.
--   GET  /f/:id — serve a previously uploaded file as an attachment.
main :: IO ()
main =
  runSpock 3000 $ spockT id $ do
    liftIO F.setupDirs
    get "/" $
      T.renderHtmlStrict P.mainPage
    post "/f" $ do
      mSize <- header "Content-Length"
      -- Parse the declared size defensively.  The previous code used the
      -- partial 'read', so a missing or malformed Content-Length header
      -- crashed the handler; now both cases redirect home instead.
      case readMaybe . unpack =<< mSize of
        Nothing -> redirect "/"
        Just size
          | size > maxUploadBytes -> T.renderHtmlStrict P.fileTooBig
          | otherwise -> do
              uploaded <- files
              case H.lookup "file" uploaded of
                Nothing -> redirect "/"
                Just uf -> do
                  loc <- liftIO $
                    F.saveUploadedFile
                      (uf_tempLocation uf)
                      (uf_name uf)
                  T.renderHtmlStrict
                    $ P.urlPage
                    $ "http://localhost:3000/f/" <> loc
    get ("f" <//> var) $ \dir -> do
      retFileTup <- liftIO $ F.prepAndReturnFileTup dir
      case retFileTup of
        Nothing -> redirect "/"
        Just ft -> do
          -- fst ft is the original filename, snd ft the on-disk path.
          setHeader
            "Content-Disposition"
            ("attachment; filename=\"" <> fst ft <> "\"")
          file "application/octet-stream" (snd ft)
  where
    -- Maximum accepted upload size: 10 MiB.
    maxUploadBytes :: Integer
    maxUploadBytes = 10485760
| ifo/onetimefiles.com | src/Main.hs | isc | 1,449 | 0 | 26 | 512 | 398 | 201 | 197 | 44 | 5 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE Strict #-}
-- | Internalising bindings.
module Futhark.Internalise.Bindings
( internaliseAttrs,
internaliseAttr,
bindingFParams,
bindingLoopParams,
bindingLambdaParams,
stmPat,
)
where
import Control.Monad.Reader hiding (mapM)
import Data.Bifunctor
import qualified Data.Map.Strict as M
import Data.Maybe
import qualified Futhark.IR.SOACS as I
import Futhark.Internalise.Monad
import Futhark.Internalise.TypesValues
import Futhark.Util
import Language.Futhark as E hiding (matchDims)
-- | Translate a single source-language attribute to its core form.
internaliseAttr :: E.AttrInfo VName -> InternaliseM I.Attr
internaliseAttr attr =
  case attr of
    E.AttrAtom (E.AtomName v) _ -> pure (I.AttrName v)
    E.AttrAtom (E.AtomInt x) _ -> pure (I.AttrInt x)
    E.AttrComp f comps _ -> I.AttrComp f <$> mapM internaliseAttr comps
-- | Translate a list of source attributes into a combined 'I.Attrs'.
internaliseAttrs :: [E.AttrInfo VName] -> InternaliseM I.Attrs
internaliseAttrs attrs = foldMap I.oneAttr <$> mapM internaliseAttr attrs
-- | Internalise function parameters (type parameters and value
-- patterns) and run the continuation with the internalised shape
-- parameters and, per source pattern, its internalised value parameters
-- in scope.
bindingFParams ::
  [E.TypeParam] ->
  [E.Pat] ->
  ([I.FParam] -> [[I.FParam]] -> InternaliseM a) ->
  InternaliseM a
bindingFParams tparams params m = do
  flattened_params <- mapM flattenPat params
  let params_idents = concat flattened_params
  params_ts <-
    internaliseParamTypes $
      map (flip E.setAliases () . E.unInfo . E.identType . fst) params_idents
  -- Bookkeeping so the internalised parameters can be re-chunked to
  -- mirror the original source patterns.
  let num_param_idents = map length flattened_params
      num_param_ts = map (sum . map length) $ chunks num_param_idents params_ts
  -- One int64 parameter per explicit size parameter; each substitutes
  -- for itself.
  let shape_params = [I.Param mempty v $ I.Prim I.int64 | E.TypeParamDim v _ <- tparams]
      shape_subst = M.fromList [(I.paramName p, [I.Var $ I.paramName p]) | p <- shape_params]
  bindingFlatPat params_idents (concat params_ts) $ \valueparams -> do
    let (certparams, valueparams') = unzip $ map fixAccParam (concat valueparams)
    I.localScope (I.scopeOfFParams $ catMaybes certparams ++ shape_params ++ valueparams') $
      substitutingVars shape_subst $
        m (catMaybes certparams ++ shape_params) $ chunks num_param_ts valueparams'
  where
    -- Accumulator parameters get an extra unit-typed certificate
    -- parameter named after the accumulator token.
    fixAccParam (I.Param attrs pv (I.Acc acc ispace ts u)) =
      ( Just (I.Param attrs acc $ I.Prim I.Unit),
        I.Param attrs pv (I.Acc acc ispace ts u)
      )
    fixAccParam p = (Nothing, p)
-- | Internalise the merge pattern of a loop (plus its size parameters)
-- and run the continuation with the shape parameters and the flattened
-- value parameters in scope.
bindingLoopParams ::
  [E.TypeParam] ->
  E.Pat ->
  [I.Type] ->
  ([I.FParam] -> [I.FParam] -> InternaliseM a) ->
  InternaliseM a
bindingLoopParams tparams pat ts m = do
  pat_idents <- flattenPat pat
  pat_ts <- internaliseLoopParamType (E.patternStructType pat) ts
  -- As in 'bindingFParams': one int64 per size parameter, each
  -- substituting for itself.
  let shape_params = [I.Param mempty v $ I.Prim I.int64 | E.TypeParamDim v _ <- tparams]
      shape_subst = M.fromList [(I.paramName p, [I.Var $ I.paramName p]) | p <- shape_params]
  bindingFlatPat pat_idents pat_ts $ \valueparams ->
    I.localScope (I.scopeOfFParams $ shape_params ++ concat valueparams) $
      substitutingVars shape_subst $ m shape_params $ concat valueparams
-- | Internalise lambda parameters and run the continuation with the
-- resulting parameters in scope.
bindingLambdaParams ::
  [E.Pat] ->
  [I.Type] ->
  ([I.LParam] -> InternaliseM a) ->
  InternaliseM a
bindingLambdaParams params ts m = do
  flat_idents <- concat <$> mapM flattenPat params
  bindingFlatPat flat_idents ts $ \chunked -> do
    let lparams = concat chunked
    I.localScope (I.scopeOfLParams lparams) (m lparams)
-- | Internalise a flattened pattern against a list of internal types,
-- producing one parameter group per source identifier together with the
-- substitution from each source name to its internalised variables.
processFlatPat ::
  Show t =>
  [(E.Ident, [E.AttrInfo VName])] ->
  [t] ->
  InternaliseM ([[I.Param t]], VarSubsts)
processFlatPat x y = processFlatPat' [] x y
  where
    -- Accumulates (params, subst) pairs in reverse; hence the 'reverse'
    -- at the end.
    processFlatPat' pat [] _ = do
      let (vs, substs) = unzip pat
      return (reverse vs, M.fromList substs)
    processFlatPat' pat ((p, attrs) : rest) ts = do
      attrs' <- internaliseAttrs attrs
      (ps, rest_ts) <- handleMapping attrs' ts <$> internaliseBindee p
      processFlatPat' ((ps, (E.identName p, map (I.Var . I.paramName) ps)) : pat) rest rest_ts
    -- Pair each internalised name with one type, returning the types
    -- left over for subsequent identifiers.
    handleMapping _ ts [] =
      ([], ts)
    handleMapping attrs (t : ts) (r : rs) =
      let (ps, ts') = handleMapping attrs ts rs
       in (I.Param attrs r t : ps, ts')
    -- More names than types indicates an internalisation bug upstream.
    handleMapping _ [] _ =
      error $ "handleMapping: insufficient identifiers in pattern." ++ show (x, y)
-- | The internalised names for a bound identifier: reuse the source
-- name when its type internalises to a single value, otherwise invent
-- one fresh name per component.
internaliseBindee :: E.Ident -> InternaliseM [VName]
internaliseBindee bindee = do
  n <- internalisedTypeSize $ E.unInfo $ E.identType bindee
  if n == 1
    then pure [srcName]
    else replicateM n $ newVName $ baseString srcName
  where
    srcName = E.identName bindee
-- | Internalise a flattened pattern and run the continuation with the
-- variable substitutions it induces installed in the environment.
bindingFlatPat ::
  Show t =>
  [(E.Ident, [E.AttrInfo VName])] ->
  [t] ->
  ([[I.Param t]] -> InternaliseM a) ->
  InternaliseM a
bindingFlatPat idents ts m = do
  (params, substs) <- processFlatPat idents ts
  let extendSubsts env = env {envSubsts = M.union substs (envSubsts env)}
  local extendSubsts (m params)
-- | Flatten a pattern. Returns a list of identifiers, each paired with
-- the attributes that apply to it.
flattenPat :: MonadFreshNames m => E.Pat -> m [(E.Ident, [E.AttrInfo VName])]
flattenPat = flattenPat'
  where
    flattenPat' (E.PatParens p _) =
      flattenPat' p
    -- An attribute on a pattern applies to every identifier inside it.
    flattenPat' (E.PatAttr attr p _) =
      map (second (attr :)) <$> flattenPat' p
    -- Wildcards still need a (fresh, unused) name.
    flattenPat' (E.Wildcard t loc) = do
      name <- newVName "nameless"
      flattenPat' $ E.Id name t loc
    flattenPat' (E.Id v (Info t) loc) =
      return [(E.Ident v (Info t) loc, mempty)]
    -- XXX: treat empty tuples and records as unit.
    flattenPat' (E.TuplePat [] loc) =
      flattenPat' (E.Wildcard (Info $ E.Scalar $ E.Record mempty) loc)
    flattenPat' (E.RecordPat [] loc) =
      flattenPat' (E.Wildcard (Info $ E.Scalar $ E.Record mempty) loc)
    flattenPat' (E.TuplePat pats _) =
      concat <$> mapM flattenPat' pats
    -- Record fields are flattened in sorted field order, matching how
    -- records are internalised elsewhere.
    flattenPat' (E.RecordPat fs loc) =
      flattenPat' $ E.TuplePat (map snd $ sortFields $ M.fromList fs) loc
    flattenPat' (E.PatAscription p _ _) =
      flattenPat' p
    -- Literal patterns bind nothing; only their type matters here.
    flattenPat' (E.PatLit _ t loc) =
      flattenPat' $ E.Wildcard t loc
    flattenPat' (E.PatConstr _ _ ps _) =
      concat <$> mapM flattenPat' ps
-- | Internalise the pattern of a binding and pass the flat list of
-- internalised names to the continuation.
stmPat ::
  E.Pat ->
  [I.Type] ->
  ([VName] -> InternaliseM a) ->
  InternaliseM a
stmPat pat ts m = do
  flat <- flattenPat pat
  bindingFlatPat flat ts $ \params ->
    m (map I.paramName (concat params))
| HIPERFIT/futhark | src/Futhark/Internalise/Bindings.hs | isc | 5,988 | 0 | 19 | 1,279 | 2,297 | 1,155 | 1,142 | 144 | 11 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.