code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
module U.Util.Hash where
-- (Hash, toBytes, base32Hex, base32Hexs, fromBase32Hex, fromBytes, unsafeFromBase32Hex, showBase32Hex, validBase32HexChars) where
-- import Unison.Prelude
import qualified Data.ByteArray as BA
import Data.ByteString (ByteString)
import qualified Crypto.Hash as CH
import qualified Data.ByteString as B
import Data.ByteString.Builder (doubleBE, int64BE, toLazyByteString, word64BE)
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString.Short as B.Short
import Data.Text.Encoding (encodeUtf8)
import GHC.Generics (Generic)
import qualified U.Util.Hashable as H
import Data.ByteString.Short (fromShort, ShortByteString)
import qualified U.Util.Base32Hex as Base32Hex
import U.Util.Base32Hex (Base32Hex)
-- | Hash which uniquely identifies a Unison type or term
newtype Hash = Hash {toShort :: ShortByteString} deriving (Eq, Ord, Generic)
-- | Render a 'Hash' in base32hex notation (the textual form used elsewhere).
toBase32Hex :: Hash -> Base32Hex
toBase32Hex = Base32Hex.fromByteString . toBytes
-- | Decode a base32hex value back into a 'Hash'.
-- NOTE(review): no length/validity check is performed here — the bytes are
-- wrapped as-is; confirm callers only pass well-formed hashes.
fromBase32Hex :: Base32Hex -> Hash
fromBase32Hex = Hash . B.Short.toShort . Base32Hex.toByteString
-- | Extract the raw digest bytes as a strict 'ByteString'.
toBytes :: Hash -> ByteString
toBytes = fromShort . toShort
-- | A 'Hash' contributes its raw digest bytes to any enclosing hash.
instance H.Hashable Hash where
tokens h = [H.Bytes (toBytes h)]
-- | Accumulate a token stream into a SHA3-512 digest and wrap it as a 'Hash'.
instance H.Accumulate Hash where
accumulate = fromBytes . BA.convert . CH.hashFinalize . go CH.hashInit
where
go :: CH.Context CH.SHA3_512 -> [H.Token Hash] -> CH.Context CH.SHA3_512
go acc tokens = CH.hashUpdates acc (tokens >>= toBS)
-- Serialise each token kind to the byte chunks fed into the hash context.
toBS (H.Tag b) = [B.singleton b]
-- Variable-length payloads (bytes/text) are length-prefixed so that
-- adjacent tokens cannot collide by re-bracketing.
toBS (H.Bytes bs) = [encodeLength $ B.length bs, bs]
toBS (H.Int i) = BL.toChunks . toLazyByteString . int64BE $ i
toBS (H.Nat i) = BL.toChunks . toLazyByteString . word64BE $ i
toBS (H.Double d) = BL.toChunks . toLazyByteString . doubleBE $ d
toBS (H.Text txt) =
let tbytes = encodeUtf8 txt
in [encodeLength (B.length tbytes), tbytes]
toBS (H.Hashed h) = [toBytes h]
-- Lengths are encoded as big-endian 64-bit words.
encodeLength :: Integral n => n -> B.ByteString
encodeLength = BL.toStrict . toLazyByteString . word64BE . fromIntegral
fromBytes = U.Util.Hash.fromBytes
toBytes = U.Util.Hash.toBytes
-- | Wrap raw digest bytes as a 'Hash' (no validation of length or content).
fromBytes :: ByteString -> Hash
fromBytes = Hash . B.Short.toShort
instance Show Hash where
show h = "fromBase32Hex " ++ (show . Base32Hex.toText . toBase32Hex) h | unisonweb/platform | codebase2/util/U/Util/Hash.hs | mit | 2,348 | 0 | 14 | 406 | 698 | 390 | 308 | 46 | 1 |
--------------------------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
import Data.List (intersperse, isSuffixOf)
import Data.List.Split
import Data.Monoid ()
import Constants
import Hakyll
import System.FilePath (splitExtension)
--------------------------------------------------------------------------------
main :: IO ()
main = hakyll $ do
-- Static assets: copied verbatim, with the leading "content/" stripped.
match ("images/**" .||. "webfonts/*" .||. "js/*" .||. "content/certificates/*" .||. "content/slides/*.pdf") $ do
route $ stripContent `composeRoutes` idRoute
compile copyFileCompiler
-- Stylesheets are minified in place.
match "css/*" $ do
route idRoute
compile compressCssCompiler
-- HTML slide decks: rendered through the slides template to clean URLs.
match "content/slides/internal/*" $ do
route $ stripContent `composeRoutes` customRoute indexRoute
compile $ getResourceBody
>>= loadAndApplyTemplate "templates/slides.html" postCtx
>>= applyAsTemplate postCtx
>>= relativizeUrls
-- Standalone markdown pages.
match "content/*.md" $ do
route $ stripContent `composeRoutes` customRoute indexRoute
compile $ pandocCompiler
>>= loadAndApplyTemplate "templates/default.html" postCtx
>>= relativizeUrls
-- Posts: dated file names are stripped for the route, and a "teaser"
-- snapshot is saved for reuse on the index page.
match allPosts $ do
route $ stripContent `composeRoutes`
stripPosts `composeRoutes`
customRoute (indexRoute . removeDate)
compile $ do
c <- pandocCompiler
full <- loadAndApplyTemplate "templates/post.html" postCtx c
teaser <- loadAndApplyTemplate "templates/teaser.html" postCtx $
extractTeaser c
_ <- saveSnapshot "teaser" teaser
loadAndApplyTemplate "templates/default.html" postCtx full
>>= relativizeUrls
-- Home page: lists the teaser snapshots of all posts, newest first.
match "content/index.html" $ do
route $ stripContent `composeRoutes` idRoute
compile $ do
posts <- recentFirst =<< loadAllSnapshots allPosts "teaser"
let indexCtx = listField "posts" postCtx (return posts) `mappend`
constField "title" "Home" `mappend`
postCtx
getResourceBody
>>= applyAsTemplate indexCtx
>>= loadAndApplyTemplate "templates/default.html" indexCtx
>>= relativizeUrls
match "templates/*" $ compile templateBodyCompiler
--------------------------------------------------------------------------------
-- | Template context shared by all pages: clean (index-less) URLs, a
-- formatted date, plus the site/social constants and Hakyll's defaults.
postCtx :: Context String
postCtx =
deIndexedUrl "url" `mappend`
dateField "date" "%B %e, %Y" `mappend`
siteCtx `mappend`
socialCtx `mappend`
defaultContext
-- | All post sources (long-form articles and blog entries).
allPosts :: Pattern
allPosts = "content/posts/articles/*" .||. "content/posts/blog/*"
-- | Drop the "content/" prefix from output paths.
stripContent :: Routes
stripContent = gsubRoute "content/" $ const ""
-- | Drop the "posts/" prefix from output paths.
stripPosts :: Routes
stripPosts = gsubRoute "posts/" $ const ""
-- | Strip the leading @YYYY-MM-DD-@ date prefix from a post's file name,
-- leaving the directory part of the path intact.  The date is assumed to be
-- the first three dash-separated components of the file name.
removeDate :: Identifier -> Identifier
removeDate s =
    fromFilePath . concat $ folder ++ intersperse "-" (drop 3 (splitOn "-" file))
  where
    -- Split the identifier's path into its directory prefix (re-joined with
    -- slashes) and its final file-name component.
    segments = splitOn "/" $ toFilePath s
    folder   = intersperse "/" $ init segments ++ ["/"]
    file     = last segments
-- | Route a page to @<name>/index.html@ so it is served under a clean,
-- extension-free URL.
indexRoute :: Identifier -> FilePath
indexRoute ident = base ++ "/index.html"
  where
    -- Path of the source file with its extension removed.
    base = fst (splitExtension (toFilePath ident))
-- | Drop a trailing @index.html@ from a site-local URL (one starting with
-- '/' or '.'), turning it into the equivalent directory URL.  Any other URL
-- is returned unchanged.
stripIndex :: String -> String
stripIndex url
    | "index.html" `isSuffixOf` url
    , first : _ <- url
    , first `elem` ("/." :: String) =
        -- Remove the 10 characters of "index.html" from the end.
        take (length url - 10) url
    | otherwise = url
-- | Context field that yields the item's URL with any trailing @index.html@
-- stripped (via 'stripIndex'); empty when the item has no route.
deIndexedUrl :: String -> Context a
deIndexedUrl key = field key $
fmap (stripIndex . maybe mempty toUrl) . getRoute . itemIdentifier
-- | Reduce a post body to its teaser: everything from the
-- @<!-- TEASER START -->@ marker line (inclusive) up to, but not including,
-- the @<!-- TEASER STOP -->@ marker line.
extractTeaser :: Item String -> Item String
extractTeaser = fmap cut
  where
    cut body =
        unlines
        . takeWhile (/= "<!-- TEASER STOP -->")
        . dropWhile (/= "<!-- TEASER START -->")
        $ lines body
| ilya-murzinov/ilya-murzinov.github.io | app/site.hs | mit | 3,645 | 0 | 21 | 845 | 892 | 442 | 450 | 84 | 2 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE DeriveDataTypeable #-}
module Language.Syntax where
import Grammatik.Type
import Autolib.Reader
import Autolib.ToDoc
import Data.Typeable
-- | The catalogue of exercise/language kinds supported by this module.
-- Constructor arguments carry the parameters of each language family
-- (alphabets as 'String's, sizes/exponents as 'Int's, or whole grammars).
data Type
= ABCdiff
| Gleich String [Int]
| Ordered_Gleich String
| Ordered_Ungleich String
| AK String
| AmBnCmDn
| BinCompare
| Cee Char Type
| Lukas
| NoLukas
| Dyck
| NoDyck
| Pali String
| NoPali String
| Potenzen Int
| Power String Int
| NoPower String Int
| Vielfache Int
| Form String
| Reg String String
| Regular String String
| From_Grammatik Grammatik
| Center String Char
| Uneps Type
| Komplement Type
| Mirror Type
deriving ( Eq, Typeable )
-- Template Haskell: derive Reader/ToDoc instances for 'Type'.
$(derives [makeReader, makeToDoc] [''Type])
-- local variables:
-- mode: haskell
-- end:
| marcellussiegburg/autotool | collection/src/Language/Syntax.hs | gpl-2.0 | 870 | 0 | 9 | 257 | 202 | 121 | 81 | 36 | 0 |
-- | Very simple visualisation of BDDs using /dot/.
module Data.HasCacBDD.Visuals (
genGraph,
genGraphWith,
showGraph,
svgGraph
) where
import System.Exit
import System.IO
import System.Process
import Data.HasCacBDD
-- | Generate a string which describes the BDD in the dot language.
genGraph :: Bdd -> String
genGraph = genGraphWith show
-- | Given a function to show variables, generate a string which describes the BDD in the dot language.
genGraphWith :: (Int -> String) -> Bdd -> String
genGraphWith myShow myb
| myb == bot = "digraph g { Bot [label=\"0\",shape=\"box\"]; }"
| myb == top = "digraph g { Top [label=\"1\",shape=\"box\"]; }"
| otherwise = "strict digraph g {\n" ++ links ++ sinks ++ rankings ++ "}" where
(links,topdone) = genGraphStep [] myb
-- Depth-first emission of one dot line per node/edge.  The accumulator
-- 'done' maps every BDD node already emitted to its dot node number, so
-- shared subgraphs are emitted only once (the digraph is 'strict').
genGraphStep :: [(Bdd,Int)] -> Bdd -> (String,[(Bdd,Int)])
genGraphStep done curB =
if curB `elem` [top,bot] ++ map fst done then ("",done) else
let
-- Fresh node number: one past the largest number handed out so far.
thisn = if null done then 0 else maximum (map snd done) + 1
thisnstr = show thisn
(Just thisvar) = firstVarOf curB
out1 = "n" ++ thisnstr ++ " [label=\"" ++ myShow thisvar ++ "\",shape=\"circle\"];\n"
(lhs, rhs) = (thenOf curB, elseOf curB)
-- Recurse into the "then" child first; its result feeds the "else" side.
(lhsoutput,lhsdone) = genGraphStep ((curB,thisn):done) lhs
(Just leftn) = lookup lhs lhsdone
out2
| lhs == top = "n"++ thisnstr ++" -> Top;\n"
| lhs == bot = "n"++ thisnstr ++" -> Bot;\n"
| otherwise = "n"++ thisnstr ++" -> n" ++ show leftn ++";\n" ++ lhsoutput
(rhsoutput,rhsdone) = genGraphStep lhsdone rhs
(Just rightn) = lookup rhs rhsdone
-- "else" edges are drawn dashed, following BDD drawing convention.
out3
| rhs == top = "n"++ thisnstr ++" -> Top [style=dashed];\n"
| rhs == bot = "n"++ thisnstr ++" -> Bot [style=dashed];\n"
| otherwise = "n"++ thisnstr ++" -> n"++ show rightn ++" [style=dashed];\n" ++ rhsoutput
in (out1 ++ out2 ++ out3, rhsdone)
sinks = "Bot [label=\"0\",shape=\"box\"];\n" ++ "Top [label=\"1\",shape=\"box\"];\n"
-- Nodes of the same variable are placed on the same horizontal rank.
rankings = concat [ "{ rank=same; "++ unwords (nodesOf v) ++ " }\n" | v <- allVarsOf myb ]
nodesOf v = map (("n"++).show.snd) $ filter ( \(b,_) -> firstVarOf b == Just v ) topdone
-- | Display the graph of a BDD with dot.
-- NOTE(review): the dot binary path is hardcoded to /usr/bin/dot — this
-- fails on systems where graphviz lives elsewhere; consider plain "dot".
showGraph :: Bdd -> IO ()
showGraph b = do
(inp,_,_,pid) <- runInteractiveProcess "/usr/bin/dot" ["-Tx11"] Nothing Nothing
hPutStr inp (genGraph b)
hFlush inp
hClose inp
_ <- waitForProcess pid
return ()
-- | Generate SVG of a BDD with dot.
-- On success the first output line is dropped (presumably the XML
-- declaration — TODO confirm); on failure an 'error' is raised with dot's
-- exit code and stderr.
svgGraph :: Bdd -> IO String
svgGraph b = do
(exitCode,out,err) <- readProcessWithExitCode "/usr/bin/dot" ["-Tsvg" ] (genGraph b)
case exitCode of
ExitSuccess -> return $ (unlines.tail.lines) out
ExitFailure n -> error $ "dot -Tsvg failed with exit code " ++ show n ++ " and error: " ++ err
| m4lvin/HasCacBDD | hs/Data/HasCacBDD/Visuals.hs | gpl-2.0 | 2,950 | 0 | 17 | 797 | 906 | 465 | 441 | 56 | 3 |
module Quenelle.File (
TopLevelExpr(..),
ExprLocation(..),
moduleExprsFromFile,
moduleExprsFromString
) where
import Control.Applicative
import Data.Maybe
import Language.Python.Common.AST
import Language.Python.Common.SrcLocation
import Language.Python.Version2.Parser
import Quenelle.Lens
import Quenelle.Normalize
-- | A source position: file name and line number.
data ExprLocation = ExprLocation !String !Int
-- | Rendered as the conventional "file:line".
instance Show ExprLocation where
show (ExprLocation filename line) = filename ++ ":" ++ show line
-- | An expression extracted from a module: the statement it came from, its
-- source location, and its normalized form.
data TopLevelExpr = TopLevelExpr StatementSpan ExprLocation QExpr
-- | Parse Python 2 source text and collect all top-level expressions, or
-- return the parse error rendered as a 'String'.
moduleExprsFromString :: FilePath -> String -> Either String [TopLevelExpr]
moduleExprsFromString filename contents =
case parseModule contents filename of
Left err -> Left $ show err
Right (m, _) -> Right $ allModuleExprs m
-- | Like 'moduleExprsFromString' but reading the source from a file.
moduleExprsFromFile :: FilePath -> IO (Either String [TopLevelExpr])
moduleExprsFromFile filename = moduleExprsFromString filename <$> readFile filename
-- | Collect the expressions of every statement in a parsed module.
allModuleExprs :: ModuleSpan -> [TopLevelExpr]
allModuleExprs (Module stmts) = concatMap stmtExprs stmts
-- | Extract every expression occurring in a statement, recursing into
-- nested suites (loop/with bodies, else branches).  Statement forms with no
-- expressions (and any unhandled forms) yield the empty list.
stmtExprs :: StatementSpan -> [TopLevelExpr]
stmtExprs s@(While cond body else_ _) = [normalizeAndSpan s cond] ++ suiteExprs body ++ suiteExprs else_
stmtExprs s@(For tgts gen body else_ _) = map (normalizeAndSpan s) (tgts ++ [gen]) ++ suiteExprs body ++ suiteExprs else_
stmtExprs s@(Assign tos expr _) = map (normalizeAndSpan s) (tos ++ [expr])
stmtExprs s@(AugmentedAssign lhs _ rhs _) = map (normalizeAndSpan s) [lhs, rhs]
stmtExprs s@(Return (Just expr) _) = [normalizeAndSpan s expr]
-- NOTE(review): the 's@' binding below is unused (warning fodder); '_' would do.
stmtExprs s@(Return Nothing _) = []
stmtExprs s@(With ctxt body _) = map (normalizeAndSpan s) (concatMap flattenWithContext ctxt) ++ suiteExprs body
stmtExprs s@(Delete exprs _) = map (normalizeAndSpan s) exprs
stmtExprs s@(StmtExpr e _) = [normalizeAndSpan s e]
stmtExprs s@(Assert exprs _) = map (normalizeAndSpan s) exprs
stmtExprs s@(Print _ exprs _ _) = map (normalizeAndSpan s) exprs
stmtExprs s@(Exec expr scope _) = map (normalizeAndSpan s) $ expr : flattenExecScope scope
stmtExprs _ = []
-- | Flatten the optional globals/locals pair of an @exec@ statement into a
-- plain list.  Generalised from @ExprSpan@ to any element type; existing
-- callers are unaffected.
flattenExecScope :: Maybe (a, Maybe a) -> [a]
flattenExecScope (Just (l, Just r)) = [l, r]
flattenExecScope (Just (l, Nothing)) = [l]
flattenExecScope Nothing = []
-- | Flatten one @with@-statement context item (context expression plus
-- optional @as@ target) into a plain list.  Generalised from @ExprSpan@ to
-- any element type; existing callers are unaffected.
flattenWithContext :: (a, Maybe a) -> [a]
flattenWithContext (e1, Just e2) = [e1, e2]
flattenWithContext (e1, Nothing) = [e1]
-- | Expressions of every statement in a suite (a block of statements).
suiteExprs :: SuiteSpan -> [TopLevelExpr]
suiteExprs = concatMap stmtExprs
-- | Pair an expression with its enclosing statement, its source location,
-- and its normalized form.
normalizeAndSpan :: StatementSpan -> ExprSpan -> TopLevelExpr
normalizeAndSpan s e = TopLevelExpr s (exprLocation (expr_annot e)) (normalizeExpr e)
-- Convert a language-python span to our location; spans without a usable
-- position map to "unknown":0.  (No type signature in the original.)
exprLocation (SpanCoLinear filename row _ _) = ExprLocation filename row
exprLocation (SpanMultiLine filename row _ _ _) = ExprLocation filename row
exprLocation (SpanPoint filename row _) = ExprLocation filename row
exprLocation _ = ExprLocation "unknown" 0
| philipturnbull/quenelle | src/Quenelle/File.hs | gpl-2.0 | 2,868 | 0 | 10 | 426 | 1,055 | 548 | 507 | 58 | 2 |
module HMbo (module X) where
import HMbo.ManyBodyOperator as X
import HMbo.Dim as X
import HMbo.Ket as X
import HMbo.Amplitude as X
| d-meiser/hmbo | src/HMbo.hs | gpl-3.0 | 134 | 0 | 4 | 23 | 38 | 27 | 11 | 5 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Lamdu.GUI.ExpressionGui
( ExpressionGui(..), egWidget
, fromValueWidget
, scaleFromTop
, hbox, hboxSpaced, addBelow
, addType -- TODO: s/type/info
, TypeStyle(..)
, MyPrecedence(..), ParentPrecedence(..), Precedence
, parenify, wrapParenify
, wrapExpression, wrapDelegated
-- ExprGuiM:
, makeNameEdit
, withBgColor
-- TODO: Maybe move to ExpressionGui.Collapser:
, Collapser(..), makeCollapser
, makeLabel, makeColoredLabel
, makeFocusableView
, makeRow
, makeNameView
, addInferredTypes
, truncateSize
) where
import Control.Applicative ((<$>))
import Control.Lens (Lens')
import Control.Lens.Operators
import Control.Monad ((<=<))
import Control.MonadA (MonadA)
import Data.Function (on)
import Data.Store.Guid (Guid)
import Data.Store.Transaction (Transaction)
import Data.Vector.Vector2 (Vector2(..))
import Graphics.UI.Bottle.Animation (AnimId, Layer)
import Graphics.UI.Bottle.Widget (Widget)
import Graphics.UI.Bottle.Widgets.Box (KBox)
import Lamdu.Config (Config)
import Lamdu.GUI.ExpressionGui.Monad (ExprGuiM)
import Lamdu.GUI.ExpressionGui.Types (WidgetT, MyPrecedence(..), ParentPrecedence(..), Precedence, ExpressionGui(..), egWidget, egAlignment)
import qualified Control.Lens as Lens
import qualified Data.List as List
import qualified Data.Store.Transaction as Transaction
import qualified Graphics.DrawingCombinators as Draw
import qualified Graphics.UI.Bottle.EventMap as EventMap
import qualified Graphics.UI.Bottle.Widget as Widget
import qualified Graphics.UI.Bottle.Widgets.Box as Box
import qualified Graphics.UI.Bottle.Widgets.FocusDelegator as FocusDelegator
import qualified Graphics.UI.Bottle.Widgets.Grid as Grid
import qualified Graphics.UI.Bottle.Widgets.Spacer as Spacer
import qualified Graphics.UI.Bottle.Widgets.TextEdit as TextEdit
import qualified Lamdu.Config as Config
import qualified Lamdu.Data.Anchors as Anchors
import qualified Lamdu.GUI.BottleWidgets as BWidgets
import qualified Lamdu.GUI.ExpressionGui.Monad as ExprGuiM
import qualified Lamdu.GUI.WidgetEnvT as WE
import qualified Lamdu.GUI.WidgetIds as WidgetIds
import qualified Lamdu.Sugar.Types as Sugar
-- | Wrap a plain widget as an ExpressionGui with centred (0.5) alignment.
fromValueWidget :: WidgetT m -> ExpressionGui m
fromValueWidget widget = ExpressionGui widget 0.5
-- | Scale the given ExpressionGui without moving its top alignment
-- point:
scaleFromTop :: Vector2 Widget.R -> ExpressionGui m -> ExpressionGui m
scaleFromTop ratio (ExpressionGui widget alignment) =
ExpressionGui (Widget.scale ratio widget) (alignment / (ratio ^. Lens._2))
-- | Clamp the widget to a new size, rescaling the vertical alignment so it
-- still refers to the same point of the (now truncated) widget.
truncateSize :: Vector2 Widget.R -> ExpressionGui m -> ExpressionGui m
truncateSize newSize (ExpressionGui widget alignment) =
ExpressionGui
(widget & Widget.wSize .~ newSize)
(alignment * (widget ^. Widget.wSize / newSize) ^. Lens._2)
-- | Horizontal composition; the result's alignment is taken from the first
-- element.  Calls 'error' on an empty list.
hbox :: [ExpressionGui m] -> ExpressionGui m
hbox guis =
ExpressionGui (Box.toWidget box) $
case box ^. Box.boxContent of
((_, x) : _) -> x ^. Grid.elementAlign . Lens._2
_ -> error "hbox must not get empty list :("
where
box = Box.make Box.horizontal $ map f guis
f (ExpressionGui widget alignment) = (Vector2 0.5 alignment, widget)
-- | Like 'hbox' but with a standard space widget between the elements.
hboxSpaced :: [ExpressionGui m] -> ExpressionGui m
hboxSpaced = hbox . List.intersperse (fromValueWidget BWidgets.stdSpaceWidget)
-- | Convert a keyed box back to an ExpressionGui, taking the alignment from
-- the element keyed 'True' (which 'addBelow' always inserts first).
fromBox :: KBox Bool (Transaction m) -> ExpressionGui m
fromBox box =
ExpressionGui (Box.toWidget box) alignment
where
alignment =
maybe (error "True disappeared from box list?!")
(^. Grid.elementAlign . Lens._2) .
lookup True $ box ^. Box.boxContent
-- | Stack extra widgets below an ExpressionGui; the gui itself keeps its
-- alignment (it is the 'True'-keyed element that 'fromBox' looks up).
addBelow ::
Widget.R ->
[(Box.Alignment, WidgetT m)] ->
ExpressionGui m ->
ExpressionGui m
addBelow egHAlign ws eg =
fromBox . Box.makeKeyed Box.vertical $
(True, (Vector2 egHAlign (eg ^. egAlignment), eg ^. egWidget)) :
map ((,) False) ws
data TypeStyle = HorizLine | Background
wWidth :: Lens' (Widget f) Widget.R
wWidth = Widget.wSize . Lens._1
-- | Annotate an expression gui with its inferred type widgets, rendered
-- below it, separated either by a horizontal line or on a background.
-- No-op when there are no type edits.
addType ::
Config ->
TypeStyle ->
Widget.Id ->
[WidgetT m] ->
ExpressionGui m ->
ExpressionGui m
addType _ _ _ [] eg = eg
addType config style exprId typeEdits eg =
addBelow 0.5 items eg
where
items = middleElement : [(0.5, annotatedTypes)]
-- Separator between the expression and its types, per chosen style.
middleElement =
case style of
HorizLine -> (0.5, Spacer.makeHorizLine underlineId (Vector2 width 1))
Background -> (0.5, Spacer.makeWidget 5)
-- Centre the type box under the expression at the shared width.
annotatedTypes =
addBackground . (wWidth .~ width) $
Widget.translate (Vector2 ((width - typesBox ^. wWidth)/2) 0) typesBox
width = on max (^. wWidth) (eg ^. egWidget) typesBox
typesBox = Box.vboxCentered typeEdits
-- Two or more candidate types means the inference is conflicting.
isError = length typeEdits >= 2
bgAnimId = Widget.toAnimId exprId ++ ["type background"]
addBackground = maybe id (Widget.backgroundColor (Config.layerTypes (Config.layers config)) bgAnimId) bgColor
-- Error colour wins; otherwise only the Background style gets a colour
-- (the Maybe-monad match fails for HorizLine, yielding Nothing).
bgColor
| isError = Just $ Config.inferredTypeErrorBGColor config
| otherwise = do
Background <- Just style
return $ Config.inferredTypeBGColor config
underlineId = WidgetIds.underlineId $ Widget.toAnimId exprId
-- | Focus-delegator keys/docs for entering and leaving a subexpression,
-- taken from the user-configurable key bindings.
exprFocusDelegatorConfig :: Config -> FocusDelegator.Config
exprFocusDelegatorConfig config = FocusDelegator.Config
{ FocusDelegator.startDelegatingKeys = Config.enterSubexpressionKeys config
, FocusDelegator.startDelegatingDoc = EventMap.Doc ["Navigation", "Enter subexpression"]
, FocusDelegator.stopDelegatingKeys = Config.leaveSubexpressionKeys config
, FocusDelegator.stopDelegatingDoc = EventMap.Doc ["Navigation", "Leave subexpression"]
}
-- ExprGuiM GUIs (TODO: Move to Monad.hs?)
disallowedNameChars :: [(Char, EventMap.IsShifted)]
disallowedNameChars =
EventMap.anyShiftedChars "[]\\`()" ++
[ ('0', EventMap.Shifted)
, ('9', EventMap.Shifted)
]
makeBridge ::
MonadA m =>
(Widget.Id -> WE.WidgetEnvT m (Widget f)) ->
(Widget.Id -> WE.WidgetEnvT m (Widget f)) ->
Widget.Id -> WE.WidgetEnvT m (Widget f)
makeBridge mkFocused mkUnfocused myId = do
isFocused <- WE.isSubCursor myId
(if isFocused then mkFocused else mkUnfocused) myId
-- | Tint widgets showing auto-generated names; stored names are untouched.
nameSrcTint :: Config -> Sugar.NameSource -> Widget f -> Widget f
nameSrcTint config Sugar.AutoGeneratedName = Widget.tint $ Config.autoGeneratedNameTint config
nameSrcTint _ Sugar.StoredName = id
makeNameEdit ::
MonadA m => Sugar.Name -> Guid -> Widget.Id -> ExprGuiM m (WidgetT m)
makeNameEdit (Sugar.Name nameSrc nameCollision name) ident myId = do
nameProp <- ExprGuiM.transaction . (^. Transaction.mkProperty) $ Anchors.assocNameRef ident
collisionSuffixes <-
ExprGuiM.widgetEnv . makeCollisionSuffixLabels nameCollision $
Widget.toAnimId myId
config <- ExprGuiM.widgetEnv WE.readConfig
nameEdit <-
fmap (nameSrcTint config nameSrc) .
ExprGuiM.widgetEnv .
WE.localEnv (WE.envTextStyle . TextEdit.sEmptyFocusedString .~ bracketedName) $
makeEditor nameProp
return . Box.hboxCentered $ nameEdit : collisionSuffixes
where
bracketedName = concat ["<", name, ">"]
makeEditor property =
makeBridge (makeWordEdit property) (BWidgets.makeFocusableTextView name) myId
makeWordEdit =
BWidgets.makeWordEdit <&>
Lens.mapped . Lens.mapped . Widget.wEventMap %~
EventMap.filterSChars (curry (`notElem` disallowedNameChars))
wrapDelegated ::
MonadA m =>
Sugar.Payload Sugar.Name m a ->
FocusDelegator.Config -> FocusDelegator.IsDelegating ->
(Widget.Id -> ExprGuiM m (ExpressionGui m)) ->
Widget.Id -> ExprGuiM m (ExpressionGui m)
wrapDelegated pl fdConfig isDelegating f =
addInferredTypes pl <=<
ExprGuiM.wrapDelegated fdConfig isDelegating (egWidget %~) f
wrapExpression ::
MonadA m =>
Sugar.Payload Sugar.Name m a ->
(Widget.Id -> ExprGuiM m (ExpressionGui m)) ->
Widget.Id -> ExprGuiM m (ExpressionGui m)
wrapExpression pl f myId = do
config <- ExprGuiM.widgetEnv WE.readConfig
wrapDelegated pl (exprFocusDelegatorConfig config) FocusDelegator.Delegating f myId
makeLabel ::
MonadA m => String -> Widget.Id -> ExprGuiM m (WidgetT f)
makeLabel text myId = ExprGuiM.widgetEnv . BWidgets.makeLabel text $ Widget.toAnimId myId
makeColoredLabel ::
MonadA m => Int -> Draw.Color -> String -> Widget.Id -> ExprGuiM m (WidgetT f)
makeColoredLabel textSize color text myId =
ExprGuiM.localEnv (WE.setTextSizeColor textSize color) $
makeLabel text myId
makeFocusableView ::
(MonadA m, MonadA n) => Widget.Id -> ExpressionGui n -> ExprGuiM m (ExpressionGui n)
makeFocusableView myId gui =
ExprGuiM.widgetEnv $
egWidget (BWidgets.makeFocusableView myId) gui
parenify ::
MonadA m =>
ParentPrecedence -> MyPrecedence ->
(Widget.Id -> ExpressionGui f -> ExprGuiM m (ExpressionGui f)) ->
(Widget.Id -> ExprGuiM m (ExpressionGui f)) ->
Widget.Id -> ExprGuiM m (ExpressionGui f)
parenify (ParentPrecedence parent) (MyPrecedence prec) addParens mkWidget myId
| parent > prec = addParens myId =<< mkWidget myId
| otherwise = mkWidget myId
wrapParenify ::
MonadA m =>
Sugar.Payload Sugar.Name m a ->
ParentPrecedence -> MyPrecedence ->
(Widget.Id -> ExpressionGui m -> ExprGuiM m (ExpressionGui m)) ->
(Widget.Id -> ExprGuiM m (ExpressionGui m)) ->
Widget.Id -> ExprGuiM m (ExpressionGui m)
wrapParenify pl parentPrec prec addParens =
wrapExpression pl . parenify parentPrec prec addParens
-- | Apply a background colour (on the given anim layer/id) to the gui's widget.
withBgColor :: Layer -> Draw.Color -> AnimId -> ExpressionGui m -> ExpressionGui m
withBgColor layer color animId =
egWidget %~ Widget.backgroundColor layer animId color
data Collapser m = Collapser
{ cMakeExpanded :: ExprGuiM m (ExpressionGui m)
, cMakeFocusedCompact :: ExprGuiM m (ExpressionGui m)
}
makeCollapser ::
MonadA m =>
FocusDelegator.Config ->
(Widget.Id -> Collapser m) ->
Widget.Id -> ExprGuiM m (ExpressionGui m)
makeCollapser fdConfig f =
ExprGuiM.wrapDelegated fdConfig FocusDelegator.NotDelegating (egWidget %~) $
\myId -> do
let Collapser makeExpanded makeFocusedCompact = f myId
-- TODO: This is just to detect whether cursor is in the full
-- expression. Even when it's not displayed, which may be wasteful
-- (even with laziness, at least the names are going to be read).
expandedEdit <- makeExpanded
-- We are inside a focus delegator, so if the cursor is on us it
-- means user entered our widget.
if expandedEdit ^. egWidget . Widget.wIsFocused
then return expandedEdit
else makeFocusedCompact
-- | Turn (horizontal alignment, gui) pairs into grid cells, combining each
-- given horizontal alignment with the gui's own vertical alignment.
makeRow :: [(Widget.R, ExpressionGui m)] -> [(Vector2 Widget.R, WidgetT m)]
makeRow =
map item
where
item (halign, ExpressionGui widget alignment) =
(Vector2 halign alignment, widget)
makeNameView :: MonadA m => Sugar.Name -> AnimId -> WE.WidgetEnvT m (Widget f)
makeNameView (Sugar.Name nameSrc collision name) animId = do
label <- BWidgets.makeLabel name animId
suffixLabels <- makeCollisionSuffixLabels collision $ animId ++ ["suffix"]
config <- WE.readConfig
return .
nameSrcTint config nameSrc .
Box.hboxCentered $ label : suffixLabels
makeCollisionSuffixLabels ::
MonadA m => Sugar.NameCollision -> AnimId -> WE.WidgetEnvT m [Widget f]
makeCollisionSuffixLabels Sugar.NoCollision _ = return []
makeCollisionSuffixLabels (Sugar.Collision suffix) animId = do
config <- WE.readConfig
let
onSuffixWidget =
Widget.backgroundColor (Config.layerNameCollisionBG (Config.layers config))
(animId ++ ["bg"]) (Config.collisionSuffixBGColor config) .
Widget.scale (realToFrac <$> Config.collisionSuffixScaleFactor config)
BWidgets.makeLabel (show suffix) animId
& (WE.localEnv . WE.setTextColor . Config.collisionSuffixTextColor) config
<&> (:[]) . onSuffixWidget
addInferredTypes ::
MonadA m =>
Sugar.Payload Sugar.Name m a ->
ExpressionGui m ->
ExprGuiM m (ExpressionGui m)
addInferredTypes exprPl eg = do
config <- ExprGuiM.widgetEnv WE.readConfig
typeEdits <-
exprPl ^. Sugar.plInferredTypes
& Lens.traversed . Lens.mapped . Lens.mapped .~
ExprGuiM.emptyPayload
& Lens.traversed (ExprGuiM.makeSubexpression 0)
<&>
map
( Widget.tint (Config.inferredTypeTint config)
. Widget.scale (realToFrac <$> Config.typeScaleFactor config)
. (^. egWidget)
)
return $ addType config Background exprId typeEdits eg
where
exprId = WidgetIds.fromGuid $ exprPl ^. Sugar.plGuid
| sinelaw/lamdu | Lamdu/GUI/ExpressionGui.hs | gpl-3.0 | 12,244 | 0 | 17 | 2,078 | 3,693 | 1,942 | 1,751 | -1 | -1 |
-- grid is a game written in Haskell
-- Copyright (C) 2018 karamellpelle@hotmail.com
--
-- This file is part of grid.
--
-- grid is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- grid is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with grid. If not, see <http://www.gnu.org/licenses/>.
--
module Main.GLFW.IsValid
(
mainIsValid,
) where
import MyPrelude
import File
import LoadM
import Game
import Game.GameData
import LevelTools.EditWorld
import LevelTools.Helpers
import LevelTools.EditWorld.Make
import LevelTools.Iteration
import LevelTools.Make
import LevelTools.File
import OpenGL
import OpenGL.Helpers
import Data.Maybe
import Graphics.UI.GLFW as GLFW
import System.IO
-- | Load the level file at @path@ as an edit world and report on stdout
-- whether it contains any invalid object (and if so, where).
-- (The previous dead binding @let init = error "no EnvInit"@, which also
-- shadowed 'Prelude.init' and was never forced, has been removed.)
mainIsValid path = do
    edit <- makeEditWorldIO path
    case findInvalidObject edit of
      Nothing ->
          putStrLn $ path ++ " is valid"
      Just (room, node, str) ->
          putStrLn $ path ++ " invalid in RoomIx: " ++ show room ++ " at " ++ show node ++
                     ": " ++ str
| karamellpelle/grid | designer/source/Main/GLFW/IsValid.hs | gpl-3.0 | 1,540 | 0 | 18 | 361 | 214 | 125 | 89 | 28 | 2 |
module CCTK.RandomNatural where
import System.Random
import GHC.Natural
-- | 'Random' support for 'Natural' by delegating to the 'Integer' instance.
instance Random Natural where
-- Sample in [a, b] via Integer and convert back; 'fromInteger' is safe
-- here because the draw stays within the (non-negative) requested bounds.
randomR (a,b) g = let (x,g') = randomR (toInteger a, toInteger b) g in (fromInteger x, g')
-- Deliberately partial: naturals are unbounded, so there is no sensible
-- whole-type default range for 'random'.
random = error "No reasonable default range for Natural"
| maugier/cctk | src/CCTK/RandomNatural.hs | gpl-3.0 | 260 | 0 | 12 | 47 | 92 | 49 | 43 | 6 | 0 |
module Utility where
import Control.Monad.Random
-- | Cartesian product of two lists: every @x@ from the first list paired
-- with every @y@ from the second, in row-major order.
meshGrid :: [a] -> [a] -> [(a, a)]
meshGrid xs ys = [(x, y) | x <- xs, y <- ys]
-- | Uniform draw mapped affinely onto roughly [-1, 1): doubles the raw
-- 'getRandom' sample and shifts it down by one.  (Assumes 'getRandom' for
-- 'Double' yields values in [0, 1) — TODO confirm for the installed
-- MonadRandom/random versions.)
randomDouble :: IO Double
randomDouble = (subtract 1 . (* 2)) <$> getRandom
-- | Monadic iteration collecting every state: applies the step @r@
-- repeatedly (strictly in the new state) until the predicate @p@ holds,
-- returning all visited states including the final one.
iterateUntilM :: (Monad m) => (a -> Bool) -> (a -> m a) -> a -> m [a]
iterateUntilM p r a
    | p a = return [a]
    | otherwise = do
        next <- r a
        rest <- iterateUntilM p r $! next
        return (a : rest)
| mrlovre/super-memory | Pro6/src/Utility.hs | gpl-3.0 | 430 | 0 | 12 | 121 | 218 | 117 | 101 | -1 | -1 |
module SugarScape.Core.Random
( randomBoolM
, randomExpM
, randomElemM
, randomElemsM
, randomShuffleM
, fisherYatesShuffle
, fisherYatesShuffleM
, avoidM
, rngSplits
) where
import System.Random
import Control.Monad.Random
import Control.Monad.State.Strict
import qualified Data.Map as Map
-- | Bernoulli trial: draw uniformly from [0, 1] and succeed exactly when
-- the draw does not exceed the probability @p@.
randomBoolM :: MonadRandom m => Double -> m Bool
randomBoolM p = do
    r <- getRandomR (0.0, 1.0)
    return (p >= r)
-- NOTE: THIS CODE INSPIRED BY Euterpea-1.0.0 (I didn't want to create dependencies and their implementation seems neat and tidy)
-- | Sample from the exponential distribution with rate @lambda@ by
-- inverse-transform sampling; 'avoidM 0' guards the logarithm against 0.
randomExpM :: MonadRandom m => Double -> m Double
randomExpM lambda = avoidM 0 >>= (\r -> return $ (-log r) / lambda)
-- | Pick one element uniformly at random.
-- NOTE(review): partial on the empty list — 'getRandomR (0, -1)' and '!!'
-- misbehave for @as == []@; callers must pass a non-empty list.
randomElemM :: MonadRandom m => [a] -> m a
randomElemM as = do
let len = length as
idx <- getRandomR (0, len - 1)
return (as !! idx)
-- | Draw @n@ elements independently and uniformly (with replacement) from
-- @as@.  Same partiality caveat as 'randomElemM': @as@ must be non-empty.
randomElemsM :: MonadRandom m => Int -> [a] -> m [a]
randomElemsM n as = replicateM n (randomElemM as)
-- NOTE: THIS CODE INSPIRED BY Euterpea-1.0.0 (I didn't want to create dependencies and their implementation seems neat and tidy)
-- | Keep sampling until a value different from @x@ is drawn (rejection
-- sampling); used e.g. to avoid 0 before taking a logarithm.
avoidM :: (Random a, Eq a, MonadRandom m) => a -> m a
avoidM x = do
r <- getRandom
if r == x
then avoidM x
else return r
-- | Shuffle using (and updating) the generator held in the 'MonadState'.
randomShuffleM :: (MonadState g m, RandomGen g, MonadRandom m) => [a] -> m [a]
randomShuffleM as = do
g <- get
let (as', g') = fisherYatesShuffle g as
put g'
return as'
-- Taken from https://wiki.haskell.org/Random_shuffle
-- | Randomly shuffle a list without the IO Monad
-- /O(N)/
-- Invariant: the accumulator map holds the partially shuffled prefix,
-- indexed 0..i; 'head'/'tail' are safe because the empty case is matched
-- first.  NOTE(review): lazy 'foldl' here could build thunks on very long
-- lists; 'foldl'' would be the strict choice.
fisherYatesShuffle :: RandomGen g => g -> [a] -> ([a], g)
fisherYatesShuffle gen0 [] = ([], gen0)
fisherYatesShuffle gen0 l = toElems $ foldl fisherYatesStep (initial (head l) gen0) (numerate (tail l))
where
toElems (x, y) = (Map.elems x, y)
numerate = zip [1..]
initial x gen' = (Map.singleton 0 x, gen')
-- One Fisher-Yates step: swap the new element x (position i) with a
-- uniformly chosen position j in [0, i].
fisherYatesStep :: RandomGen g => (Map.Map Int a, g) -> (Int, a) -> (Map.Map Int a, g)
fisherYatesStep (m, gen) (i, x) = ((Map.insert j x . Map.insert i (m Map.! j)) m, gen')
where
(j, gen') = randomR (0, i) gen
-- | Monadic Fisher-Yates shuffle: same algorithm as 'fisherYatesShuffle'
-- but drawing indices from an arbitrary 'MonadRandom' source.
fisherYatesShuffleM :: MonadRandom m
=> [a]
-> m [a]
fisherYatesShuffleM [] = return []
fisherYatesShuffleM l = do
lMap <- foldM fisherYatesStep (Map.singleton 0 (head l)) (numerate (tail l))
return $ Map.elems lMap
where
numerate = zip [1..]
-- Swap element i with a uniformly chosen j in [0, i].
fisherYatesStep :: MonadRandom m
=> Map.Map Int a
-> (Int, a)
-> m (Map.Map Int a)
fisherYatesStep m (i, x) = do
j <- getRandomR (0, i)
return ((Map.insert j x . Map.insert i (m Map.! j)) m)
-- | Split a generator @n@ times, returning the @n@ split-off generators and
-- the final remaining generator.  (As in the original, the list accumulates
-- newest-first.)
rngSplits :: RandomGen g => Int -> g -> ([g], g)
rngSplits count gen = go count gen []
  where
    go :: RandomGen g => Int -> g -> [g] -> ([g], g)
    go 0 g acc = (acc, g)
    go n g acc =
        let (gOut, gNext) = split g
        in go (n - 1) gNext (gOut : acc)
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
-- FIXME: better types in checkLevel
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveAnyClass #-}
-- |
-- Copyright : (c) 2010-2012 Simon Meier & Benedikt Schmidt
-- License : GPL v3 (see LICENSE)
--
-- Maintainer : Simon Meier <iridcode@gmail.com>
-- Portability : GHC only
--
-- Types to represent proofs.
module Theory.Proof (
-- * Utilities
LTree(..)
, mergeMapsWith
-- * Types
, ProofStep(..)
, DiffProofStep(..)
, Proof
, DiffProof
-- ** Paths inside proofs
, ProofPath
, atPath
, atPathDiff
, insertPaths
, insertPathsDiff
-- ** Folding/modifying proofs
, mapProofInfo
, mapDiffProofInfo
, foldProof
, foldDiffProof
, annotateProof
, annotateDiffProof
, ProofStatus(..)
, proofStepStatus
, diffProofStepStatus
-- ** Unfinished proofs
, sorry
, unproven
, diffSorry
, diffUnproven
-- ** Incremental proof construction
, IncrementalProof
, IncrementalDiffProof
, Prover
, DiffProver
, runProver
, runDiffProver
, mapProverProof
, mapDiffProverDiffProof
, orelse
, tryProver
, sorryProver
, sorryDiffProver
, oneStepProver
, oneStepDiffProver
, focus
, focusDiff
, checkAndExtendProver
, checkAndExtendDiffProver
, replaceSorryProver
, replaceDiffSorryProver
, contradictionProver
, contradictionDiffProver
-- ** Explicit representation of a fully automatic prover
, SolutionExtractor(..)
, AutoProver(..)
, runAutoProver
, runAutoDiffProver
-- ** Pretty Printing
, prettyProof
, prettyDiffProof
, prettyProofWith
, prettyDiffProofWith
, showProofStatus
, showDiffProofStatus
-- ** Parallel Strategy for exploring a proof
, parLTreeDFS
-- ** Small-step interface to the constraint solver
, module Theory.Constraint.Solver
) where
import GHC.Generics (Generic)
import Data.Binary
import Data.List
import qualified Data.Label as L
import qualified Data.Map as M
import Data.Maybe
-- import Data.Monoid
import Debug.Trace
import Control.Basics
import Control.DeepSeq
import qualified Control.Monad.State as S
import Control.Parallel.Strategies
import Theory.Constraint.Solver
import Theory.Model
import Theory.Text.Pretty
------------------------------------------------------------------------------
-- Utility: Trees with uniquely labelled edges.
------------------------------------------------------------------------------
-- | Trees with uniquely labelled edges.  This is the skeleton of every
-- proof: a node value plus a map from case names to subtrees.
data LTree l a = LNode
     { root     :: a                     -- ^ value stored at this node
     , children :: M.Map l (LTree l a)   -- ^ subtrees keyed by edge label
     }
     deriving( Eq, Ord, Show )

-- | Map over all node values, preserving the tree shape.
instance Functor (LTree l) where
    fmap f (LNode r cs) = LNode (f r) (M.map (fmap f) cs)

-- | Fold node values in pre-order (node before its children).
instance Foldable (LTree l) where
    foldMap f (LNode x cs) = f x `mappend` foldMap (foldMap f) cs

-- | Effectful traversal of node values, root first.
instance Traversable (LTree l) where
    traverse f (LNode x cs) = LNode <$> f x <*> traverse (traverse f) cs
-- | A parallel evaluation strategy well-suited for DFS traversal: As soon as
-- a node is forced it sparks off the computation of the number of case-maps
-- of all its children. This way most of the data is already evaluated, when
-- the actual DFS traversal visits it.
--
-- NOT used for now. It sometimes required too much memory.
parLTreeDFS :: Strategy (LTree l a)
parLTreeDFS (LNode x0 cs0) = do
    -- Evaluate each child's case-map to WHNF in parallel; recursion into
    -- grandchildren is deferred until the resulting map is demanded.
    cs0' <- (`parTraversable` cs0) $ \(LNode x cs) -> LNode x <$> rseq cs
    return $ LNode x0 (M.map (runEval . parLTreeDFS) cs0')
------------------------------------------------------------------------------
-- Utility: Merging maps
------------------------------------------------------------------------------
-- | /O(n+m)/. A generalized union operator for maps with differing types.
-- Keys present only on the left are mapped with the first function, keys
-- present only on the right with the second, and keys present in both are
-- combined with the third.
mergeMapsWith :: Ord k
              => (a -> c) -> (b -> c) -> (a -> b -> c)
              -> M.Map k a -> M.Map k b -> M.Map k c
mergeMapsWith fromL fromR fromBoth lm rm =
    extract <$> M.unionWith both taggedL taggedR
  where
    -- Tag each side so that after the union we still know the origin of
    -- every value: Left (Left _) = left only, Left (Right _) = right only,
    -- Right _ = combined from both maps.
    taggedL = M.map (Left . Left)  lm
    taggedR = M.map (Left . Right) rm

    -- 'M.unionWith' only calls this on keys present in both maps, so the
    -- arguments are always a left-tagged and a right-tagged value.
    both (Left (Left a)) (Left (Right b)) = Right (fromBoth a b)
    both _               _                = error "mergeMapsWith: impossible"

    extract (Left (Left a))  = fromL a
    extract (Left (Right b)) = fromR b
    extract (Right c)        = c
------------------------------------------------------------------------------
-- Proof Steps
------------------------------------------------------------------------------
-- | A proof step is a proof method together with additional context-dependent
-- information (typically the constraint system the method was applied to).
data ProofStep a = ProofStep
     { psMethod :: ProofMethod   -- ^ the constraint-solving step taken
     , psInfo :: a               -- ^ annotation attached to this step
     }
     deriving( Eq, Ord, Show, Generic, NFData, Binary )

-- | Map over the annotation only; the proof method is untouched.
instance Functor ProofStep where
    fmap f (ProofStep m i) = ProofStep m (f i)

instance Foldable ProofStep where
    foldMap f = f . psInfo

instance Traversable ProofStep where
    traverse f (ProofStep m i) = ProofStep m <$> f i

-- | Free variables are those of the method plus those of the annotation.
instance HasFrees a => HasFrees (ProofStep a) where
    foldFrees f (ProofStep m i) = foldFrees f m `mappend` foldFrees f i
    foldFreesOcc  _ _ = const mempty
    mapFrees f (ProofStep m i)  = ProofStep <$> mapFrees f m <*> mapFrees f i
-- | A diff proof step is a diff proof method together with additional
-- context-dependent information.  Mirrors 'ProofStep' for equivalence proofs.
data DiffProofStep a = DiffProofStep
     { dpsMethod :: DiffProofMethod   -- ^ the diff constraint-solving step taken
     , dpsInfo :: a                   -- ^ annotation attached to this step
     }
     deriving( Eq, Ord, Show, Generic, NFData, Binary )

-- | Map over the annotation only; the proof method is untouched.
instance Functor DiffProofStep where
    fmap f (DiffProofStep m i) = DiffProofStep m (f i)

instance Foldable DiffProofStep where
    foldMap f = f . dpsInfo

instance Traversable DiffProofStep where
    traverse f (DiffProofStep m i) = DiffProofStep m <$> f i

-- | Free variables are those of the method plus those of the annotation.
instance HasFrees a => HasFrees (DiffProofStep a) where
    foldFrees f (DiffProofStep m i) = foldFrees f m `mappend` foldFrees f i
    foldFreesOcc  _ _ = const mempty
    mapFrees f (DiffProofStep m i)  = DiffProofStep <$> mapFrees f m <*> mapFrees f i
------------------------------------------------------------------------------
-- Proof Trees
------------------------------------------------------------------------------
-- | A path to a subproof: the sequence of case names from the root to the
-- targeted node.
type ProofPath = [CaseName]

-- | A proof is a tree of proof steps whose edges are labelled with case names.
type Proof a = LTree CaseName (ProofStep a)

-- | A diff proof is a tree of diff proof steps whose edges are labelled with
-- case names.
type DiffProof a = LTree CaseName (DiffProofStep a)
-- Unfinished proofs
--------------------
-- | A single-node proof consisting of a 'Sorry' step with an optional reason.
sorry :: Maybe String -> a -> Proof a
sorry reason ann = LNode step M.empty
  where
    step = ProofStep (Sorry reason) ann

-- | A single-node diff proof consisting of a 'DiffSorry' step with an
-- optional reason.
diffSorry :: Maybe String -> a -> DiffProof a
diffSorry reason ann = LNode step M.empty
  where
    step = DiffProofStep (DiffSorry reason) ann

-- | A proof denoting an unproven part of the proof: a 'sorry' without reason.
unproven :: a -> Proof a
unproven ann = sorry Nothing ann

-- | A diff proof denoting an unproven part of the proof.
diffUnproven :: a -> DiffProof a
diffUnproven ann = diffSorry Nothing ann
-- Paths in proofs
------------------
-- | @prf `atPath` path@ returns the subproof at the @path@ in @prf@,
-- or 'Nothing' if some case name on the path does not exist.
atPath :: Proof a -> ProofPath -> Maybe (Proof a)
atPath = foldM (flip M.lookup . children)

-- | @prf `atPathDiff` path@ returns the diff subproof at the @path@ in @prf@,
-- or 'Nothing' if some case name on the path does not exist.
atPathDiff :: DiffProof a -> ProofPath -> Maybe (DiffProof a)
atPathDiff = foldM (flip M.lookup . children)
-- | @modifyAtPath f path prf@ applies @f@ to the subproof at @path@,
-- if there is one.  Fails ('Nothing') if the path does not exist or @f@
-- itself fails.
modifyAtPath :: (Proof a -> Maybe (Proof a)) -> ProofPath
             -> Proof a -> Maybe (Proof a)
modifyAtPath f = go
  where
    go []     prf = f prf
    go (l:ls) prf =
        case M.lookup l (children prf) of
            Nothing  -> Nothing
            Just sub -> do
                sub' <- go ls sub
                return prf { children = M.insert l sub' (children prf) }
-- | @modifyAtPathDiff f path prf@ applies @f@ to the diff subproof at
-- @path@, if there is one.  Fails ('Nothing') if the path does not exist
-- or @f@ itself fails.
modifyAtPathDiff :: (DiffProof a -> Maybe (DiffProof a)) -> ProofPath
                 -> DiffProof a -> Maybe (DiffProof a)
modifyAtPathDiff f = go
  where
    go []     prf = f prf
    go (l:ls) prf =
        case M.lookup l (children prf) of
            Nothing  -> Nothing
            Just sub -> do
                sub' <- go ls sub
                return prf { children = M.insert l sub' (children prf) }
-- | @insertPaths prf@ annotates every proof node with the path from the
-- root to that node.
insertPaths :: Proof a -> Proof (a, ProofPath)
insertPaths = go []
  where
    -- @rev@ accumulates the path in reverse; it is flipped right side up
    -- at each node.
    go rev (LNode ps cs) =
        LNode ((\x -> (x, reverse rev)) <$> ps)
              (M.mapWithKey (\n -> go (n : rev)) cs)
-- | @insertPathsDiff prf@ annotates every diff proof node with the path
-- from the root to that node.
insertPathsDiff :: DiffProof a -> DiffProof (a, ProofPath)
insertPathsDiff = go []
  where
    -- @rev@ accumulates the path in reverse; it is flipped right side up
    -- at each node.
    go rev (LNode ps cs) =
        LNode ((\x -> (x, reverse rev)) <$> ps)
              (M.mapWithKey (\n -> go (n : rev)) cs)
-- Utilities for dealing with proofs
------------------------------------
-- | Apply a function to the information of every proof step.
mapProofInfo :: (a -> b) -> Proof a -> Proof b
mapProofInfo f = fmap (fmap f)

-- | Apply a function to the information of every diff proof step.
mapDiffProofInfo :: (a -> b) -> DiffProof a -> DiffProof b
mapDiffProofInfo f = fmap (fmap f)
-- | @boundProofDepth bound prf@ bounds the depth of the proof @prf@ using
-- 'Sorry' steps to replace the cut sub-proofs.
boundProofDepth :: Int -> Proof a -> Proof a
boundProofDepth bound = go bound
  where
    go n (LNode ps@(ProofStep _ info) cs)
      | n <= 0    = sorry (Just $ "bound " ++ show bound ++ " hit") info
      | otherwise = LNode ps (M.map (go (n - 1)) cs)
-- | @boundDiffProofDepth bound prf@ bounds the depth of the diff proof
-- @prf@ using 'DiffSorry' steps to replace the cut sub-proofs.
boundDiffProofDepth :: Int -> DiffProof a -> DiffProof a
boundDiffProofDepth bound = go bound
  where
    go n (LNode ps@(DiffProofStep _ info) cs)
      | n <= 0    = diffSorry (Just $ "bound " ++ show bound ++ " hit") info
      | otherwise = LNode ps (M.map (go (n - 1)) cs)
-- | Fold a proof by mapping every step into a monoid and combining the
-- results in pre-order.
foldProof :: Monoid m => (ProofStep a -> m) -> Proof a -> m
foldProof f = go
  where
    go (LNode step cs) = f step `mappend` foldMap go cs
-- | Fold a diff proof by mapping every step into a monoid and combining
-- the results in pre-order.
foldDiffProof :: Monoid m => (DiffProofStep a -> m) -> DiffProof a -> m
foldDiffProof f = go
  where
    go (LNode step cs) = f step `mappend` foldMap go cs
-- | Annotate a proof in a bottom-up fashion: the new annotation of a node
-- is computed from its step and the new annotations of its children.
annotateProof :: (ProofStep a -> [b] -> b) -> Proof a -> Proof b
annotateProof f = go
  where
    go (LNode step@(ProofStep method _) cs) =
        LNode (ProofStep method (f step subInfos)) cs'
      where
        cs'      = go <$> cs
        subInfos = map (psInfo . root) (M.elems cs')
-- | Annotate a diff proof in a bottom-up fashion: the new annotation of a
-- node is computed from its step and the new annotations of its children.
annotateDiffProof :: (DiffProofStep a -> [b] -> b) -> DiffProof a -> DiffProof b
annotateDiffProof f = go
  where
    go (LNode step@(DiffProofStep method _) cs) =
        LNode (DiffProofStep method (f step subInfos)) cs'
      where
        cs'      = go <$> cs
        subInfos = map (dpsInfo . root) (M.elems cs')
-- Proof cutting
----------------
-- | The status of a 'Proof'.
data ProofStatus =
         UndeterminedProof  -- ^ All steps are unannotated
       | CompleteProof      -- ^ The proof is complete: no annotated sorry,
                            --  no annotated solved step
       | IncompleteProof    -- ^ There is an annotated sorry,
                            --  but no annotated solved step.
       | TraceFound         -- ^ There is an annotated solved step
       deriving ( Show, Generic, NFData, Binary )

-- | Combining statuses picks the "strongest" one:
-- 'TraceFound' > 'IncompleteProof' > 'CompleteProof' > 'UndeterminedProof'.
-- The clause order implements this priority; do not reorder.
instance Semigroup ProofStatus where
    TraceFound <> _ = TraceFound
    _ <> TraceFound = TraceFound
    IncompleteProof <> _ = IncompleteProof
    _ <> IncompleteProof = IncompleteProof
    _ <> CompleteProof = CompleteProof
    CompleteProof <> _ = CompleteProof
    UndeterminedProof <> UndeterminedProof = UndeterminedProof

-- | NOTE(review): 'mempty' is 'CompleteProof', not 'UndeterminedProof' —
-- an empty collection of steps counts as a complete proof.
instance Monoid ProofStatus where
    mempty = CompleteProof
-- | The status of a 'ProofStep': unannotated steps are undetermined;
-- annotated 'Solved' steps witness a trace, annotated 'Sorry' steps make
-- the proof incomplete, and any other annotated step is complete.
proofStepStatus :: ProofStep (Maybe a) -> ProofStatus
proofStepStatus (ProofStep method minfo) =
    case minfo of
        Nothing -> UndeterminedProof
        Just _  -> case method of
            Solved  -> TraceFound
            Sorry _ -> IncompleteProof
            _       -> CompleteProof
-- | The status of a 'DiffProofStep': unannotated steps are undetermined;
-- annotated 'DiffAttack' steps witness an attack, annotated 'DiffSorry'
-- steps make the proof incomplete, and any other annotated step is complete.
diffProofStepStatus :: DiffProofStep (Maybe a) -> ProofStatus
diffProofStepStatus (DiffProofStep method minfo) =
    case minfo of
        Nothing -> UndeterminedProof
        Just _  -> case method of
            DiffAttack  -> TraceFound
            DiffSorry _ -> IncompleteProof
            _           -> CompleteProof
{- TODO: Test and probably improve
-- | @proveSystem rules se@ tries to construct a proof that @se@ is valid.
-- This proof may contain 'Sorry' steps, if the prover is stuck. It can also be
-- of infinite depth, if the proof strategy loops.
proveSystemIterDeep :: ProofContext -> System -> Proof System
proveSystemIterDeep rules se0 =
fromJust $ asum $ map (prove se0 . round) $ iterate (*1.5) (3::Double)
where
prove :: System -> Int -> Maybe (Proof System)
prove se bound
| bound < 0 = Nothing
| otherwise =
case next of
[] -> pure $ sorry "prover stuck => possible attack found" se
xs -> asum $ map mkProof xs
where
next = do m <- possibleProofMethods se
(m,) <$> maybe mzero return (execProofMethod rules m se)
mkProof (method, cases) =
LNode (ProofStep method se) <$> traverse (`prove` (bound - 1)) cases
-}
-- | @checkProof rules se prf@ replays the proof @prf@ against the start
-- sequent @se@. A failure to apply a proof method is denoted by a resulting
-- proof step without an annotated sequent. An unhandled case is denoted using
-- the 'Sorry' proof method.
checkProof :: ProofContext
           -> (Int -> System -> Proof (Maybe System)) -- prover for new cases in depth
           -> Int          -- ^ Original depth
           -> System
           -> Proof a
           -> Proof (Maybe a, Maybe System)
checkProof ctxt prover d sys prf@(LNode (ProofStep method info) cs) =
    case (method, execProofMethod ctxt method sys) of
      -- A sorry step is kept as-is; its children are replayed without systems.
      (Sorry reason, _         ) -> sorryNode reason cs
      -- The method applies: recursively check each resulting case.
      (_           , Just cases) -> node method $ checkChildren cases
      -- The method no longer applies: wrap the old subproof under a sorry.
      (_           , Nothing   ) ->
          sorryNode (Just "invalid proof step encountered")
                    (M.singleton "" prf)
  where
    node m = LNode (ProofStep m (Just info, Just sys))
    sorryNode reason cases = node (Sorry reason) (M.map noSystemPrf cases)
    -- Keep the old annotation but mark that no replayed system exists.
    noSystemPrf = mapProofInfo (\i -> (Just i, Nothing))
    -- Merge the freshly computed cases with the existing subproofs:
    -- new-only cases go to the prover, old-only subproofs lose their system,
    -- matching cases are checked recursively one level deeper.
    checkChildren cases = mergeMapsWith
        unhandledCase noSystemPrf (checkProof ctxt prover (d + 1)) cases cs
      where
        unhandledCase = mapProofInfo ((,) Nothing) . prover d
-- | @checkDiffProof rules se prf@ replays the diff proof @prf@ against the
-- start sequent @se@. A failure to apply a proof method is denoted by a
-- resulting proof step without an annotated sequent. An unhandled case is
-- denoted using the 'DiffSorry' proof method.
checkDiffProof :: DiffProofContext
           -> (Int -> DiffSystem -> DiffProof (Maybe DiffSystem)) -- prover for new cases in depth
           -> Int          -- ^ Original depth
           -> DiffSystem
           -> DiffProof a
           -> DiffProof (Maybe a, Maybe DiffSystem)
checkDiffProof ctxt prover d sys prf@(LNode (DiffProofStep method info) cs) =
    case (method, execDiffProofMethod ctxt method sys) of
      -- A sorry step is kept as-is; its children are replayed without systems.
      (DiffSorry reason, _         ) -> sorryNode reason cs
      -- The method applies: recursively check each resulting case.
      (_           , Just cases) -> node method $ checkChildren cases
      -- The method no longer applies: wrap the old subproof under a sorry.
      (_           , Nothing   ) ->
          sorryNode (Just "invalid proof step encountered")
                    (M.singleton "" prf)
  where
    node m = LNode (DiffProofStep m (Just info, Just sys))
    sorryNode reason cases = node (DiffSorry reason) (M.map noSystemPrf cases)
    -- Keep the old annotation but mark that no replayed system exists.
    noSystemPrf = mapDiffProofInfo (\i -> (Just i, Nothing))
    -- Merge the freshly computed cases with the existing subproofs:
    -- new-only cases go to the prover, old-only subproofs lose their system,
    -- matching cases are checked recursively one level deeper.
    checkChildren cases = mergeMapsWith
        unhandledCase noSystemPrf (checkDiffProof ctxt prover (d + 1)) cases cs
      where
        unhandledCase = mapDiffProofInfo ((,) Nothing) . prover d
-- | Annotate a proof with the constraint systems of all intermediate steps
-- under the assumption that all proof steps are valid. If some proof steps
-- might be invalid, then you must use 'checkProof', which handles them
-- gracefully.
annotateWithSystems :: ProofContext -> System -> Proof () -> Proof System
annotateWithSystems ctxt =
    go
  where
    -- Here we are careful to construct the result such that an inspection of
    -- the proof does not force the recomputed constraint systems.
    go sysOrig (LNode (ProofStep method _) csOrig) =
        LNode (ProofStep method sysOrig) $ M.fromList $ do
            (name, prf) <- M.toList csOrig
            -- 'sysAnn' stays a thunk until the subproof is inspected.
            let sysAnn = extract ("case '" ++ name ++ "' non-existent") $
                             M.lookup name csAnn
            return (name, go sysAnn prf)
      where
        extract msg = fromMaybe (error $ "annotateWithSystems: " ++ msg)
        -- Re-execute the proof method; assumed to succeed (see haddock).
        csAnn = extract "proof method execution failed" $
                    execProofMethod ctxt method sysOrig
-- | Annotate a diff proof with the constraint systems of all intermediate
-- steps under the assumption that all proof steps are valid. If some proof
-- steps might be invalid, then you must use 'checkDiffProof', which handles
-- them gracefully.
annotateWithDiffSystems :: DiffProofContext -> DiffSystem -> DiffProof () -> DiffProof DiffSystem
annotateWithDiffSystems ctxt =
    go
  where
    -- Here we are careful to construct the result such that an inspection of
    -- the proof does not force the recomputed constraint systems.
    go sysOrig (LNode (DiffProofStep method _) csOrig) =
        LNode (DiffProofStep method sysOrig) $ M.fromList $ do
            (name, prf) <- M.toList csOrig
            -- 'sysAnn' stays a thunk until the subproof is inspected.
            let sysAnn = extract ("case '" ++ name ++ "' non-existent") $
                             M.lookup name csAnn
            return (name, go sysAnn prf)
      where
        -- FIX: error prefix previously said "annotateWithSystems", which
        -- misattributed failures of this (diff) function to the non-diff one.
        extract msg = fromMaybe (error $ "annotateWithDiffSystems: " ++ msg)
        -- Re-execute the diff proof method; assumed to succeed (see haddock).
        csAnn = extract "diff proof method execution failed" $
                    execDiffProofMethod ctxt method sysOrig
------------------------------------------------------------------------------
-- Provers: the interface to the outside world.
------------------------------------------------------------------------------
-- | Incremental proofs are used to represent intermediate results of proof
-- checking/construction.  A step annotated 'Nothing' has no (re)computed
-- constraint system yet.
type IncrementalProof = Proof (Maybe System)

-- | Incremental diff proofs are used to represent intermediate results of
-- proof checking/construction.
type IncrementalDiffProof = DiffProof (Maybe DiffSystem)
-- | Provers whose sequencing is handled via the 'Monoid' instance.
--
-- > p1 `mappend` p2
--
-- Is a prover that first runs p1 and then p2 on the resulting proof.
-- A prover returns 'Nothing' if it fails to make progress.
newtype Prover =  Prover
          { runProver
              :: ProofContext              -- proof rules to use
              -> Int                       -- proof depth
              -> System                    -- original sequent to start with
              -> IncrementalProof          -- original proof
              -> Maybe IncrementalProof    -- resulting proof
          }

-- | Kleisli composition: the second prover consumes the first one's output;
-- the whole thing fails if either prover fails.
instance Semigroup Prover where
    p1 <> p2 = Prover $ \ctxt d se ->
        runProver p1 ctxt d se >=> runProver p2 ctxt d se

-- | The identity prover returns the proof unchanged.
instance Monoid Prover where
    mempty          = Prover $ \_ _ _ -> Just
-- | Diff provers whose sequencing is handled via the 'Monoid' instance.
--
-- > p1 `mappend` p2
--
-- Is a prover that first runs p1 and then p2 on the resulting proof.
-- A prover returns 'Nothing' if it fails to make progress.
newtype DiffProver =  DiffProver
          { runDiffProver
              :: DiffProofContext              -- proof rules to use
              -> Int                           -- proof depth
              -> DiffSystem                    -- original sequent to start with
              -> IncrementalDiffProof          -- original proof
              -> Maybe IncrementalDiffProof    -- resulting proof
          }

-- | Kleisli composition: the second prover consumes the first one's output;
-- the whole thing fails if either prover fails.
instance Semigroup DiffProver where
    p1 <> p2 = DiffProver $ \ctxt d se ->
        runDiffProver p1 ctxt d se >=> runDiffProver p2 ctxt d se

-- | The identity prover returns the proof unchanged.
instance Monoid DiffProver where
    mempty          = DiffProver $ \_ _ _ -> Just
-- | Map the proof generated by the prover; failure is propagated unchanged.
mapProverProof :: (IncrementalProof -> IncrementalProof) -> Prover -> Prover
mapProverProof f p = Prover $ \ctxt d se prf ->
    fmap f (runProver p ctxt d se prf)

-- | Map the diff proof generated by the prover; failure is propagated
-- unchanged.
mapDiffProverDiffProof :: (IncrementalDiffProof -> IncrementalDiffProof) -> DiffProver -> DiffProver
mapDiffProverDiffProof f p = DiffProver $ \ctxt d se prf ->
    fmap f (runDiffProver p ctxt d se prf)
-- | Prover that always fails.
failProver :: Prover
failProver = Prover $ \_ _ _ _ -> Nothing

-- | Diff prover that always fails.
failDiffProver :: DiffProver
failDiffProver = DiffProver $ \_ _ _ _ -> Nothing
-- | Resorts to the second prover, if the first one is not successful.
orelse :: Prover -> Prover -> Prover
orelse p1 p2 = Prover $ \ctxt d se prf ->
    case runProver p1 ctxt d se prf of
        Nothing -> runProver p2 ctxt d se prf
        result  -> result

-- | Resorts to the second diff prover, if the first one is not successful.
orelseDiff :: DiffProver -> DiffProver -> DiffProver
orelseDiff p1 p2 = DiffProver $ \ctxt d se prf ->
    case runDiffProver p1 ctxt d se prf of
        Nothing -> runDiffProver p2 ctxt d se prf
        result  -> result
-- | Try to apply a prover. If it fails, just return the original proof
-- (the identity prover 'mempty' always succeeds).
tryProver :: Prover -> Prover
tryProver p = p `orelse` mempty
-- | Try to execute one proof step using the given proof method.  Fails if
-- the method does not apply to the current sequent; on success every
-- resulting case becomes an 'unproven' leaf.
oneStepProver :: ProofMethod -> Prover
oneStepProver method = Prover $ \ctxt _ se _ ->
    mkNode se <$> execProofMethod ctxt method se
  where
    mkNode se cases =
        LNode (ProofStep method (Just se)) (M.map (unproven . Just) cases)

-- | Try to execute one diff proof step using the given proof method.
-- Fails if the method does not apply; on success every resulting case
-- becomes a 'diffUnproven' leaf.
oneStepDiffProver :: DiffProofMethod -> DiffProver
oneStepDiffProver method = DiffProver $ \ctxt _ se _ ->
    mkNode se <$> execDiffProofMethod ctxt method se
  where
    mkNode se cases =
        LNode (DiffProofStep method (Just se)) (M.map (diffUnproven . Just) cases)
-- | Replace the current proof with a sorry step and the given reason.
-- Always succeeds.
sorryProver :: Maybe String -> Prover
sorryProver reason = Prover $ \_ _ se _ ->
    Just (sorry reason (Just se))

-- | Replace the current diff proof with a sorry step and the given reason.
-- Always succeeds.
sorryDiffProver :: Maybe String -> DiffProver
sorryDiffProver reason = DiffProver $ \_ _ se _ ->
    Just (diffSorry reason (Just se))
-- | Apply a prover only to a sub-proof, fails if the subproof doesn't exist
-- or the subproof's step carries no annotated constraint system.
focus :: ProofPath -> Prover -> Prover
focus []   prover = prover
focus path prover =
    Prover $ \ctxt d _ prf ->
        -- Depth is increased by the path length so bounds stay meaningful.
        modifyAtPath (prover' ctxt (d + length path)) path prf
  where
    -- Restart the prover from the focused node, using its annotated
    -- system as the new start sequent.
    prover' ctxt d prf = do
        se <- psInfo (root prf)
        runProver prover ctxt d se prf

-- | Apply a diff prover only to a sub-proof, fails if the subproof doesn't
-- exist or the subproof's step carries no annotated constraint system.
focusDiff :: ProofPath -> DiffProver -> DiffProver
focusDiff []   prover = prover
focusDiff path prover =
    DiffProver $ \ctxt d _ prf ->
        -- Depth is increased by the path length so bounds stay meaningful.
        modifyAtPathDiff (prover' ctxt (d + length path)) path prf
  where
    prover' ctxt d prf = do
        se <- dpsInfo (root prf)
        runDiffProver prover ctxt d se prf
-- | Check the proof and handle new cases using the given prover.
-- Always succeeds; cases the prover cannot handle become sorry steps.
checkAndExtendProver :: Prover -> Prover
checkAndExtendProver prover0 = Prover $ \ctxt d se prf ->
    return $ mapProofInfo snd $ checkProof ctxt (prover ctxt) d se prf
  where
    unhandledCase   = sorry (Just "unhandled case") Nothing
    -- Run the given prover on a fresh sorry node; fall back to the sorry
    -- node itself if the prover fails.
    prover ctxt d se =
        fromMaybe unhandledCase $ runProver prover0 ctxt d se unhandledCase

-- | Check the diff proof and handle new cases using the given prover.
-- Always succeeds; cases the prover cannot handle become sorry steps.
checkAndExtendDiffProver :: DiffProver -> DiffProver
checkAndExtendDiffProver prover0 = DiffProver $ \ctxt d se prf ->
    return $ mapDiffProofInfo snd $ checkDiffProof ctxt (prover ctxt) d se prf
  where
    unhandledCase   = diffSorry (Just "unhandled case") Nothing
    prover ctxt d se =
        fromMaybe unhandledCase $ runDiffProver prover0 ctxt d se unhandledCase
-- | Replace all annotated sorry steps using the given prover.
-- Always succeeds; sorry steps the prover cannot replace stay unchanged.
replaceSorryProver :: Prover -> Prover
replaceSorryProver prover0 = Prover prover
  where
    prover ctxt d _ = return . replace
      where
        -- Only sorry steps WITH an annotated system are retried; all other
        -- nodes are kept and their children processed recursively.
        replace prf@(LNode (ProofStep (Sorry _) (Just se)) _) =
            fromMaybe prf $ runProver prover0 ctxt d se prf
        replace (LNode ps cases) =
            LNode ps $ M.map replace cases

-- | Replace all annotated sorry steps using the given diff prover.
-- Always succeeds; sorry steps the prover cannot replace stay unchanged.
replaceDiffSorryProver :: DiffProver -> DiffProver
replaceDiffSorryProver prover0 = DiffProver prover
  where
    prover ctxt d _ = return . replace
      where
        replace prf@(LNode (DiffProofStep (DiffSorry _) (Just se)) _) =
            fromMaybe prf $ runDiffProver prover0 ctxt d se prf
        replace (LNode ps cases) =
            LNode ps $ M.map replace cases
-- | Use the first prover that works; fails if none of them does.
firstProver :: [Prover] -> Prover
firstProver []       = failProver
firstProver (p : ps) = p `orelse` firstProver ps
-- | Prover that does one contradiction step, trying each contradiction
-- derivable from the current system in turn.  Fails if there is none.
contradictionProver :: Prover
contradictionProver = Prover $ \ctxt d sys prf ->
    runProver
        (firstProver $ map oneStepProver $
            (Contradiction . Just <$> contradictions ctxt sys))
        ctxt d sys prf
-- | Use the first diff prover that works; fails if none of them does.
firstDiffProver :: [DiffProver] -> DiffProver
firstDiffProver []       = failDiffProver
firstDiffProver (p : ps) = p `orelseDiff` firstDiffProver ps
-- | Diff Prover that does one contradiction step if possible.  Requires the
-- diff system to be in a backward-search state (current rule, side and
-- single-side system all set); fails otherwise.
contradictionDiffProver :: DiffProver
contradictionDiffProver = DiffProver $ \ctxt d sys prf ->
    case (L.get dsCurrentRule sys, L.get dsSide sys, L.get dsSystem sys) of
         (Just _, Just s, Just sys') -> runDiffProver
            (firstDiffProver $ map oneStepDiffProver $
                -- Contradictions are computed on the side-local system.
                (DiffBackwardSearchStep . Contradiction . Just <$> contradictions (eitherProofContext ctxt s) sys'))
                ctxt d sys prf
         (_     , _     , _        ) -> Nothing
------------------------------------------------------------------------------
-- Automatic Prover's
------------------------------------------------------------------------------
-- | How a finished attack (solved step) is extracted from the search tree.
data SolutionExtractor = CutDFS | CutBFS | CutSingleThreadDFS | CutNothing
    deriving( Eq, Ord, Show, Read, Generic, NFData, Binary )

-- | Configuration of the fully automatic prover.
data AutoProver = AutoProver
    { apDefaultHeuristic :: Heuristic          -- ^ used when the context sets none
    , apBound            :: Maybe Int          -- ^ optional proof-depth bound
    , apCut              :: SolutionExtractor  -- ^ attack-extraction strategy
    }
    deriving ( Generic, NFData, Binary )
-- | Turn an 'AutoProver' configuration into a 'Prover': run the automatic
-- DFS search (optionally depth-bounded) and apply the configured
-- attack-extraction strategy to the result.
runAutoProver :: AutoProver -> Prover
runAutoProver (AutoProver defaultHeuristic bound cut) =
    mapProverProof cutSolved $ maybe id boundProver bound autoProver
  where
    cutSolved = case cut of
      CutDFS             -> cutOnSolvedDFS
      CutBFS             -> cutOnSolvedBFS
      CutSingleThreadDFS -> cutOnSolvedSingleThreadDFS
      CutNothing         -> id

    -- | The standard automatic prover that ignores the existing proof and
    -- tries to find one by itself.
    autoProver :: Prover
    autoProver = Prover $ \ctxt depth sys _ ->
        return $ fmap (fmap Just)
               $ annotateWithSystems ctxt sys
               $ proveSystemDFS (heuristic ctxt) ctxt depth sys

    -- The context's heuristic takes precedence over the configured default.
    heuristic ctxt = fromMaybe defaultHeuristic $ L.get pcHeuristic ctxt

    -- | Bound the depth of proofs generated by the given prover.
    boundProver :: Int -> Prover -> Prover
    boundProver b p = Prover $ \ctxt d se prf ->
        boundProofDepth b <$> runProver p ctxt d se prf
-- | Turn an 'AutoProver' configuration into a 'DiffProver': run the
-- automatic DFS search (optionally depth-bounded) and apply the configured
-- attack-extraction strategy to the result.
runAutoDiffProver :: AutoProver -> DiffProver
runAutoDiffProver (AutoProver defaultHeuristic bound cut) =
    mapDiffProverDiffProof cutSolved $ maybe id boundProver bound autoProver
  where
    cutSolved = case cut of
      CutDFS             -> cutOnSolvedDFSDiff
      CutBFS             -> cutOnSolvedBFSDiff
      CutSingleThreadDFS -> cutOnSolvedSingleThreadDFSDiff
      CutNothing         -> id

    -- | The standard automatic prover that ignores the existing proof and
    -- tries to find one by itself.
    autoProver :: DiffProver
    autoProver = DiffProver $ \ctxt depth sys _ ->
        return $ fmap (fmap Just)
               $ annotateWithDiffSystems ctxt sys
               $ proveDiffSystemDFS (heuristic ctxt) ctxt depth sys

    -- The left proof context's heuristic takes precedence over the
    -- configured default.
    heuristic ctxt = fromMaybe defaultHeuristic $ L.get pcHeuristic $ L.get dpcPCLeft ctxt

    -- | Bound the depth of proofs generated by the given prover.
    boundProver :: Int -> DiffProver -> DiffProver
    boundProver b p = DiffProver $ \ctxt d se prf ->
        boundDiffProofDepth b <$> runDiffProver p ctxt d se prf
-- | The result of one pass of iterative deepening: either no solved step
-- exists, the depth bound was hit so we cannot tell, or a solved step was
-- found at the given path.
data IterDeepRes = NoSolution | MaybeNoSolution | Solution ProofPath

-- | 'Solution' dominates everything, 'MaybeNoSolution' dominates
-- 'NoSolution'.  The first clause is lazy in its right argument, so a
-- left-to-right 'foldMap' short-circuits on the first solution found.
instance Semigroup IterDeepRes where
    x@(Solution _)  <> _               = x
    _               <> y@(Solution _)  = y
    MaybeNoSolution <> _               = MaybeNoSolution
    _               <> MaybeNoSolution = MaybeNoSolution
    NoSolution      <> NoSolution      = NoSolution

instance Monoid IterDeepRes where
    mempty = NoSolution
-- | @cutOnSolvedSingleThreadDFS prf@ removes all other cases if an attack is
-- found. The attack search is performed using a single-thread DFS traversal.
--
-- FIXME: Note that this function may use a lot of space, as it holds onto the
-- whole proof tree.
cutOnSolvedSingleThreadDFS :: Proof (Maybe a) -> Proof (Maybe a)
cutOnSolvedSingleThreadDFS prf0 =
    go $ insertPaths prf0
  where
    go prf = case findSolved prf of
        NoSolution      -> prf0
        Solution path   -> extractSolved path prf0
        MaybeNoSolution -> error "Theory.Constraint.cutOnSolvedSingleThreadDFS: impossible, MaybeNoSolution in single thread dfs"
      where
        -- Left-to-right DFS; the 'Semigroup' of 'IterDeepRes' short-circuits
        -- on the first 'Solution'.
        findSolved node = case node of
            -- do not search in nodes that are not annotated
            LNode (ProofStep _      (Nothing, _   )) _  -> NoSolution
            LNode (ProofStep Solved (Just _ , path)) _  -> Solution path
            LNode (ProofStep _      (Just _ , _   )) cs ->
                foldMap findSolved cs

        -- Prune the tree to the single branch leading to the solved step.
        extractSolved []         p               = p
        extractSolved (label:ps) (LNode pstep m) = case M.lookup label m of
            Just subprf ->
                LNode pstep (M.fromList [(label, extractSolved ps subprf)])
            Nothing     ->
                error "Theory.Constraint.cutOnSolvedSingleThreadDFS: impossible, extractSolved failed, invalid path"
-- | @cutOnSolvedSingleThreadDFSDiff prf@ removes all other cases if an
-- attack is found. The attack search is performed using a single-thread DFS
-- traversal.
--
-- FIXME: Note that this function may use a lot of space, as it holds onto the
-- whole proof tree.
cutOnSolvedSingleThreadDFSDiff :: DiffProof (Maybe a) -> DiffProof (Maybe a)
cutOnSolvedSingleThreadDFSDiff prf0 =
    go $ insertPathsDiff prf0
  where
    go prf = case findSolved prf of
        NoSolution      -> prf0
        Solution path   -> extractSolved path prf0
        MaybeNoSolution -> error "Theory.Constraint.cutOnSolvedSingleThreadDFSDiff: impossible, MaybeNoSolution in single thread dfs"
      where
        -- Left-to-right DFS; short-circuits on the first 'Solution'.
        findSolved node = case node of
            -- do not search in nodes that are not annotated
            LNode (DiffProofStep _          (Nothing, _   )) _  -> NoSolution
            LNode (DiffProofStep DiffAttack (Just _ , path)) _  -> Solution path
            LNode (DiffProofStep _          (Just _ , _   )) cs ->
                foldMap findSolved cs

        -- Prune the tree to the single branch leading to the attack.
        extractSolved []         p               = p
        extractSolved (label:ps) (LNode pstep m) = case M.lookup label m of
            Just subprf ->
                LNode pstep (M.fromList [(label, extractSolved ps subprf)])
            Nothing     ->
                error "Theory.Constraint.cutOnSolvedSingleThreadDFSDiff: impossible, extractSolved failed, invalid path"
-- | @cutOnSolvedDFS prf@ removes all other cases if an attack is found. The
-- attack search is performed using a parallel DFS traversal with iterative
-- deepening (depth bound doubles each pass, starting at 4).
-- Note that when an attack is found, other, already started threads will not be
-- stopped. They will first run to completion, and only afterwards will the proof
-- complete. If this is undesirable behavior, use cutOnSolvedSingleThreadDFS.
--
-- FIXME: Note that this function may use a lot of space, as it holds onto the
-- whole proof tree.
cutOnSolvedDFS :: Proof (Maybe a) -> Proof (Maybe a)
cutOnSolvedDFS prf0 =
    go (4 :: Integer) $ insertPaths prf0
  where
    go dMax prf = case findSolved 0 prf of
        NoSolution      -> prf0
        MaybeNoSolution -> go (2 * dMax) prf   -- bound hit: deepen and retry
        Solution path   -> extractSolved path prf0
      where
        findSolved d node
          | d >= dMax = MaybeNoSolution
          | otherwise = case node of
              -- do not search in nodes that are not annotated
              LNode (ProofStep _      (Nothing, _   )) _  -> NoSolution
              LNode (ProofStep Solved (Just _ , path)) _  -> Solution path
              LNode (ProofStep _      (Just _ , _   )) cs ->
                  -- Spark evaluation of the children before searching them.
                  foldMap (findSolved (succ d))
                          (cs `using` parTraversable nfProofMethod)

        -- Force method, annotation and child map of a node to WHNF.
        nfProofMethod node = do
            void $ rseq (psMethod $ root node)
            void $ rseq (psInfo $ root node)
            void $ rseq (children node)
            return node

        -- Prune the tree to the single branch leading to the solved step.
        extractSolved []         p               = p
        extractSolved (label:ps) (LNode pstep m) = case M.lookup label m of
            Just subprf ->
                LNode pstep (M.fromList [(label, extractSolved ps subprf)])
            Nothing     ->
                error "Theory.Constraint.cutOnSolvedDFS: impossible, extractSolved failed, invalid path"
-- | @cutOnSolvedDFSDiff prf@ removes all other cases if an attack is found. The
-- attack search is performed using a parallel DFS traversal with iterative
-- deepening (depth bound doubles each pass, starting at 4).
-- Note that when an attack is found, other, already started threads will not be
-- stopped. They will first run to completion, and only afterwards will the proof
-- complete. If this is undesirable behavior, use cutOnSolvedSingleThreadDFSDiff.
--
-- FIXME: Note that this function may use a lot of space, as it holds onto the
-- whole proof tree.
cutOnSolvedDFSDiff :: DiffProof (Maybe a) -> DiffProof (Maybe a)
cutOnSolvedDFSDiff prf0 =
    go (4 :: Integer) $ insertPathsDiff prf0
  where
    go dMax prf = case findSolved 0 prf of
        NoSolution      -> prf0
        MaybeNoSolution -> go (2 * dMax) prf   -- bound hit: deepen and retry
        Solution path   -> extractSolved path prf0
      where
        findSolved d node
          | d >= dMax = MaybeNoSolution
          | otherwise = case node of
              -- do not search in nodes that are not annotated
              LNode (DiffProofStep _          (Nothing, _   )) _  -> NoSolution
              LNode (DiffProofStep DiffAttack (Just _ , path)) _  -> Solution path
              LNode (DiffProofStep _          (Just _ , _   )) cs ->
                  -- Spark evaluation of the children before searching them.
                  foldMap (findSolved (succ d))
                          (cs `using` parTraversable nfProofMethod)

        -- Force method, annotation and child map of a node to WHNF.
        nfProofMethod node = do
            void $ rseq (dpsMethod $ root node)
            void $ rseq (dpsInfo $ root node)
            void $ rseq (children node)
            return node

        -- Prune the tree to the single branch leading to the attack.
        extractSolved []         p               = p
        extractSolved (label:ps) (LNode pstep m) = case M.lookup label m of
            Just subprf ->
                LNode pstep (M.fromList [(label, extractSolved ps subprf)])
            Nothing     ->
                error "Theory.Constraint.cutOnSolvedDFSDiff: impossible, extractSolved failed, invalid path"
-- | Search for attacks in a BFS manner.
-- The proof is re-traversed with an increasing level bound until either the
-- whole proof was covered ('CompleteProof') or an attack was found
-- ('TraceFound'); the 'ProofStatus' accumulator travels in a State monad.
cutOnSolvedBFS :: Proof (Maybe a) -> Proof (Maybe a)
cutOnSolvedBFS =
    go (1::Int)
  where
    go l prf =
        -- FIXME: See if that poor man's logging could be done better.
        trace ("searching for attacks at depth: " ++ show l) $
        case S.runState (checkLevel l prf) CompleteProof of
          (_,    UndeterminedProof) -> error "cutOnSolvedBFS: impossible"
          (_,    CompleteProof)     -> prf
          (_,    IncompleteProof)   -> go (l+1) prf
          (prf', TraceFound)        ->
              trace ("attack found at depth: " ++ show l) prf'

    -- At the level bound: a 'Solved' step with annotation is an attack.
    checkLevel 0 (LNode step@(ProofStep Solved (Just _)) _) =
        S.put TraceFound >> return (LNode step M.empty)
    -- At the level bound with children remaining: truncate with 'Sorry',
    -- recording whether we stopped because of the bound or because an
    -- attack was already found elsewhere.
    checkLevel 0 prf@(LNode (ProofStep _ x) cs)
      | M.null cs = return prf
      | otherwise = do
          st <- S.get
          msg <- case st of
              TraceFound -> return $ "ignored (attack exists)"
              _          -> S.put IncompleteProof >> return "bound reached"
          return $ LNode (ProofStep (Sorry (Just msg)) x) M.empty
    -- Above the bound: stop at unannotated nodes, otherwise descend.
    checkLevel l prf@(LNode step cs)
      | isNothing (psInfo step) = return prf
      | otherwise = LNode step <$> traverse (checkLevel (l-1)) cs
-- | Search for attacks in a BFS manner (diff-proof variant of
-- 'cutOnSolvedBFS').  The proof is re-traversed with an increasing level
-- bound until either the whole proof was covered ('CompleteProof') or an
-- attack was found ('TraceFound').
cutOnSolvedBFSDiff :: DiffProof (Maybe a) -> DiffProof (Maybe a)
cutOnSolvedBFSDiff =
    go (1::Int)
  where
    go l prf =
        -- FIXME: See if that poor man's logging could be done better.
        trace ("searching for attacks at depth: " ++ show l) $
        case S.runState (checkLevel l prf) CompleteProof of
          -- BUG FIX: the error message previously named 'cutOnSolvedBFS',
          -- pointing debugging at the wrong function.
          (_,    UndeterminedProof) -> error "cutOnSolvedBFSDiff: impossible"
          (_,    CompleteProof)     -> prf
          (_,    IncompleteProof)   -> go (l+1) prf
          (prf', TraceFound)        ->
              trace ("attack found at depth: " ++ show l) prf'

    -- At the level bound: a 'DiffAttack' step with annotation is an attack.
    checkLevel 0 (LNode step@(DiffProofStep DiffAttack (Just _)) _) =
        S.put TraceFound >> return (LNode step M.empty)
    -- At the level bound with children remaining: truncate with 'DiffSorry'.
    checkLevel 0 prf@(LNode (DiffProofStep _ x) cs)
      | M.null cs = return prf
      | otherwise = do
          st <- S.get
          msg <- case st of
              TraceFound -> return $ "ignored (attack exists)"
              _          -> S.put IncompleteProof >> return "bound reached"
          return $ LNode (DiffProofStep (DiffSorry (Just msg)) x) M.empty
    -- Above the bound: stop at unannotated nodes, otherwise descend.
    checkLevel l prf@(LNode step cs)
      | isNothing (dpsInfo step) = return prf
      | otherwise = LNode step <$> traverse (checkLevel (l-1)) cs
-- | @proveSystemDFS rules se@ explores all solutions of the initial
-- constraint system using a depth-first-search strategy to resolve the
-- non-determinism wrt. what goal to solve next. This proof can be of
-- infinite depth, if the proof strategy loops.
--
-- Use 'annotateWithSystems' to annotate the proof tree with the constraint
-- systems.
proveSystemDFS :: Heuristic -> ProofContext -> Int -> System -> Proof ()
proveSystemDFS heuristic ctxt d0 sys0 =
    prove d0 sys0
  where
    -- Strict depth counter: the heuristic may depend on the current depth.
    prove !depth sys =
        case rankProofMethods (useHeuristic heuristic depth) ctxt sys of
          -- No applicable method left: the system is solved.
          []                        -> node Solved M.empty
          -- Commit to the highest-ranked method and recurse into its cases.
          (method, (cases, _expl)):_ -> node method cases
      where
        node method cases =
            LNode (ProofStep method ()) (M.map (prove (succ depth)) cases)
-- | @proveDiffSystemDFS rules se@ explores all solutions of the initial
-- constraint system using a depth-first-search strategy to resolve the
-- non-determinism wrt. what goal to solve next. This proof can be of
-- infinite depth, if the proof strategy loops.
--
-- Use 'annotateWithSystems' to annotate the proof tree with the constraint
-- systems.
proveDiffSystemDFS :: Heuristic -> DiffProofContext -> Int -> DiffSystem -> DiffProof ()
proveDiffSystemDFS heuristic ctxt d0 sys0 =
    prove d0 sys0
  where
    -- Strict depth counter: the heuristic may depend on the current depth.
    prove !depth sys =
        case rankDiffProofMethods (useHeuristic heuristic depth) ctxt sys of
          -- No applicable method: unlike 'proveSystemDFS', this records an
          -- explicit 'DiffSorry' rather than a solved step.
          []                        -> node (DiffSorry (Just "Cannot prove")) M.empty
          (method, (cases, _expl)):_ -> node method cases
      where
        node method cases =
            LNode (DiffProofStep method ()) (M.map (prove (succ depth)) cases)
------------------------------------------------------------------------------
-- Pretty printing
------------------------------------------------------------------------------
-- | Pretty-print a proof with the default step renderer and no extra
-- per-case decoration.
prettyProof :: HighlightDocument d => Proof a -> d
prettyProof = prettyProofWith (prettyProofMethod . psMethod) (const id)

prettyProofWith :: HighlightDocument d
                => (ProofStep a -> d)      -- ^ Make proof step pretty
                -> (ProofStep a -> d -> d) -- ^ Make whole case pretty
                -> Proof a                 -- ^ The proof to prettify
                -> d
prettyProofWith prettyStep prettyCase =
    ppPrf
  where
    ppPrf (LNode ps cs) = ppCases ps (M.toList cs)

    -- A solved leaf needs no "by" keyword.
    ppCases ps@(ProofStep Solved _) [] = prettyStep ps
    -- Any other leaf is closed with "by <step>".
    ppCases ps [] = prettyCase ps (kwBy <> text " ")
                      <> prettyStep ps
    -- A single unnamed child is printed inline, without case keywords.
    ppCases ps [("", prf)] = prettyStep ps $-$ ppPrf prf
    -- Several cases: separate with "next" and close with "qed".
    ppCases ps cases =
        prettyStep ps $-$
        (vcat $ intersperse (prettyCase ps kwNext) $ map ppCase cases) $-$
        prettyCase ps kwQED

    ppCase (name, prf) = nest 2 $
      (prettyCase (root prf) $ kwCase <-> text name) $-$
      ppPrf prf
-- | Pretty-print a diff proof with the default step renderer and no extra
-- per-case decoration.
prettyDiffProof :: HighlightDocument d => DiffProof a -> d
prettyDiffProof = prettyDiffProofWith (prettyDiffProofMethod . dpsMethod) (const id)

prettyDiffProofWith :: HighlightDocument d
                    => (DiffProofStep a -> d)      -- ^ Make proof step pretty
                    -> (DiffProofStep a -> d -> d) -- ^ Make whole case pretty
                    -> DiffProof a                 -- ^ The proof to prettify
                    -> d
prettyDiffProofWith prettyStep prettyCase =
    ppPrf
  where
    ppPrf (LNode ps cs) = ppCases ps (M.toList cs)

    -- A 'DiffMirrored' leaf needs no "by" keyword (cf. 'Solved' in
    -- 'prettyProofWith').
    ppCases ps@(DiffProofStep DiffMirrored _) [] = prettyStep ps
    ppCases ps [] = prettyCase ps (kwBy <> text " ")
                      <> prettyStep ps
    -- A single unnamed child is printed inline, without case keywords.
    ppCases ps [("", prf)] = prettyStep ps $-$ ppPrf prf
    -- Several cases: separate with "next" and close with "qed".
    ppCases ps cases =
        prettyStep ps $-$
        (vcat $ intersperse (prettyCase ps kwNext) $ map ppCase cases) $-$
        prettyCase ps kwQED

    ppCase (name, prf) = nest 2 $
      (prettyCase (root prf) $ kwCase <-> text name) $-$
      ppPrf prf
-- | Convert a proof status to a readable string.  What counts as
-- "verified" versus "falsified" depends on whether the property demands
-- that no trace exists or that some trace exists.
showProofStatus :: SystemTraceQuantifier -> ProofStatus -> String
showProofStatus quantifier status = case (quantifier, status) of
    (ExistsNoTrace,   TraceFound)        -> "falsified - found trace"
    (ExistsNoTrace,   CompleteProof)     -> "verified"
    (ExistsSomeTrace, CompleteProof)     -> "falsified - no trace found"
    (ExistsSomeTrace, TraceFound)        -> "verified"
    (_,               IncompleteProof)   -> "analysis incomplete"
    (_,               UndeterminedProof) -> "analysis undetermined"
-- | Convert a proof status to a readable string.  For diff proofs any
-- found trace is an attack, so no trace quantifier is needed.
showDiffProofStatus :: ProofStatus -> String
showDiffProofStatus status = case status of
    TraceFound        -> "falsified - found trace"
    CompleteProof     -> "verified"
    IncompleteProof   -> "analysis incomplete"
    UndeterminedProof -> "analysis undetermined"
-- Instances
--------------------

-- | Fully evaluate a labelled tree: force the root annotation and then,
-- via the map's instance, every subtree.
instance (Ord l, NFData l, NFData a) => NFData (LTree l a) where
  rnf (LNode r m) = rnf r `seq` rnf m

-- | Serialise a labelled tree as root annotation followed by child map.
instance (Ord l, Binary l, Binary a) => Binary (LTree l a) where
  put (LNode r m) = put r >> put m
  get = LNode <$> get <*> get
| kmilner/tamarin-prover | lib/theory/src/Theory/Proof.hs | gpl-3.0 | 43,974 | 0 | 17 | 11,824 | 10,590 | 5,426 | 5,164 | 681 | 7 |
{-# LANGUAGE RecordWildCards #-}
module Castle (
Castle(..)
,testCastle
,drawCastleInfo
)where
import FontRenderer
import System.Random
import Data.Function
import Data.List(sortBy)
import Graphics.Gloss
-- | One castle building.
-- NOTE(review): neither 'Building' nor its fields are referenced anywhere
-- in the code visible in this module besides the 'buildings' list.
data Building = Building
    { buildingSize :: Int           -- ^ footprint of the building
    , workers      :: Int           -- ^ workers assigned to it
    , level        :: Int           -- ^ upgrade level
    , bType        :: BuildingType  -- ^ which kind of building this is
    }

-- | The kinds of buildings a castle can contain.
data BuildingType = Smith | TrollCave | WolfKennel
                  | Tower | Wall
-- | The full castle state: resources, the standing army and buildings.
data Castle = Castle
    { gold        :: Int        -- ^ gold reserve
    , freeWorkers :: Int        -- ^ workers not assigned to a building
    , food        :: Int        -- ^ food reserve
    , weapons     :: Int        -- ^ weapon stock
    , army        :: Army       -- ^ unit kinds with their counts
    , buildings   :: [Building] -- ^ constructed buildings
    }

-- | A fixed castle used for testing/demo purposes.
testCastle = Castle{
    gold=50,
    freeWorkers=0,food=10,weapons=99,army=[], buildings=[]}
-- | Render the castle's resources and army as a small white multi-line
-- text overlay ('multiline' comes from "FontRenderer").
drawCastleInfo :: Castle -> Picture
drawCastleInfo Castle{..} = color white $ scale 0.2 0.2 $ multiline str
    where str = ["Gold: "++show gold
                , "Food: "++show food
                ] ++ formatArmy army

-- | One "<MonsterType>: <count>" line per army entry.
formatArmy :: Army -> [String]
formatArmy a = (\(m,c) -> show m ++ ": "++show c) `map` a

--drawCastleInfo Castle{..} =
-- | The unit kinds an army can contain, in ascending order of strength
-- (the 'Ord' instance is used to sort armies by strength elsewhere).
data MonsterType = Soldier | Wolf | Troll | Dragon
    deriving (Show, Eq, Ord)

-- | Damage dealt by a single unit of the given kind.
monsterDamage m = case m of
    Soldier -> 1
    Wolf    -> 10
    Troll   -> 50
    Dragon  -> 1000

-- | An army: unit kinds paired with their counts.
type Army = [(MonsterType, Int)]

-- | Outcome of judging a battle state.
data Decision = DefenderRetreat | AttackerRetreat | ContinueFight

-- | Total damage an army deals per round.
damage :: Army -> Int
damage units = sum [ monsterDamage kind * count | (kind, count) <- units ]
-- | Run battle rounds until 'decide' says one side retreats; returns the
-- two (possibly depleted) armies.  Non-termination is possible if
-- 'decide' never leaves 'ContinueFight'.
fight :: Army -> Army -> IO(Army, Army)
fight a b =
    case decide a b of
      ContinueFight -> do
          (na, nb) <- battleRound a b
          fight na nb
      _             -> return (a, b)
-- | Randomise an army's damage output: uniform in
-- [dam - 2*dam/3, dam + 2*dam/3] around the deterministic 'damage'.
randomDamage :: Army -> IO Int
randomDamage army = randomRIO (dam-maxDev, dam+maxDev)
    where dam    = damage army
          maxDev = (dam*2) `div` 3
-- | Distribute @dam@ points of damage over an army, killing whole units
-- starting with the strongest monster type; leftover damage carries over
-- to the weaker groups.
applyDamage :: Int -> Army -> Army
applyDamage _   []   = []
applyDamage dam army =
    let (m,c):ds = reverse $ sortBy (compare `on` fst) army
        mdam     = monsterDamage m :: Int
        -- BUG FIX: this used @dam `mod` mdam@, i.e. the *remainder*, so
        -- e.g. 30 damage against Trolls (strength 50) killed up to 30 of
        -- them, while 100 damage killed none.  @div@ gives the intended
        -- "how many whole units does the damage destroy" count.
        killed   = min c $ dam `div` mdam
    in (m, c - killed) : applyDamage (dam - killed * mdam) ds
-- | One battle round: each army rolls its randomised damage against the
-- *original* opposing army, then both losses are applied simultaneously.
--
-- BUG FIX: the original bound the results with @let a = applyDamage da a@
-- (and likewise for @b@).  @let@ is recursive in Haskell, so those
-- bindings were self-referential and diverged (@<<loop>>@); they also
-- applied each army's damage to itself instead of to its opponent.
battleRound :: Army -> Army -> IO (Army, Army)
battleRound a b = do
    da <- randomDamage a               -- damage dealt by army a
    db <- randomDamage b               -- damage dealt by army b
    return (applyDamage db a, applyDamage da b)
-- | Judge the battle state from the two armies' deterministic damage
-- outputs: a sufficiently dominant side forces the other to retreat,
-- otherwise the fight continues.
decide :: Army -> Army -> Decision
decide attacker defender
    | da - db > 800 && da > db * 5 = DefenderRetreat
    | db - da > 500 && db > da * 4 = AttackerRetreat
    | otherwise                    = ContinueFight
  where
    da = damage attacker
    db = damage defender
| Marthog/ld33 | Castle.hs | gpl-3.0 | 2,591 | 0 | 12 | 777 | 946 | 519 | 427 | 77 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.PubSub.Projects.Schemas.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets a schema.
--
-- /See:/ <https://cloud.google.com/pubsub/docs Cloud Pub/Sub API Reference> for @pubsub.projects.schemas.get@.
module Network.Google.Resource.PubSub.Projects.Schemas.Get
(
-- * REST Resource
ProjectsSchemasGetResource
-- * Creating a Request
, projectsSchemasGet
, ProjectsSchemasGet
-- * Request Lenses
, prorXgafv
, prorUploadProtocol
, prorAccessToken
, prorUploadType
, prorName
, prorView
, prorCallback
) where
import Network.Google.Prelude
import Network.Google.PubSub.Types
-- | A resource alias for @pubsub.projects.schemas.get@ method which the
-- 'ProjectsSchemasGet' request conforms to.
--
-- NOTE: auto-generated; encodes the REST route @v1/{+name}@ together
-- with this method's query parameters as a servant-style type.
type ProjectsSchemasGetResource =
     "v1" :>
       Capture "name" Text :>
         QueryParam "$.xgafv" Xgafv :>
           QueryParam "upload_protocol" Text :>
             QueryParam "access_token" Text :>
               QueryParam "uploadType" Text :>
                 QueryParam "view" ProjectsSchemasGetView :>
                   QueryParam "callback" Text :>
                     QueryParam "alt" AltJSON :> Get '[JSON] Schema
-- | Gets a schema.
--
-- /See:/ 'projectsSchemasGet' smart constructor.
data ProjectsSchemasGet =
  ProjectsSchemasGet'
    { _prorXgafv :: !(Maybe Xgafv)                  -- ^ V1 error format
    , _prorUploadProtocol :: !(Maybe Text)          -- ^ upload protocol, e.g. \"raw\"
    , _prorAccessToken :: !(Maybe Text)             -- ^ OAuth access token
    , _prorUploadType :: !(Maybe Text)              -- ^ legacy upload protocol
    , _prorName :: !Text                            -- ^ required: schema name
    , _prorView :: !(Maybe ProjectsSchemasGetView)  -- ^ which fields to return
    , _prorCallback :: !(Maybe Text)                -- ^ JSONP callback
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsSchemasGet' with the minimum fields required to make a request.
-- Only the schema name is mandatory; every other field defaults to 'Nothing'.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'prorXgafv'
--
-- * 'prorUploadProtocol'
--
-- * 'prorAccessToken'
--
-- * 'prorUploadType'
--
-- * 'prorName'
--
-- * 'prorView'
--
-- * 'prorCallback'
projectsSchemasGet
    :: Text -- ^ 'prorName'
    -> ProjectsSchemasGet
projectsSchemasGet pProrName_ =
  ProjectsSchemasGet'
    { _prorXgafv = Nothing
    , _prorUploadProtocol = Nothing
    , _prorAccessToken = Nothing
    , _prorUploadType = Nothing
    , _prorName = pProrName_
    , _prorView = Nothing
    , _prorCallback = Nothing
    }
-- Lenses over the 'ProjectsSchemasGet' fields (auto-generated).

-- | V1 error format.
prorXgafv :: Lens' ProjectsSchemasGet (Maybe Xgafv)
prorXgafv
  = lens _prorXgafv (\ s a -> s{_prorXgafv = a})

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
prorUploadProtocol :: Lens' ProjectsSchemasGet (Maybe Text)
prorUploadProtocol
  = lens _prorUploadProtocol
      (\ s a -> s{_prorUploadProtocol = a})

-- | OAuth access token.
prorAccessToken :: Lens' ProjectsSchemasGet (Maybe Text)
prorAccessToken
  = lens _prorAccessToken
      (\ s a -> s{_prorAccessToken = a})

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
prorUploadType :: Lens' ProjectsSchemasGet (Maybe Text)
prorUploadType
  = lens _prorUploadType
      (\ s a -> s{_prorUploadType = a})

-- | Required. The name of the schema to get. Format is
-- \`projects\/{project}\/schemas\/{schema}\`.
prorName :: Lens' ProjectsSchemasGet Text
prorName = lens _prorName (\ s a -> s{_prorName = a})

-- | The set of fields to return in the response. If not set, returns a
-- Schema with \`name\` and \`type\`, but not \`definition\`. Set to
-- \`FULL\` to retrieve all fields.
prorView :: Lens' ProjectsSchemasGet (Maybe ProjectsSchemasGetView)
prorView = lens _prorView (\ s a -> s{_prorView = a})

-- | JSONP callback parameter.
prorCallback :: Lens' ProjectsSchemasGet (Maybe Text)
prorCallback
  = lens _prorCallback (\ s a -> s{_prorCallback = a})
-- | Wires the request record to the REST route: response type, the OAuth
-- scopes the method accepts, and the client built from
-- 'ProjectsSchemasGetResource'.
instance GoogleRequest ProjectsSchemasGet where
        type Rs ProjectsSchemasGet = Schema
        type Scopes ProjectsSchemasGet =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/pubsub"]
        requestClient ProjectsSchemasGet'{..}
          = go _prorName _prorXgafv _prorUploadProtocol
              _prorAccessToken
              _prorUploadType
              _prorView
              _prorCallback
              (Just AltJSON)   -- responses are always requested as JSON
              pubSubService
          where go
                  = buildClient
                      (Proxy :: Proxy ProjectsSchemasGetResource)
                      mempty
| brendanhay/gogol | gogol-pubsub/gen/Network/Google/Resource/PubSub/Projects/Schemas/Get.hs | mpl-2.0 | 5,053 | 0 | 16 | 1,160 | 781 | 456 | 325 | 111 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.Instances.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes the specified Instance resource. For more information, see
-- Deleting an instance.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.instances.delete@.
module Network.Google.Resource.Compute.Instances.Delete
(
-- * REST Resource
InstancesDeleteResource
-- * Creating a Request
, instancesDelete
, InstancesDelete
-- * Request Lenses
, idRequestId
, idProject
, idZone
, idInstance
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.instances.delete@ method which the
-- 'InstancesDelete' request conforms to.
--
-- NOTE: auto-generated; encodes the REST route
-- @compute/v1/projects/{project}/zones/{zone}/instances/{instance}@.
type InstancesDeleteResource =
     "compute" :>
       "v1" :>
         "projects" :>
           Capture "project" Text :>
             "zones" :>
               Capture "zone" Text :>
                 "instances" :>
                   Capture "instance" Text :>
                     QueryParam "requestId" Text :>
                       QueryParam "alt" AltJSON :> Delete '[JSON] Operation
-- | Deletes the specified Instance resource. For more information, see
-- Deleting an instance.
--
-- /See:/ 'instancesDelete' smart constructor.
data InstancesDelete =
  InstancesDelete'
    { _idRequestId :: !(Maybe Text)  -- ^ optional idempotency token
    , _idProject   :: !Text          -- ^ required: project ID
    , _idZone      :: !Text          -- ^ required: zone name
    , _idInstance  :: !Text          -- ^ required: instance name
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'InstancesDelete' with the minimum fields required to make a request.
-- The request ID defaults to 'Nothing'; project, zone and instance are mandatory.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'idRequestId'
--
-- * 'idProject'
--
-- * 'idZone'
--
-- * 'idInstance'
instancesDelete
    :: Text -- ^ 'idProject'
    -> Text -- ^ 'idZone'
    -> Text -- ^ 'idInstance'
    -> InstancesDelete
instancesDelete pIdProject_ pIdZone_ pIdInstance_ =
  InstancesDelete'
    { _idRequestId = Nothing
    , _idProject = pIdProject_
    , _idZone = pIdZone_
    , _idInstance = pIdInstance_
    }
-- Lenses over the 'InstancesDelete' fields (auto-generated).

-- | An optional request ID to identify requests. Specify a unique request ID
-- so that if you must retry your request, the server will know to ignore
-- the request if it has already been completed. For example, consider a
-- situation where you make an initial request and the request times out.
-- If you make the request again with the same request ID, the server can
-- check if original operation with the same request ID was received, and
-- if so, will ignore the second request. This prevents clients from
-- accidentally creating duplicate commitments. The request ID must be a
-- valid UUID with the exception that zero UUID is not supported
-- (00000000-0000-0000-0000-000000000000).
idRequestId :: Lens' InstancesDelete (Maybe Text)
idRequestId
  = lens _idRequestId (\ s a -> s{_idRequestId = a})

-- | Project ID for this request.
idProject :: Lens' InstancesDelete Text
idProject
  = lens _idProject (\ s a -> s{_idProject = a})

-- | The name of the zone for this request.
idZone :: Lens' InstancesDelete Text
idZone = lens _idZone (\ s a -> s{_idZone = a})

-- | Name of the instance resource to delete.
idInstance :: Lens' InstancesDelete Text
idInstance
  = lens _idInstance (\ s a -> s{_idInstance = a})
-- | Wires the request record to the REST route: response type, accepted
-- OAuth scopes, and the client built from 'InstancesDeleteResource'.
instance GoogleRequest InstancesDelete where
        type Rs InstancesDelete = Operation
        type Scopes InstancesDelete =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/compute"]
        requestClient InstancesDelete'{..}
          = go _idProject _idZone _idInstance _idRequestId
              (Just AltJSON)   -- responses are always requested as JSON
              computeService
          where go
                  = buildClient
                      (Proxy :: Proxy InstancesDeleteResource)
                      mempty
| brendanhay/gogol | gogol-compute/gen/Network/Google/Resource/Compute/Instances/Delete.hs | mpl-2.0 | 4,568 | 0 | 17 | 1,066 | 554 | 332 | 222 | 84 | 1 |
-- Brittany formatter test fixture: a minimal one-equation declaration.
-- NOTE(review): 'x' is a free variable here; the fixture only needs to
-- parse/pretty-print, not type-check.
func _ = x
| lspitzner/brittany | data/Test79.hs | agpl-3.0 | 11 | 0 | 5 | 4 | 9 | 4 | 5 | 1 | 1 |
module Dictionary (Dict(..),
Dictionary,
Entry,
prTable,
removeAttr,
FullFormLex,
Ident,
classifyDict,
noAttr,
entry,
entryI,
EntryN,
dictionary,
unDict,
size,
sizeW,
unionDictionary,
emptyDict,
dict2fullform,
nWords
) where
import General
import Data.List (sortBy, group)
import Data.Char
-- untyped dictionary: dictionary word, category, inherent features, inflection

-- | Parameter types whose inflection tables can be rendered into
-- dictionary entries.  All methods have defaults, so instances only
-- override what differs from the generic behaviour.
class Param a => Dict a where
  -- | Head word of an entry: the first form produced at 'value0'.
  dictword :: (a -> Str) -> String
  dictword f = concat $ take 1 $ unStr (f value0)

  -- | Part-of-speech/category label for the entry.
  category :: (a -> Str) -> String
  category = const "Undefined"

  -- | Default composition attribute assigned to every form.
  defaultAttr :: (a -> Str) -> Attr
  defaultAttr = const noComp

  -- | Per-parameter overrides of 'defaultAttr'.
  attrException :: (a -> Str) -> [(a,Attr)]
  attrException = const []
-- | A dictionary is just a list of entries.
data Dictionary = Dict [Entry]

type Dictionary_Word = String
type Category        = String
type Inherent        = String
type Untyped         = String
-- | Parameter string mapped to composition attribute and forms.
type Inflection_Table = [(Untyped,(Attr,Str))]
-- | Head word, category, inherent features, full inflection table.
type Entry  = (Dictionary_Word, Category, [Inherent], Inflection_Table)
type Ident  = String
-- | Like 'Entry' but with the composition attributes stripped.
type EntryN = (Dictionary_Word, Category, [Inherent], [(Ident,Str)])

-- | The dictionary with no entries.
emptyDict :: Dictionary
emptyDict = Dict []
-- | Render the full inflection table of a paradigm, applying the default
-- attribute and its per-parameter exceptions.
infTable :: Dict a => (a -> Str) -> Inflection_Table
infTable f = prTableAttr f (defaultAttr f) (attrException f)

-- | Build an entry with no inherent features.
entry :: Dict a => (a -> Str) -> Entry
entry f = entryI f []

-- | Build an entry with the given inherent features.
entryI :: Dict a => (a -> Str) -> [Inherent] -> Entry
entryI f ihs = (dictword f, category f, ihs, infTable f)
-- | Print a table, attaching to each form the attribute from @ts@ or the
-- default @da@ when no exception is listed.
prTableAttr :: Param a => (a -> Str) -> Attr -> [(a,Attr)] -> [(String,(Attr,Str))]
prTableAttr t da ts =
  [(prValue a,(maybe da id (lookup a ts),s)) | (a,s) <- table t]

-- | Print a table attaching the non-compositional attribute everywhere.
-- NOTE(review): not in this module's export list.
prTableW :: Param a => Table a -> [(String,(Attr,Str))]
prTableW t = [ (a,(noComp,s)) | (a,s) <- prTable t]

-- | Print the parameter values of a table as strings.
prTable :: Param a => Table a -> Table String
prTable = map (\ (a,b) -> (prValue a, b))
-- | Unwrap the entry list.
unDict :: Dictionary -> [Entry]
unDict (Dict xs) = xs

-- | Number of entries.
size :: Dictionary -> Int
size = length . unDict

-- | Number of word forms summed over all entries.
sizeW :: Dictionary -> Int
sizeW dict = sum [length t | (_,_,_,t) <- unDict dict]

-- | Number of forms in one entry.
-- NOTE(review): not in this module's export list.
sizeEntry :: Entry -> Int
sizeEntry (_,_,_,t) = length t

-- | Wrap an entry list.
dictionary :: [Entry] -> Dictionary
dictionary = Dict

-- | Concatenate two dictionaries (left entries first).
unionDictionary :: Dictionary -> Dictionary -> Dictionary
unionDictionary (Dict xs) (Dict ys) = Dict $ xs ++ ys

-- | Strip the composition attributes from every entry.
removeAttr :: Dictionary -> [EntryN]
removeAttr = map noAttr . unDict

-- | Strip the composition attribute from one entry's table.
noAttr :: Entry -> EntryN
noAttr (d,c,inh,tab) = (d,c,inh,[(i,s) | (i,(_,s)) <- tab])
-- group a dictionary into categories; reverses the entries...
-- | Each category appears once, paired with all its entries; adjacent
-- entries of the same category are merged into the existing group.
classifyDict :: Dictionary -> [(Ident,[Entry])]
classifyDict = foldr addNext [] . unDict
  where
    addNext entry@(_,cat,_,_) dict = case dict of
      (c,es) : dict' | cat == c -> (c, entry:es) : dict'
      ces    : dict'            -> ces : addNext entry dict'
      []                        -> [(cat,[entry])]
-- full-form lexicon: show all different analyses of a word as strings
type FullFormLex = [(String,[(Attr,String)])]

-- | Flatten a dictionary into a full-form lexicon: every inflected form
-- maps to its analyses "stem (n) category - parameter - inherents", where
-- @n@ is the entry's position in the dictionary.
dict2fullform :: Dictionary -> FullFormLex
dict2fullform dict = sortAssocs $
  concatMap mkOne $ zip (unDict dict) [0..] where
    mkOne ((stem, typ, inhs, infl),n) = concatMap mkForm infl where
      mkForm (par,(a,str)) = [(s, (a,
        unwords (stem : ("(" ++ show n ++ ")") : typ : sp : par : sp : inhs))) | s <- (unStr str)]
      sp = "-"
-- word analyzator that handles interpunctation and initial upper case letter.
--aAáeEéiIíoOóuUúüyýY

-- | Tokenise a string into lower-cased words: alphabetic runs are kept,
-- spaces and special characters are dropped, and an initial capital is
-- lower-cased (with an explicit table for accented Spanish capitals;
-- 'toLower' would in fact handle those as well).
nWords :: String -> [String]
nWords [] = []
nWords (c:cs)
  | alphanumeric c = case span alphanumeric cs of
      (xs,ys) -> ((case c of
                     'Á' -> 'á'
                     'É' -> 'é'
                     'Í' -> 'í'
                     'Ó' -> 'ó'
                     -- BUG FIX: this case read 'U' -> 'ú' (flagged
                     -- "--obs!" in the original), turning every word
                     -- starting with a plain capital U into "ú...".
                     'Ú' -> 'ú'
                     'Ü' -> 'ü'
                     'Ý' -> 'ý'
                     'Ñ' -> 'ñ'
                     c   -> toLower c):xs):nWords ys
  | isSpace c = nWords cs
  | otherwise = nWords cs -- throw away special characters
  where
    alphanumeric c = isAlpha c || elem c "ÁáÉéÍíÓóúÜüÝýÑñ"
-- binary search tree applicable to analysis
-- auxiliaries
-- binary search tree with logarithmic lookup
{-
data BinTree a = NT | BT a (BinTree a) (BinTree a) deriving (Show,Read)
sorted2tree :: [(a,b)] -> BinTree (a,b)
sorted2tree [] = NT
sorted2tree xs = BT x (sorted2tree t1) (sorted2tree t2) where
(t1,(x:t2)) = splitAt (length xs `div` 2) xs
lookupTree :: (Ord a) => a -> BinTree (a,b) -> Maybe b
lookupTree x tree = case tree of
NT -> Nothing
BT (a,b) left right
| x < a -> lookupTree x left
| x > a -> lookupTree x right
| x == a -> return b
tree2list :: BinTree a -> [a] -- inorder
tree2list NT = []
tree2list (BT z left right) = tree2list left ++ [z] ++ tree2list right
-}
--sortAssocs :: Ord a => [(a,b)] -> [(a,[b])]
--sortAssocs xs = flatten $ xs |->++ empty

-- Merge sort from List.hs adapted to the problem at hand
-- This function is a key function of FM, so optimizations
-- are essential.
-- | Sort analyses by surface form and merge equal keys: during the merge,
-- pairs with the same key ('EQ') have their analysis lists concatenated,
-- so each form ends up with all its analyses in one group.
sortAssocs :: [(String,(Attr,String))] -> [(String,[(Attr,String)])]
sortAssocs = mergesort (\(a,_) -> \(b,_) -> compare a b)
 where
  mergesort cmp = mergesort' cmp . map wrap

  mergesort' cmp []   = []
  mergesort' cmp [xs] = xs
  mergesort' cmp xss  = mergesort' cmp (merge_pairs cmp xss)

  merge_pairs cmp []          = []
  merge_pairs cmp [xs]        = [xs]
  merge_pairs cmp (xs:ys:xss) = merge cmp xs ys : merge_pairs cmp xss

  -- each element starts out as a singleton run
  wrap (a,b) = [(a,[b])]

  merge cmp xs [] = xs
  merge cmp [] ys = ys
  merge cmp (x@(a,xs):xss) (y@(_,ys):yss)
    = case x `cmp` y of
        GT -> y : merge cmp (x:xss) yss
        -- equal keys: fold y's analyses into x's group and keep merging
        EQ -> case xs of
               [z] -> merge cmp ((a,z:ys):xss) yss
               zs  -> merge cmp ((a,zs++ys):xss) yss
        _  -> x : merge cmp xss (y:yss)
{-
sortAssocs :: [(String,(Attr,String))] -> [(String,[(Attr,String)])]
sortAssocs ys = case sortBy (\(a,_) -> \(b,_) -> compare a b) ys of
((x,v):zs) -> arrange x [v] zs
[] -> []
where
arrange y vs ((x,v):xs)
| x == y = arrange y (v:vs) xs
| otherwise = (y,vs):arrange x [v] xs
arrange y vs [] = [(y,vs)]
-}
| johnjcamilleri/maltese-functional-morphology | lib-1.1/Dictionary.hs | lgpl-3.0 | 5,968 | 48 | 24 | 1,468 | 2,052 | 1,142 | 910 | 122 | 10 |
{-# LANGUAGE OverloadedStrings, NamedFieldPuns #-}
module FormEngine.FormElement.Identifiers where
import Prelude
import Data.Maybe (fromMaybe)
import Data.Monoid
import FormEngine.JQuery
import FormEngine.FormItem
import FormEngine.FormElement.FormElement as E
-- | Select the DOM node(s) whose @name@ attribute equals the element's id.
element2jq :: FormElement -> IO JQuery
element2jq element = selectByName $ elementId element
-- | DOM id of the tab header belonging to an element.
tabId :: FormElement -> Identifier
tabId element = "tab_" <> elementId element

-- | Human-readable tab caption: the form item's label, or "" if unset.
tabName :: FormElement -> String
tabName element = fromMaybe "" (iLabel $ fiDescriptor $ formItem element)

-- | DOM id of the tab's content pane.
paneId :: FormElement -> Identifier
paneId element = "pane_" <> elementId element

-- | DOM id of the diagram area; shared per chapter, not per element.
diagramId :: FormElement -> Identifier
diagramId element = "diagram_" <> elementId (elemChapter element)

-- | DOM id of the spot where an element's flags are rendered.
flagPlaceId :: FormElement -> Identifier
flagPlaceId element = elementId element <> "_flagPlaceId"

-- | All radios of one element share the element id as their @name@.
radioName :: FormElement -> String
radioName = elementId
-- | DOM id for one radio option: the element's raw id, the option value
-- restricted to DOM-safe characters (ASCII letters, digits, '_', '-'),
-- and the element's group id, joined with underscores.
radioId :: FormElement -> OptionElement -> Identifier
radioId element optionElem =
    elementRawId element <> "_" <> safeValue <> "_" <> elementGroupId element
  where
    safeValue = filter isSafe (optionElemValue optionElem)
    isSafe ch =
         (ch >= 'A' && ch <= 'Z')
      || (ch >= 'a' && ch <= 'z')
      || (ch >= '0' && ch <= '9')
      || ch == '_'
      || ch == '-'
-- | DOM id of the detail section shown for a selected radio option.
optionSectionId :: FormElement -> OptionElement -> Identifier
optionSectionId element option = radioId element option <> "_detail"

-- | DOM id of an element's optional-group checkbox.
checkboxId :: FormElement -> Identifier
checkboxId element = elementRawId element <> "_optional_group" <> elementGroupId element

-- | DOM id of the chapter-wide description subpane.
descSubpaneId :: FormElement -> Identifier
descSubpaneId element = elementId (elemChapter element) <> "_desc-subpane"

-- | DOM id of the text paragraph inside the description subpane.
descSubpaneParagraphId :: FormElement -> Identifier
descSubpaneParagraphId element = elementId (elemChapter element) <> "_desc-subpane-text"

-- | DOM id of the element's autocomplete dropdown box.
autoCompleteBoxId :: FormElement -> Identifier
autoCompleteBoxId element = elementId element <> "_autocomplete_box"
| DataStewardshipPortal/ds-form-engine | FormElement/Identifiers.hs | apache-2.0 | 1,854 | 0 | 20 | 271 | 503 | 260 | 243 | 38 | 1 |
module S2E4 where
-- | All Pythagorean triples with components in [2..n], found by brute
-- force; both orderings of the legs appear (e.g. (3,4,5) and (4,3,5)).
pyth :: Integer -> [(Integer, Integer, Integer)]
pyth n = filter pythagorean candidates
  where
    range      = [2 .. n]
    candidates = [ (a, b, c) | a <- range, b <- range, c <- range ]
    pythagorean (a, b, c) = a * a + b * b == c * c

-- | Primitive Pythagorean triples with a <= b <= c <= n: legs are ordered
-- and coprime, so each triple is listed exactly once.
pyth' :: Integer -> [(Integer, Integer, Integer)]
pyth' n =
    [ t
    | c <- [2 .. n], b <- [2 .. c], a <- [2 .. b]
    , let t = (a, b, c)
    , primitive a b c
    ]
  where
    primitive a b c = a * a + b * b == c * c && gcd a b == 1
| wouwouwou/module_8 | src/main/haskell/series2/exercise4.hs | apache-2.0 | 292 | 0 | 11 | 66 | 225 | 125 | 100 | 5 | 1 |
module Miscellaneous.A328863 (a328863) where
import HelperSequences.A000041 (a000041)
-- | OEIS A328863: the partition number A000041(n) plus the closed-form
-- term (n^2 + 6) `div` 12 (named a069905 in the original source).
a328863 :: Integer -> Integer
a328863 n = a000041 n + closedFormTerm
  where
    closedFormTerm = (n * n + 6) `div` 12
| peterokagey/haskellOEIS | src/Miscellaneous/A328863.hs | apache-2.0 | 190 | 0 | 10 | 32 | 75 | 41 | 34 | 5 | 1 |
import Data.Char
import Data.HashTable
import Data.IORef
import Data.Maybe
import qualified System.IO
import Text.ParserCombinators.Parsec
-- | Arithmetic expressions over integer literals and variable slots
-- (variables are encoded as 'Int' slot numbers).
data Expr = Num Int | Ident Int | XMul Expr Expr | XDiv Expr Expr | XAdd Expr Expr | XSub Expr Expr deriving Show
-- | Comparison conditions; 'XCond' treats a non-zero expression as true.
data Cond = Lt Expr Expr | Gt Expr Expr | Eq Expr Expr | Ne Expr Expr | XCond Expr deriving Show
-- | Declaration: the list of declared variable slots.
data Decl = DList [Int] deriving Show
-- | Statements of the toy imperative language.
data Stmt = Nil | Assgn Int Expr | Wh Cond Stmt | For Stmt Cond Stmt Stmt | Bl [Stmt] deriving Show
-- | A program: declarations followed by a statement block.
data Program = P Decl Stmt deriving Show

-- | Run a parser and unwrap the result.
-- NOTE(review): partial — pattern-matches on 'Right' and crashes on any
-- parse error.
prs p = (\(Right x) -> x) . (parse p "!")
-- | Whole input: a declaration line followed by statements, then EOF.
program = do
  d <- declaration
  l <- many statement
  eof
  return (P d (Bl l))

-- | Declaration: whitespace-separated identifiers ended by ';'.  The
-- identifiers are unwrapped to their slot numbers.
declaration = do
  spaces
  l <- identifier `sepEndBy` spaces
  spaces
  terminator
  return (DList (map (\(Ident x) -> x) l))

-- | A statement: block, for, while, assignment, or a lone ';' (no-op).
statement = (try block) <|> (try for) <|> (try while) <|> (try assignment) <|> (terminator >> (return Nil))
-- | Braced statement list; the second alternative ('block'') accepts a
-- block whose last statement is an assignment without trailing ';'.
block = do
  l <- (try $ between open_brace close_brace (many statement)) <|> (try $ between open_brace close_brace block')
  return (Bl l)

-- | Statements followed by one unterminated assignment.
block' = do
  l  <- many statement
  l' <- assignment'
  return (l ++ [l'])
-- | C-style for loop with ':' (not ';') separating the header parts:
-- @for (init : cond : step) body@.  Init and step default to 'Nil'.
for = do
  spaces
  string "for"
  spaces
  open_paren
  a1 <- (try assignment') <|> (return Nil)
  colon
  c <- cond
  colon
  a2 <- (try assignment') <|> (return Nil)
  close_paren
  spaces
  b <- statement
  spaces
  return (For a1 c a2 b)

-- | @while (cond) body@.
while = do
  spaces
  string "while"
  spaces
  c <- between open_paren close_paren cond
  spaces
  b <- statement
  spaces
  return (Wh c b)
-- | An assignment terminated by ';'.
assignment = do
  a <- assignment'
  terminator
  return a

-- | @ident = expr@ without the terminator (also used in for-headers and
-- at the end of 'block'').
assignment' = do
  spaces
  (Ident l) <- identifier
  sc '='
  r <- expr
  spaces
  return (Assgn l r)
--cond = (try paren_cond) <|> (try (bin_op x_cond f_cond expr expr)) <|> cond_expr
-- | A condition: parenthesised, or a binary comparison of two expressions.
-- NOTE(review): the commented-out variant above also allowed a bare
-- expression ('cond_expr'); the active grammar does not, leaving
-- 'cond_expr' unused.
cond = (try paren_cond) <|> (bin_op x_cond f_cond expr expr)

paren_cond = between open_paren close_paren cond

-- | A bare expression used as a condition (currently unreferenced).
cond_expr = do
  x <- expr
  return (XCond x)
-- | Expression grammar with the usual precedence: additive operators bind
-- weakest, then multiplicative, then parentheses/identifiers/numbers.
-- Left operands are restricted ('non_add_expr'/'non_mul_expr') to avoid
-- left recursion, which makes the operators right-associative.
expr         = (try $ add_expr expr) <|> (try $ mul_expr expr) <|> (try paren_expr) <|> (try identifier) <|> number
non_add_expr = (try $ mul_expr non_add_expr) <|> (try paren_expr) <|> (try identifier) <|> number
non_mul_expr = (try paren_expr) <|> (try identifier) <|> number

add_expr x = bin_op x_addsub (oneOf "+-") non_add_expr x
mul_expr x = bin_op x_muldiv (oneOf "*/") non_mul_expr x

paren_expr = between open_paren close_paren expr

open_brace  = sc '{'
close_brace = sc '}'
open_paren  = sc '('
close_paren = sc ')'
-- | An identifier: the special variable @N@ or an alphabetic identifier.
identifier = (try n_identifier) <|> a_identifier

-- | @N@ is the program's input parameter; it lives in slot -1.
n_identifier = do
  char 'N'
  return (Ident (-1))
-- | Alphabetic identifier: one letter plus an optional digit suffix,
-- mapped to slot @digits + 100 * ord letter@; a bare letter counts as 0.
-- NOTE(review): the accepted alphabet omits 'w'/'W' — presumably a typo
-- in the original, kept as-is to preserve the accepted language.
a_identifier = do
  c <- oneOf "abcdefghijklmnopqrstuvxyzABCDEFGHIJKLMNOPQRSTUVXYZ"
  x <- many digit
  let x' = if null x then "0" else x
  -- BUG FIX: this returned @read x@, so an identifier without digits
  -- (e.g. "a") crashed on @read ""@; @x'@ was computed for exactly this
  -- case but never used.
  return (Ident $ (read x') + ((ord c) * 100))
-- | A non-negative integer literal.
number = do
  x <- many1 digit
  return (Num $ read x)

colon      = sc ':'
terminator = sc ';'

-- | A comparison operator, encoded as a single characteristic character.
f_cond = (try lt) <|> (try gt) <|> (try eq) <|> ne

-- NOTE(review): in 'lt' and 'gt' both alternatives parse the same string,
-- so the 'try' branch is redundant; possibly a leftover from a "<="/">="
-- variant.  Kept as-is.
lt = do
  (try $ string "<") <|> string "<"
  return '<'
gt = do
  (try $ string ">") <|> string ">"
  return '>'
eq = do
  string "=="
  return '='
ne = do
  string "!="
  return '!'

-- Map operator characters back to AST constructors.
x_cond '<' = Lt
x_cond '>' = Gt
x_cond '=' = Eq
x_cond '!' = Ne

x_addsub '+' = XAdd
x_addsub '-' = XSub
x_muldiv '*' = XMul
x_muldiv '/' = XDiv
-- | Parse @xl op xr@ with surrounding whitespace and build the AST node
-- by feeding the operator character to the constructor-selector @c@.
bin_op c ch xl xr = do
  spaces
  x1 <- xl
  spaces
  op_c <- ch
  spaces
  x2 <- xr
  spaces
  return (c op_c x1 x2)

-- | A single character surrounded by optional whitespace.
sc c = do
  spaces
  char c
  spaces
-- | Evaluate a condition against the variable table; 'XCond' counts any
-- non-zero expression as true.
evalCond :: HashTable Int Int -> Cond -> IO Bool
evalCond vars (Lt x1 x2) = evalX vars x1 x2 (<)
evalCond vars (Gt x1 x2) = evalX vars x1 x2 (>)
evalCond vars (Eq x1 x2) = evalX vars x1 x2 (==)
evalCond vars (Ne x1 x2) = evalX vars x1 x2 (/=)
evalCond vars (XCond x) = do
  v <- evalExpr vars x
  return (v /= 0)
-- | Evaluate an expression; unbound variables read as 0.
-- NOTE(review): 'XDiv' uses 'div' directly, so division by zero throws.
evalExpr :: HashTable Int Int -> Expr -> IO Int
evalExpr vars (Num x) = return x
evalExpr vars (Ident x) = do
  v <- Data.HashTable.lookup vars x
  return (fromMaybe 0 v)
evalExpr vars (XMul x1 x2) = evalX vars x1 x2 (*)
evalExpr vars (XDiv x1 x2) = evalX vars x1 x2 div
evalExpr vars (XAdd x1 x2) = evalX vars x1 x2 (+)
evalExpr vars (XSub x1 x2) = evalX vars x1 x2 (-)

-- | Evaluate both operands and combine them with @f@.
evalX vars x1 x2 f = do
  v1 <- evalExpr vars x1
  v2 <- evalExpr vars x2
  return (f v1 v2)
-- | Interpret a statement.  @cnt@ counts executed assignments (the
-- program's "work"); @vars@ is the mutable variable table.
eval cnt vars Nil = return ()
eval cnt vars (Assgn k x) = do
  v <- evalExpr vars x
  update vars k v
  cnt `modifyIORef` (+ 1) `seq` modifyIORef cnt (+ 1) `seq` return () -- (see note)
eval cnt vars w@(Wh c b) = do
  f <- evalCond vars c
  if f then ((eval cnt vars b) >> (eval cnt vars w)) else return ()
eval cnt vars (For a1 c a2 b) = do
  eval cnt vars a1
  f <- evalCond vars c
  if f then ((eval cnt vars b) >> (eval cnt vars a2) >> (eval cnt vars (For Nil c a2 b))) else return ()
eval cnt vars (Bl []) = return ()
eval cnt vars (Bl (s : ss)) = do
  eval cnt vars s
  eval cnt vars (Bl ss)
-- | Run @block@ with the input variable N (slot -1) set to @n@ and return
-- how many assignments were executed.
brrrng n block = do
  cnt <- newIORef 0
  vars <- new (==) hashInt
  update vars (-1) n
  eval cnt vars block
  cnt' <- readIORef cnt
  return cnt'
--ff l = all (== 0) l
-- | "Flat enough": at least a third (rounded down) of the values are 0.
ff xs = zeroCount >= threshold
  where
    zeroCount = length (filter (== 0) xs)
    threshold = length xs `div` 3

-- | Count how many difference passes make the sequence flat; the count
-- starts at @n@ and the last pass is not charged (hence @n - 1@).
diffs' n [] = n
diffs' n xs
  | ff xs     = n - 1
  | otherwise = diffs' (n + 1) (diffs xs)

-- | Successive differences, clamped below at 0.
diffs xs = zipWith step (tail xs) xs
  where
    step next prev = max 0 (next - prev)
-- | Read a program from stdin, run it for N = 1..20, and print the
-- estimated polynomial degree of the assignment-count sequence.
main = do
  p <- getContents
  let (P decl block) = prs program p
  l <- mapM (\n -> brrrng n block) [1 .. 20]
  --putStrLn (show $ l)
  putStrLn (show $ diffs' 0 l)
| pbl64k/CodeSprints | CodeSprint-2012-05-12-Interviewstreet/Complexity/c.accepted.hs | bsd-2-clause | 6,592 | 0 | 15 | 2,629 | 2,529 | 1,232 | 1,297 | 182 | 3 |
module Printer.Tree where
import Printer.Dot
import Tree
instance DotPrinter Tree where
labelNode t@(Call _ ch _ _) = addChild t ch
labelNode t@(Gen _ _ ch _ _) = addChild t ch
labelNode t@(Or ch1 ch2 _ _) = addChildren t [ch1, ch2]
labelNode t@(Split _ ch _ _) = addChildren t ch
labelNode t = addLeaf t
| kajigor/uKanren_transformations | src/Printer/Tree.hs | bsd-3-clause | 345 | 0 | 9 | 98 | 153 | 79 | 74 | 9 | 0 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
TcSplice: Template Haskell splices
-}
{-# LANGUAGE CPP, FlexibleInstances #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE InstanceSigs #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module ETA.TypeCheck.TcSplice(
-- These functions are defined in stage1 and stage2
-- The raise civilised errors in stage1
tcSpliceExpr, tcTypedBracket, tcUntypedBracket,
runQuasiQuoteExpr, runQuasiQuotePat,
runQuasiQuoteDecl, runQuasiQuoteType,
runAnnotation,
#ifdef GHCI
-- These ones are defined only in stage2, and are
-- called only in stage2 (ie GHCI is on)
runMetaE, runMetaP, runMetaT, runMetaD, runQuasi,
tcTopSpliceExpr, lookupThName_maybe, traceSplice, SpliceInfo(..),
defaultRunMeta, runMeta'
#endif
) where
import ETA.HsSyn.HsSyn
import ETA.Main.Annotations
import ETA.BasicTypes.Name
import ETA.TypeCheck.TcRnMonad
import ETA.BasicTypes.RdrName
import ETA.TypeCheck.TcType
#ifdef GHCI
import ETA.Main.HscMain
-- These imports are the reason that TcSplice
-- is very high up the module hierarchy
import ETA.Main.HscTypes
import ETA.HsSyn.Convert
import ETA.Rename.RnExpr
import ETA.Rename.RnEnv
import ETA.Rename.RnTypes
import ETA.TypeCheck.TcExpr
import ETA.TypeCheck.TchsSyn
import ETA.TypeCheck.TcSimplify
import ETA.TypeCheck.TcUnify
import ETA.Types.Type
import ETA.Types.Kind
import ETA.BasicTypes.NameSet
import ETA.TypeCheck.TcEnv
import ETA.TypeCheck.TcMType
import ETA.TypeCheck.TcHsType
import ETA.Iface.TcIface
import ETA.Types.TypeRep
import FamInst
import ETA.Types.FamInstEnv
import ETA.Types.InstEnv
import ETA.BasicTypes.NameEnv
import ETA.Prelude.PrelNames
import ETA.BasicTypes.OccName
import ETA.Main.Hooks
import ETA.BasicTypes.Var
import ETA.BasicTypes.Module
import ETA.Iface.LoadIface
import ETA.Types.Class
import Inst
import ETA.Types.TyCon
import ETA.Types.CoAxiom
import ETA.BasicTypes.PatSyn ( patSynName )
import ETA.BasicTypes.ConLike
import ETA.BasicTypes.DataCon
import ETA.TypeCheck.TcEvidence( TcEvBinds(..) )
import ETA.BasicTypes.Id
import ETA.BasicTypes.IdInfo
import DsExpr
import ETA.DeSugar.DsMonad
import ETA.Utils.Serialized
import ETA.Main.ErrUtils
import ETA.BasicTypes.SrcLoc
import ETA.Utils.Util
import Data.List ( mapAccumL )
import ETA.BasicTypes.Unique
import ETA.BasicTypes.VarSet ( isEmptyVarSet )
import Data.Maybe
import ETA.BasicTypes.BasicTypes hiding( SuccessFlag(..) )
import ETA.Utils.Maybes( MaybeErr(..) )
import ETA.Main.DynFlags
import ETA.Utils.Panic
import ETA.BasicTypes.Lexeme
import ETA.Utils.FastString
import ETA.Utils.Outputable
import Control.Monad ( when )
import ETA.DeSugar.DsMeta
import qualified Language.Haskell.TH as TH
-- THSyntax gives access to internal functions and data types
import qualified Language.Haskell.TH.Syntax as TH
-- Because GHC.Desugar might not be in the base library of the bootstrapping compiler
import GHC.Desugar ( AnnotationWrapper(..) )
import qualified Data.Map as Map
import Data.Dynamic ( fromDynamic, toDyn )
import Data.Typeable ( typeOf, Typeable )
import Data.Data (Data)
import GHC.Exts ( unsafeCoerce# )
#endif
{-
************************************************************************
* *
\subsection{Main interface + stubs for the non-GHCI case
* *
************************************************************************
-}
-- Signatures shared by the stage-1 stubs and the real (GHCI) definitions.
-- Typecheck a typed bracket [|| e ||] against the expected type.
tcTypedBracket :: HsBracket Name -> TcRhoType -> TcM (HsExpr TcId)
-- Typecheck an untyped bracket plus its pending renamer splices.
tcUntypedBracket :: HsBracket Name -> [PendingRnSplice] -> TcRhoType -> TcM (HsExpr TcId)
-- Typecheck a splice $(e).
tcSpliceExpr :: HsSplice Name -> TcRhoType -> TcM (HsExpr TcId)
-- None of these functions add constraints to the LIE
-- Run quasiquoters in the renamer, producing the respective HsSyn flavour.
runQuasiQuoteExpr :: HsQuasiQuote RdrName -> RnM (LHsExpr RdrName)
runQuasiQuotePat :: HsQuasiQuote RdrName -> RnM (LPat RdrName)
runQuasiQuoteType :: HsQuasiQuote RdrName -> RnM (LHsType RdrName)
runQuasiQuoteDecl :: HsQuasiQuote RdrName -> RnM [LHsDecl RdrName]
-- Evaluate an ANN payload expression at compile time.
runAnnotation :: CoreAnnTarget -> LHsExpr Name -> TcM Annotation
#ifndef GHCI
-- Stage-1 (no GHCI) stubs: TH cannot run, so every entry point raises
-- a civilised "not supported" error via failTH.
tcTypedBracket x _ = failTH x "Template Haskell bracket"
tcUntypedBracket x _ _ = failTH x "Template Haskell bracket"
tcSpliceExpr e _ = failTH e "Template Haskell splice"
runQuasiQuoteExpr q = failTH q "quasiquote"
runQuasiQuotePat q = failTH q "pattern quasiquote"
runQuasiQuoteType q = failTH q "type quasiquote"
runQuasiQuoteDecl q = failTH q "declaration quasiquote"
runAnnotation _ q = failTH q "annotation"
#else
-- The whole of the rest of the file is the else-branch (ie stage2 only)
{-
Note [How top-level splices are handled]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Top-level splices (those not inside a [| .. |] quotation bracket) are handled
very straightforwardly:
1. tcTopSpliceExpr: typecheck the body e of the splice $(e)
2. runMetaT: desugar, compile, run it, and convert result back to
HsSyn RdrName (of the appropriate flavour, eg HsType RdrName,
HsExpr RdrName etc)
3. treat the result as if that's what you saw in the first place
e.g for HsType, rename and kind-check
for HsExpr, rename and type-check
(The last step is different for decls, because they can *only* be
top-level: we return the result of step 2.)
Note [How brackets and nested splices are handled]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Nested splices (those inside a [| .. |] quotation bracket),
are treated quite differently.
Remember, there are two forms of bracket
typed [|| e ||]
and untyped [| e |]
The life cycle of a typed bracket:
* Starts as HsBracket
* When renaming:
* Set the ThStage to (Brack s RnPendingTyped)
* Rename the body
* Result is still a HsBracket
* When typechecking:
* Set the ThStage to (Brack s (TcPending ps_var lie_var))
* Typecheck the body, and throw away the elaborated result
* Nested splices (which must be typed) are typechecked, and
the results accumulated in ps_var; their constraints
accumulate in lie_var
* Result is a HsTcBracketOut rn_brack pending_splices
where rn_brack is the incoming renamed bracket
The life cycle of a un-typed bracket:
* Starts as HsBracket
* When renaming:
* Set the ThStage to (Brack s (RnPendingUntyped ps_var))
* Rename the body
* Nested splices (which must be untyped) are renamed, and the
results accumulated in ps_var
* Result is still (HsRnBracketOut rn_body pending_splices)
* When typechecking a HsRnBracketOut
* Typecheck the pending_splices individually
* Ignore the body of the bracket; just check that the context
expects a bracket of that type (e.g. a [p| pat |] bracket should
      be in a context needing a (Q Pat))
* Result is a HsTcBracketOut rn_brack pending_splices
where rn_brack is the incoming renamed bracket
In both cases, desugaring happens like this:
* HsTcBracketOut is desugared by DsMeta.dsBracket. It
a) Extends the ds_meta environment with the PendingSplices
attached to the bracket
b) Converts the quoted (HsExpr Name) to a CoreExpr that, when
run, will produce a suitable TH expression/type/decl. This
is why we leave the *renamed* expression attached to the bracket:
the quoted expression should not be decorated with all the goop
added by the type checker
* Each splice carries a unique Name, called a "splice point", thus
${n}(e). The name is initialised to an (Unqual "splice") when the
splice is created; the renamer gives it a unique.
* When DsMeta (used to desugar the body of the bracket) comes across
a splice, it looks up the splice's Name, n, in the ds_meta envt,
to find an (HsExpr Id) that should be substituted for the splice;
it just desugars it to get a CoreExpr (DsMeta.repSplice).
Example:
Source: f = [| Just $(g 3) |]
The [| |] part is a HsBracket
Typechecked: f = [| Just ${s7}(g 3) |]{s7 = g Int 3}
The [| |] part is a HsBracketOut, containing *renamed*
(not typechecked) expression
The "s7" is the "splice point"; the (g Int 3) part
is a typechecked expression
Desugared: f = do { s7 <- g Int 3
; return (ConE "Data.Maybe.Just" s7) }
Note [Template Haskell state diagram]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Here are the ThStages, s, their corresponding level numbers
(the result of (thLevel s)), and their state transitions.
The top level of the program is stage Comp:
Start here
|
V
----------- $ ------------ $
| Comp | ---------> | Splice | -----|
| 1 | | 0 | <----|
----------- ------------
^ | ^ |
$ | | [||] $ | | [||]
| v | v
-------------- ----------------
| Brack Comp | | Brack Splice |
| 2 | | 1 |
-------------- ----------------
* Normal top-level declarations start in state Comp
(which has level 1).
Annotations start in state Splice, since they are
treated very like a splice (only without a '$')
* Code compiled in state Splice (and only such code)
will be *run at compile time*, with the result replacing
the splice
* The original paper used level -1 instead of 0, etc.
* The original paper did not allow a splice within a
splice, but there is no reason not to. This is the
$ transition in the top right.
Note [Template Haskell levels]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* Imported things are impLevel (= 0)
* However things at level 0 are not *necessarily* imported.
eg $( \b -> ... ) here b is bound at level 0
* In GHCi, variables bound by a previous command are treated
as impLevel, because we have bytecode for them.
* Variables are bound at the "current level"
* The current level starts off at outerLevel (= 1)
* The level is decremented by splicing $(..)
incremented by brackets [| |]
incremented by name-quoting 'f
When a variable is used, we compare
bind: binding level, and
use: current level at usage site
Generally
bind > use Always error (bound later than used)
[| \x -> $(f x) |]
bind = use Always OK (bound same stage as used)
[| \x -> $(f [| x |]) |]
bind < use Inside brackets, it depends
Inside splice, OK
Inside neither, OK
For (bind < use) inside brackets, there are three cases:
- Imported things OK f = [| map |]
- Top-level things OK g = [| f |]
- Non-top-level Only if there is a liftable instance
h = \(x:Int) -> [| x |]
To track top-level-ness we use the ThBindEnv in TcLclEnv
For example:
f = ...
g1 = $(map ...) is OK
   g2 = $(f ...) is not OK; because we haven't compiled f yet
************************************************************************
* *
\subsection{Quoting an expression}
* *
************************************************************************
-}
-- See Note [How brackets and nested splices are handled]
-- tcTypedBracket :: HsBracket Name -> TcRhoType -> TcM (HsExpr TcId)
-- Typecheck a typed bracket [|| e ||]: typecheck the body (discarding
-- the elaborated term, keeping its type), collect nested splices into
-- ps_ref, and wrap the result in unsafeTExpCoerce so it has type
-- Q (TExp tau).  See Note [How brackets and nested splices are handled].
tcTypedBracket brack@(TExpBr expr) res_ty
  = addErrCtxt (quotationCtxtDoc brack) $
    do { cur_stage <- getStage
       ; ps_ref <- newMutVar []
       ; lie_var <- getConstraintVar -- Any constraints arising from nested splices
                                     -- should get thrown into the constraint set
                                     -- from outside the bracket
       -- Typecheck expr to make sure it is valid,
       -- Throw away the typechecked expression but return its type.
       -- We'll typecheck it again when we splice it in somewhere
       ; (_tc_expr, expr_ty) <- setStage (Brack cur_stage (TcPending ps_ref lie_var)) $
                                tcInferRhoNC expr
                                -- NC for no context; tcBracket does that
       ; meta_ty <- tcTExpTy expr_ty
       ; co <- unifyType meta_ty res_ty
       ; ps' <- readMutVar ps_ref
       ; texpco <- tcLookupId unsafeTExpCoerceName
       ; return (mkHsWrapCo co (unLoc (mkHsApp (nlHsTyApp texpco [expr_ty])
                                               (noLoc (HsTcBracketOut brack ps'))))) }
-- Only TExpBr is a typed bracket; anything else here is a compiler bug.
tcTypedBracket other_brack _
  = pprPanic "tcTypedBracket" (ppr other_brack)
-- tcUntypedBracket :: HsBracket Name -> [PendingRnSplice] -> TcRhoType -> TcM (HsExpr TcId)
-- Typecheck an untyped bracket: typecheck each pending renamer splice
-- individually, ignore the bracket body itself, and unify the bracket's
-- meta type (e.g. Q Exp) with the expected type.
tcUntypedBracket brack ps res_ty
  = do { traceTc "tc_bracket untyped" (ppr brack $$ ppr ps)
       ; pending_tc <- mapM tcPendingSplice ps
       ; brack_ty   <- tcBrackTy brack
       ; wrap_co    <- unifyType brack_ty res_ty
       ; traceTc "tc_bracket done untyped" (ppr brack_ty)
       ; return (mkHsWrapCo wrap_co (HsTcBracketOut brack pending_tc)) }
---------------
-- The meta-type a given bracket form should have; the DecBrL and TExpBr
-- cases are impossible by the time we get here (DecBrL is turned into
-- DecBrG earlier, and TExpBr goes through tcTypedBracket).
tcBrackTy :: HsBracket Name -> TcM TcType
tcBrackTy (VarBr _ _) = tcMetaTy nameTyConName -- Result type is Var (not Q-monadic)
tcBrackTy (ExpBr _) = tcMetaTy expQTyConName -- Result type is ExpQ (= Q Exp)
tcBrackTy (TypBr _) = tcMetaTy typeQTyConName -- Result type is Type (= Q Typ)
tcBrackTy (DecBrG _) = tcMetaTy decsQTyConName -- Result type is Q [Dec]
tcBrackTy (PatBr _) = tcMetaTy patQTyConName -- Result type is PatQ (= Q Pat)
tcBrackTy (DecBrL _) = panic "tcBrackTy: Unexpected DecBrL"
tcBrackTy (TExpBr _) = panic "tcUntypedBracket: Unexpected TExpBr"
---------------
-- Typecheck one pending renamer splice against the meta-type implied by
-- its flavour (Q Exp, Q Pat, Q Type, Q [Dec]).
tcPendingSplice :: PendingRnSplice -> TcM PendingTcSplice
tcPendingSplice (PendingRnExpSplice (PendSplice n expr))
  = do { res_ty <- tcMetaTy expQTyConName
       ; tc_pending_splice n expr res_ty }
tcPendingSplice (PendingRnPatSplice (PendSplice n expr))
  = do { res_ty <- tcMetaTy patQTyConName
       ; tc_pending_splice n expr res_ty }
tcPendingSplice (PendingRnTypeSplice (PendSplice n expr))
  = do { res_ty <- tcMetaTy typeQTyConName
       ; tc_pending_splice n expr res_ty }
tcPendingSplice (PendingRnDeclSplice (PendSplice n expr))
  = do { res_ty <- tcMetaTy decsQTyConName
       ; tc_pending_splice n expr res_ty }
tcPendingSplice (PendingRnCrossStageSplice n)
  -- Behave like $(lift x); not very pretty
  = do { res_ty <- tcMetaTy expQTyConName
       ; tc_pending_splice n (nlHsApp (nlHsVar liftName) (nlHsVar n)) res_ty }
---------------
-- Typecheck a pending splice body against its expected (Q ...) type and
-- pair the elaborated expression back up with its splice-point name.
tc_pending_splice :: Name -> LHsExpr Name -> TcRhoType -> TcM PendingTcSplice
tc_pending_splice splice_name expr res_ty
  = fmap (PendSplice splice_name) (tcMonoExpr expr res_ty)
---------------
-- Takes a type tau and returns the type Q (TExp tau)
-- Given tau, build the type (Q (TExp tau)) by looking up the Q and
-- TExp type constructors and applying them.
tcTExpTy :: TcType -> TcM TcType
tcTExpTy tau
  = do { q_tc    <- tcLookupTyCon qTyConName
       ; texp_tc <- tcLookupTyCon tExpTyConName
       ; return (mkTyConApp q_tc [mkTyConApp texp_tc [tau]]) }
{-
************************************************************************
* *
\subsection{Splicing an expression}
* *
************************************************************************
-}
-- Typecheck a splice $(e): at top level (Splice/Comp stages) run it now
-- via tcTopSplice; inside a bracket, defer it via tcNestedSplice.
tcSpliceExpr splice@(HsSplice name expr) res_ty
  = addErrCtxt (spliceCtxtDoc splice) $
    setSrcSpan (getLoc expr) $ do
    { stage <- getStage
    ; case stage of
        Splice {} -> tcTopSplice expr res_ty
        Comp -> tcTopSplice expr res_ty
        Brack pop_stage pend -> tcNestedSplice pop_stage pend name expr res_ty }
tcNestedSplice :: ThStage -> PendingStuff -> Name
                -> LHsExpr Name -> TcRhoType -> TcM (HsExpr Id)
-- See Note [How brackets and nested splices are handled]
-- A splice inside brackets
-- Typecheck the splice body at the popped stage against Q (TExp res_ty),
-- wrap it in unTypeQ, and stash it in the bracket's pending-splice var.
-- The returned expression is a placeholder: the real code comes from the
-- pending splices when the bracket is desugared.
tcNestedSplice pop_stage (TcPending ps_var lie_var) splice_name expr res_ty
  = do { meta_exp_ty <- tcTExpTy res_ty
       ; expr' <- setStage pop_stage $
                  setConstraintVar lie_var $
                  tcMonoExpr expr meta_exp_ty
       ; untypeq <- tcLookupId unTypeQName
       ; let expr'' = mkHsApp (nlHsTyApp untypeq [res_ty]) expr'
       ; ps <- readMutVar ps_var
       ; writeMutVar ps_var (PendSplice splice_name expr'' : ps)
       -- The returned expression is ignored; it's in the pending splices
       ; return (panic "tcSpliceExpr") }
-- Any other PendingStuff means we are still in the renamer's view of the
-- bracket, which should be impossible here.
tcNestedSplice _ _ splice_name _ _
  = pprPanic "tcNestedSplice: rename stage found" (ppr splice_name)
-- Typecheck, compile and run a top-level typed splice, then rename and
-- typecheck whatever code it produced against res_ty.
-- See Note [How top-level splices are handled].
tcTopSplice :: LHsExpr Name -> TcRhoType -> TcM (HsExpr Id)
tcTopSplice expr res_ty
  = do { -- Typecheck the expression,
         -- making sure it has type Q (T res_ty)
         meta_exp_ty <- tcTExpTy res_ty
       ; zonked_q_expr <- tcTopSpliceExpr True $
                          tcMonoExpr expr meta_exp_ty
       -- Run the expression
       ; expr2 <- runMetaE zonked_q_expr
       ; showSplice False "expression" expr (ppr expr2)
       -- Rename and typecheck the spliced-in expression,
       -- making sure it has type res_ty
       -- These steps should never fail; this is a *typed* splice
       ; addErrCtxt (spliceResultDoc expr) $ do
       { (exp3, _fvs) <- rnLExpr expr2
       ; exp4 <- tcMonoExpr exp3 res_ty
       ; return (unLoc exp4) } }
{-
************************************************************************
* *
\subsection{Error messages}
* *
************************************************************************
-}
-- Error-context line reported while typechecking a TH quotation.
quotationCtxtDoc :: HsBracket Name -> SDoc
quotationCtxtDoc bracket = hang header 2 (ppr bracket)
  where
    header = ptext (sLit "In the Template Haskell quotation")
-- Error-context line reported while typechecking a TH splice.
spliceCtxtDoc :: HsSplice Name -> SDoc
spliceCtxtDoc splice = hang header 2 (pprTypedSplice splice)
  where
    header = ptext (sLit "In the Template Haskell splice")
-- Error context for failures while processing the code a splice produced.
spliceResultDoc :: LHsExpr Name -> SDoc
spliceResultDoc expr = sep [intro, body, hint]
  where
    intro = ptext (sLit "In the result of the splice:")
    body  = nest 2 (char '$' <> pprParendExpr expr)
    hint  = ptext (sLit "To see what the splice expanded to, use -ddump-splices")
-------------------
tcTopSpliceExpr :: Bool -> TcM (LHsExpr Id) -> TcM (LHsExpr Id)
-- Note [How top-level splices are handled]
-- Type check an expression that is the body of a top-level splice
--   (the caller will compile and run it)
-- Note that set the level to Splice, regardless of the original level,
-- before typechecking the expression.  For example:
--      f x = $( ...$(g 3) ... )
-- The recursive call to tcMonoExpr will simply expand the
-- inner escape before dealing with the outer one
-- The Bool says whether this is a typed splice; the result is fully
-- zonked and has its dictionary bindings tied in, ready to compile.
tcTopSpliceExpr isTypedSplice tc_action
  = checkNoErrs $ -- checkNoErrs: must not try to run the thing
                  -- if the type checker fails!
    unsetGOptM Opt_DeferTypeErrors $
                  -- Don't defer type errors. Not only are we
                  -- going to run this code, but we do an unsafe
                  -- coerce, so we get a seg-fault if, say we
                  -- splice a type into a place where an expression
                  -- is expected (Trac #7276)
    setStage (Splice isTypedSplice) $
    do { -- Typecheck the expression
         (expr', lie) <- captureConstraints tc_action
       -- Solve the constraints
       ; const_binds <- simplifyTop lie
       -- Zonk it and tie the knot of dictionary bindings
       ; zonkTopLExpr (mkHsDictLet (EvBinds const_binds) expr') }
{-
************************************************************************
* *
Annotations
* *
************************************************************************
-}
-- Typecheck, compile and run the payload of an ANN pragma at compile
-- time, wrapping it in toAnnotationWrapper so the Data dictionary
-- travels with the value; the serialized result becomes the Annotation.
runAnnotation target expr = do
    -- Find the classes we want instances for in order to call toAnnotationWrapper
    loc <- getSrcSpanM
    data_class <- tcLookupClass dataClassName
    to_annotation_wrapper_id <- tcLookupId toAnnotationWrapperName
    -- Check the instances we require live in another module (we want to execute it..)
    -- and check identifiers live in other modules using TH stage checks. tcSimplifyStagedExpr
    -- also resolves the LIE constraints to detect e.g. instance ambiguity
    zonked_wrapped_expr' <- tcTopSpliceExpr False $
           do { (expr', expr_ty) <- tcInferRhoNC expr
                -- We manually wrap the typechecked expression in a call to toAnnotationWrapper
                -- By instantiating the call >here< it gets registered in the
                -- LIE consulted by tcTopSpliceExpr
                -- and hence ensures the appropriate dictionary is bound by const_binds
              ; wrapper <- instCall AnnOrigin [expr_ty] [mkClassPred data_class [expr_ty]]
              ; let specialised_to_annotation_wrapper_expr
                      = L loc (HsWrap wrapper (HsVar to_annotation_wrapper_id))
              ; return (L loc (HsApp specialised_to_annotation_wrapper_expr expr')) }
    -- Run the appropriately wrapped expression to get the value of
    -- the annotation and its dictionaries. The return value is of
    -- type AnnotationWrapper by construction, so this conversion is
    -- safe
    serialized <- runMetaAW zonked_wrapped_expr'
    return Annotation {
               ann_target = target,
               ann_value = serialized
           }
-- Unpack a compile-time-evaluated AnnotationWrapper into its serialized
-- form; never fails (always Right), but the forced serialization makes
-- any exception lurking in the user's annotation value surface here,
-- inside runMeta's exception handling.
convertAnnotationWrapper :: AnnotationWrapper -> Either MsgDoc Serialized
convertAnnotationWrapper annotation_wrapper = Right $
    case annotation_wrapper of
        AnnotationWrapper value | let serialized = toSerialized serializeWithData value ->
            -- Got the value and dictionaries: build the serialized value and
            -- call it a day. We ensure that we seq the entire serialized value
            -- in order that any errors in the user-written code for the
            -- annotation are exposed at this point. This is also why we are
            -- doing all this stuff inside the context of runMeta: it has the
            -- facilities to deal with user error in a meta-level expression
            seqSerialized serialized `seq` serialized
{-
************************************************************************
* *
Quasi-quoting
* *
************************************************************************
Note [Quasi-quote overview]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
The GHC "quasi-quote" extension is described by Geoff Mainland's paper
"Why it's nice to be quoted: quasiquoting for Haskell" (Haskell
Workshop 2007).
Briefly, one writes
[p| stuff |]
and the arbitrary string "stuff" gets parsed by the parser 'p', whose
type should be Language.Haskell.TH.Quote.QuasiQuoter. 'p' must be
defined in another module, because we are going to run it here. It's
a bit like a TH splice:
$(p "stuff")
However, you can do this in patterns as well as terms. Because of this,
the splice is run by the *renamer* rather than the type checker.
************************************************************************
* *
\subsubsection{Quasiquotation}
* *
************************************************************************
See Note [Quasi-quote overview] in TcSplice.
-}
-- Shared driver for all four quasiquote flavours: resolve the quoter
-- (rejecting locally-defined ones), build the expression
--   (quote_selector quoter) "string", typecheck it against the flavour's
-- meta type, run it, and convert the result back to HsSyn.
runQuasiQuote :: Outputable hs_syn
              => HsQuasiQuote RdrName -- Contains term of type QuasiQuoter, and the String
              -> Name -- Of type QuasiQuoter -> String -> Q th_syn
              -> Name -- Name of th_syn type
              -> String -- Description of splice type
              -> (MetaHook RnM -> LHsExpr Id -> RnM hs_syn)
              -> RnM hs_syn
runQuasiQuote (HsQuasiQuote quoter q_span quote) quote_selector meta_ty descr meta_req
  = do { -- Drop the leading "$" from the quoter name, if present
         -- This is old-style syntax, now deprecated
         -- NB: when removing this backward-compat, remove
         -- the matching code in Lexer.x (around line 310)
         let occ_str = occNameString (rdrNameOcc quoter)
       ; quoter <- ASSERT( not (null occ_str) ) -- Lexer ensures this
                   if head occ_str /= '$' then return quoter
                   else do { addWarn (deprecatedDollar quoter)
                           ; return (mkRdrUnqual (mkVarOcc (tail occ_str))) }
       ; quoter' <- lookupOccRn quoter
       -- We use lookupOcc rather than lookupGlobalOcc because in the
       -- erroneous case of \x -> [x| ...|] we get a better error message
       -- (stage restriction rather than out of scope).
       ; when (isUnboundName quoter') failM
       -- If 'quoter' is not in scope, proceed no further
       -- The error message was generated by lookupOccRn, but it then
       -- succeeds with an "unbound name", which makes the subsequent
       -- attempt to run the quote fail in a confusing way
       -- Check that the quoter is not locally defined, otherwise the TH
       -- machinery will not be able to run the quasiquote.
       ; this_mod <- getModule
       ; let is_local = nameIsLocalOrFrom this_mod quoter'
       ; checkTc (not is_local) (quoteStageError quoter')
       ; traceTc "runQQ" (ppr quoter <+> ppr is_local)
       ; HsQuasiQuote quoter'' _ quote' <- getHooked runQuasiQuoteHook return >>=
            ($ HsQuasiQuote quoter' q_span quote)
       -- Build the expression
       ; let quoterExpr = L q_span $! HsVar $! quoter''
       ; let quoteExpr = L q_span $! HsLit $! HsString "" quote'
       ; let expr = L q_span $
                    HsApp (L q_span $
                           HsApp (L q_span (HsVar quote_selector)) quoterExpr) quoteExpr
       ; meta_exp_ty <- tcMetaTy meta_ty
       -- Typecheck the expression
       ; zonked_q_expr <- tcTopSpliceExpr False (tcMonoExpr expr meta_exp_ty)
       -- Run the expression
       ; result <- runMeta meta_req zonked_q_expr
       ; showSplice (descr == "declarations") descr quoteExpr (ppr result)
       ; return result }
-- The four public quasiquote entry points, each instantiating the shared
-- driver with the matching QuasiQuoter field, meta type and converter.
runQuasiQuoteExpr qq
  = runQuasiQuote qq quoteExpName expQTyConName "expression" metaRequestE
runQuasiQuotePat qq
  = runQuasiQuote qq quotePatName patQTyConName "pattern" metaRequestP
runQuasiQuoteType qq
  = runQuasiQuote qq quoteTypeName typeQTyConName "type" metaRequestT
runQuasiQuoteDecl qq
  = runQuasiQuote qq quoteDecName decsQTyConName "declarations" metaRequestD
-- Error shown when a quasiquoter is defined in the module being compiled.
quoteStageError :: Name -> SDoc
quoteStageError quoter = sep [headline, nest 2 detail]
  where
    headline = ptext (sLit "GHC stage restriction:") <+> ppr quoter
    detail   = ptext (sLit "is used in a quasiquote, and must be imported, not defined locally")
-- Warning for the obsolete "$quoter" quasiquote spelling.
deprecatedDollar :: RdrName -> SDoc
deprecatedDollar quoter = hang headline 2 detail
  where
    headline = ptext (sLit "Deprecated syntax:")
    detail   = ptext (sLit "quasiquotes no longer need a dollar sign:")
               <+> ppr quoter
{-
************************************************************************
* *
\subsection{Running an expression}
* *
************************************************************************
-}
-- Run a TH Q computation directly in the typechecker monad; this works
-- because TcM is an instance of TH.Quasi (defined below).
runQuasi :: TH.Q a -> TcM a
runQuasi = TH.runQ
-- Run a Q computation, trace its TH-level result, and convert it back
-- to HsSyn at the given source span.
runQResult :: (a -> String) -> (SrcSpan -> a -> b) -> SrcSpan -> TH.Q a -> TcM b
runQResult pp_th convert expr_span q_act
  = do { th_res <- TH.runQ q_act
       ; traceTc "Got TH result:" (text (pp_th th_res))
       ; return (convert expr_span th_res) }
-----------------
-- Run a compiled splice through the (possibly user-installed) meta hook,
-- falling back to defaultRunMeta.
runMeta :: (MetaHook TcM -> LHsExpr Id -> TcM hs_syn)
        -> LHsExpr Id
        -> TcM hs_syn
runMeta unwrap expr =
  getHooked runMetaHook defaultRunMeta >>= \hook -> unwrap hook expr
-- Default meta hook: dispatch on the request flavour, running the splice
-- via runMeta' with the appropriate TH pretty-printer and TH->HsSyn
-- converter.  Annotations (MetaAW) suppress code in error messages.
defaultRunMeta :: MetaHook TcM
defaultRunMeta (MetaE r)
  = fmap r . runMeta' True ppr (runQResult TH.pprint convertToHsExpr)
defaultRunMeta (MetaP r)
  = fmap r . runMeta' True ppr (runQResult TH.pprint convertToPat)
defaultRunMeta (MetaT r)
  = fmap r . runMeta' True ppr (runQResult TH.pprint convertToHsType)
defaultRunMeta (MetaD r)
  = fmap r . runMeta' True ppr (runQResult TH.pprint convertToHsDecls)
defaultRunMeta (MetaAW r)
  = fmap r . runMeta' False (const empty) (const (return . convertAnnotationWrapper))
    -- We turn off showing the code in meta-level exceptions because doing so exposes
    -- the toAnnotationWrapper function that we slap around the users code
----------------
-- Convenience wrappers over runMeta, one per splice flavour.
runMetaAW :: LHsExpr Id -- Of type AnnotationWrapper
          -> TcM Serialized
runMetaAW = runMeta metaRequestAW
runMetaE :: LHsExpr Id -- Of type (Q Exp)
         -> TcM (LHsExpr RdrName)
runMetaE = runMeta metaRequestE
runMetaP :: LHsExpr Id -- Of type (Q Pat)
         -> TcM (LPat RdrName)
runMetaP = runMeta metaRequestP
runMetaT :: LHsExpr Id -- Of type (Q Type)
         -> TcM (LHsType RdrName)
runMetaT = runMeta metaRequestT
runMetaD :: LHsExpr Id -- Of type Q [Dec]
         -> TcM [LHsDecl RdrName]
runMetaD = runMeta metaRequestD
---------------
-- The workhorse: desugar a typechecked splice, compile and link it,
-- unsafely coerce the resulting value, run it, and convert the output
-- back to HsSyn -- catching and reporting exceptions at every phase.
-- See Note [Exceptions in TH] and Note [Concealed TH exceptions].
runMeta' :: Bool -- Whether code should be printed in the exception message
         -> (hs_syn -> SDoc) -- how to print the code
         -> (SrcSpan -> x -> TcM (Either MsgDoc hs_syn)) -- How to run x
         -> LHsExpr Id -- Of type x; typically x = Q TH.Exp, or something like that
         -> TcM hs_syn -- Of type t
runMeta' show_code ppr_hs run_and_convert expr
  = do { traceTc "About to run" (ppr expr)
       ; recordThSpliceUse -- seems to be the best place to do this,
                           -- we catch all kinds of splices and annotations.
       -- Check that we've had no errors of any sort so far.
       -- For example, if we found an error in an earlier defn f, but
       -- recovered giving it type f :: forall a.a, it'd be very dodgy
       -- to carry on.  Mind you, the staging restrictions mean we won't
       -- actually run f, but it still seems wrong. And, more concretely,
       -- see Trac #5358 for an example that fell over when trying to
       -- reify a function with a "?" kind in it.  (These don't occur
       -- in type-correct programs.)
       ; failIfErrsM
       -- Desugar
       ; ds_expr <- initDsTc (dsLExpr expr)
       -- Compile and link it; might fail if linking fails
       ; hsc_env <- getTopEnv
       ; src_span <- getSrcSpanM
       ; traceTc "About to run (desugared)" (ppr ds_expr)
       ; either_hval <- tryM $ liftIO $
                        HscMain.hscCompileCoreExpr hsc_env src_span ds_expr
       ; case either_hval of {
           Left exn -> fail_with_exn "compile and link" exn ;
           Right hval -> do
        { -- Coerce it to Q t, and run it
          -- Running might fail if it throws an exception of any kind (hence tryAllM)
          -- including, say, a pattern-match exception in the code we are running
          --
          -- We also do the TH -> HS syntax conversion inside the same
          -- exception-catching thing so that if there are any lurking
          -- exceptions in the data structure returned by hval, we'll
          -- encounter them inside the try
          --
          -- See Note [Exceptions in TH]
          let expr_span = getLoc expr
        ; either_tval <- tryAllM $
                         setSrcSpan expr_span $ -- Set the span so that qLocation can
                                                -- see where this splice is
             do { mb_result <- run_and_convert expr_span (unsafeCoerce# hval)
                ; case mb_result of
                    Left err -> failWithTc err
                    Right result -> do { traceTc "Got HsSyn result:" (ppr_hs result)
                                       ; return $! result } }
        ; case either_tval of
            Right v -> return v
            Left se -> case fromException se of
                         Just IOEnvFailure -> failM -- Error already in Tc monad
                         _ -> fail_with_exn "run" se -- Exception
        }}}
  where
    -- see Note [Concealed TH exceptions]
    -- Report an exception from the given phase, showing the spliced code
    -- only when show_code is set (annotations hide their wrapper).
    fail_with_exn phase exn = do
        exn_msg <- liftIO $ Panic.safeShowException exn
        let msg = vcat [text "Exception when trying to" <+> text phase <+> text "compile-time code:",
                        nest 2 (text exn_msg),
                        if show_code then text "Code:" <+> ppr expr else empty]
        failWithTc msg
{-
Note [Exceptions in TH]
~~~~~~~~~~~~~~~~~~~~~~~
Supppose → Suppose we have something like this
$( f 4 )
where
f :: Int -> Q [Dec]
f n | n>3 = fail "Too many declarations"
| otherwise = ...
The 'fail' is a user-generated failure, and should be displayed as a
perfectly ordinary compiler error message, not a panic or anything
like that. Here's how it's processed:
* 'fail' is the monad fail. The monad instance for Q in TH.Syntax
effectively transforms (fail s) to
qReport True s >> fail
where 'qReport' comes from the Quasi class and fail from its monad
superclass.
* The TcM monad is an instance of Quasi (see TcSplice), and it implements
(qReport True s) by using addErr to add an error message to the bag of errors.
The 'fail' in TcM raises an IOEnvFailure exception
* 'qReport' forces the message to ensure any exception hidden in unevaluated
thunk doesn't get into the bag of errors. Otherwise the following splice
  will trigger panic (Trac #8987):
$(fail undefined)
See also Note [Concealed TH exceptions]
* So, when running a splice, we catch all exceptions; then for
- an IOEnvFailure exception, we assume the error is already
in the error-bag (above)
- other errors, we add an error to the bag
and then fail
Note [Concealed TH exceptions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When displaying the error message contained in an exception originated from TH
code, we need to make sure that the error message itself does not contain an
exception. For example, when executing the following splice:
$( error ("foo " ++ error "bar") )
the message for the outer exception is a thunk which will throw the inner
exception when evaluated.
For this reason, we display the message of a TH exception using the
'safeShowException' function, which recursively catches any exception thrown
when showing an error message.
To call runQ in the Tc monad, we need to make TcM an instance of Quasi:
-}
-- TcM (spelled out as IOEnv (Env TcGblEnv TcLclEnv)) is an instance of
-- TH.Quasi, which is what lets runQuasi/TH.runQ execute Q computations
-- directly in the typechecker monad.
instance TH.Quasi (IOEnv (Env TcGblEnv TcLclEnv)) where
  -- Fresh names come from the typechecker's unique supply.
  qNewName s = do { u <- newUnique
                  ; let i = getKey u
                  ; return (TH.mkNameU s i) }
  -- 'msg' is forced to ensure exceptions don't escape,
  -- see Note [Exceptions in TH]
  qReport True msg = seqList msg $ addErr (text msg)
  qReport False msg = seqList msg $ addWarn (text msg)
  -- The splice location was set by runMeta' (setSrcSpan) before running.
  qLocation = do { m <- getModule
                 ; l <- getSrcSpanM
                 ; r <- case l of
                        UnhelpfulSpan _ -> pprPanic "qLocation: Unhelpful location"
                                                    (ppr l)
                        RealSrcSpan s -> return s
                 ; return (TH.Loc { TH.loc_filename = unpackFS (srcSpanFile r)
                                  , TH.loc_module = moduleNameString (moduleName m)
                                  , TH.loc_package = packageKeyString (modulePackageKey m)
                                  , TH.loc_start = (srcSpanStartLine r, srcSpanStartCol r)
                                  , TH.loc_end = (srcSpanEndLine r, srcSpanEndCol r) }) }
  -- Reification delegates to the helpers defined elsewhere in this module.
  qLookupName = lookupName
  qReify = reify
  qReifyInstances = reifyInstances
  qReifyRoles = reifyRoles
  qReifyAnnotations = reifyAnnotations
  qReifyModule = reifyModule
  -- For qRecover, discard error messages if
  -- the recovery action is chosen.  Otherwise
  -- we'll only fail higher up.  c.f. tryTcLIE_
  qRecover recover main = do { (msgs, mb_res) <- tryTcErrs main
                             ; case mb_res of
                                 Just val -> do { addMessages msgs -- There might be warnings
                                                ; return val }
                                 Nothing -> recover -- Discard all msgs
                             }
  qRunIO io = liftIO io
  -- Record files the splice read, so recompilation tracking sees them.
  qAddDependentFile fp = do
    ref <- fmap tcg_dependent_files getGblEnv
    dep_files <- readTcRef ref
    writeTcRef ref (fp:dep_files)
  -- Convert and vet declarations added via addTopDecls, then accumulate
  -- them (and any Exact binders) in the global env for later splicing-in.
  qAddTopDecls thds = do
      l <- getSrcSpanM
      let either_hval = convertToHsDecls l thds
      ds <- case either_hval of
              Left exn -> pprPanic "qAddTopDecls: can't convert top-level declarations" exn
              Right ds -> return ds
      mapM_ (checkTopDecl . unLoc) ds
      th_topdecls_var <- fmap tcg_th_topdecls getGblEnv
      updTcRef th_topdecls_var (\topds -> ds ++ topds)
    where
      checkTopDecl :: HsDecl RdrName -> TcM ()
      checkTopDecl (ValD binds)
        = mapM_ bindName (collectHsBindBinders binds)
      checkTopDecl (SigD _)
        = return ()
      checkTopDecl (ForD (ForeignImport (L _ name) _ _ _))
        = bindName name
      checkTopDecl _
        = addErr $ text "Only function, value, and foreign import declarations may be added with addTopDecl"
      bindName :: RdrName -> TcM ()
      bindName (Exact n)
        = do { th_topnames_var <- fmap tcg_th_topnames getGblEnv
             ; updTcRef th_topnames_var (\ns -> extendNameSet ns n)
             }
      bindName name =
          addErr $
          hang (ptext (sLit "The binder") <+> quotes (ppr name) <+> ptext (sLit "is not a NameU."))
             2 (text "Probable cause: you used mkName instead of newName to generate a binding.")
  -- Module finalizers are stacked up and run at the end of typechecking.
  qAddModFinalizer fin = do
      th_modfinalizers_var <- fmap tcg_th_modfinalizers getGblEnv
      updTcRef th_modfinalizers_var (\fins -> fin:fins)
  qGetQ :: forall a. Typeable a => IOEnv (Env TcGblEnv TcLclEnv) (Maybe a)
  qGetQ = do
      th_state_var <- fmap tcg_th_state getGblEnv
      th_state <- readTcRef th_state_var
      -- See #10596 for why we use a scoped type variable here.
      -- ToDo: convert @undefined :: a@ to @proxy :: Proxy a@ when
      -- we drop support for GHC 7.6.
      return (Map.lookup (typeOf (undefined :: a)) th_state >>= fromDynamic)
  -- qPutQ keys the per-splice state map by the value's TypeRep.
  qPutQ x = do
      th_state_var <- fmap tcg_th_state getGblEnv
      updTcRef th_state_var (\m -> Map.insert (typeOf x) (toDyn x) m)
{-
************************************************************************
* *
\subsection{Errors and contexts}
* *
************************************************************************
-}
-- Note that 'before' is *renamed* but not *typechecked*
-- Reason (a) less typechecking crap
--        (b) data constructors after type checking have been
--            changed to their *wrappers*, and that makes them
--            print always fully qualified

-- | Log a splice via 'traceSplice', with no explicit location and the
-- (renamed) expression as the splice source.
showSplice :: Bool -> String -> LHsExpr Name -> SDoc -> TcM ()
showSplice isDec what before after = traceSplice spliceInfo
  where
    spliceInfo = SpliceInfo isDec what Nothing (Just (ppr before)) after
-- | The splice data to be logged
--
-- duplicates code in RnSplice.lhs
data SpliceInfo
  = SpliceInfo
    { spliceIsDeclaration :: Bool          -- ^ True <=> a declaration splice;
                                           --   gates the -dth-dec-file dump
                                           --   (see 'traceSplice')
    , spliceDescription   :: String        -- ^ what is being spliced; printed
                                           --   after \"Splicing\" in the dump
    , spliceLocation      :: Maybe SrcSpan -- ^ location to report; 'Nothing'
                                           --   means use the current source
                                           --   span ('getSrcSpanM')
    , spliceSource        :: Maybe SDoc    -- ^ the splice expression itself,
                                           --   if available
    , spliceGenerated     :: SDoc          -- ^ the code the splice produced
    }
-- | outputs splice information for 2 flags which have different output formats:
-- `-ddump-splices` and `-dth-dec-file`
--
-- This duplicates code in RnSplice.lhs
traceSplice :: SpliceInfo -> TcM ()
traceSplice sd = do
loc <- case sd of
SpliceInfo { spliceLocation = Nothing } -> getSrcSpanM
SpliceInfo { spliceLocation = Just loc } -> return loc
traceOptTcRn Opt_D_dump_splices (spliceDebugDoc loc sd)
when (spliceIsDeclaration sd) $ do
dflags <- getDynFlags
liftIO $ dumpIfSet_dyn_printer alwaysQualify dflags Opt_D_th_dec_file
(spliceCodeDoc loc sd)
where
-- `-ddump-splices`
spliceDebugDoc :: SrcSpan -> SpliceInfo -> SDoc
spliceDebugDoc loc sd
= let code = case spliceSource sd of
Nothing -> ending
Just b -> nest 2 b : ending
ending = [ text "======>", nest 2 (spliceGenerated sd) ]
in (vcat [ ppr loc <> colon
<+> text "Splicing" <+> text (spliceDescription sd)
, nest 2 (sep code)
])
-- `-dth-dec-file`
spliceCodeDoc :: SrcSpan -> SpliceInfo -> SDoc
spliceCodeDoc loc sd
= (vcat [ text "--" <+> ppr loc <> colon
<+> text "Splicing" <+> text (spliceDescription sd)
, sep [spliceGenerated sd]
])
{-
************************************************************************
* *
Instance Testing
* *
************************************************************************
-}
reifyInstances :: TH.Name -> [TH.Type] -> TcM [TH.Dec]
reifyInstances th_nm th_tys
= addErrCtxt (ptext (sLit "In the argument of reifyInstances:")
<+> ppr_th th_nm <+> sep (map ppr_th th_tys)) $
do { loc <- getSrcSpanM
; rdr_ty <- cvt loc (mkThAppTs (TH.ConT th_nm) th_tys)
-- #9262 says to bring vars into scope, like in HsForAllTy case
-- of rnHsTyKi
; let (kvs, tvs) = extractHsTyRdrTyVars rdr_ty
tv_bndrs = userHsTyVarBndrs loc tvs
hs_tvbs = mkHsQTvs tv_bndrs
-- Rename to HsType Name
; ((rn_tvbs, rn_ty), _fvs)
<- bindHsTyVars doc Nothing kvs hs_tvbs $ \ rn_tvbs ->
do { (rn_ty, fvs) <- rnLHsType doc rdr_ty
; return ((rn_tvbs, rn_ty), fvs) }
; (ty, _kind) <- tcHsTyVarBndrs rn_tvbs $ \ _tvs ->
tcLHsType rn_ty
; ty <- zonkTcTypeToType emptyZonkEnv ty
-- Substitute out the meta type variables
-- In particular, the type might have kind
-- variables inside it (Trac #7477)
; traceTc "reifyInstances" (ppr ty $$ ppr (typeKind ty))
; case splitTyConApp_maybe ty of -- This expands any type synonyms
Just (tc, tys) -- See Trac #7910
| Just cls <- tyConClass_maybe tc
-> do { inst_envs <- tcGetInstEnvs
; let (matches, unifies, _) = lookupInstEnv inst_envs cls tys
; traceTc "reifyInstances1" (ppr matches)
; reifyClassInstances cls (map fst matches ++ unifies) }
| isOpenFamilyTyCon tc
-> do { inst_envs <- tcGetFamInstEnvs
; let matches = lookupFamInstEnv inst_envs tc tys
; traceTc "reifyInstances2" (ppr matches)
; reifyFamilyInstances tc (map fim_instance matches) }
_ -> bale_out (hang (ptext (sLit "reifyInstances:") <+> quotes (ppr ty))
2 (ptext (sLit "is not a class constraint or type family application"))) }
where
doc = ClassInstanceCtx
bale_out msg = failWithTc msg
cvt :: SrcSpan -> TH.Type -> TcM (LHsType RdrName)
cvt loc th_ty = case convertToHsType loc th_ty of
Left msg -> failWithTc msg
Right ty -> return ty
{-
************************************************************************
* *
Reification
* *
************************************************************************
-}
lookupName :: Bool -- True <=> type namespace
-- False <=> value namespace
-> String -> TcM (Maybe TH.Name)
lookupName is_type_name s
= do { lcl_env <- getLocalRdrEnv
; case lookupLocalRdrEnv lcl_env rdr_name of
Just n -> return (Just (reifyName n))
Nothing -> do { mb_nm <- lookupGlobalOccRn_maybe rdr_name
; return (fmap reifyName mb_nm) } }
where
th_name = TH.mkName s -- Parses M.x into a base of 'x' and a module of 'M'
occ_fs :: FastString
occ_fs = mkFastString (TH.nameBase th_name)
occ :: OccName
occ | is_type_name
= if isLexCon occ_fs then mkTcOccFS occ_fs
else mkTyVarOccFS occ_fs
| otherwise
= if isLexCon occ_fs then mkDataOccFS occ_fs
else mkVarOccFS occ_fs
rdr_name = case TH.nameModule th_name of
Nothing -> mkRdrUnqual occ
Just mod -> mkRdrQual (mkModuleName mod) occ
getThing :: TH.Name -> TcM TcTyThing
getThing th_name
= do { name <- lookupThName th_name
; traceIf (text "reify" <+> text (show th_name) <+> brackets (ppr_ns th_name) <+> ppr name)
; tcLookupTh name }
-- ToDo: this tcLookup could fail, which would give a
-- rather unhelpful error message
where
ppr_ns (TH.Name _ (TH.NameG TH.DataName _pkg _mod)) = text "data"
ppr_ns (TH.Name _ (TH.NameG TH.TcClsName _pkg _mod)) = text "tc"
ppr_ns (TH.Name _ (TH.NameG TH.VarName _pkg _mod)) = text "var"
ppr_ns _ = panic "reify/ppr_ns"
-- | Look up a TH name and reify whatever GHC entity it refers to.
reify :: TH.Name -> TcM TH.Info
reify th_name = getThing th_name >>= reifyThing
-- | Resolve a TH name to a GHC 'Name', failing with 'notInScope'
-- when no resolution exists.
lookupThName :: TH.Name -> TcM Name
lookupThName th_name =
  lookupThName_maybe th_name
    >>= maybe (failWithTc (notInScope th_name)) return
-- | Try to resolve a TH name to a GHC 'Name': generate every plausible
-- 'RdrName' interpretation (see 'thRdrNameGuesses'), look each one up,
-- and return the first that is actually in scope ('Nothing' if none is).
lookupThName_maybe :: TH.Name -> TcM (Maybe Name)
lookupThName_maybe th_name
  = do { names <- mapMaybeM lookup (thRdrNameGuesses th_name)
         -- Pick the first that works
         -- E.g. reify (mkName "A") will pick the class A in preference
         -- to the data constructor A
       ; return (listToMaybe names) }
  where
    -- Local then global lookup for a single candidate RdrName.
    lookup rdr_name
      = do { -- Repeat much of lookupOccRn, because we want
             -- to report errors in a TH-relevant way
           ; rdr_env <- getLocalRdrEnv
           ; case lookupLocalRdrEnv rdr_env rdr_name of
               Just name -> return (Just name)
               Nothing   -> lookupGlobalOccRn_maybe rdr_name }
tcLookupTh :: Name -> TcM TcTyThing
-- This is a specialised version of TcEnv.tcLookup; specialised mainly in that
-- it gives a reify-related error message on failure, whereas in the normal
-- tcLookup, failure is a bug.
tcLookupTh name
= do { (gbl_env, lcl_env) <- getEnvs
; case lookupNameEnv (tcl_env lcl_env) name of {
Just thing -> return thing;
Nothing ->
case lookupNameEnv (tcg_type_env gbl_env) name of {
Just thing -> return (AGlobal thing);
Nothing ->
if nameIsLocalOrFrom (tcg_mod gbl_env) name
then -- It's defined in this module
failWithTc (notInEnv name)
else
do { mb_thing <- tcLookupImported_maybe name
; case mb_thing of
Succeeded thing -> return (AGlobal thing)
Failed msg -> failWithTc msg
}}}}
-- | Error document for a TH name that cannot be resolved during reify.
-- (Going via 'TH.pprint' is a rather indirect way to display the name.)
notInScope :: TH.Name -> SDoc
notInScope th_name =
  hsep [ quotes (text (TH.pprint th_name))
       , ptext (sLit "is not in scope at a reify") ]
-- | Error document for a locally-defined name that is missing from the
-- type environment during reify.
notInEnv :: Name -> SDoc
notInEnv name =
  hsep [ quotes (ppr name)
       , ptext (sLit "is not in the type environment at a reify") ]
------------------------------
reifyRoles :: TH.Name -> TcM [TH.Role]
reifyRoles th_name
= do { thing <- getThing th_name
; case thing of
AGlobal (ATyCon tc) -> return (map reify_role (tyConRoles tc))
_ -> failWithTc (ptext (sLit "No roles associated with") <+> (ppr thing))
}
where
reify_role Nominal = TH.NominalR
reify_role Representational = TH.RepresentationalR
reify_role Phantom = TH.PhantomR
------------------------------
reifyThing :: TcTyThing -> TcM TH.Info
-- The only reason this is monadic is for error reporting,
-- which in turn is mainly for the case when TH can't express
-- some random GHC extension
reifyThing (AGlobal (AnId id))
= do { ty <- reifyType (idType id)
; fix <- reifyFixity (idName id)
; let v = reifyName id
; case idDetails id of
ClassOpId cls -> return (TH.ClassOpI v ty (reifyName cls) fix)
_ -> return (TH.VarI v ty Nothing fix)
}
reifyThing (AGlobal (ATyCon tc)) = reifyTyCon tc
reifyThing (AGlobal (AConLike (RealDataCon dc)))
= do { let name = dataConName dc
; ty <- reifyType (idType (dataConWrapId dc))
; fix <- reifyFixity name
; return (TH.DataConI (reifyName name) ty
(reifyName (dataConOrigTyCon dc)) fix)
}
reifyThing (AGlobal (AConLike (PatSynCon ps)))
= noTH (sLit "pattern synonyms") (ppr $ patSynName ps)
reifyThing (ATcId {tct_id = id})
= do { ty1 <- zonkTcType (idType id) -- Make use of all the info we have, even
-- though it may be incomplete
; ty2 <- reifyType ty1
; fix <- reifyFixity (idName id)
; return (TH.VarI (reifyName id) ty2 Nothing fix) }
reifyThing (ATyVar tv tv1)
= do { ty1 <- zonkTcTyVar tv1
; ty2 <- reifyType ty1
; return (TH.TyVarI (reifyName tv) ty2) }
reifyThing thing = pprPanic "reifyThing" (pprTcTyThingCategory thing)
-------------------------------------------
reifyAxBranch :: CoAxBranch -> TcM TH.TySynEqn
reifyAxBranch (CoAxBranch { cab_lhs = args, cab_rhs = rhs })
-- remove kind patterns (#8884)
= do { args' <- mapM reifyType (filter (not . isKind) args)
; rhs' <- reifyType rhs
; return (TH.TySynEqn args' rhs') }
reifyTyCon :: TyCon -> TcM TH.Info
reifyTyCon tc
| Just cls <- tyConClass_maybe tc
= reifyClass cls
| isFunTyCon tc
= return (TH.PrimTyConI (reifyName tc) 2 False)
| isPrimTyCon tc
= return (TH.PrimTyConI (reifyName tc) (tyConArity tc) (isUnLiftedTyCon tc))
| isFamilyTyCon tc
= do { let tvs = tyConTyVars tc
kind = tyConKind tc
-- we need the *result kind* (see #8884)
(kvs, mono_kind) = splitForAllTys kind
-- tyConArity includes *kind* params
(_, res_kind) = splitKindFunTysN (tyConArity tc - length kvs)
mono_kind
; kind' <- fmap Just (reifyKind res_kind)
; tvs' <- reifyTyVars tvs
; flav' <- reifyFamFlavour tc
; case flav' of
{ Left flav -> -- open type/data family
do { fam_envs <- tcGetFamInstEnvs
; instances <- reifyFamilyInstances tc
(familyInstances fam_envs tc)
; return (TH.FamilyI
(TH.FamilyD flav (reifyName tc) tvs' kind')
instances) }
; Right eqns -> -- closed type family
return (TH.FamilyI
(TH.ClosedTypeFamilyD (reifyName tc) tvs' kind' eqns)
[]) } }
| Just (tvs, rhs) <- synTyConDefn_maybe tc -- Vanilla type synonym
= do { rhs' <- reifyType rhs
; tvs' <- reifyTyVars tvs
; return (TH.TyConI
(TH.TySynD (reifyName tc) tvs' rhs'))
}
| otherwise
= do { cxt <- reifyCxt (tyConStupidTheta tc)
; let tvs = tyConTyVars tc
; cons <- mapM (reifyDataCon (mkTyVarTys tvs)) (tyConDataCons tc)
; r_tvs <- reifyTyVars tvs
; let name = reifyName tc
deriv = [] -- Don't know about deriving
decl | isNewTyCon tc = TH.NewtypeD cxt name r_tvs (head cons) deriv
| otherwise = TH.DataD cxt name r_tvs cons deriv
; return (TH.TyConI decl) }
-- | Reify a data constructor to TH syntax, instantiating its universal
-- type variables with @tys@.
reifyDataCon :: [Type] -> DataCon -> TcM TH.Con
-- For GADTs etc, see Note [Reifying data constructors]
reifyDataCon tys dc
  = do { let (tvs, theta, arg_tys, _) = dataConSig dc
             subst             = mkTopTvSubst (tvs `zip` tys)  -- Discard ex_tvs
             (subst', ex_tvs') = mapAccumL substTyVarBndr subst (dropList tys tvs)
             theta'   = substTheta  subst' theta
             arg_tys' = substTys    subst' arg_tys
             stricts  = map reifyStrict (dataConSrcBangs dc)
             fields   = dataConFieldLabels dc
             name     = reifyName dc
       ; r_arg_tys <- reifyTypes arg_tys'
         -- Choose the TH constructor form: record, infix, or plain.
       ; let main_con | not (null fields)
                      = TH.RecC name (zip3 (map reifyName fields) stricts r_arg_tys)
                      | dataConIsInfix dc
                      = ASSERT( length arg_tys == 2 )
                        TH.InfixC (s1,r_a1) name (s2,r_a2)
                      | otherwise
                      = TH.NormalC name (stricts `zip` r_arg_tys)
             -- NB: lazy pattern bindings; only forced in the infix case,
             -- where the ASSERT above guarantees exactly two arguments.
             [r_a1, r_a2] = r_arg_tys
             [s1, s2]     = stricts
       ; ASSERT( length arg_tys == length stricts )
         -- Wrap in ForallC only when there are existentials or a context.
         if null ex_tvs' && null theta then
             return main_con
         else do
         { cxt <- reifyCxt theta'
         ; ex_tvs'' <- reifyTyVars ex_tvs'
         ; return (TH.ForallC ex_tvs'' cxt main_con) } }
------------------------------
reifyClass :: Class -> TcM TH.Info
reifyClass cls
= do { cxt <- reifyCxt theta
; inst_envs <- tcGetInstEnvs
; insts <- reifyClassInstances cls (InstEnv.classInstances inst_envs cls)
; ops <- concatMapM reify_op op_stuff
; tvs' <- reifyTyVars tvs
; let dec = TH.ClassD cxt (reifyName cls) tvs' fds' ops
; return (TH.ClassI dec insts ) }
where
(tvs, fds, theta, _, _, op_stuff) = classExtraBigSig cls
fds' = map reifyFunDep fds
reify_op (op, def_meth)
= do { ty <- reifyType (idType op)
; let nm' = reifyName op
; case def_meth of
GenDefMeth gdm_nm ->
do { gdm_id <- tcLookupId gdm_nm
; gdm_ty <- reifyType (idType gdm_id)
; return [TH.SigD nm' ty, TH.DefaultSigD nm' gdm_ty] }
_ -> return [TH.SigD nm' ty] }
------------------------------
-- | Annotate (with TH.SigT) a type if the first parameter is True
-- and if the type contains a free variable.
-- This is used to annotate type patterns for poly-kinded tyvars in
-- reifying class and type instances. See #8953 and th/T8953.
annotThType :: Bool -- True <=> annotate
-> TypeRep.Type -> TH.Type -> TcM TH.Type
-- tiny optimization: if the type is annotated, don't annotate again.
annotThType _ _ th_ty@(TH.SigT {}) = return th_ty
annotThType True ty th_ty
| not $ isEmptyVarSet $ tyVarsOfType ty
= do { let ki = typeKind ty
; th_ki <- reifyKind ki
; return (TH.SigT th_ty th_ki) }
annotThType _ _ th_ty = return th_ty
-- | For every *type* variable (not *kind* variable) in the input,
-- report whether or not the tv is poly-kinded. This is used to eventually
-- feed into 'annotThType'.
mkIsPolyTvs :: [TyVar] -> [Bool]
mkIsPolyTvs tvs = [ is_poly_tv tv | tv <- tvs
, not (isKindVar tv) ]
where
is_poly_tv tv = not $ isEmptyVarSet $ tyVarsOfType $ tyVarKind tv
------------------------------
reifyClassInstances :: Class -> [ClsInst] -> TcM [TH.Dec]
reifyClassInstances cls insts
= mapM (reifyClassInstance (mkIsPolyTvs tvs)) insts
where
tvs = classTyVars cls
reifyClassInstance :: [Bool] -- True <=> the corresponding tv is poly-kinded
-- this list contains flags only for *type*
-- variables, not *kind* variables
-> ClsInst -> TcM TH.Dec
reifyClassInstance is_poly_tvs i
= do { cxt <- reifyCxt (drop n_silent theta)
; let types_only = filterOut isKind types
; thtypes <- reifyTypes types_only
; annot_thtypes <- zipWith3M annotThType is_poly_tvs types_only thtypes
; let head_ty = mkThAppTs (TH.ConT (reifyName cls)) annot_thtypes
; return $ (TH.InstanceD cxt head_ty []) }
where
(_tvs, theta, cls, types) = tcSplitDFunTy (idType dfun)
dfun = instanceDFunId i
n_silent = dfunNSilent dfun
------------------------------
reifyFamilyInstances :: TyCon -> [FamInst] -> TcM [TH.Dec]
reifyFamilyInstances fam_tc fam_insts
= mapM (reifyFamilyInstance (mkIsPolyTvs fam_tvs)) fam_insts
where
fam_tvs = tyConTyVars fam_tc
reifyFamilyInstance :: [Bool] -- True <=> the corresponding tv is poly-kinded
-- this list contains flags only for *type*
-- variables, not *kind* variables
-> FamInst -> TcM TH.Dec
reifyFamilyInstance is_poly_tvs (FamInst { fi_flavor = flavor
, fi_fam = fam
, fi_tys = lhs
, fi_rhs = rhs })
= case flavor of
SynFamilyInst ->
-- remove kind patterns (#8884)
do { let lhs_types_only = filterOut isKind lhs
; th_lhs <- reifyTypes lhs_types_only
; annot_th_lhs <- zipWith3M annotThType is_poly_tvs lhs_types_only
th_lhs
; th_rhs <- reifyType rhs
; return (TH.TySynInstD (reifyName fam)
(TH.TySynEqn annot_th_lhs th_rhs)) }
DataFamilyInst rep_tc ->
do { let tvs = tyConTyVars rep_tc
fam' = reifyName fam
-- eta-expand lhs types, because sometimes data/newtype
-- instances are eta-reduced; See Trac #9692
-- See Note [Eta reduction for data family axioms]
-- in TcInstDcls
(_rep_tc, rep_tc_args) = splitTyConApp rhs
etad_tyvars = dropList rep_tc_args tvs
eta_expanded_lhs = lhs `chkAppend` mkTyVarTys etad_tyvars
; cons <- mapM (reifyDataCon (mkTyVarTys tvs)) (tyConDataCons rep_tc)
; let types_only = filterOut isKind eta_expanded_lhs
; th_tys <- reifyTypes types_only
; annot_th_tys <- zipWith3M annotThType is_poly_tvs types_only th_tys
; return (if isNewTyCon rep_tc
then TH.NewtypeInstD [] fam' annot_th_tys (head cons) []
else TH.DataInstD [] fam' annot_th_tys cons []) }
------------------------------
reifyType :: TypeRep.Type -> TcM TH.Type
-- Monadic only because of failure
reifyType ty@(ForAllTy _ _) = reify_for_all ty
reifyType (LitTy t) = do { r <- reifyTyLit t; return (TH.LitT r) }
reifyType (TyVarTy tv) = return (TH.VarT (reifyName tv))
reifyType (TyConApp tc tys) = reify_tc_app tc tys -- Do not expand type synonyms here
reifyType (AppTy t1 t2) = do { [r1,r2] <- reifyTypes [t1,t2] ; return (r1 `TH.AppT` r2) }
reifyType ty@(FunTy t1 t2)
| isPredTy t1 = reify_for_all ty -- Types like ((?x::Int) => Char -> Char)
| otherwise = do { [r1,r2] <- reifyTypes [t1,t2] ; return (TH.ArrowT `TH.AppT` r1 `TH.AppT` r2) }
reify_for_all :: TypeRep.Type -> TcM TH.Type
reify_for_all ty
= do { cxt' <- reifyCxt cxt;
; tau' <- reifyType tau
; tvs' <- reifyTyVars tvs
; return (TH.ForallT tvs' cxt' tau') }
where
(tvs, cxt, tau) = tcSplitSigmaTy ty
-- | Reify a type-level literal (numeric or string) to TH form.
reifyTyLit :: TypeRep.TyLit -> TcM TH.TyLit
reifyTyLit tylit = return $ case tylit of
  NumTyLit n -> TH.NumTyLit n
  StrTyLit s -> TH.StrTyLit (unpackFS s)
-- | Reify each type in the list (see 'reifyType').
reifyTypes :: [Type] -> TcM [TH.Type]
reifyTypes tys = mapM reifyType tys
reifyKind :: Kind -> TcM TH.Kind
reifyKind ki
= do { let (kis, ki') = splitKindFunTys ki
; ki'_rep <- reifyNonArrowKind ki'
; kis_rep <- mapM reifyKind kis
; return (foldr (TH.AppT . TH.AppT TH.ArrowT) ki'_rep kis_rep) }
where
reifyNonArrowKind k | isLiftedTypeKind k = return TH.StarT
| isConstraintKind k = return TH.ConstraintT
reifyNonArrowKind (TyVarTy v) = return (TH.VarT (reifyName v))
reifyNonArrowKind (ForAllTy _ k) = reifyKind k
reifyNonArrowKind (TyConApp kc kis) = reify_kc_app kc kis
reifyNonArrowKind (AppTy k1 k2) = do { k1' <- reifyKind k1
; k2' <- reifyKind k2
; return (TH.AppT k1' k2')
}
reifyNonArrowKind k = noTH (sLit "this kind") (ppr k)
reify_kc_app :: TyCon -> [TypeRep.Kind] -> TcM TH.Kind
reify_kc_app kc kis
= fmap (mkThAppTs r_kc) (mapM reifyKind kis)
where
r_kc | Just tc <- isPromotedTyCon_maybe kc
, isTupleTyCon tc = TH.TupleT (tyConArity kc)
| kc `hasKey` listTyConKey = TH.ListT
| otherwise = TH.ConT (reifyName kc)
-- | Reify each predicate of a context (see 'reifyPred').
reifyCxt :: [PredType] -> TcM [TH.Pred]
reifyCxt preds = mapM reifyPred preds
-- | Convert a class functional dependency to TH form.
reifyFunDep :: ([TyVar], [TyVar]) -> TH.FunDep
reifyFunDep (lhs, rhs) = TH.FunDep (map reifyName lhs) (map reifyName rhs)
reifyFamFlavour :: TyCon -> TcM (Either TH.FamFlavour [TH.TySynEqn])
reifyFamFlavour tc
| isOpenTypeFamilyTyCon tc = return $ Left TH.TypeFam
| isDataFamilyTyCon tc = return $ Left TH.DataFam
-- this doesn't really handle abstract closed families, but let's not worry
-- about that now
| Just ax <- isClosedSynFamilyTyCon_maybe tc
= do { eqns <- brListMapM reifyAxBranch $ coAxiomBranches ax
; return $ Right eqns }
| otherwise
= panic "TcSplice.reifyFamFlavour: not a type family"
reifyTyVars :: [TyVar]
-> TcM [TH.TyVarBndr]
reifyTyVars tvs = mapM reify_tv $ filter isTypeVar tvs
where
-- even if the kind is *, we need to include a kind annotation,
-- in case a poly-kind would be inferred without the annotation.
-- See #8953 or test th/T8953
reify_tv tv = TH.KindedTV name <$> reifyKind kind
where
kind = tyVarKind tv
name = reifyName tv
{-
Note [Kind annotations on TyConApps]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A poly-kinded tycon sometimes needs a kind annotation to be unambiguous.
For example:
type family F a :: k
type instance F Int = (Proxy :: * -> *)
type instance F Bool = (Proxy :: (* -> *) -> *)
It's hard to figure out where these annotations should appear, so we do this:
Suppose the tycon is applied to n arguments. We strip off the first n
arguments of the tycon's kind. If there are any variables left in the result
kind, we put on a kind annotation. But we must be slightly careful: it's
possible that the tycon's kind will have fewer than n arguments, in the case
that the concrete application instantiates a result kind variable with an
arrow kind. So, if we run out of arguments, we conservatively put on a kind
annotation anyway. This should be a rare case, indeed. Here is an example:
data T1 :: k1 -> k2 -> *
data T2 :: k1 -> k2 -> *
type family G (a :: k) :: k
type instance G T1 = T2
type instance F Char = (G T1 Bool :: (* -> *) -> *) -- F from above
Here G's kind is (forall k. k -> k), and the desugared RHS of that last
instance of F is (G (* -> (* -> *) -> *) (T1 * (* -> *)) Bool). According to
the algorithm above, there are 3 arguments to G so we should peel off 3
arguments in G's kind. But G's kind has only two arguments. This is the
rare special case, and we conservatively choose to put the annotation
in.
See #8953 and test th/T8953.
-}
reify_tc_app :: TyCon -> [TypeRep.Type] -> TcM TH.Type
reify_tc_app tc tys
= do { tys' <- reifyTypes (removeKinds tc_kind tys)
; maybe_sig_t (mkThAppTs r_tc tys') }
where
arity = tyConArity tc
tc_kind = tyConKind tc
r_tc | isTupleTyCon tc = if isPromotedDataCon tc
then TH.PromotedTupleT arity
else TH.TupleT arity
| tc `hasKey` listTyConKey = TH.ListT
| tc `hasKey` nilDataConKey = TH.PromotedNilT
| tc `hasKey` consDataConKey = TH.PromotedConsT
| tc `hasKey` eqTyConKey = TH.EqualityT
| otherwise = TH.ConT (reifyName tc)
-- See Note [Kind annotations on TyConApps]
maybe_sig_t th_type
| needs_kind_sig
= do { let full_kind = typeKind (mkTyConApp tc tys)
; th_full_kind <- reifyKind full_kind
; return (TH.SigT th_type th_full_kind) }
| otherwise
= return th_type
needs_kind_sig
| Just result_ki <- peel_off_n_args tc_kind (length tys)
= not $ isEmptyVarSet $ kiVarsOfKind result_ki
| otherwise
= True
peel_off_n_args :: Kind -> Arity -> Maybe Kind
peel_off_n_args k 0 = Just k
peel_off_n_args k n
| Just (_, res_k) <- splitForAllTy_maybe k
= peel_off_n_args res_k (n-1)
| Just (_, res_k) <- splitFunTy_maybe k
= peel_off_n_args res_k (n-1)
| otherwise
= Nothing
removeKinds :: Kind -> [TypeRep.Type] -> [TypeRep.Type]
removeKinds (FunTy k1 k2) (h:t)
| isSuperKind k1 = removeKinds k2 t
| otherwise = h : removeKinds k2 t
removeKinds (ForAllTy v k) (h:t)
| isSuperKind (varType v) = removeKinds k t
| otherwise = h : removeKinds k t
removeKinds _ tys = tys
-- | Reify a predicate (constraint) to TH syntax; implicit parameters
-- are rejected with a \"Can't represent\" error.
reifyPred :: TypeRep.PredType -> TcM TH.Pred
reifyPred ty
  -- We could reify the implicit parameter as a class but it seems
  -- nicer to support them properly...
  | isIPPred ty = noTH (sLit "implicit parameters") (ppr ty)
  | otherwise   = reifyType ty
------------------------------
reifyName :: NamedThing n => n -> TH.Name
reifyName thing
| isExternalName name = mk_varg pkg_str mod_str occ_str
| otherwise = TH.mkNameU occ_str (getKey (getUnique name))
-- Many of the things we reify have local bindings, and
-- NameL's aren't supposed to appear in binding positions, so
-- we use NameU. When/if we start to reify nested things, that
-- have free variables, we may need to generate NameL's for them.
where
name = getName thing
mod = ASSERT( isExternalName name ) nameModule name
pkg_str = packageKeyString (modulePackageKey mod)
mod_str = moduleNameString (moduleName mod)
occ_str = occNameString occ
occ = nameOccName name
mk_varg | OccName.isDataOcc occ = TH.mkNameG_d
| OccName.isVarOcc occ = TH.mkNameG_v
| OccName.isTcOcc occ = TH.mkNameG_tc
| otherwise = pprPanic "reifyName" (ppr name)
------------------------------
-- | Look up the fixity of a name (via the renamer) and convert it to
-- the TH representation.
reifyFixity :: Name -> TcM TH.Fixity
reifyFixity name
  = do { fix <- lookupFixityRn name
       ; return (conv_fix fix) }
  where
    -- Translate GHC fixity/associativity to the TH equivalents.
    conv_fix (BasicTypes.Fixity i d) = TH.Fixity i (conv_dir d)
    conv_dir BasicTypes.InfixR = TH.InfixR
    conv_dir BasicTypes.InfixL = TH.InfixL
    conv_dir BasicTypes.InfixN = TH.InfixN
-- | Translate a source-level strictness annotation to TH form.
--
-- NB: the 'HsSrcBang' clauses overlap, so their order matters: the
-- UNPACK-ed bang @(HsSrcBang _ (Just True) True)@ must be matched
-- before the general strict case @(HsSrcBang _ _ True)@.
reifyStrict :: DataCon.HsSrcBang -> TH.Strict
reifyStrict HsNoBang                       = TH.NotStrict
reifyStrict (HsSrcBang _ _ False)          = TH.NotStrict
reifyStrict (HsSrcBang _ (Just True) True) = TH.Unpacked
reifyStrict (HsSrcBang _ _ True)           = TH.IsStrict
reifyStrict HsStrict                       = TH.IsStrict
reifyStrict (HsUnpack {})                  = TH.Unpacked
------------------------------
lookupThAnnLookup :: TH.AnnLookup -> TcM CoreAnnTarget
lookupThAnnLookup (TH.AnnLookupName th_nm) = fmap NamedTarget (lookupThName th_nm)
lookupThAnnLookup (TH.AnnLookupModule (TH.Module pn mn))
= return $ ModuleTarget $
mkModule (stringToPackageKey $ TH.pkgString pn) (mkModuleName $ TH.modString mn)
reifyAnnotations :: Data a => TH.AnnLookup -> TcM [a]
reifyAnnotations th_name
= do { name <- lookupThAnnLookup th_name
; topEnv <- getTopEnv
; epsHptAnns <- liftIO $ prepareAnnotations topEnv Nothing
; tcg <- getGblEnv
; let selectedEpsHptAnns = findAnns deserializeWithData epsHptAnns name
; let selectedTcgAnns = findAnns deserializeWithData (tcg_ann_env tcg) name
; return (selectedEpsHptAnns ++ selectedTcgAnns) }
------------------------------
-- | Convert a GHC 'Module' to the TH representation.
modToTHMod :: Module -> TH.Module
modToTHMod m = TH.Module pkg mname
  where
    pkg   = TH.PkgName (packageKeyString (modulePackageKey m))
    mname = TH.ModName (moduleNameString (moduleName m))
reifyModule :: TH.Module -> TcM TH.ModuleInfo
reifyModule (TH.Module (TH.PkgName pkgString) (TH.ModName mString)) = do
this_mod <- getModule
let reifMod = mkModule (stringToPackageKey pkgString) (mkModuleName mString)
if (reifMod == this_mod) then reifyThisModule else reifyFromIface reifMod
where
reifyThisModule = do
usages <- fmap (map modToTHMod . moduleEnvKeys . imp_mods) getImports
return $ TH.ModuleInfo usages
reifyFromIface reifMod = do
iface <- loadInterfaceForModule (ptext (sLit "reifying module from TH for") <+> ppr reifMod) reifMod
let usages = [modToTHMod m | usage <- mi_usages iface,
Just m <- [usageToModule (modulePackageKey reifMod) usage] ]
return $ TH.ModuleInfo usages
usageToModule :: PackageKey -> Usage -> Maybe Module
usageToModule _ (UsageFile {}) = Nothing
usageToModule this_pkg (UsageHomeModule { usg_mod_name = mn }) = Just $ mkModule this_pkg mn
usageToModule _ (UsagePackageModule { usg_mod = m }) = Just m
------------------------------
-- | Apply a TH type to a list of argument types, left to right.
mkThAppTs :: TH.Type -> [TH.Type] -> TH.Type
mkThAppTs = foldl TH.AppT
-- | Fail with a \"Can't represent ... in Template Haskell\" error.
noTH :: LitString -> SDoc -> TcM a
noTH s d = failWithTc msg
  where
    msg = hsep [ ptext (sLit "Can't represent") <+> ptext s <+>
                 ptext (sLit "in Template Haskell:")
               , nest 2 d ]
-- | Render anything TH can pretty-print as an 'SDoc'.
ppr_th :: TH.Ppr a => a -> SDoc
ppr_th = text . TH.pprint
{-
Note [Reifying data constructors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Template Haskell syntax is rich enough to express even GADTs,
provided we do so in the equality-predicate form. So a GADT
like
data T a where
MkT1 :: a -> T [a]
MkT2 :: T Int
will appear in TH syntax like this
data T a = forall b. (a ~ [b]) => MkT1 b
| (a ~ Int) => MkT2
-}
#endif /* GHCI */
| alexander-at-github/eta | compiler/ETA/TypeCheck/TcSplice.hs | bsd-3-clause | 71,684 | 0 | 10 | 21,621 | 893 | 544 | 349 | 32 | 1 |
module Problem28 where
-- | Number of cells in layer @n@ of the spiral: the centre layer is a
-- single cell; every outer layer is a ring of @8 * (n - 1)@ cells.
getLayerCellCount :: Int -> Int
getLayerCellCount layer
  | layer == 1 = 1
  | otherwise  = 8 * (layer - 1)
-- | The value just before layer @n@ starts, i.e. the last (bottom-right)
-- number of the previous layer: @(2*n - 3)^2@ for @n >= 2@.  Layer 1 is
-- the centre cell, for which this is defined as 0 so 'getCornerSum' can
-- treat it uniformly.
--
-- Closed form: the previous layer's square has side length @2*(n-1) - 1@.
-- The old detour through @getLayerCellCount (n-1) `div` 4 + 1@ computed
-- the same value, but only worked for @n == 2@ because @1 `div` 4@
-- happens to truncate to 0; the direct formula is both clearer and O(1).
getFirstNum :: Int -> Int
getFirstNum 1 = 0
getFirstNum n = prevSide ^ 2
  where prevSide = 2 * n - 3
-- | Sum of the four corner values of spiral layer @n@.  The corners sit
-- at offsets @step, 2*step, 3*step, 4*step@ past the layer's starting
-- point, where @step@ is a quarter of the ring.  Layer 1 is the lone
-- centre cell, whose \"corner sum\" is simply 1.
getCornerSum :: Int -> Int
getCornerSum 1 = 1
getCornerSum n = 4 * firstNum + 10 * step
  where
    firstNum = getFirstNum n
    step     = getLayerCellCount n `div` 4
-- | Sum of the numbers on both diagonals of a @dimension x dimension@
-- number spiral, by summing the corner values of every layer.
getDiagnoalSum :: Int -> Int
getDiagnoalSum dimension =
    sum [ getCornerSum layer | layer <- [1 .. layerCount] ]
  where
    layerCount = dimension `div` 2 + 1
-- | Print the diagonal sum of a 1001x1001 spiral (Project Euler #28).
-- @print@ is the idiomatic spelling of @putStrLn . show@.
main :: IO ()
main = print (getDiagnoalSum 1001)
| noraesae/euler | src/Problem28.hs | bsd-3-clause | 603 | 0 | 13 | 126 | 243 | 131 | 112 | 18 | 1 |
-- | The Term type.
module Data.BERT.Types
( Term(..)
) where
import Data.ByteString.Lazy (ByteString)
import Data.Time (UTCTime)
-- | A single BERT term.
data Term
  -- Simple (erlang) terms:
  = IntTerm        Int              -- ^ machine-sized integer
  | FloatTerm      Float            -- ^ floating point number
  | AtomTerm       String           -- ^ atom, carried as a 'String'
  | TupleTerm      [Term]           -- ^ tuple of terms
  | BytelistTerm   ByteString       -- ^ bytelist (lazy 'ByteString')
  | ListTerm       [Term]           -- ^ list of terms
  | BinaryTerm     ByteString       -- ^ binary payload
  | BigintTerm     Integer          -- ^ arbitrary-precision integer
  | BigbigintTerm  Integer          -- ^ arbitrary-precision integer
                                    --   (presumably the larger wire form;
                                    --   confirm against the codec)
  -- Composite (BERT specific) terms:
  | NilTerm                         -- ^ BERT nil
  | BoolTerm       Bool             -- ^ BERT boolean
  | DictionaryTerm [(Term, Term)]   -- ^ dictionary of key/value term pairs
  | TimeTerm       UTCTime          -- ^ timestamp
  | RegexTerm      String [String]  -- ^ regex source plus option strings
                                    --   (presumably; verify against codec)
  deriving (Eq, Ord, Show, Read)
| feuerbach/bert | src/Data/BERT/Types.hs | bsd-3-clause | 644 | 0 | 8 | 181 | 152 | 94 | 58 | 20 | 0 |
{-# LANGUAGE TemplateHaskell, PatternGuards #-}
module HsImport.ImportSpec
( ImportSpec(..)
, hsImportSpec
) where
import qualified Language.Haskell.Exts as HS
import qualified HsImport.Args as Args
import HsImport.Args (HsImportArgs)
import HsImport.Parse (parseFile)
import HsImport.Symbol (Symbol(..))
import HsImport.Module
import Data.List (find)
-- | Everything needed to carry out one import edit.
data ImportSpec = ImportSpec
   { sourceFile     :: FilePath        -- ^ the input source file
   , parsedSrcFile  :: HS.Module       -- ^ parse result for 'sourceFile'
   , moduleToImport :: Module          -- ^ the module to add an import for
   , symbolToImport :: Maybe Symbol    -- ^ optional symbol to restrict the
                                       --   import to
   , saveToFile     :: Maybe FilePath  -- ^ where to write the result;
                                       --   'Nothing' when no output file
                                       --   was given on the command line
   } deriving (Show)
-- | Error messages are plain strings.
type Error = String

-- | Build an 'ImportSpec' from the command line arguments: validate them
-- ('checkArgs'), parse the input source file, and package everything up.
-- Returns 'Left' with a message when validation or parsing fails.
hsImportSpec :: HsImportArgs -> IO (Either Error ImportSpec)
hsImportSpec args
   | Just error <- checkArgs args = return $ Left error
   | otherwise = do
      result <- parseFile $ Args.inputSrcFile args
      case result of
           Right (HS.ParseOk hsModule) -> return $ Right $
              ImportSpec { sourceFile     = Args.inputSrcFile args
                         , parsedSrcFile  = hsModule
                         , moduleToImport = module_
                         , symbolToImport = symbol
                         , saveToFile     = saveToFile
                         }
           Right (HS.ParseFailed srcLoc error) -> return $ Left (show srcLoc ++ error)
           Left error -> return $ Left error
   where
      -- Module to import; it is qualified iff a qualified name was given.
      -- The first non-empty of '--qualifiedname'/'--as' is the alias
      -- (checkArgs rejects giving both at once).
      module_ = Module { moduleName = Args.moduleName args
                       , qualified  = not . null $ Args.qualifiedName args
                       , as         = find (/= "") [Args.qualifiedName args, Args.as args]
                       }

      -- Optional symbol to import: everything ('--all'), an explicit
      -- subset ('--with'), or just the bare name.
      symbol = case Args.symbolName args of
                    ""   -> Nothing
                    name | Args.all args              -> Just $ AllOfSymbol name
                         | ws@(_:_) <- Args.with args -> Just $ SomeOfSymbol name ws
                         | otherwise                  -> Just $ Symbol name

      -- Output file; an empty '--output' argument means in-place/none.
      saveToFile = case Args.outputSrcFile args of
                        "" -> Nothing
                        fp -> Just fp
-- | Validate the command line arguments.  Returns 'Just' an error message
-- for the first problem found, or 'Nothing' when the arguments are usable.
checkArgs args
   | null . Args.inputSrcFile $ args
   = Just "Missing source file!"
   | null . Args.moduleName $ args
   = Just "Missing module name!"
   -- '--qualifiedname' and '--as' are mutually exclusive aliases.
   | (not . null $ qualifiedName) && (not . null $ asName)
   = Just "Invalid usage of options '--qualifiedname' and '--as' at once!"
   | otherwise
   = Nothing
   where
      qualifiedName = Args.qualifiedName args
      asName        = Args.as args
| jystic/hsimport | lib/HsImport/ImportSpec.hs | bsd-3-clause | 2,494 | 0 | 16 | 900 | 656 | 341 | 315 | 57 | 5 |
{-|
A quick and simple way to run Elerea networks with GLFW window and event handling.
e.g.
> runGLFWExt
> "Good Stuff!"
> (myCreateSignal :: GLFWSignal ())
> defaultConfig {
>
> postOpenInit = do
> clearColor $= Color4 0 0 0 0
> clearDepth $= 1
> depthFunc $= Just Less
> return ()
>
> resizeCallback = \size@(Size w h) -> do
> viewport $= (Position 0 0, size)
> matrixMode $= Projection
> loadIdentity
> perspective 45 (fromIntegral w / fromIntegral h) 0.1 100
> matrixMode $= Modelview 0
> }
-}
module FRP.Elerea.GLFW (
GLFWSignal,
-- * Configuring
GLFWConfig (..), Step (..), defaultConfig
-- * Running
, runGLFWExt, runGLFW
) where
import Graphics.Rendering.OpenGL
import Graphics.UI.GLFW
import FRP.Elerea
import Control.Monad ( join, unless )
import Data.List ( delete )
import Data.IORef
import System.Exit
-- | The type of Elerea callbacks that receive initialization results and event
-- sources and create the network.
type GLFWSignal a =
    a
    -- ^ the 'postOpenInit' result
    -> Signal [Key]
    -- ^ keyboard state: the keys currently held down
    -> (Signal Position, Signal [MouseButton], Signal Int)
    -- ^ mouse position, button state and scroll-wheel value
    -> Signal Size
    -- ^ window size
    -> SignalMonad (Signal (IO ()))
    -- ^ the network; the produced action is executed once per iteration
-- | Timing regimen used by the driver loop.
data Step = Sliding       -- ^ feed the measured delta time to the network, once each iteration
          | Fixed DTime   -- ^ increment the net in fixed steps, catching up on the elapsed time
-- | Extra knobs for 'runGLFWExt'.
data GLFWConfig a =
    GAC { winSize :: Maybe Size
        -- ^ window size, 'FullScreen' if Nothing
        , displayBits :: [DisplayBits]
        -- ^ ... for 'openWindow'
        , preOpenInit :: IO ()
        -- ^ initialization to perform /before/ opening the window, after 'initialize'
        , postOpenInit :: IO a
        -- ^ initialization to perform /after/ opening the window, hence, with full OpenGL
        -- context (typically, the usual initGL-type stuff + rendering list construction);
        -- its result is handed to the 'GLFWSignal'
        , extraCleanup :: IO ()
        -- ^ called just before 'terminate'
        , resizeCallback :: WindowSizeCallback
        -- ^ the raw 'WindowSizeCallback'; runs after the window-size signal is fed
        , timeStep :: Step
        -- ^ timing regimen
        }
-- | The all-defaults configuration:
-- 'winSize' of @Size 0 0@ (presumably letting GLFW pick default window
-- dimensions -- TODO confirm against the GLFW 'openWindow' docs).
-- (8,8,8) rgb bits \/ 8 alpha bits \/ 24 depth bits for 'displayBits'.
-- 'preOpenInit', 'postOpenInit', 'resizeCallback' and 'extraCleanup' do nothing.
-- 'Sliding' timing.
defaultConfig :: GLFWConfig ()
defaultConfig =
    GAC { winSize = Just (Size 0 0)
        , displayBits = [ DisplayRGBBits 8 8 8
                        , DisplayAlphaBits 8
                        , DisplayDepthBits 24 ]
        , preOpenInit = return ()
        , postOpenInit = return ()
        , resizeCallback = \_ -> return ()
        , extraCleanup = return ()
        , timeStep = Sliding
        }
-- | Run a signal network, going with 'defaultConfig' for everything.
runGLFW :: String -> GLFWSignal () -> IO ()
runGLFW title network = runGLFWExt title network defaultConfig
-- | Initialize GLFW, open a window, hook up external signal sources to GLFW keyboard/mouse,
-- maybe perform custom inits, create the signal and /spin it in a tight loop/.
-- Terminates on ESC or when the window is closed.
runGLFWExt :: String -> GLFWSignal a -> GLFWConfig a -> IO ()
runGLFWExt title sgn cfg = do
  initialize
  preOpenInit cfg
  -- No configured size means full screen; the Size 0 0 is only used then.
  let (sz, fs) = maybe (Size 0 0, FullScreen)
                       (\s -> (s, Window))
                       (winSize cfg)
  openWindow sz (displayBits cfg) fs
  windowTitle $= title
  -- Closing the window runs the cleanup and exits the whole process.
  windowCloseCallback $= ( endit >> exitSuccess )
  a <- postOpenInit cfg
  -- Keyboard and mouse buttons are callback-driven: each press/release
  -- monitor maintains the list of held keys and pushes it into a signal.
  (keys, keySink) <- external []
  keyMon <- mkPressReleaseMonitor
  keyCallback $= keyMon keySink
  (mbutt, mbuttSink) <- external []
  mbMon <- mkPressReleaseMonitor
  mouseButtonCallback $= mbMon mbuttSink
  -- Mouse position and wheel are polled once per iteration instead: the
  -- snapshot actions below are handed to 'drive' as its per-frame action.
  (mouse, mouseSink) <- external (Position 0 0)
  (mwheel, mwheelSink) <- external 0
  let mousePosSnapshot = get mousePos >>= mouseSink
      mouseWheelSnapshot = get mouseWheel >>= mwheelSink
  (winSize, winSizeSink) <- external (Size 0 0)
  -- Feed the size signal first, then run the user's resize callback.
  windowSizeCallback $= \s -> winSizeSink s >> resizeCallback cfg s
  net <- createSignal $
         sgn a keys (mouse, mbutt, mwheel) winSize
  drive net (timeStep cfg) (mousePosSnapshot >> mouseWheelSnapshot)
  endit
  where
    endit = closeWindow >> extraCleanup cfg >> terminate
-- | Spin the network until ESC is pressed: each iteration runs the given
--   per-frame action (input snapshots), measures the elapsed time, and
--   advances the network according to the 'Step' regimen.
drive :: Signal (IO ()) -> Step -> IO () -> IO ()
drive sgn tstep preInit = do
  k <- getKey ESC
  unless (k == Press) $ do
    preInit
    -- Elapsed time since the previous iteration; the timer is reset so the
    -- next iteration measures only its own frame.
    t <- get time
    time $= 0
    join $ case tstep of
      Sliding -> superstep sgn t
      Fixed d -> do
        -- Consume the elapsed time in dt-sized steps; the remainder t' is
        -- credited back onto the clock for the next iteration.
        (act, t') <- stepper t (return ())
        get time >>= (time $=) . (+ t')
        return act
    drive sgn tstep preInit
  where
    stepper timeAcc act | timeAcc >= dt = superstep sgn dt >>= stepper (timeAcc - dt)
                        | otherwise = return (act, timeAcc)
    -- Lazy pattern binding: dt is only forced (and only well-defined) in
    -- the Fixed branch above; the Sliding branch never touches it.
    Fixed dt = tstep
-- | Create a press\/release tracker: the returned adapter maintains the
--   list of currently held buttons across events and feeds the updated
--   list into the supplied sink on every event.
mkPressReleaseMonitor :: (Eq a) => IO (([a] -> IO ()) -> a -> KeyButtonState -> IO ())
mkPressReleaseMonitor = do
    held <- newIORef []
    return $ \sink button state -> do
        before <- readIORef held
        let now = case state of
                    Press   -> button : before
                    Release -> button `delete` before
        writeIORef held now
        sink now
| pqwy/haskell-glfw-elerea | FRP/Elerea/GLFW.hs | bsd-3-clause | 5,540 | 0 | 20 | 1,764 | 1,236 | 638 | 598 | 95 | 2 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_HADDOCK show-extensions #-}
{-|
Module : $Header$
Copyright : (c) 2016 Deakin Software & Technology Innovation Lab
License : BSD3
Maintainer : Rhys Adams <rhys.adams@deakin.edu.au>
Stability : unstable
Portability : portable
-}
module Eclogues.State.Types where
import Eclogues.Prelude
import qualified Eclogues.Job as Job
import Control.Lens.TH (makeClassy, makePrisms)
import Data.Default.Generics (Default)
-- TODO: test performance. it might be better to use stm-containers.
-- | All nodes, keyed by job name.
type Nodes = HashMap Job.Name Job.AnyStatus
-- | Job statuses only, keyed by job name.
type Jobs = HashMap Job.Name Job.Status
-- Dependencies is misleading, they're dependents in this case
type RevDeps = HashMap Job.Name Job.Dependencies
-- | Container identifiers mapped to their UUIDs.
type Containers = HashMap Job.ContainerId UUID
data AppState = AppState { -- | Map of job names to status.
                           _nodes :: Nodes
                           -- | Map of job names to jobs that depend on them
                           -- and have yet to terminate.
                         , _revDeps :: RevDeps
                           -- | Map of container ids to UUIDs.
                         , _containers :: Containers }
              deriving (Generic, Show, Eq)
-- Generated optics: prisms for the job wrapper types and classy lenses
-- for 'AppState' (this is what provides the 'HasAppState' class below).
$(makePrisms ''Job.Dependencies)
$(makePrisms ''Job.AnyStatus)
$(makePrisms ''Job.Sealed)
$(makeClassy ''AppState)
-- | Traversal onto the 'Job.Status' stored under the given name, when such
--   a node exists and matches the '_AJob' prism.
job :: (HasAppState s) => Job.Name -> Traversal' s Job.Status
job n = nodes . ix n . _AJob

-- | Traversal over every 'Job.Status' in 'nodes'.
jobs :: (HasAppState s) => Traversal' s Job.Status
jobs = nodes . each . _AJob

-- Generic-derived default. -- NOTE(review): presumably the empty state
-- (all maps empty); confirm against Data.Default.Generics.
instance Default AppState
| rimmington/eclogues | eclogues-impl/app/api/Eclogues/State/Types.hs | bsd-3-clause | 1,555 | 0 | 9 | 378 | 296 | 163 | 133 | 27 | 1 |
{-# LANGUAGE FlexibleContexts, FlexibleInstances, GADTs, MultiParamTypeClasses #-}
module Data.Pattern
( PatternF(..)
, Pattern(..)
, commaSep1
, commaSep
, sep1
, sep
, oneOf
, cat
, char
, token
, category
, match
, delta
, ret
, label
, string
, anyToken
, compactF
, isTerminal
) where
import Control.Applicative
import Data.Bifunctor (first)
import Data.Char
import Data.Higher.Foldable
import Data.Higher.Functor
import Data.Higher.Functor.Eq
import Data.Higher.Functor.Fix
import Data.Higher.Functor.Recursive
import Data.Higher.Functor.Show
import Data.Higher.Graph
import qualified Data.Monoid as Monoid
import Data.Monoid hiding (Alt)
import Data.Predicate
-- Types
-- | One layer of a pattern over tokens of type @t@, with recursive
--   positions wrapped in @f@ and result type @a@.
data PatternF t f a where
  Cat :: f a -> f b -> PatternF t f (a, b)   -- ^ sequencing; results are paired
  Alt :: f a -> f a -> PatternF t f a        -- ^ alternation
  Rep :: f a -> PatternF t f [a]             -- ^ zero or more repetitions
  Map :: (a -> b) -> f a -> PatternF t f b   -- ^ map a function over the results
  Bnd :: f a -> (a -> f b) -> PatternF t f b -- ^ monadic bind
  Sat :: Predicate t -> PatternF t f t       -- ^ one token satisfying a predicate
  Mat :: (t -> Maybe u) -> PatternF t f u    -- ^ one token, matched and transformed
  Ret :: [a] -> PatternF t f a               -- ^ yield results without consuming input
  Nul :: PatternF t f a                      -- ^ the failing pattern
  Lab :: f a -> String -> PatternF t f a     -- ^ a labelled pattern
  Del :: f a -> PatternF t f a               -- ^ delta marker (see 'delta')
-- Smart constructors
-- | One or more occurrences of the pattern, separated by commas.
commaSep1 :: (Alternative r, Pattern r Char) => r a -> r [a]
commaSep1 = sep1 (char ',')
-- | Zero or more occurrences of the pattern, separated by commas.
commaSep :: (Alternative r, Pattern r Char) => r a -> r [a]
commaSep = sep (char ',')
-- | @sep1 s p@: one or more occurrences of @p@, separated by @s@.
sep1 :: Alternative r => r sep -> r a -> r [a]
sep1 s p = (:) <$> p <*> many (s *> p)
-- | @sep s p@: zero or more occurrences of @p@, separated by @s@.
sep :: Alternative r => r sep -> r a -> r [a]
sep s p = s `sep1` p <|> pure []
-- | The alternation of all patterns in the container.
oneOf :: (Foldable t, Alternative f) => t (f a) -> f a
oneOf = getAlt . foldMap Monoid.Alt
infixl 4 `cat`
-- | Sequence two patterns, pairing their results.
cat :: Pattern r t => r a -> r b -> r (a, b)
cat a = hembed . Cat a
-- | Match exactly the given character.
char :: Pattern r Char => Char -> r Char
char = token
-- | Match exactly the given token.
token :: (Eq t, Pattern r t) => t -> r t
token = hembed . Sat . Equal
-- | Match any character in the given Unicode general category.
category :: Pattern r Char => GeneralCategory -> r Char
category = hembed . Sat . Category
-- | Match a token on which the function yields 'Just', producing that value.
match :: Pattern r t => (t -> Maybe u) -> r u
match = hembed . Mat
-- | Wrap a pattern in a 'Del' node. -- NOTE(review): presumably the
-- delta/nullability marker of derivative parsing; confirm with the consumer.
delta :: Pattern r t => r a -> r a
delta = hembed . Del
-- | A pattern that consumes no input and yields the given results.
ret :: Pattern r t => [a] -> r a
ret = hembed . Ret
infixr 2 `label`
-- | Attach a human-readable name to a pattern.
label :: Pattern r t => r a -> String -> r a
label p = hembed . Lab p
-- | Match the given characters in sequence, yielding the whole string.
string :: (Applicative r, Pattern r Char) => String -> r String
string = traverse (hembed . Sat . Equal)
-- | Match any single token.
anyToken :: Pattern r t => r t
anyToken = hembed (Sat (Constant True))
-- API
-- | One-layer simplification pass: rewrites a pattern node whose children
--   expose a known shape via 'pattern'. Applied on every 'hembed', so
--   patterns stay compact as they are built.
compactF :: Pattern r t => PatternF t r a -> PatternF t r a
compactF p = case p of
  -- Nul annihilates sequencing on either side.
  Cat a _ | Just Nul <- pattern a -> Nul
  Cat _ b | Just Nul <- pattern b -> Nul
  -- A singleton Ret is a unit for sequencing: fold it into a Map.
  Cat l r | Just (Ret [t]) <- pattern l, Just b <- pattern r -> ((,) t) <$> b
  Cat l r | Just a <- pattern l, Just (Ret [t]) <- pattern r -> flip (,) t <$> a
  -- Right-associate nested sequencing, fixing up the result tuples.
  Cat l c | Just (Cat a b) <- pattern l -> (\ (a, (b, c)) -> ((a, b), c)) <$> Cat a (hembed (Cat b c))
  -- Hoist a Map out of the left operand of a sequence.
  Cat l b | Just (Map f a) <- pattern l -> first f <$> Cat a b
  -- Nul is the identity of alternation.
  Alt a r | Just Nul <- pattern a, Just b <- pattern r -> b
  Alt a b | Just p <- pattern a, Just Nul <- pattern b -> p
  -- Merge alternated result sets.
  Alt l r | Just (Ret a) <- pattern l, Just (Ret b) <- pattern r -> Ret (a <> b)
  -- Zero-or-more of the failing pattern matches exactly the empty list.
  Rep a | Just Nul <- pattern a -> Ret [[]]
  -- Push Map into Ret; fuse nested Maps; Map over Nul is Nul.
  Map f p | Just (Ret as) <- pattern p -> Ret (f <$> as)
  Map g a | Just (Map f p) <- pattern a -> Map (g . f) p
  Map _ p | Just Nul <- pattern p -> Nul
  -- Labels on trivial or Del patterns carry no information.
  Lab p _ | Just Nul <- pattern p -> Nul
  Lab p _ | Just (Ret t) <- pattern p -> Ret t
  Lab a _ | Just (Del p) <- pattern a -> Del p
  -- Del is idempotent, trivial on Nul/Ret, and Nul on terminal patterns.
  Del p | Just Nul <- pattern p -> Nul
  Del a | Just (Del p) <- pattern a -> Del p
  Del p | Just (Ret a) <- pattern p -> Ret a
  Del a | Just p <- pattern a, isTerminal p -> Nul
  -- Anything else is already compact.
  a -> a
-- | Does this layer have no sub-patterns to recurse into?
isTerminal :: PatternF t f a -> Bool
isTerminal (Cat _ _) = False
isTerminal (Alt _ _) = False
isTerminal (Rep _)   = False
isTerminal (Map _ _) = False
isTerminal (Bnd _ _) = False
isTerminal (Lab _ _) = False
isTerminal _         = True
-- Classes
-- | Carriers that can embed 'PatternF' layers and (possibly) expose their
--   outermost layer again via 'pattern' ('Nothing' when the layer is not
--   observable, e.g. for the non-'In' graph representations below).
class (HCorecursive r, Base r ~ PatternF t) => Pattern r t
  where pattern :: r a -> Maybe (Base r r a)
-- Instances
-- Structure-preserving map over the recursive positions; for 'Bnd' the
-- function is also post-composed onto the continuation.
instance HFunctor (PatternF t) where
  hfmap f p = case p of
    Cat a b -> Cat (f a) (f b)
    Alt a b -> Alt (f a) (f b)
    Rep a -> Rep (f a)
    Map g p -> Map g (f p)
    Bnd p g -> Bnd (f p) (f . g)
    Sat p -> Sat p
    Mat p -> Mat p
    Ret as -> Ret as
    Nul -> Nul
    Lab p s -> Lab (f p) s
    Del a -> Del (f a)
-- Folds the recursive positions into an Alternative/Monad carrier.
-- Constructors without a foldable payload (Sat, Mat, Ret, Nul — and,
-- notably, Rep, which has no branch here) yield 'empty'.
instance (Alternative a, Monad a) => HFoldable (PatternF t) a where
  hfoldMap f p = case p of
    Cat a b -> f ((,) <$> a <*> b)
    Alt a b -> f (a <|> b)
    Map g p -> f (g <$> p)
    Bnd p g -> f (p >>= g)
    Lab p _ -> f p
    Del a -> f a
    _ -> empty
-- Approximate structural equality: functions cannot be compared, so
-- 'Map'/'Bnd' nodes are never equal (their cases are commented out) and
-- 'Ret' payloads are compared by length only.
instance HEqF (PatternF t)
  where heqF eq a b = case (a, b) of
          (Cat a1 b1, Cat a2 b2) -> eq a1 a2 && eq b1 b2
          (Alt a1 b1, Alt a2 b2) -> eq a1 a2 && eq b1 b2
          -- (Map f1 p1, Map f2 p2) -> eq p1 p2
          -- (Bnd p1 f1, Bnd p2 f2) -> eq p1 p2
          (Sat p1, Sat p2) -> p1 == p2
          (Ret r1, Ret r2) -> length r1 == length r2
          (Nul, Nul) -> True
          (Lab p1 s1, Lab p2 s2) -> s1 == s2 && eq p1 p2
          _ -> False
-- Renders a layer in terms of the smart constructors above; embedded
-- functions are shown as the placeholder "f", Ret payloads as "t0, t1, ...".
instance Show t => HShowF (PatternF t)
  where hshowsPrecF showsPrec n p = case p of
          Cat a b -> showParen (n > 4) $ showsPrec 4 a . showString " `cat` " . showsPrec 5 b
          Alt a b -> showParen (n > 3) $ showsPrec 3 a . showString " <|> " . showsPrec 4 b
          Rep a -> showParen (n >= 10) $ showString "many " . showsPrec 10 a
          Map _ p -> showParen (n > 4) $ showString "f <$> " . showsPrec 5 p
          Bnd p _ -> showParen (n > 1) $ showsPrec 1 p . showString " >>= f"
          Sat (Equal c) -> showParen (n >= 10) $ showString "char " . shows c
          Sat (Category c) -> showParen (n >= 10) $ showString "category " . shows c
          Sat (Constant _) -> showString "anyToken"
          Mat _ -> showParen (n >= 10) $ showString "match f"
          Ret [_] -> showParen (n >= 10) $ showString "pure t"
          Ret t -> showString "ret [" . showIndices (length t) . showString "]"
          Nul -> showString "empty"
          Lab p s -> showParen (n > 2) $ showsPrec 3 p . showString " `label` " . shows s
          Del a -> showParen (n >= 10) $ showString "delta " . showsPrec 10 a
          where showIndices n = foldr (.) id ((showChar 't' .) . shows <$> take n (iterate succ (0 :: Integer)))
-- fmap on a layer re-embeds through 'compactF', so mapped layers simplify.
instance Pattern r t => Functor (PatternF t r)
  where fmap f p = compactF (Map f (hembed p))
-- The standard combinator hierarchy for the graph-node representation:
-- each method just embeds the corresponding PatternF constructor.
instance Functor (Rec (PatternF t) v)
  where fmap = (hembed .) . Map
instance Applicative (Rec (PatternF t) v)
  where pure = hembed . Ret . pure
        (<*>) = ((fmap (uncurry ($)) . hembed) .) . Cat
instance Alternative (Rec (PatternF t) v)
  where empty = hembed Nul
        (<|>) = (hembed .) . Alt
        some v = (:) <$> v <*> many v
        many = hembed . Rep
instance Monad (Rec (PatternF t) v)
  where return = pure
        (>>=) = (hembed .) . Bnd
-- Graph instances just unwrap, delegate to the Rec instances, and rewrap.
instance Functor (Graph (PatternF t))
  where fmap f (Graph rec) = Graph (f <$> rec)
instance Applicative (Graph (PatternF t))
  where pure a = Graph (pure a)
        Graph f <*> Graph a = Graph (f <*> a)
instance Alternative (Graph (PatternF t))
  where empty = Graph empty
        Graph a <|> Graph b = Graph (a <|> b)
        some (Graph p) = Graph (some p)
        many (Graph p) = Graph (many p)
instance Monad (Graph (PatternF t))
  where return = pure
        Graph p >>= f = Graph (p >>= unGraph . f)
-- Fix instances mirror the Rec ones, embedding constructors directly.
instance Functor (Fix (PatternF t))
  where fmap = (hembed .) . Map
instance Applicative (Fix (PatternF t))
  where pure = hembed . Ret . pure
        (<*>) = (((fmap (uncurry ($))) . hembed) .) . Cat
instance Alternative (Fix (PatternF t))
  where empty = hembed Nul
        (<|>) = (hembed .) . Alt
        some v = (:) <$> v <*> many v
        many = hembed . Rep
instance Monad (Fix (PatternF t))
  where return = pure
        (>>=) = (hembed .) . Bnd
-- 'hembed' always routes through 'compactF', so every constructed pattern
-- is simplified on the way in.
instance HCorecursive (Rec (PatternF t) v)
  where hembed = liftRec compactF . wrap
instance HCorecursive (Fix (PatternF t))
  where hembed = Fix . compactF
-- Only an 'In' node exposes its layer; other Rec shapes yield 'Nothing'.
instance Pattern (Rec (PatternF t) v) t
  where pattern (Rec (In r)) = Just r
        pattern _ = Nothing
instance Pattern (Fix (PatternF t)) t
  where pattern = Just . unFix
| robrix/derivative-parsing | src/Data/Pattern.hs | bsd-3-clause | 8,064 | 0 | 15 | 2,325 | 4,236 | 2,080 | 2,156 | 210 | 21 |
{-
(c) The University of Glasgow 2011
The deriving code for the Generic class
(equivalent to the code in TcGenDeriv, for other classes)
-}
{-# LANGUAGE CPP, ScopedTypeVariables, TupleSections #-}
{-# LANGUAGE FlexibleContexts #-}
module TcGenGenerics (canDoGenerics, canDoGenerics1,
GenericKind(..),
gen_Generic_binds, get_gen1_constrained_tys) where
import HsSyn
import Type
import TcType
import TcGenDeriv
import DataCon
import TyCon
import FamInstEnv ( FamInst, FamFlavor(..), mkSingleCoAxiom )
import FamInst
import Module ( Module, moduleName, moduleNameFS
, moduleUnitId, unitIdFS )
import IfaceEnv ( newGlobalBinder )
import Name hiding ( varName )
import RdrName
import BasicTypes
import TysPrim
import TysWiredIn
import PrelNames
import TcEnv
import TcRnMonad
import HscTypes
import ErrUtils( Validity(..), andValid )
import SrcLoc
import Bag
import VarEnv
import VarSet (elemVarSet)
import Outputable
import FastString
import Util
import Control.Monad (mplus)
import Data.List (zip4, partition)
import Data.Maybe (isJust)
#include "HsVersions.h"
{-
************************************************************************
* *
\subsection{Bindings for the new generic deriving mechanism}
* *
************************************************************************
For the generic representation we need to generate:
\begin{itemize}
\item A Generic instance
\item A Rep type instance
\item Many auxiliary datatypes and instances for them (for the meta-information)
\end{itemize}
-}
-- | Generate the @from@/@to@ method bindings together with the matching
--   @Rep@/@Rep1@ type family instance for the given type constructor.
gen_Generic_binds :: GenericKind -> TyCon -> [Type] -> Module
                  -> TcM (LHsBinds RdrName, FamInst)
gen_Generic_binds gk tc inst_tys mod = do
  repTyInsts <- tc_mkRepFamInsts gk tc inst_tys mod
  return (mkBindsRep gk tc, repTyInsts)
{-
************************************************************************
* *
\subsection{Generating representation types}
* *
************************************************************************
-}
get_gen1_constrained_tys :: TyVar -> Type -> [Type]
-- called by TcDeriv.inferConstraints; generates a list of types, each of which
-- must be a Functor in order for the Generic1 instance to work.
-- Only the left-hand types of compositions (the 'ata_comp' hook) are
-- collected; every other shape contributes nothing.
get_gen1_constrained_tys argVar
  = argTyFold argVar $ ArgTyAlg { ata_rec0 = const []
                                , ata_par1 = [], ata_rec1 = const []
                                , ata_comp = (:) }
{-
Note [Requirements for deriving Generic and Rep]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In the following, T, Tfun, and Targ are "meta-variables" ranging over type
expressions.
(Generic T) and (Rep T) are derivable for some type expression T if the
following constraints are satisfied.
(a) D is a type constructor *value*. In other words, D is either a type
constructor or it is equivalent to the head of a data family instance (up to
alpha-renaming).
(b) D cannot have a "stupid context".
(c) The right-hand side of D cannot include existential types, universally
quantified types, or "exotic" unlifted types. An exotic unlifted type
is one which is not listed in the definition of allowedUnliftedTy
(i.e., one for which we have no representation type).
See Note [Generics and unlifted types]
(d) T :: *.
(Generic1 T) and (Rep1 T) are derivable for some type expression T if the
following constraints are satisfied.
(a),(b),(c) As above.
(d) T must expect arguments, and its last parameter must have kind *.
We use `a' to denote the parameter of D that corresponds to the last
parameter of T.
(e) For any type-level application (Tfun Targ) in the right-hand side of D
where the head of Tfun is not a tuple constructor:
(b1) `a' must not occur in Tfun.
(b2) If `a' occurs in Targ, then Tfun :: * -> *.
-}
canDoGenerics :: TyCon -> Validity
-- canDoGenerics determines if Generic/Rep can be derived.
--
-- Check (a) from Note [Requirements for deriving Generic and Rep] is taken
-- care of because canDoGenerics is applied to rep tycons.
--
-- It returns IsValid if deriving is possible, and NotValid with the
-- collected reasons (joined by mergeErrors) if not.
canDoGenerics tc
  = mergeErrors (
          -- Check (b) from Note [Requirements for deriving Generic and Rep].
              (if (not (null (tyConStupidTheta tc)))
                then (NotValid (tc_name <+> text "must not have a datatype context"))
                else IsValid)
          -- See comment below
            : (map bad_con (tyConDataCons tc)))
  where
    -- The tc can be a representation tycon. When we want to display it to the
    -- user (in an error message) we should print its parent
    tc_name = ppr $ case tyConFamInst_maybe tc of
        Just (ptc, _) -> ptc
        _             -> tc

    -- Check (c) from Note [Requirements for deriving Generic and Rep].
    --
    -- If any of the constructors has an exotic unlifted type as argument,
    -- then we can't build the embedding-projection pair, because
    -- it relies on instantiating *polymorphic* sum and product types
    -- at the argument types of the constructors
    bad_con dc = if (any bad_arg_type (dataConOrigArgTys dc))
                  then (NotValid (ppr dc <+> text
                    "must not have exotic unlifted or polymorphic arguments"))
                  else (if (not (isVanillaDataCon dc))
                          then (NotValid (ppr dc <+> text "must be a vanilla data constructor"))
                          else IsValid)

    -- Nor can we do the job if it's an existential data constructor,
    -- Nor if the args are polymorphic types (I don't think)
    bad_arg_type ty = (isUnliftedType ty && not (allowedUnliftedTy ty))
                      || not (isTauTy ty)
-- Returns True if the Type argument is an unlifted type which has a
-- corresponding generic representation type. For example,
-- (allowedUnliftedTy Int#) would return True since there is the UInt
-- representation type.
allowedUnliftedTy :: Type -> Bool
allowedUnliftedTy = isJust . unboxedRepRDRs
-- | Fold a list of 'Validity' results into a single one, joining all the
--   error messages with ", and"; valid entries are dropped.
mergeErrors :: [Validity] -> Validity
mergeErrors = foldr combine IsValid
  where
    combine IsValid      rest = rest
    combine (NotValid s) rest = case rest of
      IsValid     -> NotValid s
      NotValid s' -> NotValid (s <> text ", and" $$ s')
-- A datatype used only inside of canDoGenerics1. It's the result of analysing
-- a type term.
data Check_for_CanDoGenerics1 = CCDG1
  { _ccdg1_hasParam :: Bool -- does the parameter of interest occur in
                            -- this type?
  , _ccdg1_errors :: Validity -- errors generated by this type
  }
{-
Note [degenerate use of FFoldType]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We use foldDataConArgs here only for its ability to treat tuples
specially. foldDataConArgs also tracks covariance (though it assumes all
higher-order type parameters are covariant) and has hooks for special handling
of functions and polytypes, but we do *not* use those.
The key issue is that Generic1 deriving currently offers no sophisticated
support for functions. For example, we cannot handle
data F a = F ((a -> Int) -> Int)
even though a is occurring covariantly.
In fact, our rule is harsh: a is simply not allowed to occur within the first
argument of (->). We treat (->) the same as any other non-tuple tycon.
Unfortunately, this means we have to track "the parameter occurs in this type"
explicitly, even though foldDataConArgs is also doing this internally.
-}
-- canDoGenerics1 determines if a Generic1/Rep1 can be derived.
--
-- Checks (a) through (c) from Note [Requirements for deriving Generic and Rep]
-- are taken care of by the call to canDoGenerics.
--
-- It returns IsValid if deriving is possible, and NotValid with the
-- collected reasons if not.
canDoGenerics1 :: TyCon -> Validity
canDoGenerics1 rep_tc =
  canDoGenerics rep_tc `andValid` additionalChecks
  where
    additionalChecks
        -- check (d) from Note [Requirements for deriving Generic and Rep]
      | null (tyConTyVars rep_tc) = NotValid $
          text "Data type" <+> quotes (ppr rep_tc)
          <+> text "must have some type parameters"

      | otherwise = mergeErrors $ concatMap check_con data_cons

    data_cons = tyConDataCons rep_tc
    -- A non-vanilla constructor fails outright; otherwise collect the
    -- per-argument verdicts from the FFoldType traversal below.
    check_con con = case check_vanilla con of
      j@(NotValid {}) -> [j]
      IsValid -> _ccdg1_errors `map` foldDataConArgs (ft_check con) con

    bad :: DataCon -> SDoc -> SDoc
    bad con msg = text "Constructor" <+> quotes (ppr con) <+> msg

    check_vanilla :: DataCon -> Validity
    check_vanilla con | isVanillaDataCon con = IsValid
                      | otherwise            = NotValid (bad con existential)

    -- monoid-ish helpers over Check_for_CanDoGenerics1
    bmzero      = CCDG1 False IsValid
    bmbad con s = CCDG1 True $ NotValid $ bad con s
    bmplus (CCDG1 b1 m1) (CCDG1 b2 m2) = CCDG1 (b1 || b2) (m1 `andValid` m2)

    -- check (e) from Note [Requirements for deriving Generic and Rep]
    -- See also Note [degenerate use of FFoldType]
    ft_check :: DataCon -> FFoldType Check_for_CanDoGenerics1
    ft_check con = FT
      { ft_triv = bmzero

      , ft_var = caseVar, ft_co_var = caseVar

      -- (component_0,component_1,...,component_n)
      , ft_tup = \_ components -> if any _ccdg1_hasParam (init components)
                                  then bmbad con wrong_arg
                                  else foldr bmplus bmzero components

      -- (dom -> rng), where the head of ty is not a tuple tycon
      , ft_fun = \dom rng -> -- cf #8516
          if _ccdg1_hasParam dom
          then bmbad con wrong_arg
          else bmplus dom rng

      -- (ty arg), where head of ty is neither (->) nor a tuple constructor and
      -- the parameter of interest does not occur in ty
      , ft_ty_app = \_ arg -> arg

      , ft_bad_app = bmbad con wrong_arg

      , ft_forall = \_ body -> body -- polytypes are handled elsewhere
      }
      where
        caseVar = CCDG1 True IsValid

    existential = text "must not have existential arguments"
    wrong_arg   = text "applies a type to an argument involving the last parameter"
               $$ text "but the applied type is not of kind * -> *"
{-
************************************************************************
* *
\subsection{Generating the RHS of a generic default method}
* *
************************************************************************
-}
type US = Int -- Local unique supply, just a plain Int
-- | A pattern together with the expression it maps to.
type Alt = (LPat RdrName, LHsExpr RdrName)
-- GenericKind serves to mark if a datatype derives Generic (Gen0) or
-- Generic1 (Gen1).
data GenericKind = Gen0 | Gen1
-- as above, but carrying the type variable that is "the" parameter
-- (the TyCon's last tyvar)
data GenericKind_ = Gen0_ | Gen1_ TyVar
-- as above, but using a single datacon's tyvar for "the" parameter
data GenericKind_DC = Gen0_DC | Gen1_DC TyVar
-- | Drop the argument-variable payload, recovering the plain 'GenericKind'.
forgetArgVar :: GenericKind_DC -> GenericKind
forgetArgVar gk = case gk of
  Gen0_DC   -> Gen0
  Gen1_DC _ -> Gen1
-- When working only within a single datacon, "the" parameter's name should
-- match that datacon's name for it.
gk2gkDC :: GenericKind_ -> DataCon -> GenericKind_DC
gk2gkDC Gen0_ _ = Gen0_DC
-- NOTE(review): 'last' is partial -- presumably safe because a datacon in
-- the Gen1 case always has at least one universal tyvar (cf. the ASSERTs
-- elsewhere in this module); confirm.
gk2gkDC Gen1_{} d = Gen1_DC $ last $ dataConUnivTyVars d
-- Bindings for the Generic instance
-- | Generate the @from@/@to@ (for 'Gen0') or @from1@/@to1@ (for 'Gen1')
--   method bindings for the derived instance of @tycon@.
mkBindsRep :: GenericKind -> TyCon -> LHsBinds RdrName
mkBindsRep gk tycon =
    unitBag (mkRdrFunBind (L loc from01_RDR) from_matches)
  `unionBags`
    unitBag (mkRdrFunBind (L loc to01_RDR) to_matches)
      where
        from_matches  = [mkSimpleHsAlt pat rhs | (pat,rhs) <- from_alts]
        to_matches    = [mkSimpleHsAlt pat rhs | (pat,rhs) <- to_alts  ]
        loc           = srcLocSpan (getSrcLoc tycon)
        datacons      = tyConDataCons tycon
        (from01_RDR, to01_RDR) = case gk of
                                   Gen0 -> (from_RDR,  to_RDR)
                                   Gen1 -> (from1_RDR, to1_RDR)

        -- Recurse over the sum first
        from_alts, to_alts :: [Alt]
        (from_alts, to_alts) = mkSum gk_ (1 :: US) tycon datacons
          where gk_ = case gk of
                  Gen0 -> Gen0_
                  Gen1 -> ASSERT(length tyvars >= 1)
                          Gen1_ (last tyvars)
                    where tyvars = tyConTyVars tycon
--------------------------------------------------------------------------------
-- The type synonym instance and synonym
-- type instance Rep (D a b) = Rep_D a b
-- type Rep_D a b = ...representation type for D ...
--------------------------------------------------------------------------------
-- | Build the @Rep@/@Rep1@ type family instance for @tycon@, i.e. the
--   axiom behind @type instance Rep (D a b) = ...representation type...@.
tc_mkRepFamInsts :: GenericKind   -- Gen0 or Gen1
               -> TyCon           -- The type to generate representation for
               -> [Type]          -- The type(s) to which Generic(1) is applied
                                  -- in the generated instance
               -> Module          -- Used as the location of the new RepTy
               -> TcM (FamInst)   -- Generated representation0 coercion
tc_mkRepFamInsts gk tycon inst_tys mod =
       -- Consider the example input tycon `D`, where data D a b = D_ a
       -- Also consider `R:DInt`, where { data family D x y :: * -> *
       --                               ; data instance D Int a b = D_ a }
  do { -- `rep` = GHC.Generics.Rep or GHC.Generics.Rep1 (type family)
       fam_tc <- case gk of
         Gen0 -> tcLookupTyCon repTyConName
         Gen1 -> tcLookupTyCon rep1TyConName

     ; fam_envs <- tcGetFamInstEnvs

     ; let -- If the derived instance is
           --   instance Generic (Foo x)
           -- then:
           --   `arg_ki` = *, `inst_ty` = Foo x :: *
           --
           -- If the derived instance is
           --   instance Generic1 (Bar x :: k -> *)
           -- then:
           --   `arg_k` = k, `inst_ty` = Bar x :: k -> *
           (arg_ki, inst_ty) = case (gk, inst_tys) of
             (Gen0, [inst_t])        -> (liftedTypeKind, inst_t)
             (Gen1, [arg_k, inst_t]) -> (arg_k, inst_t)
             _ -> pprPanic "tc_mkRepFamInsts" (ppr inst_tys)

     ; let mbFamInst = tyConFamInst_maybe tycon
           -- If we're examining a data family instance, we grab the parent
           -- TyCon (ptc) and use it to determine the type arguments
           -- (inst_args) for the data family *instance*'s type variables.
           ptc = maybe tycon fst mbFamInst
           (_, inst_args, _) = tcLookupDataFamInst fam_envs ptc $ snd
                                 $ tcSplitTyConApp inst_ty

     ; let -- `tyvars` = [a,b]
           (tyvars, gk_) = case gk of
             Gen0 -> (all_tyvars, Gen0_)
             Gen1 -> ASSERT(not $ null all_tyvars)
                     (init all_tyvars, Gen1_ $ last all_tyvars)
             where all_tyvars = tyConTyVars tycon

       -- `repTy` = D1 ... (C1 ... (S1 ... (Rec0 a))) :: * -> *
     ; repTy <- tc_mkRepTy gk_ tycon arg_ki

       -- `rep_name` is a name we generate for the synonym
     ; rep_name <- let mkGen = case gk of Gen0 -> mkGenR; Gen1 -> mkGen1R
                   in newGlobalBinder mod (mkGen (nameOccName (tyConName tycon)))
                        (nameSrcSpan (tyConName tycon))

       -- We make sure to substitute the tyvars with their user-supplied
       -- type arguments before generating the Rep/Rep1 instance, since some
       -- of the tyvars might have been instantiated when deriving.
       -- See Note [Generating a correctly typed Rep instance].
     ; let env        = zipTyEnv tyvars inst_args
           in_scope   = mkInScopeSet (tyCoVarsOfTypes inst_tys)
           subst      = mkTvSubst in_scope env
           repTy'     = substTy subst repTy
           tcv'       = tyCoVarsOfTypeList inst_ty
           (tv', cv') = partition isTyVar tcv'
           tvs'       = toposortTyVars tv'
           cvs'       = toposortTyVars cv'
           axiom      = mkSingleCoAxiom Nominal rep_name tvs' cvs'
                                        fam_tc inst_tys repTy'

     ; newFamInst SynFamilyInst axiom }
--------------------------------------------------------------------------------
-- Type representation
--------------------------------------------------------------------------------
-- | See documentation of 'argTyFold'; that function uses the fields of this
-- type to interpret the structure of a type when that type is considered as an
-- argument to a constructor that is being represented with 'Rep1'.
data ArgTyAlg a = ArgTyAlg
  { ata_rec0 :: (Type -> a)   -- ^ result for a type represented via @Rec0@ (the default)
  , ata_par1 :: a, ata_rec1 :: (Type -> a)
    -- ^ results for "the" parameter itself (@Par1@) and for an application
    --   of a functor to it (@Rec1@)
  , ata_comp :: (Type -> a -> a)
    -- ^ result for a composition (@:.:@): the applied type and the folded
    --   result of its argument
  }
-- | @argTyFold@ implements a generalised and safer variant of the @arg@
-- function from Figure 3 in <http://dreixel.net/research/pdf/gdmh.pdf>. @arg@
-- is conceptually equivalent to:
--
-- > arg t = case t of
-- > _ | isTyVar t -> if (t == argVar) then Par1 else Par0 t
-- > App f [t'] |
-- > representable1 f &&
-- > t' == argVar -> Rec1 f
-- > App f [t'] |
-- > representable1 f &&
-- > t' has tyvars -> f :.: (arg t')
-- > _ -> Rec0 t
--
-- where @argVar@ is the last type variable in the data type declaration we are
-- finding the representation for.
--
-- @argTyFold@ is more general than @arg@ because it uses 'ArgTyAlg' to
-- abstract out the concrete invocations of @Par0@, @Rec0@, @Par1@, @Rec1@, and
-- @:.:@.
--
-- @argTyFold@ is safer than @arg@ because @arg@ would lead to a GHC panic for
-- some data types. The problematic case is when @t@ is an application of a
-- non-representable type @f@ to @argVar@: @App f [argVar]@ is caught by the
-- @_@ pattern, and ends up represented as @Rec0 t@. This type occurs /free/ in
-- the RHS of the eventual @Rep1@ instance, which is therefore ill-formed. Some
-- representable1 checks have been relaxed, and others were moved to
-- @canDoGenerics1@.
-- | Interpret a constructor-argument type via the 'ArgTyAlg' hooks; see the
--   large comment above for how this relates to the @arg@ function of the
--   generic-deriving paper.
argTyFold :: forall a. TyVar -> ArgTyAlg a -> Type -> a
argTyFold argVar (ArgTyAlg {ata_rec0 = mkRec0,
                            ata_par1 = mkPar1, ata_rec1 = mkRec1,
                            ata_comp = mkComp}) =
  -- mkRec0 is the default; use it if there is no interesting structure
  -- (e.g. occurrences of parameters or recursive occurrences)
  \t -> maybe (mkRec0 t) id $ go t where
    go :: Type -> -- type to fold through
          Maybe a -- the result (e.g. representation type), unless it's trivial
    go t = isParam `mplus` isApp where

      isParam = do -- handles parameters
        t' <- getTyVar_maybe t
        Just $ if t' == argVar then mkPar1 -- moreover, it is "the" parameter
               else mkRec0 t -- NB mkRec0 instead of the conventional mkPar0

      isApp = do -- handles applications
        (phi, beta) <- tcSplitAppTy_maybe t
        let interesting = argVar `elemVarSet` exactTyCoVarsOfType beta
        -- Does it have no interesting structure to represent?
        if not interesting then Nothing
          else -- Is the argument the parameter? Special case for mkRec1.
               if Just argVar == getTyVar_maybe beta then Just $ mkRec1 phi
                 else mkComp phi `fmap` go beta -- It must be a composition.
-- | Generate the RHS of a @Rep@/@Rep1@ type family instance for @tycon@.
-- Looks up all the representation TyCons and promoted metadata DataCons,
-- then assembles the balanced sum-of-products representation type.
-- See Note [Generating a correctly typed Rep instance] and
-- Note [Handling kinds in a Rep instance].
tc_mkRepTy :: -- Gen0_ or Gen1_, for Rep or Rep1
              GenericKind_
              -- The type to generate representation for
           -> TyCon
              -- The kind of the representation type's argument
              -- See Note [Handling kinds in a Rep instance]
           -> Kind
              -- Generated representation0 type
           -> TcM Type
tc_mkRepTy gk_ tycon k =
  do
    -- Representation type constructors from GHC.Generics
    d1      <- tcLookupTyCon d1TyConName
    c1      <- tcLookupTyCon c1TyConName
    s1      <- tcLookupTyCon s1TyConName
    rec0    <- tcLookupTyCon rec0TyConName
    rec1    <- tcLookupTyCon rec1TyConName
    par1    <- tcLookupTyCon par1TyConName
    u1      <- tcLookupTyCon u1TyConName
    v1      <- tcLookupTyCon v1TyConName
    plus    <- tcLookupTyCon sumTyConName
    times   <- tcLookupTyCon prodTyConName
    comp    <- tcLookupTyCon compTyConName
    uAddr   <- tcLookupTyCon uAddrTyConName
    uChar   <- tcLookupTyCon uCharTyConName
    uDouble <- tcLookupTyCon uDoubleTyConName
    uFloat  <- tcLookupTyCon uFloatTyConName
    uInt    <- tcLookupTyCon uIntTyConName
    uWord   <- tcLookupTyCon uWordTyConName

    let tcLookupPromDataCon = fmap promoteDataCon . tcLookupDataCon

    -- Promoted data constructors for type-level metadata
    md      <- tcLookupPromDataCon metaDataDataConName
    mc      <- tcLookupPromDataCon metaConsDataConName
    ms      <- tcLookupPromDataCon metaSelDataConName
    pPrefix <- tcLookupPromDataCon prefixIDataConName
    pInfix  <- tcLookupPromDataCon infixIDataConName
    pLA     <- tcLookupPromDataCon leftAssociativeDataConName
    pRA     <- tcLookupPromDataCon rightAssociativeDataConName
    pNA     <- tcLookupPromDataCon notAssociativeDataConName
    pSUpk   <- tcLookupPromDataCon sourceUnpackDataConName
    pSNUpk  <- tcLookupPromDataCon sourceNoUnpackDataConName
    pNSUpkness <- tcLookupPromDataCon noSourceUnpackednessDataConName
    pSLzy   <- tcLookupPromDataCon sourceLazyDataConName
    pSStr   <- tcLookupPromDataCon sourceStrictDataConName
    pNSStrness <- tcLookupPromDataCon noSourceStrictnessDataConName
    pDLzy   <- tcLookupPromDataCon decidedLazyDataConName
    pDStr   <- tcLookupPromDataCon decidedStrictDataConName
    pDUpk   <- tcLookupPromDataCon decidedUnpackDataConName

    fix_env <- getFixityEnv

    let mkSum' a b = mkTyConApp plus  [k,a,b]
        mkProd a b = mkTyConApp times [k,a,b]
        -- The second kind variable of (:.:) must always be *.
        -- See Note [Handling kinds in a Rep instance]
        mkComp a b = mkTyConApp comp [k,liftedTypeKind,a,b]
        mkRec0 a   = mkBoxTy uAddr uChar uDouble uFloat uInt uWord rec0 k a
        mkRec1 a   = mkTyConApp rec1 [k,a]
        mkPar1     = mkTyConTy par1
        mkD a = mkTyConApp d1 [ k, metaDataTy, sumP (tyConDataCons a) ]
        mkC a = mkTyConApp c1 [ k
                              , metaConsTy a
                              , prod (dataConInstOrigArgTys a
                                       . mkTyVarTys . tyConTyVars $ tycon)
                                     (dataConSrcBangs    a)
                                     (dataConImplBangs   a)
                                     (dataConFieldLabels a)]
        mkS mlbl su ss ib a = mkTyConApp s1 [k, metaSelTy mlbl su ss ib, a]

        -- Sums and products are done in the same way for both Rep and Rep1
        sumP [] = mkTyConApp v1 [k]
        sumP l  = foldBal mkSum' . map mkC $ l
        -- The Bool is True if this constructor has labelled fields
        prod :: [Type] -> [HsSrcBang] -> [HsImplBang] -> [FieldLabel] -> Type
        prod [] _  _  _  = mkTyConApp u1 [k]
        prod l  sb ib fl = foldBal mkProd
                             [ ASSERT(null fl || length fl > j)
                               arg t sb' ib' (if null fl
                                                then Nothing
                                                else Just (fl !! j))
                             | (t,sb',ib',j) <- zip4 l sb ib [0..] ]

        arg :: Type -> HsSrcBang -> HsImplBang -> Maybe FieldLabel -> Type
        arg t (HsSrcBang _ su ss) ib fl = mkS fl su ss ib $ case gk_ of
            -- Here we previously used Par0 if t was a type variable, but we
            -- realized that we can't always guarantee that we are wrapping-up
            -- all type variables in Par0. So we decided to stop using Par0
            -- altogether, and use Rec0 all the time.
            Gen0_        -> mkRec0 t
            Gen1_ argVar -> argPar argVar t
          where
            -- Builds argument representation for Rep1 (more complicated due to
            -- the presence of composition).
            argPar argVar = argTyFold argVar $ ArgTyAlg
              {ata_rec0 = mkRec0, ata_par1 = mkPar1,
               ata_rec1 = mkRec1, ata_comp = mkComp}

        -- For a data family instance, report the name of the parent family.
        tyConName_user = case tyConFamInst_maybe tycon of
                           Just (ptycon, _) -> tyConName ptycon
                           Nothing          -> tyConName tycon

        dtName  = mkStrLitTy . occNameFS . nameOccName $ tyConName_user
        mdName  = mkStrLitTy . moduleNameFS . moduleName
                . nameModule . tyConName $ tycon
        pkgName = mkStrLitTy . unitIdFS . moduleUnitId
                . nameModule . tyConName $ tycon
        isNT    = mkTyConTy $ if isNewTyCon tycon
                                then promotedTrueDataCon
                                else promotedFalseDataCon

        ctName = mkStrLitTy . occNameFS . nameOccName . dataConName
        -- Fixity metadata: only infix constructors consult the fixity env.
        ctFix c
            | dataConIsInfix c
            = case lookupFixity fix_env (dataConName c) of
                Fixity _ n InfixL -> buildFix n pLA
                Fixity _ n InfixR -> buildFix n pRA
                Fixity _ n InfixN -> buildFix n pNA
            | otherwise = mkTyConTy pPrefix
        buildFix n assoc = mkTyConApp pInfix [ mkTyConTy assoc
                                             , mkNumLitTy (fromIntegral n)]

        -- Was `length (dataConFieldLabels c) > 0`: O(n) and unidiomatic.
        isRec c = mkTyConTy $ if not (null (dataConFieldLabels c))
                                then promotedTrueDataCon
                                else promotedFalseDataCon

        selName = mkStrLitTy . flLabel

        mbSel Nothing  = mkTyConApp promotedNothingDataCon [typeSymbolKind]
        mbSel (Just s) = mkTyConApp promotedJustDataCon
                                    [typeSymbolKind, selName s]

        metaDataTy   = mkTyConApp md [dtName, mdName, pkgName, isNT]
        metaConsTy c = mkTyConApp mc [ctName c, ctFix c, isRec c]
        metaSelTy mlbl su ss ib =
            mkTyConApp ms [mbSel mlbl, pSUpkness, pSStrness, pDStrness]
          where
            pSUpkness = mkTyConTy $ case su of
                                      SrcUnpack   -> pSUpk
                                      SrcNoUnpack -> pSNUpk
                                      NoSrcUnpack -> pNSUpkness

            pSStrness = mkTyConTy $ case ss of
                                      SrcLazy     -> pSLzy
                                      SrcStrict   -> pSStr
                                      NoSrcStrict -> pNSStrness

            pDStrness = mkTyConTy $ case ib of
                                      HsLazy     -> pDLzy
                                      HsStrict   -> pDStr
                                      HsUnpack{} -> pDUpk

    return (mkD tycon)
-- Given the TyCons for each URec-related type synonym, check to see if the
-- given type is an unlifted type that generics understands. If so, return
-- its representation type. Otherwise, return Rec0.
-- See Note [Generics and unlifted types]
mkBoxTy :: TyCon -- UAddr
        -> TyCon -- UChar
        -> TyCon -- UDouble
        -> TyCon -- UFloat
        -> TyCon -- UInt
        -> TyCon -- UWord
        -> TyCon -- Rec0
        -> Kind  -- What to instantiate Rec0's kind variable with
        -> Type
        -> Type
mkBoxTy uAddr uChar uDouble uFloat uInt uWord rec0 k ty =
    case [rep | (primTy, rep) <- unliftedReps, ty `eqType` primTy] of
      (rep:_) -> mkTyConApp rep  [k]
      []      -> mkTyConApp rec0 [k, ty]
  where
    -- Table of unlifted primitive types and their URec representations.
    unliftedReps = [ (addrPrimTy,   uAddr)
                   , (charPrimTy,   uChar)
                   , (doublePrimTy, uDouble)
                   , (floatPrimTy,  uFloat)
                   , (intPrimTy,    uInt)
                   , (wordPrimTy,   uWord)
                   ]
--------------------------------------------------------------------------------
-- Dealing with sums
--------------------------------------------------------------------------------

-- | Build the "from" and "to" alternatives for every constructor of the
-- datatype, routed through a balanced tree of L1/R1 sum injections.
mkSum :: GenericKind_ -- Generic or Generic1?
      -> US           -- Base for generating unique names
      -> TyCon        -- The type constructor
      -> [DataCon]    -- The data constructors
      -> ([Alt],      -- Alternatives for the T->Trep "from" function
          [Alt])      -- Alternatives for the Trep->T "to" function

-- Datatype without any constructors: both directions are runtime errors,
-- since there are no values to convert.
mkSum _ _ tycon [] = ([from_alt], [to_alt])
  where
    from_alt = (nlWildPat, mkM1_E (makeError errMsgFrom))
    to_alt   = (mkM1_P nlWildPat, makeError errMsgTo)
              -- These M1s are meta-information for the datatype
    makeError s = nlHsApp (nlHsVar error_RDR) (nlHsLit (mkHsString s))
    tyConStr   = occNameString (nameOccName (tyConName tycon))
    errMsgFrom = "No generic representation for empty datatype " ++ tyConStr
    errMsgTo   = "No values for empty datatype " ++ tyConStr

-- Datatype with at least one constructor: one alternative pair per
-- constructor, numbered from 1.
mkSum gk_ us _ datacons =
  -- switch the payload of gk_ to be datacon-centric instead of tycon-centric
  unzip [ mk1Sum (gk2gkDC gk_ d) us i (length datacons) d
        | (d,i) <- zip datacons [1..] ]
-- Build the sum for a particular constructor
mk1Sum :: GenericKind_DC -- Generic or Generic1?
       -> US             -- Base for generating unique names
       -> Int            -- The index of this constructor
       -> Int            -- Total number of constructors
       -> DataCon        -- The data constructor
       -> (Alt,          -- Alternative for the T->Trep "from" function
           Alt)          -- Alternative for the Trep->T "to" function
mk1Sum gk_ us i n datacon = (from_alt, to_alt)
  where
    gk = forgetArgVar gk_

    -- Existentials already excluded
    argTys = dataConOrigArgTys datacon
    n_args = dataConSourceArity datacon

    -- One fresh local variable per constructor field.
    datacon_varTys = zip (map mkGenericLocal [us .. us+n_args-1]) argTys
    datacon_vars = map fst datacon_varTys
    us' = us + n_args

    datacon_rdr = getRdrName datacon

    from_alt = (nlConVarPat datacon_rdr datacon_vars, from_alt_rhs)
    from_alt_rhs = mkM1_E (genLR_E i n (mkProd_E gk_ us' datacon_varTys))

    to_alt = ( mkM1_P (genLR_P i n (mkProd_P gk us' datacon_varTys))
             , to_alt_rhs
             ) -- These M1s are meta-information for the datatype
    -- For Generic1 the field values must be unwrapped (unK1/unPar1/unRec1/
    -- fmap . unComp1) before reapplying the constructor.
    to_alt_rhs = case gk_ of
      Gen0_DC        -> nlHsVarApps datacon_rdr datacon_vars
      Gen1_DC argVar -> nlHsApps datacon_rdr $ map argTo datacon_varTys
        where
          argTo (var, ty) = converter ty `nlHsApp` nlHsVar var where
            converter = argTyFold argVar $ ArgTyAlg
              {ata_rec0 = nlHsVar . unboxRepRDR,
               ata_par1 = nlHsVar unPar1_RDR,
               ata_rec1 = const $ nlHsVar unRec1_RDR,
               ata_comp = \_ cnv -> (nlHsVar fmap_RDR `nlHsApp` cnv)
                                    `nlHsCompose` nlHsVar unComp1_RDR}
-- | Generates the L1/R1 pattern that routes constructor @i@ of @n@
-- through a balanced binary tree of sum patterns.
genLR_P :: Int -> Int -> LPat RdrName -> LPat RdrName
genLR_P i n p
  | n == 0    = error "impossible"
  | n == 1    = p
  | i <= half = nlConPat l1DataCon_RDR [genLR_P i half p]
  | otherwise = nlConPat r1DataCon_RDR [genLR_P (i - half) (n - half) p]
  where
    half = n `div` 2
-- | Generates the L1/R1 expression that routes constructor @i@ of @n@
-- through a balanced binary tree of sum injections.
genLR_E :: Int -> Int -> LHsExpr RdrName -> LHsExpr RdrName
genLR_E i n e
  | n == 0    = error "impossible"
  | n == 1    = e
  | i <= half = nlHsVar l1DataCon_RDR `nlHsApp` genLR_E i half e
  | otherwise = nlHsVar r1DataCon_RDR `nlHsApp` genLR_E (i - half) (n - half) e
  where
    half = n `div` 2
--------------------------------------------------------------------------------
-- Dealing with products
--------------------------------------------------------------------------------

-- Build a product expression
mkProd_E :: GenericKind_DC    -- Generic or Generic1?
         -> US                -- Base for unique names
         -> [(RdrName, Type)] -- List of variables matched on the lhs and their types
         -> LHsExpr RdrName   -- Resulting product expression
-- A constructor with no fields is represented by U1.
mkProd_E _   _ []     = mkM1_E (nlHsVar u1DataCon_RDR)
mkProd_E gk_ _ varTys = mkM1_E (foldBal prod appVars)
                        -- These M1s are meta-information for the constructor
  where
    appVars = map (wrapArg_E gk_) varTys
    prod a b = prodDataCon_RDR `nlHsApps` [a,b]
-- | Wrap a single constructor field in the appropriate representation
-- constructor (K1/URec for Generic, and additionally Par1/Rec1/Comp1
-- for Generic1).
wrapArg_E :: GenericKind_DC -> (RdrName, Type) -> LHsExpr RdrName
wrapArg_E Gen0_DC (var, ty) = mkM1_E $
                              boxRepRDR ty `nlHsVarApps` [var]
                              -- This M1 is meta-information for the selector
wrapArg_E (Gen1_DC argVar) (var, ty) = mkM1_E $
                                       converter ty `nlHsApp` nlHsVar var
                                       -- This M1 is meta-information for the selector
  where converter = argTyFold argVar $ ArgTyAlg
                      {ata_rec0 = nlHsVar . boxRepRDR,
                       ata_par1 = nlHsVar par1DataCon_RDR,
                       ata_rec1 = const $ nlHsVar rec1DataCon_RDR,
                       ata_comp = \_ cnv -> nlHsVar comp1DataCon_RDR `nlHsCompose`
                                            (nlHsVar fmap_RDR `nlHsApp` cnv)}
-- | Constructor used to box a field in the "from" direction: the URec
-- constructor for a recognised unlifted type, otherwise K1.
boxRepRDR :: Type -> RdrName
boxRepRDR = maybe k1DataCon_RDR fst . unboxedRepRDRs

-- | Selector used to unbox a field in the "to" direction: the URec
-- selector for a recognised unlifted type, otherwise unK1.
unboxRepRDR :: Type -> RdrName
unboxRepRDR = maybe unK1_RDR snd . unboxedRepRDRs

-- Retrieve the RDRs associated with each URec data family instance
-- constructor. See Note [Generics and unlifted types]
unboxedRepRDRs :: Type -> Maybe (RdrName, RdrName)
unboxedRepRDRs ty
  | ty `eqType` addrPrimTy   = Just (uAddrDataCon_RDR,   uAddrHash_RDR)
  | ty `eqType` charPrimTy   = Just (uCharDataCon_RDR,   uCharHash_RDR)
  | ty `eqType` doublePrimTy = Just (uDoubleDataCon_RDR, uDoubleHash_RDR)
  | ty `eqType` floatPrimTy  = Just (uFloatDataCon_RDR,  uFloatHash_RDR)
  | ty `eqType` intPrimTy    = Just (uIntDataCon_RDR,    uIntHash_RDR)
  | ty `eqType` wordPrimTy   = Just (uWordDataCon_RDR,   uWordHash_RDR)
  | otherwise                = Nothing
-- Build a product pattern
mkProd_P :: GenericKind       -- Gen0 or Gen1
         -> US                -- Base for unique names
         -> [(RdrName, Type)] -- List of variables to match,
                              -- along with their types
         -> LPat RdrName      -- Resulting product pattern
-- A constructor with no fields matches against U1.
mkProd_P _  _ []     = mkM1_P (nlNullaryConPat u1DataCon_RDR)
mkProd_P gk _ varTys = mkM1_P (foldBal prod appVars)
                       -- These M1s are meta-information for the constructor
  where
    appVars = unzipWith (wrapArg_P gk) varTys
    prod a b = prodDataCon_RDR `nlConPat` [a,b]
-- | Pattern counterpart of 'wrapArg_E': match one field through the
-- boxing constructor (Generic) or through M1 directly (Generic1).
wrapArg_P :: GenericKind -> RdrName -> Type -> LPat RdrName
wrapArg_P Gen0 v ty = mkM1_P (boxRepRDR ty `nlConVarPat` [v])
                      -- This M1 is meta-information for the selector
wrapArg_P Gen1 v _  = m1DataCon_RDR `nlConVarPat` [v]
-- | Fresh local variable name @g<u>@ used for generated patterns/expressions.
mkGenericLocal :: US -> RdrName
mkGenericLocal u = mkVarUnqual (mkFastString ("g" ++ show u))

-- | Wrap an expression in an M1 application.
mkM1_E :: LHsExpr RdrName -> LHsExpr RdrName
mkM1_E e = nlHsVar m1DataCon_RDR `nlHsApp` e

-- | Wrap a pattern in an M1 constructor pattern.
mkM1_P :: LPat RdrName -> LPat RdrName
mkM1_P p = m1DataCon_RDR `nlConPat` [p]

-- | Syntax-level function composition: builds @x . y@.
nlHsCompose :: LHsExpr RdrName -> LHsExpr RdrName -> LHsExpr RdrName
nlHsCompose x y = compose_RDR `nlHsApps` [x, y]
-- | Variant of foldr1 for producing balanced lists
-- The error seed is lazy: it is only forced when the list is empty.
foldBal :: (a -> a -> a) -> [a] -> a
foldBal op = foldBal' op (error "foldBal: empty list")
-- | Balanced fold: split the list in half, fold each half, and combine
-- the results. The seed @z@ is returned only for an empty list.
foldBal' :: (a -> a -> a) -> a -> [a] -> a
foldBal' _  z []  = z
foldBal' _  _ [y] = y
foldBal' op z ys  =
    let (front, back) = splitAt (length ys `div` 2) ys
    in  foldBal' op z front `op` foldBal' op z back
{-
Note [Generics and unlifted types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Normally, all constants are marked with K1/Rec0. The exception to this rule is
when a data constructor has an unlifted argument (e.g., Int#, Char#, etc.). In
that case, we must use a data family instance of URec (from GHC.Generics) to
mark it. As a result, before we can generate K1 or unK1, we must first check
to see if the type is actually one of the unlifted types for which URec has a
data family instance; if so, we generate that instead.
See wiki:Commentary/Compiler/GenericDeriving#Handlingunliftedtypes for more
details on why URec is implemented the way it is.
Note [Generating a correctly typed Rep instance]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
tc_mkRepTy derives the RHS of the Rep(1) type family instance when deriving
Generic(1). That is, it derives the ellipsis in the following:
instance Generic Foo where
type Rep Foo = ...
However, tc_mkRepTy only has knowledge of the *TyCon* of the type for which
a Generic(1) instance is being derived, not the fully instantiated type. As a
result, tc_mkRepTy builds the most generalized Rep(1) instance possible using
the type variables it learns from the TyCon (i.e., it uses tyConTyVars). This
can cause problems when the instance has instantiated type variables
(see Trac #11732). As an example:
data T a = MkT a
deriving instance Generic (T Int)
==>
instance Generic (T Int) where
type Rep (T Int) = (... (Rec0 a)) -- wrong!
-XStandaloneDeriving is one way for the type variables to become instantiated.
Another way is when Generic1 is being derived for a datatype with a visible
kind binder, e.g.,
data P k (a :: k) = MkP k deriving Generic1
==>
instance Generic1 (P *) where
type Rep1 (P *) = (... (Rec0 k)) -- wrong!
See Note [Unify kinds in deriving] in TcDeriv.
In any such scenario, we must prevent a discrepancy between the LHS and RHS of
a Rep(1) instance. To do so, we create a type variable substitution that maps
the tyConTyVars of the TyCon to their counterparts in the fully instantiated
type. (For example, using T above as example, you'd map a :-> Int.) We then
apply the substitution to the RHS before generating the instance.
Note [Handling kinds in a Rep instance]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Because Generic1 is poly-kinded, the representation types were generalized to
be kind-polymorphic as well. As a result, tc_mkRepTy must explicitly apply
the kind of the instance being derived to all the representation type
constructors. For instance, if you have
data Empty (a :: k) = Empty deriving Generic1
Then the generated code is now approximately (with -fprint-explicit-kinds
syntax):
instance Generic1 k (Empty k) where
type Rep1 k (Empty k) = U1 k
Most representation types have only one kind variable, making them easy to deal
with. The only non-trivial case is (:.:), which is only used in Generic1
instances:
newtype (:.:) (f :: k2 -> *) (g :: k1 -> k2) (p :: k1) =
Comp1 { unComp1 :: f (g p) }
Here, we do something a bit counter-intuitive: we make k1 be the kind of the
instance being derived, and we always make k2 be *. Why *? It's because
the code that GHC generates using (:.:) is always of the form x :.: Rec1 y
for some types x and y. In other words, the second type to which (:.:) is
applied always has kind k -> *, for some kind k, so k2 cannot possibly be
anything other than * in a generated Generic1 instance.
-}
| vikraman/ghc | compiler/typecheck/TcGenGenerics.hs | bsd-3-clause | 39,000 | 0 | 21 | 11,498 | 6,427 | 3,415 | 3,012 | 461 | 17 |
-- | Umbrella module: re-exports "Statistics.Shitty.Regression".
module Statistics.Shitty
    ( module X
    ) where

import Statistics.Shitty.Regression as X
| tranma/shitty-statistics | src/Statistics/Shitty.hs | bsd-3-clause | 90 | 0 | 4 | 16 | 20 | 14 | 6 | 3 | 0 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Time.ZH.TW.Corpus
( allExamples
) where
import Data.String
import Prelude
import Duckling.Testing.Types hiding (examples)
import Duckling.Time.Corpus
import Duckling.Time.Types hiding (Month)
import Duckling.TimeGrain.Types hiding (add)
-- | Corpus examples for zh_TW time parsing.
allExamples :: [Example]
allExamples = concat
  [ examples (datetime (2013, 10, 10, 0, 0, 0) Day)
             -- National Day, October 10: simplified and traditional spellings
             [ "国庆"
             , "國慶"
             , "国庆节"
             , "国庆節"
             , "國慶节"
             , "國慶節"
             ]
  , examples (datetimeInterval ((2013, 10, 10, 18, 0, 0), (2013, 10, 11, 0, 0, 0)) Hour)
             -- "evening of National Day": 18:00 to midnight
             [ "国庆节晚上"
             , "國慶節晚上"
             ]
  ]
| facebookincubator/duckling | Duckling/Time/ZH/TW/Corpus.hs | bsd-3-clause | 946 | 0 | 11 | 260 | 202 | 130 | 72 | 21 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
module Math.Probably.IterLap where
import Math.Probably.Sampler
import Math.Probably.FoldingStats
import Math.Probably.MCMC (empiricalMean, empiricalCovariance)
import Math.Probably.PDF (posdefify)
import Debug.Trace
import Data.Maybe
import Numeric.LinearAlgebra hiding (find)
-- | Mean and covariance of weighted sample points.
-- NOTE(review): assumes the weights sum to 1 (the mean is a plain
-- weighted sum) -- confirm callers normalise, as 'improve' does.
-- Crashes on an empty input list ('head').
weightedMeanCov :: [(Vector Double, Double)] -> (Vector Double, Matrix Double)
weightedMeanCov pts = (mu, cov) where
    mu = sum $ flip map pts $ \(x,w) -> scale w x
    -- NOTE(review): despite the name, this is the dimensionality of each
    -- point, not the number of points.
    npts = dim $ fst $ head pts
    sumSqrWs = sum $ flip map pts $ \(x,w) -> w*w
    -- Bias-correction factor for weighted covariance: 1 / (1 - sum w_i^2).
    factor = 1/(1-sumSqrWs)
    -- NOTE(review): per-coordinate means here are unweighted, while mu is
    -- weighted -- verify this asymmetry is intended.
    xmeans :: [Double]
    xmeans = flip map [0..npts-1] $ \i -> mean $ flip map pts $ \(x,w) -> x@>i
    cov = scale factor $ buildMatrix npts npts $ \(j,k) ->
            sum $ flip map pts $ \(xi,wi) -> wi*(xi@>j - xmeans!!j)*
                                             (xi@>k - xmeans!!k)
-- | Arithmetic mean of a list. The empty list yields NaN (0/0),
-- matching the original behaviour.
mean :: [Double] -> Double
mean values = total / count
  where
    total = sum values
    count = fromIntegral (length values)
-- | One importance-sampling refinement step: draw @n@ points from the
-- current Gaussian, weight them by exp of their (shifted) log-density
-- under @f@, and return the weighted mean and an inflated (x2),
-- positive-definite covariance.
improve :: Int -> (Vector Double -> Double)
        -> (Vector Double, Matrix Double)
        -> Sampler (Vector Double, Matrix Double)
improve n f (mu, cov) = do
  xs <- sequence $ replicate n $ multiNormal mu cov
  -- Drop samples whose log-density is NaN or infinite.
  let xps = catMaybes $ map (\x-> let fx = f x in if isNaN fx || isInfinite fx then Nothing else Just (x,fx)) xs
      -- Subtract the minimum before exponentiating, for numerical stability.
      pmin = runStat (before (minFrom 1e80) snd) xps
      psubmin = map (\(x,p)-> (x, exp $ p - pmin)) xps
      -- Normalise the weights to sum to 1.
      psum = sum $ map snd psubmin
      ws = map (\(x,p)-> (x,p/psum)) psubmin
      (mu', cov') = weightedMeanCov $ ws
  return $ (mu', posdefify $ scale 2 cov')
-- | Apply a monadic step function @n@ times, threading the result.
-- Generalized from @Sampler@ to any 'Monad' (backward-compatible), and
-- made total: a negative count now returns the input unchanged instead
-- of recursing forever.
iterateM :: Monad m => Int -> (a -> m a) -> a -> m a
iterateM n f x
    | n <= 0    = return x
    | otherwise = f x >>= iterateM (n - 1) f
-- | Iterated Laplace approximation: run 'improve' once per sample count
-- in the schedule, threading the (mean, covariance) estimate through.
iterLap :: [Int]
        -> (Vector Double -> Double)
        -> (Vector Double, Matrix Double)
        -> Sampler (Vector Double, Matrix Double)
iterLap schedule f estimate = case schedule of
    []       -> return estimate
    (n : ns) -> improve n f estimate >>= iterLap ns f
| glutamate/probably | Math/Probably/IterLap.hs | bsd-3-clause | 1,940 | 0 | 18 | 492 | 922 | 486 | 436 | 47 | 2 |
{-# LANGUAGE BangPatterns #-}
module Data.Digest.GroestlMutable (
fM,
outputTransform,
parseMessage,
pad,
truncate,
DigestLength(..),
GroestlCtx(..),
groestlInit,
groestlUpdate,
groestlFinalize,
printWAsHex,
printAsHex
) where
import Data.Word (Word8, Word64)
import Data.Int (Int64)
import Data.Bits (xor, shiftR, setBit)
import qualified Data.Binary as B
import qualified Data.Binary.Get as G
import qualified Data.Serialize as S
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as L
import qualified Data.Vector.Unboxed as V
import qualified Data.Vector.Unboxed.Mutable as MV
import Control.Monad (liftM, foldM, void, (>=>))
import Control.Monad.ST (ST, runST)
import Control.Arrow ((***))
import Prelude hiding (truncate)
import Text.Printf (printf)
import Data.Digest.GroestlTables
-------------------------------------- Data types used in the implementation ----------------------
data DigestLength = G224 | G256 | G384 | G512
deriving (Eq, Ord)
type BlockLength = Int64
newtype HashState s = H { getState :: (MV.STVector s Word64) }
---------------------------------- A port of the optimized 64-bit C version -----------------------
{-# INLINE fM #-}
-- | Groestl compression function: f(h, m) = P(h xor m) xor Q(m) xor h.
-- NOTE(review): 'V.unsafeThaw' lets the permutations mutate @inP@ and @m@
-- in place -- callers must not reuse @m@ afterwards; confirm call sites.
fM :: BlockLength -> V.Vector Word64 -> V.Vector Word64 -> ST s (V.Vector Word64)
fM b h m = do
    outP <- V.unsafeFreeze . getState =<< permPM b =<< H `liftM` V.unsafeThaw inP
    outQ <- V.unsafeFreeze . getState =<< permQM b =<< H `liftM` V.unsafeThaw m
    return $ V.zipWith3 xor3 h outQ outP
  where xor3 x1 x2 x3 = x1 `xor` x2 `xor` x3
        inP = V.zipWith xor h m
{-# INLINE permPM #-}
-- | The P permutation: 10 rounds for 512-bit blocks, 14 for 1024-bit.
-- Previously the patterns were non-exhaustive; an unsupported block
-- length now fails with an explicit message instead of an opaque
-- pattern-match error.
permPM :: BlockLength -> HashState s -> ST s (HashState s)
permPM 512  x = V.foldM' rnd512PM  x (V.enumFromStepN 0 0x0100000000000000 10)
permPM 1024 x = V.foldM' rnd1024PM x (V.enumFromStepN 0 0x0100000000000000 14)
permPM b    _ = error ("permPM: unsupported block length " ++ show b)
-- !!! Inlining this function leads to 4 times the run-time.
-- See also: rnd512QM
--{-# INLINE permQM #-}
-- | The Q permutation: 10 rounds for 512-bit blocks, 14 for 1024-bit.
-- Previously the patterns were non-exhaustive; an unsupported block
-- length now fails with an explicit message instead of an opaque
-- pattern-match error.
permQM :: BlockLength -> HashState s -> ST s (HashState s)
permQM 512  x = V.foldM' rnd512QM  x (V.enumFromN 0 10)
permQM 1024 x = V.foldM' rnd1024QM x (V.enumFromN 0 14)
permQM b    _ = error ("permQM: unsupported block length " ++ show b)
{-# INLINE rnd512PM #-}
-- | One round of the P permutation on the 512-bit state: XOR in the
-- P-round constants, then compute each output column via table lookups
-- (SubBytes/ShiftBytes/MixBytes combined in 'extractColumn').
rnd512PM :: HashState s -> Word64 -> ST s (HashState s)
rnd512PM x rndNr = do
    addRndConstant x 0 rndNr 0x0000000000000000
    addRndConstant x 1 rndNr 0x1000000000000000
    addRndConstant x 2 rndNr 0x2000000000000000
    addRndConstant x 3 rndNr 0x3000000000000000
    addRndConstant x 4 rndNr 0x4000000000000000
    addRndConstant x 5 rndNr 0x5000000000000000
    addRndConstant x 6 rndNr 0x6000000000000000
    addRndConstant x 7 rndNr 0x7000000000000000
    y <- MV.unsafeNew 8
    extractColumn 0 x y 0 1 2 3 4 5 6 7
    extractColumn 1 x y 1 2 3 4 5 6 7 0
    extractColumn 2 x y 2 3 4 5 6 7 0 1
    extractColumn 3 x y 3 4 5 6 7 0 1 2
    extractColumn 4 x y 4 5 6 7 0 1 2 3
    extractColumn 5 x y 5 6 7 0 1 2 3 4
    extractColumn 6 x y 6 7 0 1 2 3 4 5
    extractColumn 7 x y 7 0 1 2 3 4 5 6
    return (H y)
-- !!! Inlining this function leads to 4 times the run-time.
-- Why?! It's practically the same as rnd512PM, so why does this perform so badly?
--{-# INLINE rnd512QM #-}
-- | One round of the Q permutation on the 512-bit state: like
-- 'rnd512PM' but with the Q-round constants and a different
-- column-selection schedule.
rnd512QM :: HashState s -> Word64 -> ST s (HashState s)
rnd512QM x rndNr = do
    addRndConstant x 0 rndNr 0xffffffffffffffff
    addRndConstant x 1 rndNr 0xffffffffffffffef
    addRndConstant x 2 rndNr 0xffffffffffffffdf
    addRndConstant x 3 rndNr 0xffffffffffffffcf
    addRndConstant x 4 rndNr 0xffffffffffffffbf
    addRndConstant x 5 rndNr 0xffffffffffffffaf
    addRndConstant x 6 rndNr 0xffffffffffffff9f
    addRndConstant x 7 rndNr 0xffffffffffffff8f
    y <- MV.unsafeNew 8
    extractColumn 0 x y 1 3 5 7 0 2 4 6
    extractColumn 1 x y 2 4 6 0 1 3 5 7
    extractColumn 2 x y 3 5 7 1 2 4 6 0
    extractColumn 3 x y 4 6 0 2 3 5 7 1
    extractColumn 4 x y 5 7 1 3 4 6 0 2
    extractColumn 5 x y 6 0 2 4 5 7 1 3
    extractColumn 6 x y 7 1 3 5 6 0 2 4
    extractColumn 7 x y 0 2 4 6 7 1 3 5
    return (H y)
-- | One round of the P permutation on the 1024-bit (16-word) state.
rnd1024PM :: HashState s -> Word64 -> ST s (HashState s)
rnd1024PM x rndNr = do
    addRndConstant x 0  rndNr 0x0000000000000000
    addRndConstant x 1  rndNr 0x1000000000000000
    addRndConstant x 2  rndNr 0x2000000000000000
    addRndConstant x 3  rndNr 0x3000000000000000
    addRndConstant x 4  rndNr 0x4000000000000000
    addRndConstant x 5  rndNr 0x5000000000000000
    addRndConstant x 6  rndNr 0x6000000000000000
    addRndConstant x 7  rndNr 0x7000000000000000
    addRndConstant x 8  rndNr 0x8000000000000000
    addRndConstant x 9  rndNr 0x9000000000000000
    addRndConstant x 10 rndNr 0xa000000000000000
    addRndConstant x 11 rndNr 0xb000000000000000
    addRndConstant x 12 rndNr 0xc000000000000000
    addRndConstant x 13 rndNr 0xd000000000000000
    addRndConstant x 14 rndNr 0xe000000000000000
    addRndConstant x 15 rndNr 0xf000000000000000
    y <- MV.unsafeNew 16
    extractColumn 15 x y 15 0 1 2 3 4 5 10
    extractColumn 14 x y 14 15 0 1 2 3 4 9
    extractColumn 13 x y 13 14 15 0 1 2 3 8
    extractColumn 12 x y 12 13 14 15 0 1 2 7
    extractColumn 11 x y 11 12 13 14 15 0 1 6
    extractColumn 10 x y 10 11 12 13 14 15 0 5
    extractColumn 9  x y 9 10 11 12 13 14 15 4
    extractColumn 8  x y 8 9 10 11 12 13 14 3
    extractColumn 7  x y 7 8 9 10 11 12 13 2
    extractColumn 6  x y 6 7 8 9 10 11 12 1
    extractColumn 5  x y 5 6 7 8 9 10 11 0
    extractColumn 4  x y 4 5 6 7 8 9 10 15
    extractColumn 3  x y 3 4 5 6 7 8 9 14
    extractColumn 2  x y 2 3 4 5 6 7 8 13
    extractColumn 1  x y 1 2 3 4 5 6 7 12
    extractColumn 0  x y 0 1 2 3 4 5 6 11
    return (H y)
-- | One round of the Q permutation on the 1024-bit (16-word) state.
rnd1024QM :: HashState s -> Word64 -> ST s (HashState s)
rnd1024QM x rndNr = do
    addRndConstant x 0  rndNr 0xffffffffffffffff
    addRndConstant x 1  rndNr 0xffffffffffffffef
    addRndConstant x 2  rndNr 0xffffffffffffffdf
    addRndConstant x 3  rndNr 0xffffffffffffffcf
    addRndConstant x 4  rndNr 0xffffffffffffffbf
    addRndConstant x 5  rndNr 0xffffffffffffffaf
    addRndConstant x 6  rndNr 0xffffffffffffff9f
    addRndConstant x 7  rndNr 0xffffffffffffff8f
    addRndConstant x 8  rndNr 0xffffffffffffff7f
    addRndConstant x 9  rndNr 0xffffffffffffff6f
    addRndConstant x 10 rndNr 0xffffffffffffff5f
    addRndConstant x 11 rndNr 0xffffffffffffff4f
    addRndConstant x 12 rndNr 0xffffffffffffff3f
    addRndConstant x 13 rndNr 0xffffffffffffff2f
    addRndConstant x 14 rndNr 0xffffffffffffff1f
    addRndConstant x 15 rndNr 0xffffffffffffff0f
    y <- MV.unsafeNew 16
    extractColumn 15 x y 0 2 4 10 15 1 3 5
    extractColumn 14 x y 15 1 3 9 14 0 2 4
    extractColumn 13 x y 14 0 2 8 13 15 1 3
    extractColumn 12 x y 13 15 1 7 12 14 0 2
    extractColumn 11 x y 12 14 0 6 11 13 15 1
    extractColumn 10 x y 11 13 15 5 10 12 14 0
    extractColumn 9  x y 10 12 14 4 9 11 13 15
    extractColumn 8  x y 9 11 13 3 8 10 12 14
    extractColumn 7  x y 8 10 12 2 7 9 11 13
    extractColumn 6  x y 7 9 11 1 6 8 10 12
    extractColumn 5  x y 6 8 10 0 5 7 9 11
    extractColumn 4  x y 5 7 9 15 4 6 8 10
    extractColumn 3  x y 4 6 8 14 3 5 7 9
    extractColumn 2  x y 3 5 7 13 2 4 6 8
    extractColumn 1  x y 2 4 6 12 1 3 5 7
    extractColumn 0  x y 1 3 5 11 0 2 4 6
    return (H y)
{-# INLINE addRndConstant #-}
-- | XOR the round constant and round number into word @i@ of the state.
addRndConstant :: HashState s -> Int -> Word64 -> Word64 -> ST s ()
addRndConstant (H v) i rndNr c =
    MV.unsafeRead v i >>= \w -> MV.unsafeWrite v i (w `xor` c `xor` rndNr)
-- | Compute output word @i@ by looking up the bytes of the eight
-- selected input columns in the precomputed tables and XOR-ing the
-- results together.
extractColumn :: Int
              -> HashState s
              -> MV.STVector s Word64
              -> Int -> Int -> Int -> Int
              -> Int -> Int -> Int -> Int
              -> ST s ()
extractColumn i x y c0 c1 c2 c3 c4 c5 c6 c7 = do
    x0 <- tableLookup x 0 c0
    x1 <- tableLookup x 1 c1
    x2 <- tableLookup x 2 c2
    x3 <- tableLookup x 3 c3
    x4 <- tableLookup x 4 c4
    x5 <- tableLookup x 5 c5
    x6 <- tableLookup x 6 c6
    x7 <- tableLookup x 7 c7
    MV.unsafeWrite y i (x0 `xor` x1 `xor` x2 `xor` x3 `xor` x4 `xor` x5 `xor` x6 `xor` x7)
{-# INLINE tableLookup #-}
-- | Look up table @i@ at byte @i@ of state word @c@ (tables are laid
-- out as 8 contiguous runs of 256 Word64 entries).
tableLookup :: HashState s -> Int -> Int -> ST s Word64
tableLookup (H x) i c = do
    w <- MV.unsafeRead x c
    return . V.unsafeIndex tables $ i * 256 + fromIntegral (w # i)
  where -- Extract byte n (big-endian, 0 = most significant) from a Word64
        (#) :: Word64 -> Int -> Word8
        w # n = fromIntegral $ shiftR w (8 * (7 - n))
-- | Final output transformation: omega(x) = P(x) xor x.
outputTransform :: BlockLength -> V.Vector Word64 -> V.Vector Word64
outputTransform blockLen x = V.zipWith xor permuted x
  where
    permuted = V.create $ do
        mutable  <- V.thaw x
        finalSt  <- permPM blockLen (H mutable)
        return (getState finalSt)
---------------------------- Parsing, padding and truncating ------------------------------
-- | Split a lazy message into block-sized word vectors; the final
-- (possibly partial) chunk is handed to 'pad'. @dataLen@ is in bits,
-- @blockLen@ is the block size in bits.
parseMessage :: Int64 -> Int64 -> L.ByteString -> [V.Vector Word64]
parseMessage dataLen blockLen xs
    | L.null suf = pad dataLen blockLen pre
    | otherwise  = parseBlock pre : parseMessage dataLen blockLen suf
  where
    (!pre,suf) = L.splitAt byteBlockLen xs
    byteBlockLen = blockLen `div` 8
{-# INLINE parseBlock #-}
-- | Parse a lazy bytestring into big-endian Word64s, 8 bytes at a time.
parseBlock :: L.ByteString -> V.Vector Word64
parseBlock = V.unfoldr (\bs -> if L.null bs then Nothing else Just (G.runGet G.getWord64be bs, L.drop 8 bs))
-- Groestl padding: append a 1-bit, zero-fill, and store the total block
-- count in the last 64 bits. The guards cover: empty input, input that
-- ends exactly on a block boundary, enough room for pad+counter in one
-- block, and the spill-over case needing an extra block.
-- TODO(review): this function is hard to follow and deserves a cleanup.
pad :: Int64 -> BlockLength -> L.ByteString -> [V.Vector Word64]
pad dataLen blockLen xs
    | dataLen == 0 || L.null xs               = [pad1AndBlockNumber zeroBlock]
    | dataLen `rem` blockLen == 0             = [parseBlock xs, pad1AndBlockNumber zeroBlock]
    | dataLen `rem` blockLen <= blockLen - 65 = [pad1AndBlockNumber fullBlock]
    | otherwise                               = [onePadded, blockNumberPadded]
  where
    pad1AndBlockNumber = V.modify (padOne byte bit >=> padBlockNumber blocks)
    onePadded          = V.modify (void . padOne byte bit) fullBlock
    blockNumberPadded  = V.modify (padBlockNumber (blocks + 1)) zeroBlock
    -- Word index and bit position of the 1-bit that starts the padding.
    byte = (fromIntegral (dataLen `div` 64)) `rem` vectorLen
    bit  = fromIntegral (63 - dataLen `rem` 64)
    -- The final data chunk zero-extended to a whole block.
    fullBlock = parseBlock . L.take (blockLen `div` 8) . L.append xs $ L.repeat 0x00
    blocks    = fromIntegral $ dataLen `div` blockLen + 1
    zeroBlock = V.replicate vectorLen 0x00
    vectorLen = fromIntegral $ blockLen `div` 64
-- | Store the total block count in the final word of the block.
padBlockNumber :: Word64 -> MV.STVector s Word64 -> ST s ()
padBlockNumber nBlocks v =
    let lastIx = MV.length v - 1
    in  MV.write v lastIx nBlocks
-- | Set the given bit of word @i@ (the 1-bit that begins the padding),
-- returning the vector so it can be chained with '>=>'.
padOne :: Int -> Int -> MV.STVector s Word64 -> ST s (MV.STVector s Word64)
padOne i bitIx v = do
    w <- MV.read v i
    MV.write v i (setBit w bitIx)
    return v
-- | Keep only the digest-sized suffix of the final state: slice out the
-- trailing words and, for G224, drop the 4 extra leading bytes.
truncate :: DigestLength -> V.Vector Word64 -> L.ByteString
truncate G224 = L.drop 4 . L.concat . map B.encode . V.toList . V.unsafeSlice 4 4
truncate G256 = L.concat . map B.encode . V.toList . V.unsafeSlice 4 4
truncate G384 = L.concat . map B.encode . V.toList . V.unsafeSlice 10 6
truncate G512 = L.concat . map B.encode . V.toList . V.unsafeSlice 8 8
--------------------------------- Iterative hashing --------------------

-- | Streaming hash context: how much data has been consumed plus the
-- current chaining value.
data GroestlCtx = Ctx {
    dataParsed :: !Int64,             -- bits hashed so far
    digestLength :: DigestLength,     -- which Groestl variant (224/256/384/512)
    blockLength :: BlockLength,       -- message block size in bits
    hashState :: V.Vector Word64      -- current chaining value
    }
-- | Fresh streaming context: nothing hashed yet, chaining value @h0@.
groestlInit :: DigestLength -> BlockLength -> V.Vector Word64 -> GroestlCtx
groestlInit digestLen blockLen initialState =
    Ctx { dataParsed   = 0
        , digestLength = digestLen
        , blockLength  = blockLen
        , hashState    = initialState
        }
-- | Feed more data into the context, compressing one block per
-- recursive step.
-- NOTE(review): assumes input arrives in whole blocks -- a chunk shorter
-- than the block size would be hashed unpadded and dataParsed would
-- still advance by a full block; confirm callers buffer appropriately.
groestlUpdate :: GroestlCtx -> BS.ByteString -> GroestlCtx
groestlUpdate ctx bs
    | BS.null bs = ctx
    | otherwise  = result
  where
    -- Split off one block, hash it, and recurse on the remainder.
    (!newState, result) = foldUpdate . BS.splitAt blockByteLen $ bs
    foldUpdate = hashBlock *** groestlUpdate newCtx
    hashBlock bs = runST $ fM blockLen (hashState ctx) $ parseBlock' bs
    newCtx = Ctx (dataParsed ctx + blockLen) (digestLength ctx) blockLen newState
    blockLen = blockLength ctx
    blockByteLen = fromIntegral $ blockLen `div` 8
{-# INLINE parseBlock' #-}
-- | Strict-bytestring variant of 'parseBlock': unfold big-endian
-- Word64s until fewer than 8 bytes remain.
parseBlock' :: BS.ByteString -> V.Vector Word64
parseBlock' = V.unfoldr p
  where p bs = case S.runGet S.getWord64be bs of
                 Left _  -> Nothing
                 Right w -> Just (w, BS.drop 8 bs)
-- | Pad and compress the trailing bytes, apply the output
-- transformation, and truncate to the digest length.
groestlFinalize :: GroestlCtx -> BS.ByteString -> L.ByteString
groestlFinalize ctx bs = runST $ liftM (truncate digestLen . outputTransform blockLen) $ padLast bs
  where
    padLast   = foldM (fM blockLen) prevState . pad dataLen blockLen . L.pack . BS.unpack
    -- Total message length in bits, including this final fragment.
    dataLen   = dataParsed ctx + fromIntegral (BS.length bs * 8)
    prevState = hashState ctx
    digestLen = digestLength ctx
    blockLen  = blockLength ctx
------------------------------------ Some convenience functions -----------------------
-- | Render a Word64 as a zero-padded, 16-digit hex literal ("0x...").
printWAsHex :: Word64 -> String
printWAsHex w = printf "0x%016x" w
-- | Render a lazy bytestring as a "0x"-prefixed hex string, two digits
-- per byte.
printAsHex :: L.ByteString -> String
printAsHex bs = "0x" ++ concatMap (printf "%02x") (L.unpack bs)
| hakoja/SHA3 | Data/Digest/GroestlMutable.hs | bsd-3-clause | 13,346 | 0 | 17 | 3,765 | 4,726 | 2,314 | 2,412 | 265 | 2 |
{- Data/Singletons/Single/Type.hs
(c) Richard Eisenberg 2013
eir@cis.upenn.edu
Singletonizes types.
-}
module Data.Singletons.Single.Type where
import Language.Haskell.TH.Desugar
import Language.Haskell.TH.Syntax
import Data.Singletons.Names
import Data.Singletons.Single.Monad
import Data.Singletons.Promote.Type
import Data.Singletons.Util
import Control.Monad
-- | Singletonize a type: each argument @a@ becomes @Sing t@ for a fresh
-- tyvar @t@, and the result becomes @Sing (prom t1 ... tn :: prom_res)@.
singType :: DType -- the promoted version of the thing classified by...
         -> DType -- ... this type
         -> SgM ( DType -- the singletonized type
                , Int   -- the number of arguments
                , [Name] -- the names of the tyvars used in the sing'd type
                , DKind ) -- the kind of the result type
singType prom ty = do
  let (_, cxt, args, res) = unravel ty
      num_args = length args
  cxt' <- mapM singPred cxt
  -- One fresh kind variable per term-level argument.
  arg_names <- replicateM num_args (qNewName "t")
  prom_args <- mapM promoteType args
  prom_res  <- promoteType res
  let args' = map (\n -> singFamily `DAppT` (DVarT n)) arg_names
      -- Annotate the promoted application with its result kind so the
      -- Sing application is well-kinded.
      res'  = singFamily `DAppT` (foldl apply prom (map DVarT arg_names) `DSigT` prom_res)
      tau   = ravel args' res'
  let ty' = DForallT (zipWith DKindedTV arg_names prom_args)
                     cxt' tau
  return (ty', num_args, arg_names, prom_res)
-- | Singletonize a constraint; starts the worker with no accumulated
-- type arguments.
singPred :: DPred -> SgM DPred
singPred = singPredRec []
-- | Worker for 'singPred': peel off constraint applications,
-- accumulating the type arguments, then singletonize the head class by
-- promoting the arguments and applying the corresponding @SC@ class.
-- Fixed a typo in the user-facing error message ("contraint").
singPredRec :: [DType] -> DPred -> SgM DPred
singPredRec ctx (DAppPr pr ty) = singPredRec (ty : ctx) pr
singPredRec _ctx (DSigPr _pr _ki) =
  fail "Singling of constraints with explicit kinds not yet supported"
singPredRec _ctx (DVarPr _n) =
  fail "Singling of constraint variables not yet supported"
singPredRec ctx (DConPr n)
  | n == equalityName
  = fail "Singling of type equality constraints not yet supported"
  | otherwise = do
    kis <- mapM promoteType ctx
    let sName = singClassName n
    return $ foldl DAppPr (DConPr sName) kis
singPredRec _ctx DWildCardPr = return DWildCardPr -- it just might work
| int-index/singletons | src/Data/Singletons/Single/Type.hs | bsd-3-clause | 1,984 | 0 | 15 | 466 | 516 | 269 | 247 | 43 | 1 |
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE DataKinds #-}
module Web.ApiAi.API.Query
( ApiAiQueryAPI
, query
, queryM
) where
import ClassyPrelude
import Web.ApiAi.API.Core
import Web.ApiAi.Data.Core
import Servant.Client
import Servant.API
import Data.Proxy
import Control.Monad.State
import Web.ApiAi.Requests.Query
import Web.ApiAi.Responses.Query
import Network.HTTP.Client ( newManager )
import Network.HTTP.Client.TLS ( tlsManagerSettings )
import Servant.API.JSONUtf8
-- | Servant description of api.ai's @/query@ endpoint: client-token
-- auth, a protocol-version query parameter @v@, a UTF-8 JSON request
-- body, and a JSON response.
type ApiAiQueryAPI = AuthProtect ClientToken :> "query" :> QueryParam "v" Text
                                             :> ReqBody '[JSONUtf8] (WithDefaultSessionId QueryRequest)
                                             :> Post '[JSON] QueryResponse
-- | Protocol version date sent as the @v@ query parameter.
versionDate :: Text
versionDate = "20170211"

queryAPI :: Proxy ApiAiQueryAPI
queryAPI = Proxy

-- | Raw servant client function for the query endpoint.
query_ :: AuthenticateReq (AuthProtect ClientToken) -> Maybe Text -> WithDefaultSessionId QueryRequest -> ClientM QueryResponse
query_ = client queryAPI
-- | Issue a query inside the 'ApiAiClient' monad, supplying the stored
-- auth credentials and the current session id.
queryM :: QueryRequest -> ApiAiClient QueryResponse
queryM req = do
    auth <- getAuth
    sess <- gets clientSession
    lift (query_ auth (Just versionDate) (WithDefaultSessionId req sess))
-- | One-shot query: run 'queryM' in a fresh client state built from the
-- given token and session id.
query :: HasClientToken t => t -> SessionId -> QueryRequest -> IO (Either ServantError QueryResponse)
query t s r = runWithState (queryM r) $ ApiAiClientState (getClientToken t) $ Just s
| CthulhuDen/api-ai | src/Web/ApiAi/API/Query.hs | bsd-3-clause | 1,328 | 0 | 11 | 240 | 364 | 195 | 169 | 34 | 1 |
module Main(main) where
import Control.Exception (bracket_)
import System.Environment (getArgs, getProgName)
import System.IO (hSetEcho, hFlush, stdin, stdout)
import Web.Zenfolio.API
import qualified Web.Zenfolio.Photos as Photos
-- | Log in with the given credentials, then load the photo with the
-- given id (with API debugging enabled) and print it to stdout.
dumpPhoto :: LoginName -> Password -> PhotoID -> ZM ()
dumpPhoto user pass photoId = do
    tok <- login user pass
    withToken tok $ withDebug True $ do
        p <- Photos.loadPhoto photoId
        liftIO (putStrLn ("Photo: " ++ show p))
-- | Print a prompt and read one line from stdin with terminal echo
-- disabled; echo is restored (and a newline printed) even if reading
-- fails, via 'bracket_'.
prompt :: String -> IO Password
prompt msg = do
    putStr msg
    hFlush stdout
    bracket_ hideEcho restoreEcho getLine
  where
    hideEcho    = hSetEcho stdin False
    restoreEcho = hSetEcho stdin True >> putStrLn ""
-- | Usage: @prog user-name photo-id@.  Prompts for the password, then
-- dumps the requested photo.  Note: 'read' on the photo id will raise
-- an exception on non-numeric input, matching the original behaviour.
main :: IO ()
main = do
    args <- getArgs
    case args of
        (user : photoId : _) -> do
            pass <- prompt "Password: "
            zenfolio $ dumpPhoto user pass (read photoId)
        _ -> do
            self <- getProgName
            putStrLn ("Usage: " ++ self ++ " user-name photo-id")
| md5/hs-zenfolio | examples/LoadPhoto.hs | bsd-3-clause | 1,053 | 0 | 15 | 315 | 337 | 168 | 169 | 29 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Main (main) where
import Kiosk.Backend.Data
import Kiosk.Backend.Form
import Kiosk.Backend.Data.DataTemplateEntry (fromDataTemplateEntryToXlsxWorksheet)
import Criterion.Main (bench, bgroup,
defaultMain, whnf)
import Data.Maybe (fromJust)
import Data.UUID (fromString)
-- Local alias for the function under benchmark; keeps the bench
-- expressions short.  NOTE(review): no top-level type signature here --
-- it inherits whatever type 'fromDataTemplateEntryToXlsxWorksheet' has.
fromDataTemplateEntryToXlsx = fromDataTemplateEntryToXlsxWorksheet
-- | Benchmark converting 300/600/900 data-template entries to an xlsx
-- worksheet.  'whnf' evaluates only to weak head normal form, as in the
-- original benchmark setup.
main :: IO ()
main = defaultMain [bgroup "from data template entries to xlsx" benches]
  where
    -- One benchmark per entry count, sharing the same conversion.
    entryBench label n =
      bench label (whnf fromDataTemplateEntryToXlsx (benchDataTemplateEntries n))
    benches =
      [ entryBench "300 entries" 300
      , entryBench "600 entries" 600
      , entryBench "900 entries" 900
      ]
-- | Build a list of @n@ copies of the canonical benchmark entry.
benchDataTemplateEntries :: Int -> [DataTemplateEntry]
benchDataTemplateEntries n = replicate n benchDataTemplateEntry
-- | The canonical entry used by all benchmarks: a fixed key plus a
-- 26-item template value.
benchDataTemplateEntry :: DataTemplateEntry
benchDataTemplateEntry = DataTemplateEntry
  { _dataTemplateEntryKey = benchDataTemplateEntryKey
  , _dataTemplateEntryValue = benchDataTemplateEntryValue
  }
-- | Fixed entry key for the benchmarks.  The 'fromJust' is safe here:
-- the UUID literal is a valid RFC 4122 string, so 'fromString' yields
-- @Just@.
benchDataTemplateEntryKey :: DataTemplateEntryKey
benchDataTemplateEntryKey = DataTemplateEntryKey
  { _getDate = 1
  , _getUUID = fromJust (fromString "c2cc10e1-57d6-4b6f-9899-38d972112d8c")
  , _getTicketId = TicketId (1,1)
  , _getFormId = 1
  }
-- | Fixed template value: 26 single-letter text items ("A".."Z" holding
-- "1".."26"), giving each benchmarked worksheet row a realistic width.
benchDataTemplateEntryValue :: DataTemplate
benchDataTemplateEntryValue = DataTemplate
  { templateItems =
      [TemplateItem "A" (InputTypeText (InputText "1"))
      ,TemplateItem "B" (InputTypeText (InputText "2"))
      ,TemplateItem "C" (InputTypeText (InputText "3"))
      ,TemplateItem "D" (InputTypeText (InputText "4"))
      ,TemplateItem "E" (InputTypeText (InputText "5"))
      ,TemplateItem "F" (InputTypeText (InputText "6"))
      ,TemplateItem "G" (InputTypeText (InputText "7"))
      ,TemplateItem "H" (InputTypeText (InputText "8"))
      ,TemplateItem "I" (InputTypeText (InputText "9"))
      ,TemplateItem "J" (InputTypeText (InputText "10"))
      ,TemplateItem "K" (InputTypeText (InputText "11"))
      ,TemplateItem "L" (InputTypeText (InputText "12"))
      ,TemplateItem "M" (InputTypeText (InputText "13"))
      ,TemplateItem "N" (InputTypeText (InputText "14"))
      ,TemplateItem "O" (InputTypeText (InputText "15"))
      ,TemplateItem "P" (InputTypeText (InputText "16"))
      ,TemplateItem "Q" (InputTypeText (InputText "17"))
      ,TemplateItem "R" (InputTypeText (InputText "18"))
      ,TemplateItem "S" (InputTypeText (InputText "19"))
      ,TemplateItem "T" (InputTypeText (InputText "20"))
      ,TemplateItem "U" (InputTypeText (InputText "21"))
      ,TemplateItem "V" (InputTypeText (InputText "22"))
      ,TemplateItem "W" (InputTypeText (InputText "23"))
      ,TemplateItem "X" (InputTypeText (InputText "24"))
      ,TemplateItem "Y" (InputTypeText (InputText "25"))
      ,TemplateItem "Z" (InputTypeText (InputText "26"))]
  }
| plow-technologies/cobalt-kiosk-data-template | benchmarks/CriterionBenchmarks.hs | bsd-3-clause | 3,183 | 0 | 13 | 739 | 808 | 426 | 382 | 65 | 1 |
module Control.ConstraintClasses.MonadZero
(
-- * Constraint MonadZero
CMonadZero
) where
import Control.ConstraintClasses.Domain
import Control.ConstraintClasses.Alternative
import Control.ConstraintClasses.Monad
-- base
import Data.Functor.Product
import Data.Functor.Sum
import Data.Functor.Compose
-- vector
import qualified Data.Vector as Vector
import qualified Data.Vector.Storable as VectorStorable
import qualified Data.Vector.Unboxed as VectorUnboxed
--------------------------------------------------------------------------------
-- CLASS
--------------------------------------------------------------------------------
-- | Constrained analogue of a monad with a zero: combines the
-- constrained 'CAlternative' and 'CMonad' superclasses.  The class has
-- no methods of its own; instances are empty declarations.
class (CAlternative f, CMonad f) => CMonadZero f
--------------------------------------------------------------------------------
-- INSTANCES
--------------------------------------------------------------------------------
-- base
instance CMonadZero []
instance CMonadZero Maybe
-- vector (boxed, storable and unboxed vectors)
instance CMonadZero Vector.Vector
instance CMonadZero VectorStorable.Vector
instance CMonadZero VectorUnboxed.Vector
| guaraqe/constraint-classes | src/Control/ConstraintClasses/MonadZero.hs | bsd-3-clause | 1,058 | 0 | 6 | 94 | 150 | 93 | 57 | -1 | -1 |
-- | Definitions of place kinds. Every room in the game is an instantiated
-- place kind.
module Content.PlaceKind
( -- * Group name patterns
pattern ROGUE, pattern LABORATORY, pattern ZOO, pattern BRAWL, pattern SHOOTOUT, pattern ARENA, pattern FLIGHT, pattern AMBUSH, pattern BATTLE, pattern NOISE, pattern MINE, pattern EMPTY
, pattern INDOOR_ESCAPE_DOWN, pattern INDOOR_ESCAPE_UP, pattern OUTDOOR_ESCAPE_DOWN, pattern TINY_STAIRCASE, pattern OPEN_STAIRCASE, pattern CLOSED_STAIRCASE, pattern WALLED_STAIRCASE, pattern GATED_TINY_STAIRCASE, pattern GATED_OPEN_STAIRCASE, pattern GATED_CLOSED_STAIRCASE, pattern OUTDOOR_TINY_STAIRCASE, pattern OUTDOOR_CLOSED_STAIRCASE, pattern OUTDOOR_WALLED_STAIRCASE
, groupNamesSingleton, groupNames
, -- * Content
content
) where
import Prelude ()
import Game.LambdaHack.Core.Prelude
import qualified Data.EnumMap.Strict as EM
import qualified Data.Text as T
import Game.LambdaHack.Content.PlaceKind
import Game.LambdaHack.Content.TileKind (TileKind)
import Game.LambdaHack.Definition.Defs
import Game.LambdaHack.Definition.DefsInternal
import Content.TileKind hiding (content, groupNames, groupNamesSingleton)
-- * Group name patterns
groupNamesSingleton :: [GroupName PlaceKind]
groupNamesSingleton = []
-- TODO: if we stick to the current system of generating extra kinds and their
-- group names, let's also add the generated group names to @groupNames@.
groupNames :: [GroupName PlaceKind]
groupNames =
[ROGUE, LABORATORY, ZOO, BRAWL, SHOOTOUT, ARENA, FLIGHT, AMBUSH, BATTLE, NOISE, MINE, EMPTY]
++ [INDOOR_ESCAPE_DOWN, INDOOR_ESCAPE_UP, OUTDOOR_ESCAPE_DOWN, TINY_STAIRCASE, OPEN_STAIRCASE, CLOSED_STAIRCASE, WALLED_STAIRCASE]
++ fst generatedStairs
pattern ROGUE, LABORATORY, ZOO, BRAWL, SHOOTOUT, ARENA, FLIGHT, AMBUSH, BATTLE, NOISE, MINE, EMPTY :: GroupName PlaceKind
pattern INDOOR_ESCAPE_DOWN, INDOOR_ESCAPE_UP, OUTDOOR_ESCAPE_DOWN, TINY_STAIRCASE, OPEN_STAIRCASE, CLOSED_STAIRCASE, WALLED_STAIRCASE, GATED_TINY_STAIRCASE, GATED_OPEN_STAIRCASE, GATED_CLOSED_STAIRCASE, OUTDOOR_TINY_STAIRCASE, OUTDOOR_CLOSED_STAIRCASE, OUTDOOR_WALLED_STAIRCASE :: GroupName PlaceKind
pattern ROGUE = GroupName "rogue"
pattern LABORATORY = GroupName "laboratory"
pattern ZOO = GroupName "zoo"
pattern BRAWL = GroupName "brawl"
pattern SHOOTOUT = GroupName "shootout"
pattern ARENA = GroupName "arena"
pattern FLIGHT = GroupName "flight"
pattern AMBUSH = GroupName "ambush"
pattern BATTLE = GroupName "battle"
pattern NOISE = GroupName "noise"
pattern MINE = GroupName "mine"
pattern EMPTY = GroupName "empty"
pattern INDOOR_ESCAPE_DOWN = GroupName "indoor escape down"
pattern INDOOR_ESCAPE_UP = GroupName "indoor escape up"
pattern OUTDOOR_ESCAPE_DOWN = GroupName "outdoor escape down"
pattern TINY_STAIRCASE = GroupName "tiny staircase"
pattern OPEN_STAIRCASE = GroupName "open staircase"
pattern CLOSED_STAIRCASE = GroupName "closed staircase"
pattern WALLED_STAIRCASE = GroupName "walled staircase"
-- This is a rotten compromise, because these are synthesized below,
-- so typos can happen.
pattern GATED_TINY_STAIRCASE = GroupName "gated tiny staircase"
pattern GATED_OPEN_STAIRCASE = GroupName "gated open staircase"
pattern GATED_CLOSED_STAIRCASE = GroupName "gated closed staircase"
pattern OUTDOOR_TINY_STAIRCASE = GroupName "outdoor tiny staircase"
pattern OUTDOOR_CLOSED_STAIRCASE = GroupName "outdoor closed staircase"
pattern OUTDOOR_WALLED_STAIRCASE = GroupName "outdoor walled staircase"
-- * Content
content :: [PlaceKind]
content =
[deadEnd, rect, rect2, rect3, rect4, rectWindows, glasshouse, glasshouse2, glasshouse3, pulpit, ruin, ruin2, collapsed, collapsed2, collapsed3, collapsed4, collapsed5, collapsed6, collapsed7, pillar, pillar2, pillar3, pillar4, pillar5, colonnade, colonnade2, colonnade3, colonnade4, colonnade5, colonnade6, lampPost, lampPost2, lampPost3, lampPost4, treeShade, fogClump, fogClump2, smokeClump, smokeClump2, smokeClump3FGround, bushClump, bushClump2, escapeDown, escapeDown2, escapeDown3, escapeDown4, escapeDown5, staircase1, staircase2, staircase3, staircase4, staircase5, staircase6, staircase7, staircase8, staircase9, staircase10, staircase11, staircase12, staircase13, staircase14, staircase15, staircase16, staircase17, staircase18, staircase19, staircase20, staircase21, staircase22, staircase23, staircase24, staircase25, staircase26, staircase27, staircase28, staircase29, staircase30, staircase31, staircase32, staircase33, staircase34, staircase35, staircase36, staircase37]
-- automatically generated
++ snd generatedStairs ++ generatedEscapes
deadEnd, rect, rect2, rect3, rect4, rectWindows, glasshouse, glasshouse2, glasshouse3, pulpit, ruin, ruin2, collapsed, collapsed2, collapsed3, collapsed4, collapsed5, collapsed6, collapsed7, pillar, pillar2, pillar3, pillar4, pillar5, colonnade, colonnade2, colonnade3, colonnade4, colonnade5, colonnade6, lampPost, lampPost2, lampPost3, lampPost4, treeShade, fogClump, fogClump2, smokeClump, smokeClump2, smokeClump3FGround, bushClump, bushClump2, escapeDown, escapeDown2, escapeDown3, escapeDown4, escapeDown5, staircase1, staircase2, staircase3, staircase4, staircase5, staircase6, staircase7, staircase8, staircase9, staircase10, staircase11, staircase12, staircase13, staircase14, staircase15, staircase16, staircase17, staircase18, staircase19, staircase20, staircase21, staircase22, staircase23, staircase24, staircase25, staircase26, staircase27, staircase28, staircase29, staircase30, staircase31, staircase32, staircase33, staircase34, staircase35, staircase36, staircase37 :: PlaceKind
staircase :: PlaceKind -- template
staircaseBasic :: [PlaceKind]
staircaseBasic = [staircase1, staircase2, staircase3, staircase4, staircase5, staircase6, staircase7, staircase8, staircase9, staircase10, staircase11, staircase12, staircase13, staircase14, staircase15, staircase16, staircase17, staircase18, staircase19, staircase20, staircase21, staircase22, staircase23, staircase24, staircase25, staircase26, staircase27, staircase28, staircase29, staircase30, staircase31, staircase32, staircase33, staircase34, staircase35, staircase36, staircase37]
-- | All staircase place kinds derived from 'staircaseBasic' (gated and
-- outdoor variants, each further split into up-only and down-only
-- versions), paired with the sorted, de-duplicated list of frequency
-- group names they register under.
generatedStairs :: ([GroupName PlaceKind], [PlaceKind])
generatedStairs =
  let gatedStairs = map switchStaircaseToGated staircaseBasic
      outdoorStairs = map switchStaircaseToOutdoor staircaseBasic
      stairsAll = staircaseBasic ++ gatedStairs ++ outdoorStairs
      upStairs = map switchStaircaseToUp stairsAll
      downStairs = map switchStaircaseToDown stairsAll
      genStairs = gatedStairs ++ outdoorStairs ++ upStairs ++ downStairs
      -- nub after sort is quadratic in the worst case, but the list of
      -- group names here is small, so it is fine at startup.
  in ( nub $ sort $ concatMap (map fst . pfreq) genStairs
     , genStairs )
escapeDownBasic :: [PlaceKind]
escapeDownBasic =
[escapeDown, escapeDown2, escapeDown3, escapeDown4, escapeDown5]
-- | Escape place kinds derived from 'escapeDownBasic': first the
-- indoor "escape up" variants, then the outdoor "escape down" variants.
generatedEscapes :: [PlaceKind]
generatedEscapes =
  [ switchVariant base
  | switchVariant <- [switchEscapeToUp, switchEscapeToOutdoorDown]
  , base <- escapeDownBasic ]
-- The dots below are @'\x00B7'@, as defined in `TileKind.floorSymbol`.
-- | Default mapping from place-layout characters to lit tile groups,
-- shared by all place kinds unless overridden per kind.
defaultLegendLit :: EM.EnumMap Char (GroupName TileKind)
defaultLegendLit = EM.fromList
  [ (' ', FILLER_WALL)
  , ('|', S_WALL_LIT)
  , ('-', S_WALL_HORIZONTAL_LIT)
  , ('0', S_PILLAR)
  , ('&', S_RUBBLE_PILE)
  , ('<', TILE_INDOOR_ESCAPE_UP)
  , ('>', TILE_INDOOR_ESCAPE_DOWN)
  , ('·', FLOOR_ACTOR_ITEM_LIT)
  , ('~', S_SHALLOW_WATER_LIT)
  , ('I', SIGNBOARD) ]
-- | Default mapping from place-layout characters to dark tile groups;
-- mirrors 'defaultLegendLit' with the dark variants of walls, floor
-- and water.
defaultLegendDark :: EM.EnumMap Char (GroupName TileKind)
defaultLegendDark = EM.fromList
  [ (' ', FILLER_WALL)
  , ('|', S_WALL_DARK)
  , ('-', S_WALL_HORIZONTAL_DARK)
  , ('0', S_PILLAR)
  , ('&', S_RUBBLE_PILE)
  , ('<', TILE_INDOOR_ESCAPE_UP)
  , ('>', TILE_INDOOR_ESCAPE_DOWN)
  , ('·', FLOOR_ACTOR_ITEM_DARK)
  , ('~', S_SHALLOW_WATER_DARK)
  , ('I', SIGNBOARD) ]
deadEnd = PlaceKind -- needs to have index 0
{ pname = "a dead end"
, pfreq = []
, prarity = []
, pcover = CStretch
, pfence = FNone
, ptopLeft = ["·"]
, plegendDark = defaultLegendDark
, plegendLit = defaultLegendLit
}
rect = PlaceKind -- Valid for any nonempty area, hence low frequency.
{ pname = "a chamber"
, pfreq = [(ROGUE, 30), (LABORATORY, 10)]
, prarity = [(1, 10), (10, 6)]
, pcover = CStretch
, pfence = FNone
, ptopLeft = [ "--"
, "|·"
]
, plegendDark = defaultLegendDark
, plegendLit = defaultLegendLit
}
rect2 = rect
{ pname = "a pen"
, pfreq = [(ZOO, 3)]
}
rect3 = overridePlaceKind [ ('|', S_WALL_LIT) -- visible from afar
, ('-', S_WALL_HORIZONTAL_LIT) ] $ rect
{ pname = "a shed"
, pfreq = [(BRAWL, 10), (SHOOTOUT, 1)]
}
rect4 = rect3
{ pname = "cabinet"
, pfreq = [(ARENA, 10)]
}
rectWindows = override2PlaceKind
[ ('=', RECT_WINDOWS_HORIZONTAL_DARK)
, ('!', RECT_WINDOWS_VERTICAL_DARK) ]
[ ('=', RECT_WINDOWS_HORIZONTAL_LIT)
, ('!', RECT_WINDOWS_VERTICAL_LIT) ] $ PlaceKind
{ pname = "a hut"
, pfreq = [(FLIGHT, 10), (AMBUSH, 7)]
, prarity = [(1, 10), (10, 10)]
, pcover = CStretch
, pfence = FNone
, ptopLeft = [ "-="
, "!·"
]
, plegendDark = defaultLegendDark
, plegendLit = defaultLegendLit
}
glasshouse = overridePlaceKind
[ ('=', GLASSHOUSE_HORIZONTAL_LIT) -- visible from afar
, ('!', GLASSHOUSE_VERTICAL_LIT) ] $ PlaceKind
{ pname = "a glasshouse"
, pfreq = [(SHOOTOUT, 4)]
, prarity = [(1, 10), (10, 7)]
, pcover = CStretch
, pfence = FNone
, ptopLeft = [ "=="
, "!·"
]
, plegendDark = defaultLegendDark
, plegendLit = defaultLegendLit
}
glasshouse2 = override2PlaceKind [ ('=', GLASSHOUSE_HORIZONTAL_DARK)
, ('!', GLASSHOUSE_VERTICAL_DARK) ]
[ ('=', GLASSHOUSE_HORIZONTAL_LIT)
, ('!', GLASSHOUSE_VERTICAL_LIT) ] $ glasshouse
{ pname = "a glass cage"
, pfreq = [(ZOO, 10)]
}
glasshouse3 = glasshouse
{ pname = "a reading room"
, pfreq = [(ARENA, 40)]
}
pulpit = overridePlaceKind [ ('=', GLASSHOUSE_HORIZONTAL_LIT)
, ('!', GLASSHOUSE_VERTICAL_LIT)
, ('0', S_PULPIT) ] $ PlaceKind
-- except for floor, all will be lit, regardless of night/dark; OK
{ pname = "a stand dais"
, pfreq = [(ARENA, 200), (ZOO, 200)]
, prarity = [(1, 1)]
, pcover = CMirror
, pfence = FGround
, ptopLeft = [ "==·"
, "!··"
, "··0"
]
, plegendDark = defaultLegendDark
, plegendLit = defaultLegendLit
}
ruin = PlaceKind
{ pname = "ruins"
, pfreq = [(BATTLE, 330)]
, prarity = [(1, 1)]
, pcover = CStretch
, pfence = FNone
, ptopLeft = [ "--"
, "|X"
]
, plegendDark = defaultLegendDark
, plegendLit = defaultLegendLit
}
ruin2 = overridePlaceKind [ ('|', S_WALL_LIT) -- visible from afar
, ('-', S_WALL_HORIZONTAL_LIT) ] $ ruin
{ pname = "blasted walls"
, pfreq = [(AMBUSH, 50)]
}
collapsed = PlaceKind
{ pname = "a collapsed cavern"
, pfreq = [(NOISE, 1)]
-- no point taking up space if very little space taken,
-- but if no other place can be generated, a failsafe is useful
, prarity = [(1, 1)]
, pcover = CStretch
, pfence = FNone
, ptopLeft = [ "0"
]
, plegendDark = defaultLegendDark
, plegendLit = defaultLegendLit
}
collapsed2 = collapsed
{ pfreq = [(NOISE, 1000), (BATTLE, 200)]
, ptopLeft = [ "X0"
, "00"
]
}
collapsed3 = collapsed
{ pfreq = [(NOISE, 2000), (BATTLE, 200)]
, ptopLeft = [ "XX0"
, "000"
]
}
collapsed4 = collapsed
{ pfreq = [(NOISE, 2000), (BATTLE, 200)]
, ptopLeft = [ "XXX0"
, "0000"
]
}
collapsed5 = collapsed
{ pfreq = [(NOISE, 3000), (BATTLE, 500)]
, ptopLeft = [ "XX0"
, "X00"
, "000"
]
}
collapsed6 = collapsed
{ pfreq = [(NOISE, 4000), (BATTLE, 1000)]
, ptopLeft = [ "XXX0"
, "X000"
, "0000"
]
}
collapsed7 = collapsed
{ pfreq = [(NOISE, 4000), (BATTLE, 1000)]
, ptopLeft = [ "XXX0"
, "XX00"
, "0000"
]
}
pillar = PlaceKind
{ pname = "a hall"
, pfreq = [(ROGUE, 600), (LABORATORY, 2000)]
, prarity = [(1, 1)]
, pcover = CStretch
, pfence = FNone
-- Larger rooms require support pillars.
, ptopLeft = [ "----"
, "|···"
, "|·0·"
, "|···"
]
, plegendDark = defaultLegendDark
, plegendLit = defaultLegendLit
}
pillar2 = pillar
{ pfreq = [(ROGUE, 60), (LABORATORY, 200)]
, ptopLeft = [ "----"
, "|0··"
, "|···"
, "|···"
]
}
pillar3 = pillar
{ pfreq = [(ROGUE, 8000), (LABORATORY, 25000)]
, ptopLeft = [ "-----"
, "|0···"
, "|····"
, "|··0·"
, "|····"
]
}
pillar4 = overridePlaceKind [('&', CACHE)] $ pillar
{ pname = "an exquisite hall"
, pfreq = [(ROGUE, 30000), (LABORATORY, 100000)]
, ptopLeft = [ "-----"
, "|&·0·"
, "|····"
, "|0·0·"
, "|····"
]
}
pillar5 = overridePlaceKind [('&', CACHE)] $ pillar
{ pname = "a decorated hall"
, pfreq = [(ROGUE, 30000), (LABORATORY, 100000)]
, ptopLeft = [ "-----"
, "|&·0·"
, "|····"
, "|0···"
, "|····"
]
}
colonnade = PlaceKind
{ pname = "a colonnade"
, pfreq = [ (ROGUE, 3), (ARENA, 20), (LABORATORY, 2)
, (EMPTY, 10000), (MINE, 1000), (BRAWL, 4)
, (FLIGHT, 40), (AMBUSH, 40) ]
, prarity = [(1, 10), (10, 10)]
, pcover = CAlternate
, pfence = FFloor
, ptopLeft = [ "0·"
, "··"
]
, plegendDark = defaultLegendDark
, plegendLit = defaultLegendLit
}
colonnade2 = colonnade
{ prarity = [(1, 15), (10, 15)]
, ptopLeft = [ "0·"
, "·0"
]
}
colonnade3 = colonnade
{ prarity = [(1, 800), (10, 800)]
, ptopLeft = [ "··0"
, "·0·"
, "0··"
]
}
colonnade4 = colonnade
{ prarity = [(1, 200), (10, 200)]
, ptopLeft = [ "0··"
, "·0·"
, "··0"
]
}
colonnade5 = colonnade
{ prarity = [(1, 10), (10, 10)]
, ptopLeft = [ "0··"
, "··0"
]
}
colonnade6 = colonnade
{ prarity = [(1, 100), (10, 100)]
, ptopLeft = [ "0·"
, "··"
, "·0"
]
}
lampPost = overridePlaceKind [ ('0', S_LAMP_POST)
, ('·', S_FLOOR_ACTOR_LIT) ] $ PlaceKind
{ pname = "a lamp-lit area"
, pfreq = [(FLIGHT, 200), (AMBUSH, 200), (ZOO, 100), (BATTLE, 100)]
, prarity = [(1, 1)]
, pcover = CVerbatim
, pfence = FNone
, ptopLeft = [ "X·X"
, "·0·"
, "X·X"
]
, plegendDark = defaultLegendDark
, plegendLit = defaultLegendLit
}
lampPost2 = lampPost
{ ptopLeft = [ "···"
, "·0·"
, "···"
]
}
lampPost3 = lampPost
{ pfreq = [ (FLIGHT, 3000), (AMBUSH, 3000), (ZOO, 50)
, (BATTLE, 110) ]
, ptopLeft = [ "XX·XX"
, "X···X"
, "··0··"
, "X···X"
, "XX·XX"
]
}
lampPost4 = lampPost
{ pfreq = [(FLIGHT, 3000), (AMBUSH, 3000), (ZOO, 50), (BATTLE, 60)]
, ptopLeft = [ "X···X"
, "·····"
, "··0··"
, "·····"
, "X···X"
]
}
treeShade = override2PlaceKind [ ('0', S_TREE_DARK)
, ('s', TREE_SHADE_WALKABLE_DARK) ]
[ ('0', S_TREE_LIT)
, ('s', TREE_SHADE_WALKABLE_LIT) ] $
overridePlaceKind [('·', S_SHADED_GROUND)] $ PlaceKind
{ pname = "a tree shade"
, pfreq = [(BRAWL, 1000)]
, prarity = [(1, 1)]
, pcover = CMirror
, pfence = FNone
, ptopLeft = [ "··s"
, "s0·"
, "Xs·"
]
, plegendDark = defaultLegendDark
, plegendLit = defaultLegendLit
}
fogClump = override2PlaceKind [('f', FOG_CLUMP_DARK)]
[('f', FOG_CLUMP_LIT)] $
overridePlaceKind [(';', S_FOG_LIT)] $ PlaceKind
{ pname = "a foggy patch"
, pfreq = [(SHOOTOUT, 150), (EMPTY, 15)]
, prarity = [(1, 1)]
, pcover = CMirror
, pfence = FNone
, ptopLeft = [ "f;"
, ";f"
, ";X"
]
, plegendDark = defaultLegendDark
, plegendLit = defaultLegendLit
}
fogClump2 = fogClump
{ pfreq = [(SHOOTOUT, 500), (EMPTY, 50)]
, ptopLeft = [ "X;f"
, "f;f"
, ";;f"
, "Xff"
]
}
smokeClump = override2PlaceKind [ ('f', SMOKE_CLUMP_DARK)
, ('·', S_FLOOR_ACTOR_DARK) ]
[ ('f', SMOKE_CLUMP_LIT)
, ('·', S_FLOOR_ACTOR_LIT) ] $
overridePlaceKind [(';', S_SMOKE_LIT)] $ PlaceKind
{ pname = "a smoky patch"
, pfreq = [(ZOO, 50)]
, prarity = [(1, 1)]
, pcover = CMirror
, pfence = FNone
, ptopLeft = [ "f;"
, ";f"
, ";X"
]
, plegendDark = defaultLegendDark
, plegendLit = defaultLegendLit
}
smokeClump2 = smokeClump
{ pfreq = [(ZOO, 500)]
, ptopLeft = [ "X;f"
, "f;f"
, ";;f"
, "Xff"
]
}
smokeClump3FGround = smokeClump
{ pname = "a burned out area"
, pfreq = [(LABORATORY, 150)]
, prarity = [(1, 1)]
, pcover = CVerbatim
, pfence = FGround
, ptopLeft = [ ";f;"
, "f·f"
, "f·f"
, ";f;"
]
-- should not be used in caves with trails, because bushes should
-- not grow over such artificial trails
}
bushClump = override2PlaceKind [('f', BUSH_CLUMP_DARK)]
[('f', BUSH_CLUMP_LIT)] $
overridePlaceKind [(';', S_BUSH_LIT)] $ PlaceKind
{ pname = "a bushy patch"
, pfreq = [(SHOOTOUT, 40)]
, prarity = [(1, 1)]
, pcover = CMirror
, pfence = FNone
, ptopLeft = [ "Xf" -- one sure exit needed not to block a corner
, ";X"
, ";;"
]
, plegendDark = defaultLegendDark
, plegendLit = defaultLegendLit
-- should not be used in caves with trails, because bushes can't
-- grow over such artificial trails
}
bushClump2 = bushClump
{ pfreq = [(SHOOTOUT, 80)]
, ptopLeft = [ "Xf" -- one sure exit needed not to block a corner
, ";X"
, ";X"
, ";;"
]
}
escapeDown = overridePlaceKind [ ('|', S_WALL_LIT) -- visible from afar
, ('-', S_WALL_HORIZONTAL_LIT) ] $ PlaceKind
{ pname = "an escape down"
, pfreq = [(INDOOR_ESCAPE_DOWN, 1)]
, prarity = [(1, 1)]
, pcover = CVerbatim
, pfence = FGround
, ptopLeft = [ ">"
]
, plegendDark = defaultLegendDark
, plegendLit = defaultLegendLit
}
escapeDown2 = escapeDown
{ pfreq = [(INDOOR_ESCAPE_DOWN, 1000)]
, pfence = FFloor
, ptopLeft = [ "0·0"
, "·>·"
, "0·0"
]
}
escapeDown3 = escapeDown
{ pfreq = [(INDOOR_ESCAPE_DOWN, 2000)]
, pfence = FNone
, ptopLeft = [ "-----"
, "|0·0|"
, "|·>·|"
, "|0·0|"
, "-----"
]
}
escapeDown4 = escapeDown
{ pfreq = [(INDOOR_ESCAPE_DOWN, 1000)]
, pcover = CMirror
, pfence = FFloor
, ptopLeft = [ "0··"
, "·>·"
, "··0"
]
}
escapeDown5 = escapeDown
{ pfreq = [(INDOOR_ESCAPE_DOWN, 2000)]
, pcover = CMirror
, pfence = FNone
, ptopLeft = [ "-----"
, "|0··|"
, "|·>·|"
, "|0·0|"
, "-----"
]
}
staircase = overridePlaceKind [ ('<', STAIRCASE_UP)
, ('>', STAIRCASE_DOWN)
, ('|', S_WALL_LIT) -- visible from afar
, ('-', S_WALL_HORIZONTAL_LIT) ] $ PlaceKind
{ pname = "a staircase"
, pfreq = [(TINY_STAIRCASE, 1)] -- no cover when arriving; low freq
, prarity = [(1, 100), (10, 100)]
, pcover = CVerbatim
, pfence = FGround
, ptopLeft = [ "<·>"
]
, plegendDark = defaultLegendDark
, plegendLit = defaultLegendLit
}
staircase1 = staircase
{ prarity = [(1, 1)] -- no cover when arriving; so low rarity
}
staircase2 = staircase
{ pfreq = [(TINY_STAIRCASE, 3)]
, prarity = [(1, 1)]
, pfence = FGround
, ptopLeft = [ "·<·>·"
]
}
staircase3 = staircase
{ prarity = [(1, 1)]
, pfence = FFloor
}
staircase4 = staircase2
{ pfence = FFloor
, prarity = [(1, 1)]
}
staircase5 = staircase
{ pfreq = [(OPEN_STAIRCASE, 200)] -- no cover, open
, pfence = FGround
, ptopLeft = [ "0·0"
, "···"
, "<·>"
, "···"
, "0·0"
]
}
staircase6 = staircase
{ pfreq = [(OPEN_STAIRCASE, 300)]
, pfence = FGround
, ptopLeft = [ "0·0·0"
, "·····"
, "·<·>·"
, "·····"
, "0·0·0"
]
}
staircase7 = staircase
{ pfreq = [(OPEN_STAIRCASE, 500)]
, pfence = FGround
, ptopLeft = [ "0·0·0·0"
, "·······"
, "0·<·>·0"
, "·······"
, "0·0·0·0"
]
}
staircase8 = staircase
{ pfreq = [(OPEN_STAIRCASE, 2000)]
, pfence = FGround
, ptopLeft = [ "·0·I·0·"
, "0·····0"
, "··<·>··"
, "0·····0"
, "·0·0·0·"
]
}
staircase9 = staircase
{ pfreq = [(OPEN_STAIRCASE, 500)]
, pfence = FGround
, ptopLeft = [ "0·······0"
, "···<·>···"
, "0·······0"
]
}
staircase10 = staircase
{ pfreq = [(OPEN_STAIRCASE, 500)]
, pfence = FGround
, ptopLeft = [ "0·····0"
, "··<·>··"
, "0·····0"
]
}
staircase11 = staircase
{ pfreq = [(CLOSED_STAIRCASE, 2000)] -- weak cover, low freq
, pfence = FFloor
, ptopLeft = [ "·0·"
, "0·0"
, "···"
, "<·>"
, "···"
, "0·0"
, "·0·"
]
}
staircase12 = staircase
{ pfreq = [(CLOSED_STAIRCASE, 4000)]
, pfence = FFloor
, ptopLeft = [ "·0·0·"
, "0·0·0"
, "·····"
, "·<·>·"
, "·····"
, "0·0·0"
, "·0·0·"
]
}
staircase13 = staircase
{ pfreq = [(CLOSED_STAIRCASE, 6000)]
, pfence = FFloor
, ptopLeft = [ "·0·0·0·"
, "0·0·0·0"
, "·······"
, "0·<·>·0"
, "·······"
, "0·0·0·0"
, "·0·0·0·"
]
}
staircase14 = staircase
{ pfreq = [(CLOSED_STAIRCASE, 10000)]
, pfence = FFloor
, ptopLeft = [ "0·0·0·0"
, "·0·0·0·"
, "0·····0"
, "··<·>··"
, "0·····0"
, "·0·0·0·"
, "0·0·0·0"
]
}
staircase15 = staircase
{ pfreq = [(CLOSED_STAIRCASE, 20000)]
, pfence = FFloor
, ptopLeft = [ "·0·0·0·0·"
, "0·0·0·0·0"
, "·0·····0·"
, "0··<·>··0"
, "·0·····0·"
, "0·0·0·0·0"
, "·0·0·0·0·"
]
}
staircase16 = staircase
{ pfreq = [(CLOSED_STAIRCASE, 20000)]
, pfence = FFloor
, ptopLeft = [ "0·0·0·0·0"
, "·0·0·0·0·"
, "0·······0"
, "·0·<·>·0·"
, "0·······0"
, "·0·0·0·0·"
, "0·0·0·0·0"
]
}
staircase17 = staircase
{ pfreq = [(CLOSED_STAIRCASE, 20000)]
, pfence = FFloor
, ptopLeft = [ "0·0·0·0·0·0"
, "·0·0·0·0·0·"
, "0·0·····0·0"
, "·0··<·>··0·"
, "0·0·····0·0"
, "·0·0·0·0·0·"
, "0·0·0·0·0·0"
]
}
staircase18 = staircase
{ pfreq = [(CLOSED_STAIRCASE, 80000)]
, pfence = FFloor
, ptopLeft = [ "··0·0·0·0··"
, "·0·0·0·0·0·"
, "0·0·····0·0"
, "·0··<·>··0·"
, "0·0·····0·0"
, "·0·0·0·0·0·"
, "··0·0·0·0··"
]
}
staircase19 = staircase
{ pfreq = [(CLOSED_STAIRCASE, 20000)]
, pfence = FFloor
, ptopLeft = [ "·0·0·0·0·0·"
, "0·0·0·0·0·0"
, "·0·······0·"
, "0·0·<·>·0·0"
, "·0·······0·"
, "0·0·0·0·0·0"
, "·0·0·0·0·0·"
]
}
staircase20 = staircase
{ pfreq = [(CLOSED_STAIRCASE, 5000)]
, pfence = FFloor
, ptopLeft = [ "·0·0·0·0·0·"
, "0·0·····0·0"
, "·0··<·>··0·"
, "0·0·····0·0"
, "·0·0·I·0·0·"
]
}
staircase21 = staircase
{ pfreq = [(CLOSED_STAIRCASE, 5000)]
, pfence = FFloor
, ptopLeft = [ "0·0·I·0·0"
, "·0·····0·"
, "0··<·>··0"
, "·0·····0·"
, "0·0·0·0·0"
]
}
staircase22 = staircase
{ pfreq = [(CLOSED_STAIRCASE, 2000)]
, pfence = FFloor
, ptopLeft = [ "0·0·····0·0"
, "·0··<·>··0·"
, "0·0·····0·0"
]
}
staircase23 = staircase
{ pfreq = [(CLOSED_STAIRCASE, 1000)]
, pfence = FFloor
, ptopLeft = [ "·0·······0·"
, "0·0·<·>·0·0"
, "·0·······0·"
]
}
staircase24 = staircase
{ pfreq = [(CLOSED_STAIRCASE, 1000)]
, pfence = FFloor
, ptopLeft = [ "·0·····0·"
, "0··<·>··0"
, "·0·····0·"
]
}
staircase25 = staircase
{ pfreq = [(WALLED_STAIRCASE, 10)]
, pfence = FNone
, ptopLeft = [ "-------"
, "|·····|"
, "|·<·>·|"
, "|·····|"
, "-------"
]
}
staircase26 = staircase
{ pfreq = [(WALLED_STAIRCASE, 50)]
, pfence = FNone
, ptopLeft = [ "---------"
, "|·······|"
, "|··<·>··|"
, "|·······|"
, "---------"
]
}
staircase27 = staircase
{ pfreq = [(WALLED_STAIRCASE, 100)]
, pfence = FNone
, ptopLeft = [ "---------"
, "|0·····0|"
, "|··<·>··|"
, "|0·····0|"
, "---------"
]
}
staircase28 = staircase
{ pfreq = [(WALLED_STAIRCASE, 1000)]
, pfence = FNone
, ptopLeft = [ "-------"
, "|·····|"
, "|·····|"
, "|·<·>·|"
, "|·····|"
, "|·····|"
, "-------"
]
}
staircase29 = staircase
{ pfreq = [(WALLED_STAIRCASE, 1000)]
, pfence = FNone
, ptopLeft = [ "-------"
, "|0···0|"
, "|·····|"
, "|·<·>·|"
, "|·····|"
, "|0···0|"
, "-------"
]
}
staircase30 = staircase
{ pfreq = [(WALLED_STAIRCASE, 1000)]
, pfence = FNone
, ptopLeft = [ "-------"
, "|0·0·0|"
, "|·····|"
, "|·<·>·|"
, "|·····|"
, "|0·0·0|"
, "-------"
]
}
staircase31 = staircase
{ pfreq = [(WALLED_STAIRCASE, 2000)]
, pfence = FNone
, ptopLeft = [ "---------"
, "|·······|"
, "|·······|"
, "|··<·>··|"
, "|·······|"
, "|·······|"
, "---------"
]
}
staircase32 = staircase
{ pfreq = [(WALLED_STAIRCASE, 5000)]
, pfence = FNone
, ptopLeft = [ "---------"
, "|0·····0|"
, "|·······|"
, "|··<·>··|"
, "|·······|"
, "|0·····0|"
, "---------"
]
}
staircase33 = staircase
{ pfreq = [(WALLED_STAIRCASE, 5000)]
, pfence = FNone
, ptopLeft = [ "---------"
, "|0·0·0·0|"
, "|·······|"
, "|0·<·>·0|"
, "|·······|"
, "|0·0·0·0|"
, "---------"
]
}
staircase34 = staircase
{ pfreq = [(WALLED_STAIRCASE, 5000)]
, pfence = FNone
, ptopLeft = [ "---------"
, "|·0·0·0·|"
, "|0·····0|"
, "|··<·>··|"
, "|0·····0|"
, "|·0·I·0·|"
, "---------"
]
}
staircase35 = staircase
{ pfreq = [(WALLED_STAIRCASE, 200)]
, pfence = FNone
, ptopLeft = [ "-----------"
, "|·········|"
, "|···<·>···|"
, "|·········|"
, "-----------"
]
}
staircase36 = staircase
{ pfreq = [(WALLED_STAIRCASE, 500)]
, pfence = FNone
, ptopLeft = [ "-----------"
, "|·0·····0·|"
, "|0··<·>··0|"
, "|·0·····0·|"
, "-----------"
]
}
staircase37 = staircase
{ pfreq = [(WALLED_STAIRCASE, 500)]
, pfence = FNone
, ptopLeft = [ "-----------"
, "|0·······0|"
, "|·0·<·>·0·|"
, "|0·······0|"
, "-----------"
]
}
-- | Turn a two-way staircase into an up-only one: the down stairs
-- ('>') become plain stair terminals, and "up" is appended to the name
-- and to every frequency group name.
switchStaircaseToUp :: PlaceKind -> PlaceKind
switchStaircaseToUp s = override2PlaceKind [('>', STAIR_TERMINAL_DARK)]
                                           [('>', STAIR_TERMINAL_LIT)] $ s
  { pname = pname s <+> "up"
  , pfreq = renameFreqs (<+> "up") $ pfreq s
  }
-- | Turn a two-way staircase into a down-only one: the up stairs ('<')
-- become plain stair terminals, and "down" is appended to the name and
-- to every frequency group name.
switchStaircaseToDown :: PlaceKind -> PlaceKind
switchStaircaseToDown s = override2PlaceKind [('<', STAIR_TERMINAL_DARK)]
                                             [('<', STAIR_TERMINAL_LIT)] $ s
  { pname = pname s <+> "down"
  , pfreq = renameFreqs (<+> "down") $ pfreq s
  }
overrideGated :: [(Char, GroupName TileKind)]
overrideGated =
[ ('<', GATED_STAIRCASE_UP), ('>', GATED_STAIRCASE_DOWN)
, ('|', S_WALL_LIT), ('-', S_WALL_HORIZONTAL_LIT) ] -- visible from afar
-- | Gate a staircase: swap in the gated stair tiles via 'overrideGated'
-- and rename, e.g. "a staircase" -> "a gated staircase", prefixing
-- every frequency group name with "gated".
-- NOTE(review): 'tail' assumes 'pname s' contains at least one word --
-- true for all staircases defined in this module, but partial for an
-- externally supplied kind with an empty name; confirm before reuse.
switchStaircaseToGated :: PlaceKind -> PlaceKind
switchStaircaseToGated s = overridePlaceKind overrideGated $ s
  { pname = T.unwords $ "a gated" : tail (T.words (pname s))
  , pfreq = renameFreqs ("gated" <+>) $ pfreq s
  }
overrideOutdoor :: [(Char, GroupName TileKind)]
overrideOutdoor =
[ ('<', STAIRCASE_OUTDOOR_UP), ('>', STAIRCASE_OUTDOOR_DOWN)
, ('|', S_WALL_LIT), ('-', S_WALL_HORIZONTAL_LIT) ] -- visible from afar
switchStaircaseToOutdoor :: PlaceKind -> PlaceKind
switchStaircaseToOutdoor s = overridePlaceKind overrideOutdoor $ s
{ pname = "an outdoor area exit"
, pfreq = renameFreqs ("outdoor" <+>) $ pfreq s
}
-- | Convert an indoor "escape down" place into its "escape up"
-- counterpart: the '>' tile becomes an indoor escape up, and every
-- frequency entry is re-registered under 'INDOOR_ESCAPE_UP', keeping
-- its weight.
switchEscapeToUp :: PlaceKind -> PlaceKind
switchEscapeToUp s = overridePlaceKind [('>', TILE_INDOOR_ESCAPE_UP)] $ s
  { pname = "an escape up"
  , pfreq = [(INDOOR_ESCAPE_UP, weight) | (_, weight) <- pfreq s]
  }
-- | Convert an indoor "escape down" place into its outdoor
-- counterpart: the '>' tile becomes an outdoor escape down, and every
-- frequency entry is re-registered under 'OUTDOOR_ESCAPE_DOWN',
-- keeping its weight.
switchEscapeToOutdoorDown :: PlaceKind -> PlaceKind
switchEscapeToOutdoorDown s =
  overridePlaceKind [('>', TILE_OUTDOOR_ESCAPE_DOWN)] $ s
    { pname = "outdoor escape route"
    , pfreq = [(OUTDOOR_ESCAPE_DOWN, weight) | (_, weight) <- pfreq s]
    }
| LambdaHack/LambdaHack | GameDefinition/Content/PlaceKind.hs | bsd-3-clause | 33,342 | 47 | 13 | 11,036 | 7,553 | 4,828 | 2,725 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Tests.RestApi (tests) where
import RestApi
import Tests.App
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.HUnit hiding (Test)
import qualified Data.Map as Map
import Snap.Snaplet
import qualified Snap.Test as ST
import Snap.Snaplet.Test
-- | All tests this module contributes to the suite.  The meta-info
-- test above is commented out, so only the placeholder test runs.
tests :: [Test]
tests = [testGetAllChars]
{-
testGetMeta :: Test
testGetMeta = testCase "RestApi/testGetMeta" $ assertMetaInfo
where assertMetaInfo :: Assertion
assertMetaInfo = do
let hdl = with restApi metaHandler
res <- runHandler (ST.get "" Map.empty) hdl appInit
either (assertFailure . show) (ST.assertBodyContains "version") res
-}
-- | Placeholder test: asserts the tautology @1 == 1@.
-- NOTE(review): despite its name, this does not exercise any handler;
-- presumably a stub to be replaced following the commented-out
-- @testGetMeta@ pattern above -- confirm before counting it as coverage.
testGetAllChars :: Test
testGetAllChars = testCase "RestApi/testGetAllChars" $ do
  assertEqual "RestApi/testGetAllChars" 1 1
| g0v/encoding-mashup-server | tests/testsuite/Tests/RestApi.hs | bsd-3-clause | 842 | 0 | 8 | 166 | 114 | 71 | 43 | 16 | 1 |
module Main where
import Test.Framework (defaultMain, testGroup)
import qualified Text.FastAleck.Tests
-- | Entry point: run the whole test-framework suite.
main :: IO ()
main = defaultMain allTests
  where
    allTests =
      [ testGroup "Text.FastAleck.Tests" Text.FastAleck.Tests.tests ]
| jaspervdj/fast-aleck-hs | tests/TestSuite.hs | bsd-3-clause | 211 | 0 | 8 | 33 | 54 | 32 | 22 | 6 | 1 |
{-# Language EmptyDataDecls #-}
module Uri where
import FFI
import Prelude
-- | Creation and conversion
-- Choose to make Uri an opaque data type. If the accessors on the
-- jsUri objects were properties and not functions we could have
-- defined Uri as a record instead.
data Uri
-- To make Google Closure play nice we use [] instead of . for accessing properties.
-- | The URL of the current page, read from window.location.href.
currentUri :: Fay String
currentUri = ffi "window['location']['href']"
-- This assumes that Uri is defined globally, which it is by default.
-- | Parse a string into a 'Uri' via the jsUri constructor.
newUri :: String -> Uri
newUri = ffi "new window['Uri'](%1)"
-- | Serialize a 'Uri' back to its string representation.
toString :: Uri -> String
toString = ffi "%1['toString']()"
-- If we ever want to pass a Uri back to JS we need to make sure we keep persistance internally.
-- | Deep-copy a 'Uri' so the mutating jsUri setters cannot alias it.
clone :: Uri -> Uri
clone = ffi "%1['clone']()"
-- | Getters
-- All getters (except query! But lets be consistent) may return null if the value isn't set so we use
-- Language.FFI.Nullable here which converts null -> Null and String -> Nullable String.
-- Nullable is distinguished from Maybe to not break haskell compatibility.
-- If we want to decode null into Nullable (Nullable a) there is no way of
-- knowing if Null or Nullable Null is correct. This problem does not exist when
-- working with Maybe values in client server communication.
protocol :: Uri -> Nullable String
protocol = ffi "%1['protocol']()"
userInfo :: Uri -> Nullable String
userInfo = ffi "%1['userInfo']()"
host :: Uri -> Nullable String
host = ffi "%1['host']()"
port :: Uri -> Nullable String
port = ffi "%1['port']()"
path :: Uri -> Nullable String
path = ffi "%1['path']()"
query :: Uri -> Nullable String
query = ffi "%1['query']()"
anchor :: Uri -> Nullable String
anchor = ffi "%1['anchor']()"
-- | Other getters
-- | Value of the given query parameter key; delegates to jsUri's
-- getQueryParamValue (behaviour for a missing key follows jsUri).
queryParamValue :: String -> Uri -> String
queryParamValue = ffi "%2['getQueryParamValue'](%1)"
-- | All values for the given query parameter key.
queryParamValues :: String -> Uri -> [String]
queryParamValues = ffi "%2['getQueryParamValues'](%1)"
-- | Setters
-- We could use Nullable here to combine the with* and remove* functions
-- but usage would be more verbose that way.
-- JsUri has clone() conveniently defined so we use it to get
-- persistence, otherwise our types would be `-> Fay Uri` which is of
-- course worse.
-- Each setter therefore returns a fresh Uri and never mutates its input.
withProtocol :: String -> Uri -> Uri
withProtocol = ffi "%2['clone']()['setProtocol'](%1)"
withUserInfo :: String -> Uri -> Uri
withUserInfo = ffi "%2['clone']()['setUserInfo'](%1)"
withHost :: String -> Uri -> Uri
withHost = ffi "%2['clone']()['setHost'](%1)"
withPort :: String -> Uri -> Uri
withPort = ffi "%2['clone']()['setPort'](%1)"
withPath :: String -> Uri -> Uri
withPath = ffi "%2['clone']()['setPath'](%1)"
withQuery :: String -> Uri -> Uri
withQuery = ffi "%2['clone']()['setQuery'](%1)"
withAnchor :: String -> Uri -> Uri
withAnchor = ffi "%2['clone']()['setAnchor'](%1)"
-- | Removals
-- Each removal clears the respective component by setting it to null on
-- a clone, so the input Uri is left untouched.
removeProtocol :: Uri -> Uri
removeProtocol = ffi "%1['clone']()['setProtocol'](null)"
removeUserInfo :: Uri -> Uri
removeUserInfo = ffi "%1['clone']()['setUserInfo'](null)"
removeHost :: Uri -> Uri
removeHost = ffi "%1['clone']()['setHost'](null)"
removePort :: Uri -> Uri
removePort = ffi "%1['clone']()['setPort'](null)"
removePath :: Uri -> Uri
removePath = ffi "%1['clone']()['setPath'](null)"
removeQuery :: Uri -> Uri
removeQuery = ffi "%1['clone']()['setQuery'](null)"
removeAnchor :: Uri -> Uri
removeAnchor = ffi "%1['clone']()['setAnchor'](null)"
-- | Other setters
-- | Append a key/value pair to the query string (may duplicate keys).
addQueryParam :: String -> String -> Uri -> Uri
addQueryParam = ffi "%3['clone']()['addQueryParam'](%1,%2)"
-- | Replace every value of the key with the single given value.
replaceQueryParam :: String -> String -> Uri -> Uri
replaceQueryParam = ffi "%3['clone']()['replaceQueryParam'](%1,%2)"
-- The order of the arguments differ from the jsUri api, it is now
-- key -> oldValue -> newValue -> Uri -> Uri
replaceQueryParamValue :: String -> String -> String -> Uri -> Uri
replaceQueryParamValue = ffi "%4['clone']()['replaceQueryParam'](%1, %3, %2)"
-- | Delete every value of the given key.
deleteQueryParam :: String -> Uri -> Uri
deleteQueryParam = ffi "%2['clone']()['deleteQueryParam'](%1)"
-- | Delete only the given key/value pair.
deleteQueryParamValue :: String -> String -> Uri -> Uri
deleteQueryParamValue = ffi "%3['clone']()['deleteQueryParam'](%1,%2)"
| faylang/fay-uri | src/Uri.hs | bsd-3-clause | 4,139 | 0 | 8 | 657 | 682 | 369 | 313 | -1 | -1 |
module QSum( qsum ) where
{- |
Module : QSum
Description : Computating digit sum
Copyright : (c) Thomas Lang
License : BSD3
Stability : stable
Portability : portable
This module contains the function 'qsum'.
This function will take an Integer and
calculates it's digit sum.
-}
-- | Digit sum of an 'Integer', peeling off the least significant digit
-- into an accumulator.  Any number already below ten (including every
-- negative input) is returned unchanged, matching the original
-- guard-based behaviour.
qsum :: Integer -> Integer
qsum = go 0
  where
    go acc m
      | m < 10    = acc + m
      | otherwise = case m `divMod` 10 of
          (q, r) -> go (acc + r) q
| langthom/Hack-A-Thon-Haskell | QSum.hs | bsd-3-clause | 462 | 0 | 9 | 161 | 72 | 40 | 32 | 4 | 1 |
{-# LANGUAGE DeriveDataTypeable, OverloadedStrings #-}
-- | This module re-exports the @Github.Data.Definitions@ module, adding
-- instances of @FromJSON@ to it. If you wish to use the data without the
-- instances, use the @Github.Data.Definitions@ module instead.
module Github.Data (module Github.Data.Definitions) where
import Data.Time
import Control.Applicative
import Control.Monad
import qualified Data.Text as T
import Data.Aeson.Types
import System.Locale (defaultTimeLocale)
import qualified Data.Vector as V
import qualified Data.HashMap.Lazy as Map
import Data.Hashable (Hashable)
import Github.Data.Definitions
-- | Github timestamps arrive as "%FT%T%Z" strings; anything else fails.
instance FromJSON GithubDate where
  parseJSON (String t) =
    maybe (fail "could not parse Github datetime")
          (pure . GithubDate)
          (parseTime defaultTimeLocale "%FT%T%Z" (T.unpack t))
  parseJSON _ = fail "Given something besides a String"
-- | A commit; committer/author/stats may be absent and files default to [].
instance FromJSON Commit where
  parseJSON (Object o) = Commit
    <$> o .: "sha"
    <*> o .: "parents"
    <*> o .: "url"
    <*> o .: "commit"
    <*> o .:? "committer"
    <*> o .:? "author"
    <*> o .:< "files"
    <*> o .:? "stats"
  parseJSON _ = fail "Could not build a Commit"
-- | A git tree listing; entries default to [] when "tree" is missing.
instance FromJSON Tree where
  parseJSON (Object o) = Tree
    <$> o .: "sha"
    <*> o .: "url"
    <*> o .:< "tree"
  parseJSON _ = fail "Could not build a Tree"
-- | One entry of a git tree; "size" is absent for subtrees.
instance FromJSON GitTree where
  parseJSON (Object o) = GitTree
    <$> o .: "type"
    <*> o .: "sha"
    <*> o .: "url"
    <*> o .:? "size"
    <*> o .: "path"
    <*> o .: "mode"
  parseJSON _ = fail "Could not build a GitTree"
-- | The low-level git commit object; sha may be absent, parents default to [].
instance FromJSON GitCommit where
  parseJSON (Object o) = GitCommit
    <$> o .: "message"
    <*> o .: "url"
    <*> o .: "committer"
    <*> o .: "author"
    <*> o .: "tree"
    <*> o .:? "sha"
    <*> o .:< "parents"
  parseJSON _ = fail "Could not build a GitCommit"
-- | An owner is an organization when no "gravatar_id" is present,
-- otherwise a user (which additionally carries the gravatar id).
instance FromJSON GithubOwner where
  parseJSON (Object o)
    | o `at` "gravatar_id" == Nothing =
        GithubOrganization
          <$> o .: "avatar_url"
          <*> o .: "login"
          <*> o .: "url"
          <*> o .: "id"
    | otherwise =
        GithubUser
          <$> o .: "avatar_url"
          <*> o .: "login"
          <*> o .: "url"
          <*> o .: "id"
          <*> o .: "gravatar_id"
  parseJSON v = fail $ "Could not build a GithubOwner out of " ++ show v
-- | The lightweight name/email/date triple used for commit authors.
instance FromJSON GitUser where
  parseJSON (Object o) = GitUser
    <$> o .: "name"
    <*> o .: "email"
    <*> o .: "date"
  parseJSON _ = fail "Could not build a GitUser"
-- | A file touched by a commit, including its per-file diff stats.
instance FromJSON File where
  parseJSON (Object o) = File
    <$> o .: "blob_url"
    <*> o .: "status"
    <*> o .: "raw_url"
    <*> o .: "additions"
    <*> o .: "sha"
    <*> o .: "changes"
    <*> o .: "patch"
    <*> o .: "filename"
    <*> o .: "deletions"
  parseJSON _ = fail "Could not build a File"
-- | Aggregate commit statistics.
instance FromJSON Stats where
  parseJSON (Object o) = Stats
    <$> o .: "additions"
    <*> o .: "total"
    <*> o .: "deletions"
  parseJSON _ = fail "Could not build a Stats"
-- | A commit comment; position, line and html_url may be absent.
instance FromJSON Comment where
  parseJSON (Object o) = Comment
    <$> o .:? "position"
    <*> o .:? "line"
    <*> o .: "body"
    <*> o .: "commit_id"
    <*> o .: "updated_at"
    <*> o .:? "html_url"
    <*> o .: "url"
    <*> o .: "created_at"
    <*> o .: "path"
    <*> o .: "user"
    <*> o .: "id"
  parseJSON _ = fail "Could not build a Comment"
-- | Payload for creating a comment: just the body.
instance ToJSON NewComment where
  toJSON (NewComment b) = object [ "body" .= b ]
-- | Payload for editing a comment: just the body.
instance ToJSON EditComment where
  toJSON (EditComment b) = object [ "body" .= b ]
-- | A comparison between two commits; commits/files default to [].
instance FromJSON Diff where
  parseJSON (Object o) = Diff
    <$> o .: "status"
    <*> o .: "behind_by"
    <*> o .: "patch_url"
    <*> o .: "url"
    <*> o .: "base_commit"
    <*> o .:< "commits"
    <*> o .: "total_commits"
    <*> o .: "html_url"
    <*> o .:< "files"
    <*> o .: "ahead_by"
    <*> o .: "diff_url"
    <*> o .: "permalink_url"
  parseJSON _ = fail "Could not build a Diff"
-- | A gist; "files" is an object keyed by filename, flattened via 'values'.
-- Note: "git_push_url" is read twice, mirroring the record's two URL fields.
instance FromJSON Gist where
  parseJSON (Object o) = Gist
    <$> o .: "user"
    <*> o .: "git_push_url"
    <*> o .: "url"
    <*> o .:? "description"
    <*> o .: "created_at"
    <*> o .: "public"
    <*> o .: "comments"
    <*> o .: "updated_at"
    <*> o .: "html_url"
    <*> o .: "id"
    <*> o `values` "files"
    <*> o .: "git_push_url"
  parseJSON _ = fail "Could not build a Gist"
-- | One file inside a gist; language and content may be absent.
instance FromJSON GistFile where
  parseJSON (Object o) = GistFile
    <$> o .: "type"
    <*> o .: "raw_url"
    <*> o .: "size"
    <*> o .:? "language"
    <*> o .: "filename"
    <*> o .:? "content"
  parseJSON _ = fail "Could not build a GistFile"
-- | A comment on a gist.
instance FromJSON GistComment where
  parseJSON (Object o) = GistComment
    <$> o .: "user"
    <*> o .: "url"
    <*> o .: "created_at"
    <*> o .: "body"
    <*> o .: "updated_at"
    <*> o .: "id"
  parseJSON _ = fail "Could not build a GistComment"
-- | A git blob: encoded content plus identifying metadata.
instance FromJSON Blob where
  parseJSON (Object o) = Blob
    <$> o .: "url"
    <*> o .: "encoding"
    <*> o .: "content"
    <*> o .: "sha"
    <*> o .: "size"
  parseJSON _ = fail "Could not build a Blob"
-- | A git ref pointing at an object.
instance FromJSON GitReference where
  parseJSON (Object o) = GitReference
    <$> o .: "object"
    <*> o .: "url"
    <*> o .: "ref"
  parseJSON _ = fail "Could not build a GitReference"
-- | The typed object a ref points at.
instance FromJSON GitObject where
  parseJSON (Object o) = GitObject
    <$> o .: "type"
    <*> o .: "sha"
    <*> o .: "url"
  parseJSON _ = fail "Could not build a GitObject"
-- | An issue; closed_at/closed_by/assignee/milestone may be absent.
instance FromJSON Issue where
  parseJSON (Object o) = Issue
    <$> o .:? "closed_at"
    <*> o .: "updated_at"
    <*> o .: "html_url"
    <*> o .:? "closed_by"
    <*> o .: "labels"
    <*> o .: "number"
    <*> o .:? "assignee"
    <*> o .: "user"
    <*> o .: "title"
    <*> o .: "pull_request"
    <*> o .: "url"
    <*> o .: "created_at"
    <*> o .: "body"
    <*> o .: "state"
    <*> o .: "id"
    <*> o .: "comments"
    <*> o .:? "milestone"
  parseJSON _ = fail "Could not build an Issue"
-- | Issue-creation payload: all fields sent verbatim.
instance ToJSON NewIssue where
  toJSON (NewIssue t b a m ls) = object
    [ "title" .= t
    , "body" .= b
    , "assignee" .= a
    , "milestone" .= m
    , "labels" .= ls ]
-- | Issue-edit payload: Null-valued fields are dropped so the request
-- only touches the fields actually supplied.
instance ToJSON EditIssue where
  toJSON (EditIssue t b a s m ls) = object (filter keep pairs)
    where
      pairs = [ "title" .= t
              , "body" .= b
              , "assignee" .= a
              , "state" .= s
              , "milestone" .= m
              , "labels" .= ls ]
      keep (_, Null) = False
      keep (_, _) = True
-- | A milestone with its open/closed issue counts.
instance FromJSON Milestone where
  parseJSON (Object o) = Milestone
    <$> o .: "creator"
    <*> o .: "due_on"
    <*> o .: "open_issues"
    <*> o .: "number"
    <*> o .: "closed_issues"
    <*> o .: "description"
    <*> o .: "title"
    <*> o .: "url"
    <*> o .: "created_at"
    <*> o .: "state"
  parseJSON _ = fail "Could not build a Milestone"
-- | A label attached to an issue: color, API url and display name.
instance FromJSON IssueLabel where
  parseJSON (Object o) =
    IssueLabel <$> o .: "color"
               <*> o .: "url"
               <*> o .: "name"
  -- Fixed copy-paste defect: this failure message previously claimed
  -- "Could not build a Milestone", which misattributed parse errors.
  parseJSON _ = fail "Could not build an IssueLabel"
-- | The pull-request URLs embedded in an issue; all three may be absent.
instance FromJSON PullRequestReference where
  parseJSON (Object o) = PullRequestReference
    <$> o .:? "html_url"
    <*> o .:? "patch_url"
    <*> o .:? "diff_url"
  parseJSON _ = fail "Could not build a PullRequest"
-- | A comment on an issue.
instance FromJSON IssueComment where
  parseJSON (Object o) = IssueComment
    <$> o .: "updated_at"
    <*> o .: "user"
    <*> o .: "url"
    <*> o .: "created_at"
    <*> o .: "body"
    <*> o .: "id"
  parseJSON _ = fail "Could not build an IssueComment"
-- | An issue event; commit_id and the issue itself may be absent.
instance FromJSON Event where
  parseJSON (Object o) = Event
    <$> o .: "actor"
    <*> o .: "event"
    <*> o .:? "commit_id"
    <*> o .: "url"
    <*> o .: "created_at"
    <*> o .: "id"
    <*> o .:? "issue"
  parseJSON _ = fail "Could not build an Event"
-- | Event kinds are a closed set of fixed strings.
instance FromJSON EventType where
  parseJSON (String s) = case s of
    "closed" -> pure Closed
    "reopened" -> pure Reopened
    "subscribed" -> pure Subscribed
    "merged" -> pure Merged
    "referenced" -> pure Referenced
    "mentioned" -> pure Mentioned
    "assigned" -> pure Assigned
    "unsubscribed" -> pure Unsubscribed
    _ -> fail "Could not build an EventType"
  parseJSON _ = fail "Could not build an EventType"
-- | The abbreviated organization record used in listings.
instance FromJSON SimpleOrganization where
  parseJSON (Object o) = SimpleOrganization
    <$> o .: "url"
    <*> o .: "avatar_url"
    <*> o .: "id"
    <*> o .: "login"
  parseJSON _ = fail "Could not build a SimpleOrganization"
-- | A full organization profile; several fields are optional.
instance FromJSON Organization where
  parseJSON (Object o) = Organization
    <$> o .: "type"
    <*> o .:? "blog"
    <*> o .:? "location"
    <*> o .: "login"
    <*> o .: "followers"
    <*> o .:? "company"
    <*> o .: "avatar_url"
    <*> o .: "public_gists"
    <*> o .: "html_url"
    <*> o .:? "email"
    <*> o .: "following"
    <*> o .: "public_repos"
    <*> o .: "url"
    <*> o .: "created_at"
    <*> o .:? "name"
    <*> o .: "id"
  parseJSON _ = fail "Could not build an Organization"
-- | The summary pull-request record; closed_at/merged_at may be absent.
instance FromJSON PullRequest where
  parseJSON (Object o) = PullRequest
    <$> o .:? "closed_at"
    <*> o .: "created_at"
    <*> o .: "user"
    <*> o .: "patch_url"
    <*> o .: "state"
    <*> o .: "number"
    <*> o .: "html_url"
    <*> o .: "updated_at"
    <*> o .: "body"
    <*> o .: "issue_url"
    <*> o .: "diff_url"
    <*> o .: "url"
    <*> o .: "_links"
    <*> o .:? "merged_at"
    <*> o .: "title"
    <*> o .: "id"
  parseJSON _ = fail "Could not build a PullRequest"
-- | The detailed pull-request record: the summary fields followed by
-- merge status, diff stats and the head/base branch references.
instance FromJSON DetailedPullRequest where
  parseJSON (Object o) = DetailedPullRequest
    <$> o .:? "closed_at"
    <*> o .: "created_at"
    <*> o .: "user"
    <*> o .: "patch_url"
    <*> o .: "state"
    <*> o .: "number"
    <*> o .: "html_url"
    <*> o .: "updated_at"
    <*> o .: "body"
    <*> o .: "issue_url"
    <*> o .: "diff_url"
    <*> o .: "url"
    <*> o .: "_links"
    <*> o .:? "merged_at"
    <*> o .: "title"
    <*> o .: "id"
    <*> o .:? "merged_by"
    <*> o .: "changed_files"
    <*> o .: "head"
    <*> o .: "comments"
    <*> o .: "deletions"
    <*> o .: "additions"
    <*> o .: "review_comments"
    <*> o .: "base"
    <*> o .: "commits"
    <*> o .: "merged"
    <*> o .: "mergeable"
  parseJSON _ = fail "Could not build a DetailedPullRequest"
-- | Hypermedia links live under nested {"x": {"href": ...}} objects,
-- traversed with the (<.:>) path helper.
instance FromJSON PullRequestLinks where
  parseJSON (Object o) = PullRequestLinks
    <$> o <.:> ["review_comments", "href"]
    <*> o <.:> ["comments", "href"]
    <*> o <.:> ["html", "href"]
    <*> o <.:> ["self", "href"]
  parseJSON _ = fail "Could not build a PullRequestLinks"
-- | The commit payload inside a pull request is currently discarded.
instance FromJSON PullRequestCommit where
  parseJSON (Object _) = pure PullRequestCommit
  parseJSON _ = fail "Could not build a PullRequestCommit"
-- | A repository search result: total hits plus the items on this page.
instance FromJSON SearchReposResult where
  parseJSON (Object o) = SearchReposResult
    <$> o .: "total_count"
    <*> o .:< "items"
  parseJSON _ = fail "Could not build a SearchReposResult"
-- | A repository; the trailing optional fields cover feature flags and,
-- for forks, the parent/source repositories.
instance FromJSON Repo where
  parseJSON (Object o) = Repo
    <$> o .: "ssh_url"
    <*> o .: "description"
    <*> o .: "created_at"
    <*> o .: "html_url"
    <*> o .: "svn_url"
    <*> o .: "forks"
    <*> o .:? "homepage"
    <*> o .: "fork"
    <*> o .: "git_url"
    <*> o .: "private"
    <*> o .: "clone_url"
    <*> o .: "size"
    <*> o .: "updated_at"
    <*> o .: "watchers"
    <*> o .: "owner"
    <*> o .: "name"
    <*> o .: "language"
    <*> o .:? "master_branch"
    <*> o .: "pushed_at"
    <*> o .: "id"
    <*> o .: "url"
    <*> o .: "open_issues"
    <*> o .:? "has_wiki"
    <*> o .:? "has_issues"
    <*> o .:? "has_downloads"
    <*> o .:? "parent"
    <*> o .:? "source"
  parseJSON _ = fail "Could not build a Repo"
-- | The owner/name pair identifying a repository.
instance FromJSON RepoRef where
  parseJSON (Object o) = RepoRef
    <$> o .: "owner"
    <*> o .: "name"
  parseJSON _ = fail "Could not build a RepoRef"
-- | Anonymous contributors (type == "Anonymous") carry only a name;
-- known contributors carry a full profile.
instance FromJSON Contributor where
  parseJSON (Object o)
    | o `at` "type" == (Just "Anonymous") =
        AnonymousContributor
          <$> o .: "contributions"
          <*> o .: "name"
    | otherwise =
        KnownContributor
          <$> o .: "contributions"
          <*> o .: "avatar_url"
          <*> o .: "login"
          <*> o .: "url"
          <*> o .: "id"
          <*> o .: "gravatar_id"
  parseJSON _ = fail "Could not build a Contributor"
-- | The languages payload is an object of name -> byte-count pairs;
-- every key becomes one Language entry.
instance FromJSON Languages where
  parseJSON (Object o) =
    Languages <$> mapM (\name -> Language (T.unpack name) <$> o .: name)
                       (Map.keys o)
  parseJSON _ = fail "Could not build Languages"
-- | A repository tag with its archive URLs.
instance FromJSON Tag where
  parseJSON (Object o) = Tag
    <$> o .: "name"
    <*> o .: "zipball_url"
    <*> o .: "tarball_url"
    <*> o .: "commit"
  parseJSON _ = fail "Could not build a Tag"
-- | A branch: its name and tip commit.
instance FromJSON Branch where
  parseJSON (Object o) = Branch <$> o .: "name" <*> o .: "commit"
  parseJSON _ = fail "Could not build a Branch"
-- | The sha/url pair identifying a branch tip.
instance FromJSON BranchCommit where
  parseJSON (Object o) = BranchCommit <$> o .: "sha" <*> o .: "url"
  parseJSON _ = fail "Could not build a BranchCommit"
-- | A detailed owner profile: organizations are distinguished by the
-- absence of "gravatar_id"; users additionally carry hireable,
-- gravatar_id and email fields.
instance FromJSON DetailedOwner where
  parseJSON (Object o)
    | o `at` "gravatar_id" == Nothing =
        DetailedOrganization
          <$> o .: "created_at"
          <*> o .: "type"
          <*> o .: "public_gists"
          <*> o .: "avatar_url"
          <*> o .: "followers"
          <*> o .: "following"
          <*> o .:? "blog"
          <*> o .:? "bio"
          <*> o .: "public_repos"
          <*> o .:? "name"
          <*> o .:? "location"
          <*> o .:? "company"
          <*> o .: "url"
          <*> o .: "id"
          <*> o .: "html_url"
          <*> o .: "login"
    | otherwise =
        DetailedUser
          <$> o .: "created_at"
          <*> o .: "type"
          <*> o .: "public_gists"
          <*> o .: "avatar_url"
          <*> o .: "followers"
          <*> o .: "following"
          <*> o .: "hireable"
          <*> o .: "gravatar_id"
          <*> o .:? "blog"
          <*> o .:? "bio"
          <*> o .: "public_repos"
          <*> o .:? "name"
          <*> o .:? "location"
          <*> o .:? "company"
          <*> o .: "email"
          <*> o .: "url"
          <*> o .: "id"
          <*> o .: "html_url"
          <*> o .: "login"
  parseJSON _ = fail "Could not build a DetailedOwner"
-- | Like Aeson's @(.:?)@ for list-valued keys: a missing key parses as
-- the empty list (via 'mzero') instead of 'Nothing'.
(.:<) :: (FromJSON a) => Object -> T.Text -> Parser [a]
obj .:< key = maybe (pure mzero) parseJSON (Map.lookup key obj)
-- | Parse the collection of values stored under @key@; a missing key is
-- treated as an empty object of children.
values :: (Eq k, Hashable k, FromJSON v) => Map.HashMap k Value -> k -> Parser v
values obj key = parseJSON (Array (V.fromList (Map.elems children)))
  where
    (Object children) = findWithDefault (Object Map.empty) key obj
-- | Traverse nested objects along a key path and produce the value at
-- the final key.  A missing intermediate key resolves to an empty
-- object, so the final lookup fails with Aeson's standard error.
--
-- Fixed: the signature previously read @Object => [T.Text] -> Parser v@,
-- using the 'Object' type synonym as a constraint, which is not valid
-- Haskell; it must be an ordinary function arrow.  Also added the
-- formerly missing empty-path case so it fails in the 'Parser' monad
-- instead of crashing with a pattern-match error.
(<.:>) :: (FromJSON v) => Object -> [T.Text] -> Parser v
_   <.:> []         = fail "empty key path"
obj <.:> [key]      = obj .: key
obj <.:> (key:keys) =
  let (Object nextObj) = findWithDefault (Object Map.empty) key obj in
  nextObj <.:> keys
-- | Total lookup of a key's value in a JSON object.
at :: Object -> T.Text -> Maybe Value
at obj key = Map.lookup key obj
-- | Lookup with a fallback, mirroring Data.Map's findWithDefault for
-- hash maps.
findWithDefault :: (Eq k, Hashable k) => v -> k -> Map.HashMap k v -> v
findWithDefault def k m = maybe def id (Map.lookup k m)
| schell/github | Github/Data.hs | bsd-3-clause | 17,542 | 7 | 59 | 6,663 | 4,775 | 2,392 | 2,383 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.CloudFormation.DescribeStacks
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns the description for the specified stack; if no stack name was
-- specified, then it returns the description for all the stacks created.
--
-- /See:/ <http://docs.aws.amazon.com/AWSCloudFormation/latest/APIReference/API_DescribeStacks.html AWS API Reference> for DescribeStacks.
--
-- This operation returns paginated results.
module Network.AWS.CloudFormation.DescribeStacks
(
-- * Creating a Request
describeStacks
, DescribeStacks
-- * Request Lenses
, dNextToken
, dStackName
-- * Destructuring the Response
, describeStacksResponse
, DescribeStacksResponse
-- * Response Lenses
, dsrsNextToken
, dsrsStacks
, dsrsResponseStatus
) where
import Network.AWS.CloudFormation.Types
import Network.AWS.CloudFormation.Types.Product
import Network.AWS.Pager
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | The input for DescribeStacks action.
--
-- /See:/ 'describeStacks' smart constructor.
data DescribeStacks = DescribeStacks'
{ _dNextToken :: !(Maybe Text)
, _dStackName :: !(Maybe Text)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DescribeStacks' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dNextToken'
--
-- * 'dStackName'
describeStacks
:: DescribeStacks
describeStacks =
DescribeStacks'
{ _dNextToken = Nothing
, _dStackName = Nothing
}
-- | String that identifies the start of the next list of stacks, if there is
-- one.
dNextToken :: Lens' DescribeStacks (Maybe Text)
dNextToken = lens _dNextToken (\ s a -> s{_dNextToken = a});
-- | The name or the unique stack ID that is associated with the stack, which
-- are not always interchangeable:
--
-- - Running stacks: You can specify either the stack\'s name or its
-- unique stack ID.
-- - Deleted stacks: You must specify the unique stack ID.
--
-- Default: There is no default value.
dStackName :: Lens' DescribeStacks (Maybe Text)
dStackName = lens _dStackName (\ s a -> s{_dStackName = a});
instance AWSPager DescribeStacks where
page rq rs
| stop (rs ^. dsrsNextToken) = Nothing
| stop (rs ^. dsrsStacks) = Nothing
| otherwise =
Just $ rq & dNextToken .~ rs ^. dsrsNextToken
instance AWSRequest DescribeStacks where
type Rs DescribeStacks = DescribeStacksResponse
request = postQuery cloudFormation
response
= receiveXMLWrapper "DescribeStacksResult"
(\ s h x ->
DescribeStacksResponse' <$>
(x .@? "NextToken") <*>
(x .@? "Stacks" .!@ mempty >>=
may (parseXMLList "member"))
<*> (pure (fromEnum s)))
instance ToHeaders DescribeStacks where
toHeaders = const mempty
instance ToPath DescribeStacks where
toPath = const "/"
instance ToQuery DescribeStacks where
toQuery DescribeStacks'{..}
= mconcat
["Action" =: ("DescribeStacks" :: ByteString),
"Version" =: ("2010-05-15" :: ByteString),
"NextToken" =: _dNextToken,
"StackName" =: _dStackName]
-- | The output for a DescribeStacks action.
--
-- /See:/ 'describeStacksResponse' smart constructor.
data DescribeStacksResponse = DescribeStacksResponse'
{ _dsrsNextToken :: !(Maybe Text)
, _dsrsStacks :: !(Maybe [Stack])
, _dsrsResponseStatus :: !Int
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DescribeStacksResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dsrsNextToken'
--
-- * 'dsrsStacks'
--
-- * 'dsrsResponseStatus'
describeStacksResponse
:: Int -- ^ 'dsrsResponseStatus'
-> DescribeStacksResponse
describeStacksResponse pResponseStatus_ =
DescribeStacksResponse'
{ _dsrsNextToken = Nothing
, _dsrsStacks = Nothing
, _dsrsResponseStatus = pResponseStatus_
}
-- | String that identifies the start of the next list of stacks, if there is
-- one.
dsrsNextToken :: Lens' DescribeStacksResponse (Maybe Text)
dsrsNextToken = lens _dsrsNextToken (\ s a -> s{_dsrsNextToken = a});
-- | A list of stack structures.
dsrsStacks :: Lens' DescribeStacksResponse [Stack]
dsrsStacks = lens _dsrsStacks (\ s a -> s{_dsrsStacks = a}) . _Default . _Coerce;
-- | The response status code.
dsrsResponseStatus :: Lens' DescribeStacksResponse Int
dsrsResponseStatus = lens _dsrsResponseStatus (\ s a -> s{_dsrsResponseStatus = a});
| fmapfmapfmap/amazonka | amazonka-cloudformation/gen/Network/AWS/CloudFormation/DescribeStacks.hs | mpl-2.0 | 5,424 | 0 | 15 | 1,228 | 836 | 492 | 344 | 96 | 1 |
module ParseMonad where
import Control.Monad.Reader
-- | A parse either fails with an error message or yields a value.
type ParseResult = Either String
-- | The parser monad: a computation reading the input 'String' and the
-- current line number from a shared environment.
type P a = ReaderT (String, Int) ParseResult a
-- | Lift a curried input-and-line function into the parser monad.
mkP :: (String -> Int -> ParseResult a) -> P a
mkP f = ReaderT (uncurry f)
-- | Run a parser against an input string at the given line number.
runP :: P a -> String -> Int -> ParseResult a
runP parser input line = runReaderT parser (input, line)
-- | The current line number from the parser environment.
-- ('asks snd' already yields it; the former '>>= return' was a no-op,
-- since @m >>= return@ is just @m@ by the monad laws.)
lineP :: P Int
lineP = asks snd
| PhilThomas/happy | src/ParseMonad.hs | bsd-2-clause | 330 | 0 | 9 | 70 | 140 | 75 | 65 | 10 | 1 |
{-# LANGUAGE ImplicitParams #-}
foo = choice flips $ map (\p -> \b -> let ?pat = p in match s{ flips = b }) ps
| mpickering/ghc-exactprint | tests/examples/ghc710/SlidingLambda.hs | bsd-3-clause | 112 | 0 | 14 | 27 | 54 | 28 | 26 | 2 | 1 |
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances, MultiWayIf #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module PostgREST.PgQuery where
import PostgREST.RangeQuery
import qualified Hasql as H
import qualified Hasql.Postgres as P
import qualified Hasql.Backend as B
import qualified Data.Text as T
import Text.Regex.TDFA ( (=~) )
import qualified Network.HTTP.Types.URI as Net
import qualified Data.ByteString.Char8 as BS
import Data.Monoid
import Data.Vector (empty)
import Data.Maybe (fromMaybe, mapMaybe)
import Data.Functor
import Control.Monad (join)
import Data.String.Conversions (cs)
import qualified Data.Aeson as JSON
import qualified Data.List as L
import qualified Data.Vector as V
import Data.Scientific (isInteger, formatScientific, FPFormat(..))
import Prelude
type PStmt = H.Stmt P.Postgres
-- | Statements concatenate by appending SQL text and parameter lists;
-- the combined statement is prepared only if both operands are.
instance Monoid PStmt where
  mappend (B.Stmt sqlA paramsA prepA) (B.Stmt sqlB paramsB prepB) =
    B.Stmt (sqlA <> sqlB) (paramsA <> paramsB) (prepA && prepB)
  mempty = B.Stmt "" empty True
-- | A statement transformer: extends or rewrites a prepared statement.
type StatementT = PStmt -> PStmt
-- | A table addressed by schema and name; the two parts are quoted
-- separately when rendered (see 'fromQt').
data QualifiedTable = QualifiedTable {
  qtSchema :: T.Text
, qtName :: T.Text
} deriving (Show)
-- | One term of an ORDER BY clause: the column, the direction keyword,
-- and an optional NULLS FIRST/LAST modifier.
data OrderTerm = OrderTerm {
  otTerm :: T.Text
, otDirection :: BS.ByteString
, otNullOrder :: Maybe BS.ByteString
}
-- | Append LIMIT/OFFSET to a statement; an absent range (or an
-- unbounded one) renders as LIMIT ALL OFFSET 0.
limitT :: Maybe NonnegRange -> StatementT
limitT r q = q <> B.Stmt (" LIMIT " <> lim <> " OFFSET " <> off <> " ") empty True
  where
    lim = maybe "ALL" (cs . show) (join (fmap rangeLimit r))
    off = cs (show (fromMaybe 0 (fmap rangeOffset r)))
-- | Append a WHERE clause built from the query-string filters,
-- skipping the reserved "order" parameter; no filters leaves the
-- statement untouched.
whereT :: QualifiedTable -> Net.Query -> StatementT
whereT table params q
  | L.null filters = q
  | otherwise      = q <> B.Stmt " where " empty True <> predicates
  where
    filters = [ p | p <- params, fst p `notElem` ["order"] ]
    predicates = mconcat (L.intersperse andq (map (wherePred table) filters))
-- | Wrap the first statement in a named WITH clause that the second
-- statement selects from ("... from <name>").
withT :: PStmt -> T.Text -> StatementT
withT (B.Stmt exprSql exprParams exprPrep) name (B.Stmt bodySql bodyParams bodyPrep) =
  B.Stmt ("WITH " <> name <> " AS (" <> exprSql <> ") " <> bodySql <> " from " <> name)
         (exprParams <> bodyParams)
         (exprPrep && bodyPrep)
-- | Append an ORDER BY clause from the parsed order terms; an empty
-- list leaves the statement untouched.
orderT :: [OrderTerm] -> StatementT
orderT terms q
  | L.null terms = q
  | otherwise    = q <> B.Stmt " order by " empty True <> orderClause
  where
    orderClause = mconcat (L.intersperse commaq (map termStmt terms))
    termStmt :: OrderTerm -> PStmt
    termStmt t =
      B.Stmt (" " <> cs (pgFmtIdent (otTerm t)) <> " "
                  <> cs (otDirection t) <> " "
                  <> maybe "" cs (otNullOrder t) <> " ")
             empty True
-- | Wrap the statement's SQL text in parentheses.
parentheticT :: StatementT
parentheticT stmt = stmt { B.stmtTemplate = wrapped }
  where
    wrapped = " (" <> B.stmtTemplate stmt <> ") "
-- | Run the second statement only when the first one affected no rows,
-- by materialising the first as a CTE and guarding on NOT EXISTS.
iffNotT :: PStmt -> StatementT
iffNotT (B.Stmt aSql aParams aPrep) (B.Stmt bSql bParams bPrep) =
  B.Stmt ("WITH aaa AS (" <> aSql <> " returning *) " <>
          bSql <> " WHERE NOT EXISTS (SELECT * FROM aaa)")
         (aParams <> bParams)
         (aPrep && bPrep)
-- | Replace a statement's result with just its row count.
countT :: StatementT
countT stmt = stmt { B.stmtTemplate = counted }
  where
    counted = "WITH qqq AS (" <> B.stmtTemplate stmt
              <> ") SELECT pg_catalog.count(1) FROM qqq"
-- | Statement counting every row of the given table.
countRows :: QualifiedTable -> PStmt
countRows tbl =
  B.Stmt ("select pg_catalog.count(1) from " <> fromQt tbl) empty True
-- | Wrap a select so it yields a row count alongside a JSON array of rows.
asJsonWithCount :: StatementT
asJsonWithCount stmt = stmt { B.stmtTemplate = wrapped }
  where
    wrapped =
      "pg_catalog.count(t), array_to_json(array_agg(row_to_json(t)))::character varying from ("
      <> B.stmtTemplate stmt <> ") t"
-- | Wrap a select so each row is rendered as a JSON object.
asJsonRow :: StatementT
asJsonRow stmt =
  stmt { B.stmtTemplate = "row_to_json(t) from (" <> B.stmtTemplate stmt <> ") t" }
-- | SELECT * over the given table.
selectStar :: QualifiedTable -> PStmt
selectStar tbl = B.Stmt ("select * from " <> fromQt tbl) empty True
-- | Append RETURNING * so mutating statements report the affected rows.
returningStarT :: StatementT
returningStarT stmt =
  stmt { B.stmtTemplate = B.stmtTemplate stmt <> " RETURNING *" }
-- | DELETE over the given table; filters are appended separately.
deleteFrom :: QualifiedTable -> PStmt
deleteFrom tbl = B.Stmt ("delete from " <> fromQt tbl) empty True
-- | Build a multi-row INSERT returning each inserted row as JSON.
-- With no columns given, inserts the table's defaults instead.
insertInto :: QualifiedTable
           -> V.Vector T.Text
           -> V.Vector (V.Vector JSON.Value)
           -> PStmt
insertInto t cols vals
  | V.null cols =
      B.Stmt ("insert into " <> fromQt t <> " default values returning *") empty True
  | otherwise = B.Stmt sql empty True
  where
    sql = "insert into " <> fromQt t <> " (" <> colList
          <> ") values " <> rowList
          <> " returning row_to_json(" <> fromQt t <> ".*)"
    colList = T.intercalate ", " (V.toList (V.map pgFmtIdent cols))
    rowList = T.intercalate ", " (V.toList (V.map rowTuple vals))
    -- one parenthesised tuple of quoted values per input row
    rowTuple row =
      "(" <> T.intercalate ", " (V.toList (V.map insertableValue row)) <> ")"
-- | Build an INSERT ... SELECT over literal values; with no columns
-- given, inserts the table's defaults and returns the row.
insertSelect :: QualifiedTable -> [T.Text] -> [JSON.Value] -> PStmt
insertSelect t [] _ =
  B.Stmt ("insert into " <> fromQt t <> " default values returning *") empty True
insertSelect t cols vals = B.Stmt sql empty True
  where
    sql = "insert into " <> fromQt t <> " ("
          <> T.intercalate ", " (map pgFmtIdent cols)
          <> ") select "
          <> T.intercalate ", " (map insertableValue vals)
-- | Build an UPDATE using the row-constructor form:
-- @update t set (c1, c2) = (v1, v2)@; filters are appended separately.
update :: QualifiedTable -> [T.Text] -> [JSON.Value] -> PStmt
update t cols vals = B.Stmt sql empty True
  where
    sql = "update " <> fromQt t <> " set ("
          <> T.intercalate ", " (map pgFmtIdent cols)
          <> ") = ("
          <> T.intercalate ", " (map insertableValue vals)
          <> ")"
-- | Render one query-string filter (column, "op.value") as a SQL
-- predicate against the table, routing the column through the
-- jsonb-path-aware formatter.
wherePred :: QualifiedTable -> Net.QueryItem -> PStmt
wherePred table (col, predicate) = B.Stmt predSql empty True
  where
    predSql = " " <> pgFmtJsonbPath table (cs col) <> " " <> op <> " " <>
      -- "is"/"isnot" only accept the whitelisted literals null/true/false
      (if opCode `elem` ["is","isnot"] then whiteList value else cs sqlValue)
    -- predicate looks like "op.value"; a missing predicate parses as "."
    opCode:rest = T.split (=='.') $ cs $ fromMaybe "." predicate
    value = T.intercalate "." rest
    whiteList val =
      fromMaybe (cs (pgFmtLit val) <> "::unknown ")
                (L.find ((==) . T.toLower $ val) ["null","true","false"])
    -- '*' in like/ilike patterns maps to SQL's '%' wildcard
    star c = if c == '*' then '%' else c
    unknownLiteral = (<> "::unknown ") . pgFmtLit
    sqlValue = case opCode of
      "like" -> unknownLiteral $ T.map star value
      "ilike" -> unknownLiteral $ T.map star value
      "in" -> "(" <> T.intercalate ", " (map unknownLiteral $ T.split (==',') value) <> ") "
      "@@" -> "to_tsquery(" <> unknownLiteral value <> ") "
      _ -> unknownLiteral value
    -- unknown operator codes fall back to equality
    op = case opCode of
      "eq" -> "="
      "gt" -> ">"
      "lt" -> "<"
      "gte" -> ">="
      "lte" -> "<="
      "neq" -> "<>"
      "like" -> "like"
      "ilike" -> "ilike"
      "in" -> "in"
      "is" -> "is"
      "isnot" -> "is not"
      "@@" -> "@@"
      _ -> "="
-- | Parse the "order" query parameter: a comma-separated list of
-- "col.asc"/"col.desc" terms, silently dropping malformed ones.
orderParse :: Net.Query -> [OrderTerm]
orderParse q = mapMaybe orderParseTerm (T.split (==',') (cs requested))
  where
    requested = fromMaybe "" (join (lookup "order" q))
-- | Parse a single order term "col.dir[.nulls]"; anything that is not
-- exactly asc/desc (optionally followed by nullsfirst/nullslast)
-- yields Nothing.
orderParseTerm :: T.Text -> Maybe OrderTerm
orderParseTerm s = case T.split (=='.') s of
  (c:d:nls)
    | d `elem` ["asc", "desc"] ->
        Just $ OrderTerm c
          (if d == "asc" then "asc" else "desc")
          (nullOrder nls)
  _ -> Nothing
  where
    nullOrder ["nullsfirst"] = Just "nulls first"
    nullOrder ["nullslast"] = Just "nulls last"
    nullOrder _ = Nothing
-- | A bare ", " separator statement for interspersing clauses.
commaq :: PStmt
commaq = B.Stmt ", " empty True
-- | A bare " and " separator statement for joining predicates.
andq :: PStmt
andq = B.Stmt " and " empty True
-- | Abstract syntax of a jsonb column path such as col->a->>b:
-- a column root, key segments, and the -> / ->> accessors.
data JsonbPath =
    ColIdentifier T.Text
  | KeyIdentifier T.Text
  | SingleArrow JsonbPath JsonbPath
  | DoubleArrow JsonbPath JsonbPath
  deriving (Show)
-- | Split "col->a->>b" into a 'JsonbPath': exactly one "->>" (the text
-- extraction) preceded by any number of "->" hops.  Paths without a
-- "->>" (or with several) yield Nothing.
parseJsonbPath :: T.Text -> Maybe JsonbPath
parseJsonbPath p = case T.splitOn "->>" p of
  [prefix, lastKey] ->
    case T.splitOn "->" prefix of
      -- splitOn never returns [], so this match is exhaustive in practice
      (colName:keys) ->
        Just $ DoubleArrow
          (foldl SingleArrow (ColIdentifier colName) (map KeyIdentifier keys))
          (KeyIdentifier lastKey)
      [] -> Nothing
  _ -> Nothing
-- | Render a column reference that may be a jsonb path, qualified by
-- the table; a plain column falls back to identifier quoting.
pgFmtJsonbPath :: QualifiedTable -> T.Text -> T.Text
pgFmtJsonbPath table p = render (fromMaybe (ColIdentifier p) (parseJsonbPath p))
  where
    render (ColIdentifier i) = fromQt table <> "." <> pgFmtIdent i
    render (KeyIdentifier i) = pgFmtLit i
    render (SingleArrow a b) = render a <> "->" <> render b
    render (DoubleArrow a b) = render a <> "->>" <> render b
-- | Quote a SQL identifier: strip NULs, double embedded double quotes,
-- and wrap in quotes unless it is already a safe lower-case identifier.
pgFmtIdent :: T.Text -> T.Text
pgFmtIdent x
  | (cs escaped :: BS.ByteString) =~ danger = "\"" <> escaped <> "\""
  | otherwise = escaped
  where
    escaped = T.replace "\"" "\"\"" (trimNullChars $ cs x)
    danger = "^$|^[^a-z_]|[^a-z_0-9]" :: BS.ByteString
-- | Quote a SQL string literal: strip NULs, double embedded single
-- quotes, escape backslashes, and use the E'' form when the quoted
-- text contains a backslash sequence.
pgFmtLit :: T.Text -> T.Text
pgFmtLit x
  | "\\\\" `T.isInfixOf` quoted = "E" <> slashed
  | otherwise = slashed
  where
    quoted = "'" <> T.replace "'" "''" (trimNullChars x) <> "'"
    slashed = T.replace "\\" "\\\\" quoted
-- | Drop everything from the first NUL character onward.
trimNullChars :: T.Text -> T.Text
trimNullChars txt = T.takeWhile (\c -> c /= '\x0') txt
-- | Fully qualified, quoted "schema"."table" reference.
fromQt :: QualifiedTable -> T.Text
fromQt qt = pgFmtIdent (qtSchema qt) <> "." <> pgFmtIdent (qtName qt)
-- | The raw textual content of a JSON value, without JSON quoting;
-- non-scalar values are re-encoded as JSON text.
unquoted :: JSON.Value -> T.Text
unquoted (JSON.String t) = t
unquoted (JSON.Number n) =
  let decimals = if isInteger n then Just 0 else Nothing
  in cs (formatScientific Fixed decimals n)
unquoted (JSON.Bool b) = cs (show b)
unquoted v = cs (JSON.encode v)
-- | Quote a text value for insertion, typed as unknown so Postgres
-- infers the column type.
insertableText :: T.Text -> T.Text
insertableText t = pgFmtLit t <> "::unknown"
-- | SQL rendering of a JSON value; JSON null maps to SQL null.
insertableValue :: JSON.Value -> T.Text
insertableValue JSON.Null = "null"
insertableValue v = insertableText (unquoted v)
-- | URL-filter form of a JSON value ("is.null" or "eq.<value>").
paramFilter :: JSON.Value -> T.Text
paramFilter JSON.Null = "is.null"
paramFilter v = "eq." <> unquoted v
| acrispin/postgrest | src/PostgREST/PgQuery.hs | mit | 9,379 | 0 | 24 | 2,474 | 3,199 | 1,662 | 1,537 | 239 | 19 |
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances #-}
{- |
Module : $Header$
Description : parsing VSE parts
Copyright : (c) C. Maeder, DFKI Bremen 2008
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : provisional
Portability : portable
Parser for VSE logic extension of CASL
-}
module VSE.Parse where
import Common.AnnoState
import Common.DocUtils
import Common.Id
import Common.Lexer
import Common.Result
import Common.Token
import VSE.As
import Text.ParserCombinators.Parsec
import CASL.Formula
import CASL.AS_Basic_CASL
import Data.Char (toUpper, toLower)
-- | Declaration-introducing keywords: "procedure" and "function", their
-- plural forms, and the all-uppercase variants of all four.
declWords :: [String]
declWords = base ++ map (map toUpper) base
  where
    kinds = ["procedure", "function"]
    base = kinds ++ map (++ "s") kinds
-- | All words reserved by the VSE extension: the bracket tokens, the
-- declaration keywords, and the statement keywords in both lower- and
-- uppercase spellings.
reservedWords :: [String]
reservedWords = ["<:", ":>"] ++ declWords ++ plain ++ map (map toUpper) plain
  where
    plain =
      [ "in", "out", "begin", "end", "abort", "skip", "return", "declare"
      , "if", "then", "else", "fi", "while", "do", "od"
      , "defprocs", "defprocsend", "restricted" ]
-- | Parse the keyword @s@ case-insensitively. On success the returned token
-- carries the canonical lower-case spelling @s@ rather than the spelling
-- found in the input; any other word fails, reporting the upper-case
-- spelling as the expected item. 'annos' / 'lineAnnos' presumably consume
-- surrounding annotations (from Common.AnnoState) -- confirm.
keyword :: String -> AParser st Token
keyword s = pToken $ try $ do
  annos
  str <- scanAnyWords
  lineAnnos
  if map toLower str == s then return s else unexpected str <?> map toUpper s
-- | Parse a variable declaration @v : sort@ with an optional initialising
-- term after @:=@. The range covers the colon, extended to the @:=@ token
-- when an initialiser is present.
vseVarDecl :: AParser st VarDecl
vseVarDecl = do
  v <- varId reservedWords
  c <- colonT
  s <- sortId reservedWords
  option (VarDecl v s Nothing $ tokPos c) $ do
    a <- asKey ":="
    t <- term reservedWords
    return $ VarDecl v s (Just t) $ toRange c [] a
-- | Convert VSE variable declarations into plain CASL variable declarations
-- plus the given program; every declaration that carried an initialising
-- term contributes a leading assignment sequenced before the program.
fromVarDecl :: [VarDecl] -> Program -> ([VAR_DECL], Program)
fromVarDecl vs p = case vs of
  [] -> ([], p)
  VarDecl v s mt r : n ->
    let (rs, q) = fromVarDecl n p
    in (Var_decl [v] s r : rs, case mt of
         Nothing -> q
         Just t -> Ranged (Seq (Ranged (Assign v t) r) q) r)
-- | Parse a single (non-sequenced) program statement: abort, skip, return,
-- begin/end block, declaration block, conditional, while loop, assignment,
-- or -- as a fallback -- a term interpreted as a procedure call.
-- Sequencing with @;@ is handled by 'programSeq'.
program :: AParser st Program
program = do
    t <- keyword "abort"
    return $ Ranged Abort $ tokPos t
  <|> do
    t <- keyword "skip"
    return $ Ranged Skip $ tokPos t
  <|> do
    r <- keyword "return"
    t <- term reservedWords
    return $ Ranged (Return t) $ tokPos r
  <|> do
    b <- keyword "begin"
    p <- programSeq
    e <- keyword "end"
    return $ Ranged (Block [] p) $ toRange b [] e
  <|> do
    d <- keyword "declare"
    (vs, ps) <- separatedBy vseVarDecl commaT
    s <- anSemi
    p <- programSeq
    -- Initialisers of the declarations become leading assignments.
    let (cs, q) = fromVarDecl vs p
    return $ Ranged (Block cs q) $ toRange d ps s
  <|> do
    i <- keyword "if"
    c <- formula reservedWords
    p <- keyword "then"
    t <- programSeq
    -- Either "fi" right away (one-armed if; the else branch is skip) ...
    do r <- keyword "fi"
       let s = toRange i [p] r
       return $ Ranged (If c t $ Ranged Skip s) s
      <|> do
      -- ... or an explicit else branch before "fi".
      q <- keyword "else"
      e <- programSeq
      r <- keyword "fi"
      return $ Ranged (If c t e) $ toRange i [p, q] r
  <|> do
    w <- keyword "while"
    c <- formula reservedWords
    d <- keyword "do"
    p <- programSeq
    o <- keyword "od"
    return $ Ranged (While c p) $ toRange w [d] o
  <|> do
    -- Assignment needs try: "v :=" shares a prefix with the call fallback.
    (v, a) <- try $ do
      v <- varId reservedWords
      a <- asKey ":="
      return (v, a)
    t <- term reservedWords
    return $ Ranged (Assign v t) $ tokPos a
  <|> do
    -- Fallback: any remaining term is treated as a procedure call.
    t <- term reservedWords
    return . Ranged (Call $ Mixfix_formula t) . Range $ rangeSpan t
-- | One or more programs separated by semicolons, folded to the right into
-- nested 'Seq' nodes; each 'Seq' takes its range from its semicolon.
programSeq :: AParser st Program
programSeq = do
  first <- program
  option first $ do
    semi <- semiT
    rest <- programSeq
    return $ Ranged (Seq first rest) $ tokPos semi
-- | Parse either the keyword "procedure" or "function", returning the
-- corresponding kind paired with its keyword token.
procKind :: AParser st (ProcKind, Token)
procKind = kindOf Proc "procedure" <|> kindOf Func "function"
  where
    kindOf kind word = do
      tok <- keyword word
      return (kind, tok)
-- | Parse a procedure definition: its kind, name, a parenthesised (possibly
-- empty) comma-separated list of formal parameters, and a body program.
defproc :: AParser st Defproc
defproc = do
  (pk, q) <- procKind
  i <- parseId reservedWords
  o <- oParenT
  (ts, ps) <- option ([], []) $
    varId reservedWords `separatedBy` commaT
  c <- cParenT
  p <- program
  return $ Defproc pk i ts p $ toRange q (o : ps) c
-- | Parse a program in dynamic-logic brackets: @<: p :>@ yields 'Diamond',
-- @[: p :]@ yields 'Box'; the opening and closing tokens are returned for
-- range computation. (The name's spelling is kept for compatibility.)
boxOrDiamandProg :: AParser st (Token, BoxOrDiamond, Program, Token)
boxOrDiamandProg = bracketed "<:" Diamond ":>" <|> bracketed "[:" Box ":]"
  where
    bracketed open kind close = do
      ot <- asKey open
      prog <- programSeq
      ct <- asKey close
      return (ot, kind, prog, ct)
-- | Parse a dynamic-logic formula: either a @defprocs ... defprocsend@
-- block of semicolon-separated procedure definitions, or a boxed/diamonded
-- program followed by a formula.
dlformula :: AParser st Dlformula
dlformula = do
    p <- keyword "defprocs"
    (ps, qs) <- separatedBy defproc semiT
    q <- keyword "defprocsend"
    return $ Ranged (Defprocs ps) $ toRange p qs q
  <|> do
    (o, b, p, c) <- boxOrDiamandProg
    f <- formula reservedWords
    return $ Ranged (Dlformula b p f) $ toRange o [] c
-- | Parse a procedure parameter: an @in@ or @out@ marker followed by a sort.
param :: AParser st Procparam
param = do
  direction <- fmap (const In) (keyword "in") <|> fmap (const Out) (keyword "out")
  sort <- sortId reservedWords
  return $ Procparam direction sort
-- | Parse a procedure profile: an optional comma-separated parameter list
-- and an optional result sort introduced by @->@.
profile :: AParser st Profile
profile = do
  (params, _) <- option ([], []) $ separatedBy param commaT
  result <- optionMaybe $ asKey "->" >> sortId reservedWords
  return $ Profile params result
-- | Parse a single procedure declaration of the form @name : profile@.
procdecl :: AParser st Sigentry
procdecl = do
  name <- parseId reservedWords
  colon <- colonT
  prof <- profile
  return $ Procedure name prof $ tokPos colon
-- | Parse a list of procedure declarations introduced by the keyword
-- @procedures@ (or singular @procedure@), delimited like other CASL item
-- lists by the declaration and start keywords.
procdecls :: AParser st Procdecls
procdecls = do
  k <- keyword "procedures" <|> keyword "procedure"
  auxItemList (declWords ++ startKeyword) [k] procdecl Procdecls
-- | Hook the dynamic-logic formula parser into the generic term parser.
instance TermParser Dlformula where
  termParser = aToTermParser dlformula

-- | Hook the procedure-declaration parser into the generic item parser.
instance AParsable Procdecls where
  aparser = procdecls
-- | Just for testing: parse a sentence from a string (with an empty keyword
-- list and empty annotation state) and pretty-print the result, or render
-- the parse error.
testParse :: String -> String
testParse str = case runParser (formula [] :: AParser () Sentence)
    (emptyAnnos ()) "" str of
  Left err -> showErr err
  Right ps -> showDoc ps ""
| keithodulaigh/Hets | VSE/Parse.hs | gpl-2.0 | 5,450 | 0 | 20 | 1,427 | 2,193 | 1,054 | 1,139 | 175 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ViewPatterns #-}
-- | Parsing command line targets
module Stack.Build.Target
( -- * Types
ComponentName
, UnresolvedComponent (..)
, RawTarget (..)
, LocalPackageView (..)
, SimpleTarget (..)
, NeedTargets (..)
-- * Parsers
, parseRawTarget
, parseTargets
) where
import Control.Applicative
import Control.Arrow (second)
import Control.Monad.Catch (MonadCatch, throwM)
import Control.Monad.IO.Class
import Data.Either (partitionEithers)
import Data.Foldable
import Data.List.Extra (groupSort)
import Data.List.NonEmpty (NonEmpty((:|)))
import qualified Data.List.NonEmpty as NonEmpty
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (mapMaybe)
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Text (Text)
import qualified Data.Text as T
import Path
import Path.Extra (rejectMissingDir)
import Path.IO
import Prelude hiding (concat, concatMap) -- Fix redundant import warnings
import Stack.Types
-- | The name of a component, which applies to executables, test suites, and benchmarks
type ComponentName = Text

-- | The original command-line text for a target, kept verbatim so error
-- messages can echo exactly what the user typed.
newtype RawInput = RawInput { unRawInput :: Text }
-- | Either a fully resolved component, or a component name that could be
-- either an executable, test, or benchmark
data UnresolvedComponent
    = ResolvedComponent !NamedComponent -- ^ the component's type is already known
    | UnresolvedComponent !ComponentName -- ^ a bare name, matched later against the package's components
    deriving (Show, Eq, Ord)
-- | Raw command line input, without checking against any databases or list of
-- locals. Does not deal with directories
data RawTarget (a :: RawTargetType) where
    -- | A component inside a named package, e.g. @pkg:exe:name@.
    RTPackageComponent :: !PackageName -> !UnresolvedComponent -> RawTarget a
    -- | A bare component name, e.g. @:name@.
    RTComponent :: !ComponentName -> RawTarget a
    -- | A whole package by name.
    RTPackage :: !PackageName -> RawTarget a
    -- | A package pinned to a version; only allowed before identifier
    -- resolution (see 'resolveIdents').
    RTPackageIdentifier :: !PackageIdentifier -> RawTarget 'HasIdents

deriving instance Show (RawTarget a)
deriving instance Eq (RawTarget a)
deriving instance Ord (RawTarget a)

-- | Phase index for 'RawTarget': whether 'RTPackageIdentifier' may occur.
data RawTargetType = HasIdents | NoIdents
-- | If this function returns @Nothing@, the input should be treated as a
-- directory.
--
-- Tries, in order: a versioned package identifier, a plain package name, a
-- bare @:component@, and finally the colon-separated package/component
-- forms @pkg:lib@, @pkg:comp@ and @pkg:type:comp@.
parseRawTarget :: Text -> Maybe (RawTarget 'HasIdents)
parseRawTarget t =
        (RTPackageIdentifier <$> parsePackageIdentifierFromString s)
    <|> (RTPackage <$> parsePackageNameFromString s)
    <|> (RTComponent <$> T.stripPrefix ":" t)
    <|> parsePackageComponent
  where
    s = T.unpack t
    parsePackageComponent =
      case T.splitOn ":" t of
        [pname, "lib"]
          | Just pname' <- parsePackageNameFromString (T.unpack pname) ->
              Just $ RTPackageComponent pname' $ ResolvedComponent CLib
        [pname, cname]
          | Just pname' <- parsePackageNameFromString (T.unpack pname) ->
              Just $ RTPackageComponent pname' $ UnresolvedComponent cname
        [pname, typ, cname]
          | Just pname' <- parsePackageNameFromString (T.unpack pname)
          , Just wrapper <- parseCompType typ ->
              Just $ RTPackageComponent pname' $ ResolvedComponent $ wrapper cname
        _ -> Nothing
    -- Component type prefixes accepted in the pkg:type:comp form.
    parseCompType t' =
      case t' of
        "exe" -> Just CExe
        "test" -> Just CTest
        "bench" -> Just CBench
        _ -> Nothing
-- | A view of a local package needed for resolving components
data LocalPackageView = LocalPackageView
    { lpvVersion :: !Version -- ^ version of the local package
    , lpvRoot :: !(Path Abs Dir) -- ^ root directory of the package
    , lpvCabalFP :: !(Path Abs File) -- ^ presumably the package's cabal file -- confirm
    , lpvComponents :: !(Set NamedComponent) -- ^ components the package defines
    , lpvExtraDep :: !TreatLikeExtraDep -- ^ treat like an extra-dep instead of a buildable target
    }
-- | Same as @parseRawTarget@, but also takes directories into account.
--
-- Input that is not a valid raw target is resolved as a directory; every
-- non-extra-dep local package rooted at or below that directory becomes an
-- 'RTPackage' target. Fails if the directory does not exist or contains no
-- local packages.
parseRawTargetDirs :: (MonadIO m, MonadCatch m)
                   => Path Abs Dir -- ^ current directory
                   -> Map PackageName LocalPackageView
                   -> Text
                   -> m (Either Text [(RawInput, RawTarget 'HasIdents)])
parseRawTargetDirs root locals t =
    case parseRawTarget t of
        Just rt -> return $ Right [(ri, rt)]
        Nothing -> do
            mdir <- forgivingAbsence (resolveDir root (T.unpack t))
              >>= rejectMissingDir
            case mdir of
                Nothing -> return $ Left $ "Directory not found: " `T.append` t
                Just dir ->
                    case mapMaybe (childOf dir) $ Map.toList locals of
                        [] -> return $ Left $
                            "No local directories found as children of " `T.append`
                            t
                        names -> return $ Right $ map ((ri, ) . RTPackage) names
  where
    ri = RawInput t
    -- A local package matches when its root is the directory itself or lies
    -- below it, and it is not marked as an extra-dep.
    childOf dir (name, lpv) =
        if (dir == lpvRoot lpv || isParentOf dir (lpvRoot lpv)) && not (lpvExtraDep lpv)
            then Just name
            else Nothing
-- | The classification of a resolved target.
data SimpleTarget
    = STUnknown -- ^ not found among locals, extra deps, or the snapshot
    | STNonLocal -- ^ found in the extra deps or the snapshot
    | STLocalComps !(Set NamedComponent) -- ^ specific components of a local package
    | STLocalAll -- ^ an entire local package
    deriving (Show, Eq, Ord)
-- | Resolve away 'RTPackageIdentifier' targets, turning each into an
-- 'RTPackage' plus (when the pinned version is not already provided by the
-- extra deps or snapshot) a new extra-dep entry. Version-pinned targets
-- naming a local package are rejected. All other target forms pass through
-- unchanged with no new extras.
resolveIdents :: Map PackageName Version -- ^ snapshot
              -> Map PackageName Version -- ^ extra deps
              -> Map PackageName LocalPackageView
              -> (RawInput, RawTarget 'HasIdents)
              -> Either Text ((RawInput, RawTarget 'NoIdents), Map PackageName Version)
resolveIdents _ _ _ (ri, RTPackageComponent x y) = Right ((ri, RTPackageComponent x y), Map.empty)
resolveIdents _ _ _ (ri, RTComponent x) = Right ((ri, RTComponent x), Map.empty)
resolveIdents _ _ _ (ri, RTPackage x) = Right ((ri, RTPackage x), Map.empty)
resolveIdents snap extras locals (ri, RTPackageIdentifier (PackageIdentifier name version)) =
    fmap ((ri, RTPackage name), ) newExtras
  where
    newExtras =
        case (Map.lookup name locals, mfound) of
            -- Error if it matches a local package, pkg idents not
            -- supported for local.
            (Just _, _) -> Left $ T.concat
                [ packageNameText name
                , " target has a specific version number, but it is a local package."
                , "\nTo avoid confusion, we will not install the specified version or build the local one."
                , "\nTo build the local package, specify the target without an explicit version."
                ]
            -- If the found version matches, no need for an extra-dep.
            (_, Just foundVersion) | foundVersion == version -> Right Map.empty
            -- Otherwise, if there is no specified version or a
            -- mismatch, add an extra-dep.
            _ -> Right $ Map.singleton name version
    -- First match wins: extra deps take precedence over the snapshot.
    mfound = asum (map (Map.lookup name) [extras, snap])
-- | Resolve a single identifier-free raw target against the local packages,
-- extra deps and snapshot, classifying it as a 'SimpleTarget'. Unknown or
-- ambiguous component names produce a 'Left' error message.
resolveRawTarget :: Map PackageName Version -- ^ snapshot
                 -> Map PackageName Version -- ^ extra deps
                 -> Map PackageName LocalPackageView
                 -> (RawInput, RawTarget 'NoIdents)
                 -> Either Text (PackageName, (RawInput, SimpleTarget))
resolveRawTarget snap extras locals (ri, rt) =
    go rt
  where
    -- pkg:component must name a local package owning that component.
    go (RTPackageComponent name ucomp) =
        case Map.lookup name locals of
            Nothing -> Left $ T.pack $ "Unknown local package: " ++ packageNameString name
            Just lpv ->
                case ucomp of
                    ResolvedComponent comp
                        | comp `Set.member` lpvComponents lpv ->
                            Right (name, (ri, STLocalComps $ Set.singleton comp))
                        | otherwise -> Left $ T.pack $ concat
                            [ "Component "
                            , show comp
                            , " does not exist in package "
                            , packageNameString name
                            ]
                    UnresolvedComponent comp ->
                        case filter (isCompNamed comp) $ Set.toList $ lpvComponents lpv of
                            [] -> Left $ T.concat
                                [ "Component "
                                , comp
                                , " does not exist in package "
                                , T.pack $ packageNameString name
                                ]
                            [x] -> Right (name, (ri, STLocalComps $ Set.singleton x))
                            matches -> Left $ T.concat
                                [ "Ambiguous component name "
                                , comp
                                , " for package "
                                , T.pack $ packageNameString name
                                , ": "
                                , T.pack $ show matches
                                ]
    -- A bare :component must match exactly one component across all locals.
    go (RTComponent cname) =
        let allPairs = concatMap
                (\(name, lpv) -> map (name,) $ Set.toList $ lpvComponents lpv)
                (Map.toList locals)
         in case filter (isCompNamed cname . snd) allPairs of
                [] -> Left $ "Could not find a component named " `T.append` cname
                [(name, comp)] ->
                    Right (name, (ri, STLocalComps $ Set.singleton comp))
                -- Fix: "Ambiugous" -> "Ambiguous", matching the spelling of
                -- the equivalent message in the RTPackageComponent case.
                matches -> Left $ T.concat
                    [ "Ambiguous component name "
                    , cname
                    , ", matches: "
                    , T.pack $ show matches
                    ]
    -- A plain package name: local wins, then extra deps, then the snapshot;
    -- anything else is reported as unknown (not an error here).
    go (RTPackage name) =
        case Map.lookup name locals of
            Just _lpv -> Right (name, (ri, STLocalAll))
            Nothing ->
                case Map.lookup name extras of
                    Just _ -> Right (name, (ri, STNonLocal))
                    Nothing ->
                        case Map.lookup name snap of
                            Just _ -> Right (name, (ri, STNonLocal))
                            Nothing -> Right (name, (ri, STUnknown))
-- | Does the user-supplied name match the component? The library component
-- has no name and therefore never matches.
isCompNamed :: Text -> NamedComponent -> Bool
isCompNamed name comp = case comp of
  CLib -> False
  CExe n -> name == n
  CTest n -> name == n
  CBench n -> name == n
-- | Merge per-package target results into a single map. Multiple targets
-- for one package are allowed only when all of them name specific
-- components (their component sets are unioned); any other combination is
-- reported as an overlapping-targets error, echoing the raw inputs.
simplifyTargets :: [(PackageName, (RawInput, SimpleTarget))]
                -> ([Text], Map PackageName SimpleTarget)
simplifyTargets =
    foldMap go . collect
  where
    go :: (PackageName, NonEmpty (RawInput, SimpleTarget))
       -> ([Text], Map PackageName SimpleTarget)
    -- Exactly one target for the package: take it as-is.
    go (name, (_, st) :| []) = ([], Map.singleton name st)
    go (name, pairs) =
        case partitionEithers $ map (getLocalComp . snd) (NonEmpty.toList pairs) of
            ([], comps) -> ([], Map.singleton name $ STLocalComps $ Set.unions comps)
            _ ->
                let err = T.pack $ concat
                        [ "Overlapping targets provided for package "
                        , packageNameString name
                        , ": "
                        , show $ map (unRawInput . fst) (NonEmpty.toList pairs)
                        ]
                 in ([err], Map.empty)
    -- Group the association list by key into non-empty value groups.
    collect :: Ord a => [(a, b)] -> [(a, NonEmpty b)]
    collect = map (second NonEmpty.fromList) . groupSort
    getLocalComp (STLocalComps comps) = Right comps
    getLocalComp _ = Left ()
-- | Need targets, e.g. `stack build` or allow none?
data NeedTargets
    = NeedTargets -- ^ at least one target must resolve to a package
    | AllowNoTargets -- ^ an empty resolved target set is acceptable
-- | Parse and resolve all command-line targets. With no explicit targets,
-- every non-extra-dep local package is targeted. Returns the extra deps
-- implied by version-pinned targets together with the resolved target map;
-- throws a 'TargetParseException' collecting every resolution error, or a
-- tailored message when targets were required but none resolved.
parseTargets :: (MonadCatch m, MonadIO m)
             => NeedTargets -- ^ need at least one target
             -> Bool -- ^ using implicit global project?
             -> Map PackageName Version -- ^ snapshot
             -> Map PackageName Version -- ^ extra deps
             -> Map PackageName LocalPackageView
             -> Path Abs Dir -- ^ current directory
             -> [Text] -- ^ command line targets
             -> m (Map PackageName Version, Map PackageName SimpleTarget)
parseTargets needTargets implicitGlobal snap extras locals currDir textTargets' = do
    let nonExtraDeps = Map.keys $ Map.filter (not . lpvExtraDep) locals
        -- Default to every non-extra-dep local package when none are given.
        textTargets =
            if null textTargets'
                then map (T.pack . packageNameString) nonExtraDeps
                else textTargets'
    erawTargets <- mapM (parseRawTargetDirs currDir locals) textTargets
    -- Each stage keeps its own error list; all are reported together below.
    let (errs1, rawTargets) = partitionEithers erawTargets
        (errs2, unzip -> (rawTargets', newExtras)) = partitionEithers $
            map (resolveIdents snap extras locals) $ concat rawTargets
        (errs3, targetTypes) = partitionEithers $
            map (resolveRawTarget snap extras locals) rawTargets'
        (errs4, targets) = simplifyTargets targetTypes
        errs = concat [errs1, errs2, errs3, errs4]
    if null errs
        then if Map.null targets
                 then case needTargets of
                          AllowNoTargets ->
                              return (Map.empty, Map.empty)
                          NeedTargets
                              | null textTargets' && implicitGlobal -> throwM $ TargetParseException
                                  ["The specified targets matched no packages.\nPerhaps you need to run 'stack init'?"]
                              | null textTargets' && null nonExtraDeps -> throwM $ TargetParseException
                                  ["The project contains no local packages (packages not marked with 'extra-dep')"]
                              | otherwise -> throwM $ TargetParseException
                                  ["The specified targets matched no packages"]
                 else return (Map.unions newExtras, targets)
        else throwM $ TargetParseException errs
| Heather/stack | src/Stack/Build/Target.hs | bsd-3-clause | 13,644 | 0 | 21 | 4,834 | 3,285 | 1,737 | 1,548 | 285 | 12 |
module NN.Examples.ImageNet(test, train) where
import Control.Lens
import Gen.Caffe.DataParameter.DB as DP
import Gen.Caffe.Phase as P
import NN.DSL
-- |Base layer specifications: an LMDB-backed data layer normalised with the
-- ImageNet mean image; shared by both phases below.
imagenetData = data'
  & meanFile' "data/ilsvrc12/imagenet_mean.binaryproto"
  & backend' LMDB
-- |Data
-- NOTE(review): both phases point at ilsvrc12_train_lmdb; the TEST phase
-- presumably ought to read the validation LMDB -- confirm upstream before
-- changing the source path.
test = imagenetData & phase' TEST & source' "examples/imagenet/ilsvrc12_train_lmdb"
train = imagenetData & phase' TRAIN & source' "examples/imagenet/ilsvrc12_train_lmdb"
| sjfloat/dnngraph | NN/Examples/ImageNet.hs | bsd-3-clause | 534 | 0 | 7 | 134 | 101 | 58 | 43 | 10 | 1 |
module AddRmParamSpec (main, spec) where
import Test.Hspec
import Language.Haskell.Refact.Refactoring.AddRmParam
import TestUtils
import System.Directory
-- ---------------------------------------------------------------------
-- | Test entry point: run the whole spec with hspec's default runner.
main :: IO ()
main = hspec spec
-- | End-to-end tests for the add-parameter and remove-parameter
-- refactorings. Each positive case runs the refactoring on a fixture
-- module, checks exactly which files were touched, and diffs every touched
-- file against its @.expected.hs@ counterpart (an empty diff means
-- success). Negative cases assert the exact failure message. The
-- commented-out @logTestSettings@ lines are kept for debugging runs.
spec :: Spec
spec = do
  describe "Adding" $ do
    it "addOneParameter in D3 A3" $ do
      r <- ct $ addOneParameter defaultTestSettings testOptions "./AddOneParameter/D3.hs" "y" (7,1)
      -- r <- ct $ addOneParameter logTestSettings testOptions "./AddOneParameter/D3.hs" "y" (7,1)
      r' <- ct $ mapM makeRelativeToCurrentDirectory r
      r' `shouldBe` [ "AddOneParameter/D3.hs"
                    , "AddOneParameter/A3.hs"
                    ]
      diffD <- ct $ compareFiles "./AddOneParameter/D3.expected.hs"
                                 "./AddOneParameter/D3.refactored.hs"
      diffD `shouldBe` []
      diffA <- ct $ compareFiles "./AddOneParameter/A3.expected.hs"
                                 "./AddOneParameter/A3.refactored.hs"
      diffA `shouldBe` []

    -- -------------------

    it "addOneParameter in D1 B1 C1 A1" $ do
      r <- ct $ addOneParameter defaultTestSettings testOptions "./AddOneParameter/D1.hs" "f" (10,1)
      -- r <- ct $ addOneParameter logTestSettings testOptions "./AddOneParameter/D1.hs" "f" (10,1)
      r' <- ct $ mapM makeRelativeToCurrentDirectory r
      r' `shouldBe` [ "AddOneParameter/D1.hs"
                    , "AddOneParameter/A1.hs"
                    , "AddOneParameter/C1.hs"
                    ]
      diffD <- ct $ compareFiles "./AddOneParameter/D1.expected.hs"
                                 "./AddOneParameter/D1.refactored.hs"
      diffD `shouldBe` []
      diffC <- ct $ compareFiles "./AddOneParameter/C1.expected.hs"
                                 "./AddOneParameter/C1.refactored.hs"
      diffC `shouldBe` []
      diffA <- ct $ compareFiles "./AddOneParameter/A1.expected.hs"
                                 "./AddOneParameter/A1.refactored.hs"
      diffA `shouldBe` []

    -- -------------------

    it "addOneParameter in D2 B2 C2 A2" $ do
      r <- ct $ addOneParameter defaultTestSettings testOptions "./AddOneParameter/D2.hs" "f" (11,1)
      -- r <- ct $ addOneParameter logTestSettings testOptions "./AddOneParameter/D2.hs" "f" (11,1)
      r' <- ct $ mapM makeRelativeToCurrentDirectory r
      r' `shouldBe` [ "AddOneParameter/D2.hs"
                    , "AddOneParameter/A2.hs"
                    , "AddOneParameter/C2.hs"
                    ]
      diffD <- ct $ compareFiles "./AddOneParameter/D2.expected.hs"
                                 "./AddOneParameter/D2.refactored.hs"
      diffD `shouldBe` []
      diffC <- ct $ compareFiles "./AddOneParameter/C2.expected.hs"
                                 "./AddOneParameter/C2.refactored.hs"
      diffC `shouldBe` []
      diffA <- ct $ compareFiles "./AddOneParameter/A2.expected.hs"
                                 "./AddOneParameter/A2.refactored.hs"
      diffA `shouldBe` []

    -- -------------------

    it "addOneParameter in PatIn1" $ do
      r <- ct $ addOneParameter defaultTestSettings testOptions "./AddOneParameter/PatIn1.hs" "x" (7,1)
      -- r <- ct $ addOneParameter logTestSettings testOptions "./AddOneParameter/PatIn1.hs" "x" (7,1)
      r' <- ct $ mapM makeRelativeToCurrentDirectory r
      r' `shouldBe` [ "AddOneParameter/PatIn1.hs"
                    ]
      diffD <- ct $ compareFiles "./AddOneParameter/PatIn1.expected.hs"
                                 "./AddOneParameter/PatIn1.refactored.hs"
      diffD `shouldBe` []

    -- -------------------

    it "addOneParameter in FunIn1" $ do
      -- (["FunIn1.hs"],["y","7","1"]),
      r <- ct $ addOneParameter defaultTestSettings testOptions "./AddOneParameter/FunIn1.hs" "y" (7,1)
      -- r <- ct $ addOneParameter logTestSettings testOptions "./AddOneParameter/FunIn1.hs" "y" (7,1)
      r' <- ct $ mapM makeRelativeToCurrentDirectory r
      r' `shouldBe` [ "AddOneParameter/FunIn1.hs"
                    ]
      diffD <- ct $ compareFiles "./AddOneParameter/FunIn1.expected.hs"
                                 "./AddOneParameter/FunIn1.refactored.hs"
      diffD `shouldBe` []

    -- -------------------

    it "addOneParameter in FunIn2" $ do
      -- (["FunIn2.hs"],["y","10","18"]),
      r <- ct $ addOneParameter defaultTestSettings testOptions "./AddOneParameter/FunIn2.hs" "y" (10,18)
      -- r <- ct $ addOneParameter logTestSettings testOptions "./AddOneParameter/FunIn2.hs" "y" (10,18)
      r' <- ct $ mapM makeRelativeToCurrentDirectory r
      r' `shouldBe` [ "AddOneParameter/FunIn2.hs"
                    ]
      diffD <- ct $ compareFiles "./AddOneParameter/FunIn2.expected.hs"
                                 "./AddOneParameter/FunIn2.refactored.hs"
      diffD `shouldBe` []

    -- -------------------

    it "addOneParameter in FunIn3" $ do
      -- (["FunIn3.hs"],["y","9","11"]),
      r <- ct $ addOneParameter defaultTestSettings testOptions "./AddOneParameter/FunIn3.hs" "y" (9,11)
      -- r <- ct $ addOneParameter logTestSettings testOptions "./AddOneParameter/FunIn3.hs" "y" (9,11)
      r' <- ct $ mapM makeRelativeToCurrentDirectory r
      r' `shouldBe` [ "AddOneParameter/FunIn3.hs"
                    ]
      diffD <- ct $ compareFiles "./AddOneParameter/FunIn3.expected.hs"
                                 "./AddOneParameter/FunIn3.refactored.hs"
      diffD `shouldBe` []

    -- -------------------

    it "addOneParameter in FunIn4" $ do
      -- (["FunIn4.hs"],["y","8","22"])],
      r <- ct $ addOneParameter defaultTestSettings testOptions "./AddOneParameter/FunIn4.hs" "y" (8,22)
      -- r <- ct $ addOneParameter logTestSettings testOptions "./AddOneParameter/FunIn4.hs" "y" (8,22)
      r' <- ct $ mapM makeRelativeToCurrentDirectory r
      r' `shouldBe` [ "AddOneParameter/FunIn4.hs"
                    ]
      diffD <- ct $ compareFiles "./AddOneParameter/FunIn4.expected.hs"
                                 "./AddOneParameter/FunIn4.refactored.hs"
      diffD `shouldBe` []

    -- -------------------

    it "addOneParameter in FunIn6" $ do
      -- (["FunIn6.hs"],["y","8","22"])],
      r <- ct $ addOneParameter defaultTestSettings testOptions "./AddOneParameter/FunIn6.hs" "y" (9,7)
      -- r <- ct $ addOneParameter logTestSettings testOptions "./AddOneParameter/FunIn6.hs" "y" (9,7)
      r' <- ct $ mapM makeRelativeToCurrentDirectory r
      r' `shouldBe` [ "AddOneParameter/FunIn6.hs"
                    ]
      diffD <- ct $ compareFiles "./AddOneParameter/FunIn6.expected.hs"
                                 "./AddOneParameter/FunIn6.refactored.hs"
      diffD `shouldBe` []

    -- -------------------

    it "addOneParameter in Nested" $ do
      r <- ct $ addOneParameter defaultTestSettings testOptions "./AddOneParameter/Nested.hs" "y" (8,1)
      -- r <- ct $ addOneParameter logTestSettings testOptions "./AddOneParameter/Nested.hs" "y" (8,1)
      r' <- ct $ mapM makeRelativeToCurrentDirectory r
      r' `shouldBe` [ "AddOneParameter/Nested.hs"
                    ]
      diffD <- ct $ compareFiles "./AddOneParameter/Nested.expected.hs"
                                 "./AddOneParameter/Nested.refactored.hs"
      diffD `shouldBe` []

    -- -------------------

    it "addOneParameter in MultiFun1" $ do
      r <- ct $ addOneParameter defaultTestSettings testOptions "./AddOneParameter/MultiFun1.hs" "x" (9,1)
      -- r <- ct $ addOneParameter logTestSettings testOptions "./AddOneParameter/MultiFun1.hs" "x" (9,1)
      r' <- ct $ mapM makeRelativeToCurrentDirectory r
      r' `shouldBe` [ "AddOneParameter/MultiFun1.hs"
                    ]
      diffD <- ct $ compareFiles "./AddOneParameter/MultiFun1.expected.hs"
                                 "./AddOneParameter/MultiFun1.refactored.hs"
      diffD `shouldBe` []

    -- ---------------------------------
    -- Negative tests
    -- ---------------------------------

    it "fails complex pat binding PatIn2" $ do
      -- [(["PatIn2.hs"],["x","7","20"]),
      res <- catchException (ct $ addOneParameter defaultTestSettings testOptions "./AddOneParameter/PatIn2.hs" "x" (7,20))
      (show res) `shouldBe` "Just \"Parameter can not be added to complex pattern binding\""

    -- -------------------

    it "fails name clash FunIn5" $ do
      -- (["FunIn5.hs"],["h","8","1"])]
      res <- catchException (ct $ addOneParameter defaultTestSettings testOptions "./AddOneParameter/FunIn5.hs" "h" (8,1))
      (show res) `shouldBe` "Just \"The new parameter name will cause name clash or semantics change, please choose another name!\""

    -- -------------------
    {-
    TestCases{refactorCmd="addOneParameter",
    positive=[(["D3.hs","A3.hs"],["y","7","1"]),
    (["D1.hs","C1.hs","A1.hs"],["f","10","1"]),
    (["D2.hs","C2.hs","A2.hs"],["f","11","1"]),
    (["PatIn1.hs"],["x","7","1"]),
    (["FunIn1.hs"],["y","7","1"]),
    (["FunIn2.hs"],["y","10","18"]),
    (["FunIn3.hs"],["y","9","11"]),
    (["FunIn4.hs"],["y","8","22"])],
    negative=[(["PatIn2.hs"],["x","7","20"]),
    (["FunIn5.hs"],["h","8","1"])]
    -}
    -- ---------------------------------

  describe "Removing" $ do
    it "rmOneParameter in D1 A1" $ do
      -- (["D1.hs","A1.hs"],["6","19"]),
      r <- ct $ rmOneParameter defaultTestSettings testOptions "./RmOneParameter/D1.hs" (6,19)
      -- r <- ct $ rmOneParameter logTestSettings testOptions "./RmOneParameter/D1.hs" (6,19)
      r' <- ct $ mapM makeRelativeToCurrentDirectory r
      r' `shouldBe` [ "RmOneParameter/D1.hs"
                    , "RmOneParameter/A1.hs"
                    ]
      diffD <- ct $ compareFiles "./RmOneParameter/D1.expected.hs"
                                 "./RmOneParameter/D1.refactored.hs"
      diffD `shouldBe` []
      diffA <- ct $ compareFiles "./RmOneParameter/A1.expected.hs"
                                 "./RmOneParameter/A1.refactored.hs"
      diffA `shouldBe` []

    -- -------------------

    it "rmOneParameter in D2 A2" $ do
      -- (["D2.hs","A2.hs"],["7","19"]),
      r <- ct $ rmOneParameter defaultTestSettings testOptions "./RmOneParameter/D2.hs" (7,19)
      -- r <- ct $ rmOneParameter logTestSettings testOptions "./RmOneParameter/D2.hs" (7,19)
      r' <- ct $ mapM makeRelativeToCurrentDirectory r
      r' `shouldBe` [ "RmOneParameter/D2.hs"
                    , "RmOneParameter/A2.hs"
                    ]
      diffD <- ct $ compareFiles "./RmOneParameter/D2.expected.hs"
                                 "./RmOneParameter/D2.refactored.hs"
      diffD `shouldBe` []
      diffA <- ct $ compareFiles "./RmOneParameter/A2.expected.hs"
                                 "./RmOneParameter/A2.refactored.hs"
      diffA `shouldBe` []

    -- -------------------

    it "rmOneParameter in FunIn1" $ do
      -- (["FunIn1.hs"],["8","5"]),
      r <- ct $ rmOneParameter defaultTestSettings testOptions "./RmOneParameter/FunIn1.hs" (8,5)
      -- r <- ct $ rmOneParameter logTestSettings testOptions "./RmOneParameter/FunIn1.hs" (8,5)
      r' <- ct $ mapM makeRelativeToCurrentDirectory r
      r' `shouldBe` [ "RmOneParameter/FunIn1.hs"
                    ]
      diffD <- ct $ compareFiles "./RmOneParameter/FunIn1.expected.hs"
                                 "./RmOneParameter/FunIn1.refactored.hs"
      diffD `shouldBe` []

    -- -------------------

    it "rmOneParameter in FunIn2" $ do
      -- (["FunIn2.hs"],["8","5"]),
      r <- ct $ rmOneParameter defaultTestSettings testOptions "./RmOneParameter/FunIn2.hs" (8,5)
      -- r <- ct $ rmOneParameter logTestSettings testOptions "./RmOneParameter/FunIn2.hs" (8,5)
      r' <- ct $ mapM makeRelativeToCurrentDirectory r
      r' `shouldBe` [ "RmOneParameter/FunIn2.hs"
                    ]
      diffD <- ct $ compareFiles "./RmOneParameter/FunIn2.expected.hs"
                                 "./RmOneParameter/FunIn2.refactored.hs"
      diffD `shouldBe` []

    -- -------------------

    it "rmOneParameter in FunIn2a" $ do
      -- (["FunIn2.hs"],["8","5"]),
      r <- ct $ rmOneParameter defaultTestSettings testOptions "./RmOneParameter/FunIn2a.hs" (8,5)
      -- r <- ct $ rmOneParameter logTestSettings testOptions "./RmOneParameter/FunIn2a.hs" (8,5)
      r' <- ct $ mapM makeRelativeToCurrentDirectory r
      r' `shouldBe` [ "RmOneParameter/FunIn2a.hs"
                    ]
      diffD <- ct $ compareFiles "./RmOneParameter/FunIn2a.expected.hs"
                                 "./RmOneParameter/FunIn2a.refactored.hs"
      diffD `shouldBe` []

    -- -------------------

    it "rmOneParameter in FunIn3" $ do
      -- (["FunIn3.hs"],["7","5"]),
      r <- ct $ rmOneParameter defaultTestSettings testOptions "./RmOneParameter/FunIn3.hs" (7,5)
      -- r <- ct $ rmOneParameter logTestSettings testOptions "./RmOneParameter/FunIn3.hs" (7,5)
      r' <- ct $ mapM makeRelativeToCurrentDirectory r
      r' `shouldBe` [ "RmOneParameter/FunIn3.hs"
                    ]
      diffD <- ct $ compareFiles "./RmOneParameter/FunIn3.expected.hs"
                                 "./RmOneParameter/FunIn3.refactored.hs"
      diffD `shouldBe` []

    -- -------------------

    it "rmOneParameter in FunIn5" $ do
      -- (["FunIn5.hs"],["7","6"]),
      r <- ct $ rmOneParameter defaultTestSettings testOptions "./RmOneParameter/FunIn5.hs" (7,6)
      -- r <- ct $ rmOneParameter logTestSettings testOptions "./RmOneParameter/FunIn5.hs" (7,6)
      r' <- ct $ mapM makeRelativeToCurrentDirectory r
      r' `shouldBe` [ "RmOneParameter/FunIn5.hs"
                    ]
      diffD <- ct $ compareFiles "./RmOneParameter/FunIn5.expected.hs"
                                 "./RmOneParameter/FunIn5.refactored.hs"
      diffD `shouldBe` []

    -- -------------------

    it "rmOneParameter in FunIn6" $ do
      -- (["FunIn6.hs"],["7","5"]),
      r <- ct $ rmOneParameter defaultTestSettings testOptions "./RmOneParameter/FunIn6.hs" (7,5)
      -- r <- ct $ rmOneParameter logTestSettings testOptions "./RmOneParameter/FunIn6.hs" (7,5)
      r' <- ct $ mapM makeRelativeToCurrentDirectory r
      r' `shouldBe` [ "RmOneParameter/FunIn6.hs"
                    ]
      diffD <- ct $ compareFiles "./RmOneParameter/FunIn6.expected.hs"
                                 "./RmOneParameter/FunIn6.refactored.hs"
      diffD `shouldBe` []

    -- -------------------

    it "rmOneParameter in FunIn0" $ do
      -- (["FunIn0.hs"],["10","7"])],
      r <- ct $ rmOneParameter defaultTestSettings testOptions "./RmOneParameter/FunIn0.hs" (10,7)
      -- r <- ct $ rmOneParameter logTestSettings testOptions "./RmOneParameter/FunIn0.hs" (10,7)
      r' <- ct $ mapM makeRelativeToCurrentDirectory r
      r' `shouldBe` [ "RmOneParameter/FunIn0.hs"
                    ]
      diffD <- ct $ compareFiles "./RmOneParameter/FunIn0.expected.hs"
                                 "./RmOneParameter/FunIn0.refactored.hs"
      diffD `shouldBe` []

    -- -------------------

    it "rmOneParameter in SubFun1" $ do
      r <- ct $ rmOneParameter defaultTestSettings testOptions "./RmOneParameter/SubFun1.hs" (10,9)
      -- r <- ct $ rmOneParameter logTestSettings testOptions "./RmOneParameter/SubFun1.hs" (10,9)
      r' <- ct $ mapM makeRelativeToCurrentDirectory r
      r' `shouldBe` [ "RmOneParameter/SubFun1.hs"
                    ]
      diffD <- ct $ compareFiles "./RmOneParameter/SubFun1.expected.hs"
                                 "./RmOneParameter/SubFun1.refactored.hs"
      diffD `shouldBe` []

    -- -------------------

    it "rmOneParameter in SubFun2" $ do
      r <- ct $ rmOneParameter defaultTestSettings testOptions "./RmOneParameter/SubFun2.hs" (10,9)
      -- r <- ct $ rmOneParameter logTestSettings testOptions "./RmOneParameter/SubFun2.hs" (10,9)
      r' <- ct $ mapM makeRelativeToCurrentDirectory r
      r' `shouldBe` [ "RmOneParameter/SubFun2.hs"
                    ]
      diffD <- ct $ compareFiles "./RmOneParameter/SubFun2.expected.hs"
                                 "./RmOneParameter/SubFun2.refactored.hs"
      diffD `shouldBe` []

    -- -------------------

    it "rmOneParameter in SubFun3" $ do
      r <- ct $ rmOneParameter defaultTestSettings testOptions "./RmOneParameter/SubFun3.hs" (10,9)
      -- r <- ct $ rmOneParameter logTestSettings testOptions "./RmOneParameter/SubFun3.hs" (10,9)
      r' <- ct $ mapM makeRelativeToCurrentDirectory r
      r' `shouldBe` [ "RmOneParameter/SubFun3.hs"
                    ]
      diffD <- ct $ compareFiles "./RmOneParameter/SubFun3.expected.hs"
                                 "./RmOneParameter/SubFun3.refactored.hs"
      diffD `shouldBe` []

    -- -------------------

    it "rmOneParameter in MultiFun1" $ do
      r <- ct $ rmOneParameter defaultTestSettings testOptions "./RmOneParameter/MultiFun1.hs" (10,8)
      -- r <- ct $ rmOneParameter logTestSettings testOptions "./RmOneParameter/MultiFun1.hs" (10,8)
      r' <- ct $ mapM makeRelativeToCurrentDirectory r
      r' `shouldBe` [ "RmOneParameter/MultiFun1.hs"
                    ]
      diffD <- ct $ compareFiles "./RmOneParameter/MultiFun1.expected.hs"
                                 "./RmOneParameter/MultiFun1.refactored.hs"
      diffD `shouldBe` []

    -- ---------------------------------
    -- Negative tests
    -- ---------------------------------

    it "fails FunIn4" $ do
      -- (["FunIn4.hs"],["7","6"]),
      res <- catchException (ct $ rmOneParameter defaultTestSettings testOptions "./RmOneParameter/FunIn4.hs" (7,6))
      -- ct $ rmOneParameter logTestSettings testOptions "./RmOneParameter/FunIn4.hs" (7,6)
      (show res) `shouldBe` "Just \"This parameter can not be removed, as it is used!\""

    -- -------------------

    it "fails FunIn7" $ do
      -- (["FunIn7.hs"],["10","4"])]
      res <- catchException (ct $ rmOneParameter defaultTestSettings testOptions "./RmOneParameter/FunIn7.hs" (10,4))
      (show res) `shouldBe` "Just \"Invalid cursor position!\""

    -- -------------------
    {-
    TestCases{refactorCmd="rmOneParameter",
    positive=[
    (["D1.hs","A1.hs"],["6","19"]),
    (["D2.hs","A2.hs"],["7","19"]),
    (["FunIn1.hs"],["8","5"]),
    (["FunIn2.hs"],["8","5"]),
    (["FunIn3.hs"],["7","5"]),
    (["FunIn5.hs"],["7","6"]),
    (["FunIn6.hs"],["7","5"]),
    (["FunIn0.hs"],["10","7"])],
    negative=[(["FunIn4.hs"],["7","6"]),
    (["FunIn7.hs"],["10","4"])]
    }

    TODO: Add a test for a PatBind too
    -}
| SAdams601/ParRegexSearch | test/HaRe/test/AddRmParamSpec.hs | mit | 18,605 | 0 | 18 | 4,924 | 2,811 | 1,411 | 1,400 | 220 | 1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<!-- JavaHelp helpset (Azerbaijani locale) for the "Directory List v2.3 LC"
     add-on: declares the home topic map plus TOC, index, full-text search,
     and favorites navigation views. -->
<helpset version="2.0" xml:lang="az-AZ">
  <title>Directory List v2.3 LC</title>
  <maps>
    <homeID>directorylistv2_3_lc</homeID>
    <mapref location="map.jhm"/>
  </maps>
  <!-- Table of contents, rendered by ZAP's custom TOC view. -->
  <view>
    <name>TOC</name>
    <label>Contents</label>
    <type>org.zaproxy.zap.extension.help.ZapTocView</type>
    <data>toc.xml</data>
  </view>
  <view>
    <name>Index</name>
    <label>Index</label>
    <type>javax.help.IndexView</type>
    <data>index.xml</data>
  </view>
  <!-- Full-text search backed by the pre-built JavaHelpSearch database. -->
  <view>
    <name>Search</name>
    <label>Search</label>
    <type>javax.help.SearchView</type>
    <data engine="com.sun.java.help.search.DefaultSearchEngine">
      JavaHelpSearch
    </data>
  </view>
  <view>
    <name>Favorites</name>
    <label>Favorites</label>
    <type>javax.help.FavoritesView</type>
  </view>
</helpset> | thc202/zap-extensions | addOns/directorylistv2_3_lc/src/main/javahelp/help_az_AZ/helpset_az_AZ.hs | apache-2.0 | 984 | 78 | 66 | 158 | 414 | 210 | 204 | -1 | -1 |
{-# LANGUAGE CPP, NamedFieldPuns, NondecreasingIndentation #-}
{-# OPTIONS_GHC -fno-cse #-}
-- -fno-cse is needed for GLOBAL_VAR's to behave properly
-----------------------------------------------------------------------------
--
-- GHC Driver
--
-- (c) The University of Glasgow 2005
--
-----------------------------------------------------------------------------
module DriverPipeline (
-- Run a series of compilation steps in a pipeline, for a
-- collection of source files.
oneShot, compileFile,
-- Interfaces for the batch-mode driver
linkBinary,
-- Interfaces for the compilation manager (interpreted/batch-mode)
preprocess,
compileOne, compileOne',
link,
-- Exports for hooks to override runPhase and link
PhasePlus(..), CompPipeline(..), PipeEnv(..), PipeState(..),
phaseOutputFilename, getPipeState, getPipeEnv,
hscPostBackendPhase, getLocation, setModLocation, setDynFlags,
runPhase, exeFileName,
mkExtraObjToLinkIntoBinary, mkNoteObjsToLinkIntoBinary,
maybeCreateManifest,
linkingNeeded, checkLinkInfo, writeInterfaceOnlyMode
) where
#include "HsVersions.h"
import PipelineMonad
import Packages
import HeaderInfo
import DriverPhases
import SysTools
import HscMain
import Finder
import HscTypes hiding ( Hsc )
import Outputable
import Module
import UniqFM ( eltsUFM )
import ErrUtils
import DynFlags
import Config
import Panic
import Util
import StringBuffer ( hGetStringBuffer )
import BasicTypes ( SuccessFlag(..) )
import Maybes ( expectJust )
import SrcLoc
import FastString
import LlvmCodeGen ( llvmFixupAsm )
import MonadUtils
import Platform
import TcRnTypes
import Hooks
import Exception
import Data.IORef ( readIORef )
import System.Directory
import System.FilePath
import System.IO
import Control.Monad
import Data.List ( isSuffixOf )
import Data.Maybe
import Data.Char
-- ---------------------------------------------------------------------------
-- Pre-process
-- | Just preprocess a file, put the result in a temp. file (used by the
-- compilation manager during the summary phase).
--
-- We return the augmented DynFlags, because they contain the result
-- of slurping in the OPTIONS pragmas
preprocess :: HscEnv
           -> (FilePath, Maybe Phase) -- ^ filename and starting phase
           -> IO (DynFlags, FilePath)
preprocess hsc_env (filename, mb_phase) =
  -- ASSERT2 is the assertion macro from "HsVersions.h": if the file is not
  -- recognisable as Haskell source by its name, an explicit starting phase
  -- must have been supplied.
  ASSERT2(isJust mb_phase || isHaskellSrcFilename filename, text filename)
  -- Run the pipeline only up to 'anyHsc' (i.e. stop before compilation
  -- proper), writing the result to a temporary file.
  runPipeline anyHsc hsc_env (filename, fmap RealPhase mb_phase)
        Nothing Temporary Nothing{-no ModLocation-} Nothing{-no stub-}
-- ---------------------------------------------------------------------------
-- | Compile
--
-- Compile a single module, under the control of the compilation manager.
--
-- This is the interface between the compilation manager and the
-- compiler proper (hsc), where we deal with tedious details like
-- reading the OPTIONS pragma from the source file, converting the
-- C or assembly that GHC produces into an object file, and compiling
-- FFI stub files.
--
-- NB. No old interface can also mean that the source has changed.
compileOne :: HscEnv
           -> ModSummary      -- ^ summary for module being compiled
           -> Int             -- ^ module N ...
           -> Int             -- ^ ... of M
           -> Maybe ModIface  -- ^ old interface, if we have one
           -> Maybe Linkable  -- ^ old linkable, if we have one
           -> SourceModified
           -> IO HomeModInfo  -- ^ the complete HomeModInfo, if successful

-- Default entry point: no pre-typechecked result, standard batch-mode
-- progress messager.
compileOne = compileOne' Nothing (Just batchMsg)
compileOne' :: Maybe TcGblEnv      -- ^ typechecked result, if already available
            -> Maybe Messager      -- ^ how to report progress
            -> HscEnv
            -> ModSummary      -- ^ summary for module being compiled
            -> Int             -- ^ module N ...
            -> Int             -- ^ ... of M
            -> Maybe ModIface  -- ^ old interface, if we have one
            -> Maybe Linkable  -- ^ old linkable, if we have one
            -> SourceModified
            -> IO HomeModInfo  -- ^ the complete HomeModInfo, if successful

compileOne' m_tc_result mHscMessage
            hsc_env0 summary mod_index nmods mb_old_iface maybe_old_linkable
            source_modified0
 = do
   let dflags0     = ms_hspp_opts summary
       this_mod    = ms_mod summary
       src_flavour = ms_hsc_src summary
       location    = ms_location summary
       input_fn    = expectJust "compile:hs" (ml_hs_file location)
       input_fnpp  = ms_hspp_file summary
       mod_graph   = hsc_mod_graph hsc_env0
       needsTH     = any (xopt Opt_TemplateHaskell . ms_hspp_opts) mod_graph
       needsQQ     = any (xopt Opt_QuasiQuotes . ms_hspp_opts) mod_graph
       needsLinker = needsTH || needsQQ
       isDynWay    = any (== WayDyn) (ways dflags0)
       isProfWay   = any (== WayProf) (ways dflags0)

   -- #8180 - when using TemplateHaskell, switch on -dynamic-too so
   -- the linker can correctly load the object files.
   let dflags1 = if needsLinker && dynamicGhc && not isDynWay && not isProfWay
                  then gopt_set dflags0 Opt_BuildDynamicToo
                  else dflags0

   debugTraceMsg dflags1 2 (text "compile: input file" <+> text input_fnpp)

   let basename = dropExtension input_fn

   -- We add the directory in which the .hs files resides) to the import
   -- path.  This is needed when we try to compile the .hc file later, if it
   -- imports a _stub.h file that we created here.
   let current_dir = takeDirectory basename
       old_paths   = includePaths dflags1
       dflags      = dflags1 { includePaths = current_dir : old_paths }
       hsc_env     = hsc_env0 {hsc_dflags = dflags}

   -- Figure out what lang we're generating
   let hsc_lang = hscTarget dflags
   -- ... and what the next phase should be
   let next_phase = hscPostBackendPhase dflags src_flavour hsc_lang
   -- ... and what file to generate the output into
   output_fn <- getOutputFilename next_phase
                        Temporary basename dflags next_phase (Just location)

   -- -fforce-recomp should also work with --make
   let force_recomp = gopt Opt_ForceRecomp dflags
       source_modified
         | force_recomp = SourceModified
         | otherwise    = source_modified0

       object_filename = ml_obj_file location

   -- The bytecode backend always runs the basic recompilation check.
   let always_do_basic_recompilation_check = case hsc_lang of
                                             HscInterpreted -> True
                                             _ -> False

   e <- genericHscCompileGetFrontendResult
            always_do_basic_recompilation_check
            m_tc_result mHscMessage
            hsc_env summary source_modified mb_old_iface (mod_index, nmods)

   case e of
       -- Frontend decided no recompilation is needed: reuse the old
       -- interface (and the old linkable, when we are actually linking).
       Left iface ->
           do details <- genModDetails hsc_env iface
              MASSERT(isJust maybe_old_linkable || isNoLink (ghcLink dflags))
              return (HomeModInfo{ hm_details = details,
                                   hm_iface = iface,
                                   hm_linkable = maybe_old_linkable })

       -- Recompilation required: run the backend appropriate for hsc_lang.
       Right (tc_result, mb_old_hash) ->
           -- run the compiler
           case hsc_lang of
             HscInterpreted ->
                 case ms_hsc_src summary of
                 t | isHsBootOrSig t ->
                     -- Boot/sig files produce no code, only an interface.
                     do (iface, _changed, details) <- hscSimpleIface hsc_env tc_result mb_old_hash
                        return (HomeModInfo{ hm_details = details,
                                             hm_iface = iface,
                                             hm_linkable = maybe_old_linkable })
                 _ -> do guts0 <- hscDesugar hsc_env summary tc_result
                         guts <- hscSimplify hsc_env guts0
                         (iface, _changed, details, cgguts) <- hscNormalIface hsc_env guts mb_old_hash
                         (hasStub, comp_bc, modBreaks) <- hscInteractive hsc_env cgguts summary

                         -- Foreign-export stubs still need compiling to a
                         -- real object file, even for the interpreter.
                         stub_o <- case hasStub of
                                   Nothing -> return []
                                   Just stub_c -> do
                                       stub_o <- compileStub hsc_env stub_c
                                       return [DotO stub_o]

                         let hs_unlinked = [BCOs comp_bc modBreaks]
                             unlinked_time = ms_hs_date summary
                           -- Why do we use the timestamp of the source file here,
                           -- rather than the current time?  This works better in
                           -- the case where the local clock is out of sync
                           -- with the filesystem's clock.  It's just as accurate:
                           -- if the source is modified, then the linkable will
                           -- be out of date.
                         let linkable = LM unlinked_time this_mod
                                        (hs_unlinked ++ stub_o)

                         return (HomeModInfo{ hm_details = details,
                                              hm_iface = iface,
                                              hm_linkable = Just linkable })
             HscNothing ->
                 do (iface, changed, details) <- hscSimpleIface hsc_env tc_result mb_old_hash
                    when (gopt Opt_WriteInterface dflags) $
                       hscWriteIface dflags iface changed summary
                    let linkable = if isHsBootOrSig src_flavour
                                   then maybe_old_linkable
                                   else Just (LM (ms_hs_date summary) this_mod [])
                    return (HomeModInfo{ hm_details = details,
                                         hm_iface = iface,
                                         hm_linkable = linkable })

             -- All compiled-code backends.
             _ ->
                 case ms_hsc_src summary of
                 HsBootFile ->
                     do (iface, changed, details) <- hscSimpleIface hsc_env tc_result mb_old_hash
                        hscWriteIface dflags iface changed summary
                        touchObjectFile dflags object_filename
                        return (HomeModInfo{ hm_details = details,
                                             hm_iface = iface,
                                             hm_linkable = maybe_old_linkable })

                 HsigFile ->
                     do (iface, changed, details) <-
                            hscSimpleIface hsc_env tc_result mb_old_hash
                        hscWriteIface dflags iface changed summary

                        -- #10660: Use the pipeline instead of calling
                        -- compileEmptyStub directly, so -dynamic-too gets
                        -- handled properly
                        let mod_name = ms_mod_name summary
                        _ <- runPipeline StopLn hsc_env
                                 (output_fn,
                                  Just (HscOut src_flavour mod_name HscUpdateSig))
                                 (Just basename)
                                 Persistent
                                 (Just location)
                                 Nothing

                        -- Same as Hs
                        o_time <- getModificationUTCTime object_filename
                        let linkable =
                                LM o_time this_mod [DotO object_filename]

                        return (HomeModInfo{ hm_details = details,
                                             hm_iface = iface,
                                             hm_linkable = Just linkable })

                 HsSrcFile ->
                     do guts0 <- hscDesugar hsc_env summary tc_result
                        guts <- hscSimplify hsc_env guts0
                        (iface, changed, details, cgguts) <- hscNormalIface hsc_env guts mb_old_hash
                        hscWriteIface dflags iface changed summary

                        -- We're in --make mode: finish the compilation pipeline.
                        let mod_name = ms_mod_name summary
                        _ <- runPipeline StopLn hsc_env
                                 (output_fn,
                                  Just (HscOut src_flavour mod_name (HscRecomp cgguts summary)))
                                 (Just basename)
                                 Persistent
                                 (Just location)
                                 Nothing
                              -- The object filename comes from the ModLocation

                        o_time <- getModificationUTCTime object_filename
                        let linkable = LM o_time this_mod [DotO object_filename]

                        return (HomeModInfo{ hm_details = details,
                                             hm_iface = iface,
                                             hm_linkable = Just linkable })
-----------------------------------------------------------------------------
-- stub .h and .c files (for foreign export support)
-- The _stub.c file is derived from the haskell source file, possibly taking
-- into account the -stubdir option.
--
-- The object file created by compiling the _stub.c file is put into a
-- temporary file, which will be later combined with the main .o file
-- (see the MergeStubs phase).
-- | Compile a foreign-export stub C file down to a temporary object file,
-- returning the path of that object file.
compileStub :: HscEnv -> FilePath -> IO FilePath
compileStub hsc_env stub_c =
    fmap snd $ runPipeline StopLn hsc_env (stub_c, Nothing) Nothing
                           Temporary Nothing {- no ModLocation -} Nothing
-- | Create and compile an empty (but valid) C stub, so that signature
-- modules still produce an object file.  This maintains the invariant
-- that every Haskell file compiles to object code.
compileEmptyStub :: DynFlags -> HscEnv -> FilePath -> ModLocation -> IO ()
compileEmptyStub dflags hsc_env basename location = do
    stub_path <- newTempName dflags "c"
    writeFile stub_path ""
    void $ runPipeline StopLn hsc_env
                       (stub_path, Nothing)
                       (Just basename)
                       Persistent
                       (Just location)
                       Nothing
-- ---------------------------------------------------------------------------
-- Link
-- | Link the home package, honouring any installed 'linkHook'.
link :: GhcLink                 -- interactive or batch
     -> DynFlags                -- dynamic flags
     -> Bool                    -- attempt linking in batch mode?
     -> HomePackageTable        -- what to link
     -> IO SuccessFlag

-- For the moment, in the batch linker, we don't bother to tell doLink
-- which packages to link -- it just tries all that are available.
-- batch_attempt_linking should only be *looked at* in batch mode.  It
-- should only be True if the upsweep was successful and someone
-- exports main, i.e., we have good reason to believe that linking
-- will succeed.

link ghcLink dflags
  = lookupHook linkHook l dflags ghcLink dflags
  where
    -- Default implementation, used when no hook overrides it.
    l how dflags' batch_attempt_linking hpt = case how of
      NoLink        -> return Succeeded
      LinkInMemory
        | cGhcWithInterpreter == "YES"
            -- Not Linking...(demand linker will do the job)
            -> return Succeeded
        | otherwise
            -> panicBadLink LinkInMemory
      LinkBinary    -> link' dflags' batch_attempt_linking hpt
      LinkStaticLib -> link' dflags' batch_attempt_linking hpt
      LinkDynLib    -> link' dflags' batch_attempt_linking hpt
-- | Abort: the requested link mode is not supported by this GHC build.
panicBadLink :: GhcLink -> a
panicBadLink how =
    panic msg
  where
    msg = "link: GHC not built to link this way: " ++ show how
link' :: DynFlags                -- dynamic flags
      -> Bool                    -- attempt linking in batch mode?
      -> HomePackageTable        -- what to link
      -> IO SuccessFlag

link' dflags batch_attempt_linking hpt
   | batch_attempt_linking
   = do
        let
            -- Static archive output if explicitly requested, or if the
            -- target platform only supports static libraries.
            staticLink = case ghcLink dflags of
                          LinkStaticLib -> True
                          _ -> platformBinariesAreStaticLibs (targetPlatform dflags)

            home_mod_infos = eltsUFM hpt

            -- the packages we depend on
            pkg_deps  = concatMap (map fst . dep_pkgs . mi_deps . hm_iface) home_mod_infos

            -- the linkables to link
            linkables = map (expectJust "link".hm_linkable) home_mod_infos

        debugTraceMsg dflags 3 (text "link: linkables are ..." $$ vcat (map ppr linkables))

        -- check for the -no-link flag
        if isNoLink (ghcLink dflags)
          then do debugTraceMsg dflags 3 (text "link(batch): linking omitted (-c flag given).")
                  return Succeeded
          else do

        let getOfiles (LM _ _ us) = map nameOfObject (filter isObject us)
            obj_files = concatMap getOfiles linkables

            exe_file = exeFileName staticLink dflags

        linking_needed <- linkingNeeded dflags staticLink linkables pkg_deps

        -- Skip the link step when the output is already up to date,
        -- unless -fforce-recomp was given.
        if not (gopt Opt_ForceRecomp dflags) && not linking_needed
           then do debugTraceMsg dflags 2 (text exe_file <+> ptext (sLit "is up to date, linking not required."))
                   return Succeeded
           else do

        compilationProgressMsg dflags ("Linking " ++ exe_file ++ " ...")

        -- Don't showPass in Batch mode; doLink will do that for us.
        let link = case ghcLink dflags of
                LinkBinary    -> linkBinary
                LinkStaticLib -> linkStaticLibCheck
                LinkDynLib    -> linkDynLibCheck
                other         -> panicBadLink other
        link dflags obj_files pkg_deps

        debugTraceMsg dflags 3 (text "link: done")

        -- linkBinary only returns if it succeeds
        return Succeeded

   | otherwise
   = do debugTraceMsg dflags 3 (text "link(batch): upsweep (partially) failed OR" $$
                                text " Main.main not exported; not linking.")
        return Succeeded
-- | Decide whether we have to relink: returns 'False' only when the
-- existing executable/library is newer than every object file, extra
-- linker input, and Haskell package library it depends on (and the
-- recorded link options still match, per 'checkLinkInfo').
linkingNeeded :: DynFlags -> Bool -> [Linkable] -> [PackageKey] -> IO Bool
linkingNeeded dflags staticLink linkables pkg_deps = do
        -- if the modification time on the executable is later than the
        -- modification times on all of the objects and libraries, then omit
        -- linking (unless the -fforce-recomp flag was given).
  let exe_file = exeFileName staticLink dflags
  e_exe_time <- tryIO $ getModificationUTCTime exe_file
  case e_exe_time of
    Left _  -> return True   -- no executable yet: must link
    Right t -> do
        -- first check object files and extra_ld_inputs
        let extra_ld_inputs = [ f | FileOption _ f <- ldInputs dflags ]
        e_extra_times <- mapM (tryIO . getModificationUTCTime) extra_ld_inputs
        let (errs,extra_times) = splitEithers e_extra_times
        let obj_times = map linkableTime linkables ++ extra_times
        if not (null errs) || any (t <) obj_times
            then return True
            else do

        -- next, check libraries. XXX this only checks Haskell libraries,
        -- not extra_libraries or -l things from the command line.
        let pkg_hslibs = [ (libraryDirs c, lib)
                         | Just c <- map (lookupPackage dflags) pkg_deps,
                           lib <- packageHsLibs dflags c ]

        pkg_libfiles <- mapM (uncurry (findHSLib dflags)) pkg_hslibs
        if any isNothing pkg_libfiles then return True else do
        e_lib_times <- mapM (tryIO . getModificationUTCTime)
                            (catMaybes pkg_libfiles)
        let (lib_errs,lib_times) = splitEithers e_lib_times
        if not (null lib_errs) || any (t <) lib_times
           then return True
           else checkLinkInfo dflags pkg_deps exe_file
-- | Compare the link options recorded in the existing binary against the
-- options we would use now.  Returns 'False' if they match (so we can
-- avoid linking, because the previous binary was linked with "the same
-- options"); 'True' means a relink is required.
checkLinkInfo :: DynFlags -> [PackageKey] -> FilePath -> IO Bool
checkLinkInfo dflags pkg_deps exe_file
 | not (platformSupportsSavingLinkOpts (platformOS (targetPlatform dflags)))
 -- ToDo: Windows and OS X do not use the ELF binary format, so
 -- readelf does not work there.  We need to find another way to do
 -- this.
 = return False -- conservatively we should return True, but not
                -- linking in this case was the behaviour for a long
                -- time so we leave it as-is.
 | otherwise
 = do
   link_info <- getLinkInfo dflags pkg_deps
   debugTraceMsg dflags 3 $ text ("Link info: " ++ link_info)
   -- Read back what was stored in the binary's dedicated ELF section.
   m_exe_link_info <- readElfSection dflags ghcLinkInfoSectionName exe_file
   debugTraceMsg dflags 3 $ text ("Exe link info: " ++ show m_exe_link_info)
   return (Just link_info /= m_exe_link_info)
-- | Can we record link options in a section of the produced binary?
-- Requires an ELF target; Solaris 2 is explicitly excluded (see #5382).
platformSupportsSavingLinkOpts :: OS -> Bool
platformSupportsSavingLinkOpts os =
    os /= OSSolaris2 && osElfTarget os
-- | Name of the ELF section in which link options are saved.
-- Note: if we use the ".debug" prefix, then strip will strip it by default,
-- which is the desired behaviour.
ghcLinkInfoSectionName :: String
ghcLinkInfoSectionName = ".debug-ghc-link-info"
-- | Search the given directories for a Haskell library (static archive or
-- shared object, depending on -static), returning the first hit.
findHSLib :: DynFlags -> [String] -> String -> IO (Maybe FilePath)
findHSLib dflags dirs lib = do
    let wanted | gopt Opt_Static dflags = "lib" ++ lib <.> "a"
               | otherwise              = mkSOName (targetPlatform dflags) lib
    hits <- filterM doesFileExist [ dir </> wanted | dir <- dirs ]
    return (listToMaybe hits)
-- -----------------------------------------------------------------------------
-- Compile files in one-shot mode.
-- | Compile each source file in one-shot mode, then hand the resulting
-- object files to the linker.
oneShot :: HscEnv -> Phase -> [(String, Maybe Phase)] -> IO ()
oneShot hsc_env stop_phase srcs =
    mapM (compileFile hsc_env stop_phase) srcs
        >>= doLink (hsc_dflags hsc_env) stop_phase
-- | Compile a single file in one-shot mode, returning the name of the file
-- produced by the pipeline.  Throws a 'CmdLineError' if the input file
-- does not exist.
compileFile :: HscEnv -> Phase -> (FilePath, Maybe Phase) -> IO FilePath
compileFile hsc_env stop_phase (src, mb_phase) = do
   exists <- doesFileExist src
   -- idiom: 'unless exists' instead of 'when (not exists)'
   unless exists $
        throwGhcExceptionIO (CmdLineError ("does not exist: " ++ src))

   let
        dflags    = hsc_dflags hsc_env
        split     = gopt Opt_SplitObjs dflags
        mb_o_file = outputFile dflags
        ghc_link  = ghcLink dflags      -- Set by -c or -no-link

        -- When linking, the -o argument refers to the linker's output.
        -- otherwise, we use it as the name for the pipeline's output.
        output
         -- If we are doing -fno-code, then act as if the output is
         -- 'Temporary'. This stops GHC trying to copy files to their
         -- final location.
         | HscNothing <- hscTarget dflags = Temporary
         | StopLn <- stop_phase, not (isNoLink ghc_link) = Persistent
                -- -o foo applies to linker
         | isJust mb_o_file = SpecificFile
                -- -o foo applies to the file we are compiling now
         | otherwise = Persistent

        stop_phase' = case stop_phase of
                        As _ | split -> SplitAs
                        _            -> stop_phase

   ( _, out_file) <- runPipeline stop_phase' hsc_env
                            (src, fmap RealPhase mb_phase) Nothing output
                            Nothing{-no ModLocation-} Nothing
   return out_file
-- | Run the final link step, dispatching on the requested link mode.
-- Does nothing if we stopped before the link phase.
doLink :: DynFlags -> Phase -> [FilePath] -> IO ()
doLink dflags stop_phase o_files =
    when (isStopLn stop_phase) $
        case ghcLink dflags of
          NoLink        -> return ()
          LinkBinary    -> linkBinary         dflags o_files []
          LinkStaticLib -> linkStaticLibCheck dflags o_files []
          LinkDynLib    -> linkDynLibCheck    dflags o_files []
          other         -> panicBadLink other
-- ---------------------------------------------------------------------------
-- | Run a compilation pipeline, consisting of multiple phases.
--
-- This is the interface to the compilation pipeline, which runs
-- a series of compilation steps on a single source file, specifying
-- at which stage to stop.
--
-- The DynFlags can be modified by phases in the pipeline (eg. by
-- OPTIONS_GHC pragmas), and the changes affect later phases in the
-- pipeline.
runPipeline
  :: Phase                      -- ^ When to stop
  -> HscEnv                     -- ^ Compilation environment
  -> (FilePath,Maybe PhasePlus) -- ^ Input filename (and maybe -x suffix)
  -> Maybe FilePath             -- ^ original basename (if different from ^^^)
  -> PipelineOutput             -- ^ Output filename
  -> Maybe ModLocation          -- ^ A ModLocation, if this is a Haskell module
  -> Maybe FilePath             -- ^ stub object, if we have one
  -> IO (DynFlags, FilePath)    -- ^ (final flags, output filename)
runPipeline stop_phase hsc_env0 (input_fn, mb_phase)
            mb_basename output maybe_loc maybe_stub_o

    = do let
             dflags0 = hsc_dflags hsc_env0

             -- Decide where dump files should go based on the pipeline output
             dflags = dflags0 { dumpPrefix = Just (basename ++ ".") }
             hsc_env = hsc_env0 {hsc_dflags = dflags}

             (input_basename, suffix) = splitExtension input_fn
             suffix' = drop 1 suffix -- strip off the .
             basename | Just b <- mb_basename = b
                      | otherwise             = input_basename

             -- If we were given a -x flag, then use that phase to start from
             start_phase = fromMaybe (RealPhase (startPhase suffix')) mb_phase

             -- A pipeline counts as "Haskellish" if it starts in one of the
             -- Haskell source-processing phases, or past the frontend.
             isHaskell (RealPhase (Unlit _)) = True
             isHaskell (RealPhase (Cpp   _)) = True
             isHaskell (RealPhase (HsPp  _)) = True
             isHaskell (RealPhase (Hsc   _)) = True
             isHaskell (HscOut {})           = True
             isHaskell _                     = False

             isHaskellishFile = isHaskell start_phase

             env = PipeEnv{ stop_phase,
                            src_filename = input_fn,
                            src_basename = basename,
                            src_suffix = suffix',
                            output_spec = output }

         -- We want to catch cases of "you can't get there from here" before
         -- we start the pipeline, because otherwise it will just run off the
         -- end.
         let happensBefore' = happensBefore dflags
         case start_phase of
             RealPhase start_phase' ->
                 -- See Note [Partial ordering on phases]
                 -- Not the same as: (stop_phase `happensBefore` start_phase')
                 when (not (start_phase' `happensBefore'` stop_phase ||
                            start_phase' `eqPhase` stop_phase)) $
                       throwGhcExceptionIO (UsageError
                                   ("cannot compile this file to desired target: "
                                      ++ input_fn))
             HscOut {} -> return ()

         debugTraceMsg dflags 4 (text "Running the pipeline")
         r <- runPipeline' start_phase hsc_env env input_fn
                           maybe_loc maybe_stub_o

         -- If we are compiling a Haskell module, and doing
         -- -dynamic-too, but couldn't do the -dynamic-too fast
         -- path, then rerun the pipeline for the dyn way
         let dflags = extractDynFlags hsc_env
         -- NB: Currently disabled on Windows (ref #7134, #8228, and #5987)
         when (not $ platformOS (targetPlatform dflags) == OSMinGW32) $ do
           when isHaskellishFile $ whenCannotGenerateDynamicToo dflags $ do
               debugTraceMsg dflags 4
                   (text "Running the pipeline again for -dynamic-too")
               let dflags' = dynamicTooMkDynamicDynFlags dflags
               hsc_env' <- newHscEnv dflags'
               _ <- runPipeline' start_phase hsc_env' env input_fn
                                 maybe_loc maybe_stub_o
               return ()
         return r
-- | Kick off the pipeline loop with a freshly initialised 'PipeState'.
runPipeline'
  :: PhasePlus                  -- ^ When to start
  -> HscEnv                     -- ^ Compilation environment
  -> PipeEnv
  -> FilePath                   -- ^ Input filename
  -> Maybe ModLocation          -- ^ A ModLocation, if this is a Haskell module
  -> Maybe FilePath             -- ^ stub object, if we have one
  -> IO (DynFlags, FilePath)    -- ^ (final flags, output filename)
runPipeline' start_phase hsc_env env input_fn
             maybe_loc maybe_stub_o =
    -- Execute the pipeline...
    evalP (pipeLoop start_phase input_fn) env initial_state
  where
    initial_state = PipeState{ hsc_env, maybe_loc, maybe_stub_o = maybe_stub_o }
-- ---------------------------------------------------------------------------
-- outer pipeline loop
-- | pipeLoop runs phases until we reach the stop phase, threading each
-- phase's output file into the next phase as its input.
pipeLoop :: PhasePlus -> FilePath -> CompPipeline (DynFlags, FilePath)
pipeLoop phase input_fn = do
  env <- getPipeEnv
  dflags <- getDynFlags
  -- See Note [Partial ordering on phases]
  let happensBefore' = happensBefore dflags
      stopPhase = stop_phase env
  case phase of
   RealPhase realPhase | realPhase `eqPhase` stopPhase            -- All done
     -> -- Sometimes, a compilation phase doesn't actually generate any output
        -- (eg. the CPP phase when -fcpp is not turned on).  If we end on this
        -- stage, but we wanted to keep the output, then we have to explicitly
        -- copy the file, remembering to prepend a {-# LINE #-} pragma so that
        -- further compilation stages can tell what the original filename was.
        case output_spec env of
        Temporary ->
            return (dflags, input_fn)
        output ->
            do pst <- getPipeState
               final_fn <- liftIO $ getOutputFilename
                                        stopPhase output (src_basename env)
                                        dflags stopPhase (maybe_loc pst)
               when (final_fn /= input_fn) $ do
                  let msg = ("Copying `" ++ input_fn ++"' to `" ++ final_fn ++ "'")
                      line_prag = Just ("{-# LINE 1 \"" ++ src_filename env ++ "\" #-}\n")
                  liftIO $ copyWithHeader dflags msg line_prag input_fn final_fn
               return (dflags, final_fn)

     | not (realPhase `happensBefore'` stopPhase)
        -- Something has gone wrong.  We'll try to cover all the cases when
        -- this could happen, so if we reach here it is a panic.
        -- eg. it might happen if the -C flag is used on a source file that
        -- has {-# OPTIONS -fasm #-}.
     -> panic ("pipeLoop: at phase " ++ show realPhase ++
               " but I wanted to stop at phase " ++ show stopPhase)

   -- Not at the stop phase yet: run this phase and recurse on its successor.
   _
     -> do liftIO $ debugTraceMsg dflags 4
                                  (ptext (sLit "Running phase") <+> ppr phase)
           (next_phase, output_fn) <- runHookedPhase phase input_fn dflags
           r <- pipeLoop next_phase output_fn
           case phase of
               HscOut {} ->
                   -- After code generation, when -dynamic-too is in effect,
                   -- run the backend a second time with the dynamic flags.
                   whenGeneratingDynamicToo dflags $ do
                       setDynFlags $ dynamicTooMkDynamicDynFlags dflags
                       -- TODO shouldn't ignore result:
                       _ <- pipeLoop phase input_fn
                       return ()
               _ ->
                   return ()
           return r
-- | Run a single phase, giving an installed 'runPhaseHook' the chance to
-- override the default 'runPhase' implementation.
runHookedPhase :: PhasePlus -> FilePath -> DynFlags
               -> CompPipeline (PhasePlus, FilePath)
runHookedPhase pp input dflags =
    phase_impl pp input dflags
  where
    phase_impl = lookupHook runPhaseHook runPhase dflags
-- -----------------------------------------------------------------------------
-- In each phase, we need to know into what filename to generate the
-- output. All the logic about which filenames we generate output
-- into is embodied in the following function.
-- | Compute the output filename for the given next phase, reading the
-- stop phase, basename and output spec from the pipeline environment.
phaseOutputFilename :: Phase{-next phase-} -> CompPipeline FilePath
phaseOutputFilename next_phase = do
    PipeEnv{stop_phase, src_basename, output_spec} <- getPipeEnv
    PipeState{maybe_loc, hsc_env} <- getPipeState
    liftIO $ getOutputFilename stop_phase output_spec src_basename
                               (hsc_dflags hsc_env) next_phase maybe_loc
-- | Work out where a phase's output should go, honouring -o, -odir and
-- the keep-intermediate-files flags.  Intermediate outputs normally go
-- to a temporary file unless explicitly kept.
getOutputFilename
  :: Phase -> PipelineOutput -> String
  -> DynFlags -> Phase{-next phase-} -> Maybe ModLocation -> IO FilePath
getOutputFilename stop_phase output basename dflags next_phase maybe_location
 | is_last_phase, Persistent   <- output = persistent_fn
 | is_last_phase, SpecificFile <- output = case outputFile dflags of
                                           Just f -> return f
                                           Nothing ->
                                               panic "SpecificFile: No filename"
 | keep_this_output                      = persistent_fn
 | otherwise                             = newTempName dflags suffix
    where
          hcsuf      = hcSuf dflags
          odir       = objectDir dflags
          osuf       = objectSuf dflags
          keep_hc    = gopt Opt_KeepHcFiles dflags
          keep_s     = gopt Opt_KeepSFiles dflags
          keep_bc    = gopt Opt_KeepLlvmFiles dflags

          -- The suffix of the file the next phase will consume.
          myPhaseInputExt HCc       = hcsuf
          myPhaseInputExt MergeStub = osuf
          myPhaseInputExt StopLn    = osuf
          myPhaseInputExt other     = phaseInputExt other

          is_last_phase = next_phase `eqPhase` stop_phase

          -- sometimes, we keep output from intermediate stages
          keep_this_output =
               case next_phase of
                       As _    | keep_s  -> True
                       LlvmOpt | keep_bc -> True
                       HCc     | keep_hc -> True
                       _other            -> False

          suffix = myPhaseInputExt next_phase

          -- persistent object files get put in odir
          persistent_fn
             | StopLn <- next_phase = return odir_persistent
             | otherwise            = return persistent

          persistent = basename <.> suffix

          odir_persistent
             | Just loc <- maybe_location = ml_obj_file loc
             | Just d <- odir = d </> persistent
             | otherwise      = persistent
-- -----------------------------------------------------------------------------
-- | Each phase in the pipeline returns the next phase to execute, and the
-- name of the file in which the output was placed.
--
-- We must do things dynamically this way, because we often don't know
-- what the rest of the phases will be until part-way through the
-- compilation: for example, an {-# OPTIONS -fasm #-} at the beginning
-- of a source file can change the latter stages of the pipeline from
-- taking the LLVM route to using the native code generator.
--
runPhase :: PhasePlus   -- ^ Run this phase
         -> FilePath    -- ^ name of the input file
         -> DynFlags    -- ^ for convenience, we pass the current dflags in
         -> CompPipeline (PhasePlus,   -- next phase to run
                          FilePath)    -- output filename

-- Invariant: the output filename always contains the output
-- Interesting case: Hsc when there is no recompilation to do
--                   Then the output filename is still a .o file
-------------------------------------------------------------------------------
-- Unlit phase
runPhase (RealPhase (Unlit sf)) input_fn dflags
  = do
       output_fn <- phaseOutputFilename (Cpp sf)

       let unlit_flags =
             [ -- The -h option passes the file name for unlit to
               -- put in a #line directive
               SysTools.Option     "-h"
               -- See Note [Don't normalise input filenames].
             , SysTools.Option     $ escape input_fn
             , SysTools.FileOption "" input_fn
             , SysTools.FileOption "" output_fn
             ]

       liftIO $ SysTools.runUnlit dflags unlit_flags

       return (RealPhase (Cpp sf), output_fn)
  where
       -- escape the characters \, ", and ', but don't try to escape
       -- Unicode or anything else (so we don't use Util.charToC
       -- here).  If we get this wrong, then in
       -- Coverage.isGoodTickSrcSpan where we check that the filename in
       -- a SrcLoc is the same as the source filename, the two will
       -- look bogusly different. See test:
       -- libraries/hpc/tests/function/subdir/tough2.hs
       escape = concatMap escapeChar
         where
           escapeChar c
             | c `elem` "\\\"'" = ['\\', c]
             | otherwise        = [c]
-------------------------------------------------------------------------------
-- Cpp phase : (a) gets OPTIONS out of file
-- (b) runs cpp if necessary
-- Cpp phase: read the OPTIONS pragmas, then run cpp over the file if
-- -cpp is on.  Note that the pragmas are re-read after preprocessing,
-- since cpp may have changed them.
runPhase (RealPhase (Cpp sf)) input_fn dflags0
  = do
       src_opts <- liftIO $ getOptionsFromFile dflags0 input_fn
       (dflags1, unhandled_flags, warns)
           <- liftIO $ parseDynamicFilePragma dflags0 src_opts
       setDynFlags dflags1
       liftIO $ checkProcessArgsResult dflags1 unhandled_flags

       if not (xopt Opt_Cpp dflags1) then do
           -- we have to be careful to emit warnings only once.
           unless (gopt Opt_Pp dflags1) $
               liftIO $ handleFlagWarnings dflags1 warns

           -- no need to preprocess CPP, just pass input file along
           -- to the next phase of the pipeline.
           return (RealPhase (HsPp sf), input_fn)
        else do
            output_fn <- phaseOutputFilename (HsPp sf)
            liftIO $ doCpp dflags1 True{-raw-}
                           input_fn output_fn
            -- re-read the pragmas now that we've preprocessed the file
            -- See #2464,#3457
            src_opts <- liftIO $ getOptionsFromFile dflags0 output_fn
            (dflags2, unhandled_flags, warns)
                <- liftIO $ parseDynamicFilePragma dflags0 src_opts
            liftIO $ checkProcessArgsResult dflags2 unhandled_flags
            unless (gopt Opt_Pp dflags2) $
                liftIO $ handleFlagWarnings dflags2 warns
            -- the HsPp pass below will emit warnings

            setDynFlags dflags2

            return (RealPhase (HsPp sf), output_fn)
-------------------------------------------------------------------------------
-- HsPp phase
-- HsPp phase: run the user-supplied preprocessor (-F -pgmF), if enabled,
-- passing it the original filename, current input, and the output file.
runPhase (RealPhase (HsPp sf)) input_fn dflags
  = do
       if not (gopt Opt_Pp dflags) then
           -- no need to preprocess, just pass input file along
           -- to the next phase of the pipeline.
          return (RealPhase (Hsc sf), input_fn)
        else do
            PipeEnv{src_basename, src_suffix} <- getPipeEnv
            let orig_fn = src_basename <.> src_suffix
            output_fn <- phaseOutputFilename (Hsc sf)
            liftIO $ SysTools.runPp dflags
                           ( [ SysTools.Option     orig_fn
                             , SysTools.Option     input_fn
                             , SysTools.FileOption "" output_fn
                             ]
                           )

            -- re-read pragmas now that we've parsed the file (see #3674)
            src_opts <- liftIO $ getOptionsFromFile dflags output_fn
            (dflags1, unhandled_flags, warns)
                <- liftIO $ parseDynamicFilePragma dflags src_opts
            setDynFlags dflags1
            liftIO $ checkProcessArgsResult dflags1 unhandled_flags
            liftIO $ handleFlagWarnings dflags1 warns

            return (RealPhase (Hsc sf), output_fn)
-----------------------------------------------------------------------------
-- Hsc phase
-- Compilation of a single module, in "legacy" mode (_not_ under
-- the direction of the compilation manager).
-- Hsc phase: compile a single module in one-shot ("legacy") mode, i.e.
-- _not_ under the direction of the compilation manager.  Builds the
-- ModSummary by hand and invokes hscCompileOneShot.
runPhase (RealPhase (Hsc src_flavour)) input_fn dflags0
 = do   -- normal Hsc mode, not mkdependHS
        PipeEnv{ stop_phase=stop,
                 src_basename=basename,
                 src_suffix=suff } <- getPipeEnv

  -- we add the current directory (i.e. the directory in which
  -- the .hs files resides) to the include path, since this is
  -- what gcc does, and it's probably what you want.
        let current_dir = takeDirectory basename
            paths = includePaths dflags0
            dflags = dflags0 { includePaths = current_dir : paths }

        setDynFlags dflags

  -- gather the imports and module name
        (hspp_buf,mod_name,imps,src_imps) <- liftIO $ do
          do
            buf <- hGetStringBuffer input_fn
            (src_imps,imps,L _ mod_name) <- getImports dflags buf input_fn (basename <.> suff)
            return (Just buf, mod_name, imps, src_imps)

  -- Take -o into account if present
  -- Very like -ohi, but we must *only* do this if we aren't linking
  -- (If we're linking then the -o applies to the linked thing, not to
  -- the object file for one module.)
  -- Note the nasty duplication with the same computation in compileFile above
        location <- getLocation src_flavour mod_name

        let o_file = ml_obj_file location -- The real object file
            hi_file = ml_hi_file location
            dest_file | writeInterfaceOnlyMode dflags
                            = hi_file
                      | otherwise
                            = o_file

  -- Figure out if the source has changed, for recompilation avoidance.
  --
  -- Setting source_unchanged to True means that M.o seems
  -- to be up to date wrt M.hs; so no need to recompile unless imports have
  -- changed (which the compiler itself figures out).
  -- Setting source_unchanged to False tells the compiler that M.o is out of
  -- date wrt M.hs (or M.o doesn't exist) so we must recompile regardless.
        src_timestamp <- liftIO $ getModificationUTCTime (basename <.> suff)

        source_unchanged <- liftIO $
          if not (isStopLn stop)
                -- SourceModified unconditionally if
                --      (a) recompilation checker is off, or
                --      (b) we aren't going all the way to .o file (e.g. ghc -S)
             then return SourceModified
                -- Otherwise look at file modification dates
             else do dest_file_exists <- doesFileExist dest_file
                     if not dest_file_exists
                        then return SourceModified       -- Need to recompile
                        else do t2 <- getModificationUTCTime dest_file
                                if t2 > src_timestamp
                                  then return SourceUnmodified
                                  else return SourceModified

        PipeState{hsc_env=hsc_env'} <- getPipeState

  -- Tell the finder cache about this module
        mod <- liftIO $ addHomeModuleToFinder hsc_env' mod_name location

  -- Make the ModSummary to hand to hscMain
        let
            mod_summary = ModSummary {  ms_mod       = mod,
                                        ms_hsc_src   = src_flavour,
                                        ms_hspp_file = input_fn,
                                        ms_hspp_opts = dflags,
                                        ms_hspp_buf  = hspp_buf,
                                        ms_location  = location,
                                        ms_hs_date   = src_timestamp,
                                        ms_obj_date  = Nothing,
                                        ms_iface_date = Nothing,
                                        ms_textual_imps = imps,
                                        ms_srcimps      = src_imps }

  -- run the compiler!
        result <- liftIO $ hscCompileOneShot hsc_env'
                               mod_summary source_unchanged

        -- The HscOut phase does not consume an input file; the compiler
        -- result itself is carried in the phase constructor.
        return (HscOut src_flavour mod_name result,
                panic "HscOut doesn't have an input filename")
-- | Act on the result of the Hsc phase.  Depending on the compiler's
-- verdict we either stop (module up to date), create stamp/empty objects
-- (hs-boot / hsig files), or run the backend code generator and compile
-- any foreign-export C stub it produced.
runPhase (HscOut src_flavour mod_name result) _ dflags = do
        location <- getLocation src_flavour mod_name
        setModLocation location
        let o_file = ml_obj_file location -- The real object file
            hsc_lang = hscTarget dflags
            next_phase = hscPostBackendPhase dflags src_flavour hsc_lang
        case result of
            HscNotGeneratingCode ->
                -- -fno-code: there is nothing to feed to the next phase.
                return (RealPhase next_phase,
                        panic "No output filename from Hsc when no-code")
            HscUpToDate ->
                do liftIO $ touchObjectFile dflags o_file
                   -- The .o file must have a later modification date
                   -- than the source file (else we wouldn't get Nothing)
                   -- but we touch it anyway, to keep 'make' happy (we think).
                   return (RealPhase StopLn, o_file)
            HscUpdateBoot ->
                do -- In the case of hs-boot files, generate a dummy .o-boot
                   -- stamp file for the benefit of Make
                   liftIO $ touchObjectFile dflags o_file
                   return (RealPhase next_phase, o_file)
            HscUpdateSig ->
                do -- We need to create a REAL but empty .o file
                   -- because we are going to attempt to put it in a library
                   PipeState{hsc_env=hsc_env'} <- getPipeState
                   let input_fn = expectJust "runPhase" (ml_hs_file location)
                       basename = dropExtension input_fn
                   liftIO $ compileEmptyStub dflags hsc_env' basename location
                   return (RealPhase next_phase, o_file)
            HscRecomp cgguts mod_summary
              -> do output_fn <- phaseOutputFilename next_phase
                    PipeState{hsc_env=hsc_env'} <- getPipeState
                    -- Run the backend; mStub is a C stub file generated for
                    -- foreign exports, if the module has any.
                    (outputFilename, mStub) <- liftIO $ hscGenHardCode hsc_env' cgguts mod_summary output_fn
                    case mStub of
                        Nothing -> return ()
                        Just stub_c ->
                            do stub_o <- liftIO $ compileStub hsc_env' stub_c
                               setStubO stub_o
                    return (RealPhase next_phase, outputFilename)
-----------------------------------------------------------------------------
-- Cmm phase
-- | Run the C preprocessor over a .cmm file; the preprocessed result is
-- then fed to the Cmm compilation phase proper.
runPhase (RealPhase CmmCpp) input_fn dflags = do
    cpp_output_fn <- phaseOutputFilename Cmm
    -- The False argument selects ordinary (not "raw") CPP behaviour.
    liftIO $ doCpp dflags False input_fn cpp_output_fn
    return (RealPhase Cmm, cpp_output_fn)
-- | Compile a (preprocessed) .cmm file with the configured backend; the
-- phase that follows depends on the selected code generator.
runPhase (RealPhase Cmm) input_fn dflags = do
    PipeState{hsc_env} <- getPipeState
    let backend_lang = hscTarget dflags
        phase_after  = hscPostBackendPhase dflags HsSrcFile backend_lang
    compiled_fn <- phaseOutputFilename phase_after
    liftIO $ hscCompileCmmFile hsc_env input_fn compiled_fn
    return (RealPhase phase_after, compiled_fn)
-----------------------------------------------------------------------------
-- Cc phase
-- we don't support preprocessing .c files (with -E) now. Doing so introduces
-- way too many hacks, and I can't say I've ever used it anyway.
-- | Compile a C-family file (.c, .cpp, .m, .mm, or a GHC-generated .hc
-- file) down to assembly by invoking the configured C compiler with the
-- package include paths and platform-specific flags assembled below.
-- NOTE: the order in which flag groups are concatenated is significant
-- to the C compiler; do not reorder them casually.
runPhase (RealPhase cc_phase) input_fn dflags
   | any (cc_phase `eqPhase`) [Cc, Ccxx, HCc, Cobjc, Cobjcxx]
   = do
        let platform = targetPlatform dflags
            hcc = cc_phase `eqPhase` HCc
        let cmdline_include_paths = includePaths dflags
        -- HC files have the dependent packages stamped into them
        pkgs <- if hcc then liftIO $ getHCFilePackages input_fn else return []
        -- add package include paths even if we're just compiling .c
        -- files; this is the Value Add(TM) that using ghc instead of
        -- gcc gives you :)
        pkg_include_dirs <- liftIO $ getPackageIncludePath dflags pkgs
        let include_paths = foldr (\ x xs -> ("-I" ++ x) : xs) []
                              (cmdline_include_paths ++ pkg_include_dirs)
        let gcc_extra_viac_flags = extraGccViaCFlags dflags
        let pic_c_flags = picCCOpts dflags
        let verbFlags = getVerbFlags dflags
        -- cc-options are not passed when compiling .hc files.  Our
        -- hc code doesn't #include any header files anyway, so these
        -- options aren't necessary.
        pkg_extra_cc_opts <- liftIO $
          if cc_phase `eqPhase` HCc
             then return []
             else getPackageExtraCcOpts dflags pkgs
        framework_paths <-
            if platformUsesFrameworks platform
            then do pkgFrameworkPaths <- liftIO $ getPackageFrameworkPath dflags pkgs
                    let cmdlineFrameworkPaths = frameworkPaths dflags
                    return $ map ("-F"++)
                                 (cmdlineFrameworkPaths ++ pkgFrameworkPaths)
            else return []
        let split_objs = gopt Opt_SplitObjs dflags
            split_opt | hcc && split_objs = [ "-DUSE_SPLIT_MARKERS" ]
                      | otherwise         = [ ]
        let cc_opt | optLevel dflags >= 2 = [ "-O2" ]
                   | optLevel dflags >= 1 = [ "-O" ]
                   | otherwise            = []
        -- Decide next phase
        let next_phase = As False
        output_fn <- phaseOutputFilename next_phase
        let
          more_hcc_opts =
                -- on x86 the floating point regs have greater precision
                -- than a double, which leads to unpredictable results.
                -- By default, we turn this off with -ffloat-store unless
                -- the user specified -fexcess-precision.
                (if platformArch platform == ArchX86 &&
                    not (gopt Opt_ExcessPrecision dflags)
                        then [ "-ffloat-store" ]
                        else []) ++
                -- gcc's -fstrict-aliasing allows two accesses to memory
                -- to be considered non-aliasing if they have different types.
                -- This interacts badly with the C code we generate, which is
                -- very weakly typed, being derived from C--.
                ["-fno-strict-aliasing"]
        ghcVersionH <- liftIO $ getGhcVersionPathName dflags
        let gcc_lang_opt | cc_phase `eqPhase` Ccxx    = "c++"
                         | cc_phase `eqPhase` Cobjc   = "objective-c"
                         | cc_phase `eqPhase` Cobjcxx = "objective-c++"
                         | otherwise                  = "c"
        liftIO $ SysTools.runCc dflags (
                -- force the C compiler to interpret this file as C when
                -- compiling .hc files, by adding the -x c option.
                -- Also useful for plain .c files, just in case GHC saw a
                -- -x c option.
                        [ SysTools.Option "-x", SysTools.Option gcc_lang_opt
                        , SysTools.FileOption "" input_fn
                        , SysTools.Option "-o"
                        , SysTools.FileOption "" output_fn
                        ]
                       ++ map SysTools.Option (
                          pic_c_flags
                -- Stub files generated for foreign exports references the runIO_closure
                -- and runNonIO_closure symbols, which are defined in the base package.
                -- These symbols are imported into the stub.c file via RtsAPI.h, and the
                -- way we do the import depends on whether we're currently compiling
                -- the base package or not.
                       ++ (if platformOS platform == OSMinGW32 &&
                              thisPackage dflags == basePackageKey
                                then [ "-DCOMPILING_BASE_PACKAGE" ]
                                else [])
                -- We only support SparcV9 and better because V8 lacks an atomic CAS
                -- instruction. Note that the user can still override this
                -- (e.g., -mcpu=ultrasparc) as GCC picks the "best" -mcpu flag
                -- regardless of the ordering.
                --
                -- This is a temporary hack. See #2872, commit
                -- 5bd3072ac30216a505151601884ac88bf404c9f2
                       ++ (if platformArch platform == ArchSPARC
                           then ["-mcpu=v9"]
                           else [])
                -- GCC 4.6+ doesn't like -Wimplicit when compiling C++.
                       ++ (if (cc_phase /= Ccxx && cc_phase /= Cobjcxx)
                             then ["-Wimplicit"]
                             else [])
                       ++ (if hcc
                             then gcc_extra_viac_flags ++ more_hcc_opts
                             else [])
                       ++ verbFlags
                       ++ [ "-S" ]
                       ++ cc_opt
                       ++ [ "-D__GLASGOW_HASKELL__="++cProjectVersionInt
                          , "-include", ghcVersionH
                          ]
                       ++ framework_paths
                       ++ split_opt
                       ++ include_paths
                       ++ pkg_extra_cc_opts
                       ))
        return (RealPhase next_phase, output_fn)
-----------------------------------------------------------------------------
-- Splitting phase
-- | Split one large assembly file (-split-objs) into many small .s files.
-- The external splitter writes the chunk count into a file, which we read
-- back and record in 'splitInfo' for the SplitAs phase to consume.
runPhase (RealPhase Splitter) input_fn dflags
  = do  -- tmp_pfx is the prefix used for the split .s files
        split_s_prefix <- liftIO $ SysTools.newTempName dflags "split"
        let n_files_fn = split_s_prefix
        liftIO $ SysTools.runSplit dflags
                          [ SysTools.FileOption "" input_fn
                          , SysTools.FileOption "" split_s_prefix
                          , SysTools.FileOption "" n_files_fn
                          ]
        -- Save the number of split files for future references
        s <- liftIO $ readFile n_files_fn
        let n_files = read s :: Int
            dflags' = dflags { splitInfo = Just (split_s_prefix, n_files) }
        setDynFlags dflags'
        -- Remember to delete all these files
        liftIO $ addFilesToClean dflags'
                                 [ split_s_prefix ++ "__" ++ show n ++ ".s"
                                 | n <- [1..n_files]]
        return (RealPhase SplitAs,
                "**splitter**") -- we don't use the filename in SplitAs
-----------------------------------------------------------------------------
-- As, SpitAs phase : Assembler
-- This is for calling the assembler on a regular assembly file (not split).
-- | Assemble a single (non-split) assembly file into an object file.
-- @with_cpp@ selects @-x assembler-with-cpp@ so the assembler input may
-- contain preprocessor directives.  Whether we go on to MergeStub or stop
-- depends on whether a foreign-export stub object was produced earlier.
runPhase (RealPhase (As with_cpp)) input_fn dflags
  = do
        -- LLVM from version 3.0 onwards doesn't support the OS X system
        -- assembler, so we use clang as the assembler instead. (#5636)
        let whichAsProg | hscTarget dflags == HscLlvm &&
                          platformOS (targetPlatform dflags) == OSDarwin
                        = do
                            -- be careful what options we call clang with
                            -- see #5903 and #7617 for bugs caused by this.
                            llvmVer <- liftIO $ figureLlvmVersion dflags
                            return $ case llvmVer of
                                Just n | n >= 30 -> SysTools.runClang
                                _                -> SysTools.runAs
                        | otherwise = return SysTools.runAs
        as_prog <- whichAsProg
        let cmdline_include_paths = includePaths dflags
        let pic_c_flags = picCCOpts dflags
        next_phase <- maybeMergeStub
        output_fn <- phaseOutputFilename next_phase
        -- we create directories for the object file, because it
        -- might be a hierarchical module.
        liftIO $ createDirectoryIfMissing True (takeDirectory output_fn)
        ccInfo <- liftIO $ getCompilerInfo dflags
        let runAssembler inputFilename outputFilename
              = liftIO $ as_prog dflags
                       ([ SysTools.Option ("-I" ++ p) | p <- cmdline_include_paths ]
                       -- See Note [-fPIC for assembler]
                       ++ map SysTools.Option pic_c_flags
                       -- We only support SparcV9 and better because V8 lacks an atomic CAS
                       -- instruction so we have to make sure that the assembler accepts the
                       -- instruction set. Note that the user can still override this
                       -- (e.g., -mcpu=ultrasparc). GCC picks the "best" -mcpu flag
                       -- regardless of the ordering.
                       --
                       -- This is a temporary hack.
                       ++ (if platformArch (targetPlatform dflags) == ArchSPARC
                           then [SysTools.Option "-mcpu=v9"]
                           else [])
                       -- Clang-family assemblers warn about unused -I etc.;
                       -- silence that noise.
                       ++ (if any (ccInfo ==) [Clang, AppleClang, AppleClang51]
                            then [SysTools.Option "-Qunused-arguments"]
                            else [])
                       ++ [ SysTools.Option "-x"
                          , if with_cpp
                              then SysTools.Option "assembler-with-cpp"
                              else SysTools.Option "assembler"
                          , SysTools.Option "-c"
                          , SysTools.FileOption "" inputFilename
                          , SysTools.Option "-o"
                          , SysTools.FileOption "" outputFilename
                          ])
        liftIO $ debugTraceMsg dflags 4 (text "Running the assembler")
        runAssembler input_fn output_fn
        return (RealPhase next_phase, output_fn)
-- This is for calling the assembler on a split assembly file (so a collection
-- of assembly files)
-- | Assemble the collection of .s files produced by the Splitter phase
-- (one object per chunk, placed in a dedicated M_o_split directory), fold
-- any foreign-export stub object into the first split object, and finally
-- join everything into a single .o file.
runPhase (RealPhase SplitAs) _input_fn dflags
  = do
        -- we'll handle the stub_o file in this phase, so don't MergeStub,
        -- just jump straight to StopLn afterwards.
        let next_phase = StopLn
        output_fn <- phaseOutputFilename next_phase
        let base_o = dropExtension output_fn
            osuf = objectSuf dflags
            split_odir  = base_o ++ "_" ++ osuf ++ "_split"
        let pic_c_flags = picCCOpts dflags
        -- this also creates the hierarchy
        liftIO $ createDirectoryIfMissing True split_odir
        -- remove M_split/ *.o, because we're going to archive M_split/ *.o
        -- later and we don't want to pick up any old objects.
        fs <- liftIO $ getDirectoryContents split_odir
        liftIO $ mapM_ removeFile $
                map (split_odir </>) $ filter (osuf `isSuffixOf`) fs
        -- splitInfo was recorded by the Splitter phase; its absence here
        -- means the pipeline is being driven incorrectly.
        let (split_s_prefix, n) = case splitInfo dflags of
                                  Nothing -> panic "No split info"
                                  Just x -> x
        let split_s   n = split_s_prefix ++ "__" ++ show n <.> "s"
            split_obj :: Int -> FilePath
            split_obj n = split_odir </>
                          takeFileName base_o ++ "__" ++ show n <.> osuf
        let assemble_file n
              = SysTools.runAs dflags (
                -- We only support SparcV9 and better because V8 lacks an atomic CAS
                -- instruction so we have to make sure that the assembler accepts the
                -- instruction set. Note that the user can still override this
                -- (e.g., -mcpu=ultrasparc). GCC picks the "best" -mcpu flag
                -- regardless of the ordering.
                --
                -- This is a temporary hack.
                   (if platformArch (targetPlatform dflags) == ArchSPARC
                    then [SysTools.Option "-mcpu=v9"]
                    else []) ++
                   -- See Note [-fPIC for assembler]
                   map SysTools.Option pic_c_flags ++
                   [ SysTools.Option "-c"
                   , SysTools.Option "-o"
                   , SysTools.FileOption "" (split_obj n)
                   , SysTools.FileOption "" (split_s n)
                   ])
        liftIO $ mapM_ assemble_file [1..n]
        -- Note [pipeline-split-init]
        -- If we have a stub file, it may contain constructor
        -- functions for initialisation of this module.  We can't
        -- simply leave the stub as a separate object file, because it
        -- will never be linked in: nothing refers to it.  We need to
        -- ensure that if we ever refer to the data in this module
        -- that needs initialisation, then we also pull in the
        -- initialisation routine.
        --
        -- To that end, we make a DANGEROUS ASSUMPTION here: the data
        -- that needs to be initialised is all in the FIRST split
        -- object.  See Note [codegen-split-init].
        PipeState{maybe_stub_o} <- getPipeState
        case maybe_stub_o of
            Nothing     -> return ()
            Just stub_o -> liftIO $ do
                     -- Merge the stub into the first split object in place,
                     -- via a temporary copy of the original first object.
                     tmp_split_1 <- newTempName dflags osuf
                     let split_1 = split_obj 1
                     copyFile split_1 tmp_split_1
                     removeFile split_1
                     joinObjectFiles dflags [tmp_split_1, stub_o] split_1
        -- join them into a single .o file
        liftIO $ joinObjectFiles dflags (map split_obj [1..n]) output_fn
        return (RealPhase next_phase, output_fn)
-----------------------------------------------------------------------------
-- LlvmOpt phase
-- | Run LLVM's @opt@ tool over the IR produced by the LLVM code generator.
-- The optimisation level is clamped to [0,2] and mapped onto a fixed pass
-- list; user-supplied -optlo flags replace our defaults entirely.
runPhase (RealPhase LlvmOpt) input_fn dflags
  = do
    ver <- liftIO $ readIORef (llvmVersion dflags)
    let opt_lvl  = max 0 (min 2 $ optLevel dflags)
        -- don't specify anything if user has specified commands. We do this
        -- for opt but not llc since opt is very specifically for optimisation
        -- passes only, so if the user is passing us extra options we assume
        -- they know what they are doing and don't get in the way.
        optFlag  = if null (getOpts dflags opt_lo)
                       then map SysTools.Option $ words (llvmOpts ver !! opt_lvl)
                       else []
        tbaa | ver < 29                 = "" -- no tbaa in 2.8 and earlier
             | gopt Opt_LlvmTBAA dflags = "--enable-tbaa=true"
             | otherwise                = "--enable-tbaa=false"
    output_fn <- phaseOutputFilename LlvmLlc
    liftIO $ SysTools.runLlvmOpt dflags
               ([ SysTools.FileOption "" input_fn,
                    SysTools.Option "-o",
                    SysTools.FileOption "" output_fn]
                ++ optFlag
                ++ [SysTools.Option tbaa])
    return (RealPhase LlvmLlc, output_fn)
  where
        -- we always (unless -optlo specified) run Opt since we rely on it to
        -- fix up some pretty big deficiencies in the code we generate
        -- (indexed by the clamped optimisation level 0..2)
        llvmOpts ver = [ "-mem2reg -globalopt"
                       , if ver >= 34 then "-O1 -globalopt" else "-O1"
                       -- LLVM 3.4 -O1 doesn't eliminate aliases reliably (bug #8855)
                       , "-O2"
                       ]
-----------------------------------------------------------------------------
-- LlvmLlc phase
-- | Run LLVM's @llc@ to turn optimised IR into native assembly.  Most of
-- the complexity is choosing the relocation model and the per-platform
-- @-mattr@ feature flags (ARM VFP, x86 SSE/AVX, stack alignment).
runPhase (RealPhase LlvmLlc) input_fn dflags
  = do
    ver <- liftIO $ readIORef (llvmVersion dflags)
    let opt_lvl = max 0 (min 2 $ optLevel dflags)
        -- iOS requires external references to be loaded indirectly from the
        -- DATA segment or dyld traps at runtime writing into TEXT: see #7722
        rmodel | platformOS (targetPlatform dflags) == OSiOS = "dynamic-no-pic"
               | gopt Opt_PIC dflags                         = "pic"
               | not (gopt Opt_Static dflags)                = "dynamic-no-pic"
               | otherwise                                   = "static"
        tbaa | ver < 29                 = "" -- no tbaa in 2.8 and earlier
             | gopt Opt_LlvmTBAA dflags = "--enable-tbaa=true"
             | otherwise                = "--enable-tbaa=false"
    -- hidden debugging flag '-dno-llvm-mangler' to skip mangling
    let next_phase = case gopt Opt_NoLlvmMangler dflags of
                         False                            -> LlvmMangle
                         True | gopt Opt_SplitObjs dflags -> Splitter
                         True                             -> As False
    output_fn <- phaseOutputFilename next_phase
    -- AVX can cause LLVM 3.2 to generate a C-like frame pointer
    -- prelude, see #9391
    when (ver == 32 && isAvxEnabled dflags) $ liftIO $ errorMsg dflags $ text
      "Note: LLVM 3.2 has known problems with AVX instructions (see trac #9391)"
    liftIO $ SysTools.runLlvmLlc dflags
                ([ SysTools.Option (llvmOpts !! opt_lvl),
                    SysTools.Option $ "-relocation-model=" ++ rmodel,
                    SysTools.FileOption "" input_fn,
                    SysTools.Option "-o", SysTools.FileOption "" output_fn]
                ++ [SysTools.Option tbaa]
                ++ map SysTools.Option fpOpts
                ++ map SysTools.Option abiOpts
                ++ map SysTools.Option sseOpts
                ++ map SysTools.Option (avxOpts ver)
                ++ map SysTools.Option avx512Opts
                ++ map SysTools.Option stackAlignOpts)
    return (RealPhase next_phase, output_fn)
  where
        -- Bug in LLVM at O3 on OSX.
        llvmOpts = if platformOS (targetPlatform dflags) == OSDarwin
                   then ["-O1", "-O2", "-O2"]
                   else ["-O1", "-O2", "-O3"]
        -- On ARMv7 using LLVM, LLVM fails to allocate floating point registers
        -- while compiling GHC source code. It's probably due to fact that it
        -- does not enable VFP by default. Let's do this manually here
        fpOpts = case platformArch (targetPlatform dflags) of
                   ArchARM ARMv7 ext _ -> if (elem VFPv3 ext)
                                      then ["-mattr=+v7,+vfp3"]
                                      else if (elem VFPv3D16 ext)
                                           then ["-mattr=+v7,+vfp3,+d16"]
                                           else []
                   ArchARM ARMv6 ext _ -> if (elem VFPv2 ext)
                                      then ["-mattr=+v6,+vfp2"]
                                      else ["-mattr=+v6"]
                   _                 -> []
        -- On Ubuntu/Debian with ARM hard float ABI, LLVM's llc still
        -- compiles into soft-float ABI. We need to explicitly set abi
        -- to hard
        abiOpts = case platformArch (targetPlatform dflags) of
                    ArchARM _ _ HARD -> ["-float-abi=hard"]
                    ArchARM _ _ _    -> []
                    _                -> []
        sseOpts | isSse4_2Enabled dflags = ["-mattr=+sse42"]
                | isSse2Enabled dflags   = ["-mattr=+sse2"]
                | isSseEnabled dflags    = ["-mattr=+sse"]
                | otherwise              = []
        avxOpts ver | isAvx512fEnabled dflags = ["-mattr=+avx512f"]
                    | isAvx2Enabled dflags    = ["-mattr=+avx2"]
                    | isAvxEnabled dflags     = ["-mattr=+avx"]
                    | ver == 32               = ["-mattr=-avx"] -- see #9391
                    | otherwise               = []
        avx512Opts =
          [ "-mattr=+avx512cd" | isAvx512cdEnabled dflags ] ++
          [ "-mattr=+avx512er" | isAvx512erEnabled dflags ] ++
          [ "-mattr=+avx512pf" | isAvx512pfEnabled dflags ]
        stackAlignOpts =
            case platformArch (targetPlatform dflags) of
              ArchX86_64 | isAvxEnabled dflags -> ["-stack-alignment=32"]
              _                                -> []
-----------------------------------------------------------------------------
-- LlvmMangle phase
-- | Post-process the assembly emitted by llc (the "LLVM mangler"), then
-- hand the result either to the Splitter (with -split-objs) or the
-- assembler.
runPhase (RealPhase LlvmMangle) input_fn dflags = do
    let phase_after
          | gopt Opt_SplitObjs dflags = Splitter
          | otherwise                 = As False
    mangled_fn <- phaseOutputFilename phase_after
    liftIO $ llvmFixupAsm dflags input_fn mangled_fn
    return (RealPhase phase_after, mangled_fn)
-----------------------------------------------------------------------------
-- merge in stub objects
-- | Merge the foreign-export stub object (recorded in the pipeline state)
-- with the module's object file, producing the final .o.  Reaching this
-- phase without a stub object is a driver bug.
runPhase (RealPhase MergeStub) input_fn dflags = do
    PipeState{maybe_stub_o} <- getPipeState
    merged_fn <- phaseOutputFilename StopLn
    liftIO $ createDirectoryIfMissing True (takeDirectory merged_fn)
    case maybe_stub_o of
      Just stub_o -> do
        liftIO $ joinObjectFiles dflags [input_fn, stub_o] merged_fn
        return (RealPhase StopLn, merged_fn)
      Nothing ->
        panic "runPhase(MergeStub): no stub"
-- warning suppression
-- | Catch-all equation: any phase not handled above indicates a bug in
-- the driver's phase sequencing, so fail loudly.
runPhase (RealPhase other) _input_fn _dflags =
   panic ("runPhase: don't know how to run phase " ++ show other)
-- | Decide the phase that follows assembly: if a foreign-export stub
-- object was recorded earlier we must run MergeStub, otherwise we are done.
maybeMergeStub :: CompPipeline Phase
maybeMergeStub = do
    PipeState{maybe_stub_o} <- getPipeState
    case maybe_stub_o of
      Just _  -> return MergeStub
      Nothing -> return StopLn
-- | Build the 'ModLocation' (source/.hi/.o filenames) for the module being
-- compiled, honouring -ohi and (when not linking) -o, and adding the -boot
-- suffix for hs-boot files.
getLocation :: HscSource -> ModuleName -> CompPipeline ModLocation
getLocation src_flavour mod_name = do
    dflags <- getDynFlags
    PipeEnv{ src_basename=basename,
             src_suffix=suff } <- getPipeEnv
    -- Build a ModLocation to pass to hscMain.
    -- The source filename is rather irrelevant by now, but it's used
    -- by hscMain for messages.  hscMain also needs
    -- the .hi and .o filenames, and this is as good a way
    -- as any to generate them, and better than most. (e.g. takes
    -- into account the -osuf flags)
    location1 <- liftIO $ mkHomeModLocation2 dflags mod_name basename suff
    -- Boot-ify it if necessary
    let location2 | HsBootFile <- src_flavour = addBootSuffixLocn location1
                  | otherwise                 = location1
    -- Take -ohi into account if present
    -- This can't be done in mkHomeModuleLocation because
    -- it only applies to the module being compiled
    let ohi = outputHi dflags
        location3 | Just fn <- ohi = location2{ ml_hi_file = fn }
                  | otherwise      = location2
    -- Take -o into account if present
    -- Very like -ohi, but we must *only* do this if we aren't linking
    -- (If we're linking then the -o applies to the linked thing, not to
    -- the object file for one module.)
    -- Note the nasty duplication with the same computation in compileFile above
    let expl_o_file = outputFile dflags
        location4 | Just ofile <- expl_o_file
                  , isNoLink (ghcLink dflags)
                  = location3 { ml_obj_file = ofile }
                  | otherwise = location3
    return location4
-- | Write the given C (or assembly) text @xs@ to a temporary file with
-- extension @extn@ and compile it to a temporary object file, adding the
-- RTS package's include dirs so the text may #include RTS headers.
mkExtraObj :: DynFlags -> Suffix -> String -> IO FilePath
mkExtraObj dflags extn xs
 = do cFile <- newTempName dflags extn
      oFile <- newTempName dflags "o"
      writeFile cFile xs
      let rtsDetails = getPackageDetails dflags rtsPackageKey
          pic_c_flags = picCCOpts dflags
      SysTools.runCc dflags
                ([Option        "-c",
                  FileOption "" cFile,
                  Option        "-o",
                  FileOption "" oFile]
                 ++ map (FileOption "-I") (includeDirs rtsDetails)
                 ++ map Option pic_c_flags)
      return oFile
-- When linking a binary, we need to create a C main() function that
-- starts everything off.  This used to be compiled statically as part
-- of the RTS, but that made it hard to change the -rtsopts setting,
-- so now we generate and compile a main() stub as part of every
-- binary and pass the -rtsopts setting directly to the RTS (#5373)
--
-- | Generate and compile the C @main()@ stub described above.  With
-- -no-hs-main the stub is empty (the user supplies main), and we warn if
-- RTS-option flags were given since they would be silently ignored.
mkExtraObjToLinkIntoBinary :: DynFlags -> IO FilePath
mkExtraObjToLinkIntoBinary dflags = do
   when (gopt Opt_NoHsMain dflags && haveRtsOptsFlags dflags) $ do
      log_action dflags dflags SevInfo noSrcSpan defaultUserStyle
          (text "Warning: -rtsopts and -with-rtsopts have no effect with -no-hs-main." $$
           text "    Call hs_init_ghc() from your main() function to set these options.")
   mkExtraObj dflags "c" (showSDoc dflags main)
 where
  -- The C source text for the stub: builds an RtsConfig reflecting the
  -- -rtsopts / -with-rtsopts settings and calls hs_main.
  main
   | gopt Opt_NoHsMain dflags = Outputable.empty
   | otherwise = vcat [
      text "#include \"Rts.h\"",
      text "extern StgClosure ZCMain_main_closure;",
      text "int main(int argc, char *argv[])",
      char '{',
      text " RtsConfig __conf = defaultRtsConfig;",
      text " __conf.rts_opts_enabled = "
          <> text (show (rtsOptsEnabled dflags)) <> semi,
      text " __conf.rts_opts_suggestions = "
          <> text (if rtsOptsSuggestions dflags
                   then "rtsTrue"
                   else "rtsFalse") <> semi,
      case rtsOpts dflags of
         Nothing   -> Outputable.empty
         Just opts -> ptext (sLit "    __conf.rts_opts= ") <>
                        text (show opts) <> semi,
      text " __conf.rts_hs_main = rtsTrue;",
      text " return hs_main(argc,argv,&ZCMain_main_closure,__conf);",
      char '}',
      char '\n' -- final newline, to keep gcc happy
     ]
-- Write out the link info section into a new assembly file. Previously
-- this was included as inline assembly in the main.c file but this
-- is pretty fragile. gas gets upset trying to calculate relative offsets
-- that span the .note section (notably .text) when debug info is present
--
-- | Produce (at most one) object file holding the "link info" note
-- section; returns @[]@ on platforms whose object format cannot carry it.
mkNoteObjsToLinkIntoBinary :: DynFlags -> [PackageKey] -> IO [FilePath]
mkNoteObjsToLinkIntoBinary dflags dep_packages = do
   link_info <- getLinkInfo dflags dep_packages
   if (platformSupportsSavingLinkOpts (platformOS (targetPlatform dflags)))
     then fmap (:[]) $ mkExtraObj dflags "s" (showSDoc dflags (link_opts link_info))
     else return []
  where
    -- Assembly text: an ELF note section containing the (C-escaped) link
    -- info string.
    link_opts info = hcat [
          text "\t.section ", text ghcLinkInfoSectionName,
                                   text ",\"\",",
                                   text elfSectionNote,
                                   text "\n",
          text "\t.ascii \"", info', text "\"\n",
          -- ALL generated assembly must have this section to disable
          -- executable stacks.  See also
          -- compiler/nativeGen/AsmCodeGen.hs for another instance
          -- where we need to do this.
          (if platformHasGnuNonexecStack (targetPlatform dflags)
           then text ".section .note.GNU-stack,\"\",@progbits\n"
           else Outputable.empty)
           ]
          where
            info' = text $ escape info
            escape :: String -> String
            escape = concatMap (charToC.fromIntegral.ord)
    -- ARM assemblers spell the note section type differently.
    elfSectionNote :: String
    elfSectionNote = case platformArch (targetPlatform dflags) of
      ArchARM _ _ _ -> "%note"
      _             -> "@note"
-- The "link info" is a string representing the parameters of the
-- link.  We save this information in the binary, and the next time we
-- link, if nothing else has changed, we use the link info stored in
-- the existing binary to decide whether to re-link or not.
--
-- | Compute the link-info string: a 'show'n tuple of everything that can
-- affect the link (package link options, frameworks, RTS options, ld
-- inputs, -optl flags).
getLinkInfo :: DynFlags -> [PackageKey] -> IO String
getLinkInfo dflags dep_packages = do
   package_link_opts <- getPackageLinkOpts dflags dep_packages
   pkg_frameworks <- if platformUsesFrameworks (targetPlatform dflags)
                     then getPackageFrameworks dflags dep_packages
                     else return []
   let extra_ld_inputs = ldInputs dflags
   let
      link_info = (package_link_opts,
                   pkg_frameworks,
                   rtsOpts dflags,
                   rtsOptsEnabled dflags,
                   gopt Opt_NoHsMain dflags,
                   map showOpt extra_ld_inputs,
                   getOpts dflags opt_l)
   --
   return (show link_info)
-----------------------------------------------------------------------------
-- Look for the /* GHC_PACKAGES ... */ comment at the top of a .hc file
-- | Read the package dependencies stamped into the first line of a .hc
-- file as a @/* GHC_PACKAGES pkg1 pkg2 ... */@ comment; an absent or
-- malformed marker yields the empty list.
getHCFilePackages :: FilePath -> IO [PackageKey]
getHCFilePackages filename =
  Exception.bracket (openFile filename ReadMode) hClose $ \h -> do
    firstLine <- hGetLine h
    return $ case firstLine of
      '/':'*':' ':'G':'H':'C':'_':'P':'A':'C':'K':'A':'G':'E':'S':rest
        -> map stringToPackageKey (words rest)
      _ -> []
-----------------------------------------------------------------------------
-- Static linking, of .o files
-- The list of packages passed to link is the list of packages on
-- which this program depends, as discovered by the compilation
-- manager.  It is combined with the list of packages that the user
-- specifies on the command line with -package flags.
--
-- In one-shot linking mode, we can't discover the package
-- dependencies (because we haven't actually done any compilation or
-- read any interface files), so the user must explicitly specify all
-- the packages.
-- | Link object files into an executable; equivalent to @linkBinary' False@
-- (i.e. a real link, not a static-library build).
linkBinary :: DynFlags -> [FilePath] -> [PackageKey] -> IO ()
linkBinary = linkBinary' False
-- | The workhorse behind 'linkBinary'.  When @staticLink@ is True we run
-- libtool to build a static library (e.g. for iOS) instead of a real link.
-- Assembles library search paths, rpath entries, the generated main()
-- stub, link-info note objects and per-platform linker flags, then invokes
-- the linker.  NOTE: the order of linker arguments is significant (see the
-- comment about '-u' below); do not reorder the concatenations casually.
linkBinary' :: Bool -> DynFlags -> [FilePath] -> [PackageKey] -> IO ()
linkBinary' staticLink dflags o_files dep_packages = do
    let platform = targetPlatform dflags
        mySettings = settings dflags
        verbFlags = getVerbFlags dflags
        output_fn = exeFileName staticLink dflags
    -- get the full list of packages to link with, by combining the
    -- explicit packages with the auto packages and all of their
    -- dependencies, and eliminating duplicates.
    full_output_fn <- if isAbsolute output_fn
                      then return output_fn
                      else do d <- getCurrentDirectory
                              return $ normalise (d </> output_fn)
    pkg_lib_paths <- getPackageLibraryPath dflags dep_packages
    let pkg_lib_path_opts = concatMap get_pkg_lib_path_opts pkg_lib_paths
        -- -L (and, for dynamic loading, rpath/rpath-link) options for one
        -- package library directory, per object format.
        get_pkg_lib_path_opts l
         | osElfTarget (platformOS platform) &&
           dynLibLoader dflags == SystemDependent &&
           not (gopt Opt_Static dflags)
            = let libpath = if gopt Opt_RelativeDynlibPaths dflags
                            then "$ORIGIN" </>
                                 (l `makeRelativeTo` full_output_fn)
                            else l
                  rpath = if gopt Opt_RPath dflags
                          then ["-Wl,-rpath", "-Wl," ++ libpath]
                          else []
                  -- Solaris 11's linker does not support -rpath-link option. It silently
                  -- ignores it and then complains about next option which is -l<some
                  -- dir> as being a directory and not expected object file, E.g
                  -- ld: elf error: file
                  -- /tmp/ghc-src/libraries/base/dist-install/build:
                  -- elf_begin: I/O error: region read: Is a directory
                  rpathlink = if (platformOS platform) == OSSolaris2
                              then []
                              else ["-Wl,-rpath-link", "-Wl," ++ l]
              in ["-L" ++ l] ++ rpathlink ++ rpath
         | osMachOTarget (platformOS platform) &&
           dynLibLoader dflags == SystemDependent &&
           not (gopt Opt_Static dflags) &&
           gopt Opt_RPath dflags
            = let libpath = if gopt Opt_RelativeDynlibPaths dflags
                            then "@loader_path" </>
                                 (l `makeRelativeTo` full_output_fn)
                            else l
              in ["-L" ++ l] ++ ["-Wl,-rpath", "-Wl," ++ libpath]
         | otherwise = ["-L" ++ l]
    let lib_paths = libraryPaths dflags
    let lib_path_opts = map ("-L"++) lib_paths
    extraLinkObj <- mkExtraObjToLinkIntoBinary dflags
    noteLinkObjs <- mkNoteObjsToLinkIntoBinary dflags dep_packages
    pkg_link_opts <- do
        (package_hs_libs, extra_libs, other_flags) <- getPackageLinkOpts dflags dep_packages
        return $ if staticLink
            then package_hs_libs -- If building an executable really means making a static
                                 -- library (e.g. iOS), then we only keep the -l options for
                                 -- HS packages, because libtool doesn't accept other options.
                                 -- In the case of iOS these need to be added by hand to the
                                 -- final link in Xcode.
            else other_flags ++ package_hs_libs ++ extra_libs -- -Wl,-u,<sym> contained in other_flags
                                                              -- needs to be put before -l<package>,
                                                              -- otherwise Solaris linker fails linking
                                                              -- a binary with unresolved symbols in RTS
                                                              -- which are defined in base package
                                                              -- the reason for this is a note in ld(1) about
                                                              -- '-u' option: "The placement of this option
                                                              -- on the command line is significant.
                                                              -- This option must be placed before the library
                                                              -- that defines the symbol."
    -- frameworks
    pkg_framework_opts <- getPkgFrameworkOpts dflags platform dep_packages
    let framework_opts = getFrameworkOpts dflags platform
        -- probably _stub.o files
    let extra_ld_inputs = ldInputs dflags
    -- Here are some libs that need to be linked at the *end* of
    -- the command line, because they contain symbols that are referred to
    -- by the RTS.  We can't therefore use the ordinary way opts for these.
    let
        debug_opts | WayDebug `elem` ways dflags = [
#if defined(HAVE_LIBBFD)
                        "-lbfd", "-liberty"
#endif
                         ]
                   | otherwise            = []
    let thread_opts
         | WayThreaded `elem` ways dflags =
            let os = platformOS (targetPlatform dflags)
            in if os == OSOsf3 then ["-lpthread", "-lexc"]
               else if os `elem` [OSMinGW32, OSFreeBSD, OSOpenBSD,
                                  OSNetBSD, OSHaiku, OSQNXNTO, OSiOS, OSDarwin]
               then []
               else ["-lpthread"]
         | otherwise               = []
    -- Windows: embed a manifest resource object if one was generated.
    rc_objs <- maybeCreateManifest dflags output_fn
    let link = if staticLink
                   then SysTools.runLibtool
                   else SysTools.runLink
    link dflags (
                       map SysTools.Option verbFlags
                      ++ [ SysTools.Option "-o"
                         , SysTools.FileOption "" output_fn
                         ]
                      ++ map SysTools.Option (
                         []
                      -- Permit the linker to auto link _symbol to _imp_symbol.
                      -- This lets us link against DLLs without needing an "import library".
                      ++ (if platformOS platform == OSMinGW32
                          then ["-Wl,--enable-auto-import"]
                          else [])
                      -- '-no_compact_unwind'
                      -- C++/Objective-C exceptions cannot use optimised
                      -- stack unwinding code. The optimised form is the
                      -- default in Xcode 4 on at least x86_64, and
                      -- without this flag we're also seeing warnings
                      -- like
                      --     ld: warning: could not create compact unwind for .LFB3: non-standard register 5 being saved in prolog
                      -- on x86.
                      ++ (if sLdSupportsCompactUnwind mySettings &&
                             not staticLink &&
                             (platformOS platform == OSDarwin || platformOS platform == OSiOS) &&
                             case platformArch platform of
                               ArchX86     -> True
                               ArchX86_64  -> True
                               ArchARM {}  -> True
                               ArchARM64   -> True
                               _ -> False
                          then ["-Wl,-no_compact_unwind"]
                          else [])
                      -- '-no_pie'
                      -- iOS uses 'dynamic-no-pic', so we must pass this to ld to suppress a warning; see #7722
                      ++ (if platformOS platform == OSiOS &&
                             not staticLink
                          then ["-Wl,-no_pie"]
                          else [])
                      -- '-Wl,-read_only_relocs,suppress'
                      -- ld gives loads of warnings like:
                      --     ld: warning: text reloc in _base_GHCziArr_unsafeArray_info to _base_GHCziArr_unsafeArray_closure
                      -- when linking any program. We're not sure
                      -- whether this is something we ought to fix, but
                      -- for now this flags silences them.
                      ++ (if platformOS   platform == OSDarwin &&
                             platformArch platform == ArchX86 &&
                             not staticLink
                          then ["-Wl,-read_only_relocs,suppress"]
                          else [])
                      ++ o_files
                      ++ lib_path_opts)
                      ++ extra_ld_inputs
                      ++ map SysTools.Option (
                         rc_objs
                      ++ framework_opts
                      ++ pkg_lib_path_opts
                      ++ extraLinkObj:noteLinkObjs
                      ++ pkg_link_opts
                      ++ pkg_framework_opts
                      ++ debug_opts
                      ++ thread_opts
                    ))
-- | Decide the file name of the linked executable (or static archive,
-- when @staticLink@ is set), honouring an explicit @-o@ name when one
-- was given and falling back to platform-conventional defaults.
exeFileName :: Bool -> DynFlags -> FilePath
exeFileName staticLink dflags =
  case outputFile dflags of
    -- Explicit output name: only append a default extension when the
    -- user-supplied name does not already carry one.
    Just name
      | onWindows   -> name `withDefaultExt` "exe"
      | staticLink  -> name `withDefaultExt` "a"
      | otherwise   -> name
    -- No -o given: use the platform's conventional default names.
    Nothing
      | onWindows   -> "main.exe"
      | staticLink  -> "liba.a"
      | otherwise   -> "a.out"
  where
    onWindows = platformOS (targetPlatform dflags) == OSMinGW32
    withDefaultExt name ext
      | null (takeExtension name) = name <.> ext
      | otherwise                 = name
-- | On Windows (and only when -fgen-manifest is on) write a UAC manifest
-- file next to the executable; when -fembed-manifest is also on, compile
-- it with windres into an object file for the caller to link in.
-- Returns the extra object files to embed (empty on other platforms).
maybeCreateManifest
   :: DynFlags
   -> FilePath                          -- filename of executable
   -> IO [FilePath]                     -- extra objects to embed, maybe
maybeCreateManifest dflags exe_filename
 | platformOS (targetPlatform dflags) == OSMinGW32 &&
   gopt Opt_GenManifest dflags
    = do let manifest_filename = exe_filename <.> "manifest"
         -- The manifest requests the plain "asInvoker" execution level so
         -- Windows does not apply installer-detection heuristics to us.
         writeFile manifest_filename $
             "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>\n"++
             "  <assembly xmlns=\"urn:schemas-microsoft-com:asm.v1\" manifestVersion=\"1.0\">\n"++
             "  <assemblyIdentity version=\"1.0.0.0\"\n"++
             "     processorArchitecture=\"X86\"\n"++
             "     name=\"" ++ dropExtension exe_filename ++ "\"\n"++
             "     type=\"win32\"/>\n\n"++
             "  <trustInfo xmlns=\"urn:schemas-microsoft-com:asm.v3\">\n"++
             "    <security>\n"++
             "      <requestedPrivileges>\n"++
             "        <requestedExecutionLevel level=\"asInvoker\" uiAccess=\"false\"/>\n"++
             "        </requestedPrivileges>\n"++
             "       </security>\n"++
             "  </trustInfo>\n"++
             "</assembly>\n"
         -- Windows will find the manifest file if it is named
         -- foo.exe.manifest. However, for extra robustness, and so that
         -- we can move the binary around, we can embed the manifest in
         -- the binary itself using windres:
         if not (gopt Opt_EmbedManifest dflags) then return [] else do
         rc_filename <- newTempName dflags "rc"
         rc_obj_filename <- newTempName dflags (objectSuf dflags)
         writeFile rc_filename $
             "1 24 MOVEABLE PURE " ++ show manifest_filename ++ "\n"
               -- magic numbers :-)
               -- show is a bit hackish above, but we need to escape the
               -- backslashes in the path.
         runWindres dflags $ map SysTools.Option $
               ["--input="++rc_filename,
                "--output="++rc_obj_filename,
                "--output-format=coff"]
               -- no FileOptions here: windres doesn't like seeing
               -- backslashes, apparently
         -- The stand-alone manifest file is no longer needed once it has
         -- been embedded into the object file.
         removeFile manifest_filename
         return [rc_obj_filename]
 | otherwise = return []
-- | Link a shared library, first warning if RTS-option flags were given:
-- -rtsopts / -with-rtsopts have no effect on shared objects.
linkDynLibCheck :: DynFlags -> [String] -> [PackageKey] -> IO ()
linkDynLibCheck dflags o_files dep_packages
 = do
    -- Tell the user how to get the equivalent behaviour from their own
    -- main() instead of silently ignoring the flags.
    when (haveRtsOptsFlags dflags) $ do
      log_action dflags dflags SevInfo noSrcSpan defaultUserStyle
          (text "Warning: -rtsopts and -with-rtsopts have no effect with -shared." $$
           text "    Call hs_init_ghc() from your main() function to set these options.")
    linkDynLib dflags o_files dep_packages
-- | Build a static archive instead of an ordinary executable link;
-- only supported on Darwin-family platforms (used for iOS targets).
linkStaticLibCheck :: DynFlags -> [String] -> [PackageKey] -> IO ()
linkStaticLibCheck dflags o_files dep_packages
 = do
    when (platformOS (targetPlatform dflags) `notElem` [OSiOS, OSDarwin]) $
      throwGhcExceptionIO (ProgramError "Static archive creation only supported on Darwin/OS X/iOS")
    -- linkBinary' with staticLink=True drives libtool rather than ld.
    linkBinary' True dflags o_files dep_packages
-- -----------------------------------------------------------------------------
-- Running CPP
-- | Run the C preprocessor over @input_fn@, writing the result to
-- @output_fn@. With @raw@ set the standalone cpp program is used;
-- otherwise we go through the C compiler with -E so that its
-- predefined macros are visible to the source being preprocessed.
doCpp :: DynFlags -> Bool -> FilePath -> FilePath -> IO ()
doCpp dflags raw input_fn output_fn = do
    let hscpp_opts = picPOpts dflags
    let cmdline_include_paths = includePaths dflags
    pkg_include_dirs <- getPackageIncludePath dflags []
    -- Interleave "-I" before every include directory, user dirs first.
    let include_paths = foldr (\ x xs -> "-I" : x : xs) []
                          (cmdline_include_paths ++ pkg_include_dirs)
    let verbFlags = getVerbFlags dflags
    let cpp_prog args | raw = SysTools.runCpp dflags args
                      | otherwise = SysTools.runCc dflags (SysTools.Option "-E" : args)
    let target_defs =
          [ "-D" ++ HOST_OS ++ "_BUILD_OS=1",
            "-D" ++ HOST_ARCH ++ "_BUILD_ARCH=1",
            "-D" ++ TARGET_OS ++ "_HOST_OS=1",
            "-D" ++ TARGET_ARCH ++ "_HOST_ARCH=1" ]
        -- remember, in code we *compile*, the HOST is the same our TARGET,
        -- and BUILD is the same as our HOST.
    -- SIMD capability defines mirror the -msse* / -mavx* flags in effect.
    let sse_defs =
          [ "-D__SSE__=1" | isSseEnabled dflags ] ++
          [ "-D__SSE2__=1" | isSse2Enabled dflags ] ++
          [ "-D__SSE4_2__=1" | isSse4_2Enabled dflags ]
    let avx_defs =
          [ "-D__AVX__=1" | isAvxEnabled dflags ] ++
          [ "-D__AVX2__=1" | isAvx2Enabled dflags ] ++
          [ "-D__AVX512CD__=1" | isAvx512cdEnabled dflags ] ++
          [ "-D__AVX512ER__=1" | isAvx512erEnabled dflags ] ++
          [ "-D__AVX512F__=1" | isAvx512fEnabled dflags ] ++
          [ "-D__AVX512PF__=1" | isAvx512pfEnabled dflags ]
    backend_defs <- getBackendDefs dflags
#ifdef GHCI
    let th_defs = [ "-D__GLASGOW_HASKELL_TH__=YES" ]
#else
    let th_defs = [ "-D__GLASGOW_HASKELL_TH__=NO" ]
#endif
    -- Default CPP defines in Haskell source
    ghcVersionH <- getGhcVersionPathName dflags
    let hsSourceCppOpts =
          [ "-D__GLASGOW_HASKELL__="++cProjectVersionInt
          , "-include", ghcVersionH
          ]
    cpp_prog ( map SysTools.Option verbFlags
            ++ map SysTools.Option include_paths
            ++ map SysTools.Option hsSourceCppOpts
            ++ map SysTools.Option target_defs
            ++ map SysTools.Option backend_defs
            ++ map SysTools.Option th_defs
            ++ map SysTools.Option hscpp_opts
            ++ map SysTools.Option sse_defs
            ++ map SysTools.Option avx_defs
        -- Set the language mode to assembler-with-cpp when preprocessing. This
        -- alleviates some of the C99 macro rules relating to whitespace and the hash
        -- operator, which we tend to abuse. Clang in particular is not very happy
        -- about this.
            ++ [ SysTools.Option "-x"
               , SysTools.Option "assembler-with-cpp"
               , SysTools.Option input_fn
        -- We hackily use Option instead of FileOption here, so that the file
        -- name is not back-slashed on Windows. cpp is capable of
        -- dealing with / in filenames, so it works fine. Furthermore
        -- if we put in backslashes, cpp outputs #line directives
        -- with *double* backslashes. And that in turn means that
        -- our error messages get double backslashes in them.
        -- In due course we should arrange that the lexer deals
        -- with these \\ escapes properly.
               , SysTools.Option "-o"
               , SysTools.FileOption "" output_fn
               ])
-- | CPP defines describing the active code generator backend: when
-- compiling via LLVM (and its version can be determined) we expose
-- __GLASGOW_HASKELL_LLVM__; otherwise no extra defines are produced.
getBackendDefs :: DynFlags -> IO [String]
getBackendDefs dflags
  | hscTarget dflags == HscLlvm =
      fmap (maybe [] llvmDef) (figureLlvmVersion dflags)
  | otherwise = return []
  where
    llvmDef n = [ "-D__GLASGOW_HASKELL_LLVM__=" ++ show n ]
-- ---------------------------------------------------------------------------
-- join object files into a single relocatable object file, using ld -r
-- | Join several object files into a single relocatable object file,
-- using @ld -r@. To avoid overflowing the command-line length limit the
-- inputs are passed via an INPUT(...) linker script (GNU ld) or a
-- -filelist (Apple ld) when the linker supports it.
joinObjectFiles :: DynFlags -> [FilePath] -> FilePath -> IO ()
joinObjectFiles dflags o_files output_fn = do
  let mySettings = settings dflags
      ldIsGnuLd = sLdIsGnuLd mySettings
      osInfo = platformOS (targetPlatform dflags)
      ld_r args cc = SysTools.runLink dflags ([
                        SysTools.Option "-nostdlib",
                        SysTools.Option "-Wl,-r"
                        ]
                        ++ (if any (cc ==) [Clang, AppleClang, AppleClang51]
                             then []
                             else [SysTools.Option "-nodefaultlibs"])
                        ++ (if osInfo == OSFreeBSD
                             then [SysTools.Option "-L/usr/lib"]
                             else [])
                        -- gcc on sparc sets -Wl,--relax implicitly, but
                        -- -r and --relax are incompatible for ld, so
                        -- disable --relax explicitly.
                        ++ (if platformArch (targetPlatform dflags) == ArchSPARC
                            && ldIsGnuLd
                             then [SysTools.Option "-Wl,-no-relax"]
                             else [])
                        ++ map SysTools.Option ld_build_id
                        ++ [ SysTools.Option "-o",
                             SysTools.FileOption "" output_fn ]
                        ++ args)
      -- suppress the generation of the .note.gnu.build-id section,
      -- which we don't need and sometimes causes ld to emit a
      -- warning:
      ld_build_id | sLdSupportsBuildId mySettings = ["-Wl,--build-id=none"]
                  | otherwise = []
  ccInfo <- getCompilerInfo dflags
  if ldIsGnuLd
     then do
          -- GNU ld: feed the input list through an INPUT(...) script.
          script <- newTempName dflags "ldscript"
          cwd <- getCurrentDirectory
          let o_files_abs = map (cwd </>) o_files
          writeFile script $ "INPUT(" ++ unwords o_files_abs ++ ")"
          ld_r [SysTools.FileOption "" script] ccInfo
     else if sLdSupportsFilelist mySettings
     then do
          -- Apple ld: use -filelist with one object path per line.
          filelist <- newTempName dflags "filelist"
          writeFile filelist $ unlines o_files
          ld_r [SysTools.Option "-Wl,-filelist",
                SysTools.FileOption "-Wl," filelist] ccInfo
     else do
          -- Fallback: pass the objects as plain command-line arguments.
          ld_r (map (SysTools.FileOption "") o_files) ccInfo
-- -----------------------------------------------------------------------------
-- Misc.
-- | True when the session only writes interface files and generates no
-- object code (-fwrite-interface combined with -fno-code).
writeInterfaceOnlyMode :: DynFlags -> Bool
writeInterfaceOnlyMode dflags = writesIface && noCodeGen
  where
    writesIface = gopt Opt_WriteInterface dflags
    noCodeGen   = hscTarget dflags == HscNothing
-- | What phase to run after one of the backend code generators has run
-- Boot and signature files produce no code, so compilation stops there;
-- otherwise the next phase depends on the chosen code generator.
hscPostBackendPhase :: DynFlags -> HscSource -> HscTarget -> Phase
hscPostBackendPhase _ HsBootFile _ = StopLn
hscPostBackendPhase _ HsigFile _ = StopLn
hscPostBackendPhase dflags _ hsc_lang =
  case hsc_lang of
        HscC -> HCc
        -- With -split-objs the generated assembly is first cut into
        -- sections by the splitter before being assembled.
        HscAsm | gopt Opt_SplitObjs dflags -> Splitter
               | otherwise -> As False
        HscLlvm -> LlvmOpt
        HscNothing -> StopLn
        HscInterpreted -> StopLn
-- | Update the timestamp of an object file (creating its directory
-- first if needed) so the recompilation checker treats it as fresh.
touchObjectFile :: DynFlags -> FilePath -> IO ()
touchObjectFile dflags path = do
  createDirectoryIfMissing True $ takeDirectory path
  SysTools.touch dflags "Touching object file" path
-- | True when the user passed -with-rtsopts, or enabled -rtsopts beyond
-- the safe-only default; used to warn where these flags have no effect.
haveRtsOptsFlags :: DynFlags -> Bool
haveRtsOptsFlags dflags = explicitOpts || optsEnabled
  where
    explicitOpts = isJust (rtsOpts dflags)
    optsEnabled = case rtsOptsEnabled dflags of
                    RtsOptsSafeOnly -> False
                    _               -> True
-- | Find out path to @ghcversion.h@ file
-- Searches the RTS package's include directories and returns the first
-- directory that actually contains the header; throws an
-- 'InstallationError' when none does (a broken installation).
getGhcVersionPathName :: DynFlags -> IO FilePath
getGhcVersionPathName dflags = do
  dirs <- getPackageIncludePath dflags [rtsPackageKey]
  found <- filterM doesFileExist (map (</> "ghcversion.h") dirs)
  case found of
      [] -> throwGhcExceptionIO (InstallationError ("ghcversion.h missing"))
      (x:_) -> return x
-- Note [-fPIC for assembler]
-- When compiling .c source file GHC's driver pipeline basically
-- does the following two things:
-- 1. ${CC} -S 'PIC_CFLAGS' source.c
-- 2. ${CC} -x assembler -c 'PIC_CFLAGS' source.S
--
-- Why do we need to pass 'PIC_CFLAGS' both to C compiler and assembler?
-- Because on some architectures (at least sparc32) assembler also chooses
-- the relocation type!
-- Consider the following C module:
--
-- /* pic-sample.c */
-- int v;
-- void set_v (int n) { v = n; }
-- int get_v (void) { return v; }
--
-- $ gcc -S -fPIC pic-sample.c
-- $ gcc -c pic-sample.s -o pic-sample.no-pic.o # incorrect binary
-- $ gcc -c -fPIC pic-sample.s -o pic-sample.pic.o # correct binary
--
-- $ objdump -r -d pic-sample.pic.o > pic-sample.pic.o.od
-- $ objdump -r -d pic-sample.no-pic.o > pic-sample.no-pic.o.od
-- $ diff -u pic-sample.pic.o.od pic-sample.no-pic.o.od
--
-- Most of architectures won't show any difference in this test, but on sparc32
-- the following assembly snippet:
--
-- sethi %hi(_GLOBAL_OFFSET_TABLE_-8), %l7
--
-- generates two kinds of relocations; only 'R_SPARC_PC22' is correct:
--
-- 3c: 2f 00 00 00 sethi %hi(0), %l7
-- - 3c: R_SPARC_PC22 _GLOBAL_OFFSET_TABLE_-0x8
-- + 3c: R_SPARC_HI22 _GLOBAL_OFFSET_TABLE_-0x8
{- Note [Don't normalise input filenames]
Summary
We used to normalise input filenames when starting the unlit phase. This
broke hpc in `--make` mode with imported literate modules (#2991).
Introduction
1) --main
When compiling a module with --main, GHC scans its imports to find out which
other modules it needs to compile too. It turns out that there is a small
difference between saying `ghc --make A.hs`, when `A` imports `B`, and
specifying both modules on the command line with `ghc --make A.hs B.hs`. In
the former case, the filename for B is inferred to be './B.hs' instead of
'B.hs'.
2) unlit
When GHC compiles a literate haskell file, the source code first needs to go
through unlit, which turns it into normal Haskell source code. At the start
of the unlit phase, in `Driver.Pipeline.runPhase`, we call unlit with the
option `-h` and the name of the original file. We used to normalise this
filename using System.FilePath.normalise, which among other things removes
an initial './'. unlit then uses that filename in #line directives that it
inserts in the transformed source code.
3) SrcSpan
A SrcSpan represents a portion of a source code file. It has fields
linenumber, start column, end column, and also a reference to the file it
originated from. The SrcSpans for a literate haskell file refer to the
filename that was passed to unlit -h.
4) -fhpc
At some point during compilation with -fhpc, in the function
`deSugar.Coverage.isGoodTickSrcSpan`, we compare the filename that a
`SrcSpan` refers to with the name of the file we are currently compiling.
For some reason I don't yet understand, they can sometimes legitimately be
different, and then hpc ignores that SrcSpan.
Problem
When running `ghc --make -fhpc A.hs`, where `A.hs` imports the literate
module `B.lhs`, `B` is inferred to be in the file `./B.lhs` (1). At the
start of the unlit phase, the name `./B.lhs` is normalised to `B.lhs` (2).
Therefore the SrcSpans of `B` refer to the file `B.lhs` (3), but we are
still compiling `./B.lhs`. Hpc thinks these two filenames are different (4),
doesn't include ticks for B, and we have unhappy customers (#2991).
Solution
Do not normalise `input_fn` when starting the unlit phase.
Alternative solution
Another option would be to not compare the two filenames on equality, but to
use System.FilePath.equalFilePath. That function first normalises its
arguments. The problem is that by the time we need to do the comparison, the
filenames have been turned into FastStrings, probably for performance
reasons, so System.FilePath.equalFilePath can not be used directly.
Archeology
The call to `normalise` was added in a commit called "Fix slash
direction on Windows with the new filePath code" (c9b6b5e8). The problem
that commit was addressing has since been solved in a different manner, in a
commit called "Fix the filename passed to unlit" (1eedbc6b). So the
`normalise` is no longer necessary.
-}
| TomMD/ghc | compiler/main/DriverPipeline.hs | bsd-3-clause | 101,091 | 0 | 31 | 35,528 | 16,066 | 8,100 | 7,966 | 1,370 | 45 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
module T15431a where
import Data.Coerce
import Data.Functor.Identity
-- | Coerce a @t a@ to 'Int' under a given @Coercible (t a) Int@
-- constraint; exercises representational-equality solving.
g1 :: Coercible (t a) Int => t a -> Int
g1 = coerce
-- | Same as 'g1' but with the Coercible arguments flipped; Coercible
-- is symmetric, so this must solve just the same.
g2 :: Coercible Int (t a) => t a -> Int
g2 = coerce
| sdiehl/ghc | testsuite/tests/typecheck/should_compile/T15431a.hs | bsd-3-clause | 233 | 0 | 8 | 46 | 81 | 44 | 37 | 9 | 1 |
module Main where
import Idris.AbsSyntax
import Idris.ElabDecls
import Idris.Main
import Idris.Options
import IRTS.CodegenC
import IRTS.Compiler
import Util.System
import Control.Monad
import System.Environment
import System.Exit
-- | Command-line options for the code generator: the .ibc inputs to
-- load, whether to elaborate an interface (no main), and the output
-- file name (defaults to \"a.out\").
data Opts = Opts { inputs :: [FilePath],
                   interface :: Bool,
                   output :: FilePath }
-- | Print usage information and terminate successfully. The type is
-- polymorphic in the result because 'exitWith' never returns normally.
-- Fix: added the missing top-level type signature.
showUsage :: IO a
showUsage = do
  putStrLn "A code generator which is intended to be called by the compiler, not by a user."
  putStrLn "Usage: idris-codegen-c <ibc-files> [-o <output-file>]"
  exitWith ExitSuccess
-- | Parse the command-line arguments into an 'Opts' record. Unrecognised
-- arguments are treated as input files; note that 'inputs' therefore
-- accumulates in reverse order of appearance.
getOpts :: IO Opts
getOpts = do xs <- getArgs
             return $ process (Opts [] False "a.out") xs
  where
    process opts ("-o":o:xs) = process (opts { output = o }) xs
    process opts ("--interface":xs) = process (opts { interface = True }) xs
    process opts (x:xs) = process (opts { inputs = x:inputs opts }) xs
    process opts [] = opts
-- | Main pipeline inside the Idris monad: set up the bundled C
-- compiler, elaborate primitives, load the .ibc inputs, compile to the
-- C intermediate representation and emit C code.
c_main :: Opts -> Idris ()
c_main opts = do runIO setupBundledCC
                 elabPrims
                 loadInputs (inputs opts) Nothing
                 -- Only elaborate a main program when we are not merely
                 -- producing an interface.
                 mainProg <- if interface opts
                                then liftM Just elabMain
                                else return Nothing
                 ir <- compile (Via IBCFormat "c") (output opts) mainProg
                 runIO $ codegenC ir
-- | Entry point: show usage when no input files were given, otherwise
-- hand the parsed options to the Idris driver.
main :: IO ()
main = do
  opts <- getOpts
  case inputs opts of
    [] -> showUsage
    _  -> runMain (c_main opts)
| uuhan/Idris-dev | codegen/idris-codegen-c/Main.hs | bsd-3-clause | 1,483 | 0 | 12 | 484 | 438 | 224 | 214 | 38 | 4 |
{-# LANGUAGE OverloadedStrings, BangPatterns #-}
import Blaze.ByteString.Builder (copyByteString)
import Control.Concurrent (runInUnboundThread)
import Data.Aeson ((.=), object, encode)
import qualified Data.ByteString.Lazy as L
import Data.Text (Text)
import Network.HTTP.Types (status200, status404)
import Network.Wai (responseBuilder, rawPathInfo)
import qualified Network.Wai.Handler.Warp as W
-- | Warp benchmark server: serves a canned JSON body on /json, a plain
-- text body on /plaintext and 404 otherwise, on port 8000.
main :: IO ()
main =
    runInUnboundThread $ W.runSettings settings app
  where
    -- Exceptions are deliberately swallowed: benchmark harnesses close
    -- connections abruptly and we do not want that logged.
    settings = W.setPort 8000
             $ W.setOnException (\_ _ -> return ()) W.defaultSettings
    app request respond = case rawPathInfo request of
        "/json" -> respond responseJson
        "/plaintext" -> respond responsePlaintext
        _ -> respond $ responseBuilder status404 [] ""
    -- The bang patterns force these CAFs once, up front, so every
    -- request reuses the prebuilt responses.
    !responseJson = responseBuilder status200 ctJson json
    ctJson = [("Content-Type", "application/json")]
    !json = copyByteString
          $ L.toStrict
          $ encode
          $ object ["message" .= ("Hello, World!" :: Text)]
    !responsePlaintext = responseBuilder status200 ctPlaintext plaintext
    ctPlaintext = [("Content-type", "text/plain")]
    plaintext = "Hello, World!"
| zdanek/FrameworkBenchmarks | frameworks/Haskell/wai/bench/wai.hs | bsd-3-clause | 1,176 | 0 | 12 | 231 | 315 | 174 | 141 | 27 | 3 |
module T7878B where
import {-# SOURCE #-} T7878A
| forked-upstream-packages-for-ghcjs/ghc | testsuite/tests/generics/T7878B.hs | bsd-3-clause | 50 | 0 | 3 | 9 | 8 | 6 | 2 | 2 | 0 |
-------------------------------------------------------------------
--作者:Mega Show
--时间:2016.10.11
--说明:该源文件定义分数类型Fraction,并实现了分数的加减乘除、以及
-- ratfloor、ratfloat、rateq等函数
-------------------------------------------------------------------
module MyFraction where
import Test.QuickCheck
import Prelude hiding ((<*>))
-- | A fraction represented as (numerator, denominator).
type Fraction = (Integer,Integer)
-- | Add two fractions and reduce the result to lowest terms.
ratplus :: Fraction -> Fraction -> Fraction
ratplus (a,b) (c,d) = (num `div` g, den `div` g)
  where
    num = a*d + b*c
    den = b*d
    g   = gcd num den
-- | Subtract the second fraction from the first and reduce the result
-- to lowest terms.
--
-- Bug fix: the reducing factor must be the gcd of the *difference*
-- numerator @a*d - b*c@, not the sum numerator. Using the sum could
-- divide by a factor that does not divide the actual numerator, e.g.
-- (2,3) - (1,3) previously produced (0,1) instead of (1,3).
ratminus :: Fraction -> Fraction -> Fraction
ratminus (a,b) (c,d) = (div (a*d-b*c) g,div (b*d) g)
             where g = gcd (a*d-b*c) (b*d)
-- | Multiply two fractions and reduce the result to lowest terms.
rattimes :: Fraction -> Fraction -> Fraction
rattimes (a,b) (c,d) = (num `div` g, den `div` g)
  where
    num = a*c
    den = b*d
    g   = gcd num den
-- | Divide the first fraction by the second (multiply by the
-- reciprocal) and reduce the result to lowest terms.
ratdiv :: Fraction -> Fraction -> Fraction
ratdiv (a,b) (c,d) = (num `div` g, den `div` g)
  where
    num = a*d
    den = b*c
    g   = gcd num den
-- | Floor of a fraction: numerator divided by denominator, rounded
-- towards negative infinity (integer 'div').
ratfloor :: Fraction -> Integer
ratfloor (a,b) = a `div` b
-- | Convert a fraction to its floating point approximation.
ratfloat :: Fraction -> Float
ratfloat (a,b) = fromInteger a / fromInteger b
-- | Structural equality of two fractions: numerators and denominators
-- must match exactly (operands are not reduced first).
-- Fix: replaced the redundant @if ... then True else False@ with the
-- boolean condition itself.
rateq :: Fraction -> Fraction -> Bool
rateq (a,b) (c,d) = a == c && b == d
-- Infix operator aliases for the fraction arithmetic functions above.
(<+>) :: Fraction -> Fraction -> Fraction
(<+>) = ratplus
(<->) :: Fraction -> Fraction -> Fraction
(<->) = ratminus
(<*>) :: Fraction -> Fraction -> Fraction
(<*>) = rattimes
(</>) :: Fraction -> Fraction -> Fraction
(</>) = ratdiv
(<==>) :: Fraction -> Fraction -> Bool
(<==>) = rateq
------------------------------------------------------------------
--测试:
-- 用(1,2),(2,5)对所有函数进行了操作
--困难:
-- 不懂使用quickCheck
------------------------------------------------------------------
| MegaShow/college-programming | Homework/Haskell Function Programming/MyFraction.hs | mit | 2,076 | 0 | 11 | 280 | 854 | 498 | 356 | 32 | 2 |
module Homework.Week11Spec (
main,
spec
) where
import Test.Hspec
import Homework.Week11.Assignment
main :: IO ()
main = hspec spec
-- | Placeholder spec for week 11; its single example is still pending.
spec :: Spec
spec =
  describe "week 11" $
    it "needs some tests!" pending
-- |
-- Module: BigE.Texture
-- Copyright: (c) 2017 Patrik Sandahl
-- Licence: MIT
-- Maintainer: Patrik Sandahl <patrik.sandahl@gmail.com>
-- Stability: experimental
-- Portability: portable
module BigE.Texture
( TextureParameters (..)
, CubeMapFiles (..)
, defaultParams2D
, fromFile2D
, fromFileCube
, enable2D
, disable2D
, enableCube
, disableCube
, delete
, readImageRGB8
, readImageRGB8A
) where
import BigE.Internal.GLResources (deleteTexture, genTexture)
import BigE.Types (Texture (..), TextureFormat (..),
TextureMagFilter (..),
TextureMinFilter (..),
TextureWrap (..), ToGLint (..))
import Codec.Picture
import Control.Monad (forM, when)
import Control.Monad.IO.Class (MonadIO, liftIO)
import qualified Data.Vector.Storable as Vector
import Graphics.GL (GLenum, GLfloat)
import qualified Graphics.GL as GL
-- | User parameters for the loading of a 'Texture'.
data TextureParameters = TextureParameters
    { format :: !TextureFormat          -- ^ Pixel format to decode to (RGB8/RGBA8).
    , genMipmaps :: !Bool               -- ^ Generate a mipmap chain after upload.
    , wrapS :: !TextureWrap             -- ^ Wrap mode along the S (horizontal) axis.
    , wrapT :: !TextureWrap             -- ^ Wrap mode along the T (vertical) axis.
    , minFilter :: !TextureMinFilter    -- ^ Minification filter.
    , magFilter :: !TextureMagFilter    -- ^ Magnification filter.
    , lodBias :: !GLfloat               -- ^ Level-of-detail bias applied at sampling.
    } deriving Show
-- | The six files for the cube map faces, one image per axis direction.
data CubeMapFiles = CubeMapFiles
    { negativeX :: !FilePath
    , positiveX :: !FilePath
    , negativeY :: !FilePath
    , positiveY :: !FilePath
    , negativeZ :: !FilePath
    , positiveZ :: !FilePath
    } deriving Show
-- | Default values for 2D texture parameters. The 'TextureFormat' is set to
-- RGB8, and the other values are set to resonable defaults: repeating
-- wrap on both axes, mipmapped minification, linear magnification and
-- no LOD bias.
defaultParams2D :: TextureParameters
defaultParams2D =
    TextureParameters
        { format = RGB8
        , genMipmaps = True
        , wrapS = WrapRepeat
        , wrapT = WrapRepeat
        , minFilter = MinNearestMipmapLinear
        , magFilter = MagLinear
        , lodBias = 0
        }
-- | Load a 2D texture from file, using the given 'TextureParameters'. The textures
-- loaded from this function must be "turned up side down" in the fragment
-- shader by flipping the T value.
-- On decoding failure the generated texture object is deleted again and
-- the decoder's error message is returned.
fromFile2D :: MonadIO m => FilePath -> TextureParameters -> m (Either String Texture)
fromFile2D file params = do
    tex@(Texture handle) <- genTexture
    GL.glBindTexture GL.GL_TEXTURE_2D handle
    eResult <- load2D GL.GL_TEXTURE_2D file (format params)
    case eResult of
        Right () -> do
            -- Apply the user's sampling parameters to the bound texture.
            when (genMipmaps params) $
                GL.glGenerateMipmap GL.GL_TEXTURE_2D
            GL.glTexParameteri GL.GL_TEXTURE_2D GL.GL_TEXTURE_WRAP_S (toGLint $ wrapS params)
            GL.glTexParameteri GL.GL_TEXTURE_2D GL.GL_TEXTURE_WRAP_T (toGLint $ wrapT params)
            GL.glTexParameteri GL.GL_TEXTURE_2D GL.GL_TEXTURE_MIN_FILTER (toGLint $ minFilter params)
            GL.glTexParameteri GL.GL_TEXTURE_2D GL.GL_TEXTURE_MAG_FILTER (toGLint $ magFilter params)
            GL.glTexParameterf GL.GL_TEXTURE_2D GL.GL_TEXTURE_LOD_BIAS (lodBias params)
            GL.glBindTexture GL.GL_TEXTURE_2D 0
            return $ Right tex
        Left err -> do
            -- Unbind and free the texture object; it holds no image.
            GL.glBindTexture GL.GL_TEXTURE_2D 0
            deleteTexture tex
            return $ Left err
-- | Load a cube map texture from a set of six files, one per face, all
-- decoded with the same 'TextureFormat'. On any decoding failure the
-- texture object is deleted and the first error is returned.
fromFileCube :: MonadIO m => CubeMapFiles -> TextureFormat -> m (Either String Texture)
fromFileCube files format' = do
    tex@(Texture handle) <- genTexture
    GL.glBindTexture GL.GL_TEXTURE_CUBE_MAP handle
    -- Pair each face's file with its cube map upload target.
    let xs = [ (negativeX files, GL.GL_TEXTURE_CUBE_MAP_NEGATIVE_X)
             , (positiveX files, GL.GL_TEXTURE_CUBE_MAP_POSITIVE_X)
             , (negativeY files, GL.GL_TEXTURE_CUBE_MAP_NEGATIVE_Y)
             , (positiveY files, GL.GL_TEXTURE_CUBE_MAP_POSITIVE_Y)
             , (negativeZ files, GL.GL_TEXTURE_CUBE_MAP_NEGATIVE_Z)
             , (positiveZ files, GL.GL_TEXTURE_CUBE_MAP_POSITIVE_Z)
             ]
    eResult <- sequence <$>
        forM xs (\(path, target) -> load2D target path format')
    case eResult of
        Right _ -> do
            GL.glTexParameteri GL.GL_TEXTURE_CUBE_MAP GL.GL_TEXTURE_WRAP_S (toGLint WrapClampToEdge)
            GL.glTexParameteri GL.GL_TEXTURE_CUBE_MAP GL.GL_TEXTURE_WRAP_T (toGLint WrapClampToEdge)
            GL.glTexParameteri GL.GL_TEXTURE_CUBE_MAP GL.GL_TEXTURE_WRAP_R (toGLint WrapClampToEdge)
            GL.glTexParameteri GL.GL_TEXTURE_CUBE_MAP GL.GL_TEXTURE_MIN_FILTER (toGLint MinLinear)
            GL.glTexParameteri GL.GL_TEXTURE_CUBE_MAP GL.GL_TEXTURE_MAG_FILTER (toGLint MagLinear)
            -- Bug fix: unbind the cube map target that was bound above.
            -- The success path previously unbound GL_TEXTURE_2D, leaving
            -- the cube map texture still bound.
            GL.glBindTexture GL.GL_TEXTURE_CUBE_MAP 0
            return $ Right tex
        Left err -> do
            GL.glBindTexture GL.GL_TEXTURE_CUBE_MAP 0
            deleteTexture tex
            return $ Left err
-- | Enable the 2D texture at the given texture unit.
-- The unit index is an offset from GL_TEXTURE0.
enable2D :: MonadIO m => Int -> Texture -> m ()
enable2D unit (Texture texture) = do
    GL.glActiveTexture $ GL.GL_TEXTURE0 + fromIntegral unit
    GL.glBindTexture GL.GL_TEXTURE_2D texture
-- | Disable the 2D texture at the given texture unit by binding the
-- reserved texture name 0 to it.
disable2D :: MonadIO m => Int -> m ()
disable2D unit = do
    GL.glActiveTexture $ GL.GL_TEXTURE0 + fromIntegral unit
    GL.glBindTexture GL.GL_TEXTURE_2D 0
-- | Enable the cube map texture at the given texture unit.
-- The unit index is an offset from GL_TEXTURE0.
enableCube :: MonadIO m => Int -> Texture -> m ()
enableCube unit (Texture texture) = do
    GL.glActiveTexture $ GL.GL_TEXTURE0 + fromIntegral unit
    GL.glBindTexture GL.GL_TEXTURE_CUBE_MAP texture
-- | Disable the cube map texture at the given texture unit by binding
-- the reserved texture name 0 to it.
disableCube :: MonadIO m => Int -> m ()
disableCube unit = do
    GL.glActiveTexture $ GL.GL_TEXTURE0 + fromIntegral unit
    GL.glBindTexture GL.GL_TEXTURE_CUBE_MAP 0
-- | Delete the given texture, freeing its GL object.
delete :: MonadIO m => Texture -> m ()
delete = deleteTexture
-- | Read an image from file and convert it to RGB8 pixels; returns the
-- decoder's error message on failure.
readImageRGB8 :: MonadIO m => FilePath -> m (Either String (Image PixelRGB8))
readImageRGB8 file = fmap convertRGB8 <$> liftIO (readImage file)
-- | Read an image from file and convert it to RGBA8 pixels; returns the
-- decoder's error message on failure.
readImageRGB8A :: MonadIO m => FilePath -> m (Either String (Image PixelRGBA8))
readImageRGB8A file = fmap convertRGBA8 <$> liftIO (readImage file)
-- | Decode one image file according to the requested 'TextureFormat'
-- and upload it to the given GL target; decoding failures are returned
-- as 'Left' with the decoder's message.
load2D :: MonadIO m => GLenum -> FilePath -> TextureFormat -> m (Either String ())
load2D target file RGB8 =
    readImageRGB8 file >>= traverse (setTexture2DRGB8 target)
load2D target file RGBA8 =
    readImageRGB8A file >>= traverse (setTexture2DRGBA8 target)
-- | Upload an RGB8 image's pixel data to the given GL target at mipmap
-- level 0 via a zero-copy pointer into the image's storable vector.
setTexture2DRGB8 :: MonadIO m => GLenum -> Image PixelRGB8 -> m ()
setTexture2DRGB8 target image = liftIO $
    Vector.unsafeWith (imageData image) $
        GL.glTexImage2D target 0 (fromIntegral GL.GL_RGB)
                        (fromIntegral $ imageWidth image)
                        (fromIntegral $ imageHeight image) 0
                        GL.GL_RGB GL.GL_UNSIGNED_BYTE
-- | Upload an RGBA8 image's pixel data to the given GL target at mipmap
-- level 0 via a zero-copy pointer into the image's storable vector.
setTexture2DRGBA8 :: MonadIO m => GLenum -> Image PixelRGBA8 -> m ()
setTexture2DRGBA8 target image = liftIO $
    Vector.unsafeWith (imageData image) $
        GL.glTexImage2D target 0 (fromIntegral GL.GL_RGBA)
                        (fromIntegral $ imageWidth image)
                        (fromIntegral $ imageHeight image) 0
                        GL.GL_RGBA GL.GL_UNSIGNED_BYTE
| psandahl/big-engine | src/BigE/Texture.hs | mit | 7,574 | 0 | 15 | 2,011 | 1,847 | 934 | 913 | 169 | 3 |
module Main where
import Test.Tasty
import qualified Ratscrew.Game.Tests
import qualified Ratscrew.Game.Internal.Snapping.Tests
import qualified Ratscrew.Game.Arbitrary
-- | Run all Ratscrew test groups under a single tasty tree.
main :: IO ()
main = defaultMain allTests
  where
    allTests =
      testGroup "Tests"
        [ Ratscrew.Game.Tests.tests
        , Ratscrew.Game.Internal.Snapping.Tests.tests
        , Ratscrew.Game.Arbitrary.tests
        ]
| smobs/Ratscrew | tests/Main.hs | mit | 362 | 0 | 8 | 63 | 79 | 52 | 27 | 10 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Data.Hex.Extra where
import ClassyPrelude
import Text.Read (read)
import Numeric
-- | Render a non-negative integral value in hexadecimal, left-padded
-- with zeros to at least @len@ characters (longer values are kept as-is).
showHexFixed :: (Integral a, Show a) => Int -> a -> String
showHexFixed len val = replicate (len - length digits) '0' ++ digits
  where digits = showHex val ""
-- | Parse a hexadecimal string, with or without a leading \"0x\" prefix.
-- Delegates to 'read', so malformed input raises a parse error.
fromHex :: (Integral a, Read a) => String -> a
fromHex val
  | hexPrefix `isPrefixOf` val = read val
  | otherwise                  = read (hexPrefix ++ val)
  where hexPrefix = "0x" :: String
| circuithub/circuithub-prelude | Data/Hex/Extra.hs | mit | 479 | 0 | 11 | 96 | 179 | 98 | 81 | 13 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Network.API.Mandrill.Subaccounts where
import Network.API.Mandrill.Response
import Network.API.Mandrill.Utils
import Network.API.Mandrill.Types
-- | Get the list of subaccounts defined for the account,
-- optionally filtered by a prefix
-- A 'Nothing' query sends a null "q" field, i.e. no prefix filter.
list :: (MonadIO m) =>
        Maybe Query ->
        MandrillT m (Either ApiError [Subaccount])
list q = performRequest "/subaccounts/list.json" [ "q" .= q ]
-- | Add a new subaccount with the given id, display name, notes and
-- custom sending quota.
add :: (MonadIO m) =>
       SubaccountId ->
       Name ->
       Notes ->
       Count ->
       MandrillT m (Either ApiError Subaccount)
add sid sname snotes quota =
  performRequest "/subaccounts/add.json"
    [ "id"           .= sid
    , "name"         .= sname
    , "notes"        .= snotes
    , "custom_quota" .= quota ]
-- | Given the ID of an existing subaccount, return the data about it
info :: (MonadIO m) =>
        SubaccountId ->
        MandrillT m (Either ApiError Subaccount)
info i = performRequest "/subaccounts/info.json" [ "id" .= i ]
-- | Update an existing subaccount's display name, notes and custom
-- sending quota.
update :: (MonadIO m) =>
          SubaccountId ->
          Name ->
          Notes ->
          Count ->
          MandrillT m (Either ApiError Subaccount)
update sid sname snotes quota =
  performRequest "/subaccounts/update.json"
    [ "id"           .= sid
    , "name"         .= sname
    , "notes"        .= snotes
    , "custom_quota" .= quota ]
-- | Delete an existing subaccount. Any email related to the subaccount will be
-- saved, but stats will be removed and any future sending calls to this
-- subaccount will fail.
delete :: (MonadIO m) =>
          SubaccountId ->
          MandrillT m (Either ApiError Subaccount)
delete i = performRequest "/subaccounts/delete.json" [ "id" .= i ]
-- | Pause a subaccount's sending. Any future emails delivered to this
-- subaccount will be queued for a maximum of 3 days until the subaccount
-- is resumed.
pause :: (MonadIO m) =>
         SubaccountId ->
         MandrillT m (Either ApiError Subaccount)
pause i = performRequest "/subaccounts/pause.json" [ "id" .= i ]
-- | Resume a paused subaccount's sending
resume :: (MonadIO m) =>
          SubaccountId ->
          MandrillT m (Either ApiError Subaccount)
resume i = performRequest "/subaccounts/resume.json" [ "id" .= i ]
| krgn/hamdrill | src/Network/API/Mandrill/Subaccounts.hs | mit | 2,327 | 0 | 12 | 663 | 487 | 263 | 224 | 49 | 1 |
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
-- | Helpers to work with `Entity` attributes.
module HelHUG.DB.Attribute where
import Data.Map as Map
import HelHUG.DB
-- | Types that can be decoded from an 'Entity' attribute. 'getAttr'
-- returns Nothing when the attribute is absent or has the wrong shape.
class Attribute a where
    getAttr :: AttrName -> Entity -> Maybe a
-- | Single strings are decoded from 'AttrStr' values.
instance Attribute String where
    getAttr attrName (Entity _ attrs) = Map.lookup attrName attrs >>= fromAttrStr
-- | String lists are decoded from 'AttrMultiStr' values.
instance Attribute [String] where
    getAttr attrName (Entity _ attrs) = Map.lookup attrName attrs >>= fromAttrMultiStr
-- | Ints are decoded from 'AttrInt' values.
instance Attribute Int where
    getAttr attrName (Entity _ attrs) = Map.lookup attrName attrs >>= fromAttrInt
-- | Entity-id lists are decoded from 'AttrCollection' values.
instance Attribute [EntityId] where
    getAttr attrName (Entity _ attrs) = Map.lookup attrName attrs >>= fromAttrCollection
-- | Extract an Int from an attribute value; Nothing for other shapes.
fromAttrInt :: AttrValue -> Maybe Int
fromAttrInt (AttrInt i) = Just i
fromAttrInt _ = Nothing
-- | Extract a String from an attribute value; Nothing for other shapes.
fromAttrStr :: AttrValue -> Maybe String
fromAttrStr (AttrStr str) = Just str
fromAttrStr _ = Nothing
-- | Extract a list of strings from an attribute value. An empty
-- 'AttrCollection' is ambiguous with an empty multi-string, so it is
-- accepted here as the empty list too.
fromAttrMultiStr :: AttrValue -> Maybe [String]
fromAttrMultiStr (AttrMultiStr strs) = Just strs
fromAttrMultiStr (AttrCollection []) = Just []
fromAttrMultiStr _ = Nothing
-- | Extract a list of entity ids from an attribute value. An empty
-- 'AttrMultiStr' is ambiguous with an empty collection, so it is
-- accepted here as the empty list too.
fromAttrCollection :: AttrValue -> Maybe [EntityId]
fromAttrCollection (AttrCollection coll) = Just coll
fromAttrCollection (AttrMultiStr []) = Just []
fromAttrCollection _ = Nothing
| phadej/helhug-types | src/HelHUG/DB/Attribute.hs | mit | 1,351 | 0 | 9 | 258 | 397 | 200 | 197 | 29 | 1 |
{-# LANGUAGE BangPatterns, ScopedTypeVariables #-}
-- TODO: Add some comments describing how this implementation works.
-- | A reimplementation of Data.WordMap that seems to be 1.4-4x faster.
module Data.WordMap.Strict (
-- * Map type
WordMap, Key
-- * Operators
, (!)
, (\\)
-- * Query
, null
, size
, member
, notMember
, lookup
, findWithDefault
, lookupLT
, lookupGT
, lookupLE
, lookupGE
-- * Construction
, empty
, singleton
-- ** Insertion
, insert
, insertWith
, insertWithKey
, insertLookupWithKey
-- ** Delete\/Update
, delete
, adjust
, adjustWithKey
, update
, updateWithKey
, updateLookupWithKey
, alter
, alterF
-- * Combine
-- ** Union
, union
, unionWith
, unionWithKey
, unions
, unionsWith
-- ** Difference
, difference
, differenceWith
, differenceWithKey
-- ** Intersection
, intersection
, intersectionWith
, intersectionWithKey
-- * Traversal
-- ** Map
, map
, mapWithKey
, traverseWithKey
, mapAccum
, mapAccumWithKey
, mapAccumRWithKey
, mapKeys
, mapKeysWith
, mapKeysMonotonic
-- * Folds
, foldr
, foldl
, foldrWithKey
, foldlWithKey
, foldMapWithKey
-- ** Strict folds
, foldr'
, foldl'
, foldrWithKey'
, foldlWithKey'
-- * Conversion
, elems
, keys
, assocs
-- ** Lists
, toList
, fromList
, fromListWith
, fromListWithKey
-- ** Ordered Lists
, toAscList
, toDescList
, fromAscList
, fromAscListWith
, fromAscListWithKey
, fromDistinctAscList
-- * Filter
, filter
, filterWithKey
, partition
, partitionWithKey
, mapMaybe
, mapMaybeWithKey
, mapEither
, mapEitherWithKey
, split
, splitLookup
, splitRoot
-- * Submap
, isSubmapOf
, isSubmapOfBy
, isProperSubmapOf
, isProperSubmapOfBy
-- * Min\/Max
, findMin
, findMax
, deleteMin
, deleteMax
, deleteFindMin
, deleteFindMax
, updateMin
, updateMax
, updateMinWithKey
, updateMaxWithKey
, minView
, maxView
, minViewWithKey
, maxViewWithKey
-- * Debugging
, showTree
, valid
) where
import Data.WordMap.Base
import Control.Applicative (Applicative(..))
import Data.Functor ((<$>))
import Data.Bits (xor)
import Data.StrictPair (StrictPair(..), toPair)
import qualified Data.List (foldl')
import Prelude hiding (foldr, foldl, lookup, null, map, filter, min, max)
-- | Strict ('#!') and lazy ('#') application helpers, used when building
-- tree nodes whose value fields must (or need not) be forced to WHNF.
(#!), (#) :: (a -> b) -> a -> b
(#!) f x = f $! x
(#) f x = f x
-- | /O(1)/. A map of one element.  The value is forced to WHNF before the
-- node is built, keeping the map spine-and-value strict.
--
-- > singleton 1 'a' == fromList [(1, 'a')]
-- > size (singleton 1 'a') == 1
singleton :: Key -> a -> WordMap a
singleton k !v = WordMap (NonEmpty k v Tip)
-- | /O(min(n,W))/. Insert a new key\/value pair in the map.
-- If the key is already present in the map, the associated value is
-- replaced with the supplied value, i.e. 'insert' is equivalent to
-- @'insertWith' 'const'@
--
-- > insert 5 'x' (fromList [(5,'a'), (3,'b')]) == fromList [(3, 'b'), (5, 'x')]
-- > insert 7 'x' (fromList [(5,'a'), (3,'b')]) == fromList [(3, 'b'), (5, 'a'), (7, 'x')]
-- > insert 5 'x' empty == singleton 5 'x'
insert :: Key -> a -> WordMap a -> WordMap a
insert = start
  where
    -- Root dispatch: the overall minimum is stored outside the tree, so a
    -- key below it becomes the new minimum and the old one is pushed down.
    start !k !v (WordMap Empty) = WordMap (NonEmpty k v Tip)
    start !k !v (WordMap (NonEmpty min minV root))
        | k > min = WordMap (NonEmpty min minV (goL k v (xor min k) min root))
        | k < min = WordMap (NonEmpty k v (insertMinL (xor min k) min minV root))
        | otherwise = WordMap (NonEmpty k v root)

    -- Descend a left-handed subtree whose known minimum is 'min'.
    -- 'xorCache' is @xor min k@, compared against @xor k max@ to decide
    -- which child shares the longer prefix with 'k' (PATRICIA-style).
    goL !k v !_ !_ Tip = Bin k v Tip Tip
    goL !k v !xorCache !min (Bin max maxV l r)
        | k < max = if xorCache < xorCacheMax
                    then Bin max maxV (goL k v xorCache min l) r
                    else Bin max maxV l (goR k v xorCacheMax max r)
        | k > max = if xor min max < xorCacheMax
                    -- 'k' diverges above this node's prefix: it becomes the
                    -- new maximum and the whole old node moves to the left.
                    then Bin k v (Bin max maxV l r) Tip
                    else Bin k v l (insertMaxR xorCacheMax max maxV r)
        | otherwise = Bin max v l r  -- k == max: overwrite in place
      where xorCacheMax = xor k max

    -- Mirror image of 'goL' for right-handed subtrees (known maximum 'max').
    goR !k v !_ !_ Tip = Bin k v Tip Tip
    goR !k v !xorCache !max (Bin min minV l r)
        | k > min = if xorCache < xorCacheMin
                    then Bin min minV l (goR k v xorCache max r)
                    else Bin min minV (goL k v xorCacheMin min l) r
        | k < min = if xor min max < xorCacheMin
                    then Bin k v Tip (Bin min minV l r)
                    else Bin k v (insertMinL xorCacheMin min minV l) r
        | otherwise = Bin min v l r  -- k == min: overwrite in place
      where xorCacheMin = xor min k
-- | /O(min(n,W))/. Insert with a combining function.
-- @'insertWith' f key value mp@
-- will insert the pair (key, value) into @mp@ if key does
-- not exist in the map. If the key does exist, the function will
-- insert @f new_value old_value@.
--
-- > insertWith (++) 5 "xxx" (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "xxxa")]
-- > insertWith (++) 7 "xxx" (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "a"), (7, "xxx")]
-- > insertWith (++) 5 "xxx" empty == singleton 5 "xxx"
insertWith :: (a -> a -> a) -> Key -> a -> WordMap a -> WordMap a
insertWith combine = start
  where
    -- Same traversal as 'insert'; '#!' forces each stored value to WHNF
    -- (this is the Strict variant) while '#' applies lazily to subtrees.
    start !k v (WordMap Empty) = WordMap (NonEmpty k #! v # Tip)
    start !k v (WordMap (NonEmpty min minV root))
        | k > min = WordMap (NonEmpty min minV (goL k v (xor min k) min root))
        | k < min = WordMap (NonEmpty k #! v # insertMinL (xor min k) min minV root)
        | otherwise = WordMap (NonEmpty k #! combine v minV # root)

    -- Left-handed descent; see 'insert' for the xor-cache branching scheme.
    goL !k v !_ !_ Tip = Bin k #! v # Tip # Tip
    goL !k v !xorCache !min (Bin max maxV l r)
        | k < max = if xorCache < xorCacheMax
                    then Bin max maxV (goL k v xorCache min l) r
                    else Bin max maxV l (goR k v xorCacheMax max r)
        | k > max = if xor min max < xorCacheMax
                    then Bin k #! v # Bin max maxV l r # Tip
                    else Bin k #! v # l # insertMaxR xorCacheMax max maxV r
        | otherwise = Bin max #! combine v maxV # l # r  -- collision: new value first
      where xorCacheMax = xor k max

    -- Right-handed mirror of 'goL'.
    goR !k v !_ !_ Tip = Bin k #! v # Tip # Tip
    goR !k v !xorCache !max (Bin min minV l r)
        | k > min = if xorCache < xorCacheMin
                    then Bin min minV l (goR k v xorCache max r)
                    else Bin min minV (goL k v xorCacheMin min l) r
        | k < min = if xor min max < xorCacheMin
                    then Bin k #! v # Tip # Bin min minV l r
                    else Bin k #! v # insertMinL xorCacheMin min minV l # r
        | otherwise = Bin min #! combine v minV # l # r  -- collision: new value first
      where xorCacheMin = xor min k
-- | /O(min(n,W))/. Insert with a combining function that also sees the key.
-- @'insertWithKey' f key value mp@ inserts @(key, value)@ into @mp@ when the
-- key is absent; otherwise it stores @f key new_value old_value@.
--
-- > let f key new_value old_value = (show key) ++ ":" ++ new_value ++ "|" ++ old_value
-- > insertWithKey f 5 "xxx" (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "5:xxx|a")]
-- > insertWithKey f 7 "xxx" (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "a"), (7, "xxx")]
-- > insertWithKey f 5 "xxx" empty == singleton 5 "xxx"
insertWithKey :: (Key -> a -> a -> a) -> Key -> a -> WordMap a -> WordMap a
insertWithKey combine k = insertWith (combine k) k
-- | /O(min(n,W))/. The expression (@'insertLookupWithKey' f k x map@)
-- is a pair where the first element is equal to (@'lookup' k map@)
-- and the second element equal to (@'insertWithKey' f k x map@).
--
-- > let f key new_value old_value = (show key) ++ ":" ++ new_value ++ "|" ++ old_value
-- > insertLookupWithKey f 5 "xxx" (fromList [(5,"a"), (3,"b")]) == (Just "a", fromList [(3, "b"), (5, "5:xxx|a")])
-- > insertLookupWithKey f 7 "xxx" (fromList [(5,"a"), (3,"b")]) == (Nothing, fromList [(3, "b"), (5, "a"), (7, "xxx")])
-- > insertLookupWithKey f 5 "xxx" empty == (Nothing, singleton 5 "xxx")
--
-- This is how to define @insertLookup@ using @insertLookupWithKey@:
--
-- > let insertLookup kx x t = insertLookupWithKey (\_ a _ -> a) kx x t
-- > insertLookup 5 "x" (fromList [(5,"a"), (3,"b")]) == (Just "a", fromList [(3, "b"), (5, "x")])
-- > insertLookup 7 "x" (fromList [(5,"a"), (3,"b")]) == (Nothing, fromList [(3, "b"), (5, "a"), (7, "x")])
insertLookupWithKey :: (Key -> a -> a -> a) -> Key -> a -> WordMap a -> (Maybe a, WordMap a)
-- The traversal threads a 'StrictPair' ('mv :*: tree') of the looked-up old
-- value and the rebuilt subtree; 'toPair' converts it to a lazy pair at the
-- very end.  Branching mirrors 'insertWith'.
insertLookupWithKey combine !k v = toPair . start
  where
    start (WordMap Empty) = Nothing :*: WordMap (NonEmpty k #! v # Tip)
    start (WordMap (NonEmpty min minV root))
        | k > min = let mv :*: root' = goL (xor min k) min root
                    in mv :*: WordMap (NonEmpty min minV root')
        | k < min = Nothing :*: WordMap (NonEmpty k #! v # insertMinL (xor min k) min minV root)
        | otherwise = Just minV :*: WordMap (NonEmpty k #! combine k v minV # root)

    -- Left-handed descent carrying the result pair back up.
    goL !_ _ Tip = Nothing :*: (Bin k #! v # Tip # Tip)
    goL !xorCache min (Bin max maxV l r)
        | k < max = if xorCache < xorCacheMax
                    then let mv :*: l' = goL xorCache min l
                         in mv :*: Bin max maxV l' r
                    else let mv :*: r' = goR xorCacheMax max r
                         in mv :*: Bin max maxV l r'
        | k > max = if xor min max < xorCacheMax
                    then Nothing :*: (Bin k #! v # Bin max maxV l r # Tip)
                    else Nothing :*: (Bin k #! v # l # insertMaxR xorCacheMax max maxV r)
        | otherwise = Just maxV :*: (Bin max #! combine k v maxV # l # r)
      where xorCacheMax = xor k max

    -- Right-handed mirror of 'goL'.
    goR !_ _ Tip = Nothing :*: (Bin k #! v # Tip # Tip)
    goR !xorCache max (Bin min minV l r)
        | k > min = if xorCache < xorCacheMin
                    then let mv :*: r' = goR xorCache max r
                         in mv :*: Bin min minV l r'
                    else let mv :*: l' = goL xorCacheMin min l
                         in mv :*: Bin min minV l' r
        | k < min = if xor min max < xorCacheMin
                    then Nothing :*: (Bin k #! v # Tip # Bin min minV l r)
                    else Nothing :*: (Bin k #! v # insertMinL xorCacheMin min minV l # r)
        | otherwise = Just minV :*: (Bin min #! combine k v minV # l # r)
      where xorCacheMin = xor min k
-- | /O(min(n,W))/. Adjust a value at a specific key. When the key is not
-- a member of the map, the original map is returned.
--
-- > adjust ("new " ++) 5 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "new a")]
-- > adjust ("new " ++) 7 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "a")]
-- > adjust ("new " ++) 7 empty == empty
adjust :: (a -> a) -> Key -> WordMap a -> WordMap a
-- The key is forced once up front; the original map (bound via @m@ / @n@
-- as-patterns) is returned untouched whenever the key is missing, so no
-- rebuilding happens on a miss.
adjust f k = k `seq` start
  where
    start (WordMap Empty) = WordMap Empty
    start m@(WordMap (NonEmpty min minV node))
        | k > min = WordMap (NonEmpty min minV (goL (xor min k) min node))
        | k < min = m
        | otherwise = WordMap (NonEmpty min #! f minV # node)  -- '#!' forces the new value

    -- Left-handed descent; xor-cache branching as in 'insert'.
    goL !_ _ Tip = Tip
    goL !xorCache min n@(Bin max maxV l r)
        | k < max = if xorCache < xorCacheMax
                    then Bin max maxV (goL xorCache min l) r
                    else Bin max maxV l (goR xorCacheMax max r)
        | k > max = n
        | otherwise = Bin max #! f maxV # l # r
      where xorCacheMax = xor k max

    -- Right-handed mirror of 'goL'.
    goR !_ _ Tip = Tip
    goR !xorCache max n@(Bin min minV l r)
        | k > min = if xorCache < xorCacheMin
                    then Bin min minV l (goR xorCache max r)
                    else Bin min minV (goL xorCacheMin min l) r
        | k < min = n
        | otherwise = Bin min #! f minV # l # r
      where xorCacheMin = xor min k
-- | /O(min(n,W))/. Adjust a value at a specific key, giving the adjusting
-- function access to the key.  When the key is not a member of the map, the
-- original map is returned.
--
-- > let f key x = (show key) ++ ":new " ++ x
-- > adjustWithKey f 5 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "5:new a")]
-- > adjustWithKey f 7 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "a")]
-- > adjustWithKey f 7 empty == empty
adjustWithKey :: (Key -> a -> a) -> Key -> WordMap a -> WordMap a
adjustWithKey combine k = adjust (combine k) k
-- | /O(min(n,W))/. The expression (@'update' f k map@) updates the value @x@
-- at @k@ (if it is in the map). If (@f x@) is 'Nothing', the element is
-- deleted. If it is (@'Just' y@), the key @k@ is bound to the new value @y@.
--
-- > let f x = if x == "a" then Just "new a" else Nothing
-- > update f 5 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "new a")]
-- > update f 7 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "a")]
-- > update f 3 (fromList [(5,"a"), (3,"b")]) == singleton 5 "a"
update :: (a -> Maybe a) -> Key -> WordMap a -> WordMap a
-- Deleting the overall minimum requires promoting a new minimum out of the
-- tree ('deleteMinL'); deleting an interior bound collapses the node with
-- 'extractBinL' / 'extractBinR'.  Misses return the original map unchanged.
update f k = k `seq` start
  where
    start (WordMap Empty) = WordMap Empty
    start m@(WordMap (NonEmpty min minV Tip))
        | k == min = case f minV of
            Nothing -> WordMap Empty
            Just !minV' -> WordMap (NonEmpty min minV' Tip)
        | otherwise = m
    start m@(WordMap (NonEmpty min minV root@(Bin max maxV l r)))
        | k < min = m
        | k == min = case f minV of
            -- The minimum is removed: pull the next-smallest key up to
            -- become the new externally-stored minimum.
            Nothing -> let DR min' minV' root' = deleteMinL max maxV l r
                       in WordMap (NonEmpty min' minV' root')
            Just !minV' -> WordMap (NonEmpty min minV' root)
        | otherwise = WordMap (NonEmpty min minV (goL (xor min k) min root))

    -- Left-handed descent; xor-cache branching as in 'insert'.
    goL !_ _ Tip = Tip
    goL !xorCache min n@(Bin max maxV l r)
        | k < max = if xorCache < xorCacheMax
                    then Bin max maxV (goL xorCache min l) r
                    else Bin max maxV l (goR xorCacheMax max r)
        | k > max = n
        | otherwise = case f maxV of
            Nothing -> extractBinL l r  -- drop this node's max, re-fuse children
            Just !maxV' -> Bin max maxV' l r
      where xorCacheMax = xor k max

    -- Right-handed mirror of 'goL'.
    goR !_ _ Tip = Tip
    goR !xorCache max n@(Bin min minV l r)
        | k > min = if xorCache < xorCacheMin
                    then Bin min minV l (goR xorCache max r)
                    else Bin min minV (goL xorCacheMin min l) r
        | k < min = n
        | otherwise = case f minV of
            Nothing -> extractBinR l r  -- drop this node's min, re-fuse children
            Just !minV' -> Bin min minV' l r
      where xorCacheMin = xor min k
-- | /O(min(n,W))/. The expression (@'updateWithKey' f k map@) updates the
-- value @x@ at @k@ (if it is in the map). If (@f k x@) is 'Nothing', the
-- element is deleted. If it is (@'Just' y@), the key @k@ is bound to the new
-- value @y@.
--
-- > let f k x = if x == "a" then Just ((show k) ++ ":new a") else Nothing
-- > updateWithKey f 5 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "5:new a")]
-- > updateWithKey f 7 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "a")]
-- > updateWithKey f 3 (fromList [(5,"a"), (3,"b")]) == singleton 5 "a"
updateWithKey :: (Key -> a -> Maybe a) -> Key -> WordMap a -> WordMap a
updateWithKey combine k = update (combine k) k
-- | /O(min(n,W))/. Lookup and update.
-- The function returns the original value if it is updated.
-- This is different behavior than 'Data.Map.updateLookupWithKey'.
-- Returns the original value if the map entry is deleted.
--
-- > let f k x = if x == "a" then Just ((show k) ++ ":new a") else Nothing
-- > updateLookupWithKey f 5 (fromList [(5,"a"), (3,"b")]) == (Just "a", fromList [(3, "b"), (5, "5:new a")])
-- > updateLookupWithKey f 7 (fromList [(5,"a"), (3,"b")]) == (Nothing, fromList [(3, "b"), (5, "a")])
-- > updateLookupWithKey f 3 (fromList [(5,"a"), (3,"b")]) == (Just "b", singleton 5 "a")
updateLookupWithKey :: (Key -> a -> Maybe a) -> Key -> WordMap a -> (Maybe a, WordMap a)
-- Same traversal as 'update', additionally threading the pre-update value
-- back up as the first component of a lazy pair.
updateLookupWithKey f k = k `seq` start
  where
    start (WordMap Empty) = (Nothing, WordMap Empty)
    start m@(WordMap (NonEmpty min minV Tip))
        | k == min = case f min minV of
            Nothing -> (Just minV, WordMap Empty)
            Just !minV' -> (Just minV, WordMap (NonEmpty min minV' Tip))
        | otherwise = (Nothing, m)
    start m@(WordMap (NonEmpty min minV root@(Bin max maxV l r)))
        | k < min = (Nothing, m)
        | k == min = case f min minV of
            -- Minimum deleted: promote the next-smallest key ('deleteMinL').
            Nothing -> let DR min' minV' root' = deleteMinL max maxV l r
                       in (Just minV, WordMap (NonEmpty min' minV' root'))
            Just !minV' -> (Just minV, WordMap (NonEmpty min minV' root))
        | otherwise = let (mv, root') = goL (xor min k) min root
                      in (mv, WordMap (NonEmpty min minV root'))

    -- Left-handed descent; xor-cache branching as in 'insert'.
    goL !_ _ Tip = (Nothing, Tip)
    goL !xorCache min n@(Bin max maxV l r)
        | k < max = if xorCache < xorCacheMax
                    then let (mv, l') = goL xorCache min l
                         in (mv, Bin max maxV l' r)
                    else let (mv, r') = goR xorCacheMax max r
                         in (mv, Bin max maxV l r')
        | k > max = (Nothing, n)
        | otherwise = case f max maxV of
            Nothing -> (Just maxV, extractBinL l r)
            Just !maxV' -> (Just maxV, Bin max maxV' l r)
      where xorCacheMax = xor k max

    -- Right-handed mirror of 'goL'.
    goR !_ _ Tip = (Nothing, Tip)
    goR !xorCache max n@(Bin min minV l r)
        | k > min = if xorCache < xorCacheMin
                    then let (mv, r') = goR xorCache max r
                         in (mv, Bin min minV l r')
                    else let (mv, l') = goL xorCacheMin min l
                         in (mv, Bin min minV l' r)
        | k < min = (Nothing, n)
        | otherwise = case f min minV of
            Nothing -> (Just minV, extractBinR l r)
            Just !minV' -> (Just minV, Bin min minV' l r)
      where xorCacheMin = xor min k
-- | /O(min(n,W))/. The expression (@'alter' f k map@) alters the value @x@
-- at @k@, or absence thereof.  'alter' can be used to insert, delete, or
-- update a value in the map.
-- In short : @'lookup' k ('alter' f k m) = f ('lookup' k m)@.
alter :: (Maybe a -> Maybe a) -> Key -> WordMap a -> WordMap a
alter f k m = case lookup k m of
    -- Key absent: only rebuild if the caller asks for an insertion.
    Nothing  -> maybe m (\new -> insert k new m) (f Nothing)
    -- Key present: the caller either deletes it or supplies a replacement.
    Just old -> maybe (delete k m) (\new -> insert k new m) (f (Just old))
-- | /O(min(n,W))/. The expression (@'alterF' f k map@) alters the value @x@ at
-- @k@, or absence thereof. 'alterF' can be used to inspect, insert, delete,
-- or update a value in the map. In short : @'lookup' k <$> 'alterF' f k m = f
-- ('lookup' k m)@.
--
-- Example:
--
-- @
-- interactiveAlter :: Word -> WordMap String -> IO (WordMap String)
-- interactiveAlter k m = alterF f k m where
--   f Nothing = do
--     putStrLn $ show k ++
--       " was not found in the map. Would you like to add it?"
--     getUserResponse1 :: IO (Maybe String)
--   f (Just old) = do
--     putStrLn $ "The key is currently bound to " ++ show old ++
--       ". Would you like to change or delete it?"
--     getUserResponse2 :: IO (Maybe String)
-- @
--
-- 'alterF' is the most general operation for working with an individual
-- key that may or may not be in a given map.
--
-- Note: 'alterF' is a flipped version of the 'at' combinator from
-- 'Control.Lens.At'.
--
-- @since 0.5.8
alterF :: Functor f => (Maybe a -> f (Maybe a)) -> Key -> WordMap a -> f (WordMap a)
alterF f k m = case lookup k m of
    -- Key absent: map the caller's decision onto "leave alone" or "insert".
    Nothing  -> maybe m (\new -> insert k new m) <$> f Nothing
    -- Key present: map the decision onto "delete" or "replace".
    Just old -> maybe (delete k m) (\new -> insert k new m) <$> f (Just old)
-- | /O(n+m)/. The union with a combining function.
--
-- > unionWith (++) (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) == fromList [(3, "b"), (5, "aA"), (7, "C")]
unionWith :: (a -> a -> a) -> WordMap a -> WordMap a -> WordMap a
unionWith f = unionWithKey (\_ l r -> f l r)
-- | /O(n+m)/. The union with a combining function.
--
-- > let f key left_value right_value = (show key) ++ ":" ++ left_value ++ "|" ++ right_value
-- > unionWithKey f (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) == fromList [(3, "b"), (5, "5:a|A"), (7, "C")]
unionWithKey :: (Key -> a -> a -> a) -> WordMap a -> WordMap a -> WordMap a
-- The merge proceeds by comparing the most-significant set bit of each
-- tree's key range ('compareMSB'); the suffixes distinguish the helpers:
--   goL1/goR1: tree 1's bound is inside tree 2's range (insert 1 into 2)
--   goL2/goR2: tree 2's bound is inside tree 1's range (insert 2 into 1)
--   goLFused/goRFused: both trees share the same min (resp. max)
--   goInsert*: degenerate case of inserting a single leftover key.
unionWithKey combine = start
  where
    start (WordMap Empty) m2 = m2
    start m1 (WordMap Empty) = m1
    start (WordMap (NonEmpty min1 minV1 root1)) (WordMap (NonEmpty min2 minV2 root2))
        | min1 < min2 = WordMap (NonEmpty min1 minV1 (goL2 minV2 min1 root1 min2 root2))
        | min1 > min2 = WordMap (NonEmpty min2 minV2 (goL1 minV1 min1 root1 min2 root2))
        | otherwise = WordMap (NonEmpty min1 #! combine min1 minV1 minV2 # goLFused min1 root1 root2) -- we choose min1 arbitrarily, as min1 == min2

    -- TODO: Should I bind 'minV1' in a closure? It never changes.
    -- TODO: Should I cache @xor min1 min2@?
    goL1 minV1 min1 Tip !_ Tip = Bin min1 minV1 Tip Tip
    goL1 minV1 min1 Tip min2 n2 = goInsertL1 min1 minV1 (xor min1 min2) min2 n2
    goL1 minV1 min1 n1 min2 Tip = insertMinL (xor min1 min2) min1 minV1 n1
    goL1 minV1 min1 n1@(Bin max1 maxV1 l1 r1) min2 n2@(Bin max2 maxV2 l2 r2) = case compareMSB (xor min1 max1) (xor min2 max2) of
        LT | xor min2 max2 `ltMSB` xor min1 min2 -> disjoint -- we choose min1 and min2 arbitrarily - we just need something from tree 1 and something from tree 2
           | xor min2 min1 < xor min1 max2 -> Bin max2 maxV2 (goL1 minV1 min1 n1 min2 l2) r2 -- we choose min1 arbitrarily - we just need something from tree 1
           | max1 > max2 -> Bin max1 maxV1 l2 (goR2 maxV2 max1 (Bin min1 minV1 l1 r1) max2 r2)
           | max1 < max2 -> Bin max2 maxV2 l2 (goR1 maxV1 max1 (Bin min1 minV1 l1 r1) max2 r2)
           | otherwise -> Bin max1 #! combine max1 maxV1 maxV2 # l2 # goRFused max1 (Bin min1 minV1 l1 r1) r2 -- we choose max1 arbitrarily, as max1 == max2
        EQ | max2 < min1 -> disjoint
           | max1 > max2 -> Bin max1 maxV1 (goL1 minV1 min1 l1 min2 l2) (goR2 maxV2 max1 r1 max2 r2)
           | max1 < max2 -> Bin max2 maxV2 (goL1 minV1 min1 l1 min2 l2) (goR1 maxV1 max1 r1 max2 r2)
           | otherwise -> Bin max1 #! combine max1 maxV1 maxV2 # goL1 minV1 min1 l1 min2 l2 # goRFused max1 r1 r2 -- we choose max1 arbitrarily, as max1 == max2
        GT | xor min1 max1 `ltMSB` xor min1 min2 -> disjoint -- we choose min1 and min2 arbitrarily - we just need something from tree 1 and something from tree 2
           | otherwise -> Bin max1 maxV1 (goL1 minV1 min1 l1 min2 n2) r1
      where
        disjoint = Bin max1 maxV1 n2 (Bin min1 minV1 l1 r1)

    -- TODO: Should I bind 'minV2' in a closure? It never changes.
    -- TODO: Should I cache @xor min1 min2@?
    goL2 minV2 !_ Tip min2 Tip = Bin min2 minV2 Tip Tip
    goL2 minV2 min1 Tip min2 n2 = insertMinL (xor min1 min2) min2 minV2 n2
    goL2 minV2 min1 n1 min2 Tip = goInsertL2 min2 minV2 (xor min1 min2) min1 n1
    goL2 minV2 min1 n1@(Bin max1 maxV1 l1 r1) min2 n2@(Bin max2 maxV2 l2 r2) = case compareMSB (xor min1 max1) (xor min2 max2) of
        LT | xor min2 max2 `ltMSB` xor min1 min2 -> disjoint -- we choose min1 and min2 arbitrarily - we just need something from tree 1 and something from tree 2
           | otherwise -> Bin max2 maxV2 (goL2 minV2 min1 n1 min2 l2) r2
        EQ | max1 < min2 -> disjoint
           | max1 > max2 -> Bin max1 maxV1 (goL2 minV2 min1 l1 min2 l2) (goR2 maxV2 max1 r1 max2 r2)
           | max1 < max2 -> Bin max2 maxV2 (goL2 minV2 min1 l1 min2 l2) (goR1 maxV1 max1 r1 max2 r2)
           | otherwise -> Bin max1 #! combine max1 maxV1 maxV2 # goL2 minV2 min1 l1 min2 l2 # goRFused max1 r1 r2 -- we choose max1 arbitrarily, as max1 == max2
        GT | xor min1 max1 `ltMSB` xor min1 min2 -> disjoint -- we choose min1 and min2 arbitrarily - we just need something from tree 1 and something from tree 2
           | xor min1 min2 < xor min2 max1 -> Bin max1 maxV1 (goL2 minV2 min1 l1 min2 n2) r1 -- we choose min2 arbitrarily - we just need something from tree 2
           | max1 > max2 -> Bin max1 maxV1 l1 (goR2 maxV2 max1 r1 max2 (Bin min2 minV2 l2 r2))
           | max1 < max2 -> Bin max2 maxV2 l1 (goR1 maxV1 max1 r1 max2 (Bin min2 minV2 l2 r2))
           | otherwise -> Bin max1 #! combine max1 maxV1 maxV2 # l1 # goRFused max1 r1 (Bin min2 minV2 l2 r2) -- we choose max1 arbitrarily, as max1 == max2
      where
        disjoint = Bin max2 maxV2 n1 (Bin min2 minV2 l2 r2)

    -- TODO: Should I bind 'min' in a closure? It never changes.
    -- TODO: Should I use an xor cache here?
    -- 'goLFused' is called instead of 'goL' if the minimums of the two trees are the same
    -- Note that because of this property, the trees cannot be disjoint, so we can skip most of the checks in 'goL'
    goLFused !_ Tip n2 = n2
    goLFused !_ n1 Tip = n1
    goLFused min n1@(Bin max1 maxV1 l1 r1) n2@(Bin max2 maxV2 l2 r2) = case compareMSB (xor min max1) (xor min max2) of
        LT -> Bin max2 maxV2 (goLFused min n1 l2) r2
        EQ | max1 > max2 -> Bin max1 maxV1 (goLFused min l1 l2) (goR2 maxV2 max1 r1 max2 r2)
           | max1 < max2 -> Bin max2 maxV2 (goLFused min l1 l2) (goR1 maxV1 max1 r1 max2 r2)
           | otherwise -> Bin max1 #! combine max1 maxV1 maxV2 # goLFused min l1 l2 # goRFused max1 r1 r2 -- we choose max1 arbitrarily, as max1 == max2
        GT -> Bin max1 maxV1 (goLFused min l1 n2) r1

    -- TODO: Should I bind 'maxV1' in a closure? It never changes.
    -- TODO: Should I cache @xor max1 max2@?
    goR1 maxV1 max1 Tip !_ Tip = Bin max1 maxV1 Tip Tip
    goR1 maxV1 max1 Tip max2 n2 = goInsertR1 max1 maxV1 (xor max1 max2) max2 n2
    goR1 maxV1 max1 n1 max2 Tip = insertMaxR (xor max1 max2) max1 maxV1 n1
    goR1 maxV1 max1 n1@(Bin min1 minV1 l1 r1) max2 n2@(Bin min2 minV2 l2 r2) = case compareMSB (xor min1 max1) (xor min2 max2) of
        LT | xor min2 max2 `ltMSB` xor max1 max2 -> disjoint -- we choose max1 and max2 arbitrarily - we just need something from tree 1 and something from tree 2
           | xor min2 max1 > xor max1 max2 -> Bin min2 minV2 l2 (goR1 maxV1 max1 n1 max2 r2) -- we choose max1 arbitrarily - we just need something from tree 1
           | min1 < min2 -> Bin min1 minV1 (goL2 minV2 min1 (Bin max1 maxV1 l1 r1) min2 l2) r2
           | min1 > min2 -> Bin min2 minV2 (goL1 minV1 min1 (Bin max1 maxV1 l1 r1) min2 l2) r2
           | otherwise -> Bin min1 #! combine min1 minV1 minV2 # goLFused min1 (Bin max1 maxV1 l1 r1) l2 # r2 -- we choose min1 arbitrarily, as min1 == min2
        EQ | max1 < min2 -> disjoint
           | min1 < min2 -> Bin min1 minV1 (goL2 minV2 min1 l1 min2 l2) (goR1 maxV1 max1 r1 max2 r2)
           | min1 > min2 -> Bin min2 minV2 (goL1 minV1 min1 l1 min2 l2) (goR1 maxV1 max1 r1 max2 r2)
           | otherwise -> Bin min1 #! combine min1 minV1 minV2 # goLFused min1 l1 l2 # goR1 maxV1 max1 r1 max2 r2 -- we choose min1 arbitrarily, as min1 == min2
        GT | xor min1 max1 `ltMSB` xor max1 max2 -> disjoint -- we choose max1 and max2 arbitrarily - we just need something from tree 1 and something from tree 2
           | otherwise -> Bin min1 minV1 l1 (goR1 maxV1 max1 r1 max2 n2)
      where
        disjoint = Bin min1 minV1 (Bin max1 maxV1 l1 r1) n2

    -- TODO: Should I bind 'minV2' in a closure? It never changes.
    -- TODO: Should I cache @xor min1 min2@?
    goR2 maxV2 !_ Tip max2 Tip = Bin max2 maxV2 Tip Tip
    goR2 maxV2 max1 Tip max2 n2 = insertMaxR (xor max1 max2) max2 maxV2 n2
    goR2 maxV2 max1 n1 max2 Tip = goInsertR2 max2 maxV2 (xor max1 max2) max1 n1
    goR2 maxV2 max1 n1@(Bin min1 minV1 l1 r1) max2 n2@(Bin min2 minV2 l2 r2) = case compareMSB (xor min1 max1) (xor min2 max2) of
        LT | xor min2 max2 `ltMSB` xor max1 max2 -> disjoint -- we choose max1 and max2 arbitrarily - we just need something from tree 1 and something from tree 2
           | otherwise -> Bin min2 minV2 l2 (goR2 maxV2 max1 n1 max2 r2)
        EQ | max2 < min1 -> disjoint
           | min1 < min2 -> Bin min1 minV1 (goL2 minV2 min1 l1 min2 l2) (goR2 maxV2 max1 r1 max2 r2)
           | min1 > min2 -> Bin min2 minV2 (goL1 minV1 min1 l1 min2 l2) (goR2 maxV2 max1 r1 max2 r2)
           | otherwise -> Bin min1 #! combine min1 minV1 minV2 # goLFused min1 l1 l2 # goR2 maxV2 max1 r1 max2 r2 -- we choose min1 arbitrarily, as min1 == min2
        GT | xor min1 max1 `ltMSB` xor max1 max2 -> disjoint -- we choose max1 and max2 arbitrarily - we just need something from tree 1 and something from tree 2
           | xor min1 max2 > xor max2 max1 -> Bin min1 minV1 l1 (goR2 maxV2 max1 r1 max2 n2) -- we choose max2 arbitrarily - we just need something from tree 2
           | min1 < min2 -> Bin min1 minV1 (goL2 minV2 min1 l1 min2 (Bin max2 maxV2 l2 r2)) r1
           | min1 > min2 -> Bin min2 minV2 (goL1 minV1 min1 l1 min2 (Bin max2 maxV2 l2 r2)) r1
           | otherwise -> Bin min1 #! combine min1 minV1 minV2 # goLFused min1 l1 (Bin max2 maxV2 l2 r2) # r1 -- we choose min1 arbitrarily, as min1 == min2
      where
        disjoint = Bin min2 minV2 (Bin max2 maxV2 l2 r2) n1

    -- TODO: Should I bind 'max' in a closure? It never changes.
    -- TODO: Should I use an xor cache here?
    -- 'goRFused' is called instead of 'goR' if the maximums of the two trees are the same
    -- Note that because of this property, the trees cannot be disjoint, so we can skip most of the checks in 'goR'
    goRFused !_ Tip n2 = n2
    goRFused !_ n1 Tip = n1
    goRFused max n1@(Bin min1 minV1 l1 r1) n2@(Bin min2 minV2 l2 r2) = case compareMSB (xor min1 max) (xor min2 max) of
        LT -> Bin min2 minV2 l2 (goRFused max n1 r2)
        EQ | min1 < min2 -> Bin min1 minV1 (goL2 minV2 min1 l1 min2 l2) (goRFused max r1 r2)
           | min1 > min2 -> Bin min2 minV2 (goL1 minV1 min1 l1 min2 l2) (goRFused max r1 r2)
           | otherwise -> Bin min1 #! combine min1 minV1 minV2 # goLFused min1 l1 l2 # goRFused max r1 r2 -- we choose min1 arbitrarily, as min1 == min2
        GT -> Bin min1 minV1 l1 (goRFused max r1 n2)

    -- Single-key insertion of a leftover tree-1 key into tree 2 (left side);
    -- on collision the tree-1 value is the /new/ argument to 'combine'.
    goInsertL1 k v !_ _ Tip = Bin k #! v # Tip # Tip
    goInsertL1 k v !xorCache min (Bin max maxV l r)
        | k < max = if xorCache < xorCacheMax
                    then Bin max maxV (goInsertL1 k v xorCache min l) r
                    else Bin max maxV l (goInsertR1 k v xorCacheMax max r)
        | k > max = if xor min max < xorCacheMax
                    then Bin k v (Bin max maxV l r) Tip
                    else Bin k v l (insertMaxR xorCacheMax max maxV r)
        | otherwise = Bin max #! combine k v maxV # l # r
      where xorCacheMax = xor k max

    goInsertR1 k v !_ _ Tip = Bin k v Tip Tip
    goInsertR1 k v !xorCache max (Bin min minV l r)
        | k > min = if xorCache < xorCacheMin
                    then Bin min minV l (goInsertR1 k v xorCache max r)
                    else Bin min minV (goInsertL1 k v xorCacheMin min l) r
        | k < min = if xor min max < xorCacheMin
                    then Bin k v Tip (Bin min minV l r)
                    else Bin k v (insertMinL xorCacheMin min minV l) r
        | otherwise = Bin min #! combine k v minV # l # r
      where xorCacheMin = xor min k

    -- Mirror of goInsertL1/goInsertR1 for a leftover tree-2 key; note the
    -- flipped argument order to 'combine' (tree-1 value stays on the left).
    goInsertL2 k v !_ _ Tip = Bin k v Tip Tip
    goInsertL2 k v !xorCache min (Bin max maxV l r)
        | k < max = if xorCache < xorCacheMax
                    then Bin max maxV (goInsertL2 k v xorCache min l) r
                    else Bin max maxV l (goInsertR2 k v xorCacheMax max r)
        | k > max = if xor min max < xorCacheMax
                    then Bin k v (Bin max maxV l r) Tip
                    else Bin k v l (insertMaxR xorCacheMax max maxV r)
        | otherwise = Bin max #! combine k maxV v # l # r
      where xorCacheMax = xor k max

    goInsertR2 k v !_ _ Tip = Bin k v Tip Tip
    goInsertR2 k v !xorCache max (Bin min minV l r)
        | k > min = if xorCache < xorCacheMin
                    then Bin min minV l (goInsertR2 k v xorCache max r)
                    else Bin min minV (goInsertL2 k v xorCacheMin min l) r
        | k < min = if xor min max < xorCacheMin
                    then Bin k v Tip (Bin min minV l r)
                    else Bin k v (insertMinL xorCacheMin min minV l) r
        | otherwise = Bin min #! combine k minV v # l # r
      where xorCacheMin = xor min k
-- | The union of a list of maps, with a combining operation.
--
-- > unionsWith (++) [(fromList [(5, "a"), (3, "b")]), (fromList [(5, "A"), (7, "C")]), (fromList [(5, "A3"), (3, "B3")])]
-- >     == fromList [(3, "bB3"), (5, "aAA3"), (7, "C")]
unionsWith :: (a -> a -> a) -> [WordMap a] -> WordMap a
unionsWith f ms = Data.List.foldl' (unionWith f) empty ms
-- | /O(n+m)/. Difference with a combining function.
--
-- > let f al ar = if al == "b" then Just (al ++ ":" ++ ar) else Nothing
-- > differenceWith f (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (3, "B"), (7, "C")])
-- >     == singleton 3 "b:B"
differenceWith :: (a -> b -> Maybe a) -> WordMap a -> WordMap b -> WordMap a
differenceWith f = differenceWithKey (\_ a b -> f a b)
-- | /O(n+m)/. Difference with a combining function. When two equal keys are
-- encountered, the combining function is applied to the key and both values.
-- If it returns 'Nothing', the element is discarded (proper set difference).
-- If it returns (@'Just' y@), the element is updated with a new value @y@.
--
-- > let f k al ar = if al == "b" then Just ((show k) ++ ":" ++ al ++ "|" ++ ar) else Nothing
-- > differenceWithKey f (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (3, "B"), (10, "C")])
-- > == singleton 3 "3:b|B"
differenceWithKey :: (Key -> a -> b -> Maybe a) -> WordMap a -> WordMap b -> WordMap a
differenceWithKey combine = start
where
start (WordMap Empty) !_ = WordMap Empty
start !m (WordMap Empty) = m
start (WordMap (NonEmpty min1 minV1 root1)) (WordMap (NonEmpty min2 minV2 root2))
| min1 < min2 = WordMap (NonEmpty min1 minV1 (goL2 min1 root1 min2 root2))
| min1 > min2 = WordMap (goL1 minV1 min1 root1 min2 root2)
| otherwise = case combine min1 minV1 minV2 of
Nothing -> WordMap (goLFused min1 root1 root2)
Just !minV1' -> WordMap (NonEmpty min1 minV1' (goLFusedKeep min1 root1 root2))
goL1 minV1 min1 Tip min2 n2 = goLookupL min1 minV1 (xor min1 min2) n2
goL1 minV1 min1 n1 _ Tip = NonEmpty min1 minV1 n1
goL1 minV1 min1 n1@(Bin _ _ _ _) _ (Bin max2 _ _ _) | min1 > max2 = NonEmpty min1 minV1 n1
goL1 minV1 min1 n1@(Bin max1 maxV1 l1 r1) min2 n2@(Bin max2 maxV2 l2 r2) = case compareMSB (xor min1 max1) (xor min2 max2) of
LT | xor min2 min1 < xor min1 max2 -> goL1 minV1 min1 n1 min2 l2 -- min1 is arbitrary here - we just need something from tree 1
| max1 > max2 -> r2lMap $ NonEmpty max1 maxV1 (goR2 max1 (Bin min1 minV1 l1 r1) max2 r2)
| max1 < max2 -> r2lMap $ goR1 maxV1 max1 (Bin min1 minV1 l1 r1) max2 r2
| otherwise -> case combine max1 maxV1 maxV2 of
Nothing -> r2lMap $ goRFused max1 (Bin min1 minV1 l1 r1) r2
Just !maxV1' -> r2lMap $ NonEmpty max1 maxV1' (goRFusedKeep max1 (Bin min1 minV1 l1 r1) r2)
EQ | max1 > max2 -> binL (goL1 minV1 min1 l1 min2 l2) (NonEmpty max1 maxV1 (goR2 max1 r1 max2 r2))
| max1 < max2 -> binL (goL1 minV1 min1 l1 min2 l2) (goR1 maxV1 max1 r1 max2 r2)
| otherwise -> case combine max1 maxV1 maxV2 of
Nothing -> binL (goL1 minV1 min1 l1 min2 l2) (goRFused max1 r1 r2)
Just !maxV1' -> binL (goL1 minV1 min1 l1 min2 l2) (NonEmpty max1 maxV1' (goRFusedKeep max1 r1 r2))
GT -> binL (goL1 minV1 min1 l1 min2 n2) (NonEmpty max1 maxV1 r1)
goL2 !_ Tip !_ !_ = Tip
goL2 min1 n1 min2 Tip = deleteL min2 (xor min1 min2) n1
goL2 _ n1@(Bin max1 _ _ _) min2 (Bin _ _ _ _) | min2 > max1 = n1
goL2 min1 n1@(Bin max1 maxV1 l1 r1) min2 n2@(Bin max2 maxV2 l2 r2) = case compareMSB (xor min1 max1) (xor min2 max2) of
LT -> goL2 min1 n1 min2 l2
EQ | max1 > max2 -> Bin max1 maxV1 (goL2 min1 l1 min2 l2) (goR2 max1 r1 max2 r2)
| max1 < max2 -> case goR1 maxV1 max1 r1 max2 r2 of
Empty -> goL2 min1 l1 min2 l2
NonEmpty max' maxV' r' -> Bin max' maxV' (goL2 min1 l1 min2 l2) r'
| otherwise -> case combine max1 maxV1 maxV2 of
Nothing -> case goRFused max1 r1 r2 of
Empty -> goL2 min1 l1 min2 l2
NonEmpty max' maxV' r' -> Bin max' maxV' (goL2 min1 l1 min2 l2) r'
Just !maxV1' -> Bin max1 maxV1' (goL2 min1 l1 min2 l2) (goRFusedKeep max1 r1 r2)
GT | xor min1 min2 < xor min2 max1 -> Bin max1 maxV1 (goL2 min1 l1 min2 n2) r1 -- min2 is arbitrary here - we just need something from tree 2
| max1 > max2 -> Bin max1 maxV1 l1 (goR2 max1 r1 max2 (Bin min2 dummyV l2 r2))
| max1 < max2 -> case goR1 maxV1 max1 r1 max2 (Bin min2 dummyV l2 r2) of
Empty -> l1
NonEmpty max' maxV' r' -> Bin max' maxV' l1 r'
| otherwise -> case combine max1 maxV1 maxV2 of
Nothing -> case goRFused max1 r1 (Bin min2 dummyV l2 r2) of
Empty -> l1
NonEmpty max' maxV' r' -> Bin max' maxV' l1 r'
Just !maxV1' -> Bin max1 maxV1' l1 (goRFusedKeep max1 r1 (Bin min2 dummyV l2 r2))
goLFused min = loop
where
loop Tip !_ = Empty
loop (Bin max1 maxV1 l1 r1) Tip = case deleteMinL max1 maxV1 l1 r1 of
DR min' minV' n' -> NonEmpty min' minV' n'
loop n1@(Bin max1 maxV1 l1 r1) n2@(Bin max2 maxV2 l2 r2) = case compareMSB (xor min max1) (xor min max2) of
LT -> loop n1 l2
EQ | max1 > max2 -> binL (loop l1 l2) (NonEmpty max1 maxV1 (goR2 max1 r1 max2 r2))
| max1 < max2 -> binL (loop l1 l2) (goR1 maxV1 max1 r1 max2 r2)
| otherwise -> case combine max1 maxV1 maxV2 of
Nothing -> binL (loop l1 l2) (goRFused max1 r1 r2) -- we choose max1 arbitrarily, as max1 == max2
Just !maxV1' -> binL (loop l1 l2) (NonEmpty max1 maxV1' (goRFusedKeep max1 r1 r2))
GT -> binL (loop l1 n2) (NonEmpty max1 maxV1 r1)
goLFusedKeep min = loop
where
loop n1 Tip = n1
loop Tip !_ = Tip
loop n1@(Bin max1 maxV1 l1 r1) n2@(Bin max2 maxV2 l2 r2) = case compareMSB (xor min max1) (xor min max2) of
LT -> loop n1 l2
EQ | max1 > max2 -> Bin max1 maxV1 (loop l1 l2) (goR2 max1 r1 max2 r2)
| max1 < max2 -> case goR1 maxV1 max1 r1 max2 r2 of
Empty -> loop l1 l2
NonEmpty max' maxV' r' -> Bin max' maxV' (loop l1 l2) r'
| otherwise -> case combine max1 maxV1 maxV2 of
Nothing -> case goRFused max1 r1 r2 of -- we choose max1 arbitrarily, as max1 == max2
Empty -> loop l1 l2
NonEmpty max' maxV' r' -> Bin max' maxV' (loop l1 l2) r'
Just !maxV1' -> Bin max1 maxV1' (loop l1 l2) (goRFusedKeep max1 r1 r2)
GT -> Bin max1 maxV1 (loop l1 n2) r1
goR1 maxV1 max1 Tip max2 n2 = goLookupR max1 maxV1 (xor max1 max2) n2
goR1 maxV1 max1 n1 _ Tip = NonEmpty max1 maxV1 n1
goR1 maxV1 max1 n1@(Bin _ _ _ _) _ (Bin min2 _ _ _) | min2 > max1 = NonEmpty max1 maxV1 n1
goR1 maxV1 max1 n1@(Bin min1 minV1 l1 r1) max2 n2@(Bin min2 minV2 l2 r2) = case compareMSB (xor min1 max1) (xor min2 max2) of
LT | xor min2 max1 > xor max1 max2 -> goR1 maxV1 max1 n1 max2 r2 -- max1 is arbitrary here - we just need something from tree 1
| min1 < min2 -> l2rMap $ NonEmpty min1 minV1 (goL2 min1 (Bin max1 maxV1 l1 r1) min2 l2)
| min1 > min2 -> l2rMap $ goL1 minV1 min1 (Bin max1 maxV1 l1 r1) min2 l2
| otherwise -> case combine min1 minV1 minV2 of
Nothing -> l2rMap $ goLFused min1 (Bin max1 maxV1 l1 r1) l2
Just !minV1' -> l2rMap $ NonEmpty min1 minV1' (goLFusedKeep min1 (Bin max1 maxV1 l1 r1) l2)
EQ | min1 < min2 -> binR (NonEmpty min1 minV1 (goL2 min1 l1 min2 l2)) (goR1 maxV1 max1 r1 max2 r2)
| min1 > min2 -> binR (goL1 minV1 min1 l1 min2 l2) (goR1 maxV1 max1 r1 max2 r2)
| otherwise -> case combine min1 minV1 minV2 of
Nothing -> binR (goLFused min1 l1 l2) (goR1 maxV1 max1 r1 max2 r2)
Just !minV1' -> binR (NonEmpty min1 minV1' (goLFusedKeep min1 l1 l2)) (goR1 maxV1 max1 r1 max2 r2)
GT -> binR (NonEmpty min1 minV1 l1) (goR1 maxV1 max1 r1 max2 n2)
goR2 !_ Tip !_ !_ = Tip
goR2 max1 n1 max2 Tip = deleteR max2 (xor max1 max2) n1
goR2 _ n1@(Bin min1 _ _ _) max2 (Bin _ _ _ _) | min1 > max2 = n1
goR2 max1 n1@(Bin min1 minV1 l1 r1) max2 n2@(Bin min2 minV2 l2 r2) = case compareMSB (xor min1 max1) (xor min2 max2) of
LT -> goR2 max1 n1 max2 r2
EQ | min1 < min2 -> Bin min1 minV1 (goL2 min1 l1 min2 l2) (goR2 max1 r1 max2 r2)
| min1 > min2 -> case goL1 minV1 min1 l1 min2 l2 of
Empty -> goR2 max1 r1 max2 r2
NonEmpty min' minV' l' -> Bin min' minV' l' (goR2 max1 r1 max2 r2)
| otherwise -> case combine min1 minV1 minV2 of
Nothing -> case goLFused min1 l1 l2 of
Empty -> goR2 max1 r1 max2 r2
NonEmpty min' minV' l' -> Bin min' minV' l' (goR2 max1 r1 max2 r2)
Just !minV1' -> Bin min1 minV1' (goLFusedKeep min1 l1 l2) (goR2 max1 r1 max2 r2)
GT | xor min1 max2 > xor max2 max1 -> Bin min1 minV1 l1 (goR2 max1 r1 max2 n2) -- max2 is arbitrary here - we just need something from tree 2
| min1 < min2 -> Bin min1 minV1 (goL2 min1 l1 min2 (Bin max2 dummyV l2 r2)) r1
| min1 > min2 -> case goL1 minV1 min1 l1 min2 (Bin max2 dummyV l2 r2) of
Empty -> r1
NonEmpty min' minV' l' -> Bin min' minV' l' r1
| otherwise -> case combine min1 minV1 minV2 of
Nothing -> case goLFused min1 l1 (Bin max2 dummyV l2 r2) of
Empty -> r1
NonEmpty min' minV' l' -> Bin min' minV' l' r1
Just !minV1' -> Bin min1 minV1' (goLFusedKeep min1 l1 (Bin max2 dummyV l2 r2)) r1
goRFused max = loop
where
loop Tip !_ = Empty
loop (Bin min1 minV1 l1 r1) Tip = case deleteMaxR min1 minV1 l1 r1 of
DR max' maxV' n' -> NonEmpty max' maxV' n'
loop n1@(Bin min1 minV1 l1 r1) n2@(Bin min2 minV2 l2 r2) = case compareMSB (xor min1 max) (xor min2 max) of
LT -> loop n1 r2
EQ | min1 < min2 -> binR (NonEmpty min1 minV1 (goL2 min1 l1 min2 l2)) (loop r1 r2)
| min1 > min2 -> binR (goL1 minV1 min1 l1 min2 l2) (loop r1 r2)
| otherwise -> case combine min1 minV1 minV2 of
Nothing -> binR (goLFused min1 l1 l2) (loop r1 r2) -- we choose min1 arbitrarily, as min1 == min2
Just !minV1' -> binR (NonEmpty min1 minV1' (goLFusedKeep min1 l1 l2)) (loop r1 r2)
GT -> binR (NonEmpty min1 minV1 l1) (loop r1 n2)
goRFusedKeep max = loop
where
loop n1 Tip = n1
loop Tip !_ = Tip
loop n1@(Bin min1 minV1 l1 r1) n2@(Bin min2 minV2 l2 r2) = case compareMSB (xor min1 max) (xor min2 max) of
LT -> loop n1 r2
EQ | min1 < min2 -> Bin min1 minV1 (goL2 min1 l1 min2 l2) (loop r1 r2)
| min1 > min2 -> case goL1 minV1 min1 l1 min2 l2 of
Empty -> loop r1 r2
NonEmpty min' minV' l' -> Bin min' minV' l' (loop r1 r2)
| otherwise -> case combine min1 minV1 minV2 of -- we choose min1 arbitrarily, as min1 == min2
Nothing -> case goLFused min1 l1 l2 of
Empty -> loop r1 r2
NonEmpty min' minV' l' -> Bin min' minV' l' (loop r1 r2)
Just !minV1' -> Bin min1 minV1' (goLFusedKeep min1 l1 l2) (loop r1 r2)
GT -> Bin min1 minV1 l1 (loop r1 n2)
goLookupL k v !_ Tip = NonEmpty k v Tip
goLookupL k v !xorCache (Bin max maxV l r)
| k < max = if xorCache < xorCacheMax
then goLookupL k v xorCache l
else goLookupR k v xorCacheMax r
| k > max = NonEmpty k v Tip
| otherwise = case combine k v maxV of
Nothing -> Empty
Just !v' -> NonEmpty k v' Tip
where xorCacheMax = xor k max
goLookupR k v !_ Tip = NonEmpty k v Tip
goLookupR k v !xorCache (Bin min minV l r)
| k > min = if xorCache < xorCacheMin
then goLookupR k v xorCache r
else goLookupL k v xorCacheMin l
| k < min = NonEmpty k v Tip
| otherwise = case combine k v minV of
Nothing -> Empty
Just !v' -> NonEmpty k v' Tip
where xorCacheMin = xor min k
dummyV = error "impossible"
-- | /O(n+m)/. The intersection with a combining function.
--
-- > intersectionWith (++) (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) == singleton 5 "aA"
intersectionWith :: (a -> b -> c) -> WordMap a -> WordMap b -> WordMap c
intersectionWith f = intersectionWithKey (\_ a b -> f a b)
-- | /O(n+m)/. The intersection with a combining function.
--
-- > let f k al ar = (show k) ++ ":" ++ al ++ "|" ++ ar
-- > intersectionWithKey f (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) == singleton 5 "5:a|A"
intersectionWithKey :: (Key -> a -> b -> c) -> WordMap a -> WordMap b -> WordMap c
intersectionWithKey combine = start
  where
    -- Naming scheme used throughout the helpers:
    --   goL1/goR1: the value riding along (minV1/maxV1) comes from tree 1,
    --              whose bound key is not (yet) known to occur in tree 2.
    --   goL2/goR2: symmetric, with the rider value coming from tree 2.
    --   goLFused/goRFused: both trees share the same outer bound, so no
    --              rider value is needed.
    -- The `L`/`R` suffix says whether we are walking left spines (min-bounded
    -- nodes) or right spines (max-bounded nodes).
    start (WordMap Empty) !_ = WordMap Empty
    start !_ (WordMap Empty) = WordMap Empty
    start (WordMap (NonEmpty min1 minV1 root1)) (WordMap (NonEmpty min2 minV2 root2))
        | min1 < min2 = WordMap (goL2 minV2 min1 root1 min2 root2)
        | min1 > min2 = WordMap (goL1 minV1 min1 root1 min2 root2)
        | otherwise = WordMap (NonEmpty min1 #! combine min1 minV1 minV2 # goLFused min1 root1 root2) -- we choose min1 arbitrarily, as min1 == min2

    -- TODO: This scheme might produce lots of unnecessary l2r and r2l calls. This should be rectified.

    -- Tree 1's minimum (min1/minV1) may or may not be present in tree 2;
    -- when tree 1 collapses to Tip we fall back to a plain lookup of min1.
    goL1 _ !_ !_ !_ Tip = Empty
    goL1 minV1 min1 Tip min2 n2 = goLookupL1 min1 minV1 (xor min1 min2) n2
    goL1 _ min1 (Bin _ _ _ _) _ (Bin max2 _ _ _) | min1 > max2 = Empty
    goL1 minV1 min1 n1@(Bin max1 maxV1 l1 r1) min2 n2@(Bin max2 maxV2 l2 r2) = case compareMSB (xor min1 max1) (xor min2 max2) of
        LT | xor min2 min1 < xor min1 max2 -> goL1 minV1 min1 n1 min2 l2 -- min1 is arbitrary here - we just need something from tree 1
           | max1 > max2 -> r2lMap $ goR2 maxV2 max1 (Bin min1 minV1 l1 r1) max2 r2
           | max1 < max2 -> r2lMap $ goR1 maxV1 max1 (Bin min1 minV1 l1 r1) max2 r2
           | otherwise -> r2lMap $ NonEmpty max1 #! combine max1 maxV1 maxV2 # goRFused max1 (Bin min1 minV1 l1 r1) r2
        EQ | max1 > max2 -> binL (goL1 minV1 min1 l1 min2 l2) (goR2 maxV2 max1 r1 max2 r2)
           | max1 < max2 -> binL (goL1 minV1 min1 l1 min2 l2) (goR1 maxV1 max1 r1 max2 r2)
           | otherwise -> case goL1 minV1 min1 l1 min2 l2 of
                Empty -> r2lMap (NonEmpty max1 #! combine max1 maxV1 maxV2 # goRFused max1 r1 r2)
                NonEmpty min' minV' l' -> NonEmpty min' minV' (Bin max1 #! combine max1 maxV1 maxV2 # l' # goRFused max1 r1 r2)
        GT -> goL1 minV1 min1 l1 min2 n2

    -- Mirror image of goL1 with the rider value (minV2) coming from tree 2.
    goL2 _ !_ Tip !_ !_ = Empty
    goL2 minV2 min1 n1 min2 Tip = goLookupL2 min2 minV2 (xor min1 min2) n1
    goL2 _ _ (Bin max1 _ _ _) min2 (Bin _ _ _ _) | min2 > max1 = Empty
    goL2 minV2 min1 n1@(Bin max1 maxV1 l1 r1) min2 n2@(Bin max2 maxV2 l2 r2) = case compareMSB (xor min1 max1) (xor min2 max2) of
        LT -> goL2 minV2 min1 n1 min2 l2
        EQ | max1 > max2 -> binL (goL2 minV2 min1 l1 min2 l2) (goR2 maxV2 max1 r1 max2 r2)
           | max1 < max2 -> binL (goL2 minV2 min1 l1 min2 l2) (goR1 maxV1 max1 r1 max2 r2)
           | otherwise -> case goL2 minV2 min1 l1 min2 l2 of
                Empty -> r2lMap (NonEmpty max1 #! combine max1 maxV1 maxV2 # goRFused max1 r1 r2)
                NonEmpty min' minV' l' -> NonEmpty min' minV' (Bin max1 #! combine max1 maxV1 maxV2 # l' # goRFused max1 r1 r2)
        GT | xor min1 min2 < xor min2 max1 -> goL2 minV2 min1 l1 min2 n2 -- min2 is arbitrary here - we just need something from tree 2
           | max1 > max2 -> r2lMap $ goR2 maxV2 max1 r1 max2 (Bin min2 minV2 l2 r2)
           | max1 < max2 -> r2lMap $ goR1 maxV1 max1 r1 max2 (Bin min2 minV2 l2 r2)
           | otherwise -> r2lMap $ NonEmpty max1 #! combine max1 maxV1 maxV2 # goRFused max1 r1 (Bin min2 minV2 l2 r2)

    -- Both trees share the same minimum bound `min`; walk their left spines
    -- in lock-step, keeping a key only when it appears in both sides.
    goLFused min = loop
      where
        loop Tip !_ = Tip
        loop !_ Tip = Tip
        loop n1@(Bin max1 maxV1 l1 r1) n2@(Bin max2 maxV2 l2 r2) = case compareMSB (xor min max1) (xor min max2) of
            LT -> loop n1 l2
            EQ | max1 > max2 -> case goR2 maxV2 max1 r1 max2 r2 of
                    Empty -> loop l1 l2
                    NonEmpty max' maxV' r' -> Bin max' maxV' (loop l1 l2) r'
               | max1 < max2 -> case goR1 maxV1 max1 r1 max2 r2 of
                    Empty -> loop l1 l2
                    NonEmpty max' maxV' r' -> Bin max' maxV' (loop l1 l2) r'
               | otherwise -> Bin max1 #! combine max1 maxV1 maxV2 # loop l1 l2 # goRFused max1 r1 r2 -- we choose max1 arbitrarily, as max1 == max2
            GT -> loop l1 n2

    -- Right-spine analogue of goL1: rider value maxV1 comes from tree 1.
    goR1 _ !_ !_ !_ Tip = Empty
    goR1 maxV1 max1 Tip max2 n2 = goLookupR1 max1 maxV1 (xor max1 max2) n2
    goR1 _ max1 (Bin _ _ _ _) _ (Bin min2 _ _ _) | min2 > max1 = Empty
    goR1 maxV1 max1 n1@(Bin min1 minV1 l1 r1) max2 n2@(Bin min2 minV2 l2 r2) = case compareMSB (xor min1 max1) (xor min2 max2) of
        LT | xor min2 max1 > xor max1 max2 -> goR1 maxV1 max1 n1 max2 r2 -- max1 is arbitrary here - we just need something from tree 1
           | min1 < min2 -> l2rMap $ goL2 minV2 min1 (Bin max1 maxV1 l1 r1) min2 l2
           | min1 > min2 -> l2rMap $ goL1 minV1 min1 (Bin max1 maxV1 l1 r1) min2 l2
           | otherwise -> l2rMap $ NonEmpty min1 #! combine min1 minV1 minV2 # goLFused min1 (Bin max1 maxV1 l1 r1) l2
        EQ | min1 < min2 -> binR (goL2 minV2 min1 l1 min2 l2) (goR1 maxV1 max1 r1 max2 r2)
           | min1 > min2 -> binR (goL1 minV1 min1 l1 min2 l2) (goR1 maxV1 max1 r1 max2 r2)
           | otherwise -> case goR1 maxV1 max1 r1 max2 r2 of
                Empty -> l2rMap (NonEmpty min1 #! combine min1 minV1 minV2 # goLFused min1 l1 l2)
                NonEmpty max' maxV' r' -> NonEmpty max' maxV' (Bin min1 #! combine min1 minV1 minV2 # goLFused min1 l1 l2 # r')
        GT -> goR1 maxV1 max1 r1 max2 n2

    -- Right-spine analogue of goL2: rider value maxV2 comes from tree 2.
    goR2 _ !_ Tip !_ !_ = Empty
    goR2 maxV2 max1 n1 max2 Tip = goLookupR2 max2 maxV2 (xor max1 max2) n1
    goR2 _ _ (Bin min1 _ _ _) max2 (Bin _ _ _ _) | min1 > max2 = Empty
    goR2 maxV2 max1 n1@(Bin min1 minV1 l1 r1) max2 n2@(Bin min2 minV2 l2 r2) = case compareMSB (xor min1 max1) (xor min2 max2) of
        LT -> goR2 maxV2 max1 n1 max2 r2
        EQ | min1 < min2 -> binR (goL2 minV2 min1 l1 min2 l2) (goR2 maxV2 max1 r1 max2 r2)
           | min1 > min2 -> binR (goL1 minV1 min1 l1 min2 l2) (goR2 maxV2 max1 r1 max2 r2)
           | otherwise -> case goR2 maxV2 max1 r1 max2 r2 of
                Empty -> l2rMap (NonEmpty min1 #! combine min1 minV1 minV2 # goLFused min1 l1 l2)
                NonEmpty max' maxV' r' -> NonEmpty max' maxV' (Bin min1 #! combine min1 minV1 minV2 # goLFused min1 l1 l2 # r')
        GT | xor min1 max2 > xor max2 max1 -> goR2 maxV2 max1 r1 max2 n2 -- max2 is arbitrary here - we just need something from tree 2
           | min1 < min2 -> l2rMap $ goL2 minV2 min1 l1 min2 (Bin max2 maxV2 l2 r2)
           | min1 > min2 -> l2rMap $ goL1 minV1 min1 l1 min2 (Bin max2 maxV2 l2 r2)
           | otherwise -> l2rMap $ NonEmpty min1 #! combine min1 minV1 minV2 # goLFused min1 l1 (Bin max2 maxV2 l2 r2)

    -- Both trees share the same maximum bound `max`; lock-step walk of the
    -- right spines, mirroring goLFused.
    goRFused max = loop
      where
        loop Tip !_ = Tip
        loop !_ Tip = Tip
        loop n1@(Bin min1 minV1 l1 r1) n2@(Bin min2 minV2 l2 r2) = case compareMSB (xor min1 max) (xor min2 max) of
            LT -> loop n1 r2
            EQ | min1 < min2 -> case goL2 minV2 min1 l1 min2 l2 of
                    Empty -> loop r1 r2
                    NonEmpty min' minV' l' -> Bin min' minV' l' (loop r1 r2)
               | min1 > min2 -> case goL1 minV1 min1 l1 min2 l2 of
                    Empty -> loop r1 r2
                    NonEmpty min' minV' l' -> Bin min' minV' l' (loop r1 r2)
               | otherwise -> Bin min1 #! combine min1 minV1 minV2 # goLFused min1 l1 l2 # loop r1 r2 -- we choose max1 arbitrarily, as max1 == max2
            GT -> loop r1 n2

    -- Single-key probes: look key k (carrying tree 1's value v) up in the
    -- other tree, combining the two values when the key is found.  The
    -- `1`/`2` suffix fixes the argument order passed to `combine` (tree-1
    -- value first).  xorCache steers the descent; see the lookup code for
    -- the non-combining maps in this module for the same navigation scheme.
    goLookupL1 !_ _ !_ Tip = Empty
    goLookupL1 k v !xorCache (Bin max maxV l r)
        | k < max = if xorCache < xorCacheMax
                    then goLookupL1 k v xorCache l
                    else goLookupR1 k v xorCacheMax r
        | k > max = Empty
        | otherwise = NonEmpty k #! combine k v maxV # Tip
      where xorCacheMax = xor k max

    goLookupR1 !_ _ !_ Tip = Empty
    goLookupR1 k v !xorCache (Bin min minV l r)
        | k > min = if xorCache < xorCacheMin
                    then goLookupR1 k v xorCache r
                    else goLookupL1 k v xorCacheMin l
        | k < min = Empty
        | otherwise = NonEmpty k #! combine k v minV # Tip
      where xorCacheMin = xor min k

    -- As above, but v came from tree 2, so it is the *second* argument to
    -- `combine` (the tree we are searching holds the tree-1 value).
    goLookupL2 !_ _ !_ Tip = Empty
    goLookupL2 k v !xorCache (Bin max maxV l r)
        | k < max = if xorCache < xorCacheMax
                    then goLookupL2 k v xorCache l
                    else goLookupR2 k v xorCacheMax r
        | k > max = Empty
        | otherwise = NonEmpty k #! combine k maxV v # Tip
      where xorCacheMax = xor k max

    goLookupR2 !_ _ !_ Tip = Empty
    goLookupR2 k v !xorCache (Bin min minV l r)
        | k > min = if xorCache < xorCacheMin
                    then goLookupR2 k v xorCache r
                    else goLookupL2 k v xorCacheMin l
        | k < min = Empty
        | otherwise = NonEmpty k #! combine k minV v # Tip
      where xorCacheMin = xor min k
-- | /O(n)/. Map a function over all values in the map.
--
-- > map (++ "x") (fromList [(5,"a"), (3,"b")]) == fromList [(3, "bx"), (5, "ax")]
map :: forall a b. (a -> b) -> WordMap a -> WordMap b
map f = top
  where
    top (WordMap Empty) = WordMap Empty
    top (WordMap (NonEmpty k v root)) = WordMap (NonEmpty k #! f v # onNode root)
    -- Rebuild every node, forcing each transformed value as it is installed.
    onNode :: Node t a -> Node t b
    onNode Tip = Tip
    onNode (Bin bound v l r) = Bin bound #! f v # onNode l # onNode r
-- | /O(n)/. Map a function over all values in the map.
--
-- > let f key x = (show key) ++ ":" ++ x
-- > mapWithKey f (fromList [(5,"a"), (3,"b")]) == fromList [(3, "3:b"), (5, "5:a")]
mapWithKey :: forall a b. (Key -> a -> b) -> WordMap a -> WordMap b
mapWithKey f = top
  where
    top (WordMap Empty) = WordMap Empty
    top (WordMap (NonEmpty k v root)) = WordMap (NonEmpty k #! f k v # onNode root)
    -- Rebuild every node, applying f to the bound key and its value.
    onNode :: Node t a -> Node t b
    onNode Tip = Tip
    onNode (Bin bound v l r) = Bin bound #! f bound v # onNode l # onNode r
-- | /O(n)/.
-- @'traverseWithKey' f s == 'fromList' <$> 'traverse' (\(k, v) -> (,) k <$> f k v) ('toList' m)@
-- That is, behaves exactly like a regular 'traverse' except that the traversing
-- function also has access to the key associated with a value.
--
-- > traverseWithKey (\k v -> if odd k then Just (succ v) else Nothing) (fromList [(1, 'a'), (5, 'e')]) == Just (fromList [(1, 'b'), (5, 'f')])
-- > traverseWithKey (\k v -> if odd k then Just (succ v) else Nothing) (fromList [(2, 'c')]) == Nothing
traverseWithKey :: Applicative f => (Key -> a -> f b) -> WordMap a -> f (WordMap b)
traverseWithKey f = start
  where
    -- The root minimum is the globally smallest key, so its effect runs first.
    start (WordMap Empty) = pure (WordMap Empty)
    start (WordMap (NonEmpty min minV root)) = (\minV' root' -> WordMap (NonEmpty min minV' root')) <$> f min minV <*> goL root
    -- NOTE: effects must be sequenced in ascending key order.  In a
    -- min-bounded (left) node the stored maximum is the subtree's *last*
    -- key, so its effect runs after both children; in a max-bounded
    -- (right) node the stored minimum is the *first* key, so its effect
    -- runs before the children.  Do not reorder the applicative chains.
    goL Tip = pure Tip
    goL (Bin max maxV l r) = (\l' r' maxV' -> Bin max #! maxV' # l' # r') <$> goL l <*> goR r <*> f max maxV
    goR Tip = pure Tip
    goR (Bin min minV l r) = (\minV' l' r' -> Bin min #! minV' # l' # r') <$> f min minV <*> goL l <*> goR r
-- | /O(n)/. The function @'mapAccum'@ threads an accumulating
-- argument through the map in ascending order of keys.
--
-- > let f a b = (a ++ b, b ++ "X")
-- > mapAccum f "Everything: " (fromList [(5,"a"), (3,"b")]) == ("Everything: ba", fromList [(3, "bX"), (5, "aX")])
mapAccum :: (a -> b -> (a, c)) -> a -> WordMap b -> (a, WordMap c)
mapAccum f = mapAccumWithKey step
  where
    -- Same as the keyed variant, with the key ignored.
    step acc _ x = f acc x
-- | /O(n)/. The function @'mapAccumWithKey'@ threads an accumulating
-- argument through the map in ascending order of keys.
--
-- > let f a k b = (a ++ " " ++ (show k) ++ "-" ++ b, b ++ "X")
-- > mapAccumWithKey f "Everything:" (fromList [(5,"a"), (3,"b")]) == ("Everything: 3-b 5-a", fromList [(3, "bX"), (5, "aX")])
mapAccumWithKey :: (a -> Key -> b -> (a, c)) -> a -> WordMap b -> (a, WordMap c)
mapAccumWithKey f = start
  where
    -- The root minimum is the smallest key overall, so it is folded first.
    start a (WordMap Empty) = (a, WordMap Empty)
    start a (WordMap (NonEmpty min minV root)) =
        let (a', !minV') = f a min minV
            (a'', root') = goL root a'
        in (a'', WordMap (NonEmpty min minV' root'))
    -- Min-bounded node: the stored maximum is the subtree's largest key,
    -- so the accumulator flows left child -> right child -> max.
    goL Tip a = (a, Tip)
    goL (Bin max maxV l r) a =
        let (a', l') = goL l a
            (a'', r') = goR r a'
            (a''', !maxV') = f a'' max maxV
        in (a''', Bin max maxV' l' r')
    -- Max-bounded node: the stored minimum is the subtree's smallest key,
    -- so it is folded before either child.
    goR Tip a = (a, Tip)
    goR (Bin min minV l r) a =
        let (a', !minV') = f a min minV
            (a'', l') = goL l a'
            (a''', r') = goR r a''
        in (a''', Bin min minV' l' r')
-- | /O(n)/. The function @'mapAccumRWithKey'@ threads an accumulating
-- argument through the map in descending order of keys.
mapAccumRWithKey :: (a -> Key -> b -> (a, c)) -> a -> WordMap b -> (a, WordMap c)
mapAccumRWithKey f = start
  where
    -- Descending order: the whole tree is folded before the root minimum,
    -- which is the globally smallest key and therefore comes last.
    start a (WordMap Empty) = (a, WordMap Empty)
    start a (WordMap (NonEmpty min minV root)) =
        let (a', root') = goL root a
            (a'', !minV') = f a' min minV
        in (a'', WordMap (NonEmpty min minV' root'))
    -- Min-bounded node: the stored maximum is the largest key in the
    -- subtree, so it is folded first, then right child, then left child.
    goL Tip a = (a, Tip)
    goL (Bin max maxV l r) a =
        let (a', !maxV') = f a max maxV
            (a'', r') = goR r a'
            (a''', l') = goL l a''
        in (a''', Bin max maxV' l' r')
    -- Max-bounded node: the stored minimum is the smallest key in the
    -- subtree, so it is folded last.
    goR Tip a = (a, Tip)
    goR (Bin min minV l r) a =
        let (a', r') = goR r a
            (a'', l') = goL l a'
            (a''', !minV') = f a'' min minV
        in (a''', Bin min minV' l' r')
-- | /O(n*min(n,W))/.
-- @'mapKeys' f s@ is the map obtained by applying @f@ to each key of @s@.
--
-- The size of the result may be smaller if @f@ maps two or more distinct
-- keys to the same new key. In this case the value at the greatest of the
-- original keys is retained.
--
-- > mapKeys (+ 1) (fromList [(5,"a"), (3,"b")]) == fromList [(4, "b"), (6, "a")]
-- > mapKeys (\ _ -> 1) (fromList [(1,"b"), (2,"a"), (3,"d"), (4,"c")]) == singleton 1 "c"
-- > mapKeys (\ _ -> 3) (fromList [(1,"b"), (2,"a"), (3,"d"), (4,"c")]) == singleton 3 "c"
mapKeys :: (Key -> Key) -> WordMap a -> WordMap a
mapKeys f = foldlWithKey' reinsert empty
  where
    -- Later (greater) original keys overwrite earlier ones on collision.
    reinsert m k v = insert (f k) v m
-- | /O(n*min(n,W))/.
-- @'mapKeysWith' c f s@ is the map obtained by applying @f@ to each key of @s@.
--
-- The size of the result may be smaller if @f@ maps two or more distinct
-- keys to the same new key. In this case the associated values will be
-- combined using @c@.
--
-- > mapKeysWith (++) (\ _ -> 1) (fromList [(1,"b"), (2,"a"), (3,"d"), (4,"c")]) == singleton 1 "cdab"
-- > mapKeysWith (++) (\ _ -> 3) (fromList [(1,"b"), (2,"a"), (3,"d"), (4,"c")]) == singleton 3 "cdab"
mapKeysWith :: (a -> a -> a) -> (Key -> Key) -> WordMap a -> WordMap a
mapKeysWith combine f = foldlWithKey' reinsert empty
  where
    -- Colliding keys have their values merged with the supplied combiner.
    reinsert m k v = insertWith combine (f k) v m
-- | /O(n*min(n,W))/.
-- @'mapKeysMonotonic' f s == 'mapKeys' f s@, but works only when @f@
-- is strictly monotonic.
-- That is, for any values @x@ and @y@, if @x@ < @y@ then @f x@ < @f y@.
-- /The precondition is not checked./
-- Semi-formally, we have:
--
-- > and [x < y ==> f x < f y | x <- ls, y <- ls]
-- >     ==> mapKeysMonotonic f s == mapKeys f s
-- >     where ls = keys s
--
-- This means that @f@ maps distinct original keys to distinct resulting keys.
-- This function has slightly better performance than 'mapKeys'.
--
-- > mapKeysMonotonic (\ k -> k * 2) (fromList [(5,"a"), (3,"b")]) == fromList [(6, "b"), (10, "a")]
mapKeysMonotonic :: (Key -> Key) -> WordMap a -> WordMap a
-- Currently implemented by delegating to 'mapKeys'; the monotonicity
-- precondition is not exploited here.
mapKeysMonotonic f = mapKeys f
-- | /O(n*min(n,W))/. Create a map from a list of key\/value pairs.
fromList :: [(Key, a)] -> WordMap a
fromList = Data.List.foldl' step empty
  where
    -- Insert left-to-right, so a repeated key keeps its last value.
    step t (k, v) = insert k v t
-- | /O(n*min(n,W))/. Create a map from a list of key\/value pairs with a combining function. See also 'fromAscListWith'.
--
-- > fromListWith (++) [(5,"a"), (5,"b"), (3,"b"), (3,"a"), (5,"c")] == fromList [(3, "ab"), (5, "cba")]
-- > fromListWith (++) [] == empty
fromListWith :: (a -> a -> a) -> [(Key, a)] -> WordMap a
fromListWith f = Data.List.foldl' step empty
  where
    -- Repeated keys are merged with the supplied combiner.
    step t (k, v) = insertWith f k v t
-- | /O(n*min(n,W))/. Build a map from a list of key\/value pairs with a combining function. See also fromAscListWithKey'.
--
-- > let f key new_value old_value = (show key) ++ ":" ++ new_value ++ "|" ++ old_value
-- > fromListWithKey f [(5,"a"), (5,"b"), (3,"b"), (3,"a"), (5,"c")] == fromList [(3, "3:a|b"), (5, "5:c|5:b|a")]
-- > fromListWithKey f [] == empty
fromListWithKey :: (Key -> a -> a -> a) -> [(Key, a)] -> WordMap a
fromListWithKey f = Data.List.foldl' step empty
  where
    -- Repeated keys are merged with a combiner that also sees the key.
    step t (k, v) = insertWithKey f k v t
-- TODO: Use the ordering
-- | /O(n)/. Build a map from a list of key\/value pairs where
-- the keys are in ascending order.
--
-- > fromAscList [(3,"b"), (5,"a")] == fromList [(3, "b"), (5, "a")]
-- > fromAscList [(3,"b"), (5,"a"), (5,"b")] == fromList [(3, "b"), (5, "b")]
fromAscList :: [(Key, a)] -> WordMap a
-- The ascending-order precondition is not yet exploited; this simply
-- delegates to 'fromList'.
fromAscList kvs = fromList kvs
-- | /O(n)/. Build a map from a list of key\/value pairs where
-- the keys are in ascending order.
--
-- > fromAscList [(3,"b"), (5,"a")] == fromList [(3, "b"), (5, "a")]
-- > fromAscList [(3,"b"), (5,"a"), (5,"b")] == fromList [(3, "b"), (5, "b")]
fromAscListWith :: (a -> a -> a) -> [(Key, a)] -> WordMap a
-- The ordering precondition is not yet exploited; delegates to
-- 'fromListWith'.
fromAscListWith f = fromListWith f
-- | /O(n)/. Build a map from a list of key\/value pairs where
-- the keys are in ascending order, with a combining function on equal keys.
-- /The precondition (input list is ascending) is not checked./
--
-- > let f key new_value old_value = (show key) ++ ":" ++ new_value ++ "|" ++ old_value
-- > fromAscListWithKey f [(3,"b"), (5,"a"), (5,"b")] == fromList [(3, "b"), (5, "5:b|a")]
fromAscListWithKey :: (Key -> a -> a -> a) -> [(Key, a)] -> WordMap a
-- The ordering precondition is not yet exploited; delegates to
-- 'fromListWithKey'.
fromAscListWithKey f = fromListWithKey f
-- | /O(n)/. Build a map from a list of key\/value pairs where
-- the keys are in ascending order and all distinct.
-- /The precondition (input list is strictly ascending) is not checked./
--
-- > fromDistinctAscList [(3,"b"), (5,"a")] == fromList [(3, "b"), (5, "a")]
fromDistinctAscList :: [(Key, a)] -> WordMap a
-- Distinctness and ordering are not yet exploited; delegates to 'fromList'.
fromDistinctAscList kvs = fromList kvs
-- | /O(n)/. Map values and collect the 'Just' results.
--
-- > let f x = if x == "a" then Just "new a" else Nothing
-- > mapMaybe f (fromList [(5,"a"), (3,"b")]) == singleton 5 "new a"
mapMaybe :: (a -> Maybe b) -> WordMap a -> WordMap b
mapMaybe f = mapMaybeWithKey (\_ v -> f v)
-- | /O(n)/. Map keys\/values and collect the 'Just' results.
--
-- > let f k _ = if k < 5 then Just ("key : " ++ (show k)) else Nothing
-- > mapMaybeWithKey f (fromList [(5,"a"), (3,"b")]) == singleton 3 "key : 3"
mapMaybeWithKey :: (Key -> a -> Maybe b) -> WordMap a -> WordMap b
mapMaybeWithKey f = start
  where
    -- goL/goR are used while the subtree's bound key is known to survive;
    -- goDeleteL/goDeleteR are used once a bound key has been dropped and
    -- the subtree may therefore collapse to Empty.
    start (WordMap Empty) = WordMap Empty
    start (WordMap (NonEmpty min minV root)) = case f min minV of
        Just !minV' -> WordMap (NonEmpty min minV' (goL root))
        Nothing -> WordMap (goDeleteL root)
    -- Min-bounded node whose enclosing minimum survives.
    goL Tip = Tip
    goL (Bin max maxV l r) = case f max maxV of
        Just !maxV' -> Bin max maxV' (goL l) (goR r)
        Nothing -> case goDeleteR r of
            Empty -> goL l
            NonEmpty max' maxV' r' -> Bin max' maxV' (goL l) r'
    -- Max-bounded node whose enclosing maximum survives.
    goR Tip = Tip
    goR (Bin min minV l r) = case f min minV of
        Just !minV' -> Bin min minV' (goL l) (goR r)
        Nothing -> case goDeleteL l of
            Empty -> goR r
            NonEmpty min' minV' l' -> Bin min' minV' l' (goR r)
    -- The enclosing minimum was deleted: return a min-keyed nonempty map,
    -- promoting a new minimum out of the subtree when one exists.
    goDeleteL Tip = Empty
    goDeleteL (Bin max maxV l r) = case f max maxV of
        Just !maxV' -> case goDeleteL l of
            Empty -> case goR r of
                Tip -> NonEmpty max maxV' Tip
                Bin minI minVI lI rI -> NonEmpty minI minVI (Bin max maxV' lI rI)
            NonEmpty min minV l' -> NonEmpty min minV (Bin max maxV' l' (goR r))
        Nothing -> binL (goDeleteL l) (goDeleteR r)
    -- Symmetric: the enclosing maximum was deleted.
    goDeleteR Tip = Empty
    goDeleteR (Bin min minV l r) = case f min minV of
        Just !minV' -> case goDeleteR r of
            Empty -> case goL l of
                Tip -> NonEmpty min minV' Tip
                Bin maxI maxVI lI rI -> NonEmpty maxI maxVI (Bin min minV' lI rI)
            NonEmpty max maxV r' -> NonEmpty max maxV (Bin min minV' (goL l) r')
        Nothing -> binR (goDeleteL l) (goDeleteR r)
-- | /O(n)/. Map values and separate the 'Left' and 'Right' results.
--
-- > let f a = if a < "c" then Left a else Right a
-- > mapEither f (fromList [(5,"a"), (3,"b"), (1,"x"), (7,"z")])
-- >     == (fromList [(3,"b"), (5,"a")], fromList [(1,"x"), (7,"z")])
-- >
-- > mapEither (\ a -> Right a) (fromList [(5,"a"), (3,"b"), (1,"x"), (7,"z")])
-- >     == (empty, fromList [(5,"a"), (3,"b"), (1,"x"), (7,"z")])
mapEither :: (a -> Either b c) -> WordMap a -> (WordMap b, WordMap c)
mapEither f = mapEitherWithKey (\_ v -> f v)
-- | /O(n)/. Map keys\/values and separate the 'Left' and 'Right' results.
--
-- > let f k a = if k < 5 then Left (k * 2) else Right (a ++ a)
-- > mapEitherWithKey f (fromList [(5,"a"), (3,"b"), (1,"x"), (7,"z")])
-- >     == (fromList [(1,2), (3,6)], fromList [(5,"aa"), (7,"zz")])
-- >
-- > mapEitherWithKey (\_ a -> Right a) (fromList [(5,"a"), (3,"b"), (1,"x"), (7,"z")])
-- >     == (empty, fromList [(1,"x"), (3,"b"), (5,"a"), (7,"z")])
mapEitherWithKey :: (Key -> a -> Either b c) -> WordMap a -> (WordMap b, WordMap c)
mapEitherWithKey func = start
  where
    -- SP t f pairs the "Left" ('True') part with the "Right" ('False')
    -- part of a subtree.  goTrueL/goTrueR are used while the enclosing
    -- bound landed on the Left side (so the True part stays a bare Node);
    -- goFalseL/goFalseR are the mirror image for a Right-bound key.
    start (WordMap Empty) = (WordMap Empty, WordMap Empty)
    start (WordMap (NonEmpty min minV root)) = case func min minV of
        Left !v -> let SP t f = goTrueL root
                   in (WordMap (NonEmpty min v t), WordMap f)
        Right !v -> let SP t f = goFalseL root
                    in (WordMap t, WordMap (NonEmpty min v f))
    -- Min-bounded node whose enclosing minimum went Left.
    goTrueL Tip = SP Tip Empty
    goTrueL (Bin max maxV l r) = case func max maxV of
        Left !v -> let SP tl fl = goTrueL l
                       SP tr fr = goTrueR r
                   in SP (Bin max v tl tr) (binL fl fr)
        Right !v -> let SP tl fl = goTrueL l
                        SP tr fr = goFalseR r
                        -- The True side lost its maximum: splice the right
                        -- remnant back in, or fall back to the left part.
                        t = case tr of
                            Empty -> tl
                            NonEmpty max' maxV' r' -> Bin max' maxV' tl r'
                        f = case fl of
                            Empty -> r2lMap $ NonEmpty max v fr
                            NonEmpty min' minV' l' -> NonEmpty min' minV' (Bin max v l' fr)
                    in SP t f
    -- Max-bounded node whose enclosing maximum went Left.
    goTrueR Tip = SP Tip Empty
    goTrueR (Bin min minV l r) = case func min minV of
        Left !v -> let SP tl fl = goTrueL l
                       SP tr fr = goTrueR r
                   in SP (Bin min v tl tr) (binR fl fr)
        Right !v -> let SP tl fl = goFalseL l
                        SP tr fr = goTrueR r
                        t = case tl of
                            Empty -> tr
                            NonEmpty min' minV' l' -> Bin min' minV' l' tr
                        f = case fr of
                            Empty -> l2rMap $ NonEmpty min v fl
                            NonEmpty max' maxV' r' -> NonEmpty max' maxV' (Bin min v fl r')
                    in SP t f
    -- Min-bounded node whose enclosing minimum went Right.
    goFalseL Tip = SP Empty Tip
    goFalseL (Bin max maxV l r) = case func max maxV of
        Left !v -> let SP tl fl = goFalseL l
                       SP tr fr = goTrueR r
                       t = case tl of
                           Empty -> r2lMap $ NonEmpty max v tr
                           NonEmpty min' minV' l' -> NonEmpty min' minV' (Bin max v l' tr)
                       f = case fr of
                           Empty -> fl
                           NonEmpty max' maxV' r' -> Bin max' maxV' fl r'
                   in SP t f
        Right !v -> let SP tl fl = goFalseL l
                        SP tr fr = goFalseR r
                    in SP (binL tl tr) (Bin max v fl fr)
    -- Max-bounded node whose enclosing maximum went Right.
    goFalseR Tip = SP Empty Tip
    goFalseR (Bin min minV l r) = case func min minV of
        Left !v -> let SP tl fl = goTrueL l
                       SP tr fr = goFalseR r
                       t = case tr of
                           Empty -> l2rMap $ NonEmpty min v tl
                           NonEmpty max' maxV' r' -> NonEmpty max' maxV' (Bin min v tl r')
                       f = case fl of
                           Empty -> fr
                           NonEmpty min' minV' l' -> Bin min' minV' l' fr
                   in SP t f
        Right !v -> let SP tl fl = goFalseL l
                        SP tr fr = goFalseR r
                    in SP (binR tl tr) (Bin min v fl fr)
-- | /O(min(n,W))/. Update the value at the minimal key.
--
-- > updateMin (\ a -> Just ("X" ++ a)) (fromList [(5,"a"), (3,"b")]) == fromList [(3, "Xb"), (5, "a")]
-- > updateMin (\ _ -> Nothing) (fromList [(5,"a"), (3,"b")]) == singleton 5 "a"
updateMin :: (a -> Maybe a) -> WordMap a -> WordMap a
updateMin _ (WordMap Empty) = WordMap Empty
updateMin f m = update f smallestKey m
  where
    -- Safe: the Empty case was handled by the first equation.
    smallestKey = fst (findMin m)
-- | /O(min(n,W))/. Update the value at the maximal key.
--
-- > updateMax (\ a -> Just ("X" ++ a)) (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "Xa")]
-- > updateMax (\ _ -> Nothing) (fromList [(5,"a"), (3,"b")]) == singleton 3 "b"
updateMax :: (a -> Maybe a) -> WordMap a -> WordMap a
updateMax _ (WordMap Empty) = WordMap Empty
updateMax f m = update f largestKey m
  where
    -- Safe: the Empty case was handled by the first equation.
    largestKey = fst (findMax m)
-- | /O(min(n,W))/. Update the value at the minimal key.
--
-- > updateMinWithKey (\ k a -> Just ((show k) ++ ":" ++ a)) (fromList [(5,"a"), (3,"b")]) == fromList [(3,"3:b"), (5,"a")]
-- > updateMinWithKey (\ _ _ -> Nothing) (fromList [(5,"a"), (3,"b")]) == singleton 5 "a"
updateMinWithKey :: (Key -> a -> Maybe a) -> WordMap a -> WordMap a
updateMinWithKey _ (WordMap Empty) = WordMap Empty
updateMinWithKey f m = updateWithKey f smallestKey m
  where
    -- Safe: the Empty case was handled by the first equation.
    smallestKey = fst (findMin m)
-- | /O(min(n,W))/. Update the value at the maximal key.
--
-- > updateMaxWithKey (\ k a -> Just ((show k) ++ ":" ++ a)) (fromList [(5,"a"), (3,"b")]) == fromList [(3,"b"), (5,"5:a")]
-- > updateMaxWithKey (\ _ _ -> Nothing) (fromList [(5,"a"), (3,"b")]) == singleton 3 "b"
updateMaxWithKey :: (Key -> a -> Maybe a) -> WordMap a -> WordMap a
updateMaxWithKey _ (WordMap Empty) = WordMap Empty
updateMaxWithKey f m = updateWithKey f largestKey m
  where
    -- Safe: the Empty case was handled by the first equation.
    largestKey = fst (findMax m)
| gereeter/bounded-intmap | src/Data/WordMap/Strict.hs | mit | 72,074 | 0 | 21 | 22,618 | 23,120 | 11,305 | 11,815 | 939 | 70 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Scrabble.BagTests where
import Scrabble
import Scrabble.ScrabbleArbitrary()
import Test.Framework.Providers.HUnit
import Test.Framework.Providers.QuickCheck2 (testProperty)
import Test.Framework.TH
import Test.HUnit
import TestHelpers
-- The fixed, unshuffled reference bag the unit tests below inspect.
bag = orderedBag

-- Sanity check: the reference bag equals itself.
case_sanity_check = bag @?= orderedBag

-- A standard Scrabble bag contains twelve E tiles.
case_es_in_bag = countLettersInBag E bag @?= 12

-- ...and exactly two blank tiles.
case_blanks_in_bag = countLettersInBag Blank bag @?= 2

-- The full bag holds 100 tiles in total.
case_letters_in_bag = bagSize bag @?= 100

-- Shuffling must not change the tile count...
case_count_shuffled_bag = do { b <- newShuffledBag; bagSize b @?= 100 }

-- ...nor the total point value of the tiles.
case_total_points = do { b <- newShuffledBag; pointsInBag b @?= 187 }

-- JSON round-trip properties: decode . encode must be the identity.
prop_rack_round_trip_json :: Rack -> Bool
prop_rack_round_trip_json = roundTripJSON
prop_bag_round_trip_json :: Bag -> Bool
prop_bag_round_trip_json = roundTripJSON

-- Template Haskell splice that collects all case_*/prop_* definitions
-- above into a single test group.
tests = $testGroupGenerator
| joshcough/Scrabble | test/Scrabble/BagTests.hs | mit | 884 | 0 | 8 | 133 | 190 | 108 | 82 | 22 | 1 |
{-# LANGUAGE PatternSynonyms #-}
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module JSDOM.Generated.WebGPURenderPipelineState
(setLabel, getLabel, WebGPURenderPipelineState(..),
gTypeWebGPURenderPipelineState)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
-- | <https://developer.mozilla.org/en-US/docs/Web/API/WebGPURenderPipelineState.label Mozilla WebGPURenderPipelineState.label documentation>
-- Generated binding: assigns the JavaScript @label@ property on the
-- underlying object, converting @val@ with 'toJSVal' first.
setLabel ::
         (MonadDOM m, ToJSString val) =>
           WebGPURenderPipelineState -> val -> m ()
setLabel self val = liftDOM (self ^. jss "label" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/WebGPURenderPipelineState.label Mozilla WebGPURenderPipelineState.label documentation>
-- Generated binding: reads the JavaScript @label@ property.
-- 'fromJSValUnchecked' trusts the JS value to convert to @result@.
getLabel ::
         (MonadDOM m, FromJSString result) =>
           WebGPURenderPipelineState -> m result
getLabel self
  = liftDOM ((self ^. js "label") >>= fromJSValUnchecked)
| ghcjs/jsaddle-dom | src/JSDOM/Generated/WebGPURenderPipelineState.hs | mit | 1,711 | 0 | 10 | 228 | 419 | 262 | 157 | 28 | 1 |
-- Cummulative product. Primitive recursion with "foldr".
module Product where
import Prelude hiding (product)
import Data.List (foldr)
-- | The product of a list of integers; the empty product is 1.
--   Written as primitive recursion through 'foldr', which is the
--   point of this exercise module.
product :: [Integer] -> Integer
product xs = foldr step 1 xs
  where
    step n acc = n * acc
{- GHCi>
product []
product [1]
product [1 , 2]
-}
-- 1
-- 1
-- 2
| pascal-knodel/haskell-craft | Examples/· Folds/product/foldr/Product.hs | mit | 273 | 0 | 6 | 61 | 52 | 34 | 18 | 5 | 1 |
{- Quaternion.hs; Mun Hon Cheong (mhch295@cse.unsw.edu.au) 2005
This module just performs some basic
converions between quaternions and matrices
-}
module Quaternion where
import Graphics.UI.GLUT -- (GLmatrix, GLfloat, newMatrix, ColumnMajor)
type Quaternion = (Float,Float,Float,Float)
type Matrix3x3 = ((Float,Float,Float),
(Float,Float,Float),
(Float,Float,Float))
-- | Convert a quaternion (x,y,z,w) plus a translation (t1,t2,t3) into a
-- 4x4 OpenGL matrix (assumes the quaternion is unit length — TODO
-- confirm callers normalise).  The element list is handed to
-- 'newMatrix' in ColumnMajor order, so the r3* translation entries end
-- up in the matrix's fourth column.  NOTE(review): under ColumnMajor
-- the r00..r22 entries are laid out transposed relative to their
-- row/column names — confirm this matches the intended convention.
quat2Mat :: Quaternion -> (Float,Float,Float) -> IO (GLmatrix GLfloat)
quat2Mat (x,y,z,w) (t1,t2,t3)=
   newMatrix ColumnMajor [r00 :: GLfloat,r01,r02,r03,
                          r10,r11,r12,r13,
                          r20,r21,r22,r23,
                          r30,r31,r32,r33]
   where -- standard quaternion-to-rotation-matrix terms
         r00 = 1 - (2*((y*y)+(z*z)))
         r01 = 2 * ((x*y)-(w*z))
         r02 = 2 * ((x*z)+(w*y))
         r03 = 0
         r10 = 2 * ((x*y)+(w*z))
         r11 = 1 - (2*((x*x)+(z*z)))
         r12 = 2 * ((y*z)-(w*x))
         r13 = 0
         r20 = 2 * ((x*z)-(w*y))
         r21 = 2 * ((y*z)+(w*x))
         r22 = 1 - (2*((x*x)+(y*y)))
         r23 = 0
         -- translation occupies the last four slots of the list
         r30 = t1
         r31 = t2
         r32 = t3
         r33 = 1
-- | Convert a 3x3 rotation matrix to a quaternion in (x,y,z,w) order.
-- Classic four-branch extraction: use the trace when it is safely
-- positive, otherwise branch on the largest diagonal element so the
-- divisor @scaleN@ stays well away from zero.
mat2Quat :: Matrix3x3 -> Quaternion
mat2Quat ((r00,r01,r02),
          (r10,r11,r12),
          (r20,r21,r22))
   -- trace-dominant case; the 0.00000001 epsilon guards the divisor
   | diag > 0.00000001 = ((r21-r12)/scale0,
                          (r02-r20)/scale0,
                          (r10-r01)/scale0,
                          0.25*scale0)
   | r00 > r11 && r00 > r22 = (0.25*scale1,
                               (r10+r01)/scale1,
                               (r02+r20)/scale1,
                               (r21-r12)/scale1)
   | r11 > r22 = ((r10+r01)/scale2,
                  0.25*scale2,
                  (r21+r12)/scale2,
                  (r02-r20)/scale2)
   | otherwise = ((r02+r20)/scale3,
                  (r21+r12)/scale3,
                  0.25*scale3,
                  (r10-r01)/scale3)
   where
   -- 'diag' is the matrix trace plus one
   diag = r00+r11+r22+1
   scale0 = 2*sqrt diag
   scale1 = 2*sqrt (r00-r11-r22+1)
   scale2 = 2*sqrt (r11-r00-r22+1)
   scale3 = 2*sqrt (r22-r00-r11+1)
-- Componentwise linear interpolation between two quaternions.  Despite
-- the name this is not a true spherical interpolation, but (as the
-- original author noted) the visual difference is negligible here.
slerp :: Quaternion -> Quaternion -> Float -> Quaternion
slerp qa@(x0,y0,z0,w0) qb@(x1,y1,z1,w1) t
    | qa == qb = qa
    | otherwise = (blend x0 x1, blend y0 y1, blend z0 z1, blend w0 w1)
    where
      -- weight (1 - t) on the first argument, t on the second
      blend a b = ((1 - t) * a) + (t * b)
| pushkinma/frag | src/Quaternion.hs | gpl-2.0 | 2,693 | 0 | 13 | 1,057 | 1,114 | 639 | 475 | 62 | 1 |
-- 'type' declares type aliases (synonyms): each name below is just
-- 'String'/'Float' under another name, purely for readability.
type Name = String
type Author = String
type ISBN = String
type Price = Float
-- first Book, type constructor
-- second Book, data constructor
-- Equivalent positional form:
-- data Book = Book Name Author ISBN Price deriving (Show, Eq)
data Book = Book {
    name :: Name,
    author :: Author,
    isbn :: ISBN,
    price :: Price
  } deriving (Show, Eq)
-- Entry point: prints an empty string (placeholder demo).
main = do
    print ""
| solvery/lang-features | haskell/type_2.hs | gpl-2.0 | 381 | 0 | 8 | 100 | 84 | 52 | 32 | 12 | 1 |
{-# OPTIONS -fglasgow-exts -fallow-undecidable-instances #-}
----------------------------------------------------------------------------
-- |
-- Module : Text.XML.Serializer.Core
-- Copyright : (c) Simon Foster 2005
-- License : GPL version 2 (see COPYING)
--
-- Maintainer : aca01sdf@shef.ac.uk
-- Stability : experimental
-- Portability : non-portable (ghc >= 6 only)
--
-- A Generic XML Serializer using HXT and the Generics package (SYB3). This new version of
-- GXS is based on type classes, and thus allows modular customization. More coming soon.
--
-- This is the core serializer, as such it is capable of doing very little, and needs propogating
-- with serialization rules. A set of basic rules can be found in Text.XML.Serializer.DefaultRules
--
-- @This file is part of HAIFA.@
--
-- @HAIFA is free software; you can redistribute it and\/or modify it under the terms of the
-- GNU General Public License as published by the Free Software Foundation; either version 2
-- of the License, or (at your option) any later version.@
--
-- @HAIFA is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
-- even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.@
--
-- @You should have received a copy of the GNU General Public License along with HAIFA; if not,
-- write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA@
----------------------------------------------------------------------------
module Text.XML.Serializer.Core where
import Data.Generics2
import Data.Char
import Data.DynamicMap
import Data.List
import Data.Maybe
import Text.XML.HXT.Parser
import Network.URI
import Text.XML.Serializer.Datatypes
import Control.Monad.State
-- | Get the namespaces exported by an XML Hook: run the hook over an
-- empty 'DynamicMap' and read the namespace table stored under
-- 'nstKey' ('lookupDM_D' falls back to the key's default when the
-- hook did not add one).
getNamespaces :: InitXMLHook a => a -> [(String, URI)]
getNamespaces x = lookupDM_D nstKey $ hookDM x emptyDM
{- | Derive an XML Constructor for a data-type using SYB3 to generate the rules. Will either be based on field labels if presents or
on internal defaults of the type being serialized.
-}
-- Derive a constructor whose fields default to child *elements*.
deriveXConsElem :: (Data (DictXMLData h) a, XMLNamespace a) => String -> h -> a -> XMLConstr
deriveXConsElem n = deriveXMLConstrPrim [n] Elem
-- Derive a constructor whose fields default to *attributes*.
deriveXConsAttr :: (Data (DictXMLData h) a, XMLNamespace a) => String -> h -> a -> XMLConstr
deriveXConsAttr n = deriveXMLConstrPrim [n] Attr
-- No signature in the original.  Element names come from the data
-- type's constructor names when it is algebraic; otherwise one "item"
-- per sub-term.
deriveXMLConstr (q::h) x = deriveXMLConstrPrim names Elem q x
    where names = if (isAlgType $ dataTypeOf ctx x) then (map showConstr $ dataTypeConstrs $ dataTypeOf ctx x) else replicate (glength ctx x) "item"
          ctx = undefined::DictXMLData h ()
-- Build an 'XMLConstr' description for type @a@: @names@ supplies the
-- per-constructor element/attribute names and @f@ decides whether an
-- unnamed field becomes an element or an attribute.
deriveXMLConstrPrim :: (Data (DictXMLData h) a, XMLNamespace a) => [String] -> (String -> Maybe URI -> FieldProp) -> h -> a -> XMLConstr
deriveXMLConstrPrim names f (q::h) x =
    -- Only algebraic types carry field descriptions.
    xmlConstr { xmlFields = if (isAlgType $ dataTypeOf ctx x) then fieldElems else []
              , elementNames = names
              , attributeNames = names
              , xmlNsURI = namespaceURI x
              , xmlPrefix = defaultPrefix x
              -- One name yields a single particle, several a Choice.
              , defaultProp = let l = map (\n -> f n (namespaceURI x)) names in
                                  case l of
                                    [] -> Nothing
                                    [x] -> Just x
                                    y -> Just $ Choice y
              }
    where -- Without record labels, fall back to the sub-terms' own
          -- defaults; with labels, 'decideField' may still prefer a
          -- sub-term default when the sub-term forces it.
          fieldElems = if (null fields) then zipWith (\e -> \d -> fromMaybe d e) subFields subElements
                       else (zipWith3 decideField subFields subDefault fields)
          fields = constrFields $ toConstr ctx x
          flength = glength ctx x
          ctx = undefined::DictXMLData h ()
          decideField sf d fn = if (d&&isJust sf) then (fromJust sf) else Elem fn (namespaceURI x)
          -- XMLConstr for each immediate sub-term of @x@.
          subConstrs = gmapQ ctx subConstr x
              where subConstr (x::a) = toXMLConstrA q x
          {-let dt = dataTypeOf ctx x in
              if (isAlgType dt) then map (toXMLConstrA q) ((map (fromConstr ctx) $ dataTypeConstrs dt)::[a])
                 else [toXMLConstrA q x]-}
          subFields = map defaultProp subConstrs
          -- Sub-terms with no element names serialize as "item".
          subElements = map (\c -> let es = elementNames c in
                                     case es of
                                       [] -> Elem "item" Nothing
                                       [x] -> Elem x (xmlNsURI c)
                                       y -> Choice $ map (\x -> Elem x (xmlNsURI c)) y) subConstrs
          subDefault = map forceDefault subConstrs
-- | Get all the possible XML Constructors of a particular type.
-- For an algebraic type this instantiates *every* data constructor
-- (via 'fromConstr') and derives a description for each; other types
-- contribute exactly one, from the value itself.
getXMLConstrs :: Data (DictXMLData h) a => h -> a -> [XMLConstr]
getXMLConstrs (q::h) (x::a) =
    let dt = dataTypeOf ctx x
        ctx = undefined::DictXMLData h () in
    if (isAlgType dt) then map (toXMLConstrA q) ((map (fromConstr ctx) $ dataTypeConstrs dt)::[a])
    else [toXMLConstrA q x]
-----------------------------------------------------------------------------------------------------
-- The Serialization Type-Classes
-----------------------------------------------------------------------------------------------------
-- | The XMLData class is an extension of Data which allows customization of XML serialization.
class (Data (DictXMLData h) a) => XMLData h a where
xmlEncode :: DynamicMap -> h -> a -> [[XmlFilter]] -- Custom encoder
{- Perform the default case for serialization, provided the given data-item is XMLData and Data and there are
no field properties specified, hand control over to XMLData to perform serialization. If there *are* field
properties serialize by running serialize on each sub-term via gmapQ and then use zipWith to wrap up each
serialization in elements and attributes (via xmlWrap).
Strategy for encoding should be;
1) Use a custom XML encoder if present
2) If an algebraic data-type with custom fields, use those for encoding.
3) If an algebraic data-type with record field, use those for encoding as element names.
4) Use the default instance (atm via Show, but should be via SYB generic)
-}
xmlEncode dm q x = if (null fs)
then []
else [concat $ zipWith (xmlWrap nst) elements $ gmapQ ctx (xmlEncodeA dm q) x]
where ctx = undefined::DictXMLData h ()
fs = xmlFields $ toXMLConstr q x
nst = lookupDM_D nstIKey dm
ns = xmlNsURI $ toXMLConstrA q x
prefix = maybe "" (\ns -> fromMaybe "" $ lookup ns nst) ns
xmlConstrs = gmapQ ctx (toXMLConstrA q) x
xmlCons = toXMLConstr q x
--elements = map element (zip [0..] fs)
elements = zipWith (\f -> \fn -> f fn) (gmapQ ctx (toFieldProp q) x) fs
element (n, f) = case f of
Choice ps -> ps!!((constrIndex $ constrs!!n)-1)
x -> x
where --constr = (constrs!!n)
qualName n q = maybe n (\x->x++":"++n) (q >>= \x -> lookup x nst)
{- askName = map elementName xmlConstrs
askPrefix = map (\x -> maybe "" (\x -> x++":") $ ((snd x) >>= \x -> lookup x nst)) (gmapQ ctx (getNamespace q) x)-}
constrs = gmapQ ctx (toConstr ctx) x
xmlDecode :: h -> ReadX a -- Monadic Decoder
xmlDecode h = xmlDecodeDefault h (undefined::a)
toXMLConstr :: h -> a -> XMLConstr
toXMLConstr _ x = xmlConstr
-- | The default case for deserializing a type. Use field labels and defaulting as before.
xmlDecodeDefault :: (XMLData h a, Data (DictXMLData h) a) => h -> a -> ReadX a
xmlDecodeDefault (q::h) (x::a) = do s <- get
let desCons c = do let xc = toXMLConstr q ((fromConstr ctx c)::a)
put s{fields = xmlFields xc, thisXMLConstr = xc, thisConstr = c}
fromConstrM ctx deserialize c
-- If forceConstr has been set by something further up the tree, use that without backtracking.
maybe
(msum $ map desCons cons)
(\n -> desCons (cons!!(n-1))) (forceConstr s)
where ctx = undefined :: DictXMLData h ()
cons = dataTypeConstrs $ dataTypeOf ctx x
switchOnParticle f readElem
= do s <- get
case f of
(Elem n u) -> do c <- readElem (undefined, n)
let es = map getChildren c; as = map getAttrl c
newReadX s{ elements = es
, attribs = as
} (xmlDecodeA q)
(Attr n u) -> do c <- readAttr (undefined, n)
let es = map getChildren c; as = map getAttrl c
newReadX s{ elements = es
, attribs = as
} (xmlDecodeA q)
Choice ps -> msum $ map (\(p, cn) -> do -- put s{ forceConstr = Just cn }
switchOnParticle p readElem) (zip ps [1..])
(Splice) -> mzero
deserialize :: (Data (DictXMLData h) b) => ReadX b
deserialize = result
where
result = do f <- nextField
s <- get
put s{forceConstr = Nothing}
let cons = toConstr ctx thisType
xc = toXMLConstrA q thisType
xcs = map (toXMLConstrA q) ((map (fromConstr ctx) $ dataTypeConstrs $ dataTypeOf ctx thisType)::[b])
tc = thisConstr s
txc = thisXMLConstr s
readElem = if (isMulti xc) then if (isInterleaved txc) then readElemI else readElemS
else \n -> do let f = if (isInterleaved txc) then read1ElemI else read1ElemS
f n >>= \x -> return [x]
-- FIXME : All of the following should be made namespace aware at some point.
switchOnParticle f readElem
(thisType::b) = thisTypeOf result
where
thisTypeOf :: ReadX b -> b
thisTypeOf = undefined
-- | Get the XML Constructor when only the dictionary is available.
toXMLConstrA :: Data (DictXMLData b) a => b -> a -> XMLConstr
toXMLConstrA (h::b) (x::a) = toXMLConstrD (dict::DictXMLData b a) x
-- | Get the XML Encoder when only the dictionary is available.
xmlEncodeA :: Data (DictXMLData b) a => DynamicMap -> b -> a -> [[XmlFilter]]
xmlEncodeA dm (h::b) (x::a) = xmlEncodeD (dict::DictXMLData b a) dm x
-- | Get the XML Decoder when only the dictionary is available.
-- The intermediate 'result' binding pins the decoder's element type to
-- @a@ via its pattern signature.
xmlDecodeA :: Data (DictXMLData b) a => b -> ReadX a
xmlDecodeA (q::h) = result
    where
    result :: ReadX a = xmlDecodeD (dict::DictXMLData h a)
-- | Get all the namespace URIs and prefixes of a particular type:
-- one (prefix, maybe-URI) pair per possible XML constructor.
getURIs :: (Data (DictXMLData b) a) => b -> a -> [(String, Maybe URI)]
getURIs (h::b) (x::a) = let c = getXMLConstrs h x in map (\c -> (xmlPrefix c, xmlNsURI c)) c
-- Fall back to the constructor's element names when it declares no
-- explicit default particle; several names collapse into a 'Choice'.
defaultPropA c = if (isNothing $ defaultProp c) then (case (elementNames c) of
                                                        [] -> Nothing
                                                        [x] -> Just $ Elem x (xmlNsURI c)
                                                        y -> Just $ Choice $ map (\x -> Elem x (xmlNsURI c)) y)
                 else defaultProp c
-- | The dictionary lookup for XMLData.  Encoding zips the class
-- encoder's per-field output with the hook's extra filters, so both
-- lists must have the same length.
instance (Data (DictXMLData b) a, XMLHook b a, XMLData b a) => Sat (DictXMLData b a) where
    dict = DictXMLData { xmlEncodeD = \d -> \x -> zipWith (++) (xmlEncode d ctx x) (encodeHook d ctx x)
                       , xmlDecodeD = xmlDecode ctx
                       , toXMLConstrD = toXMLConstr ctx
                       }
        where ctx = undefined::b
-- The resolved field particle for a value, when its constructor
-- declares a default.
toFieldPropA :: (Data (DictXMLData b) a) => b -> a -> Maybe FieldProp
toFieldPropA h x = (defaultProp $ toXMLConstrA h x) >>= return . toFieldProp h x
-- Resolve a 'Choice' to the branch matching the value's actual data
-- constructor, recursing into the sub-terms.  NOTE(review): 'head' on
-- the gmapQ result is partial — a nullary constructor hitting the
-- Choice branch would crash; confirm that cannot happen.
toFieldProp :: (Data (DictXMLData b) a) => b -> a -> FieldProp -> FieldProp
toFieldProp (q::h) x p = let con = toConstr ctx x; ctx = (undefined::DictXMLData h ()) in
                         case p of
                           Choice ps -> head $ gmapQ ctx (\x -> toFieldProp q x (ps!!((constrIndex con)-1))) x
                           x -> x
-----------------------------------------------------------------------------------------------------
-- XML Utilities for Serialization
-----------------------------------------------------------------------------------------------------
{-
fromConstrM' :: (Data (DictR q (StateT ReadXO Maybe)) a)
=> q
-> Constr
-> ReadX a
fromConstrM' (q::q) = fromConstrM ctx (applyR q)
where ctx = undefined::DictR q (StateT ReadXO Maybe) ()
apply_gmapQ :: (Data (DictQ q r) a) => q -> a -> [r]
apply_gmapQ = gmapQ' undefined
where
gmapQ' :: (Data (DictQ q r) a) => DictQ q r () -> q -> a -> [r]
gmapQ' ctx q a = gmapQ ctx (applyQ q) a
-}
-- | Use the namespace classes to recursively read off a namespace table.
-- Collects (prefix, URI) pairs from the value and all its sub-terms
-- (dropping entries without a URI), deduplicates by URI after a
-- descending sort on prefix, and invents @tns0@, @tns1@, ... prefixes
-- for the entries whose prefix is empty.
getNamespaceTable :: Data (DictXMLData b) a => b -> a -> [(String, URI)] -> [(String, URI)]
getNamespaceTable (q::b) x onst =
    assign 0 $ nubBy (snds (==)) $ reverse $ sortBy (fsts compare) $ (++) onst $ (\x->[(k,v)|(k,Just v)<-x]) $ concat $ getURIs q x : everyone ctx (getURIs q) x
    where
    ctx = undefined :: DictXMLData b ()
    assign _ [] = []
    -- Only empty prefixes consume a generated "tns<n>" name.
    assign n (h@(p,ns):t) = if (null p) then ("tns"++show n,ns):(assign (n+1) t) else h:assign n t
-- | Produce a filter which adds a namespace table to the root of a
-- tree: one @xmlns:&lt;prefix&gt;@ attribute per entry, with the URI
-- rendered via 'show'.
namespaceTableFilter :: [(String, URI)] -> [XmlFilter]
namespaceTableFilter = map (\(p, ns)->attr ("xmlns:"++p) (txt $ show ns))
-- | Convert a list of lists of XmlFilters, with a specified root name,
-- to a list of XmlTrees: every inner filter list populates one element
-- named @n@ built over 'emptyRoot', and the per-item forests are
-- concatenated.
toTrees :: String -> [[XmlFilter]] -> XmlTrees
toTrees n f = concat $ map (\x -> etag n ++= x $ emptyRoot) f
-- Lift a binary function onto the second components of two pairs.
snds :: (b -> c -> r) -> (a, b) -> (d, c) -> r
snds f (_, x) (_, y) = f x y
-- Lift a binary function onto the first components of two pairs.
fsts :: (a -> b -> r) -> (a, c) -> (b, d) -> r
fsts f (x, _) (y, _) = f x y
-----------------------------------------------------------------------------------------------------
-- Core Serialization functions
-----------------------------------------------------------------------------------------------------
{- | The main function; given a Serialization Hook a piece of data and a Flag indicating whether we should
encode namespaces, perform serialization.
-}
serialize q x n = fst $ serializeAux q x n
-- Worker: also returns the DynamicMap carrying the inverse namespace
-- table ((URI, prefix) pairs under 'nstIKey') used during encoding.
-- Each encoded item is prefixed with the xmlns attribute filters.
serializeAux :: (Data (DictXMLData h) a, InitXMLHook h, XMLHook h a) => h -> a -> Bool -> ([[XmlFilter]], DynamicMap)
serializeAux (q::h) (x::a) n = (map ((++) (namespaceTableFilter ns)) $ xmlEncodeD (dict::DictXMLData h a) dm x, dm)
    where dm = addToDM nstI nstIKey emptyDM
          -- swapped so encoders can look a prefix up by URI
          nstI = map swap ns
          ns = if n then getNamespaceTable q x (getNamespaces q) else []
-- | A simpler version of 'serialize': apply no hook ('NullXMLHook')
-- and don't encode namespaces.
basicSerialize a = serialize NullXMLHook a False
-- | Perform serialization and apply the filters output to produce an
-- actual list of trees.  The root element name is the element name of
-- the value's data constructor, run through 'applyPrefix' with the
-- namespace table gathered during serialization.
toXML :: (XMLNamespace a, Data (DictXMLData h) a, InitXMLHook h, XMLHook h a) => h -> a -> Bool -> XmlTrees
toXML (q::h) a b = let (s, dm) = serializeAux q a b; cons = toConstr (undefined::DictXMLData h ()) a in
                   toTrees (applyPrefix dm a ((elementNames $ toXMLConstrA q a)!!((constrIndex cons)-1))) s
-- | Perform deserialization: take an XmlTree and deserialize to a type.
-- Seeds the reader state with the tree's attributes and children and
-- runs the null-hook decoder over it.
deserialize xml = do let s = RO Nothing undefined undefined [] [getAttrl xml] [getChildren xml] emptyDM
                     runReadX s (xmlDecode NullXMLHook)
-- | A test function: given a value, perform a no-namespace
-- serialization, deserialize it, and check the same thing comes out.
-- 'Right ()' on a successful round trip, 'Left' with a diagnostic
-- otherwise.  NOTE(review): 'head' here is partial — an empty
-- serialization would crash rather than produce a 'Left'.
testGXSReflection x = let des = (deserialize (head $ toTrees "" $ basicSerialize x)) in
                      if ((Just x) == des) then Right () else Left ((show x) ++ " /= " ++ (show des))
---------------------------------------------------------------------------------------------------------------------
-- ALIASES
---------------------------------------------------------------------------------------------------------------------
-- | Use Show to perform serialization of a simple type: a single text
-- node holding the 'show' rendering (the @dm@/@q@ arguments are unused).
encodeViaShow :: (Show a, (Data (DictXMLData h) a)) => DynamicMap -> h -> a -> [[XmlFilter]]
encodeViaShow dm q x = [[txt $ show x]]
-- | Use Read to perform deserialization of a simple type.
-- The previous version wrapped the partial 'read' in 'return', so
-- malformed text produced a lazily-deferred exception instead of a
-- parse failure.  Parsing through 'reads' lets the surrounding
-- 'MonadPlus' machinery backtrack via 'mzero' instead: we succeed only
-- on exactly one complete parse (trailing whitespace allowed), which
-- matches what 'read' accepted.
decodeViaRead :: (Read a, (Data (DictXMLData h) a)) => h -> ReadX a
decodeViaRead q = readText >>= \x ->
    case [ v | (v, rest) <- reads x, all isSpace rest ] of
      [v] -> return v
      _ -> mzero
| twopoint718/haifa | src/Text/XML/Serializer/Core.hs | gpl-2.0 | 17,163 | 140 | 21 | 5,304 | 4,109 | 2,217 | 1,892 | -1 | -1 |
{-# LANGUAGE ApplicativeDo #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module PlotOptions (PlotCommand(..), commands) where
import Control.Monad (forM)
import Data.Conduit ((.|))
import qualified Data.Conduit.Combinators as C
import Data.Foldable (asum)
import Data.IntervalSet (IntervalSet)
import qualified Data.IntervalSet as IS
import Data.Map (Map)
import qualified Data.Map as M
import Data.Maybe (catMaybes)
import Data.Set (Set)
import qualified Data.Set as S
import Core
import Schema
import Sql ((==.))
import qualified Sql
import Options
import BarPlot
import GlobalPlotOptions
import Heatmap
import Interesting
import Query.Dump (plotQueryDump)
import Query.Level (levelTimePlotQuery)
import Query.Time (timePlotQuery)
import Query.Variant (VariantInfoConfig(..))
data PlotCommand
= PlotBar BarPlot
| PlotHeatmap Heatmap
| ReportInteresting
{ variantFilter :: VariantFilter
, variantConfigInfo :: VariantInfoConfig
, implFilter :: ImplFilter
, shortSummary :: Bool
}
-- | Resolve a set of graph ids (given as an interval set of raw Int64
-- keys) to the keys of their default variants for @algoId@.
--
-- Fails with 'PatternFailed' when the algorithm has zero or multiple
-- default variant configs.  Graphs without a matching variant are
-- silently dropped ('catMaybes' over the 'getBy' lookups).
queryVariants :: Key Algorithm -> IntervalSet Int64 -> SqlM (Set (Key Variant))
queryVariants algoId graphs = do
    -- keep only the graph keys that fall inside the requested intervals
    gids <- Sql.selectKeys [] [] $
        C.filter (\i -> IS.member (fromSqlKey i) graphs)
        .| C.foldMap S.singleton
    -- exactly one default variant config must exist for the algorithm
    variantConfigId <- Sql.selectKeysList
        [VariantConfigAlgorithmId ==. algoId, VariantConfigIsDefault ==. Active]
        [] >>= \case
            [key] -> return key
            [] -> logThrowM . PatternFailed $
                "No default variant config for algorithm #" <> showSqlKey algoId
            _ -> logThrowM . PatternFailed . mconcat $
                [ "Multiple default variant configs for algorithm #"
                , showSqlKey algoId ]
    -- look up the (graph, config) variant for every selected graph
    variants <- forM (S.toList gids) $ \gId ->
        Sql.getBy $ UniqVariant gId variantConfigId
    return . S.fromList . map Sql.entityKey . catMaybes $ variants
barPlotParser :: Parser (BarPlotType -> SqlM BarPlot)
barPlotParser = do
getGlobalOpts <- globalOptionsParser
barPlotSlideFormat <- slideFlag
barPlotPrintStdout <- printFlag
barPlotRotateLabels <- rotateFlag
barPlotNumberedGroups <- numberedFlag
graphSet <- intervalFlag "graphs" "graph"
pure $ \barPlotType -> do
barPlotGlobalOpts@GlobalPlotOptions{..} <- getGlobalOpts
let mkBarPlot variants = BarPlot
{ barPlotVariants = variants
, ..
}
mkBarPlot <$> queryVariants globalPlotAlgorithm graphSet
where
slideFlag :: Parser Bool
slideFlag = flag False True $ mconcat
[ long "slide", help "Render 4:3 slide dimensions" ]
printFlag :: Parser Bool
printFlag = flag False True $ mconcat
[ long "print", help "Print results to stdout, rather than plotting" ]
rotateFlag :: Parser Bool
rotateFlag = flag False True $ mconcat
[ long "rotate", help "Rotate X axis labels" ]
numberedFlag :: Parser Bool
numberedFlag = flag False True $ mconcat
[ long "numbered", help "Use indices for group labels" ]
variantSelectionOption :: Parser (Key Algorithm -> SqlM VariantSelection)
variantSelectionOption = asum
[ fmap (fmap ConfigSelection) <$> variantConfigIdParser
, pure $ const (return Everything)
]
totalsHeatmapParser :: Parser (SqlM Heatmap)
totalsHeatmapParser = do
getGlobalOpts <- globalOptionsParser
getVariantSelection <- variantSelectionOption
showOptimal <- showOptimalFlag
getDatasets <- setParser datasetIdParser
pure $ do
globalOpts@GlobalPlotOptions{..} <- getGlobalOpts
TotalHeatmap globalOpts
<$> getVariantSelection globalPlotAlgorithm
<*> getDatasets <*> pure showOptimal
where
showOptimalFlag :: Parser Bool
showOptimalFlag = flag False True $ mconcat [long "show-optimal"]
-- Option parser for the per-level heatmap: global plot options plus a
-- single variant id, which is resolved against the chosen algorithm
-- only when the returned SqlM action runs.
levelsHeatmapParser :: Parser (SqlM Heatmap)
levelsHeatmapParser = do
    getGlobalOpts <- globalOptionsParser
    getVariantId <- variantIdParser
    pure $ do
        globalOpts@GlobalPlotOptions{..} <- getGlobalOpts
        LevelHeatmap globalOpts <$> getVariantId globalPlotAlgorithm
predictHeatmapParser :: Parser (SqlM Heatmap)
predictHeatmapParser = do
getGlobalOpts <- globalOptionsParser
getVariantSelection <- variantSelectionOption
getDatasets <- setParser datasetIdParser
getPredictorConfigs <- predictorConfigsParser
pure $ do
globalOpts@GlobalPlotOptions{..} <- getGlobalOpts
PredictHeatmap globalOpts
<$> getVariantSelection globalPlotAlgorithm
<*> getDatasets <*> getPredictorConfigs
commands :: CommandRoot (SqlM PlotCommand)
commands = CommandRoot
{ mainHeaderDesc = "a tool for plotting benchmark results"
, mainDesc = ""
, mainQueryDump = plotQueryDump
, mainQueryMap = plotQueryMap
, mainCommands = SubCommands
[ fmap PlotBar <$> CommandGroup CommandInfo
{ commandName = "bar"
, commandHeaderDesc = "bar plots"
, commandDesc = "Generates a bar plot of the specified runs/graphs."
}
[ SingleCommand CommandInfo
{ commandName = "levels"
, commandHeaderDesc = "plot level times for a graph"
, commandDesc =
"Generate a bar plot of the time per BFS level."
}
$ barPlotParser <*> pure Levels
, SingleCommand CommandInfo
{ commandName = "totals"
, commandHeaderDesc = "plot total times for a set of graphs"
, commandDesc =
"Generate a bar plot of the total implementation time for a \
\set of graphs."
}
$ barPlotParser <*> (Totals <$> normaliseFlag
<*> useGraphIdFlag
<*> fileNameFlag "times-totals.pdf")
, SingleCommand CommandInfo
{ commandName = "vs-optimal"
, commandHeaderDesc =
"plot total times for a set of graphs against the optimal"
, commandDesc =
"Generate a bar plot of the total implementation time \
\compared to optimal for a set of graphs."
}
$ barPlotParser <*>
(VsOptimal <$> normaliseFlag
<*> useGraphIdFlag
<*> fileNameFlag "times-vs-optimal.pdf")
]
, fmap PlotHeatmap <$> CommandGroup CommandInfo
{ commandName = "heatmap"
, commandHeaderDesc = "heatmap plots"
, commandDesc =
"Generate a heatmap of the runtimes per variant/implementation."
}
[ SingleCommand CommandInfo
{ commandName = "total"
, commandHeaderDesc = "plot heatmap for all variants"
, commandDesc =
"Generate a heatmap of the implementation runtimes for all \
\variants."
}
$ totalsHeatmapParser
, SingleCommand CommandInfo
{ commandName = "levels"
, commandHeaderDesc = "plot heatmap for a variant"
, commandDesc =
"Generate a heatmap of the implementation runtimes for each \
\level of a specfic variant."
}
$ levelsHeatmapParser
, SingleCommand CommandInfo
{ commandName = "predict"
, commandHeaderDesc = "plot heatmap for a predictor"
, commandDesc =
"Generate a heatmap of the implementation runtimes for all \
\variants, including predicted switching runtimes."
}
$ predictHeatmapParser
]
, SingleCommand CommandInfo
{ commandName = "report"
, commandHeaderDesc = "report interesting variants"
, commandDesc = "Highlights variants of interest for various criteria"
}
$ reportParser
]
}
where
fileNameFlag :: FilePath -> Parser FilePath
fileNameFlag def = strArgument $ mconcat
[ metavar "FILE", value def
, help "Path of the output PDF"
]
normaliseFlag :: Parser Bool
normaliseFlag = flag False True $ mconcat
[ long "normalise"
, help "Normalise bars to slowest implementation"
]
useGraphIdFlag :: Parser Bool
useGraphIdFlag = flag False True $ mconcat
[ long "use-graph-ids"
, help "Label groups with the graph's id, rather than name"
]
summaryFlag :: Parser Bool
summaryFlag = switch $ mconcat
[ long "summary"
, help "Print only a short summary of the interesting variants."
]
reportParser :: Parser (SqlM PlotCommand)
reportParser = do
getVariantInfoConfig <- variantInfoConfigParser
getVariantConfigId <- optional variantConfigIdParser
minEdges <- optional $ minPropParser "edge" "edges"
minVertices <- optional $ minPropParser "vertex" "vertices"
filterFun <- intMapFilter "impl-set" "implementation" <|> pure id
summary <- summaryFlag
pure $ do
cfg <- getVariantInfoConfig
variantConfigId <- sequence $
getVariantConfigId <*> pure (variantInfoAlgorithm cfg)
let vFilter = VFilter
{ filterVariantConfigId = variantConfigId
, filterEdgeSize = minEdges
, filterVertexSize = minVertices
}
return $ ReportInteresting vFilter cfg filterFun summary
where
minPropParser :: String -> String -> Parser Int
minPropParser name desc = option auto $ mconcat
[ long $ "min-" <> name <> "-count"
, help $ "Minimum number of " <> desc <> " required for a graph \
\to be considered."
]
plotQueryMap :: Map String (Parser DebugQuery)
plotQueryMap = M.fromList
[ nameDebugQuery "timePlotQuery" . Compose $ do
getAlgorithmId <- algorithmIdParser
getPlatformId <- platformIdParser
getCommit <- commitIdParser
getVariants <- variantsParser
pure $ do
algoId <- getAlgorithmId
timePlotQuery algoId
<$> getPlatformId <*> getCommit algoId <*> getVariants algoId
, nameDebugQuery "levelTimePlotQuery" . Compose $ do
getAlgorithmId <- algorithmIdParser
getPlatformId <- platformIdParser
getCommit <- commitIdParser
getVariantId <- variantIdParser
pure $ do
algoId <- getAlgorithmId
levelTimePlotQuery
<$> getPlatformId <*> getCommit algoId <*> getVariantId algoId
]
where
variantsParser :: Parser (Key Algorithm -> SqlM (Set (Key Variant)))
variantsParser = do
variants <- some variantIdParser
pure $ \algoId -> S.fromList <$> traverse ($algoId) variants
| merijn/GPU-benchmarks | benchmark-analysis/plot-src/PlotOptions.hs | gpl-3.0 | 10,941 | 0 | 16 | 3,270 | 2,192 | 1,125 | 1,067 | 239 | 3 |
{-# LANGUAGE TemplateHaskell #-}
import Paths_DefendTheKing (getDataFileName)
import Chess
import Draw
import Font
import GameLogic (Move(..), PartialData(..))
import Intro
import NetEngine
import NetMatching
import Networking
import Control.Applicative
import Control.Category
import Control.FilterCategory
import Control.Monad ((>=>), forM_, guard, join)
import Data.ADT.Getters
import Data.Function (fix)
import Data.List (foldl')
import Data.Map (Map, findWithDefault, insert)
import Data.Maybe (fromMaybe, isNothing)
import Data.Monoid
import Data.Time.Clock
import Data.Traversable (sequenceA)
import FRP.Peakachu
import FRP.Peakachu.Program
import FRP.Peakachu.Backend.GLUT
import FRP.Peakachu.Backend.GLUT.Getters
import FRP.Peakachu.Backend.StdIO
import FRP.Peakachu.Backend.Time
import Graphics.UI.GLUT hiding (Program, Exit)
import Network.Socket (SockAddr)
import System.Random (randomRIO)
import Prelude hiding ((.), id)
-- | Identifiers for the GLUT timers this program arms via @SetTimer@.
data MyTimers
  = TimerMatching   -- ^ retry timer for the peer-matching phase
  | TimerGameIter   -- ^ drives game-logic iterations
  | TimerTransmit   -- ^ drives periodic network transmission
  deriving Show
-- Generates the gTimerMatching/gTimerGameIter/gTimerTransmit getters.
$(mkADTGetters ''MyTimers)
-- | What a move-rate limit applies to: a single piece (keyed by its
-- board position) or all moves globally.
data MoveLimitType
  = SinglePieceLimit BoardPos
  | GlobalMoveLimit
  deriving (Eq, Ord, Show)
-- | The single event type flowing through the whole FRP program.
-- Naming convention: @I*@ are backend inputs, @O*@ are backend outputs,
-- @A*@ are internal application events; 'ALoopback' re-injects events
-- so that mutually dependent programs (board/moves) can feed back.
data MyNode
  = IGlut UTCTime (GlutToProgram MyTimers)
  | IUdp (UdpToProg ())
  | IHttp (Maybe String)
  | OGlut (ProgramToGlut MyTimers)
  | OUdp (ProgToUdp ())
  | OHttp String
  | OPrint String
  | Exit
  | AText String
  | ABoard Board
  | ASelection Move
  | AQueueMove Move
  | AMoves [Move]
  | AResetBoard
  | ASide (Maybe PieceSide)
  | AMatching [SockAddr]
  | AMoveLimits (Map MoveLimitType Integer)
  | AGameIteration Integer
  | ALoopback MyNode
  | ADrawTimes UTCTime
  | AReadyForGame
-- Generates one g<Constructor> getter per constructor above.
$(mkADTGetters ''MyNode)
-- | Total minimum-by: the element with the smallest key under @f@,
-- or 'Nothing' for an empty list. On ties the earlier element wins.
maybeMinimumOn :: Ord b => (a -> b) -> [a] -> Maybe a
maybeMinimumOn f =
  foldl' pick Nothing
  where
    pick acc y = Just $ case acc of
      Nothing -> y
      Just best -> if f y < f best then y else best
-- | Squared euclidean distance between two screen positions.
-- No sqrt is taken: callers only ever compare distances, and squaring
-- preserves that ordering. (@join (*) d@ is @d*d@.)
distance :: DrawPos -> DrawPos -> GLfloat
distance (xA, yA) (xB, yB) =
  join (*) (xA-xB) + join (*) (yA-yB)
-- | For the piece at @src@, pick the legal destination whose on-screen
-- position is closest to the mouse, together with the board after that
-- move. 'Nothing' when there is no piece at @src@ or it has no moves.
chooseMove :: Board -> BoardPos -> DrawPos -> Maybe (BoardPos, Board)
chooseMove board src drawPos =
  join $
  maybeMinimumOn (distance drawPos . board2screen . fst) . possibleMoves board <$>
  pieceAt board src
-- | A program tracking the up\/down state of one key, starting at 'Up'
-- and updating on every matching keyboard\/mouse event.
keyState :: Key -> Program (GlutToProgram a) KeyState
keyState key =
  (lstPs . Just) Up (gKeyboardMouseEvent >=> f)
  where
    -- keep only events for this key, yielding its new state
    f (k, s, _, _) = do
      guard $ k == key
      return s
-- | Tee a program: run it merged alongside the identity, so the input
-- stream passes through unchanged in addition to @p@'s output.
addP :: (Category cat, Monoid (cat a a)) => cat a a -> cat a a
addP p = id `mappend` p
-- | Lift a partial getter into a filtering arrow: outputs only the
-- 'Just' results.
atP :: FilterCategory cat => (a -> Maybe b) -> cat a b
atP = mapMaybeC
-- | Repeat a monoidal value forever: @x <> x <> x <> ...@
-- (generalizes 'cycle' from lists to any lazy-enough 'Monoid').
genericCycle :: Monoid a => a -> a
genericCycle x = cycled
  where
    cycled = x `mappend` cycled
-- | Getter for GLUT events, discarding the timestamp 'IGlut' carries.
gGlut :: MyNode -> Maybe (GlutToProgram MyTimers)
gGlut = (fmap . fmap) snd gIGlut
-- | Wire 'netMatching' into the application's node type: translate
-- incoming http responses, retry-timer ticks and udp addresses into
-- matching inputs, and matching outputs back into http requests, a
-- retry timer, a log line and the final 'AMatching' result.
matching :: Program MyNode MyNode
matching =
  mconcat
  [ OHttp . fst <$> atP gMOHttp
  , OGlut (SetTimer 1000 TimerMatching) <$ atP gMOSetRetryTimer
  , OPrint . ("Matching:" ++) . (++ "\n") . show <$> atP gMatchingResult
  , AMatching . fst <$> atP gMatchingResult
  ]
  . netMatching
  . mconcat
  [ arrC (`MIHttp` ()) . atP gIHttp
  , MITimerEvent () <$ atP (gGlut >=> gTimerEvent >=> gTimerMatching)
  , uncurry DoMatching <$> atP (gIUdp >=> gUdpSocketAddresses)
  ]
-- | Wire 'netEngine' into the application's node type: feed it queued
-- moves, timer ticks and received udp packets; fan its outputs out to
-- confirmed moves, timers, outgoing packets and game-state resets.
neteng :: Integer -> Program MyNode MyNode
neteng myPeerId =
  mconcat
  [ OGlut (SetTimer 25 TimerTransmit) <$
    mconcat
    [ atP (gGlut >=> gTimerEvent >=> gTimerTransmit)
    , singleValueP
    ]
  , mconcat
    [ AMoves <$> atP gNEOMove
    , OGlut (SetTimer 50 TimerGameIter) <$ atP gNEOSetIterTimer
    , OUdp . ($ ()) . uncurry SendTo <$> atP gNEOPacket
    , ASide . Just . pickSide <$> atP gNEOPeerConnected
    , AResetBoard <$ atP gNEOPeerConnected
    , AGameIteration <$> atP gNEOGameIteration
    ]
    . netEngine myPeerId
    . mconcat
    [ NEIMatching <$> atP gAMatching
    , prepMoveToNe <$> atP gAQueueMove
    , NEIIterTimer <$ atP (gGlut >=> gTimerEvent >=> gTimerGameIter)
    , NEITransmitTimer <$ atP (gGlut >=> gTimerEvent >=> gTimerTransmit)
    , (\(a, b, _) -> NEIPacket a b) <$> atP (gIUdp >=> gRecvFrom)
    ]
  ]
  where
    prepMoveToNe move = NEIMove (moveIter move) [move]
    -- the peer with the smaller id plays Black, so the two sides differ
    pickSide peerId
      | myPeerId < peerId = Black
      | otherwise = White
-- | Render a string as translucent purple triangles using the vector
-- font (lighting disabled; coordinates halved to fit the view).
drawText :: DefendFont -> String -> Image
drawText font text =
  Image $ do
    lighting $= Disabled
    color $ Color4 0.25 0 0.5 (0.5 :: GLfloat)
    renderPrimitive Triangles
      . (forM_ . join) (renderText font text)
      $ \(x, y) ->
      vertex $ Vertex4 (x/2) (y/2) 0 1
-- | Getter matching only 'Nothing' (mapped to @()@); any 'Just' is rejected.
gNothing :: Maybe a -> Maybe ()
gNothing = maybe (Just ()) (const Nothing)
-- | The main game program: draws the board and intro text, quits on 'q',
-- runs the matching phase (suppressed once 'AReadyForGame' fires) and the
-- network engine, and tracks selection, queued moves, move limits and the
-- board itself in a feedback loop.
game :: Integer -> DefendFont -> Program MyNode MyNode
game myPeerId font =
  takeWhileP (isNothing . gExit)
  . mconcat
  [ id
  , OGlut . DrawImage . mconcat
    <$ atP gADrawTimes
    <*> sequenceA
    [ pure glStyle
    , draw font
      <$> lstP gABoard
      <*> lstP gASelection
      <*> mouseMotion
      <*> lstP gASide
      <*> lstP gAGameIteration
    , drawText font <$> lstP gAText
    , intro font . atP gADrawTimes
    ]
  , Exit <$ atP (gGlut >=> gKeyboardMouseEvent >=> quitButton)
  , OPrint "Got Udp Addr\n" <$ atP (gIUdp >=> gUdpSocketAddresses)
  ]
  -- loopback because board affects moves and vice versa
  . loopbackP (
    lb
    . addP (neteng myPeerId)
    . addP calculateLimits
    . addP calculateMoves
    . addP calculateSelection
    . addP calculateBoard
    )
  . addP emptyP
  . addP (
    withAppendProgram2
    mappend
    ( atP (const Nothing)
      . takeWhileP (isNothing . gAReadyForGame)
    )
    ( mconcat
      [ matching
      , OUdp (CreateUdpListenSocket stunServer ()) <$ singleValueP
      ]
    )
    )
  . mconcat
  [ id
  , drawTimes
  , ASide Nothing <$ singleValueP
  , mconcat
    [ AText <$> atP id
    , AReadyForGame <$ atP gNothing
    , AText "" <$ atP gNothing
    ]
    . arrC head
    . takeWhileP (not . null)
    . scanlP (flip (const tail)) instructions
    . atP (gGlut >=> gKeyboardMouseEvent >=> space)
  ]
  where
    -- intro screens, advanced with the space key; Nothing ends the intro
    instructions =
      map Just
      [ "welcome.\nget ready to\ndefend\nthe king.\npress space."
      , "drag and drop\nsome pieces.\nhit spacebar\nto resume."
      , "there are no turns.\nwhen the cursor\nis green\n"
        ++ "you can move.\nspace for more."
      , "when you\ndefend\nthe king\nyou will not\n"
        ++ "see the whole\nbattlefield.\nspace to see\nmore instructions."
      , "you will only\nsee the squares\nin reach of\n"
        ++ "your army.\nspace for\nnext message.."
      , "press space again\nto battle\nagainst a real\nking\n"
        ++ "like you.\nexcept evil."
      ] ++ [Nothing]
    -- iterations a piece / the player must wait between moves
    globalMoveLimit = 20
    pieceMoveLimit = 50
    -- throttle redraw events to at most one per 30ms
    drawTimes =
      ADrawTimes <$> atP snd
      . scanlP drawTimeStep (Nothing, Nothing)
      . arrC fst . atP gIGlut
    drawTimeStep (Nothing, _) now = (Just now, Just now)
    drawTimeStep (Just prev, _) now
      | diffUTCTime now prev > 0.03 = (Just now, Just now)
      | otherwise = (Just prev, Nothing)
    -- restart the given program whenever AResetBoard loops back
    resetOnResetBoard prog =
      runAppendProg . genericCycle $
      AppendProg prog . (AppendProg . takeWhileP) (isNothing . (gALoopback >=> gAResetBoard))
    calculateBoard =
      resetOnResetBoard $
      ABoard <$> scanlP (foldl doMove) chessStart
      . atP (gALoopback >=> gAMoves)
    -- split events: loopback copies to the left, pass-through to the right
    lb =
      mconcat
      [ Left . ALoopback <$> filterC (isNothing . gALoopback)
      , Right <$> id
      ]
    space (Char ' ', Down, _, _) = Just ()
    space _ = Nothing
    quitButton (Char 'q', _, _, _) = Just ()
    quitButton _ = Nothing
    mouseMotion = (lstPs . Just) (0, 0) (gGlut >=> gMouseMotionEvent)
    -- queue the current selection as a move on mouse-button release
    calculateMoves =
      AQueueMove
      <$ (atP gUp <* atP gDown . delayP (1 :: Int))
      . keyState (MouseButton LeftButton) . lstP gGlut
      <*> delayP (1 :: Int) . lstP gASelection
    rid = genericFlattenC
    calculateSelection =
      (rid .) $ aSelection
      <$> lstP gABoard
      <*> arrC snd . scanlP drag (Up, Move (0, 0) White 0) .
        ((,)
        <$> keyState (MouseButton LeftButton) . lstP gGlut
        <*> (calcMoveIter
          <$> rid . (selectionSrc
            <$> lstP gABoard
            <*> lstP (gALoopback >=> gASide)
            <*> mouseMotion
            )
          <*> lstP (gALoopback >=> gAMoveLimits)
          )
        )
      <*> mouseMotion
    calculateLimits =
      resetOnResetBoard $
      AMoveLimits <$> scanlP updateLimits mempty
      . ( (,)
        <$> atP gAQueueMove
        <*> lstP (gALoopback >=> gAGameIteration)
        )
    updateLimits prev (move, iter) =
      insert GlobalMoveLimit (iter + globalMoveLimit)
      . insert (SinglePieceLimit (moveDst move)) (iter + pieceMoveLimit)
      $ prev
    aSelection board move pos =
      ASelection . move . fst <$>
      chooseMove board (moveSrc (getPartial move)) pos
    -- earliest iteration at which this move may run, given the limits
    calcMoveIter move limits =
      move
      . max (f GlobalMoveLimit)
      . f . SinglePieceLimit
      . moveSrc . getPartial $ move
      where
        f k = findWithDefault 0 k limits
    -- apply a move if it is legal for the moving player; else keep board
    doMove board move =
      fromMaybe board $ do
        piece <- pieceAt board . moveSrc $ move
        guard $ pieceSide piece == movePlayer move
        lookup (moveDst move) . possibleMoves board $ piece
    -- pick the draggable piece nearest the cursor (own side only)
    selectionSrc board side pos =
      fmap (Move <$> piecePos <*> pieceSide)
      . maybeMinimumOn (distance pos . board2screen . piecePos)
      . filter ((&&)
        <$> (/= Just False) . (<$> side) . (==) . pieceSide
        <*> canMove board)
      . boardPieces $ board
    canMove board = not . null . possibleMoves board
    drag (Down, pos) (Down, _) = (Down, pos)
    drag _ x = x
-- more options at http://www.voip-info.org/wiki/view/STUN
-- | STUN server used to discover our public address for UDP matching.
stunServer :: String
stunServer = "stun.ekiga.net"
-- | Configure GLUT, load the vector font, pick a random 128-bit peer id,
-- and run the FRP 'game' program against the combined GLUT/HTTP/UDP/stdout
-- backends.
main :: IO ()
main = do
  initialWindowSize $= Size 600 600
  initialDisplayCapabilities $=
    [ With DisplayRGB
    , Where DisplaySamples IsAtLeast 2
    ]
  font <- loadFont <$> (readFile =<< getDataFileName "data/defend.font")
  peerId <- randomRIO (0, 2^(128::Int))
  let
    backend =
      mconcat
      [ uncurry IGlut <$> getTimeB . glut . atP gOGlut
      , IHttp . fst <$> httpGetB . arrC (flip (,) ()) . atP gOHttp
      , IUdp <$> udpB . atP gOUdp
      , atP (const Nothing) . stdoutB . atP gOPrint
      ]
  runProgram backend (game peerId font)
| yairchu/defend | src/defend.hs | gpl-3.0 | 10,204 | 118 | 17 | 2,612 | 3,557 | 1,741 | 1,816 | 300 | 5 |
{-# LANGUAGE InstanceSigs #-}
module ReaderT where
-- | A reader monad transformer: a computation that reads a shared
-- environment @r@ and yields its result in an inner monad @m@.
newtype ReaderT r m a =
  ReaderT { runReaderT :: r -> m a }
-- Map over the eventual result: apply @f@ inside the inner functor,
-- after supplying the environment.
instance Functor m => Functor (ReaderT r m) where
  fmap :: (a -> b) -> ReaderT r m a -> ReaderT r m b
  fmap f (ReaderT rma) = ReaderT $ \r -> f <$> rma r
-- Both sides receive the same environment; their effects combine in @m@.
instance Applicative m => Applicative (ReaderT r m) where
  pure = ReaderT . const . pure
  (ReaderT rmf) <*> (ReaderT rma) =
    ReaderT $ \r -> rmf r <*> rma r
-- Run the first computation, then feed its result (and the same
-- environment) to the continuation.
instance Monad m => Monad (ReaderT r m) where
  return = pure
  (>>=) :: ReaderT r m a -> (a -> ReaderT r m b) -> ReaderT r m b
  (ReaderT rma) >>= f =
    ReaderT $ \r -> rma r >>= \a -> runReaderT (f a) r
| nirvinm/Solving-Exercises-in-Haskell-Programming-From-First-Principles | MonadTransformers/src/ReaderT.hs | gpl-3.0 | 678 | 0 | 12 | 195 | 319 | 162 | 157 | 17 | 0 |
module NormalizerSpec (spec) where
import Test.Hspec
import Language.Mulang.Ast
import Language.Mulang.Ast.Operator
import Language.Mulang.Parsers.Haskell (hs)
import Language.Mulang.Parsers.Java (java)
import Language.Mulang.Parsers.JavaScript (js)
import Language.Mulang.Parsers.Python (py)
import Language.Mulang.Normalizers.Java (javaNormalizationOptions)
import Language.Mulang.Normalizers.Python (pythonNormalizationOptions)
import Language.Mulang.Normalizers.Haskell (haskellNormalizationOptions)
import Language.Mulang.Transform.Normalizer
-- Shorthands: parse source text with a language's parser, then apply
-- that language's default normalization options.
njava = normalize javaNormalizationOptions . java
npy = normalize pythonNormalizationOptions . py
nhs = normalize haskellNormalizationOptions . hs
-- | Exercises each normalizer option in isolation, their interaction,
-- and the per-language default normalizations.
-- Fixes: typo'd test descriptions ("implicit retuns" -> "implicit returns",
-- "duplicates names" -> "duplicate names").
spec :: Spec
spec = do
  describe "can convert dicts" $ do
    let options = unnormalized { convertObjectIntoDict = True }
    let n = normalize options
    it "converts dict and its var contents" $ do
      n (MuObject (Variable "x" (MuNumber 5))) `shouldBe` (MuDict (Arrow (MuString "x") (MuNumber 5)))
      n (MuObject (Sequence [Variable "x" (MuNumber 5), Variable "y" (MuNumber 6)])) `shouldBe` (MuDict (Sequence [
          Arrow (MuString "x") (MuNumber 5),
          Arrow (MuString "y") (MuNumber 6)]))
  describe "can trim code" $ do
    let options = unnormalized { trimSequences = True }
    let n = normalize options
    it "removes nones from sequences" $ do
      n (Object "X" (Sequence [None, None, MuNumber 5, None])) `shouldBe` (Object "X" (Sequence [MuNumber 5]))
    it "does not remove nones from literals" $ do
      n (MuList [None, None, MuNumber 5, None]) `shouldBe` (MuList [None, None, MuNumber 5, None])
  describe "can sort commutative operations" $ do
    let options = unnormalized { sortCommutativeApplications = True }
    let n = normalize options
    it "sorts references" $ do
      n (Application (Primitive Equal) [Reference "a", Reference "b"]) `shouldBe` n (Application (Primitive Equal) [Reference "b", Reference "a"])
      n (Application (Primitive Max) [Reference "a", Reference "b"]) `shouldBe` n (Application (Primitive Max) [Reference "b", Reference "a"])
      n (Application (Primitive Similar) [Reference "a", Reference "b"]) `shouldBe` n (Application (Primitive Similar) [Reference "b", Reference "a"])
  describe "can compact code" $ do
    let options = unnormalized { compactSequences = True }
    let n = normalize options
    it "compacts sequences" $ do
      n (Object "X" (Sequence [MuNumber 5])) `shouldBe` (Object "X" (MuNumber 5))
      n (Object "X" (Sequence [])) `shouldBe` (Object "X" None)
      n (Object "X" (Sequence [MuNumber 5, MuNumber 6])) `shouldBe` (Object "X" (Sequence [MuNumber 5, MuNumber 6]))
      n (Object "X" (Sequence [None])) `shouldBe` (Object "X" None)
      n (Object "X" (Sequence [None, None])) `shouldBe` (Object "X" (Sequence [None, None]))
      n (Object "X" (Sequence [None, None, MuNumber 5])) `shouldBe` (Object "X" (Sequence [None, None, MuNumber 5]))
  describe "can trim and compact code" $ do
    let options = unnormalized { trimSequences = True, compactSequences = True }
    let n = normalize options
    it "trims first, compacts later" $ do
      n (Object "X" (Sequence [MuNumber 5])) `shouldBe` (Object "X" (MuNumber 5))
      n (Object "X" (Sequence [])) `shouldBe` (Object "X" None)
      n (Object "X" (Sequence [MuNumber 5, MuNumber 6])) `shouldBe` (Object "X" (Sequence [MuNumber 5, MuNumber 6]))
      n (Object "X" (Sequence [None])) `shouldBe` (Object "X" None)
      n (Object "X" (Sequence [None, None])) `shouldBe` (Object "X" None)
      n (Object "X" (Sequence [None, None, MuNumber 5])) `shouldBe` (Object "X" (MuNumber 5))
  describe "can insert implicit returns" $ do
    let options = unnormalized { insertImplicitReturn = True }
    let n = normalize options
    it "does not insert return in single literal statement" $ do
      n (py "def x(): x = 1") `shouldBe` SimpleProcedure "x" [] (Assignment "x" (MuNumber 1.0))
    it "inserts return in single literal expression" $ do
      n (py "def x(): 3") `shouldBe` SimpleProcedure "x" [] (Return (MuNumber 3.0))
    it "does not insert return in empty block" $ do
      n (SimpleFunction "x" [] None) `shouldBe` (SimpleFunction "x" [] None)
    it "does not insert return in singleton nil block" $ do
      let expression = SimpleFunction "x" [] MuNil
      n expression `shouldBe` expression
    -- NOTE(review): description says "inserts" but the expectation is the
    -- unchanged expression; confirm the intended normalizer behavior.
    it "inserts return in non-singleton nil block" $ do
      let expression = SimpleFunction "x" [] (Sequence [Print (MuString "hello"), MuNil])
      n expression `shouldBe` expression
    it "inserts return in last literal expression" $ do
      n (js "function x() { let x = 1; x += 1; x }") `shouldBe` SimpleProcedure "x" [] (Sequence [
          Variable "x" (MuNumber 1.0),
          Assignment "x" (Application (Primitive Plus) [Reference "x",MuNumber 1.0]),
          Return (Reference "x")])
  describe "sorts declarations by default" $ do
    it "sorts functions on Python" $ do
      npy "def foo():\n  return 2\n\ndef bar():\n  return 1\n\n" `shouldBe` py "def bar():\n  return 1\n\ndef foo():\n  return 2\n\n"
    it "sorts classes on Java" $ do
      njava "class Foo {} class Bar {}" `shouldBe` java "class Bar {} class Foo {}"
    it "sorts functions on haskell" $ do
      nhs "g 2 = 2\nf 1 = 1" `shouldBe` hs "f 1 = 1\ng 2 = 2"
    it "sorts declarations on haskell even when there are variables" $ do
      nhs "g 2 = 2\nf 1 = 1\nn = 1" `shouldBe` hs "f 1 = 1\ng 2 = 2\nn = 1"
    it "sorts functions on javascript if there are only functions" $ do
      js "function f() {} function g() {}" `shouldBe` js "function g() {} function f() {}"
    it "doesn't sort functions on javascript if there are duplicate names" $ do
      js "function f() { return 1 } function g() {} function f() { return 2 }" `shouldNotBe` js "function g() {} function f() { return 2 } function f() { return 1 } "
    it "doesn't sort declarations on javascript if there are also statements" $ do
      js "function f() {}; let x = 2; function g() {}" `shouldNotBe` js "function g() {}; let x = 2; function f() {}"
| mumuki/mulang | spec/NormalizerSpec.hs | gpl-3.0 | 6,817 | 0 | 22 | 1,999 | 2,051 | 1,022 | 1,029 | 94 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Datastore.Projects.Lookup
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Looks up entities by key.
--
-- /See:/ <https://cloud.google.com/datastore/ Google Cloud Datastore API Reference> for @datastore.projects.lookup@.
module Network.Google.Resource.Datastore.Projects.Lookup
(
-- * REST Resource
ProjectsLookupResource
-- * Creating a Request
, projectsLookup
, ProjectsLookup
-- * Request Lenses
, plXgafv
, plUploadProtocol
, plPp
, plAccessToken
, plUploadType
, plPayload
, plBearerToken
, plProjectId
, plCallback
) where
import Network.Google.Datastore.Types
import Network.Google.Prelude
-- | A resource alias for @datastore.projects.lookup@ method which the
-- 'ProjectsLookup' request conforms to.
-- Servant-style route: POST v1\/projects\/{projectId}:lookup with a JSON
-- 'LookupRequest' body and a JSON 'LookupResponse' result.
type ProjectsLookupResource =
     "v1" :>
       "projects" :>
         CaptureMode "projectId" "lookup" Text :>
           QueryParam "$.xgafv" Xgafv :>
             QueryParam "upload_protocol" Text :>
               QueryParam "pp" Bool :>
                 QueryParam "access_token" Text :>
                   QueryParam "uploadType" Text :>
                     QueryParam "bearer_token" Text :>
                       QueryParam "callback" Text :>
                         QueryParam "alt" AltJSON :>
                           ReqBody '[JSON] LookupRequest :>
                             Post '[JSON] LookupResponse
-- | Looks up entities by key.
--
-- /See:/ 'projectsLookup' smart constructor.
-- Request record: one field per query parameter of the route above,
-- plus the required payload and project id. Accessed via the lenses below.
data ProjectsLookup = ProjectsLookup'
    { _plXgafv :: !(Maybe Xgafv)
    , _plUploadProtocol :: !(Maybe Text)
    , _plPp :: !Bool
    , _plAccessToken :: !(Maybe Text)
    , _plUploadType :: !(Maybe Text)
    , _plPayload :: !LookupRequest
    , _plBearerToken :: !(Maybe Text)
    , _plProjectId :: !Text
    , _plCallback :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ProjectsLookup' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'plXgafv'
--
-- * 'plUploadProtocol'
--
-- * 'plPp'
--
-- * 'plAccessToken'
--
-- * 'plUploadType'
--
-- * 'plPayload'
--
-- * 'plBearerToken'
--
-- * 'plProjectId'
--
-- * 'plCallback'
-- Smart constructor: only the payload and project id are required; all
-- optional parameters start as 'Nothing'. Note pretty-printing ('_plPp')
-- defaults to 'True'.
projectsLookup
    :: LookupRequest -- ^ 'plPayload'
    -> Text -- ^ 'plProjectId'
    -> ProjectsLookup
projectsLookup pPlPayload_ pPlProjectId_ =
    ProjectsLookup'
    { _plXgafv = Nothing
    , _plUploadProtocol = Nothing
    , _plPp = True
    , _plAccessToken = Nothing
    , _plUploadType = Nothing
    , _plPayload = pPlPayload_
    , _plBearerToken = Nothing
    , _plProjectId = pPlProjectId_
    , _plCallback = Nothing
    }
-- Lenses over the request's fields, one per record field above.
-- | V1 error format.
plXgafv :: Lens' ProjectsLookup (Maybe Xgafv)
plXgafv = lens _plXgafv (\ s a -> s{_plXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
plUploadProtocol :: Lens' ProjectsLookup (Maybe Text)
plUploadProtocol
  = lens _plUploadProtocol
      (\ s a -> s{_plUploadProtocol = a})
-- | Pretty-print response.
plPp :: Lens' ProjectsLookup Bool
plPp = lens _plPp (\ s a -> s{_plPp = a})
-- | OAuth access token.
plAccessToken :: Lens' ProjectsLookup (Maybe Text)
plAccessToken
  = lens _plAccessToken
      (\ s a -> s{_plAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
plUploadType :: Lens' ProjectsLookup (Maybe Text)
plUploadType
  = lens _plUploadType (\ s a -> s{_plUploadType = a})
-- | Multipart request metadata.
plPayload :: Lens' ProjectsLookup LookupRequest
plPayload
  = lens _plPayload (\ s a -> s{_plPayload = a})
-- | OAuth bearer token.
plBearerToken :: Lens' ProjectsLookup (Maybe Text)
plBearerToken
  = lens _plBearerToken
      (\ s a -> s{_plBearerToken = a})
-- | The ID of the project against which to make the request.
plProjectId :: Lens' ProjectsLookup Text
plProjectId
  = lens _plProjectId (\ s a -> s{_plProjectId = a})
-- | JSONP
plCallback :: Lens' ProjectsLookup (Maybe Text)
plCallback
  = lens _plCallback (\ s a -> s{_plCallback = a})
-- Executes the request: threads every field into the servant client for
-- 'ProjectsLookupResource'; requires cloud-platform or datastore scope.
instance GoogleRequest ProjectsLookup where
        type Rs ProjectsLookup = LookupResponse
        type Scopes ProjectsLookup =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/datastore"]
        requestClient ProjectsLookup'{..}
          = go _plProjectId _plXgafv _plUploadProtocol
              (Just _plPp)
              _plAccessToken
              _plUploadType
              _plBearerToken
              _plCallback
              (Just AltJSON)
              _plPayload
              datastoreService
          where go
                  = buildClient (Proxy :: Proxy ProjectsLookupResource)
                      mempty
| rueshyna/gogol | gogol-datastore/gen/Network/Google/Resource/Datastore/Projects/Lookup.hs | mpl-2.0 | 5,503 | 0 | 19 | 1,430 | 937 | 543 | 394 | 132 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.OpsWorks.AssignInstance
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Assign a registered instance to a custom layer. You cannot use this action
-- with instances that were created with AWS OpsWorks.
--
-- Required Permissions: To use this action, an IAM user must have a Manage
-- permissions level for the stack or an attached policy that explicitly grants
-- permissions. For more information on user permissions, see <http://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html Managing UserPermissions>.
--
-- <http://docs.aws.amazon.com/opsworks/latest/APIReference/API_AssignInstance.html>
module Network.AWS.OpsWorks.AssignInstance
(
-- * Request
AssignInstance
-- ** Request constructor
, assignInstance
-- ** Request lenses
, aiInstanceId
, aiLayerIds
-- * Response
, AssignInstanceResponse
-- ** Response constructor
, assignInstanceResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.OpsWorks.Types
import qualified GHC.Exts
-- Request record: the registered instance to assign, and the (custom)
-- layer ids to assign it to.
data AssignInstance = AssignInstance
    { _aiInstanceId :: Text
    , _aiLayerIds   :: List "LayerIds" Text
    } deriving (Eq, Ord, Read, Show)
-- | 'AssignInstance' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'aiInstanceId' @::@ 'Text'
--
-- * 'aiLayerIds' @::@ ['Text']
--
-- Smart constructor: starts with an empty layer list; populate it via
-- the 'aiLayerIds' lens before sending.
assignInstance :: Text -- ^ 'aiInstanceId'
               -> AssignInstance
assignInstance p1 = AssignInstance
    { _aiInstanceId = p1
    , _aiLayerIds   = mempty
    }
-- | The instance ID.
aiInstanceId :: Lens' AssignInstance Text
aiInstanceId = lens _aiInstanceId (\s a -> s { _aiInstanceId = a })
-- | The layer ID, which must correspond to a custom layer. You cannot assign a
-- registered instance to a built-in layer.
-- (The '_List' iso exposes the wire 'List' wrapper as a plain list.)
aiLayerIds :: Lens' AssignInstance [Text]
aiLayerIds = lens _aiLayerIds (\s a -> s { _aiLayerIds = a }) . _List
-- The service returns an empty body for this call, so the response type
-- carries no fields.
data AssignInstanceResponse = AssignInstanceResponse
    deriving (Eq, Ord, Read, Show, Generic)
-- | 'AssignInstanceResponse' constructor.
assignInstanceResponse :: AssignInstanceResponse
assignInstanceResponse = AssignInstanceResponse
-- Wire encoding: the OpsWorks JSON-RPC style puts everything in the
-- JSON body; path is "/" and the query string is empty.
instance ToPath AssignInstance where
    toPath = const "/"
instance ToQuery AssignInstance where
    toQuery = const mempty
instance ToHeaders AssignInstance
instance ToJSON AssignInstance where
    toJSON AssignInstance{..} = object
        [ "InstanceId" .= _aiInstanceId
        , "LayerIds"   .= _aiLayerIds
        ]
-- POST action "AssignInstance"; the (empty) response is ignored.
instance AWSRequest AssignInstance where
    type Sv AssignInstance = OpsWorks
    type Rs AssignInstance = AssignInstanceResponse
    request  = post "AssignInstance"
    response = nullResponse AssignInstanceResponse
| dysinger/amazonka | amazonka-opsworks/gen/Network/AWS/OpsWorks/AssignInstance.hs | mpl-2.0 | 3,675 | 0 | 10 | 777 | 420 | 256 | 164 | 53 | 1 |
{-# LANGUAGE CPP, ForeignFunctionInterface #-}
-----------------------------------------------------------------------------------------
{-| Module : WxcObject
Copyright : (c) Daan Leijen 2003, 2004
License : wxWindows
Maintainer : wxhaskell-devel@lists.sourceforge.net
Stability : provisional
Portability : portable
Basic object type.
-}
-----------------------------------------------------------------------------------------
module Graphics.UI.WXCore.WxcObject(
-- * Object types
Object, objectNull, objectIsNull, objectCast, objectIsManaged
, objectFromPtr, objectFromManagedPtr
, withObjectPtr
, objectFinalize, objectNoFinalize
-- * Managed objects
, ManagedPtr, TManagedPtr, CManagedPtr
) where
import Control.Exception
import System.IO.Unsafe( unsafePerformIO )
import Foreign.C
import Foreign.Ptr
import Foreign.Storable
import Foreign.Marshal.Alloc
import Foreign.Marshal.Array
{- note: for GHC 6.10.2 or higher, it is recommended to use "import Foreign.Concurrent".
   See http://www.haskell.org/pipermail/cvs-ghc/2009-January/047120.html -}
import Foreign.ForeignPtr hiding (newForeignPtr,addForeignPtrFinalizer)
import Foreign.Concurrent
{-----------------------------------------------------------------------------------------
Objects
-----------------------------------------------------------------------------------------}
{- | An @Object a@ is a pointer to an object of type @a@. The @a@ parameter is used
to encode the inheritance relation. When the type parameter is unit @()@, it denotes
an object of exactly that class, when the parameter is a type variable @a@, it
specifies an object that is at least an instance of that class. For example in
wxWindows, we have the following class hierarchy:
> EvtHandler
> |- Window
> |- Frame
> |- Control
> |- Button
> |- Radiobox
In wxHaskell, all the creation functions will return objects of exactly that
class and use the @()@ type:
> frameCreate :: Window a -> ... -> IO (Frame ())
> buttonCreate :: Window a -> ... -> IO (Button ())
> ...
In contrast, all the /this/ (or /self/) pointers of methods can take objects
of any instance of that class and have a type variable, for example:
> windowSetClientSize :: Window a -> Size -> IO ()
> controlSetLabel :: Control a -> String -> IO ()
> buttonSetDefault :: Button a -> IO ()
This means that we can use @windowSetClientSize@ on any window, including
buttons and frames, but we can only use @controlSetLabel@ on controls, not
including frames.
In wxHaskell, this works since a @Frame ()@ is actually a type synonym for
@Window (CFrame ())@ (where @CFrame@ is an abstract data type). We can thus
pass a value of type @Frame ()@ to anything that expects some @Window a@.
For a button this works too, as it is a synonym for @Control (CButton ())@
which is in turn a synonym for @Window (CControl (CButton ()))@. Note that
we can\'t pass a frame to something that expects a value of type @Control a@.
Of course, a @Window a@ is actually a type synonym for @EvtHandler (CWindow a)@.
If you study the documentation in "Graphics.UI.WXH.WxcClasses" closely, you
can discover where this chain ends :-).
Objects are not automatically deleted. Normally you can use a delete function
like @windowDelete@ to delete an object. However, almost all objects in the
wxWindows library are automatically deleted by the library. The only objects
that should be used with care are resources as bitmaps, fonts and brushes.
-}
-- An object is either a plain pointer (unmanaged: deleted by the C++
-- side) or a foreign pointer carrying a Haskell-side finalizer (managed).
data Object a = Object !(Ptr a)
              | Managed !(ForeignPtr (TManagedPtr a))
-- | Managed pointer (proxy) objects
-- 'CManagedPtr' is an empty tag type used only for pointer typing.
type ManagedPtr a = Ptr (CManagedPtr a)
type TManagedPtr a = CManagedPtr a
data CManagedPtr a = CManagedPtr
-- Equality compares the underlying C pointers. The 'unsafePerformIO' is
-- pure only if an object's pointer never changes over its lifetime --
-- NOTE(review): assumed here, not provable from this module alone.
instance Eq (Object a) where
  obj1 == obj2
    = unsafePerformIO $
      withObjectPtr obj1 $ \p1 ->
      withObjectPtr obj2 $ \p2 ->
      return (p1 == p2)
-- Ordering by raw pointer value; useful for Map/Set keys, not meaningful
-- beyond identity. Same 'unsafePerformIO' caveat as the Eq instance.
instance Ord (Object a) where
  compare obj1 obj2
    = unsafePerformIO $
      withObjectPtr obj1 $ \p1 ->
      withObjectPtr obj2 $ \p2 ->
      return (compare p1 p2)
-- Shows the raw pointer value; intended for debugging only.
instance Show (Object a) where
  show obj
    = unsafePerformIO $
      withObjectPtr obj $ \p ->
      return (show p)
-- | A null object. Use with care.
-- An unmanaged wrapper around 'nullPtr'; recognized by 'objectIsNull'.
objectNull :: Object a
objectNull
  = Object nullPtr
-- | Is this a managed object, i.e. one carrying a Haskell-side finalizer?
-- (Fix: the matched ForeignPtr was bound to an unused name, which trips
-- -Wunused-matches; a wildcard states the intent.)
objectIsManaged :: Object a -> Bool
objectIsManaged obj
  = case obj of
      Managed _ -> True
      _         -> False
-- | Test for null object.
-- Compares the underlying pointer to 'nullPtr'; same 'unsafePerformIO'
-- caveat as the Eq/Ord instances above.
objectIsNull :: Object a -> Bool
objectIsNull obj
  = unsafePerformIO $
    withObjectPtr obj $ \p -> return (p == nullPtr)
-- | Cast an object to another type. Use with care: no runtime check is
-- (or can be) performed; the managed/unmanaged status is preserved.
objectCast :: Object a -> Object b
objectCast (Object p)   = Object (castPtr p)
objectCast (Managed fp) = Managed (castForeignPtr fp)
-- | Do something with the object pointer.
-- For a managed object, 'withForeignPtr' keeps it alive (finalizer not
-- run) for the duration of the action; the raw pointer must not escape.
withObjectPtr :: Object a -> (Ptr a -> IO b) -> IO b
withObjectPtr obj f
  = case obj of
      Object p -> f p
      Managed fp -> withForeignPtr fp $ \mp ->
                    do p <- wxManagedPtr_GetPtr mp
                       f p
-- | Finalize a managed object manually. (no effect on unmanaged objects)
-- (Fix: the unmanaged branch bound its pointer to an unused name,
-- which trips -Wunused-matches; a wildcard states the intent.)
objectFinalize :: Object a -> IO ()
objectFinalize obj
  = case obj of
      Object _ -> return ()
      Managed fp -> withForeignPtr fp $ wxManagedPtr_Finalize
-- | Remove the finalizer on a managed object. (no effect on unmanaged objects)
-- (Fix: the unmanaged branch bound its pointer to an unused name,
-- which trips -Wunused-matches; a wildcard states the intent.)
objectNoFinalize :: Object a -> IO ()
objectNoFinalize obj
  = case obj of
      Object _ -> return ()
      Managed fp -> withForeignPtr fp $ wxManagedPtr_NoFinalize
-- | Wrap a raw pointer as an unmanaged object (no finalizer attached;
-- deletion remains the caller's / the C++ side's responsibility).
objectFromPtr :: Ptr a -> Object a
objectFromPtr = Object
-- | Create a managed object with a given finalizer.
-- The finalizer invokes the C-side delete function on the managed
-- pointer when the ForeignPtr is garbage collected.
objectFromManagedPtr :: ManagedPtr a -> IO (Object a)
objectFromManagedPtr mp
  = do fun <- wxManagedPtrDeleteFunction
       -- wxManagedPtr_NoFinalize mp  {- turn off finalization -}
       fp <- newForeignPtr mp (fun mp)
       return (Managed fp)
-- | Fetch the C-side delete function and wrap it as a Haskell action.
wxManagedPtrDeleteFunction :: IO (ManagedPtr a -> IO ())
wxManagedPtrDeleteFunction
  = fmap wxManagedPtr_CallbackFunction wxManagedPtr_GetDeleteFunction
{--------------------------------------------------------------------------
Managed pointers
--------------------------------------------------------------------------}
-- Read the raw C pointer behind a managed (proxy) pointer.
foreign import ccall wxManagedPtr_GetPtr :: Ptr (TManagedPtr a) -> IO (Ptr a)
-- Run the C-side finalizer now / disable it permanently.
foreign import ccall wxManagedPtr_Finalize :: ManagedPtr a -> IO ()
foreign import ccall wxManagedPtr_NoFinalize :: ManagedPtr a -> IO ()
-- Obtain the C delete function, and ("dynamic") a way to call it from Haskell.
foreign import ccall wxManagedPtr_GetDeleteFunction :: IO (FunPtr (ManagedPtr a -> IO ()))
foreign import ccall "dynamic" wxManagedPtr_CallbackFunction :: FunPtr (ManagedPtr a -> IO ()) -> ManagedPtr a -> IO ()
| thielema/wxhaskell | wxcore/src/haskell/Graphics/UI/WXCore/WxcObject.hs | lgpl-2.1 | 7,009 | 0 | 13 | 1,578 | 1,044 | 532 | 512 | 93 | 2 |
{-# OPTIONS_GHC -fno-warn-missing-signatures -fno-warn-unused-binds #-}
{-# LANGUAGE OverloadedStrings #-}
module EntityTagCache where
import Control.Monad ((>=>))
import Data.List (isPrefixOf)
import Data.Map.Strict as M
import Data.Maybe as MB (mapMaybe)
import Prelude as P
import System.IO (IOMode (ReadMode), hGetContents, withFile)
import Test.HUnit (Counts, Test (TestList), runTestTT)
import qualified Test.HUnit.Util as U (t)
------------------------------------------------------------------------------
-- | Bidirectional tag-interning state, parameterized by the payload in
-- 'mii' (either the entity map, or the tag list being deduplicated).
type MSI = Map String Int
type MIS = Map Int String
data Cache' a = Cache' { mii :: a
                       , next :: Int -- next tag number
                       , msi :: MSI -- tag to tag number
                       , mis :: MIS -- tag number to tag
                       }
-- Payload accessors: read 'mii' as "strings" / "keys" depending on phase.
ss = mii
ks = mii
type Cache = Cache' (Map Int [Int]) -- entity id to tag numbers
type DDIn = Cache' [String] -- input to DD
type DDOut = Cache' [Int] -- output of DD
------------------------------------------------------------------------------
-- Use cache
-- impure
-- | Like 'getTags', lifted over an IO-wrapped cache.
getTagsIO :: Int -> IO Cache -> IO (Maybe [String])
getTagsIO x ioCache = getTags x <$> ioCache
-- | Like 'updateTags', lifted over an IO-wrapped cache.
updateTagsIO :: Int -> [String] -> IO Cache -> IO Cache
updateTagsIO x ts ioCache = updateTags x ts <$> ioCache
-- pure
-- | Look up an entity's tags, translating stored tag numbers back to
-- strings; tag numbers missing from 'mis' are silently dropped.
getTags :: Int -> Cache -> Maybe [String]
getTags x c = fmap resolve (M.lookup x (mii c))
  where
    resolve = MB.mapMaybe (\i -> M.lookup i (mis c))
-- | Replace entity @x@'s tags: intern any new tag strings (threading the
-- cache's interning state) and point @x@ at the resulting tag numbers.
updateTags :: Int -> [String] -> Cache -> Cache
updateTags x ts c = -- (c,i0,msi0,mis0)
  let o = dedupTagIdTags (Cache' ts (next c) (msi c) (mis c))
  in o { mii = M.insert x (ks o) (mii c) }
------------------------------------------------------------------------------
-- Populate cache
-- | Load and parse a cache from a CSV file.
--
-- Fix: the previous version returned the lazy result of 'hGetContents'
-- out of 'withFile', so the handle was closed before the contents were
-- demanded -- forcing the cache later could fail or see truncated input.
-- Forcing the whole string before returning makes the read strict.
loadCacheFromFile :: FilePath -> IO Cache
loadCacheFromFile filename =
  withFile filename ReadMode $ \h -> do
    contents <- hGetContents h
    length contents `seq` return (stringToCache contents)
-- | Parse the whole CSV text and intern every tag into a fresh cache.
stringToCache :: String -> Cache
stringToCache contents = dedupTags (collectTagIdAndTags contents)
------------------------------------------------------------------------------
-- Internals
-- | Parse CSV lines of the form @id,tag1,tag2,...@ into (id, tags) pairs.
--
-- Fix: the previous version used an irrefutable @(i:xs)@ pattern (crashes
-- on blank lines, where 'splitOn' yields @[]@) and partial 'read' (crashes
-- on a non-numeric id). Blank or malformed lines are now skipped.
collectTagIdAndTags :: String -> [(Int, [String])]
collectTagIdAndTags = MB.mapMaybe mkEntry . lines
  where
    mkEntry line =
      case splitOn "," line of
        (i:xs) -> (\tagId -> (tagId, xs)) <$> readMaybe i
        []     -> Nothing
-- | Build a cache from parsed (entity, tags) pairs: intern each entity's
-- tags (sharing one interning state across all entities) and record the
-- resulting tag-number lists per entity. Tag numbers count up from -1;
-- the foldr means rightmost entities are processed first.
dedupTags :: [(Int, [String])] -> Cache
dedupTags = P.foldr level1 (Cache' M.empty (-1) M.empty M.empty)
 where
  level1 (tag, ss0) c =
    let o = dedupTagIdTags (c { mii = ss0 })
    in o { mii = M.insert tag (ks o) (mii c) }
-- | Intern a list of tag names: a known name maps to its existing number;
-- an unknown name is assigned the current 'next' number and both lookup
-- tables are extended.  The record-update on @mii@ changes the payload
-- type from names ([String]) to numbers ([Int]).  Note 'foldr' visits the
-- last name first, so fresh numbers are handed out back-to-front.
dedupTagIdTags :: DDIn -> DDOut
dedupTagIdTags i = P.foldr level2 (i { mii = mempty }) (ss i)
  where
    level2 s o = case M.lookup s (msi o) of
      Just j -> o { mii = j:ks o }
      Nothing -> Cache' (next o:ks o) (next o + 1)
        (M.insert s (next o) (msi o)) (M.insert (next o) s (mis o))
------------------------------------------------------------------------------
-- Test
-- Example input: four CSV records; record 2 has an id but no tags.
exCsv = "0,foo,bar\n1,abc,foo\n2\n3,xyz,bar"
-- Cache built from the example input, shared by the tests below.
cache = stringToCache exCsv
-- Flatten a cache into comparable association lists for test assertions.
cToList c = (M.toList (mii c), next c, M.toList (msi c), M.toList (mis c))
-- getTags: ids 0..3 resolve to their tag names; id 4 is absent.
tgt = U.t "tgt"
    (P.map (`getTags` cache) [0..4])
    [ Just ["foo","bar"]
    , Just ["abc","foo"]
    , Just []
    , Just ["xyz","bar"]
    , Nothing
    ]
-- updateTags: entity 2 gains two NEW tag names, which get fresh numbers
-- 3 and 4 (handed out back-to-front by the foldr in dedupTagIdTags).
tut = U.t "tut"
    (cToList $ updateTags 2 ["new1","new2"] cache)
    ( [(0,[1,-1]), (1,[2,1]), (2,[4,3]), (3,[0,-1])]
    , 5
    , [("abc",2),("bar",-1),("foo",1),("new1",4),("new2",3),("xyz",0)]
    , [(-1,"bar"),(0,"xyz"),(1,"foo"),(2,"abc"),(3,"new2"),(4,"new1")] )
-- Full cache contents after parsing exCsv: numbering starts at -1 and the
-- LAST record's tags are interned first.
tstc = U.t "tstc"
    (cToList cache)
    ( [(0,[1,-1]), (1,[2,1]), (2,[]), (3,[0,-1])]
    , 3
    , [("abc",2),("bar",-1),("foo",1),("xyz",0)]
    , [(-1,"bar"),(0,"xyz"),(1,"foo"),(2,"abc")] )
-- CSV parsing in isolation.
tct = U.t "tct"
    (collectTagIdAndTags exCsv)
    [(0,["foo","bar"]), (1,["abc","foo"]), (2,[]), (3,["xyz","bar"])]
-- Interning in isolation: duplicates map to one number each.
tddt = U.t "tddt"
    (let o = dedupTagIdTags (Cache' ["foo", "bar", "baz", "foo", "baz", "baz", "foo", "qux", "new"]
                                    (-1) M.empty M.empty)
     in (ks o, next o, M.toList (msi o), M.toList (mis o)))
    ( [ 1, 3, 2, 1, 2, 2, 1, 0, -1]
    , 4::Int
    , [("bar",3),("baz",2),("foo",1),("new",-1),("qux",0)]
    , [(-1,"new"),(0,"qux"),(1,"foo"),(2,"baz"),(3,"bar")])
-- | Run every HUnit case defined above and report the counts.
test :: IO Counts
test = runTestTT (TestList (concat [tgt, tut, tstc, tct, tddt]))
------------------------------------------------------------------------------
-- Utililties (I can't find this in the Haskell libraries available on stackage)
-- | Split a list on a delimiter SEQUENCE.  A trailing delimiter yields a
-- final empty chunk; the empty input yields no chunks at all.
-- (The suffix walk that 'breakList'/'spanList' performed is inlined here.)
splitOn :: Eq a => [a] -> [a] -> [[a]]
splitOn _ [] = []
splitOn delim str = chunk : rest
  where
    -- Walk the input, accumulating elements until the delimiter is a
    -- prefix of the remaining suffix (or the input runs out).
    (chunk, remainder) = walk str
    walk [] = ([], [])
    walk s@(c:cs)
      | delim `isPrefixOf` s = ([], s)
      | otherwise            = let (taken, left) = walk cs in (c : taken, left)
    rest = case remainder of
      []                 -> []
      r | r == delim     -> [[]]
        | otherwise      -> splitOn delim (drop (length delim) r)
-- | Split a list at the first position where @func@ holds of the
-- remaining suffix (the complement of 'spanList').
breakList :: ([a] -> Bool) -> [a] -> ([a], [a])
breakList func = spanList (\suffix -> not (func suffix))
-- | Like 'span', but the predicate inspects the whole remaining suffix
-- rather than a single element.
spanList :: ([a] -> Bool) -> [a] -> ([a], [a])
spanList _ [] = ([], [])
spanList func whole@(y:ys)
  | func whole = let (taken, rest) = spanList func ys in (y : taken, rest)
  | otherwise  = ([], whole)
| haroldcarr/learn-haskell-coq-ml-etc | haskell/playpen/interview/api-catalog/src/EntityTagCache.hs | unlicense | 5,300 | 0 | 16 | 1,448 | 2,183 | 1,253 | 930 | -1 | -1 |
import Data.Char
-- | Supported binary operators, each mapped to its Integer implementation.
-- Shared by 'op' and 'isOperator' so the two can never disagree.
operators :: [(String, Integer -> Integer -> Integer)]
operators = [("+", (+)), ("-", (-)), ("*", (*))]
-- | Apply a named operator to two numeric string operands, returning the
-- result as a string.  The original version had no type signature and
-- fell through with a bare pattern-match failure on an unknown operator;
-- this version reports which token was unsupported.
op :: String -> String -> String -> String
op o n0 n1 = case lookup o operators of
  Just f  -> show (f (read n0) (read n1))
  Nothing -> error ("op: unsupported operator " ++ o)
-- | Is this token one of the supported operator symbols?
isOperator :: String -> Bool
isOperator s = s `elem` map fst operators
-- | One reduction pass over an RPN token list: find the leftmost
-- @operand operand operator@ triple and replace it with its result.
-- NOTE(review): partial — lists with fewer than three tokens have no
-- matching equation; 'ans' only calls this while more than one token
-- remains, which is safe for well-formed RPN input.
exec :: [String] -> [String]
exec (n0:n1:o:ls)
  | isOperator o = let r = op o n0 n1
                   in r:ls
  | otherwise = n0:(exec (n1:o:ls))
-- | Repeatedly reduce the RPN token list until a single numeric result
-- remains, then read it off.
ans :: [String] -> Int
ans [result] = read result
ans tokens   = ans (exec tokens)
-- | Read one line of whitespace-separated RPN tokens from stdin and print
-- the evaluated result.
main = do
  line <- getLine
  print (ans (words line))
| a143753/AOJ | ALDS1_3_A.hs | apache-2.0 | 504 | 0 | 11 | 157 | 316 | 156 | 160 | 20 | 2 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
-- * Demonstrating `non-compositional', context-sensitive processing
-- * Extending the final style
module PushNegFExt where
-- Explain the imports
import Intro2 hiding (main) -- Exp in the final form
import PushNegF hiding (main) -- Push_neg interpreter
import ExtF hiding (main) -- `mul' extension
-- * //
-- But the multiplication is not a homomorphism with respect to negation!
-- * neg (a * b) /= (neg a) * (neg b)
-- | Push-negation context transformer for 'mul'.  Because negation does
-- NOT distribute over both factors (neg (a*b) /= neg a * neg b), a Neg
-- context is pushed into only ONE factor — the second — while the first
-- is always interpreted in a positive context.
instance MulSYM repr => MulSYM (Ctx -> repr) where
  mul e1 e2 Pos = mul (e1 Pos) (e2 Pos)
  mul e1 e2 Neg = mul (e1 Pos) (e2 Neg)
-- Let us recall how an extended term looked like
-- | Demonstration bindings: sample terms from ExtF viewed, evaluated and
-- normalised with the extended push-negation interpreter.  The comment
-- under each binding records its expected value (checked in 'main').
tfm1_view = view tfm1
-- "(7 + (-(1 * 2)))"
tfm1_eval = eval tfm1
-- 5
tfm1_norm = push_neg tfm1
-- The new expression can be evaluated with any interpreter
tfm1_norm_view = view tfm1_norm
-- "(7 + ((-1) * 2))"
-- The result of the standard evaluation (the `meaning') is preserved
tfm1_norm_eval = eval tfm1_norm
-- 5
-- Add an extra negation
tfm1n_norm = push_neg (neg tfm1)
-- see the result
tfm1n_norm_view = view tfm1n_norm
-- "((-7) + (1 * 2))"
tfm1n_norm_eval = eval tfm1n_norm
-- -5
-- Negate the already negated term
tfm1nn_norm = push_neg (neg tfm1n_norm)
tfm1nn_norm_view = view tfm1nn_norm
-- "(7 + ((-1) * 2))"
tfm1nn_norm_eval = eval tfm1nn_norm
-- 5
-- The same for tmf2
-- We can even use a previously defined unextended expression (tf1)
-- as a part of the extended expression.
-- We can indeed mix-and-match
tfm2_view = view tfm2
-- "(7 * (8 + (-(1 + 2))))"
tfm2_eval = eval tfm2
-- 35
tfm2_norm = push_neg tfm2
tfm2_norm_view = view tfm2_norm
-- "(7 * (8 + ((-1) + (-2))))"
tfm2_norm_eval = eval tfm2_norm
-- 35
-- Add an extra negation
tfm2n_norm = push_neg (neg tfm2)
-- see the result
tfm2n_norm_view = view tfm2n_norm
-- "(7 * ((-8) + (1 + 2)))"
tfm2n_norm_eval = eval tfm2n_norm
-- -35
-- Negate the already negated term
tfm2nn_norm = push_neg (neg tfm2n_norm)
tfm2nn_norm_view = view tfm2nn_norm
-- "(7 * (8 + ((-1) + (-2))))"
tfm2nn_norm_eval = eval tfm2nn_norm
-- 35
-- | Smoke-test driver: prints every view/eval result, then checks the two
-- invariants of negation pushing — double negation is the identity on the
-- normal form, and normalisation preserves 'eval' (a single pushed
-- negation flips the sign).  Any violated invariant aborts via 'error'.
main = do
  print PushNegF.tf1_norm_view -- old terms still work
  print PushNegFExt.tfm1_view
  print PushNegFExt.tfm1_eval
  print tfm1_norm_view
  print tfm1_norm_eval
  print tfm1n_norm_view
  print tfm1n_norm_eval
  print tfm1nn_norm_view
  print tfm1nn_norm_eval
  if tfm1_norm_view == tfm1nn_norm_view then return ()
     else error "Double neg"
  if PushNegFExt.tfm1_eval == tfm1_norm_eval then return ()
     else error "Normalization"
  if PushNegFExt.tfm1_eval == - tfm1n_norm_eval then return ()
     else error "Normalization"
  print PushNegFExt.tfm2_view
  print PushNegFExt.tfm2_eval
  print tfm2_norm_view
  print tfm2_norm_eval
  print tfm2n_norm_view
  print tfm2n_norm_eval
  print tfm2nn_norm_view
  print tfm2nn_norm_eval
  if tfm2_norm_view == tfm2nn_norm_view then return ()
     else error "Double neg"
  if PushNegFExt.tfm2_eval == tfm2_norm_eval then return ()
     else error "Normalization"
  if PushNegFExt.tfm2_eval == - tfm2n_norm_eval then return ()
     else error "Normalization"
| egaburov/funstuff | Haskell/tytag/codes/PushNegFExt.hs | apache-2.0 | 3,223 | 12 | 9 | 683 | 631 | 321 | 310 | 61 | 7 |
{-# LANGUAGE ViewPatterns #-}
module Language.K3.Codegen.CPP.MultiIndex where
import Language.K3.Core.Annotation
import Language.K3.Core.Declaration
import Language.K3.Core.Common
import Language.K3.Core.Type
import Language.K3.Codegen.CPP.Primitives (genCType)
import qualified Language.K3.Codegen.CPP.Representation as R
import Language.K3.Codegen.CPP.Types
import Control.Arrow ( (&&&) )
import Data.Functor ((<$>))
import Data.List (isInfixOf, nub)
import Data.Maybe (catMaybes, fromMaybe)
-- Given a list of annotations
-- Return a tuple
-- fst: List of index types to use for specializing K3::MultiIndex
-- snd: List of function definitions to attach as members for this annotation combination (lookup functions)
indexes :: Identifier -> [(Identifier, [AnnMemDecl])] -> CPPGenM ([R.Type], [R.Definition])
indexes name ans = do
  -- Number the annotations from 1; the position doubles as the boost
  -- multi-index index number used by get<N> in the generated C++.
  let indexed = zip [1..] ans
  -- Pair every member declaration with its (index number, annotation id).
  let flattened = concatMap (\(n, (i, mems)) -> zip (repeat (n,i)) mems) indexed
  index_types <- (nub . catMaybes) <$> mapM index_type flattened
  --let base_name = R.Specialized ((R.Named $ R.Name "__CONTENT") : index_types) (R.Qualified (R.Name "K3") (R.Name "MultiIndex"))
  lookup_defns <- catMaybes <$> mapM lookup_fn flattened
  slice_defns <- catMaybes <$> mapM slice_fn flattened
  return (index_types, lookup_defns ++ slice_defns)
  where
    -- NOTE(review): 'key_field' appears unused in this where-block.
    key_field = "name"
    -- The collection's content type parameter, as emitted C++.
    elem_type = R.Named $ R.Name "__CONTENT"
    elem_r = R.Name "&__CONTENT"
    -- Qualify a name under boost::multi_index.
    bmi n = R.Qualified (R.Name "boost") (R.Qualified (R.Name "multi_index") n)
    -- A member's "key" is the argument type of its function signature.
    get_key_type :: K3 Type -> Maybe (K3 Type)
    get_key_type (tag &&& children -> (TFunction, [k, _])) = Just k
    get_key_type _ = Nothing
    -- Only members of annotations whose name mentions "Index" produce an
    -- index type.
    index_type :: ((Integer, Identifier), AnnMemDecl) -> CPPGenM (Maybe R.Type)
    index_type ((_,n), decl) =
      if "Index" `isInfixOf` n
        then extract_type n decl
        else return Nothing
    -- Build a boost index type e.g. ordered_non_unique
    extract_type :: Identifier -> AnnMemDecl -> CPPGenM (Maybe R.Type)
    extract_type _ (Lifted _ _ t _ _) = do
      let key_t = get_key_type t
      let fields = maybe Nothing get_fields key_t
      types <- maybe (return Nothing) (\x -> mapM single_field_type x >>= return . Just) fields
      -- ordered_non_unique< composite_key<__CONTENT, member...> >
      let i_t ts =
            R.Named $
              R.Specialized
              [ R.Named $ R.Specialized
                  (elem_type : ts)
                  ( bmi $ R.Name "composite_key")
              ]
              (bmi $ R.Name "ordered_non_unique")
      return $ i_t <$> types
    extract_type _ _ = return Nothing
    -- A key must be a record type; expose its fields.
    get_fields :: K3 Type -> Maybe [(Identifier, K3 Type)]
    get_fields (tag &&& children -> (TRecord ids, ts) ) = Just $ zip ids ts
    get_fields _ = Nothing
    -- One boost::multi_index::member<...> entry per key field.
    single_field_type :: (Identifier, K3 Type) -> CPPGenM R.Type
    single_field_type (n, t) = do
      cType <- genCType t
      return $
        R.Named $ R.Specialized
        [ elem_type
        , cType
        , R.Named $ R.Qualified elem_r (R.Name n)
        ]
        ( bmi $ R.Name "member")
    -- boost::make_tuple(arg.field1, arg.field2, ...) for a record-typed
    -- argument named @n@.  Partial: errors if @t@ is not a record.
    tuple :: R.Name -> K3 Type -> R.Expression
    tuple n t =
      let fields = fromMaybe (error "not a record") (get_fields t)
          ids = map fst fields
          projs = map (R.Project (R.Variable n) . R.Name) ids
      in R.Call (R.Variable $ R.Qualified (R.Name "boost") (R.Name "make_tuple")) projs
    -- Build a lookup function, wrapping boost 'find'
    lookup_fn :: ((Integer, Identifier), AnnMemDecl) -> CPPGenM (Maybe R.Definition)
    lookup_fn ((i,_) ,Lifted _ fname t _ _ ) = do
      let key_t = get_key_type t
      let this = R.Dereference $ R.Variable $ R.Name "this"
      let container = R.Call
                        (R.Project this (R.Name "getConstContainer") )
                        []
      -- get<i>(container): select this member's index by number.
      let index = R.Call
                    (R.Variable $ (R.Specialized [R.Named $ R.Name $ show i] (R.Name "get")))
                    [container]
      let look k_t = R.Call
                       (R.Project this (R.Name "lookup_with_index") )
                       [index, tuple (R.Name "key") k_t]
      -- shared_ptr<__CONTENT> fname(key) { return lookup_with_index(...); }
      let defn k_t c_t = R.FunctionDefn
                           (R.Name fname)
                           [("key", c_t)]
                           (Just $ R.Named $ R.Specialized [R.Named $ R.Name "__CONTENT"] (R.Name "shared_ptr"))
                           []
                           False
                           [R.Return $ look k_t]
      cType <- maybe (return Nothing) (\x -> genCType x >>= return . Just) key_t
      let result = key_t >>= \k_t -> cType >>= \c_t -> Just $ defn k_t c_t
      -- Only members whose name mentions "lookup" become lookup wrappers.
      return $ if "lookup" `isInfixOf` fname then result else Nothing
    lookup_fn _ = return Nothing
    -- Same shape as lookup_fn, but wraps a range query over (a, b) key
    -- bounds and returns a collection of the enclosing annotation type.
    slice_fn :: ((Integer, Identifier), AnnMemDecl) -> CPPGenM (Maybe R.Definition)
    slice_fn ((i,_),Lifted _ fname t _ _ ) = do
      let key_t = get_key_type t
      let this = R.Dereference $ R.Variable $ R.Name "this"
      let container = R.Call
                        (R.Project this (R.Name "getConstContainer") )
                        []
      let index = R.Call
                    ((R.Variable $ R.Specialized [R.Named $ R.Name $ show i] (R.Name "get")))
                    [container]
      let slice k_t = R.Call
                        (R.Project this (R.Name "slice_with_index") )
                        [index, tuple (R.Name "a") k_t, tuple (R.Name "b") k_t]
      let defn k_t c_t = R.FunctionDefn
                           (R.Name fname)
                           [("a", c_t), ("b", c_t)]
                           (Just $ R.Named $ R.Specialized [R.Named $ R.Name "__CONTENT"] (R.Name name))
                           []
                           False
                           [R.Return $ slice k_t]
      cType <- maybe (return Nothing) (\x -> genCType x >>= return . Just) key_t
      let result = key_t >>= \k_t -> cType >>= \c_t -> Just $ defn k_t c_t
      return $ if "slice" `isInfixOf` fname then result else Nothing
    slice_fn _ = return Nothing
| yliu120/K3 | src/Language/K3/Codegen/CPP/MultiIndex.hs | apache-2.0 | 5,971 | 0 | 20 | 1,876 | 2,021 | 1,045 | 976 | 113 | 9 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Application
( makeApplication
, getApplicationDev
, makeFoundation
) where
import Import
import Settings
import Yesod.Auth
import Yesod.Default.Config
import Yesod.Default.Main
import Yesod.Default.Handlers
import Network.Wai.Middleware.RequestLogger (logStdout, logStdoutDev)
import qualified Database.Persist.Store
import Database.Persist.GenericSql (runMigration)
import Network.HTTP.Conduit (newManager, def)
import Data.Conduit.Pool
import qualified Text.HyperEstraier.Database as Hs
import Control.Exception
-- Import all relevant handler modules here.
-- Don't forget to add new modules to your cabal file!
import Handler.Home
import Handler.View
import Handler.Edit
import Handler.Add
import Handler.List
import Handler.Markdown
import Handler.Search
import Handler.NewLink
import Handler.ListLinks
import Handler.EditLink
import Handler.ViewLink
import Handler.LinkInfo
import Handler.CreateJournal
import Handler.AddJournalItem
import Handler.JournalEdit
import Handler.ListJournals
import Handler.WriteJournal
-- This line actually creates our YesodSite instance. It is the second half
-- of the call to mkYesodData which occurs in Foundation.hs. Please see
-- the comments there for more details.
mkYesodDispatch "App" resourcesApp
-- This function allocates resources (such as a database connection pool),
-- performs initialization and creates a WAI application. This is also the
-- place to put your migrate statements to have automatic database
-- migrations handled by Yesod.
-- | Build the WAI application: initialise the foundation, convert it to a
-- plain WAI app, then wrap it in a request logger — verbose in
-- development, terse in production.
makeApplication :: AppConfig DefaultEnv Extra -> IO Application
makeApplication conf = do
    foundation <- makeFoundation conf
    plainApp   <- toWaiAppPlain foundation
    let requestLogger
          | development = logStdoutDev
          | otherwise   = logStdout
    return (requestLogger plainApp)
-- | Allocate all application resources: an HTTP manager, a HyperEstraier
-- index pool, the static-site snaplet, and the persistent database pool
-- (running migrations before returning the assembled 'App').
makeFoundation :: AppConfig DefaultEnv Extra -> IO App
makeFoundation conf = do
    manager <- newManager def
    -- Pool of HyperEstraier writers on the "casket" database.
    -- NOTE(review): the three numeric createPool arguments are presumably
    -- stripes / idle time / max resources — confirm against the
    -- Data.Conduit.Pool version in use.
    index <- createPool
               (dbOrThrow "casket" (Hs.Writer [Hs.Create []]))
               Hs.closeDatabase
               1
               5
               1
    s <- staticSite
    dbconf <- withYamlEnvironment "config/postgresql.yml" (appEnv conf)
              Database.Persist.Store.loadConfig >>=
              Database.Persist.Store.applyEnv
    p <- Database.Persist.Store.createPoolConfig (dbconf :: Settings.PersistConfig)
    -- Run migrations eagerly so schema errors surface at startup.
    Database.Persist.Store.runPool dbconf (runMigration migrateAll) p
    return $ App conf s p index manager dbconf
  where
    -- Open the index database or rethrow its error as an exception.
    dbOrThrow path iomode = do
      conn <- Hs.openDatabase path iomode
      case conn of
        Right db -> return db
        Left err -> throw err
-- for yesod devel
-- | Entry point used by @yesod devel@: loads the Development config
-- (with this app's extra-settings parser) and returns the port together
-- with the application.
getApplicationDev :: IO (Int, Application)
getApplicationDev =
    defaultDevelApp loader makeApplication
  where
    loader = loadConfig (configSettings Development)
        { csParseExtra = parseExtra
        }
| MasseR/introitu | Application.hs | bsd-2-clause | 2,888 | 0 | 15 | 554 | 572 | 312 | 260 | -1 | -1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
module Main where
{-
Idea shortly: client initiates the connection by calling msgHandler-url
and then server starts waiting for files: first comes short text and file size,
then file name and as a last thing, the file. When the file is received, the
file is written into the downloads-directory.
TBD / questions:
- see msgReadLoop
-}
------------------------------------------------------------------------------
import Control.Concurrent (forkIO, MVar, newMVar, modifyMVar_,
modifyMVar, readMVar)
import qualified Control.Exception as CE
import Control.Exception (try)
import Control.Monad (forever, unless)
import Control.Monad.Except
import Control.Monad.Trans.Either
import Control.Lens
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import Data.Monoid ((<>))
import Data.Time
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.IO as T
import qualified Data.Text.Read as T
import qualified Network.WebSockets as WS
import qualified Network.WebSockets.Snap as WS
import System.Directory
import System.Exit
import Snap
import Snap.Snaplet.Session
import Snap.Snaplet.Session.Backends.CookieSession
import Snap.Util.FileServe
------------------------------------------------------------------------------
-- See
-- https://github.com/jaspervdj/websockets-snap/blob/master/example/server.hs
-- and
-- https://github.com/jaspervdj/websockets/tree/master/example
-- | Initial websocket-server state: no connected clients.
newWSServerState :: WSServerState
newWSServerState = []
-- | Is a client with this name already registered?  Only the name is
-- compared; the connection handle is ignored.
clientExists :: Client -> WSServerState -> Bool
clientExists (cname, _) = any (\(existing, _) -> existing == cname)
-- | Register a client by prepending it to the state.
addClient :: Client -> WSServerState -> WSServerState
addClient newClient existing = newClient : existing
-- | Drop every registered client carrying this client's name.
removeClient :: Client -> WSServerState -> WSServerState
removeClient (cname, _) = filter (\(other, _) -> other /= cname)
-- | A connected client: display name plus its websocket connection.
type Client = (Text, WS.Connection)
-- | All currently connected clients.
type WSServerState = [Client]
-- | Shared, mutable server state.
type MVWSSS = MVar WSServerState
------------------------------------------------------------------------------
-- | Convenience alias for handlers over this site's snaplet stack.
type AppHandler = Handler App App
-- | The site's foundation: session snaplet plus the shared websocket
-- state.  (Heist and auth snaplets are present but commented out.)
data App = App
    { -- _heist :: Snaplet (Heist App)
      _sess :: Snaplet SessionManager
    -- , _auth :: Snaplet (AuthManager App)
    , _wsss :: MVWSSS
    }
-- Generate lenses ('sess', 'wsss') for the fields above.
makeLenses ''App
-- instance HasHeist App where heistLens = subSnaplet heist
------------------------------------------------------------------------------
-- Source: a stackoverflow q about exceptions and its answer.
-- | Application-level message error, usable as an exception: a state tag
-- plus a human-readable description.
data MsgError = MsgError {meState :: String,
                          meErrorMsg :: String}
  deriving (Eq, Show, Read) -- , Typeable)
instance CE.Exception MsgError
-- | Run an IO action, rethrowing any caught exception of type @e@ through
-- 'MonadError' instead of letting it escape as an IO exception.
tryIO :: (MonadError e m, MonadIO m, CE.Exception e) => IO a -> m a
tryIO = (>>= either throwError return) . liftIO . try
-- The helpers below are all identity functions; their ONLY purpose is to
-- pin the error type of an 'EitherT' pipeline so that 'tryIO'/'try' know
-- which exception type to catch (see their use in 'msgReadLoop').
anyException :: EitherT CE.SomeException m a -> EitherT CE.SomeException m a
anyException = id
-- | Render any Showable error into a String error.
message :: (Show e, Functor m) => EitherT e m a -> EitherT String m a
message = bimapEitherT show id
connectionError :: EitherT WS.ConnectionException m a -> EitherT WS.ConnectionException m a
connectionError = id
handshakeError :: EitherT WS.HandshakeException m a -> EitherT WS.HandshakeException m a
handshakeError = id
msgError :: EitherT MsgError m a -> EitherT MsgError m a
msgError = id
------------------------------------------------------------------------------
-- | Snap handler for the websocket endpoint: hands the connection off to
-- 'msgHandlerApp'.  The user name is currently hard-coded (the auth
-- lookup is commented out).
msgHandler :: AppHandler ()
msgHandler = do
  -- usr <- with auth currentUser
  sapp <- getSnapletState
  let vapp = view snapletValue sapp
      usr = "User name" :: Text
  WS.runWebSocketsSnap $ msgHandlerApp vapp usr
-- | Accept a pending websocket connection, start a keep-alive ping
-- thread, and enter the receive loop.  Client registration in the shared
-- state is currently disabled (commented out), so the 'sApp' argument is
-- effectively unused.
msgHandlerApp :: (MonadIO m) => App -> Text -> WS.PendingConnection -> m ()
msgHandlerApp sApp usr pending = do
  liftIO $ T.putStrLn $ "msgHandlerApp, 1. line, usr=" <> usr
  -- let requesthead = WS.pendingRequest pending
  conn <- liftIO $ WS.acceptRequest pending
  liftIO $ WS.forkPingThread conn 30 -- Ping every 30 secs to keep connection alive.
  liftIO $ T.putStrLn "msgHandlerApp, after acceptRequest and forkPingThread"
  {- let client = (usr,conn) -}
  {- clients <- liftIO $ readMVar mvwsss -}
  {- unless (clientExists client clients) $ do -}
  {-   liftIO $ modifyMVar_ mvwsss (return . addClient client) -}
  {-   return () -}
  {- liftIO $ T.putStrLn "msgHandlerApp, after modifyMVar_, next msgReadLoop" -}
  msgReadLoop conn
  {- where -}
  {- mvwsss = view wsss sApp -}
------------------------------------------------------------------------------
-- This receives messages from the client.
-- We read file size, name and then contents.
-- When uploading has no problems, this can receive several files that can be
-- large. Files are stored in "downloads" subdirectory relative to the
-- directory the server is running (remember to make it).
-- How to do questions / TBD:
-- - Exception/error handling with regards to reveiveData, this is not ok
-- below.
-- - If we decide that file is too large, how to tell it to the client and
-- abort the whole uploading of the file?
-- - And in case of hangling large files, should we split/splice the file
-- to thunks of known size?
-- - Should we check the filename before using it? (probably yes -> how?)
-- | Receive loop implementing the upload protocol: (1) a text message
-- "File coming:<size>", (2) a text message with the file name, (3) a
-- binary message with the file contents, which is written into the
-- "./downloads/" directory unless a file of that name already exists.
-- NOTE(review): the second and third 'when' guards compare @msg@ (the
-- FIRST message) against the error sentinels of the SECOND and THIRD
-- receives, so they can never fire; they presumably should test @fname@
-- and @ok@ respectively — confirm intent before changing.
msgReadLoop :: (MonadIO m) => WS.Connection -> m ()
msgReadLoop conn = forever $ do
  liftIO $ T.putStrLn "msgReadLoop, first line in forever loop"
  -- res <- runEitherT (message . connectionError . tryIO $ liftIO (WS.receiveData conn :: IO ByteString))
  res <- runEitherT (message . connectionError . tryIO
           $ liftIO (WS.receiveData conn :: IO Text))
  t <- liftIO getCurrentTime
  liftIO $ T.putStrLn
    $ "msgReadLoop, after receiveData, at " <> (T.pack . show) t
  -- Collapse a connection error into a sentinel message string.
  let msg = case res of
        Left _ -> "connectionError (first receiveData)"
        Right r -> r
  liftIO $ T.putStrLn $ "msgReadLoop, the first message is " <> msg
  when (msg == "connectionError (first receiveData)")
    $ liftIO (die "msgReadLoop problem1")
  when (prefix `T.isPrefixOf` msg) $ do
    -- Parse the advertised file size after the "File coming:" prefix;
    -- a parse failure silently degrades to size 0.
    let efsz = (T.decimal . T.drop (T.length prefix)) msg
          :: Either String (Int, Text)
        (fsz,_rtxt) = case efsz of
          Left _ -> (0, T.empty)
          Right i -> i
    res2 <- runEitherT (message . connectionError . tryIO
              $ liftIO (WS.receiveData conn :: IO Text))
    let fname = case res2 of
          Left _ -> "connectionError (second receiveData)"
          Right r -> r
    liftIO $ T.putStrLn $ "msgReadLoop, the file name is "
      <> fname <> " and its size is " <> (T.pack . show) fsz <> "."
    when (msg == "connectionError (second receiveData)")
      $ liftIO (die "msgReadLoop problem2")
    res3 <- runEitherT (message . connectionError . tryIO
              $ liftIO (WS.receiveData conn :: IO ByteString))
    ok <-
      liftIO $ case res3 of
        Left _ -> return "connectionError (third receiveData)"
        Right fileb -> do
          liftIO $ T.putStrLn "msgReadLoop, got the file"
          fnames <- getDirectoryContents d
          liftIO $ T.putStrLn $ "Files in directory are: "
            <> (T.pack . show) fnames
          -- Refuse to overwrite an existing download.
          if T.unpack fname `elem` fnames
            then return $ "File " <> fname <> " already exists"
            else do -- Write the file with given name into the directory.
              let fn = T.pack d <> fname
              liftIO $ T.putStrLn $ "writing file " <> (T.pack . show ) fn
              BS.writeFile (T.unpack fn) fileb
              return $ "just wrote the file " <> fname
    liftIO $ T.putStrLn $ "msgReadLoop, ok = " <> ok
    when (msg == "connectionError (third receiveData)")
      $ liftIO (die "msgReadLoop problem3")
  where
    prefix = "File coming:"
    d = "./downloads/" :: FilePath
------------------------------------------------------------------------------
-- | Debug handler: logs "hmm" to stdout whenever the root route is hit.
hmm :: AppHandler ()
hmm = liftIO $ T.putStrLn "hmm"
-- writeBS "Hello "
------------------------------------------------------------------------------
-- | The application's routes. From template, stripped down.
-- | Route table: static files at the root (with a debug log), and the
-- websocket endpoint at /msgHandler.  Auth routes remain commented out.
routes :: [(ByteString, AppHandler ())]
{- routes = [ ("/", writeText "hello") -}
routes = [ -- ("login", with auth handleLoginSubmit)
           -- , ("logout", with auth handleLogout)
           -- , ("new_user", with auth handleNewUser)
           -- , ("loginInits", loginInits)
           ("/", serveDirectory "static" >> hmm)
         , ("msgHandler", msgHandler)
         ]
------------------------------------------------------------------------------
-- | The application initializer. From template, stripped down.
-- | Snaplet initializer: sets up the cookie-backed session (1-hour
-- timeout), allocates the shared websocket state, and registers the
-- routes.  Heist and auth snaplets are present but disabled.
app :: SnapletInit App App
app = makeSnaplet "app2" "An snaplet example application." Nothing $ do
    -- h <- nestSnaplet "" heist $ heistInit "templates"
    s <- nestSnaplet "" sess $
         initCookieSessionManager "site_key.txt" "_cookie" Nothing (Just 3600)
    -- a <- nestSnaplet "auth" auth $ initPostgresAuth sess d
    w <- liftIO $ newMVar newWSServerState
    --
    addRoutes routes
    return $ App s w
------------------------------------------------------------------------------
-- | Main with defaul values (e.g. logging).
--
-- | Serve the snaplet with default settings (logging, port, etc.).
main :: IO ()
main =
  serveSnapletNoArgParsing defaultConfig app
  -- httpServe defaultConfig app
  -- _ <- try $ httpServe defaultConfig app :: IO (Either SomeException ())
-- _ <- try $ httpServe defaultConfig app :: IO (Either SomeException ())
------------------------------------------------------------------------------
-- | Render login form
{- handleLogin :: Maybe T.Text -> Handler App (AuthManager App) () -}
{- handleLogin authError = heistLocal (I.bindSplices errs) $ render "login" -}
{- where -}
{- errs = maybe mempty splice authError -}
{- splice err = "loginError" ## I.textSplice err -}
------------------------------------------------------------------------------
-- | Handle login submit
{- handleLoginSubmit :: Handler App (AuthManager App) () -}
{- handleLoginSubmit = -}
{- loginUser "login" "password" Nothing -}
{- (\_ -> handleLogin err) -}
{- (redirect "/loginInits") -}
{- -- (redirect "/koe") -}
{- -- (redirect "/dist") -}
{- where -}
{- err = Just "Unknown user or password" -}
------------------------------------------------------------------------------
-- | Logs out and redirects the user to the site index.
{- handleLogout :: Handler App (AuthManager App) () -}
{- handleLogout = logout >> redirect "/" -}
------------------------------------------------------------------------------
-- | Handle new user form submit
{- handleNewUser :: Handler App (AuthManager App) () -}
{- handleNewUser = method GET handleForm <|> method POST handleFormSubmit -}
{- where -}
{- handleForm = render "new_user" -}
{- handleFormSubmit = registerUser "login" "password" >> redirect "/dist" -}
------------------------------------------------------------------------------
-- loginInits :: Handler App App ()
{- loginInits :: AppHandler () -}
{- loginInits = do -}
{- withKatip $ logFM InfoS "loginInits, Calling msgHandler" -}
{- redirect "/dist" -}
{- msgHandler -}
| gspia/half-baked | hb3-filesending/hssrc/Main.hs | bsd-2-clause | 11,400 | 0 | 26 | 2,546 | 1,871 | 1,013 | 858 | 141 | 6 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module HackageGit where
import Control.Monad
import Control.Lens hiding ( (<.>) )
import Data.Aeson
import Data.ByteString.Char8 ( ByteString )
import qualified Data.ByteString.Char8 as BS
import Data.ByteString.Lazy.Char8 ( fromStrict )
import Data.Digest.Pure.SHA ( sha256, showDigest )
import Data.Map as Map
import Data.Set as Set
import Data.String
import Data.String.UTF8 ( toString, fromRep )
import Distribution.Nixpkgs.Haskell.OrphanInstances ( )
import Distribution.Package
import Distribution.PackageDescription
import Distribution.PackageDescription.Parse ( parsePackageDescription, ParseResult(..) )
import Distribution.Text
import Distribution.Version
import System.Directory
import System.FilePath
-- | The Hackage database as laid out on disk: package name -> available
-- versions.
type Hackage = Map PackageName (Set Version)
-- | Scan a checked-out all-cabal-files style tree: each top-level
-- subdirectory is a package, each of ITS subdirectories a version.
readHackage :: FilePath -> IO Hackage
readHackage path = getSubDirs path >>= foldM discoverPackageVersions mempty
  where
    discoverPackageVersions :: Hackage -> String -> IO Hackage
    discoverPackageVersions db pkg = do
      vs <- getSubDirs (path </> pkg)
      return (Map.insert (PackageName pkg) (Set.fromList (Prelude.map fromString vs)) db)
-- | List the visible (non-dot) subdirectories of @path@.  Entries that
-- are not directories, plus ".", "..", and any dotfile, are excluded.
getSubDirs :: FilePath -> IO [FilePath]
getSubDirs path = do
    entries <- getDirectoryContents path
    let visible = Prelude.filter notHidden entries
    filterM (\e -> doesDirectoryExist (path </> e)) visible
  where
    -- Total replacement for the original partial @head e /= '.'@ test:
    -- an (impossible, but now harmless) empty entry name is kept instead
    -- of crashing.  Prelude.filter is qualified because Data.Map is
    -- imported unqualified above and also exports 'filter'.
    notHidden ('.':_) = False
    notHidden _       = True
-- | Decode a UTF-8 encoded ByteString into a String via utf8-string.
-- NOTE(review): presumably lenient on invalid byte sequences (no error
-- is raised here) — confirm against the utf8-string version in use.
decodeUTF8 :: ByteString -> String
decodeUTF8 = toString . fromRep
-- | A SHA-256 digest rendered as a hex string.
type SHA256Hash = String
-- | Read and parse @<prefix>/<name>/<version>/<name>.cabal@, returning
-- the parsed package description together with the SHA-256 of the raw
-- file bytes.  Parse failures abort via 'fail' with the file path.
readPackage :: FilePath -> PackageIdentifier -> IO (GenericPackageDescription, SHA256Hash)
readPackage dirPrefix (PackageIdentifier name version) = do
  let cabalFile = dirPrefix </> unPackageName name </> display version </> unPackageName name <.> "cabal"
  buf <- BS.readFile cabalFile
  cabal <- case parsePackageDescription (decodeUTF8 buf) of
             ParseOk _ a -> return a
             ParseFailed err -> fail (cabalFile ++ ": " ++ show err)
  return (cabal, mkSHA256 buf)
-- | SHA-256 of a strict ByteString, rendered as a lowercase hex string.
mkSHA256 :: ByteString -> SHA256Hash
mkSHA256 = showDigest . sha256 . fromStrict
-- | Per-package metadata from the companion @.json@ file; 'declareLenses'
-- turns each field into a lens of the same name.
declareLenses [d|
  data Meta = Meta { hashes :: Map String String
                   , locations :: [String]
                   , pkgsize :: Int
                   }
    deriving (Show)
  |]
-- | Decode 'Meta' from the hackage-security style JSON keys
-- "package-hashes" / "package-locations" / "package-size".
instance FromJSON Meta where
  parseJSON (Object v) = Meta
                         <$> v .: "package-hashes"
                         <*> v .: "package-locations"
                         <*> v .: "package-size"
  parseJSON o = fail ("invalid Cabal metadata: " ++ show o)
-- | Read and decode @<prefix>/<name>/<version>/<name>.json@ into 'Meta';
-- decoding failures abort via 'fail' with the file path and message.
readPackageMeta :: FilePath -> PackageIdentifier -> IO Meta
readPackageMeta dirPrefix (PackageIdentifier name version) = do
  let metaFile = dirPrefix </> unPackageName name </> display version </> unPackageName name <.> "json"
  buf <- BS.readFile metaFile
  case eitherDecodeStrict buf of
    Left msg -> fail (metaFile ++ ": " ++ msg)
    Right x -> return x
| bennofs/cabal2nix | hackage2nix/HackageGit.hs | bsd-3-clause | 2,893 | 0 | 16 | 601 | 794 | 413 | 381 | 64 | 2 |
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Language.Haskell.Liquid.Constraint.Axioms (
expandProofs
-- * Combining proofs
, makeCombineType
, makeCombineVar
) where
import Prelude hiding (error)
import Literal
import Coercion
import DataCon
import CoreSyn
import Type
import TyCon
import TypeRep
import Var
import Name
import NameSet
import Text.PrettyPrint.HughesPJ hiding (first, sep)
import Control.Monad.State
import qualified Data.List as L
import qualified Data.HashMap.Strict as M
import Data.Maybe (fromJust)
import Language.Fixpoint.Types.Names
import Language.Fixpoint.Utils.Files
import qualified Language.Fixpoint.Types as F
import Language.Haskell.Liquid.UX.Tidy (panicError)
import Language.Haskell.Liquid.Types.Visitors (freeVars)
import Language.Haskell.Liquid.Types hiding (binds, Loc, loc, freeTyVars, Def, HAxiom)
import qualified Language.Haskell.Liquid.Types as T
import Language.Haskell.Liquid.WiredIn
import Language.Haskell.Liquid.Types.RefType
import Language.Haskell.Liquid.Types.Visitors hiding (freeVars)
import Language.Haskell.Liquid.GHC.Misc
import Language.Haskell.Liquid.GHC.SpanStack (showSpan)
import Language.Fixpoint.Misc hiding (errorstar)
import Language.Haskell.Liquid.Constraint.ProofToCore
import Language.Haskell.Liquid.Transforms.CoreToLogic
import Language.Haskell.Liquid.Constraint.Types
import System.IO.Unsafe
import Prover.Types (Axiom(..), Query(..))
import qualified Prover.Types as P
import Prover.Solve (solve)
import qualified Data.HashSet as S
-- | Things whose embedded proof terms can be expanded.  The default
-- 'expandProofs' builds the axiom-expansion environment, runs 'expProofs'
-- in its state monad, and copies the final fresh-name index back into the
-- constraint-generation state.  NOTE(review): the @x@ bound in the do
-- block shadows the argument @x@ — intentional here, but easy to misread.
class Provable a where
  expandProofs :: GhcInfo -> [(F.Symbol, SpecType)] -> a -> CG a
  expandProofs info sigs x =
    do (x, s) <- runState (expProofs x) <$> initAEEnv info sigs
       modify $ \st -> st {freshIndex = ae_index s}
       return x
  -- | The workhorse; instances override this (default is the identity).
  expProofs :: a -> Pr a
  expProofs = return
instance Provable CoreBind where
  -- expProofs (NonRec x e) | returnsProof x = (\e -> Rec [(traceShow ("\n\nMake it Rec\n\n" ++ show (F.symbol x)) x,e)]) <$> (addRec (x,e) >> expProofs e)
  -- Proof expansion can make the body refer to its own binder, so a
  -- NonRec is promoted to Rec when the expanded body mentions it.
  expProofs (NonRec x e) =
    do e' <- addRec (x,e) >> expProofs e
       if x `elem` freeVars S.empty e'
         then return $ Rec [(x, e')]
         else return $ NonRec x e'
  expProofs (Rec xes) = Rec <$> (addRecs xes >> mapSndM expProofs xes)
instance Provable CoreExpr where
  -- The first eight equations recognise applications of the special
  -- @auto@ / @cases@ combinators (with or without Tick wrappers around
  -- the function and/or the partial application) and dispatch to the
  -- corresponding expansion with the fuel argument @i@ decoded.
  expProofs ee@(App (App (Tick _ (Var f)) i) e) | isAuto f = grapInt i >>= expandAutoProof ee e
  expProofs ee@(App (App (Var f) i) e) | isAuto f = grapInt i >>= expandAutoProof ee e
  expProofs ee@(App (Tick _ (App (Tick _ (Var f)) i)) e) | isAuto f = grapInt i >>= expandAutoProof ee e
  expProofs ee@(App (Tick _ (App (Var f) i)) e) | isAuto f = grapInt i >>= expandAutoProof ee e
  expProofs ee@(App (App (Tick _ (Var f)) i) e) | isCases f = grapInt i >>= expandCasesProof ee e
  expProofs ee@(App (App (Var f) i) e) | isCases f = grapInt i >>= expandCasesProof ee e
  expProofs ee@(App (Tick _ (App (Tick _ (Var f)) i)) e) | isCases f = grapInt i >>= expandCasesProof ee e
  expProofs ee@(App (Tick _ (App (Var f) i)) e) | isCases f = grapInt i >>= expandCasesProof ee e
  -- Everything else: structural recursion, recording binders/vars in the
  -- expansion environment as scopes are entered.
  expProofs (App e1 e2) = liftM2 App (expProofs e1) (expProofs e2)
  expProofs (Lam x e) = addVar x >> liftM (Lam x) (expProofs e)
  expProofs (Let b e) = do b' <- expProofs b
                           addBind b'
                           liftM (Let b') (expProofs e)
  expProofs (Case e v t alts) = liftM2 (\e -> Case e v t) (expProofs e) (mapM (expProofsCase e) alts)
  expProofs (Cast e c) = liftM (`Cast` c) (expProofs e)
  expProofs (Tick t e) = liftM (Tick t) (expProofs e)
  expProofs (Var v) = return $ Var v
  expProofs (Lit l) = return $ Lit l
  expProofs (Type t) = return $ Type t
  expProofs (Coercion c) = return $ Coercion c
-- | Expand one case alternative.  When scrutinising a variable with a
-- data-constructor pattern, the constructor's refinement (instantiated at
-- the scrutinee and pattern binders) is pushed as an assertion for the
-- duration of the branch, then popped.
expProofsCase :: CoreExpr -> CoreAlt -> Pr CoreAlt
expProofsCase (Var x) (DataAlt c, xs, e)
  = do addVars xs
       t <- L.lookup (symbol c) . ae_sigs <$> get
       addAssert $ makeRefinement t (x:xs)
       res <- liftM (DataAlt c,xs,) (expProofs e)
       rmAssert
       return res
expProofsCase _ (c, xs, e)
  = addVars xs >> liftM (c,xs,) (expProofs e)
instance Provable CoreAlt where
  expProofs (c, xs, e) = addVars xs >> liftM (c,xs,) (expProofs e)
-- | Expand a @cases@ proof: pick the first in-scope variable of an
-- algebraic type and build a case split over its constructors; if none
-- exists, fall back to the original expression unchanged.
expandCasesProof :: CoreExpr -> CoreExpr -> Integer -> Pr CoreExpr
expandCasesProof inite e it
  = do vs <- reverse . ae_vars <$> get
       case L.find (isAlgType . varType) vs of
         Nothing -> return inite
         Just v -> makeCases v inite e it
-- | All data constructors of a variable's (algebraic) type.
-- NOTE(review): partial — 'splitTyConApp' and 'algTyConRhs' assume the
-- type really is an applied algebraic TyCon; callers check 'isAlgType'.
makeDataCons v = data_cons $ algTyConRhs tc
  where
    t = varType v
    tc = fst $ splitTyConApp t
-- | Build @case v of C1 xs1 -> proof1; ...@: per constructor, bring fresh
-- pattern binders into scope, assert the constructor's refinement, run
-- the auto expansion for the branch, then pop the assertion.
makeCases v inite e it = Case (Var v) v (varType v) <$> (mapM go $ makeDataCons v)
  where
    go c = do xs <- makeDataConArgs v c
              addVars xs
              t <- L.lookup (symbol c) . ae_sigs <$> get
              addAssert $ makeRefinement t (v:xs)
              proof <- expandAutoProof inite (e) it
              rmAssert
              return (DataAlt c, xs, proof)
-- | Fresh binders for a constructor's argument types, instantiated at the
-- scrutinee's type arguments.
makeDataConArgs v dc = mapM freshVar ts
  where
    ts = dataConInstOrigArgTys dc ats
    ats = snd $ splitTyConApp $ varType v
-- | Expand an @auto@ proof: gather axioms, in-scope variables, constants
-- and pending assertions from the expansion state, translate the goal to
-- a logic predicate, hand everything to the external prover, and rebuild
-- a Core term from the returned proof steps (falling back to @inite@'s
-- combinator plumbing via 'toCore').
-- NOTE(review): the prover is invoked through 'unsafePerformIO', so the
-- solver runs whenever @sol@ is forced — correctness relies on the query
-- being fully determined by its arguments.
expandAutoProof :: CoreExpr -> CoreExpr -> Integer -> Pr CoreExpr
expandAutoProof inite e it
  = do ams <- ae_axioms <$> get
       vs' <- ae_vars <$> get
       cts <- ae_consts <$> get
       ds <- ae_assert <$> get
       cmb <- ae_cmb <$> get
       lmap <- ae_lmap <$> get
       isHO <- ae_isHO <$> get
       e' <- unANFExpr e
       -- Register every in-scope var in the logic map (result discarded;
       -- the map is threaded through the state).
       foldM (\lm x -> (updateLMap lm (dummyLoc $ F.symbol x) x >> (ae_lmap <$> get))) lmap vs'
       -- Split variables into those read by the goal vs. mere literals,
       -- and candidate constructors (function-typed) vs. value vars.
       let (vs, vlits) = L.partition (`elem` readVars e') $ nub' vs'
       let allvs = nub' ((fst . aname <$> ams) ++ cts ++ vs')
       let (cts', vcts) = L.partition (isFunctionType . varType) allvs
       let usedVs = nub' (vs++vcts)
       env <- makeEnvironment ((L.\\) allvs usedVs) ((L.\\) vlits usedVs)
       ctors <- mapM makeCtor cts'
       pvs <- mapM makeVar usedVs
       le <- makeGoalPredicate e'
       fn <- freshFilePath
       axioms <- makeAxioms
       let sol = unsafePerformIO (solve $ makeQuery fn it isHO le axioms ctors ds env pvs)
       return $ {-
         traceShow (
           "\n\nTo prove\n" ++ show (showpp le) ++
           "\n\nWe need \n" ++ show sol ++
           "\n\nExpr = \n" ++ show (toCore cmb inite sol) ++
           "\n\n"
          ) $ -}
         traceShow "\nexpandedExpr\n" $ toCore cmb inite sol
-- | Deduplicate variables by their fixpoint symbol (not by Var identity).
nub' = L.nubBy (\v1 v2 -> F.symbol v1 == F.symbol v2)
-- TODO: merge this with the Bare.Axiom.hs
updateLMap :: LogicMap -> LocSymbol -> Var -> Pr ()
updateLMap _ _ v | not (isFun $ varType v)
= return ()
where
isFun (FunTy _ _) = True
isFun (ForAllTy _ t) = isFun t
isFun _ = False
updateLMap _ x vv
= insertLogicEnv x' ys (F.eApps (F.EVar $ val x) (F.EVar <$> ys))
where
nargs = dropWhile isClassType $ ty_args $ toRTypeRep $ ((ofType $ varType vv) :: RRType ())
ys = zipWith (\i _ -> symbol (("x" ++ show i) :: String)) [1..] nargs
x' = simpleSymbolVar vv
insertLogicEnv x ys e
= modify $ \be -> be {ae_lmap = (ae_lmap be) {logic_map = M.insert x (LMap x ys e) $ logic_map $ ae_lmap be}}
simpleSymbolVar x = dropModuleNames $ symbol $ showPpr $ getName x
-------------------------------------------------------------------------------
---------------- From Haskell to Prover ------------------------------------
-------------------------------------------------------------------------------
-- | Build the logic environment for a query: known literals (minus those
-- shadowed by the @avs@ variables) plus logic variables for @vs@.
makeEnvironment :: [Var] -> [Var] -> Pr [P.LVar]
makeEnvironment avs vs
  = do lits <- ae_lits <$> get
       -- Hoisted: the symbol list of avs is invariant across the filter,
       -- so compute it once instead of once per literal.
       let avSyms = F.symbol <$> avs
       let lts'   = filter (\(x,_) -> x `notElem` avSyms) (normalize lits)
       let lts1   = [P.Var x s () | (x, s) <- lts']
       lts2 <- mapM makeLVar vs
       return (lts1 ++ lts2)
-- | Assemble a prover query from its already-translated pieces.
makeQuery :: FilePath -> Integer -> Bool -> F.Expr -> [HAxiom] -> [HVarCtor] -> [F.Expr] -> [P.LVar] -> [HVar] -> HQuery
makeQuery fn i isHO p axioms cts ds env vs
  = Query { q_depth  = fromInteger i
          , q_goal   = P.Pred p
          , q_vars   = checkVar <$> vs -- local variables
          , q_ctors  = cts -- constructors: globals with function type
          , q_env    = checkEnv <$> env -- environment: anything else that can appear in the logic
          , q_fname  = fn
          , q_axioms = axioms
          , q_decls  = (P.Pred <$> ds)
          , q_isHO   = isHO
          }
-- | Sanity check: environment entries must have a base sort (panics otherwise).
checkEnv pv@(P.Var x s _)
  | isBaseSort s = pv
  | otherwise    = panic Nothing ("\nEnv:\nNon Basic " ++ show x ++ " :: " ++ show s)
-- | Same check for local query variables.
checkVar pv@(P.Var x s _)
  | isBaseSort s = pv
  | otherwise    = panic Nothing ("\nVar:\nNon Basic " ++ show x ++ " :: " ++ show s)
-- | Turn proof-returning globals into prover axioms; recursively-used ones
-- additionally get a size-decrease guard ('varToPAxiomWithGuard').
makeAxioms =
  do recs <- ae_recs    <$> get
     tce  <- ae_emb     <$> get
     sigs <- ae_sigs    <$> get
     gs   <- ae_globals <$> get
     let (rgs, gs') = L.partition (`elem` (fst <$> recs)) $ filter returnsProof gs
     let as1 = varToPAxiom tce sigs <$> gs'
     let as2 = varToPAxiomWithGuard tce sigs recs <$> rgs
     return (as1 ++ as2)
-- | Re-wrap the tracked ANF bindings around an expression (undo ANF locally).
unANFExpr e = (foldl (flip Let) e . ae_binds) <$> get
-- | Translate a Core expression to a logic predicate; panics on failure.
makeGoalPredicate e =
  do lm  <- ae_lmap <$> get
     tce <- ae_emb  <$> get
     case runToLogic tce lm (ErrOther (showSpan "makeGoalPredicate") . text) (coreToPred e) of
       Left p    -> return p
       Right err -> panicError err
-- | Instantiate the result refinement of a (possibly missing) refined type
-- at the given variables: the refinement's value binder and the type's
-- non-class binders are substituted by the symbols of @xs@, in order.
makeRefinement :: Maybe SpecType -> [Var] -> F.Expr
makeRefinement Nothing  _  = F.PTrue
makeRefinement (Just t) xs = rr
  where trep = toRTypeRep t
        ys   = [x | (x, t') <- zip (ty_binds trep) (ty_args trep), not (isClassType t')]
        rr   = case stripRTypeBase $ ty_res trep of
                Nothing  -> F.PTrue
                Just ref -> let F.Reft(v, r) = F.toReft ref
                                su = F.mkSubst $ zip (v:ys) (F.EVar . F.symbol <$> xs)
                            in F.subst su r
-- | Wrap 'makeCtor'' with the pieces of state it needs; a variable currently
-- in 'ae_vars' is treated as local (no refinement attached).
makeCtor :: Var -> Pr HVarCtor
makeCtor c
  = do tce  <- ae_emb  <$> get
       sigs <- ae_sigs <$> get
       lmap <- ae_lmap <$> get
       lvs  <- ae_vars <$> get
       return $ makeCtor' tce lmap sigs (c `elem` lvs) c
-- | Build a prover constructor. Locals and axiom-mapped variables get a
-- trivial refinement; otherwise the result refinement from the signature is
-- attached, applied to the signature's non-class binders.
-- NOTE(review): the inner bindings shadow @v@ and @x@ (e.g. @F.Reft(v, p)@
-- and the comprehension's @x@) -- keep that in mind when reading the where.
makeCtor' :: F.TCEmb TyCon -> LogicMap -> [(F.Symbol, SpecType)] -> Bool -> Var -> HVarCtor
makeCtor' tce _ _ islocal v | islocal
  = P.VarCtor (P.Var (F.symbol v) (typeSort tce $ varType v) v) [] (P.Pred F.PTrue)
makeCtor' tce lmap sigs _ v
  = case M.lookup v (axiom_map lmap) of
      Nothing -> P.VarCtor (P.Var (F.symbol v) (typeSort tce $ varType v) v) vs r
      Just x  -> P.VarCtor (P.Var x (typeSort tce $ varType v) v) [] (P.Pred F.PTrue)
  where
    x = F.symbol v
    (vs, r) = case L.lookup x sigs of
               Nothing -> ([], P.Pred F.PTrue)
               Just t  -> let trep = toRTypeRep t
                          in case stripRTypeBase $ ty_res trep of
                              Nothing -> ([], P.Pred F.PTrue)
                              Just r  -> let (F.Reft(v, p)) = F.toReft r
                                             xts = [(x,t) | (x, t) <- zip (ty_binds trep) (ty_args trep), not $ isClassType t]
                                             e   = F.mkEApp (dummyLoc x) (F.EVar . fst <$> xts)
                                         in ([P.Var x (rTypeSort tce t) () | (x, t) <- xts], P.Pred $ F.subst1 p (v, e))
-- | Prover variable carrying the GHC 'Var' itself.
makeVar :: Var -> Pr HVar
makeVar v = do {tce <- ae_emb <$> get; return $ makeVar' tce v}
makeVar' tce v = P.Var (F.symbol v) (typeSort tce $ varType v) v
-- | Prover variable carrying only name and sort (unit payload).
makeLVar :: Var -> Pr P.LVar
makeLVar v = do {tce <- ae_emb <$> get; return $ makeLVar' tce v}
makeLVar' tce v = P.Var (F.symbol v) (typeSort tce $ varType v) ()
-- | Like 'varToPAxiom', but for axioms used recursively: the body is guarded
-- by a size-decrease condition over the recursive arguments, so the prover
-- can only apply the axiom at "smaller" instances.
varToPAxiomWithGuard :: F.TCEmb TyCon -> [(Symbol, SpecType)] -> [(Var, [Var])] -> Var -> HAxiom
varToPAxiomWithGuard tce sigs recs v
  = P.Axiom { axiom_name = makeVar' tce v
            , axiom_vars = vs
            , axiom_body = P.Pred $ F.PImp q bd
            }
  where
    q    = makeGuard $ zip (symbol <$> args) xts
    -- fromJust is safe by construction: this function is only called on
    -- variables drawn from the domain of recs (see makeAxioms).
    args = fromJust $ L.lookup v recs
    x    = F.symbol v
    (vs, xts, bd) = case L.lookup x sigs of
            Nothing -> panic Nothing ("haxiomToPAxiom: " ++ show x ++ " not found")
            Just t  -> let trep = toRTypeRep t
                           bd' = case stripRTypeBase $ ty_res trep of
                                  Nothing -> F.PTrue
                                  Just r  -> let (F.Reft(_, p)) = F.toReft r in p
                           xts = filter (not . isClassType . snd) $ zip (ty_binds trep) (ty_args trep)
                           vs' = [P.Var x (rTypeSort tce t) () | (x, t) <- xts]
                       in (vs', xts, bd')
-- | Disjunction of "lexicographic" decrease conditions, built from the size
-- functions of the arguments' type constructors; pairs without a size
-- function are skipped.
-- NOTE(review): if no argument has a size function this yields @F.POr []@
-- (i.e. false), making the guarded axiom vacuous -- confirm intended.
makeGuard :: [(F.Symbol, (F.Symbol, SpecType))] -> F.Expr
makeGuard xs = F.POr $ go [] xs
  where
    go _ []
      = []
    go acc ((x, (x', RApp c _ _ _)):xxs)
      | Just f <- sizeFunction $ rtc_info c
      = (F.PAnd (F.PAtom F.Lt (f x') (f x):acc)) : go (F.PAtom F.Le (f x') (f x):acc) xxs
    go acc (_:xxs)
      = go acc xxs
-- | Turn a refined signature into a prover axiom: the quantified variables
-- are the signature's non-class binders, the body its result refinement.
varToPAxiom :: F.TCEmb TyCon -> [(Symbol, SpecType)] -> Var -> HAxiom
varToPAxiom tce sigs v
  = P.Axiom { axiom_name = makeVar' tce v
            , axiom_vars = vs
            , axiom_body = P.Pred bd
            }
  where
    x = F.symbol v
    (vs, bd) = case L.lookup x sigs of
            Nothing -> panic Nothing ("haxiomToPAxiom: " ++ show x ++ " not found")
            Just t  -> let trep = toRTypeRep t
                           bd' = case stripRTypeBase $ ty_res trep of
                                  Nothing -> F.PTrue
                                  Just r  -> let (F.Reft(_, p)) = F.toReft r in p
                           vs' = [P.Var x (rTypeSort tce t) () | (x, t) <- zip (ty_binds trep) (ty_args trep), not $ isClassType t]
                       in (vs', bd')
-------------------------------------------------------------------------------
------------- Proof State Environment ----------------------------------------
-------------------------------------------------------------------------------
-- | The proof-search monad: plain state over 'AEnv'.
type Pr = State AEnv
-- | Everything the auto-proof expansion needs to carry around.
data AEnv = AE { ae_axioms  :: [T.HAxiom] -- axiomatized functions
               , ae_binds   :: [CoreBind] -- local bindings, tracked so that they are expanded in logic
               , ae_lmap    :: LogicMap -- logical mapping
               , ae_consts  :: [Var] -- Data constructors and imported variables
               , ae_globals :: [Var] -- Global definitions, like axioms
               , ae_vars    :: [Var] -- local variables in scope
               , ae_emb     :: F.TCEmb TyCon -- type constructor information
               , ae_lits    :: [(Symbol, F.Sort)] -- literals
               , ae_index   :: Integer -- unique integer
               , ae_sigs    :: [(Symbol, SpecType)] -- Refined type signatures
               , ae_target  :: FilePath -- file name of target source code
               , ae_recs    :: [(Var, [Var])] -- axioms that are used recursively:
                                              -- these axioms are guarded to used only with "smaller" arguments
               , ae_assert  :: [F.Expr] -- stack of currently-active assertions (see addAssert/rmAssert)
               , ae_cmb     :: CoreExpr -> CoreExpr -> CoreExpr -- how to combine proofs
               , ae_isHO    :: Bool -- allow higher order binders
               }
-- | Build the initial proof state from the checker's global state (which
-- supplies the TyCon embedding, literals, and a fresh-index counter that is
-- bumped here so later fresh names do not collide).
initAEEnv info sigs
  = do tce <- tyConEmbed <$> get
       lts <- lits       <$> get
       i   <- freshIndex <$> get
       modify $ \s -> s{freshIndex = i + 1}
       return $ AE { ae_axioms  = axioms spc
                   , ae_binds   = []
                   , ae_lmap    = logicMap spc
                   , ae_consts  = L.nub vs
                   , ae_globals = L.nub tp
                   , ae_vars    = []
                   , ae_emb     = tce
                   , ae_lits    = wiredSortedSyms ++ lts
                   , ae_index   = i
                   , ae_sigs    = sigs
                   , ae_target  = target info
                   , ae_recs    = []
                   , ae_assert  = []
                   , ae_cmb     = \x y -> (App (App (Var by) x) y)
                   , ae_isHO    = higherorder $ config spc
                   }
  where
    spc = spec info
    vs  = filter validVar (snd <$> freeSyms spc)
    tp  = filter validExp (defVars info)
    isExported = flip elemNameSet (exports $ spec info) . getName
    validVar   = not . canIgnore
    validExp x = validVar x && isExported x
    -- proofs are combined by applying the "combine" function of the proof type
    by     = makeCombineVar $ makeCombineType τProof
    τProof = proofType $ spec info
-- Small state helpers; addAssert/rmAssert maintain a stack discipline.
addBind   b    = modify $ \ae -> ae{ae_binds  = b:ae_binds ae}
addAssert p    = modify $ \ae -> ae{ae_assert = p:ae_assert ae}
-- NOTE(review): 'tail' is partial -- rmAssert crashes if called without a
-- matching addAssert; all visible call sites are properly paired.
rmAssert       = modify $ \ae -> ae{ae_assert = tail $ ae_assert ae}
addRec (x,e)   = modify $ \ae -> ae{ae_recs   = (x, grapArgs e):ae_recs ae}
addRecs xes    = modify $ \ae -> ae{ae_recs   = [(x, grapArgs e) | (x, e) <- xes] ++ ae_recs ae}
-- Ignorable (internal/type) variables are never brought into scope.
addVar x | canIgnore x = return ()
         | otherwise   = modify $ \ae -> ae{ae_vars = x:ae_vars ae}
addVars x = modify $ \ae -> ae{ae_vars = x' ++ ae_vars ae}
  where
    x' = filter (not . canIgnore) x
-- | Bump the state's unique counter and return its new value.
getUniq :: Pr Integer
getUniq = do
    modify bump
    ae_index <$> get
  where
    bump s = s { ae_index = ae_index s + 1 }
-- | Fresh variable of the given type, named @x<n>@ with a unique @n@.
freshVar :: Type -> Pr Var
freshVar t =
  do n <- getUniq
     return $ stringVar ("x" ++ show n) t
-- | Fresh query file path, derived from the target file plus a unique index.
freshFilePath :: Pr FilePath
freshFilePath =
  do fn <- ae_target <$> get
     n  <- getUniq
     return $ (extFileName (Auto $ fromInteger n) fn)
-------------------------------------------------------------------------------
-------------- Playing with Fixpoint ----------------------------------------
-------------------------------------------------------------------------------
-- | Placeholder: every sort is currently accepted as "base" (so the checks in
-- checkEnv/checkVar never fire).
isBaseSort _ = True
-------------------------------------------------------------------------------
-------------- Playing with GHC Core ----------------------------------------
-------------------------------------------------------------------------------
-- hasBaseType = isBaseTy . varType
-- | Is the type (after stripping foralls) a function type?
isFunctionType (FunTy _ _)    = True
isFunctionType (ForAllTy _ t) = isFunctionType t
isFunctionType _              = False
-- | Final result type: strip all foralls and function arrows.
resultType (ForAllTy _ t) = resultType t
resultType (FunTy _ t)    = resultType t
resultType t              = t
-- | Collect the term-level lambda binders of an expression, skipping type
-- binders and class-dictionary binders, looking through lets, and stopping
-- at the first non-lambda, non-let constructor.
grapArgs (Lam b rest)
  | isTyVar b               = grapArgs rest
  | isClassPred (varType b) = grapArgs rest
  | otherwise               = b : grapArgs rest
grapArgs (Let _ rest) = grapArgs rest
grapArgs _            = []
-- | Extract the literal integer "fuel" argument bound to @v@ among the
-- tracked ANF bindings, peeling ticks and applications to reach the literal.
-- Defaults to 2 when the argument is not a variable.
grapInt (Var v)
  = do bs <- ae_binds <$> get
       -- Previously an irrefutable @let (e:_) = ...@, which crashed with an
       -- uninformative pattern-match failure when no binding exists; an
       -- explicit case gives a diagnosable panic instead.
       case [ex | NonRec x ex <- bs, x == v] of
         (e:_) -> return (go e)
         []    -> panic Nothing ("grapInt: no binding found for " ++ showPpr v)
  where
    go (Tick _ e) = go e
    go (App _ l)  = go l
    go (Lit l)    = litToInt l
    go e          = panic Nothing $ ("grapInt called with wrong argument " ++ showPpr e)
    litToInt (MachInt i)   = i
    litToInt (MachInt64 i) = i
    litToInt _             = panic Nothing "litToInt: non integer literal"
grapInt (Tick _ e) = grapInt e
grapInt _ = return 2
-------------------------------------------------------------------------------
-------------------- Combine Proofs ----------------------------------------
-------------------------------------------------------------------------------
-- | Type of the proof-combining function: @tau -> tau -> tau@ for proof type
-- @tau@; panics when no proof type was declared.
makeCombineType Nothing
  = panic Nothing "proofType not found"
makeCombineType (Just τ)
  = FunTy τ (FunTy τ τ)
-- | The wired-in combine-proofs variable at the given type.
makeCombineVar τ = stringVar combineProofsName τ
-------------------------------------------------------------------------------
------------------- Helper Functions ----------------------------------------
-------------------------------------------------------------------------------
-- | Variables the proof machinery never tracks: compiler-internal names and
-- type variables.
canIgnore v = isInternal v || isTyVar v
-- Name-prefix predicates (module qualifiers stripped first).
isAuto  v = isPrefixOfSym "auto"  $ dropModuleNames $ F.symbol v
isCases v = isPrefixOfSym "cases" $ dropModuleNames $ F.symbol v
isProof v = isPrefixOfSym "Proof" $ dropModuleNames $ F.symbol v
-- | Does the variable's (fully applied) result type look like a Proof?
returnsProof :: Var -> Bool
returnsProof = isProof' . resultType . varType
  where
    isProof' (TyConApp tc _) = isProof tc
    isProof' _               = False
-- | Deduplicate and keep only base-sorted literal bindings.
-- NOTE(review): L.nub is O(n^2); fine for the small literal lists here.
normalize xts = filter hasBaseSort $ L.nub xts
  where
    hasBaseSort = isBaseSort . snd
-- | Apply a monadic action to the second component of every pair, leaving
-- the first component untouched.
mapSndM act = mapM (\(k, w) -> fmap (\w' -> (k, w')) (act w))
| ssaavedra/liquidhaskell | src/Language/Haskell/Liquid/Constraint/Axioms.hs | bsd-3-clause | 21,224 | 0 | 24 | 6,665 | 7,223 | 3,707 | 3,516 | 390 | 6 |
{-# LANGUAGE UnicodeSyntax #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
import Test.Tasty (defaultMain, localOption, testGroup)
import Test.Tasty.QuickCheck hiding ((.&.))
import Data.Bits
import Data.Word
import Data.Int
import Data.DoubleWord (BinaryWord(..))
import Types
-- | Round-trip between an Arbitrary-friendly base type α and the tested
-- double-word type τ; the functional dependency lets τ pick α.
class Iso α τ | τ → α where
  fromArbitrary ∷ α → τ
  toArbitrary ∷ τ → α
-- U64 = |Word32|Word32|: high word in the first field, low in the second.
instance Iso Word64 U64 where
  fromArbitrary w = U64 (fromIntegral $ w `shiftR` 32) (fromIntegral w)
  toArbitrary (U64 h l) = fromIntegral h `shiftL` 32 .|. fromIntegral l
-- I64 = |Int32|Word32|: signed high half.
instance Iso Int64 I64 where
  fromArbitrary w = I64 (fromIntegral $ w `shiftR` 32) (fromIntegral w)
  toArbitrary (I64 h l) = fromIntegral h `shiftL` 32 .|. fromIntegral l
-- UU64 = |Word16|Word16|Word32|: a nested 48-bit low part.
instance Iso Word64 UU64 where
  fromArbitrary w = UU64 (fromIntegral $ w `shiftR` 48)
                         (U48 (fromIntegral $ w `shiftR` 32) (fromIntegral w))
  toArbitrary (UU64 h (U48 lh ll)) = fromIntegral h `shiftL` 48
                                 .|. fromIntegral lh `shiftL` 32
                                 .|. fromIntegral ll
-- II64 = |Int16|Word16|Word32|: signed 16-bit high part.
instance Iso Int64 II64 where
  fromArbitrary w = II64 (fromIntegral $ w `shiftR` 48)
                         (U48 (fromIntegral $ w `shiftR` 32) (fromIntegral w))
  toArbitrary (II64 h (U48 lh ll)) = fromIntegral h `shiftL` 48
                                 .|. fromIntegral lh `shiftL` 32
                                 .|. fromIntegral ll
-- | Run the whole property suite (10000 QuickCheck cases per property) over
-- the four double-word layouts.
main = defaultMain
     $ localOption (QuickCheckTests 10000)
     $ testGroup "Tests"
         [ isoTestGroup "|Word32|Word32|" (0 ∷ U64)
         , isoTestGroup "|Int32|Word32|" (0 ∷ I64)
         , isoTestGroup "|Word16|Word16|Word32|" (0 ∷ UU64)
         , isoTestGroup "|Int16|Word16|Word32|" (0 ∷ II64) ]
-- | One test group per type: each class's operations are checked against the
-- reference implementation on the Iso-equivalent base type. The value
-- argument @t@ is only a type proxy (its value is never inspected).
isoTestGroup name t =
  testGroup name
    [ testProperty "Iso" $ prop_conv t
    , testGroup "Eq" [ testProperty "(==)" $ prop_eq t ]
    , testGroup "Ord" [ testProperty "compare" $ prop_compare t ]
    , testGroup "Bounded"
        [ testProperty "minBound" $ prop_minBound t
        , testProperty "maxBound" $ prop_maxBound t ]
    , testGroup "Enum"
        [ testProperty "succ" $ prop_succ t
        , testProperty "pred" $ prop_pred t
        , testProperty "enumFromTo" $ prop_enumFromTo t
        , testProperty "enumFromThen" $ prop_enumFromThen t
        , testProperty "enumFromThenTo" $ prop_enumFromThenTo t ]
    , testGroup "Num"
        [ testProperty "negate" $ prop_negate t
        , testProperty "abs" $ prop_abs t
        , testProperty "signum" $ prop_signum t
        , testProperty "(+)" $ prop_add t
        , testProperty "(-)" $ prop_sub t
        , testProperty "(*)" $ prop_mul t
        , testProperty "fromInteger" $ prop_fromInteger t ]
    , testGroup "Real"
        [ testProperty "toRational" $ prop_toRational t ]
    , testGroup "Integral"
        [ testProperty "toInteger" $ prop_toInteger t
        , testProperty "quotRem" $ prop_quotRem t
        , testProperty "quot" $ prop_quot t
        , testProperty "rem" $ prop_rem t
        , testProperty "divMod" $ prop_divMod t
        , testProperty "div" $ prop_div t
        , testProperty "mod" $ prop_mod t ]
    , testGroup "Bits"
        [ testProperty "complement" $ prop_complement t
        , testProperty "xor" $ prop_xor t
        , testProperty "(.&.)" $ prop_and t
        , testProperty "(.|.)" $ prop_or t
        , testProperty "shiftL" $ prop_shiftL t
        , testProperty "shiftR" $ prop_shiftR t
        , testProperty "rotateL" $ prop_rotateL t
        , testProperty "rotateR" $ prop_rotateR t
        , testProperty "bit" $ prop_bit t
        , testProperty "setBit" $ prop_setBit t
        , testProperty "clearBit" $ prop_clearBit t
        , testProperty "complementBit" $ prop_complementBit t
        , testProperty "testBit" $ prop_testBit t
        , testProperty "popCount" $ prop_popCount t
        ]
    , testGroup "BinaryWord"
        [ testProperty "unwrappedAdd" $ prop_unwrappedAdd t
        , testProperty "unwrappedMul" $ prop_unwrappedMul t
        , testProperty "leadingZeroes" $ prop_leadingZeroes t
        , testProperty "trailingZeroes" $ prop_trailingZeroes t
        , testProperty "allZeroes" $ prop_allZeroes t
        , testProperty "allOnes" $ prop_allOnes t
        , testProperty "msb" $ prop_msb t
        , testProperty "lsb" $ prop_lsb t
        , testProperty "testMsb" $ prop_testMsb t
        , testProperty "testLsb" $ prop_testLsb t
        ]
    ]
-- 'finiteBitSize' only exists from base 4.7; alias the older 'bitSize'.
#if !MIN_VERSION_base(4,7,0)
finiteBitSize = bitSize
#endif
-- Proxy-directed conversions between the base type and the tested type.
toType ∷ Iso α τ ⇒ τ → α → τ
toType _ = fromArbitrary
fromType ∷ Iso α τ ⇒ τ → τ → α
fromType _ = toArbitrary
-- Lift unary/binary/ternary operations on τ to operations on α, so they can
-- be compared against the reference operation on α directly.
withUnary ∷ Iso α τ ⇒ τ → (τ → τ) → α → α
withUnary _ f = toArbitrary . f . fromArbitrary
withUnary' ∷ Iso α τ ⇒ τ → (τ → β) → α → β
withUnary' _ f = f . fromArbitrary
withBinary ∷ Iso α τ ⇒ τ → (τ → τ → τ) → α → α → α
withBinary _ f x y = toArbitrary $ f (fromArbitrary x) (fromArbitrary y)
withBinary' ∷ Iso α τ ⇒ τ → (τ → τ → β) → α → α → β
withBinary' _ f x y = f (fromArbitrary x) (fromArbitrary y)
withTernary' ∷ Iso α τ ⇒ τ → (τ → τ → τ → β) → α → α → α → β
withTernary' _ f x y z =
  f (fromArbitrary x) (fromArbitrary y) (fromArbitrary z)
-- Generic properties: reference operation @f@ on the base type must agree
-- with operation @g@ on the tested type (primed = non-τ result).
propUnary    f g t w        = f w == withUnary t g w
propUnary'   f g t w        = f w == withUnary' t g w
propBinary   f g t w1 w2    = f w1 w2 == withBinary t g w1 w2
propBinary'  f g t w1 w2    = f w1 w2 == withBinary' t g w1 w2
propTernary' f g t w1 w2 w3 = f w1 w2 w3 == withTernary' t g w1 w2 w3
-- Round-trip through the tested type is the identity.
prop_conv t w = toArbitrary (toType t w) == w
prop_eq = propBinary' (==) (==)
prop_compare = propBinary' compare compare
prop_minBound t = minBound == fromType t minBound
prop_maxBound t = maxBound == fromType t maxBound
-- succ/pred are only defined away from the bounds.
prop_succ t w = (w /= maxBound) ==> (succ w == withUnary t succ w)
prop_pred t w = (w /= minBound) ==> (pred w == withUnary t pred w)
-- Enumerations are truncated to 8 elements to keep the tests fast.
prop_enumFromTo =
  propBinary' ((take 8 .) . enumFromTo)
              (((fmap toArbitrary . take 8) .) . enumFromTo)
prop_enumFromThen =
  propBinary' ((take 8 .) . enumFromThen)
              (((fmap toArbitrary . take 8) .) . enumFromThen)
prop_enumFromThenTo =
  propTernary' (((take 8 .) .) . enumFromThenTo)
               ((((fmap toArbitrary . take 8) .) .) . enumFromThenTo)
-- Wide add/mul: both halves of the (carry, result) pair must agree with the
-- reference after conversion.
prop_unwrappedAdd ∷ (Iso α τ, Iso (UnsignedWord α) (UnsignedWord τ),
                     BinaryWord α, BinaryWord τ)
                  ⇒ τ → α → α → Bool
prop_unwrappedAdd t x y = h1 == toArbitrary h2 && l1 == toArbitrary l2
  where (h1, l1) = unwrappedAdd x y
        (h2, l2) = unwrappedAdd (toType t x) (toType t y)
prop_unwrappedMul ∷ (Iso α τ, Iso (UnsignedWord α) (UnsignedWord τ),
                     BinaryWord α, BinaryWord τ)
                  ⇒ τ → α → α → Bool
prop_unwrappedMul t x y = h1 == toArbitrary h2 && l1 == toArbitrary l2
  where (h1, l1) = unwrappedMul x y
        (h2, l2) = unwrappedMul (toType t x) (toType t y)
prop_leadingZeroes = propUnary' leadingZeroes leadingZeroes
prop_trailingZeroes = propUnary' trailingZeroes trailingZeroes
prop_allZeroes t = allZeroes == fromType t allZeroes
prop_allOnes t = allOnes == fromType t allOnes
prop_msb t = msb == fromType t msb
prop_lsb t = lsb == fromType t lsb
prop_testMsb = propUnary' testMsb testMsb
prop_testLsb = propUnary' testLsb testLsb
-- Num / Real / Integral / Bits operations against their references.
prop_negate = propUnary negate negate
prop_abs = propUnary abs abs
prop_signum = propUnary signum signum
prop_add = propBinary (+) (+)
prop_sub = propBinary (-) (-)
prop_mul = propBinary (*) (*)
prop_fromInteger t i = fromInteger i == fromType t (fromInteger i)
prop_toRational = propUnary' toRational toRational
prop_toInteger = propUnary' toInteger toInteger
-- Division properties guard against the undefined zero divisor.
prop_quotRem t n d = (d /= 0) ==> (qr == (fromType t q1, fromType t r1))
  where qr       = quotRem n d
        (q1, r1) = quotRem (fromArbitrary n) (fromArbitrary d)
prop_quot t n d = (d /= 0) ==> (q == fromType t q1)
  where q  = quot n d
        q1 = quot (fromArbitrary n) (fromArbitrary d)
prop_rem t n d = (d /= 0) ==> (r == fromType t r1)
  where r  = rem n d
        r1 = rem (fromArbitrary n) (fromArbitrary d)
prop_divMod t n d = (d /= 0) ==> (qr == (fromType t q1, fromType t r1))
  where qr       = divMod n d
        (q1, r1) = divMod (fromArbitrary n) (fromArbitrary d)
prop_div t n d = (d /= 0) ==> (q == fromType t q1)
  where q  = div n d
        q1 = div (fromArbitrary n) (fromArbitrary d)
prop_mod t n d = (d /= 0) ==> (r == fromType t r1)
  where r  = mod n d
        r1 = mod (fromArbitrary n) (fromArbitrary d)
prop_complement = propUnary complement complement
prop_xor = propBinary xor xor
prop_and = propBinary (.&.) (.&.)
prop_or = propBinary (.|.) (.|.)
-- Shift/rotate amounts are checked over the whole range 0..bitSize (rotation
-- by the full width is the identity; shifts by the full width are defined by
-- Data.Bits).
propOffsets f g t w =
  all (\b → f w b == withUnary t (`g` b) w) [0 .. finiteBitSize t]
prop_shiftL = propOffsets shiftL shiftL
prop_shiftR = propOffsets shiftR shiftR
prop_rotateL = propOffsets rotateL rotateL
prop_rotateR = propOffsets rotateR rotateR
-- Bit indices are only valid in 0..bitSize-1.
prop_bit t = all (\b → bit b == fromType t (bit b)) [0 .. finiteBitSize t - 1]
propBits f g t w =
  all (\b → f w b == withUnary t (`g` b) w) [0 .. finiteBitSize t - 1]
prop_setBit = propBits setBit setBit
prop_clearBit = propBits clearBit clearBit
prop_complementBit = propBits complementBit complementBit
prop_testBit t w =
  all (\b → testBit w b == withUnary' t (`testBit` b) w) [0 .. finiteBitSize t - 1]
prop_popCount = propUnary' popCount popCount
| mvv/data-dword | tests/Tests.hs | bsd-3-clause | 9,678 | 8 | 12 | 2,423 | 3,495 | 1,772 | 1,723 | 204 | 1 |
{-
(c) The University of Glasgow 2006
(c) The AQUA Project, Glasgow University, 1994-1998
UniqFM: Specialised finite maps, for things with @Uniques@.
Basically, the things need to be in class @Uniquable@, and we use the
@getUnique@ method to grab their @Uniques@.
(A similar thing to @UniqSet@, as opposed to @Set@.)
The interface is based on @FiniteMap@s, but the implementation uses
@Data.IntMap@, which is both maintained and faster than the past
implementation (see commit log).
The @UniqFM@ interface maps directly to Data.IntMap, only
``Data.IntMap.union'' is left-biased and ``plusUFM'' right-biased
and ``addToUFM\_C'' and ``Data.IntMap.insertWith'' differ in the order
of arguments of combining function.
-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# OPTIONS_GHC -Wall #-}
module ETA.Utils.UniqFM (
-- * Unique-keyed mappings
UniqFM, -- abstract type
-- ** Manipulating those mappings
emptyUFM,
unitUFM,
unitDirectlyUFM,
listToUFM,
listToUFM_Directly,
listToUFM_C,
addToUFM,addToUFM_C,addToUFM_Acc,
addListToUFM,addListToUFM_C,
addToUFM_Directly,
addListToUFM_Directly,
adjustUFM, alterUFM,
adjustUFM_Directly,
delFromUFM,
delFromUFM_Directly,
delListFromUFM,
plusUFM,
plusUFM_C,
plusUFM_CD,
minusUFM,
intersectUFM,
intersectUFM_C,
foldUFM, foldUFM_Directly,
mapUFM, mapUFM_Directly,
elemUFM, elemUFM_Directly,
filterUFM, filterUFM_Directly, partitionUFM,
sizeUFM,
isNullUFM,
lookupUFM, lookupUFM_Directly,
lookupWithDefaultUFM, lookupWithDefaultUFM_Directly,
eltsUFM, keysUFM, splitUFM,
ufmToSet_Directly,
ufmToList,
-- joinUFM,
pprUniqFM
) where
import ETA.Utils.FastString
import ETA.BasicTypes.Unique ( Uniquable(..), Unique, getKey )
import ETA.Utils.Outputable
--import Compiler.Hoopl hiding (Unique)

import Data.Data
import Data.List (foldl')
import Data.Typeable
import qualified Data.Foldable as Foldable
import qualified Data.IntMap as M
import qualified Data.IntSet as S
import qualified Data.Traversable as Traversable
{-
************************************************************************
* *
\subsection{The signature of the module}
* *
************************************************************************
-}
-- Type signatures for the whole public interface; the implementations (all
-- thin wrappers over Data.IntMap) follow below.
emptyUFM        :: UniqFM elt
isNullUFM       :: UniqFM elt -> Bool
unitUFM         :: Uniquable key => key -> elt -> UniqFM elt
unitDirectlyUFM -- got the Unique already
                :: Unique -> elt -> UniqFM elt
listToUFM       :: Uniquable key => [(key,elt)] -> UniqFM elt
listToUFM_Directly
                :: [(Unique, elt)] -> UniqFM elt
listToUFM_C     :: Uniquable key => (elt -> elt -> elt)
                           -> [(key, elt)]
                           -> UniqFM elt
addToUFM       :: Uniquable key => UniqFM elt -> key -> elt  -> UniqFM elt
addListToUFM   :: Uniquable key => UniqFM elt -> [(key,elt)] -> UniqFM elt
addListToUFM_Directly :: UniqFM elt -> [(Unique,elt)] -> UniqFM elt
addToUFM_Directly
                :: UniqFM elt -> Unique -> elt -> UniqFM elt
addToUFM_C      :: Uniquable key => (elt -> elt -> elt) -- old -> new -> result
                           -> UniqFM elt                -- old
                           -> key -> elt                -- new
                           -> UniqFM elt                -- result
addToUFM_Acc    :: Uniquable key =>
                              (elt -> elts -> elts)     -- Add to existing
                           -> (elt -> elts)             -- New element
                           -> UniqFM elts               -- old
                           -> key -> elt                -- new
                           -> UniqFM elts               -- result
alterUFM        :: Uniquable key =>
                              (Maybe elt -> Maybe elt)  -- How to adjust
                           -> UniqFM elt                -- old
                           -> key                       -- new
                           -> UniqFM elt                -- result
addListToUFM_C  :: Uniquable key => (elt -> elt -> elt)
                           -> UniqFM elt -> [(key,elt)]
                           -> UniqFM elt
adjustUFM       :: Uniquable key => (elt -> elt) -> UniqFM elt -> key -> UniqFM elt
adjustUFM_Directly :: (elt -> elt) -> UniqFM elt -> Unique -> UniqFM elt
delFromUFM      :: Uniquable key => UniqFM elt -> key    -> UniqFM elt
delListFromUFM  :: Uniquable key => UniqFM elt -> [key] -> UniqFM elt
delFromUFM_Directly :: UniqFM elt -> Unique -> UniqFM elt
-- Bindings in right argument shadow those in the left
plusUFM         :: UniqFM elt -> UniqFM elt -> UniqFM elt
plusUFM_C       :: (elt -> elt -> elt)
                -> UniqFM elt -> UniqFM elt -> UniqFM elt
-- | `plusUFM_CD f m1 d1 m2 d2` merges the maps using `f` as the
-- combining function and `d1` resp. `d2` as the default value if
-- there is no entry in `m1` resp. `m2`. The domain is the union of
-- the domains of `m1` and `m2`.
--
-- Representative example:
--
-- @
-- plusUFM_CD f {A: 1, B: 2} 23 {B: 3, C: 4} 42
--    == {A: f 1 42, B: f 2 3, C: f 23 4 }
-- @
plusUFM_CD      :: (elt -> elt -> elt)
                -> UniqFM elt -> elt -> UniqFM elt -> elt -> UniqFM elt
minusUFM        :: UniqFM elt1 -> UniqFM elt2 -> UniqFM elt1
intersectUFM    :: UniqFM elt -> UniqFM elt -> UniqFM elt
intersectUFM_C  :: (elt1 -> elt2 -> elt3)
                -> UniqFM elt1 -> UniqFM elt2 -> UniqFM elt3
foldUFM         :: (elt -> a -> a) -> a -> UniqFM elt -> a
foldUFM_Directly:: (Unique -> elt -> a -> a) -> a -> UniqFM elt -> a
mapUFM          :: (elt1 -> elt2) -> UniqFM elt1 -> UniqFM elt2
mapUFM_Directly :: (Unique -> elt1 -> elt2) -> UniqFM elt1 -> UniqFM elt2
filterUFM       :: (elt -> Bool) -> UniqFM elt -> UniqFM elt
filterUFM_Directly :: (Unique -> elt -> Bool) -> UniqFM elt -> UniqFM elt
partitionUFM    :: (elt -> Bool) -> UniqFM elt -> (UniqFM elt, UniqFM elt)
sizeUFM         :: UniqFM elt -> Int
--hashUFM       :: UniqFM elt -> Int
elemUFM         :: Uniquable key => key -> UniqFM elt -> Bool
elemUFM_Directly:: Unique -> UniqFM elt -> Bool
splitUFM        :: Uniquable key => UniqFM elt -> key -> (UniqFM elt, Maybe elt, UniqFM elt)
   -- Splits a UFM into things less than, equal to, and greater than the key
lookupUFM       :: Uniquable key => UniqFM elt -> key -> Maybe elt
lookupUFM_Directly  -- when you've got the Unique already
                :: UniqFM elt -> Unique -> Maybe elt
lookupWithDefaultUFM
                :: Uniquable key => UniqFM elt -> elt -> key -> elt
lookupWithDefaultUFM_Directly
                :: UniqFM elt -> elt -> Unique -> elt
keysUFM         :: UniqFM elt -> [Unique] -- Get the keys
eltsUFM         :: UniqFM elt -> [elt]
ufmToSet_Directly :: UniqFM elt -> S.IntSet
ufmToList       :: UniqFM elt -> [(Unique, elt)]
{-
************************************************************************
* *
\subsection{Monoid interface}
* *
************************************************************************
-}
-- | Right-biased union monoid (mappend = 'plusUFM').
instance Monoid (UniqFM a) where
    mempty = emptyUFM
    mappend = plusUFM
{-
************************************************************************
* *
\subsection{Implementation using ``Data.IntMap''}
* *
************************************************************************
-}
-- | A map keyed on 'Unique's, represented as an 'M.IntMap' over the
-- uniques' integer keys.
newtype UniqFM ele = UFM (M.IntMap ele)
  deriving (Show, Data, Eq, Functor, Traversable.Traversable,
            Typeable)
deriving instance Foldable.Foldable UniqFM
emptyUFM = UFM M.empty
isNullUFM (UFM m) = M.null m
unitUFM k v = UFM (M.singleton (getKey $ getUnique k) v)
unitDirectlyUFM u v = UFM (M.singleton (getKey u) v)
-- List-building functions use the strict 'foldl'': the lazy 'foldl' builds a
-- thunk chain proportional to the input list before anything is forced.
listToUFM = foldl' (\m (k, v) -> addToUFM m k v) emptyUFM
listToUFM_Directly = foldl' (\m (u, v) -> addToUFM_Directly m u v) emptyUFM
listToUFM_C f = foldl' (\m (k, v) -> addToUFM_C f m k v) emptyUFM
alterUFM f (UFM m) k = UFM (M.alter f (getKey $ getUnique k) m)
addToUFM (UFM m) k v = UFM (M.insert (getKey $ getUnique k) v m)
addListToUFM = foldl' (\m (k, v) -> addToUFM m k v)
addListToUFM_Directly = foldl' (\m (k, v) -> addToUFM_Directly m k v)
addToUFM_Directly (UFM m) u v = UFM (M.insert (getKey u) v m)
-- Arguments of combining function of M.insertWith and addToUFM_C are flipped:
-- addToUFM_C's function is old -> new, M.insertWith's is new -> old.
addToUFM_C f (UFM m) k v =
  UFM (M.insertWith (flip f) (getKey $ getUnique k) v m)
addToUFM_Acc exi new (UFM m) k v =
  UFM (M.insertWith (\_new old -> exi v old) (getKey $ getUnique k) (new v) m)
addListToUFM_C f = foldl' (\m (k, v) -> addToUFM_C f m k v)
adjustUFM f (UFM m) k = UFM (M.adjust f (getKey $ getUnique k) m)
adjustUFM_Directly f (UFM m) u = UFM (M.adjust f (getKey u) m)
delFromUFM (UFM m) k = UFM (M.delete (getKey $ getUnique k) m)
delListFromUFM = foldl' delFromUFM
delFromUFM_Directly (UFM m) u = UFM (M.delete (getKey u) m)
-- M.union is left-biased, plusUFM should be right-biased.
plusUFM (UFM x) (UFM y) = UFM (M.union y x)
  -- Note (M.union y x), with arguments flipped
-- M.union is left-biased, plusUFM should be right-biased.
plusUFM_C f (UFM x) (UFM y) = UFM (M.unionWith f x y)
plusUFM_CD f (UFM xm) dx (UFM ym) dy
  = UFM $ M.mergeWithKey
      (\_ x y -> Just (x `f` y))
      (M.map (\x -> x `f` dy))
      (M.map (\y -> dx `f` y))
      xm ym
minusUFM (UFM x) (UFM y) = UFM (M.difference x y)
intersectUFM (UFM x) (UFM y) = UFM (M.intersection x y)
intersectUFM_C f (UFM x) (UFM y) = UFM (M.intersectionWith f x y)
-- Right folds over elements / key-element pairs.
-- M.fold / M.foldWithKey are deprecated aliases of M.foldr / M.foldrWithKey
-- (and were removed in containers >= 0.6); use the modern names.
foldUFM k z (UFM m) = M.foldr k z m
foldUFM_Directly k z (UFM m) = M.foldrWithKey (k . getUnique) z m
-- Straightforward wrappers over the corresponding Data.IntMap operations;
-- the *_Directly variants take a 'Unique' instead of a 'Uniquable' key.
mapUFM f (UFM m) = UFM (M.map f m)
mapUFM_Directly f (UFM m) = UFM (M.mapWithKey (f . getUnique) m)
filterUFM p (UFM m) = UFM (M.filter p m)
filterUFM_Directly p (UFM m) = UFM (M.filterWithKey (p . getUnique) m)
partitionUFM p (UFM m) = case M.partition p m of
                           (left, right) -> (UFM left, UFM right)
sizeUFM (UFM m) = M.size m
elemUFM k (UFM m) = M.member (getKey $ getUnique k) m
elemUFM_Directly u (UFM m) = M.member (getKey u) m
splitUFM (UFM m) k = case M.splitLookup (getKey $ getUnique k) m of
                       (less, equal, greater) -> (UFM less, equal, UFM greater)
lookupUFM (UFM m) k = M.lookup (getKey $ getUnique k) m
lookupUFM_Directly (UFM m) u = M.lookup (getKey u) m
lookupWithDefaultUFM (UFM m) v k = M.findWithDefault v (getKey $ getUnique k) m
lookupWithDefaultUFM_Directly (UFM m) v u = M.findWithDefault v (getKey u) m
keysUFM (UFM m) = map getUnique $ M.keys m
eltsUFM (UFM m) = M.elems m
ufmToSet_Directly (UFM m) = M.keysSet m
ufmToList (UFM m) = map (\(k, v) -> (getUnique k, v)) $ M.toList m
-- -- Hoopl
-- joinUFM :: JoinFun v -> JoinFun (UniqFM v)
-- joinUFM eltJoin l (OldFact old) (NewFact new) = foldUFM_Directly add (NoChange, old) new
-- where add k new_v (ch, joinmap) =
-- case lookupUFM_Directly joinmap k of
-- Nothing -> (SomeChange, addToUFM_Directly joinmap k new_v)
-- Just old_v -> case eltJoin l (OldFact old_v) (NewFact new_v) of
-- (SomeChange, v') -> (SomeChange, addToUFM_Directly joinmap k v')
-- (NoChange, _) -> (ch, joinmap)
{-
************************************************************************
* *
\subsection{Output-ery}
* *
************************************************************************
-}
instance Outputable a => Outputable (UniqFM a) where
    ppr ufm = pprUniqFM ppr ufm
-- | Render as @[key1 :-> elt1, key2 :-> elt2, ...]@ using the supplied
-- element printer.
pprUniqFM :: (a -> SDoc) -> UniqFM a -> SDoc
pprUniqFM ppr_elt ufm
  = brackets $ fsep $ punctuate comma $
    [ ppr uq <+> ptext (sLit ":->") <+> ppr_elt elt
    | (uq, elt) <- ufmToList ufm ]
| alexander-at-github/eta | compiler/ETA/Utils/UniqFM.hs | bsd-3-clause | 12,500 | 0 | 11 | 3,824 | 3,375 | 1,757 | 1,618 | 189 | 1 |
{-# LANGUAGE RecordWildCards #-}
module Particles where
import Prelude hiding ((.)) -- To use (.) in the scope of Categories instead
import Control.Wire
import FRP.Netwire
import qualified Graphics.Rendering.OpenGL as GL
import qualified Graphics.UI.GLFW as GLFW
import qualified Graphics.Rendering.FTGL as FTGL
import System.Random
import Control.Monad hiding (unless,when)
import Numeric
import Lib
import FRP.Netwire.Input
import FRP.Netwire.Input.GLFW
import Debug.Trace
import Data.Either (rights,isRight)
-- | A single on-screen particle: position plus a scalar used for speed.
data Particle = Particle
  { particleX :: FT  -- ^ x position
  , particleY :: FT  -- ^ y position
  , particleV :: FT  -- ^ speed scalar (drives the particle's motion wire)
  } deriving Show
-- | Emitter path that drifts upward from @(x,y)@ while swaying sideways
-- on a sine wave; yields (position, velocity, rotation, intensity 0.5).
swirlUp :: (HasTime t s, Monad m) => (FT,FT) -> Wire s () m a (Point,Point,FT,FT)
swirlUp (x,y) = (,,,) <$> pos <*> pure (0,0) <*> r <*> pure 0.5
  where pos = (,) <$> ((/4) . sin <$> integral x . pure (pi/2)) <*> integral y . pure 0.4
        r = -pi/2
-- | Emitter path that orbits the origin, rotating at angular rate
-- @raccel@, starting from (0,-0.8); intensity is fixed at 0.5.
circling :: (HasTime t s, Monad m) => FT -> Wire s () m a (Point,Point,FT,FT)
circling raccel = (\r' pos'@(x,y) v -> (rotatePoint (0,0) r' pos',(0,0),r',v)) <$> r <*> pos <*> pure 0.5
  where r = integral 0 . pure raccel
        pos = (,) <$> 0 <*> (-0.8)
--testExpel = testParticle $ expel 4 200 (randomRs (2*pi-0.3,2*pi+0.3) $ mkStdGen 11) thrustParticleSpeed [] . circling (-1)
-- Demo entry points (no type signatures in the original).
testFlyCircle = testParticle $ thruster . circling (-1)
testFlyUp = testParticle $ thruster . swirlUp (0,-1)
-- | Speed over time for an exploding particle: decelerate (or accelerate)
-- from @speed@ by @accel@ until crossing zero, then briefly emit a large
-- value (workaround, see FIXME), then settle at 0.
explosion :: (HasTime t s, Fractional t, Monad m) => FT -> FT -> Wire s () m a FT
explosion speed accel = when (if accel > 0 then (<0) else (>0)) . integral speed . pure accel --> for 0.3 . pure 1000 --> pure 0
-- FIXME replace workaround with inhibit
-- and find out how to remove inhibited
-- wires from a list
--braking :: (HasTime t s, Fractional t, Monad m) => Point -> Point -> Wire s () m a Point
-- | Speed wire for thruster exhaust particles; currently a constant
-- (the explosion-based variant is kept in the trailing comment).
thrustParticleSpeed :: (HasTime t s, Fractional t, Monad m) => FT -> Wire s () m a FT
thrustParticleSpeed v = pure v -- explosion v (-v*2)
-- | Stagger a list of wires: each wire first runs @placeholder@ for a
-- random delay drawn from the interval @(f,t)@, then switches to the wire.
-- NOTE(review): the lambda's @t@ shadows the range's @t@ parameter.
randDelayWiresWith :: (Fractional t, HasTime t s, Monoid e, Monad m) => (FT,FT) -> Wire s e m a b -> [Wire s e m a b] -> [Wire s e m a b]
randDelayWiresWith (f,t) placeholder wires = zipWith
  (\w t -> for t . placeholder --> w)
  wires
  $ map (fromRational . toRational) $ randomRs (f,t) $ mkStdGen 3
--particles :: (HasTime t s, Monad m) => (FT, FT) -> FT -> FT -> Wire s () m a [Point]
--particles origin speed accel = sequenceA $ map (particle origin $ explosion speed accel) [0..360]
-- | Exhaust emitter: up to 3 new particles per step, spread within a
-- ±0.3 rad cone (seeded deterministically with 'mkStdGen' 12).
thruster :: (HasTime t s, Monad m, Fractional t) => Wire s () m (Point,Point,FT,FT) [Particle]
thruster = expel 3 0.3 (mkStdGen 12) thrustParticleSpeed []
-- | Particle emitter. Each step it spawns @newN@ particles (only while the
-- input intensity @a@ is non-zero) at random angles within ±@angleMax@ of
-- the emitter rotation @r@, steps every live particle wire, and drops
-- wires that have inhibited. The fresh seed @g'@ is threaded into the
-- recursive continuation so angles differ between steps.
expel :: (HasTime t s, Monad m, Fractional t, RandomGen g)
  => Int
  -> FT
  -> g
  -> (FT -> Wire s () m a FT)
  -> [Wire s () m a Particle]
  -> Wire s () m (Point,Point,FT,FT) [Particle]
expel newN angleMax seeder speedWire particleWires = mkGen $ \ds (origin,speed,r,a) -> do
  let (angleSeed,g') = random seeder
      angles = take newN $ randomRs (-angleMax,angleMax) $ mkStdGen angleSeed
      -- Lifetime shrinks as the angle moves off-axis.
      newParticle angle = for (fromRational $ toRational $ 1 - abs angle) . particle origin speed (speedWire a) (angle+r)
      newParticles = map newParticle angles
      updatedParticleWires = if a /= 0 then newParticles ++ particleWires else particleWires
  -- The particle wires ignore their input, hence 'Right undefined'.
  (particles,particleWires') <- fmap (unzip.(filter (\(p,_) -> isRight p))) $ sequenceA $ map (\w -> stepWire w ds $ Right undefined) updatedParticleWires
  return (sequenceA particles, expel newN angleMax g' speedWire particleWires')
{--
thruster origin speed r =
particleCone origin
(randDelayWiresWith (0,10) (pure 0) $ map (\v -> explosion v (-v)) ds)
r (0.3)
where n = 567 -- truncate $ 1000 * speed
ds = take n $ randomRs (speed-speed/10,speed+speed/10) $ mkStdGen 9
--}
-- | Spawn one particle per speed wire, at random angles within a cone of
-- width @r@ centred on @offsetR@ (deterministic seed 'mkStdGen' 1).
particleCone :: (HasTime t s, Monad m) => (FT, FT) -> [Wire s () m a FT] -> FT -> FT -> Wire s () m a [Particle]
particleCone origin speedWires offsetR r = sequenceA $ zipWith (particle origin (0,0)) speedWires $ randomRs range (mkStdGen 1)
  where range = (offsetR-r/2,offsetR+r/2)
-- | A single particle starting at @(x,y)@ with base velocity @(vX,vY)@;
-- the speed wire's output is rotated by @r@ and added to the base
-- velocity, then integrated to get the position.
particle :: (HasTime t s, Monad m) => (FT, FT) -> (FT,FT) -> Wire s () m a FT -> FT -> Wire s () m a Particle
particle (x,y) (vX,vY) speedWire r = Particle <$> posX <*> posY <*> speedWire
  where vs = (\(vX',vY') -> (vX+vX',vY+vY')) . (\d -> rotatePoint (0,0) r (d, 0)) <$> speedWire
        posX = integral x . (fst <$> vs)
        posY = integral y . (snd <$> vs)
-- | Restart the given wire from scratch every time it inhibits, forever.
recycle :: Monad m => Wire s e m a b -> Wire s e m a b
recycle w = restarting
  where
    -- Tie the knot once instead of re-applying 'recycle' on each restart.
    restarting = w --> restarting
-- | Draw particles as plain GL points (current colour).
renderParticles :: [Particle] -> IO ()
renderParticles = GL.renderPrimitive GL.Points . mapM_ renderPoint . map (\Particle {..} -> (particleX, particleY))
-- | Draw particles as orange, half-transparent points (thruster exhaust).
-- NOTE(review): @size@ is only referenced by the commented-out quads below,
-- so it is currently an unused binding.
renderThrustParticles :: [Particle] -> IO ()
renderThrustParticles = GL.renderPrimitive GL.Points . mapM_ (\Particle {..} -> do
  GL.color $ GL.Color4 1 0.7 (0 :: GL.GLfloat) 0.5
  renderPoint (particleX,particleY)
  --renderPoint (particleX+size,particleY)
  --renderPoint (particleX+size,particleY+size)
  --renderPoint (particleX,particleY+size)
  )
  where size = 0.007
-- | Draw particles as points whose colour is derived from their position
-- (x drives red, y drives green, blue is their average).
renderXYcoloredParticles :: [Particle] -> IO ()
renderXYcoloredParticles = GL.renderPrimitive GL.Points . mapM_ (\Particle {..} -> do
  let r = (particleX + 1) / 1.7
      g = (particleY + 1) / 1.7
      b = (r + g) / 2
  GL.color $ GL.Color4 r g b 1
  renderPoint (particleX,particleY)
  )
-- | Main loop: step the particle wire against the real-time clock and
-- render each produced frame until the window is asked to close.
--
-- Fixes over the original: the unused @g <- getStdGen@ binding is removed,
-- and the inner @Right@ binder no longer shadows the outer name.
runParticle :: GLFW.Window -> Wire (Timed NominalDiffTime ()) () IO a [Particle] -> IO ()
runParticle window particleWire = runNetwork clockSession_ particleWire
  where runNetwork sess wire = do
          GLFW.pollEvents
          (st,sess') <- stepSession sess
          -- The wire never inspects its input, hence 'Right undefined'.
          (eParticles,wire') <- stepWire wire st $ Right undefined
          shouldClose <- GLFW.windowShouldClose window
          if shouldClose
            then return ()
            else case eParticles of
              -- Inhibited frame: draw nothing, keep stepping.
              Left _ -> runNetwork sess' wire'
              Right ps -> do
                GL.clearColor GL.$= GL.Color4 0.0 0.0 0.0 1
                GL.clear [GL.ColorBuffer]
                renderXYcoloredParticles ps
                GL.loadIdentity
                GL.perspective 1 1 1 100000
                GL.flush
                GLFW.swapBuffers window
                runNetwork sess' wire'
-- Demo entry point (no type signature in the original).
testExplosion v = testParticle $ boom (0,0) v
-- | Explosion at @(x,y)@: three full-circle cones of particles with
-- different counts, speed profiles, and start-delay ranges, merged with
-- 'mconcat'. NOTE(review): the lambdas' @v@ shadows the outer speed @v@.
boom (x,y) v = mconcat $ map cone
  [randDelayWiresWith (0.2,0.6) 0 $ map (\v -> explosion v (-v*3)) $ vs 100
  ,randDelayWiresWith (0.0,0.3) 0 $ map (\v -> explosion v (-v*1)) $ vs 150
  ,randDelayWiresWith (0.0,0.4) 0 $ map (\v -> explosion (v*1.5) (-v)) $ vs 300
  ]
  where --delays = randDelayWiresWith (0.2,3) 0
        cone ws = particleCone (x,y) ws 0 (pi*2)
        vs n = take n $ randomRs (v-v/2,v+v/2) $ mkStdGen 10
-- | Open a GLFW window, run the given particle wire in it, and tear the
-- window down afterwards.
--
-- Fix over the original: window creation returns 'Maybe'; the old
-- irrefutable @(Just window) <- …@ pattern crashed with a bare
-- pattern-match failure and skipped 'GLFW.terminate'. We now handle
-- 'Nothing' explicitly, terminating GLFW before raising a clear error.
testParticle :: Wire (Timed NominalDiffTime ()) () IO a [Particle] -> IO ()
testParticle wire = do
  GLFW.init
  mWindow <- GLFW.createWindow 800 800 "Particle Demo" Nothing Nothing
  case mWindow of
    Nothing -> do
      GLFW.terminate
      error "testParticle: GLFW.createWindow failed"
    Just window -> do
      GLFW.makeContextCurrent (Just window)
      runParticle window wire
      GLFW.destroyWindow window
      GLFW.terminate
| querqueq/netwire01 | src/Particles.hs | bsd-3-clause | 7,457 | 1 | 19 | 2,039 | 2,744 | 1,441 | 1,303 | -1 | -1 |
import Test.HUnit
-- | Trivial HUnit assertion ("HelloWorld" equals itself) used as a smoke
-- test. NOTE(review): no type signature; this is an HUnit 'Assertion'.
testHelloWorld = assertEqual "test HelloWorld" "HelloWorld" "HelloWorld"
main :: IO ()
-- | Run the single assertion directly; a failing 'assertEqual' throws,
-- so the test executable exits non-zero on failure.
-- (Removed the redundant single-statement @do@ block.)
main = testHelloWorld
{-# LANGUAGE OverloadedStrings #-}
module Common.Responses where
import Servant
import Control.Monad.Trans.Class
import Control.Monad.Trans.Either
import Data.ByteString.Char8 (pack)
import AppM
-- | Abort the current handler with an HTTP /303 See Other/ response whose
-- @location@ header points at @url@. Short-circuits via 'left', so nothing
-- after the call in the handler runs.
redirect :: String -> AppM ()
redirect url = lift $ left $ err303 { errHeaders = [("location", pack url)] }
| hectorhon/autotrace2 | app/Common/Responses.hs | bsd-3-clause | 306 | 0 | 10 | 42 | 88 | 53 | 35 | 9 | 1 |
{-# LANGUAGE OverloadedStrings, NoImplicitPrelude #-}
module Client where
import BasePrelude hiding (catch)
import Control.Monad.Catch
import qualified Data.Text as T
import qualified Network.WebSockets as WS
import System.IO
import Text.Printf
-- | Demo websocket client: sends a handshake plus a few position updates,
-- waits five seconds, sends two more, then kills the reader thread.
-- When @shouldPong@ is 'False', incoming pings are logged but not answered.
--
-- Bug fix: the original used a non-exhaustive
-- @case shouldPong of True -> …@; with @shouldPong = False@ the first ping
-- raised a pattern-match failure that the 'net' handler (which only
-- matches 'ThreadKilled') did not catch, killing the reader thread.
application :: Bool -> WS.ClientApp ()
application shouldPong conn = do
  (pingPongThread, send) <- makePingPongThread
  send "{\"email\": \"%s\"}"
  send "{\"player\": {\"email\": \"%s\"}, \"x\": 1.0, \"y\": 1.0}"
  send "{\"player\": {\"email\": \"%s\"}, \"x\": 0.0, \"y\": 0.0}"
  threadDelay $ 5 * 10 ^ 6
  send "{\"player\": {\"email\": \"%s\"}, \"x\": 0.5, \"y\": 0.5}"
  send "{\"player\": {\"email\": \"%s\"}, \"x\": 0.7, \"y\": 0.7}"
  killThread pingPongThread
  putStrLn "+ Good night"
  where
    -- Substitute the reader thread's id into the template and send it.
    makeSend :: ThreadId -> String -> IO ()
    makeSend pptid t = do
      let t' = printf t (show pptid)
      putStrLn ("+ Sending " <> t')
      WS.sendTextData conn (T.pack t')
    -- Fork a loop logging every incoming message; optionally answer pongs.
    makePingPongThread = do
      thread <- forkIO $ (`catch` net) . forever $ do
        msg <- WS.receiveData conn
        putStrLn ("+ Pinged: " <> show (msg :: T.Text))
        if shouldPong
          then do
            WS.sendTextData conn ("{\"pong\": true}" :: T.Text)
            putStrLn ("+ Ponged")
          else return ()
      return (thread, makeSend thread)
    -- Swallow the ThreadKilled we raise ourselves at shutdown.
    -- NOTE(review): other async exceptions still escape unmatched —
    -- confirm that is intended.
    net ThreadKilled =
      return ()
-- | Connect the demo 'application' to a server on @localhost:9160@.
-- Line-buffers stdout first so the log interleaves sensibly.
client :: Bool -> IO ()
client shouldPong = do
  hSetBuffering stdout LineBuffering
  WS.runClient "localhost" 9160 "/" (application shouldPong)
| trello/staunton | server/Client.hs | bsd-3-clause | 1,507 | 0 | 18 | 360 | 404 | 199 | 205 | 38 | 1 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
module TestServerAPI where
import Control.Applicative
import Control.Monad.Catch.Pure
import Control.Monad.Reader
import Data.ByteString (ByteString)
import qualified Data.ByteString.Lazy as LB
import qualified Data.HashMap.Strict as HashMap
import Data.Maybe
import Network.HTTP.Date
import Network.HTTP.Types
import Webcrank
import Webcrank.ServerAPI
import Webcrank.ServerAPI.WebcrankT
-- | A fake inbound HTTP request used to drive the resource under test.
data Req = Req
  { reqMethod :: Method       -- ^ request method
  , reqURI :: ByteString      -- ^ full request URI
  , reqHeaders :: HeadersMap  -- ^ header name -> values
  , reqTime :: HTTPDate       -- ^ wall-clock time of the request
  } deriving Show
-- | The observable HTTP response: status, headers and optional body.
data Res = Res
  { resStatus :: Status
  , resHeaders :: HeadersMap
  , resBody :: Maybe LB.ByteString
  } deriving (Show, Eq)
-- | Default test request: GET http://example.com with no headers.
-- The timestamp is 1994-11-15 08:12:31, a Tuesday ('hdWkday' 2).
req :: Req
req = Req
  { reqMethod = methodGet
  , reqURI = "http://example.com"
  , reqHeaders = HashMap.empty
  , reqTime = defaultHTTPDate
    { hdYear = 1994
    , hdMonth = 11
    , hdDay = 15
    , hdHour = 8
    , hdMinute = 12
    , hdSecond = 31
    , hdWkday = 2
    }
  }
-- | Default expected response: 200 OK, no headers, no body.
res :: Res
res = Res ok200 HashMap.empty Nothing
-- | Base test monad: a pure 'Reader' of the fake request with 'CatchT'
-- layered on top for exception capture.
type TestState = CatchT (Reader Req)
-- | The webcrank monad under test, instantiated over 'TestState'.
type TestCrank = WebcrankT TestState
-- | Run a 'TestCrank' action against a resource, with the given request data.
runTestCrank :: TestCrank a -> Resource TestCrank -> ReqData -> TestState (a, ReqData, LogData)
runTestCrank a r = runWebcrankT a testAPI r
-- | A 'ServerAPI' whose accessors simply read fields of the 'Req' held in
-- the underlying 'Reader'. Header lookup returns the first value, if any.
testAPI :: ServerAPI TestCrank
testAPI = ServerAPI
  { srvGetRequestMethod = lift $ asks reqMethod
  , srvGetRequestURI = lift $ asks reqURI
  , srvGetRequestHeader = \h -> lift $ asks ((listToMaybe =<<) . HashMap.lookup h . reqHeaders)
  , srvGetRequestTime = lift $ asks reqTime
  }
-- | Drive a full request through the resource and return the response.
-- Any exception captured by 'CatchT' is re-raised with 'error' — fine for
-- tests, where it surfaces as a failure.
handleTestReq :: Resource TestCrank -> Req -> Res
handleTestReq r rq = runReader run rq where
  run = handleE <$> run'
  handleE = \case
    Left e -> error $ show e
    Right (s, hs, b) -> Res s hs b
  run' = runCatchT (handleRequest (\a -> runTestCrank a r newReqData))
| webcrank/webcrank.hs | test/TestServerAPI.hs | bsd-3-clause | 1,845 | 0 | 14 | 386 | 550 | 317 | 233 | 58 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
module Main where
import Control.Monad.Trans.Except
import Data.Aeson
import Data.Text
import Data.Text.IO as TIO
import GHC.Generics
import Network.Wai
import Network.Wai.Handler.Warp
import Network.Wai.Middleware.Cors
import Servant
import Servant.Elm
import System.Environment (getArgs)
-- | Two endpoints: @GET /users@ returning a JSON list, and
-- @GET /discovery/:id@ returning a single JSON 'Discovery'.
type UserAPI = "users" :> Get '[JSON] [User]
          :<|> "discovery" :> Capture "id" Int :> Get '[JSON] Discovery
-- | A user record as exposed over the API.
data User = User
  { name :: String
  , age :: Int
  } deriving (Eq, Show, Generic)
-- | A discovery record as exposed over the API.
data Discovery = Discovery
  { details :: String
  , year :: Int
  } deriving (Eq, Show, Generic)
-- JSON encodings come from the Generic instances.
instance ToJSON User
instance ToJSON Discovery
-- | Handler for @GET /users@: a fixed demo list.
users :: ExceptT ServantErr IO [User]
users = return [ User "Isaac Newton" 372
               , User "Albert Einstein" 136
               ]
-- | Handler for @GET /discovery/:id@: a stub record carrying the given id.
discovery :: Int -> ExceptT ServantErr IO Discovery
discovery = return . Discovery "The thing"
-- | Handlers combined in the same order as the 'UserAPI' alternatives.
server :: Server UserAPI
server = users :<|> discovery
-- | Type-level proxy for the API, used by 'serve' and 'elmForAPI'.
userAPI :: Proxy UserAPI
userAPI = Proxy
-- | WAI application with permissive CORS (demo only).
app :: Application
app = simpleCors $ serve userAPI server
-- | Write the generated Elm client to the file named by the first CLI
-- argument, then serve the API on port 8081.
--
-- Fix over the original: @outfile:_ <- getArgs@ was a partial pattern that
-- died with a bare pattern-match failure when no argument was given; we
-- now report a usage error instead.
main :: IO ()
main = do
  args <- getArgs
  case args of
    [] -> error "usage: pass the Elm output file as the first argument"
    outfile : _ -> do
      Prelude.putStrLn $ "Writing elm to " ++ outfile
      TIO.writeFile outfile (elmForAPI userAPI)
      Prelude.putStrLn "Listening on http://localhost:8081/"
      run 8081 app
| purcell/servant-elm | example/Main.hs | bsd-3-clause | 1,688 | 0 | 11 | 448 | 409 | 224 | 185 | 50 | 1 |
-- | Processing code writer monad.
module Graphics.Web.Processing.Core.Monad (
ProcM
, runProcM, execProcM
, runProcMWith
, ProcMonad (..)
, readArrayVar
, writeArrayVar
, newVarNumber
, getVarNumber
, setVarNumber
) where
import Control.Arrow (second)
import Control.Monad.Trans.Class
import Control.Monad.Trans.Writer.Strict
import Control.Monad.Trans.State.Strict
import Graphics.Web.Processing.Core.Primal
import Control.Applicative (Applicative (..))
import Data.Text (Text)
import Data.Monoid ((<>))
import Data.String (fromString)
-- | Processing script producer monad. The context @c@ indicates the context
-- of the underlying 'ProcCode'. This context restricts the use of certain
-- commands only to places where they are expected.
--
-- The commands that you can run under this monad are mostly defined in
-- "Graphics.Web.Processing.Interface".
--
-- Once you have all the commands you want, use 'runProcM' or 'execProcM'
-- to generate the corresponding Processing code under the 'ProcCode' type.
-- The 'Int' state is the fresh-variable counter; generated code
-- accumulates in the 'Writer' layer (see the comment below).
newtype ProcM c a = ProcM { unProcM :: StateT Int (Writer (ProcCode c)) a }
{- ProcM monad definition
On the inside, ProcM is a monad which stores both a counter and some
processing code. The purpose of the counter is to give each variable
an unique name. Using an inner writer monad, using 'tell', we append
processing code. Each time we append the creation of a new var, we
generate the name of that variable depending on the state of the
counter. For example, if the counter is in 2, the variable will be
named "v_2" (see 'intVarNumber'). The context of the ProcCode stored
in the inner writer monad is propagated to the ProcM monad.
-}
-- | Generate Processing code using the 'ProcM' monad, starting the
-- variable counter at zero. The code output is reduced before returning
-- (see 'runProcMWith').
runProcM :: ProcM c a -> (a,ProcCode c)
runProcM = runProcMWith 0
-- | Run a 'ProcM' computation with the fresh-variable counter starting at
-- @n@. The generated Processing code is passed through 'reduce' before
-- being returned alongside the computation's result.
runProcMWith :: Int -> ProcM c a -> (a,ProcCode c)
runProcMWith n m = second reduce (runWriter (evalStateT (unProcM m) n))
-- | Generate Processing code using the 'ProcM' monad, discarding the final
-- value. See 'runProcM' for the variant that keeps the result.
--
-- > execProcM = snd . runProcM
--
execProcM :: ProcM c a -> ProcCode c
execProcM = snd . runProcM
-- The Functor/Applicative/Monad instances simply delegate to the
-- underlying @StateT Int (Writer …)@ stack via the 'ProcM' wrapper.
instance Functor (ProcM c) where
  fmap f (ProcM w) = ProcM $ fmap f w
instance Applicative (ProcM c) where
  pure x = ProcM $ pure x
  pf <*> p = ProcM $ unProcM pf <*> unProcM p
instance Monad (ProcM c) where
  return = pure
  (ProcM w) >>= f = ProcM $ w >>= unProcM . f
-- | Add @1@ to the variable counter and return the result (i.e. the
-- post-increment value).
newVarNumber :: ProcM c Int
newVarNumber = ProcM $ modify (+1) >> get
-- | Get the current variable number.
getVarNumber :: ProcM c Int
getVarNumber = ProcM get
-- | Set the current variable number.
setVarNumber :: Int -> ProcM c ()
setVarNumber = ProcM . put
-- | Name for the variable with the given counter value, e.g. @v_2@.
intVarName :: Int -> Text
intVarName n = "v_" <> fromString (show n)
-- Processing Monad class
-- | Types in this instance form a monad when they are applied
-- to a context @c@. They are used to write Processing
-- code.
class ProcMonad m where
  -- | Internal function to process commands in the target monad.
  commandM :: Text -> [ProcArg] -> m c ()
  -- | Internal function to process assignments in the target monad.
  assignM :: ProcAssign -> m c ()
  -- | Internal function to process variable creations in the target monad.
  createVarM :: ProcAssign -> m c ()
  -- | Internal function to process array variable creations in the target monad.
  createArrayVarM :: Text -> ProcList -> m c ()
  -- | Write a comment in the code.
  writeComment :: Text -> m c ()
  -- | Conditional execution. The results of both branches are discarded
  -- (the return type is @m c ()@); only their emitted code matters.
  iff :: Proc_Bool -- ^ Condition.
      -> m c a -- ^ Execution when the condition is 'true'.
      -> m c b -- ^ Execution when the condition is 'false'.
      -> m c ()
  -- | Lift a 'ProcM' computation.
  liftProc :: ProcM c a -> m c a
  -- | Create a new variable with a starting value.
  newVar :: ProcType a => a -> m Preamble (Var a)
  -- | Create a new array variable with a starting list of values.
  newArrayVar :: ProcType a => [a] -> m Preamble (ArrayVar a)
  -- | Read a variable.
  readVar :: ProcType a => Var a -> m c a
  -- | Write a new value to a variable.
  writeVar :: ProcType a => Var a -> a -> m c ()
-- | When using this instance, please, be aware of the
-- behavior of 'readVar'.
--
-- /It does not matter when the variable is read/.
-- The result will /always/ hold the last value asigned to the variable.
-- For example, this code
--
-- > v <- newVar 10
-- > ten <- readVar v
-- > writeVar v 20
-- > point (10,ten)
--
-- will draw a point at (10,20).
instance ProcMonad ProcM where
  -- commandM, assignM, createVarM, createArrayVarM and writeComment
  -- send, using 'tell', to the inner writer monad.
  commandM n as = ProcM $ lift $ tell $ Command n as
  assignM = ProcM . lift . tell . Assignment
  createVarM = ProcM . lift . tell . CreateVar
  createArrayVarM n xs = ProcM $ lift $ tell $ CreateArrayVar n xs
  writeComment = ProcM . lift . tell . Comment
  -- Conditionals are a bit trickier. We need to make sure that
  -- the variable number traverses the conditional and keeps any
  -- modifications performed inside the conditional.
  iff b (ProcM e1) (ProcM e2) = ProcM $ do
    i0 <- get
    -- Run each branch separately to capture its code; thread the counter
    -- through the first branch into the second so names stay unique.
    let (i1,c1) = runWriter $ execStateT e1 i0
        (i2,c2) = runWriter $ execStateT e2 i1
    put i2
    lift $ tell $ Conditional b c1 c2
  -- The method liftProc is useful for other monads, like EventM
  -- or ScriptM that are built in top of ProcM.
  liftProc = id
  -- Create a new variable, automatically assigning a name depending
  -- on the current variable number.
  newVar x = do
    n <- newVarNumber
    let v = intVarName n
    createVarM (proc_assign v x)
    return $ varFromText v
  newArrayVar xs = do
    n <- newVarNumber
    let v = intVarName n
    createArrayVarM v $ proc_list xs
    return $ arrayVarFromText (length xs) v
  readVar = return . proc_read
  writeVar v x = assignM $ proc_assign (varName v) x
-- | Read a component of an array variable. A static bounds check is only
-- possible when the index is a literal ('Proc_Int'); out-of-bounds
-- literals call 'fail' with a descriptive message. Dynamic indices are
-- passed through unchecked.
readArrayVar :: (ProcMonad m, Monad (m c), ProcType a) => ArrayVar a -> Proc_Int -> m c a
readArrayVar v n =
  case n of
    Proc_Int i -> let s = arraySize v
                  in if (i < 0) || (i >= s)
                        then fail $ "readArrayVar: index out of bounds.\nArray size: "
                                 ++ show s
                                 ++ ".\nIndex given: "
                                 ++ show i
                                 ++ ".\nRemember that indices start from 0."
                        else readVar $ arrayVarToVar v n
    _ -> readVar $ arrayVarToVar v n
-- | Write a value into one component of an array variable. No bounds
-- check is performed; the component is addressed via 'arrayVarToVar'.
writeArrayVar :: (ProcMonad m, ProcType a) => ArrayVar a -> Proc_Int -> a -> m c ()
writeArrayVar v i = writeVar (arrayVarToVar v i)
| Daniel-Diaz/processing | Graphics/Web/Processing/Core/Monad.hs | bsd-3-clause | 6,847 | 0 | 15 | 1,586 | 1,453 | 770 | 683 | 96 | 3 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecordWildCards #-}
#if SHOW_INTERNAL
{-# LANGUAGE StandaloneDeriving #-}
#endif
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS_HADDOCK hide #-}
#include "thyme.h"
#if HLINT
#include "cabal_macros.h"
#endif
module Data.Thyme.Calendar.Internal where
import Prelude
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative
#endif
import Control.Arrow
import Control.DeepSeq
import Control.Lens
import Control.Monad
import Data.AffineSpace
import Data.Bits
import Data.Data
import Data.Hashable
import Data.Int
import Data.Ix
import Data.Thyme.Format.Internal
#if __GLASGOW_HASKELL__ == 704
import qualified Data.Vector.Generic
import qualified Data.Vector.Generic.Mutable
#endif
import qualified Data.Vector.Unboxed as VU
import Data.Vector.Unboxed.Deriving
import GHC.Generics (Generic)
import System.Random
import Test.QuickCheck hiding ((.&.))
-- | A duration/count of years. Plain 'Int' alias — no newtype safety.
type Years = Int
-- | A duration/count of months. Plain 'Int' alias — no newtype safety.
type Months = Int
-- | A duration/count of days. Plain 'Int' alias — no newtype safety.
type Days = Int
-- | A calendar-agnostic day, internally represented as a count of days
-- since /1858-11-17/, the
-- <https://en.wikipedia.org/wiki/Julian_day#Variants Modified Julian Day>
-- (MJD) epoch.
--
-- To convert a 'Day' to the corresponding 'YearMonthDay' in the W_GREGORIAN
-- calendar, see 'gregorian'.
--
-- @
-- > 'gregorian' 'Control.Lens.#' 'YearMonthDay' 2016 3 1
-- 2016-03-01
-- @
--
-- 'Day' is an instance of 'AffineSpace' where @'Diff' 'Day' = 'Days'@, so
-- arithmetic on 'Day' and 'Days' can be performed with the '.-.', '.+^',
-- and '.-^' operators.
--
-- @
-- > 'gregorian' 'Control.Lens.#' 'YearMonthDay' 2016 3 1 '.-.' 'gregorian' 'Control.Lens.#' 'YearMonthDay' 2016 2 1
-- 29
-- @
--
-- @
-- > 'gregorian' 'Control.Lens.#' 'YearMonthDay' 2016 3 1 '.-^' 1
-- 2016-02-29
-- @
--
-- Other ways of viewing a 'Day' include 'ordinalDate', and 'weekDate'.
newtype Day = ModifiedJulianDay
    { toModifiedJulianDay :: Int
      -- ^ Days since the MJD epoch, 1858-11-17.
    } deriving (INSTANCES_NEWTYPE, CoArbitrary)
instance AffineSpace Day where
    type Diff Day = Days
    {-# INLINE (.-.) #-}
    -- Difference between two dates, in days.
    (.-.) = \ (ModifiedJulianDay a) (ModifiedJulianDay b) -> a - b
    {-# INLINE (.+^) #-}
    -- Advance a date by a (possibly negative) number of days.
    (.+^) = \ (ModifiedJulianDay a) d -> ModifiedJulianDay (a + d)
-- | Convert between a 'Day' and the corresponding count of days from
-- 1858-11-17, the MJD epoch.
--
-- @
-- 'modifiedJulianDay' = 'iso' 'toModifiedJulianDay' 'ModifiedJulianDay'
-- @
--
-- @
-- > 'modifiedJulianDay' 'Control.Lens.#' 0
-- 1858-11-17
-- > 'gregorian' 'Control.Lens.#' 'YearMonthDay' 2016 3 1 '&' 'modifiedJulianDay' '%~' 'subtract' 1
-- 2016-02-29
-- @
{-# INLINE modifiedJulianDay #-}
-- A true isomorphism: just wrap/unwrap the newtype.
modifiedJulianDay :: Iso' Day Int
modifiedJulianDay = iso toModifiedJulianDay ModifiedJulianDay
-- | Conversion between a W_GREGORIAN 'OrdinalDate' and the corresponding
-- 'YearMonthDay'.
--
-- @
-- > 'OrdinalDate' 2016 32 '^.' 'yearMonthDay'
-- 'YearMonthDay' {ymdYear = 2016, ymdMonth = 2, ymdDay = 1}
-- @
--
-- @
-- > 'yearMonthDay' 'Control.Lens.#' 'YearMonthDay' 2016 2 1
-- 'OrdinalDate' {odYear = 2016, odDay = 32}
-- @
{-# INLINE yearMonthDay #-}
yearMonthDay :: Iso' OrdinalDate YearMonthDay
yearMonthDay = iso fromOrdinal toOrdinal where
    -- Split the day-of-year into month/day using the leap-aware table.
    {-# INLINEABLE fromOrdinal #-}
    fromOrdinal :: OrdinalDate -> YearMonthDay
    fromOrdinal (OrdinalDate y yd) = YearMonthDay y m d where
        MonthDay m d = yd ^. monthDay (isLeapYear y)
    -- Rebuild the day-of-year from month/day of the same year.
    {-# INLINEABLE toOrdinal #-}
    toOrdinal :: YearMonthDay -> OrdinalDate
    toOrdinal (YearMonthDay y m d) = OrdinalDate y $
        monthDay (isLeapYear y) # MonthDay m d
-- | Conversion between a 'Day' and its 'YearMonthDay'.
--
-- @
-- 'gregorian' = 'ordinalDate' . 'yearMonthDay'
-- @
--
-- @
-- > 'ModifiedJulianDay' 0 '^.' 'gregorian'
-- 'YearMonthDay' {ymdYear = 1858, ymdMonth = 11, ymdDay = 17}
-- @
--
-- @
-- > 'gregorian' 'Control.Lens.#' 'YearMonthDay' 1858 11 17
-- 1858-11-17
-- @
{-# INLINE gregorian #-}
-- Composition of the two conversions documented above.
gregorian :: Iso' Day YearMonthDay
gregorian = ordinalDate . yearMonthDay
-- | Conversion between a 'YearMonthDay' and the corresponding 'Day'.
-- Returns 'Nothing' for invalid input.
--
-- @
-- > 'gregorianValid' ('YearMonthDay' 2015 2 28)
-- 'Just' 2015-02-28
-- @
--
-- @
-- > 'gregorianValid' ('YearMonthDay' 2015 2 29)
-- 'Nothing'
-- @
{-# INLINEABLE gregorianValid #-}
-- 'monthDayValid' rejects impossible month/day combinations; on success
-- the validated day-of-year is folded back into a 'Day'.
gregorianValid :: YearMonthDay -> Maybe Day
gregorianValid (YearMonthDay y m d) = review ordinalDate . OrdinalDate y
    <$> monthDayValid (isLeapYear y) (MonthDay m d)
-- | Shows a 'Day' in
-- <https://en.wikipedia.org/wiki/ISO_8601#Calendar_dates ISO 8601>
-- /YYYY-MM-DD/ format.
--
-- See "Data.Thyme.Format" for other possibilities.
{-# INLINEABLE showGregorian #-}
-- shows02 presumably zero-pads to two digits (defined in
-- "Data.Thyme.Format.Internal") — giving YYYY-MM-DD output.
showGregorian :: Day -> String
showGregorian (view gregorian -> YearMonthDay y m d) =
    showsYear y . (:) '-' . shows02 m . (:) '-' . shows02 d $ ""
#if SHOW_INTERNAL
deriving instance Show Day
#else
instance Show Day where show = showGregorian
#endif
------------------------------------------------------------------------
-- | Calendar year. Plain 'Int' alias — no newtype safety.
type Year = Int
-- | Calendar month. /January = 1/
type Month = Int
-- | Calendar day-of-month, starting from /1/.
type DayOfMonth = Int
-- | A strict triple of a 'Year', a 'Day', and a 'Month'.
data YearMonthDay = YearMonthDay
    { ymdYear :: {-# UNPACK #-}!Year
    , ymdMonth :: {-# UNPACK #-}!Month
    , ymdDay :: {-# UNPACK #-}!DayOfMonth
    } deriving (INSTANCES_USUAL, Show)
-- LENS is a CPP macro (presumably from thyme.h) generating a lens per field.
LENS(YearMonthDay,ymdYear,Year)
LENS(YearMonthDay,ymdMonth,Month)
LENS(YearMonthDay,ymdDay,DayOfMonth)
instance Hashable YearMonthDay
instance NFData YearMonthDay
------------------------------------------------------------------------
-- | Is it a leap year according to the W_GREGORIAN calendar?
--
-- @y .&. 3 == 0@ tests divisibility by 4; century years (@r100 == 0@)
-- additionally require the century count divisible by 4 (i.e. year
-- divisible by 400, via @q100 .&. 3 == 0@).
isLeapYear :: Year -> Bool
isLeapYear y = y .&. 3 == 0 && (r100 /= 0 || q100 .&. 3 == 0) where
    (q100, r100) = y `quotRem` 100
-- | The day of the year, with /1 = January 1st/.
type DayOfYear = Int
-- | An
-- <https://en.wikipedia.org/wiki/ISO_8601#Ordinal_dates ISO 8601 ordinal date>.
data OrdinalDate = OrdinalDate
    { odYear :: {-# UNPACK #-}!Year
    , odDay :: {-# UNPACK #-}!DayOfYear
    } deriving (INSTANCES_USUAL, Show)
LENS(OrdinalDate,odYear,Year)
LENS(OrdinalDate,odDay,DayOfYear)
instance Hashable OrdinalDate
instance NFData OrdinalDate
-- | Conversion between the MJD 'Day' and 'OrdinalDate'.
--
-- @
-- > 'ordinalDate' 'Control.Lens.#' 'OrdinalDate' 2016 32
-- 2016-02-01
-- @
--
-- @
-- > 'toModifiedJulianDay' $ 'ordinalDate' 'Control.Lens.#' 'OrdinalDate' 2016 32
-- 57419
-- @
--
-- @
-- > 'ModifiedJulianDay' 57419 '^.' 'ordinalDate'
-- 'OrdinalDate' {odYear = 2016, odDay = 32}
-- @
{-# INLINE ordinalDate #-}
ordinalDate :: Iso' Day OrdinalDate
ordinalDate = iso toOrd fromOrd where
    -- Brief description of the toOrd computation
    --
    -- The length of the years in the Gregorian calendar is periodic with period
    -- of /400/ years. There are /100 - 4 + 1 = 97/ leap years in a period, so
    -- the average length of a year is /365 + 97\/400 = 146097\/400/ days.
    --
    -- Now, if you consider these — let's call them nominal — years,
    -- then for any point in time, for any linear day number we can
    -- determine which nominal year does it fall into by a single
    -- division. Moreover, if we align the start of the calendar year /1/
    -- with the start of the nominal year /1/, then the calendar years and
    -- nominal years never get too much out of sync. Specifically:
    --
    -- * The start of the first day of a calendar year might fall into the
    -- preceding nominal year, but never more than by /1.5/ days (/591\/400/
    -- days, to be precise).
    --
    -- * The start of the last day of a calendar year always falls into
    -- its nominal year (even for the leap years).
    --
    -- So, to find out the calendar year for a given day, we calculate
    -- on which nominal year does its start fall. And, if we are not too
    -- close to the end of year, we have the right calendar
    -- year. Otherwise, we just check whether it falls within the next
    -- calendar year.
    --
    -- Notes: to make the reasoning simpler and more efficient ('quot' is
    -- faster than 'div') we do the computation directly only for positive
    -- years (days after /0001-01-01/). For earlier dates we translate by an
    -- integral number of /400/ year periods, do the computation and
    -- translate back.
    {-# INLINEABLE toOrd #-}
    toOrd :: Day -> OrdinalDate
    toOrd (ModifiedJulianDay mjd)
        -- 678575 rebases the MJD day count to one based at 0001-01-01.
        | dayB0 <= 0 = case toOrdB0 dayInQC of
            OrdinalDate y yd -> OrdinalDate (y + quadCent * 400) yd
        | otherwise = toOrdB0 dayB0
      where
        dayB0 = mjd + 678575
        (quadCent, dayInQC) = dayB0 `divMod` 146097
    -- Input: days since 0001-01-01. Precondition: has to be positive!
    {-# INLINE toOrdB0 #-}
    toOrdB0 :: Int -> OrdinalDate
    toOrdB0 dayB0 = res
      where
        (y0, r) = (400 * dayB0) `quotRem` 146097
        d0 = dayInYear y0 dayB0
        d1 = dayInYear (y0 + 1) dayB0
        -- Near a nominal-year boundary the day may belong to the next
        -- calendar year; d1 > 0 detects that case.
        res = if r > 146097 - 600 && d1 > 0
            then OrdinalDate (y0 + 1 + 1) d1
            else OrdinalDate (y0 + 1) d0
    -- Input: (year - 1) (day as days since 0001-01-01)
    -- Precondition: year is positive!
    {-# INLINE dayInYear #-}
    dayInYear :: Int -> Int -> Int
    dayInYear y0 dayB0 = dayB0 - 365 * y0 - leaps + 1
      where
        leaps = y0 `shiftR` 2 - centuries + centuries `shiftR` 2
        centuries = y0 `quot` 100
    {-# INLINEABLE fromOrd #-}
    fromOrd :: OrdinalDate -> Day
    fromOrd (OrdinalDate year yd) = ModifiedJulianDay mjd where
        years = year - 1
        centuries = years `div` 100
        leaps = years `shiftR` 2 - centuries + centuries `shiftR` 2
        -- Improper iso: an out-of-range day-of-year is clamped into the
        -- valid range for the year rather than rejected.
        mjd = 365 * years + leaps - 678576
            + clip 1 (if isLeapYear year then 366 else 365) yd
        clip a b = max a . min b
------------------------------------------------------------------------
-- Lookup tables for Data.Thyme.Calendar.MonthDay
{-# NOINLINE monthLengths #-}
{-# NOINLINE monthLengthsLeap #-}
-- | Days per month for normal and leap years, January first.
monthLengths, monthLengthsLeap :: VU.Vector Days
monthLengths = VU.fromList [31,28,31,30,31,30,31,31,30,31,30,31]
monthLengthsLeap = VU.fromList [31,29,31,30,31,30,31,31,30,31,30,31]
                             -- J  F  M  A  M  J  J  A  S  O  N  D
{-# ANN monthDays "HLint: ignore Use fromMaybe" #-}
{-# NOINLINE monthDays #-}
-- | Lookup table: zero-based day-of-year -> (month, day-of-month), normal year.
monthDays :: VU.Vector ({-Month-}Int8, {-DayOfMonth-}Int8)
monthDays = VU.generate 365 go where
    -- dom01: cumulative days before the 1st of each month.
    dom01 = VU.prescanl' (+) 0 monthLengths
    go yd = (fromIntegral m, fromIntegral d) where
        m = maybe 12 id $ VU.findIndex (yd <) dom01
        d = succ yd - VU.unsafeIndex dom01 (pred m)
{-# ANN monthDaysLeap "HLint: ignore Use fromMaybe" #-}
{-# NOINLINE monthDaysLeap #-}
-- | As 'monthDays', for leap years (366 entries).
monthDaysLeap :: VU.Vector ({-Month-}Int8, {-DayOfMonth-}Int8)
monthDaysLeap = VU.generate 366 go where
    dom01 = VU.prescanl' (+) 0 monthLengthsLeap
    go yd = (fromIntegral m, fromIntegral d) where
        m = maybe 12 id $ VU.findIndex (yd <) dom01
        d = succ yd - VU.unsafeIndex dom01 (pred m)
-- | No good home for this within the current hierarchy. This will do.
-- Draw a random value in the iso's underlying representation, then view
-- the result back through the iso.
{-# INLINEABLE randomIsoR #-}
randomIsoR :: (Random s, RandomGen g) => Iso' s a -> (a, a) -> g -> (a, g)
randomIsoR l (x, y) = first (^. l) . randomR (l # x, l # y)
------------------------------------------------------------------------
-- | A strict pair of a 'Month' and a 'DayOfMonth'.
data MonthDay = MonthDay
    { mdMonth :: {-# UNPACK #-}!Month
    , mdDay :: {-# UNPACK #-}!DayOfMonth
    } deriving (INSTANCES_USUAL, Show)
LENS(MonthDay,mdMonth,Month)
LENS(MonthDay,mdDay,DayOfMonth)
instance Hashable MonthDay
instance NFData MonthDay
instance Bounded MonthDay where
    minBound = MonthDay 1 1
    maxBound = MonthDay 12 31
instance Random MonthDay where
    -- First flip a coin for leap-ness of the year, then draw through the
    -- corresponding 'monthDay' iso so Feb 29 is only possible half the time.
    randomR r g = randomIsoR (monthDay leap) r g' where
        (isLeapYear -> leap, g') = random g
    random = randomR (minBound, maxBound)
instance Arbitrary MonthDay where
    arbitrary = choose (minBound, maxBound)
    -- Shrink through the leap-year ordinal so Feb 29 stays representable.
    shrink md = view (monthDay True) <$> shrink (monthDay True # md)
instance CoArbitrary MonthDay where
    coarbitrary (MonthDay m d) = coarbitrary m . coarbitrary d
-- | Predicated on whether or not it's a leap year, convert between an
-- ordinal 'DayOfYear' and the corresponding 'Month' and 'DayOfMonth'.
--
-- @
-- > 60 '^.' 'monthDay' ('isLeapYear' 2015)
-- 'MonthDay' {'mdMonth' = 3, 'mdDay' = 1}
-- @
--
-- @
-- > 60 '^.' 'monthDay' ('isLeapYear' 2016)
-- 'MonthDay' {'mdMonth' = 2, 'mdDay' = 29}
-- @
--
-- @
-- > 'monthDay' ('isLeapYear' 2016) 'Control.Lens.#' 'MonthDay' 2 29
-- 60
-- @
--
-- @
-- > 'monthDay' ('isLeapYear' 2015) 'Control.Lens.#' 'MonthDay' 2 28
-- 59
-- @
--
-- Note that 'monthDay' is an improper 'Iso', as the following example
-- shows. To handle this case correctly, use 'monthDayValid'.
--
-- @
-- > 'monthDay' ('isLeapYear' 2015) 'Control.Lens.#' 'MonthDay' 2 29
-- 59
-- @
{-# INLINE monthDay #-}
monthDay
    :: Bool -- ^ 'isLeapYear'?
    -> Iso' DayOfYear MonthDay
monthDay leap = iso fromOrdinal toOrdinal where
    (lastDay, lengths, table, ok) = if leap
        then (365, monthLengthsLeap, monthDaysLeap, -1)
        else (364, monthLengths, monthDays, -2)
    -- Clamp the (zero-based) day-of-year into range, then table lookup.
    {-# INLINE fromOrdinal #-}
    fromOrdinal :: DayOfYear -> MonthDay
    fromOrdinal (max 0 . min lastDay . pred -> i) = MonthDay m d where
        (fromIntegral -> m, fromIntegral -> d) = VU.unsafeIndex table i
    -- Closed-form cumulative day count @div (367*m - 362) 12@, corrected
    -- by @ok@ for months after February; month and day are clamped first
    -- (improper iso: invalid input is clamped, not rejected).
    {-# INLINE toOrdinal #-}
    toOrdinal :: MonthDay -> DayOfYear
    toOrdinal (MonthDay month day) = div (367 * m - 362) 12 + k + d where
        m = max 1 . min 12 $ month
        l = VU.unsafeIndex lengths (pred m)
        d = max 1 . min l $ day
        k = if m <= 2 then 0 else ok
-- | Predicated on whether or not it's a leap year, convert a 'MonthDay' to
-- an ordinal 'DayOfYear'.
--
-- @
-- > 'monthDayValid' ('isLeapYear' 2016) ('MonthDay' 2 29)
-- 'Just' 60
-- @
--
-- @
-- > 'monthDayValid' ('isLeapYear' 2015) ('MonthDay' 2 29)
-- 'Nothing'
-- @
{-# INLINEABLE monthDayValid #-}
-- 'guard' yields 'Nothing' unless month and day are both in range for the
-- (possibly leap) year; otherwise the conversion result is returned.
monthDayValid
    :: Bool -- ^ 'isLeapYear'?
    -> MonthDay
    -> Maybe DayOfYear
monthDayValid leap md@(MonthDay m d) = monthDay leap # md
    <$ guard (1 <= m && m <= 12 && 1 <= d && d <= monthLength leap m)
-- | Predicated on whether or not the year is a leap year, return the number
-- of 'Days' in the given 'Month'.
--
-- @
-- > monthLength ('isLeapYear' 2015) 2
-- 28
-- @
--
-- @
-- > monthLength ('isLeapYear' 2016) 2
-- 29
-- @
{-# INLINEABLE monthLength #-}
-- Clamps the month into 1..12 before the (unchecked) vector index.
monthLength
    :: Bool -- ^ 'isLeapYear'?
    -> Month
    -> Days
monthLength leap = VU.unsafeIndex ls . max 0 . min 11 . pred where
    ls = if leap then monthLengthsLeap else monthLengths
------------------------------------------------------------------------
-- | Week of the year. Plain 'Int' alias — no newtype safety.
--
-- Meaning of values depends on context; see 'wdWeek', 'swWeek', 'mwWeek'.
type WeekOfYear = Int
-- | Day of the week.
--
-- [/0/] /Sunday/ for 'SundayWeek'
--
-- [/1/…/6/] /Monday/…/Saturday/
--
-- [/7/] /Sunday/ for 'WeekDate', 'MondayWeek', and 'Data.Thyme.Calendar.WeekdayOfMonth.WeekdayOfMonth'
type DayOfWeek = Int
-- | <https://en.wikipedia.org/wiki/ISO_week_date ISO 8601 Week Date>.
--
-- Note that week /01/ is defined as the week with the first Thursday, thus
-- 'wdYear' may differ from the Gregorian year between /December 29th/ and
-- /January 3rd/.
data WeekDate = WeekDate
    { wdYear :: {-# UNPACK #-}!Year
    , wdWeek :: {-# UNPACK #-}!WeekOfYear
    -- ^ Numbered /01/ to /53/. Days before week /01/ are considered to
    -- belong to the previous year.
    , wdDay :: {-# UNPACK #-}!DayOfWeek
    -- ^ /1 = Monday/ … /7 = Sunday/.
    } deriving (INSTANCES_USUAL, Show)

-- LENS(…) and INSTANCES_USUAL are CPP macros — presumably generating a
-- field lens and the standard deriving list; see their definitions
-- elsewhere in this package (TODO confirm location).
LENS(WeekDate,wdYear,Year)
LENS(WeekDate,wdWeek,WeekOfYear)
LENS(WeekDate,wdDay,DayOfWeek)

instance Hashable WeekDate
instance NFData WeekDate
-- | Convert between a 'Day' and an ISO 8601 'WeekDate'.
--
-- @
-- > 'YearMonthDay' 2016 1 1 '^.' 'from' 'gregorian' '.' 'weekDate'
-- 'WeekDate' {'wdYear' = 2015, 'wdWeek' = 53, 'wdDay' = 5}
-- @
{-# INLINE weekDate #-}
weekDate :: Iso' Day WeekDate
weekDate = iso toWeek fromWeek where
    {-# INLINEABLE toWeek #-}
    toWeek :: Day -> WeekDate
    -- The worker wants the date both in ordinal form and as a raw 'Day',
    -- so hand it the same day twice.
    toWeek day = toWeekOrdinal (view ordinalDate day) day
    {-# INLINEABLE fromWeek #-}
    fromWeek :: WeekDate -> Day
    -- The highest valid week of the target year controls how the week
    -- number gets clipped on the way back.
    fromWeek wd = fromWeekLast (lastWeekOfYear (wdYear wd)) wd
{-# INLINE toWeekOrdinal #-}
toWeekOrdinal :: OrdinalDate -> Day -> WeekDate
-- Worker for 'weekDate': both the ordinal form and the raw modified
-- Julian day of the same date are passed in so neither is recomputed.
toWeekOrdinal (OrdinalDate y0 yd) (ModifiedJulianDay mjd) =
    WeekDate y1 (w1 + 1) (d7mod + 1) where
    -- pilfered and refactored; no idea what foo and bar mean
    -- NOTE(review): the +2 offset appears to align divMod so that d7mod
    -- is the 0-based ISO weekday (Monday = 0) — confirm against the MJD
    -- epoch (MJD 0 = 1858-11-17, a Wednesday).
    d = mjd + 2
    (d7div, d7mod) = divMod d 7
    -- foo/bar compute a 0-based week number relative to this date's week.
    foo :: Year -> {-WeekOfYear-1-}Int
    foo y = bar $ ordinalDate # OrdinalDate y 6
    bar :: Day -> {-WeekOfYear-1-}Int
    bar (ModifiedJulianDay k) = d7div - div k 7
    w0 = bar $ ModifiedJulianDay (d - yd + 4)
    -- Week -1 of this year is really the last week of the previous year;
    -- week 52 may already be week 0 of the next year (ISO years have 52
    -- or 53 weeks).
    (y1, w1) = case w0 of
        -1 -> (y0 - 1, foo (y0 - 1))
        52 | foo (y0 + 1) == 0 -> (y0 + 1, 0)
        _ -> (y0, w0)
{-# INLINE lastWeekOfYear #-}
lastWeekOfYear :: Year -> WeekOfYear
-- A year has 53 ISO weeks exactly when its 365th day still falls in week
-- 53; otherwise it has 52.
lastWeekOfYear y
    | wdWeek wd == 53 = 53
    | otherwise       = 52
  where
    wd = OrdinalDate y 365 ^. from ordinalDate . weekDate
{-# INLINE fromWeekLast #-}
fromWeekLast :: WeekOfYear -> WeekDate -> Day
-- Inverse worker for 'weekDate': given the highest valid week number of
-- the year, clip week and day into range and compute the modified Julian
-- day directly.
fromWeekLast wMax (WeekDate y w d) = ModifiedJulianDay mjd where
    -- pilfered and refactored
    -- k is the MJD of day 6 of year y; k - mod k 7 - 10 then lands on a
    -- fixed anchor before week 1 — NOTE(review): inferred from the
    -- arithmetic, confirm against the ISO 8601 week-date definition.
    ModifiedJulianDay k = ordinalDate # OrdinalDate y 6
    mjd = k - mod k 7 - 10 + clip 1 7 d + clip 1 wMax w * 7
    clip a b = max a . min b
-- | Convert a 'WeekDate' to a 'Day', or 'Nothing' for invalid 'WeekDate'.
{-# INLINEABLE weekDateValid #-}
weekDateValid :: WeekDate -> Maybe Day
-- Valid means: weekday in [1 .. 7] and week between 1 and the year's
-- actual number of ISO weeks (52 or 53).
weekDateValid wd@(WeekDate y w d)
    | 1 <= d && d <= 7 && 1 <= w && w <= wMax = Just (fromWeekLast wMax wd)
    | otherwise                               = Nothing
  where
    wMax = lastWeekOfYear y
-- | Shows a 'Day' using the @yyyy-Www-d@ ISO 8601 Week Date format.
--
-- @
-- > 'showWeekDate' ('gregorian' 'Control.Lens.#' 'YearMonthDay' 2006 11 15)
-- "2006-W46-3"
-- @
{-# INLINEABLE showWeekDate #-}
showWeekDate :: Day -> String
-- Build the string inside-out: weekday, then "-W" and the 0-padded week,
-- then the (possibly negative or wide) year via 'showsYear'.
showWeekDate day = showsYear y ("-W" ++ shows02 w ('-' : show d)) where
    WeekDate y w d = view weekDate day
------------------------------------------------------------------------
-- | Week-based calendar date with the first /Sunday/ of the year as the first
-- day of week /01/. This corresponds to @%U@ and @%w@ of
-- @<http://www.gnu.org/software/libc/manual/html_node/Formatting-Calendar-Time.html#index-strftime strftime(3)>@.
--
-- The final week of a given year and week /00/ of the next both refer to
-- the same week.
data SundayWeek = SundayWeek
    { swYear :: {-# UNPACK #-}!Year
    -- ^ Coincides with that of 'gregorian'.
    , swWeek :: {-# UNPACK #-}!WeekOfYear
    -- ^ Weeks numbered from /00/ to /53/, starting with the first
    -- /Sunday/ of the year as the first day of week /01/.
    , swDay :: {-# UNPACK #-}!DayOfWeek
    -- ^ /0 = Sunday/.
    } deriving (INSTANCES_USUAL, Show)

-- LENS(…) and INSTANCES_USUAL are CPP macros; see 'WeekDate' above.
LENS(SundayWeek,swYear,Year)
LENS(SundayWeek,swWeek,WeekOfYear)
LENS(SundayWeek,swDay,DayOfWeek)

instance Hashable SundayWeek
instance NFData SundayWeek
-- | Conversion between 'Day' and 'SundayWeek'.
--
-- @
-- > 'YearMonthDay' 2016 1 3 '^.' 'from' 'gregorian' '.' 'sundayWeek'
-- 'SundayWeek' {'swYear' = 2016, 'swWeek' = 1, 'swDay' = 0}
-- @
{-# INLINE sundayWeek #-}
sundayWeek :: Iso' Day SundayWeek
sundayWeek = iso toSunday fromSunday where
    {-# INLINEABLE toSunday #-}
    toSunday :: Day -> SundayWeek
    -- 'join' passes the same 'Day' twice: once converted to an ordinal
    -- date, once raw, as 'toSundayOrdinal' wants both.
    toSunday = join (toSundayOrdinal . view ordinalDate)
    {-# INLINEABLE fromSunday #-}
    fromSunday :: SundayWeek -> Day
    -- No validation here: out-of-range weeks/days simply spill past the
    -- year boundary.  See 'sundayWeekValid' for the checked version.
    fromSunday (SundayWeek y w d) = ModifiedJulianDay (firstDay + yd) where
        ModifiedJulianDay firstDay = ordinalDate # OrdinalDate y 1
        -- following are all 0-based year days
        -- Days with mjd ≡ 4 (mod 7) are Sundays (MJD 4 = 1858-11-21, a
        -- Sunday), so this is the year day of the year's first Sunday.
        firstSunday = mod (4 - firstDay) 7
        yd = firstSunday + 7 * (w - 1) + d
{-# INLINE toSundayOrdinal #-}
toSundayOrdinal :: OrdinalDate -> Day -> SundayWeek
-- Worker for 'sundayWeek'.  d = mjd + 3 aligns divMod so that d7mod is
-- the 0-based weekday with Sunday = 0 (MJD 4 was a Sunday, and
-- (4 + 3) `mod` 7 == 0); d7div - div k 7 counts whole weeks since the
-- week containing January 1st (k is the MJD offset of day 0 of the year).
toSundayOrdinal (OrdinalDate y yd) (ModifiedJulianDay mjd) =
    SundayWeek y (d7div - div k 7) d7mod where
    d = mjd + 3
    k = d - yd
    (d7div, d7mod) = divMod d 7
-- | Convert a 'SundayWeek' to a 'Day', or 'Nothing' for invalid 'SundayWeek'.
{-# INLINEABLE sundayWeekValid #-}
sundayWeekValid :: SundayWeek -> Maybe Day
-- Same arithmetic as 'sundayWeek'\'s fromSunday, but additionally checks
-- that the weekday is in [0 .. 6] and the resulting 0-based year day
-- falls inside the year.
sundayWeekValid (SundayWeek y w d) = ModifiedJulianDay (firstDay + yd)
    <$ guard (0 <= d && d <= 6 && 0 <= yd && yd <= lastDay) where
    ModifiedJulianDay firstDay = ordinalDate # OrdinalDate y 1
    -- following are all 0-based year days
    firstSunday = mod (4 - firstDay) 7
    yd = firstSunday + 7 * (w - 1) + d
    -- Largest valid 0-based year day: 365 in leap years, 364 otherwise.
    lastDay = if isLeapYear y then 365 else 364
------------------------------------------------------------------------
-- | Week-based calendar date with the first /Monday/ of the year as the first
-- day of week /01/. This corresponds to @%W@ and @%u@ of
-- @<http://www.gnu.org/software/libc/manual/html_node/Formatting-Calendar-Time.html#index-strftime strftime(3)>@.
--
-- The final week of a given year and week /00/ of the next both refer to
-- the same week.
data MondayWeek = MondayWeek
    { mwYear :: {-# UNPACK #-}!Year
    -- ^ Coincides with that of 'gregorian'.
    , mwWeek :: {-# UNPACK #-}!WeekOfYear
    -- ^ Weeks numbered from /00/ to /53/, starting with the first
    -- /Monday/ of the year as the first day of week /01/.
    , mwDay :: {-# UNPACK #-}!DayOfWeek
    -- ^ /7 = Sunday/.
    } deriving (INSTANCES_USUAL, Show)

-- LENS(…) and INSTANCES_USUAL are CPP macros; see 'WeekDate' above.
LENS(MondayWeek,mwYear,Year)
LENS(MondayWeek,mwWeek,WeekOfYear)
LENS(MondayWeek,mwDay,DayOfWeek)

instance Hashable MondayWeek
instance NFData MondayWeek
-- | Conversion between 'Day' and 'MondayWeek'.
--
-- @
-- > 'YearMonthDay' 2016 1 3 '^.' 'from' 'gregorian' '.' 'mondayWeek'
-- 'MondayWeek' {'mwYear' = 2016, 'mwWeek' = 0, 'mwDay' = 7}
-- @
{-# INLINE mondayWeek #-}
mondayWeek :: Iso' Day MondayWeek
mondayWeek = iso toMonday fromMonday where
    {-# INLINEABLE toMonday #-}
    toMonday :: Day -> MondayWeek
    -- 'join' passes the same 'Day' twice: once converted to an ordinal
    -- date, once raw, as 'toMondayOrdinal' wants both.
    toMonday = join (toMondayOrdinal . view ordinalDate)
    {-# INLINEABLE fromMonday #-}
    fromMonday :: MondayWeek -> Day
    -- No validation here: out-of-range weeks/days simply spill past the
    -- year boundary.  See 'mondayWeekValid' for the checked version.
    fromMonday (MondayWeek y w d) = ModifiedJulianDay (firstDay + yd) where
        ModifiedJulianDay firstDay = ordinalDate # OrdinalDate y 1
        -- following are all 0-based year days
        -- Days with mjd ≡ 5 (mod 7) are Mondays (MJD 5 = 1858-11-22, a
        -- Monday), so this is the year day of the year's first Monday.
        firstMonday = mod (5 - firstDay) 7
        -- d runs 1..7 here, hence the trailing "- 1".
        yd = firstMonday + 7 * (w - 1) + d - 1
{-# INLINE toMondayOrdinal #-}
toMondayOrdinal :: OrdinalDate -> Day -> MondayWeek
-- Worker for 'mondayWeek'.  d = mjd + 2 aligns divMod so that d7mod is
-- the 0-based weekday with Monday = 0 (MJD 5 was a Monday, and
-- (5 + 2) `mod` 7 == 0); the final +1 yields /1 = Monday/ … /7 = Sunday/.
-- d7div - div k 7 counts whole weeks since the week containing Jan 1st.
toMondayOrdinal (OrdinalDate y yd) (ModifiedJulianDay mjd) =
    MondayWeek y (d7div - div k 7) (d7mod + 1) where
    d = mjd + 2
    k = d - yd
    (d7div, d7mod) = divMod d 7
-- | Convert a 'MondayWeek' to a 'Day', or 'Nothing' for invalid 'MondayWeek'.
{-# INLINEABLE mondayWeekValid #-}
mondayWeekValid :: MondayWeek -> Maybe Day
-- Same arithmetic as 'mondayWeek'\'s fromMonday, but additionally checks
-- that the weekday is in [1 .. 7] and the resulting 0-based year day
-- falls inside the year.
mondayWeekValid (MondayWeek y w d) = ModifiedJulianDay (firstDay + yd)
    <$ guard (1 <= d && d <= 7 && 0 <= yd && yd <= lastDay) where
    ModifiedJulianDay firstDay = ordinalDate # OrdinalDate y 1
    -- following are all 0-based year days
    firstMonday = mod (5 - firstDay) 7
    yd = firstMonday + 7 * (w - 1) + d - 1
    -- Largest valid 0-based year day: 365 in leap years, 364 otherwise.
    lastDay = if isLeapYear y then 365 else 364
------------------------------------------------------------------------
-- Unbox instances at the end avoids TH-related declaration order issues

-- 'Day' unboxes to its modified Julian day number.
derivingUnbox "Day" [t| Day -> Int |]
    [| toModifiedJulianDay |] [| ModifiedJulianDay |]

-- Bit layout: year in bits 9 and up, month in bits 5-8 (mask 0xf),
-- day in bits 0-4 (mask 0x1f).
derivingUnbox "YearMonthDay" [t| YearMonthDay -> Int |]
    [| \ YearMonthDay {..} -> shiftL ymdYear 9 .|. shiftL ymdMonth 5 .|. ymdDay |]
    [| \ n -> YearMonthDay (shiftR n 9) (shiftR n 5 .&. 0xf) (n .&. 0x1f) |]

-- Bit layout: year in bits 9 and up, ordinal day in bits 0-8 (mask 0x1ff).
derivingUnbox "OrdinalDate" [t| OrdinalDate -> Int |]
    [| \ OrdinalDate {..} -> shiftL odYear 9 .|. odDay |]
    [| \ n -> OrdinalDate (shiftR n 9) (n .&. 0x1ff) |]

-- Bit layout: month in bits 5 and up, day in bits 0-4 (mask 0x1f).
derivingUnbox "MonthDay" [t| MonthDay -> Int |]
    [| \ MonthDay {..} -> shiftL mdMonth 5 .|. mdDay |]
    [| \ n -> MonthDay (shiftR n 5) (n .&. 0x1f) |]

-- Shared bit layout for the three week types: year in bits 9 and up,
-- week in bits 3-8 (mask 0x3f, room for 0..53), day in bits 0-2
-- (mask 0x7, room for 0..7).
derivingUnbox "WeekDate" [t| WeekDate -> Int |]
    [| \ WeekDate {..} -> shiftL wdYear 9 .|. shiftL wdWeek 3 .|. wdDay |]
    [| \ n -> WeekDate (shiftR n 9) (shiftR n 3 .&. 0x3f) (n .&. 0x7) |]
derivingUnbox "SundayWeek" [t| SundayWeek -> Int |]
    [| \ SundayWeek {..} -> shiftL swYear 9 .|. shiftL swWeek 3 .|. swDay |]
    [| \ n -> SundayWeek (shiftR n 9) (shiftR n 3 .&. 0x3f) (n .&. 0x7) |]
derivingUnbox "MondayWeek" [t| MondayWeek -> Int |]
    [| \ MondayWeek {..} -> shiftL mwYear 9 .|. shiftL mwWeek 3 .|. mwDay |]
    [| \ n -> MondayWeek (shiftR n 9) (shiftR n 3 .&. 0x3f) (n .&. 0x7) |]
| liyang/thyme | src/Data/Thyme/Calendar/Internal.hs | bsd-3-clause | 24,815 | 0 | 16 | 5,108 | 4,757 | 2,705 | 2,052 | -1 | -1 |
{-# OPTIONS_GHC -fbang-patterns #-}
-- Copyright (c) 2008 Stephen C. Harris.
-- See COPYING file at the root of this distribution for copyright information.
module HMQ.RowValueExtractors where
import Database.HDBC
import HMQ.Query
-- | One result row as delivered by HDBC.
type Row = [SqlValue]

-- | A row value extractor for a single table: given the table's alias and
-- the query it appears in, pull a value of type @a@ out of a row
-- ('Nothing' presumably signals an absent/NULL entity — TODO confirm
-- against the callers).
type EntityRowValueExtractor a = TableAlias -> Query -> Row -> Maybe a

-- | A row value extractor for a mapped query: like
-- 'EntityRowValueExtractor' but scoped to the whole query rather than a
-- single table alias.
type QueryRowValueExtractor t = Query -> Row -> Maybe t
| scharris/hmq | RowValueExtractors.hs | bsd-3-clause | 480 | 0 | 8 | 85 | 70 | 43 | 27 | 7 | 0 |
{-# Language OverloadedStrings #-}
{-|
Module : Client.View.UrlSelection
Description : URL selection module
Copyright : (c) Eric Mertens, 2016
License : ISC
Maintainer : emertens@gmail.com
This module provides a list of the URLs found in the current message
window in order to assist in selecting one to open with @/url@
-}
module Client.View.UrlSelection
( urlSelectionView
) where
import Client.Configuration
import Client.Image.Message
import Client.Image.PackedImage
import Client.Image.Palette
import Client.Image.LineWrap
import Client.Message
import Client.State
import Client.State.Focus
import Client.State.Window
import Control.Lens
import Data.HashMap.Strict (HashMap)
import Data.Text (Text)
import Graphics.Vty.Attributes
import Irc.Identifier
import Text.Read (readMaybe)
-- | Generate the lines used for the view when typing @/url@
urlSelectionView ::
  Int         {- ^ render width        -} ->
  Focus       {- ^ window to search    -} ->
  String      {- ^ argument to command -} ->
  ClientState {- ^ client state        -} ->
  [Image']    {- ^ image lines         -}
urlSelectionView w focus arg st
  = concat
  $ zipWith (draw w hilites pal padding selected) [1..] (toListOf urled st)
  where
    -- Traversal over every (sender, url) pair found in the messages of
    -- the window identified by 'focus'.
    urled = clientWindows . ix focus
          . winMessages . each
          . folding matches

    focused = focus == view clientFocus st

    -- 1-based index of the entry to highlight; 0 highlights nothing.
    -- With no argument typed yet (empty/only spaces) the first URL is
    -- preselected; a non-numeric argument selects nothing.
    selected
      | not focused             = 0
      | all (==' ') arg         = 1
      | Just i <- readMaybe arg = i
      | otherwise               = 0 -- won't match

    cfg     = view clientConfig st
    padding = view configNickPadding cfg
    pal     = view configPalette cfg
    hilites = clientHighlightsFocus focus st

    -- Pair each URL found in a message body with that message's sender
    -- (if the summary names one).
    matches :: WindowLine -> [(Maybe Identifier, Text)]
    matches wl = [ (views wlSummary summaryActor wl, url) | url <- views wlText urlMatches wl ]
-- | Render one line of the url list
draw ::
  Int                          {- ^ rendered width      -} ->
  HashMap Identifier Highlight {- ^ highlights          -} ->
  Palette                      {- ^ palette             -} ->
  PaddingMode                  {- ^ nick render padding -} ->
  Int                          {- ^ selected index      -} ->
  Int                          {- ^ url index           -} ->
  (Maybe Identifier, Text)     {- ^ sender and url text -} ->
  [Image']                     {- ^ rendered lines      -}
draw w hilites pal padding selected i (who,url)
  -- Prefix is "<i>. <sender>: "; the URL body is line-wrapped to width w
  -- under that prefix, and the wrapped lines are reversed to match the
  -- view's line order.
  = reverse
  $ lineWrapPrefix w
      (string defAttr (shows i ". ") <>
       nickPad padding
         -- Sender nick gets its usual per-identifier colour; 'foldMap'
         -- renders nothing when the sender is unknown ('Nothing').
         (foldMap (coloredIdentifier pal NormalIdentifier hilites) who) <> ": ")
      (text' attr (cleanText url))
  where
    -- The currently selected entry is shown in reverse video.
    attr | selected == i = withStyle defAttr reverseVideo
         | otherwise     = defAttr
| glguy/irc-core | src/Client/View/UrlSelection.hs | isc | 2,933 | 0 | 14 | 1,004 | 584 | 315 | 269 | 60 | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.